diff --git a/.github/scripts/upload.py b/.github/scripts/upload.py index 565c71b3ca..05e158052b 100644 --- a/.github/scripts/upload.py +++ b/.github/scripts/upload.py @@ -64,6 +64,8 @@ def sendMessage(message, user_id = BOT_TARGET, entities = None) -> int: "text": message, "entities": entities } + print(message) + print(entities) resp = requests.post(API_PREFIX + "sendMessage", json=data).json() print(resp) return int(resp["result"]["message_id"]) @@ -105,6 +107,8 @@ def sendRelease(): text += " " text += addEntity(entities, text, "bold", VERSION_NAME) text += "\n\n" + if "entities" not in admin_resp: + admin_resp["entities"] = list() resp_entities = admin_resp["entities"] for en in resp_entities: copy = en.copy() @@ -140,24 +144,21 @@ def sendCIRelease(): print(sys.argv) if len(sys.argv) != 2: print("Run Type: release, ci, debug") + sys.stdout.flush() + sys.stderr.flush() exit(1) mode = sys.argv[1] - try: - if mode == "release": - sendRelease() - elif mode == "ci": - if COMMIT_MESSAGE.startswith("ci"): - CI_CHANNEL_ID = BOT_TARGET - sendCIRelease() - elif mode == "debug": - APK_CHANNEL_ID = "@test_channel_nekox" - UPDATE_CHANNEL_ID = "@test_channel_nekox" - UPDATE_METADATA_CHANNEL_ID = "@test_channel_nekox" - sendRelease() - else: - print("unknown mode") - exit(1) - except Exception as e: - print(e) - exit(1) + if mode == "release": + sendRelease() + elif mode == "ci": + if COMMIT_MESSAGE.startswith("ci"): + CI_CHANNEL_ID = BOT_TARGET + sendCIRelease() + elif mode == "debug": + APK_CHANNEL_ID = "@test_channel_nekox" + UPDATE_CHANNEL_ID = "@test_channel_nekox" + UPDATE_METADATA_CHANNEL_ID = "@test_channel_nekox" + sendRelease() + else: + print("unknown mode") diff --git a/TMessagesProj/build.gradle b/TMessagesProj/build.gradle index 122ee11504..124cbf42fb 100644 --- a/TMessagesProj/build.gradle +++ b/TMessagesProj/build.gradle @@ -3,12 +3,12 @@ import cn.hutool.core.util.RuntimeUtil apply plugin: "com.android.application" apply plugin: "kotlin-android" -def verName = "9.3.3" -def verCode = 1125 +def verName = "9.5.6" +def verCode = 1126 -def officialVer = "9.3.3" -def officialCode = 3026 +def officialVer = "9.5.6" +def officialCode = 3237 def serviceAccountCredentialsFile = rootProject.file("service_account_credentials.json") @@ -69,8 +69,8 @@ def nativeTarget = System.getenv("NATIVE_TARGET") if (nativeTarget == null) nativeTarget = "" android { - compileSdkVersion 32 - buildToolsVersion "32.0.0" + compileSdkVersion 33 + buildToolsVersion "33.0.0" ndkVersion rootProject.ext.ndkVersion defaultConfig.applicationId = "xyz.nextalone.nagram" @@ -313,26 +313,27 @@ def playCoreVersion = "1.10.3" dependencies { - implementation "androidx.browser:browser:1.4.0" - implementation "androidx.core:core-ktx:1.8.0" + implementation "androidx.browser:browser:1.5.0" + implementation "androidx.core:core-ktx:1.9.0" implementation "androidx.palette:palette-ktx:1.0.0" implementation "androidx.viewpager:viewpager:1.0.0" - implementation "androidx.exifinterface:exifinterface:1.3.3" + implementation "androidx.exifinterface:exifinterface:1.3.6" implementation "androidx.interpolator:interpolator:1.0.0" implementation "androidx.dynamicanimation:dynamicanimation:1.0.0" implementation "androidx.multidex:multidex:2.0.1" implementation "androidx.sharetarget:sharetarget:1.2.0" - compileOnly "org.checkerframework:checker-qual:3.16.0" - compileOnly "org.checkerframework:checker-compat-qual:2.5.5" + // just follow official + compileOnly 'org.checkerframework:checker-qual:2.5.2' + compileOnly 
'org.checkerframework:checker-compat-qual:2.5.0' // don"t change this :) //noinspection GradleDependency implementation "com.googlecode.mp4parser:isoparser:1.0.6" - implementation "com.google.code.gson:gson:2.8.8" + implementation "com.google.code.gson:gson:2.8.9" implementation "org.osmdroid:osmdroid-android:6.1.10" - implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.6.21" + implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.7.10" implementation "org.jetbrains.kotlinx:kotlinx-coroutines-android:1.6.1" implementation "com.squareup.okhttp3:okhttp:5.0.0-alpha.10" @@ -344,6 +345,7 @@ dependencies { implementation "cn.hutool:hutool-crypto:5.7.13" implementation 'cn.hutool:hutool-http:5.7.5' implementation "com.jakewharton:process-phoenix:2.1.2" + implementation 'com.google.guava:guava:31.1-android' compileOnly 'org.yaml:snakeyaml:1.29' fullImplementation 'org.yaml:snakeyaml:1.29' @@ -369,7 +371,7 @@ dependencies { releaseImplementation "com.google.android.play:core:$playCoreVersion" testImplementation "junit:junit:4.13.2" - testImplementation "androidx.test:core:1.4.0" + testImplementation "androidx.test:core:1.5.0" testImplementation "org.robolectric:robolectric:4.5.1" coreLibraryDesugaring "com.android.tools:desugar_jdk_libs:1.2.0" diff --git a/TMessagesProj/jni/CMakeLists.txt b/TMessagesProj/jni/CMakeLists.txt index d68f0812ba..f579fa547b 100644 --- a/TMessagesProj/jni/CMakeLists.txt +++ b/TMessagesProj/jni/CMakeLists.txt @@ -460,10 +460,41 @@ set_target_properties(sqlite PROPERTIES target_compile_definitions(sqlite PUBLIC NULL=0 SOCKLEN_T=socklen_t LOCALE_NOT_USED ANDROID_NDK DISABLE_IMPORTGL AVOID_TABLES ANDROID_TILE_BASED_DECODE HAVE_STRCHRNUL=0 ANDROID_ARMV6_IDCT) +#breakpad +add_library(breakpad STATIC + third_party/breakpad/src/client/linux/crash_generation/crash_generation_client.cc + third_party/breakpad/src/client/linux/handler/exception_handler.cc + third_party/breakpad/src/client/linux/handler/minidump_descriptor.cc + third_party/breakpad/src/client/linux/log/log.cc + third_party/breakpad/src/client/linux/dump_writer_common/thread_info.cc + third_party/breakpad/src/client/linux/dump_writer_common/seccomp_unwinder.cc + third_party/breakpad/src/client/linux/dump_writer_common/ucontext_reader.cc + third_party/breakpad/src/client/linux/microdump_writer/microdump_writer.cc + third_party/breakpad/src/client/linux/minidump_writer/linux_dumper.cc + third_party/breakpad/src/client/linux/minidump_writer/linux_ptrace_dumper.cc + third_party/breakpad/src/client/linux/minidump_writer/minidump_writer.cc + third_party/breakpad/src/client/minidump_file_writer.cc + third_party/breakpad/src/common/android/breakpad_getcontext.S + third_party/breakpad/src/common/convert_UTF.c + third_party/breakpad/src/common/md5.cc + third_party/breakpad/src/common/string_conversion.cc + third_party/breakpad/src/common/linux/elfutils.cc + third_party/breakpad/src/common/linux/file_id.cc + third_party/breakpad/src/common/linux/guid_creator.cc + third_party/breakpad/src/common/linux/linux_libc_support.cc + third_party/breakpad/src/common/linux/memory_mapped_file.cc + third_party/breakpad/src/common/linux/safe_readlink.cc) +set_target_properties(breakpad PROPERTIES ANDROID_ARM_MODE arm) +set_property(SOURCE third_party/breakpad/src/common/android/breakpad_getcontext.S PROPERTY LANGUAGE C) +target_include_directories(breakpad PUBLIC + third_party/breakpad/src/common/android/include + third_party/breakpad/src) + + #voip include(${CMAKE_HOME_DIRECTORY}/voip/CMakeLists.txt) -set(NATIVE_LIB "tmessages.42") 
+set(NATIVE_LIB "tmessages.43") #tmessages add_library(${NATIVE_LIB} SHARED @@ -711,7 +742,12 @@ target_sources(${NATIVE_LIB} PRIVATE third_party/libyuv/source/scale_win.cc third_party/libyuv/source/scale.cc third_party/libyuv/source/video_common.cc - third_party/libyuv/source/scale_uv.cc) + third_party/libyuv/source/scale_uv.cc + third_party/libyuv/source/rotate_lsx.cc + third_party/libyuv/source/row_lasx.cc + third_party/libyuv/source/row_lsx.cc + third_party/libyuv/source/scale_lsx.cc + third_party/libyuv/source/scale_rgb.cc) target_include_directories(${NATIVE_LIB} PUBLIC opus/include @@ -761,7 +797,8 @@ target_link_libraries(${NATIVE_LIB} EGL android OpenSLES - cpufeatures) + cpufeatures + breakpad) include(AndroidNdkModules) diff --git a/TMessagesProj/jni/TgNetWrapper.cpp b/TMessagesProj/jni/TgNetWrapper.cpp index 37dd409c59..19ed6db338 100644 --- a/TMessagesProj/jni/TgNetWrapper.cpp +++ b/TMessagesProj/jni/TgNetWrapper.cpp @@ -5,6 +5,7 @@ #include "tgnet/ConnectionsManager.h" #include "tgnet/MTProtoScheme.h" #include "tgnet/ConnectionSocket.h" +#include "tgnet/FileLog.h" JavaVM *java; jclass jclass_RequestDelegateInternal; @@ -65,10 +66,10 @@ jobject getJavaByteBuffer(JNIEnv *env, jclass c, jlong address) { static const char *NativeByteBufferClassPathName = "org/telegram/tgnet/NativeByteBuffer"; static JNINativeMethod NativeByteBufferMethods[] = { - {"native_getFreeBuffer", "(I)J", (void *) getFreeBuffer}, - {"native_limit", "(J)I", (void *) limit}, - {"native_position", "(J)I", (void *) position}, - {"native_reuse", "(J)V", (void *) reuse}, + {"native_getFreeBuffer", "(I)J", (void *) getFreeBuffer}, + {"native_limit", "(J)I", (void *) limit}, + {"native_position", "(J)I", (void *) position}, + {"native_reuse", "(J)V", (void *) reuse}, {"native_getJavaByteBuffer", "(J)Ljava/nio/ByteBuffer;", (void *) getJavaByteBuffer} }; @@ -99,12 +100,15 @@ void sendRequest(JNIEnv *env, jclass c, jint instanceNum, jlong object, jobject TL_api_request *request = new TL_api_request(); request->request = (NativeByteBuffer *) (intptr_t) object; if (onComplete != nullptr) { + DEBUG_REF("sendRequest onComplete"); onComplete = env->NewGlobalRef(onComplete); } if (onQuickAck != nullptr) { + DEBUG_REF("sendRequest onQuickAck"); onQuickAck = env->NewGlobalRef(onQuickAck); } if (onWriteToSocket != nullptr) { + DEBUG_REF("sendRequest onWriteToSocket"); onWriteToSocket = env->NewGlobalRef(onWriteToSocket); } ConnectionsManager::getInstance(instanceNum).sendRequest(request, ([onComplete, instanceNum]( @@ -262,6 +266,7 @@ checkProxy(JNIEnv *env, jclass c, jint instanceNum, jstring address, jint port, const char *secretStr = env->GetStringUTFChars(secret, 0); if (requestTimeFunc != nullptr) { + DEBUG_REF("sendRequest requestTimeFunc"); requestTimeFunc = env->NewGlobalRef(requestTimeFunc); } @@ -434,7 +439,7 @@ void init(JNIEnv *env, jclass c, jint instanceNum, jint version, jint layer, jin jstring deviceModel, jstring systemVersion, jstring appVersion, jstring langCode, jstring systemLangCode, jstring configPath, jstring logPath, jstring regId, jstring cFingerprint, jstring installerId, jstring packageId, jint timezoneOffset, jlong userId, - jboolean enablePushConnection, jboolean hasNetwork, jint networkType) { + jboolean enablePushConnection, jboolean hasNetwork, jint networkType, jint performanceClass) { const char *deviceModelStr = env->GetStringUTFChars(deviceModel, 0); const char *systemVersionStr = env->GetStringUTFChars(systemVersion, 0); const char *appVersionStr = env->GetStringUTFChars(appVersion, 0); 
@@ -459,7 +464,7 @@ void init(JNIEnv *env, jclass c, jint instanceNum, jint version, jint layer, jin std::string(cFingerprintStr), std::string(installerIdStr), std::string(packageIdStr), timezoneOffset, userId, true, enablePushConnection, - hasNetwork, networkType); + hasNetwork, networkType, performanceClass); if (deviceModelStr != 0) { env->ReleaseStringUTFChars(deviceModel, deviceModelStr); @@ -524,7 +529,7 @@ static JNINativeMethod ConnectionsManagerMethods[] = { {"native_setProxySettings", "(ILjava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;)V", (void *) setProxySettings}, {"native_getConnectionState", "(I)I", (void *) getConnectionState}, {"native_setUserId", "(IJ)V", (void *) setUserId}, - {"native_init", "(IIIILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;IJZZI)V", (void *) init}, + {"native_init", "(IIIILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;IJZZII)V", (void *) init}, {"native_setLangCode", "(ILjava/lang/String;)V", (void *) setLangCode}, {"native_setRegId", "(ILjava/lang/String;)V", (void *) setRegId}, {"native_setSystemLangCode", "(ILjava/lang/String;)V", (void *) setSystemLangCode}, @@ -569,7 +574,7 @@ extern "C" int registerNativeTgNetFunctions(JavaVM *vm, JNIEnv *env) { sizeof(ConnectionsManagerMethods[0]))) { return JNI_FALSE; } - +DEBUG_REF("RequestDelegateInternal class"); jclass_RequestDelegateInternal = (jclass) env->NewGlobalRef( env->FindClass("org/telegram/tgnet/RequestDelegateInternal")); if (jclass_RequestDelegateInternal == 0) { @@ -581,6 +586,7 @@ extern "C" int registerNativeTgNetFunctions(JavaVM *vm, JNIEnv *env) { return JNI_FALSE; } + DEBUG_REF("RequestTimeDelegate class"); jclass_RequestTimeDelegate = (jclass) env->NewGlobalRef( env->FindClass("org/telegram/tgnet/RequestTimeDelegate")); if (jclass_RequestTimeDelegate == 0) { @@ -591,6 +597,7 @@ extern "C" int registerNativeTgNetFunctions(JavaVM *vm, JNIEnv *env) { return JNI_FALSE; } + DEBUG_REF("QuickAckDelegate class"); jclass_QuickAckDelegate = (jclass) env->NewGlobalRef( env->FindClass("org/telegram/tgnet/QuickAckDelegate")); if (jclass_RequestDelegateInternal == 0) { @@ -601,6 +608,7 @@ extern "C" int registerNativeTgNetFunctions(JavaVM *vm, JNIEnv *env) { return JNI_FALSE; } + DEBUG_REF("WriteToSocketDelegate class"); jclass_WriteToSocketDelegate = (jclass) env->NewGlobalRef( env->FindClass("org/telegram/tgnet/WriteToSocketDelegate")); if (jclass_WriteToSocketDelegate == 0) { @@ -610,6 +618,7 @@ extern "C" int registerNativeTgNetFunctions(JavaVM *vm, JNIEnv *env) { if (jclass_WriteToSocketDelegate_run == 0) { return JNI_FALSE; } + DEBUG_REF("ConnectionsManager class"); jclass_ConnectionsManager = (jclass) env->NewGlobalRef( env->FindClass("org/telegram/tgnet/ConnectionsManager")); if (jclass_ConnectionsManager == 0) { diff --git a/TMessagesProj/jni/audio.c b/TMessagesProj/jni/audio.c index 445b788953..55f1ec5a3b 100644 --- a/TMessagesProj/jni/audio.c +++ b/TMessagesProj/jni/audio.c @@ -290,11 +290,13 @@ int initRecorder(const char *path, opus_int32 sampleRate) { rate = sampleRate; if (!path) { + LOGE("path is null"); return 0; } - _fileOs = fopen(path, "wb"); + _fileOs = fopen(path, "w"); if (!_fileOs) { + LOGE("error cannot open file: %s", path); return 0; } diff --git 
a/TMessagesProj/jni/ffmpeg b/TMessagesProj/jni/ffmpeg index a77521cd5d..4bc4cafaef 160000 --- a/TMessagesProj/jni/ffmpeg +++ b/TMessagesProj/jni/ffmpeg @@ -1 +1 @@ -Subproject commit a77521cd5d27e955b16e8097eecefc779ffdcb6d +Subproject commit 4bc4cafaef8a55462138d7b6f7579c1522de26dc diff --git a/TMessagesProj/jni/gifvideo.cpp b/TMessagesProj/jni/gifvideo.cpp index 8fabd4974e..5de5d4c34e 100644 --- a/TMessagesProj/jni/gifvideo.cpp +++ b/TMessagesProj/jni/gifvideo.cpp @@ -121,6 +121,7 @@ typedef struct VideoInfo { } else { attached = false; } + DEBUG_DELREF("gifvideocpp stream"); jniEnv->DeleteGlobalRef(stream); if (attached) { javaVm->DetachCurrentThread(); @@ -790,8 +791,14 @@ int readCallback(void *opaque, uint8_t *buf, int buf_size) { if (attached) { javaVm->DetachCurrentThread(); } + if (buf_size == 0) { + return AVERROR_EOF; + } int ret = (int) read(info->fd, buf, (size_t) buf_size); - return ret ? ret : AVERROR_EOF; + if (ret <= 0) { + return AVERROR_EOF; + } + return ret; } } } @@ -949,6 +956,7 @@ extern "C" JNIEXPORT jlong JNICALL Java_org_telegram_ui_Components_AnimatedFileD int ret; if (streamFileSize != 0) { info->file_size = streamFileSize; + DEBUG_REF("gifvideo.cpp new stream"); info->stream = env->NewGlobalRef(stream); info->account = account; info->fd = open(info->src, O_RDONLY, S_IRUSR); @@ -1375,6 +1383,7 @@ extern "C" JNIEXPORT jint JNICALL Java_org_telegram_ui_Components_AnimatedFileDr extern "C" jint videoOnJNILoad(JavaVM *vm, JNIEnv *env) { //av_log_set_callback(custom_log); + DEBUG_REF("gifvideo.cpp AnimatedFileDrawableStream ref"); jclass_AnimatedFileDrawableStream = (jclass) env->NewGlobalRef(env->FindClass("org/telegram/messenger/AnimatedFileDrawableStream")); if (jclass_AnimatedFileDrawableStream == 0) { return JNI_FALSE; diff --git a/TMessagesProj/jni/image.cpp b/TMessagesProj/jni/image.cpp index 3312c6a73c..cc4a33a7af 100644 --- a/TMessagesProj/jni/image.cpp +++ b/TMessagesProj/jni/image.cpp @@ -27,14 +27,17 @@ jfieldID jclass_Options_outHeight; jfieldID jclass_Options_outWidth; jint imageOnJNILoad(JavaVM *vm, JNIEnv *env) { + DEBUG_REF("image.cpp nullpointerexception class"); jclass_NullPointerException = (jclass) env->NewGlobalRef(env->FindClass("java/lang/NullPointerException")); if (jclass_NullPointerException == 0) { return JNI_FALSE; } + DEBUG_REF("image.cpp runtimeexception class"); jclass_RuntimeException = (jclass) env->NewGlobalRef(env->FindClass("java/lang/RuntimeException")); if (jclass_RuntimeException == 0) { return JNI_FALSE; } + DEBUG_REF("image.cpp bitmapfactoryoptions class"); jclass_Options = (jclass) env->NewGlobalRef(env->FindClass("android/graphics/BitmapFactory$Options")); if (jclass_Options == 0) { return JNI_FALSE; diff --git a/TMessagesProj/jni/jni.c b/TMessagesProj/jni/jni.c index 88d776cf6c..62f31ccc47 100644 --- a/TMessagesProj/jni/jni.c +++ b/TMessagesProj/jni/jni.c @@ -111,7 +111,7 @@ JNIEXPORT void Java_org_telegram_messenger_Utilities_aesCtrDecryption(JNIEnv *en (*env)->ReleaseByteArrayElements(env, iv, ivBuff, JNI_ABORT); } -JNIEXPORT void Java_org_telegram_messenger_Utilities_aesCtrDecryptionByteArray(JNIEnv *env, jclass class, jbyteArray buffer, jbyteArray key, jbyteArray iv, jint offset, jint length, jint fileOffset) { +JNIEXPORT void Java_org_telegram_messenger_Utilities_aesCtrDecryptionByteArray(JNIEnv *env, jclass class, jbyteArray buffer, jbyteArray key, jbyteArray iv, jint offset, jlong length, jint fileOffset) { unsigned char *bufferBuff = (unsigned char *) (*env)->GetByteArrayElements(env, buffer, NULL); unsigned char 
*keyBuff = (unsigned char *) (*env)->GetByteArrayElements(env, key, NULL); unsigned char *ivBuff = (unsigned char *) (*env)->GetByteArrayElements(env, iv, NULL); @@ -274,7 +274,7 @@ JNIEXPORT jlong Java_org_telegram_messenger_Utilities_getLastUsageFileTime(JNIEn struct stat attrib; stat(fileName, &attrib); jlong value; - if (attrib.st_atim.tv_sec != 0) { + if (attrib.st_atim.tv_sec > 316000000) { value = attrib.st_atim.tv_sec; } else { value = attrib.st_mtim.tv_sec; diff --git a/TMessagesProj/jni/opus/opusfile/opusfile.c b/TMessagesProj/jni/opus/opusfile/opusfile.c index 392ddb29ea..6a0d399dec 100644 --- a/TMessagesProj/jni/opus/opusfile/opusfile.c +++ b/TMessagesProj/jni/opus/opusfile/opusfile.c @@ -1083,6 +1083,9 @@ static opus_int64 op_predict_link_start(const OpusSeekRecord *_sr,int _nsr, offset2=_sr[srj].offset; /*For once, we can subtract with impunity.*/ den=gp2-gp1; + if (den == 0) { + return -1; + } ipart=gp2/den; num=offset2-offset1; OP_ASSERT(num>0); diff --git a/TMessagesProj/jni/tgnet/ApiScheme.cpp b/TMessagesProj/jni/tgnet/ApiScheme.cpp index 5ea31be64d..2c0881e048 100644 --- a/TMessagesProj/jni/tgnet/ApiScheme.cpp +++ b/TMessagesProj/jni/tgnet/ApiScheme.cpp @@ -56,6 +56,7 @@ void TL_dcOption::readParams(NativeByteBuffer *stream, int32_t instanceNum, bool cdn = (flags & 8) != 0; isStatic = (flags & 16) != 0; thisPortOnly = (flags & 32) != 0; + force_try_ipv6 = (flags & 16384) != 0; id = stream->readInt32(&error); ip_address = stream->readString(&error); port = stream->readInt32(&error); @@ -72,6 +73,7 @@ void TL_dcOption::serializeToStream(NativeByteBuffer *stream) { flags = cdn ? (flags | 8) : (flags &~ 8); flags = isStatic ? (flags | 16) : (flags &~ 16); flags = thisPortOnly ? (flags | 32) : (flags &~ 32); + flags = force_try_ipv6 ? 
(flags | 16384) : (flags &~ 16384); stream->writeInt32(flags); stream->writeInt32(id); stream->writeString(ip_address); @@ -196,19 +198,19 @@ void TL_config::readParams(NativeByteBuffer *stream, int32_t instanceNum, bool & notify_default_delay_ms = stream->readInt32(&error); push_chat_period_ms = stream->readInt32(&error); push_chat_limit = stream->readInt32(&error); - saved_gifs_limit = stream->readInt32(&error); + // saved_gifs_limit = stream->readInt32(&error); edit_time_limit = stream->readInt32(&error); revoke_time_limit = stream->readInt32(&error); revoke_pm_time_limit = stream->readInt32(&error); rating_e_decay = stream->readInt32(&error); stickers_recent_limit = stream->readInt32(&error); - stickers_faved_limit = stream->readInt32(&error); + // stickers_faved_limit = stream->readInt32(&error); channels_read_media_period = stream->readInt32(&error); if ((flags & 1) != 0) { tmp_sessions = stream->readInt32(&error); } - pinned_dialogs_count_max = stream->readInt32(&error); - pinned_infolder_count_max = stream->readInt32(&error); + // pinned_dialogs_count_max = stream->readInt32(&error); + // pinned_infolder_count_max = stream->readInt32(&error); call_receive_timeout_ms = stream->readInt32(&error); call_ring_timeout_ms = stream->readInt32(&error); call_connect_timeout_ms = stream->readInt32(&error); @@ -244,6 +246,9 @@ void TL_config::readParams(NativeByteBuffer *stream, int32_t instanceNum, bool & if ((flags & 32768) != 0) { reactions_default = std::unique_ptr(Reaction::TLdeserialize(stream, stream->readUint32(&error), instanceNum, error)); } + if ((flags & 65536) != 0) { + autologin_token = stream->readString(&error); + } } void TL_config::serializeToStream(NativeByteBuffer *stream) { @@ -271,19 +276,19 @@ void TL_config::serializeToStream(NativeByteBuffer *stream) { stream->writeInt32(notify_default_delay_ms); stream->writeInt32(push_chat_period_ms); stream->writeInt32(push_chat_limit); - stream->writeInt32(saved_gifs_limit); + // stream->writeInt32(saved_gifs_limit); stream->writeInt32(edit_time_limit); stream->writeInt32(revoke_time_limit); stream->writeInt32(revoke_pm_time_limit); stream->writeInt32(rating_e_decay); stream->writeInt32(stickers_recent_limit); - stream->writeInt32(stickers_faved_limit); + // stream->writeInt32(stickers_faved_limit); stream->writeInt32(channels_read_media_period); if ((flags & 1) != 0) { stream->writeInt32(tmp_sessions); } - stream->writeInt32(pinned_dialogs_count_max); - stream->writeInt32(pinned_infolder_count_max); + // stream->writeInt32(pinned_dialogs_count_max); + // stream->writeInt32(pinned_infolder_count_max); stream->writeInt32(call_receive_timeout_ms); stream->writeInt32(call_ring_timeout_ms); stream->writeInt32(call_connect_timeout_ms); @@ -319,6 +324,9 @@ void TL_config::serializeToStream(NativeByteBuffer *stream) { if ((flags & 32768) != 0 && reactions_default != nullptr) { reactions_default->serializeToStream(stream); } + if ((flags & 65536) != 0) { + stream->writeString(autologin_token); + } } TLObject *TL_help_getConfig::deserializeResponse(NativeByteBuffer *stream, uint32_t constructor, int32_t instanceNum, bool &error) { @@ -1057,7 +1065,7 @@ auth_Authorization *auth_Authorization::TLdeserialize(NativeByteBuffer *stream, case 0x44747e9a: result = new TL_auth_authorizationSignUpRequired(); break; - case 0x33fb7bb8: + case 0x2ea2c0d4: result = new TL_auth_authorization(); break; default: @@ -1086,9 +1094,15 @@ void TL_auth_authorizationSignUpRequired::serializeToStream(NativeByteBuffer *st void 
TL_auth_authorization::readParams(NativeByteBuffer *stream, int32_t instanceNum, bool &error) { flags = stream->readInt32(&error); + if ((flags & 2) != 0) { + otherwise_relogin_days = stream->readInt32(&error); + } if ((flags & 1) != 0) { tmp_sessions = stream->readInt32(&error); } + if ((flags & 4) != 0) { + future_auth_token = std::unique_ptr(stream->readByteArray(&error)); + } user = std::unique_ptr(User::TLdeserialize(stream, stream->readUint32(&error), instanceNum, error)); } diff --git a/TMessagesProj/jni/tgnet/ApiScheme.h b/TMessagesProj/jni/tgnet/ApiScheme.h index 8f8a046e20..fe1e9cfd99 100644 --- a/TMessagesProj/jni/tgnet/ApiScheme.h +++ b/TMessagesProj/jni/tgnet/ApiScheme.h @@ -50,6 +50,7 @@ class TL_dcOption : public TLObject { bool cdn; bool isStatic; bool thisPortOnly; + bool force_try_ipv6; int32_t id; std::string ip_address; int32_t port; @@ -105,7 +106,7 @@ class Reaction : public TLObject { class TL_config : public TLObject { public: - static const uint32_t constructor = 0x232566ac; + static const uint32_t constructor = 0xcc1a241e; int32_t flags; int32_t date; @@ -125,17 +126,17 @@ class TL_config : public TLObject { int32_t notify_default_delay_ms; int32_t push_chat_period_ms; int32_t push_chat_limit; - int32_t saved_gifs_limit; + // int32_t saved_gifs_limit; int32_t edit_time_limit; int32_t revoke_time_limit; int32_t revoke_pm_time_limit; int32_t rating_e_decay; int32_t stickers_recent_limit; - int32_t stickers_faved_limit; + // int32_t stickers_faved_limit; int32_t channels_read_media_period; int32_t tmp_sessions; - int32_t pinned_dialogs_count_max; - int32_t pinned_infolder_count_max; + // int32_t pinned_dialogs_count_max; + // int32_t pinned_infolder_count_max; int32_t call_receive_timeout_ms; int32_t call_ring_timeout_ms; int32_t call_connect_timeout_ms; @@ -153,6 +154,7 @@ class TL_config : public TLObject { int32_t lang_pack_version; int32_t base_lang_pack_version; std::unique_ptr reactions_default; + std::string autologin_token; static TL_config *TLdeserialize(NativeByteBuffer *stream, uint32_t constructor, int32_t instanceNum, bool &error); void readParams(NativeByteBuffer *stream, int32_t instanceNum, bool &error); @@ -705,10 +707,12 @@ class TL_auth_authorizationSignUpRequired : public auth_Authorization { class TL_auth_authorization : public auth_Authorization { public: - static const uint32_t constructor = 0x33fb7bb8; + static const uint32_t constructor = 0x2ea2c0d4; int32_t flags; int32_t tmp_sessions; + int32_t otherwise_relogin_days; + std::unique_ptr future_auth_token; std::unique_ptr user; void readParams(NativeByteBuffer *stream, int32_t instanceNum, bool &error); diff --git a/TMessagesProj/jni/tgnet/ConnectionsManager.cpp b/TMessagesProj/jni/tgnet/ConnectionsManager.cpp index d10e4e9315..b775eef6f5 100644 --- a/TMessagesProj/jni/tgnet/ConnectionsManager.cpp +++ b/TMessagesProj/jni/tgnet/ConnectionsManager.cpp @@ -889,10 +889,12 @@ void ConnectionsManager::onConnectionDataReceived(Connection *connection, Native if (object != nullptr) { if (datacenter->isHandshaking(connection->isMediaConnection)) { + if (LOGS_ENABLED) DEBUG_E("process handshake"); datacenter->processHandshakeResponse(connection->isMediaConnection, object, messageId); } else { - processServerResponse(object, messageId, 0, 0, connection, 0, 0); - connection->addProcessedMessageId(messageId); + if (LOGS_ENABLED) DEBUG_E("connection(%p) received incorrect unencrypted message type", connection); + connection->reconnect(); + return; } lastProtocolUsefullData = true; 
connection->setHasUsefullData(); @@ -1764,6 +1766,13 @@ int32_t ConnectionsManager::sendRequestInternal(TLObject *object, onCompleteFunc auto request = new Request(instanceNum, lastRequestToken++, connetionType, flags, datacenterId, onComplete, onQuickAck, nullptr); request->rawRequest = object; request->rpcRequest = wrapInLayer(object, getDatacenterWithId(datacenterId), request); + auto cancelledIterator = tokensToBeCancelled.find(request->requestToken); + if (cancelledIterator != tokensToBeCancelled.end()) { + if (LOGS_ENABLED) DEBUG_D("(3) request is cancelled before sending, token %d", request->requestToken); + tokensToBeCancelled.erase(cancelledIterator); + delete request; + return request->requestToken; + } requestsQueue.push_back(std::unique_ptr(request)); if (immediate) { processRequestQueue(0, 0); @@ -1789,6 +1798,12 @@ int32_t ConnectionsManager::sendRequest(TLObject *object, onCompleteFunc onCompl auto request = new Request(instanceNum, requestToken, connetionType, flags, datacenterId, onComplete, onQuickAck, nullptr); request->rawRequest = object; request->rpcRequest = wrapInLayer(object, getDatacenterWithId(datacenterId), request); + auto cancelledIterator = tokensToBeCancelled.find(request->requestToken); + if (cancelledIterator != tokensToBeCancelled.end()) { + if (LOGS_ENABLED) DEBUG_D("(1) request is cancelled before sending, token %d", requestToken); + tokensToBeCancelled.erase(cancelledIterator); + delete request; + } requestsQueue.push_back(std::unique_ptr(request)); if (immediate) { processRequestQueue(0, 0); @@ -1808,14 +1823,17 @@ void ConnectionsManager::sendRequest(TLObject *object, onCompleteFunc onComplete exit(1); } if (ptr1 != nullptr) { + DEBUG_DELREF("connectionsmanager ptr1"); env->DeleteGlobalRef(ptr1); ptr1 = nullptr; } if (ptr2 != nullptr) { + DEBUG_DELREF("connectionsmanager ptr2"); env->DeleteGlobalRef(ptr2); ptr2 = nullptr; } if (ptr3 != nullptr) { + DEBUG_DELREF("connectionsmanager ptr3"); env->DeleteGlobalRef(ptr3); ptr3 = nullptr; } @@ -1830,6 +1848,13 @@ void ConnectionsManager::sendRequest(TLObject *object, onCompleteFunc onComplete request->ptr3 = ptr3; request->rpcRequest = wrapInLayer(object, getDatacenterWithId(datacenterId), request); if (LOGS_ENABLED) DEBUG_D("send request wrapped %p - %s", request->rpcRequest.get(), typeid(*(request->rpcRequest.get())).name()); + auto cancelledIterator = tokensToBeCancelled.find(request->requestToken); + if (cancelledIterator != tokensToBeCancelled.end()) { + if (LOGS_ENABLED) DEBUG_D("(2) request is cancelled before sending, token %d", requestToken); + tokensToBeCancelled.erase(cancelledIterator); + delete request; + return; + } requestsQueue.push_back(std::unique_ptr(request)); if (immediate) { processRequestQueue(0, 0); @@ -1924,6 +1949,10 @@ void ConnectionsManager::removeRequestFromGuid(int32_t requestToken) { } bool ConnectionsManager::cancelRequestInternal(int32_t token, int64_t messageId, bool notifyServer, bool removeFromClass) { + if (!tokensToBeCancelled.empty() && (connectionState != ConnectionStateWaitingForNetwork || tokensToBeCancelled.size() > 5000)) { + tokensToBeCancelled.clear(); + } + for (auto iter = requestsQueue.begin(); iter != requestsQueue.end(); iter++) { Request *request = iter->get(); if ((token != 0 && request->requestToken == token) || (messageId != 0 && request->respondsToMessageId(messageId))) { @@ -1954,6 +1983,12 @@ bool ConnectionsManager::cancelRequestInternal(int32_t token, int64_t messageId, return true; } } + + if (token != 0 && connectionState == 
ConnectionStateWaitingForNetwork) { + if (LOGS_ENABLED) DEBUG_D("request is tried to be cancelled, but it does not even exist, token %d", token); + tokensToBeCancelled.insert(token); + } + return false; } @@ -2829,6 +2864,16 @@ std::unique_ptr ConnectionsManager::wrapInLayer(TLObject *object, Data objectValue->key = "tz_offset"; objectValue->value = std::unique_ptr(jsonNumber); + if (currentPerformanceClass != -1) { + objectValue = new TL_jsonObjectValue(); + jsonObject->value.push_back(std::unique_ptr(objectValue)); + + auto jsonNumber = new TL_jsonNumber(); + jsonNumber->value = currentPerformanceClass + 1; + objectValue->key = "perf_cat"; + objectValue->value = std::unique_ptr(jsonNumber); + } + request->flags |= 2; if (!proxyAddress.empty() && !proxySecret.empty()) { @@ -2985,12 +3030,14 @@ void ConnectionsManager::updateDcSettings(uint32_t dcNum, bool workaround) { std::vector addressesIpv4Download; std::vector addressesIpv6Download; bool isCdn = false; + bool forceTryIpV6; void addAddressAndPort(TL_dcOption *dcOption) { std::vector *addresses; if (!isCdn) { isCdn = dcOption->cdn; } + forceTryIpV6 = dcOption->force_try_ipv6; if (dcOption->media_only) { if (dcOption->ipv6) { addresses = &addressesIpv6Download; @@ -3013,7 +3060,7 @@ void ConnectionsManager::updateDcSettings(uint32_t dcNum, bool workaround) { if (dcOption->secret != nullptr) { secret = std::string((const char *) dcOption->secret->bytes, dcOption->secret->length); } - if (LOGS_ENABLED) DEBUG_D("getConfig add %s:%d to dc%d, flags %d, has_secret = %d[%d], try_this_port_only = %d", dcOption->ip_address.c_str(), dcOption->port, dcOption->id, dcOption->flags, dcOption->secret != nullptr ? 1 : 0, dcOption->secret != nullptr ? dcOption->secret->length : 0, dcOption->thisPortOnly ? 1 : 0); + if (LOGS_ENABLED) DEBUG_D("getConfig add %s:%d to dc%d, flags %d, has_secret = %d[%d], try_this_port_only = %d, force_try_ipv6 = %d", dcOption->ip_address.c_str(), dcOption->port, dcOption->id, dcOption->flags, dcOption->secret != nullptr ? 1 : 0, dcOption->secret != nullptr ? dcOption->secret->length : 0, dcOption->thisPortOnly ? 1 : 0, dcOption->force_try_ipv6 ? 
1 : 0); if (dcOption->thisPortOnly) { addresses->insert(addresses->begin(), TcpAddress(dcOption->ip_address, dcOption->port, dcOption->flags, secret)); } else { @@ -3274,7 +3321,7 @@ void ConnectionsManager::applyDnsConfig(NativeByteBuffer *buffer, std::string ph }); } -void ConnectionsManager::init(uint32_t version, int32_t layer, int32_t apiId, std::string deviceModel, std::string systemVersion, std::string appVersion, std::string langCode, std::string systemLangCode, std::string configPath, std::string logPath, std::string regId, std::string cFingerpting, std::string installerId, std::string packageId, int32_t timezoneOffset, int64_t userId, bool isPaused, bool enablePushConnection, bool hasNetwork, int32_t networkType) { +void ConnectionsManager::init(uint32_t version, int32_t layer, int32_t apiId, std::string deviceModel, std::string systemVersion, std::string appVersion, std::string langCode, std::string systemLangCode, std::string configPath, std::string logPath, std::string regId, std::string cFingerpting, std::string installerId, std::string packageId, int32_t timezoneOffset, int64_t userId, bool isPaused, bool enablePushConnection, bool hasNetwork, int32_t networkType, int32_t performanceClass) { currentVersion = version; currentLayer = layer; currentApiId = apiId; @@ -3294,6 +3341,7 @@ void ConnectionsManager::init(uint32_t version, int32_t layer, int32_t apiId, st pushConnectionEnabled = enablePushConnection; currentNetworkType = networkType; networkAvailable = hasNetwork; + currentPerformanceClass = performanceClass; if (isPaused) { lastPauseTime = getCurrentTimeMonotonicMillis(); } @@ -3562,6 +3610,7 @@ void ConnectionsManager::useJavaVM(JavaVM *vm, bool useJavaByteBuffers) { if (LOGS_ENABLED) DEBUG_E("can't get jnienv"); exit(1); } + DEBUG_REF("connectionsmanager byte buffer"); jclass_ByteBuffer = (jclass) env->NewGlobalRef(env->FindClass("java/nio/ByteBuffer")); if (jclass_ByteBuffer == nullptr) { if (LOGS_ENABLED) DEBUG_E("can't find java ByteBuffer class"); diff --git a/TMessagesProj/jni/tgnet/ConnectionsManager.h b/TMessagesProj/jni/tgnet/ConnectionsManager.h index c68886d350..74f21ba3d5 100644 --- a/TMessagesProj/jni/tgnet/ConnectionsManager.h +++ b/TMessagesProj/jni/tgnet/ConnectionsManager.h @@ -15,6 +15,7 @@ #include #include #include +#include #include "Defines.h" #ifdef ANDROID @@ -63,7 +64,7 @@ class ConnectionsManager { void pauseNetwork(); void setNetworkAvailable(bool value, int32_t type, bool slow); void setIpStrategy(uint8_t value); - void init(uint32_t version, int32_t layer, int32_t apiId, std::string deviceModel, std::string systemVersion, std::string appVersion, std::string langCode, std::string systemLangCode, std::string configPath, std::string logPath, std::string regId, std::string cFingerprint, std::string installerId, std::string packageId, int32_t timezoneOffset, int64_t userId, bool isPaused, bool enablePushConnection, bool hasNetwork, int32_t networkType); + void init(uint32_t version, int32_t layer, int32_t apiId, std::string deviceModel, std::string systemVersion, std::string appVersion, std::string langCode, std::string systemLangCode, std::string configPath, std::string logPath, std::string regId, std::string cFingerprint, std::string installerId, std::string packageId, int32_t timezoneOffset, int64_t userId, bool isPaused, bool enablePushConnection, bool hasNetwork, int32_t networkType, int32_t performanceClass); void setProxySettings(std::string address, uint16_t port, std::string username, std::string password, std::string secret); void 
setLangCode(std::string langCode); void setRegId(std::string regId); @@ -205,6 +206,7 @@ class ConnectionsManager { requestsList requestsQueue; requestsList runningRequests; std::vector requestingSaltsForDc; + std::unordered_set tokensToBeCancelled; int32_t lastPingId = 0; int64_t lastInvokeAfterMessageId = 0; @@ -227,6 +229,7 @@ class ConnectionsManager { int64_t currentUserId = 0; bool registeredForInternalPush = false; bool pushConnectionEnabled = true; + int32_t currentPerformanceClass = -1; std::map>> genericMessagesToDatacenters; std::map>> genericMediaMessagesToDatacenters; diff --git a/TMessagesProj/jni/tgnet/Defines.h b/TMessagesProj/jni/tgnet/Defines.h index 36244b50b2..3308849967 100644 --- a/TMessagesProj/jni/tgnet/Defines.h +++ b/TMessagesProj/jni/tgnet/Defines.h @@ -27,7 +27,7 @@ #define DOWNLOAD_CONNECTIONS_COUNT 2 #define UPLOAD_CONNECTIONS_COUNT 4 #define CONNECTION_BACKGROUND_KEEP_TIME 10000 -#define MAX_ACCOUNT_COUNT 16 +//#define MAX_ACCOUNT_COUNT 16 #define USE_DELEGATE_HOST_RESOLVE #define USE_IPV4_ONLY 0 diff --git a/TMessagesProj/jni/tgnet/FileLog.cpp b/TMessagesProj/jni/tgnet/FileLog.cpp index 8f9fc22aec..d81cfb49ee 100644 --- a/TMessagesProj/jni/tgnet/FileLog.cpp +++ b/TMessagesProj/jni/tgnet/FileLog.cpp @@ -22,6 +22,8 @@ bool LOGS_ENABLED = true; bool LOGS_ENABLED = false; #endif +bool REF_LOGS_ENABLED = false; + FileLog &FileLog::getInstance() { static FileLog instance; return instance; @@ -166,3 +168,43 @@ void FileLog::d(const char *message, ...) { va_end(argptr); } + +static int refsCount = 0; + +void FileLog::ref(const char *message, ...) { + if (!REF_LOGS_ENABLED) { + return; + } + va_list argptr; + va_start(argptr, message); + time_t t = time(0); + struct tm *now = localtime(&t); + refsCount++; +#ifdef ANDROID + std::ostringstream s; + s << refsCount << " refs (+ref): " << message; + __android_log_vprint(ANDROID_LOG_VERBOSE, "tgnetREF", s.str().c_str(), argptr); + va_end(argptr); + va_start(argptr, message); +#endif + va_end(argptr); +} + +void FileLog::delref(const char *message, ...) { + if (!REF_LOGS_ENABLED) { + return; + } + va_list argptr; + va_start(argptr, message); + time_t t = time(0); + struct tm *now = localtime(&t); + refsCount--; +#ifdef ANDROID + std::ostringstream s; + s << refsCount << " refs (-ref): " << message; + __android_log_vprint(ANDROID_LOG_VERBOSE, "tgnetREF", s.str().c_str(), argptr); + va_end(argptr); + va_start(argptr, message); +#endif + va_end(argptr); +} \ No newline at end of file diff --git a/TMessagesProj/jni/tgnet/FileLog.h b/TMessagesProj/jni/tgnet/FileLog.h index 79945e72da..0569d6a0bc 100644 --- a/TMessagesProj/jni/tgnet/FileLog.h +++ b/TMessagesProj/jni/tgnet/FileLog.h @@ -19,6 +19,8 @@ class FileLog { static void e(const char *message, ...) __attribute__((format (printf, 1, 2))); static void w(const char *message, ...) __attribute__((format (printf, 1, 2))); static void d(const char *message, ...) __attribute__((format (printf, 1, 2))); + static void ref(const char *message, ...) __attribute__((format (printf, 1, 2))); + static void delref(const char *message, ...) 
__attribute__((format (printf, 1, 2))); static FileLog &getInstance(); @@ -34,4 +36,7 @@ extern bool LOGS_ENABLED; #define DEBUG_W FileLog::getInstance().w #define DEBUG_D FileLog::getInstance().d +#define DEBUG_REF FileLog::getInstance().ref +#define DEBUG_DELREF FileLog::getInstance().delref + #endif diff --git a/TMessagesProj/jni/tgnet/NativeByteBuffer.cpp b/TMessagesProj/jni/tgnet/NativeByteBuffer.cpp index afbc4cbbe3..b7e1d69606 100644 --- a/TMessagesProj/jni/tgnet/NativeByteBuffer.cpp +++ b/TMessagesProj/jni/tgnet/NativeByteBuffer.cpp @@ -14,6 +14,8 @@ #include "ConnectionsManager.h" #include "BuffersStorage.h" +static int buffersCount = 0; + NativeByteBuffer::NativeByteBuffer(uint32_t size) { #ifdef ANDROID if (jclass_ByteBuffer != nullptr) { @@ -27,6 +29,7 @@ NativeByteBuffer::NativeByteBuffer(uint32_t size) { if (LOGS_ENABLED) DEBUG_E("can't create javaByteBuffer"); exit(1); } + DEBUG_REF("nativebytebuffer"); jobject globalRef = env->NewGlobalRef(javaByteBuffer); env->DeleteLocalRef(javaByteBuffer); javaByteBuffer = globalRef; @@ -43,6 +46,7 @@ NativeByteBuffer::NativeByteBuffer(uint32_t size) { if (LOGS_ENABLED) DEBUG_E("can't allocate NativeByteBuffer buffer"); exit(1); } + _limit = _capacity = size; } @@ -64,6 +68,7 @@ NativeByteBuffer::~NativeByteBuffer() { if (LOGS_ENABLED) DEBUG_E("can't get jnienv"); exit(1); } + DEBUG_DELREF("nativebytebuffer"); env->DeleteGlobalRef(javaByteBuffer); javaByteBuffer = nullptr; } @@ -694,6 +699,7 @@ jobject NativeByteBuffer::getJavaByteBuffer() { if (LOGS_ENABLED) DEBUG_E("can't allocate NativeByteBuffer buffer"); exit(1); } + DEBUG_REF("nativebytebuffer"); jobject globalRef = env->NewGlobalRef(javaByteBuffer); env->DeleteLocalRef(javaByteBuffer); javaByteBuffer = globalRef; diff --git a/TMessagesProj/jni/tgnet/ProxyCheckInfo.cpp b/TMessagesProj/jni/tgnet/ProxyCheckInfo.cpp index c6835b9708..73c84c55d3 100644 --- a/TMessagesProj/jni/tgnet/ProxyCheckInfo.cpp +++ b/TMessagesProj/jni/tgnet/ProxyCheckInfo.cpp @@ -8,10 +8,12 @@ #include "ProxyCheckInfo.h" #include "ConnectionsManager.h" +#include "FileLog.h" ProxyCheckInfo::~ProxyCheckInfo() { #ifdef ANDROID if (ptr1 != nullptr) { + DEBUG_DELREF("tgnet (2) request ptr1"); jniEnv[instanceNum]->DeleteGlobalRef(ptr1); ptr1 = nullptr; } diff --git a/TMessagesProj/jni/tgnet/Request.cpp b/TMessagesProj/jni/tgnet/Request.cpp index c041e253ae..14667314bc 100644 --- a/TMessagesProj/jni/tgnet/Request.cpp +++ b/TMessagesProj/jni/tgnet/Request.cpp @@ -13,6 +13,7 @@ #include "ConnectionsManager.h" #include "Datacenter.h" #include "Connection.h" +#include "FileLog.h" Request::Request(int32_t instance, int32_t token, ConnectionType type, uint32_t flags, uint32_t datacenter, onCompleteFunc completeFunc, onQuickAckFunc quickAckFunc, onWriteToSocketFunc writeToSocketFunc) { requestToken = token; @@ -29,14 +30,17 @@ Request::Request(int32_t instance, int32_t token, ConnectionType type, uint32_t Request::~Request() { #ifdef ANDROID if (ptr1 != nullptr) { + DEBUG_DELREF("tgnet request ptr1"); jniEnv[instanceNum]->DeleteGlobalRef(ptr1); ptr1 = nullptr; } if (ptr2 != nullptr) { + DEBUG_DELREF("tgnet request ptr2"); jniEnv[instanceNum]->DeleteGlobalRef(ptr2); ptr2 = nullptr; } if (ptr3 != nullptr) { + DEBUG_DELREF("tgnet request ptr3"); jniEnv[instanceNum]->DeleteGlobalRef(ptr3); ptr3 = nullptr; } diff --git a/TMessagesProj/jni/third_party/breakpad/Android.mk b/TMessagesProj/jni/third_party/breakpad/Android.mk new file mode 100644 index 0000000000..13d1589d58 --- /dev/null +++ 
b/TMessagesProj/jni/third_party/breakpad/Android.mk @@ -0,0 +1,104 @@ +# Copyright (c) 2012, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# ndk-build module definition for the Google Breakpad client library +# +# To use this file, do the following: +# +# 1/ Include this file from your own Android.mk, either directly +# or with through the NDK's import-module function. +# +# 2/ Use the client static library in your project with: +# +# LOCAL_STATIC_LIBRARIES += breakpad_client +# +# 3/ In your source code, include "src/client/linux/exception_handler.h" +# and use the Linux instructions to use it. +# +# This module works with either the STLport or GNU libstdc++, but you need +# to select one in your Application.mk +# + +# The top Google Breakpad directory. +# We assume this Android.mk to be under 'android/google_breakpad' + +LOCAL_PATH := $(call my-dir)/ + +# Defube the client library module, as a simple static library that +# exports the right include path / linker flags to its users. + +include $(CLEAR_VARS) + +LOCAL_MODULE := breakpad_client + +LOCAL_CPP_EXTENSION := .cc + +# Breakpad uses inline ARM assembly that requires the library +# to be built in ARM mode. 
Otherwise, the build will fail with +# cryptic assembler messages like: +# Compile++ thumb : google_breakpad_client <= crash_generation_client.cc +# /tmp/cc8aMSoD.s: Assembler messages: +# /tmp/cc8aMSoD.s:132: Error: invalid immediate: 288 is out of range +# /tmp/cc8aMSoD.s:244: Error: invalid immediate: 296 is out of range +LOCAL_ARM_MODE := arm + +# List of client source files, directly taken from Makefile.am +LOCAL_SRC_FILES := \ + src/client/linux/crash_generation/crash_generation_client.cc \ + src/client/linux/handler/exception_handler.cc \ + src/client/linux/handler/minidump_descriptor.cc \ + src/client/linux/log/log.cc \ + src/client/linux/dump_writer_common/thread_info.cc \ + src/client/linux/dump_writer_common/seccomp_unwinder.cc \ + src/client/linux/dump_writer_common/ucontext_reader.cc \ + src/client/linux/microdump_writer/microdump_writer.cc \ + src/client/linux/minidump_writer/linux_dumper.cc \ + src/client/linux/minidump_writer/linux_ptrace_dumper.cc \ + src/client/linux/minidump_writer/minidump_writer.cc \ + src/client/minidump_file_writer.cc \ + src/common/android/breakpad_getcontext.S \ + src/common/convert_UTF.c \ + src/common/md5.cc \ + src/common/string_conversion.cc \ + src/common/linux/elfutils.cc \ + src/common/linux/file_id.cc \ + src/common/linux/guid_creator.cc \ + src/common/linux/linux_libc_support.cc \ + src/common/linux/memory_mapped_file.cc \ + src/common/linux/safe_readlink.cc + +LOCAL_C_INCLUDES := $(LOCAL_PATH)/src/common/android/include \ + $(LOCAL_PATH)/src + +LOCAL_EXPORT_C_INCLUDES := $(LOCAL_C_INCLUDES) +LOCAL_EXPORT_LDLIBS := -llog + +include $(BUILD_STATIC_LIBRARY) + +# Done. \ No newline at end of file diff --git a/TMessagesProj/jni/third_party/breakpad/src/breakpad_googletest_includes.h b/TMessagesProj/jni/third_party/breakpad/src/breakpad_googletest_includes.h new file mode 100644 index 0000000000..1cc324b232 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/breakpad_googletest_includes.h @@ -0,0 +1,57 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef BREAKPAD_GOOGLETEST_INCLUDES_H__ +#define BREAKPAD_GOOGLETEST_INCLUDES_H__ + +#include "testing/gtest/include/gtest/gtest.h" +#include "testing/include/gmock/gmock.h" + +// If AddressSanitizer is used, NULL pointer dereferences generate SIGILL +// (illegal instruction) instead of SIGSEGV (segmentation fault). Also, +// the number of memory regions differs, so there is no point in running +// this test if AddressSanitizer is used. +// +// Ideally we'd use this attribute to disable ASAN on a per-func basis, +// but this doesn't seem to actually work, and it's changed names over +// time. So just stick with disabling the actual tests. +// http://crbug.com/304575 +//#define NO_ASAN __attribute__((no_sanitize_address)) +#if defined(__clang__) && defined(__has_feature) +// Have to keep this check sep from above as newer gcc will barf on it. +# if __has_feature(address_sanitizer) +# define ADDRESS_SANITIZER +# endif +#elif defined(__GNUC__) && defined(__SANITIZE_ADDRESS__) +# define ADDRESS_SANITIZER +#else +# undef ADDRESS_SANITIZER +#endif + +#endif // BREAKPAD_GOOGLETEST_INCLUDES_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/crash_generation/client_info.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/crash_generation/client_info.h new file mode 100644 index 0000000000..d0a184a637 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/crash_generation/client_info.h @@ -0,0 +1,53 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef CLIENT_LINUX_CRASH_GENERATION_CLIENT_INFO_H_ +#define CLIENT_LINUX_CRASH_GENERATION_CLIENT_INFO_H_ + +namespace google_breakpad { + +class CrashGenerationServer; + +class ClientInfo { + public: + ClientInfo(pid_t pid, CrashGenerationServer* crash_server) + : crash_server_(crash_server), + pid_(pid) {} + + CrashGenerationServer* crash_server() const { return crash_server_; } + pid_t pid() const { return pid_; } + + private: + CrashGenerationServer* crash_server_; + pid_t pid_; +}; + +} + +#endif // CLIENT_LINUX_CRASH_GENERATION_CLIENT_INFO_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/crash_generation/crash_generation_client.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/crash_generation/crash_generation_client.cc new file mode 100644 index 0000000000..d8bfbbad27 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/crash_generation/crash_generation_client.cc @@ -0,0 +1,105 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +#include "client/linux/crash_generation/crash_generation_client.h" + +#include +#include +#include + +#include + +#include "common/linux/eintr_wrapper.h" +#include "common/linux/ignore_ret.h" +#include "third_party/lss/linux_syscall_support.h" + +namespace google_breakpad { + +namespace { + +class CrashGenerationClientImpl : public CrashGenerationClient { + public: + explicit CrashGenerationClientImpl(int server_fd) : server_fd_(server_fd) {} + virtual ~CrashGenerationClientImpl() {} + + virtual bool RequestDump(const void* blob, size_t blob_size) { + int fds[2]; + if (sys_pipe(fds) < 0) + return false; + static const unsigned kControlMsgSize = CMSG_SPACE(sizeof(int)); + + struct kernel_iovec iov; + iov.iov_base = const_cast(blob); + iov.iov_len = blob_size; + + struct kernel_msghdr msg = { 0 }; + msg.msg_iov = &iov; + msg.msg_iovlen = 1; + char cmsg[kControlMsgSize] = ""; + msg.msg_control = cmsg; + msg.msg_controllen = sizeof(cmsg); + + struct cmsghdr* hdr = CMSG_FIRSTHDR(&msg); + hdr->cmsg_level = SOL_SOCKET; + hdr->cmsg_type = SCM_RIGHTS; + hdr->cmsg_len = CMSG_LEN(sizeof(int)); + int* p = reinterpret_cast(CMSG_DATA(hdr)); + *p = fds[1]; + + ssize_t ret = HANDLE_EINTR(sys_sendmsg(server_fd_, &msg, 0)); + sys_close(fds[1]); + if (ret < 0) { + sys_close(fds[0]); + return false; + } + + // Wait for an ACK from the server. + char b; + IGNORE_RET(HANDLE_EINTR(sys_read(fds[0], &b, 1))); + sys_close(fds[0]); + + return true; + } + + private: + int server_fd_; + + DISALLOW_COPY_AND_ASSIGN(CrashGenerationClientImpl); +}; + +} // namespace + +// static +CrashGenerationClient* CrashGenerationClient::TryCreate(int server_fd) { + if (server_fd < 0) + return NULL; + return new CrashGenerationClientImpl(server_fd); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/crash_generation/crash_generation_client.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/crash_generation/crash_generation_client.h new file mode 100644 index 0000000000..4e68424ae8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/crash_generation/crash_generation_client.h @@ -0,0 +1,65 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef CLIENT_LINUX_CRASH_GENERATION_CRASH_GENERATION_CLIENT_H_ +#define CLIENT_LINUX_CRASH_GENERATION_CRASH_GENERATION_CLIENT_H_ + +#include "common/basictypes.h" + +#include + +namespace google_breakpad { + +// CrashGenerationClient is an interface for implementing out-of-process crash +// dumping. The default implementation, accessed via the TryCreate() factory, +// works in conjunction with the CrashGenerationServer to generate a minidump +// via a remote process. +class CrashGenerationClient { + public: + CrashGenerationClient() {} + virtual ~CrashGenerationClient() {} + + // Request the crash server to generate a dump. |blob| is an opaque + // CrashContext pointer from exception_handler.h. + // Returns true if the dump was successful; false otherwise. + virtual bool RequestDump(const void* blob, size_t blob_size) = 0; + + // Returns a new CrashGenerationClient if |server_fd| is valid and + // connects to a CrashGenerationServer. Otherwise, return NULL. + // The returned CrashGenerationClient* is owned by the caller of + // this function. + static CrashGenerationClient* TryCreate(int server_fd); + + private: + DISALLOW_COPY_AND_ASSIGN(CrashGenerationClient); +}; + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_CRASH_GENERATION_CRASH_GENERATION_CLIENT_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/crash_generation/crash_generation_server.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/crash_generation/crash_generation_server.cc new file mode 100644 index 0000000000..860e8bc9f3 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/crash_generation/crash_generation_server.cc @@ -0,0 +1,330 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "client/linux/crash_generation/crash_generation_server.h" +#include "client/linux/crash_generation/client_info.h" +#include "client/linux/handler/exception_handler.h" +#include "client/linux/minidump_writer/minidump_writer.h" +#include "common/linux/eintr_wrapper.h" +#include "common/linux/guid_creator.h" +#include "common/linux/safe_readlink.h" + +static const char kCommandQuit = 'x'; + +namespace google_breakpad { + +CrashGenerationServer::CrashGenerationServer( + const int listen_fd, + OnClientDumpRequestCallback dump_callback, + void* dump_context, + OnClientExitingCallback exit_callback, + void* exit_context, + bool generate_dumps, + const string* dump_path) : + server_fd_(listen_fd), + dump_callback_(dump_callback), + dump_context_(dump_context), + exit_callback_(exit_callback), + exit_context_(exit_context), + generate_dumps_(generate_dumps), + started_(false) +{ + if (dump_path) + dump_dir_ = *dump_path; + else + dump_dir_ = "/tmp"; +} + +CrashGenerationServer::~CrashGenerationServer() +{ + if (started_) + Stop(); +} + +bool +CrashGenerationServer::Start() +{ + if (started_ || 0 > server_fd_) + return false; + + int control_pipe[2]; + if (pipe(control_pipe)) + return false; + + if (fcntl(control_pipe[0], F_SETFD, FD_CLOEXEC)) + return false; + if (fcntl(control_pipe[1], F_SETFD, FD_CLOEXEC)) + return false; + + if (fcntl(control_pipe[0], F_SETFL, O_NONBLOCK)) + return false; + + control_pipe_in_ = control_pipe[0]; + control_pipe_out_ = control_pipe[1]; + + if (pthread_create(&thread_, NULL, + ThreadMain, reinterpret_cast(this))) + return false; + + started_ = true; + return true; +} + +void +CrashGenerationServer::Stop() +{ + assert(pthread_self() != thread_); + + if (!started_) + return; + + HANDLE_EINTR(write(control_pipe_out_, &kCommandQuit, 1)); + + void* dummy; + pthread_join(thread_, &dummy); + + started_ = false; +} + +//static +bool +CrashGenerationServer::CreateReportChannel(int* server_fd, int* client_fd) +{ + int fds[2]; + + if (socketpair(AF_UNIX, SOCK_SEQPACKET, 0, fds)) + return false; + + static const int on = 1; + // Enable passcred on the server end of the socket + if (setsockopt(fds[1], SOL_SOCKET, SO_PASSCRED, &on, sizeof(on))) + return false; + + if (fcntl(fds[1], F_SETFL, O_NONBLOCK)) + return false; + if (fcntl(fds[1], F_SETFD, FD_CLOEXEC)) + return false; + + *client_fd = fds[0]; + *server_fd = fds[1]; + return true; +} + +// The following methods/functions execute on the server thread + +void +CrashGenerationServer::Run() +{ + struct pollfd pollfds[2]; + memset(&pollfds, 0, sizeof(pollfds)); + + pollfds[0].fd = server_fd_; + pollfds[0].events = POLLIN; + + pollfds[1].fd = control_pipe_in_; + pollfds[1].events = POLLIN; + + while (true) { + // infinite timeout + int nevents = poll(pollfds, sizeof(pollfds)/sizeof(pollfds[0]), -1); + if (-1 == nevents) { + if (EINTR == errno) { + continue; + } 
else { + return; + } + } + + if (pollfds[0].revents && !ClientEvent(pollfds[0].revents)) + return; + + if (pollfds[1].revents && !ControlEvent(pollfds[1].revents)) + return; + } +} + +bool +CrashGenerationServer::ClientEvent(short revents) +{ + if (POLLHUP & revents) + return false; + assert(POLLIN & revents); + + // A process has crashed and has signaled us by writing a datagram + // to the death signal socket. The datagram contains the crash context needed + // for writing the minidump as well as a file descriptor and a credentials + // block so that they can't lie about their pid. + + // The length of the control message: + static const unsigned kControlMsgSize = + CMSG_SPACE(sizeof(int)) + CMSG_SPACE(sizeof(struct ucred)); + // The length of the regular payload: + static const unsigned kCrashContextSize = + sizeof(google_breakpad::ExceptionHandler::CrashContext); + + struct msghdr msg = {0}; + struct iovec iov[1]; + char crash_context[kCrashContextSize]; + char control[kControlMsgSize]; + const ssize_t expected_msg_size = sizeof(crash_context); + + iov[0].iov_base = crash_context; + iov[0].iov_len = sizeof(crash_context); + msg.msg_iov = iov; + msg.msg_iovlen = sizeof(iov)/sizeof(iov[0]); + msg.msg_control = control; + msg.msg_controllen = kControlMsgSize; + + const ssize_t msg_size = HANDLE_EINTR(recvmsg(server_fd_, &msg, 0)); + if (msg_size != expected_msg_size) + return true; + + if (msg.msg_controllen != kControlMsgSize || + msg.msg_flags & ~MSG_TRUNC) + return true; + + // Walk the control payload and extract the file descriptor and validated pid. + pid_t crashing_pid = -1; + int signal_fd = -1; + for (struct cmsghdr *hdr = CMSG_FIRSTHDR(&msg); hdr; + hdr = CMSG_NXTHDR(&msg, hdr)) { + if (hdr->cmsg_level != SOL_SOCKET) + continue; + if (hdr->cmsg_type == SCM_RIGHTS) { + const unsigned len = hdr->cmsg_len - + (((uint8_t*)CMSG_DATA(hdr)) - (uint8_t*)hdr); + assert(len % sizeof(int) == 0u); + const unsigned num_fds = len / sizeof(int); + if (num_fds > 1 || num_fds == 0) { + // A nasty process could try and send us too many descriptors and + // force a leak. + for (unsigned i = 0; i < num_fds; ++i) + close(reinterpret_cast(CMSG_DATA(hdr))[i]); + return true; + } else { + signal_fd = reinterpret_cast(CMSG_DATA(hdr))[0]; + } + } else if (hdr->cmsg_type == SCM_CREDENTIALS) { + const struct ucred *cred = + reinterpret_cast(CMSG_DATA(hdr)); + crashing_pid = cred->pid; + } + } + + if (crashing_pid == -1 || signal_fd == -1) { + if (signal_fd) + close(signal_fd); + return true; + } + + string minidump_filename; + if (!MakeMinidumpFilename(minidump_filename)) + return true; + + if (!google_breakpad::WriteMinidump(minidump_filename.c_str(), + crashing_pid, crash_context, + kCrashContextSize)) { + close(signal_fd); + return true; + } + + if (dump_callback_) { + ClientInfo info(crashing_pid, this); + + dump_callback_(dump_context_, &info, &minidump_filename); + } + + // Send the done signal to the process: it can exit now. + // (Closing this will make the child's sys_read unblock and return 0.) 
+ close(signal_fd); + + return true; +} + +bool +CrashGenerationServer::ControlEvent(short revents) +{ + if (POLLHUP & revents) + return false; + assert(POLLIN & revents); + + char command; + if (read(control_pipe_in_, &command, 1)) + return false; + + switch (command) { + case kCommandQuit: + return false; + default: + assert(0); + } + + return true; +} + +bool +CrashGenerationServer::MakeMinidumpFilename(string& outFilename) +{ + GUID guid; + char guidString[kGUIDStringLength+1]; + + if (!(CreateGUID(&guid) + && GUIDToString(&guid, guidString, sizeof(guidString)))) + return false; + + char path[PATH_MAX]; + snprintf(path, sizeof(path), "%s/%s.dmp", dump_dir_.c_str(), guidString); + + outFilename = path; + return true; +} + +// static +void* +CrashGenerationServer::ThreadMain(void *arg) +{ + reinterpret_cast(arg)->Run(); + return NULL; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/crash_generation/crash_generation_server.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/crash_generation/crash_generation_server.h new file mode 100644 index 0000000000..483fb709bf --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/crash_generation/crash_generation_server.h @@ -0,0 +1,135 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef CLIENT_LINUX_CRASH_GENERATION_CRASH_GENERATION_SERVER_H_ +#define CLIENT_LINUX_CRASH_GENERATION_CRASH_GENERATION_SERVER_H_ + +#include + +#include + +#include "common/using_std_string.h" + +namespace google_breakpad { + +class ClientInfo; + +class CrashGenerationServer { +public: + // WARNING: callbacks may be invoked on a different thread + // than that which creates the CrashGenerationServer. They must + // be thread safe. 
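Note (not part of the vendored breakpad sources): the client and server pieces above only meet inside the application that embeds them, so a minimal wiring sketch may help. It assumes the ExceptionHandler constructor that appears later in this patch (which accepts a server_fd and internally calls CrashGenerationClient::TryCreate) and a MinidumpDescriptor class that accompanies these sources; the function names, callback, and dump directory below are hypothetical.

// Illustrative sketch only -- not how this patch itself wires things up.
// CreateReportChannel() is normally called in a monitor process before the
// monitored process is spawned: the monitor keeps server_fd and runs a
// CrashGenerationServer, while client_fd is inherited by the monitored
// process and handed to ExceptionHandler.
#include "client/linux/crash_generation/crash_generation_server.h"
#include "client/linux/handler/exception_handler.h"
#include "client/linux/handler/minidump_descriptor.h"

// Dump callback: runs on the server's own thread, so it must be thread safe.
// "string" is std::string via common/using_std_string.h.
static void OnClientDump(void* /*context*/,
                         const google_breakpad::ClientInfo* /*client_info*/,
                         const string* file_path) {
  // e.g. enqueue *file_path for upload; avoid non-thread-safe state here.
}

// Monitor-process side: listen for crash reports and write dumps.
bool StartCrashServer(int server_fd, const string* dump_dir) {
  static google_breakpad::CrashGenerationServer server(
      server_fd, OnClientDump, NULL, /* exit_callback = */ NULL, NULL,
      /* generate_dumps = */ true, dump_dir /* NULL falls back to "/tmp" */);
  return server.Start();
}

// Monitored-process side: with a valid client_fd the handler requests dumps
// out of process instead of writing them in the compromised context itself.
void InstallOutOfProcessHandler(int client_fd) {
  static google_breakpad::MinidumpDescriptor descriptor("/data/local/tmp");
  static google_breakpad::ExceptionHandler handler(
      descriptor, NULL, NULL, NULL, /* install_handler = */ true, client_fd);
}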
+ typedef void (*OnClientDumpRequestCallback)(void* context, + const ClientInfo* client_info, + const string* file_path); + + typedef void (*OnClientExitingCallback)(void* context, + const ClientInfo* client_info); + + // Create an instance with the given parameters. + // + // Parameter listen_fd: The server fd created by CreateReportChannel(). + // Parameter dump_callback: Callback for a client crash dump request. + // Parameter dump_context: Context for client crash dump request callback. + // Parameter exit_callback: Callback for client process exit. + // Parameter exit_context: Context for client exit callback. + // Parameter generate_dumps: Whether to automatically generate dumps. + // Client code of this class might want to generate dumps explicitly + // in the crash dump request callback. In that case, false can be + // passed for this parameter. + // Parameter dump_path: Path for generating dumps; required only if true is + // passed for generateDumps parameter; NULL can be passed otherwise. + CrashGenerationServer(const int listen_fd, + OnClientDumpRequestCallback dump_callback, + void* dump_context, + OnClientExitingCallback exit_callback, + void* exit_context, + bool generate_dumps, + const string* dump_path); + + ~CrashGenerationServer(); + + // Perform initialization steps needed to start listening to clients. + // + // Return true if initialization is successful; false otherwise. + bool Start(); + + // Stop the server. + void Stop(); + + // Create a "channel" that can be used by clients to report crashes + // to a CrashGenerationServer. |*server_fd| should be passed to + // this class's constructor, and |*client_fd| should be passed to + // the ExceptionHandler constructor in the client process. + static bool CreateReportChannel(int* server_fd, int* client_fd); + +private: + // Run the server's event loop + void Run(); + + // Invoked when an child process (client) event occurs + // Returning true => "keep running", false => "exit loop" + bool ClientEvent(short revents); + + // Invoked when the controlling thread (main) event occurs + // Returning true => "keep running", false => "exit loop" + bool ControlEvent(short revents); + + // Return a unique filename at which a minidump can be written + bool MakeMinidumpFilename(string& outFilename); + + // Trampoline to |Run()| + static void* ThreadMain(void* arg); + + int server_fd_; + + OnClientDumpRequestCallback dump_callback_; + void* dump_context_; + + OnClientExitingCallback exit_callback_; + void* exit_context_; + + bool generate_dumps_; + + string dump_dir_; + + bool started_; + + pthread_t thread_; + int control_pipe_in_; + int control_pipe_out_; + + // disable these + CrashGenerationServer(const CrashGenerationServer&); + CrashGenerationServer& operator=(const CrashGenerationServer&); +}; + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_CRASH_GENERATION_CRASH_GENERATION_SERVER_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/data/linux-gate-amd.sym b/TMessagesProj/jni/third_party/breakpad/src/client/linux/data/linux-gate-amd.sym new file mode 100644 index 0000000000..e042a5ec42 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/data/linux-gate-amd.sym @@ -0,0 +1,3 @@ +MODULE Linux x86 B8CFDE93002D54DA1900A40AA1BD67690 linux-gate.so +PUBLIC 400 0 __kernel_vsyscall +STACK WIN 4 400 100 1 1 0 0 0 0 0 1 diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/data/linux-gate-intel.sym 
b/TMessagesProj/jni/third_party/breakpad/src/client/linux/data/linux-gate-intel.sym new file mode 100644 index 0000000000..c209c23756 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/data/linux-gate-intel.sym @@ -0,0 +1,3 @@ +MODULE Linux x86 4FBDA58B5A1DF5A379E3CF19A235EA090 linux-gate.so +PUBLIC 400 0 __kernel_vsyscall +STACK WIN 4 400 200 3 3 0 0 0 0 0 1 \ No newline at end of file diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/mapping_info.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/mapping_info.h new file mode 100644 index 0000000000..5f247cfd4e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/mapping_info.h @@ -0,0 +1,61 @@ +// Copyright (c) 2014, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef CLIENT_LINUX_DUMP_WRITER_COMMON_MAPPING_INFO_H_ +#define CLIENT_LINUX_DUMP_WRITER_COMMON_MAPPING_INFO_H_ + +#include +#include +#include + +#include "google_breakpad/common/minidump_format.h" + +namespace google_breakpad { + +// One of these is produced for each mapping in the process (i.e. line in +// /proc/$x/maps). +struct MappingInfo { + uintptr_t start_addr; + size_t size; + size_t offset; // offset into the backed file. + bool exec; // true if the mapping has the execute bit set. + char name[NAME_MAX]; +}; + +struct MappingEntry { + MappingInfo first; + uint8_t second[sizeof(MDGUID)]; +}; + +// A list of +typedef std::list MappingList; + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_DUMP_WRITER_COMMON_MAPPING_INFO_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/raw_context_cpu.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/raw_context_cpu.h new file mode 100644 index 0000000000..e2ef45df5d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/raw_context_cpu.h @@ -0,0 +1,53 @@ +// Copyright (c) 2014, Google Inc. 
+// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef CLIENT_LINUX_DUMP_WRITER_COMMON_RAW_CONTEXT_CPU_H +#define CLIENT_LINUX_DUMP_WRITER_COMMON_RAW_CONTEXT_CPU_H + +#include "google_breakpad/common/minidump_format.h" + +namespace google_breakpad { + +#if defined(__i386__) +typedef MDRawContextX86 RawContextCPU; +#elif defined(__x86_64) +typedef MDRawContextAMD64 RawContextCPU; +#elif defined(__ARM_EABI__) +typedef MDRawContextARM RawContextCPU; +#elif defined(__aarch64__) +typedef MDRawContextARM64 RawContextCPU; +#elif defined(__mips__) +typedef MDRawContextMIPS RawContextCPU; +#else +#error "This code has not been ported to your platform yet." +#endif + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_DUMP_WRITER_COMMON_RAW_CONTEXT_CPU_H diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/seccomp_unwinder.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/seccomp_unwinder.cc new file mode 100644 index 0000000000..e9d1ec7801 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/seccomp_unwinder.cc @@ -0,0 +1,154 @@ +// Copyright (c) 2014, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include "client/linux/dump_writer_common/seccomp_unwinder.h" + +#include + +#include "google_breakpad/common/minidump_format.h" +#include "common/linux/linux_libc_support.h" + +namespace google_breakpad { + +void SeccompUnwinder::PopSeccompStackFrame(RawContextCPU* cpu, + const MDRawThread& thread, + uint8_t* stack_copy) { +#if defined(__x86_64) + uint64_t bp = cpu->rbp; + uint64_t top = thread.stack.start_of_memory_range; + for (int i = 4; i--; ) { + if (bp < top || + bp > thread.stack.start_of_memory_range + + thread.stack.memory.data_size - sizeof(bp) || + bp & 1) { + break; + } + uint64_t old_top = top; + top = bp; + uint8_t* bp_addr = stack_copy + bp - thread.stack.start_of_memory_range; + my_memcpy(&bp, bp_addr, sizeof(bp)); + if (bp == 0xDEADBEEFDEADBEEFull) { + struct { + uint64_t r15; + uint64_t r14; + uint64_t r13; + uint64_t r12; + uint64_t r11; + uint64_t r10; + uint64_t r9; + uint64_t r8; + uint64_t rdi; + uint64_t rsi; + uint64_t rdx; + uint64_t rcx; + uint64_t rbx; + uint64_t deadbeef; + uint64_t rbp; + uint64_t fakeret; + uint64_t ret; + /* char redzone[128]; */ + } seccomp_stackframe; + if (top - offsetof(__typeof__(seccomp_stackframe), deadbeef) < old_top || + top - offsetof(__typeof__(seccomp_stackframe), deadbeef) + + sizeof(seccomp_stackframe) > + thread.stack.start_of_memory_range+thread.stack.memory.data_size) { + break; + } + my_memcpy(&seccomp_stackframe, + bp_addr - offsetof(__typeof__(seccomp_stackframe), deadbeef), + sizeof(seccomp_stackframe)); + cpu->rbx = seccomp_stackframe.rbx; + cpu->rcx = seccomp_stackframe.rcx; + cpu->rdx = seccomp_stackframe.rdx; + cpu->rsi = seccomp_stackframe.rsi; + cpu->rdi = seccomp_stackframe.rdi; + cpu->rbp = seccomp_stackframe.rbp; + cpu->rsp = top + 4*sizeof(uint64_t) + 128; + cpu->r8 = seccomp_stackframe.r8; + cpu->r9 = seccomp_stackframe.r9; + cpu->r10 = seccomp_stackframe.r10; + cpu->r11 = seccomp_stackframe.r11; + cpu->r12 = seccomp_stackframe.r12; + cpu->r13 = seccomp_stackframe.r13; + cpu->r14 = seccomp_stackframe.r14; + cpu->r15 = seccomp_stackframe.r15; + cpu->rip = seccomp_stackframe.fakeret; + return; + } + } +#elif defined(__i386__) + uint32_t bp = cpu->ebp; + uint32_t top = thread.stack.start_of_memory_range; + for (int i = 4; i--; ) { + if (bp < top || + bp > thread.stack.start_of_memory_range + + thread.stack.memory.data_size - sizeof(bp) || + bp & 1) { + break; + } + uint32_t old_top = top; + top = bp; + uint8_t* bp_addr = stack_copy + bp - thread.stack.start_of_memory_range; + my_memcpy(&bp, bp_addr, sizeof(bp)); + if (bp == 0xDEADBEEFu) { + struct { + uint32_t edi; + uint32_t esi; + uint32_t edx; + uint32_t ecx; + uint32_t ebx; + uint32_t deadbeef; + uint32_t ebp; + uint32_t fakeret; + uint32_t ret; + } 
seccomp_stackframe; + if (top - offsetof(__typeof__(seccomp_stackframe), deadbeef) < old_top || + top - offsetof(__typeof__(seccomp_stackframe), deadbeef) + + sizeof(seccomp_stackframe) > + thread.stack.start_of_memory_range+thread.stack.memory.data_size) { + break; + } + my_memcpy(&seccomp_stackframe, + bp_addr - offsetof(__typeof__(seccomp_stackframe), deadbeef), + sizeof(seccomp_stackframe)); + cpu->ebx = seccomp_stackframe.ebx; + cpu->ecx = seccomp_stackframe.ecx; + cpu->edx = seccomp_stackframe.edx; + cpu->esi = seccomp_stackframe.esi; + cpu->edi = seccomp_stackframe.edi; + cpu->ebp = seccomp_stackframe.ebp; + cpu->esp = top + 4*sizeof(void*); + cpu->eip = seccomp_stackframe.fakeret; + return; + } + } +#endif +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/seccomp_unwinder.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/seccomp_unwinder.h new file mode 100644 index 0000000000..0f5637b69c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/seccomp_unwinder.h @@ -0,0 +1,50 @@ +// Copyright (c) 2014, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef CLIENT_LINUX_DUMP_WRITER_COMMON_SECCOMP_UNWINDER_H +#define CLIENT_LINUX_DUMP_WRITER_COMMON_SECCOMP_UNWINDER_H + +#include "client/linux/dump_writer_common/raw_context_cpu.h" +#include "google_breakpad/common/minidump_format.h" + +namespace google_breakpad { + +struct SeccompUnwinder { + + // Check if the top of the stack is part of a system call that has been + // redirected by the seccomp sandbox. If so, try to pop the stack frames + // all the way back to the point where the interception happened. 
+ static void PopSeccompStackFrame(RawContextCPU* cpu, + const MDRawThread& thread, + uint8_t* stack_copy); +}; + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_DUMP_WRITER_COMMON_SECCOMP_UNWINDER_H diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/thread_info.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/thread_info.cc new file mode 100644 index 0000000000..9956d4450b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/thread_info.cc @@ -0,0 +1,299 @@ +// Copyright (c) 2014, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include "client/linux/dump_writer_common/thread_info.h" + +#include +#include + +#include "common/linux/linux_libc_support.h" +#include "google_breakpad/common/minidump_format.h" + +namespace { + +#if defined(__i386__) +// Write a uint16_t to memory +// out: memory location to write to +// v: value to write. +void U16(void* out, uint16_t v) { + my_memcpy(out, &v, sizeof(v)); +} + +// Write a uint32_t to memory +// out: memory location to write to +// v: value to write. +void U32(void* out, uint32_t v) { + my_memcpy(out, &v, sizeof(v)); +} +#endif + +} + +namespace google_breakpad { + +#if defined(__i386__) + +uintptr_t ThreadInfo::GetInstructionPointer() const { + return regs.eip; +} + +void ThreadInfo::FillCPUContext(RawContextCPU* out) const { + out->context_flags = MD_CONTEXT_X86_ALL; + + out->dr0 = dregs[0]; + out->dr1 = dregs[1]; + out->dr2 = dregs[2]; + out->dr3 = dregs[3]; + // 4 and 5 deliberatly omitted because they aren't included in the minidump + // format. 
+ out->dr6 = dregs[6]; + out->dr7 = dregs[7]; + + out->gs = regs.xgs; + out->fs = regs.xfs; + out->es = regs.xes; + out->ds = regs.xds; + + out->edi = regs.edi; + out->esi = regs.esi; + out->ebx = regs.ebx; + out->edx = regs.edx; + out->ecx = regs.ecx; + out->eax = regs.eax; + + out->ebp = regs.ebp; + out->eip = regs.eip; + out->cs = regs.xcs; + out->eflags = regs.eflags; + out->esp = regs.esp; + out->ss = regs.xss; + + out->float_save.control_word = fpregs.cwd; + out->float_save.status_word = fpregs.swd; + out->float_save.tag_word = fpregs.twd; + out->float_save.error_offset = fpregs.fip; + out->float_save.error_selector = fpregs.fcs; + out->float_save.data_offset = fpregs.foo; + out->float_save.data_selector = fpregs.fos; + + // 8 registers * 10 bytes per register. + my_memcpy(out->float_save.register_area, fpregs.st_space, 10 * 8); + + // This matches the Intel fpsave format. + U16(out->extended_registers + 0, fpregs.cwd); + U16(out->extended_registers + 2, fpregs.swd); + U16(out->extended_registers + 4, fpregs.twd); + U16(out->extended_registers + 6, fpxregs.fop); + U32(out->extended_registers + 8, fpxregs.fip); + U16(out->extended_registers + 12, fpxregs.fcs); + U32(out->extended_registers + 16, fpregs.foo); + U16(out->extended_registers + 20, fpregs.fos); + U32(out->extended_registers + 24, fpxregs.mxcsr); + + my_memcpy(out->extended_registers + 32, &fpxregs.st_space, 128); + my_memcpy(out->extended_registers + 160, &fpxregs.xmm_space, 128); +} + +#elif defined(__x86_64) + +uintptr_t ThreadInfo::GetInstructionPointer() const { + return regs.rip; +} + +void ThreadInfo::FillCPUContext(RawContextCPU* out) const { + out->context_flags = MD_CONTEXT_AMD64_FULL | + MD_CONTEXT_AMD64_SEGMENTS; + + out->cs = regs.cs; + + out->ds = regs.ds; + out->es = regs.es; + out->fs = regs.fs; + out->gs = regs.gs; + + out->ss = regs.ss; + out->eflags = regs.eflags; + + out->dr0 = dregs[0]; + out->dr1 = dregs[1]; + out->dr2 = dregs[2]; + out->dr3 = dregs[3]; + // 4 and 5 deliberatly omitted because they aren't included in the minidump + // format. + out->dr6 = dregs[6]; + out->dr7 = dregs[7]; + + out->rax = regs.rax; + out->rcx = regs.rcx; + out->rdx = regs.rdx; + out->rbx = regs.rbx; + + out->rsp = regs.rsp; + + out->rbp = regs.rbp; + out->rsi = regs.rsi; + out->rdi = regs.rdi; + out->r8 = regs.r8; + out->r9 = regs.r9; + out->r10 = regs.r10; + out->r11 = regs.r11; + out->r12 = regs.r12; + out->r13 = regs.r13; + out->r14 = regs.r14; + out->r15 = regs.r15; + + out->rip = regs.rip; + + out->flt_save.control_word = fpregs.cwd; + out->flt_save.status_word = fpregs.swd; + out->flt_save.tag_word = fpregs.ftw; + out->flt_save.error_opcode = fpregs.fop; + out->flt_save.error_offset = fpregs.rip; + out->flt_save.error_selector = 0; // We don't have this. + out->flt_save.data_offset = fpregs.rdp; + out->flt_save.data_selector = 0; // We don't have this. 
+ out->flt_save.mx_csr = fpregs.mxcsr; + out->flt_save.mx_csr_mask = fpregs.mxcr_mask; + + my_memcpy(&out->flt_save.float_registers, &fpregs.st_space, 8 * 16); + my_memcpy(&out->flt_save.xmm_registers, &fpregs.xmm_space, 16 * 16); +} + +#elif defined(__ARM_EABI__) + +uintptr_t ThreadInfo::GetInstructionPointer() const { + return regs.uregs[15]; +} + +void ThreadInfo::FillCPUContext(RawContextCPU* out) const { + out->context_flags = MD_CONTEXT_ARM_FULL; + + for (int i = 0; i < MD_CONTEXT_ARM_GPR_COUNT; ++i) + out->iregs[i] = regs.uregs[i]; + // No CPSR register in ThreadInfo(it's not accessible via ptrace) + out->cpsr = 0; +#if !defined(__ANDROID__) + out->float_save.fpscr = fpregs.fpsr | + (static_cast(fpregs.fpcr) << 32); + // TODO: sort this out, actually collect floating point registers + my_memset(&out->float_save.regs, 0, sizeof(out->float_save.regs)); + my_memset(&out->float_save.extra, 0, sizeof(out->float_save.extra)); +#endif +} + +#elif defined(__aarch64__) + +uintptr_t ThreadInfo::GetInstructionPointer() const { + return regs.pc; +} + +void ThreadInfo::FillCPUContext(RawContextCPU* out) const { + out->context_flags = MD_CONTEXT_ARM64_FULL; + + out->cpsr = static_cast(regs.pstate); + for (int i = 0; i < MD_CONTEXT_ARM64_REG_SP; ++i) + out->iregs[i] = regs.regs[i]; + out->iregs[MD_CONTEXT_ARM64_REG_SP] = regs.sp; + out->iregs[MD_CONTEXT_ARM64_REG_PC] = regs.pc; + + out->float_save.fpsr = fpregs.fpsr; + out->float_save.fpcr = fpregs.fpcr; + my_memcpy(&out->float_save.regs, &fpregs.vregs, + MD_FLOATINGSAVEAREA_ARM64_FPR_COUNT * 16); +} + +#elif defined(__mips__) + +uintptr_t ThreadInfo::GetInstructionPointer() const { + return mcontext.pc; +} + +void ThreadInfo::FillCPUContext(RawContextCPU* out) const { + out->context_flags = MD_CONTEXT_MIPS_FULL; + + for (int i = 0; i < MD_CONTEXT_MIPS_GPR_COUNT; ++i) + out->iregs[i] = mcontext.gregs[i]; + + out->mdhi = mcontext.mdhi; + out->mdlo = mcontext.mdlo; + out->dsp_control = mcontext.dsp; + + out->hi[0] = mcontext.hi1; + out->lo[0] = mcontext.lo1; + out->hi[1] = mcontext.hi2; + out->lo[1] = mcontext.lo2; + out->hi[2] = mcontext.hi3; + out->lo[2] = mcontext.lo3; + + out->epc = mcontext.pc; + out->badvaddr = 0; // Not stored in mcontext + out->status = 0; // Not stored in mcontext + out->cause = 0; // Not stored in mcontext + + for (int i = 0; i < MD_FLOATINGSAVEAREA_MIPS_FPR_COUNT; ++i) + out->float_save.regs[i] = mcontext.fpregs.fp_r.fp_fregs[i]._fp_fregs; + + out->float_save.fpcsr = mcontext.fpc_csr; +#if _MIPS_SIM == _ABIO32 + out->float_save.fir = mcontext.fpc_eir; +#endif +} +#endif // __mips__ + +void ThreadInfo::GetGeneralPurposeRegisters(void** gp_regs, size_t* size) { + assert(gp_regs || size); +#if defined(__mips__) + if (gp_regs) + *gp_regs = mcontext.gregs; + if (size) + *size = sizeof(mcontext.gregs); +#else + if (gp_regs) + *gp_regs = ®s; + if (size) + *size = sizeof(regs); +#endif +} + +void ThreadInfo::GetFloatingPointRegisters(void** fp_regs, size_t* size) { + assert(fp_regs || size); +#if defined(__mips__) + if (fp_regs) + *fp_regs = &mcontext.fpregs; + if (size) + *size = sizeof(mcontext.fpregs); +#else + if (fp_regs) + *fp_regs = &fpregs; + if (size) + *size = sizeof(fpregs); +#endif +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/thread_info.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/thread_info.h new file mode 100644 index 0000000000..a05ffea2e2 --- /dev/null +++ 
b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/thread_info.h @@ -0,0 +1,91 @@ +// Copyright (c) 2014, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef CLIENT_LINUX_DUMP_WRITER_COMMON_THREAD_INFO_H_ +#define CLIENT_LINUX_DUMP_WRITER_COMMON_THREAD_INFO_H_ + +#include +#include + +#include "client/linux/dump_writer_common/raw_context_cpu.h" +#include "common/memory.h" +#include "google_breakpad/common/minidump_format.h" + +namespace google_breakpad { + +#if defined(__i386) || defined(__x86_64) +typedef __typeof__(((struct user*) 0)->u_debugreg[0]) debugreg_t; +#endif + +// We produce one of these structures for each thread in the crashed process. +struct ThreadInfo { + pid_t tgid; // thread group id + pid_t ppid; // parent process + + uintptr_t stack_pointer; // thread stack pointer + + +#if defined(__i386) || defined(__x86_64) + user_regs_struct regs; + user_fpregs_struct fpregs; + static const unsigned kNumDebugRegisters = 8; + debugreg_t dregs[8]; +#if defined(__i386) + user_fpxregs_struct fpxregs; +#endif // defined(__i386) + +#elif defined(__ARM_EABI__) + // Mimicking how strace does this(see syscall.c, search for GETREGS) + struct user_regs regs; + struct user_fpregs fpregs; +#elif defined(__aarch64__) + // Use the structures defined in + struct user_pt_regs regs; + struct user_fpsimd_state fpregs; +#elif defined(__mips__) + // Use the structure defined in . + mcontext_t mcontext; +#endif + + // Returns the instruction pointer (platform-dependent impl.). + uintptr_t GetInstructionPointer() const; + + // Fills a RawContextCPU using the context in the ThreadInfo object. + void FillCPUContext(RawContextCPU* out) const; + + // Returns the pointer and size of general purpose register area. + void GetGeneralPurposeRegisters(void** gp_regs, size_t* size); + + // Returns the pointer and size of float point register area. 
+ void GetFloatingPointRegisters(void** fp_regs, size_t* size); +}; + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_DUMP_WRITER_COMMON_THREAD_INFO_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/ucontext_reader.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/ucontext_reader.cc new file mode 100644 index 0000000000..d37fdeb01f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/ucontext_reader.cc @@ -0,0 +1,253 @@ +// Copyright (c) 2014, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include "client/linux/dump_writer_common/ucontext_reader.h" + +#include "common/linux/linux_libc_support.h" +#include "google_breakpad/common/minidump_format.h" + +namespace google_breakpad { + +// Minidump defines register structures which are different from the raw +// structures which we get from the kernel. These are platform specific +// functions to juggle the ucontext and user structures into minidump format. 
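Note (not part of the vendored breakpad sources): a small illustrative sketch of where these readers sit. The kernel hands a SA_SIGINFO handler a ucontext through its third argument, and UContextReader translates that into the minidump register layout; in this patch the translation actually happens inside the minidump/microdump writers rather than in a handler like the hypothetical one below. The old "struct ucontext" spelling follows the signatures declared in ucontext_reader.h.

// Illustrative sketch only.
#include <signal.h>
#include <stdint.h>
#include <ucontext.h>

#include "client/linux/dump_writer_common/ucontext_reader.h"

static void CrashSignalHandler(int /*sig*/, siginfo_t* /*info*/, void* uc_void) {
  const struct ucontext* uc = static_cast<const struct ucontext*>(uc_void);

  // Where the crashing thread was, straight from the kernel-provided context.
  uintptr_t ip = google_breakpad::UContextReader::GetInstructionPointer(uc);
  uintptr_t sp = google_breakpad::UContextReader::GetStackPointer(uc);
  (void)ip;
  (void)sp;

  // FillCPUContext() would then copy uc (plus the FP state on x86/arm64)
  // into the MDRawContext* layout that the minidump writer serializes.
}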
+ +#if defined(__i386__) + +uintptr_t UContextReader::GetStackPointer(const struct ucontext* uc) { + return uc->uc_mcontext.gregs[REG_ESP]; +} + +uintptr_t UContextReader::GetInstructionPointer(const struct ucontext* uc) { + return uc->uc_mcontext.gregs[REG_EIP]; +} + +void UContextReader::FillCPUContext(RawContextCPU *out, const ucontext *uc, + const struct _libc_fpstate* fp) { + const greg_t* regs = uc->uc_mcontext.gregs; + + out->context_flags = MD_CONTEXT_X86_FULL | + MD_CONTEXT_X86_FLOATING_POINT; + + out->gs = regs[REG_GS]; + out->fs = regs[REG_FS]; + out->es = regs[REG_ES]; + out->ds = regs[REG_DS]; + + out->edi = regs[REG_EDI]; + out->esi = regs[REG_ESI]; + out->ebx = regs[REG_EBX]; + out->edx = regs[REG_EDX]; + out->ecx = regs[REG_ECX]; + out->eax = regs[REG_EAX]; + + out->ebp = regs[REG_EBP]; + out->eip = regs[REG_EIP]; + out->cs = regs[REG_CS]; + out->eflags = regs[REG_EFL]; + out->esp = regs[REG_UESP]; + out->ss = regs[REG_SS]; + + out->float_save.control_word = fp->cw; + out->float_save.status_word = fp->sw; + out->float_save.tag_word = fp->tag; + out->float_save.error_offset = fp->ipoff; + out->float_save.error_selector = fp->cssel; + out->float_save.data_offset = fp->dataoff; + out->float_save.data_selector = fp->datasel; + + // 8 registers * 10 bytes per register. + my_memcpy(out->float_save.register_area, fp->_st, 10 * 8); +} + +#elif defined(__x86_64) + +uintptr_t UContextReader::GetStackPointer(const struct ucontext* uc) { + return uc->uc_mcontext.gregs[REG_RSP]; +} + +uintptr_t UContextReader::GetInstructionPointer(const struct ucontext* uc) { + return uc->uc_mcontext.gregs[REG_RIP]; +} + +void UContextReader::FillCPUContext(RawContextCPU *out, const ucontext *uc, + const struct _libc_fpstate* fpregs) { + const greg_t* regs = uc->uc_mcontext.gregs; + + out->context_flags = MD_CONTEXT_AMD64_FULL; + + out->cs = regs[REG_CSGSFS] & 0xffff; + + out->fs = (regs[REG_CSGSFS] >> 32) & 0xffff; + out->gs = (regs[REG_CSGSFS] >> 16) & 0xffff; + + out->eflags = regs[REG_EFL]; + + out->rax = regs[REG_RAX]; + out->rcx = regs[REG_RCX]; + out->rdx = regs[REG_RDX]; + out->rbx = regs[REG_RBX]; + + out->rsp = regs[REG_RSP]; + out->rbp = regs[REG_RBP]; + out->rsi = regs[REG_RSI]; + out->rdi = regs[REG_RDI]; + out->r8 = regs[REG_R8]; + out->r9 = regs[REG_R9]; + out->r10 = regs[REG_R10]; + out->r11 = regs[REG_R11]; + out->r12 = regs[REG_R12]; + out->r13 = regs[REG_R13]; + out->r14 = regs[REG_R14]; + out->r15 = regs[REG_R15]; + + out->rip = regs[REG_RIP]; + + out->flt_save.control_word = fpregs->cwd; + out->flt_save.status_word = fpregs->swd; + out->flt_save.tag_word = fpregs->ftw; + out->flt_save.error_opcode = fpregs->fop; + out->flt_save.error_offset = fpregs->rip; + out->flt_save.data_offset = fpregs->rdp; + out->flt_save.error_selector = 0; // We don't have this. + out->flt_save.data_selector = 0; // We don't have this. 
+ out->flt_save.mx_csr = fpregs->mxcsr; + out->flt_save.mx_csr_mask = fpregs->mxcr_mask; + my_memcpy(&out->flt_save.float_registers, &fpregs->_st, 8 * 16); + my_memcpy(&out->flt_save.xmm_registers, &fpregs->_xmm, 16 * 16); +} + +#elif defined(__ARM_EABI__) + +uintptr_t UContextReader::GetStackPointer(const struct ucontext* uc) { + return uc->uc_mcontext.arm_sp; +} + +uintptr_t UContextReader::GetInstructionPointer(const struct ucontext* uc) { + return uc->uc_mcontext.arm_pc; +} + +void UContextReader::FillCPUContext(RawContextCPU *out, const ucontext *uc) { + out->context_flags = MD_CONTEXT_ARM_FULL; + + out->iregs[0] = uc->uc_mcontext.arm_r0; + out->iregs[1] = uc->uc_mcontext.arm_r1; + out->iregs[2] = uc->uc_mcontext.arm_r2; + out->iregs[3] = uc->uc_mcontext.arm_r3; + out->iregs[4] = uc->uc_mcontext.arm_r4; + out->iregs[5] = uc->uc_mcontext.arm_r5; + out->iregs[6] = uc->uc_mcontext.arm_r6; + out->iregs[7] = uc->uc_mcontext.arm_r7; + out->iregs[8] = uc->uc_mcontext.arm_r8; + out->iregs[9] = uc->uc_mcontext.arm_r9; + out->iregs[10] = uc->uc_mcontext.arm_r10; + + out->iregs[11] = uc->uc_mcontext.arm_fp; + out->iregs[12] = uc->uc_mcontext.arm_ip; + out->iregs[13] = uc->uc_mcontext.arm_sp; + out->iregs[14] = uc->uc_mcontext.arm_lr; + out->iregs[15] = uc->uc_mcontext.arm_pc; + + out->cpsr = uc->uc_mcontext.arm_cpsr; + + // TODO: fix this after fixing ExceptionHandler + out->float_save.fpscr = 0; + my_memset(&out->float_save.regs, 0, sizeof(out->float_save.regs)); + my_memset(&out->float_save.extra, 0, sizeof(out->float_save.extra)); +} + +#elif defined(__aarch64__) + +uintptr_t UContextReader::GetStackPointer(const struct ucontext* uc) { + return uc->uc_mcontext.sp; +} + +uintptr_t UContextReader::GetInstructionPointer(const struct ucontext* uc) { + return uc->uc_mcontext.pc; +} + +void UContextReader::FillCPUContext(RawContextCPU *out, const ucontext *uc, + const struct fpsimd_context* fpregs) { + out->context_flags = MD_CONTEXT_ARM64_FULL; + + out->cpsr = static_cast(uc->uc_mcontext.pstate); + for (int i = 0; i < MD_CONTEXT_ARM64_REG_SP; ++i) + out->iregs[i] = uc->uc_mcontext.regs[i]; + out->iregs[MD_CONTEXT_ARM64_REG_SP] = uc->uc_mcontext.sp; + out->iregs[MD_CONTEXT_ARM64_REG_PC] = uc->uc_mcontext.pc; + + out->float_save.fpsr = fpregs->fpsr; + out->float_save.fpcr = fpregs->fpcr; + my_memcpy(&out->float_save.regs, &fpregs->vregs, + MD_FLOATINGSAVEAREA_ARM64_FPR_COUNT * 16); +} + +#elif defined(__mips__) + +uintptr_t UContextReader::GetStackPointer(const struct ucontext* uc) { + return uc->uc_mcontext.gregs[MD_CONTEXT_MIPS_REG_SP]; +} + +uintptr_t UContextReader::GetInstructionPointer(const struct ucontext* uc) { + return uc->uc_mcontext.pc; +} + +void UContextReader::FillCPUContext(RawContextCPU *out, const ucontext *uc) { + out->context_flags = MD_CONTEXT_MIPS_FULL; + + for (int i = 0; i < MD_CONTEXT_MIPS_GPR_COUNT; ++i) + out->iregs[i] = uc->uc_mcontext.gregs[i]; + + out->mdhi = uc->uc_mcontext.mdhi; + out->mdlo = uc->uc_mcontext.mdlo; + + out->hi[0] = uc->uc_mcontext.hi1; + out->hi[1] = uc->uc_mcontext.hi2; + out->hi[2] = uc->uc_mcontext.hi3; + out->lo[0] = uc->uc_mcontext.lo1; + out->lo[1] = uc->uc_mcontext.lo2; + out->lo[2] = uc->uc_mcontext.lo3; + out->dsp_control = uc->uc_mcontext.dsp; + + out->epc = uc->uc_mcontext.pc; + out->badvaddr = 0; // Not reported in signal context. + out->status = 0; // Not reported in signal context. + out->cause = 0; // Not reported in signal context. 
+ + for (int i = 0; i < MD_FLOATINGSAVEAREA_MIPS_FPR_COUNT; ++i) + out->float_save.regs[i] = uc->uc_mcontext.fpregs.fp_r.fp_dregs[i]; + + out->float_save.fpcsr = uc->uc_mcontext.fpc_csr; +#if _MIPS_SIM == _ABIO32 + out->float_save.fir = uc->uc_mcontext.fpc_eir; // Unused. +#endif +} +#endif + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/ucontext_reader.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/ucontext_reader.h new file mode 100644 index 0000000000..b6e77b4b52 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/dump_writer_common/ucontext_reader.h @@ -0,0 +1,64 @@ +// Copyright (c) 2014, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef CLIENT_LINUX_DUMP_WRITER_COMMON_UCONTEXT_READER_H +#define CLIENT_LINUX_DUMP_WRITER_COMMON_UCONTEXT_READER_H + +#include +#include + +#include "client/linux/dump_writer_common/raw_context_cpu.h" +#include "common/memory.h" +#include "google_breakpad/common/minidump_format.h" + +namespace google_breakpad { + +// Wraps platform-dependent implementations of accessors to ucontext structs. +struct UContextReader { + static uintptr_t GetStackPointer(const struct ucontext* uc); + + static uintptr_t GetInstructionPointer(const struct ucontext* uc); + + // Juggle a arch-specific ucontext into a minidump format + // out: the minidump structure + // info: the collection of register structures. 
+#if defined(__i386__) || defined(__x86_64) + static void FillCPUContext(RawContextCPU *out, const ucontext *uc, + const struct _libc_fpstate* fp); +#elif defined(__aarch64__) + static void FillCPUContext(RawContextCPU *out, const ucontext *uc, + const struct fpsimd_context* fpregs); +#else + static void FillCPUContext(RawContextCPU *out, const ucontext *uc); +#endif +}; + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_DUMP_WRITER_COMMON_UCONTEXT_READER_H diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/handler/exception_handler.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/handler/exception_handler.cc new file mode 100644 index 0000000000..c483155be4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/handler/exception_handler.cc @@ -0,0 +1,743 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// The ExceptionHandler object installs signal handlers for a number of +// signals. We rely on the signal handler running on the thread which crashed +// in order to identify it. This is true of the synchronous signals (SEGV etc), +// but not true of ABRT. Thus, if you send ABRT to yourself in a program which +// uses ExceptionHandler, you need to use tgkill to direct it to the current +// thread. +// +// The signal flow looks like this: +// +// SignalHandler (uses a global stack of ExceptionHandler objects to find +// | one to handle the signal. If the first rejects it, try +// | the second etc...) +// V +// HandleSignal ----------------------------| (clones a new process which +// | | shares an address space with +// (wait for cloned | the crashed process. This +// process) | allows us to ptrace the crashed +// | | process) +// V V +// (set signal handler to ThreadEntry (static function to bounce +// SIG_DFL and rethrow, | back into the object) +// killing the crashed | +// process) V +// DoDump (writes minidump) +// | +// V +// sys_exit +// + +// This code is a little fragmented. 
Different functions of the ExceptionHandler +// class run in a number of different contexts. Some of them run in a normal +// context and are easy to code, others run in a compromised context and the +// restrictions at the top of minidump_writer.cc apply: no libc and use the +// alternative malloc. Each function should have comment above it detailing the +// context which it runs in. + +#include "client/linux/handler/exception_handler.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include +#include +#include + +#include "common/basictypes.h" +#include "common/linux/linux_libc_support.h" +#include "common/memory.h" +#include "client/linux/log/log.h" +#include "client/linux/microdump_writer/microdump_writer.h" +#include "client/linux/minidump_writer/linux_dumper.h" +#include "client/linux/minidump_writer/minidump_writer.h" +#include "common/linux/eintr_wrapper.h" +#include "third_party/lss/linux_syscall_support.h" + +#if defined(__ANDROID__) +#include "linux/sched.h" +#endif + +#ifndef PR_SET_PTRACER +#define PR_SET_PTRACER 0x59616d61 +#endif + +namespace google_breakpad { + +namespace { +// The list of signals which we consider to be crashes. The default action for +// all these signals must be Core (see man 7 signal) because we rethrow the +// signal after handling it and expect that it'll be fatal. +const int kExceptionSignals[] = { + SIGSEGV, SIGABRT, SIGFPE, SIGILL, SIGBUS +}; +const int kNumHandledSignals = + sizeof(kExceptionSignals) / sizeof(kExceptionSignals[0]); +struct sigaction old_handlers[kNumHandledSignals]; +bool handlers_installed = false; + +// InstallAlternateStackLocked will store the newly installed stack in new_stack +// and (if it exists) the previously installed stack in old_stack. +stack_t old_stack; +stack_t new_stack; +bool stack_installed = false; + +// Create an alternative stack to run the signal handlers on. This is done since +// the signal might have been caused by a stack overflow. +// Runs before crashing: normal context. +void InstallAlternateStackLocked() { + if (stack_installed) + return; + + memset(&old_stack, 0, sizeof(old_stack)); + memset(&new_stack, 0, sizeof(new_stack)); + + // SIGSTKSZ may be too small to prevent the signal handlers from overrunning + // the alternative stack. Ensure that the size of the alternative stack is + // large enough. + static const unsigned kSigStackSize = std::max(16384, SIGSTKSZ); + + // Only set an alternative stack if there isn't already one, or if the current + // one is too small. + if (sys_sigaltstack(NULL, &old_stack) == -1 || !old_stack.ss_sp || + old_stack.ss_size < kSigStackSize) { + new_stack.ss_sp = calloc(1, kSigStackSize); + new_stack.ss_size = kSigStackSize; + + if (sys_sigaltstack(&new_stack, NULL) == -1) { + free(new_stack.ss_sp); + return; + } + stack_installed = true; + } +} + +// Runs before crashing: normal context. +void RestoreAlternateStackLocked() { + if (!stack_installed) + return; + + stack_t current_stack; + if (sys_sigaltstack(NULL, ¤t_stack) == -1) + return; + + // Only restore the old_stack if the current alternative stack is the one + // installed by the call to InstallAlternateStackLocked. 
+ if (current_stack.ss_sp == new_stack.ss_sp) { + if (old_stack.ss_sp) { + if (sys_sigaltstack(&old_stack, NULL) == -1) + return; + } else { + stack_t disable_stack; + disable_stack.ss_flags = SS_DISABLE; + if (sys_sigaltstack(&disable_stack, NULL) == -1) + return; + } + } + + free(new_stack.ss_sp); + stack_installed = false; +} + +void InstallDefaultHandler(int sig) { +#if defined(__ANDROID__) + // Android L+ expose signal and sigaction symbols that override the system + // ones. There is a bug in these functions where a request to set the handler + // to SIG_DFL is ignored. In that case, an infinite loop is entered as the + // signal is repeatedly sent to breakpad's signal handler. + // To work around this, directly call the system's sigaction. + struct kernel_sigaction sa; + memset(&sa, 0, sizeof(sa)); + sys_sigemptyset(&sa.sa_mask); + sa.sa_handler_ = SIG_DFL; + sa.sa_flags = SA_RESTART; + sys_rt_sigaction(sig, &sa, NULL, sizeof(kernel_sigset_t)); +#else + signal(sig, SIG_DFL); +#endif +} + +// The global exception handler stack. This is needed because there may exist +// multiple ExceptionHandler instances in a process. Each will have itself +// registered in this stack. +std::vector* g_handler_stack_ = NULL; +pthread_mutex_t g_handler_stack_mutex_ = PTHREAD_MUTEX_INITIALIZER; + +} // namespace + +// Runs before crashing: normal context. +ExceptionHandler::ExceptionHandler(const MinidumpDescriptor& descriptor, + FilterCallback filter, + MinidumpCallback callback, + void* callback_context, + bool install_handler, + const int server_fd) + : filter_(filter), + callback_(callback), + callback_context_(callback_context), + minidump_descriptor_(descriptor), + crash_handler_(NULL) { + if (server_fd >= 0) + crash_generation_client_.reset(CrashGenerationClient::TryCreate(server_fd)); + + if (!IsOutOfProcess() && !minidump_descriptor_.IsFD() && + !minidump_descriptor_.IsMicrodumpOnConsole()) + minidump_descriptor_.UpdatePath(); + + pthread_mutex_lock(&g_handler_stack_mutex_); + if (!g_handler_stack_) + g_handler_stack_ = new std::vector; + if (install_handler) { + InstallAlternateStackLocked(); + InstallHandlersLocked(); + } + g_handler_stack_->push_back(this); + pthread_mutex_unlock(&g_handler_stack_mutex_); +} + +// Runs before crashing: normal context. +ExceptionHandler::~ExceptionHandler() { + pthread_mutex_lock(&g_handler_stack_mutex_); + std::vector::iterator handler = + std::find(g_handler_stack_->begin(), g_handler_stack_->end(), this); + g_handler_stack_->erase(handler); + if (g_handler_stack_->empty()) { + delete g_handler_stack_; + g_handler_stack_ = NULL; + RestoreAlternateStackLocked(); + RestoreHandlersLocked(); + } + pthread_mutex_unlock(&g_handler_stack_mutex_); +} + +// Runs before crashing: normal context. +// static +bool ExceptionHandler::InstallHandlersLocked() { + if (handlers_installed) + return false; + + // Fail if unable to store all the old handlers. + for (int i = 0; i < kNumHandledSignals; ++i) { + if (sigaction(kExceptionSignals[i], NULL, &old_handlers[i]) == -1) + return false; + } + + struct sigaction sa; + memset(&sa, 0, sizeof(sa)); + sigemptyset(&sa.sa_mask); + + // Mask all exception signals when we're handling one of them. 
+ for (int i = 0; i < kNumHandledSignals; ++i) + sigaddset(&sa.sa_mask, kExceptionSignals[i]); + + sa.sa_sigaction = SignalHandler; + sa.sa_flags = SA_ONSTACK | SA_SIGINFO; + + for (int i = 0; i < kNumHandledSignals; ++i) { + if (sigaction(kExceptionSignals[i], &sa, NULL) == -1) { + // At this point it is impractical to back out changes, and so failure to + // install a signal is intentionally ignored. + } + } + handlers_installed = true; + return true; +} + +// This function runs in a compromised context: see the top of the file. +// Runs on the crashing thread. +// static +void ExceptionHandler::RestoreHandlersLocked() { + if (!handlers_installed) + return; + + for (int i = 0; i < kNumHandledSignals; ++i) { + if (sigaction(kExceptionSignals[i], &old_handlers[i], NULL) == -1) { + InstallDefaultHandler(kExceptionSignals[i]); + } + } + handlers_installed = false; +} + +// void ExceptionHandler::set_crash_handler(HandlerCallback callback) { +// crash_handler_ = callback; +// } + +// This function runs in a compromised context: see the top of the file. +// Runs on the crashing thread. +// static +void ExceptionHandler::SignalHandler(int sig, siginfo_t* info, void* uc) { + // All the exception signals are blocked at this point. + pthread_mutex_lock(&g_handler_stack_mutex_); + + // Sometimes, Breakpad runs inside a process where some other buggy code + // saves and restores signal handlers temporarily with 'signal' + // instead of 'sigaction'. This loses the SA_SIGINFO flag associated + // with this function. As a consequence, the values of 'info' and 'uc' + // become totally bogus, generally inducing a crash. + // + // The following code tries to detect this case. When it does, it + // resets the signal handlers with sigaction + SA_SIGINFO and returns. + // This forces the signal to be thrown again, but this time the kernel + // will call the function with the right arguments. + struct sigaction cur_handler; + if (sigaction(sig, NULL, &cur_handler) == 0 && + (cur_handler.sa_flags & SA_SIGINFO) == 0) { + // Reset signal handler with the right flags. + sigemptyset(&cur_handler.sa_mask); + sigaddset(&cur_handler.sa_mask, sig); + + cur_handler.sa_sigaction = SignalHandler; + cur_handler.sa_flags = SA_ONSTACK | SA_SIGINFO; + + if (sigaction(sig, &cur_handler, NULL) == -1) { + // When resetting the handler fails, try to reset the + // default one to avoid an infinite loop here. + InstallDefaultHandler(sig); + } + pthread_mutex_unlock(&g_handler_stack_mutex_); + return; + } + + bool handled = false; + for (int i = g_handler_stack_->size() - 1; !handled && i >= 0; --i) { + handled = (*g_handler_stack_)[i]->HandleSignal(sig, info, uc); + } + + // Upon returning from this signal handler, sig will become unmasked and then + // it will be retriggered. If one of the ExceptionHandlers handled it + // successfully, restore the default handler. Otherwise, restore the + // previously installed handler. Then, when the signal is retriggered, it will + // be delivered to the appropriate handler. + if (handled) { + InstallDefaultHandler(sig); + } else { + RestoreHandlersLocked(); + } + + pthread_mutex_unlock(&g_handler_stack_mutex_); + + // info->si_code <= 0 iff SI_FROMUSER (SI_FROMKERNEL otherwise). + if (info->si_code <= 0 || sig == SIGABRT) { + // This signal was triggered by somebody sending us the signal with kill(). + // In order to retrigger it, we have to queue a new signal by calling + // kill() ourselves. 
The special case (si_pid == 0 && sig == SIGABRT) is + // due to the kernel sending a SIGABRT from a user request via SysRQ. + if (tgkill(getpid(), syscall(__NR_gettid), sig) < 0) { + // If we failed to kill ourselves (e.g. because a sandbox disallows us + // to do so), we instead resort to terminating our process. This will + // result in an incorrect exit code. + _exit(1); + } + } else { + // This was a synchronous signal triggered by a hard fault (e.g. SIGSEGV). + // No need to reissue the signal. It will automatically trigger again, + // when we return from the signal handler. + } +} + +struct ThreadArgument { + pid_t pid; // the crashing process + const MinidumpDescriptor* minidump_descriptor; + ExceptionHandler* handler; + const void* context; // a CrashContext structure + size_t context_size; +}; + +// This is the entry function for the cloned process. We are in a compromised +// context here: see the top of the file. +// static +int ExceptionHandler::ThreadEntry(void *arg) { + const ThreadArgument *thread_arg = reinterpret_cast(arg); + + // Block here until the crashing process unblocks us when + // we're allowed to use ptrace + thread_arg->handler->WaitForContinueSignal(); + + return thread_arg->handler->DoDump(thread_arg->pid, thread_arg->context, + thread_arg->context_size) == false; +} + +// This function runs in a compromised context: see the top of the file. +// Runs on the crashing thread. +bool ExceptionHandler::HandleSignal(int sig, siginfo_t* info, void* uc) { + if (filter_ && !filter_(callback_context_)) + return false; + + // Allow ourselves to be dumped if the signal is trusted. + bool signal_trusted = info->si_code > 0; + bool signal_pid_trusted = info->si_code == SI_USER || + info->si_code == SI_TKILL; + if (signal_trusted || (signal_pid_trusted && info->si_pid == getpid())) { + sys_prctl(PR_SET_DUMPABLE, 1, 0, 0, 0); + } + CrashContext context; + // Fill in all the holes in the struct to make Valgrind happy. + memset(&context, 0, sizeof(context)); + memcpy(&context.siginfo, info, sizeof(siginfo_t)); + memcpy(&context.context, uc, sizeof(struct ucontext)); +#if defined(__aarch64__) + struct ucontext *uc_ptr = (struct ucontext*)uc; + struct fpsimd_context *fp_ptr = + (struct fpsimd_context*)&uc_ptr->uc_mcontext.__reserved; + if (fp_ptr->head.magic == FPSIMD_MAGIC) { + memcpy(&context.float_state, fp_ptr, sizeof(context.float_state)); + } +#elif !defined(__ARM_EABI__) && !defined(__mips__) + // FP state is not part of user ABI on ARM Linux. + // In case of MIPS Linux FP state is already part of struct ucontext + // and 'float_state' is not a member of CrashContext. + struct ucontext *uc_ptr = (struct ucontext*)uc; + if (uc_ptr->uc_mcontext.fpregs) { + memcpy(&context.float_state, + uc_ptr->uc_mcontext.fpregs, + sizeof(context.float_state)); + } +#endif + context.tid = syscall(__NR_gettid); + if (crash_handler_ != NULL) { + if (crash_handler_(&context, sizeof(context), callback_context_)) { + return true; + } + } + return GenerateDump(&context); +} + +// This is a public interface to HandleSignal that allows the client to +// generate a crash dump. This function may run in a compromised context. +bool ExceptionHandler::SimulateSignalDelivery(int sig) { + siginfo_t siginfo = {}; + // Mimic a trusted signal to allow tracing the process (see + // ExceptionHandler::HandleSignal(). 
+ siginfo.si_code = SI_USER; + siginfo.si_pid = getpid(); + struct ucontext context; + getcontext(&context); + return HandleSignal(sig, &siginfo, &context); +} + +// This function may run in a compromised context: see the top of the file. +bool ExceptionHandler::GenerateDump(CrashContext *context) { + if (IsOutOfProcess()) + return crash_generation_client_->RequestDump(context, sizeof(*context)); + + // Allocating too much stack isn't a problem, and better to err on the side + // of caution than smash it into random locations. + static const unsigned kChildStackSize = 16000; + PageAllocator allocator; + uint8_t* stack = reinterpret_cast(allocator.Alloc(kChildStackSize)); + if (!stack) + return false; + // clone() needs the top-most address. (scrub just to be safe) + stack += kChildStackSize; + my_memset(stack - 16, 0, 16); + + ThreadArgument thread_arg; + thread_arg.handler = this; + thread_arg.minidump_descriptor = &minidump_descriptor_; + thread_arg.pid = getpid(); + thread_arg.context = context; + thread_arg.context_size = sizeof(*context); + + // We need to explicitly enable ptrace of parent processes on some + // kernels, but we need to know the PID of the cloned process before we + // can do this. Create a pipe here which we can use to block the + // cloned process after creating it, until we have explicitly enabled ptrace + if (sys_pipe(fdes) == -1) { + // Creating the pipe failed. We'll log an error but carry on anyway, + // as we'll probably still get a useful crash report. All that will happen + // is the write() and read() calls will fail with EBADF + static const char no_pipe_msg[] = "ExceptionHandler::GenerateDump " + "sys_pipe failed:"; + logger::write(no_pipe_msg, sizeof(no_pipe_msg) - 1); + logger::write(strerror(errno), strlen(strerror(errno))); + logger::write("\n", 1); + + // Ensure fdes[0] and fdes[1] are invalid file descriptors. + fdes[0] = fdes[1] = -1; + } + + const pid_t child = sys_clone( + ThreadEntry, stack, CLONE_FILES | CLONE_FS | CLONE_UNTRACED, + &thread_arg, NULL, NULL, NULL); + if (child == -1) { + sys_close(fdes[0]); + sys_close(fdes[1]); + return false; + } + + // Allow the child to ptrace us + sys_prctl(PR_SET_PTRACER, child, 0, 0, 0); + SendContinueSignalToChild(); + int status; + const int r = HANDLE_EINTR(sys_waitpid(child, &status, __WALL)); + + sys_close(fdes[0]); + sys_close(fdes[1]); + + if (r == -1) { + static const char msg[] = "ExceptionHandler::GenerateDump waitpid failed:"; + logger::write(msg, sizeof(msg) - 1); + logger::write(strerror(errno), strlen(strerror(errno))); + logger::write("\n", 1); + } + + bool success = r != -1 && WIFEXITED(status) && WEXITSTATUS(status) == 0; + if (callback_) + success = callback_(minidump_descriptor_, callback_context_, success); + return success; +} + +// This function runs in a compromised context: see the top of the file. +void ExceptionHandler::SendContinueSignalToChild() { + static const char okToContinueMessage = 'a'; + int r; + r = HANDLE_EINTR(sys_write(fdes[1], &okToContinueMessage, sizeof(char))); + if (r == -1) { + static const char msg[] = "ExceptionHandler::SendContinueSignalToChild " + "sys_write failed:"; + logger::write(msg, sizeof(msg) - 1); + logger::write(strerror(errno), strlen(strerror(errno))); + logger::write("\n", 1); + } +} + +// This function runs in a compromised context: see the top of the file. +// Runs on the cloned process. 
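// Editor's note (not in the original breakpad source): the parent/child dance in
// GenerateDump above is a pipe-based barrier. Stripped of the sys_* wrappers and
// error handling, the idea is roughly:
//
//   int fds[2];
//   pipe(fds);                               // fds[0]: read end, fds[1]: write end
//   pid_t child = clone(ThreadEntry, stack,  // child shares our address space
//                       CLONE_FILES | CLONE_FS | CLONE_UNTRACED, &arg);
//   prctl(PR_SET_PTRACER, child, 0, 0, 0);   // now the child may ptrace us
//   write(fds[1], "a", 1);                   // SendContinueSignalToChild
//   waitpid(child, &status, __WALL);         // child writes the dump, then exits
//
// WaitForContinueSignal below is the child half: it blocks in read(fds[0], ...)
// until that byte arrives. Breakpad goes through the raw sys_* syscall wrappers
// because this path runs in a compromised (crashing) context where libc may not
// be safe to call.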
+void ExceptionHandler::WaitForContinueSignal() { + int r; + char receivedMessage; + r = HANDLE_EINTR(sys_read(fdes[0], &receivedMessage, sizeof(char))); + if (r == -1) { + static const char msg[] = "ExceptionHandler::WaitForContinueSignal " + "sys_read failed:"; + logger::write(msg, sizeof(msg) - 1); + logger::write(strerror(errno), strlen(strerror(errno))); + logger::write("\n", 1); + } +} + +// This function runs in a compromised context: see the top of the file. +// Runs on the cloned process. +bool ExceptionHandler::DoDump(pid_t crashing_process, const void* context, + size_t context_size) { + if (minidump_descriptor_.IsMicrodumpOnConsole()) { + return google_breakpad::WriteMicrodump( + crashing_process, + context, + context_size, + mapping_list_, + minidump_descriptor_.microdump_build_fingerprint(), + minidump_descriptor_.microdump_product_info()); + } + if (minidump_descriptor_.IsFD()) { + return google_breakpad::WriteMinidump(minidump_descriptor_.fd(), + minidump_descriptor_.size_limit(), + crashing_process, + context, + context_size, + mapping_list_, + app_memory_list_); + } + return google_breakpad::WriteMinidump(minidump_descriptor_.path(), + minidump_descriptor_.size_limit(), + crashing_process, + context, + context_size, + mapping_list_, + app_memory_list_); +} + +// static +bool ExceptionHandler::WriteMinidump(const string& dump_path, + MinidumpCallback callback, + void* callback_context) { + MinidumpDescriptor descriptor(dump_path); + ExceptionHandler eh(descriptor, NULL, callback, callback_context, false, -1); + return eh.WriteMinidump(); +} + +// In order to making using EBP to calculate the desired value for ESP +// a valid operation, ensure that this function is compiled with a +// frame pointer using the following attribute. This attribute +// is supported on GCC but not on clang. +#if defined(__i386__) && defined(__GNUC__) && !defined(__clang__) +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +bool ExceptionHandler::WriteMinidump() { + if (!IsOutOfProcess() && !minidump_descriptor_.IsFD() && + !minidump_descriptor_.IsMicrodumpOnConsole()) { + // Update the path of the minidump so that this can be called multiple times + // and new files are created for each minidump. This is done before the + // generation happens, as clients may want to access the MinidumpDescriptor + // after this call to find the exact path to the minidump file. + minidump_descriptor_.UpdatePath(); + } else if (minidump_descriptor_.IsFD()) { + // Reposition the FD to its beginning and resize it to get rid of the + // previous minidump info. + lseek(minidump_descriptor_.fd(), 0, SEEK_SET); + ignore_result(ftruncate(minidump_descriptor_.fd(), 0)); + } + + // Allow this process to be dumped. + sys_prctl(PR_SET_DUMPABLE, 1, 0, 0, 0); + + CrashContext context; + int getcontext_result = getcontext(&context.context); + if (getcontext_result) + return false; + +#if defined(__i386__) + // In CPUFillFromUContext in minidumpwriter.cc the stack pointer is retrieved + // from REG_UESP instead of from REG_ESP. REG_UESP is the user stack pointer + // and it only makes sense when running in kernel mode with a different stack + // pointer. When WriteMiniDump is called during normal processing REG_UESP is + // zero which leads to bad minidump files. + if (!context.context.uc_mcontext.gregs[REG_UESP]) { + // If REG_UESP is set to REG_ESP then that includes the stack space for the + // CrashContext object in this function, which is about 128 KB. 
Since the + // Linux dumper only records 32 KB of stack this would mean that nothing + // useful would be recorded. A better option is to set REG_UESP to REG_EBP, + // perhaps with a small negative offset in case there is any code that + // objects to them being equal. + context.context.uc_mcontext.gregs[REG_UESP] = + context.context.uc_mcontext.gregs[REG_EBP] - 16; + // The stack saving is based off of REG_ESP so it must be set to match the + // new REG_UESP. + context.context.uc_mcontext.gregs[REG_ESP] = + context.context.uc_mcontext.gregs[REG_UESP]; + } +#endif + +#if !defined(__ARM_EABI__) && !defined(__aarch64__) && !defined(__mips__) + // FPU state is not part of ARM EABI ucontext_t. + memcpy(&context.float_state, context.context.uc_mcontext.fpregs, + sizeof(context.float_state)); +#endif + context.tid = sys_gettid(); + + // Add an exception stream to the minidump for better reporting. + memset(&context.siginfo, 0, sizeof(context.siginfo)); + context.siginfo.si_signo = MD_EXCEPTION_CODE_LIN_DUMP_REQUESTED; +#if defined(__i386__) + context.siginfo.si_addr = + reinterpret_cast(context.context.uc_mcontext.gregs[REG_EIP]); +#elif defined(__x86_64__) + context.siginfo.si_addr = + reinterpret_cast(context.context.uc_mcontext.gregs[REG_RIP]); +#elif defined(__arm__) + context.siginfo.si_addr = + reinterpret_cast(context.context.uc_mcontext.arm_pc); +#elif defined(__aarch64__) + context.siginfo.si_addr = + reinterpret_cast(context.context.uc_mcontext.pc); +#elif defined(__mips__) + context.siginfo.si_addr = + reinterpret_cast(context.context.uc_mcontext.pc); +#else +#error "This code has not been ported to your platform yet." +#endif + + return GenerateDump(&context); +} + +void ExceptionHandler::AddMappingInfo(const string& name, + const uint8_t identifier[sizeof(MDGUID)], + uintptr_t start_address, + size_t mapping_size, + size_t file_offset) { + MappingInfo info; + info.start_addr = start_address; + info.size = mapping_size; + info.offset = file_offset; + strncpy(info.name, name.c_str(), sizeof(info.name) - 1); + info.name[sizeof(info.name) - 1] = '\0'; + + MappingEntry mapping; + mapping.first = info; + memcpy(mapping.second, identifier, sizeof(MDGUID)); + mapping_list_.push_back(mapping); +} + +void ExceptionHandler::RegisterAppMemory(void* ptr, size_t length) { + AppMemoryList::iterator iter = + std::find(app_memory_list_.begin(), app_memory_list_.end(), ptr); + if (iter != app_memory_list_.end()) { + // Don't allow registering the same pointer twice. + return; + } + + AppMemory app_memory; + app_memory.ptr = ptr; + app_memory.length = length; + app_memory_list_.push_back(app_memory); +} + +void ExceptionHandler::UnregisterAppMemory(void* ptr) { + AppMemoryList::iterator iter = + std::find(app_memory_list_.begin(), app_memory_list_.end(), ptr); + if (iter != app_memory_list_.end()) { + app_memory_list_.erase(iter); + } +} + +// static +bool ExceptionHandler::WriteMinidumpForChild(pid_t child, + pid_t child_blamed_thread, + const string& dump_path, + MinidumpCallback callback, + void* callback_context) { + // This function is not run in a compromised context. + MinidumpDescriptor descriptor(dump_path); + descriptor.UpdatePath(); + if (!google_breakpad::WriteMinidump(descriptor.path(), + child, + child_blamed_thread)) + return false; + + return callback ? 
callback(descriptor, callback_context, true) : true; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/handler/exception_handler.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/handler/exception_handler.h new file mode 100644 index 0000000000..591c310855 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/handler/exception_handler.h @@ -0,0 +1,278 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef CLIENT_LINUX_HANDLER_EXCEPTION_HANDLER_H_ +#define CLIENT_LINUX_HANDLER_EXCEPTION_HANDLER_H_ + +#include +#include +#include +#include + +#include + +#include "client/linux/crash_generation/crash_generation_client.h" +#include "client/linux/handler/minidump_descriptor.h" +#include "client/linux/minidump_writer/minidump_writer.h" +#include "common/scoped_ptr.h" +#include "common/using_std_string.h" +#include "google_breakpad/common/minidump_format.h" + +namespace google_breakpad { + +// ExceptionHandler +// +// ExceptionHandler can write a minidump file when an exception occurs, +// or when WriteMinidump() is called explicitly by your program. +// +// To have the exception handler write minidumps when an uncaught exception +// (crash) occurs, you should create an instance early in the execution +// of your program, and keep it around for the entire time you want to +// have crash handling active (typically, until shutdown). +// (NOTE): There should be only be one this kind of exception handler +// object per process. +// +// If you want to write minidumps without installing the exception handler, +// you can create an ExceptionHandler with install_handler set to false, +// then call WriteMinidump. You can also use this technique if you want to +// use different minidump callbacks for different call sites. +// +// In either case, a callback function is called when a minidump is written, +// which receives the full path or file descriptor of the minidump. 
The +// caller can collect and write additional application state to that minidump, +// and launch an external crash-reporting application. +// +// Caller should try to make the callbacks as crash-friendly as possible, +// it should avoid use heap memory allocation as much as possible. + +class ExceptionHandler { + public: + // A callback function to run before Breakpad performs any substantial + // processing of an exception. A FilterCallback is called before writing + // a minidump. |context| is the parameter supplied by the user as + // callback_context when the handler was created. + // + // If a FilterCallback returns true, Breakpad will continue processing, + // attempting to write a minidump. If a FilterCallback returns false, + // Breakpad will immediately report the exception as unhandled without + // writing a minidump, allowing another handler the opportunity to handle it. + typedef bool (*FilterCallback)(void *context); + + // A callback function to run after the minidump has been written. + // |descriptor| contains the file descriptor or file path containing the + // minidump. |context| is the parameter supplied by the user as + // callback_context when the handler was created. |succeeded| indicates + // whether a minidump file was successfully written. + // + // If an exception occurred and the callback returns true, Breakpad will + // treat the exception as fully-handled, suppressing any other handlers from + // being notified of the exception. If the callback returns false, Breakpad + // will treat the exception as unhandled, and allow another handler to handle + // it. If there are no other handlers, Breakpad will report the exception to + // the system as unhandled, allowing a debugger or native crash dialog the + // opportunity to handle the exception. Most callback implementations + // should normally return the value of |succeeded|, or when they wish to + // not report an exception of handled, false. Callbacks will rarely want to + // return true directly (unless |succeeded| is true). + typedef bool (*MinidumpCallback)(const MinidumpDescriptor& descriptor, + void* context, + bool succeeded); + + // In certain cases, a user may wish to handle the generation of the minidump + // themselves. In this case, they can install a handler callback which is + // called when a crash has occurred. If this function returns true, no other + // processing of occurs and the process will shortly be crashed. If this + // returns false, the normal processing continues. + typedef bool (*HandlerCallback)(const void* crash_context, + size_t crash_context_size, + void* context); + + // Creates a new ExceptionHandler instance to handle writing minidumps. + // Before writing a minidump, the optional |filter| callback will be called. + // Its return value determines whether or not Breakpad should write a + // minidump. The minidump content will be written to the file path or file + // descriptor from |descriptor|, and the optional |callback| is called after + // writing the dump file, as described above. + // If install_handler is true, then a minidump will be written whenever + // an unhandled exception occurs. If it is false, minidumps will only + // be written when WriteMinidump is called. + // If |server_fd| is valid, the minidump is generated out-of-process. If it + // is -1, in-process generation will always be used. 
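// Editor's note (not in the original breakpad header): a minimal in-process
// setup, mirroring what the unit tests in this patch do, looks roughly like the
// sketch below; the dump directory is hypothetical.
//
//   static bool DumpCallback(const google_breakpad::MinidumpDescriptor& descriptor,
//                            void* context, bool succeeded) {
//     // descriptor.path() names the minidump that was just written.
//     return succeeded;
//   }
//
//   google_breakpad::MinidumpDescriptor descriptor("/tmp/dumps");  // hypothetical path
//   google_breakpad::ExceptionHandler handler(descriptor,
//                                             /* filter */ NULL,
//                                             DumpCallback,
//                                             /* callback_context */ NULL,
//                                             /* install_handler */ true,
//                                             /* server_fd */ -1);
//   // Keep |handler| alive for as long as crash handling should stay active.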
+ ExceptionHandler(const MinidumpDescriptor& descriptor, + FilterCallback filter, + MinidumpCallback callback, + void* callback_context, + bool install_handler, + const int server_fd); + ~ExceptionHandler(); + + const MinidumpDescriptor& minidump_descriptor() const { + return minidump_descriptor_; + } + + void set_minidump_descriptor(const MinidumpDescriptor& descriptor) { + minidump_descriptor_ = descriptor; + } + + void set_crash_handler(HandlerCallback callback) { + crash_handler_ = callback; + } + + void set_crash_generation_client(CrashGenerationClient* client) { + crash_generation_client_.reset(client); + } + + // Writes a minidump immediately. This can be used to capture the execution + // state independently of a crash. + // Returns true on success. + // If the ExceptionHandler has been created with a path, a new file is + // generated for each minidump. The file path can be retrieved in the + // MinidumpDescriptor passed to the MinidumpCallback or by accessing the + // MinidumpDescriptor directly from the ExceptionHandler (with + // minidump_descriptor()). + // If the ExceptionHandler has been created with a file descriptor, the file + // descriptor is repositioned to its beginning and the previous generated + // minidump is overwritten. + // Note that this method is not supposed to be called from a compromised + // context as it uses the heap. + bool WriteMinidump(); + + // Convenience form of WriteMinidump which does not require an + // ExceptionHandler instance. + static bool WriteMinidump(const string& dump_path, + MinidumpCallback callback, + void* callback_context); + + // Write a minidump of |child| immediately. This can be used to + // capture the execution state of |child| independently of a crash. + // Pass a meaningful |child_blamed_thread| to make that thread in + // the child process the one from which a crash signature is + // extracted. + // + // WARNING: the return of this function *must* happen before + // the code that will eventually reap |child| executes. + // Otherwise there's a pernicious race condition in which |child| + // exits, is reaped, another process created with its pid, then that + // new process dumped. + static bool WriteMinidumpForChild(pid_t child, + pid_t child_blamed_thread, + const string& dump_path, + MinidumpCallback callback, + void* callback_context); + + // This structure is passed to minidump_writer.h:WriteMinidump via an opaque + // blob. It shouldn't be needed in any user code. + struct CrashContext { + siginfo_t siginfo; + pid_t tid; // the crashing thread. + struct ucontext context; +#if !defined(__ARM_EABI__) && !defined(__mips__) + // #ifdef this out because FP state is not part of user ABI for Linux ARM. + // In case of MIPS Linux FP state is already part of struct + // ucontext so 'float_state' is not required. + fpstate_t float_state; +#endif + }; + + // Returns whether out-of-process dump generation is used or not. + bool IsOutOfProcess() const { + return crash_generation_client_.get() != NULL; + } + + // Add information about a memory mapping. This can be used if + // a custom library loader is used that maps things in a way + // that the linux dumper can't handle by reading the maps file. + void AddMappingInfo(const string& name, + const uint8_t identifier[sizeof(MDGUID)], + uintptr_t start_address, + size_t mapping_size, + size_t file_offset); + + // Register a block of memory of length bytes starting at address ptr + // to be copied to the minidump when a crash happens. 
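// Editor's note (not in the original breakpad header): for example, a small
// application-state buffer (the name below is made up for illustration) could be
// captured alongside every dump with:
//
//   static char g_crash_breadcrumbs[256];
//   handler.RegisterAppMemory(g_crash_breadcrumbs, sizeof(g_crash_breadcrumbs));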
+ void RegisterAppMemory(void* ptr, size_t length); + + // Unregister a block of memory that was registered with RegisterAppMemory. + void UnregisterAppMemory(void* ptr); + + // Force signal handling for the specified signal. + bool SimulateSignalDelivery(int sig); + + // Report a crash signal from an SA_SIGINFO signal handler. + bool HandleSignal(int sig, siginfo_t* info, void* uc); + + private: + // Save the old signal handlers and install new ones. + static bool InstallHandlersLocked(); + // Restore the old signal handlers. + static void RestoreHandlersLocked(); + + void PreresolveSymbols(); + bool GenerateDump(CrashContext *context); + void SendContinueSignalToChild(); + void WaitForContinueSignal(); + + static void SignalHandler(int sig, siginfo_t* info, void* uc); + static int ThreadEntry(void* arg); + bool DoDump(pid_t crashing_process, const void* context, + size_t context_size); + + const FilterCallback filter_; + const MinidumpCallback callback_; + void* const callback_context_; + + scoped_ptr crash_generation_client_; + + MinidumpDescriptor minidump_descriptor_; + + // Must be volatile. The compiler is unaware of the code which runs in + // the signal handler which reads this variable. Without volatile the + // compiler is free to optimise away writes to this variable which it + // believes are never read. + volatile HandlerCallback crash_handler_; + + // We need to explicitly enable ptrace of parent processes on some + // kernels, but we need to know the PID of the cloned process before we + // can do this. We create a pipe which we can use to block the + // cloned process after creating it, until we have explicitly enabled + // ptrace. This is used to store the file descriptors for the pipe + int fdes[2]; + + // Callers can add extra info about mappings for cases where the + // dumper code cannot extract enough information from /proc//maps. + MappingList mapping_list_; + + // Callers can request additional memory regions to be included in + // the dump. + AppMemoryList app_memory_list_; +}; + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_HANDLER_EXCEPTION_HANDLER_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/handler/exception_handler_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/handler/exception_handler_unittest.cc new file mode 100644 index 0000000000..289c9cd127 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/handler/exception_handler_unittest.cc @@ -0,0 +1,1195 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include +#include +#include +#include +#include +#include +#include +#include +#if defined(__mips__) +#include +#endif + +#include + +#include "breakpad_googletest_includes.h" +#include "client/linux/handler/exception_handler.h" +#include "client/linux/minidump_writer/minidump_writer.h" +#include "common/linux/eintr_wrapper.h" +#include "common/linux/file_id.h" +#include "common/linux/ignore_ret.h" +#include "common/linux/linux_libc_support.h" +#include "common/tests/auto_tempdir.h" +#include "common/using_std_string.h" +#include "third_party/lss/linux_syscall_support.h" +#include "google_breakpad/processor/minidump.h" + +using namespace google_breakpad; + +namespace { + +// Flush the instruction cache for a given memory range. +// Only required on ARM and mips. +void FlushInstructionCache(const char* memory, uint32_t memory_size) { +#if defined(__arm__) + long begin = reinterpret_cast(memory); + long end = begin + static_cast(memory_size); +# if defined(__ANDROID__) + // Provided by Android's + cacheflush(begin, end, 0); +# elif defined(__linux__) + // GLibc/ARM doesn't provide a wrapper for it, do a direct syscall. +# ifndef __ARM_NR_cacheflush +# define __ARM_NR_cacheflush 0xf0002 +# endif + syscall(__ARM_NR_cacheflush, begin, end, 0); +# else +# error "Your operating system is not supported yet" +# endif +#elif defined(__mips__) +# if defined(__ANDROID__) + // Provided by Android's + long begin = reinterpret_cast(memory); + long end = begin + static_cast(memory_size); +#if _MIPS_SIM == _ABIO32 + cacheflush(begin, end, 0); +#else + syscall(__NR_cacheflush, begin, end, ICACHE); +#endif +# elif defined(__linux__) + // See http://www.linux-mips.org/wiki/Cacheflush_Syscall. + cacheflush(const_cast(memory), memory_size, ICACHE); +# else +# error "Your operating system is not supported yet" +# endif +#endif +} + +// Length of a formatted GUID string = +// sizeof(MDGUID) * 2 + 4 (for dashes) + 1 (null terminator) +const int kGUIDStringSize = 37; + +void sigchld_handler(int signo) { } + +int CreateTMPFile(const string& dir, string* path) { + string file = dir + "/exception-handler-unittest.XXXXXX"; + const char* c_file = file.c_str(); + // Copy that string, mkstemp needs a C string it can modify. + char* c_path = strdup(c_file); + const int fd = mkstemp(c_path); + if (fd >= 0) + *path = c_path; + free(c_path); + return fd; +} + +class ExceptionHandlerTest : public ::testing::Test { + protected: + void SetUp() { + // We need to be able to wait for children, so SIGCHLD cannot be SIG_IGN. 
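// (Editor's note, not in the original breakpad test: if SIGCHLD is set to
// SIG_IGN, the kernel reaps terminated children automatically and waitpid()
// can fail with ECHILD, so the fixture installs a no-op handler instead.)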
+ struct sigaction sa; + memset(&sa, 0, sizeof(sa)); + sa.sa_handler = sigchld_handler; + ASSERT_NE(sigaction(SIGCHLD, &sa, &old_action), -1); + } + + void TearDown() { + sigaction(SIGCHLD, &old_action, NULL); + } + + struct sigaction old_action; +}; + + +void WaitForProcessToTerminate(pid_t process_id, int expected_status) { + int status; + ASSERT_NE(HANDLE_EINTR(waitpid(process_id, &status, 0)), -1); + ASSERT_TRUE(WIFSIGNALED(status)); + ASSERT_EQ(expected_status, WTERMSIG(status)); +} + +// Reads the minidump path sent over the pipe |fd| and sets it in |path|. +void ReadMinidumpPathFromPipe(int fd, string* path) { + struct pollfd pfd; + memset(&pfd, 0, sizeof(pfd)); + pfd.fd = fd; + pfd.events = POLLIN | POLLERR; + + const int r = HANDLE_EINTR(poll(&pfd, 1, 0)); + ASSERT_EQ(1, r); + ASSERT_TRUE(pfd.revents & POLLIN); + + int32_t len; + ASSERT_EQ(static_cast(sizeof(len)), read(fd, &len, sizeof(len))); + ASSERT_LT(len, 2048); + char* filename = static_cast(malloc(len + 1)); + ASSERT_EQ(len, read(fd, filename, len)); + filename[len] = 0; + close(fd); + *path = filename; + free(filename); +} + +} // namespace + +TEST(ExceptionHandlerTest, SimpleWithPath) { + AutoTempDir temp_dir; + ExceptionHandler handler( + MinidumpDescriptor(temp_dir.path()), NULL, NULL, NULL, true, -1); + EXPECT_EQ(temp_dir.path(), handler.minidump_descriptor().directory()); + string temp_subdir = temp_dir.path() + "/subdir"; + handler.set_minidump_descriptor(MinidumpDescriptor(temp_subdir)); + EXPECT_EQ(temp_subdir, handler.minidump_descriptor().directory()); +} + +TEST(ExceptionHandlerTest, SimpleWithFD) { + AutoTempDir temp_dir; + string path; + const int fd = CreateTMPFile(temp_dir.path(), &path); + ExceptionHandler handler(MinidumpDescriptor(fd), NULL, NULL, NULL, true, -1); + close(fd); +} + +static bool DoneCallback(const MinidumpDescriptor& descriptor, + void* context, + bool succeeded) { + if (!succeeded) + return false; + + if (!descriptor.IsFD()) { + int fd = reinterpret_cast(context); + uint32_t len = 0; + len = my_strlen(descriptor.path()); + IGNORE_RET(HANDLE_EINTR(sys_write(fd, &len, sizeof(len)))); + IGNORE_RET(HANDLE_EINTR(sys_write(fd, descriptor.path(), len))); + } + return true; +} + +#ifndef ADDRESS_SANITIZER + +// This is a replacement for "*reinterpret_cast(NULL) = 0;" +// It is needed because GCC is allowed to assume that the program will +// not execute any undefined behavior (UB) operation. Further, when GCC +// observes that UB statement is reached, it can assume that all statements +// leading to the UB one are never executed either, and can completely +// optimize them out. In the case of ExceptionHandlerTest::ExternalDumper, +// GCC-4.9 optimized out the entire set up of ExceptionHandler, causing +// test failure. +volatile int *p_null; // external linkage, so GCC can't tell that it + // remains NULL. Volatile just for a good measure. +static void DoNullPointerDereference() { + *p_null = 1; +} + +void ChildCrash(bool use_fd) { + AutoTempDir temp_dir; + int fds[2] = {0}; + int minidump_fd = -1; + string minidump_path; + if (use_fd) { + minidump_fd = CreateTMPFile(temp_dir.path(), &minidump_path); + } else { + ASSERT_NE(pipe(fds), -1); + } + + const pid_t child = fork(); + if (child == 0) { + { + google_breakpad::scoped_ptr handler; + if (use_fd) { + handler.reset(new ExceptionHandler(MinidumpDescriptor(minidump_fd), + NULL, NULL, NULL, true, -1)); + } else { + close(fds[0]); // Close the reading end. 
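// (Editor's note, not in the original breakpad test: the write end of the pipe
// is smuggled to DoneCallback as the callback context; after the crash the
// callback writes the minidump path back through it so the parent can recover
// the path via ReadMinidumpPathFromPipe.)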
+ void* fd_param = reinterpret_cast(fds[1]); + handler.reset(new ExceptionHandler(MinidumpDescriptor(temp_dir.path()), + NULL, DoneCallback, fd_param, + true, -1)); + } + // Crash with the exception handler in scope. + DoNullPointerDereference(); + } + } + if (!use_fd) + close(fds[1]); // Close the writting end. + + ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGSEGV)); + + if (!use_fd) + ASSERT_NO_FATAL_FAILURE(ReadMinidumpPathFromPipe(fds[0], &minidump_path)); + + struct stat st; + ASSERT_EQ(0, stat(minidump_path.c_str(), &st)); + ASSERT_GT(st.st_size, 0); + unlink(minidump_path.c_str()); +} + +TEST(ExceptionHandlerTest, ChildCrashWithPath) { + ASSERT_NO_FATAL_FAILURE(ChildCrash(false)); +} + +TEST(ExceptionHandlerTest, ChildCrashWithFD) { + ASSERT_NO_FATAL_FAILURE(ChildCrash(true)); +} + +#endif // !ADDRESS_SANITIZER + +static bool DoneCallbackReturnFalse(const MinidumpDescriptor& descriptor, + void* context, + bool succeeded) { + return false; +} + +static bool DoneCallbackReturnTrue(const MinidumpDescriptor& descriptor, + void* context, + bool succeeded) { + return true; +} + +static bool DoneCallbackRaiseSIGKILL(const MinidumpDescriptor& descriptor, + void* context, + bool succeeded) { + raise(SIGKILL); + return true; +} + +static bool FilterCallbackReturnFalse(void* context) { + return false; +} + +static bool FilterCallbackReturnTrue(void* context) { + return true; +} + +// SIGKILL cannot be blocked and a handler cannot be installed for it. In the +// following tests, if the child dies with signal SIGKILL, then the signal was +// redelivered to this handler. If the child dies with SIGSEGV then it wasn't. +static void RaiseSIGKILL(int sig) { + raise(SIGKILL); +} + +static bool InstallRaiseSIGKILL() { + struct sigaction sa; + memset(&sa, 0, sizeof(sa)); + sa.sa_handler = RaiseSIGKILL; + return sigaction(SIGSEGV, &sa, NULL) != -1; +} + +#ifndef ADDRESS_SANITIZER + +static void CrashWithCallbacks(ExceptionHandler::FilterCallback filter, + ExceptionHandler::MinidumpCallback done, + string path) { + ExceptionHandler handler( + MinidumpDescriptor(path), filter, done, NULL, true, -1); + // Crash with the exception handler in scope. 
+ DoNullPointerDereference(); +} + +TEST(ExceptionHandlerTest, RedeliveryOnFilterCallbackFalse) { + AutoTempDir temp_dir; + + const pid_t child = fork(); + if (child == 0) { + ASSERT_TRUE(InstallRaiseSIGKILL()); + CrashWithCallbacks(FilterCallbackReturnFalse, NULL, temp_dir.path()); + } + + ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGKILL)); +} + +TEST(ExceptionHandlerTest, RedeliveryOnDoneCallbackFalse) { + AutoTempDir temp_dir; + + const pid_t child = fork(); + if (child == 0) { + ASSERT_TRUE(InstallRaiseSIGKILL()); + CrashWithCallbacks(NULL, DoneCallbackReturnFalse, temp_dir.path()); + } + + ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGKILL)); +} + +TEST(ExceptionHandlerTest, NoRedeliveryOnDoneCallbackTrue) { + AutoTempDir temp_dir; + + const pid_t child = fork(); + if (child == 0) { + ASSERT_TRUE(InstallRaiseSIGKILL()); + CrashWithCallbacks(NULL, DoneCallbackReturnTrue, temp_dir.path()); + } + + ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGSEGV)); +} + +TEST(ExceptionHandlerTest, NoRedeliveryOnFilterCallbackTrue) { + AutoTempDir temp_dir; + + const pid_t child = fork(); + if (child == 0) { + ASSERT_TRUE(InstallRaiseSIGKILL()); + CrashWithCallbacks(FilterCallbackReturnTrue, NULL, temp_dir.path()); + } + + ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGSEGV)); +} + +TEST(ExceptionHandlerTest, RedeliveryToDefaultHandler) { + AutoTempDir temp_dir; + + const pid_t child = fork(); + if (child == 0) { + CrashWithCallbacks(FilterCallbackReturnFalse, NULL, temp_dir.path()); + } + + // As RaiseSIGKILL wasn't installed, the redelivery should just kill the child + // with SIGSEGV. + ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGSEGV)); +} + +// Check that saving and restoring the signal handler with 'signal' +// instead of 'sigaction' doesn't make the Breakpad signal handler +// crash. See comments in ExceptionHandler::SignalHandler for full +// details. +TEST(ExceptionHandlerTest, RedeliveryOnBadSignalHandlerFlag) { + AutoTempDir temp_dir; + const pid_t child = fork(); + if (child == 0) { + // Install the RaiseSIGKILL handler for SIGSEGV. + ASSERT_TRUE(InstallRaiseSIGKILL()); + + // Create a new exception handler, this installs a new SIGSEGV + // handler, after saving the old one. + ExceptionHandler handler( + MinidumpDescriptor(temp_dir.path()), NULL, + DoneCallbackReturnFalse, NULL, true, -1); + + // Install the default SIGSEGV handler, saving the current one. + // Then re-install the current one with 'signal', this loses the + // SA_SIGINFO flag associated with the Breakpad handler. + sighandler_t old_handler = signal(SIGSEGV, SIG_DFL); + ASSERT_NE(reinterpret_cast(old_handler), + reinterpret_cast(SIG_ERR)); + ASSERT_NE(reinterpret_cast(signal(SIGSEGV, old_handler)), + reinterpret_cast(SIG_ERR)); + + // Crash with the exception handler in scope. + DoNullPointerDereference(); + } + // SIGKILL means Breakpad's signal handler didn't crash. 
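// (Editor's note, not in the original breakpad test: this exercises the
// "cur_handler.sa_flags & SA_SIGINFO" recovery path in
// ExceptionHandler::SignalHandler, which re-registers itself with sigaction()
// when a third party has clobbered the handler via signal().)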
+ ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGKILL)); +} + +TEST(ExceptionHandlerTest, StackedHandlersDeliveredToTop) { + AutoTempDir temp_dir; + + const pid_t child = fork(); + if (child == 0) { + ExceptionHandler bottom(MinidumpDescriptor(temp_dir.path()), + NULL, + NULL, + NULL, + true, + -1); + CrashWithCallbacks(NULL, DoneCallbackRaiseSIGKILL, temp_dir.path()); + } + ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGKILL)); +} + +TEST(ExceptionHandlerTest, StackedHandlersNotDeliveredToBottom) { + AutoTempDir temp_dir; + + const pid_t child = fork(); + if (child == 0) { + ExceptionHandler bottom(MinidumpDescriptor(temp_dir.path()), + NULL, + DoneCallbackRaiseSIGKILL, + NULL, + true, + -1); + CrashWithCallbacks(NULL, NULL, temp_dir.path()); + } + ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGSEGV)); +} + +TEST(ExceptionHandlerTest, StackedHandlersFilteredToBottom) { + AutoTempDir temp_dir; + + const pid_t child = fork(); + if (child == 0) { + ExceptionHandler bottom(MinidumpDescriptor(temp_dir.path()), + NULL, + DoneCallbackRaiseSIGKILL, + NULL, + true, + -1); + CrashWithCallbacks(FilterCallbackReturnFalse, NULL, temp_dir.path()); + } + ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGKILL)); +} + +TEST(ExceptionHandlerTest, StackedHandlersUnhandledToBottom) { + AutoTempDir temp_dir; + + const pid_t child = fork(); + if (child == 0) { + ExceptionHandler bottom(MinidumpDescriptor(temp_dir.path()), + NULL, + DoneCallbackRaiseSIGKILL, + NULL, + true, + -1); + CrashWithCallbacks(NULL, DoneCallbackReturnFalse, temp_dir.path()); + } + ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGKILL)); +} + +#endif // !ADDRESS_SANITIZER + +const unsigned char kIllegalInstruction[] = { +#if defined(__mips__) + // mfc2 zero,Impl - usually illegal in userspace. + 0x48, 0x00, 0x00, 0x48 +#else + // This crashes with SIGILL on x86/x86-64/arm. + 0xff, 0xff, 0xff, 0xff +#endif +}; + +// Test that memory around the instruction pointer is written +// to the dump as a MinidumpMemoryRegion. +TEST(ExceptionHandlerTest, InstructionPointerMemory) { + AutoTempDir temp_dir; + int fds[2]; + ASSERT_NE(pipe(fds), -1); + + // These are defined here so the parent can use them to check the + // data from the minidump afterwards. + const uint32_t kMemorySize = 256; // bytes + const int kOffset = kMemorySize / 2; + + const pid_t child = fork(); + if (child == 0) { + close(fds[0]); + ExceptionHandler handler(MinidumpDescriptor(temp_dir.path()), NULL, + DoneCallback, reinterpret_cast(fds[1]), + true, -1); + // Get some executable memory. + char* memory = + reinterpret_cast(mmap(NULL, + kMemorySize, + PROT_READ | PROT_WRITE | PROT_EXEC, + MAP_PRIVATE | MAP_ANON, + -1, + 0)); + if (!memory) + exit(0); + + // Write some instructions that will crash. Put them in the middle + // of the block of memory, because the minidump should contain 128 + // bytes on either side of the instruction pointer. + memcpy(memory + kOffset, kIllegalInstruction, sizeof(kIllegalInstruction)); + FlushInstructionCache(memory, kMemorySize); + + // Now execute the instructions, which should crash. 
+ typedef void (*void_function)(void); + void_function memory_function = + reinterpret_cast(memory + kOffset); + memory_function(); + } + close(fds[1]); + + ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGILL)); + + string minidump_path; + ASSERT_NO_FATAL_FAILURE(ReadMinidumpPathFromPipe(fds[0], &minidump_path)); + + struct stat st; + ASSERT_EQ(0, stat(minidump_path.c_str(), &st)); + ASSERT_GT(st.st_size, 0); + + // Read the minidump. Locate the exception record and the + // memory list, and then ensure that there is a memory region + // in the memory list that covers the instruction pointer from + // the exception record. + Minidump minidump(minidump_path); + ASSERT_TRUE(minidump.Read()); + + MinidumpException* exception = minidump.GetException(); + MinidumpMemoryList* memory_list = minidump.GetMemoryList(); + ASSERT_TRUE(exception); + ASSERT_TRUE(memory_list); + ASSERT_LT(0U, memory_list->region_count()); + + MinidumpContext* context = exception->GetContext(); + ASSERT_TRUE(context); + + uint64_t instruction_pointer; + ASSERT_TRUE(context->GetInstructionPointer(&instruction_pointer)); + + MinidumpMemoryRegion* region = + memory_list->GetMemoryRegionForAddress(instruction_pointer); + ASSERT_TRUE(region); + + EXPECT_EQ(kMemorySize, region->GetSize()); + const uint8_t* bytes = region->GetMemory(); + ASSERT_TRUE(bytes); + + uint8_t prefix_bytes[kOffset]; + uint8_t suffix_bytes[kMemorySize - kOffset - sizeof(kIllegalInstruction)]; + memset(prefix_bytes, 0, sizeof(prefix_bytes)); + memset(suffix_bytes, 0, sizeof(suffix_bytes)); + EXPECT_TRUE(memcmp(bytes, prefix_bytes, sizeof(prefix_bytes)) == 0); + EXPECT_TRUE(memcmp(bytes + kOffset, kIllegalInstruction, + sizeof(kIllegalInstruction)) == 0); + EXPECT_TRUE(memcmp(bytes + kOffset + sizeof(kIllegalInstruction), + suffix_bytes, sizeof(suffix_bytes)) == 0); + + unlink(minidump_path.c_str()); +} + +// Test that the memory region around the instruction pointer is +// bounded correctly on the low end. +TEST(ExceptionHandlerTest, InstructionPointerMemoryMinBound) { + AutoTempDir temp_dir; + int fds[2]; + ASSERT_NE(pipe(fds), -1); + + // These are defined here so the parent can use them to check the + // data from the minidump afterwards. + const uint32_t kMemorySize = 256; // bytes + const int kOffset = 0; + + const pid_t child = fork(); + if (child == 0) { + close(fds[0]); + ExceptionHandler handler(MinidumpDescriptor(temp_dir.path()), NULL, + DoneCallback, reinterpret_cast(fds[1]), + true, -1); + // Get some executable memory. + char* memory = + reinterpret_cast(mmap(NULL, + kMemorySize, + PROT_READ | PROT_WRITE | PROT_EXEC, + MAP_PRIVATE | MAP_ANON, + -1, + 0)); + if (!memory) + exit(0); + + // Write some instructions that will crash. Put them in the middle + // of the block of memory, because the minidump should contain 128 + // bytes on either side of the instruction pointer. + memcpy(memory + kOffset, kIllegalInstruction, sizeof(kIllegalInstruction)); + FlushInstructionCache(memory, kMemorySize); + + // Now execute the instructions, which should crash. + typedef void (*void_function)(void); + void_function memory_function = + reinterpret_cast(memory + kOffset); + memory_function(); + } + close(fds[1]); + + ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGILL)); + + string minidump_path; + ASSERT_NO_FATAL_FAILURE(ReadMinidumpPathFromPipe(fds[0], &minidump_path)); + + struct stat st; + ASSERT_EQ(0, stat(minidump_path.c_str(), &st)); + ASSERT_GT(st.st_size, 0); + + // Read the minidump. 
Locate the exception record and the + // memory list, and then ensure that there is a memory region + // in the memory list that covers the instruction pointer from + // the exception record. + Minidump minidump(minidump_path); + ASSERT_TRUE(minidump.Read()); + + MinidumpException* exception = minidump.GetException(); + MinidumpMemoryList* memory_list = minidump.GetMemoryList(); + ASSERT_TRUE(exception); + ASSERT_TRUE(memory_list); + ASSERT_LT(0U, memory_list->region_count()); + + MinidumpContext* context = exception->GetContext(); + ASSERT_TRUE(context); + + uint64_t instruction_pointer; + ASSERT_TRUE(context->GetInstructionPointer(&instruction_pointer)); + + MinidumpMemoryRegion* region = + memory_list->GetMemoryRegionForAddress(instruction_pointer); + ASSERT_TRUE(region); + + EXPECT_EQ(kMemorySize / 2, region->GetSize()); + const uint8_t* bytes = region->GetMemory(); + ASSERT_TRUE(bytes); + + uint8_t suffix_bytes[kMemorySize / 2 - sizeof(kIllegalInstruction)]; + memset(suffix_bytes, 0, sizeof(suffix_bytes)); + EXPECT_TRUE(memcmp(bytes + kOffset, kIllegalInstruction, + sizeof(kIllegalInstruction)) == 0); + EXPECT_TRUE(memcmp(bytes + kOffset + sizeof(kIllegalInstruction), + suffix_bytes, sizeof(suffix_bytes)) == 0); + unlink(minidump_path.c_str()); +} + +// Test that the memory region around the instruction pointer is +// bounded correctly on the high end. +TEST(ExceptionHandlerTest, InstructionPointerMemoryMaxBound) { + AutoTempDir temp_dir; + int fds[2]; + ASSERT_NE(pipe(fds), -1); + + // These are defined here so the parent can use them to check the + // data from the minidump afterwards. + // Use 4k here because the OS will hand out a single page even + // if a smaller size is requested, and this test wants to + // test the upper bound of the memory range. + const uint32_t kMemorySize = 4096; // bytes + const int kOffset = kMemorySize - sizeof(kIllegalInstruction); + + const pid_t child = fork(); + if (child == 0) { + close(fds[0]); + ExceptionHandler handler(MinidumpDescriptor(temp_dir.path()), NULL, + DoneCallback, reinterpret_cast(fds[1]), + true, -1); + // Get some executable memory. + char* memory = + reinterpret_cast(mmap(NULL, + kMemorySize, + PROT_READ | PROT_WRITE | PROT_EXEC, + MAP_PRIVATE | MAP_ANON, + -1, + 0)); + if (!memory) + exit(0); + + // Write some instructions that will crash. Put them in the middle + // of the block of memory, because the minidump should contain 128 + // bytes on either side of the instruction pointer. + memcpy(memory + kOffset, kIllegalInstruction, sizeof(kIllegalInstruction)); + FlushInstructionCache(memory, kMemorySize); + + // Now execute the instructions, which should crash. + typedef void (*void_function)(void); + void_function memory_function = + reinterpret_cast(memory + kOffset); + memory_function(); + } + close(fds[1]); + + ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGILL)); + + string minidump_path; + ASSERT_NO_FATAL_FAILURE(ReadMinidumpPathFromPipe(fds[0], &minidump_path)); + + struct stat st; + ASSERT_EQ(0, stat(minidump_path.c_str(), &st)); + ASSERT_GT(st.st_size, 0); + + // Read the minidump. Locate the exception record and the memory list, and + // then ensure that there is a memory region in the memory list that covers + // the instruction pointer from the exception record. 
+ Minidump minidump(minidump_path); + ASSERT_TRUE(minidump.Read()); + + MinidumpException* exception = minidump.GetException(); + MinidumpMemoryList* memory_list = minidump.GetMemoryList(); + ASSERT_TRUE(exception); + ASSERT_TRUE(memory_list); + ASSERT_LT(0U, memory_list->region_count()); + + MinidumpContext* context = exception->GetContext(); + ASSERT_TRUE(context); + + uint64_t instruction_pointer; + ASSERT_TRUE(context->GetInstructionPointer(&instruction_pointer)); + + MinidumpMemoryRegion* region = + memory_list->GetMemoryRegionForAddress(instruction_pointer); + ASSERT_TRUE(region); + + const size_t kPrefixSize = 128; // bytes + EXPECT_EQ(kPrefixSize + sizeof(kIllegalInstruction), region->GetSize()); + const uint8_t* bytes = region->GetMemory(); + ASSERT_TRUE(bytes); + + uint8_t prefix_bytes[kPrefixSize]; + memset(prefix_bytes, 0, sizeof(prefix_bytes)); + EXPECT_TRUE(memcmp(bytes, prefix_bytes, sizeof(prefix_bytes)) == 0); + EXPECT_TRUE(memcmp(bytes + kPrefixSize, + kIllegalInstruction, sizeof(kIllegalInstruction)) == 0); + + unlink(minidump_path.c_str()); +} + +#ifndef ADDRESS_SANITIZER + +// Ensure that an extra memory block doesn't get added when the instruction +// pointer is not in mapped memory. +TEST(ExceptionHandlerTest, InstructionPointerMemoryNullPointer) { + AutoTempDir temp_dir; + int fds[2]; + ASSERT_NE(pipe(fds), -1); + + const pid_t child = fork(); + if (child == 0) { + close(fds[0]); + ExceptionHandler handler(MinidumpDescriptor(temp_dir.path()), NULL, + DoneCallback, reinterpret_cast(fds[1]), + true, -1); + // Try calling a NULL pointer. + typedef void (*void_function)(void); + void_function memory_function = reinterpret_cast(NULL); + memory_function(); + } + close(fds[1]); + + ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGSEGV)); + + string minidump_path; + ASSERT_NO_FATAL_FAILURE(ReadMinidumpPathFromPipe(fds[0], &minidump_path)); + + struct stat st; + ASSERT_EQ(0, stat(minidump_path.c_str(), &st)); + ASSERT_GT(st.st_size, 0); + + // Read the minidump. Locate the exception record and the + // memory list, and then ensure that there is a memory region + // in the memory list that covers the instruction pointer from + // the exception record. + Minidump minidump(minidump_path); + ASSERT_TRUE(minidump.Read()); + + MinidumpException* exception = minidump.GetException(); + MinidumpMemoryList* memory_list = minidump.GetMemoryList(); + ASSERT_TRUE(exception); + ASSERT_TRUE(memory_list); + ASSERT_EQ(static_cast(1), memory_list->region_count()); + + unlink(minidump_path.c_str()); +} + +#endif // !ADDRESS_SANITIZER + +// Test that anonymous memory maps can be annotated with names and IDs. +TEST(ExceptionHandlerTest, ModuleInfo) { + // These are defined here so the parent can use them to check the + // data from the minidump afterwards. + const uint32_t kMemorySize = sysconf(_SC_PAGESIZE); + const char* kMemoryName = "a fake module"; + const uint8_t kModuleGUID[sizeof(MDGUID)] = { + 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, 0xAA, 0xBB, 0xCC, 0xDD, 0xEE, 0xFF + }; + char module_identifier_buffer[kGUIDStringSize]; + FileID::ConvertIdentifierToString(kModuleGUID, + module_identifier_buffer, + sizeof(module_identifier_buffer)); + string module_identifier(module_identifier_buffer); + // Strip out dashes + size_t pos; + while ((pos = module_identifier.find('-')) != string::npos) { + module_identifier.erase(pos, 1); + } + // And append a zero, because module IDs include an "age" field + // which is always zero on Linux. 
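+  // Worked example (assuming the usual little-endian MDGUID layout): the
+  // kModuleGUID bytes above parse as data1=0x33221100, data2=0x5544,
+  // data3=0x7766, so the dashed form is
+  // "33221100-5544-7766-8899-AABBCCDDEEFF", and after stripping dashes and
+  // appending the age the debug identifier becomes
+  // "33221100554477668899AABBCCDDEEFF0" -- the same string the microdump
+  // writer unit test later in this patch matches against.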
+ module_identifier += "0"; + + // Get some memory. + char* memory = + reinterpret_cast(mmap(NULL, + kMemorySize, + PROT_READ | PROT_WRITE, + MAP_PRIVATE | MAP_ANON, + -1, + 0)); + const uintptr_t kMemoryAddress = reinterpret_cast(memory); + ASSERT_TRUE(memory); + + AutoTempDir temp_dir; + ExceptionHandler handler( + MinidumpDescriptor(temp_dir.path()), NULL, NULL, NULL, true, -1); + + // Add info about the anonymous memory mapping. + handler.AddMappingInfo(kMemoryName, + kModuleGUID, + kMemoryAddress, + kMemorySize, + 0); + ASSERT_TRUE(handler.WriteMinidump()); + + const MinidumpDescriptor& minidump_desc = handler.minidump_descriptor(); + // Read the minidump. Load the module list, and ensure that the mmap'ed + // |memory| is listed with the given module name and debug ID. + Minidump minidump(minidump_desc.path()); + ASSERT_TRUE(minidump.Read()); + + MinidumpModuleList* module_list = minidump.GetModuleList(); + ASSERT_TRUE(module_list); + const MinidumpModule* module = + module_list->GetModuleForAddress(kMemoryAddress); + ASSERT_TRUE(module); + + EXPECT_EQ(kMemoryAddress, module->base_address()); + EXPECT_EQ(kMemorySize, module->size()); + EXPECT_EQ(kMemoryName, module->code_file()); + EXPECT_EQ(module_identifier, module->debug_identifier()); + + unlink(minidump_desc.path()); +} + +static const unsigned kControlMsgSize = + CMSG_SPACE(sizeof(int)) + CMSG_SPACE(sizeof(struct ucred)); + +static bool +CrashHandler(const void* crash_context, size_t crash_context_size, + void* context) { + const int fd = (intptr_t) context; + int fds[2]; + if (pipe(fds) == -1) { + // There doesn't seem to be any way to reliably handle + // this failure without the parent process hanging + // At least make sure that this process doesn't access + // unexpected file descriptors + fds[0] = -1; + fds[1] = -1; + } + struct kernel_msghdr msg = {0}; + struct kernel_iovec iov; + iov.iov_base = const_cast(crash_context); + iov.iov_len = crash_context_size; + msg.msg_iov = &iov; + msg.msg_iovlen = 1; + char cmsg[kControlMsgSize]; + memset(cmsg, 0, kControlMsgSize); + msg.msg_control = cmsg; + msg.msg_controllen = sizeof(cmsg); + + struct cmsghdr *hdr = CMSG_FIRSTHDR(&msg); + hdr->cmsg_level = SOL_SOCKET; + hdr->cmsg_type = SCM_RIGHTS; + hdr->cmsg_len = CMSG_LEN(sizeof(int)); + *((int*) CMSG_DATA(hdr)) = fds[1]; + hdr = CMSG_NXTHDR((struct msghdr*) &msg, hdr); + hdr->cmsg_level = SOL_SOCKET; + hdr->cmsg_type = SCM_CREDENTIALS; + hdr->cmsg_len = CMSG_LEN(sizeof(struct ucred)); + struct ucred *cred = reinterpret_cast(CMSG_DATA(hdr)); + cred->uid = getuid(); + cred->gid = getgid(); + cred->pid = getpid(); + + ssize_t ret = HANDLE_EINTR(sys_sendmsg(fd, &msg, 0)); + sys_close(fds[1]); + if (ret <= 0) + return false; + + char b; + IGNORE_RET(HANDLE_EINTR(sys_read(fds[0], &b, 1))); + + return true; +} + +#ifndef ADDRESS_SANITIZER + +TEST(ExceptionHandlerTest, ExternalDumper) { + int fds[2]; + ASSERT_NE(socketpair(AF_UNIX, SOCK_DGRAM, 0, fds), -1); + static const int on = 1; + setsockopt(fds[0], SOL_SOCKET, SO_PASSCRED, &on, sizeof(on)); + setsockopt(fds[1], SOL_SOCKET, SO_PASSCRED, &on, sizeof(on)); + + const pid_t child = fork(); + if (child == 0) { + close(fds[0]); + ExceptionHandler handler(MinidumpDescriptor("/tmp1"), NULL, NULL, + reinterpret_cast(fds[1]), true, -1); + handler.set_crash_handler(CrashHandler); + DoNullPointerDereference(); + } + close(fds[1]); + struct msghdr msg = {0}; + struct iovec iov; + static const unsigned kCrashContextSize = + sizeof(ExceptionHandler::CrashContext); + char context[kCrashContextSize]; 
+ char control[kControlMsgSize]; + iov.iov_base = context; + iov.iov_len = kCrashContextSize; + msg.msg_iov = &iov; + msg.msg_iovlen = 1; + msg.msg_control = control; + msg.msg_controllen = kControlMsgSize; + + const ssize_t n = HANDLE_EINTR(recvmsg(fds[0], &msg, 0)); + ASSERT_EQ(static_cast(kCrashContextSize), n); + ASSERT_EQ(kControlMsgSize, msg.msg_controllen); + ASSERT_EQ(static_cast<__typeof__(msg.msg_flags)>(0), msg.msg_flags); + ASSERT_EQ(0, close(fds[0])); + + pid_t crashing_pid = -1; + int signal_fd = -1; + for (struct cmsghdr *hdr = CMSG_FIRSTHDR(&msg); hdr; + hdr = CMSG_NXTHDR(&msg, hdr)) { + if (hdr->cmsg_level != SOL_SOCKET) + continue; + if (hdr->cmsg_type == SCM_RIGHTS) { + const unsigned len = hdr->cmsg_len - + (((uint8_t*)CMSG_DATA(hdr)) - (uint8_t*)hdr); + ASSERT_EQ(sizeof(int), len); + signal_fd = *(reinterpret_cast(CMSG_DATA(hdr))); + } else if (hdr->cmsg_type == SCM_CREDENTIALS) { + const struct ucred *cred = + reinterpret_cast(CMSG_DATA(hdr)); + crashing_pid = cred->pid; + } + } + + ASSERT_NE(crashing_pid, -1); + ASSERT_NE(signal_fd, -1); + + AutoTempDir temp_dir; + string templ = temp_dir.path() + "/exception-handler-unittest"; + ASSERT_TRUE(WriteMinidump(templ.c_str(), crashing_pid, context, + kCrashContextSize)); + static const char b = 0; + ASSERT_EQ(1, (HANDLE_EINTR(write(signal_fd, &b, 1)))); + ASSERT_EQ(0, close(signal_fd)); + + ASSERT_NO_FATAL_FAILURE(WaitForProcessToTerminate(child, SIGSEGV)); + + struct stat st; + ASSERT_EQ(0, stat(templ.c_str(), &st)); + ASSERT_GT(st.st_size, 0); + unlink(templ.c_str()); +} + +#endif // !ADDRESS_SANITIZER + +TEST(ExceptionHandlerTest, WriteMinidumpExceptionStream) { + AutoTempDir temp_dir; + ExceptionHandler handler(MinidumpDescriptor(temp_dir.path()), NULL, NULL, + NULL, false, -1); + ASSERT_TRUE(handler.WriteMinidump()); + + string minidump_path = handler.minidump_descriptor().path(); + + // Read the minidump and check the exception stream. + Minidump minidump(minidump_path); + ASSERT_TRUE(minidump.Read()); + MinidumpException* exception = minidump.GetException(); + ASSERT_TRUE(exception); + const MDRawExceptionStream* raw = exception->exception(); + ASSERT_TRUE(raw); + EXPECT_EQ(MD_EXCEPTION_CODE_LIN_DUMP_REQUESTED, + raw->exception_record.exception_code); +} + +TEST(ExceptionHandlerTest, GenerateMultipleDumpsWithFD) { + AutoTempDir temp_dir; + string path; + const int fd = CreateTMPFile(temp_dir.path(), &path); + ExceptionHandler handler(MinidumpDescriptor(fd), NULL, NULL, NULL, false, -1); + ASSERT_TRUE(handler.WriteMinidump()); + // Check by the size of the data written to the FD that a minidump was + // generated. + off_t size = lseek(fd, 0, SEEK_CUR); + ASSERT_GT(size, 0); + + // Generate another minidump. + ASSERT_TRUE(handler.WriteMinidump()); + size = lseek(fd, 0, SEEK_CUR); + ASSERT_GT(size, 0); +} + +TEST(ExceptionHandlerTest, GenerateMultipleDumpsWithPath) { + AutoTempDir temp_dir; + ExceptionHandler handler(MinidumpDescriptor(temp_dir.path()), NULL, NULL, + NULL, false, -1); + ASSERT_TRUE(handler.WriteMinidump()); + + const MinidumpDescriptor& minidump_1 = handler.minidump_descriptor(); + struct stat st; + ASSERT_EQ(0, stat(minidump_1.path(), &st)); + ASSERT_GT(st.st_size, 0); + string minidump_1_path(minidump_1.path()); + // Check it is a valid minidump. + Minidump minidump1(minidump_1_path); + ASSERT_TRUE(minidump1.Read()); + unlink(minidump_1.path()); + + // Generate another minidump, it should go to a different file. 
+ ASSERT_TRUE(handler.WriteMinidump()); + const MinidumpDescriptor& minidump_2 = handler.minidump_descriptor(); + ASSERT_EQ(0, stat(minidump_2.path(), &st)); + ASSERT_GT(st.st_size, 0); + string minidump_2_path(minidump_2.path()); + // Check it is a valid minidump. + Minidump minidump2(minidump_2_path); + ASSERT_TRUE(minidump2.Read()); + unlink(minidump_2.path()); + + // 2 distinct files should be produced. + ASSERT_STRNE(minidump_1_path.c_str(), minidump_2_path.c_str()); +} + +// Test that an additional memory region can be added to the minidump. +TEST(ExceptionHandlerTest, AdditionalMemory) { + const uint32_t kMemorySize = sysconf(_SC_PAGESIZE); + + // Get some heap memory. + uint8_t* memory = new uint8_t[kMemorySize]; + const uintptr_t kMemoryAddress = reinterpret_cast(memory); + ASSERT_TRUE(memory); + + // Stick some data into the memory so the contents can be verified. + for (uint32_t i = 0; i < kMemorySize; ++i) { + memory[i] = i % 255; + } + + AutoTempDir temp_dir; + ExceptionHandler handler( + MinidumpDescriptor(temp_dir.path()), NULL, NULL, NULL, true, -1); + + // Add the memory region to the list of memory to be included. + handler.RegisterAppMemory(memory, kMemorySize); + handler.WriteMinidump(); + + const MinidumpDescriptor& minidump_desc = handler.minidump_descriptor(); + + // Read the minidump. Ensure that the memory region is present + Minidump minidump(minidump_desc.path()); + ASSERT_TRUE(minidump.Read()); + + MinidumpMemoryList* dump_memory_list = minidump.GetMemoryList(); + ASSERT_TRUE(dump_memory_list); + const MinidumpMemoryRegion* region = + dump_memory_list->GetMemoryRegionForAddress(kMemoryAddress); + ASSERT_TRUE(region); + + EXPECT_EQ(kMemoryAddress, region->GetBase()); + EXPECT_EQ(kMemorySize, region->GetSize()); + + // Verify memory contents. + EXPECT_EQ(0, memcmp(region->GetMemory(), memory, kMemorySize)); + + delete[] memory; +} + +// Test that a memory region that was previously registered +// can be unregistered. +TEST(ExceptionHandlerTest, AdditionalMemoryRemove) { + const uint32_t kMemorySize = sysconf(_SC_PAGESIZE); + + // Get some heap memory. + uint8_t* memory = new uint8_t[kMemorySize]; + const uintptr_t kMemoryAddress = reinterpret_cast(memory); + ASSERT_TRUE(memory); + + AutoTempDir temp_dir; + ExceptionHandler handler( + MinidumpDescriptor(temp_dir.path()), NULL, NULL, NULL, true, -1); + + // Add the memory region to the list of memory to be included. + handler.RegisterAppMemory(memory, kMemorySize); + + // ...and then remove it + handler.UnregisterAppMemory(memory); + handler.WriteMinidump(); + + const MinidumpDescriptor& minidump_desc = handler.minidump_descriptor(); + + // Read the minidump. Ensure that the memory region is not present. 
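+  // (RegisterAppMemory/UnregisterAppMemory, exercised above, are the hooks an
+  // application uses to get its own state into a dump, e.g.
+  //   handler.RegisterAppMemory(log_ring_buffer, sizeof(log_ring_buffer));
+  // where log_ring_buffer stands for whatever application buffer is worth
+  // capturing -- the name is illustrative, not part of the API.)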
+ Minidump minidump(minidump_desc.path()); + ASSERT_TRUE(minidump.Read()); + + MinidumpMemoryList* dump_memory_list = minidump.GetMemoryList(); + ASSERT_TRUE(dump_memory_list); + const MinidumpMemoryRegion* region = + dump_memory_list->GetMemoryRegionForAddress(kMemoryAddress); + EXPECT_FALSE(region); + + delete[] memory; +} + +static bool SimpleCallback(const MinidumpDescriptor& descriptor, + void* context, + bool succeeded) { + string* filename = reinterpret_cast(context); + *filename = descriptor.path(); + return true; +} + +TEST(ExceptionHandlerTest, WriteMinidumpForChild) { + int fds[2]; + ASSERT_NE(-1, pipe(fds)); + + const pid_t child = fork(); + if (child == 0) { + close(fds[1]); + char b; + HANDLE_EINTR(read(fds[0], &b, sizeof(b))); + close(fds[0]); + syscall(__NR_exit); + } + close(fds[0]); + + AutoTempDir temp_dir; + string minidump_filename; + ASSERT_TRUE( + ExceptionHandler::WriteMinidumpForChild(child, child, + temp_dir.path(), SimpleCallback, + (void*)&minidump_filename)); + + Minidump minidump(minidump_filename); + ASSERT_TRUE(minidump.Read()); + // Check that the crashing thread is the main thread of |child| + MinidumpException* exception = minidump.GetException(); + ASSERT_TRUE(exception); + uint32_t thread_id; + ASSERT_TRUE(exception->GetThreadID(&thread_id)); + EXPECT_EQ(child, static_cast(thread_id)); + + const MDRawExceptionStream* raw = exception->exception(); + ASSERT_TRUE(raw); + EXPECT_EQ(MD_EXCEPTION_CODE_LIN_DUMP_REQUESTED, + raw->exception_record.exception_code); + + close(fds[1]); + unlink(minidump_filename.c_str()); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/handler/minidump_descriptor.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/handler/minidump_descriptor.cc new file mode 100644 index 0000000000..c601d35f04 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/handler/minidump_descriptor.cc @@ -0,0 +1,100 @@ +// Copyright (c) 2012 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
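The unit tests above double as API documentation for the handler. For orientation, a minimal in-process setup looks roughly like the sketch below. It uses only the constructor and callback signatures exercised by the tests; the function and variable names (OnMinidump, InstallCrashHandler, g_handler) are illustrative and not part of the patch.

    #include <string>

    #include "client/linux/handler/exception_handler.h"
    #include "client/linux/handler/minidump_descriptor.h"

    // Called on the compromised thread after the dump has been written (or
    // the attempt failed).
    static bool OnMinidump(const google_breakpad::MinidumpDescriptor& descriptor,
                           void* /*context*/, bool succeeded) {
      // descriptor.path() names the freshly written .dmp file; a real app
      // would record it here and upload it on the next clean start.
      return succeeded;
    }

    // Keep the handler alive for the lifetime of the process.
    static google_breakpad::ExceptionHandler* g_handler = NULL;

    void InstallCrashHandler(const std::string& dump_dir) {
      // Arguments mirror the tests: descriptor, filter callback, done
      // callback, callback context, install signal handlers now, no
      // out-of-process crash-server fd.
      g_handler = new google_breakpad::ExceptionHandler(
          google_breakpad::MinidumpDescriptor(dump_dir), NULL, OnMinidump,
          NULL, true, -1);
    }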
+ +#include + +#include "client/linux/handler/minidump_descriptor.h" + +#include "common/linux/guid_creator.h" + +namespace google_breakpad { + +//static +const MinidumpDescriptor::MicrodumpOnConsole + MinidumpDescriptor::kMicrodumpOnConsole = {}; + +MinidumpDescriptor::MinidumpDescriptor(const MinidumpDescriptor& descriptor) + : mode_(descriptor.mode_), + fd_(descriptor.fd_), + directory_(descriptor.directory_), + c_path_(NULL), + size_limit_(descriptor.size_limit_), + microdump_build_fingerprint_(descriptor.microdump_build_fingerprint_), + microdump_product_info_(descriptor.microdump_product_info_) { + // The copy constructor is not allowed to be called on a MinidumpDescriptor + // with a valid path_, as getting its c_path_ would require the heap which + // can cause problems in compromised environments. + assert(descriptor.path_.empty()); +} + +MinidumpDescriptor& MinidumpDescriptor::operator=( + const MinidumpDescriptor& descriptor) { + assert(descriptor.path_.empty()); + + mode_ = descriptor.mode_; + fd_ = descriptor.fd_; + directory_ = descriptor.directory_; + path_.clear(); + if (c_path_) { + // This descriptor already had a path set, so generate a new one. + c_path_ = NULL; + UpdatePath(); + } + size_limit_ = descriptor.size_limit_; + microdump_build_fingerprint_ = descriptor.microdump_build_fingerprint_; + microdump_product_info_ = descriptor.microdump_product_info_; + return *this; +} + +void MinidumpDescriptor::UpdatePath() { + assert(mode_ == kWriteMinidumpToFile && !directory_.empty()); + + GUID guid; + char guid_str[kGUIDStringLength + 1]; + if (!CreateGUID(&guid) || !GUIDToString(&guid, guid_str, sizeof(guid_str))) { + assert(false); + } + + path_.clear(); + path_ = directory_ + "/" + guid_str + ".dmp"; + c_path_ = path_.c_str(); +} + +void MinidumpDescriptor::SetMicrodumpBuildFingerprint( + const char* build_fingerprint) { + assert(mode_ == kWriteMicrodumpToConsole); + microdump_build_fingerprint_ = build_fingerprint; +} + +void MinidumpDescriptor::SetMicrodumpProductInfo(const char* product_info) { + assert(mode_ == kWriteMicrodumpToConsole); + microdump_product_info_ = product_info; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/handler/minidump_descriptor.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/handler/minidump_descriptor.h new file mode 100644 index 0000000000..3584c6922a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/handler/minidump_descriptor.h @@ -0,0 +1,161 @@ +// Copyright (c) 2012 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef CLIENT_LINUX_HANDLER_MINIDUMP_DESCRIPTOR_H_ +#define CLIENT_LINUX_HANDLER_MINIDUMP_DESCRIPTOR_H_ + +#include +#include + +#include + +#include "common/using_std_string.h" + +// This class describes how a crash dump should be generated, either: +// - Writing a full minidump to a file in a given directory (the actual path, +// inside the directory, is determined by this class). +// - Writing a full minidump to a given fd. +// - Writing a reduced microdump to the console (logcat on Android). +namespace google_breakpad { + +class MinidumpDescriptor { + public: + struct MicrodumpOnConsole {}; + static const MicrodumpOnConsole kMicrodumpOnConsole; + + MinidumpDescriptor() : mode_(kUninitialized), + fd_(-1), + size_limit_(-1), + microdump_build_fingerprint_(NULL), + microdump_product_info_(NULL) {} + + explicit MinidumpDescriptor(const string& directory) + : mode_(kWriteMinidumpToFile), + fd_(-1), + directory_(directory), + c_path_(NULL), + size_limit_(-1), + microdump_build_fingerprint_(NULL), + microdump_product_info_(NULL) { + assert(!directory.empty()); + } + + explicit MinidumpDescriptor(int fd) + : mode_(kWriteMinidumpToFd), + fd_(fd), + c_path_(NULL), + size_limit_(-1), + microdump_build_fingerprint_(NULL), + microdump_product_info_(NULL) { + assert(fd != -1); + } + + explicit MinidumpDescriptor(const MicrodumpOnConsole&) + : mode_(kWriteMicrodumpToConsole), + fd_(-1), + size_limit_(-1), + microdump_build_fingerprint_(NULL), + microdump_product_info_(NULL) {} + + explicit MinidumpDescriptor(const MinidumpDescriptor& descriptor); + MinidumpDescriptor& operator=(const MinidumpDescriptor& descriptor); + + static MinidumpDescriptor getMicrodumpDescriptor(); + + bool IsFD() const { return mode_ == kWriteMinidumpToFd; } + + int fd() const { return fd_; } + + string directory() const { return directory_; } + + const char* path() const { return c_path_; } + + bool IsMicrodumpOnConsole() const { + return mode_ == kWriteMicrodumpToConsole; + } + + // Updates the path so it is unique. + // Should be called from a normal context: this methods uses the heap. + void UpdatePath(); + + off_t size_limit() const { return size_limit_; } + void set_size_limit(off_t limit) { size_limit_ = limit; } + + // TODO(primiano): make this and product info (below) just part of the + // microdump ctor once it is rolled stably into Chrome. ETA: June 2015. 
+ void SetMicrodumpBuildFingerprint(const char* build_fingerprint); + const char* microdump_build_fingerprint() const { + return microdump_build_fingerprint_; + } + + void SetMicrodumpProductInfo(const char* product_info); + const char* microdump_product_info() const { + return microdump_product_info_; + } + + private: + enum DumpMode { + kUninitialized = 0, + kWriteMinidumpToFile, + kWriteMinidumpToFd, + kWriteMicrodumpToConsole + }; + + // Specifies the dump mode (see DumpMode). + DumpMode mode_; + + // The file descriptor where the minidump is generated. + int fd_; + + // The directory where the minidump should be generated. + string directory_; + + // The full path to the generated minidump. + string path_; + + // The C string of |path_|. Precomputed so it can be access from a compromised + // context. + const char* c_path_; + + off_t size_limit_; + + // The product name/version and build fingerprint that should be appended to + // the dump (microdump only). Microdumps don't have the ability of appending + // extra metadata after the dump is generated (as opposite to minidumps + // MIME fields), therefore the product details must be provided upfront. + // The string pointers are supposed to be valid through all the lifetime of + // the process (read: the caller has to guarantee that they are stored in + // global static storage). + const char* microdump_build_fingerprint_; + const char* microdump_product_info_; +}; + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_HANDLER_MINIDUMP_DESCRIPTOR_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/log/log.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/log/log.cc new file mode 100644 index 0000000000..1863591390 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/log/log.cc @@ -0,0 +1,48 @@ +// Copyright (c) 2012 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
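The descriptor header above defines three ways to say where a dump should go. A short sketch of each mode follows; the values passed in are placeholders, and in practice the choice is made once, when the ExceptionHandler is constructed.

    #include <string>

    #include "client/linux/handler/minidump_descriptor.h"

    using google_breakpad::MinidumpDescriptor;

    void DescribeDumpModes(const std::string& dump_dir, int dump_fd) {
      // 1. Full minidump into a directory; UpdatePath() generates a
      //    GUID-based file name inside it when the dump is written.
      MinidumpDescriptor to_dir(dump_dir);
      to_dir.set_size_limit(1024 * 1024);  // optional cap on the dump size

      // 2. Full minidump into an already-open file descriptor.
      MinidumpDescriptor to_fd(dump_fd);

      // 3. Reduced microdump printed to the console (logcat on Android).
      //    The microdump-only setters assert on this mode, and the header
      //    requires the strings to stay valid for the life of the process
      //    (string literals qualify). "MyApp:1.2.3.4" is an illustrative
      //    product string in the format the microdump writer documents.
      MinidumpDescriptor to_logcat(MinidumpDescriptor::kMicrodumpOnConsole);
      to_logcat.SetMicrodumpProductInfo("MyApp:1.2.3.4");
    }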
+ +#include "client/linux/log/log.h" + +#if defined(__ANDROID__) +#include +#else +#include "third_party/lss/linux_syscall_support.h" +#endif + +namespace logger { + +int write(const char* buf, size_t nbytes) { +#if defined(__ANDROID__) + return __android_log_write(ANDROID_LOG_WARN, "google-breakpad", buf); +#else + return sys_write(2, buf, nbytes); +#endif +} + +} // namespace logger diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/log/log.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/log/log.h new file mode 100644 index 0000000000..a50e30dcd4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/log/log.h @@ -0,0 +1,41 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef CLIENT_LINUX_LOG_LOG_H_ +#define CLIENT_LINUX_LOG_LOG_H_ + +#include + +namespace logger { + +int write(const char* buf, size_t nbytes); + +} // namespace logger + +#endif // CLIENT_LINUX_LOG_LOG_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/microdump_writer/microdump_writer.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/microdump_writer/microdump_writer.cc new file mode 100644 index 0000000000..f45925fecb --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/microdump_writer/microdump_writer.cc @@ -0,0 +1,423 @@ +// Copyright (c) 2014, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// This translation unit generates microdumps into the console (logcat on +// Android). See crbug.com/410294 for more info and design docs. + +#include "client/linux/microdump_writer/microdump_writer.h" + +#include + +#include "client/linux/dump_writer_common/seccomp_unwinder.h" +#include "client/linux/dump_writer_common/thread_info.h" +#include "client/linux/dump_writer_common/ucontext_reader.h" +#include "client/linux/handler/exception_handler.h" +#include "client/linux/log/log.h" +#include "client/linux/minidump_writer/linux_ptrace_dumper.h" +#include "common/linux/linux_libc_support.h" + +namespace { + +using google_breakpad::ExceptionHandler; +using google_breakpad::LinuxDumper; +using google_breakpad::LinuxPtraceDumper; +using google_breakpad::MappingInfo; +using google_breakpad::MappingList; +using google_breakpad::RawContextCPU; +using google_breakpad::SeccompUnwinder; +using google_breakpad::ThreadInfo; +using google_breakpad::UContextReader; + +const size_t kLineBufferSize = 2048; + +class MicrodumpWriter { + public: + MicrodumpWriter(const ExceptionHandler::CrashContext* context, + const MappingList& mappings, + const char* build_fingerprint, + const char* product_info, + LinuxDumper* dumper) + : ucontext_(context ? &context->context : NULL), +#if !defined(__ARM_EABI__) && !defined(__mips__) + float_state_(context ? &context->float_state : NULL), +#endif + dumper_(dumper), + mapping_list_(mappings), + build_fingerprint_(build_fingerprint), + product_info_(product_info), + log_line_(NULL) { + log_line_ = reinterpret_cast(Alloc(kLineBufferSize)); + if (log_line_) + log_line_[0] = '\0'; // Clear out the log line buffer. + } + + ~MicrodumpWriter() { dumper_->ThreadsResume(); } + + bool Init() { + // In the exceptional case where the system was out of memory and there + // wasn't even room to allocate the line buffer, bail out. There is nothing + // useful we can possibly achieve without the ability to Log. At least let's + // try to not crash. + if (!dumper_->Init() || !log_line_) + return false; + return dumper_->ThreadsSuspend(); + } + + bool Dump() { + bool success; + LogLine("-----BEGIN BREAKPAD MICRODUMP-----"); + DumpProductInformation(); + DumpOSInformation(); + success = DumpCrashingThread(); + if (success) + success = DumpMappings(); + LogLine("-----END BREAKPAD MICRODUMP-----"); + dumper_->ThreadsResume(); + return success; + } + + private: + // Writes one line to the system log. 
+ void LogLine(const char* msg) { + logger::write(msg, my_strlen(msg)); +#if !defined(__ANDROID__) + logger::write("\n", 1); // Android logger appends the \n. Linux's doesn't. +#endif + } + + // Stages the given string in the current line buffer. + void LogAppend(const char* str) { + my_strlcat(log_line_, str, kLineBufferSize); + } + + // As above (required to take precedence over template specialization below). + void LogAppend(char* str) { + LogAppend(const_cast(str)); + } + + // Stages the hex repr. of the given int type in the current line buffer. + template + void LogAppend(T value) { + // Make enough room to hex encode the largest int type + NUL. + static const char HEX[] = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', + 'A', 'B', 'C', 'D', 'E', 'F'}; + char hexstr[sizeof(T) * 2 + 1]; + for (int i = sizeof(T) * 2 - 1; i >= 0; --i, value >>= 4) + hexstr[i] = HEX[static_cast(value) & 0x0F]; + hexstr[sizeof(T) * 2] = '\0'; + LogAppend(hexstr); + } + + // Stages the buffer content hex-encoded in the current line buffer. + void LogAppend(const void* buf, size_t length) { + const uint8_t* ptr = reinterpret_cast(buf); + for (size_t i = 0; i < length; ++i, ++ptr) + LogAppend(*ptr); + } + + // Writes out the current line buffer on the system log. + void LogCommitLine() { + LogLine(log_line_); + my_strlcpy(log_line_, "", kLineBufferSize); + } + + void DumpProductInformation() { + LogAppend("V "); + if (product_info_) { + LogAppend(product_info_); + } else { + LogAppend("UNKNOWN:0.0.0.0"); + } + LogCommitLine(); + } + + void DumpOSInformation() { + const uint8_t n_cpus = static_cast(sysconf(_SC_NPROCESSORS_CONF)); + +#if defined(__ANDROID__) + const char kOSId[] = "A"; +#else + const char kOSId[] = "L"; +#endif + +// We cannot depend on uts.machine. On multiarch devices it always returns the +// primary arch, not the one that match the executable being run. +#if defined(__aarch64__) + const char kArch[] = "arm64"; +#elif defined(__ARMEL__) + const char kArch[] = "arm"; +#elif defined(__x86_64__) + const char kArch[] = "x86_64"; +#elif defined(__i386__) + const char kArch[] = "x86"; +#elif defined(__mips__) + const char kArch[] = "mips"; +#else +#error "This code has not been ported to your platform yet" +#endif + + LogAppend("O "); + LogAppend(kOSId); + LogAppend(" "); + LogAppend(kArch); + LogAppend(" "); + LogAppend(n_cpus); + LogAppend(" "); + // If the client has attached a build fingerprint to the MinidumpDescriptor + // use that one. Otherwise try to get some basic info from uname(). + if (build_fingerprint_) { + LogAppend(build_fingerprint_); + } else { + struct utsname uts; + if (uname(&uts) == 0) { + LogAppend(uts.machine); + LogAppend(" "); + LogAppend(uts.release); + LogAppend(" "); + LogAppend(uts.version); + } else { + LogAppend("no build fingerprint available"); + } + } + LogCommitLine(); + } + + bool DumpThreadStack(uint32_t thread_id, + uintptr_t stack_pointer, + int max_stack_len, + uint8_t** stack_copy) { + *stack_copy = NULL; + const void* stack; + size_t stack_len; + + if (!dumper_->GetStackInfo(&stack, &stack_len, stack_pointer)) { + // The stack pointer might not be available. In this case we don't hard + // fail, just produce a (almost useless) microdump w/o a stack section. 
+ return true; + } + + LogAppend("S 0 "); + LogAppend(stack_pointer); + LogAppend(" "); + LogAppend(reinterpret_cast(stack)); + LogAppend(" "); + LogAppend(stack_len); + LogCommitLine(); + + if (max_stack_len >= 0 && + stack_len > static_cast(max_stack_len)) { + stack_len = max_stack_len; + } + + *stack_copy = reinterpret_cast(Alloc(stack_len)); + dumper_->CopyFromProcess(*stack_copy, thread_id, stack, stack_len); + + // Dump the content of the stack, splicing it into chunks which size is + // compatible with the max logcat line size (see LOGGER_ENTRY_MAX_PAYLOAD). + const size_t STACK_DUMP_CHUNK_SIZE = 384; + for (size_t stack_off = 0; stack_off < stack_len; + stack_off += STACK_DUMP_CHUNK_SIZE) { + LogAppend("S "); + LogAppend(reinterpret_cast(stack) + stack_off); + LogAppend(" "); + LogAppend(*stack_copy + stack_off, + std::min(STACK_DUMP_CHUNK_SIZE, stack_len - stack_off)); + LogCommitLine(); + } + return true; + } + + // Write information about the crashing thread. + bool DumpCrashingThread() { + const unsigned num_threads = dumper_->threads().size(); + + for (unsigned i = 0; i < num_threads; ++i) { + MDRawThread thread; + my_memset(&thread, 0, sizeof(thread)); + thread.thread_id = dumper_->threads()[i]; + + // Dump only the crashing thread. + if (static_cast(thread.thread_id) != dumper_->crash_thread()) + continue; + + assert(ucontext_); + assert(!dumper_->IsPostMortem()); + + uint8_t* stack_copy; + const uintptr_t stack_ptr = UContextReader::GetStackPointer(ucontext_); + if (!DumpThreadStack(thread.thread_id, stack_ptr, -1, &stack_copy)) + return false; + + RawContextCPU cpu; + my_memset(&cpu, 0, sizeof(RawContextCPU)); +#if !defined(__ARM_EABI__) && !defined(__mips__) + UContextReader::FillCPUContext(&cpu, ucontext_, float_state_); +#else + UContextReader::FillCPUContext(&cpu, ucontext_); +#endif + if (stack_copy) + SeccompUnwinder::PopSeccompStackFrame(&cpu, thread, stack_copy); + DumpCPUState(&cpu); + } + return true; + } + + void DumpCPUState(RawContextCPU* cpu) { + LogAppend("C "); + LogAppend(cpu, sizeof(*cpu)); + LogCommitLine(); + } + + // If there is caller-provided information about this mapping + // in the mapping_list_ list, return true. Otherwise, return false. + bool HaveMappingInfo(const MappingInfo& mapping) { + for (MappingList::const_iterator iter = mapping_list_.begin(); + iter != mapping_list_.end(); + ++iter) { + // Ignore any mappings that are wholly contained within + // mappings in the mapping_info_ list. + if (mapping.start_addr >= iter->first.start_addr && + (mapping.start_addr + mapping.size) <= + (iter->first.start_addr + iter->first.size)) { + return true; + } + } + return false; + } + + // Dump information about the provided |mapping|. If |identifier| is non-NULL, + // use it instead of calculating a file ID from the mapping. + void DumpModule(const MappingInfo& mapping, + bool member, + unsigned int mapping_id, + const uint8_t* identifier) { + MDGUID module_identifier; + if (identifier) { + // GUID was provided by caller. 
+ my_memcpy(&module_identifier, identifier, sizeof(MDGUID)); + } else { + dumper_->ElfFileIdentifierForMapping( + mapping, + member, + mapping_id, + reinterpret_cast(&module_identifier)); + } + + char file_name[NAME_MAX]; + char file_path[NAME_MAX]; + LinuxDumper::GetMappingEffectiveNameAndPath( + mapping, file_path, sizeof(file_path), file_name, sizeof(file_name)); + + LogAppend("M "); + LogAppend(static_cast(mapping.start_addr)); + LogAppend(" "); + LogAppend(mapping.offset); + LogAppend(" "); + LogAppend(mapping.size); + LogAppend(" "); + LogAppend(module_identifier.data1); + LogAppend(module_identifier.data2); + LogAppend(module_identifier.data3); + LogAppend(module_identifier.data4[0]); + LogAppend(module_identifier.data4[1]); + LogAppend(module_identifier.data4[2]); + LogAppend(module_identifier.data4[3]); + LogAppend(module_identifier.data4[4]); + LogAppend(module_identifier.data4[5]); + LogAppend(module_identifier.data4[6]); + LogAppend(module_identifier.data4[7]); + LogAppend("0 "); // Age is always 0 on Linux. + LogAppend(file_name); + LogCommitLine(); + } + + // Write information about the mappings in effect. + bool DumpMappings() { + // First write all the mappings from the dumper + for (unsigned i = 0; i < dumper_->mappings().size(); ++i) { + const MappingInfo& mapping = *dumper_->mappings()[i]; + if (mapping.name[0] == 0 || // only want modules with filenames. + !mapping.exec || // only want executable mappings. + mapping.size < 4096 || // too small to get a signature for. + HaveMappingInfo(mapping)) { + continue; + } + + DumpModule(mapping, true, i, NULL); + } + // Next write all the mappings provided by the caller + for (MappingList::const_iterator iter = mapping_list_.begin(); + iter != mapping_list_.end(); + ++iter) { + DumpModule(iter->first, false, 0, iter->second); + } + return true; + } + + void* Alloc(unsigned bytes) { return dumper_->allocator()->Alloc(bytes); } + + const struct ucontext* const ucontext_; +#if !defined(__ARM_EABI__) && !defined(__mips__) + const google_breakpad::fpstate_t* const float_state_; +#endif + LinuxDumper* dumper_; + const MappingList& mapping_list_; + const char* const build_fingerprint_; + const char* const product_info_; + char* log_line_; +}; +} // namespace + +namespace google_breakpad { + +bool WriteMicrodump(pid_t crashing_process, + const void* blob, + size_t blob_size, + const MappingList& mappings, + const char* build_fingerprint, + const char* product_info) { + LinuxPtraceDumper dumper(crashing_process); + const ExceptionHandler::CrashContext* context = NULL; + if (blob) { + if (blob_size != sizeof(ExceptionHandler::CrashContext)) + return false; + context = reinterpret_cast(blob); + dumper.set_crash_address( + reinterpret_cast(context->siginfo.si_addr)); + dumper.set_crash_signal(context->siginfo.si_signo); + dumper.set_crash_thread(context->tid); + } + MicrodumpWriter writer(context, mappings, build_fingerprint, product_info, + &dumper); + if (!writer.Init()) + return false; + return writer.Dump(); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/microdump_writer/microdump_writer.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/microdump_writer/microdump_writer.h new file mode 100644 index 0000000000..e218558363 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/microdump_writer/microdump_writer.h @@ -0,0 +1,64 @@ +// Copyright (c) 2014, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef CLIENT_LINUX_MINIDUMP_WRITER_MICRODUMP_WRITER_H_ +#define CLIENT_LINUX_MINIDUMP_WRITER_MICRODUMP_WRITER_H_ + +#include +#include + +#include "client/linux/dump_writer_common/mapping_info.h" + +namespace google_breakpad { + +// Writes a microdump (a reduced dump containing only the state of the crashing +// thread) on the console (logcat on Android). These functions do not malloc nor +// use libc functions which may. Thus, it can be used in contexts where the +// state of the heap may be corrupt. +// Args: +// crashing_process: the pid of the crashing process. This must be trusted. +// blob: a blob of data from the crashing process. See exception_handler.h +// blob_size: the length of |blob| in bytes. +// mappings: a list of additional mappings provided by the application. +// build_fingerprint: a (optional) C string which determines the OS +// build fingerprint (e.g., aosp/occam/mako:5.1.1/LMY47W/1234:eng/dev-keys). +// product_info: a (optional) C string which determines the product name and +// version (e.g., WebView:42.0.2311.136). +// +// Returns true iff successful. +bool WriteMicrodump(pid_t crashing_process, + const void* blob, + size_t blob_size, + const MappingList& mappings, + const char* build_fingerprint, + const char* product_info); + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_MINIDUMP_WRITER_MICRODUMP_WRITER_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/microdump_writer/microdump_writer_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/microdump_writer/microdump_writer_unittest.cc new file mode 100644 index 0000000000..1fa6f1ff0a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/microdump_writer/microdump_writer_unittest.cc @@ -0,0 +1,164 @@ +// Copyright (c) 2014 Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include +#include +#include + +#include + +#include "breakpad_googletest_includes.h" +#include "client/linux/handler/exception_handler.h" +#include "client/linux/microdump_writer/microdump_writer.h" +#include "common/linux/eintr_wrapper.h" +#include "common/linux/ignore_ret.h" +#include "common/scoped_ptr.h" +#include "common/tests/auto_tempdir.h" +#include "common/using_std_string.h" + +using namespace google_breakpad; + +namespace { + +typedef testing::Test MicrodumpWriterTest; + +void CrashAndGetMicrodump( + const MappingList& mappings, + const char* build_fingerprint, + const char* product_info, + scoped_array* buf) { + int fds[2]; + ASSERT_NE(-1, pipe(fds)); + + AutoTempDir temp_dir; + string stderr_file = temp_dir.path() + "/stderr.log"; + int err_fd = open(stderr_file.c_str(), O_CREAT | O_RDWR, S_IRUSR | S_IWUSR); + ASSERT_NE(-1, err_fd); + + const pid_t child = fork(); + if (child == 0) { + close(fds[1]); + char b; + IGNORE_RET(HANDLE_EINTR(read(fds[0], &b, sizeof(b)))); + close(fds[0]); + syscall(__NR_exit); + } + close(fds[0]); + + ExceptionHandler::CrashContext context; + memset(&context, 0, sizeof(context)); + + // Set a non-zero tid to avoid tripping asserts. + context.tid = child; + + // Redirect temporarily stderr to the stderr.log file. + int save_err = dup(STDERR_FILENO); + ASSERT_NE(-1, save_err); + ASSERT_NE(-1, dup2(err_fd, STDERR_FILENO)); + + ASSERT_TRUE(WriteMicrodump(child, &context, sizeof(context), mappings, + build_fingerprint, product_info)); + + // Revert stderr back to the console. + dup2(save_err, STDERR_FILENO); + close(save_err); + + // Read back the stderr file and check for the microdump marker. 
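+  // For reference, the captured stderr should contain one microdump: the
+  // "-----BEGIN BREAKPAD MICRODUMP-----" marker, a "V " product-info line,
+  // an "O " OS/arch/CPU-count/fingerprint line, "S " stack records, a "C "
+  // CPU-context record, "M " module records, and the matching END marker
+  // (see MicrodumpWriter::Dump above). This helper only asserts the two
+  // markers; the tests below then match the "M" module line and the "V"
+  // product line.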
+ fsync(err_fd); + lseek(err_fd, 0, SEEK_SET); + const size_t kBufSize = 64 * 1024; + buf->reset(new char[kBufSize]); + ASSERT_GT(read(err_fd, buf->get(), kBufSize), 0); + + close(err_fd); + close(fds[1]); + + ASSERT_NE(static_cast(0), strstr( + buf->get(), "-----BEGIN BREAKPAD MICRODUMP-----")); + ASSERT_NE(static_cast(0), strstr( + buf->get(), "-----END BREAKPAD MICRODUMP-----")); + +} + +TEST(MicrodumpWriterTest, BasicWithMappings) { + // Push some extra mapping to check the MappingList logic. + const uint32_t memory_size = sysconf(_SC_PAGESIZE); + const char* kMemoryName = "libfoo.so"; + const uint8_t kModuleGUID[sizeof(MDGUID)] = { + 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, 0xAA, 0xBB, 0xCC, 0xDD, 0xEE, 0xFF + }; + + MappingInfo info; + info.start_addr = memory_size; + info.size = memory_size; + info.offset = 42; + strcpy(info.name, kMemoryName); + + MappingList mappings; + MappingEntry mapping; + mapping.first = info; + memcpy(mapping.second, kModuleGUID, sizeof(MDGUID)); + mappings.push_back(mapping); + + scoped_array buf; + CrashAndGetMicrodump(mappings, NULL, NULL, &buf); + +#ifdef __LP64__ + ASSERT_NE(static_cast(0), strstr( + buf.get(), "M 0000000000001000 000000000000002A 0000000000001000 " + "33221100554477668899AABBCCDDEEFF0 libfoo.so")); +#else + ASSERT_NE(static_cast(0), strstr( + buf.get(), "M 00001000 0000002A 00001000 " + "33221100554477668899AABBCCDDEEFF0 libfoo.so")); +#endif + + // In absence of a product info in the minidump, the writer should just write + // an unknown marker. + ASSERT_NE(static_cast(0), strstr( + buf.get(), "V UNKNOWN:0.0.0.0")); +} + +// Ensure that the product info and build fingerprint metadata show up in the +// final microdump if present. +TEST(MicrodumpWriterTest, BuildFingerprintAndProductInfo) { + const char kProductInfo[] = "MockProduct:42.0.2311.99"; + const char kBuildFingerprint[] = + "aosp/occam/mako:5.1.1/LMY47W/12345678:userdegbug/dev-keys"; + scoped_array buf; + MappingList no_mappings; + + CrashAndGetMicrodump(no_mappings, kBuildFingerprint, kProductInfo, &buf); + + ASSERT_NE(static_cast(0), strstr(buf.get(), kBuildFingerprint)); + ASSERT_NE(static_cast(0), strstr(buf.get(), kProductInfo)); +} + +} // namespace diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/cpu_set.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/cpu_set.h new file mode 100644 index 0000000000..1cca9aa5a0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/cpu_set.h @@ -0,0 +1,144 @@ +// Copyright (c) 2013, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef CLIENT_LINUX_MINIDUMP_WRITER_CPU_SET_H_ +#define CLIENT_LINUX_MINIDUMP_WRITER_CPU_SET_H_ + +#include +#include +#include + +#include "common/linux/linux_libc_support.h" +#include "third_party/lss/linux_syscall_support.h" + +namespace google_breakpad { + +// Helper class used to model a set of CPUs, as read from sysfs +// files like /sys/devices/system/cpu/present +// See See http://www.kernel.org/doc/Documentation/cputopology.txt +class CpuSet { +public: + // The maximum number of supported CPUs. + static const size_t kMaxCpus = 1024; + + CpuSet() { + my_memset(mask_, 0, sizeof(mask_)); + } + + // Parse a sysfs file to extract the corresponding CPU set. + bool ParseSysFile(int fd) { + char buffer[512]; + int ret = sys_read(fd, buffer, sizeof(buffer)-1); + if (ret < 0) + return false; + + buffer[ret] = '\0'; + + // Expected format: comma-separated list of items, where each + // item can be a decimal integer, or two decimal integers separated + // by a dash. + // E.g.: + // 0 + // 0,1,2,3 + // 0-3 + // 1,10-23 + const char* p = buffer; + const char* p_end = p + ret; + while (p < p_end) { + // Skip leading space, if any + while (p < p_end && my_isspace(*p)) + p++; + + // Find start and size of current item. + const char* item = p; + size_t item_len = static_cast(p_end - p); + const char* item_next = + static_cast(my_memchr(p, ',', item_len)); + if (item_next != NULL) { + p = item_next + 1; + item_len = static_cast(item_next - item); + } else { + p = p_end; + item_next = p_end; + } + + // Ignore trailing spaces. + while (item_next > item && my_isspace(item_next[-1])) + item_next--; + + // skip empty items. + if (item_next == item) + continue; + + // read first decimal value. + uintptr_t start = 0; + const char* next = my_read_decimal_ptr(&start, item); + uintptr_t end = start; + if (*next == '-') + my_read_decimal_ptr(&end, next+1); + + while (start <= end) + SetBit(start++); + } + return true; + } + + // Intersect this CPU set with another one. + void IntersectWith(const CpuSet& other) { + for (size_t nn = 0; nn < kMaskWordCount; ++nn) + mask_[nn] &= other.mask_[nn]; + } + + // Return the number of CPUs in this set. 
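+  // Typical usage (a sketch, error handling omitted): open two sysfs files
+  // such as /sys/devices/system/cpu/present and
+  // /sys/devices/system/cpu/online, ParseSysFile() each into its own CpuSet,
+  // IntersectWith() one against the other, and read GetCount() for the
+  // number of CPUs that appear in both sets.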
+ int GetCount() { + int result = 0; + for (size_t nn = 0; nn < kMaskWordCount; ++nn) { + result += __builtin_popcount(mask_[nn]); + } + return result; + } + +private: + void SetBit(uintptr_t index) { + size_t nn = static_cast(index); + if (nn < kMaxCpus) + mask_[nn / kMaskWordBits] |= (1U << (nn % kMaskWordBits)); + } + + typedef uint32_t MaskWordType; + static const size_t kMaskWordBits = 8*sizeof(MaskWordType); + static const size_t kMaskWordCount = + (kMaxCpus + kMaskWordBits - 1) / kMaskWordBits; + + MaskWordType mask_[kMaskWordCount]; +}; + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_MINIDUMP_WRITER_CPU_SET_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/cpu_set_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/cpu_set_unittest.cc new file mode 100644 index 0000000000..e2274bd17a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/cpu_set_unittest.cc @@ -0,0 +1,164 @@ +// Copyright (c) 2013, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include +#include +#include +#include +#include +#include + +#include "breakpad_googletest_includes.h" +#include "client/linux/minidump_writer/cpu_set.h" +#include "common/linux/tests/auto_testfile.h" + +using namespace google_breakpad; + +namespace { + +typedef testing::Test CpuSetTest; + +// Helper class to write test text file to a temporary file and return +// its file descriptor. 
+class ScopedTestFile : public AutoTestFile { +public: + explicit ScopedTestFile(const char* text) + : AutoTestFile("cpu_set", text) { + } +}; + +} + +TEST(CpuSetTest, EmptyCount) { + CpuSet set; + ASSERT_EQ(0, set.GetCount()); +} + +TEST(CpuSetTest, OneCpu) { + ScopedTestFile file("10"); + ASSERT_TRUE(file.IsOk()); + + CpuSet set; + ASSERT_TRUE(set.ParseSysFile(file.GetFd())); + ASSERT_EQ(1, set.GetCount()); +} + +TEST(CpuSetTest, OneCpuTerminated) { + ScopedTestFile file("10\n"); + ASSERT_TRUE(file.IsOk()); + + CpuSet set; + ASSERT_TRUE(set.ParseSysFile(file.GetFd())); + ASSERT_EQ(1, set.GetCount()); +} + +TEST(CpuSetTest, TwoCpusWithComma) { + ScopedTestFile file("1,10"); + ASSERT_TRUE(file.IsOk()); + + CpuSet set; + ASSERT_TRUE(set.ParseSysFile(file.GetFd())); + ASSERT_EQ(2, set.GetCount()); +} + +TEST(CpuSetTest, TwoCpusWithRange) { + ScopedTestFile file("1-2"); + ASSERT_TRUE(file.IsOk()); + + CpuSet set; + ASSERT_TRUE(set.ParseSysFile(file.GetFd())); + ASSERT_EQ(2, set.GetCount()); +} + +TEST(CpuSetTest, TenCpusWithRange) { + ScopedTestFile file("9-18"); + ASSERT_TRUE(file.IsOk()); + + CpuSet set; + ASSERT_TRUE(set.ParseSysFile(file.GetFd())); + ASSERT_EQ(10, set.GetCount()); +} + +TEST(CpuSetTest, MultiItems) { + ScopedTestFile file("0, 2-4, 128"); + ASSERT_TRUE(file.IsOk()); + + CpuSet set; + ASSERT_TRUE(set.ParseSysFile(file.GetFd())); + ASSERT_EQ(5, set.GetCount()); +} + +TEST(CpuSetTest, IntersectWith) { + ScopedTestFile file1("9-19"); + ASSERT_TRUE(file1.IsOk()); + CpuSet set1; + ASSERT_TRUE(set1.ParseSysFile(file1.GetFd())); + ASSERT_EQ(11, set1.GetCount()); + + ScopedTestFile file2("16-24"); + ASSERT_TRUE(file2.IsOk()); + CpuSet set2; + ASSERT_TRUE(set2.ParseSysFile(file2.GetFd())); + ASSERT_EQ(9, set2.GetCount()); + + set1.IntersectWith(set2); + ASSERT_EQ(4, set1.GetCount()); + ASSERT_EQ(9, set2.GetCount()); +} + +TEST(CpuSetTest, SelfIntersection) { + ScopedTestFile file1("9-19"); + ASSERT_TRUE(file1.IsOk()); + CpuSet set1; + ASSERT_TRUE(set1.ParseSysFile(file1.GetFd())); + ASSERT_EQ(11, set1.GetCount()); + + set1.IntersectWith(set1); + ASSERT_EQ(11, set1.GetCount()); +} + +TEST(CpuSetTest, EmptyIntersection) { + ScopedTestFile file1("0-19"); + ASSERT_TRUE(file1.IsOk()); + CpuSet set1; + ASSERT_TRUE(set1.ParseSysFile(file1.GetFd())); + ASSERT_EQ(20, set1.GetCount()); + + ScopedTestFile file2("20-39"); + ASSERT_TRUE(file2.IsOk()); + CpuSet set2; + ASSERT_TRUE(set2.ParseSysFile(file2.GetFd())); + ASSERT_EQ(20, set2.GetCount()); + + set1.IntersectWith(set2); + ASSERT_EQ(0, set1.GetCount()); + + ASSERT_EQ(20, set2.GetCount()); +} + diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/directory_reader.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/directory_reader.h new file mode 100644 index 0000000000..a4bde18031 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/directory_reader.h @@ -0,0 +1,106 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. 
+//  * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef CLIENT_LINUX_MINIDUMP_WRITER_DIRECTORY_READER_H_
+#define CLIENT_LINUX_MINIDUMP_WRITER_DIRECTORY_READER_H_
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "common/linux/linux_libc_support.h"
+#include "third_party/lss/linux_syscall_support.h"
+
+namespace google_breakpad {
+
+// A class for enumerating a directory without using opendir/readdir or other
+// functions which may allocate memory.
+class DirectoryReader {
+ public:
+  DirectoryReader(int fd)
+      : fd_(fd),
+        hit_eof_(false),
+        buf_used_(0) {
+  }
+
+  // Return the next entry from the directory
+  // name: (output) the NUL terminated entry name
+  //
+  // Returns true iff successful (false on EOF).
+  //
+  // After calling this, one must call |PopEntry| otherwise you'll get the same
+  // entry over and over.
+  bool GetNextEntry(const char** name) {
+    struct kernel_dirent* const dent =
+        reinterpret_cast<struct kernel_dirent*>(buf_);
+
+    if (buf_used_ == 0) {
+      // need to read more entries.
+      const int n = sys_getdents(fd_, dent, sizeof(buf_));
+      if (n < 0) {
+        return false;
+      } else if (n == 0) {
+        hit_eof_ = true;
+      } else {
+        buf_used_ += n;
+      }
+    }
+
+    if (buf_used_ == 0 && hit_eof_)
+      return false;
+
+    assert(buf_used_ > 0);
+
+    *name = dent->d_name;
+    return true;
+  }
+
+  void PopEntry() {
+    if (!buf_used_)
+      return;
+
+    const struct kernel_dirent* const dent =
+        reinterpret_cast<const struct kernel_dirent*>(buf_);
+
+    buf_used_ -= dent->d_reclen;
+    my_memmove(buf_, buf_ + dent->d_reclen, buf_used_);
+  }
+
+ private:
+  const int fd_;
+  bool hit_eof_;
+  unsigned buf_used_;
+  uint8_t buf_[sizeof(struct kernel_dirent) + NAME_MAX + 1];
+};
+
+}  // namespace google_breakpad
+
+#endif  // CLIENT_LINUX_MINIDUMP_WRITER_DIRECTORY_READER_H_
diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/directory_reader_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/directory_reader_unittest.cc
new file mode 100644
index 0000000000..326f9e36b4
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/directory_reader_unittest.cc
@@ -0,0 +1,78 @@
+// Copyright (c) 2009, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//  * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include +#include + +#include +#include +#include + +#include "client/linux/minidump_writer/directory_reader.h" +#include "common/using_std_string.h" +#include "breakpad_googletest_includes.h" + +using namespace google_breakpad; + +namespace { +typedef testing::Test DirectoryReaderTest; +} + +TEST(DirectoryReaderTest, CompareResults) { + std::set dent_set; + + DIR *const dir = opendir("/proc/self"); + ASSERT_TRUE(dir != NULL); + + struct dirent* dent; + while ((dent = readdir(dir))) + dent_set.insert(dent->d_name); + + closedir(dir); + + const int fd = open("/proc/self", O_DIRECTORY | O_RDONLY); + ASSERT_GE(fd, 0); + + DirectoryReader dir_reader(fd); + unsigned seen = 0; + + const char* name; + while (dir_reader.GetNextEntry(&name)) { + ASSERT_TRUE(dent_set.find(name) != dent_set.end()); + seen++; + dir_reader.PopEntry(); + } + + ASSERT_TRUE(dent_set.find("status") != dent_set.end()); + ASSERT_TRUE(dent_set.find("stat") != dent_set.end()); + ASSERT_TRUE(dent_set.find("cmdline") != dent_set.end()); + + ASSERT_EQ(dent_set.size(), seen); + close(fd); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/line_reader.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/line_reader.h new file mode 100644 index 0000000000..779cfeb603 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/line_reader.h @@ -0,0 +1,131 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef CLIENT_LINUX_MINIDUMP_WRITER_LINE_READER_H_ +#define CLIENT_LINUX_MINIDUMP_WRITER_LINE_READER_H_ + +#include +#include +#include + +#include "common/linux/linux_libc_support.h" +#include "third_party/lss/linux_syscall_support.h" + +namespace google_breakpad { + +// A class for reading a file, line by line, without using fopen/fgets or other +// functions which may allocate memory. +class LineReader { + public: + LineReader(int fd) + : fd_(fd), + hit_eof_(false), + buf_used_(0) { + } + + // The maximum length of a line. + static const size_t kMaxLineLen = 512; + + // Return the next line from the file. + // line: (output) a pointer to the start of the line. The line is NUL + // terminated. + // len: (output) the length of the line (not inc the NUL byte) + // + // Returns true iff successful (false on EOF). + // + // One must call |PopLine| after this function, otherwise you'll continue to + // get the same line over and over. + bool GetNextLine(const char **line, unsigned *len) { + for (;;) { + if (buf_used_ == 0 && hit_eof_) + return false; + + for (unsigned i = 0; i < buf_used_; ++i) { + if (buf_[i] == '\n' || buf_[i] == 0) { + buf_[i] = 0; + *len = i; + *line = buf_; + return true; + } + } + + if (buf_used_ == sizeof(buf_)) { + // we scanned the whole buffer and didn't find an end-of-line marker. + // This line is too long to process. + return false; + } + + // We didn't find any end-of-line terminators in the buffer. However, if + // this is the last line in the file it might not have one: + if (hit_eof_) { + assert(buf_used_); + // There's room for the NUL because of the buf_used_ == sizeof(buf_) + // check above. + buf_[buf_used_] = 0; + *len = buf_used_; + buf_used_ += 1; // since we appended the NUL. + *line = buf_; + return true; + } + + // Otherwise, we should pull in more data from the file + const ssize_t n = sys_read(fd_, buf_ + buf_used_, + sizeof(buf_) - buf_used_); + if (n < 0) { + return false; + } else if (n == 0) { + hit_eof_ = true; + } else { + buf_used_ += n; + } + + // At this point, we have either set the hit_eof_ flag, or we have more + // data to process... + } + } + + void PopLine(unsigned len) { + // len doesn't include the NUL byte at the end. 
+ + assert(buf_used_ >= len + 1); + buf_used_ -= len + 1; + my_memmove(buf_, buf_ + len + 1, buf_used_); + } + + private: + const int fd_; + + bool hit_eof_; + unsigned buf_used_; + char buf_[kMaxLineLen]; +}; + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_MINIDUMP_WRITER_LINE_READER_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/line_reader_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/line_reader_unittest.cc new file mode 100644 index 0000000000..29686f04aa --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/line_reader_unittest.cc @@ -0,0 +1,169 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
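+
+// Illustrative sketch (not part of the upstream test): LineReader is normally
+// driven as a GetNextLine/PopLine loop over a raw file descriptor, which is
+// how the dumper later walks files such as /proc/<pid>/maps:
+//
+//   int fd = sys_open("/proc/self/maps", O_RDONLY, 0);
+//   LineReader reader(fd);
+//   const char* line;
+//   unsigned line_len;
+//   while (reader.GetNextLine(&line, &line_len)) {
+//     // |line| is NUL-terminated; |line_len| excludes the NUL.
+//     reader.PopLine(line_len);  // required, or the same line repeats
+//   }
+//   sys_close(fd);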
+ +#include +#include +#include + +#include "client/linux/minidump_writer/line_reader.h" +#include "breakpad_googletest_includes.h" +#include "common/linux/tests/auto_testfile.h" + +using namespace google_breakpad; + +namespace { + +typedef testing::Test LineReaderTest; + +class ScopedTestFile : public AutoTestFile { +public: + explicit ScopedTestFile(const char* text) + : AutoTestFile("line_reader", text) { + } + + ScopedTestFile(const char* text, size_t text_len) + : AutoTestFile("line_reader", text, text_len) { + } +}; + +} + +TEST(LineReaderTest, EmptyFile) { + ScopedTestFile file(""); + ASSERT_TRUE(file.IsOk()); + LineReader reader(file.GetFd()); + + const char *line; + unsigned len; + ASSERT_FALSE(reader.GetNextLine(&line, &len)); +} + +TEST(LineReaderTest, OneLineTerminated) { + ScopedTestFile file("a\n"); + ASSERT_TRUE(file.IsOk()); + LineReader reader(file.GetFd()); + + const char *line; + unsigned int len; + ASSERT_TRUE(reader.GetNextLine(&line, &len)); + ASSERT_EQ((unsigned int)1, len); + ASSERT_EQ('a', line[0]); + ASSERT_EQ('\0', line[1]); + reader.PopLine(len); + + ASSERT_FALSE(reader.GetNextLine(&line, &len)); +} + +TEST(LineReaderTest, OneLine) { + ScopedTestFile file("a"); + ASSERT_TRUE(file.IsOk()); + LineReader reader(file.GetFd()); + + const char *line; + unsigned len; + ASSERT_TRUE(reader.GetNextLine(&line, &len)); + ASSERT_EQ((unsigned)1, len); + ASSERT_EQ('a', line[0]); + ASSERT_EQ('\0', line[1]); + reader.PopLine(len); + + ASSERT_FALSE(reader.GetNextLine(&line, &len)); +} + +TEST(LineReaderTest, TwoLinesTerminated) { + ScopedTestFile file("a\nb\n"); + ASSERT_TRUE(file.IsOk()); + LineReader reader(file.GetFd()); + + const char *line; + unsigned len; + ASSERT_TRUE(reader.GetNextLine(&line, &len)); + ASSERT_EQ((unsigned)1, len); + ASSERT_EQ('a', line[0]); + ASSERT_EQ('\0', line[1]); + reader.PopLine(len); + + ASSERT_TRUE(reader.GetNextLine(&line, &len)); + ASSERT_EQ((unsigned)1, len); + ASSERT_EQ('b', line[0]); + ASSERT_EQ('\0', line[1]); + reader.PopLine(len); + + ASSERT_FALSE(reader.GetNextLine(&line, &len)); +} + +TEST(LineReaderTest, TwoLines) { + ScopedTestFile file("a\nb"); + ASSERT_TRUE(file.IsOk()); + LineReader reader(file.GetFd()); + + const char *line; + unsigned len; + ASSERT_TRUE(reader.GetNextLine(&line, &len)); + ASSERT_EQ((unsigned)1, len); + ASSERT_EQ('a', line[0]); + ASSERT_EQ('\0', line[1]); + reader.PopLine(len); + + ASSERT_TRUE(reader.GetNextLine(&line, &len)); + ASSERT_EQ((unsigned)1, len); + ASSERT_EQ('b', line[0]); + ASSERT_EQ('\0', line[1]); + reader.PopLine(len); + + ASSERT_FALSE(reader.GetNextLine(&line, &len)); +} + +TEST(LineReaderTest, MaxLength) { + char l[LineReader::kMaxLineLen-1]; + memset(l, 'a', sizeof(l)); + ScopedTestFile file(l, sizeof(l)); + ASSERT_TRUE(file.IsOk()); + LineReader reader(file.GetFd()); + + const char *line; + unsigned len; + ASSERT_TRUE(reader.GetNextLine(&line, &len)); + ASSERT_EQ(sizeof(l), len); + ASSERT_TRUE(memcmp(l, line, sizeof(l)) == 0); + ASSERT_EQ('\0', line[len]); +} + +TEST(LineReaderTest, TooLong) { + // Note: this writes kMaxLineLen 'a' chars in the test file. 
+ char l[LineReader::kMaxLineLen]; + memset(l, 'a', sizeof(l)); + ScopedTestFile file(l, sizeof(l)); + ASSERT_TRUE(file.IsOk()); + LineReader reader(file.GetFd()); + + const char *line; + unsigned len; + ASSERT_FALSE(reader.GetNextLine(&line, &len)); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_core_dumper.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_core_dumper.cc new file mode 100644 index 0000000000..d732824593 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_core_dumper.cc @@ -0,0 +1,257 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// linux_core_dumper.cc: Implement google_breakpad::LinuxCoreDumper. +// See linux_core_dumper.h for details. + +#include "client/linux/minidump_writer/linux_core_dumper.h" + +#include +#include +#include +#include +#include +#include +#if defined(__mips__) && defined(__ANDROID__) +// To get register definitions. 
+#include +#endif + +#include "common/linux/linux_libc_support.h" + +namespace google_breakpad { + +LinuxCoreDumper::LinuxCoreDumper(pid_t pid, + const char* core_path, + const char* procfs_path) + : LinuxDumper(pid), + core_path_(core_path), + procfs_path_(procfs_path), + thread_infos_(&allocator_, 8) { + assert(core_path_); +} + +bool LinuxCoreDumper::BuildProcPath(char* path, pid_t pid, + const char* node) const { + if (!path || !node) + return false; + + size_t node_len = my_strlen(node); + if (node_len == 0) + return false; + + size_t procfs_path_len = my_strlen(procfs_path_); + size_t total_length = procfs_path_len + 1 + node_len; + if (total_length >= NAME_MAX) + return false; + + memcpy(path, procfs_path_, procfs_path_len); + path[procfs_path_len] = '/'; + memcpy(path + procfs_path_len + 1, node, node_len); + path[total_length] = '\0'; + return true; +} + +bool LinuxCoreDumper::CopyFromProcess(void* dest, pid_t child, + const void* src, size_t length) { + ElfCoreDump::Addr virtual_address = reinterpret_cast(src); + // TODO(benchan): Investigate whether the data to be copied could span + // across multiple segments in the core dump file. ElfCoreDump::CopyData + // and this method do not handle that case yet. + if (!core_.CopyData(dest, virtual_address, length)) { + // If the data segment is not found in the core dump, fill the result + // with marker characters. + memset(dest, 0xab, length); + return false; + } + return true; +} + +bool LinuxCoreDumper::GetThreadInfoByIndex(size_t index, ThreadInfo* info) { + if (index >= thread_infos_.size()) + return false; + + *info = thread_infos_[index]; + const uint8_t* stack_pointer; +#if defined(__i386) + memcpy(&stack_pointer, &info->regs.esp, sizeof(info->regs.esp)); +#elif defined(__x86_64) + memcpy(&stack_pointer, &info->regs.rsp, sizeof(info->regs.rsp)); +#elif defined(__ARM_EABI__) + memcpy(&stack_pointer, &info->regs.ARM_sp, sizeof(info->regs.ARM_sp)); +#elif defined(__aarch64__) + memcpy(&stack_pointer, &info->regs.sp, sizeof(info->regs.sp)); +#elif defined(__mips__) + stack_pointer = + reinterpret_cast(info->mcontext.gregs[MD_CONTEXT_MIPS_REG_SP]); +#else +#error "This code hasn't been ported to your platform yet." 
+#endif + info->stack_pointer = reinterpret_cast(stack_pointer); + return true; +} + +bool LinuxCoreDumper::IsPostMortem() const { + return true; +} + +bool LinuxCoreDumper::ThreadsSuspend() { + return true; +} + +bool LinuxCoreDumper::ThreadsResume() { + return true; +} + +bool LinuxCoreDumper::EnumerateThreads() { + if (!mapped_core_file_.Map(core_path_, 0)) { + fprintf(stderr, "Could not map core dump file into memory\n"); + return false; + } + + core_.SetContent(mapped_core_file_.content()); + if (!core_.IsValid()) { + fprintf(stderr, "Invalid core dump file\n"); + return false; + } + + ElfCoreDump::Note note = core_.GetFirstNote(); + if (!note.IsValid()) { + fprintf(stderr, "PT_NOTE section not found\n"); + return false; + } + + bool first_thread = true; + do { + ElfCoreDump::Word type = note.GetType(); + MemoryRange name = note.GetName(); + MemoryRange description = note.GetDescription(); + + if (type == 0 || name.IsEmpty() || description.IsEmpty()) { + fprintf(stderr, "Could not found a valid PT_NOTE.\n"); + return false; + } + + // Based on write_note_info() in linux/kernel/fs/binfmt_elf.c, notes are + // ordered as follows (NT_PRXFPREG and NT_386_TLS are i386 specific): + // Thread Name Type + // ------------------------------------------------------------------- + // 1st thread CORE NT_PRSTATUS + // process-wide CORE NT_PRPSINFO + // process-wide CORE NT_AUXV + // 1st thread CORE NT_FPREGSET + // 1st thread LINUX NT_PRXFPREG + // 1st thread LINUX NT_386_TLS + // + // 2nd thread CORE NT_PRSTATUS + // 2nd thread CORE NT_FPREGSET + // 2nd thread LINUX NT_PRXFPREG + // 2nd thread LINUX NT_386_TLS + // + // 3rd thread CORE NT_PRSTATUS + // 3rd thread CORE NT_FPREGSET + // 3rd thread LINUX NT_PRXFPREG + // 3rd thread LINUX NT_386_TLS + // + // The following code only works if notes are ordered as expected. 
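+    //
+    // (Illustrative sketch, not upstream code.) Each note record walked here
+    // is a fixed ELF header followed by 4-byte-aligned name and description
+    // payloads, roughly:
+    //
+    //   typedef struct {
+    //     Elf32_Word n_namesz;  // e.g. 5 for "CORE\0"
+    //     Elf32_Word n_descsz;  // size of the payload (elf_prstatus, ...)
+    //     Elf32_Word n_type;    // NT_PRSTATUS, NT_FPREGSET, ...
+    //   } Elf32_Nhdr;           // as declared in <elf.h>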
+ switch (type) { + case NT_PRSTATUS: { + if (description.length() != sizeof(elf_prstatus)) { + fprintf(stderr, "Found NT_PRSTATUS descriptor of unexpected size\n"); + return false; + } + + const elf_prstatus* status = + reinterpret_cast(description.data()); + pid_t pid = status->pr_pid; + ThreadInfo info; + memset(&info, 0, sizeof(ThreadInfo)); + info.tgid = status->pr_pgrp; + info.ppid = status->pr_ppid; +#if defined(__mips__) +#if defined(__ANDROID__) + for (int i = EF_R0; i <= EF_R31; i++) + info.mcontext.gregs[i - EF_R0] = status->pr_reg[i]; +#else // __ANDROID__ + for (int i = EF_REG0; i <= EF_REG31; i++) + info.mcontext.gregs[i - EF_REG0] = status->pr_reg[i]; +#endif // __ANDROID__ + info.mcontext.mdlo = status->pr_reg[EF_LO]; + info.mcontext.mdhi = status->pr_reg[EF_HI]; + info.mcontext.pc = status->pr_reg[EF_CP0_EPC]; +#else // __mips__ + memcpy(&info.regs, status->pr_reg, sizeof(info.regs)); +#endif // __mips__ + if (first_thread) { + crash_thread_ = pid; + crash_signal_ = status->pr_info.si_signo; + } + first_thread = false; + threads_.push_back(pid); + thread_infos_.push_back(info); + break; + } +#if defined(__i386) || defined(__x86_64) + case NT_FPREGSET: { + if (thread_infos_.empty()) + return false; + + ThreadInfo* info = &thread_infos_.back(); + if (description.length() != sizeof(info->fpregs)) { + fprintf(stderr, "Found NT_FPREGSET descriptor of unexpected size\n"); + return false; + } + + memcpy(&info->fpregs, description.data(), sizeof(info->fpregs)); + break; + } +#endif +#if defined(__i386) + case NT_PRXFPREG: { + if (thread_infos_.empty()) + return false; + + ThreadInfo* info = &thread_infos_.back(); + if (description.length() != sizeof(info->fpxregs)) { + fprintf(stderr, "Found NT_PRXFPREG descriptor of unexpected size\n"); + return false; + } + + memcpy(&info->fpxregs, description.data(), sizeof(info->fpxregs)); + break; + } +#endif + } + note = note.GetNextNote(); + } while (note.IsValid()); + + return true; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_core_dumper.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_core_dumper.h new file mode 100644 index 0000000000..8537896eec --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_core_dumper.h @@ -0,0 +1,123 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// linux_core_dumper.h: Define the google_breakpad::LinuxCoreDumper +// class, which is derived from google_breakpad::LinuxDumper to extract +// information from a crashed process via its core dump and proc files. + +#ifndef CLIENT_LINUX_MINIDUMP_WRITER_LINUX_CORE_DUMPER_H_ +#define CLIENT_LINUX_MINIDUMP_WRITER_LINUX_CORE_DUMPER_H_ + +#include "client/linux/minidump_writer/linux_dumper.h" +#include "common/linux/elf_core_dump.h" +#include "common/linux/memory_mapped_file.h" + +namespace google_breakpad { + +class LinuxCoreDumper : public LinuxDumper { + public: + // Constructs a dumper for extracting information of a given process + // with a process ID of |pid| via its core dump file at |core_path| and + // its proc files at |procfs_path|. If |procfs_path| is a copy of + // /proc/, it should contain the following files: + // auxv, cmdline, environ, exe, maps, status + LinuxCoreDumper(pid_t pid, const char* core_path, const char* procfs_path); + + // Implements LinuxDumper::BuildProcPath(). + // Builds a proc path for a certain pid for a node (/proc//). + // |path| is a character array of at least NAME_MAX bytes to return the + // result.|node| is the final node without any slashes. Return true on + // success. + // + // As this dumper performs a post-mortem dump and makes use of a copy + // of the proc files of the crashed process, this derived method does + // not actually make use of |pid| and always returns a subpath of + // |procfs_path_| regardless of whether |pid| corresponds to the main + // process or a thread of the process, i.e. assuming both the main process + // and its threads have the following proc files with the same content: + // auxv, cmdline, environ, exe, maps, status + virtual bool BuildProcPath(char* path, pid_t pid, const char* node) const; + + // Implements LinuxDumper::CopyFromProcess(). + // Copies content of |length| bytes from a given process |child|, + // starting from |src|, into |dest|. This method extracts the content + // the core dump and fills |dest| with a sequence of marker bytes + // if the expected data is not found in the core dump. Returns true if + // the expected data is found in the core dump. + virtual bool CopyFromProcess(void* dest, pid_t child, const void* src, + size_t length); + + // Implements LinuxDumper::GetThreadInfoByIndex(). + // Reads information about the |index|-th thread of |threads_|. + // Returns true on success. One must have called |ThreadsSuspend| first. + virtual bool GetThreadInfoByIndex(size_t index, ThreadInfo* info); + + // Implements LinuxDumper::IsPostMortem(). + // Always returns true to indicate that this dumper performs a + // post-mortem dump of a crashed process via a core dump file. + virtual bool IsPostMortem() const; + + // Implements LinuxDumper::ThreadsSuspend(). + // As the dumper performs a post-mortem dump via a core dump file, + // there is no threads to suspend. This method does nothing and + // always returns true. 
+ virtual bool ThreadsSuspend(); + + // Implements LinuxDumper::ThreadsResume(). + // As the dumper performs a post-mortem dump via a core dump file, + // there is no threads to resume. This method does nothing and + // always returns true. + virtual bool ThreadsResume(); + + protected: + // Implements LinuxDumper::EnumerateThreads(). + // Enumerates all threads of the given process into |threads_|. + virtual bool EnumerateThreads(); + + private: + // Path of the core dump file. + const char* core_path_; + + // Path of the directory containing the proc files of the given process, + // which is usually a copy of /proc/. + const char* procfs_path_; + + // Memory-mapped core dump file at |core_path_|. + MemoryMappedFile mapped_core_file_; + + // Content of the core dump file. + ElfCoreDump core_; + + // Thread info found in the core dump file. + wasteful_vector thread_infos_; +}; + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_HANDLER_LINUX_CORE_DUMPER_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_core_dumper_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_core_dumper_unittest.cc new file mode 100644 index 0000000000..8f6a423ee0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_core_dumper_unittest.cc @@ -0,0 +1,118 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// linux_core_dumper_unittest.cc: +// Unit tests for google_breakpad::LinuxCoreDumoer. 
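+//
+// Illustrative sketch (not part of the upstream test): a post-mortem dump is
+// driven by pointing the dumper at a core file plus a copy of the crashed
+// process's /proc entries (pid and paths below are placeholders):
+//
+//   LinuxCoreDumper dumper(crashed_pid, "/tmp/core", "/tmp/procfs_copy");
+//   if (dumper.Init()) {
+//     // dumper.threads(), dumper.crash_thread(), dumper.crash_signal() and
+//     // dumper.GetThreadInfoByIndex() are now available to the writer.
+//   }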
+ +#include + +#include "breakpad_googletest_includes.h" +#include "client/linux/minidump_writer/linux_core_dumper.h" +#include "common/linux/tests/crash_generator.h" +#include "common/using_std_string.h" + +using namespace google_breakpad; + +TEST(LinuxCoreDumperTest, BuildProcPath) { + const pid_t pid = getpid(); + const char procfs_path[] = "/procfs_copy"; + LinuxCoreDumper dumper(getpid(), "core_file", procfs_path); + + char maps_path[NAME_MAX] = ""; + char maps_path_expected[NAME_MAX]; + snprintf(maps_path_expected, sizeof(maps_path_expected), + "%s/maps", procfs_path); + EXPECT_TRUE(dumper.BuildProcPath(maps_path, pid, "maps")); + EXPECT_STREQ(maps_path_expected, maps_path); + + EXPECT_FALSE(dumper.BuildProcPath(NULL, pid, "maps")); + EXPECT_FALSE(dumper.BuildProcPath(maps_path, pid, "")); + EXPECT_FALSE(dumper.BuildProcPath(maps_path, pid, NULL)); + + char long_node[NAME_MAX]; + size_t long_node_len = NAME_MAX - strlen(procfs_path) - 1; + memset(long_node, 'a', long_node_len); + long_node[long_node_len] = '\0'; + EXPECT_FALSE(dumper.BuildProcPath(maps_path, pid, long_node)); +} + +TEST(LinuxCoreDumperTest, VerifyDumpWithMultipleThreads) { + CrashGenerator crash_generator; + if (!crash_generator.HasDefaultCorePattern()) { + fprintf(stderr, "LinuxCoreDumperTest.VerifyDumpWithMultipleThreads test " + "is skipped due to non-default core pattern\n"); + return; + } + + const unsigned kNumOfThreads = 3; + const unsigned kCrashThread = 1; + const int kCrashSignal = SIGABRT; + pid_t child_pid; + ASSERT_TRUE(crash_generator.CreateChildCrash(kNumOfThreads, kCrashThread, + kCrashSignal, &child_pid)); + + const string core_file = crash_generator.GetCoreFilePath(); + const string procfs_path = crash_generator.GetDirectoryOfProcFilesCopy(); + +#if defined(__ANDROID__) + struct stat st; + if (stat(core_file.c_str(), &st) != 0) { + fprintf(stderr, "LinuxCoreDumperTest.VerifyDumpWithMultipleThreads test is " + "skipped due to no core file being generated"); + return; + } +#endif + + LinuxCoreDumper dumper(child_pid, core_file.c_str(), procfs_path.c_str()); + + EXPECT_TRUE(dumper.Init()); + + EXPECT_TRUE(dumper.IsPostMortem()); + + // These are no-ops and should always return true. + EXPECT_TRUE(dumper.ThreadsSuspend()); + EXPECT_TRUE(dumper.ThreadsResume()); + + // LinuxCoreDumper cannot determine the crash address and thus it always + // sets the crash address to 0. + EXPECT_EQ(0U, dumper.crash_address()); + EXPECT_EQ(kCrashSignal, dumper.crash_signal()); + EXPECT_EQ(crash_generator.GetThreadId(kCrashThread), + dumper.crash_thread()); + + EXPECT_EQ(kNumOfThreads, dumper.threads().size()); + for (unsigned i = 0; i < kNumOfThreads; ++i) { + ThreadInfo info; + EXPECT_TRUE(dumper.GetThreadInfoByIndex(i, &info)); + const void* stack; + size_t stack_len; + EXPECT_TRUE(dumper.GetStackInfo(&stack, &stack_len, info.stack_pointer)); + EXPECT_EQ(getpid(), info.ppid); + } +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_dumper.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_dumper.cc new file mode 100644 index 0000000000..ebb008d669 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_dumper.cc @@ -0,0 +1,475 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// linux_dumper.cc: Implement google_breakpad::LinuxDumper. +// See linux_dumper.h for details. + +// This code deals with the mechanics of getting information about a crashed +// process. Since this code may run in a compromised address space, the same +// rules apply as detailed at the top of minidump_writer.h: no libc calls and +// use the alternative allocator. + +#include "client/linux/minidump_writer/linux_dumper.h" + +#include +#include +#include +#include +#include +#include + +#include "client/linux/minidump_writer/line_reader.h" +#include "common/linux/elfutils.h" +#include "common/linux/file_id.h" +#include "common/linux/linux_libc_support.h" +#include "common/linux/memory_mapped_file.h" +#include "common/linux/safe_readlink.h" +#include "third_party/lss/linux_syscall_support.h" + +static const char kMappedFileUnsafePrefix[] = "/dev/"; +static const char kDeletedSuffix[] = " (deleted)"; +static const char kReservedFlags[] = " ---p"; + +inline static bool IsMappedFileOpenUnsafe( + const google_breakpad::MappingInfo& mapping) { + // It is unsafe to attempt to open a mapped file that lives under /dev, + // because the semantics of the open may be driver-specific so we'd risk + // hanging the crash dumper. And a file in /dev/ almost certainly has no + // ELF file identifier anyways. + return my_strncmp(mapping.name, + kMappedFileUnsafePrefix, + sizeof(kMappedFileUnsafePrefix) - 1) == 0; +} + +namespace google_breakpad { + +// All interesting auvx entry types are below AT_SYSINFO_EHDR +#define AT_MAX AT_SYSINFO_EHDR + +LinuxDumper::LinuxDumper(pid_t pid) + : pid_(pid), + crash_address_(0), + crash_signal_(0), + crash_thread_(pid), + threads_(&allocator_, 8), + mappings_(&allocator_), + auxv_(&allocator_, AT_MAX + 1) { + // The passed-in size to the constructor (above) is only a hint. + // Must call .resize() to do actual initialization of the elements. 
+ auxv_.resize(AT_MAX + 1); +} + +LinuxDumper::~LinuxDumper() { +} + +bool LinuxDumper::Init() { + return ReadAuxv() && EnumerateThreads() && EnumerateMappings(); +} + +bool +LinuxDumper::ElfFileIdentifierForMapping(const MappingInfo& mapping, + bool member, + unsigned int mapping_id, + uint8_t identifier[sizeof(MDGUID)]) { + assert(!member || mapping_id < mappings_.size()); + my_memset(identifier, 0, sizeof(MDGUID)); + if (IsMappedFileOpenUnsafe(mapping)) + return false; + + // Special-case linux-gate because it's not a real file. + if (my_strcmp(mapping.name, kLinuxGateLibraryName) == 0) { + void* linux_gate = NULL; + if (pid_ == sys_getpid()) { + linux_gate = reinterpret_cast(mapping.start_addr); + } else { + linux_gate = allocator_.Alloc(mapping.size); + CopyFromProcess(linux_gate, pid_, + reinterpret_cast(mapping.start_addr), + mapping.size); + } + return FileID::ElfFileIdentifierFromMappedFile(linux_gate, identifier); + } + + char filename[NAME_MAX]; + size_t filename_len = my_strlen(mapping.name); + if (filename_len >= NAME_MAX) { + assert(false); + return false; + } + my_memcpy(filename, mapping.name, filename_len); + filename[filename_len] = '\0'; + bool filename_modified = HandleDeletedFileInMapping(filename); + + MemoryMappedFile mapped_file(filename, mapping.offset); + if (!mapped_file.data() || mapped_file.size() < SELFMAG) + return false; + + bool success = + FileID::ElfFileIdentifierFromMappedFile(mapped_file.data(), identifier); + if (success && member && filename_modified) { + mappings_[mapping_id]->name[filename_len - + sizeof(kDeletedSuffix) + 1] = '\0'; + } + + return success; +} + +namespace { +bool ElfFileSoNameFromMappedFile( + const void* elf_base, char* soname, size_t soname_size) { + if (!IsValidElf(elf_base)) { + // Not ELF + return false; + } + + const void* segment_start; + size_t segment_size; + int elf_class; + if (!FindElfSection(elf_base, ".dynamic", SHT_DYNAMIC, + &segment_start, &segment_size, &elf_class)) { + // No dynamic section + return false; + } + + const void* dynstr_start; + size_t dynstr_size; + if (!FindElfSection(elf_base, ".dynstr", SHT_STRTAB, + &dynstr_start, &dynstr_size, &elf_class)) { + // No dynstr section + return false; + } + + const ElfW(Dyn)* dynamic = static_cast(segment_start); + size_t dcount = segment_size / sizeof(ElfW(Dyn)); + for (const ElfW(Dyn)* dyn = dynamic; dyn < dynamic + dcount; ++dyn) { + if (dyn->d_tag == DT_SONAME) { + const char* dynstr = static_cast(dynstr_start); + if (dyn->d_un.d_val >= dynstr_size) { + // Beyond the end of the dynstr section + return false; + } + const char* str = dynstr + dyn->d_un.d_val; + const size_t maxsize = dynstr_size - dyn->d_un.d_val; + my_strlcpy(soname, str, maxsize < soname_size ? maxsize : soname_size); + return true; + } + } + + // Did not find SONAME + return false; +} + +// Find the shared object name (SONAME) by examining the ELF information +// for |mapping|. If the SONAME is found copy it into the passed buffer +// |soname| and return true. The size of the buffer is |soname_size|. +// The SONAME will be truncated if it is too long to fit in the buffer. 
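+//
+// Illustrative example (placeholder paths): for a library mapped straight out
+// of an APK the mapping's path is the archive and its offset is non-zero, so
+// the SONAME recovered by the helper below is what
+// LinuxDumper::GetMappingEffectiveNameAndPath() (further below) reports:
+//
+//   char path[NAME_MAX], name[256];
+//   LinuxDumper::GetMappingEffectiveNameAndPath(mapping, path, sizeof(path),
+//                                               name, sizeof(name));
+//   // path: "/data/app/org.example-1/base.apk/libexample.so"
+//   // name: "libexample.so"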
+bool ElfFileSoName( + const MappingInfo& mapping, char* soname, size_t soname_size) { + if (IsMappedFileOpenUnsafe(mapping)) { + // Not safe + return false; + } + + char filename[NAME_MAX]; + size_t filename_len = my_strlen(mapping.name); + if (filename_len >= NAME_MAX) { + assert(false); + // name too long + return false; + } + + my_memcpy(filename, mapping.name, filename_len); + filename[filename_len] = '\0'; + + MemoryMappedFile mapped_file(filename, mapping.offset); + if (!mapped_file.data() || mapped_file.size() < SELFMAG) { + // mmap failed + return false; + } + + return ElfFileSoNameFromMappedFile(mapped_file.data(), soname, soname_size); +} + +} // namespace + + +// static +void LinuxDumper::GetMappingEffectiveNameAndPath(const MappingInfo& mapping, + char* file_path, + size_t file_path_size, + char* file_name, + size_t file_name_size) { + my_strlcpy(file_path, mapping.name, file_path_size); + + // If an executable is mapped from a non-zero offset, this is likely because + // the executable was loaded directly from inside an archive file (e.g., an + // apk on Android). We try to find the name of the shared object (SONAME) by + // looking in the file for ELF sections. + bool mapped_from_archive = false; + if (mapping.exec && mapping.offset != 0) + mapped_from_archive = ElfFileSoName(mapping, file_name, file_name_size); + + if (mapped_from_archive) { + // Some tools (e.g., stackwalk) extract the basename from the pathname. In + // this case, we append the file_name to the mapped archive path as follows: + // file_name := libname.so + // file_path := /path/to/ARCHIVE.APK/libname.so + if (my_strlen(file_path) + 1 + my_strlen(file_name) < file_path_size) { + my_strlcat(file_path, "/", file_path_size); + my_strlcat(file_path, file_name, file_path_size); + } + } else { + // Common case: + // file_path := /path/to/libname.so + // file_name := libname.so + const char* basename = my_strrchr(file_path, '/'); + basename = basename == NULL ? file_path : (basename + 1); + my_strlcpy(file_name, basename, file_name_size); + } +} + +bool LinuxDumper::ReadAuxv() { + char auxv_path[NAME_MAX]; + if (!BuildProcPath(auxv_path, pid_, "auxv")) { + return false; + } + + int fd = sys_open(auxv_path, O_RDONLY, 0); + if (fd < 0) { + return false; + } + + elf_aux_entry one_aux_entry; + bool res = false; + while (sys_read(fd, + &one_aux_entry, + sizeof(elf_aux_entry)) == sizeof(elf_aux_entry) && + one_aux_entry.a_type != AT_NULL) { + if (one_aux_entry.a_type <= AT_MAX) { + auxv_[one_aux_entry.a_type] = one_aux_entry.a_un.a_val; + res = true; + } + } + sys_close(fd); + return res; +} + +bool LinuxDumper::EnumerateMappings() { + char maps_path[NAME_MAX]; + if (!BuildProcPath(maps_path, pid_, "maps")) + return false; + + // linux_gate_loc is the beginning of the kernel's mapping of + // linux-gate.so in the process. It doesn't actually show up in the + // maps list as a filename, but it can be found using the AT_SYSINFO_EHDR + // aux vector entry, which gives the information necessary to special + // case its entry when creating the list of mappings. + // See http://www.trilithium.com/johan/2005/08/linux-gate/ for more + // information. + const void* linux_gate_loc = + reinterpret_cast(auxv_[AT_SYSINFO_EHDR]); + // Although the initial executable is usually the first mapping, it's not + // guaranteed (see http://crosbug.com/25355); therefore, try to use the + // actual entry point to find the mapping. 
+ const void* entry_point_loc = reinterpret_cast(auxv_[AT_ENTRY]); + + const int fd = sys_open(maps_path, O_RDONLY, 0); + if (fd < 0) + return false; + LineReader* const line_reader = new(allocator_) LineReader(fd); + + const char* line; + unsigned line_len; + while (line_reader->GetNextLine(&line, &line_len)) { + uintptr_t start_addr, end_addr, offset; + + const char* i1 = my_read_hex_ptr(&start_addr, line); + if (*i1 == '-') { + const char* i2 = my_read_hex_ptr(&end_addr, i1 + 1); + if (*i2 == ' ') { + bool exec = (*(i2 + 3) == 'x'); + const char* i3 = my_read_hex_ptr(&offset, i2 + 6 /* skip ' rwxp ' */); + if (*i3 == ' ') { + const char* name = NULL; + // Only copy name if the name is a valid path name, or if + // it's the VDSO image. + if (((name = my_strchr(line, '/')) == NULL) && + linux_gate_loc && + reinterpret_cast(start_addr) == linux_gate_loc) { + name = kLinuxGateLibraryName; + offset = 0; + } + // Merge adjacent mappings with the same name into one module, + // assuming they're a single library mapped by the dynamic linker + if (name && !mappings_.empty()) { + MappingInfo* module = mappings_.back(); + if ((start_addr == module->start_addr + module->size) && + (my_strlen(name) == my_strlen(module->name)) && + (my_strncmp(name, module->name, my_strlen(name)) == 0)) { + module->size = end_addr - module->start_addr; + line_reader->PopLine(line_len); + continue; + } + } + // Also merge mappings that result from address ranges that the + // linker reserved but which a loaded library did not use. These + // appear as an anonymous private mapping with no access flags set + // and which directly follow an executable mapping. + if (!name && !mappings_.empty()) { + MappingInfo* module = mappings_.back(); + if ((start_addr == module->start_addr + module->size) && + module->exec && + module->name[0] == '/' && + offset == 0 && my_strncmp(i2, + kReservedFlags, + sizeof(kReservedFlags) - 1) == 0) { + module->size = end_addr - module->start_addr; + line_reader->PopLine(line_len); + continue; + } + } + MappingInfo* const module = new(allocator_) MappingInfo; + my_memset(module, 0, sizeof(MappingInfo)); + module->start_addr = start_addr; + module->size = end_addr - start_addr; + module->offset = offset; + module->exec = exec; + if (name != NULL) { + const unsigned l = my_strlen(name); + if (l < sizeof(module->name)) + my_memcpy(module->name, name, l); + } + // If this is the entry-point mapping, and it's not already the + // first one, then we need to make it be first. This is because + // the minidump format assumes the first module is the one that + // corresponds to the main executable (as codified in + // processor/minidump.cc:MinidumpModuleList::GetMainModule()). + if (entry_point_loc && + (entry_point_loc >= + reinterpret_cast(module->start_addr)) && + (entry_point_loc < + reinterpret_cast(module->start_addr+module->size)) && + !mappings_.empty()) { + // push the module onto the front of the list. + mappings_.resize(mappings_.size() + 1); + for (size_t idx = mappings_.size() - 1; idx > 0; idx--) + mappings_[idx] = mappings_[idx - 1]; + mappings_[0] = module; + } else { + mappings_.push_back(module); + } + } + } + } + line_reader->PopLine(line_len); + } + + sys_close(fd); + + return !mappings_.empty(); +} + +// Get information about the stack, given the stack pointer. We don't try to +// walk the stack since we might not have all the information needed to do +// unwind. So we just grab, up to, 32k of stack. 
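+// Illustrative numbers (hypothetical): with 4 KiB pages, a stack pointer of
+// 0x7fffb3c1a4c8 is rounded down to the page base 0x7fffb3c1a000, and up to
+// 32 KiB from there (clamped to the end of the containing mapping) is what a
+// caller receives:
+//
+//   const void* stack;
+//   size_t stack_len;
+//   if (dumper.GetStackInfo(&stack, &stack_len, stack_pointer))
+//     ;  // copy [stack, stack + stack_len) into the minidump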
+bool LinuxDumper::GetStackInfo(const void** stack, size_t* stack_len, + uintptr_t int_stack_pointer) { + // Move the stack pointer to the bottom of the page that it's in. + const uintptr_t page_size = getpagesize(); + + uint8_t* const stack_pointer = + reinterpret_cast(int_stack_pointer & ~(page_size - 1)); + + // The number of bytes of stack which we try to capture. + static const ptrdiff_t kStackToCapture = 32 * 1024; + + const MappingInfo* mapping = FindMapping(stack_pointer); + if (!mapping) + return false; + const ptrdiff_t offset = stack_pointer - + reinterpret_cast(mapping->start_addr); + const ptrdiff_t distance_to_end = + static_cast(mapping->size) - offset; + *stack_len = distance_to_end > kStackToCapture ? + kStackToCapture : distance_to_end; + *stack = stack_pointer; + return true; +} + +// Find the mapping which the given memory address falls in. +const MappingInfo* LinuxDumper::FindMapping(const void* address) const { + const uintptr_t addr = (uintptr_t) address; + + for (size_t i = 0; i < mappings_.size(); ++i) { + const uintptr_t start = static_cast(mappings_[i]->start_addr); + if (addr >= start && addr - start < mappings_[i]->size) + return mappings_[i]; + } + + return NULL; +} + +bool LinuxDumper::HandleDeletedFileInMapping(char* path) const { + static const size_t kDeletedSuffixLen = sizeof(kDeletedSuffix) - 1; + + // Check for ' (deleted)' in |path|. + // |path| has to be at least as long as "/x (deleted)". + const size_t path_len = my_strlen(path); + if (path_len < kDeletedSuffixLen + 2) + return false; + if (my_strncmp(path + path_len - kDeletedSuffixLen, kDeletedSuffix, + kDeletedSuffixLen) != 0) { + return false; + } + + // Check |path| against the /proc/pid/exe 'symlink'. + char exe_link[NAME_MAX]; + char new_path[NAME_MAX]; + if (!BuildProcPath(exe_link, pid_, "exe")) + return false; + if (!SafeReadLink(exe_link, new_path)) + return false; + if (my_strcmp(path, new_path) != 0) + return false; + + // Check to see if someone actually named their executable 'foo (deleted)'. + struct kernel_stat exe_stat; + struct kernel_stat new_path_stat; + if (sys_stat(exe_link, &exe_stat) == 0 && + sys_stat(new_path, &new_path_stat) == 0 && + exe_stat.st_dev == new_path_stat.st_dev && + exe_stat.st_ino == new_path_stat.st_ino) { + return false; + } + + my_memcpy(path, exe_link, NAME_MAX); + return true; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_dumper.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_dumper.h new file mode 100644 index 0000000000..87dfadb4fd --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_dumper.h @@ -0,0 +1,189 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// linux_dumper.h: Define the google_breakpad::LinuxDumper class, which +// is a base class for extracting information of a crashed process. It +// was originally a complete implementation using the ptrace API, but +// has been refactored to allow derived implementations supporting both +// ptrace and core dump. A portion of the original implementation is now +// in google_breakpad::LinuxPtraceDumper (see linux_ptrace_dumper.h for +// details). + +#ifndef CLIENT_LINUX_MINIDUMP_WRITER_LINUX_DUMPER_H_ +#define CLIENT_LINUX_MINIDUMP_WRITER_LINUX_DUMPER_H_ + +#include +#include +#include +#include +#include + +#include "client/linux/dump_writer_common/mapping_info.h" +#include "client/linux/dump_writer_common/thread_info.h" +#include "common/memory.h" +#include "google_breakpad/common/minidump_format.h" + +namespace google_breakpad { + +// Typedef for our parsing of the auxv variables in /proc/pid/auxv. +#if defined(__i386) || defined(__ARM_EABI__) || \ + (defined(__mips__) && _MIPS_SIM == _ABIO32) +typedef Elf32_auxv_t elf_aux_entry; +#elif defined(__x86_64) || defined(__aarch64__) || \ + (defined(__mips__) && _MIPS_SIM != _ABIO32) +typedef Elf64_auxv_t elf_aux_entry; +#endif + +typedef __typeof__(((elf_aux_entry*) 0)->a_un.a_val) elf_aux_val_t; + +// When we find the VDSO mapping in the process's address space, this +// is the name we use for it when writing it to the minidump. +// This should always be less than NAME_MAX! +const char kLinuxGateLibraryName[] = "linux-gate.so"; + +class LinuxDumper { + public: + explicit LinuxDumper(pid_t pid); + + virtual ~LinuxDumper(); + + // Parse the data for |threads| and |mappings|. + virtual bool Init(); + + // Return true if the dumper performs a post-mortem dump. + virtual bool IsPostMortem() const = 0; + + // Suspend/resume all threads in the given process. + virtual bool ThreadsSuspend() = 0; + virtual bool ThreadsResume() = 0; + + // Read information about the |index|-th thread of |threads_|. + // Returns true on success. One must have called |ThreadsSuspend| first. + virtual bool GetThreadInfoByIndex(size_t index, ThreadInfo* info) = 0; + + // These are only valid after a call to |Init|. + const wasteful_vector &threads() { return threads_; } + const wasteful_vector &mappings() { return mappings_; } + const MappingInfo* FindMapping(const void* address) const; + const wasteful_vector& auxv() { return auxv_; } + + // Find a block of memory to take as the stack given the top of stack pointer. 
+ // stack: (output) the lowest address in the memory area + // stack_len: (output) the length of the memory area + // stack_top: the current top of the stack + bool GetStackInfo(const void** stack, size_t* stack_len, uintptr_t stack_top); + + PageAllocator* allocator() { return &allocator_; } + + // Copy content of |length| bytes from a given process |child|, + // starting from |src|, into |dest|. Returns true on success. + virtual bool CopyFromProcess(void* dest, pid_t child, const void* src, + size_t length) = 0; + + // Builds a proc path for a certain pid for a node (/proc//). + // |path| is a character array of at least NAME_MAX bytes to return the + // result.|node| is the final node without any slashes. Returns true on + // success. + virtual bool BuildProcPath(char* path, pid_t pid, const char* node) const = 0; + + // Generate a File ID from the .text section of a mapped entry. + // If not a member, mapping_id is ignored. This method can also manipulate the + // |mapping|.name to truncate "(deleted)" from the file name if necessary. + bool ElfFileIdentifierForMapping(const MappingInfo& mapping, + bool member, + unsigned int mapping_id, + uint8_t identifier[sizeof(MDGUID)]); + + uintptr_t crash_address() const { return crash_address_; } + void set_crash_address(uintptr_t crash_address) { + crash_address_ = crash_address; + } + + int crash_signal() const { return crash_signal_; } + void set_crash_signal(int crash_signal) { crash_signal_ = crash_signal; } + + pid_t crash_thread() const { return crash_thread_; } + void set_crash_thread(pid_t crash_thread) { crash_thread_ = crash_thread; } + + // Extracts the effective path and file name of from |mapping|. In most cases + // the effective name/path are just the mapping's path and basename. In some + // other cases, however, a library can be mapped from an archive (e.g., when + // loading .so libs from an apk on Android) and this method is able to + // reconstruct the original file name. + static void GetMappingEffectiveNameAndPath(const MappingInfo& mapping, + char* file_path, + size_t file_path_size, + char* file_name, + size_t file_name_size); + + protected: + bool ReadAuxv(); + + virtual bool EnumerateMappings(); + + virtual bool EnumerateThreads() = 0; + + // For the case where a running program has been deleted, it'll show up in + // /proc/pid/maps as "/path/to/program (deleted)". If this is the case, then + // see if '/path/to/program (deleted)' matches /proc/pid/exe and return + // /proc/pid/exe in |path| so ELF identifier generation works correctly. This + // also checks to see if '/path/to/program (deleted)' exists, so it does not + // get fooled by a poorly named binary. + // For programs that don't end with ' (deleted)', this is a no-op. + // This assumes |path| is a buffer with length NAME_MAX. + // Returns true if |path| is modified. + bool HandleDeletedFileInMapping(char* path) const; + + // ID of the crashed process. + const pid_t pid_; + + // Virtual address at which the process crashed. + uintptr_t crash_address_; + + // Signal that terminated the crashed process. + int crash_signal_; + + // ID of the crashed thread. + pid_t crash_thread_; + + mutable PageAllocator allocator_; + + // IDs of all the threads. + wasteful_vector threads_; + + // Info from /proc//maps. 
+ wasteful_vector mappings_; + + // Info from /proc//auxv + wasteful_vector auxv_; +}; + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_HANDLER_LINUX_DUMPER_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_dumper_unittest_helper.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_dumper_unittest_helper.cc new file mode 100644 index 0000000000..4ccb7201fa --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_dumper_unittest_helper.cc @@ -0,0 +1,94 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Helper program for the linux_dumper class, which creates a bunch of +// threads. The first word of each thread's stack is set to the thread +// id. + +#include +#include +#include +#include +#include +#include + +#include "common/scoped_ptr.h" +#include "third_party/lss/linux_syscall_support.h" + +#if defined(__ARM_EABI__) +#define TID_PTR_REGISTER "r3" +#elif defined(__aarch64__) +#define TID_PTR_REGISTER "x3" +#elif defined(__i386) +#define TID_PTR_REGISTER "ecx" +#elif defined(__x86_64) +#define TID_PTR_REGISTER "rcx" +#elif defined(__mips__) +#define TID_PTR_REGISTER "$1" +#else +#error This test has not been ported to this platform. +#endif + +void *thread_function(void *data) { + int pipefd = *static_cast(data); + volatile pid_t thread_id = syscall(__NR_gettid); + // Signal parent that a thread has started. 
+ uint8_t byte = 1; + if (write(pipefd, &byte, sizeof(byte)) != sizeof(byte)) { + perror("ERROR: parent notification failed"); + return NULL; + } + register volatile pid_t *thread_id_ptr asm(TID_PTR_REGISTER) = &thread_id; + while (true) + asm volatile ("" : : "r" (thread_id_ptr)); + return NULL; +} + +int main(int argc, char *argv[]) { + if (argc < 3) { + fprintf(stderr, + "usage: linux_dumper_unittest_helper <# of threads>\n"); + return 1; + } + int pipefd = atoi(argv[1]); + int num_threads = atoi(argv[2]); + if (num_threads < 1) { + fprintf(stderr, "ERROR: number of threads is 0"); + return 1; + } + google_breakpad::scoped_array threads(new pthread_t[num_threads]); + pthread_attr_t thread_attributes; + pthread_attr_init(&thread_attributes); + pthread_attr_setdetachstate(&thread_attributes, PTHREAD_CREATE_DETACHED); + for (int i = 1; i < num_threads; i++) { + pthread_create(&threads[i], &thread_attributes, &thread_function, &pipefd); + } + thread_function(&pipefd); + return 0; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_ptrace_dumper.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_ptrace_dumper.cc new file mode 100644 index 0000000000..5ea0050de5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_ptrace_dumper.cc @@ -0,0 +1,349 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// linux_ptrace_dumper.cc: Implement google_breakpad::LinuxPtraceDumper. +// See linux_ptrace_dumper.h for detals. +// This class was originally splitted from google_breakpad::LinuxDumper. + +// This code deals with the mechanics of getting information about a crashed +// process. Since this code may run in a compromised address space, the same +// rules apply as detailed at the top of minidump_writer.h: no libc calls and +// use the alternative allocator. 
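As a point of comparison, here is a minimal standalone sketch of the "placement new into a never-freed pool" pattern those rules force on this code. BumpAllocator and ThreadRecord are ad-hoc names (this is not Breakpad's PageAllocator), and for brevity the sketch goes through the libc mmap(2) wrapper, which the vendored code itself is not allowed to do:

#include <sys/mman.h>
#include <cstddef>
#include <new>

class BumpAllocator {
 public:
  BumpAllocator() : base_(nullptr), used_(0), size_(64 * 1024) {
    void* p = mmap(nullptr, size_, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (p != MAP_FAILED)
      base_ = static_cast<char*>(p);
  }
  // Hand out bump-allocated chunks; nothing is ever freed individually,
  // the pool simply dies with the process.
  void* Alloc(size_t n) {
    n = (n + alignof(std::max_align_t) - 1) & ~(alignof(std::max_align_t) - 1);
    if (!base_ || used_ + n > size_)
      return nullptr;
    void* out = base_ + used_;
    used_ += n;
    return out;
  }
 private:
  char* base_;
  size_t used_;
  size_t size_;
};

struct ThreadRecord { int tid; };

int main() {
  BumpAllocator allocator;
  void* slot = allocator.Alloc(sizeof(ThreadRecord));
  if (!slot)
    return 1;
  // Placement new: construct in pre-reserved memory, never call delete.
  ThreadRecord* rec = new (slot) ThreadRecord{42};
  return rec->tid == 42 ? 0 : 1;
}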
+ +#include "client/linux/minidump_writer/linux_ptrace_dumper.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#if defined(__i386) +#include +#endif + +#include "client/linux/minidump_writer/directory_reader.h" +#include "client/linux/minidump_writer/line_reader.h" +#include "common/linux/linux_libc_support.h" +#include "third_party/lss/linux_syscall_support.h" + +// Suspends a thread by attaching to it. +static bool SuspendThread(pid_t pid) { + // This may fail if the thread has just died or debugged. + errno = 0; + if (sys_ptrace(PTRACE_ATTACH, pid, NULL, NULL) != 0 && + errno != 0) { + return false; + } + while (sys_waitpid(pid, NULL, __WALL) < 0) { + if (errno != EINTR) { + sys_ptrace(PTRACE_DETACH, pid, NULL, NULL); + return false; + } + } +#if defined(__i386) || defined(__x86_64) + // On x86, the stack pointer is NULL or -1, when executing trusted code in + // the seccomp sandbox. Not only does this cause difficulties down the line + // when trying to dump the thread's stack, it also results in the minidumps + // containing information about the trusted threads. This information is + // generally completely meaningless and just pollutes the minidumps. + // We thus test the stack pointer and exclude any threads that are part of + // the seccomp sandbox's trusted code. + user_regs_struct regs; + if (sys_ptrace(PTRACE_GETREGS, pid, NULL, ®s) == -1 || +#if defined(__i386) + !regs.esp +#elif defined(__x86_64) + !regs.rsp +#endif + ) { + sys_ptrace(PTRACE_DETACH, pid, NULL, NULL); + return false; + } +#endif + return true; +} + +// Resumes a thread by detaching from it. +static bool ResumeThread(pid_t pid) { + return sys_ptrace(PTRACE_DETACH, pid, NULL, NULL) >= 0; +} + +namespace google_breakpad { + +LinuxPtraceDumper::LinuxPtraceDumper(pid_t pid) + : LinuxDumper(pid), + threads_suspended_(false) { +} + +bool LinuxPtraceDumper::BuildProcPath(char* path, pid_t pid, + const char* node) const { + if (!path || !node || pid <= 0) + return false; + + size_t node_len = my_strlen(node); + if (node_len == 0) + return false; + + const unsigned pid_len = my_uint_len(pid); + const size_t total_length = 6 + pid_len + 1 + node_len; + if (total_length >= NAME_MAX) + return false; + + my_memcpy(path, "/proc/", 6); + my_uitos(path + 6, pid, pid_len); + path[6 + pid_len] = '/'; + my_memcpy(path + 6 + pid_len + 1, node, node_len); + path[total_length] = '\0'; + return true; +} + +bool LinuxPtraceDumper::CopyFromProcess(void* dest, pid_t child, + const void* src, size_t length) { + unsigned long tmp = 55; + size_t done = 0; + static const size_t word_size = sizeof(tmp); + uint8_t* const local = (uint8_t*) dest; + uint8_t* const remote = (uint8_t*) src; + + while (done < length) { + const size_t l = (length - done > word_size) ? word_size : (length - done); + if (sys_ptrace(PTRACE_PEEKDATA, child, remote + done, &tmp) == -1) { + tmp = 0; + } + my_memcpy(local + done, &tmp, l); + done += l; + } + return true; +} + +// Read thread info from /proc/$pid/status. +// Fill out the |tgid|, |ppid| and |pid| members of |info|. If unavailable, +// these members are set to -1. Returns true iff all three members are +// available. 
+bool LinuxPtraceDumper::GetThreadInfoByIndex(size_t index, ThreadInfo* info) { + if (index >= threads_.size()) + return false; + + pid_t tid = threads_[index]; + + assert(info != NULL); + char status_path[NAME_MAX]; + if (!BuildProcPath(status_path, tid, "status")) + return false; + + const int fd = sys_open(status_path, O_RDONLY, 0); + if (fd < 0) + return false; + + LineReader* const line_reader = new(allocator_) LineReader(fd); + const char* line; + unsigned line_len; + + info->ppid = info->tgid = -1; + + while (line_reader->GetNextLine(&line, &line_len)) { + if (my_strncmp("Tgid:\t", line, 6) == 0) { + my_strtoui(&info->tgid, line + 6); + } else if (my_strncmp("PPid:\t", line, 6) == 0) { + my_strtoui(&info->ppid, line + 6); + } + + line_reader->PopLine(line_len); + } + sys_close(fd); + + if (info->ppid == -1 || info->tgid == -1) + return false; + +#ifdef PTRACE_GETREGSET + struct iovec io; + info->GetGeneralPurposeRegisters(&io.iov_base, &io.iov_len); + if (sys_ptrace(PTRACE_GETREGSET, tid, (void*)NT_PRSTATUS, (void*)&io) == -1) { + return false; + } + + info->GetFloatingPointRegisters(&io.iov_base, &io.iov_len); + if (sys_ptrace(PTRACE_GETREGSET, tid, (void*)NT_FPREGSET, (void*)&io) == -1) { + return false; + } +#else // PTRACE_GETREGSET + void* gp_addr; + info->GetGeneralPurposeRegisters(&gp_addr, NULL); + if (sys_ptrace(PTRACE_GETREGS, tid, NULL, gp_addr) == -1) { + return false; + } + + void* fp_addr; + info->GetFloatingPointRegisters(&fp_addr, NULL); + if (sys_ptrace(PTRACE_GETFPREGS, tid, NULL, fp_addr) == -1) { + return false; + } +#endif + +#if defined(__i386) +#if !defined(bit_FXSAVE) // e.g. Clang +#define bit_FXSAVE bit_FXSR +#endif + // Detect if the CPU supports the FXSAVE/FXRSTOR instructions + int eax, ebx, ecx, edx; + __cpuid(1, eax, ebx, ecx, edx); + if (edx & bit_FXSAVE) { + if (sys_ptrace(PTRACE_GETFPXREGS, tid, NULL, &info->fpxregs) == -1) { + return false; + } + } else { + memset(&info->fpxregs, 0, sizeof(info->fpxregs)); + } +#endif // defined(__i386) + +#if defined(__i386) || defined(__x86_64) + for (unsigned i = 0; i < ThreadInfo::kNumDebugRegisters; ++i) { + if (sys_ptrace( + PTRACE_PEEKUSER, tid, + reinterpret_cast (offsetof(struct user, + u_debugreg[0]) + i * + sizeof(debugreg_t)), + &info->dregs[i]) == -1) { + return false; + } + } +#endif + +#if defined(__mips__) + sys_ptrace(PTRACE_PEEKUSER, tid, + reinterpret_cast(DSP_BASE), &info->mcontext.hi1); + sys_ptrace(PTRACE_PEEKUSER, tid, + reinterpret_cast(DSP_BASE + 1), &info->mcontext.lo1); + sys_ptrace(PTRACE_PEEKUSER, tid, + reinterpret_cast(DSP_BASE + 2), &info->mcontext.hi2); + sys_ptrace(PTRACE_PEEKUSER, tid, + reinterpret_cast(DSP_BASE + 3), &info->mcontext.lo2); + sys_ptrace(PTRACE_PEEKUSER, tid, + reinterpret_cast(DSP_BASE + 4), &info->mcontext.hi3); + sys_ptrace(PTRACE_PEEKUSER, tid, + reinterpret_cast(DSP_BASE + 5), &info->mcontext.lo3); + sys_ptrace(PTRACE_PEEKUSER, tid, + reinterpret_cast(DSP_CONTROL), &info->mcontext.dsp); +#endif + + const uint8_t* stack_pointer; +#if defined(__i386) + my_memcpy(&stack_pointer, &info->regs.esp, sizeof(info->regs.esp)); +#elif defined(__x86_64) + my_memcpy(&stack_pointer, &info->regs.rsp, sizeof(info->regs.rsp)); +#elif defined(__ARM_EABI__) + my_memcpy(&stack_pointer, &info->regs.ARM_sp, sizeof(info->regs.ARM_sp)); +#elif defined(__aarch64__) + my_memcpy(&stack_pointer, &info->regs.sp, sizeof(info->regs.sp)); +#elif defined(__mips__) + stack_pointer = + reinterpret_cast(info->mcontext.gregs[MD_CONTEXT_MIPS_REG_SP]); +#else +#error "This code hasn't been ported 
to your platform yet." +#endif + info->stack_pointer = reinterpret_cast(stack_pointer); + + return true; +} + +bool LinuxPtraceDumper::IsPostMortem() const { + return false; +} + +bool LinuxPtraceDumper::ThreadsSuspend() { + if (threads_suspended_) + return true; + for (size_t i = 0; i < threads_.size(); ++i) { + if (!SuspendThread(threads_[i])) { + // If the thread either disappeared before we could attach to it, or if + // it was part of the seccomp sandbox's trusted code, it is OK to + // silently drop it from the minidump. + if (i < threads_.size() - 1) { + my_memmove(&threads_[i], &threads_[i + 1], + (threads_.size() - i - 1) * sizeof(threads_[i])); + } + threads_.resize(threads_.size() - 1); + --i; + } + } + threads_suspended_ = true; + return threads_.size() > 0; +} + +bool LinuxPtraceDumper::ThreadsResume() { + if (!threads_suspended_) + return false; + bool good = true; + for (size_t i = 0; i < threads_.size(); ++i) + good &= ResumeThread(threads_[i]); + threads_suspended_ = false; + return good; +} + +// Parse /proc/$pid/task to list all the threads of the process identified by +// pid. +bool LinuxPtraceDumper::EnumerateThreads() { + char task_path[NAME_MAX]; + if (!BuildProcPath(task_path, pid_, "task")) + return false; + + const int fd = sys_open(task_path, O_RDONLY | O_DIRECTORY, 0); + if (fd < 0) + return false; + DirectoryReader* dir_reader = new(allocator_) DirectoryReader(fd); + + // The directory may contain duplicate entries which we filter by assuming + // that they are consecutive. + int last_tid = -1; + const char* dent_name; + while (dir_reader->GetNextEntry(&dent_name)) { + if (my_strcmp(dent_name, ".") && + my_strcmp(dent_name, "..")) { + int tid = 0; + if (my_strtoui(&tid, dent_name) && + last_tid != tid) { + last_tid = tid; + threads_.push_back(tid); + } + } + dir_reader->PopEntry(); + } + + sys_close(fd); + return true; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_ptrace_dumper.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_ptrace_dumper.h new file mode 100644 index 0000000000..2ce834b0fd --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_ptrace_dumper.h @@ -0,0 +1,92 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// linux_ptrace_dumper.h: Define the google_breakpad::LinuxPtraceDumper +// class, which is derived from google_breakpad::LinuxDumper to extract +// information from a crashed process via ptrace. +// This class was originally splitted from google_breakpad::LinuxDumper. + +#ifndef CLIENT_LINUX_MINIDUMP_WRITER_LINUX_PTRACE_DUMPER_H_ +#define CLIENT_LINUX_MINIDUMP_WRITER_LINUX_PTRACE_DUMPER_H_ + +#include "client/linux/minidump_writer/linux_dumper.h" + +namespace google_breakpad { + +class LinuxPtraceDumper : public LinuxDumper { + public: + // Constructs a dumper for extracting information of a given process + // with a process ID of |pid|. + explicit LinuxPtraceDumper(pid_t pid); + + // Implements LinuxDumper::BuildProcPath(). + // Builds a proc path for a certain pid for a node (/proc//). + // |path| is a character array of at least NAME_MAX bytes to return the + // result. |node| is the final node without any slashes. Returns true on + // success. + virtual bool BuildProcPath(char* path, pid_t pid, const char* node) const; + + // Implements LinuxDumper::CopyFromProcess(). + // Copies content of |length| bytes from a given process |child|, + // starting from |src|, into |dest|. This method uses ptrace to extract + // the content from the target process. Always returns true. + virtual bool CopyFromProcess(void* dest, pid_t child, const void* src, + size_t length); + + // Implements LinuxDumper::GetThreadInfoByIndex(). + // Reads information about the |index|-th thread of |threads_|. + // Returns true on success. One must have called |ThreadsSuspend| first. + virtual bool GetThreadInfoByIndex(size_t index, ThreadInfo* info); + + // Implements LinuxDumper::IsPostMortem(). + // Always returns false to indicate this dumper performs a dump of + // a crashed process via ptrace. + virtual bool IsPostMortem() const; + + // Implements LinuxDumper::ThreadsSuspend(). + // Suspends all threads in the given process. Returns true on success. + virtual bool ThreadsSuspend(); + + // Implements LinuxDumper::ThreadsResume(). + // Resumes all threads in the given process. Returns true on success. + virtual bool ThreadsResume(); + + protected: + // Implements LinuxDumper::EnumerateThreads(). + // Enumerates all threads of the given process into |threads_|. + virtual bool EnumerateThreads(); + + private: + // Set to true if all threads of the crashed process are suspended. + bool threads_suspended_; +}; + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_HANDLER_LINUX_PTRACE_DUMPER_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_ptrace_dumper_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_ptrace_dumper_unittest.cc new file mode 100644 index 0000000000..838ea5f6ba --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/linux_ptrace_dumper_unittest.cc @@ -0,0 +1,463 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// linux_ptrace_dumper_unittest.cc: +// Unit tests for google_breakpad::LinuxPtraceDumper. +// +// This file was renamed from linux_dumper_unittest.cc and modified due +// to LinuxDumper being splitted into two classes. + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "breakpad_googletest_includes.h" +#include "client/linux/minidump_writer/linux_ptrace_dumper.h" +#include "client/linux/minidump_writer/minidump_writer_unittest_utils.h" +#include "common/linux/eintr_wrapper.h" +#include "common/linux/file_id.h" +#include "common/linux/ignore_ret.h" +#include "common/linux/safe_readlink.h" +#include "common/memory.h" +#include "common/using_std_string.h" + +#ifndef PR_SET_PTRACER +#define PR_SET_PTRACER 0x59616d61 +#endif + +using namespace google_breakpad; + +namespace { + +typedef testing::Test LinuxPtraceDumperTest; + +/* Fixture for running tests in a child process. */ +class LinuxPtraceDumperChildTest : public testing::Test { + protected: + virtual void SetUp() { + child_pid_ = fork(); +#ifndef __ANDROID__ + prctl(PR_SET_PTRACER, child_pid_); +#endif + } + + /* Gtest is calling TestBody from this class, which sets up a child + * process in which the RealTestBody virtual member is called. + * As such, TestBody is not supposed to be overridden in derived classes. + */ + virtual void TestBody() /* final */ { + if (child_pid_ == 0) { + // child process + RealTestBody(); + exit(HasFatalFailure() ? kFatalFailure : + (HasNonfatalFailure() ? kNonFatalFailure : 0)); + } + + ASSERT_TRUE(child_pid_ > 0); + int status; + waitpid(child_pid_, &status, 0); + if (WEXITSTATUS(status) == kFatalFailure) { + GTEST_FATAL_FAILURE_("Test failed in child process"); + } else if (WEXITSTATUS(status) == kNonFatalFailure) { + GTEST_NONFATAL_FAILURE_("Test failed in child process"); + } + } + + /* Gtest defines TestBody functions through its macros, but classes + * derived from this one need to define RealTestBody instead. 
+ * This is achieved by defining a TestBody macro further below. + */ + virtual void RealTestBody() = 0; + private: + static const int kFatalFailure = 1; + static const int kNonFatalFailure = 2; + + pid_t child_pid_; +}; + +} // namespace + +/* Replace TestBody declarations within TEST*() with RealTestBody + * declarations */ +#define TestBody RealTestBody + +TEST_F(LinuxPtraceDumperChildTest, Setup) { + LinuxPtraceDumper dumper(getppid()); +} + +TEST_F(LinuxPtraceDumperChildTest, FindMappings) { + LinuxPtraceDumper dumper(getppid()); + ASSERT_TRUE(dumper.Init()); + + ASSERT_TRUE(dumper.FindMapping(reinterpret_cast(getpid))); + ASSERT_TRUE(dumper.FindMapping(reinterpret_cast(printf))); + ASSERT_FALSE(dumper.FindMapping(NULL)); +} + +TEST_F(LinuxPtraceDumperChildTest, ThreadList) { + LinuxPtraceDumper dumper(getppid()); + ASSERT_TRUE(dumper.Init()); + + ASSERT_GE(dumper.threads().size(), (size_t)1); + bool found = false; + for (size_t i = 0; i < dumper.threads().size(); ++i) { + if (dumper.threads()[i] == getppid()) { + ASSERT_FALSE(found); + found = true; + } + } + ASSERT_TRUE(found); +} + +// Helper stack class to close a file descriptor and unmap +// a mmap'ed mapping. +class StackHelper { + public: + StackHelper() + : fd_(-1), mapping_(NULL), size_(0) {} + ~StackHelper() { + if (size_) + munmap(mapping_, size_); + if (fd_ >= 0) + close(fd_); + } + void Init(int fd, char* mapping, size_t size) { + fd_ = fd; + mapping_ = mapping; + size_ = size; + } + + char* mapping() const { return mapping_; } + size_t size() const { return size_; } + + private: + int fd_; + char* mapping_; + size_t size_; +}; + +class LinuxPtraceDumperMappingsTest : public LinuxPtraceDumperChildTest { + protected: + virtual void SetUp(); + + string helper_path_; + size_t page_size_; + StackHelper helper_; +}; + +void LinuxPtraceDumperMappingsTest::SetUp() { + helper_path_ = GetHelperBinary(); + if (helper_path_.empty()) { + FAIL() << "Couldn't find helper binary"; + exit(1); + } + + // mmap two segments out of the helper binary, one + // enclosed in the other, but with different protections. + page_size_ = sysconf(_SC_PAGESIZE); + const size_t kMappingSize = 3 * page_size_; + int fd = open(helper_path_.c_str(), O_RDONLY); + ASSERT_NE(-1, fd) << "Failed to open file: " << helper_path_ + << ", Error: " << strerror(errno); + char* mapping = + reinterpret_cast(mmap(NULL, + kMappingSize, + PROT_READ, + MAP_SHARED, + fd, + 0)); + ASSERT_TRUE(mapping); + + // Ensure that things get cleaned up. + helper_.Init(fd, mapping, kMappingSize); + + // Carve a page out of the first mapping with different permissions. + char* inside_mapping = reinterpret_cast( + mmap(mapping + 2 * page_size_, + page_size_, + PROT_NONE, + MAP_SHARED | MAP_FIXED, + fd, + // Map a different offset just to + // better test real-world conditions. + page_size_)); + ASSERT_TRUE(inside_mapping); + + LinuxPtraceDumperChildTest::SetUp(); +} + +TEST_F(LinuxPtraceDumperMappingsTest, MergedMappings) { + // Now check that LinuxPtraceDumper interpreted the mappings properly. + LinuxPtraceDumper dumper(getppid()); + ASSERT_TRUE(dumper.Init()); + int mapping_count = 0; + for (unsigned i = 0; i < dumper.mappings().size(); ++i) { + const MappingInfo& mapping = *dumper.mappings()[i]; + if (strcmp(mapping.name, this->helper_path_.c_str()) == 0) { + // This mapping should encompass the entire original mapped + // range. 
+ EXPECT_EQ(reinterpret_cast(this->helper_.mapping()), + mapping.start_addr); + EXPECT_EQ(this->helper_.size(), mapping.size); + EXPECT_EQ(0U, mapping.offset); + mapping_count++; + } + } + EXPECT_EQ(1, mapping_count); +} + +TEST_F(LinuxPtraceDumperChildTest, BuildProcPath) { + const pid_t pid = getppid(); + LinuxPtraceDumper dumper(pid); + + char maps_path[NAME_MAX] = ""; + char maps_path_expected[NAME_MAX]; + snprintf(maps_path_expected, sizeof(maps_path_expected), + "/proc/%d/maps", pid); + EXPECT_TRUE(dumper.BuildProcPath(maps_path, pid, "maps")); + EXPECT_STREQ(maps_path_expected, maps_path); + + EXPECT_FALSE(dumper.BuildProcPath(NULL, pid, "maps")); + EXPECT_FALSE(dumper.BuildProcPath(maps_path, 0, "maps")); + EXPECT_FALSE(dumper.BuildProcPath(maps_path, pid, "")); + EXPECT_FALSE(dumper.BuildProcPath(maps_path, pid, NULL)); + + char long_node[NAME_MAX]; + size_t long_node_len = NAME_MAX - strlen("/proc/123") - 1; + memset(long_node, 'a', long_node_len); + long_node[long_node_len] = '\0'; + EXPECT_FALSE(dumper.BuildProcPath(maps_path, 123, long_node)); +} + +#if !defined(__ARM_EABI__) && !defined(__mips__) +// Ensure that the linux-gate VDSO is included in the mapping list. +TEST_F(LinuxPtraceDumperChildTest, MappingsIncludeLinuxGate) { + LinuxPtraceDumper dumper(getppid()); + ASSERT_TRUE(dumper.Init()); + + void* linux_gate_loc = + reinterpret_cast(dumper.auxv()[AT_SYSINFO_EHDR]); + ASSERT_TRUE(linux_gate_loc); + bool found_linux_gate = false; + + const wasteful_vector mappings = dumper.mappings(); + const MappingInfo* mapping; + for (unsigned i = 0; i < mappings.size(); ++i) { + mapping = mappings[i]; + if (!strcmp(mapping->name, kLinuxGateLibraryName)) { + found_linux_gate = true; + break; + } + } + EXPECT_TRUE(found_linux_gate); + EXPECT_EQ(linux_gate_loc, reinterpret_cast(mapping->start_addr)); + EXPECT_EQ(0, memcmp(linux_gate_loc, ELFMAG, SELFMAG)); +} + +// Ensure that the linux-gate VDSO can generate a non-zeroed File ID. +TEST_F(LinuxPtraceDumperChildTest, LinuxGateMappingID) { + LinuxPtraceDumper dumper(getppid()); + ASSERT_TRUE(dumper.Init()); + + bool found_linux_gate = false; + const wasteful_vector mappings = dumper.mappings(); + unsigned index = 0; + for (unsigned i = 0; i < mappings.size(); ++i) { + if (!strcmp(mappings[i]->name, kLinuxGateLibraryName)) { + found_linux_gate = true; + index = i; + break; + } + } + ASSERT_TRUE(found_linux_gate); + + // Need to suspend the child so ptrace actually works. + ASSERT_TRUE(dumper.ThreadsSuspend()); + uint8_t identifier[sizeof(MDGUID)]; + ASSERT_TRUE(dumper.ElfFileIdentifierForMapping(*mappings[index], + true, + index, + identifier)); + uint8_t empty_identifier[sizeof(MDGUID)]; + memset(empty_identifier, 0, sizeof(empty_identifier)); + EXPECT_NE(0, memcmp(empty_identifier, identifier, sizeof(identifier))); + EXPECT_TRUE(dumper.ThreadsResume()); +} +#endif + +TEST_F(LinuxPtraceDumperChildTest, FileIDsMatch) { + // Calculate the File ID of our binary using both + // FileID::ElfFileIdentifier and LinuxDumper::ElfFileIdentifierForMapping + // and ensure that we get the same result from both. 
+ char exe_name[PATH_MAX]; + ASSERT_TRUE(SafeReadLink("/proc/self/exe", exe_name)); + + LinuxPtraceDumper dumper(getppid()); + ASSERT_TRUE(dumper.Init()); + const wasteful_vector mappings = dumper.mappings(); + bool found_exe = false; + unsigned i; + for (i = 0; i < mappings.size(); ++i) { + const MappingInfo* mapping = mappings[i]; + if (!strcmp(mapping->name, exe_name)) { + found_exe = true; + break; + } + } + ASSERT_TRUE(found_exe); + + uint8_t identifier1[sizeof(MDGUID)]; + uint8_t identifier2[sizeof(MDGUID)]; + EXPECT_TRUE(dumper.ElfFileIdentifierForMapping(*mappings[i], true, i, + identifier1)); + FileID fileid(exe_name); + EXPECT_TRUE(fileid.ElfFileIdentifier(identifier2)); + char identifier_string1[37]; + char identifier_string2[37]; + FileID::ConvertIdentifierToString(identifier1, identifier_string1, + 37); + FileID::ConvertIdentifierToString(identifier2, identifier_string2, + 37); + EXPECT_STREQ(identifier_string1, identifier_string2); +} + +/* Get back to normal behavior of TEST*() macros wrt TestBody. */ +#undef TestBody + +TEST(LinuxPtraceDumperTest, VerifyStackReadWithMultipleThreads) { + static const int kNumberOfThreadsInHelperProgram = 5; + char kNumberOfThreadsArgument[2]; + sprintf(kNumberOfThreadsArgument, "%d", kNumberOfThreadsInHelperProgram); + + int fds[2]; + ASSERT_NE(-1, pipe(fds)); + + pid_t child_pid = fork(); + if (child_pid == 0) { + // In child process. + close(fds[0]); + + string helper_path(GetHelperBinary()); + if (helper_path.empty()) { + FAIL() << "Couldn't find helper binary"; + exit(1); + } + + // Pass the pipe fd and the number of threads as arguments. + char pipe_fd_string[8]; + sprintf(pipe_fd_string, "%d", fds[1]); + execl(helper_path.c_str(), + "linux_dumper_unittest_helper", + pipe_fd_string, + kNumberOfThreadsArgument, + NULL); + // Kill if we get here. + printf("Errno from exec: %d", errno); + FAIL() << "Exec of " << helper_path << " failed: " << strerror(errno); + exit(0); + } + close(fds[1]); + + // Wait for all child threads to indicate that they have started + for (int threads = 0; threads < kNumberOfThreadsInHelperProgram; threads++) { + struct pollfd pfd; + memset(&pfd, 0, sizeof(pfd)); + pfd.fd = fds[0]; + pfd.events = POLLIN | POLLERR; + + const int r = HANDLE_EINTR(poll(&pfd, 1, 1000)); + ASSERT_EQ(1, r); + ASSERT_TRUE(pfd.revents & POLLIN); + uint8_t junk; + ASSERT_EQ(read(fds[0], &junk, sizeof(junk)), + static_cast(sizeof(junk))); + } + close(fds[0]); + + // There is a race here because we may stop a child thread before + // it is actually running the busy loop. Empirically this sleep + // is sufficient to avoid the race. + usleep(100000); + + // Children are ready now. + LinuxPtraceDumper dumper(child_pid); + ASSERT_TRUE(dumper.Init()); + EXPECT_EQ((size_t)kNumberOfThreadsInHelperProgram, dumper.threads().size()); + EXPECT_TRUE(dumper.ThreadsSuspend()); + + ThreadInfo one_thread; + for (size_t i = 0; i < dumper.threads().size(); ++i) { + EXPECT_TRUE(dumper.GetThreadInfoByIndex(i, &one_thread)); + const void* stack; + size_t stack_len; + EXPECT_TRUE(dumper.GetStackInfo(&stack, &stack_len, + one_thread.stack_pointer)); + // In the helper program, we stored a pointer to the thread id in a + // specific register. Check that we can recover its value. 
+#if defined(__ARM_EABI__) + pid_t* process_tid_location = (pid_t*)(one_thread.regs.uregs[3]); +#elif defined(__aarch64__) + pid_t* process_tid_location = (pid_t*)(one_thread.regs.regs[3]); +#elif defined(__i386) + pid_t* process_tid_location = (pid_t*)(one_thread.regs.ecx); +#elif defined(__x86_64) + pid_t* process_tid_location = (pid_t*)(one_thread.regs.rcx); +#elif defined(__mips__) + pid_t* process_tid_location = + reinterpret_cast(one_thread.mcontext.gregs[1]); +#else +#error This test has not been ported to this platform. +#endif + pid_t one_thread_id; + dumper.CopyFromProcess(&one_thread_id, + dumper.threads()[i], + process_tid_location, + 4); + EXPECT_EQ(dumper.threads()[i], one_thread_id); + } + EXPECT_TRUE(dumper.ThreadsResume()); + kill(child_pid, SIGKILL); + + // Reap child + int status; + ASSERT_NE(-1, HANDLE_EINTR(waitpid(child_pid, &status, 0))); + ASSERT_TRUE(WIFSIGNALED(status)); + ASSERT_EQ(SIGKILL, WTERMSIG(status)); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/minidump_writer.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/minidump_writer.cc new file mode 100644 index 0000000000..0414bb72a7 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/minidump_writer.cc @@ -0,0 +1,1373 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// This code writes out minidump files: +// http://msdn.microsoft.com/en-us/library/ms680378(VS.85,loband).aspx +// +// Minidumps are a Microsoft format which Breakpad uses for recording crash +// dumps. This code has to run in a compromised environment (the address space +// may have received SIGSEGV), thus the following rules apply: +// * You may not enter the dynamic linker. This means that we cannot call +// any symbols in a shared library (inc libc). Because of this we replace +// libc functions in linux_libc_support.h. +// * You may not call syscalls via the libc wrappers. 
This rule is a subset +// of the first rule but it bears repeating. We have direct wrappers +// around the system calls in linux_syscall_support.h. +// * You may not malloc. There's an alternative allocator in memory.h and +// a canonical instance in the LinuxDumper object. We use the placement +// new form to allocate objects and we don't delete them. + +#include "client/linux/handler/minidump_descriptor.h" +#include "client/linux/minidump_writer/minidump_writer.h" +#include "client/minidump_file_writer-inl.h" + +#include +#include +#include +#include +#include +#if defined(__ANDROID__) +#include +#endif +#include +#include +#include +#include +#include +#include + +#include + +#include "client/linux/dump_writer_common/seccomp_unwinder.h" +#include "client/linux/dump_writer_common/thread_info.h" +#include "client/linux/dump_writer_common/ucontext_reader.h" +#include "client/linux/handler/exception_handler.h" +#include "client/linux/minidump_writer/cpu_set.h" +#include "client/linux/minidump_writer/line_reader.h" +#include "client/linux/minidump_writer/linux_dumper.h" +#include "client/linux/minidump_writer/linux_ptrace_dumper.h" +#include "client/linux/minidump_writer/proc_cpuinfo_reader.h" +#include "client/minidump_file_writer.h" +#include "common/linux/linux_libc_support.h" +#include "common/minidump_type_helper.h" +#include "google_breakpad/common/minidump_format.h" +#include "third_party/lss/linux_syscall_support.h" + +namespace { + +using google_breakpad::AppMemoryList; +using google_breakpad::ExceptionHandler; +using google_breakpad::CpuSet; +using google_breakpad::LineReader; +using google_breakpad::LinuxDumper; +using google_breakpad::LinuxPtraceDumper; +using google_breakpad::MDTypeHelper; +using google_breakpad::MappingEntry; +using google_breakpad::MappingInfo; +using google_breakpad::MappingList; +using google_breakpad::MinidumpFileWriter; +using google_breakpad::PageAllocator; +using google_breakpad::ProcCpuInfoReader; +using google_breakpad::RawContextCPU; +using google_breakpad::SeccompUnwinder; +using google_breakpad::ThreadInfo; +using google_breakpad::TypedMDRVA; +using google_breakpad::UContextReader; +using google_breakpad::UntypedMDRVA; +using google_breakpad::wasteful_vector; + +typedef MDTypeHelper::MDRawDebug MDRawDebug; +typedef MDTypeHelper::MDRawLinkMap MDRawLinkMap; + +class MinidumpWriter { + public: + // The following kLimit* constants are for when minidump_size_limit_ is set + // and the minidump size might exceed it. + // + // Estimate for how big each thread's stack will be (in bytes). + static const unsigned kLimitAverageThreadStackLength = 8 * 1024; + // Number of threads whose stack size we don't want to limit. These base + // threads will simply be the first N threads returned by the dumper (although + // the crashing thread will never be limited). Threads beyond this count are + // the extra threads. + static const unsigned kLimitBaseThreadCount = 20; + // Maximum stack size to dump for any extra thread (in bytes). + static const unsigned kLimitMaxExtraThreadStackLen = 2 * 1024; + // Make sure this number of additional bytes can fit in the minidump + // (exclude the stack data). + static const unsigned kLimitMinidumpFudgeFactor = 64 * 1024; + + MinidumpWriter(const char* minidump_path, + int minidump_fd, + const ExceptionHandler::CrashContext* context, + const MappingList& mappings, + const AppMemoryList& appmem, + LinuxDumper* dumper) + : fd_(minidump_fd), + path_(minidump_path), + ucontext_(context ? 
&context->context : NULL), +#if !defined(__ARM_EABI__) && !defined(__mips__) + float_state_(context ? &context->float_state : NULL), +#endif + dumper_(dumper), + minidump_size_limit_(-1), + memory_blocks_(dumper_->allocator()), + mapping_list_(mappings), + app_memory_list_(appmem) { + // Assert there should be either a valid fd or a valid path, not both. + assert(fd_ != -1 || minidump_path); + assert(fd_ == -1 || !minidump_path); + } + + bool Init() { + if (!dumper_->Init()) + return false; + + if (fd_ != -1) + minidump_writer_.SetFile(fd_); + else if (!minidump_writer_.Open(path_)) + return false; + + return dumper_->ThreadsSuspend(); + } + + ~MinidumpWriter() { + // Don't close the file descriptor when it's been provided explicitly. + // Callers might still need to use it. + if (fd_ == -1) + minidump_writer_.Close(); + dumper_->ThreadsResume(); + } + + bool Dump() { + // A minidump file contains a number of tagged streams. This is the number + // of stream which we write. + unsigned kNumWriters = 13; + + TypedMDRVA header(&minidump_writer_); + TypedMDRVA dir(&minidump_writer_); + if (!header.Allocate()) + return false; + if (!dir.AllocateArray(kNumWriters)) + return false; + my_memset(header.get(), 0, sizeof(MDRawHeader)); + + header.get()->signature = MD_HEADER_SIGNATURE; + header.get()->version = MD_HEADER_VERSION; + header.get()->time_date_stamp = time(NULL); + header.get()->stream_count = kNumWriters; + header.get()->stream_directory_rva = dir.position(); + + unsigned dir_index = 0; + MDRawDirectory dirent; + + if (!WriteThreadListStream(&dirent)) + return false; + dir.CopyIndex(dir_index++, &dirent); + + if (!WriteMappings(&dirent)) + return false; + dir.CopyIndex(dir_index++, &dirent); + + if (!WriteAppMemory()) + return false; + + if (!WriteMemoryListStream(&dirent)) + return false; + dir.CopyIndex(dir_index++, &dirent); + + if (!WriteExceptionStream(&dirent)) + return false; + dir.CopyIndex(dir_index++, &dirent); + + if (!WriteSystemInfoStream(&dirent)) + return false; + dir.CopyIndex(dir_index++, &dirent); + + dirent.stream_type = MD_LINUX_CPU_INFO; + if (!WriteFile(&dirent.location, "/proc/cpuinfo")) + NullifyDirectoryEntry(&dirent); + dir.CopyIndex(dir_index++, &dirent); + + dirent.stream_type = MD_LINUX_PROC_STATUS; + if (!WriteProcFile(&dirent.location, GetCrashThread(), "status")) + NullifyDirectoryEntry(&dirent); + dir.CopyIndex(dir_index++, &dirent); + + dirent.stream_type = MD_LINUX_LSB_RELEASE; + if (!WriteFile(&dirent.location, "/etc/lsb-release")) + NullifyDirectoryEntry(&dirent); + dir.CopyIndex(dir_index++, &dirent); + + dirent.stream_type = MD_LINUX_CMD_LINE; + if (!WriteProcFile(&dirent.location, GetCrashThread(), "cmdline")) + NullifyDirectoryEntry(&dirent); + dir.CopyIndex(dir_index++, &dirent); + + dirent.stream_type = MD_LINUX_ENVIRON; + if (!WriteProcFile(&dirent.location, GetCrashThread(), "environ")) + NullifyDirectoryEntry(&dirent); + dir.CopyIndex(dir_index++, &dirent); + + dirent.stream_type = MD_LINUX_AUXV; + if (!WriteProcFile(&dirent.location, GetCrashThread(), "auxv")) + NullifyDirectoryEntry(&dirent); + dir.CopyIndex(dir_index++, &dirent); + + dirent.stream_type = MD_LINUX_MAPS; + if (!WriteProcFile(&dirent.location, GetCrashThread(), "maps")) + NullifyDirectoryEntry(&dirent); + dir.CopyIndex(dir_index++, &dirent); + + dirent.stream_type = MD_LINUX_DSO_DEBUG; + if (!WriteDSODebugStream(&dirent)) + NullifyDirectoryEntry(&dirent); + dir.CopyIndex(dir_index++, &dirent); + + // If you add more directory entries, don't forget to update kNumWriters, + // 
above. + + dumper_->ThreadsResume(); + return true; + } + + bool FillThreadStack(MDRawThread* thread, uintptr_t stack_pointer, + int max_stack_len, uint8_t** stack_copy) { + *stack_copy = NULL; + const void* stack; + size_t stack_len; + if (dumper_->GetStackInfo(&stack, &stack_len, stack_pointer)) { + UntypedMDRVA memory(&minidump_writer_); + if (max_stack_len >= 0 && + stack_len > static_cast(max_stack_len)) { + stack_len = max_stack_len; + } + if (!memory.Allocate(stack_len)) + return false; + *stack_copy = reinterpret_cast(Alloc(stack_len)); + dumper_->CopyFromProcess(*stack_copy, thread->thread_id, stack, + stack_len); + memory.Copy(*stack_copy, stack_len); + thread->stack.start_of_memory_range = + reinterpret_cast(stack); + thread->stack.memory = memory.location(); + memory_blocks_.push_back(thread->stack); + } else { + thread->stack.start_of_memory_range = stack_pointer; + thread->stack.memory.data_size = 0; + thread->stack.memory.rva = minidump_writer_.position(); + } + return true; + } + + // Write information about the threads. + bool WriteThreadListStream(MDRawDirectory* dirent) { + const unsigned num_threads = dumper_->threads().size(); + + TypedMDRVA list(&minidump_writer_); + if (!list.AllocateObjectAndArray(num_threads, sizeof(MDRawThread))) + return false; + + dirent->stream_type = MD_THREAD_LIST_STREAM; + dirent->location = list.location(); + + *list.get() = num_threads; + + // If there's a minidump size limit, check if it might be exceeded. Since + // most of the space is filled with stack data, just check against that. + // If this expects to exceed the limit, set extra_thread_stack_len such + // that any thread beyond the first kLimitBaseThreadCount threads will + // have only kLimitMaxExtraThreadStackLen bytes dumped. + int extra_thread_stack_len = -1; // default to no maximum + if (minidump_size_limit_ >= 0) { + const unsigned estimated_total_stack_size = num_threads * + kLimitAverageThreadStackLength; + const off_t estimated_minidump_size = minidump_writer_.position() + + estimated_total_stack_size + kLimitMinidumpFudgeFactor; + if (estimated_minidump_size > minidump_size_limit_) + extra_thread_stack_len = kLimitMaxExtraThreadStackLen; + } + + for (unsigned i = 0; i < num_threads; ++i) { + MDRawThread thread; + my_memset(&thread, 0, sizeof(thread)); + thread.thread_id = dumper_->threads()[i]; + + // We have a different source of information for the crashing thread. If + // we used the actual state of the thread we would find it running in the + // signal handler with the alternative stack, which would be deeply + // unhelpful. + if (static_cast(thread.thread_id) == GetCrashThread() && + ucontext_ && + !dumper_->IsPostMortem()) { + uint8_t* stack_copy; + const uintptr_t stack_ptr = UContextReader::GetStackPointer(ucontext_); + if (!FillThreadStack(&thread, stack_ptr, -1, &stack_copy)) + return false; + + // Copy 256 bytes around crashing instruction pointer to minidump. + const size_t kIPMemorySize = 256; + uint64_t ip = UContextReader::GetInstructionPointer(ucontext_); + // Bound it to the upper and lower bounds of the memory map + // it's contained within. If it's not in mapped memory, + // don't bother trying to write it. 
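That clamping boils down to one std::max and one std::min. A standalone sketch of the arithmetic (Range and ClampAroundIP are ad-hoc names; like the patch, it assumes the IP sits far enough above address 0 that ip - kWindow/2 cannot wrap):

#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>

struct Range {
  uintptr_t start;
  size_t size;
};

// Capture up to kWindow bytes centred on |ip| without leaving the
// [map_start, map_start + map_size) mapping that contains it.
static Range ClampAroundIP(uintptr_t ip, uintptr_t map_start, size_t map_size,
                           size_t kWindow = 256) {
  const uintptr_t lo = std::max(map_start, ip - kWindow / 2);
  const uintptr_t hi = std::min(ip + kWindow / 2, map_start + map_size);
  return Range{lo, static_cast<size_t>(hi - lo)};
}

int main() {
  // IP only 16 bytes into the mapping: the left half of the window is cut.
  Range r = ClampAroundIP(0x1010, 0x1000, 0x4000);
  assert(r.start == 0x1000 && r.size == 16 + 128);
  return 0;
}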
+ bool ip_is_mapped = false; + MDMemoryDescriptor ip_memory_d; + for (unsigned j = 0; j < dumper_->mappings().size(); ++j) { + const MappingInfo& mapping = *dumper_->mappings()[j]; + if (ip >= mapping.start_addr && + ip < mapping.start_addr + mapping.size) { + ip_is_mapped = true; + // Try to get 128 bytes before and after the IP, but + // settle for whatever's available. + ip_memory_d.start_of_memory_range = + std::max(mapping.start_addr, + uintptr_t(ip - (kIPMemorySize / 2))); + uintptr_t end_of_range = + std::min(uintptr_t(ip + (kIPMemorySize / 2)), + uintptr_t(mapping.start_addr + mapping.size)); + ip_memory_d.memory.data_size = + end_of_range - ip_memory_d.start_of_memory_range; + break; + } + } + + if (ip_is_mapped) { + UntypedMDRVA ip_memory(&minidump_writer_); + if (!ip_memory.Allocate(ip_memory_d.memory.data_size)) + return false; + uint8_t* memory_copy = + reinterpret_cast(Alloc(ip_memory_d.memory.data_size)); + dumper_->CopyFromProcess( + memory_copy, + thread.thread_id, + reinterpret_cast(ip_memory_d.start_of_memory_range), + ip_memory_d.memory.data_size); + ip_memory.Copy(memory_copy, ip_memory_d.memory.data_size); + ip_memory_d.memory = ip_memory.location(); + memory_blocks_.push_back(ip_memory_d); + } + + TypedMDRVA cpu(&minidump_writer_); + if (!cpu.Allocate()) + return false; + my_memset(cpu.get(), 0, sizeof(RawContextCPU)); +#if !defined(__ARM_EABI__) && !defined(__mips__) + UContextReader::FillCPUContext(cpu.get(), ucontext_, float_state_); +#else + UContextReader::FillCPUContext(cpu.get(), ucontext_); +#endif + if (stack_copy) + SeccompUnwinder::PopSeccompStackFrame(cpu.get(), thread, stack_copy); + thread.thread_context = cpu.location(); + crashing_thread_context_ = cpu.location(); + } else { + ThreadInfo info; + if (!dumper_->GetThreadInfoByIndex(i, &info)) + return false; + + uint8_t* stack_copy; + int max_stack_len = -1; // default to no maximum for this thread + if (minidump_size_limit_ >= 0 && i >= kLimitBaseThreadCount) + max_stack_len = extra_thread_stack_len; + if (!FillThreadStack(&thread, info.stack_pointer, max_stack_len, + &stack_copy)) + return false; + + TypedMDRVA cpu(&minidump_writer_); + if (!cpu.Allocate()) + return false; + my_memset(cpu.get(), 0, sizeof(RawContextCPU)); + info.FillCPUContext(cpu.get()); + if (stack_copy) + SeccompUnwinder::PopSeccompStackFrame(cpu.get(), thread, stack_copy); + thread.thread_context = cpu.location(); + if (dumper_->threads()[i] == GetCrashThread()) { + crashing_thread_context_ = cpu.location(); + if (!dumper_->IsPostMortem()) { + // This is the crashing thread of a live process, but + // no context was provided, so set the crash address + // while the instruction pointer is already here. + dumper_->set_crash_address(info.GetInstructionPointer()); + } + } + } + + list.CopyIndexAfterObject(i, &thread, sizeof(thread)); + } + + return true; + } + + // Write application-provided memory regions. 
+ bool WriteAppMemory() { + for (AppMemoryList::const_iterator iter = app_memory_list_.begin(); + iter != app_memory_list_.end(); + ++iter) { + uint8_t* data_copy = + reinterpret_cast(dumper_->allocator()->Alloc(iter->length)); + dumper_->CopyFromProcess(data_copy, GetCrashThread(), iter->ptr, + iter->length); + + UntypedMDRVA memory(&minidump_writer_); + if (!memory.Allocate(iter->length)) { + return false; + } + memory.Copy(data_copy, iter->length); + MDMemoryDescriptor desc; + desc.start_of_memory_range = reinterpret_cast(iter->ptr); + desc.memory = memory.location(); + memory_blocks_.push_back(desc); + } + + return true; + } + + static bool ShouldIncludeMapping(const MappingInfo& mapping) { + if (mapping.name[0] == 0 || // only want modules with filenames. + // Only want to include one mapping per shared lib. + // Avoid filtering executable mappings. + (mapping.offset != 0 && !mapping.exec) || + mapping.size < 4096) { // too small to get a signature for. + return false; + } + + return true; + } + + // If there is caller-provided information about this mapping + // in the mapping_list_ list, return true. Otherwise, return false. + bool HaveMappingInfo(const MappingInfo& mapping) { + for (MappingList::const_iterator iter = mapping_list_.begin(); + iter != mapping_list_.end(); + ++iter) { + // Ignore any mappings that are wholly contained within + // mappings in the mapping_info_ list. + if (mapping.start_addr >= iter->first.start_addr && + (mapping.start_addr + mapping.size) <= + (iter->first.start_addr + iter->first.size)) { + return true; + } + } + return false; + } + + // Write information about the mappings in effect. Because we are using the + // minidump format, the information about the mappings is pretty limited. + // Because of this, we also include the full, unparsed, /proc/$x/maps file in + // another stream in the file. + bool WriteMappings(MDRawDirectory* dirent) { + const unsigned num_mappings = dumper_->mappings().size(); + unsigned num_output_mappings = mapping_list_.size(); + + for (unsigned i = 0; i < dumper_->mappings().size(); ++i) { + const MappingInfo& mapping = *dumper_->mappings()[i]; + if (ShouldIncludeMapping(mapping) && !HaveMappingInfo(mapping)) + num_output_mappings++; + } + + TypedMDRVA list(&minidump_writer_); + if (num_output_mappings) { + if (!list.AllocateObjectAndArray(num_output_mappings, MD_MODULE_SIZE)) + return false; + } else { + // Still create the module list stream, although it will have zero + // modules. + if (!list.Allocate()) + return false; + } + + dirent->stream_type = MD_MODULE_LIST_STREAM; + dirent->location = list.location(); + *list.get() = num_output_mappings; + + // First write all the mappings from the dumper + unsigned int j = 0; + for (unsigned i = 0; i < num_mappings; ++i) { + const MappingInfo& mapping = *dumper_->mappings()[i]; + if (!ShouldIncludeMapping(mapping) || HaveMappingInfo(mapping)) + continue; + + MDRawModule mod; + if (!FillRawModule(mapping, true, i, mod, NULL)) + return false; + list.CopyIndexAfterObject(j++, &mod, MD_MODULE_SIZE); + } + // Next write all the mappings provided by the caller + for (MappingList::const_iterator iter = mapping_list_.begin(); + iter != mapping_list_.end(); + ++iter) { + MDRawModule mod; + if (!FillRawModule(iter->first, false, 0, mod, iter->second)) + return false; + list.CopyIndexAfterObject(j++, &mod, MD_MODULE_SIZE); + } + + return true; + } + + // Fill the MDRawModule |mod| with information about the provided + // |mapping|. 
If |identifier| is non-NULL, use it instead of calculating + // a file ID from the mapping. + bool FillRawModule(const MappingInfo& mapping, + bool member, + unsigned int mapping_id, + MDRawModule& mod, + const uint8_t* identifier) { + my_memset(&mod, 0, MD_MODULE_SIZE); + + mod.base_of_image = mapping.start_addr; + mod.size_of_image = mapping.size; + + uint8_t cv_buf[MDCVInfoPDB70_minsize + NAME_MAX]; + uint8_t* cv_ptr = cv_buf; + + const uint32_t cv_signature = MD_CVINFOPDB70_SIGNATURE; + my_memcpy(cv_ptr, &cv_signature, sizeof(cv_signature)); + cv_ptr += sizeof(cv_signature); + uint8_t* signature = cv_ptr; + cv_ptr += sizeof(MDGUID); + if (identifier) { + // GUID was provided by caller. + my_memcpy(signature, identifier, sizeof(MDGUID)); + } else { + // Note: ElfFileIdentifierForMapping() can manipulate the |mapping.name|. + dumper_->ElfFileIdentifierForMapping(mapping, member, + mapping_id, signature); + } + my_memset(cv_ptr, 0, sizeof(uint32_t)); // Set age to 0 on Linux. + cv_ptr += sizeof(uint32_t); + + char file_name[NAME_MAX]; + char file_path[NAME_MAX]; + LinuxDumper::GetMappingEffectiveNameAndPath( + mapping, file_path, sizeof(file_path), file_name, sizeof(file_name)); + + const size_t file_name_len = my_strlen(file_name); + UntypedMDRVA cv(&minidump_writer_); + if (!cv.Allocate(MDCVInfoPDB70_minsize + file_name_len + 1)) + return false; + + // Write pdb_file_name + my_memcpy(cv_ptr, file_name, file_name_len + 1); + cv.Copy(cv_buf, MDCVInfoPDB70_minsize + file_name_len + 1); + + mod.cv_record = cv.location(); + + MDLocationDescriptor ld; + if (!minidump_writer_.WriteString(file_path, my_strlen(file_path), &ld)) + return false; + mod.module_name_rva = ld.rva; + return true; + } + + bool WriteMemoryListStream(MDRawDirectory* dirent) { + TypedMDRVA list(&minidump_writer_); + if (memory_blocks_.size()) { + if (!list.AllocateObjectAndArray(memory_blocks_.size(), + sizeof(MDMemoryDescriptor))) + return false; + } else { + // Still create the memory list stream, although it will have zero + // memory blocks. 
+ if (!list.Allocate()) + return false; + } + + dirent->stream_type = MD_MEMORY_LIST_STREAM; + dirent->location = list.location(); + + *list.get() = memory_blocks_.size(); + + for (size_t i = 0; i < memory_blocks_.size(); ++i) { + list.CopyIndexAfterObject(i, &memory_blocks_[i], + sizeof(MDMemoryDescriptor)); + } + return true; + } + + bool WriteExceptionStream(MDRawDirectory* dirent) { + TypedMDRVA exc(&minidump_writer_); + if (!exc.Allocate()) + return false; + my_memset(exc.get(), 0, sizeof(MDRawExceptionStream)); + + dirent->stream_type = MD_EXCEPTION_STREAM; + dirent->location = exc.location(); + + exc.get()->thread_id = GetCrashThread(); + exc.get()->exception_record.exception_code = dumper_->crash_signal(); + exc.get()->exception_record.exception_address = dumper_->crash_address(); + exc.get()->thread_context = crashing_thread_context_; + + return true; + } + + bool WriteSystemInfoStream(MDRawDirectory* dirent) { + TypedMDRVA si(&minidump_writer_); + if (!si.Allocate()) + return false; + my_memset(si.get(), 0, sizeof(MDRawSystemInfo)); + + dirent->stream_type = MD_SYSTEM_INFO_STREAM; + dirent->location = si.location(); + + WriteCPUInformation(si.get()); + WriteOSInformation(si.get()); + + return true; + } + + bool WriteDSODebugStream(MDRawDirectory* dirent) { + ElfW(Phdr)* phdr = reinterpret_cast(dumper_->auxv()[AT_PHDR]); + char* base; + int phnum = dumper_->auxv()[AT_PHNUM]; + if (!phnum || !phdr) + return false; + + // Assume the program base is at the beginning of the same page as the PHDR + base = reinterpret_cast(reinterpret_cast(phdr) & ~0xfff); + + // Search for the program PT_DYNAMIC segment + ElfW(Addr) dyn_addr = 0; + for (; phnum >= 0; phnum--, phdr++) { + ElfW(Phdr) ph; + if (!dumper_->CopyFromProcess(&ph, GetCrashThread(), phdr, sizeof(ph))) + return false; + + // Adjust base address with the virtual address of the PT_LOAD segment + // corresponding to offset 0 + if (ph.p_type == PT_LOAD && ph.p_offset == 0) { + base -= ph.p_vaddr; + } + if (ph.p_type == PT_DYNAMIC) { + dyn_addr = ph.p_vaddr; + } + } + if (!dyn_addr) + return false; + + ElfW(Dyn) *dynamic = reinterpret_cast(dyn_addr + base); + + // The dynamic linker makes information available that helps gdb find all + // DSOs loaded into the program. If this information is indeed available, + // dump it to a MD_LINUX_DSO_DEBUG stream. + struct r_debug* r_debug = NULL; + uint32_t dynamic_length = 0; + + for (int i = 0; ; ++i) { + ElfW(Dyn) dyn; + dynamic_length += sizeof(dyn); + if (!dumper_->CopyFromProcess(&dyn, GetCrashThread(), dynamic + i, + sizeof(dyn))) { + return false; + } + +#ifdef __mips__ + if (dyn.d_tag == DT_MIPS_RLD_MAP) { + r_debug = reinterpret_cast(dyn.d_un.d_ptr); + continue; + } +#else + if (dyn.d_tag == DT_DEBUG) { + r_debug = reinterpret_cast(dyn.d_un.d_ptr); + continue; + } +#endif + else if (dyn.d_tag == DT_NULL) { + break; + } + } + + // The "r_map" field of that r_debug struct contains a linked list of all + // loaded DSOs. + // Our list of DSOs potentially is different from the ones in the crashing + // process. So, we have to be careful to never dereference pointers + // directly. Instead, we use CopyFromProcess() everywhere. + // See for a more detailed discussion of the how the dynamic + // loader communicates with debuggers. 
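+ // For reference, the <link.h> structures walked below look roughly like
+ // this (simplified sketch, not the authoritative glibc/bionic definitions):
+ //
+ //   struct r_debug {
+ //     int              r_version;   // debug protocol version
+ //     struct link_map* r_map;       // head of the list of loaded DSOs
+ //     ElfW(Addr)       r_brk;       // loader hook address for debuggers
+ //     int              r_state;     // RT_CONSISTENT / RT_ADD / RT_DELETE
+ //     ElfW(Addr)       r_ldbase;    // base address of the dynamic loader
+ //   };
+ //   struct link_map {
+ //     ElfW(Addr)       l_addr;      // load bias of this DSO
+ //     char*            l_name;      // pathname, in the inferior's memory
+ //     ElfW(Dyn)*       l_ld;        // its PT_DYNAMIC segment
+ //     struct link_map* l_next;
+ //     struct link_map* l_prev;
+ //   };
+ //
+ // All of these pointers are addresses in the crashing process, which is
+ // why every node below is fetched with CopyFromProcess() rather than
+ // dereferenced directly.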
+ + // Count the number of loaded DSOs + int dso_count = 0; + struct r_debug debug_entry; + if (!dumper_->CopyFromProcess(&debug_entry, GetCrashThread(), r_debug, + sizeof(debug_entry))) { + return false; + } + for (struct link_map* ptr = debug_entry.r_map; ptr; ) { + struct link_map map; + if (!dumper_->CopyFromProcess(&map, GetCrashThread(), ptr, sizeof(map))) + return false; + + ptr = map.l_next; + dso_count++; + } + + MDRVA linkmap_rva = minidump_writer_.kInvalidMDRVA; + if (dso_count > 0) { + // If we have at least one DSO, create an array of MDRawLinkMap + // entries in the minidump file. + TypedMDRVA linkmap(&minidump_writer_); + if (!linkmap.AllocateArray(dso_count)) + return false; + linkmap_rva = linkmap.location().rva; + int idx = 0; + + // Iterate over DSOs and write their information to mini dump + for (struct link_map* ptr = debug_entry.r_map; ptr; ) { + struct link_map map; + if (!dumper_->CopyFromProcess(&map, GetCrashThread(), ptr, sizeof(map))) + return false; + + ptr = map.l_next; + char filename[257] = { 0 }; + if (map.l_name) { + dumper_->CopyFromProcess(filename, GetCrashThread(), map.l_name, + sizeof(filename) - 1); + } + MDLocationDescriptor location; + if (!minidump_writer_.WriteString(filename, 0, &location)) + return false; + MDRawLinkMap entry; + entry.name = location.rva; + entry.addr = map.l_addr; + entry.ld = reinterpret_cast(map.l_ld); + linkmap.CopyIndex(idx++, &entry); + } + } + + // Write MD_LINUX_DSO_DEBUG record + TypedMDRVA debug(&minidump_writer_); + if (!debug.AllocateObjectAndArray(1, dynamic_length)) + return false; + my_memset(debug.get(), 0, sizeof(MDRawDebug)); + dirent->stream_type = MD_LINUX_DSO_DEBUG; + dirent->location = debug.location(); + + debug.get()->version = debug_entry.r_version; + debug.get()->map = linkmap_rva; + debug.get()->dso_count = dso_count; + debug.get()->brk = debug_entry.r_brk; + debug.get()->ldbase = debug_entry.r_ldbase; + debug.get()->dynamic = reinterpret_cast(dynamic); + + wasteful_vector dso_debug_data(dumper_->allocator(), dynamic_length); + // The passed-in size to the constructor (above) is only a hint. + // Must call .resize() to do actual initialization of the elements. 
+ dso_debug_data.resize(dynamic_length); + dumper_->CopyFromProcess(&dso_debug_data[0], GetCrashThread(), dynamic, + dynamic_length); + debug.CopyIndexAfterObject(0, &dso_debug_data[0], dynamic_length); + + return true; + } + + void set_minidump_size_limit(off_t limit) { minidump_size_limit_ = limit; } + + private: + void* Alloc(unsigned bytes) { + return dumper_->allocator()->Alloc(bytes); + } + + pid_t GetCrashThread() const { + return dumper_->crash_thread(); + } + + void NullifyDirectoryEntry(MDRawDirectory* dirent) { + dirent->stream_type = 0; + dirent->location.data_size = 0; + dirent->location.rva = 0; + } + +#if defined(__i386__) || defined(__x86_64__) || defined(__mips__) + bool WriteCPUInformation(MDRawSystemInfo* sys_info) { + char vendor_id[sizeof(sys_info->cpu.x86_cpu_info.vendor_id) + 1] = {0}; + static const char vendor_id_name[] = "vendor_id"; + + struct CpuInfoEntry { + const char* info_name; + int value; + bool found; + } cpu_info_table[] = { + { "processor", -1, false }, +#if defined(__i386__) || defined(__x86_64__) + { "model", 0, false }, + { "stepping", 0, false }, + { "cpu family", 0, false }, +#endif + }; + + // processor_architecture should always be set, do this first + sys_info->processor_architecture = +#if defined(__mips__) + MD_CPU_ARCHITECTURE_MIPS; +#elif defined(__i386__) + MD_CPU_ARCHITECTURE_X86; +#else + MD_CPU_ARCHITECTURE_AMD64; +#endif + + const int fd = sys_open("/proc/cpuinfo", O_RDONLY, 0); + if (fd < 0) + return false; + + { + PageAllocator allocator; + ProcCpuInfoReader* const reader = new(allocator) ProcCpuInfoReader(fd); + const char* field; + while (reader->GetNextField(&field)) { + for (size_t i = 0; + i < sizeof(cpu_info_table) / sizeof(cpu_info_table[0]); + i++) { + CpuInfoEntry* entry = &cpu_info_table[i]; + if (i > 0 && entry->found) { + // except for the 'processor' field, ignore repeated values. + continue; + } + if (!my_strcmp(field, entry->info_name)) { + size_t value_len; + const char* value = reader->GetValueAndLen(&value_len); + if (value_len == 0) + continue; + + uintptr_t val; + if (my_read_decimal_ptr(&val, value) == value) + continue; + + entry->value = static_cast(val); + entry->found = true; + } + } + + // special case for vendor_id + if (!my_strcmp(field, vendor_id_name)) { + size_t value_len; + const char* value = reader->GetValueAndLen(&value_len); + if (value_len > 0) + my_strlcpy(vendor_id, value, sizeof(vendor_id)); + } + } + sys_close(fd); + } + + // make sure we got everything we wanted + for (size_t i = 0; + i < sizeof(cpu_info_table) / sizeof(cpu_info_table[0]); + i++) { + if (!cpu_info_table[i].found) { + return false; + } + } + // cpu_info_table[0] holds the last cpu id listed in /proc/cpuinfo, + // assuming this is the highest id, change it to the number of CPUs + // by adding one. + cpu_info_table[0].value++; + + sys_info->number_of_processors = cpu_info_table[0].value; +#if defined(__i386__) || defined(__x86_64__) + sys_info->processor_level = cpu_info_table[3].value; + sys_info->processor_revision = cpu_info_table[1].value << 8 | + cpu_info_table[2].value; +#endif + + if (vendor_id[0] != '\0') { + my_memcpy(sys_info->cpu.x86_cpu_info.vendor_id, vendor_id, + sizeof(sys_info->cpu.x86_cpu_info.vendor_id)); + } + return true; + } +#elif defined(__arm__) || defined(__aarch64__) + bool WriteCPUInformation(MDRawSystemInfo* sys_info) { + // The CPUID value is broken up in several entries in /proc/cpuinfo. + // This table is used to rebuild it from the entries. 
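+ // As an illustration (assumed, made-up values), /proc/cpuinfo entries such
+ // as
+ //   CPU implementer : 0x41
+ //   CPU variant     : 0x0
+ //   CPU part        : 0xc09
+ //   CPU revision    : 10
+ // are reassembled by the parsing loop below into
+ //   cpuid = (0x41 << 24) | (0x0 << 20) | (0xc09 << 4) | (10 & 0xf)
+ //         = 0x4100c09a
+ // The architecture nibble of the real MIDR register (bits 16-19) cannot be
+ // recovered from /proc/cpuinfo, so it is left as zero.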
+ const struct CpuIdEntry { + const char* field; + char format; + char bit_lshift; + char bit_length; + } cpu_id_entries[] = { + { "CPU implementer", 'x', 24, 8 }, + { "CPU variant", 'x', 20, 4 }, + { "CPU part", 'x', 4, 12 }, + { "CPU revision", 'd', 0, 4 }, + }; + + // The ELF hwcaps are listed in the "Features" entry as textual tags. + // This table is used to rebuild them. + const struct CpuFeaturesEntry { + const char* tag; + uint32_t hwcaps; + } cpu_features_entries[] = { +#if defined(__arm__) + { "swp", MD_CPU_ARM_ELF_HWCAP_SWP }, + { "half", MD_CPU_ARM_ELF_HWCAP_HALF }, + { "thumb", MD_CPU_ARM_ELF_HWCAP_THUMB }, + { "26bit", MD_CPU_ARM_ELF_HWCAP_26BIT }, + { "fastmult", MD_CPU_ARM_ELF_HWCAP_FAST_MULT }, + { "fpa", MD_CPU_ARM_ELF_HWCAP_FPA }, + { "vfp", MD_CPU_ARM_ELF_HWCAP_VFP }, + { "edsp", MD_CPU_ARM_ELF_HWCAP_EDSP }, + { "java", MD_CPU_ARM_ELF_HWCAP_JAVA }, + { "iwmmxt", MD_CPU_ARM_ELF_HWCAP_IWMMXT }, + { "crunch", MD_CPU_ARM_ELF_HWCAP_CRUNCH }, + { "thumbee", MD_CPU_ARM_ELF_HWCAP_THUMBEE }, + { "neon", MD_CPU_ARM_ELF_HWCAP_NEON }, + { "vfpv3", MD_CPU_ARM_ELF_HWCAP_VFPv3 }, + { "vfpv3d16", MD_CPU_ARM_ELF_HWCAP_VFPv3D16 }, + { "tls", MD_CPU_ARM_ELF_HWCAP_TLS }, + { "vfpv4", MD_CPU_ARM_ELF_HWCAP_VFPv4 }, + { "idiva", MD_CPU_ARM_ELF_HWCAP_IDIVA }, + { "idivt", MD_CPU_ARM_ELF_HWCAP_IDIVT }, + { "idiv", MD_CPU_ARM_ELF_HWCAP_IDIVA | MD_CPU_ARM_ELF_HWCAP_IDIVT }, +#elif defined(__aarch64__) + // No hwcaps on aarch64. +#endif + }; + + // processor_architecture should always be set, do this first + sys_info->processor_architecture = +#if defined(__aarch64__) + MD_CPU_ARCHITECTURE_ARM64; +#else + MD_CPU_ARCHITECTURE_ARM; +#endif + + // /proc/cpuinfo is not readable under various sandboxed environments + // (e.g. Android services with the android:isolatedProcess attribute) + // prepare for this by setting default values now, which will be + // returned when this happens. + // + // Note: Bogus values are used to distinguish between failures (to + // read /sys and /proc files) and really badly configured kernels. + sys_info->number_of_processors = 0; + sys_info->processor_level = 1U; // There is no ARMv1 + sys_info->processor_revision = 42; + sys_info->cpu.arm_cpu_info.cpuid = 0; + sys_info->cpu.arm_cpu_info.elf_hwcaps = 0; + + // Counting the number of CPUs involves parsing two sysfs files, + // because the content of /proc/cpuinfo will only mirror the number + // of 'online' cores, and thus will vary with time. + // See http://www.kernel.org/doc/Documentation/cputopology.txt + { + CpuSet cpus_present; + CpuSet cpus_possible; + + int fd = sys_open("/sys/devices/system/cpu/present", O_RDONLY, 0); + if (fd >= 0) { + cpus_present.ParseSysFile(fd); + sys_close(fd); + + fd = sys_open("/sys/devices/system/cpu/possible", O_RDONLY, 0); + if (fd >= 0) { + cpus_possible.ParseSysFile(fd); + sys_close(fd); + + cpus_present.IntersectWith(cpus_possible); + int cpu_count = cpus_present.GetCount(); + if (cpu_count > 255) + cpu_count = 255; + sys_info->number_of_processors = static_cast(cpu_count); + } + } + } + + // Parse /proc/cpuinfo to reconstruct the CPUID value, as well + // as the ELF hwcaps field. For the latter, it would be easier to + // read /proc/self/auxv but unfortunately, this file is not always + // readable from regular Android applications on later versions + // (>= 4.1) of the Android platform. + const int fd = sys_open("/proc/cpuinfo", O_RDONLY, 0); + if (fd < 0) { + // Do not return false here to allow the minidump generation + // to happen properly. 
+ return true; + } + + { + PageAllocator allocator; + ProcCpuInfoReader* const reader = + new(allocator) ProcCpuInfoReader(fd); + const char* field; + while (reader->GetNextField(&field)) { + for (size_t i = 0; + i < sizeof(cpu_id_entries)/sizeof(cpu_id_entries[0]); + ++i) { + const CpuIdEntry* entry = &cpu_id_entries[i]; + if (my_strcmp(entry->field, field) != 0) + continue; + uintptr_t result = 0; + const char* value = reader->GetValue(); + const char* p = value; + if (value[0] == '0' && value[1] == 'x') { + p = my_read_hex_ptr(&result, value+2); + } else if (entry->format == 'x') { + p = my_read_hex_ptr(&result, value); + } else { + p = my_read_decimal_ptr(&result, value); + } + if (p == value) + continue; + + result &= (1U << entry->bit_length)-1; + result <<= entry->bit_lshift; + sys_info->cpu.arm_cpu_info.cpuid |= + static_cast(result); + } +#if defined(__arm__) + // Get the architecture version from the "Processor" field. + // Note that it is also available in the "CPU architecture" field, + // however, some existing kernels are misconfigured and will report + // invalid values here (e.g. 6, while the CPU is ARMv7-A based). + // The "Processor" field doesn't have this issue. + if (!my_strcmp(field, "Processor")) { + size_t value_len; + const char* value = reader->GetValueAndLen(&value_len); + // Expected format: (v) + // Where is some text like "ARMv7 Processor rev 2" + // and is a decimal corresponding to the ARM + // architecture number. is either 'l' or 'b' + // and corresponds to the endianess, it is ignored here. + while (value_len > 0 && my_isspace(value[value_len-1])) + value_len--; + + size_t nn = value_len; + while (nn > 0 && value[nn-1] != '(') + nn--; + if (nn > 0 && value[nn] == 'v') { + uintptr_t arch_level = 5; + my_read_decimal_ptr(&arch_level, value + nn + 1); + sys_info->processor_level = static_cast(arch_level); + } + } +#elif defined(__aarch64__) + // The aarch64 architecture does not provide the architecture level + // in the Processor field, so we instead check the "CPU architecture" + // field. + if (!my_strcmp(field, "CPU architecture")) { + uintptr_t arch_level = 0; + const char* value = reader->GetValue(); + const char* p = value; + p = my_read_decimal_ptr(&arch_level, value); + if (p == value) + continue; + sys_info->processor_level = static_cast(arch_level); + } +#endif + // Rebuild the ELF hwcaps from the 'Features' field. + if (!my_strcmp(field, "Features")) { + size_t value_len; + const char* value = reader->GetValueAndLen(&value_len); + + // Parse each space-separated tag. + while (value_len > 0) { + const char* tag = value; + size_t tag_len = value_len; + const char* p = my_strchr(tag, ' '); + if (p != NULL) { + tag_len = static_cast(p - tag); + value += tag_len + 1; + value_len -= tag_len + 1; + } else { + tag_len = strlen(tag); + value_len = 0; + } + for (size_t i = 0; + i < sizeof(cpu_features_entries)/ + sizeof(cpu_features_entries[0]); + ++i) { + const CpuFeaturesEntry* entry = &cpu_features_entries[i]; + if (tag_len == strlen(entry->tag) && + !memcmp(tag, entry->tag, tag_len)) { + sys_info->cpu.arm_cpu_info.elf_hwcaps |= entry->hwcaps; + break; + } + } + } + } + } + sys_close(fd); + } + + return true; + } +#else +# error "Unsupported CPU" +#endif + + bool WriteFile(MDLocationDescriptor* result, const char* filename) { + const int fd = sys_open(filename, O_RDONLY, 0); + if (fd < 0) + return false; + + // We can't stat the files because several of the files that we want to + // read are kernel seqfiles, which always have a length of zero. 
So we have + // to read as much as we can into a buffer. + static const unsigned kBufSize = 1024 - 2*sizeof(void*); + struct Buffers { + Buffers* next; + size_t len; + uint8_t data[kBufSize]; + } *buffers = reinterpret_cast(Alloc(sizeof(Buffers))); + buffers->next = NULL; + buffers->len = 0; + + size_t total = 0; + for (Buffers* bufptr = buffers;;) { + ssize_t r; + do { + r = sys_read(fd, &bufptr->data[bufptr->len], kBufSize - bufptr->len); + } while (r == -1 && errno == EINTR); + + if (r < 1) + break; + + total += r; + bufptr->len += r; + if (bufptr->len == kBufSize) { + bufptr->next = reinterpret_cast(Alloc(sizeof(Buffers))); + bufptr = bufptr->next; + bufptr->next = NULL; + bufptr->len = 0; + } + } + sys_close(fd); + + if (!total) + return false; + + UntypedMDRVA memory(&minidump_writer_); + if (!memory.Allocate(total)) + return false; + for (MDRVA pos = memory.position(); buffers; buffers = buffers->next) { + // Check for special case of a zero-length buffer. This should only + // occur if a file's size happens to be a multiple of the buffer's + // size, in which case the final sys_read() will have resulted in + // zero bytes being read after the final buffer was just allocated. + if (buffers->len == 0) { + // This can only occur with final buffer. + assert(buffers->next == NULL); + continue; + } + memory.Copy(pos, &buffers->data, buffers->len); + pos += buffers->len; + } + *result = memory.location(); + return true; + } + + bool WriteOSInformation(MDRawSystemInfo* sys_info) { +#if defined(__ANDROID__) + sys_info->platform_id = MD_OS_ANDROID; +#else + sys_info->platform_id = MD_OS_LINUX; +#endif + + struct utsname uts; + if (uname(&uts)) + return false; + + static const size_t buf_len = 512; + char buf[buf_len] = {0}; + size_t space_left = buf_len - 1; + const char* info_table[] = { + uts.sysname, + uts.release, + uts.version, + uts.machine, + NULL + }; + bool first_item = true; + for (const char** cur_info = info_table; *cur_info; cur_info++) { + static const char separator[] = " "; + size_t separator_len = sizeof(separator) - 1; + size_t info_len = my_strlen(*cur_info); + if (info_len == 0) + continue; + + if (space_left < info_len + (first_item ? 0 : separator_len)) + break; + + if (!first_item) { + my_strlcat(buf, separator, sizeof(buf)); + space_left -= separator_len; + } + + first_item = false; + my_strlcat(buf, *cur_info, sizeof(buf)); + space_left -= info_len; + } + + MDLocationDescriptor location; + if (!minidump_writer_.WriteString(buf, 0, &location)) + return false; + sys_info->csd_version_rva = location.rva; + + return true; + } + + bool WriteProcFile(MDLocationDescriptor* result, pid_t pid, + const char* filename) { + char buf[NAME_MAX]; + if (!dumper_->BuildProcPath(buf, pid, filename)) + return false; + return WriteFile(result, buf); + } + + // Only one of the 2 member variables below should be set to a valid value. + const int fd_; // File descriptor where the minidum should be written. + const char* path_; // Path to the file where the minidum should be written. + + const struct ucontext* const ucontext_; // also from the signal handler +#if !defined(__ARM_EABI__) && !defined(__mips__) + const google_breakpad::fpstate_t* const float_state_; // ditto +#endif + LinuxDumper* dumper_; + MinidumpFileWriter minidump_writer_; + off_t minidump_size_limit_; + MDLocationDescriptor crashing_thread_context_; + // Blocks of memory written to the dump. 
These are all currently + // written while writing the thread list stream, but saved here + // so a memory list stream can be written afterwards. + wasteful_vector memory_blocks_; + // Additional information about some mappings provided by the caller. + const MappingList& mapping_list_; + // Additional memory regions to be included in the dump, + // provided by the caller. + const AppMemoryList& app_memory_list_; +}; + + +bool WriteMinidumpImpl(const char* minidump_path, + int minidump_fd, + off_t minidump_size_limit, + pid_t crashing_process, + const void* blob, size_t blob_size, + const MappingList& mappings, + const AppMemoryList& appmem) { + LinuxPtraceDumper dumper(crashing_process); + const ExceptionHandler::CrashContext* context = NULL; + if (blob) { + if (blob_size != sizeof(ExceptionHandler::CrashContext)) + return false; + context = reinterpret_cast(blob); + dumper.set_crash_address( + reinterpret_cast(context->siginfo.si_addr)); + dumper.set_crash_signal(context->siginfo.si_signo); + dumper.set_crash_thread(context->tid); + } + MinidumpWriter writer(minidump_path, minidump_fd, context, mappings, + appmem, &dumper); + // Set desired limit for file size of minidump (-1 means no limit). + writer.set_minidump_size_limit(minidump_size_limit); + if (!writer.Init()) + return false; + return writer.Dump(); +} + +} // namespace + +namespace google_breakpad { + +bool WriteMinidump(const char* minidump_path, pid_t crashing_process, + const void* blob, size_t blob_size) { + return WriteMinidumpImpl(minidump_path, -1, -1, + crashing_process, blob, blob_size, + MappingList(), AppMemoryList()); +} + +bool WriteMinidump(int minidump_fd, pid_t crashing_process, + const void* blob, size_t blob_size) { + return WriteMinidumpImpl(NULL, minidump_fd, -1, + crashing_process, blob, blob_size, + MappingList(), AppMemoryList()); +} + +bool WriteMinidump(const char* minidump_path, pid_t process, + pid_t process_blamed_thread) { + LinuxPtraceDumper dumper(process); + // MinidumpWriter will set crash address + dumper.set_crash_signal(MD_EXCEPTION_CODE_LIN_DUMP_REQUESTED); + dumper.set_crash_thread(process_blamed_thread); + MinidumpWriter writer(minidump_path, -1, NULL, MappingList(), + AppMemoryList(), &dumper); + if (!writer.Init()) + return false; + return writer.Dump(); +} + +bool WriteMinidump(const char* minidump_path, pid_t crashing_process, + const void* blob, size_t blob_size, + const MappingList& mappings, + const AppMemoryList& appmem) { + return WriteMinidumpImpl(minidump_path, -1, -1, crashing_process, + blob, blob_size, + mappings, appmem); +} + +bool WriteMinidump(int minidump_fd, pid_t crashing_process, + const void* blob, size_t blob_size, + const MappingList& mappings, + const AppMemoryList& appmem) { + return WriteMinidumpImpl(NULL, minidump_fd, -1, crashing_process, + blob, blob_size, + mappings, appmem); +} + +bool WriteMinidump(const char* minidump_path, off_t minidump_size_limit, + pid_t crashing_process, + const void* blob, size_t blob_size, + const MappingList& mappings, + const AppMemoryList& appmem) { + return WriteMinidumpImpl(minidump_path, -1, minidump_size_limit, + crashing_process, blob, blob_size, + mappings, appmem); +} + +bool WriteMinidump(int minidump_fd, off_t minidump_size_limit, + pid_t crashing_process, + const void* blob, size_t blob_size, + const MappingList& mappings, + const AppMemoryList& appmem) { + return WriteMinidumpImpl(NULL, minidump_fd, minidump_size_limit, + crashing_process, blob, blob_size, + mappings, appmem); +} + +bool WriteMinidump(const char* 
filename, + const MappingList& mappings, + const AppMemoryList& appmem, + LinuxDumper* dumper) { + MinidumpWriter writer(filename, -1, NULL, mappings, appmem, dumper); + if (!writer.Init()) + return false; + return writer.Dump(); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/minidump_writer.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/minidump_writer.h new file mode 100644 index 0000000000..d13fb120b5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/minidump_writer.h @@ -0,0 +1,124 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef CLIENT_LINUX_MINIDUMP_WRITER_MINIDUMP_WRITER_H_ +#define CLIENT_LINUX_MINIDUMP_WRITER_MINIDUMP_WRITER_H_ + +#include +#include +#include +#include + +#include +#include + +#include "client/linux/minidump_writer/linux_dumper.h" +#include "google_breakpad/common/minidump_format.h" + +namespace google_breakpad { + +class ExceptionHandler; + +#if defined(__aarch64__) +typedef struct fpsimd_context fpstate_t; +#elif !defined(__ARM_EABI__) && !defined(__mips__) +typedef struct _libc_fpstate fpstate_t; +#endif + +// These entries store a list of memory regions that the client wants included +// in the minidump. +struct AppMemory { + void* ptr; + size_t length; + + bool operator==(const struct AppMemory& other) const { + return ptr == other.ptr; + } + + bool operator==(const void* other) const { + return ptr == other; + } +}; +typedef std::list AppMemoryList; + +// Writes a minidump to the filesystem. These functions do not malloc nor use +// libc functions which may. Thus, it can be used in contexts where the state +// of the heap may be corrupt. +// minidump_path: the path to the file to write to. This is opened O_EXCL and +// fails open fails. +// crashing_process: the pid of the crashing process. This must be trusted. +// blob: a blob of data from the crashing process. 
See exception_handler.h +// blob_size: the length of |blob|, in bytes +// +// Returns true iff successful. +bool WriteMinidump(const char* minidump_path, pid_t crashing_process, + const void* blob, size_t blob_size); +// Same as above but takes an open file descriptor instead of a path. +bool WriteMinidump(int minidump_fd, pid_t crashing_process, + const void* blob, size_t blob_size); + +// Alternate form of WriteMinidump() that works with processes that +// are not expected to have crashed. If |process_blamed_thread| is +// meaningful, it will be the one from which a crash signature is +// extracted. It is not expected that this function will be called +// from a compromised context, but it is safe to do so. +bool WriteMinidump(const char* minidump_path, pid_t process, + pid_t process_blamed_thread); + +// These overloads also allow passing a list of known mappings and +// a list of additional memory regions to be included in the minidump. +bool WriteMinidump(const char* minidump_path, pid_t crashing_process, + const void* blob, size_t blob_size, + const MappingList& mappings, + const AppMemoryList& appdata); +bool WriteMinidump(int minidump_fd, pid_t crashing_process, + const void* blob, size_t blob_size, + const MappingList& mappings, + const AppMemoryList& appdata); + +// These overloads also allow passing a file size limit for the minidump. +bool WriteMinidump(const char* minidump_path, off_t minidump_size_limit, + pid_t crashing_process, + const void* blob, size_t blob_size, + const MappingList& mappings, + const AppMemoryList& appdata); +bool WriteMinidump(int minidump_fd, off_t minidump_size_limit, + pid_t crashing_process, + const void* blob, size_t blob_size, + const MappingList& mappings, + const AppMemoryList& appdata); + +bool WriteMinidump(const char* filename, + const MappingList& mappings, + const AppMemoryList& appdata, + LinuxDumper* dumper); + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_MINIDUMP_WRITER_MINIDUMP_WRITER_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/minidump_writer_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/minidump_writer_unittest.cc new file mode 100644 index 0000000000..e1046e12a0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/minidump_writer_unittest.cc @@ -0,0 +1,756 @@ +// Copyright (c) 2011 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "breakpad_googletest_includes.h" +#include "client/linux/handler/exception_handler.h" +#include "client/linux/minidump_writer/linux_dumper.h" +#include "client/linux/minidump_writer/minidump_writer.h" +#include "client/linux/minidump_writer/minidump_writer_unittest_utils.h" +#include "common/linux/eintr_wrapper.h" +#include "common/linux/file_id.h" +#include "common/linux/ignore_ret.h" +#include "common/linux/safe_readlink.h" +#include "common/scoped_ptr.h" +#include "common/tests/auto_tempdir.h" +#include "common/tests/file_utils.h" +#include "common/using_std_string.h" +#include "google_breakpad/processor/minidump.h" + +using namespace google_breakpad; + +// Length of a formatted GUID string = +// sizeof(MDGUID) * 2 + 4 (for dashes) + 1 (null terminator) +const int kGUIDStringSize = 37; + +namespace { + +typedef testing::Test MinidumpWriterTest; + +const char kMDWriterUnitTestFileName[] = "/minidump-writer-unittest"; + +TEST(MinidumpWriterTest, SetupWithPath) { + int fds[2]; + ASSERT_NE(-1, pipe(fds)); + + const pid_t child = fork(); + if (child == 0) { + close(fds[1]); + char b; + IGNORE_RET(HANDLE_EINTR(read(fds[0], &b, sizeof(b)))); + close(fds[0]); + syscall(__NR_exit); + } + close(fds[0]); + + ExceptionHandler::CrashContext context; + memset(&context, 0, sizeof(context)); + + AutoTempDir temp_dir; + string templ = temp_dir.path() + kMDWriterUnitTestFileName; + // Set a non-zero tid to avoid tripping asserts. + context.tid = child; + ASSERT_TRUE(WriteMinidump(templ.c_str(), child, &context, sizeof(context))); + struct stat st; + ASSERT_EQ(0, stat(templ.c_str(), &st)); + ASSERT_GT(st.st_size, 0); + + close(fds[1]); +} + +TEST(MinidumpWriterTest, SetupWithFD) { + int fds[2]; + ASSERT_NE(-1, pipe(fds)); + + const pid_t child = fork(); + if (child == 0) { + close(fds[1]); + char b; + HANDLE_EINTR(read(fds[0], &b, sizeof(b))); + close(fds[0]); + syscall(__NR_exit); + } + close(fds[0]); + + ExceptionHandler::CrashContext context; + memset(&context, 0, sizeof(context)); + + AutoTempDir temp_dir; + string templ = temp_dir.path() + kMDWriterUnitTestFileName; + int fd = open(templ.c_str(), O_CREAT | O_WRONLY, S_IRWXU); + // Set a non-zero tid to avoid tripping asserts. + context.tid = child; + ASSERT_TRUE(WriteMinidump(fd, child, &context, sizeof(context))); + struct stat st; + ASSERT_EQ(0, stat(templ.c_str(), &st)); + ASSERT_GT(st.st_size, 0); + + close(fds[1]); +} + +// Test that mapping info can be specified when writing a minidump, +// and that it ends up in the module list of the minidump. +TEST(MinidumpWriterTest, MappingInfo) { + int fds[2]; + ASSERT_NE(-1, pipe(fds)); + + // These are defined here so the parent can use them to check the + // data from the minidump afterwards. 
+ const uint32_t memory_size = sysconf(_SC_PAGESIZE); + const char* kMemoryName = "a fake module"; + const uint8_t kModuleGUID[sizeof(MDGUID)] = { + 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, 0xAA, 0xBB, 0xCC, 0xDD, 0xEE, 0xFF + }; + char module_identifier_buffer[kGUIDStringSize]; + FileID::ConvertIdentifierToString(kModuleGUID, + module_identifier_buffer, + sizeof(module_identifier_buffer)); + string module_identifier(module_identifier_buffer); + // Strip out dashes + size_t pos; + while ((pos = module_identifier.find('-')) != string::npos) { + module_identifier.erase(pos, 1); + } + // And append a zero, because module IDs include an "age" field + // which is always zero on Linux. + module_identifier += "0"; + + // Get some memory. + char* memory = + reinterpret_cast(mmap(NULL, + memory_size, + PROT_READ | PROT_WRITE, + MAP_PRIVATE | MAP_ANON, + -1, + 0)); + const uintptr_t kMemoryAddress = reinterpret_cast(memory); + ASSERT_TRUE(memory); + + const pid_t child = fork(); + if (child == 0) { + close(fds[1]); + char b; + IGNORE_RET(HANDLE_EINTR(read(fds[0], &b, sizeof(b)))); + close(fds[0]); + syscall(__NR_exit); + } + close(fds[0]); + + ExceptionHandler::CrashContext context; + memset(&context, 0, sizeof(context)); + ASSERT_EQ(0, getcontext(&context.context)); + context.tid = child; + + AutoTempDir temp_dir; + string templ = temp_dir.path() + kMDWriterUnitTestFileName; + + // Add information about the mapped memory. + MappingInfo info; + info.start_addr = kMemoryAddress; + info.size = memory_size; + info.offset = 0; + strcpy(info.name, kMemoryName); + + MappingList mappings; + AppMemoryList memory_list; + MappingEntry mapping; + mapping.first = info; + memcpy(mapping.second, kModuleGUID, sizeof(MDGUID)); + mappings.push_back(mapping); + ASSERT_TRUE(WriteMinidump(templ.c_str(), child, &context, sizeof(context), + mappings, memory_list)); + + // Read the minidump. Load the module list, and ensure that + // the mmap'ed |memory| is listed with the given module name + // and debug ID. + Minidump minidump(templ); + ASSERT_TRUE(minidump.Read()); + + MinidumpModuleList* module_list = minidump.GetModuleList(); + ASSERT_TRUE(module_list); + const MinidumpModule* module = + module_list->GetModuleForAddress(kMemoryAddress); + ASSERT_TRUE(module); + + EXPECT_EQ(kMemoryAddress, module->base_address()); + EXPECT_EQ(memory_size, module->size()); + EXPECT_EQ(kMemoryName, module->code_file()); + EXPECT_EQ(module_identifier, module->debug_identifier()); + + uint32_t len; + // These streams are expected to be there + EXPECT_TRUE(minidump.SeekToStreamType(MD_THREAD_LIST_STREAM, &len)); + EXPECT_TRUE(minidump.SeekToStreamType(MD_MEMORY_LIST_STREAM, &len)); + EXPECT_TRUE(minidump.SeekToStreamType(MD_EXCEPTION_STREAM, &len)); + EXPECT_TRUE(minidump.SeekToStreamType(MD_SYSTEM_INFO_STREAM, &len)); + EXPECT_TRUE(minidump.SeekToStreamType(MD_LINUX_CPU_INFO, &len)); + EXPECT_TRUE(minidump.SeekToStreamType(MD_LINUX_PROC_STATUS, &len)); + EXPECT_TRUE(minidump.SeekToStreamType(MD_LINUX_CMD_LINE, &len)); + EXPECT_TRUE(minidump.SeekToStreamType(MD_LINUX_ENVIRON, &len)); + EXPECT_TRUE(minidump.SeekToStreamType(MD_LINUX_AUXV, &len)); + EXPECT_TRUE(minidump.SeekToStreamType(MD_LINUX_MAPS, &len)); + EXPECT_TRUE(minidump.SeekToStreamType(MD_LINUX_DSO_DEBUG, &len)); + + close(fds[1]); +} + +// Test that mapping info can be specified, and that it overrides +// existing mappings that are wholly contained within the specified +// range. 
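+// (This exercises HaveMappingInfo() in minidump_writer.cc: a mapping reported
+// by the dumper is dropped whenever it lies entirely inside a caller-supplied
+// MappingList range, so only the caller's entry, with its deliberately larger
+// size, should appear in the module list.)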
+TEST(MinidumpWriterTest, MappingInfoContained) { + int fds[2]; + ASSERT_NE(-1, pipe(fds)); + + // These are defined here so the parent can use them to check the + // data from the minidump afterwards. + const int32_t memory_size = sysconf(_SC_PAGESIZE); + const char* kMemoryName = "a fake module"; + const uint8_t kModuleGUID[sizeof(MDGUID)] = { + 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, 0xAA, 0xBB, 0xCC, 0xDD, 0xEE, 0xFF + }; + char module_identifier_buffer[kGUIDStringSize]; + FileID::ConvertIdentifierToString(kModuleGUID, + module_identifier_buffer, + sizeof(module_identifier_buffer)); + string module_identifier(module_identifier_buffer); + // Strip out dashes + size_t pos; + while ((pos = module_identifier.find('-')) != string::npos) { + module_identifier.erase(pos, 1); + } + // And append a zero, because module IDs include an "age" field + // which is always zero on Linux. + module_identifier += "0"; + + // mmap a file + AutoTempDir temp_dir; + string tempfile = temp_dir.path() + "/minidump-writer-unittest-temp"; + int fd = open(tempfile.c_str(), O_RDWR | O_CREAT, 0); + ASSERT_NE(-1, fd); + unlink(tempfile.c_str()); + // fill with zeros + google_breakpad::scoped_array buffer(new char[memory_size]); + memset(buffer.get(), 0, memory_size); + ASSERT_EQ(memory_size, write(fd, buffer.get(), memory_size)); + lseek(fd, 0, SEEK_SET); + + char* memory = + reinterpret_cast(mmap(NULL, + memory_size, + PROT_READ | PROT_WRITE, + MAP_PRIVATE, + fd, + 0)); + const uintptr_t kMemoryAddress = reinterpret_cast(memory); + ASSERT_TRUE(memory); + close(fd); + + const pid_t child = fork(); + if (child == 0) { + close(fds[1]); + char b; + IGNORE_RET(HANDLE_EINTR(read(fds[0], &b, sizeof(b)))); + close(fds[0]); + syscall(__NR_exit); + } + close(fds[0]); + + ExceptionHandler::CrashContext context; + memset(&context, 0, sizeof(context)); + context.tid = 1; + + string dumpfile = temp_dir.path() + kMDWriterUnitTestFileName; + + // Add information about the mapped memory. Report it as being larger than + // it actually is. + MappingInfo info; + info.start_addr = kMemoryAddress - memory_size; + info.size = memory_size * 3; + info.offset = 0; + strcpy(info.name, kMemoryName); + + MappingList mappings; + AppMemoryList memory_list; + MappingEntry mapping; + mapping.first = info; + memcpy(mapping.second, kModuleGUID, sizeof(MDGUID)); + mappings.push_back(mapping); + ASSERT_TRUE(WriteMinidump(dumpfile.c_str(), child, &context, sizeof(context), + mappings, memory_list)); + + // Read the minidump. Load the module list, and ensure that + // the mmap'ed |memory| is listed with the given module name + // and debug ID. + Minidump minidump(dumpfile); + ASSERT_TRUE(minidump.Read()); + + MinidumpModuleList* module_list = minidump.GetModuleList(); + ASSERT_TRUE(module_list); + const MinidumpModule* module = + module_list->GetModuleForAddress(kMemoryAddress); + ASSERT_TRUE(module); + + EXPECT_EQ(info.start_addr, module->base_address()); + EXPECT_EQ(info.size, module->size()); + EXPECT_EQ(kMemoryName, module->code_file()); + EXPECT_EQ(module_identifier, module->debug_identifier()); + + close(fds[1]); +} + +TEST(MinidumpWriterTest, DeletedBinary) { + const string kNumberOfThreadsArgument = "1"; + const string helper_path(GetHelperBinary()); + if (helper_path.empty()) { + FAIL() << "Couldn't find helper binary"; + exit(1); + } + + // Copy binary to a temp file. 
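+  // The copied binary is unlinked while the child is still running, so the
+  // kernel reports its mapping in /proc/<pid>/maps with a " (deleted)"
+  // suffix. The test then checks that the writer still records the original
+  // path and that the ELF file identifier matches the one computed from the
+  // surviving helper_path copy of the same binary.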
+ AutoTempDir temp_dir; + string binpath = temp_dir.path() + "/linux-dumper-unittest-helper"; + ASSERT_TRUE(CopyFile(helper_path.c_str(), binpath.c_str())) + << "Failed to copy " << helper_path << " to " << binpath; + ASSERT_EQ(0, chmod(binpath.c_str(), 0755)); + + int fds[2]; + ASSERT_NE(-1, pipe(fds)); + + pid_t child_pid = fork(); + if (child_pid == 0) { + // In child process. + close(fds[0]); + + // Pass the pipe fd and the number of threads as arguments. + char pipe_fd_string[8]; + sprintf(pipe_fd_string, "%d", fds[1]); + execl(binpath.c_str(), + binpath.c_str(), + pipe_fd_string, + kNumberOfThreadsArgument.c_str(), + NULL); + } + close(fds[1]); + // Wait for the child process to signal that it's ready. + struct pollfd pfd; + memset(&pfd, 0, sizeof(pfd)); + pfd.fd = fds[0]; + pfd.events = POLLIN | POLLERR; + + const int r = HANDLE_EINTR(poll(&pfd, 1, 1000)); + ASSERT_EQ(1, r); + ASSERT_TRUE(pfd.revents & POLLIN); + uint8_t junk; + const int nr = HANDLE_EINTR(read(fds[0], &junk, sizeof(junk))); + ASSERT_EQ(static_cast(sizeof(junk)), nr); + close(fds[0]); + + // Child is ready now. + // Unlink the test binary. + unlink(binpath.c_str()); + + ExceptionHandler::CrashContext context; + memset(&context, 0, sizeof(context)); + + string templ = temp_dir.path() + kMDWriterUnitTestFileName; + // Set a non-zero tid to avoid tripping asserts. + context.tid = child_pid; + ASSERT_TRUE(WriteMinidump(templ.c_str(), child_pid, &context, + sizeof(context))); + kill(child_pid, SIGKILL); + + struct stat st; + ASSERT_EQ(0, stat(templ.c_str(), &st)); + ASSERT_GT(st.st_size, 0); + + Minidump minidump(templ); + ASSERT_TRUE(minidump.Read()); + + // Check that the main module filename is correct. + MinidumpModuleList* module_list = minidump.GetModuleList(); + ASSERT_TRUE(module_list); + const MinidumpModule* module = module_list->GetMainModule(); + EXPECT_STREQ(binpath.c_str(), module->code_file().c_str()); + // Check that the file ID is correct. + FileID fileid(helper_path.c_str()); + uint8_t identifier[sizeof(MDGUID)]; + EXPECT_TRUE(fileid.ElfFileIdentifier(identifier)); + char identifier_string[kGUIDStringSize]; + FileID::ConvertIdentifierToString(identifier, + identifier_string, + kGUIDStringSize); + string module_identifier(identifier_string); + // Strip out dashes + size_t pos; + while ((pos = module_identifier.find('-')) != string::npos) { + module_identifier.erase(pos, 1); + } + // And append a zero, because module IDs include an "age" field + // which is always zero on Linux. + module_identifier += "0"; + EXPECT_EQ(module_identifier, module->debug_identifier()); +} + +// Test that an additional memory region can be added to the minidump. +TEST(MinidumpWriterTest, AdditionalMemory) { + int fds[2]; + ASSERT_NE(-1, pipe(fds)); + + // These are defined here so the parent can use them to check the + // data from the minidump afterwards. + const uint32_t kMemorySize = sysconf(_SC_PAGESIZE); + + // Get some heap memory. + uint8_t* memory = new uint8_t[kMemorySize]; + const uintptr_t kMemoryAddress = reinterpret_cast(memory); + ASSERT_TRUE(memory); + + // Stick some data into the memory so the contents can be verified. 
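+  // (The pattern itself is arbitrary; i % 255 simply guarantees a non-trivial
+  // byte sequence for the memcmp() check against the dumped region below.)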
+ for (uint32_t i = 0; i < kMemorySize; ++i) { + memory[i] = i % 255; + } + + const pid_t child = fork(); + if (child == 0) { + close(fds[1]); + char b; + HANDLE_EINTR(read(fds[0], &b, sizeof(b))); + close(fds[0]); + syscall(__NR_exit); + } + close(fds[0]); + + ExceptionHandler::CrashContext context; + + // This needs a valid context for minidump writing to work, but getting + // a useful one from the child is too much work, so just use one from + // the parent since the child is just a forked copy anyway. + ASSERT_EQ(0, getcontext(&context.context)); + context.tid = child; + + AutoTempDir temp_dir; + string templ = temp_dir.path() + kMDWriterUnitTestFileName; + unlink(templ.c_str()); + + MappingList mappings; + AppMemoryList memory_list; + + // Add the memory region to the list of memory to be included. + AppMemory app_memory; + app_memory.ptr = memory; + app_memory.length = kMemorySize; + memory_list.push_back(app_memory); + ASSERT_TRUE(WriteMinidump(templ.c_str(), child, &context, sizeof(context), + mappings, memory_list)); + + // Read the minidump. Ensure that the memory region is present + Minidump minidump(templ); + ASSERT_TRUE(minidump.Read()); + + MinidumpMemoryList* dump_memory_list = minidump.GetMemoryList(); + ASSERT_TRUE(dump_memory_list); + const MinidumpMemoryRegion* region = + dump_memory_list->GetMemoryRegionForAddress(kMemoryAddress); + ASSERT_TRUE(region); + + EXPECT_EQ(kMemoryAddress, region->GetBase()); + EXPECT_EQ(kMemorySize, region->GetSize()); + + // Verify memory contents. + EXPECT_EQ(0, memcmp(region->GetMemory(), memory, kMemorySize)); + + delete[] memory; + close(fds[1]); +} + +// Test that an invalid thread stack pointer still results in a minidump. +TEST(MinidumpWriterTest, InvalidStackPointer) { + int fds[2]; + ASSERT_NE(-1, pipe(fds)); + + const pid_t child = fork(); + if (child == 0) { + close(fds[1]); + char b; + HANDLE_EINTR(read(fds[0], &b, sizeof(b))); + close(fds[0]); + syscall(__NR_exit); + } + close(fds[0]); + + ExceptionHandler::CrashContext context; + + // This needs a valid context for minidump writing to work, but getting + // a useful one from the child is too much work, so just use one from + // the parent since the child is just a forked copy anyway. + ASSERT_EQ(0, getcontext(&context.context)); + context.tid = child; + + // Fake the child's stack pointer for its crashing thread. NOTE: This must + // be an invalid memory address for the child process (stack or otherwise). + // Try 1MB below the current stack. + uintptr_t invalid_stack_pointer = + reinterpret_cast(&context) - 1024*1024; +#if defined(__i386) + context.context.uc_mcontext.gregs[REG_ESP] = invalid_stack_pointer; +#elif defined(__x86_64) + context.context.uc_mcontext.gregs[REG_RSP] = invalid_stack_pointer; +#elif defined(__ARM_EABI__) + context.context.uc_mcontext.arm_sp = invalid_stack_pointer; +#elif defined(__aarch64__) + context.context.uc_mcontext.sp = invalid_stack_pointer; +#elif defined(__mips__) + context.context.uc_mcontext.gregs[MD_CONTEXT_MIPS_REG_SP] = + invalid_stack_pointer; +#else +# error "This code has not been ported to your platform yet." +#endif + + AutoTempDir temp_dir; + string templ = temp_dir.path() + kMDWriterUnitTestFileName; + // NOTE: In previous versions of Breakpad, WriteMinidump() would fail if + // presented with an invalid stack pointer. + ASSERT_TRUE(WriteMinidump(templ.c_str(), child, &context, sizeof(context))); + + // Read the minidump. 
Ensure that the memory region is present + Minidump minidump(templ); + ASSERT_TRUE(minidump.Read()); + + // TODO(ted.mielczarek,mkrebs): Enable this part of the test once + // https://breakpad.appspot.com/413002/ is committed. +#if 0 + // Make sure there's a thread without a stack. NOTE: It's okay if + // GetThreadList() shows the error: "ERROR: MinidumpThread has a memory + // region problem". + MinidumpThreadList* dump_thread_list = minidump.GetThreadList(); + ASSERT_TRUE(dump_thread_list); + bool found_empty_stack = false; + for (int i = 0; i < dump_thread_list->thread_count(); i++) { + MinidumpThread* thread = dump_thread_list->GetThreadAtIndex(i); + ASSERT_TRUE(thread->thread() != NULL); + // When the stack size is zero bytes, GetMemory() returns NULL. + if (thread->GetMemory() == NULL) { + found_empty_stack = true; + break; + } + } + // NOTE: If you fail this, first make sure that "invalid_stack_pointer" + // above is indeed set to an invalid address. + ASSERT_TRUE(found_empty_stack); +#endif + + close(fds[1]); +} + +// Test that limiting the size of the minidump works. +TEST(MinidumpWriterTest, MinidumpSizeLimit) { + static const int kNumberOfThreadsInHelperProgram = 40; + + char number_of_threads_arg[3]; + sprintf(number_of_threads_arg, "%d", kNumberOfThreadsInHelperProgram); + + string helper_path(GetHelperBinary()); + if (helper_path.empty()) { + FAIL() << "Couldn't find helper binary"; + exit(1); + } + + int fds[2]; + ASSERT_NE(-1, pipe(fds)); + + pid_t child_pid = fork(); + if (child_pid == 0) { + // In child process. + close(fds[0]); + + // Pass the pipe fd and the number of threads as arguments. + char pipe_fd_string[8]; + sprintf(pipe_fd_string, "%d", fds[1]); + execl(helper_path.c_str(), + helper_path.c_str(), + pipe_fd_string, + number_of_threads_arg, + NULL); + } + close(fds[1]); + + // Wait for all child threads to indicate that they have started + for (int threads = 0; threads < kNumberOfThreadsInHelperProgram; threads++) { + struct pollfd pfd; + memset(&pfd, 0, sizeof(pfd)); + pfd.fd = fds[0]; + pfd.events = POLLIN | POLLERR; + + const int r = HANDLE_EINTR(poll(&pfd, 1, 1000)); + ASSERT_EQ(1, r); + ASSERT_TRUE(pfd.revents & POLLIN); + uint8_t junk; + ASSERT_EQ(read(fds[0], &junk, sizeof(junk)), + static_cast(sizeof(junk))); + } + close(fds[0]); + + // There is a race here because we may stop a child thread before + // it is actually running the busy loop. Empirically this sleep + // is sufficient to avoid the race. + usleep(100000); + + // Child and its threads are ready now. + + + off_t normal_file_size; + int total_normal_stack_size = 0; + AutoTempDir temp_dir; + + // First, write a minidump with no size limit. + { + string normal_dump = temp_dir.path() + + "/minidump-writer-unittest.dmp"; + ASSERT_TRUE(WriteMinidump(normal_dump.c_str(), -1, + child_pid, NULL, 0, + MappingList(), AppMemoryList())); + struct stat st; + ASSERT_EQ(0, stat(normal_dump.c_str(), &st)); + ASSERT_GT(st.st_size, 0); + normal_file_size = st.st_size; + + Minidump minidump(normal_dump); + ASSERT_TRUE(minidump.Read()); + MinidumpThreadList* dump_thread_list = minidump.GetThreadList(); + ASSERT_TRUE(dump_thread_list); + for (unsigned int i = 0; i < dump_thread_list->thread_count(); i++) { + MinidumpThread* thread = dump_thread_list->GetThreadAtIndex(i); + ASSERT_TRUE(thread->thread() != NULL); + // When the stack size is zero bytes, GetMemory() returns NULL. 
+ MinidumpMemoryRegion* memory = thread->GetMemory(); + ASSERT_TRUE(memory != NULL); + total_normal_stack_size += memory->GetSize(); + } + } + + // Second, write a minidump with a size limit big enough to not trigger + // anything. + { + // Set size limit arbitrarily 1MB larger than the normal file size -- such + // that the limiting code will not kick in. + const off_t minidump_size_limit = normal_file_size + 1024*1024; + + string same_dump = temp_dir.path() + + "/minidump-writer-unittest-same.dmp"; + ASSERT_TRUE(WriteMinidump(same_dump.c_str(), minidump_size_limit, + child_pid, NULL, 0, + MappingList(), AppMemoryList())); + struct stat st; + ASSERT_EQ(0, stat(same_dump.c_str(), &st)); + // Make sure limiting wasn't actually triggered. NOTE: If you fail this, + // first make sure that "minidump_size_limit" above is indeed set to a + // large enough value -- the limit-checking code in minidump_writer.cc + // does just a rough estimate. + ASSERT_EQ(normal_file_size, st.st_size); + } + + // Third, write a minidump with a size limit small enough to be triggered. + { + // Set size limit to some arbitrary amount, such that the limiting code + // will kick in. The equation used to set this value was determined by + // simply reversing the size-limit logic a little bit in order to pick a + // size we know will trigger it. The definition of + // kLimitAverageThreadStackLength here was copied from class + // MinidumpWriter in minidump_writer.cc. + static const unsigned kLimitAverageThreadStackLength = 8 * 1024; + off_t minidump_size_limit = kNumberOfThreadsInHelperProgram * + kLimitAverageThreadStackLength; + // If, in reality, each of the threads' stack is *smaller* than + // kLimitAverageThreadStackLength, the normal file size could very well be + // smaller than the arbitrary limit that was just set. In that case, + // either of these numbers should trigger the size-limiting code, but we + // might as well pick the smallest. + if (normal_file_size < minidump_size_limit) + minidump_size_limit = normal_file_size; + + string limit_dump = temp_dir.path() + + "/minidump-writer-unittest-limit.dmp"; + ASSERT_TRUE(WriteMinidump(limit_dump.c_str(), minidump_size_limit, + child_pid, NULL, 0, + MappingList(), AppMemoryList())); + struct stat st; + ASSERT_EQ(0, stat(limit_dump.c_str(), &st)); + ASSERT_GT(st.st_size, 0); + // Make sure the file size is at least smaller than the original. If this + // fails because it's the same size, then the size-limit logic didn't kick + // in like it was supposed to. + EXPECT_LT(st.st_size, normal_file_size); + + Minidump minidump(limit_dump); + ASSERT_TRUE(minidump.Read()); + MinidumpThreadList* dump_thread_list = minidump.GetThreadList(); + ASSERT_TRUE(dump_thread_list); + int total_limit_stack_size = 0; + for (unsigned int i = 0; i < dump_thread_list->thread_count(); i++) { + MinidumpThread* thread = dump_thread_list->GetThreadAtIndex(i); + ASSERT_TRUE(thread->thread() != NULL); + // When the stack size is zero bytes, GetMemory() returns NULL. + MinidumpMemoryRegion* memory = thread->GetMemory(); + ASSERT_TRUE(memory != NULL); + total_limit_stack_size += memory->GetSize(); + } + + // Make sure stack size shrunk by at least 1KB per extra thread. The + // definition of kLimitBaseThreadCount here was copied from class + // MinidumpWriter in minidump_writer.cc. + // Note: The 1KB is arbitrary, and assumes that the thread stacks are big + // enough to shrink by that much. 
For example, if each thread stack was + // originally only 2KB, the current size-limit logic wouldn't actually + // shrink them because that's the size to which it tries to shrink. If + // you fail this part of the test due to something like that, the test + // logic should probably be improved to account for your situation. + const unsigned kLimitBaseThreadCount = 20; + const unsigned kMinPerExtraThreadStackReduction = 1024; + const int min_expected_reduction = (kNumberOfThreadsInHelperProgram - + kLimitBaseThreadCount) * kMinPerExtraThreadStackReduction; + EXPECT_LT(total_limit_stack_size, + total_normal_stack_size - min_expected_reduction); + } + + // Kill the helper program. + kill(child_pid, SIGKILL); +} + +} // namespace diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/minidump_writer_unittest_utils.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/minidump_writer_unittest_utils.cc new file mode 100644 index 0000000000..9f46fa65c8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/minidump_writer_unittest_utils.cc @@ -0,0 +1,66 @@ +// Copyright (c) 2011 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// minidump_writer_unittest_utils.cc: +// Shared routines used by unittests under client/linux/minidump_writer. + +#include +#include + +#include "client/linux/minidump_writer/minidump_writer_unittest_utils.h" +#include "common/linux/safe_readlink.h" +#include "common/using_std_string.h" + +namespace google_breakpad { + +string GetHelperBinary() { + string helper_path; + char *bindir = getenv("bindir"); + if (bindir) { + helper_path = string(bindir) + "/"; + } else { + // Locate helper binary next to the current binary. 
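+    // E.g. (hypothetical paths): if /proc/self/exe resolves to
+    // /out/tests/minidump_writer_unittest, the helper is looked for at
+    // /out/tests/linux_dumper_unittest_helper.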
+ char self_path[PATH_MAX]; + if (!SafeReadLink("/proc/self/exe", self_path)) { + return ""; + } + helper_path = string(self_path); + size_t pos = helper_path.rfind('/'); + if (pos == string::npos) { + return ""; + } + helper_path.erase(pos + 1); + } + + helper_path += "linux_dumper_unittest_helper"; + + return helper_path; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/minidump_writer_unittest_utils.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/minidump_writer_unittest_utils.h new file mode 100644 index 0000000000..f16cc086bd --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/minidump_writer_unittest_utils.h @@ -0,0 +1,49 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// minidump_writer_unittest_utils.h: +// Shared routines used by unittests under client/linux/minidump_writer. + +#ifndef CLIENT_LINUX_MINIDUMP_WRITER_MINIDUMP_WRITER_UNITTEST_UTILS_H_ +#define CLIENT_LINUX_MINIDUMP_WRITER_MINIDUMP_WRITER_UNITTEST_UTILS_H_ + +#include + +#include "common/using_std_string.h" + +namespace google_breakpad { + +// Returns the full path to linux_dumper_unittest_helper. The full path is +// discovered either by using the environment variable "bindir" or by using +// the location of the main module of the currently running process. +string GetHelperBinary(); + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_MINIDUMP_WRITER_MINIDUMP_WRITER_UNITTEST_UTILS_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/proc_cpuinfo_reader.h b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/proc_cpuinfo_reader.h new file mode 100644 index 0000000000..d9461bf301 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/proc_cpuinfo_reader.h @@ -0,0 +1,130 @@ +// Copyright (c) 2013, Google Inc. +// All rights reserved. 
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef CLIENT_LINUX_MINIDUMP_WRITER_PROC_CPUINFO_READER_H_
+#define CLIENT_LINUX_MINIDUMP_WRITER_PROC_CPUINFO_READER_H_
+
+#include <stdint.h>
+#include <assert.h>
+#include <string.h>
+
+#include "client/linux/minidump_writer/line_reader.h"
+#include "common/linux/linux_libc_support.h"
+#include "third_party/lss/linux_syscall_support.h"
+
+namespace google_breakpad {
+
+// A class for reading /proc/cpuinfo without using fopen/fgets or other
+// functions which may allocate memory.
+class ProcCpuInfoReader {
+public:
+  ProcCpuInfoReader(int fd)
+    : line_reader_(fd), pop_count_(-1) {
+  }
+
+  // Return the next field name, or NULL in case of EOF.
+  // field: (output) Pointer to zero-terminated field name.
+  // Returns true on success, or false on EOF or error (line too long).
+  bool GetNextField(const char** field) {
+    for (;;) {
+      const char* line;
+      unsigned line_len;
+
+      // Try to read next line.
+      if (pop_count_ >= 0) {
+        line_reader_.PopLine(pop_count_);
+        pop_count_ = -1;
+      }
+
+      if (!line_reader_.GetNextLine(&line, &line_len))
+        return false;
+
+      pop_count_ = static_cast<int>(line_len);
+
+      const char* line_end = line + line_len;
+
+      // Expected format: <field-name> ':' <value>
+      // Note that:
+      //   - empty lines happen.
+      //   - <field-name> can contain spaces.
+      //   - some fields have an empty <value>
+      char* sep = static_cast<char*>(my_memchr(line, ':', line_len));
+      if (sep == NULL)
+        continue;
+
+      // Record the value. Skip leading space after the column to get
+      // its start.
+      const char* val = sep+1;
+      while (val < line_end && my_isspace(*val))
+        val++;
+
+      value_ = val;
+      value_len_ = static_cast<size_t>(line_end - val);
+
+      // Remove trailing spaces before the column to properly 0-terminate
+      // the field name.
+      while (sep > line && my_isspace(sep[-1]))
+        sep--;
+
+      if (sep == line)
+        continue;
+
+      // zero-terminate field name.
+      *sep = '\0';
+
+      *field = line;
+      return true;
+    }
+  }
+
+  // Return the field value. This must be called after a successful
+  // call to GetNextField().
+ const char* GetValue() { + assert(value_); + return value_; + } + + // Same as GetValue(), but also returns the length in characters of + // the value. + const char* GetValueAndLen(size_t* length) { + assert(value_); + *length = value_len_; + return value_; + } + +private: + LineReader line_reader_; + int pop_count_; + const char* value_; + size_t value_len_; +}; + +} // namespace google_breakpad + +#endif // CLIENT_LINUX_MINIDUMP_WRITER_PROC_CPUINFO_READER_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/proc_cpuinfo_reader_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/proc_cpuinfo_reader_unittest.cc new file mode 100644 index 0000000000..6037c7e665 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/minidump_writer/proc_cpuinfo_reader_unittest.cc @@ -0,0 +1,199 @@ +// Copyright (c) 2013, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
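The tests that follow exercise ProcCpuInfoReader against small synthetic files. For orientation, here is a minimal sketch of how the reader is meant to be driven by production code; it is an editor's illustration only (the function name and error handling are assumptions, not part of the imported sources), and it leans on sys_open()/sys_close() from linux_syscall_support.h so that, like the reader itself, it avoids allocating memory.

    // Editor's sketch, not part of the Breakpad import: walk every
    // "field : value" pair in /proc/cpuinfo and count them.
    #include <fcntl.h>

    #include "client/linux/minidump_writer/proc_cpuinfo_reader.h"
    #include "third_party/lss/linux_syscall_support.h"

    static int CountCpuInfoFields() {
      const int fd = sys_open("/proc/cpuinfo", O_RDONLY, 0);
      if (fd < 0)
        return -1;
      google_breakpad::ProcCpuInfoReader reader(fd);
      const char* field = NULL;
      int count = 0;
      while (reader.GetNextField(&field)) {
        // |field| is now a 0-terminated name such as "processor"; the matching
        // value returned by GetValue() stays valid until the next call.
        ++count;
      }
      sys_close(fd);  // the reader does not close the descriptor itself
      return count;
    }

The unit tests below cover the same surface, but through AutoTestFile fixtures rather than the live /proc filesystem.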
+ +#include +#include +#include +#include +#include +#include + +#include "client/linux/minidump_writer/proc_cpuinfo_reader.h" +#include "breakpad_googletest_includes.h" +#include "common/linux/tests/auto_testfile.h" + +using namespace google_breakpad; + +#if !defined(__ANDROID__) +#define TEMPDIR "/tmp" +#else +#define TEMPDIR "/data/local/tmp" +#endif + + +namespace { + +typedef testing::Test ProcCpuInfoReaderTest; + +class ScopedTestFile : public AutoTestFile { +public: + explicit ScopedTestFile(const char* text) + : AutoTestFile("proc_cpuinfo_reader", text) { + } +}; + +} + +TEST(ProcCpuInfoReaderTest, EmptyFile) { + ScopedTestFile file(""); + ASSERT_TRUE(file.IsOk()); + ProcCpuInfoReader reader(file.GetFd()); + + const char *field; + ASSERT_FALSE(reader.GetNextField(&field)); +} + +TEST(ProcCpuInfoReaderTest, OneLineTerminated) { + ScopedTestFile file("foo : bar\n"); + ASSERT_TRUE(file.IsOk()); + ProcCpuInfoReader reader(file.GetFd()); + + const char *field; + ASSERT_TRUE(reader.GetNextField(&field)); + ASSERT_STREQ("foo", field); + ASSERT_STREQ("bar", reader.GetValue()); + + ASSERT_FALSE(reader.GetNextField(&field)); +} + +TEST(ProcCpuInfoReaderTest, OneLine) { + ScopedTestFile file("foo : bar"); + ASSERT_TRUE(file.IsOk()); + ProcCpuInfoReader reader(file.GetFd()); + + const char *field; + size_t value_len; + ASSERT_TRUE(reader.GetNextField(&field)); + ASSERT_STREQ("foo", field); + ASSERT_STREQ("bar", reader.GetValueAndLen(&value_len)); + ASSERT_EQ(3U, value_len); + + ASSERT_FALSE(reader.GetNextField(&field)); +} + +TEST(ProcCpuInfoReaderTest, TwoLinesTerminated) { + ScopedTestFile file("foo : bar\nzoo : tut\n"); + ASSERT_TRUE(file.IsOk()); + ProcCpuInfoReader reader(file.GetFd()); + + const char* field; + ASSERT_TRUE(reader.GetNextField(&field)); + ASSERT_STREQ("foo", field); + ASSERT_STREQ("bar", reader.GetValue()); + + ASSERT_TRUE(reader.GetNextField(&field)); + ASSERT_STREQ("zoo", field); + ASSERT_STREQ("tut", reader.GetValue()); + + ASSERT_FALSE(reader.GetNextField(&field)); +} + +TEST(ProcCpuInfoReaderTest, SkipMalformedLine) { + ScopedTestFile file("this line should have a column\nfoo : bar\n"); + ASSERT_TRUE(file.IsOk()); + ProcCpuInfoReader reader(file.GetFd()); + + const char* field; + ASSERT_TRUE(reader.GetNextField(&field)); + ASSERT_STREQ("foo", field); + ASSERT_STREQ("bar", reader.GetValue()); + + ASSERT_FALSE(reader.GetNextField(&field)); +} + +TEST(ProcCpuInfoReaderTest, SkipOneEmptyLine) { + ScopedTestFile file("\n\nfoo : bar\n"); + ASSERT_TRUE(file.IsOk()); + ProcCpuInfoReader reader(file.GetFd()); + + const char* field; + ASSERT_TRUE(reader.GetNextField(&field)); + ASSERT_STREQ("foo", field); + ASSERT_STREQ("bar", reader.GetValue()); + + ASSERT_FALSE(reader.GetNextField(&field)); +} + +TEST(ProcCpuInfoReaderTest, SkipEmptyField) { + ScopedTestFile file(" : bar\nzoo : tut\n"); + ASSERT_TRUE(file.IsOk()); + ProcCpuInfoReader reader(file.GetFd()); + + const char* field; + ASSERT_TRUE(reader.GetNextField(&field)); + ASSERT_STREQ("zoo", field); + ASSERT_STREQ("tut", reader.GetValue()); + + ASSERT_FALSE(reader.GetNextField(&field)); +} + +TEST(ProcCpuInfoReaderTest, SkipTwoEmptyLines) { + ScopedTestFile file("foo : bar\n\n\nfoo : bar\n"); + ASSERT_TRUE(file.IsOk()); + ProcCpuInfoReader reader(file.GetFd()); + + const char* field; + ASSERT_TRUE(reader.GetNextField(&field)); + ASSERT_STREQ("foo", field); + ASSERT_STREQ("bar", reader.GetValue()); + + ASSERT_TRUE(reader.GetNextField(&field)); + ASSERT_STREQ("foo", field); + ASSERT_STREQ("bar", reader.GetValue()); + + 
ASSERT_FALSE(reader.GetNextField(&field)); +} + +TEST(ProcCpuInfoReaderTest, FieldWithSpaces) { + ScopedTestFile file("foo bar : zoo\n"); + ASSERT_TRUE(file.IsOk()); + ProcCpuInfoReader reader(file.GetFd()); + + const char* field; + ASSERT_TRUE(reader.GetNextField(&field)); + ASSERT_STREQ("foo bar", field); + ASSERT_STREQ("zoo", reader.GetValue()); + + ASSERT_FALSE(reader.GetNextField(&field)); +} + +TEST(ProcCpuInfoReaderTest, EmptyValue) { + ScopedTestFile file("foo :\n"); + ASSERT_TRUE(file.IsOk()); + ProcCpuInfoReader reader(file.GetFd()); + + const char* field; + ASSERT_TRUE(reader.GetNextField(&field)); + ASSERT_STREQ("foo", field); + size_t value_len; + ASSERT_STREQ("", reader.GetValueAndLen(&value_len)); + ASSERT_EQ(0U, value_len); + + ASSERT_FALSE(reader.GetNextField(&field)); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/linux/sender/google_crash_report_sender.cc b/TMessagesProj/jni/third_party/breakpad/src/client/linux/sender/google_crash_report_sender.cc new file mode 100644 index 0000000000..ec6c06e87d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/linux/sender/google_crash_report_sender.cc @@ -0,0 +1,104 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +#include "common/linux/google_crashdump_uploader.h" +#include "third_party/linux/include/gflags/gflags.h" +#include +#include + +#include "common/using_std_string.h" + +DEFINE_string(crash_server, "https://clients2.google.com/cr", + "The crash server to upload minidumps to."); +DEFINE_string(product_name, "", + "The product name that the minidump corresponds to."); +DEFINE_string(product_version, "", + "The version of the product that produced the minidump."); +DEFINE_string(client_id, "", + "The client GUID"); +DEFINE_string(minidump_path, "", + "The path of the minidump file."); +DEFINE_string(ptime, "", + "The process uptime in milliseconds."); +DEFINE_string(ctime, "", + "The cumulative process uptime in milliseconds."); +DEFINE_string(email, "", + "The user's email address."); +DEFINE_string(comments, "", + "Extra user comments"); +DEFINE_string(proxy_host, "", + "Proxy host"); +DEFINE_string(proxy_userpasswd, "", + "Proxy username/password in user:pass format."); + + +bool CheckForRequiredFlagsOrDie() { + string error_text = ""; + if (FLAGS_product_name.empty()) { + error_text.append("\nProduct name must be specified."); + } + + if (FLAGS_product_version.empty()) { + error_text.append("\nProduct version must be specified."); + } + + if (FLAGS_client_id.empty()) { + error_text.append("\nClient ID must be specified."); + } + + if (FLAGS_minidump_path.empty()) { + error_text.append("\nMinidump pathname must be specified."); + } + + if (!error_text.empty()) { + std::cout << error_text; + return false; + } + return true; +} + +int main(int argc, char *argv[]) { + google::InitGoogleLogging(argv[0]); + google::ParseCommandLineFlags(&argc, &argv, true); + if (!CheckForRequiredFlagsOrDie()) { + return 1; + } + google_breakpad::GoogleCrashdumpUploader g(FLAGS_product_name, + FLAGS_product_version, + FLAGS_client_id, + FLAGS_ptime, + FLAGS_ctime, + FLAGS_email, + FLAGS_comments, + FLAGS_minidump_path, + FLAGS_crash_server, + FLAGS_proxy_host, + FLAGS_proxy_userpasswd); + g.Upload(NULL, NULL, NULL); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/minidump_file_writer-inl.h b/TMessagesProj/jni/third_party/breakpad/src/client/minidump_file_writer-inl.h new file mode 100644 index 0000000000..0e12e00b69 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/minidump_file_writer-inl.h @@ -0,0 +1,97 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// minidump_file_writer-inl.h: Minidump file writer implementation.
+//
+// See minidump_file_writer.h for documentation.
+
+#ifndef CLIENT_MINIDUMP_FILE_WRITER_INL_H__
+#define CLIENT_MINIDUMP_FILE_WRITER_INL_H__
+
+#include <assert.h>
+
+#include "client/minidump_file_writer.h"
+#include "google_breakpad/common/minidump_size.h"
+
+namespace google_breakpad {
+
+template<typename MDType>
+inline bool TypedMDRVA<MDType>::Allocate() {
+  allocation_state_ = SINGLE_OBJECT;
+  return UntypedMDRVA::Allocate(minidump_size<MDType>::size());
+}
+
+template<typename MDType>
+inline bool TypedMDRVA<MDType>::Allocate(size_t additional) {
+  allocation_state_ = SINGLE_OBJECT;
+  return UntypedMDRVA::Allocate(minidump_size<MDType>::size() + additional);
+}
+
+template<typename MDType>
+inline bool TypedMDRVA<MDType>::AllocateArray(size_t count) {
+  assert(count);
+  allocation_state_ = ARRAY;
+  return UntypedMDRVA::Allocate(minidump_size<MDType>::size() * count);
+}
+
+template<typename MDType>
+inline bool TypedMDRVA<MDType>::AllocateObjectAndArray(size_t count,
+                                                       size_t length) {
+  assert(count && length);
+  allocation_state_ = SINGLE_OBJECT_WITH_ARRAY;
+  return UntypedMDRVA::Allocate(minidump_size<MDType>::size() + count * length);
+}
+
+template<typename MDType>
+inline bool TypedMDRVA<MDType>::CopyIndex(unsigned int index, MDType *item) {
+  assert(allocation_state_ == ARRAY);
+  return writer_->Copy(
+      static_cast<MDRVA>(position_ + index * minidump_size<MDType>::size()),
+      item, minidump_size<MDType>::size());
+}
+
+template<typename MDType>
+inline bool TypedMDRVA<MDType>::CopyIndexAfterObject(unsigned int index,
+                                                     const void *src,
+                                                     size_t length) {
+  assert(allocation_state_ == SINGLE_OBJECT_WITH_ARRAY);
+  return writer_->Copy(
+      static_cast<MDRVA>(position_ + minidump_size<MDType>::size() +
+                         index * length),
+      src, length);
+}
+
+template<typename MDType>
+inline bool TypedMDRVA<MDType>::Flush() {
+  return writer_->Copy(position_, &data_, minidump_size<MDType>::size());
+}
+
+}  // namespace google_breakpad
+
+#endif  // CLIENT_MINIDUMP_FILE_WRITER_INL_H__
diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/minidump_file_writer.cc b/TMessagesProj/jni/third_party/breakpad/src/client/minidump_file_writer.cc
new file mode 100644
index 0000000000..9e90533532
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/client/minidump_file_writer.cc
@@ -0,0 +1,284 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// minidump_file_writer.cc: Minidump file writer implementation. +// +// See minidump_file_writer.h for documentation. + +#include +#include +#include +#include +#include + +#include "client/minidump_file_writer-inl.h" +#include "common/linux/linux_libc_support.h" +#include "common/string_conversion.h" +#if defined(__linux__) && __linux__ +#include "third_party/lss/linux_syscall_support.h" +#endif + +namespace google_breakpad { + +const MDRVA MinidumpFileWriter::kInvalidMDRVA = static_cast(-1); + +MinidumpFileWriter::MinidumpFileWriter() + : file_(-1), + close_file_when_destroyed_(true), + position_(0), + size_(0) { +} + +MinidumpFileWriter::~MinidumpFileWriter() { + if (close_file_when_destroyed_) + Close(); +} + +bool MinidumpFileWriter::Open(const char *path) { + assert(file_ == -1); +#if defined(__linux__) && __linux__ + file_ = sys_open(path, O_WRONLY | O_CREAT | O_EXCL, 0600); +#else + file_ = open(path, O_WRONLY | O_CREAT | O_EXCL, 0600); +#endif + + return file_ != -1; +} + +void MinidumpFileWriter::SetFile(const int file) { + assert(file_ == -1); + file_ = file; + close_file_when_destroyed_ = false; +} + +bool MinidumpFileWriter::Close() { + bool result = true; + + if (file_ != -1) { + if (-1 == ftruncate(file_, position_)) { + return false; + } +#if defined(__linux__) && __linux__ + result = (sys_close(file_) == 0); +#else + result = (close(file_) == 0); +#endif + file_ = -1; + } + + return result; +} + +bool MinidumpFileWriter::CopyStringToMDString(const wchar_t *str, + unsigned int length, + TypedMDRVA *mdstring) { + bool result = true; + if (sizeof(wchar_t) == sizeof(uint16_t)) { + // Shortcut if wchar_t is the same size as MDString's buffer + result = mdstring->Copy(str, mdstring->get()->length); + } else { + uint16_t out[2]; + int out_idx = 0; + + // Copy the string character by character + while (length && result) { + UTF32ToUTF16Char(*str, out); + if (!out[0]) + return false; + + // Process one character at a time + --length; + ++str; + + // Append the one or two UTF-16 characters. The first one will be non- + // zero, but the second one may be zero, depending on the conversion from + // UTF-32. + int out_count = out[1] ? 
2 : 1; + size_t out_size = sizeof(uint16_t) * out_count; + result = mdstring->CopyIndexAfterObject(out_idx, out, out_size); + out_idx += out_count; + } + } + return result; +} + +bool MinidumpFileWriter::CopyStringToMDString(const char *str, + unsigned int length, + TypedMDRVA *mdstring) { + bool result = true; + uint16_t out[2]; + int out_idx = 0; + + // Copy the string character by character + while (length && result) { + int conversion_count = UTF8ToUTF16Char(str, length, out); + if (!conversion_count) + return false; + + // Move the pointer along based on the nubmer of converted characters + length -= conversion_count; + str += conversion_count; + + // Append the one or two UTF-16 characters + int out_count = out[1] ? 2 : 1; + size_t out_size = sizeof(uint16_t) * out_count; + result = mdstring->CopyIndexAfterObject(out_idx, out, out_size); + out_idx += out_count; + } + return result; +} + +template +bool MinidumpFileWriter::WriteStringCore(const CharType *str, + unsigned int length, + MDLocationDescriptor *location) { + assert(str); + assert(location); + // Calculate the mdstring length by either limiting to |length| as passed in + // or by finding the location of the NULL character. + unsigned int mdstring_length = 0; + if (!length) + length = INT_MAX; + for (; mdstring_length < length && str[mdstring_length]; ++mdstring_length) + ; + + // Allocate the string buffer + TypedMDRVA mdstring(this); + if (!mdstring.AllocateObjectAndArray(mdstring_length + 1, sizeof(uint16_t))) + return false; + + // Set length excluding the NULL and copy the string + mdstring.get()->length = + static_cast(mdstring_length * sizeof(uint16_t)); + bool result = CopyStringToMDString(str, mdstring_length, &mdstring); + + // NULL terminate + if (result) { + uint16_t ch = 0; + result = mdstring.CopyIndexAfterObject(mdstring_length, &ch, sizeof(ch)); + + if (result) + *location = mdstring.location(); + } + + return result; +} + +bool MinidumpFileWriter::WriteString(const wchar_t *str, unsigned int length, + MDLocationDescriptor *location) { + return WriteStringCore(str, length, location); +} + +bool MinidumpFileWriter::WriteString(const char *str, unsigned int length, + MDLocationDescriptor *location) { + return WriteStringCore(str, length, location); +} + +bool MinidumpFileWriter::WriteMemory(const void *src, size_t size, + MDMemoryDescriptor *output) { + assert(src); + assert(output); + UntypedMDRVA mem(this); + + if (!mem.Allocate(size)) + return false; + if (!mem.Copy(src, mem.size())) + return false; + + output->start_of_memory_range = reinterpret_cast(src); + output->memory = mem.location(); + + return true; +} + +MDRVA MinidumpFileWriter::Allocate(size_t size) { + assert(size); + assert(file_ != -1); + size_t aligned_size = (size + 7) & ~7; // 64-bit alignment + + if (position_ + aligned_size > size_) { + size_t growth = aligned_size; + size_t minimal_growth = getpagesize(); + + // Ensure that the file grows by at least the size of a memory page + if (growth < minimal_growth) + growth = minimal_growth; + + size_t new_size = size_ + growth; + if (ftruncate(file_, new_size) != 0) + return kInvalidMDRVA; + + size_ = new_size; + } + + MDRVA current_position = position_; + position_ += static_cast(aligned_size); + + return current_position; +} + +bool MinidumpFileWriter::Copy(MDRVA position, const void *src, ssize_t size) { + assert(src); + assert(size); + assert(file_ != -1); + + // Ensure that the data will fit in the allocated space + if (static_cast(size + position) > size_) + return false; + + // Seek and 
write the data +#if defined(__linux__) && __linux__ + if (sys_lseek(file_, position, SEEK_SET) == static_cast(position)) { + if (sys_write(file_, src, size) == size) { +#else + if (lseek(file_, position, SEEK_SET) == static_cast(position)) { + if (write(file_, src, size) == size) { +#endif + return true; + } + } + + return false; +} + +bool UntypedMDRVA::Allocate(size_t size) { + assert(size_ == 0); + size_ = size; + position_ = writer_->Allocate(size_); + return position_ != MinidumpFileWriter::kInvalidMDRVA; +} + +bool UntypedMDRVA::Copy(MDRVA pos, const void *src, size_t size) { + assert(src); + assert(size); + assert(pos + size <= position_ + size_); + return writer_->Copy(pos, src, size); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/minidump_file_writer.h b/TMessagesProj/jni/third_party/breakpad/src/client/minidump_file_writer.h new file mode 100644 index 0000000000..ce32b6d087 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/minidump_file_writer.h @@ -0,0 +1,272 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// minidump_file_writer.h: Implements file-based minidump generation. It's +// intended to be used with the Google Breakpad open source crash handling +// project. + +#ifndef CLIENT_MINIDUMP_FILE_WRITER_H__ +#define CLIENT_MINIDUMP_FILE_WRITER_H__ + +#include + +#include "google_breakpad/common/minidump_format.h" + +namespace google_breakpad { + +class UntypedMDRVA; +template class TypedMDRVA; + +// The user of this class can Open() a file and add minidump streams, data, and +// strings using the definitions in minidump_format.h. Since this class is +// expected to be used in a situation where the current process may be +// damaged, it will not allocate heap memory. 
+// Sample usage: +// MinidumpFileWriter writer; +// writer.Open("/tmp/minidump.dmp"); +// TypedMDRVA header(&writer_); +// header.Allocate(); +// header->get()->signature = MD_HEADER_SIGNATURE; +// : +// writer.Close(); +// +// An alternative is to use SetFile and provide a file descriptor: +// MinidumpFileWriter writer; +// writer.SetFile(minidump_fd); +// TypedMDRVA header(&writer_); +// header.Allocate(); +// header->get()->signature = MD_HEADER_SIGNATURE; +// : +// writer.Close(); + +class MinidumpFileWriter { +public: + // Invalid MDRVA (Minidump Relative Virtual Address) + // returned on failed allocation + static const MDRVA kInvalidMDRVA; + + MinidumpFileWriter(); + ~MinidumpFileWriter(); + + // Open |path| as the destination of the minidump data. If |path| already + // exists, then Open() will fail. + // Return true on success, or false on failure. + bool Open(const char *path); + + // Sets the file descriptor |file| as the destination of the minidump data. + // Can be used as an alternative to Open() when a file descriptor is + // available. + // Note that |fd| is not closed when the instance of MinidumpFileWriter is + // destroyed. + void SetFile(const int file); + + // Close the current file (that was either created when Open was called, or + // specified with SetFile). + // Return true on success, or false on failure. + bool Close(); + + // Copy the contents of |str| to a MDString and write it to the file. + // |str| is expected to be either UTF-16 or UTF-32 depending on the size + // of wchar_t. + // Maximum |length| of characters to copy from |str|, or specify 0 to use the + // entire NULL terminated string. Copying will stop at the first NULL. + // |location| the allocated location + // Return true on success, or false on failure + bool WriteString(const wchar_t *str, unsigned int length, + MDLocationDescriptor *location); + + // Same as above, except with |str| as a UTF-8 string + bool WriteString(const char *str, unsigned int length, + MDLocationDescriptor *location); + + // Write |size| bytes starting at |src| into the current position. + // Return true on success and set |output| to position, or false on failure + bool WriteMemory(const void *src, size_t size, MDMemoryDescriptor *output); + + // Copies |size| bytes from |src| to |position| + // Return true on success, or false on failure + bool Copy(MDRVA position, const void *src, ssize_t size); + + // Return the current position for writing to the minidump + inline MDRVA position() const { return position_; } + + private: + friend class UntypedMDRVA; + + // Allocates an area of |size| bytes. + // Returns the position of the allocation, or kInvalidMDRVA if it was + // unable to allocate the bytes. + MDRVA Allocate(size_t size); + + // The file descriptor for the output file. + int file_; + + // Whether |file_| should be closed when the instance is destroyed. + bool close_file_when_destroyed_; + + // Current position in buffer + MDRVA position_; + + // Current allocated size + size_t size_; + + // Copy |length| characters from |str| to |mdstring|. These are distinct + // because the underlying MDString is a UTF-16 based string. The wchar_t + // variant may need to create a MDString that has more characters than the + // source |str|, whereas the UTF-8 variant may coalesce characters to form + // a single UTF-16 character. 
+ bool CopyStringToMDString(const wchar_t *str, unsigned int length, + TypedMDRVA *mdstring); + bool CopyStringToMDString(const char *str, unsigned int length, + TypedMDRVA *mdstring); + + // The common templated code for writing a string + template + bool WriteStringCore(const CharType *str, unsigned int length, + MDLocationDescriptor *location); +}; + +// Represents an untyped allocated chunk +class UntypedMDRVA { + public: + explicit UntypedMDRVA(MinidumpFileWriter *writer) + : writer_(writer), + position_(writer->position()), + size_(0) {} + + // Allocates |size| bytes. Must not call more than once. + // Return true on success, or false on failure + bool Allocate(size_t size); + + // Returns the current position or kInvalidMDRVA if allocation failed + inline MDRVA position() const { return position_; } + + // Number of bytes allocated + inline size_t size() const { return size_; } + + // Return size and position + inline MDLocationDescriptor location() const { + MDLocationDescriptor location = { static_cast(size_), + position_ }; + return location; + } + + // Copy |size| bytes starting at |src| into the minidump at |position| + // Return true on success, or false on failure + bool Copy(MDRVA position, const void *src, size_t size); + + // Copy |size| bytes from |src| to the current position + inline bool Copy(const void *src, size_t size) { + return Copy(position_, src, size); + } + + protected: + // Writer we associate with + MinidumpFileWriter *writer_; + + // Position of the start of the data + MDRVA position_; + + // Allocated size + size_t size_; +}; + +// Represents a Minidump object chunk. Additional memory can be allocated at +// the end of the object as a: +// - single allocation +// - Array of MDType objects +// - A MDType object followed by an array +template +class TypedMDRVA : public UntypedMDRVA { + public: + // Constructs an unallocated MDRVA + explicit TypedMDRVA(MinidumpFileWriter *writer) + : UntypedMDRVA(writer), + data_(), + allocation_state_(UNALLOCATED) {} + + inline ~TypedMDRVA() { + // Ensure that the data_ object is written out + if (allocation_state_ != ARRAY) + Flush(); + } + + // Address of object data_ of MDType. This is not declared const as the + // typical usage will be to access the underlying |data_| object as to + // alter its contents. + MDType *get() { return &data_; } + + // Allocates minidump_size::size() bytes. + // Must not call more than once. + // Return true on success, or false on failure + bool Allocate(); + + // Allocates minidump_size::size() + |additional| bytes. + // Must not call more than once. + // Return true on success, or false on failure + bool Allocate(size_t additional); + + // Allocate an array of |count| elements of MDType. + // Must not call more than once. + // Return true on success, or false on failure + bool AllocateArray(size_t count); + + // Allocate an array of |count| elements of |size| after object of MDType + // Must not call more than once. + // Return true on success, or false on failure + bool AllocateObjectAndArray(size_t count, size_t size); + + // Copy |item| to |index| + // Must have been allocated using AllocateArray(). + // Return true on success, or false on failure + bool CopyIndex(unsigned int index, MDType *item); + + // Copy |size| bytes starting at |str| to |index| + // Must have been allocated using AllocateObjectAndArray(). 
+ // Return true on success, or false on failure + bool CopyIndexAfterObject(unsigned int index, const void *src, size_t size); + + // Write data_ + bool Flush(); + + private: + enum AllocationState { + UNALLOCATED = 0, + SINGLE_OBJECT, + ARRAY, + SINGLE_OBJECT_WITH_ARRAY + }; + + MDType data_; + AllocationState allocation_state_; +}; + +} // namespace google_breakpad + +#endif // CLIENT_MINIDUMP_FILE_WRITER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/client/minidump_file_writer_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/client/minidump_file_writer_unittest.cc new file mode 100644 index 0000000000..60c364e689 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/client/minidump_file_writer_unittest.cc @@ -0,0 +1,179 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
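Before the unit test, a compact illustration of the writer API may help. The sketch below mirrors the "Sample usage" block documented in minidump_file_writer.h: it writes nothing but a minidump header, relying on MDRawHeader, MD_HEADER_SIGNATURE and MD_HEADER_VERSION from minidump_format.h. It is an editor's sketch under those assumptions, not code added by this change; the real callers are files such as linux/minidump_writer/minidump_writer.cc in this import.

    // Editor's sketch (not part of the import): the smallest useful run of
    // MinidumpFileWriter -- reserve a header, fill it in, flush, close.
    #include "client/minidump_file_writer.h"
    #include "google_breakpad/common/minidump_format.h"

    static bool WriteBareHeader(const char* path) {
      google_breakpad::MinidumpFileWriter writer;
      if (!writer.Open(path))            // opened with O_EXCL: fails if |path| exists
        return false;
      {
        google_breakpad::TypedMDRVA<MDRawHeader> header(&writer);
        if (!header.Allocate())          // reserves sizeof(MDRawHeader), 8-byte aligned
          return false;
        header.get()->signature = MD_HEADER_SIGNATURE;
        header.get()->version = MD_HEADER_VERSION;
        header.get()->stream_count = 0;  // a real dump also allocates a stream directory
      }                                  // ~TypedMDRVA flushes |data_| into the file
      return writer.Close();             // truncates to the bytes actually allocated
    }

Close() is what shrinks the file back to the final write position, which is why the test below can compare the on-disk bytes against a fixed expected array.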
+ +// Author: waylonis@google.com (Dan Waylonis) + +/* + g++ -I../ ../common/convert_UTF.c \ + ../common/string_conversion.cc \ + minidump_file_writer.cc \ + minidump_file_writer_unittest.cc \ + -o minidump_file_writer_unittest + */ + +#include +#include + +#include "minidump_file_writer-inl.h" + +using google_breakpad::MinidumpFileWriter; + +#define ASSERT_TRUE(cond) \ +if (!(cond)) { \ + fprintf(stderr, "FAILED: %s at %s:%d\n", #cond, __FILE__, __LINE__); \ + return false; \ +} + +#define ASSERT_EQ(e1, e2) ASSERT_TRUE((e1) == (e2)) +#define ASSERT_NE(e1, e2) ASSERT_TRUE((e1) != (e2)) + +struct StringStructure { + unsigned long integer_value; + MDLocationDescriptor first_string; + MDLocationDescriptor second_string; +}; + +struct ArrayStructure { + unsigned char char_value; + unsigned short short_value; + unsigned long long_value; +}; + +typedef struct { + unsigned long count; + ArrayStructure array[0]; +} ObjectAndArrayStructure; + +static bool WriteFile(const char *path) { + MinidumpFileWriter writer; + if (writer.Open(path)) { + // Test a single structure + google_breakpad::TypedMDRVA strings(&writer); + ASSERT_TRUE(strings.Allocate()); + strings.get()->integer_value = 0xBEEF; + const char *first = "First String"; + ASSERT_TRUE(writer.WriteString(first, 0, &strings.get()->first_string)); + const wchar_t *second = L"Second String"; + ASSERT_TRUE(writer.WriteString(second, 0, &strings.get()->second_string)); + + // Test an array structure + google_breakpad::TypedMDRVA array(&writer); + unsigned int count = 10; + ASSERT_TRUE(array.AllocateArray(count)); + for (unsigned char i = 0; i < count; ++i) { + ArrayStructure local; + local.char_value = i; + local.short_value = i + 1; + local.long_value = i + 2; + ASSERT_TRUE(array.CopyIndex(i, &local)); + } + + // Test an object followed by an array + google_breakpad::TypedMDRVA obj_array(&writer); + ASSERT_TRUE(obj_array.AllocateObjectAndArray(count, + sizeof(ArrayStructure))); + obj_array.get()->count = count; + for (unsigned char i = 0; i < count; ++i) { + ArrayStructure local; + local.char_value = i; + local.short_value = i + 1; + local.long_value = i + 2; + ASSERT_TRUE(obj_array.CopyIndexAfterObject(i, &local, sizeof(local))); + } + } + + return writer.Close(); +} + +static bool CompareFile(const char *path) { + unsigned long expected[] = { +#if defined(__BIG_ENDIAN__) + 0x0000beef, 0x0000001e, 0x00000018, 0x00000020, 0x00000038, 0x00000000, + 0x00000018, 0x00460069, 0x00720073, 0x00740020, 0x00530074, 0x00720069, + 0x006e0067, 0x00000000, 0x0000001a, 0x00530065, 0x0063006f, 0x006e0064, + 0x00200053, 0x00740072, 0x0069006e, 0x00670000, 0x00000001, 0x00000002, + 0x01000002, 0x00000003, 0x02000003, 0x00000004, 0x03000004, 0x00000005, + 0x04000005, 0x00000006, 0x05000006, 0x00000007, 0x06000007, 0x00000008, + 0x07000008, 0x00000009, 0x08000009, 0x0000000a, 0x0900000a, 0x0000000b, + 0x0000000a, 0x00000001, 0x00000002, 0x01000002, 0x00000003, 0x02000003, + 0x00000004, 0x03000004, 0x00000005, 0x04000005, 0x00000006, 0x05000006, + 0x00000007, 0x06000007, 0x00000008, 0x07000008, 0x00000009, 0x08000009, + 0x0000000a, 0x0900000a, 0x0000000b, 0x00000000 +#else + 0x0000beef, 0x0000001e, 0x00000018, 0x00000020, + 0x00000038, 0x00000000, 0x00000018, 0x00690046, + 0x00730072, 0x00200074, 0x00740053, 0x00690072, + 0x0067006e, 0x00000000, 0x0000001a, 0x00650053, + 0x006f0063, 0x0064006e, 0x00530020, 0x00720074, + 0x006e0069, 0x00000067, 0x00011e00, 0x00000002, + 0x00021e01, 0x00000003, 0x00031e02, 0x00000004, + 0x00041e03, 0x00000005, 0x00051e04, 0x00000006, + 
0x00061e05, 0x00000007, 0x00071e06, 0x00000008, + 0x00081e07, 0x00000009, 0x00091e08, 0x0000000a, + 0x000a1e09, 0x0000000b, 0x0000000a, 0x00011c00, + 0x00000002, 0x00021c01, 0x00000003, 0x00031c02, + 0x00000004, 0x00041c03, 0x00000005, 0x00051c04, + 0x00000006, 0x00061c05, 0x00000007, 0x00071c06, + 0x00000008, 0x00081c07, 0x00000009, 0x00091c08, + 0x0000000a, 0x000a1c09, 0x0000000b, 0x00000000, +#endif + }; + size_t expected_byte_count = sizeof(expected); + int fd = open(path, O_RDONLY, 0600); + void *buffer = malloc(expected_byte_count); + ASSERT_NE(fd, -1); + ASSERT_TRUE(buffer); + ASSERT_EQ(read(fd, buffer, expected_byte_count), + static_cast(expected_byte_count)); + + char *b1, *b2; + b1 = reinterpret_cast(buffer); + b2 = reinterpret_cast(expected); + while (*b1 == *b2) { + b1++; + b2++; + } + + printf("%p\n", reinterpret_cast(b1 - (char*)buffer)); + + ASSERT_EQ(memcmp(buffer, expected, expected_byte_count), 0); + return true; +} + +static bool RunTests() { + const char *path = "/tmp/minidump_file_writer_unittest.dmp"; + ASSERT_TRUE(WriteFile(path)); + ASSERT_TRUE(CompareFile(path)); + unlink(path); + return true; +} + +extern "C" int main(int argc, const char *argv[]) { + return RunTests() ? 0 : 1; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/android/breakpad_getcontext.S b/TMessagesProj/jni/third_party/breakpad/src/common/android/breakpad_getcontext.S new file mode 100644 index 0000000000..fd6326adfe --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/android/breakpad_getcontext.S @@ -0,0 +1,489 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// A minimalistic implementation of getcontext() to be used by +// Google Breakpad on Android. 
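A note on why this file exists: Android's Bionic libc has historically not shipped getcontext(), so Breakpad carries this hand-written replacement and the dump-writer code captures the crashing thread's CPU state through it. The fragment below is an editor's sketch of a caller; the extern declaration and the <ucontext.h> include are illustrative assumptions (inside Breakpad the declaration comes from its Android compatibility headers), not part of the imported file.

    // Editor's sketch, not part of the import: capture the calling thread's
    // registers and signal mask the same way the in-tree handler code does.
    #include <ucontext.h>   // assumed: a ucontext_t definition is in scope

    extern "C" int breakpad_getcontext(ucontext_t* ucp);  // defined in this .S file

    static bool CaptureCurrentContext(ucontext_t* out) {
      // Returns 0 on success; the general-purpose registers, stack pointer,
      // caller's return address and signal mask are recorded, while FPU
      // handling varies per architecture (see the per-arch sections below).
      return breakpad_getcontext(out) == 0;
    }

Keeping the routine in assembly lets it record registers before they can be clobbered and avoids relying on libc facilities that may be unavailable in a crashed process.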
+ +#include "common/android/ucontext_constants.h" + +/* int getcontext (ucontext_t *ucp) */ + +#if defined(__arm__) + + .text + .global breakpad_getcontext + .hidden breakpad_getcontext + .type breakpad_getcontext, #function + .align 0 + .fnstart +breakpad_getcontext: + + /* First, save r4-r11 */ + add r1, r0, #(MCONTEXT_GREGS_OFFSET + 4*4) + stm r1, {r4-r11} + + /* r12 is a scratch register, don't save it */ + + /* Save sp and lr explicitly. */ + /* - sp can't be stored with stmia in Thumb-2 */ + /* - STM instructions that store sp and pc are deprecated in ARM */ + str sp, [r0, #(MCONTEXT_GREGS_OFFSET + 13*4)] + str lr, [r0, #(MCONTEXT_GREGS_OFFSET + 14*4)] + + /* Save the caller's address in 'pc' */ + str lr, [r0, #(MCONTEXT_GREGS_OFFSET + 15*4)] + + /* Save ucontext_t* pointer across next call */ + mov r4, r0 + + /* Call sigprocmask(SIG_BLOCK, NULL, &(ucontext->uc_sigmask)) */ + mov r0, #0 /* SIG_BLOCK */ + mov r1, #0 /* NULL */ + add r2, r4, #UCONTEXT_SIGMASK_OFFSET + bl sigprocmask(PLT) + + /* Intentionally do not save the FPU state here. This is because on + * Linux/ARM, one should instead use ptrace(PTRACE_GETFPREGS) or + * ptrace(PTRACE_GETVFPREGS) to get it. + * + * Note that a real implementation of getcontext() would need to save + * this here to allow setcontext()/swapcontext() to work correctly. + */ + + /* Restore the values of r4 and lr */ + mov r0, r4 + ldr lr, [r0, #(MCONTEXT_GREGS_OFFSET + 14*4)] + ldr r4, [r0, #(MCONTEXT_GREGS_OFFSET + 4*4)] + + /* Return 0 */ + mov r0, #0 + bx lr + + .fnend + .size breakpad_getcontext, . - breakpad_getcontext + +#elif defined(__aarch64__) + +#define _NSIG 64 +#define __NR_rt_sigprocmask 135 + + .text + .global breakpad_getcontext + .hidden breakpad_getcontext + .type breakpad_getcontext, #function + .align 4 + .cfi_startproc +breakpad_getcontext: + + /* The saved context will return to the getcontext() call point + with a return value of 0 */ + str xzr, [x0, MCONTEXT_GREGS_OFFSET + 0 * REGISTER_SIZE] + + stp x18, x19, [x0, MCONTEXT_GREGS_OFFSET + 18 * REGISTER_SIZE] + stp x20, x21, [x0, MCONTEXT_GREGS_OFFSET + 20 * REGISTER_SIZE] + stp x22, x23, [x0, MCONTEXT_GREGS_OFFSET + 22 * REGISTER_SIZE] + stp x24, x25, [x0, MCONTEXT_GREGS_OFFSET + 24 * REGISTER_SIZE] + stp x26, x27, [x0, MCONTEXT_GREGS_OFFSET + 26 * REGISTER_SIZE] + stp x28, x29, [x0, MCONTEXT_GREGS_OFFSET + 28 * REGISTER_SIZE] + str x30, [x0, MCONTEXT_GREGS_OFFSET + 30 * REGISTER_SIZE] + + /* Place LR into the saved PC, this will ensure that when + switching to this saved context with setcontext() control + will pass back to the caller of getcontext(), we have + already arranged to return the appropriate return value in x0 + above. */ + str x30, [x0, MCONTEXT_PC_OFFSET] + + /* Save the current SP */ + mov x2, sp + str x2, [x0, MCONTEXT_SP_OFFSET] + + /* Initialize the pstate. */ + str xzr, [x0, MCONTEXT_PSTATE_OFFSET] + + /* Figure out where to place the first context extension + block. */ + add x2, x0, #MCONTEXT_EXTENSION_OFFSET + + /* Write the context extension fpsimd header. */ + mov w3, #(FPSIMD_MAGIC & 0xffff) + movk w3, #(FPSIMD_MAGIC >> 16), lsl #16 + str w3, [x2, #FPSIMD_CONTEXT_MAGIC_OFFSET] + mov w3, #FPSIMD_CONTEXT_SIZE + str w3, [x2, #FPSIMD_CONTEXT_SIZE_OFFSET] + + /* Fill in the FP SIMD context. 
*/ + add x3, x2, #(FPSIMD_CONTEXT_VREGS_OFFSET + 8 * SIMD_REGISTER_SIZE) + stp d8, d9, [x3], #(2 * SIMD_REGISTER_SIZE) + stp d10, d11, [x3], #(2 * SIMD_REGISTER_SIZE) + stp d12, d13, [x3], #(2 * SIMD_REGISTER_SIZE) + stp d14, d15, [x3], #(2 * SIMD_REGISTER_SIZE) + + add x3, x2, FPSIMD_CONTEXT_FPSR_OFFSET + + mrs x4, fpsr + str w4, [x3] + + mrs x4, fpcr + str w4, [x3, FPSIMD_CONTEXT_FPCR_OFFSET - FPSIMD_CONTEXT_FPSR_OFFSET] + + /* Write the termination context extension header. */ + add x2, x2, #FPSIMD_CONTEXT_SIZE + + str xzr, [x2, #FPSIMD_CONTEXT_MAGIC_OFFSET] + str xzr, [x2, #FPSIMD_CONTEXT_SIZE_OFFSET] + + /* Grab the signal mask */ + /* rt_sigprocmask (SIG_BLOCK, NULL, &ucp->uc_sigmask, _NSIG8) */ + add x2, x0, #UCONTEXT_SIGMASK_OFFSET + mov x0, #0 /* SIG_BLOCK */ + mov x1, #0 /* NULL */ + mov x3, #(_NSIG / 8) + mov x8, #__NR_rt_sigprocmask + svc 0 + + /* Return x0 for success */ + mov x0, 0 + ret + + .cfi_endproc + .size breakpad_getcontext, . - breakpad_getcontext + +#elif defined(__i386__) + + .text + .global breakpad_getcontext + .hidden breakpad_getcontext + .align 4 + .type breakpad_getcontext, @function + +breakpad_getcontext: + + movl 4(%esp), %eax /* eax = uc */ + + /* Save register values */ + movl %ecx, MCONTEXT_ECX_OFFSET(%eax) + movl %edx, MCONTEXT_EDX_OFFSET(%eax) + movl %ebx, MCONTEXT_EBX_OFFSET(%eax) + movl %edi, MCONTEXT_EDI_OFFSET(%eax) + movl %esi, MCONTEXT_ESI_OFFSET(%eax) + movl %ebp, MCONTEXT_EBP_OFFSET(%eax) + + movl (%esp), %edx /* return address */ + lea 4(%esp), %ecx /* exclude return address from stack */ + mov %edx, MCONTEXT_EIP_OFFSET(%eax) + mov %ecx, MCONTEXT_ESP_OFFSET(%eax) + + xorl %ecx, %ecx + movw %fs, %cx + mov %ecx, MCONTEXT_FS_OFFSET(%eax) + + movl $0, MCONTEXT_EAX_OFFSET(%eax) + + /* Save floating point state to fpregstate, then update + * the fpregs pointer to point to it */ + leal UCONTEXT_FPREGS_MEM_OFFSET(%eax), %ecx + fnstenv (%ecx) + fldenv (%ecx) + mov %ecx, UCONTEXT_FPREGS_OFFSET(%eax) + + /* Save signal mask: sigprocmask(SIGBLOCK, NULL, &uc->uc_sigmask) */ + leal UCONTEXT_SIGMASK_OFFSET(%eax), %edx + xorl %ecx, %ecx + push %edx /* &uc->uc_sigmask */ + push %ecx /* NULL */ + push %ecx /* SIGBLOCK == 0 on i386 */ + call sigprocmask@PLT + addl $12, %esp + + movl $0, %eax + ret + + .size breakpad_getcontext, . - breakpad_getcontext + +#elif defined(__mips__) + +// This implementation is inspired by implementation of getcontext in glibc. 
+#if _MIPS_SIM == _ABIO32 +#include +#include +#include +#else +#include +#include +#endif + +// from asm/asm.h +#if _MIPS_SIM == _ABIO32 +#define ALSZ 7 +#define ALMASK ~7 +#define SZREG 4 +#else // _MIPS_SIM != _ABIO32 +#define ALSZ 15 +#define ALMASK ~15 +#define SZREG 8 +#endif + +#include // for __NR_rt_sigprocmask + +#define _NSIG8 128 / 8 +#define SIG_BLOCK 1 + + + .text +LOCALS_NUM = 1 // save gp on stack +FRAME_SIZE = ((LOCALS_NUM * SZREG) + ALSZ) & ALMASK + +GP_FRAME_OFFSET = FRAME_SIZE - (1 * SZREG) +MCONTEXT_REG_SIZE = 8 + +#if _MIPS_SIM == _ABIO32 + +NESTED (breakpad_getcontext, FRAME_SIZE, ra) + .mask 0x00000000, 0 + .fmask 0x00000000, 0 + + .set noreorder + .cpload t9 + .set reorder + + move a2, sp +#define _SP a2 + + addiu sp, -FRAME_SIZE + .cprestore GP_FRAME_OFFSET + + sw s0, (16 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sw s1, (17 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sw s2, (18 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sw s3, (19 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sw s4, (20 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sw s5, (21 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sw s6, (22 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sw s7, (23 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sw _SP, (29 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sw fp, (30 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sw ra, (31 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sw ra, MCONTEXT_PC_OFFSET(a0) + +#ifdef __mips_hard_float + s.d fs0, (20 * MCONTEXT_REG_SIZE + MCONTEXT_FPREGS_OFFSET)(a0) + s.d fs1, (22 * MCONTEXT_REG_SIZE + MCONTEXT_FPREGS_OFFSET)(a0) + s.d fs2, (24 * MCONTEXT_REG_SIZE + MCONTEXT_FPREGS_OFFSET)(a0) + s.d fs3, (26 * MCONTEXT_REG_SIZE + MCONTEXT_FPREGS_OFFSET)(a0) + s.d fs4, (28 * MCONTEXT_REG_SIZE + MCONTEXT_FPREGS_OFFSET)(a0) + s.d fs5, (30 * MCONTEXT_REG_SIZE + MCONTEXT_FPREGS_OFFSET)(a0) + + cfc1 v1, fcr31 + sw v1, MCONTEXT_FPC_CSR(a0) +#endif // __mips_hard_float + + /* rt_sigprocmask (SIG_BLOCK, NULL, &ucp->uc_sigmask, _NSIG8) */ + li a3, _NSIG8 + addu a2, a0, UCONTEXT_SIGMASK_OFFSET + move a1, zero + li a0, SIG_BLOCK + li v0, __NR_rt_sigprocmask + syscall + + addiu sp, FRAME_SIZE + jr ra + +END (breakpad_getcontext) +#else + +#ifndef NESTED +/* + * NESTED - declare nested routine entry point + */ +#define NESTED(symbol, framesize, rpc) \ + .globl symbol; \ + .align 2; \ + .type symbol,@function; \ + .ent symbol,0; \ +symbol: .frame sp, framesize, rpc; +#endif + +/* + * END - mark end of function + */ +#ifndef END +# define END(function) \ + .end function; \ + .size function,.-function +#endif + +/* int getcontext (ucontext_t *ucp) */ + +NESTED (breakpad_getcontext, FRAME_SIZE, ra) + .mask 0x10000000, 0 + .fmask 0x00000000, 0 + + move a2, sp +#define _SP a2 + move a3, gp +#define _GP a3 + + daddiu sp, -FRAME_SIZE + .cpsetup $25, GP_FRAME_OFFSET, breakpad_getcontext + + /* Store a magic flag. 
*/ + li v1, 1 + sd v1, (0 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) /* zero */ + + sd s0, (16 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sd s1, (17 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sd s2, (18 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sd s3, (19 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sd s4, (20 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sd s5, (21 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sd s6, (22 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sd s7, (23 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sd _GP, (28 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sd _SP, (29 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sd s8, (30 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sd ra, (31 * MCONTEXT_REG_SIZE + MCONTEXT_GREGS_OFFSET)(a0) + sd ra, MCONTEXT_PC_OFFSET(a0) + +#ifdef __mips_hard_float + s.d $f24, (24 * MCONTEXT_REG_SIZE + MCONTEXT_FPREGS_OFFSET)(a0) + s.d $f25, (25 * MCONTEXT_REG_SIZE + MCONTEXT_FPREGS_OFFSET)(a0) + s.d $f26, (26 * MCONTEXT_REG_SIZE + MCONTEXT_FPREGS_OFFSET)(a0) + s.d $f27, (27 * MCONTEXT_REG_SIZE + MCONTEXT_FPREGS_OFFSET)(a0) + s.d $f28, (28 * MCONTEXT_REG_SIZE + MCONTEXT_FPREGS_OFFSET)(a0) + s.d $f29, (29 * MCONTEXT_REG_SIZE + MCONTEXT_FPREGS_OFFSET)(a0) + s.d $f30, (30 * MCONTEXT_REG_SIZE + MCONTEXT_FPREGS_OFFSET)(a0) + s.d $f31, (31 * MCONTEXT_REG_SIZE + MCONTEXT_FPREGS_OFFSET)(a0) + + cfc1 v1, $31 + sw v1, MCONTEXT_FPC_CSR(a0) +#endif /* __mips_hard_float */ + +/* rt_sigprocmask (SIG_BLOCK, NULL, &ucp->uc_sigmask, _NSIG8) */ + li a3, _NSIG8 + daddu a2, a0, UCONTEXT_SIGMASK_OFFSET + move a1, zero + li a0, SIG_BLOCK + + li v0, __NR_rt_sigprocmask + syscall + + .cpreturn + daddiu sp, FRAME_SIZE + move v0, zero + jr ra + +END (breakpad_getcontext) +#endif // _MIPS_SIM == _ABIO32 + +#elif defined(__x86_64__) +/* The x64 implementation of breakpad_getcontext was derived in part + from the implementation of libunwind which requires the following + notice. */ +/* libunwind - a platform-independent unwind library + Copyright (C) 2008 Google, Inc + Contributed by Paul Pluzhnikov + Copyright (C) 2010 Konstantin Belousov + +This file is part of libunwind. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ + + .text + .global breakpad_getcontext + .hidden breakpad_getcontext + .align 4 + .type breakpad_getcontext, @function + +breakpad_getcontext: + .cfi_startproc + + /* Callee saved: RBX, RBP, R12-R15 */ + movq %r12, MCONTEXT_GREGS_R12(%rdi) + movq %r13, MCONTEXT_GREGS_R13(%rdi) + movq %r14, MCONTEXT_GREGS_R14(%rdi) + movq %r15, MCONTEXT_GREGS_R15(%rdi) + movq %rbp, MCONTEXT_GREGS_RBP(%rdi) + movq %rbx, MCONTEXT_GREGS_RBX(%rdi) + + /* Save argument registers (not strictly needed, but setcontext + restores them, so don't restore garbage). */ + movq %r8, MCONTEXT_GREGS_R8(%rdi) + movq %r9, MCONTEXT_GREGS_R9(%rdi) + movq %rdi, MCONTEXT_GREGS_RDI(%rdi) + movq %rsi, MCONTEXT_GREGS_RSI(%rdi) + movq %rdx, MCONTEXT_GREGS_RDX(%rdi) + movq %rax, MCONTEXT_GREGS_RAX(%rdi) + movq %rcx, MCONTEXT_GREGS_RCX(%rdi) + + /* Save fp state (not needed, except for setcontext not + restoring garbage). */ + leaq MCONTEXT_FPREGS_MEM(%rdi),%r8 + movq %r8, MCONTEXT_FPREGS_PTR(%rdi) + fnstenv (%r8) + stmxcsr FPREGS_OFFSET_MXCSR(%r8) + + leaq 8(%rsp), %rax /* exclude this call. */ + movq %rax, MCONTEXT_GREGS_RSP(%rdi) + + movq 0(%rsp), %rax + movq %rax, MCONTEXT_GREGS_RIP(%rdi) + + /* Save signal mask: sigprocmask(SIGBLOCK, NULL, &uc->uc_sigmask) */ + leaq UCONTEXT_SIGMASK_OFFSET(%rdi), %rdx // arg3 + xorq %rsi, %rsi // arg2 NULL + xorq %rdi, %rdi // arg1 SIGBLOCK == 0 + call sigprocmask@PLT + + /* Always return 0 for success, even if sigprocmask failed. */ + xorl %eax, %eax + ret + .cfi_endproc + .size breakpad_getcontext, . - breakpad_getcontext + +#else +#error "This file has not been ported for your CPU!" +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/android/breakpad_getcontext_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/android/breakpad_getcontext_unittest.cc new file mode 100644 index 0000000000..2c550bf280 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/android/breakpad_getcontext_unittest.cc @@ -0,0 +1,186 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#if defined(__x86_64__) +#include +#endif + +#include + +#include "breakpad_googletest_includes.h" +#include "common/android/ucontext_constants.h" + +template +struct CompileAssertEquals { + // a compilation error here indicates left and right are not equal. + char left_too_large[right - left]; + // a compilation error here indicates left and right are not equal. + char right_too_large[left - right]; +}; + +#define COMPILE_ASSERT_EQ(left, right, tag) \ + CompileAssertEquals tag; + +TEST(AndroidUContext, GRegsOffset) { +#if defined(__arm__) + // There is no gregs[] array on ARM, so compare to the offset of + // first register fields, since they're stored in order. + ASSERT_EQ(static_cast(MCONTEXT_GREGS_OFFSET), + offsetof(ucontext_t,uc_mcontext.arm_r0)); +#elif defined(__aarch64__) + // There is no gregs[] array on ARM, so compare to the offset of + // first register fields, since they're stored in order. + ASSERT_EQ(static_cast(MCONTEXT_GREGS_OFFSET), + offsetof(ucontext_t,uc_mcontext.regs[0])); + ASSERT_EQ(static_cast(MCONTEXT_SP_OFFSET), + offsetof(ucontext_t,uc_mcontext.sp)); + ASSERT_EQ(static_cast(MCONTEXT_PC_OFFSET), + offsetof(ucontext_t,uc_mcontext.pc)); + ASSERT_EQ(static_cast(MCONTEXT_PSTATE_OFFSET), + offsetof(ucontext_t,uc_mcontext.pstate)); + ASSERT_EQ(static_cast(MCONTEXT_EXTENSION_OFFSET), + offsetof(ucontext_t,uc_mcontext.__reserved)); +#elif defined(__i386__) + ASSERT_EQ(static_cast(MCONTEXT_GREGS_OFFSET), + offsetof(ucontext_t,uc_mcontext.gregs)); +#define CHECK_REG(x) \ + ASSERT_EQ(static_cast(MCONTEXT_##x##_OFFSET), \ + offsetof(ucontext_t,uc_mcontext.gregs[REG_##x])) + CHECK_REG(GS); + CHECK_REG(FS); + CHECK_REG(ES); + CHECK_REG(DS); + CHECK_REG(EDI); + CHECK_REG(ESI); + CHECK_REG(EBP); + CHECK_REG(ESP); + CHECK_REG(EBX); + CHECK_REG(EDX); + CHECK_REG(ECX); + CHECK_REG(EAX); + CHECK_REG(TRAPNO); + CHECK_REG(ERR); + CHECK_REG(EIP); + CHECK_REG(CS); + CHECK_REG(EFL); + CHECK_REG(UESP); + CHECK_REG(SS); + + ASSERT_EQ(static_cast(UCONTEXT_FPREGS_OFFSET), + offsetof(ucontext_t,uc_mcontext.fpregs)); + + ASSERT_EQ(static_cast(UCONTEXT_FPREGS_MEM_OFFSET), + offsetof(ucontext_t,__fpregs_mem)); +#elif defined(__mips__) + ASSERT_EQ(static_cast(MCONTEXT_GREGS_OFFSET), + offsetof(ucontext_t,uc_mcontext.gregs)); + + // PC for mips is not part of gregs. 
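The CompileAssertEquals helper and the static_cast calls in the test above appear to have lost their angle-bracket arguments in this copy (e.g. static_cast<size_t>(...)). The presumed shape of the helper is sketched here; treat it as a reconstruction, not the verbatim upstream source.

// Reconstruction (assumed): a negative array size fails to compile when the
// two constants differ; equal constants give zero-length arrays, which GCC
// accepts as an extension.
template <int left, int right>
struct CompileAssertEquals {
  char left_too_large[right - left];
  char right_too_large[left - right];
};

#define COMPILE_ASSERT_EQ(left, right, tag) \
  CompileAssertEquals<(left), (right)> tag;

// In C++11 and later the same check is simply:
//   static_assert((left) == (right), "ucontext offset mismatch");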
+ ASSERT_EQ(static_cast(MCONTEXT_PC_OFFSET), + offsetof(ucontext_t,uc_mcontext.pc)); + + ASSERT_EQ(static_cast(MCONTEXT_FPREGS_OFFSET), + offsetof(ucontext_t,uc_mcontext.fpregs)); + + ASSERT_EQ(static_cast(MCONTEXT_FPC_CSR), + offsetof(ucontext_t,uc_mcontext.fpc_csr)); +#elif defined(__x86_64__) + + COMPILE_ASSERT_EQ(static_cast(MCONTEXT_GREGS_OFFSET), + offsetof(ucontext_t,uc_mcontext.gregs), + mcontext_gregs_offset); +#define CHECK_REG(x) \ + COMPILE_ASSERT_EQ(static_cast(MCONTEXT_GREGS_##x), \ + offsetof(ucontext_t,uc_mcontext.gregs[REG_##x]), reg_##x) + CHECK_REG(R8); + CHECK_REG(R9); + CHECK_REG(R10); + CHECK_REG(R11); + CHECK_REG(R12); + CHECK_REG(R13); + CHECK_REG(R14); + CHECK_REG(R15); + CHECK_REG(RDI); + CHECK_REG(RSI); + CHECK_REG(RBP); + CHECK_REG(RBX); + CHECK_REG(RDX); + CHECK_REG(RAX); + CHECK_REG(RCX); + CHECK_REG(RSP); + CHECK_REG(RIP); + + // sigcontext is an analog to mcontext_t. The layout should be the same. + COMPILE_ASSERT_EQ(offsetof(mcontext_t,fpregs), + offsetof(sigcontext,fpstate), sigcontext_fpstate); + // Check that _fpstate from asm/sigcontext.h is essentially the same + // as _libc_fpstate. + COMPILE_ASSERT_EQ(sizeof(_libc_fpstate), sizeof(_fpstate), + sigcontext_fpstate_size); + COMPILE_ASSERT_EQ(offsetof(_libc_fpstate,cwd),offsetof(_fpstate,cwd), + sigcontext_fpstate_cwd); + COMPILE_ASSERT_EQ(offsetof(_libc_fpstate,swd),offsetof(_fpstate,swd), + sigcontext_fpstate_swd); + COMPILE_ASSERT_EQ(offsetof(_libc_fpstate,ftw),offsetof(_fpstate,twd), + sigcontext_fpstate_twd); + COMPILE_ASSERT_EQ(offsetof(_libc_fpstate,fop),offsetof(_fpstate,fop), + sigcontext_fpstate_fop); + COMPILE_ASSERT_EQ(offsetof(_libc_fpstate,rip),offsetof(_fpstate,rip), + sigcontext_fpstate_rip); + COMPILE_ASSERT_EQ(offsetof(_libc_fpstate,rdp),offsetof(_fpstate,rdp), + sigcontext_fpstate_rdp); + COMPILE_ASSERT_EQ(offsetof(_libc_fpstate,mxcsr),offsetof(_fpstate,mxcsr), + sigcontext_fpstate_mxcsr); + COMPILE_ASSERT_EQ(offsetof(_libc_fpstate,mxcr_mask), + offsetof(_fpstate,mxcsr_mask), + sigcontext_fpstate_mxcsr_mask); + COMPILE_ASSERT_EQ(offsetof(_libc_fpstate,_st), offsetof(_fpstate,st_space), + sigcontext_fpstate_stspace); + COMPILE_ASSERT_EQ(offsetof(_libc_fpstate,_xmm), offsetof(_fpstate,xmm_space), + sigcontext_fpstate_xmm_space); + + COMPILE_ASSERT_EQ(MCONTEXT_FPREGS_PTR, + offsetof(ucontext_t,uc_mcontext.fpregs), + mcontext_fpregs_ptr); + COMPILE_ASSERT_EQ(MCONTEXT_FPREGS_MEM, offsetof(ucontext_t,__fpregs_mem), + mcontext_fpregs_mem); + COMPILE_ASSERT_EQ(FPREGS_OFFSET_MXCSR, offsetof(_libc_fpstate,mxcsr), + fpregs_offset_mxcsr); + COMPILE_ASSERT_EQ(UCONTEXT_SIGMASK_OFFSET, offsetof(ucontext_t, uc_sigmask), + ucontext_sigmask); +#else + ASSERT_EQ(static_cast(MCONTEXT_GREGS_OFFSET), + offsetof(ucontext_t,uc_mcontext.gregs)); +#endif +} + +TEST(AndroidUContext, SigmakOffset) { + ASSERT_EQ(static_cast(UCONTEXT_SIGMASK_OFFSET), + offsetof(ucontext_t,uc_sigmask)); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/android/include/elf.h b/TMessagesProj/jni/third_party/breakpad/src/common/android/include/elf.h new file mode 100644 index 0000000000..b2a28df448 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/android/include/elf.h @@ -0,0 +1,168 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. 
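The x86-64 checks above pin down MCONTEXT_FPREGS_PTR and MCONTEXT_FPREGS_MEM because the assembly stores the FP environment inside the ucontext itself and then points uc_mcontext.fpregs at that area. A hypothetical runtime counterpart of the same invariant (not part of this diff; field names per the glibc/bionic layout the test relies on):

#include <stddef.h>
#include <ucontext.h>

#if defined(__x86_64__)
// True if the context's fpregs pointer refers to its own __fpregs_mem block,
// i.e. the FP environment travels by value inside the ucontext_t.
bool FpStateIsEmbedded(const ucontext_t* ctx) {
  const char* base = reinterpret_cast<const char*>(ctx);
  const char* fp = reinterpret_cast<const char*>(ctx->uc_mcontext.fpregs);
  return fp == base + offsetof(ucontext_t, __fpregs_mem);
}
#endif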
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef GOOGLE_BREAKPAD_COMMON_ANDROID_INCLUDE_ELF_H +#define GOOGLE_BREAKPAD_COMMON_ANDROID_INCLUDE_ELF_H + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +// The Android provides BSD-based definitions for the ElfXX_Nhdr +// types +// always source-compatible with the GLibc/kernel ones. To overcome this +// issue without modifying a lot of code in Breakpad, use an ugly macro +// renaming trick with #include_next + +// Avoid conflict with BSD-based definition of ElfXX_Nhdr. +// Unfortunately, their field member names do not use a 'n_' prefix. +#define Elf32_Nhdr __bsd_Elf32_Nhdr +#define Elf64_Nhdr __bsd_Elf64_Nhdr + +// In case they are defined by the NDK version +#define Elf32_auxv_t __bionic_Elf32_auxv_t +#define Elf64_auxv_t __bionic_Elf64_auxv_t + +#define Elf32_Dyn __bionic_Elf32_Dyn +#define Elf64_Dyn __bionic_Elf64_Dyn + +#include_next + +#undef Elf32_Nhdr +#undef Elf64_Nhdr + +typedef struct { + Elf32_Word n_namesz; + Elf32_Word n_descsz; + Elf32_Word n_type; +} Elf32_Nhdr; + +typedef struct { + Elf64_Word n_namesz; + Elf64_Word n_descsz; + Elf64_Word n_type; +} Elf64_Nhdr; + +#undef Elf32_auxv_t +#undef Elf64_auxv_t + +typedef struct { + uint32_t a_type; + union { + uint32_t a_val; + } a_un; +} Elf32_auxv_t; + +typedef struct { + uint64_t a_type; + union { + uint64_t a_val; + } a_un; +} Elf64_auxv_t; + +#undef Elf32_Dyn +#undef Elf64_Dyn + +typedef struct { + Elf32_Sword d_tag; + union { + Elf32_Word d_val; + Elf32_Addr d_ptr; + } d_un; +} Elf32_Dyn; + +typedef struct { + Elf64_Sxword d_tag; + union { + Elf64_Xword d_val; + Elf64_Addr d_ptr; + } d_un; +} Elf64_Dyn; + + +// __WORDSIZE is GLibc-specific and used by Google Breakpad on Linux. +#ifndef __WORDSIZE +#if defined(__i386__) || defined(__ARM_EABI__) || defined(__mips__) +#define __WORDSIZE 32 +#elif defined(__x86_64__) || defined(__aarch64__) +#define __WORDSIZE 64 +#else +#error "Unsupported Android CPU ABI" +#endif +#endif + +// The Android headers don't always define this constant. 
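With the glibc-compatible ElfXX_Nhdr layout restored above (n_namesz, n_descsz, n_type), a caller can walk a PT_NOTE region in the usual way: each note header is followed by its name and its descriptor, both padded to 4-byte boundaries. A small illustration, assumed rather than taken from Breakpad itself:

#include <elf.h>     // resolves to this wrapper when its directory is on the include path
#include <stdint.h>

// Advance past one note entry; returns the start of the next note header.
static const uint8_t* NextNote(const uint8_t* p) {
  const Elf32_Nhdr* nhdr = reinterpret_cast<const Elf32_Nhdr*>(p);
  uint32_t name_len = (nhdr->n_namesz + 3u) & ~3u;
  uint32_t desc_len = (nhdr->n_descsz + 3u) & ~3u;
  return p + sizeof(*nhdr) + name_len + desc_len;
}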
+#ifndef EM_X86_64 +#define EM_X86_64 62 +#endif + +#ifndef EM_PPC64 +#define EM_PPC64 21 +#endif + +#ifndef EM_S390 +#define EM_S390 22 +#endif + +#if !defined(AT_SYSINFO_EHDR) +#define AT_SYSINFO_EHDR 33 +#endif + +#if !defined(NT_PRSTATUS) +#define NT_PRSTATUS 1 +#endif + +#if !defined(NT_PRPSINFO) +#define NT_PRPSINFO 3 +#endif + +#if !defined(NT_AUXV) +#define NT_AUXV 6 +#endif + +#if !defined(NT_PRXFPREG) +#define NT_PRXFPREG 0x46e62b7f +#endif + +#if !defined(NT_FPREGSET) +#define NT_FPREGSET 2 +#endif + +#if !defined(SHT_MIPS_DWARF) +#define SHT_MIPS_DWARF 0x7000001e +#endif + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // GOOGLE_BREAKPAD_COMMON_ANDROID_INCLUDE_ELF_H diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/android/include/sgidefs.h b/TMessagesProj/jni/third_party/breakpad/src/common/android/include/sgidefs.h new file mode 100644 index 0000000000..33796dcf74 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/android/include/sgidefs.h @@ -0,0 +1,41 @@ +// Copyright (c) 2013, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef GOOGLE_BREAKPAD_ANDROID_INCLUDE_SGIDEFS_H +#define GOOGLE_BREAKPAD_ANDROID_INCLUDE_SGIDEFS_H + +#ifdef __mips__ + +// Android doesn't contain sgidefs.h, but does have which +// contains what we need. +#include + +#endif // __mips__ + +#endif // GOOGLE_BREAKPAD_ANDROID_INCLUDE_SGIDEFS_H diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/android/include/stab.h b/TMessagesProj/jni/third_party/breakpad/src/common/android/include/stab.h new file mode 100644 index 0000000000..cd92902151 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/android/include/stab.h @@ -0,0 +1,100 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef GOOGLE_BREAKPAD_COMMON_ANDROID_INCLUDE_STAB_H +#define GOOGLE_BREAKPAD_COMMON_ANDROID_INCLUDE_STAB_H + +#include + +#ifdef __BIONIC_HAVE_STAB_H +#include +#else + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +#define _STAB_CODE_LIST \ + _STAB_CODE_DEF(UNDF,0x00) \ + _STAB_CODE_DEF(GSYM,0x20) \ + _STAB_CODE_DEF(FNAME,0x22) \ + _STAB_CODE_DEF(FUN,0x24) \ + _STAB_CODE_DEF(STSYM,0x26) \ + _STAB_CODE_DEF(LCSYM,0x28) \ + _STAB_CODE_DEF(MAIN,0x2a) \ + _STAB_CODE_DEF(PC,0x30) \ + _STAB_CODE_DEF(NSYMS,0x32) \ + _STAB_CODE_DEF(NOMAP,0x34) \ + _STAB_CODE_DEF(OBJ,0x38) \ + _STAB_CODE_DEF(OPT,0x3c) \ + _STAB_CODE_DEF(RSYM,0x40) \ + _STAB_CODE_DEF(M2C,0x42) \ + _STAB_CODE_DEF(SLINE,0x44) \ + _STAB_CODE_DEF(DSLINE,0x46) \ + _STAB_CODE_DEF(BSLINE,0x48) \ + _STAB_CODE_DEF(BROWS,0x48) \ + _STAB_CODE_DEF(DEFD,0x4a) \ + _STAB_CODE_DEF(EHDECL,0x50) \ + _STAB_CODE_DEF(MOD2,0x50) \ + _STAB_CODE_DEF(CATCH,0x54) \ + _STAB_CODE_DEF(SSYM,0x60) \ + _STAB_CODE_DEF(SO,0x64) \ + _STAB_CODE_DEF(LSYM,0x80) \ + _STAB_CODE_DEF(BINCL,0x82) \ + _STAB_CODE_DEF(SOL,0x84) \ + _STAB_CODE_DEF(PSYM,0xa0) \ + _STAB_CODE_DEF(EINCL,0xa2) \ + _STAB_CODE_DEF(ENTRY,0xa4) \ + _STAB_CODE_DEF(LBRAC,0xc0) \ + _STAB_CODE_DEF(EXCL,0xc2) \ + _STAB_CODE_DEF(SCOPE,0xc4) \ + _STAB_CODE_DEF(RBRAC,0xe0) \ + _STAB_CODE_DEF(BCOMM,0xe2) \ + _STAB_CODE_DEF(ECOMM,0xe4) \ + _STAB_CODE_DEF(ECOML,0xe8) \ + _STAB_CODE_DEF(NBTEXT,0xf0) \ + _STAB_CODE_DEF(NBDATA,0xf2) \ + _STAB_CODE_DEF(NBBSS,0xf4) \ + _STAB_CODE_DEF(NBSTS,0xf6) \ + _STAB_CODE_DEF(NBLCS,0xf8) \ + _STAB_CODE_DEF(LENG,0xfe) + +enum __stab_debug_code { +#define _STAB_CODE_DEF(x,y) N_##x = y, +_STAB_CODE_LIST +#undef _STAB_CODE_DEF +}; + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // __BIONIC_HAVE_STAB_H + +#endif // GOOGLE_BREAKPAD_COMMON_ANDROID_INCLUDE_STAB_H diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/android/include/sys/procfs.h b/TMessagesProj/jni/third_party/breakpad/src/common/android/include/sys/procfs.h new file mode 100644 index 
0000000000..27223ea34a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/android/include/sys/procfs.h @@ -0,0 +1,124 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef GOOGLE_BREAKPAD_COMMON_ANDROID_SYS_PROCFS_H +#define GOOGLE_BREAKPAD_COMMON_ANDROID_SYS_PROCFS_H + +#ifdef __BIONIC_HAVE_SYS_PROCFS_H + +#include_next + +#else + +#include +#include +#if defined (__mips__) +#include +#endif +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +#if defined(__x86_64__) || defined(__aarch64__) +typedef unsigned long long elf_greg_t; +#else +typedef unsigned long elf_greg_t; +#endif + +#ifdef __arm__ +#define ELF_NGREG (sizeof(struct user_regs) / sizeof(elf_greg_t)) +#elif defined(__aarch64__) +#define ELF_NGREG (sizeof(struct user_pt_regs) / sizeof(elf_greg_t)) +#elif defined(__mips__) +#define ELF_NGREG 45 +#else +#define ELF_NGREG (sizeof(struct user_regs_struct) / sizeof(elf_greg_t)) +#endif + +typedef elf_greg_t elf_gregset_t[ELF_NGREG]; + +struct elf_siginfo { + int si_signo; + int si_code; + int si_errno; +}; + +struct elf_prstatus { + struct elf_siginfo pr_info; + short pr_cursig; + unsigned long pr_sigpend; + unsigned long pr_sighold; + pid_t pr_pid; + pid_t pr_ppid; + pid_t pr_pgrp; + pid_t pd_sid; + struct timeval pr_utime; + struct timeval pr_stime; + struct timeval pr_cutime; + struct timeval pr_cstime; + elf_gregset_t pr_reg; + int pr_fpvalid; +}; + +#define ELF_PRARGSZ 80 + +struct elf_prpsinfo { + char pr_state; + char pr_sname; + char pr_zomb; + char pr_nice; + unsigned long pr_flags; +#ifdef __x86_64__ + unsigned int pr_uid; + unsigned int pr_gid; +#elif defined(__mips__) + unsigned long pr_uid; + unsigned long pr_gid; +#else + unsigned short pr_uid; + unsigned short pr_gid; +#endif + int pr_pid; + int pr_ppid; + int pr_pgrp; + int pr_sid; + char pr_fname[16]; + char pr_psargs[ELF_PRARGSZ]; +}; + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // __BIONIC_HAVE_SYS_PROCFS_H + +#endif // 
GOOGLE_BREAKPAD_COMMON_ANDROID_SYS_PROCFS_H diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/android/include/sys/signal.h b/TMessagesProj/jni/third_party/breakpad/src/common/android/include/sys/signal.h new file mode 100644 index 0000000000..20c81e9379 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/android/include/sys/signal.h @@ -0,0 +1,35 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef GOOGLE_BREAKPAD_COMMON_ANDROID_INCLUDE_SYS_SIGNAL_H +#define GOOGLE_BREAKPAD_COMMON_ANDROID_INCLUDE_SYS_SIGNAL_H + +#include + +#endif // GOOGLE_BREAKPAD_COMMON_ANDROID_INCLUDE_SYS_SIGNAL_H diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/android/include/ucontext.h b/TMessagesProj/jni/third_party/breakpad/src/common/android/include/ucontext.h new file mode 100644 index 0000000000..29db8adee6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/android/include/ucontext.h @@ -0,0 +1,56 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
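The elf_prstatus/elf_prpsinfo definitions above mirror the structures carried in NT_PRSTATUS and NT_PRPSINFO core-dump notes, with the thread's registers stored in pr_reg. As a rough illustration of how such a record is filled (assumed shape, not code from this diff):

#include <string.h>
#include <sys/procfs.h>  // the wrapper defined above
#include <sys/types.h>

// Populate a prstatus record for one thread; 'regs' is an elf_gregset_t
// captured elsewhere (e.g. via ptrace or a saved ucontext).
void FillPrStatus(elf_prstatus* status, pid_t tid, const elf_gregset_t regs) {
  memset(status, 0, sizeof(*status));
  status->pr_pid = tid;
  memcpy(status->pr_reg, regs, sizeof(elf_gregset_t));
}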
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef GOOGLE_BREAKPAD_COMMON_ANDROID_INCLUDE_UCONTEXT_H +#define GOOGLE_BREAKPAD_COMMON_ANDROID_INCLUDE_UCONTEXT_H + +#include + +#ifdef __BIONIC_UCONTEXT_H +#include +#else + +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +// Provided by src/android/common/breakpad_getcontext.S +int breakpad_getcontext(ucontext_t* ucp); + +#define getcontext(x) breakpad_getcontext(x) + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // __BIONIC_UCONTEXT_H + +#endif // GOOGLE_BREAKPAD_COMMON_ANDROID_INCLUDE_UCONTEXT_H diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/android/testing/include/wchar.h b/TMessagesProj/jni/third_party/breakpad/src/common/android/testing/include/wchar.h new file mode 100644 index 0000000000..85373fd2a0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/android/testing/include/wchar.h @@ -0,0 +1,76 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Android doesn't provide wcscasecmp(), so provide an alternative here. +// +// Note that this header is not needed when Breakpad is compiled against +// a recent version of Googletest. It shall be considered for removal once +// src/testing/ is updated to an appropriate revision in the future. 
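The ucontext.h wrapper above makes the redirection transparent: on Bionic builds where <ucontext.h> does not provide getcontext(), any caller written against the standard API is rerouted to the assembly routine at preprocessing time. A sketch of the effect (the include-path setup is an assumption, not something this diff shows):

#include <ucontext.h>  // picks up the Breakpad wrapper when its directory precedes the NDK's

void Capture(ucontext_t* out) {
  // Expands to breakpad_getcontext(out) under the wrapper's #define.
  getcontext(out);
}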
+ +#ifndef GOOGLEBREAKPAD_COMMON_ANDROID_INCLUDE_WCHAR_H +#define GOOGLEBREAKPAD_COMMON_ANDROID_INCLUDE_WCHAR_H + +#include_next + +#if !defined(__aarch64__) && !defined(__x86_64__) && \ + !(defined(__mips__) && _MIPS_SIM == _ABI64) + +// This needs to be in an extern "C" namespace, or Googletest will not +// compile against it. +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +static wchar_t inline wcstolower(wchar_t ch) { + if (ch >= L'a' && ch <= L'A') + ch -= L'a' - L'A'; + return ch; +} + +static int inline wcscasecmp(const wchar_t* s1, const wchar_t* s2) { + for (;;) { + wchar_t c1 = wcstolower(*s1); + wchar_t c2 = wcstolower(*s2); + if (c1 < c2) + return -1; + if (c1 > c2) + return 1; + if (c1 == L'0') + return 0; + s1++; + s2++; + } +} + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus +#endif + +#endif // GOOGLEBREAKPAD_COMMON_ANDROID_INCLUDE_WCHAR_H diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/android/testing/mkdtemp.h b/TMessagesProj/jni/third_party/breakpad/src/common/android/testing/mkdtemp.h new file mode 100644 index 0000000000..b86e2cd783 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/android/testing/mkdtemp.h @@ -0,0 +1,110 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// mkdtemp() wasn't declared in until NDK r9b due to a simple +// packaging bug (the function has always been implemented in all versions +// of the C library). This header is provided to build Breakpad with earlier +// NDK revisions (e.g. the one used by Chromium). It may be removed in the +// future once all major projects upgrade to use a more recent NDK. +// +// The reason this is inlined here is to avoid linking a new object file +// into each unit test program (i.e. keep build files simple). 
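As transcribed, the lowering helper's range test (ch >= L'a' && ch <= L'A') and the L'0' loop terminator read oddly; whether those are quirks of the original or artifacts of this copy, the intent is a locale-free, case-insensitive compare that stops at the NUL wide character. A conventional rendering of the same fallback, for comparison only (towlower is locale-aware, which the original deliberately avoids):

#include <wctype.h>

static int WcsCaseCmpSketch(const wchar_t* s1, const wchar_t* s2) {
  for (;; ++s1, ++s2) {
    wint_t c1 = towlower(static_cast<wint_t>(*s1));
    wint_t c2 = towlower(static_cast<wint_t>(*s2));
    if (c1 != c2)
      return c1 < c2 ? -1 : 1;
    if (c1 == L'\0')
      return 0;  // both strings ended together
  }
}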
+ +#ifndef GOOGLE_BREAKPAD_COMMON_ANDROID_TESTING_MKDTEMP_H +#define GOOGLE_BREAKPAD_COMMON_ANDROID_TESTING_MKDTEMP_H + +#include +#include +#include +#include +#include +#include + +// Using a macro renaming trick here is necessary when building against +// NDK r9b. Otherwise the compiler will complain that calls to mkdtemp() +// are ambiguous. +#define mkdtemp breakpad_mkdtemp + +namespace { + +char* breakpad_mkdtemp(char* path) { + if (path == NULL) { + errno = EINVAL; + return NULL; + } + + // 'path' must be terminated with six 'X' + const char kSuffix[] = "XXXXXX"; + const size_t kSuffixLen = strlen(kSuffix); + char* path_end = path + strlen(path); + + if (static_cast(path_end - path) < kSuffixLen || + memcmp(path_end - kSuffixLen, kSuffix, kSuffixLen) != 0) { + errno = EINVAL; + return NULL; + } + + // If 'path' contains a directory separator, check that it exists to + // avoid looping later. + char* sep = strrchr(path, '/'); + if (sep != NULL) { + struct stat st; + int ret; + *sep = '\0'; // temporarily zero-terminate the dirname. + ret = stat(path, &st); + *sep = '/'; // restore full path. + if (ret < 0) + return NULL; + if (!S_ISDIR(st.st_mode)) { + errno = ENOTDIR; + return NULL; + } + } + + // Loop. On each iteration, replace the XXXXXX suffix with a random + // number. + int tries; + for (tries = 128; tries > 0; tries--) { + int random = rand() % 1000000; + + snprintf(path_end - kSuffixLen, kSuffixLen + 1, "%0d", random); + if (mkdir(path, 0700) == 0) + return path; // Success + + if (errno != EEXIST) + return NULL; + } + + assert(errno == EEXIST); + return NULL; +} + +} // namespace + +#endif // GOOGLE_BREAKPAD_COMMON_ANDROID_TESTING_MKDTEMP_H diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/android/testing/pthread_fixes.h b/TMessagesProj/jni/third_party/breakpad/src/common/android/testing/pthread_fixes.h new file mode 100644 index 0000000000..15c6309ecc --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/android/testing/pthread_fixes.h @@ -0,0 +1,99 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
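breakpad_mkdtemp() keeps mkdtemp()'s contract: the template must end in exactly six 'X' characters, its directory part must already exist, and the suffix is rewritten in place on success. A hypothetical call from a test (the path below is an assumption, not something this diff creates):

#include <stdio.h>

// Assumes the testing/mkdtemp.h shim above is included, so mkdtemp is
// macro-renamed to breakpad_mkdtemp.
void DemoMkdtemp() {
  char path[] = "/data/local/tmp/breakpad-test-XXXXXX";
  if (mkdtemp(path) != NULL)
    printf("created %s\n", path);
  else
    perror("mkdtemp");
}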
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// This contains Pthread-related functions not provided by the Android NDK +// but required by the Breakpad unit test. The functions are inlined here +// in a C++ anonymous namespace in order to keep the build files simples. + +#ifndef GOOGLE_BREAKPAD_COMMON_ANDROID_TESTING_PTHREAD_FIXES_H +#define GOOGLE_BREAKPAD_COMMON_ANDROID_TESTING_PTHREAD_FIXES_H + +#include + +namespace { + +// Android doesn't provide pthread_barrier_t for now. +#ifndef PTHREAD_BARRIER_SERIAL_THREAD + +// Anything except 0 will do here. +#define PTHREAD_BARRIER_SERIAL_THREAD 0x12345 + +typedef struct { + pthread_mutex_t mutex; + pthread_cond_t cond; + unsigned count; +} pthread_barrier_t; + +int pthread_barrier_init(pthread_barrier_t* barrier, + const void* /* barrier_attr */, + unsigned count) { + barrier->count = count; + pthread_mutex_init(&barrier->mutex, NULL); + pthread_cond_init(&barrier->cond, NULL); + return 0; +} + +int pthread_barrier_wait(pthread_barrier_t* barrier) { + // Lock the mutex + pthread_mutex_lock(&barrier->mutex); + // Decrement the count. If this is the first thread to reach 0, wake up + // waiters, unlock the mutex, then return PTHREAD_BARRIER_SERIAL_THREAD. + if (--barrier->count == 0) { + // First thread to reach the barrier + pthread_cond_broadcast(&barrier->cond); + pthread_mutex_unlock(&barrier->mutex); + return PTHREAD_BARRIER_SERIAL_THREAD; + } + // Otherwise, wait for other threads until the count reaches 0, then + // return 0 to indicate this is not the first thread. + do { + pthread_cond_wait(&barrier->cond, &barrier->mutex); + } while (barrier->count > 0); + + pthread_mutex_unlock(&barrier->mutex); + return 0; +} + +int pthread_barrier_destroy(pthread_barrier_t *barrier) { + barrier->count = 0; + pthread_cond_destroy(&barrier->cond); + pthread_mutex_destroy(&barrier->mutex); + return 0; +} + +#endif // defined(PTHREAD_BARRIER_SERIAL_THREAD) + +int pthread_yield(void) { + sched_yield(); + return 0; +} + +} // namespace + +#endif // GOOGLE_BREAKPAD_COMMON_ANDROID_TESTING_PTHREAD_FIXES_H diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/android/ucontext_constants.h b/TMessagesProj/jni/third_party/breakpad/src/common/android/ucontext_constants.h new file mode 100644 index 0000000000..1932d5739c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/android/ucontext_constants.h @@ -0,0 +1,144 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// This header can be included either from a C, C++ or Assembly file. +// Its purpose is to contain constants that must match the offsets of +// various fields in ucontext_t. +// +// They should match the definitions from +// src/common/android/include/sys/ucontext.h +// +// Used by src/common/android/breakpad_getcontext.S +// Tested by src/common/android/testing/breakpad_getcontext_unittest.cc + +#ifndef GOOGLEBREAKPAD_COMMON_ANDROID_UCONTEXT_CONSTANTS_H +#define GOOGLEBREAKPAD_COMMON_ANDROID_UCONTEXT_CONSTANTS_H + +#if defined(__arm__) + +#define MCONTEXT_GREGS_OFFSET 32 +#define UCONTEXT_SIGMASK_OFFSET 104 + +#elif defined(__aarch64__) + +#define UCONTEXT_SIGMASK_OFFSET 40 + +#define MCONTEXT_GREGS_OFFSET 184 +#define MCONTEXT_SP_OFFSET 432 +#define MCONTEXT_PC_OFFSET 440 +#define MCONTEXT_PSTATE_OFFSET 448 +#define MCONTEXT_EXTENSION_OFFSET 464 + +#define FPSIMD_MAGIC 0x46508001 + +#define FPSIMD_CONTEXT_MAGIC_OFFSET 0 +#define FPSIMD_CONTEXT_SIZE_OFFSET 4 +#define FPSIMD_CONTEXT_FPSR_OFFSET 8 +#define FPSIMD_CONTEXT_FPCR_OFFSET 12 +#define FPSIMD_CONTEXT_VREGS_OFFSET 16 +#define FPSIMD_CONTEXT_SIZE 528 + +#define REGISTER_SIZE 8 +#define SIMD_REGISTER_SIZE 16 + +#elif defined(__i386__) + +#define MCONTEXT_GREGS_OFFSET 20 +#define MCONTEXT_GS_OFFSET (MCONTEXT_GREGS_OFFSET + 0*4) +#define MCONTEXT_FS_OFFSET (MCONTEXT_GREGS_OFFSET + 1*4) +#define MCONTEXT_ES_OFFSET (MCONTEXT_GREGS_OFFSET + 2*4) +#define MCONTEXT_DS_OFFSET (MCONTEXT_GREGS_OFFSET + 3*4) +#define MCONTEXT_EDI_OFFSET (MCONTEXT_GREGS_OFFSET + 4*4) +#define MCONTEXT_ESI_OFFSET (MCONTEXT_GREGS_OFFSET + 5*4) +#define MCONTEXT_EBP_OFFSET (MCONTEXT_GREGS_OFFSET + 6*4) +#define MCONTEXT_ESP_OFFSET (MCONTEXT_GREGS_OFFSET + 7*4) +#define MCONTEXT_EBX_OFFSET (MCONTEXT_GREGS_OFFSET + 8*4) +#define MCONTEXT_EDX_OFFSET (MCONTEXT_GREGS_OFFSET + 9*4) +#define MCONTEXT_ECX_OFFSET (MCONTEXT_GREGS_OFFSET + 10*4) +#define MCONTEXT_EAX_OFFSET (MCONTEXT_GREGS_OFFSET + 11*4) +#define MCONTEXT_TRAPNO_OFFSET (MCONTEXT_GREGS_OFFSET + 12*4) +#define MCONTEXT_ERR_OFFSET (MCONTEXT_GREGS_OFFSET + 13*4) +#define MCONTEXT_EIP_OFFSET (MCONTEXT_GREGS_OFFSET + 14*4) +#define MCONTEXT_CS_OFFSET (MCONTEXT_GREGS_OFFSET + 15*4) +#define MCONTEXT_EFL_OFFSET (MCONTEXT_GREGS_OFFSET + 16*4) +#define MCONTEXT_UESP_OFFSET (MCONTEXT_GREGS_OFFSET + 17*4) +#define MCONTEXT_SS_OFFSET (MCONTEXT_GREGS_OFFSET + 18*4) + +#define UCONTEXT_SIGMASK_OFFSET 108 + +#define UCONTEXT_FPREGS_OFFSET 96 +#define UCONTEXT_FPREGS_MEM_OFFSET 116 + +#elif defined(__mips__) + +#if _MIPS_SIM == _ABIO32 +#define MCONTEXT_PC_OFFSET 32 +#define 
MCONTEXT_GREGS_OFFSET 40 +#define MCONTEXT_FPREGS_OFFSET 296 +#define MCONTEXT_FPC_CSR 556 +#define UCONTEXT_SIGMASK_OFFSET 616 +#else +#define MCONTEXT_GREGS_OFFSET 40 +#define MCONTEXT_FPREGS_OFFSET 296 +#define MCONTEXT_PC_OFFSET 616 +#define MCONTEXT_FPC_CSR 624 +#define UCONTEXT_SIGMASK_OFFSET 640 +#endif + +#elif defined(__x86_64__) + +#define MCONTEXT_GREGS_OFFSET 40 +#define UCONTEXT_SIGMASK_OFFSET 296 + +#define MCONTEXT_GREGS_R8 40 +#define MCONTEXT_GREGS_R9 48 +#define MCONTEXT_GREGS_R10 56 +#define MCONTEXT_GREGS_R11 64 +#define MCONTEXT_GREGS_R12 72 +#define MCONTEXT_GREGS_R13 80 +#define MCONTEXT_GREGS_R14 88 +#define MCONTEXT_GREGS_R15 96 +#define MCONTEXT_GREGS_RDI 104 +#define MCONTEXT_GREGS_RSI 112 +#define MCONTEXT_GREGS_RBP 120 +#define MCONTEXT_GREGS_RBX 128 +#define MCONTEXT_GREGS_RDX 136 +#define MCONTEXT_GREGS_RAX 144 +#define MCONTEXT_GREGS_RCX 152 +#define MCONTEXT_GREGS_RSP 160 +#define MCONTEXT_GREGS_RIP 168 +#define MCONTEXT_FPREGS_PTR 224 +#define MCONTEXT_FPREGS_MEM 304 +#define FPREGS_OFFSET_MXCSR 24 + +#else +#error "This header has not been ported for your CPU" +#endif + +#endif // GOOGLEBREAKPAD_COMMON_ANDROID_UCONTEXT_CONSTANTS_H diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/basictypes.h b/TMessagesProj/jni/third_party/breakpad/src/common/basictypes.h new file mode 100644 index 0000000000..9426c1f6c2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/basictypes.h @@ -0,0 +1,58 @@ +// Copyright (c) 2011 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef COMMON_BASICTYPES_H_ +#define COMMON_BASICTYPES_H_ + +// A macro to disallow the copy constructor and operator= functions +// This should be used in the private: declarations for a class +#ifndef DISALLOW_COPY_AND_ASSIGN +#define DISALLOW_COPY_AND_ASSIGN(TypeName) \ + TypeName(const TypeName&); \ + void operator=(const TypeName&) +#endif // DISALLOW_COPY_AND_ASSIGN + +namespace google_breakpad { + +// Used to explicitly mark the return value of a function as unused. 
If you are +// really sure you don't want to do anything with the return value of a function +// that has been marked with __attribute__((warn_unused_result)), wrap it with +// this. Example: +// +// scoped_ptr my_var = ...; +// if (TakeOwnership(my_var.get()) == SUCCESS) +// ignore_result(my_var.release()); +// +template +inline void ignore_result(const T&) { +} + +} // namespace google_breakpad + +#endif // COMMON_BASICTYPES_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/byte_cursor.h b/TMessagesProj/jni/third_party/breakpad/src/common/byte_cursor.h new file mode 100644 index 0000000000..accd54e0a4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/byte_cursor.h @@ -0,0 +1,265 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// byte_cursor.h: Classes for parsing values from a buffer of bytes. +// The ByteCursor class provides a convenient interface for reading +// fixed-size integers of arbitrary endianness, being thorough about +// checking for buffer overruns. + +#ifndef COMMON_BYTE_CURSOR_H_ +#define COMMON_BYTE_CURSOR_H_ + +#include +#include +#include +#include +#include + +#include "common/using_std_string.h" + +namespace google_breakpad { + +// A buffer holding a series of bytes. +struct ByteBuffer { + ByteBuffer() : start(0), end(0) { } + ByteBuffer(const uint8_t *set_start, size_t set_size) + : start(set_start), end(set_start + set_size) { } + ~ByteBuffer() { }; + + // Equality operators. Useful in unit tests, and when we're using + // ByteBuffers to refer to regions of a larger buffer. + bool operator==(const ByteBuffer &that) const { + return start == that.start && end == that.end; + } + bool operator!=(const ByteBuffer &that) const { + return start != that.start || end != that.end; + } + + // Not C++ style guide compliant, but this definitely belongs here. 
+  size_t Size() const {
+    assert(start <= end);
+    return end - start;
+  }
+
+  const uint8_t *start, *end;
+};
+
+// A cursor pointing into a ByteBuffer that can parse numbers of various
+// widths and representations, strings, and data blocks, advancing through
+// the buffer as it goes. All ByteCursor operations check that accesses
+// haven't gone beyond the end of the enclosing ByteBuffer.
+class ByteCursor {
+ public:
+  // Create a cursor reading bytes from the start of BUFFER. By default, the
+  // cursor reads multi-byte values in little-endian form.
+  ByteCursor(const ByteBuffer *buffer, bool big_endian = false)
+      : buffer_(buffer), here_(buffer->start),
+        big_endian_(big_endian), complete_(true) { }
+
+  // Accessor and setter for this cursor's endianness flag.
+  bool big_endian() const { return big_endian_; }
+  void set_big_endian(bool big_endian) { big_endian_ = big_endian; }
+
+  // Accessor and setter for this cursor's current position. The setter
+  // returns a reference to this cursor.
+  const uint8_t *here() const { return here_; }
+  ByteCursor &set_here(const uint8_t *here) {
+    assert(buffer_->start <= here && here <= buffer_->end);
+    here_ = here;
+    return *this;
+  }
+
+  // Return the number of bytes available to read at the cursor.
+  size_t Available() const { return size_t(buffer_->end - here_); }
+
+  // Return true if this cursor is at the end of its buffer.
+  bool AtEnd() const { return Available() == 0; }
+
+  // When used as a boolean value this cursor converts to true if all
+  // prior reads have been completed, or false if we ran off the end
+  // of the buffer.
+  operator bool() const { return complete_; }
+
+  // Read a SIZE-byte integer at this cursor, signed if IS_SIGNED is true,
+  // unsigned otherwise, using the cursor's established endianness, and set
+  // *RESULT to the number. If we read off the end of our buffer, clear
+  // this cursor's complete_ flag, and store a dummy value in *RESULT.
+  // Return a reference to this cursor.
+  template<typename T>
+  ByteCursor &Read(size_t size, bool is_signed, T *result) {
+    if (CheckAvailable(size)) {
+      T v = 0;
+      if (big_endian_) {
+        for (size_t i = 0; i < size; i++)
+          v = (v << 8) + here_[i];
+      } else {
+        // This loop condition looks weird, but size_t is unsigned, so
+        // decrementing i after it is zero yields the largest size_t value.
+        for (size_t i = size - 1; i < size; i--)
+          v = (v << 8) + here_[i];
+      }
+      if (is_signed && size < sizeof(T)) {
+        size_t sign_bit = (T)1 << (size * 8 - 1);
+        v = (v ^ sign_bit) - sign_bit;
+      }
+      here_ += size;
+      *result = v;
+    } else {
+      *result = (T) 0xdeadbeef;
+    }
+    return *this;
+  }
+
+  // Read an integer, using the cursor's established endianness and
+  // *RESULT's size and signedness, and set *RESULT to the number. If we
+  // read off the end of our buffer, clear this cursor's complete_ flag.
+  // Return a reference to this cursor.
+  template<typename T>
+  ByteCursor &operator>>(T &result) {
+    bool T_is_signed = (T)-1 < 0;
+    return Read(sizeof(T), T_is_signed, &result);
+  }
+
+  // Copy the SIZE bytes at the cursor to BUFFER, and advance this
+  // cursor to the end of them. If we read off the end of our buffer,
+  // clear this cursor's complete_ flag, and set *POINTER to NULL.
+  // Return a reference to this cursor.
+  ByteCursor &Read(uint8_t *buffer, size_t size) {
+    if (CheckAvailable(size)) {
+      memcpy(buffer, here_, size);
+      here_ += size;
+    }
+    return *this;
+  }
+
+  // Set STR to a copy of the '\0'-terminated string at the cursor. If the
+  // byte buffer does not contain a terminating zero, clear this cursor's
+  // complete_ flag, and set STR to the empty string. Return a reference to
+  // this cursor.
+  ByteCursor &CString(string *str) {
+    const uint8_t *end
+        = static_cast<const uint8_t *>(memchr(here_, '\0', Available()));
+    if (end) {
+      str->assign(reinterpret_cast<const char *>(here_), end - here_);
+      here_ = end + 1;
+    } else {
+      str->clear();
+      here_ = buffer_->end;
+      complete_ = false;
+    }
+    return *this;
+  }
+
+  // Like CString(STR), but extract the string from a fixed-width buffer
+  // LIMIT bytes long, which may or may not contain a terminating '\0'
+  // byte. Specifically:
+  //
+  // - If there are not LIMIT bytes available at the cursor, clear the
+  //   cursor's complete_ flag and set STR to the empty string.
+  //
+  // - Otherwise, if the LIMIT bytes at the cursor contain any '\0'
+  //   characters, set *STR to a copy of the bytes before the first '\0',
+  //   and advance the cursor by LIMIT bytes.
+  //
+  // - Otherwise, set *STR to a copy of those LIMIT bytes, and advance the
+  //   cursor by LIMIT bytes.
+  ByteCursor &CString(string *str, size_t limit) {
+    if (CheckAvailable(limit)) {
+      const uint8_t *end
+          = static_cast<const uint8_t *>(memchr(here_, '\0', limit));
+      if (end)
+        str->assign(reinterpret_cast<const char *>(here_), end - here_);
+      else
+        str->assign(reinterpret_cast<const char *>(here_), limit);
+      here_ += limit;
+    } else {
+      str->clear();
+    }
+    return *this;
+  }
+
+  // Set *POINTER to point to the SIZE bytes at the cursor, and advance
+  // this cursor to the end of them. If SIZE is omitted, don't move the
+  // cursor. If we read off the end of our buffer, clear this cursor's
+  // complete_ flag, and set *POINTER to NULL. Return a reference to this
+  // cursor.
+  ByteCursor &PointTo(const uint8_t **pointer, size_t size = 0) {
+    if (CheckAvailable(size)) {
+      *pointer = here_;
+      here_ += size;
+    } else {
+      *pointer = NULL;
+    }
+    return *this;
+  }
+
+  // Skip SIZE bytes at the cursor. If doing so would advance us off
+  // the end of our buffer, clear this cursor's complete_ flag, and
+  // set *POINTER to NULL. Return a reference to this cursor.
+  ByteCursor &Skip(size_t size) {
+    if (CheckAvailable(size))
+      here_ += size;
+    return *this;
+  }
+
+ private:
+  // If there are at least SIZE bytes available to read from the buffer,
+  // return true. Otherwise, set here_ to the end of the buffer, set
+  // complete_ to false, and return false.
+  bool CheckAvailable(size_t size) {
+    if (Available() >= size) {
+      return true;
+    } else {
+      here_ = buffer_->end;
+      complete_ = false;
+      return false;
+    }
+  }
+
+  // The buffer we're reading bytes from.
+  const ByteBuffer *buffer_;
+
+  // The next byte within buffer_ that we'll read.
+  const uint8_t *here_;
+
+  // True if we should read numbers in big-endian form; false if we
+  // should read in little-endian form.
+  bool big_endian_;
+
+  // True if we've been able to read all we've been asked to.
+  bool complete_;
+};
+
+}  // namespace google_breakpad
+
+#endif  // COMMON_BYTE_CURSOR_H_
diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/byte_cursor_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/byte_cursor_unittest.cc
new file mode 100644
index 0000000000..06bfd89d73
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/common/byte_cursor_unittest.cc
@@ -0,0 +1,776 @@
+// Copyright (c) 2010 Google Inc.
+// All rights reserved.
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// byte_cursor_unittest.cc: Unit tests for google_breakpad::ByteBuffer +// and google_breakpad::ByteCursor. + +#include + +#include + +#include "breakpad_googletest_includes.h" +#include "common/byte_cursor.h" +#include "common/using_std_string.h" + +using google_breakpad::ByteBuffer; +using google_breakpad::ByteCursor; + +TEST(Buffer, SizeOfNothing) { + uint8_t data[1]; + ByteBuffer buffer(data, 0); + EXPECT_EQ(0U, buffer.Size()); +} + +TEST(Buffer, SizeOfSomething) { + uint8_t data[10]; + ByteBuffer buffer(data, sizeof(data)); + EXPECT_EQ(10U, buffer.Size()); +} + +TEST(Extent, AvailableEmpty) { + uint8_t data[1]; + ByteBuffer buffer(data, 0); + ByteCursor cursor(&buffer); + EXPECT_EQ(0U, cursor.Available()); +} + +TEST(Extent, AtEndEmpty) { + uint8_t data[1]; + ByteBuffer buffer(data, 0); + ByteCursor cursor(&buffer); + EXPECT_TRUE(cursor.AtEnd()); +} + +TEST(Extent, AsBoolEmpty) { + uint8_t data[1]; + ByteBuffer buffer(data, 0); + ByteCursor cursor(&buffer); + EXPECT_TRUE(cursor); +} + +TEST(Extent, AvailableSome) { + uint8_t data[10]; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + EXPECT_EQ(10U, cursor.Available()); +} + +TEST(Extent, AtEndSome) { + uint8_t data[10]; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + EXPECT_FALSE(cursor.AtEnd()); + EXPECT_TRUE(cursor.Skip(sizeof(data)).AtEnd()); +} + +TEST(Extent, AsBoolSome) { + uint8_t data[10]; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + EXPECT_TRUE(cursor); + EXPECT_TRUE(cursor.Skip(sizeof(data))); + EXPECT_FALSE(cursor.Skip(1)); +} + +TEST(Extent, Cursor) { + uint8_t data[] = { 0xf7, + 0x9f, 0xbe, + 0x67, 0xfb, 0xd3, 0x58, + 0x6f, 0x36, 0xde, 0xd1, + 0x2a, 0x2a, 0x2a }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + + uint8_t a; + uint16_t b; + uint32_t c; + uint32_t d; + uint8_t stars[3]; + + EXPECT_EQ(data + 0U, cursor.here()); + + EXPECT_TRUE(cursor >> a); + EXPECT_EQ(data + 1U, cursor.here()); + + 
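+  // The cursor reads little-endian by default, so the next extraction
+  // consumes the two bytes 0x9f 0xbe as the uint16_t 0xbe9f and moves
+  // here() forward by sizeof(uint16_t).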
EXPECT_TRUE(cursor >> b); + EXPECT_EQ(data + 3U, cursor.here()); + + EXPECT_TRUE(cursor >> c); + EXPECT_EQ(data + 7U, cursor.here()); + + EXPECT_TRUE(cursor.Skip(4)); + EXPECT_EQ(data + 11U, cursor.here()); + + EXPECT_TRUE(cursor.Read(stars, 3)); + EXPECT_EQ(data + 14U, cursor.here()); + + EXPECT_FALSE(cursor >> d); + EXPECT_EQ(data + 14U, cursor.here()); +} + +TEST(Extent, SetOffset) { + uint8_t data[] = { 0x5c, 0x79, 0x8c, 0xd5 }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + + uint8_t a, b, c, d, e; + EXPECT_TRUE(cursor >> a); + EXPECT_EQ(0x5cU, a); + EXPECT_EQ(data + 1U, cursor.here()); + EXPECT_TRUE(((cursor >> b).set_here(data + 3) >> c).set_here(data + 1) + >> d >> e); + EXPECT_EQ(0x79U, b); + EXPECT_EQ(0xd5U, c); + EXPECT_EQ(0x79U, d); + EXPECT_EQ(0x8cU, e); + EXPECT_EQ(data + 3U, cursor.here()); +} + +TEST(BigEndian, Signed1) { + uint8_t data[] = { 0x00, 0x7f, 0x80, 0xff }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + cursor.set_big_endian(true); + int a, b, c, d, e; + ASSERT_TRUE(cursor + .Read(1, true, &a) + .Read(1, true, &b) + .Read(1, true, &c) + .Read(1, true, &d)); + EXPECT_EQ(0, a); + EXPECT_EQ(0x7f, b); + EXPECT_EQ(-0x80, c); + EXPECT_EQ(-1, d); + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor.Read(1, true, &e)); +} + +TEST(BigEndian, Signed2) { + uint8_t data[] = { 0x00, 0x00, 0x00, 0x80, 0x7f, 0xff, + 0x80, 0x00, 0x80, 0x80, 0xff, 0xff, + 0x39, 0xf1, 0x8a, 0xbc, 0x5a, 0xec }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer, true); + int a, b, c, d, e, f, g, h, i, j; + ASSERT_TRUE(cursor + .Read(2, true, &a) + .Read(2, true, &b) + .Read(2, true, &c) + .Read(2, true, &d) + .Read(2, true, &e) + .Read(2, true, &f) + .Read(2, true, &g) + .Read(2, true, &h) + .Read(2, true, &i)); + EXPECT_EQ(0, a); + EXPECT_EQ(0x80, b); + EXPECT_EQ(0x7fff, c); + EXPECT_EQ(-0x8000, d); + EXPECT_EQ(-0x7f80, e); + EXPECT_EQ(-1, f); + EXPECT_EQ(0x39f1, g); + EXPECT_EQ(-0x7544, h); + EXPECT_EQ(0x5aec, i); + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor.Read(2, true, &j)); +} + +TEST(BigEndian, Signed4) { + uint8_t data[] = { 0x00, 0x00, 0x00, 0x00, + 0x7f, 0xff, 0xff, 0xff, + 0x80, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xff, 0xff, + 0xb6, 0xb1, 0xff, 0xef, + 0x19, 0x6a, 0xca, 0x46 }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + cursor.set_big_endian(true); + int64_t a, b, c, d, e, f, g; + ASSERT_TRUE(cursor + .Read(4, true, &a) + .Read(4, true, &b) + .Read(4, true, &c) + .Read(4, true, &d) + .Read(4, true, &e) + .Read(4, true, &f)); + EXPECT_EQ(0, a); + EXPECT_EQ(0x7fffffff, b); + EXPECT_EQ(-0x80000000LL, c); + EXPECT_EQ(-1, d); + EXPECT_EQ((int32_t) 0xb6b1ffef, e); + EXPECT_EQ(0x196aca46, f); + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor.Read(4, true, &g)); +} + +TEST(BigEndian, Signed8) { + uint8_t data[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0x93, 0x20, 0xd5, 0xe9, 0xd2, 0xd5, 0x87, 0x9c, + 0x4e, 0x42, 0x49, 0xd2, 0x7f, 0x84, 0x14, 0xa4 }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer, true); + int64_t a, b, c, d, e, f, g; + ASSERT_TRUE(cursor + .Read(8, true, &a) + .Read(8, true, &b) + .Read(8, true, &c) + .Read(8, true, &d) + .Read(8, true, &e) + .Read(8, true, &f)); + EXPECT_EQ(0, a); + EXPECT_EQ(0x7fffffffffffffffLL, b); + EXPECT_EQ(-0x7fffffffffffffffLL - 1, c); + EXPECT_EQ(-1, d); + 
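+  // 0x9320d5e9d2d5879c has its top bit set, so read back as a signed
+  // eight-byte value it is negative; the cast keeps the expectation in
+  // int64_t range.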
EXPECT_EQ((int64_t) 0x9320d5e9d2d5879cULL, e); + EXPECT_EQ(0x4e4249d27f8414a4LL, f); + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor.Read(8, true, &g)); +} + +TEST(BigEndian, Unsigned1) { + uint8_t data[] = { 0x00, 0x7f, 0x80, 0xff }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + cursor.set_big_endian(true); + int32_t a, b, c, d, e; + ASSERT_TRUE(cursor + .Read(1, false, &a) + .Read(1, false, &b) + .Read(1, false, &c) + .Read(1, false, &d)); + EXPECT_EQ(0, a); + EXPECT_EQ(0x7f, b); + EXPECT_EQ(0x80, c); + EXPECT_EQ(0xff, d); + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor.Read(1, false, &e)); +} + +TEST(BigEndian, Unsigned2) { + uint8_t data[] = { 0x00, 0x00, 0x00, 0x80, 0x7f, 0xff, + 0x80, 0x00, 0x80, 0x80, 0xff, 0xff, + 0x39, 0xf1, 0x8a, 0xbc, 0x5a, 0xec }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer, true); + int64_t a, b, c, d, e, f, g, h, i, j; + ASSERT_TRUE(cursor + .Read(2, false, &a) + .Read(2, false, &b) + .Read(2, false, &c) + .Read(2, false, &d) + .Read(2, false, &e) + .Read(2, false, &f) + .Read(2, false, &g) + .Read(2, false, &h) + .Read(2, false, &i)); + EXPECT_EQ(0, a); + EXPECT_EQ(0x80, b); + EXPECT_EQ(0x7fff, c); + EXPECT_EQ(0x8000, d); + EXPECT_EQ(0x8080, e); + EXPECT_EQ(0xffff, f); + EXPECT_EQ(0x39f1, g); + EXPECT_EQ(0x8abc, h); + EXPECT_EQ(0x5aec, i); + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor.Read(2, false, &j)); +} + +TEST(BigEndian, Unsigned4) { + uint8_t data[] = { 0x00, 0x00, 0x00, 0x00, + 0x7f, 0xff, 0xff, 0xff, + 0x80, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xff, 0xff, + 0xb6, 0xb1, 0xff, 0xef, + 0x19, 0x6a, 0xca, 0x46 }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + cursor.set_big_endian(true); + int64_t a, b, c, d, e, f, g; + ASSERT_TRUE(cursor + .Read(4, false, &a) + .Read(4, false, &b) + .Read(4, false, &c) + .Read(4, false, &d) + .Read(4, false, &e) + .Read(4, false, &f)); + EXPECT_EQ(0, a); + EXPECT_EQ(0x7fffffff, b); + EXPECT_EQ(0x80000000, c); + EXPECT_EQ(0xffffffff, d); + EXPECT_EQ(0xb6b1ffef, e); + EXPECT_EQ(0x196aca46, f); + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor.Read(4, false, &g)); +} + +TEST(BigEndian, Unsigned8) { + uint8_t data[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0x93, 0x20, 0xd5, 0xe9, 0xd2, 0xd5, 0x87, 0x9c, + 0x4e, 0x42, 0x49, 0xd2, 0x7f, 0x84, 0x14, 0xa4 }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer, true); + uint64_t a, b, c, d, e, f, g; + ASSERT_TRUE(cursor + .Read(8, false, &a) + .Read(8, false, &b) + .Read(8, false, &c) + .Read(8, false, &d) + .Read(8, false, &e) + .Read(8, false, &f)); + EXPECT_EQ(0U, a); + EXPECT_EQ(0x7fffffffffffffffULL, b); + EXPECT_EQ(0x8000000000000000ULL, c); + EXPECT_EQ(0xffffffffffffffffULL, d); + EXPECT_EQ(0x9320d5e9d2d5879cULL, e); + EXPECT_EQ(0x4e4249d27f8414a4ULL, f); + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor.Read(8, false, &g)); +} + +TEST(LittleEndian, Signed1) { + uint8_t data[] = { 0x00, 0x7f, 0x80, 0xff }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + int32_t a, b, c, d, e; + ASSERT_TRUE(cursor + .Read(1, true, &a) + .Read(1, true, &b) + .Read(1, true, &c) + .Read(1, true, &d)); + EXPECT_EQ(0, a); + EXPECT_EQ(0x7f, b); + EXPECT_EQ(-0x80, c); + EXPECT_EQ(-1, d); + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor.Read(1, true, &e)); +} + +TEST(LittleEndian, Signed2) { + 
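+  // Each little-endian byte pair below decodes to the signed value checked
+  // after the reads: 0x00 0x80 is 0x8000, which sign-extends to -0x8000,
+  // and 0xbc 0x8a is 0x8abc, i.e. -0x7544.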
uint8_t data[] = { 0x00, 0x00, 0x80, 0x00, 0xff, 0x7f, + 0x00, 0x80, 0x80, 0x80, 0xff, 0xff, + 0xf1, 0x39, 0xbc, 0x8a, 0xec, 0x5a }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer, false); + int32_t a, b, c, d, e, f, g, h, i, j; + ASSERT_TRUE(cursor + .Read(2, true, &a) + .Read(2, true, &b) + .Read(2, true, &c) + .Read(2, true, &d) + .Read(2, true, &e) + .Read(2, true, &f) + .Read(2, true, &g) + .Read(2, true, &h) + .Read(2, true, &i)); + EXPECT_EQ(0, a); + EXPECT_EQ(0x80, b); + EXPECT_EQ(0x7fff, c); + EXPECT_EQ(-0x8000, d); + EXPECT_EQ(-0x7f80, e); + EXPECT_EQ(-1, f); + EXPECT_EQ(0x39f1, g); + EXPECT_EQ(-0x7544, h); + EXPECT_EQ(0x5aec, i); + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor.Read(2, true, &j)); +} + +TEST(LittleEndian, Signed4) { + uint8_t data[] = { 0x00, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xff, 0x7f, + 0x00, 0x00, 0x00, 0x80, + 0xff, 0xff, 0xff, 0xff, + 0xef, 0xff, 0xb1, 0xb6, + 0x46, 0xca, 0x6a, 0x19 }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + int64_t a, b, c, d, e, f, g; + ASSERT_TRUE(cursor + .Read(4, true, &a) + .Read(4, true, &b) + .Read(4, true, &c) + .Read(4, true, &d) + .Read(4, true, &e) + .Read(4, true, &f)); + EXPECT_EQ(0, a); + EXPECT_EQ(0x7fffffff, b); + EXPECT_EQ(-0x80000000LL, c); + EXPECT_EQ(-1, d); + EXPECT_EQ((int32_t) 0xb6b1ffef, e); + EXPECT_EQ(0x196aca46, f); + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor.Read(4, true, &g)); +} + +TEST(LittleEndian, Signed8) { + uint8_t data[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0x9c, 0x87, 0xd5, 0xd2, 0xe9, 0xd5, 0x20, 0x93, + 0xa4, 0x14, 0x84, 0x7f, 0xd2, 0x49, 0x42, 0x4e }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer, false); + int64_t a, b, c, d, e, f, g; + ASSERT_TRUE(cursor + .Read(8, true, &a) + .Read(8, true, &b) + .Read(8, true, &c) + .Read(8, true, &d) + .Read(8, true, &e) + .Read(8, true, &f)); + EXPECT_EQ(0, a); + EXPECT_EQ(0x7fffffffffffffffLL, b); + EXPECT_EQ(-0x7fffffffffffffffLL - 1, c); + EXPECT_EQ(-1, d); + EXPECT_EQ((int64_t) 0x9320d5e9d2d5879cULL, e); + EXPECT_EQ(0x4e4249d27f8414a4LL, f); + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor.Read(8, true, &g)); +} + +TEST(LittleEndian, Unsigned1) { + uint8_t data[] = { 0x00, 0x7f, 0x80, 0xff }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + int32_t a, b, c, d, e; + ASSERT_TRUE(cursor + .Read(1, false, &a) + .Read(1, false, &b) + .Read(1, false, &c) + .Read(1, false, &d)); + EXPECT_EQ(0, a); + EXPECT_EQ(0x7f, b); + EXPECT_EQ(0x80, c); + EXPECT_EQ(0xff, d); + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor.Read(1, false, &e)); +} + +TEST(LittleEndian, Unsigned2) { + uint8_t data[] = { 0x00, 0x00, 0x80, 0x00, 0xff, 0x7f, + 0x00, 0x80, 0x80, 0x80, 0xff, 0xff, + 0xf1, 0x39, 0xbc, 0x8a, 0xec, 0x5a }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + int32_t a, b, c, d, e, f, g, h, i, j; + ASSERT_TRUE(cursor + .Read(2, false, &a) + .Read(2, false, &b) + .Read(2, false, &c) + .Read(2, false, &d) + .Read(2, false, &e) + .Read(2, false, &f) + .Read(2, false, &g) + .Read(2, false, &h) + .Read(2, false, &i)); + EXPECT_EQ(0, a); + EXPECT_EQ(0x80, b); + EXPECT_EQ(0x7fff, c); + EXPECT_EQ(0x8000, d); + EXPECT_EQ(0x8080, e); + EXPECT_EQ(0xffff, f); + EXPECT_EQ(0x39f1, g); + EXPECT_EQ(0x8abc, h); + EXPECT_EQ(0x5aec, i); + EXPECT_TRUE(cursor.AtEnd()); + 
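+  // All 18 bytes have been consumed, so one further two-byte read must
+  // fail and leave the cursor incomplete.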
EXPECT_FALSE(cursor.Read(2, false, &j)); +} + +TEST(LittleEndian, Unsigned4) { + uint8_t data[] = { 0x00, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xff, 0x7f, + 0x00, 0x00, 0x00, 0x80, + 0xff, 0xff, 0xff, 0xff, + 0xef, 0xff, 0xb1, 0xb6, + 0x46, 0xca, 0x6a, 0x19 }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + int64_t a, b, c, d, e, f, g; + ASSERT_TRUE(cursor + .Read(4, false, &a) + .Read(4, false, &b) + .Read(4, false, &c) + .Read(4, false, &d) + .Read(4, false, &e) + .Read(4, false, &f)); + EXPECT_EQ(0, a); + EXPECT_EQ(0x7fffffff, b); + EXPECT_EQ(0x80000000, c); + EXPECT_EQ(0xffffffff, d); + EXPECT_EQ(0xb6b1ffef, e); + EXPECT_EQ(0x196aca46, f); + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor.Read(4, false, &g)); +} + +TEST(LittleEndian, Unsigned8) { + uint8_t data[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0x9c, 0x87, 0xd5, 0xd2, 0xe9, 0xd5, 0x20, 0x93, + 0xa4, 0x14, 0x84, 0x7f, 0xd2, 0x49, 0x42, 0x4e }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + uint64_t a, b, c, d, e, f, g; + ASSERT_TRUE(cursor + .Read(8, false, &a) + .Read(8, false, &b) + .Read(8, false, &c) + .Read(8, false, &d) + .Read(8, false, &e) + .Read(8, false, &f)); + EXPECT_EQ(0U, a); + EXPECT_EQ(0x7fffffffffffffffULL, b); + EXPECT_EQ(0x8000000000000000ULL, c); + EXPECT_EQ(0xffffffffffffffffULL, d); + EXPECT_EQ(0x9320d5e9d2d5879cULL, e); + EXPECT_EQ(0x4e4249d27f8414a4ULL, f); + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor.Read(8, false, &g)); +} + +TEST(Extractor, Signed1) { + uint8_t data[] = { 0xfd }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + int8_t a; + EXPECT_TRUE(cursor >> a); + EXPECT_EQ(-3, a); + EXPECT_FALSE(cursor >> a); +} + +TEST(Extractor, Signed2) { + uint8_t data[] = { 0x13, 0xcd }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + int16_t a; + EXPECT_TRUE(cursor >> a); + EXPECT_EQ(-13037, a); + EXPECT_FALSE(cursor >> a); +} + +TEST(Extractor, Signed4) { + uint8_t data[] = { 0xd2, 0xe4, 0x53, 0xe9 }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + int32_t a; + // For some reason, G++ 4.4.1 complains: + // warning: array subscript is above array bounds + // in ByteCursor::Read(size_t, bool, T *) as it inlines this call, but + // I'm not able to see how such a reference would occur. + EXPECT_TRUE(cursor >> a); + EXPECT_EQ(-380377902, a); + EXPECT_FALSE(cursor >> a); +} + +TEST(Extractor, Unsigned1) { + uint8_t data[] = { 0xfd }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + uint8_t a; + EXPECT_TRUE(cursor >> a); + EXPECT_EQ(0xfd, a); + EXPECT_FALSE(cursor >> a); +} + +TEST(Extractor, Unsigned2) { + uint8_t data[] = { 0x13, 0xcd }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + uint16_t a; + EXPECT_TRUE(cursor >> a); + EXPECT_EQ(0xcd13, a); + EXPECT_FALSE(cursor >> a); +} + +TEST(Extractor, Unsigned4) { + uint8_t data[] = { 0xd2, 0xe4, 0x53, 0xe9 }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + uint32_t a; + // For some reason, G++ 4.4.1 complains: + // warning: array subscript is above array bounds + // in ByteCursor::Read(size_t, bool, T *) as it inlines this call, but + // I'm not able to see how such a reference would occur. 
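+  // Little-endian: 0xd2 0xe4 0x53 0xe9 assembles to 0xe953e4d2, and the
+  // extractions after that fail because the four-byte buffer is exhausted.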
+ EXPECT_TRUE(cursor >> a); + EXPECT_EQ(0xe953e4d2, a); + EXPECT_FALSE(cursor >> a); + EXPECT_FALSE(cursor >> a); +} + +TEST(Extractor, Mixed) { + uint8_t data[] = { 0x42, + 0x25, 0x0b, + 0x3d, 0x25, 0xed, 0x2a, + 0xec, 0x16, 0x9e, 0x14, 0x61, 0x5b, 0x2c, 0xcf, + 0xd8, + 0x22, 0xa5, + 0x3a, 0x02, 0x6a, 0xd7, + 0x93, 0x2a, 0x2d, 0x8d, 0xb4, 0x95, 0xe0, 0xc6 }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + cursor.set_big_endian(true); + + uint8_t a; + uint16_t b; + uint32_t c; + uint64_t d; + int8_t e; + int16_t f; + int32_t g; + int64_t h; + int z; + EXPECT_FALSE(cursor.AtEnd()); + EXPECT_TRUE(cursor >> a >> b >> c >> d >> e >> f >> g >> h); + EXPECT_EQ(0x42U, a); + EXPECT_EQ(0x250bU, b); + EXPECT_EQ(0x3d25ed2aU, c); + EXPECT_EQ(0xec169e14615b2ccfULL, d); + EXPECT_EQ(-40, e); + EXPECT_EQ(0x22a5, f); + EXPECT_EQ(0x3a026ad7, g); + EXPECT_EQ(-7842405714468937530LL, h); + + EXPECT_TRUE(cursor.AtEnd()); + EXPECT_FALSE(cursor >> z); +} + +TEST(Strings, Zero) { + uint8_t data[] = { 0xa6 }; + ByteBuffer buffer(data, 0); + ByteCursor cursor(&buffer); + + uint8_t received[1]; + received[0] = 0xc2; + EXPECT_TRUE(cursor.Read(received, 0)); + EXPECT_EQ(0xc2U, received[0]); +} + +TEST(Strings, Some) { + uint8_t data[] = { 0x5d, 0x31, 0x09, 0xa6, 0x2e, 0x2c, 0x83, 0xbb }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + + uint8_t received[7] = { 0xa7, 0xf7, 0x43, 0x0c, 0x27, 0xea, 0xed }; + EXPECT_TRUE(cursor.Skip(2).Read(received, 5)); + uint8_t expected[7] = { 0x09, 0xa6, 0x2e, 0x2c, 0x83, 0xea, 0xed }; + EXPECT_TRUE(memcmp(received, expected, 7) == 0); +} + +TEST(Strings, TooMuch) { + uint8_t data[] = { 0x5d, 0x31, 0x09, 0xa6, 0x2e, 0x2c, 0x83, 0xbb }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + + uint8_t received1[3]; + uint8_t received2[3]; + uint8_t received3[3]; + EXPECT_FALSE(cursor + .Read(received1, 3) + .Read(received2, 3) + .Read(received3, 3)); + uint8_t expected1[3] = { 0x5d, 0x31, 0x09 }; + uint8_t expected2[3] = { 0xa6, 0x2e, 0x2c }; + + EXPECT_TRUE(memcmp(received1, expected1, 3) == 0); + EXPECT_TRUE(memcmp(received2, expected2, 3) == 0); +} + +TEST(Strings, PointTo) { + uint8_t data[] = { 0x83, 0x80, 0xb4, 0x38, 0x00, 0x2c, 0x0a, 0x27 }; + ByteBuffer buffer(data, sizeof(data)); + ByteCursor cursor(&buffer); + + const uint8_t *received1; + const uint8_t *received2; + const uint8_t *received3; + const uint8_t *received4; + EXPECT_FALSE(cursor + .PointTo(&received1, 3) + .PointTo(&received2, 3) + .PointTo(&received3) + .PointTo(&received4, 3)); + EXPECT_EQ(data + 0, received1); + EXPECT_EQ(data + 3, received2); + EXPECT_EQ(data + 6, received3); + EXPECT_EQ(NULL, received4); +} + +TEST(Strings, CString) { + uint8_t data[] = "abc\0\0foo"; + ByteBuffer buffer(data, sizeof(data) - 1); // don't include terminating '\0' + ByteCursor cursor(&buffer); + + string a, b, c; + EXPECT_TRUE(cursor.CString(&a).CString(&b)); + EXPECT_EQ("abc", a); + EXPECT_EQ("", b); + EXPECT_FALSE(cursor.CString(&c)); + EXPECT_EQ("", c); + EXPECT_TRUE(cursor.AtEnd()); +} + +TEST(Strings, CStringLimit) { + uint8_t data[] = "abcdef\0\0foobar"; + ByteBuffer buffer(data, sizeof(data) - 1); // don't include terminating '\0' + ByteCursor cursor(&buffer); + + string a, b, c, d, e; + + EXPECT_TRUE(cursor.CString(&a, 3)); + EXPECT_EQ("abc", a); + + EXPECT_TRUE(cursor.CString(&b, 0)); + EXPECT_EQ("", b); + + EXPECT_TRUE(cursor.CString(&c, 6)); + EXPECT_EQ("def", c); + + EXPECT_TRUE(cursor.CString(&d, 4)); + EXPECT_EQ("ooba", d); + + 
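+  // Only the final 'r' remains, so a further four-byte limited read fails
+  // and yields the empty string.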
EXPECT_FALSE(cursor.CString(&e, 4)); + EXPECT_EQ("", e); + + EXPECT_TRUE(cursor.AtEnd()); +} + +// uint8_t data[] = { 0xa6, 0x54, 0xdf, 0x67, 0x51, 0x43, 0xac, 0xf1 }; +// ByteBuffer buffer(data, sizeof(data)); diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/common.gyp b/TMessagesProj/jni/third_party/breakpad/src/common/common.gyp new file mode 100644 index 0000000000..6ccd21a2f9 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/common.gyp @@ -0,0 +1,243 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +{ + 'target_defaults': { + 'target_conditions': [ + ['OS=="mac"', { + 'defines': ['HAVE_MACH_O_NLIST_H'], + }], + ['OS=="linux"', { + 'defines': ['HAVE_A_OUT_H'], + }], + ], + }, + 'targets': [ + { + 'target_name': 'common', + 'type': 'static_library', + 'sources': [ + 'android/breakpad_getcontext.S', + 'android/include/elf.h', + 'android/include/link.h', + 'android/include/sgidefs.h', + 'android/include/stab.h', + 'android/include/sys/procfs.h', + 'android/include/sys/signal.h', + 'android/include/sys/user.h', + 'android/include/ucontext.h', + 'android/testing/include/wchar.h', + 'android/testing/mkdtemp.h', + 'android/testing/pthread_fixes.h', + 'android/ucontext_constants.h', + 'basictypes.h', + 'byte_cursor.h', + 'convert_UTF.c', + 'convert_UTF.h', + 'dwarf/bytereader-inl.h', + 'dwarf/bytereader.cc', + 'dwarf/bytereader.h', + 'dwarf/cfi_assembler.cc', + 'dwarf/cfi_assembler.h', + 'dwarf/dwarf2diehandler.cc', + 'dwarf/dwarf2diehandler.h', + 'dwarf/dwarf2enums.h', + 'dwarf/dwarf2reader.cc', + 'dwarf/dwarf2reader.h', + 'dwarf/dwarf2reader_test_common.h', + 'dwarf/functioninfo.cc', + 'dwarf/functioninfo.h', + 'dwarf/line_state_machine.h', + 'dwarf/types.h', + 'dwarf_cfi_to_module.cc', + 'dwarf_cfi_to_module.h', + 'dwarf_cu_to_module.cc', + 'dwarf_cu_to_module.h', + 'dwarf_line_to_module.cc', + 'dwarf_line_to_module.h', + 'language.cc', + 'language.h', + 'linux/crc32.cc', + 'linux/crc32.h', + 'linux/dump_symbols.cc', + 'linux/dump_symbols.h', + 'linux/eintr_wrapper.h', + 'linux/elf_core_dump.cc', + 'linux/elf_core_dump.h', + 'linux/elf_gnu_compat.h', + 'linux/elf_symbols_to_module.cc', + 'linux/elf_symbols_to_module.h', + 'linux/elfutils-inl.h', + 'linux/elfutils.cc', + 'linux/elfutils.h', + 'linux/file_id.cc', + 'linux/file_id.h', + 'linux/google_crashdump_uploader.cc', + 'linux/google_crashdump_uploader.h', + 'linux/guid_creator.cc', + 'linux/guid_creator.h', + 'linux/http_upload.cc', + 'linux/http_upload.h', + 'linux/ignore_ret.h', + 'linux/libcurl_wrapper.cc', + 'linux/libcurl_wrapper.h', + 'linux/linux_libc_support.cc', + 'linux/linux_libc_support.h', + 'linux/memory_mapped_file.cc', + 'linux/memory_mapped_file.h', + 'linux/safe_readlink.cc', + 'linux/safe_readlink.h', + 'linux/synth_elf.cc', + 'linux/synth_elf.h', + 'mac/arch_utilities.cc', + 'mac/arch_utilities.h', + 'mac/bootstrap_compat.cc', + 'mac/bootstrap_compat.h', + 'mac/byteswap.h', + 'mac/dump_syms.h', + 'mac/dump_syms.mm', + 'mac/file_id.cc', + 'mac/file_id.h', + 'mac/GTMDefines.h', + 'mac/GTMLogger.h', + 'mac/GTMLogger.m', + 'mac/HTTPMultipartUpload.h', + 'mac/HTTPMultipartUpload.m', + 'mac/MachIPC.h', + 'mac/MachIPC.mm', + 'mac/macho_id.cc', + 'mac/macho_id.h', + 'mac/macho_reader.cc', + 'mac/macho_reader.h', + 'mac/macho_utilities.cc', + 'mac/macho_utilities.h', + 'mac/macho_walker.cc', + 'mac/macho_walker.h', + 'mac/scoped_task_suspend-inl.h', + 'mac/string_utilities.cc', + 'mac/string_utilities.h', + 'mac/super_fat_arch.h', + 'md5.cc', + 'md5.h', + 'memory.h', + 'memory_range.h', + 'module.cc', + 'module.h', + 'scoped_ptr.h', + 'simple_string_dictionary.cc', + 'simple_string_dictionary.h', + 'solaris/dump_symbols.cc', + 'solaris/dump_symbols.h', + 'solaris/file_id.cc', + 'solaris/file_id.h', + 'solaris/guid_creator.cc', + 'solaris/guid_creator.h', + 'solaris/message_output.h', + 'stabs_reader.cc', + 'stabs_reader.h', + 'stabs_to_module.cc', + 'stabs_to_module.h', + 'string_conversion.cc', + 'string_conversion.h', + 'symbol_data.h', + 'test_assembler.cc', + 'test_assembler.h', + 'unordered.h', + 'using_std_string.h', 
+ 'windows/common_windows.gyp', + 'windows/dia_util.cc', + 'windows/dia_util.h', + 'windows/guid_string.cc', + 'windows/guid_string.h', + 'windows/http_upload.cc', + 'windows/http_upload.h', + 'windows/omap.cc', + 'windows/omap.h', + 'windows/omap_internal.h', + 'windows/pdb_source_line_writer.cc', + 'windows/pdb_source_line_writer.h', + 'windows/string_utils-inl.h', + 'windows/string_utils.cc', + ], + 'include_dirs': [ + '..', + ], + }, + { + 'target_name': 'common_unittests', + 'type': 'executable', + 'sources': [ + 'android/breakpad_getcontext_unittest.cc', + 'byte_cursor_unittest.cc', + 'dwarf/bytereader_unittest.cc', + 'dwarf/dwarf2diehandler_unittest.cc', + 'dwarf/dwarf2reader_cfi_unittest.cc', + 'dwarf/dwarf2reader_die_unittest.cc', + 'dwarf_cfi_to_module_unittest.cc', + 'dwarf_cu_to_module_unittest.cc', + 'dwarf_line_to_module_unittest.cc', + 'linux/dump_symbols_unittest.cc', + 'linux/elf_core_dump_unittest.cc', + 'linux/elf_symbols_to_module_unittest.cc', + 'linux/file_id_unittest.cc', + 'linux/google_crashdump_uploader_test.cc', + 'linux/linux_libc_support_unittest.cc', + 'linux/memory_mapped_file_unittest.cc', + 'linux/safe_readlink_unittest.cc', + 'linux/synth_elf_unittest.cc', + 'linux/tests/auto_testfile.h', + 'linux/tests/crash_generator.cc', + 'linux/tests/crash_generator.h', + 'mac/macho_reader_unittest.cc', + 'memory_range_unittest.cc', + 'memory_unittest.cc', + 'module_unittest.cc', + 'simple_string_dictionary_unittest.cc', + 'stabs_reader_unittest.cc', + 'stabs_to_module_unittest.cc', + 'test_assembler_unittest.cc', + 'tests/auto_tempdir.h', + 'tests/file_utils.cc', + 'tests/file_utils.h', + 'windows/omap_unittest.cc', + ], + 'include_dirs': [ + '..', + ], + 'dependencies': [ + 'common', + '../build/testing.gypi:gmock_main', + '../build/testing.gypi:gmock', + '../build/testing.gypi:gtest', + ], + 'libraries': [ + '-ldl', + ], + }, + ], +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/convert_UTF.c b/TMessagesProj/jni/third_party/breakpad/src/common/convert_UTF.c new file mode 100644 index 0000000000..12a3c89174 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/convert_UTF.c @@ -0,0 +1,554 @@ +/* + * Copyright © 1991-2015 Unicode, Inc. All rights reserved. + * Distributed under the Terms of Use in + * http://www.unicode.org/copyright.html. + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of the Unicode data files and any associated documentation + * (the "Data Files") or Unicode software and any associated documentation + * (the "Software") to deal in the Data Files or Software + * without restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, and/or sell copies of + * the Data Files or Software, and to permit persons to whom the Data Files + * or Software are furnished to do so, provided that + * (a) this copyright and permission notice appear with all copies + * of the Data Files or Software, + * (b) this copyright and permission notice appear in associated + * documentation, and + * (c) there is clear notice in each modified Data File or in the Software + * as well as in the documentation associated with the Data File(s) or + * Software that the data or software has been modified. + * + * THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF + * ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE + * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT OF THIRD PARTY RIGHTS. 
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS + * NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL + * DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, + * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER + * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + * PERFORMANCE OF THE DATA FILES OR SOFTWARE. + * + * Except as contained in this notice, the name of a copyright holder + * shall not be used in advertising or otherwise to promote the sale, + * use or other dealings in these Data Files or Software without prior + * written authorization of the copyright holder. + */ + +/* --------------------------------------------------------------------- + +Conversions between UTF32, UTF-16, and UTF-8. Source code file. +Author: Mark E. Davis, 1994. +Rev History: Rick McGowan, fixes & updates May 2001. +Sept 2001: fixed const & error conditions per +mods suggested by S. Parent & A. Lillich. +June 2002: Tim Dodd added detection and handling of incomplete +source sequences, enhanced error detection, added casts +to eliminate compiler warnings. +July 2003: slight mods to back out aggressive FFFE detection. +Jan 2004: updated switches in from-UTF8 conversions. +Oct 2004: updated to use UNI_MAX_LEGAL_UTF32 in UTF-32 conversions. + +See the header file "ConvertUTF.h" for complete documentation. + +------------------------------------------------------------------------ */ + + +#include "convert_UTF.h" +#ifdef CVTUTF_DEBUG +#include +#endif + +static const int halfShift = 10; /* used for shifting by 10 bits */ + +static const UTF32 halfBase = 0x0010000UL; +static const UTF32 halfMask = 0x3FFUL; + +#define UNI_SUR_HIGH_START (UTF32)0xD800 +#define UNI_SUR_HIGH_END (UTF32)0xDBFF +#define UNI_SUR_LOW_START (UTF32)0xDC00 +#define UNI_SUR_LOW_END (UTF32)0xDFFF + +#ifndef false +#define false 0 +#endif +#ifndef true +#define true 1 +#endif + +/* --------------------------------------------------------------------- */ + +ConversionResult ConvertUTF32toUTF16 (const UTF32** sourceStart, const UTF32* sourceEnd, + UTF16** targetStart, UTF16* targetEnd, ConversionFlags flags) { + ConversionResult result = conversionOK; + const UTF32* source = *sourceStart; + UTF16* target = *targetStart; + while (source < sourceEnd) { + UTF32 ch; + if (target >= targetEnd) { + result = targetExhausted; break; + } + ch = *source++; + if (ch <= UNI_MAX_BMP) { /* Target is a character <= 0xFFFF */ + /* UTF-16 surrogate values are illegal in UTF-32; 0xffff or 0xfffe are both reserved values */ + if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) { + if (flags == strictConversion) { + --source; /* return to the illegal value itself */ + result = sourceIllegal; + break; + } else { + *target++ = UNI_REPLACEMENT_CHAR; + } + } else { + *target++ = (UTF16)ch; /* normal case */ + } + } else if (ch > UNI_MAX_LEGAL_UTF32) { + if (flags == strictConversion) { + result = sourceIllegal; + } else { + *target++ = UNI_REPLACEMENT_CHAR; + } + } else { + /* target is a character in range 0xFFFF - 0x10FFFF. */ + if (target + 1 >= targetEnd) { + --source; /* Back up source pointer! 
*/ + result = targetExhausted; break; + } + ch -= halfBase; + *target++ = (UTF16)((ch >> halfShift) + UNI_SUR_HIGH_START); + *target++ = (UTF16)((ch & halfMask) + UNI_SUR_LOW_START); + } + } +*sourceStart = source; +*targetStart = target; +return result; +} + +/* --------------------------------------------------------------------- */ + +ConversionResult ConvertUTF16toUTF32 (const UTF16** sourceStart, const UTF16* sourceEnd, + UTF32** targetStart, UTF32* targetEnd, ConversionFlags flags) { + ConversionResult result = conversionOK; + const UTF16* source = *sourceStart; + UTF32* target = *targetStart; + UTF32 ch, ch2; + while (source < sourceEnd) { + const UTF16* oldSource = source; /* In case we have to back up because of target overflow. */ + ch = *source++; + /* If we have a surrogate pair, convert to UTF32 first. */ + if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_HIGH_END) { + /* If the 16 bits following the high surrogate are in the source buffer... */ + if (source < sourceEnd) { + ch2 = *source; + /* If it's a low surrogate, convert to UTF32. */ + if (ch2 >= UNI_SUR_LOW_START && ch2 <= UNI_SUR_LOW_END) { + ch = ((ch - UNI_SUR_HIGH_START) << halfShift) + + (ch2 - UNI_SUR_LOW_START) + halfBase; + ++source; + } else if (flags == strictConversion) { /* it's an unpaired high surrogate */ + --source; /* return to the illegal value itself */ + result = sourceIllegal; + break; + } + } else { /* We don't have the 16 bits following the high surrogate. */ + --source; /* return to the high surrogate */ + result = sourceExhausted; + break; + } + } else if (flags == strictConversion) { + /* UTF-16 surrogate values are illegal in UTF-32 */ + if (ch >= UNI_SUR_LOW_START && ch <= UNI_SUR_LOW_END) { + --source; /* return to the illegal value itself */ + result = sourceIllegal; + break; + } + } + if (target >= targetEnd) { + source = oldSource; /* Back up source pointer! */ + result = targetExhausted; break; + } + *target++ = ch; + } + *sourceStart = source; + *targetStart = target; +#ifdef CVTUTF_DEBUG + if (result == sourceIllegal) { + fprintf(stderr, "ConvertUTF16toUTF32 illegal seq 0x%04x,%04x\n", ch, ch2); + fflush(stderr); + } +#endif + return result; +} + +/* --------------------------------------------------------------------- */ + +/* + * Index into the table below with the first byte of a UTF-8 sequence to + * get the number of trailing bytes that are supposed to follow it. + * Note that *legal* UTF-8 values can't have 4 or 5-bytes. The table is + * left as-is for anyone who may want to do such conversion, which was + * allowed in earlier algorithms. + */ +static const char trailingBytesForUTF8[256] = { + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, + 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 3,3,3,3,3,3,3,3,4,4,4,4,5,5,5,5 +}; + +/* + * Magic values subtracted from a buffer value during UTF8 conversion. + * This table contains as many values as there might be trailing bytes + * in a UTF-8 sequence. 
+ */ +static const UTF32 offsetsFromUTF8[6] = { 0x00000000UL, 0x00003080UL, 0x000E2080UL, + 0x03C82080UL, 0xFA082080UL, 0x82082080UL }; + +/* + * Once the bits are split out into bytes of UTF-8, this is a mask OR-ed + * into the first byte, depending on how many bytes follow. There are + * as many entries in this table as there are UTF-8 sequence types. + * (I.e., one byte sequence, two byte... etc.). Remember that sequencs + * for *legal* UTF-8 will be 4 or fewer bytes total. + */ +static const UTF8 firstByteMark[7] = { 0x00, 0x00, 0xC0, 0xE0, 0xF0, 0xF8, 0xFC }; + +/* --------------------------------------------------------------------- */ + +/* The interface converts a whole buffer to avoid function-call overhead. +* Constants have been gathered. Loops & conditionals have been removed as +* much as possible for efficiency, in favor of drop-through switches. +* (See "Note A" at the bottom of the file for equivalent code.) +* If your compiler supports it, the "isLegalUTF8" call can be turned +* into an inline function. +*/ + +/* --------------------------------------------------------------------- */ + +ConversionResult ConvertUTF16toUTF8 (const UTF16** sourceStart, const UTF16* sourceEnd, + UTF8** targetStart, UTF8* targetEnd, ConversionFlags flags) { + ConversionResult result = conversionOK; + const UTF16* source = *sourceStart; + UTF8* target = *targetStart; + while (source < sourceEnd) { + UTF32 ch; + unsigned short bytesToWrite = 0; + const UTF32 byteMask = 0xBF; + const UTF32 byteMark = 0x80; + const UTF16* oldSource = source; /* In case we have to back up because of target overflow. */ + ch = *source++; + /* If we have a surrogate pair, convert to UTF32 first. */ + if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_HIGH_END) { + /* If the 16 bits following the high surrogate are in the source buffer... */ + if (source < sourceEnd) { + UTF32 ch2 = *source; + /* If it's a low surrogate, convert to UTF32. */ + if (ch2 >= UNI_SUR_LOW_START && ch2 <= UNI_SUR_LOW_END) { + ch = ((ch - UNI_SUR_HIGH_START) << halfShift) + + (ch2 - UNI_SUR_LOW_START) + halfBase; + ++source; + } else if (flags == strictConversion) { /* it's an unpaired high surrogate */ + --source; /* return to the illegal value itself */ + result = sourceIllegal; + break; + } + } else { /* We don't have the 16 bits following the high surrogate. */ + --source; /* return to the high surrogate */ + result = sourceExhausted; + break; + } + } else if (flags == strictConversion) { + /* UTF-16 surrogate values are illegal in UTF-32 */ + if (ch >= UNI_SUR_LOW_START && ch <= UNI_SUR_LOW_END) { + --source; /* return to the illegal value itself */ + result = sourceIllegal; + break; + } + } + /* Figure out how many bytes the result will require */ + if (ch < (UTF32)0x80) { bytesToWrite = 1; + } else if (ch < (UTF32)0x800) { bytesToWrite = 2; + } else if (ch < (UTF32)0x10000) { bytesToWrite = 3; + } else if (ch < (UTF32)0x110000) { bytesToWrite = 4; + } else { bytesToWrite = 3; + ch = UNI_REPLACEMENT_CHAR; + } + + target += bytesToWrite; + if (target > targetEnd) { + source = oldSource; /* Back up source pointer! */ + target -= bytesToWrite; result = targetExhausted; break; + } + switch (bytesToWrite) { /* note: everything falls through. 
*/ + case 4: *--target = (UTF8)((ch | byteMark) & byteMask); ch >>= 6; + case 3: *--target = (UTF8)((ch | byteMark) & byteMask); ch >>= 6; + case 2: *--target = (UTF8)((ch | byteMark) & byteMask); ch >>= 6; + case 1: *--target = (UTF8)(ch | firstByteMark[bytesToWrite]); + } + target += bytesToWrite; + } +*sourceStart = source; +*targetStart = target; +return result; +} + +/* --------------------------------------------------------------------- */ + +/* + * Utility routine to tell whether a sequence of bytes is legal UTF-8. + * This must be called with the length pre-determined by the first byte. + * If not calling this from ConvertUTF8to*, then the length can be set by: + * length = trailingBytesForUTF8[*source]+1; + * and the sequence is illegal right away if there aren't that many bytes + * available. + * If presented with a length > 4, this returns false. The Unicode + * definition of UTF-8 goes up to 4-byte sequences. + */ + +static Boolean isLegalUTF8(const UTF8 *source, int length) { + UTF8 a; + const UTF8 *srcptr = source+length; + switch (length) { + default: return false; + /* Everything else falls through when "true"... */ + case 4: if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return false; + case 3: if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return false; + case 2: if ((a = (*--srcptr)) > 0xBF) return false; + + switch (*source) { + /* no fall-through in this inner switch */ + case 0xE0: if (a < 0xA0) return false; break; + case 0xED: if (a > 0x9F) return false; break; + case 0xF0: if (a < 0x90) return false; break; + case 0xF4: if (a > 0x8F) return false; break; + default: if (a < 0x80) return false; + } + + case 1: if (*source >= 0x80 && *source < 0xC2) return false; + } + if (*source > 0xF4) return false; + return true; +} + +/* --------------------------------------------------------------------- */ + +/* + * Exported function to return whether a UTF-8 sequence is legal or not. + * This is not used here; it's just exported. + */ +Boolean isLegalUTF8Sequence(const UTF8 *source, const UTF8 *sourceEnd) { + int length = trailingBytesForUTF8[*source]+1; + if (source+length > sourceEnd) { + return false; + } + return isLegalUTF8(source, length); +} + +/* --------------------------------------------------------------------- */ + +ConversionResult ConvertUTF8toUTF16 (const UTF8** sourceStart, const UTF8* sourceEnd, + UTF16** targetStart, UTF16* targetEnd, ConversionFlags flags) { + ConversionResult result = conversionOK; + const UTF8* source = *sourceStart; + UTF16* target = *targetStart; + while (source < sourceEnd) { + UTF32 ch = 0; + unsigned short extraBytesToRead = trailingBytesForUTF8[*source]; + if (source + extraBytesToRead >= sourceEnd) { + result = sourceExhausted; break; + } + /* Do this check whether lenient or strict */ + if (! isLegalUTF8(source, extraBytesToRead+1)) { + result = sourceIllegal; + break; + } + /* + * The cases all fall through. See "Note A" below. + */ + switch (extraBytesToRead) { + case 5: ch += *source++; ch <<= 6; /* remember, illegal UTF-8 */ + case 4: ch += *source++; ch <<= 6; /* remember, illegal UTF-8 */ + case 3: ch += *source++; ch <<= 6; + case 2: ch += *source++; ch <<= 6; + case 1: ch += *source++; ch <<= 6; + case 0: ch += *source++; + } + ch -= offsetsFromUTF8[extraBytesToRead]; + + if (target >= targetEnd) { + source -= (extraBytesToRead+1); /* Back up source pointer! 
*/ + result = targetExhausted; break; + } + if (ch <= UNI_MAX_BMP) { /* Target is a character <= 0xFFFF */ + /* UTF-16 surrogate values are illegal in UTF-32 */ + if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) { + if (flags == strictConversion) { + source -= (extraBytesToRead+1); /* return to the illegal value itself */ + result = sourceIllegal; + break; + } else { + *target++ = UNI_REPLACEMENT_CHAR; + } + } else { + *target++ = (UTF16)ch; /* normal case */ + } + } else if (ch > UNI_MAX_UTF16) { + if (flags == strictConversion) { + result = sourceIllegal; + source -= (extraBytesToRead+1); /* return to the start */ + break; /* Bail out; shouldn't continue */ + } else { + *target++ = UNI_REPLACEMENT_CHAR; + } + } else { + /* target is a character in range 0xFFFF - 0x10FFFF. */ + if (target + 1 >= targetEnd) { + source -= (extraBytesToRead+1); /* Back up source pointer! */ + result = targetExhausted; break; + } + ch -= halfBase; + *target++ = (UTF16)((ch >> halfShift) + UNI_SUR_HIGH_START); + *target++ = (UTF16)((ch & halfMask) + UNI_SUR_LOW_START); + } + } +*sourceStart = source; +*targetStart = target; +return result; +} + +/* --------------------------------------------------------------------- */ + +ConversionResult ConvertUTF32toUTF8 (const UTF32** sourceStart, const UTF32* sourceEnd, + UTF8** targetStart, UTF8* targetEnd, ConversionFlags flags) { + ConversionResult result = conversionOK; + const UTF32* source = *sourceStart; + UTF8* target = *targetStart; + while (source < sourceEnd) { + UTF32 ch; + unsigned short bytesToWrite = 0; + const UTF32 byteMask = 0xBF; + const UTF32 byteMark = 0x80; + ch = *source++; + if (flags == strictConversion ) { + /* UTF-16 surrogate values are illegal in UTF-32 */ + if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) { + --source; /* return to the illegal value itself */ + result = sourceIllegal; + break; + } + } + /* + * Figure out how many bytes the result will require. Turn any + * illegally large UTF32 things (> Plane 17) into replacement chars. + */ + if (ch < (UTF32)0x80) { bytesToWrite = 1; + } else if (ch < (UTF32)0x800) { bytesToWrite = 2; + } else if (ch < (UTF32)0x10000) { bytesToWrite = 3; + } else if (ch <= UNI_MAX_LEGAL_UTF32) { bytesToWrite = 4; + } else { bytesToWrite = 3; + ch = UNI_REPLACEMENT_CHAR; + result = sourceIllegal; + } + + target += bytesToWrite; + if (target > targetEnd) { + --source; /* Back up source pointer! */ + target -= bytesToWrite; result = targetExhausted; break; + } + switch (bytesToWrite) { /* note: everything falls through. */ + case 4: *--target = (UTF8)((ch | byteMark) & byteMask); ch >>= 6; + case 3: *--target = (UTF8)((ch | byteMark) & byteMask); ch >>= 6; + case 2: *--target = (UTF8)((ch | byteMark) & byteMask); ch >>= 6; + case 1: *--target = (UTF8) (ch | firstByteMark[bytesToWrite]); + } + target += bytesToWrite; + } +*sourceStart = source; +*targetStart = target; +return result; +} + +/* --------------------------------------------------------------------- */ + +ConversionResult ConvertUTF8toUTF32 (const UTF8** sourceStart, const UTF8* sourceEnd, + UTF32** targetStart, UTF32* targetEnd, ConversionFlags flags) { + ConversionResult result = conversionOK; + const UTF8* source = *sourceStart; + UTF32* target = *targetStart; + while (source < sourceEnd) { + UTF32 ch = 0; + unsigned short extraBytesToRead = trailingBytesForUTF8[*source]; + if (source + extraBytesToRead >= sourceEnd) { + result = sourceExhausted; break; + } + /* Do this check whether lenient or strict */ + if (! 
isLegalUTF8(source, extraBytesToRead+1)) { + result = sourceIllegal; + break; + } + /* + * The cases all fall through. See "Note A" below. + */ + switch (extraBytesToRead) { + case 5: ch += *source++; ch <<= 6; + case 4: ch += *source++; ch <<= 6; + case 3: ch += *source++; ch <<= 6; + case 2: ch += *source++; ch <<= 6; + case 1: ch += *source++; ch <<= 6; + case 0: ch += *source++; + } + ch -= offsetsFromUTF8[extraBytesToRead]; + + if (target >= targetEnd) { + source -= (extraBytesToRead+1); /* Back up the source pointer! */ + result = targetExhausted; break; + } + if (ch <= UNI_MAX_LEGAL_UTF32) { + /* + * UTF-16 surrogate values are illegal in UTF-32, and anything + * over Plane 17 (> 0x10FFFF) is illegal. + */ + if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) { + if (flags == strictConversion) { + source -= (extraBytesToRead+1); /* return to the illegal value itself */ + result = sourceIllegal; + break; + } else { + *target++ = UNI_REPLACEMENT_CHAR; + } + } else { + *target++ = ch; + } + } else { /* i.e., ch > UNI_MAX_LEGAL_UTF32 */ + result = sourceIllegal; + *target++ = UNI_REPLACEMENT_CHAR; + } + } + *sourceStart = source; + *targetStart = target; + return result; +} + +/* --------------------------------------------------------------------- + +Note A. +The fall-through switches in UTF-8 reading code save a +temp variable, some decrements & conditionals. The switches +are equivalent to the following loop: +{ + int tmpBytesToRead = extraBytesToRead+1; + do { + ch += *source++; + --tmpBytesToRead; + if (tmpBytesToRead) ch <<= 6; + } while (tmpBytesToRead > 0); +} +In UTF-8 writing code, the switches on "bytesToWrite" are +similarly unrolled loops. + +--------------------------------------------------------------------- */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/convert_UTF.h b/TMessagesProj/jni/third_party/breakpad/src/common/convert_UTF.h new file mode 100644 index 0000000000..644d099506 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/convert_UTF.h @@ -0,0 +1,164 @@ +/* + * Copyright © 1991-2015 Unicode, Inc. All rights reserved. + * Distributed under the Terms of Use in + * http://www.unicode.org/copyright.html. + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of the Unicode data files and any associated documentation + * (the "Data Files") or Unicode software and any associated documentation + * (the "Software") to deal in the Data Files or Software + * without restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, and/or sell copies of + * the Data Files or Software, and to permit persons to whom the Data Files + * or Software are furnished to do so, provided that + * (a) this copyright and permission notice appear with all copies + * of the Data Files or Software, + * (b) this copyright and permission notice appear in associated + * documentation, and + * (c) there is clear notice in each modified Data File or in the Software + * as well as in the documentation associated with the Data File(s) or + * Software that the data or software has been modified. + * + * THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF + * ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE + * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT OF THIRD PARTY RIGHTS. 
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS + * NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL + * DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, + * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER + * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + * PERFORMANCE OF THE DATA FILES OR SOFTWARE. + * + * Except as contained in this notice, the name of a copyright holder + * shall not be used in advertising or otherwise to promote the sale, + * use or other dealings in these Data Files or Software without prior + * written authorization of the copyright holder. + */ + +#ifndef COMMON_CONVERT_UTF_H_ +#define COMMON_CONVERT_UTF_H_ + +/* --------------------------------------------------------------------- + +Conversions between UTF32, UTF-16, and UTF-8. Header file. + +Several funtions are included here, forming a complete set of +conversions between the three formats. UTF-7 is not included +here, but is handled in a separate source file. + +Each of these routines takes pointers to input buffers and output +buffers. The input buffers are const. + +Each routine converts the text between *sourceStart and sourceEnd, +putting the result into the buffer between *targetStart and +targetEnd. Note: the end pointers are *after* the last item: e.g. +*(sourceEnd - 1) is the last item. + +The return result indicates whether the conversion was successful, +and if not, whether the problem was in the source or target buffers. +(Only the first encountered problem is indicated.) + +After the conversion, *sourceStart and *targetStart are both +updated to point to the end of last text successfully converted in +the respective buffers. + +Input parameters: +sourceStart - pointer to a pointer to the source buffer. +The contents of this are modified on return so that +it points at the next thing to be converted. +targetStart - similarly, pointer to pointer to the target buffer. +sourceEnd, targetEnd - respectively pointers to the ends of the +two buffers, for overflow checking only. + +These conversion functions take a ConversionFlags argument. When this +flag is set to strict, both irregular sequences and isolated surrogates +will cause an error. When the flag is set to lenient, both irregular +sequences and isolated surrogates are converted. + +Whether the flag is strict or lenient, all illegal sequences will cause +an error return. This includes sequences such as: , , +or in UTF-8, and values above 0x10FFFF in UTF-32. Conformant code +must check for illegal sequences. + +When the flag is set to lenient, characters over 0x10FFFF are converted +to the replacement character; otherwise (when the flag is set to strict) +they constitute an error. + +Output parameters: +The value "sourceIllegal" is returned from some routines if the input +sequence is malformed. When "sourceIllegal" is returned, the source +value will point to the illegal value that caused the problem. E.g., +in UTF-8 when a sequence is malformed, it points to the start of the +malformed sequence. + +Author: Mark E. Davis, 1994. +Rev History: Rick McGowan, fixes & updates May 2001. +Fixes & updates, Sept 2001. + +------------------------------------------------------------------------ */ + +/* --------------------------------------------------------------------- +The following 4 definitions are compiler-specific. +The C standard does not guarantee that wchar_t has at least +16 bits, so wchar_t is no less portable than unsigned short! 
+All should be unsigned values to avoid sign extension during
+bit mask & shift operations.
+------------------------------------------------------------------------ */
+
+typedef unsigned long  UTF32; /* at least 32 bits */
+typedef unsigned short UTF16; /* at least 16 bits */
+typedef unsigned char  UTF8;  /* typically 8 bits */
+typedef unsigned char  Boolean; /* 0 or 1 */
+
+/* Some fundamental constants */
+#define UNI_REPLACEMENT_CHAR (UTF32)0x0000FFFD
+#define UNI_MAX_BMP (UTF32)0x0000FFFF
+#define UNI_MAX_UTF16 (UTF32)0x0010FFFF
+#define UNI_MAX_UTF32 (UTF32)0x7FFFFFFF
+#define UNI_MAX_LEGAL_UTF32 (UTF32)0x0010FFFF
+
+typedef enum {
+  conversionOK,    /* conversion successful */
+  sourceExhausted, /* partial character in source, but hit end */
+  targetExhausted, /* insuff. room in target for conversion */
+  sourceIllegal    /* source sequence is illegal/malformed */
+} ConversionResult;
+
+typedef enum {
+  strictConversion = 0,
+  lenientConversion
+} ConversionFlags;
+
+/* This is for C++ and does no harm in C */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+ConversionResult ConvertUTF8toUTF16 (const UTF8** sourceStart, const UTF8* sourceEnd,
+                                     UTF16** targetStart, UTF16* targetEnd, ConversionFlags flags);
+
+ConversionResult ConvertUTF16toUTF8 (const UTF16** sourceStart, const UTF16* sourceEnd,
+                                     UTF8** targetStart, UTF8* targetEnd, ConversionFlags flags);
+
+ConversionResult ConvertUTF8toUTF32 (const UTF8** sourceStart, const UTF8* sourceEnd,
+                                     UTF32** targetStart, UTF32* targetEnd, ConversionFlags flags);
+
+ConversionResult ConvertUTF32toUTF8 (const UTF32** sourceStart, const UTF32* sourceEnd,
+                                     UTF8** targetStart, UTF8* targetEnd, ConversionFlags flags);
+
+ConversionResult ConvertUTF16toUTF32 (const UTF16** sourceStart, const UTF16* sourceEnd,
+                                      UTF32** targetStart, UTF32* targetEnd, ConversionFlags flags);
+
+ConversionResult ConvertUTF32toUTF16 (const UTF32** sourceStart, const UTF32* sourceEnd,
+                                      UTF16** targetStart, UTF16* targetEnd, ConversionFlags flags);
+
+Boolean isLegalUTF8Sequence(const UTF8 *source, const UTF8 *sourceEnd);
+
+#ifdef __cplusplus
+}
+#endif
+
+/* --------------------------------------------------------------------- */
+
+#endif // COMMON_CONVERT_UTF_H_
diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/bytereader-inl.h b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/bytereader-inl.h
new file mode 100644
index 0000000000..3c167089f7
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/bytereader-inl.h
@@ -0,0 +1,175 @@
+// Copyright 2006 Google Inc. All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef UTIL_DEBUGINFO_BYTEREADER_INL_H__
+#define UTIL_DEBUGINFO_BYTEREADER_INL_H__
+
+#include "common/dwarf/bytereader.h"
+
+#include <assert.h>
+
+namespace dwarf2reader {
+
+inline uint8 ByteReader::ReadOneByte(const char* buffer) const {
+  return buffer[0];
+}
+
+inline uint16 ByteReader::ReadTwoBytes(const char* signed_buffer) const {
+  const unsigned char *buffer
+    = reinterpret_cast<const unsigned char *>(signed_buffer);
+  const uint16 buffer0 = buffer[0];
+  const uint16 buffer1 = buffer[1];
+  if (endian_ == ENDIANNESS_LITTLE) {
+    return buffer0 | buffer1 << 8;
+  } else {
+    return buffer1 | buffer0 << 8;
+  }
+}
+
+inline uint64 ByteReader::ReadFourBytes(const char* signed_buffer) const {
+  const unsigned char *buffer
+    = reinterpret_cast<const unsigned char *>(signed_buffer);
+  const uint32 buffer0 = buffer[0];
+  const uint32 buffer1 = buffer[1];
+  const uint32 buffer2 = buffer[2];
+  const uint32 buffer3 = buffer[3];
+  if (endian_ == ENDIANNESS_LITTLE) {
+    return buffer0 | buffer1 << 8 | buffer2 << 16 | buffer3 << 24;
+  } else {
+    return buffer3 | buffer2 << 8 | buffer1 << 16 | buffer0 << 24;
+  }
+}
+
+inline uint64 ByteReader::ReadEightBytes(const char* signed_buffer) const {
+  const unsigned char *buffer
+    = reinterpret_cast<const unsigned char *>(signed_buffer);
+  const uint64 buffer0 = buffer[0];
+  const uint64 buffer1 = buffer[1];
+  const uint64 buffer2 = buffer[2];
+  const uint64 buffer3 = buffer[3];
+  const uint64 buffer4 = buffer[4];
+  const uint64 buffer5 = buffer[5];
+  const uint64 buffer6 = buffer[6];
+  const uint64 buffer7 = buffer[7];
+  if (endian_ == ENDIANNESS_LITTLE) {
+    return buffer0 | buffer1 << 8 | buffer2 << 16 | buffer3 << 24 |
+      buffer4 << 32 | buffer5 << 40 | buffer6 << 48 | buffer7 << 56;
+  } else {
+    return buffer7 | buffer6 << 8 | buffer5 << 16 | buffer4 << 24 |
+      buffer3 << 32 | buffer2 << 40 | buffer1 << 48 | buffer0 << 56;
+  }
+}
+
+// Read an unsigned LEB128 number. Each byte contains 7 bits of
+// information, plus one bit saying whether the number continues or
+// not.
+
+inline uint64 ByteReader::ReadUnsignedLEB128(const char* buffer,
+                                             size_t* len) const {
+  uint64 result = 0;
+  size_t num_read = 0;
+  unsigned int shift = 0;
+  unsigned char byte;
+
+  do {
+    byte = *buffer++;
+    num_read++;
+
+    result |= (static_cast<uint64>(byte & 0x7f)) << shift;
+
+    shift += 7;
+
+  } while (byte & 0x80);
+
+  *len = num_read;
+
+  return result;
+}
+
+// Read a signed LEB128 number. These are like regular LEB128
+// numbers, except the last byte may have a sign bit set.
+
+inline int64 ByteReader::ReadSignedLEB128(const char* buffer,
+                                          size_t* len) const {
+  int64 result = 0;
+  unsigned int shift = 0;
+  size_t num_read = 0;
+  unsigned char byte;
+
+  do {
+    byte = *buffer++;
+    num_read++;
+    result |= (static_cast<uint64>(byte & 0x7f) << shift);
+    shift += 7;
+  } while (byte & 0x80);
+
+  if ((shift < 8 * sizeof (result)) && (byte & 0x40))
+    result |= -((static_cast<int64>(1)) << shift);
+  *len = num_read;
+  return result;
+}
+
+inline uint64 ByteReader::ReadOffset(const char* buffer) const {
+  assert(this->offset_reader_);
+  return (this->*offset_reader_)(buffer);
+}
+
+inline uint64 ByteReader::ReadAddress(const char* buffer) const {
+  assert(this->address_reader_);
+  return (this->*address_reader_)(buffer);
+}
+
+inline void ByteReader::SetCFIDataBase(uint64 section_base,
+                                       const char *buffer_base) {
+  section_base_ = section_base;
+  buffer_base_ = buffer_base;
+  have_section_base_ = true;
+}
+
+inline void ByteReader::SetTextBase(uint64 text_base) {
+  text_base_ = text_base;
+  have_text_base_ = true;
+}
+
+inline void ByteReader::SetDataBase(uint64 data_base) {
+  data_base_ = data_base;
+  have_data_base_ = true;
+}
+
+inline void ByteReader::SetFunctionBase(uint64 function_base) {
+  function_base_ = function_base;
+  have_function_base_ = true;
+}
+
+inline void ByteReader::ClearFunctionBase() {
+  have_function_base_ = false;
+}
+
+} // namespace dwarf2reader
+
+#endif // UTIL_DEBUGINFO_BYTEREADER_INL_H__
diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/bytereader.cc b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/bytereader.cc
new file mode 100644
index 0000000000..6802026449
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/bytereader.cc
@@ -0,0 +1,245 @@
+// Copyright (c) 2010 Google Inc. All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
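The two LEB128 readers above are easy to sanity-check by hand. The following sketch is illustrative only and not part of the imported breakpad sources; it assumes the breakpad src/ directory is on the include path:

  #include <assert.h>
  #include <stddef.h>

  #include "common/dwarf/bytereader.h"
  #include "common/dwarf/bytereader-inl.h"

  static void LEB128Example() {
    // Endianness only matters for the fixed-width readers, not for LEB128.
    dwarf2reader::ByteReader reader(dwarf2reader::ENDIANNESS_LITTLE);
    size_t len = 0;
    // 0xe5 0x8e 0x26 is the unsigned LEB128 encoding of 624485.
    const char unsigned_bytes[] = { '\xe5', '\x8e', '\x26' };
    assert(reader.ReadUnsignedLEB128(unsigned_bytes, &len) == 624485);
    assert(len == 3);
    // 0xc0 0xbb 0x78 is the signed LEB128 encoding of -123456.
    const char signed_bytes[] = { '\xc0', '\xbb', '\x78' };
    assert(reader.ReadSignedLEB128(signed_bytes, &len) == -123456);
    assert(len == 3);
  }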
+
+#include <assert.h>
+#include <stdlib.h>
+
+#include "common/dwarf/bytereader-inl.h"
+#include "common/dwarf/bytereader.h"
+
+namespace dwarf2reader {
+
+ByteReader::ByteReader(enum Endianness endian)
+    :offset_reader_(NULL), address_reader_(NULL), endian_(endian),
+     address_size_(0), offset_size_(0),
+     have_section_base_(), have_text_base_(), have_data_base_(),
+     have_function_base_() { }
+
+ByteReader::~ByteReader() { }
+
+void ByteReader::SetOffsetSize(uint8 size) {
+  offset_size_ = size;
+  assert(size == 4 || size == 8);
+  if (size == 4) {
+    this->offset_reader_ = &ByteReader::ReadFourBytes;
+  } else {
+    this->offset_reader_ = &ByteReader::ReadEightBytes;
+  }
+}
+
+void ByteReader::SetAddressSize(uint8 size) {
+  address_size_ = size;
+  assert(size == 4 || size == 8);
+  if (size == 4) {
+    this->address_reader_ = &ByteReader::ReadFourBytes;
+  } else {
+    this->address_reader_ = &ByteReader::ReadEightBytes;
+  }
+}
+
+uint64 ByteReader::ReadInitialLength(const char* start, size_t* len) {
+  const uint64 initial_length = ReadFourBytes(start);
+  start += 4;
+
+  // In DWARF2/3, if the initial length is all 1 bits, then the offset
+  // size is 8 and we need to read the next 8 bytes for the real length.
+  if (initial_length == 0xffffffff) {
+    SetOffsetSize(8);
+    *len = 12;
+    return ReadOffset(start);
+  } else {
+    SetOffsetSize(4);
+    *len = 4;
+  }
+  return initial_length;
+}
+
+bool ByteReader::ValidEncoding(DwarfPointerEncoding encoding) const {
+  if (encoding == DW_EH_PE_omit) return true;
+  if (encoding == DW_EH_PE_aligned) return true;
+  if ((encoding & 0x7) > DW_EH_PE_udata8)
+    return false;
+  if ((encoding & 0x70) > DW_EH_PE_funcrel)
+    return false;
+  return true;
+}
+
+bool ByteReader::UsableEncoding(DwarfPointerEncoding encoding) const {
+  switch (encoding & 0x70) {
+    case DW_EH_PE_absptr:  return true;
+    case DW_EH_PE_pcrel:   return have_section_base_;
+    case DW_EH_PE_textrel: return have_text_base_;
+    case DW_EH_PE_datarel: return have_data_base_;
+    case DW_EH_PE_funcrel: return have_function_base_;
+    default:               return false;
+  }
+}
+
+uint64 ByteReader::ReadEncodedPointer(const char *buffer,
+                                      DwarfPointerEncoding encoding,
+                                      size_t *len) const {
+  // UsableEncoding doesn't approve of DW_EH_PE_omit, so we shouldn't
+  // see it here.
+  assert(encoding != DW_EH_PE_omit);
+
+  // The Linux Standards Base 4.0 does not make this clear, but the
+  // GNU tools (gcc/unwind-pe.h; readelf/dwarf.c; gdb/dwarf2-frame.c)
+  // agree that aligned pointers are always absolute, machine-sized,
+  // machine-signed pointers.
+  if (encoding == DW_EH_PE_aligned) {
+    assert(have_section_base_);
+
+    // We don't need to align BUFFER in *our* address space. Rather, we
+    // need to find the next position in our buffer that would be aligned
+    // when the .eh_frame section the buffer contains is loaded into the
+    // program's memory. So align assuming that buffer_base_ gets loaded at
+    // address section_base_, where section_base_ itself may or may not be
+    // aligned.
+
+    // First, find the offset to START from the closest prior aligned
+    // address.
+    uint64 skew = section_base_ & (AddressSize() - 1);
+    // Now find the offset from that aligned address to buffer.
+    uint64 offset = skew + (buffer - buffer_base_);
+    // Round up to the next boundary.
+    uint64 aligned = (offset + AddressSize() - 1) & -AddressSize();
+    // Convert back to a pointer.
+    const char *aligned_buffer = buffer_base_ + (aligned - skew);
+    // Finally, store the length and actually fetch the pointer.
+ *len = aligned_buffer - buffer + AddressSize(); + return ReadAddress(aligned_buffer); + } + + // Extract the value first, ignoring whether it's a pointer or an + // offset relative to some base. + uint64 offset; + switch (encoding & 0x0f) { + case DW_EH_PE_absptr: + // DW_EH_PE_absptr is weird, as it is used as a meaningful value for + // both the high and low nybble of encoding bytes. When it appears in + // the high nybble, it means that the pointer is absolute, not an + // offset from some base address. When it appears in the low nybble, + // as here, it means that the pointer is stored as a normal + // machine-sized and machine-signed address. A low nybble of + // DW_EH_PE_absptr does not imply that the pointer is absolute; it is + // correct for us to treat the value as an offset from a base address + // if the upper nybble is not DW_EH_PE_absptr. + offset = ReadAddress(buffer); + *len = AddressSize(); + break; + + case DW_EH_PE_uleb128: + offset = ReadUnsignedLEB128(buffer, len); + break; + + case DW_EH_PE_udata2: + offset = ReadTwoBytes(buffer); + *len = 2; + break; + + case DW_EH_PE_udata4: + offset = ReadFourBytes(buffer); + *len = 4; + break; + + case DW_EH_PE_udata8: + offset = ReadEightBytes(buffer); + *len = 8; + break; + + case DW_EH_PE_sleb128: + offset = ReadSignedLEB128(buffer, len); + break; + + case DW_EH_PE_sdata2: + offset = ReadTwoBytes(buffer); + // Sign-extend from 16 bits. + offset = (offset ^ 0x8000) - 0x8000; + *len = 2; + break; + + case DW_EH_PE_sdata4: + offset = ReadFourBytes(buffer); + // Sign-extend from 32 bits. + offset = (offset ^ 0x80000000ULL) - 0x80000000ULL; + *len = 4; + break; + + case DW_EH_PE_sdata8: + // No need to sign-extend; this is the full width of our type. + offset = ReadEightBytes(buffer); + *len = 8; + break; + + default: + abort(); + } + + // Find the appropriate base address. + uint64 base; + switch (encoding & 0x70) { + case DW_EH_PE_absptr: + base = 0; + break; + + case DW_EH_PE_pcrel: + assert(have_section_base_); + base = section_base_ + (buffer - buffer_base_); + break; + + case DW_EH_PE_textrel: + assert(have_text_base_); + base = text_base_; + break; + + case DW_EH_PE_datarel: + assert(have_data_base_); + base = data_base_; + break; + + case DW_EH_PE_funcrel: + assert(have_function_base_); + base = function_base_; + break; + + default: + abort(); + } + + uint64 pointer = base + offset; + + // Remove inappropriate upper bits. + if (AddressSize() == 4) + pointer = pointer & 0xffffffff; + else + assert(AddressSize() == sizeof(uint64)); + + return pointer; +} + +} // namespace dwarf2reader diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/bytereader.h b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/bytereader.h new file mode 100644 index 0000000000..e3894273f1 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/bytereader.h @@ -0,0 +1,310 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2010 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef COMMON_DWARF_BYTEREADER_H__ +#define COMMON_DWARF_BYTEREADER_H__ + +#include +#include "common/dwarf/types.h" +#include "common/dwarf/dwarf2enums.h" + +namespace dwarf2reader { + +// We can't use the obvious name of LITTLE_ENDIAN and BIG_ENDIAN +// because it conflicts with a macro +enum Endianness { + ENDIANNESS_BIG, + ENDIANNESS_LITTLE +}; + +// A ByteReader knows how to read single- and multi-byte values of +// various endiannesses, sizes, and encodings, as used in DWARF +// debugging information and Linux C++ exception handling data. +class ByteReader { + public: + // Construct a ByteReader capable of reading one-, two-, four-, and + // eight-byte values according to ENDIANNESS, absolute machine-sized + // addresses, DWARF-style "initial length" values, signed and + // unsigned LEB128 numbers, and Linux C++ exception handling data's + // encoded pointers. + explicit ByteReader(enum Endianness endianness); + virtual ~ByteReader(); + + // Read a single byte from BUFFER and return it as an unsigned 8 bit + // number. + uint8 ReadOneByte(const char* buffer) const; + + // Read two bytes from BUFFER and return them as an unsigned 16 bit + // number, using this ByteReader's endianness. + uint16 ReadTwoBytes(const char* buffer) const; + + // Read four bytes from BUFFER and return them as an unsigned 32 bit + // number, using this ByteReader's endianness. This function returns + // a uint64 so that it is compatible with ReadAddress and + // ReadOffset. The number it returns will never be outside the range + // of an unsigned 32 bit integer. + uint64 ReadFourBytes(const char* buffer) const; + + // Read eight bytes from BUFFER and return them as an unsigned 64 + // bit number, using this ByteReader's endianness. + uint64 ReadEightBytes(const char* buffer) const; + + // Read an unsigned LEB128 (Little Endian Base 128) number from + // BUFFER and return it as an unsigned 64 bit integer. Set LEN to + // the number of bytes read. + // + // The unsigned LEB128 representation of an integer N is a variable + // number of bytes: + // + // - If N is between 0 and 0x7f, then its unsigned LEB128 + // representation is a single byte whose value is N. + // + // - Otherwise, its unsigned LEB128 representation is (N & 0x7f) | + // 0x80, followed by the unsigned LEB128 representation of N / + // 128, rounded towards negative infinity. + // + // In other words, we break VALUE into groups of seven bits, put + // them in little-endian order, and then write them as eight-bit + // bytes with the high bit on all but the last. 
+ uint64 ReadUnsignedLEB128(const char* buffer, size_t* len) const; + + // Read a signed LEB128 number from BUFFER and return it as an + // signed 64 bit integer. Set LEN to the number of bytes read. + // + // The signed LEB128 representation of an integer N is a variable + // number of bytes: + // + // - If N is between -0x40 and 0x3f, then its signed LEB128 + // representation is a single byte whose value is N in two's + // complement. + // + // - Otherwise, its signed LEB128 representation is (N & 0x7f) | + // 0x80, followed by the signed LEB128 representation of N / 128, + // rounded towards negative infinity. + // + // In other words, we break VALUE into groups of seven bits, put + // them in little-endian order, and then write them as eight-bit + // bytes with the high bit on all but the last. + int64 ReadSignedLEB128(const char* buffer, size_t* len) const; + + // Indicate that addresses on this architecture are SIZE bytes long. SIZE + // must be either 4 or 8. (DWARF allows addresses to be any number of + // bytes in length from 1 to 255, but we only support 32- and 64-bit + // addresses at the moment.) You must call this before using the + // ReadAddress member function. + // + // For data in a .debug_info section, or something that .debug_info + // refers to like line number or macro data, the compilation unit + // header's address_size field indicates the address size to use. Call + // frame information doesn't indicate its address size (a shortcoming of + // the spec); you must supply the appropriate size based on the + // architecture of the target machine. + void SetAddressSize(uint8 size); + + // Return the current address size, in bytes. This is either 4, + // indicating 32-bit addresses, or 8, indicating 64-bit addresses. + uint8 AddressSize() const { return address_size_; } + + // Read an address from BUFFER and return it as an unsigned 64 bit + // integer, respecting this ByteReader's endianness and address size. You + // must call SetAddressSize before calling this function. + uint64 ReadAddress(const char* buffer) const; + + // DWARF actually defines two slightly different formats: 32-bit DWARF + // and 64-bit DWARF. This is *not* related to the size of registers or + // addresses on the target machine; it refers only to the size of section + // offsets and data lengths appearing in the DWARF data. One only needs + // 64-bit DWARF when the debugging data itself is larger than 4GiB. + // 32-bit DWARF can handle x86_64 or PPC64 code just fine, unless the + // debugging data itself is very large. + // + // DWARF information identifies itself as 32-bit or 64-bit DWARF: each + // compilation unit and call frame information entry begins with an + // "initial length" field, which, in addition to giving the length of the + // data, also indicates the size of section offsets and lengths appearing + // in that data. The ReadInitialLength member function, below, reads an + // initial length and sets the ByteReader's offset size as a side effect. + // Thus, in the normal process of reading DWARF data, the appropriate + // offset size is set automatically. So, you should only need to call + // SetOffsetSize if you are using the same ByteReader to jump from the + // midst of one block of DWARF data into another. + + // Read a DWARF "initial length" field from START, and return it as + // an unsigned 64 bit integer, respecting this ByteReader's + // endianness. Set *LEN to the length of the initial length in + // bytes, either four or twelve. 
As a side effect, set this + // ByteReader's offset size to either 4 (if we see a 32-bit DWARF + // initial length) or 8 (if we see a 64-bit DWARF initial length). + // + // A DWARF initial length is either: + // + // - a byte count stored as an unsigned 32-bit value less than + // 0xffffff00, indicating that the data whose length is being + // measured uses the 32-bit DWARF format, or + // + // - The 32-bit value 0xffffffff, followed by a 64-bit byte count, + // indicating that the data whose length is being measured uses + // the 64-bit DWARF format. + uint64 ReadInitialLength(const char* start, size_t* len); + + // Read an offset from BUFFER and return it as an unsigned 64 bit + // integer, respecting the ByteReader's endianness. In 32-bit DWARF, the + // offset is 4 bytes long; in 64-bit DWARF, the offset is eight bytes + // long. You must call ReadInitialLength or SetOffsetSize before calling + // this function; see the comments above for details. + uint64 ReadOffset(const char* buffer) const; + + // Return the current offset size, in bytes. + // A return value of 4 indicates that we are reading 32-bit DWARF. + // A return value of 8 indicates that we are reading 64-bit DWARF. + uint8 OffsetSize() const { return offset_size_; } + + // Indicate that section offsets and lengths are SIZE bytes long. SIZE + // must be either 4 (meaning 32-bit DWARF) or 8 (meaning 64-bit DWARF). + // Usually, you should not call this function yourself; instead, let a + // call to ReadInitialLength establish the data's offset size + // automatically. + void SetOffsetSize(uint8 size); + + // The Linux C++ ABI uses a variant of DWARF call frame information + // for exception handling. This data is included in the program's + // address space as the ".eh_frame" section, and intepreted at + // runtime to walk the stack, find exception handlers, and run + // cleanup code. The format is mostly the same as DWARF CFI, with + // some adjustments made to provide the additional + // exception-handling data, and to make the data easier to work with + // in memory --- for example, to allow it to be placed in read-only + // memory even when describing position-independent code. + // + // In particular, exception handling data can select a number of + // different encodings for pointers that appear in the data, as + // described by the DwarfPointerEncoding enum. There are actually + // four axes(!) to the encoding: + // + // - The pointer size: pointers can be 2, 4, or 8 bytes long, or use + // the DWARF LEB128 encoding. + // + // - The pointer's signedness: pointers can be signed or unsigned. + // + // - The pointer's base address: the data stored in the exception + // handling data can be the actual address (that is, an absolute + // pointer), or relative to one of a number of different base + // addreses --- including that of the encoded pointer itself, for + // a form of "pc-relative" addressing. + // + // - The pointer may be indirect: it may be the address where the + // true pointer is stored. (This is used to refer to things via + // global offset table entries, program linkage table entries, or + // other tricks used in position-independent code.) + // + // There are also two options that fall outside that matrix + // altogether: the pointer may be omitted, or it may have padding to + // align it on an appropriate address boundary. (That last option + // may seem like it should be just another axis, but it is not.) 
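  // Illustrative sketch (not from the upstream header; the names cursor,
  // encoding, section_load_address, and section_bytes are hypothetical):
  // a caller typically supplies whichever base addresses it has via the
  // SetFooBase members below, then checks the encoding before decoding:
  //
  //   reader.SetCFIDataBase(section_load_address, section_bytes);
  //   if (reader.ValidEncoding(encoding) && reader.UsableEncoding(encoding)) {
  //     size_t len;
  //     uint64 pointer = reader.ReadEncodedPointer(cursor, encoding, &len);
  //     cursor += len;
  //   }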
+ + // Indicate that the exception handling data is loaded starting at + // SECTION_BASE, and that the start of its buffer in our own memory + // is BUFFER_BASE. This allows us to find the address that a given + // byte in our buffer would have when loaded into the program the + // data describes. We need this to resolve DW_EH_PE_pcrel pointers. + void SetCFIDataBase(uint64 section_base, const char *buffer_base); + + // Indicate that the base address of the program's ".text" section + // is TEXT_BASE. We need this to resolve DW_EH_PE_textrel pointers. + void SetTextBase(uint64 text_base); + + // Indicate that the base address for DW_EH_PE_datarel pointers is + // DATA_BASE. The proper value depends on the ABI; it is usually the + // address of the global offset table, held in a designated register in + // position-independent code. You will need to look at the startup code + // for the target system to be sure. I tried; my eyes bled. + void SetDataBase(uint64 data_base); + + // Indicate that the base address for the FDE we are processing is + // FUNCTION_BASE. This is the start address of DW_EH_PE_funcrel + // pointers. (This encoding does not seem to be used by the GNU + // toolchain.) + void SetFunctionBase(uint64 function_base); + + // Indicate that we are no longer processing any FDE, so any use of + // a DW_EH_PE_funcrel encoding is an error. + void ClearFunctionBase(); + + // Return true if ENCODING is a valid pointer encoding. + bool ValidEncoding(DwarfPointerEncoding encoding) const; + + // Return true if we have all the information we need to read a + // pointer that uses ENCODING. This checks that the appropriate + // SetFooBase function for ENCODING has been called. + bool UsableEncoding(DwarfPointerEncoding encoding) const; + + // Read an encoded pointer from BUFFER using ENCODING; return the + // absolute address it represents, and set *LEN to the pointer's + // length in bytes, including any padding for aligned pointers. + // + // This function calls 'abort' if ENCODING is invalid or refers to a + // base address this reader hasn't been given, so you should check + // with ValidEncoding and UsableEncoding first if you would rather + // die in a more helpful way. + uint64 ReadEncodedPointer(const char *buffer, DwarfPointerEncoding encoding, + size_t *len) const; + + private: + + // Function pointer type for our address and offset readers. + typedef uint64 (ByteReader::*AddressReader)(const char*) const; + + // Read an offset from BUFFER and return it as an unsigned 64 bit + // integer. DWARF2/3 define offsets as either 4 or 8 bytes, + // generally depending on the amount of DWARF2/3 info present. + // This function pointer gets set by SetOffsetSize. + AddressReader offset_reader_; + + // Read an address from BUFFER and return it as an unsigned 64 bit + // integer. DWARF2/3 allow addresses to be any size from 0-255 + // bytes currently. Internally we support 4 and 8 byte addresses, + // and will CHECK on anything else. + // This function pointer gets set by SetAddressSize. + AddressReader address_reader_; + + Endianness endian_; + uint8 address_size_; + uint8 offset_size_; + + // Base addresses for Linux C++ exception handling data's encoded pointers. 
+ bool have_section_base_, have_text_base_, have_data_base_; + bool have_function_base_; + uint64 section_base_, text_base_, data_base_, function_base_; + const char *buffer_base_; +}; + +} // namespace dwarf2reader + +#endif // COMMON_DWARF_BYTEREADER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/bytereader_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/bytereader_unittest.cc new file mode 100644 index 0000000000..4311ab6a25 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/bytereader_unittest.cc @@ -0,0 +1,697 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
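One behavior documented in bytereader.h above is that ReadInitialLength selects the offset size as a side effect. A minimal sketch of that behavior (illustrative only, not part of the test file; it assumes the breakpad src/ directory is on the include path):

  #include <assert.h>
  #include <stddef.h>

  #include "common/dwarf/bytereader.h"
  #include "common/dwarf/bytereader-inl.h"

  static void InitialLengthExample() {
    dwarf2reader::ByteReader reader(dwarf2reader::ENDIANNESS_LITTLE);
    size_t len = 0;
    // 32-bit DWARF: a plain 4-byte count (here 0x20); the offset size becomes 4.
    const char dwarf32[] = { '\x20', '\x00', '\x00', '\x00' };
    assert(reader.ReadInitialLength(dwarf32, &len) == 0x20);
    assert(len == 4 && reader.OffsetSize() == 4);
    // 64-bit DWARF: 0xffffffff followed by an 8-byte count; the offset size becomes 8.
    const char dwarf64[] = { '\xff', '\xff', '\xff', '\xff',
                             '\x40', '\x00', '\x00', '\x00',
                             '\x00', '\x00', '\x00', '\x00' };
    assert(reader.ReadInitialLength(dwarf64, &len) == 0x40);
    assert(len == 12 && reader.OffsetSize() == 8);
  }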
+ +// Original author: Jim Blandy + +// bytereader_unittest.cc: Unit tests for dwarf2reader::ByteReader + +#include + +#include "breakpad_googletest_includes.h" +#include "common/dwarf/bytereader.h" +#include "common/dwarf/bytereader-inl.h" +#include "common/dwarf/cfi_assembler.h" +#include "common/using_std_string.h" + +using dwarf2reader::ByteReader; +using dwarf2reader::DwarfPointerEncoding; +using dwarf2reader::ENDIANNESS_BIG; +using dwarf2reader::ENDIANNESS_LITTLE; +using google_breakpad::CFISection; +using google_breakpad::test_assembler::Label; +using google_breakpad::test_assembler::kBigEndian; +using google_breakpad::test_assembler::kLittleEndian; +using google_breakpad::test_assembler::Section; +using testing::Test; + +struct ReaderFixture { + string contents; + size_t pointer_size; +}; + +class Reader: public ReaderFixture, public Test { }; +class ReaderDeathTest: public ReaderFixture, public Test { }; + +TEST_F(Reader, SimpleConstructor) { + ByteReader reader(ENDIANNESS_BIG); + reader.SetAddressSize(4); + CFISection section(kBigEndian, 4); + section + .D8(0xc0) + .D16(0xcf0d) + .D32(0x96fdd219) + .D64(0xbbf55fef0825f117ULL) + .ULEB128(0xa0927048ba8121afULL) + .LEB128(-0x4f337badf4483f83LL) + .D32(0xfec319c9); + ASSERT_TRUE(section.GetContents(&contents)); + const char *data = contents.data(); + EXPECT_EQ(0xc0U, reader.ReadOneByte(data)); + EXPECT_EQ(0xcf0dU, reader.ReadTwoBytes(data + 1)); + EXPECT_EQ(0x96fdd219U, reader.ReadFourBytes(data + 3)); + EXPECT_EQ(0xbbf55fef0825f117ULL, reader.ReadEightBytes(data + 7)); + size_t leb128_size; + EXPECT_EQ(0xa0927048ba8121afULL, + reader.ReadUnsignedLEB128(data + 15, &leb128_size)); + EXPECT_EQ(10U, leb128_size); + EXPECT_EQ(-0x4f337badf4483f83LL, + reader.ReadSignedLEB128(data + 25, &leb128_size)); + EXPECT_EQ(10U, leb128_size); + EXPECT_EQ(0xfec319c9, reader.ReadAddress(data + 35)); +} + +TEST_F(Reader, ValidEncodings) { + ByteReader reader(ENDIANNESS_LITTLE); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_absptr))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_omit))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_aligned))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_uleb128))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_udata2))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_udata4))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_udata8))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sleb128))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sdata2))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sdata4))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sdata8))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_absptr | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_uleb128 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_udata2 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_udata4 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + 
DwarfPointerEncoding(dwarf2reader::DW_EH_PE_udata8 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sleb128 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sdata2 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sdata4 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sdata8 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_absptr | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_uleb128 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_udata2 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_udata4 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_udata8 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sleb128 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sdata2 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sdata4 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sdata8 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_absptr | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_uleb128 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_udata2 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_udata4 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_udata8 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sleb128 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sdata2 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sdata4 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sdata8 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_absptr | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_uleb128 | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_udata2 | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_udata4 | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_udata8 | + dwarf2reader::DW_EH_PE_funcrel))); + 
EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sleb128 | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sdata2 | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sdata4 | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_sdata8 | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_absptr | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_uleb128 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_udata2 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_udata4 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_udata8 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sleb128 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sdata2 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sdata4 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sdata8 | + dwarf2reader::DW_EH_PE_pcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_absptr | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_uleb128 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_udata2 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_udata4 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_udata8 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sleb128 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sdata2 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sdata4 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sdata8 | + dwarf2reader::DW_EH_PE_textrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + 
dwarf2reader::DW_EH_PE_absptr | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_uleb128 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_udata2 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_udata4 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_udata8 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sleb128 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sdata2 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sdata4 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sdata8 | + dwarf2reader::DW_EH_PE_datarel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_absptr | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_uleb128 | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_udata2 | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_udata4 | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_udata8 | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sleb128 | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sdata2 | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sdata4 | + dwarf2reader::DW_EH_PE_funcrel))); + EXPECT_TRUE(reader.ValidEncoding( + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect | + dwarf2reader::DW_EH_PE_sdata8 | + dwarf2reader::DW_EH_PE_funcrel))); + + EXPECT_FALSE(reader.ValidEncoding(DwarfPointerEncoding(0x05))); + EXPECT_FALSE(reader.ValidEncoding(DwarfPointerEncoding(0x07))); + EXPECT_FALSE(reader.ValidEncoding(DwarfPointerEncoding(0x0d))); + EXPECT_FALSE(reader.ValidEncoding(DwarfPointerEncoding(0x0f))); + EXPECT_FALSE(reader.ValidEncoding(DwarfPointerEncoding(0x51))); + EXPECT_FALSE(reader.ValidEncoding(DwarfPointerEncoding(0x60))); + EXPECT_FALSE(reader.ValidEncoding(DwarfPointerEncoding(0x70))); + EXPECT_FALSE(reader.ValidEncoding(DwarfPointerEncoding(0xf0))); + EXPECT_FALSE(reader.ValidEncoding(DwarfPointerEncoding(0xd0))); +} + +TEST_F(ReaderDeathTest, DW_EH_PE_omit) { + static const char data[1] = { 42 }; + ByteReader 
reader(ENDIANNESS_BIG); + reader.SetAddressSize(4); + EXPECT_DEATH(reader.ReadEncodedPointer(data, dwarf2reader::DW_EH_PE_omit, + &pointer_size), + "encoding != DW_EH_PE_omit"); +} + +TEST_F(Reader, DW_EH_PE_absptr4) { + static const char data[] = { 0x27, 0x57, 0xea, 0x40 }; + ByteReader reader(ENDIANNESS_LITTLE); + reader.SetAddressSize(4); + EXPECT_EQ(0x40ea5727U, + reader.ReadEncodedPointer(data, dwarf2reader::DW_EH_PE_absptr, + &pointer_size)); + EXPECT_EQ(4U, pointer_size); +} + +TEST_F(Reader, DW_EH_PE_absptr8) { + static const char data[] = { + 0x60, 0x27, 0x57, 0xea, 0x40, 0xc2, 0x98, 0x05, 0x01, 0x50 + }; + ByteReader reader(ENDIANNESS_LITTLE); + reader.SetAddressSize(8); + EXPECT_EQ(0x010598c240ea5727ULL, + reader.ReadEncodedPointer(data + 1, dwarf2reader::DW_EH_PE_absptr, + &pointer_size)); + EXPECT_EQ(8U, pointer_size); +} + +TEST_F(Reader, DW_EH_PE_uleb128) { + static const char data[] = { 0x81, 0x84, 0x4c }; + ByteReader reader(ENDIANNESS_LITTLE); + reader.SetAddressSize(4); + EXPECT_EQ(0x130201U, + reader.ReadEncodedPointer(data, dwarf2reader::DW_EH_PE_uleb128, + &pointer_size)); + EXPECT_EQ(3U, pointer_size); +} + +TEST_F(Reader, DW_EH_PE_udata2) { + static const char data[] = { 0xf4, 0x8d }; + ByteReader reader(ENDIANNESS_BIG); + reader.SetAddressSize(4); + EXPECT_EQ(0xf48dU, + reader.ReadEncodedPointer(data, dwarf2reader::DW_EH_PE_udata2, + &pointer_size)); + EXPECT_EQ(2U, pointer_size); +} + +TEST_F(Reader, DW_EH_PE_udata4) { + static const char data[] = { 0xb2, 0x68, 0xa5, 0x62, 0x8f, 0x8b }; + ByteReader reader(ENDIANNESS_BIG); + reader.SetAddressSize(8); + EXPECT_EQ(0xa5628f8b, + reader.ReadEncodedPointer(data + 2, dwarf2reader::DW_EH_PE_udata4, + &pointer_size)); + EXPECT_EQ(4U, pointer_size); +} + +TEST_F(Reader, DW_EH_PE_udata8Addr8) { + static const char data[] = { + 0x27, 0x04, 0x73, 0x04, 0x69, 0x9f, 0x19, 0xed, 0x8f, 0xfe + }; + ByteReader reader(ENDIANNESS_LITTLE); + reader.SetAddressSize(8); + EXPECT_EQ(0x8fed199f69047304ULL, + reader.ReadEncodedPointer(data + 1, dwarf2reader::DW_EH_PE_udata8, + &pointer_size)); + EXPECT_EQ(8U, pointer_size); +} + +TEST_F(Reader, DW_EH_PE_udata8Addr4) { + static const char data[] = { + 0x27, 0x04, 0x73, 0x04, 0x69, 0x9f, 0x19, 0xed, 0x8f, 0xfe + }; + ByteReader reader(ENDIANNESS_LITTLE); + reader.SetAddressSize(4); + EXPECT_EQ(0x69047304ULL, + reader.ReadEncodedPointer(data + 1, dwarf2reader::DW_EH_PE_udata8, + &pointer_size)); + EXPECT_EQ(8U, pointer_size); +} + +TEST_F(Reader, DW_EH_PE_sleb128) { + static const char data[] = { 0x42, 0xff, 0xfb, 0x73 }; + ByteReader reader(ENDIANNESS_BIG); + reader.SetAddressSize(4); + EXPECT_EQ(-0x030201U & 0xffffffff, + reader.ReadEncodedPointer(data + 1, dwarf2reader::DW_EH_PE_sleb128, + &pointer_size)); + EXPECT_EQ(3U, pointer_size); +} + +TEST_F(Reader, DW_EH_PE_sdata2) { + static const char data[] = { 0xb9, 0xbf }; + ByteReader reader(ENDIANNESS_LITTLE); + reader.SetAddressSize(8); + EXPECT_EQ(0xffffffffffffbfb9ULL, + reader.ReadEncodedPointer(data, dwarf2reader::DW_EH_PE_sdata2, + &pointer_size)); + EXPECT_EQ(2U, pointer_size); +} + +TEST_F(Reader, DW_EH_PE_sdata4) { + static const char data[] = { 0xa0, 0xca, 0xf2, 0xb8, 0xc2, 0xad }; + ByteReader reader(ENDIANNESS_LITTLE); + reader.SetAddressSize(8); + EXPECT_EQ(0xffffffffadc2b8f2ULL, + reader.ReadEncodedPointer(data + 2, dwarf2reader::DW_EH_PE_sdata4, + &pointer_size)); + EXPECT_EQ(4U, pointer_size); +} + +TEST_F(Reader, DW_EH_PE_sdata8) { + static const char data[] = { + 0xf6, 0x66, 0x57, 0x79, 0xe0, 0x0c, 0x9b, 0x26, 0x87 + }; + 
ByteReader reader(ENDIANNESS_LITTLE); + reader.SetAddressSize(8); + EXPECT_EQ(0x87269b0ce0795766ULL, + reader.ReadEncodedPointer(data + 1, dwarf2reader::DW_EH_PE_sdata8, + &pointer_size)); + EXPECT_EQ(8U, pointer_size); +} + +TEST_F(Reader, DW_EH_PE_pcrel) { + static const char data[] = { 0x4a, 0x8b, 0x1b, 0x14, 0xc8, 0xc4, 0x02, 0xce }; + ByteReader reader(ENDIANNESS_BIG); + reader.SetAddressSize(4); + DwarfPointerEncoding encoding = + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_pcrel + | dwarf2reader::DW_EH_PE_absptr); + reader.SetCFIDataBase(0x89951377, data); + EXPECT_EQ(0x89951377 + 3 + 0x14c8c402, + reader.ReadEncodedPointer(data + 3, encoding, &pointer_size)); + EXPECT_EQ(4U, pointer_size); +} + +TEST_F(Reader, DW_EH_PE_textrel) { + static const char data[] = { 0xd9, 0x0d, 0x05, 0x17, 0xc9, 0x7a, 0x42, 0x1e }; + ByteReader reader(ENDIANNESS_LITTLE); + reader.SetAddressSize(4); + reader.SetTextBase(0xb91beaf0); + DwarfPointerEncoding encoding = + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_textrel + | dwarf2reader::DW_EH_PE_sdata2); + EXPECT_EQ((0xb91beaf0 + 0xffffc917) & 0xffffffff, + reader.ReadEncodedPointer(data + 3, encoding, &pointer_size)); + EXPECT_EQ(2U, pointer_size); +} + +TEST_F(Reader, DW_EH_PE_datarel) { + static const char data[] = { 0x16, 0xf2, 0xbb, 0x82, 0x68, 0xa7, 0xbc, 0x39 }; + ByteReader reader(ENDIANNESS_BIG); + reader.SetAddressSize(8); + reader.SetDataBase(0xbef308bd25ce74f0ULL); + DwarfPointerEncoding encoding = + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_datarel + | dwarf2reader::DW_EH_PE_sleb128); + EXPECT_EQ(0xbef308bd25ce74f0ULL + 0xfffffffffffa013bULL, + reader.ReadEncodedPointer(data + 2, encoding, &pointer_size)); + EXPECT_EQ(3U, pointer_size); +} + +TEST_F(Reader, DW_EH_PE_funcrel) { + static const char data[] = { 0x84, 0xf8, 0x14, 0x01, 0x61, 0xd1, 0x48, 0xc9 }; + ByteReader reader(ENDIANNESS_BIG); + reader.SetAddressSize(4); + reader.SetFunctionBase(0x823c3520); + DwarfPointerEncoding encoding = + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_funcrel + | dwarf2reader::DW_EH_PE_udata2); + EXPECT_EQ(0x823c3520 + 0xd148, + reader.ReadEncodedPointer(data + 5, encoding, &pointer_size)); + EXPECT_EQ(2U, pointer_size); +} + +TEST(UsableBase, CFI) { + static const char data[1] = { 0x42 }; + ByteReader reader(ENDIANNESS_BIG); + reader.SetCFIDataBase(0xb31cbd20, data); + EXPECT_TRUE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_absptr)); + EXPECT_TRUE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_pcrel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_textrel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_datarel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_funcrel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_omit)); + EXPECT_FALSE(reader.UsableEncoding(DwarfPointerEncoding(0x60))); +} + +TEST(UsableBase, Text) { + ByteReader reader(ENDIANNESS_BIG); + reader.SetTextBase(0xa899ccb9); + EXPECT_TRUE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_absptr)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_pcrel)); + EXPECT_TRUE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_textrel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_datarel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_funcrel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_omit)); + EXPECT_FALSE(reader.UsableEncoding(DwarfPointerEncoding(0x60))); +} + +TEST(UsableBase, Data) { + ByteReader reader(ENDIANNESS_BIG); + reader.SetDataBase(0xf7b10bcd); + 
EXPECT_TRUE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_absptr)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_pcrel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_textrel)); + EXPECT_TRUE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_datarel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_funcrel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_omit)); + EXPECT_FALSE(reader.UsableEncoding(DwarfPointerEncoding(0x60))); +} + +TEST(UsableBase, Function) { + ByteReader reader(ENDIANNESS_BIG); + reader.SetFunctionBase(0xc2c0ed81); + EXPECT_TRUE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_absptr)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_pcrel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_textrel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_datarel)); + EXPECT_TRUE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_funcrel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_omit)); + EXPECT_FALSE(reader.UsableEncoding(DwarfPointerEncoding(0x60))); +} + +TEST(UsableBase, ClearFunction) { + ByteReader reader(ENDIANNESS_BIG); + reader.SetFunctionBase(0xc2c0ed81); + reader.ClearFunctionBase(); + EXPECT_TRUE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_absptr)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_pcrel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_textrel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_datarel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_funcrel)); + EXPECT_FALSE(reader.UsableEncoding(dwarf2reader::DW_EH_PE_omit)); + EXPECT_FALSE(reader.UsableEncoding(DwarfPointerEncoding(0x60))); +} + +struct AlignedFixture { + AlignedFixture() : reader(ENDIANNESS_BIG) { reader.SetAddressSize(4); } + static const char data[10]; + ByteReader reader; + size_t pointer_size; +}; + +const char AlignedFixture::data[10] = { + 0xfe, 0x6e, 0x93, 0xd8, 0x34, 0xd5, 0x1c, 0xd3, 0xac, 0x2b +}; + +class Aligned: public AlignedFixture, public Test { }; + +TEST_F(Aligned, DW_EH_PE_aligned0) { + reader.SetCFIDataBase(0xb440305c, data); + EXPECT_EQ(0xfe6e93d8U, + reader.ReadEncodedPointer(data, dwarf2reader::DW_EH_PE_aligned, + &pointer_size)); + EXPECT_EQ(4U, pointer_size); +} + +TEST_F(Aligned, DW_EH_PE_aligned1) { + reader.SetCFIDataBase(0xb440305d, data); + EXPECT_EQ(0xd834d51cU, + reader.ReadEncodedPointer(data, dwarf2reader::DW_EH_PE_aligned, + &pointer_size)); + EXPECT_EQ(7U, pointer_size); +} + +TEST_F(Aligned, DW_EH_PE_aligned2) { + reader.SetCFIDataBase(0xb440305e, data); + EXPECT_EQ(0x93d834d5U, + reader.ReadEncodedPointer(data, dwarf2reader::DW_EH_PE_aligned, + &pointer_size)); + EXPECT_EQ(6U, pointer_size); +} + +TEST_F(Aligned, DW_EH_PE_aligned3) { + reader.SetCFIDataBase(0xb440305f, data); + EXPECT_EQ(0x6e93d834U, + reader.ReadEncodedPointer(data, dwarf2reader::DW_EH_PE_aligned, + &pointer_size)); + EXPECT_EQ(5U, pointer_size); +} + +TEST_F(Aligned, DW_EH_PE_aligned11) { + reader.SetCFIDataBase(0xb4403061, data); + EXPECT_EQ(0xd834d51cU, + reader.ReadEncodedPointer(data + 1, + dwarf2reader::DW_EH_PE_aligned, + &pointer_size)); + EXPECT_EQ(6U, pointer_size); +} + +TEST_F(Aligned, DW_EH_PE_aligned30) { + reader.SetCFIDataBase(0xb4403063, data); + EXPECT_EQ(0x6e93d834U, + reader.ReadEncodedPointer(data + 1, + dwarf2reader::DW_EH_PE_aligned, + &pointer_size)); + EXPECT_EQ(4U, pointer_size); +} + +TEST_F(Aligned, DW_EH_PE_aligned23) { + reader.SetCFIDataBase(0xb4403062, data); + EXPECT_EQ(0x1cd3ac2bU, + 
reader.ReadEncodedPointer(data + 3, + dwarf2reader::DW_EH_PE_aligned, + &pointer_size)); + EXPECT_EQ(7U, pointer_size); +} + +TEST_F(Aligned, DW_EH_PE_aligned03) { + reader.SetCFIDataBase(0xb4403064, data); + EXPECT_EQ(0x34d51cd3U, + reader.ReadEncodedPointer(data + 3, + dwarf2reader::DW_EH_PE_aligned, + &pointer_size)); + EXPECT_EQ(5U, pointer_size); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/cfi_assembler.cc b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/cfi_assembler.cc new file mode 100644 index 0000000000..dbc2efae6f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/cfi_assembler.cc @@ -0,0 +1,198 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// cfi_assembler.cc: Implementation of google_breakpad::CFISection class. +// See cfi_assembler.h for details. + +#include "common/dwarf/cfi_assembler.h" + +#include +#include + +namespace google_breakpad { + +using dwarf2reader::DwarfPointerEncoding; + +CFISection &CFISection::CIEHeader(uint64_t code_alignment_factor, + int data_alignment_factor, + unsigned return_address_register, + uint8_t version, + const string &augmentation, + bool dwarf64) { + assert(!entry_length_); + entry_length_ = new PendingLength(); + in_fde_ = false; + + if (dwarf64) { + D32(kDwarf64InitialLengthMarker); + D64(entry_length_->length); + entry_length_->start = Here(); + D64(eh_frame_ ? kEHFrame64CIEIdentifier : kDwarf64CIEIdentifier); + } else { + D32(entry_length_->length); + entry_length_->start = Here(); + D32(eh_frame_ ? 
kEHFrame32CIEIdentifier : kDwarf32CIEIdentifier); + } + D8(version); + AppendCString(augmentation); + ULEB128(code_alignment_factor); + LEB128(data_alignment_factor); + if (version == 1) + D8(return_address_register); + else + ULEB128(return_address_register); + return *this; +} + +CFISection &CFISection::FDEHeader(Label cie_pointer, + uint64_t initial_location, + uint64_t address_range, + bool dwarf64) { + assert(!entry_length_); + entry_length_ = new PendingLength(); + in_fde_ = true; + fde_start_address_ = initial_location; + + if (dwarf64) { + D32(0xffffffff); + D64(entry_length_->length); + entry_length_->start = Here(); + if (eh_frame_) + D64(Here() - cie_pointer); + else + D64(cie_pointer); + } else { + D32(entry_length_->length); + entry_length_->start = Here(); + if (eh_frame_) + D32(Here() - cie_pointer); + else + D32(cie_pointer); + } + EncodedPointer(initial_location); + // The FDE length in an .eh_frame section uses the same encoding as the + // initial location, but ignores the base address (selected by the upper + // nybble of the encoding), as it's a length, not an address that can be + // made relative. + EncodedPointer(address_range, + DwarfPointerEncoding(pointer_encoding_ & 0x0f)); + return *this; +} + +CFISection &CFISection::FinishEntry() { + assert(entry_length_); + Align(address_size_, dwarf2reader::DW_CFA_nop); + entry_length_->length = Here() - entry_length_->start; + delete entry_length_; + entry_length_ = NULL; + in_fde_ = false; + return *this; +} + +CFISection &CFISection::EncodedPointer(uint64_t address, + DwarfPointerEncoding encoding, + const EncodedPointerBases &bases) { + // Omitted data is extremely easy to emit. + if (encoding == dwarf2reader::DW_EH_PE_omit) + return *this; + + // If (encoding & dwarf2reader::DW_EH_PE_indirect) != 0, then we assume + // that ADDRESS is the address at which the pointer is stored --- in + // other words, that bit has no effect on how we write the pointer. + encoding = DwarfPointerEncoding(encoding & ~dwarf2reader::DW_EH_PE_indirect); + + // Find the base address to which this pointer is relative. The upper + // nybble of the encoding specifies this. + uint64_t base; + switch (encoding & 0xf0) { + case dwarf2reader::DW_EH_PE_absptr: base = 0; break; + case dwarf2reader::DW_EH_PE_pcrel: base = bases.cfi + Size(); break; + case dwarf2reader::DW_EH_PE_textrel: base = bases.text; break; + case dwarf2reader::DW_EH_PE_datarel: base = bases.data; break; + case dwarf2reader::DW_EH_PE_funcrel: base = fde_start_address_; break; + case dwarf2reader::DW_EH_PE_aligned: base = 0; break; + default: abort(); + }; + + // Make ADDRESS relative. Yes, this is appropriate even for "absptr" + // values; see gcc/unwind-pe.h. + address -= base; + + // Align the pointer, if required. + if ((encoding & 0xf0) == dwarf2reader::DW_EH_PE_aligned) + Align(AddressSize()); + + // Append ADDRESS to this section in the appropriate form. For the + // fixed-width forms, we don't need to differentiate between signed and + // unsigned encodings, because ADDRESS has already been extended to 64 + // bits before it was passed to us. 
+ switch (encoding & 0x0f) { + case dwarf2reader::DW_EH_PE_absptr: + Address(address); + break; + + case dwarf2reader::DW_EH_PE_uleb128: + ULEB128(address); + break; + + case dwarf2reader::DW_EH_PE_sleb128: + LEB128(address); + break; + + case dwarf2reader::DW_EH_PE_udata2: + case dwarf2reader::DW_EH_PE_sdata2: + D16(address); + break; + + case dwarf2reader::DW_EH_PE_udata4: + case dwarf2reader::DW_EH_PE_sdata4: + D32(address); + break; + + case dwarf2reader::DW_EH_PE_udata8: + case dwarf2reader::DW_EH_PE_sdata8: + D64(address); + break; + + default: + abort(); + } + + return *this; +}; + +const uint32_t CFISection::kDwarf64InitialLengthMarker; +const uint32_t CFISection::kDwarf32CIEIdentifier; +const uint64_t CFISection::kDwarf64CIEIdentifier; +const uint32_t CFISection::kEHFrame32CIEIdentifier; +const uint64_t CFISection::kEHFrame64CIEIdentifier; + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/cfi_assembler.h b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/cfi_assembler.h new file mode 100644 index 0000000000..227812b588 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/cfi_assembler.h @@ -0,0 +1,269 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// cfi_assembler.h: Define CFISection, a class for creating properly +// (and improperly) formatted DWARF CFI data for unit tests. 
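For orientation, the builder declared below is meant to be chained: mark a label at the CIE, emit the CIE and FDE headers, and close each entry with FinishEntry. The following is an illustrative sketch only, not code from this patch; the register number and addresses are made-up values, kLittleEndian is assumed to be the test_assembler endianness constant, and call-frame instructions would normally be appended before each FinishEntry call.

  using google_breakpad::CFISection;
  using google_breakpad::test_assembler::Label;
  using google_breakpad::test_assembler::kLittleEndian;

  CFISection section(kLittleEndian, 4);   // 32-bit addresses, .debug_frame flavor
  Label cie;
  section
      .Mark(&cie)                         // note the CIE's offset for the FDE
      .CIEHeader(1, -4, 16)               // code/data alignment factors, RA register
      .FinishEntry()                      // pad with DW_CFA_nop, patch the length
      .FDEHeader(cie, 0x1000, 0x100)      // FDE covering [0x1000, 0x1100)
      .FinishEntry();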
+
+#ifndef PROCESSOR_CFI_ASSEMBLER_H_
+#define PROCESSOR_CFI_ASSEMBLER_H_
+
+#include <string>
+
+#include "common/dwarf/dwarf2enums.h"
+#include "common/test_assembler.h"
+#include "common/using_std_string.h"
+#include "google_breakpad/common/breakpad_types.h"
+
+namespace google_breakpad {
+
+using dwarf2reader::DwarfPointerEncoding;
+using google_breakpad::test_assembler::Endianness;
+using google_breakpad::test_assembler::Label;
+using google_breakpad::test_assembler::Section;
+
+class CFISection: public Section {
+ public:
+
+  // CFI augmentation strings beginning with 'z', defined by the
+  // Linux/IA-64 C++ ABI, can specify interesting encodings for
+  // addresses appearing in FDE headers and call frame instructions (and
+  // for additional fields whose presence the augmentation string
+  // specifies). In particular, pointers can be specified to be relative
+  // to various base addresses: the start of the .text section, the
+  // location holding the address itself, and so on. These allow the
+  // frame data to be position-independent even when they live in
+  // write-protected pages. These variants are specified at the
+  // following two URLs:
+  //
+  // http://refspecs.linux-foundation.org/LSB_4.0.0/LSB-Core-generic/LSB-Core-generic/dwarfext.html
+  // http://refspecs.linux-foundation.org/LSB_4.0.0/LSB-Core-generic/LSB-Core-generic/ehframechpt.html
+  //
+  // CFISection leaves the production of well-formed 'z'-augmented CIEs and
+  // FDEs to the user, but does provide EncodedPointer, to emit
+  // properly-encoded addresses for a given pointer encoding.
+  // EncodedPointer uses an instance of this structure to find the base
+  // addresses it should use; you can establish a default for all encoded
+  // pointers appended to this section with SetEncodedPointerBases.
+  struct EncodedPointerBases {
+    EncodedPointerBases() : cfi(), text(), data() { }
+
+    // The starting address of this CFI section in memory, for
+    // DW_EH_PE_pcrel. DW_EH_PE_pcrel pointers may only be used in data
+    // that has been loaded into the program's address space.
+    uint64_t cfi;
+
+    // The starting address of this file's .text section, for DW_EH_PE_textrel.
+    uint64_t text;
+
+    // The starting address of this file's .got or .eh_frame_hdr section,
+    // for DW_EH_PE_datarel.
+    uint64_t data;
+  };
+
+  // Create a CFISection whose endianness is ENDIANNESS, and where
+  // machine addresses are ADDRESS_SIZE bytes long. If EH_FRAME is
+  // true, use the .eh_frame format, as described by the Linux
+  // Standards Base Core Specification, instead of the DWARF CFI
+  // format.
+  CFISection(Endianness endianness, size_t address_size,
+             bool eh_frame = false)
+      : Section(endianness), address_size_(address_size), eh_frame_(eh_frame),
+        pointer_encoding_(dwarf2reader::DW_EH_PE_absptr),
+        encoded_pointer_bases_(), entry_length_(NULL), in_fde_(false) {
+    // The 'start', 'Here', and 'Mark' members of a CFISection all refer
+    // to section offsets.
+    start() = 0;
+  }
+
+  // Return this CFISection's address size.
+  size_t AddressSize() const { return address_size_; }
+
+  // Return true if this CFISection uses the .eh_frame format, or
+  // false if it contains ordinary DWARF CFI data.
+  bool ContainsEHFrame() const { return eh_frame_; }
+
+  // Use ENCODING for pointers in calls to FDEHeader and EncodedPointer.
+  void SetPointerEncoding(DwarfPointerEncoding encoding) {
+    pointer_encoding_ = encoding;
+  }
+
+  // Use the addresses in BASES as the base addresses for encoded
+  // pointers in subsequent calls to FDEHeader or EncodedPointer.
+ // This function makes a copy of BASES. + void SetEncodedPointerBases(const EncodedPointerBases &bases) { + encoded_pointer_bases_ = bases; + } + + // Append a Common Information Entry header to this section with the + // given values. If dwarf64 is true, use the 64-bit DWARF initial + // length format for the CIE's initial length. Return a reference to + // this section. You should call FinishEntry after writing the last + // instruction for the CIE. + // + // Before calling this function, you will typically want to use Mark + // or Here to make a label to pass to FDEHeader that refers to this + // CIE's position in the section. + CFISection &CIEHeader(uint64_t code_alignment_factor, + int data_alignment_factor, + unsigned return_address_register, + uint8_t version = 3, + const string &augmentation = "", + bool dwarf64 = false); + + // Append a Frame Description Entry header to this section with the + // given values. If dwarf64 is true, use the 64-bit DWARF initial + // length format for the CIE's initial length. Return a reference to + // this section. You should call FinishEntry after writing the last + // instruction for the CIE. + // + // This function doesn't support entries that are longer than + // 0xffffff00 bytes. (The "initial length" is always a 32-bit + // value.) Nor does it support .debug_frame sections longer than + // 0xffffff00 bytes. + CFISection &FDEHeader(Label cie_pointer, + uint64_t initial_location, + uint64_t address_range, + bool dwarf64 = false); + + // Note the current position as the end of the last CIE or FDE we + // started, after padding with DW_CFA_nops for alignment. This + // defines the label representing the entry's length, cited in the + // entry's header. Return a reference to this section. + CFISection &FinishEntry(); + + // Append the contents of BLOCK as a DW_FORM_block value: an + // unsigned LEB128 length, followed by that many bytes of data. + CFISection &Block(const string &block) { + ULEB128(block.size()); + Append(block); + return *this; + } + + // Append ADDRESS to this section, in the appropriate size and + // endianness. Return a reference to this section. + CFISection &Address(uint64_t address) { + Section::Append(endianness(), address_size_, address); + return *this; + } + CFISection &Address(Label address) { + Section::Append(endianness(), address_size_, address); + return *this; + } + + // Append ADDRESS to this section, using ENCODING and BASES. ENCODING + // defaults to this section's default encoding, established by + // SetPointerEncoding. BASES defaults to this section's bases, set by + // SetEncodedPointerBases. If the DW_EH_PE_indirect bit is set in the + // encoding, assume that ADDRESS is where the true address is stored. + // Return a reference to this section. + // + // (C++ doesn't let me use default arguments here, because I want to + // refer to members of *this in the default argument expression.) + CFISection &EncodedPointer(uint64_t address) { + return EncodedPointer(address, pointer_encoding_, encoded_pointer_bases_); + } + CFISection &EncodedPointer(uint64_t address, DwarfPointerEncoding encoding) { + return EncodedPointer(address, encoding, encoded_pointer_bases_); + } + CFISection &EncodedPointer(uint64_t address, DwarfPointerEncoding encoding, + const EncodedPointerBases &bases); + + // Restate some member functions, to keep chaining working nicely. 
+ CFISection &Mark(Label *label) { Section::Mark(label); return *this; } + CFISection &D8(uint8_t v) { Section::D8(v); return *this; } + CFISection &D16(uint16_t v) { Section::D16(v); return *this; } + CFISection &D16(Label v) { Section::D16(v); return *this; } + CFISection &D32(uint32_t v) { Section::D32(v); return *this; } + CFISection &D32(const Label &v) { Section::D32(v); return *this; } + CFISection &D64(uint64_t v) { Section::D64(v); return *this; } + CFISection &D64(const Label &v) { Section::D64(v); return *this; } + CFISection &LEB128(long long v) { Section::LEB128(v); return *this; } + CFISection &ULEB128(uint64_t v) { Section::ULEB128(v); return *this; } + + private: + // A length value that we've appended to the section, but is not yet + // known. LENGTH is the appended value; START is a label referring + // to the start of the data whose length was cited. + struct PendingLength { + Label length; + Label start; + }; + + // Constants used in CFI/.eh_frame data: + + // If the first four bytes of an "initial length" are this constant, then + // the data uses the 64-bit DWARF format, and the length itself is the + // subsequent eight bytes. + static const uint32_t kDwarf64InitialLengthMarker = 0xffffffffU; + + // The CIE identifier for 32- and 64-bit DWARF CFI and .eh_frame data. + static const uint32_t kDwarf32CIEIdentifier = ~(uint32_t)0; + static const uint64_t kDwarf64CIEIdentifier = ~(uint64_t)0; + static const uint32_t kEHFrame32CIEIdentifier = 0; + static const uint64_t kEHFrame64CIEIdentifier = 0; + + // The size of a machine address for the data in this section. + size_t address_size_; + + // If true, we are generating a Linux .eh_frame section, instead of + // a standard DWARF .debug_frame section. + bool eh_frame_; + + // The encoding to use for FDE pointers. + DwarfPointerEncoding pointer_encoding_; + + // The base addresses to use when emitting encoded pointers. + EncodedPointerBases encoded_pointer_bases_; + + // The length value for the current entry. + // + // Oddly, this must be dynamically allocated. Labels never get new + // values; they only acquire constraints on the value they already + // have, or assert if you assign them something incompatible. So + // each header needs truly fresh Label objects to cite in their + // headers and track their positions. The alternative is explicit + // destructor invocation and a placement new. Ick. + PendingLength *entry_length_; + + // True if we are currently emitting an FDE --- that is, we have + // called FDEHeader but have not yet called FinishEntry. + bool in_fde_; + + // If in_fde_ is true, this is its starting address. We use this for + // emitting DW_EH_PE_funcrel pointers. + uint64_t fde_start_address_; +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_CFI_ASSEMBLER_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2diehandler.cc b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2diehandler.cc new file mode 100644 index 0000000000..20c15fa9d1 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2diehandler.cc @@ -0,0 +1,198 @@ +// Copyright (c) 2010 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Original author: Jim Blandy
+
+// dwarf2diehandler.cc: Implement the dwarf2reader::DIEDispatcher class.
+// See dwarf2diehandler.h for details.
+
+#include <assert.h>
+
+#include <stack>
+
+#include "common/dwarf/dwarf2diehandler.h"
+#include "common/using_std_string.h"
+
+namespace dwarf2reader {
+
+DIEDispatcher::~DIEDispatcher() {
+  while (!die_handlers_.empty()) {
+    HandlerStack &entry = die_handlers_.top();
+    if (entry.handler_ != root_handler_)
+      delete entry.handler_;
+    die_handlers_.pop();
+  }
+}
+
+bool DIEDispatcher::StartCompilationUnit(uint64 offset, uint8 address_size,
+                                         uint8 offset_size, uint64 cu_length,
+                                         uint8 dwarf_version) {
+  return root_handler_->StartCompilationUnit(offset, address_size,
+                                             offset_size, cu_length,
+                                             dwarf_version);
+}
+
+bool DIEDispatcher::StartDIE(uint64 offset, enum DwarfTag tag) {
+  // The stack entry for the parent of this DIE, if there is one.
+  HandlerStack *parent = die_handlers_.empty() ? NULL : &die_handlers_.top();
+
+  // Does this call indicate that we're done receiving the parent's
+  // attributes' values? If so, call its EndAttributes member function.
+  if (parent && parent->handler_ && !parent->reported_attributes_end_) {
+    parent->reported_attributes_end_ = true;
+    if (!parent->handler_->EndAttributes()) {
+      // Finish off this handler now, and edit *PARENT to indicate that
+      // we don't want to visit any of the children.
+      parent->handler_->Finish();
+      if (parent->handler_ != root_handler_)
+        delete parent->handler_;
+      parent->handler_ = NULL;
+      return false;
+    }
+  }
+
+  // Find a handler for this DIE.
+  DIEHandler *handler;
+  if (parent) {
+    if (parent->handler_)
+      // Ask the parent to find a handler.
+      handler = parent->handler_->FindChildHandler(offset, tag);
+    else
+      // No parent handler means we're not interested in any of our
+      // children.
+      handler = NULL;
+  } else {
+    // This is the root DIE. For a non-root DIE, the parent's handler
+    // decides whether to visit it, but the root DIE has no parent
+    // handler, so we have a special method on the root DIE handler
+    // itself to decide.
+    if (root_handler_->StartRootDIE(offset, tag))
+      handler = root_handler_;
+    else
+      handler = NULL;
+  }
+
+  // Push a handler stack entry for this new handler. As an
+  // optimization, we don't push NULL-handler entries on top of other
+  // NULL-handler entries; we just let the oldest such entry stand for
+  // the whole subtree.
+  if (handler || !parent || parent->handler_) {
+    HandlerStack entry;
+    entry.offset_ = offset;
+    entry.handler_ = handler;
+    entry.reported_attributes_end_ = false;
+    die_handlers_.push(entry);
+  }
+
+  return handler != NULL;
+}
+
+void DIEDispatcher::EndDIE(uint64 offset) {
+  assert(!die_handlers_.empty());
+  HandlerStack *entry = &die_handlers_.top();
+  if (entry->handler_) {
+    // This entry had better be the handler for this DIE.
+    assert(entry->offset_ == offset);
+    // If a DIE has no children, this EndDIE call indicates that we're
+    // done receiving its attributes' values.
+    if (!entry->reported_attributes_end_)
+      entry->handler_->EndAttributes(); // Ignore return value: no children.
+    entry->handler_->Finish();
+    if (entry->handler_ != root_handler_)
+      delete entry->handler_;
+  } else {
+    // If this DIE is within a tree we're ignoring, then don't pop the
+    // handler stack: that entry stands for the whole tree.
+    if (entry->offset_ != offset)
+      return;
+  }
+  die_handlers_.pop();
+}
+
+void DIEDispatcher::ProcessAttributeUnsigned(uint64 offset,
+                                             enum DwarfAttribute attr,
+                                             enum DwarfForm form,
+                                             uint64 data) {
+  HandlerStack &current = die_handlers_.top();
+  // This had better be an attribute of the DIE we were meant to handle.
+  assert(offset == current.offset_);
+  current.handler_->ProcessAttributeUnsigned(attr, form, data);
+}
+
+void DIEDispatcher::ProcessAttributeSigned(uint64 offset,
+                                           enum DwarfAttribute attr,
+                                           enum DwarfForm form,
+                                           int64 data) {
+  HandlerStack &current = die_handlers_.top();
+  // This had better be an attribute of the DIE we were meant to handle.
+  assert(offset == current.offset_);
+  current.handler_->ProcessAttributeSigned(attr, form, data);
+}
+
+void DIEDispatcher::ProcessAttributeReference(uint64 offset,
+                                              enum DwarfAttribute attr,
+                                              enum DwarfForm form,
+                                              uint64 data) {
+  HandlerStack &current = die_handlers_.top();
+  // This had better be an attribute of the DIE we were meant to handle.
+  assert(offset == current.offset_);
+  current.handler_->ProcessAttributeReference(attr, form, data);
+}
+
+void DIEDispatcher::ProcessAttributeBuffer(uint64 offset,
+                                           enum DwarfAttribute attr,
+                                           enum DwarfForm form,
+                                           const char* data,
+                                           uint64 len) {
+  HandlerStack &current = die_handlers_.top();
+  // This had better be an attribute of the DIE we were meant to handle.
+  assert(offset == current.offset_);
+  current.handler_->ProcessAttributeBuffer(attr, form, data, len);
+}
+
+void DIEDispatcher::ProcessAttributeString(uint64 offset,
+                                           enum DwarfAttribute attr,
+                                           enum DwarfForm form,
+                                           const string& data) {
+  HandlerStack &current = die_handlers_.top();
+  // This had better be an attribute of the DIE we were meant to handle.
+  assert(offset == current.offset_);
+  current.handler_->ProcessAttributeString(attr, form, data);
+}
+
+void DIEDispatcher::ProcessAttributeSignature(uint64 offset,
+                                              enum DwarfAttribute attr,
+                                              enum DwarfForm form,
+                                              uint64 signature) {
+  HandlerStack &current = die_handlers_.top();
+  // This had better be an attribute of the DIE we were meant to handle.
+ assert(offset == current.offset_); + current.handler_->ProcessAttributeSignature(attr, form, signature); +} + +} // namespace dwarf2reader diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2diehandler.h b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2diehandler.h new file mode 100644 index 0000000000..81f40f072f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2diehandler.h @@ -0,0 +1,363 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2010 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// dwarf2reader::CompilationUnit is a simple and direct parser for +// DWARF data, but its handler interface is not convenient to use. In +// particular: +// +// - CompilationUnit calls Dwarf2Handler's member functions to report +// every attribute's value, regardless of what sort of DIE it is. +// As a result, the ProcessAttributeX functions end up looking like +// this: +// +// switch (parent_die_tag) { +// case DW_TAG_x: +// switch (attribute_name) { +// case DW_AT_y: +// handle attribute y of DIE type x +// ... +// } break; +// ... +// } +// +// In C++ it's much nicer to use virtual function dispatch to find +// the right code for a given case than to switch on the DIE tag +// like this. +// +// - Processing different kinds of DIEs requires different sets of +// data: lexical block DIEs have start and end addresses, but struct +// type DIEs don't. It would be nice to be able to have separate +// handler classes for separate kinds of DIEs, each with the members +// appropriate to its role, instead of having one handler class that +// needs to hold data for every DIE type. +// +// - There should be a separate instance of the appropriate handler +// class for each DIE, instead of a single object with tables +// tracking all the dies in the compilation unit. +// +// - It's not convenient to take some action after all a DIE's +// attributes have been seen, but before visiting any of its +// children. 
The only indication you have that a DIE's attribute +// list is complete is that you get either a StartDIE or an EndDIE +// call. +// +// - It's not convenient to make use of the tree structure of the +// DIEs. Skipping all the children of a given die requires +// maintaining state and returning false from StartDIE until we get +// an EndDIE call with the appropriate offset. +// +// This interface tries to take care of all that. (You're shocked, I'm sure.) +// +// Using the classes here, you provide an initial handler for the root +// DIE of the compilation unit. Each handler receives its DIE's +// attributes, and provides fresh handler objects for children of +// interest, if any. The three classes are: +// +// - DIEHandler: the base class for your DIE-type-specific handler +// classes. +// +// - RootDIEHandler: derived from DIEHandler, the base class for your +// root DIE handler class. +// +// - DIEDispatcher: derived from Dwarf2Handler, an instance of this +// invokes your DIE-type-specific handler objects. +// +// In detail: +// +// - Define handler classes specialized for the DIE types you're +// interested in. These handler classes must inherit from +// DIEHandler. Thus: +// +// class My_DW_TAG_X_Handler: public DIEHandler { ... }; +// class My_DW_TAG_Y_Handler: public DIEHandler { ... }; +// +// DIEHandler subclasses needn't correspond exactly to single DIE +// types, as shown here; the point is that you can have several +// different classes appropriate to different kinds of DIEs. +// +// - In particular, define a handler class for the compilation +// unit's root DIE, that inherits from RootDIEHandler: +// +// class My_DW_TAG_compile_unit_Handler: public RootDIEHandler { ... }; +// +// RootDIEHandler inherits from DIEHandler, adding a few additional +// member functions for examining the compilation unit as a whole, +// and other quirks of rootness. +// +// - Then, create a DIEDispatcher instance, passing it an instance of +// your root DIE handler class, and use that DIEDispatcher as the +// dwarf2reader::CompilationUnit's handler: +// +// My_DW_TAG_compile_unit_Handler root_die_handler(...); +// DIEDispatcher die_dispatcher(&root_die_handler); +// CompilationUnit reader(sections, offset, bytereader, &die_dispatcher); +// +// Here, 'die_dispatcher' acts as a shim between 'reader' and the +// various DIE-specific handlers you have defined. +// +// - When you call reader.Start(), die_dispatcher behaves as follows, +// starting with your root die handler and the compilation unit's +// root DIE: +// +// - It calls the handler's ProcessAttributeX member functions for +// each of the DIE's attributes. +// +// - It calls the handler's EndAttributes member function. This +// should return true if any of the DIE's children should be +// visited, in which case: +// +// - For each of the DIE's children, die_dispatcher calls the +// DIE's handler's FindChildHandler member function. If that +// returns a pointer to a DIEHandler instance, then +// die_dispatcher uses that handler to process the child, using +// this procedure recursively. Alternatively, if +// FindChildHandler returns NULL, die_dispatcher ignores that +// child and its descendants. +// +// - When die_dispatcher has finished processing all the DIE's +// children, it invokes the handler's Finish() member function, +// and destroys the handler. (As a special case, it doesn't +// destroy the root DIE handler.) 
+// +// This allows the code for handling a particular kind of DIE to be +// gathered together in a single class, makes it easy to skip all the +// children or individual children of a particular DIE, and provides +// appropriate parental context for each die. + +#ifndef COMMON_DWARF_DWARF2DIEHANDLER_H__ +#define COMMON_DWARF_DWARF2DIEHANDLER_H__ + +#include +#include + +#include "common/dwarf/types.h" +#include "common/dwarf/dwarf2enums.h" +#include "common/dwarf/dwarf2reader.h" +#include "common/using_std_string.h" + +namespace dwarf2reader { + +// A base class for handlers for specific DIE types. The series of +// calls made on a DIE handler is as follows: +// +// - for each attribute of the DIE: +// - ProcessAttributeX() +// - EndAttributes() +// - if that returned true, then for each child: +// - FindChildHandler() +// - if that returns a non-NULL pointer to a new handler: +// - recurse, with the new handler and the child die +// - Finish() +// - destruction +class DIEHandler { + public: + DIEHandler() { } + virtual ~DIEHandler() { } + + // When we visit a DIE, we first use these member functions to + // report the DIE's attributes and their values. These have the + // same restrictions as the corresponding member functions of + // dwarf2reader::Dwarf2Handler. + // + // Since DWARF does not specify in what order attributes must + // appear, avoid making decisions in these functions that would be + // affected by the presence of other attributes. The EndAttributes + // function is a more appropriate place for such work, as all the + // DIE's attributes have been seen at that point. + // + // The default definitions ignore the values they are passed. + virtual void ProcessAttributeUnsigned(enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data) { } + virtual void ProcessAttributeSigned(enum DwarfAttribute attr, + enum DwarfForm form, + int64 data) { } + virtual void ProcessAttributeReference(enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data) { } + virtual void ProcessAttributeBuffer(enum DwarfAttribute attr, + enum DwarfForm form, + const char* data, + uint64 len) { } + virtual void ProcessAttributeString(enum DwarfAttribute attr, + enum DwarfForm form, + const string& data) { } + virtual void ProcessAttributeSignature(enum DwarfAttribute attr, + enum DwarfForm form, + uint64 signture) { } + + // Once we have reported all the DIE's attributes' values, we call + // this member function. If it returns false, we skip all the DIE's + // children. If it returns true, we call FindChildHandler on each + // child. If that returns a handler object, we use that to visit + // the child; otherwise, we skip the child. + // + // This is a good place to make decisions that depend on more than + // one attribute. DWARF does not specify in what order attributes + // must appear, so only when the EndAttributes function is called + // does the handler have a complete picture of the DIE's attributes. + // + // The default definition elects to ignore the DIE's children. + // You'll need to override this if you override FindChildHandler, + // but at least the default behavior isn't to pass the children to + // FindChildHandler, which then ignores them all. + virtual bool EndAttributes() { return false; } + + // If EndAttributes returns true to indicate that some of the DIE's + // children might be of interest, then we apply this function to + // each of the DIE's children. If it returns a handler object, then + // we use that to visit the child DIE. 
If it returns NULL, we skip + // that child DIE (and all its descendants). + // + // OFFSET is the offset of the child; TAG indicates what kind of DIE + // it is. + // + // The default definition skips all children. + virtual DIEHandler *FindChildHandler(uint64 offset, enum DwarfTag tag) { + return NULL; + } + + // When we are done processing a DIE, we call this member function. + // This happens after the EndAttributes call, all FindChildHandler + // calls (if any), and all operations on the children themselves (if + // any). We call Finish on every handler --- even if EndAttributes + // returns false. + virtual void Finish() { }; +}; + +// A subclass of DIEHandler, with additional kludges for handling the +// compilation unit's root die. +class RootDIEHandler: public DIEHandler { + public: + RootDIEHandler() { } + virtual ~RootDIEHandler() { } + + // We pass the values reported via Dwarf2Handler::StartCompilationUnit + // to this member function, and skip the entire compilation unit if it + // returns false. So the root DIE handler is actually also + // responsible for handling the compilation unit metadata. + // The default definition always visits the compilation unit. + virtual bool StartCompilationUnit(uint64 offset, uint8 address_size, + uint8 offset_size, uint64 cu_length, + uint8 dwarf_version) { return true; } + + // For the root DIE handler only, we pass the offset, tag and + // attributes of the compilation unit's root DIE. This is the only + // way the root DIE handler can find the root DIE's tag. If this + // function returns true, we will visit the root DIE using the usual + // DIEHandler methods; otherwise, we skip the entire compilation + // unit. + // + // The default definition elects to visit the root DIE. + virtual bool StartRootDIE(uint64 offset, enum DwarfTag tag) { return true; } +}; + +class DIEDispatcher: public Dwarf2Handler { + public: + // Create a Dwarf2Handler which uses ROOT_HANDLER as the handler for + // the compilation unit's root die, as described for the DIEHandler + // class. + DIEDispatcher(RootDIEHandler *root_handler) : root_handler_(root_handler) { } + // Destroying a DIEDispatcher destroys all active handler objects + // except the root handler. + ~DIEDispatcher(); + bool StartCompilationUnit(uint64 offset, uint8 address_size, + uint8 offset_size, uint64 cu_length, + uint8 dwarf_version); + bool StartDIE(uint64 offset, enum DwarfTag tag); + void ProcessAttributeUnsigned(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data); + void ProcessAttributeSigned(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + int64 data); + void ProcessAttributeReference(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data); + void ProcessAttributeBuffer(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + const char* data, + uint64 len); + void ProcessAttributeString(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + const string &data); + void ProcessAttributeSignature(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + uint64 signature); + void EndDIE(uint64 offset); + + private: + + // The type of a handler stack entry. This includes some fields + // which don't really need to be on the stack --- they could just be + // single data members of DIEDispatcher --- but putting them here + // makes it easier to see that the code is correct. + struct HandlerStack { + // The offset of the DIE for this handler stack entry. 
+    uint64 offset_;
+
+    // The handler object interested in this DIE's attributes and
+    // children. If NULL, we're not interested in either.
+    DIEHandler *handler_;
+
+    // Have we reported the end of this DIE's attributes to the handler?
+    bool reported_attributes_end_;
+  };
+
+  // Stack of DIE attribute handlers. At StartDIE(D), the top of the
+  // stack is the handler of D's parent, whom we may ask for a handler
+  // for D itself. At EndDIE(D), the top of the stack is D's handler.
+  // Special cases:
+  //
+  // - Before we've seen the compilation unit's root DIE, the stack is
+  //   empty; we'll call root_handler_'s special member functions, and
+  //   perhaps push root_handler_ on the stack to look at the root's
+  //   immediate children.
+  //
+  // - When we decide to ignore a subtree, we only push an entry on
+  //   the stack for the root of the tree being ignored, rather than
+  //   pushing lots of stack entries with handler_ set to NULL.
+  std::stack<HandlerStack> die_handlers_;
+
+  // The root handler. We don't push it on die_handlers_ until we
+  // actually get the StartDIE call for the root.
+  RootDIEHandler *root_handler_;
+};
+
+} // namespace dwarf2reader
+#endif // COMMON_DWARF_DWARF2DIEHANDLER_H__
diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2diehandler_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2diehandler_unittest.cc
new file mode 100644
index 0000000000..c0a532aa01
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2diehandler_unittest.cc
@@ -0,0 +1,524 @@
+// -*- mode: c++ -*-
+
+// Copyright (c) 2010 Google Inc. All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Original author: Jim Blandy
+
+// dwarf2diehandler_unittest.cc: Unit tests for google_breakpad::DIEDispatcher.
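The tests below drive DIEDispatcher entirely through gmock handlers. As a plainer illustration of the same protocol, a concrete handler pair might look roughly like the following sketch; the class names are invented for the example and this code is not part of the patch.

  #include <vector>

  #include "common/dwarf/dwarf2diehandler.h"

  // Collects DW_AT_name strings from the subprogram DIEs that are direct
  // children of the compilation unit's root DIE.
  class SubprogramNameHandler: public dwarf2reader::DIEHandler {
   public:
    explicit SubprogramNameHandler(std::vector<string> *names) : names_(names) { }
    void ProcessAttributeString(dwarf2reader::DwarfAttribute attr,
                                dwarf2reader::DwarfForm form,
                                const string &data) {
      if (attr == dwarf2reader::DW_AT_name)
        names_->push_back(data);
    }
    bool EndAttributes() { return false; }   // not interested in grandchildren
   private:
    std::vector<string> *names_;
  };

  class CUNameCollector: public dwarf2reader::RootDIEHandler {
   public:
    bool EndAttributes() { return true; }    // do offer us the root's children
    dwarf2reader::DIEHandler *FindChildHandler(uint64 offset,
                                               dwarf2reader::DwarfTag tag) {
      if (tag == dwarf2reader::DW_TAG_subprogram)
        return new SubprogramNameHandler(&names_);  // dispatcher deletes it
      return NULL;                                  // skip this whole subtree
    }
    std::vector<string> names_;
  };

  // CUNameCollector root;
  // dwarf2reader::DIEDispatcher dispatcher(&root);
  // ...pass &dispatcher to a dwarf2reader::CompilationUnit as its handler...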
+ +#include +#include + +#include "breakpad_googletest_includes.h" + +#include "common/dwarf/dwarf2diehandler.h" +#include "common/using_std_string.h" + +using std::make_pair; + +using ::testing::_; +using ::testing::ContainerEq; +using ::testing::ElementsAreArray; +using ::testing::Eq; +using ::testing::InSequence; +using ::testing::Return; +using ::testing::Sequence; +using ::testing::StrEq; + +using dwarf2reader::DIEDispatcher; +using dwarf2reader::DIEHandler; +using dwarf2reader::DwarfAttribute; +using dwarf2reader::DwarfForm; +using dwarf2reader::DwarfTag; +using dwarf2reader::RootDIEHandler; + +class MockDIEHandler: public DIEHandler { + public: + MOCK_METHOD3(ProcessAttributeUnsigned, + void(DwarfAttribute, DwarfForm, uint64)); + MOCK_METHOD3(ProcessAttributeSigned, + void(DwarfAttribute, DwarfForm, int64)); + MOCK_METHOD3(ProcessAttributeReference, + void(DwarfAttribute, DwarfForm, uint64)); + MOCK_METHOD4(ProcessAttributeBuffer, + void(DwarfAttribute, DwarfForm, const char *, uint64)); + MOCK_METHOD3(ProcessAttributeString, + void(DwarfAttribute, DwarfForm, const string &)); + MOCK_METHOD3(ProcessAttributeSignature, + void(DwarfAttribute, DwarfForm, uint64)); + MOCK_METHOD0(EndAttributes, bool()); + MOCK_METHOD2(FindChildHandler, DIEHandler *(uint64, DwarfTag)); + MOCK_METHOD0(Finish, void()); +}; + +class MockRootDIEHandler: public RootDIEHandler { + public: + MOCK_METHOD3(ProcessAttributeUnsigned, + void(DwarfAttribute, DwarfForm, uint64)); + MOCK_METHOD3(ProcessAttributeSigned, + void(DwarfAttribute, DwarfForm, int64)); + MOCK_METHOD3(ProcessAttributeReference, + void(DwarfAttribute, DwarfForm, uint64)); + MOCK_METHOD4(ProcessAttributeBuffer, + void(DwarfAttribute, DwarfForm, const char *, uint64)); + MOCK_METHOD3(ProcessAttributeString, + void(DwarfAttribute, DwarfForm, const string &)); + MOCK_METHOD3(ProcessAttributeSignature, + void(DwarfAttribute, DwarfForm, uint64)); + MOCK_METHOD0(EndAttributes, bool()); + MOCK_METHOD2(FindChildHandler, DIEHandler *(uint64, DwarfTag)); + MOCK_METHOD0(Finish, void()); + MOCK_METHOD5(StartCompilationUnit, bool(uint64, uint8, uint8, uint64, uint8)); + MOCK_METHOD2(StartRootDIE, bool(uint64, DwarfTag)); +}; + +// If the handler elects to skip the compilation unit, the dispatcher +// should tell the reader so. +TEST(Dwarf2DIEHandler, SkipCompilationUnit) { + Sequence s; + MockRootDIEHandler mock_root_handler; + DIEDispatcher die_dispatcher(&mock_root_handler); + + EXPECT_CALL(mock_root_handler, + StartCompilationUnit(0x8d42aed77cfccf3eLL, + 0x89, 0xdc, + 0x2ecb4dc778a80f21LL, + 0x66)) + .InSequence(s) + .WillOnce(Return(false)); + + EXPECT_FALSE(die_dispatcher.StartCompilationUnit(0x8d42aed77cfccf3eLL, + 0x89, 0xdc, + 0x2ecb4dc778a80f21LL, + 0x66)); +} + +// If the handler elects to skip the root DIE, the dispatcher should +// tell the reader so. 
+TEST(Dwarf2DIEHandler, SkipRootDIE) { + Sequence s; + MockRootDIEHandler mock_root_handler; + DIEDispatcher die_dispatcher(&mock_root_handler); + + EXPECT_CALL(mock_root_handler, + StartCompilationUnit(0xde8994029fc8b999LL, 0xf4, 0x02, + 0xb00febffa76e2b2bLL, 0x5c)) + .InSequence(s) + .WillOnce(Return(true)); + EXPECT_CALL(mock_root_handler, + StartRootDIE(0x7d08242b4b510cf2LL, (DwarfTag) 0xb4f98da6)) + .InSequence(s) + .WillOnce(Return(false)); + + EXPECT_TRUE(die_dispatcher.StartCompilationUnit(0xde8994029fc8b999LL, + 0xf4, 0x02, + 0xb00febffa76e2b2bLL, 0x5c)); + EXPECT_FALSE(die_dispatcher.StartDIE(0x7d08242b4b510cf2LL, + (DwarfTag) 0xb4f98da6)); + die_dispatcher.EndDIE(0x7d08242b4b510cf2LL); +} + +// If the handler elects to skip the root DIE's children, the +// dispatcher should tell the reader so --- and avoid deleting the +// root handler. +TEST(Dwarf2DIEHandler, SkipRootDIEChildren) { + MockRootDIEHandler mock_root_handler; + DIEDispatcher die_dispatcher(&mock_root_handler); + + { + InSequence s; + + EXPECT_CALL(mock_root_handler, + StartCompilationUnit(0x15d6897480cc65a7LL, 0x26, 0xa0, + 0x09f8bf0767f91675LL, 0xdb)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_root_handler, + StartRootDIE(0x7d08242b4b510cf2LL, (DwarfTag) 0xb4f98da6)) + .WillOnce(Return(true)); + // Please don't tell me about my children. + EXPECT_CALL(mock_root_handler, EndAttributes()) + .WillOnce(Return(false)); + EXPECT_CALL(mock_root_handler, Finish()) + .WillOnce(Return()); + } + + EXPECT_TRUE(die_dispatcher.StartCompilationUnit(0x15d6897480cc65a7LL, + 0x26, 0xa0, + 0x09f8bf0767f91675LL, 0xdb)); + EXPECT_TRUE(die_dispatcher.StartDIE(0x7d08242b4b510cf2LL, + (DwarfTag) 0xb4f98da6)); + EXPECT_FALSE(die_dispatcher.StartDIE(0x435150ceedccda18LL, + (DwarfTag) 0xc3a17bba)); + die_dispatcher.EndDIE(0x435150ceedccda18LL); + die_dispatcher.EndDIE(0x7d08242b4b510cf2LL); +} + +// The dispatcher should pass attribute values through to the die +// handler accurately. +TEST(Dwarf2DIEHandler, PassAttributeValues) { + MockRootDIEHandler mock_root_handler; + DIEDispatcher die_dispatcher(&mock_root_handler); + + const char buffer[10] = { 0x24, 0x24, 0x35, 0x9a, 0xca, + 0xcf, 0xa8, 0x84, 0xa7, 0x18 }; + string str = "\xc8\x26\x2e\x0d\xa4\x9c\x37\xd6\xfb\x1d"; + + // Set expectations. + { + InSequence s; + + // We'll like the compilation unit header. + EXPECT_CALL(mock_root_handler, + StartCompilationUnit(0x8d42aed77cfccf3eLL, 0x89, 0xdc, + 0x2ecb4dc778a80f21LL, 0x66)) + .WillOnce(Return(true)); + + // We'll like the root DIE. + EXPECT_CALL(mock_root_handler, + StartRootDIE(0xe2222da01e29f2a9LL, (DwarfTag) 0x9829445c)) + .WillOnce(Return(true)); + + // Expect some attribute values. 
+ EXPECT_CALL(mock_root_handler, + ProcessAttributeUnsigned((DwarfAttribute) 0x1cc0bfed, + (DwarfForm) 0x424f1468, + 0xa592571997facda1ULL)) + .WillOnce(Return()); + EXPECT_CALL(mock_root_handler, + ProcessAttributeSigned((DwarfAttribute) 0x43694dc9, + (DwarfForm) 0xf6f78901L, + 0x92602a4e3bf1f446LL)) + .WillOnce(Return()); + EXPECT_CALL(mock_root_handler, + ProcessAttributeReference((DwarfAttribute) 0x4033e8cL, + (DwarfForm) 0xf66fbe0bL, + 0x50fddef44734fdecULL)) + .WillOnce(Return()); + EXPECT_CALL(mock_root_handler, + ProcessAttributeBuffer((DwarfAttribute) 0x25d7e0af, + (DwarfForm) 0xe99a539a, + buffer, sizeof(buffer))) + .WillOnce(Return()); + EXPECT_CALL(mock_root_handler, + ProcessAttributeString((DwarfAttribute) 0x310ed065, + (DwarfForm) 0x15762fec, + StrEq(str))) + .WillOnce(Return()); + EXPECT_CALL(mock_root_handler, + ProcessAttributeSignature((DwarfAttribute) 0x58790d72, + (DwarfForm) 0x4159f138, + 0x94682463613e6a5fULL)) + .WillOnce(Return()); + EXPECT_CALL(mock_root_handler, EndAttributes()) + .WillOnce(Return(true)); + EXPECT_CALL(mock_root_handler, FindChildHandler(_, _)) + .Times(0); + EXPECT_CALL(mock_root_handler, Finish()) + .WillOnce(Return()); + } + + // Drive the dispatcher. + + // Report the CU header. + EXPECT_TRUE(die_dispatcher.StartCompilationUnit(0x8d42aed77cfccf3eLL, + 0x89, 0xdc, + 0x2ecb4dc778a80f21LL, + 0x66)); + // Report the root DIE. + EXPECT_TRUE(die_dispatcher.StartDIE(0xe2222da01e29f2a9LL, + (DwarfTag) 0x9829445c)); + + // Report some attribute values. + die_dispatcher.ProcessAttributeUnsigned(0xe2222da01e29f2a9LL, + (DwarfAttribute) 0x1cc0bfed, + (DwarfForm) 0x424f1468, + 0xa592571997facda1ULL); + die_dispatcher.ProcessAttributeSigned(0xe2222da01e29f2a9LL, + (DwarfAttribute) 0x43694dc9, + (DwarfForm) 0xf6f78901, + 0x92602a4e3bf1f446LL); + die_dispatcher.ProcessAttributeReference(0xe2222da01e29f2a9LL, + (DwarfAttribute) 0x4033e8c, + (DwarfForm) 0xf66fbe0b, + 0x50fddef44734fdecULL); + die_dispatcher.ProcessAttributeBuffer(0xe2222da01e29f2a9LL, + (DwarfAttribute) 0x25d7e0af, + (DwarfForm) 0xe99a539a, + buffer, sizeof(buffer)); + die_dispatcher.ProcessAttributeString(0xe2222da01e29f2a9LL, + (DwarfAttribute) 0x310ed065, + (DwarfForm) 0x15762fec, + str); + die_dispatcher.ProcessAttributeSignature(0xe2222da01e29f2a9LL, + (DwarfAttribute) 0x58790d72, + (DwarfForm) 0x4159f138, + 0x94682463613e6a5fULL); + + // Finish the root DIE (and thus the CU). + die_dispatcher.EndDIE(0xe2222da01e29f2a9LL); +} + +TEST(Dwarf2DIEHandler, FindAndSkipChildren) { + MockRootDIEHandler mock_root_handler; + MockDIEHandler *mock_child1_handler = new(MockDIEHandler); + MockDIEHandler *mock_child3_handler = new(MockDIEHandler); + DIEDispatcher die_dispatcher(&mock_root_handler); + + { + InSequence s; + + // We'll like the compilation unit header. + EXPECT_CALL(mock_root_handler, + StartCompilationUnit(0x9ec1e6d05e434a0eLL, 0xeb, 0x21, + 0x47dd3c764275a216LL, 0xa5)) + .WillOnce(Return(true)); + + // Root DIE. + { + EXPECT_CALL(mock_root_handler, + StartRootDIE(0x15f0e06bdfe3c372LL, (DwarfTag) 0xf5d60c59)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_root_handler, + ProcessAttributeSigned((DwarfAttribute) 0xf779a642, + (DwarfForm) 0x2cb63027, + 0x18e744661769d08fLL)) + .WillOnce(Return()); + EXPECT_CALL(mock_root_handler, EndAttributes()) + .WillOnce(Return(true)); + + // First child DIE. 
+ EXPECT_CALL(mock_root_handler, + FindChildHandler(0x149f644f8116fe8cLL, + (DwarfTag) 0xac2cbd8c)) + .WillOnce(Return(mock_child1_handler)); + { + EXPECT_CALL(*mock_child1_handler, + ProcessAttributeSigned((DwarfAttribute) 0xa6fd6f65, + (DwarfForm) 0xe4f64c41, + 0x1b04e5444a55fe67LL)) + .WillOnce(Return()); + EXPECT_CALL(*mock_child1_handler, EndAttributes()) + .WillOnce(Return(false)); + // Skip first grandchild DIE and first great-grandchild DIE. + EXPECT_CALL(*mock_child1_handler, Finish()) + .WillOnce(Return()); + } + + // Second child DIE. Root handler will decline to return a handler + // for this child. + EXPECT_CALL(mock_root_handler, + FindChildHandler(0x97412be24875de9dLL, + (DwarfTag) 0x505a068b)) + .WillOnce(Return((DIEHandler *) NULL)); + + // Third child DIE. + EXPECT_CALL(mock_root_handler, + FindChildHandler(0x753c964c8ab538aeLL, + (DwarfTag) 0x8c22970e)) + .WillOnce(Return(mock_child3_handler)); + { + EXPECT_CALL(*mock_child3_handler, + ProcessAttributeSigned((DwarfAttribute) 0x4e2b7cfb, + (DwarfForm) 0x610b7ae1, + 0x3ea5c609d7d7560fLL)) + .WillOnce(Return()); + EXPECT_CALL(*mock_child3_handler, EndAttributes()) + .WillOnce(Return(true)); + EXPECT_CALL(*mock_child3_handler, Finish()) + .WillOnce(Return()); + } + + EXPECT_CALL(mock_root_handler, Finish()) + .WillOnce(Return()); + } + } + + + // Drive the dispatcher. + + // Report the CU header. + EXPECT_TRUE(die_dispatcher + .StartCompilationUnit(0x9ec1e6d05e434a0eLL, 0xeb, 0x21, + 0x47dd3c764275a216LL, 0xa5)); + // Report the root DIE. + { + EXPECT_TRUE(die_dispatcher.StartDIE(0x15f0e06bdfe3c372LL, + (DwarfTag) 0xf5d60c59)); + die_dispatcher.ProcessAttributeSigned(0x15f0e06bdfe3c372LL, + (DwarfAttribute) 0xf779a642, + (DwarfForm) 0x2cb63027, + 0x18e744661769d08fLL); + + // First child DIE. + { + EXPECT_TRUE(die_dispatcher.StartDIE(0x149f644f8116fe8cLL, + (DwarfTag) 0xac2cbd8c)); + die_dispatcher.ProcessAttributeSigned(0x149f644f8116fe8cLL, + (DwarfAttribute) 0xa6fd6f65, + (DwarfForm) 0xe4f64c41, + 0x1b04e5444a55fe67LL); + + // First grandchild DIE. Will be skipped. + { + EXPECT_FALSE(die_dispatcher.StartDIE(0xd68de1ee0bd29419LL, + (DwarfTag) 0x22f05a15)); + // First great-grandchild DIE. Will be skipped without being + // mentioned to any handler. + { + EXPECT_FALSE(die_dispatcher + .StartDIE(0xb3076285d25cac25LL, + (DwarfTag) 0xcff4061b)); + die_dispatcher.EndDIE(0xb3076285d25cac25LL); + } + die_dispatcher.EndDIE(0xd68de1ee0bd29419LL); + } + die_dispatcher.EndDIE(0x149f644f8116fe8cLL); + } + + // Second child DIE. Root handler will decline to find a handler for it. + { + EXPECT_FALSE(die_dispatcher.StartDIE(0x97412be24875de9dLL, + (DwarfTag) 0x505a068b)); + die_dispatcher.EndDIE(0x97412be24875de9dLL); + } + + // Third child DIE. + { + EXPECT_TRUE(die_dispatcher.StartDIE(0x753c964c8ab538aeLL, + (DwarfTag) 0x8c22970e)); + die_dispatcher.ProcessAttributeSigned(0x753c964c8ab538aeLL, + (DwarfAttribute) 0x4e2b7cfb, + (DwarfForm) 0x610b7ae1, + 0x3ea5c609d7d7560fLL); + die_dispatcher.EndDIE(0x753c964c8ab538aeLL); + } + + // Finish the root DIE (and thus the CU). + die_dispatcher.EndDIE(0x15f0e06bdfe3c372LL); + } +} + +// The DIEDispatcher destructor is supposed to delete all handlers on +// the stack, except for the root. +TEST(Dwarf2DIEHandler, FreeHandlersOnStack) { + MockRootDIEHandler mock_root_handler; + MockDIEHandler *mock_child_handler = new(MockDIEHandler); + MockDIEHandler *mock_grandchild_handler = new(MockDIEHandler); + + { + InSequence s; + + // We'll like the compilation unit header. 
+ EXPECT_CALL(mock_root_handler, + StartCompilationUnit(0x87b41ba8381cd71cLL, 0xff, 0x89, + 0x76d392ff393ddda2LL, 0xbf)) + .WillOnce(Return(true)); + + // Root DIE. + { + EXPECT_CALL(mock_root_handler, + StartRootDIE(0xbf13b761691ddc91LL, (DwarfTag) 0x98980361)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_root_handler, EndAttributes()) + .WillOnce(Return(true)); + + // Child DIE. + EXPECT_CALL(mock_root_handler, + FindChildHandler(0x058f09240c5fc8c9LL, + (DwarfTag) 0x898bf0d0)) + .WillOnce(Return(mock_child_handler)); + { + EXPECT_CALL(*mock_child_handler, EndAttributes()) + .WillOnce(Return(true)); + + // Grandchild DIE. + EXPECT_CALL(*mock_child_handler, + FindChildHandler(0x32dc00c9945dc0c8LL, + (DwarfTag) 0x2802d007)) + .WillOnce(Return(mock_grandchild_handler)); + { + EXPECT_CALL(*mock_grandchild_handler, + ProcessAttributeSigned((DwarfAttribute) 0x4e2b7cfb, + (DwarfForm) 0x610b7ae1, + 0x3ea5c609d7d7560fLL)) + .WillOnce(Return()); + + // At this point, we abandon the traversal, so none of the + // usual stuff should get called. + EXPECT_CALL(*mock_grandchild_handler, EndAttributes()) + .Times(0); + EXPECT_CALL(*mock_grandchild_handler, Finish()) + .Times(0); + } + + EXPECT_CALL(*mock_child_handler, Finish()) + .Times(0); + } + + EXPECT_CALL(mock_root_handler, Finish()) + .Times(0); + } + } + + // The dispatcher. + DIEDispatcher die_dispatcher(&mock_root_handler); + + // Report the CU header. + EXPECT_TRUE(die_dispatcher + .StartCompilationUnit(0x87b41ba8381cd71cLL, 0xff, 0x89, + 0x76d392ff393ddda2LL, 0xbf)); + // Report the root DIE. + { + EXPECT_TRUE(die_dispatcher.StartDIE(0xbf13b761691ddc91LL, + (DwarfTag) 0x98980361)); + + // Child DIE. + { + EXPECT_TRUE(die_dispatcher.StartDIE(0x058f09240c5fc8c9LL, + (DwarfTag) 0x898bf0d0)); + + // Grandchild DIE. + { + EXPECT_TRUE(die_dispatcher.StartDIE(0x32dc00c9945dc0c8LL, + (DwarfTag) 0x2802d007)); + die_dispatcher.ProcessAttributeSigned(0x32dc00c9945dc0c8LL, + (DwarfAttribute) 0x4e2b7cfb, + (DwarfForm) 0x610b7ae1, + 0x3ea5c609d7d7560fLL); + + // Stop the traversal abruptly, so that there will still be + // handlers on the stack when the dispatcher is destructed. + + // No EndDIE call... + } + // No EndDIE call... + } + // No EndDIE call... + } +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2enums.h b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2enums.h new file mode 100644 index 0000000000..5565d66e12 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2enums.h @@ -0,0 +1,650 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2010 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef COMMON_DWARF_DWARF2ENUMS_H__ +#define COMMON_DWARF_DWARF2ENUMS_H__ + +namespace dwarf2reader { + +// These enums do not follow the google3 style only because they are +// known universally (specs, other implementations) by the names in +// exactly this capitalization. +// Tag names and codes. +enum DwarfTag { + DW_TAG_padding = 0x00, + DW_TAG_array_type = 0x01, + DW_TAG_class_type = 0x02, + DW_TAG_entry_point = 0x03, + DW_TAG_enumeration_type = 0x04, + DW_TAG_formal_parameter = 0x05, + DW_TAG_imported_declaration = 0x08, + DW_TAG_label = 0x0a, + DW_TAG_lexical_block = 0x0b, + DW_TAG_member = 0x0d, + DW_TAG_pointer_type = 0x0f, + DW_TAG_reference_type = 0x10, + DW_TAG_compile_unit = 0x11, + DW_TAG_string_type = 0x12, + DW_TAG_structure_type = 0x13, + DW_TAG_subroutine_type = 0x15, + DW_TAG_typedef = 0x16, + DW_TAG_union_type = 0x17, + DW_TAG_unspecified_parameters = 0x18, + DW_TAG_variant = 0x19, + DW_TAG_common_block = 0x1a, + DW_TAG_common_inclusion = 0x1b, + DW_TAG_inheritance = 0x1c, + DW_TAG_inlined_subroutine = 0x1d, + DW_TAG_module = 0x1e, + DW_TAG_ptr_to_member_type = 0x1f, + DW_TAG_set_type = 0x20, + DW_TAG_subrange_type = 0x21, + DW_TAG_with_stmt = 0x22, + DW_TAG_access_declaration = 0x23, + DW_TAG_base_type = 0x24, + DW_TAG_catch_block = 0x25, + DW_TAG_const_type = 0x26, + DW_TAG_constant = 0x27, + DW_TAG_enumerator = 0x28, + DW_TAG_file_type = 0x29, + DW_TAG_friend = 0x2a, + DW_TAG_namelist = 0x2b, + DW_TAG_namelist_item = 0x2c, + DW_TAG_packed_type = 0x2d, + DW_TAG_subprogram = 0x2e, + DW_TAG_template_type_param = 0x2f, + DW_TAG_template_value_param = 0x30, + DW_TAG_thrown_type = 0x31, + DW_TAG_try_block = 0x32, + DW_TAG_variant_part = 0x33, + DW_TAG_variable = 0x34, + DW_TAG_volatile_type = 0x35, + // DWARF 3. + DW_TAG_dwarf_procedure = 0x36, + DW_TAG_restrict_type = 0x37, + DW_TAG_interface_type = 0x38, + DW_TAG_namespace = 0x39, + DW_TAG_imported_module = 0x3a, + DW_TAG_unspecified_type = 0x3b, + DW_TAG_partial_unit = 0x3c, + DW_TAG_imported_unit = 0x3d, + // SGI/MIPS Extensions. + DW_TAG_MIPS_loop = 0x4081, + // HP extensions. See: + // ftp://ftp.hp.com/pub/lang/tools/WDB/wdb-4.0.tar.gz + DW_TAG_HP_array_descriptor = 0x4090, + // GNU extensions. + DW_TAG_format_label = 0x4101, // For FORTRAN 77 and Fortran 90. + DW_TAG_function_template = 0x4102, // For C++. + DW_TAG_class_template = 0x4103, // For C++. + DW_TAG_GNU_BINCL = 0x4104, + DW_TAG_GNU_EINCL = 0x4105, + // Extensions for UPC. See: http://upc.gwu.edu/~upc. + DW_TAG_upc_shared_type = 0x8765, + DW_TAG_upc_strict_type = 0x8766, + DW_TAG_upc_relaxed_type = 0x8767, + // PGI (STMicroelectronics) extensions. No documentation available. 
+ DW_TAG_PGI_kanji_type = 0xA000, + DW_TAG_PGI_interface_block = 0xA020 +}; + + +enum DwarfHasChild { + DW_children_no = 0, + DW_children_yes = 1 +}; + +// Form names and codes. +enum DwarfForm { + DW_FORM_addr = 0x01, + DW_FORM_block2 = 0x03, + DW_FORM_block4 = 0x04, + DW_FORM_data2 = 0x05, + DW_FORM_data4 = 0x06, + DW_FORM_data8 = 0x07, + DW_FORM_string = 0x08, + DW_FORM_block = 0x09, + DW_FORM_block1 = 0x0a, + DW_FORM_data1 = 0x0b, + DW_FORM_flag = 0x0c, + DW_FORM_sdata = 0x0d, + DW_FORM_strp = 0x0e, + DW_FORM_udata = 0x0f, + DW_FORM_ref_addr = 0x10, + DW_FORM_ref1 = 0x11, + DW_FORM_ref2 = 0x12, + DW_FORM_ref4 = 0x13, + DW_FORM_ref8 = 0x14, + DW_FORM_ref_udata = 0x15, + DW_FORM_indirect = 0x16, + + // Added in DWARF 4: + DW_FORM_sec_offset = 0x17, + DW_FORM_exprloc = 0x18, + DW_FORM_flag_present = 0x19, + DW_FORM_ref_sig8 = 0x20 +}; + +// Attribute names and codes +enum DwarfAttribute { + DW_AT_sibling = 0x01, + DW_AT_location = 0x02, + DW_AT_name = 0x03, + DW_AT_ordering = 0x09, + DW_AT_subscr_data = 0x0a, + DW_AT_byte_size = 0x0b, + DW_AT_bit_offset = 0x0c, + DW_AT_bit_size = 0x0d, + DW_AT_element_list = 0x0f, + DW_AT_stmt_list = 0x10, + DW_AT_low_pc = 0x11, + DW_AT_high_pc = 0x12, + DW_AT_language = 0x13, + DW_AT_member = 0x14, + DW_AT_discr = 0x15, + DW_AT_discr_value = 0x16, + DW_AT_visibility = 0x17, + DW_AT_import = 0x18, + DW_AT_string_length = 0x19, + DW_AT_common_reference = 0x1a, + DW_AT_comp_dir = 0x1b, + DW_AT_const_value = 0x1c, + DW_AT_containing_type = 0x1d, + DW_AT_default_value = 0x1e, + DW_AT_inline = 0x20, + DW_AT_is_optional = 0x21, + DW_AT_lower_bound = 0x22, + DW_AT_producer = 0x25, + DW_AT_prototyped = 0x27, + DW_AT_return_addr = 0x2a, + DW_AT_start_scope = 0x2c, + DW_AT_stride_size = 0x2e, + DW_AT_upper_bound = 0x2f, + DW_AT_abstract_origin = 0x31, + DW_AT_accessibility = 0x32, + DW_AT_address_class = 0x33, + DW_AT_artificial = 0x34, + DW_AT_base_types = 0x35, + DW_AT_calling_convention = 0x36, + DW_AT_count = 0x37, + DW_AT_data_member_location = 0x38, + DW_AT_decl_column = 0x39, + DW_AT_decl_file = 0x3a, + DW_AT_decl_line = 0x3b, + DW_AT_declaration = 0x3c, + DW_AT_discr_list = 0x3d, + DW_AT_encoding = 0x3e, + DW_AT_external = 0x3f, + DW_AT_frame_base = 0x40, + DW_AT_friend = 0x41, + DW_AT_identifier_case = 0x42, + DW_AT_macro_info = 0x43, + DW_AT_namelist_items = 0x44, + DW_AT_priority = 0x45, + DW_AT_segment = 0x46, + DW_AT_specification = 0x47, + DW_AT_static_link = 0x48, + DW_AT_type = 0x49, + DW_AT_use_location = 0x4a, + DW_AT_variable_parameter = 0x4b, + DW_AT_virtuality = 0x4c, + DW_AT_vtable_elem_location = 0x4d, + // DWARF 3 values. + DW_AT_allocated = 0x4e, + DW_AT_associated = 0x4f, + DW_AT_data_location = 0x50, + DW_AT_stride = 0x51, + DW_AT_entry_pc = 0x52, + DW_AT_use_UTF8 = 0x53, + DW_AT_extension = 0x54, + DW_AT_ranges = 0x55, + DW_AT_trampoline = 0x56, + DW_AT_call_column = 0x57, + DW_AT_call_file = 0x58, + DW_AT_call_line = 0x59, + // SGI/MIPS extensions. + DW_AT_MIPS_fde = 0x2001, + DW_AT_MIPS_loop_begin = 0x2002, + DW_AT_MIPS_tail_loop_begin = 0x2003, + DW_AT_MIPS_epilog_begin = 0x2004, + DW_AT_MIPS_loop_unroll_factor = 0x2005, + DW_AT_MIPS_software_pipeline_depth = 0x2006, + DW_AT_MIPS_linkage_name = 0x2007, + DW_AT_MIPS_stride = 0x2008, + DW_AT_MIPS_abstract_name = 0x2009, + DW_AT_MIPS_clone_origin = 0x200a, + DW_AT_MIPS_has_inlines = 0x200b, + // HP extensions. + DW_AT_HP_block_index = 0x2000, + DW_AT_HP_unmodifiable = 0x2001, // Same as DW_AT_MIPS_fde. 
+ DW_AT_HP_actuals_stmt_list = 0x2010, + DW_AT_HP_proc_per_section = 0x2011, + DW_AT_HP_raw_data_ptr = 0x2012, + DW_AT_HP_pass_by_reference = 0x2013, + DW_AT_HP_opt_level = 0x2014, + DW_AT_HP_prof_version_id = 0x2015, + DW_AT_HP_opt_flags = 0x2016, + DW_AT_HP_cold_region_low_pc = 0x2017, + DW_AT_HP_cold_region_high_pc = 0x2018, + DW_AT_HP_all_variables_modifiable = 0x2019, + DW_AT_HP_linkage_name = 0x201a, + DW_AT_HP_prof_flags = 0x201b, // In comp unit of procs_info for -g. + // GNU extensions. + DW_AT_sf_names = 0x2101, + DW_AT_src_info = 0x2102, + DW_AT_mac_info = 0x2103, + DW_AT_src_coords = 0x2104, + DW_AT_body_begin = 0x2105, + DW_AT_body_end = 0x2106, + DW_AT_GNU_vector = 0x2107, + // VMS extensions. + DW_AT_VMS_rtnbeg_pd_address = 0x2201, + // UPC extension. + DW_AT_upc_threads_scaled = 0x3210, + // PGI (STMicroelectronics) extensions. + DW_AT_PGI_lbase = 0x3a00, + DW_AT_PGI_soffset = 0x3a01, + DW_AT_PGI_lstride = 0x3a02 +}; + + +// Line number opcodes. +enum DwarfLineNumberOps { + DW_LNS_extended_op = 0, + DW_LNS_copy = 1, + DW_LNS_advance_pc = 2, + DW_LNS_advance_line = 3, + DW_LNS_set_file = 4, + DW_LNS_set_column = 5, + DW_LNS_negate_stmt = 6, + DW_LNS_set_basic_block = 7, + DW_LNS_const_add_pc = 8, + DW_LNS_fixed_advance_pc = 9, + // DWARF 3. + DW_LNS_set_prologue_end = 10, + DW_LNS_set_epilogue_begin = 11, + DW_LNS_set_isa = 12 +}; + +// Line number extended opcodes. +enum DwarfLineNumberExtendedOps { + DW_LNE_end_sequence = 1, + DW_LNE_set_address = 2, + DW_LNE_define_file = 3, + // HP extensions. + DW_LNE_HP_negate_is_UV_update = 0x11, + DW_LNE_HP_push_context = 0x12, + DW_LNE_HP_pop_context = 0x13, + DW_LNE_HP_set_file_line_column = 0x14, + DW_LNE_HP_set_routine_name = 0x15, + DW_LNE_HP_set_sequence = 0x16, + DW_LNE_HP_negate_post_semantics = 0x17, + DW_LNE_HP_negate_function_exit = 0x18, + DW_LNE_HP_negate_front_end_logical = 0x19, + DW_LNE_HP_define_proc = 0x20 +}; + +// Type encoding names and codes +enum DwarfEncoding { + DW_ATE_address =0x1, + DW_ATE_boolean =0x2, + DW_ATE_complex_float =0x3, + DW_ATE_float =0x4, + DW_ATE_signed =0x5, + DW_ATE_signed_char =0x6, + DW_ATE_unsigned =0x7, + DW_ATE_unsigned_char =0x8, + // DWARF3/DWARF3f + DW_ATE_imaginary_float =0x9, + DW_ATE_packed_decimal =0xa, + DW_ATE_numeric_string =0xb, + DW_ATE_edited =0xc, + DW_ATE_signed_fixed =0xd, + DW_ATE_unsigned_fixed =0xe, + DW_ATE_decimal_float =0xf, + DW_ATE_lo_user =0x80, + DW_ATE_hi_user =0xff +}; + +// Location virtual machine opcodes +enum DwarfOpcode { + DW_OP_addr =0x03, + DW_OP_deref =0x06, + DW_OP_const1u =0x08, + DW_OP_const1s =0x09, + DW_OP_const2u =0x0a, + DW_OP_const2s =0x0b, + DW_OP_const4u =0x0c, + DW_OP_const4s =0x0d, + DW_OP_const8u =0x0e, + DW_OP_const8s =0x0f, + DW_OP_constu =0x10, + DW_OP_consts =0x11, + DW_OP_dup =0x12, + DW_OP_drop =0x13, + DW_OP_over =0x14, + DW_OP_pick =0x15, + DW_OP_swap =0x16, + DW_OP_rot =0x17, + DW_OP_xderef =0x18, + DW_OP_abs =0x19, + DW_OP_and =0x1a, + DW_OP_div =0x1b, + DW_OP_minus =0x1c, + DW_OP_mod =0x1d, + DW_OP_mul =0x1e, + DW_OP_neg =0x1f, + DW_OP_not =0x20, + DW_OP_or =0x21, + DW_OP_plus =0x22, + DW_OP_plus_uconst =0x23, + DW_OP_shl =0x24, + DW_OP_shr =0x25, + DW_OP_shra =0x26, + DW_OP_xor =0x27, + DW_OP_bra =0x28, + DW_OP_eq =0x29, + DW_OP_ge =0x2a, + DW_OP_gt =0x2b, + DW_OP_le =0x2c, + DW_OP_lt =0x2d, + DW_OP_ne =0x2e, + DW_OP_skip =0x2f, + DW_OP_lit0 =0x30, + DW_OP_lit1 =0x31, + DW_OP_lit2 =0x32, + DW_OP_lit3 =0x33, + DW_OP_lit4 =0x34, + DW_OP_lit5 =0x35, + DW_OP_lit6 =0x36, + DW_OP_lit7 =0x37, + DW_OP_lit8 =0x38, + DW_OP_lit9 =0x39, 
+ DW_OP_lit10 =0x3a, + DW_OP_lit11 =0x3b, + DW_OP_lit12 =0x3c, + DW_OP_lit13 =0x3d, + DW_OP_lit14 =0x3e, + DW_OP_lit15 =0x3f, + DW_OP_lit16 =0x40, + DW_OP_lit17 =0x41, + DW_OP_lit18 =0x42, + DW_OP_lit19 =0x43, + DW_OP_lit20 =0x44, + DW_OP_lit21 =0x45, + DW_OP_lit22 =0x46, + DW_OP_lit23 =0x47, + DW_OP_lit24 =0x48, + DW_OP_lit25 =0x49, + DW_OP_lit26 =0x4a, + DW_OP_lit27 =0x4b, + DW_OP_lit28 =0x4c, + DW_OP_lit29 =0x4d, + DW_OP_lit30 =0x4e, + DW_OP_lit31 =0x4f, + DW_OP_reg0 =0x50, + DW_OP_reg1 =0x51, + DW_OP_reg2 =0x52, + DW_OP_reg3 =0x53, + DW_OP_reg4 =0x54, + DW_OP_reg5 =0x55, + DW_OP_reg6 =0x56, + DW_OP_reg7 =0x57, + DW_OP_reg8 =0x58, + DW_OP_reg9 =0x59, + DW_OP_reg10 =0x5a, + DW_OP_reg11 =0x5b, + DW_OP_reg12 =0x5c, + DW_OP_reg13 =0x5d, + DW_OP_reg14 =0x5e, + DW_OP_reg15 =0x5f, + DW_OP_reg16 =0x60, + DW_OP_reg17 =0x61, + DW_OP_reg18 =0x62, + DW_OP_reg19 =0x63, + DW_OP_reg20 =0x64, + DW_OP_reg21 =0x65, + DW_OP_reg22 =0x66, + DW_OP_reg23 =0x67, + DW_OP_reg24 =0x68, + DW_OP_reg25 =0x69, + DW_OP_reg26 =0x6a, + DW_OP_reg27 =0x6b, + DW_OP_reg28 =0x6c, + DW_OP_reg29 =0x6d, + DW_OP_reg30 =0x6e, + DW_OP_reg31 =0x6f, + DW_OP_breg0 =0x70, + DW_OP_breg1 =0x71, + DW_OP_breg2 =0x72, + DW_OP_breg3 =0x73, + DW_OP_breg4 =0x74, + DW_OP_breg5 =0x75, + DW_OP_breg6 =0x76, + DW_OP_breg7 =0x77, + DW_OP_breg8 =0x78, + DW_OP_breg9 =0x79, + DW_OP_breg10 =0x7a, + DW_OP_breg11 =0x7b, + DW_OP_breg12 =0x7c, + DW_OP_breg13 =0x7d, + DW_OP_breg14 =0x7e, + DW_OP_breg15 =0x7f, + DW_OP_breg16 =0x80, + DW_OP_breg17 =0x81, + DW_OP_breg18 =0x82, + DW_OP_breg19 =0x83, + DW_OP_breg20 =0x84, + DW_OP_breg21 =0x85, + DW_OP_breg22 =0x86, + DW_OP_breg23 =0x87, + DW_OP_breg24 =0x88, + DW_OP_breg25 =0x89, + DW_OP_breg26 =0x8a, + DW_OP_breg27 =0x8b, + DW_OP_breg28 =0x8c, + DW_OP_breg29 =0x8d, + DW_OP_breg30 =0x8e, + DW_OP_breg31 =0x8f, + DW_OP_regX =0x90, + DW_OP_fbreg =0x91, + DW_OP_bregX =0x92, + DW_OP_piece =0x93, + DW_OP_deref_size =0x94, + DW_OP_xderef_size =0x95, + DW_OP_nop =0x96, + // DWARF3/DWARF3f + DW_OP_push_object_address =0x97, + DW_OP_call2 =0x98, + DW_OP_call4 =0x99, + DW_OP_call_ref =0x9a, + DW_OP_form_tls_address =0x9b, + DW_OP_call_frame_cfa =0x9c, + DW_OP_bit_piece =0x9d, + DW_OP_lo_user =0xe0, + DW_OP_hi_user =0xff, + // GNU extensions + DW_OP_GNU_push_tls_address =0xe0 +}; + +// Source languages. These are values for DW_AT_language. +enum DwarfLanguage + { + DW_LANG_none =0x0000, + DW_LANG_C89 =0x0001, + DW_LANG_C =0x0002, + DW_LANG_Ada83 =0x0003, + DW_LANG_C_plus_plus =0x0004, + DW_LANG_Cobol74 =0x0005, + DW_LANG_Cobol85 =0x0006, + DW_LANG_Fortran77 =0x0007, + DW_LANG_Fortran90 =0x0008, + DW_LANG_Pascal83 =0x0009, + DW_LANG_Modula2 =0x000a, + DW_LANG_Java =0x000b, + DW_LANG_C99 =0x000c, + DW_LANG_Ada95 =0x000d, + DW_LANG_Fortran95 =0x000e, + DW_LANG_PLI =0x000f, + DW_LANG_ObjC =0x0010, + DW_LANG_ObjC_plus_plus =0x0011, + DW_LANG_UPC =0x0012, + DW_LANG_D =0x0013, + // Implementation-defined language code range. + DW_LANG_lo_user = 0x8000, + DW_LANG_hi_user = 0xffff, + + // Extensions. + + // MIPS assembly language. The GNU toolchain uses this for all + // assembly languages, since there's no generic DW_LANG_ value for that. + // See include/dwarf2.h in the binutils, gdb, or gcc source trees. + DW_LANG_Mips_Assembler =0x8001, + DW_LANG_Upc =0x8765 // Unified Parallel C + }; + +// Inline codes. These are values for DW_AT_inline. +enum DwarfInline { + DW_INL_not_inlined =0x0, + DW_INL_inlined =0x1, + DW_INL_declared_not_inlined =0x2, + DW_INL_declared_inlined =0x3 +}; + +// Call Frame Info instructions. 
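+//
+// The first three opcodes below (DW_CFA_advance_loc, DW_CFA_offset and
+// DW_CFA_restore) are "primary" opcodes: only the high two bits of the
+// instruction byte are the opcode proper, and the low six bits carry an
+// operand (a code-address delta or a register number). For example, the
+// single byte 0x85 decodes as DW_CFA_offset (0x80) applied to register 5.
+// An illustrative split, mirroring what CallFrameInfo::State::DoInstruction
+// does in dwarf2reader.cc (not itself part of the upstream header):
+//
+//   unsigned primary = opcode & 0xc0;  // 0x80 -> DW_CFA_offset
+//   unsigned operand = opcode & 0x3f;  // 0x05 -> register 5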
+enum DwarfCFI + { + DW_CFA_advance_loc = 0x40, + DW_CFA_offset = 0x80, + DW_CFA_restore = 0xc0, + DW_CFA_nop = 0x00, + DW_CFA_set_loc = 0x01, + DW_CFA_advance_loc1 = 0x02, + DW_CFA_advance_loc2 = 0x03, + DW_CFA_advance_loc4 = 0x04, + DW_CFA_offset_extended = 0x05, + DW_CFA_restore_extended = 0x06, + DW_CFA_undefined = 0x07, + DW_CFA_same_value = 0x08, + DW_CFA_register = 0x09, + DW_CFA_remember_state = 0x0a, + DW_CFA_restore_state = 0x0b, + DW_CFA_def_cfa = 0x0c, + DW_CFA_def_cfa_register = 0x0d, + DW_CFA_def_cfa_offset = 0x0e, + DW_CFA_def_cfa_expression = 0x0f, + DW_CFA_expression = 0x10, + DW_CFA_offset_extended_sf = 0x11, + DW_CFA_def_cfa_sf = 0x12, + DW_CFA_def_cfa_offset_sf = 0x13, + DW_CFA_val_offset = 0x14, + DW_CFA_val_offset_sf = 0x15, + DW_CFA_val_expression = 0x16, + + // Opcodes in this range are reserved for user extensions. + DW_CFA_lo_user = 0x1c, + DW_CFA_hi_user = 0x3f, + + // SGI/MIPS specific. + DW_CFA_MIPS_advance_loc8 = 0x1d, + + // GNU extensions. + DW_CFA_GNU_window_save = 0x2d, + DW_CFA_GNU_args_size = 0x2e, + DW_CFA_GNU_negative_offset_extended = 0x2f + }; + +// Exception handling 'z' augmentation letters. +enum DwarfZAugmentationCodes { + // If the CFI augmentation string begins with 'z', then the CIE and FDE + // have an augmentation data area just before the instructions, whose + // contents are determined by the subsequent augmentation letters. + DW_Z_augmentation_start = 'z', + + // If this letter is present in a 'z' augmentation string, the CIE + // augmentation data includes a pointer encoding, and the FDE + // augmentation data includes a language-specific data area pointer, + // represented using that encoding. + DW_Z_has_LSDA = 'L', + + // If this letter is present in a 'z' augmentation string, the CIE + // augmentation data includes a pointer encoding, followed by a pointer + // to a personality routine, represented using that encoding. + DW_Z_has_personality_routine = 'P', + + // If this letter is present in a 'z' augmentation string, the CIE + // augmentation data includes a pointer encoding describing how the FDE's + // initial location, address range, and DW_CFA_set_loc operands are + // encoded. + DW_Z_has_FDE_address_encoding = 'R', + + // If this letter is present in a 'z' augmentation string, then code + // addresses covered by FDEs that cite this CIE are signal delivery + // trampolines. Return addresses of frames in trampolines should not be + // adjusted as described in section 6.4.4 of the DWARF 3 spec. + DW_Z_is_signal_trampoline = 'S' +}; + +// Exception handling frame description pointer formats, as described +// by the Linux Standard Base Core Specification 4.0, section 11.5, +// DWARF Extensions. +enum DwarfPointerEncoding + { + DW_EH_PE_absptr = 0x00, + DW_EH_PE_omit = 0xff, + DW_EH_PE_uleb128 = 0x01, + DW_EH_PE_udata2 = 0x02, + DW_EH_PE_udata4 = 0x03, + DW_EH_PE_udata8 = 0x04, + DW_EH_PE_sleb128 = 0x09, + DW_EH_PE_sdata2 = 0x0A, + DW_EH_PE_sdata4 = 0x0B, + DW_EH_PE_sdata8 = 0x0C, + DW_EH_PE_pcrel = 0x10, + DW_EH_PE_textrel = 0x20, + DW_EH_PE_datarel = 0x30, + DW_EH_PE_funcrel = 0x40, + DW_EH_PE_aligned = 0x50, + + // The GNU toolchain sources define this enum value as well, + // simply to help classify the lower nybble values into signed and + // unsigned groups. + DW_EH_PE_signed = 0x08, + + // This is not documented in LSB 4.0, but it is used in both the + // Linux and OS X toolchains. 
It can be added to any other + // encoding (except DW_EH_PE_aligned), and indicates that the + // encoded value represents the address at which the true address + // is stored, not the true address itself. + DW_EH_PE_indirect = 0x80 + }; + +} // namespace dwarf2reader +#endif // COMMON_DWARF_DWARF2ENUMS_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2reader.cc b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2reader.cc new file mode 100644 index 0000000000..f2f3d5810e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2reader.cc @@ -0,0 +1,2344 @@ +// Copyright (c) 2010 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// CFI reader author: Jim Blandy + +// Implementation of dwarf2reader::LineInfo, dwarf2reader::CompilationUnit, +// and dwarf2reader::CallFrameInfo. See dwarf2reader.h for details. + +#include "common/dwarf/dwarf2reader.h" + +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#include "common/dwarf/bytereader-inl.h" +#include "common/dwarf/bytereader.h" +#include "common/dwarf/line_state_machine.h" +#include "common/using_std_string.h" + +namespace dwarf2reader { + +CompilationUnit::CompilationUnit(const SectionMap& sections, uint64 offset, + ByteReader* reader, Dwarf2Handler* handler) + : offset_from_section_start_(offset), reader_(reader), + sections_(sections), handler_(handler), abbrevs_(NULL), + string_buffer_(NULL), string_buffer_length_(0) {} + +// Read a DWARF2/3 abbreviation section. +// Each abbrev consists of a abbreviation number, a tag, a byte +// specifying whether the tag has children, and a list of +// attribute/form pairs. +// The list of forms is terminated by a 0 for the attribute, and a +// zero for the form. The entire abbreviation section is terminated +// by a zero for the code. + +void CompilationUnit::ReadAbbrevs() { + if (abbrevs_) + return; + + // First get the debug_abbrev section. 
".debug_abbrev" is the name + // recommended in the DWARF spec, and used on Linux; + // "__debug_abbrev" is the name used in Mac OS X Mach-O files. + SectionMap::const_iterator iter = sections_.find(".debug_abbrev"); + if (iter == sections_.end()) + iter = sections_.find("__debug_abbrev"); + assert(iter != sections_.end()); + + abbrevs_ = new std::vector; + abbrevs_->resize(1); + + // The only way to check whether we are reading over the end of the + // buffer would be to first compute the size of the leb128 data by + // reading it, then go back and read it again. + const char* abbrev_start = iter->second.first + + header_.abbrev_offset; + const char* abbrevptr = abbrev_start; +#ifndef NDEBUG + const uint64 abbrev_length = iter->second.second - header_.abbrev_offset; +#endif + + while (1) { + CompilationUnit::Abbrev abbrev; + size_t len; + const uint64 number = reader_->ReadUnsignedLEB128(abbrevptr, &len); + + if (number == 0) + break; + abbrev.number = number; + abbrevptr += len; + + assert(abbrevptr < abbrev_start + abbrev_length); + const uint64 tag = reader_->ReadUnsignedLEB128(abbrevptr, &len); + abbrevptr += len; + abbrev.tag = static_cast(tag); + + assert(abbrevptr < abbrev_start + abbrev_length); + abbrev.has_children = reader_->ReadOneByte(abbrevptr); + abbrevptr += 1; + + assert(abbrevptr < abbrev_start + abbrev_length); + + while (1) { + const uint64 nametemp = reader_->ReadUnsignedLEB128(abbrevptr, &len); + abbrevptr += len; + + assert(abbrevptr < abbrev_start + abbrev_length); + const uint64 formtemp = reader_->ReadUnsignedLEB128(abbrevptr, &len); + abbrevptr += len; + if (nametemp == 0 && formtemp == 0) + break; + + const enum DwarfAttribute name = + static_cast(nametemp); + const enum DwarfForm form = static_cast(formtemp); + abbrev.attributes.push_back(std::make_pair(name, form)); + } + assert(abbrev.number == abbrevs_->size()); + abbrevs_->push_back(abbrev); + } +} + +// Skips a single DIE's attributes. +const char* CompilationUnit::SkipDIE(const char* start, + const Abbrev& abbrev) { + for (AttributeList::const_iterator i = abbrev.attributes.begin(); + i != abbrev.attributes.end(); + i++) { + start = SkipAttribute(start, i->second); + } + return start; +} + +// Skips a single attribute form's data. +const char* CompilationUnit::SkipAttribute(const char* start, + enum DwarfForm form) { + size_t len; + + switch (form) { + case DW_FORM_indirect: + form = static_cast(reader_->ReadUnsignedLEB128(start, + &len)); + start += len; + return SkipAttribute(start, form); + + case DW_FORM_flag_present: + return start; + case DW_FORM_data1: + case DW_FORM_flag: + case DW_FORM_ref1: + return start + 1; + case DW_FORM_ref2: + case DW_FORM_data2: + return start + 2; + case DW_FORM_ref4: + case DW_FORM_data4: + return start + 4; + case DW_FORM_ref8: + case DW_FORM_data8: + case DW_FORM_ref_sig8: + return start + 8; + case DW_FORM_string: + return start + strlen(start) + 1; + case DW_FORM_udata: + case DW_FORM_ref_udata: + reader_->ReadUnsignedLEB128(start, &len); + return start + len; + + case DW_FORM_sdata: + reader_->ReadSignedLEB128(start, &len); + return start + len; + case DW_FORM_addr: + return start + reader_->AddressSize(); + case DW_FORM_ref_addr: + // DWARF2 and 3/4 differ on whether ref_addr is address size or + // offset size. 
+ assert(header_.version >= 2); + if (header_.version == 2) { + return start + reader_->AddressSize(); + } else if (header_.version >= 3) { + return start + reader_->OffsetSize(); + } + break; + + case DW_FORM_block1: + return start + 1 + reader_->ReadOneByte(start); + case DW_FORM_block2: + return start + 2 + reader_->ReadTwoBytes(start); + case DW_FORM_block4: + return start + 4 + reader_->ReadFourBytes(start); + case DW_FORM_block: + case DW_FORM_exprloc: { + uint64 size = reader_->ReadUnsignedLEB128(start, &len); + return start + size + len; + } + case DW_FORM_strp: + case DW_FORM_sec_offset: + return start + reader_->OffsetSize(); + } + fprintf(stderr,"Unhandled form type"); + return NULL; +} + +// Read a DWARF2/3 header. +// The header is variable length in DWARF3 (and DWARF2 as extended by +// most compilers), and consists of an length field, a version number, +// the offset in the .debug_abbrev section for our abbrevs, and an +// address size. +void CompilationUnit::ReadHeader() { + const char* headerptr = buffer_; + size_t initial_length_size; + + assert(headerptr + 4 < buffer_ + buffer_length_); + const uint64 initial_length + = reader_->ReadInitialLength(headerptr, &initial_length_size); + headerptr += initial_length_size; + header_.length = initial_length; + + assert(headerptr + 2 < buffer_ + buffer_length_); + header_.version = reader_->ReadTwoBytes(headerptr); + headerptr += 2; + + assert(headerptr + reader_->OffsetSize() < buffer_ + buffer_length_); + header_.abbrev_offset = reader_->ReadOffset(headerptr); + headerptr += reader_->OffsetSize(); + + assert(headerptr + 1 < buffer_ + buffer_length_); + header_.address_size = reader_->ReadOneByte(headerptr); + reader_->SetAddressSize(header_.address_size); + headerptr += 1; + + after_header_ = headerptr; + + // This check ensures that we don't have to do checking during the + // reading of DIEs. header_.length does not include the size of the + // initial length. + assert(buffer_ + initial_length_size + header_.length <= + buffer_ + buffer_length_); +} + +uint64 CompilationUnit::Start() { + // First get the debug_info section. ".debug_info" is the name + // recommended in the DWARF spec, and used on Linux; "__debug_info" + // is the name used in Mac OS X Mach-O files. + SectionMap::const_iterator iter = sections_.find(".debug_info"); + if (iter == sections_.end()) + iter = sections_.find("__debug_info"); + assert(iter != sections_.end()); + + // Set up our buffer + buffer_ = iter->second.first + offset_from_section_start_; + buffer_length_ = iter->second.second - offset_from_section_start_; + + // Read the header + ReadHeader(); + + // Figure out the real length from the end of the initial length to + // the end of the compilation unit, since that is the value we + // return. + uint64 ourlength = header_.length; + if (reader_->OffsetSize() == 8) + ourlength += 12; + else + ourlength += 4; + + // See if the user wants this compilation unit, and if not, just return. + if (!handler_->StartCompilationUnit(offset_from_section_start_, + reader_->AddressSize(), + reader_->OffsetSize(), + header_.length, + header_.version)) + return ourlength; + + // Otherwise, continue by reading our abbreviation entries. + ReadAbbrevs(); + + // Set the string section if we have one. ".debug_str" is the name + // recommended in the DWARF spec, and used on Linux; "__debug_str" + // is the name used in Mac OS X Mach-O files. 
+ iter = sections_.find(".debug_str"); + if (iter == sections_.end()) + iter = sections_.find("__debug_str"); + if (iter != sections_.end()) { + string_buffer_ = iter->second.first; + string_buffer_length_ = iter->second.second; + } + + // Now that we have our abbreviations, start processing DIE's. + ProcessDIEs(); + + return ourlength; +} + +// If one really wanted, you could merge SkipAttribute and +// ProcessAttribute +// This is all boring data manipulation and calling of the handler. +const char* CompilationUnit::ProcessAttribute( + uint64 dieoffset, const char* start, enum DwarfAttribute attr, + enum DwarfForm form) { + size_t len; + + switch (form) { + // DW_FORM_indirect is never used because it is such a space + // waster. + case DW_FORM_indirect: + form = static_cast(reader_->ReadUnsignedLEB128(start, + &len)); + start += len; + return ProcessAttribute(dieoffset, start, attr, form); + + case DW_FORM_flag_present: + handler_->ProcessAttributeUnsigned(dieoffset, attr, form, 1); + return start; + case DW_FORM_data1: + case DW_FORM_flag: + handler_->ProcessAttributeUnsigned(dieoffset, attr, form, + reader_->ReadOneByte(start)); + return start + 1; + case DW_FORM_data2: + handler_->ProcessAttributeUnsigned(dieoffset, attr, form, + reader_->ReadTwoBytes(start)); + return start + 2; + case DW_FORM_data4: + handler_->ProcessAttributeUnsigned(dieoffset, attr, form, + reader_->ReadFourBytes(start)); + return start + 4; + case DW_FORM_data8: + handler_->ProcessAttributeUnsigned(dieoffset, attr, form, + reader_->ReadEightBytes(start)); + return start + 8; + case DW_FORM_string: { + const char* str = start; + handler_->ProcessAttributeString(dieoffset, attr, form, + str); + return start + strlen(str) + 1; + } + case DW_FORM_udata: + handler_->ProcessAttributeUnsigned(dieoffset, attr, form, + reader_->ReadUnsignedLEB128(start, + &len)); + return start + len; + + case DW_FORM_sdata: + handler_->ProcessAttributeSigned(dieoffset, attr, form, + reader_->ReadSignedLEB128(start, &len)); + return start + len; + case DW_FORM_addr: + handler_->ProcessAttributeUnsigned(dieoffset, attr, form, + reader_->ReadAddress(start)); + return start + reader_->AddressSize(); + case DW_FORM_sec_offset: + handler_->ProcessAttributeUnsigned(dieoffset, attr, form, + reader_->ReadOffset(start)); + return start + reader_->OffsetSize(); + + case DW_FORM_ref1: + handler_->ProcessAttributeReference(dieoffset, attr, form, + reader_->ReadOneByte(start) + + offset_from_section_start_); + return start + 1; + case DW_FORM_ref2: + handler_->ProcessAttributeReference(dieoffset, attr, form, + reader_->ReadTwoBytes(start) + + offset_from_section_start_); + return start + 2; + case DW_FORM_ref4: + handler_->ProcessAttributeReference(dieoffset, attr, form, + reader_->ReadFourBytes(start) + + offset_from_section_start_); + return start + 4; + case DW_FORM_ref8: + handler_->ProcessAttributeReference(dieoffset, attr, form, + reader_->ReadEightBytes(start) + + offset_from_section_start_); + return start + 8; + case DW_FORM_ref_udata: + handler_->ProcessAttributeReference(dieoffset, attr, form, + reader_->ReadUnsignedLEB128(start, + &len) + + offset_from_section_start_); + return start + len; + case DW_FORM_ref_addr: + // DWARF2 and 3/4 differ on whether ref_addr is address size or + // offset size. 
+ assert(header_.version >= 2); + if (header_.version == 2) { + handler_->ProcessAttributeReference(dieoffset, attr, form, + reader_->ReadAddress(start)); + return start + reader_->AddressSize(); + } else if (header_.version >= 3) { + handler_->ProcessAttributeReference(dieoffset, attr, form, + reader_->ReadOffset(start)); + return start + reader_->OffsetSize(); + } + break; + case DW_FORM_ref_sig8: + handler_->ProcessAttributeSignature(dieoffset, attr, form, + reader_->ReadEightBytes(start)); + return start + 8; + + case DW_FORM_block1: { + uint64 datalen = reader_->ReadOneByte(start); + handler_->ProcessAttributeBuffer(dieoffset, attr, form, start + 1, + datalen); + return start + 1 + datalen; + } + case DW_FORM_block2: { + uint64 datalen = reader_->ReadTwoBytes(start); + handler_->ProcessAttributeBuffer(dieoffset, attr, form, start + 2, + datalen); + return start + 2 + datalen; + } + case DW_FORM_block4: { + uint64 datalen = reader_->ReadFourBytes(start); + handler_->ProcessAttributeBuffer(dieoffset, attr, form, start + 4, + datalen); + return start + 4 + datalen; + } + case DW_FORM_block: + case DW_FORM_exprloc: { + uint64 datalen = reader_->ReadUnsignedLEB128(start, &len); + handler_->ProcessAttributeBuffer(dieoffset, attr, form, start + len, + datalen); + return start + datalen + len; + } + case DW_FORM_strp: { + assert(string_buffer_ != NULL); + + const uint64 offset = reader_->ReadOffset(start); + assert(string_buffer_ + offset < string_buffer_ + string_buffer_length_); + + const char* str = string_buffer_ + offset; + handler_->ProcessAttributeString(dieoffset, attr, form, + str); + return start + reader_->OffsetSize(); + } + } + fprintf(stderr, "Unhandled form type\n"); + return NULL; +} + +const char* CompilationUnit::ProcessDIE(uint64 dieoffset, + const char* start, + const Abbrev& abbrev) { + for (AttributeList::const_iterator i = abbrev.attributes.begin(); + i != abbrev.attributes.end(); + i++) { + start = ProcessAttribute(dieoffset, start, i->first, i->second); + } + return start; +} + +void CompilationUnit::ProcessDIEs() { + const char* dieptr = after_header_; + size_t len; + + // lengthstart is the place the length field is based on. + // It is the point in the header after the initial length field + const char* lengthstart = buffer_; + + // In 64 bit dwarf, the initial length is 12 bytes, because of the + // 0xffffffff at the start. + if (reader_->OffsetSize() == 8) + lengthstart += 12; + else + lengthstart += 4; + + std::stack die_stack; + + while (dieptr < (lengthstart + header_.length)) { + // We give the user the absolute offset from the beginning of + // debug_info, since they need it to deal with ref_addr forms. + uint64 absolute_offset = (dieptr - buffer_) + offset_from_section_start_; + + uint64 abbrev_num = reader_->ReadUnsignedLEB128(dieptr, &len); + + dieptr += len; + + // Abbrev == 0 represents the end of a list of children, or padding + // at the end of the compilation unit. + if (abbrev_num == 0) { + if (die_stack.size() == 0) + // If it is padding, then we are done with the compilation unit's DIEs. 
+ return; + const uint64 offset = die_stack.top(); + die_stack.pop(); + handler_->EndDIE(offset); + continue; + } + + const Abbrev& abbrev = abbrevs_->at(static_cast(abbrev_num)); + const enum DwarfTag tag = abbrev.tag; + if (!handler_->StartDIE(absolute_offset, tag)) { + dieptr = SkipDIE(dieptr, abbrev); + } else { + dieptr = ProcessDIE(absolute_offset, dieptr, abbrev); + } + + if (abbrev.has_children) { + die_stack.push(absolute_offset); + } else { + handler_->EndDIE(absolute_offset); + } + } +} + +LineInfo::LineInfo(const char* buffer, uint64 buffer_length, + ByteReader* reader, LineInfoHandler* handler): + handler_(handler), reader_(reader), buffer_(buffer), + buffer_length_(buffer_length) { + header_.std_opcode_lengths = NULL; +} + +uint64 LineInfo::Start() { + ReadHeader(); + ReadLines(); + return after_header_ - buffer_; +} + +// The header for a debug_line section is mildly complicated, because +// the line info is very tightly encoded. +void LineInfo::ReadHeader() { + const char* lineptr = buffer_; + size_t initial_length_size; + + const uint64 initial_length + = reader_->ReadInitialLength(lineptr, &initial_length_size); + + lineptr += initial_length_size; + header_.total_length = initial_length; + assert(buffer_ + initial_length_size + header_.total_length <= + buffer_ + buffer_length_); + + // Address size *must* be set by CU ahead of time. + assert(reader_->AddressSize() != 0); + + header_.version = reader_->ReadTwoBytes(lineptr); + lineptr += 2; + + header_.prologue_length = reader_->ReadOffset(lineptr); + lineptr += reader_->OffsetSize(); + + header_.min_insn_length = reader_->ReadOneByte(lineptr); + lineptr += 1; + + header_.default_is_stmt = reader_->ReadOneByte(lineptr); + lineptr += 1; + + header_.line_base = *reinterpret_cast(lineptr); + lineptr += 1; + + header_.line_range = reader_->ReadOneByte(lineptr); + lineptr += 1; + + header_.opcode_base = reader_->ReadOneByte(lineptr); + lineptr += 1; + + header_.std_opcode_lengths = new std::vector; + header_.std_opcode_lengths->resize(header_.opcode_base + 1); + (*header_.std_opcode_lengths)[0] = 0; + for (int i = 1; i < header_.opcode_base; i++) { + (*header_.std_opcode_lengths)[i] = reader_->ReadOneByte(lineptr); + lineptr += 1; + } + + // It is legal for the directory entry table to be empty. + if (*lineptr) { + uint32 dirindex = 1; + while (*lineptr) { + const char* dirname = lineptr; + handler_->DefineDir(dirname, dirindex); + lineptr += strlen(dirname) + 1; + dirindex++; + } + } + lineptr++; + + // It is also legal for the file entry table to be empty. 
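+  // Each entry in that table is a NUL-terminated file name followed by three
+  // unsigned LEB128 values: the index of the directory containing the file,
+  // its modification time, and its length in bytes. ULEB128 stores 7 bits
+  // per byte, least-significant group first, with the top bit of each byte
+  // acting as a "more bytes follow" flag; for example the byte sequence
+  // 0xe5 0x8e 0x26 decodes to 0x65 + (0x0e << 7) + (0x26 << 14) = 624485,
+  // which is how ReadUnsignedLEB128 reconstructs the values read below.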
+ if (*lineptr) { + uint32 fileindex = 1; + size_t len; + while (*lineptr) { + const char* filename = lineptr; + lineptr += strlen(filename) + 1; + + uint64 dirindex = reader_->ReadUnsignedLEB128(lineptr, &len); + lineptr += len; + + uint64 mod_time = reader_->ReadUnsignedLEB128(lineptr, &len); + lineptr += len; + + uint64 filelength = reader_->ReadUnsignedLEB128(lineptr, &len); + lineptr += len; + handler_->DefineFile(filename, fileindex, static_cast(dirindex), + mod_time, filelength); + fileindex++; + } + } + lineptr++; + + after_header_ = lineptr; +} + +/* static */ +bool LineInfo::ProcessOneOpcode(ByteReader* reader, + LineInfoHandler* handler, + const struct LineInfoHeader &header, + const char* start, + struct LineStateMachine* lsm, + size_t* len, + uintptr pc, + bool *lsm_passes_pc) { + size_t oplen = 0; + size_t templen; + uint8 opcode = reader->ReadOneByte(start); + oplen++; + start++; + + // If the opcode is great than the opcode_base, it is a special + // opcode. Most line programs consist mainly of special opcodes. + if (opcode >= header.opcode_base) { + opcode -= header.opcode_base; + const int64 advance_address = (opcode / header.line_range) + * header.min_insn_length; + const int32 advance_line = (opcode % header.line_range) + + header.line_base; + + // Check if the lsm passes "pc". If so, mark it as passed. + if (lsm_passes_pc && + lsm->address <= pc && pc < lsm->address + advance_address) { + *lsm_passes_pc = true; + } + + lsm->address += advance_address; + lsm->line_num += advance_line; + lsm->basic_block = true; + *len = oplen; + return true; + } + + // Otherwise, we have the regular opcodes + switch (opcode) { + case DW_LNS_copy: { + lsm->basic_block = false; + *len = oplen; + return true; + } + + case DW_LNS_advance_pc: { + uint64 advance_address = reader->ReadUnsignedLEB128(start, &templen); + oplen += templen; + + // Check if the lsm passes "pc". If so, mark it as passed. + if (lsm_passes_pc && lsm->address <= pc && + pc < lsm->address + header.min_insn_length * advance_address) { + *lsm_passes_pc = true; + } + + lsm->address += header.min_insn_length * advance_address; + } + break; + case DW_LNS_advance_line: { + const int64 advance_line = reader->ReadSignedLEB128(start, &templen); + oplen += templen; + lsm->line_num += static_cast(advance_line); + + // With gcc 4.2.1, we can get the line_no here for the first time + // since DW_LNS_advance_line is called after DW_LNE_set_address is + // called. So we check if the lsm passes "pc" here, not in + // DW_LNE_set_address. + if (lsm_passes_pc && lsm->address == pc) { + *lsm_passes_pc = true; + } + } + break; + case DW_LNS_set_file: { + const uint64 fileno = reader->ReadUnsignedLEB128(start, &templen); + oplen += templen; + lsm->file_num = static_cast(fileno); + } + break; + case DW_LNS_set_column: { + const uint64 colno = reader->ReadUnsignedLEB128(start, &templen); + oplen += templen; + lsm->column_num = static_cast(colno); + } + break; + case DW_LNS_negate_stmt: { + lsm->is_stmt = !lsm->is_stmt; + } + break; + case DW_LNS_set_basic_block: { + lsm->basic_block = true; + } + break; + case DW_LNS_fixed_advance_pc: { + const uint16 advance_address = reader->ReadTwoBytes(start); + oplen += 2; + + // Check if the lsm passes "pc". If so, mark it as passed. 
+ if (lsm_passes_pc && + lsm->address <= pc && pc < lsm->address + advance_address) { + *lsm_passes_pc = true; + } + + lsm->address += advance_address; + } + break; + case DW_LNS_const_add_pc: { + const int64 advance_address = header.min_insn_length + * ((255 - header.opcode_base) + / header.line_range); + + // Check if the lsm passes "pc". If so, mark it as passed. + if (lsm_passes_pc && + lsm->address <= pc && pc < lsm->address + advance_address) { + *lsm_passes_pc = true; + } + + lsm->address += advance_address; + } + break; + case DW_LNS_extended_op: { + const uint64 extended_op_len = reader->ReadUnsignedLEB128(start, + &templen); + start += templen; + oplen += templen + extended_op_len; + + const uint64 extended_op = reader->ReadOneByte(start); + start++; + + switch (extended_op) { + case DW_LNE_end_sequence: { + lsm->end_sequence = true; + *len = oplen; + return true; + } + break; + case DW_LNE_set_address: { + // With gcc 4.2.1, we cannot tell the line_no here since + // DW_LNE_set_address is called before DW_LNS_advance_line is + // called. So we do not check if the lsm passes "pc" here. See + // also the comment in DW_LNS_advance_line. + uint64 address = reader->ReadAddress(start); + lsm->address = address; + } + break; + case DW_LNE_define_file: { + const char* filename = start; + + templen = strlen(filename) + 1; + start += templen; + + uint64 dirindex = reader->ReadUnsignedLEB128(start, &templen); + oplen += templen; + + const uint64 mod_time = reader->ReadUnsignedLEB128(start, + &templen); + oplen += templen; + + const uint64 filelength = reader->ReadUnsignedLEB128(start, + &templen); + oplen += templen; + + if (handler) { + handler->DefineFile(filename, -1, static_cast(dirindex), + mod_time, filelength); + } + } + break; + } + } + break; + + default: { + // Ignore unknown opcode silently + if (header.std_opcode_lengths) { + for (int i = 0; i < (*header.std_opcode_lengths)[opcode]; i++) { + reader->ReadUnsignedLEB128(start, &templen); + start += templen; + oplen += templen; + } + } + } + break; + } + *len = oplen; + return false; +} + +void LineInfo::ReadLines() { + struct LineStateMachine lsm; + + // lengthstart is the place the length field is based on. + // It is the point in the header after the initial length field + const char* lengthstart = buffer_; + + // In 64 bit dwarf, the initial length is 12 bytes, because of the + // 0xffffffff at the start. + if (reader_->OffsetSize() == 8) + lengthstart += 12; + else + lengthstart += 4; + + const char* lineptr = after_header_; + lsm.Reset(header_.default_is_stmt); + + // The LineInfoHandler interface expects each line's length along + // with its address, but DWARF only provides addresses (sans + // length), and an end-of-sequence address; one infers the length + // from the next address. So we report a line only when we get the + // next line's address, or the end-of-sequence address. 
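+  // For example, if the program produces rows at 0x1000 (file 1, line 10)
+  // and then at 0x1010 (file 1, line 12), the first row is only reported
+  // once the second appears, as AddLine(0x1000, 0x10, 1, 10, 0).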
+ bool have_pending_line = false; + uint64 pending_address = 0; + uint32 pending_file_num = 0, pending_line_num = 0, pending_column_num = 0; + + while (lineptr < lengthstart + header_.total_length) { + size_t oplength; + bool add_row = ProcessOneOpcode(reader_, handler_, header_, + lineptr, &lsm, &oplength, (uintptr)-1, + NULL); + if (add_row) { + if (have_pending_line) + handler_->AddLine(pending_address, lsm.address - pending_address, + pending_file_num, pending_line_num, + pending_column_num); + if (lsm.end_sequence) { + lsm.Reset(header_.default_is_stmt); + have_pending_line = false; + } else { + pending_address = lsm.address; + pending_file_num = lsm.file_num; + pending_line_num = lsm.line_num; + pending_column_num = lsm.column_num; + have_pending_line = true; + } + } + lineptr += oplength; + } + + after_header_ = lengthstart + header_.total_length; +} + +// A DWARF rule for recovering the address or value of a register, or +// computing the canonical frame address. There is one subclass of this for +// each '*Rule' member function in CallFrameInfo::Handler. +// +// It's annoying that we have to handle Rules using pointers (because +// the concrete instances can have an arbitrary size). They're small, +// so it would be much nicer if we could just handle them by value +// instead of fretting about ownership and destruction. +// +// It seems like all these could simply be instances of std::tr1::bind, +// except that we need instances to be EqualityComparable, too. +// +// This could logically be nested within State, but then the qualified names +// get horrendous. +class CallFrameInfo::Rule { + public: + virtual ~Rule() { } + + // Tell HANDLER that, at ADDRESS in the program, REGISTER can be + // recovered using this rule. If REGISTER is kCFARegister, then this rule + // describes how to compute the canonical frame address. Return what the + // HANDLER member function returned. + virtual bool Handle(Handler *handler, + uint64 address, int register) const = 0; + + // Equality on rules. We use these to decide which rules we need + // to report after a DW_CFA_restore_state instruction. + virtual bool operator==(const Rule &rhs) const = 0; + + bool operator!=(const Rule &rhs) const { return ! (*this == rhs); } + + // Return a pointer to a copy of this rule. + virtual Rule *Copy() const = 0; + + // If this is a base+offset rule, change its base register to REG. + // Otherwise, do nothing. (Ugly, but required for DW_CFA_def_cfa_register.) + virtual void SetBaseRegister(unsigned reg) { } + + // If this is a base+offset rule, change its offset to OFFSET. Otherwise, + // do nothing. (Ugly, but required for DW_CFA_def_cfa_offset.) + virtual void SetOffset(long long offset) { } +}; + +// Rule: the value the register had in the caller cannot be recovered. +class CallFrameInfo::UndefinedRule: public CallFrameInfo::Rule { + public: + UndefinedRule() { } + ~UndefinedRule() { } + bool Handle(Handler *handler, uint64 address, int reg) const { + return handler->UndefinedRule(address, reg); + } + bool operator==(const Rule &rhs) const { + // dynamic_cast is allowed by the Google C++ Style Guide, if the use has + // been carefully considered; cheap RTTI-like workarounds are forbidden. + const UndefinedRule *our_rhs = dynamic_cast(&rhs); + return (our_rhs != NULL); + } + Rule *Copy() const { return new UndefinedRule(*this); } +}; + +// Rule: the register's value is the same as that it had in the caller. 
+class CallFrameInfo::SameValueRule: public CallFrameInfo::Rule { + public: + SameValueRule() { } + ~SameValueRule() { } + bool Handle(Handler *handler, uint64 address, int reg) const { + return handler->SameValueRule(address, reg); + } + bool operator==(const Rule &rhs) const { + // dynamic_cast is allowed by the Google C++ Style Guide, if the use has + // been carefully considered; cheap RTTI-like workarounds are forbidden. + const SameValueRule *our_rhs = dynamic_cast(&rhs); + return (our_rhs != NULL); + } + Rule *Copy() const { return new SameValueRule(*this); } +}; + +// Rule: the register is saved at OFFSET from BASE_REGISTER. BASE_REGISTER +// may be CallFrameInfo::Handler::kCFARegister. +class CallFrameInfo::OffsetRule: public CallFrameInfo::Rule { + public: + OffsetRule(int base_register, long offset) + : base_register_(base_register), offset_(offset) { } + ~OffsetRule() { } + bool Handle(Handler *handler, uint64 address, int reg) const { + return handler->OffsetRule(address, reg, base_register_, offset_); + } + bool operator==(const Rule &rhs) const { + // dynamic_cast is allowed by the Google C++ Style Guide, if the use has + // been carefully considered; cheap RTTI-like workarounds are forbidden. + const OffsetRule *our_rhs = dynamic_cast(&rhs); + return (our_rhs && + base_register_ == our_rhs->base_register_ && + offset_ == our_rhs->offset_); + } + Rule *Copy() const { return new OffsetRule(*this); } + // We don't actually need SetBaseRegister or SetOffset here, since they + // are only ever applied to CFA rules, for DW_CFA_def_cfa_offset, and it + // doesn't make sense to use OffsetRule for computing the CFA: it + // computes the address at which a register is saved, not a value. + private: + int base_register_; + long offset_; +}; + +// Rule: the value the register had in the caller is the value of +// BASE_REGISTER plus offset. BASE_REGISTER may be +// CallFrameInfo::Handler::kCFARegister. +class CallFrameInfo::ValOffsetRule: public CallFrameInfo::Rule { + public: + ValOffsetRule(int base_register, long offset) + : base_register_(base_register), offset_(offset) { } + ~ValOffsetRule() { } + bool Handle(Handler *handler, uint64 address, int reg) const { + return handler->ValOffsetRule(address, reg, base_register_, offset_); + } + bool operator==(const Rule &rhs) const { + // dynamic_cast is allowed by the Google C++ Style Guide, if the use has + // been carefully considered; cheap RTTI-like workarounds are forbidden. + const ValOffsetRule *our_rhs = dynamic_cast(&rhs); + return (our_rhs && + base_register_ == our_rhs->base_register_ && + offset_ == our_rhs->offset_); + } + Rule *Copy() const { return new ValOffsetRule(*this); } + void SetBaseRegister(unsigned reg) { base_register_ = reg; } + void SetOffset(long long offset) { offset_ = offset; } + private: + int base_register_; + long offset_; +}; + +// Rule: the register has been saved in another register REGISTER_NUMBER_. +class CallFrameInfo::RegisterRule: public CallFrameInfo::Rule { + public: + explicit RegisterRule(int register_number) + : register_number_(register_number) { } + ~RegisterRule() { } + bool Handle(Handler *handler, uint64 address, int reg) const { + return handler->RegisterRule(address, reg, register_number_); + } + bool operator==(const Rule &rhs) const { + // dynamic_cast is allowed by the Google C++ Style Guide, if the use has + // been carefully considered; cheap RTTI-like workarounds are forbidden. 
+ const RegisterRule *our_rhs = dynamic_cast(&rhs); + return (our_rhs && register_number_ == our_rhs->register_number_); + } + Rule *Copy() const { return new RegisterRule(*this); } + private: + int register_number_; +}; + +// Rule: EXPRESSION evaluates to the address at which the register is saved. +class CallFrameInfo::ExpressionRule: public CallFrameInfo::Rule { + public: + explicit ExpressionRule(const string &expression) + : expression_(expression) { } + ~ExpressionRule() { } + bool Handle(Handler *handler, uint64 address, int reg) const { + return handler->ExpressionRule(address, reg, expression_); + } + bool operator==(const Rule &rhs) const { + // dynamic_cast is allowed by the Google C++ Style Guide, if the use has + // been carefully considered; cheap RTTI-like workarounds are forbidden. + const ExpressionRule *our_rhs = dynamic_cast(&rhs); + return (our_rhs && expression_ == our_rhs->expression_); + } + Rule *Copy() const { return new ExpressionRule(*this); } + private: + string expression_; +}; + +// Rule: EXPRESSION evaluates to the address at which the register is saved. +class CallFrameInfo::ValExpressionRule: public CallFrameInfo::Rule { + public: + explicit ValExpressionRule(const string &expression) + : expression_(expression) { } + ~ValExpressionRule() { } + bool Handle(Handler *handler, uint64 address, int reg) const { + return handler->ValExpressionRule(address, reg, expression_); + } + bool operator==(const Rule &rhs) const { + // dynamic_cast is allowed by the Google C++ Style Guide, if the use has + // been carefully considered; cheap RTTI-like workarounds are forbidden. + const ValExpressionRule *our_rhs = + dynamic_cast(&rhs); + return (our_rhs && expression_ == our_rhs->expression_); + } + Rule *Copy() const { return new ValExpressionRule(*this); } + private: + string expression_; +}; + +// A map from register numbers to rules. +class CallFrameInfo::RuleMap { + public: + RuleMap() : cfa_rule_(NULL) { } + RuleMap(const RuleMap &rhs) : cfa_rule_(NULL) { *this = rhs; } + ~RuleMap() { Clear(); } + + RuleMap &operator=(const RuleMap &rhs); + + // Set the rule for computing the CFA to RULE. Take ownership of RULE. + void SetCFARule(Rule *rule) { delete cfa_rule_; cfa_rule_ = rule; } + + // Return the current CFA rule. Unlike RegisterRule, this RuleMap retains + // ownership of the rule. We use this for DW_CFA_def_cfa_offset and + // DW_CFA_def_cfa_register, and for detecting references to the CFA before + // a rule for it has been established. + Rule *CFARule() const { return cfa_rule_; } + + // Return the rule for REG, or NULL if there is none. The caller takes + // ownership of the result. + Rule *RegisterRule(int reg) const; + + // Set the rule for computing REG to RULE. Take ownership of RULE. + void SetRegisterRule(int reg, Rule *rule); + + // Make all the appropriate calls to HANDLER as if we were changing from + // this RuleMap to NEW_RULES at ADDRESS. We use this to implement + // DW_CFA_restore_state, where lots of rules can change simultaneously. + // Return true if all handlers returned true; otherwise, return false. + bool HandleTransitionTo(Handler *handler, uint64 address, + const RuleMap &new_rules) const; + + private: + // A map from register numbers to Rules. + typedef std::map RuleByNumber; + + // Remove all register rules and clear cfa_rule_. + void Clear(); + + // The rule for computing the canonical frame address. This RuleMap owns + // this rule. 
+ Rule *cfa_rule_; + + // A map from register numbers to postfix expressions to recover + // their values. This RuleMap owns the Rules the map refers to. + RuleByNumber registers_; +}; + +CallFrameInfo::RuleMap &CallFrameInfo::RuleMap::operator=(const RuleMap &rhs) { + Clear(); + // Since each map owns the rules it refers to, assignment must copy them. + if (rhs.cfa_rule_) cfa_rule_ = rhs.cfa_rule_->Copy(); + for (RuleByNumber::const_iterator it = rhs.registers_.begin(); + it != rhs.registers_.end(); it++) + registers_[it->first] = it->second->Copy(); + return *this; +} + +CallFrameInfo::Rule *CallFrameInfo::RuleMap::RegisterRule(int reg) const { + assert(reg != Handler::kCFARegister); + RuleByNumber::const_iterator it = registers_.find(reg); + if (it != registers_.end()) + return it->second->Copy(); + else + return NULL; +} + +void CallFrameInfo::RuleMap::SetRegisterRule(int reg, Rule *rule) { + assert(reg != Handler::kCFARegister); + assert(rule); + Rule **slot = ®isters_[reg]; + delete *slot; + *slot = rule; +} + +bool CallFrameInfo::RuleMap::HandleTransitionTo( + Handler *handler, + uint64 address, + const RuleMap &new_rules) const { + // Transition from cfa_rule_ to new_rules.cfa_rule_. + if (cfa_rule_ && new_rules.cfa_rule_) { + if (*cfa_rule_ != *new_rules.cfa_rule_ && + !new_rules.cfa_rule_->Handle(handler, address, + Handler::kCFARegister)) + return false; + } else if (cfa_rule_) { + // this RuleMap has a CFA rule but new_rules doesn't. + // CallFrameInfo::Handler has no way to handle this --- and shouldn't; + // it's garbage input. The instruction interpreter should have + // detected this and warned, so take no action here. + } else if (new_rules.cfa_rule_) { + // This shouldn't be possible: NEW_RULES is some prior state, and + // there's no way to remove entries. + assert(0); + } else { + // Both CFA rules are empty. No action needed. + } + + // Traverse the two maps in order by register number, and report + // whatever differences we find. + RuleByNumber::const_iterator old_it = registers_.begin(); + RuleByNumber::const_iterator new_it = new_rules.registers_.begin(); + while (old_it != registers_.end() && new_it != new_rules.registers_.end()) { + if (old_it->first < new_it->first) { + // This RuleMap has an entry for old_it->first, but NEW_RULES + // doesn't. + // + // This isn't really the right thing to do, but since CFI generally + // only mentions callee-saves registers, and GCC's convention for + // callee-saves registers is that they are unchanged, it's a good + // approximation. + if (!handler->SameValueRule(address, old_it->first)) + return false; + old_it++; + } else if (old_it->first > new_it->first) { + // NEW_RULES has entry for new_it->first, but this RuleMap + // doesn't. This shouldn't be possible: NEW_RULES is some prior + // state, and there's no way to remove entries. + assert(0); + } else { + // Both maps have an entry for this register. Report the new + // rule if it is different. + if (*old_it->second != *new_it->second && + !new_it->second->Handle(handler, address, new_it->first)) + return false; + new_it++, old_it++; + } + } + // Finish off entries from this RuleMap with no counterparts in new_rules. + while (old_it != registers_.end()) { + if (!handler->SameValueRule(address, old_it->first)) + return false; + old_it++; + } + // Since we only make transitions from a rule set to some previously + // saved rule set, and we can only add rules to the map, NEW_RULES + // must have fewer rules than *this. 
+ assert(new_it == new_rules.registers_.end()); + + return true; +} + +// Remove all register rules and clear cfa_rule_. +void CallFrameInfo::RuleMap::Clear() { + delete cfa_rule_; + cfa_rule_ = NULL; + for (RuleByNumber::iterator it = registers_.begin(); + it != registers_.end(); it++) + delete it->second; + registers_.clear(); +} + +// The state of the call frame information interpreter as it processes +// instructions from a CIE and FDE. +class CallFrameInfo::State { + public: + // Create a call frame information interpreter state with the given + // reporter, reader, handler, and initial call frame info address. + State(ByteReader *reader, Handler *handler, Reporter *reporter, + uint64 address) + : reader_(reader), handler_(handler), reporter_(reporter), + address_(address), entry_(NULL), cursor_(NULL) { } + + // Interpret instructions from CIE, save the resulting rule set for + // DW_CFA_restore instructions, and return true. On error, report + // the problem to reporter_ and return false. + bool InterpretCIE(const CIE &cie); + + // Interpret instructions from FDE, and return true. On error, + // report the problem to reporter_ and return false. + bool InterpretFDE(const FDE &fde); + + private: + // The operands of a CFI instruction, for ParseOperands. + struct Operands { + unsigned register_number; // A register number. + uint64 offset; // An offset or address. + long signed_offset; // A signed offset. + string expression; // A DWARF expression. + }; + + // Parse CFI instruction operands from STATE's instruction stream as + // described by FORMAT. On success, populate OPERANDS with the + // results, and return true. On failure, report the problem and + // return false. + // + // Each character of FORMAT should be one of the following: + // + // 'r' unsigned LEB128 register number (OPERANDS->register_number) + // 'o' unsigned LEB128 offset (OPERANDS->offset) + // 's' signed LEB128 offset (OPERANDS->signed_offset) + // 'a' machine-size address (OPERANDS->offset) + // (If the CIE has a 'z' augmentation string, 'a' uses the + // encoding specified by the 'R' argument.) + // '1' a one-byte offset (OPERANDS->offset) + // '2' a two-byte offset (OPERANDS->offset) + // '4' a four-byte offset (OPERANDS->offset) + // '8' an eight-byte offset (OPERANDS->offset) + // 'e' a DW_FORM_block holding a (OPERANDS->expression) + // DWARF expression + bool ParseOperands(const char *format, Operands *operands); + + // Interpret one CFI instruction from STATE's instruction stream, update + // STATE, report any rule changes to handler_, and return true. On + // failure, report the problem and return false. + bool DoInstruction(); + + // The following Do* member functions are subroutines of DoInstruction, + // factoring out the actual work of operations that have several + // different encodings. + + // Set the CFA rule to be the value of BASE_REGISTER plus OFFSET, and + // return true. On failure, report and return false. (Used for + // DW_CFA_def_cfa and DW_CFA_def_cfa_sf.) + bool DoDefCFA(unsigned base_register, long offset); + + // Change the offset of the CFA rule to OFFSET, and return true. On + // failure, report and return false. (Subroutine for + // DW_CFA_def_cfa_offset and DW_CFA_def_cfa_offset_sf.) + bool DoDefCFAOffset(long offset); + + // Specify that REG can be recovered using RULE, and return true. On + // failure, report and return false. + bool DoRule(unsigned reg, Rule *rule); + + // Specify that REG can be found at OFFSET from the CFA, and return true. 
+  // On failure, report and return false. (Subroutine for DW_CFA_offset,
+  // DW_CFA_offset_extended, and DW_CFA_offset_extended_sf.)
+  bool DoOffset(unsigned reg, long offset);
+
+  // Specify that the caller's value for REG is the CFA plus OFFSET,
+  // and return true. On failure, report and return false. (Subroutine
+  // for DW_CFA_val_offset and DW_CFA_val_offset_sf.)
+  bool DoValOffset(unsigned reg, long offset);
+
+  // Restore REG to the rule established in the CIE, and return true. On
+  // failure, report and return false. (Subroutine for DW_CFA_restore and
+  // DW_CFA_restore_extended.)
+  bool DoRestore(unsigned reg);
+
+  // Return the section offset of the instruction at cursor. For use
+  // in error messages.
+  uint64 CursorOffset() { return entry_->offset + (cursor_ - entry_->start); }
+
+  // Report that entry_ is incomplete, and return false. For brevity.
+  bool ReportIncomplete() {
+    reporter_->Incomplete(entry_->offset, entry_->kind);
+    return false;
+  }
+
+  // For reading multi-byte values with the appropriate endianness.
+  ByteReader *reader_;
+
+  // The handler to which we should report the data we find.
+  Handler *handler_;
+
+  // For reporting problems in the info we're parsing.
+  Reporter *reporter_;
+
+  // The code address to which the next instruction in the stream applies.
+  uint64 address_;
+
+  // The entry whose instructions we are currently processing. This is
+  // first a CIE, and then an FDE.
+  const Entry *entry_;
+
+  // The next instruction to process.
+  const char *cursor_;
+
+  // The current set of rules.
+  RuleMap rules_;
+
+  // The set of rules established by the CIE, used by DW_CFA_restore
+  // and DW_CFA_restore_extended. We set this after interpreting the
+  // CIE's instructions.
+  RuleMap cie_rules_;
+
+  // A stack of saved states, for DW_CFA_remember_state and
+  // DW_CFA_restore_state.
+  std::stack<RuleMap> saved_rules_;
+};
+
+bool CallFrameInfo::State::InterpretCIE(const CIE &cie) {
+  entry_ = &cie;
+  cursor_ = entry_->instructions;
+  while (cursor_ < entry_->end)
+    if (!DoInstruction())
+      return false;
+  // Note the rules established by the CIE, for use by DW_CFA_restore
+  // and DW_CFA_restore_extended.
+  cie_rules_ = rules_;
+  return true;
+}
+
+bool CallFrameInfo::State::InterpretFDE(const FDE &fde) {
+  entry_ = &fde;
+  cursor_ = entry_->instructions;
+  while (cursor_ < entry_->end)
+    if (!DoInstruction())
+      return false;
+  return true;
+}
+
+bool CallFrameInfo::State::ParseOperands(const char *format,
+                                         Operands *operands) {
+  size_t len;
+  const char *operand;
+
+  for (operand = format; *operand; operand++) {
+    size_t bytes_left = entry_->end - cursor_;
+    switch (*operand) {
+      case 'r':
+        operands->register_number = reader_->ReadUnsignedLEB128(cursor_, &len);
+        if (len > bytes_left) return ReportIncomplete();
+        cursor_ += len;
+        break;
+
+      case 'o':
+        operands->offset = reader_->ReadUnsignedLEB128(cursor_, &len);
+        if (len > bytes_left) return ReportIncomplete();
+        cursor_ += len;
+        break;
+
+      case 's':
+        operands->signed_offset = reader_->ReadSignedLEB128(cursor_, &len);
+        if (len > bytes_left) return ReportIncomplete();
+        cursor_ += len;
+        break;
+
+      case 'a':
+        operands->offset =
+          reader_->ReadEncodedPointer(cursor_, entry_->cie->pointer_encoding,
+                                      &len);
+        if (len > bytes_left) return ReportIncomplete();
+        cursor_ += len;
+        break;
+
+      case '1':
+        if (1 > bytes_left) return ReportIncomplete();
+        operands->offset = static_cast<unsigned char>(*cursor_++);
+        break;
+
+      case '2':
+        if (2 > bytes_left) return ReportIncomplete();
+        operands->offset = reader_->ReadTwoBytes(cursor_);
+        cursor_ += 2;
+        break;
+
+      case '4':
+        if (4 > bytes_left) return ReportIncomplete();
+        operands->offset = reader_->ReadFourBytes(cursor_);
+        cursor_ += 4;
+        break;
+
+      case '8':
+        if (8 > bytes_left) return ReportIncomplete();
+        operands->offset = reader_->ReadEightBytes(cursor_);
+        cursor_ += 8;
+        break;
+
+      case 'e': {
+        size_t expression_length = reader_->ReadUnsignedLEB128(cursor_, &len);
+        if (len > bytes_left || expression_length > bytes_left - len)
+          return ReportIncomplete();
+        cursor_ += len;
+        operands->expression = string(cursor_, expression_length);
+        cursor_ += expression_length;
+        break;
+      }
+
+      default:
+        assert(0);
+    }
+  }
+
+  return true;
+}
+
+bool CallFrameInfo::State::DoInstruction() {
+  CIE *cie = entry_->cie;
+  Operands ops;
+
+  // Our entry's kind should have been set by now.
+  assert(entry_->kind != kUnknown);
+
+  // We shouldn't have been invoked unless there were more
+  // instructions to parse.
+  assert(cursor_ < entry_->end);
+
+  unsigned opcode = *cursor_++;
+  if ((opcode & 0xc0) != 0) {
+    switch (opcode & 0xc0) {
+      // Advance the address.
+      case DW_CFA_advance_loc: {
+        size_t code_offset = opcode & 0x3f;
+        address_ += code_offset * cie->code_alignment_factor;
+        break;
+      }
+
+      // Find a register at an offset from the CFA.
+      case DW_CFA_offset:
+        if (!ParseOperands("o", &ops) ||
+            !DoOffset(opcode & 0x3f, ops.offset * cie->data_alignment_factor))
+          return false;
+        break;
+
+      // Restore the rule established for a register by the CIE.
+      case DW_CFA_restore:
+        if (!DoRestore(opcode & 0x3f)) return false;
+        break;
+
+      // The 'if' above should have excluded this possibility.
+      default:
+        assert(0);
+    }
+
+    // Return here, so the big switch below won't be indented.
+    return true;
+  }
+
+  switch (opcode) {
+    // Set the address.
+    case DW_CFA_set_loc:
+      if (!ParseOperands("a", &ops)) return false;
+      address_ = ops.offset;
+      break;
+
+    // Advance the address.
+    case DW_CFA_advance_loc1:
+      if (!ParseOperands("1", &ops)) return false;
+      address_ += ops.offset * cie->code_alignment_factor;
+      break;
+
+    // Advance the address.
+ case DW_CFA_advance_loc2: + if (!ParseOperands("2", &ops)) return false; + address_ += ops.offset * cie->code_alignment_factor; + break; + + // Advance the address. + case DW_CFA_advance_loc4: + if (!ParseOperands("4", &ops)) return false; + address_ += ops.offset * cie->code_alignment_factor; + break; + + // Advance the address. + case DW_CFA_MIPS_advance_loc8: + if (!ParseOperands("8", &ops)) return false; + address_ += ops.offset * cie->code_alignment_factor; + break; + + // Compute the CFA by adding an offset to a register. + case DW_CFA_def_cfa: + if (!ParseOperands("ro", &ops) || + !DoDefCFA(ops.register_number, ops.offset)) + return false; + break; + + // Compute the CFA by adding an offset to a register. + case DW_CFA_def_cfa_sf: + if (!ParseOperands("rs", &ops) || + !DoDefCFA(ops.register_number, + ops.signed_offset * cie->data_alignment_factor)) + return false; + break; + + // Change the base register used to compute the CFA. + case DW_CFA_def_cfa_register: { + if (!ParseOperands("r", &ops)) return false; + Rule *cfa_rule = rules_.CFARule(); + if (!cfa_rule) { + if (!DoDefCFA(ops.register_number, ops.offset)) { + reporter_->NoCFARule(entry_->offset, entry_->kind, CursorOffset()); + return false; + } + } else { + cfa_rule->SetBaseRegister(ops.register_number); + if (!cfa_rule->Handle(handler_, address_, + Handler::kCFARegister)) + return false; + } + break; + } + + // Change the offset used to compute the CFA. + case DW_CFA_def_cfa_offset: + if (!ParseOperands("o", &ops) || + !DoDefCFAOffset(ops.offset)) + return false; + break; + + // Change the offset used to compute the CFA. + case DW_CFA_def_cfa_offset_sf: + if (!ParseOperands("s", &ops) || + !DoDefCFAOffset(ops.signed_offset * cie->data_alignment_factor)) + return false; + break; + + // Specify an expression whose value is the CFA. + case DW_CFA_def_cfa_expression: { + if (!ParseOperands("e", &ops)) + return false; + Rule *rule = new ValExpressionRule(ops.expression); + rules_.SetCFARule(rule); + if (!rule->Handle(handler_, address_, + Handler::kCFARegister)) + return false; + break; + } + + // The register's value cannot be recovered. + case DW_CFA_undefined: { + if (!ParseOperands("r", &ops) || + !DoRule(ops.register_number, new UndefinedRule())) + return false; + break; + } + + // The register's value is unchanged from its value in the caller. + case DW_CFA_same_value: { + if (!ParseOperands("r", &ops) || + !DoRule(ops.register_number, new SameValueRule())) + return false; + break; + } + + // Find a register at an offset from the CFA. + case DW_CFA_offset_extended: + if (!ParseOperands("ro", &ops) || + !DoOffset(ops.register_number, + ops.offset * cie->data_alignment_factor)) + return false; + break; + + // The register is saved at an offset from the CFA. + case DW_CFA_offset_extended_sf: + if (!ParseOperands("rs", &ops) || + !DoOffset(ops.register_number, + ops.signed_offset * cie->data_alignment_factor)) + return false; + break; + + // The register is saved at an offset from the CFA. + case DW_CFA_GNU_negative_offset_extended: + if (!ParseOperands("ro", &ops) || + !DoOffset(ops.register_number, + -ops.offset * cie->data_alignment_factor)) + return false; + break; + + // The register's value is the sum of the CFA plus an offset. + case DW_CFA_val_offset: + if (!ParseOperands("ro", &ops) || + !DoValOffset(ops.register_number, + ops.offset * cie->data_alignment_factor)) + return false; + break; + + // The register's value is the sum of the CFA plus an offset. 
+ case DW_CFA_val_offset_sf: + if (!ParseOperands("rs", &ops) || + !DoValOffset(ops.register_number, + ops.signed_offset * cie->data_alignment_factor)) + return false; + break; + + // The register has been saved in another register. + case DW_CFA_register: { + if (!ParseOperands("ro", &ops) || + !DoRule(ops.register_number, new RegisterRule(ops.offset))) + return false; + break; + } + + // An expression yields the address at which the register is saved. + case DW_CFA_expression: { + if (!ParseOperands("re", &ops) || + !DoRule(ops.register_number, new ExpressionRule(ops.expression))) + return false; + break; + } + + // An expression yields the caller's value for the register. + case DW_CFA_val_expression: { + if (!ParseOperands("re", &ops) || + !DoRule(ops.register_number, new ValExpressionRule(ops.expression))) + return false; + break; + } + + // Restore the rule established for a register by the CIE. + case DW_CFA_restore_extended: + if (!ParseOperands("r", &ops) || + !DoRestore( ops.register_number)) + return false; + break; + + // Save the current set of rules on a stack. + case DW_CFA_remember_state: + saved_rules_.push(rules_); + break; + + // Pop the current set of rules off the stack. + case DW_CFA_restore_state: { + if (saved_rules_.empty()) { + reporter_->EmptyStateStack(entry_->offset, entry_->kind, + CursorOffset()); + return false; + } + const RuleMap &new_rules = saved_rules_.top(); + if (rules_.CFARule() && !new_rules.CFARule()) { + reporter_->ClearingCFARule(entry_->offset, entry_->kind, + CursorOffset()); + return false; + } + rules_.HandleTransitionTo(handler_, address_, new_rules); + rules_ = new_rules; + saved_rules_.pop(); + break; + } + + // No operation. (Padding instruction.) + case DW_CFA_nop: + break; + + // A SPARC register window save: Registers 8 through 15 (%o0-%o7) + // are saved in registers 24 through 31 (%i0-%i7), and registers + // 16 through 31 (%l0-%l7 and %i0-%i7) are saved at CFA offsets + // (0-15 * the register size). The register numbers must be + // hard-coded. A GNU extension, and not a pretty one. + case DW_CFA_GNU_window_save: { + // Save %o0-%o7 in %i0-%i7. + for (int i = 8; i < 16; i++) + if (!DoRule(i, new RegisterRule(i + 16))) + return false; + // Save %l0-%l7 and %i0-%i7 at the CFA. + for (int i = 16; i < 32; i++) + // Assume that the byte reader's address size is the same as + // the architecture's register size. !@#%*^ hilarious. + if (!DoRule(i, new OffsetRule(Handler::kCFARegister, + (i - 16) * reader_->AddressSize()))) + return false; + break; + } + + // I'm not sure what this is. GDB doesn't use it for unwinding. + case DW_CFA_GNU_args_size: + if (!ParseOperands("o", &ops)) return false; + break; + + // An opcode we don't recognize. 
+ default: { + reporter_->BadInstruction(entry_->offset, entry_->kind, CursorOffset()); + return false; + } + } + + return true; +} + +bool CallFrameInfo::State::DoDefCFA(unsigned base_register, long offset) { + Rule *rule = new ValOffsetRule(base_register, offset); + rules_.SetCFARule(rule); + return rule->Handle(handler_, address_, + Handler::kCFARegister); +} + +bool CallFrameInfo::State::DoDefCFAOffset(long offset) { + Rule *cfa_rule = rules_.CFARule(); + if (!cfa_rule) { + reporter_->NoCFARule(entry_->offset, entry_->kind, CursorOffset()); + return false; + } + cfa_rule->SetOffset(offset); + return cfa_rule->Handle(handler_, address_, + Handler::kCFARegister); +} + +bool CallFrameInfo::State::DoRule(unsigned reg, Rule *rule) { + rules_.SetRegisterRule(reg, rule); + return rule->Handle(handler_, address_, reg); +} + +bool CallFrameInfo::State::DoOffset(unsigned reg, long offset) { + if (!rules_.CFARule()) { + reporter_->NoCFARule(entry_->offset, entry_->kind, CursorOffset()); + return false; + } + return DoRule(reg, + new OffsetRule(Handler::kCFARegister, offset)); +} + +bool CallFrameInfo::State::DoValOffset(unsigned reg, long offset) { + if (!rules_.CFARule()) { + reporter_->NoCFARule(entry_->offset, entry_->kind, CursorOffset()); + return false; + } + return DoRule(reg, + new ValOffsetRule(Handler::kCFARegister, offset)); +} + +bool CallFrameInfo::State::DoRestore(unsigned reg) { + // DW_CFA_restore and DW_CFA_restore_extended don't make sense in a CIE. + if (entry_->kind == kCIE) { + reporter_->RestoreInCIE(entry_->offset, CursorOffset()); + return false; + } + Rule *rule = cie_rules_.RegisterRule(reg); + if (!rule) { + // This isn't really the right thing to do, but since CFI generally + // only mentions callee-saves registers, and GCC's convention for + // callee-saves registers is that they are unchanged, it's a good + // approximation. + rule = new SameValueRule(); + } + return DoRule(reg, rule); +} + +bool CallFrameInfo::ReadEntryPrologue(const char *cursor, Entry *entry) { + const char *buffer_end = buffer_ + buffer_length_; + + // Initialize enough of ENTRY for use in error reporting. + entry->offset = cursor - buffer_; + entry->start = cursor; + entry->kind = kUnknown; + entry->end = NULL; + + // Read the initial length. This sets reader_'s offset size. + size_t length_size; + uint64 length = reader_->ReadInitialLength(cursor, &length_size); + if (length_size > size_t(buffer_end - cursor)) + return ReportIncomplete(entry); + cursor += length_size; + + // In a .eh_frame section, a length of zero marks the end of the series + // of entries. + if (length == 0 && eh_frame_) { + entry->kind = kTerminator; + entry->end = cursor; + return true; + } + + // Validate the length. + if (length > size_t(buffer_end - cursor)) + return ReportIncomplete(entry); + + // The length is the number of bytes after the initial length field; + // we have that position handy at this point, so compute the end + // now. (If we're parsing 64-bit-offset DWARF on a 32-bit machine, + // and the length didn't fit in a size_t, we would have rejected it + // above.) + entry->end = cursor + length; + + // Parse the next field: either the offset of a CIE or a CIE id. + size_t offset_size = reader_->OffsetSize(); + if (offset_size > size_t(entry->end - cursor)) return ReportIncomplete(entry); + entry->id = reader_->ReadOffset(cursor); + + // Don't advance cursor past id field yet; in .eh_frame data we need + // the id's position to compute the section offset of an FDE's CIE. 
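+  // For example, in .eh_frame data, if this FDE's id field sits at section
+  // offset 0x40 and holds the value 0x38, its CIE begins at section offset
+  // 0x40 - 0x38 = 0x08; the eh_frame_ branch below performs exactly that
+  // subtraction, using (cursor - buffer_) as the id field's offset.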
+
+  // Now we can decide what kind of entry this is.
+  if (eh_frame_) {
+    // In .eh_frame data, an ID of zero marks the entry as a CIE, and
+    // anything else is an offset from the id field of the FDE to the start
+    // of the CIE.
+    if (entry->id == 0) {
+      entry->kind = kCIE;
+    } else {
+      entry->kind = kFDE;
+      // Turn the offset from the id into an offset from the buffer's start.
+      entry->id = (cursor - buffer_) - entry->id;
+    }
+  } else {
+    // In DWARF CFI data, an ID of ~0 (of the appropriate width, given the
+    // offset size for the entry) marks the entry as a CIE, and anything
+    // else is the offset of the CIE from the beginning of the section.
+    if (offset_size == 4)
+      entry->kind = (entry->id == 0xffffffff) ? kCIE : kFDE;
+    else {
+      assert(offset_size == 8);
+      entry->kind = (entry->id == 0xffffffffffffffffULL) ? kCIE : kFDE;
+    }
+  }
+
+  // Now advance cursor past the id.
+  cursor += offset_size;
+
+  // The fields specific to this kind of entry start here.
+  entry->fields = cursor;
+
+  entry->cie = NULL;
+
+  return true;
+}
+
+bool CallFrameInfo::ReadCIEFields(CIE *cie) {
+  const char *cursor = cie->fields;
+  size_t len;
+
+  assert(cie->kind == kCIE);
+
+  // Prepare for early exit.
+  cie->version = 0;
+  cie->augmentation.clear();
+  cie->code_alignment_factor = 0;
+  cie->data_alignment_factor = 0;
+  cie->return_address_register = 0;
+  cie->has_z_augmentation = false;
+  cie->pointer_encoding = DW_EH_PE_absptr;
+  cie->instructions = 0;
+
+  // Parse the version number.
+  if (cie->end - cursor < 1)
+    return ReportIncomplete(cie);
+  cie->version = reader_->ReadOneByte(cursor);
+  cursor++;
+
+  // If we don't recognize the version, we can't parse any more fields of the
+  // CIE. For DWARF CFI, we handle versions 1 through 3 (there was never a
+  // version 2 of CFI data). For .eh_frame, we handle versions 1 and 3 as well;
+  // the difference between those versions seems to be the same as for
+  // .debug_frame.
+  if (cie->version < 1 || cie->version > 3) {
+    reporter_->UnrecognizedVersion(cie->offset, cie->version);
+    return false;
+  }
+
+  const char *augmentation_start = cursor;
+  const void *augmentation_end =
+    memchr(augmentation_start, '\0', cie->end - augmentation_start);
+  if (! augmentation_end) return ReportIncomplete(cie);
+  cursor = static_cast<const char *>(augmentation_end);
+  cie->augmentation = string(augmentation_start,
+                             cursor - augmentation_start);
+  // Skip the terminating '\0'.
+  cursor++;
+
+  // Is this CFI augmented?
+  if (!cie->augmentation.empty()) {
+    // Is it an augmentation we recognize?
+    if (cie->augmentation[0] == DW_Z_augmentation_start) {
+      // Linux C++ ABI 'z' augmentation, used for exception handling data.
+      cie->has_z_augmentation = true;
+    } else {
+      // Not an augmentation we recognize. Augmentations can have arbitrary
+      // effects on the form of rest of the content, so we have to give up.
+      reporter_->UnrecognizedAugmentation(cie->offset, cie->augmentation);
+      return false;
+    }
+  }
+
+  // Parse the code alignment factor.
+  cie->code_alignment_factor = reader_->ReadUnsignedLEB128(cursor, &len);
+  if (size_t(cie->end - cursor) < len) return ReportIncomplete(cie);
+  cursor += len;
+
+  // Parse the data alignment factor.
+  cie->data_alignment_factor = reader_->ReadSignedLEB128(cursor, &len);
+  if (size_t(cie->end - cursor) < len) return ReportIncomplete(cie);
+  cursor += len;
+
+  // Parse the return address register. This is a ubyte in version 1, and
+  // a ULEB128 in version 3.
+ if (cie->version == 1) { + if (cursor >= cie->end) return ReportIncomplete(cie); + cie->return_address_register = uint8(*cursor++); + } else { + cie->return_address_register = reader_->ReadUnsignedLEB128(cursor, &len); + if (size_t(cie->end - cursor) < len) return ReportIncomplete(cie); + cursor += len; + } + + // If we have a 'z' augmentation string, find the augmentation data and + // use the augmentation string to parse it. + if (cie->has_z_augmentation) { + uint64_t data_size = reader_->ReadUnsignedLEB128(cursor, &len); + if (size_t(cie->end - cursor) < len + data_size) + return ReportIncomplete(cie); + cursor += len; + const char *data = cursor; + cursor += data_size; + const char *data_end = cursor; + + cie->has_z_lsda = false; + cie->has_z_personality = false; + cie->has_z_signal_frame = false; + + // Walk the augmentation string, and extract values from the + // augmentation data as the string directs. + for (size_t i = 1; i < cie->augmentation.size(); i++) { + switch (cie->augmentation[i]) { + case DW_Z_has_LSDA: + // The CIE's augmentation data holds the language-specific data + // area pointer's encoding, and the FDE's augmentation data holds + // the pointer itself. + cie->has_z_lsda = true; + // Fetch the LSDA encoding from the augmentation data. + if (data >= data_end) return ReportIncomplete(cie); + cie->lsda_encoding = DwarfPointerEncoding(*data++); + if (!reader_->ValidEncoding(cie->lsda_encoding)) { + reporter_->InvalidPointerEncoding(cie->offset, cie->lsda_encoding); + return false; + } + // Don't check if the encoding is usable here --- we haven't + // read the FDE's fields yet, so we're not prepared for + // DW_EH_PE_funcrel, although that's a fine encoding for the + // LSDA to use, since it appears in the FDE. + break; + + case DW_Z_has_personality_routine: + // The CIE's augmentation data holds the personality routine + // pointer's encoding, followed by the pointer itself. + cie->has_z_personality = true; + // Fetch the personality routine pointer's encoding from the + // augmentation data. + if (data >= data_end) return ReportIncomplete(cie); + cie->personality_encoding = DwarfPointerEncoding(*data++); + if (!reader_->ValidEncoding(cie->personality_encoding)) { + reporter_->InvalidPointerEncoding(cie->offset, + cie->personality_encoding); + return false; + } + if (!reader_->UsableEncoding(cie->personality_encoding)) { + reporter_->UnusablePointerEncoding(cie->offset, + cie->personality_encoding); + return false; + } + // Fetch the personality routine's pointer itself from the data. + cie->personality_address = + reader_->ReadEncodedPointer(data, cie->personality_encoding, + &len); + if (len > size_t(data_end - data)) + return ReportIncomplete(cie); + data += len; + break; + + case DW_Z_has_FDE_address_encoding: + // The CIE's augmentation data holds the pointer encoding to use + // for addresses in the FDE. + if (data >= data_end) return ReportIncomplete(cie); + cie->pointer_encoding = DwarfPointerEncoding(*data++); + if (!reader_->ValidEncoding(cie->pointer_encoding)) { + reporter_->InvalidPointerEncoding(cie->offset, + cie->pointer_encoding); + return false; + } + if (!reader_->UsableEncoding(cie->pointer_encoding)) { + reporter_->UnusablePointerEncoding(cie->offset, + cie->pointer_encoding); + return false; + } + break; + + case DW_Z_is_signal_trampoline: + // Frames using this CIE are signal delivery frames. + cie->has_z_signal_frame = true; + break; + + default: + // An augmentation we don't recognize. 
+ reporter_->UnrecognizedAugmentation(cie->offset, cie->augmentation); + return false; + } + } + } + + // The CIE's instructions start here. + cie->instructions = cursor; + + return true; +} + +bool CallFrameInfo::ReadFDEFields(FDE *fde) { + const char *cursor = fde->fields; + size_t size; + + fde->address = reader_->ReadEncodedPointer(cursor, fde->cie->pointer_encoding, + &size); + if (size > size_t(fde->end - cursor)) + return ReportIncomplete(fde); + cursor += size; + reader_->SetFunctionBase(fde->address); + + // For the length, we strip off the upper nybble of the encoding used for + // the starting address. + DwarfPointerEncoding length_encoding = + DwarfPointerEncoding(fde->cie->pointer_encoding & 0x0f); + fde->size = reader_->ReadEncodedPointer(cursor, length_encoding, &size); + if (size > size_t(fde->end - cursor)) + return ReportIncomplete(fde); + cursor += size; + + // If the CIE has a 'z' augmentation string, then augmentation data + // appears here. + if (fde->cie->has_z_augmentation) { + uint64_t data_size = reader_->ReadUnsignedLEB128(cursor, &size); + if (size_t(fde->end - cursor) < size + data_size) + return ReportIncomplete(fde); + cursor += size; + + // In the abstract, we should walk the augmentation string, and extract + // items from the FDE's augmentation data as we encounter augmentation + // string characters that specify their presence: the ordering of items + // in the augmentation string determines the arrangement of values in + // the augmentation data. + // + // In practice, there's only ever one value in FDE augmentation data + // that we support --- the LSDA pointer --- and we have to bail if we + // see any unrecognized augmentation string characters. So if there is + // anything here at all, we know what it is, and where it starts. + if (fde->cie->has_z_lsda) { + // Check whether the LSDA's pointer encoding is usable now: only once + // we've parsed the FDE's starting address do we call reader_-> + // SetFunctionBase, so that the DW_EH_PE_funcrel encoding becomes + // usable. + if (!reader_->UsableEncoding(fde->cie->lsda_encoding)) { + reporter_->UnusablePointerEncoding(fde->cie->offset, + fde->cie->lsda_encoding); + return false; + } + + fde->lsda_address = + reader_->ReadEncodedPointer(cursor, fde->cie->lsda_encoding, &size); + if (size > data_size) + return ReportIncomplete(fde); + // Ideally, we would also complain here if there were unconsumed + // augmentation data. + } + + cursor += data_size; + } + + // The FDE's instructions start after those. + fde->instructions = cursor; + + return true; +} + +bool CallFrameInfo::Start() { + const char *buffer_end = buffer_ + buffer_length_; + const char *cursor; + bool all_ok = true; + const char *entry_end; + bool ok; + + // Traverse all the entries in buffer_, skipping CIEs and offering + // FDEs to the handler. + for (cursor = buffer_; cursor < buffer_end; + cursor = entry_end, all_ok = all_ok && ok) { + FDE fde; + + // Make it easy to skip this entry with 'continue': assume that + // things are not okay until we've checked all the data, and + // prepare the address of the next entry. + ok = false; + + // Read the entry's prologue. + if (!ReadEntryPrologue(cursor, &fde)) { + if (!fde.end) { + // If we couldn't even figure out this entry's extent, then we + // must stop processing entries altogether. + all_ok = false; + break; + } + entry_end = fde.end; + continue; + } + + // The next iteration picks up after this entry. + entry_end = fde.end; + + // Did we see an .eh_frame terminating mark? 
+ if (fde.kind == kTerminator) { + // If there appears to be more data left in the section after the + // terminating mark, warn the user. But this is just a warning; + // we leave all_ok true. + if (fde.end < buffer_end) reporter_->EarlyEHTerminator(fde.offset); + break; + } + + // In this loop, we skip CIEs. We only parse them fully when we + // parse an FDE that refers to them. This limits our memory + // consumption (beyond the buffer itself) to that needed to + // process the largest single entry. + if (fde.kind != kFDE) { + ok = true; + continue; + } + + // Validate the CIE pointer. + if (fde.id > buffer_length_) { + reporter_->CIEPointerOutOfRange(fde.offset, fde.id); + continue; + } + + CIE cie; + + // Parse this FDE's CIE header. + if (!ReadEntryPrologue(buffer_ + fde.id, &cie)) + continue; + // This had better be an actual CIE. + if (cie.kind != kCIE) { + reporter_->BadCIEId(fde.offset, fde.id); + continue; + } + if (!ReadCIEFields(&cie)) + continue; + + // We now have the values that govern both the CIE and the FDE. + cie.cie = &cie; + fde.cie = &cie; + + // Parse the FDE's header. + if (!ReadFDEFields(&fde)) + continue; + + // Call Entry to ask the consumer if they're interested. + if (!handler_->Entry(fde.offset, fde.address, fde.size, + cie.version, cie.augmentation, + cie.return_address_register)) { + // The handler isn't interested in this entry. That's not an error. + ok = true; + continue; + } + + if (cie.has_z_augmentation) { + // Report the personality routine address, if we have one. + if (cie.has_z_personality) { + if (!handler_ + ->PersonalityRoutine(cie.personality_address, + IsIndirectEncoding(cie.personality_encoding))) + continue; + } + + // Report the language-specific data area address, if we have one. + if (cie.has_z_lsda) { + if (!handler_ + ->LanguageSpecificDataArea(fde.lsda_address, + IsIndirectEncoding(cie.lsda_encoding))) + continue; + } + + // If this is a signal-handling frame, report that. + if (cie.has_z_signal_frame) { + if (!handler_->SignalHandler()) + continue; + } + } + + // Interpret the CIE's instructions, and then the FDE's instructions. + State state(reader_, handler_, reporter_, fde.address); + ok = state.InterpretCIE(cie) && state.InterpretFDE(fde); + + // Tell the ByteReader that the function start address from the + // FDE header is no longer valid. + reader_->ClearFunctionBase(); + + // Report the end of the entry. 
+ handler_->End(); + } + + return all_ok; +} + +const char *CallFrameInfo::KindName(EntryKind kind) { + if (kind == CallFrameInfo::kUnknown) + return "entry"; + else if (kind == CallFrameInfo::kCIE) + return "common information entry"; + else if (kind == CallFrameInfo::kFDE) + return "frame description entry"; + else { + assert (kind == CallFrameInfo::kTerminator); + return ".eh_frame sequence terminator"; + } +} + +bool CallFrameInfo::ReportIncomplete(Entry *entry) { + reporter_->Incomplete(entry->offset, entry->kind); + return false; +} + +void CallFrameInfo::Reporter::Incomplete(uint64 offset, + CallFrameInfo::EntryKind kind) { + fprintf(stderr, + "%s: CFI %s at offset 0x%llx in '%s': entry ends early\n", + filename_.c_str(), CallFrameInfo::KindName(kind), offset, + section_.c_str()); +} + +void CallFrameInfo::Reporter::EarlyEHTerminator(uint64 offset) { + fprintf(stderr, + "%s: CFI at offset 0x%llx in '%s': saw end-of-data marker" + " before end of section contents\n", + filename_.c_str(), offset, section_.c_str()); +} + +void CallFrameInfo::Reporter::CIEPointerOutOfRange(uint64 offset, + uint64 cie_offset) { + fprintf(stderr, + "%s: CFI frame description entry at offset 0x%llx in '%s':" + " CIE pointer is out of range: 0x%llx\n", + filename_.c_str(), offset, section_.c_str(), cie_offset); +} + +void CallFrameInfo::Reporter::BadCIEId(uint64 offset, uint64 cie_offset) { + fprintf(stderr, + "%s: CFI frame description entry at offset 0x%llx in '%s':" + " CIE pointer does not point to a CIE: 0x%llx\n", + filename_.c_str(), offset, section_.c_str(), cie_offset); +} + +void CallFrameInfo::Reporter::UnrecognizedVersion(uint64 offset, int version) { + fprintf(stderr, + "%s: CFI frame description entry at offset 0x%llx in '%s':" + " CIE specifies unrecognized version: %d\n", + filename_.c_str(), offset, section_.c_str(), version); +} + +void CallFrameInfo::Reporter::UnrecognizedAugmentation(uint64 offset, + const string &aug) { + fprintf(stderr, + "%s: CFI frame description entry at offset 0x%llx in '%s':" + " CIE specifies unrecognized augmentation: '%s'\n", + filename_.c_str(), offset, section_.c_str(), aug.c_str()); +} + +void CallFrameInfo::Reporter::InvalidPointerEncoding(uint64 offset, + uint8 encoding) { + fprintf(stderr, + "%s: CFI common information entry at offset 0x%llx in '%s':" + " 'z' augmentation specifies invalid pointer encoding: 0x%02x\n", + filename_.c_str(), offset, section_.c_str(), encoding); +} + +void CallFrameInfo::Reporter::UnusablePointerEncoding(uint64 offset, + uint8 encoding) { + fprintf(stderr, + "%s: CFI common information entry at offset 0x%llx in '%s':" + " 'z' augmentation specifies a pointer encoding for which" + " we have no base address: 0x%02x\n", + filename_.c_str(), offset, section_.c_str(), encoding); +} + +void CallFrameInfo::Reporter::RestoreInCIE(uint64 offset, uint64 insn_offset) { + fprintf(stderr, + "%s: CFI common information entry at offset 0x%llx in '%s':" + " the DW_CFA_restore instruction at offset 0x%llx" + " cannot be used in a common information entry\n", + filename_.c_str(), offset, section_.c_str(), insn_offset); +} + +void CallFrameInfo::Reporter::BadInstruction(uint64 offset, + CallFrameInfo::EntryKind kind, + uint64 insn_offset) { + fprintf(stderr, + "%s: CFI %s at offset 0x%llx in section '%s':" + " the instruction at offset 0x%llx is unrecognized\n", + filename_.c_str(), CallFrameInfo::KindName(kind), + offset, section_.c_str(), insn_offset); +} + +void CallFrameInfo::Reporter::NoCFARule(uint64 offset, + CallFrameInfo::EntryKind 
kind, + uint64 insn_offset) { + fprintf(stderr, + "%s: CFI %s at offset 0x%llx in section '%s':" + " the instruction at offset 0x%llx assumes that a CFA rule has" + " been set, but none has been set\n", + filename_.c_str(), CallFrameInfo::KindName(kind), offset, + section_.c_str(), insn_offset); +} + +void CallFrameInfo::Reporter::EmptyStateStack(uint64 offset, + CallFrameInfo::EntryKind kind, + uint64 insn_offset) { + fprintf(stderr, + "%s: CFI %s at offset 0x%llx in section '%s':" + " the DW_CFA_restore_state instruction at offset 0x%llx" + " should pop a saved state from the stack, but the stack is empty\n", + filename_.c_str(), CallFrameInfo::KindName(kind), offset, + section_.c_str(), insn_offset); +} + +void CallFrameInfo::Reporter::ClearingCFARule(uint64 offset, + CallFrameInfo::EntryKind kind, + uint64 insn_offset) { + fprintf(stderr, + "%s: CFI %s at offset 0x%llx in section '%s':" + " the DW_CFA_restore_state instruction at offset 0x%llx" + " would clear the CFA rule in effect\n", + filename_.c_str(), CallFrameInfo::KindName(kind), offset, + section_.c_str(), insn_offset); +} + +} // namespace dwarf2reader diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2reader.h b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2reader.h new file mode 100644 index 0000000000..8824bf90e7 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2reader.h @@ -0,0 +1,1050 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2010 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// CFI reader author: Jim Blandy + +// This file contains definitions related to the DWARF2/3 reader and +// it's handler interfaces. +// The DWARF2/3 specification can be found at +// http://dwarf.freestandards.org and should be considered required +// reading if you wish to modify the implementation. 
+// Only a cursory attempt is made to explain terminology that is
+// used here, as it is much better explained in the standard documents
+#ifndef COMMON_DWARF_DWARF2READER_H__
+#define COMMON_DWARF_DWARF2READER_H__
+
+#include <list>
+#include <map>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "common/dwarf/bytereader.h"
+#include "common/dwarf/dwarf2enums.h"
+#include "common/dwarf/types.h"
+#include "common/using_std_string.h"
+
+namespace dwarf2reader {
+struct LineStateMachine;
+class Dwarf2Handler;
+class LineInfoHandler;
+
+// This maps from a string naming a section to a pair containing
+// the data for the section, and the size of the section.
+typedef std::map<string, std::pair<const char*, uint64> > SectionMap;
+typedef std::list<std::pair<enum DwarfAttribute, enum DwarfForm> >
+    AttributeList;
+typedef AttributeList::iterator AttributeIterator;
+typedef AttributeList::const_iterator ConstAttributeIterator;
+
+struct LineInfoHeader {
+  uint64 total_length;
+  uint16 version;
+  uint64 prologue_length;
+  uint8 min_insn_length; // insn stands for instruction
+  bool default_is_stmt; // stmt stands for statement
+  int8 line_base;
+  uint8 line_range;
+  uint8 opcode_base;
+  // Use a pointer so that signalsafe_addr2line is able to use this structure
+  // without heap allocation problem.
+  std::vector<unsigned char> *std_opcode_lengths;
+};
+
+class LineInfo {
+ public:
+
+  // Initializes a .debug_line reader. Buffer and buffer length point
+  // to the beginning and length of the line information to read.
+  // Reader is a ByteReader class that has the endianness set
+  // properly.
+  LineInfo(const char* buffer_, uint64 buffer_length,
+           ByteReader* reader, LineInfoHandler* handler);
+
+  virtual ~LineInfo() {
+    if (header_.std_opcode_lengths) {
+      delete header_.std_opcode_lengths;
+    }
+  }
+
+  // Start processing line info, and calling callbacks in the handler.
+  // Consumes the line number information for a single compilation unit.
+  // Returns the number of bytes processed.
+  uint64 Start();
+
+  // Process a single line info opcode at START using the state
+  // machine at LSM. Return true if we should define a line using the
+  // current state of the line state machine. Place the length of the
+  // opcode in LEN.
+  // If LSM_PASSES_PC is non-NULL, this function also checks if the lsm
+  // passes the address of PC. In other words, LSM_PASSES_PC will be
+  // set to true, if the following condition is met.
+  //
+  // lsm's old address < PC <= lsm's new address
+  static bool ProcessOneOpcode(ByteReader* reader,
+                               LineInfoHandler* handler,
+                               const struct LineInfoHeader &header,
+                               const char* start,
+                               struct LineStateMachine* lsm,
+                               size_t* len,
+                               uintptr pc,
+                               bool *lsm_passes_pc);
+
+ private:
+  // Reads the DWARF2/3 header for this line info.
+  void ReadHeader();
+
+  // Reads the DWARF2/3 line information
+  void ReadLines();
+
+  // The associated handler to call processing functions in
+  LineInfoHandler* handler_;
+
+  // The associated ByteReader that handles endianness issues for us
+  ByteReader* reader_;
+
+  // A DWARF2/3 line info header. This is not the same size as
+  // in the actual file, as the one in the file may have a 32 bit or
+  // 64 bit length
+
+  struct LineInfoHeader header_;
+
+  // buffer is the buffer for our line info, starting at exactly where
+  // the line info to read is. after_header is the place right after
+  // the end of the line information header.
+  const char* buffer_;
+  uint64 buffer_length_;
+  const char* after_header_;
+};
+
+// This class is the main interface between the line info reader and
+// the client.
The virtual functions inside this get called for +// interesting events that happen during line info reading. The +// default implementation does nothing + +class LineInfoHandler { + public: + LineInfoHandler() { } + + virtual ~LineInfoHandler() { } + + // Called when we define a directory. NAME is the directory name, + // DIR_NUM is the directory number + virtual void DefineDir(const string& name, uint32 dir_num) { } + + // Called when we define a filename. NAME is the filename, FILE_NUM + // is the file number which is -1 if the file index is the next + // index after the last numbered index (this happens when files are + // dynamically defined by the line program), DIR_NUM is the + // directory index for the directory name of this file, MOD_TIME is + // the modification time of the file, and LENGTH is the length of + // the file + virtual void DefineFile(const string& name, int32 file_num, + uint32 dir_num, uint64 mod_time, + uint64 length) { } + + // Called when the line info reader has a new line, address pair + // ready for us. ADDRESS is the address of the code, LENGTH is the + // length of its machine code in bytes, FILE_NUM is the file number + // containing the code, LINE_NUM is the line number in that file for + // the code, and COLUMN_NUM is the column number the code starts at, + // if we know it (0 otherwise). + virtual void AddLine(uint64 address, uint64 length, + uint32 file_num, uint32 line_num, uint32 column_num) { } +}; + +// The base of DWARF2/3 debug info is a DIE (Debugging Information +// Entry. +// DWARF groups DIE's into a tree and calls the root of this tree a +// "compilation unit". Most of the time, there is one compilation +// unit in the .debug_info section for each file that had debug info +// generated. +// Each DIE consists of + +// 1. a tag specifying a thing that is being described (ie +// DW_TAG_subprogram for functions, DW_TAG_variable for variables, etc +// 2. attributes (such as DW_AT_location for location in memory, +// DW_AT_name for name), and data for each attribute. +// 3. A flag saying whether the DIE has children or not + +// In order to gain some amount of compression, the format of +// each DIE (tag name, attributes and data forms for the attributes) +// are stored in a separate table called the "abbreviation table". +// This is done because a large number of DIEs have the exact same tag +// and list of attributes, but different data for those attributes. +// As a result, the .debug_info section is just a stream of data, and +// requires reading of the .debug_abbrev section to say what the data +// means. + +// As a warning to the user, it should be noted that the reason for +// using absolute offsets from the beginning of .debug_info is that +// DWARF2/3 supports referencing DIE's from other DIE's by their offset +// from either the current compilation unit start, *or* the beginning +// of the .debug_info section. This means it is possible to reference +// a DIE in one compilation unit from a DIE in another compilation +// unit. This style of reference is usually used to eliminate +// duplicated information that occurs across compilation +// units, such as base types, etc. GCC 3.4+ support this with +// -feliminate-dwarf2-dups. Other toolchains will sometimes do +// duplicate elimination in the linker. + +class CompilationUnit { + public: + + // Initialize a compilation unit. This requires a map of sections, + // the offset of this compilation unit in the .debug_info section, a + // ByteReader, and a Dwarf2Handler class to call callbacks in. 
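+  // For example, a caller can walk every compilation unit in .debug_info by
+  // constructing a CompilationUnit at offset 0, calling Start(), and then
+  // repeating with the offset advanced by Start()'s return value until the
+  // end of the section is reached (see the Start() documentation below).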
+  CompilationUnit(const SectionMap& sections, uint64 offset,
+                  ByteReader* reader, Dwarf2Handler* handler);
+  virtual ~CompilationUnit() {
+    if (abbrevs_) delete abbrevs_;
+  }
+
+  // Begin reading a Dwarf2 compilation unit, and calling the
+  // callbacks in the Dwarf2Handler
+
+  // Return the full length of the compilation unit, including
+  // headers. This plus the starting offset passed to the constructor
+  // is the offset of the end of the compilation unit --- and the
+  // start of the next compilation unit, if there is one.
+  uint64 Start();
+
+ private:
+
+  // This struct represents a single DWARF2/3 abbreviation
+  // The abbreviation tells how to read a DWARF2/3 DIE, and consist of a
+  // tag and a list of attributes, as well as the data form of each attribute.
+  struct Abbrev {
+    uint64 number;
+    enum DwarfTag tag;
+    bool has_children;
+    AttributeList attributes;
+  };
+
+  // A DWARF2/3 compilation unit header. This is not the same size as
+  // in the actual file, as the one in the file may have a 32 bit or
+  // 64 bit length.
+  struct CompilationUnitHeader {
+    uint64 length;
+    uint16 version;
+    uint64 abbrev_offset;
+    uint8 address_size;
+  } header_;
+
+  // Reads the DWARF2/3 header for this compilation unit.
+  void ReadHeader();
+
+  // Reads the DWARF2/3 abbreviations for this compilation unit
+  void ReadAbbrevs();
+
+  // Processes a single DIE for this compilation unit and return a new
+  // pointer just past the end of it
+  const char* ProcessDIE(uint64 dieoffset,
+                         const char* start,
+                         const Abbrev& abbrev);
+
+  // Processes a single attribute and return a new pointer just past the
+  // end of it
+  const char* ProcessAttribute(uint64 dieoffset,
+                               const char* start,
+                               enum DwarfAttribute attr,
+                               enum DwarfForm form);
+
+  // Processes all DIEs for this compilation unit
+  void ProcessDIEs();
+
+  // Skips the die with attributes specified in ABBREV starting at
+  // START, and return the new place to position the stream to.
+  const char* SkipDIE(const char* start,
+                      const Abbrev& abbrev);
+
+  // Skips the attribute starting at START, with FORM, and return the
+  // new place to position the stream to.
+  const char* SkipAttribute(const char* start,
+                            enum DwarfForm form);
+
+  // Offset from section start is the offset of this compilation unit
+  // from the beginning of the .debug_info section.
+  uint64 offset_from_section_start_;
+
+  // buffer is the buffer for our CU, starting at .debug_info + offset
+  // passed in from constructor.
+  // after_header points to right after the compilation unit header.
+  const char* buffer_;
+  uint64 buffer_length_;
+  const char* after_header_;
+
+  // The associated ByteReader that handles endianness issues for us
+  ByteReader* reader_;
+
+  // The map of sections in our file to buffers containing their data
+  const SectionMap& sections_;
+
+  // The associated handler to call processing functions in
+  Dwarf2Handler* handler_;
+
+  // Set of DWARF2/3 abbreviations for this compilation unit. Indexed
+  // by abbreviation number, which means that abbrevs_[0] is not
+  // valid.
+  std::vector<Abbrev>* abbrevs_;
+
+  // String section buffer and length, if we have a string section.
+  // This is here to avoid doing a section lookup for strings in
+  // ProcessAttribute, which is in the hot path for DWARF2 reading.
+  const char* string_buffer_;
+  uint64 string_buffer_length_;
+};
+
+// This class is the main interface between the reader and the
+// client. The virtual functions inside this get called for
+// interesting events that happen during DWARF2 reading.
+// The default implementation skips everything. + +class Dwarf2Handler { + public: + Dwarf2Handler() { } + + virtual ~Dwarf2Handler() { } + + // Start to process a compilation unit at OFFSET from the beginning of the + // .debug_info section. Return false if you would like to skip this + // compilation unit. + virtual bool StartCompilationUnit(uint64 offset, uint8 address_size, + uint8 offset_size, uint64 cu_length, + uint8 dwarf_version) { return false; } + + // Start to process a DIE at OFFSET from the beginning of the .debug_info + // section. Return false if you would like to skip this DIE. + virtual bool StartDIE(uint64 offset, enum DwarfTag tag) { return false; } + + // Called when we have an attribute with unsigned data to give to our + // handler. The attribute is for the DIE at OFFSET from the beginning of the + // .debug_info section. Its name is ATTR, its form is FORM, and its value is + // DATA. + virtual void ProcessAttributeUnsigned(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data) { } + + // Called when we have an attribute with signed data to give to our handler. + // The attribute is for the DIE at OFFSET from the beginning of the + // .debug_info section. Its name is ATTR, its form is FORM, and its value is + // DATA. + virtual void ProcessAttributeSigned(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + int64 data) { } + + // Called when we have an attribute whose value is a reference to + // another DIE. The attribute belongs to the DIE at OFFSET from the + // beginning of the .debug_info section. Its name is ATTR, its form + // is FORM, and the offset of the DIE being referred to from the + // beginning of the .debug_info section is DATA. + virtual void ProcessAttributeReference(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data) { } + + // Called when we have an attribute with a buffer of data to give to our + // handler. The attribute is for the DIE at OFFSET from the beginning of the + // .debug_info section. Its name is ATTR, its form is FORM, DATA points to + // the buffer's contents, and its length in bytes is LENGTH. The buffer is + // owned by the caller, not the callee, and may not persist for very long. + // If you want the data to be available later, it needs to be copied. + virtual void ProcessAttributeBuffer(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + const char* data, + uint64 len) { } + + // Called when we have an attribute with string data to give to our handler. + // The attribute is for the DIE at OFFSET from the beginning of the + // .debug_info section. Its name is ATTR, its form is FORM, and its value is + // DATA. + virtual void ProcessAttributeString(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + const string& data) { } + + // Called when we have an attribute whose value is the 64-bit signature + // of a type unit in the .debug_types section. OFFSET is the offset of + // the DIE whose attribute we're reporting. ATTR and FORM are the + // attribute's name and form. SIGNATURE is the type unit's signature. + virtual void ProcessAttributeSignature(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + uint64 signature) { } + + // Called when finished processing the DIE at OFFSET. + // Because DWARF2/3 specifies a tree of DIEs, you may get starts + // before ends of the previous DIE, as we process children before + // ending the parent. 
+ virtual void EndDIE(uint64 offset) { } + +}; + +// This class is a reader for DWARF's Call Frame Information. CFI +// describes how to unwind stack frames --- even for functions that do +// not follow fixed conventions for saving registers, whose frame size +// varies as they execute, etc. +// +// CFI describes, at each machine instruction, how to compute the +// stack frame's base address, how to find the return address, and +// where to find the saved values of the caller's registers (if the +// callee has stashed them somewhere to free up the registers for its +// own use). +// +// For example, suppose we have a function whose machine code looks +// like this (imagine an assembly language that looks like C, for a +// machine with 32-bit registers, and a stack that grows towards lower +// addresses): +// +// func: ; entry point; return address at sp +// func+0: sp = sp - 16 ; allocate space for stack frame +// func+1: sp[12] = r0 ; save r0 at sp+12 +// ... ; other code, not frame-related +// func+10: sp -= 4; *sp = x ; push some x on the stack +// ... ; other code, not frame-related +// func+20: r0 = sp[16] ; restore saved r0 +// func+21: sp += 20 ; pop whole stack frame +// func+22: pc = *sp; sp += 4 ; pop return address and jump to it +// +// DWARF CFI is (a very compressed representation of) a table with a +// row for each machine instruction address and a column for each +// register showing how to restore it, if possible. +// +// A special column named "CFA", for "Canonical Frame Address", tells how +// to compute the base address of the frame; registers' entries may +// refer to the CFA in describing where the registers are saved. +// +// Another special column, named "RA", represents the return address. +// +// For example, here is a complete (uncompressed) table describing the +// function above: +// +// insn cfa r0 r1 ... ra +// ======================================= +// func+0: sp cfa[0] +// func+1: sp+16 cfa[0] +// func+2: sp+16 cfa[-4] cfa[0] +// func+11: sp+20 cfa[-4] cfa[0] +// func+21: sp+20 cfa[0] +// func+22: sp cfa[0] +// +// Some things to note here: +// +// - Each row describes the state of affairs *before* executing the +// instruction at the given address. Thus, the row for func+0 +// describes the state before we allocate the stack frame. In the +// next row, the formula for computing the CFA has changed, +// reflecting that allocation. +// +// - The other entries are written in terms of the CFA; this allows +// them to remain unchanged as the stack pointer gets bumped around. +// For example, the rule for recovering the return address (the "ra" +// column) remains unchanged throughout the function, even as the +// stack pointer takes on three different offsets from the return +// address. +// +// - Although we haven't shown it, most calling conventions designate +// "callee-saves" and "caller-saves" registers. The callee must +// preserve the values of callee-saves registers; if it uses them, +// it must save their original values somewhere, and restore them +// before it returns. In contrast, the callee is free to trash +// caller-saves registers; if the callee uses these, it will +// probably not bother to save them anywhere, and the CFI will +// probably mark their values as "unrecoverable". +// +// (However, since the caller cannot assume the callee was going to +// save them, caller-saves registers are probably dead in the caller +// anyway, so compilers usually don't generate CFA for caller-saves +// registers.) 
+// +// - Exactly where the CFA points is a matter of convention that +// depends on the architecture and ABI in use. In the example, the +// CFA is the value the stack pointer had upon entry to the +// function, pointing at the saved return address. But on the x86, +// the call frame information generated by GCC follows the +// convention that the CFA is the address *after* the saved return +// address. +// +// But by definition, the CFA remains constant throughout the +// lifetime of the frame. This makes it a useful value for other +// columns to refer to. It is also gives debuggers a useful handle +// for identifying a frame. +// +// If you look at the table above, you'll notice that a given entry is +// often the same as the one immediately above it: most instructions +// change only one or two aspects of the stack frame, if they affect +// it at all. The DWARF format takes advantage of this fact, and +// reduces the size of the data by mentioning only the addresses and +// columns at which changes take place. So for the above, DWARF CFI +// data would only actually mention the following: +// +// insn cfa r0 r1 ... ra +// ======================================= +// func+0: sp cfa[0] +// func+1: sp+16 +// func+2: cfa[-4] +// func+11: sp+20 +// func+21: r0 +// func+22: sp +// +// In fact, this is the way the parser reports CFI to the consumer: as +// a series of statements of the form, "At address X, column Y changed +// to Z," and related conventions for describing the initial state. +// +// Naturally, it would be impractical to have to scan the entire +// program's CFI, noting changes as we go, just to recover the +// unwinding rules in effect at one particular instruction. To avoid +// this, CFI data is grouped into "entries", each of which covers a +// specified range of addresses and begins with a complete statement +// of the rules for all recoverable registers at that starting +// address. Each entry typically covers a single function. +// +// Thus, to compute the contents of a given row of the table --- that +// is, rules for recovering the CFA, RA, and registers at a given +// instruction --- the consumer should find the entry that covers that +// instruction's address, start with the initial state supplied at the +// beginning of the entry, and work forward until it has processed all +// the changes up to and including those for the present instruction. +// +// There are seven kinds of rules that can appear in an entry of the +// table: +// +// - "undefined": The given register is not preserved by the callee; +// its value cannot be recovered. +// +// - "same value": This register has the same value it did in the callee. +// +// - offset(N): The register is saved at offset N from the CFA. +// +// - val_offset(N): The value the register had in the caller is the +// CFA plus offset N. (This is usually only useful for describing +// the stack pointer.) +// +// - register(R): The register's value was saved in another register R. +// +// - expression(E): Evaluating the DWARF expression E using the +// current frame's registers' values yields the address at which the +// register was saved. +// +// - val_expression(E): Evaluating the DWARF expression E using the +// current frame's registers' values yields the value the register +// had in the caller. + +class CallFrameInfo { + public: + // The different kinds of entries one finds in CFI. Used internally, + // and for error reporting. 
+ enum EntryKind { kUnknown, kCIE, kFDE, kTerminator }; + + // The handler class to which the parser hands the parsed call frame + // information. Defined below. + class Handler; + + // A reporter class, which CallFrameInfo uses to report errors + // encountered while parsing call frame information. Defined below. + class Reporter; + + // Create a DWARF CFI parser. BUFFER points to the contents of the + // .debug_frame section to parse; BUFFER_LENGTH is its length in bytes. + // REPORTER is an error reporter the parser should use to report + // problems. READER is a ByteReader instance that has the endianness and + // address size set properly. Report the data we find to HANDLER. + // + // This class can also parse Linux C++ exception handling data, as found + // in '.eh_frame' sections. This data is a variant of DWARF CFI that is + // placed in loadable segments so that it is present in the program's + // address space, and is interpreted by the C++ runtime to search the + // call stack for a handler interested in the exception being thrown, + // actually pop the frames, and find cleanup code to run. + // + // There are two differences between the call frame information described + // in the DWARF standard and the exception handling data Linux places in + // the .eh_frame section: + // + // - Exception handling data uses uses a different format for call frame + // information entry headers. The distinguished CIE id, the way FDEs + // refer to their CIEs, and the way the end of the series of entries is + // determined are all slightly different. + // + // If the constructor's EH_FRAME argument is true, then the + // CallFrameInfo parses the entry headers as Linux C++ exception + // handling data. If EH_FRAME is false or omitted, the CallFrameInfo + // parses standard DWARF call frame information. + // + // - Linux C++ exception handling data uses CIE augmentation strings + // beginning with 'z' to specify the presence of additional data after + // the CIE and FDE headers and special encodings used for addresses in + // frame description entries. + // + // CallFrameInfo can handle 'z' augmentations in either DWARF CFI or + // exception handling data if you have supplied READER with the base + // addresses needed to interpret the pointer encodings that 'z' + // augmentations can specify. See the ByteReader interface for details + // about the base addresses. See the CallFrameInfo::Handler interface + // for details about the additional information one might find in + // 'z'-augmented data. + // + // Thus: + // + // - If you are parsing standard DWARF CFI, as found in a .debug_frame + // section, you should pass false for the EH_FRAME argument, or omit + // it, and you need not worry about providing READER with the + // additional base addresses. + // + // - If you want to parse Linux C++ exception handling data from a + // .eh_frame section, you should pass EH_FRAME as true, and call + // READER's Set*Base member functions before calling our Start method. + // + // - If you want to parse DWARF CFI that uses the 'z' augmentations + // (although I don't think any toolchain ever emits such data), you + // could pass false for EH_FRAME, but call READER's Set*Base members. 
+ // + // The extensions the Linux C++ ABI makes to DWARF for exception + // handling are described here, rather poorly: + // http://refspecs.linux-foundation.org/LSB_4.0.0/LSB-Core-generic/LSB-Core-generic/dwarfext.html + // http://refspecs.linux-foundation.org/LSB_4.0.0/LSB-Core-generic/LSB-Core-generic/ehframechpt.html + // + // The mechanics of C++ exception handling, personality routines, + // and language-specific data areas are described here, rather nicely: + // http://www.codesourcery.com/public/cxx-abi/abi-eh.html + CallFrameInfo(const char *buffer, size_t buffer_length, + ByteReader *reader, Handler *handler, Reporter *reporter, + bool eh_frame = false) + : buffer_(buffer), buffer_length_(buffer_length), + reader_(reader), handler_(handler), reporter_(reporter), + eh_frame_(eh_frame) { } + + ~CallFrameInfo() { } + + // Parse the entries in BUFFER, reporting what we find to HANDLER. + // Return true if we reach the end of the section successfully, or + // false if we encounter an error. + bool Start(); + + // Return the textual name of KIND. For error reporting. + static const char *KindName(EntryKind kind); + + private: + + struct CIE; + + // A CFI entry, either an FDE or a CIE. + struct Entry { + // The starting offset of the entry in the section, for error + // reporting. + size_t offset; + + // The start of this entry in the buffer. + const char *start; + + // Which kind of entry this is. + // + // We want to be able to use this for error reporting even while we're + // in the midst of parsing. Error reporting code may assume that kind, + // offset, and start fields are valid, although kind may be kUnknown. + EntryKind kind; + + // The end of this entry's common prologue (initial length and id), and + // the start of this entry's kind-specific fields. + const char *fields; + + // The start of this entry's instructions. + const char *instructions; + + // The address past the entry's last byte in the buffer. (Note that + // since offset points to the entry's initial length field, and the + // length field is the number of bytes after that field, this is not + // simply buffer_ + offset + length.) + const char *end; + + // For both DWARF CFI and .eh_frame sections, this is the CIE id in a + // CIE, and the offset of the associated CIE in an FDE. + uint64 id; + + // The CIE that applies to this entry, if we've parsed it. If this is a + // CIE, then this field points to this structure. + CIE *cie; + }; + + // A common information entry (CIE). + struct CIE: public Entry { + uint8 version; // CFI data version number + string augmentation; // vendor format extension markers + uint64 code_alignment_factor; // scale for code address adjustments + int data_alignment_factor; // scale for stack pointer adjustments + unsigned return_address_register; // which register holds the return addr + + // True if this CIE includes Linux C++ ABI 'z' augmentation data. + bool has_z_augmentation; + + // Parsed 'z' augmentation data. These are meaningful only if + // has_z_augmentation is true. + bool has_z_lsda; // The 'z' augmentation included 'L'. + bool has_z_personality; // The 'z' augmentation included 'P'. + bool has_z_signal_frame; // The 'z' augmentation included 'S'. + + // If has_z_lsda is true, this is the encoding to be used for language- + // specific data area pointers in FDEs. + DwarfPointerEncoding lsda_encoding; + + // If has_z_personality is true, this is the encoding used for the + // personality routine pointer in the augmentation data. 
+    DwarfPointerEncoding personality_encoding;
+
+    // If has_z_personality is true, this is the address of the personality
+    // routine --- or, if personality_encoding & DW_EH_PE_indirect, the
+    // address where the personality routine's address is stored.
+    uint64 personality_address;
+
+    // This is the encoding used for addresses in the FDE header and
+    // in DW_CFA_set_loc instructions. This is always valid, whether
+    // or not we saw a 'z' augmentation string; its default value is
+    // DW_EH_PE_absptr, which is what normal DWARF CFI uses.
+    DwarfPointerEncoding pointer_encoding;
+  };
+
+  // A frame description entry (FDE).
+  struct FDE: public Entry {
+    uint64 address;   // start address of described code
+    uint64 size;      // size of described code, in bytes
+
+    // If cie->has_z_lsda is true, then this is the language-specific data
+    // area's address --- or its address's address, if cie->lsda_encoding
+    // has the DW_EH_PE_indirect bit set.
+    uint64 lsda_address;
+  };
+
+  // Internal use.
+  class Rule;
+  class UndefinedRule;
+  class SameValueRule;
+  class OffsetRule;
+  class ValOffsetRule;
+  class RegisterRule;
+  class ExpressionRule;
+  class ValExpressionRule;
+  class RuleMap;
+  class State;
+
+  // Parse the initial length and id of a CFI entry, either a CIE, an FDE,
+  // or a .eh_frame end-of-data mark. CURSOR points to the beginning of the
+  // data to parse. On success, populate ENTRY as appropriate, and return
+  // true. On failure, report the problem, and return false. Even if we
+  // return false, set ENTRY->end to the first byte after the entry if we
+  // were able to figure that out, or NULL if we weren't.
+  bool ReadEntryPrologue(const char *cursor, Entry *entry);
+
+  // Parse the fields of a CIE after the entry prologue, including any 'z'
+  // augmentation data. Assume that the 'Entry' fields of CIE are
+  // populated; use CIE->fields and CIE->end as the start and limit for
+  // parsing. On success, populate the rest of *CIE, and return true; on
+  // failure, report the problem and return false.
+  bool ReadCIEFields(CIE *cie);
+
+  // Parse the fields of an FDE after the entry prologue, including any 'z'
+  // augmentation data. Assume that the 'Entry' fields of *FDE are
+  // initialized; use FDE->fields and FDE->end as the start and limit for
+  // parsing. Assume that FDE->cie is fully initialized. On success,
+  // populate the rest of *FDE, and return true; on failure, report the
+  // problem and return false.
+  bool ReadFDEFields(FDE *fde);
+
+  // Report that ENTRY is incomplete, and return false. This is just a
+  // trivial wrapper for invoking reporter_->Incomplete; it provides a
+  // little brevity.
+  bool ReportIncomplete(Entry *entry);
+
+  // Return true if ENCODING has the DW_EH_PE_indirect bit set.
+  static bool IsIndirectEncoding(DwarfPointerEncoding encoding) {
+    return encoding & DW_EH_PE_indirect;
+  }
+
+  // The contents of the DWARF .debug_frame or .eh_frame section we're
+  // parsing.
+  const char *buffer_;
+  size_t buffer_length_;
+
+  // For reading multi-byte values with the appropriate endianness.
+  ByteReader *reader_;
+
+  // The handler to which we should report the data we find.
+  Handler *handler_;
+
+  // For reporting problems in the info we're parsing.
+  Reporter *reporter_;
+
+  // True if we are processing .eh_frame-format data.
+  bool eh_frame_;
+};
+
+// The handler class for CallFrameInfo. A CFI parser calls the
+// member functions of a handler object to report the data it finds.
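+//
+// As a rough illustration (a sketch for orientation only; no such class
+// exists in Breakpad), a handler that merely counts what it is given
+// could override every pure virtual member like this:
+//
+//   struct CountingHandler: public CallFrameInfo::Handler {
+//     size_t entries, rules;
+//     CountingHandler(): entries(0), rules(0) { }
+//     bool Entry(size_t, uint64, uint64, uint8, const string &, unsigned) {
+//       entries++;
+//       return true;   // yes, report this FDE's rules to us
+//     }
+//     bool UndefinedRule(uint64, int) { rules++; return true; }
+//     bool SameValueRule(uint64, int) { rules++; return true; }
+//     bool OffsetRule(uint64, int, int, long) { rules++; return true; }
+//     bool ValOffsetRule(uint64, int, int, long) { rules++; return true; }
+//     bool RegisterRule(uint64, int, int) { rules++; return true; }
+//     bool ExpressionRule(uint64, int, const string &) { rules++; return true; }
+//     bool ValExpressionRule(uint64, int, const string &) { rules++; return true; }
+//     bool End() { return true; }
+//   };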
+class CallFrameInfo::Handler { + public: + // The pseudo-register number for the canonical frame address. + enum { kCFARegister = -1 }; + + Handler() { } + virtual ~Handler() { } + + // The parser has found CFI for the machine code at ADDRESS, + // extending for LENGTH bytes. OFFSET is the offset of the frame + // description entry in the section, for use in error messages. + // VERSION is the version number of the CFI format. AUGMENTATION is + // a string describing any producer-specific extensions present in + // the data. RETURN_ADDRESS is the number of the register that holds + // the address to which the function should return. + // + // Entry should return true to process this CFI, or false to skip to + // the next entry. + // + // The parser invokes Entry for each Frame Description Entry (FDE) + // it finds. The parser doesn't report Common Information Entries + // to the handler explicitly; instead, if the handler elects to + // process a given FDE, the parser reiterates the appropriate CIE's + // contents at the beginning of the FDE's rules. + virtual bool Entry(size_t offset, uint64 address, uint64 length, + uint8 version, const string &augmentation, + unsigned return_address) = 0; + + // When the Entry function returns true, the parser calls these + // handler functions repeatedly to describe the rules for recovering + // registers at each instruction in the given range of machine code. + // Immediately after a call to Entry, the handler should assume that + // the rule for each callee-saves register is "unchanged" --- that + // is, that the register still has the value it had in the caller. + // + // If a *Rule function returns true, we continue processing this entry's + // instructions. If a *Rule function returns false, we stop evaluating + // instructions, and skip to the next entry. Either way, we call End + // before going on to the next entry. + // + // In all of these functions, if the REG parameter is kCFARegister, then + // the rule describes how to find the canonical frame address. + // kCFARegister may be passed as a BASE_REGISTER argument, meaning that + // the canonical frame address should be used as the base address for the + // computation. All other REG values will be positive. + + // At ADDRESS, register REG's value is not recoverable. + virtual bool UndefinedRule(uint64 address, int reg) = 0; + + // At ADDRESS, register REG's value is the same as that it had in + // the caller. + virtual bool SameValueRule(uint64 address, int reg) = 0; + + // At ADDRESS, register REG has been saved at offset OFFSET from + // BASE_REGISTER. + virtual bool OffsetRule(uint64 address, int reg, + int base_register, long offset) = 0; + + // At ADDRESS, the caller's value of register REG is the current + // value of BASE_REGISTER plus OFFSET. (This rule doesn't provide an + // address at which the register's value is saved.) + virtual bool ValOffsetRule(uint64 address, int reg, + int base_register, long offset) = 0; + + // At ADDRESS, register REG has been saved in BASE_REGISTER. This differs + // from ValOffsetRule(ADDRESS, REG, BASE_REGISTER, 0), in that + // BASE_REGISTER is the "home" for REG's saved value: if you want to + // assign to a variable whose home is REG in the calling frame, you + // should put the value in BASE_REGISTER. + virtual bool RegisterRule(uint64 address, int reg, int base_register) = 0; + + // At ADDRESS, the DWARF expression EXPRESSION yields the address at + // which REG was saved. 
+ virtual bool ExpressionRule(uint64 address, int reg, + const string &expression) = 0; + + // At ADDRESS, the DWARF expression EXPRESSION yields the caller's + // value for REG. (This rule doesn't provide an address at which the + // register's value is saved.) + virtual bool ValExpressionRule(uint64 address, int reg, + const string &expression) = 0; + + // Indicate that the rules for the address range reported by the + // last call to Entry are complete. End should return true if + // everything is okay, or false if an error has occurred and parsing + // should stop. + virtual bool End() = 0; + + // Handler functions for Linux C++ exception handling data. These are + // only called if the data includes 'z' augmentation strings. + + // The Linux C++ ABI uses an extension of the DWARF CFI format to + // walk the stack to propagate exceptions from the throw to the + // appropriate catch, and do the appropriate cleanups along the way. + // CFI entries used for exception handling have two additional data + // associated with them: + // + // - The "language-specific data area" describes which exception + // types the function has 'catch' clauses for, and indicates how + // to go about re-entering the function at the appropriate catch + // clause. If the exception is not caught, it describes the + // destructors that must run before the frame is popped. + // + // - The "personality routine" is responsible for interpreting the + // language-specific data area's contents, and deciding whether + // the exception should continue to propagate down the stack, + // perhaps after doing some cleanup for this frame, or whether the + // exception will be caught here. + // + // In principle, the language-specific data area is opaque to + // everybody but the personality routine. In practice, these values + // may be useful or interesting to readers with extra context, and + // we have to at least skip them anyway, so we might as well report + // them to the handler. + + // This entry's exception handling personality routine's address is + // ADDRESS. If INDIRECT is true, then ADDRESS is the address at + // which the routine's address is stored. The default definition for + // this handler function simply returns true, allowing parsing of + // the entry to continue. + virtual bool PersonalityRoutine(uint64 address, bool indirect) { + return true; + } + + // This entry's language-specific data area (LSDA) is located at + // ADDRESS. If INDIRECT is true, then ADDRESS is the address at + // which the area's address is stored. The default definition for + // this handler function simply returns true, allowing parsing of + // the entry to continue. + virtual bool LanguageSpecificDataArea(uint64 address, bool indirect) { + return true; + } + + // This entry describes a signal trampoline --- this frame is the + // caller of a signal handler. The default definition for this + // handler function simply returns true, allowing parsing of the + // entry to continue. + // + // The best description of the rationale for and meaning of signal + // trampoline CFI entries seems to be in the GCC bug database: + // http://gcc.gnu.org/bugzilla/show_bug.cgi?id=26208 + virtual bool SignalHandler() { return true; } +}; + +// The CallFrameInfo class makes calls on an instance of this class to +// report errors or warn about problems in the data it is parsing. The +// default definitions of these methods print a message to stderr, but +// you can make a derived class that overrides them. 
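+//
+// Wiring the pieces together might look roughly like this (again only an
+// illustrative sketch; section_contents and the CountingHandler sketched
+// above are not part of Breakpad). The stock Reporter is used as-is, so
+// any problems are simply printed to stderr:
+//
+//   CallFrameInfo::Reporter reporter("a.out", ".debug_frame");
+//   CountingHandler handler;
+//   ByteReader reader(ENDIANNESS_LITTLE);
+//   reader.SetAddressSize(8);
+//   CallFrameInfo parser(section_contents.data(), section_contents.size(),
+//                        &reader, &handler, &reporter);
+//   if (!parser.Start()) {
+//     // the reporter has already described what went wrong on stderr
+//   }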
+class CallFrameInfo::Reporter {
+ public:
+  // Create an error reporter which attributes troubles to the section
+  // named SECTION in FILENAME.
+  //
+  // Normally SECTION would be .debug_frame, but the Mac puts CFI data
+  // in a Mach-O section named __debug_frame. If we support
+  // Linux-style exception handling data, we could be reading an
+  // .eh_frame section.
+  Reporter(const string &filename,
+           const string &section = ".debug_frame")
+      : filename_(filename), section_(section) { }
+  virtual ~Reporter() { }
+
+  // The CFI entry at OFFSET ends too early to be well-formed. KIND
+  // indicates what kind of entry it is; KIND can be kUnknown if we
+  // haven't parsed enough of the entry to tell yet.
+  virtual void Incomplete(uint64 offset, CallFrameInfo::EntryKind kind);
+
+  // The .eh_frame data has a four-byte zero at OFFSET where the next
+  // entry's length would be; this is a terminator. However, the buffer
+  // length as given to the CallFrameInfo constructor says there should be
+  // more data.
+  virtual void EarlyEHTerminator(uint64 offset);
+
+  // The FDE at OFFSET refers to the CIE at CIE_OFFSET, but the
+  // section is not that large.
+  virtual void CIEPointerOutOfRange(uint64 offset, uint64 cie_offset);
+
+  // The FDE at OFFSET refers to the CIE at CIE_OFFSET, but the entry
+  // there is not a CIE.
+  virtual void BadCIEId(uint64 offset, uint64 cie_offset);
+
+  // The FDE at OFFSET refers to a CIE with version number VERSION,
+  // which we don't recognize. We cannot parse DWARF CFI if it uses
+  // a version number we don't recognize.
+  virtual void UnrecognizedVersion(uint64 offset, int version);
+
+  // The FDE at OFFSET refers to a CIE with augmentation AUGMENTATION,
+  // which we don't recognize. We cannot parse DWARF CFI if it uses
+  // augmentations we don't recognize.
+  virtual void UnrecognizedAugmentation(uint64 offset,
+                                        const string &augmentation);
+
+  // The pointer encoding ENCODING, specified by the CIE at OFFSET, is not
+  // a valid encoding.
+  virtual void InvalidPointerEncoding(uint64 offset, uint8 encoding);
+
+  // The pointer encoding ENCODING, specified by the CIE at OFFSET, depends
+  // on a base address which has not been supplied.
+  virtual void UnusablePointerEncoding(uint64 offset, uint8 encoding);
+
+  // The CIE at OFFSET contains a DW_CFA_restore instruction at
+  // INSN_OFFSET, which may not appear in a CIE.
+  virtual void RestoreInCIE(uint64 offset, uint64 insn_offset);
+
+  // The entry at OFFSET, of kind KIND, has an unrecognized
+  // instruction at INSN_OFFSET.
+  virtual void BadInstruction(uint64 offset, CallFrameInfo::EntryKind kind,
+                              uint64 insn_offset);
+
+  // The instruction at INSN_OFFSET in the entry at OFFSET, of kind
+  // KIND, establishes a rule that cites the CFA, but we have not
+  // established a CFA rule yet.
+  virtual void NoCFARule(uint64 offset, CallFrameInfo::EntryKind kind,
+                         uint64 insn_offset);
+
+  // The instruction at INSN_OFFSET in the entry at OFFSET, of kind
+  // KIND, is a DW_CFA_restore_state instruction, but the stack of
+  // saved states is empty.
+  virtual void EmptyStateStack(uint64 offset, CallFrameInfo::EntryKind kind,
+                               uint64 insn_offset);
+
+  // The DW_CFA_restore_state instruction at INSN_OFFSET in the entry
+  // at OFFSET, of kind KIND, would restore a state that has no CFA
+  // rule, whereas the current state does have a CFA rule. This is
+  // bogus input, which the CallFrameInfo::Handler interface doesn't
+  // (and shouldn't) have any way to report.
+ virtual void ClearingCFARule(uint64 offset, CallFrameInfo::EntryKind kind, + uint64 insn_offset); + + protected: + // The name of the file whose CFI we're reading. + string filename_; + + // The name of the CFI section in that file. + string section_; +}; + +} // namespace dwarf2reader + +#endif // UTIL_DEBUGINFO_DWARF2READER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2reader_cfi_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2reader_cfi_unittest.cc new file mode 100644 index 0000000000..66c6198b11 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2reader_cfi_unittest.cc @@ -0,0 +1,2452 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// dwarf2reader_cfi_unittest.cc: Unit tests for dwarf2reader::CallFrameInfo + +#include + +#include +#include + +// The '.eh_frame' format, used by the Linux C++ ABI for exception +// handling, is poorly specified. To help test our support for .eh_frame, +// if you #define WRITE_ELF while compiling this file, and add the +// 'include' directory from the binutils, gcc, or gdb source tree to the +// #include path, then each test that calls the +// PERHAPS_WRITE_DEBUG_FRAME_FILE or PERHAPS_WRITE_EH_FRAME_FILE will write +// an ELF file containing a .debug_frame or .eh_frame section; you can then +// use tools like readelf to examine the test data, and check the tools' +// interpretation against the test's intentions. Each ELF file is named +// "cfitest-TEST", where TEST identifies the particular test. +#ifdef WRITE_ELF +#include +#include +#include +extern "C" { +// To compile with WRITE_ELF, you should add the 'include' directory +// of the binutils, gcc, or gdb source tree to your #include path; +// that directory contains this header. 
+#include "elf/common.h" +} +#endif + +#include "breakpad_googletest_includes.h" +#include "common/dwarf/bytereader-inl.h" +#include "common/dwarf/cfi_assembler.h" +#include "common/dwarf/dwarf2reader.h" +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" + +using google_breakpad::CFISection; +using google_breakpad::test_assembler::Label; +using google_breakpad::test_assembler::kBigEndian; +using google_breakpad::test_assembler::kLittleEndian; +using google_breakpad::test_assembler::Section; + +using dwarf2reader::DwarfPointerEncoding; +using dwarf2reader::ENDIANNESS_BIG; +using dwarf2reader::ENDIANNESS_LITTLE; +using dwarf2reader::ByteReader; +using dwarf2reader::CallFrameInfo; + +using std::vector; +using testing::InSequence; +using testing::Return; +using testing::Sequence; +using testing::Test; +using testing::_; + +#ifdef WRITE_ELF +void WriteELFFrameSection(const char *filename, const char *section_name, + const CFISection §ion); +#define PERHAPS_WRITE_DEBUG_FRAME_FILE(name, section) \ + WriteELFFrameSection("cfitest-" name, ".debug_frame", section); +#define PERHAPS_WRITE_EH_FRAME_FILE(name, section) \ + WriteELFFrameSection("cfitest-" name, ".eh_frame", section); +#else +#define PERHAPS_WRITE_DEBUG_FRAME_FILE(name, section) +#define PERHAPS_WRITE_EH_FRAME_FILE(name, section) +#endif + +class MockCallFrameInfoHandler: public CallFrameInfo::Handler { + public: + MOCK_METHOD6(Entry, bool(size_t offset, uint64 address, uint64 length, + uint8 version, const string &augmentation, + unsigned return_address)); + MOCK_METHOD2(UndefinedRule, bool(uint64 address, int reg)); + MOCK_METHOD2(SameValueRule, bool(uint64 address, int reg)); + MOCK_METHOD4(OffsetRule, bool(uint64 address, int reg, int base_register, + long offset)); + MOCK_METHOD4(ValOffsetRule, bool(uint64 address, int reg, int base_register, + long offset)); + MOCK_METHOD3(RegisterRule, bool(uint64 address, int reg, int base_register)); + MOCK_METHOD3(ExpressionRule, bool(uint64 address, int reg, + const string &expression)); + MOCK_METHOD3(ValExpressionRule, bool(uint64 address, int reg, + const string &expression)); + MOCK_METHOD0(End, bool()); + MOCK_METHOD2(PersonalityRoutine, bool(uint64 address, bool indirect)); + MOCK_METHOD2(LanguageSpecificDataArea, bool(uint64 address, bool indirect)); + MOCK_METHOD0(SignalHandler, bool()); +}; + +class MockCallFrameErrorReporter: public CallFrameInfo::Reporter { + public: + MockCallFrameErrorReporter() : Reporter("mock filename", "mock section") { } + MOCK_METHOD2(Incomplete, void(uint64, CallFrameInfo::EntryKind)); + MOCK_METHOD1(EarlyEHTerminator, void(uint64)); + MOCK_METHOD2(CIEPointerOutOfRange, void(uint64, uint64)); + MOCK_METHOD2(BadCIEId, void(uint64, uint64)); + MOCK_METHOD2(UnrecognizedVersion, void(uint64, int version)); + MOCK_METHOD2(UnrecognizedAugmentation, void(uint64, const string &)); + MOCK_METHOD2(InvalidPointerEncoding, void(uint64, uint8)); + MOCK_METHOD2(UnusablePointerEncoding, void(uint64, uint8)); + MOCK_METHOD2(RestoreInCIE, void(uint64, uint64)); + MOCK_METHOD3(BadInstruction, void(uint64, CallFrameInfo::EntryKind, uint64)); + MOCK_METHOD3(NoCFARule, void(uint64, CallFrameInfo::EntryKind, uint64)); + MOCK_METHOD3(EmptyStateStack, void(uint64, CallFrameInfo::EntryKind, uint64)); +}; + +struct CFIFixture { + + enum { kCFARegister = CallFrameInfo::Handler::kCFARegister }; + + CFIFixture() { + // Default expectations for the data handler. 
+ // + // - Leave Entry and End without expectations, as it's probably a + // good idea to set those explicitly in each test. + // + // - Expect the *Rule functions to not be called, + // so that each test can simply list the calls they expect. + // + // I gather I could use StrictMock for this, but the manual seems + // to suggest using that only as a last resort, and this isn't so + // bad. + EXPECT_CALL(handler, UndefinedRule(_, _)).Times(0); + EXPECT_CALL(handler, SameValueRule(_, _)).Times(0); + EXPECT_CALL(handler, OffsetRule(_, _, _, _)).Times(0); + EXPECT_CALL(handler, ValOffsetRule(_, _, _, _)).Times(0); + EXPECT_CALL(handler, RegisterRule(_, _, _)).Times(0); + EXPECT_CALL(handler, ExpressionRule(_, _, _)).Times(0); + EXPECT_CALL(handler, ValExpressionRule(_, _, _)).Times(0); + EXPECT_CALL(handler, PersonalityRoutine(_, _)).Times(0); + EXPECT_CALL(handler, LanguageSpecificDataArea(_, _)).Times(0); + EXPECT_CALL(handler, SignalHandler()).Times(0); + + // Default expectations for the error/warning reporer. + EXPECT_CALL(reporter, Incomplete(_, _)).Times(0); + EXPECT_CALL(reporter, EarlyEHTerminator(_)).Times(0); + EXPECT_CALL(reporter, CIEPointerOutOfRange(_, _)).Times(0); + EXPECT_CALL(reporter, BadCIEId(_, _)).Times(0); + EXPECT_CALL(reporter, UnrecognizedVersion(_, _)).Times(0); + EXPECT_CALL(reporter, UnrecognizedAugmentation(_, _)).Times(0); + EXPECT_CALL(reporter, InvalidPointerEncoding(_, _)).Times(0); + EXPECT_CALL(reporter, UnusablePointerEncoding(_, _)).Times(0); + EXPECT_CALL(reporter, RestoreInCIE(_, _)).Times(0); + EXPECT_CALL(reporter, BadInstruction(_, _, _)).Times(0); + EXPECT_CALL(reporter, NoCFARule(_, _, _)).Times(0); + EXPECT_CALL(reporter, EmptyStateStack(_, _, _)).Times(0); + } + + MockCallFrameInfoHandler handler; + MockCallFrameErrorReporter reporter; +}; + +class CFI: public CFIFixture, public Test { }; + +TEST_F(CFI, EmptyRegion) { + EXPECT_CALL(handler, Entry(_, _, _, _, _, _)).Times(0); + EXPECT_CALL(handler, End()).Times(0); + static const char data[1] = { 42 }; + + ByteReader byte_reader(ENDIANNESS_BIG); + CallFrameInfo parser(data, 0, &byte_reader, &handler, &reporter); + EXPECT_TRUE(parser.Start()); +} + +TEST_F(CFI, IncompleteLength32) { + CFISection section(kBigEndian, 8); + section + // Not even long enough for an initial length. + .D16(0xa0f) + // Padding to keep valgrind happy. We subtract these off when we + // construct the parser. + .D16(0); + + EXPECT_CALL(handler, Entry(_, _, _, _, _, _)).Times(0); + EXPECT_CALL(handler, End()).Times(0); + + EXPECT_CALL(reporter, Incomplete(_, CallFrameInfo::kUnknown)) + .WillOnce(Return()); + + string contents; + ASSERT_TRUE(section.GetContents(&contents)); + + ByteReader byte_reader(ENDIANNESS_BIG); + byte_reader.SetAddressSize(8); + CallFrameInfo parser(contents.data(), contents.size() - 2, + &byte_reader, &handler, &reporter); + EXPECT_FALSE(parser.Start()); +} + +TEST_F(CFI, IncompleteLength64) { + CFISection section(kLittleEndian, 4); + section + // An incomplete 64-bit DWARF initial length. + .D32(0xffffffff).D32(0x71fbaec2) + // Padding to keep valgrind happy. We subtract these off when we + // construct the parser. 
+ .D32(0); + + EXPECT_CALL(handler, Entry(_, _, _, _, _, _)).Times(0); + EXPECT_CALL(handler, End()).Times(0); + + EXPECT_CALL(reporter, Incomplete(_, CallFrameInfo::kUnknown)) + .WillOnce(Return()); + + string contents; + ASSERT_TRUE(section.GetContents(&contents)); + + ByteReader byte_reader(ENDIANNESS_LITTLE); + byte_reader.SetAddressSize(4); + CallFrameInfo parser(contents.data(), contents.size() - 4, + &byte_reader, &handler, &reporter); + EXPECT_FALSE(parser.Start()); +} + +TEST_F(CFI, IncompleteId32) { + CFISection section(kBigEndian, 8); + section + .D32(3) // Initial length, not long enough for id + .D8(0xd7).D8(0xe5).D8(0xf1) // incomplete id + .CIEHeader(8727, 3983, 8889, 3, "") + .FinishEntry(); + + EXPECT_CALL(handler, Entry(_, _, _, _, _, _)).Times(0); + EXPECT_CALL(handler, End()).Times(0); + + EXPECT_CALL(reporter, Incomplete(_, CallFrameInfo::kUnknown)) + .WillOnce(Return()); + + string contents; + ASSERT_TRUE(section.GetContents(&contents)); + + ByteReader byte_reader(ENDIANNESS_BIG); + byte_reader.SetAddressSize(8); + CallFrameInfo parser(contents.data(), contents.size(), + &byte_reader, &handler, &reporter); + EXPECT_FALSE(parser.Start()); +} + +TEST_F(CFI, BadId32) { + CFISection section(kBigEndian, 8); + section + .D32(0x100) // Initial length + .D32(0xe802fade) // bogus ID + .Append(0x100 - 4, 0x42); // make the length true + section + .CIEHeader(1672, 9872, 8529, 3, "") + .FinishEntry(); + + EXPECT_CALL(handler, Entry(_, _, _, _, _, _)).Times(0); + EXPECT_CALL(handler, End()).Times(0); + + EXPECT_CALL(reporter, CIEPointerOutOfRange(_, 0xe802fade)) + .WillOnce(Return()); + + string contents; + ASSERT_TRUE(section.GetContents(&contents)); + + ByteReader byte_reader(ENDIANNESS_BIG); + byte_reader.SetAddressSize(8); + CallFrameInfo parser(contents.data(), contents.size(), + &byte_reader, &handler, &reporter); + EXPECT_FALSE(parser.Start()); +} + +// A lone CIE shouldn't cause any handler calls. +TEST_F(CFI, SingleCIE) { + CFISection section(kLittleEndian, 4); + section.CIEHeader(0xffe799a8, 0x3398dcdd, 0x6e9683de, 3, ""); + section.Append(10, dwarf2reader::DW_CFA_nop); + section.FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("SingleCIE", section); + + EXPECT_CALL(handler, Entry(_, _, _, _, _, _)).Times(0); + EXPECT_CALL(handler, End()).Times(0); + + string contents; + EXPECT_TRUE(section.GetContents(&contents)); + ByteReader byte_reader(ENDIANNESS_LITTLE); + byte_reader.SetAddressSize(4); + CallFrameInfo parser(contents.data(), contents.size(), + &byte_reader, &handler, &reporter); + EXPECT_TRUE(parser.Start()); +} + +// One FDE, one CIE. +TEST_F(CFI, OneFDE) { + CFISection section(kBigEndian, 4); + Label cie; + section + .Mark(&cie) + .CIEHeader(0x4be22f75, 0x2492236e, 0x6b6efb87, 3, "") + .FinishEntry() + .FDEHeader(cie, 0x7714740d, 0x3d5a10cd) + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("OneFDE", section); + + { + InSequence s; + EXPECT_CALL(handler, + Entry(_, 0x7714740d, 0x3d5a10cd, 3, "", 0x6b6efb87)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + } + + string contents; + EXPECT_TRUE(section.GetContents(&contents)); + ByteReader byte_reader(ENDIANNESS_BIG); + byte_reader.SetAddressSize(4); + CallFrameInfo parser(contents.data(), contents.size(), + &byte_reader, &handler, &reporter); + EXPECT_TRUE(parser.Start()); +} + +// Two FDEs share a CIE. +TEST_F(CFI, TwoFDEsOneCIE) { + CFISection section(kBigEndian, 4); + Label cie; + section + // First FDE. 
readelf complains about this one because it makes + // a forward reference to its CIE. + .FDEHeader(cie, 0xa42744df, 0xa3b42121) + .FinishEntry() + // CIE. + .Mark(&cie) + .CIEHeader(0x04f7dc7b, 0x3d00c05f, 0xbd43cb59, 3, "") + .FinishEntry() + // Second FDE. + .FDEHeader(cie, 0x6057d391, 0x700f608d) + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("TwoFDEsOneCIE", section); + + { + InSequence s; + EXPECT_CALL(handler, + Entry(_, 0xa42744df, 0xa3b42121, 3, "", 0xbd43cb59)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + } + { + InSequence s; + EXPECT_CALL(handler, + Entry(_, 0x6057d391, 0x700f608d, 3, "", 0xbd43cb59)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + } + + string contents; + EXPECT_TRUE(section.GetContents(&contents)); + ByteReader byte_reader(ENDIANNESS_BIG); + byte_reader.SetAddressSize(4); + CallFrameInfo parser(contents.data(), contents.size(), + &byte_reader, &handler, &reporter); + EXPECT_TRUE(parser.Start()); +} + +// Two FDEs, two CIEs. +TEST_F(CFI, TwoFDEsTwoCIEs) { + CFISection section(kLittleEndian, 8); + Label cie1, cie2; + section + // First CIE. + .Mark(&cie1) + .CIEHeader(0x694d5d45, 0x4233221b, 0xbf45e65a, 3, "") + .FinishEntry() + // First FDE which cites second CIE. readelf complains about + // this one because it makes a forward reference to its CIE. + .FDEHeader(cie2, 0x778b27dfe5871f05ULL, 0x324ace3448070926ULL) + .FinishEntry() + // Second FDE, which cites first CIE. + .FDEHeader(cie1, 0xf6054ca18b10bf5fULL, 0x45fdb970d8bca342ULL) + .FinishEntry() + // Second CIE. + .Mark(&cie2) + .CIEHeader(0xfba3fad7, 0x6287e1fd, 0x61d2c581, 2, "") + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("TwoFDEsTwoCIEs", section); + + { + InSequence s; + EXPECT_CALL(handler, + Entry(_, 0x778b27dfe5871f05ULL, 0x324ace3448070926ULL, 2, + "", 0x61d2c581)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + } + { + InSequence s; + EXPECT_CALL(handler, + Entry(_, 0xf6054ca18b10bf5fULL, 0x45fdb970d8bca342ULL, 3, + "", 0xbf45e65a)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + } + + string contents; + EXPECT_TRUE(section.GetContents(&contents)); + ByteReader byte_reader(ENDIANNESS_LITTLE); + byte_reader.SetAddressSize(8); + CallFrameInfo parser(contents.data(), contents.size(), + &byte_reader, &handler, &reporter); + EXPECT_TRUE(parser.Start()); +} + +// An FDE whose CIE specifies a version we don't recognize. +TEST_F(CFI, BadVersion) { + CFISection section(kBigEndian, 4); + Label cie1, cie2; + section + .Mark(&cie1) + .CIEHeader(0xca878cf0, 0x7698ec04, 0x7b616f54, 0x52, "") + .FinishEntry() + // We should skip this entry, as its CIE specifies a version we + // don't recognize. + .FDEHeader(cie1, 0x08852292, 0x2204004a) + .FinishEntry() + // Despite the above, we should visit this entry. + .Mark(&cie2) + .CIEHeader(0x7c3ae7c9, 0xb9b9a512, 0x96cb3264, 3, "") + .FinishEntry() + .FDEHeader(cie2, 0x2094735a, 0x6e875501) + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("BadVersion", section); + + EXPECT_CALL(reporter, UnrecognizedVersion(_, 0x52)) + .WillOnce(Return()); + + { + InSequence s; + // We should see no mention of the first FDE, but we should get + // a call to Entry for the second. 
+ EXPECT_CALL(handler, Entry(_, 0x2094735a, 0x6e875501, 3, "", + 0x96cb3264)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()) + .WillOnce(Return(true)); + } + + string contents; + EXPECT_TRUE(section.GetContents(&contents)); + ByteReader byte_reader(ENDIANNESS_BIG); + byte_reader.SetAddressSize(4); + CallFrameInfo parser(contents.data(), contents.size(), + &byte_reader, &handler, &reporter); + EXPECT_FALSE(parser.Start()); +} + +// An FDE whose CIE specifies an augmentation we don't recognize. +TEST_F(CFI, BadAugmentation) { + CFISection section(kBigEndian, 4); + Label cie1, cie2; + section + .Mark(&cie1) + .CIEHeader(0x4be22f75, 0x2492236e, 0x6b6efb87, 3, "spaniels!") + .FinishEntry() + // We should skip this entry, as its CIE specifies an + // augmentation we don't recognize. + .FDEHeader(cie1, 0x7714740d, 0x3d5a10cd) + .FinishEntry() + // Despite the above, we should visit this entry. + .Mark(&cie2) + .CIEHeader(0xf8bc4399, 0x8cf09931, 0xf2f519b2, 3, "") + .FinishEntry() + .FDEHeader(cie2, 0x7bf0fda0, 0xcbcd28d8) + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("BadAugmentation", section); + + EXPECT_CALL(reporter, UnrecognizedAugmentation(_, "spaniels!")) + .WillOnce(Return()); + + { + InSequence s; + // We should see no mention of the first FDE, but we should get + // a call to Entry for the second. + EXPECT_CALL(handler, Entry(_, 0x7bf0fda0, 0xcbcd28d8, 3, "", + 0xf2f519b2)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()) + .WillOnce(Return(true)); + } + + string contents; + EXPECT_TRUE(section.GetContents(&contents)); + ByteReader byte_reader(ENDIANNESS_BIG); + byte_reader.SetAddressSize(4); + CallFrameInfo parser(contents.data(), contents.size(), + &byte_reader, &handler, &reporter); + EXPECT_FALSE(parser.Start()); +} + +// The return address column field is a byte in CFI version 1 +// (DWARF2), but a ULEB128 value in version 3 (DWARF3). +TEST_F(CFI, CIEVersion1ReturnColumn) { + CFISection section(kBigEndian, 4); + Label cie; + section + // CIE, using the version 1 format: return column is a ubyte. + .Mark(&cie) + // Use a value for the return column that is parsed differently + // as a ubyte and as a ULEB128. + .CIEHeader(0xbcdea24f, 0x5be28286, 0x9f, 1, "") + .FinishEntry() + // FDE, citing that CIE. + .FDEHeader(cie, 0xb8d347b5, 0x825e55dc) + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("CIEVersion1ReturnColumn", section); + + { + InSequence s; + EXPECT_CALL(handler, Entry(_, 0xb8d347b5, 0x825e55dc, 1, "", 0x9f)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + } + + string contents; + EXPECT_TRUE(section.GetContents(&contents)); + ByteReader byte_reader(ENDIANNESS_BIG); + byte_reader.SetAddressSize(4); + CallFrameInfo parser(contents.data(), contents.size(), + &byte_reader, &handler, &reporter); + EXPECT_TRUE(parser.Start()); +} + +// The return address column field is a byte in CFI version 1 +// (DWARF2), but a ULEB128 value in version 3 (DWARF3). +TEST_F(CFI, CIEVersion3ReturnColumn) { + CFISection section(kBigEndian, 4); + Label cie; + section + // CIE, using the version 3 format: return column is a ULEB128. + .Mark(&cie) + // Use a value for the return column that is parsed differently + // as a ubyte and as a ULEB128. + .CIEHeader(0x0ab4758d, 0xc010fdf7, 0x89, 3, "") + .FinishEntry() + // FDE, citing that CIE. 
+ .FDEHeader(cie, 0x86763f2b, 0x2a66dc23) + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("CIEVersion3ReturnColumn", section); + + { + InSequence s; + EXPECT_CALL(handler, Entry(_, 0x86763f2b, 0x2a66dc23, 3, "", 0x89)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + } + + string contents; + EXPECT_TRUE(section.GetContents(&contents)); + ByteReader byte_reader(ENDIANNESS_BIG); + byte_reader.SetAddressSize(4); + CallFrameInfo parser(contents.data(), contents.size(), + &byte_reader, &handler, &reporter); + EXPECT_TRUE(parser.Start()); +} + +struct CFIInsnFixture: public CFIFixture { + CFIInsnFixture() : CFIFixture() { + data_factor = 0xb6f; + return_register = 0x9be1ed9f; + version = 3; + cfa_base_register = 0x383a3aa; + cfa_offset = 0xf748; + } + + // Prepare SECTION to receive FDE instructions. + // + // - Append a stock CIE header that establishes the fixture's + // code_factor, data_factor, return_register, version, and + // augmentation values. + // - Have the CIE set up a CFA rule using cfa_base_register and + // cfa_offset. + // - Append a stock FDE header, referring to the above CIE, for the + // fde_size bytes at fde_start. Choose fde_start and fde_size + // appropriately for the section's address size. + // - Set appropriate expectations on handler in sequence s for the + // frame description entry and the CIE's CFA rule. + // + // On return, SECTION is ready to have FDE instructions appended to + // it, and its FinishEntry member called. + void StockCIEAndFDE(CFISection *section) { + // Choose appropriate constants for our address size. + if (section->AddressSize() == 4) { + fde_start = 0xc628ecfbU; + fde_size = 0x5dee04a2; + code_factor = 0x60b; + } else { + assert(section->AddressSize() == 8); + fde_start = 0x0005c57ce7806bd3ULL; + fde_size = 0x2699521b5e333100ULL; + code_factor = 0x01008e32855274a8ULL; + } + + // Create the CIE. + (*section) + .Mark(&cie_label) + .CIEHeader(code_factor, data_factor, return_register, version, + "") + .D8(dwarf2reader::DW_CFA_def_cfa) + .ULEB128(cfa_base_register) + .ULEB128(cfa_offset) + .FinishEntry(); + + // Create the FDE. + section->FDEHeader(cie_label, fde_start, fde_size); + + // Expect an Entry call for the FDE and a ValOffsetRule call for the + // CIE's CFA rule. 
+ EXPECT_CALL(handler, Entry(_, fde_start, fde_size, version, "", + return_register)) + .InSequence(s) + .WillOnce(Return(true)); + EXPECT_CALL(handler, ValOffsetRule(fde_start, kCFARegister, + cfa_base_register, cfa_offset)) + .InSequence(s) + .WillOnce(Return(true)); + } + + // Run the contents of SECTION through a CallFrameInfo parser, + // expecting parser.Start to return SUCCEEDS + void ParseSection(CFISection *section, bool succeeds = true) { + string contents; + EXPECT_TRUE(section->GetContents(&contents)); + dwarf2reader::Endianness endianness; + if (section->endianness() == kBigEndian) + endianness = ENDIANNESS_BIG; + else { + assert(section->endianness() == kLittleEndian); + endianness = ENDIANNESS_LITTLE; + } + ByteReader byte_reader(endianness); + byte_reader.SetAddressSize(section->AddressSize()); + CallFrameInfo parser(contents.data(), contents.size(), + &byte_reader, &handler, &reporter); + if (succeeds) + EXPECT_TRUE(parser.Start()); + else + EXPECT_FALSE(parser.Start()); + } + + Label cie_label; + Sequence s; + uint64 code_factor; + int data_factor; + unsigned return_register; + unsigned version; + unsigned cfa_base_register; + int cfa_offset; + uint64 fde_start, fde_size; +}; + +class CFIInsn: public CFIInsnFixture, public Test { }; + +TEST_F(CFIInsn, DW_CFA_set_loc) { + CFISection section(kBigEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_set_loc).D32(0xb1ee3e7a) + // Use DW_CFA_def_cfa to force a handler call that we can use to + // check the effect of the DW_CFA_set_loc. + .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(0x4defb431).ULEB128(0x6d17b0ee) + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("DW_CFA_set_loc", section); + + EXPECT_CALL(handler, + ValOffsetRule(0xb1ee3e7a, kCFARegister, 0x4defb431, 0x6d17b0ee)) + .InSequence(s) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_advance_loc) { + CFISection section(kBigEndian, 8); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_advance_loc | 0x2a) + // Use DW_CFA_def_cfa to force a handler call that we can use to + // check the effect of the DW_CFA_advance_loc. 
+ .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(0x5bbb3715).ULEB128(0x0186c7bf) + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("DW_CFA_advance_loc", section); + + EXPECT_CALL(handler, + ValOffsetRule(fde_start + 0x2a * code_factor, + kCFARegister, 0x5bbb3715, 0x0186c7bf)) + .InSequence(s) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_advance_loc1) { + CFISection section(kLittleEndian, 8); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_advance_loc1).D8(0xd8) + .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(0x69d5696a).ULEB128(0x1eb7fc93) + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("DW_CFA_advance_loc1", section); + + EXPECT_CALL(handler, + ValOffsetRule((fde_start + 0xd8 * code_factor), + kCFARegister, 0x69d5696a, 0x1eb7fc93)) + .InSequence(s) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_advance_loc2) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_advance_loc2).D16(0x3adb) + .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(0x3a368bed).ULEB128(0x3194ee37) + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("DW_CFA_advance_loc2", section); + + EXPECT_CALL(handler, + ValOffsetRule((fde_start + 0x3adb * code_factor), + kCFARegister, 0x3a368bed, 0x3194ee37)) + .InSequence(s) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_advance_loc4) { + CFISection section(kBigEndian, 8); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_advance_loc4).D32(0x15813c88) + .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(0x135270c5).ULEB128(0x24bad7cb) + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("DW_CFA_advance_loc4", section); + + EXPECT_CALL(handler, + ValOffsetRule((fde_start + 0x15813c88ULL * code_factor), + kCFARegister, 0x135270c5, 0x24bad7cb)) + .InSequence(s) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_MIPS_advance_loc8) { + code_factor = 0x2d; + CFISection section(kBigEndian, 8); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_MIPS_advance_loc8).D64(0x3c4f3945b92c14ULL) + .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(0xe17ed602).ULEB128(0x3d162e7f) + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("DW_CFA_advance_loc8", section); + + EXPECT_CALL(handler, + ValOffsetRule((fde_start + 0x3c4f3945b92c14ULL * code_factor), + kCFARegister, 0xe17ed602, 0x3d162e7f)) + .InSequence(s) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_def_cfa) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(0x4e363a85).ULEB128(0x815f9aa7) + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("DW_CFA_def_cfa", section); + + EXPECT_CALL(handler, + ValOffsetRule(fde_start, kCFARegister, 0x4e363a85, 0x815f9aa7)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_def_cfa_sf) { + CFISection section(kBigEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_def_cfa_sf).ULEB128(0x8ccb32b7).LEB128(0x9ea) + 
.D8(dwarf2reader::DW_CFA_def_cfa_sf).ULEB128(0x9b40f5da).LEB128(-0x40a2) + .FinishEntry(); + + EXPECT_CALL(handler, + ValOffsetRule(fde_start, kCFARegister, 0x8ccb32b7, + 0x9ea * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, + ValOffsetRule(fde_start, kCFARegister, 0x9b40f5da, + -0x40a2 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_def_cfa_register) { + CFISection section(kLittleEndian, 8); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_def_cfa_register).ULEB128(0x3e7e9363) + .FinishEntry(); + + EXPECT_CALL(handler, + ValOffsetRule(fde_start, kCFARegister, 0x3e7e9363, cfa_offset)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +// DW_CFA_def_cfa_register should have no effect when applied to a +// non-base/offset rule. +TEST_F(CFIInsn, DW_CFA_def_cfa_registerBadRule) { + CFISection section(kBigEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_def_cfa_expression).Block("needle in a haystack") + .D8(dwarf2reader::DW_CFA_def_cfa_register).ULEB128(0xf1b49e49) + .FinishEntry(); + + EXPECT_CALL(handler, + ValExpressionRule(fde_start, kCFARegister, + "needle in a haystack")) + .WillRepeatedly(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_def_cfa_offset) { + CFISection section(kBigEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_def_cfa_offset).ULEB128(0x1e8e3b9b) + .FinishEntry(); + + EXPECT_CALL(handler, + ValOffsetRule(fde_start, kCFARegister, cfa_base_register, + 0x1e8e3b9b)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_def_cfa_offset_sf) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_def_cfa_offset_sf).LEB128(0x970) + .D8(dwarf2reader::DW_CFA_def_cfa_offset_sf).LEB128(-0x2cd) + .FinishEntry(); + + EXPECT_CALL(handler, + ValOffsetRule(fde_start, kCFARegister, cfa_base_register, + 0x970 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, + ValOffsetRule(fde_start, kCFARegister, cfa_base_register, + -0x2cd * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +// DW_CFA_def_cfa_offset should have no effect when applied to a +// non-base/offset rule. 
+TEST_F(CFIInsn, DW_CFA_def_cfa_offsetBadRule) { + CFISection section(kBigEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_def_cfa_expression).Block("six ways to Sunday") + .D8(dwarf2reader::DW_CFA_def_cfa_offset).ULEB128(0x1e8e3b9b) + .FinishEntry(); + + EXPECT_CALL(handler, + ValExpressionRule(fde_start, kCFARegister, "six ways to Sunday")) + .WillRepeatedly(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_def_cfa_expression) { + CFISection section(kLittleEndian, 8); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_def_cfa_expression).Block("eating crow") + .FinishEntry(); + + EXPECT_CALL(handler, ValExpressionRule(fde_start, kCFARegister, + "eating crow")) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_undefined) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_undefined).ULEB128(0x300ce45d) + .FinishEntry(); + + EXPECT_CALL(handler, UndefinedRule(fde_start, 0x300ce45d)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_same_value) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_same_value).ULEB128(0x3865a760) + .FinishEntry(); + + EXPECT_CALL(handler, SameValueRule(fde_start, 0x3865a760)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_offset) { + CFISection section(kBigEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_offset | 0x2c).ULEB128(0x9f6) + .FinishEntry(); + + EXPECT_CALL(handler, + OffsetRule(fde_start, 0x2c, kCFARegister, 0x9f6 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_offset_extended) { + CFISection section(kBigEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_offset_extended).ULEB128(0x402b).ULEB128(0xb48) + .FinishEntry(); + + EXPECT_CALL(handler, + OffsetRule(fde_start, 0x402b, kCFARegister, 0xb48 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_offset_extended_sf) { + CFISection section(kBigEndian, 8); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_offset_extended_sf) + .ULEB128(0x997c23ee).LEB128(0x2d00) + .D8(dwarf2reader::DW_CFA_offset_extended_sf) + .ULEB128(0x9519eb82).LEB128(-0xa77) + .FinishEntry(); + + EXPECT_CALL(handler, + OffsetRule(fde_start, 0x997c23ee, + kCFARegister, 0x2d00 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, + OffsetRule(fde_start, 0x9519eb82, + kCFARegister, -0xa77 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_val_offset) { + CFISection section(kBigEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_val_offset).ULEB128(0x623562fe).ULEB128(0x673) + .FinishEntry(); + + EXPECT_CALL(handler, + ValOffsetRule(fde_start, 0x623562fe, + kCFARegister, 0x673 * data_factor)) + 
.InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_val_offset_sf) { + CFISection section(kBigEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_val_offset_sf).ULEB128(0x6f4f).LEB128(0xaab) + .D8(dwarf2reader::DW_CFA_val_offset_sf).ULEB128(0x2483).LEB128(-0x8a2) + .FinishEntry(); + + EXPECT_CALL(handler, + ValOffsetRule(fde_start, 0x6f4f, + kCFARegister, 0xaab * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, + ValOffsetRule(fde_start, 0x2483, + kCFARegister, -0x8a2 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_register) { + CFISection section(kLittleEndian, 8); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_register).ULEB128(0x278d18f9).ULEB128(0x1a684414) + .FinishEntry(); + + EXPECT_CALL(handler, RegisterRule(fde_start, 0x278d18f9, 0x1a684414)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_expression) { + CFISection section(kBigEndian, 8); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_expression).ULEB128(0xa1619fb2) + .Block("plus ça change, plus c'est la même chose") + .FinishEntry(); + + EXPECT_CALL(handler, + ExpressionRule(fde_start, 0xa1619fb2, + "plus ça change, plus c'est la même chose")) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_val_expression) { + CFISection section(kBigEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_val_expression).ULEB128(0xc5e4a9e3) + .Block("he who has the gold makes the rules") + .FinishEntry(); + + EXPECT_CALL(handler, + ValExpressionRule(fde_start, 0xc5e4a9e3, + "he who has the gold makes the rules")) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_restore) { + CFISection section(kLittleEndian, 8); + code_factor = 0x01bd188a9b1fa083ULL; + data_factor = -0x1ac8; + return_register = 0x8c35b049; + version = 2; + fde_start = 0x2d70fe998298bbb1ULL; + fde_size = 0x46ccc2e63cf0b108ULL; + Label cie; + section + .Mark(&cie) + .CIEHeader(code_factor, data_factor, return_register, version, + "") + // Provide a CFA rule, because register rules require them. + .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(0x6ca1d50e).ULEB128(0x372e38e8) + // Provide an offset(N) rule for register 0x3c. + .D8(dwarf2reader::DW_CFA_offset | 0x3c).ULEB128(0xb348) + .FinishEntry() + // In the FDE... + .FDEHeader(cie, fde_start, fde_size) + // At a second address, provide a new offset(N) rule for register 0x3c. + .D8(dwarf2reader::DW_CFA_advance_loc | 0x13) + .D8(dwarf2reader::DW_CFA_offset | 0x3c).ULEB128(0x9a50) + // At a third address, restore the original rule for register 0x3c. + .D8(dwarf2reader::DW_CFA_advance_loc | 0x01) + .D8(dwarf2reader::DW_CFA_restore | 0x3c) + .FinishEntry(); + + { + InSequence s; + EXPECT_CALL(handler, + Entry(_, fde_start, fde_size, version, "", return_register)) + .WillOnce(Return(true)); + // CIE's CFA rule. + EXPECT_CALL(handler, + ValOffsetRule(fde_start, kCFARegister, 0x6ca1d50e, 0x372e38e8)) + .WillOnce(Return(true)); + // CIE's rule for register 0x3c. 
+ EXPECT_CALL(handler, + OffsetRule(fde_start, 0x3c, kCFARegister, 0xb348 * data_factor)) + .WillOnce(Return(true)); + // FDE's rule for register 0x3c. + EXPECT_CALL(handler, + OffsetRule(fde_start + 0x13 * code_factor, 0x3c, + kCFARegister, 0x9a50 * data_factor)) + .WillOnce(Return(true)); + // Restore CIE's rule for register 0x3c. + EXPECT_CALL(handler, + OffsetRule(fde_start + (0x13 + 0x01) * code_factor, 0x3c, + kCFARegister, 0xb348 * data_factor)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + } + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_restoreNoRule) { + CFISection section(kBigEndian, 4); + code_factor = 0x005f78143c1c3b82ULL; + data_factor = 0x25d0; + return_register = 0xe8; + version = 1; + fde_start = 0x4062e30f; + fde_size = 0x5302a389; + Label cie; + section + .Mark(&cie) + .CIEHeader(code_factor, data_factor, return_register, version, "") + // Provide a CFA rule, because register rules require them. + .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(0x470aa334).ULEB128(0x099ef127) + .FinishEntry() + // In the FDE... + .FDEHeader(cie, fde_start, fde_size) + // At a second address, provide an offset(N) rule for register 0x2c. + .D8(dwarf2reader::DW_CFA_advance_loc | 0x7) + .D8(dwarf2reader::DW_CFA_offset | 0x2c).ULEB128(0x1f47) + // At a third address, restore the (missing) CIE rule for register 0x2c. + .D8(dwarf2reader::DW_CFA_advance_loc | 0xb) + .D8(dwarf2reader::DW_CFA_restore | 0x2c) + .FinishEntry(); + + { + InSequence s; + EXPECT_CALL(handler, + Entry(_, fde_start, fde_size, version, "", return_register)) + .WillOnce(Return(true)); + // CIE's CFA rule. + EXPECT_CALL(handler, + ValOffsetRule(fde_start, kCFARegister, 0x470aa334, 0x099ef127)) + .WillOnce(Return(true)); + // FDE's rule for register 0x2c. + EXPECT_CALL(handler, + OffsetRule(fde_start + 0x7 * code_factor, 0x2c, + kCFARegister, 0x1f47 * data_factor)) + .WillOnce(Return(true)); + // Restore CIE's (missing) rule for register 0x2c. + EXPECT_CALL(handler, + SameValueRule(fde_start + (0x7 + 0xb) * code_factor, 0x2c)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + } + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_restore_extended) { + CFISection section(kBigEndian, 4); + code_factor = 0x126e; + data_factor = -0xd8b; + return_register = 0x77711787; + version = 3; + fde_start = 0x01f55a45; + fde_size = 0x452adb80; + Label cie; + section + .Mark(&cie) + .CIEHeader(code_factor, data_factor, return_register, version, + "", true /* dwarf64 */ ) + // Provide a CFA rule, because register rules require them. + .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(0x56fa0edd).ULEB128(0x097f78a5) + // Provide an offset(N) rule for register 0x0f9b8a1c. + .D8(dwarf2reader::DW_CFA_offset_extended) + .ULEB128(0x0f9b8a1c).ULEB128(0xc979) + .FinishEntry() + // In the FDE... + .FDEHeader(cie, fde_start, fde_size) + // At a second address, provide a new offset(N) rule for reg 0x0f9b8a1c. + .D8(dwarf2reader::DW_CFA_advance_loc | 0x3) + .D8(dwarf2reader::DW_CFA_offset_extended) + .ULEB128(0x0f9b8a1c).ULEB128(0x3b7b) + // At a third address, restore the original rule for register 0x0f9b8a1c. + .D8(dwarf2reader::DW_CFA_advance_loc | 0x04) + .D8(dwarf2reader::DW_CFA_restore_extended).ULEB128(0x0f9b8a1c) + .FinishEntry(); + + { + InSequence s; + EXPECT_CALL(handler, + Entry(_, fde_start, fde_size, version, "", return_register)) + .WillOnce(Return(true)); + // CIE's CFA rule. 
+ EXPECT_CALL(handler, + ValOffsetRule(fde_start, kCFARegister, 0x56fa0edd, 0x097f78a5)) + .WillOnce(Return(true)); + // CIE's rule for register 0x0f9b8a1c. + EXPECT_CALL(handler, + OffsetRule(fde_start, 0x0f9b8a1c, kCFARegister, + 0xc979 * data_factor)) + .WillOnce(Return(true)); + // FDE's rule for register 0x0f9b8a1c. + EXPECT_CALL(handler, + OffsetRule(fde_start + 0x3 * code_factor, 0x0f9b8a1c, + kCFARegister, 0x3b7b * data_factor)) + .WillOnce(Return(true)); + // Restore CIE's rule for register 0x0f9b8a1c. + EXPECT_CALL(handler, + OffsetRule(fde_start + (0x3 + 0x4) * code_factor, 0x0f9b8a1c, + kCFARegister, 0xc979 * data_factor)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + } + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_remember_and_restore_state) { + CFISection section(kLittleEndian, 8); + StockCIEAndFDE(§ion); + + // We create a state, save it, modify it, and then restore. We + // refer to the state that is overridden the restore as the + // "outgoing" state, and the restored state the "incoming" state. + // + // Register outgoing incoming expect + // 1 offset(N) no rule new "same value" rule + // 2 register(R) offset(N) report changed rule + // 3 offset(N) offset(M) report changed offset + // 4 offset(N) offset(N) no report + // 5 offset(N) no rule new "same value" rule + section + // Create the "incoming" state, which we will save and later restore. + .D8(dwarf2reader::DW_CFA_offset | 2).ULEB128(0x9806) + .D8(dwarf2reader::DW_CFA_offset | 3).ULEB128(0x995d) + .D8(dwarf2reader::DW_CFA_offset | 4).ULEB128(0x7055) + .D8(dwarf2reader::DW_CFA_remember_state) + // Advance to a new instruction; an implementation could legitimately + // ignore all but the final rule for a given register at a given address. + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + // Create the "outgoing" state, which we will discard. + .D8(dwarf2reader::DW_CFA_offset | 1).ULEB128(0xea1a) + .D8(dwarf2reader::DW_CFA_register).ULEB128(2).ULEB128(0x1d2a3767) + .D8(dwarf2reader::DW_CFA_offset | 3).ULEB128(0xdd29) + .D8(dwarf2reader::DW_CFA_offset | 5).ULEB128(0xf1ce) + // At a third address, restore the incoming state. + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + uint64 addr = fde_start; + + // Expect the incoming rules to be reported. + EXPECT_CALL(handler, OffsetRule(addr, 2, kCFARegister, 0x9806 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, OffsetRule(addr, 3, kCFARegister, 0x995d * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, OffsetRule(addr, 4, kCFARegister, 0x7055 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + + addr += code_factor; + + // After the save, we establish the outgoing rule set. + EXPECT_CALL(handler, OffsetRule(addr, 1, kCFARegister, 0xea1a * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, RegisterRule(addr, 2, 0x1d2a3767)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, OffsetRule(addr, 3, kCFARegister, 0xdd29 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, OffsetRule(addr, 5, kCFARegister, 0xf1ce * data_factor)) + .InSequence(s).WillOnce(Return(true)); + + addr += code_factor; + + // Finally, after the restore, expect to see the differences from + // the outgoing to the incoming rules reported. 
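+  // Per the table above: registers 1 and 5 had no incoming rule, so the
+  // restore is reported as a new "same value" rule for each; registers 2
+  // and 3 revert to their incoming rules and are reported; register 4 is
+  // unchanged, so no call is expected for it.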
+ EXPECT_CALL(handler, SameValueRule(addr, 1)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, OffsetRule(addr, 2, kCFARegister, 0x9806 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, OffsetRule(addr, 3, kCFARegister, 0x995d * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, SameValueRule(addr, 5)) + .InSequence(s).WillOnce(Return(true)); + + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +// Check that restoring a rule set reports changes to the CFA rule. +TEST_F(CFIInsn, DW_CFA_remember_and_restore_stateCFA) { + CFISection section(kBigEndian, 4); + StockCIEAndFDE(§ion); + + section + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_def_cfa_offset).ULEB128(0x90481102) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, ValOffsetRule(fde_start + code_factor, kCFARegister, + cfa_base_register, 0x90481102)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, ValOffsetRule(fde_start + code_factor * 2, kCFARegister, + cfa_base_register, cfa_offset)) + .InSequence(s).WillOnce(Return(true)); + + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_nop) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_nop) + .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(0x3fb8d4f1).ULEB128(0x078dc67b) + .D8(dwarf2reader::DW_CFA_nop) + .FinishEntry(); + + EXPECT_CALL(handler, + ValOffsetRule(fde_start, kCFARegister, 0x3fb8d4f1, 0x078dc67b)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_GNU_window_save) { + CFISection section(kBigEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_GNU_window_save) + .FinishEntry(); + + // Don't include all the rules in any particular sequence. + + // The caller's %o0-%o7 have become the callee's %i0-%i7. This is + // the GCC register numbering. + for (int i = 8; i < 16; i++) + EXPECT_CALL(handler, RegisterRule(fde_start, i, i + 16)) + .WillOnce(Return(true)); + // The caller's %l0-%l7 and %i0-%i7 have been saved at the top of + // its frame. + for (int i = 16; i < 32; i++) + EXPECT_CALL(handler, OffsetRule(fde_start, i, kCFARegister, (i-16) * 4)) + .WillOnce(Return(true)); + + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_GNU_args_size) { + CFISection section(kLittleEndian, 8); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_GNU_args_size).ULEB128(0xeddfa520) + // Verify that we see this, meaning we parsed the above properly. 
+ .D8(dwarf2reader::DW_CFA_offset | 0x23).ULEB128(0x269) + .FinishEntry(); + + EXPECT_CALL(handler, + OffsetRule(fde_start, 0x23, kCFARegister, 0x269 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIInsn, DW_CFA_GNU_negative_offset_extended) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_GNU_negative_offset_extended) + .ULEB128(0x430cc87a).ULEB128(0x613) + .FinishEntry(); + + EXPECT_CALL(handler, + OffsetRule(fde_start, 0x430cc87a, + kCFARegister, -0x613 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion); +} + +// Three FDEs: skip the second +TEST_F(CFIInsn, SkipFDE) { + CFISection section(kBigEndian, 4); + Label cie; + section + // CIE, used by all FDEs. + .Mark(&cie) + .CIEHeader(0x010269f2, 0x9177, 0xedca5849, 2, "") + .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(0x42ed390b).ULEB128(0x98f43aad) + .FinishEntry() + // First FDE. + .FDEHeader(cie, 0xa870ebdd, 0x60f6aa4) + .D8(dwarf2reader::DW_CFA_register).ULEB128(0x3a860351).ULEB128(0x6c9a6bcf) + .FinishEntry() + // Second FDE. + .FDEHeader(cie, 0xc534f7c0, 0xf6552e9, true /* dwarf64 */) + .D8(dwarf2reader::DW_CFA_register).ULEB128(0x1b62c234).ULEB128(0x26586b18) + .FinishEntry() + // Third FDE. + .FDEHeader(cie, 0xf681cfc8, 0x7e4594e) + .D8(dwarf2reader::DW_CFA_register).ULEB128(0x26c53934).ULEB128(0x18eeb8a4) + .FinishEntry(); + + { + InSequence s; + + // Process the first FDE. + EXPECT_CALL(handler, Entry(_, 0xa870ebdd, 0x60f6aa4, 2, "", 0xedca5849)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, ValOffsetRule(0xa870ebdd, kCFARegister, + 0x42ed390b, 0x98f43aad)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, RegisterRule(0xa870ebdd, 0x3a860351, 0x6c9a6bcf)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()) + .WillOnce(Return(true)); + + // Skip the second FDE. + EXPECT_CALL(handler, Entry(_, 0xc534f7c0, 0xf6552e9, 2, "", 0xedca5849)) + .WillOnce(Return(false)); + + // Process the third FDE. + EXPECT_CALL(handler, Entry(_, 0xf681cfc8, 0x7e4594e, 2, "", 0xedca5849)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, ValOffsetRule(0xf681cfc8, kCFARegister, + 0x42ed390b, 0x98f43aad)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, RegisterRule(0xf681cfc8, 0x26c53934, 0x18eeb8a4)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, End()) + .WillOnce(Return(true)); + } + + ParseSection(§ion); +} + +// Quit processing in the middle of an entry's instructions. 
+TEST_F(CFIInsn, QuitMidentry) { + CFISection section(kLittleEndian, 8); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_register).ULEB128(0xe0cf850d).ULEB128(0x15aab431) + .D8(dwarf2reader::DW_CFA_expression).ULEB128(0x46750aa5).Block("meat") + .FinishEntry(); + + EXPECT_CALL(handler, RegisterRule(fde_start, 0xe0cf850d, 0x15aab431)) + .InSequence(s).WillOnce(Return(false)); + EXPECT_CALL(handler, End()) + .InSequence(s).WillOnce(Return(true)); + + ParseSection(§ion, false); +} + +class CFIRestore: public CFIInsnFixture, public Test { }; + +TEST_F(CFIRestore, RestoreUndefinedRuleUnchanged) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_undefined).ULEB128(0x0bac878e) + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, UndefinedRule(fde_start, 0x0bac878e)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreUndefinedRuleChanged) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_undefined).ULEB128(0x7dedff5f) + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_same_value).ULEB128(0x7dedff5f) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, UndefinedRule(fde_start, 0x7dedff5f)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, SameValueRule(fde_start + code_factor, 0x7dedff5f)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, UndefinedRule(fde_start + 2 * code_factor, 0x7dedff5f)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreSameValueRuleUnchanged) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_same_value).ULEB128(0xadbc9b3a) + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, SameValueRule(fde_start, 0xadbc9b3a)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreSameValueRuleChanged) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_same_value).ULEB128(0x3d90dcb5) + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_undefined).ULEB128(0x3d90dcb5) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, SameValueRule(fde_start, 0x3d90dcb5)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, UndefinedRule(fde_start + code_factor, 0x3d90dcb5)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, SameValueRule(fde_start + 2 * code_factor, 0x3d90dcb5)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreOffsetRuleUnchanged) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_offset | 0x14).ULEB128(0xb6f) + .D8(dwarf2reader::DW_CFA_remember_state) + 
.D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, OffsetRule(fde_start, 0x14, + kCFARegister, 0xb6f * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreOffsetRuleChanged) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_offset | 0x21).ULEB128(0xeb7) + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_undefined).ULEB128(0x21) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, OffsetRule(fde_start, 0x21, + kCFARegister, 0xeb7 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, UndefinedRule(fde_start + code_factor, 0x21)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, OffsetRule(fde_start + 2 * code_factor, 0x21, + kCFARegister, 0xeb7 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreOffsetRuleChangedOffset) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_offset | 0x21).ULEB128(0x134) + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_offset | 0x21).ULEB128(0xf4f) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, OffsetRule(fde_start, 0x21, + kCFARegister, 0x134 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, OffsetRule(fde_start + code_factor, 0x21, + kCFARegister, 0xf4f * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, OffsetRule(fde_start + 2 * code_factor, 0x21, + kCFARegister, 0x134 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreValOffsetRuleUnchanged) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_val_offset).ULEB128(0x829caee6).ULEB128(0xe4c) + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, ValOffsetRule(fde_start, 0x829caee6, + kCFARegister, 0xe4c * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreValOffsetRuleChanged) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_val_offset).ULEB128(0xf17c36d6).ULEB128(0xeb7) + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_undefined).ULEB128(0xf17c36d6) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, ValOffsetRule(fde_start, 0xf17c36d6, + kCFARegister, 0xeb7 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, UndefinedRule(fde_start + code_factor, 0xf17c36d6)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, ValOffsetRule(fde_start + 2 * code_factor, 0xf17c36d6, + kCFARegister, 0xeb7 * data_factor)) + 
.InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreValOffsetRuleChangedValOffset) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_val_offset).ULEB128(0x2cf0ab1b).ULEB128(0x562) + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_val_offset).ULEB128(0x2cf0ab1b).ULEB128(0xe88) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, ValOffsetRule(fde_start, 0x2cf0ab1b, + kCFARegister, 0x562 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, ValOffsetRule(fde_start + code_factor, 0x2cf0ab1b, + kCFARegister, 0xe88 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, ValOffsetRule(fde_start + 2 * code_factor, 0x2cf0ab1b, + kCFARegister, 0x562 * data_factor)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreRegisterRuleUnchanged) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_register).ULEB128(0x77514acc).ULEB128(0x464de4ce) + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, RegisterRule(fde_start, 0x77514acc, 0x464de4ce)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreRegisterRuleChanged) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_register).ULEB128(0xe39acce5).ULEB128(0x095f1559) + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_undefined).ULEB128(0xe39acce5) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, RegisterRule(fde_start, 0xe39acce5, 0x095f1559)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, UndefinedRule(fde_start + code_factor, 0xe39acce5)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, RegisterRule(fde_start + 2 * code_factor, 0xe39acce5, + 0x095f1559)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreRegisterRuleChangedRegister) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_register).ULEB128(0xd40e21b1).ULEB128(0x16607d6a) + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_register).ULEB128(0xd40e21b1).ULEB128(0xbabb4742) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, RegisterRule(fde_start, 0xd40e21b1, 0x16607d6a)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, RegisterRule(fde_start + code_factor, 0xd40e21b1, + 0xbabb4742)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, RegisterRule(fde_start + 2 * code_factor, 0xd40e21b1, + 0x16607d6a)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, 
RestoreExpressionRuleUnchanged) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_expression).ULEB128(0x666ae152).Block("dwarf") + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, ExpressionRule(fde_start, 0x666ae152, "dwarf")) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreExpressionRuleChanged) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_expression).ULEB128(0xb5ca5c46).Block("elf") + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_undefined).ULEB128(0xb5ca5c46) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, ExpressionRule(fde_start, 0xb5ca5c46, "elf")) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, UndefinedRule(fde_start + code_factor, 0xb5ca5c46)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, ExpressionRule(fde_start + 2 * code_factor, 0xb5ca5c46, + "elf")) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreExpressionRuleChangedExpression) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_expression).ULEB128(0x500f5739).Block("smurf") + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_expression).ULEB128(0x500f5739).Block("orc") + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, ExpressionRule(fde_start, 0x500f5739, "smurf")) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, ExpressionRule(fde_start + code_factor, 0x500f5739, + "orc")) + .InSequence(s).WillOnce(Return(true)); + // Expectations are not wishes. 
+ EXPECT_CALL(handler, ExpressionRule(fde_start + 2 * code_factor, 0x500f5739, + "smurf")) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreValExpressionRuleUnchanged) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_val_expression).ULEB128(0x666ae152) + .Block("hideous") + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + EXPECT_CALL(handler, ValExpressionRule(fde_start, 0x666ae152, "hideous")) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreValExpressionRuleChanged) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_val_expression).ULEB128(0xb5ca5c46) + .Block("revolting") + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_undefined).ULEB128(0xb5ca5c46) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("RestoreValExpressionRuleChanged", section); + + EXPECT_CALL(handler, ValExpressionRule(fde_start, 0xb5ca5c46, "revolting")) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, UndefinedRule(fde_start + code_factor, 0xb5ca5c46)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, ValExpressionRule(fde_start + 2 * code_factor, 0xb5ca5c46, + "revolting")) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +TEST_F(CFIRestore, RestoreValExpressionRuleChangedValExpression) { + CFISection section(kLittleEndian, 4); + StockCIEAndFDE(§ion); + section + .D8(dwarf2reader::DW_CFA_val_expression).ULEB128(0x500f5739) + .Block("repulsive") + .D8(dwarf2reader::DW_CFA_remember_state) + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_val_expression).ULEB128(0x500f5739) + .Block("nauseous") + .D8(dwarf2reader::DW_CFA_advance_loc | 1) + .D8(dwarf2reader::DW_CFA_restore_state) + .FinishEntry(); + + PERHAPS_WRITE_DEBUG_FRAME_FILE("RestoreValExpressionRuleChangedValExpression", + section); + + EXPECT_CALL(handler, ValExpressionRule(fde_start, 0x500f5739, "repulsive")) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, ValExpressionRule(fde_start + code_factor, 0x500f5739, + "nauseous")) + .InSequence(s).WillOnce(Return(true)); + // Expectations are not wishes. + EXPECT_CALL(handler, ValExpressionRule(fde_start + 2 * code_factor, 0x500f5739, + "repulsive")) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()).WillOnce(Return(true)); + + ParseSection(§ion); +} + +struct EHFrameFixture: public CFIInsnFixture { + EHFrameFixture() + : CFIInsnFixture(), section(kBigEndian, 4, true) { + encoded_pointer_bases.cfi = 0x7f496cb2; + encoded_pointer_bases.text = 0x540f67b6; + encoded_pointer_bases.data = 0xe3eab768; + section.SetEncodedPointerBases(encoded_pointer_bases); + } + CFISection section; + CFISection::EncodedPointerBases encoded_pointer_bases; + + // Parse CFIInsnFixture::ParseSection, but parse the section as + // .eh_frame data, supplying stock base addresses. 
+ void ParseEHFrameSection(CFISection *section, bool succeeds = true) { + EXPECT_TRUE(section->ContainsEHFrame()); + string contents; + EXPECT_TRUE(section->GetContents(&contents)); + dwarf2reader::Endianness endianness; + if (section->endianness() == kBigEndian) + endianness = ENDIANNESS_BIG; + else { + assert(section->endianness() == kLittleEndian); + endianness = ENDIANNESS_LITTLE; + } + ByteReader byte_reader(endianness); + byte_reader.SetAddressSize(section->AddressSize()); + byte_reader.SetCFIDataBase(encoded_pointer_bases.cfi, contents.data()); + byte_reader.SetTextBase(encoded_pointer_bases.text); + byte_reader.SetDataBase(encoded_pointer_bases.data); + CallFrameInfo parser(contents.data(), contents.size(), + &byte_reader, &handler, &reporter, true); + if (succeeds) + EXPECT_TRUE(parser.Start()); + else + EXPECT_FALSE(parser.Start()); + } + +}; + +class EHFrame: public EHFrameFixture, public Test { }; + +// A simple CIE, an FDE, and a terminator. +TEST_F(EHFrame, Terminator) { + Label cie; + section + .Mark(&cie) + .CIEHeader(9968, 2466, 67, 1, "") + .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(3772).ULEB128(1372) + .FinishEntry() + .FDEHeader(cie, 0x848037a1, 0x7b30475e) + .D8(dwarf2reader::DW_CFA_set_loc).D32(0x17713850) + .D8(dwarf2reader::DW_CFA_undefined).ULEB128(5721) + .FinishEntry() + .D32(0) // Terminate the sequence. + // This FDE should be ignored. + .FDEHeader(cie, 0xf19629fe, 0x439fb09b) + .FinishEntry(); + + PERHAPS_WRITE_EH_FRAME_FILE("EHFrame.Terminator", section); + + EXPECT_CALL(handler, Entry(_, 0x848037a1, 0x7b30475e, 1, "", 67)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, ValOffsetRule(0x848037a1, kCFARegister, 3772, 1372)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, UndefinedRule(0x17713850, 5721)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(reporter, EarlyEHTerminator(_)) + .InSequence(s).WillOnce(Return()); + + ParseEHFrameSection(§ion); +} + +// The parser should recognize the Linux Standards Base 'z' augmentations. 
+TEST_F(EHFrame, SimpleFDE) { + DwarfPointerEncoding lsda_encoding = + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_indirect + | dwarf2reader::DW_EH_PE_datarel + | dwarf2reader::DW_EH_PE_sdata2); + DwarfPointerEncoding fde_encoding = + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_textrel + | dwarf2reader::DW_EH_PE_udata2); + + section.SetPointerEncoding(fde_encoding); + section.SetEncodedPointerBases(encoded_pointer_bases); + Label cie; + section + .Mark(&cie) + .CIEHeader(4873, 7012, 100, 1, "zSLPR") + .ULEB128(7) // Augmentation data length + .D8(lsda_encoding) // LSDA pointer format + .D8(dwarf2reader::DW_EH_PE_pcrel) // personality pointer format + .EncodedPointer(0x97baa00, dwarf2reader::DW_EH_PE_pcrel) // and value + .D8(fde_encoding) // FDE pointer format + .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(6706).ULEB128(31) + .FinishEntry() + .FDEHeader(cie, 0x540f6b56, 0xf686) + .ULEB128(2) // Augmentation data length + .EncodedPointer(0xe3eab475, lsda_encoding) // LSDA pointer, signed + .D8(dwarf2reader::DW_CFA_set_loc) + .EncodedPointer(0x540fa4ce, fde_encoding) + .D8(dwarf2reader::DW_CFA_undefined).ULEB128(0x675e) + .FinishEntry() + .D32(0); // terminator + + PERHAPS_WRITE_EH_FRAME_FILE("EHFrame.SimpleFDE", section); + + EXPECT_CALL(handler, Entry(_, 0x540f6b56, 0xf686, 1, "zSLPR", 100)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, PersonalityRoutine(0x97baa00, false)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, LanguageSpecificDataArea(0xe3eab475, true)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, SignalHandler()) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, ValOffsetRule(0x540f6b56, kCFARegister, 6706, 31)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, UndefinedRule(0x540fa4ce, 0x675e)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()) + .InSequence(s).WillOnce(Return(true)); + + ParseEHFrameSection(§ion); +} + +// Check that we can handle an empty 'z' augmentation. +TEST_F(EHFrame, EmptyZ) { + Label cie; + section + .Mark(&cie) + .CIEHeader(5955, 5805, 228, 1, "z") + .ULEB128(0) // Augmentation data length + .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(3629).ULEB128(247) + .FinishEntry() + .FDEHeader(cie, 0xda007738, 0xfb55c641) + .ULEB128(0) // Augmentation data length + .D8(dwarf2reader::DW_CFA_advance_loc1).D8(11) + .D8(dwarf2reader::DW_CFA_undefined).ULEB128(3769) + .FinishEntry(); + + PERHAPS_WRITE_EH_FRAME_FILE("EHFrame.EmptyZ", section); + + EXPECT_CALL(handler, Entry(_, 0xda007738, 0xfb55c641, 1, "z", 228)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, ValOffsetRule(0xda007738, kCFARegister, 3629, 247)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, UndefinedRule(0xda007738 + 11 * 5955, 3769)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()) + .InSequence(s).WillOnce(Return(true)); + + ParseEHFrameSection(§ion); +} + +// Check that we recognize bad 'z' augmentation characters. 
+TEST_F(EHFrame, BadZ) { + Label cie; + section + .Mark(&cie) + .CIEHeader(6937, 1045, 142, 1, "zQ") + .ULEB128(0) // Augmentation data length + .D8(dwarf2reader::DW_CFA_def_cfa).ULEB128(9006).ULEB128(7725) + .FinishEntry() + .FDEHeader(cie, 0x1293efa8, 0x236f53f2) + .ULEB128(0) // Augmentation data length + .D8(dwarf2reader::DW_CFA_advance_loc | 12) + .D8(dwarf2reader::DW_CFA_register).ULEB128(5667).ULEB128(3462) + .FinishEntry(); + + PERHAPS_WRITE_EH_FRAME_FILE("EHFrame.BadZ", section); + + EXPECT_CALL(reporter, UnrecognizedAugmentation(_, "zQ")) + .WillOnce(Return()); + + ParseEHFrameSection(§ion, false); +} + +TEST_F(EHFrame, zL) { + Label cie; + DwarfPointerEncoding lsda_encoding = + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_funcrel + | dwarf2reader::DW_EH_PE_udata2); + section + .Mark(&cie) + .CIEHeader(9285, 9959, 54, 1, "zL") + .ULEB128(1) // Augmentation data length + .D8(lsda_encoding) // encoding for LSDA pointer in FDE + + .FinishEntry() + .FDEHeader(cie, 0xd40091aa, 0x9aa6e746) + .ULEB128(2) // Augmentation data length + .EncodedPointer(0xd40099cd, lsda_encoding) // LSDA pointer + .FinishEntry() + .D32(0); // terminator + + PERHAPS_WRITE_EH_FRAME_FILE("EHFrame.zL", section); + + EXPECT_CALL(handler, Entry(_, 0xd40091aa, 0x9aa6e746, 1, "zL", 54)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, LanguageSpecificDataArea(0xd40099cd, false)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()) + .InSequence(s).WillOnce(Return(true)); + + ParseEHFrameSection(§ion); +} + +TEST_F(EHFrame, zP) { + Label cie; + DwarfPointerEncoding personality_encoding = + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_datarel + | dwarf2reader::DW_EH_PE_udata2); + section + .Mark(&cie) + .CIEHeader(1097, 6313, 17, 1, "zP") + .ULEB128(3) // Augmentation data length + .D8(personality_encoding) // encoding for personality routine + .EncodedPointer(0xe3eaccac, personality_encoding) // value + .FinishEntry() + .FDEHeader(cie, 0x0c8350c9, 0xbef11087) + .ULEB128(0) // Augmentation data length + .FinishEntry() + .D32(0); // terminator + + PERHAPS_WRITE_EH_FRAME_FILE("EHFrame.zP", section); + + EXPECT_CALL(handler, Entry(_, 0x0c8350c9, 0xbef11087, 1, "zP", 17)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, PersonalityRoutine(0xe3eaccac, false)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()) + .InSequence(s).WillOnce(Return(true)); + + ParseEHFrameSection(§ion); +} + +TEST_F(EHFrame, zR) { + Label cie; + DwarfPointerEncoding pointer_encoding = + DwarfPointerEncoding(dwarf2reader::DW_EH_PE_textrel + | dwarf2reader::DW_EH_PE_sdata2); + section.SetPointerEncoding(pointer_encoding); + section + .Mark(&cie) + .CIEHeader(8011, 5496, 75, 1, "zR") + .ULEB128(1) // Augmentation data length + .D8(pointer_encoding) // encoding for FDE addresses + .FinishEntry() + .FDEHeader(cie, 0x540f9431, 0xbd0) + .ULEB128(0) // Augmentation data length + .FinishEntry() + .D32(0); // terminator + + PERHAPS_WRITE_EH_FRAME_FILE("EHFrame.zR", section); + + EXPECT_CALL(handler, Entry(_, 0x540f9431, 0xbd0, 1, "zR", 75)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()) + .InSequence(s).WillOnce(Return(true)); + + ParseEHFrameSection(§ion); +} + +TEST_F(EHFrame, zS) { + Label cie; + section + .Mark(&cie) + .CIEHeader(9217, 7694, 57, 1, "zS") + .ULEB128(0) // Augmentation data length + .FinishEntry() + .FDEHeader(cie, 0xd40091aa, 0x9aa6e746) + .ULEB128(0) // Augmentation data length + .FinishEntry() + .D32(0); // terminator + + 
PERHAPS_WRITE_EH_FRAME_FILE("EHFrame.zS", section); + + EXPECT_CALL(handler, Entry(_, 0xd40091aa, 0x9aa6e746, 1, "zS", 57)) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, SignalHandler()) + .InSequence(s).WillOnce(Return(true)); + EXPECT_CALL(handler, End()) + .InSequence(s).WillOnce(Return(true)); + + ParseEHFrameSection(§ion); +} + +// These tests require manual inspection of the test output. +struct CFIReporterFixture { + CFIReporterFixture() : reporter("test file name", "test section name") { } + CallFrameInfo::Reporter reporter; +}; + +class CFIReporter: public CFIReporterFixture, public Test { }; + +TEST_F(CFIReporter, Incomplete) { + reporter.Incomplete(0x0102030405060708ULL, CallFrameInfo::kUnknown); +} + +TEST_F(CFIReporter, EarlyEHTerminator) { + reporter.EarlyEHTerminator(0x0102030405060708ULL); +} + +TEST_F(CFIReporter, CIEPointerOutOfRange) { + reporter.CIEPointerOutOfRange(0x0123456789abcdefULL, 0xfedcba9876543210ULL); +} + +TEST_F(CFIReporter, BadCIEId) { + reporter.BadCIEId(0x0123456789abcdefULL, 0xfedcba9876543210ULL); +} + +TEST_F(CFIReporter, UnrecognizedVersion) { + reporter.UnrecognizedVersion(0x0123456789abcdefULL, 43); +} + +TEST_F(CFIReporter, UnrecognizedAugmentation) { + reporter.UnrecognizedAugmentation(0x0123456789abcdefULL, "poodles"); +} + +TEST_F(CFIReporter, InvalidPointerEncoding) { + reporter.InvalidPointerEncoding(0x0123456789abcdefULL, 0x42); +} + +TEST_F(CFIReporter, UnusablePointerEncoding) { + reporter.UnusablePointerEncoding(0x0123456789abcdefULL, 0x42); +} + +TEST_F(CFIReporter, RestoreInCIE) { + reporter.RestoreInCIE(0x0123456789abcdefULL, 0xfedcba9876543210ULL); +} + +TEST_F(CFIReporter, BadInstruction) { + reporter.BadInstruction(0x0123456789abcdefULL, CallFrameInfo::kFDE, + 0xfedcba9876543210ULL); +} + +TEST_F(CFIReporter, NoCFARule) { + reporter.NoCFARule(0x0123456789abcdefULL, CallFrameInfo::kCIE, + 0xfedcba9876543210ULL); +} + +TEST_F(CFIReporter, EmptyStateStack) { + reporter.EmptyStateStack(0x0123456789abcdefULL, CallFrameInfo::kTerminator, + 0xfedcba9876543210ULL); +} + +TEST_F(CFIReporter, ClearingCFARule) { + reporter.ClearingCFARule(0x0123456789abcdefULL, CallFrameInfo::kFDE, + 0xfedcba9876543210ULL); +} + +#ifdef WRITE_ELF +// See comments at the top of the file mentioning WRITE_ELF for details. + +using google_breakpad::test_assembler::Section; + +struct ELFSectionHeader { + ELFSectionHeader(unsigned int set_type) + : type(set_type), flags(0), address(0), link(0), info(0), + alignment(1), entry_size(0) { } + Label name; + unsigned int type; + uint64_t flags; + uint64_t address; + Label file_offset; + Label file_size; + unsigned int link; + unsigned int info; + uint64_t alignment; + uint64_t entry_size; +}; + +void AppendSectionHeader(CFISection *table, const ELFSectionHeader &header) { + (*table) + .D32(header.name) // name, index in string tbl + .D32(header.type) // type + .Address(header.flags) // flags + .Address(header.address) // address in memory + .Address(header.file_offset) // offset in ELF file + .Address(header.file_size) // length in bytes + .D32(header.link) // link to related section + .D32(header.info) // miscellaneous + .Address(header.alignment) // alignment + .Address(header.entry_size); // entry size +} + +void WriteELFFrameSection(const char *filename, const char *cfi_name, + const CFISection &cfi) { + int elf_class = cfi.AddressSize() == 4 ? ELFCLASS32 : ELFCLASS64; + int elf_data = (cfi.endianness() == kBigEndian + ? 
ELFDATA2MSB : ELFDATA2LSB);
+  CFISection elf(cfi.endianness(), cfi.AddressSize());
+  Label elf_header_size, section_table_offset;
+  elf
+    .Append("\x7f" "ELF")
+    .D8(elf_class)              // 32-bit or 64-bit ELF
+    .D8(elf_data)               // endianness
+    .D8(1)                      // ELF version
+    .D8(ELFOSABI_LINUX)         // Operating System/ABI indication
+    .D8(0)                      // ABI version
+    .Append(7, 0xda)            // padding
+    .D16(ET_EXEC)               // file type: executable file
+    .D16(EM_386)                // architecture: Intel IA-32
+    .D32(EV_CURRENT);           // ELF version
+  elf
+    .Address(0x0123456789abcdefULL) // program entry point
+    .Address(0)                 // program header offset
+    .Address(section_table_offset) // section header offset
+    .D32(0)                     // processor-specific flags
+    .D16(elf_header_size)       // ELF header size in bytes */
+    .D16(elf_class == ELFCLASS32 ? 32 : 56) // program header entry size
+    .D16(0)                     // program header table entry count
+    .D16(elf_class == ELFCLASS32 ? 40 : 64) // section header entry size
+    .D16(3)                     // section count
+    .D16(1)                     // section name string table
+    .Mark(&elf_header_size);
+
+  // The null section. Every ELF file has one, as the first entry in
+  // the section header table.
+  ELFSectionHeader null_header(SHT_NULL);
+  null_header.file_offset = 0;
+  null_header.file_size = 0;
+
+  // The CFI section. The whole reason for writing out this ELF file
+  // is to put this in it so that we can run other dumping programs on
+  // it to check its contents.
+  ELFSectionHeader cfi_header(SHT_PROGBITS);
+  cfi_header.file_size = cfi.Size();
+
+  // The section holding the names of the sections. This is the
+  // section whose index appears in the e_shstrndx member of the ELF
+  // header.
+  ELFSectionHeader section_names_header(SHT_STRTAB);
+  CFISection section_names(cfi.endianness(), cfi.AddressSize());
+  section_names
+    .Mark(&null_header.name)
+    .AppendCString("")
+    .Mark(&section_names_header.name)
+    .AppendCString(".shstrtab")
+    .Mark(&cfi_header.name)
+    .AppendCString(cfi_name)
+    .Mark(&section_names_header.file_size);
+
+  // Create the section table. The ELF header's e_shoff member refers
+  // to this, and the e_shnum member gives the number of entries it
+  // contains.
+  CFISection section_table(cfi.endianness(), cfi.AddressSize());
+  AppendSectionHeader(&section_table, null_header);
+  AppendSectionHeader(&section_table, section_names_header);
+  AppendSectionHeader(&section_table, cfi_header);
+
+  // Append the section table and the section contents to the ELF file.
+ elf + .Mark(§ion_table_offset) + .Append(section_table) + .Mark(§ion_names_header.file_offset) + .Append(section_names) + .Mark(&cfi_header.file_offset) + .Append(cfi); + + string contents; + if (!elf.GetContents(&contents)) { + fprintf(stderr, "failed to get ELF file contents\n"); + exit(1); + } + + FILE *out = fopen(filename, "w"); + if (!out) { + fprintf(stderr, "error opening ELF file '%s': %s\n", + filename, strerror(errno)); + exit(1); + } + + if (fwrite(contents.data(), 1, contents.size(), out) != contents.size()) { + fprintf(stderr, "error writing ELF data to '%s': %s\n", + filename, strerror(errno)); + exit(1); + } + + if (fclose(out) == EOF) { + fprintf(stderr, "error closing ELF file '%s': %s\n", + filename, strerror(errno)); + exit(1); + } +} +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2reader_die_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2reader_die_unittest.cc new file mode 100644 index 0000000000..4e34436931 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2reader_die_unittest.cc @@ -0,0 +1,484 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +// Original author: Jim Blandy + +// dwarf2reader_die_unittest.cc: Unit tests for dwarf2reader::CompilationUnit + +#include + +#include +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/dwarf/bytereader-inl.h" +#include "common/dwarf/dwarf2reader_test_common.h" +#include "common/dwarf/dwarf2reader.h" +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" + +using google_breakpad::test_assembler::Endianness; +using google_breakpad::test_assembler::Label; +using google_breakpad::test_assembler::Section; +using google_breakpad::test_assembler::kBigEndian; +using google_breakpad::test_assembler::kLittleEndian; + +using dwarf2reader::ByteReader; +using dwarf2reader::CompilationUnit; +using dwarf2reader::Dwarf2Handler; +using dwarf2reader::DwarfAttribute; +using dwarf2reader::DwarfForm; +using dwarf2reader::DwarfHasChild; +using dwarf2reader::DwarfTag; +using dwarf2reader::ENDIANNESS_BIG; +using dwarf2reader::ENDIANNESS_LITTLE; +using dwarf2reader::SectionMap; + +using std::vector; +using testing::InSequence; +using testing::Pointee; +using testing::Return; +using testing::Sequence; +using testing::Test; +using testing::TestWithParam; +using testing::_; + +class MockDwarf2Handler: public Dwarf2Handler { + public: + MOCK_METHOD5(StartCompilationUnit, bool(uint64 offset, uint8 address_size, + uint8 offset_size, uint64 cu_length, + uint8 dwarf_version)); + MOCK_METHOD2(StartDIE, bool(uint64 offset, enum DwarfTag tag)); + MOCK_METHOD4(ProcessAttributeUnsigned, void(uint64 offset, + DwarfAttribute attr, + enum DwarfForm form, + uint64 data)); + MOCK_METHOD4(ProcessAttributeSigned, void(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + int64 data)); + MOCK_METHOD4(ProcessAttributeReference, void(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data)); + MOCK_METHOD5(ProcessAttributeBuffer, void(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + const char* data, + uint64 len)); + MOCK_METHOD4(ProcessAttributeString, void(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + const string& data)); + MOCK_METHOD4(ProcessAttributeSignature, void(uint64 offset, + DwarfAttribute attr, + enum DwarfForm form, + uint64 signature)); + MOCK_METHOD1(EndDIE, void(uint64 offset)); +}; + +struct DIEFixture { + + DIEFixture() { + // Fix the initial offset of the .debug_info and .debug_abbrev sections. + info.start() = 0; + abbrevs.start() = 0; + + // Default expectations for the data handler. + EXPECT_CALL(handler, StartCompilationUnit(_, _, _, _, _)).Times(0); + EXPECT_CALL(handler, StartDIE(_, _)).Times(0); + EXPECT_CALL(handler, ProcessAttributeUnsigned(_, _, _, _)).Times(0); + EXPECT_CALL(handler, ProcessAttributeSigned(_, _, _, _)).Times(0); + EXPECT_CALL(handler, ProcessAttributeReference(_, _, _, _)).Times(0); + EXPECT_CALL(handler, ProcessAttributeBuffer(_, _, _, _, _)).Times(0); + EXPECT_CALL(handler, ProcessAttributeString(_, _, _, _)).Times(0); + EXPECT_CALL(handler, EndDIE(_)).Times(0); + } + + // Return a reference to a section map whose .debug_info section refers + // to |info|, and whose .debug_abbrev section refers to |abbrevs|. This + // function returns a reference to the same SectionMap each time; new + // calls wipe out maps established by earlier calls. + const SectionMap &MakeSectionMap() { + // Copy the sections' contents into strings that will live as long as + // the map itself. 
+ assert(info.GetContents(&info_contents)); + assert(abbrevs.GetContents(&abbrevs_contents)); + section_map.clear(); + section_map[".debug_info"].first = info_contents.data(); + section_map[".debug_info"].second = info_contents.size(); + section_map[".debug_abbrev"].first = abbrevs_contents.data(); + section_map[".debug_abbrev"].second = abbrevs_contents.size(); + return section_map; + } + + TestCompilationUnit info; + TestAbbrevTable abbrevs; + MockDwarf2Handler handler; + string abbrevs_contents, info_contents; + SectionMap section_map; +}; + +struct DwarfHeaderParams { + DwarfHeaderParams(Endianness endianness, size_t format_size, + int version, size_t address_size) + : endianness(endianness), format_size(format_size), + version(version), address_size(address_size) { } + Endianness endianness; + size_t format_size; // 4-byte or 8-byte DWARF offsets + int version; + size_t address_size; +}; + +class DwarfHeader: public DIEFixture, + public TestWithParam { }; + +TEST_P(DwarfHeader, Header) { + Label abbrev_table = abbrevs.Here(); + abbrevs.Abbrev(1, dwarf2reader::DW_TAG_compile_unit, + dwarf2reader::DW_children_yes) + .Attribute(dwarf2reader::DW_AT_name, dwarf2reader::DW_FORM_string) + .EndAbbrev() + .EndTable(); + + info.set_format_size(GetParam().format_size); + info.set_endianness(GetParam().endianness); + + info.Header(GetParam().version, abbrev_table, GetParam().address_size) + .ULEB128(1) // DW_TAG_compile_unit, with children + .AppendCString("sam") // DW_AT_name, DW_FORM_string + .D8(0); // end of children + info.Finish(); + + { + InSequence s; + EXPECT_CALL(handler, + StartCompilationUnit(0, GetParam().address_size, + GetParam().format_size, _, + GetParam().version)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, StartDIE(_, dwarf2reader::DW_TAG_compile_unit)) + .WillOnce(Return(true)); + EXPECT_CALL(handler, ProcessAttributeString(_, dwarf2reader::DW_AT_name, + dwarf2reader::DW_FORM_string, + "sam")) + .WillOnce(Return()); + EXPECT_CALL(handler, EndDIE(_)) + .WillOnce(Return()); + } + + ByteReader byte_reader(GetParam().endianness == kLittleEndian ? 
+                         ENDIANNESS_LITTLE : ENDIANNESS_BIG);
+  CompilationUnit parser(MakeSectionMap(), 0, &byte_reader, &handler);
+  EXPECT_EQ(parser.Start(), info_contents.size());
+}
+
+INSTANTIATE_TEST_CASE_P(
+    HeaderVariants, DwarfHeader,
+    ::testing::Values(DwarfHeaderParams(kLittleEndian, 4, 2, 4),
+                      DwarfHeaderParams(kLittleEndian, 4, 2, 8),
+                      DwarfHeaderParams(kLittleEndian, 4, 3, 4),
+                      DwarfHeaderParams(kLittleEndian, 4, 3, 8),
+                      DwarfHeaderParams(kLittleEndian, 4, 4, 4),
+                      DwarfHeaderParams(kLittleEndian, 4, 4, 8),
+                      DwarfHeaderParams(kLittleEndian, 8, 2, 4),
+                      DwarfHeaderParams(kLittleEndian, 8, 2, 8),
+                      DwarfHeaderParams(kLittleEndian, 8, 3, 4),
+                      DwarfHeaderParams(kLittleEndian, 8, 3, 8),
+                      DwarfHeaderParams(kLittleEndian, 8, 4, 4),
+                      DwarfHeaderParams(kLittleEndian, 8, 4, 8),
+                      DwarfHeaderParams(kBigEndian, 4, 2, 4),
+                      DwarfHeaderParams(kBigEndian, 4, 2, 8),
+                      DwarfHeaderParams(kBigEndian, 4, 3, 4),
+                      DwarfHeaderParams(kBigEndian, 4, 3, 8),
+                      DwarfHeaderParams(kBigEndian, 4, 4, 4),
+                      DwarfHeaderParams(kBigEndian, 4, 4, 8),
+                      DwarfHeaderParams(kBigEndian, 8, 2, 4),
+                      DwarfHeaderParams(kBigEndian, 8, 2, 8),
+                      DwarfHeaderParams(kBigEndian, 8, 3, 4),
+                      DwarfHeaderParams(kBigEndian, 8, 3, 8),
+                      DwarfHeaderParams(kBigEndian, 8, 4, 4),
+                      DwarfHeaderParams(kBigEndian, 8, 4, 8)));
+
+struct DwarfFormsFixture: public DIEFixture {
+  // Start a compilation unit, as directed by |params|, containing one
+  // childless DIE of the given tag, with one attribute of the given name
+  // and form. The 'info' fixture member is left just after the abbrev
+  // code, waiting for the attribute value to be appended.
+  void StartSingleAttributeDIE(const DwarfHeaderParams &params,
+                               DwarfTag tag, DwarfAttribute name,
+                               DwarfForm form) {
+    // Create the abbreviation table.
+    Label abbrev_table = abbrevs.Here();
+    abbrevs.Abbrev(1, tag, dwarf2reader::DW_children_no)
+        .Attribute(name, form)
+        .EndAbbrev()
+        .EndTable();
+
+    // Create the compilation unit, up to the attribute value.
+    info.set_format_size(params.format_size);
+    info.set_endianness(params.endianness);
+    info.Header(params.version, abbrev_table, params.address_size)
+        .ULEB128(1);                    // abbrev code
+  }
+
+  // Set up handler to expect a compilation unit matching |params|,
+  // containing one childless DIE of the given tag, in the sequence s. Stop
+  // just before the expectations.
+  void ExpectBeginCompilationUnit(const DwarfHeaderParams &params,
+                                  DwarfTag tag, uint64 offset=0) {
+    EXPECT_CALL(handler,
+                StartCompilationUnit(offset, params.address_size,
+                                     params.format_size, _,
+                                     params.version))
+        .InSequence(s)
+        .WillOnce(Return(true));
+    EXPECT_CALL(handler, StartDIE(_, tag))
+        .InSequence(s)
+        .WillOnce(Return(true));
+  }
+
+  void ExpectEndCompilationUnit() {
+    EXPECT_CALL(handler, EndDIE(_))
+        .InSequence(s)
+        .WillOnce(Return());
+  }
+
+  void ParseCompilationUnit(const DwarfHeaderParams &params, uint64 offset=0) {
+    ByteReader byte_reader(params.endianness == kLittleEndian ?
+                           ENDIANNESS_LITTLE : ENDIANNESS_BIG);
+    CompilationUnit parser(MakeSectionMap(), offset, &byte_reader, &handler);
+    EXPECT_EQ(offset + parser.Start(), info_contents.size());
+  }
+
+  // The sequence to which the fixture's methods append expectations.
+ Sequence s; +}; + +struct DwarfForms: public DwarfFormsFixture, + public TestWithParam { }; + +TEST_P(DwarfForms, addr) { + StartSingleAttributeDIE(GetParam(), dwarf2reader::DW_TAG_compile_unit, + dwarf2reader::DW_AT_low_pc, + dwarf2reader::DW_FORM_addr); + uint64_t value; + if (GetParam().address_size == 4) { + value = 0xc8e9ffcc; + info.D32(value); + } else { + value = 0xe942517fc2768564ULL; + info.D64(value); + } + info.Finish(); + + ExpectBeginCompilationUnit(GetParam(), dwarf2reader::DW_TAG_compile_unit); + EXPECT_CALL(handler, ProcessAttributeUnsigned(_, dwarf2reader::DW_AT_low_pc, + dwarf2reader::DW_FORM_addr, + value)) + .InSequence(s) + .WillOnce(Return()); + ExpectEndCompilationUnit(); + + ParseCompilationUnit(GetParam()); +} + +TEST_P(DwarfForms, block2_empty) { + StartSingleAttributeDIE(GetParam(), (DwarfTag) 0x16e4d2f7, + (DwarfAttribute) 0xe52c4463, + dwarf2reader::DW_FORM_block2); + info.D16(0); + info.Finish(); + + ExpectBeginCompilationUnit(GetParam(), (DwarfTag) 0x16e4d2f7); + EXPECT_CALL(handler, ProcessAttributeBuffer(_, (DwarfAttribute) 0xe52c4463, + dwarf2reader::DW_FORM_block2, + _, 0)) + .InSequence(s) + .WillOnce(Return()); + ExpectEndCompilationUnit(); + + ParseCompilationUnit(GetParam()); +} + +TEST_P(DwarfForms, block2) { + StartSingleAttributeDIE(GetParam(), (DwarfTag) 0x16e4d2f7, + (DwarfAttribute) 0xe52c4463, + dwarf2reader::DW_FORM_block2); + unsigned char data[258]; + memset(data, '*', sizeof(data)); + info.D16(sizeof(data)) + .Append(data, sizeof(data)); + info.Finish(); + + ExpectBeginCompilationUnit(GetParam(), (DwarfTag) 0x16e4d2f7); + EXPECT_CALL(handler, ProcessAttributeBuffer(_, (DwarfAttribute) 0xe52c4463, + dwarf2reader::DW_FORM_block2, + Pointee('*'), 258)) + .InSequence(s) + .WillOnce(Return()); + ExpectEndCompilationUnit(); + + ParseCompilationUnit(GetParam()); +} + +TEST_P(DwarfForms, flag_present) { + StartSingleAttributeDIE(GetParam(), (DwarfTag) 0x3e449ac2, + (DwarfAttribute) 0x359d1972, + dwarf2reader::DW_FORM_flag_present); + // DW_FORM_flag_present occupies no space in the DIE. 
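+  // (Its mere presence in the abbreviation means "true"; the reader is
+  // expected to report it as an unsigned value of 1, as the expectation
+  // below requires.)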
+ info.Finish(); + + ExpectBeginCompilationUnit(GetParam(), (DwarfTag) 0x3e449ac2); + EXPECT_CALL(handler, + ProcessAttributeUnsigned(_, (DwarfAttribute) 0x359d1972, + dwarf2reader::DW_FORM_flag_present, + 1)) + .InSequence(s) + .WillOnce(Return()); + ExpectEndCompilationUnit(); + + ParseCompilationUnit(GetParam()); +} + +TEST_P(DwarfForms, sec_offset) { + StartSingleAttributeDIE(GetParam(), (DwarfTag) 0x1d971689, + (DwarfAttribute) 0xa060bfd1, + dwarf2reader::DW_FORM_sec_offset); + uint64_t value; + if (GetParam().format_size == 4) { + value = 0xacc9c388; + info.D32(value); + } else { + value = 0xcffe5696ffe3ed0aULL; + info.D64(value); + } + info.Finish(); + + ExpectBeginCompilationUnit(GetParam(), (DwarfTag) 0x1d971689); + EXPECT_CALL(handler, ProcessAttributeUnsigned(_, (DwarfAttribute) 0xa060bfd1, + dwarf2reader::DW_FORM_sec_offset, + value)) + .InSequence(s) + .WillOnce(Return()); + ExpectEndCompilationUnit(); + + ParseCompilationUnit(GetParam()); +} + +TEST_P(DwarfForms, exprloc) { + StartSingleAttributeDIE(GetParam(), (DwarfTag) 0xb6d167bb, + (DwarfAttribute) 0xba3ae5cb, + dwarf2reader::DW_FORM_exprloc); + info.ULEB128(29) + .Append(29, 173); + info.Finish(); + + ExpectBeginCompilationUnit(GetParam(), (DwarfTag) 0xb6d167bb); + EXPECT_CALL(handler, ProcessAttributeBuffer(_, (DwarfAttribute) 0xba3ae5cb, + dwarf2reader::DW_FORM_exprloc, + Pointee(173), 29)) + .InSequence(s) + .WillOnce(Return()); + ExpectEndCompilationUnit(); + + ParseCompilationUnit(GetParam()); +} + +TEST_P(DwarfForms, ref_sig8) { + StartSingleAttributeDIE(GetParam(), (DwarfTag) 0x253e7b2b, + (DwarfAttribute) 0xd708d908, + dwarf2reader::DW_FORM_ref_sig8); + info.D64(0xf72fa0cb6ddcf9d6ULL); + info.Finish(); + + ExpectBeginCompilationUnit(GetParam(), (DwarfTag) 0x253e7b2b); + EXPECT_CALL(handler, ProcessAttributeSignature(_, (DwarfAttribute) 0xd708d908, + dwarf2reader::DW_FORM_ref_sig8, + 0xf72fa0cb6ddcf9d6ULL)) + .InSequence(s) + .WillOnce(Return()); + ExpectEndCompilationUnit(); + + ParseCompilationUnit(GetParam()); +} + +// A value passed to ProcessAttributeSignature is just an absolute number, +// not an offset within the compilation unit as most of the other +// DW_FORM_ref forms are. Check that the reader doesn't try to apply any +// offset to the signature, by reading it from a compilation unit that does +// not start at the beginning of the section. +TEST_P(DwarfForms, ref_sig8_not_first) { + info.Append(98, '*'); + StartSingleAttributeDIE(GetParam(), (DwarfTag) 0x253e7b2b, + (DwarfAttribute) 0xd708d908, + dwarf2reader::DW_FORM_ref_sig8); + info.D64(0xf72fa0cb6ddcf9d6ULL); + info.Finish(); + + ExpectBeginCompilationUnit(GetParam(), (DwarfTag) 0x253e7b2b, 98); + EXPECT_CALL(handler, ProcessAttributeSignature(_, (DwarfAttribute) 0xd708d908, + dwarf2reader::DW_FORM_ref_sig8, + 0xf72fa0cb6ddcf9d6ULL)) + .InSequence(s) + .WillOnce(Return()); + ExpectEndCompilationUnit(); + + ParseCompilationUnit(GetParam(), 98); +} + +// Tests for the other attribute forms could go here. 
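+
+// A sketch of one such test (not part of the original suite): DW_FORM_udata
+// holds a ULEB128-encoded unsigned value, which the reader is assumed to
+// deliver through ProcessAttributeUnsigned like the integer forms above. The
+// tag, attribute and value constants here are arbitrary, chosen only for
+// illustration.
+TEST_P(DwarfForms, udata) {
+  StartSingleAttributeDIE(GetParam(), (DwarfTag) 0x2f43a041,
+                          (DwarfAttribute) 0x95f62ff3,
+                          dwarf2reader::DW_FORM_udata);
+  info.ULEB128(0x1596d9);
+  info.Finish();
+
+  ExpectBeginCompilationUnit(GetParam(), (DwarfTag) 0x2f43a041);
+  EXPECT_CALL(handler, ProcessAttributeUnsigned(_, (DwarfAttribute) 0x95f62ff3,
+                                                dwarf2reader::DW_FORM_udata,
+                                                0x1596d9))
+      .InSequence(s)
+      .WillOnce(Return());
+  ExpectEndCompilationUnit();
+
+  ParseCompilationUnit(GetParam());
+}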
+ +INSTANTIATE_TEST_CASE_P( + HeaderVariants, DwarfForms, + ::testing::Values(DwarfHeaderParams(kLittleEndian, 4, 2, 4), + DwarfHeaderParams(kLittleEndian, 4, 2, 8), + DwarfHeaderParams(kLittleEndian, 4, 3, 4), + DwarfHeaderParams(kLittleEndian, 4, 3, 8), + DwarfHeaderParams(kLittleEndian, 4, 4, 4), + DwarfHeaderParams(kLittleEndian, 4, 4, 8), + DwarfHeaderParams(kLittleEndian, 8, 2, 4), + DwarfHeaderParams(kLittleEndian, 8, 2, 8), + DwarfHeaderParams(kLittleEndian, 8, 3, 4), + DwarfHeaderParams(kLittleEndian, 8, 3, 8), + DwarfHeaderParams(kLittleEndian, 8, 4, 4), + DwarfHeaderParams(kLittleEndian, 8, 4, 8), + DwarfHeaderParams(kBigEndian, 4, 2, 4), + DwarfHeaderParams(kBigEndian, 4, 2, 8), + DwarfHeaderParams(kBigEndian, 4, 3, 4), + DwarfHeaderParams(kBigEndian, 4, 3, 8), + DwarfHeaderParams(kBigEndian, 4, 4, 4), + DwarfHeaderParams(kBigEndian, 4, 4, 8), + DwarfHeaderParams(kBigEndian, 8, 2, 4), + DwarfHeaderParams(kBigEndian, 8, 2, 8), + DwarfHeaderParams(kBigEndian, 8, 3, 4), + DwarfHeaderParams(kBigEndian, 8, 3, 8), + DwarfHeaderParams(kBigEndian, 8, 4, 4), + DwarfHeaderParams(kBigEndian, 8, 4, 8))); diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2reader_test_common.h b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2reader_test_common.h new file mode 100644 index 0000000000..e91de90610 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/dwarf2reader_test_common.h @@ -0,0 +1,149 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// dwarf2reader_test_common.h: Define TestCompilationUnit and +// TestAbbrevTable, classes for creating properly (and improperly) +// formatted DWARF compilation unit data for unit tests. 
+ +#ifndef COMMON_DWARF_DWARF2READER_TEST_COMMON_H__ +#define COMMON_DWARF_DWARF2READER_TEST_COMMON_H__ + +#include "common/test_assembler.h" +#include "common/dwarf/dwarf2enums.h" + +// A subclass of test_assembler::Section, specialized for constructing +// DWARF compilation units. +class TestCompilationUnit: public google_breakpad::test_assembler::Section { + public: + typedef dwarf2reader::DwarfTag DwarfTag; + typedef dwarf2reader::DwarfAttribute DwarfAttribute; + typedef dwarf2reader::DwarfForm DwarfForm; + typedef google_breakpad::test_assembler::Label Label; + + // Set the section's DWARF format size (the 32-bit DWARF format or the + // 64-bit DWARF format, for lengths and section offsets --- not the + // address size) to format_size. + void set_format_size(size_t format_size) { + assert(format_size == 4 || format_size == 8); + format_size_ = format_size; + } + + // Append a DWARF section offset value, of the appropriate size for this + // compilation unit. + template + void SectionOffset(T offset) { + if (format_size_ == 4) + D32(offset); + else + D64(offset); + } + + // Append a DWARF compilation unit header to the section, with the given + // DWARF version, abbrev table offset, and address size. + TestCompilationUnit &Header(int version, const Label &abbrev_offset, + size_t address_size) { + if (format_size_ == 4) { + D32(length_); + } else { + D32(0xffffffff); + D64(length_); + } + post_length_offset_ = Size(); + D16(version); + SectionOffset(abbrev_offset); + D8(address_size); + return *this; + } + + // Mark the end of this header's DIEs. + TestCompilationUnit &Finish() { + length_ = Size() - post_length_offset_; + return *this; + } + + private: + // The DWARF format size for this compilation unit. + size_t format_size_; + + // The offset of the point in the compilation unit header immediately + // after the initial length field. + uint64_t post_length_offset_; + + // The length of the compilation unit, not including the initial length field. + Label length_; +}; + +// A subclass of test_assembler::Section specialized for constructing DWARF +// abbreviation tables. +class TestAbbrevTable: public google_breakpad::test_assembler::Section { + public: + typedef dwarf2reader::DwarfTag DwarfTag; + typedef dwarf2reader::DwarfAttribute DwarfAttribute; + typedef dwarf2reader::DwarfForm DwarfForm; + typedef dwarf2reader::DwarfHasChild DwarfHasChild; + typedef google_breakpad::test_assembler::Label Label; + + // Start a new abbreviation table entry for abbreviation code |code|, + // encoding a DIE whose tag is |tag|, and which has children if and only + // if |has_children| is true. + TestAbbrevTable &Abbrev(int code, DwarfTag tag, DwarfHasChild has_children) { + assert(code != 0); + ULEB128(code); + ULEB128(static_cast(tag)); + D8(static_cast(has_children)); + return *this; + }; + + // Add an attribute to the current abbreviation code whose name is |name| + // and whose form is |form|. + TestAbbrevTable &Attribute(DwarfAttribute name, DwarfForm form) { + ULEB128(static_cast(name)); + ULEB128(static_cast(form)); + return *this; + } + + // Finish the current abbreviation code. + TestAbbrevTable &EndAbbrev() { + ULEB128(0); + ULEB128(0); + return *this; + } + + // Finish the current abbreviation table. 
+ TestAbbrevTable &EndTable() { + ULEB128(0); + return *this; + } +}; + +#endif // COMMON_DWARF_DWARF2READER_TEST_COMMON_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/functioninfo.cc b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/functioninfo.cc new file mode 100644 index 0000000000..55a255eda6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/functioninfo.cc @@ -0,0 +1,231 @@ +// Copyright (c) 2010 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// This is a client for the dwarf2reader to extract function and line +// information from the debug info. + +#include +#include +#include + +#include +#include +#include + +#include "common/dwarf/functioninfo.h" +#include "common/dwarf/bytereader.h" +#include "common/scoped_ptr.h" +#include "common/using_std_string.h" + +using google_breakpad::scoped_ptr; + +namespace dwarf2reader { + +CULineInfoHandler::CULineInfoHandler(std::vector* files, + std::vector* dirs, + LineMap* linemap):linemap_(linemap), + files_(files), + dirs_(dirs) { + // The dirs and files are 1 indexed, so just make sure we put + // nothing in the 0 vector. + assert(dirs->size() == 0); + assert(files->size() == 0); + dirs->push_back(""); + SourceFileInfo s; + s.name = ""; + s.lowpc = ULLONG_MAX; + files->push_back(s); +} + +void CULineInfoHandler::DefineDir(const string& name, uint32 dir_num) { + // These should never come out of order, actually + assert(dir_num == dirs_->size()); + dirs_->push_back(name); +} + +void CULineInfoHandler::DefineFile(const string& name, + int32 file_num, uint32 dir_num, + uint64 mod_time, uint64 length) { + assert(dir_num >= 0); + assert(dir_num < dirs_->size()); + + // These should never come out of order, actually. 
+ if (file_num == (int32)files_->size() || file_num == -1) { + string dir = dirs_->at(dir_num); + + SourceFileInfo s; + s.lowpc = ULLONG_MAX; + + if (dir == "") { + s.name = name; + } else { + s.name = dir + "/" + name; + } + + files_->push_back(s); + } else { + fprintf(stderr, "error in DefineFile"); + } +} + +void CULineInfoHandler::AddLine(uint64 address, uint64 length, uint32 file_num, + uint32 line_num, uint32 column_num) { + if (file_num < files_->size()) { + linemap_->insert( + std::make_pair(address, + std::make_pair(files_->at(file_num).name.c_str(), + line_num))); + + if (address < files_->at(file_num).lowpc) { + files_->at(file_num).lowpc = address; + } + } else { + fprintf(stderr, "error in AddLine"); + } +} + +bool CUFunctionInfoHandler::StartCompilationUnit(uint64 offset, + uint8 address_size, + uint8 offset_size, + uint64 cu_length, + uint8 dwarf_version) { + current_compilation_unit_offset_ = offset; + return true; +} + + +// For function info, we only care about subprograms and inlined +// subroutines. For line info, the DW_AT_stmt_list lives in the +// compile unit tag. + +bool CUFunctionInfoHandler::StartDIE(uint64 offset, enum DwarfTag tag) { + switch (tag) { + case DW_TAG_subprogram: + case DW_TAG_inlined_subroutine: { + current_function_info_ = new FunctionInfo; + current_function_info_->lowpc = current_function_info_->highpc = 0; + current_function_info_->name = ""; + current_function_info_->line = 0; + current_function_info_->file = ""; + offset_to_funcinfo_->insert(std::make_pair(offset, + current_function_info_)); + }; + // FALLTHROUGH + case DW_TAG_compile_unit: + return true; + default: + return false; + } + return false; +} + +// Only care about the name attribute for functions + +void CUFunctionInfoHandler::ProcessAttributeString(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + const string &data) { + if (current_function_info_) { + if (attr == DW_AT_name) + current_function_info_->name = data; + else if (attr == DW_AT_MIPS_linkage_name) + current_function_info_->mangled_name = data; + } +} + +void CUFunctionInfoHandler::ProcessAttributeUnsigned(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data) { + if (attr == DW_AT_stmt_list) { + SectionMap::const_iterator iter = sections_.find("__debug_line"); + assert(iter != sections_.end()); + + scoped_ptr lireader(new LineInfo(iter->second.first + data, + iter->second.second - data, + reader_, linehandler_)); + lireader->Start(); + } else if (current_function_info_) { + switch (attr) { + case DW_AT_low_pc: + current_function_info_->lowpc = data; + break; + case DW_AT_high_pc: + current_function_info_->highpc = data; + break; + case DW_AT_decl_line: + current_function_info_->line = data; + break; + case DW_AT_decl_file: + current_function_info_->file = files_->at(data).name; + break; + default: + break; + } + } +} + +void CUFunctionInfoHandler::ProcessAttributeReference(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data) { + if (current_function_info_) { + switch (attr) { + case DW_AT_specification: { + // Some functions have a "specification" attribute + // which means they were defined elsewhere. The name + // attribute is not repeated, and must be taken from + // the specification DIE. Here we'll assume that + // any DIE referenced in this manner will already have + // been seen, but that's not really required by the spec. 
+ FunctionMap::iterator iter = offset_to_funcinfo_->find(data); + if (iter != offset_to_funcinfo_->end()) { + current_function_info_->name = iter->second->name; + current_function_info_->mangled_name = iter->second->mangled_name; + } else { + // If you hit this, this code probably needs to be rewritten. + fprintf(stderr, + "Error: DW_AT_specification was seen before the referenced " + "DIE! (Looking for DIE at offset %08llx, in DIE at " + "offset %08llx)\n", data, offset); + } + break; + } + default: + break; + } + } +} + +void CUFunctionInfoHandler::EndDIE(uint64 offset) { + if (current_function_info_ && current_function_info_->lowpc) + address_to_funcinfo_->insert(std::make_pair(current_function_info_->lowpc, + current_function_info_)); +} + +} // namespace dwarf2reader diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/functioninfo.h b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/functioninfo.h new file mode 100644 index 0000000000..0b08a5fc5f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/functioninfo.h @@ -0,0 +1,188 @@ +// Copyright (c) 2010 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// This file contains the definitions for a DWARF2/3 information +// collector that uses the DWARF2/3 reader interface to build a mapping +// of addresses to files, lines, and functions. + +#ifndef COMMON_DWARF_FUNCTIONINFO_H__ +#define COMMON_DWARF_FUNCTIONINFO_H__ + +#include +#include +#include +#include + +#include "common/dwarf/dwarf2reader.h" +#include "common/using_std_string.h" + + +namespace dwarf2reader { + +struct FunctionInfo { + // Name of the function + string name; + // Mangled name of the function + string mangled_name; + // File containing this function + string file; + // Line number for start of function. + uint32 line; + // Beginning address for this function + uint64 lowpc; + // End address for this function. 
+ uint64 highpc; +}; + +struct SourceFileInfo { + // Name of the source file name + string name; + // Low address of source file name + uint64 lowpc; +}; + +typedef std::map FunctionMap; +typedef std::map > LineMap; + +// This class is a basic line info handler that fills in the dirs, +// file, and linemap passed into it with the data produced from the +// LineInfoHandler. +class CULineInfoHandler: public LineInfoHandler { + public: + + // + CULineInfoHandler(std::vector* files, + std::vector* dirs, + LineMap* linemap); + virtual ~CULineInfoHandler() { } + + // Called when we define a directory. We just place NAME into dirs_ + // at position DIR_NUM. + virtual void DefineDir(const string& name, uint32 dir_num); + + // Called when we define a filename. We just place + // concat(dirs_[DIR_NUM], NAME) into files_ at position FILE_NUM. + virtual void DefineFile(const string& name, int32 file_num, + uint32 dir_num, uint64 mod_time, uint64 length); + + + // Called when the line info reader has a new line, address pair + // ready for us. ADDRESS is the address of the code, LENGTH is the + // length of its machine code in bytes, FILE_NUM is the file number + // containing the code, LINE_NUM is the line number in that file for + // the code, and COLUMN_NUM is the column number the code starts at, + // if we know it (0 otherwise). + virtual void AddLine(uint64 address, uint64 length, + uint32 file_num, uint32 line_num, uint32 column_num); + + private: + LineMap* linemap_; + std::vector* files_; + std::vector* dirs_; +}; + +class CUFunctionInfoHandler: public Dwarf2Handler { + public: + CUFunctionInfoHandler(std::vector* files, + std::vector* dirs, + LineMap* linemap, + FunctionMap* offset_to_funcinfo, + FunctionMap* address_to_funcinfo, + CULineInfoHandler* linehandler, + const SectionMap& sections, + ByteReader* reader) + : files_(files), dirs_(dirs), linemap_(linemap), + offset_to_funcinfo_(offset_to_funcinfo), + address_to_funcinfo_(address_to_funcinfo), + linehandler_(linehandler), sections_(sections), + reader_(reader), current_function_info_(NULL) { } + + virtual ~CUFunctionInfoHandler() { } + + // Start to process a compilation unit at OFFSET from the beginning of the + // .debug_info section. We want to see all compilation units, so we + // always return true. + + virtual bool StartCompilationUnit(uint64 offset, uint8 address_size, + uint8 offset_size, uint64 cu_length, + uint8 dwarf_version); + + // Start to process a DIE at OFFSET from the beginning of the + // .debug_info section. We only care about function related DIE's. + virtual bool StartDIE(uint64 offset, enum DwarfTag tag); + + // Called when we have an attribute with unsigned data to give to + // our handler. The attribute is for the DIE at OFFSET from the + // beginning of the .debug_info section, has a name of ATTR, a form of + // FORM, and the actual data of the attribute is in DATA. + virtual void ProcessAttributeUnsigned(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data); + + // Called when we have an attribute with a DIE reference to give to + // our handler. The attribute is for the DIE at OFFSET from the + // beginning of the .debug_info section, has a name of ATTR, a form of + // FORM, and the offset of the referenced DIE from the start of the + // .debug_info section is in DATA. + virtual void ProcessAttributeReference(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data); + + // Called when we have an attribute with string data to give to + // our handler. 
The attribute is for the DIE at OFFSET from the + // beginning of the .debug_info section, has a name of ATTR, a form of + // FORM, and the actual data of the attribute is in DATA. + virtual void ProcessAttributeString(uint64 offset, + enum DwarfAttribute attr, + enum DwarfForm form, + const string& data); + + // Called when finished processing the DIE at OFFSET. + // Because DWARF2/3 specifies a tree of DIEs, you may get starts + // before ends of the previous DIE, as we process children before + // ending the parent. + virtual void EndDIE(uint64 offset); + + private: + std::vector* files_; + std::vector* dirs_; + LineMap* linemap_; + FunctionMap* offset_to_funcinfo_; + FunctionMap* address_to_funcinfo_; + CULineInfoHandler* linehandler_; + const SectionMap& sections_; + ByteReader* reader_; + FunctionInfo* current_function_info_; + uint64 current_compilation_unit_offset_; +}; + +} // namespace dwarf2reader +#endif // COMMON_DWARF_FUNCTIONINFO_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/line_state_machine.h b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/line_state_machine.h new file mode 100644 index 0000000000..0ff72abcfc --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/line_state_machine.h @@ -0,0 +1,61 @@ +// Copyright 2008 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +#ifndef COMMON_DWARF_LINE_STATE_MACHINE_H__ +#define COMMON_DWARF_LINE_STATE_MACHINE_H__ + +namespace dwarf2reader { + +// This is the format of a DWARF2/3 line state machine that we process +// opcodes using. There is no need for anything outside the lineinfo +// processor to know how this works. +struct LineStateMachine { + void Reset(bool default_is_stmt) { + file_num = 1; + address = 0; + line_num = 1; + column_num = 0; + is_stmt = default_is_stmt; + basic_block = false; + end_sequence = false; + } + + uint32 file_num; + uint64 address; + uint32 line_num; + uint32 column_num; + bool is_stmt; // stmt means statement. 
+ bool basic_block; + bool end_sequence; +}; + +} // namespace dwarf2reader + + +#endif // COMMON_DWARF_LINE_STATE_MACHINE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/types.h b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/types.h new file mode 100644 index 0000000000..61ca4579a9 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf/types.h @@ -0,0 +1,55 @@ +// Copyright 2008 Google, Inc. All Rights reserved +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// This file contains some typedefs for basic types + + +#ifndef _COMMON_DWARF_TYPES_H__ +#define _COMMON_DWARF_TYPES_H__ + +#include + +typedef signed char int8; +typedef short int16; +typedef int int32; +typedef long long int64; + +typedef unsigned char uint8; +typedef unsigned short uint16; +typedef unsigned int uint32; +typedef unsigned long long uint64; + +#ifdef __PTRDIFF_TYPE__ +typedef __PTRDIFF_TYPE__ intptr; +typedef unsigned __PTRDIFF_TYPE__ uintptr; +#else +#error "Can't find pointer-sized integral types." +#endif + +#endif // _COMMON_DWARF_TYPES_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cfi_to_module.cc b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cfi_to_module.cc new file mode 100644 index 0000000000..1bf1d96d59 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cfi_to_module.cc @@ -0,0 +1,295 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// Implementation of google_breakpad::DwarfCFIToModule. +// See dwarf_cfi_to_module.h for details. + +#include + +#include "common/dwarf_cfi_to_module.h" + +namespace google_breakpad { + +using std::ostringstream; + +vector DwarfCFIToModule::RegisterNames::MakeVector( + const char * const *strings, + size_t size) { + vector names(strings, strings + size); + return names; +} + +vector DwarfCFIToModule::RegisterNames::I386() { + static const char *const names[] = { + "$eax", "$ecx", "$edx", "$ebx", "$esp", "$ebp", "$esi", "$edi", + "$eip", "$eflags", "$unused1", + "$st0", "$st1", "$st2", "$st3", "$st4", "$st5", "$st6", "$st7", + "$unused2", "$unused3", + "$xmm0", "$xmm1", "$xmm2", "$xmm3", "$xmm4", "$xmm5", "$xmm6", "$xmm7", + "$mm0", "$mm1", "$mm2", "$mm3", "$mm4", "$mm5", "$mm6", "$mm7", + "$fcw", "$fsw", "$mxcsr", + "$es", "$cs", "$ss", "$ds", "$fs", "$gs", "$unused4", "$unused5", + "$tr", "$ldtr" + }; + + return MakeVector(names, sizeof(names) / sizeof(names[0])); +} + +vector DwarfCFIToModule::RegisterNames::X86_64() { + static const char *const names[] = { + "$rax", "$rdx", "$rcx", "$rbx", "$rsi", "$rdi", "$rbp", "$rsp", + "$r8", "$r9", "$r10", "$r11", "$r12", "$r13", "$r14", "$r15", + "$rip", + "$xmm0","$xmm1","$xmm2", "$xmm3", "$xmm4", "$xmm5", "$xmm6", "$xmm7", + "$xmm8","$xmm9","$xmm10","$xmm11","$xmm12","$xmm13","$xmm14","$xmm15", + "$st0", "$st1", "$st2", "$st3", "$st4", "$st5", "$st6", "$st7", + "$mm0", "$mm1", "$mm2", "$mm3", "$mm4", "$mm5", "$mm6", "$mm7", + "$rflags", + "$es", "$cs", "$ss", "$ds", "$fs", "$gs", "$unused1", "$unused2", + "$fs.base", "$gs.base", "$unused3", "$unused4", + "$tr", "$ldtr", + "$mxcsr", "$fcw", "$fsw" + }; + + return MakeVector(names, sizeof(names) / sizeof(names[0])); +} + +// Per ARM IHI 0040A, section 3.1 +vector DwarfCFIToModule::RegisterNames::ARM() { + static const char *const names[] = { + "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", + "r8", "r9", "r10", "r11", "r12", "sp", "lr", "pc", + "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", + "fps", "cpsr", "", "", "", "", "", "", + "", "", "", "", "", "", "", "", + "", "", "", "", "", "", "", "", + "", "", "", "", "", "", "", "", + "", "", "", "", "", "", "", "", + "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", + "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15", + "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23", + "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31", + "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7" + }; + + return MakeVector(names, sizeof(names) / sizeof(names[0])); +} + +// Per ARM IHI 
0057A, section 3.1 +vector DwarfCFIToModule::RegisterNames::ARM64() { + static const char *const names[] = { + "x0", "x1", "x2", "x3", "x4", "x5", "x6", "x7", + "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", + "x16", "x17", "x18", "x19", "x20", "x21", "x22", "x23", + "x24", "x25", "x26", "x27", "x28", "x29", "x30", "sp", + "", "", "", "", "", "", "", "", + "", "", "", "", "", "", "", "", + "", "", "", "", "", "", "", "", + "", "", "", "", "", "", "", "", + "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", + "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", + "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", + "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31" + }; + + return MakeVector(names, sizeof(names) / sizeof(names[0])); +} + +vector DwarfCFIToModule::RegisterNames::MIPS() { + static const char* const kRegisterNames[] = { + "$zero", "$at", "$v0", "$v1", "$a0", "$a1", "$a2", "$a3", + "$t0", "$t1", "$t2", "$t3", "$t4", "$t5", "$t6", "$t7", + "$s0", "$s1", "$s2", "$s3", "$s4", "$s5", "$s6", "$s7", + "$t8", "$t9", "$k0", "$k1", "$gp", "$sp", "$fp", "$ra", + "$lo", "$hi", "$pc", "$f0", "$f2", "$f3", "$f4", "$f5", + "$f6", "$f7", "$f8", "$f9", "$f10", "$f11", "$f12", "$f13", + "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", + "$f21", "$f22", "$f23", "$f24", "$f25", "$f26", "$f27", + "$f28", "$f29", "$f30", "$f31", "$fcsr", "$fir" + }; + + return MakeVector(kRegisterNames, + sizeof(kRegisterNames) / sizeof(kRegisterNames[0])); +} + +bool DwarfCFIToModule::Entry(size_t offset, uint64 address, uint64 length, + uint8 version, const string &augmentation, + unsigned return_address) { + assert(!entry_); + + // If dwarf2reader::CallFrameInfo can handle this version and + // augmentation, then we should be okay with that, so there's no + // need to check them here. + + // Get ready to collect entries. + entry_ = new Module::StackFrameEntry; + entry_->address = address; + entry_->size = length; + entry_offset_ = offset; + return_address_ = return_address; + + // Breakpad STACK CFI records must provide a .ra rule, but DWARF CFI + // may not establish any rule for .ra if the return address column + // is an ordinary register, and that register holds the return + // address on entry to the function. So establish an initial .ra + // rule citing the return address register. + if (return_address_ < register_names_.size()) + entry_->initial_rules[ra_name_] = register_names_[return_address_]; + + return true; +} + +string DwarfCFIToModule::RegisterName(int i) { + assert(entry_); + if (i < 0) { + assert(i == kCFARegister); + return cfa_name_; + } + unsigned reg = i; + if (reg == return_address_) + return ra_name_; + + // Ensure that a non-empty name exists for this register value. + if (reg < register_names_.size() && !register_names_[reg].empty()) + return register_names_[reg]; + + reporter_->UnnamedRegister(entry_offset_, reg); + char buf[30]; + sprintf(buf, "unnamed_register%u", reg); + return buf; +} + +void DwarfCFIToModule::Record(Module::Address address, int reg, + const string &rule) { + assert(entry_); + + // Place the name in our global set of strings, and then use the string + // from the set. Even though the assignment looks like a copy, all the + // major std::string implementations use reference counting internally, + // so the effect is to have all our data structures share copies of rules + // whenever possible. Since register names are drawn from a + // vector, register names are already shared. 
+ string shared_rule = *common_strings_.insert(rule).first; + + // Is this one of this entry's initial rules? + if (address == entry_->address) + entry_->initial_rules[RegisterName(reg)] = shared_rule; + // File it under the appropriate address. + else + entry_->rule_changes[address][RegisterName(reg)] = shared_rule; +} + +bool DwarfCFIToModule::UndefinedRule(uint64 address, int reg) { + reporter_->UndefinedNotSupported(entry_offset_, RegisterName(reg)); + // Treat this as a non-fatal error. + return true; +} + +bool DwarfCFIToModule::SameValueRule(uint64 address, int reg) { + ostringstream s; + s << RegisterName(reg); + Record(address, reg, s.str()); + return true; +} + +bool DwarfCFIToModule::OffsetRule(uint64 address, int reg, + int base_register, long offset) { + ostringstream s; + s << RegisterName(base_register) << " " << offset << " + ^"; + Record(address, reg, s.str()); + return true; +} + +bool DwarfCFIToModule::ValOffsetRule(uint64 address, int reg, + int base_register, long offset) { + ostringstream s; + s << RegisterName(base_register) << " " << offset << " +"; + Record(address, reg, s.str()); + return true; +} + +bool DwarfCFIToModule::RegisterRule(uint64 address, int reg, + int base_register) { + ostringstream s; + s << RegisterName(base_register); + Record(address, reg, s.str()); + return true; +} + +bool DwarfCFIToModule::ExpressionRule(uint64 address, int reg, + const string &expression) { + reporter_->ExpressionsNotSupported(entry_offset_, RegisterName(reg)); + // Treat this as a non-fatal error. + return true; +} + +bool DwarfCFIToModule::ValExpressionRule(uint64 address, int reg, + const string &expression) { + reporter_->ExpressionsNotSupported(entry_offset_, RegisterName(reg)); + // Treat this as a non-fatal error. + return true; +} + +bool DwarfCFIToModule::End() { + module_->AddStackFrameEntry(entry_); + entry_ = NULL; + return true; +} + +void DwarfCFIToModule::Reporter::UnnamedRegister(size_t offset, int reg) { + fprintf(stderr, "%s, section '%s': " + "the call frame entry at offset 0x%zx refers to register %d," + " whose name we don't know\n", + file_.c_str(), section_.c_str(), offset, reg); +} + +void DwarfCFIToModule::Reporter::UndefinedNotSupported(size_t offset, + const string ®) { + fprintf(stderr, "%s, section '%s': " + "the call frame entry at offset 0x%zx sets the rule for " + "register '%s' to 'undefined', but the Breakpad symbol file format" + " cannot express this\n", + file_.c_str(), section_.c_str(), offset, reg.c_str()); +} + +void DwarfCFIToModule::Reporter::ExpressionsNotSupported(size_t offset, + const string ®) { + fprintf(stderr, "%s, section '%s': " + "the call frame entry at offset 0x%zx uses a DWARF expression to" + " describe how to recover register '%s', " + " but this translator cannot yet translate DWARF expressions to" + " Breakpad postfix expressions\n", + file_.c_str(), section_.c_str(), offset, reg.c_str()); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cfi_to_module.h b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cfi_to_module.h new file mode 100644 index 0000000000..084b8f5a75 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cfi_to_module.h @@ -0,0 +1,202 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2010, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// dwarf_cfi_to_module.h: Define the DwarfCFIToModule class, which +// accepts parsed DWARF call frame info and adds it to a +// google_breakpad::Module object, which can write that information to +// a Breakpad symbol file. + +#ifndef COMMON_LINUX_DWARF_CFI_TO_MODULE_H +#define COMMON_LINUX_DWARF_CFI_TO_MODULE_H + +#include +#include + +#include +#include +#include + +#include "common/module.h" +#include "common/dwarf/dwarf2reader.h" +#include "common/using_std_string.h" + +namespace google_breakpad { + +using dwarf2reader::CallFrameInfo; +using google_breakpad::Module; +using std::set; +using std::vector; + +// A class that accepts parsed call frame information from the DWARF +// CFI parser and populates a google_breakpad::Module object with the +// contents. +class DwarfCFIToModule: public CallFrameInfo::Handler { + public: + + // DwarfCFIToModule uses an instance of this class to report errors + // detected while converting DWARF CFI to Breakpad STACK CFI records. + class Reporter { + public: + // Create a reporter that writes messages to the standard error + // stream. FILE is the name of the file we're processing, and + // SECTION is the name of the section within that file that we're + // looking at (.debug_frame, .eh_frame, etc.). + Reporter(const string &file, const string §ion) + : file_(file), section_(section) { } + virtual ~Reporter() { } + + // The DWARF CFI entry at OFFSET cites register REG, but REG is not + // covered by the vector of register names passed to the + // DwarfCFIToModule constructor, nor does it match the return + // address column number for this entry. + virtual void UnnamedRegister(size_t offset, int reg); + + // The DWARF CFI entry at OFFSET says that REG is undefined, but the + // Breakpad symbol file format cannot express this. 
+ virtual void UndefinedNotSupported(size_t offset, const string ®); + + // The DWARF CFI entry at OFFSET says that REG uses a DWARF + // expression to find its value, but DwarfCFIToModule is not + // capable of translating DWARF expressions to Breakpad postfix + // expressions. + virtual void ExpressionsNotSupported(size_t offset, const string ®); + + protected: + string file_, section_; + }; + + // Register name tables. If TABLE is a vector returned by one of these + // functions, then TABLE[R] is the name of the register numbered R in + // DWARF call frame information. + class RegisterNames { + public: + // Intel's "x86" or IA-32. + static vector I386(); + + // AMD x86_64, AMD64, Intel EM64T, or Intel 64 + static vector X86_64(); + + // ARM. + static vector ARM(); + + // ARM64, aka AARCH64. + static vector ARM64(); + + // MIPS. + static vector MIPS(); + + private: + // Given STRINGS, an array of C strings with SIZE elements, return an + // equivalent vector. + static vector MakeVector(const char * const *strings, size_t size); + }; + + // Create a handler for the dwarf2reader::CallFrameInfo parser that + // records the stack unwinding information it receives in MODULE. + // + // Use REGISTER_NAMES[I] as the name of register number I; *this + // keeps a reference to the vector, so the vector should remain + // alive for as long as the DwarfCFIToModule does. + // + // Use REPORTER for reporting problems encountered in the conversion + // process. + DwarfCFIToModule(Module *module, const vector ®ister_names, + Reporter *reporter) + : module_(module), register_names_(register_names), reporter_(reporter), + entry_(NULL), return_address_(-1), cfa_name_(".cfa"), ra_name_(".ra") { + } + virtual ~DwarfCFIToModule() { delete entry_; } + + virtual bool Entry(size_t offset, uint64 address, uint64 length, + uint8 version, const string &augmentation, + unsigned return_address); + virtual bool UndefinedRule(uint64 address, int reg); + virtual bool SameValueRule(uint64 address, int reg); + virtual bool OffsetRule(uint64 address, int reg, + int base_register, long offset); + virtual bool ValOffsetRule(uint64 address, int reg, + int base_register, long offset); + virtual bool RegisterRule(uint64 address, int reg, int base_register); + virtual bool ExpressionRule(uint64 address, int reg, + const string &expression); + virtual bool ValExpressionRule(uint64 address, int reg, + const string &expression); + virtual bool End(); + + private: + // Return the name to use for register REG. + string RegisterName(int i); + + // Record RULE for register REG at ADDRESS. + void Record(Module::Address address, int reg, const string &rule); + + // The module to which we should add entries. + Module *module_; + + // Map from register numbers to register names. + const vector ®ister_names_; + + // The reporter to use to report problems. + Reporter *reporter_; + + // The current entry we're constructing. + Module::StackFrameEntry *entry_; + + // The section offset of the current frame description entry, for + // use in error messages. + size_t entry_offset_; + + // The return address column for that entry. + unsigned return_address_; + + // The names of the return address and canonical frame address. Putting + // these here instead of using string literals allows us to share their + // texts in reference-counted std::string implementations (all the + // popular ones). Many, many rules cite these strings. + string cfa_name_, ra_name_; + + // A set of strings used by this CFI. 
Before storing a string in one of + // our data structures, insert it into this set, and then use the string + // from the set. + // + // Because std::string uses reference counting internally, simply using + // strings from this set, even if passed by value, assigned, or held + // directly in structures and containers (map, for example), + // causes those strings to share a single instance of each distinct piece + // of text. + set common_strings_; +}; + +} // namespace google_breakpad + +#endif // COMMON_LINUX_DWARF_CFI_TO_MODULE_H diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cfi_to_module_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cfi_to_module_unittest.cc new file mode 100644 index 0000000000..807d1b20c3 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cfi_to_module_unittest.cc @@ -0,0 +1,306 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// dwarf_cfi_to_module_unittest.cc: Tests for google_breakpad::DwarfCFIToModule. 
+ +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/dwarf_cfi_to_module.h" +#include "common/using_std_string.h" + +using std::vector; + +using google_breakpad::Module; +using google_breakpad::DwarfCFIToModule; +using testing::ContainerEq; +using testing::Test; +using testing::_; + +struct MockCFIReporter: public DwarfCFIToModule::Reporter { + MockCFIReporter(const string &file, const string §ion) + : Reporter(file, section) { } + MOCK_METHOD2(UnnamedRegister, void(size_t offset, int reg)); + MOCK_METHOD2(UndefinedNotSupported, void(size_t offset, const string ®)); + MOCK_METHOD2(ExpressionsNotSupported, void(size_t offset, const string ®)); +}; + +struct DwarfCFIToModuleFixture { + DwarfCFIToModuleFixture() + : module("module name", "module os", "module arch", "module id"), + reporter("reporter file", "reporter section"), + handler(&module, register_names, &reporter) { + register_names.push_back("reg0"); + register_names.push_back("reg1"); + register_names.push_back("reg2"); + register_names.push_back("reg3"); + register_names.push_back("reg4"); + register_names.push_back("reg5"); + register_names.push_back("reg6"); + register_names.push_back("reg7"); + register_names.push_back("sp"); + register_names.push_back("pc"); + register_names.push_back(""); + + EXPECT_CALL(reporter, UnnamedRegister(_, _)).Times(0); + EXPECT_CALL(reporter, UndefinedNotSupported(_, _)).Times(0); + EXPECT_CALL(reporter, ExpressionsNotSupported(_, _)).Times(0); + } + + Module module; + vector register_names; + MockCFIReporter reporter; + DwarfCFIToModule handler; + vector entries; +}; + +class Entry: public DwarfCFIToModuleFixture, public Test { }; + +TEST_F(Entry, Accept) { + ASSERT_TRUE(handler.Entry(0x3b8961b8, 0xa21069698096fc98ULL, + 0xb440ce248169c8d6ULL, 3, "", 0xea93c106)); + ASSERT_TRUE(handler.End()); + module.GetStackFrameEntries(&entries); + EXPECT_EQ(1U, entries.size()); + EXPECT_EQ(0xa21069698096fc98ULL, entries[0]->address); + EXPECT_EQ(0xb440ce248169c8d6ULL, entries[0]->size); + EXPECT_EQ(0U, entries[0]->initial_rules.size()); + EXPECT_EQ(0U, entries[0]->rule_changes.size()); +} + +TEST_F(Entry, AcceptOldVersion) { + ASSERT_TRUE(handler.Entry(0xeb60e0fc, 0x75b8806bb09eab78ULL, + 0xc771f44958d40bbcULL, 1, "", 0x093c945e)); + ASSERT_TRUE(handler.End()); + module.GetStackFrameEntries(&entries); + EXPECT_EQ(1U, entries.size()); + EXPECT_EQ(0x75b8806bb09eab78ULL, entries[0]->address); + EXPECT_EQ(0xc771f44958d40bbcULL, entries[0]->size); + EXPECT_EQ(0U, entries[0]->initial_rules.size()); + EXPECT_EQ(0U, entries[0]->rule_changes.size()); +} + +struct RuleFixture: public DwarfCFIToModuleFixture { + RuleFixture() : DwarfCFIToModuleFixture() { + entry_address = 0x89327ebf86b47492ULL; + entry_size = 0x2f8cd573072fe02aULL; + return_reg = 0x7886a346; + } + void StartEntry() { + ASSERT_TRUE(handler.Entry(0x4445c05c, entry_address, entry_size, + 3, "", return_reg)); + } + void CheckEntry() { + module.GetStackFrameEntries(&entries); + EXPECT_EQ(1U, entries.size()); + EXPECT_EQ(entry_address, entries[0]->address); + EXPECT_EQ(entry_size, entries[0]->size); + } + uint64 entry_address, entry_size; + unsigned return_reg; +}; + +class Rule: public RuleFixture, public Test { }; + +TEST_F(Rule, UndefinedRule) { + EXPECT_CALL(reporter, UndefinedNotSupported(_, "reg7")); + StartEntry(); + ASSERT_TRUE(handler.UndefinedRule(entry_address, 7)); + ASSERT_TRUE(handler.End()); + CheckEntry(); + EXPECT_EQ(0U, entries[0]->initial_rules.size()); + EXPECT_EQ(0U, entries[0]->rule_changes.size()); +} + 
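The fixture above feeds Entry/rule/End calls into DwarfCFIToModule and then inspects the Module's StackFrameEntry list; in a real dump the same data is ultimately written out as STACK CFI records in the Breakpad symbol file. Here is a rough sketch of that final shape, using simplified stand-in structs rather than the actual Module::StackFrameEntry API, with the exact formatting treated as approximate:

```cpp
#include <cstdint>
#include <cstdio>
#include <map>
#include <string>

struct SimpleEntry {
  uint64_t address, size;
  std::map<std::string, std::string> initial_rules;
  std::map<uint64_t, std::map<std::string, std::string>> rule_changes;
};

// Print the entry as STACK CFI INIT / STACK CFI lines (addresses in hex).
void PrintStackCFI(const SimpleEntry& e) {
  std::printf("STACK CFI INIT %llx %llx",
              (unsigned long long)e.address, (unsigned long long)e.size);
  for (const auto& r : e.initial_rules)
    std::printf(" %s: %s", r.first.c_str(), r.second.c_str());
  std::printf("\n");
  for (const auto& change : e.rule_changes) {
    std::printf("STACK CFI %llx", (unsigned long long)change.first);
    for (const auto& r : change.second)
      std::printf(" %s: %s", r.first.c_str(), r.second.c_str());
    std::printf("\n");
  }
}
```

The initial rules land on the STACK CFI INIT line covering the entry's address range, and each rule_changes entry becomes a delta line at its own address, which is exactly the split the tests below check via initial_rules and rule_changes.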
+TEST_F(Rule, RegisterWithEmptyName) { + EXPECT_CALL(reporter, UnnamedRegister(_, 10)); + EXPECT_CALL(reporter, UndefinedNotSupported(_, "unnamed_register10")); + StartEntry(); + ASSERT_TRUE(handler.UndefinedRule(entry_address, 10)); + ASSERT_TRUE(handler.End()); + CheckEntry(); + EXPECT_EQ(0U, entries[0]->initial_rules.size()); + EXPECT_EQ(0U, entries[0]->rule_changes.size()); +} + +TEST_F(Rule, SameValueRule) { + StartEntry(); + ASSERT_TRUE(handler.SameValueRule(entry_address, 6)); + ASSERT_TRUE(handler.End()); + CheckEntry(); + Module::RuleMap expected_initial; + expected_initial["reg6"] = "reg6"; + EXPECT_THAT(entries[0]->initial_rules, ContainerEq(expected_initial)); + EXPECT_EQ(0U, entries[0]->rule_changes.size()); +} + +TEST_F(Rule, OffsetRule) { + StartEntry(); + ASSERT_TRUE(handler.OffsetRule(entry_address + 1, return_reg, + DwarfCFIToModule::kCFARegister, + 16927065)); + ASSERT_TRUE(handler.End()); + CheckEntry(); + EXPECT_EQ(0U, entries[0]->initial_rules.size()); + Module::RuleChangeMap expected_changes; + expected_changes[entry_address + 1][".ra"] = ".cfa 16927065 + ^"; + EXPECT_THAT(entries[0]->rule_changes, ContainerEq(expected_changes)); +} + +TEST_F(Rule, OffsetRuleNegative) { + StartEntry(); + ASSERT_TRUE(handler.OffsetRule(entry_address + 1, + DwarfCFIToModule::kCFARegister, 4, -34530721)); + ASSERT_TRUE(handler.End()); + CheckEntry(); + EXPECT_EQ(0U, entries[0]->initial_rules.size()); + Module::RuleChangeMap expected_changes; + expected_changes[entry_address + 1][".cfa"] = "reg4 -34530721 + ^"; + EXPECT_THAT(entries[0]->rule_changes, ContainerEq(expected_changes)); +} + +TEST_F(Rule, ValOffsetRule) { + // Use an unnamed register number, to exercise that branch of RegisterName. + EXPECT_CALL(reporter, UnnamedRegister(_, 11)); + StartEntry(); + ASSERT_TRUE(handler.ValOffsetRule(entry_address + 0x5ab7, + DwarfCFIToModule::kCFARegister, + 11, 61812979)); + ASSERT_TRUE(handler.End()); + CheckEntry(); + EXPECT_EQ(0U, entries[0]->initial_rules.size()); + Module::RuleChangeMap expected_changes; + expected_changes[entry_address + 0x5ab7][".cfa"] = + "unnamed_register11 61812979 +"; + EXPECT_THAT(entries[0]->rule_changes, ContainerEq(expected_changes)); +} + +TEST_F(Rule, RegisterRule) { + StartEntry(); + ASSERT_TRUE(handler.RegisterRule(entry_address, return_reg, 3)); + ASSERT_TRUE(handler.End()); + CheckEntry(); + Module::RuleMap expected_initial; + expected_initial[".ra"] = "reg3"; + EXPECT_THAT(entries[0]->initial_rules, ContainerEq(expected_initial)); + EXPECT_EQ(0U, entries[0]->rule_changes.size()); +} + +TEST_F(Rule, ExpressionRule) { + EXPECT_CALL(reporter, ExpressionsNotSupported(_, "reg2")); + StartEntry(); + ASSERT_TRUE(handler.ExpressionRule(entry_address + 0xf326, 2, + "it takes two to tango")); + ASSERT_TRUE(handler.End()); + CheckEntry(); + EXPECT_EQ(0U, entries[0]->initial_rules.size()); + EXPECT_EQ(0U, entries[0]->rule_changes.size()); +} + +TEST_F(Rule, ValExpressionRule) { + EXPECT_CALL(reporter, ExpressionsNotSupported(_, "reg0")); + StartEntry(); + ASSERT_TRUE(handler.ValExpressionRule(entry_address + 0x6367, 0, + "bit off more than he could chew")); + ASSERT_TRUE(handler.End()); + CheckEntry(); + EXPECT_EQ(0U, entries[0]->initial_rules.size()); + EXPECT_EQ(0U, entries[0]->rule_changes.size()); +} + +TEST_F(Rule, DefaultReturnAddressRule) { + return_reg = 2; + StartEntry(); + ASSERT_TRUE(handler.RegisterRule(entry_address, 0, 1)); + ASSERT_TRUE(handler.End()); + CheckEntry(); + Module::RuleMap expected_initial; + expected_initial[".ra"] = "reg2"; + 
expected_initial["reg0"] = "reg1"; + EXPECT_THAT(entries[0]->initial_rules, ContainerEq(expected_initial)); + EXPECT_EQ(0U, entries[0]->rule_changes.size()); +} + +TEST_F(Rule, DefaultReturnAddressRuleOverride) { + return_reg = 2; + StartEntry(); + ASSERT_TRUE(handler.RegisterRule(entry_address, return_reg, 1)); + ASSERT_TRUE(handler.End()); + CheckEntry(); + Module::RuleMap expected_initial; + expected_initial[".ra"] = "reg1"; + EXPECT_THAT(entries[0]->initial_rules, ContainerEq(expected_initial)); + EXPECT_EQ(0U, entries[0]->rule_changes.size()); +} + +TEST_F(Rule, DefaultReturnAddressRuleLater) { + return_reg = 2; + StartEntry(); + ASSERT_TRUE(handler.RegisterRule(entry_address + 1, return_reg, 1)); + ASSERT_TRUE(handler.End()); + CheckEntry(); + Module::RuleMap expected_initial; + expected_initial[".ra"] = "reg2"; + EXPECT_THAT(entries[0]->initial_rules, ContainerEq(expected_initial)); + Module::RuleChangeMap expected_changes; + expected_changes[entry_address + 1][".ra"] = "reg1"; + EXPECT_THAT(entries[0]->rule_changes, ContainerEq(expected_changes)); +} + +TEST(RegisterNames, I386) { + vector names = DwarfCFIToModule::RegisterNames::I386(); + + EXPECT_EQ("$eax", names[0]); + EXPECT_EQ("$ecx", names[1]); + EXPECT_EQ("$esp", names[4]); + EXPECT_EQ("$eip", names[8]); +} + +TEST(RegisterNames, ARM) { + vector names = DwarfCFIToModule::RegisterNames::ARM(); + + EXPECT_EQ("r0", names[0]); + EXPECT_EQ("r10", names[10]); + EXPECT_EQ("sp", names[13]); + EXPECT_EQ("lr", names[14]); + EXPECT_EQ("pc", names[15]); +} + +TEST(RegisterNames, X86_64) { + vector names = DwarfCFIToModule::RegisterNames::X86_64(); + + EXPECT_EQ("$rax", names[0]); + EXPECT_EQ("$rdx", names[1]); + EXPECT_EQ("$rbp", names[6]); + EXPECT_EQ("$rsp", names[7]); + EXPECT_EQ("$rip", names[16]); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cu_to_module.cc b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cu_to_module.cc new file mode 100644 index 0000000000..1729d4ac93 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cu_to_module.cc @@ -0,0 +1,1073 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// Implement the DwarfCUToModule class; see dwarf_cu_to_module.h. + +// For PRI* macros, before anything else might #include it. +#ifndef __STDC_FORMAT_MACROS +#define __STDC_FORMAT_MACROS +#endif /* __STDC_FORMAT_MACROS */ + +#include "common/dwarf_cu_to_module.h" + +#include +#if !defined(__ANDROID__) +#include +#endif +#include +#include + +#include +#include + +#include "common/dwarf_line_to_module.h" +#include "common/unordered.h" + +namespace google_breakpad { + +using std::map; +using std::pair; +using std::sort; +using std::vector; + +// Data provided by a DWARF specification DIE. +// +// In DWARF, the DIE for a definition may contain a DW_AT_specification +// attribute giving the offset of the corresponding declaration DIE, and +// the definition DIE may omit information given in the declaration. For +// example, it's common for a function's address range to appear only in +// its definition DIE, but its name to appear only in its declaration +// DIE. +// +// The dumper needs to be able to follow DW_AT_specification links to +// bring all this information together in a FUNC record. Conveniently, +// DIEs that are the target of such links have a DW_AT_declaration flag +// set, so we can identify them when we first see them, and record their +// contents for later reference. +// +// A Specification holds information gathered from a declaration DIE that +// we may need if we find a DW_AT_specification link pointing to it. +struct DwarfCUToModule::Specification { + // The qualified name that can be found by demangling DW_AT_MIPS_linkage_name. + string qualified_name; + + // The name of the enclosing scope, or the empty string if there is none. + string enclosing_name; + + // The name for the specification DIE itself, without any enclosing + // name components. + string unqualified_name; +}; + +// An abstract origin -- base definition of an inline function. +struct AbstractOrigin { + AbstractOrigin() : name() {} + explicit AbstractOrigin(const string& name) : name(name) {} + + string name; +}; + +typedef map AbstractOriginByOffset; + +// Data global to the DWARF-bearing file that is private to the +// DWARF-to-Module process. +struct DwarfCUToModule::FilePrivate { + // A set of strings used in this CU. Before storing a string in one of + // our data structures, insert it into this set, and then use the string + // from the set. + // + // In some STL implementations, strings are reference-counted internally, + // meaning that simply using strings from this set, even if passed by + // value, assigned, or held directly in structures and containers + // (map, for example), causes those strings to share a + // single instance of each distinct piece of text. GNU's libstdc++ uses + // reference counts, and I believe MSVC did as well, at some point. + // However, C++ '11 implementations are moving away from reference + // counting. 
+ // + // In other implementations, string assignments copy the string's text, + // so this set will actually hold yet another copy of the string (although + // everything will still work). To improve memory consumption portably, + // we will probably need to use pointers to strings held in this set. + unordered_set common_strings; + + // A map from offsets of DIEs within the .debug_info section to + // Specifications describing those DIEs. Specification references can + // cross compilation unit boundaries. + SpecificationByOffset specifications; + + AbstractOriginByOffset origins; +}; + +DwarfCUToModule::FileContext::FileContext(const string &filename, + Module *module, + bool handle_inter_cu_refs) + : filename_(filename), + module_(module), + handle_inter_cu_refs_(handle_inter_cu_refs), + file_private_(new FilePrivate()) { +} + +DwarfCUToModule::FileContext::~FileContext() { +} + +void DwarfCUToModule::FileContext::AddSectionToSectionMap( + const string& name, const char* contents, uint64 length) { + section_map_[name] = std::make_pair(contents, length); +} + +void DwarfCUToModule::FileContext::ClearSectionMapForTest() { + section_map_.clear(); +} + +const dwarf2reader::SectionMap& +DwarfCUToModule::FileContext::section_map() const { + return section_map_; +} + +void DwarfCUToModule::FileContext::ClearSpecifications() { + if (!handle_inter_cu_refs_) + file_private_->specifications.clear(); +} + +bool DwarfCUToModule::FileContext::IsUnhandledInterCUReference( + uint64 offset, uint64 compilation_unit_start) const { + if (handle_inter_cu_refs_) + return false; + return offset < compilation_unit_start; +} + +// Information global to the particular compilation unit we're +// parsing. This is for data shared across the CU's entire DIE tree, +// and parameters from the code invoking the CU parser. +struct DwarfCUToModule::CUContext { + CUContext(FileContext *file_context_arg, WarningReporter *reporter_arg) + : file_context(file_context_arg), + reporter(reporter_arg), + language(Language::CPlusPlus) {} + + ~CUContext() { + for (vector::iterator it = functions.begin(); + it != functions.end(); ++it) { + delete *it; + } + }; + + // The DWARF-bearing file into which this CU was incorporated. + FileContext *file_context; + + // For printing error messages. + WarningReporter *reporter; + + // The source language of this compilation unit. + const Language *language; + + // The functions defined in this compilation unit. We accumulate + // them here during parsing. Then, in DwarfCUToModule::Finish, we + // assign them lines and add them to file_context->module. + // + // Destroying this destroys all the functions this vector points to. + vector functions; +}; + +// Information about the context of a particular DIE. This is for +// information that changes as we descend the tree towards the leaves: +// the containing classes/namespaces, etc. +struct DwarfCUToModule::DIEContext { + // The fully-qualified name of the context. For example, for a + // tree like: + // + // DW_TAG_namespace Foo + // DW_TAG_class Bar + // DW_TAG_subprogram Baz + // + // in a C++ compilation unit, the DIEContext's name for the + // DW_TAG_subprogram DIE would be "Foo::Bar". The DIEContext's + // name for the DW_TAG_namespace DIE would be "". + string name; +}; + +// An abstract base class for all the dumper's DIE handlers. 
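The common_strings pool above is an instance of string interning: insert the text into a shared set, then keep using the set's canonical copy. A minimal stand-alone sketch of that idea, using a hypothetical StringPool/Intern that is not part of Breakpad, could look like this:

#include <string>
#include <unordered_set>
#include <utility>

// Sketch of the interning scheme described in FilePrivate: each distinct
// piece of text is stored once, and callers reuse the canonical entry.
class StringPool {
 public:
  const std::string& Intern(const std::string& str) {
    // insert() hands back the existing element when str is already present,
    // so repeated names end up referring to a single entry in the set.
    std::pair<std::unordered_set<std::string>::iterator, bool> result =
        pool_.insert(str);
    return *result.first;
  }
 private:
  std::unordered_set<std::string> pool_;
};

Whether callers actually end up sharing storage afterwards depends on the std::string implementation, which is exactly the caveat the FilePrivate comment makes.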
+class DwarfCUToModule::GenericDIEHandler: public dwarf2reader::DIEHandler { + public: + // Create a handler for the DIE at OFFSET whose compilation unit is + // described by CU_CONTEXT, and whose immediate context is described + // by PARENT_CONTEXT. + GenericDIEHandler(CUContext *cu_context, DIEContext *parent_context, + uint64 offset) + : cu_context_(cu_context), + parent_context_(parent_context), + offset_(offset), + declaration_(false), + specification_(NULL) { } + + // Derived classes' ProcessAttributeUnsigned can defer to this to + // handle DW_AT_declaration, or simply not override it. + void ProcessAttributeUnsigned(enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data); + + // Derived classes' ProcessAttributeReference can defer to this to + // handle DW_AT_specification, or simply not override it. + void ProcessAttributeReference(enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data); + + // Derived classes' ProcessAttributeReference can defer to this to + // handle DW_AT_specification, or simply not override it. + void ProcessAttributeString(enum DwarfAttribute attr, + enum DwarfForm form, + const string &data); + + protected: + // Compute and return the fully-qualified name of the DIE. If this + // DIE is a declaration DIE, to be cited by other DIEs' + // DW_AT_specification attributes, record its enclosing name and + // unqualified name in the specification table. + // + // Use this from EndAttributes member functions, not ProcessAttribute* + // functions; only the former can be sure that all the DIE's attributes + // have been seen. + string ComputeQualifiedName(); + + CUContext *cu_context_; + DIEContext *parent_context_; + uint64 offset_; + + // Place the name in the global set of strings. Even though this looks + // like a copy, all the major std::string implementations use reference + // counting internally, so the effect is to have all the data structures + // share copies of strings whenever possible. + // FIXME: Should this return something like a string_ref to avoid the + // assumption about how strings are implemented? + string AddStringToPool(const string &str); + + // If this DIE has a DW_AT_declaration attribute, this is its value. + // It is false on DIEs with no DW_AT_declaration attribute. + bool declaration_; + + // If this DIE has a DW_AT_specification attribute, this is the + // Specification structure for the DIE the attribute refers to. + // Otherwise, this is NULL. + Specification *specification_; + + // The value of the DW_AT_name attribute, or the empty string if the + // DIE has no such attribute. + string name_attribute_; + + // The demangled value of the DW_AT_MIPS_linkage_name attribute, or the empty + // string if the DIE has no such attribute or its content could not be + // demangled. 
+ string demangled_name_; +}; + +void DwarfCUToModule::GenericDIEHandler::ProcessAttributeUnsigned( + enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data) { + switch (attr) { + case dwarf2reader::DW_AT_declaration: declaration_ = (data != 0); break; + default: break; + } +} + +void DwarfCUToModule::GenericDIEHandler::ProcessAttributeReference( + enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data) { + switch (attr) { + case dwarf2reader::DW_AT_specification: { + FileContext *file_context = cu_context_->file_context; + if (file_context->IsUnhandledInterCUReference( + data, cu_context_->reporter->cu_offset())) { + cu_context_->reporter->UnhandledInterCUReference(offset_, data); + break; + } + // Find the Specification to which this attribute refers, and + // set specification_ appropriately. We could do more processing + // here, but it's better to leave the real work to our + // EndAttribute member function, at which point we know we have + // seen all the DIE's attributes. + SpecificationByOffset *specifications = + &file_context->file_private_->specifications; + SpecificationByOffset::iterator spec = specifications->find(data); + if (spec != specifications->end()) { + specification_ = &spec->second; + } else { + // Technically, there's no reason a DW_AT_specification + // couldn't be a forward reference, but supporting that would + // be a lot of work (changing to a two-pass structure), and I + // don't think any producers we care about ever emit such + // things. + cu_context_->reporter->UnknownSpecification(offset_, data); + } + break; + } + default: break; + } +} + +string DwarfCUToModule::GenericDIEHandler::AddStringToPool(const string &str) { + pair::iterator, bool> result = + cu_context_->file_context->file_private_->common_strings.insert(str); + return *result.first; +} + +void DwarfCUToModule::GenericDIEHandler::ProcessAttributeString( + enum DwarfAttribute attr, + enum DwarfForm form, + const string &data) { + switch (attr) { + case dwarf2reader::DW_AT_name: + name_attribute_ = AddStringToPool(data); + break; + case dwarf2reader::DW_AT_MIPS_linkage_name: { + char* demangled = NULL; + int status = -1; +#if !defined(__ANDROID__) // Android NDK doesn't provide abi::__cxa_demangle. + demangled = abi::__cxa_demangle(data.c_str(), NULL, NULL, &status); +#endif + if (status != 0) { + cu_context_->reporter->DemangleError(data, status); + demangled_name_ = ""; + break; + } + if (demangled) { + demangled_name_ = AddStringToPool(demangled); + free(reinterpret_cast(demangled)); + } + break; + } + default: break; + } +} + +string DwarfCUToModule::GenericDIEHandler::ComputeQualifiedName() { + // Use the demangled name, if one is available. Demangled names are + // preferable to those inferred from the DWARF structure because they + // include argument types. + const string *qualified_name = NULL; + if (!demangled_name_.empty()) { + // Found it is this DIE. + qualified_name = &demangled_name_; + } else if (specification_ && !specification_->qualified_name.empty()) { + // Found it on the specification. + qualified_name = &specification_->qualified_name; + } + + const string *unqualified_name; + const string *enclosing_name; + if (!qualified_name) { + // Find our unqualified name. If the DIE has its own DW_AT_name + // attribute, then use that; otherwise, check our specification. + if (name_attribute_.empty() && specification_) + unqualified_name = &specification_->unqualified_name; + else + unqualified_name = &name_attribute_; + + // Find the name of our enclosing context. 
If we have a + // specification, it's the specification's enclosing context that + // counts; otherwise, use this DIE's context. + if (specification_) + enclosing_name = &specification_->enclosing_name; + else + enclosing_name = &parent_context_->name; + } + + // Prepare the return value before upcoming mutations possibly invalidate the + // existing pointers. + string return_value; + if (qualified_name) { + return_value = *qualified_name; + } else { + // Combine the enclosing name and unqualified name to produce our + // own fully-qualified name. + return_value = cu_context_->language->MakeQualifiedName(*enclosing_name, + *unqualified_name); + } + + // If this DIE was marked as a declaration, record its names in the + // specification table. + if (declaration_) { + Specification spec; + if (qualified_name) { + spec.qualified_name = *qualified_name; + } else { + spec.enclosing_name = *enclosing_name; + spec.unqualified_name = *unqualified_name; + } + cu_context_->file_context->file_private_->specifications[offset_] = spec; + } + + return return_value; +} + +// A handler class for DW_TAG_subprogram DIEs. +class DwarfCUToModule::FuncHandler: public GenericDIEHandler { + public: + FuncHandler(CUContext *cu_context, DIEContext *parent_context, + uint64 offset) + : GenericDIEHandler(cu_context, parent_context, offset), + low_pc_(0), high_pc_(0), high_pc_form_(dwarf2reader::DW_FORM_addr), + abstract_origin_(NULL), inline_(false) { } + void ProcessAttributeUnsigned(enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data); + void ProcessAttributeSigned(enum DwarfAttribute attr, + enum DwarfForm form, + int64 data); + void ProcessAttributeReference(enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data); + + bool EndAttributes(); + void Finish(); + + private: + // The fully-qualified name, as derived from name_attribute_, + // specification_, parent_context_. Computed in EndAttributes. + string name_; + uint64 low_pc_, high_pc_; // DW_AT_low_pc, DW_AT_high_pc + DwarfForm high_pc_form_; // DW_AT_high_pc can be length or address. + const AbstractOrigin* abstract_origin_; + bool inline_; +}; + +void DwarfCUToModule::FuncHandler::ProcessAttributeUnsigned( + enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data) { + switch (attr) { + // If this attribute is present at all --- even if its value is + // DW_INL_not_inlined --- then GCC may cite it as someone else's + // DW_AT_abstract_origin attribute. + case dwarf2reader::DW_AT_inline: inline_ = true; break; + + case dwarf2reader::DW_AT_low_pc: low_pc_ = data; break; + case dwarf2reader::DW_AT_high_pc: + high_pc_form_ = form; + high_pc_ = data; + break; + + default: + GenericDIEHandler::ProcessAttributeUnsigned(attr, form, data); + break; + } +} + +void DwarfCUToModule::FuncHandler::ProcessAttributeSigned( + enum DwarfAttribute attr, + enum DwarfForm form, + int64 data) { + switch (attr) { + // If this attribute is present at all --- even if its value is + // DW_INL_not_inlined --- then GCC may cite it as someone else's + // DW_AT_abstract_origin attribute. 
+ case dwarf2reader::DW_AT_inline: inline_ = true; break; + + default: + break; + } +} + +void DwarfCUToModule::FuncHandler::ProcessAttributeReference( + enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data) { + switch (attr) { + case dwarf2reader::DW_AT_abstract_origin: { + const AbstractOriginByOffset& origins = + cu_context_->file_context->file_private_->origins; + AbstractOriginByOffset::const_iterator origin = origins.find(data); + if (origin != origins.end()) { + abstract_origin_ = &(origin->second); + } else { + cu_context_->reporter->UnknownAbstractOrigin(offset_, data); + } + break; + } + default: + GenericDIEHandler::ProcessAttributeReference(attr, form, data); + break; + } +} + +bool DwarfCUToModule::FuncHandler::EndAttributes() { + // Compute our name, and record a specification, if appropriate. + name_ = ComputeQualifiedName(); + if (name_.empty() && abstract_origin_) { + name_ = abstract_origin_->name; + } + return true; +} + +void DwarfCUToModule::FuncHandler::Finish() { + // Make high_pc_ an address, if it isn't already. + if (high_pc_form_ != dwarf2reader::DW_FORM_addr) { + high_pc_ += low_pc_; + } + + // Did we collect the information we need? Not all DWARF function + // entries have low and high addresses (for example, inlined + // functions that were never used), but all the ones we're + // interested in cover a non-empty range of bytes. + if (low_pc_ < high_pc_) { + // Malformed DWARF may omit the name, but all Module::Functions must + // have names. + string name; + if (!name_.empty()) { + name = name_; + } else { + cu_context_->reporter->UnnamedFunction(offset_); + name = ""; + } + + // Create a Module::Function based on the data we've gathered, and + // add it to the functions_ list. + scoped_ptr func(new Module::Function(name, low_pc_)); + func->size = high_pc_ - low_pc_; + func->parameter_size = 0; + if (func->address) { + // If the function address is zero this is a sign that this function + // description is just empty debug data and should just be discarded. + cu_context_->functions.push_back(func.release()); + } + } else if (inline_) { + AbstractOrigin origin(name_); + cu_context_->file_context->file_private_->origins[offset_] = origin; + } +} + +// A handler for DIEs that contain functions and contribute a +// component to their names: namespaces, classes, etc. +class DwarfCUToModule::NamedScopeHandler: public GenericDIEHandler { + public: + NamedScopeHandler(CUContext *cu_context, DIEContext *parent_context, + uint64 offset) + : GenericDIEHandler(cu_context, parent_context, offset) { } + bool EndAttributes(); + DIEHandler *FindChildHandler(uint64 offset, enum DwarfTag tag); + + private: + DIEContext child_context_; // A context for our children. 
+}; + +bool DwarfCUToModule::NamedScopeHandler::EndAttributes() { + child_context_.name = ComputeQualifiedName(); + return true; +} + +dwarf2reader::DIEHandler *DwarfCUToModule::NamedScopeHandler::FindChildHandler( + uint64 offset, + enum DwarfTag tag) { + switch (tag) { + case dwarf2reader::DW_TAG_subprogram: + return new FuncHandler(cu_context_, &child_context_, offset); + case dwarf2reader::DW_TAG_namespace: + case dwarf2reader::DW_TAG_class_type: + case dwarf2reader::DW_TAG_structure_type: + case dwarf2reader::DW_TAG_union_type: + return new NamedScopeHandler(cu_context_, &child_context_, offset); + default: + return NULL; + } +} + +void DwarfCUToModule::WarningReporter::CUHeading() { + if (printed_cu_header_) + return; + fprintf(stderr, "%s: in compilation unit '%s' (offset 0x%llx):\n", + filename_.c_str(), cu_name_.c_str(), cu_offset_); + printed_cu_header_ = true; +} + +void DwarfCUToModule::WarningReporter::UnknownSpecification(uint64 offset, + uint64 target) { + CUHeading(); + fprintf(stderr, "%s: the DIE at offset 0x%llx has a DW_AT_specification" + " attribute referring to the die at offset 0x%llx, which either" + " was not marked as a declaration, or comes later in the file\n", + filename_.c_str(), offset, target); +} + +void DwarfCUToModule::WarningReporter::UnknownAbstractOrigin(uint64 offset, + uint64 target) { + CUHeading(); + fprintf(stderr, "%s: the DIE at offset 0x%llx has a DW_AT_abstract_origin" + " attribute referring to the die at offset 0x%llx, which either" + " was not marked as an inline, or comes later in the file\n", + filename_.c_str(), offset, target); +} + +void DwarfCUToModule::WarningReporter::MissingSection(const string &name) { + CUHeading(); + fprintf(stderr, "%s: warning: couldn't find DWARF '%s' section\n", + filename_.c_str(), name.c_str()); +} + +void DwarfCUToModule::WarningReporter::BadLineInfoOffset(uint64 offset) { + CUHeading(); + fprintf(stderr, "%s: warning: line number data offset beyond end" + " of '.debug_line' section\n", + filename_.c_str()); +} + +void DwarfCUToModule::WarningReporter::UncoveredHeading() { + if (printed_unpaired_header_) + return; + CUHeading(); + fprintf(stderr, "%s: warning: skipping unpaired lines/functions:\n", + filename_.c_str()); + printed_unpaired_header_ = true; +} + +void DwarfCUToModule::WarningReporter::UncoveredFunction( + const Module::Function &function) { + if (!uncovered_warnings_enabled_) + return; + UncoveredHeading(); + fprintf(stderr, " function%s: %s\n", + function.size == 0 ? " (zero-length)" : "", + function.name.c_str()); +} + +void DwarfCUToModule::WarningReporter::UncoveredLine(const Module::Line &line) { + if (!uncovered_warnings_enabled_) + return; + UncoveredHeading(); + fprintf(stderr, " line%s: %s:%d at 0x%" PRIx64 "\n", + (line.size == 0 ? 
" (zero-length)" : ""), + line.file->name.c_str(), line.number, line.address); +} + +void DwarfCUToModule::WarningReporter::UnnamedFunction(uint64 offset) { + CUHeading(); + fprintf(stderr, "%s: warning: function at offset 0x%llx has no name\n", + filename_.c_str(), offset); +} + +void DwarfCUToModule::WarningReporter::DemangleError( + const string &input, int error) { + CUHeading(); + fprintf(stderr, "%s: warning: failed to demangle %s with error %d\n", + filename_.c_str(), input.c_str(), error); +} + +void DwarfCUToModule::WarningReporter::UnhandledInterCUReference( + uint64 offset, uint64 target) { + CUHeading(); + fprintf(stderr, "%s: warning: the DIE at offset 0x%llx has a " + "DW_FORM_ref_addr attribute with an inter-CU reference to " + "0x%llx, but inter-CU reference handling is turned off.\n", + filename_.c_str(), offset, target); +} + +DwarfCUToModule::DwarfCUToModule(FileContext *file_context, + LineToModuleHandler *line_reader, + WarningReporter *reporter) + : line_reader_(line_reader), + cu_context_(new CUContext(file_context, reporter)), + child_context_(new DIEContext()), + has_source_line_info_(false) { +} + +DwarfCUToModule::~DwarfCUToModule() { +} + +void DwarfCUToModule::ProcessAttributeSigned(enum DwarfAttribute attr, + enum DwarfForm form, + int64 data) { + switch (attr) { + case dwarf2reader::DW_AT_language: // source language of this CU + SetLanguage(static_cast(data)); + break; + default: + break; + } +} + +void DwarfCUToModule::ProcessAttributeUnsigned(enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data) { + switch (attr) { + case dwarf2reader::DW_AT_stmt_list: // Line number information. + has_source_line_info_ = true; + source_line_offset_ = data; + break; + case dwarf2reader::DW_AT_language: // source language of this CU + SetLanguage(static_cast(data)); + break; + default: + break; + } +} + +void DwarfCUToModule::ProcessAttributeString(enum DwarfAttribute attr, + enum DwarfForm form, + const string &data) { + switch (attr) { + case dwarf2reader::DW_AT_name: + cu_context_->reporter->SetCUName(data); + break; + case dwarf2reader::DW_AT_comp_dir: + line_reader_->StartCompilationUnit(data); + break; + default: + break; + } +} + +bool DwarfCUToModule::EndAttributes() { + return true; +} + +dwarf2reader::DIEHandler *DwarfCUToModule::FindChildHandler( + uint64 offset, + enum DwarfTag tag) { + switch (tag) { + case dwarf2reader::DW_TAG_subprogram: + return new FuncHandler(cu_context_.get(), child_context_.get(), offset); + case dwarf2reader::DW_TAG_namespace: + case dwarf2reader::DW_TAG_class_type: + case dwarf2reader::DW_TAG_structure_type: + case dwarf2reader::DW_TAG_union_type: + return new NamedScopeHandler(cu_context_.get(), child_context_.get(), + offset); + default: + return NULL; + } +} + +void DwarfCUToModule::SetLanguage(DwarfLanguage language) { + switch (language) { + case dwarf2reader::DW_LANG_Java: + cu_context_->language = Language::Java; + break; + + // DWARF has no generic language code for assembly language; this is + // what the GNU toolchain uses. + case dwarf2reader::DW_LANG_Mips_Assembler: + cu_context_->language = Language::Assembler; + break; + + // C++ covers so many cases that it probably has some way to cope + // with whatever the other languages throw at us. So make it the + // default. + // + // Objective C and Objective C++ seem to create entries for + // methods whose DW_AT_name values are already fully-qualified: + // "-[Classname method:]". These appear at the top level. 
+ // + // DWARF data for C should never include namespaces or functions + // nested in struct types, but if it ever does, then C++'s + // notation is probably not a bad choice for that. + default: + case dwarf2reader::DW_LANG_ObjC: + case dwarf2reader::DW_LANG_ObjC_plus_plus: + case dwarf2reader::DW_LANG_C: + case dwarf2reader::DW_LANG_C89: + case dwarf2reader::DW_LANG_C99: + case dwarf2reader::DW_LANG_C_plus_plus: + cu_context_->language = Language::CPlusPlus; + break; + } +} + +void DwarfCUToModule::ReadSourceLines(uint64 offset) { + const dwarf2reader::SectionMap §ion_map + = cu_context_->file_context->section_map(); + dwarf2reader::SectionMap::const_iterator map_entry + = section_map.find(".debug_line"); + // Mac OS X puts DWARF data in sections whose names begin with "__" + // instead of ".". + if (map_entry == section_map.end()) + map_entry = section_map.find("__debug_line"); + if (map_entry == section_map.end()) { + cu_context_->reporter->MissingSection(".debug_line"); + return; + } + const char *section_start = map_entry->second.first; + uint64 section_length = map_entry->second.second; + if (offset >= section_length) { + cu_context_->reporter->BadLineInfoOffset(offset); + return; + } + line_reader_->ReadProgram(section_start + offset, section_length - offset, + cu_context_->file_context->module_, &lines_); +} + +namespace { +// Return true if ADDRESS falls within the range of ITEM. +template +inline bool within(const T &item, Module::Address address) { + // Because Module::Address is unsigned, and unsigned arithmetic + // wraps around, this will be false if ADDRESS falls before the + // start of ITEM, or if it falls after ITEM's end. + return address - item.address < item.size; +} +} + +void DwarfCUToModule::AssignLinesToFunctions() { + vector *functions = &cu_context_->functions; + WarningReporter *reporter = cu_context_->reporter; + + // This would be simpler if we assumed that source line entries + // don't cross function boundaries. However, there's no real reason + // to assume that (say) a series of function definitions on the same + // line wouldn't get coalesced into one line number entry. The + // DWARF spec certainly makes no such promises. + // + // So treat the functions and lines as peers, and take the trouble + // to compute their ranges' intersections precisely. In any case, + // the hair here is a constant factor for performance; the + // complexity from here on out is linear. + + // Put both our functions and lines in order by address. + std::sort(functions->begin(), functions->end(), + Module::Function::CompareByAddress); + std::sort(lines_.begin(), lines_.end(), Module::Line::CompareByAddress); + + // The last line that we used any piece of. We use this only for + // generating warnings. + const Module::Line *last_line_used = NULL; + + // The last function and line we warned about --- so we can avoid + // doing so more than once. + const Module::Function *last_function_cited = NULL; + const Module::Line *last_line_cited = NULL; + + // Make a single pass through both vectors from lower to higher + // addresses, populating each Function's lines vector with lines + // from our lines_ vector that fall within the function's address + // range. + vector::iterator func_it = functions->begin(); + vector::const_iterator line_it = lines_.begin(); + + Module::Address current; + + // Pointers to the referents of func_it and line_it, or NULL if the + // iterator is at the end of the sequence. 
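The within() helper defined above folds the "before the start" and "past the end" checks into one comparison by relying on unsigned wrap-around. A tiny self-contained check of that trick, with a made-up Range type standing in for Module::Function and Module::Line, might be:

#include <cassert>
#include <cstdint>

struct Range {           // stand-in for an item with an address and a size
  uint64_t address;
  uint64_t size;
};

// Same trick as within(): if addr is below r.address, the unsigned
// subtraction wraps to a huge value, so a single comparison rejects
// addresses on either side of [address, address + size).
inline bool Within(const Range& r, uint64_t addr) {
  return addr - r.address < r.size;
}

int main() {
  Range r = {0x1000, 0x20};    // covers [0x1000, 0x1020)
  assert(Within(r, 0x1000));   // first byte inside
  assert(Within(r, 0x101f));   // last byte inside
  assert(!Within(r, 0x0fff));  // before the start: difference wraps around
  assert(!Within(r, 0x1020));  // one past the end
  return 0;
}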
+ Module::Function *func; + const Module::Line *line; + + // Start current at the beginning of the first line or function, + // whichever is earlier. + if (func_it != functions->end() && line_it != lines_.end()) { + func = *func_it; + line = &*line_it; + current = std::min(func->address, line->address); + } else if (line_it != lines_.end()) { + func = NULL; + line = &*line_it; + current = line->address; + } else if (func_it != functions->end()) { + func = *func_it; + line = NULL; + current = (*func_it)->address; + } else { + return; + } + + while (func || line) { + // This loop has two invariants that hold at the top. + // + // First, at least one of the iterators is not at the end of its + // sequence, and those that are not refer to the earliest + // function or line that contains or starts after CURRENT. + // + // Note that every byte is in one of four states: it is covered + // or not covered by a function, and, independently, it is + // covered or not covered by a line. + // + // The second invariant is that CURRENT refers to a byte whose + // state is different from its predecessor, or it refers to the + // first byte in the address space. In other words, CURRENT is + // always the address of a transition. + // + // Note that, although each iteration advances CURRENT from one + // transition address to the next in each iteration, it might + // not advance the iterators. Suppose we have a function that + // starts with a line, has a gap, and then a second line, and + // suppose that we enter an iteration with CURRENT at the end of + // the first line. The next transition address is the start of + // the second line, after the gap, so the iteration should + // advance CURRENT to that point. At the head of that iteration, + // the invariants require that the line iterator be pointing at + // the second line. But this is also true at the head of the + // next. And clearly, the iteration must not change the function + // iterator. So neither iterator moves. + + // Assert the first invariant (see above). + assert(!func || current < func->address || within(*func, current)); + assert(!line || current < line->address || within(*line, current)); + + // The next transition after CURRENT. + Module::Address next_transition; + + // Figure out which state we're in, add lines or warn, and compute + // the next transition address. + if (func && current >= func->address) { + if (line && current >= line->address) { + // Covered by both a line and a function. + Module::Address func_left = func->size - (current - func->address); + Module::Address line_left = line->size - (current - line->address); + // This may overflow, but things work out. + next_transition = current + std::min(func_left, line_left); + Module::Line l = *line; + l.address = current; + l.size = next_transition - current; + func->lines.push_back(l); + last_line_used = line; + } else { + // Covered by a function, but no line. + if (func != last_function_cited) { + reporter->UncoveredFunction(*func); + last_function_cited = func; + } + if (line && within(*func, line->address)) + next_transition = line->address; + else + // If this overflows, we'll catch it below. + next_transition = func->address + func->size; + } + } else { + if (line && current >= line->address) { + // Covered by a line, but no function. 
+ // + // If GCC emits padding after one function to align the start + // of the next, then it will attribute the padding + // instructions to the last source line of function (to reduce + // the size of the line number info), but omit it from the + // DW_AT_{low,high}_pc range given in .debug_info (since it + // costs nothing to be precise there). If we did use at least + // some of the line we're about to skip, and it ends at the + // start of the next function, then assume this is what + // happened, and don't warn. + if (line != last_line_cited + && !(func + && line == last_line_used + && func->address - line->address == line->size)) { + reporter->UncoveredLine(*line); + last_line_cited = line; + } + if (func && within(*line, func->address)) + next_transition = func->address; + else + // If this overflows, we'll catch it below. + next_transition = line->address + line->size; + } else { + // Covered by neither a function nor a line. By the invariant, + // both func and line begin after CURRENT. The next transition + // is the start of the next function or next line, whichever + // is earliest. + assert(func || line); + if (func && line) + next_transition = std::min(func->address, line->address); + else if (func) + next_transition = func->address; + else + next_transition = line->address; + } + } + + // If a function or line abuts the end of the address space, then + // next_transition may end up being zero, in which case we've completed + // our pass. Handle that here, instead of trying to deal with it in + // each place we compute next_transition. + if (!next_transition) + break; + + // Advance iterators as needed. If lines overlap or functions overlap, + // then we could go around more than once. We don't worry too much + // about what result we produce in that case, just as long as we don't + // hang or crash. + while (func_it != functions->end() + && next_transition >= (*func_it)->address + && !within(**func_it, next_transition)) + func_it++; + func = (func_it != functions->end()) ? *func_it : NULL; + while (line_it != lines_.end() + && next_transition >= line_it->address + && !within(*line_it, next_transition)) + line_it++; + line = (line_it != lines_.end()) ? &*line_it : NULL; + + // We must make progress. + assert(next_transition > current); + current = next_transition; + } +} + +void DwarfCUToModule::Finish() { + // Assembly language files have no function data, and that gives us + // no place to store our line numbers (even though the GNU toolchain + // will happily produce source line info for assembly language + // files). To avoid spurious warnings about lines we can't assign + // to functions, skip CUs in languages that lack functions. + if (!cu_context_->language->HasFunctions()) + return; + + // Read source line info, if we have any. + if (has_source_line_info_) + ReadSourceLines(source_line_offset_); + + vector *functions = &cu_context_->functions; + + // Dole out lines to the appropriate functions. + AssignLinesToFunctions(); + + // Add our functions, which now have source lines assigned to them, + // to module_. + cu_context_->file_context->module_->AddFunctions(functions->begin(), + functions->end()); + + // Ownership of the function objects has shifted from cu_context to + // the Module. 
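At its core, the loop above intersects two address-sorted interval lists, clipping each line to the function that covers it. A much-simplified sketch of that idea, which drops the warning, overlap, and overflow handling the real code performs (all names here are illustrative), could be:

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <vector>

struct Interval { uint64_t address, size; };

// Simplified two-pointer sweep: for every function interval, clip each
// overlapping line interval to the function's range. Both vectors must be
// sorted by address; overlapping inputs are not handled here.
static void Intersect(const std::vector<Interval>& funcs,
                      const std::vector<Interval>& lines) {
  size_t li = 0;
  for (const Interval& f : funcs) {
    uint64_t func_end = f.address + f.size;
    // Skip lines that end before this function starts.
    while (li < lines.size() &&
           lines[li].address + lines[li].size <= f.address)
      ++li;
    // Clip every line that overlaps [f.address, func_end).
    for (size_t j = li; j < lines.size() && lines[j].address < func_end; ++j) {
      uint64_t start = std::max(f.address, lines[j].address);
      uint64_t end = std::min(func_end, lines[j].address + lines[j].size);
      std::printf("function at 0x%llx gets line piece [0x%llx, 0x%llx)\n",
                  (unsigned long long)f.address,
                  (unsigned long long)start,
                  (unsigned long long)end);
    }
  }
}

int main() {
  std::vector<Interval> funcs = {{0x1000, 0x40}, {0x1040, 0x10}};
  std::vector<Interval> lines = {{0x1000, 0x20}, {0x1020, 0x30}};
  Intersect(funcs, lines);
  return 0;
}

The real AssignLinesToFunctions cannot take this shortcut because it also has to warn about uncovered functions and lines and survive overlapping or end-of-address-space input.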
+ functions->clear(); + + cu_context_->file_context->ClearSpecifications(); +} + +bool DwarfCUToModule::StartCompilationUnit(uint64 offset, + uint8 address_size, + uint8 offset_size, + uint64 cu_length, + uint8 dwarf_version) { + return dwarf_version >= 2; +} + +bool DwarfCUToModule::StartRootDIE(uint64 offset, enum DwarfTag tag) { + // We don't deal with partial compilation units (the only other tag + // likely to be used for root DIE). + return tag == dwarf2reader::DW_TAG_compile_unit; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cu_to_module.h b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cu_to_module.h new file mode 100644 index 0000000000..fd9c380d9d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cu_to_module.h @@ -0,0 +1,318 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// Add DWARF debugging information to a Breakpad symbol file. This +// file defines the DwarfCUToModule class, which accepts parsed DWARF +// data and populates a google_breakpad::Module with the results; the +// Module can then write its contents as a Breakpad symbol file. + +#ifndef COMMON_LINUX_DWARF_CU_TO_MODULE_H__ +#define COMMON_LINUX_DWARF_CU_TO_MODULE_H__ + +#include + +#include "common/language.h" +#include "common/module.h" +#include "common/dwarf/bytereader.h" +#include "common/dwarf/dwarf2diehandler.h" +#include "common/dwarf/dwarf2reader.h" +#include "common/scoped_ptr.h" +#include "common/using_std_string.h" + +namespace google_breakpad { + +using dwarf2reader::DwarfAttribute; +using dwarf2reader::DwarfForm; +using dwarf2reader::DwarfLanguage; +using dwarf2reader::DwarfTag; + +// Populate a google_breakpad::Module with DWARF debugging information. +// +// An instance of this class can be provided as a handler to a +// dwarf2reader::DIEDispatcher, which can in turn be a handler for a +// dwarf2reader::CompilationUnit DWARF parser. 
The handler uses the results +// of parsing to populate a google_breakpad::Module with source file, +// function, and source line information. +class DwarfCUToModule: public dwarf2reader::RootDIEHandler { + struct FilePrivate; + public: + // Information global to the DWARF-bearing file we are processing, + // for use by DwarfCUToModule. Each DwarfCUToModule instance deals + // with a single compilation unit within the file, but information + // global to the whole file is held here. The client is responsible + // for filling it in appropriately (except for the 'file_private' + // field, which the constructor and destructor take care of), and + // then providing it to the DwarfCUToModule instance for each + // compilation unit we process in that file. Set HANDLE_INTER_CU_REFS + // to true to handle debugging symbols with DW_FORM_ref_addr entries. + class FileContext { + public: + FileContext(const string &filename, + Module *module, + bool handle_inter_cu_refs); + ~FileContext(); + + // Add CONTENTS of size LENGTH to the section map as NAME. + void AddSectionToSectionMap(const string& name, + const char* contents, + uint64 length); + + // Clear the section map for testing. + void ClearSectionMapForTest(); + + const dwarf2reader::SectionMap& section_map() const; + + private: + friend class DwarfCUToModule; + + // Clears all the Specifications if HANDLE_INTER_CU_REFS_ is false. + void ClearSpecifications(); + + // Given an OFFSET and a CU that starts at COMPILATION_UNIT_START, returns + // true if this is an inter-compilation unit reference that is not being + // handled. + bool IsUnhandledInterCUReference(uint64 offset, + uint64 compilation_unit_start) const; + + // The name of this file, for use in error messages. + const string filename_; + + // A map of this file's sections, used for finding other DWARF + // sections that the .debug_info section may refer to. + dwarf2reader::SectionMap section_map_; + + // The Module to which we're contributing definitions. + Module *module_; + + // True if we are handling references between compilation units. + const bool handle_inter_cu_refs_; + + // Inter-compilation unit data used internally by the handlers. + scoped_ptr file_private_; + }; + + // An abstract base class for handlers that handle DWARF line data + // for DwarfCUToModule. DwarfCUToModule could certainly just use + // dwarf2reader::LineInfo itself directly, but decoupling things + // this way makes unit testing a little easier. + class LineToModuleHandler { + public: + LineToModuleHandler() { } + virtual ~LineToModuleHandler() { } + + // Called at the beginning of a new compilation unit, prior to calling + // ReadProgram(). compilation_dir will indicate the path that the + // current compilation unit was compiled in, consistent with the + // DW_AT_comp_dir DIE. + virtual void StartCompilationUnit(const string& compilation_dir) = 0; + + // Populate MODULE and LINES with source file names and code/line + // mappings, given a pointer to some DWARF line number data + // PROGRAM, and an overestimate of its size. Add no zero-length + // lines to LINES. + virtual void ReadProgram(const char *program, uint64 length, + Module *module, vector *lines) = 0; + }; + + // The interface DwarfCUToModule uses to report warnings. The member + // function definitions for this class write messages to stderr, but + // you can override them if you'd like to detect or report these + // conditions yourself. 
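The paragraph above invites subclassing WarningReporter to detect conditions programmatically rather than printing them to stderr. A sketch of such a subclass, assuming Breakpad's headers are on the include path (CountingReporter itself is hypothetical, not shipped with Breakpad):

#include "common/dwarf_cu_to_module.h"

// Hypothetical reporter that counts unnamed functions instead of logging
// them; every other warning keeps the default stderr behaviour.
class CountingReporter : public DwarfCUToModule::WarningReporter {
 public:
  CountingReporter(const string &filename, uint64 cu_offset)
      : DwarfCUToModule::WarningReporter(filename, cu_offset),
        unnamed_functions_(0) { }
  virtual void UnnamedFunction(uint64 offset) { ++unnamed_functions_; }
  int unnamed_functions() const { return unnamed_functions_; }
 private:
  int unnamed_functions_;
};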
+ class WarningReporter { + public: + // Warn about problems in the DWARF file FILENAME, in the + // compilation unit at OFFSET. + WarningReporter(const string &filename, uint64 cu_offset) + : filename_(filename), cu_offset_(cu_offset), printed_cu_header_(false), + printed_unpaired_header_(false), + uncovered_warnings_enabled_(false) { } + virtual ~WarningReporter() { } + + // Set the name of the compilation unit we're processing to NAME. + virtual void SetCUName(const string &name) { cu_name_ = name; } + + // Accessor and setter for uncovered_warnings_enabled_. + // UncoveredFunction and UncoveredLine only report a problem if that is + // true. By default, these warnings are disabled, because those + // conditions occur occasionally in healthy code. + virtual bool uncovered_warnings_enabled() const { + return uncovered_warnings_enabled_; + } + virtual void set_uncovered_warnings_enabled(bool value) { + uncovered_warnings_enabled_ = value; + } + + // A DW_AT_specification in the DIE at OFFSET refers to a DIE we + // haven't processed yet, or that wasn't marked as a declaration, + // at TARGET. + virtual void UnknownSpecification(uint64 offset, uint64 target); + + // A DW_AT_abstract_origin in the DIE at OFFSET refers to a DIE we + // haven't processed yet, or that wasn't marked as inline, at TARGET. + virtual void UnknownAbstractOrigin(uint64 offset, uint64 target); + + // We were unable to find the DWARF section named SECTION_NAME. + virtual void MissingSection(const string §ion_name); + + // The CU's DW_AT_stmt_list offset OFFSET is bogus. + virtual void BadLineInfoOffset(uint64 offset); + + // FUNCTION includes code covered by no line number data. + virtual void UncoveredFunction(const Module::Function &function); + + // Line number NUMBER in LINE_FILE, of length LENGTH, includes code + // covered by no function. + virtual void UncoveredLine(const Module::Line &line); + + // The DW_TAG_subprogram DIE at OFFSET has no name specified directly + // in the DIE, nor via a DW_AT_specification or DW_AT_abstract_origin + // link. + virtual void UnnamedFunction(uint64 offset); + + // __cxa_demangle() failed to demangle INPUT. + virtual void DemangleError(const string &input, int error); + + // The DW_FORM_ref_addr at OFFSET to TARGET was not handled because + // FilePrivate did not retain the inter-CU specification data. + virtual void UnhandledInterCUReference(uint64 offset, uint64 target); + + uint64 cu_offset() const { + return cu_offset_; + } + + protected: + const string filename_; + const uint64 cu_offset_; + string cu_name_; + bool printed_cu_header_; + bool printed_unpaired_header_; + bool uncovered_warnings_enabled_; + + private: + // Print a per-CU heading, once. + void CUHeading(); + // Print an unpaired function/line heading, once. + void UncoveredHeading(); + }; + + // Create a DWARF debugging info handler for a compilation unit + // within FILE_CONTEXT. This uses information received from the + // dwarf2reader::CompilationUnit DWARF parser to populate + // FILE_CONTEXT->module. Use LINE_READER to handle the compilation + // unit's line number data. Use REPORTER to report problems with the + // data we find. 
+ DwarfCUToModule(FileContext *file_context, + LineToModuleHandler *line_reader, + WarningReporter *reporter); + ~DwarfCUToModule(); + + void ProcessAttributeSigned(enum DwarfAttribute attr, + enum DwarfForm form, + int64 data); + void ProcessAttributeUnsigned(enum DwarfAttribute attr, + enum DwarfForm form, + uint64 data); + void ProcessAttributeString(enum DwarfAttribute attr, + enum DwarfForm form, + const string &data); + bool EndAttributes(); + DIEHandler *FindChildHandler(uint64 offset, enum DwarfTag tag); + + // Assign all our source Lines to the Functions that cover their + // addresses, and then add them to module_. + void Finish(); + + bool StartCompilationUnit(uint64 offset, uint8 address_size, + uint8 offset_size, uint64 cu_length, + uint8 dwarf_version); + bool StartRootDIE(uint64 offset, enum DwarfTag tag); + + private: + // Used internally by the handler. Full definitions are in + // dwarf_cu_to_module.cc. + struct CUContext; + struct DIEContext; + struct Specification; + class GenericDIEHandler; + class FuncHandler; + class NamedScopeHandler; + + // A map from section offsets to specifications. + typedef map SpecificationByOffset; + + // Set this compilation unit's source language to LANGUAGE. + void SetLanguage(DwarfLanguage language); + + // Read source line information at OFFSET in the .debug_line + // section. Record source files in module_, but record source lines + // in lines_; we apportion them to functions in + // AssignLinesToFunctions. + void ReadSourceLines(uint64 offset); + + // Assign the lines in lines_ to the individual line lists of the + // functions in functions_. (DWARF line information maps an entire + // compilation unit at a time, and gives no indication of which + // lines belong to which functions, beyond their addresses.) + void AssignLinesToFunctions(); + + // The only reason cu_context_ and child_context_ are pointers is + // that we want to keep their definitions private to + // dwarf_cu_to_module.cc, instead of listing them all here. They are + // owned by this DwarfCUToModule: the constructor sets them, and the + // destructor deletes them. + + // The handler to use to handle line number data. + LineToModuleHandler *line_reader_; + + // This compilation unit's context. + scoped_ptr cu_context_; + + // A context for our children. + scoped_ptr child_context_; + + // True if this compilation unit has source line information. + bool has_source_line_info_; + + // The offset of this compilation unit's line number information in + // the .debug_line section. + uint64 source_line_offset_; + + // The line numbers we have seen thus far. We accumulate these here + // during parsing. Then, in Finish, we call AssignLinesToFunctions + // to dole them out to the appropriate functions. + vector lines_; +}; + +} // namespace google_breakpad + +#endif // COMMON_LINUX_DWARF_CU_TO_MODULE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cu_to_module_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cu_to_module_unittest.cc new file mode 100644 index 0000000000..9ff842c882 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_cu_to_module_unittest.cc @@ -0,0 +1,1779 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// dwarf_cu_to_module.cc: Unit tests for google_breakpad::DwarfCUToModule. + +#include +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/dwarf_cu_to_module.h" +#include "common/using_std_string.h" + +using std::make_pair; +using std::vector; + +using dwarf2reader::DIEHandler; +using dwarf2reader::DwarfTag; +using dwarf2reader::DwarfAttribute; +using dwarf2reader::DwarfForm; +using dwarf2reader::DwarfInline; +using dwarf2reader::RootDIEHandler; +using google_breakpad::DwarfCUToModule; +using google_breakpad::Module; + +using ::testing::_; +using ::testing::AtMost; +using ::testing::Invoke; +using ::testing::Return; +using ::testing::Test; +using ::testing::TestWithParam; +using ::testing::Values; +using ::testing::ValuesIn; + +// Mock classes. 
+ +class MockLineToModuleHandler: public DwarfCUToModule::LineToModuleHandler { + public: + MOCK_METHOD1(StartCompilationUnit, void(const string& compilation_dir)); + MOCK_METHOD4(ReadProgram, void(const char* program, uint64 length, + Module *module, vector *lines)); +}; + +class MockWarningReporter: public DwarfCUToModule::WarningReporter { + public: + MockWarningReporter(const string &filename, uint64 cu_offset) + : DwarfCUToModule::WarningReporter(filename, cu_offset) { } + MOCK_METHOD1(SetCUName, void(const string &name)); + MOCK_METHOD2(UnknownSpecification, void(uint64 offset, uint64 target)); + MOCK_METHOD2(UnknownAbstractOrigin, void(uint64 offset, uint64 target)); + MOCK_METHOD1(MissingSection, void(const string §ion_name)); + MOCK_METHOD1(BadLineInfoOffset, void(uint64 offset)); + MOCK_METHOD1(UncoveredFunction, void(const Module::Function &function)); + MOCK_METHOD1(UncoveredLine, void(const Module::Line &line)); + MOCK_METHOD1(UnnamedFunction, void(uint64 offset)); + MOCK_METHOD2(DemangleError, void(const string &input, int error)); + MOCK_METHOD2(UnhandledInterCUReference, void(uint64 offset, uint64 target)); +}; + +// A fixture class including all the objects needed to handle a +// compilation unit, and their entourage. It includes member functions +// for doing common kinds of setup and tests. +class CUFixtureBase { + public: + // If we have: + // + // vector lines; + // AppendLinesFunctor appender(lines); + // + // then doing: + // + // appender(line_program, length, module, line_vector); + // + // will append lines to the end of line_vector. We can use this with + // MockLineToModuleHandler like this: + // + // MockLineToModuleHandler l2m; + // EXPECT_CALL(l2m, ReadProgram(_,_,_,_)) + // .WillOnce(DoAll(Invoke(appender), Return())); + // + // in which case calling l2m with some line vector will append lines. + class AppendLinesFunctor { + public: + explicit AppendLinesFunctor( + const vector *lines) : lines_(lines) { } + void operator()(const char *program, uint64 length, + Module *module, vector *lines) { + lines->insert(lines->end(), lines_->begin(), lines_->end()); + } + private: + const vector *lines_; + }; + + CUFixtureBase() + : module_("module-name", "module-os", "module-arch", "module-id"), + file_context_("dwarf-filename", &module_, true), + language_(dwarf2reader::DW_LANG_none), + language_signed_(false), + appender_(&lines_), + reporter_("dwarf-filename", 0xcf8f9bb6443d29b5LL), + root_handler_(&file_context_, &line_reader_, &reporter_), + functions_filled_(false) { + // By default, expect no warnings to be reported, and expect the + // compilation unit's name to be provided. The test can override + // these expectations. + EXPECT_CALL(reporter_, SetCUName("compilation-unit-name")).Times(1); + EXPECT_CALL(reporter_, UnknownSpecification(_, _)).Times(0); + EXPECT_CALL(reporter_, UnknownAbstractOrigin(_, _)).Times(0); + EXPECT_CALL(reporter_, MissingSection(_)).Times(0); + EXPECT_CALL(reporter_, BadLineInfoOffset(_)).Times(0); + EXPECT_CALL(reporter_, UncoveredFunction(_)).Times(0); + EXPECT_CALL(reporter_, UncoveredLine(_)).Times(0); + EXPECT_CALL(reporter_, UnnamedFunction(_)).Times(0); + EXPECT_CALL(reporter_, UnhandledInterCUReference(_, _)).Times(0); + + // By default, expect the line program reader not to be invoked. We + // may override this in StartCU. 
+ EXPECT_CALL(line_reader_, StartCompilationUnit(_)).Times(0); + EXPECT_CALL(line_reader_, ReadProgram(_,_,_,_)).Times(0); + + // The handler will consult this section map to decide what to + // pass to our line reader. + file_context_.AddSectionToSectionMap(".debug_line", + dummy_line_program_, + dummy_line_size_); + } + + // Add a line with the given address, size, filename, and line + // number to the end of the statement list the handler will receive + // when it invokes its LineToModuleHandler. Call this before calling + // StartCU. + void PushLine(Module::Address address, Module::Address size, + const string &filename, int line_number); + + // Use LANGUAGE for the compilation unit. More precisely, arrange + // for StartCU to pass the compilation unit's root DIE a + // DW_AT_language attribute whose value is LANGUAGE. + void SetLanguage(dwarf2reader::DwarfLanguage language) { + language_ = language; + } + + // If SIGNED true, have StartCU report DW_AT_language as a signed + // attribute; if false, have it report it as unsigned. + void SetLanguageSigned(bool is_signed) { language_signed_ = is_signed; } + + // Call the handler this.root_handler_'s StartCompilationUnit and + // StartRootDIE member functions, passing it appropriate attributes as + // determined by prior calls to PushLine and SetLanguage. Leave + // this.root_handler_ ready to hear about children: call + // this.root_handler_.EndAttributes, but not this.root_handler_.Finish. + void StartCU(); + + // Have HANDLER process some strange attribute/form/value triples. + void ProcessStrangeAttributes(dwarf2reader::DIEHandler *handler); + + // Start a child DIE of PARENT with the given tag and name. Leave + // the handler ready to hear about children: call EndAttributes, but + // not Finish. + DIEHandler *StartNamedDIE(DIEHandler *parent, DwarfTag tag, + const string &name); + + // Start a child DIE of PARENT with the given tag and a + // DW_AT_specification attribute whose value is SPECIFICATION. Leave + // the handler ready to hear about children: call EndAttributes, but + // not Finish. If NAME is non-zero, use it as the DW_AT_name + // attribute. + DIEHandler *StartSpecifiedDIE(DIEHandler *parent, DwarfTag tag, + uint64 specification, const char *name = NULL); + + // Define a function as a child of PARENT with the given name, address, and + // size. If high_pc_form is DW_FORM_addr then the DW_AT_high_pc attribute + // will be written as an address; otherwise it will be written as the + // function's size. Call EndAttributes and Finish; one cannot define + // children of the defined function's DIE. + void DefineFunction(DIEHandler *parent, const string &name, + Module::Address address, Module::Address size, + const char* mangled_name, + DwarfForm high_pc_form = dwarf2reader::DW_FORM_addr); + + // Create a declaration DIE as a child of PARENT with the given + // offset, tag and name. If NAME is the empty string, don't provide + // a DW_AT_name attribute. Call EndAttributes and Finish. + void DeclarationDIE(DIEHandler *parent, uint64 offset, + DwarfTag tag, const string &name, + const string &mangled_name); + + // Create a definition DIE as a child of PARENT with the given tag + // that refers to the declaration DIE at offset SPECIFICATION as its + // specification. If NAME is non-empty, pass it as the DW_AT_name + // attribute. If SIZE is non-zero, record ADDRESS and SIZE as + // low_pc/high_pc attributes. 
+ void DefinitionDIE(DIEHandler *parent, DwarfTag tag, + uint64 specification, const string &name, + Module::Address address = 0, Module::Address size = 0); + + // Create an inline DW_TAG_subprogram DIE as a child of PARENT. If + // SPECIFICATION is non-zero, then the DIE refers to the declaration DIE at + // offset SPECIFICATION as its specification. If Name is non-empty, pass it + // as the DW_AT_name attribute. + void AbstractInstanceDIE(DIEHandler *parent, uint64 offset, + DwarfInline type, uint64 specification, + const string &name, + DwarfForm form = dwarf2reader::DW_FORM_data1); + + // Create a DW_TAG_subprogram DIE as a child of PARENT that refers to + // ORIGIN in its DW_AT_abstract_origin attribute. If NAME is the empty + // string, don't provide a DW_AT_name attribute. + void DefineInlineInstanceDIE(DIEHandler *parent, const string &name, + uint64 origin, Module::Address address, + Module::Address size); + + // The following Test* functions should be called after calling + // this.root_handler_.Finish. After that point, no further calls + // should be made on the handler. + + // Test that the number of functions defined in the module this.module_ is + // equal to EXPECTED. + void TestFunctionCount(size_t expected); + + // Test that the I'th function (ordered by address) in the module + // this.module_ has the given name, address, and size, and that its + // parameter size is zero. + void TestFunction(int i, const string &name, + Module::Address address, Module::Address size); + + // Test that the number of source lines owned by the I'th function + // in the module this.module_ is equal to EXPECTED. + void TestLineCount(int i, size_t expected); + + // Test that the J'th line (ordered by address) of the I'th function + // (again, by address) has the given address, size, filename, and + // line number. + void TestLine(int i, int j, Module::Address address, Module::Address size, + const string &filename, int number); + + // Actual objects under test. + Module module_; + DwarfCUToModule::FileContext file_context_; + + // If this is not DW_LANG_none, we'll pass it as a DW_AT_language + // attribute to the compilation unit. This defaults to DW_LANG_none. + dwarf2reader::DwarfLanguage language_; + + // If this is true, report DW_AT_language as a signed value; if false, + // report it as an unsigned value. + bool language_signed_; + + // If this is not empty, we'll give the CU a DW_AT_comp_dir attribute that + // indicates the path that this compilation unit was compiled in. + string compilation_dir_; + + // If this is not empty, we'll give the CU a DW_AT_stmt_list + // attribute that, when passed to line_reader_, adds these lines to the + // provided lines array. + vector lines_; + + // Mock line program reader. + MockLineToModuleHandler line_reader_; + AppendLinesFunctor appender_; + static const char dummy_line_program_[]; + static const size_t dummy_line_size_; + + MockWarningReporter reporter_; + DwarfCUToModule root_handler_; + + private: + // Fill functions_, if we haven't already. + void FillFunctions(); + + // If functions_filled_ is true, this is a table of functions we've + // extracted from module_, sorted by address. + vector functions_; + // True if we have filled the above vector with this.module_'s function list. 
+ bool functions_filled_; +}; + +const char CUFixtureBase::dummy_line_program_[] = "lots of fun data"; +const size_t CUFixtureBase::dummy_line_size_ = + sizeof(CUFixtureBase::dummy_line_program_); + +void CUFixtureBase::PushLine(Module::Address address, Module::Address size, + const string &filename, int line_number) { + Module::Line l; + l.address = address; + l.size = size; + l.file = module_.FindFile(filename); + l.number = line_number; + lines_.push_back(l); +} + +void CUFixtureBase::StartCU() { + if (!compilation_dir_.empty()) + EXPECT_CALL(line_reader_, + StartCompilationUnit(compilation_dir_)).Times(1); + + // If we have lines, make the line reader expect to be invoked at + // most once. (Hey, if the handler can pass its tests without + // bothering to read the line number data, that's great.) + // Have it add the lines passed to PushLine. Otherwise, leave the + // initial expectation (no calls) in force. + if (!lines_.empty()) + EXPECT_CALL(line_reader_, + ReadProgram(&dummy_line_program_[0], dummy_line_size_, + &module_, _)) + .Times(AtMost(1)) + .WillOnce(DoAll(Invoke(appender_), Return())); + + ASSERT_TRUE(root_handler_ + .StartCompilationUnit(0x51182ec307610b51ULL, 0x81, 0x44, + 0x4241b4f33720dd5cULL, 3)); + { + ASSERT_TRUE(root_handler_.StartRootDIE(0x02e56bfbda9e7337ULL, + dwarf2reader::DW_TAG_compile_unit)); + } + root_handler_.ProcessAttributeString(dwarf2reader::DW_AT_name, + dwarf2reader::DW_FORM_strp, + "compilation-unit-name"); + if (!compilation_dir_.empty()) + root_handler_.ProcessAttributeString(dwarf2reader::DW_AT_comp_dir, + dwarf2reader::DW_FORM_strp, + compilation_dir_); + if (!lines_.empty()) + root_handler_.ProcessAttributeUnsigned(dwarf2reader::DW_AT_stmt_list, + dwarf2reader::DW_FORM_ref4, + 0); + if (language_ != dwarf2reader::DW_LANG_none) { + if (language_signed_) + root_handler_.ProcessAttributeSigned(dwarf2reader::DW_AT_language, + dwarf2reader::DW_FORM_sdata, + language_); + else + root_handler_.ProcessAttributeUnsigned(dwarf2reader::DW_AT_language, + dwarf2reader::DW_FORM_udata, + language_); + } + ASSERT_TRUE(root_handler_.EndAttributes()); +} + +void CUFixtureBase::ProcessStrangeAttributes( + dwarf2reader::DIEHandler *handler) { + handler->ProcessAttributeUnsigned((DwarfAttribute) 0xf560dead, + (DwarfForm) 0x4106e4db, + 0xa592571997facda1ULL); + handler->ProcessAttributeSigned((DwarfAttribute) 0x85380095, + (DwarfForm) 0x0f16fe87, + 0x12602a4e3bf1f446LL); + handler->ProcessAttributeReference((DwarfAttribute) 0xf7f7480f, + (DwarfForm) 0x829e038a, + 0x50fddef44734fdecULL); + static const char buffer[10] = "frobynode"; + handler->ProcessAttributeBuffer((DwarfAttribute) 0xa55ffb51, + (DwarfForm) 0x2f43b041, + buffer, sizeof(buffer)); + handler->ProcessAttributeString((DwarfAttribute) 0x2f43b041, + (DwarfForm) 0x895ffa23, + "strange string"); +} + +DIEHandler *CUFixtureBase::StartNamedDIE(DIEHandler *parent, + DwarfTag tag, + const string &name) { + dwarf2reader::DIEHandler *handler + = parent->FindChildHandler(0x8f4c783c0467c989ULL, tag); + if (!handler) + return NULL; + handler->ProcessAttributeString(dwarf2reader::DW_AT_name, + dwarf2reader::DW_FORM_strp, + name); + ProcessStrangeAttributes(handler); + if (!handler->EndAttributes()) { + handler->Finish(); + delete handler; + return NULL; + } + + return handler; +} + +DIEHandler *CUFixtureBase::StartSpecifiedDIE(DIEHandler *parent, + DwarfTag tag, + uint64 specification, + const char *name) { + dwarf2reader::DIEHandler *handler + = parent->FindChildHandler(0x8f4c783c0467c989ULL, tag); + if (!handler) + 
return NULL; + if (name) + handler->ProcessAttributeString(dwarf2reader::DW_AT_name, + dwarf2reader::DW_FORM_strp, + name); + handler->ProcessAttributeReference(dwarf2reader::DW_AT_specification, + dwarf2reader::DW_FORM_ref4, + specification); + if (!handler->EndAttributes()) { + handler->Finish(); + delete handler; + return NULL; + } + + return handler; +} + +void CUFixtureBase::DefineFunction(dwarf2reader::DIEHandler *parent, + const string &name, Module::Address address, + Module::Address size, + const char* mangled_name, + DwarfForm high_pc_form) { + dwarf2reader::DIEHandler *func + = parent->FindChildHandler(0xe34797c7e68590a8LL, + dwarf2reader::DW_TAG_subprogram); + ASSERT_TRUE(func != NULL); + func->ProcessAttributeString(dwarf2reader::DW_AT_name, + dwarf2reader::DW_FORM_strp, + name); + func->ProcessAttributeUnsigned(dwarf2reader::DW_AT_low_pc, + dwarf2reader::DW_FORM_addr, + address); + + Module::Address high_pc = size; + if (high_pc_form == dwarf2reader::DW_FORM_addr) { + high_pc += address; + } + func->ProcessAttributeUnsigned(dwarf2reader::DW_AT_high_pc, + high_pc_form, + high_pc); + + if (mangled_name) + func->ProcessAttributeString(dwarf2reader::DW_AT_MIPS_linkage_name, + dwarf2reader::DW_FORM_strp, + mangled_name); + + ProcessStrangeAttributes(func); + EXPECT_TRUE(func->EndAttributes()); + func->Finish(); + delete func; +} + +void CUFixtureBase::DeclarationDIE(DIEHandler *parent, uint64 offset, + DwarfTag tag, + const string &name, + const string &mangled_name) { + dwarf2reader::DIEHandler *die = parent->FindChildHandler(offset, tag); + ASSERT_TRUE(die != NULL); + if (!name.empty()) + die->ProcessAttributeString(dwarf2reader::DW_AT_name, + dwarf2reader::DW_FORM_strp, + name); + if (!mangled_name.empty()) + die->ProcessAttributeString(dwarf2reader::DW_AT_MIPS_linkage_name, + dwarf2reader::DW_FORM_strp, + mangled_name); + + die->ProcessAttributeUnsigned(dwarf2reader::DW_AT_declaration, + dwarf2reader::DW_FORM_flag, + 1); + EXPECT_TRUE(die->EndAttributes()); + die->Finish(); + delete die; +} + +void CUFixtureBase::DefinitionDIE(DIEHandler *parent, + DwarfTag tag, + uint64 specification, + const string &name, + Module::Address address, + Module::Address size) { + dwarf2reader::DIEHandler *die + = parent->FindChildHandler(0x6ccfea031a9e6cc9ULL, tag); + ASSERT_TRUE(die != NULL); + die->ProcessAttributeReference(dwarf2reader::DW_AT_specification, + dwarf2reader::DW_FORM_ref4, + specification); + if (!name.empty()) + die->ProcessAttributeString(dwarf2reader::DW_AT_name, + dwarf2reader::DW_FORM_strp, + name); + if (size) { + die->ProcessAttributeUnsigned(dwarf2reader::DW_AT_low_pc, + dwarf2reader::DW_FORM_addr, + address); + die->ProcessAttributeUnsigned(dwarf2reader::DW_AT_high_pc, + dwarf2reader::DW_FORM_addr, + address + size); + } + EXPECT_TRUE(die->EndAttributes()); + die->Finish(); + delete die; +} + +void CUFixtureBase::AbstractInstanceDIE(DIEHandler *parent, + uint64 offset, + DwarfInline type, + uint64 specification, + const string &name, + DwarfForm form) { + dwarf2reader::DIEHandler *die + = parent->FindChildHandler(offset, dwarf2reader::DW_TAG_subprogram); + ASSERT_TRUE(die != NULL); + if (specification != 0ULL) + die->ProcessAttributeReference(dwarf2reader::DW_AT_specification, + dwarf2reader::DW_FORM_ref4, + specification); + if (form == dwarf2reader::DW_FORM_sdata) { + die->ProcessAttributeSigned(dwarf2reader::DW_AT_inline, form, type); + } else { + die->ProcessAttributeUnsigned(dwarf2reader::DW_AT_inline, form, type); + } + if (!name.empty()) + 
die->ProcessAttributeString(dwarf2reader::DW_AT_name, + dwarf2reader::DW_FORM_strp, + name); + + EXPECT_TRUE(die->EndAttributes()); + die->Finish(); + delete die; +} + +void CUFixtureBase::DefineInlineInstanceDIE(DIEHandler *parent, + const string &name, + uint64 origin, + Module::Address address, + Module::Address size) { + dwarf2reader::DIEHandler *func + = parent->FindChildHandler(0x11c70f94c6e87ccdLL, + dwarf2reader::DW_TAG_subprogram); + ASSERT_TRUE(func != NULL); + if (!name.empty()) { + func->ProcessAttributeString(dwarf2reader::DW_AT_name, + dwarf2reader::DW_FORM_strp, + name); + } + func->ProcessAttributeUnsigned(dwarf2reader::DW_AT_low_pc, + dwarf2reader::DW_FORM_addr, + address); + func->ProcessAttributeUnsigned(dwarf2reader::DW_AT_high_pc, + dwarf2reader::DW_FORM_addr, + address + size); + func->ProcessAttributeReference(dwarf2reader::DW_AT_abstract_origin, + dwarf2reader::DW_FORM_ref4, + origin); + ProcessStrangeAttributes(func); + EXPECT_TRUE(func->EndAttributes()); + func->Finish(); + delete func; +} + +void CUFixtureBase::FillFunctions() { + if (functions_filled_) + return; + module_.GetFunctions(&functions_, functions_.end()); + sort(functions_.begin(), functions_.end(), + Module::Function::CompareByAddress); + functions_filled_ = true; +} + +void CUFixtureBase::TestFunctionCount(size_t expected) { + FillFunctions(); + ASSERT_EQ(expected, functions_.size()); +} + +void CUFixtureBase::TestFunction(int i, const string &name, + Module::Address address, + Module::Address size) { + FillFunctions(); + ASSERT_LT((size_t) i, functions_.size()); + + Module::Function *function = functions_[i]; + EXPECT_EQ(name, function->name); + EXPECT_EQ(address, function->address); + EXPECT_EQ(size, function->size); + EXPECT_EQ(0U, function->parameter_size); +} + +void CUFixtureBase::TestLineCount(int i, size_t expected) { + FillFunctions(); + ASSERT_LT((size_t) i, functions_.size()); + + ASSERT_EQ(expected, functions_[i]->lines.size()); +} + +void CUFixtureBase::TestLine(int i, int j, + Module::Address address, Module::Address size, + const string &filename, int number) { + FillFunctions(); + ASSERT_LT((size_t) i, functions_.size()); + ASSERT_LT((size_t) j, functions_[i]->lines.size()); + + Module::Line *line = &functions_[i]->lines[j]; + EXPECT_EQ(address, line->address); + EXPECT_EQ(size, line->size); + EXPECT_EQ(filename, line->file->name.c_str()); + EXPECT_EQ(number, line->number); +} + +// Include caller locations for our test subroutines. +#define TRACE(call) do { SCOPED_TRACE("called from here"); call; } while (0) +#define PushLine(a,b,c,d) TRACE(PushLine((a),(b),(c),(d))) +#define SetLanguage(a) TRACE(SetLanguage(a)) +#define StartCU() TRACE(StartCU()) +#define DefineFunction(a,b,c,d,e) TRACE(DefineFunction((a),(b),(c),(d),(e))) +// (DefineFunction) instead of DefineFunction to avoid macro expansion. 
+#define DefineFunction6(a,b,c,d,e,f) \ + TRACE((DefineFunction)((a),(b),(c),(d),(e),(f))) +#define DeclarationDIE(a,b,c,d,e) TRACE(DeclarationDIE((a),(b),(c),(d),(e))) +#define DefinitionDIE(a,b,c,d,e,f) \ + TRACE(DefinitionDIE((a),(b),(c),(d),(e),(f))) +#define TestFunctionCount(a) TRACE(TestFunctionCount(a)) +#define TestFunction(a,b,c,d) TRACE(TestFunction((a),(b),(c),(d))) +#define TestLineCount(a,b) TRACE(TestLineCount((a),(b))) +#define TestLine(a,b,c,d,e,f) TRACE(TestLine((a),(b),(c),(d),(e),(f))) + +class SimpleCU: public CUFixtureBase, public Test { +}; + +TEST_F(SimpleCU, CompilationDir) { + compilation_dir_ = "/src/build/"; + + StartCU(); + root_handler_.Finish(); +} + +TEST_F(SimpleCU, OneFunc) { + PushLine(0x938cf8c07def4d34ULL, 0x55592d727f6cd01fLL, "line-file", 246571772); + + StartCU(); + DefineFunction(&root_handler_, "function1", + 0x938cf8c07def4d34ULL, 0x55592d727f6cd01fLL, NULL); + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "function1", 0x938cf8c07def4d34ULL, 0x55592d727f6cd01fLL); + TestLineCount(0, 1); + TestLine(0, 0, 0x938cf8c07def4d34ULL, 0x55592d727f6cd01fLL, "line-file", + 246571772); +} + +// As above, only DW_AT_high_pc is a length rather than an address. +TEST_F(SimpleCU, OneFuncHighPcIsLength) { + PushLine(0x938cf8c07def4d34ULL, 0x55592d727f6cd01fLL, "line-file", 246571772); + + StartCU(); + DefineFunction6(&root_handler_, "function1", + 0x938cf8c07def4d34ULL, 0x55592d727f6cd01fLL, NULL, + dwarf2reader::DW_FORM_udata); + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "function1", 0x938cf8c07def4d34ULL, 0x55592d727f6cd01fLL); + TestLineCount(0, 1); + TestLine(0, 0, 0x938cf8c07def4d34ULL, 0x55592d727f6cd01fLL, "line-file", + 246571772); +} + +TEST_F(SimpleCU, MangledName) { + PushLine(0x938cf8c07def4d34ULL, 0x55592d727f6cd01fLL, "line-file", 246571772); + + StartCU(); + DefineFunction(&root_handler_, "function1", + 0x938cf8c07def4d34ULL, 0x55592d727f6cd01fLL, "_ZN1n1fEi"); + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "n::f(int)", 0x938cf8c07def4d34ULL, 0x55592d727f6cd01fLL); +} + +TEST_F(SimpleCU, IrrelevantRootChildren) { + StartCU(); + EXPECT_FALSE(root_handler_ + .FindChildHandler(0x7db32bff4e2dcfb1ULL, + dwarf2reader::DW_TAG_lexical_block)); +} + +TEST_F(SimpleCU, IrrelevantNamedScopeChildren) { + StartCU(); + DIEHandler *class_A_handler + = StartNamedDIE(&root_handler_, dwarf2reader::DW_TAG_class_type, "class_A"); + EXPECT_TRUE(class_A_handler != NULL); + EXPECT_FALSE(class_A_handler + ->FindChildHandler(0x02e55999b865e4e9ULL, + dwarf2reader::DW_TAG_lexical_block)); + delete class_A_handler; +} + +// Verify that FileContexts can safely be deleted unused. +TEST_F(SimpleCU, UnusedFileContext) { + Module m("module-name", "module-os", "module-arch", "module-id"); + DwarfCUToModule::FileContext fc("dwarf-filename", &m, true); + + // Kludge: satisfy reporter_'s expectation. 
+ reporter_.SetCUName("compilation-unit-name"); +} + +TEST_F(SimpleCU, InlineFunction) { + PushLine(0x1758a0f941b71efbULL, 0x1cf154f1f545e146ULL, "line-file", 75173118); + + StartCU(); + AbstractInstanceDIE(&root_handler_, 0x1e8dac5d507ed7abULL, + dwarf2reader::DW_INL_inlined, 0, "inline-name"); + DefineInlineInstanceDIE(&root_handler_, "", 0x1e8dac5d507ed7abULL, + 0x1758a0f941b71efbULL, 0x1cf154f1f545e146ULL); + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "inline-name", + 0x1758a0f941b71efbULL, 0x1cf154f1f545e146ULL); +} + +TEST_F(SimpleCU, InlineFunctionSignedAttribute) { + PushLine(0x1758a0f941b71efbULL, 0x1cf154f1f545e146ULL, "line-file", 75173118); + + StartCU(); + AbstractInstanceDIE(&root_handler_, 0x1e8dac5d507ed7abULL, + dwarf2reader::DW_INL_inlined, 0, "inline-name", + dwarf2reader::DW_FORM_sdata); + DefineInlineInstanceDIE(&root_handler_, "", 0x1e8dac5d507ed7abULL, + 0x1758a0f941b71efbULL, 0x1cf154f1f545e146ULL); + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "inline-name", + 0x1758a0f941b71efbULL, 0x1cf154f1f545e146ULL); +} + +// Any DIE with an DW_AT_inline attribute can be cited by +// DW_AT_abstract_origin attributes --- even if the value of the +// DW_AT_inline attribute is DW_INL_not_inlined. +TEST_F(SimpleCU, AbstractOriginNotInlined) { + PushLine(0x2805c4531be6ca0eULL, 0x686b52155a8d4d2cULL, "line-file", 6111581); + + StartCU(); + AbstractInstanceDIE(&root_handler_, 0x93e9cdad52826b39ULL, + dwarf2reader::DW_INL_not_inlined, 0, "abstract-instance"); + DefineInlineInstanceDIE(&root_handler_, "", 0x93e9cdad52826b39ULL, + 0x2805c4531be6ca0eULL, 0x686b52155a8d4d2cULL); + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "abstract-instance", + 0x2805c4531be6ca0eULL, 0x686b52155a8d4d2cULL); +} + +TEST_F(SimpleCU, UnknownAbstractOrigin) { + EXPECT_CALL(reporter_, UnknownAbstractOrigin(_, 1ULL)).WillOnce(Return()); + EXPECT_CALL(reporter_, UnnamedFunction(0x11c70f94c6e87ccdLL)) + .WillOnce(Return()); + PushLine(0x1758a0f941b71efbULL, 0x1cf154f1f545e146ULL, "line-file", 75173118); + + StartCU(); + AbstractInstanceDIE(&root_handler_, 0x1e8dac5d507ed7abULL, + dwarf2reader::DW_INL_inlined, 0, "inline-name"); + DefineInlineInstanceDIE(&root_handler_, "", 1ULL, + 0x1758a0f941b71efbULL, 0x1cf154f1f545e146ULL); + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "", + 0x1758a0f941b71efbULL, 0x1cf154f1f545e146ULL); +} + +TEST_F(SimpleCU, UnnamedFunction) { + EXPECT_CALL(reporter_, UnnamedFunction(0xe34797c7e68590a8LL)) + .WillOnce(Return()); + PushLine(0x72b80e41a0ac1d40ULL, 0x537174f231ee181cULL, "line-file", 14044850); + + StartCU(); + DefineFunction(&root_handler_, "", + 0x72b80e41a0ac1d40ULL, 0x537174f231ee181cULL, NULL); + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "", + 0x72b80e41a0ac1d40ULL, 0x537174f231ee181cULL); +} + +// An address range. +struct Range { + Module::Address start, end; +}; + +// Test data for pairing functions and lines. +struct Situation { + // Two function intervals, and two line intervals. + Range functions[2], lines[2]; + + // The number of lines we expect to be assigned to each of the + // functions, and the address ranges. + int paired_count[2]; + Range paired[2][2]; + + // The number of functions that are not entirely covered by lines, + // and vice versa. 
+  int uncovered_functions, uncovered_lines;
+};
+
+#define PAIRING(func1_start, func1_end, func2_start, func2_end, \
+                line1_start, line1_end, line2_start, line2_end, \
+                func1_num_lines, func2_num_lines, \
+                func1_line1_start, func1_line1_end, \
+                func1_line2_start, func1_line2_end, \
+                func2_line1_start, func2_line1_end, \
+                func2_line2_start, func2_line2_end, \
+                uncovered_functions, uncovered_lines) \
+  { { { func1_start, func1_end }, { func2_start, func2_end } }, \
+    { { line1_start, line1_end }, { line2_start, line2_end } }, \
+    { func1_num_lines, func2_num_lines }, \
+    { { { func1_line1_start, func1_line1_end }, \
+        { func1_line2_start, func1_line2_end } }, \
+      { { func2_line1_start, func2_line1_end }, \
+        { func2_line2_start, func2_line2_end } } }, \
+    uncovered_functions, uncovered_lines },
+
+Situation situations[] = {
+#include "common/testdata/func-line-pairing.h"
+};
+
+#undef PAIRING
+
+class FuncLinePairing: public CUFixtureBase,
+                       public TestWithParam<Situation> { };
+
+INSTANTIATE_TEST_CASE_P(AllSituations, FuncLinePairing,
+                        ValuesIn(situations));
+
+TEST_P(FuncLinePairing, Pairing) {
+  const Situation &s = GetParam();
+  PushLine(s.lines[0].start,
+           s.lines[0].end - s.lines[0].start,
+           "line-file", 67636963);
+  PushLine(s.lines[1].start,
+           s.lines[1].end - s.lines[1].start,
+           "line-file", 67636963);
+  if (s.uncovered_functions)
+    EXPECT_CALL(reporter_, UncoveredFunction(_))
+      .Times(s.uncovered_functions)
+      .WillRepeatedly(Return());
+  if (s.uncovered_lines)
+    EXPECT_CALL(reporter_, UncoveredLine(_))
+      .Times(s.uncovered_lines)
+      .WillRepeatedly(Return());
+
+  StartCU();
+  DefineFunction(&root_handler_, "function1",
+                 s.functions[0].start,
+                 s.functions[0].end - s.functions[0].start, NULL);
+  DefineFunction(&root_handler_, "function2",
+                 s.functions[1].start,
+                 s.functions[1].end - s.functions[1].start, NULL);
+  root_handler_.Finish();
+
+  TestFunctionCount(2);
+  TestFunction(0, "function1",
+               s.functions[0].start,
+               s.functions[0].end - s.functions[0].start);
+  TestLineCount(0, s.paired_count[0]);
+  for (int i = 0; i < s.paired_count[0]; i++)
+    TestLine(0, i, s.paired[0][i].start,
+             s.paired[0][i].end - s.paired[0][i].start,
+             "line-file", 67636963);
+  TestFunction(1, "function2",
+               s.functions[1].start,
+               s.functions[1].end - s.functions[1].start);
+  TestLineCount(1, s.paired_count[1]);
+  for (int i = 0; i < s.paired_count[1]; i++)
+    TestLine(1, i, s.paired[1][i].start,
+             s.paired[1][i].end - s.paired[1][i].start,
+             "line-file", 67636963);
+}
+
+TEST_F(FuncLinePairing, EmptyCU) {
+  StartCU();
+  root_handler_.Finish();
+
+  TestFunctionCount(0);
+}
+
+TEST_F(FuncLinePairing, LinesNoFuncs) {
+  PushLine(40, 2, "line-file", 82485646);
+  EXPECT_CALL(reporter_, UncoveredLine(_)).WillOnce(Return());
+
+  StartCU();
+  root_handler_.Finish();
+
+  TestFunctionCount(0);
+}
+
+TEST_F(FuncLinePairing, FuncsNoLines) {
+  EXPECT_CALL(reporter_, UncoveredFunction(_)).WillOnce(Return());
+
+  StartCU();
+  DefineFunction(&root_handler_, "function1", 0x127da12ffcf5c51fULL, 0x1000U,
+                 NULL);
+  root_handler_.Finish();
+
+  TestFunctionCount(1);
+  TestFunction(0, "function1", 0x127da12ffcf5c51fULL, 0x1000U);
+}
+
+TEST_F(FuncLinePairing, GapThenFunction) {
+  PushLine(20, 2, "line-file-2", 174314698);
+  PushLine(10, 2, "line-file-1", 263008005);
+
+  StartCU();
+  DefineFunction(&root_handler_, "function1", 10, 2, NULL);
+  DefineFunction(&root_handler_, "function2", 20, 2, NULL);
+  root_handler_.Finish();
+
+  TestFunctionCount(2);
+  TestFunction(0, "function1", 10, 2);
+  TestLineCount(0, 1);
+  TestLine(0, 0,
10, 2, "line-file-1", 263008005); + TestFunction(1, "function2", 20, 2); + TestLineCount(1, 1); + TestLine(1, 0, 20, 2, "line-file-2", 174314698); +} + +// If GCC emits padding after one function to align the start of +// the next, then it will attribute the padding instructions to +// the last source line of function (to reduce the size of the +// line number info), but omit it from the DW_AT_{low,high}_pc +// range given in .debug_info (since it costs nothing to be +// precise there). If we did use at least some of the line +// we're about to skip, then assume this is what happened, and +// don't warn. +TEST_F(FuncLinePairing, GCCAlignmentStretch) { + PushLine(10, 10, "line-file", 63351048); + PushLine(20, 10, "line-file", 61661044); + + StartCU(); + DefineFunction(&root_handler_, "function1", 10, 5, NULL); + // five-byte gap between functions, covered by line 63351048. + // This should not elicit a warning. + DefineFunction(&root_handler_, "function2", 20, 10, NULL); + root_handler_.Finish(); + + TestFunctionCount(2); + TestFunction(0, "function1", 10, 5); + TestLineCount(0, 1); + TestLine(0, 0, 10, 5, "line-file", 63351048); + TestFunction(1, "function2", 20, 10); + TestLineCount(1, 1); + TestLine(1, 0, 20, 10, "line-file", 61661044); +} + +// Unfortunately, neither the DWARF parser's handler interface nor the +// DIEHandler interface is capable of expressing a function that abuts +// the end of the address space: the high_pc value looks like zero. + +TEST_F(FuncLinePairing, LineAtEndOfAddressSpace) { + PushLine(0xfffffffffffffff0ULL, 16, "line-file", 63351048); + EXPECT_CALL(reporter_, UncoveredLine(_)).WillOnce(Return()); + + StartCU(); + DefineFunction(&root_handler_, "function1", 0xfffffffffffffff0ULL, 6, NULL); + DefineFunction(&root_handler_, "function2", 0xfffffffffffffffaULL, 5, NULL); + root_handler_.Finish(); + + TestFunctionCount(2); + TestFunction(0, "function1", 0xfffffffffffffff0ULL, 6); + TestLineCount(0, 1); + TestLine(0, 0, 0xfffffffffffffff0ULL, 6, "line-file", 63351048); + TestFunction(1, "function2", 0xfffffffffffffffaULL, 5); + TestLineCount(1, 1); + TestLine(1, 0, 0xfffffffffffffffaULL, 5, "line-file", 63351048); +} + +// A function with more than one uncovered area should only be warned +// about once. +TEST_F(FuncLinePairing, WarnOnceFunc) { + PushLine(20, 1, "line-file-2", 262951329); + PushLine(11, 1, "line-file-1", 219964021); + EXPECT_CALL(reporter_, UncoveredFunction(_)).WillOnce(Return()); + + StartCU(); + DefineFunction(&root_handler_, "function", 10, 11, NULL); + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "function", 10, 11); + TestLineCount(0, 2); + TestLine(0, 0, 11, 1, "line-file-1", 219964021); + TestLine(0, 1, 20, 1, "line-file-2", 262951329); +} + +// A line with more than one uncovered area should only be warned +// about once. 
+TEST_F(FuncLinePairing, WarnOnceLine) {
+  PushLine(10, 20, "filename1", 118581871);
+  EXPECT_CALL(reporter_, UncoveredLine(_)).WillOnce(Return());
+
+  StartCU();
+  DefineFunction(&root_handler_, "function1", 11, 1, NULL);
+  DefineFunction(&root_handler_, "function2", 13, 1, NULL);
+  root_handler_.Finish();
+
+  TestFunctionCount(2);
+  TestFunction(0, "function1", 11, 1);
+  TestLineCount(0, 1);
+  TestLine(0, 0, 11, 1, "filename1", 118581871);
+  TestFunction(1, "function2", 13, 1);
+  TestLineCount(1, 1);
+  TestLine(1, 0, 13, 1, "filename1", 118581871);
+}
+
+class CXXQualifiedNames: public CUFixtureBase,
+                         public TestWithParam<DwarfTag> { };
+
+INSTANTIATE_TEST_CASE_P(VersusEnclosures, CXXQualifiedNames,
+                        Values(dwarf2reader::DW_TAG_class_type,
+                               dwarf2reader::DW_TAG_structure_type,
+                               dwarf2reader::DW_TAG_union_type,
+                               dwarf2reader::DW_TAG_namespace));
+
+TEST_P(CXXQualifiedNames, TwoFunctions) {
+  DwarfTag tag = GetParam();
+
+  SetLanguage(dwarf2reader::DW_LANG_C_plus_plus);
+  PushLine(10, 1, "filename1", 69819327);
+  PushLine(20, 1, "filename2", 95115701);
+
+  StartCU();
+  DIEHandler *enclosure_handler = StartNamedDIE(&root_handler_, tag,
+                                                "Enclosure");
+  EXPECT_TRUE(enclosure_handler != NULL);
+  DefineFunction(enclosure_handler, "func_B", 10, 1, NULL);
+  DefineFunction(enclosure_handler, "func_C", 20, 1, NULL);
+  enclosure_handler->Finish();
+  delete enclosure_handler;
+  root_handler_.Finish();
+
+  TestFunctionCount(2);
+  TestFunction(0, "Enclosure::func_B", 10, 1);
+  TestFunction(1, "Enclosure::func_C", 20, 1);
+}
+
+TEST_P(CXXQualifiedNames, FuncInEnclosureInNamespace) {
+  DwarfTag tag = GetParam();
+
+  SetLanguage(dwarf2reader::DW_LANG_C_plus_plus);
+  PushLine(10, 1, "line-file", 69819327);
+
+  StartCU();
+  DIEHandler *namespace_handler
+      = StartNamedDIE(&root_handler_, dwarf2reader::DW_TAG_namespace,
+                      "Namespace");
+  EXPECT_TRUE(namespace_handler != NULL);
+  DIEHandler *enclosure_handler = StartNamedDIE(namespace_handler, tag,
+                                                "Enclosure");
+  EXPECT_TRUE(enclosure_handler != NULL);
+  DefineFunction(enclosure_handler, "function", 10, 1, NULL);
+  enclosure_handler->Finish();
+  delete enclosure_handler;
+  namespace_handler->Finish();
+  delete namespace_handler;
+  root_handler_.Finish();
+
+  TestFunctionCount(1);
+  TestFunction(0, "Namespace::Enclosure::function", 10, 1);
+}
+
+TEST_F(CXXQualifiedNames, FunctionInClassInStructInNamespace) {
+  SetLanguage(dwarf2reader::DW_LANG_C_plus_plus);
+  PushLine(10, 1, "filename1", 69819327);
+
+  StartCU();
+  DIEHandler *namespace_handler
+      = StartNamedDIE(&root_handler_, dwarf2reader::DW_TAG_namespace,
+                      "namespace_A");
+  EXPECT_TRUE(namespace_handler != NULL);
+  DIEHandler *struct_handler
+      = StartNamedDIE(namespace_handler, dwarf2reader::DW_TAG_structure_type,
+                      "struct_B");
+  EXPECT_TRUE(struct_handler != NULL);
+  DIEHandler *class_handler
+      = StartNamedDIE(struct_handler, dwarf2reader::DW_TAG_class_type,
+                      "class_C");
+  DefineFunction(class_handler, "function_D", 10, 1, NULL);
+  class_handler->Finish();
+  delete class_handler;
+  struct_handler->Finish();
+  delete struct_handler;
+  namespace_handler->Finish();
+  delete namespace_handler;
+  root_handler_.Finish();
+
+  TestFunctionCount(1);
+  TestFunction(0, "namespace_A::struct_B::class_C::function_D", 10, 1);
+}
+
+struct LanguageAndQualifiedName {
+  dwarf2reader::DwarfLanguage language;
+  const char *name;
+};
+
+const LanguageAndQualifiedName LanguageAndQualifiedNameCases[] = {
+  { dwarf2reader::DW_LANG_none, "class_A::function_B" },
+  { dwarf2reader::DW_LANG_C, "class_A::function_B" },
+  { dwarf2reader::DW_LANG_C89, "class_A::function_B" },
+  { dwarf2reader::DW_LANG_C99, "class_A::function_B" },
+  { dwarf2reader::DW_LANG_C_plus_plus, "class_A::function_B" },
+  { dwarf2reader::DW_LANG_Java, "class_A.function_B" },
+  { dwarf2reader::DW_LANG_Cobol74, "class_A::function_B" },
+  { dwarf2reader::DW_LANG_Mips_Assembler, NULL }
+};
+
+class QualifiedForLanguage
+    : public CUFixtureBase,
+      public TestWithParam<LanguageAndQualifiedName> { };
+
+INSTANTIATE_TEST_CASE_P(LanguageAndQualifiedName, QualifiedForLanguage,
+                        ValuesIn(LanguageAndQualifiedNameCases));
+
+TEST_P(QualifiedForLanguage, MemberFunction) {
+  const LanguageAndQualifiedName &param = GetParam();
+
+  PushLine(10, 1, "line-file", 212966758);
+  SetLanguage(param.language);
+
+  StartCU();
+  DIEHandler *class_handler
+      = StartNamedDIE(&root_handler_, dwarf2reader::DW_TAG_class_type,
+                      "class_A");
+  DefineFunction(class_handler, "function_B", 10, 1, NULL);
+  class_handler->Finish();
+  delete class_handler;
+  root_handler_.Finish();
+
+  if (param.name) {
+    TestFunctionCount(1);
+    TestFunction(0, param.name, 10, 1);
+  } else {
+    TestFunctionCount(0);
+  }
+}
+
+TEST_P(QualifiedForLanguage, MemberFunctionSignedLanguage) {
+  const LanguageAndQualifiedName &param = GetParam();
+
+  PushLine(10, 1, "line-file", 212966758);
+  SetLanguage(param.language);
+  SetLanguageSigned(true);
+
+  StartCU();
+  DIEHandler *class_handler
+      = StartNamedDIE(&root_handler_, dwarf2reader::DW_TAG_class_type,
+                      "class_A");
+  DefineFunction(class_handler, "function_B", 10, 1, NULL);
+  class_handler->Finish();
+  delete class_handler;
+  root_handler_.Finish();
+
+  if (param.name) {
+    TestFunctionCount(1);
+    TestFunction(0, param.name, 10, 1);
+  } else {
+    TestFunctionCount(0);
+  }
+}
+
+class Specifications: public CUFixtureBase, public Test { };
+
+TEST_F(Specifications, Function) {
+  PushLine(0x93cd3dfc1aa10097ULL, 0x0397d47a0b4ca0d4ULL, "line-file", 54883661);
+
+  StartCU();
+  DeclarationDIE(&root_handler_, 0xcd3c51b946fb1eeeLL,
+                 dwarf2reader::DW_TAG_subprogram, "declaration-name", "");
+  DefinitionDIE(&root_handler_, dwarf2reader::DW_TAG_subprogram,
+                0xcd3c51b946fb1eeeLL, "",
+                0x93cd3dfc1aa10097ULL, 0x0397d47a0b4ca0d4ULL);
+  root_handler_.Finish();
+
+  TestFunctionCount(1);
+  TestFunction(0, "declaration-name",
+               0x93cd3dfc1aa10097ULL, 0x0397d47a0b4ca0d4ULL);
+}
+
+TEST_F(Specifications, MangledName) {
+  PushLine(0x93cd3dfc1aa10097ULL, 0x0397d47a0b4ca0d4ULL, "line-file", 54883661);
+
+  StartCU();
+  DeclarationDIE(&root_handler_, 0xcd3c51b946fb1eeeLL,
+                 dwarf2reader::DW_TAG_subprogram, "declaration-name",
+                 "_ZN1C1fEi");
+  DefinitionDIE(&root_handler_, dwarf2reader::DW_TAG_subprogram,
+                0xcd3c51b946fb1eeeLL, "",
+                0x93cd3dfc1aa10097ULL, 0x0397d47a0b4ca0d4ULL);
+  root_handler_.Finish();
+
+  TestFunctionCount(1);
+  TestFunction(0, "C::f(int)",
+               0x93cd3dfc1aa10097ULL, 0x0397d47a0b4ca0d4ULL);
+}
+
+TEST_F(Specifications, MemberFunction) {
+  PushLine(0x3341a248634e7170ULL, 0x5f6938ee5553b953ULL, "line-file", 18116691);
+
+  StartCU();
+  DIEHandler *class_handler
+      = StartNamedDIE(&root_handler_, dwarf2reader::DW_TAG_class_type, "class_A");
+  DeclarationDIE(class_handler, 0x7d83028c431406e8ULL,
+                 dwarf2reader::DW_TAG_subprogram, "declaration-name", "");
+  class_handler->Finish();
+  delete class_handler;
+  DefinitionDIE(&root_handler_, dwarf2reader::DW_TAG_subprogram,
+                0x7d83028c431406e8ULL, "",
+                0x3341a248634e7170ULL, 0x5f6938ee5553b953ULL);
+  root_handler_.Finish();
+
+  TestFunctionCount(1);
+  TestFunction(0, "class_A::declaration-name",
+               0x3341a248634e7170ULL, 0x5f6938ee5553b953ULL);
+}
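+
+// The Specifications tests that follow exercise DW_AT_specification
+// handling: a definition DIE that cites a declaration DIE should pick up
+// the declaration's name and enclosing scopes when the module's function
+// list is built.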
+ +// This case should gather the name from both the definition and the +// declaration's parent. +TEST_F(Specifications, FunctionDeclarationParent) { + PushLine(0x463c9ddf405be227ULL, 0x6a47774af5049680ULL, "line-file", 70254922); + + StartCU(); + { + DIEHandler *class_handler + = StartNamedDIE(&root_handler_, dwarf2reader::DW_TAG_class_type, + "class_A"); + ASSERT_TRUE(class_handler != NULL); + DeclarationDIE(class_handler, 0x0e0e877c8404544aULL, + dwarf2reader::DW_TAG_subprogram, "declaration-name", ""); + class_handler->Finish(); + delete class_handler; + } + + DefinitionDIE(&root_handler_, dwarf2reader::DW_TAG_subprogram, + 0x0e0e877c8404544aULL, "definition-name", + 0x463c9ddf405be227ULL, 0x6a47774af5049680ULL); + + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "class_A::definition-name", + 0x463c9ddf405be227ULL, 0x6a47774af5049680ULL); +} + +// Named scopes should also gather enclosing name components from +// their declarations. +TEST_F(Specifications, NamedScopeDeclarationParent) { + PushLine(0x5d13433d0df13d00ULL, 0x48ebebe5ade2cab4ULL, "line-file", 77392604); + + StartCU(); + { + DIEHandler *space_handler + = StartNamedDIE(&root_handler_, dwarf2reader::DW_TAG_namespace, + "space_A"); + ASSERT_TRUE(space_handler != NULL); + DeclarationDIE(space_handler, 0x419bb1d12f9a73a2ULL, + dwarf2reader::DW_TAG_class_type, "class-declaration-name", + ""); + space_handler->Finish(); + delete space_handler; + } + + { + DIEHandler *class_handler + = StartSpecifiedDIE(&root_handler_, dwarf2reader::DW_TAG_class_type, + 0x419bb1d12f9a73a2ULL, "class-definition-name"); + ASSERT_TRUE(class_handler != NULL); + DefineFunction(class_handler, "function", + 0x5d13433d0df13d00ULL, 0x48ebebe5ade2cab4ULL, NULL); + class_handler->Finish(); + delete class_handler; + } + + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "space_A::class-definition-name::function", + 0x5d13433d0df13d00ULL, 0x48ebebe5ade2cab4ULL); +} + +// This test recreates bug 364. +TEST_F(Specifications, InlineFunction) { + PushLine(0x1758a0f941b71efbULL, 0x1cf154f1f545e146ULL, "line-file", 75173118); + + StartCU(); + DeclarationDIE(&root_handler_, 0xcd3c51b946fb1eeeLL, + dwarf2reader::DW_TAG_subprogram, "inline-name", ""); + AbstractInstanceDIE(&root_handler_, 0x1e8dac5d507ed7abULL, + dwarf2reader::DW_INL_inlined, 0xcd3c51b946fb1eeeLL, ""); + DefineInlineInstanceDIE(&root_handler_, "", 0x1e8dac5d507ed7abULL, + 0x1758a0f941b71efbULL, 0x1cf154f1f545e146ULL); + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "inline-name", + 0x1758a0f941b71efbULL, 0x1cf154f1f545e146ULL); +} + +// Check name construction for a long chain containing each combination of: +// - struct, union, class, namespace +// - direct and definition +TEST_F(Specifications, LongChain) { + PushLine(0x5a0dd6bb85db754cULL, 0x3bccb213d08c7fd3ULL, "line-file", 21192926); + SetLanguage(dwarf2reader::DW_LANG_C_plus_plus); + + StartCU(); + // The structure we're building here is: + // space_A full definition + // space_B declaration + // space_B definition + // struct_C full definition + // struct_D declaration + // struct_D definition + // union_E full definition + // union_F declaration + // union_F definition + // class_G full definition + // class_H declaration + // class_H definition + // func_I declaration + // func_I definition + // + // So: + // - space_A, struct_C, union_E, and class_G don't use specifications; + // - space_B, struct_D, union_F, and class_H do. + // - func_I uses a specification. 
+ // + // The full name for func_I is thus: + // + // space_A::space_B::struct_C::struct_D::union_E::union_F:: + // class_G::class_H::func_I + { + DIEHandler *space_A_handler + = StartNamedDIE(&root_handler_, dwarf2reader::DW_TAG_namespace, + "space_A"); + DeclarationDIE(space_A_handler, 0x2e111126496596e2ULL, + dwarf2reader::DW_TAG_namespace, "space_B", ""); + space_A_handler->Finish(); + delete space_A_handler; + } + + { + DIEHandler *space_B_handler + = StartSpecifiedDIE(&root_handler_, dwarf2reader::DW_TAG_namespace, + 0x2e111126496596e2ULL); + DIEHandler *struct_C_handler + = StartNamedDIE(space_B_handler, dwarf2reader::DW_TAG_structure_type, + "struct_C"); + DeclarationDIE(struct_C_handler, 0x20cd423bf2a25a4cULL, + dwarf2reader::DW_TAG_structure_type, "struct_D", ""); + struct_C_handler->Finish(); + delete struct_C_handler; + space_B_handler->Finish(); + delete space_B_handler; + } + + { + DIEHandler *struct_D_handler + = StartSpecifiedDIE(&root_handler_, dwarf2reader::DW_TAG_structure_type, + 0x20cd423bf2a25a4cULL); + DIEHandler *union_E_handler + = StartNamedDIE(struct_D_handler, dwarf2reader::DW_TAG_union_type, + "union_E"); + DeclarationDIE(union_E_handler, 0xe25c84805aa58c32ULL, + dwarf2reader::DW_TAG_union_type, "union_F", ""); + union_E_handler->Finish(); + delete union_E_handler; + struct_D_handler->Finish(); + delete struct_D_handler; + } + + { + DIEHandler *union_F_handler + = StartSpecifiedDIE(&root_handler_, dwarf2reader::DW_TAG_union_type, + 0xe25c84805aa58c32ULL); + DIEHandler *class_G_handler + = StartNamedDIE(union_F_handler, dwarf2reader::DW_TAG_class_type, + "class_G"); + DeclarationDIE(class_G_handler, 0xb70d960dcc173b6eULL, + dwarf2reader::DW_TAG_class_type, "class_H", ""); + class_G_handler->Finish(); + delete class_G_handler; + union_F_handler->Finish(); + delete union_F_handler; + } + + { + DIEHandler *class_H_handler + = StartSpecifiedDIE(&root_handler_, dwarf2reader::DW_TAG_class_type, + 0xb70d960dcc173b6eULL); + DeclarationDIE(class_H_handler, 0x27ff829e3bf69f37ULL, + dwarf2reader::DW_TAG_subprogram, "func_I", ""); + class_H_handler->Finish(); + delete class_H_handler; + } + + DefinitionDIE(&root_handler_, dwarf2reader::DW_TAG_subprogram, + 0x27ff829e3bf69f37ULL, "", + 0x5a0dd6bb85db754cULL, 0x3bccb213d08c7fd3ULL); + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "space_A::space_B::struct_C::struct_D::union_E::union_F" + "::class_G::class_H::func_I", + 0x5a0dd6bb85db754cULL, 0x3bccb213d08c7fd3ULL); +} + +TEST_F(Specifications, InterCU) { + Module m("module-name", "module-os", "module-arch", "module-id"); + DwarfCUToModule::FileContext fc("dwarf-filename", &m, true); + EXPECT_CALL(reporter_, UncoveredFunction(_)).WillOnce(Return()); + MockLineToModuleHandler lr; + EXPECT_CALL(lr, ReadProgram(_,_,_,_)).Times(0); + + // Kludge: satisfy reporter_'s expectation. + reporter_.SetCUName("compilation-unit-name"); + + // First CU. Declares class_A. + { + DwarfCUToModule root1_handler(&fc, &lr, &reporter_); + ASSERT_TRUE(root1_handler.StartCompilationUnit(0, 1, 2, 3, 3)); + ASSERT_TRUE(root1_handler.StartRootDIE(1, + dwarf2reader::DW_TAG_compile_unit)); + ProcessStrangeAttributes(&root1_handler); + ASSERT_TRUE(root1_handler.EndAttributes()); + DeclarationDIE(&root1_handler, 0xb8fbfdd5f0b26fceULL, + dwarf2reader::DW_TAG_class_type, "class_A", ""); + root1_handler.Finish(); + } + + // Second CU. Defines class_A, declares member_func_B. 
+  {
+    DwarfCUToModule root2_handler(&fc, &lr, &reporter_);
+    ASSERT_TRUE(root2_handler.StartCompilationUnit(0, 1, 2, 3, 3));
+    ASSERT_TRUE(root2_handler.StartRootDIE(1,
+                                           dwarf2reader::DW_TAG_compile_unit));
+    ASSERT_TRUE(root2_handler.EndAttributes());
+    DIEHandler *class_A_handler
+      = StartSpecifiedDIE(&root2_handler, dwarf2reader::DW_TAG_class_type,
+                          0xb8fbfdd5f0b26fceULL);
+    DeclarationDIE(class_A_handler, 0xb01fef8b380bd1a2ULL,
+                   dwarf2reader::DW_TAG_subprogram, "member_func_B", "");
+    class_A_handler->Finish();
+    delete class_A_handler;
+    root2_handler.Finish();
+  }
+
+  // Third CU. Defines member_func_B.
+  {
+    DwarfCUToModule root3_handler(&fc, &lr, &reporter_);
+    ASSERT_TRUE(root3_handler.StartCompilationUnit(0, 1, 2, 3, 3));
+    ASSERT_TRUE(root3_handler.StartRootDIE(1,
+                                           dwarf2reader::DW_TAG_compile_unit));
+    ASSERT_TRUE(root3_handler.EndAttributes());
+    DefinitionDIE(&root3_handler, dwarf2reader::DW_TAG_subprogram,
+                  0xb01fef8b380bd1a2ULL, "",
+                  0x2618f00a1a711e53ULL, 0x4fd94b76d7c2caf5ULL);
+    root3_handler.Finish();
+  }
+
+  vector<Module::Function *> functions;
+  m.GetFunctions(&functions, functions.end());
+  EXPECT_EQ(1U, functions.size());
+  EXPECT_STREQ("class_A::member_func_B", functions[0]->name.c_str());
+}
+
+TEST_F(Specifications, UnhandledInterCU) {
+  Module m("module-name", "module-os", "module-arch", "module-id");
+  DwarfCUToModule::FileContext fc("dwarf-filename", &m, false);
+  EXPECT_CALL(reporter_, UncoveredFunction(_)).WillOnce(Return());
+  MockLineToModuleHandler lr;
+  EXPECT_CALL(lr, ReadProgram(_,_,_,_)).Times(0);
+
+  // Kludge: satisfy reporter_'s expectation.
+  reporter_.SetCUName("compilation-unit-name");
+
+  // First CU. Declares class_A.
+  {
+    DwarfCUToModule root1_handler(&fc, &lr, &reporter_);
+    ASSERT_TRUE(root1_handler.StartCompilationUnit(0, 1, 2, 3, 3));
+    ASSERT_TRUE(root1_handler.StartRootDIE(1,
+                                           dwarf2reader::DW_TAG_compile_unit));
+    ProcessStrangeAttributes(&root1_handler);
+    ASSERT_TRUE(root1_handler.EndAttributes());
+    DeclarationDIE(&root1_handler, 0xb8fbfdd5f0b26fceULL,
+                   dwarf2reader::DW_TAG_class_type, "class_A", "");
+    root1_handler.Finish();
+  }
+
+  // Second CU. Defines class_A, declares member_func_B.
+  {
+    DwarfCUToModule root2_handler(&fc, &lr, &reporter_);
+    ASSERT_TRUE(root2_handler.StartCompilationUnit(0, 1, 2, 3, 3));
+    ASSERT_TRUE(root2_handler.StartRootDIE(1,
+                                           dwarf2reader::DW_TAG_compile_unit));
+    ASSERT_TRUE(root2_handler.EndAttributes());
+    EXPECT_CALL(reporter_, UnhandledInterCUReference(_, _)).Times(1);
+    DIEHandler *class_A_handler
+      = StartSpecifiedDIE(&root2_handler, dwarf2reader::DW_TAG_class_type,
+                          0xb8fbfdd5f0b26fceULL);
+    DeclarationDIE(class_A_handler, 0xb01fef8b380bd1a2ULL,
+                   dwarf2reader::DW_TAG_subprogram, "member_func_B", "");
+    class_A_handler->Finish();
+    delete class_A_handler;
+    root2_handler.Finish();
+  }
+
+  // Third CU. Defines member_func_B.
+ { + DwarfCUToModule root3_handler(&fc, &lr, &reporter_); + ASSERT_TRUE(root3_handler.StartCompilationUnit(0, 1, 2, 3, 3)); + ASSERT_TRUE(root3_handler.StartRootDIE(1, + dwarf2reader::DW_TAG_compile_unit)); + ASSERT_TRUE(root3_handler.EndAttributes()); + EXPECT_CALL(reporter_, UnhandledInterCUReference(_, _)).Times(1); + EXPECT_CALL(reporter_, UnnamedFunction(_)).Times(1); + DefinitionDIE(&root3_handler, dwarf2reader::DW_TAG_subprogram, + 0xb01fef8b380bd1a2ULL, "", + 0x2618f00a1a711e53ULL, 0x4fd94b76d7c2caf5ULL); + root3_handler.Finish(); + } +} + +TEST_F(Specifications, BadOffset) { + PushLine(0xa0277efd7ce83771ULL, 0x149554a184c730c1ULL, "line-file", 56636272); + EXPECT_CALL(reporter_, UnknownSpecification(_, 0x2be953efa6f9a996ULL)) + .WillOnce(Return()); + + StartCU(); + DeclarationDIE(&root_handler_, 0xefd7f7752c27b7e4ULL, + dwarf2reader::DW_TAG_subprogram, "", ""); + DefinitionDIE(&root_handler_, dwarf2reader::DW_TAG_subprogram, + 0x2be953efa6f9a996ULL, "function", + 0xa0277efd7ce83771ULL, 0x149554a184c730c1ULL); + root_handler_.Finish(); +} + +TEST_F(Specifications, FunctionDefinitionHasOwnName) { + PushLine(0xced50b3eea81022cULL, 0x08dd4d301cc7a7d2ULL, "line-file", 56792403); + + StartCU(); + DeclarationDIE(&root_handler_, 0xc34ff4786cae78bdULL, + dwarf2reader::DW_TAG_subprogram, "declaration-name", ""); + DefinitionDIE(&root_handler_, dwarf2reader::DW_TAG_subprogram, + 0xc34ff4786cae78bdULL, "definition-name", + 0xced50b3eea81022cULL, 0x08dd4d301cc7a7d2ULL); + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "definition-name", + 0xced50b3eea81022cULL, 0x08dd4d301cc7a7d2ULL); +} + +TEST_F(Specifications, ClassDefinitionHasOwnName) { + PushLine(0x1d0f5e0f6ce309bdULL, 0x654e1852ec3599e7ULL, "line-file", 57119241); + + StartCU(); + DeclarationDIE(&root_handler_, 0xd0fe467ec2f1a58cULL, + dwarf2reader::DW_TAG_class_type, "class-declaration-name", ""); + + dwarf2reader::DIEHandler *class_definition + = StartSpecifiedDIE(&root_handler_, dwarf2reader::DW_TAG_class_type, + 0xd0fe467ec2f1a58cULL, "class-definition-name"); + ASSERT_TRUE(class_definition); + DeclarationDIE(class_definition, 0x6d028229c15623dbULL, + dwarf2reader::DW_TAG_subprogram, + "function-declaration-name", ""); + class_definition->Finish(); + delete class_definition; + + DefinitionDIE(&root_handler_, dwarf2reader::DW_TAG_subprogram, + 0x6d028229c15623dbULL, "function-definition-name", + 0x1d0f5e0f6ce309bdULL, 0x654e1852ec3599e7ULL); + + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "class-definition-name::function-definition-name", + 0x1d0f5e0f6ce309bdULL, 0x654e1852ec3599e7ULL); +} + +// DIEs that cite a specification should prefer the specification's +// parents over their own when choosing qualified names. In this test, +// we take the name from our definition but the enclosing scope name +// from our declaration. I don't see why they'd ever be different, but +// we want to verify what DwarfCUToModule is looking at. 
+TEST_F(Specifications, PreferSpecificationParents) { + PushLine(0xbbd9d54dce3b95b7ULL, 0x39188b7b52b0899fULL, "line-file", 79488694); + + StartCU(); + { + dwarf2reader::DIEHandler *declaration_class_handler = + StartNamedDIE(&root_handler_, dwarf2reader::DW_TAG_class_type, + "declaration-class"); + DeclarationDIE(declaration_class_handler, 0x9ddb35517455ef7aULL, + dwarf2reader::DW_TAG_subprogram, "function-declaration", + ""); + declaration_class_handler->Finish(); + delete declaration_class_handler; + } + { + dwarf2reader::DIEHandler *definition_class_handler + = StartNamedDIE(&root_handler_, dwarf2reader::DW_TAG_class_type, + "definition-class"); + DefinitionDIE(definition_class_handler, dwarf2reader::DW_TAG_subprogram, + 0x9ddb35517455ef7aULL, "function-definition", + 0xbbd9d54dce3b95b7ULL, 0x39188b7b52b0899fULL); + definition_class_handler->Finish(); + delete definition_class_handler; + } + root_handler_.Finish(); + + TestFunctionCount(1); + TestFunction(0, "declaration-class::function-definition", + 0xbbd9d54dce3b95b7ULL, 0x39188b7b52b0899fULL); +} + +class CUErrors: public CUFixtureBase, public Test { }; + +TEST_F(CUErrors, BadStmtList) { + EXPECT_CALL(reporter_, BadLineInfoOffset(dummy_line_size_ + 10)).Times(1); + + ASSERT_TRUE(root_handler_ + .StartCompilationUnit(0xc591d5b037543d7cULL, 0x11, 0xcd, + 0x2d7d19546cf6590cULL, 3)); + ASSERT_TRUE(root_handler_.StartRootDIE(0xae789dc102cfca54ULL, + dwarf2reader::DW_TAG_compile_unit)); + root_handler_.ProcessAttributeString(dwarf2reader::DW_AT_name, + dwarf2reader::DW_FORM_strp, + "compilation-unit-name"); + root_handler_.ProcessAttributeUnsigned(dwarf2reader::DW_AT_stmt_list, + dwarf2reader::DW_FORM_ref4, + dummy_line_size_ + 10); + root_handler_.EndAttributes(); + root_handler_.Finish(); +} + +TEST_F(CUErrors, NoLineSection) { + EXPECT_CALL(reporter_, MissingSection(".debug_line")).Times(1); + PushLine(0x88507fb678052611ULL, 0x42c8e9de6bbaa0faULL, "line-file", 64472290); + // Delete the entry for .debug_line added by the fixture class's constructor. + file_context_.ClearSectionMapForTest(); + + StartCU(); + root_handler_.Finish(); +} + +TEST_F(CUErrors, BadDwarfVersion1) { + // Kludge: satisfy reporter_'s expectation. + reporter_.SetCUName("compilation-unit-name"); + + ASSERT_FALSE(root_handler_ + .StartCompilationUnit(0xadf6e0eb71e2b0d9ULL, 0x4d, 0x90, + 0xc9de224ccb99ac3eULL, 1)); +} + +TEST_F(CUErrors, GoodDwarfVersion2) { + // Kludge: satisfy reporter_'s expectation. + reporter_.SetCUName("compilation-unit-name"); + + ASSERT_TRUE(root_handler_ + .StartCompilationUnit(0xadf6e0eb71e2b0d9ULL, 0x4d, 0x90, + 0xc9de224ccb99ac3eULL, 2)); +} + +TEST_F(CUErrors, GoodDwarfVersion3) { + // Kludge: satisfy reporter_'s expectation. + reporter_.SetCUName("compilation-unit-name"); + + ASSERT_TRUE(root_handler_ + .StartCompilationUnit(0xadf6e0eb71e2b0d9ULL, 0x4d, 0x90, + 0xc9de224ccb99ac3eULL, 3)); +} + +TEST_F(CUErrors, BadCURootDIETag) { + // Kludge: satisfy reporter_'s expectation. + reporter_.SetCUName("compilation-unit-name"); + + ASSERT_TRUE(root_handler_ + .StartCompilationUnit(0xadf6e0eb71e2b0d9ULL, 0x4d, 0x90, + 0xc9de224ccb99ac3eULL, 3)); + + ASSERT_FALSE(root_handler_.StartRootDIE(0x02e56bfbda9e7337ULL, + dwarf2reader::DW_TAG_subprogram)); +} + +// Tests for DwarfCUToModule::Reporter. These just produce (or fail to +// produce) output, so their results need to be checked by hand. 
+struct Reporter: public Test { + Reporter() + : reporter("filename", 0x123456789abcdef0ULL), + function("function name", 0x19c45c30770c1eb0ULL), + file("source file name") { + reporter.SetCUName("compilation-unit-name"); + + function.size = 0x89808a5bdfa0a6a3ULL; + function.parameter_size = 0x6a329f18683dcd51ULL; + + line.address = 0x3606ac6267aebeccULL; + line.size = 0x5de482229f32556aULL; + line.file = &file; + line.number = 93400201; + } + + DwarfCUToModule::WarningReporter reporter; + Module::Function function; + Module::File file; + Module::Line line; +}; + +TEST_F(Reporter, UnknownSpecification) { + reporter.UnknownSpecification(0x123456789abcdef1ULL, 0x323456789abcdef2ULL); +} + +TEST_F(Reporter, UnknownAbstractOrigin) { + reporter.UnknownAbstractOrigin(0x123456789abcdef1ULL, 0x323456789abcdef2ULL); +} + +TEST_F(Reporter, MissingSection) { + reporter.MissingSection("section name"); +} + +TEST_F(Reporter, BadLineInfoOffset) { + reporter.BadLineInfoOffset(0x123456789abcdef1ULL); +} + +TEST_F(Reporter, UncoveredFunctionDisabled) { + reporter.UncoveredFunction(function); + EXPECT_FALSE(reporter.uncovered_warnings_enabled()); +} + +TEST_F(Reporter, UncoveredFunctionEnabled) { + reporter.set_uncovered_warnings_enabled(true); + reporter.UncoveredFunction(function); + EXPECT_TRUE(reporter.uncovered_warnings_enabled()); +} + +TEST_F(Reporter, UncoveredLineDisabled) { + reporter.UncoveredLine(line); + EXPECT_FALSE(reporter.uncovered_warnings_enabled()); +} + +TEST_F(Reporter, UncoveredLineEnabled) { + reporter.set_uncovered_warnings_enabled(true); + reporter.UncoveredLine(line); + EXPECT_TRUE(reporter.uncovered_warnings_enabled()); +} + +TEST_F(Reporter, UnnamedFunction) { + reporter.UnnamedFunction(0x90c0baff9dedb2d9ULL); +} + +// Would be nice to also test: +// - overlapping lines, functions diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_line_to_module.cc b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_line_to_module.cc new file mode 100644 index 0000000000..258b0b603f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_line_to_module.cc @@ -0,0 +1,143 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Original author: Jim Blandy
+
+// dwarf_line_to_module.cc: Implementation of DwarfLineToModule class.
+// See dwarf_line_to_module.h for details.
+
+#include <stdio.h>
+
+#include <string>
+
+#include "common/dwarf_line_to_module.h"
+#include "common/using_std_string.h"
+
+// Trying to support Windows paths in a reasonable way adds a lot of
+// variations to test; it would be better to just put off dealing with
+// it until we actually have to deal with DWARF on Windows.
+
+// Return true if PATH is an absolute path, false if it is relative.
+static bool PathIsAbsolute(const string &path) {
+  return (path.size() >= 1 && path[0] == '/');
+}
+
+static bool HasTrailingSlash(const string &path) {
+  return (path.size() >= 1 && path[path.size() - 1] == '/');
+}
+
+// If PATH is an absolute path, return PATH. If PATH is a relative path,
+// treat it as relative to BASE and return the combined path.
+static string ExpandPath(const string &path,
+                         const string &base) {
+  if (PathIsAbsolute(path) || base.empty())
+    return path;
+  return base + (HasTrailingSlash(base) ? "" : "/") + path;
+}
+
+namespace google_breakpad {
+
+void DwarfLineToModule::DefineDir(const string &name, uint32 dir_num) {
+  // Directory number zero is reserved to mean the compilation
+  // directory. Silently ignore attempts to redefine it.
+  if (dir_num != 0)
+    directories_[dir_num] = ExpandPath(name, compilation_dir_);
+}
+
+void DwarfLineToModule::DefineFile(const string &name, int32 file_num,
+                                   uint32 dir_num, uint64 mod_time,
+                                   uint64 length) {
+  if (file_num == -1)
+    file_num = ++highest_file_number_;
+  else if (file_num > highest_file_number_)
+    highest_file_number_ = file_num;
+
+  string dir_name;
+  if (dir_num == 0) {
+    // Directory number zero is the compilation directory, and is stored as
+    // an attribute on the compilation unit, rather than in the program table.
+    dir_name = compilation_dir_;
+  } else {
+    DirectoryTable::const_iterator directory_it = directories_.find(dir_num);
+    if (directory_it != directories_.end()) {
+      dir_name = directory_it->second;
+    } else {
+      if (!warned_bad_directory_number_) {
+        fprintf(stderr, "warning: DWARF line number data refers to undefined"
+                " directory numbers\n");
+        warned_bad_directory_number_ = true;
+      }
+    }
+  }
+
+  string full_name = ExpandPath(name, dir_name);
+
+  // Find a Module::File object of the given name, and add it to the
+  // file table.
+  files_[file_num] = module_->FindFile(full_name);
+}
+
+void DwarfLineToModule::AddLine(uint64 address, uint64 length,
+                                uint32 file_num, uint32 line_num,
+                                uint32 column_num) {
+  if (length == 0)
+    return;
+
+  // Clip lines not to extend beyond the end of the address space.
+  if (address + length < address)
+    length = -address;
+
+  // Should we omit this line? (See the comments for omitted_line_end_.)
+  if (address == 0 || address == omitted_line_end_) {
+    omitted_line_end_ = address + length;
+    return;
+  } else {
+    omitted_line_end_ = 0;
+  }
+
+  // Find the source file being referred to.
+ Module::File *file = files_[file_num]; + if (!file) { + if (!warned_bad_file_number_) { + fprintf(stderr, "warning: DWARF line number data refers to " + "undefined file numbers\n"); + warned_bad_file_number_ = true; + } + return; + } + Module::Line line; + line.address = address; + // We set the size when we get the next line or the EndSequence call. + line.size = length; + line.file = file; + line.number = line_num; + lines_->push_back(line); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_line_to_module.h b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_line_to_module.h new file mode 100644 index 0000000000..1fdd4cb716 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_line_to_module.h @@ -0,0 +1,188 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// The DwarfLineToModule class accepts line number information from a +// DWARF parser and adds it to a google_breakpad::Module. The Module +// can write that data out as a Breakpad symbol file. + +#ifndef COMMON_LINUX_DWARF_LINE_TO_MODULE_H +#define COMMON_LINUX_DWARF_LINE_TO_MODULE_H + +#include + +#include "common/module.h" +#include "common/dwarf/dwarf2reader.h" +#include "common/using_std_string.h" + +namespace google_breakpad { + +// A class for producing a vector of google_breakpad::Module::Line +// instances from parsed DWARF line number data. +// +// An instance of this class can be provided as a handler to a +// dwarf2reader::LineInfo DWARF line number information parser. The +// handler accepts source location information from the parser and +// uses it to produce a vector of google_breakpad::Module::Line +// objects, referring to google_breakpad::Module::File objects added +// to a particular google_breakpad::Module. 
+// +// GNU toolchain omitted sections support: +// ====================================== +// +// Given the right options, the GNU toolchain will omit unreferenced +// functions from the final executable. Unfortunately, when it does so, it +// does not remove the associated portions of the DWARF line number +// program; instead, it gives the DW_LNE_set_address instructions referring +// to the now-deleted code addresses of zero. Given this input, the DWARF +// line parser will call AddLine with a series of lines starting at address +// zero. For example, here is the output from 'readelf -wl' for a program +// with four functions, the first three of which have been omitted: +// +// Line Number Statements: +// Extended opcode 2: set Address to 0x0 +// Advance Line by 14 to 15 +// Copy +// Special opcode 48: advance Address by 3 to 0x3 and Line by 1 to 16 +// Special opcode 119: advance Address by 8 to 0xb and Line by 2 to 18 +// Advance PC by 2 to 0xd +// Extended opcode 1: End of Sequence +// +// Extended opcode 2: set Address to 0x0 +// Advance Line by 14 to 15 +// Copy +// Special opcode 48: advance Address by 3 to 0x3 and Line by 1 to 16 +// Special opcode 119: advance Address by 8 to 0xb and Line by 2 to 18 +// Advance PC by 2 to 0xd +// Extended opcode 1: End of Sequence +// +// Extended opcode 2: set Address to 0x0 +// Advance Line by 19 to 20 +// Copy +// Special opcode 48: advance Address by 3 to 0x3 and Line by 1 to 21 +// Special opcode 76: advance Address by 5 to 0x8 and Line by 1 to 22 +// Advance PC by 2 to 0xa +// Extended opcode 1: End of Sequence +// +// Extended opcode 2: set Address to 0x80483a4 +// Advance Line by 23 to 24 +// Copy +// Special opcode 202: advance Address by 14 to 0x80483b2 and Line by 1 to 25 +// Special opcode 76: advance Address by 5 to 0x80483b7 and Line by 1 to 26 +// Advance PC by 6 to 0x80483bd +// Extended opcode 1: End of Sequence +// +// Instead of collecting runs of lines describing code that is not there, +// we try to recognize and drop them. Since the linker doesn't explicitly +// distinguish references to dropped sections from genuine references to +// code at address zero, we must use a heuristic. We have chosen: +// +// - If a line starts at address zero, omit it. (On the platforms +// breakpad targets, it is extremely unlikely that there will be code +// at address zero.) +// +// - If a line starts immediately after an omitted line, omit it too. +class DwarfLineToModule: public dwarf2reader::LineInfoHandler { + public: + // As the DWARF line info parser passes us line records, add source + // files to MODULE, and add all lines to the end of LINES. LINES + // need not be empty. If the parser hands us a zero-length line, we + // omit it. If the parser hands us a line that extends beyond the + // end of the address space, we clip it. It's up to our client to + // sort out which lines belong to which functions; we don't add them + // to any particular function in MODULE ourselves. 
+ DwarfLineToModule(Module *module, const string& compilation_dir, + vector *lines) + : module_(module), + compilation_dir_(compilation_dir), + lines_(lines), + highest_file_number_(-1), + omitted_line_end_(0), + warned_bad_file_number_(false), + warned_bad_directory_number_(false) { } + + ~DwarfLineToModule() { } + + void DefineDir(const string &name, uint32 dir_num); + void DefineFile(const string &name, int32 file_num, + uint32 dir_num, uint64 mod_time, + uint64 length); + void AddLine(uint64 address, uint64 length, + uint32 file_num, uint32 line_num, uint32 column_num); + + private: + + typedef std::map DirectoryTable; + typedef std::map FileTable; + + // The module we're contributing debugging info to. Owned by our + // client. + Module *module_; + + // The compilation directory for the current compilation unit whose + // lines are being accumulated. + string compilation_dir_; + + // The vector of lines we're accumulating. Owned by our client. + // + // In a Module, as in a breakpad symbol file, lines belong to + // specific functions, but DWARF simply assigns lines to addresses; + // one must infer the line/function relationship using the + // functions' beginning and ending addresses. So we can't add these + // to the appropriate function from module_ until we've read the + // function info as well. Instead, we accumulate lines here, and let + // whoever constructed this sort it all out. + vector *lines_; + + // A table mapping directory numbers to paths. + DirectoryTable directories_; + + // A table mapping file numbers to Module::File pointers. + FileTable files_; + + // The highest file number we've seen so far, or -1 if we've seen + // none. Used for dynamically defined file numbers. + int32 highest_file_number_; + + // This is the ending address of the last line we omitted, or zero if we + // didn't omit the previous line. It is zero before we have received any + // AddLine calls. + uint64 omitted_line_end_; + + // True if we've warned about: + bool warned_bad_file_number_; // bad file numbers + bool warned_bad_directory_number_; // bad directory numbers +}; + +} // namespace google_breakpad + +#endif // COMMON_LINUX_DWARF_LINE_TO_MODULE_H diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_line_to_module_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_line_to_module_unittest.cc new file mode 100644 index 0000000000..7c0fcfd35a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/dwarf_line_to_module_unittest.cc @@ -0,0 +1,391 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// dwarf_line_to_module.cc: Unit tests for google_breakpad::DwarfLineToModule. + +#include + +#include "breakpad_googletest_includes.h" +#include "common/dwarf_line_to_module.h" + +using std::vector; + +using google_breakpad::DwarfLineToModule; +using google_breakpad::Module; +using google_breakpad::Module; + +TEST(SimpleModule, One) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineFile("file1", 0x30bf0f27, 0, 0, 0); + h.AddLine(0x6fd126fbf74f2680LL, 0x63c9a14cf556712bLL, 0x30bf0f27, + 0x4c090cbf, 0x1cf9fe0d); + + vector files; + m.GetFiles(&files); + EXPECT_EQ(1U, files.size()); + EXPECT_STREQ("/file1", files[0]->name.c_str()); + + EXPECT_EQ(1U, lines.size()); + EXPECT_EQ(0x6fd126fbf74f2680ULL, lines[0].address); + EXPECT_EQ(0x63c9a14cf556712bULL, lines[0].size); + EXPECT_TRUE(lines[0].file == files[0]); + EXPECT_EQ(0x4c090cbf, lines[0].number); +} + +TEST(SimpleModule, Many) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineDir("directory1", 0x838299ab); + h.DefineDir("directory2", 0xf85de023); + h.DefineFile("file1", 0x2b80377a, 0x838299ab, 0, 0); + h.DefineFile("file1", 0x63beb4a4, 0xf85de023, 0, 0); + h.DefineFile("file2", 0x1d161d56, 0x838299ab, 0, 0); + h.DefineFile("file2", 0x1e7a667c, 0xf85de023, 0, 0); + h.AddLine(0x69900c5d553b7274ULL, 0x90fded183f0d0d3cULL, 0x2b80377a, + 0x15b0f0a9U, 0x3ff5abd6U); + h.AddLine(0x45811219a39b7101ULL, 0x25a5e6a924afc41fULL, 0x63beb4a4, + 0x4d259ce9U, 0x41c5ee32U); + h.AddLine(0xfa90514c1dc9704bULL, 0x0063efeabc02f313ULL, 0x1d161d56, + 0x1ee9fa4fU, 0xbf70e46aU); + h.AddLine(0x556b55fb6a647b10ULL, 0x3f3089ca2bfd80f5ULL, 0x1e7a667c, + 0x77fc280eU, 0x2c4a728cU); + h.DefineFile("file3", -1, 0, 0, 0); + h.AddLine(0xe2d72a37f8d9403aULL, 0x034dfab5b0d4d236ULL, 0x63beb4a5, + 0x75047044U, 0xb6a0016cU); + + vector files; + m.GetFiles(&files); + ASSERT_EQ(5U, files.size()); + EXPECT_STREQ("/directory1/file1", files[0]->name.c_str()); + EXPECT_STREQ("/directory1/file2", files[1]->name.c_str()); + EXPECT_STREQ("/directory2/file1", files[2]->name.c_str()); + EXPECT_STREQ("/directory2/file2", files[3]->name.c_str()); + EXPECT_STREQ("/file3", files[4]->name.c_str()); + + ASSERT_EQ(5U, lines.size()); + + EXPECT_EQ(0x69900c5d553b7274ULL, lines[0].address); + EXPECT_EQ(0x90fded183f0d0d3cULL, lines[0].size); + EXPECT_TRUE(lines[0].file == files[0]); + EXPECT_EQ(0x15b0f0a9, lines[0].number); + + EXPECT_EQ(0x45811219a39b7101ULL, lines[1].address); + EXPECT_EQ(0x25a5e6a924afc41fULL, lines[1].size); + EXPECT_TRUE(lines[1].file == files[2]); + EXPECT_EQ(0x4d259ce9, lines[1].number); + + 
EXPECT_EQ(0xfa90514c1dc9704bULL, lines[2].address); + EXPECT_EQ(0x0063efeabc02f313ULL, lines[2].size); + EXPECT_TRUE(lines[2].file == files[1]); + EXPECT_EQ(0x1ee9fa4f, lines[2].number); + + EXPECT_EQ(0x556b55fb6a647b10ULL, lines[3].address); + EXPECT_EQ(0x3f3089ca2bfd80f5ULL, lines[3].size); + EXPECT_TRUE(lines[3].file == files[3]); + EXPECT_EQ(0x77fc280e, lines[3].number); + + EXPECT_EQ(0xe2d72a37f8d9403aULL, lines[4].address); + EXPECT_EQ(0x034dfab5b0d4d236ULL, lines[4].size); + EXPECT_TRUE(lines[4].file == files[4]); + EXPECT_EQ(0x75047044, lines[4].number); +} + +TEST(Filenames, Absolute) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineDir("directory1", 1); + h.DefineFile("/absolute", 1, 1, 0, 0); + + h.AddLine(1, 1, 1, 0, 0); + + vector files; + m.GetFiles(&files); + ASSERT_EQ(1U, files.size()); + EXPECT_STREQ("/absolute", files[0]->name.c_str()); + ASSERT_EQ(1U, lines.size()); + EXPECT_TRUE(lines[0].file == files[0]); +} + +TEST(Filenames, Relative) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineDir("directory1", 1); + h.DefineFile("relative", 1, 1, 0, 0); + + h.AddLine(1, 1, 1, 0, 0); + + vector files; + m.GetFiles(&files); + ASSERT_EQ(1U, files.size()); + EXPECT_STREQ("/directory1/relative", files[0]->name.c_str()); + ASSERT_EQ(1U, lines.size()); + EXPECT_TRUE(lines[0].file == files[0]); +} + +TEST(Filenames, StrangeFile) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineDir("directory1", 1); + h.DefineFile("", 1, 1, 0, 0); + h.AddLine(1, 1, 1, 0, 0); + + ASSERT_EQ(1U, lines.size()); + EXPECT_STREQ("/directory1/", lines[0].file->name.c_str()); +} + +TEST(Filenames, StrangeDirectory) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineDir("", 1); + h.DefineFile("file1", 1, 1, 0, 0); + h.AddLine(1, 1, 1, 0, 0); + + ASSERT_EQ(1U, lines.size()); + EXPECT_STREQ("/file1", lines[0].file->name.c_str()); +} + +TEST(Filenames, StrangeDirectoryAndFile) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineDir("", 1); + h.DefineFile("", 1, 1, 0, 0); + h.AddLine(1, 1, 1, 0, 0); + + ASSERT_EQ(1U, lines.size()); + EXPECT_STREQ("/", lines[0].file->name.c_str()); +} + +// We should use the compilation directory when encountering a file for +// directory number zero. +TEST(Filenames, DirectoryZeroFileIsRelativeToCompilationDir) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "src/build", &lines); + + h.DefineDir("Dir", 1); + h.DefineFile("File", 1, 0, 0, 0); + + h.AddLine(1, 1, 1, 0, 0); + + ASSERT_EQ(1U, lines.size()); + EXPECT_STREQ("src/build/File", lines[0].file->name.c_str()); +} + +// We should treat non-absolute directories as relative to the compilation +// directory. +TEST(Filenames, IncludeDirectoryRelativeToDirectoryZero) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "src/build", &lines); + + h.DefineDir("Dir", 1); + h.DefineFile("File", 1, 1, 0, 0); + + h.AddLine(1, 1, 1, 0, 0); + + ASSERT_EQ(1U, lines.size()); + EXPECT_STREQ("src/build/Dir/File", lines[0].file->name.c_str()); +} + +// We should treat absolute directories as absolute, and not relative to +// the compilation dir. 
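
Taken together, the Filenames tests (including the absolute-include-directory case that follows immediately below) pin down a simple path-assembly policy: an absolute file or directory name wins outright, directory number zero means the compilation directory, an unknown directory number leaves the name alone, and everything else is joined with a single '/'. A standalone sketch of that policy; the helper name is invented and is not breakpad's:

    // Sketch only: the same joining rules the tests in this file exercise.
    #include <cassert>
    #include <string>

    static std::string ExpandPathSketch(const std::string& path,
                                        const std::string& base) {
      if ((!path.empty() && path[0] == '/') || base.empty())
        return path;                       // absolute name, or unknown dir
      if (base[base.size() - 1] == '/')
        return base + path;                // avoid a doubled slash
      return base + "/" + path;
    }

    int main() {
      // Compilation dir "/" plus include directory "directory1":
      std::string dir1 = ExpandPathSketch("directory1", "/");
      assert(dir1 == "/directory1");
      assert(ExpandPathSketch("relative", dir1) == "/directory1/relative");
      // Absolute names are kept as-is:
      assert(ExpandPathSketch("/absolute", dir1) == "/absolute");
      assert(ExpandPathSketch("File", ExpandPathSketch("/Dir", "src/build")) ==
             "/Dir/File");
      // Directory number zero resolves against the compilation directory:
      assert(ExpandPathSketch("File", "src/build") == "src/build/File");
      return 0;
    }
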
+TEST(Filenames, IncludeDirectoryAbsolute) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "src/build", &lines); + + h.DefineDir("/Dir", 1); + h.DefineFile("File", 1, 1, 0, 0); + + h.AddLine(1, 1, 1, 0, 0); + + ASSERT_EQ(1U, lines.size()); + EXPECT_STREQ("/Dir/File", lines[0].file->name.c_str()); +} + +// We should silently ignore attempts to define directory number zero, +// since that is always the compilation directory. +TEST(ModuleErrors, DirectoryZero) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineDir("directory0", 0); // should be ignored + h.DefineFile("relative", 1, 0, 0, 0); + + h.AddLine(1, 1, 1, 0, 0); + + ASSERT_EQ(1U, lines.size()); + EXPECT_STREQ("/relative", lines[0].file->name.c_str()); +} + +// We should refuse to add lines with bogus file numbers. We should +// produce only one warning, however. +TEST(ModuleErrors, BadFileNumber) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineFile("relative", 1, 0, 0, 0); + h.AddLine(1, 1, 2, 0, 0); // bad file number + h.AddLine(2, 1, 2, 0, 0); // bad file number (no duplicate warning) + + EXPECT_EQ(0U, lines.size()); +} + +// We should treat files with bogus directory numbers as relative to +// the compilation unit. +TEST(ModuleErrors, BadDirectoryNumber) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineDir("directory1", 1); + h.DefineFile("baddirnumber1", 1, 2, 0, 0); // bad directory number + h.DefineFile("baddirnumber2", 2, 2, 0, 0); // bad dir number (no warning) + h.AddLine(1, 1, 1, 0, 0); + + ASSERT_EQ(1U, lines.size()); + EXPECT_STREQ("baddirnumber1", lines[0].file->name.c_str()); +} + +// We promise not to report empty lines. +TEST(ModuleErrors, EmptyLine) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineFile("filename1", 1, 0, 0, 0); + h.AddLine(1, 0, 1, 0, 0); + + ASSERT_EQ(0U, lines.size()); +} + +// We are supposed to clip lines that extend beyond the end of the +// address space. +TEST(ModuleErrors, BigLine) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineFile("filename1", 1, 0, 0, 0); + h.AddLine(0xffffffffffffffffULL, 2, 1, 0, 0); + + ASSERT_EQ(1U, lines.size()); + EXPECT_EQ(1U, lines[0].size); +} + +// The 'Omitted' tests verify that we correctly omit line information +// for code in sections that the linker has dropped. See "GNU +// toolchain omitted sections support" at the top of the +// DwarfLineToModule class. 
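
One more note before the Omitted tests below: the BigLine case above relies on the clipping rule mentioned in the class comment, which shortens a line that would run past the end of the 64-bit address space. Arithmetic consistent with that test; the function name is invented and the real check lives in AddLine:

    // Sketch of the clipping rule: unsigned wrap-around of address + length
    // signals that the line overruns the address space.
    #include <cassert>
    #include <cstdint>

    static uint64_t ClipLineLength(uint64_t address, uint64_t length) {
      if (address + length < address)    // wrapped past 2^64
        return ~address + 1;             // bytes left before the end, mod 2^64
      return length;
    }

    int main() {
      // Mirrors TEST(ModuleErrors, BigLine): a two-byte line at the very last
      // address is clipped to a single byte.
      assert(ClipLineLength(0xffffffffffffffffULL, 2) == 1);
      assert(ClipLineLength(0x1000, 0x10) == 0x10);
      return 0;
    }
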
+ +TEST(Omitted, DroppedThenGood) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineFile("filename1", 1, 0, 0, 0); + h.AddLine(0, 10, 1, 83816211, 0); // should be omitted + h.AddLine(20, 10, 1, 13059195, 0); // should be recorded + + ASSERT_EQ(1U, lines.size()); + EXPECT_EQ(13059195, lines[0].number); +} + +TEST(Omitted, GoodThenDropped) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineFile("filename1", 1, 0, 0, 0); + h.AddLine(0x9dd6a372, 10, 1, 41454594, 0); // should be recorded + h.AddLine(0, 10, 1, 44793413, 0); // should be omitted + + ASSERT_EQ(1U, lines.size()); + EXPECT_EQ(41454594, lines[0].number); +} + +TEST(Omitted, Mix1) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineFile("filename1", 1, 0, 0, 0); + h.AddLine(0x679ed72f, 10, 1, 58932642, 0); // should be recorded + h.AddLine(0xdfb5a72d, 10, 1, 39847385, 0); // should be recorded + h.AddLine(0, 0x78, 1, 23053829, 0); // should be omitted + h.AddLine(0x78, 0x6a, 1, 65317783, 0); // should be omitted + h.AddLine(0x78 + 0x6a, 0x2a, 1, 77601423, 0); // should be omitted + h.AddLine(0x9fe0cea5, 10, 1, 91806582, 0); // should be recorded + h.AddLine(0x7e41a109, 10, 1, 56169221, 0); // should be recorded + + ASSERT_EQ(4U, lines.size()); + EXPECT_EQ(58932642, lines[0].number); + EXPECT_EQ(39847385, lines[1].number); + EXPECT_EQ(91806582, lines[2].number); + EXPECT_EQ(56169221, lines[3].number); +} + +TEST(Omitted, Mix2) { + Module m("name", "os", "architecture", "id"); + vector lines; + DwarfLineToModule h(&m, "/", &lines); + + h.DefineFile("filename1", 1, 0, 0, 0); + h.AddLine(0, 0xf2, 1, 58802211, 0); // should be omitted + h.AddLine(0xf2, 0xb9, 1, 78958222, 0); // should be omitted + h.AddLine(0xf2 + 0xb9, 0xf7, 1, 64861892, 0); // should be omitted + h.AddLine(0x4e4d271e, 9, 1, 67355743, 0); // should be recorded + h.AddLine(0xdfb5a72d, 30, 1, 23365776, 0); // should be recorded + h.AddLine(0, 0x64, 1, 76196762, 0); // should be omitted + h.AddLine(0x64, 0x33, 1, 71066611, 0); // should be omitted + h.AddLine(0x64 + 0x33, 0xe3, 1, 61749337, 0); // should be omitted + + ASSERT_EQ(2U, lines.size()); + EXPECT_EQ(67355743, lines[0].number); + EXPECT_EQ(23365776, lines[1].number); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/language.cc b/TMessagesProj/jni/third_party/breakpad/src/common/language.cc new file mode 100644 index 0000000000..c2fd81f644 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/language.cc @@ -0,0 +1,83 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// language.cc: Subclasses and singletons for google_breakpad::Language. +// See language.h for details. + +#include "common/language.h" + +namespace google_breakpad { + +// C++ language-specific operations. +class CPPLanguage: public Language { + public: + CPPLanguage() {} + string MakeQualifiedName(const string &parent_name, + const string &name) const { + if (parent_name.empty()) + return name; + else + return parent_name + "::" + name; + } +}; + +CPPLanguage CPPLanguageSingleton; + +// Java language-specific operations. +class JavaLanguage: public Language { + public: + string MakeQualifiedName(const string &parent_name, + const string &name) const { + if (parent_name.empty()) + return name; + else + return parent_name + "." + name; + } +}; + +JavaLanguage JavaLanguageSingleton; + +// Assembler language-specific operations. +class AssemblerLanguage: public Language { + bool HasFunctions() const { return false; } + string MakeQualifiedName(const string &parent_name, + const string &name) const { + return name; + } +}; + +AssemblerLanguage AssemblerLanguageSingleton; + +const Language * const Language::CPlusPlus = &CPPLanguageSingleton; +const Language * const Language::Java = &JavaLanguageSingleton; +const Language * const Language::Assembler = &AssemblerLanguageSingleton; + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/language.h b/TMessagesProj/jni/third_party/breakpad/src/common/language.h new file mode 100644 index 0000000000..bbe303347a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/language.h @@ -0,0 +1,88 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// language.h: Define google_breakpad::Language. Instances of +// subclasses of this class provide language-appropriate operations +// for the Breakpad symbol dumper. + +#ifndef COMMON_LINUX_LANGUAGE_H__ +#define COMMON_LINUX_LANGUAGE_H__ + +#include + +#include "common/using_std_string.h" + +namespace google_breakpad { + +// An abstract base class for language-specific operations. We choose +// an instance of a subclass of this when we find the CU's language. +// This class's definitions are appropriate for CUs with no specified +// language. +class Language { + public: + // A base class destructor should be either public and virtual, + // or protected and nonvirtual. + virtual ~Language() {} + + // Return true if this language has functions to which we can assign + // line numbers. (Debugging info for assembly language, for example, + // can have source location information, but does not have functions + // recorded using DW_TAG_subprogram DIEs.) + virtual bool HasFunctions() const { return true; } + + // Construct a fully-qualified, language-appropriate form of NAME, + // given that PARENT_NAME is the name of the construct enclosing + // NAME. If PARENT_NAME is the empty string, then NAME is a + // top-level name. + // + // This API sort of assumes that a fully-qualified name is always + // some simple textual composition of the unqualified name and its + // parent's name, and that we don't need to know anything else about + // the parent or the child (say, their DIEs' tags) to do the job. + // This is true for the languages we support at the moment, and + // keeps things concrete. Perhaps a more refined operation would + // take into account the parent and child DIE types, allow languages + // to use their own data type for complex parent names, etc. But if + // C++ doesn't need all that, who would? + virtual string MakeQualifiedName (const string &parent_name, + const string &name) const = 0; + + // Instances for specific languages. + static const Language * const CPlusPlus, + * const Java, + * const Assembler; +}; + +} // namespace google_breakpad + +#endif // COMMON_LINUX_LANGUAGE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/crc32.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/crc32.cc new file mode 100644 index 0000000000..8df636ce4d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/crc32.cc @@ -0,0 +1,70 @@ +// Copyright 2014 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include "common/linux/crc32.h" + +namespace google_breakpad { + +// This implementation is based on the sample implementation in RFC 1952. + +// CRC32 polynomial, in reversed form. +// See RFC 1952, or http://en.wikipedia.org/wiki/Cyclic_redundancy_check +static const uint32_t kCrc32Polynomial = 0xEDB88320; +static uint32_t kCrc32Table[256] = { 0 }; + +#define arraysize(f) (sizeof(f) / sizeof(*f)) + +static void EnsureCrc32TableInited() { + if (kCrc32Table[arraysize(kCrc32Table) - 1]) + return; // already inited + for (uint32_t i = 0; i < arraysize(kCrc32Table); ++i) { + uint32_t c = i; + for (size_t j = 0; j < 8; ++j) { + if (c & 1) { + c = kCrc32Polynomial ^ (c >> 1); + } else { + c >>= 1; + } + } + kCrc32Table[i] = c; + } +} + +uint32_t UpdateCrc32(uint32_t start, const void* buf, size_t len) { + EnsureCrc32TableInited(); + + uint32_t c = start ^ 0xFFFFFFFF; + const uint8_t* u = static_cast(buf); + for (size_t i = 0; i < len; ++i) { + c = kCrc32Table[(c ^ u[i]) & 0xFF] ^ (c >> 8); + } + return c ^ 0xFFFFFFFF; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/crc32.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/crc32.h new file mode 100644 index 0000000000..e3d9db92be --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/crc32.h @@ -0,0 +1,53 @@ +// Copyright 2014 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef COMMON_LINUX_CRC32_H_ +#define COMMON_LINUX_CRC32_H_ + +#include + +#include + +namespace google_breakpad { + +// Updates a CRC32 checksum with |len| bytes from |buf|. |initial| holds the +// checksum result from the previous update; for the first call, it should be 0. +uint32_t UpdateCrc32(uint32_t initial, const void* buf, size_t len); + +// Computes a CRC32 checksum using |len| bytes from |buf|. +inline uint32_t ComputeCrc32(const void* buf, size_t len) { + return UpdateCrc32(0, buf, len); +} +inline uint32_t ComputeCrc32(const std::string& str) { + return ComputeCrc32(str.c_str(), str.size()); +} + +} // namespace google_breakpad + +#endif // COMMON_LINUX_CRC32_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/dump_symbols.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/dump_symbols.cc new file mode 100644 index 0000000000..0bcc18ab10 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/dump_symbols.cc @@ -0,0 +1,976 @@ +// Copyright (c) 2011 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
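
The crc32.h interface added above is what ReadDebugLink, later in this file, uses to checksum a candidate debug file in 4 KiB chunks. A short usage sketch: feeding the buffer incrementally must give the same result as the one-shot call, and for this particular input the result should be the conventional CRC-32 check value.

    // Usage sketch for google_breakpad::UpdateCrc32 / ComputeCrc32 as declared
    // in common/linux/crc32.h above.
    #include <cassert>
    #include <cstdint>
    #include <cstdio>
    #include <string>

    #include "common/linux/crc32.h"

    int main() {
      const std::string data = "123456789";

      // One-shot.
      uint32_t whole = google_breakpad::ComputeCrc32(data);

      // Incremental, carrying the running value between calls, just like the
      // fixed-size read loop in ReadDebugLink.
      uint32_t partial = google_breakpad::UpdateCrc32(0, data.data(), 4);
      partial = google_breakpad::UpdateCrc32(partial, data.data() + 4,
                                             data.size() - 4);

      assert(whole == partial);
      printf("crc32 = 0x%08X\n", (unsigned) whole);  // expected: 0xCBF43926
      return 0;
    }
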
+ +// Restructured in 2009 by: Jim Blandy + +// dump_symbols.cc: implement google_breakpad::WriteSymbolFile: +// Find all the debugging info in a file and dump it as a Breakpad symbol file. + +#include "common/linux/dump_symbols.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#include "common/dwarf/bytereader-inl.h" +#include "common/dwarf/dwarf2diehandler.h" +#include "common/dwarf_cfi_to_module.h" +#include "common/dwarf_cu_to_module.h" +#include "common/dwarf_line_to_module.h" +#include "common/linux/crc32.h" +#include "common/linux/eintr_wrapper.h" +#include "common/linux/elfutils.h" +#include "common/linux/elfutils-inl.h" +#include "common/linux/elf_symbols_to_module.h" +#include "common/linux/file_id.h" +#include "common/module.h" +#include "common/scoped_ptr.h" +#ifndef NO_STABS_SUPPORT +#include "common/stabs_reader.h" +#include "common/stabs_to_module.h" +#endif +#include "common/using_std_string.h" + +// This namespace contains helper functions. +namespace { + +using google_breakpad::DumpOptions; +using google_breakpad::DwarfCFIToModule; +using google_breakpad::DwarfCUToModule; +using google_breakpad::DwarfLineToModule; +using google_breakpad::ElfClass; +using google_breakpad::ElfClass32; +using google_breakpad::ElfClass64; +using google_breakpad::FindElfSectionByName; +using google_breakpad::GetOffset; +using google_breakpad::IsValidElf; +using google_breakpad::Module; +#ifndef NO_STABS_SUPPORT +using google_breakpad::StabsToModule; +#endif +using google_breakpad::scoped_ptr; + +// Define AARCH64 ELF architecture if host machine does not include this define. +#ifndef EM_AARCH64 +#define EM_AARCH64 183 +#endif + +// +// FDWrapper +// +// Wrapper class to make sure opened file is closed. +// +class FDWrapper { + public: + explicit FDWrapper(int fd) : + fd_(fd) {} + ~FDWrapper() { + if (fd_ != -1) + close(fd_); + } + int get() { + return fd_; + } + int release() { + int fd = fd_; + fd_ = -1; + return fd; + } + private: + int fd_; +}; + +// +// MmapWrapper +// +// Wrapper class to make sure mapped regions are unmapped. +// +class MmapWrapper { + public: + MmapWrapper() : is_set_(false) {} + ~MmapWrapper() { + if (is_set_ && base_ != NULL) { + assert(size_ > 0); + munmap(base_, size_); + } + } + void set(void *mapped_address, size_t mapped_size) { + is_set_ = true; + base_ = mapped_address; + size_ = mapped_size; + } + void release() { + assert(is_set_); + is_set_ = false; + base_ = NULL; + size_ = 0; + } + + private: + bool is_set_; + void* base_; + size_t size_; +}; + +// Find the preferred loading address of the binary. +template +typename ElfClass::Addr GetLoadingAddress( + const typename ElfClass::Phdr* program_headers, + int nheader) { + typedef typename ElfClass::Phdr Phdr; + + // For non-PIC executables (e_type == ET_EXEC), the load address is + // the start address of the first PT_LOAD segment. (ELF requires + // the segments to be sorted by load address.) For PIC executables + // and dynamic libraries (e_type == ET_DYN), this address will + // normally be zero. 
+ for (int i = 0; i < nheader; ++i) { + const Phdr& header = program_headers[i]; + if (header.p_type == PT_LOAD) + return header.p_vaddr; + } + return 0; +} + +#ifndef NO_STABS_SUPPORT +template +bool LoadStabs(const typename ElfClass::Ehdr* elf_header, + const typename ElfClass::Shdr* stab_section, + const typename ElfClass::Shdr* stabstr_section, + const bool big_endian, + Module* module) { + // A callback object to handle data from the STABS reader. + StabsToModule handler(module); + // Find the addresses of the STABS data, and create a STABS reader object. + // On Linux, STABS entries always have 32-bit values, regardless of the + // address size of the architecture whose code they're describing, and + // the strings are always "unitized". + const uint8_t* stabs = + GetOffset(elf_header, stab_section->sh_offset); + const uint8_t* stabstr = + GetOffset(elf_header, stabstr_section->sh_offset); + google_breakpad::StabsReader reader(stabs, stab_section->sh_size, + stabstr, stabstr_section->sh_size, + big_endian, 4, true, &handler); + // Read the STABS data, and do post-processing. + if (!reader.Process()) + return false; + handler.Finalize(); + return true; +} +#endif // NO_STABS_SUPPORT + +// A line-to-module loader that accepts line number info parsed by +// dwarf2reader::LineInfo and populates a Module and a line vector +// with the results. +class DumperLineToModule: public DwarfCUToModule::LineToModuleHandler { + public: + // Create a line-to-module converter using BYTE_READER. + explicit DumperLineToModule(dwarf2reader::ByteReader *byte_reader) + : byte_reader_(byte_reader) { } + void StartCompilationUnit(const string& compilation_dir) { + compilation_dir_ = compilation_dir; + } + void ReadProgram(const char* program, uint64 length, + Module* module, std::vector* lines) { + DwarfLineToModule handler(module, compilation_dir_, lines); + dwarf2reader::LineInfo parser(program, length, byte_reader_, &handler); + parser.Start(); + } + private: + string compilation_dir_; + dwarf2reader::ByteReader *byte_reader_; +}; + +template +bool LoadDwarf(const string& dwarf_filename, + const typename ElfClass::Ehdr* elf_header, + const bool big_endian, + bool handle_inter_cu_refs, + Module* module) { + typedef typename ElfClass::Shdr Shdr; + + const dwarf2reader::Endianness endianness = big_endian ? + dwarf2reader::ENDIANNESS_BIG : dwarf2reader::ENDIANNESS_LITTLE; + dwarf2reader::ByteReader byte_reader(endianness); + + // Construct a context for this file. + DwarfCUToModule::FileContext file_context(dwarf_filename, + module, + handle_inter_cu_refs); + + // Build a map of the ELF file's sections. + const Shdr* sections = + GetOffset(elf_header, elf_header->e_shoff); + int num_sections = elf_header->e_shnum; + const Shdr* section_names = sections + elf_header->e_shstrndx; + for (int i = 0; i < num_sections; i++) { + const Shdr* section = §ions[i]; + string name = GetOffset(elf_header, + section_names->sh_offset) + + section->sh_name; + const char* contents = GetOffset(elf_header, + section->sh_offset); + file_context.AddSectionToSectionMap(name, contents, section->sh_size); + } + + // Parse all the compilation units in the .debug_info section. 
+ DumperLineToModule line_to_module(&byte_reader); + dwarf2reader::SectionMap::const_iterator debug_info_entry = + file_context.section_map().find(".debug_info"); + assert(debug_info_entry != file_context.section_map().end()); + const std::pair& debug_info_section = + debug_info_entry->second; + // This should never have been called if the file doesn't have a + // .debug_info section. + assert(debug_info_section.first); + uint64 debug_info_length = debug_info_section.second; + for (uint64 offset = 0; offset < debug_info_length;) { + // Make a handler for the root DIE that populates MODULE with the + // data that was found. + DwarfCUToModule::WarningReporter reporter(dwarf_filename, offset); + DwarfCUToModule root_handler(&file_context, &line_to_module, &reporter); + // Make a Dwarf2Handler that drives the DIEHandler. + dwarf2reader::DIEDispatcher die_dispatcher(&root_handler); + // Make a DWARF parser for the compilation unit at OFFSET. + dwarf2reader::CompilationUnit reader(file_context.section_map(), + offset, + &byte_reader, + &die_dispatcher); + // Process the entire compilation unit; get the offset of the next. + offset += reader.Start(); + } + return true; +} + +// Fill REGISTER_NAMES with the register names appropriate to the +// machine architecture given in HEADER, indexed by the register +// numbers used in DWARF call frame information. Return true on +// success, or false if HEADER's machine architecture is not +// supported. +template +bool DwarfCFIRegisterNames(const typename ElfClass::Ehdr* elf_header, + std::vector* register_names) { + switch (elf_header->e_machine) { + case EM_386: + *register_names = DwarfCFIToModule::RegisterNames::I386(); + return true; + case EM_ARM: + *register_names = DwarfCFIToModule::RegisterNames::ARM(); + return true; + case EM_AARCH64: + *register_names = DwarfCFIToModule::RegisterNames::ARM64(); + return true; + case EM_MIPS: + *register_names = DwarfCFIToModule::RegisterNames::MIPS(); + return true; + case EM_X86_64: + *register_names = DwarfCFIToModule::RegisterNames::X86_64(); + return true; + default: + return false; + } +} + +template +bool LoadDwarfCFI(const string& dwarf_filename, + const typename ElfClass::Ehdr* elf_header, + const char* section_name, + const typename ElfClass::Shdr* section, + const bool eh_frame, + const typename ElfClass::Shdr* got_section, + const typename ElfClass::Shdr* text_section, + const bool big_endian, + Module* module) { + // Find the appropriate set of register names for this file's + // architecture. + std::vector register_names; + if (!DwarfCFIRegisterNames(elf_header, ®ister_names)) { + fprintf(stderr, "%s: unrecognized ELF machine architecture '%d';" + " cannot convert DWARF call frame information\n", + dwarf_filename.c_str(), elf_header->e_machine); + return false; + } + + const dwarf2reader::Endianness endianness = big_endian ? + dwarf2reader::ENDIANNESS_BIG : dwarf2reader::ENDIANNESS_LITTLE; + + // Find the call frame information and its size. + const char* cfi = + GetOffset(elf_header, section->sh_offset); + size_t cfi_size = section->sh_size; + + // Plug together the parser, handler, and their entourages. + DwarfCFIToModule::Reporter module_reporter(dwarf_filename, section_name); + DwarfCFIToModule handler(module, register_names, &module_reporter); + dwarf2reader::ByteReader byte_reader(endianness); + + byte_reader.SetAddressSize(ElfClass::kAddrSize); + + // Provide the base addresses for .eh_frame encoded pointers, if + // possible. 
+ byte_reader.SetCFIDataBase(section->sh_addr, cfi); + if (got_section) + byte_reader.SetDataBase(got_section->sh_addr); + if (text_section) + byte_reader.SetTextBase(text_section->sh_addr); + + dwarf2reader::CallFrameInfo::Reporter dwarf_reporter(dwarf_filename, + section_name); + dwarf2reader::CallFrameInfo parser(cfi, cfi_size, + &byte_reader, &handler, &dwarf_reporter, + eh_frame); + parser.Start(); + return true; +} + +bool LoadELF(const string& obj_file, MmapWrapper* map_wrapper, + void** elf_header) { + int obj_fd = open(obj_file.c_str(), O_RDONLY); + if (obj_fd < 0) { + fprintf(stderr, "Failed to open ELF file '%s': %s\n", + obj_file.c_str(), strerror(errno)); + return false; + } + FDWrapper obj_fd_wrapper(obj_fd); + struct stat st; + if (fstat(obj_fd, &st) != 0 && st.st_size <= 0) { + fprintf(stderr, "Unable to fstat ELF file '%s': %s\n", + obj_file.c_str(), strerror(errno)); + return false; + } + void* obj_base = mmap(NULL, st.st_size, + PROT_READ | PROT_WRITE, MAP_PRIVATE, obj_fd, 0); + if (obj_base == MAP_FAILED) { + fprintf(stderr, "Failed to mmap ELF file '%s': %s\n", + obj_file.c_str(), strerror(errno)); + return false; + } + map_wrapper->set(obj_base, st.st_size); + *elf_header = obj_base; + if (!IsValidElf(*elf_header)) { + fprintf(stderr, "Not a valid ELF file: %s\n", obj_file.c_str()); + return false; + } + return true; +} + +// Get the endianness of ELF_HEADER. If it's invalid, return false. +template +bool ElfEndianness(const typename ElfClass::Ehdr* elf_header, + bool* big_endian) { + if (elf_header->e_ident[EI_DATA] == ELFDATA2LSB) { + *big_endian = false; + return true; + } + if (elf_header->e_ident[EI_DATA] == ELFDATA2MSB) { + *big_endian = true; + return true; + } + + fprintf(stderr, "bad data encoding in ELF header: %d\n", + elf_header->e_ident[EI_DATA]); + return false; +} + +// Given |left_abspath|, find the absolute path for |right_path| and see if the +// two absolute paths are the same. +bool IsSameFile(const char* left_abspath, const string& right_path) { + char right_abspath[PATH_MAX]; + if (!realpath(right_path.c_str(), right_abspath)) + return false; + return strcmp(left_abspath, right_abspath) == 0; +} + +// Read the .gnu_debuglink and get the debug file name. If anything goes +// wrong, return an empty string. +string ReadDebugLink(const char* debuglink, + const size_t debuglink_size, + const bool big_endian, + const string& obj_file, + const std::vector& debug_dirs) { + size_t debuglink_len = strlen(debuglink) + 5; // Include '\0' + CRC32. + debuglink_len = 4 * ((debuglink_len + 3) / 4); // Round up to 4 bytes. + + // Sanity check. + if (debuglink_len != debuglink_size) { + fprintf(stderr, "Mismatched .gnu_debuglink string / section size: " + "%zx %zx\n", debuglink_len, debuglink_size); + return string(); + } + + char obj_file_abspath[PATH_MAX]; + if (!realpath(obj_file.c_str(), obj_file_abspath)) { + fprintf(stderr, "Cannot resolve absolute path for %s\n", obj_file.c_str()); + return string(); + } + + std::vector searched_paths; + string debuglink_path; + std::vector::const_iterator it; + for (it = debug_dirs.begin(); it < debug_dirs.end(); ++it) { + const string& debug_dir = *it; + debuglink_path = debug_dir + "/" + debuglink; + + // There is the annoying case of /path/to/foo.so having foo.so as the + // debug link file name. Thus this may end up opening /path/to/foo.so again, + // and there is a small chance of the two files having the same CRC. 
+ if (IsSameFile(obj_file_abspath, debuglink_path)) + continue; + + searched_paths.push_back(debug_dir); + int debuglink_fd = open(debuglink_path.c_str(), O_RDONLY); + if (debuglink_fd < 0) + continue; + + FDWrapper debuglink_fd_wrapper(debuglink_fd); + + // The CRC is the last 4 bytes in |debuglink|. + const dwarf2reader::Endianness endianness = big_endian ? + dwarf2reader::ENDIANNESS_BIG : dwarf2reader::ENDIANNESS_LITTLE; + dwarf2reader::ByteReader byte_reader(endianness); + uint32_t expected_crc = + byte_reader.ReadFourBytes(&debuglink[debuglink_size - 4]); + + uint32_t actual_crc = 0; + while (true) { + const size_t kReadSize = 4096; + char buf[kReadSize]; + ssize_t bytes_read = HANDLE_EINTR(read(debuglink_fd, &buf, kReadSize)); + if (bytes_read < 0) { + fprintf(stderr, "Error reading debug ELF file %s.\n", + debuglink_path.c_str()); + return string(); + } + if (bytes_read == 0) + break; + actual_crc = google_breakpad::UpdateCrc32(actual_crc, buf, bytes_read); + } + if (actual_crc != expected_crc) { + fprintf(stderr, "Error reading debug ELF file - CRC32 mismatch: %s\n", + debuglink_path.c_str()); + continue; + } + + // Found debug file. + return debuglink_path; + } + + // Not found case. + fprintf(stderr, "Failed to find debug ELF file for '%s' after trying:\n", + obj_file.c_str()); + for (it = searched_paths.begin(); it < searched_paths.end(); ++it) { + const string& debug_dir = *it; + fprintf(stderr, " %s/%s\n", debug_dir.c_str(), debuglink); + } + return string(); +} + +// +// LoadSymbolsInfo +// +// Holds the state between the two calls to LoadSymbols() in case it's necessary +// to follow the .gnu_debuglink section and load debug information from a +// different file. +// +template +class LoadSymbolsInfo { + public: + typedef typename ElfClass::Addr Addr; + + explicit LoadSymbolsInfo(const std::vector& dbg_dirs) : + debug_dirs_(dbg_dirs), + has_loading_addr_(false) {} + + // Keeps track of which sections have been loaded so sections don't + // accidentally get loaded twice from two different files. + void LoadedSection(const string §ion) { + if (loaded_sections_.count(section) == 0) { + loaded_sections_.insert(section); + } else { + fprintf(stderr, "Section %s has already been loaded.\n", + section.c_str()); + } + } + + // The ELF file and linked debug file are expected to have the same preferred + // loading address. + void set_loading_addr(Addr addr, const string &filename) { + if (!has_loading_addr_) { + loading_addr_ = addr; + loaded_file_ = filename; + return; + } + + if (addr != loading_addr_) { + fprintf(stderr, + "ELF file '%s' and debug ELF file '%s' " + "have different load addresses.\n", + loaded_file_.c_str(), filename.c_str()); + assert(false); + } + } + + // Setters and getters + const std::vector& debug_dirs() const { + return debug_dirs_; + } + + string debuglink_file() const { + return debuglink_file_; + } + void set_debuglink_file(string file) { + debuglink_file_ = file; + } + + private: + const std::vector& debug_dirs_; // Directories in which to + // search for the debug ELF file. + + string debuglink_file_; // Full path to the debug ELF file. + + bool has_loading_addr_; // Indicate if LOADING_ADDR_ is valid. + + Addr loading_addr_; // Saves the preferred loading address from the + // first call to LoadSymbols(). + + string loaded_file_; // Name of the file loaded from the first call to + // LoadSymbols(). + + std::set loaded_sections_; // Tracks the Loaded ELF sections + // between calls to LoadSymbols(). 
+}; + +template +bool LoadSymbols(const string& obj_file, + const bool big_endian, + const typename ElfClass::Ehdr* elf_header, + const bool read_gnu_debug_link, + LoadSymbolsInfo* info, + const DumpOptions& options, + Module* module) { + typedef typename ElfClass::Addr Addr; + typedef typename ElfClass::Phdr Phdr; + typedef typename ElfClass::Shdr Shdr; + typedef typename ElfClass::Word Word; + + Addr loading_addr = GetLoadingAddress( + GetOffset(elf_header, elf_header->e_phoff), + elf_header->e_phnum); + module->SetLoadAddress(loading_addr); + info->set_loading_addr(loading_addr, obj_file); + + Word debug_section_type = + elf_header->e_machine == EM_MIPS ? SHT_MIPS_DWARF : SHT_PROGBITS; + const Shdr* sections = + GetOffset(elf_header, elf_header->e_shoff); + const Shdr* section_names = sections + elf_header->e_shstrndx; + const char* names = + GetOffset(elf_header, section_names->sh_offset); + const char *names_end = names + section_names->sh_size; + bool found_debug_info_section = false; + bool found_usable_info = false; + + if (options.symbol_data != ONLY_CFI) { +#ifndef NO_STABS_SUPPORT + // Look for STABS debugging information, and load it if present. + const Shdr* stab_section = + FindElfSectionByName(".stab", SHT_PROGBITS, + sections, names, names_end, + elf_header->e_shnum); + if (stab_section) { + const Shdr* stabstr_section = stab_section->sh_link + sections; + if (stabstr_section) { + found_debug_info_section = true; + found_usable_info = true; + info->LoadedSection(".stab"); + if (!LoadStabs(elf_header, stab_section, stabstr_section, + big_endian, module)) { + fprintf(stderr, "%s: \".stab\" section found, but failed to load" + " STABS debugging information\n", obj_file.c_str()); + } + } + } +#endif // NO_STABS_SUPPORT + + // Look for DWARF debugging information, and load it if present. + const Shdr* dwarf_section = + FindElfSectionByName(".debug_info", debug_section_type, + sections, names, names_end, + elf_header->e_shnum); + if (dwarf_section) { + found_debug_info_section = true; + found_usable_info = true; + info->LoadedSection(".debug_info"); + if (!LoadDwarf(obj_file, elf_header, big_endian, + options.handle_inter_cu_refs, module)) { + fprintf(stderr, "%s: \".debug_info\" section found, but failed to load " + "DWARF debugging information\n", obj_file.c_str()); + } + } + + // See if there are export symbols available. + const Shdr* dynsym_section = + FindElfSectionByName(".dynsym", SHT_DYNSYM, + sections, names, names_end, + elf_header->e_shnum); + const Shdr* dynstr_section = + FindElfSectionByName(".dynstr", SHT_STRTAB, + sections, names, names_end, + elf_header->e_shnum); + if (dynsym_section && dynstr_section) { + info->LoadedSection(".dynsym"); + + const uint8_t* dynsyms = + GetOffset(elf_header, + dynsym_section->sh_offset); + const uint8_t* dynstrs = + GetOffset(elf_header, + dynstr_section->sh_offset); + bool result = + ELFSymbolsToModule(dynsyms, + dynsym_section->sh_size, + dynstrs, + dynstr_section->sh_size, + big_endian, + ElfClass::kAddrSize, + module); + found_usable_info = found_usable_info || result; + } + } + + if (options.symbol_data != NO_CFI) { + // Dwarf Call Frame Information (CFI) is actually independent from + // the other DWARF debugging information, and can be used alone. 
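
Because CFI stands on its own, the dumper exposes it through the SymbolData value carried in DumpOptions (this block runs whenever options.symbol_data != NO_CFI). A hypothetical CFI-only driver built on the public entry point declared later in dump_symbols.h; ONLY_CFI comes from common/symbol_data.h, which that header includes, and the library and output file names here are invented:

    // Hypothetical driver, not part of breakpad: dump only the call frame
    // information (the STACK CFI portion of the symbol file) for one object.
    #include <fstream>
    #include <string>
    #include <vector>

    #include "common/linux/dump_symbols.h"

    int main() {
      std::vector<std::string> debug_dirs;  // empty: don't chase .gnu_debuglink
      google_breakpad::DumpOptions options(ONLY_CFI,
                                           /* handle_inter_cu_refs = */ true);
      std::ofstream sym_stream("libexample.sym");   // invented output path
      bool ok = google_breakpad::WriteSymbolFile("libexample.so", debug_dirs,
                                                 options, sym_stream);
      return ok ? 0 : 1;
    }
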
+ const Shdr* dwarf_cfi_section = + FindElfSectionByName(".debug_frame", debug_section_type, + sections, names, names_end, + elf_header->e_shnum); + if (dwarf_cfi_section) { + // Ignore the return value of this function; even without call frame + // information, the other debugging information could be perfectly + // useful. + info->LoadedSection(".debug_frame"); + bool result = + LoadDwarfCFI(obj_file, elf_header, ".debug_frame", + dwarf_cfi_section, false, 0, 0, big_endian, + module); + found_usable_info = found_usable_info || result; + } + + // Linux C++ exception handling information can also provide + // unwinding data. + const Shdr* eh_frame_section = + FindElfSectionByName(".eh_frame", SHT_PROGBITS, + sections, names, names_end, + elf_header->e_shnum); + if (eh_frame_section) { + // Pointers in .eh_frame data may be relative to the base addresses of + // certain sections. Provide those sections if present. + const Shdr* got_section = + FindElfSectionByName(".got", SHT_PROGBITS, + sections, names, names_end, + elf_header->e_shnum); + const Shdr* text_section = + FindElfSectionByName(".text", SHT_PROGBITS, + sections, names, names_end, + elf_header->e_shnum); + info->LoadedSection(".eh_frame"); + // As above, ignore the return value of this function. + bool result = + LoadDwarfCFI(obj_file, elf_header, ".eh_frame", + eh_frame_section, true, + got_section, text_section, big_endian, module); + found_usable_info = found_usable_info || result; + } + } + + if (!found_debug_info_section) { + fprintf(stderr, "%s: file contains no debugging information" + " (no \".stab\" or \".debug_info\" sections)\n", + obj_file.c_str()); + + // Failed, but maybe there's a .gnu_debuglink section? + if (read_gnu_debug_link) { + const Shdr* gnu_debuglink_section + = FindElfSectionByName(".gnu_debuglink", SHT_PROGBITS, + sections, names, + names_end, elf_header->e_shnum); + if (gnu_debuglink_section) { + if (!info->debug_dirs().empty()) { + const char* debuglink_contents = + GetOffset(elf_header, + gnu_debuglink_section->sh_offset); + string debuglink_file = + ReadDebugLink(debuglink_contents, + gnu_debuglink_section->sh_size, + big_endian, + obj_file, + info->debug_dirs()); + info->set_debuglink_file(debuglink_file); + } else { + fprintf(stderr, ".gnu_debuglink section found in '%s', " + "but no debug path specified.\n", obj_file.c_str()); + } + } else { + fprintf(stderr, "%s does not contain a .gnu_debuglink section.\n", + obj_file.c_str()); + } + } else { + // Return true if some usable information was found, since the caller + // doesn't want to use .gnu_debuglink. + return found_usable_info; + } + + // No debug info was found, let the user try again with .gnu_debuglink + // if present. + return false; + } + + return true; +} + +// Return the breakpad symbol file identifier for the architecture of +// ELF_HEADER. +template +const char* ElfArchitecture(const typename ElfClass::Ehdr* elf_header) { + typedef typename ElfClass::Half Half; + Half arch = elf_header->e_machine; + switch (arch) { + case EM_386: return "x86"; + case EM_ARM: return "arm"; + case EM_AARCH64: return "arm64"; + case EM_MIPS: return "mips"; + case EM_PPC64: return "ppc64"; + case EM_PPC: return "ppc"; + case EM_S390: return "s390"; + case EM_SPARC: return "sparc"; + case EM_SPARCV9: return "sparcv9"; + case EM_X86_64: return "x86_64"; + default: return NULL; + } +} + +// Format the Elf file identifier in IDENTIFIER as a UUID with the +// dashes removed. 
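
FormatIdentifier, defined immediately below, turns the dashed UUID form of the 16-byte build id into the module id string handed to the Module constructor in ReadSymbolDataElfClass: dashes removed, with a literal '0' appended in place of the Windows PDB 'age' field. A worked sketch of that transformation; the identifier value and helper name are invented:

    // Illustrative only: mimics FormatIdentifier's string handling without
    // the FileID helper that produces the dashed form.
    #include <cassert>
    #include <string>

    static std::string StripDashesAppendAge(const std::string& dashed) {
      std::string id;
      for (char c : dashed)
        if (c != '-')
          id += c;
      id += '0';   // stand-in "age" suffix, kept for PDB consistency
      return id;
    }

    int main() {
      assert(StripDashesAppendAge("4C4C4544-0050-3610-8058-B7C04F503732") ==
             "4C4C4544005036108058B7C04F5037320");
      return 0;
    }
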
+string FormatIdentifier(unsigned char identifier[16]) { + char identifier_str[40]; + google_breakpad::FileID::ConvertIdentifierToString( + identifier, + identifier_str, + sizeof(identifier_str)); + string id_no_dash; + for (int i = 0; identifier_str[i] != '\0'; ++i) + if (identifier_str[i] != '-') + id_no_dash += identifier_str[i]; + // Add an extra "0" by the end. PDB files on Windows have an 'age' + // number appended to the end of the file identifier; this isn't + // really used or necessary on other platforms, but be consistent. + id_no_dash += '0'; + return id_no_dash; +} + +// Return the non-directory portion of FILENAME: the portion after the +// last slash, or the whole filename if there are no slashes. +string BaseFileName(const string &filename) { + // Lots of copies! basename's behavior is less than ideal. + char* c_filename = strdup(filename.c_str()); + string base = basename(c_filename); + free(c_filename); + return base; +} + +template +bool SanitizeDebugFile(const typename ElfClass::Ehdr* debug_elf_header, + const string& debuglink_file, + const string& obj_filename, + const char* obj_file_architecture, + const bool obj_file_is_big_endian) { + const char* debug_architecture = + ElfArchitecture(debug_elf_header); + if (!debug_architecture) { + fprintf(stderr, "%s: unrecognized ELF machine architecture: %d\n", + debuglink_file.c_str(), debug_elf_header->e_machine); + return false; + } + if (strcmp(obj_file_architecture, debug_architecture)) { + fprintf(stderr, "%s with ELF machine architecture %s does not match " + "%s with ELF architecture %s\n", + debuglink_file.c_str(), debug_architecture, + obj_filename.c_str(), obj_file_architecture); + return false; + } + bool debug_big_endian; + if (!ElfEndianness(debug_elf_header, &debug_big_endian)) + return false; + if (debug_big_endian != obj_file_is_big_endian) { + fprintf(stderr, "%s and %s does not match in endianness\n", + obj_filename.c_str(), debuglink_file.c_str()); + return false; + } + return true; +} + +template +bool ReadSymbolDataElfClass(const typename ElfClass::Ehdr* elf_header, + const string& obj_filename, + const std::vector& debug_dirs, + const DumpOptions& options, + Module** out_module) { + typedef typename ElfClass::Ehdr Ehdr; + typedef typename ElfClass::Shdr Shdr; + + *out_module = NULL; + + unsigned char identifier[16]; + if (!google_breakpad::FileID::ElfFileIdentifierFromMappedFile(elf_header, + identifier)) { + fprintf(stderr, "%s: unable to generate file identifier\n", + obj_filename.c_str()); + return false; + } + + const char *architecture = ElfArchitecture(elf_header); + if (!architecture) { + fprintf(stderr, "%s: unrecognized ELF machine architecture: %d\n", + obj_filename.c_str(), elf_header->e_machine); + return false; + } + + // Figure out what endianness this file is. + bool big_endian; + if (!ElfEndianness(elf_header, &big_endian)) + return false; + + string name = BaseFileName(obj_filename); + string os = "Linux"; + string id = FormatIdentifier(identifier); + + LoadSymbolsInfo info(debug_dirs); + scoped_ptr module(new Module(name, os, architecture, id)); + if (!LoadSymbols(obj_filename, big_endian, elf_header, + !debug_dirs.empty(), &info, + options, module.get())) { + const string debuglink_file = info.debuglink_file(); + if (debuglink_file.empty()) + return false; + + // Load debuglink ELF file. 
+ fprintf(stderr, "Found debugging info in %s\n", debuglink_file.c_str()); + MmapWrapper debug_map_wrapper; + Ehdr* debug_elf_header = NULL; + if (!LoadELF(debuglink_file, &debug_map_wrapper, + reinterpret_cast(&debug_elf_header)) || + !SanitizeDebugFile(debug_elf_header, debuglink_file, + obj_filename, architecture, big_endian)) { + return false; + } + + if (!LoadSymbols(debuglink_file, big_endian, + debug_elf_header, false, &info, + options, module.get())) { + return false; + } + } + + *out_module = module.release(); + return true; +} + +} // namespace + +namespace google_breakpad { + +// Not explicitly exported, but not static so it can be used in unit tests. +bool ReadSymbolDataInternal(const uint8_t* obj_file, + const string& obj_filename, + const std::vector& debug_dirs, + const DumpOptions& options, + Module** module) { + if (!IsValidElf(obj_file)) { + fprintf(stderr, "Not a valid ELF file: %s\n", obj_filename.c_str()); + return false; + } + + int elfclass = ElfClass(obj_file); + if (elfclass == ELFCLASS32) { + return ReadSymbolDataElfClass( + reinterpret_cast(obj_file), obj_filename, debug_dirs, + options, module); + } + if (elfclass == ELFCLASS64) { + return ReadSymbolDataElfClass( + reinterpret_cast(obj_file), obj_filename, debug_dirs, + options, module); + } + + return false; +} + +bool WriteSymbolFile(const string &obj_file, + const std::vector& debug_dirs, + const DumpOptions& options, + std::ostream &sym_stream) { + Module* module; + if (!ReadSymbolData(obj_file, debug_dirs, options, &module)) + return false; + + bool result = module->Write(sym_stream, options.symbol_data); + delete module; + return result; +} + +bool ReadSymbolData(const string& obj_file, + const std::vector& debug_dirs, + const DumpOptions& options, + Module** module) { + MmapWrapper map_wrapper; + void* elf_header = NULL; + if (!LoadELF(obj_file, &map_wrapper, &elf_header)) + return false; + + return ReadSymbolDataInternal(reinterpret_cast(elf_header), + obj_file, debug_dirs, options, module); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/dump_symbols.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/dump_symbols.h new file mode 100644 index 0000000000..636bb72f73 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/dump_symbols.h @@ -0,0 +1,80 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2011, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// dump_symbols.h: Read debugging information from an ELF file, and write +// it out as a Breakpad symbol file. + +#ifndef COMMON_LINUX_DUMP_SYMBOLS_H__ +#define COMMON_LINUX_DUMP_SYMBOLS_H__ + +#include +#include +#include + +#include "common/symbol_data.h" +#include "common/using_std_string.h" + +namespace google_breakpad { + +class Module; + +struct DumpOptions { + DumpOptions(SymbolData symbol_data, bool handle_inter_cu_refs) + : symbol_data(symbol_data), + handle_inter_cu_refs(handle_inter_cu_refs) { + } + + SymbolData symbol_data; + bool handle_inter_cu_refs; +}; + +// Find all the debugging information in OBJ_FILE, an ELF executable +// or shared library, and write it to SYM_STREAM in the Breakpad symbol +// file format. +// If OBJ_FILE has been stripped but contains a .gnu_debuglink section, +// then look for the debug file in DEBUG_DIRS. +// SYMBOL_DATA allows limiting the type of symbol data written. +bool WriteSymbolFile(const string &obj_file, + const std::vector& debug_dirs, + const DumpOptions& options, + std::ostream &sym_stream); + +// As above, but simply return the debugging information in MODULE +// instead of writing it to a stream. The caller owns the resulting +// Module object and must delete it when finished. +bool ReadSymbolData(const string& obj_file, + const std::vector& debug_dirs, + const DumpOptions& options, + Module** module); + +} // namespace google_breakpad + +#endif // COMMON_LINUX_DUMP_SYMBOLS_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/dump_symbols_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/dump_symbols_unittest.cc new file mode 100644 index 0000000000..3f86dbe6a3 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/dump_symbols_unittest.cc @@ -0,0 +1,172 @@ +// Copyright (c) 2011 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
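The WriteSymbolFile() and ReadSymbolData() declarations above are the whole public surface of this header. A hedged usage sketch, mirroring what a dump_syms-style driver would do (the library path and output name are placeholders; ALL_SYMBOL_DATA comes from common/symbol_data.h):

#include <fstream>
#include <iostream>
#include <string>
#include <vector>

#include "common/linux/dump_symbols.h"

using namespace google_breakpad;

int main() {
  // Directories to search if the binary only carries a .gnu_debuglink.
  std::vector<std::string> debug_dirs;
  // Emit all symbol data and resolve inter-CU DWARF references.
  DumpOptions options(ALL_SYMBOL_DATA, /* handle_inter_cu_refs = */ true);

  std::ofstream sym_stream("libexample.so.sym");  // hypothetical output path
  if (!WriteSymbolFile("libexample.so", debug_dirs, options, sym_stream)) {
    std::cerr << "unable to dump symbols for libexample.so\n";
    return 1;
  }
  return 0;
}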
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Ted Mielczarek + +// dump_symbols_unittest.cc: +// Unittests for google_breakpad::DumpSymbols + +#include +#include +#include + +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/linux/dump_symbols.h" +#include "common/linux/synth_elf.h" +#include "common/module.h" +#include "common/using_std_string.h" + +namespace google_breakpad { + +bool ReadSymbolDataInternal(const uint8_t* obj_file, + const string& obj_filename, + const std::vector& debug_dir, + const DumpOptions& options, + Module** module); + +using google_breakpad::synth_elf::ELF; +using google_breakpad::synth_elf::StringTable; +using google_breakpad::synth_elf::SymbolTable; +using google_breakpad::test_assembler::kLittleEndian; +using google_breakpad::test_assembler::Section; +using std::stringstream; +using std::vector; +using ::testing::Test; + +class DumpSymbols : public Test { + public: + void GetElfContents(ELF& elf) { + string contents; + ASSERT_TRUE(elf.GetContents(&contents)); + ASSERT_LT(0U, contents.size()); + + elfdata_v.clear(); + elfdata_v.insert(elfdata_v.begin(), contents.begin(), contents.end()); + elfdata = &elfdata_v[0]; + } + + vector elfdata_v; + uint8_t* elfdata; +}; + +TEST_F(DumpSymbols, Invalid) { + Elf32_Ehdr header; + memset(&header, 0, sizeof(header)); + Module* module; + DumpOptions options(ALL_SYMBOL_DATA, true); + EXPECT_FALSE(ReadSymbolDataInternal(reinterpret_cast(&header), + "foo", + vector(), + options, + &module)); +} + +TEST_F(DumpSymbols, SimplePublic32) { + ELF elf(EM_386, ELFCLASS32, kLittleEndian); + // Zero out text section for simplicity. + Section text(kLittleEndian); + text.Append(4096, 0); + elf.AddSection(".text", text, SHT_PROGBITS); + + // Add a public symbol. + StringTable table(kLittleEndian); + SymbolTable syms(kLittleEndian, 4, table); + syms.AddSymbol("superfunc", (uint32_t)0x1000, (uint32_t)0x10, + ELF32_ST_INFO(STB_GLOBAL, STT_FUNC), + SHN_UNDEF + 1); + int index = elf.AddSection(".dynstr", table, SHT_STRTAB); + elf.AddSection(".dynsym", syms, + SHT_DYNSYM, // type + SHF_ALLOC, // flags + 0, // addr + index, // link + sizeof(Elf32_Sym)); // entsize + + elf.Finish(); + GetElfContents(elf); + + Module* module; + DumpOptions options(ALL_SYMBOL_DATA, true); + EXPECT_TRUE(ReadSymbolDataInternal(elfdata, + "foo", + vector(), + options, + &module)); + + stringstream s; + module->Write(s, ALL_SYMBOL_DATA); + EXPECT_EQ("MODULE Linux x86 000000000000000000000000000000000 foo\n" + "PUBLIC 1000 0 superfunc\n", + s.str()); + delete module; +} + +TEST_F(DumpSymbols, SimplePublic64) { + ELF elf(EM_X86_64, ELFCLASS64, kLittleEndian); + // Zero out text section for simplicity. + Section text(kLittleEndian); + text.Append(4096, 0); + elf.AddSection(".text", text, SHT_PROGBITS); + + // Add a public symbol. 
+ StringTable table(kLittleEndian); + SymbolTable syms(kLittleEndian, 8, table); + syms.AddSymbol("superfunc", (uint64_t)0x1000, (uint64_t)0x10, + ELF64_ST_INFO(STB_GLOBAL, STT_FUNC), + SHN_UNDEF + 1); + int index = elf.AddSection(".dynstr", table, SHT_STRTAB); + elf.AddSection(".dynsym", syms, + SHT_DYNSYM, // type + SHF_ALLOC, // flags + 0, // addr + index, // link + sizeof(Elf64_Sym)); // entsize + + elf.Finish(); + GetElfContents(elf); + + Module* module; + DumpOptions options(ALL_SYMBOL_DATA, true); + EXPECT_TRUE(ReadSymbolDataInternal(elfdata, + "foo", + vector(), + options, + &module)); + + stringstream s; + module->Write(s, ALL_SYMBOL_DATA); + EXPECT_EQ("MODULE Linux x86_64 000000000000000000000000000000000 foo\n" + "PUBLIC 1000 0 superfunc\n", + s.str()); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/eintr_wrapper.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/eintr_wrapper.h new file mode 100644 index 0000000000..3f1d184817 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/eintr_wrapper.h @@ -0,0 +1,58 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef COMMON_LINUX_EINTR_WRAPPER_H_ +#define COMMON_LINUX_EINTR_WRAPPER_H_ + +#include + +// This provides a wrapper around system calls which may be interrupted by a +// signal and return EINTR. See man 7 signal. 
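To make that comment concrete before the macros themselves: HANDLE_EINTR (defined just below) retries an expression while it fails with EINTR, and IGNORE_EINTR treats an EINTR failure as success. A small usage sketch with a throwaway pipe standing in for a real descriptor:

#include <cerrno>
#include <cstdio>
#include <unistd.h>

#include "common/linux/eintr_wrapper.h"

int main() {
  int fds[2];
  if (pipe(fds) != 0)
    return 1;
  const char msg[] = "hi";
  // Retry the write if a signal interrupts it before any byte is written.
  ssize_t written = HANDLE_EINTR(write(fds[1], msg, sizeof(msg)));
  std::printf("wrote %zd bytes\n", written);
  // Treat an EINTR from close() as success rather than retrying it.
  IGNORE_EINTR(close(fds[1]));
  IGNORE_EINTR(close(fds[0]));
  return 0;
}

close() is deliberately not retried here: after an interrupted close() the descriptor may already be released, so retrying could close an unrelated file, and ignoring EINTR is the conventional choice on Linux.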
+// + +#define HANDLE_EINTR(x) ({ \ + __typeof__(x) eintr_wrapper_result; \ + do { \ + eintr_wrapper_result = (x); \ + } while (eintr_wrapper_result == -1 && errno == EINTR); \ + eintr_wrapper_result; \ +}) + +#define IGNORE_EINTR(x) ({ \ + __typeof__(x) eintr_wrapper_result; \ + do { \ + eintr_wrapper_result = (x); \ + if (eintr_wrapper_result == -1 && errno == EINTR) { \ + eintr_wrapper_result = 0; \ + } \ + } while (0); \ + eintr_wrapper_result; \ +}) + +#endif // COMMON_LINUX_EINTR_WRAPPER_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_core_dump.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_core_dump.cc new file mode 100644 index 0000000000..0e7db7b1fa --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_core_dump.cc @@ -0,0 +1,179 @@ +// Copyright (c) 2011, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// elf_core_dump.cc: Implement google_breakpad::ElfCoreDump. +// See elf_core_dump.h for details. + +#include "common/linux/elf_core_dump.h" + +#include +#include + +namespace google_breakpad { + +// Implementation of ElfCoreDump::Note. + +ElfCoreDump::Note::Note() {} + +ElfCoreDump::Note::Note(const MemoryRange& content) : content_(content) {} + +bool ElfCoreDump::Note::IsValid() const { + return GetHeader() != NULL; +} + +const ElfCoreDump::Nhdr* ElfCoreDump::Note::GetHeader() const { + return content_.GetData(0); +} + +ElfCoreDump::Word ElfCoreDump::Note::GetType() const { + const Nhdr* header = GetHeader(); + // 0 is not being used as a NOTE type. + return header ? 
header->n_type : 0; +} + +MemoryRange ElfCoreDump::Note::GetName() const { + const Nhdr* header = GetHeader(); + if (header) { + return content_.Subrange(sizeof(Nhdr), header->n_namesz); + } + return MemoryRange(); +} + +MemoryRange ElfCoreDump::Note::GetDescription() const { + const Nhdr* header = GetHeader(); + if (header) { + return content_.Subrange(AlignedSize(sizeof(Nhdr) + header->n_namesz), + header->n_descsz); + } + return MemoryRange(); +} + +ElfCoreDump::Note ElfCoreDump::Note::GetNextNote() const { + MemoryRange next_content; + const Nhdr* header = GetHeader(); + if (header) { + size_t next_offset = AlignedSize(sizeof(Nhdr) + header->n_namesz); + next_offset = AlignedSize(next_offset + header->n_descsz); + next_content = + content_.Subrange(next_offset, content_.length() - next_offset); + } + return Note(next_content); +} + +// static +size_t ElfCoreDump::Note::AlignedSize(size_t size) { + size_t mask = sizeof(Word) - 1; + return (size + mask) & ~mask; +} + + +// Implementation of ElfCoreDump. + +ElfCoreDump::ElfCoreDump() {} + +ElfCoreDump::ElfCoreDump(const MemoryRange& content) + : content_(content) { +} + +void ElfCoreDump::SetContent(const MemoryRange& content) { + content_ = content; +} + +bool ElfCoreDump::IsValid() const { + const Ehdr* header = GetHeader(); + return (header && + header->e_ident[0] == ELFMAG0 && + header->e_ident[1] == ELFMAG1 && + header->e_ident[2] == ELFMAG2 && + header->e_ident[3] == ELFMAG3 && + header->e_ident[4] == kClass && + header->e_version == EV_CURRENT && + header->e_type == ET_CORE); +} + +const ElfCoreDump::Ehdr* ElfCoreDump::GetHeader() const { + return content_.GetData(0); +} + +const ElfCoreDump::Phdr* ElfCoreDump::GetProgramHeader(unsigned index) const { + const Ehdr* header = GetHeader(); + if (header) { + return reinterpret_cast(content_.GetArrayElement( + header->e_phoff, header->e_phentsize, index)); + } + return NULL; +} + +const ElfCoreDump::Phdr* ElfCoreDump::GetFirstProgramHeaderOfType( + Word type) const { + for (unsigned i = 0, n = GetProgramHeaderCount(); i < n; ++i) { + const Phdr* program = GetProgramHeader(i); + if (program->p_type == type) { + return program; + } + } + return NULL; +} + +unsigned ElfCoreDump::GetProgramHeaderCount() const { + const Ehdr* header = GetHeader(); + return header ? 
header->e_phnum : 0; +} + +bool ElfCoreDump::CopyData(void* buffer, Addr virtual_address, size_t length) { + for (unsigned i = 0, n = GetProgramHeaderCount(); i < n; ++i) { + const Phdr* program = GetProgramHeader(i); + if (program->p_type != PT_LOAD) + continue; + + size_t offset_in_segment = virtual_address - program->p_vaddr; + if (virtual_address >= program->p_vaddr && + offset_in_segment < program->p_filesz) { + const void* data = + content_.GetData(program->p_offset + offset_in_segment, length); + if (data) { + memcpy(buffer, data, length); + return true; + } + } + } + return false; +} + +ElfCoreDump::Note ElfCoreDump::GetFirstNote() const { + MemoryRange note_content; + const Phdr* program_header = GetFirstProgramHeaderOfType(PT_NOTE); + if (program_header) { + note_content = content_.Subrange(program_header->p_offset, + program_header->p_filesz); + } + return Note(note_content); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_core_dump.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_core_dump.h new file mode 100644 index 0000000000..d03c7a88d3 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_core_dump.h @@ -0,0 +1,148 @@ +// Copyright (c) 2011, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// elf_core_dump.h: Define the google_breakpad::ElfCoreDump class, which +// encapsulates an ELF core dump file mapped into memory. + +#ifndef COMMON_LINUX_ELF_CORE_DUMP_H_ +#define COMMON_LINUX_ELF_CORE_DUMP_H_ + +#include +#include +#include + +#include "common/memory_range.h" + +namespace google_breakpad { + +// A class encapsulating an ELF core dump file mapped into memory, which +// provides methods for accessing program headers and the note section. +class ElfCoreDump { + public: + // ELF types based on the value of __WORDSIZE. 
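Two small reading aids for the class declared here: the ElfW() typedefs just below select 32- or 64-bit ELF types from __WORDSIZE, and CopyData() above translates a virtual address into a file offset by checking it against each PT_LOAD segment. A worked sketch of that translation, with made-up segment numbers:

#include <cstdint>
#include <cstdio>

int main() {
  // Hypothetical PT_LOAD segment: mapped at 0x400000, 0x2000 bytes present
  // in the file, starting at file offset 0x1000 (numbers are made up).
  const uint64_t p_vaddr = 0x400000, p_filesz = 0x2000, p_offset = 0x1000;
  const uint64_t virtual_address = 0x400010;

  uint64_t offset_in_segment = virtual_address - p_vaddr;  // 0x10
  bool in_range =
      virtual_address >= p_vaddr && offset_in_segment < p_filesz;
  // Prints: in range: 1, file offset: 0x1010
  std::printf("in range: %d, file offset: 0x%llx\n", in_range ? 1 : 0,
              static_cast<unsigned long long>(p_offset + offset_in_segment));
  return 0;
}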
+ typedef ElfW(Ehdr) Ehdr; + typedef ElfW(Nhdr) Nhdr; + typedef ElfW(Phdr) Phdr; + typedef ElfW(Word) Word; + typedef ElfW(Addr) Addr; +#if __WORDSIZE == 32 + static const int kClass = ELFCLASS32; +#elif __WORDSIZE == 64 + static const int kClass = ELFCLASS64; +#else +#error "Unsupported __WORDSIZE for ElfCoreDump." +#endif + + // A class encapsulating the note content in a core dump, which provides + // methods for accessing the name and description of a note. + class Note { + public: + Note(); + + // Constructor that takes the note content from |content|. + explicit Note(const MemoryRange& content); + + // Returns true if this note is valid, i,e. a note header is found in + // |content_|, or false otherwise. + bool IsValid() const; + + // Returns the note header, or NULL if no note header is found in + // |content_|. + const Nhdr* GetHeader() const; + + // Returns the note type, or 0 if no note header is found in |content_|. + Word GetType() const; + + // Returns a memory range covering the note name, or an empty range + // if no valid note name is found in |content_|. + MemoryRange GetName() const; + + // Returns a memory range covering the note description, or an empty + // range if no valid note description is found in |content_|. + MemoryRange GetDescription() const; + + // Returns the note following this note, or an empty note if no valid + // note is found after this note. + Note GetNextNote() const; + + private: + // Returns the size in bytes round up to the word alignment, specified + // for the note section, of a given size in bytes. + static size_t AlignedSize(size_t size); + + // Note content. + MemoryRange content_; + }; + + ElfCoreDump(); + + // Constructor that takes the core dump content from |content|. + explicit ElfCoreDump(const MemoryRange& content); + + // Sets the core dump content to |content|. + void SetContent(const MemoryRange& content); + + // Returns true if a valid ELF header in the core dump, or false otherwise. + bool IsValid() const; + + // Returns the ELF header in the core dump, or NULL if no ELF header + // is found in |content_|. + const Ehdr* GetHeader() const; + + // Returns the |index|-th program header in the core dump, or NULL if no + // ELF header is found in |content_| or |index| is out of bounds. + const Phdr* GetProgramHeader(unsigned index) const; + + // Returns the first program header of |type| in the core dump, or NULL if + // no ELF header is found in |content_| or no program header of |type| is + // found. + const Phdr* GetFirstProgramHeaderOfType(Word type) const; + + // Returns the number of program headers in the core dump, or 0 if no + // ELF header is found in |content_|. + unsigned GetProgramHeaderCount() const; + + // Copies |length| bytes of data starting at |virtual_address| in the core + // dump to |buffer|. |buffer| should be a valid pointer to a buffer of at + // least |length| bytes. Returns true if the data to be copied is found in + // the core dump, or false otherwise. + bool CopyData(void* buffer, Addr virtual_address, size_t length); + + // Returns the first note found in the note section of the core dump, or + // an empty note if no note is found. + Note GetFirstNote() const; + + private: + // Core dump content. 
+ MemoryRange content_; +}; + +} // namespace google_breakpad + +#endif // COMMON_LINUX_ELF_CORE_DUMP_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_core_dump_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_core_dump_unittest.cc new file mode 100644 index 0000000000..9b41dceee2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_core_dump_unittest.cc @@ -0,0 +1,256 @@ +// Copyright (c) 2011, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// elf_core_dump_unittest.cc: Unit tests for google_breakpad::ElfCoreDump. 
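Before the tests, a quick recap of the layout ElfCoreDump::Note walks: each PT_NOTE record is an Nhdr followed by the name and then the description, with the name and description each padded to the note word size. A small sketch of the offset arithmetic, using made-up sizes for a "CORE"-style note:

#include <cstddef>
#include <cstdio>

// Round `size` up to the note word alignment, as Note::AlignedSize() does.
static std::size_t Aligned(std::size_t size, std::size_t word) {
  std::size_t mask = word - 1;
  return (size + mask) & ~mask;
}

int main() {
  // Hypothetical record: 12-byte Nhdr, 5-byte name ("CORE" plus NUL),
  // 336-byte description, 4-byte note words.
  const std::size_t nhdr_size = 12, namesz = 5, descsz = 336, word = 4;

  std::size_t desc_offset = Aligned(nhdr_size + namesz, word);         // 20
  std::size_t next_note_offset = Aligned(desc_offset + descsz, word);  // 356
  std::printf("description at +%zu, next note at +%zu\n",
              desc_offset, next_note_offset);
  return 0;
}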
+ +#include + +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/linux/elf_core_dump.h" +#include "common/linux/memory_mapped_file.h" +#include "common/tests/file_utils.h" +#include "common/linux/tests/crash_generator.h" +#include "common/using_std_string.h" + +using google_breakpad::AutoTempDir; +using google_breakpad::CrashGenerator; +using google_breakpad::ElfCoreDump; +using google_breakpad::MemoryMappedFile; +using google_breakpad::MemoryRange; +using google_breakpad::WriteFile; +using std::set; + +TEST(ElfCoreDumpTest, DefaultConstructor) { + ElfCoreDump core; + EXPECT_FALSE(core.IsValid()); + EXPECT_EQ(NULL, core.GetHeader()); + EXPECT_EQ(0U, core.GetProgramHeaderCount()); + EXPECT_EQ(NULL, core.GetProgramHeader(0)); + EXPECT_EQ(NULL, core.GetFirstProgramHeaderOfType(PT_LOAD)); + EXPECT_FALSE(core.GetFirstNote().IsValid()); +} + +TEST(ElfCoreDumpTest, TestElfHeader) { + ElfCoreDump::Ehdr header; + memset(&header, 0, sizeof(header)); + + AutoTempDir temp_dir; + string core_path = temp_dir.path() + "/core"; + const char* core_file = core_path.c_str(); + MemoryMappedFile mapped_core_file; + ElfCoreDump core; + + ASSERT_TRUE(WriteFile(core_file, &header, sizeof(header) - 1)); + ASSERT_TRUE(mapped_core_file.Map(core_file, 0)); + core.SetContent(mapped_core_file.content()); + EXPECT_FALSE(core.IsValid()); + EXPECT_EQ(NULL, core.GetHeader()); + EXPECT_EQ(0U, core.GetProgramHeaderCount()); + EXPECT_EQ(NULL, core.GetProgramHeader(0)); + EXPECT_EQ(NULL, core.GetFirstProgramHeaderOfType(PT_LOAD)); + EXPECT_FALSE(core.GetFirstNote().IsValid()); + + ASSERT_TRUE(WriteFile(core_file, &header, sizeof(header))); + ASSERT_TRUE(mapped_core_file.Map(core_file, 0)); + core.SetContent(mapped_core_file.content()); + EXPECT_FALSE(core.IsValid()); + + header.e_ident[0] = ELFMAG0; + ASSERT_TRUE(WriteFile(core_file, &header, sizeof(header))); + ASSERT_TRUE(mapped_core_file.Map(core_file, 0)); + core.SetContent(mapped_core_file.content()); + EXPECT_FALSE(core.IsValid()); + + header.e_ident[1] = ELFMAG1; + ASSERT_TRUE(WriteFile(core_file, &header, sizeof(header))); + ASSERT_TRUE(mapped_core_file.Map(core_file, 0)); + core.SetContent(mapped_core_file.content()); + EXPECT_FALSE(core.IsValid()); + + header.e_ident[2] = ELFMAG2; + ASSERT_TRUE(WriteFile(core_file, &header, sizeof(header))); + ASSERT_TRUE(mapped_core_file.Map(core_file, 0)); + core.SetContent(mapped_core_file.content()); + EXPECT_FALSE(core.IsValid()); + + header.e_ident[3] = ELFMAG3; + ASSERT_TRUE(WriteFile(core_file, &header, sizeof(header))); + ASSERT_TRUE(mapped_core_file.Map(core_file, 0)); + core.SetContent(mapped_core_file.content()); + EXPECT_FALSE(core.IsValid()); + + header.e_ident[4] = ElfCoreDump::kClass; + ASSERT_TRUE(WriteFile(core_file, &header, sizeof(header))); + ASSERT_TRUE(mapped_core_file.Map(core_file, 0)); + core.SetContent(mapped_core_file.content()); + EXPECT_FALSE(core.IsValid()); + + header.e_version = EV_CURRENT; + ASSERT_TRUE(WriteFile(core_file, &header, sizeof(header))); + ASSERT_TRUE(mapped_core_file.Map(core_file, 0)); + core.SetContent(mapped_core_file.content()); + EXPECT_FALSE(core.IsValid()); + + header.e_type = ET_CORE; + ASSERT_TRUE(WriteFile(core_file, &header, sizeof(header))); + ASSERT_TRUE(mapped_core_file.Map(core_file, 0)); + core.SetContent(mapped_core_file.content()); + EXPECT_TRUE(core.IsValid()); +} + +TEST(ElfCoreDumpTest, ValidCoreFile) { + CrashGenerator crash_generator; + if (!crash_generator.HasDefaultCorePattern()) { + fprintf(stderr, "ElfCoreDumpTest.ValidCoreFile 
test is skipped " + "due to non-default core pattern"); + return; + } + + const unsigned kNumOfThreads = 3; + const unsigned kCrashThread = 1; + const int kCrashSignal = SIGABRT; + ASSERT_TRUE(crash_generator.CreateChildCrash(kNumOfThreads, kCrashThread, + kCrashSignal, NULL)); + pid_t expected_crash_thread_id = crash_generator.GetThreadId(kCrashThread); + set expected_thread_ids; + for (unsigned i = 0; i < kNumOfThreads; ++i) { + expected_thread_ids.insert(crash_generator.GetThreadId(i)); + } + +#if defined(__ANDROID__) + struct stat st; + if (stat(crash_generator.GetCoreFilePath().c_str(), &st) != 0) { + fprintf(stderr, "ElfCoreDumpTest.ValidCoreFile test is skipped " + "due to no core file being generated"); + return; + } +#endif + + MemoryMappedFile mapped_core_file; + ASSERT_TRUE( + mapped_core_file.Map(crash_generator.GetCoreFilePath().c_str(), 0)); + + ElfCoreDump core; + core.SetContent(mapped_core_file.content()); + EXPECT_TRUE(core.IsValid()); + + // Based on write_note_info() in linux/kernel/fs/binfmt_elf.c, notes are + // ordered as follows (NT_PRXFPREG and NT_386_TLS are i386 specific): + // Thread Name Type + // ------------------------------------------------------------------- + // 1st thread CORE NT_PRSTATUS + // process-wide CORE NT_PRPSINFO + // process-wide CORE NT_AUXV + // 1st thread CORE NT_FPREGSET + // 1st thread LINUX NT_PRXFPREG + // 1st thread LINUX NT_386_TLS + // + // 2nd thread CORE NT_PRSTATUS + // 2nd thread CORE NT_FPREGSET + // 2nd thread LINUX NT_PRXFPREG + // 2nd thread LINUX NT_386_TLS + // + // 3rd thread CORE NT_PRSTATUS + // 3rd thread CORE NT_FPREGSET + // 3rd thread LINUX NT_PRXFPREG + // 3rd thread LINUX NT_386_TLS + + size_t num_nt_prpsinfo = 0; + size_t num_nt_prstatus = 0; + size_t num_pr_fpvalid = 0; +#if defined(__i386__) || defined(__x86_64__) + size_t num_nt_fpregset = 0; +#endif +#if defined(__i386__) + size_t num_nt_prxfpreg = 0; +#endif + set actual_thread_ids; + ElfCoreDump::Note note = core.GetFirstNote(); + while (note.IsValid()) { + MemoryRange name = note.GetName(); + MemoryRange description = note.GetDescription(); + EXPECT_FALSE(name.IsEmpty()); + EXPECT_FALSE(description.IsEmpty()); + + switch (note.GetType()) { + case NT_PRPSINFO: { + EXPECT_TRUE(description.data() != NULL); + EXPECT_EQ(sizeof(elf_prpsinfo), description.length()); + ++num_nt_prpsinfo; + break; + } + case NT_PRSTATUS: { + EXPECT_TRUE(description.data() != NULL); + EXPECT_EQ(sizeof(elf_prstatus), description.length()); + const elf_prstatus* status = description.GetData(0); + actual_thread_ids.insert(status->pr_pid); + if (num_nt_prstatus == 0) { + EXPECT_EQ(expected_crash_thread_id, status->pr_pid); + EXPECT_EQ(kCrashSignal, status->pr_info.si_signo); + } + ++num_nt_prstatus; + if (status->pr_fpvalid) + ++num_pr_fpvalid; + break; + } +#if defined(__i386__) || defined(__x86_64__) + case NT_FPREGSET: { + EXPECT_TRUE(description.data() != NULL); + EXPECT_EQ(sizeof(user_fpregs_struct), description.length()); + ++num_nt_fpregset; + break; + } +#endif +#if defined(__i386__) + case NT_PRXFPREG: { + EXPECT_TRUE(description.data() != NULL); + EXPECT_EQ(sizeof(user_fpxregs_struct), description.length()); + ++num_nt_prxfpreg; + break; + } +#endif + default: + break; + } + note = note.GetNextNote(); + } + + EXPECT_TRUE(expected_thread_ids == actual_thread_ids); + EXPECT_EQ(1U, num_nt_prpsinfo); + EXPECT_EQ(kNumOfThreads, num_nt_prstatus); +#if defined(__i386__) || defined(__x86_64__) + EXPECT_EQ(num_pr_fpvalid, num_nt_fpregset); +#endif +#if defined(__i386__) + 
EXPECT_EQ(num_pr_fpvalid, num_nt_prxfpreg); +#endif +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_gnu_compat.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_gnu_compat.h new file mode 100644 index 0000000000..f870cbc7d2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_gnu_compat.h @@ -0,0 +1,46 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2013, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Lei Zhang + +// elf_gnu_compat.h: #defines unique to glibc's elf.h. + +#ifndef COMMON_LINUX_ELF_GNU_COMPAT_H_ +#define COMMON_LINUX_ELF_GNU_COMPAT_H_ + +#include + +// A note type on GNU systems corresponding to the .note.gnu.build-id section. +#ifndef NT_GNU_BUILD_ID +#define NT_GNU_BUILD_ID 3 +#endif + +#endif // COMMON_LINUX_ELF_GNU_COMPAT_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_symbols_to_module.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_symbols_to_module.cc new file mode 100644 index 0000000000..562875e11b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_symbols_to_module.cc @@ -0,0 +1,178 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2011 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Ted Mielczarek + +#include "common/linux/elf_symbols_to_module.h" + +#include +#include +#include + +#include "common/byte_cursor.h" +#include "common/module.h" + +namespace google_breakpad { + +class ELFSymbolIterator { +public: + // The contents of an ELF symbol, adjusted for the host's endianness, + // word size, and so on. Corresponds to the data in Elf32_Sym / Elf64_Sym. + struct Symbol { + // True if this iterator has reached the end of the symbol array. When + // this is set, the other members of this structure are not valid. + bool at_end; + + // The number of this symbol within the list. + size_t index; + + // The current symbol's name offset. This is the offset within the + // string table. + size_t name_offset; + + // The current symbol's value, size, info and shndx fields. + uint64_t value; + uint64_t size; + unsigned char info; + uint16_t shndx; + }; + + // Create an ELFSymbolIterator walking the symbols in BUFFER. Treat the + // symbols as big-endian if BIG_ENDIAN is true, as little-endian + // otherwise. Assume each symbol has a 'value' field whose size is + // VALUE_SIZE. + // + ELFSymbolIterator(const ByteBuffer *buffer, bool big_endian, + size_t value_size) + : value_size_(value_size), cursor_(buffer, big_endian) { + // Actually, weird sizes could be handled just fine, but they're + // probably mistakes --- expressed in bits, say. + assert(value_size == 4 || value_size == 8); + symbol_.index = 0; + Fetch(); + } + + // Move to the next symbol. This function's behavior is undefined if + // at_end() is true when it is called. + ELFSymbolIterator &operator++() { Fetch(); symbol_.index++; return *this; } + + // Dereferencing this iterator produces a reference to an Symbol structure + // that holds the current symbol's values. The symbol is owned by this + // SymbolIterator, and will be invalidated at the next call to operator++. + const Symbol &operator*() const { return symbol_; } + const Symbol *operator->() const { return &symbol_; } + +private: + // Read the symbol at cursor_, and set symbol_ appropriately. + void Fetch() { + // Elf32_Sym and Elf64_Sym have different layouts. 
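To spell out the layout difference that comment refers to (and that Fetch() below reads field by field): the 32-bit record keeps value and size before info and shndx, while the 64-bit record moves info and shndx up so that value and size can be 8-byte fields. Reference-only structs; <elf.h> provides the real Elf32_Sym/Elf64_Sym definitions:

#include <cstdint>
#include <cstdio>

// Elf32_Sym: 16 bytes, value/size before the info/shndx fields.
struct Elf32SymLayout {
  uint32_t st_name;   // offset into the string table
  uint32_t st_value;
  uint32_t st_size;
  uint8_t  st_info;   // bind and type, as packed by ELF32_ST_INFO()
  uint8_t  st_other;
  uint16_t st_shndx;  // defining section, or SHN_UNDEF
};

// Elf64_Sym: 24 bytes, info/shndx moved up so value/size can be 64-bit.
struct Elf64SymLayout {
  uint32_t st_name;
  uint8_t  st_info;
  uint8_t  st_other;
  uint16_t st_shndx;
  uint64_t st_value;
  uint64_t st_size;
};

static_assert(sizeof(Elf32SymLayout) == 16, "matches the on-disk record");
static_assert(sizeof(Elf64SymLayout) == 24, "matches the on-disk record");

int main() {
  std::printf("Elf32_Sym: %zu bytes, Elf64_Sym: %zu bytes\n",
              sizeof(Elf32SymLayout), sizeof(Elf64SymLayout));
  return 0;
}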
+ unsigned char other; + if (value_size_ == 4) { + // Elf32_Sym + cursor_ + .Read(4, false, &symbol_.name_offset) + .Read(4, false, &symbol_.value) + .Read(4, false, &symbol_.size) + .Read(1, false, &symbol_.info) + .Read(1, false, &other) + .Read(2, false, &symbol_.shndx); + } else { + // Elf64_Sym + cursor_ + .Read(4, false, &symbol_.name_offset) + .Read(1, false, &symbol_.info) + .Read(1, false, &other) + .Read(2, false, &symbol_.shndx) + .Read(8, false, &symbol_.value) + .Read(8, false, &symbol_.size); + } + symbol_.at_end = !cursor_; + } + + // The size of symbols' value field, in bytes. + size_t value_size_; + + // A byte cursor traversing buffer_. + ByteCursor cursor_; + + // Values for the symbol this iterator refers to. + Symbol symbol_; +}; + +const char *SymbolString(ptrdiff_t offset, ByteBuffer& strings) { + if (offset < 0 || (size_t) offset >= strings.Size()) { + // Return the null string. + offset = 0; + } + return reinterpret_cast(strings.start + offset); +} + +bool ELFSymbolsToModule(const uint8_t *symtab_section, + size_t symtab_size, + const uint8_t *string_section, + size_t string_size, + const bool big_endian, + size_t value_size, + Module *module) { + ByteBuffer symbols(symtab_section, symtab_size); + // Ensure that the string section is null-terminated. + if (string_section[string_size - 1] != '\0') { + const void* null_terminator = memrchr(string_section, '\0', string_size); + string_size = reinterpret_cast(null_terminator) + - string_section; + } + ByteBuffer strings(string_section, string_size); + + // The iterator walking the symbol table. + ELFSymbolIterator iterator(&symbols, big_endian, value_size); + + while(!iterator->at_end) { + if (ELF32_ST_TYPE(iterator->info) == STT_FUNC && + iterator->shndx != SHN_UNDEF) { + Module::Extern *ext = new Module::Extern(iterator->value); + ext->name = SymbolString(iterator->name_offset, strings); +#if !defined(__ANDROID__) // Android NDK doesn't provide abi::__cxa_demangle. + int status = 0; + char* demangled = + abi::__cxa_demangle(ext->name.c_str(), NULL, NULL, &status); + if (demangled) { + if (status == 0) + ext->name = demangled; + free(demangled); + } +#endif + module->AddExtern(ext); + } + ++iterator; + } + return true; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_symbols_to_module.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_symbols_to_module.h new file mode 100644 index 0000000000..2e7c097151 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_symbols_to_module.h @@ -0,0 +1,58 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2011 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Ted Mielczarek + +// elf_symbols_to_module.h: Exposes ELFSymbolsToModule, a function +// for reading ELF symbol tables and inserting exported symbol names +// into a google_breakpad::Module as Extern definitions. + +#ifndef BREAKPAD_COMMON_LINUX_ELF_SYMBOLS_TO_MODULE_H_ +#define BREAKPAD_COMMON_LINUX_ELF_SYMBOLS_TO_MODULE_H_ + +#include +#include + +namespace google_breakpad { + +class Module; + +bool ELFSymbolsToModule(const uint8_t *symtab_section, + size_t symtab_size, + const uint8_t *string_section, + size_t string_size, + const bool big_endian, + size_t value_size, + Module *module); + +} // namespace google_breakpad + + +#endif // BREAKPAD_COMMON_LINUX_ELF_SYMBOLS_TO_MODULE_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_symbols_to_module_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_symbols_to_module_unittest.cc new file mode 100644 index 0000000000..8984449ab6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elf_symbols_to_module_unittest.cc @@ -0,0 +1,370 @@ +// Copyright (c) 2011 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
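One detail of elf_symbols_to_module.cc above worth a standalone illustration: each STT_FUNC name is passed through abi::__cxa_demangle when available (the Android path skips it because the NDK does not provide it). A hedged sketch of that call, using a made-up mangled name:

#include <cxxabi.h>
#include <cstdio>
#include <cstdlib>

int main() {
  int status = 0;
  // "_ZN3foo3barEv" is just an example symbol; it demangles to "foo::bar()".
  char* demangled =
      abi::__cxa_demangle("_ZN3foo3barEv", nullptr, nullptr, &status);
  if (demangled != nullptr && status == 0)
    std::printf("%s\n", demangled);
  std::free(demangled);  // the caller owns the malloc'd buffer
  return 0;
}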
+ +// Original author: Ted Mielczarek + +// elf_symbols_to_module_unittest.cc: +// Unittests for google_breakpad::ELFSymbolsToModule + +#include + +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/linux/elf_symbols_to_module.h" +#include "common/linux/synth_elf.h" +#include "common/module.h" +#include "common/test_assembler.h" +#include "common/using_std_string.h" + +using google_breakpad::Module; +using google_breakpad::synth_elf::StringTable; +using google_breakpad::test_assembler::Endianness; +using google_breakpad::test_assembler::kBigEndian; +using google_breakpad::test_assembler::kLittleEndian; +using google_breakpad::test_assembler::Label; +using google_breakpad::test_assembler::Section; +using ::testing::Test; +using ::testing::TestWithParam; +using std::vector; + +class ELFSymbolsToModuleTestFixture { +public: + ELFSymbolsToModuleTestFixture(Endianness endianness, + size_t value_size) : module("a", "b", "c", "d"), + section(endianness), + table(endianness), + value_size(value_size) {} + + bool ProcessSection() { + string section_contents, table_contents; + section.GetContents(§ion_contents); + table.GetContents(&table_contents); + + bool ret = ELFSymbolsToModule(reinterpret_cast(section_contents.data()), + section_contents.size(), + reinterpret_cast(table_contents.data()), + table_contents.size(), + section.endianness() == kBigEndian, + value_size, + &module); + module.GetExterns(&externs, externs.end()); + return ret; + } + + Module module; + Section section; + StringTable table; + string section_contents; + // 4 or 8 (bytes) + size_t value_size; + + vector externs; +}; + +class ELFSymbolsToModuleTest32 : public ELFSymbolsToModuleTestFixture, + public TestWithParam { +public: + ELFSymbolsToModuleTest32() : ELFSymbolsToModuleTestFixture(GetParam(), 4) {} + + void AddElf32Sym(const string& name, uint32_t value, + uint32_t size, unsigned info, uint16_t shndx) { + section + .D32(table.Add(name)) + .D32(value) + .D32(size) + .D8(info) + .D8(0) // other + .D16(shndx); + } +}; + +TEST_P(ELFSymbolsToModuleTest32, NoFuncs) { + ProcessSection(); + + ASSERT_EQ((size_t)0, externs.size()); +} + +TEST_P(ELFSymbolsToModuleTest32, OneFunc) { + const string kFuncName = "superfunc"; + const uint32_t kFuncAddr = 0x1000; + const uint32_t kFuncSize = 0x10; + + AddElf32Sym(kFuncName, kFuncAddr, kFuncSize, + ELF32_ST_INFO(STB_GLOBAL, STT_FUNC), + // Doesn't really matter, just can't be SHN_UNDEF. + SHN_UNDEF + 1); + + ProcessSection(); + + ASSERT_EQ((size_t)1, externs.size()); + Module::Extern *extern1 = externs[0]; + EXPECT_EQ(kFuncName, extern1->name); + EXPECT_EQ((Module::Address)kFuncAddr, extern1->address); +} + +TEST_P(ELFSymbolsToModuleTest32, NameOutOfBounds) { + const string kFuncName = ""; + const uint32_t kFuncAddr = 0x1000; + const uint32_t kFuncSize = 0x10; + + table.Add("Foo"); + table.Add("Bar"); + // Can't use AddElf32Sym because it puts in a valid string offset. 
+ section + .D32((uint32_t)table.Here().Value() + 1) + .D32(kFuncAddr) + .D32(kFuncSize) + .D8(ELF32_ST_INFO(STB_GLOBAL, STT_FUNC)) + .D8(0) // other + .D16(SHN_UNDEF + 1); + + ProcessSection(); + + ASSERT_EQ((size_t)1, externs.size()); + Module::Extern *extern1 = externs[0]; + EXPECT_EQ(kFuncName, extern1->name); + EXPECT_EQ((Module::Address)kFuncAddr, extern1->address); +} + +TEST_P(ELFSymbolsToModuleTest32, NonTerminatedStringTable) { + const string kFuncName = ""; + const uint32_t kFuncAddr = 0x1000; + const uint32_t kFuncSize = 0x10; + + table.Add("Foo"); + table.Add("Bar"); + // Add a non-null-terminated string to the end of the string table + Label l; + table + .Mark(&l) + .Append("Unterminated"); + // Can't use AddElf32Sym because it puts in a valid string offset. + section + .D32((uint32_t)l.Value()) + .D32(kFuncAddr) + .D32(kFuncSize) + .D8(ELF32_ST_INFO(STB_GLOBAL, STT_FUNC)) + .D8(0) // other + .D16(SHN_UNDEF + 1); + + ProcessSection(); + + ASSERT_EQ((size_t)1, externs.size()); + Module::Extern *extern1 = externs[0]; + EXPECT_EQ(kFuncName, extern1->name); + EXPECT_EQ((Module::Address)kFuncAddr, extern1->address); +} + +TEST_P(ELFSymbolsToModuleTest32, MultipleFuncs) { + const string kFuncName1 = "superfunc"; + const uint32_t kFuncAddr1 = 0x10001000; + const uint32_t kFuncSize1 = 0x10; + const string kFuncName2 = "awesomefunc"; + const uint32_t kFuncAddr2 = 0x20002000; + const uint32_t kFuncSize2 = 0x2f; + const string kFuncName3 = "megafunc"; + const uint32_t kFuncAddr3 = 0x30003000; + const uint32_t kFuncSize3 = 0x3c; + + AddElf32Sym(kFuncName1, kFuncAddr1, kFuncSize1, + ELF32_ST_INFO(STB_GLOBAL, STT_FUNC), + // Doesn't really matter, just can't be SHN_UNDEF. + SHN_UNDEF + 1); + AddElf32Sym(kFuncName2, kFuncAddr2, kFuncSize2, + ELF32_ST_INFO(STB_LOCAL, STT_FUNC), + // Doesn't really matter, just can't be SHN_UNDEF. + SHN_UNDEF + 2); + AddElf32Sym(kFuncName3, kFuncAddr3, kFuncSize3, + ELF32_ST_INFO(STB_LOCAL, STT_FUNC), + // Doesn't really matter, just can't be SHN_UNDEF. + SHN_UNDEF + 3); + + ProcessSection(); + + ASSERT_EQ((size_t)3, externs.size()); + Module::Extern *extern1 = externs[0]; + EXPECT_EQ(kFuncName1, extern1->name); + EXPECT_EQ((Module::Address)kFuncAddr1, extern1->address); + Module::Extern *extern2 = externs[1]; + EXPECT_EQ(kFuncName2, extern2->name); + EXPECT_EQ((Module::Address)kFuncAddr2, extern2->address); + Module::Extern *extern3 = externs[2]; + EXPECT_EQ(kFuncName3, extern3->name); + EXPECT_EQ((Module::Address)kFuncAddr3, extern3->address); +} + +TEST_P(ELFSymbolsToModuleTest32, SkipStuff) { + const string kFuncName = "superfunc"; + const uint32_t kFuncAddr = 0x1000; + const uint32_t kFuncSize = 0x10; + + // Should skip functions in SHN_UNDEF + AddElf32Sym("skipme", 0xFFFF, 0x10, + ELF32_ST_INFO(STB_GLOBAL, STT_FUNC), + SHN_UNDEF); + AddElf32Sym(kFuncName, kFuncAddr, kFuncSize, + ELF32_ST_INFO(STB_GLOBAL, STT_FUNC), + // Doesn't really matter, just can't be SHN_UNDEF. + SHN_UNDEF + 1); + // Should skip non-STT_FUNC entries. + AddElf32Sym("skipmetoo", 0xAAAA, 0x10, + ELF32_ST_INFO(STB_GLOBAL, STT_FILE), + SHN_UNDEF + 1); + + ProcessSection(); + + ASSERT_EQ((size_t)1, externs.size()); + Module::Extern *extern1 = externs[0]; + EXPECT_EQ(kFuncName, extern1->name); + EXPECT_EQ((Module::Address)kFuncAddr, extern1->address); +} + +// Run all the 32-bit tests with both endianness +INSTANTIATE_TEST_CASE_P(Endian, + ELFSymbolsToModuleTest32, + ::testing::Values(kLittleEndian, kBigEndian)); + +// Similar tests, but with 64-bit values. 
Ostensibly this could be +// shoehorned into the parameterization by using ::testing::Combine, +// but that would make it difficult to get the types right since these +// actual test cases aren't parameterized. This could also be written +// as a type-parameterized test, but combining that with a value-parameterized +// test seemed really ugly, and also makes it harder to test 64-bit +// values. +class ELFSymbolsToModuleTest64 : public ELFSymbolsToModuleTestFixture, + public TestWithParam { +public: + ELFSymbolsToModuleTest64() : ELFSymbolsToModuleTestFixture(GetParam(), 8) {} + + void AddElf64Sym(const string& name, uint64_t value, + uint64_t size, unsigned info, uint16_t shndx) { + section + .D32(table.Add(name)) + .D8(info) + .D8(0) // other + .D16(shndx) + .D64(value) + .D64(size); + } +}; + +TEST_P(ELFSymbolsToModuleTest64, NoFuncs) { + ProcessSection(); + + ASSERT_EQ((size_t)0, externs.size()); +} + +TEST_P(ELFSymbolsToModuleTest64, OneFunc) { + const string kFuncName = "superfunc"; + const uint64_t kFuncAddr = 0x1000200030004000ULL; + const uint64_t kFuncSize = 0x1000; + + AddElf64Sym(kFuncName, kFuncAddr, kFuncSize, + ELF64_ST_INFO(STB_GLOBAL, STT_FUNC), + // Doesn't really matter, just can't be SHN_UNDEF. + SHN_UNDEF + 1); + + ProcessSection(); + + ASSERT_EQ((size_t)1, externs.size()); + Module::Extern *extern1 = externs[0]; + EXPECT_EQ(kFuncName, extern1->name); + EXPECT_EQ((Module::Address)kFuncAddr, extern1->address); +} + +TEST_P(ELFSymbolsToModuleTest64, MultipleFuncs) { + const string kFuncName1 = "superfunc"; + const uint64_t kFuncAddr1 = 0x1000100010001000ULL; + const uint64_t kFuncSize1 = 0x1000; + const string kFuncName2 = "awesomefunc"; + const uint64_t kFuncAddr2 = 0x2000200020002000ULL; + const uint64_t kFuncSize2 = 0x2f00; + const string kFuncName3 = "megafunc"; + const uint64_t kFuncAddr3 = 0x3000300030003000ULL; + const uint64_t kFuncSize3 = 0x3c00; + + AddElf64Sym(kFuncName1, kFuncAddr1, kFuncSize1, + ELF64_ST_INFO(STB_GLOBAL, STT_FUNC), + // Doesn't really matter, just can't be SHN_UNDEF. + SHN_UNDEF + 1); + AddElf64Sym(kFuncName2, kFuncAddr2, kFuncSize2, + ELF64_ST_INFO(STB_LOCAL, STT_FUNC), + // Doesn't really matter, just can't be SHN_UNDEF. + SHN_UNDEF + 2); + AddElf64Sym(kFuncName3, kFuncAddr3, kFuncSize3, + ELF64_ST_INFO(STB_LOCAL, STT_FUNC), + // Doesn't really matter, just can't be SHN_UNDEF. + SHN_UNDEF + 3); + + ProcessSection(); + + ASSERT_EQ((size_t)3, externs.size()); + Module::Extern *extern1 = externs[0]; + EXPECT_EQ(kFuncName1, extern1->name); + EXPECT_EQ((Module::Address)kFuncAddr1, extern1->address); + Module::Extern *extern2 = externs[1]; + EXPECT_EQ(kFuncName2, extern2->name); + EXPECT_EQ((Module::Address)kFuncAddr2, extern2->address); + Module::Extern *extern3 = externs[2]; + EXPECT_EQ(kFuncName3, extern3->name); + EXPECT_EQ((Module::Address)kFuncAddr3, extern3->address); +} + +TEST_P(ELFSymbolsToModuleTest64, SkipStuff) { + const string kFuncName = "superfunc"; + const uint64_t kFuncAddr = 0x1000100010001000ULL; + const uint64_t kFuncSize = 0x1000; + + // Should skip functions in SHN_UNDEF + AddElf64Sym("skipme", 0xFFFF, 0x10, + ELF64_ST_INFO(STB_GLOBAL, STT_FUNC), + SHN_UNDEF); + AddElf64Sym(kFuncName, kFuncAddr, kFuncSize, + ELF64_ST_INFO(STB_GLOBAL, STT_FUNC), + // Doesn't really matter, just can't be SHN_UNDEF. + SHN_UNDEF + 1); + // Should skip non-STT_FUNC entries. 
+ AddElf64Sym("skipmetoo", 0xAAAA, 0x10, + ELF64_ST_INFO(STB_GLOBAL, STT_FILE), + SHN_UNDEF + 1); + + ProcessSection(); + + ASSERT_EQ((size_t)1, externs.size()); + Module::Extern *extern1 = externs[0]; + EXPECT_EQ(kFuncName, extern1->name); + EXPECT_EQ((Module::Address)kFuncAddr, extern1->address); +} + +// Run all the 64-bit tests with both endianness +INSTANTIATE_TEST_CASE_P(Endian, + ELFSymbolsToModuleTest64, + ::testing::Values(kLittleEndian, kBigEndian)); diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/elfutils-inl.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elfutils-inl.h new file mode 100644 index 0000000000..e56b37a9f5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elfutils-inl.h @@ -0,0 +1,74 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +#ifndef COMMON_LINUX_ELFUTILS_INL_H__ +#define COMMON_LINUX_ELFUTILS_INL_H__ + +#include "common/linux/linux_libc_support.h" +#include "elfutils.h" + +namespace google_breakpad { + +template +const T* GetOffset(const typename ElfClass::Ehdr* elf_header, + typename ElfClass::Off offset) { + return reinterpret_cast(reinterpret_cast(elf_header) + + offset); +} + +template +const typename ElfClass::Shdr* FindElfSectionByName( + const char* name, + typename ElfClass::Word section_type, + const typename ElfClass::Shdr* sections, + const char* section_names, + const char* names_end, + int nsection) { + assert(name != NULL); + assert(sections != NULL); + assert(nsection > 0); + + int name_len = my_strlen(name); + if (name_len == 0) + return NULL; + + for (int i = 0; i < nsection; ++i) { + const char* section_name = section_names + sections[i].sh_name; + if (sections[i].sh_type == section_type && + names_end - section_name >= name_len + 1 && + my_strcmp(name, section_name) == 0) { + return sections + i; + } + } + return NULL; +} + +} // namespace google_breakpad + +#endif // COMMON_LINUX_ELFUTILS_INL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/elfutils.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elfutils.cc new file mode 100644 index 0000000000..a79391c133 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elfutils.cc @@ -0,0 +1,194 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +#include "common/linux/elfutils.h" + +#include +#include + +#include "common/linux/linux_libc_support.h" +#include "common/linux/elfutils-inl.h" + +namespace google_breakpad { + +namespace { + +template +void FindElfClassSection(const char *elf_base, + const char *section_name, + typename ElfClass::Word section_type, + const void **section_start, + size_t *section_size) { + typedef typename ElfClass::Ehdr Ehdr; + typedef typename ElfClass::Shdr Shdr; + + assert(elf_base); + assert(section_start); + assert(section_size); + + assert(my_strncmp(elf_base, ELFMAG, SELFMAG) == 0); + + const Ehdr* elf_header = reinterpret_cast(elf_base); + assert(elf_header->e_ident[EI_CLASS] == ElfClass::kClass); + + const Shdr* sections = + GetOffset(elf_header, elf_header->e_shoff); + const Shdr* section_names = sections + elf_header->e_shstrndx; + const char* names = + GetOffset(elf_header, section_names->sh_offset); + const char *names_end = names + section_names->sh_size; + + const Shdr* section = + FindElfSectionByName(section_name, section_type, + sections, names, names_end, + elf_header->e_shnum); + + if (section != NULL && section->sh_size > 0) { + *section_start = elf_base + section->sh_offset; + *section_size = section->sh_size; + } +} + +template +void FindElfClassSegment(const char *elf_base, + typename ElfClass::Word segment_type, + const void **segment_start, + size_t *segment_size) { + typedef typename ElfClass::Ehdr Ehdr; + typedef typename ElfClass::Phdr Phdr; + + assert(elf_base); + assert(segment_start); + assert(segment_size); + + assert(my_strncmp(elf_base, ELFMAG, SELFMAG) == 0); + + const Ehdr* elf_header = reinterpret_cast(elf_base); + assert(elf_header->e_ident[EI_CLASS] == ElfClass::kClass); + + const Phdr* phdrs = + GetOffset(elf_header, elf_header->e_phoff); + + for (int i = 0; i < elf_header->e_phnum; ++i) { + if (phdrs[i].p_type == segment_type) { + *segment_start = elf_base + phdrs[i].p_offset; + *segment_size = phdrs[i].p_filesz; + return; + } + } +} + +} // namespace + +bool IsValidElf(const void* elf_base) { + return my_strncmp(reinterpret_cast(elf_base), + ELFMAG, SELFMAG) == 0; +} + +int ElfClass(const void* elf_base) { + const ElfW(Ehdr)* elf_header = + reinterpret_cast(elf_base); + + return elf_header->e_ident[EI_CLASS]; +} + +bool FindElfSection(const void *elf_mapped_base, + const char *section_name, + uint32_t section_type, + const void **section_start, + size_t *section_size, + int *elfclass) { + assert(elf_mapped_base); + assert(section_start); + assert(section_size); + + *section_start = NULL; + *section_size = 0; + + if (!IsValidElf(elf_mapped_base)) + return false; + + int cls = ElfClass(elf_mapped_base); + if (elfclass) { + *elfclass = cls; + } + + const char* elf_base = + static_cast(elf_mapped_base); + + if (cls == ELFCLASS32) { + FindElfClassSection(elf_base, section_name, section_type, + section_start, section_size); + return *section_start != NULL; + } else if (cls == ELFCLASS64) { + FindElfClassSection(elf_base, section_name, section_type, + section_start, section_size); + return *section_start != NULL; + } + + return false; +} + +bool FindElfSegment(const void *elf_mapped_base, + uint32_t segment_type, + const void **segment_start, + size_t *segment_size, + int *elfclass) { + assert(elf_mapped_base); + assert(segment_start); + assert(segment_size); + + *segment_start = NULL; + *segment_size = 0; + + if (!IsValidElf(elf_mapped_base)) + return false; + + int cls = ElfClass(elf_mapped_base); + if (elfclass) { + *elfclass = cls; + } + + const char* elf_base = 
+ static_cast(elf_mapped_base); + + if (cls == ELFCLASS32) { + FindElfClassSegment(elf_base, segment_type, + segment_start, segment_size); + return *segment_start != NULL; + } else if (cls == ELFCLASS64) { + FindElfClassSegment(elf_base, segment_type, + segment_start, segment_size); + return *segment_start != NULL; + } + + return false; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/elfutils.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elfutils.h new file mode 100644 index 0000000000..dccdc235e2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/elfutils.h @@ -0,0 +1,118 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// elfutils.h: Utilities for dealing with ELF files. +// + +#ifndef COMMON_LINUX_ELFUTILS_H_ +#define COMMON_LINUX_ELFUTILS_H_ + +#include +#include +#include + +namespace google_breakpad { + +// Traits classes so consumers can write templatized code to deal +// with specific ELF bits. +struct ElfClass32 { + typedef Elf32_Addr Addr; + typedef Elf32_Ehdr Ehdr; + typedef Elf32_Nhdr Nhdr; + typedef Elf32_Phdr Phdr; + typedef Elf32_Shdr Shdr; + typedef Elf32_Half Half; + typedef Elf32_Off Off; + typedef Elf32_Word Word; + static const int kClass = ELFCLASS32; + static const size_t kAddrSize = sizeof(Elf32_Addr); +}; + +struct ElfClass64 { + typedef Elf64_Addr Addr; + typedef Elf64_Ehdr Ehdr; + typedef Elf64_Nhdr Nhdr; + typedef Elf64_Phdr Phdr; + typedef Elf64_Shdr Shdr; + typedef Elf64_Half Half; + typedef Elf64_Off Off; + typedef Elf64_Word Word; + static const int kClass = ELFCLASS64; + static const size_t kAddrSize = sizeof(Elf64_Addr); +}; + +bool IsValidElf(const void* elf_header); +int ElfClass(const void* elf_base); + +// Attempt to find a section named |section_name| of type |section_type| +// in the ELF binary data at |elf_mapped_base|. 
On success, returns true +// and sets |*section_start| to point to the start of the section data, +// and |*section_size| to the size of the section's data. If |elfclass| +// is not NULL, set |*elfclass| to the ELF file class. +bool FindElfSection(const void *elf_mapped_base, + const char *section_name, + uint32_t section_type, + const void **section_start, + size_t *section_size, + int *elfclass); + +// Internal helper method, exposed for convenience for callers +// that already have more info. +template +const typename ElfClass::Shdr* +FindElfSectionByName(const char* name, + typename ElfClass::Word section_type, + const typename ElfClass::Shdr* sections, + const char* section_names, + const char* names_end, + int nsection); + +// Attempt to find the first segment of type |segment_type| in the ELF +// binary data at |elf_mapped_base|. On success, returns true and sets +// |*segment_start| to point to the start of the segment data, and +// and |*segment_size| to the size of the segment's data. If |elfclass| +// is not NULL, set |*elfclass| to the ELF file class. +bool FindElfSegment(const void *elf_mapped_base, + uint32_t segment_type, + const void **segment_start, + size_t *segment_size, + int *elfclass); + +// Convert an offset from an Elf header into a pointer to the mapped +// address in the current process. Takes an extra template parameter +// to specify the return type to avoid having to dynamic_cast the +// result. +template +const T* +GetOffset(const typename ElfClass::Ehdr* elf_header, + typename ElfClass::Off offset); + +} // namespace google_breakpad + +#endif // COMMON_LINUX_ELFUTILS_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/file_id.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/file_id.cc new file mode 100644 index 0000000000..00b37313af --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/file_id.cc @@ -0,0 +1,191 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
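// Usage sketch (illustrative, not from the Breakpad sources): how the
// FindElfSection()/FindElfSegment() helpers declared in elfutils.h above are
// typically called on an ELF image that is already mapped into memory.
// "mapped_base" is a placeholder for a pointer to the start of the mapping.
//
//   const void* section_start = NULL;
//   size_t section_size = 0;
//   int elfclass = 0;
//   if (google_breakpad::FindElfSection(mapped_base, ".text", SHT_PROGBITS,
//                                       &section_start, &section_size,
//                                       &elfclass)) {
//     // section_start/section_size now describe the .text data in the file.
//   }
//
//   const void* segment_start = NULL;
//   size_t segment_size = 0;
//   if (google_breakpad::FindElfSegment(mapped_base, PT_NOTE,
//                                       &segment_start, &segment_size,
//                                       NULL)) {
//     // segment_start points at the first PT_NOTE segment's file data.
//   }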
+// +// file_id.cc: Return a unique identifier for a file +// +// See file_id.h for documentation +// + +#include "common/linux/file_id.h" + +#include +#include +#include + +#include + +#include "common/linux/elf_gnu_compat.h" +#include "common/linux/elfutils.h" +#include "common/linux/linux_libc_support.h" +#include "common/linux/memory_mapped_file.h" +#include "third_party/lss/linux_syscall_support.h" + +namespace google_breakpad { + +FileID::FileID(const char* path) : path_(path) {} + +// ELF note name and desc are 32-bits word padded. +#define NOTE_PADDING(a) ((a + 3) & ~3) + +// These functions are also used inside the crashed process, so be safe +// and use the syscall/libc wrappers instead of direct syscalls or libc. + +template +static bool ElfClassBuildIDNoteIdentifier(const void *section, size_t length, + uint8_t identifier[kMDGUIDSize]) { + typedef typename ElfClass::Nhdr Nhdr; + + const void* section_end = reinterpret_cast(section) + length; + const Nhdr* note_header = reinterpret_cast(section); + while (reinterpret_cast(note_header) < section_end) { + if (note_header->n_type == NT_GNU_BUILD_ID) + break; + note_header = reinterpret_cast( + reinterpret_cast(note_header) + sizeof(Nhdr) + + NOTE_PADDING(note_header->n_namesz) + + NOTE_PADDING(note_header->n_descsz)); + } + if (reinterpret_cast(note_header) >= section_end || + note_header->n_descsz == 0) { + return false; + } + + const char* build_id = reinterpret_cast(note_header) + + sizeof(Nhdr) + NOTE_PADDING(note_header->n_namesz); + // Copy as many bits of the build ID as will fit + // into the GUID space. + my_memset(identifier, 0, kMDGUIDSize); + memcpy(identifier, build_id, + std::min(kMDGUIDSize, (size_t)note_header->n_descsz)); + + return true; +} + +// Attempt to locate a .note.gnu.build-id section in an ELF binary +// and copy as many bytes of it as will fit into |identifier|. +static bool FindElfBuildIDNote(const void *elf_mapped_base, + uint8_t identifier[kMDGUIDSize]) { + void* note_section; + size_t note_size; + int elfclass; + if ((!FindElfSegment(elf_mapped_base, PT_NOTE, + (const void**)¬e_section, ¬e_size, &elfclass) || + note_size == 0) && + (!FindElfSection(elf_mapped_base, ".note.gnu.build-id", SHT_NOTE, + (const void**)¬e_section, ¬e_size, &elfclass) || + note_size == 0)) { + return false; + } + + if (elfclass == ELFCLASS32) { + return ElfClassBuildIDNoteIdentifier(note_section, note_size, + identifier); + } else if (elfclass == ELFCLASS64) { + return ElfClassBuildIDNoteIdentifier(note_section, note_size, + identifier); + } + + return false; +} + +// Attempt to locate the .text section of an ELF binary and generate +// a simple hash by XORing the first page worth of bytes into |identifier|. +static bool HashElfTextSection(const void *elf_mapped_base, + uint8_t identifier[kMDGUIDSize]) { + void* text_section; + size_t text_size; + if (!FindElfSection(elf_mapped_base, ".text", SHT_PROGBITS, + (const void**)&text_section, &text_size, NULL) || + text_size == 0) { + return false; + } + + my_memset(identifier, 0, kMDGUIDSize); + const uint8_t* ptr = reinterpret_cast(text_section); + const uint8_t* ptr_end = ptr + std::min(text_size, static_cast(4096)); + while (ptr < ptr_end) { + for (unsigned i = 0; i < kMDGUIDSize; i++) + identifier[i] ^= ptr[i]; + ptr += kMDGUIDSize; + } + return true; +} + +// static +bool FileID::ElfFileIdentifierFromMappedFile(const void* base, + uint8_t identifier[kMDGUIDSize]) { + // Look for a build id note first. 
+ if (FindElfBuildIDNote(base, identifier)) + return true; + + // Fall back on hashing the first page of the text section. + return HashElfTextSection(base, identifier); +} + +bool FileID::ElfFileIdentifier(uint8_t identifier[kMDGUIDSize]) { + MemoryMappedFile mapped_file(path_.c_str(), 0); + if (!mapped_file.data()) // Should probably check if size >= ElfW(Ehdr)? + return false; + + return ElfFileIdentifierFromMappedFile(mapped_file.data(), identifier); +} + +// static +void FileID::ConvertIdentifierToString(const uint8_t identifier[kMDGUIDSize], + char* buffer, int buffer_length) { + uint8_t identifier_swapped[kMDGUIDSize]; + + // Endian-ness swap to match dump processor expectation. + memcpy(identifier_swapped, identifier, kMDGUIDSize); + uint32_t* data1 = reinterpret_cast(identifier_swapped); + *data1 = htonl(*data1); + uint16_t* data2 = reinterpret_cast(identifier_swapped + 4); + *data2 = htons(*data2); + uint16_t* data3 = reinterpret_cast(identifier_swapped + 6); + *data3 = htons(*data3); + + int buffer_idx = 0; + for (unsigned int idx = 0; + (buffer_idx < buffer_length) && (idx < kMDGUIDSize); + ++idx) { + int hi = (identifier_swapped[idx] >> 4) & 0x0F; + int lo = (identifier_swapped[idx]) & 0x0F; + + if (idx == 4 || idx == 6 || idx == 8 || idx == 10) + buffer[buffer_idx++] = '-'; + + buffer[buffer_idx++] = (hi >= 10) ? 'A' + hi - 10 : '0' + hi; + buffer[buffer_idx++] = (lo >= 10) ? 'A' + lo - 10 : '0' + lo; + } + + // NULL terminate + buffer[(buffer_idx < buffer_length) ? buffer_idx : buffer_idx - 1] = 0; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/file_id.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/file_id.h new file mode 100644 index 0000000000..2642722a63 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/file_id.h @@ -0,0 +1,78 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
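// Layout note (illustrative, not from the Breakpad sources): the ELF notes
// walked by FindElfBuildIDNote() in file_id.cc above begin with an Nhdr of
// three 32-bit words { n_namesz, n_descsz, n_type }, followed by the name and
// then the descriptor, each rounded up to a 4-byte boundary (NOTE_PADDING()).
// For a GNU build-id note, n_type is NT_GNU_BUILD_ID, the name is "GNU\0", and
// the descriptor holds the build-id bytes (often a 20-byte SHA-1); only the
// first kMDGUIDSize (16) bytes are copied into the identifier.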
+// +// file_id.h: Return a unique identifier for a file +// + +#ifndef COMMON_LINUX_FILE_ID_H__ +#define COMMON_LINUX_FILE_ID_H__ + +#include +#include + +#include "common/linux/guid_creator.h" + +namespace google_breakpad { + +static const size_t kMDGUIDSize = sizeof(MDGUID); + +class FileID { + public: + explicit FileID(const char* path); + ~FileID() {} + + // Load the identifier for the elf file path specified in the constructor into + // |identifier|. Return false if the identifier could not be created for the + // file. + // The current implementation will look for a .note.gnu.build-id + // section and use that as the file id, otherwise it falls back to + // XORing the first 4096 bytes of the .text section to generate an identifier. + bool ElfFileIdentifier(uint8_t identifier[kMDGUIDSize]); + + // Load the identifier for the elf file mapped into memory at |base| into + // |identifier|. Return false if the identifier could not be created for the + // file. + static bool ElfFileIdentifierFromMappedFile(const void* base, + uint8_t identifier[kMDGUIDSize]); + + // Convert the |identifier| data to a NULL terminated string. The string will + // be formatted as a UUID (e.g., 22F065BB-FC9C-49F7-80FE-26A7CEBD7BCE). + // The |buffer| should be at least 37 bytes long to receive all of the data + // and termination. Shorter buffers will contain truncated data. + static void ConvertIdentifierToString(const uint8_t identifier[kMDGUIDSize], + char* buffer, int buffer_length); + + private: + // Storage for the path specified + std::string path_; +}; + +} // namespace google_breakpad + +#endif // COMMON_LINUX_FILE_ID_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/file_id_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/file_id_unittest.cc new file mode 100644 index 0000000000..760eae8260 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/file_id_unittest.cc @@ -0,0 +1,263 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
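// Usage sketch (illustrative, not from the Breakpad sources): computing and
// formatting a module identifier with the FileID class declared in file_id.h
// above.  The binary path is a placeholder.
//
//   google_breakpad::FileID file_id("/path/to/some/binary");
//   uint8_t identifier[google_breakpad::kMDGUIDSize];
//   if (file_id.ElfFileIdentifier(identifier)) {
//     // 36 characters of UUID text plus the terminating NUL.
//     char identifier_string[37];
//     google_breakpad::FileID::ConvertIdentifierToString(
//         identifier, identifier_string, sizeof(identifier_string));
//     // identifier_string now holds something like
//     // "22F065BB-FC9C-49F7-80FE-26A7CEBD7BCE".
//   }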
+ +// Unit tests for FileID + +#include +#include + +#include + +#include "common/linux/elf_gnu_compat.h" +#include "common/linux/elfutils.h" +#include "common/linux/file_id.h" +#include "common/linux/safe_readlink.h" +#include "common/linux/synth_elf.h" +#include "common/test_assembler.h" +#include "common/tests/auto_tempdir.h" +#include "common/using_std_string.h" +#include "breakpad_googletest_includes.h" + +using namespace google_breakpad; +using google_breakpad::ElfClass32; +using google_breakpad::ElfClass64; +using google_breakpad::SafeReadLink; +using google_breakpad::synth_elf::ELF; +using google_breakpad::synth_elf::Notes; +using google_breakpad::test_assembler::kLittleEndian; +using google_breakpad::test_assembler::Section; +using ::testing::Types; + +namespace { + +// Simply calling Section::Append(size, byte) produces a uninteresting pattern +// that tends to get hashed to 0000...0000. This populates the section with +// data to produce better hashes. +void PopulateSection(Section* section, int size, int prime_number) { + for (int i = 0; i < size; i++) + section->Append(1, (i % prime_number) % 256); +} + +} // namespace + +#ifndef __ANDROID__ +// This test is disabled on Android: It will always fail, since there is no +// 'strip' binary installed on test devices. +TEST(FileIDStripTest, StripSelf) { + // Calculate the File ID of this binary using + // FileID::ElfFileIdentifier, then make a copy of this binary, + // strip it, and ensure that the result is the same. + char exe_name[PATH_MAX]; + ASSERT_TRUE(SafeReadLink("/proc/self/exe", exe_name)); + + // copy our binary to a temp file, and strip it + AutoTempDir temp_dir; + string templ = temp_dir.path() + "/file-id-unittest"; + char cmdline[4096]; + sprintf(cmdline, "cp \"%s\" \"%s\"", exe_name, templ.c_str()); + ASSERT_EQ(0, system(cmdline)) << "Failed to execute: " << cmdline; + sprintf(cmdline, "chmod u+w \"%s\"", templ.c_str()); + ASSERT_EQ(0, system(cmdline)) << "Failed to execute: " << cmdline; + sprintf(cmdline, "strip \"%s\"", templ.c_str()); + ASSERT_EQ(0, system(cmdline)) << "Failed to execute: " << cmdline; + + uint8_t identifier1[sizeof(MDGUID)]; + uint8_t identifier2[sizeof(MDGUID)]; + FileID fileid1(exe_name); + EXPECT_TRUE(fileid1.ElfFileIdentifier(identifier1)); + FileID fileid2(templ.c_str()); + EXPECT_TRUE(fileid2.ElfFileIdentifier(identifier2)); + char identifier_string1[37]; + char identifier_string2[37]; + FileID::ConvertIdentifierToString(identifier1, identifier_string1, + 37); + FileID::ConvertIdentifierToString(identifier2, identifier_string2, + 37); + EXPECT_STREQ(identifier_string1, identifier_string2); +} +#endif // !__ANDROID__ + +template +class FileIDTest : public testing::Test { +public: + void GetElfContents(ELF& elf) { + string contents; + ASSERT_TRUE(elf.GetContents(&contents)); + ASSERT_LT(0U, contents.size()); + + elfdata_v.clear(); + elfdata_v.insert(elfdata_v.begin(), contents.begin(), contents.end()); + elfdata = &elfdata_v[0]; + } + + vector elfdata_v; + uint8_t* elfdata; +}; + +typedef Types ElfClasses; + +TYPED_TEST_CASE(FileIDTest, ElfClasses); + +TYPED_TEST(FileIDTest, ElfClass) { + uint8_t identifier[sizeof(MDGUID)]; + const char expected_identifier_string[] = + "80808080-8080-0000-0000-008080808080"; + char identifier_string[sizeof(expected_identifier_string)]; + const size_t kTextSectionSize = 128; + + ELF elf(EM_386, TypeParam::kClass, kLittleEndian); + Section text(kLittleEndian); + for (size_t i = 0; i < kTextSectionSize; ++i) { + text.D8(i * 3); + } + elf.AddSection(".text", 
text, SHT_PROGBITS); + elf.Finish(); + this->GetElfContents(elf); + + EXPECT_TRUE(FileID::ElfFileIdentifierFromMappedFile(this->elfdata, + identifier)); + + FileID::ConvertIdentifierToString(identifier, identifier_string, + sizeof(identifier_string)); + EXPECT_STREQ(expected_identifier_string, identifier_string); +} + +TYPED_TEST(FileIDTest, BuildID) { + const uint8_t kExpectedIdentifier[sizeof(MDGUID)] = + {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F}; + char expected_identifier_string[] = + "00000000-0000-0000-0000-000000000000"; + FileID::ConvertIdentifierToString(kExpectedIdentifier, + expected_identifier_string, + sizeof(expected_identifier_string)); + + uint8_t identifier[sizeof(MDGUID)]; + char identifier_string[sizeof(expected_identifier_string)]; + + ELF elf(EM_386, TypeParam::kClass, kLittleEndian); + Section text(kLittleEndian); + text.Append(4096, 0); + elf.AddSection(".text", text, SHT_PROGBITS); + Notes notes(kLittleEndian); + notes.AddNote(NT_GNU_BUILD_ID, "GNU", kExpectedIdentifier, + sizeof(kExpectedIdentifier)); + elf.AddSection(".note.gnu.build-id", notes, SHT_NOTE); + elf.Finish(); + this->GetElfContents(elf); + + EXPECT_TRUE(FileID::ElfFileIdentifierFromMappedFile(this->elfdata, + identifier)); + + FileID::ConvertIdentifierToString(identifier, identifier_string, + sizeof(identifier_string)); + EXPECT_STREQ(expected_identifier_string, identifier_string); +} + +TYPED_TEST(FileIDTest, BuildIDPH) { + const uint8_t kExpectedIdentifier[sizeof(MDGUID)] = + {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F}; + char expected_identifier_string[] = + "00000000-0000-0000-0000-000000000000"; + FileID::ConvertIdentifierToString(kExpectedIdentifier, + expected_identifier_string, + sizeof(expected_identifier_string)); + + uint8_t identifier[sizeof(MDGUID)]; + char identifier_string[sizeof(expected_identifier_string)]; + + ELF elf(EM_386, TypeParam::kClass, kLittleEndian); + Section text(kLittleEndian); + text.Append(4096, 0); + elf.AddSection(".text", text, SHT_PROGBITS); + Notes notes(kLittleEndian); + notes.AddNote(0, "Linux", + reinterpret_cast("\0x42\0x02\0\0"), 4); + notes.AddNote(NT_GNU_BUILD_ID, "GNU", kExpectedIdentifier, + sizeof(kExpectedIdentifier)); + int note_idx = elf.AddSection(".note", notes, SHT_NOTE); + elf.AddSegment(note_idx, note_idx, PT_NOTE); + elf.Finish(); + this->GetElfContents(elf); + + EXPECT_TRUE(FileID::ElfFileIdentifierFromMappedFile(this->elfdata, + identifier)); + + FileID::ConvertIdentifierToString(identifier, identifier_string, + sizeof(identifier_string)); + EXPECT_STREQ(expected_identifier_string, identifier_string); +} + +// Test to make sure two files with different text sections produce +// different hashes when not using a build id. 
+TYPED_TEST(FileIDTest, UniqueHashes) { + char identifier_string_1[] = + "00000000-0000-0000-0000-000000000000"; + char identifier_string_2[] = + "00000000-0000-0000-0000-000000000000"; + uint8_t identifier_1[sizeof(MDGUID)]; + uint8_t identifier_2[sizeof(MDGUID)]; + + { + ELF elf1(EM_386, TypeParam::kClass, kLittleEndian); + Section foo_1(kLittleEndian); + PopulateSection(&foo_1, 32, 5); + elf1.AddSection(".foo", foo_1, SHT_PROGBITS); + Section text_1(kLittleEndian); + PopulateSection(&text_1, 4096, 17); + elf1.AddSection(".text", text_1, SHT_PROGBITS); + elf1.Finish(); + this->GetElfContents(elf1); + } + + EXPECT_TRUE(FileID::ElfFileIdentifierFromMappedFile(this->elfdata, + identifier_1)); + FileID::ConvertIdentifierToString(identifier_1, identifier_string_1, + sizeof(identifier_string_1)); + + { + ELF elf2(EM_386, TypeParam::kClass, kLittleEndian); + Section text_2(kLittleEndian); + Section foo_2(kLittleEndian); + PopulateSection(&foo_2, 32, 5); + elf2.AddSection(".foo", foo_2, SHT_PROGBITS); + PopulateSection(&text_2, 4096, 31); + elf2.AddSection(".text", text_2, SHT_PROGBITS); + elf2.Finish(); + this->GetElfContents(elf2); + } + + EXPECT_TRUE(FileID::ElfFileIdentifierFromMappedFile(this->elfdata, + identifier_2)); + FileID::ConvertIdentifierToString(identifier_2, identifier_string_2, + sizeof(identifier_string_2)); + + EXPECT_STRNE(identifier_string_1, identifier_string_2); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/google_crashdump_uploader.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/google_crashdump_uploader.cc new file mode 100644 index 0000000000..6d86fb3697 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/google_crashdump_uploader.cc @@ -0,0 +1,202 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + +#include "common/linux/google_crashdump_uploader.h" + +#include +#include +#include + +#include + +#include "common/using_std_string.h" + +namespace google_breakpad { + +GoogleCrashdumpUploader::GoogleCrashdumpUploader(const string& product, + const string& version, + const string& guid, + const string& ptime, + const string& ctime, + const string& email, + const string& comments, + const string& minidump_pathname, + const string& crash_server, + const string& proxy_host, + const string& proxy_userpassword) { + LibcurlWrapper* http_layer = new LibcurlWrapper(); + Init(product, + version, + guid, + ptime, + ctime, + email, + comments, + minidump_pathname, + crash_server, + proxy_host, + proxy_userpassword, + http_layer); +} + +GoogleCrashdumpUploader::GoogleCrashdumpUploader(const string& product, + const string& version, + const string& guid, + const string& ptime, + const string& ctime, + const string& email, + const string& comments, + const string& minidump_pathname, + const string& crash_server, + const string& proxy_host, + const string& proxy_userpassword, + LibcurlWrapper* http_layer) { + Init(product, + version, + guid, + ptime, + ctime, + email, + comments, + minidump_pathname, + crash_server, + proxy_host, + proxy_userpassword, + http_layer); +} + +void GoogleCrashdumpUploader::Init(const string& product, + const string& version, + const string& guid, + const string& ptime, + const string& ctime, + const string& email, + const string& comments, + const string& minidump_pathname, + const string& crash_server, + const string& proxy_host, + const string& proxy_userpassword, + LibcurlWrapper* http_layer) { + product_ = product; + version_ = version; + guid_ = guid; + ptime_ = ptime; + ctime_ = ctime; + email_ = email; + comments_ = comments; + http_layer_.reset(http_layer); + + crash_server_ = crash_server; + proxy_host_ = proxy_host; + proxy_userpassword_ = proxy_userpassword; + minidump_pathname_ = minidump_pathname; + std::cout << "Uploader initializing"; + std::cout << "\tProduct: " << product_; + std::cout << "\tVersion: " << version_; + std::cout << "\tGUID: " << guid_; + if (!ptime_.empty()) { + std::cout << "\tProcess uptime: " << ptime_; + } + if (!ctime_.empty()) { + std::cout << "\tCumulative Process uptime: " << ctime_; + } + if (!email_.empty()) { + std::cout << "\tEmail: " << email_; + } + if (!comments_.empty()) { + std::cout << "\tComments: " << comments_; + } +} + +bool GoogleCrashdumpUploader::CheckRequiredParametersArePresent() { + string error_text; + if (product_.empty()) { + error_text.append("\nProduct name must be specified."); + } + + if (version_.empty()) { + error_text.append("\nProduct version must be specified."); + } + + if (guid_.empty()) { + error_text.append("\nClient ID must be specified."); + } + + if (minidump_pathname_.empty()) { + error_text.append("\nMinidump pathname must be specified."); + } + + if (!error_text.empty()) { + std::cout << error_text; + return false; + } + return true; + +} + +bool GoogleCrashdumpUploader::Upload(int* http_status_code, + string* http_response_header, + string* http_response_body) { + bool ok = http_layer_->Init(); + if (!ok) { + std::cout << "http layer init failed"; + return ok; + } + + if (!CheckRequiredParametersArePresent()) { + return false; + } + + struct stat st; + int err = stat(minidump_pathname_.c_str(), &st); + if (err) { + std::cout << minidump_pathname_ << " could not be found"; + return false; + } + + parameters_["prod"] = product_; + parameters_["ver"] = version_; + parameters_["guid"] = 
guid_; + parameters_["ptime"] = ptime_; + parameters_["ctime"] = ctime_; + parameters_["email"] = email_; + parameters_["comments_"] = comments_; + if (!http_layer_->AddFile(minidump_pathname_, + "upload_file_minidump")) { + return false; + } + std::cout << "Sending request to " << crash_server_; + return http_layer_->SendRequest(crash_server_, + parameters_, + http_status_code, + http_response_header, + http_response_body); +} +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/google_crashdump_uploader.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/google_crashdump_uploader.h new file mode 100644 index 0000000000..a2d0575b5e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/google_crashdump_uploader.h @@ -0,0 +1,107 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
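// Usage sketch (illustrative, not from the Breakpad sources): a minimal upload
// with the GoogleCrashdumpUploader class declared below.  Every argument value
// here is a placeholder.
//
//   google_breakpad::GoogleCrashdumpUploader uploader(
//       "MyProduct",                  // product
//       "1.0",                        // version
//       "0123-4567",                  // client guid
//       "",                           // process uptime
//       "",                           // cumulative process uptime
//       "user@example.com",           // email
//       "crashed while idle",         // comments
//       "/tmp/some-minidump.dmp",     // minidump pathname
//       "https://crash.example.com",  // crash server URL
//       "",                           // proxy host
//       "");                          // proxy user:password
//   int http_status = 0;
//   std::string response_header, response_body;
//   bool ok = uploader.Upload(&http_status, &response_header, &response_body);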
+ + +#ifndef COMMON_LINUX_GOOGLE_CRASHDUMP_UPLOADER_H_ +#define COMMON_LINUX_GOOGLE_CRASHDUMP_UPLOADER_H_ + +#include +#include + +#include "common/linux/libcurl_wrapper.h" +#include "common/scoped_ptr.h" +#include "common/using_std_string.h" + +namespace google_breakpad { + +class GoogleCrashdumpUploader { + public: + GoogleCrashdumpUploader(const string& product, + const string& version, + const string& guid, + const string& ptime, + const string& ctime, + const string& email, + const string& comments, + const string& minidump_pathname, + const string& crash_server, + const string& proxy_host, + const string& proxy_userpassword); + + GoogleCrashdumpUploader(const string& product, + const string& version, + const string& guid, + const string& ptime, + const string& ctime, + const string& email, + const string& comments, + const string& minidump_pathname, + const string& crash_server, + const string& proxy_host, + const string& proxy_userpassword, + LibcurlWrapper* http_layer); + + void Init(const string& product, + const string& version, + const string& guid, + const string& ptime, + const string& ctime, + const string& email, + const string& comments, + const string& minidump_pathname, + const string& crash_server, + const string& proxy_host, + const string& proxy_userpassword, + LibcurlWrapper* http_layer); + bool Upload(int* http_status_code, + string* http_response_header, + string* http_response_body); + + private: + bool CheckRequiredParametersArePresent(); + + scoped_ptr http_layer_; + string product_; + string version_; + string guid_; + string ptime_; + string ctime_; + string email_; + string comments_; + string minidump_pathname_; + + string crash_server_; + string proxy_host_; + string proxy_userpassword_; + + std::map parameters_; +}; +} + +#endif // COMMON_LINUX_GOOGLE_CRASHDUMP_UPLOADER_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/google_crashdump_uploader_test.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/google_crashdump_uploader_test.cc new file mode 100644 index 0000000000..e94c5d62a2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/google_crashdump_uploader_test.cc @@ -0,0 +1,170 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Unit test for crash dump uploader. + +#include + +#include "common/linux/google_crashdump_uploader.h" +#include "breakpad_googletest_includes.h" +#include "common/using_std_string.h" + +namespace google_breakpad { + +using ::testing::Return; +using ::testing::_; + +class MockLibcurlWrapper : public LibcurlWrapper { + public: + MOCK_METHOD0(Init, bool()); + MOCK_METHOD2(SetProxy, bool(const string& proxy_host, + const string& proxy_userpwd)); + MOCK_METHOD2(AddFile, bool(const string& upload_file_path, + const string& basename)); + MOCK_METHOD5(SendRequest, + bool(const string& url, + const std::map& parameters, + int* http_status_code, + string* http_header_data, + string* http_response_data)); +}; + +class GoogleCrashdumpUploaderTest : public ::testing::Test { +}; + +TEST_F(GoogleCrashdumpUploaderTest, InitFailsCausesUploadFailure) { + MockLibcurlWrapper m; + EXPECT_CALL(m, Init()).Times(1).WillOnce(Return(false)); + GoogleCrashdumpUploader *uploader = new GoogleCrashdumpUploader("foobar", + "1.0", + "AAA-BBB", + "", + "", + "test@test.com", + "none", + "/tmp/foo.dmp", + "http://foo.com", + "", + "", + &m); + ASSERT_FALSE(uploader->Upload(NULL, NULL, NULL)); +} + +TEST_F(GoogleCrashdumpUploaderTest, TestSendRequestHappensWithValidParameters) { + // Create a temp file + char tempfn[80] = "/tmp/googletest-upload-XXXXXX"; + int fd = mkstemp(tempfn); + ASSERT_NE(fd, -1); + close(fd); + + MockLibcurlWrapper m; + EXPECT_CALL(m, Init()).Times(1).WillOnce(Return(true)); + EXPECT_CALL(m, AddFile(tempfn, _)).WillOnce(Return(true)); + EXPECT_CALL(m, + SendRequest("http://foo.com",_,_,_,_)).Times(1).WillOnce(Return(true)); + GoogleCrashdumpUploader *uploader = new GoogleCrashdumpUploader("foobar", + "1.0", + "AAA-BBB", + "", + "", + "test@test.com", + "none", + tempfn, + "http://foo.com", + "", + "", + &m); + ASSERT_TRUE(uploader->Upload(NULL, NULL, NULL)); +} + + +TEST_F(GoogleCrashdumpUploaderTest, InvalidPathname) { + MockLibcurlWrapper m; + EXPECT_CALL(m, Init()).Times(1).WillOnce(Return(true)); + EXPECT_CALL(m, SendRequest(_,_,_,_,_)).Times(0); + GoogleCrashdumpUploader *uploader = new GoogleCrashdumpUploader("foobar", + "1.0", + "AAA-BBB", + "", + "", + "test@test.com", + "none", + "/tmp/foo.dmp", + "http://foo.com", + "", + "", + &m); + ASSERT_FALSE(uploader->Upload(NULL, NULL, NULL)); +} + +TEST_F(GoogleCrashdumpUploaderTest, TestRequiredParametersMustBePresent) { + // Test with empty product name. + GoogleCrashdumpUploader uploader("", + "1.0", + "AAA-BBB", + "", + "", + "test@test.com", + "none", + "/tmp/foo.dmp", + "http://foo.com", + "", + ""); + ASSERT_FALSE(uploader.Upload(NULL, NULL, NULL)); + + // Test with empty product version. + GoogleCrashdumpUploader uploader1("product", + "", + "AAA-BBB", + "", + "", + "", + "", + "/tmp/foo.dmp", + "", + "", + ""); + + ASSERT_FALSE(uploader1.Upload(NULL, NULL, NULL)); + + // Test with empty client GUID. 
+ GoogleCrashdumpUploader uploader2("product", + "1.0", + "", + "", + "", + "", + "", + "/tmp/foo.dmp", + "", + "", + ""); + ASSERT_FALSE(uploader2.Upload(NULL, NULL, NULL)); +} +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/guid_creator.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/guid_creator.cc new file mode 100644 index 0000000000..bfb308ee20 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/guid_creator.cc @@ -0,0 +1,104 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include "common/linux/guid_creator.h" + +#include +#include +#include +#include +#include +#include + +// +// GUIDGenerator +// +// This class is used to generate random GUID. +// Currently use random number to generate a GUID since Linux has +// no native GUID generator. This should be OK since we don't expect +// crash to happen very offen. +// +class GUIDGenerator { + public: + static uint32_t BytesToUInt32(const uint8_t bytes[]) { + return ((uint32_t) bytes[0] + | ((uint32_t) bytes[1] << 8) + | ((uint32_t) bytes[2] << 16) + | ((uint32_t) bytes[3] << 24)); + } + + static void UInt32ToBytes(uint8_t bytes[], uint32_t n) { + bytes[0] = n & 0xff; + bytes[1] = (n >> 8) & 0xff; + bytes[2] = (n >> 16) & 0xff; + bytes[3] = (n >> 24) & 0xff; + } + + static bool CreateGUID(GUID *guid) { + InitOnce(); + guid->data1 = random(); + guid->data2 = (uint16_t)(random()); + guid->data3 = (uint16_t)(random()); + UInt32ToBytes(&guid->data4[0], random()); + UInt32ToBytes(&guid->data4[4], random()); + return true; + } + + private: + static void InitOnce() { + pthread_once(&once_control, &InitOnceImpl); + } + + static void InitOnceImpl() { + srandom(time(NULL)); + } + + static pthread_once_t once_control; +}; + +pthread_once_t GUIDGenerator::once_control = PTHREAD_ONCE_INIT; + +bool CreateGUID(GUID *guid) { + return GUIDGenerator::CreateGUID(guid); +} + +// Parse guid to string. 
+bool GUIDToString(const GUID *guid, char *buf, int buf_len) { + // Should allow more space the the max length of GUID. + assert(buf_len > kGUIDStringLength); + int num = snprintf(buf, buf_len, kGUIDFormatString, + guid->data1, guid->data2, guid->data3, + GUIDGenerator::BytesToUInt32(&(guid->data4[0])), + GUIDGenerator::BytesToUInt32(&(guid->data4[4]))); + if (num != kGUIDStringLength) + return false; + + buf[num] = '\0'; + return true; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/guid_creator.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/guid_creator.h new file mode 100644 index 0000000000..c86d856c4d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/guid_creator.h @@ -0,0 +1,48 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef COMMON_LINUX_GUID_CREATOR_H__ +#define COMMON_LINUX_GUID_CREATOR_H__ + +#include "google_breakpad/common/minidump_format.h" + +typedef MDGUID GUID; + +// Format string for parsing GUID. +#define kGUIDFormatString "%08x-%04x-%04x-%08x-%08x" +// Length of GUID string. Don't count the ending '\0'. +#define kGUIDStringLength 36 + +// Create a guid. +bool CreateGUID(GUID *guid); + +// Get the string from guid. +bool GUIDToString(const GUID *guid, char *buf, int buf_len); + +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/http_upload.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/http_upload.cc new file mode 100644 index 0000000000..d49f2276b6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/http_upload.cc @@ -0,0 +1,216 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "common/linux/http_upload.h"
+
+#include <assert.h>
+#include <dlfcn.h>
+#include "third_party/curl/curl.h"
+
+namespace {
+
+// Callback to get the response data from server.
+static size_t WriteCallback(void *ptr, size_t size,
+                            size_t nmemb, void *userp) {
+  if (!userp)
+    return 0;
+
+  string *response = reinterpret_cast<string *>(userp);
+  size_t real_size = size * nmemb;
+  response->append(reinterpret_cast<char *>(ptr), real_size);
+  return real_size;
+}
+
+}  // namespace
+
+namespace google_breakpad {
+
+static const char kUserAgent[] = "Breakpad/1.0 (Linux)";
+
+// static
+bool HTTPUpload::SendRequest(const string &url,
+                             const map<string, string> &parameters,
+                             const string &upload_file,
+                             const string &file_part_name,
+                             const string &proxy,
+                             const string &proxy_user_pwd,
+                             const string &ca_certificate_file,
+                             string *response_body,
+                             long *response_code,
+                             string *error_description) {
+  if (response_code != NULL)
+    *response_code = 0;
+
+  if (!CheckParameters(parameters))
+    return false;
+
+  // We may have been linked statically; if curl_easy_init is in the
+  // current binary, no need to search for a dynamic version.
+  void* curl_lib = dlopen(NULL, RTLD_NOW);
+  if (!curl_lib || dlsym(curl_lib, "curl_easy_init") == NULL) {
+    dlerror();  // Clear dlerror before attempting to open libraries.
+    dlclose(curl_lib);
+    curl_lib = NULL;
+  }
+  if (!curl_lib) {
+    curl_lib = dlopen("libcurl.so", RTLD_NOW);
+  }
+  if (!curl_lib) {
+    if (error_description != NULL)
+      *error_description = dlerror();
+    curl_lib = dlopen("libcurl.so.4", RTLD_NOW);
+  }
+  if (!curl_lib) {
+    // Debian gives libcurl a different name when it is built against GnuTLS
+    // instead of OpenSSL.
+    curl_lib = dlopen("libcurl-gnutls.so.4", RTLD_NOW);
+  }
+  if (!curl_lib) {
+    curl_lib = dlopen("libcurl.so.3", RTLD_NOW);
+  }
+  if (!curl_lib) {
+    return false;
+  }
+
+  CURL* (*curl_easy_init)(void);
+  *(void**) (&curl_easy_init) = dlsym(curl_lib, "curl_easy_init");
+  CURL *curl = (*curl_easy_init)();
+  if (error_description != NULL)
+    *error_description = "No Error";
+
+  if (!curl) {
+    dlclose(curl_lib);
+    return false;
+  }
+
+  CURLcode err_code = CURLE_OK;
+  CURLcode (*curl_easy_setopt)(CURL *, CURLoption, ...);
+  *(void**) (&curl_easy_setopt) = dlsym(curl_lib, "curl_easy_setopt");
+  (*curl_easy_setopt)(curl, CURLOPT_URL, url.c_str());
+  (*curl_easy_setopt)(curl, CURLOPT_USERAGENT, kUserAgent);
+  // Set proxy information if necessary.
+  if (!proxy.empty())
+    (*curl_easy_setopt)(curl, CURLOPT_PROXY, proxy.c_str());
+  if (!proxy_user_pwd.empty())
+    (*curl_easy_setopt)(curl, CURLOPT_PROXYUSERPWD, proxy_user_pwd.c_str());
+
+  if (!ca_certificate_file.empty())
+    (*curl_easy_setopt)(curl, CURLOPT_CAINFO, ca_certificate_file.c_str());
+
+  struct curl_httppost *formpost = NULL;
+  struct curl_httppost *lastptr = NULL;
+  // Add form data.
+  CURLFORMcode (*curl_formadd)(struct curl_httppost **, struct curl_httppost **, ...);
+  *(void**) (&curl_formadd) = dlsym(curl_lib, "curl_formadd");
+  map<string, string>::const_iterator iter = parameters.begin();
+  for (; iter != parameters.end(); ++iter)
+    (*curl_formadd)(&formpost, &lastptr,
+                    CURLFORM_COPYNAME, iter->first.c_str(),
+                    CURLFORM_COPYCONTENTS, iter->second.c_str(),
+                    CURLFORM_END);
+
+  // Add form file.
+  (*curl_formadd)(&formpost, &lastptr,
+                  CURLFORM_COPYNAME, file_part_name.c_str(),
+                  CURLFORM_FILE, upload_file.c_str(),
+                  CURLFORM_END);
+
+  (*curl_easy_setopt)(curl, CURLOPT_HTTPPOST, formpost);
+
+  // Disable 100-continue header.
+  struct curl_slist *headerlist = NULL;
+  char buf[] = "Expect:";
+  struct curl_slist* (*curl_slist_append)(struct curl_slist *, const char *);
+  *(void**) (&curl_slist_append) = dlsym(curl_lib, "curl_slist_append");
+  headerlist = (*curl_slist_append)(headerlist, buf);
+  (*curl_easy_setopt)(curl, CURLOPT_HTTPHEADER, headerlist);
+
+  if (response_body != NULL) {
+    (*curl_easy_setopt)(curl, CURLOPT_WRITEFUNCTION, WriteCallback);
+    (*curl_easy_setopt)(curl, CURLOPT_WRITEDATA,
+                        reinterpret_cast<void *>(response_body));
+  }
+
+  // Fail if 400+ is returned from the web server.
+  (*curl_easy_setopt)(curl, CURLOPT_FAILONERROR, 1);
+
+  CURLcode (*curl_easy_perform)(CURL *);
+  *(void**) (&curl_easy_perform) = dlsym(curl_lib, "curl_easy_perform");
+  err_code = (*curl_easy_perform)(curl);
+  if (response_code != NULL) {
+    CURLcode (*curl_easy_getinfo)(CURL *, CURLINFO, ...);
+    *(void**) (&curl_easy_getinfo) = dlsym(curl_lib, "curl_easy_getinfo");
+    (*curl_easy_getinfo)(curl, CURLINFO_RESPONSE_CODE, response_code);
+  }
+  const char* (*curl_easy_strerror)(CURLcode);
+  *(void**) (&curl_easy_strerror) = dlsym(curl_lib, "curl_easy_strerror");
+#ifndef NDEBUG
+  if (err_code != CURLE_OK)
+    fprintf(stderr, "Failed to send http request to %s, error: %s\n",
+            url.c_str(),
+            (*curl_easy_strerror)(err_code));
+#endif
+  if (error_description != NULL)
+    *error_description = (*curl_easy_strerror)(err_code);
+
+  void (*curl_easy_cleanup)(CURL *);
+  *(void**) (&curl_easy_cleanup) = dlsym(curl_lib, "curl_easy_cleanup");
+  (*curl_easy_cleanup)(curl);
+  if (formpost != NULL) {
+    void (*curl_formfree)(struct curl_httppost *);
+    *(void**) (&curl_formfree) = dlsym(curl_lib, "curl_formfree");
+    (*curl_formfree)(formpost);
+  }
+  if (headerlist != NULL) {
+    void (*curl_slist_free_all)(struct curl_slist *);
+    *(void**) (&curl_slist_free_all) = dlsym(curl_lib, "curl_slist_free_all");
+    (*curl_slist_free_all)(headerlist);
+  }
+  dlclose(curl_lib);
+  return err_code == CURLE_OK;
+}
+
+// static
+bool HTTPUpload::CheckParameters(const map<string, string> &parameters) {
+  for (map<string, string>::const_iterator pos = parameters.begin();
+       pos != parameters.end(); ++pos) {
+    const string &str = pos->first;
+    if (str.size() == 0)
+      return false;  // disallow empty parameter names
+    for (unsigned int i = 0; i < str.size(); ++i) {
+      int c = str[i];
+      if (c < 32 || c == '"' || c > 127) {
+        return false;
+      }
+    }
+  }
+  return true;
+}
+
+}  // namespace google_breakpad
diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/http_upload.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/http_upload.h
new file mode 100644
index 0000000000..6dd36ea04e
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/http_upload.h
@@ -0,0 +1,88 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// HTTPUpload provides a "nice" API to send a multipart HTTP(S) POST
+// request using libcurl. It currently supports requests that contain
+// a set of string parameters (key/value pairs), and a file to upload.
+
+#ifndef COMMON_LINUX_HTTP_UPLOAD_H__
+#define COMMON_LINUX_HTTP_UPLOAD_H__
+
+#include <map>
+#include <string>
+
+#include "common/using_std_string.h"
+
+namespace google_breakpad {
+
+using std::map;
+
+class HTTPUpload {
+ public:
+  // Sends the given set of parameters, along with the contents of
+  // upload_file, as a multipart POST request to the given URL.
+  // file_part_name contains the name of the file part of the request
+  // (i.e. it corresponds to the name= attribute on an <input type="file">.
+  // Parameter names must contain only printable ASCII characters,
+  // and may not contain a quote (") character.
+  // Only HTTP(S) URLs are currently supported. Returns true on success.
+  // If the request is successful and response_body is non-NULL,
+  // the response body will be returned in response_body.
+  // If response_code is non-NULL, it will be set to the HTTP response code
+  // received (or 0 if the request failed before getting an HTTP response).
+  // If the send fails, a description of the error will be
+  // returned in error_description.
+  static bool SendRequest(const string &url,
+                          const map<string, string> &parameters,
+                          const string &upload_file,
+                          const string &file_part_name,
+                          const string &proxy,
+                          const string &proxy_user_pwd,
+                          const string &ca_certificate_file,
+                          string *response_body,
+                          long *response_code,
+                          string *error_description);
+
+ private:
+  // Checks that the given list of parameters has only printable
+  // ASCII characters in the parameter name, and does not contain
+  // any quote (") characters. Returns true if so.
+  static bool CheckParameters(const map<string, string> &parameters);
+
+  // No instances of this class should be created.
+  // Disallow all constructors, destructors, and operator=.
+  HTTPUpload();
+  explicit HTTPUpload(const HTTPUpload &);
+  void operator=(const HTTPUpload &);
+  ~HTTPUpload();
+};
+
+}  // namespace google_breakpad
+
+#endif  // COMMON_LINUX_HTTP_UPLOAD_H__
diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/ignore_ret.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/ignore_ret.h
new file mode 100644
index 0000000000..f60384bbad
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/ignore_ret.h
@@ -0,0 +1,40 @@
+// Copyright (c) 2012 Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
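// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the vendored Breakpad sources): one
// plausible way a crash reporter could drive the HTTPUpload API declared
// above. The endpoint URL, parameter names, and minidump path are
// hypothetical placeholders.
#include <iostream>
#include <map>
#include <string>

#include "common/linux/http_upload.h"

int main() {
  std::map<std::string, std::string> parameters;
  parameters["prod"] = "NekoX";   // hypothetical product tag
  parameters["ver"] = "9.5.6";    // hypothetical version tag

  std::string response, error;
  long response_code = 0;
  // All libcurl access happens inside SendRequest via dlopen/dlsym, so this
  // program builds without linking against libcurl.
  bool ok = google_breakpad::HTTPUpload::SendRequest(
      "https://crash-reports.example.com/submit",  // hypothetical endpoint
      parameters,
      "/tmp/example.dmp",          // minidump previously written by the client
      "upload_file_minidump",      // file part name the server expects
      /*proxy=*/"", /*proxy_user_pwd=*/"", /*ca_certificate_file=*/"",
      &response, &response_code, &error);
  std::cout << (ok ? response : error) << " (HTTP " << response_code << ")\n";
  return ok ? 0 : 1;
}
// ---------------------------------------------------------------------------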
+// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef COMMON_LINUX_IGNORE_RET_H_ +#define COMMON_LINUX_IGNORE_RET_H_ + +// Some compilers are prone to warn about unused return values. In cases where +// either a) the call cannot fail, or b) there is nothing that can be done when +// the call fails, IGNORE_RET() can be used to mark the return code as ignored. +// This avoids spurious compiler warnings. + +#define IGNORE_RET(x) do { if (x); } while (0) + +#endif // COMMON_LINUX_IGNORE_RET_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/libcurl_wrapper.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/libcurl_wrapper.cc new file mode 100644 index 0000000000..fd4e34cd8f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/libcurl_wrapper.cc @@ -0,0 +1,241 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +#include + +#include +#include + +#include "common/linux/libcurl_wrapper.h" +#include "common/using_std_string.h" + +namespace google_breakpad { +LibcurlWrapper::LibcurlWrapper() + : init_ok_(false), + formpost_(NULL), + lastptr_(NULL), + headerlist_(NULL) { + curl_lib_ = dlopen("libcurl.so", RTLD_NOW); + if (!curl_lib_) { + curl_lib_ = dlopen("libcurl.so.4", RTLD_NOW); + } + if (!curl_lib_) { + curl_lib_ = dlopen("libcurl.so.3", RTLD_NOW); + } + if (!curl_lib_) { + std::cout << "Could not find libcurl via dlopen"; + return; + } + std::cout << "LibcurlWrapper init succeeded"; + init_ok_ = true; + return; +} + +LibcurlWrapper::~LibcurlWrapper() {} + +bool LibcurlWrapper::SetProxy(const string& proxy_host, + const string& proxy_userpwd) { + if (!init_ok_) { + return false; + } + // Set proxy information if necessary. + if (!proxy_host.empty()) { + (*easy_setopt_)(curl_, CURLOPT_PROXY, proxy_host.c_str()); + } else { + std::cout << "SetProxy called with empty proxy host."; + return false; + } + if (!proxy_userpwd.empty()) { + (*easy_setopt_)(curl_, CURLOPT_PROXYUSERPWD, proxy_userpwd.c_str()); + } else { + std::cout << "SetProxy called with empty proxy username/password."; + return false; + } + std::cout << "Set proxy host to " << proxy_host; + return true; +} + +bool LibcurlWrapper::AddFile(const string& upload_file_path, + const string& basename) { + if (!init_ok_) { + return false; + } + std::cout << "Adding " << upload_file_path << " to form upload."; + // Add form file. + (*formadd_)(&formpost_, &lastptr_, + CURLFORM_COPYNAME, basename.c_str(), + CURLFORM_FILE, upload_file_path.c_str(), + CURLFORM_END); + + return true; +} + +// Callback to get the response data from server. +static size_t WriteCallback(void *ptr, size_t size, + size_t nmemb, void *userp) { + if (!userp) + return 0; + + string *response = reinterpret_cast(userp); + size_t real_size = size * nmemb; + response->append(reinterpret_cast(ptr), real_size); + return real_size; +} + +bool LibcurlWrapper::SendRequest(const string& url, + const std::map& parameters, + int* http_status_code, + string* http_header_data, + string* http_response_data) { + (*easy_setopt_)(curl_, CURLOPT_URL, url.c_str()); + std::map::const_iterator iter = parameters.begin(); + for (; iter != parameters.end(); ++iter) + (*formadd_)(&formpost_, &lastptr_, + CURLFORM_COPYNAME, iter->first.c_str(), + CURLFORM_COPYCONTENTS, iter->second.c_str(), + CURLFORM_END); + + (*easy_setopt_)(curl_, CURLOPT_HTTPPOST, formpost_); + if (http_response_data != NULL) { + http_response_data->clear(); + (*easy_setopt_)(curl_, CURLOPT_WRITEFUNCTION, WriteCallback); + (*easy_setopt_)(curl_, CURLOPT_WRITEDATA, + reinterpret_cast(http_response_data)); + } + if (http_header_data != NULL) { + http_header_data->clear(); + (*easy_setopt_)(curl_, CURLOPT_HEADERFUNCTION, WriteCallback); + (*easy_setopt_)(curl_, CURLOPT_HEADERDATA, + reinterpret_cast(http_header_data)); + } + + CURLcode err_code = CURLE_OK; + err_code = (*easy_perform_)(curl_); + easy_strerror_ = reinterpret_cast + (dlsym(curl_lib_, "curl_easy_strerror")); + + if (http_status_code != NULL) { + (*easy_getinfo_)(curl_, CURLINFO_RESPONSE_CODE, http_status_code); + } + +#ifndef NDEBUG + if (err_code != CURLE_OK) + fprintf(stderr, "Failed to send http request to %s, error: %s\n", + url.c_str(), + (*easy_strerror_)(err_code)); +#endif + if (headerlist_ != NULL) { + (*slist_free_all_)(headerlist_); + } + + (*easy_cleanup_)(curl_); + if (formpost_ != NULL) { + (*formfree_)(formpost_); + } + + return err_code == CURLE_OK; +} 
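// ---------------------------------------------------------------------------
// Minimal sketch of the runtime-loading pattern used by the wrapper above
// (illustrative only, not part of the patch): libcurl is never linked at
// build time; each entry point is resolved through dlopen()/dlsym() so the
// uploader degrades gracefully when no libcurl is installed. The library
// names mirror the ones probed above; the CURL handle is treated as an
// opaque void* to avoid needing curl headers in this sketch.
#include <cstdio>
#include <dlfcn.h>

typedef void* (*curl_easy_init_fn)(void);

int main() {
  // Probe the common sonames in the same spirit as LibcurlWrapper().
  void* curl_lib = dlopen("libcurl.so.4", RTLD_NOW);
  if (!curl_lib) curl_lib = dlopen("libcurl.so", RTLD_NOW);
  if (!curl_lib) {
    std::fprintf(stderr, "libcurl not available: %s\n", dlerror());
    return 1;  // a real caller would simply skip the upload
  }
  // Resolve one entry point; the wrapper above caches every symbol it needs.
  curl_easy_init_fn easy_init =
      reinterpret_cast<curl_easy_init_fn>(dlsym(curl_lib, "curl_easy_init"));
  std::printf("curl_easy_init %s\n", easy_init ? "found" : "missing");
  dlclose(curl_lib);
  return easy_init ? 0 : 1;  // link with -ldl on older glibc
}
// ---------------------------------------------------------------------------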
+ +bool LibcurlWrapper::Init() { + if (!init_ok_) { + std::cout << "Init_OK was not true in LibcurlWrapper::Init(), check earlier log messages"; + return false; + } + + if (!SetFunctionPointers()) { + std::cout << "Could not find function pointers"; + init_ok_ = false; + return false; + } + + curl_ = (*easy_init_)(); + + last_curl_error_ = "No Error"; + + if (!curl_) { + dlclose(curl_lib_); + std::cout << "Curl initialization failed"; + return false; + } + + // Disable 100-continue header. + char buf[] = "Expect:"; + + headerlist_ = (*slist_append_)(headerlist_, buf); + (*easy_setopt_)(curl_, CURLOPT_HTTPHEADER, headerlist_); + return true; +} + +#define SET_AND_CHECK_FUNCTION_POINTER(var, function_name, type) \ + var = reinterpret_cast(dlsym(curl_lib_, function_name)); \ + if (!var) { \ + std::cout << "Could not find libcurl function " << function_name; \ + init_ok_ = false; \ + return false; \ + } + +bool LibcurlWrapper::SetFunctionPointers() { + + SET_AND_CHECK_FUNCTION_POINTER(easy_init_, + "curl_easy_init", + CURL*(*)()); + + SET_AND_CHECK_FUNCTION_POINTER(easy_setopt_, + "curl_easy_setopt", + CURLcode(*)(CURL*, CURLoption, ...)); + + SET_AND_CHECK_FUNCTION_POINTER(formadd_, "curl_formadd", + CURLFORMcode(*)(curl_httppost**, curl_httppost**, ...)); + + SET_AND_CHECK_FUNCTION_POINTER(slist_append_, "curl_slist_append", + curl_slist*(*)(curl_slist*, const char*)); + + SET_AND_CHECK_FUNCTION_POINTER(easy_perform_, + "curl_easy_perform", + CURLcode(*)(CURL*)); + + SET_AND_CHECK_FUNCTION_POINTER(easy_cleanup_, + "curl_easy_cleanup", + void(*)(CURL*)); + + SET_AND_CHECK_FUNCTION_POINTER(easy_getinfo_, + "curl_easy_getinfo", + CURLcode(*)(CURL *, CURLINFO info, ...)); + + SET_AND_CHECK_FUNCTION_POINTER(slist_free_all_, + "curl_slist_free_all", + void(*)(curl_slist*)); + + SET_AND_CHECK_FUNCTION_POINTER(formfree_, + "curl_formfree", + void(*)(curl_httppost*)); + return true; +} + +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/libcurl_wrapper.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/libcurl_wrapper.h new file mode 100644 index 0000000000..de84a63b40 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/libcurl_wrapper.h @@ -0,0 +1,93 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// A wrapper for libcurl to do HTTP Uploads, to support easy mocking +// and unit testing of the HTTPUpload class. + +#ifndef COMMON_LINUX_LIBCURL_WRAPPER_H_ +#define COMMON_LINUX_LIBCURL_WRAPPER_H_ + +#include +#include + +#include "common/using_std_string.h" +#include "third_party/curl/curl.h" + +namespace google_breakpad { +class LibcurlWrapper { + public: + LibcurlWrapper(); + ~LibcurlWrapper(); + virtual bool Init(); + virtual bool SetProxy(const string& proxy_host, + const string& proxy_userpwd); + virtual bool AddFile(const string& upload_file_path, + const string& basename); + virtual bool SendRequest(const string& url, + const std::map& parameters, + int* http_status_code, + string* http_header_data, + string* http_response_data); + private: + // This function initializes class state corresponding to function + // pointers into the CURL library. + bool SetFunctionPointers(); + + bool init_ok_; // Whether init succeeded + void* curl_lib_; // Pointer to result of dlopen() on + // curl library + string last_curl_error_; // The text of the last error when + // dealing + // with CURL. + + CURL *curl_; // Pointer for handle for CURL calls. + + CURL* (*easy_init_)(void); + + // Stateful pointers for calling into curl_formadd() + struct curl_httppost *formpost_; + struct curl_httppost *lastptr_; + struct curl_slist *headerlist_; + + // Function pointers into CURL library + CURLcode (*easy_setopt_)(CURL *, CURLoption, ...); + CURLFORMcode (*formadd_)(struct curl_httppost **, + struct curl_httppost **, ...); + struct curl_slist* (*slist_append_)(struct curl_slist *, const char *); + void (*slist_free_all_)(struct curl_slist *); + CURLcode (*easy_perform_)(CURL *); + const char* (*easy_strerror_)(CURLcode); + void (*easy_cleanup_)(CURL *); + CURLcode (*easy_getinfo_)(CURL *, CURLINFO info, ...); + void (*formfree_)(struct curl_httppost *); + +}; +} + +#endif // COMMON_LINUX_LIBCURL_WRAPPER_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/linux_libc_support.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/linux_libc_support.cc new file mode 100644 index 0000000000..08b0325e6d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/linux_libc_support.cc @@ -0,0 +1,237 @@ +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// This source file provides replacements for libc functions that we need. If +// we call the libc functions directly we risk crashing in the dynamic linker +// as it tries to resolve uncached PLT entries. + +#include "common/linux/linux_libc_support.h" + +#include + +extern "C" { + +size_t my_strlen(const char* s) { + size_t len = 0; + while (*s++) len++; + return len; +} + +int my_strcmp(const char* a, const char* b) { + for (;;) { + if (*a < *b) + return -1; + else if (*a > *b) + return 1; + else if (*a == 0) + return 0; + a++; + b++; + } +} + +int my_strncmp(const char* a, const char* b, size_t len) { + for (size_t i = 0; i < len; ++i) { + if (*a < *b) + return -1; + else if (*a > *b) + return 1; + else if (*a == 0) + return 0; + a++; + b++; + } + + return 0; +} + +// Parse a non-negative integer. +// result: (output) the resulting non-negative integer +// s: a NUL terminated string +// Return true iff successful. +bool my_strtoui(int* result, const char* s) { + if (*s == 0) + return false; + int r = 0; + for (;; s++) { + if (*s == 0) + break; + const int old_r = r; + r *= 10; + if (*s < '0' || *s > '9') + return false; + r += *s - '0'; + if (r < old_r) + return false; + } + + *result = r; + return true; +} + +// Return the length of the given unsigned integer when expressed in base 10. +unsigned my_uint_len(uintmax_t i) { + if (!i) + return 1; + + int len = 0; + while (i) { + len++; + i /= 10; + } + + return len; +} + +// Convert an unsigned integer to a string +// output: (output) the resulting string is written here. This buffer must be +// large enough to hold the resulting string. Call |my_uint_len| to get the +// required length. +// i: the unsigned integer to serialise. +// i_len: the length of the integer in base 10 (see |my_uint_len|). 
+void my_uitos(char* output, uintmax_t i, unsigned i_len) { + for (unsigned index = i_len; index; --index, i /= 10) + output[index - 1] = '0' + (i % 10); +} + +const char* my_strchr(const char* haystack, char needle) { + while (*haystack && *haystack != needle) + haystack++; + if (*haystack == needle) + return haystack; + return (const char*) 0; +} + +const char* my_strrchr(const char* haystack, char needle) { + const char* ret = NULL; + while (*haystack) { + if (*haystack == needle) + ret = haystack; + haystack++; + } + return ret; +} + +void* my_memchr(const void* src, int needle, size_t src_len) { + const unsigned char* p = (const unsigned char*)src; + const unsigned char* p_end = p + src_len; + for (; p < p_end; ++p) { + if (*p == needle) + return (void*)p; + } + return NULL; +} + +// Read a hex value +// result: (output) the resulting value +// s: a string +// Returns a pointer to the first invalid charactor. +const char* my_read_hex_ptr(uintptr_t* result, const char* s) { + uintptr_t r = 0; + + for (;; ++s) { + if (*s >= '0' && *s <= '9') { + r <<= 4; + r += *s - '0'; + } else if (*s >= 'a' && *s <= 'f') { + r <<= 4; + r += (*s - 'a') + 10; + } else if (*s >= 'A' && *s <= 'F') { + r <<= 4; + r += (*s - 'A') + 10; + } else { + break; + } + } + + *result = r; + return s; +} + +const char* my_read_decimal_ptr(uintptr_t* result, const char* s) { + uintptr_t r = 0; + + for (;; ++s) { + if (*s >= '0' && *s <= '9') { + r *= 10; + r += *s - '0'; + } else { + break; + } + } + *result = r; + return s; +} + +void my_memset(void* ip, char c, size_t len) { + char* p = (char *) ip; + while (len--) + *p++ = c; +} + +size_t my_strlcpy(char* s1, const char* s2, size_t len) { + size_t pos1 = 0; + size_t pos2 = 0; + + while (s2[pos2] != '\0') { + if (pos1 + 1 < len) { + s1[pos1] = s2[pos2]; + pos1++; + } + pos2++; + } + if (len > 0) + s1[pos1] = '\0'; + + return pos2; +} + +size_t my_strlcat(char* s1, const char* s2, size_t len) { + size_t pos1 = 0; + + while (pos1 < len && s1[pos1] != '\0') + pos1++; + + if (pos1 == len) + return pos1; + + return pos1 + my_strlcpy(s1 + pos1, s2, len - pos1); +} + +int my_isspace(int ch) { + // Matches the C locale. + const char spaces[] = " \t\f\n\r\t\v"; + for (size_t i = 0; i < sizeof(spaces); i++) { + if (ch == spaces[i]) + return 1; + } + return 0; +} + +} // extern "C" diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/linux_libc_support.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/linux_libc_support.h new file mode 100644 index 0000000000..ec5a8d6b6b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/linux_libc_support.h @@ -0,0 +1,96 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// This header provides replacements for libc functions that we need. We if +// call the libc functions directly we risk crashing in the dynamic linker as +// it tries to resolve uncached PLT entries. + +#ifndef CLIENT_LINUX_LINUX_LIBC_SUPPORT_H_ +#define CLIENT_LINUX_LINUX_LIBC_SUPPORT_H_ + +#include +#include +#include + +extern "C" { + +extern size_t my_strlen(const char* s); + +extern int my_strcmp(const char* a, const char* b); + +extern int my_strncmp(const char* a, const char* b, size_t len); + +// Parse a non-negative integer. +// result: (output) the resulting non-negative integer +// s: a NUL terminated string +// Return true iff successful. +extern bool my_strtoui(int* result, const char* s); + +// Return the length of the given unsigned integer when expressed in base 10. +extern unsigned my_uint_len(uintmax_t i); + +// Convert an unsigned integer to a string +// output: (output) the resulting string is written here. This buffer must be +// large enough to hold the resulting string. Call |my_uint_len| to get the +// required length. +// i: the unsigned integer to serialise. +// i_len: the length of the integer in base 10 (see |my_uint_len|). +extern void my_uitos(char* output, uintmax_t i, unsigned i_len); + +extern const char* my_strchr(const char* haystack, char needle); + +extern const char* my_strrchr(const char* haystack, char needle); + +// Read a hex value +// result: (output) the resulting value +// s: a string +// Returns a pointer to the first invalid charactor. +extern const char* my_read_hex_ptr(uintptr_t* result, const char* s); + +extern const char* my_read_decimal_ptr(uintptr_t* result, const char* s); + +extern void my_memset(void* ip, char c, size_t len); + +extern void* my_memchr(const void* src, int c, size_t len); + +// The following are considered safe to use in a compromised environment. +// Besides, this gives the compiler an opportunity to optimize their calls. +#define my_memcpy memcpy +#define my_memmove memmove +#define my_memcmp memcmp + +extern size_t my_strlcpy(char* s1, const char* s2, size_t len); + +extern size_t my_strlcat(char* s1, const char* s2, size_t len); + +extern int my_isspace(int ch); + +} // extern "C" + +#endif // CLIENT_LINUX_LINUX_LIBC_SUPPORT_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/linux_libc_support_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/linux_libc_support_unittest.cc new file mode 100644 index 0000000000..adadfed44d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/linux_libc_support_unittest.cc @@ -0,0 +1,213 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. 
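// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the patch) of how the my_* replacements
// declared in linux_libc_support.h above are meant to be combined inside a
// compromised process, e.g. assembling a "/proc/<pid>/maps" style path
// without calling into libc. The pid value is a stand-in; note that
// my_uitos() writes exactly i_len digits and does not NUL-terminate.
#include <stdint.h>
#include <stdio.h>

#include "common/linux/linux_libc_support.h"

int main() {
  const uintmax_t pid = 1234;   // stand-in for a value such as sys_getpid()
  char path[32];
  my_strlcpy(path, "/proc/", sizeof(path));

  const unsigned len = my_uint_len(pid);  // digits needed in base 10
  char digits[21];                        // enough for a 64-bit value
  my_uitos(digits, pid, len);
  digits[len] = '\0';                     // terminate manually

  my_strlcat(path, digits, sizeof(path));
  my_strlcat(path, "/maps", sizeof(path));
  puts(path);                             // prints "/proc/1234/maps"
  return 0;
}
// ---------------------------------------------------------------------------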
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include "breakpad_googletest_includes.h" +#include "common/linux/linux_libc_support.h" + +namespace { +typedef testing::Test LinuxLibcSupportTest; +} + +TEST(LinuxLibcSupportTest, strlen) { + static const char* test_data[] = { "", "a", "aa", "aaa", "aabc", NULL }; + for (unsigned i = 0; ; ++i) { + if (!test_data[i]) + break; + ASSERT_EQ(strlen(test_data[i]), my_strlen(test_data[i])); + } +} + +TEST(LinuxLibcSupportTest, strcmp) { + static const char* test_data[] = { + "", "", + "a", "", + "", "a", + "a", "b", + "a", "a", + "ab", "aa", + "abc", "ab", + "abc", "abc", + NULL, + }; + + for (unsigned i = 0; ; ++i) { + if (!test_data[i*2]) + break; + int libc_result = strcmp(test_data[i*2], test_data[i*2 + 1]); + if (libc_result > 1) + libc_result = 1; + else if (libc_result < -1) + libc_result = -1; + ASSERT_EQ(my_strcmp(test_data[i*2], test_data[i*2 + 1]), libc_result); + } +} + +TEST(LinuxLibcSupportTest, strtoui) { + int result; + + ASSERT_FALSE(my_strtoui(&result, "")); + ASSERT_FALSE(my_strtoui(&result, "-1")); + ASSERT_FALSE(my_strtoui(&result, "-")); + ASSERT_FALSE(my_strtoui(&result, "a")); + ASSERT_FALSE(my_strtoui(&result, "23472893472938472987987398472398")); + + ASSERT_TRUE(my_strtoui(&result, "0")); + ASSERT_EQ(result, 0); + ASSERT_TRUE(my_strtoui(&result, "1")); + ASSERT_EQ(result, 1); + ASSERT_TRUE(my_strtoui(&result, "12")); + ASSERT_EQ(result, 12); + ASSERT_TRUE(my_strtoui(&result, "123")); + ASSERT_EQ(result, 123); + ASSERT_TRUE(my_strtoui(&result, "0123")); + ASSERT_EQ(result, 123); +} + +TEST(LinuxLibcSupportTest, uint_len) { + ASSERT_EQ(my_uint_len(0), 1U); + ASSERT_EQ(my_uint_len(2), 1U); + ASSERT_EQ(my_uint_len(5), 1U); + ASSERT_EQ(my_uint_len(9), 1U); + ASSERT_EQ(my_uint_len(10), 2U); + ASSERT_EQ(my_uint_len(99), 2U); + ASSERT_EQ(my_uint_len(100), 3U); + ASSERT_EQ(my_uint_len(101), 3U); + ASSERT_EQ(my_uint_len(1000), 4U); + // 0xFFFFFFFFFFFFFFFF + ASSERT_EQ(my_uint_len(18446744073709551615LLU), 20U); +} + +TEST(LinuxLibcSupportTest, uitos) { + char buf[32]; + + my_uitos(buf, 0, 1); 
+ ASSERT_EQ(0, memcmp(buf, "0", 1)); + + my_uitos(buf, 1, 1); + ASSERT_EQ(0, memcmp(buf, "1", 1)); + + my_uitos(buf, 10, 2); + ASSERT_EQ(0, memcmp(buf, "10", 2)); + + my_uitos(buf, 63, 2); + ASSERT_EQ(0, memcmp(buf, "63", 2)); + + my_uitos(buf, 101, 3); + ASSERT_EQ(0, memcmp(buf, "101", 2)); + + // 0xFFFFFFFFFFFFFFFF + my_uitos(buf, 18446744073709551615LLU, 20); + ASSERT_EQ(0, memcmp(buf, "18446744073709551615", 20)); +} + +TEST(LinuxLibcSupportTest, strchr) { + ASSERT_EQ(NULL, my_strchr("abc", 'd')); + ASSERT_EQ(NULL, my_strchr("", 'd')); + ASSERT_EQ(NULL, my_strchr("efghi", 'd')); + + ASSERT_TRUE(my_strchr("a", 'a')); + ASSERT_TRUE(my_strchr("abc", 'a')); + ASSERT_TRUE(my_strchr("bcda", 'a')); + ASSERT_TRUE(my_strchr("sdfasdf", 'a')); + + static const char abc3[] = "abcabcabc"; + ASSERT_EQ(abc3, my_strchr(abc3, 'a')); +} + +TEST(LinuxLibcSupportTest, strrchr) { + ASSERT_EQ(NULL, my_strrchr("abc", 'd')); + ASSERT_EQ(NULL, my_strrchr("", 'd')); + ASSERT_EQ(NULL, my_strrchr("efghi", 'd')); + + ASSERT_TRUE(my_strrchr("a", 'a')); + ASSERT_TRUE(my_strrchr("abc", 'a')); + ASSERT_TRUE(my_strrchr("bcda", 'a')); + ASSERT_TRUE(my_strrchr("sdfasdf", 'a')); + + static const char abc3[] = "abcabcabc"; + ASSERT_EQ(abc3 + 6, my_strrchr(abc3, 'a')); +} + +TEST(LinuxLibcSupportTest, memchr) { + ASSERT_EQ(NULL, my_memchr("abc", 'd', 3)); + ASSERT_EQ(NULL, my_memchr("abcd", 'd', 3)); + ASSERT_EQ(NULL, my_memchr("a", 'a', 0)); + + static const char abc3[] = "abcabcabc"; + ASSERT_EQ(abc3, my_memchr(abc3, 'a', 3)); + ASSERT_EQ(abc3, my_memchr(abc3, 'a', 9)); + ASSERT_EQ(abc3+1, my_memchr(abc3, 'b', 9)); + ASSERT_EQ(abc3+2, my_memchr(abc3, 'c', 9)); +} + +TEST(LinuxLibcSupportTest, read_hex_ptr) { + uintptr_t result; + const char* last; + + last = my_read_hex_ptr(&result, ""); + ASSERT_EQ(result, 0U); + ASSERT_EQ(*last, 0); + + last = my_read_hex_ptr(&result, "0"); + ASSERT_EQ(result, 0U); + ASSERT_EQ(*last, 0); + + last = my_read_hex_ptr(&result, "0123"); + ASSERT_EQ(result, 0x123U); + ASSERT_EQ(*last, 0); + + last = my_read_hex_ptr(&result, "0123a"); + ASSERT_EQ(result, 0x123aU); + ASSERT_EQ(*last, 0); + + last = my_read_hex_ptr(&result, "0123a-"); + ASSERT_EQ(result, 0x123aU); + ASSERT_EQ(*last, '-'); +} + +TEST(LinuxLibcSupportTest, read_decimal_ptr) { + uintptr_t result; + const char* last; + + last = my_read_decimal_ptr(&result, "0"); + ASSERT_EQ(result, 0U); + ASSERT_EQ(*last, 0); + + last = my_read_decimal_ptr(&result, "0123"); + ASSERT_EQ(result, 123U); + ASSERT_EQ(*last, 0); + + last = my_read_decimal_ptr(&result, "1234"); + ASSERT_EQ(result, 1234U); + ASSERT_EQ(*last, 0); + + last = my_read_decimal_ptr(&result, "01234-"); + ASSERT_EQ(result, 1234U); + ASSERT_EQ(*last, '-'); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/memory_mapped_file.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/memory_mapped_file.cc new file mode 100644 index 0000000000..592b66c8d6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/memory_mapped_file.cc @@ -0,0 +1,118 @@ +// Copyright (c) 2011, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// memory_mapped_file.cc: Implement google_breakpad::MemoryMappedFile. +// See memory_mapped_file.h for details. + +#include "common/linux/memory_mapped_file.h" + +#include +#include +#if defined(__ANDROID__) +#include +#endif +#include + +#include "common/memory_range.h" +#include "third_party/lss/linux_syscall_support.h" + +namespace google_breakpad { + +MemoryMappedFile::MemoryMappedFile() {} + +MemoryMappedFile::MemoryMappedFile(const char* path, size_t offset) { + Map(path, offset); +} + +MemoryMappedFile::~MemoryMappedFile() { + Unmap(); +} + +#include + +bool MemoryMappedFile::Map(const char* path, size_t offset) { + Unmap(); + + int fd = sys_open(path, O_RDONLY, 0); + if (fd == -1) { + return false; + } + +#if defined(__x86_64__) || defined(__aarch64__) || \ + (defined(__mips__) && _MIPS_SIM == _ABI64) + + struct kernel_stat st; + if (sys_fstat(fd, &st) == -1 || st.st_size < 0) { +#else + struct kernel_stat64 st; + if (sys_fstat64(fd, &st) == -1 || st.st_size < 0) { +#endif + sys_close(fd); + return false; + } + + // Strangely file size can be negative, but we check above that it is not. + size_t file_len = static_cast(st.st_size); + // If the file does not extend beyond the offset, simply use an empty + // MemoryRange and return true. Don't bother to call mmap() + // even though mmap() can handle an empty file on some platforms. + if (offset >= file_len) { + sys_close(fd); + return true; + } + +#if defined(__x86_64__) || defined(__aarch64__) || \ + (defined(__mips__) && _MIPS_SIM == _ABI64) + void* data = sys_mmap(NULL, file_len, PROT_READ, MAP_PRIVATE, fd, offset); +#else + if ((offset & 4095) != 0) { + // Not page aligned. 
+ sys_close(fd); + return false; + } + void* data = sys_mmap2( + NULL, file_len, PROT_READ, MAP_PRIVATE, fd, offset >> 12); +#endif + sys_close(fd); + if (data == MAP_FAILED) { + return false; + } + + content_.Set(data, file_len - offset); + return true; +} + +void MemoryMappedFile::Unmap() { + if (content_.data()) { + sys_munmap(const_cast(content_.data()), content_.length()); + content_.Set(NULL, 0); + } +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/memory_mapped_file.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/memory_mapped_file.h new file mode 100644 index 0000000000..fa660cc91a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/memory_mapped_file.h @@ -0,0 +1,87 @@ +// Copyright (c) 2011, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// memory_mapped_file.h: Define the google_breakpad::MemoryMappedFile +// class, which maps a file into memory for read-only access. + +#ifndef COMMON_LINUX_MEMORY_MAPPED_FILE_H_ +#define COMMON_LINUX_MEMORY_MAPPED_FILE_H_ + +#include +#include "common/basictypes.h" +#include "common/memory_range.h" + +namespace google_breakpad { + +// A utility class for mapping a file into memory for read-only access of +// the file content. Its implementation avoids calling into libc functions +// by directly making system calls for open, close, mmap, and munmap. +class MemoryMappedFile { + public: + MemoryMappedFile(); + + // Constructor that calls Map() to map a file at |path| into memory. + // If Map() fails, the object behaves as if it is default constructed. + MemoryMappedFile(const char* path, size_t offset); + + ~MemoryMappedFile(); + + // Maps a file at |path| into memory, which can then be accessed via + // content() as a MemoryRange object or via data(), and returns true on + // success. Mapping an empty file will succeed but with data() and size() + // returning NULL and 0, respectively. An existing mapping is unmapped + // before a new mapping is created. 
+ bool Map(const char* path, size_t offset); + + // Unmaps the memory for the mapped file. It's a no-op if no file is + // mapped. + void Unmap(); + + // Returns a MemoryRange object that covers the memory for the mapped + // file. The MemoryRange object is empty if no file is mapped. + const MemoryRange& content() const { return content_; } + + // Returns a pointer to the beginning of the memory for the mapped file. + // or NULL if no file is mapped or the mapped file is empty. + const void* data() const { return content_.data(); } + + // Returns the size in bytes of the mapped file, or zero if no file + // is mapped. + size_t size() const { return content_.length(); } + + private: + // Mapped file content as a MemoryRange object. + MemoryRange content_; + + DISALLOW_COPY_AND_ASSIGN(MemoryMappedFile); +}; + +} // namespace google_breakpad + +#endif // COMMON_LINUX_MEMORY_MAPPED_FILE_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/memory_mapped_file_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/memory_mapped_file_unittest.cc new file mode 100644 index 0000000000..fad59f40cd --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/memory_mapped_file_unittest.cc @@ -0,0 +1,208 @@ +// Copyright (c) 2011, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// memory_mapped_file_unittest.cc: +// Unit tests for google_breakpad::MemoryMappedFile. 
+ +#include +#include +#include + +#include + +#include "breakpad_googletest_includes.h" +#include "common/linux/memory_mapped_file.h" +#include "common/tests/auto_tempdir.h" +#include "common/tests/file_utils.h" +#include "common/using_std_string.h" + +using google_breakpad::AutoTempDir; +using google_breakpad::MemoryMappedFile; +using google_breakpad::WriteFile; + +namespace { + +class MemoryMappedFileTest : public testing::Test { + protected: + void ExpectNoMappedData(const MemoryMappedFile& mapped_file) { + EXPECT_TRUE(mapped_file.content().IsEmpty()); + EXPECT_TRUE(mapped_file.data() == NULL); + EXPECT_EQ(0U, mapped_file.size()); + } +}; + +} // namespace + +TEST_F(MemoryMappedFileTest, DefaultConstructor) { + MemoryMappedFile mapped_file; + ExpectNoMappedData(mapped_file); +} + +TEST_F(MemoryMappedFileTest, UnmapWithoutMap) { + MemoryMappedFile mapped_file; + mapped_file.Unmap(); +} + +TEST_F(MemoryMappedFileTest, MapNonexistentFile) { + { + MemoryMappedFile mapped_file("nonexistent-file", 0); + ExpectNoMappedData(mapped_file); + } + { + MemoryMappedFile mapped_file; + EXPECT_FALSE(mapped_file.Map("nonexistent-file", 0)); + ExpectNoMappedData(mapped_file); + } +} + +TEST_F(MemoryMappedFileTest, MapEmptyFile) { + AutoTempDir temp_dir; + string test_file = temp_dir.path() + "/empty_file"; + ASSERT_TRUE(WriteFile(test_file.c_str(), NULL, 0)); + + { + MemoryMappedFile mapped_file(test_file.c_str(), 0); + ExpectNoMappedData(mapped_file); + } + { + MemoryMappedFile mapped_file; + EXPECT_TRUE(mapped_file.Map(test_file.c_str(), 0)); + ExpectNoMappedData(mapped_file); + } +} + +TEST_F(MemoryMappedFileTest, MapNonEmptyFile) { + char data[256]; + size_t data_size = sizeof(data); + for (size_t i = 0; i < data_size; ++i) { + data[i] = i; + } + + AutoTempDir temp_dir; + string test_file = temp_dir.path() + "/test_file"; + ASSERT_TRUE(WriteFile(test_file.c_str(), data, data_size)); + + { + MemoryMappedFile mapped_file(test_file.c_str(), 0); + EXPECT_FALSE(mapped_file.content().IsEmpty()); + EXPECT_TRUE(mapped_file.data() != NULL); + EXPECT_EQ(data_size, mapped_file.size()); + EXPECT_EQ(0, memcmp(data, mapped_file.data(), data_size)); + } + { + MemoryMappedFile mapped_file; + EXPECT_TRUE(mapped_file.Map(test_file.c_str(), 0)); + EXPECT_FALSE(mapped_file.content().IsEmpty()); + EXPECT_TRUE(mapped_file.data() != NULL); + EXPECT_EQ(data_size, mapped_file.size()); + EXPECT_EQ(0, memcmp(data, mapped_file.data(), data_size)); + } +} + +TEST_F(MemoryMappedFileTest, RemapAfterMap) { + char data1[256]; + size_t data1_size = sizeof(data1); + for (size_t i = 0; i < data1_size; ++i) { + data1[i] = i; + } + + char data2[50]; + size_t data2_size = sizeof(data2); + for (size_t i = 0; i < data2_size; ++i) { + data2[i] = 255 - i; + } + + AutoTempDir temp_dir; + string test_file1 = temp_dir.path() + "/test_file1"; + string test_file2 = temp_dir.path() + "/test_file2"; + ASSERT_TRUE(WriteFile(test_file1.c_str(), data1, data1_size)); + ASSERT_TRUE(WriteFile(test_file2.c_str(), data2, data2_size)); + + { + MemoryMappedFile mapped_file(test_file1.c_str(), 0); + EXPECT_FALSE(mapped_file.content().IsEmpty()); + EXPECT_TRUE(mapped_file.data() != NULL); + EXPECT_EQ(data1_size, mapped_file.size()); + EXPECT_EQ(0, memcmp(data1, mapped_file.data(), data1_size)); + + mapped_file.Map(test_file2.c_str(), 0); + EXPECT_FALSE(mapped_file.content().IsEmpty()); + EXPECT_TRUE(mapped_file.data() != NULL); + EXPECT_EQ(data2_size, mapped_file.size()); + EXPECT_EQ(0, memcmp(data2, mapped_file.data(), data2_size)); + } + { + MemoryMappedFile 
mapped_file; + EXPECT_TRUE(mapped_file.Map(test_file1.c_str(), 0)); + EXPECT_FALSE(mapped_file.content().IsEmpty()); + EXPECT_TRUE(mapped_file.data() != NULL); + EXPECT_EQ(data1_size, mapped_file.size()); + EXPECT_EQ(0, memcmp(data1, mapped_file.data(), data1_size)); + + mapped_file.Map(test_file2.c_str(), 0); + EXPECT_FALSE(mapped_file.content().IsEmpty()); + EXPECT_TRUE(mapped_file.data() != NULL); + EXPECT_EQ(data2_size, mapped_file.size()); + EXPECT_EQ(0, memcmp(data2, mapped_file.data(), data2_size)); + } +} + +TEST_F(MemoryMappedFileTest, MapWithOffset) { + // Put more data in the test file this time. Offsets can only be + // done on page boundaries, so we need a two page file to test this. + const int page_size = 4096; + char data1[2 * page_size]; + size_t data1_size = sizeof(data1); + for (size_t i = 0; i < data1_size; ++i) { + data1[i] = i & 0x7f; + } + + AutoTempDir temp_dir; + string test_file1 = temp_dir.path() + "/test_file1"; + ASSERT_TRUE(WriteFile(test_file1.c_str(), data1, data1_size)); + { + MemoryMappedFile mapped_file(test_file1.c_str(), page_size); + EXPECT_FALSE(mapped_file.content().IsEmpty()); + EXPECT_TRUE(mapped_file.data() != NULL); + EXPECT_EQ(data1_size - page_size, mapped_file.size()); + EXPECT_EQ( + 0, + memcmp(data1 + page_size, mapped_file.data(), data1_size - page_size)); + } + { + MemoryMappedFile mapped_file; + mapped_file.Map(test_file1.c_str(), page_size); + EXPECT_FALSE(mapped_file.content().IsEmpty()); + EXPECT_TRUE(mapped_file.data() != NULL); + EXPECT_EQ(data1_size - page_size, mapped_file.size()); + EXPECT_EQ( + 0, + memcmp(data1 + page_size, mapped_file.data(), data1_size - page_size)); + } +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/safe_readlink.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/safe_readlink.cc new file mode 100644 index 0000000000..870c28af3b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/safe_readlink.cc @@ -0,0 +1,53 @@ +// Copyright (c) 2011, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// safe_readlink.cc: Implement google_breakpad::SafeReadLink. +// See safe_readlink.h for details. + +#include <stddef.h> + +#include "third_party/lss/linux_syscall_support.h" + +namespace google_breakpad { + +bool SafeReadLink(const char* path, char* buffer, size_t buffer_size) { + // sys_readlink() does not add a NULL byte to |buffer|. In order to return + // a NULL-terminated string in |buffer|, |buffer_size| should be at least + // one byte longer than the expected path length. Also, sys_readlink() + // returns the actual path length on success, which does not count the + // NULL byte, so |result_size| should be less than |buffer_size|. + ssize_t result_size = sys_readlink(path, buffer, buffer_size); + if (result_size >= 0 && static_cast<size_t>(result_size) < buffer_size) { + buffer[result_size] = '\0'; + return true; + } + return false; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/safe_readlink.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/safe_readlink.h new file mode 100644 index 0000000000..4ae131b580 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/safe_readlink.h @@ -0,0 +1,65 @@ +// Copyright (c) 2011, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// safe_readlink.h: Define the google_breakpad::SafeReadLink function, +// which wraps sys_readlink and guarantees the result is NULL-terminated.
+ +#ifndef COMMON_LINUX_SAFE_READLINK_H_ +#define COMMON_LINUX_SAFE_READLINK_H_ + +#include <stddef.h> + +namespace google_breakpad { + +// This function wraps sys_readlink() and performs the same functionality, +// but guarantees |buffer| is NULL-terminated if sys_readlink() returns +// no error. It takes the same arguments as sys_readlink(), but unlike +// sys_readlink(), it returns true on success. +// +// |buffer_size| specifies the size of |buffer| in bytes. As this function +// always NULL-terminates |buffer| on success, |buffer_size| should be +// at least one byte longer than the expected path length (e.g. PATH_MAX, +// which is typically defined as the maximum length of a path name +// including the NULL byte). +// +// The implementation of this function calls sys_readlink() instead of +// readlink(), so it can be used in contexts where calling libc +// functions is discouraged. +bool SafeReadLink(const char* path, char* buffer, size_t buffer_size); + +// Same as the three-argument version of SafeReadLink() but deduces the +// size of |buffer| if it is a char array of known size. +template <size_t N> +bool SafeReadLink(const char* path, char (&buffer)[N]) { + return SafeReadLink(path, buffer, sizeof(buffer)); +} + +} // namespace google_breakpad + +#endif // COMMON_LINUX_SAFE_READLINK_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/safe_readlink_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/safe_readlink_unittest.cc new file mode 100644 index 0000000000..d346b2a807 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/safe_readlink_unittest.cc @@ -0,0 +1,89 @@ +// Copyright (c) 2011, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// safe_readlink_unittest.cc: Unit tests for google_breakpad::SafeReadLink.
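A minimal caller sketch for the API above (illustrative only, not part of this patch; it assumes the breakpad source root is on the include path and uses PATH_MAX from limits.h):

#include <limits.h>
#include <stdio.h>

#include "common/linux/safe_readlink.h"

int main() {
  char buffer[PATH_MAX];
  // The template overload deduces sizeof(buffer), so the size argument can
  // never drift out of sync with the array; on success |buffer| holds a
  // NULL-terminated path.
  if (google_breakpad::SafeReadLink("/proc/self/exe", buffer)) {
    printf("running from: %s\n", buffer);
  }
  return 0;
}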
+ +#include "breakpad_googletest_includes.h" +#include "common/linux/safe_readlink.h" + +using google_breakpad::SafeReadLink; + +TEST(SafeReadLinkTest, ZeroBufferSize) { + char buffer[1]; + EXPECT_FALSE(SafeReadLink("/proc/self/exe", buffer, 0)); +} + +TEST(SafeReadLinkTest, BufferSizeTooSmall) { + char buffer[1]; + EXPECT_FALSE(SafeReadLink("/proc/self/exe", buffer, 1)); +} + +TEST(SafeReadLinkTest, BoundaryBufferSize) { + char buffer[PATH_MAX]; + EXPECT_TRUE(SafeReadLink("/proc/self/exe", buffer, sizeof(buffer))); + size_t path_length = strlen(buffer); + EXPECT_LT(0U, path_length); + EXPECT_GT(sizeof(buffer), path_length); + + // Buffer size equals to the expected path length plus 1 for the NULL byte. + char buffer2[PATH_MAX]; + EXPECT_TRUE(SafeReadLink("/proc/self/exe", buffer2, path_length + 1)); + EXPECT_EQ(path_length, strlen(buffer2)); + EXPECT_EQ(0, strncmp(buffer, buffer2, PATH_MAX)); + + // Buffer size equals to the expected path length. + EXPECT_FALSE(SafeReadLink("/proc/self/exe", buffer, path_length)); +} + +TEST(SafeReadLinkTest, NonexistentPath) { + char buffer[PATH_MAX]; + EXPECT_FALSE(SafeReadLink("nonexistent_path", buffer, sizeof(buffer))); +} + +TEST(SafeReadLinkTest, NonSymbolicLinkPath) { + char actual_path[PATH_MAX]; + EXPECT_TRUE(SafeReadLink("/proc/self/exe", actual_path, sizeof(actual_path))); + + char buffer[PATH_MAX]; + EXPECT_FALSE(SafeReadLink(actual_path, buffer, sizeof(buffer))); +} + +TEST(SafeReadLinkTest, DeduceBufferSizeFromCharArray) { + char buffer[PATH_MAX]; + char* buffer_pointer = buffer; + EXPECT_TRUE(SafeReadLink("/proc/self/exe", buffer_pointer, sizeof(buffer))); + size_t path_length = strlen(buffer); + + // Use the template version of SafeReadLink to deduce the buffer size + // from the char array. + char buffer2[PATH_MAX]; + EXPECT_TRUE(SafeReadLink("/proc/self/exe", buffer2)); + EXPECT_EQ(path_length, strlen(buffer2)); + EXPECT_EQ(0, strncmp(buffer, buffer2, PATH_MAX)); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/synth_elf.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/synth_elf.cc new file mode 100644 index 0000000000..b978550f85 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/synth_elf.cc @@ -0,0 +1,261 @@ +#include "common/linux/synth_elf.h" + +#include +#include +#include +#include + +#include "common/linux/elf_gnu_compat.h" +#include "common/using_std_string.h" + +namespace google_breakpad { +namespace synth_elf { + +ELF::ELF(uint16_t machine, + uint8_t file_class, + Endianness endianness) + : Section(endianness), + addr_size_(file_class == ELFCLASS64 ? 8 : 4), + program_count_(0), + program_header_table_(endianness), + section_count_(0), + section_header_table_(endianness), + section_header_strings_(endianness) { + // Could add support for more machine types here if needed. + assert(machine == EM_386 || + machine == EM_X86_64 || + machine == EM_ARM); + assert(file_class == ELFCLASS32 || file_class == ELFCLASS64); + + start() = 0; + // Add ELF header + // e_ident + // EI_MAG0...EI_MAG3 + D8(ELFMAG0); + D8(ELFMAG1); + D8(ELFMAG2); + D8(ELFMAG3); + // EI_CLASS + D8(file_class); + // EI_DATA + D8(endianness == kLittleEndian ? ELFDATA2LSB : ELFDATA2MSB); + // EI_VERSION + D8(EV_CURRENT); + // EI_OSABI + D8(ELFOSABI_SYSV); + // EI_ABIVERSION + D8(0); + // EI_PAD + Append(7, 0); + assert(Size() == EI_NIDENT); + + // e_type + D16(ET_EXEC); //TODO: allow passing ET_DYN? 
+ // e_machine + D16(machine); + // e_version + D32(EV_CURRENT); + // e_entry + Append(endianness, addr_size_, 0); + // e_phoff + Append(endianness, addr_size_, program_header_label_); + // e_shoff + Append(endianness, addr_size_, section_header_label_); + // e_flags + D32(0); + // e_ehsize + D16(addr_size_ == 8 ? sizeof(Elf64_Ehdr) : sizeof(Elf32_Ehdr)); + // e_phentsize + D16(addr_size_ == 8 ? sizeof(Elf64_Phdr) : sizeof(Elf32_Phdr)); + // e_phnum + D16(program_count_label_); + // e_shentsize + D16(addr_size_ == 8 ? sizeof(Elf64_Shdr) : sizeof(Elf32_Shdr)); + // e_shnum + D16(section_count_label_); + // e_shstrndx + D16(section_header_string_index_); + + // Add an empty section for SHN_UNDEF. + Section shn_undef; + AddSection("", shn_undef, SHT_NULL); +} + +int ELF::AddSection(const string& name, const Section& section, + uint32_t type, uint32_t flags, uint64_t addr, + uint32_t link, uint64_t entsize, uint64_t offset) { + Label offset_label; + Label string_label(section_header_strings_.Add(name)); + size_t size = section.Size(); + + int index = section_count_; + ++section_count_; + + section_header_table_ + // sh_name + .D32(string_label) + // sh_type + .D32(type) + // sh_flags + .Append(endianness(), addr_size_, flags) + // sh_addr + .Append(endianness(), addr_size_, addr) + // sh_offset + .Append(endianness(), addr_size_, offset_label) + // sh_size + .Append(endianness(), addr_size_, size) + // sh_link + .D32(link) + // sh_info + .D32(0) + // sh_addralign + .Append(endianness(), addr_size_, 0) + // sh_entsize + .Append(endianness(), addr_size_, entsize); + + sections_.push_back(ElfSection(section, type, addr, offset, offset_label, + size)); + return index; +} + +void ELF::AppendSection(ElfSection &section) { + // NULL and NOBITS sections have no content, so they + // don't need to be written to the file. + if (section.type_ == SHT_NULL) { + section.offset_label_ = 0; + } else if (section.type_ == SHT_NOBITS) { + section.offset_label_ = section.offset_; + } else { + Mark(&section.offset_label_); + Append(section); + Align(4); + } +} + +void ELF::AddSegment(int start, int end, uint32_t type, uint32_t flags) { + assert(start > 0); + assert(size_t(start) < sections_.size()); + assert(end > 0); + assert(size_t(end) < sections_.size()); + ++program_count_; + + // p_type + program_header_table_.D32(type); + + if (addr_size_ == 8) { + // p_flags + program_header_table_.D32(flags); + } + + size_t filesz = 0; + size_t memsz = 0; + bool prev_was_nobits = false; + for (int i = start; i <= end; ++i) { + size_t size = sections_[i].size_; + if (sections_[i].type_ != SHT_NOBITS) { + assert(!prev_was_nobits); + // non SHT_NOBITS sections are 4-byte aligned (see AddSection) + size = (size + 3) & ~3; + filesz += size; + } else { + prev_was_nobits = true; + } + memsz += size; + } + + program_header_table_ + // p_offset + .Append(endianness(), addr_size_, sections_[start].offset_label_) + // p_vaddr + .Append(endianness(), addr_size_, sections_[start].addr_) + // p_paddr + .Append(endianness(), addr_size_, sections_[start].addr_) + // p_filesz + .Append(endianness(), addr_size_, filesz) + // p_memsz + .Append(endianness(), addr_size_, memsz); + + if (addr_size_ == 4) { + // p_flags + program_header_table_.D32(flags); + } + + // p_align + program_header_table_.Append(endianness(), addr_size_, 0); +} + +void ELF::Finish() { + // Add the section header string table at the end.
+ section_header_string_index_ = section_count_; + //printf(".shstrtab size: %ld\n", section_header_strings_.Size()); + AddSection(".shstrtab", section_header_strings_, SHT_STRTAB); + //printf("section_count_: %ld, sections_.size(): %ld\n", + // section_count_, sections_.size()); + if (program_count_) { + Mark(&program_header_label_); + Append(program_header_table_); + } else { + program_header_label_ = 0; + } + + for (vector<ElfSection>::iterator it = sections_.begin(); + it < sections_.end(); ++it) { + AppendSection(*it); + } + section_count_label_ = section_count_; + program_count_label_ = program_count_; + + // Section header table starts here. + Mark(&section_header_label_); + Append(section_header_table_); +} + +SymbolTable::SymbolTable(Endianness endianness, + size_t addr_size, + StringTable& table) : Section(endianness), + addr_size_(addr_size), + table_(table) { + assert(addr_size_ == 4 || addr_size_ == 8); +} + +void SymbolTable::AddSymbol(const string& name, uint32_t value, + uint32_t size, unsigned info, uint16_t shndx) { + assert(addr_size_ == 4); + D32(table_.Add(name)); + D32(value); + D32(size); + D8(info); + D8(0); // other + D16(shndx); +} + +void SymbolTable::AddSymbol(const string& name, uint64_t value, + uint64_t size, unsigned info, uint16_t shndx) { + assert(addr_size_ == 8); + D32(table_.Add(name)); + D8(info); + D8(0); // other + D16(shndx); + D64(value); + D64(size); +} + +void Notes::AddNote(int type, const string &name, const uint8_t* desc_bytes, + size_t desc_size) { + // Elf32_Nhdr and Elf64_Nhdr are exactly the same. + Elf32_Nhdr note_header; + memset(&note_header, 0, sizeof(note_header)); + note_header.n_namesz = name.length() + 1; + note_header.n_descsz = desc_size; + note_header.n_type = type; + + Append(reinterpret_cast<const uint8_t*>(&note_header), + sizeof(note_header)); + AppendCString(name); + Align(4); + Append(desc_bytes, desc_size); + Align(4); +} + +} // namespace synth_elf +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/synth_elf.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/synth_elf.h new file mode 100644 index 0000000000..330ceae8ed --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/synth_elf.h @@ -0,0 +1,195 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2011, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Ted Mielczarek + +// synth_elf.h: Interface to synth_elf::ELF: fake ELF generator. + +#ifndef COMMON_LINUX_SYNTH_ELF_H_ +#define COMMON_LINUX_SYNTH_ELF_H_ + +#include "common/test_assembler.h" + +#include +#include +#include +#include +#include + +#include "common/using_std_string.h" + +namespace google_breakpad { +namespace synth_elf { + +using std::list; +using std::vector; +using std::map; +using std::pair; +using test_assembler::Endianness; +using test_assembler::kLittleEndian; +using test_assembler::kUnsetEndian; +using test_assembler::Label; +using test_assembler::Section; + +// String tables are common in ELF headers, so subclass Section +// to make them easy to generate. +class StringTable : public Section { +public: + StringTable(Endianness endianness = kUnsetEndian) + : Section(endianness) { + start() = 0; + empty_string = Add(""); + } + + // Add the string s to the string table, and return + // a label containing the offset into the string table + // at which it was added. + Label Add(const string& s) { + if (strings_.find(s) != strings_.end()) + return strings_[s]; + + Label string_label(Here()); + AppendCString(s); + strings_[s] = string_label; + return string_label; + } + + // All StringTables contain an empty string as their first + // entry. + Label empty_string; + + // Avoid inserting duplicate strings. + map strings_; +}; + +// A Section representing an entire ELF file. +class ELF : public Section { + public: + ELF(uint16_t machine, // EM_386, etc + uint8_t file_class, // ELFCLASS{32,64} + Endianness endianness = kLittleEndian); + + // Add the Section section to the section header table and append it + // to the file. Returns the index of the section in the section + // header table. + int AddSection(const string& name, const Section& section, + uint32_t type, uint32_t flags = 0, uint64_t addr = 0, + uint32_t link = 0, uint64_t entsize = 0, uint64_t offset = 0); + + // Add a segment containing from section index start to section index end. + // The indexes must have been gotten from AddSection. + void AddSegment(int start, int end, uint32_t type, uint32_t flags = 0); + + // Write out all data. GetContents may be used after this. + void Finish(); + + private: + // Size of an address, in bytes. + const size_t addr_size_; + + // Offset to the program header table. + Label program_header_label_; + // Number of entries in the program header table. + int program_count_; + Label program_count_label_; + // The program header table itself. + Section program_header_table_; + + // Offset to the section header table. + Label section_header_label_; + // Number of entries in the section header table. + int section_count_; + Label section_count_label_; + // The section header table itself. + Section section_header_table_; + + // Index of the section header string table in the section + // header table. + Label section_header_string_index_; + // Section containing the names of section header table entries. 
+ StringTable section_header_strings_; + + // Record of an added section + struct ElfSection : public Section { + ElfSection(const Section& section, uint32_t type, uint32_t addr, + uint32_t offset, Label offset_label, uint32_t size) + : Section(section), type_(type), addr_(addr), offset_(offset) + , offset_label_(offset_label), size_(size) { + } + + uint32_t type_; + uint32_t addr_; + uint32_t offset_; + Label offset_label_; + uint32_t size_; + }; + + vector sections_; + + void AppendSection(ElfSection §ion); +}; + +// A class to build .symtab or .dynsym sections. +class SymbolTable : public Section { + public: + // table is the StringTable that contains symbol names. The caller + // must ensure that it remains alive for the life of the + // SymbolTable. + SymbolTable(Endianness endianness, size_t addr_size, StringTable& table); + + // Add an Elf32_Sym. + void AddSymbol(const string& name, uint32_t value, + uint32_t size, unsigned info, uint16_t shndx); + // Add an Elf64_Sym. + void AddSymbol(const string& name, uint64_t value, + uint64_t size, unsigned info, uint16_t shndx); + + private: + size_t addr_size_; + StringTable& table_; +}; + +// A class for note sections +class Notes : public Section { +public: + Notes(Endianness endianness) + : Section(endianness) { + } + + // Add a note. + void AddNote(int type, const string &name, const uint8_t* desc_bytes, + size_t desc_size); +}; + +} // namespace synth_elf +} // namespace google_breakpad + +#endif // COMMON_LINUX_SYNTH_ELF_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/synth_elf_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/synth_elf_unittest.cc new file mode 100644 index 0000000000..3715b6e608 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/synth_elf_unittest.cc @@ -0,0 +1,413 @@ +// Copyright (c) 2011 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +// Original author: Ted Mielczarek + +// synth_elf_unittest.cc: +// Unittests for google_breakpad::synth_elf::ELF + +#include + +#include "breakpad_googletest_includes.h" +#include "common/linux/elfutils.h" +#include "common/linux/synth_elf.h" +#include "common/using_std_string.h" + +using google_breakpad::ElfClass32; +using google_breakpad::ElfClass64; +using google_breakpad::synth_elf::ELF; +using google_breakpad::synth_elf::Notes; +using google_breakpad::synth_elf::Section; +using google_breakpad::synth_elf::StringTable; +using google_breakpad::synth_elf::SymbolTable; +using google_breakpad::test_assembler::Endianness; +using google_breakpad::test_assembler::kBigEndian; +using google_breakpad::test_assembler::kLittleEndian; +using google_breakpad::test_assembler::Label; +using ::testing::Test; +using ::testing::Types; + +class StringTableTest : public Test { +public: + StringTableTest() : table(kLittleEndian) {} + + StringTable table; +}; + +TEST_F(StringTableTest, Empty) { + EXPECT_EQ(1U, table.Size()); + string contents; + ASSERT_TRUE(table.GetContents(&contents)); + const char* kExpectedContents = "\0"; + EXPECT_EQ(0, memcmp(kExpectedContents, + contents.c_str(), + contents.size())); + ASSERT_TRUE(table.empty_string.IsKnownConstant()); + EXPECT_EQ(0U, table.empty_string.Value()); +} + +TEST_F(StringTableTest, Basic) { + const string s1("table fills with strings"); + const string s2("offsets preserved as labels"); + const string s3("verified with tests"); + const char* kExpectedContents = + "\0table fills with strings\0" + "offsets preserved as labels\0" + "verified with tests\0"; + Label l1(table.Add(s1)); + Label l2(table.Add(s2)); + Label l3(table.Add(s3)); + string contents; + ASSERT_TRUE(table.GetContents(&contents)); + EXPECT_EQ(0, memcmp(kExpectedContents, + contents.c_str(), + contents.size())); + // empty_string is at zero, other strings start at 1. + ASSERT_TRUE(l1.IsKnownConstant()); + EXPECT_EQ(1U, l1.Value()); + // Each string has an extra byte for a trailing null. + EXPECT_EQ(1 + s1.length() + 1, l2.Value()); + EXPECT_EQ(1 + s1.length() + 1 + s2.length() + 1, l3.Value()); +} + +TEST_F(StringTableTest, Duplicates) { + const string s1("string 1"); + const string s2("string 2"); + const string s3(""); + const char* kExpectedContents = "\0string 1\0string 2\0"; + Label l1(table.Add(s1)); + Label l2(table.Add(s2)); + // Adding strings twice should return the same Label. 
+ Label l3(table.Add(s3)); + Label l4(table.Add(s2)); + string contents; + ASSERT_TRUE(table.GetContents(&contents)); + EXPECT_EQ(0, memcmp(kExpectedContents, + contents.c_str(), + contents.size())); + EXPECT_EQ(0U, table.empty_string.Value()); + EXPECT_EQ(table.empty_string.Value(), l3.Value()); + EXPECT_EQ(l2.Value(), l4.Value()); +} + +class SymbolTableTest : public Test {}; + +TEST_F(SymbolTableTest, Simple32) { + StringTable table(kLittleEndian); + SymbolTable syms(kLittleEndian, 4, table); + + const string kFuncName1 = "superfunc"; + const uint32_t kFuncAddr1 = 0x10001000; + const uint32_t kFuncSize1 = 0x10; + const string kFuncName2 = "awesomefunc"; + const uint32_t kFuncAddr2 = 0x20002000; + const uint32_t kFuncSize2 = 0x2f; + const string kFuncName3 = "megafunc"; + const uint32_t kFuncAddr3 = 0x30003000; + const uint32_t kFuncSize3 = 0x3c; + + syms.AddSymbol(kFuncName1, kFuncAddr1, kFuncSize1, + ELF32_ST_INFO(STB_GLOBAL, STT_FUNC), + SHN_UNDEF + 1); + syms.AddSymbol(kFuncName2, kFuncAddr2, kFuncSize2, + ELF32_ST_INFO(STB_LOCAL, STT_FUNC), + SHN_UNDEF + 2); + syms.AddSymbol(kFuncName3, kFuncAddr3, kFuncSize3, + ELF32_ST_INFO(STB_LOCAL, STT_FUNC), + SHN_UNDEF + 3); + + const char kExpectedStringTable[] = "\0superfunc\0awesomefunc\0megafunc"; + const size_t kExpectedStringTableSize = sizeof(kExpectedStringTable); + EXPECT_EQ(kExpectedStringTableSize, table.Size()); + string table_contents; + table.GetContents(&table_contents); + EXPECT_EQ(0, memcmp(kExpectedStringTable, + table_contents.c_str(), + table_contents.size())); + + const uint8_t kExpectedSymbolContents[] = { + // Symbol 1 + 0x01, 0x00, 0x00, 0x00, // name + 0x00, 0x10, 0x00, 0x10, // value + 0x10, 0x00, 0x00, 0x00, // size + ELF32_ST_INFO(STB_GLOBAL, STT_FUNC), // info + 0x00, // other + 0x01, 0x00, // shndx + // Symbol 2 + 0x0B, 0x00, 0x00, 0x00, // name + 0x00, 0x20, 0x00, 0x20, // value + 0x2f, 0x00, 0x00, 0x00, // size + ELF32_ST_INFO(STB_LOCAL, STT_FUNC), // info + 0x00, // other + 0x02, 0x00, // shndx + // Symbol 3 + 0x17, 0x00, 0x00, 0x00, // name + 0x00, 0x30, 0x00, 0x30, // value + 0x3c, 0x00, 0x00, 0x00, // size + ELF32_ST_INFO(STB_LOCAL, STT_FUNC), // info + 0x00, // other + 0x03, 0x00, // shndx + }; + const size_t kExpectedSymbolSize = sizeof(kExpectedSymbolContents); + EXPECT_EQ(kExpectedSymbolSize, syms.Size()); + + string symbol_contents; + syms.GetContents(&symbol_contents); + EXPECT_EQ(0, memcmp(kExpectedSymbolContents, + symbol_contents.c_str(), + symbol_contents.size())); +} + +template +class BasicElf : public Test {}; + +// Doesn't seem worthwhile writing the tests to be endian-independent +// when they're unlikely to ever be run on big-endian systems. +#if defined(__i386__) || defined(__x86_64__) + +typedef Types ElfClasses; + +TYPED_TEST_CASE(BasicElf, ElfClasses); + +TYPED_TEST(BasicElf, EmptyLE) { + typedef typename TypeParam::Ehdr Ehdr; + typedef typename TypeParam::Phdr Phdr; + typedef typename TypeParam::Shdr Shdr; + const size_t kStringTableSize = sizeof("\0.shstrtab"); + const size_t kStringTableAlign = 4 - kStringTableSize % 4; + const size_t kExpectedSize = sizeof(Ehdr) + + // Two sections, SHT_NULL + the section header string table. + 2 * sizeof(Shdr) + + kStringTableSize + kStringTableAlign; + + // It doesn't really matter that the machine type is right for the class. 
+ ELF elf(EM_386, TypeParam::kClass, kLittleEndian); + elf.Finish(); + EXPECT_EQ(kExpectedSize, elf.Size()); + + string contents; + ASSERT_TRUE(elf.GetContents(&contents)); + ASSERT_EQ(kExpectedSize, contents.size()); + const Ehdr* header = + reinterpret_cast(contents.data()); + const uint8_t kIdent[] = { + ELFMAG0, ELFMAG1, ELFMAG2, ELFMAG3, + TypeParam::kClass, ELFDATA2LSB, EV_CURRENT, ELFOSABI_SYSV, + 0, 0, 0, 0, 0, 0, 0, 0 + }; + EXPECT_EQ(0, memcmp(kIdent, header->e_ident, sizeof(kIdent))); + EXPECT_EQ(ET_EXEC, header->e_type); + EXPECT_EQ(EM_386, header->e_machine); + EXPECT_EQ(static_cast(EV_CURRENT), header->e_version); + EXPECT_EQ(0U, header->e_entry); + EXPECT_EQ(0U, header->e_phoff); + EXPECT_EQ(sizeof(Ehdr) + kStringTableSize + kStringTableAlign, + header->e_shoff); + EXPECT_EQ(0U, header->e_flags); + EXPECT_EQ(sizeof(Ehdr), header->e_ehsize); + EXPECT_EQ(sizeof(Phdr), header->e_phentsize); + EXPECT_EQ(0, header->e_phnum); + EXPECT_EQ(sizeof(Shdr), header->e_shentsize); + EXPECT_EQ(2, header->e_shnum); + EXPECT_EQ(1, header->e_shstrndx); + + const Shdr* shdr = + reinterpret_cast(contents.data() + header->e_shoff); + EXPECT_EQ(0U, shdr[0].sh_name); + EXPECT_EQ(static_cast(SHT_NULL), shdr[0].sh_type); + EXPECT_EQ(0U, shdr[0].sh_flags); + EXPECT_EQ(0U, shdr[0].sh_addr); + EXPECT_EQ(0U, shdr[0].sh_offset); + EXPECT_EQ(0U, shdr[0].sh_size); + EXPECT_EQ(0U, shdr[0].sh_link); + EXPECT_EQ(0U, shdr[0].sh_info); + EXPECT_EQ(0U, shdr[0].sh_addralign); + EXPECT_EQ(0U, shdr[0].sh_entsize); + + EXPECT_EQ(1U, shdr[1].sh_name); + EXPECT_EQ(static_cast(SHT_STRTAB), shdr[1].sh_type); + EXPECT_EQ(0U, shdr[1].sh_flags); + EXPECT_EQ(0U, shdr[1].sh_addr); + EXPECT_EQ(sizeof(Ehdr), shdr[1].sh_offset); + EXPECT_EQ(kStringTableSize, shdr[1].sh_size); + EXPECT_EQ(0U, shdr[1].sh_link); + EXPECT_EQ(0U, shdr[1].sh_info); + EXPECT_EQ(0U, shdr[1].sh_addralign); + EXPECT_EQ(0U, shdr[1].sh_entsize); +} + +TYPED_TEST(BasicElf, BasicLE) { + typedef typename TypeParam::Ehdr Ehdr; + typedef typename TypeParam::Phdr Phdr; + typedef typename TypeParam::Shdr Shdr; + const size_t kStringTableSize = sizeof("\0.text\0.bss\0.shstrtab"); + const size_t kStringTableAlign = 4 - kStringTableSize % 4; + const size_t kExpectedSize = sizeof(Ehdr) + + // Four sections, SHT_NULL + the section header string table + + // 4096 bytes of the size-aligned .text section + one program header. + sizeof(Phdr) + 4 * sizeof(Shdr) + 4096 + + kStringTableSize + kStringTableAlign; + + // It doesn't really matter that the machine type is right for the class. 
+ ELF elf(EM_386, TypeParam::kClass, kLittleEndian); + Section text(kLittleEndian); + text.Append(4094, 0); + int text_idx = elf.AddSection(".text", text, SHT_PROGBITS); + Section bss(kLittleEndian); + bss.Append(16, 0); + int bss_idx = elf.AddSection(".bss", bss, SHT_NOBITS); + elf.AddSegment(text_idx, bss_idx, PT_LOAD); + elf.Finish(); + EXPECT_EQ(kExpectedSize, elf.Size()); + + string contents; + ASSERT_TRUE(elf.GetContents(&contents)); + ASSERT_EQ(kExpectedSize, contents.size()); + const Ehdr* header = + reinterpret_cast(contents.data()); + const uint8_t kIdent[] = { + ELFMAG0, ELFMAG1, ELFMAG2, ELFMAG3, + TypeParam::kClass, ELFDATA2LSB, EV_CURRENT, ELFOSABI_SYSV, + 0, 0, 0, 0, 0, 0, 0, 0 + }; + EXPECT_EQ(0, memcmp(kIdent, header->e_ident, sizeof(kIdent))); + EXPECT_EQ(ET_EXEC, header->e_type); + EXPECT_EQ(EM_386, header->e_machine); + EXPECT_EQ(static_cast(EV_CURRENT), header->e_version); + EXPECT_EQ(0U, header->e_entry); + EXPECT_EQ(sizeof(Ehdr), header->e_phoff); + EXPECT_EQ(sizeof(Ehdr) + sizeof(Phdr) + 4096 + kStringTableSize + + kStringTableAlign, header->e_shoff); + EXPECT_EQ(0U, header->e_flags); + EXPECT_EQ(sizeof(Ehdr), header->e_ehsize); + EXPECT_EQ(sizeof(Phdr), header->e_phentsize); + EXPECT_EQ(1, header->e_phnum); + EXPECT_EQ(sizeof(Shdr), header->e_shentsize); + EXPECT_EQ(4, header->e_shnum); + EXPECT_EQ(3, header->e_shstrndx); + + const Shdr* shdr = + reinterpret_cast(contents.data() + header->e_shoff); + EXPECT_EQ(0U, shdr[0].sh_name); + EXPECT_EQ(static_cast(SHT_NULL), shdr[0].sh_type); + EXPECT_EQ(0U, shdr[0].sh_flags); + EXPECT_EQ(0U, shdr[0].sh_addr); + EXPECT_EQ(0U, shdr[0].sh_offset); + EXPECT_EQ(0U, shdr[0].sh_size); + EXPECT_EQ(0U, shdr[0].sh_link); + EXPECT_EQ(0U, shdr[0].sh_info); + EXPECT_EQ(0U, shdr[0].sh_addralign); + EXPECT_EQ(0U, shdr[0].sh_entsize); + + EXPECT_EQ(1U, shdr[1].sh_name); + EXPECT_EQ(static_cast(SHT_PROGBITS), shdr[1].sh_type); + EXPECT_EQ(0U, shdr[1].sh_flags); + EXPECT_EQ(0U, shdr[1].sh_addr); + EXPECT_EQ(sizeof(Ehdr) + sizeof(Phdr), shdr[1].sh_offset); + EXPECT_EQ(4094U, shdr[1].sh_size); + EXPECT_EQ(0U, shdr[1].sh_link); + EXPECT_EQ(0U, shdr[1].sh_info); + EXPECT_EQ(0U, shdr[1].sh_addralign); + EXPECT_EQ(0U, shdr[1].sh_entsize); + + EXPECT_EQ(sizeof("\0.text"), shdr[2].sh_name); + EXPECT_EQ(static_cast(SHT_NOBITS), shdr[2].sh_type); + EXPECT_EQ(0U, shdr[2].sh_flags); + EXPECT_EQ(0U, shdr[2].sh_addr); + EXPECT_EQ(0U, shdr[2].sh_offset); + EXPECT_EQ(16U, shdr[2].sh_size); + EXPECT_EQ(0U, shdr[2].sh_link); + EXPECT_EQ(0U, shdr[2].sh_info); + EXPECT_EQ(0U, shdr[2].sh_addralign); + EXPECT_EQ(0U, shdr[2].sh_entsize); + + EXPECT_EQ(sizeof("\0.text\0.bss"), shdr[3].sh_name); + EXPECT_EQ(static_cast(SHT_STRTAB), shdr[3].sh_type); + EXPECT_EQ(0U, shdr[3].sh_flags); + EXPECT_EQ(0U, shdr[3].sh_addr); + EXPECT_EQ(sizeof(Ehdr) + sizeof(Phdr) + 4096, shdr[3].sh_offset); + EXPECT_EQ(kStringTableSize, shdr[3].sh_size); + EXPECT_EQ(0U, shdr[3].sh_link); + EXPECT_EQ(0U, shdr[3].sh_info); + EXPECT_EQ(0U, shdr[3].sh_addralign); + EXPECT_EQ(0U, shdr[3].sh_entsize); + + const Phdr* phdr = + reinterpret_cast(contents.data() + header->e_phoff); + EXPECT_EQ(static_cast(PT_LOAD), phdr->p_type); + EXPECT_EQ(sizeof(Ehdr) + sizeof(Phdr), phdr->p_offset); + EXPECT_EQ(0U, phdr->p_vaddr); + EXPECT_EQ(0U, phdr->p_paddr); + EXPECT_EQ(4096U, phdr->p_filesz); + EXPECT_EQ(4096U + 16U, phdr->p_memsz); + EXPECT_EQ(0U, phdr->p_flags); + EXPECT_EQ(0U, phdr->p_align); +} + +class ElfNotesTest : public Test {}; + +TEST_F(ElfNotesTest, Empty) { + Notes notes(kLittleEndian); + 
string contents; + ASSERT_TRUE(notes.GetContents(&contents)); + EXPECT_EQ(0U, contents.size()); +} + +TEST_F(ElfNotesTest, Notes) { + Notes notes(kLittleEndian); + notes.AddNote(1, "Linux", reinterpret_cast("\x42\x02\0\0"), + 4); + notes.AddNote(2, "a", reinterpret_cast("foobar"), + sizeof("foobar") - 1); + + const uint8_t kExpectedNotesContents[] = { + // Note 1 + 0x06, 0x00, 0x00, 0x00, // name size, including terminating zero + 0x04, 0x00, 0x00, 0x00, // desc size + 0x01, 0x00, 0x00, 0x00, // type + 'L', 'i', 'n', 'u', 'x', 0x00, 0x00, 0x00, // padded "Linux" + 0x42, 0x02, 0x00, 0x00, // desc + // Note 2 + 0x02, 0x00, 0x00, 0x00, // name size + 0x06, 0x00, 0x00, 0x00, // desc size + 0x02, 0x00, 0x00, 0x00, // type + 'a', 0x00, 0x00, 0x00, // padded "a" + 'f', 'o', 'o', 'b', 'a', 'r', 0x00, 0x00, // padded "foobar" + }; + const size_t kExpectedNotesSize = sizeof(kExpectedNotesContents); + EXPECT_EQ(kExpectedNotesSize, notes.Size()); + + string notes_contents; + ASSERT_TRUE(notes.GetContents(¬es_contents)); + EXPECT_EQ(0, memcmp(kExpectedNotesContents, + notes_contents.data(), + notes_contents.size())); +} + +#endif // defined(__i386__) || defined(__x86_64__) diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/tests/auto_testfile.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/tests/auto_testfile.h new file mode 100644 index 0000000000..92fe017b92 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/tests/auto_testfile.h @@ -0,0 +1,124 @@ +// Copyright (c) 2013, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Utility class for creating a temporary file for unit tests +// that is deleted in the destructor. 
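For reference, the expected byte arrays in ElfNotesTest above follow the standard ELF note layout that Notes::AddNote emits: a 12-byte Elf32_Nhdr (n_namesz, n_descsz, n_type), then the NUL-terminated name padded to a 4-byte boundary, then the descriptor padded the same way. A small sketch of that size arithmetic (illustrative only; the helper names are my own and not part of this patch):

#include <stddef.h>

// Round a length up to the 4-byte alignment used between note fields.
static size_t AlignTo4(size_t n) { return (n + 3) & ~static_cast<size_t>(3); }

// Bytes one note occupies: 12-byte header + padded name (including its
// terminating NUL) + padded descriptor.
static size_t NoteSize(size_t name_len, size_t desc_len) {
  return 12 + AlignTo4(name_len + 1) + AlignTo4(desc_len);
}

// NoteSize(5, 4) == 12 + 8 + 4 == 24 bytes, matching the "Linux" note above;
// NoteSize(1, 6) == 12 + 4 + 8 == 24 bytes, matching the "a"/"foobar" note.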
+ +#ifndef GOOGLE_BREAKPAD_COMMON_LINUX_TESTS_AUTO_TESTFILE +#define GOOGLE_BREAKPAD_COMMON_LINUX_TESTS_AUTO_TESTFILE + +#include +#include + +#include + +#include "breakpad_googletest_includes.h" +#include "common/linux/eintr_wrapper.h" +#include "common/tests/auto_tempdir.h" + +namespace google_breakpad { + +class AutoTestFile { + public: + // Create a new empty test file. + // test_prefix: (input) test-specific prefix, can't be NULL. + explicit AutoTestFile(const char* test_prefix) { + Init(test_prefix); + } + + // Create a new test file, and fill it with initial data from a C string. + // The terminating zero is not written. + // test_prefix: (input) test-specific prefix, can't be NULL. + // text: (input) initial content. + AutoTestFile(const char* test_prefix, const char* text) { + Init(test_prefix); + if (fd_ >= 0) + WriteText(text, static_cast(strlen(text))); + } + + AutoTestFile(const char* test_prefix, const char* text, size_t text_len) { + Init(test_prefix); + if (fd_ >= 0) + WriteText(text, text_len); + } + + // Destroy test file on scope exit. + ~AutoTestFile() { + if (fd_ >= 0) { + close(fd_); + fd_ = -1; + } + } + + // Returns true iff the test file could be created properly. + // Useful in tests inside EXPECT_TRUE(file.IsOk()); + bool IsOk() { + return fd_ >= 0; + } + + // Returns the Posix file descriptor for the test file, or -1 + // If IsOk() returns false. Note: on Windows, this always returns -1. + int GetFd() { + return fd_; + } + + private: + void Init(const char* test_prefix) { + fd_ = -1; + char path_templ[PATH_MAX]; + int ret = snprintf(path_templ, sizeof(path_templ), + TEMPDIR "/%s-unittest.XXXXXX", + test_prefix); + if (ret >= static_cast(sizeof(path_templ))) + return; + + fd_ = mkstemp(path_templ); + if (fd_ < 0) + return; + + unlink(path_templ); + } + + void WriteText(const char* text, size_t text_len) { + ssize_t r = HANDLE_EINTR(write(fd_, text, text_len)); + if (r != static_cast(text_len)) { + close(fd_); + fd_ = -1; + return; + } + + lseek(fd_, 0, SEEK_SET); + } + + int fd_; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_COMMON_LINUX_TESTS_AUTO_TESTFILE diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/tests/crash_generator.cc b/TMessagesProj/jni/third_party/breakpad/src/common/linux/tests/crash_generator.cc new file mode 100644 index 0000000000..c9491f6f29 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/tests/crash_generator.cc @@ -0,0 +1,322 @@ +// Copyright (c) 2011, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// crash_generator.cc: Implement google_breakpad::CrashGenerator. +// See crash_generator.h for details. + +#include "common/linux/tests/crash_generator.h" + +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#if defined(__ANDROID__) +#include "common/android/testing/pthread_fixes.h" +#endif +#include "common/linux/eintr_wrapper.h" +#include "common/tests/auto_tempdir.h" +#include "common/tests/file_utils.h" +#include "common/using_std_string.h" + +namespace { + +struct ThreadData { + pthread_t thread; + pthread_barrier_t* barrier; + pid_t* thread_id_ptr; +}; + +const char* const kProcFilesToCopy[] = { + "auxv", "cmdline", "environ", "maps", "status" +}; +const size_t kNumProcFilesToCopy = + sizeof(kProcFilesToCopy) / sizeof(kProcFilesToCopy[0]); + +int gettid() { + // Glibc does not provide a wrapper for this. + return syscall(__NR_gettid); +} + +int tkill(pid_t tid, int sig) { + // Glibc does not provide a wrapper for this. + return syscall(__NR_tkill, tid, sig); +} + +// Core file size limit set to 1 MB, which is big enough for test purposes. +const rlim_t kCoreSizeLimit = 1024 * 1024; + +void *thread_function(void *data) { + ThreadData* thread_data = reinterpret_cast(data); + volatile pid_t thread_id = gettid(); + *(thread_data->thread_id_ptr) = thread_id; + int result = pthread_barrier_wait(thread_data->barrier); + if (result != 0 && result != PTHREAD_BARRIER_SERIAL_THREAD) { + perror("Failed to wait for sync barrier"); + exit(1); + } + while (true) { + pthread_yield(); + } +} + +} // namespace + +namespace google_breakpad { + +CrashGenerator::CrashGenerator() + : shared_memory_(NULL), + shared_memory_size_(0) { +} + +CrashGenerator::~CrashGenerator() { + UnmapSharedMemory(); +} + +bool CrashGenerator::HasDefaultCorePattern() const { + char buffer[8]; + ssize_t buffer_size = sizeof(buffer); + return ReadFile("/proc/sys/kernel/core_pattern", buffer, &buffer_size) && + buffer_size == 5 && memcmp(buffer, "core", 4) == 0; +} + +string CrashGenerator::GetCoreFilePath() const { + return temp_dir_.path() + "/core"; +} + +string CrashGenerator::GetDirectoryOfProcFilesCopy() const { + return temp_dir_.path() + "/proc"; +} + +pid_t CrashGenerator::GetThreadId(unsigned index) const { + return reinterpret_cast(shared_memory_)[index]; +} + +pid_t* CrashGenerator::GetThreadIdPointer(unsigned index) { + return reinterpret_cast(shared_memory_) + index; +} + +bool CrashGenerator::MapSharedMemory(size_t memory_size) { + if (!UnmapSharedMemory()) + return false; + + void* mapped_memory = mmap(0, memory_size, PROT_READ | PROT_WRITE, + MAP_SHARED | MAP_ANONYMOUS, -1, 0); + if (mapped_memory == MAP_FAILED) { + perror("CrashGenerator: Failed to map shared memory"); + return false; + } + + memset(mapped_memory, 0, memory_size); + shared_memory_ = mapped_memory; + shared_memory_size_ = memory_size; + return true; +} + +bool CrashGenerator::UnmapSharedMemory() { + if (!shared_memory_) + return true; + + if (munmap(shared_memory_, 
shared_memory_size_) == 0) { + shared_memory_ = NULL; + shared_memory_size_ = 0; + return true; + } + + perror("CrashGenerator: Failed to unmap shared memory"); + return false; +} + +bool CrashGenerator::SetCoreFileSizeLimit(rlim_t limit) const { + struct rlimit limits = { limit, limit }; + if (setrlimit(RLIMIT_CORE, &limits) == -1) { + perror("CrashGenerator: Failed to set core file size limit"); + return false; + } + return true; +} + +bool CrashGenerator::CreateChildCrash( + unsigned num_threads, unsigned crash_thread, int crash_signal, + pid_t* child_pid) { + if (num_threads == 0 || crash_thread >= num_threads) { + fprintf(stderr, "CrashGenerator: Invalid thread counts; num_threads=%u" + " crash_thread=%u\n", num_threads, crash_thread); + return false; + } + + if (!MapSharedMemory(num_threads * sizeof(pid_t))) { + perror("CrashGenerator: Unable to map shared memory"); + return false; + } + + pid_t pid = fork(); + if (pid == 0) { + if (chdir(temp_dir_.path().c_str()) == -1) { + perror("CrashGenerator: Failed to change directory"); + exit(1); + } + if (SetCoreFileSizeLimit(kCoreSizeLimit)) { + CreateThreadsInChildProcess(num_threads); + string proc_dir = GetDirectoryOfProcFilesCopy(); + if (mkdir(proc_dir.c_str(), 0755) == -1) { + perror("CrashGenerator: Failed to create proc directory"); + exit(1); + } + if (!CopyProcFiles(getpid(), proc_dir.c_str())) { + fprintf(stderr, "CrashGenerator: Failed to copy proc files\n"); + exit(1); + } + // On Android the signal sometimes doesn't seem to get sent even though + // tkill returns '0'. Retry a couple of times if the signal doesn't get + // through on the first go: + // https://code.google.com/p/google-breakpad/issues/detail?id=579 +#if defined(__ANDROID__) + const int kRetries = 60; + const unsigned int kSleepTimeInSeconds = 1; +#else + const int kRetries = 1; + const unsigned int kSleepTimeInSeconds = 600; +#endif + for (int i = 0; i < kRetries; i++) { + if (tkill(*GetThreadIdPointer(crash_thread), crash_signal) == -1) { + perror("CrashGenerator: Failed to kill thread by signal"); + } else { + // At this point, we've queued the signal for delivery, but there's no + // guarantee when it'll be delivered. We don't want the main thread to + // race and exit before the thread we signaled is processed. So sleep + // long enough that we won't flake even under fairly high load. + // TODO: See if we can't be a bit more deterministic. There doesn't + // seem to be an API to check on signal delivery status, so we can't + // really poll and wait for the kernel to declare the signal has been + // delivered. If it has, and things worked, we'd be killed, so the + // sleep length doesn't really matter. + sleep(kSleepTimeInSeconds); + } + } + } else { + perror("CrashGenerator: Failed to set core limit"); + } + exit(1); + } else if (pid == -1) { + perror("CrashGenerator: Failed to create child process"); + return false; + } + + int status; + if (HANDLE_EINTR(waitpid(pid, &status, 0)) == -1) { + perror("CrashGenerator: Failed to wait for child process"); + return false; + } + if (!WIFSIGNALED(status) || WTERMSIG(status) != crash_signal) { + fprintf(stderr, "CrashGenerator: Child process not killed by the expected signal\n" + " exit status=0x%x pid=%u signaled=%s sig=%d expected=%d\n", + status, pid, WIFSIGNALED(status) ? 
"true" : "false", + WTERMSIG(status), crash_signal); + return false; + } + + if (child_pid) + *child_pid = pid; + return true; +} + +bool CrashGenerator::CopyProcFiles(pid_t pid, const char* path) const { + char from_path[PATH_MAX], to_path[PATH_MAX]; + for (size_t i = 0; i < kNumProcFilesToCopy; ++i) { + int num_chars = snprintf(from_path, PATH_MAX, "/proc/%d/%s", + pid, kProcFilesToCopy[i]); + if (num_chars < 0 || num_chars >= PATH_MAX) + return false; + + num_chars = snprintf(to_path, PATH_MAX, "%s/%s", + path, kProcFilesToCopy[i]); + if (num_chars < 0 || num_chars >= PATH_MAX) + return false; + + if (!CopyFile(from_path, to_path)) + return false; + } + return true; +} + +void CrashGenerator::CreateThreadsInChildProcess(unsigned num_threads) { + *GetThreadIdPointer(0) = getpid(); + + if (num_threads <= 1) + return; + + // This method does not clean up any pthread resource, as the process + // is expected to be killed anyway. + ThreadData* thread_data = new ThreadData[num_threads]; + + // Create detached threads so that we do not worry about pthread_join() + // later being called or not. + pthread_attr_t thread_attributes; + if (pthread_attr_init(&thread_attributes) != 0 || + pthread_attr_setdetachstate(&thread_attributes, + PTHREAD_CREATE_DETACHED) != 0) { + fprintf(stderr, "CrashGenerator: Failed to initialize thread attribute\n"); + exit(1); + } + + pthread_barrier_t thread_barrier; + if (pthread_barrier_init(&thread_barrier, NULL, num_threads) != 0) { + fprintf(stderr, "CrashGenerator: Failed to initialize thread barrier\n"); + exit(1); + } + + for (unsigned i = 1; i < num_threads; ++i) { + thread_data[i].barrier = &thread_barrier; + thread_data[i].thread_id_ptr = GetThreadIdPointer(i); + if (pthread_create(&thread_data[i].thread, &thread_attributes, + thread_function, &thread_data[i]) != 0) { + fprintf(stderr, "CrashGenerator: Failed to create thread %d\n", i); + exit(1); + } + } + + int result = pthread_barrier_wait(&thread_barrier); + if (result != 0 && result != PTHREAD_BARRIER_SERIAL_THREAD) { + fprintf(stderr, "CrashGenerator: Failed to wait for thread barrier\n"); + exit(1); + } + + pthread_barrier_destroy(&thread_barrier); + pthread_attr_destroy(&thread_attributes); + delete[] thread_data; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/linux/tests/crash_generator.h b/TMessagesProj/jni/third_party/breakpad/src/common/linux/tests/crash_generator.h new file mode 100644 index 0000000000..7e2fcbf98a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/linux/tests/crash_generator.h @@ -0,0 +1,117 @@ +// Copyright (c) 2011, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// crash_generator.h: Define the google_breakpad::CrashGenerator class, +// which is used to generate a crash (and a core dump file) for testing. + +#ifndef COMMON_LINUX_TESTS_CRASH_GENERATOR_H_ +#define COMMON_LINUX_TESTS_CRASH_GENERATOR_H_ + +#include + +#include + +#include "common/tests/auto_tempdir.h" +#include "common/using_std_string.h" + +namespace google_breakpad { + +// A utility class for generating a crash (and a core dump file) for +// testing. It creates a child process with the specified number of +// threads, which is then termainated by the specified signal. A core +// dump file is expected to be created upon the termination of the child +// process, which can then be used for testing code that processes core +// dump files. +class CrashGenerator { + public: + CrashGenerator(); + + ~CrashGenerator(); + + // Returns true if a core dump file named 'core' will be generated in + // the current directory for a test that produces a crash by checking + // if /proc/sys/kernel/core_pattern has the default value 'core'. + bool HasDefaultCorePattern() const; + + // Returns the expected path of the core dump file. + string GetCoreFilePath() const; + + // Returns the directory of a copy of proc files of the child process. + string GetDirectoryOfProcFilesCopy() const; + + // Creates a crash (and a core dump file) by creating a child process with + // |num_threads| threads, and the terminating the child process by sending + // a signal with number |crash_signal| to the |crash_thread|-th thread. + // Returns true on success. + bool CreateChildCrash(unsigned num_threads, unsigned crash_thread, + int crash_signal, pid_t* child_pid); + + // Returns the thread ID of the |index|-th thread in the child process. + // This method does not validate |index|. + pid_t GetThreadId(unsigned index) const; + + private: + // Copies the following proc files of the process with |pid| to the directory + // at |path|: auxv, cmdline, environ, maps, status + // The directory must have been created. Returns true on success. + bool CopyProcFiles(pid_t pid, const char* path) const; + + // Creates |num_threads| threads in the child process. + void CreateThreadsInChildProcess(unsigned num_threads); + + // Sets the maximum size of core dump file (both the soft and hard limit) + // to |limit| bytes. Returns true on success. + bool SetCoreFileSizeLimit(rlim_t limit) const; + + // Creates a shared memory of |memory_size| bytes for communicating thread + // IDs between the parent and child process. Returns true on success. + bool MapSharedMemory(size_t memory_size); + + // Releases any shared memory created by MapSharedMemory(). Returns true on + // success. 
+ bool UnmapSharedMemory(); + + // Returns the pointer to the thread ID of the |index|-th thread in the child + // process. This method does not validate |index|. + pid_t* GetThreadIdPointer(unsigned index); + + // Temporary directory in which a core file is generated. + AutoTempDir temp_dir_; + + // Shared memory for communicating thread IDs between the parent and + // child process. + void* shared_memory_; + + // Number of bytes mapped for |shared_memory_|. + size_t shared_memory_size_; +}; + +} // namespace google_breakpad + +#endif // COMMON_LINUX_TESTS_CRASH_GENERATOR_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/md5.cc b/TMessagesProj/jni/third_party/breakpad/src/common/md5.cc new file mode 100644 index 0000000000..a0d9a1bdd8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/md5.cc @@ -0,0 +1,251 @@ +/* + * written by Colin Plumb in 1993, no copyright is claimed. + * This code is in the public domain; do with it what you wish. + * + * Equivalent code is available from RSA Data Security, Inc. + * This code has been tested against that, and is equivalent, + * except that you don't need to include two pages of legalese + * with every copy. + * + * To compute the message digest of a chunk of bytes, declare an + * MD5Context structure, pass it to MD5Init, call MD5Update as + * needed on buffers full of bytes, and then call MD5Final, which + * will fill a supplied 16-byte array with the digest. + */ + +#include + +#include "common/md5.h" + +namespace google_breakpad { + +#ifndef WORDS_BIGENDIAN +#define byteReverse(buf, len) /* Nothing */ +#else +/* + * Note: this code is harmless on little-endian machines. + */ +static void byteReverse(unsigned char *buf, unsigned longs) +{ + u32 t; + do { + t = (u32) ((unsigned) buf[3] << 8 | buf[2]) << 16 | + ((unsigned) buf[1] << 8 | buf[0]); + *(u32 *) buf = t; + buf += 4; + } while (--longs); +} +#endif + +static void MD5Transform(u32 buf[4], u32 const in[16]); + +/* + * Start MD5 accumulation. Set bit count to 0 and buffer to mysterious + * initialization constants. + */ +void MD5Init(struct MD5Context *ctx) +{ + ctx->buf[0] = 0x67452301; + ctx->buf[1] = 0xefcdab89; + ctx->buf[2] = 0x98badcfe; + ctx->buf[3] = 0x10325476; + + ctx->bits[0] = 0; + ctx->bits[1] = 0; +} + +/* + * Update context to reflect the concatenation of another buffer full + * of bytes. + */ +void MD5Update(struct MD5Context *ctx, unsigned char const *buf, size_t len) +{ + u32 t; + + /* Update bitcount */ + + t = ctx->bits[0]; + if ((ctx->bits[0] = t + ((u32) len << 3)) < t) + ctx->bits[1]++; /* Carry from low to high */ + ctx->bits[1] += len >> 29; + + t = (t >> 3) & 0x3f; /* Bytes already in shsInfo->data */ + + /* Handle any leading odd-sized chunks */ + + if (t) { + unsigned char *p = (unsigned char *) ctx->in + t; + + t = 64 - t; + if (len < t) { + memcpy(p, buf, len); + return; + } + memcpy(p, buf, t); + byteReverse(ctx->in, 16); + MD5Transform(ctx->buf, (u32 *) ctx->in); + buf += t; + len -= t; + } + /* Process data in 64-byte chunks */ + + while (len >= 64) { + memcpy(ctx->in, buf, 64); + byteReverse(ctx->in, 16); + MD5Transform(ctx->buf, (u32 *) ctx->in); + buf += 64; + len -= 64; + } + + /* Handle any remaining bytes of data. 
*/ + + memcpy(ctx->in, buf, len); +} + +/* + * Final wrapup - pad to 64-byte boundary with the bit pattern + * 1 0* (64-bit count of bits processed, MSB-first) + */ +void MD5Final(unsigned char digest[16], struct MD5Context *ctx) +{ + unsigned count; + unsigned char *p; + + /* Compute number of bytes mod 64 */ + count = (ctx->bits[0] >> 3) & 0x3F; + + /* Set the first char of padding to 0x80. This is safe since there is + always at least one byte free */ + p = ctx->in + count; + *p++ = 0x80; + + /* Bytes of padding needed to make 64 bytes */ + count = 64 - 1 - count; + + /* Pad out to 56 mod 64 */ + if (count < 8) { + /* Two lots of padding: Pad the first block to 64 bytes */ + memset(p, 0, count); + byteReverse(ctx->in, 16); + MD5Transform(ctx->buf, (u32 *) ctx->in); + + /* Now fill the next block with 56 bytes */ + memset(ctx->in, 0, 56); + } else { + /* Pad block to 56 bytes */ + memset(p, 0, count - 8); + } + byteReverse(ctx->in, 14); + + /* Append length in bits and transform */ + ((u32 *) ctx->in)[14] = ctx->bits[0]; + ((u32 *) ctx->in)[15] = ctx->bits[1]; + + MD5Transform(ctx->buf, (u32 *) ctx->in); + byteReverse((unsigned char *) ctx->buf, 4); + memcpy(digest, ctx->buf, 16); + memset(ctx, 0, sizeof(*ctx)); /* In case it's sensitive */ +} + +/* The four core functions - F1 is optimized somewhat */ + +/* #define F1(x, y, z) (x & y | ~x & z) */ +#define F1(x, y, z) (z ^ (x & (y ^ z))) +#define F2(x, y, z) F1(z, x, y) +#define F3(x, y, z) (x ^ y ^ z) +#define F4(x, y, z) (y ^ (x | ~z)) + +/* This is the central step in the MD5 algorithm. */ +#define MD5STEP(f, w, x, y, z, data, s) \ + ( w += f(x, y, z) + data, w = w<<s | w>>(32-s), w += x ) + +/* + * The core of the MD5 algorithm, this alters an existing MD5 hash to + * reflect the addition of 16 longwords of new data. MD5Update blocks + * the data and converts bytes into longwords for this routine.
+ */ +static void MD5Transform(u32 buf[4], u32 const in[16]) +{ + u32 a, b, c, d; + + a = buf[0]; + b = buf[1]; + c = buf[2]; + d = buf[3]; + + MD5STEP(F1, a, b, c, d, in[0] + 0xd76aa478, 7); + MD5STEP(F1, d, a, b, c, in[1] + 0xe8c7b756, 12); + MD5STEP(F1, c, d, a, b, in[2] + 0x242070db, 17); + MD5STEP(F1, b, c, d, a, in[3] + 0xc1bdceee, 22); + MD5STEP(F1, a, b, c, d, in[4] + 0xf57c0faf, 7); + MD5STEP(F1, d, a, b, c, in[5] + 0x4787c62a, 12); + MD5STEP(F1, c, d, a, b, in[6] + 0xa8304613, 17); + MD5STEP(F1, b, c, d, a, in[7] + 0xfd469501, 22); + MD5STEP(F1, a, b, c, d, in[8] + 0x698098d8, 7); + MD5STEP(F1, d, a, b, c, in[9] + 0x8b44f7af, 12); + MD5STEP(F1, c, d, a, b, in[10] + 0xffff5bb1, 17); + MD5STEP(F1, b, c, d, a, in[11] + 0x895cd7be, 22); + MD5STEP(F1, a, b, c, d, in[12] + 0x6b901122, 7); + MD5STEP(F1, d, a, b, c, in[13] + 0xfd987193, 12); + MD5STEP(F1, c, d, a, b, in[14] + 0xa679438e, 17); + MD5STEP(F1, b, c, d, a, in[15] + 0x49b40821, 22); + + MD5STEP(F2, a, b, c, d, in[1] + 0xf61e2562, 5); + MD5STEP(F2, d, a, b, c, in[6] + 0xc040b340, 9); + MD5STEP(F2, c, d, a, b, in[11] + 0x265e5a51, 14); + MD5STEP(F2, b, c, d, a, in[0] + 0xe9b6c7aa, 20); + MD5STEP(F2, a, b, c, d, in[5] + 0xd62f105d, 5); + MD5STEP(F2, d, a, b, c, in[10] + 0x02441453, 9); + MD5STEP(F2, c, d, a, b, in[15] + 0xd8a1e681, 14); + MD5STEP(F2, b, c, d, a, in[4] + 0xe7d3fbc8, 20); + MD5STEP(F2, a, b, c, d, in[9] + 0x21e1cde6, 5); + MD5STEP(F2, d, a, b, c, in[14] + 0xc33707d6, 9); + MD5STEP(F2, c, d, a, b, in[3] + 0xf4d50d87, 14); + MD5STEP(F2, b, c, d, a, in[8] + 0x455a14ed, 20); + MD5STEP(F2, a, b, c, d, in[13] + 0xa9e3e905, 5); + MD5STEP(F2, d, a, b, c, in[2] + 0xfcefa3f8, 9); + MD5STEP(F2, c, d, a, b, in[7] + 0x676f02d9, 14); + MD5STEP(F2, b, c, d, a, in[12] + 0x8d2a4c8a, 20); + + MD5STEP(F3, a, b, c, d, in[5] + 0xfffa3942, 4); + MD5STEP(F3, d, a, b, c, in[8] + 0x8771f681, 11); + MD5STEP(F3, c, d, a, b, in[11] + 0x6d9d6122, 16); + MD5STEP(F3, b, c, d, a, in[14] + 0xfde5380c, 23); + MD5STEP(F3, a, b, c, d, in[1] + 0xa4beea44, 4); + MD5STEP(F3, d, a, b, c, in[4] + 0x4bdecfa9, 11); + MD5STEP(F3, c, d, a, b, in[7] + 0xf6bb4b60, 16); + MD5STEP(F3, b, c, d, a, in[10] + 0xbebfbc70, 23); + MD5STEP(F3, a, b, c, d, in[13] + 0x289b7ec6, 4); + MD5STEP(F3, d, a, b, c, in[0] + 0xeaa127fa, 11); + MD5STEP(F3, c, d, a, b, in[3] + 0xd4ef3085, 16); + MD5STEP(F3, b, c, d, a, in[6] + 0x04881d05, 23); + MD5STEP(F3, a, b, c, d, in[9] + 0xd9d4d039, 4); + MD5STEP(F3, d, a, b, c, in[12] + 0xe6db99e5, 11); + MD5STEP(F3, c, d, a, b, in[15] + 0x1fa27cf8, 16); + MD5STEP(F3, b, c, d, a, in[2] + 0xc4ac5665, 23); + + MD5STEP(F4, a, b, c, d, in[0] + 0xf4292244, 6); + MD5STEP(F4, d, a, b, c, in[7] + 0x432aff97, 10); + MD5STEP(F4, c, d, a, b, in[14] + 0xab9423a7, 15); + MD5STEP(F4, b, c, d, a, in[5] + 0xfc93a039, 21); + MD5STEP(F4, a, b, c, d, in[12] + 0x655b59c3, 6); + MD5STEP(F4, d, a, b, c, in[3] + 0x8f0ccc92, 10); + MD5STEP(F4, c, d, a, b, in[10] + 0xffeff47d, 15); + MD5STEP(F4, b, c, d, a, in[1] + 0x85845dd1, 21); + MD5STEP(F4, a, b, c, d, in[8] + 0x6fa87e4f, 6); + MD5STEP(F4, d, a, b, c, in[15] + 0xfe2ce6e0, 10); + MD5STEP(F4, c, d, a, b, in[6] + 0xa3014314, 15); + MD5STEP(F4, b, c, d, a, in[13] + 0x4e0811a1, 21); + MD5STEP(F4, a, b, c, d, in[4] + 0xf7537e82, 6); + MD5STEP(F4, d, a, b, c, in[11] + 0xbd3af235, 10); + MD5STEP(F4, c, d, a, b, in[2] + 0x2ad7d2bb, 15); + MD5STEP(F4, b, c, d, a, in[9] + 0xeb86d391, 21); + + buf[0] += a; + buf[1] += b; + buf[2] += c; + buf[3] += d; +} + +} // namespace google_breakpad + diff --git 
a/TMessagesProj/jni/third_party/breakpad/src/common/md5.h b/TMessagesProj/jni/third_party/breakpad/src/common/md5.h new file mode 100644 index 0000000000..2ab0ab95ae --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/md5.h @@ -0,0 +1,27 @@ +// Copyright 2007 Google Inc. All Rights Reserved. +// Author: liuli@google.com (Liu Li) +#ifndef COMMON_MD5_H__ +#define COMMON_MD5_H__ + +#include + +namespace google_breakpad { + +typedef uint32_t u32; +typedef uint8_t u8; + +struct MD5Context { + u32 buf[4]; + u32 bits[2]; + u8 in[64]; +}; + +void MD5Init(struct MD5Context *ctx); + +void MD5Update(struct MD5Context *ctx, unsigned char const *buf, size_t len); + +void MD5Final(unsigned char digest[16], struct MD5Context *ctx); + +} // namespace google_breakpad + +#endif // COMMON_MD5_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/memory.h b/TMessagesProj/jni/third_party/breakpad/src/common/memory.h new file mode 100644 index 0000000000..d6aa137d35 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/memory.h @@ -0,0 +1,212 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef GOOGLE_BREAKPAD_COMMON_MEMORY_H_ +#define GOOGLE_BREAKPAD_COMMON_MEMORY_H_ + +#include +#include +#include +#include + +#include +#include + +#if defined(MEMORY_SANITIZER) +#include +#endif + +#ifdef __APPLE__ +#define sys_mmap mmap +#define sys_mmap2 mmap +#define sys_munmap munmap +#define MAP_ANONYMOUS MAP_ANON +#else +#include "third_party/lss/linux_syscall_support.h" +#endif + +namespace google_breakpad { + +// This is very simple allocator which fetches pages from the kernel directly. +// Thus, it can be used even when the heap may be corrupted. +// +// There is no free operation. The pages are only freed when the object is +// destroyed. 
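The md5.h interface above (together with md5.cc) is the classic three-call digest API: initialize a context, feed it buffers with MD5Update, then finalize into a 16-byte digest. A minimal usage sketch, not part of the vendored sources; the payload and hex printing are purely illustrative:

// Editorial sketch, not part of the vendored sources: driving the MD5
// helpers declared in common/md5.h. The payload below is illustrative.
#include <cstdio>
#include <cstring>

#include "common/md5.h"

int main() {
  static const char kPayload[] = "minidump bytes";
  unsigned char digest[16];

  google_breakpad::MD5Context ctx;
  google_breakpad::MD5Init(&ctx);
  google_breakpad::MD5Update(
      &ctx, reinterpret_cast<const unsigned char*>(kPayload),
      strlen(kPayload));
  google_breakpad::MD5Final(digest, &ctx);

  for (size_t i = 0; i < sizeof(digest); ++i)
    printf("%02x", digest[i]);
  printf("\n");
  return 0;
}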
+class PageAllocator { + public: + PageAllocator() + : page_size_(getpagesize()), + last_(NULL), + current_page_(NULL), + page_offset_(0) { + } + + ~PageAllocator() { + FreeAll(); + } + + void *Alloc(size_t bytes) { + if (!bytes) + return NULL; + + if (current_page_ && page_size_ - page_offset_ >= bytes) { + uint8_t *const ret = current_page_ + page_offset_; + page_offset_ += bytes; + if (page_offset_ == page_size_) { + page_offset_ = 0; + current_page_ = NULL; + } + + return ret; + } + + const size_t pages = + (bytes + sizeof(PageHeader) + page_size_ - 1) / page_size_; + uint8_t *const ret = GetNPages(pages); + if (!ret) + return NULL; + + page_offset_ = + (page_size_ - (page_size_ * pages - (bytes + sizeof(PageHeader)))) % + page_size_; + current_page_ = page_offset_ ? ret + page_size_ * (pages - 1) : NULL; + + return ret + sizeof(PageHeader); + } + + // Checks whether the page allocator owns the passed-in pointer. + // This method exists for testing pursposes only. + bool OwnsPointer(const void* p) { + for (PageHeader* header = last_; header; header = header->next) { + const char* current = reinterpret_cast(header); + if ((p >= current) && (p < current + header->num_pages * page_size_)) + return true; + } + + return false; + } + + private: + uint8_t *GetNPages(size_t num_pages) { +#if defined(__x86_64__) || defined(__aarch64__) || defined(__aarch64__) || \ + ((defined(__mips__) && _MIPS_SIM == _ABI64)) + void *a = sys_mmap(NULL, page_size_ * num_pages, PROT_READ | PROT_WRITE, + MAP_PRIVATE | MAP_ANONYMOUS, -1, 0); +#else + void *a = sys_mmap2(NULL, page_size_ * num_pages, PROT_READ | PROT_WRITE, + MAP_PRIVATE | MAP_ANONYMOUS, -1, 0); +#endif + if (a == MAP_FAILED) + return NULL; + +#if defined(MEMORY_SANITIZER) + // We need to indicate to MSan that memory allocated through sys_mmap is + // initialized, since linux_syscall_support.h doesn't have MSan hooks. + __msan_unpoison(a, page_size_ * num_pages); +#endif + + struct PageHeader *header = reinterpret_cast(a); + header->next = last_; + header->num_pages = num_pages; + last_ = header; + + return reinterpret_cast(a); + } + + void FreeAll() { + PageHeader *next; + + for (PageHeader *cur = last_; cur; cur = next) { + next = cur->next; + sys_munmap(cur, cur->num_pages * page_size_); + } + } + + struct PageHeader { + PageHeader *next; // pointer to the start of the next set of pages. + size_t num_pages; // the number of pages in this set. + }; + + const size_t page_size_; + PageHeader *last_; + uint8_t *current_page_; + size_t page_offset_; +}; + +// Wrapper to use with STL containers +template +struct PageStdAllocator : public std::allocator { + typedef typename std::allocator::pointer pointer; + typedef typename std::allocator::size_type size_type; + + explicit PageStdAllocator(PageAllocator& allocator): allocator_(allocator) {} + template PageStdAllocator(const PageStdAllocator& other) + : allocator_(other.allocator_) {} + + inline pointer allocate(size_type n, const void* = 0) { + return static_cast(allocator_.Alloc(sizeof(T) * n)); + } + + inline void deallocate(pointer, size_type) { + // The PageAllocator doesn't free. + } + + template struct rebind { + typedef PageStdAllocator other; + }; + + private: + // Silly workaround for the gcc from Android's ndk (gcc 4.6), which will + // otherwise complain that `other.allocator_` is private in the constructor + // code. 
+ template friend struct PageStdAllocator; + + PageAllocator& allocator_; +}; + +// A wasteful vector is a std::vector, except that it allocates memory from a +// PageAllocator. It's wasteful because, when resizing, it always allocates a +// whole new array since the PageAllocator doesn't support realloc. +template +class wasteful_vector : public std::vector > { + public: + wasteful_vector(PageAllocator* allocator, unsigned size_hint = 16) + : std::vector >(PageStdAllocator(*allocator)) { + std::vector >::reserve(size_hint); + } +}; + +} // namespace google_breakpad + +inline void* operator new(size_t nbytes, + google_breakpad::PageAllocator& allocator) { + return allocator.Alloc(nbytes); +} + +#endif // GOOGLE_BREAKPAD_COMMON_MEMORY_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/memory_range.h b/TMessagesProj/jni/third_party/breakpad/src/common/memory_range.h new file mode 100644 index 0000000000..41dd2da622 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/memory_range.h @@ -0,0 +1,145 @@ +// Copyright (c) 2011, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// memory_range.h: Define the google_breakpad::MemoryRange class, which +// is a lightweight wrapper with a pointer and a length to encapsulate +// a contiguous range of memory. + +#ifndef COMMON_MEMORY_RANGE_H_ +#define COMMON_MEMORY_RANGE_H_ + +#include + +#include "google_breakpad/common/breakpad_types.h" + +namespace google_breakpad { + +// A lightweight wrapper with a pointer and a length to encapsulate a +// contiguous range of memory. It provides helper methods for checked +// access of a subrange of the memory. Its implemementation does not +// allocate memory or call into libc functions, and is thus safer to use +// in a crashed environment. +class MemoryRange { + public: + MemoryRange() : data_(NULL), length_(0) {} + + MemoryRange(const void* data, size_t length) { + Set(data, length); + } + + // Returns true if this memory range contains no data. 
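PageAllocator and wasteful_vector in common/memory.h above are built for code running inside a crashed process: allocations come straight from freshly mmap'd pages, there is no per-allocation free, and everything is unmapped only when the allocator is destroyed. A minimal usage sketch, not part of the vendored sources; the sizes and thread id are illustrative:

// Editorial sketch, not part of the vendored sources: typical crash-time use
// of PageAllocator and wasteful_vector from common/memory.h.
#include <stdint.h>
#include <sys/types.h>

#include "common/memory.h"

void SketchCollectThreadIds() {
  google_breakpad::PageAllocator allocator;

  // Raw bytes straight from mmap'd pages; malloc/free are never touched.
  uint8_t* scratch = reinterpret_cast<uint8_t*>(allocator.Alloc(128));
  (void) scratch;

  // A vector whose storage comes from the same allocator. "Wasteful" because
  // every growth allocates a fresh array (the allocator has no realloc).
  google_breakpad::wasteful_vector<pid_t> tids(&allocator, /*size_hint=*/32);
  tids.push_back(1234);  // illustrative thread id

  // No explicit cleanup: the allocator munmap()s every page it handed out
  // when it goes out of scope.
}

The trade-off is deliberate: staying off malloc/free keeps the dumper usable even when the heap metadata of the crashed process is corrupt, which is exactly the situation the class comment describes.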
+ bool IsEmpty() const { + // Set() guarantees that |length_| is zero if |data_| is NULL. + return length_ == 0; + } + + // Resets to an empty range. + void Reset() { + data_ = NULL; + length_ = 0; + } + + // Sets this memory range to point to |data| and its length to |length|. + void Set(const void* data, size_t length) { + data_ = reinterpret_cast(data); + // Always set |length_| to zero if |data_| is NULL. + length_ = data ? length : 0; + } + + // Returns true if this range covers a subrange of |sub_length| bytes + // at |sub_offset| bytes of this memory range, or false otherwise. + bool Covers(size_t sub_offset, size_t sub_length) const { + // The following checks verify that: + // 1. sub_offset is within [ 0 .. length_ - 1 ] + // 2. sub_offset + sub_length is within + // [ sub_offset .. length_ ] + return sub_offset < length_ && + sub_offset + sub_length >= sub_offset && + sub_offset + sub_length <= length_; + } + + // Returns a raw data pointer to a subrange of |sub_length| bytes at + // |sub_offset| bytes of this memory range, or NULL if the subrange + // is out of bounds. + const void* GetData(size_t sub_offset, size_t sub_length) const { + return Covers(sub_offset, sub_length) ? (data_ + sub_offset) : NULL; + } + + // Same as the two-argument version of GetData() but uses sizeof(DataType) + // as the subrange length and returns an |DataType| pointer for convenience. + template + const DataType* GetData(size_t sub_offset) const { + return reinterpret_cast( + GetData(sub_offset, sizeof(DataType))); + } + + // Returns a raw pointer to the |element_index|-th element of an array + // of elements of length |element_size| starting at |sub_offset| bytes + // of this memory range, or NULL if the element is out of bounds. + const void* GetArrayElement(size_t element_offset, + size_t element_size, + unsigned element_index) const { + size_t sub_offset = element_offset + element_index * element_size; + return GetData(sub_offset, element_size); + } + + // Same as the three-argument version of GetArrayElement() but deduces + // the element size using sizeof(ElementType) and returns an |ElementType| + // pointer for convenience. + template + const ElementType* GetArrayElement(size_t element_offset, + unsigned element_index) const { + return reinterpret_cast( + GetArrayElement(element_offset, sizeof(ElementType), element_index)); + } + + // Returns a subrange of |sub_length| bytes at |sub_offset| bytes of + // this memory range, or an empty range if the subrange is out of bounds. + MemoryRange Subrange(size_t sub_offset, size_t sub_length) const { + return Covers(sub_offset, sub_length) ? + MemoryRange(data_ + sub_offset, sub_length) : MemoryRange(); + } + + // Returns a pointer to the beginning of this memory range. + const uint8_t* data() const { return data_; } + + // Returns the length, in bytes, of this memory range. + size_t length() const { return length_; } + + private: + // Pointer to the beginning of this memory range. + const uint8_t* data_; + + // Length, in bytes, of this memory range. + size_t length_; +}; + +} // namespace google_breakpad + +#endif // COMMON_MEMORY_RANGE_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/memory_range_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/memory_range_unittest.cc new file mode 100644 index 0000000000..f6cf8c8b2a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/memory_range_unittest.cc @@ -0,0 +1,193 @@ +// Copyright (c) 2011, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// memory_range_unittest.cc: Unit tests for google_breakpad::MemoryRange. + +#include "breakpad_googletest_includes.h" +#include "common/memory_range.h" + +using google_breakpad::MemoryRange; +using testing::Message; + +namespace { + +const uint32_t kBuffer[10] = { 0 }; +const size_t kBufferSize = sizeof(kBuffer); +const uint8_t* kBufferPointer = reinterpret_cast(kBuffer); + +// Test vectors for verifying Covers, GetData, and Subrange. +const struct { + bool valid; + size_t offset; + size_t length; +} kSubranges[] = { + { true, 0, 0 }, + { true, 0, 2 }, + { true, 0, kBufferSize }, + { true, 2, 0 }, + { true, 2, 4 }, + { true, 2, kBufferSize - 2 }, + { true, kBufferSize - 1, 1 }, + { false, kBufferSize, 0 }, + { false, kBufferSize, static_cast(-1) }, + { false, kBufferSize + 1, 0 }, + { false, static_cast(-1), 2 }, + { false, 1, kBufferSize }, + { false, kBufferSize - 1, 2 }, + { false, 0, static_cast(-1) }, + { false, 1, static_cast(-1) }, +}; +const size_t kNumSubranges = sizeof(kSubranges) / sizeof(kSubranges[0]); + +// Test vectors for verifying GetArrayElement. 
+const struct { + size_t offset; + size_t size; + size_t index; + const void* const pointer; +} kElements[] = { + // Valid array elemenets + { 0, 1, 0, kBufferPointer }, + { 0, 1, 1, kBufferPointer + 1 }, + { 0, 1, kBufferSize - 1, kBufferPointer + kBufferSize - 1 }, + { 0, 2, 1, kBufferPointer + 2 }, + { 0, 4, 2, kBufferPointer + 8 }, + { 0, 4, 9, kBufferPointer + 36 }, + { kBufferSize - 1, 1, 0, kBufferPointer + kBufferSize - 1 }, + // Invalid array elemenets + { 0, 1, kBufferSize, NULL }, + { 0, 4, 10, NULL }, + { kBufferSize - 1, 1, 1, NULL }, + { kBufferSize - 1, 2, 0, NULL }, + { kBufferSize, 1, 0, NULL }, +}; +const size_t kNumElements = sizeof(kElements) / sizeof(kElements[0]); + +} // namespace + +TEST(MemoryRangeTest, DefaultConstructor) { + MemoryRange range; + EXPECT_EQ(NULL, range.data()); + EXPECT_EQ(0U, range.length()); +} + +TEST(MemoryRangeTest, ConstructorWithDataAndLength) { + MemoryRange range(kBuffer, kBufferSize); + EXPECT_EQ(kBufferPointer, range.data()); + EXPECT_EQ(kBufferSize, range.length()); +} + +TEST(MemoryRangeTest, Reset) { + MemoryRange range; + range.Reset(); + EXPECT_EQ(NULL, range.data()); + EXPECT_EQ(0U, range.length()); + + range.Set(kBuffer, kBufferSize); + EXPECT_EQ(kBufferPointer, range.data()); + EXPECT_EQ(kBufferSize, range.length()); + + range.Reset(); + EXPECT_EQ(NULL, range.data()); + EXPECT_EQ(0U, range.length()); +} + +TEST(MemoryRangeTest, Set) { + MemoryRange range; + range.Set(kBuffer, kBufferSize); + EXPECT_EQ(kBufferPointer, range.data()); + EXPECT_EQ(kBufferSize, range.length()); + + range.Set(NULL, 0); + EXPECT_EQ(NULL, range.data()); + EXPECT_EQ(0U, range.length()); +} + +TEST(MemoryRangeTest, SubrangeOfEmptyMemoryRange) { + MemoryRange range; + MemoryRange subrange = range.Subrange(0, 10); + EXPECT_EQ(NULL, subrange.data()); + EXPECT_EQ(0U, subrange.length()); +} + +TEST(MemoryRangeTest, SubrangeAndGetData) { + MemoryRange range(kBuffer, kBufferSize); + for (size_t i = 0; i < kNumSubranges; ++i) { + bool valid = kSubranges[i].valid; + size_t sub_offset = kSubranges[i].offset; + size_t sub_length = kSubranges[i].length; + SCOPED_TRACE(Message() << "offset=" << sub_offset + << ", length=" << sub_length); + + MemoryRange subrange = range.Subrange(sub_offset, sub_length); + if (valid) { + EXPECT_TRUE(range.Covers(sub_offset, sub_length)); + EXPECT_EQ(kBufferPointer + sub_offset, + range.GetData(sub_offset, sub_length)); + EXPECT_EQ(kBufferPointer + sub_offset, subrange.data()); + EXPECT_EQ(sub_length, subrange.length()); + } else { + EXPECT_FALSE(range.Covers(sub_offset, sub_length)); + EXPECT_EQ(NULL, range.GetData(sub_offset, sub_length)); + EXPECT_EQ(NULL, subrange.data()); + EXPECT_EQ(0U, subrange.length()); + } + } +} + +TEST(MemoryRangeTest, GetDataWithTemplateType) { + MemoryRange range(kBuffer, kBufferSize); + const char* char_pointer = range.GetData(0); + EXPECT_EQ(reinterpret_cast(kBufferPointer), char_pointer); + const int* int_pointer = range.GetData(0); + EXPECT_EQ(reinterpret_cast(kBufferPointer), int_pointer); +} + +TEST(MemoryRangeTest, GetArrayElement) { + MemoryRange range(kBuffer, kBufferSize); + for (size_t i = 0; i < kNumElements; ++i) { + size_t element_offset = kElements[i].offset; + size_t element_size = kElements[i].size; + unsigned element_index = kElements[i].index; + const void* const element_pointer = kElements[i].pointer; + SCOPED_TRACE(Message() << "offset=" << element_offset + << ", size=" << element_size + << ", index=" << element_index); + EXPECT_EQ(element_pointer, range.GetArrayElement( + 
element_offset, element_size, element_index)); + } +} + +TEST(MemoryRangeTest, GetArrayElmentWithTemplateType) { + MemoryRange range(kBuffer, kBufferSize); + const char* char_pointer = range.GetArrayElement(0, 0); + EXPECT_EQ(reinterpret_cast(kBufferPointer), char_pointer); + const int* int_pointer = range.GetArrayElement(0, 0); + EXPECT_EQ(reinterpret_cast(kBufferPointer), int_pointer); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/memory_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/memory_unittest.cc new file mode 100644 index 0000000000..1e511ca56e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/memory_unittest.cc @@ -0,0 +1,97 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
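One detail of MemoryRange worth highlighting: Covers() includes the check sub_offset + sub_length >= sub_offset so that a huge length cannot wrap around size_t and slip past the bounds test, which is exactly what the kSubranges entries using (size_t)-1 above exercise. A sketch of the intended checked-read pattern, not part of the vendored sources; the Header struct is made up:

// Editorial sketch, not part of the vendored sources: checked reads through
// MemoryRange when walking untrusted input. Header is a made-up record type.
#include <stddef.h>
#include <stdint.h>

#include "common/memory_range.h"

struct Header {
  uint32_t magic;
  uint32_t entry_count;
};

const Header* ReadHeader(const void* data, size_t length) {
  google_breakpad::MemoryRange range(data, length);
  // GetData<Header>(0) returns NULL unless sizeof(Header) bytes starting at
  // offset 0 are fully covered, so the caller only needs a null check.
  return range.GetData<Header>(0);
}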
+ +#include "breakpad_googletest_includes.h" +#include "common/memory.h" + +using namespace google_breakpad; + +namespace { +typedef testing::Test PageAllocatorTest; +} + +TEST(PageAllocatorTest, Setup) { + PageAllocator allocator; +} + +TEST(PageAllocatorTest, SmallObjects) { + PageAllocator allocator; + + for (unsigned i = 1; i < 1024; ++i) { + uint8_t *p = reinterpret_cast(allocator.Alloc(i)); + ASSERT_FALSE(p == NULL); + memset(p, 0, i); + } +} + +TEST(PageAllocatorTest, LargeObject) { + PageAllocator allocator; + + uint8_t *p = reinterpret_cast(allocator.Alloc(10000)); + ASSERT_FALSE(p == NULL); + for (unsigned i = 1; i < 10; ++i) { + uint8_t *p = reinterpret_cast(allocator.Alloc(i)); + ASSERT_FALSE(p == NULL); + memset(p, 0, i); + } +} + +namespace { +typedef testing::Test WastefulVectorTest; +} + +TEST(WastefulVectorTest, Setup) { + PageAllocator allocator_; + wasteful_vector v(&allocator_); + ASSERT_TRUE(v.empty()); + ASSERT_EQ(v.size(), 0u); +} + +TEST(WastefulVectorTest, Simple) { + PageAllocator allocator_; + wasteful_vector v(&allocator_); + + for (unsigned i = 0; i < 256; ++i) { + v.push_back(i); + ASSERT_EQ(i, v.back()); + ASSERT_EQ(&v.back(), &v[i]); + } + ASSERT_FALSE(v.empty()); + ASSERT_EQ(v.size(), 256u); + for (unsigned i = 0; i < 256; ++i) + ASSERT_EQ(v[i], i); +} + +TEST(WastefulVectorTest, UsesPageAllocator) { + PageAllocator allocator_; + wasteful_vector v(&allocator_); + + v.push_back(1); + ASSERT_TRUE(allocator_.OwnsPointer(&v[0])); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/minidump_type_helper.h b/TMessagesProj/jni/third_party/breakpad/src/common/minidump_type_helper.h new file mode 100644 index 0000000000..5a7d5a6a87 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/minidump_type_helper.h @@ -0,0 +1,56 @@ +// Copyright (c) 2014, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_TYPE_HELPER_H_ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_TYPE_HELPER_H_ + +#include + +#include "google_breakpad/common/minidump_format.h" + +namespace google_breakpad { + +template +struct MDTypeHelper; + +template <> +struct MDTypeHelper { + typedef MDRawDebug32 MDRawDebug; + typedef MDRawLinkMap32 MDRawLinkMap; +}; + +template <> +struct MDTypeHelper { + typedef MDRawDebug64 MDRawDebug; + typedef MDRawLinkMap64 MDRawLinkMap; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_COMMON_MINIDUMP_TYPE_HELPER_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/module.cc b/TMessagesProj/jni/third_party/breakpad/src/common/module.cc new file mode 100644 index 0000000000..fa798f48dc --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/module.cc @@ -0,0 +1,319 @@ +// Copyright (c) 2011 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// module.cc: Implement google_breakpad::Module. See module.h. 
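minidump_type_helper.h above is a small compile-time switch: specializing MDTypeHelper on an address width selects the matching 32-bit or 64-bit MDRawDebug/MDRawLinkMap layouts from minidump_format.h. A sketch of that selection, not part of the vendored sources; real callers may key on the ELF address size rather than sizeof(void*):

// Editorial sketch, not part of the vendored sources: choosing minidump
// record layouts by pointer width via MDTypeHelper. Actual dumper code may
// use the ELF address size instead of sizeof(void*).
#include <stddef.h>

#include "common/minidump_type_helper.h"

typedef google_breakpad::MDTypeHelper<sizeof(void*)>::MDRawDebug RawDebug;
typedef google_breakpad::MDTypeHelper<sizeof(void*)>::MDRawLinkMap RawLinkMap;

size_t SketchDebugRecordSize() {
  // 32-bit targets resolve to MDRawDebug32, 64-bit targets to MDRawDebug64.
  return sizeof(RawDebug);
}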
+ +#include "common/module.h" + +#include +#include +#include +#include + +#include +#include + +namespace google_breakpad { + +using std::dec; +using std::endl; +using std::hex; + + +Module::Module(const string &name, const string &os, + const string &architecture, const string &id) : + name_(name), + os_(os), + architecture_(architecture), + id_(id), + load_address_(0) { } + +Module::~Module() { + for (FileByNameMap::iterator it = files_.begin(); it != files_.end(); ++it) + delete it->second; + for (FunctionSet::iterator it = functions_.begin(); + it != functions_.end(); ++it) { + delete *it; + } + for (vector::iterator it = stack_frame_entries_.begin(); + it != stack_frame_entries_.end(); ++it) { + delete *it; + } + for (ExternSet::iterator it = externs_.begin(); it != externs_.end(); ++it) + delete *it; +} + +void Module::SetLoadAddress(Address address) { + load_address_ = address; +} + +void Module::AddFunction(Function *function) { + // FUNC lines must not hold an empty name, so catch the problem early if + // callers try to add one. + assert(!function->name.empty()); + + // FUNCs are better than PUBLICs as they come with sizes, so remove an extern + // with the same address if present. + Extern ext(function->address); + ExternSet::iterator it_ext = externs_.find(&ext); + if (it_ext == externs_.end() && + architecture_ == "arm" && + (function->address & 0x1) == 0) { + // ARM THUMB functions have bit 0 set. ARM64 does not have THUMB. + Extern arm_thumb_ext(function->address | 0x1); + it_ext = externs_.find(&arm_thumb_ext); + } + if (it_ext != externs_.end()) { + delete *it_ext; + externs_.erase(it_ext); + } +#if _DEBUG + { + // There should be no other PUBLIC symbols that overlap with the function. + Extern debug_ext(function->address); + ExternSet::iterator it_debug = externs_.lower_bound(&ext); + assert(it_debug == externs_.end() || + (*it_debug)->address >= function->address + function->size); + } +#endif + + std::pair ret = functions_.insert(function); + if (!ret.second && (*ret.first != function)) { + // Free the duplicate that was not inserted because this Module + // now owns it. + delete function; + } +} + +void Module::AddFunctions(vector::iterator begin, + vector::iterator end) { + for (vector::iterator it = begin; it != end; ++it) + AddFunction(*it); +} + +void Module::AddStackFrameEntry(StackFrameEntry *stack_frame_entry) { + stack_frame_entries_.push_back(stack_frame_entry); +} + +void Module::AddExtern(Extern *ext) { + std::pair ret = externs_.insert(ext); + if (!ret.second) { + // Free the duplicate that was not inserted because this Module + // now owns it. + delete ext; + } +} + +void Module::GetFunctions(vector *vec, + vector::iterator i) { + vec->insert(i, functions_.begin(), functions_.end()); +} + +void Module::GetExterns(vector *vec, + vector::iterator i) { + vec->insert(i, externs_.begin(), externs_.end()); +} + +Module::File *Module::FindFile(const string &name) { + // A tricky bit here. The key of each map entry needs to be a + // pointer to the entry's File's name string. This means that we + // can't do the initial lookup with any operation that would create + // an empty entry for us if the name isn't found (like, say, + // operator[] or insert do), because such a created entry's key will + // be a pointer the string passed as our argument. Since the key of + // a map's value type is const, we can't fix it up once we've + // created our file. lower_bound does the lookup without doing an + // insertion, and returns a good hint iterator to pass to insert. 
+ // Our "destiny" is where we belong, whether we're there or not now. + FileByNameMap::iterator destiny = files_.lower_bound(&name); + if (destiny == files_.end() + || *destiny->first != name) { // Repeated string comparison, boo hoo. + File *file = new File(name); + file->source_id = -1; + destiny = files_.insert(destiny, + FileByNameMap::value_type(&file->name, file)); + } + return destiny->second; +} + +Module::File *Module::FindFile(const char *name) { + string name_string = name; + return FindFile(name_string); +} + +Module::File *Module::FindExistingFile(const string &name) { + FileByNameMap::iterator it = files_.find(&name); + return (it == files_.end()) ? NULL : it->second; +} + +void Module::GetFiles(vector *vec) { + vec->clear(); + for (FileByNameMap::iterator it = files_.begin(); it != files_.end(); ++it) + vec->push_back(it->second); +} + +void Module::GetStackFrameEntries(vector *vec) const { + *vec = stack_frame_entries_; +} + +void Module::AssignSourceIds() { + // First, give every source file an id of -1. + for (FileByNameMap::iterator file_it = files_.begin(); + file_it != files_.end(); ++file_it) { + file_it->second->source_id = -1; + } + + // Next, mark all files actually cited by our functions' line number + // info, by setting each one's source id to zero. + for (FunctionSet::const_iterator func_it = functions_.begin(); + func_it != functions_.end(); ++func_it) { + Function *func = *func_it; + for (vector::iterator line_it = func->lines.begin(); + line_it != func->lines.end(); ++line_it) + line_it->file->source_id = 0; + } + + // Finally, assign source ids to those files that have been marked. + // We could have just assigned source id numbers while traversing + // the line numbers, but doing it this way numbers the files in + // lexicographical order by name, which is neat. + int next_source_id = 0; + for (FileByNameMap::iterator file_it = files_.begin(); + file_it != files_.end(); ++file_it) { + if (!file_it->second->source_id) + file_it->second->source_id = next_source_id++; + } +} + +bool Module::ReportError() { + fprintf(stderr, "error writing symbol file: %s\n", + strerror(errno)); + return false; +} + +bool Module::WriteRuleMap(const RuleMap &rule_map, std::ostream &stream) { + for (RuleMap::const_iterator it = rule_map.begin(); + it != rule_map.end(); ++it) { + if (it != rule_map.begin()) + stream << ' '; + stream << it->first << ": " << it->second; + } + return stream.good(); +} + +bool Module::Write(std::ostream &stream, SymbolData symbol_data) { + stream << "MODULE " << os_ << " " << architecture_ << " " + << id_ << " " << name_ << endl; + if (!stream.good()) + return ReportError(); + + if (symbol_data != ONLY_CFI) { + AssignSourceIds(); + + // Write out files. + for (FileByNameMap::iterator file_it = files_.begin(); + file_it != files_.end(); ++file_it) { + File *file = file_it->second; + if (file->source_id >= 0) { + stream << "FILE " << file->source_id << " " << file->name << endl; + if (!stream.good()) + return ReportError(); + } + } + + // Write out functions and their lines. 
+ for (FunctionSet::const_iterator func_it = functions_.begin(); + func_it != functions_.end(); ++func_it) { + Function *func = *func_it; + stream << "FUNC " << hex + << (func->address - load_address_) << " " + << func->size << " " + << func->parameter_size << " " + << func->name << dec << endl; + if (!stream.good()) + return ReportError(); + + for (vector::iterator line_it = func->lines.begin(); + line_it != func->lines.end(); ++line_it) { + stream << hex + << (line_it->address - load_address_) << " " + << line_it->size << " " + << dec + << line_it->number << " " + << line_it->file->source_id << endl; + if (!stream.good()) + return ReportError(); + } + } + + // Write out 'PUBLIC' records. + for (ExternSet::const_iterator extern_it = externs_.begin(); + extern_it != externs_.end(); ++extern_it) { + Extern *ext = *extern_it; + stream << "PUBLIC " << hex + << (ext->address - load_address_) << " 0 " + << ext->name << dec << endl; + } + } + + if (symbol_data != NO_CFI) { + // Write out 'STACK CFI INIT' and 'STACK CFI' records. + vector::const_iterator frame_it; + for (frame_it = stack_frame_entries_.begin(); + frame_it != stack_frame_entries_.end(); ++frame_it) { + StackFrameEntry *entry = *frame_it; + stream << "STACK CFI INIT " << hex + << (entry->address - load_address_) << " " + << entry->size << " " << dec; + if (!stream.good() + || !WriteRuleMap(entry->initial_rules, stream)) + return ReportError(); + + stream << endl; + + // Write out this entry's delta rules as 'STACK CFI' records. + for (RuleChangeMap::const_iterator delta_it = entry->rule_changes.begin(); + delta_it != entry->rule_changes.end(); ++delta_it) { + stream << "STACK CFI " << hex + << (delta_it->first - load_address_) << " " << dec; + if (!stream.good() + || !WriteRuleMap(delta_it->second, stream)) + return ReportError(); + + stream << endl; + } + } + } + + return true; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/module.h b/TMessagesProj/jni/third_party/breakpad/src/common/module.h new file mode 100644 index 0000000000..65b5595de0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/module.h @@ -0,0 +1,336 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// module.h: Define google_breakpad::Module. A Module holds debugging +// information, and can write that information out as a Breakpad +// symbol file. + +#ifndef COMMON_LINUX_MODULE_H__ +#define COMMON_LINUX_MODULE_H__ + +#include +#include +#include +#include +#include + +#include "common/symbol_data.h" +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" + +namespace google_breakpad { + +using std::set; +using std::vector; +using std::map; + +// A Module represents the contents of a module, and supports methods +// for adding information produced by parsing STABS or DWARF data +// --- possibly both from the same file --- and then writing out the +// unified contents as a Breakpad-format symbol file. +class Module { + public: + // The type of addresses and sizes in a symbol table. + typedef uint64_t Address; + struct File; + struct Function; + struct Line; + struct Extern; + + // Addresses appearing in File, Function, and Line structures are + // absolute, not relative to the the module's load address. That + // is, if the module were loaded at its nominal load address, the + // addresses would be correct. + + // A source file. + struct File { + explicit File(const string &name_input) : name(name_input), source_id(0) {} + + // The name of the source file. + const string name; + + // The file's source id. The Write member function clears this + // field and assigns source ids a fresh, so any value placed here + // before calling Write will be lost. + int source_id; + }; + + // A function. + struct Function { + Function(const string &name_input, const Address &address_input) : + name(name_input), address(address_input), size(0), parameter_size(0) {} + + // For sorting by address. (Not style-guide compliant, but it's + // stupid not to put this in the struct.) + static bool CompareByAddress(const Function *x, const Function *y) { + return x->address < y->address; + } + + // The function's name. + const string name; + + // The start address and length of the function's code. + const Address address; + Address size; + + // The function's parameter size. + Address parameter_size; + + // Source lines belonging to this function, sorted by increasing + // address. + vector lines; + }; + + // A source line. + struct Line { + // For sorting by address. (Not style-guide compliant, but it's + // stupid not to put this in the struct.) + static bool CompareByAddress(const Module::Line &x, const Module::Line &y) { + return x.address < y.address; + } + + Address address, size; // The address and size of the line's code. + File *file; // The source file. + int number; // The source line number. + }; + + // An exported symbol. + struct Extern { + explicit Extern(const Address &address_input) : address(address_input) {} + const Address address; + string name; + }; + + // A map from register names to postfix expressions that recover + // their their values. 
This can represent a complete set of rules to + // follow at some address, or a set of changes to be applied to an + // extant set of rules. + typedef map RuleMap; + + // A map from addresses to RuleMaps, representing changes that take + // effect at given addresses. + typedef map RuleChangeMap; + + // A range of 'STACK CFI' stack walking information. An instance of + // this structure corresponds to a 'STACK CFI INIT' record and the + // subsequent 'STACK CFI' records that fall within its range. + struct StackFrameEntry { + // The starting address and number of bytes of machine code this + // entry covers. + Address address, size; + + // The initial register recovery rules, in force at the starting + // address. + RuleMap initial_rules; + + // A map from addresses to rule changes. To find the rules in + // force at a given address, start with initial_rules, and then + // apply the changes given in this map for all addresses up to and + // including the address you're interested in. + RuleChangeMap rule_changes; + }; + + struct FunctionCompare { + bool operator() (const Function *lhs, + const Function *rhs) const { + if (lhs->address == rhs->address) + return lhs->name < rhs->name; + return lhs->address < rhs->address; + } + }; + + struct ExternCompare { + bool operator() (const Extern *lhs, + const Extern *rhs) const { + return lhs->address < rhs->address; + } + }; + + // Create a new module with the given name, operating system, + // architecture, and ID string. + Module(const string &name, const string &os, const string &architecture, + const string &id); + ~Module(); + + // Set the module's load address to LOAD_ADDRESS; addresses given + // for functions and lines will be written to the Breakpad symbol + // file as offsets from this address. Construction initializes this + // module's load address to zero: addresses written to the symbol + // file will be the same as they appear in the Function, Line, and + // StackFrameEntry structures. + // + // Note that this member function has no effect on addresses stored + // in the data added to this module; the Write member function + // simply subtracts off the load address from addresses before it + // prints them. Only the last load address given before calling + // Write is used. + void SetLoadAddress(Address load_address); + + // Add FUNCTION to the module. FUNCTION's name must not be empty. + // This module owns all Function objects added with this function: + // destroying the module destroys them as well. + void AddFunction(Function *function); + + // Add all the functions in [BEGIN,END) to the module. + // This module owns all Function objects added with this function: + // destroying the module destroys them as well. + void AddFunctions(vector::iterator begin, + vector::iterator end); + + // Add STACK_FRAME_ENTRY to the module. + // This module owns all StackFrameEntry objects added with this + // function: destroying the module destroys them as well. + void AddStackFrameEntry(StackFrameEntry *stack_frame_entry); + + // Add PUBLIC to the module. + // This module owns all Extern objects added with this function: + // destroying the module destroys them as well. + void AddExtern(Extern *ext); + + // If this module has a file named NAME, return a pointer to it. If + // it has none, then create one and return a pointer to the new + // file. This module owns all File objects created using these + // functions; destroying the module destroys them as well. 
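The StackFrameEntry comment above describes how delta rules are meant to be applied: start from initial_rules, then fold in every rule_changes entry at or below the address of interest. A small helper spelling that out, not part of the vendored sources:

// Editorial sketch, not part of the vendored sources: resolving the CFI rules
// in force at a given address from a StackFrameEntry, following the comment
// on rule_changes (apply every change at or below the address, in order).
#include "common/module.h"

google_breakpad::Module::RuleMap RulesInForceAt(
    const google_breakpad::Module::StackFrameEntry& entry,
    google_breakpad::Module::Address address) {
  google_breakpad::Module::RuleMap rules = entry.initial_rules;
  for (google_breakpad::Module::RuleChangeMap::const_iterator it =
           entry.rule_changes.begin();
       it != entry.rule_changes.end() && it->first <= address; ++it) {
    // Later changes overwrite earlier rules for the same register.
    for (google_breakpad::Module::RuleMap::const_iterator r =
             it->second.begin();
         r != it->second.end(); ++r) {
      rules[r->first] = r->second;
    }
  }
  return rules;
}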
+ File *FindFile(const string &name); + File *FindFile(const char *name); + + // If this module has a file named NAME, return a pointer to it. + // Otherwise, return NULL. + File *FindExistingFile(const string &name); + + // Insert pointers to the functions added to this module at I in + // VEC. The pointed-to Functions are still owned by this module. + // (Since this is effectively a copy of the function list, this is + // mostly useful for testing; other uses should probably get a more + // appropriate interface.) + void GetFunctions(vector *vec, vector::iterator i); + + // Insert pointers to the externs added to this module at I in + // VEC. The pointed-to Externs are still owned by this module. + // (Since this is effectively a copy of the extern list, this is + // mostly useful for testing; other uses should probably get a more + // appropriate interface.) + void GetExterns(vector *vec, vector::iterator i); + + // Clear VEC and fill it with pointers to the Files added to this + // module, sorted by name. The pointed-to Files are still owned by + // this module. (Since this is effectively a copy of the file list, + // this is mostly useful for testing; other uses should probably get + // a more appropriate interface.) + void GetFiles(vector *vec); + + // Clear VEC and fill it with pointers to the StackFrameEntry + // objects that have been added to this module. (Since this is + // effectively a copy of the stack frame entry list, this is mostly + // useful for testing; other uses should probably get + // a more appropriate interface.) + void GetStackFrameEntries(vector *vec) const; + + // Find those files in this module that are actually referred to by + // functions' line number data, and assign them source id numbers. + // Set the source id numbers for all other files --- unused by the + // source line data --- to -1. We do this before writing out the + // symbol file, at which point we omit any unused files. + void AssignSourceIds(); + + // Call AssignSourceIds, and write this module to STREAM in the + // breakpad symbol format. Return true if all goes well, or false if + // an error occurs. This method writes out: + // - a header based on the values given to the constructor, + // If symbol_data is not ONLY_CFI then: + // - the source files added via FindFile, + // - the functions added via AddFunctions, each with its lines, + // - all public records, + // If symbol_data is not NO_CFI then: + // - all CFI records. + // Addresses in the output are all relative to the load address + // established by SetLoadAddress. + bool Write(std::ostream &stream, SymbolData symbol_data); + + string name() const { return name_; } + string os() const { return os_; } + string architecture() const { return architecture_; } + string identifier() const { return id_; } + + private: + // Report an error that has occurred writing the symbol file, using + // errno to find the appropriate cause. Return false. + static bool ReportError(); + + // Write RULE_MAP to STREAM, in the form appropriate for 'STACK CFI' + // records, without a final newline. Return true if all goes well; + // if an error occurs, return false, and leave errno set. + static bool WriteRuleMap(const RuleMap &rule_map, std::ostream &stream); + + // Module header entries. + string name_, os_, architecture_, id_; + + // The module's nominal load address. Addresses for functions and + // lines are absolute, assuming the module is loaded at this + // address. 
+ Address load_address_; + + // Relation for maps whose keys are strings shared with some other + // structure. + struct CompareStringPtrs { + bool operator()(const string *x, const string *y) const { return *x < *y; } + }; + + // A map from filenames to File structures. The map's keys are + // pointers to the Files' names. + typedef map FileByNameMap; + + // A set containing Function structures, sorted by address. + typedef set FunctionSet; + + // A set containing Extern structures, sorted by address. + typedef set ExternSet; + + // The module owns all the files and functions that have been added + // to it; destroying the module frees the Files and Functions these + // point to. + FileByNameMap files_; // This module's source files. + FunctionSet functions_; // This module's functions. + + // The module owns all the call frame info entries that have been + // added to it. + vector stack_frame_entries_; + + // The module owns all the externs that have been added to it; + // destroying the module frees the Externs these point to. + ExternSet externs_; +}; + +} // namespace google_breakpad + +#endif // COMMON_LINUX_MODULE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/module_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/module_unittest.cc new file mode 100644 index 0000000000..0b6432716e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/module_unittest.cc @@ -0,0 +1,546 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// module_unittest.cc: Unit tests for google_breakpad::Module. 
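module.h and module.cc together form the in-memory model behind Breakpad's text symbol files; the unit tests that follow exercise the full output grammar, and the condensed producer flow looks roughly like the sketch below (not part of the vendored sources; names, addresses and sizes are illustrative):

// Editorial sketch, not part of the vendored sources: the typical producer
// flow for a Breakpad symbol file. All names and addresses are illustrative.
#include <iostream>
#include <sstream>

#include "common/module.h"

void SketchWriteSymbols() {
  using google_breakpad::Module;

  Module module("libdemo.so", "Linux", "arm64", "0123456789ABCDEF");

  Module::Function* func = new Module::Function("DemoFunction", 0x1040);
  func->size = 0x20;
  func->parameter_size = 0;

  Module::Line line = { 0x1040, 0x20, module.FindFile("demo.cc"), 17 };
  func->lines.push_back(line);

  module.AddFunction(func);        // the Module now owns |func|
  module.SetLoadAddress(0x1000);   // addresses are emitted relative to this

  std::ostringstream out;
  module.Write(out, ALL_SYMBOL_DATA);  // MODULE / FILE / FUNC / line records
  std::cout << out.str();
}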
+ +#include +#include +#include +#include + +#include +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/module.h" +#include "common/using_std_string.h" + +using google_breakpad::Module; +using std::stringstream; +using std::vector; +using testing::ContainerEq; + +static Module::Function *generate_duplicate_function(const string &name) { + const Module::Address DUP_ADDRESS = 0xd35402aac7a7ad5cLL; + const Module::Address DUP_SIZE = 0x200b26e605f99071LL; + const Module::Address DUP_PARAMETER_SIZE = 0xf14ac4fed48c4a99LL; + + Module::Function *function = new Module::Function(name, DUP_ADDRESS); + function->size = DUP_SIZE; + function->parameter_size = DUP_PARAMETER_SIZE; + return function; +} + +#define MODULE_NAME "name with spaces" +#define MODULE_OS "os-name" +#define MODULE_ARCH "architecture" +#define MODULE_ID "id-string" + +TEST(Write, Header) { + stringstream s; + Module m(MODULE_NAME, MODULE_OS, MODULE_ARCH, MODULE_ID); + m.Write(s, ALL_SYMBOL_DATA); + string contents = s.str(); + EXPECT_STREQ("MODULE os-name architecture id-string name with spaces\n", + contents.c_str()); +} + +TEST(Write, OneLineFunc) { + stringstream s; + Module m(MODULE_NAME, MODULE_OS, MODULE_ARCH, MODULE_ID); + + Module::File *file = m.FindFile("file_name.cc"); + Module::Function *function = new Module::Function( + "function_name", 0xe165bf8023b9d9abLL); + function->size = 0x1e4bb0eb1cbf5b09LL; + function->parameter_size = 0x772beee89114358aLL; + Module::Line line = { 0xe165bf8023b9d9abLL, 0x1e4bb0eb1cbf5b09LL, + file, 67519080 }; + function->lines.push_back(line); + m.AddFunction(function); + + m.Write(s, ALL_SYMBOL_DATA); + string contents = s.str(); + EXPECT_STREQ("MODULE os-name architecture id-string name with spaces\n" + "FILE 0 file_name.cc\n" + "FUNC e165bf8023b9d9ab 1e4bb0eb1cbf5b09 772beee89114358a" + " function_name\n" + "e165bf8023b9d9ab 1e4bb0eb1cbf5b09 67519080 0\n", + contents.c_str()); +} + +TEST(Write, RelativeLoadAddress) { + stringstream s; + Module m(MODULE_NAME, MODULE_OS, MODULE_ARCH, MODULE_ID); + + // Some source files. We will expect to see them in lexicographic order. + Module::File *file1 = m.FindFile("filename-b.cc"); + Module::File *file2 = m.FindFile("filename-a.cc"); + + // A function. + Module::Function *function = new Module::Function( + "A_FLIBBERTIJIBBET::a_will_o_the_wisp(a clown)", 0xbec774ea5dd935f3LL); + function->size = 0x2922088f98d3f6fcLL; + function->parameter_size = 0xe5e9aa008bd5f0d0LL; + + // Some source lines. The module should not sort these. + Module::Line line1 = { 0xbec774ea5dd935f3LL, 0x1c2be6d6c5af2611LL, + file1, 41676901 }; + Module::Line line2 = { 0xdaf35bc123885c04LL, 0xcf621b8d324d0ebLL, + file2, 67519080 }; + function->lines.push_back(line2); + function->lines.push_back(line1); + + m.AddFunction(function); + + // Some stack information. + Module::StackFrameEntry *entry = new Module::StackFrameEntry(); + entry->address = 0x30f9e5c83323973dULL; + entry->size = 0x49fc9ca7c7c13dc2ULL; + entry->initial_rules[".cfa"] = "he was a handsome man"; + entry->initial_rules["and"] = "what i want to know is"; + entry->rule_changes[0x30f9e5c83323973eULL]["how"] = + "do you like your blueeyed boy"; + entry->rule_changes[0x30f9e5c83323973eULL]["Mister"] = "Death"; + m.AddStackFrameEntry(entry); + + // Set the load address. Doing this after adding all the data to + // the module must work fine. 
+ m.SetLoadAddress(0x2ab698b0b6407073LL); + + m.Write(s, ALL_SYMBOL_DATA); + string contents = s.str(); + EXPECT_STREQ("MODULE os-name architecture id-string name with spaces\n" + "FILE 0 filename-a.cc\n" + "FILE 1 filename-b.cc\n" + "FUNC 9410dc39a798c580 2922088f98d3f6fc e5e9aa008bd5f0d0" + " A_FLIBBERTIJIBBET::a_will_o_the_wisp(a clown)\n" + "b03cc3106d47eb91 cf621b8d324d0eb 67519080 0\n" + "9410dc39a798c580 1c2be6d6c5af2611 41676901 1\n" + "STACK CFI INIT 6434d177ce326ca 49fc9ca7c7c13dc2" + " .cfa: he was a handsome man" + " and: what i want to know is\n" + "STACK CFI 6434d177ce326cb" + " Mister: Death" + " how: do you like your blueeyed boy\n", + contents.c_str()); +} + +TEST(Write, OmitUnusedFiles) { + Module m(MODULE_NAME, MODULE_OS, MODULE_ARCH, MODULE_ID); + + // Create some source files. + Module::File *file1 = m.FindFile("filename1"); + m.FindFile("filename2"); // not used by any line + Module::File *file3 = m.FindFile("filename3"); + + // Create a function. + Module::Function *function = new Module::Function( + "function_name", 0x9b926d464f0b9384LL); + function->size = 0x4f524a4ba795e6a6LL; + function->parameter_size = 0xbbe8133a6641c9b7LL; + + // Source files that refer to some files, but not others. + Module::Line line1 = { 0x595fa44ebacc1086LL, 0x1e1e0191b066c5b3LL, + file1, 137850127 }; + Module::Line line2 = { 0x401ce8c8a12d25e3LL, 0x895751c41b8d2ce2LL, + file3, 28113549 }; + function->lines.push_back(line1); + function->lines.push_back(line2); + m.AddFunction(function); + + m.AssignSourceIds(); + + vector vec; + m.GetFiles(&vec); + EXPECT_EQ((size_t) 3, vec.size()); + EXPECT_STREQ("filename1", vec[0]->name.c_str()); + EXPECT_NE(-1, vec[0]->source_id); + // Expect filename2 not to be used. + EXPECT_STREQ("filename2", vec[1]->name.c_str()); + EXPECT_EQ(-1, vec[1]->source_id); + EXPECT_STREQ("filename3", vec[2]->name.c_str()); + EXPECT_NE(-1, vec[2]->source_id); + + stringstream s; + m.Write(s, ALL_SYMBOL_DATA); + string contents = s.str(); + EXPECT_STREQ("MODULE os-name architecture id-string name with spaces\n" + "FILE 0 filename1\n" + "FILE 1 filename3\n" + "FUNC 9b926d464f0b9384 4f524a4ba795e6a6 bbe8133a6641c9b7" + " function_name\n" + "595fa44ebacc1086 1e1e0191b066c5b3 137850127 0\n" + "401ce8c8a12d25e3 895751c41b8d2ce2 28113549 1\n", + contents.c_str()); +} + +TEST(Write, NoCFI) { + stringstream s; + Module m(MODULE_NAME, MODULE_OS, MODULE_ARCH, MODULE_ID); + + // Some source files. We will expect to see them in lexicographic order. + Module::File *file1 = m.FindFile("filename.cc"); + + // A function. + Module::Function *function = new Module::Function( + "A_FLIBBERTIJIBBET::a_will_o_the_wisp(a clown)", 0xbec774ea5dd935f3LL); + function->size = 0x2922088f98d3f6fcLL; + function->parameter_size = 0xe5e9aa008bd5f0d0LL; + + // Some source lines. The module should not sort these. + Module::Line line1 = { 0xbec774ea5dd935f3LL, 0x1c2be6d6c5af2611LL, + file1, 41676901 }; + function->lines.push_back(line1); + + m.AddFunction(function); + + // Some stack information. + Module::StackFrameEntry *entry = new Module::StackFrameEntry(); + entry->address = 0x30f9e5c83323973dULL; + entry->size = 0x49fc9ca7c7c13dc2ULL; + entry->initial_rules[".cfa"] = "he was a handsome man"; + entry->initial_rules["and"] = "what i want to know is"; + entry->rule_changes[0x30f9e5c83323973eULL]["how"] = + "do you like your blueeyed boy"; + entry->rule_changes[0x30f9e5c83323973eULL]["Mister"] = "Death"; + m.AddStackFrameEntry(entry); + + // Set the load address. 
Doing this after adding all the data to + // the module must work fine. + m.SetLoadAddress(0x2ab698b0b6407073LL); + + m.Write(s, NO_CFI); + string contents = s.str(); + EXPECT_STREQ("MODULE os-name architecture id-string name with spaces\n" + "FILE 0 filename.cc\n" + "FUNC 9410dc39a798c580 2922088f98d3f6fc e5e9aa008bd5f0d0" + " A_FLIBBERTIJIBBET::a_will_o_the_wisp(a clown)\n" + "9410dc39a798c580 1c2be6d6c5af2611 41676901 0\n", + contents.c_str()); +} + +TEST(Construct, AddFunctions) { + stringstream s; + Module m(MODULE_NAME, MODULE_OS, MODULE_ARCH, MODULE_ID); + + // Two functions. + Module::Function *function1 = new Module::Function( + "_without_form", 0xd35024aa7ca7da5cLL); + function1->size = 0x200b26e605f99071LL; + function1->parameter_size = 0xf14ac4fed48c4a99LL; + + Module::Function *function2 = new Module::Function( + "_and_void", 0x2987743d0b35b13fLL); + function2->size = 0xb369db048deb3010LL; + function2->parameter_size = 0x938e556cb5a79988LL; + + // Put them in a vector. + vector vec; + vec.push_back(function1); + vec.push_back(function2); + + m.AddFunctions(vec.begin(), vec.end()); + + m.Write(s, ALL_SYMBOL_DATA); + string contents = s.str(); + EXPECT_STREQ("MODULE os-name architecture id-string name with spaces\n" + "FUNC 2987743d0b35b13f b369db048deb3010 938e556cb5a79988" + " _and_void\n" + "FUNC d35024aa7ca7da5c 200b26e605f99071 f14ac4fed48c4a99" + " _without_form\n", + contents.c_str()); + + // Check that m.GetFunctions returns the functions we expect. + vec.clear(); + m.GetFunctions(&vec, vec.end()); + EXPECT_TRUE(vec.end() != find(vec.begin(), vec.end(), function1)); + EXPECT_TRUE(vec.end() != find(vec.begin(), vec.end(), function2)); + EXPECT_EQ((size_t) 2, vec.size()); +} + +TEST(Construct, AddFrames) { + stringstream s; + Module m(MODULE_NAME, MODULE_OS, MODULE_ARCH, MODULE_ID); + + // First STACK CFI entry, with no initial rules or deltas. + Module::StackFrameEntry *entry1 = new Module::StackFrameEntry(); + entry1->address = 0xddb5f41285aa7757ULL; + entry1->size = 0x1486493370dc5073ULL; + m.AddStackFrameEntry(entry1); + + // Second STACK CFI entry, with initial rules but no deltas. + Module::StackFrameEntry *entry2 = new Module::StackFrameEntry(); + entry2->address = 0x8064f3af5e067e38ULL; + entry2->size = 0x0de2a5ee55509407ULL; + entry2->initial_rules[".cfa"] = "I think that I shall never see"; + entry2->initial_rules["stromboli"] = "a poem lovely as a tree"; + entry2->initial_rules["cannoli"] = "a tree whose hungry mouth is prest"; + m.AddStackFrameEntry(entry2); + + // Third STACK CFI entry, with initial rules and deltas. + Module::StackFrameEntry *entry3 = new Module::StackFrameEntry(); + entry3->address = 0x5e8d0db0a7075c6cULL; + entry3->size = 0x1c7edb12a7aea229ULL; + entry3->initial_rules[".cfa"] = "Whose woods are these"; + entry3->rule_changes[0x47ceb0f63c269d7fULL]["calzone"] = + "the village though"; + entry3->rule_changes[0x47ceb0f63c269d7fULL]["cannoli"] = + "he will not see me stopping here"; + entry3->rule_changes[0x36682fad3763ffffULL]["stromboli"] = + "his house is in"; + entry3->rule_changes[0x36682fad3763ffffULL][".cfa"] = + "I think I know"; + m.AddStackFrameEntry(entry3); + + // Check that Write writes STACK CFI records properly. 
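// Editorial aside, not part of the upstream patch: in the expected text below
// the 0x36682fad... delta precedes the 0x47ceb0f6... delta even though it was
// added second, because rule_changes is keyed by address in an ordered map
// (Module::RuleChangeMap) and Write() walks it in key order. A minimal
// illustration of that ordering, assuming only what the assertions further
// down already rely on:
#include <assert.h>

#include "common/module.h"

static void ShowRuleChangeOrdering() {
  google_breakpad::Module::RuleChangeMap changes;
  changes[0x47ceb0f63c269d7fULL]["calzone"] = "the village though";
  changes[0x36682fad3763ffffULL][".cfa"] = "I think I know";
  // Iteration starts at the lowest address, regardless of insertion order.
  assert(changes.begin()->first == 0x36682fad3763ffffULL);
}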
+ m.Write(s, ALL_SYMBOL_DATA); + string contents = s.str(); + EXPECT_STREQ("MODULE os-name architecture id-string name with spaces\n" + "STACK CFI INIT ddb5f41285aa7757 1486493370dc5073 \n" + "STACK CFI INIT 8064f3af5e067e38 de2a5ee55509407" + " .cfa: I think that I shall never see" + " cannoli: a tree whose hungry mouth is prest" + " stromboli: a poem lovely as a tree\n" + "STACK CFI INIT 5e8d0db0a7075c6c 1c7edb12a7aea229" + " .cfa: Whose woods are these\n" + "STACK CFI 36682fad3763ffff" + " .cfa: I think I know" + " stromboli: his house is in\n" + "STACK CFI 47ceb0f63c269d7f" + " calzone: the village though" + " cannoli: he will not see me stopping here\n", + contents.c_str()); + + // Check that GetStackFrameEntries works. + vector entries; + m.GetStackFrameEntries(&entries); + ASSERT_EQ(3U, entries.size()); + // Check first entry. + EXPECT_EQ(0xddb5f41285aa7757ULL, entries[0]->address); + EXPECT_EQ(0x1486493370dc5073ULL, entries[0]->size); + ASSERT_EQ(0U, entries[0]->initial_rules.size()); + ASSERT_EQ(0U, entries[0]->rule_changes.size()); + // Check second entry. + EXPECT_EQ(0x8064f3af5e067e38ULL, entries[1]->address); + EXPECT_EQ(0x0de2a5ee55509407ULL, entries[1]->size); + ASSERT_EQ(3U, entries[1]->initial_rules.size()); + Module::RuleMap entry2_initial; + entry2_initial[".cfa"] = "I think that I shall never see"; + entry2_initial["stromboli"] = "a poem lovely as a tree"; + entry2_initial["cannoli"] = "a tree whose hungry mouth is prest"; + EXPECT_THAT(entries[1]->initial_rules, ContainerEq(entry2_initial)); + ASSERT_EQ(0U, entries[1]->rule_changes.size()); + // Check third entry. + EXPECT_EQ(0x5e8d0db0a7075c6cULL, entries[2]->address); + EXPECT_EQ(0x1c7edb12a7aea229ULL, entries[2]->size); + Module::RuleMap entry3_initial; + entry3_initial[".cfa"] = "Whose woods are these"; + EXPECT_THAT(entries[2]->initial_rules, ContainerEq(entry3_initial)); + Module::RuleChangeMap entry3_changes; + entry3_changes[0x36682fad3763ffffULL][".cfa"] = "I think I know"; + entry3_changes[0x36682fad3763ffffULL]["stromboli"] = "his house is in"; + entry3_changes[0x47ceb0f63c269d7fULL]["calzone"] = "the village though"; + entry3_changes[0x47ceb0f63c269d7fULL]["cannoli"] = + "he will not see me stopping here"; + EXPECT_THAT(entries[2]->rule_changes, ContainerEq(entry3_changes)); +} + +TEST(Construct, UniqueFiles) { + Module m(MODULE_NAME, MODULE_OS, MODULE_ARCH, MODULE_ID); + Module::File *file1 = m.FindFile("foo"); + Module::File *file2 = m.FindFile(string("bar")); + Module::File *file3 = m.FindFile(string("foo")); + Module::File *file4 = m.FindFile("bar"); + EXPECT_NE(file1, file2); + EXPECT_EQ(file1, file3); + EXPECT_EQ(file2, file4); + EXPECT_EQ(file1, m.FindExistingFile("foo")); + EXPECT_TRUE(m.FindExistingFile("baz") == NULL); +} + +TEST(Construct, DuplicateFunctions) { + stringstream s; + Module m(MODULE_NAME, MODULE_OS, MODULE_ARCH, MODULE_ID); + + // Two functions. + Module::Function *function1 = generate_duplicate_function("_without_form"); + Module::Function *function2 = generate_duplicate_function("_without_form"); + + m.AddFunction(function1); + m.AddFunction(function2); + + m.Write(s, ALL_SYMBOL_DATA); + string contents = s.str(); + EXPECT_STREQ("MODULE os-name architecture id-string name with spaces\n" + "FUNC d35402aac7a7ad5c 200b26e605f99071 f14ac4fed48c4a99" + " _without_form\n", + contents.c_str()); +} + +TEST(Construct, FunctionsWithSameAddress) { + stringstream s; + Module m(MODULE_NAME, MODULE_OS, MODULE_ARCH, MODULE_ID); + + // Two functions. 
+ Module::Function *function1 = generate_duplicate_function("_without_form"); + Module::Function *function2 = generate_duplicate_function("_and_void"); + + m.AddFunction(function1); + m.AddFunction(function2); + + m.Write(s, ALL_SYMBOL_DATA); + string contents = s.str(); + EXPECT_STREQ("MODULE os-name architecture id-string name with spaces\n" + "FUNC d35402aac7a7ad5c 200b26e605f99071 f14ac4fed48c4a99" + " _and_void\n" + "FUNC d35402aac7a7ad5c 200b26e605f99071 f14ac4fed48c4a99" + " _without_form\n", + contents.c_str()); +} + +// Externs should be written out as PUBLIC records, sorted by +// address. +TEST(Construct, Externs) { + stringstream s; + Module m(MODULE_NAME, MODULE_OS, MODULE_ARCH, MODULE_ID); + + // Two externs. + Module::Extern *extern1 = new Module::Extern(0xffff); + extern1->name = "_abc"; + Module::Extern *extern2 = new Module::Extern(0xaaaa); + extern2->name = "_xyz"; + + m.AddExtern(extern1); + m.AddExtern(extern2); + + m.Write(s, ALL_SYMBOL_DATA); + string contents = s.str(); + + EXPECT_STREQ("MODULE " MODULE_OS " " MODULE_ARCH " " + MODULE_ID " " MODULE_NAME "\n" + "PUBLIC aaaa 0 _xyz\n" + "PUBLIC ffff 0 _abc\n", + contents.c_str()); +} + +// Externs with the same address should only keep the first entry +// added. +TEST(Construct, DuplicateExterns) { + stringstream s; + Module m(MODULE_NAME, MODULE_OS, MODULE_ARCH, MODULE_ID); + + // Two externs. + Module::Extern *extern1 = new Module::Extern(0xffff); + extern1->name = "_xyz"; + Module::Extern *extern2 = new Module::Extern(0xffff); + extern2->name = "_abc"; + + m.AddExtern(extern1); + m.AddExtern(extern2); + + m.Write(s, ALL_SYMBOL_DATA); + string contents = s.str(); + + EXPECT_STREQ("MODULE " MODULE_OS " " MODULE_ARCH " " + MODULE_ID " " MODULE_NAME "\n" + "PUBLIC ffff 0 _xyz\n", + contents.c_str()); +} + +// If there exists an extern and a function at the same address, only write +// out the FUNC entry. +TEST(Construct, FunctionsAndExternsWithSameAddress) { + stringstream s; + Module m(MODULE_NAME, MODULE_OS, MODULE_ARCH, MODULE_ID); + + // Two externs. + Module::Extern* extern1 = new Module::Extern(0xabc0); + extern1->name = "abc"; + Module::Extern* extern2 = new Module::Extern(0xfff0); + extern2->name = "xyz"; + + m.AddExtern(extern1); + m.AddExtern(extern2); + + Module::Function* function = new Module::Function("_xyz", 0xfff0); + function->size = 0x10; + function->parameter_size = 0; + m.AddFunction(function); + + m.Write(s, ALL_SYMBOL_DATA); + string contents = s.str(); + + EXPECT_STREQ("MODULE " MODULE_OS " " MODULE_ARCH " " + MODULE_ID " " MODULE_NAME "\n" + "FUNC fff0 10 0 _xyz\n" + "PUBLIC abc0 0 abc\n", + contents.c_str()); +} + +// If there exists an extern and a function at the same address, only write +// out the FUNC entry. For ARM THUMB, the extern that comes from the ELF +// symbol section has bit 0 set. +TEST(Construct, FunctionsAndThumbExternsWithSameAddress) { + stringstream s; + Module m(MODULE_NAME, MODULE_OS, "arm", MODULE_ID); + + // Two THUMB externs. + Module::Extern* thumb_extern1 = new Module::Extern(0xabc1); + thumb_extern1->name = "thumb_abc"; + Module::Extern* thumb_extern2 = new Module::Extern(0xfff1); + thumb_extern2->name = "thumb_xyz"; + + Module::Extern* arm_extern1 = new Module::Extern(0xcc00); + arm_extern1->name = "arm_func"; + + m.AddExtern(thumb_extern1); + m.AddExtern(thumb_extern2); + m.AddExtern(arm_extern1); + + // The corresponding function from the DWARF debug data have the actual + // address. 
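// Editorial aside, not part of the upstream patch: on ARM, bit 0 of a symbol
// value marks Thumb code, so the 0xfff1 extern above names the same
// instruction address as the 0xfff0 function added below; clearing that bit
// recovers the real address and explains why only the FUNC record survives.
#include <assert.h>
#include <stdint.h>

static void CheckThumbBit() {
  const uint64_t thumb_symbol_value = 0xfff1;
  const uint64_t code_address = thumb_symbol_value & ~static_cast<uint64_t>(1);
  assert(code_address == 0xfff0);  // Same address as the "_thumb_xyz" FUNC.
}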
+ Module::Function* function = new Module::Function("_thumb_xyz", 0xfff0); + function->size = 0x10; + function->parameter_size = 0; + m.AddFunction(function); + + m.Write(s, ALL_SYMBOL_DATA); + string contents = s.str(); + + EXPECT_STREQ("MODULE " MODULE_OS " arm " + MODULE_ID " " MODULE_NAME "\n" + "FUNC fff0 10 0 _thumb_xyz\n" + "PUBLIC abc1 0 thumb_abc\n" + "PUBLIC cc00 0 arm_func\n", + contents.c_str()); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/scoped_ptr.h b/TMessagesProj/jni/third_party/breakpad/src/common/scoped_ptr.h new file mode 100644 index 0000000000..d137c18681 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/scoped_ptr.h @@ -0,0 +1,404 @@ +// Copyright 2013 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Scopers help you manage ownership of a pointer, helping you easily manage the +// a pointer within a scope, and automatically destroying the pointer at the +// end of a scope. There are two main classes you will use, which correspond +// to the operators new/delete and new[]/delete[]. +// +// Example usage (scoped_ptr): +// { +// scoped_ptr foo(new Foo("wee")); +// } // foo goes out of scope, releasing the pointer with it. +// +// { +// scoped_ptr foo; // No pointer managed. +// foo.reset(new Foo("wee")); // Now a pointer is managed. +// foo.reset(new Foo("wee2")); // Foo("wee") was destroyed. +// foo.reset(new Foo("wee3")); // Foo("wee2") was destroyed. +// foo->Method(); // Foo::Method() called. +// foo.get()->Method(); // Foo::Method() called. +// SomeFunc(foo.release()); // SomeFunc takes ownership, foo no longer +// // manages a pointer. +// foo.reset(new Foo("wee4")); // foo manages a pointer again. +// foo.reset(); // Foo("wee4") destroyed, foo no longer +// // manages a pointer. +// } // foo wasn't managing a pointer, so nothing was destroyed. +// +// Example usage (scoped_array): +// { +// scoped_array foo(new Foo[100]); +// foo.get()->Method(); // Foo::Method on the 0th element. +// foo[10].Method(); // Foo::Method on the 10th element. 
+// } + +#ifndef COMMON_SCOPED_PTR_H_ +#define COMMON_SCOPED_PTR_H_ + +// This is an implementation designed to match the anticipated future TR2 +// implementation of the scoped_ptr class, and its closely-related brethren, +// scoped_array, scoped_ptr_malloc. + +#include +#include +#include + +namespace google_breakpad { + +// A scoped_ptr is like a T*, except that the destructor of scoped_ptr +// automatically deletes the pointer it holds (if any). +// That is, scoped_ptr owns the T object that it points to. +// Like a T*, a scoped_ptr may hold either NULL or a pointer to a T object. +// Also like T*, scoped_ptr is thread-compatible, and once you +// dereference it, you get the threadsafety guarantees of T. +// +// The size of a scoped_ptr is small: +// sizeof(scoped_ptr) == sizeof(C*) +template +class scoped_ptr { + public: + + // The element type + typedef C element_type; + + // Constructor. Defaults to initializing with NULL. + // There is no way to create an uninitialized scoped_ptr. + // The input parameter must be allocated with new. + explicit scoped_ptr(C* p = NULL) : ptr_(p) { } + + // Destructor. If there is a C object, delete it. + // We don't need to test ptr_ == NULL because C++ does that for us. + ~scoped_ptr() { + enum { type_must_be_complete = sizeof(C) }; + delete ptr_; + } + + // Reset. Deletes the current owned object, if any. + // Then takes ownership of a new object, if given. + // this->reset(this->get()) works. + void reset(C* p = NULL) { + if (p != ptr_) { + enum { type_must_be_complete = sizeof(C) }; + delete ptr_; + ptr_ = p; + } + } + + // Accessors to get the owned object. + // operator* and operator-> will assert() if there is no current object. + C& operator*() const { + assert(ptr_ != NULL); + return *ptr_; + } + C* operator->() const { + assert(ptr_ != NULL); + return ptr_; + } + C* get() const { return ptr_; } + + // Comparison operators. + // These return whether two scoped_ptr refer to the same object, not just to + // two different but equal objects. + bool operator==(C* p) const { return ptr_ == p; } + bool operator!=(C* p) const { return ptr_ != p; } + + // Swap two scoped pointers. + void swap(scoped_ptr& p2) { + C* tmp = ptr_; + ptr_ = p2.ptr_; + p2.ptr_ = tmp; + } + + // Release a pointer. + // The return value is the current pointer held by this object. + // If this object holds a NULL pointer, the return value is NULL. + // After this operation, this object will hold a NULL pointer, + // and will not own the object any more. + C* release() { + C* retVal = ptr_; + ptr_ = NULL; + return retVal; + } + + private: + C* ptr_; + + // Forbid comparison of scoped_ptr types. If C2 != C, it totally doesn't + // make sense, and if C2 == C, it still doesn't make sense because you should + // never have the same object owned by two different scoped_ptrs. + template bool operator==(scoped_ptr const& p2) const; + template bool operator!=(scoped_ptr const& p2) const; + + // Disallow evil constructors + scoped_ptr(const scoped_ptr&); + void operator=(const scoped_ptr&); +}; + +// Free functions +template +void swap(scoped_ptr& p1, scoped_ptr& p2) { + p1.swap(p2); +} + +template +bool operator==(C* p1, const scoped_ptr& p2) { + return p1 == p2.get(); +} + +template +bool operator!=(C* p1, const scoped_ptr& p2) { + return p1 != p2.get(); +} + +// scoped_array is like scoped_ptr, except that the caller must allocate +// with new [] and the destructor deletes objects with delete []. +// +// As with scoped_ptr, a scoped_array either points to an object +// or is NULL. 
A scoped_array owns the object that it points to. +// scoped_array is thread-compatible, and once you index into it, +// the returned objects have only the threadsafety guarantees of T. +// +// Size: sizeof(scoped_array) == sizeof(C*) +template +class scoped_array { + public: + + // The element type + typedef C element_type; + + // Constructor. Defaults to intializing with NULL. + // There is no way to create an uninitialized scoped_array. + // The input parameter must be allocated with new []. + explicit scoped_array(C* p = NULL) : array_(p) { } + + // Destructor. If there is a C object, delete it. + // We don't need to test ptr_ == NULL because C++ does that for us. + ~scoped_array() { + enum { type_must_be_complete = sizeof(C) }; + delete[] array_; + } + + // Reset. Deletes the current owned object, if any. + // Then takes ownership of a new object, if given. + // this->reset(this->get()) works. + void reset(C* p = NULL) { + if (p != array_) { + enum { type_must_be_complete = sizeof(C) }; + delete[] array_; + array_ = p; + } + } + + // Get one element of the current object. + // Will assert() if there is no current object, or index i is negative. + C& operator[](ptrdiff_t i) const { + assert(i >= 0); + assert(array_ != NULL); + return array_[i]; + } + + // Get a pointer to the zeroth element of the current object. + // If there is no current object, return NULL. + C* get() const { + return array_; + } + + // Comparison operators. + // These return whether two scoped_array refer to the same object, not just to + // two different but equal objects. + bool operator==(C* p) const { return array_ == p; } + bool operator!=(C* p) const { return array_ != p; } + + // Swap two scoped arrays. + void swap(scoped_array& p2) { + C* tmp = array_; + array_ = p2.array_; + p2.array_ = tmp; + } + + // Release an array. + // The return value is the current pointer held by this object. + // If this object holds a NULL pointer, the return value is NULL. + // After this operation, this object will hold a NULL pointer, + // and will not own the object any more. + C* release() { + C* retVal = array_; + array_ = NULL; + return retVal; + } + + private: + C* array_; + + // Forbid comparison of different scoped_array types. + template bool operator==(scoped_array const& p2) const; + template bool operator!=(scoped_array const& p2) const; + + // Disallow evil constructors + scoped_array(const scoped_array&); + void operator=(const scoped_array&); +}; + +// Free functions +template +void swap(scoped_array& p1, scoped_array& p2) { + p1.swap(p2); +} + +template +bool operator==(C* p1, const scoped_array& p2) { + return p1 == p2.get(); +} + +template +bool operator!=(C* p1, const scoped_array& p2) { + return p1 != p2.get(); +} + +// This class wraps the c library function free() in a class that can be +// passed as a template argument to scoped_ptr_malloc below. +class ScopedPtrMallocFree { + public: + inline void operator()(void* x) const { + free(x); + } +}; + +// scoped_ptr_malloc<> is similar to scoped_ptr<>, but it accepts a +// second template argument, the functor used to free the object. + +template +class scoped_ptr_malloc { + public: + + // The element type + typedef C element_type; + + // Constructor. Defaults to initializing with NULL. + // There is no way to create an uninitialized scoped_ptr. + // The input parameter must be allocated with an allocator that matches the + // Free functor. For the default Free functor, this is malloc, calloc, or + // realloc. 
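// Editorial aside, not part of the upstream patch: a minimal sketch of
// scoped_ptr_malloc guarding a malloc()-allocated buffer with the
// ScopedPtrMallocFree functor declared above (the function name and buffer
// contents are illustrative only).
#include <stdlib.h>
#include <string.h>

#include "common/scoped_ptr.h"

static void ShowScopedPtrMalloc() {
  google_breakpad::scoped_ptr_malloc<char, google_breakpad::ScopedPtrMallocFree>
      buffer(static_cast<char*>(malloc(16)));
  if (buffer.get() != NULL)
    strcpy(buffer.get(), "crash dump");
  // free() runs on the buffer automatically when it goes out of scope.
}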
+ explicit scoped_ptr_malloc(C* p = NULL): ptr_(p) {} + + // Destructor. If there is a C object, call the Free functor. + ~scoped_ptr_malloc() { + reset(); + } + + // Reset. Calls the Free functor on the current owned object, if any. + // Then takes ownership of a new object, if given. + // this->reset(this->get()) works. + void reset(C* p = NULL) { + if (ptr_ != p) { + FreeProc free_proc; + free_proc(ptr_); + ptr_ = p; + } + } + + // Get the current object. + // operator* and operator-> will cause an assert() failure if there is + // no current object. + C& operator*() const { + assert(ptr_ != NULL); + return *ptr_; + } + + C* operator->() const { + assert(ptr_ != NULL); + return ptr_; + } + + C* get() const { + return ptr_; + } + + // Comparison operators. + // These return whether a scoped_ptr_malloc and a plain pointer refer + // to the same object, not just to two different but equal objects. + // For compatibility with the boost-derived implementation, these + // take non-const arguments. + bool operator==(C* p) const { + return ptr_ == p; + } + + bool operator!=(C* p) const { + return ptr_ != p; + } + + // Swap two scoped pointers. + void swap(scoped_ptr_malloc & b) { + C* tmp = b.ptr_; + b.ptr_ = ptr_; + ptr_ = tmp; + } + + // Release a pointer. + // The return value is the current pointer held by this object. + // If this object holds a NULL pointer, the return value is NULL. + // After this operation, this object will hold a NULL pointer, + // and will not own the object any more. + C* release() { + C* tmp = ptr_; + ptr_ = NULL; + return tmp; + } + + private: + C* ptr_; + + // no reason to use these: each scoped_ptr_malloc should have its own object + template + bool operator==(scoped_ptr_malloc const& p) const; + template + bool operator!=(scoped_ptr_malloc const& p) const; + + // Disallow evil constructors + scoped_ptr_malloc(const scoped_ptr_malloc&); + void operator=(const scoped_ptr_malloc&); +}; + +template inline +void swap(scoped_ptr_malloc& a, scoped_ptr_malloc& b) { + a.swap(b); +} + +template inline +bool operator==(C* p, const scoped_ptr_malloc& b) { + return p == b.get(); +} + +template inline +bool operator!=(C* p, const scoped_ptr_malloc& b) { + return p != b.get(); +} + +} // namespace google_breakpad + +#endif // COMMON_SCOPED_PTR_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/simple_string_dictionary.cc b/TMessagesProj/jni/third_party/breakpad/src/common/simple_string_dictionary.cc new file mode 100644 index 0000000000..e0a74ceeb4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/simple_string_dictionary.cc @@ -0,0 +1,45 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include "common/simple_string_dictionary.h" + +namespace google_breakpad { + +namespace { + +// In C++98 (ISO 14882), section 9.5.1 says that a union cannot have a member +// with a non-trivial ctor, copy ctor, dtor, or assignment operator. Use this +// property to ensure that Entry remains POD. +union Compile_Assert { + NonAllocatingMap<1, 1, 1>::Entry Compile_Assert__entry_must_be_pod; +}; + +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/simple_string_dictionary.h b/TMessagesProj/jni/third_party/breakpad/src/common/simple_string_dictionary.h new file mode 100644 index 0000000000..d2ab17fda5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/simple_string_dictionary.h @@ -0,0 +1,260 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef COMMON_SIMPLE_STRING_DICTIONARY_H_ +#define COMMON_SIMPLE_STRING_DICTIONARY_H_ + +#include +#include + +#include "common/basictypes.h" + +namespace google_breakpad { + +// Opaque type for the serialized representation of a NonAllocatingMap. One is +// created in NonAllocatingMap::Serialize and can be deserialized using one of +// the constructors. 
+struct SerializedNonAllocatingMap; + +// NonAllocatingMap is an implementation of a map/dictionary collection that +// uses a fixed amount of storage, so that it does not perform any dynamic +// allocations for its operations. +// +// The actual map storage (the Entry) is guaranteed to be POD, so that it can +// be transmitted over various IPC mechanisms. +// +// The template parameters control the amount of storage used for the key, +// value, and map. The KeySize and ValueSize are measured in bytes, not glyphs, +// and includes space for a \0 byte. This gives space for KeySize-1 and +// ValueSize-1 characters in an entry. NumEntries is the total number of +// entries that will fit in the map. +template +class NonAllocatingMap { + public: + // Constant and publicly accessible versions of the template parameters. + static const size_t key_size = KeySize; + static const size_t value_size = ValueSize; + static const size_t num_entries = NumEntries; + + // An Entry object is a single entry in the map. If the key is a 0-length + // NUL-terminated string, the entry is empty. + struct Entry { + char key[KeySize]; + char value[ValueSize]; + + bool is_active() const { + return key[0] != '\0'; + } + }; + + // An Iterator can be used to iterate over all the active entries in a + // NonAllocatingMap. + class Iterator { + public: + explicit Iterator(const NonAllocatingMap& map) + : map_(map), + current_(0) { + } + + // Returns the next entry in the map, or NULL if at the end of the + // collection. + const Entry* Next() { + while (current_ < map_.num_entries) { + const Entry* entry = &map_.entries_[current_++]; + if (entry->is_active()) { + return entry; + } + } + return NULL; + } + + private: + const NonAllocatingMap& map_; + size_t current_; + + DISALLOW_COPY_AND_ASSIGN(Iterator); + }; + + NonAllocatingMap() : entries_() { + } + + NonAllocatingMap(const NonAllocatingMap& other) { + *this = other; + } + + NonAllocatingMap& operator=(const NonAllocatingMap& other) { + assert(other.key_size == key_size); + assert(other.value_size == value_size); + assert(other.num_entries == num_entries); + if (other.key_size == key_size && other.value_size == value_size && + other.num_entries == num_entries) { + memcpy(entries_, other.entries_, sizeof(entries_)); + } + return *this; + } + + // Constructs a map from its serialized form. |map| should be the out + // parameter from Serialize() and |size| should be its return value. + NonAllocatingMap(const SerializedNonAllocatingMap* map, size_t size) { + assert(size == sizeof(entries_)); + if (size == sizeof(entries_)) { + memcpy(entries_, map, size); + } + } + + // Returns the number of active key/value pairs. The upper limit for this + // is NumEntries. + size_t GetCount() const { + size_t count = 0; + for (size_t i = 0; i < num_entries; ++i) { + if (entries_[i].is_active()) { + ++count; + } + } + return count; + } + + // Given |key|, returns its corresponding |value|. |key| must not be NULL. If + // the key is not found, NULL is returned. + const char* GetValueForKey(const char* key) const { + assert(key); + if (!key) + return NULL; + + const Entry* entry = GetConstEntryForKey(key); + if (!entry) + return NULL; + + return entry->value; + } + + // Stores |value| into |key|, replacing the existing value if |key| is + // already present. |key| must not be NULL. If |value| is NULL, the key is + // removed from the map. If there is no more space in the map, then the + // operation silently fails. 
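// Editorial aside, not part of the upstream patch: typical use of this map
// through the SimpleStringDictionary typedef declared at the bottom of this
// header. None of these calls allocate; every entry lives in the fixed-size
// entries_ array.
#include <assert.h>
#include <string.h>

#include "common/simple_string_dictionary.h"

static void ShowSimpleStringDictionary() {
  google_breakpad::SimpleStringDictionary dict;
  dict.SetKeyValue("product", "breakpad");
  assert(strcmp(dict.GetValueForKey("product"), "breakpad") == 0);
  dict.SetKeyValue("product", NULL);            // A NULL value removes the key.
  assert(dict.GetValueForKey("product") == NULL);
}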
+ void SetKeyValue(const char* key, const char* value) { + if (!value) { + RemoveKey(key); + return; + } + + assert(key); + if (!key) + return; + + // Key must not be an empty string. + assert(key[0] != '\0'); + if (key[0] == '\0') + return; + + Entry* entry = GetEntryForKey(key); + + // If it does not yet exist, attempt to insert it. + if (!entry) { + for (size_t i = 0; i < num_entries; ++i) { + if (!entries_[i].is_active()) { + entry = &entries_[i]; + + strncpy(entry->key, key, key_size); + entry->key[key_size - 1] = '\0'; + + break; + } + } + } + + // If the map is out of space, entry will be NULL. + if (!entry) + return; + +#ifndef NDEBUG + // Sanity check that the key only appears once. + int count = 0; + for (size_t i = 0; i < num_entries; ++i) { + if (strncmp(entries_[i].key, key, key_size) == 0) + ++count; + } + assert(count == 1); +#endif + + strncpy(entry->value, value, value_size); + entry->value[value_size - 1] = '\0'; + } + + // Given |key|, removes any associated value. |key| must not be NULL. If + // the key is not found, this is a noop. + void RemoveKey(const char* key) { + assert(key); + if (!key) + return; + + Entry* entry = GetEntryForKey(key); + if (entry) { + entry->key[0] = '\0'; + entry->value[0] = '\0'; + } + +#ifndef NDEBUG + assert(GetEntryForKey(key) == NULL); +#endif + } + + // Places a serialized version of the map into |map| and returns the size. + // Both of these should be passed to the deserializing constructor. Note that + // the serialized |map| is scoped to the lifetime of the non-serialized + // instance of this class. The |map| can be copied across IPC boundaries. + size_t Serialize(const SerializedNonAllocatingMap** map) const { + *map = reinterpret_cast(entries_); + return sizeof(entries_); + } + + private: + const Entry* GetConstEntryForKey(const char* key) const { + for (size_t i = 0; i < num_entries; ++i) { + if (strncmp(key, entries_[i].key, key_size) == 0) { + return &entries_[i]; + } + } + return NULL; + } + + Entry* GetEntryForKey(const char* key) { + return const_cast(GetConstEntryForKey(key)); + } + + Entry entries_[NumEntries]; +}; + +// For historical reasons this specialized version is available with the same +// size factors as a previous implementation. +typedef NonAllocatingMap<256, 256, 64> SimpleStringDictionary; + +} // namespace google_breakpad + +#endif // COMMON_SIMPLE_STRING_DICTIONARY_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/simple_string_dictionary_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/simple_string_dictionary_unittest.cc new file mode 100644 index 0000000000..34f4b9ef5d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/simple_string_dictionary_unittest.cc @@ -0,0 +1,308 @@ +// Copyright (c) 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include "breakpad_googletest_includes.h" +#include "common/simple_string_dictionary.h" + +namespace google_breakpad { + +TEST(NonAllocatingMapTest, Entry) { + typedef NonAllocatingMap<5, 9, 15> TestMap; + TestMap map; + + const TestMap::Entry* entry = TestMap::Iterator(map).Next(); + EXPECT_FALSE(entry); + + // Try setting a key/value and then verify. + map.SetKeyValue("key1", "value1"); + entry = TestMap::Iterator(map).Next(); + ASSERT_TRUE(entry); + EXPECT_STREQ(entry->key, "key1"); + EXPECT_STREQ(entry->value, "value1"); + + // Try setting a new value. + map.SetKeyValue("key1", "value3"); + EXPECT_STREQ(entry->value, "value3"); + + // Make sure the key didn't change. + EXPECT_STREQ(entry->key, "key1"); + + // Clear the entry and verify the key and value are empty strings. + map.RemoveKey("key1"); + EXPECT_FALSE(entry->is_active()); + EXPECT_EQ(strlen(entry->key), 0u); + EXPECT_EQ(strlen(entry->value), 0u); +} + +TEST(NonAllocatingMapTest, SimpleStringDictionary) { + // Make a new dictionary + SimpleStringDictionary dict; + + // Set three distinct values on three keys + dict.SetKeyValue("key1", "value1"); + dict.SetKeyValue("key2", "value2"); + dict.SetKeyValue("key3", "value3"); + + EXPECT_NE(dict.GetValueForKey("key1"), "value1"); + EXPECT_NE(dict.GetValueForKey("key2"), "value2"); + EXPECT_NE(dict.GetValueForKey("key3"), "value3"); + EXPECT_EQ(dict.GetCount(), 3u); + // try an unknown key + EXPECT_FALSE(dict.GetValueForKey("key4")); + + // Remove a key + dict.RemoveKey("key3"); + + // Now make sure it's not there anymore + EXPECT_FALSE(dict.GetValueForKey("key3")); + + // Remove by setting value to NULL + dict.SetKeyValue("key2", NULL); + + // Now make sure it's not there anymore + EXPECT_FALSE(dict.GetValueForKey("key2")); +} + +TEST(NonAllocatingMapTest, CopyAndAssign) { + NonAllocatingMap<10, 10, 10> map; + map.SetKeyValue("one", "a"); + map.SetKeyValue("two", "b"); + map.SetKeyValue("three", "c"); + map.RemoveKey("two"); + EXPECT_EQ(2u, map.GetCount()); + + // Test copy. + NonAllocatingMap<10, 10, 10> map_copy(map); + EXPECT_EQ(2u, map_copy.GetCount()); + EXPECT_STREQ("a", map_copy.GetValueForKey("one")); + EXPECT_STREQ("c", map_copy.GetValueForKey("three")); + map_copy.SetKeyValue("four", "d"); + EXPECT_STREQ("d", map_copy.GetValueForKey("four")); + EXPECT_FALSE(map.GetValueForKey("four")); + + // Test assign. 
+ NonAllocatingMap<10, 10, 10> map_assign; + map_assign = map; + EXPECT_EQ(2u, map_assign.GetCount()); + EXPECT_STREQ("a", map_assign.GetValueForKey("one")); + EXPECT_STREQ("c", map_assign.GetValueForKey("three")); + map_assign.SetKeyValue("four", "d"); + EXPECT_STREQ("d", map_assign.GetValueForKey("four")); + EXPECT_FALSE(map.GetValueForKey("four")); + + map.RemoveKey("one"); + EXPECT_FALSE(map.GetValueForKey("one")); + EXPECT_STREQ("a", map_copy.GetValueForKey("one")); + EXPECT_STREQ("a", map_assign.GetValueForKey("one")); +} + +// Add a bunch of values to the dictionary, remove some entries in the middle, +// and then add more. +TEST(NonAllocatingMapTest, Iterator) { + SimpleStringDictionary* dict = new SimpleStringDictionary(); + ASSERT_TRUE(dict); + + char key[SimpleStringDictionary::key_size]; + char value[SimpleStringDictionary::value_size]; + + const int kDictionaryCapacity = SimpleStringDictionary::num_entries; + const int kPartitionIndex = kDictionaryCapacity - 5; + + // We assume at least this size in the tests below + ASSERT_GE(kDictionaryCapacity, 64); + + // We'll keep track of the number of key/value pairs we think should + // be in the dictionary + int expectedDictionarySize = 0; + + // Set a bunch of key/value pairs like key0/value0, key1/value1, ... + for (int i = 0; i < kPartitionIndex; ++i) { + sprintf(key, "key%d", i); + sprintf(value, "value%d", i); + dict->SetKeyValue(key, value); + } + expectedDictionarySize = kPartitionIndex; + + // set a couple of the keys twice (with the same value) - should be nop + dict->SetKeyValue("key2", "value2"); + dict->SetKeyValue("key4", "value4"); + dict->SetKeyValue("key15", "value15"); + + // Remove some random elements in the middle + dict->RemoveKey("key7"); + dict->RemoveKey("key18"); + dict->RemoveKey("key23"); + dict->RemoveKey("key31"); + expectedDictionarySize -= 4; // we just removed four key/value pairs + + // Set some more key/value pairs like key59/value59, key60/value60, ... + for (int i = kPartitionIndex; i < kDictionaryCapacity; ++i) { + sprintf(key, "key%d", i); + sprintf(value, "value%d", i); + dict->SetKeyValue(key, value); + } + expectedDictionarySize += kDictionaryCapacity - kPartitionIndex; + + // Now create an iterator on the dictionary + SimpleStringDictionary::Iterator iter(*dict); + + // We then verify that it iterates through exactly the number of + // key/value pairs we expect, and that they match one-for-one with what we + // would expect. The ordering of the iteration does not matter... 
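// Editorial aside, not part of the upstream patch: the iteration pattern the
// verification loop below is built on, reduced to its essentials. Next()
// skips inactive slots and returns NULL once every active entry has been
// visited, so the body runs exactly once per live key/value pair.
#include <stdio.h>

#include "common/simple_string_dictionary.h"

static void PrintAllEntries(const google_breakpad::SimpleStringDictionary& dict) {
  google_breakpad::SimpleStringDictionary::Iterator iter(dict);
  const google_breakpad::SimpleStringDictionary::Entry* entry;
  while ((entry = iter.Next()) != NULL) {
    printf("%s=%s\n", entry->key, entry->value);
  }
}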
+ + // used to keep track of number of occurrences found for key/value pairs + int count[kDictionaryCapacity]; + memset(count, 0, sizeof(count)); + + int totalCount = 0; + + const SimpleStringDictionary::Entry* entry; + while ((entry = iter.Next())) { + totalCount++; + + // Extract keyNumber from a string of the form key + int keyNumber; + sscanf(entry->key, "key%d", &keyNumber); + + // Extract valueNumber from a string of the form value + int valueNumber; + sscanf(entry->value, "value%d", &valueNumber); + + // The value number should equal the key number since that's how we set them + EXPECT_EQ(keyNumber, valueNumber); + + // Key and value numbers should be in proper range: + // 0 <= keyNumber < kDictionaryCapacity + bool isKeyInGoodRange = + (keyNumber >= 0 && keyNumber < kDictionaryCapacity); + bool isValueInGoodRange = + (valueNumber >= 0 && valueNumber < kDictionaryCapacity); + EXPECT_TRUE(isKeyInGoodRange); + EXPECT_TRUE(isValueInGoodRange); + + if (isKeyInGoodRange && isValueInGoodRange) { + ++count[keyNumber]; + } + } + + // Make sure each of the key/value pairs showed up exactly one time, except + // for the ones which we removed. + for (size_t i = 0; i < kDictionaryCapacity; ++i) { + // Skip over key7, key18, key23, and key31, since we removed them + if (!(i == 7 || i == 18 || i == 23 || i == 31)) { + EXPECT_EQ(count[i], 1); + } + } + + // Make sure the number of iterations matches the expected dictionary size. + EXPECT_EQ(totalCount, expectedDictionarySize); +} + + +TEST(NonAllocatingMapTest, AddRemove) { + NonAllocatingMap<5, 7, 6> map; + map.SetKeyValue("rob", "ert"); + map.SetKeyValue("mike", "pink"); + map.SetKeyValue("mark", "allays"); + + EXPECT_EQ(3u, map.GetCount()); + EXPECT_STREQ("ert", map.GetValueForKey("rob")); + EXPECT_STREQ("pink", map.GetValueForKey("mike")); + EXPECT_STREQ("allays", map.GetValueForKey("mark")); + + map.RemoveKey("mike"); + + EXPECT_EQ(2u, map.GetCount()); + EXPECT_FALSE(map.GetValueForKey("mike")); + + map.SetKeyValue("mark", "mal"); + EXPECT_EQ(2u, map.GetCount()); + EXPECT_STREQ("mal", map.GetValueForKey("mark")); + + map.RemoveKey("mark"); + EXPECT_EQ(1u, map.GetCount()); + EXPECT_FALSE(map.GetValueForKey("mark")); +} + +TEST(NonAllocatingMapTest, Serialize) { + typedef NonAllocatingMap<4, 5, 7> TestMap; + TestMap map; + map.SetKeyValue("one", "abc"); + map.SetKeyValue("two", "def"); + map.SetKeyValue("tre", "hig"); + + EXPECT_STREQ("abc", map.GetValueForKey("one")); + EXPECT_STREQ("def", map.GetValueForKey("two")); + EXPECT_STREQ("hig", map.GetValueForKey("tre")); + + const SerializedNonAllocatingMap* serialized; + size_t size = map.Serialize(&serialized); + + SerializedNonAllocatingMap* serialized_copy = + reinterpret_cast(malloc(size)); + ASSERT_TRUE(serialized_copy); + memcpy(serialized_copy, serialized, size); + + TestMap deserialized(serialized_copy, size); + free(serialized_copy); + + EXPECT_EQ(3u, deserialized.GetCount()); + EXPECT_STREQ("abc", deserialized.GetValueForKey("one")); + EXPECT_STREQ("def", deserialized.GetValueForKey("two")); + EXPECT_STREQ("hig", deserialized.GetValueForKey("tre")); +} + +// Running out of space shouldn't crash. 
+TEST(NonAllocatingMapTest, OutOfSpace) { + NonAllocatingMap<3, 2, 2> map; + map.SetKeyValue("a", "1"); + map.SetKeyValue("b", "2"); + map.SetKeyValue("c", "3"); + EXPECT_EQ(2u, map.GetCount()); + EXPECT_FALSE(map.GetValueForKey("c")); +} + +#ifndef NDEBUG + +TEST(NonAllocatingMapTest, NullKey) { + NonAllocatingMap<4, 6, 6> map; + ASSERT_DEATH(map.SetKeyValue(NULL, "hello"), ""); + + map.SetKeyValue("hi", "there"); + ASSERT_DEATH(map.GetValueForKey(NULL), ""); + EXPECT_STREQ("there", map.GetValueForKey("hi")); + + ASSERT_DEATH(map.GetValueForKey(NULL), ""); + map.RemoveKey("hi"); + EXPECT_EQ(0u, map.GetCount()); +} + +#endif // !NDEBUG + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/solaris/dump_symbols.cc b/TMessagesProj/jni/third_party/breakpad/src/common/solaris/dump_symbols.cc new file mode 100644 index 0000000000..168d0b2879 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/solaris/dump_symbols.cc @@ -0,0 +1,681 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: Alfred Peng + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include "common/scoped_ptr.h" +#include "common/solaris/dump_symbols.h" +#include "common/solaris/file_id.h" +#include "common/solaris/guid_creator.h" + +// This namespace contains helper functions. +namespace { + +using std::make_pair; + +#if defined(_LP64) +typedef Elf64_Sym Elf_Sym; +#else +typedef Elf32_Sym Elf_Sym; +#endif + +// Symbol table entry from stabs. Sun CC specific. +struct slist { + // String table index. + unsigned int n_strx; + // Stab type. + unsigned char n_type; + char n_other; + short n_desc; + unsigned long n_value; +}; + +// Symbol table entry +struct SymbolEntry { + // Offset from the start of the file. + GElf_Addr offset; + // Function size. + GElf_Word size; +}; + +// Infomation of a line. +struct LineInfo { + // Offset from start of the function. + // Load from stab symbol. 
+ GElf_Off rva_to_func; + // Offset from base of the loading binary. + GElf_Off rva_to_base; + // Size of the line. + // The first line: equals to rva_to_func. + // The other lines: the difference of rva_to_func of the line and + // rva_to_func of the previous N_SLINE. + uint32_t size; + // Line number. + uint32_t line_num; +}; + +// Information of a function. +struct FuncInfo { + // Name of the function. + const char *name; + // Offset from the base of the loading address. + GElf_Off rva_to_base; + // Virtual address of the function. + // Load from stab symbol. + GElf_Addr addr; + // Size of the function. + // Equal to rva_to_func of the last function line. + uint32_t size; + // Total size of stack parameters. + uint32_t stack_param_size; + // Line information array. + std::vector line_info; +}; + +// Information of a source file. +struct SourceFileInfo { + // Name of the source file. + const char *name; + // Starting address of the source file. + GElf_Addr addr; + // Id of the source file. + int source_id; + // Functions information. + std::vector func_info; +}; + +struct CompareString { + bool operator()(const char *s1, const char *s2) const { + return strcmp(s1, s2) < 0; + } +}; + +typedef std::map SymbolMap; + +// Information of a symbol table. +// This is the root of all types of symbol. +struct SymbolInfo { + std::vector source_file_info; + // Symbols information. + SymbolMap symbol_entries; +}; + +// Stab section name. +const char *kStabName = ".stab"; + +// Stab str section name. +const char *kStabStrName = ".stabstr"; + +// Symtab section name. +const char *kSymtabName = ".symtab"; + +// Strtab section name. +const char *kStrtabName = ".strtab"; + +// Default buffer lenght for demangle. +const int demangleLen = 20000; + +// Offset to the string table. +uint64_t stringOffset = 0; + +// Update the offset to the start of the string index of the next +// object module for every N_ENDM stabs. +inline void RecalculateOffset(struct slist* cur_list, char *stabstr) { + while ((--cur_list)->n_strx == 0) ; + stringOffset += cur_list->n_strx; + + char *temp = stabstr + stringOffset; + while (*temp != '\0') { + ++stringOffset; + ++temp; + } + // Skip the extra '\0' + ++stringOffset; +} + +// Demangle using demangle library on Solaris. +std::string Demangle(const char *mangled) { + int status = 0; + std::string str(mangled); + char *demangled = (char *)malloc(demangleLen); + + if (!demangled) { + fprintf(stderr, "no enough memory.\n"); + goto out; + } + + if ((status = cplus_demangle(mangled, demangled, demangleLen)) == + DEMANGLE_ESPACE) { + fprintf(stderr, "incorrect demangle.\n"); + goto out; + } + + str = demangled; + free(demangled); + +out: + return str; +} + +bool WriteFormat(int fd, const char *fmt, ...) 
{ + va_list list; + char buffer[4096]; + ssize_t expected, written; + va_start(list, fmt); + vsnprintf(buffer, sizeof(buffer), fmt, list); + expected = strlen(buffer); + written = write(fd, buffer, strlen(buffer)); + va_end(list); + return expected == written; +} + +bool IsValidElf(const GElf_Ehdr *elf_header) { + return memcmp(elf_header, ELFMAG, SELFMAG) == 0; +} + +static bool FindSectionByName(Elf *elf, const char *name, + int shstrndx, + GElf_Shdr *shdr) { + assert(name != NULL); + + if (strlen(name) == 0) + return false; + + Elf_Scn *scn = NULL; + + while ((scn = elf_nextscn(elf, scn)) != NULL) { + if (gelf_getshdr(scn, shdr) == (GElf_Shdr *)0) { + fprintf(stderr, "failed to read section header: %s\n", elf_errmsg(0)); + return false; + } + + const char *section_name = elf_strptr(elf, shstrndx, shdr->sh_name); + if (!section_name) { + fprintf(stderr, "Section name error: %s\n", elf_errmsg(-1)); + continue; + } + + if (strcmp(section_name, name) == 0) + return true; + } + + return false; +} + +// The parameter size is used for FPO-optimized code, and +// this is all tied up with the debugging data for Windows x86. +// Set it to 0 on Solaris. +int LoadStackParamSize(struct slist *list, + struct slist *list_end, + struct FuncInfo *func_info) { + struct slist *cur_list = list; + int step = 1; + while (cur_list < list_end && cur_list->n_type == N_PSYM) { + ++cur_list; + ++step; + } + + func_info->stack_param_size = 0; + return step; +} + +int LoadLineInfo(struct slist *list, + struct slist *list_end, + struct FuncInfo *func_info) { + struct slist *cur_list = list; + do { + // Skip non line information. + while (cur_list < list_end && cur_list->n_type != N_SLINE) { + // Only exit when got another function, or source file, or end stab. + if (cur_list->n_type == N_FUN || cur_list->n_type == N_SO || + cur_list->n_type == N_ENDM) { + return cur_list - list; + } + ++cur_list; + } + struct LineInfo line; + while (cur_list < list_end && cur_list->n_type == N_SLINE) { + line.rva_to_func = cur_list->n_value; + // n_desc is a signed short + line.line_num = (unsigned short)cur_list->n_desc; + func_info->line_info.push_back(line); + ++cur_list; + } + if (cur_list == list_end && cur_list->n_type == N_ENDM) + break; + } while (list < list_end); + + return cur_list - list; +} + +int LoadFuncSymbols(struct slist *list, + struct slist *list_end, + char *stabstr, + GElf_Word base, + struct SourceFileInfo *source_file_info) { + struct slist *cur_list = list; + assert(cur_list->n_type == N_SO); + ++cur_list; + + source_file_info->func_info.clear(); + while (cur_list < list_end) { + // Go until the function symbol. + while (cur_list < list_end && cur_list->n_type != N_FUN) { + if (cur_list->n_type == N_SO) { + return cur_list - list; + } + ++cur_list; + if (cur_list->n_type == N_ENDM) + RecalculateOffset(cur_list, stabstr); + continue; + } + while (cur_list->n_type == N_FUN) { + struct FuncInfo func_info; + memset(&func_info, 0, sizeof(func_info)); + func_info.name = stabstr + cur_list->n_strx + stringOffset; + // The n_value field is always 0 from stab generated by Sun CC. + // TODO(Alfred): Find the correct value. + func_info.addr = cur_list->n_value; + ++cur_list; + if (cur_list->n_type == N_ENDM) + RecalculateOffset(cur_list, stabstr); + if (cur_list->n_type != N_ESYM && cur_list->n_type != N_ISYM && + cur_list->n_type != N_FUN) { + // Stack parameter size. + cur_list += LoadStackParamSize(cur_list, list_end, &func_info); + // Line info. 
+        cur_list += LoadLineInfo(cur_list, list_end, &func_info);
+      }
+      if (cur_list < list_end && cur_list->n_type == N_ENDM)
+        RecalculateOffset(cur_list, stabstr);
+      // Functions in this module should have address bigger than the module
+      // starting address.
+      //
+      // These two values are always 0 with Sun CC.
+      // TODO(Alfred): Get the correct value or remove the condition statement.
+      if (func_info.addr >= source_file_info->addr) {
+        source_file_info->func_info.push_back(func_info);
+      }
+    }
+  }
+  return cur_list - list;
+}
+
+// Compute size and rva information based on symbols loaded from stab section.
+bool ComputeSizeAndRVA(struct SymbolInfo *symbols) {
+  std::vector<struct SourceFileInfo> *sorted_files =
+    &(symbols->source_file_info);
+  SymbolMap *symbol_entries = &(symbols->symbol_entries);
+  for (size_t i = 0; i < sorted_files->size(); ++i) {
+    struct SourceFileInfo &source_file = (*sorted_files)[i];
+    std::vector<struct FuncInfo> *sorted_functions = &(source_file.func_info);
+    int func_size = sorted_functions->size();
+
+    for (size_t j = 0; j < func_size; ++j) {
+      struct FuncInfo &func_info = (*sorted_functions)[j];
+      int line_count = func_info.line_info.size();
+
+      // Discard the ending part of the name.
+      std::string func_name(func_info.name);
+      std::string::size_type last_colon = func_name.find_first_of(':');
+      if (last_colon != std::string::npos)
+        func_name = func_name.substr(0, last_colon);
+
+      // Find the symbol offset from the loading address and size by name.
+      SymbolMap::const_iterator it = symbol_entries->find(func_name.c_str());
+      if (it->second) {
+        func_info.rva_to_base = it->second->offset;
+        func_info.size = (line_count == 0) ? 0 : it->second->size;
+      } else {
+        func_info.rva_to_base = 0;
+        func_info.size = 0;
+      }
+
+      // Compute function and line size.
+      for (size_t k = 0; k < line_count; ++k) {
+        struct LineInfo &line_info = func_info.line_info[k];
+
+        line_info.rva_to_base = line_info.rva_to_func + func_info.rva_to_base;
+        if (k == line_count - 1) {
+          line_info.size = func_info.size - line_info.rva_to_func;
+        } else {
+          struct LineInfo &next_line = func_info.line_info[k + 1];
+          line_info.size = next_line.rva_to_func - line_info.rva_to_func;
+        }
+      }  // for each line.
+    }  // for each function.
+  }  // for each source file.
+  for (SymbolMap::iterator it = symbol_entries->begin();
+       it != symbol_entries->end(); ++it) {
+    free(it->second);
+  }
+  return true;
+}
+
+bool LoadAllSymbols(const GElf_Shdr *stab_section,
+                    const GElf_Shdr *stabstr_section,
+                    GElf_Word base,
+                    struct SymbolInfo *symbols) {
+  if (stab_section == NULL || stabstr_section == NULL)
+    return false;
+
+  char *stabstr =
+    reinterpret_cast<char *>(stabstr_section->sh_offset + base);
+  struct slist *lists =
+    reinterpret_cast<struct slist *>(stab_section->sh_offset + base);
+  int nstab = stab_section->sh_size / sizeof(struct slist);
+  int source_id = 0;
+
+  // First pass, load all symbols from the object file.
+  for (int i = 0; i < nstab; ) {
+    int step = 1;
+    struct slist *cur_list = lists + i;
+    if (cur_list->n_type == N_SO) {
+      // FUNC
+      struct SourceFileInfo source_file_info;
+      source_file_info.name = stabstr + cur_list->n_strx + stringOffset;
+      // The n_value field is always 0 from stab generated by Sun CC.
+      // TODO(Alfred): Find the correct value.
+      source_file_info.addr = cur_list->n_value;
+      if (strchr(source_file_info.name, '.'))
+        source_file_info.source_id = source_id++;
+      else
+        source_file_info.source_id = -1;
+      step = LoadFuncSymbols(cur_list, lists + nstab - 1, stabstr,
+                             base, &source_file_info);
+      symbols->source_file_info.push_back(source_file_info);
+    }
+    i += step;
+  }
+  // Second pass, compute the size of functions and lines.
+  return ComputeSizeAndRVA(symbols);
+}
+
+bool LoadSymbols(Elf *elf, GElf_Ehdr *elf_header, struct SymbolInfo *symbols,
+                 void *obj_base) {
+  GElf_Word base = reinterpret_cast<GElf_Word>(obj_base);
+
+  const GElf_Shdr *sections =
+    reinterpret_cast<GElf_Shdr *>(elf_header->e_shoff + base);
+  GElf_Shdr stab_section;
+  if (!FindSectionByName(elf, kStabName, elf_header->e_shstrndx,
+                         &stab_section)) {
+    fprintf(stderr, "Stab section not found.\n");
+    return false;
+  }
+  GElf_Shdr stabstr_section;
+  if (!FindSectionByName(elf, kStabStrName, elf_header->e_shstrndx,
+                         &stabstr_section)) {
+    fprintf(stderr, "Stabstr section not found.\n");
+    return false;
+  }
+  GElf_Shdr symtab_section;
+  if (!FindSectionByName(elf, kSymtabName, elf_header->e_shstrndx,
+                         &symtab_section)) {
+    fprintf(stderr, "Symtab section not found.\n");
+    return false;
+  }
+  GElf_Shdr strtab_section;
+  if (!FindSectionByName(elf, kStrtabName, elf_header->e_shstrndx,
+                         &strtab_section)) {
+    fprintf(stderr, "Strtab section not found.\n");
+    return false;
+  }
+
+  Elf_Sym *symbol = (Elf_Sym *)((char *)base + symtab_section.sh_offset);
+  for (int i = 0; i < symtab_section.sh_size/symtab_section.sh_entsize; ++i) {
+    struct SymbolEntry *symbol_entry =
+      (struct SymbolEntry *)malloc(sizeof(struct SymbolEntry));
+    const char *name = reinterpret_cast<char *>(
+        strtab_section.sh_offset + (GElf_Word)base + symbol->st_name);
+    symbol_entry->offset = symbol->st_value;
+    symbol_entry->size = symbol->st_size;
+    symbols->symbol_entries.insert(make_pair(name, symbol_entry));
+    ++symbol;
+  }
+
+
+  // Load symbols.
+ return LoadAllSymbols(&stab_section, &stabstr_section, base, symbols); +} + +bool WriteModuleInfo(int fd, GElf_Half arch, const std::string &obj_file) { + const char *arch_name = NULL; + if (arch == EM_386) + arch_name = "x86"; + else if (arch == EM_X86_64) + arch_name = "x86_64"; + else if (arch == EM_SPARC32PLUS) + arch_name = "SPARC_32+"; + else { + printf("Please add more ARCH support\n"); + return false; + } + + unsigned char identifier[16]; + google_breakpad::FileID file_id(obj_file.c_str()); + if (file_id.ElfFileIdentifier(identifier)) { + char identifier_str[40]; + file_id.ConvertIdentifierToString(identifier, + identifier_str, sizeof(identifier_str)); + std::string filename = obj_file; + size_t slash_pos = obj_file.find_last_of("/"); + if (slash_pos != std::string::npos) + filename = obj_file.substr(slash_pos + 1); + return WriteFormat(fd, "MODULE solaris %s %s %s\n", arch_name, + identifier_str, filename.c_str()); + } + return false; +} + +bool WriteSourceFileInfo(int fd, const struct SymbolInfo &symbols) { + for (size_t i = 0; i < symbols.source_file_info.size(); ++i) { + if (symbols.source_file_info[i].source_id != -1) { + const char *name = symbols.source_file_info[i].name; + if (!WriteFormat(fd, "FILE %d %s\n", + symbols.source_file_info[i].source_id, name)) + return false; + } + } + return true; +} + +bool WriteOneFunction(int fd, int source_id, + const struct FuncInfo &func_info){ + // Discard the ending part of the name. + std::string func_name(func_info.name); + std::string::size_type last_colon = func_name.find_last_of(':'); + if (last_colon != std::string::npos) + func_name = func_name.substr(0, last_colon); + func_name = Demangle(func_name.c_str()); + + if (func_info.size <= 0) + return true; + + // rva_to_base could be unsigned long(32 bit) or unsigned long long(64 bit). + if (WriteFormat(fd, "FUNC %llx %x %d %s\n", + (long long)func_info.rva_to_base, + func_info.size, + func_info.stack_param_size, + func_name.c_str())) { + for (size_t i = 0; i < func_info.line_info.size(); ++i) { + const struct LineInfo &line_info = func_info.line_info[i]; + if (line_info.line_num == 0) + return true; + if (!WriteFormat(fd, "%llx %x %d %d\n", + (long long)line_info.rva_to_base, + line_info.size, + line_info.line_num, + source_id)) + return false; + } + return true; + } + return false; +} + +bool WriteFunctionInfo(int fd, const struct SymbolInfo &symbols) { + for (size_t i = 0; i < symbols.source_file_info.size(); ++i) { + const struct SourceFileInfo &file_info = symbols.source_file_info[i]; + for (size_t j = 0; j < file_info.func_info.size(); ++j) { + const struct FuncInfo &func_info = file_info.func_info[j]; + if (!WriteOneFunction(fd, file_info.source_id, func_info)) + return false; + } + } + return true; +} + +bool DumpStabSymbols(int fd, const struct SymbolInfo &symbols) { + return WriteSourceFileInfo(fd, symbols) && + WriteFunctionInfo(fd, symbols); +} + +// +// FDWrapper +// +// Wrapper class to make sure opened file is closed. +// +class FDWrapper { + public: + explicit FDWrapper(int fd) : + fd_(fd) { + } + ~FDWrapper() { + if (fd_ != -1) + close(fd_); + } + int get() { + return fd_; + } + int release() { + int fd = fd_; + fd_ = -1; + return fd; + } + private: + int fd_; +}; + +// +// MmapWrapper +// +// Wrapper class to make sure mapped regions are unmapped. 
+// +class MmapWrapper { + public: + MmapWrapper(void *mapped_address, size_t mapped_size) : + base_(mapped_address), size_(mapped_size) { + } + ~MmapWrapper() { + if (base_ != NULL) { + assert(size_ > 0); + munmap((char *)base_, size_); + } + } + void release() { + base_ = NULL; + size_ = 0; + } + + private: + void *base_; + size_t size_; +}; + +} // namespace + +namespace google_breakpad { + +class AutoElfEnder { + public: + AutoElfEnder(Elf *elf) : elf_(elf) {} + ~AutoElfEnder() { if (elf_) elf_end(elf_); } + private: + Elf *elf_; +}; + + +bool DumpSymbols::WriteSymbolFile(const std::string &obj_file, int sym_fd) { + if (elf_version(EV_CURRENT) == EV_NONE) { + fprintf(stderr, "elf_version() failed: %s\n", elf_errmsg(0)); + return false; + } + + int obj_fd = open(obj_file.c_str(), O_RDONLY); + if (obj_fd < 0) + return false; + FDWrapper obj_fd_wrapper(obj_fd); + struct stat st; + if (fstat(obj_fd, &st) != 0 && st.st_size <= 0) + return false; + void *obj_base = mmap(NULL, st.st_size, + PROT_READ, MAP_PRIVATE, obj_fd, 0); + if (obj_base == MAP_FAILED) + return false; + MmapWrapper map_wrapper(obj_base, st.st_size); + GElf_Ehdr elf_header; + Elf *elf = elf_begin(obj_fd, ELF_C_READ, NULL); + AutoElfEnder elfEnder(elf); + + if (gelf_getehdr(elf, &elf_header) == (GElf_Ehdr *)NULL) { + fprintf(stderr, "failed to read elf header: %s\n", elf_errmsg(-1)); + return false; + } + + if (!IsValidElf(&elf_header)) { + fprintf(stderr, "header magic doesn't match\n"); + return false; + } + struct SymbolInfo symbols; + if (!LoadSymbols(elf, &elf_header, &symbols, obj_base)) + return false; + // Write to symbol file. + if (WriteModuleInfo(sym_fd, elf_header.e_machine, obj_file) && + DumpStabSymbols(sym_fd, symbols)) + return true; + + return false; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/solaris/dump_symbols.h b/TMessagesProj/jni/third_party/breakpad/src/common/solaris/dump_symbols.h new file mode 100644 index 0000000000..7f4baadcfc --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/solaris/dump_symbols.h @@ -0,0 +1,49 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// dump_symbols.cc: Implements a Solaris stab debugging format dumper. +// +// Author: Alfred Peng + +#ifndef COMMON_SOLARIS_DUMP_SYMBOLS_H__ +#define COMMON_SOLARIS_DUMP_SYMBOLS_H__ + +#include + +namespace google_breakpad { + +class DumpSymbols { + public: + bool WriteSymbolFile(const std::string &obj_file, + int sym_fd); +}; + +} // namespace google_breakpad + +#endif // COMMON_SOLARIS_DUMP_SYMBOLS_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/solaris/file_id.cc b/TMessagesProj/jni/third_party/breakpad/src/common/solaris/file_id.cc new file mode 100644 index 0000000000..643a14629d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/solaris/file_id.cc @@ -0,0 +1,197 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// file_id.cc: Return a unique identifier for a file +// +// See file_id.h for documentation +// +// Author: Alfred Peng + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "common/md5.h" +#include "common/solaris/file_id.h" +#include "common/solaris/message_output.h" +#include "google_breakpad/common/minidump_format.h" + +namespace google_breakpad { + +class AutoElfEnder { + public: + AutoElfEnder(Elf *elf) : elf_(elf) {} + ~AutoElfEnder() { if (elf_) elf_end(elf_); } + private: + Elf *elf_; +}; + +// Find the text section in elf object file. +// Return the section start address and the size. 
+static bool FindElfTextSection(int fd, const void *elf_base, + const void **text_start, + int *text_size) { + assert(text_start); + assert(text_size); + + *text_start = NULL; + *text_size = 0; + + if (elf_version(EV_CURRENT) == EV_NONE) { + print_message2(2, "elf_version() failed: %s\n", elf_errmsg(0)); + return false; + } + + GElf_Ehdr elf_header; + lseek(fd, 0L, 0); + Elf *elf = elf_begin(fd, ELF_C_READ, NULL); + AutoElfEnder elfEnder(elf); + + if (gelf_getehdr(elf, &elf_header) == (GElf_Ehdr *)NULL) { + print_message2(2, "failed to read elf header: %s\n", elf_errmsg(-1)); + return false; + } + + if (elf_header.e_ident[EI_MAG0] != ELFMAG0 || + elf_header.e_ident[EI_MAG1] != ELFMAG1 || + elf_header.e_ident[EI_MAG2] != ELFMAG2 || + elf_header.e_ident[EI_MAG3] != ELFMAG3) { + print_message1(2, "header magic doesn't match\n"); + return false; + } + + static const char kTextSectionName[] = ".text"; + const GElf_Shdr *text_section = NULL; + Elf_Scn *scn = NULL; + GElf_Shdr shdr; + + while ((scn = elf_nextscn(elf, scn)) != NULL) { + if (gelf_getshdr(scn, &shdr) == (GElf_Shdr *)0) { + print_message2(2, "failed to read section header: %s\n", elf_errmsg(0)); + return false; + } + + if (shdr.sh_type == SHT_PROGBITS) { + const char *section_name = elf_strptr(elf, elf_header.e_shstrndx, + shdr.sh_name); + if (!section_name) { + print_message2(2, "Section name error: %s\n", elf_errmsg(-1)); + continue; + } + + if (strcmp(section_name, kTextSectionName) == 0) { + text_section = &shdr; + break; + } + } + } + if (text_section != NULL && text_section->sh_size > 0) { + *text_start = (char *)elf_base + text_section->sh_offset; + *text_size = text_section->sh_size; + return true; + } + + return false; +} + +FileID::FileID(const char *path) { + strcpy(path_, path); +} + +class AutoCloser { + public: + AutoCloser(int fd) : fd_(fd) {} + ~AutoCloser() { if (fd_) close(fd_); } + private: + int fd_; +}; + +bool FileID::ElfFileIdentifier(unsigned char identifier[16]) { + int fd = 0; + if ((fd = open(path_, O_RDONLY)) < 0) + return false; + + AutoCloser autocloser(fd); + struct stat st; + if (fstat(fd, &st) != 0 || st.st_size <= 0) + return false; + + void *base = mmap(NULL, st.st_size, PROT_READ, MAP_PRIVATE, fd, 0); + if (base == MAP_FAILED) + return false; + + bool success = false; + const void *text_section = NULL; + int text_size = 0; + + if (FindElfTextSection(fd, base, &text_section, &text_size)) { + MD5Context md5; + MD5Init(&md5); + MD5Update(&md5, (const unsigned char *)text_section, text_size); + MD5Final(identifier, &md5); + success = true; + } + + munmap((char *)base, st.st_size); + return success; +} + +// static +bool FileID::ConvertIdentifierToString(const unsigned char identifier[16], + char *buffer, int buffer_length) { + if (buffer_length < 34) + return false; + + int buffer_idx = 0; + for (int idx = 0; idx < 16; ++idx) { + int hi = (identifier[idx] >> 4) & 0x0F; + int lo = (identifier[idx]) & 0x0F; + + buffer[buffer_idx++] = (hi >= 10) ? 'A' + hi - 10 : '0' + hi; + buffer[buffer_idx++] = (lo >= 10) ? 'A' + lo - 10 : '0' + lo; + } + + // Add an extra "0" by the end. 
+ buffer[buffer_idx++] = '0'; + + // NULL terminate + buffer[buffer_idx] = 0; + + return true; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/solaris/file_id.h b/TMessagesProj/jni/third_party/breakpad/src/common/solaris/file_id.h new file mode 100644 index 0000000000..375e857512 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/solaris/file_id.h @@ -0,0 +1,66 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// file_id.h: Return a unique identifier for a file +// +// Author: Alfred Peng + +#ifndef COMMON_SOLARIS_FILE_ID_H__ +#define COMMON_SOLARIS_FILE_ID_H__ + +#include + +namespace google_breakpad { + +class FileID { + public: + FileID(const char *path); + ~FileID() {}; + + // Load the identifier for the elf file path specified in the constructor into + // |identifier|. Return false if the identifier could not be created for the + // file. + // The current implementation will return the MD5 hash of the file's bytes. + bool ElfFileIdentifier(unsigned char identifier[16]); + + // Convert the |identifier| data to a NULL terminated string. The string will + // be formatted as a MDCVInfoPDB70 struct. + // The |buffer| should be at least 34 bytes long to receive all of the data + // and termination. Shorter buffers will return false. + static bool ConvertIdentifierToString(const unsigned char identifier[16], + char *buffer, int buffer_length); + + private: + // Storage for the path specified + char path_[PATH_MAX]; +}; + +} // namespace google_breakpad + +#endif // COMMON_SOLARIS_FILE_ID_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/solaris/guid_creator.cc b/TMessagesProj/jni/third_party/breakpad/src/common/solaris/guid_creator.cc new file mode 100644 index 0000000000..c4d58e0aae --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/solaris/guid_creator.cc @@ -0,0 +1,84 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: Alfred Peng + +#include +#include + +#include +#include +#include + +#include "common/solaris/guid_creator.h" + +// +// GUIDGenerator +// +// This class is used to generate random GUID. +// Currently use random number to generate a GUID. This should be OK since +// we don't expect crash to happen very offen. +// +class GUIDGenerator { + public: + GUIDGenerator() { + srandom(time(NULL)); + } + + bool CreateGUID(GUID *guid) const { + guid->data1 = random(); + guid->data2 = (uint16_t)(random()); + guid->data3 = (uint16_t)(random()); + *reinterpret_cast(&guid->data4[0]) = random(); + *reinterpret_cast(&guid->data4[4]) = random(); + return true; + } +}; + +// Guid generator. +const GUIDGenerator kGuidGenerator; + +bool CreateGUID(GUID *guid) { + return kGuidGenerator.CreateGUID(guid); +}; + +// Parse guid to string. +bool GUIDToString(const GUID *guid, char *buf, int buf_len) { + // Should allow more space the the max length of GUID. + assert(buf_len > kGUIDStringLength); + int num = snprintf(buf, buf_len, kGUIDFormatString, + guid->data1, guid->data2, guid->data3, + *reinterpret_cast(&(guid->data4[0])), + *reinterpret_cast(&(guid->data4[4]))); + if (num != kGUIDStringLength) + return false; + + buf[num] = '\0'; + return true; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/solaris/guid_creator.h b/TMessagesProj/jni/third_party/breakpad/src/common/solaris/guid_creator.h new file mode 100644 index 0000000000..4aee3a1c24 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/solaris/guid_creator.h @@ -0,0 +1,50 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: Alfred Peng + +#ifndef COMMON_SOLARIS_GUID_CREATOR_H__ +#define COMMON_SOLARIS_GUID_CREATOR_H__ + +#include "google_breakpad/common/minidump_format.h" + +typedef MDGUID GUID; + +// Format string for parsing GUID. +#define kGUIDFormatString "%08x-%04x-%04x-%08x-%08x" +// Length of GUID string. Don't count the ending '\0'. +#define kGUIDStringLength 36 + +// Create a guid. +bool CreateGUID(GUID *guid); + +// Get the string from guid. +bool GUIDToString(const GUID *guid, char *buf, int buf_len); + +#endif // COMMON_SOLARIS_GUID_CREATOR_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/solaris/message_output.h b/TMessagesProj/jni/third_party/breakpad/src/common/solaris/message_output.h new file mode 100644 index 0000000000..3e3b1d4652 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/solaris/message_output.h @@ -0,0 +1,54 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: Alfred Peng + +#ifndef COMMON_SOLARIS_MESSAGE_OUTPUT_H__ +#define COMMON_SOLARIS_MESSAGE_OUTPUT_H__ + +namespace google_breakpad { + +const int MESSAGE_MAX = 1000; + +// Message output macros. +// snprintf doesn't operate heap on Solaris, while printf and fprintf do. +// Use snprintf here to avoid heap allocation. +#define print_message1(std, message) \ + char buffer[MESSAGE_MAX]; \ + int len = snprintf(buffer, MESSAGE_MAX, message); \ + write(std, buffer, len) + +#define print_message2(std, message, para) \ + char buffer[MESSAGE_MAX]; \ + int len = snprintf(buffer, MESSAGE_MAX, message, para); \ + write(std, buffer, len); + +} // namespace google_breakpad + +#endif // COMMON_SOLARIS_MESSAGE_OUTPUT_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/stabs_reader.cc b/TMessagesProj/jni/third_party/breakpad/src/common/stabs_reader.cc new file mode 100644 index 0000000000..6019fc7ee2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/stabs_reader.cc @@ -0,0 +1,315 @@ +// Copyright (c) 2010 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// This file implements the google_breakpad::StabsReader class. +// See stabs_reader.h. 
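For orientation only: a minimal, hypothetical driver showing how the Solaris dumper classes vendored above are meant to fit together. FileID::ElfFileIdentifier() produces the MD5-based identifier that WriteModuleInfo() puts on the MODULE line, and DumpSymbols::WriteSymbolFile() writes the Breakpad symbol file. This sketch is not part of the patch; the input and output paths are made up.

// Sketch only -- not part of the vendored Breakpad sources.
#include <fcntl.h>
#include <unistd.h>

#include <cstdio>
#include <string>

#include "common/solaris/dump_symbols.h"
#include "common/solaris/file_id.h"

int main() {
  const std::string obj_file = "/tmp/libexample.so";  // hypothetical input

  // Identifier of the binary, derived from an MD5 of its .text section.
  unsigned char identifier[16];
  char identifier_str[40];  // ConvertIdentifierToString needs >= 34 bytes
  google_breakpad::FileID file_id(obj_file.c_str());
  if (file_id.ElfFileIdentifier(identifier)) {
    google_breakpad::FileID::ConvertIdentifierToString(
        identifier, identifier_str, sizeof(identifier_str));
    std::printf("identifier: %s\n", identifier_str);
  }

  // Dump the STABS debug information into a Breakpad .sym file.
  int sym_fd = open("/tmp/libexample.sym", O_WRONLY | O_CREAT | O_TRUNC, 0644);
  if (sym_fd < 0)
    return 1;
  google_breakpad::DumpSymbols dumper;
  bool ok = dumper.WriteSymbolFile(obj_file, sym_fd);
  close(sym_fd);
  return ok ? 0 : 1;
}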
+ +#include "common/stabs_reader.h" + +#include +#include +#include + +#include + +#include "common/using_std_string.h" + +using std::vector; + +namespace google_breakpad { + +StabsReader::EntryIterator::EntryIterator(const ByteBuffer *buffer, + bool big_endian, size_t value_size) + : value_size_(value_size), cursor_(buffer, big_endian) { + // Actually, we could handle weird sizes just fine, but they're + // probably mistakes --- expressed in bits, say. + assert(value_size == 4 || value_size == 8); + entry_.index = 0; + Fetch(); +} + +void StabsReader::EntryIterator::Fetch() { + cursor_ + .Read(4, false, &entry_.name_offset) + .Read(1, false, &entry_.type) + .Read(1, false, &entry_.other) + .Read(2, false, &entry_.descriptor) + .Read(value_size_, false, &entry_.value); + entry_.at_end = !cursor_; +} + +StabsReader::StabsReader(const uint8_t *stab, size_t stab_size, + const uint8_t *stabstr, size_t stabstr_size, + bool big_endian, size_t value_size, bool unitized, + StabsHandler *handler) + : entries_(stab, stab_size), + strings_(stabstr, stabstr_size), + iterator_(&entries_, big_endian, value_size), + unitized_(unitized), + handler_(handler), + string_offset_(0), + next_cu_string_offset_(0), + current_source_file_(NULL) { } + +const char *StabsReader::SymbolString() { + ptrdiff_t offset = string_offset_ + iterator_->name_offset; + if (offset < 0 || (size_t) offset >= strings_.Size()) { + handler_->Warning("symbol %d: name offset outside the string section\n", + iterator_->index); + // Return our null string, to keep our promise about all names being + // taken from the string section. + offset = 0; + } + return reinterpret_cast(strings_.start + offset); +} + +bool StabsReader::Process() { + while (!iterator_->at_end) { + if (iterator_->type == N_SO) { + if (! ProcessCompilationUnit()) + return false; + } else if (iterator_->type == N_UNDF && unitized_) { + // In unitized STABS (including Linux STABS, and pretty much anything + // else that puts STABS data in sections), at the head of each + // compilation unit's entries there is an N_UNDF stab giving the + // number of symbols in the compilation unit, and the number of bytes + // that compilation unit's strings take up in the .stabstr section. + // Each CU's strings are separate; the n_strx values are offsets + // within the current CU's portion of the .stabstr section. + // + // As an optimization, the GNU linker combines all the + // compilation units into one, with a single N_UNDF at the + // beginning. However, other linkers, like Gold, do not perform + // this optimization. + string_offset_ = next_cu_string_offset_; + next_cu_string_offset_ = iterator_->value; + ++iterator_; + } +#if defined(HAVE_MACH_O_NLIST_H) + // Export symbols in Mach-O binaries look like this. + // This is necessary in order to be able to dump symbols + // from OS X system libraries. + else if ((iterator_->type & N_STAB) == 0 && + (iterator_->type & N_TYPE) == N_SECT) { + ProcessExtern(); + } +#endif + else { + ++iterator_; + } + } + return true; +} + +bool StabsReader::ProcessCompilationUnit() { + assert(!iterator_->at_end && iterator_->type == N_SO); + + // There may be an N_SO entry whose name ends with a slash, + // indicating the directory in which the compilation occurred. + // The build directory defaults to NULL. 
+ const char *build_directory = NULL; + { + const char *name = SymbolString(); + if (name[0] && name[strlen(name) - 1] == '/') { + build_directory = name; + ++iterator_; + } + } + + // We expect to see an N_SO entry with a filename next, indicating + // the start of the compilation unit. + { + if (iterator_->at_end || iterator_->type != N_SO) + return true; + const char *name = SymbolString(); + if (name[0] == '\0') { + // This seems to be a stray end-of-compilation-unit marker; + // consume it, but don't report the end, since we didn't see a + // beginning. + ++iterator_; + return true; + } + current_source_file_ = name; + } + + if (! handler_->StartCompilationUnit(current_source_file_, + iterator_->value, + build_directory)) + return false; + + ++iterator_; + + // The STABS documentation says that some compilers may emit + // additional N_SO entries with names immediately following the + // first, and that they should be ignored. However, the original + // Breakpad STABS reader doesn't ignore them, so we won't either. + + // Process the body of the compilation unit, up to the next N_SO. + while (!iterator_->at_end && iterator_->type != N_SO) { + if (iterator_->type == N_FUN) { + if (! ProcessFunction()) + return false; + } else if (iterator_->type == N_SLINE) { + // Mac OS X STABS place SLINE records before functions. + Line line; + // The value of an N_SLINE entry that appears outside a function is + // the absolute address of the line. + line.address = iterator_->value; + line.filename = current_source_file_; + // The n_desc of a N_SLINE entry is the line number. It's a + // signed 16-bit field; line numbers from 32768 to 65535 are + // stored as n-65536. + line.number = (uint16_t) iterator_->descriptor; + queued_lines_.push_back(line); + ++iterator_; + } else if (iterator_->type == N_SOL) { + current_source_file_ = SymbolString(); + ++iterator_; + } else { + // Ignore anything else. + ++iterator_; + } + } + + // An N_SO with an empty name indicates the end of the compilation + // unit. Default to zero. + uint64_t ending_address = 0; + if (!iterator_->at_end) { + assert(iterator_->type == N_SO); + const char *name = SymbolString(); + if (name[0] == '\0') { + ending_address = iterator_->value; + ++iterator_; + } + } + + if (! handler_->EndCompilationUnit(ending_address)) + return false; + + queued_lines_.clear(); + + return true; +} + +bool StabsReader::ProcessFunction() { + assert(!iterator_->at_end && iterator_->type == N_FUN); + + uint64_t function_address = iterator_->value; + // The STABS string for an N_FUN entry is the name of the function, + // followed by a colon, followed by type information for the + // function. We want to pass the name alone to StartFunction. + const char *stab_string = SymbolString(); + const char *name_end = strchr(stab_string, ':'); + if (! name_end) + name_end = stab_string + strlen(stab_string); + string name(stab_string, name_end - stab_string); + if (! handler_->StartFunction(name, function_address)) + return false; + ++iterator_; + + // If there were any SLINE records given before the function, report them now. + for (vector::const_iterator it = queued_lines_.begin(); + it != queued_lines_.end(); it++) { + if (!handler_->Line(it->address, it->filename, it->number)) + return false; + } + queued_lines_.clear(); + + while (!iterator_->at_end) { + if (iterator_->type == N_SO || iterator_->type == N_FUN) + break; + else if (iterator_->type == N_SLINE) { + // The value of an N_SLINE entry is the offset of the line from + // the function's start address. 
+ uint64_t line_address = function_address + iterator_->value; + // The n_desc of a N_SLINE entry is the line number. It's a + // signed 16-bit field; line numbers from 32768 to 65535 are + // stored as n-65536. + uint16_t line_number = iterator_->descriptor; + if (! handler_->Line(line_address, current_source_file_, line_number)) + return false; + ++iterator_; + } else if (iterator_->type == N_SOL) { + current_source_file_ = SymbolString(); + ++iterator_; + } else + // Ignore anything else. + ++iterator_; + } + + // We've reached the end of the function. See if we can figure out its + // ending address. + uint64_t ending_address = 0; + if (!iterator_->at_end) { + assert(iterator_->type == N_SO || iterator_->type == N_FUN); + if (iterator_->type == N_FUN) { + const char *symbol_name = SymbolString(); + if (symbol_name[0] == '\0') { + // An N_FUN entry with no name is a terminator for this function; + // its value is the function's size. + ending_address = function_address + iterator_->value; + ++iterator_; + } else { + // An N_FUN entry with a name is the next function, and we can take + // its value as our ending address. Don't advance the iterator, as + // we'll use this symbol to start the next function as well. + ending_address = iterator_->value; + } + } else { + // An N_SO entry could be an end-of-compilation-unit marker, or the + // start of the next compilation unit, but in either case, its value + // is our ending address. We don't advance the iterator; + // ProcessCompilationUnit will decide what to do with this symbol. + ending_address = iterator_->value; + } + } + + if (! handler_->EndFunction(ending_address)) + return false; + + return true; +} + +bool StabsReader::ProcessExtern() { +#if defined(HAVE_MACH_O_NLIST_H) + assert(!iterator_->at_end && + (iterator_->type & N_STAB) == 0 && + (iterator_->type & N_TYPE) == N_SECT); +#endif + + // TODO(mark): only do symbols in the text section? + if (!handler_->Extern(SymbolString(), iterator_->value)) + return false; + + ++iterator_; + return true; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/stabs_reader.h b/TMessagesProj/jni/third_party/breakpad/src/common/stabs_reader.h new file mode 100644 index 0000000000..d89afc0013 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/stabs_reader.h @@ -0,0 +1,326 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2010 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// stabs_reader.h: Define StabsReader, a parser for STABS debugging +// information. A description of the STABS debugging format can be +// found at: +// +// http://sourceware.org/gdb/current/onlinedocs/stabs_toc.html +// +// The comments here assume you understand the format. +// +// This parser can handle big-endian and little-endian data, and the symbol +// values may be either 32 or 64 bits long. It handles both STABS in +// sections (as used on Linux) and STABS appearing directly in an +// a.out-like symbol table (as used in Darwin OS X Mach-O files). + +#ifndef COMMON_STABS_READER_H__ +#define COMMON_STABS_READER_H__ + +#include +#include + +#ifdef HAVE_CONFIG_H +#include +#endif + +#ifdef HAVE_A_OUT_H +#include +#endif +#ifdef HAVE_MACH_O_NLIST_H +#include +#endif + +#include +#include + +#include "common/byte_cursor.h" +#include "common/using_std_string.h" + +namespace google_breakpad { + +class StabsHandler; + +class StabsReader { + public: + // Create a reader for the STABS debug information whose .stab section is + // being traversed by ITERATOR, and whose .stabstr section is referred to + // by STRINGS. The reader will call the member functions of HANDLER to + // report the information it finds, when the reader's 'Process' member + // function is called. + // + // BIG_ENDIAN should be true if the entries in the .stab section are in + // big-endian form, or false if they are in little-endian form. + // + // VALUE_SIZE should be either 4 or 8, indicating the size of the 'value' + // field in each entry in bytes. + // + // UNITIZED should be true if the STABS data is stored in units with + // N_UNDF headers. This is usually the case for STABS stored in sections, + // like .stab/.stabstr, and usually not the case for STABS stored in the + // actual symbol table; UNITIZED should be true when parsing Linux stabs, + // false when parsing Mac OS X STABS. For details, see: + // http://sourceware.org/gdb/current/onlinedocs/stabs/Stab-Section-Basics.html + // + // Note that, in ELF, the .stabstr section should be found using the + // 'sh_link' field of the .stab section header, not by name. + StabsReader(const uint8_t *stab, size_t stab_size, + const uint8_t *stabstr, size_t stabstr_size, + bool big_endian, size_t value_size, bool unitized, + StabsHandler *handler); + + // Process the STABS data, calling the handler's member functions to + // report what we find. While the handler functions return true, + // continue to process until we reach the end of the section. If we + // processed the entire section and all handlers returned true, + // return true. If any handler returned false, return false. + // + // This is only meant to be called once per StabsReader instance; + // resuming a prior processing pass that stopped abruptly isn't supported. + bool Process(); + + private: + + // An class for walking arrays of STABS entries. 
This isolates the main + // STABS reader from the exact format (size; endianness) of the entries + // themselves. + class EntryIterator { + public: + // The contents of a STABS entry, adjusted for the host's endianness, + // word size, 'struct nlist' layout, and so on. + struct Entry { + // True if this iterator has reached the end of the entry array. When + // this is set, the other members of this structure are not valid. + bool at_end; + + // The number of this entry within the list. + size_t index; + + // The current entry's name offset. This is the offset within the + // current compilation unit's strings, as establish by the N_UNDF entries. + size_t name_offset; + + // The current entry's type, 'other' field, descriptor, and value. + unsigned char type; + unsigned char other; + short descriptor; + uint64_t value; + }; + + // Create a EntryIterator walking the entries in BUFFER. Treat the + // entries as big-endian if BIG_ENDIAN is true, as little-endian + // otherwise. Assume each entry has a 'value' field whose size is + // VALUE_SIZE. + // + // This would not be terribly clean to extend to other format variations, + // but it's enough to handle Linux and Mac, and we'd like STABS to die + // anyway. + // + // For the record: on Linux, STABS entry values are always 32 bits, + // regardless of the architecture address size (don't ask me why); on + // Mac, they are 32 or 64 bits long. Oddly, the section header's entry + // size for a Linux ELF .stab section varies according to the ELF class + // from 12 to 20 even as the actual entries remain unchanged. + EntryIterator(const ByteBuffer *buffer, bool big_endian, size_t value_size); + + // Move to the next entry. This function's behavior is undefined if + // at_end() is true when it is called. + EntryIterator &operator++() { Fetch(); entry_.index++; return *this; } + + // Dereferencing this iterator produces a reference to an Entry structure + // that holds the current entry's values. The entry is owned by this + // EntryIterator, and will be invalidated at the next call to operator++. + const Entry &operator*() const { return entry_; } + const Entry *operator->() const { return &entry_; } + + private: + // Read the STABS entry at cursor_, and set entry_ appropriately. + void Fetch(); + + // The size of entries' value field, in bytes. + size_t value_size_; + + // A byte cursor traversing buffer_. + ByteCursor cursor_; + + // Values for the entry this iterator refers to. + Entry entry_; + }; + + // A source line, saved to be reported later. + struct Line { + uint64_t address; + const char *filename; + int number; + }; + + // Return the name of the current symbol. + const char *SymbolString(); + + // Process a compilation unit starting at symbol_. Return true + // to continue processing, or false to abort. + bool ProcessCompilationUnit(); + + // Process a function in current_source_file_ starting at symbol_. + // Return true to continue processing, or false to abort. + bool ProcessFunction(); + + // Process an exported function symbol. + // Return true to continue processing, or false to abort. + bool ProcessExtern(); + + // The STABS entries being parsed. + ByteBuffer entries_; + + // The string section to which the entries refer. + ByteBuffer strings_; + + // The iterator walking the STABS entries. + EntryIterator iterator_; + + // True if the data is "unitized"; see the explanation in the comment for + // StabsReader::StabsReader. 
+ bool unitized_; + + StabsHandler *handler_; + + // The offset of the current compilation unit's strings within stabstr_. + size_t string_offset_; + + // The value string_offset_ should have for the next compilation unit, + // as established by N_UNDF entries. + size_t next_cu_string_offset_; + + // The current source file name. + const char *current_source_file_; + + // Mac OS X STABS place SLINE records before functions; we accumulate a + // vector of these until we see the FUN record, and then report them + // after the StartFunction call. + std::vector queued_lines_; +}; + +// Consumer-provided callback structure for the STABS reader. Clients +// of the STABS reader provide an instance of this structure. The +// reader then invokes the member functions of that instance to report +// the information it finds. +// +// The default definitions of the member functions do nothing, and return +// true so processing will continue. +class StabsHandler { + public: + StabsHandler() { } + virtual ~StabsHandler() { } + + // Some general notes about the handler callback functions: + + // Processing proceeds until the end of the .stabs section, or until + // one of these functions returns false. + + // The addresses given are as reported in the STABS info, without + // regard for whether the module may be loaded at different + // addresses at different times (a shared library, say). When + // processing STABS from an ELF shared library, the addresses given + // all assume the library is loaded at its nominal load address. + // They are *not* offsets from the nominal load address. If you + // want offsets, you must subtract off the library's nominal load + // address. + + // The arguments to these functions named FILENAME are all + // references to strings stored in the .stabstr section. Because + // both the Linux and Solaris linkers factor out duplicate strings + // from the .stabstr section, the consumer can assume that if two + // FILENAME values are different addresses, they represent different + // file names. + // + // Thus, it's safe to use (say) std::map, which does + // string address comparisons, not string content comparisons. + // Since all the strings are in same array of characters --- the + // .stabstr section --- comparing their addresses produces + // predictable, if not lexicographically meaningful, results. + + // Begin processing a compilation unit whose main source file is + // named FILENAME, and whose base address is ADDRESS. If + // BUILD_DIRECTORY is non-NULL, it is the name of the build + // directory in which the compilation occurred. + virtual bool StartCompilationUnit(const char *filename, uint64_t address, + const char *build_directory) { + return true; + } + + // Finish processing the compilation unit. If ADDRESS is non-zero, + // it is the ending address of the compilation unit. If ADDRESS is + // zero, then the compilation unit's ending address is not + // available, and the consumer must infer it by other means. + virtual bool EndCompilationUnit(uint64_t address) { return true; } + + // Begin processing a function named NAME, whose starting address is + // ADDRESS. This function belongs to the compilation unit that was + // most recently started but not ended. + // + // Note that, unlike filenames, NAME is not a pointer into the + // .stabstr section; this is because the name as it appears in the + // STABS data is followed by type information. The value passed to + // StartFunction is the function name alone. 
+ // + // In languages that use name mangling, like C++, NAME is mangled. + virtual bool StartFunction(const string &name, uint64_t address) { + return true; + } + + // Finish processing the function. If ADDRESS is non-zero, it is + // the ending address for the function. If ADDRESS is zero, then + // the function's ending address is not available, and the consumer + // must infer it by other means. + virtual bool EndFunction(uint64_t address) { return true; } + + // Report that the code at ADDRESS is attributable to line NUMBER of + // the source file named FILENAME. The caller must infer the ending + // address of the line. + virtual bool Line(uint64_t address, const char *filename, int number) { + return true; + } + + // Report that an exported function NAME is present at ADDRESS. + // The size of the function is unknown. + virtual bool Extern(const string &name, uint64_t address) { + return true; + } + + // Report a warning. FORMAT is a printf-like format string, + // specifying how to format the subsequent arguments. + virtual void Warning(const char *format, ...) = 0; +}; + +} // namespace google_breakpad + +#endif // COMMON_STABS_READER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/stabs_reader_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/stabs_reader_unittest.cc new file mode 100644 index 0000000000..a84da1c4ca --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/stabs_reader_unittest.cc @@ -0,0 +1,611 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// stabs_reader_unittest.cc: Unit tests for google_breakpad::StabsReader. 
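Before the unit tests, a minimal, hypothetical consumer of the StabsReader/StabsHandler API declared in stabs_reader.h above: subclass StabsHandler, override the callbacks of interest (only Warning() is pure virtual; the others default to returning true), and call Process(). The DumpStabs() helper, its parameters, and the printed output are illustrative only and not part of the patch.

// Sketch only -- not part of the vendored Breakpad sources.
#include <cstdarg>
#include <cstddef>
#include <cstdint>
#include <cstdio>

#include "common/stabs_reader.h"

class PrintingStabsHandler : public google_breakpad::StabsHandler {
 public:
  bool StartCompilationUnit(const char *filename, uint64_t address,
                            const char *build_directory) {
    std::printf("CU   %s (in %s) @ 0x%llx\n", filename,
                build_directory ? build_directory : "?",
                (unsigned long long) address);
    return true;
  }
  bool StartFunction(const string &name, uint64_t address) {
    std::printf("FUNC %s @ 0x%llx\n", name.c_str(),
                (unsigned long long) address);
    return true;
  }
  bool Line(uint64_t address, const char *filename, int number) {
    std::printf("LINE %s:%d @ 0x%llx\n", filename, number,
                (unsigned long long) address);
    return true;
  }
  void Warning(const char *format, ...) {
    va_list args;
    va_start(args, format);
    std::vfprintf(stderr, format, args);
    va_end(args);
  }
};

// stab/stabstr are assumed to hold the raw .stab/.stabstr section contents.
// false/4/true = little-endian entries, 32-bit value field, unitized STABS
// (section STABS with N_UNDF headers), per the constructor comments above.
bool DumpStabs(const uint8_t *stab, size_t stab_size,
               const uint8_t *stabstr, size_t stabstr_size) {
  PrintingStabsHandler handler;
  google_breakpad::StabsReader reader(stab, stab_size, stabstr, stabstr_size,
                                      false /* big_endian */,
                                      4 /* value_size */,
                                      true /* unitized */, &handler);
  return reader.Process();
}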
+ +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/stabs_reader.h" +#include "common/test_assembler.h" +#include "common/using_std_string.h" + +using ::testing::Eq; +using ::testing::InSequence; +using ::testing::Return; +using ::testing::StrEq; +using ::testing::Test; +using ::testing::_; +using google_breakpad::StabsHandler; +using google_breakpad::StabsReader; +using google_breakpad::test_assembler::Label; +using google_breakpad::test_assembler::Section; +using google_breakpad::test_assembler::kBigEndian; +using google_breakpad::test_assembler::kLittleEndian; +using std::map; + +namespace { + +// A StringAssembler is a class for generating .stabstr sections to present +// as input to the STABS parser. +class StringAssembler: public Section { + public: + StringAssembler() : in_cu_(false) { StartCU(); } + + // Add the string S to this StringAssembler, and return the string's + // offset within this compilation unit's strings. If S has been added + // already, this returns the offset of its first instance. + size_t Add(const string &s) { + map::iterator it = added_.find(s); + if (it != added_.end()) + return it->second; + size_t offset = Size() - cu_start_; + AppendCString(s); + added_[s] = offset; + return offset; + } + + // Start a fresh compilation unit string collection. + void StartCU() { + // Ignore duplicate calls to StartCU. Our test data don't always call + // StartCU at all, meaning that our constructor has to take care of it, + // meaning that tests that *do* call StartCU call it twice at the + // beginning. This is not worth smoothing out. + if (in_cu_) return; + + added_.clear(); + cu_start_ = Size(); + + // Each compilation unit's strings start with an empty string. + AppendCString(""); + added_[""] = 0; + + in_cu_ = true; + } + + // Finish off the current CU's strings. + size_t EndCU() { + assert(in_cu_); + in_cu_ = false; + return Size() - cu_start_; + } + + private: + // The offset of the start of this compilation unit's strings. + size_t cu_start_; + + // True if we're in a CU. + bool in_cu_; + + // A map from the strings that have been added to this section to + // their starting indices within their compilation unit. + map added_; +}; + +// A StabsAssembler is a class for generating .stab sections to present as +// test input for the STABS parser. +class StabsAssembler: public Section { + public: + // Create a StabsAssembler that uses StringAssembler for its strings. + StabsAssembler(StringAssembler *string_assembler) + : Section(string_assembler->endianness()), + string_assembler_(string_assembler), + value_size_(0), + entry_count_(0), + cu_header_(NULL) { } + ~StabsAssembler() { assert(!cu_header_); } + + // Accessor and setter for value_size_. + size_t value_size() const { return value_size_; } + StabsAssembler &set_value_size(size_t value_size) { + value_size_ = value_size; + return *this; + } + + // Append a STAB entry to the end of this section with the given + // characteristics. NAME is the offset of this entry's name string within + // its compilation unit's portion of the .stabstr section; this can be a + // value generated by a StringAssembler. Return a reference to this + // StabsAssembler. 
+ StabsAssembler &Stab(uint8_t type, uint8_t other, Label descriptor, + Label value, Label name) { + D32(name); + D8(type); + D8(other); + D16(descriptor); + Append(endianness(), value_size_, value); + entry_count_++; + return *this; + } + + // As above, but automatically add NAME to our StringAssembler. + StabsAssembler &Stab(uint8_t type, uint8_t other, Label descriptor, + Label value, const string &name) { + return Stab(type, other, descriptor, value, string_assembler_->Add(name)); + } + + // Start a compilation unit named NAME, with an N_UNDF symbol to start + // it, and its own portion of the string section. Return a reference to + // this StabsAssembler. + StabsAssembler &StartCU(const string &name) { + assert(!cu_header_); + cu_header_ = new CUHeader; + string_assembler_->StartCU(); + entry_count_ = 0; + return Stab(N_UNDF, 0, + cu_header_->final_entry_count, + cu_header_->final_string_size, + string_assembler_->Add(name)); + } + + // Close off the current compilation unit. Return a reference to this + // StabsAssembler. + StabsAssembler &EndCU() { + assert(cu_header_); + cu_header_->final_entry_count = entry_count_; + cu_header_->final_string_size = string_assembler_->EndCU(); + delete cu_header_; + cu_header_ = NULL; + return *this; + } + + private: + // Data used in a compilation unit header STAB that we won't know until + // we've finished the compilation unit. + struct CUHeader { + // The final number of entries this compilation unit will hold. + Label final_entry_count; + + // The final size of this compilation unit's strings. + Label final_string_size; + }; + + // The strings for our STABS entries. + StringAssembler *string_assembler_; + + // The size of the 'value' field of stabs entries in this section. + size_t value_size_; + + // The number of entries in this compilation unit so far. + size_t entry_count_; + + // Header labels for this compilation unit, if we've started one but not + // finished it. + CUHeader *cu_header_; +}; + +class MockStabsReaderHandler: public StabsHandler { + public: + MOCK_METHOD3(StartCompilationUnit, + bool(const char *, uint64_t, const char *)); + MOCK_METHOD1(EndCompilationUnit, bool(uint64_t)); + MOCK_METHOD2(StartFunction, bool(const string &, uint64_t)); + MOCK_METHOD1(EndFunction, bool(uint64_t)); + MOCK_METHOD3(Line, bool(uint64_t, const char *, int)); + MOCK_METHOD2(Extern, bool(const string &, uint64_t)); + void Warning(const char *format, ...) { MockWarning(format); } + MOCK_METHOD1(MockWarning, void(const char *)); +}; + +struct StabsFixture { + StabsFixture() : stabs(&strings), unitized(true) { } + + // Create a StabsReader to parse the mock stabs data in stabs and + // strings, and pass the parsed information to mock_handler. Use the + // endianness and value size of stabs to parse the data. If all goes + // well, return the result of calling the reader's Process member + // function. Otherwise, return false. + bool ApplyHandlerToMockStabsData() { + string stabs_contents, stabstr_contents; + if (!stabs.GetContents(&stabs_contents) || + !strings.GetContents(&stabstr_contents)) + return false; + + // Run the parser on the test input, passing whatever we find to HANDLER. 
+ StabsReader reader( + reinterpret_cast(stabs_contents.data()), + stabs_contents.size(), + reinterpret_cast(stabstr_contents.data()), + stabstr_contents.size(), + stabs.endianness() == kBigEndian, stabs.value_size(), unitized, + &mock_handler); + return reader.Process(); + } + + StringAssembler strings; + StabsAssembler stabs; + bool unitized; + MockStabsReaderHandler mock_handler; +}; + +class Stabs: public StabsFixture, public Test { }; + +TEST_F(Stabs, MockStabsInput) { + stabs.set_endianness(kLittleEndian); + stabs.set_value_size(4); + stabs + .Stab(N_SO, 149, 40232, 0x18a2a72bU, "builddir/") + .Stab(N_FUN, 83, 50010, 0x91a5353fU, + "not the SO with source file name we expected ") + .Stab(N_SO, 165, 24791, 0xfe69d23cU, "") + .Stab(N_SO, 184, 34178, 0xca4d883aU, "builddir1/") + .Stab(N_SO, 83, 40859, 0xd2fe5df3U, "file1.c") + .Stab(N_LSYM, 147, 39565, 0x60d4bb8aU, "not the FUN we're looking for") + .Stab(N_FUN, 120, 50271, 0xa049f4b1U, "fun1") + .Stab(N_BINCL, 150, 15694, 0xef65c659U, + "something to ignore in a FUN body") + .Stab(N_SLINE, 147, 4967, 0xd904b3f, "") + .Stab(N_SOL, 177, 56135, 0xbd97b1dcU, "header.h") + .Stab(N_SLINE, 130, 24610, 0x90f145b, "") + .Stab(N_FUN, 45, 32441, 0xbf27cf93U, + "fun2:some stabs type info here:to trim from the name") + .Stab(N_SLINE, 138, 39002, 0x8148b87, "") + .Stab(N_SOL, 60, 49318, 0x1d06e025U, "file1.c") + .Stab(N_SLINE, 29, 52163, 0x6eebbb7, "") + .Stab(N_SO, 167, 4647, 0xd04b7448U, "") + .Stab(N_LSYM, 58, 37837, 0xe6b14d37U, "") + .Stab(N_SO, 152, 7810, 0x11759f10U, "file3.c") + .Stab(N_SO, 218, 12447, 0x11cfe4b5U, ""); + + { + InSequence s; + + EXPECT_CALL(mock_handler, + StartCompilationUnit(StrEq("file1.c"), 0xd2fe5df3U, + StrEq("builddir1/"))) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, StartFunction(StrEq("fun1"), 0xa049f4b1U)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, + Line(0xa049f4b1U + 0xd904b3f, StrEq("file1.c"), 4967)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, + Line(0xa049f4b1U + 0x90f145b, StrEq("header.h"), 24610)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndFunction(0xbf27cf93U)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, StartFunction(StrEq("fun2"), 0xbf27cf93U)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, + Line(0xbf27cf93U + 0x8148b87, StrEq("header.h"), 39002)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, + Line(0xbf27cf93U + 0x6eebbb7, StrEq("file1.c"), 52163)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndFunction(0xd04b7448U)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndCompilationUnit(0xd04b7448U)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, StartCompilationUnit(StrEq("file3.c"), + 0x11759f10U, NULL)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndCompilationUnit(0x11cfe4b5U)) + .WillOnce(Return(true)); + } + + ASSERT_TRUE(ApplyHandlerToMockStabsData()); +} + +TEST_F(Stabs, AbruptCU) { + stabs.set_endianness(kBigEndian); + stabs.set_value_size(4); + stabs.Stab(N_SO, 177, 23446, 0xbf10d5e4, "file2-1.c"); + + { + InSequence s; + + EXPECT_CALL(mock_handler, + StartCompilationUnit(StrEq("file2-1.c"), 0xbf10d5e4, NULL)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndCompilationUnit(0)) + .WillOnce(Return(true)); + } + + ASSERT_TRUE(ApplyHandlerToMockStabsData()); +} + +TEST_F(Stabs, AbruptFunction) { + stabs.set_endianness(kLittleEndian); + stabs.set_value_size(8); + stabs + .Stab(N_SO, 218, 26631, 0xb83ddf10U, "file3-1.c") + .Stab(N_FUN, 113, 24765, 0xbbd4a145U, "fun3_1"); + + { + 
InSequence s; + + EXPECT_CALL(mock_handler, + StartCompilationUnit(StrEq("file3-1.c"), 0xb83ddf10U, NULL)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, StartFunction(StrEq("fun3_1"), 0xbbd4a145U)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndFunction(0)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndCompilationUnit(0)) + .WillOnce(Return(true)); + } + + ASSERT_TRUE(ApplyHandlerToMockStabsData()); +} + +TEST_F(Stabs, NoCU) { + stabs.set_endianness(kBigEndian); + stabs.set_value_size(8); + stabs.Stab(N_SO, 161, 25673, 0x8f676e7bU, "build-directory/"); + + EXPECT_CALL(mock_handler, StartCompilationUnit(_, _, _)) + .Times(0); + EXPECT_CALL(mock_handler, StartFunction(_, _)) + .Times(0); + + ASSERT_TRUE(ApplyHandlerToMockStabsData()); +} + +TEST_F(Stabs, NoCUEnd) { + stabs.set_endianness(kBigEndian); + stabs.set_value_size(8); + stabs + .Stab(N_SO, 116, 58280, 0x2f7493c9U, "file5-1.c") + .Stab(N_SO, 224, 23057, 0xf9f1d50fU, "file5-2.c"); + + { + InSequence s; + + EXPECT_CALL(mock_handler, + StartCompilationUnit(StrEq("file5-1.c"), 0x2f7493c9U, NULL)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndCompilationUnit(0)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, + StartCompilationUnit(StrEq("file5-2.c"), 0xf9f1d50fU, NULL)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndCompilationUnit(0)) + .WillOnce(Return(true)); + } + + ASSERT_TRUE(ApplyHandlerToMockStabsData()); +} + +// On systems that store STABS in sections, string offsets are relative to +// the beginning of that compilation unit's strings, marked with N_UNDF +// symbols; see the comments for StabsReader::StabsReader. +TEST_F(Stabs, Unitized) { + stabs.set_endianness(kBigEndian); + stabs.set_value_size(4); + stabs + .StartCU("antimony") + .Stab(N_SO, 49, 26043, 0x7e259f1aU, "antimony") + .Stab(N_FUN, 101, 63253, 0x7fbcccaeU, "arsenic") + .Stab(N_SO, 124, 37175, 0x80b0014cU, "") + .EndCU() + .StartCU("aluminum") + .Stab(N_SO, 72, 23084, 0x86756839U, "aluminum") + .Stab(N_FUN, 59, 3305, 0xa8e120b0U, "selenium") + .Stab(N_SO, 178, 56949, 0xbffff983U, "") + .EndCU(); + + { + InSequence s; + EXPECT_CALL(mock_handler, + StartCompilationUnit(StrEq("antimony"), 0x7e259f1aU, NULL)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, StartFunction(Eq("arsenic"), 0x7fbcccaeU)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndFunction(0x80b0014cU)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndCompilationUnit(0x80b0014cU)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, + StartCompilationUnit(StrEq("aluminum"), 0x86756839U, NULL)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, StartFunction(Eq("selenium"), 0xa8e120b0U)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndFunction(0xbffff983U)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndCompilationUnit(0xbffff983U)) + .WillOnce(Return(true)); + } + + ASSERT_TRUE(ApplyHandlerToMockStabsData()); +} + +// On systems that store STABS entries in the real symbol table, the N_UNDF +// entries have no special meaning, and shouldn't mess up the string +// indices. 
+TEST_F(Stabs, NonUnitized) { + stabs.set_endianness(kLittleEndian); + stabs.set_value_size(4); + unitized = false; + stabs + .Stab(N_UNDF, 21, 11551, 0x9bad2b2e, "") + .Stab(N_UNDF, 21, 11551, 0x9bad2b2e, "") + .Stab(N_SO, 71, 45139, 0x11a97352, "Tanzania") + .Stab(N_SO, 221, 41976, 0x21a97352, ""); + + { + InSequence s; + EXPECT_CALL(mock_handler, + StartCompilationUnit(StrEq("Tanzania"), + 0x11a97352, NULL)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndCompilationUnit(0x21a97352)) + .WillOnce(Return(true)); + } + + ASSERT_TRUE(ApplyHandlerToMockStabsData()); +} + +TEST_F(Stabs, FunctionEnd) { + stabs.set_endianness(kLittleEndian); + stabs.set_value_size(8); + stabs + .Stab(N_SO, 102, 62362, 0x52a830d644cd6942ULL, "compilation unit") + // This function is terminated by the start of the next function. + .Stab(N_FUN, 216, 38405, 0xbb5ab70ecdd23bfeULL, "function 1") + // This function is terminated by an explicit end-of-function stab, + // whose value is a size in bytes. + .Stab(N_FUN, 240, 10973, 0xc954de9b8fb3e5e2ULL, "function 2") + .Stab(N_FUN, 14, 36749, 0xc1ab, "") + // This function is terminated by the end of the compilation unit. + .Stab(N_FUN, 143, 64514, 0xdff98c9a35386e1fULL, "function 3") + .Stab(N_SO, 164, 60142, 0xfdacb856e78bbf57ULL, ""); + + { + InSequence s; + EXPECT_CALL(mock_handler, + StartCompilationUnit(StrEq("compilation unit"), + 0x52a830d644cd6942ULL, NULL)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, + StartFunction(Eq("function 1"), 0xbb5ab70ecdd23bfeULL)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndFunction(0xc954de9b8fb3e5e2ULL)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, + StartFunction(Eq("function 2"), 0xc954de9b8fb3e5e2ULL)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndFunction(0xc954de9b8fb3e5e2ULL + 0xc1ab)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, + StartFunction(Eq("function 3"), 0xdff98c9a35386e1fULL)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndFunction(0xfdacb856e78bbf57ULL)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndCompilationUnit(0xfdacb856e78bbf57ULL)) + .WillOnce(Return(true)); + } + + ASSERT_TRUE(ApplyHandlerToMockStabsData()); +} + +// On Mac OS X, SLINE records can appear before the FUN stab to which they +// belong, and their values are absolute addresses, not offsets. 
+TEST_F(Stabs, LeadingLine) { + stabs.set_endianness(kBigEndian); + stabs.set_value_size(4); + stabs + .Stab(N_SO, 179, 27357, 0x8adabc15, "build directory/") + .Stab(N_SO, 52, 53058, 0x4c7e3bf4, "compilation unit") + .Stab(N_SOL, 165, 12086, 0x6a797ca3, "source file name") + .Stab(N_SLINE, 229, 20015, 0x4cb3d7e0, "") + .Stab(N_SLINE, 89, 43802, 0x4cba8b88, "") + .Stab(N_FUN, 251, 51639, 0xce1b98fa, "rutabaga") + .Stab(N_FUN, 218, 16113, 0x5798, "") + .Stab(N_SO, 52, 53058, 0xd4af4415, ""); + + { + InSequence s; + EXPECT_CALL(mock_handler, + StartCompilationUnit(StrEq("compilation unit"), + 0x4c7e3bf4, StrEq("build directory/"))) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, + StartFunction(Eq("rutabaga"), 0xce1b98fa)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, + Line(0x4cb3d7e0, StrEq("source file name"), 20015)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, + Line(0x4cba8b88, StrEq("source file name"), 43802)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndFunction(0xce1b98fa + 0x5798)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, EndCompilationUnit(0xd4af4415)) + .WillOnce(Return(true)); + } + + ASSERT_TRUE(ApplyHandlerToMockStabsData()); +} + + +#if defined(HAVE_MACH_O_NLIST_H) +// These tests have no meaning on non-Mach-O-based systems, as +// only Mach-O uses N_SECT to represent public symbols. +TEST_F(Stabs, OnePublicSymbol) { + stabs.set_endianness(kLittleEndian); + stabs.set_value_size(4); + + const uint32_t kExpectedAddress = 0x9000; + const string kExpectedFunctionName("public_function"); + stabs + .Stab(N_SECT, 1, 0, kExpectedAddress, kExpectedFunctionName); + + { + InSequence s; + EXPECT_CALL(mock_handler, + Extern(StrEq(kExpectedFunctionName), + kExpectedAddress)) + .WillOnce(Return(true)); + } + ASSERT_TRUE(ApplyHandlerToMockStabsData()); +} + +TEST_F(Stabs, TwoPublicSymbols) { + stabs.set_endianness(kLittleEndian); + stabs.set_value_size(4); + + const uint32_t kExpectedAddress1 = 0xB0B0B0B0; + const string kExpectedFunctionName1("public_function"); + const uint32_t kExpectedAddress2 = 0xF0F0F0F0; + const string kExpectedFunctionName2("something else"); + stabs + .Stab(N_SECT, 1, 0, kExpectedAddress1, kExpectedFunctionName1) + .Stab(N_SECT, 1, 0, kExpectedAddress2, kExpectedFunctionName2); + + { + InSequence s; + EXPECT_CALL(mock_handler, + Extern(StrEq(kExpectedFunctionName1), + kExpectedAddress1)) + .WillOnce(Return(true)); + EXPECT_CALL(mock_handler, + Extern(StrEq(kExpectedFunctionName2), + kExpectedAddress2)) + .WillOnce(Return(true)); + } + ASSERT_TRUE(ApplyHandlerToMockStabsData()); +} + +#endif + +} // anonymous namespace diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/stabs_to_module.cc b/TMessagesProj/jni/third_party/breakpad/src/common/stabs_to_module.cc new file mode 100644 index 0000000000..0a83cf21cb --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/stabs_to_module.cc @@ -0,0 +1,197 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. 
+// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// dump_stabs.cc --- implement the StabsToModule class. + +#include +#include +#include +#include + +#include + +#include "common/stabs_to_module.h" +#include "common/using_std_string.h" + +namespace google_breakpad { + +// Demangle using abi call. +// Older GCC may not support it. +static string Demangle(const string &mangled) { + int status = 0; + char *demangled = abi::__cxa_demangle(mangled.c_str(), NULL, NULL, &status); + if (status == 0 && demangled != NULL) { + string str(demangled); + free(demangled); + return str; + } + return string(mangled); +} + +StabsToModule::~StabsToModule() { + // Free any functions we've accumulated but not added to the module. + for (vector::const_iterator func_it = functions_.begin(); + func_it != functions_.end(); func_it++) + delete *func_it; + // Free any function that we're currently within. + delete current_function_; +} + +bool StabsToModule::StartCompilationUnit(const char *name, uint64_t address, + const char *build_directory) { + assert(!in_compilation_unit_); + in_compilation_unit_ = true; + current_source_file_name_ = name; + current_source_file_ = module_->FindFile(name); + comp_unit_base_address_ = address; + boundaries_.push_back(static_cast(address)); + return true; +} + +bool StabsToModule::EndCompilationUnit(uint64_t address) { + assert(in_compilation_unit_); + in_compilation_unit_ = false; + comp_unit_base_address_ = 0; + current_source_file_ = NULL; + current_source_file_name_ = NULL; + if (address) + boundaries_.push_back(static_cast(address)); + return true; +} + +bool StabsToModule::StartFunction(const string &name, + uint64_t address) { + assert(!current_function_); + Module::Function *f = new Module::Function(Demangle(name), address); + f->size = 0; // We compute this in StabsToModule::Finalize(). + f->parameter_size = 0; // We don't provide this information. + current_function_ = f; + boundaries_.push_back(static_cast(address)); + return true; +} + +bool StabsToModule::EndFunction(uint64_t address) { + assert(current_function_); + // Functions in this compilation unit should have address bigger + // than the compilation unit's starting address. There may be a lot + // of duplicated entries for functions in the STABS data. We will + // count on the Module to remove the duplicates. 
+ if (current_function_->address >= comp_unit_base_address_) + functions_.push_back(current_function_); + else + delete current_function_; + current_function_ = NULL; + if (address) + boundaries_.push_back(static_cast(address)); + return true; +} + +bool StabsToModule::Line(uint64_t address, const char *name, int number) { + assert(current_function_); + assert(current_source_file_); + if (name != current_source_file_name_) { + current_source_file_ = module_->FindFile(name); + current_source_file_name_ = name; + } + Module::Line line; + line.address = address; + line.size = 0; // We compute this in StabsToModule::Finalize(). + line.file = current_source_file_; + line.number = number; + current_function_->lines.push_back(line); + return true; +} + +bool StabsToModule::Extern(const string &name, uint64_t address) { + Module::Extern *ext = new Module::Extern(address); + // Older libstdc++ demangle implementations can crash on unexpected + // input, so be careful about what gets passed in. + if (name.compare(0, 3, "__Z") == 0) { + ext->name = Demangle(name.substr(1)); + } else if (name[0] == '_') { + ext->name = name.substr(1); + } else { + ext->name = name; + } + module_->AddExtern(ext); + return true; +} + +void StabsToModule::Warning(const char *format, ...) { + va_list args; + va_start(args, format); + vfprintf(stderr, format, args); + va_end(args); +} + +void StabsToModule::Finalize() { + // Sort our boundary list, so we can search it quickly. + sort(boundaries_.begin(), boundaries_.end()); + // Sort all functions by address, just for neatness. + sort(functions_.begin(), functions_.end(), + Module::Function::CompareByAddress); + + for (vector::const_iterator func_it = functions_.begin(); + func_it != functions_.end(); + func_it++) { + Module::Function *f = *func_it; + // Compute the function f's size. + vector::const_iterator boundary + = std::upper_bound(boundaries_.begin(), boundaries_.end(), f->address); + if (boundary != boundaries_.end()) + f->size = *boundary - f->address; + else + // If this is the last function in the module, and the STABS + // reader was unable to give us its ending address, then assign + // it a bogus, very large value. This will happen at most once + // per module: since we've added all functions' addresses to the + // boundary table, only one can be the last. + f->size = kFallbackSize; + + // Compute sizes for each of the function f's lines --- if it has any. + if (!f->lines.empty()) { + stable_sort(f->lines.begin(), f->lines.end(), + Module::Line::CompareByAddress); + vector::iterator last_line = f->lines.end() - 1; + for (vector::iterator line_it = f->lines.begin(); + line_it != last_line; line_it++) + line_it[0].size = line_it[1].address - line_it[0].address; + // Compute the size of the last line from f's end address. + last_line->size = (f->address + f->size) - last_line->address; + } + } + // Now that everything has a size, add our functions to the module, and + // dispose of our private list. + module_->AddFunctions(functions_.begin(), functions_.end()); + functions_.clear(); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/stabs_to_module.h b/TMessagesProj/jni/third_party/breakpad/src/common/stabs_to_module.h new file mode 100644 index 0000000000..5e04fa7927 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/stabs_to_module.h @@ -0,0 +1,143 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2010 Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// dump_stabs.h: Define the StabsToModule class, which receives +// STABS debugging information from a parser and adds it to a Breakpad +// symbol file. + +#ifndef BREAKPAD_COMMON_STABS_TO_MODULE_H_ +#define BREAKPAD_COMMON_STABS_TO_MODULE_H_ + +#include + +#include +#include + +#include "common/module.h" +#include "common/stabs_reader.h" +#include "common/using_std_string.h" + +namespace google_breakpad { + +using std::vector; + +// A StabsToModule is a handler that receives parsed STABS debugging +// information from a StabsReader, and uses that to populate +// a Module. (All classes are in the google_breakpad namespace.) A +// Module represents the contents of a Breakpad symbol file, and knows +// how to write itself out as such. A StabsToModule thus acts as +// the bridge between STABS and Breakpad data. +// When processing Darwin Mach-O files, this also receives public linker +// symbols, like those found in system libraries. +class StabsToModule: public google_breakpad::StabsHandler { + public: + // Receive parsed debugging information from a StabsReader, and + // store it all in MODULE. + StabsToModule(Module *module) : + module_(module), + in_compilation_unit_(false), + comp_unit_base_address_(0), + current_function_(NULL), + current_source_file_(NULL), + current_source_file_name_(NULL) { } + ~StabsToModule(); + + // The standard StabsHandler virtual member functions. + bool StartCompilationUnit(const char *name, uint64_t address, + const char *build_directory); + bool EndCompilationUnit(uint64_t address); + bool StartFunction(const string &name, uint64_t address); + bool EndFunction(uint64_t address); + bool Line(uint64_t address, const char *name, int number); + bool Extern(const string &name, uint64_t address); + void Warning(const char *format, ...); + + // Do any final processing necessary to make module_ contain all the + // data provided by the STABS reader. 
+ // + // Because STABS does not provide reliable size information for + // functions and lines, we need to make a pass over the data after + // processing all the STABS to compute those sizes. We take care of + // that here. + void Finalize(); + + private: + + // An arbitrary, but very large, size to use for functions whose + // size we can't compute properly. + static const uint64_t kFallbackSize = 0x10000000; + + // The module we're contributing debugging info to. + Module *module_; + + // The functions we've generated so far. We don't add these to + // module_ as we parse them. Instead, we wait until we've computed + // their ending address, and their lines' ending addresses. + // + // We could just stick them in module_ from the outset, but if + // module_ already contains data gathered from other debugging + // formats, that would complicate the size computation. + vector functions_; + + // Boundary addresses. STABS doesn't necessarily supply sizes for + // functions and lines, so we need to compute them ourselves by + // finding the next object. + vector boundaries_; + + // True if we are currently within a compilation unit: we have gotten a + // StartCompilationUnit call, but no matching EndCompilationUnit call + // yet. We use this for sanity checks. + bool in_compilation_unit_; + + // The base address of the current compilation unit. We use this to + // recognize functions we should omit from the symbol file. (If you + // know the details of why we omit these, please patch this + // comment.) + Module::Address comp_unit_base_address_; + + // The function we're currently contributing lines to. + Module::Function *current_function_; + + // The last Module::File we got a line number in. + Module::File *current_source_file_; + + // The pointer in the .stabstr section of the name that + // current_source_file_ is built from. This allows us to quickly + // recognize when the current line is in the same file as the + // previous one (which it usually is). + const char *current_source_file_name_; +}; + +} // namespace google_breakpad + +#endif // BREAKPAD_COMMON_STABS_TO_MODULE_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/stabs_to_module_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/stabs_to_module_unittest.cc new file mode 100644 index 0000000000..d445d1d64b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/stabs_to_module_unittest.cc @@ -0,0 +1,258 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// dump_stabs_unittest.cc: Unit tests for StabsToModule. + +#include + +#include "breakpad_googletest_includes.h" +#include "common/stabs_to_module.h" + +using google_breakpad::Module; +using google_breakpad::StabsToModule; +using std::vector; + +TEST(StabsToModule, SimpleCU) { + Module m("name", "os", "arch", "id"); + StabsToModule h(&m); + + // Feed in a simple compilation unit that defines a function with + // one line. + EXPECT_TRUE(h.StartCompilationUnit("compilation-unit", 0x9f4d1271e50db93bLL, + "build-directory")); + EXPECT_TRUE(h.StartFunction("function", 0xfde4abbed390c394LL)); + EXPECT_TRUE(h.Line(0xfde4abbed390c394LL, "source-file-name", 174823314)); + EXPECT_TRUE(h.EndFunction(0xfde4abbed390c3a4LL)); + EXPECT_TRUE(h.EndCompilationUnit(0xfee4abbed390c3a4LL)); + h.Finalize(); + + // Now check to see what has been added to the Module. + Module::File *file = m.FindExistingFile("source-file-name"); + ASSERT_TRUE(file != NULL); + + vector functions; + m.GetFunctions(&functions, functions.end()); + ASSERT_EQ((size_t) 1, functions.size()); + Module::Function *function = functions[0]; + EXPECT_STREQ("function", function->name.c_str()); + EXPECT_EQ(0xfde4abbed390c394LL, function->address); + EXPECT_EQ(0x10U, function->size); + EXPECT_EQ(0U, function->parameter_size); + ASSERT_EQ((size_t) 1, function->lines.size()); + Module::Line *line = &function->lines[0]; + EXPECT_EQ(0xfde4abbed390c394LL, line->address); + EXPECT_EQ(0x10U, line->size); // derived from EndFunction + EXPECT_TRUE(line->file == file); + EXPECT_EQ(174823314, line->number); +} + +#ifdef __GNUC__ +// Function name mangling can vary by compiler, so only run mangled-name +// tests on GCC for simplicity's sake. +TEST(StabsToModule, Externs) { + Module m("name", "os", "arch", "id"); + StabsToModule h(&m); + + // Feed in a few Extern symbols. + EXPECT_TRUE(h.Extern("_foo", 0xffff)); + EXPECT_TRUE(h.Extern("__Z21dyldGlobalLockAcquirev", 0xaaaa)); + EXPECT_TRUE(h.Extern("_MorphTableGetNextMorphChain", 0x1111)); + h.Finalize(); + + // Now check to see what has been added to the Module. + vector externs; + m.GetExterns(&externs, externs.end()); + ASSERT_EQ((size_t) 3, externs.size()); + Module::Extern *extern1 = externs[0]; + EXPECT_STREQ("MorphTableGetNextMorphChain", extern1->name.c_str()); + EXPECT_EQ((Module::Address)0x1111, extern1->address); + Module::Extern *extern2 = externs[1]; + EXPECT_STREQ("dyldGlobalLockAcquire()", extern2->name.c_str()); + EXPECT_EQ((Module::Address)0xaaaa, extern2->address); + Module::Extern *extern3 = externs[2]; + EXPECT_STREQ("foo", extern3->name.c_str()); + EXPECT_EQ((Module::Address)0xffff, extern3->address); +} +#endif // __GNUC__ + +TEST(StabsToModule, DuplicateFunctionNames) { + Module m("name", "os", "arch", "id"); + StabsToModule h(&m); + + // Compilation unit with one function, mangled name. 
+ EXPECT_TRUE(h.StartCompilationUnit("compilation-unit", 0xf2cfda36ecf7f46cLL, + "build-directory")); + EXPECT_TRUE(h.StartFunction("funcfoo", + 0xf2cfda36ecf7f46dLL)); + EXPECT_TRUE(h.EndFunction(0)); + EXPECT_TRUE(h.StartFunction("funcfoo", + 0xf2cfda36ecf7f46dLL)); + EXPECT_TRUE(h.EndFunction(0)); + EXPECT_TRUE(h.EndCompilationUnit(0)); + + h.Finalize(); + + // Now check to see what has been added to the Module. + Module::File *file = m.FindExistingFile("compilation-unit"); + ASSERT_TRUE(file != NULL); + + vector functions; + m.GetFunctions(&functions, functions.end()); + ASSERT_EQ(1U, functions.size()); + + Module::Function *function = functions[0]; + EXPECT_EQ(0xf2cfda36ecf7f46dLL, function->address); + EXPECT_LT(0U, function->size); // should have used dummy size + EXPECT_EQ(0U, function->parameter_size); + ASSERT_EQ(0U, function->lines.size()); +} + +TEST(InferSizes, LineSize) { + Module m("name", "os", "arch", "id"); + StabsToModule h(&m); + + // Feed in a simple compilation unit that defines a function with + // one line. + EXPECT_TRUE(h.StartCompilationUnit("compilation-unit", 0xb4513962eff94e92LL, + "build-directory")); + EXPECT_TRUE(h.StartFunction("function", 0xb4513962eff94e92LL)); + EXPECT_TRUE(h.Line(0xb4513962eff94e92LL, "source-file-name-1", 77396614)); + EXPECT_TRUE(h.Line(0xb4513963eff94e92LL, "source-file-name-2", 87660088)); + EXPECT_TRUE(h.EndFunction(0)); // unknown function end address + EXPECT_TRUE(h.EndCompilationUnit(0)); // unknown CU end address + EXPECT_TRUE(h.StartCompilationUnit("compilation-unit-2", 0xb4523963eff94e92LL, + "build-directory-2")); // next boundary + EXPECT_TRUE(h.EndCompilationUnit(0)); + h.Finalize(); + + // Now check to see what has been added to the Module. + Module::File *file1 = m.FindExistingFile("source-file-name-1"); + ASSERT_TRUE(file1 != NULL); + Module::File *file2 = m.FindExistingFile("source-file-name-2"); + ASSERT_TRUE(file2 != NULL); + + vector functions; + m.GetFunctions(&functions, functions.end()); + ASSERT_EQ((size_t) 1, functions.size()); + + Module::Function *function = functions[0]; + EXPECT_STREQ("function", function->name.c_str()); + EXPECT_EQ(0xb4513962eff94e92LL, function->address); + EXPECT_EQ(0x1000100000000ULL, function->size); // inferred from CU end + EXPECT_EQ(0U, function->parameter_size); + ASSERT_EQ((size_t) 2, function->lines.size()); + + Module::Line *line1 = &function->lines[0]; + EXPECT_EQ(0xb4513962eff94e92LL, line1->address); + EXPECT_EQ(0x100000000ULL, line1->size); // derived from EndFunction + EXPECT_TRUE(line1->file == file1); + EXPECT_EQ(77396614, line1->number); + + Module::Line *line2 = &function->lines[1]; + EXPECT_EQ(0xb4513963eff94e92LL, line2->address); + EXPECT_EQ(0x1000000000000ULL, line2->size); // derived from EndFunction + EXPECT_TRUE(line2->file == file2); + EXPECT_EQ(87660088, line2->number); +} + +#ifdef __GNUC__ +// Function name mangling can vary by compiler, so only run mangled-name +// tests on GCC for simplicity's sake. +TEST(FunctionNames, Mangled) { + Module m("name", "os", "arch", "id"); + StabsToModule h(&m); + + // Compilation unit with one function, mangled name. + EXPECT_TRUE(h.StartCompilationUnit("compilation-unit", 0xf2cfda63cef7f46cLL, + "build-directory")); + EXPECT_TRUE(h.StartFunction("_ZNSt6vectorIySaIyEE9push_backERKy", + 0xf2cfda63cef7f46dLL)); + EXPECT_TRUE(h.EndFunction(0)); + EXPECT_TRUE(h.EndCompilationUnit(0)); + + h.Finalize(); + + // Now check to see what has been added to the Module. 
+ Module::File *file = m.FindExistingFile("compilation-unit"); + ASSERT_TRUE(file != NULL); + + vector functions; + m.GetFunctions(&functions, functions.end()); + ASSERT_EQ(1U, functions.size()); + + Module::Function *function = functions[0]; + // This is GCC-specific, but we shouldn't be seeing STABS data anywhere + // but Linux. + EXPECT_STREQ("std::vector >::" + "push_back(unsigned long long const&)", + function->name.c_str()); + EXPECT_EQ(0xf2cfda63cef7f46dLL, function->address); + EXPECT_LT(0U, function->size); // should have used dummy size + EXPECT_EQ(0U, function->parameter_size); + ASSERT_EQ(0U, function->lines.size()); +} +#endif // __GNUC__ + +// The GNU toolchain can omit functions that are not used; however, +// when it does so, it doesn't clean up the debugging information that +// refers to them. In STABS, this results in compilation units whose +// SO addresses are zero. +TEST(Omitted, Function) { + Module m("name", "os", "arch", "id"); + StabsToModule h(&m); + + // The StartCompilationUnit and EndCompilationUnit calls may both have an + // address of zero if the compilation unit has had sections removed. + EXPECT_TRUE(h.StartCompilationUnit("compilation-unit", 0, "build-directory")); + EXPECT_TRUE(h.StartFunction("function", 0x2a133596)); + EXPECT_TRUE(h.EndFunction(0)); + EXPECT_TRUE(h.EndCompilationUnit(0)); +} + +// TODO --- if we actually cared about STABS. Even without these we've +// got full coverage of non-failure source lines in dump_stabs.cc. + +// Line size from next line +// Line size from function end +// Line size from next function start +// line size from cu end +// line size from next cu start +// fallback size is something plausible + +// function size from function end +// function size from next function start +// function size from cu end +// function size from next cu start +// fallback size is something plausible + +// omitting functions outside the compilation unit's address range +// zero-line, one-line, many-line functions diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/string_conversion.cc b/TMessagesProj/jni/third_party/breakpad/src/common/string_conversion.cc new file mode 100644 index 0000000000..9c0d623fc1 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/string_conversion.cc @@ -0,0 +1,155 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include + +#include "common/convert_UTF.h" +#include "common/scoped_ptr.h" +#include "common/string_conversion.h" +#include "common/using_std_string.h" + +namespace google_breakpad { + +using std::vector; + +void UTF8ToUTF16(const char *in, vector *out) { + size_t source_length = strlen(in); + const UTF8 *source_ptr = reinterpret_cast(in); + const UTF8 *source_end_ptr = source_ptr + source_length; + // Erase the contents and zero fill to the expected size + out->clear(); + out->insert(out->begin(), source_length, 0); + uint16_t *target_ptr = &(*out)[0]; + uint16_t *target_end_ptr = target_ptr + out->capacity() * sizeof(uint16_t); + ConversionResult result = ConvertUTF8toUTF16(&source_ptr, source_end_ptr, + &target_ptr, target_end_ptr, + strictConversion); + + // Resize to be the size of the # of converted characters + NULL + out->resize(result == conversionOK ? target_ptr - &(*out)[0] + 1: 0); +} + +int UTF8ToUTF16Char(const char *in, int in_length, uint16_t out[2]) { + const UTF8 *source_ptr = reinterpret_cast(in); + const UTF8 *source_end_ptr = source_ptr + sizeof(char); + uint16_t *target_ptr = out; + uint16_t *target_end_ptr = target_ptr + 2 * sizeof(uint16_t); + out[0] = out[1] = 0; + + // Process one character at a time + while (1) { + ConversionResult result = ConvertUTF8toUTF16(&source_ptr, source_end_ptr, + &target_ptr, target_end_ptr, + strictConversion); + + if (result == conversionOK) + return static_cast(source_ptr - reinterpret_cast(in)); + + // Add another character to the input stream and try again + source_ptr = reinterpret_cast(in); + ++source_end_ptr; + + if (source_end_ptr > reinterpret_cast(in) + in_length) + break; + } + + return 0; +} + +void UTF32ToUTF16(const wchar_t *in, vector *out) { + size_t source_length = wcslen(in); + const UTF32 *source_ptr = reinterpret_cast(in); + const UTF32 *source_end_ptr = source_ptr + source_length; + // Erase the contents and zero fill to the expected size + out->clear(); + out->insert(out->begin(), source_length, 0); + uint16_t *target_ptr = &(*out)[0]; + uint16_t *target_end_ptr = target_ptr + out->capacity() * sizeof(uint16_t); + ConversionResult result = ConvertUTF32toUTF16(&source_ptr, source_end_ptr, + &target_ptr, target_end_ptr, + strictConversion); + + // Resize to be the size of the # of converted characters + NULL + out->resize(result == conversionOK ? 
target_ptr - &(*out)[0] + 1: 0); +} + +void UTF32ToUTF16Char(wchar_t in, uint16_t out[2]) { + const UTF32 *source_ptr = reinterpret_cast(&in); + const UTF32 *source_end_ptr = source_ptr + 1; + uint16_t *target_ptr = out; + uint16_t *target_end_ptr = target_ptr + 2 * sizeof(uint16_t); + out[0] = out[1] = 0; + ConversionResult result = ConvertUTF32toUTF16(&source_ptr, source_end_ptr, + &target_ptr, target_end_ptr, + strictConversion); + + if (result != conversionOK) { + out[0] = out[1] = 0; + } +} + +static inline uint16_t Swap(uint16_t value) { + return (value >> 8) | static_cast(value << 8); +} + +string UTF16ToUTF8(const vector &in, bool swap) { + const UTF16 *source_ptr = &in[0]; + scoped_array source_buffer; + + // If we're to swap, we need to make a local copy and swap each byte pair + if (swap) { + int idx = 0; + source_buffer.reset(new uint16_t[in.size()]); + UTF16 *source_buffer_ptr = source_buffer.get(); + for (vector::const_iterator it = in.begin(); + it != in.end(); ++it, ++idx) + source_buffer_ptr[idx] = Swap(*it); + + source_ptr = source_buffer.get(); + } + + // The maximum expansion would be 4x the size of the input string. + const UTF16 *source_end_ptr = source_ptr + in.size(); + size_t target_capacity = in.size() * 4; + scoped_array target_buffer(new UTF8[target_capacity]); + UTF8 *target_ptr = target_buffer.get(); + UTF8 *target_end_ptr = target_ptr + target_capacity; + ConversionResult result = ConvertUTF16toUTF8(&source_ptr, source_end_ptr, + &target_ptr, target_end_ptr, + strictConversion); + + if (result == conversionOK) { + const char *targetPtr = reinterpret_cast(target_buffer.get()); + return targetPtr; + } + + return ""; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/string_conversion.h b/TMessagesProj/jni/third_party/breakpad/src/common/string_conversion.h new file mode 100644 index 0000000000..b9ba96a2e1 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/string_conversion.h @@ -0,0 +1,68 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// string_conversion.h: Conversion between different UTF-8/16/32 encodings.
+
+#ifndef COMMON_STRING_CONVERSION_H__
+#define COMMON_STRING_CONVERSION_H__
+
+#include <string>
+#include <vector>
+
+#include "common/using_std_string.h"
+#include "google_breakpad/common/breakpad_types.h"
+
+namespace google_breakpad {
+
+using std::vector;
+
+// Convert |in| to UTF-16 into |out|. Use platform byte ordering. If the
+// conversion failed, |out| will be zero length.
+void UTF8ToUTF16(const char *in, vector<uint16_t> *out);
+
+// Convert at least one character (up to a maximum of |in_length|) from |in|
+// to UTF-16 into |out|. Return the number of characters consumed from |in|.
+// Any unused characters in |out| will be initialized to 0. No memory will
+// be allocated by this routine.
+int UTF8ToUTF16Char(const char *in, int in_length, uint16_t out[2]);
+
+// Convert |in| to UTF-16 into |out|. Use platform byte ordering. If the
+// conversion failed, |out| will be zero length.
+void UTF32ToUTF16(const wchar_t *in, vector<uint16_t> *out);
+
+// Convert |in| to UTF-16 into |out|. Any unused characters in |out| will be
+// initialized to 0. No memory will be allocated by this routine.
+void UTF32ToUTF16Char(wchar_t in, uint16_t out[2]);
+
+// Convert |in| to UTF-8. If |swap| is true, swap bytes before converting.
+string UTF16ToUTF8(const vector<uint16_t> &in, bool swap);
+
+} // namespace google_breakpad
+
+#endif // COMMON_STRING_CONVERSION_H__
diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/symbol_data.h b/TMessagesProj/jni/third_party/breakpad/src/common/symbol_data.h
new file mode 100644
index 0000000000..2cf15a855d
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/common/symbol_data.h
@@ -0,0 +1,42 @@
+// -*- mode: c++ -*-
+
+// Copyright (c) 2013 Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef COMMON_SYMBOL_DATA_H_ +#define COMMON_SYMBOL_DATA_H_ + +// Control what data is used from the symbol file. +enum SymbolData { + ALL_SYMBOL_DATA, + NO_CFI, + ONLY_CFI +}; + +#endif // COMMON_SYMBOL_DATA_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/test_assembler.cc b/TMessagesProj/jni/third_party/breakpad/src/common/test_assembler.cc new file mode 100644 index 0000000000..1e783b45c5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/test_assembler.cc @@ -0,0 +1,359 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// test_assembler.cc: Implementation of google_breakpad::TestAssembler. +// See test_assembler.h for details. 
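
The SymbolData enum just above is the switch a dumper consults to decide which record types it emits. The helpers below are a hypothetical sketch, not Breakpad API, spelling out the intended meaning of each enumerator.

#include "common/symbol_data.h"

// Hypothetical helpers (names are not from Breakpad) interpreting the
// enumerators above:
//   ALL_SYMBOL_DATA - emit every record type;
//   NO_CFI          - emit everything except call-frame information;
//   ONLY_CFI        - emit only call-frame information.
static bool ShouldEmitCFI(SymbolData symbol_data) {
  return symbol_data != NO_CFI;
}

static bool ShouldEmitFunctionsAndLines(SymbolData symbol_data) {
  return symbol_data != ONLY_CFI;
}
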
+ +#include "common/test_assembler.h" + +#include +#include + +#include + +namespace google_breakpad { +namespace test_assembler { + +using std::back_insert_iterator; + +Label::Label() : value_(new Binding()) { } +Label::Label(uint64_t value) : value_(new Binding(value)) { } +Label::Label(const Label &label) { + value_ = label.value_; + value_->Acquire(); +} +Label::~Label() { + if (value_->Release()) delete value_; +} + +Label &Label::operator=(uint64_t value) { + value_->Set(NULL, value); + return *this; +} + +Label &Label::operator=(const Label &label) { + value_->Set(label.value_, 0); + return *this; +} + +Label Label::operator+(uint64_t addend) const { + Label l; + l.value_->Set(this->value_, addend); + return l; +} + +Label Label::operator-(uint64_t subtrahend) const { + Label l; + l.value_->Set(this->value_, -subtrahend); + return l; +} + +// When NDEBUG is #defined, assert doesn't evaluate its argument. This +// means you can't simply use assert to check the return value of a +// function with necessary side effects. +// +// ALWAYS_EVALUATE_AND_ASSERT(x) evaluates x regardless of whether +// NDEBUG is #defined; when NDEBUG is not #defined, it further asserts +// that x is true. +#ifdef NDEBUG +#define ALWAYS_EVALUATE_AND_ASSERT(x) x +#else +#define ALWAYS_EVALUATE_AND_ASSERT(x) assert(x) +#endif + +uint64_t Label::operator-(const Label &label) const { + uint64_t offset; + ALWAYS_EVALUATE_AND_ASSERT(IsKnownOffsetFrom(label, &offset)); + return offset; +} + +uint64_t Label::Value() const { + uint64_t v = 0; + ALWAYS_EVALUATE_AND_ASSERT(IsKnownConstant(&v)); + return v; +}; + +bool Label::IsKnownConstant(uint64_t *value_p) const { + Binding *base; + uint64_t addend; + value_->Get(&base, &addend); + if (base != NULL) return false; + if (value_p) *value_p = addend; + return true; +} + +bool Label::IsKnownOffsetFrom(const Label &label, uint64_t *offset_p) const +{ + Binding *label_base, *this_base; + uint64_t label_addend, this_addend; + label.value_->Get(&label_base, &label_addend); + value_->Get(&this_base, &this_addend); + // If this and label are related, Get will find their final + // common ancestor, regardless of how indirect the relation is. This + // comparison also handles the constant vs. constant case. + if (this_base != label_base) return false; + if (offset_p) *offset_p = this_addend - label_addend; + return true; +} + +Label::Binding::Binding() : base_(this), addend_(), reference_count_(1) { } + +Label::Binding::Binding(uint64_t addend) + : base_(NULL), addend_(addend), reference_count_(1) { } + +Label::Binding::~Binding() { + assert(reference_count_ == 0); + if (base_ && base_ != this && base_->Release()) + delete base_; +} + +void Label::Binding::Set(Binding *binding, uint64_t addend) { + if (!base_ && !binding) { + // We're equating two constants. This could be okay. + assert(addend_ == addend); + } else if (!base_) { + // We are a known constant, but BINDING may not be, so turn the + // tables and try to set BINDING's value instead. + binding->Set(NULL, addend_ - addend); + } else { + if (binding) { + // Find binding's final value. 
Since the final value is always either + // completely unconstrained or a constant, never a reference to + // another variable (otherwise, it wouldn't be final), this + // guarantees we won't create cycles here, even for code like this: + // l = m, m = n, n = l; + uint64_t binding_addend; + binding->Get(&binding, &binding_addend); + addend += binding_addend; + } + + // It seems likely that setting a binding to itself is a bug + // (although I can imagine this might turn out to be helpful to + // permit). + assert(binding != this); + + if (base_ != this) { + // Set the other bindings on our chain as well. Note that this + // is sufficient even though binding relationships form trees: + // All binding operations traverse their chains to the end, and + // all bindings related to us share some tail of our chain, so + // they will see the changes we make here. + base_->Set(binding, addend - addend_); + // We're not going to use base_ any more. + if (base_->Release()) delete base_; + } + + // Adopt BINDING as our base. Note that it should be correct to + // acquire here, after the release above, even though the usual + // reference-counting rules call for acquiring first, and then + // releasing: the self-reference assertion above should have + // complained if BINDING were 'this' or anywhere along our chain, + // so we didn't release BINDING. + if (binding) binding->Acquire(); + base_ = binding; + addend_ = addend; + } +} + +void Label::Binding::Get(Binding **base, uint64_t *addend) { + if (base_ && base_ != this) { + // Recurse to find the end of our reference chain (the root of our + // tree), and then rewrite every binding along the chain to refer + // to it directly, adjusting addends appropriately. (This is why + // this member function isn't this-const.) + Binding *final_base; + uint64_t final_addend; + base_->Get(&final_base, &final_addend); + if (final_base) final_base->Acquire(); + if (base_->Release()) delete base_; + base_ = final_base; + addend_ += final_addend; + } + *base = base_; + *addend = addend_; +} + +template +static inline void InsertEndian(test_assembler::Endianness endianness, + size_t size, uint64_t number, Inserter dest) { + assert(size > 0); + if (endianness == kLittleEndian) { + for (size_t i = 0; i < size; i++) { + *dest++ = (char) (number & 0xff); + number >>= 8; + } + } else { + assert(endianness == kBigEndian); + // The loop condition is odd, but it's correct for size_t. + for (size_t i = size - 1; i < size; i--) + *dest++ = (char) ((number >> (i * 8)) & 0xff); + } +} + +Section &Section::Append(Endianness endianness, size_t size, uint64_t number) { + InsertEndian(endianness, size, number, + back_insert_iterator(contents_)); + return *this; +} + +Section &Section::Append(Endianness endianness, size_t size, + const Label &label) { + // If this label's value is known, there's no reason to waste an + // entry in references_ on it. + uint64_t value; + if (label.IsKnownConstant(&value)) + return Append(endianness, size, value); + + // This will get caught when the references are resolved, but it's + // nicer to find out earlier. 
+  assert(endianness != kUnsetEndian);
+
+  references_.push_back(Reference(contents_.size(), endianness, size, label));
+  contents_.append(size, 0);
+  return *this;
+}
+
+#define ENDIANNESS_L kLittleEndian
+#define ENDIANNESS_B kBigEndian
+#define ENDIANNESS(e) ENDIANNESS_ ## e
+
+#define DEFINE_SHORT_APPEND_NUMBER_ENDIAN(e, bits)           \
+  Section &Section::e ## bits(uint ## bits ## _t v) {        \
+    InsertEndian(ENDIANNESS(e), bits / 8, v,                 \
+                 back_insert_iterator<string>(contents_));   \
+    return *this;                                            \
+  }
+
+#define DEFINE_SHORT_APPEND_LABEL_ENDIAN(e, bits)            \
+  Section &Section::e ## bits(const Label &v) {              \
+    return Append(ENDIANNESS(e), bits / 8, v);               \
+  }
+
+// Define L16, B32, and friends.
+#define DEFINE_SHORT_APPEND_ENDIAN(e, bits)                  \
+  DEFINE_SHORT_APPEND_NUMBER_ENDIAN(e, bits)                 \
+  DEFINE_SHORT_APPEND_LABEL_ENDIAN(e, bits)
+
+DEFINE_SHORT_APPEND_LABEL_ENDIAN(L, 8);
+DEFINE_SHORT_APPEND_LABEL_ENDIAN(B, 8);
+DEFINE_SHORT_APPEND_ENDIAN(L, 16);
+DEFINE_SHORT_APPEND_ENDIAN(L, 32);
+DEFINE_SHORT_APPEND_ENDIAN(L, 64);
+DEFINE_SHORT_APPEND_ENDIAN(B, 16);
+DEFINE_SHORT_APPEND_ENDIAN(B, 32);
+DEFINE_SHORT_APPEND_ENDIAN(B, 64);
+
+#define DEFINE_SHORT_APPEND_NUMBER_DEFAULT(bits)             \
+  Section &Section::D ## bits(uint ## bits ## _t v) {        \
+    InsertEndian(endianness_, bits / 8, v,                   \
+                 back_insert_iterator<string>(contents_));   \
+    return *this;                                            \
+  }
+#define DEFINE_SHORT_APPEND_LABEL_DEFAULT(bits)              \
+  Section &Section::D ## bits(const Label &v) {              \
+    return Append(endianness_, bits / 8, v);                 \
+  }
+#define DEFINE_SHORT_APPEND_DEFAULT(bits)                    \
+  DEFINE_SHORT_APPEND_NUMBER_DEFAULT(bits)                   \
+  DEFINE_SHORT_APPEND_LABEL_DEFAULT(bits)
+
+DEFINE_SHORT_APPEND_LABEL_DEFAULT(8)
+DEFINE_SHORT_APPEND_DEFAULT(16);
+DEFINE_SHORT_APPEND_DEFAULT(32);
+DEFINE_SHORT_APPEND_DEFAULT(64);
+
+Section &Section::Append(const Section &section) {
+  size_t base = contents_.size();
+  contents_.append(section.contents_);
+  for (vector<Reference>::const_iterator it = section.references_.begin();
+       it != section.references_.end(); it++)
+    references_.push_back(Reference(base + it->offset, it->endianness,
+                                    it->size, it->label));
+  return *this;
+}
+
+Section &Section::LEB128(long long value) {
+  while (value < -0x40 || 0x3f < value) {
+    contents_ += (value & 0x7f) | 0x80;
+    if (value < 0)
+      value = (value >> 7) | ~(((unsigned long long) -1) >> 7);
+    else
+      value = (value >> 7);
+  }
+  contents_ += value & 0x7f;
+  return *this;
+}
+
+Section &Section::ULEB128(uint64_t value) {
+  while (value > 0x7f) {
+    contents_ += (value & 0x7f) | 0x80;
+    value = (value >> 7);
+  }
+  contents_ += value;
+  return *this;
+}
+
+Section &Section::Align(size_t alignment, uint8_t pad_byte) {
+  // ALIGNMENT must be a power of two.
+  assert(((alignment - 1) & alignment) == 0);
+  size_t new_size = (contents_.size() + alignment - 1) & ~(alignment - 1);
+  contents_.append(new_size - contents_.size(), pad_byte);
+  assert((contents_.size() & (alignment - 1)) == 0);
+  return *this;
+}
+
+void Section::Clear() {
+  contents_.clear();
+  references_.clear();
+}
+
+bool Section::GetContents(string *contents) {
+  // For each label reference, find the label's value, and patch it into
+  // the section's contents.
+ for (size_t i = 0; i < references_.size(); i++) { + Reference &r = references_[i]; + uint64_t value; + if (!r.label.IsKnownConstant(&value)) { + fprintf(stderr, "Undefined label #%zu at offset 0x%zx\n", i, r.offset); + return false; + } + assert(r.offset < contents_.size()); + assert(contents_.size() - r.offset >= r.size); + InsertEndian(r.endianness, r.size, value, contents_.begin() + r.offset); + } + contents->clear(); + std::swap(contents_, *contents); + references_.clear(); + return true; +} + +} // namespace test_assembler +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/test_assembler.h b/TMessagesProj/jni/third_party/breakpad/src/common/test_assembler.h new file mode 100644 index 0000000000..373dbebacd --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/test_assembler.h @@ -0,0 +1,484 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// test-assembler.h: interface to class for building complex binary streams. + +// To test the Breakpad symbol dumper and processor thoroughly, for +// all combinations of host system and minidump processor +// architecture, we need to be able to easily generate complex test +// data like debugging information and minidump files. +// +// For example, if we want our unit tests to provide full code +// coverage for stack walking, it may be difficult to persuade the +// compiler to generate every possible sort of stack walking +// information that we want to support; there are probably DWARF CFI +// opcodes that GCC never emits. Similarly, if we want to test our +// error handling, we will need to generate damaged minidumps or +// debugging information that (we hope) the client or compiler will +// never produce on its own. +// +// google_breakpad::TestAssembler provides a predictable and +// (relatively) simple way to generate complex formatted data streams +// like minidumps and CFI. 
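Before the interface itself, a minimal usage sketch may help orient the reader. This is hypothetical code, not part of the patch: BuildExample is an invented name, and string here is std::string by way of using_std_string.h. The point is that a Label can be referenced before it is defined, and GetContents performs the patching implemented above in test_assembler.cc.

    #include <string>

    #include "common/test_assembler.h"

    using google_breakpad::test_assembler::Label;
    using google_breakpad::test_assembler::Section;
    using google_breakpad::test_assembler::kLittleEndian;

    // Build a little-endian stream whose leading length field is only
    // known after the payload has been appended.
    bool BuildExample(std::string* bytes) {
      Section section(kLittleEndian);
      Label total_size;                   // undefined for now
      section.D32(total_size)             // reserves 4 bytes, patched later
             .AppendCString("payload");
      total_size = section.Size();        // defining the label resolves the reference
      return section.GetContents(bytes);  // false if any label were still undefined
    }

If total_size were never assigned, GetContents would report an undefined label and return false instead of producing bytes.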
+// Furthermore, because TestAssembler is
+// portable, developers without access to (say) Visual Studio or a
+// SPARC assembler can still work on test data for those targets.
+
+#ifndef PROCESSOR_TEST_ASSEMBLER_H_
+#define PROCESSOR_TEST_ASSEMBLER_H_
+
+#include <list>
+#include <vector>
+#include <string>
+
+#include "common/using_std_string.h"
+#include "google_breakpad/common/breakpad_types.h"
+
+namespace google_breakpad {
+
+using std::list;
+using std::vector;
+
+namespace test_assembler {
+
+// A Label represents a value not yet known that we need to store in a
+// section. As long as all the labels a section refers to are defined
+// by the time we retrieve its contents as bytes, we can use undefined
+// labels freely in that section's construction.
+//
+// A label can be in one of three states:
+// - undefined,
+// - defined as the sum of some other label and a constant, or
+// - a constant.
+//
+// A label's value never changes, but it can accumulate constraints.
+// Adding labels and integers is permitted, and yields a label.
+// Subtracting a constant from a label is permitted, and also yields a
+// label. Subtracting two labels that have some relationship to each
+// other is permitted, and yields a constant.
+//
+// For example:
+//
+//   Label a;             // a's value is undefined
+//   Label b;             // b's value is undefined
+//   {
+//     Label c = a + 4;   // okay, even though a's value is unknown
+//     b = c + 4;         // also okay; b is now a+8
+//   }
+//   Label d = b - 2;     // okay; d == a+6, even though c is gone
+//   d.Value();           // error: d's value is not yet known
+//   d - a;               // is 6, even though their values are not known
+//   a = 12;              // now b == 20, and d == 18
+//   d.Value();           // 18: no longer an error
+//   b.Value();           // 20
+//   d = 10;              // error: d is already defined.
+//
+// Label objects' lifetimes are unconstrained: notice that, in the
+// above example, even though a and b are only related through c, and
+// c goes out of scope, the assignment to a sets b's value as well. In
+// particular, it's not necessary to ensure that a Label lives beyond
+// Sections that refer to it.
+class Label {
+ public:
+  Label();                      // An undefined label.
+  Label(uint64_t value);        // A label with a fixed value
+  Label(const Label &value);    // A label equal to another.
+  ~Label();
+
+  // Return this label's value; it must be known.
+  //
+  // Providing this as a cast operator is nifty, but the conversions
+  // happen in unexpected places. In particular, ISO C++ says that
+  // Label + size_t becomes ambiguous, because it can't decide whether
+  // to convert the Label to a uint64_t and then to a size_t, or use
+  // the overloaded operator that returns a new label, even though the
+  // former could fail if the label is not yet defined and the latter won't.
+  uint64_t Value() const;
+
+  Label &operator=(uint64_t value);
+  Label &operator=(const Label &value);
+  Label operator+(uint64_t addend) const;
+  Label operator-(uint64_t subtrahend) const;
+  uint64_t operator-(const Label &subtrahend) const;
+
+  // We could also provide == and != that work on undefined, but
+  // related, labels.
+
+  // Return true if this label's value is known. If VALUE_P is given,
+  // set *VALUE_P to the known value if returning true.
+  bool IsKnownConstant(uint64_t *value_p = NULL) const;
+
+  // Return true if the offset from LABEL to this label is known. If
+  // OFFSET_P is given, set *OFFSET_P to the offset when returning true.
+ // + // You can think of l.KnownOffsetFrom(m, &d) as being like 'd = l-m', + // except that it also returns a value indicating whether the + // subtraction is possible given what we currently know of l and m. + // It can be possible even if we don't know l and m's values. For + // example: + // + // Label l, m; + // m = l + 10; + // l.IsKnownConstant(); // false + // m.IsKnownConstant(); // false + // uint64_t d; + // l.IsKnownOffsetFrom(m, &d); // true, and sets d to -10. + // l-m // -10 + // m-l // 10 + // m.Value() // error: m's value is not known + bool IsKnownOffsetFrom(const Label &label, uint64_t *offset_p = NULL) const; + + private: + // A label's value, or if that is not yet known, how the value is + // related to other labels' values. A binding may be: + // - a known constant, + // - constrained to be equal to some other binding plus a constant, or + // - unconstrained, and free to take on any value. + // + // Many labels may point to a single binding, and each binding may + // refer to another, so bindings and labels form trees whose leaves + // are labels, whose interior nodes (and roots) are bindings, and + // where links point from children to parents. Bindings are + // reference counted, allowing labels to be lightweight, copyable, + // assignable, placed in containers, and so on. + class Binding { + public: + Binding(); + Binding(uint64_t addend); + ~Binding(); + + // Increment our reference count. + void Acquire() { reference_count_++; }; + // Decrement our reference count, and return true if it is zero. + bool Release() { return --reference_count_ == 0; } + + // Set this binding to be equal to BINDING + ADDEND. If BINDING is + // NULL, then set this binding to the known constant ADDEND. + // Update every binding on this binding's chain to point directly + // to BINDING, or to be a constant, with addends adjusted + // appropriately. + void Set(Binding *binding, uint64_t value); + + // Return what we know about the value of this binding. + // - If this binding's value is a known constant, set BASE to + // NULL, and set ADDEND to its value. + // - If this binding is not a known constant but related to other + // bindings, set BASE to the binding at the end of the relation + // chain (which will always be unconstrained), and set ADDEND to the + // value to add to that binding's value to get this binding's + // value. + // - If this binding is unconstrained, set BASE to this, and leave + // ADDEND unchanged. + void Get(Binding **base, uint64_t *addend); + + private: + // There are three cases: + // + // - A binding representing a known constant value has base_ NULL, + // and addend_ equal to the value. + // + // - A binding representing a completely unconstrained value has + // base_ pointing to this; addend_ is unused. + // + // - A binding whose value is related to some other binding's + // value has base_ pointing to that other binding, and addend_ + // set to the amount to add to that binding's value to get this + // binding's value. We only represent relationships of the form + // x = y+c. + // + // Thus, the bind_ links form a chain terminating in either a + // known constant value or a completely unconstrained value. Most + // operations on bindings do path compression: they change every + // binding on the chain to point directly to the final value, + // adjusting addends as appropriate. + Binding *base_; + uint64_t addend_; + + // The number of Labels and Bindings pointing to this binding. 
+ // (When a binding points to itself, indicating a completely + // unconstrained binding, that doesn't count as a reference.) + int reference_count_; + }; + + // This label's value. + Binding *value_; +}; + +inline Label operator+(uint64_t a, const Label &l) { return l + a; } +// Note that int-Label isn't defined, as negating a Label is not an +// operation we support. + +// Conventions for representing larger numbers as sequences of bytes. +enum Endianness { + kBigEndian, // Big-endian: the most significant byte comes first. + kLittleEndian, // Little-endian: the least significant byte comes first. + kUnsetEndian, // used internally +}; + +// A section is a sequence of bytes, constructed by appending bytes +// to the end. Sections have a convenient and flexible set of member +// functions for appending data in various formats: big-endian and +// little-endian signed and unsigned values of different sizes; +// LEB128 and ULEB128 values (see below), and raw blocks of bytes. +// +// If you need to append a value to a section that is not convenient +// to compute immediately, you can create a label, append the +// label's value to the section, and then set the label's value +// later, when it's convenient to do so. Once a label's value is +// known, the section class takes care of updating all previously +// appended references to it. +// +// Once all the labels to which a section refers have had their +// values determined, you can get a copy of the section's contents +// as a string. +// +// Note that there is no specified "start of section" label. This is +// because there are typically several different meanings for "the +// start of a section": the offset of the section within an object +// file, the address in memory at which the section's content appear, +// and so on. It's up to the code that uses the Section class to +// keep track of these explicitly, as they depend on the application. +class Section { + public: + Section(Endianness endianness = kUnsetEndian) + : endianness_(endianness) { }; + + // A base class destructor should be either public and virtual, + // or protected and nonvirtual. + virtual ~Section() { }; + + // Set the default endianness of this section to ENDIANNESS. This + // sets the behavior of the D appending functions. If the + // assembler's default endianness was set, this is the + void set_endianness(Endianness endianness) { + endianness_ = endianness; + } + + // Return the default endianness of this section. + Endianness endianness() const { return endianness_; } + + // Append the SIZE bytes at DATA or the contents of STRING to the + // end of this section. Return a reference to this section. + Section &Append(const uint8_t *data, size_t size) { + contents_.append(reinterpret_cast(data), size); + return *this; + }; + Section &Append(const string &data) { + contents_.append(data); + return *this; + }; + + // Append SIZE copies of BYTE to the end of this section. Return a + // reference to this section. + Section &Append(size_t size, uint8_t byte) { + contents_.append(size, (char) byte); + return *this; + } + + // Append NUMBER to this section. ENDIANNESS is the endianness to + // use to write the number. SIZE is the length of the number in + // bytes. Return a reference to this section. + Section &Append(Endianness endianness, size_t size, uint64_t number); + Section &Append(Endianness endianness, size_t size, const Label &label); + + // Append SECTION to the end of this section. The labels SECTION + // refers to need not be defined yet. 
+ // + // Note that this has no effect on any Labels' values, or on + // SECTION. If placing SECTION within 'this' provides new + // constraints on existing labels' values, then it's up to the + // caller to fiddle with those labels as needed. + Section &Append(const Section §ion); + + // Append the contents of DATA as a series of bytes terminated by + // a NULL character. + Section &AppendCString(const string &data) { + Append(data); + contents_ += '\0'; + return *this; + } + + // Append at most SIZE bytes from DATA; if DATA is less than SIZE bytes + // long, pad with '\0' characters. + Section &AppendCString(const string &data, size_t size) { + contents_.append(data, 0, size); + if (data.size() < size) + Append(size - data.size(), 0); + return *this; + } + + // Append VALUE or LABEL to this section, with the given bit width and + // endianness. Return a reference to this section. + // + // The names of these functions have the form : + // is either 'L' (little-endian, least significant byte first), + // 'B' (big-endian, most significant byte first), or + // 'D' (default, the section's default endianness) + // is 8, 16, 32, or 64. + // + // Since endianness doesn't matter for a single byte, all the + // =8 functions are equivalent. + // + // These can be used to write both signed and unsigned values, as + // the compiler will properly sign-extend a signed value before + // passing it to the function, at which point the function's + // behavior is the same either way. + Section &L8(uint8_t value) { contents_ += value; return *this; } + Section &B8(uint8_t value) { contents_ += value; return *this; } + Section &D8(uint8_t value) { contents_ += value; return *this; } + Section &L16(uint16_t), &L32(uint32_t), &L64(uint64_t), + &B16(uint16_t), &B32(uint32_t), &B64(uint64_t), + &D16(uint16_t), &D32(uint32_t), &D64(uint64_t); + Section &L8(const Label &label), &L16(const Label &label), + &L32(const Label &label), &L64(const Label &label), + &B8(const Label &label), &B16(const Label &label), + &B32(const Label &label), &B64(const Label &label), + &D8(const Label &label), &D16(const Label &label), + &D32(const Label &label), &D64(const Label &label); + + // Append VALUE in a signed LEB128 (Little-Endian Base 128) form. + // + // The signed LEB128 representation of an integer N is a variable + // number of bytes: + // + // - If N is between -0x40 and 0x3f, then its signed LEB128 + // representation is a single byte whose value is N. + // + // - Otherwise, its signed LEB128 representation is (N & 0x7f) | + // 0x80, followed by the signed LEB128 representation of N / 128, + // rounded towards negative infinity. + // + // In other words, we break VALUE into groups of seven bits, put + // them in little-endian order, and then write them as eight-bit + // bytes with the high bit on all but the last. + // + // Note that VALUE cannot be a Label (we would have to implement + // relaxation). + Section &LEB128(long long value); + + // Append VALUE in unsigned LEB128 (Little-Endian Base 128) form. + // + // The unsigned LEB128 representation of an integer N is a variable + // number of bytes: + // + // - If N is between 0 and 0x7f, then its unsigned LEB128 + // representation is a single byte whose value is N. + // + // - Otherwise, its unsigned LEB128 representation is (N & 0x7f) | + // 0x80, followed by the unsigned LEB128 representation of N / + // 128, rounded towards negative infinity. + // + // Note that VALUE cannot be a Label (we would have to implement + // relaxation). 
+ Section &ULEB128(uint64_t value); + + // Jump to the next location aligned on an ALIGNMENT-byte boundary, + // relative to the start of the section. Fill the gap with PAD_BYTE. + // ALIGNMENT must be a power of two. Return a reference to this + // section. + Section &Align(size_t alignment, uint8_t pad_byte = 0); + + // Clear the contents of this section. + void Clear(); + + // Return the current size of the section. + size_t Size() const { return contents_.size(); } + + // Return a label representing the start of the section. + // + // It is up to the user whether this label represents the section's + // position in an object file, the section's address in memory, or + // what have you; some applications may need both, in which case + // this simple-minded interface won't be enough. This class only + // provides a single start label, for use with the Here and Mark + // member functions. + // + // Ideally, we'd provide this in a subclass that actually knows more + // about the application at hand and can provide an appropriate + // collection of start labels. But then the appending member + // functions like Append and D32 would return a reference to the + // base class, not the derived class, and the chaining won't work. + // Since the only value here is in pretty notation, that's a fatal + // flaw. + Label start() const { return start_; } + + // Return a label representing the point at which the next Appended + // item will appear in the section, relative to start(). + Label Here() const { return start_ + Size(); } + + // Set *LABEL to Here, and return a reference to this section. + Section &Mark(Label *label) { *label = Here(); return *this; } + + // If there are no undefined label references left in this + // section, set CONTENTS to the contents of this section, as a + // string, and clear this section. Return true on success, or false + // if there were still undefined labels. + bool GetContents(string *contents); + + private: + // Used internally. A reference to a label's value. + struct Reference { + Reference(size_t set_offset, Endianness set_endianness, size_t set_size, + const Label &set_label) + : offset(set_offset), endianness(set_endianness), size(set_size), + label(set_label) { } + + // The offset of the reference within the section. + size_t offset; + + // The endianness of the reference. + Endianness endianness; + + // The size of the reference. + size_t size; + + // The label to which this is a reference. + Label label; + }; + + // The default endianness of this section. + Endianness endianness_; + + // The contents of the section. + string contents_; + + // References to labels within those contents. + vector references_; + + // A label referring to the beginning of the section. + Label start_; +}; + +} // namespace test_assembler +} // namespace google_breakpad + +#endif // PROCESSOR_TEST_ASSEMBLER_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/test_assembler_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/common/test_assembler_unittest.cc new file mode 100644 index 0000000000..94b5a5ce5f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/test_assembler_unittest.cc @@ -0,0 +1,1662 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. 
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Original author: Jim Blandy
+
+// test_assembler_unittest.cc: Unit tests for google_breakpad::TestAssembler.
+
+#include <string.h>
+#include <string>
+
+#include "breakpad_googletest_includes.h"
+#include "common/test_assembler.h"
+#include "common/using_std_string.h"
+
+using google_breakpad::test_assembler::Label;
+using google_breakpad::test_assembler::Section;
+using google_breakpad::test_assembler::kBigEndian;
+using google_breakpad::test_assembler::kLittleEndian;
+using testing::Test;
+
+TEST(ConstructLabel, Simple) {
+  Label l;
+}
+
+TEST(ConstructLabel, Undefined) {
+  Label l;
+  EXPECT_FALSE(l.IsKnownConstant());
+}
+
+TEST(ConstructLabelDeathTest, Undefined) {
+  Label l;
+  ASSERT_DEATH(l.Value(), "IsKnownConstant\\(&v\\)");
+}
+
+TEST(ConstructLabel, Constant) {
+  Label l(0x060b9f974eaf301eULL);
+  uint64_t v;
+  EXPECT_TRUE(l.IsKnownConstant(&v));
+  EXPECT_EQ(v, 0x060b9f974eaf301eULL);
+  EXPECT_EQ(l.Value(), 0x060b9f974eaf301eULL);
+}
+
+TEST(ConstructLabel, Copy) {
+  Label l;
+  Label m(l);
+  uint64_t v;
+  EXPECT_TRUE(l.IsKnownOffsetFrom(m, &v));
+  EXPECT_EQ(0U, v);
+}
+
+// The left-hand-side of a label assignment can be either
+// unconstrained, related, or known. The right-hand-side can be any of
+// those, or an integer.
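The tests below walk through that matrix case by case. Conceptually, the Binding chains behave like a union-find forest whose edges carry offsets, which is what makes every combination work out. The following stripped-down model is illustrative only: Node, Find, and Relate are invented names, and the reference counting and consistency assertions exercised by the death tests are omitted.

    #include <cstdint>
    #include <utility>

    // Each node either is a root (parent == nullptr) or satisfies
    // value(node) == value(parent) + offset.
    struct Node {
      Node* parent = nullptr;
      uint64_t offset = 0;
      bool known = false;     // meaningful only for roots
      uint64_t value = 0;     // valid when known is true
    };

    // Follow parent links to the root, accumulating the total offset and
    // compressing the path along the way.
    std::pair<Node*, uint64_t> Find(Node* n) {
      if (!n->parent) return {n, 0};
      auto [root, off] = Find(n->parent);
      n->parent = root;
      n->offset += off;
      return {root, n->offset};
    }

    // Record the constraint a == b + delta, the effect of 'a = b + delta'.
    void Relate(Node* a, Node* b, uint64_t delta) {
      auto [ra, off_a] = Find(a);   // a == ra + off_a
      auto [rb, off_b] = Find(b);   // b == rb + off_b
      if (ra == rb) return;         // already related; the real code checks consistency
      ra->parent = rb;              // ra == rb + (off_b + delta - off_a)
      ra->offset = off_b + delta - off_a;
      if (ra->known) {              // a known constant propagates to the new root
        rb->known = true;
        rb->value = ra->value - ra->offset;
      }
    }

Find mirrors the path compression in Label::Binding::Get, and Relate mirrors Binding::Set: relating two chains re-parents one root under the other with the right offset, which is why a later assignment such as 'a = 12' reaches every label that was ever related to a.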
+TEST(Assignment, UnconstrainedToUnconstrained) { + Label l, m; + l = m; + EXPECT_EQ(0U, l-m); + EXPECT_TRUE(l.IsKnownOffsetFrom(m)); + uint64_t d; + EXPECT_TRUE(l.IsKnownOffsetFrom(m, &d)); + EXPECT_EQ(0U, d); + EXPECT_FALSE(l.IsKnownConstant()); +} + +TEST(Assignment, UnconstrainedToRelated) { + Label l, m, n; + l = n; + l = m; + EXPECT_EQ(0U, l-m); + EXPECT_TRUE(l.IsKnownOffsetFrom(m)); + uint64_t d; + EXPECT_TRUE(l.IsKnownOffsetFrom(m, &d)); + EXPECT_EQ(0U, d); + EXPECT_FALSE(l.IsKnownConstant()); +} + +TEST(Assignment, UnconstrainedToKnown) { + Label l, m; + l = 0x8fd16e55b20a39c1ULL; + l = m; + EXPECT_EQ(0U, l-m); + EXPECT_TRUE(l.IsKnownOffsetFrom(m)); + uint64_t d; + EXPECT_TRUE(l.IsKnownOffsetFrom(m, &d)); + EXPECT_EQ(0U, d); + EXPECT_TRUE(m.IsKnownConstant()); + EXPECT_EQ(0x8fd16e55b20a39c1ULL, m.Value()); +} + +TEST(Assignment, RelatedToUnconstrained) { + Label l, m, n; + m = n; + l = m; + EXPECT_EQ(0U, l-n); + EXPECT_TRUE(l.IsKnownOffsetFrom(n)); + uint64_t d; + EXPECT_TRUE(l.IsKnownOffsetFrom(n, &d)); + EXPECT_EQ(0U, d); + EXPECT_FALSE(l.IsKnownConstant()); +} + +TEST(Assignment, RelatedToRelated) { + Label l, m, n, o; + l = n; + m = o; + l = m; + EXPECT_EQ(0U, n-o); + EXPECT_TRUE(n.IsKnownOffsetFrom(o)); + uint64_t d; + EXPECT_TRUE(n.IsKnownOffsetFrom(o, &d)); + EXPECT_EQ(0U, d); + EXPECT_FALSE(l.IsKnownConstant()); +} + +TEST(Assignment, RelatedToKnown) { + Label l, m, n; + m = n; + l = 0xd2011f8c82ad56f2ULL; + l = m; + EXPECT_TRUE(l.IsKnownConstant()); + EXPECT_EQ(0xd2011f8c82ad56f2ULL, l.Value()); + EXPECT_TRUE(m.IsKnownConstant()); + EXPECT_EQ(0xd2011f8c82ad56f2ULL, m.Value()); + EXPECT_TRUE(n.IsKnownConstant()); + EXPECT_EQ(0xd2011f8c82ad56f2ULL, n.Value()); +} + +TEST(Assignment, KnownToUnconstrained) { + Label l, m; + m = 0x50b024c0d6073887ULL; + l = m; + EXPECT_TRUE(l.IsKnownConstant()); + EXPECT_EQ(0x50b024c0d6073887ULL, l.Value()); + EXPECT_TRUE(m.IsKnownConstant()); + EXPECT_EQ(0x50b024c0d6073887ULL, m.Value()); +} + +TEST(Assignment, KnownToRelated) { + Label l, m, n; + l = n; + m = 0x5348883655c727e5ULL; + l = m; + EXPECT_TRUE(l.IsKnownConstant()); + EXPECT_EQ(0x5348883655c727e5ULL, l.Value()); + EXPECT_TRUE(m.IsKnownConstant()); + EXPECT_EQ(0x5348883655c727e5ULL, m.Value()); + EXPECT_TRUE(n.IsKnownConstant()); + EXPECT_EQ(0x5348883655c727e5ULL, n.Value()); +} + +TEST(Assignment, KnownToKnown) { + Label l, m; + l = 0x36c209c20987564eULL; + m = 0x36c209c20987564eULL; + l = m; + EXPECT_TRUE(l.IsKnownConstant()); + EXPECT_EQ(0x36c209c20987564eULL, l.Value()); + EXPECT_TRUE(m.IsKnownConstant()); + EXPECT_EQ(0x36c209c20987564eULL, m.Value()); +} + +TEST(Assignment, ConstantToUnconstrained) { + Label l; + l = 0xc02495f4d7f5a957ULL; + EXPECT_TRUE(l.IsKnownConstant()); + EXPECT_EQ(0xc02495f4d7f5a957ULL, l.Value()); +} + +TEST(Assignment, ConstantToRelated) { + Label l, m; + l = m; + l = 0x4577901cf275488dULL; + EXPECT_TRUE(l.IsKnownConstant()); + EXPECT_EQ(0x4577901cf275488dULL, l.Value()); + EXPECT_TRUE(m.IsKnownConstant()); + EXPECT_EQ(0x4577901cf275488dULL, m.Value()); +} + +TEST(Assignment, ConstantToKnown) { + Label l; + l = 0xec0b9c369b7e8ea7ULL; + l = 0xec0b9c369b7e8ea7ULL; + EXPECT_TRUE(l.IsKnownConstant()); + EXPECT_EQ(0xec0b9c369b7e8ea7ULL, l.Value()); +} + +TEST(AssignmentDeathTest, Self) { + Label l; + ASSERT_DEATH(l = l, "binding != this"); +} + +TEST(AssignmentDeathTest, IndirectCycle) { + Label l, m, n; + l = m; + m = n; + ASSERT_DEATH(n = l, "binding != this"); +} + +TEST(AssignmentDeathTest, Cycle) { + Label l, m, n, o; + l = m; + m = n; + o = n; + 
ASSERT_DEATH(o = l, "binding != this"); +} + +TEST(Addition, LabelConstant) { + Label l, m; + m = l + 0x5248d93e8bbe9497ULL; + EXPECT_TRUE(m.IsKnownOffsetFrom(l)); + uint64_t d; + EXPECT_TRUE(m.IsKnownOffsetFrom(l, &d)); + EXPECT_EQ(0x5248d93e8bbe9497ULL, d); + EXPECT_FALSE(m.IsKnownConstant()); +} + +TEST(Addition, ConstantLabel) { + Label l, m; + m = 0xf51e94e00d6e3c84ULL + l; + EXPECT_TRUE(m.IsKnownOffsetFrom(l)); + uint64_t d; + EXPECT_TRUE(m.IsKnownOffsetFrom(l, &d)); + EXPECT_EQ(0xf51e94e00d6e3c84ULL, d); + EXPECT_FALSE(m.IsKnownConstant()); +} + +TEST(Addition, KnownLabelConstant) { + Label l, m; + l = 0x16286307042ce0d8ULL; + m = l + 0x3fdddd91306719d7ULL; + EXPECT_TRUE(m.IsKnownOffsetFrom(l)); + uint64_t d; + EXPECT_TRUE(m.IsKnownOffsetFrom(l, &d)); + EXPECT_EQ(0x3fdddd91306719d7ULL, d); + EXPECT_TRUE(m.IsKnownConstant()); + EXPECT_EQ(0x16286307042ce0d8ULL + 0x3fdddd91306719d7ULL, m.Value()); +} + +TEST(Addition, ConstantKnownLabel) { + Label l, m; + l = 0x50f62d0cdd1031deULL; + m = 0x1b13462d8577c538ULL + l; + EXPECT_TRUE(m.IsKnownOffsetFrom(l)); + uint64_t d; + EXPECT_TRUE(m.IsKnownOffsetFrom(l, &d)); + EXPECT_EQ(0x1b13462d8577c538ULL, d); + EXPECT_TRUE(m.IsKnownConstant()); + EXPECT_EQ(0x50f62d0cdd1031deULL + 0x1b13462d8577c538ULL, m.Value()); +} + +TEST(Subtraction, LabelConstant) { + Label l, m; + m = l - 0x0620884d21d3138eULL; + EXPECT_TRUE(m.IsKnownOffsetFrom(l)); + uint64_t d; + EXPECT_TRUE(m.IsKnownOffsetFrom(l, &d)); + EXPECT_EQ(-0x0620884d21d3138eULL, d); + EXPECT_FALSE(m.IsKnownConstant()); +} + +TEST(Subtraction, KnownLabelConstant) { + Label l, m; + l = 0x6237fbaf9ef7929eULL; + m = l - 0x317730995d2ab6eeULL; + EXPECT_TRUE(m.IsKnownOffsetFrom(l)); + uint64_t d; + EXPECT_TRUE(m.IsKnownOffsetFrom(l, &d)); + EXPECT_EQ(-0x317730995d2ab6eeULL, d); + EXPECT_TRUE(m.IsKnownConstant()); + EXPECT_EQ(0x6237fbaf9ef7929eULL - 0x317730995d2ab6eeULL, m.Value()); +} + +TEST(SubtractionDeathTest, LabelLabel) { + Label l, m; + ASSERT_DEATH(l - m, "IsKnownOffsetFrom\\(label, &offset\\)"); +} + +TEST(Subtraction, LabelLabel) { + Label l, m; + l = m + 0x7fa77ec63e28a17aULL; + EXPECT_EQ(0x7fa77ec63e28a17aULL, l - m); + EXPECT_EQ(-0x7fa77ec63e28a17aULL, m - l); +} + +TEST(IsKnownConstant, Undefined) { + Label l; + EXPECT_FALSE(l.IsKnownConstant()); +} + +TEST(IsKnownConstant, RelatedLabel) { + Label l, m; + l = m; + EXPECT_FALSE(l.IsKnownConstant()); + EXPECT_FALSE(m.IsKnownConstant()); +} + +TEST(IsKnownConstant, Constant) { + Label l; + l = 0xf374b1bdd6a22576ULL; + EXPECT_TRUE(l.IsKnownConstant()); +} + +TEST(IsKnownOffsetFrom, Unrelated) { + Label l, m; + EXPECT_FALSE(l.IsKnownOffsetFrom(m)); +} + +TEST(IsKnownOffsetFrom, Related) { + Label l, m; + l = m; + EXPECT_TRUE(l.IsKnownOffsetFrom(m)); +} + +// Test the construction of chains of related labels, and the +// propagation of values through them. +// +// Although the relations between labels are supposed to behave +// symmetrically --- that is, 'a = b' should put a and b in +// indistinguishable states --- there's a distinction made internally +// between the target (a) and the source (b). +// +// So there are five test axes to cover: +// +// - Do we construct the chain with assignment ("Assign") or with constructors +// ("Construct")? +// +// - Do we set the value of the label at the start of the chain +// ("Start") or the label at the end ("End")? +// +// - Are we testing the propagation of a relationship between variable +// values ("Relation"), or the propagation of a known constant value +// ("Value")? 
+// +// - Do we set the value before building the chain ("Before") or after +// the chain has been built ("After")? +// +// - Do we add new relationships to the end of the existing chain +// ("Forward") or to the beginning ("Backward")? +// +// Of course, "Construct" and "Backward" can't be combined, which +// eliminates eight combinations, and "Construct", "End", and "Before" +// can't be combined, which eliminates two more, so there are are 22 +// combinations, not 32. + +TEST(LabelChain, AssignStartRelationBeforeForward) { + Label a, b, c, d; + Label x; + a = x; + b = a + 0x1; + c = b + 0x10; + d = c + 0x100; + EXPECT_EQ(0x111U, d-x); + EXPECT_EQ(0x11U, c-x); + EXPECT_EQ(0x1U, b-x); + EXPECT_EQ(0U, a-x); +} + +TEST(LabelChain, AssignStartRelationBeforeBackward) { + Label a, b, c, d; + Label x; + a = x; + d = c + 0x100; + c = b + 0x10; + b = a + 0x1; + EXPECT_EQ(0x111U, d-x); + EXPECT_EQ(0x11U, c-x); + EXPECT_EQ(0x1U, b-x); + EXPECT_EQ(0U, a-x); +} + +TEST(LabelChain, AssignStartRelationAfterForward) { + Label a, b, c, d; + Label x; + b = a + 0x1; + c = b + 0x10; + d = c + 0x100; + a = x; + EXPECT_EQ(0x111U, d-x); + EXPECT_EQ(0x11U, c-x); + EXPECT_EQ(0x1U, b-x); + EXPECT_EQ(0U, a-x); +} + +TEST(LabelChain, AssignStartRelationAfterBackward) { + Label a, b, c, d; + Label x; + d = c + 0x100; + c = b + 0x10; + b = a + 0x1; + a = x; + EXPECT_EQ(0x111U, d-x); + EXPECT_EQ(0x11U, c-x); + EXPECT_EQ(0x1U, b-x); + EXPECT_EQ(0U, a-x); +} + +TEST(LabelChain, AssignStartValueBeforeForward) { + Label a, b, c, d; + a = 0xa131200190546ac2ULL; + b = a + 0x1; + c = b + 0x10; + d = c + 0x100; + EXPECT_EQ(0xa131200190546ac2ULL + 0x111U, d.Value()); + EXPECT_EQ(0xa131200190546ac2ULL + 0x11U, c.Value()); + EXPECT_EQ(0xa131200190546ac2ULL + 0x1U, b.Value()); + EXPECT_EQ(0xa131200190546ac2ULL + 0U, a.Value()); +} + +TEST(LabelChain, AssignStartValueBeforeBackward) { + Label a, b, c, d; + a = 0x8da17e1670ad4fa2ULL; + d = c + 0x100; + c = b + 0x10; + b = a + 0x1; + EXPECT_EQ(0x8da17e1670ad4fa2ULL + 0x111U, d.Value()); + EXPECT_EQ(0x8da17e1670ad4fa2ULL + 0x11U, c.Value()); + EXPECT_EQ(0x8da17e1670ad4fa2ULL + 0x1U, b.Value()); + EXPECT_EQ(0x8da17e1670ad4fa2ULL + 0U, a.Value()); +} + +TEST(LabelChain, AssignStartValueAfterForward) { + Label a, b, c, d; + b = a + 0x1; + c = b + 0x10; + d = c + 0x100; + a = 0x99b8f51bafd41adaULL; + EXPECT_EQ(0x99b8f51bafd41adaULL + 0x111U, d.Value()); + EXPECT_EQ(0x99b8f51bafd41adaULL + 0x11U, c.Value()); + EXPECT_EQ(0x99b8f51bafd41adaULL + 0x1U, b.Value()); + EXPECT_EQ(0x99b8f51bafd41adaULL + 0U, a.Value()); +} + +TEST(LabelChain, AssignStartValueAfterBackward) { + Label a, b, c, d; + d = c + 0x100; + c = b + 0x10; + b = a + 0x1; + a = 0xc86ca1d97ab5df6eULL; + EXPECT_EQ(0xc86ca1d97ab5df6eULL + 0x111U, d.Value()); + EXPECT_EQ(0xc86ca1d97ab5df6eULL + 0x11U, c.Value()); + EXPECT_EQ(0xc86ca1d97ab5df6eULL + 0x1U, b.Value()); + EXPECT_EQ(0xc86ca1d97ab5df6eULL + 0U, a.Value()); +} + +TEST(LabelChain, AssignEndRelationBeforeForward) { + Label a, b, c, d; + Label x; + x = d; + b = a + 0x1; + c = b + 0x10; + d = c + 0x100; + EXPECT_EQ(-(uint64_t)0x111U, a-x); + EXPECT_EQ(-(uint64_t)0x110U, b-x); + EXPECT_EQ(-(uint64_t)0x100U, c-x); + EXPECT_EQ(-(uint64_t)0U, d-x); +} + +TEST(LabelChain, AssignEndRelationBeforeBackward) { + Label a, b, c, d; + Label x; + x = d; + d = c + 0x100; + c = b + 0x10; + b = a + 0x1; + EXPECT_EQ(-(uint64_t)0x111U, a-x); + EXPECT_EQ(-(uint64_t)0x110U, b-x); + EXPECT_EQ(-(uint64_t)0x100U, c-x); + EXPECT_EQ(-(uint64_t)0U, d-x); +} + +TEST(LabelChain, 
AssignEndRelationAfterForward) { + Label a, b, c, d; + Label x; + b = a + 0x1; + c = b + 0x10; + d = c + 0x100; + x = d; + EXPECT_EQ(-(uint64_t)0x111U, a-x); + EXPECT_EQ(-(uint64_t)0x110U, b-x); + EXPECT_EQ(-(uint64_t)0x100U, c-x); + EXPECT_EQ(-(uint64_t)0x000U, d-x); +} + +TEST(LabelChain, AssignEndRelationAfterBackward) { + Label a, b, c, d; + Label x; + d = c + 0x100; + c = b + 0x10; + b = a + 0x1; + x = d; + EXPECT_EQ(-(uint64_t)0x111U, a-x); + EXPECT_EQ(-(uint64_t)0x110U, b-x); + EXPECT_EQ(-(uint64_t)0x100U, c-x); + EXPECT_EQ(-(uint64_t)0x000U, d-x); +} + +TEST(LabelChain, AssignEndValueBeforeForward) { + Label a, b, c, d; + d = 0xa131200190546ac2ULL; + b = a + 0x1; + c = b + 0x10; + d = c + 0x100; + EXPECT_EQ(0xa131200190546ac2ULL - 0x111, a.Value()); + EXPECT_EQ(0xa131200190546ac2ULL - 0x110, b.Value()); + EXPECT_EQ(0xa131200190546ac2ULL - 0x100, c.Value()); + EXPECT_EQ(0xa131200190546ac2ULL - 0x000, d.Value()); +} + +TEST(LabelChain, AssignEndValueBeforeBackward) { + Label a, b, c, d; + d = 0x8da17e1670ad4fa2ULL; + d = c + 0x100; + c = b + 0x10; + b = a + 0x1; + EXPECT_EQ(0x8da17e1670ad4fa2ULL - 0x111, a.Value()); + EXPECT_EQ(0x8da17e1670ad4fa2ULL - 0x110, b.Value()); + EXPECT_EQ(0x8da17e1670ad4fa2ULL - 0x100, c.Value()); + EXPECT_EQ(0x8da17e1670ad4fa2ULL - 0x000, d.Value()); +} + +TEST(LabelChain, AssignEndValueAfterForward) { + Label a, b, c, d; + b = a + 0x1; + c = b + 0x10; + d = c + 0x100; + d = 0x99b8f51bafd41adaULL; + EXPECT_EQ(0x99b8f51bafd41adaULL - 0x111, a.Value()); + EXPECT_EQ(0x99b8f51bafd41adaULL - 0x110, b.Value()); + EXPECT_EQ(0x99b8f51bafd41adaULL - 0x100, c.Value()); + EXPECT_EQ(0x99b8f51bafd41adaULL - 0x000, d.Value()); +} + +TEST(LabelChain, AssignEndValueAfterBackward) { + Label a, b, c, d; + d = c + 0x100; + c = b + 0x10; + b = a + 0x1; + d = 0xc86ca1d97ab5df6eULL; + EXPECT_EQ(0xc86ca1d97ab5df6eULL - 0x111, a.Value()); + EXPECT_EQ(0xc86ca1d97ab5df6eULL - 0x110, b.Value()); + EXPECT_EQ(0xc86ca1d97ab5df6eULL - 0x100, c.Value()); + EXPECT_EQ(0xc86ca1d97ab5df6eULL - 0x000, d.Value()); +} + +TEST(LabelChain, ConstructStartRelationBeforeForward) { + Label x; + Label a(x); + Label b(a + 0x1); + Label c(b + 0x10); + Label d(c + 0x100); + EXPECT_EQ(0x111U, d-x); + EXPECT_EQ(0x11U, c-x); + EXPECT_EQ(0x1U, b-x); + EXPECT_EQ(0U, a-x); +} + +TEST(LabelChain, ConstructStartRelationAfterForward) { + Label x; + Label a; + Label b(a + 0x1); + Label c(b + 0x10); + Label d(c + 0x100); + a = x; + EXPECT_EQ(0x111U, d-x); + EXPECT_EQ(0x11U, c-x); + EXPECT_EQ(0x1U, b-x); + EXPECT_EQ(0U, a-x); +} + +TEST(LabelChain, ConstructStartValueBeforeForward) { + Label a(0x5d234d177d01ccc8ULL); + Label b(a + 0x1); + Label c(b + 0x10); + Label d(c + 0x100); + EXPECT_EQ(0x5d234d177d01ccc8ULL + 0x111U, d.Value()); + EXPECT_EQ(0x5d234d177d01ccc8ULL + 0x011U, c.Value()); + EXPECT_EQ(0x5d234d177d01ccc8ULL + 0x001U, b.Value()); + EXPECT_EQ(0x5d234d177d01ccc8ULL + 0x000U, a.Value()); +} + +TEST(LabelChain, ConstructStartValueAfterForward) { + Label a; + Label b(a + 0x1); + Label c(b + 0x10); + Label d(c + 0x100); + a = 0xded85d54586e84fcULL; + EXPECT_EQ(0xded85d54586e84fcULL + 0x111U, d.Value()); + EXPECT_EQ(0xded85d54586e84fcULL + 0x011U, c.Value()); + EXPECT_EQ(0xded85d54586e84fcULL + 0x001U, b.Value()); + EXPECT_EQ(0xded85d54586e84fcULL + 0x000U, a.Value()); +} + +TEST(LabelChain, ConstructEndRelationAfterForward) { + Label x; + Label a; + Label b(a + 0x1); + Label c(b + 0x10); + Label d(c + 0x100); + x = d; + EXPECT_EQ(-(uint64_t)0x111U, a-x); + EXPECT_EQ(-(uint64_t)0x110U, b-x); + 
EXPECT_EQ(-(uint64_t)0x100U, c-x); + EXPECT_EQ(-(uint64_t)0x000U, d-x); +} + +TEST(LabelChain, ConstructEndValueAfterForward) { + Label a; + Label b(a + 0x1); + Label c(b + 0x10); + Label d(c + 0x100); + d = 0x99b8f51bafd41adaULL; + EXPECT_EQ(0x99b8f51bafd41adaULL - 0x111, a.Value()); + EXPECT_EQ(0x99b8f51bafd41adaULL - 0x110, b.Value()); + EXPECT_EQ(0x99b8f51bafd41adaULL - 0x100, c.Value()); + EXPECT_EQ(0x99b8f51bafd41adaULL - 0x000, d.Value()); +} + +TEST(LabelTree, KnownValue) { + Label l, m, n, o, p; + l = m; + m = n; + o = p; + p = n; + l = 0x536b5de3d468a1b5ULL; + EXPECT_EQ(0x536b5de3d468a1b5ULL, o.Value()); +} + +TEST(LabelTree, Related) { + Label l, m, n, o, p; + l = m - 1; + m = n - 10; + o = p + 100; + p = n + 1000; + EXPECT_EQ(1111U, o - l); +} + +TEST(EquationDeathTest, EqualConstants) { + Label m = 0x0d3962f280f07d24ULL; + Label n = 0x0d3962f280f07d24ULL; + m = n; // no death expected +} + +TEST(EquationDeathTest, EqualIndirectConstants) { + Label m = 0xa347f1e5238fe6a1ULL; + Label n; + Label o = n; + n = 0xa347f1e5238fe6a1ULL; + n = m; // no death expected +} + +TEST(EquationDeathTest, ConstantClash) { + Label m = 0xd4cc0f4f630ec741ULL; + Label n = 0x934cd2d8254fc3eaULL; + ASSERT_DEATH(m = n, "addend_ == addend"); +} + +TEST(EquationDeathTest, IndirectConstantClash) { + Label m = 0xd4cc0f4f630ec741ULL; + Label n, o; + n = o; + o = 0xcfbe3b83ac49ce86ULL; + ASSERT_DEATH(m = n, "addend_ == addend"); +} + +// Assigning to a related label may free the next Binding on its +// chain. This test always passes; it is interesting to memory +// checkers and coverage analysis. +TEST(LabelReferenceCount, AssignmentFree) { + Label l; + { + Label m; + l = m; + } + // This should free m's Binding. + l = 0xca8bae92f0376d4fULL; + ASSERT_EQ(0xca8bae92f0376d4fULL, l.Value()); +} + +// Finding the value of a label may free the Binding it refers to. This test +// always passes; it is interesting to memory checkers and coverage analysis. +TEST(LabelReferenceCount, FindValueFree) { + Label l; + { + Label m, n; + l = m; + m = n; + n = 0x7a0b0c576672daafULL; + // At this point, l's Binding refers to m's Binding, which refers + // to n's binding. + } + // Now, l is the only reference keeping the three Bindings alive. + // Resolving its value should free l's and m's original bindings. + ASSERT_EQ(0x7a0b0c576672daafULL, l.Value()); +} + +TEST(ConstructSection, Simple) { + Section s; +} + +TEST(ConstructSection, WithEndian) { + Section s(kBigEndian); +} + +// A fixture class for TestAssembler::Section tests. 
+class SectionFixture { + public: + Section section; + string contents; + static const uint8_t data[]; + static const size_t data_size; +}; + +const uint8_t SectionFixture::data[] = { + 0x87, 0x4f, 0x43, 0x67, 0x30, 0xd0, 0xd4, 0x0e +}; + +#define I0() +#define I1(a) { a } +#define I2(a,b) { a,b } +#define I3(a,b,c) { a,b,c } +#define I4(a,b,c,d) { a,b,c,d } +#define I5(a,b,c,d,e) { a,b,c,d,e } +#define I6(a,b,c,d,e,f) { a,b,c,d,e,f } +#define I7(a,b,c,d,e,f,g) { a,b,c,d,e,f,g } +#define I8(a,b,c,d,e,f,g,h) { a,b,c,d,e,f,g,h } +#define I9(a,b,c,d,e,f,g,h,i) { a,b,c,d,e,f,g,h,i } +#define ASSERT_BYTES(s, b) \ + do \ + { \ + static const uint8_t expected_bytes[] = b; \ + ASSERT_EQ(sizeof(expected_bytes), s.size()); \ + ASSERT_TRUE(memcmp(s.data(), (const char *) expected_bytes, \ + sizeof(expected_bytes)) == 0); \ + } \ + while(0) + +class Append: public SectionFixture, public Test { }; + +TEST_F(Append, Bytes) { + section.Append(data, sizeof(data)); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_EQ(sizeof(data), contents.size()); + EXPECT_TRUE(0 == memcmp(contents.data(), (const char *) data, sizeof(data))); +} + +TEST_F(Append, BytesTwice) { + section.Append(data, sizeof(data)); + section.Append(data, sizeof(data)); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_EQ(2 * sizeof(data), contents.size()); + ASSERT_TRUE(0 == memcmp(contents.data(), (const char *) data, sizeof(data))); + ASSERT_TRUE(0 == memcmp(contents.data() + sizeof(data), + (const char *) data, sizeof(data))); +} + +TEST_F(Append, String) { + string s1 = "howdy "; + string s2 = "there"; + section.Append(s1); + section.Append(s2); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_STREQ(contents.c_str(), "howdy there"); +} + +TEST_F(Append, CString) { + section.AppendCString("howdy"); + section.AppendCString(""); + section.AppendCString("there"); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_EQ(string("howdy\0\0there\0", 13), contents); +} + +TEST_F(Append, CStringSize) { + section.AppendCString("howdy", 3); + section.AppendCString("there", 5); + section.AppendCString("fred", 6); + section.AppendCString("natalie", 0); + section.AppendCString("", 10); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_EQ(string("howtherefred\0\0\0\0\0\0\0\0\0\0\0\0", 24), contents); +} + +TEST_F(Append, RepeatedBytes) { + section.Append((size_t) 10, '*'); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_STREQ(contents.c_str(), "**********"); +} + +TEST_F(Append, GeneralLE1) { + section.Append(kLittleEndian, 1, 42); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I1(42)); +} + +TEST_F(Append, GeneralLE2) { + section.Append(kLittleEndian, 2, 0x15a1); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I2(0xa1, 0x15)); +} + +TEST_F(Append, GeneralLE3) { + section.Append(kLittleEndian, 3, 0x59ae8d); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I3(0x8d, 0xae, 0x59)); +} + +TEST_F(Append, GeneralLE4) { + section.Append(kLittleEndian, 4, 0x51603c56); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I4(0x56, 0x3c, 0x60, 0x51)); +} + +TEST_F(Append, GeneralLE5) { + section.Append(kLittleEndian, 5, 0x385e2803b4ULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I5(0xb4, 0x03, 0x28, 0x5e, 0x38)); +} + +TEST_F(Append, GeneralLE6) { + section.Append(kLittleEndian, 6, 0xc7db9534dd1fULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I6(0x1f, 0xdd, 0x34, 0x95, 0xdb, 
0xc7)); +} + +TEST_F(Append, GeneralLE7) { + section.Append(kLittleEndian, 7, 0x1445c9f1b843e6ULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I7(0xe6, 0x43, 0xb8, 0xf1, 0xc9, 0x45, 0x14)); +} + +TEST_F(Append, GeneralLE8) { + section.Append(kLittleEndian, 8, 0xaf48019dfe5c01e5ULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I8(0xe5, 0x01, 0x5c, 0xfe, 0x9d, 0x01, 0x48, 0xaf)); +} + +TEST_F(Append, GeneralBE1) { + section.Append(kBigEndian, 1, 0xd0ULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I1(0xd0)); +} + +TEST_F(Append, GeneralBE2) { + section.Append(kBigEndian, 2, 0x2e7eULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I2(0x2e, 0x7e)); +} + +TEST_F(Append, GeneralBE3) { + section.Append(kBigEndian, 3, 0x37dad6ULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I3(0x37, 0xda, 0xd6)); +} + +TEST_F(Append, GeneralBE4) { + section.Append(kBigEndian, 4, 0x715935c7ULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I4(0x71, 0x59, 0x35, 0xc7)); +} + +TEST_F(Append, GeneralBE5) { + section.Append(kBigEndian, 5, 0x42baeb02b7ULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I5(0x42, 0xba, 0xeb, 0x02, 0xb7)); +} + +TEST_F(Append, GeneralBE6) { + section.Append(kBigEndian, 6, 0xf1cdf10e7b18ULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I6(0xf1, 0xcd, 0xf1, 0x0e, 0x7b, 0x18)); +} + +TEST_F(Append, GeneralBE7) { + section.Append(kBigEndian, 7, 0xf50a724f0b0d20ULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I7(0xf5, 0x0a, 0x72, 0x4f, 0x0b, 0x0d, 0x20)); +} + +TEST_F(Append, GeneralBE8) { + section.Append(kBigEndian, 8, 0xa6b2cb5e98dc9c16ULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I8(0xa6, 0xb2, 0xcb, 0x5e, 0x98, 0xdc, 0x9c, 0x16)); +} + +TEST_F(Append, GeneralLE1Label) { + Label l; + section.Append(kLittleEndian, 1, l); + l = 42; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I1(42)); +} + +TEST_F(Append, GeneralLE2Label) { + Label l; + section.Append(kLittleEndian, 2, l); + l = 0x15a1; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I2(0xa1, 0x15)); +} + +TEST_F(Append, GeneralLE3Label) { + Label l; + section.Append(kLittleEndian, 3, l); + l = 0x59ae8d; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I3(0x8d, 0xae, 0x59)); +} + +TEST_F(Append, GeneralLE4Label) { + Label l; + section.Append(kLittleEndian, 4, l); + l = 0x51603c56; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I4(0x56, 0x3c, 0x60, 0x51)); +} + +TEST_F(Append, GeneralLE5Label) { + Label l; + section.Append(kLittleEndian, 5, l); + l = 0x385e2803b4ULL; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I5(0xb4, 0x03, 0x28, 0x5e, 0x38)); +} + +TEST_F(Append, GeneralLE6Label) { + Label l; + section.Append(kLittleEndian, 6, l); + l = 0xc7db9534dd1fULL; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I6(0x1f, 0xdd, 0x34, 0x95, 0xdb, 0xc7)); +} + +TEST_F(Append, GeneralLE7Label) { + Label l; + section.Append(kLittleEndian, 7, l); + l = 0x1445c9f1b843e6ULL; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I7(0xe6, 0x43, 0xb8, 0xf1, 0xc9, 0x45, 0x14)); +} + +TEST_F(Append, GeneralLE8Label) { + Label l; + section.Append(kLittleEndian, 8, l); + l = 0xaf48019dfe5c01e5ULL; + 
ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I8(0xe5, 0x01, 0x5c, 0xfe, 0x9d, 0x01, 0x48, 0xaf)); +} + +TEST_F(Append, GeneralBE1Label) { + Label l; + section.Append(kBigEndian, 1, l); + l = 0xd0ULL; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I1(0xd0)); +} + +TEST_F(Append, GeneralBE2Label) { + Label l; + section.Append(kBigEndian, 2, l); + l = 0x2e7eULL; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I2(0x2e, 0x7e)); +} + +TEST_F(Append, GeneralBE3Label) { + Label l; + section.Append(kBigEndian, 3, l); + l = 0x37dad6ULL; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I3(0x37, 0xda, 0xd6)); +} + +TEST_F(Append, GeneralBE4Label) { + Label l; + section.Append(kBigEndian, 4, l); + l = 0x715935c7ULL; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I4(0x71, 0x59, 0x35, 0xc7)); +} + +TEST_F(Append, GeneralBE5Label) { + Label l; + section.Append(kBigEndian, 5, l); + l = 0x42baeb02b7ULL; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I5(0x42, 0xba, 0xeb, 0x02, 0xb7)); +} + +TEST_F(Append, GeneralBE6Label) { + Label l; + section.Append(kBigEndian, 6, l); + l = 0xf1cdf10e7b18ULL; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I6(0xf1, 0xcd, 0xf1, 0x0e, 0x7b, 0x18)); +} + +TEST_F(Append, GeneralBE7Label) { + Label l; + section.Append(kBigEndian, 7, l); + l = 0xf50a724f0b0d20ULL; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I7(0xf5, 0x0a, 0x72, 0x4f, 0x0b, 0x0d, 0x20)); +} + +TEST_F(Append, GeneralBE8Label) { + Label l; + section.Append(kBigEndian, 8, l); + l = 0xa6b2cb5e98dc9c16ULL; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I8(0xa6, 0xb2, 0xcb, 0x5e, 0x98, 0xdc, 0x9c, 0x16)); +} + +TEST_F(Append, B8) { + section.Append(1, 0x2a); + section.B8(0xd3U); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I2(0x2a, 0xd3)); +} + +TEST_F(Append, B8Label) { + Label l; + section.Append(1, 0x2a); + section.B8(l); + l = 0x4bU; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I2(0x2a, 0x4b)); +} + +TEST_F(Append, B16) { + section.Append(1, 0x2a); + section.B16(0x472aU); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I3(0x2a, 0x47, 0x2a)); +} + +TEST_F(Append, B16Label) { + Label l; + section.Append(1, 0x2a); + section.B16(l); + l = 0x55e8U; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I3(0x2a, 0x55, 0xe8)); +} + +TEST_F(Append, B32) { + section.Append(1, 0x2a); + section.B32(0xbd412cbcU); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I5(0x2a, 0xbd, 0x41, 0x2c, 0xbc)); +} + +TEST_F(Append, B32Label) { + Label l; + section.Append(1, 0x2a); + section.B32(l); + l = 0x208e37d5U; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I5(0x2a, 0x20, 0x8e, 0x37, 0xd5)); +} + +TEST_F(Append, B64) { + section.Append(1, 0x2a); + section.B64(0x3402a013111e68adULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, + I9(0x2a, 0x34, 0x02, 0xa0, 0x13, 0x11, 0x1e, 0x68, 0xad)); +} + +TEST_F(Append, B64Label) { + Label l; + section.Append(1, 0x2a); + section.B64(l); + l = 0x355dbfbb4ac6d57fULL; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, + I9(0x2a, 0x35, 0x5d, 0xbf, 0xbb, 0x4a, 0xc6, 0xd5, 0x7f)); +} + +TEST_F(Append, L8) { + section.Append(1, 0x2a); + section.L8(0x26U); + 
ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I2(0x2a, 0x26)); +} + +TEST_F(Append, L8Label) { + Label l; + section.Append(1, 0x2a); + section.L8(l); + l = 0xa8U; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I2(0x2a, 0xa8)); +} + +TEST_F(Append, L16) { + section.Append(1, 0x2a); + section.L16(0xca6dU); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I3(0x2a, 0x6d, 0xca)); +} + +TEST_F(Append, L16Label) { + Label l; + section.Append(1, 0x2a); + section.L16(l); + l = 0xd21fU; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I3(0x2a, 0x1f, 0xd2)); +} + +TEST_F(Append, L32) { + section.Append(1, 0x2a); + section.L32(0x558f6181U); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I5(0x2a, 0x81, 0x61, 0x8f, 0x55)); +} + +TEST_F(Append, L32Label) { + Label l; + section.Append(1, 0x2a); + section.L32(l); + l = 0x4b810f82U; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I5(0x2a, 0x82, 0x0f, 0x81, 0x4b)); +} + +TEST_F(Append, L64) { + section.Append(1, 0x2a); + section.L64(0x564384f7579515bfULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, + I9(0x2a, 0xbf, 0x15, 0x95, 0x57, 0xf7, 0x84, 0x43, 0x56)); +} + +TEST_F(Append, L64Label) { + Label l; + section.Append(1, 0x2a); + section.L64(l); + l = 0x424b1d020667c8dbULL; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, + I9(0x2a, 0xdb, 0xc8, 0x67, 0x06, 0x02, 0x1d, 0x4b, 0x42)); +} + +TEST_F(Append, D8Big) { + section.set_endianness(kBigEndian); + section.Append(1, 0x2a); + section.D8(0xe6U); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I2(0x2a, 0xe6)); +} + +TEST_F(Append, D8BigLabel) { + Label l; + section.set_endianness(kBigEndian); + section.Append(1, 0x2a); + section.D8(l); + l = 0xeeU; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I2(0x2a, 0xee)); +} + +TEST_F(Append, D16Big) { + section.set_endianness(kBigEndian); + section.Append(1, 0x2a); + section.D16(0x83b1U); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I3(0x2a, 0x83, 0xb1)); +} + +TEST_F(Append, D16BigLabel) { + Label l; + section.set_endianness(kBigEndian); + section.Append(1, 0x2a); + section.D16(l); + l = 0x5b55U; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I3(0x2a, 0x5b, 0x55)); +} + +TEST_F(Append, D32Big) { + section.set_endianness(kBigEndian); + section.Append(1, 0x2a); + section.D32(0xd0b0e431U); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I5(0x2a, 0xd0, 0xb0, 0xe4, 0x31)); +} + +TEST_F(Append, D32BigLabel) { + Label l; + section.set_endianness(kBigEndian); + section.Append(1, 0x2a); + section.D32(l); + l = 0x312fb340U; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I5(0x2a, 0x31, 0x2f, 0xb3, 0x40)); +} + +TEST_F(Append, D64Big) { + section.set_endianness(kBigEndian); + section.Append(1, 0x2a); + section.D64(0xb109843500dbcb16ULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, + I9(0x2a, 0xb1, 0x09, 0x84, 0x35, 0x00, 0xdb, 0xcb, 0x16)); +} + +TEST_F(Append, D64BigLabel) { + Label l; + section.set_endianness(kBigEndian); + section.Append(1, 0x2a); + section.D64(l); + l = 0x9a0d61b70f671fd7ULL; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, + I9(0x2a, 0x9a, 0x0d, 0x61, 0xb7, 0x0f, 0x67, 0x1f, 0xd7)); +} + +TEST_F(Append, D8Little) { + section.set_endianness(kLittleEndian); + 
section.Append(1, 0x2a); + section.D8(0x42U); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I2(0x2a, 0x42)); +} + +TEST_F(Append, D8LittleLabel) { + Label l; + section.set_endianness(kLittleEndian); + section.Append(1, 0x2a); + section.D8(l); + l = 0x05U; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I2(0x2a, 0x05)); +} + +TEST_F(Append, D16Little) { + section.set_endianness(kLittleEndian); + section.Append(1, 0x2a); + section.D16(0xc5c5U); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I3(0x2a, 0xc5, 0xc5)); +} + +TEST_F(Append, D16LittleLabel) { + Label l; + section.set_endianness(kLittleEndian); + section.Append(1, 0x2a); + section.D16(l); + l = 0xb620U; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I3(0x2a, 0x20, 0xb6)); +} + +TEST_F(Append, D32Little) { + section.set_endianness(kLittleEndian); + section.Append(1, 0x2a); + section.D32(0x1a87d0feU); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I5(0x2a, 0xfe, 0xd0, 0x87, 0x1a)); +} + +TEST_F(Append, D32LittleLabel) { + Label l; + section.set_endianness(kLittleEndian); + section.Append(1, 0x2a); + section.D32(l); + l = 0xb8012d6bU; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I5(0x2a, 0x6b, 0x2d, 0x01, 0xb8)); +} + +TEST_F(Append, D64Little) { + section.set_endianness(kLittleEndian); + section.Append(1, 0x2a); + section.D64(0x42de75c61375a1deULL); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, + I9(0x2a, 0xde, 0xa1, 0x75, 0x13, 0xc6, 0x75, 0xde, 0x42)); +} + +TEST_F(Append, D64LittleLabel) { + Label l; + section.set_endianness(kLittleEndian); + section.Append(1, 0x2a); + section.D64(l); + l = 0x8b3bececf3fb5312ULL; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, + I9(0x2a, 0x12, 0x53, 0xfb, 0xf3, 0xec, 0xec, 0x3b, 0x8b)); +} + +TEST_F(Append, Variety) { + Label a, b, c, d, e, f, g, h; + section.Append(kBigEndian, 1, a) + .Append(kLittleEndian, 8, h) + .Append(kBigEndian, 1, 0x8bULL) + .Append(kLittleEndian, 8, 0x0ea56540448f4439ULL) + .Append(kBigEndian, 2, b) + .Append(kLittleEndian, 7, g) + .Append(kBigEndian, 2, 0xcf15ULL) + .Append(kLittleEndian, 7, 0x29694f04c5724aULL) + .Append(kBigEndian, 3, c) + .Append(kLittleEndian, 6, f) + .Append(kBigEndian, 3, 0x8c3ffdULL) + .Append(kLittleEndian, 6, 0x6f11ba80187aULL) + .Append(kBigEndian, 4, d) + .Append(kLittleEndian, 5, e) + .Append(kBigEndian, 4, 0x2fda2472ULL) + .Append(kLittleEndian, 5, 0x0aa02d423fULL) + .Append(kBigEndian, 5, e) + .Append(kLittleEndian, 4, d) + .Append(kBigEndian, 5, 0x53ba432138ULL) + .Append(kLittleEndian, 4, 0xf139ae60ULL) + .Append(kBigEndian, 6, f) + .Append(kLittleEndian, 3, c) + .Append(kBigEndian, 6, 0x168e436af716ULL) + .Append(kLittleEndian, 3, 0x3ef189ULL) + .Append(kBigEndian, 7, g) + .Append(kLittleEndian, 2, b) + .Append(kBigEndian, 7, 0xacd4ef233e47d9ULL) + .Append(kLittleEndian, 2, 0x5311ULL) + .Append(kBigEndian, 8, h) + .Append(kLittleEndian, 1, a) + .Append(kBigEndian, 8, 0x4668d5f1c93637a1ULL) + .Append(kLittleEndian, 1, 0x65ULL); + a = 0x79ac9bd8aa256b35ULL; + b = 0x22d13097ef86c91cULL; + c = 0xf204968b0a05862fULL; + d = 0x163177f15a0eb4ecULL; + e = 0xbd1b0f1d977f2246ULL; + f = 0x2b0842eee83c6461ULL; + g = 0x92f4b928a4bf875eULL; + h = 0x61a199a8f7286ba6ULL; + ASSERT_EQ(8 * 18U, section.Size()); + ASSERT_TRUE(section.GetContents(&contents)); + + static const uint8_t expected[] = { + 0x35, 0xa6, 0x6b, 0x28, 0xf7, 0xa8, 0x99, 0xa1, 0x61, + 0x8b, 
0x39, 0x44, 0x8f, 0x44, 0x40, 0x65, 0xa5, 0x0e, + 0xc9, 0x1c, 0x5e, 0x87, 0xbf, 0xa4, 0x28, 0xb9, 0xf4, + 0xcf, 0x15, 0x4a, 0x72, 0xc5, 0x04, 0x4f, 0x69, 0x29, + 0x05, 0x86, 0x2f, 0x61, 0x64, 0x3c, 0xe8, 0xee, 0x42, + 0x8c, 0x3f, 0xfd, 0x7a, 0x18, 0x80, 0xba, 0x11, 0x6f, + 0x5a, 0x0e, 0xb4, 0xec, 0x46, 0x22, 0x7f, 0x97, 0x1d, + 0x2f, 0xda, 0x24, 0x72, 0x3f, 0x42, 0x2d, 0xa0, 0x0a, + 0x1d, 0x97, 0x7f, 0x22, 0x46, 0xec, 0xb4, 0x0e, 0x5a, + 0x53, 0xba, 0x43, 0x21, 0x38, 0x60, 0xae, 0x39, 0xf1, + 0x42, 0xee, 0xe8, 0x3c, 0x64, 0x61, 0x2f, 0x86, 0x05, + 0x16, 0x8e, 0x43, 0x6a, 0xf7, 0x16, 0x89, 0xf1, 0x3e, + 0xf4, 0xb9, 0x28, 0xa4, 0xbf, 0x87, 0x5e, 0x1c, 0xc9, + 0xac, 0xd4, 0xef, 0x23, 0x3e, 0x47, 0xd9, 0x11, 0x53, + 0x61, 0xa1, 0x99, 0xa8, 0xf7, 0x28, 0x6b, 0xa6, 0x35, + 0x46, 0x68, 0xd5, 0xf1, 0xc9, 0x36, 0x37, 0xa1, 0x65, + }; + + ASSERT_TRUE(0 == memcmp(contents.data(), expected, sizeof(expected))); +} + +TEST_F(Append, Section) { + section.Append("murder"); + { + Section middle; + middle.Append(" she"); + section.Append(middle); + } + section.Append(" wrote"); + EXPECT_EQ(16U, section.Size()); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_STREQ(contents.c_str(), "murder she wrote"); +} + +TEST_F(Append, SectionRefs) { + section.Append("sugar "); + Label l; + { + Section middle; + Label m; + middle.B32(m); + section.Append(middle); + m = 0x66726565; + } + section.Append(" jazz"); + EXPECT_EQ(15U, section.Size()); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_STREQ(contents.c_str(), "sugar free jazz"); +} + +TEST_F(Append, LEB128_0) { + section.LEB128(0); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\0", 1), contents); +} + +TEST_F(Append, LEB128_0x3f) { + section.LEB128(0x3f); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x3f", 1), contents); +} + +TEST_F(Append, LEB128_0x40) { + section.LEB128(0x40); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\xc0\x00", 2), contents); +} + +TEST_F(Append, LEB128_0x7f) { + section.LEB128(0x7f); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\xff\x00", 2), contents); +} + +TEST_F(Append, LEB128_0x80) { + section.LEB128(0x80); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x80\x01", 2), contents); +} + +TEST_F(Append, LEB128_0xff) { + section.LEB128(0xff); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\xff\x01", 2), contents); +} + +TEST_F(Append, LEB128_0x1fff) { + section.LEB128(0x1fff); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\xff\x3f", 2), contents); +} + +TEST_F(Append, LEB128_0x2000) { + section.LEB128(0x2000); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x80\xc0\x00", 3), contents); +} + +TEST_F(Append, LEB128_n1) { + section.LEB128(-1); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x7f", 1), contents); +} + +TEST_F(Append, LEB128_n0x40) { + section.LEB128(-0x40); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x40", 1), contents); +} + +TEST_F(Append, LEB128_n0x41) { + section.LEB128(-0x41); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\xbf\x7f", 2), contents); +} + +TEST_F(Append, LEB128_n0x7f) { + section.LEB128(-0x7f); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x81\x7f", 2), contents); +} + +TEST_F(Append, LEB128_n0x80) { + section.LEB128(-0x80); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x80\x7f", 2), contents); +} + +TEST_F(Append, 
LEB128_n0x2000) { + section.LEB128(-0x2000); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x80\x40", 2), contents); +} + +TEST_F(Append, LEB128_n0x2001) { + section.LEB128(-0x2001); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\xff\xbf\x7f", 3), contents); +} + +TEST_F(Append,ULEB128_0) { + section.ULEB128(0); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\0", 1), contents); +} + +TEST_F(Append,ULEB128_1) { + section.ULEB128(1); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x01", 1), contents); +} + +TEST_F(Append,ULEB128_0x3f) { + section.ULEB128(0x3f); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x3f", 1), contents); +} + +TEST_F(Append,ULEB128_0x40) { + section.ULEB128(0x40); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x40", 1), contents); +} + +TEST_F(Append,ULEB128_0x7f) { + section.ULEB128(0x7f); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x7f", 1), contents); +} + +TEST_F(Append,ULEB128_0x80) { + section.ULEB128(0x80); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x80\x01", 2), contents); +} + +TEST_F(Append,ULEB128_0xff) { + section.ULEB128(0xff); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\xff\x01", 2), contents); +} + +TEST_F(Append,ULEB128_0x100) { + section.ULEB128(0x100); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x80\x02", 2), contents); +} + +TEST_F(Append,ULEB128_0x1fff) { + section.ULEB128(0x1fff); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\xff\x3f", 2), contents); +} + +TEST_F(Append,ULEB128_0x2000) { + section.ULEB128(0x2000); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x80\x40", 2), contents); +} + +TEST_F(Append,ULEB128_0x3fff) { + section.ULEB128(0x3fff); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\xff\x7f", 2), contents); +} + +TEST_F(Append,ULEB128_0x4000) { + section.ULEB128(0x4000); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x80\x80\x01", 3), contents); +} + +TEST_F(Append,ULEB128_12857) { + section.ULEB128(12857); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\xb9\x64", 2), contents); +} + +TEST_F(Append, LEBChain) { + section.LEB128(-0x80).ULEB128(12857).Append("*"); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x80\x7f\xb9\x64*", 5), contents); +} + + +class GetContents: public SectionFixture, public Test { }; + +TEST_F(GetContents, Undefined) { + Label l; + section.Append(kLittleEndian, 8, l); + ASSERT_FALSE(section.GetContents(&contents)); +} + +TEST_F(GetContents, ClearsContents) { + section.Append((size_t) 10, '*'); + EXPECT_EQ(10U, section.Size()); + EXPECT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(0U, section.Size()); +} + +TEST_F(GetContents, ClearsReferences) { + Label l; + section.Append(kBigEndian, 1, l); + l = 42; + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_BYTES(contents, I1(42)); + ASSERT_TRUE(section.GetContents(&contents)); // should not die +} + +class Miscellanea: public SectionFixture, public Test { }; + +TEST_F(Miscellanea, Clear) { + section.Append("howdy"); + Label l; + section.L32(l); + EXPECT_EQ(9U, section.Size()); + section.Clear(); + EXPECT_EQ(0U, section.Size()); + l = 0x8d231bf0U; + ASSERT_TRUE(section.GetContents(&contents)); // should not die +} + +TEST_F(Miscellanea, Align) { + section.Append("*"); + EXPECT_EQ(1U, section.Size()); + 
section.Align(4).Append("*"); + EXPECT_EQ(5U, section.Size()); + section.Append("*").Align(2); + EXPECT_EQ(6U, section.Size()); +} + +TEST_F(Miscellanea, AlignPad) { + section.Append("*"); + EXPECT_EQ(1U, section.Size()); + section.Align(4, ' ').Append("*"); + EXPECT_EQ(5U, section.Size()); + section.Append("*").Align(2, ' '); + EXPECT_EQ(6U, section.Size()); + ASSERT_TRUE(section.GetContents(&contents)); + ASSERT_EQ(string("* **"), contents); +} + +TEST_F(Miscellanea, StartHereMark) { + Label m; + section.Append(42, ' ').Mark(&m).Append(13, '+'); + EXPECT_EQ(42U, m - section.start()); + EXPECT_EQ(42U + 13U, section.Here() - section.start()); + EXPECT_FALSE(section.start().IsKnownConstant()); + EXPECT_FALSE(m.IsKnownConstant()); + EXPECT_FALSE(section.Here().IsKnownConstant()); +} + +TEST_F(Miscellanea, Endianness) { + section.set_endianness(kBigEndian); + EXPECT_EQ(kBigEndian, section.endianness()); + section.set_endianness(kLittleEndian); + EXPECT_EQ(kLittleEndian, section.endianness()); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/testdata/func-line-pairing.h b/TMessagesProj/jni/third_party/breakpad/src/common/testdata/func-line-pairing.h new file mode 100644 index 0000000000..05538f961b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/testdata/func-line-pairing.h @@ -0,0 +1,676 @@ +// -*- mode: c++ -*- + +// Test data for pairing functions and lines. +// +// For a pair of functions that are adjacent (10,20),(20,25) and a +// pair that are not (10,15),(20,25), we include a test case for every +// possible arrangement of two lines relative to those functions. We +// include cases only for non-empty ranges, since empty functions and +// lines are dropped before we do any pairing. +// +// Each test case is represented by a macro call of the form: +// +// PAIRING(func1_start, func1_end, func2_start, func2_end, +// line1_start, line1_end, line2_start, line2_end, +// func1_num_lines, func2_num_lines, +// func1_line1_start, func1_line1_end, +// func1_line2_start, func1_line2_end, +// func2_line1_start, func2_line1_end, +// func2_line2_start, func2_line2_end, +// uncovered_funcs, uncovered_lines) +// +// where: +// - funcN_{start,end} is the range of the N'th function +// - lineN_{start,end} is the range of the N'th line +// - funcN_num_lines is the number of source lines that should be +// paired with the N'th function +// - funcN_lineM_{start,end} is the range of the M'th line +// paired with the N'th function, where 0,0 indicates that +// there should be no such line paired +// - uncovered_funcs is the number of functions with area that is +// uncovered by any line, and +// - uncovered_lines is the reverse.
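The PAIRING table below is written in X-macro style: a consumer defines PAIRING, includes this header, and gets one expansion per test case. Here is a minimal sketch of such a consumer; it is not part of this diff, the names PairingCase and kPairingCases are illustrative only, and the include path assumes breakpad's src/ directory is on the include path. The actual breakpad unit test that consumes this data may be structured differently.

// Sketch only (hypothetical consumer, not added by this diff).
struct PairingCase {
  int func1_start, func1_end, func2_start, func2_end;
  int line1_start, line1_end, line2_start, line2_end;
  int func1_num_lines, func2_num_lines;
  int func1_line1_start, func1_line1_end, func1_line2_start, func1_line2_end;
  int func2_line1_start, func2_line1_end, func2_line2_start, func2_line2_end;
  int uncovered_funcs, uncovered_lines;
};

static const PairingCase kPairingCases[] = {
// Each PAIRING(...) line in the header expands to one aggregate initializer.
#define PAIRING(...) { __VA_ARGS__ },
#include "common/testdata/func-line-pairing.h"
#undef PAIRING
};

// A parameterized test can then iterate kPairingCases: build two functions and
// two lines with the given ranges, run the function/line pairing pass, and
// assert on funcN_num_lines, the paired ranges, and the uncovered counts.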
+ +// func1 func2 line1 line2 num pairing1 pairing2 uncovered +PAIRING(10, 20, 20, 25, 6, 7, 7, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #0 +PAIRING(10, 20, 20, 25, 6, 7, 7, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #1 +PAIRING(10, 20, 20, 25, 6, 7, 7, 11, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 2) // #2 +PAIRING(10, 20, 20, 25, 6, 7, 7, 20, 1, 0, 10, 20, 0, 0, 0, 0, 0, 0, 1, 2) // #3 +PAIRING(10, 20, 20, 25, 6, 7, 7, 21, 1, 1, 10, 20, 0, 0, 20, 21, 0, 0, 1, 2) // #4 +PAIRING(10, 20, 20, 25, 6, 7, 7, 25, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 2) // #5 +PAIRING(10, 20, 20, 25, 6, 7, 7, 26, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 2) // #6 +PAIRING(10, 20, 20, 25, 6, 7, 8, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #7 +PAIRING(10, 20, 20, 25, 6, 7, 8, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #8 +PAIRING(10, 20, 20, 25, 6, 7, 8, 11, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 2) // #9 +PAIRING(10, 20, 20, 25, 6, 7, 8, 20, 1, 0, 10, 20, 0, 0, 0, 0, 0, 0, 1, 2) // #10 +PAIRING(10, 20, 20, 25, 6, 7, 8, 21, 1, 1, 10, 20, 0, 0, 20, 21, 0, 0, 1, 2) // #11 +PAIRING(10, 20, 20, 25, 6, 7, 8, 25, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 2) // #12 +PAIRING(10, 20, 20, 25, 6, 7, 8, 26, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 2) // #13 +PAIRING(10, 20, 20, 25, 6, 7, 10, 11, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 1) // #14 +PAIRING(10, 20, 20, 25, 6, 7, 10, 20, 1, 0, 10, 20, 0, 0, 0, 0, 0, 0, 1, 1) // #15 +PAIRING(10, 20, 20, 25, 6, 7, 10, 21, 1, 1, 10, 20, 0, 0, 20, 21, 0, 0, 1, 1) // #16 +PAIRING(10, 20, 20, 25, 6, 7, 10, 25, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 1) // #17 +PAIRING(10, 20, 20, 25, 6, 7, 10, 26, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 2) // #18 +PAIRING(10, 20, 20, 25, 6, 7, 11, 12, 1, 0, 11, 12, 0, 0, 0, 0, 0, 0, 2, 1) // #19 +PAIRING(10, 20, 20, 25, 6, 7, 11, 20, 1, 0, 11, 20, 0, 0, 0, 0, 0, 0, 2, 1) // #20 +PAIRING(10, 20, 20, 25, 6, 7, 11, 21, 1, 1, 11, 20, 0, 0, 20, 21, 0, 0, 2, 1) // #21 +PAIRING(10, 20, 20, 25, 6, 7, 11, 25, 1, 1, 11, 20, 0, 0, 20, 25, 0, 0, 1, 1) // #22 +PAIRING(10, 20, 20, 25, 6, 7, 11, 26, 1, 1, 11, 20, 0, 0, 20, 25, 0, 0, 1, 2) // #23 +PAIRING(10, 20, 20, 25, 6, 7, 20, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 1) // #24 +PAIRING(10, 20, 20, 25, 6, 7, 20, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 1) // #25 +PAIRING(10, 20, 20, 25, 6, 7, 20, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #26 +PAIRING(10, 20, 20, 25, 6, 7, 21, 22, 0, 1, 0, 0, 0, 0, 21, 22, 0, 0, 2, 1) // #27 +PAIRING(10, 20, 20, 25, 6, 7, 21, 25, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 1) // #28 +PAIRING(10, 20, 20, 25, 6, 7, 21, 26, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 2) // #29 +PAIRING(10, 20, 20, 25, 6, 7, 25, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #30 +PAIRING(10, 20, 20, 25, 6, 7, 26, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #31 +PAIRING(10, 20, 20, 25, 6, 10, 10, 11, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 1) // #32 +PAIRING(10, 20, 20, 25, 6, 10, 10, 20, 1, 0, 10, 20, 0, 0, 0, 0, 0, 0, 1, 1) // #33 +PAIRING(10, 20, 20, 25, 6, 10, 10, 21, 1, 1, 10, 20, 0, 0, 20, 21, 0, 0, 1, 1) // #34 +PAIRING(10, 20, 20, 25, 6, 10, 10, 25, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 1) // #35 +PAIRING(10, 20, 20, 25, 6, 10, 10, 26, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 2) // #36 +PAIRING(10, 20, 20, 25, 6, 10, 11, 12, 1, 0, 11, 12, 0, 0, 0, 0, 0, 0, 2, 1) // #37 +PAIRING(10, 20, 20, 25, 6, 10, 11, 20, 1, 0, 11, 20, 0, 0, 0, 0, 0, 0, 2, 1) // #38 +PAIRING(10, 20, 20, 25, 6, 10, 11, 21, 1, 1, 11, 20, 0, 0, 20, 21, 0, 0, 2, 1) // #39 +PAIRING(10, 20, 20, 25, 6, 10, 11, 25, 1, 1, 11, 20, 0, 0, 20, 25, 0, 0, 1, 1) // #40 +PAIRING(10, 20, 20, 25, 6, 10, 11, 26, 1, 
1, 11, 20, 0, 0, 20, 25, 0, 0, 1, 2) // #41 +PAIRING(10, 20, 20, 25, 6, 10, 20, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 1) // #42 +PAIRING(10, 20, 20, 25, 6, 10, 20, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 1) // #43 +PAIRING(10, 20, 20, 25, 6, 10, 20, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #44 +PAIRING(10, 20, 20, 25, 6, 10, 21, 22, 0, 1, 0, 0, 0, 0, 21, 22, 0, 0, 2, 1) // #45 +PAIRING(10, 20, 20, 25, 6, 10, 21, 25, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 1) // #46 +PAIRING(10, 20, 20, 25, 6, 10, 21, 26, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 2) // #47 +PAIRING(10, 20, 20, 25, 6, 10, 25, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #48 +PAIRING(10, 20, 20, 25, 6, 10, 26, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #49 +PAIRING(10, 20, 20, 25, 6, 11, 11, 12, 2, 0, 10, 11, 11, 12, 0, 0, 0, 0, 2, 1) // #50 +PAIRING(10, 20, 20, 25, 6, 11, 11, 20, 2, 0, 10, 11, 11, 20, 0, 0, 0, 0, 1, 1) // #51 +PAIRING(10, 20, 20, 25, 6, 11, 11, 21, 2, 1, 10, 11, 11, 20, 20, 21, 0, 0, 1, 1) // #52 +PAIRING(10, 20, 20, 25, 6, 11, 11, 25, 2, 1, 10, 11, 11, 20, 20, 25, 0, 0, 0, 1) // #53 +PAIRING(10, 20, 20, 25, 6, 11, 11, 26, 2, 1, 10, 11, 11, 20, 20, 25, 0, 0, 0, 2) // #54 +PAIRING(10, 20, 20, 25, 6, 11, 12, 13, 2, 0, 10, 11, 12, 13, 0, 0, 0, 0, 2, 1) // #55 +PAIRING(10, 20, 20, 25, 6, 11, 12, 20, 2, 0, 10, 11, 12, 20, 0, 0, 0, 0, 2, 1) // #56 +PAIRING(10, 20, 20, 25, 6, 11, 12, 21, 2, 1, 10, 11, 12, 20, 20, 21, 0, 0, 2, 1) // #57 +PAIRING(10, 20, 20, 25, 6, 11, 12, 25, 2, 1, 10, 11, 12, 20, 20, 25, 0, 0, 1, 1) // #58 +PAIRING(10, 20, 20, 25, 6, 11, 12, 26, 2, 1, 10, 11, 12, 20, 20, 25, 0, 0, 1, 2) // #59 +PAIRING(10, 20, 20, 25, 6, 11, 20, 21, 1, 1, 10, 11, 0, 0, 20, 21, 0, 0, 2, 1) // #60 +PAIRING(10, 20, 20, 25, 6, 11, 20, 25, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 1) // #61 +PAIRING(10, 20, 20, 25, 6, 11, 20, 26, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 2) // #62 +PAIRING(10, 20, 20, 25, 6, 11, 21, 22, 1, 1, 10, 11, 0, 0, 21, 22, 0, 0, 2, 1) // #63 +PAIRING(10, 20, 20, 25, 6, 11, 21, 25, 1, 1, 10, 11, 0, 0, 21, 25, 0, 0, 2, 1) // #64 +PAIRING(10, 20, 20, 25, 6, 11, 21, 26, 1, 1, 10, 11, 0, 0, 21, 25, 0, 0, 2, 2) // #65 +PAIRING(10, 20, 20, 25, 6, 11, 25, 26, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 2) // #66 +PAIRING(10, 20, 20, 25, 6, 11, 26, 27, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 2) // #67 +PAIRING(10, 20, 20, 25, 6, 20, 20, 21, 1, 1, 10, 20, 0, 0, 20, 21, 0, 0, 1, 1) // #68 +PAIRING(10, 20, 20, 25, 6, 20, 20, 25, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 1) // #69 +PAIRING(10, 20, 20, 25, 6, 20, 20, 26, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 2) // #70 +PAIRING(10, 20, 20, 25, 6, 20, 21, 22, 1, 1, 10, 20, 0, 0, 21, 22, 0, 0, 1, 1) // #71 +PAIRING(10, 20, 20, 25, 6, 20, 21, 25, 1, 1, 10, 20, 0, 0, 21, 25, 0, 0, 1, 1) // #72 +PAIRING(10, 20, 20, 25, 6, 20, 21, 26, 1, 1, 10, 20, 0, 0, 21, 25, 0, 0, 1, 2) // #73 +PAIRING(10, 20, 20, 25, 6, 20, 25, 26, 1, 0, 10, 20, 0, 0, 0, 0, 0, 0, 1, 2) // #74 +PAIRING(10, 20, 20, 25, 6, 20, 26, 27, 1, 0, 10, 20, 0, 0, 0, 0, 0, 0, 1, 2) // #75 +PAIRING(10, 20, 20, 25, 6, 21, 21, 22, 1, 2, 10, 20, 0, 0, 20, 21, 21, 22, 1, 1) // #76 +PAIRING(10, 20, 20, 25, 6, 21, 21, 25, 1, 2, 10, 20, 0, 0, 20, 21, 21, 25, 0, 1) // #77 +PAIRING(10, 20, 20, 25, 6, 21, 21, 26, 1, 2, 10, 20, 0, 0, 20, 21, 21, 25, 0, 2) // #78 +PAIRING(10, 20, 20, 25, 6, 21, 22, 23, 1, 2, 10, 20, 0, 0, 20, 21, 22, 23, 1, 1) // #79 +PAIRING(10, 20, 20, 25, 6, 21, 22, 25, 1, 2, 10, 20, 0, 0, 20, 21, 22, 25, 1, 1) // #80 +PAIRING(10, 20, 20, 25, 6, 21, 22, 26, 1, 2, 10, 20, 0, 0, 20, 21, 22, 25, 1, 2) // #81 +PAIRING(10, 20, 20, 25, 6, 21, 
25, 26, 1, 1, 10, 20, 0, 0, 20, 21, 0, 0, 1, 2) // #82 +PAIRING(10, 20, 20, 25, 6, 21, 26, 27, 1, 1, 10, 20, 0, 0, 20, 21, 0, 0, 1, 2) // #83 +PAIRING(10, 20, 20, 25, 6, 25, 25, 26, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 2) // #84 +PAIRING(10, 20, 20, 25, 6, 25, 26, 27, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 2) // #85 +PAIRING(10, 20, 20, 25, 6, 26, 26, 27, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 2) // #86 +PAIRING(10, 20, 20, 25, 6, 26, 27, 28, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 2) // #87 +PAIRING(10, 20, 20, 25, 10, 11, 11, 12, 2, 0, 10, 11, 11, 12, 0, 0, 0, 0, 2, 0) // #88 +PAIRING(10, 20, 20, 25, 10, 11, 11, 20, 2, 0, 10, 11, 11, 20, 0, 0, 0, 0, 1, 0) // #89 +PAIRING(10, 20, 20, 25, 10, 11, 11, 21, 2, 1, 10, 11, 11, 20, 20, 21, 0, 0, 1, 0) // #90 +PAIRING(10, 20, 20, 25, 10, 11, 11, 25, 2, 1, 10, 11, 11, 20, 20, 25, 0, 0, 0, 0) // #91 +PAIRING(10, 20, 20, 25, 10, 11, 11, 26, 2, 1, 10, 11, 11, 20, 20, 25, 0, 0, 0, 1) // #92 +PAIRING(10, 20, 20, 25, 10, 11, 12, 13, 2, 0, 10, 11, 12, 13, 0, 0, 0, 0, 2, 0) // #93 +PAIRING(10, 20, 20, 25, 10, 11, 12, 20, 2, 0, 10, 11, 12, 20, 0, 0, 0, 0, 2, 0) // #94 +PAIRING(10, 20, 20, 25, 10, 11, 12, 21, 2, 1, 10, 11, 12, 20, 20, 21, 0, 0, 2, 0) // #95 +PAIRING(10, 20, 20, 25, 10, 11, 12, 25, 2, 1, 10, 11, 12, 20, 20, 25, 0, 0, 1, 0) // #96 +PAIRING(10, 20, 20, 25, 10, 11, 12, 26, 2, 1, 10, 11, 12, 20, 20, 25, 0, 0, 1, 1) // #97 +PAIRING(10, 20, 20, 25, 10, 11, 20, 21, 1, 1, 10, 11, 0, 0, 20, 21, 0, 0, 2, 0) // #98 +PAIRING(10, 20, 20, 25, 10, 11, 20, 25, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 0) // #99 +PAIRING(10, 20, 20, 25, 10, 11, 20, 26, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 1) // #100 +PAIRING(10, 20, 20, 25, 10, 11, 21, 22, 1, 1, 10, 11, 0, 0, 21, 22, 0, 0, 2, 0) // #101 +PAIRING(10, 20, 20, 25, 10, 11, 21, 25, 1, 1, 10, 11, 0, 0, 21, 25, 0, 0, 2, 0) // #102 +PAIRING(10, 20, 20, 25, 10, 11, 21, 26, 1, 1, 10, 11, 0, 0, 21, 25, 0, 0, 2, 1) // #103 +PAIRING(10, 20, 20, 25, 10, 11, 25, 26, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 1) // #104 +PAIRING(10, 20, 20, 25, 10, 11, 26, 27, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 1) // #105 +PAIRING(10, 20, 20, 25, 10, 20, 20, 21, 1, 1, 10, 20, 0, 0, 20, 21, 0, 0, 1, 0) // #106 +PAIRING(10, 20, 20, 25, 10, 20, 20, 25, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 0) // #107 +PAIRING(10, 20, 20, 25, 10, 20, 20, 26, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 1) // #108 +PAIRING(10, 20, 20, 25, 10, 20, 21, 22, 1, 1, 10, 20, 0, 0, 21, 22, 0, 0, 1, 0) // #109 +PAIRING(10, 20, 20, 25, 10, 20, 21, 25, 1, 1, 10, 20, 0, 0, 21, 25, 0, 0, 1, 0) // #110 +PAIRING(10, 20, 20, 25, 10, 20, 21, 26, 1, 1, 10, 20, 0, 0, 21, 25, 0, 0, 1, 1) // #111 +PAIRING(10, 20, 20, 25, 10, 20, 25, 26, 1, 0, 10, 20, 0, 0, 0, 0, 0, 0, 1, 1) // #112 +PAIRING(10, 20, 20, 25, 10, 20, 26, 27, 1, 0, 10, 20, 0, 0, 0, 0, 0, 0, 1, 1) // #113 +PAIRING(10, 20, 20, 25, 10, 21, 21, 22, 1, 2, 10, 20, 0, 0, 20, 21, 21, 22, 1, 0) // #114 +PAIRING(10, 20, 20, 25, 10, 21, 21, 25, 1, 2, 10, 20, 0, 0, 20, 21, 21, 25, 0, 0) // #115 +PAIRING(10, 20, 20, 25, 10, 21, 21, 26, 1, 2, 10, 20, 0, 0, 20, 21, 21, 25, 0, 1) // #116 +PAIRING(10, 20, 20, 25, 10, 21, 22, 23, 1, 2, 10, 20, 0, 0, 20, 21, 22, 23, 1, 0) // #117 +PAIRING(10, 20, 20, 25, 10, 21, 22, 25, 1, 2, 10, 20, 0, 0, 20, 21, 22, 25, 1, 0) // #118 +PAIRING(10, 20, 20, 25, 10, 21, 22, 26, 1, 2, 10, 20, 0, 0, 20, 21, 22, 25, 1, 1) // #119 +PAIRING(10, 20, 20, 25, 10, 21, 25, 26, 1, 1, 10, 20, 0, 0, 20, 21, 0, 0, 1, 1) // #120 +PAIRING(10, 20, 20, 25, 10, 21, 26, 27, 1, 1, 10, 20, 0, 0, 20, 21, 0, 0, 1, 1) // #121 +PAIRING(10, 20, 20, 25, 10, 25, 
25, 26, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 1) // #122 +PAIRING(10, 20, 20, 25, 10, 25, 26, 27, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 1) // #123 +PAIRING(10, 20, 20, 25, 10, 26, 26, 27, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 2) // #124 +PAIRING(10, 20, 20, 25, 10, 26, 27, 28, 1, 1, 10, 20, 0, 0, 20, 25, 0, 0, 0, 2) // #125 +PAIRING(10, 20, 20, 25, 11, 12, 12, 13, 2, 0, 11, 12, 12, 13, 0, 0, 0, 0, 2, 0) // #126 +PAIRING(10, 20, 20, 25, 11, 12, 12, 20, 2, 0, 11, 12, 12, 20, 0, 0, 0, 0, 2, 0) // #127 +PAIRING(10, 20, 20, 25, 11, 12, 12, 21, 2, 1, 11, 12, 12, 20, 20, 21, 0, 0, 2, 0) // #128 +PAIRING(10, 20, 20, 25, 11, 12, 12, 25, 2, 1, 11, 12, 12, 20, 20, 25, 0, 0, 1, 0) // #129 +PAIRING(10, 20, 20, 25, 11, 12, 12, 26, 2, 1, 11, 12, 12, 20, 20, 25, 0, 0, 1, 1) // #130 +PAIRING(10, 20, 20, 25, 11, 12, 13, 14, 2, 0, 11, 12, 13, 14, 0, 0, 0, 0, 2, 0) // #131 +PAIRING(10, 20, 20, 25, 11, 12, 13, 20, 2, 0, 11, 12, 13, 20, 0, 0, 0, 0, 2, 0) // #132 +PAIRING(10, 20, 20, 25, 11, 12, 13, 21, 2, 1, 11, 12, 13, 20, 20, 21, 0, 0, 2, 0) // #133 +PAIRING(10, 20, 20, 25, 11, 12, 13, 25, 2, 1, 11, 12, 13, 20, 20, 25, 0, 0, 1, 0) // #134 +PAIRING(10, 20, 20, 25, 11, 12, 13, 26, 2, 1, 11, 12, 13, 20, 20, 25, 0, 0, 1, 1) // #135 +PAIRING(10, 20, 20, 25, 11, 12, 20, 21, 1, 1, 11, 12, 0, 0, 20, 21, 0, 0, 2, 0) // #136 +PAIRING(10, 20, 20, 25, 11, 12, 20, 25, 1, 1, 11, 12, 0, 0, 20, 25, 0, 0, 1, 0) // #137 +PAIRING(10, 20, 20, 25, 11, 12, 20, 26, 1, 1, 11, 12, 0, 0, 20, 25, 0, 0, 1, 1) // #138 +PAIRING(10, 20, 20, 25, 11, 12, 21, 22, 1, 1, 11, 12, 0, 0, 21, 22, 0, 0, 2, 0) // #139 +PAIRING(10, 20, 20, 25, 11, 12, 21, 25, 1, 1, 11, 12, 0, 0, 21, 25, 0, 0, 2, 0) // #140 +PAIRING(10, 20, 20, 25, 11, 12, 21, 26, 1, 1, 11, 12, 0, 0, 21, 25, 0, 0, 2, 1) // #141 +PAIRING(10, 20, 20, 25, 11, 12, 25, 26, 1, 0, 11, 12, 0, 0, 0, 0, 0, 0, 2, 1) // #142 +PAIRING(10, 20, 20, 25, 11, 12, 26, 27, 1, 0, 11, 12, 0, 0, 0, 0, 0, 0, 2, 1) // #143 +PAIRING(10, 20, 20, 25, 11, 20, 20, 21, 1, 1, 11, 20, 0, 0, 20, 21, 0, 0, 2, 0) // #144 +PAIRING(10, 20, 20, 25, 11, 20, 20, 25, 1, 1, 11, 20, 0, 0, 20, 25, 0, 0, 1, 0) // #145 +PAIRING(10, 20, 20, 25, 11, 20, 20, 26, 1, 1, 11, 20, 0, 0, 20, 25, 0, 0, 1, 1) // #146 +PAIRING(10, 20, 20, 25, 11, 20, 21, 22, 1, 1, 11, 20, 0, 0, 21, 22, 0, 0, 2, 0) // #147 +PAIRING(10, 20, 20, 25, 11, 20, 21, 25, 1, 1, 11, 20, 0, 0, 21, 25, 0, 0, 2, 0) // #148 +PAIRING(10, 20, 20, 25, 11, 20, 21, 26, 1, 1, 11, 20, 0, 0, 21, 25, 0, 0, 2, 1) // #149 +PAIRING(10, 20, 20, 25, 11, 20, 25, 26, 1, 0, 11, 20, 0, 0, 0, 0, 0, 0, 2, 1) // #150 +PAIRING(10, 20, 20, 25, 11, 20, 26, 27, 1, 0, 11, 20, 0, 0, 0, 0, 0, 0, 2, 1) // #151 +PAIRING(10, 20, 20, 25, 11, 21, 21, 22, 1, 2, 11, 20, 0, 0, 20, 21, 21, 22, 2, 0) // #152 +PAIRING(10, 20, 20, 25, 11, 21, 21, 25, 1, 2, 11, 20, 0, 0, 20, 21, 21, 25, 1, 0) // #153 +PAIRING(10, 20, 20, 25, 11, 21, 21, 26, 1, 2, 11, 20, 0, 0, 20, 21, 21, 25, 1, 1) // #154 +PAIRING(10, 20, 20, 25, 11, 21, 22, 23, 1, 2, 11, 20, 0, 0, 20, 21, 22, 23, 2, 0) // #155 +PAIRING(10, 20, 20, 25, 11, 21, 22, 25, 1, 2, 11, 20, 0, 0, 20, 21, 22, 25, 2, 0) // #156 +PAIRING(10, 20, 20, 25, 11, 21, 22, 26, 1, 2, 11, 20, 0, 0, 20, 21, 22, 25, 2, 1) // #157 +PAIRING(10, 20, 20, 25, 11, 21, 25, 26, 1, 1, 11, 20, 0, 0, 20, 21, 0, 0, 2, 1) // #158 +PAIRING(10, 20, 20, 25, 11, 21, 26, 27, 1, 1, 11, 20, 0, 0, 20, 21, 0, 0, 2, 1) // #159 +PAIRING(10, 20, 20, 25, 11, 25, 25, 26, 1, 1, 11, 20, 0, 0, 20, 25, 0, 0, 1, 1) // #160 +PAIRING(10, 20, 20, 25, 11, 25, 26, 27, 1, 1, 11, 20, 0, 0, 20, 25, 0, 0, 1, 1) // #161 
+PAIRING(10, 20, 20, 25, 11, 26, 26, 27, 1, 1, 11, 20, 0, 0, 20, 25, 0, 0, 1, 2) // #162 +PAIRING(10, 20, 20, 25, 11, 26, 27, 28, 1, 1, 11, 20, 0, 0, 20, 25, 0, 0, 1, 2) // #163 +PAIRING(10, 20, 20, 25, 20, 21, 21, 22, 0, 2, 0, 0, 0, 0, 20, 21, 21, 22, 2, 0) // #164 +PAIRING(10, 20, 20, 25, 20, 21, 21, 25, 0, 2, 0, 0, 0, 0, 20, 21, 21, 25, 1, 0) // #165 +PAIRING(10, 20, 20, 25, 20, 21, 21, 26, 0, 2, 0, 0, 0, 0, 20, 21, 21, 25, 1, 1) // #166 +PAIRING(10, 20, 20, 25, 20, 21, 22, 23, 0, 2, 0, 0, 0, 0, 20, 21, 22, 23, 2, 0) // #167 +PAIRING(10, 20, 20, 25, 20, 21, 22, 25, 0, 2, 0, 0, 0, 0, 20, 21, 22, 25, 2, 0) // #168 +PAIRING(10, 20, 20, 25, 20, 21, 22, 26, 0, 2, 0, 0, 0, 0, 20, 21, 22, 25, 2, 1) // #169 +PAIRING(10, 20, 20, 25, 20, 21, 25, 26, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 1) // #170 +PAIRING(10, 20, 20, 25, 20, 21, 26, 27, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 1) // #171 +PAIRING(10, 20, 20, 25, 20, 25, 25, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 1) // #172 +PAIRING(10, 20, 20, 25, 20, 25, 26, 27, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 1) // #173 +PAIRING(10, 20, 20, 25, 20, 26, 26, 27, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #174 +PAIRING(10, 20, 20, 25, 20, 26, 27, 28, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #175 +PAIRING(10, 20, 20, 25, 21, 22, 22, 23, 0, 2, 0, 0, 0, 0, 21, 22, 22, 23, 2, 0) // #176 +PAIRING(10, 20, 20, 25, 21, 22, 22, 25, 0, 2, 0, 0, 0, 0, 21, 22, 22, 25, 2, 0) // #177 +PAIRING(10, 20, 20, 25, 21, 22, 22, 26, 0, 2, 0, 0, 0, 0, 21, 22, 22, 25, 2, 1) // #178 +PAIRING(10, 20, 20, 25, 21, 22, 23, 24, 0, 2, 0, 0, 0, 0, 21, 22, 23, 24, 2, 0) // #179 +PAIRING(10, 20, 20, 25, 21, 22, 23, 25, 0, 2, 0, 0, 0, 0, 21, 22, 23, 25, 2, 0) // #180 +PAIRING(10, 20, 20, 25, 21, 22, 23, 26, 0, 2, 0, 0, 0, 0, 21, 22, 23, 25, 2, 1) // #181 +PAIRING(10, 20, 20, 25, 21, 22, 25, 26, 0, 1, 0, 0, 0, 0, 21, 22, 0, 0, 2, 1) // #182 +PAIRING(10, 20, 20, 25, 21, 22, 26, 27, 0, 1, 0, 0, 0, 0, 21, 22, 0, 0, 2, 1) // #183 +PAIRING(10, 20, 20, 25, 21, 25, 25, 26, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 1) // #184 +PAIRING(10, 20, 20, 25, 21, 25, 26, 27, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 1) // #185 +PAIRING(10, 20, 20, 25, 21, 26, 26, 27, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 2) // #186 +PAIRING(10, 20, 20, 25, 21, 26, 27, 28, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 2) // #187 +PAIRING(10, 20, 20, 25, 25, 26, 26, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #188 +PAIRING(10, 20, 20, 25, 25, 26, 27, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #189 +PAIRING(10, 20, 20, 25, 26, 27, 27, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #190 +PAIRING(10, 20, 20, 25, 26, 27, 28, 29, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #191 +PAIRING(10, 15, 20, 25, 6, 7, 7, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #192 +PAIRING(10, 15, 20, 25, 6, 7, 7, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #193 +PAIRING(10, 15, 20, 25, 6, 7, 7, 11, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 2) // #194 +PAIRING(10, 15, 20, 25, 6, 7, 7, 15, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #195 +PAIRING(10, 15, 20, 25, 6, 7, 7, 16, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #196 +PAIRING(10, 15, 20, 25, 6, 7, 7, 20, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #197 +PAIRING(10, 15, 20, 25, 6, 7, 7, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 2) // #198 +PAIRING(10, 15, 20, 25, 6, 7, 7, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #199 +PAIRING(10, 15, 20, 25, 6, 7, 7, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #200 +PAIRING(10, 15, 20, 25, 6, 7, 8, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #201 +PAIRING(10, 15, 20, 25, 6, 7, 8, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #202 
+PAIRING(10, 15, 20, 25, 6, 7, 8, 11, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 2) // #203 +PAIRING(10, 15, 20, 25, 6, 7, 8, 15, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #204 +PAIRING(10, 15, 20, 25, 6, 7, 8, 16, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #205 +PAIRING(10, 15, 20, 25, 6, 7, 8, 20, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #206 +PAIRING(10, 15, 20, 25, 6, 7, 8, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 2) // #207 +PAIRING(10, 15, 20, 25, 6, 7, 8, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #208 +PAIRING(10, 15, 20, 25, 6, 7, 8, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #209 +PAIRING(10, 15, 20, 25, 6, 7, 10, 11, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 1) // #210 +PAIRING(10, 15, 20, 25, 6, 7, 10, 15, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 1) // #211 +PAIRING(10, 15, 20, 25, 6, 7, 10, 16, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #212 +PAIRING(10, 15, 20, 25, 6, 7, 10, 20, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 1) // #213 +PAIRING(10, 15, 20, 25, 6, 7, 10, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 2) // #214 +PAIRING(10, 15, 20, 25, 6, 7, 10, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #215 +PAIRING(10, 15, 20, 25, 6, 7, 10, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #216 +PAIRING(10, 15, 20, 25, 6, 7, 11, 12, 1, 0, 11, 12, 0, 0, 0, 0, 0, 0, 2, 1) // #217 +PAIRING(10, 15, 20, 25, 6, 7, 11, 15, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 1) // #218 +PAIRING(10, 15, 20, 25, 6, 7, 11, 16, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 2) // #219 +PAIRING(10, 15, 20, 25, 6, 7, 11, 20, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 1) // #220 +PAIRING(10, 15, 20, 25, 6, 7, 11, 21, 1, 1, 11, 15, 0, 0, 20, 21, 0, 0, 2, 2) // #221 +PAIRING(10, 15, 20, 25, 6, 7, 11, 25, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 2) // #222 +PAIRING(10, 15, 20, 25, 6, 7, 11, 26, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 2) // #223 +PAIRING(10, 15, 20, 25, 6, 7, 15, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #224 +PAIRING(10, 15, 20, 25, 6, 7, 15, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #225 +PAIRING(10, 15, 20, 25, 6, 7, 15, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 2) // #226 +PAIRING(10, 15, 20, 25, 6, 7, 15, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #227 +PAIRING(10, 15, 20, 25, 6, 7, 15, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #228 +PAIRING(10, 15, 20, 25, 6, 7, 16, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #229 +PAIRING(10, 15, 20, 25, 6, 7, 16, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #230 +PAIRING(10, 15, 20, 25, 6, 7, 16, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 2) // #231 +PAIRING(10, 15, 20, 25, 6, 7, 16, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #232 +PAIRING(10, 15, 20, 25, 6, 7, 16, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #233 +PAIRING(10, 15, 20, 25, 6, 7, 20, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 1) // #234 +PAIRING(10, 15, 20, 25, 6, 7, 20, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 1) // #235 +PAIRING(10, 15, 20, 25, 6, 7, 20, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #236 +PAIRING(10, 15, 20, 25, 6, 7, 21, 22, 0, 1, 0, 0, 0, 0, 21, 22, 0, 0, 2, 1) // #237 +PAIRING(10, 15, 20, 25, 6, 7, 21, 25, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 1) // #238 +PAIRING(10, 15, 20, 25, 6, 7, 21, 26, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 2) // #239 +PAIRING(10, 15, 20, 25, 6, 7, 25, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #240 +PAIRING(10, 15, 20, 25, 6, 7, 26, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #241 +PAIRING(10, 15, 20, 25, 6, 10, 10, 11, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 1) // #242 +PAIRING(10, 15, 20, 25, 6, 10, 10, 15, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 1) // #243 +PAIRING(10, 15, 20, 25, 6, 10, 10, 16, 1, 0, 10, 15, 0, 0, 0, 0, 0, 
0, 1, 2) // #244 +PAIRING(10, 15, 20, 25, 6, 10, 10, 20, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 1) // #245 +PAIRING(10, 15, 20, 25, 6, 10, 10, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 2) // #246 +PAIRING(10, 15, 20, 25, 6, 10, 10, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #247 +PAIRING(10, 15, 20, 25, 6, 10, 10, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #248 +PAIRING(10, 15, 20, 25, 6, 10, 11, 12, 1, 0, 11, 12, 0, 0, 0, 0, 0, 0, 2, 1) // #249 +PAIRING(10, 15, 20, 25, 6, 10, 11, 15, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 1) // #250 +PAIRING(10, 15, 20, 25, 6, 10, 11, 16, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 2) // #251 +PAIRING(10, 15, 20, 25, 6, 10, 11, 20, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 1) // #252 +PAIRING(10, 15, 20, 25, 6, 10, 11, 21, 1, 1, 11, 15, 0, 0, 20, 21, 0, 0, 2, 2) // #253 +PAIRING(10, 15, 20, 25, 6, 10, 11, 25, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 2) // #254 +PAIRING(10, 15, 20, 25, 6, 10, 11, 26, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 2) // #255 +PAIRING(10, 15, 20, 25, 6, 10, 15, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #256 +PAIRING(10, 15, 20, 25, 6, 10, 15, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #257 +PAIRING(10, 15, 20, 25, 6, 10, 15, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 2) // #258 +PAIRING(10, 15, 20, 25, 6, 10, 15, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #259 +PAIRING(10, 15, 20, 25, 6, 10, 15, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #260 +PAIRING(10, 15, 20, 25, 6, 10, 16, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #261 +PAIRING(10, 15, 20, 25, 6, 10, 16, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #262 +PAIRING(10, 15, 20, 25, 6, 10, 16, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 2) // #263 +PAIRING(10, 15, 20, 25, 6, 10, 16, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #264 +PAIRING(10, 15, 20, 25, 6, 10, 16, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #265 +PAIRING(10, 15, 20, 25, 6, 10, 20, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 1) // #266 +PAIRING(10, 15, 20, 25, 6, 10, 20, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 1) // #267 +PAIRING(10, 15, 20, 25, 6, 10, 20, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #268 +PAIRING(10, 15, 20, 25, 6, 10, 21, 22, 0, 1, 0, 0, 0, 0, 21, 22, 0, 0, 2, 1) // #269 +PAIRING(10, 15, 20, 25, 6, 10, 21, 25, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 1) // #270 +PAIRING(10, 15, 20, 25, 6, 10, 21, 26, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 2) // #271 +PAIRING(10, 15, 20, 25, 6, 10, 25, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #272 +PAIRING(10, 15, 20, 25, 6, 10, 26, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #273 +PAIRING(10, 15, 20, 25, 6, 11, 11, 12, 2, 0, 10, 11, 11, 12, 0, 0, 0, 0, 2, 1) // #274 +PAIRING(10, 15, 20, 25, 6, 11, 11, 15, 2, 0, 10, 11, 11, 15, 0, 0, 0, 0, 1, 1) // #275 +PAIRING(10, 15, 20, 25, 6, 11, 11, 16, 2, 0, 10, 11, 11, 15, 0, 0, 0, 0, 1, 2) // #276 +PAIRING(10, 15, 20, 25, 6, 11, 11, 20, 2, 0, 10, 11, 11, 15, 0, 0, 0, 0, 1, 1) // #277 +PAIRING(10, 15, 20, 25, 6, 11, 11, 21, 2, 1, 10, 11, 11, 15, 20, 21, 0, 0, 1, 2) // #278 +PAIRING(10, 15, 20, 25, 6, 11, 11, 25, 2, 1, 10, 11, 11, 15, 20, 25, 0, 0, 0, 2) // #279 +PAIRING(10, 15, 20, 25, 6, 11, 11, 26, 2, 1, 10, 11, 11, 15, 20, 25, 0, 0, 0, 2) // #280 +PAIRING(10, 15, 20, 25, 6, 11, 12, 13, 2, 0, 10, 11, 12, 13, 0, 0, 0, 0, 2, 1) // #281 +PAIRING(10, 15, 20, 25, 6, 11, 12, 15, 2, 0, 10, 11, 12, 15, 0, 0, 0, 0, 2, 1) // #282 +PAIRING(10, 15, 20, 25, 6, 11, 12, 16, 2, 0, 10, 11, 12, 15, 0, 0, 0, 0, 2, 2) // #283 +PAIRING(10, 15, 20, 25, 6, 11, 12, 20, 2, 0, 10, 11, 12, 15, 0, 0, 0, 0, 2, 1) // #284 +PAIRING(10, 15, 20, 25, 6, 11, 12, 21, 2, 1, 10, 11, 12, 15, 20, 21, 
0, 0, 2, 2) // #285 +PAIRING(10, 15, 20, 25, 6, 11, 12, 25, 2, 1, 10, 11, 12, 15, 20, 25, 0, 0, 1, 2) // #286 +PAIRING(10, 15, 20, 25, 6, 11, 12, 26, 2, 1, 10, 11, 12, 15, 20, 25, 0, 0, 1, 2) // #287 +PAIRING(10, 15, 20, 25, 6, 11, 15, 16, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 2) // #288 +PAIRING(10, 15, 20, 25, 6, 11, 15, 20, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 2) // #289 +PAIRING(10, 15, 20, 25, 6, 11, 15, 21, 1, 1, 10, 11, 0, 0, 20, 21, 0, 0, 2, 2) // #290 +PAIRING(10, 15, 20, 25, 6, 11, 15, 25, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 2) // #291 +PAIRING(10, 15, 20, 25, 6, 11, 15, 26, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 2) // #292 +PAIRING(10, 15, 20, 25, 6, 11, 16, 17, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 2) // #293 +PAIRING(10, 15, 20, 25, 6, 11, 16, 20, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 2) // #294 +PAIRING(10, 15, 20, 25, 6, 11, 16, 21, 1, 1, 10, 11, 0, 0, 20, 21, 0, 0, 2, 2) // #295 +PAIRING(10, 15, 20, 25, 6, 11, 16, 25, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 2) // #296 +PAIRING(10, 15, 20, 25, 6, 11, 16, 26, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 2) // #297 +PAIRING(10, 15, 20, 25, 6, 11, 20, 21, 1, 1, 10, 11, 0, 0, 20, 21, 0, 0, 2, 1) // #298 +PAIRING(10, 15, 20, 25, 6, 11, 20, 25, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 1) // #299 +PAIRING(10, 15, 20, 25, 6, 11, 20, 26, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 2) // #300 +PAIRING(10, 15, 20, 25, 6, 11, 21, 22, 1, 1, 10, 11, 0, 0, 21, 22, 0, 0, 2, 1) // #301 +PAIRING(10, 15, 20, 25, 6, 11, 21, 25, 1, 1, 10, 11, 0, 0, 21, 25, 0, 0, 2, 1) // #302 +PAIRING(10, 15, 20, 25, 6, 11, 21, 26, 1, 1, 10, 11, 0, 0, 21, 25, 0, 0, 2, 2) // #303 +PAIRING(10, 15, 20, 25, 6, 11, 25, 26, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 2) // #304 +PAIRING(10, 15, 20, 25, 6, 11, 26, 27, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 2) // #305 +PAIRING(10, 15, 20, 25, 6, 15, 15, 16, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #306 +PAIRING(10, 15, 20, 25, 6, 15, 15, 20, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #307 +PAIRING(10, 15, 20, 25, 6, 15, 15, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 2) // #308 +PAIRING(10, 15, 20, 25, 6, 15, 15, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #309 +PAIRING(10, 15, 20, 25, 6, 15, 15, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #310 +PAIRING(10, 15, 20, 25, 6, 15, 16, 17, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #311 +PAIRING(10, 15, 20, 25, 6, 15, 16, 20, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #312 +PAIRING(10, 15, 20, 25, 6, 15, 16, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 2) // #313 +PAIRING(10, 15, 20, 25, 6, 15, 16, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #314 +PAIRING(10, 15, 20, 25, 6, 15, 16, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #315 +PAIRING(10, 15, 20, 25, 6, 15, 20, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 1) // #316 +PAIRING(10, 15, 20, 25, 6, 15, 20, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 1) // #317 +PAIRING(10, 15, 20, 25, 6, 15, 20, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #318 +PAIRING(10, 15, 20, 25, 6, 15, 21, 22, 1, 1, 10, 15, 0, 0, 21, 22, 0, 0, 1, 1) // #319 +PAIRING(10, 15, 20, 25, 6, 15, 21, 25, 1, 1, 10, 15, 0, 0, 21, 25, 0, 0, 1, 1) // #320 +PAIRING(10, 15, 20, 25, 6, 15, 21, 26, 1, 1, 10, 15, 0, 0, 21, 25, 0, 0, 1, 2) // #321 +PAIRING(10, 15, 20, 25, 6, 15, 25, 26, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #322 +PAIRING(10, 15, 20, 25, 6, 15, 26, 27, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #323 +PAIRING(10, 15, 20, 25, 6, 16, 16, 17, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #324 +PAIRING(10, 15, 20, 25, 6, 16, 16, 20, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #325 +PAIRING(10, 15, 20, 25, 6, 16, 16, 
21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 2) // #326 +PAIRING(10, 15, 20, 25, 6, 16, 16, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #327 +PAIRING(10, 15, 20, 25, 6, 16, 16, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #328 +PAIRING(10, 15, 20, 25, 6, 16, 17, 18, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #329 +PAIRING(10, 15, 20, 25, 6, 16, 17, 20, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #330 +PAIRING(10, 15, 20, 25, 6, 16, 17, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 2) // #331 +PAIRING(10, 15, 20, 25, 6, 16, 17, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #332 +PAIRING(10, 15, 20, 25, 6, 16, 17, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #333 +PAIRING(10, 15, 20, 25, 6, 16, 20, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 1) // #334 +PAIRING(10, 15, 20, 25, 6, 16, 20, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 1) // #335 +PAIRING(10, 15, 20, 25, 6, 16, 20, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #336 +PAIRING(10, 15, 20, 25, 6, 16, 21, 22, 1, 1, 10, 15, 0, 0, 21, 22, 0, 0, 1, 1) // #337 +PAIRING(10, 15, 20, 25, 6, 16, 21, 25, 1, 1, 10, 15, 0, 0, 21, 25, 0, 0, 1, 1) // #338 +PAIRING(10, 15, 20, 25, 6, 16, 21, 26, 1, 1, 10, 15, 0, 0, 21, 25, 0, 0, 1, 2) // #339 +PAIRING(10, 15, 20, 25, 6, 16, 25, 26, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #340 +PAIRING(10, 15, 20, 25, 6, 16, 26, 27, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #341 +PAIRING(10, 15, 20, 25, 6, 20, 20, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 1) // #342 +PAIRING(10, 15, 20, 25, 6, 20, 20, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 1) // #343 +PAIRING(10, 15, 20, 25, 6, 20, 20, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #344 +PAIRING(10, 15, 20, 25, 6, 20, 21, 22, 1, 1, 10, 15, 0, 0, 21, 22, 0, 0, 1, 1) // #345 +PAIRING(10, 15, 20, 25, 6, 20, 21, 25, 1, 1, 10, 15, 0, 0, 21, 25, 0, 0, 1, 1) // #346 +PAIRING(10, 15, 20, 25, 6, 20, 21, 26, 1, 1, 10, 15, 0, 0, 21, 25, 0, 0, 1, 2) // #347 +PAIRING(10, 15, 20, 25, 6, 20, 25, 26, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #348 +PAIRING(10, 15, 20, 25, 6, 20, 26, 27, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #349 +PAIRING(10, 15, 20, 25, 6, 21, 21, 22, 1, 2, 10, 15, 0, 0, 20, 21, 21, 22, 1, 1) // #350 +PAIRING(10, 15, 20, 25, 6, 21, 21, 25, 1, 2, 10, 15, 0, 0, 20, 21, 21, 25, 0, 1) // #351 +PAIRING(10, 15, 20, 25, 6, 21, 21, 26, 1, 2, 10, 15, 0, 0, 20, 21, 21, 25, 0, 2) // #352 +PAIRING(10, 15, 20, 25, 6, 21, 22, 23, 1, 2, 10, 15, 0, 0, 20, 21, 22, 23, 1, 1) // #353 +PAIRING(10, 15, 20, 25, 6, 21, 22, 25, 1, 2, 10, 15, 0, 0, 20, 21, 22, 25, 1, 1) // #354 +PAIRING(10, 15, 20, 25, 6, 21, 22, 26, 1, 2, 10, 15, 0, 0, 20, 21, 22, 25, 1, 2) // #355 +PAIRING(10, 15, 20, 25, 6, 21, 25, 26, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 2) // #356 +PAIRING(10, 15, 20, 25, 6, 21, 26, 27, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 2) // #357 +PAIRING(10, 15, 20, 25, 6, 25, 25, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #358 +PAIRING(10, 15, 20, 25, 6, 25, 26, 27, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #359 +PAIRING(10, 15, 20, 25, 6, 26, 26, 27, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #360 +PAIRING(10, 15, 20, 25, 6, 26, 27, 28, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #361 +PAIRING(10, 15, 20, 25, 10, 11, 11, 12, 2, 0, 10, 11, 11, 12, 0, 0, 0, 0, 2, 0) // #362 +PAIRING(10, 15, 20, 25, 10, 11, 11, 15, 2, 0, 10, 11, 11, 15, 0, 0, 0, 0, 1, 0) // #363 +PAIRING(10, 15, 20, 25, 10, 11, 11, 16, 2, 0, 10, 11, 11, 15, 0, 0, 0, 0, 1, 1) // #364 +PAIRING(10, 15, 20, 25, 10, 11, 11, 20, 2, 0, 10, 11, 11, 15, 0, 0, 0, 0, 1, 0) // #365 +PAIRING(10, 15, 20, 25, 10, 11, 11, 21, 2, 1, 10, 11, 11, 15, 20, 
21, 0, 0, 1, 1) // #366 +PAIRING(10, 15, 20, 25, 10, 11, 11, 25, 2, 1, 10, 11, 11, 15, 20, 25, 0, 0, 0, 1) // #367 +PAIRING(10, 15, 20, 25, 10, 11, 11, 26, 2, 1, 10, 11, 11, 15, 20, 25, 0, 0, 0, 1) // #368 +PAIRING(10, 15, 20, 25, 10, 11, 12, 13, 2, 0, 10, 11, 12, 13, 0, 0, 0, 0, 2, 0) // #369 +PAIRING(10, 15, 20, 25, 10, 11, 12, 15, 2, 0, 10, 11, 12, 15, 0, 0, 0, 0, 2, 0) // #370 +PAIRING(10, 15, 20, 25, 10, 11, 12, 16, 2, 0, 10, 11, 12, 15, 0, 0, 0, 0, 2, 1) // #371 +PAIRING(10, 15, 20, 25, 10, 11, 12, 20, 2, 0, 10, 11, 12, 15, 0, 0, 0, 0, 2, 0) // #372 +PAIRING(10, 15, 20, 25, 10, 11, 12, 21, 2, 1, 10, 11, 12, 15, 20, 21, 0, 0, 2, 1) // #373 +PAIRING(10, 15, 20, 25, 10, 11, 12, 25, 2, 1, 10, 11, 12, 15, 20, 25, 0, 0, 1, 1) // #374 +PAIRING(10, 15, 20, 25, 10, 11, 12, 26, 2, 1, 10, 11, 12, 15, 20, 25, 0, 0, 1, 1) // #375 +PAIRING(10, 15, 20, 25, 10, 11, 15, 16, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 1) // #376 +PAIRING(10, 15, 20, 25, 10, 11, 15, 20, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 1) // #377 +PAIRING(10, 15, 20, 25, 10, 11, 15, 21, 1, 1, 10, 11, 0, 0, 20, 21, 0, 0, 2, 1) // #378 +PAIRING(10, 15, 20, 25, 10, 11, 15, 25, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 1) // #379 +PAIRING(10, 15, 20, 25, 10, 11, 15, 26, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 1) // #380 +PAIRING(10, 15, 20, 25, 10, 11, 16, 17, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 1) // #381 +PAIRING(10, 15, 20, 25, 10, 11, 16, 20, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 1) // #382 +PAIRING(10, 15, 20, 25, 10, 11, 16, 21, 1, 1, 10, 11, 0, 0, 20, 21, 0, 0, 2, 1) // #383 +PAIRING(10, 15, 20, 25, 10, 11, 16, 25, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 1) // #384 +PAIRING(10, 15, 20, 25, 10, 11, 16, 26, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 1) // #385 +PAIRING(10, 15, 20, 25, 10, 11, 20, 21, 1, 1, 10, 11, 0, 0, 20, 21, 0, 0, 2, 0) // #386 +PAIRING(10, 15, 20, 25, 10, 11, 20, 25, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 0) // #387 +PAIRING(10, 15, 20, 25, 10, 11, 20, 26, 1, 1, 10, 11, 0, 0, 20, 25, 0, 0, 1, 1) // #388 +PAIRING(10, 15, 20, 25, 10, 11, 21, 22, 1, 1, 10, 11, 0, 0, 21, 22, 0, 0, 2, 0) // #389 +PAIRING(10, 15, 20, 25, 10, 11, 21, 25, 1, 1, 10, 11, 0, 0, 21, 25, 0, 0, 2, 0) // #390 +PAIRING(10, 15, 20, 25, 10, 11, 21, 26, 1, 1, 10, 11, 0, 0, 21, 25, 0, 0, 2, 1) // #391 +PAIRING(10, 15, 20, 25, 10, 11, 25, 26, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 1) // #392 +PAIRING(10, 15, 20, 25, 10, 11, 26, 27, 1, 0, 10, 11, 0, 0, 0, 0, 0, 0, 2, 1) // #393 +PAIRING(10, 15, 20, 25, 10, 15, 15, 16, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 1) // #394 +PAIRING(10, 15, 20, 25, 10, 15, 15, 20, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 1) // #395 +PAIRING(10, 15, 20, 25, 10, 15, 15, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 1) // #396 +PAIRING(10, 15, 20, 25, 10, 15, 15, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 1) // #397 +PAIRING(10, 15, 20, 25, 10, 15, 15, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 1) // #398 +PAIRING(10, 15, 20, 25, 10, 15, 16, 17, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 1) // #399 +PAIRING(10, 15, 20, 25, 10, 15, 16, 20, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 1) // #400 +PAIRING(10, 15, 20, 25, 10, 15, 16, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 1) // #401 +PAIRING(10, 15, 20, 25, 10, 15, 16, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 1) // #402 +PAIRING(10, 15, 20, 25, 10, 15, 16, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 1) // #403 +PAIRING(10, 15, 20, 25, 10, 15, 20, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 0) // #404 +PAIRING(10, 15, 20, 25, 10, 15, 20, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 0) // #405 +PAIRING(10, 15, 20, 25, 10, 15, 20, 26, 1, 1, 10, 15, 0, 0, 20, 25, 
0, 0, 0, 1) // #406 +PAIRING(10, 15, 20, 25, 10, 15, 21, 22, 1, 1, 10, 15, 0, 0, 21, 22, 0, 0, 1, 0) // #407 +PAIRING(10, 15, 20, 25, 10, 15, 21, 25, 1, 1, 10, 15, 0, 0, 21, 25, 0, 0, 1, 0) // #408 +PAIRING(10, 15, 20, 25, 10, 15, 21, 26, 1, 1, 10, 15, 0, 0, 21, 25, 0, 0, 1, 1) // #409 +PAIRING(10, 15, 20, 25, 10, 15, 25, 26, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 1) // #410 +PAIRING(10, 15, 20, 25, 10, 15, 26, 27, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 1) // #411 +PAIRING(10, 15, 20, 25, 10, 16, 16, 17, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #412 +PAIRING(10, 15, 20, 25, 10, 16, 16, 20, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #413 +PAIRING(10, 15, 20, 25, 10, 16, 16, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 2) // #414 +PAIRING(10, 15, 20, 25, 10, 16, 16, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #415 +PAIRING(10, 15, 20, 25, 10, 16, 16, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #416 +PAIRING(10, 15, 20, 25, 10, 16, 17, 18, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #417 +PAIRING(10, 15, 20, 25, 10, 16, 17, 20, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #418 +PAIRING(10, 15, 20, 25, 10, 16, 17, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 2) // #419 +PAIRING(10, 15, 20, 25, 10, 16, 17, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #420 +PAIRING(10, 15, 20, 25, 10, 16, 17, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #421 +PAIRING(10, 15, 20, 25, 10, 16, 20, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 1) // #422 +PAIRING(10, 15, 20, 25, 10, 16, 20, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 1) // #423 +PAIRING(10, 15, 20, 25, 10, 16, 20, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #424 +PAIRING(10, 15, 20, 25, 10, 16, 21, 22, 1, 1, 10, 15, 0, 0, 21, 22, 0, 0, 1, 1) // #425 +PAIRING(10, 15, 20, 25, 10, 16, 21, 25, 1, 1, 10, 15, 0, 0, 21, 25, 0, 0, 1, 1) // #426 +PAIRING(10, 15, 20, 25, 10, 16, 21, 26, 1, 1, 10, 15, 0, 0, 21, 25, 0, 0, 1, 2) // #427 +PAIRING(10, 15, 20, 25, 10, 16, 25, 26, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #428 +PAIRING(10, 15, 20, 25, 10, 16, 26, 27, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 2) // #429 +PAIRING(10, 15, 20, 25, 10, 20, 20, 21, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 0) // #430 +PAIRING(10, 15, 20, 25, 10, 20, 20, 25, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 0) // #431 +PAIRING(10, 15, 20, 25, 10, 20, 20, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 1) // #432 +PAIRING(10, 15, 20, 25, 10, 20, 21, 22, 1, 1, 10, 15, 0, 0, 21, 22, 0, 0, 1, 0) // #433 +PAIRING(10, 15, 20, 25, 10, 20, 21, 25, 1, 1, 10, 15, 0, 0, 21, 25, 0, 0, 1, 0) // #434 +PAIRING(10, 15, 20, 25, 10, 20, 21, 26, 1, 1, 10, 15, 0, 0, 21, 25, 0, 0, 1, 1) // #435 +PAIRING(10, 15, 20, 25, 10, 20, 25, 26, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 1) // #436 +PAIRING(10, 15, 20, 25, 10, 20, 26, 27, 1, 0, 10, 15, 0, 0, 0, 0, 0, 0, 1, 1) // #437 +PAIRING(10, 15, 20, 25, 10, 21, 21, 22, 1, 2, 10, 15, 0, 0, 20, 21, 21, 22, 1, 1) // #438 +PAIRING(10, 15, 20, 25, 10, 21, 21, 25, 1, 2, 10, 15, 0, 0, 20, 21, 21, 25, 0, 1) // #439 +PAIRING(10, 15, 20, 25, 10, 21, 21, 26, 1, 2, 10, 15, 0, 0, 20, 21, 21, 25, 0, 2) // #440 +PAIRING(10, 15, 20, 25, 10, 21, 22, 23, 1, 2, 10, 15, 0, 0, 20, 21, 22, 23, 1, 1) // #441 +PAIRING(10, 15, 20, 25, 10, 21, 22, 25, 1, 2, 10, 15, 0, 0, 20, 21, 22, 25, 1, 1) // #442 +PAIRING(10, 15, 20, 25, 10, 21, 22, 26, 1, 2, 10, 15, 0, 0, 20, 21, 22, 25, 1, 2) // #443 +PAIRING(10, 15, 20, 25, 10, 21, 25, 26, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 2) // #444 +PAIRING(10, 15, 20, 25, 10, 21, 26, 27, 1, 1, 10, 15, 0, 0, 20, 21, 0, 0, 1, 2) // #445 +PAIRING(10, 15, 20, 25, 10, 25, 25, 26, 1, 1, 10, 15, 0, 0, 20, 25, 0, 
0, 0, 2) // #446 +PAIRING(10, 15, 20, 25, 10, 25, 26, 27, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #447 +PAIRING(10, 15, 20, 25, 10, 26, 26, 27, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #448 +PAIRING(10, 15, 20, 25, 10, 26, 27, 28, 1, 1, 10, 15, 0, 0, 20, 25, 0, 0, 0, 2) // #449 +PAIRING(10, 15, 20, 25, 11, 12, 12, 13, 2, 0, 11, 12, 12, 13, 0, 0, 0, 0, 2, 0) // #450 +PAIRING(10, 15, 20, 25, 11, 12, 12, 15, 2, 0, 11, 12, 12, 15, 0, 0, 0, 0, 2, 0) // #451 +PAIRING(10, 15, 20, 25, 11, 12, 12, 16, 2, 0, 11, 12, 12, 15, 0, 0, 0, 0, 2, 1) // #452 +PAIRING(10, 15, 20, 25, 11, 12, 12, 20, 2, 0, 11, 12, 12, 15, 0, 0, 0, 0, 2, 0) // #453 +PAIRING(10, 15, 20, 25, 11, 12, 12, 21, 2, 1, 11, 12, 12, 15, 20, 21, 0, 0, 2, 1) // #454 +PAIRING(10, 15, 20, 25, 11, 12, 12, 25, 2, 1, 11, 12, 12, 15, 20, 25, 0, 0, 1, 1) // #455 +PAIRING(10, 15, 20, 25, 11, 12, 12, 26, 2, 1, 11, 12, 12, 15, 20, 25, 0, 0, 1, 1) // #456 +PAIRING(10, 15, 20, 25, 11, 12, 13, 14, 2, 0, 11, 12, 13, 14, 0, 0, 0, 0, 2, 0) // #457 +PAIRING(10, 15, 20, 25, 11, 12, 13, 15, 2, 0, 11, 12, 13, 15, 0, 0, 0, 0, 2, 0) // #458 +PAIRING(10, 15, 20, 25, 11, 12, 13, 16, 2, 0, 11, 12, 13, 15, 0, 0, 0, 0, 2, 1) // #459 +PAIRING(10, 15, 20, 25, 11, 12, 13, 20, 2, 0, 11, 12, 13, 15, 0, 0, 0, 0, 2, 0) // #460 +PAIRING(10, 15, 20, 25, 11, 12, 13, 21, 2, 1, 11, 12, 13, 15, 20, 21, 0, 0, 2, 1) // #461 +PAIRING(10, 15, 20, 25, 11, 12, 13, 25, 2, 1, 11, 12, 13, 15, 20, 25, 0, 0, 1, 1) // #462 +PAIRING(10, 15, 20, 25, 11, 12, 13, 26, 2, 1, 11, 12, 13, 15, 20, 25, 0, 0, 1, 1) // #463 +PAIRING(10, 15, 20, 25, 11, 12, 15, 16, 1, 0, 11, 12, 0, 0, 0, 0, 0, 0, 2, 1) // #464 +PAIRING(10, 15, 20, 25, 11, 12, 15, 20, 1, 0, 11, 12, 0, 0, 0, 0, 0, 0, 2, 1) // #465 +PAIRING(10, 15, 20, 25, 11, 12, 15, 21, 1, 1, 11, 12, 0, 0, 20, 21, 0, 0, 2, 1) // #466 +PAIRING(10, 15, 20, 25, 11, 12, 15, 25, 1, 1, 11, 12, 0, 0, 20, 25, 0, 0, 1, 1) // #467 +PAIRING(10, 15, 20, 25, 11, 12, 15, 26, 1, 1, 11, 12, 0, 0, 20, 25, 0, 0, 1, 1) // #468 +PAIRING(10, 15, 20, 25, 11, 12, 16, 17, 1, 0, 11, 12, 0, 0, 0, 0, 0, 0, 2, 1) // #469 +PAIRING(10, 15, 20, 25, 11, 12, 16, 20, 1, 0, 11, 12, 0, 0, 0, 0, 0, 0, 2, 1) // #470 +PAIRING(10, 15, 20, 25, 11, 12, 16, 21, 1, 1, 11, 12, 0, 0, 20, 21, 0, 0, 2, 1) // #471 +PAIRING(10, 15, 20, 25, 11, 12, 16, 25, 1, 1, 11, 12, 0, 0, 20, 25, 0, 0, 1, 1) // #472 +PAIRING(10, 15, 20, 25, 11, 12, 16, 26, 1, 1, 11, 12, 0, 0, 20, 25, 0, 0, 1, 1) // #473 +PAIRING(10, 15, 20, 25, 11, 12, 20, 21, 1, 1, 11, 12, 0, 0, 20, 21, 0, 0, 2, 0) // #474 +PAIRING(10, 15, 20, 25, 11, 12, 20, 25, 1, 1, 11, 12, 0, 0, 20, 25, 0, 0, 1, 0) // #475 +PAIRING(10, 15, 20, 25, 11, 12, 20, 26, 1, 1, 11, 12, 0, 0, 20, 25, 0, 0, 1, 1) // #476 +PAIRING(10, 15, 20, 25, 11, 12, 21, 22, 1, 1, 11, 12, 0, 0, 21, 22, 0, 0, 2, 0) // #477 +PAIRING(10, 15, 20, 25, 11, 12, 21, 25, 1, 1, 11, 12, 0, 0, 21, 25, 0, 0, 2, 0) // #478 +PAIRING(10, 15, 20, 25, 11, 12, 21, 26, 1, 1, 11, 12, 0, 0, 21, 25, 0, 0, 2, 1) // #479 +PAIRING(10, 15, 20, 25, 11, 12, 25, 26, 1, 0, 11, 12, 0, 0, 0, 0, 0, 0, 2, 1) // #480 +PAIRING(10, 15, 20, 25, 11, 12, 26, 27, 1, 0, 11, 12, 0, 0, 0, 0, 0, 0, 2, 1) // #481 +PAIRING(10, 15, 20, 25, 11, 15, 15, 16, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 1) // #482 +PAIRING(10, 15, 20, 25, 11, 15, 15, 20, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 1) // #483 +PAIRING(10, 15, 20, 25, 11, 15, 15, 21, 1, 1, 11, 15, 0, 0, 20, 21, 0, 0, 2, 1) // #484 +PAIRING(10, 15, 20, 25, 11, 15, 15, 25, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 1) // #485 +PAIRING(10, 15, 20, 25, 11, 15, 15, 26, 1, 1, 11, 15, 0, 0, 20, 25, 
0, 0, 1, 1) // #486 +PAIRING(10, 15, 20, 25, 11, 15, 16, 17, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 1) // #487 +PAIRING(10, 15, 20, 25, 11, 15, 16, 20, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 1) // #488 +PAIRING(10, 15, 20, 25, 11, 15, 16, 21, 1, 1, 11, 15, 0, 0, 20, 21, 0, 0, 2, 1) // #489 +PAIRING(10, 15, 20, 25, 11, 15, 16, 25, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 1) // #490 +PAIRING(10, 15, 20, 25, 11, 15, 16, 26, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 1) // #491 +PAIRING(10, 15, 20, 25, 11, 15, 20, 21, 1, 1, 11, 15, 0, 0, 20, 21, 0, 0, 2, 0) // #492 +PAIRING(10, 15, 20, 25, 11, 15, 20, 25, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 0) // #493 +PAIRING(10, 15, 20, 25, 11, 15, 20, 26, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 1) // #494 +PAIRING(10, 15, 20, 25, 11, 15, 21, 22, 1, 1, 11, 15, 0, 0, 21, 22, 0, 0, 2, 0) // #495 +PAIRING(10, 15, 20, 25, 11, 15, 21, 25, 1, 1, 11, 15, 0, 0, 21, 25, 0, 0, 2, 0) // #496 +PAIRING(10, 15, 20, 25, 11, 15, 21, 26, 1, 1, 11, 15, 0, 0, 21, 25, 0, 0, 2, 1) // #497 +PAIRING(10, 15, 20, 25, 11, 15, 25, 26, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 1) // #498 +PAIRING(10, 15, 20, 25, 11, 15, 26, 27, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 1) // #499 +PAIRING(10, 15, 20, 25, 11, 16, 16, 17, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 2) // #500 +PAIRING(10, 15, 20, 25, 11, 16, 16, 20, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 2) // #501 +PAIRING(10, 15, 20, 25, 11, 16, 16, 21, 1, 1, 11, 15, 0, 0, 20, 21, 0, 0, 2, 2) // #502 +PAIRING(10, 15, 20, 25, 11, 16, 16, 25, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 2) // #503 +PAIRING(10, 15, 20, 25, 11, 16, 16, 26, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 2) // #504 +PAIRING(10, 15, 20, 25, 11, 16, 17, 18, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 2) // #505 +PAIRING(10, 15, 20, 25, 11, 16, 17, 20, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 2) // #506 +PAIRING(10, 15, 20, 25, 11, 16, 17, 21, 1, 1, 11, 15, 0, 0, 20, 21, 0, 0, 2, 2) // #507 +PAIRING(10, 15, 20, 25, 11, 16, 17, 25, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 2) // #508 +PAIRING(10, 15, 20, 25, 11, 16, 17, 26, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 2) // #509 +PAIRING(10, 15, 20, 25, 11, 16, 20, 21, 1, 1, 11, 15, 0, 0, 20, 21, 0, 0, 2, 1) // #510 +PAIRING(10, 15, 20, 25, 11, 16, 20, 25, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 1) // #511 +PAIRING(10, 15, 20, 25, 11, 16, 20, 26, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 2) // #512 +PAIRING(10, 15, 20, 25, 11, 16, 21, 22, 1, 1, 11, 15, 0, 0, 21, 22, 0, 0, 2, 1) // #513 +PAIRING(10, 15, 20, 25, 11, 16, 21, 25, 1, 1, 11, 15, 0, 0, 21, 25, 0, 0, 2, 1) // #514 +PAIRING(10, 15, 20, 25, 11, 16, 21, 26, 1, 1, 11, 15, 0, 0, 21, 25, 0, 0, 2, 2) // #515 +PAIRING(10, 15, 20, 25, 11, 16, 25, 26, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 2) // #516 +PAIRING(10, 15, 20, 25, 11, 16, 26, 27, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 2) // #517 +PAIRING(10, 15, 20, 25, 11, 20, 20, 21, 1, 1, 11, 15, 0, 0, 20, 21, 0, 0, 2, 0) // #518 +PAIRING(10, 15, 20, 25, 11, 20, 20, 25, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 0) // #519 +PAIRING(10, 15, 20, 25, 11, 20, 20, 26, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 1) // #520 +PAIRING(10, 15, 20, 25, 11, 20, 21, 22, 1, 1, 11, 15, 0, 0, 21, 22, 0, 0, 2, 0) // #521 +PAIRING(10, 15, 20, 25, 11, 20, 21, 25, 1, 1, 11, 15, 0, 0, 21, 25, 0, 0, 2, 0) // #522 +PAIRING(10, 15, 20, 25, 11, 20, 21, 26, 1, 1, 11, 15, 0, 0, 21, 25, 0, 0, 2, 1) // #523 +PAIRING(10, 15, 20, 25, 11, 20, 25, 26, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 1) // #524 +PAIRING(10, 15, 20, 25, 11, 20, 26, 27, 1, 0, 11, 15, 0, 0, 0, 0, 0, 0, 2, 1) // #525 +PAIRING(10, 15, 20, 25, 11, 21, 21, 22, 1, 2, 11, 15, 0, 0, 20, 21, 21, 22, 2, 1) // 
#526 +PAIRING(10, 15, 20, 25, 11, 21, 21, 25, 1, 2, 11, 15, 0, 0, 20, 21, 21, 25, 1, 1) // #527 +PAIRING(10, 15, 20, 25, 11, 21, 21, 26, 1, 2, 11, 15, 0, 0, 20, 21, 21, 25, 1, 2) // #528 +PAIRING(10, 15, 20, 25, 11, 21, 22, 23, 1, 2, 11, 15, 0, 0, 20, 21, 22, 23, 2, 1) // #529 +PAIRING(10, 15, 20, 25, 11, 21, 22, 25, 1, 2, 11, 15, 0, 0, 20, 21, 22, 25, 2, 1) // #530 +PAIRING(10, 15, 20, 25, 11, 21, 22, 26, 1, 2, 11, 15, 0, 0, 20, 21, 22, 25, 2, 2) // #531 +PAIRING(10, 15, 20, 25, 11, 21, 25, 26, 1, 1, 11, 15, 0, 0, 20, 21, 0, 0, 2, 2) // #532 +PAIRING(10, 15, 20, 25, 11, 21, 26, 27, 1, 1, 11, 15, 0, 0, 20, 21, 0, 0, 2, 2) // #533 +PAIRING(10, 15, 20, 25, 11, 25, 25, 26, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 2) // #534 +PAIRING(10, 15, 20, 25, 11, 25, 26, 27, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 2) // #535 +PAIRING(10, 15, 20, 25, 11, 26, 26, 27, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 2) // #536 +PAIRING(10, 15, 20, 25, 11, 26, 27, 28, 1, 1, 11, 15, 0, 0, 20, 25, 0, 0, 1, 2) // #537 +PAIRING(10, 15, 20, 25, 15, 16, 16, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #538 +PAIRING(10, 15, 20, 25, 15, 16, 16, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #539 +PAIRING(10, 15, 20, 25, 15, 16, 16, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 2) // #540 +PAIRING(10, 15, 20, 25, 15, 16, 16, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #541 +PAIRING(10, 15, 20, 25, 15, 16, 16, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #542 +PAIRING(10, 15, 20, 25, 15, 16, 17, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #543 +PAIRING(10, 15, 20, 25, 15, 16, 17, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #544 +PAIRING(10, 15, 20, 25, 15, 16, 17, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 2) // #545 +PAIRING(10, 15, 20, 25, 15, 16, 17, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #546 +PAIRING(10, 15, 20, 25, 15, 16, 17, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #547 +PAIRING(10, 15, 20, 25, 15, 16, 20, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 1) // #548 +PAIRING(10, 15, 20, 25, 15, 16, 20, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 1) // #549 +PAIRING(10, 15, 20, 25, 15, 16, 20, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #550 +PAIRING(10, 15, 20, 25, 15, 16, 21, 22, 0, 1, 0, 0, 0, 0, 21, 22, 0, 0, 2, 1) // #551 +PAIRING(10, 15, 20, 25, 15, 16, 21, 25, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 1) // #552 +PAIRING(10, 15, 20, 25, 15, 16, 21, 26, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 2) // #553 +PAIRING(10, 15, 20, 25, 15, 16, 25, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #554 +PAIRING(10, 15, 20, 25, 15, 16, 26, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #555 +PAIRING(10, 15, 20, 25, 15, 20, 20, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 1) // #556 +PAIRING(10, 15, 20, 25, 15, 20, 20, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 1) // #557 +PAIRING(10, 15, 20, 25, 15, 20, 20, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #558 +PAIRING(10, 15, 20, 25, 15, 20, 21, 22, 0, 1, 0, 0, 0, 0, 21, 22, 0, 0, 2, 1) // #559 +PAIRING(10, 15, 20, 25, 15, 20, 21, 25, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 1) // #560 +PAIRING(10, 15, 20, 25, 15, 20, 21, 26, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 2) // #561 +PAIRING(10, 15, 20, 25, 15, 20, 25, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #562 +PAIRING(10, 15, 20, 25, 15, 20, 26, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #563 +PAIRING(10, 15, 20, 25, 15, 21, 21, 22, 0, 2, 0, 0, 0, 0, 20, 21, 21, 22, 2, 1) // #564 +PAIRING(10, 15, 20, 25, 15, 21, 21, 25, 0, 2, 0, 0, 0, 0, 20, 21, 21, 25, 1, 1) // #565 +PAIRING(10, 15, 20, 25, 15, 21, 21, 26, 0, 2, 0, 0, 0, 0, 20, 21, 21, 25, 1, 2) // #566 +PAIRING(10, 15, 20, 25, 15, 21, 22, 23, 0, 2, 
0, 0, 0, 0, 20, 21, 22, 23, 2, 1) // #567 +PAIRING(10, 15, 20, 25, 15, 21, 22, 25, 0, 2, 0, 0, 0, 0, 20, 21, 22, 25, 2, 1) // #568 +PAIRING(10, 15, 20, 25, 15, 21, 22, 26, 0, 2, 0, 0, 0, 0, 20, 21, 22, 25, 2, 2) // #569 +PAIRING(10, 15, 20, 25, 15, 21, 25, 26, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 2) // #570 +PAIRING(10, 15, 20, 25, 15, 21, 26, 27, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 2) // #571 +PAIRING(10, 15, 20, 25, 15, 25, 25, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #572 +PAIRING(10, 15, 20, 25, 15, 25, 26, 27, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #573 +PAIRING(10, 15, 20, 25, 15, 26, 26, 27, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #574 +PAIRING(10, 15, 20, 25, 15, 26, 27, 28, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #575 +PAIRING(10, 15, 20, 25, 16, 17, 17, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #576 +PAIRING(10, 15, 20, 25, 16, 17, 17, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #577 +PAIRING(10, 15, 20, 25, 16, 17, 17, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 2) // #578 +PAIRING(10, 15, 20, 25, 16, 17, 17, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #579 +PAIRING(10, 15, 20, 25, 16, 17, 17, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #580 +PAIRING(10, 15, 20, 25, 16, 17, 18, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #581 +PAIRING(10, 15, 20, 25, 16, 17, 18, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #582 +PAIRING(10, 15, 20, 25, 16, 17, 18, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 2) // #583 +PAIRING(10, 15, 20, 25, 16, 17, 18, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #584 +PAIRING(10, 15, 20, 25, 16, 17, 18, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #585 +PAIRING(10, 15, 20, 25, 16, 17, 20, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 1) // #586 +PAIRING(10, 15, 20, 25, 16, 17, 20, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 1) // #587 +PAIRING(10, 15, 20, 25, 16, 17, 20, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #588 +PAIRING(10, 15, 20, 25, 16, 17, 21, 22, 0, 1, 0, 0, 0, 0, 21, 22, 0, 0, 2, 1) // #589 +PAIRING(10, 15, 20, 25, 16, 17, 21, 25, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 1) // #590 +PAIRING(10, 15, 20, 25, 16, 17, 21, 26, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 2) // #591 +PAIRING(10, 15, 20, 25, 16, 17, 25, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #592 +PAIRING(10, 15, 20, 25, 16, 17, 26, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #593 +PAIRING(10, 15, 20, 25, 16, 20, 20, 21, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 1) // #594 +PAIRING(10, 15, 20, 25, 16, 20, 20, 25, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 1) // #595 +PAIRING(10, 15, 20, 25, 16, 20, 20, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #596 +PAIRING(10, 15, 20, 25, 16, 20, 21, 22, 0, 1, 0, 0, 0, 0, 21, 22, 0, 0, 2, 1) // #597 +PAIRING(10, 15, 20, 25, 16, 20, 21, 25, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 1) // #598 +PAIRING(10, 15, 20, 25, 16, 20, 21, 26, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 2) // #599 +PAIRING(10, 15, 20, 25, 16, 20, 25, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #600 +PAIRING(10, 15, 20, 25, 16, 20, 26, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #601 +PAIRING(10, 15, 20, 25, 16, 21, 21, 22, 0, 2, 0, 0, 0, 0, 20, 21, 21, 22, 2, 1) // #602 +PAIRING(10, 15, 20, 25, 16, 21, 21, 25, 0, 2, 0, 0, 0, 0, 20, 21, 21, 25, 1, 1) // #603 +PAIRING(10, 15, 20, 25, 16, 21, 21, 26, 0, 2, 0, 0, 0, 0, 20, 21, 21, 25, 1, 2) // #604 +PAIRING(10, 15, 20, 25, 16, 21, 22, 23, 0, 2, 0, 0, 0, 0, 20, 21, 22, 23, 2, 1) // #605 +PAIRING(10, 15, 20, 25, 16, 21, 22, 25, 0, 2, 0, 0, 0, 0, 20, 21, 22, 25, 2, 1) // #606 +PAIRING(10, 15, 20, 25, 16, 21, 22, 26, 0, 2, 0, 0, 0, 0, 20, 21, 22, 25, 2, 2) // #607 +PAIRING(10, 15, 20, 25, 16, 21, 
25, 26, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 2) // #608 +PAIRING(10, 15, 20, 25, 16, 21, 26, 27, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 2) // #609 +PAIRING(10, 15, 20, 25, 16, 25, 25, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #610 +PAIRING(10, 15, 20, 25, 16, 25, 26, 27, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #611 +PAIRING(10, 15, 20, 25, 16, 26, 26, 27, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #612 +PAIRING(10, 15, 20, 25, 16, 26, 27, 28, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #613 +PAIRING(10, 15, 20, 25, 20, 21, 21, 22, 0, 2, 0, 0, 0, 0, 20, 21, 21, 22, 2, 0) // #614 +PAIRING(10, 15, 20, 25, 20, 21, 21, 25, 0, 2, 0, 0, 0, 0, 20, 21, 21, 25, 1, 0) // #615 +PAIRING(10, 15, 20, 25, 20, 21, 21, 26, 0, 2, 0, 0, 0, 0, 20, 21, 21, 25, 1, 1) // #616 +PAIRING(10, 15, 20, 25, 20, 21, 22, 23, 0, 2, 0, 0, 0, 0, 20, 21, 22, 23, 2, 0) // #617 +PAIRING(10, 15, 20, 25, 20, 21, 22, 25, 0, 2, 0, 0, 0, 0, 20, 21, 22, 25, 2, 0) // #618 +PAIRING(10, 15, 20, 25, 20, 21, 22, 26, 0, 2, 0, 0, 0, 0, 20, 21, 22, 25, 2, 1) // #619 +PAIRING(10, 15, 20, 25, 20, 21, 25, 26, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 1) // #620 +PAIRING(10, 15, 20, 25, 20, 21, 26, 27, 0, 1, 0, 0, 0, 0, 20, 21, 0, 0, 2, 1) // #621 +PAIRING(10, 15, 20, 25, 20, 25, 25, 26, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 1) // #622 +PAIRING(10, 15, 20, 25, 20, 25, 26, 27, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 1) // #623 +PAIRING(10, 15, 20, 25, 20, 26, 26, 27, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #624 +PAIRING(10, 15, 20, 25, 20, 26, 27, 28, 0, 1, 0, 0, 0, 0, 20, 25, 0, 0, 1, 2) // #625 +PAIRING(10, 15, 20, 25, 21, 22, 22, 23, 0, 2, 0, 0, 0, 0, 21, 22, 22, 23, 2, 0) // #626 +PAIRING(10, 15, 20, 25, 21, 22, 22, 25, 0, 2, 0, 0, 0, 0, 21, 22, 22, 25, 2, 0) // #627 +PAIRING(10, 15, 20, 25, 21, 22, 22, 26, 0, 2, 0, 0, 0, 0, 21, 22, 22, 25, 2, 1) // #628 +PAIRING(10, 15, 20, 25, 21, 22, 23, 24, 0, 2, 0, 0, 0, 0, 21, 22, 23, 24, 2, 0) // #629 +PAIRING(10, 15, 20, 25, 21, 22, 23, 25, 0, 2, 0, 0, 0, 0, 21, 22, 23, 25, 2, 0) // #630 +PAIRING(10, 15, 20, 25, 21, 22, 23, 26, 0, 2, 0, 0, 0, 0, 21, 22, 23, 25, 2, 1) // #631 +PAIRING(10, 15, 20, 25, 21, 22, 25, 26, 0, 1, 0, 0, 0, 0, 21, 22, 0, 0, 2, 1) // #632 +PAIRING(10, 15, 20, 25, 21, 22, 26, 27, 0, 1, 0, 0, 0, 0, 21, 22, 0, 0, 2, 1) // #633 +PAIRING(10, 15, 20, 25, 21, 25, 25, 26, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 1) // #634 +PAIRING(10, 15, 20, 25, 21, 25, 26, 27, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 1) // #635 +PAIRING(10, 15, 20, 25, 21, 26, 26, 27, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 2) // #636 +PAIRING(10, 15, 20, 25, 21, 26, 27, 28, 0, 1, 0, 0, 0, 0, 21, 25, 0, 0, 2, 2) // #637 +PAIRING(10, 15, 20, 25, 25, 26, 26, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #638 +PAIRING(10, 15, 20, 25, 25, 26, 27, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #639 +PAIRING(10, 15, 20, 25, 26, 27, 27, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #640 +PAIRING(10, 15, 20, 25, 26, 27, 28, 29, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2) // #641 diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/unordered.h b/TMessagesProj/jni/third_party/breakpad/src/common/unordered.h new file mode 100644 index 0000000000..ec665cc026 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/unordered.h @@ -0,0 +1,62 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Include this file to use unordered_map and unordered_set. If tr1 +// or C++11 is not available, you can switch to using hash_set and +// hash_map by defining BP_USE_HASH_SET. + +#ifndef COMMON_UNORDERED_H_ +#define COMMON_UNORDERED_H_ + +#if defined(BP_USE_HASH_SET) +#include +#include + +// For hash. +#include "util/hash/hash.h" + +template > +struct unordered_map : public hash_map {}; +template > +struct unordered_set : public hash_set {}; + +#elif defined(_LIBCPP_VERSION) // c++11 +#include +#include +using std::unordered_map; +using std::unordered_set; + +#else // Fallback to tr1::unordered +#include +#include +using std::tr1::unordered_map; +using std::tr1::unordered_set; +#endif + +#endif // COMMON_UNORDERED_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/common/using_std_string.h b/TMessagesProj/jni/third_party/breakpad/src/common/using_std_string.h new file mode 100644 index 0000000000..13c1da59cc --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/common/using_std_string.h @@ -0,0 +1,65 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2012, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Ivan Penkov + +// using_std_string.h: Allows building this code in environments where +// global string (::string) exists. +// +// The problem: +// ------------- +// Let's say you want to build this code in an environment where a global +// string type is defined (i.e. ::string). Now, let's suppose that ::string +// is different that std::string and you'd like to have the option to easily +// choose between the two string types. Ideally you'd like to control which +// string type is chosen by simply #defining an identifier. +// +// The solution: +// ------------- +// #define HAS_GLOBAL_STRING somewhere in a global header file and then +// globally replace std::string with string. Then include this header +// file everywhere where string is used. If you want to revert back to +// using std::string, simply remove the #define (HAS_GLOBAL_STRING). + +#ifndef THIRD_PARTY_BREAKPAD_SRC_COMMON_USING_STD_STRING_H_ +#define THIRD_PARTY_BREAKPAD_SRC_COMMON_USING_STD_STRING_H_ + +#ifdef HAS_GLOBAL_STRING + typedef ::string google_breakpad_string; +#else + using std::string; + typedef std::string google_breakpad_string; +#endif + +// Inicates that type google_breakpad_string is defined +#define HAS_GOOGLE_BREAKPAD_STRING + +#endif // THIRD_PARTY_BREAKPAD_SRC_COMMON_USING_STD_STRING_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/config.h.in b/TMessagesProj/jni/third_party/breakpad/src/config.h.in new file mode 100644 index 0000000000..1db0159309 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/config.h.in @@ -0,0 +1,76 @@ +/* src/config.h.in. Generated from configure.ac by autoheader. */ + +/* Define to 1 if you have the header file. */ +#undef HAVE_A_OUT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_INTTYPES_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_MEMORY_H + +/* Define if you have POSIX threads libraries and header files. */ +#undef HAVE_PTHREAD + +/* Define to 1 if you have the header file. */ +#undef HAVE_STDINT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STDLIB_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STRINGS_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STRING_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_STAT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_TYPES_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_UNISTD_H + +/* Name of package */ +#undef PACKAGE + +/* Define to the address where bug reports for this package should be sent. */ +#undef PACKAGE_BUGREPORT + +/* Define to the full name of this package. */ +#undef PACKAGE_NAME + +/* Define to the full name and version of this package. 
*/ +#undef PACKAGE_STRING + +/* Define to the one symbol short name of this package. */ +#undef PACKAGE_TARNAME + +/* Define to the home page for this package. */ +#undef PACKAGE_URL + +/* Define to the version of this package. */ +#undef PACKAGE_VERSION + +/* Define to necessary symbol if this constant uses a non-standard name on + your system. */ +#undef PTHREAD_CREATE_JOINABLE + +/* Define to 1 if you have the ANSI C header files. */ +#undef STDC_HEADERS + +/* Version number of package */ +#undef VERSION + +/* Enable large inode numbers on Mac OS X 10.5. */ +#ifndef _DARWIN_USE_64_BIT_INODE +# define _DARWIN_USE_64_BIT_INODE 1 +#endif + +/* Number of bits in a file offset, on hosts where this is settable. */ +#undef _FILE_OFFSET_BITS + +/* Define for large files, on AIX-style hosts. */ +#undef _LARGE_FILES diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/breakpad_types.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/breakpad_types.h new file mode 100644 index 0000000000..e92436ff29 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/breakpad_types.h @@ -0,0 +1,86 @@ +/* Copyright (c) 2006, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +/* breakpad_types.h: Precise-width types + * + * (This is C99 source, please don't corrupt it with C++.) + * + * This file ensures that types uintN_t are defined for N = 8, 16, 32, and + * 64. Types of precise widths are crucial to the task of writing data + * structures on one platform and reading them on another. 
+ * + * Author: Mark Mentovai */ + +#ifndef GOOGLE_BREAKPAD_COMMON_BREAKPAD_TYPES_H__ +#define GOOGLE_BREAKPAD_COMMON_BREAKPAD_TYPES_H__ + +#ifndef _WIN32 + +#ifndef __STDC_FORMAT_MACROS +#define __STDC_FORMAT_MACROS +#endif /* __STDC_FORMAT_MACROS */ +#include + +#else /* !_WIN32 */ + +#if _MSC_VER >= 1600 +#include +#elif defined(BREAKPAD_CUSTOM_STDINT_H) +/* Visual C++ Pre-2010 did not ship a stdint.h, so allow + * consumers of this library to provide their own because + * there are often subtle type incompatibilities. + */ +#include BREAKPAD_CUSTOM_STDINT_H +#else +#include + +typedef unsigned __int8 uint8_t; +typedef unsigned __int16 uint16_t; +typedef __int32 int32_t; +typedef unsigned __int32 uint32_t; +typedef unsigned __int64 uint64_t; +#endif + +#endif /* !_WIN32 */ + +typedef struct { + uint64_t high; + uint64_t low; +} uint128_struct; + +typedef uint64_t breakpad_time_t; + +/* Try to get PRIx64 from inttypes.h, but if it's not defined, fall back to + * llx, which is the format string for "long long" - this is a 64-bit + * integral type on many systems. */ +#ifndef PRIx64 +#define PRIx64 "llx" +#endif /* !PRIx64 */ + +#endif /* GOOGLE_BREAKPAD_COMMON_BREAKPAD_TYPES_H__ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_amd64.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_amd64.h new file mode 100644 index 0000000000..4256706d77 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_amd64.h @@ -0,0 +1,235 @@ +/* Copyright (c) 2006, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +/* minidump_format.h: A cross-platform reimplementation of minidump-related + * portions of DbgHelp.h from the Windows Platform SDK. + * + * (This is C99 source, please don't corrupt it with C++.) + * + * This file contains the necessary definitions to read minidump files + * produced on amd64. 
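Illustrative aside, not drawn from this patch: a minimal sketch of how the precise-width types and the PRIx64 fallback in breakpad_types.h above are meant to be consumed, assuming the breakpad sources are on the include path.

#include <stdio.h>
#include "google_breakpad/common/breakpad_types.h"

int main(void) {
  /* uint128_struct splits a 128-bit value into two 64-bit halves. */
  uint128_struct v = { 0x0123456789abcdefULL, 0xfedcba9876543210ULL };
  /* PRIx64 comes from <inttypes.h>, or the "llx" fallback defined above. */
  printf("high=0x%016" PRIx64 " low=0x%016" PRIx64 "\n", v.high, v.low);
  return 0;
}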
These files may be read on any platform provided + * that the alignments of these structures on the processing system are + * identical to the alignments of these structures on the producing system. + * For this reason, precise-sized types are used. The structures defined + * by this file have been laid out to minimize alignment problems by ensuring + * ensuring that all members are aligned on their natural boundaries. In + * In some cases, tail-padding may be significant when different ABIs specify + * different tail-padding behaviors. To avoid problems when reading or + * writing affected structures, MD_*_SIZE macros are provided where needed, + * containing the useful size of the structures without padding. + * + * Structures that are defined by Microsoft to contain a zero-length array + * are instead defined here to contain an array with one element, as + * zero-length arrays are forbidden by standard C and C++. In these cases, + * *_minsize constants are provided to be used in place of sizeof. For a + * cleaner interface to these sizes when using C++, see minidump_size.h. + * + * These structures are also sufficient to populate minidump files. + * + * These definitions may be extended to support handling minidump files + * for other CPUs and other operating systems. + * + * Because precise data type sizes are crucial for this implementation to + * function properly and portably in terms of interoperability with minidumps + * produced by DbgHelp on Windows, a set of primitive types with known sizes + * are used as the basis of each structure defined by this file. DbgHelp + * on Windows is assumed to be the reference implementation; this file + * seeks to provide a cross-platform compatible implementation. To avoid + * collisions with the types and values defined and used by DbgHelp in the + * event that this implementation is used on Windows, each type and value + * defined here is given a new name, beginning with "MD". Names of the + * equivalent types and values in the Windows Platform SDK are given in + * comments. + * + * Author: Mark Mentovai + * Change to split into its own file: Neal Sidhwaney */ + +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_AMD64_H__ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_AMD64_H__ + + +/* + * AMD64 support, see WINNT.H + */ + +typedef struct { + uint16_t control_word; + uint16_t status_word; + uint8_t tag_word; + uint8_t reserved1; + uint16_t error_opcode; + uint32_t error_offset; + uint16_t error_selector; + uint16_t reserved2; + uint32_t data_offset; + uint16_t data_selector; + uint16_t reserved3; + uint32_t mx_csr; + uint32_t mx_csr_mask; + uint128_struct float_registers[8]; + uint128_struct xmm_registers[16]; + uint8_t reserved4[96]; +} MDXmmSaveArea32AMD64; /* XMM_SAVE_AREA32 */ + +#define MD_CONTEXT_AMD64_VR_COUNT 26 + +typedef struct { + /* + * Register parameter home addresses. 
+ */ + uint64_t p1_home; + uint64_t p2_home; + uint64_t p3_home; + uint64_t p4_home; + uint64_t p5_home; + uint64_t p6_home; + + /* The next field determines the layout of the structure, and which parts + * of it are populated */ + uint32_t context_flags; + uint32_t mx_csr; + + /* The next register is included with MD_CONTEXT_AMD64_CONTROL */ + uint16_t cs; + + /* The next 4 registers are included with MD_CONTEXT_AMD64_SEGMENTS */ + uint16_t ds; + uint16_t es; + uint16_t fs; + uint16_t gs; + + /* The next 2 registers are included with MD_CONTEXT_AMD64_CONTROL */ + uint16_t ss; + uint32_t eflags; + + /* The next 6 registers are included with MD_CONTEXT_AMD64_DEBUG_REGISTERS */ + uint64_t dr0; + uint64_t dr1; + uint64_t dr2; + uint64_t dr3; + uint64_t dr6; + uint64_t dr7; + + /* The next 4 registers are included with MD_CONTEXT_AMD64_INTEGER */ + uint64_t rax; + uint64_t rcx; + uint64_t rdx; + uint64_t rbx; + + /* The next register is included with MD_CONTEXT_AMD64_CONTROL */ + uint64_t rsp; + + /* The next 11 registers are included with MD_CONTEXT_AMD64_INTEGER */ + uint64_t rbp; + uint64_t rsi; + uint64_t rdi; + uint64_t r8; + uint64_t r9; + uint64_t r10; + uint64_t r11; + uint64_t r12; + uint64_t r13; + uint64_t r14; + uint64_t r15; + + /* The next register is included with MD_CONTEXT_AMD64_CONTROL */ + uint64_t rip; + + /* The next set of registers are included with + * MD_CONTEXT_AMD64_FLOATING_POINT + */ + union { + MDXmmSaveArea32AMD64 flt_save; + struct { + uint128_struct header[2]; + uint128_struct legacy[8]; + uint128_struct xmm0; + uint128_struct xmm1; + uint128_struct xmm2; + uint128_struct xmm3; + uint128_struct xmm4; + uint128_struct xmm5; + uint128_struct xmm6; + uint128_struct xmm7; + uint128_struct xmm8; + uint128_struct xmm9; + uint128_struct xmm10; + uint128_struct xmm11; + uint128_struct xmm12; + uint128_struct xmm13; + uint128_struct xmm14; + uint128_struct xmm15; + } sse_registers; + }; + + uint128_struct vector_register[MD_CONTEXT_AMD64_VR_COUNT]; + uint64_t vector_control; + + /* The next 5 registers are included with MD_CONTEXT_AMD64_DEBUG_REGISTERS */ + uint64_t debug_control; + uint64_t last_branch_to_rip; + uint64_t last_branch_from_rip; + uint64_t last_exception_to_rip; + uint64_t last_exception_from_rip; + +} MDRawContextAMD64; /* CONTEXT */ + +/* For (MDRawContextAMD64).context_flags. These values indicate the type of + * context stored in the structure. The high 24 bits identify the CPU, the + * low 8 bits identify the type of context saved. */ +#define MD_CONTEXT_AMD64 0x00100000 /* CONTEXT_AMD64 */ +#define MD_CONTEXT_AMD64_CONTROL (MD_CONTEXT_AMD64 | 0x00000001) + /* CONTEXT_CONTROL */ +#define MD_CONTEXT_AMD64_INTEGER (MD_CONTEXT_AMD64 | 0x00000002) + /* CONTEXT_INTEGER */ +#define MD_CONTEXT_AMD64_SEGMENTS (MD_CONTEXT_AMD64 | 0x00000004) + /* CONTEXT_SEGMENTS */ +#define MD_CONTEXT_AMD64_FLOATING_POINT (MD_CONTEXT_AMD64 | 0x00000008) + /* CONTEXT_FLOATING_POINT */ +#define MD_CONTEXT_AMD64_DEBUG_REGISTERS (MD_CONTEXT_AMD64 | 0x00000010) + /* CONTEXT_DEBUG_REGISTERS */ +#define MD_CONTEXT_AMD64_XSTATE (MD_CONTEXT_AMD64 | 0x00000040) + /* CONTEXT_XSTATE */ + +/* WinNT.h refers to CONTEXT_MMX_REGISTERS but doesn't appear to define it + * I think it really means CONTEXT_FLOATING_POINT. 
+ */ + +#define MD_CONTEXT_AMD64_FULL (MD_CONTEXT_AMD64_CONTROL | \ + MD_CONTEXT_AMD64_INTEGER | \ + MD_CONTEXT_AMD64_FLOATING_POINT) + /* CONTEXT_FULL */ + +#define MD_CONTEXT_AMD64_ALL (MD_CONTEXT_AMD64_FULL | \ + MD_CONTEXT_AMD64_SEGMENTS | \ + MD_CONTEXT_X86_DEBUG_REGISTERS) + /* CONTEXT_ALL */ + + +#endif /* GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_AMD64_H__ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_arm.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_arm.h new file mode 100644 index 0000000000..6a71138337 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_arm.h @@ -0,0 +1,151 @@ +/* Copyright (c) 2009, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +/* minidump_format.h: A cross-platform reimplementation of minidump-related + * portions of DbgHelp.h from the Windows Platform SDK. + * + * (This is C99 source, please don't corrupt it with C++.) + * + * This file contains the necessary definitions to read minidump files + * produced on ARM. These files may be read on any platform provided + * that the alignments of these structures on the processing system are + * identical to the alignments of these structures on the producing system. + * For this reason, precise-sized types are used. The structures defined + * by this file have been laid out to minimize alignment problems by + * ensuring that all members are aligned on their natural boundaries. + * In some cases, tail-padding may be significant when different ABIs specify + * different tail-padding behaviors. To avoid problems when reading or + * writing affected structures, MD_*_SIZE macros are provided where needed, + * containing the useful size of the structures without padding. + * + * Structures that are defined by Microsoft to contain a zero-length array + * are instead defined here to contain an array with one element, as + * zero-length arrays are forbidden by standard C and C++. 
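Illustrative sketch, not part of the change: the context_flags convention above means each register group is only meaningful when its flag bits are present, mirroring how DbgHelp marks populated groups. The helper name and the assumption that breakpad_types.h is included first (as minidump_format.h normally arranges) are assumptions of this sketch.

#include <stdio.h>
#include "google_breakpad/common/breakpad_types.h"
#include "google_breakpad/common/minidump_cpu_amd64.h"

/* Sketch: only trust register groups whose flag bits are all set. */
static void dump_amd64_frame(const MDRawContextAMD64 *ctx) {
  if ((ctx->context_flags & MD_CONTEXT_AMD64_CONTROL) == MD_CONTEXT_AMD64_CONTROL)
    printf("rip=0x%" PRIx64 " rsp=0x%" PRIx64 "\n", ctx->rip, ctx->rsp);
  if ((ctx->context_flags & MD_CONTEXT_AMD64_INTEGER) == MD_CONTEXT_AMD64_INTEGER)
    printf("rax=0x%" PRIx64 " rbx=0x%" PRIx64 "\n", ctx->rax, ctx->rbx);
}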
In these cases, + * *_minsize constants are provided to be used in place of sizeof. For a + * cleaner interface to these sizes when using C++, see minidump_size.h. + * + * These structures are also sufficient to populate minidump files. + * + * Because precise data type sizes are crucial for this implementation to + * function properly and portably, a set of primitive types with known sizes + * are used as the basis of each structure defined by this file. + * + * Author: Julian Seward + */ + +/* + * ARM support + */ + +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_ARM_H__ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_ARM_H__ + +#define MD_FLOATINGSAVEAREA_ARM_FPR_COUNT 32 +#define MD_FLOATINGSAVEAREA_ARM_FPEXTRA_COUNT 8 + +/* + * Note that these structures *do not* map directly to the CONTEXT + * structure defined in WinNT.h in the Windows Mobile SDK. That structure + * does not accomodate VFPv3, and I'm unsure if it was ever used in the + * wild anyway, as Windows CE only seems to produce "cedumps" which + * are not exactly minidumps. + */ +typedef struct { + uint64_t fpscr; /* FPU status register */ + + /* 32 64-bit floating point registers, d0 .. d31. */ + uint64_t regs[MD_FLOATINGSAVEAREA_ARM_FPR_COUNT]; + + /* Miscellaneous control words */ + uint32_t extra[MD_FLOATINGSAVEAREA_ARM_FPEXTRA_COUNT]; +} MDFloatingSaveAreaARM; + +#define MD_CONTEXT_ARM_GPR_COUNT 16 + +typedef struct { + /* The next field determines the layout of the structure, and which parts + * of it are populated + */ + uint32_t context_flags; + + /* 16 32-bit integer registers, r0 .. r15 + * Note the following fixed uses: + * r13 is the stack pointer + * r14 is the link register + * r15 is the program counter + */ + uint32_t iregs[MD_CONTEXT_ARM_GPR_COUNT]; + + /* CPSR (flags, basically): 32 bits: + bit 31 - N (negative) + bit 30 - Z (zero) + bit 29 - C (carry) + bit 28 - V (overflow) + bit 27 - Q (saturation flag, sticky) + All other fields -- ignore */ + uint32_t cpsr; + + /* The next field is included with MD_CONTEXT_ARM_FLOATING_POINT */ + MDFloatingSaveAreaARM float_save; + +} MDRawContextARM; + +/* Indices into iregs for registers with a dedicated or conventional + * purpose. + */ +enum MDARMRegisterNumbers { + MD_CONTEXT_ARM_REG_IOS_FP = 7, + MD_CONTEXT_ARM_REG_FP = 11, + MD_CONTEXT_ARM_REG_SP = 13, + MD_CONTEXT_ARM_REG_LR = 14, + MD_CONTEXT_ARM_REG_PC = 15 +}; + +/* For (MDRawContextARM).context_flags. These values indicate the type of + * context stored in the structure. */ +/* CONTEXT_ARM from the Windows CE 5.0 SDK. This value isn't correct + * because this bit can be used for flags. Presumably this value was + * never actually used in minidumps, but only in "CEDumps" which + * are a whole parallel minidump file format for Windows CE. + * Therefore, Breakpad defines its own value for ARM CPUs. + */ +#define MD_CONTEXT_ARM_OLD 0x00000040 +/* This value was chosen to avoid likely conflicts with MD_CONTEXT_* + * for other CPUs. 
*/ +#define MD_CONTEXT_ARM 0x40000000 +#define MD_CONTEXT_ARM_INTEGER (MD_CONTEXT_ARM | 0x00000002) +#define MD_CONTEXT_ARM_FLOATING_POINT (MD_CONTEXT_ARM | 0x00000004) + +#define MD_CONTEXT_ARM_FULL (MD_CONTEXT_ARM_INTEGER | \ + MD_CONTEXT_ARM_FLOATING_POINT) + +#define MD_CONTEXT_ARM_ALL (MD_CONTEXT_ARM_INTEGER | \ + MD_CONTEXT_ARM_FLOATING_POINT) + +#endif /* GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_ARM_H__ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_arm64.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_arm64.h new file mode 100644 index 0000000000..5ace0d9de4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_arm64.h @@ -0,0 +1,140 @@ +/* Copyright 2013 Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +/* minidump_format.h: A cross-platform reimplementation of minidump-related + * portions of DbgHelp.h from the Windows Platform SDK. + * + * (This is C99 source, please don't corrupt it with C++.) + * + * This file contains the necessary definitions to read minidump files + * produced on ARM. These files may be read on any platform provided + * that the alignments of these structures on the processing system are + * identical to the alignments of these structures on the producing system. + * For this reason, precise-sized types are used. The structures defined + * by this file have been laid out to minimize alignment problems by + * ensuring that all members are aligned on their natural boundaries. + * In some cases, tail-padding may be significant when different ABIs specify + * different tail-padding behaviors. To avoid problems when reading or + * writing affected structures, MD_*_SIZE macros are provided where needed, + * containing the useful size of the structures without padding. 
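A hedged sketch of reading the ARM context defined above (the function name is illustrative, and breakpad_types.h is assumed to be included first): the register-number enum keeps callers from hard-coding r13/r15.

#include <stdio.h>
#include "google_breakpad/common/breakpad_types.h"
#include "google_breakpad/common/minidump_cpu_arm.h"

static void dump_arm_frame(const MDRawContextARM *ctx) {
  if ((ctx->context_flags & MD_CONTEXT_ARM_INTEGER) == MD_CONTEXT_ARM_INTEGER) {
    /* r15 is the program counter, r13 the stack pointer (see the enum above). */
    printf("pc=0x%08x sp=0x%08x\n",
           (unsigned)ctx->iregs[MD_CONTEXT_ARM_REG_PC],
           (unsigned)ctx->iregs[MD_CONTEXT_ARM_REG_SP]);
  }
}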
+ * + * Structures that are defined by Microsoft to contain a zero-length array + * are instead defined here to contain an array with one element, as + * zero-length arrays are forbidden by standard C and C++. In these cases, + * *_minsize constants are provided to be used in place of sizeof. For a + * cleaner interface to these sizes when using C++, see minidump_size.h. + * + * These structures are also sufficient to populate minidump files. + * + * Because precise data type sizes are crucial for this implementation to + * function properly and portably, a set of primitive types with known sizes + * are used as the basis of each structure defined by this file. + * + * Author: Colin Blundell + */ + +/* + * ARM64 support + */ + +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_ARM64_H__ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_ARM64_H__ + +#define MD_FLOATINGSAVEAREA_ARM64_FPR_COUNT 32 + +typedef struct { + uint32_t fpsr; /* FPU status register */ + uint32_t fpcr; /* FPU control register */ + + /* 32 128-bit floating point registers, d0 .. d31. */ + uint128_struct regs[MD_FLOATINGSAVEAREA_ARM64_FPR_COUNT]; +} MDFloatingSaveAreaARM64; + +#define MD_CONTEXT_ARM64_GPR_COUNT 33 + +/* Use the same 32-bit alignment when accessing this structure from 64-bit code + * as is used natively in 32-bit code. */ +#pragma pack(push, 4) + +typedef struct { + /* The next field determines the layout of the structure, and which parts + * of it are populated + */ + uint64_t context_flags; + + /* 33 64-bit integer registers, x0 .. x31 + the PC + * Note the following fixed uses: + * x29 is the frame pointer + * x30 is the link register + * x31 is the stack pointer + * The PC is effectively x32. + */ + uint64_t iregs[MD_CONTEXT_ARM64_GPR_COUNT]; + + /* CPSR (flags, basically): 32 bits: + bit 31 - N (negative) + bit 30 - Z (zero) + bit 29 - C (carry) + bit 28 - V (overflow) + bit 27 - Q (saturation flag, sticky) + All other fields -- ignore */ + uint32_t cpsr; + + /* The next field is included with MD_CONTEXT64_ARM_FLOATING_POINT */ + MDFloatingSaveAreaARM64 float_save; + +} MDRawContextARM64; + +#pragma pack(pop) + +/* Indices into iregs for registers with a dedicated or conventional + * purpose. + */ +enum MDARM64RegisterNumbers { + MD_CONTEXT_ARM64_REG_FP = 29, + MD_CONTEXT_ARM64_REG_LR = 30, + MD_CONTEXT_ARM64_REG_SP = 31, + MD_CONTEXT_ARM64_REG_PC = 32 +}; + +/* For (MDRawContextARM64).context_flags. These values indicate the type of + * context stored in the structure. MD_CONTEXT_ARM64 is Breakpad-defined. + * This value was chosen to avoid likely conflicts with MD_CONTEXT_* + * for other CPUs. */ +#define MD_CONTEXT_ARM64 0x80000000 +#define MD_CONTEXT_ARM64_INTEGER (MD_CONTEXT_ARM64 | 0x00000002) +#define MD_CONTEXT_ARM64_FLOATING_POINT (MD_CONTEXT_ARM64 | 0x00000004) + +#define MD_CONTEXT_ARM64_FULL (MD_CONTEXT_ARM64_INTEGER | \ + MD_CONTEXT_ARM64_FLOATING_POINT) + +#define MD_CONTEXT_ARM64_ALL (MD_CONTEXT_ARM64_INTEGER | \ + MD_CONTEXT_ARM64_FLOATING_POINT) + +#endif /* GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_ARM64_H__ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_mips.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_mips.h new file mode 100644 index 0000000000..6cbe3023f9 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_mips.h @@ -0,0 +1,160 @@ +/* Copyright (c) 2013, Google Inc. + * All rights reserved. 
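Likewise for ARM64, a minimal consumer sketch under the same assumptions (names and include order are not part of the patch); iregs is indexed through the enum above and the pc rides along as a 33rd slot.

#include <stdio.h>
#include "google_breakpad/common/breakpad_types.h"
#include "google_breakpad/common/minidump_cpu_arm64.h"

static void dump_arm64_frame(const MDRawContextARM64 *ctx) {
  if ((ctx->context_flags & MD_CONTEXT_ARM64_INTEGER) == MD_CONTEXT_ARM64_INTEGER) {
    /* x29 = frame pointer, x31 = stack pointer, index 32 = pc (see enum above). */
    printf("pc=0x%" PRIx64 " sp=0x%" PRIx64 " fp=0x%" PRIx64 "\n",
           ctx->iregs[MD_CONTEXT_ARM64_REG_PC],
           ctx->iregs[MD_CONTEXT_ARM64_REG_SP],
           ctx->iregs[MD_CONTEXT_ARM64_REG_FP]);
  }
}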
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +/* minidump_format.h: A cross-platform reimplementation of minidump-related + * portions of DbgHelp.h from the Windows Platform SDK. + * + * (This is C99 source, please don't corrupt it with C++.) + * + * This file contains the necessary definitions to read minidump files + * produced on MIPS. These files may be read on any platform provided + * that the alignments of these structures on the processing system are + * identical to the alignments of these structures on the producing system. + * For this reason, precise-sized types are used. The structures defined + * by this file have been laid out to minimize alignment problems by + * ensuring that all members are aligned on their natural boundaries. + * In some cases, tail-padding may be significant when different ABIs specify + * different tail-padding behaviors. To avoid problems when reading or + * writing affected structures, MD_*_SIZE macros are provided where needed, + * containing the useful size of the structures without padding. + * + * Structures that are defined by Microsoft to contain a zero-length array + * are instead defined here to contain an array with one element, as + * zero-length arrays are forbidden by standard C and C++. In these cases, + * *_minsize constants are provided to be used in place of sizeof. For a + * cleaner interface to these sizes when using C++, see minidump_size.h. + * + * These structures are also sufficient to populate minidump files. + * + * Because precise data type sizes are crucial for this implementation to + * function properly and portably, a set of primitive types with known sizes + * are used as the basis of each structure defined by this file. 
+ * + * Author: Chris Dearman + */ + +/* + * MIPS support + */ + +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_MIPS_H__ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_MIPS_H__ + +#define MD_CONTEXT_MIPS_GPR_COUNT 32 +#define MD_FLOATINGSAVEAREA_MIPS_FPR_COUNT 32 +#define MD_CONTEXT_MIPS_DSP_COUNT 3 + +/* + * Note that these structures *do not* map directly to the CONTEXT + * structure defined in WinNT.h in the Windows Mobile SDK. That structure + * does not accomodate VFPv3, and I'm unsure if it was ever used in the + * wild anyway, as Windows CE only seems to produce "cedumps" which + * are not exactly minidumps. + */ +typedef struct { + /* 32 64-bit floating point registers, f0..f31 */ + uint64_t regs[MD_FLOATINGSAVEAREA_MIPS_FPR_COUNT]; + + uint32_t fpcsr; /* FPU status register. */ + uint32_t fir; /* FPU implementation register. */ +} MDFloatingSaveAreaMIPS; + +typedef struct { + /* The next field determines the layout of the structure, and which parts + * of it are populated. + */ + uint32_t context_flags; + uint32_t _pad0; + + /* 32 64-bit integer registers, r0..r31. + * Note the following fixed uses: + * r29 is the stack pointer. + * r31 is the return address. + */ + uint64_t iregs[MD_CONTEXT_MIPS_GPR_COUNT]; + + /* multiply/divide result. */ + uint64_t mdhi, mdlo; + + /* DSP accumulators. */ + uint32_t hi[MD_CONTEXT_MIPS_DSP_COUNT]; + uint32_t lo[MD_CONTEXT_MIPS_DSP_COUNT]; + uint32_t dsp_control; + uint32_t _pad1; + + uint64_t epc; + uint64_t badvaddr; + uint32_t status; + uint32_t cause; + + /* The next field is included with MD_CONTEXT_MIPS_FLOATING_POINT. */ + MDFloatingSaveAreaMIPS float_save; + +} MDRawContextMIPS; + +/* Indices into iregs for registers with a dedicated or conventional + * purpose. + */ +enum MDMIPSRegisterNumbers { + MD_CONTEXT_MIPS_REG_S0 = 16, + MD_CONTEXT_MIPS_REG_S1 = 17, + MD_CONTEXT_MIPS_REG_S2 = 18, + MD_CONTEXT_MIPS_REG_S3 = 19, + MD_CONTEXT_MIPS_REG_S4 = 20, + MD_CONTEXT_MIPS_REG_S5 = 21, + MD_CONTEXT_MIPS_REG_S6 = 22, + MD_CONTEXT_MIPS_REG_S7 = 23, + MD_CONTEXT_MIPS_REG_GP = 28, + MD_CONTEXT_MIPS_REG_SP = 29, + MD_CONTEXT_MIPS_REG_FP = 30, + MD_CONTEXT_MIPS_REG_RA = 31, +}; + +/* For (MDRawContextMIPS).context_flags. These values indicate the type of + * context stored in the structure. */ +/* CONTEXT_MIPS from the Windows CE 5.0 SDK. This value isn't correct + * because this bit can be used for flags. Presumably this value was + * never actually used in minidumps, but only in "CEDumps" which + * are a whole parallel minidump file format for Windows CE. + * Therefore, Breakpad defines its own value for MIPS CPUs. + */ +#define MD_CONTEXT_MIPS 0x00040000 +#define MD_CONTEXT_MIPS_INTEGER (MD_CONTEXT_MIPS | 0x00000002) +#define MD_CONTEXT_MIPS_FLOATING_POINT (MD_CONTEXT_MIPS | 0x00000004) +#define MD_CONTEXT_MIPS_DSP (MD_CONTEXT_MIPS | 0x00000008) + +#define MD_CONTEXT_MIPS_FULL (MD_CONTEXT_MIPS_INTEGER | \ + MD_CONTEXT_MIPS_FLOATING_POINT | \ + MD_CONTEXT_MIPS_DSP) + +#define MD_CONTEXT_MIPS_ALL (MD_CONTEXT_MIPS_INTEGER | \ + MD_CONTEXT_MIPS_FLOATING_POINT \ + MD_CONTEXT_MIPS_DSP) + +#endif // GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_MIPS_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_ppc.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_ppc.h new file mode 100644 index 0000000000..02ac322023 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_ppc.h @@ -0,0 +1,163 @@ +/* Copyright (c) 2006, Google Inc. + * All rights reserved. 
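The same pattern applies to MIPS; as a sketch under the same assumptions, epc carries the program counter directly, while sp and ra come out of iregs via the enum above.

#include <stdio.h>
#include "google_breakpad/common/breakpad_types.h"
#include "google_breakpad/common/minidump_cpu_mips.h"

static void dump_mips_frame(const MDRawContextMIPS *ctx) {
  if ((ctx->context_flags & MD_CONTEXT_MIPS_INTEGER) == MD_CONTEXT_MIPS_INTEGER) {
    /* r29 is the stack pointer and r31 the return address on MIPS. */
    printf("epc=0x%" PRIx64 " sp=0x%" PRIx64 " ra=0x%" PRIx64 "\n",
           ctx->epc,
           ctx->iregs[MD_CONTEXT_MIPS_REG_SP],
           ctx->iregs[MD_CONTEXT_MIPS_REG_RA]);
  }
}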
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +/* minidump_format.h: A cross-platform reimplementation of minidump-related + * portions of DbgHelp.h from the Windows Platform SDK. + * + * (This is C99 source, please don't corrupt it with C++.) + * + * This file contains the necessary definitions to read minidump files + * produced on ppc. These files may be read on any platform provided + * that the alignments of these structures on the processing system are + * identical to the alignments of these structures on the producing system. + * For this reason, precise-sized types are used. The structures defined + * by this file have been laid out to minimize alignment problems by ensuring + * ensuring that all members are aligned on their natural boundaries. In + * In some cases, tail-padding may be significant when different ABIs specify + * different tail-padding behaviors. To avoid problems when reading or + * writing affected structures, MD_*_SIZE macros are provided where needed, + * containing the useful size of the structures without padding. + * + * Structures that are defined by Microsoft to contain a zero-length array + * are instead defined here to contain an array with one element, as + * zero-length arrays are forbidden by standard C and C++. In these cases, + * *_minsize constants are provided to be used in place of sizeof. For a + * cleaner interface to these sizes when using C++, see minidump_size.h. + * + * These structures are also sufficient to populate minidump files. + * + * These definitions may be extended to support handling minidump files + * for other CPUs and other operating systems. + * + * Because precise data type sizes are crucial for this implementation to + * function properly and portably in terms of interoperability with minidumps + * produced by DbgHelp on Windows, a set of primitive types with known sizes + * are used as the basis of each structure defined by this file. DbgHelp + * on Windows is assumed to be the reference implementation; this file + * seeks to provide a cross-platform compatible implementation. 
To avoid + * collisions with the types and values defined and used by DbgHelp in the + * event that this implementation is used on Windows, each type and value + * defined here is given a new name, beginning with "MD". Names of the + * equivalent types and values in the Windows Platform SDK are given in + * comments. + * + * Author: Mark Mentovai + * Change to split into its own file: Neal Sidhwaney */ + +/* + * Breakpad minidump extension for PowerPC support. Based on Darwin/Mac OS X' + * mach/ppc/_types.h + */ + +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_PPC_H__ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_PPC_H__ + +#define MD_FLOATINGSAVEAREA_PPC_FPR_COUNT 32 + +typedef struct { + /* fpregs is a double[32] in mach/ppc/_types.h, but a uint64_t is used + * here for precise sizing. */ + uint64_t fpregs[MD_FLOATINGSAVEAREA_PPC_FPR_COUNT]; + uint32_t fpscr_pad; + uint32_t fpscr; /* Status/control */ +} MDFloatingSaveAreaPPC; /* Based on ppc_float_state */ + + +#define MD_VECTORSAVEAREA_PPC_VR_COUNT 32 + +typedef struct { + /* Vector registers (including vscr) are 128 bits, but mach/ppc/_types.h + * exposes them as four 32-bit quantities. */ + uint128_struct save_vr[MD_VECTORSAVEAREA_PPC_VR_COUNT]; + uint128_struct save_vscr; /* Status/control */ + uint32_t save_pad5[4]; + uint32_t save_vrvalid; /* Indicates which vector registers are saved */ + uint32_t save_pad6[7]; +} MDVectorSaveAreaPPC; /* ppc_vector_state */ + + +#define MD_CONTEXT_PPC_GPR_COUNT 32 + +/* Use the same 32-bit alignment when accessing this structure from 64-bit code + * as is used natively in 32-bit code. #pragma pack is a MSVC extension + * supported by gcc. */ +#if defined(__SUNPRO_C) || defined(__SUNPRO_CC) +#pragma pack(4) +#else +#pragma pack(push, 4) +#endif + +typedef struct { + /* context_flags is not present in ppc_thread_state, but it aids + * identification of MDRawContextPPC among other raw context types, + * and it guarantees alignment when we get to float_save. */ + uint32_t context_flags; + + uint32_t srr0; /* Machine status save/restore: stores pc + * (instruction) */ + uint32_t srr1; /* Machine status save/restore: stores msr + * (ps, program/machine state) */ + /* ppc_thread_state contains 32 fields, r0 .. r31. Here, an array is + * used for brevity. */ + uint32_t gpr[MD_CONTEXT_PPC_GPR_COUNT]; + uint32_t cr; /* Condition */ + uint32_t xer; /* Integer (fiXed-point) exception */ + uint32_t lr; /* Link */ + uint32_t ctr; /* Count */ + uint32_t mq; /* Multiply/Quotient (PPC 601, POWER only) */ + uint32_t vrsave; /* Vector save */ + + /* float_save and vector_save aren't present in ppc_thread_state, but + * are represented in separate structures that still define a thread's + * context. */ + MDFloatingSaveAreaPPC float_save; + MDVectorSaveAreaPPC vector_save; +} MDRawContextPPC; /* Based on ppc_thread_state */ + +#if defined(__SUNPRO_C) || defined(__SUNPRO_CC) +#pragma pack(0) +#else +#pragma pack(pop) +#endif + +/* For (MDRawContextPPC).context_flags. These values indicate the type of + * context stored in the structure. MD_CONTEXT_PPC is Breakpad-defined. Its + * value was chosen to avoid likely conflicts with MD_CONTEXT_* for other + * CPUs. 
*/ +#define MD_CONTEXT_PPC 0x20000000 +#define MD_CONTEXT_PPC_BASE (MD_CONTEXT_PPC | 0x00000001) +#define MD_CONTEXT_PPC_FLOATING_POINT (MD_CONTEXT_PPC | 0x00000008) +#define MD_CONTEXT_PPC_VECTOR (MD_CONTEXT_PPC | 0x00000020) + +#define MD_CONTEXT_PPC_FULL MD_CONTEXT_PPC_BASE +#define MD_CONTEXT_PPC_ALL (MD_CONTEXT_PPC_FULL | \ + MD_CONTEXT_PPC_FLOATING_POINT | \ + MD_CONTEXT_PPC_VECTOR) + +#endif /* GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_PPC_H__ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_ppc64.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_ppc64.h new file mode 100644 index 0000000000..3a883230a6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_ppc64.h @@ -0,0 +1,129 @@ +/* Copyright (c) 2008, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +/* minidump_format.h: A cross-platform reimplementation of minidump-related + * portions of DbgHelp.h from the Windows Platform SDK. + * + * (This is C99 source, please don't corrupt it with C++.) + * + * This file contains the necessary definitions to read minidump files + * produced on ppc64. These files may be read on any platform provided + * that the alignments of these structures on the processing system are + * identical to the alignments of these structures on the producing system. + * For this reason, precise-sized types are used. The structures defined + * by this file have been laid out to minimize alignment problems by ensuring + * ensuring that all members are aligned on their natural boundaries. In + * In some cases, tail-padding may be significant when different ABIs specify + * different tail-padding behaviors. To avoid problems when reading or + * writing affected structures, MD_*_SIZE macros are provided where needed, + * containing the useful size of the structures without padding. 
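For the PowerPC context just defined, a comparable sketch (helper name and include order are assumptions; treating r1 as the stack pointer follows the usual PowerPC ABI convention rather than anything stated in this header):

#include <stdio.h>
#include "google_breakpad/common/breakpad_types.h"
#include "google_breakpad/common/minidump_cpu_ppc.h"

static void dump_ppc_frame(const MDRawContextPPC *ctx) {
  if ((ctx->context_flags & MD_CONTEXT_PPC_BASE) == MD_CONTEXT_PPC_BASE) {
    /* srr0 holds the pc; gpr[1] is the stack pointer by ABI convention. */
    printf("pc=0x%08x sp=0x%08x lr=0x%08x\n",
           (unsigned)ctx->srr0, (unsigned)ctx->gpr[1], (unsigned)ctx->lr);
  }
}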
+ * + * Structures that are defined by Microsoft to contain a zero-length array + * are instead defined here to contain an array with one element, as + * zero-length arrays are forbidden by standard C and C++. In these cases, + * *_minsize constants are provided to be used in place of sizeof. For a + * cleaner interface to these sizes when using C++, see minidump_size.h. + * + * These structures are also sufficient to populate minidump files. + * + * These definitions may be extended to support handling minidump files + * for other CPUs and other operating systems. + * + * Because precise data type sizes are crucial for this implementation to + * function properly and portably in terms of interoperability with minidumps + * produced by DbgHelp on Windows, a set of primitive types with known sizes + * are used as the basis of each structure defined by this file. DbgHelp + * on Windows is assumed to be the reference implementation; this file + * seeks to provide a cross-platform compatible implementation. To avoid + * collisions with the types and values defined and used by DbgHelp in the + * event that this implementation is used on Windows, each type and value + * defined here is given a new name, beginning with "MD". Names of the + * equivalent types and values in the Windows Platform SDK are given in + * comments. + * + * Author: Neal Sidhwaney */ + + +/* + * Breakpad minidump extension for PPC64 support. Based on Darwin/Mac OS X' + * mach/ppc/_types.h + */ + +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_PPC64_H__ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_PPC64_H__ + +#include "minidump_cpu_ppc.h" + +// these types are the same in ppc64 & ppc +typedef MDFloatingSaveAreaPPC MDFloatingSaveAreaPPC64; +typedef MDVectorSaveAreaPPC MDVectorSaveAreaPPC64; + +#define MD_CONTEXT_PPC64_GPR_COUNT MD_CONTEXT_PPC_GPR_COUNT + +typedef struct { + /* context_flags is not present in ppc_thread_state, but it aids + * identification of MDRawContextPPC among other raw context types, + * and it guarantees alignment when we get to float_save. */ + uint64_t context_flags; + + uint64_t srr0; /* Machine status save/restore: stores pc + * (instruction) */ + uint64_t srr1; /* Machine status save/restore: stores msr + * (ps, program/machine state) */ + /* ppc_thread_state contains 32 fields, r0 .. r31. Here, an array is + * used for brevity. */ + uint64_t gpr[MD_CONTEXT_PPC64_GPR_COUNT]; + uint64_t cr; /* Condition */ + uint64_t xer; /* Integer (fiXed-point) exception */ + uint64_t lr; /* Link */ + uint64_t ctr; /* Count */ + uint64_t vrsave; /* Vector save */ + + /* float_save and vector_save aren't present in ppc_thread_state, but + * are represented in separate structures that still define a thread's + * context. */ + MDFloatingSaveAreaPPC float_save; + MDVectorSaveAreaPPC vector_save; +} MDRawContextPPC64; /* Based on ppc_thread_state */ + +/* For (MDRawContextPPC).context_flags. These values indicate the type of + * context stored in the structure. MD_CONTEXT_PPC is Breakpad-defined. Its + * value was chosen to avoid likely conflicts with MD_CONTEXT_* for other + * CPUs. 
*/ +#define MD_CONTEXT_PPC64 0x01000000 +#define MD_CONTEXT_PPC64_BASE (MD_CONTEXT_PPC64 | 0x00000001) +#define MD_CONTEXT_PPC64_FLOATING_POINT (MD_CONTEXT_PPC64 | 0x00000008) +#define MD_CONTEXT_PPC64_VECTOR (MD_CONTEXT_PPC64 | 0x00000020) + +#define MD_CONTEXT_PPC64_FULL MD_CONTEXT_PPC64_BASE +#define MD_CONTEXT_PPC64_ALL (MD_CONTEXT_PPC64_FULL | \ + MD_CONTEXT_PPC64_FLOATING_POINT | \ + MD_CONTEXT_PPC64_VECTOR) + +#endif /* GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_PPC64_H__ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_sparc.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_sparc.h new file mode 100644 index 0000000000..ddc4c7590d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_sparc.h @@ -0,0 +1,158 @@ +/* Copyright (c) 2006, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +/* minidump_format.h: A cross-platform reimplementation of minidump-related + * portions of DbgHelp.h from the Windows Platform SDK. + * + * (This is C99 source, please don't corrupt it with C++.) + * + * This file contains the necessary definitions to read minidump files + * produced on sparc. These files may be read on any platform provided + * that the alignments of these structures on the processing system are + * identical to the alignments of these structures on the producing system. + * For this reason, precise-sized types are used. The structures defined + * by this file have been laid out to minimize alignment problems by ensuring + * ensuring that all members are aligned on their natural boundaries. In + * In some cases, tail-padding may be significant when different ABIs specify + * different tail-padding behaviors. To avoid problems when reading or + * writing affected structures, MD_*_SIZE macros are provided where needed, + * containing the useful size of the structures without padding. 
+ * + * Structures that are defined by Microsoft to contain a zero-length array + * are instead defined here to contain an array with one element, as + * zero-length arrays are forbidden by standard C and C++. In these cases, + * *_minsize constants are provided to be used in place of sizeof. For a + * cleaner interface to these sizes when using C++, see minidump_size.h. + * + * These structures are also sufficient to populate minidump files. + * + * These definitions may be extended to support handling minidump files + * for other CPUs and other operating systems. + * + * Because precise data type sizes are crucial for this implementation to + * function properly and portably in terms of interoperability with minidumps + * produced by DbgHelp on Windows, a set of primitive types with known sizes + * are used as the basis of each structure defined by this file. DbgHelp + * on Windows is assumed to be the reference implementation; this file + * seeks to provide a cross-platform compatible implementation. To avoid + * collisions with the types and values defined and used by DbgHelp in the + * event that this implementation is used on Windows, each type and value + * defined here is given a new name, beginning with "MD". Names of the + * equivalent types and values in the Windows Platform SDK are given in + * comments. + * + * Author: Mark Mentovai + * Change to split into its own file: Neal Sidhwaney */ + +/* + * SPARC support, see (solaris)sys/procfs_isa.h also + */ + +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_SPARC_H__ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_SPARC_H__ + +#define MD_FLOATINGSAVEAREA_SPARC_FPR_COUNT 32 + +typedef struct { + + /* FPU floating point regs */ + uint64_t regs[MD_FLOATINGSAVEAREA_SPARC_FPR_COUNT]; + + uint64_t filler; + uint64_t fsr; /* FPU status register */ +} MDFloatingSaveAreaSPARC; /* FLOATING_SAVE_AREA */ + +#define MD_CONTEXT_SPARC_GPR_COUNT 32 + +typedef struct { + /* The next field determines the layout of the structure, and which parts + * of it are populated + */ + uint32_t context_flags; + uint32_t flag_pad; + /* + * General register access (SPARC). + * Don't confuse definitions here with definitions in . + * Registers are 32 bits for ILP32, 64 bits for LP64. + * SPARC V7/V8 is for 32bit, SPARC V9 is for 64bit + */ + + /* 32 Integer working registers */ + + /* g_r[0-7] global registers(g0-g7) + * g_r[8-15] out registers(o0-o7) + * g_r[16-23] local registers(l0-l7) + * g_r[24-31] in registers(i0-i7) + */ + uint64_t g_r[MD_CONTEXT_SPARC_GPR_COUNT]; + + /* several control registers */ + + /* Processor State register(PSR) for SPARC V7/V8 + * Condition Code register (CCR) for SPARC V9 + */ + uint64_t ccr; + + uint64_t pc; /* Program Counter register (PC) */ + uint64_t npc; /* Next Program Counter register (nPC) */ + uint64_t y; /* Y register (Y) */ + + /* Address Space Identifier register (ASI) for SPARC V9 + * WIM for SPARC V7/V8 + */ + uint64_t asi; + + /* Floating-Point Registers State register (FPRS) for SPARC V9 + * TBR for for SPARC V7/V8 + */ + uint64_t fprs; + + /* The next field is included with MD_CONTEXT_SPARC_FLOATING_POINT */ + MDFloatingSaveAreaSPARC float_save; + +} MDRawContextSPARC; /* CONTEXT_SPARC */ + +/* For (MDRawContextSPARC).context_flags. These values indicate the type of + * context stored in the structure. MD_CONTEXT_SPARC is Breakpad-defined. Its + * value was chosen to avoid likely conflicts with MD_CONTEXT_* for other + * CPUs. 
*/ +#define MD_CONTEXT_SPARC 0x10000000 +#define MD_CONTEXT_SPARC_CONTROL (MD_CONTEXT_SPARC | 0x00000001) +#define MD_CONTEXT_SPARC_INTEGER (MD_CONTEXT_SPARC | 0x00000002) +#define MD_CONTEXT_SAPARC_FLOATING_POINT (MD_CONTEXT_SPARC | 0x00000004) +#define MD_CONTEXT_SAPARC_EXTRA (MD_CONTEXT_SPARC | 0x00000008) + +#define MD_CONTEXT_SPARC_FULL (MD_CONTEXT_SPARC_CONTROL | \ + MD_CONTEXT_SPARC_INTEGER) + +#define MD_CONTEXT_SPARC_ALL (MD_CONTEXT_SPARC_FULL | \ + MD_CONTEXT_SAPARC_FLOATING_POINT | \ + MD_CONTEXT_SAPARC_EXTRA) + +#endif /* GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_SPARC_H__ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_x86.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_x86.h new file mode 100644 index 0000000000..e09cb7cb52 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_cpu_x86.h @@ -0,0 +1,174 @@ +/* Copyright (c) 2006, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +/* minidump_format.h: A cross-platform reimplementation of minidump-related + * portions of DbgHelp.h from the Windows Platform SDK. + * + * (This is C99 source, please don't corrupt it with C++.) + * + * This file contains the necessary definitions to read minidump files + * produced on x86. These files may be read on any platform provided + * that the alignments of these structures on the processing system are + * identical to the alignments of these structures on the producing system. + * For this reason, precise-sized types are used. The structures defined + * by this file have been laid out to minimize alignment problems by ensuring + * ensuring that all members are aligned on their natural boundaries. In + * In some cases, tail-padding may be significant when different ABIs specify + * different tail-padding behaviors. To avoid problems when reading or + * writing affected structures, MD_*_SIZE macros are provided where needed, + * containing the useful size of the structures without padding. 
+ * + * Structures that are defined by Microsoft to contain a zero-length array + * are instead defined here to contain an array with one element, as + * zero-length arrays are forbidden by standard C and C++. In these cases, + * *_minsize constants are provided to be used in place of sizeof. For a + * cleaner interface to these sizes when using C++, see minidump_size.h. + * + * These structures are also sufficient to populate minidump files. + * + * These definitions may be extended to support handling minidump files + * for other CPUs and other operating systems. + * + * Because precise data type sizes are crucial for this implementation to + * function properly and portably in terms of interoperability with minidumps + * produced by DbgHelp on Windows, a set of primitive types with known sizes + * are used as the basis of each structure defined by this file. DbgHelp + * on Windows is assumed to be the reference implementation; this file + * seeks to provide a cross-platform compatible implementation. To avoid + * collisions with the types and values defined and used by DbgHelp in the + * event that this implementation is used on Windows, each type and value + * defined here is given a new name, beginning with "MD". Names of the + * equivalent types and values in the Windows Platform SDK are given in + * comments. + * + * Author: Mark Mentovai */ + +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_X86_H__ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_X86_H__ + +#define MD_FLOATINGSAVEAREA_X86_REGISTERAREA_SIZE 80 + /* SIZE_OF_80387_REGISTERS */ + +typedef struct { + uint32_t control_word; + uint32_t status_word; + uint32_t tag_word; + uint32_t error_offset; + uint32_t error_selector; + uint32_t data_offset; + uint32_t data_selector; + + /* register_area contains eight 80-bit (x87 "long double") quantities for + * floating-point registers %st0 (%mm0) through %st7 (%mm7). */ + uint8_t register_area[MD_FLOATINGSAVEAREA_X86_REGISTERAREA_SIZE]; + uint32_t cr0_npx_state; +} MDFloatingSaveAreaX86; /* FLOATING_SAVE_AREA */ + + +#define MD_CONTEXT_X86_EXTENDED_REGISTERS_SIZE 512 + /* MAXIMUM_SUPPORTED_EXTENSION */ + +typedef struct { + /* The next field determines the layout of the structure, and which parts + * of it are populated */ + uint32_t context_flags; + + /* The next 6 registers are included with MD_CONTEXT_X86_DEBUG_REGISTERS */ + uint32_t dr0; + uint32_t dr1; + uint32_t dr2; + uint32_t dr3; + uint32_t dr6; + uint32_t dr7; + + /* The next field is included with MD_CONTEXT_X86_FLOATING_POINT */ + MDFloatingSaveAreaX86 float_save; + + /* The next 4 registers are included with MD_CONTEXT_X86_SEGMENTS */ + uint32_t gs; + uint32_t fs; + uint32_t es; + uint32_t ds; + /* The next 6 registers are included with MD_CONTEXT_X86_INTEGER */ + uint32_t edi; + uint32_t esi; + uint32_t ebx; + uint32_t edx; + uint32_t ecx; + uint32_t eax; + + /* The next 6 registers are included with MD_CONTEXT_X86_CONTROL */ + uint32_t ebp; + uint32_t eip; + uint32_t cs; /* WinNT.h says "must be sanitized" */ + uint32_t eflags; /* WinNT.h says "must be sanitized" */ + uint32_t esp; + uint32_t ss; + + /* The next field is included with MD_CONTEXT_X86_EXTENDED_REGISTERS. + * It contains vector (MMX/SSE) registers. It it laid out in the + * format used by the fxsave and fsrstor instructions, so it includes + * a copy of the x87 floating-point registers as well. See FXSAVE in + * "Intel Architecture Software Developer's Manual, Volume 2." 
*/ + uint8_t extended_registers[ + MD_CONTEXT_X86_EXTENDED_REGISTERS_SIZE]; +} MDRawContextX86; /* CONTEXT */ + +/* For (MDRawContextX86).context_flags. These values indicate the type of + * context stored in the structure. The high 24 bits identify the CPU, the + * low 8 bits identify the type of context saved. */ +#define MD_CONTEXT_X86 0x00010000 + /* CONTEXT_i386, CONTEXT_i486: identifies CPU */ +#define MD_CONTEXT_X86_CONTROL (MD_CONTEXT_X86 | 0x00000001) + /* CONTEXT_CONTROL */ +#define MD_CONTEXT_X86_INTEGER (MD_CONTEXT_X86 | 0x00000002) + /* CONTEXT_INTEGER */ +#define MD_CONTEXT_X86_SEGMENTS (MD_CONTEXT_X86 | 0x00000004) + /* CONTEXT_SEGMENTS */ +#define MD_CONTEXT_X86_FLOATING_POINT (MD_CONTEXT_X86 | 0x00000008) + /* CONTEXT_FLOATING_POINT */ +#define MD_CONTEXT_X86_DEBUG_REGISTERS (MD_CONTEXT_X86 | 0x00000010) + /* CONTEXT_DEBUG_REGISTERS */ +#define MD_CONTEXT_X86_EXTENDED_REGISTERS (MD_CONTEXT_X86 | 0x00000020) + /* CONTEXT_EXTENDED_REGISTERS */ +#define MD_CONTEXT_X86_XSTATE (MD_CONTEXT_X86 | 0x00000040) + /* CONTEXT_XSTATE */ + +#define MD_CONTEXT_X86_FULL (MD_CONTEXT_X86_CONTROL | \ + MD_CONTEXT_X86_INTEGER | \ + MD_CONTEXT_X86_SEGMENTS) + /* CONTEXT_FULL */ + +#define MD_CONTEXT_X86_ALL (MD_CONTEXT_X86_FULL | \ + MD_CONTEXT_X86_FLOATING_POINT | \ + MD_CONTEXT_X86_DEBUG_REGISTERS | \ + MD_CONTEXT_X86_EXTENDED_REGISTERS) + /* CONTEXT_ALL */ + +#endif /* GOOGLE_BREAKPAD_COMMON_MINIDUMP_CPU_X86_H__ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_exception_linux.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_exception_linux.h new file mode 100644 index 0000000000..9e7e4f1e12 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_exception_linux.h @@ -0,0 +1,87 @@ +/* Copyright (c) 2006, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +/* minidump_exception_linux.h: A definition of exception codes for + * Linux + * + * (This is C99 source, please don't corrupt it with C++.) 
+ * + * Author: Mark Mentovai + * Split into its own file: Neal Sidhwaney */ + + +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_EXCEPTION_LINUX_H__ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_EXCEPTION_LINUX_H__ + +#include + +#include "google_breakpad/common/breakpad_types.h" + + +/* For (MDException).exception_code. These values come from bits/signum.h. + */ +typedef enum { + MD_EXCEPTION_CODE_LIN_SIGHUP = 1, /* Hangup (POSIX) */ + MD_EXCEPTION_CODE_LIN_SIGINT = 2, /* Interrupt (ANSI) */ + MD_EXCEPTION_CODE_LIN_SIGQUIT = 3, /* Quit (POSIX) */ + MD_EXCEPTION_CODE_LIN_SIGILL = 4, /* Illegal instruction (ANSI) */ + MD_EXCEPTION_CODE_LIN_SIGTRAP = 5, /* Trace trap (POSIX) */ + MD_EXCEPTION_CODE_LIN_SIGABRT = 6, /* Abort (ANSI) */ + MD_EXCEPTION_CODE_LIN_SIGBUS = 7, /* BUS error (4.2 BSD) */ + MD_EXCEPTION_CODE_LIN_SIGFPE = 8, /* Floating-point exception (ANSI) */ + MD_EXCEPTION_CODE_LIN_SIGKILL = 9, /* Kill, unblockable (POSIX) */ + MD_EXCEPTION_CODE_LIN_SIGUSR1 = 10, /* User-defined signal 1 (POSIX). */ + MD_EXCEPTION_CODE_LIN_SIGSEGV = 11, /* Segmentation violation (ANSI) */ + MD_EXCEPTION_CODE_LIN_SIGUSR2 = 12, /* User-defined signal 2 (POSIX) */ + MD_EXCEPTION_CODE_LIN_SIGPIPE = 13, /* Broken pipe (POSIX) */ + MD_EXCEPTION_CODE_LIN_SIGALRM = 14, /* Alarm clock (POSIX) */ + MD_EXCEPTION_CODE_LIN_SIGTERM = 15, /* Termination (ANSI) */ + MD_EXCEPTION_CODE_LIN_SIGSTKFLT = 16, /* Stack faultd */ + MD_EXCEPTION_CODE_LIN_SIGCHLD = 17, /* Child status has changed (POSIX) */ + MD_EXCEPTION_CODE_LIN_SIGCONT = 18, /* Continue (POSIX) */ + MD_EXCEPTION_CODE_LIN_SIGSTOP = 19, /* Stop, unblockable (POSIX) */ + MD_EXCEPTION_CODE_LIN_SIGTSTP = 20, /* Keyboard stop (POSIX) */ + MD_EXCEPTION_CODE_LIN_SIGTTIN = 21, /* Background read from tty (POSIX) */ + MD_EXCEPTION_CODE_LIN_SIGTTOU = 22, /* Background write to tty (POSIX) */ + MD_EXCEPTION_CODE_LIN_SIGURG = 23, + /* Urgent condition on socket (4.2 BSD) */ + MD_EXCEPTION_CODE_LIN_SIGXCPU = 24, /* CPU limit exceeded (4.2 BSD) */ + MD_EXCEPTION_CODE_LIN_SIGXFSZ = 25, + /* File size limit exceeded (4.2 BSD) */ + MD_EXCEPTION_CODE_LIN_SIGVTALRM = 26, /* Virtual alarm clock (4.2 BSD) */ + MD_EXCEPTION_CODE_LIN_SIGPROF = 27, /* Profiling alarm clock (4.2 BSD) */ + MD_EXCEPTION_CODE_LIN_SIGWINCH = 28, /* Window size change (4.3 BSD, Sun) */ + MD_EXCEPTION_CODE_LIN_SIGIO = 29, /* I/O now possible (4.2 BSD) */ + MD_EXCEPTION_CODE_LIN_SIGPWR = 30, /* Power failure restart (System V) */ + MD_EXCEPTION_CODE_LIN_SIGSYS = 31, /* Bad system call */ + MD_EXCEPTION_CODE_LIN_DUMP_REQUESTED = 0xFFFFFFFF /* No exception, + dump requested. */ +} MDExceptionCodeLinux; + +#endif /* GOOGLE_BREAKPAD_COMMON_MINIDUMP_EXCEPTION_LINUX_H__ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_exception_mac.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_exception_mac.h new file mode 100644 index 0000000000..91c1c09746 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_exception_mac.h @@ -0,0 +1,205 @@ +/* Copyright (c) 2006, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. 
+ * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +/* minidump_exception_mac.h: A definition of exception codes for Mac + * OS X + * + * (This is C99 source, please don't corrupt it with C++.) + * + * Author: Mark Mentovai + * Split into its own file: Neal Sidhwaney */ + + +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_EXCEPTION_MAC_H__ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_EXCEPTION_MAC_H__ + +#include + +#include "google_breakpad/common/breakpad_types.h" + +/* For (MDException).exception_code. Breakpad minidump extension for Mac OS X + * support. Based on Darwin/Mac OS X' mach/exception_types.h. This is + * what Mac OS X calls an "exception", not a "code". */ +typedef enum { + /* Exception code. The high 16 bits of exception_code contains one of + * these values. */ + MD_EXCEPTION_MAC_BAD_ACCESS = 1, /* code can be a kern_return_t */ + /* EXC_BAD_ACCESS */ + MD_EXCEPTION_MAC_BAD_INSTRUCTION = 2, /* code is CPU-specific */ + /* EXC_BAD_INSTRUCTION */ + MD_EXCEPTION_MAC_ARITHMETIC = 3, /* code is CPU-specific */ + /* EXC_ARITHMETIC */ + MD_EXCEPTION_MAC_EMULATION = 4, /* code is CPU-specific */ + /* EXC_EMULATION */ + MD_EXCEPTION_MAC_SOFTWARE = 5, + /* EXC_SOFTWARE */ + MD_EXCEPTION_MAC_BREAKPOINT = 6, /* code is CPU-specific */ + /* EXC_BREAKPOINT */ + MD_EXCEPTION_MAC_SYSCALL = 7, + /* EXC_SYSCALL */ + MD_EXCEPTION_MAC_MACH_SYSCALL = 8, + /* EXC_MACH_SYSCALL */ + MD_EXCEPTION_MAC_RPC_ALERT = 9 + /* EXC_RPC_ALERT */ +} MDExceptionMac; + +/* For (MDException).exception_flags. Breakpad minidump extension for Mac OS X + * support. Based on Darwin/Mac OS X' mach/ppc/exception.h and + * mach/i386/exception.h. This is what Mac OS X calls a "code". */ +typedef enum { + /* With MD_EXCEPTION_BAD_ACCESS. These are relevant kern_return_t values + * from mach/kern_return.h. 
*/ + MD_EXCEPTION_CODE_MAC_INVALID_ADDRESS = 1, + /* KERN_INVALID_ADDRESS */ + MD_EXCEPTION_CODE_MAC_PROTECTION_FAILURE = 2, + /* KERN_PROTECTION_FAILURE */ + MD_EXCEPTION_CODE_MAC_NO_ACCESS = 8, + /* KERN_NO_ACCESS */ + MD_EXCEPTION_CODE_MAC_MEMORY_FAILURE = 9, + /* KERN_MEMORY_FAILURE */ + MD_EXCEPTION_CODE_MAC_MEMORY_ERROR = 10, + /* KERN_MEMORY_ERROR */ + + /* With MD_EXCEPTION_SOFTWARE */ + MD_EXCEPTION_CODE_MAC_BAD_SYSCALL = 0x00010000, /* Mach SIGSYS */ + MD_EXCEPTION_CODE_MAC_BAD_PIPE = 0x00010001, /* Mach SIGPIPE */ + MD_EXCEPTION_CODE_MAC_ABORT = 0x00010002, /* Mach SIGABRT */ + /* Custom values */ + MD_EXCEPTION_CODE_MAC_NS_EXCEPTION = 0xDEADC0DE, /* uncaught NSException */ + + /* With MD_EXCEPTION_MAC_BAD_ACCESS on arm */ + MD_EXCEPTION_CODE_MAC_ARM_DA_ALIGN = 0x0101, /* EXC_ARM_DA_ALIGN */ + MD_EXCEPTION_CODE_MAC_ARM_DA_DEBUG = 0x0102, /* EXC_ARM_DA_DEBUG */ + + /* With MD_EXCEPTION_MAC_BAD_INSTRUCTION on arm */ + MD_EXCEPTION_CODE_MAC_ARM_UNDEFINED = 1, /* EXC_ARM_UNDEFINED */ + + /* With MD_EXCEPTION_MAC_BREAKPOINT on arm */ + MD_EXCEPTION_CODE_MAC_ARM_BREAKPOINT = 1, /* EXC_ARM_BREAKPOINT */ + + /* With MD_EXCEPTION_MAC_BAD_ACCESS on ppc */ + MD_EXCEPTION_CODE_MAC_PPC_VM_PROT_READ = 0x0101, + /* EXC_PPC_VM_PROT_READ */ + MD_EXCEPTION_CODE_MAC_PPC_BADSPACE = 0x0102, + /* EXC_PPC_BADSPACE */ + MD_EXCEPTION_CODE_MAC_PPC_UNALIGNED = 0x0103, + /* EXC_PPC_UNALIGNED */ + + /* With MD_EXCEPTION_MAC_BAD_INSTRUCTION on ppc */ + MD_EXCEPTION_CODE_MAC_PPC_INVALID_SYSCALL = 1, + /* EXC_PPC_INVALID_SYSCALL */ + MD_EXCEPTION_CODE_MAC_PPC_UNIMPLEMENTED_INSTRUCTION = 2, + /* EXC_PPC_UNIPL_INST */ + MD_EXCEPTION_CODE_MAC_PPC_PRIVILEGED_INSTRUCTION = 3, + /* EXC_PPC_PRIVINST */ + MD_EXCEPTION_CODE_MAC_PPC_PRIVILEGED_REGISTER = 4, + /* EXC_PPC_PRIVREG */ + MD_EXCEPTION_CODE_MAC_PPC_TRACE = 5, + /* EXC_PPC_TRACE */ + MD_EXCEPTION_CODE_MAC_PPC_PERFORMANCE_MONITOR = 6, + /* EXC_PPC_PERFMON */ + + /* With MD_EXCEPTION_MAC_ARITHMETIC on ppc */ + MD_EXCEPTION_CODE_MAC_PPC_OVERFLOW = 1, + /* EXC_PPC_OVERFLOW */ + MD_EXCEPTION_CODE_MAC_PPC_ZERO_DIVIDE = 2, + /* EXC_PPC_ZERO_DIVIDE */ + MD_EXCEPTION_CODE_MAC_PPC_FLOAT_INEXACT = 3, + /* EXC_FLT_INEXACT */ + MD_EXCEPTION_CODE_MAC_PPC_FLOAT_ZERO_DIVIDE = 4, + /* EXC_PPC_FLT_ZERO_DIVIDE */ + MD_EXCEPTION_CODE_MAC_PPC_FLOAT_UNDERFLOW = 5, + /* EXC_PPC_FLT_UNDERFLOW */ + MD_EXCEPTION_CODE_MAC_PPC_FLOAT_OVERFLOW = 6, + /* EXC_PPC_FLT_OVERFLOW */ + MD_EXCEPTION_CODE_MAC_PPC_FLOAT_NOT_A_NUMBER = 7, + /* EXC_PPC_FLT_NOT_A_NUMBER */ + + /* With MD_EXCEPTION_MAC_EMULATION on ppc */ + MD_EXCEPTION_CODE_MAC_PPC_NO_EMULATION = 8, + /* EXC_PPC_NOEMULATION */ + MD_EXCEPTION_CODE_MAC_PPC_ALTIVEC_ASSIST = 9, + /* EXC_PPC_ALTIVECASSIST */ + + /* With MD_EXCEPTION_MAC_SOFTWARE on ppc */ + MD_EXCEPTION_CODE_MAC_PPC_TRAP = 0x00000001, /* EXC_PPC_TRAP */ + MD_EXCEPTION_CODE_MAC_PPC_MIGRATE = 0x00010100, /* EXC_PPC_MIGRATE */ + + /* With MD_EXCEPTION_MAC_BREAKPOINT on ppc */ + MD_EXCEPTION_CODE_MAC_PPC_BREAKPOINT = 1, /* EXC_PPC_BREAKPOINT */ + + /* With MD_EXCEPTION_MAC_BAD_INSTRUCTION on x86, see also x86 interrupt + * values below. 
*/ + MD_EXCEPTION_CODE_MAC_X86_INVALID_OPERATION = 1, /* EXC_I386_INVOP */ + + /* With MD_EXCEPTION_MAC_ARITHMETIC on x86 */ + MD_EXCEPTION_CODE_MAC_X86_DIV = 1, /* EXC_I386_DIV */ + MD_EXCEPTION_CODE_MAC_X86_INTO = 2, /* EXC_I386_INTO */ + MD_EXCEPTION_CODE_MAC_X86_NOEXT = 3, /* EXC_I386_NOEXT */ + MD_EXCEPTION_CODE_MAC_X86_EXTOVR = 4, /* EXC_I386_EXTOVR */ + MD_EXCEPTION_CODE_MAC_X86_EXTERR = 5, /* EXC_I386_EXTERR */ + MD_EXCEPTION_CODE_MAC_X86_EMERR = 6, /* EXC_I386_EMERR */ + MD_EXCEPTION_CODE_MAC_X86_BOUND = 7, /* EXC_I386_BOUND */ + MD_EXCEPTION_CODE_MAC_X86_SSEEXTERR = 8, /* EXC_I386_SSEEXTERR */ + + /* With MD_EXCEPTION_MAC_BREAKPOINT on x86 */ + MD_EXCEPTION_CODE_MAC_X86_SGL = 1, /* EXC_I386_SGL */ + MD_EXCEPTION_CODE_MAC_X86_BPT = 2, /* EXC_I386_BPT */ + + /* With MD_EXCEPTION_MAC_BAD_INSTRUCTION on x86. These are the raw + * x86 interrupt codes. Most of these are mapped to other Mach + * exceptions and codes, are handled, or should not occur in user space. + * A few of these will do occur with MD_EXCEPTION_MAC_BAD_INSTRUCTION. */ + /* EXC_I386_DIVERR = 0: mapped to EXC_ARITHMETIC/EXC_I386_DIV */ + /* EXC_I386_SGLSTP = 1: mapped to EXC_BREAKPOINT/EXC_I386_SGL */ + /* EXC_I386_NMIFLT = 2: should not occur in user space */ + /* EXC_I386_BPTFLT = 3: mapped to EXC_BREAKPOINT/EXC_I386_BPT */ + /* EXC_I386_INTOFLT = 4: mapped to EXC_ARITHMETIC/EXC_I386_INTO */ + /* EXC_I386_BOUNDFLT = 5: mapped to EXC_ARITHMETIC/EXC_I386_BOUND */ + /* EXC_I386_INVOPFLT = 6: mapped to EXC_BAD_INSTRUCTION/EXC_I386_INVOP */ + /* EXC_I386_NOEXTFLT = 7: should be handled by the kernel */ + /* EXC_I386_DBLFLT = 8: should be handled (if possible) by the kernel */ + /* EXC_I386_EXTOVRFLT = 9: mapped to EXC_BAD_ACCESS/(PROT_READ|PROT_EXEC) */ + MD_EXCEPTION_CODE_MAC_X86_INVALID_TASK_STATE_SEGMENT = 10, + /* EXC_INVTSSFLT */ + MD_EXCEPTION_CODE_MAC_X86_SEGMENT_NOT_PRESENT = 11, + /* EXC_SEGNPFLT */ + MD_EXCEPTION_CODE_MAC_X86_STACK_FAULT = 12, + /* EXC_STKFLT */ + MD_EXCEPTION_CODE_MAC_X86_GENERAL_PROTECTION_FAULT = 13, + /* EXC_GPFLT */ + /* EXC_I386_PGFLT = 14: should not occur in user space */ + /* EXC_I386_EXTERRFLT = 16: mapped to EXC_ARITHMETIC/EXC_I386_EXTERR */ + MD_EXCEPTION_CODE_MAC_X86_ALIGNMENT_FAULT = 17 + /* EXC_ALIGNFLT (for vector operations) */ + /* EXC_I386_ENOEXTFLT = 32: should be handled by the kernel */ + /* EXC_I386_ENDPERR = 33: should not occur */ +} MDExceptionCodeMac; + +#endif /* GOOGLE_BREAKPAD_COMMON_MINIDUMP_EXCEPTION_MAC_OSX_H__ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_exception_ps3.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_exception_ps3.h new file mode 100644 index 0000000000..adff5a6bbc --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_exception_ps3.h @@ -0,0 +1,67 @@ +/* Copyright (c) 2013, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. 
nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +/* minidump_exception_ps3.h: A definition of exception codes for + * PS3 */ + + +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_EXCEPTION_PS3_H__ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_EXCEPTION_PS3_H__ + +#include + +#include "google_breakpad/common/breakpad_types.h" + +typedef enum { + MD_EXCEPTION_CODE_PS3_UNKNOWN = 0, + MD_EXCEPTION_CODE_PS3_TRAP_EXCEP = 1, + MD_EXCEPTION_CODE_PS3_PRIV_INSTR = 2, + MD_EXCEPTION_CODE_PS3_ILLEGAL_INSTR = 3, + MD_EXCEPTION_CODE_PS3_INSTR_STORAGE = 4, + MD_EXCEPTION_CODE_PS3_INSTR_SEGMENT = 5, + MD_EXCEPTION_CODE_PS3_DATA_STORAGE = 6, + MD_EXCEPTION_CODE_PS3_DATA_SEGMENT = 7, + MD_EXCEPTION_CODE_PS3_FLOAT_POINT = 8, + MD_EXCEPTION_CODE_PS3_DABR_MATCH = 9, + MD_EXCEPTION_CODE_PS3_ALIGN_EXCEP = 10, + MD_EXCEPTION_CODE_PS3_MEMORY_ACCESS = 11, + MD_EXCEPTION_CODE_PS3_COPRO_ALIGN = 12, + MD_EXCEPTION_CODE_PS3_COPRO_INVALID_COM = 13, + MD_EXCEPTION_CODE_PS3_COPRO_ERR = 14, + MD_EXCEPTION_CODE_PS3_COPRO_FIR = 15, + MD_EXCEPTION_CODE_PS3_COPRO_DATA_SEGMENT = 16, + MD_EXCEPTION_CODE_PS3_COPRO_DATA_STORAGE = 17, + MD_EXCEPTION_CODE_PS3_COPRO_STOP_INSTR = 18, + MD_EXCEPTION_CODE_PS3_COPRO_HALT_INSTR = 19, + MD_EXCEPTION_CODE_PS3_COPRO_HALTINST_UNKNOWN = 20, + MD_EXCEPTION_CODE_PS3_COPRO_MEMORY_ACCESS = 21, + MD_EXCEPTION_CODE_PS3_GRAPHIC = 22 +} MDExceptionCodePS3; + +#endif /* GOOGLE_BREAKPAD_COMMON_MINIDUMP_EXCEPTION_PS3_H__ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_exception_solaris.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_exception_solaris.h new file mode 100644 index 0000000000..f18ddf4247 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_exception_solaris.h @@ -0,0 +1,94 @@ +/* Copyright (c) 2006, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +/* minidump_exception_solaris.h: A definition of exception codes for + * Solaris + * + * (This is C99 source, please don't corrupt it with C++.) + * + * Author: Mark Mentovai + * Split into its own file: Neal Sidhwaney */ + + +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_EXCEPTION_SOLARIS_H__ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_EXCEPTION_SOLARIS_H__ + +#include + +#include "google_breakpad/common/breakpad_types.h" + +/* For (MDException).exception_code. These values come from sys/iso/signal_iso.h + */ +typedef enum { + MD_EXCEPTION_CODE_SOL_SIGHUP = 1, /* Hangup */ + MD_EXCEPTION_CODE_SOL_SIGINT = 2, /* interrupt (rubout) */ + MD_EXCEPTION_CODE_SOL_SIGQUIT = 3, /* quit (ASCII FS) */ + MD_EXCEPTION_CODE_SOL_SIGILL = 4, /* illegal instruction (not reset when caught) */ + MD_EXCEPTION_CODE_SOL_SIGTRAP = 5, /* trace trap (not reset when caught) */ + MD_EXCEPTION_CODE_SOL_SIGIOT = 6, /* IOT instruction */ + MD_EXCEPTION_CODE_SOL_SIGABRT = 6, /* used by abort, replace SIGIOT in the future */ + MD_EXCEPTION_CODE_SOL_SIGEMT = 7, /* EMT instruction */ + MD_EXCEPTION_CODE_SOL_SIGFPE = 8, /* floating point exception */ + MD_EXCEPTION_CODE_SOL_SIGKILL = 9, /* kill (cannot be caught or ignored) */ + MD_EXCEPTION_CODE_SOL_SIGBUS = 10, /* bus error */ + MD_EXCEPTION_CODE_SOL_SIGSEGV = 11, /* segmentation violation */ + MD_EXCEPTION_CODE_SOL_SIGSYS = 12, /* bad argument to system call */ + MD_EXCEPTION_CODE_SOL_SIGPIPE = 13, /* write on a pipe with no one to read it */ + MD_EXCEPTION_CODE_SOL_SIGALRM = 14, /* alarm clock */ + MD_EXCEPTION_CODE_SOL_SIGTERM = 15, /* software termination signal from kill */ + MD_EXCEPTION_CODE_SOL_SIGUSR1 = 16, /* user defined signal 1 */ + MD_EXCEPTION_CODE_SOL_SIGUSR2 = 17, /* user defined signal 2 */ + MD_EXCEPTION_CODE_SOL_SIGCLD = 18, /* child status change */ + MD_EXCEPTION_CODE_SOL_SIGCHLD = 18, /* child status change alias (POSIX) */ + MD_EXCEPTION_CODE_SOL_SIGPWR = 19, /* power-fail restart */ + MD_EXCEPTION_CODE_SOL_SIGWINCH = 20, /* window size change */ + MD_EXCEPTION_CODE_SOL_SIGURG = 21, /* urgent socket condition */ + MD_EXCEPTION_CODE_SOL_SIGPOLL = 22, /* pollable event occurred */ + MD_EXCEPTION_CODE_SOL_SIGIO = 22, /* socket I/O possible (SIGPOLL alias) */ + MD_EXCEPTION_CODE_SOL_SIGSTOP = 23, /* stop (cannot be caught or ignored) */ + MD_EXCEPTION_CODE_SOL_SIGTSTP = 24, /* user stop requested from tty */ + MD_EXCEPTION_CODE_SOL_SIGCONT = 25, /* stopped process has been continued */ + MD_EXCEPTION_CODE_SOL_SIGTTIN = 26, /* background tty read attempted */ + MD_EXCEPTION_CODE_SOL_SIGTTOU = 27, /* background tty write attempted */ + MD_EXCEPTION_CODE_SOL_SIGVTALRM = 28, /* virtual timer expired */ + MD_EXCEPTION_CODE_SOL_SIGPROF = 29, /* profiling timer expired */ + 
MD_EXCEPTION_CODE_SOL_SIGXCPU = 30, /* exceeded cpu limit */ + MD_EXCEPTION_CODE_SOL_SIGXFSZ = 31, /* exceeded file size limit */ + MD_EXCEPTION_CODE_SOL_SIGWAITING = 32, /* reserved signal no longer used by threading code */ + MD_EXCEPTION_CODE_SOL_SIGLWP = 33, /* reserved signal no longer used by threading code */ + MD_EXCEPTION_CODE_SOL_SIGFREEZE = 34, /* special signal used by CPR */ + MD_EXCEPTION_CODE_SOL_SIGTHAW = 35, /* special signal used by CPR */ + MD_EXCEPTION_CODE_SOL_SIGCANCEL = 36, /* reserved signal for thread cancellation */ + MD_EXCEPTION_CODE_SOL_SIGLOST = 37, /* resource lost (eg, record-lock lost) */ + MD_EXCEPTION_CODE_SOL_SIGXRES = 38, /* resource control exceeded */ + MD_EXCEPTION_CODE_SOL_SIGJVM1 = 39, /* reserved signal for Java Virtual Machine */ + MD_EXCEPTION_CODE_SOL_SIGJVM2 = 40 /* reserved signal for Java Virtual Machine */ +} MDExceptionCodeSolaris; + +#endif /* GOOGLE_BREAKPAD_COMMON_MINIDUMP_EXCEPTION_SOLARIS_H__ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_exception_win32.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_exception_win32.h new file mode 100644 index 0000000000..e4cd59edd5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_exception_win32.h @@ -0,0 +1,2261 @@ +/* Copyright (c) 2006, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +/* minidump_exception_win32.h: Definitions of exception codes for + * Win32 platform + * + * (This is C99 source, please don't corrupt it with C++.) + * + * Author: Mark Mentovai + * Split into its own file: Neal Sidhwaney */ + + +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_EXCEPTION_WIN32_H__ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_EXCEPTION_WIN32_H__ + +#include + +#include "google_breakpad/common/breakpad_types.h" + + +/* For (MDException).exception_code. These values come from WinBase.h + * and WinNT.h (names beginning with EXCEPTION_ are in WinBase.h, + * they are STATUS_ in WinNT.h). 
*/ +typedef enum { + MD_EXCEPTION_CODE_WIN_CONTROL_C = 0x40010005, + /* DBG_CONTROL_C */ + MD_EXCEPTION_CODE_WIN_GUARD_PAGE_VIOLATION = 0x80000001, + /* EXCEPTION_GUARD_PAGE */ + MD_EXCEPTION_CODE_WIN_DATATYPE_MISALIGNMENT = 0x80000002, + /* EXCEPTION_DATATYPE_MISALIGNMENT */ + MD_EXCEPTION_CODE_WIN_BREAKPOINT = 0x80000003, + /* EXCEPTION_BREAKPOINT */ + MD_EXCEPTION_CODE_WIN_SINGLE_STEP = 0x80000004, + /* EXCEPTION_SINGLE_STEP */ + MD_EXCEPTION_CODE_WIN_ACCESS_VIOLATION = 0xc0000005, + /* EXCEPTION_ACCESS_VIOLATION */ + MD_EXCEPTION_CODE_WIN_IN_PAGE_ERROR = 0xc0000006, + /* EXCEPTION_IN_PAGE_ERROR */ + MD_EXCEPTION_CODE_WIN_INVALID_HANDLE = 0xc0000008, + /* EXCEPTION_INVALID_HANDLE */ + MD_EXCEPTION_CODE_WIN_ILLEGAL_INSTRUCTION = 0xc000001d, + /* EXCEPTION_ILLEGAL_INSTRUCTION */ + MD_EXCEPTION_CODE_WIN_NONCONTINUABLE_EXCEPTION = 0xc0000025, + /* EXCEPTION_NONCONTINUABLE_EXCEPTION */ + MD_EXCEPTION_CODE_WIN_INVALID_DISPOSITION = 0xc0000026, + /* EXCEPTION_INVALID_DISPOSITION */ + MD_EXCEPTION_CODE_WIN_ARRAY_BOUNDS_EXCEEDED = 0xc000008c, + /* EXCEPTION_BOUNDS_EXCEEDED */ + MD_EXCEPTION_CODE_WIN_FLOAT_DENORMAL_OPERAND = 0xc000008d, + /* EXCEPTION_FLT_DENORMAL_OPERAND */ + MD_EXCEPTION_CODE_WIN_FLOAT_DIVIDE_BY_ZERO = 0xc000008e, + /* EXCEPTION_FLT_DIVIDE_BY_ZERO */ + MD_EXCEPTION_CODE_WIN_FLOAT_INEXACT_RESULT = 0xc000008f, + /* EXCEPTION_FLT_INEXACT_RESULT */ + MD_EXCEPTION_CODE_WIN_FLOAT_INVALID_OPERATION = 0xc0000090, + /* EXCEPTION_FLT_INVALID_OPERATION */ + MD_EXCEPTION_CODE_WIN_FLOAT_OVERFLOW = 0xc0000091, + /* EXCEPTION_FLT_OVERFLOW */ + MD_EXCEPTION_CODE_WIN_FLOAT_STACK_CHECK = 0xc0000092, + /* EXCEPTION_FLT_STACK_CHECK */ + MD_EXCEPTION_CODE_WIN_FLOAT_UNDERFLOW = 0xc0000093, + /* EXCEPTION_FLT_UNDERFLOW */ + MD_EXCEPTION_CODE_WIN_INTEGER_DIVIDE_BY_ZERO = 0xc0000094, + /* EXCEPTION_INT_DIVIDE_BY_ZERO */ + MD_EXCEPTION_CODE_WIN_INTEGER_OVERFLOW = 0xc0000095, + /* EXCEPTION_INT_OVERFLOW */ + MD_EXCEPTION_CODE_WIN_PRIVILEGED_INSTRUCTION = 0xc0000096, + /* EXCEPTION_PRIV_INSTRUCTION */ + MD_EXCEPTION_CODE_WIN_STACK_OVERFLOW = 0xc00000fd, + /* EXCEPTION_STACK_OVERFLOW */ + MD_EXCEPTION_CODE_WIN_POSSIBLE_DEADLOCK = 0xc0000194, + /* EXCEPTION_POSSIBLE_DEADLOCK */ + MD_EXCEPTION_CODE_WIN_STACK_BUFFER_OVERRUN = 0xc0000409, + /* STATUS_STACK_BUFFER_OVERRUN */ + MD_EXCEPTION_CODE_WIN_HEAP_CORRUPTION = 0xc0000374, + /* STATUS_HEAP_CORRUPTION */ + MD_EXCEPTION_CODE_WIN_UNHANDLED_CPP_EXCEPTION = 0xe06d7363 + /* Per http://support.microsoft.com/kb/185294, + generated by Visual C++ compiler */ +} MDExceptionCodeWin; + + +/* For (MDException).exception_information[2], when (MDException).exception_code + * is MD_EXCEPTION_CODE_WIN_IN_PAGE_ERROR. This describes the underlying reason + * for the error. These values come from ntstatus.h. + * + * The content of this enum was created from ntstatus.h in the 8.1 SDK with + * + * egrep '#define [A-Z_0-9]+\s+\(\(NTSTATUS\)0xC[0-9A-F]+L\)' ntstatus.h + * | tr -d '\r' + * | sed -r 's@#define ([A-Z_0-9]+)\s+\(\(NTSTATUS\)(0xC[0-9A-F]+)L\).*@\2 \1@' + * | sort + * | sed -r 's@(0xC[0-9A-F]+) ([A-Z_0-9]+)@ MD_NTSTATUS_WIN_\2 = \1,@' + * + * With easy copy to clipboard with + * | xclip -selection c # on linux + * | clip # on windows + * | pbcopy # on mac + * + * and then the last comma manually removed. 
*/ +typedef enum { + MD_NTSTATUS_WIN_STATUS_UNSUCCESSFUL = 0xC0000001, + MD_NTSTATUS_WIN_STATUS_NOT_IMPLEMENTED = 0xC0000002, + MD_NTSTATUS_WIN_STATUS_INVALID_INFO_CLASS = 0xC0000003, + MD_NTSTATUS_WIN_STATUS_INFO_LENGTH_MISMATCH = 0xC0000004, + MD_NTSTATUS_WIN_STATUS_ACCESS_VIOLATION = 0xC0000005, + MD_NTSTATUS_WIN_STATUS_IN_PAGE_ERROR = 0xC0000006, + MD_NTSTATUS_WIN_STATUS_PAGEFILE_QUOTA = 0xC0000007, + MD_NTSTATUS_WIN_STATUS_INVALID_HANDLE = 0xC0000008, + MD_NTSTATUS_WIN_STATUS_BAD_INITIAL_STACK = 0xC0000009, + MD_NTSTATUS_WIN_STATUS_BAD_INITIAL_PC = 0xC000000A, + MD_NTSTATUS_WIN_STATUS_INVALID_CID = 0xC000000B, + MD_NTSTATUS_WIN_STATUS_TIMER_NOT_CANCELED = 0xC000000C, + MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER = 0xC000000D, + MD_NTSTATUS_WIN_STATUS_NO_SUCH_DEVICE = 0xC000000E, + MD_NTSTATUS_WIN_STATUS_NO_SUCH_FILE = 0xC000000F, + MD_NTSTATUS_WIN_STATUS_INVALID_DEVICE_REQUEST = 0xC0000010, + MD_NTSTATUS_WIN_STATUS_END_OF_FILE = 0xC0000011, + MD_NTSTATUS_WIN_STATUS_WRONG_VOLUME = 0xC0000012, + MD_NTSTATUS_WIN_STATUS_NO_MEDIA_IN_DEVICE = 0xC0000013, + MD_NTSTATUS_WIN_STATUS_UNRECOGNIZED_MEDIA = 0xC0000014, + MD_NTSTATUS_WIN_STATUS_NONEXISTENT_SECTOR = 0xC0000015, + MD_NTSTATUS_WIN_STATUS_MORE_PROCESSING_REQUIRED = 0xC0000016, + MD_NTSTATUS_WIN_STATUS_NO_MEMORY = 0xC0000017, + MD_NTSTATUS_WIN_STATUS_CONFLICTING_ADDRESSES = 0xC0000018, + MD_NTSTATUS_WIN_STATUS_NOT_MAPPED_VIEW = 0xC0000019, + MD_NTSTATUS_WIN_STATUS_UNABLE_TO_FREE_VM = 0xC000001A, + MD_NTSTATUS_WIN_STATUS_UNABLE_TO_DELETE_SECTION = 0xC000001B, + MD_NTSTATUS_WIN_STATUS_INVALID_SYSTEM_SERVICE = 0xC000001C, + MD_NTSTATUS_WIN_STATUS_ILLEGAL_INSTRUCTION = 0xC000001D, + MD_NTSTATUS_WIN_STATUS_INVALID_LOCK_SEQUENCE = 0xC000001E, + MD_NTSTATUS_WIN_STATUS_INVALID_VIEW_SIZE = 0xC000001F, + MD_NTSTATUS_WIN_STATUS_INVALID_FILE_FOR_SECTION = 0xC0000020, + MD_NTSTATUS_WIN_STATUS_ALREADY_COMMITTED = 0xC0000021, + MD_NTSTATUS_WIN_STATUS_ACCESS_DENIED = 0xC0000022, + MD_NTSTATUS_WIN_STATUS_BUFFER_TOO_SMALL = 0xC0000023, + MD_NTSTATUS_WIN_STATUS_OBJECT_TYPE_MISMATCH = 0xC0000024, + MD_NTSTATUS_WIN_STATUS_NONCONTINUABLE_EXCEPTION = 0xC0000025, + MD_NTSTATUS_WIN_STATUS_INVALID_DISPOSITION = 0xC0000026, + MD_NTSTATUS_WIN_STATUS_UNWIND = 0xC0000027, + MD_NTSTATUS_WIN_STATUS_BAD_STACK = 0xC0000028, + MD_NTSTATUS_WIN_STATUS_INVALID_UNWIND_TARGET = 0xC0000029, + MD_NTSTATUS_WIN_STATUS_NOT_LOCKED = 0xC000002A, + MD_NTSTATUS_WIN_STATUS_PARITY_ERROR = 0xC000002B, + MD_NTSTATUS_WIN_STATUS_UNABLE_TO_DECOMMIT_VM = 0xC000002C, + MD_NTSTATUS_WIN_STATUS_NOT_COMMITTED = 0xC000002D, + MD_NTSTATUS_WIN_STATUS_INVALID_PORT_ATTRIBUTES = 0xC000002E, + MD_NTSTATUS_WIN_STATUS_PORT_MESSAGE_TOO_LONG = 0xC000002F, + MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_MIX = 0xC0000030, + MD_NTSTATUS_WIN_STATUS_INVALID_QUOTA_LOWER = 0xC0000031, + MD_NTSTATUS_WIN_STATUS_DISK_CORRUPT_ERROR = 0xC0000032, + MD_NTSTATUS_WIN_STATUS_OBJECT_NAME_INVALID = 0xC0000033, + MD_NTSTATUS_WIN_STATUS_OBJECT_NAME_NOT_FOUND = 0xC0000034, + MD_NTSTATUS_WIN_STATUS_OBJECT_NAME_COLLISION = 0xC0000035, + MD_NTSTATUS_WIN_STATUS_PORT_DISCONNECTED = 0xC0000037, + MD_NTSTATUS_WIN_STATUS_DEVICE_ALREADY_ATTACHED = 0xC0000038, + MD_NTSTATUS_WIN_STATUS_OBJECT_PATH_INVALID = 0xC0000039, + MD_NTSTATUS_WIN_STATUS_OBJECT_PATH_NOT_FOUND = 0xC000003A, + MD_NTSTATUS_WIN_STATUS_OBJECT_PATH_SYNTAX_BAD = 0xC000003B, + MD_NTSTATUS_WIN_STATUS_DATA_OVERRUN = 0xC000003C, + MD_NTSTATUS_WIN_STATUS_DATA_LATE_ERROR = 0xC000003D, + MD_NTSTATUS_WIN_STATUS_DATA_ERROR = 0xC000003E, + MD_NTSTATUS_WIN_STATUS_CRC_ERROR = 0xC000003F, + 
MD_NTSTATUS_WIN_STATUS_SECTION_TOO_BIG = 0xC0000040, + MD_NTSTATUS_WIN_STATUS_PORT_CONNECTION_REFUSED = 0xC0000041, + MD_NTSTATUS_WIN_STATUS_INVALID_PORT_HANDLE = 0xC0000042, + MD_NTSTATUS_WIN_STATUS_SHARING_VIOLATION = 0xC0000043, + MD_NTSTATUS_WIN_STATUS_QUOTA_EXCEEDED = 0xC0000044, + MD_NTSTATUS_WIN_STATUS_INVALID_PAGE_PROTECTION = 0xC0000045, + MD_NTSTATUS_WIN_STATUS_MUTANT_NOT_OWNED = 0xC0000046, + MD_NTSTATUS_WIN_STATUS_SEMAPHORE_LIMIT_EXCEEDED = 0xC0000047, + MD_NTSTATUS_WIN_STATUS_PORT_ALREADY_SET = 0xC0000048, + MD_NTSTATUS_WIN_STATUS_SECTION_NOT_IMAGE = 0xC0000049, + MD_NTSTATUS_WIN_STATUS_SUSPEND_COUNT_EXCEEDED = 0xC000004A, + MD_NTSTATUS_WIN_STATUS_THREAD_IS_TERMINATING = 0xC000004B, + MD_NTSTATUS_WIN_STATUS_BAD_WORKING_SET_LIMIT = 0xC000004C, + MD_NTSTATUS_WIN_STATUS_INCOMPATIBLE_FILE_MAP = 0xC000004D, + MD_NTSTATUS_WIN_STATUS_SECTION_PROTECTION = 0xC000004E, + MD_NTSTATUS_WIN_STATUS_EAS_NOT_SUPPORTED = 0xC000004F, + MD_NTSTATUS_WIN_STATUS_EA_TOO_LARGE = 0xC0000050, + MD_NTSTATUS_WIN_STATUS_NONEXISTENT_EA_ENTRY = 0xC0000051, + MD_NTSTATUS_WIN_STATUS_NO_EAS_ON_FILE = 0xC0000052, + MD_NTSTATUS_WIN_STATUS_EA_CORRUPT_ERROR = 0xC0000053, + MD_NTSTATUS_WIN_STATUS_FILE_LOCK_CONFLICT = 0xC0000054, + MD_NTSTATUS_WIN_STATUS_LOCK_NOT_GRANTED = 0xC0000055, + MD_NTSTATUS_WIN_STATUS_DELETE_PENDING = 0xC0000056, + MD_NTSTATUS_WIN_STATUS_CTL_FILE_NOT_SUPPORTED = 0xC0000057, + MD_NTSTATUS_WIN_STATUS_UNKNOWN_REVISION = 0xC0000058, + MD_NTSTATUS_WIN_STATUS_REVISION_MISMATCH = 0xC0000059, + MD_NTSTATUS_WIN_STATUS_INVALID_OWNER = 0xC000005A, + MD_NTSTATUS_WIN_STATUS_INVALID_PRIMARY_GROUP = 0xC000005B, + MD_NTSTATUS_WIN_STATUS_NO_IMPERSONATION_TOKEN = 0xC000005C, + MD_NTSTATUS_WIN_STATUS_CANT_DISABLE_MANDATORY = 0xC000005D, + MD_NTSTATUS_WIN_STATUS_NO_LOGON_SERVERS = 0xC000005E, + MD_NTSTATUS_WIN_STATUS_NO_SUCH_LOGON_SESSION = 0xC000005F, + MD_NTSTATUS_WIN_STATUS_NO_SUCH_PRIVILEGE = 0xC0000060, + MD_NTSTATUS_WIN_STATUS_PRIVILEGE_NOT_HELD = 0xC0000061, + MD_NTSTATUS_WIN_STATUS_INVALID_ACCOUNT_NAME = 0xC0000062, + MD_NTSTATUS_WIN_STATUS_USER_EXISTS = 0xC0000063, + MD_NTSTATUS_WIN_STATUS_NO_SUCH_USER = 0xC0000064, + MD_NTSTATUS_WIN_STATUS_GROUP_EXISTS = 0xC0000065, + MD_NTSTATUS_WIN_STATUS_NO_SUCH_GROUP = 0xC0000066, + MD_NTSTATUS_WIN_STATUS_MEMBER_IN_GROUP = 0xC0000067, + MD_NTSTATUS_WIN_STATUS_MEMBER_NOT_IN_GROUP = 0xC0000068, + MD_NTSTATUS_WIN_STATUS_LAST_ADMIN = 0xC0000069, + MD_NTSTATUS_WIN_STATUS_WRONG_PASSWORD = 0xC000006A, + MD_NTSTATUS_WIN_STATUS_ILL_FORMED_PASSWORD = 0xC000006B, + MD_NTSTATUS_WIN_STATUS_PASSWORD_RESTRICTION = 0xC000006C, + MD_NTSTATUS_WIN_STATUS_LOGON_FAILURE = 0xC000006D, + MD_NTSTATUS_WIN_STATUS_ACCOUNT_RESTRICTION = 0xC000006E, + MD_NTSTATUS_WIN_STATUS_INVALID_LOGON_HOURS = 0xC000006F, + MD_NTSTATUS_WIN_STATUS_INVALID_WORKSTATION = 0xC0000070, + MD_NTSTATUS_WIN_STATUS_PASSWORD_EXPIRED = 0xC0000071, + MD_NTSTATUS_WIN_STATUS_ACCOUNT_DISABLED = 0xC0000072, + MD_NTSTATUS_WIN_STATUS_NONE_MAPPED = 0xC0000073, + MD_NTSTATUS_WIN_STATUS_TOO_MANY_LUIDS_REQUESTED = 0xC0000074, + MD_NTSTATUS_WIN_STATUS_LUIDS_EXHAUSTED = 0xC0000075, + MD_NTSTATUS_WIN_STATUS_INVALID_SUB_AUTHORITY = 0xC0000076, + MD_NTSTATUS_WIN_STATUS_INVALID_ACL = 0xC0000077, + MD_NTSTATUS_WIN_STATUS_INVALID_SID = 0xC0000078, + MD_NTSTATUS_WIN_STATUS_INVALID_SECURITY_DESCR = 0xC0000079, + MD_NTSTATUS_WIN_STATUS_PROCEDURE_NOT_FOUND = 0xC000007A, + MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_FORMAT = 0xC000007B, + MD_NTSTATUS_WIN_STATUS_NO_TOKEN = 0xC000007C, + MD_NTSTATUS_WIN_STATUS_BAD_INHERITANCE_ACL = 0xC000007D, + 
MD_NTSTATUS_WIN_STATUS_RANGE_NOT_LOCKED = 0xC000007E,
+  MD_NTSTATUS_WIN_STATUS_DISK_FULL = 0xC000007F,
+  MD_NTSTATUS_WIN_STATUS_SERVER_DISABLED = 0xC0000080,
+  MD_NTSTATUS_WIN_STATUS_SERVER_NOT_DISABLED = 0xC0000081,
+  MD_NTSTATUS_WIN_STATUS_TOO_MANY_GUIDS_REQUESTED = 0xC0000082,
+  MD_NTSTATUS_WIN_STATUS_GUIDS_EXHAUSTED = 0xC0000083,
+  MD_NTSTATUS_WIN_STATUS_INVALID_ID_AUTHORITY = 0xC0000084,
+  MD_NTSTATUS_WIN_STATUS_AGENTS_EXHAUSTED = 0xC0000085,
+  MD_NTSTATUS_WIN_STATUS_INVALID_VOLUME_LABEL = 0xC0000086,
+  MD_NTSTATUS_WIN_STATUS_SECTION_NOT_EXTENDED = 0xC0000087,
+  MD_NTSTATUS_WIN_STATUS_NOT_MAPPED_DATA = 0xC0000088,
+  MD_NTSTATUS_WIN_STATUS_RESOURCE_DATA_NOT_FOUND = 0xC0000089,
+  MD_NTSTATUS_WIN_STATUS_RESOURCE_TYPE_NOT_FOUND = 0xC000008A,
+  MD_NTSTATUS_WIN_STATUS_RESOURCE_NAME_NOT_FOUND = 0xC000008B,
+  MD_NTSTATUS_WIN_STATUS_ARRAY_BOUNDS_EXCEEDED = 0xC000008C,
+  MD_NTSTATUS_WIN_STATUS_FLOAT_DENORMAL_OPERAND = 0xC000008D,
+  MD_NTSTATUS_WIN_STATUS_FLOAT_DIVIDE_BY_ZERO = 0xC000008E,
+  MD_NTSTATUS_WIN_STATUS_FLOAT_INEXACT_RESULT = 0xC000008F,
+  MD_NTSTATUS_WIN_STATUS_FLOAT_INVALID_OPERATION = 0xC0000090,
+  MD_NTSTATUS_WIN_STATUS_FLOAT_OVERFLOW = 0xC0000091,
+  MD_NTSTATUS_WIN_STATUS_FLOAT_STACK_CHECK = 0xC0000092,
+  MD_NTSTATUS_WIN_STATUS_FLOAT_UNDERFLOW = 0xC0000093,
+  MD_NTSTATUS_WIN_STATUS_INTEGER_DIVIDE_BY_ZERO = 0xC0000094,
+  MD_NTSTATUS_WIN_STATUS_INTEGER_OVERFLOW = 0xC0000095,
+  MD_NTSTATUS_WIN_STATUS_PRIVILEGED_INSTRUCTION = 0xC0000096,
+  MD_NTSTATUS_WIN_STATUS_TOO_MANY_PAGING_FILES = 0xC0000097,
+  MD_NTSTATUS_WIN_STATUS_FILE_INVALID = 0xC0000098,
+  MD_NTSTATUS_WIN_STATUS_ALLOTTED_SPACE_EXCEEDED = 0xC0000099,
+  MD_NTSTATUS_WIN_STATUS_INSUFFICIENT_RESOURCES = 0xC000009A,
+  MD_NTSTATUS_WIN_STATUS_DFS_EXIT_PATH_FOUND = 0xC000009B,
+  MD_NTSTATUS_WIN_STATUS_DEVICE_DATA_ERROR = 0xC000009C,
+  MD_NTSTATUS_WIN_STATUS_DEVICE_NOT_CONNECTED = 0xC000009D,
+  MD_NTSTATUS_WIN_STATUS_DEVICE_POWER_FAILURE = 0xC000009E,
+  MD_NTSTATUS_WIN_STATUS_FREE_VM_NOT_AT_BASE = 0xC000009F,
+  MD_NTSTATUS_WIN_STATUS_MEMORY_NOT_ALLOCATED = 0xC00000A0,
+  MD_NTSTATUS_WIN_STATUS_WORKING_SET_QUOTA = 0xC00000A1,
+  MD_NTSTATUS_WIN_STATUS_MEDIA_WRITE_PROTECTED = 0xC00000A2,
+  MD_NTSTATUS_WIN_STATUS_DEVICE_NOT_READY = 0xC00000A3,
+  MD_NTSTATUS_WIN_STATUS_INVALID_GROUP_ATTRIBUTES = 0xC00000A4,
+  MD_NTSTATUS_WIN_STATUS_BAD_IMPERSONATION_LEVEL = 0xC00000A5,
+  MD_NTSTATUS_WIN_STATUS_CANT_OPEN_ANONYMOUS = 0xC00000A6,
+  MD_NTSTATUS_WIN_STATUS_BAD_VALIDATION_CLASS = 0xC00000A7,
+  MD_NTSTATUS_WIN_STATUS_BAD_TOKEN_TYPE = 0xC00000A8,
+  MD_NTSTATUS_WIN_STATUS_BAD_MASTER_BOOT_RECORD = 0xC00000A9,
+  MD_NTSTATUS_WIN_STATUS_INSTRUCTION_MISALIGNMENT = 0xC00000AA,
+  MD_NTSTATUS_WIN_STATUS_INSTANCE_NOT_AVAILABLE = 0xC00000AB,
+  MD_NTSTATUS_WIN_STATUS_PIPE_NOT_AVAILABLE = 0xC00000AC,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PIPE_STATE = 0xC00000AD,
+  MD_NTSTATUS_WIN_STATUS_PIPE_BUSY = 0xC00000AE,
+  MD_NTSTATUS_WIN_STATUS_ILLEGAL_FUNCTION = 0xC00000AF,
+  MD_NTSTATUS_WIN_STATUS_PIPE_DISCONNECTED = 0xC00000B0,
+  MD_NTSTATUS_WIN_STATUS_PIPE_CLOSING = 0xC00000B1,
+  MD_NTSTATUS_WIN_STATUS_PIPE_CONNECTED = 0xC00000B2,
+  MD_NTSTATUS_WIN_STATUS_PIPE_LISTENING = 0xC00000B3,
+  MD_NTSTATUS_WIN_STATUS_INVALID_READ_MODE = 0xC00000B4,
+  MD_NTSTATUS_WIN_STATUS_IO_TIMEOUT = 0xC00000B5,
+  MD_NTSTATUS_WIN_STATUS_FILE_FORCED_CLOSED = 0xC00000B6,
+  MD_NTSTATUS_WIN_STATUS_PROFILING_NOT_STARTED = 0xC00000B7,
+  MD_NTSTATUS_WIN_STATUS_PROFILING_NOT_STOPPED = 0xC00000B8,
+  MD_NTSTATUS_WIN_STATUS_COULD_NOT_INTERPRET = 0xC00000B9,
+  MD_NTSTATUS_WIN_STATUS_FILE_IS_A_DIRECTORY = 0xC00000BA,
+  MD_NTSTATUS_WIN_STATUS_NOT_SUPPORTED = 0xC00000BB,
+  MD_NTSTATUS_WIN_STATUS_REMOTE_NOT_LISTENING = 0xC00000BC,
+  MD_NTSTATUS_WIN_STATUS_DUPLICATE_NAME = 0xC00000BD,
+  MD_NTSTATUS_WIN_STATUS_BAD_NETWORK_PATH = 0xC00000BE,
+  MD_NTSTATUS_WIN_STATUS_NETWORK_BUSY = 0xC00000BF,
+  MD_NTSTATUS_WIN_STATUS_DEVICE_DOES_NOT_EXIST = 0xC00000C0,
+  MD_NTSTATUS_WIN_STATUS_TOO_MANY_COMMANDS = 0xC00000C1,
+  MD_NTSTATUS_WIN_STATUS_ADAPTER_HARDWARE_ERROR = 0xC00000C2,
+  MD_NTSTATUS_WIN_STATUS_INVALID_NETWORK_RESPONSE = 0xC00000C3,
+  MD_NTSTATUS_WIN_STATUS_UNEXPECTED_NETWORK_ERROR = 0xC00000C4,
+  MD_NTSTATUS_WIN_STATUS_BAD_REMOTE_ADAPTER = 0xC00000C5,
+  MD_NTSTATUS_WIN_STATUS_PRINT_QUEUE_FULL = 0xC00000C6,
+  MD_NTSTATUS_WIN_STATUS_NO_SPOOL_SPACE = 0xC00000C7,
+  MD_NTSTATUS_WIN_STATUS_PRINT_CANCELLED = 0xC00000C8,
+  MD_NTSTATUS_WIN_STATUS_NETWORK_NAME_DELETED = 0xC00000C9,
+  MD_NTSTATUS_WIN_STATUS_NETWORK_ACCESS_DENIED = 0xC00000CA,
+  MD_NTSTATUS_WIN_STATUS_BAD_DEVICE_TYPE = 0xC00000CB,
+  MD_NTSTATUS_WIN_STATUS_BAD_NETWORK_NAME = 0xC00000CC,
+  MD_NTSTATUS_WIN_STATUS_TOO_MANY_NAMES = 0xC00000CD,
+  MD_NTSTATUS_WIN_STATUS_TOO_MANY_SESSIONS = 0xC00000CE,
+  MD_NTSTATUS_WIN_STATUS_SHARING_PAUSED = 0xC00000CF,
+  MD_NTSTATUS_WIN_STATUS_REQUEST_NOT_ACCEPTED = 0xC00000D0,
+  MD_NTSTATUS_WIN_STATUS_REDIRECTOR_PAUSED = 0xC00000D1,
+  MD_NTSTATUS_WIN_STATUS_NET_WRITE_FAULT = 0xC00000D2,
+  MD_NTSTATUS_WIN_STATUS_PROFILING_AT_LIMIT = 0xC00000D3,
+  MD_NTSTATUS_WIN_STATUS_NOT_SAME_DEVICE = 0xC00000D4,
+  MD_NTSTATUS_WIN_STATUS_FILE_RENAMED = 0xC00000D5,
+  MD_NTSTATUS_WIN_STATUS_VIRTUAL_CIRCUIT_CLOSED = 0xC00000D6,
+  MD_NTSTATUS_WIN_STATUS_NO_SECURITY_ON_OBJECT = 0xC00000D7,
+  MD_NTSTATUS_WIN_STATUS_CANT_WAIT = 0xC00000D8,
+  MD_NTSTATUS_WIN_STATUS_PIPE_EMPTY = 0xC00000D9,
+  MD_NTSTATUS_WIN_STATUS_CANT_ACCESS_DOMAIN_INFO = 0xC00000DA,
+  MD_NTSTATUS_WIN_STATUS_CANT_TERMINATE_SELF = 0xC00000DB,
+  MD_NTSTATUS_WIN_STATUS_INVALID_SERVER_STATE = 0xC00000DC,
+  MD_NTSTATUS_WIN_STATUS_INVALID_DOMAIN_STATE = 0xC00000DD,
+  MD_NTSTATUS_WIN_STATUS_INVALID_DOMAIN_ROLE = 0xC00000DE,
+  MD_NTSTATUS_WIN_STATUS_NO_SUCH_DOMAIN = 0xC00000DF,
+  MD_NTSTATUS_WIN_STATUS_DOMAIN_EXISTS = 0xC00000E0,
+  MD_NTSTATUS_WIN_STATUS_DOMAIN_LIMIT_EXCEEDED = 0xC00000E1,
+  MD_NTSTATUS_WIN_STATUS_OPLOCK_NOT_GRANTED = 0xC00000E2,
+  MD_NTSTATUS_WIN_STATUS_INVALID_OPLOCK_PROTOCOL = 0xC00000E3,
+  MD_NTSTATUS_WIN_STATUS_INTERNAL_DB_CORRUPTION = 0xC00000E4,
+  MD_NTSTATUS_WIN_STATUS_INTERNAL_ERROR = 0xC00000E5,
+  MD_NTSTATUS_WIN_STATUS_GENERIC_NOT_MAPPED = 0xC00000E6,
+  MD_NTSTATUS_WIN_STATUS_BAD_DESCRIPTOR_FORMAT = 0xC00000E7,
+  MD_NTSTATUS_WIN_STATUS_INVALID_USER_BUFFER = 0xC00000E8,
+  MD_NTSTATUS_WIN_STATUS_UNEXPECTED_IO_ERROR = 0xC00000E9,
+  MD_NTSTATUS_WIN_STATUS_UNEXPECTED_MM_CREATE_ERR = 0xC00000EA,
+  MD_NTSTATUS_WIN_STATUS_UNEXPECTED_MM_MAP_ERROR = 0xC00000EB,
+  MD_NTSTATUS_WIN_STATUS_UNEXPECTED_MM_EXTEND_ERR = 0xC00000EC,
+  MD_NTSTATUS_WIN_STATUS_NOT_LOGON_PROCESS = 0xC00000ED,
+  MD_NTSTATUS_WIN_STATUS_LOGON_SESSION_EXISTS = 0xC00000EE,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_1 = 0xC00000EF,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_2 = 0xC00000F0,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_3 = 0xC00000F1,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_4 = 0xC00000F2,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_5 = 0xC00000F3,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_6 = 0xC00000F4,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_7 = 0xC00000F5,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_8 = 0xC00000F6,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_9 = 0xC00000F7,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_10 = 0xC00000F8,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_11 = 0xC00000F9,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_12 = 0xC00000FA,
+  MD_NTSTATUS_WIN_STATUS_REDIRECTOR_NOT_STARTED = 0xC00000FB,
+  MD_NTSTATUS_WIN_STATUS_REDIRECTOR_STARTED = 0xC00000FC,
+  MD_NTSTATUS_WIN_STATUS_STACK_OVERFLOW = 0xC00000FD,
+  MD_NTSTATUS_WIN_STATUS_NO_SUCH_PACKAGE = 0xC00000FE,
+  MD_NTSTATUS_WIN_STATUS_BAD_FUNCTION_TABLE = 0xC00000FF,
+  MD_NTSTATUS_WIN_STATUS_VARIABLE_NOT_FOUND = 0xC0000100,
+  MD_NTSTATUS_WIN_STATUS_DIRECTORY_NOT_EMPTY = 0xC0000101,
+  MD_NTSTATUS_WIN_STATUS_FILE_CORRUPT_ERROR = 0xC0000102,
+  MD_NTSTATUS_WIN_STATUS_NOT_A_DIRECTORY = 0xC0000103,
+  MD_NTSTATUS_WIN_STATUS_BAD_LOGON_SESSION_STATE = 0xC0000104,
+  MD_NTSTATUS_WIN_STATUS_LOGON_SESSION_COLLISION = 0xC0000105,
+  MD_NTSTATUS_WIN_STATUS_NAME_TOO_LONG = 0xC0000106,
+  MD_NTSTATUS_WIN_STATUS_FILES_OPEN = 0xC0000107,
+  MD_NTSTATUS_WIN_STATUS_CONNECTION_IN_USE = 0xC0000108,
+  MD_NTSTATUS_WIN_STATUS_MESSAGE_NOT_FOUND = 0xC0000109,
+  MD_NTSTATUS_WIN_STATUS_PROCESS_IS_TERMINATING = 0xC000010A,
+  MD_NTSTATUS_WIN_STATUS_INVALID_LOGON_TYPE = 0xC000010B,
+  MD_NTSTATUS_WIN_STATUS_NO_GUID_TRANSLATION = 0xC000010C,
+  MD_NTSTATUS_WIN_STATUS_CANNOT_IMPERSONATE = 0xC000010D,
+  MD_NTSTATUS_WIN_STATUS_IMAGE_ALREADY_LOADED = 0xC000010E,
+  MD_NTSTATUS_WIN_STATUS_ABIOS_NOT_PRESENT = 0xC000010F,
+  MD_NTSTATUS_WIN_STATUS_ABIOS_LID_NOT_EXIST = 0xC0000110,
+  MD_NTSTATUS_WIN_STATUS_ABIOS_LID_ALREADY_OWNED = 0xC0000111,
+  MD_NTSTATUS_WIN_STATUS_ABIOS_NOT_LID_OWNER = 0xC0000112,
+  MD_NTSTATUS_WIN_STATUS_ABIOS_INVALID_COMMAND = 0xC0000113,
+  MD_NTSTATUS_WIN_STATUS_ABIOS_INVALID_LID = 0xC0000114,
+  MD_NTSTATUS_WIN_STATUS_ABIOS_SELECTOR_NOT_AVAILABLE = 0xC0000115,
+  MD_NTSTATUS_WIN_STATUS_ABIOS_INVALID_SELECTOR = 0xC0000116,
+  MD_NTSTATUS_WIN_STATUS_NO_LDT = 0xC0000117,
+  MD_NTSTATUS_WIN_STATUS_INVALID_LDT_SIZE = 0xC0000118,
+  MD_NTSTATUS_WIN_STATUS_INVALID_LDT_OFFSET = 0xC0000119,
+  MD_NTSTATUS_WIN_STATUS_INVALID_LDT_DESCRIPTOR = 0xC000011A,
+  MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_NE_FORMAT = 0xC000011B,
+  MD_NTSTATUS_WIN_STATUS_RXACT_INVALID_STATE = 0xC000011C,
+  MD_NTSTATUS_WIN_STATUS_RXACT_COMMIT_FAILURE = 0xC000011D,
+  MD_NTSTATUS_WIN_STATUS_MAPPED_FILE_SIZE_ZERO = 0xC000011E,
+  MD_NTSTATUS_WIN_STATUS_TOO_MANY_OPENED_FILES = 0xC000011F,
+  MD_NTSTATUS_WIN_STATUS_CANCELLED = 0xC0000120,
+  MD_NTSTATUS_WIN_STATUS_CANNOT_DELETE = 0xC0000121,
+  MD_NTSTATUS_WIN_STATUS_INVALID_COMPUTER_NAME = 0xC0000122,
+  MD_NTSTATUS_WIN_STATUS_FILE_DELETED = 0xC0000123,
+  MD_NTSTATUS_WIN_STATUS_SPECIAL_ACCOUNT = 0xC0000124,
+  MD_NTSTATUS_WIN_STATUS_SPECIAL_GROUP = 0xC0000125,
+  MD_NTSTATUS_WIN_STATUS_SPECIAL_USER = 0xC0000126,
+  MD_NTSTATUS_WIN_STATUS_MEMBERS_PRIMARY_GROUP = 0xC0000127,
+  MD_NTSTATUS_WIN_STATUS_FILE_CLOSED = 0xC0000128,
+  MD_NTSTATUS_WIN_STATUS_TOO_MANY_THREADS = 0xC0000129,
+  MD_NTSTATUS_WIN_STATUS_THREAD_NOT_IN_PROCESS = 0xC000012A,
+  MD_NTSTATUS_WIN_STATUS_TOKEN_ALREADY_IN_USE = 0xC000012B,
+  MD_NTSTATUS_WIN_STATUS_PAGEFILE_QUOTA_EXCEEDED = 0xC000012C,
+  MD_NTSTATUS_WIN_STATUS_COMMITMENT_LIMIT = 0xC000012D,
+  MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_LE_FORMAT = 0xC000012E,
+  MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_NOT_MZ = 0xC000012F,
+  MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_PROTECT = 0xC0000130,
+  MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_WIN_16 = 0xC0000131,
+  MD_NTSTATUS_WIN_STATUS_LOGON_SERVER_CONFLICT = 0xC0000132,
+  MD_NTSTATUS_WIN_STATUS_TIME_DIFFERENCE_AT_DC = 0xC0000133,
+  MD_NTSTATUS_WIN_STATUS_SYNCHRONIZATION_REQUIRED = 0xC0000134,
+  MD_NTSTATUS_WIN_STATUS_DLL_NOT_FOUND = 0xC0000135,
+  MD_NTSTATUS_WIN_STATUS_OPEN_FAILED = 0xC0000136,
+  MD_NTSTATUS_WIN_STATUS_IO_PRIVILEGE_FAILED = 0xC0000137,
+  MD_NTSTATUS_WIN_STATUS_ORDINAL_NOT_FOUND = 0xC0000138,
+  MD_NTSTATUS_WIN_STATUS_ENTRYPOINT_NOT_FOUND = 0xC0000139,
+  MD_NTSTATUS_WIN_STATUS_CONTROL_C_EXIT = 0xC000013A,
+  MD_NTSTATUS_WIN_STATUS_LOCAL_DISCONNECT = 0xC000013B,
+  MD_NTSTATUS_WIN_STATUS_REMOTE_DISCONNECT = 0xC000013C,
+  MD_NTSTATUS_WIN_STATUS_REMOTE_RESOURCES = 0xC000013D,
+  MD_NTSTATUS_WIN_STATUS_LINK_FAILED = 0xC000013E,
+  MD_NTSTATUS_WIN_STATUS_LINK_TIMEOUT = 0xC000013F,
+  MD_NTSTATUS_WIN_STATUS_INVALID_CONNECTION = 0xC0000140,
+  MD_NTSTATUS_WIN_STATUS_INVALID_ADDRESS = 0xC0000141,
+  MD_NTSTATUS_WIN_STATUS_DLL_INIT_FAILED = 0xC0000142,
+  MD_NTSTATUS_WIN_STATUS_MISSING_SYSTEMFILE = 0xC0000143,
+  MD_NTSTATUS_WIN_STATUS_UNHANDLED_EXCEPTION = 0xC0000144,
+  MD_NTSTATUS_WIN_STATUS_APP_INIT_FAILURE = 0xC0000145,
+  MD_NTSTATUS_WIN_STATUS_PAGEFILE_CREATE_FAILED = 0xC0000146,
+  MD_NTSTATUS_WIN_STATUS_NO_PAGEFILE = 0xC0000147,
+  MD_NTSTATUS_WIN_STATUS_INVALID_LEVEL = 0xC0000148,
+  MD_NTSTATUS_WIN_STATUS_WRONG_PASSWORD_CORE = 0xC0000149,
+  MD_NTSTATUS_WIN_STATUS_ILLEGAL_FLOAT_CONTEXT = 0xC000014A,
+  MD_NTSTATUS_WIN_STATUS_PIPE_BROKEN = 0xC000014B,
+  MD_NTSTATUS_WIN_STATUS_REGISTRY_CORRUPT = 0xC000014C,
+  MD_NTSTATUS_WIN_STATUS_REGISTRY_IO_FAILED = 0xC000014D,
+  MD_NTSTATUS_WIN_STATUS_NO_EVENT_PAIR = 0xC000014E,
+  MD_NTSTATUS_WIN_STATUS_UNRECOGNIZED_VOLUME = 0xC000014F,
+  MD_NTSTATUS_WIN_STATUS_SERIAL_NO_DEVICE_INITED = 0xC0000150,
+  MD_NTSTATUS_WIN_STATUS_NO_SUCH_ALIAS = 0xC0000151,
+  MD_NTSTATUS_WIN_STATUS_MEMBER_NOT_IN_ALIAS = 0xC0000152,
+  MD_NTSTATUS_WIN_STATUS_MEMBER_IN_ALIAS = 0xC0000153,
+  MD_NTSTATUS_WIN_STATUS_ALIAS_EXISTS = 0xC0000154,
+  MD_NTSTATUS_WIN_STATUS_LOGON_NOT_GRANTED = 0xC0000155,
+  MD_NTSTATUS_WIN_STATUS_TOO_MANY_SECRETS = 0xC0000156,
+  MD_NTSTATUS_WIN_STATUS_SECRET_TOO_LONG = 0xC0000157,
+  MD_NTSTATUS_WIN_STATUS_INTERNAL_DB_ERROR = 0xC0000158,
+  MD_NTSTATUS_WIN_STATUS_FULLSCREEN_MODE = 0xC0000159,
+  MD_NTSTATUS_WIN_STATUS_TOO_MANY_CONTEXT_IDS = 0xC000015A,
+  MD_NTSTATUS_WIN_STATUS_LOGON_TYPE_NOT_GRANTED = 0xC000015B,
+  MD_NTSTATUS_WIN_STATUS_NOT_REGISTRY_FILE = 0xC000015C,
+  MD_NTSTATUS_WIN_STATUS_NT_CROSS_ENCRYPTION_REQUIRED = 0xC000015D,
+  MD_NTSTATUS_WIN_STATUS_DOMAIN_CTRLR_CONFIG_ERROR = 0xC000015E,
+  MD_NTSTATUS_WIN_STATUS_FT_MISSING_MEMBER = 0xC000015F,
+  MD_NTSTATUS_WIN_STATUS_ILL_FORMED_SERVICE_ENTRY = 0xC0000160,
+  MD_NTSTATUS_WIN_STATUS_ILLEGAL_CHARACTER = 0xC0000161,
+  MD_NTSTATUS_WIN_STATUS_UNMAPPABLE_CHARACTER = 0xC0000162,
+  MD_NTSTATUS_WIN_STATUS_UNDEFINED_CHARACTER = 0xC0000163,
+  MD_NTSTATUS_WIN_STATUS_FLOPPY_VOLUME = 0xC0000164,
+  MD_NTSTATUS_WIN_STATUS_FLOPPY_ID_MARK_NOT_FOUND = 0xC0000165,
+  MD_NTSTATUS_WIN_STATUS_FLOPPY_WRONG_CYLINDER = 0xC0000166,
+  MD_NTSTATUS_WIN_STATUS_FLOPPY_UNKNOWN_ERROR = 0xC0000167,
+  MD_NTSTATUS_WIN_STATUS_FLOPPY_BAD_REGISTERS = 0xC0000168,
+  MD_NTSTATUS_WIN_STATUS_DISK_RECALIBRATE_FAILED = 0xC0000169,
+  MD_NTSTATUS_WIN_STATUS_DISK_OPERATION_FAILED = 0xC000016A,
+  MD_NTSTATUS_WIN_STATUS_DISK_RESET_FAILED = 0xC000016B,
+  MD_NTSTATUS_WIN_STATUS_SHARED_IRQ_BUSY = 0xC000016C,
+  MD_NTSTATUS_WIN_STATUS_FT_ORPHANING = 0xC000016D,
+  MD_NTSTATUS_WIN_STATUS_BIOS_FAILED_TO_CONNECT_INTERRUPT = 0xC000016E,
+  MD_NTSTATUS_WIN_STATUS_PARTITION_FAILURE = 0xC0000172,
+  MD_NTSTATUS_WIN_STATUS_INVALID_BLOCK_LENGTH = 0xC0000173,
+  MD_NTSTATUS_WIN_STATUS_DEVICE_NOT_PARTITIONED = 0xC0000174,
+  MD_NTSTATUS_WIN_STATUS_UNABLE_TO_LOCK_MEDIA = 0xC0000175,
+  MD_NTSTATUS_WIN_STATUS_UNABLE_TO_UNLOAD_MEDIA = 0xC0000176,
+  MD_NTSTATUS_WIN_STATUS_EOM_OVERFLOW = 0xC0000177,
+  MD_NTSTATUS_WIN_STATUS_NO_MEDIA = 0xC0000178,
+  MD_NTSTATUS_WIN_STATUS_NO_SUCH_MEMBER = 0xC000017A,
+  MD_NTSTATUS_WIN_STATUS_INVALID_MEMBER = 0xC000017B,
+  MD_NTSTATUS_WIN_STATUS_KEY_DELETED = 0xC000017C,
+  MD_NTSTATUS_WIN_STATUS_NO_LOG_SPACE = 0xC000017D,
+  MD_NTSTATUS_WIN_STATUS_TOO_MANY_SIDS = 0xC000017E,
+  MD_NTSTATUS_WIN_STATUS_LM_CROSS_ENCRYPTION_REQUIRED = 0xC000017F,
+  MD_NTSTATUS_WIN_STATUS_KEY_HAS_CHILDREN = 0xC0000180,
+  MD_NTSTATUS_WIN_STATUS_CHILD_MUST_BE_VOLATILE = 0xC0000181,
+  MD_NTSTATUS_WIN_STATUS_DEVICE_CONFIGURATION_ERROR = 0xC0000182,
+  MD_NTSTATUS_WIN_STATUS_DRIVER_INTERNAL_ERROR = 0xC0000183,
+  MD_NTSTATUS_WIN_STATUS_INVALID_DEVICE_STATE = 0xC0000184,
+  MD_NTSTATUS_WIN_STATUS_IO_DEVICE_ERROR = 0xC0000185,
+  MD_NTSTATUS_WIN_STATUS_DEVICE_PROTOCOL_ERROR = 0xC0000186,
+  MD_NTSTATUS_WIN_STATUS_BACKUP_CONTROLLER = 0xC0000187,
+  MD_NTSTATUS_WIN_STATUS_LOG_FILE_FULL = 0xC0000188,
+  MD_NTSTATUS_WIN_STATUS_TOO_LATE = 0xC0000189,
+  MD_NTSTATUS_WIN_STATUS_NO_TRUST_LSA_SECRET = 0xC000018A,
+  MD_NTSTATUS_WIN_STATUS_NO_TRUST_SAM_ACCOUNT = 0xC000018B,
+  MD_NTSTATUS_WIN_STATUS_TRUSTED_DOMAIN_FAILURE = 0xC000018C,
+  MD_NTSTATUS_WIN_STATUS_TRUSTED_RELATIONSHIP_FAILURE = 0xC000018D,
+  MD_NTSTATUS_WIN_STATUS_EVENTLOG_FILE_CORRUPT = 0xC000018E,
+  MD_NTSTATUS_WIN_STATUS_EVENTLOG_CANT_START = 0xC000018F,
+  MD_NTSTATUS_WIN_STATUS_TRUST_FAILURE = 0xC0000190,
+  MD_NTSTATUS_WIN_STATUS_MUTANT_LIMIT_EXCEEDED = 0xC0000191,
+  MD_NTSTATUS_WIN_STATUS_NETLOGON_NOT_STARTED = 0xC0000192,
+  MD_NTSTATUS_WIN_STATUS_ACCOUNT_EXPIRED = 0xC0000193,
+  MD_NTSTATUS_WIN_STATUS_POSSIBLE_DEADLOCK = 0xC0000194,
+  MD_NTSTATUS_WIN_STATUS_NETWORK_CREDENTIAL_CONFLICT = 0xC0000195,
+  MD_NTSTATUS_WIN_STATUS_REMOTE_SESSION_LIMIT = 0xC0000196,
+  MD_NTSTATUS_WIN_STATUS_EVENTLOG_FILE_CHANGED = 0xC0000197,
+  MD_NTSTATUS_WIN_STATUS_NOLOGON_INTERDOMAIN_TRUST_ACCOUNT = 0xC0000198,
+  MD_NTSTATUS_WIN_STATUS_NOLOGON_WORKSTATION_TRUST_ACCOUNT = 0xC0000199,
+  MD_NTSTATUS_WIN_STATUS_NOLOGON_SERVER_TRUST_ACCOUNT = 0xC000019A,
+  MD_NTSTATUS_WIN_STATUS_DOMAIN_TRUST_INCONSISTENT = 0xC000019B,
+  MD_NTSTATUS_WIN_STATUS_FS_DRIVER_REQUIRED = 0xC000019C,
+  MD_NTSTATUS_WIN_STATUS_IMAGE_ALREADY_LOADED_AS_DLL = 0xC000019D,
+  MD_NTSTATUS_WIN_STATUS_INCOMPATIBLE_WITH_GLOBAL_SHORT_NAME_REGISTRY_SETTING = 0xC000019E,
+  MD_NTSTATUS_WIN_STATUS_SHORT_NAMES_NOT_ENABLED_ON_VOLUME = 0xC000019F,
+  MD_NTSTATUS_WIN_STATUS_SECURITY_STREAM_IS_INCONSISTENT = 0xC00001A0,
+  MD_NTSTATUS_WIN_STATUS_INVALID_LOCK_RANGE = 0xC00001A1,
+  MD_NTSTATUS_WIN_STATUS_INVALID_ACE_CONDITION = 0xC00001A2,
+  MD_NTSTATUS_WIN_STATUS_IMAGE_SUBSYSTEM_NOT_PRESENT = 0xC00001A3,
+  MD_NTSTATUS_WIN_STATUS_NOTIFICATION_GUID_ALREADY_DEFINED = 0xC00001A4,
+  MD_NTSTATUS_WIN_STATUS_INVALID_EXCEPTION_HANDLER = 0xC00001A5,
+  MD_NTSTATUS_WIN_STATUS_DUPLICATE_PRIVILEGES = 0xC00001A6,
+  MD_NTSTATUS_WIN_STATUS_NOT_ALLOWED_ON_SYSTEM_FILE = 0xC00001A7,
+  MD_NTSTATUS_WIN_STATUS_REPAIR_NEEDED = 0xC00001A8,
+  MD_NTSTATUS_WIN_STATUS_QUOTA_NOT_ENABLED = 0xC00001A9,
+  MD_NTSTATUS_WIN_STATUS_NO_APPLICATION_PACKAGE = 0xC00001AA,
+  MD_NTSTATUS_WIN_STATUS_NETWORK_OPEN_RESTRICTION = 0xC0000201,
+  MD_NTSTATUS_WIN_STATUS_NO_USER_SESSION_KEY = 0xC0000202,
+  MD_NTSTATUS_WIN_STATUS_USER_SESSION_DELETED = 0xC0000203,
+  MD_NTSTATUS_WIN_STATUS_RESOURCE_LANG_NOT_FOUND = 0xC0000204,
+  MD_NTSTATUS_WIN_STATUS_INSUFF_SERVER_RESOURCES = 0xC0000205,
+  MD_NTSTATUS_WIN_STATUS_INVALID_BUFFER_SIZE = 0xC0000206,
+  MD_NTSTATUS_WIN_STATUS_INVALID_ADDRESS_COMPONENT = 0xC0000207,
+  MD_NTSTATUS_WIN_STATUS_INVALID_ADDRESS_WILDCARD = 0xC0000208,
+  MD_NTSTATUS_WIN_STATUS_TOO_MANY_ADDRESSES = 0xC0000209,
+  MD_NTSTATUS_WIN_STATUS_ADDRESS_ALREADY_EXISTS = 0xC000020A,
+  MD_NTSTATUS_WIN_STATUS_ADDRESS_CLOSED = 0xC000020B,
+  MD_NTSTATUS_WIN_STATUS_CONNECTION_DISCONNECTED = 0xC000020C,
+  MD_NTSTATUS_WIN_STATUS_CONNECTION_RESET = 0xC000020D,
+  MD_NTSTATUS_WIN_STATUS_TOO_MANY_NODES = 0xC000020E,
+  MD_NTSTATUS_WIN_STATUS_TRANSACTION_ABORTED = 0xC000020F,
+  MD_NTSTATUS_WIN_STATUS_TRANSACTION_TIMED_OUT = 0xC0000210,
+  MD_NTSTATUS_WIN_STATUS_TRANSACTION_NO_RELEASE = 0xC0000211,
+  MD_NTSTATUS_WIN_STATUS_TRANSACTION_NO_MATCH = 0xC0000212,
+  MD_NTSTATUS_WIN_STATUS_TRANSACTION_RESPONDED = 0xC0000213,
+  MD_NTSTATUS_WIN_STATUS_TRANSACTION_INVALID_ID = 0xC0000214,
+  MD_NTSTATUS_WIN_STATUS_TRANSACTION_INVALID_TYPE = 0xC0000215,
+  MD_NTSTATUS_WIN_STATUS_NOT_SERVER_SESSION = 0xC0000216,
+  MD_NTSTATUS_WIN_STATUS_NOT_CLIENT_SESSION = 0xC0000217,
+  MD_NTSTATUS_WIN_STATUS_CANNOT_LOAD_REGISTRY_FILE = 0xC0000218,
+  MD_NTSTATUS_WIN_STATUS_DEBUG_ATTACH_FAILED = 0xC0000219,
+  MD_NTSTATUS_WIN_STATUS_SYSTEM_PROCESS_TERMINATED = 0xC000021A,
+  MD_NTSTATUS_WIN_STATUS_DATA_NOT_ACCEPTED = 0xC000021B,
+  MD_NTSTATUS_WIN_STATUS_NO_BROWSER_SERVERS_FOUND = 0xC000021C,
+  MD_NTSTATUS_WIN_STATUS_VDM_HARD_ERROR = 0xC000021D,
+  MD_NTSTATUS_WIN_STATUS_DRIVER_CANCEL_TIMEOUT = 0xC000021E,
+  MD_NTSTATUS_WIN_STATUS_REPLY_MESSAGE_MISMATCH = 0xC000021F,
+  MD_NTSTATUS_WIN_STATUS_MAPPED_ALIGNMENT = 0xC0000220,
+  MD_NTSTATUS_WIN_STATUS_IMAGE_CHECKSUM_MISMATCH = 0xC0000221,
+  MD_NTSTATUS_WIN_STATUS_LOST_WRITEBEHIND_DATA = 0xC0000222,
+  MD_NTSTATUS_WIN_STATUS_CLIENT_SERVER_PARAMETERS_INVALID = 0xC0000223,
+  MD_NTSTATUS_WIN_STATUS_PASSWORD_MUST_CHANGE = 0xC0000224,
+  MD_NTSTATUS_WIN_STATUS_NOT_FOUND = 0xC0000225,
+  MD_NTSTATUS_WIN_STATUS_NOT_TINY_STREAM = 0xC0000226,
+  MD_NTSTATUS_WIN_STATUS_RECOVERY_FAILURE = 0xC0000227,
+  MD_NTSTATUS_WIN_STATUS_STACK_OVERFLOW_READ = 0xC0000228,
+  MD_NTSTATUS_WIN_STATUS_FAIL_CHECK = 0xC0000229,
+  MD_NTSTATUS_WIN_STATUS_DUPLICATE_OBJECTID = 0xC000022A,
+  MD_NTSTATUS_WIN_STATUS_OBJECTID_EXISTS = 0xC000022B,
+  MD_NTSTATUS_WIN_STATUS_CONVERT_TO_LARGE = 0xC000022C,
+  MD_NTSTATUS_WIN_STATUS_RETRY = 0xC000022D,
+  MD_NTSTATUS_WIN_STATUS_FOUND_OUT_OF_SCOPE = 0xC000022E,
+  MD_NTSTATUS_WIN_STATUS_ALLOCATE_BUCKET = 0xC000022F,
+  MD_NTSTATUS_WIN_STATUS_PROPSET_NOT_FOUND = 0xC0000230,
+  MD_NTSTATUS_WIN_STATUS_MARSHALL_OVERFLOW = 0xC0000231,
+  MD_NTSTATUS_WIN_STATUS_INVALID_VARIANT = 0xC0000232,
+  MD_NTSTATUS_WIN_STATUS_DOMAIN_CONTROLLER_NOT_FOUND = 0xC0000233,
+  MD_NTSTATUS_WIN_STATUS_ACCOUNT_LOCKED_OUT = 0xC0000234,
+  MD_NTSTATUS_WIN_STATUS_HANDLE_NOT_CLOSABLE = 0xC0000235,
+  MD_NTSTATUS_WIN_STATUS_CONNECTION_REFUSED = 0xC0000236,
+  MD_NTSTATUS_WIN_STATUS_GRACEFUL_DISCONNECT = 0xC0000237,
+  MD_NTSTATUS_WIN_STATUS_ADDRESS_ALREADY_ASSOCIATED = 0xC0000238,
+  MD_NTSTATUS_WIN_STATUS_ADDRESS_NOT_ASSOCIATED = 0xC0000239,
+  MD_NTSTATUS_WIN_STATUS_CONNECTION_INVALID = 0xC000023A,
+  MD_NTSTATUS_WIN_STATUS_CONNECTION_ACTIVE = 0xC000023B,
+  MD_NTSTATUS_WIN_STATUS_NETWORK_UNREACHABLE = 0xC000023C,
+  MD_NTSTATUS_WIN_STATUS_HOST_UNREACHABLE = 0xC000023D,
+  MD_NTSTATUS_WIN_STATUS_PROTOCOL_UNREACHABLE = 0xC000023E,
+  MD_NTSTATUS_WIN_STATUS_PORT_UNREACHABLE = 0xC000023F,
+  MD_NTSTATUS_WIN_STATUS_REQUEST_ABORTED = 0xC0000240,
+  MD_NTSTATUS_WIN_STATUS_CONNECTION_ABORTED = 0xC0000241,
+  MD_NTSTATUS_WIN_STATUS_BAD_COMPRESSION_BUFFER = 0xC0000242,
+  MD_NTSTATUS_WIN_STATUS_USER_MAPPED_FILE = 0xC0000243,
+  MD_NTSTATUS_WIN_STATUS_AUDIT_FAILED = 0xC0000244,
+  MD_NTSTATUS_WIN_STATUS_TIMER_RESOLUTION_NOT_SET = 0xC0000245,
+  MD_NTSTATUS_WIN_STATUS_CONNECTION_COUNT_LIMIT = 0xC0000246,
+  MD_NTSTATUS_WIN_STATUS_LOGIN_TIME_RESTRICTION = 0xC0000247,
+  MD_NTSTATUS_WIN_STATUS_LOGIN_WKSTA_RESTRICTION = 0xC0000248,
+  MD_NTSTATUS_WIN_STATUS_IMAGE_MP_UP_MISMATCH = 0xC0000249,
+  MD_NTSTATUS_WIN_STATUS_INSUFFICIENT_LOGON_INFO = 0xC0000250,
+  MD_NTSTATUS_WIN_STATUS_BAD_DLL_ENTRYPOINT = 0xC0000251,
+  MD_NTSTATUS_WIN_STATUS_BAD_SERVICE_ENTRYPOINT = 0xC0000252,
+  MD_NTSTATUS_WIN_STATUS_LPC_REPLY_LOST = 0xC0000253,
+  MD_NTSTATUS_WIN_STATUS_IP_ADDRESS_CONFLICT1 = 0xC0000254,
+  MD_NTSTATUS_WIN_STATUS_IP_ADDRESS_CONFLICT2 = 0xC0000255,
+  MD_NTSTATUS_WIN_STATUS_REGISTRY_QUOTA_LIMIT = 0xC0000256,
+  MD_NTSTATUS_WIN_STATUS_PATH_NOT_COVERED = 0xC0000257,
+  MD_NTSTATUS_WIN_STATUS_NO_CALLBACK_ACTIVE = 0xC0000258,
+  MD_NTSTATUS_WIN_STATUS_LICENSE_QUOTA_EXCEEDED = 0xC0000259,
+  MD_NTSTATUS_WIN_STATUS_PWD_TOO_SHORT = 0xC000025A,
+  MD_NTSTATUS_WIN_STATUS_PWD_TOO_RECENT = 0xC000025B,
+  MD_NTSTATUS_WIN_STATUS_PWD_HISTORY_CONFLICT = 0xC000025C,
+  MD_NTSTATUS_WIN_STATUS_PLUGPLAY_NO_DEVICE = 0xC000025E,
+  MD_NTSTATUS_WIN_STATUS_UNSUPPORTED_COMPRESSION = 0xC000025F,
+  MD_NTSTATUS_WIN_STATUS_INVALID_HW_PROFILE = 0xC0000260,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PLUGPLAY_DEVICE_PATH = 0xC0000261,
+  MD_NTSTATUS_WIN_STATUS_DRIVER_ORDINAL_NOT_FOUND = 0xC0000262,
+  MD_NTSTATUS_WIN_STATUS_DRIVER_ENTRYPOINT_NOT_FOUND = 0xC0000263,
+  MD_NTSTATUS_WIN_STATUS_RESOURCE_NOT_OWNED = 0xC0000264,
+  MD_NTSTATUS_WIN_STATUS_TOO_MANY_LINKS = 0xC0000265,
+  MD_NTSTATUS_WIN_STATUS_QUOTA_LIST_INCONSISTENT = 0xC0000266,
+  MD_NTSTATUS_WIN_STATUS_FILE_IS_OFFLINE = 0xC0000267,
+  MD_NTSTATUS_WIN_STATUS_EVALUATION_EXPIRATION = 0xC0000268,
+  MD_NTSTATUS_WIN_STATUS_ILLEGAL_DLL_RELOCATION = 0xC0000269,
+  MD_NTSTATUS_WIN_STATUS_LICENSE_VIOLATION = 0xC000026A,
+  MD_NTSTATUS_WIN_STATUS_DLL_INIT_FAILED_LOGOFF = 0xC000026B,
+  MD_NTSTATUS_WIN_STATUS_DRIVER_UNABLE_TO_LOAD = 0xC000026C,
+  MD_NTSTATUS_WIN_STATUS_DFS_UNAVAILABLE = 0xC000026D,
+  MD_NTSTATUS_WIN_STATUS_VOLUME_DISMOUNTED = 0xC000026E,
+  MD_NTSTATUS_WIN_STATUS_WX86_INTERNAL_ERROR = 0xC000026F,
+  MD_NTSTATUS_WIN_STATUS_WX86_FLOAT_STACK_CHECK = 0xC0000270,
+  MD_NTSTATUS_WIN_STATUS_VALIDATE_CONTINUE = 0xC0000271,
+  MD_NTSTATUS_WIN_STATUS_NO_MATCH = 0xC0000272,
+  MD_NTSTATUS_WIN_STATUS_NO_MORE_MATCHES = 0xC0000273,
+  MD_NTSTATUS_WIN_STATUS_NOT_A_REPARSE_POINT = 0xC0000275,
+  MD_NTSTATUS_WIN_STATUS_IO_REPARSE_TAG_INVALID = 0xC0000276,
+  MD_NTSTATUS_WIN_STATUS_IO_REPARSE_TAG_MISMATCH = 0xC0000277,
+  MD_NTSTATUS_WIN_STATUS_IO_REPARSE_DATA_INVALID = 0xC0000278,
+  MD_NTSTATUS_WIN_STATUS_IO_REPARSE_TAG_NOT_HANDLED = 0xC0000279,
+  MD_NTSTATUS_WIN_STATUS_PWD_TOO_LONG = 0xC000027A,
+  MD_NTSTATUS_WIN_STATUS_STOWED_EXCEPTION = 0xC000027B,
+  MD_NTSTATUS_WIN_STATUS_REPARSE_POINT_NOT_RESOLVED = 0xC0000280,
+  MD_NTSTATUS_WIN_STATUS_DIRECTORY_IS_A_REPARSE_POINT = 0xC0000281,
+  MD_NTSTATUS_WIN_STATUS_RANGE_LIST_CONFLICT = 0xC0000282,
+  MD_NTSTATUS_WIN_STATUS_SOURCE_ELEMENT_EMPTY = 0xC0000283,
+  MD_NTSTATUS_WIN_STATUS_DESTINATION_ELEMENT_FULL = 0xC0000284,
+  MD_NTSTATUS_WIN_STATUS_ILLEGAL_ELEMENT_ADDRESS = 0xC0000285,
+  MD_NTSTATUS_WIN_STATUS_MAGAZINE_NOT_PRESENT = 0xC0000286,
+  MD_NTSTATUS_WIN_STATUS_REINITIALIZATION_NEEDED = 0xC0000287,
+  MD_NTSTATUS_WIN_STATUS_ENCRYPTION_FAILED = 0xC000028A,
+  MD_NTSTATUS_WIN_STATUS_DECRYPTION_FAILED = 0xC000028B,
+  MD_NTSTATUS_WIN_STATUS_RANGE_NOT_FOUND = 0xC000028C,
+  MD_NTSTATUS_WIN_STATUS_NO_RECOVERY_POLICY = 0xC000028D,
+  MD_NTSTATUS_WIN_STATUS_NO_EFS = 0xC000028E,
+  MD_NTSTATUS_WIN_STATUS_WRONG_EFS = 0xC000028F,
+  MD_NTSTATUS_WIN_STATUS_NO_USER_KEYS = 0xC0000290,
+  MD_NTSTATUS_WIN_STATUS_FILE_NOT_ENCRYPTED = 0xC0000291,
+  MD_NTSTATUS_WIN_STATUS_NOT_EXPORT_FORMAT = 0xC0000292,
+  MD_NTSTATUS_WIN_STATUS_FILE_ENCRYPTED = 0xC0000293,
+  MD_NTSTATUS_WIN_STATUS_WMI_GUID_NOT_FOUND = 0xC0000295,
+  MD_NTSTATUS_WIN_STATUS_WMI_INSTANCE_NOT_FOUND = 0xC0000296,
+  MD_NTSTATUS_WIN_STATUS_WMI_ITEMID_NOT_FOUND = 0xC0000297,
+  MD_NTSTATUS_WIN_STATUS_WMI_TRY_AGAIN = 0xC0000298,
+  MD_NTSTATUS_WIN_STATUS_SHARED_POLICY = 0xC0000299,
+  MD_NTSTATUS_WIN_STATUS_POLICY_OBJECT_NOT_FOUND = 0xC000029A,
+  MD_NTSTATUS_WIN_STATUS_POLICY_ONLY_IN_DS = 0xC000029B,
+  MD_NTSTATUS_WIN_STATUS_VOLUME_NOT_UPGRADED = 0xC000029C,
+  MD_NTSTATUS_WIN_STATUS_REMOTE_STORAGE_NOT_ACTIVE = 0xC000029D,
+  MD_NTSTATUS_WIN_STATUS_REMOTE_STORAGE_MEDIA_ERROR = 0xC000029E,
+  MD_NTSTATUS_WIN_STATUS_NO_TRACKING_SERVICE = 0xC000029F,
+  MD_NTSTATUS_WIN_STATUS_SERVER_SID_MISMATCH = 0xC00002A0,
+  MD_NTSTATUS_WIN_STATUS_DS_NO_ATTRIBUTE_OR_VALUE = 0xC00002A1,
+  MD_NTSTATUS_WIN_STATUS_DS_INVALID_ATTRIBUTE_SYNTAX = 0xC00002A2,
+  MD_NTSTATUS_WIN_STATUS_DS_ATTRIBUTE_TYPE_UNDEFINED = 0xC00002A3,
+  MD_NTSTATUS_WIN_STATUS_DS_ATTRIBUTE_OR_VALUE_EXISTS = 0xC00002A4,
+  MD_NTSTATUS_WIN_STATUS_DS_BUSY = 0xC00002A5,
+  MD_NTSTATUS_WIN_STATUS_DS_UNAVAILABLE = 0xC00002A6,
+  MD_NTSTATUS_WIN_STATUS_DS_NO_RIDS_ALLOCATED = 0xC00002A7,
+  MD_NTSTATUS_WIN_STATUS_DS_NO_MORE_RIDS = 0xC00002A8,
+  MD_NTSTATUS_WIN_STATUS_DS_INCORRECT_ROLE_OWNER = 0xC00002A9,
+  MD_NTSTATUS_WIN_STATUS_DS_RIDMGR_INIT_ERROR = 0xC00002AA,
+  MD_NTSTATUS_WIN_STATUS_DS_OBJ_CLASS_VIOLATION = 0xC00002AB,
+  MD_NTSTATUS_WIN_STATUS_DS_CANT_ON_NON_LEAF = 0xC00002AC,
+  MD_NTSTATUS_WIN_STATUS_DS_CANT_ON_RDN = 0xC00002AD,
+  MD_NTSTATUS_WIN_STATUS_DS_CANT_MOD_OBJ_CLASS = 0xC00002AE,
+  MD_NTSTATUS_WIN_STATUS_DS_CROSS_DOM_MOVE_FAILED = 0xC00002AF,
+  MD_NTSTATUS_WIN_STATUS_DS_GC_NOT_AVAILABLE = 0xC00002B0,
+  MD_NTSTATUS_WIN_STATUS_DIRECTORY_SERVICE_REQUIRED = 0xC00002B1,
+  MD_NTSTATUS_WIN_STATUS_REPARSE_ATTRIBUTE_CONFLICT = 0xC00002B2,
+  MD_NTSTATUS_WIN_STATUS_CANT_ENABLE_DENY_ONLY = 0xC00002B3,
+  MD_NTSTATUS_WIN_STATUS_FLOAT_MULTIPLE_FAULTS = 0xC00002B4,
+  MD_NTSTATUS_WIN_STATUS_FLOAT_MULTIPLE_TRAPS = 0xC00002B5,
+  MD_NTSTATUS_WIN_STATUS_DEVICE_REMOVED = 0xC00002B6,
+  MD_NTSTATUS_WIN_STATUS_JOURNAL_DELETE_IN_PROGRESS = 0xC00002B7,
+  MD_NTSTATUS_WIN_STATUS_JOURNAL_NOT_ACTIVE = 0xC00002B8,
+  MD_NTSTATUS_WIN_STATUS_NOINTERFACE = 0xC00002B9,
+  MD_NTSTATUS_WIN_STATUS_DS_RIDMGR_DISABLED = 0xC00002BA,
+  MD_NTSTATUS_WIN_STATUS_DS_ADMIN_LIMIT_EXCEEDED = 0xC00002C1,
+  MD_NTSTATUS_WIN_STATUS_DRIVER_FAILED_SLEEP = 0xC00002C2,
+  MD_NTSTATUS_WIN_STATUS_MUTUAL_AUTHENTICATION_FAILED = 0xC00002C3,
+  MD_NTSTATUS_WIN_STATUS_CORRUPT_SYSTEM_FILE = 0xC00002C4,
+  MD_NTSTATUS_WIN_STATUS_DATATYPE_MISALIGNMENT_ERROR = 0xC00002C5,
+  MD_NTSTATUS_WIN_STATUS_WMI_READ_ONLY = 0xC00002C6,
+  MD_NTSTATUS_WIN_STATUS_WMI_SET_FAILURE = 0xC00002C7,
+  MD_NTSTATUS_WIN_STATUS_COMMITMENT_MINIMUM = 0xC00002C8,
+  MD_NTSTATUS_WIN_STATUS_REG_NAT_CONSUMPTION = 0xC00002C9,
+  MD_NTSTATUS_WIN_STATUS_TRANSPORT_FULL = 0xC00002CA,
+  MD_NTSTATUS_WIN_STATUS_DS_SAM_INIT_FAILURE = 0xC00002CB,
+  MD_NTSTATUS_WIN_STATUS_ONLY_IF_CONNECTED = 0xC00002CC,
+  MD_NTSTATUS_WIN_STATUS_DS_SENSITIVE_GROUP_VIOLATION = 0xC00002CD,
+  MD_NTSTATUS_WIN_STATUS_PNP_RESTART_ENUMERATION = 0xC00002CE,
+  MD_NTSTATUS_WIN_STATUS_JOURNAL_ENTRY_DELETED = 0xC00002CF,
+  MD_NTSTATUS_WIN_STATUS_DS_CANT_MOD_PRIMARYGROUPID = 0xC00002D0,
+  MD_NTSTATUS_WIN_STATUS_SYSTEM_IMAGE_BAD_SIGNATURE = 0xC00002D1,
+  MD_NTSTATUS_WIN_STATUS_PNP_REBOOT_REQUIRED = 0xC00002D2,
+  MD_NTSTATUS_WIN_STATUS_POWER_STATE_INVALID = 0xC00002D3,
+  MD_NTSTATUS_WIN_STATUS_DS_INVALID_GROUP_TYPE = 0xC00002D4,
+  MD_NTSTATUS_WIN_STATUS_DS_NO_NEST_GLOBALGROUP_IN_MIXEDDOMAIN = 0xC00002D5,
+  MD_NTSTATUS_WIN_STATUS_DS_NO_NEST_LOCALGROUP_IN_MIXEDDOMAIN = 0xC00002D6,
+  MD_NTSTATUS_WIN_STATUS_DS_GLOBAL_CANT_HAVE_LOCAL_MEMBER = 0xC00002D7,
+  MD_NTSTATUS_WIN_STATUS_DS_GLOBAL_CANT_HAVE_UNIVERSAL_MEMBER = 0xC00002D8,
+  MD_NTSTATUS_WIN_STATUS_DS_UNIVERSAL_CANT_HAVE_LOCAL_MEMBER = 0xC00002D9,
+  MD_NTSTATUS_WIN_STATUS_DS_GLOBAL_CANT_HAVE_CROSSDOMAIN_MEMBER = 0xC00002DA,
+  MD_NTSTATUS_WIN_STATUS_DS_LOCAL_CANT_HAVE_CROSSDOMAIN_LOCAL_MEMBER = 0xC00002DB,
+  MD_NTSTATUS_WIN_STATUS_DS_HAVE_PRIMARY_MEMBERS = 0xC00002DC,
+  MD_NTSTATUS_WIN_STATUS_WMI_NOT_SUPPORTED = 0xC00002DD,
+  MD_NTSTATUS_WIN_STATUS_INSUFFICIENT_POWER = 0xC00002DE,
+  MD_NTSTATUS_WIN_STATUS_SAM_NEED_BOOTKEY_PASSWORD = 0xC00002DF,
+  MD_NTSTATUS_WIN_STATUS_SAM_NEED_BOOTKEY_FLOPPY = 0xC00002E0,
+  MD_NTSTATUS_WIN_STATUS_DS_CANT_START = 0xC00002E1,
+  MD_NTSTATUS_WIN_STATUS_DS_INIT_FAILURE = 0xC00002E2,
+  MD_NTSTATUS_WIN_STATUS_SAM_INIT_FAILURE = 0xC00002E3,
+  MD_NTSTATUS_WIN_STATUS_DS_GC_REQUIRED = 0xC00002E4,
+  MD_NTSTATUS_WIN_STATUS_DS_LOCAL_MEMBER_OF_LOCAL_ONLY = 0xC00002E5,
+  MD_NTSTATUS_WIN_STATUS_DS_NO_FPO_IN_UNIVERSAL_GROUPS = 0xC00002E6,
+  MD_NTSTATUS_WIN_STATUS_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED = 0xC00002E7,
+  MD_NTSTATUS_WIN_STATUS_MULTIPLE_FAULT_VIOLATION = 0xC00002E8,
+  MD_NTSTATUS_WIN_STATUS_CURRENT_DOMAIN_NOT_ALLOWED = 0xC00002E9,
+  MD_NTSTATUS_WIN_STATUS_CANNOT_MAKE = 0xC00002EA,
+  MD_NTSTATUS_WIN_STATUS_SYSTEM_SHUTDOWN = 0xC00002EB,
+  MD_NTSTATUS_WIN_STATUS_DS_INIT_FAILURE_CONSOLE = 0xC00002EC,
+  MD_NTSTATUS_WIN_STATUS_DS_SAM_INIT_FAILURE_CONSOLE = 0xC00002ED,
+  MD_NTSTATUS_WIN_STATUS_UNFINISHED_CONTEXT_DELETED = 0xC00002EE,
+  MD_NTSTATUS_WIN_STATUS_NO_TGT_REPLY = 0xC00002EF,
+  MD_NTSTATUS_WIN_STATUS_OBJECTID_NOT_FOUND = 0xC00002F0,
+  MD_NTSTATUS_WIN_STATUS_NO_IP_ADDRESSES = 0xC00002F1,
+  MD_NTSTATUS_WIN_STATUS_WRONG_CREDENTIAL_HANDLE = 0xC00002F2,
+  MD_NTSTATUS_WIN_STATUS_CRYPTO_SYSTEM_INVALID = 0xC00002F3,
+  MD_NTSTATUS_WIN_STATUS_MAX_REFERRALS_EXCEEDED = 0xC00002F4,
+  MD_NTSTATUS_WIN_STATUS_MUST_BE_KDC = 0xC00002F5,
+  MD_NTSTATUS_WIN_STATUS_STRONG_CRYPTO_NOT_SUPPORTED = 0xC00002F6,
+  MD_NTSTATUS_WIN_STATUS_TOO_MANY_PRINCIPALS = 0xC00002F7,
+  MD_NTSTATUS_WIN_STATUS_NO_PA_DATA = 0xC00002F8,
+  MD_NTSTATUS_WIN_STATUS_PKINIT_NAME_MISMATCH = 0xC00002F9,
+  MD_NTSTATUS_WIN_STATUS_SMARTCARD_LOGON_REQUIRED = 0xC00002FA,
+  MD_NTSTATUS_WIN_STATUS_KDC_INVALID_REQUEST = 0xC00002FB,
+  MD_NTSTATUS_WIN_STATUS_KDC_UNABLE_TO_REFER = 0xC00002FC,
+  MD_NTSTATUS_WIN_STATUS_KDC_UNKNOWN_ETYPE = 0xC00002FD,
+  MD_NTSTATUS_WIN_STATUS_SHUTDOWN_IN_PROGRESS = 0xC00002FE,
+  MD_NTSTATUS_WIN_STATUS_SERVER_SHUTDOWN_IN_PROGRESS = 0xC00002FF,
+  MD_NTSTATUS_WIN_STATUS_NOT_SUPPORTED_ON_SBS = 0xC0000300,
+  MD_NTSTATUS_WIN_STATUS_WMI_GUID_DISCONNECTED = 0xC0000301,
+  MD_NTSTATUS_WIN_STATUS_WMI_ALREADY_DISABLED = 0xC0000302,
+  MD_NTSTATUS_WIN_STATUS_WMI_ALREADY_ENABLED = 0xC0000303,
+  MD_NTSTATUS_WIN_STATUS_MFT_TOO_FRAGMENTED = 0xC0000304,
+  MD_NTSTATUS_WIN_STATUS_COPY_PROTECTION_FAILURE = 0xC0000305,
+  MD_NTSTATUS_WIN_STATUS_CSS_AUTHENTICATION_FAILURE = 0xC0000306,
+  MD_NTSTATUS_WIN_STATUS_CSS_KEY_NOT_PRESENT = 0xC0000307,
+  MD_NTSTATUS_WIN_STATUS_CSS_KEY_NOT_ESTABLISHED = 0xC0000308,
+  MD_NTSTATUS_WIN_STATUS_CSS_SCRAMBLED_SECTOR = 0xC0000309,
+  MD_NTSTATUS_WIN_STATUS_CSS_REGION_MISMATCH = 0xC000030A,
+  MD_NTSTATUS_WIN_STATUS_CSS_RESETS_EXHAUSTED = 0xC000030B,
+  MD_NTSTATUS_WIN_STATUS_PASSWORD_CHANGE_REQUIRED = 0xC000030C,
+  MD_NTSTATUS_WIN_STATUS_PKINIT_FAILURE = 0xC0000320,
+  MD_NTSTATUS_WIN_STATUS_SMARTCARD_SUBSYSTEM_FAILURE = 0xC0000321,
+  MD_NTSTATUS_WIN_STATUS_NO_KERB_KEY = 0xC0000322,
+  MD_NTSTATUS_WIN_STATUS_HOST_DOWN = 0xC0000350,
+  MD_NTSTATUS_WIN_STATUS_UNSUPPORTED_PREAUTH = 0xC0000351,
+  MD_NTSTATUS_WIN_STATUS_EFS_ALG_BLOB_TOO_BIG = 0xC0000352,
+  MD_NTSTATUS_WIN_STATUS_PORT_NOT_SET = 0xC0000353,
+  MD_NTSTATUS_WIN_STATUS_DEBUGGER_INACTIVE = 0xC0000354,
+  MD_NTSTATUS_WIN_STATUS_DS_VERSION_CHECK_FAILURE = 0xC0000355,
+  MD_NTSTATUS_WIN_STATUS_AUDITING_DISABLED = 0xC0000356,
+  MD_NTSTATUS_WIN_STATUS_PRENT4_MACHINE_ACCOUNT = 0xC0000357,
+  MD_NTSTATUS_WIN_STATUS_DS_AG_CANT_HAVE_UNIVERSAL_MEMBER = 0xC0000358,
+  MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_WIN_32 = 0xC0000359,
+  MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_WIN_64 = 0xC000035A,
+  MD_NTSTATUS_WIN_STATUS_BAD_BINDINGS = 0xC000035B,
+  MD_NTSTATUS_WIN_STATUS_NETWORK_SESSION_EXPIRED = 0xC000035C,
+  MD_NTSTATUS_WIN_STATUS_APPHELP_BLOCK = 0xC000035D,
+  MD_NTSTATUS_WIN_STATUS_ALL_SIDS_FILTERED = 0xC000035E,
+  MD_NTSTATUS_WIN_STATUS_NOT_SAFE_MODE_DRIVER = 0xC000035F,
+  MD_NTSTATUS_WIN_STATUS_ACCESS_DISABLED_BY_POLICY_DEFAULT = 0xC0000361,
+  MD_NTSTATUS_WIN_STATUS_ACCESS_DISABLED_BY_POLICY_PATH = 0xC0000362,
+  MD_NTSTATUS_WIN_STATUS_ACCESS_DISABLED_BY_POLICY_PUBLISHER = 0xC0000363,
+  MD_NTSTATUS_WIN_STATUS_ACCESS_DISABLED_BY_POLICY_OTHER = 0xC0000364,
+  MD_NTSTATUS_WIN_STATUS_FAILED_DRIVER_ENTRY = 0xC0000365,
+  MD_NTSTATUS_WIN_STATUS_DEVICE_ENUMERATION_ERROR = 0xC0000366,
+  MD_NTSTATUS_WIN_STATUS_MOUNT_POINT_NOT_RESOLVED = 0xC0000368,
+  MD_NTSTATUS_WIN_STATUS_INVALID_DEVICE_OBJECT_PARAMETER = 0xC0000369,
+  MD_NTSTATUS_WIN_STATUS_MCA_OCCURED = 0xC000036A,
+  MD_NTSTATUS_WIN_STATUS_DRIVER_BLOCKED_CRITICAL = 0xC000036B,
+  MD_NTSTATUS_WIN_STATUS_DRIVER_BLOCKED = 0xC000036C,
+  MD_NTSTATUS_WIN_STATUS_DRIVER_DATABASE_ERROR = 0xC000036D,
+  MD_NTSTATUS_WIN_STATUS_SYSTEM_HIVE_TOO_LARGE = 0xC000036E,
+  MD_NTSTATUS_WIN_STATUS_INVALID_IMPORT_OF_NON_DLL = 0xC000036F,
+  MD_NTSTATUS_WIN_STATUS_NO_SECRETS = 0xC0000371,
+  MD_NTSTATUS_WIN_STATUS_ACCESS_DISABLED_NO_SAFER_UI_BY_POLICY = 0xC0000372,
+  MD_NTSTATUS_WIN_STATUS_FAILED_STACK_SWITCH = 0xC0000373,
+  MD_NTSTATUS_WIN_STATUS_HEAP_CORRUPTION = 0xC0000374,
+  MD_NTSTATUS_WIN_STATUS_SMARTCARD_WRONG_PIN = 0xC0000380,
+  MD_NTSTATUS_WIN_STATUS_SMARTCARD_CARD_BLOCKED = 0xC0000381,
+  MD_NTSTATUS_WIN_STATUS_SMARTCARD_CARD_NOT_AUTHENTICATED = 0xC0000382,
+  MD_NTSTATUS_WIN_STATUS_SMARTCARD_NO_CARD = 0xC0000383,
+  MD_NTSTATUS_WIN_STATUS_SMARTCARD_NO_KEY_CONTAINER = 0xC0000384,
+  MD_NTSTATUS_WIN_STATUS_SMARTCARD_NO_CERTIFICATE = 0xC0000385,
+  MD_NTSTATUS_WIN_STATUS_SMARTCARD_NO_KEYSET = 0xC0000386,
+  MD_NTSTATUS_WIN_STATUS_SMARTCARD_IO_ERROR = 0xC0000387,
+  MD_NTSTATUS_WIN_STATUS_DOWNGRADE_DETECTED = 0xC0000388,
+  MD_NTSTATUS_WIN_STATUS_SMARTCARD_CERT_REVOKED = 0xC0000389,
+  MD_NTSTATUS_WIN_STATUS_ISSUING_CA_UNTRUSTED = 0xC000038A,
+  MD_NTSTATUS_WIN_STATUS_REVOCATION_OFFLINE_C = 0xC000038B,
+  MD_NTSTATUS_WIN_STATUS_PKINIT_CLIENT_FAILURE = 0xC000038C,
+  MD_NTSTATUS_WIN_STATUS_SMARTCARD_CERT_EXPIRED = 0xC000038D,
+  MD_NTSTATUS_WIN_STATUS_DRIVER_FAILED_PRIOR_UNLOAD = 0xC000038E,
+  MD_NTSTATUS_WIN_STATUS_SMARTCARD_SILENT_CONTEXT = 0xC000038F,
+  MD_NTSTATUS_WIN_STATUS_PER_USER_TRUST_QUOTA_EXCEEDED = 0xC0000401,
+  MD_NTSTATUS_WIN_STATUS_ALL_USER_TRUST_QUOTA_EXCEEDED = 0xC0000402,
+  MD_NTSTATUS_WIN_STATUS_USER_DELETE_TRUST_QUOTA_EXCEEDED = 0xC0000403,
+  MD_NTSTATUS_WIN_STATUS_DS_NAME_NOT_UNIQUE = 0xC0000404,
+  MD_NTSTATUS_WIN_STATUS_DS_DUPLICATE_ID_FOUND = 0xC0000405,
+  MD_NTSTATUS_WIN_STATUS_DS_GROUP_CONVERSION_ERROR = 0xC0000406,
+  MD_NTSTATUS_WIN_STATUS_VOLSNAP_PREPARE_HIBERNATE = 0xC0000407,
+  MD_NTSTATUS_WIN_STATUS_USER2USER_REQUIRED = 0xC0000408,
+  MD_NTSTATUS_WIN_STATUS_STACK_BUFFER_OVERRUN = 0xC0000409,
+  MD_NTSTATUS_WIN_STATUS_NO_S4U_PROT_SUPPORT = 0xC000040A,
+  MD_NTSTATUS_WIN_STATUS_CROSSREALM_DELEGATION_FAILURE = 0xC000040B,
+  MD_NTSTATUS_WIN_STATUS_REVOCATION_OFFLINE_KDC = 0xC000040C,
+  MD_NTSTATUS_WIN_STATUS_ISSUING_CA_UNTRUSTED_KDC = 0xC000040D,
+  MD_NTSTATUS_WIN_STATUS_KDC_CERT_EXPIRED = 0xC000040E,
+  MD_NTSTATUS_WIN_STATUS_KDC_CERT_REVOKED = 0xC000040F,
+  MD_NTSTATUS_WIN_STATUS_PARAMETER_QUOTA_EXCEEDED = 0xC0000410,
+  MD_NTSTATUS_WIN_STATUS_HIBERNATION_FAILURE = 0xC0000411,
+  MD_NTSTATUS_WIN_STATUS_DELAY_LOAD_FAILED = 0xC0000412,
+  MD_NTSTATUS_WIN_STATUS_AUTHENTICATION_FIREWALL_FAILED = 0xC0000413,
+  MD_NTSTATUS_WIN_STATUS_VDM_DISALLOWED = 0xC0000414,
+  MD_NTSTATUS_WIN_STATUS_HUNG_DISPLAY_DRIVER_THREAD = 0xC0000415,
+  MD_NTSTATUS_WIN_STATUS_INSUFFICIENT_RESOURCE_FOR_SPECIFIED_SHARED_SECTION_SIZE = 0xC0000416,
+  MD_NTSTATUS_WIN_STATUS_INVALID_CRUNTIME_PARAMETER = 0xC0000417,
+  MD_NTSTATUS_WIN_STATUS_NTLM_BLOCKED = 0xC0000418,
+  MD_NTSTATUS_WIN_STATUS_DS_SRC_SID_EXISTS_IN_FOREST = 0xC0000419,
+  MD_NTSTATUS_WIN_STATUS_DS_DOMAIN_NAME_EXISTS_IN_FOREST = 0xC000041A,
+  MD_NTSTATUS_WIN_STATUS_DS_FLAT_NAME_EXISTS_IN_FOREST = 0xC000041B,
+  MD_NTSTATUS_WIN_STATUS_INVALID_USER_PRINCIPAL_NAME = 0xC000041C,
+  MD_NTSTATUS_WIN_STATUS_FATAL_USER_CALLBACK_EXCEPTION = 0xC000041D,
+  MD_NTSTATUS_WIN_STATUS_ASSERTION_FAILURE = 0xC0000420,
+  MD_NTSTATUS_WIN_STATUS_VERIFIER_STOP = 0xC0000421,
+  MD_NTSTATUS_WIN_STATUS_CALLBACK_POP_STACK = 0xC0000423,
+  MD_NTSTATUS_WIN_STATUS_INCOMPATIBLE_DRIVER_BLOCKED = 0xC0000424,
+  MD_NTSTATUS_WIN_STATUS_HIVE_UNLOADED = 0xC0000425,
+  MD_NTSTATUS_WIN_STATUS_COMPRESSION_DISABLED = 0xC0000426,
+  MD_NTSTATUS_WIN_STATUS_FILE_SYSTEM_LIMITATION = 0xC0000427,
+  MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_HASH = 0xC0000428,
+  MD_NTSTATUS_WIN_STATUS_NOT_CAPABLE = 0xC0000429,
+  MD_NTSTATUS_WIN_STATUS_REQUEST_OUT_OF_SEQUENCE = 0xC000042A,
+  MD_NTSTATUS_WIN_STATUS_IMPLEMENTATION_LIMIT = 0xC000042B,
+  MD_NTSTATUS_WIN_STATUS_ELEVATION_REQUIRED = 0xC000042C,
+  MD_NTSTATUS_WIN_STATUS_NO_SECURITY_CONTEXT = 0xC000042D,
+  MD_NTSTATUS_WIN_STATUS_PKU2U_CERT_FAILURE = 0xC000042F,
+  MD_NTSTATUS_WIN_STATUS_BEYOND_VDL = 0xC0000432,
+  MD_NTSTATUS_WIN_STATUS_ENCOUNTERED_WRITE_IN_PROGRESS = 0xC0000433,
+  MD_NTSTATUS_WIN_STATUS_PTE_CHANGED = 0xC0000434,
+  MD_NTSTATUS_WIN_STATUS_PURGE_FAILED = 0xC0000435,
+  MD_NTSTATUS_WIN_STATUS_CRED_REQUIRES_CONFIRMATION = 0xC0000440,
+  MD_NTSTATUS_WIN_STATUS_CS_ENCRYPTION_INVALID_SERVER_RESPONSE = 0xC0000441,
+  MD_NTSTATUS_WIN_STATUS_CS_ENCRYPTION_UNSUPPORTED_SERVER = 0xC0000442,
+  MD_NTSTATUS_WIN_STATUS_CS_ENCRYPTION_EXISTING_ENCRYPTED_FILE = 0xC0000443,
+  MD_NTSTATUS_WIN_STATUS_CS_ENCRYPTION_NEW_ENCRYPTED_FILE = 0xC0000444,
+  MD_NTSTATUS_WIN_STATUS_CS_ENCRYPTION_FILE_NOT_CSE = 0xC0000445,
+  MD_NTSTATUS_WIN_STATUS_INVALID_LABEL = 0xC0000446,
+  MD_NTSTATUS_WIN_STATUS_DRIVER_PROCESS_TERMINATED = 0xC0000450,
+  MD_NTSTATUS_WIN_STATUS_AMBIGUOUS_SYSTEM_DEVICE = 0xC0000451,
+  MD_NTSTATUS_WIN_STATUS_SYSTEM_DEVICE_NOT_FOUND = 0xC0000452,
+  MD_NTSTATUS_WIN_STATUS_RESTART_BOOT_APPLICATION = 0xC0000453,
+  MD_NTSTATUS_WIN_STATUS_INSUFFICIENT_NVRAM_RESOURCES = 0xC0000454,
+  MD_NTSTATUS_WIN_STATUS_INVALID_SESSION = 0xC0000455,
+  MD_NTSTATUS_WIN_STATUS_THREAD_ALREADY_IN_SESSION = 0xC0000456,
+  MD_NTSTATUS_WIN_STATUS_THREAD_NOT_IN_SESSION = 0xC0000457,
+  MD_NTSTATUS_WIN_STATUS_INVALID_WEIGHT = 0xC0000458,
+  MD_NTSTATUS_WIN_STATUS_REQUEST_PAUSED = 0xC0000459,
+  MD_NTSTATUS_WIN_STATUS_NO_RANGES_PROCESSED = 0xC0000460,
+  MD_NTSTATUS_WIN_STATUS_DISK_RESOURCES_EXHAUSTED = 0xC0000461,
+  MD_NTSTATUS_WIN_STATUS_NEEDS_REMEDIATION = 0xC0000462,
+  MD_NTSTATUS_WIN_STATUS_DEVICE_FEATURE_NOT_SUPPORTED = 0xC0000463,
+  MD_NTSTATUS_WIN_STATUS_DEVICE_UNREACHABLE = 0xC0000464,
+  MD_NTSTATUS_WIN_STATUS_INVALID_TOKEN = 0xC0000465,
+  MD_NTSTATUS_WIN_STATUS_SERVER_UNAVAILABLE = 0xC0000466,
+  MD_NTSTATUS_WIN_STATUS_FILE_NOT_AVAILABLE = 0xC0000467,
+  MD_NTSTATUS_WIN_STATUS_DEVICE_INSUFFICIENT_RESOURCES = 0xC0000468,
+  MD_NTSTATUS_WIN_STATUS_PACKAGE_UPDATING = 0xC0000469,
+  MD_NTSTATUS_WIN_STATUS_NOT_READ_FROM_COPY = 0xC000046A,
+  MD_NTSTATUS_WIN_STATUS_FT_WRITE_FAILURE = 0xC000046B,
+  MD_NTSTATUS_WIN_STATUS_FT_DI_SCAN_REQUIRED = 0xC000046C,
+  MD_NTSTATUS_WIN_STATUS_OBJECT_NOT_EXTERNALLY_BACKED = 0xC000046D,
+  MD_NTSTATUS_WIN_STATUS_EXTERNAL_BACKING_PROVIDER_UNKNOWN = 0xC000046E,
+  MD_NTSTATUS_WIN_STATUS_DATA_CHECKSUM_ERROR = 0xC0000470,
+  MD_NTSTATUS_WIN_STATUS_INTERMIXED_KERNEL_EA_OPERATION = 0xC0000471,
+  MD_NTSTATUS_WIN_STATUS_TRIM_READ_ZERO_NOT_SUPPORTED = 0xC0000472,
+  MD_NTSTATUS_WIN_STATUS_TOO_MANY_SEGMENT_DESCRIPTORS = 0xC0000473,
+  MD_NTSTATUS_WIN_STATUS_INVALID_OFFSET_ALIGNMENT = 0xC0000474,
+  MD_NTSTATUS_WIN_STATUS_INVALID_FIELD_IN_PARAMETER_LIST = 0xC0000475,
+  MD_NTSTATUS_WIN_STATUS_OPERATION_IN_PROGRESS = 0xC0000476,
+  MD_NTSTATUS_WIN_STATUS_INVALID_INITIATOR_TARGET_PATH = 0xC0000477,
+  MD_NTSTATUS_WIN_STATUS_SCRUB_DATA_DISABLED = 0xC0000478,
+  MD_NTSTATUS_WIN_STATUS_NOT_REDUNDANT_STORAGE = 0xC0000479,
+  MD_NTSTATUS_WIN_STATUS_RESIDENT_FILE_NOT_SUPPORTED = 0xC000047A,
+  MD_NTSTATUS_WIN_STATUS_COMPRESSED_FILE_NOT_SUPPORTED = 0xC000047B,
+  MD_NTSTATUS_WIN_STATUS_DIRECTORY_NOT_SUPPORTED = 0xC000047C,
+  MD_NTSTATUS_WIN_STATUS_IO_OPERATION_TIMEOUT = 0xC000047D,
+  MD_NTSTATUS_WIN_STATUS_SYSTEM_NEEDS_REMEDIATION = 0xC000047E,
+  MD_NTSTATUS_WIN_STATUS_APPX_INTEGRITY_FAILURE_CLR_NGEN = 0xC000047F,
+  MD_NTSTATUS_WIN_STATUS_SHARE_UNAVAILABLE = 0xC0000480,
+  MD_NTSTATUS_WIN_STATUS_APISET_NOT_HOSTED = 0xC0000481,
+  MD_NTSTATUS_WIN_STATUS_APISET_NOT_PRESENT = 0xC0000482,
+  MD_NTSTATUS_WIN_STATUS_DEVICE_HARDWARE_ERROR = 0xC0000483,
+  MD_NTSTATUS_WIN_STATUS_INVALID_TASK_NAME = 0xC0000500,
+  MD_NTSTATUS_WIN_STATUS_INVALID_TASK_INDEX = 0xC0000501,
+  MD_NTSTATUS_WIN_STATUS_THREAD_ALREADY_IN_TASK = 0xC0000502,
+  MD_NTSTATUS_WIN_STATUS_CALLBACK_BYPASS = 0xC0000503,
+  MD_NTSTATUS_WIN_STATUS_UNDEFINED_SCOPE = 0xC0000504,
+  MD_NTSTATUS_WIN_STATUS_INVALID_CAP = 0xC0000505,
+  MD_NTSTATUS_WIN_STATUS_NOT_GUI_PROCESS = 0xC0000506,
+  MD_NTSTATUS_WIN_STATUS_FAIL_FAST_EXCEPTION = 0xC0000602,
+  MD_NTSTATUS_WIN_STATUS_IMAGE_CERT_REVOKED = 0xC0000603,
+  MD_NTSTATUS_WIN_STATUS_DYNAMIC_CODE_BLOCKED = 0xC0000604,
+  MD_NTSTATUS_WIN_STATUS_PORT_CLOSED = 0xC0000700,
+  MD_NTSTATUS_WIN_STATUS_MESSAGE_LOST = 0xC0000701,
+  MD_NTSTATUS_WIN_STATUS_INVALID_MESSAGE = 0xC0000702,
+  MD_NTSTATUS_WIN_STATUS_REQUEST_CANCELED = 0xC0000703,
+  MD_NTSTATUS_WIN_STATUS_RECURSIVE_DISPATCH = 0xC0000704,
+  MD_NTSTATUS_WIN_STATUS_LPC_RECEIVE_BUFFER_EXPECTED = 0xC0000705,
+  MD_NTSTATUS_WIN_STATUS_LPC_INVALID_CONNECTION_USAGE = 0xC0000706,
+  MD_NTSTATUS_WIN_STATUS_LPC_REQUESTS_NOT_ALLOWED = 0xC0000707,
+  MD_NTSTATUS_WIN_STATUS_RESOURCE_IN_USE = 0xC0000708,
+  MD_NTSTATUS_WIN_STATUS_HARDWARE_MEMORY_ERROR = 0xC0000709,
+  MD_NTSTATUS_WIN_STATUS_THREADPOOL_HANDLE_EXCEPTION = 0xC000070A,
+  MD_NTSTATUS_WIN_STATUS_THREADPOOL_SET_EVENT_ON_COMPLETION_FAILED = 0xC000070B,
+  MD_NTSTATUS_WIN_STATUS_THREADPOOL_RELEASE_SEMAPHORE_ON_COMPLETION_FAILED = 0xC000070C,
+  MD_NTSTATUS_WIN_STATUS_THREADPOOL_RELEASE_MUTEX_ON_COMPLETION_FAILED = 0xC000070D,
+  MD_NTSTATUS_WIN_STATUS_THREADPOOL_FREE_LIBRARY_ON_COMPLETION_FAILED = 0xC000070E,
+  MD_NTSTATUS_WIN_STATUS_THREADPOOL_RELEASED_DURING_OPERATION = 0xC000070F,
+  MD_NTSTATUS_WIN_STATUS_CALLBACK_RETURNED_WHILE_IMPERSONATING = 0xC0000710,
+  MD_NTSTATUS_WIN_STATUS_APC_RETURNED_WHILE_IMPERSONATING = 0xC0000711,
+  MD_NTSTATUS_WIN_STATUS_PROCESS_IS_PROTECTED = 0xC0000712,
+  MD_NTSTATUS_WIN_STATUS_MCA_EXCEPTION = 0xC0000713,
+  MD_NTSTATUS_WIN_STATUS_CERTIFICATE_MAPPING_NOT_UNIQUE = 0xC0000714,
+  MD_NTSTATUS_WIN_STATUS_SYMLINK_CLASS_DISABLED = 0xC0000715,
+  MD_NTSTATUS_WIN_STATUS_INVALID_IDN_NORMALIZATION = 0xC0000716,
+  MD_NTSTATUS_WIN_STATUS_NO_UNICODE_TRANSLATION = 0xC0000717,
+  MD_NTSTATUS_WIN_STATUS_ALREADY_REGISTERED = 0xC0000718,
+  MD_NTSTATUS_WIN_STATUS_CONTEXT_MISMATCH = 0xC0000719,
+  MD_NTSTATUS_WIN_STATUS_PORT_ALREADY_HAS_COMPLETION_LIST = 0xC000071A,
+  MD_NTSTATUS_WIN_STATUS_CALLBACK_RETURNED_THREAD_PRIORITY = 0xC000071B,
+  MD_NTSTATUS_WIN_STATUS_INVALID_THREAD = 0xC000071C,
+  MD_NTSTATUS_WIN_STATUS_CALLBACK_RETURNED_TRANSACTION = 0xC000071D,
+  MD_NTSTATUS_WIN_STATUS_CALLBACK_RETURNED_LDR_LOCK = 0xC000071E,
+  MD_NTSTATUS_WIN_STATUS_CALLBACK_RETURNED_LANG = 0xC000071F,
+  MD_NTSTATUS_WIN_STATUS_CALLBACK_RETURNED_PRI_BACK = 0xC0000720,
+  MD_NTSTATUS_WIN_STATUS_CALLBACK_RETURNED_THREAD_AFFINITY = 0xC0000721,
+  MD_NTSTATUS_WIN_STATUS_DISK_REPAIR_DISABLED = 0xC0000800,
+  MD_NTSTATUS_WIN_STATUS_DS_DOMAIN_RENAME_IN_PROGRESS = 0xC0000801,
+  MD_NTSTATUS_WIN_STATUS_DISK_QUOTA_EXCEEDED = 0xC0000802,
+  MD_NTSTATUS_WIN_STATUS_CONTENT_BLOCKED = 0xC0000804,
+  MD_NTSTATUS_WIN_STATUS_BAD_CLUSTERS = 0xC0000805,
+  MD_NTSTATUS_WIN_STATUS_VOLUME_DIRTY = 0xC0000806,
+  MD_NTSTATUS_WIN_STATUS_DISK_REPAIR_UNSUCCESSFUL = 0xC0000808,
+  MD_NTSTATUS_WIN_STATUS_CORRUPT_LOG_OVERFULL = 0xC0000809,
+  MD_NTSTATUS_WIN_STATUS_CORRUPT_LOG_CORRUPTED = 0xC000080A,
+  MD_NTSTATUS_WIN_STATUS_CORRUPT_LOG_UNAVAILABLE = 0xC000080B,
+  MD_NTSTATUS_WIN_STATUS_CORRUPT_LOG_DELETED_FULL = 0xC000080C,
+  MD_NTSTATUS_WIN_STATUS_CORRUPT_LOG_CLEARED = 0xC000080D,
+  MD_NTSTATUS_WIN_STATUS_ORPHAN_NAME_EXHAUSTED = 0xC000080E,
+  MD_NTSTATUS_WIN_STATUS_PROACTIVE_SCAN_IN_PROGRESS = 0xC000080F,
+  MD_NTSTATUS_WIN_STATUS_ENCRYPTED_IO_NOT_POSSIBLE = 0xC0000810,
+  MD_NTSTATUS_WIN_STATUS_CORRUPT_LOG_UPLEVEL_RECORDS = 0xC0000811,
+  MD_NTSTATUS_WIN_STATUS_FILE_CHECKED_OUT = 0xC0000901,
+  MD_NTSTATUS_WIN_STATUS_CHECKOUT_REQUIRED = 0xC0000902,
+  MD_NTSTATUS_WIN_STATUS_BAD_FILE_TYPE = 0xC0000903,
+  MD_NTSTATUS_WIN_STATUS_FILE_TOO_LARGE = 0xC0000904,
+  MD_NTSTATUS_WIN_STATUS_FORMS_AUTH_REQUIRED = 0xC0000905,
+  MD_NTSTATUS_WIN_STATUS_VIRUS_INFECTED = 0xC0000906,
+  MD_NTSTATUS_WIN_STATUS_VIRUS_DELETED = 0xC0000907,
+  MD_NTSTATUS_WIN_STATUS_BAD_MCFG_TABLE = 0xC0000908,
+  MD_NTSTATUS_WIN_STATUS_CANNOT_BREAK_OPLOCK = 0xC0000909,
+  MD_NTSTATUS_WIN_STATUS_BAD_KEY = 0xC000090A,
+  MD_NTSTATUS_WIN_STATUS_BAD_DATA = 0xC000090B,
+  MD_NTSTATUS_WIN_STATUS_NO_KEY = 0xC000090C,
+  MD_NTSTATUS_WIN_STATUS_FILE_HANDLE_REVOKED = 0xC0000910,
+  MD_NTSTATUS_WIN_STATUS_WOW_ASSERTION = 0xC0009898,
+  MD_NTSTATUS_WIN_STATUS_INVALID_SIGNATURE = 0xC000A000,
+  MD_NTSTATUS_WIN_STATUS_HMAC_NOT_SUPPORTED = 0xC000A001,
+  MD_NTSTATUS_WIN_STATUS_AUTH_TAG_MISMATCH = 0xC000A002,
+  MD_NTSTATUS_WIN_STATUS_INVALID_STATE_TRANSITION = 0xC000A003,
+  MD_NTSTATUS_WIN_STATUS_INVALID_KERNEL_INFO_VERSION = 0xC000A004,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PEP_INFO_VERSION = 0xC000A005,
+  MD_NTSTATUS_WIN_STATUS_IPSEC_QUEUE_OVERFLOW = 0xC000A010,
+  MD_NTSTATUS_WIN_STATUS_ND_QUEUE_OVERFLOW = 0xC000A011,
+  MD_NTSTATUS_WIN_STATUS_HOPLIMIT_EXCEEDED = 0xC000A012,
+  MD_NTSTATUS_WIN_STATUS_PROTOCOL_NOT_SUPPORTED = 0xC000A013,
+  MD_NTSTATUS_WIN_STATUS_FASTPATH_REJECTED = 0xC000A014,
+  MD_NTSTATUS_WIN_STATUS_LOST_WRITEBEHIND_DATA_NETWORK_DISCONNECTED = 0xC000A080,
+  MD_NTSTATUS_WIN_STATUS_LOST_WRITEBEHIND_DATA_NETWORK_SERVER_ERROR = 0xC000A081,
+  MD_NTSTATUS_WIN_STATUS_LOST_WRITEBEHIND_DATA_LOCAL_DISK_ERROR = 0xC000A082,
+  MD_NTSTATUS_WIN_STATUS_XML_PARSE_ERROR = 0xC000A083,
+  MD_NTSTATUS_WIN_STATUS_XMLDSIG_ERROR = 0xC000A084,
+  MD_NTSTATUS_WIN_STATUS_WRONG_COMPARTMENT = 0xC000A085,
+  MD_NTSTATUS_WIN_STATUS_AUTHIP_FAILURE = 0xC000A086,
+  MD_NTSTATUS_WIN_STATUS_DS_OID_MAPPED_GROUP_CANT_HAVE_MEMBERS = 0xC000A087,
+  MD_NTSTATUS_WIN_STATUS_DS_OID_NOT_FOUND = 0xC000A088,
+  MD_NTSTATUS_WIN_STATUS_INCORRECT_ACCOUNT_TYPE = 0xC000A089,
+  MD_NTSTATUS_WIN_STATUS_HASH_NOT_SUPPORTED = 0xC000A100,
+  MD_NTSTATUS_WIN_STATUS_HASH_NOT_PRESENT = 0xC000A101,
+  MD_NTSTATUS_WIN_STATUS_SECONDARY_IC_PROVIDER_NOT_REGISTERED = 0xC000A121,
+  MD_NTSTATUS_WIN_STATUS_GPIO_CLIENT_INFORMATION_INVALID = 0xC000A122,
+  MD_NTSTATUS_WIN_STATUS_GPIO_VERSION_NOT_SUPPORTED = 0xC000A123,
+  MD_NTSTATUS_WIN_STATUS_GPIO_INVALID_REGISTRATION_PACKET = 0xC000A124,
+  MD_NTSTATUS_WIN_STATUS_GPIO_OPERATION_DENIED = 0xC000A125,
+  MD_NTSTATUS_WIN_STATUS_GPIO_INCOMPATIBLE_CONNECT_MODE = 0xC000A126,
+  MD_NTSTATUS_WIN_STATUS_CANNOT_SWITCH_RUNLEVEL = 0xC000A141,
+  MD_NTSTATUS_WIN_STATUS_INVALID_RUNLEVEL_SETTING = 0xC000A142,
+  MD_NTSTATUS_WIN_STATUS_RUNLEVEL_SWITCH_TIMEOUT = 0xC000A143,
+  MD_NTSTATUS_WIN_STATUS_RUNLEVEL_SWITCH_AGENT_TIMEOUT = 0xC000A145,
+  MD_NTSTATUS_WIN_STATUS_RUNLEVEL_SWITCH_IN_PROGRESS = 0xC000A146,
+  MD_NTSTATUS_WIN_STATUS_NOT_APPCONTAINER = 0xC000A200,
+  MD_NTSTATUS_WIN_STATUS_NOT_SUPPORTED_IN_APPCONTAINER = 0xC000A201,
+  MD_NTSTATUS_WIN_STATUS_INVALID_PACKAGE_SID_LENGTH = 0xC000A202,
+  MD_NTSTATUS_WIN_STATUS_APP_DATA_NOT_FOUND = 0xC000A281,
+  MD_NTSTATUS_WIN_STATUS_APP_DATA_EXPIRED = 0xC000A282,
+  MD_NTSTATUS_WIN_STATUS_APP_DATA_CORRUPT = 0xC000A283,
+  MD_NTSTATUS_WIN_STATUS_APP_DATA_LIMIT_EXCEEDED = 0xC000A284,
+  MD_NTSTATUS_WIN_STATUS_APP_DATA_REBOOT_REQUIRED = 0xC000A285,
+  MD_NTSTATUS_WIN_STATUS_OFFLOAD_READ_FLT_NOT_SUPPORTED = 0xC000A2A1,
+  MD_NTSTATUS_WIN_STATUS_OFFLOAD_WRITE_FLT_NOT_SUPPORTED = 0xC000A2A2,
+  MD_NTSTATUS_WIN_STATUS_OFFLOAD_READ_FILE_NOT_SUPPORTED = 0xC000A2A3,
+  MD_NTSTATUS_WIN_STATUS_OFFLOAD_WRITE_FILE_NOT_SUPPORTED = 0xC000A2A4,
+  MD_NTSTATUS_WIN_DBG_NO_STATE_CHANGE = 0xC0010001,
+  MD_NTSTATUS_WIN_DBG_APP_NOT_IDLE = 0xC0010002,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_STRING_BINDING = 0xC0020001,
+  MD_NTSTATUS_WIN_RPC_NT_WRONG_KIND_OF_BINDING = 0xC0020002,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_BINDING = 0xC0020003,
+  MD_NTSTATUS_WIN_RPC_NT_PROTSEQ_NOT_SUPPORTED = 0xC0020004,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_RPC_PROTSEQ = 0xC0020005,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_STRING_UUID = 0xC0020006,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_ENDPOINT_FORMAT = 0xC0020007,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_NET_ADDR = 0xC0020008,
+  MD_NTSTATUS_WIN_RPC_NT_NO_ENDPOINT_FOUND = 0xC0020009,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_TIMEOUT = 0xC002000A,
+  MD_NTSTATUS_WIN_RPC_NT_OBJECT_NOT_FOUND = 0xC002000B,
+  MD_NTSTATUS_WIN_RPC_NT_ALREADY_REGISTERED = 0xC002000C,
+  MD_NTSTATUS_WIN_RPC_NT_TYPE_ALREADY_REGISTERED = 0xC002000D,
+  MD_NTSTATUS_WIN_RPC_NT_ALREADY_LISTENING = 0xC002000E,
+  MD_NTSTATUS_WIN_RPC_NT_NO_PROTSEQS_REGISTERED = 0xC002000F,
+  MD_NTSTATUS_WIN_RPC_NT_NOT_LISTENING = 0xC0020010,
+  MD_NTSTATUS_WIN_RPC_NT_UNKNOWN_MGR_TYPE = 0xC0020011,
+  MD_NTSTATUS_WIN_RPC_NT_UNKNOWN_IF = 0xC0020012,
+  MD_NTSTATUS_WIN_RPC_NT_NO_BINDINGS = 0xC0020013,
+  MD_NTSTATUS_WIN_RPC_NT_NO_PROTSEQS = 0xC0020014,
+  MD_NTSTATUS_WIN_RPC_NT_CANT_CREATE_ENDPOINT = 0xC0020015,
+  MD_NTSTATUS_WIN_RPC_NT_OUT_OF_RESOURCES = 0xC0020016,
+  MD_NTSTATUS_WIN_RPC_NT_SERVER_UNAVAILABLE = 0xC0020017,
+  MD_NTSTATUS_WIN_RPC_NT_SERVER_TOO_BUSY = 0xC0020018,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_NETWORK_OPTIONS = 0xC0020019,
+  MD_NTSTATUS_WIN_RPC_NT_NO_CALL_ACTIVE = 0xC002001A,
+  MD_NTSTATUS_WIN_RPC_NT_CALL_FAILED = 0xC002001B,
+  MD_NTSTATUS_WIN_RPC_NT_CALL_FAILED_DNE = 0xC002001C,
+  MD_NTSTATUS_WIN_RPC_NT_PROTOCOL_ERROR = 0xC002001D,
+  MD_NTSTATUS_WIN_RPC_NT_UNSUPPORTED_TRANS_SYN = 0xC002001F,
+  MD_NTSTATUS_WIN_RPC_NT_UNSUPPORTED_TYPE = 0xC0020021,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_TAG = 0xC0020022,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_BOUND = 0xC0020023,
+  MD_NTSTATUS_WIN_RPC_NT_NO_ENTRY_NAME = 0xC0020024,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_NAME_SYNTAX = 0xC0020025,
+  MD_NTSTATUS_WIN_RPC_NT_UNSUPPORTED_NAME_SYNTAX = 0xC0020026,
+  MD_NTSTATUS_WIN_RPC_NT_UUID_NO_ADDRESS = 0xC0020028,
+  MD_NTSTATUS_WIN_RPC_NT_DUPLICATE_ENDPOINT = 0xC0020029,
+  MD_NTSTATUS_WIN_RPC_NT_UNKNOWN_AUTHN_TYPE = 0xC002002A,
+  MD_NTSTATUS_WIN_RPC_NT_MAX_CALLS_TOO_SMALL = 0xC002002B,
+  MD_NTSTATUS_WIN_RPC_NT_STRING_TOO_LONG = 0xC002002C,
+  MD_NTSTATUS_WIN_RPC_NT_PROTSEQ_NOT_FOUND = 0xC002002D,
+  MD_NTSTATUS_WIN_RPC_NT_PROCNUM_OUT_OF_RANGE = 0xC002002E,
+  MD_NTSTATUS_WIN_RPC_NT_BINDING_HAS_NO_AUTH = 0xC002002F,
+  MD_NTSTATUS_WIN_RPC_NT_UNKNOWN_AUTHN_SERVICE = 0xC0020030,
+  MD_NTSTATUS_WIN_RPC_NT_UNKNOWN_AUTHN_LEVEL = 0xC0020031,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_AUTH_IDENTITY = 0xC0020032,
+  MD_NTSTATUS_WIN_RPC_NT_UNKNOWN_AUTHZ_SERVICE = 0xC0020033,
+  MD_NTSTATUS_WIN_EPT_NT_INVALID_ENTRY = 0xC0020034,
+  MD_NTSTATUS_WIN_EPT_NT_CANT_PERFORM_OP = 0xC0020035,
+  MD_NTSTATUS_WIN_EPT_NT_NOT_REGISTERED = 0xC0020036,
+  MD_NTSTATUS_WIN_RPC_NT_NOTHING_TO_EXPORT = 0xC0020037,
+  MD_NTSTATUS_WIN_RPC_NT_INCOMPLETE_NAME = 0xC0020038,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_VERS_OPTION = 0xC0020039,
+  MD_NTSTATUS_WIN_RPC_NT_NO_MORE_MEMBERS = 0xC002003A,
+  MD_NTSTATUS_WIN_RPC_NT_NOT_ALL_OBJS_UNEXPORTED = 0xC002003B,
+  MD_NTSTATUS_WIN_RPC_NT_INTERFACE_NOT_FOUND = 0xC002003C,
+  MD_NTSTATUS_WIN_RPC_NT_ENTRY_ALREADY_EXISTS = 0xC002003D,
+  MD_NTSTATUS_WIN_RPC_NT_ENTRY_NOT_FOUND = 0xC002003E,
+  MD_NTSTATUS_WIN_RPC_NT_NAME_SERVICE_UNAVAILABLE = 0xC002003F,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_NAF_ID = 0xC0020040,
+  MD_NTSTATUS_WIN_RPC_NT_CANNOT_SUPPORT = 0xC0020041,
+  MD_NTSTATUS_WIN_RPC_NT_NO_CONTEXT_AVAILABLE = 0xC0020042,
+  MD_NTSTATUS_WIN_RPC_NT_INTERNAL_ERROR = 0xC0020043,
+  MD_NTSTATUS_WIN_RPC_NT_ZERO_DIVIDE = 0xC0020044,
+  MD_NTSTATUS_WIN_RPC_NT_ADDRESS_ERROR = 0xC0020045,
+  MD_NTSTATUS_WIN_RPC_NT_FP_DIV_ZERO = 0xC0020046,
+  MD_NTSTATUS_WIN_RPC_NT_FP_UNDERFLOW = 0xC0020047,
+  MD_NTSTATUS_WIN_RPC_NT_FP_OVERFLOW = 0xC0020048,
+  MD_NTSTATUS_WIN_RPC_NT_CALL_IN_PROGRESS = 0xC0020049,
+  MD_NTSTATUS_WIN_RPC_NT_NO_MORE_BINDINGS = 0xC002004A,
+  MD_NTSTATUS_WIN_RPC_NT_GROUP_MEMBER_NOT_FOUND = 0xC002004B,
+  MD_NTSTATUS_WIN_EPT_NT_CANT_CREATE = 0xC002004C,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_OBJECT = 0xC002004D,
+  MD_NTSTATUS_WIN_RPC_NT_NO_INTERFACES = 0xC002004F,
+  MD_NTSTATUS_WIN_RPC_NT_CALL_CANCELLED = 0xC0020050,
+  MD_NTSTATUS_WIN_RPC_NT_BINDING_INCOMPLETE = 0xC0020051,
+  MD_NTSTATUS_WIN_RPC_NT_COMM_FAILURE = 0xC0020052,
+  MD_NTSTATUS_WIN_RPC_NT_UNSUPPORTED_AUTHN_LEVEL = 0xC0020053,
+  MD_NTSTATUS_WIN_RPC_NT_NO_PRINC_NAME = 0xC0020054,
+  MD_NTSTATUS_WIN_RPC_NT_NOT_RPC_ERROR = 0xC0020055,
+  MD_NTSTATUS_WIN_RPC_NT_SEC_PKG_ERROR = 0xC0020057,
+  MD_NTSTATUS_WIN_RPC_NT_NOT_CANCELLED = 0xC0020058,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_ASYNC_HANDLE = 0xC0020062,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_ASYNC_CALL = 0xC0020063,
+  MD_NTSTATUS_WIN_RPC_NT_PROXY_ACCESS_DENIED = 0xC0020064,
+  MD_NTSTATUS_WIN_RPC_NT_COOKIE_AUTH_FAILED = 0xC0020065,
+  MD_NTSTATUS_WIN_RPC_NT_NO_MORE_ENTRIES = 0xC0030001,
+  MD_NTSTATUS_WIN_RPC_NT_SS_CHAR_TRANS_OPEN_FAIL = 0xC0030002,
+  MD_NTSTATUS_WIN_RPC_NT_SS_CHAR_TRANS_SHORT_FILE = 0xC0030003,
+  MD_NTSTATUS_WIN_RPC_NT_SS_IN_NULL_CONTEXT = 0xC0030004,
+  MD_NTSTATUS_WIN_RPC_NT_SS_CONTEXT_MISMATCH = 0xC0030005,
+  MD_NTSTATUS_WIN_RPC_NT_SS_CONTEXT_DAMAGED = 0xC0030006,
+  MD_NTSTATUS_WIN_RPC_NT_SS_HANDLES_MISMATCH = 0xC0030007,
+  MD_NTSTATUS_WIN_RPC_NT_SS_CANNOT_GET_CALL_HANDLE = 0xC0030008,
+  MD_NTSTATUS_WIN_RPC_NT_NULL_REF_POINTER = 0xC0030009,
+  MD_NTSTATUS_WIN_RPC_NT_ENUM_VALUE_OUT_OF_RANGE = 0xC003000A,
+  MD_NTSTATUS_WIN_RPC_NT_BYTE_COUNT_TOO_SMALL = 0xC003000B,
+  MD_NTSTATUS_WIN_RPC_NT_BAD_STUB_DATA = 0xC003000C,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_ES_ACTION = 0xC0030059,
+  MD_NTSTATUS_WIN_RPC_NT_WRONG_ES_VERSION = 0xC003005A,
+  MD_NTSTATUS_WIN_RPC_NT_WRONG_STUB_VERSION = 0xC003005B,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_PIPE_OBJECT = 0xC003005C,
+  MD_NTSTATUS_WIN_RPC_NT_INVALID_PIPE_OPERATION = 0xC003005D,
+  MD_NTSTATUS_WIN_RPC_NT_WRONG_PIPE_VERSION = 0xC003005E,
+  MD_NTSTATUS_WIN_RPC_NT_PIPE_CLOSED = 0xC003005F,
+  MD_NTSTATUS_WIN_RPC_NT_PIPE_DISCIPLINE_ERROR = 0xC0030060,
+  MD_NTSTATUS_WIN_RPC_NT_PIPE_EMPTY = 0xC0030061,
+  MD_NTSTATUS_WIN_STATUS_PNP_BAD_MPS_TABLE = 0xC0040035,
+  MD_NTSTATUS_WIN_STATUS_PNP_TRANSLATION_FAILED = 0xC0040036,
+  MD_NTSTATUS_WIN_STATUS_PNP_IRQ_TRANSLATION_FAILED = 0xC0040037,
+  MD_NTSTATUS_WIN_STATUS_PNP_INVALID_ID = 0xC0040038,
+  MD_NTSTATUS_WIN_STATUS_IO_REISSUE_AS_CACHED = 0xC0040039,
+  MD_NTSTATUS_WIN_STATUS_CTX_WINSTATION_NAME_INVALID = 0xC00A0001,
+  MD_NTSTATUS_WIN_STATUS_CTX_INVALID_PD = 0xC00A0002,
+  MD_NTSTATUS_WIN_STATUS_CTX_PD_NOT_FOUND = 0xC00A0003,
+  MD_NTSTATUS_WIN_STATUS_CTX_CLOSE_PENDING = 0xC00A0006,
+  MD_NTSTATUS_WIN_STATUS_CTX_NO_OUTBUF = 0xC00A0007,
+  MD_NTSTATUS_WIN_STATUS_CTX_MODEM_INF_NOT_FOUND = 0xC00A0008,
+  MD_NTSTATUS_WIN_STATUS_CTX_INVALID_MODEMNAME = 0xC00A0009,
+  MD_NTSTATUS_WIN_STATUS_CTX_RESPONSE_ERROR = 0xC00A000A,
+  MD_NTSTATUS_WIN_STATUS_CTX_MODEM_RESPONSE_TIMEOUT = 0xC00A000B,
+  MD_NTSTATUS_WIN_STATUS_CTX_MODEM_RESPONSE_NO_CARRIER = 0xC00A000C,
+  MD_NTSTATUS_WIN_STATUS_CTX_MODEM_RESPONSE_NO_DIALTONE = 0xC00A000D,
+  MD_NTSTATUS_WIN_STATUS_CTX_MODEM_RESPONSE_BUSY = 0xC00A000E,
+  MD_NTSTATUS_WIN_STATUS_CTX_MODEM_RESPONSE_VOICE = 0xC00A000F,
+  MD_NTSTATUS_WIN_STATUS_CTX_TD_ERROR = 0xC00A0010,
+  MD_NTSTATUS_WIN_STATUS_CTX_LICENSE_CLIENT_INVALID = 0xC00A0012,
+  MD_NTSTATUS_WIN_STATUS_CTX_LICENSE_NOT_AVAILABLE = 0xC00A0013,
+  MD_NTSTATUS_WIN_STATUS_CTX_LICENSE_EXPIRED = 0xC00A0014,
+  MD_NTSTATUS_WIN_STATUS_CTX_WINSTATION_NOT_FOUND = 0xC00A0015,
+  MD_NTSTATUS_WIN_STATUS_CTX_WINSTATION_NAME_COLLISION = 0xC00A0016,
+  MD_NTSTATUS_WIN_STATUS_CTX_WINSTATION_BUSY = 0xC00A0017,
+  MD_NTSTATUS_WIN_STATUS_CTX_BAD_VIDEO_MODE = 0xC00A0018,
+  MD_NTSTATUS_WIN_STATUS_CTX_GRAPHICS_INVALID = 0xC00A0022,
+  MD_NTSTATUS_WIN_STATUS_CTX_NOT_CONSOLE = 0xC00A0024,
+  MD_NTSTATUS_WIN_STATUS_CTX_CLIENT_QUERY_TIMEOUT = 0xC00A0026,
+  MD_NTSTATUS_WIN_STATUS_CTX_CONSOLE_DISCONNECT = 0xC00A0027,
+  MD_NTSTATUS_WIN_STATUS_CTX_CONSOLE_CONNECT = 0xC00A0028,
+  MD_NTSTATUS_WIN_STATUS_CTX_SHADOW_DENIED = 0xC00A002A,
+  MD_NTSTATUS_WIN_STATUS_CTX_WINSTATION_ACCESS_DENIED = 0xC00A002B,
+  MD_NTSTATUS_WIN_STATUS_CTX_INVALID_WD = 0xC00A002E,
+  MD_NTSTATUS_WIN_STATUS_CTX_WD_NOT_FOUND = 0xC00A002F,
+  MD_NTSTATUS_WIN_STATUS_CTX_SHADOW_INVALID = 0xC00A0030,
+  MD_NTSTATUS_WIN_STATUS_CTX_SHADOW_DISABLED = 0xC00A0031,
+  MD_NTSTATUS_WIN_STATUS_RDP_PROTOCOL_ERROR = 0xC00A0032,
+  MD_NTSTATUS_WIN_STATUS_CTX_CLIENT_LICENSE_NOT_SET = 0xC00A0033,
+  MD_NTSTATUS_WIN_STATUS_CTX_CLIENT_LICENSE_IN_USE = 0xC00A0034,
+  MD_NTSTATUS_WIN_STATUS_CTX_SHADOW_ENDED_BY_MODE_CHANGE = 0xC00A0035,
+  MD_NTSTATUS_WIN_STATUS_CTX_SHADOW_NOT_RUNNING = 0xC00A0036,
+  MD_NTSTATUS_WIN_STATUS_CTX_LOGON_DISABLED = 0xC00A0037,
+  MD_NTSTATUS_WIN_STATUS_CTX_SECURITY_LAYER_ERROR = 0xC00A0038,
+  MD_NTSTATUS_WIN_STATUS_TS_INCOMPATIBLE_SESSIONS = 0xC00A0039,
+  MD_NTSTATUS_WIN_STATUS_TS_VIDEO_SUBSYSTEM_ERROR = 0xC00A003A,
+  MD_NTSTATUS_WIN_STATUS_MUI_FILE_NOT_FOUND = 0xC00B0001,
+  MD_NTSTATUS_WIN_STATUS_MUI_INVALID_FILE = 0xC00B0002,
+  MD_NTSTATUS_WIN_STATUS_MUI_INVALID_RC_CONFIG = 0xC00B0003,
+  MD_NTSTATUS_WIN_STATUS_MUI_INVALID_LOCALE_NAME = 0xC00B0004,
+  MD_NTSTATUS_WIN_STATUS_MUI_INVALID_ULTIMATEFALLBACK_NAME = 0xC00B0005,
+  MD_NTSTATUS_WIN_STATUS_MUI_FILE_NOT_LOADED = 0xC00B0006,
+  MD_NTSTATUS_WIN_STATUS_RESOURCE_ENUM_USER_STOP = 0xC00B0007,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_INVALID_NODE = 0xC0130001,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_EXISTS = 0xC0130002,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_JOIN_IN_PROGRESS = 0xC0130003,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_NOT_FOUND = 0xC0130004,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_LOCAL_NODE_NOT_FOUND = 0xC0130005,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NETWORK_EXISTS = 0xC0130006,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NETWORK_NOT_FOUND = 0xC0130007,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NETINTERFACE_EXISTS = 0xC0130008,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NETINTERFACE_NOT_FOUND = 0xC0130009,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_INVALID_REQUEST = 0xC013000A,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_INVALID_NETWORK_PROVIDER = 0xC013000B,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_DOWN = 0xC013000C,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_UNREACHABLE = 0xC013000D,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_NOT_MEMBER = 0xC013000E,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_JOIN_NOT_IN_PROGRESS = 0xC013000F,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_INVALID_NETWORK = 0xC0130010,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NO_NET_ADAPTERS = 0xC0130011,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_UP = 0xC0130012,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_PAUSED = 0xC0130013,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_NOT_PAUSED = 0xC0130014,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NO_SECURITY_CONTEXT = 0xC0130015,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NETWORK_NOT_INTERNAL = 0xC0130016,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_POISONED = 0xC0130017,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_NON_CSV_PATH = 0xC0130018,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_VOLUME_NOT_LOCAL = 0xC0130019,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_READ_OPLOCK_BREAK_IN_PROGRESS = 0xC0130020,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_AUTO_PAUSE_ERROR = 0xC0130021,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_REDIRECTED = 0xC0130022,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_NOT_REDIRECTED = 0xC0130023,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_VOLUME_DRAINING = 0xC0130024,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_SNAPSHOT_CREATION_IN_PROGRESS = 0xC0130025,
+  MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_VOLUME_DRAINING_SUCCEEDED_DOWNLEVEL = 0xC0130026,
+  MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_OPCODE = 0xC0140001,
+  MD_NTSTATUS_WIN_STATUS_ACPI_STACK_OVERFLOW = 0xC0140002,
+  MD_NTSTATUS_WIN_STATUS_ACPI_ASSERT_FAILED = 0xC0140003,
+  MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_INDEX = 0xC0140004,
+  MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_ARGUMENT = 0xC0140005,
+  MD_NTSTATUS_WIN_STATUS_ACPI_FATAL = 0xC0140006,
+  MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_SUPERNAME = 0xC0140007,
+  MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_ARGTYPE = 0xC0140008,
+  MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_OBJTYPE = 0xC0140009,
+  MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_TARGETTYPE = 0xC014000A,
+  MD_NTSTATUS_WIN_STATUS_ACPI_INCORRECT_ARGUMENT_COUNT = 0xC014000B,
+  MD_NTSTATUS_WIN_STATUS_ACPI_ADDRESS_NOT_MAPPED = 0xC014000C,
+  MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_EVENTTYPE = 0xC014000D,
+  MD_NTSTATUS_WIN_STATUS_ACPI_HANDLER_COLLISION = 0xC014000E,
+  MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_DATA = 0xC014000F,
+  MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_REGION = 0xC0140010,
+  MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_ACCESS_SIZE = 0xC0140011,
+  MD_NTSTATUS_WIN_STATUS_ACPI_ACQUIRE_GLOBAL_LOCK = 0xC0140012,
+  MD_NTSTATUS_WIN_STATUS_ACPI_ALREADY_INITIALIZED = 0xC0140013,
+  MD_NTSTATUS_WIN_STATUS_ACPI_NOT_INITIALIZED = 0xC0140014,
+  MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_MUTEX_LEVEL = 0xC0140015,
+  MD_NTSTATUS_WIN_STATUS_ACPI_MUTEX_NOT_OWNED = 0xC0140016,
+  MD_NTSTATUS_WIN_STATUS_ACPI_MUTEX_NOT_OWNER = 0xC0140017,
+  MD_NTSTATUS_WIN_STATUS_ACPI_RS_ACCESS = 0xC0140018,
+  MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_TABLE = 0xC0140019,
+  MD_NTSTATUS_WIN_STATUS_ACPI_REG_HANDLER_FAILED = 0xC0140020,
+  MD_NTSTATUS_WIN_STATUS_ACPI_POWER_REQUEST_FAILED = 0xC0140021,
+  MD_NTSTATUS_WIN_STATUS_SXS_SECTION_NOT_FOUND = 0xC0150001,
+  MD_NTSTATUS_WIN_STATUS_SXS_CANT_GEN_ACTCTX = 0xC0150002,
+  MD_NTSTATUS_WIN_STATUS_SXS_INVALID_ACTCTXDATA_FORMAT = 0xC0150003,
+  MD_NTSTATUS_WIN_STATUS_SXS_ASSEMBLY_NOT_FOUND = 0xC0150004,
+  MD_NTSTATUS_WIN_STATUS_SXS_MANIFEST_FORMAT_ERROR = 0xC0150005,
+  MD_NTSTATUS_WIN_STATUS_SXS_MANIFEST_PARSE_ERROR = 0xC0150006,
+  MD_NTSTATUS_WIN_STATUS_SXS_ACTIVATION_CONTEXT_DISABLED = 0xC0150007,
+  MD_NTSTATUS_WIN_STATUS_SXS_KEY_NOT_FOUND = 0xC0150008,
+  MD_NTSTATUS_WIN_STATUS_SXS_VERSION_CONFLICT = 0xC0150009,
+  MD_NTSTATUS_WIN_STATUS_SXS_WRONG_SECTION_TYPE = 0xC015000A,
+  MD_NTSTATUS_WIN_STATUS_SXS_THREAD_QUERIES_DISABLED = 0xC015000B,
+  MD_NTSTATUS_WIN_STATUS_SXS_ASSEMBLY_MISSING = 0xC015000C,
+  MD_NTSTATUS_WIN_STATUS_SXS_PROCESS_DEFAULT_ALREADY_SET = 0xC015000E,
+  MD_NTSTATUS_WIN_STATUS_SXS_EARLY_DEACTIVATION = 0xC015000F,
+  MD_NTSTATUS_WIN_STATUS_SXS_INVALID_DEACTIVATION = 0xC0150010,
+  MD_NTSTATUS_WIN_STATUS_SXS_MULTIPLE_DEACTIVATION = 0xC0150011,
+  MD_NTSTATUS_WIN_STATUS_SXS_SYSTEM_DEFAULT_ACTIVATION_CONTEXT_EMPTY = 0xC0150012,
+  MD_NTSTATUS_WIN_STATUS_SXS_PROCESS_TERMINATION_REQUESTED = 0xC0150013,
MD_NTSTATUS_WIN_STATUS_SXS_CORRUPT_ACTIVATION_STACK = 0xC0150014, + MD_NTSTATUS_WIN_STATUS_SXS_CORRUPTION = 0xC0150015, + MD_NTSTATUS_WIN_STATUS_SXS_INVALID_IDENTITY_ATTRIBUTE_VALUE = 0xC0150016, + MD_NTSTATUS_WIN_STATUS_SXS_INVALID_IDENTITY_ATTRIBUTE_NAME = 0xC0150017, + MD_NTSTATUS_WIN_STATUS_SXS_IDENTITY_DUPLICATE_ATTRIBUTE = 0xC0150018, + MD_NTSTATUS_WIN_STATUS_SXS_IDENTITY_PARSE_ERROR = 0xC0150019, + MD_NTSTATUS_WIN_STATUS_SXS_COMPONENT_STORE_CORRUPT = 0xC015001A, + MD_NTSTATUS_WIN_STATUS_SXS_FILE_HASH_MISMATCH = 0xC015001B, + MD_NTSTATUS_WIN_STATUS_SXS_MANIFEST_IDENTITY_SAME_BUT_CONTENTS_DIFFERENT = 0xC015001C, + MD_NTSTATUS_WIN_STATUS_SXS_IDENTITIES_DIFFERENT = 0xC015001D, + MD_NTSTATUS_WIN_STATUS_SXS_ASSEMBLY_IS_NOT_A_DEPLOYMENT = 0xC015001E, + MD_NTSTATUS_WIN_STATUS_SXS_FILE_NOT_PART_OF_ASSEMBLY = 0xC015001F, + MD_NTSTATUS_WIN_STATUS_ADVANCED_INSTALLER_FAILED = 0xC0150020, + MD_NTSTATUS_WIN_STATUS_XML_ENCODING_MISMATCH = 0xC0150021, + MD_NTSTATUS_WIN_STATUS_SXS_MANIFEST_TOO_BIG = 0xC0150022, + MD_NTSTATUS_WIN_STATUS_SXS_SETTING_NOT_REGISTERED = 0xC0150023, + MD_NTSTATUS_WIN_STATUS_SXS_TRANSACTION_CLOSURE_INCOMPLETE = 0xC0150024, + MD_NTSTATUS_WIN_STATUS_SMI_PRIMITIVE_INSTALLER_FAILED = 0xC0150025, + MD_NTSTATUS_WIN_STATUS_GENERIC_COMMAND_FAILED = 0xC0150026, + MD_NTSTATUS_WIN_STATUS_SXS_FILE_HASH_MISSING = 0xC0150027, + MD_NTSTATUS_WIN_STATUS_TRANSACTIONAL_CONFLICT = 0xC0190001, + MD_NTSTATUS_WIN_STATUS_INVALID_TRANSACTION = 0xC0190002, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_NOT_ACTIVE = 0xC0190003, + MD_NTSTATUS_WIN_STATUS_TM_INITIALIZATION_FAILED = 0xC0190004, + MD_NTSTATUS_WIN_STATUS_RM_NOT_ACTIVE = 0xC0190005, + MD_NTSTATUS_WIN_STATUS_RM_METADATA_CORRUPT = 0xC0190006, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_NOT_JOINED = 0xC0190007, + MD_NTSTATUS_WIN_STATUS_DIRECTORY_NOT_RM = 0xC0190008, + MD_NTSTATUS_WIN_STATUS_TRANSACTIONS_UNSUPPORTED_REMOTE = 0xC019000A, + MD_NTSTATUS_WIN_STATUS_LOG_RESIZE_INVALID_SIZE = 0xC019000B, + MD_NTSTATUS_WIN_STATUS_REMOTE_FILE_VERSION_MISMATCH = 0xC019000C, + MD_NTSTATUS_WIN_STATUS_CRM_PROTOCOL_ALREADY_EXISTS = 0xC019000F, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_PROPAGATION_FAILED = 0xC0190010, + MD_NTSTATUS_WIN_STATUS_CRM_PROTOCOL_NOT_FOUND = 0xC0190011, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_SUPERIOR_EXISTS = 0xC0190012, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_REQUEST_NOT_VALID = 0xC0190013, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_NOT_REQUESTED = 0xC0190014, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_ALREADY_ABORTED = 0xC0190015, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_ALREADY_COMMITTED = 0xC0190016, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_INVALID_MARSHALL_BUFFER = 0xC0190017, + MD_NTSTATUS_WIN_STATUS_CURRENT_TRANSACTION_NOT_VALID = 0xC0190018, + MD_NTSTATUS_WIN_STATUS_LOG_GROWTH_FAILED = 0xC0190019, + MD_NTSTATUS_WIN_STATUS_OBJECT_NO_LONGER_EXISTS = 0xC0190021, + MD_NTSTATUS_WIN_STATUS_STREAM_MINIVERSION_NOT_FOUND = 0xC0190022, + MD_NTSTATUS_WIN_STATUS_STREAM_MINIVERSION_NOT_VALID = 0xC0190023, + MD_NTSTATUS_WIN_STATUS_MINIVERSION_INACCESSIBLE_FROM_SPECIFIED_TRANSACTION = 0xC0190024, + MD_NTSTATUS_WIN_STATUS_CANT_OPEN_MINIVERSION_WITH_MODIFY_INTENT = 0xC0190025, + MD_NTSTATUS_WIN_STATUS_CANT_CREATE_MORE_STREAM_MINIVERSIONS = 0xC0190026, + MD_NTSTATUS_WIN_STATUS_HANDLE_NO_LONGER_VALID = 0xC0190028, + MD_NTSTATUS_WIN_STATUS_LOG_CORRUPTION_DETECTED = 0xC0190030, + MD_NTSTATUS_WIN_STATUS_RM_DISCONNECTED = 0xC0190032, + MD_NTSTATUS_WIN_STATUS_ENLISTMENT_NOT_SUPERIOR = 0xC0190033, + MD_NTSTATUS_WIN_STATUS_FILE_IDENTITY_NOT_PERSISTENT = 0xC0190036, + 
MD_NTSTATUS_WIN_STATUS_CANT_BREAK_TRANSACTIONAL_DEPENDENCY = 0xC0190037, + MD_NTSTATUS_WIN_STATUS_CANT_CROSS_RM_BOUNDARY = 0xC0190038, + MD_NTSTATUS_WIN_STATUS_TXF_DIR_NOT_EMPTY = 0xC0190039, + MD_NTSTATUS_WIN_STATUS_INDOUBT_TRANSACTIONS_EXIST = 0xC019003A, + MD_NTSTATUS_WIN_STATUS_TM_VOLATILE = 0xC019003B, + MD_NTSTATUS_WIN_STATUS_ROLLBACK_TIMER_EXPIRED = 0xC019003C, + MD_NTSTATUS_WIN_STATUS_TXF_ATTRIBUTE_CORRUPT = 0xC019003D, + MD_NTSTATUS_WIN_STATUS_EFS_NOT_ALLOWED_IN_TRANSACTION = 0xC019003E, + MD_NTSTATUS_WIN_STATUS_TRANSACTIONAL_OPEN_NOT_ALLOWED = 0xC019003F, + MD_NTSTATUS_WIN_STATUS_TRANSACTED_MAPPING_UNSUPPORTED_REMOTE = 0xC0190040, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_REQUIRED_PROMOTION = 0xC0190043, + MD_NTSTATUS_WIN_STATUS_CANNOT_EXECUTE_FILE_IN_TRANSACTION = 0xC0190044, + MD_NTSTATUS_WIN_STATUS_TRANSACTIONS_NOT_FROZEN = 0xC0190045, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_FREEZE_IN_PROGRESS = 0xC0190046, + MD_NTSTATUS_WIN_STATUS_NOT_SNAPSHOT_VOLUME = 0xC0190047, + MD_NTSTATUS_WIN_STATUS_NO_SAVEPOINT_WITH_OPEN_FILES = 0xC0190048, + MD_NTSTATUS_WIN_STATUS_SPARSE_NOT_ALLOWED_IN_TRANSACTION = 0xC0190049, + MD_NTSTATUS_WIN_STATUS_TM_IDENTITY_MISMATCH = 0xC019004A, + MD_NTSTATUS_WIN_STATUS_FLOATED_SECTION = 0xC019004B, + MD_NTSTATUS_WIN_STATUS_CANNOT_ACCEPT_TRANSACTED_WORK = 0xC019004C, + MD_NTSTATUS_WIN_STATUS_CANNOT_ABORT_TRANSACTIONS = 0xC019004D, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_NOT_FOUND = 0xC019004E, + MD_NTSTATUS_WIN_STATUS_RESOURCEMANAGER_NOT_FOUND = 0xC019004F, + MD_NTSTATUS_WIN_STATUS_ENLISTMENT_NOT_FOUND = 0xC0190050, + MD_NTSTATUS_WIN_STATUS_TRANSACTIONMANAGER_NOT_FOUND = 0xC0190051, + MD_NTSTATUS_WIN_STATUS_TRANSACTIONMANAGER_NOT_ONLINE = 0xC0190052, + MD_NTSTATUS_WIN_STATUS_TRANSACTIONMANAGER_RECOVERY_NAME_COLLISION = 0xC0190053, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_NOT_ROOT = 0xC0190054, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_OBJECT_EXPIRED = 0xC0190055, + MD_NTSTATUS_WIN_STATUS_COMPRESSION_NOT_ALLOWED_IN_TRANSACTION = 0xC0190056, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_RESPONSE_NOT_ENLISTED = 0xC0190057, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_RECORD_TOO_LONG = 0xC0190058, + MD_NTSTATUS_WIN_STATUS_NO_LINK_TRACKING_IN_TRANSACTION = 0xC0190059, + MD_NTSTATUS_WIN_STATUS_OPERATION_NOT_SUPPORTED_IN_TRANSACTION = 0xC019005A, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_INTEGRITY_VIOLATED = 0xC019005B, + MD_NTSTATUS_WIN_STATUS_TRANSACTIONMANAGER_IDENTITY_MISMATCH = 0xC019005C, + MD_NTSTATUS_WIN_STATUS_RM_CANNOT_BE_FROZEN_FOR_SNAPSHOT = 0xC019005D, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_MUST_WRITETHROUGH = 0xC019005E, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_NO_SUPERIOR = 0xC019005F, + MD_NTSTATUS_WIN_STATUS_EXPIRED_HANDLE = 0xC0190060, + MD_NTSTATUS_WIN_STATUS_TRANSACTION_NOT_ENLISTED = 0xC0190061, + MD_NTSTATUS_WIN_STATUS_LOG_SECTOR_INVALID = 0xC01A0001, + MD_NTSTATUS_WIN_STATUS_LOG_SECTOR_PARITY_INVALID = 0xC01A0002, + MD_NTSTATUS_WIN_STATUS_LOG_SECTOR_REMAPPED = 0xC01A0003, + MD_NTSTATUS_WIN_STATUS_LOG_BLOCK_INCOMPLETE = 0xC01A0004, + MD_NTSTATUS_WIN_STATUS_LOG_INVALID_RANGE = 0xC01A0005, + MD_NTSTATUS_WIN_STATUS_LOG_BLOCKS_EXHAUSTED = 0xC01A0006, + MD_NTSTATUS_WIN_STATUS_LOG_READ_CONTEXT_INVALID = 0xC01A0007, + MD_NTSTATUS_WIN_STATUS_LOG_RESTART_INVALID = 0xC01A0008, + MD_NTSTATUS_WIN_STATUS_LOG_BLOCK_VERSION = 0xC01A0009, + MD_NTSTATUS_WIN_STATUS_LOG_BLOCK_INVALID = 0xC01A000A, + MD_NTSTATUS_WIN_STATUS_LOG_READ_MODE_INVALID = 0xC01A000B, + MD_NTSTATUS_WIN_STATUS_LOG_METADATA_CORRUPT = 0xC01A000D, + MD_NTSTATUS_WIN_STATUS_LOG_METADATA_INVALID = 0xC01A000E, + 
MD_NTSTATUS_WIN_STATUS_LOG_METADATA_INCONSISTENT = 0xC01A000F, + MD_NTSTATUS_WIN_STATUS_LOG_RESERVATION_INVALID = 0xC01A0010, + MD_NTSTATUS_WIN_STATUS_LOG_CANT_DELETE = 0xC01A0011, + MD_NTSTATUS_WIN_STATUS_LOG_CONTAINER_LIMIT_EXCEEDED = 0xC01A0012, + MD_NTSTATUS_WIN_STATUS_LOG_START_OF_LOG = 0xC01A0013, + MD_NTSTATUS_WIN_STATUS_LOG_POLICY_ALREADY_INSTALLED = 0xC01A0014, + MD_NTSTATUS_WIN_STATUS_LOG_POLICY_NOT_INSTALLED = 0xC01A0015, + MD_NTSTATUS_WIN_STATUS_LOG_POLICY_INVALID = 0xC01A0016, + MD_NTSTATUS_WIN_STATUS_LOG_POLICY_CONFLICT = 0xC01A0017, + MD_NTSTATUS_WIN_STATUS_LOG_PINNED_ARCHIVE_TAIL = 0xC01A0018, + MD_NTSTATUS_WIN_STATUS_LOG_RECORD_NONEXISTENT = 0xC01A0019, + MD_NTSTATUS_WIN_STATUS_LOG_RECORDS_RESERVED_INVALID = 0xC01A001A, + MD_NTSTATUS_WIN_STATUS_LOG_SPACE_RESERVED_INVALID = 0xC01A001B, + MD_NTSTATUS_WIN_STATUS_LOG_TAIL_INVALID = 0xC01A001C, + MD_NTSTATUS_WIN_STATUS_LOG_FULL = 0xC01A001D, + MD_NTSTATUS_WIN_STATUS_LOG_MULTIPLEXED = 0xC01A001E, + MD_NTSTATUS_WIN_STATUS_LOG_DEDICATED = 0xC01A001F, + MD_NTSTATUS_WIN_STATUS_LOG_ARCHIVE_NOT_IN_PROGRESS = 0xC01A0020, + MD_NTSTATUS_WIN_STATUS_LOG_ARCHIVE_IN_PROGRESS = 0xC01A0021, + MD_NTSTATUS_WIN_STATUS_LOG_EPHEMERAL = 0xC01A0022, + MD_NTSTATUS_WIN_STATUS_LOG_NOT_ENOUGH_CONTAINERS = 0xC01A0023, + MD_NTSTATUS_WIN_STATUS_LOG_CLIENT_ALREADY_REGISTERED = 0xC01A0024, + MD_NTSTATUS_WIN_STATUS_LOG_CLIENT_NOT_REGISTERED = 0xC01A0025, + MD_NTSTATUS_WIN_STATUS_LOG_FULL_HANDLER_IN_PROGRESS = 0xC01A0026, + MD_NTSTATUS_WIN_STATUS_LOG_CONTAINER_READ_FAILED = 0xC01A0027, + MD_NTSTATUS_WIN_STATUS_LOG_CONTAINER_WRITE_FAILED = 0xC01A0028, + MD_NTSTATUS_WIN_STATUS_LOG_CONTAINER_OPEN_FAILED = 0xC01A0029, + MD_NTSTATUS_WIN_STATUS_LOG_CONTAINER_STATE_INVALID = 0xC01A002A, + MD_NTSTATUS_WIN_STATUS_LOG_STATE_INVALID = 0xC01A002B, + MD_NTSTATUS_WIN_STATUS_LOG_PINNED = 0xC01A002C, + MD_NTSTATUS_WIN_STATUS_LOG_METADATA_FLUSH_FAILED = 0xC01A002D, + MD_NTSTATUS_WIN_STATUS_LOG_INCONSISTENT_SECURITY = 0xC01A002E, + MD_NTSTATUS_WIN_STATUS_LOG_APPENDED_FLUSH_FAILED = 0xC01A002F, + MD_NTSTATUS_WIN_STATUS_LOG_PINNED_RESERVATION = 0xC01A0030, + MD_NTSTATUS_WIN_STATUS_VIDEO_HUNG_DISPLAY_DRIVER_THREAD = 0xC01B00EA, + MD_NTSTATUS_WIN_STATUS_FLT_NO_HANDLER_DEFINED = 0xC01C0001, + MD_NTSTATUS_WIN_STATUS_FLT_CONTEXT_ALREADY_DEFINED = 0xC01C0002, + MD_NTSTATUS_WIN_STATUS_FLT_INVALID_ASYNCHRONOUS_REQUEST = 0xC01C0003, + MD_NTSTATUS_WIN_STATUS_FLT_DISALLOW_FAST_IO = 0xC01C0004, + MD_NTSTATUS_WIN_STATUS_FLT_INVALID_NAME_REQUEST = 0xC01C0005, + MD_NTSTATUS_WIN_STATUS_FLT_NOT_SAFE_TO_POST_OPERATION = 0xC01C0006, + MD_NTSTATUS_WIN_STATUS_FLT_NOT_INITIALIZED = 0xC01C0007, + MD_NTSTATUS_WIN_STATUS_FLT_FILTER_NOT_READY = 0xC01C0008, + MD_NTSTATUS_WIN_STATUS_FLT_POST_OPERATION_CLEANUP = 0xC01C0009, + MD_NTSTATUS_WIN_STATUS_FLT_INTERNAL_ERROR = 0xC01C000A, + MD_NTSTATUS_WIN_STATUS_FLT_DELETING_OBJECT = 0xC01C000B, + MD_NTSTATUS_WIN_STATUS_FLT_MUST_BE_NONPAGED_POOL = 0xC01C000C, + MD_NTSTATUS_WIN_STATUS_FLT_DUPLICATE_ENTRY = 0xC01C000D, + MD_NTSTATUS_WIN_STATUS_FLT_CBDQ_DISABLED = 0xC01C000E, + MD_NTSTATUS_WIN_STATUS_FLT_DO_NOT_ATTACH = 0xC01C000F, + MD_NTSTATUS_WIN_STATUS_FLT_DO_NOT_DETACH = 0xC01C0010, + MD_NTSTATUS_WIN_STATUS_FLT_INSTANCE_ALTITUDE_COLLISION = 0xC01C0011, + MD_NTSTATUS_WIN_STATUS_FLT_INSTANCE_NAME_COLLISION = 0xC01C0012, + MD_NTSTATUS_WIN_STATUS_FLT_FILTER_NOT_FOUND = 0xC01C0013, + MD_NTSTATUS_WIN_STATUS_FLT_VOLUME_NOT_FOUND = 0xC01C0014, + MD_NTSTATUS_WIN_STATUS_FLT_INSTANCE_NOT_FOUND = 0xC01C0015, + MD_NTSTATUS_WIN_STATUS_FLT_CONTEXT_ALLOCATION_NOT_FOUND = 
0xC01C0016, + MD_NTSTATUS_WIN_STATUS_FLT_INVALID_CONTEXT_REGISTRATION = 0xC01C0017, + MD_NTSTATUS_WIN_STATUS_FLT_NAME_CACHE_MISS = 0xC01C0018, + MD_NTSTATUS_WIN_STATUS_FLT_NO_DEVICE_OBJECT = 0xC01C0019, + MD_NTSTATUS_WIN_STATUS_FLT_VOLUME_ALREADY_MOUNTED = 0xC01C001A, + MD_NTSTATUS_WIN_STATUS_FLT_ALREADY_ENLISTED = 0xC01C001B, + MD_NTSTATUS_WIN_STATUS_FLT_CONTEXT_ALREADY_LINKED = 0xC01C001C, + MD_NTSTATUS_WIN_STATUS_FLT_NO_WAITER_FOR_REPLY = 0xC01C0020, + MD_NTSTATUS_WIN_STATUS_FLT_REGISTRATION_BUSY = 0xC01C0023, + MD_NTSTATUS_WIN_STATUS_MONITOR_NO_DESCRIPTOR = 0xC01D0001, + MD_NTSTATUS_WIN_STATUS_MONITOR_UNKNOWN_DESCRIPTOR_FORMAT = 0xC01D0002, + MD_NTSTATUS_WIN_STATUS_MONITOR_INVALID_DESCRIPTOR_CHECKSUM = 0xC01D0003, + MD_NTSTATUS_WIN_STATUS_MONITOR_INVALID_STANDARD_TIMING_BLOCK = 0xC01D0004, + MD_NTSTATUS_WIN_STATUS_MONITOR_WMI_DATABLOCK_REGISTRATION_FAILED = 0xC01D0005, + MD_NTSTATUS_WIN_STATUS_MONITOR_INVALID_SERIAL_NUMBER_MONDSC_BLOCK = 0xC01D0006, + MD_NTSTATUS_WIN_STATUS_MONITOR_INVALID_USER_FRIENDLY_MONDSC_BLOCK = 0xC01D0007, + MD_NTSTATUS_WIN_STATUS_MONITOR_NO_MORE_DESCRIPTOR_DATA = 0xC01D0008, + MD_NTSTATUS_WIN_STATUS_MONITOR_INVALID_DETAILED_TIMING_BLOCK = 0xC01D0009, + MD_NTSTATUS_WIN_STATUS_MONITOR_INVALID_MANUFACTURE_DATE = 0xC01D000A, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_NOT_EXCLUSIVE_MODE_OWNER = 0xC01E0000, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INSUFFICIENT_DMA_BUFFER = 0xC01E0001, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_DISPLAY_ADAPTER = 0xC01E0002, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_ADAPTER_WAS_RESET = 0xC01E0003, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_DRIVER_MODEL = 0xC01E0004, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_PRESENT_MODE_CHANGED = 0xC01E0005, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_PRESENT_OCCLUDED = 0xC01E0006, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_PRESENT_DENIED = 0xC01E0007, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_CANNOTCOLORCONVERT = 0xC01E0008, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_DRIVER_MISMATCH = 0xC01E0009, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_PRESENT_REDIRECTION_DISABLED = 0xC01E000B, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_PRESENT_UNOCCLUDED = 0xC01E000C, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_WINDOWDC_NOT_AVAILABLE = 0xC01E000D, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_WINDOWLESS_PRESENT_DISABLED = 0xC01E000E, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_VIDEO_MEMORY = 0xC01E0100, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_CANT_LOCK_MEMORY = 0xC01E0101, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_ALLOCATION_BUSY = 0xC01E0102, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_TOO_MANY_REFERENCES = 0xC01E0103, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_TRY_AGAIN_LATER = 0xC01E0104, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_TRY_AGAIN_NOW = 0xC01E0105, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_ALLOCATION_INVALID = 0xC01E0106, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_UNSWIZZLING_APERTURE_UNAVAILABLE = 0xC01E0107, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_UNSWIZZLING_APERTURE_UNSUPPORTED = 0xC01E0108, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_CANT_EVICT_PINNED_ALLOCATION = 0xC01E0109, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_ALLOCATION_USAGE = 0xC01E0110, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_CANT_RENDER_LOCKED_ALLOCATION = 0xC01E0111, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_ALLOCATION_CLOSED = 0xC01E0112, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_ALLOCATION_INSTANCE = 0xC01E0113, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_ALLOCATION_HANDLE = 0xC01E0114, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_WRONG_ALLOCATION_DEVICE = 0xC01E0115, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_ALLOCATION_CONTENT_LOST = 0xC01E0116, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_GPU_EXCEPTION_ON_DEVICE = 0xC01E0200, + 
MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDPN_TOPOLOGY = 0xC01E0300, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_VIDPN_TOPOLOGY_NOT_SUPPORTED = 0xC01E0301, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_VIDPN_TOPOLOGY_CURRENTLY_NOT_SUPPORTED = 0xC01E0302, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDPN = 0xC01E0303, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_SOURCE = 0xC01E0304, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_TARGET = 0xC01E0305, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_VIDPN_MODALITY_NOT_SUPPORTED = 0xC01E0306, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDPN_SOURCEMODESET = 0xC01E0308, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDPN_TARGETMODESET = 0xC01E0309, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_FREQUENCY = 0xC01E030A, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_ACTIVE_REGION = 0xC01E030B, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_TOTAL_REGION = 0xC01E030C, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_SOURCE_MODE = 0xC01E0310, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_TARGET_MODE = 0xC01E0311, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_PINNED_MODE_MUST_REMAIN_IN_SET = 0xC01E0312, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_PATH_ALREADY_IN_TOPOLOGY = 0xC01E0313, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_MODE_ALREADY_IN_MODESET = 0xC01E0314, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDEOPRESENTSOURCESET = 0xC01E0315, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDEOPRESENTTARGETSET = 0xC01E0316, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_SOURCE_ALREADY_IN_SET = 0xC01E0317, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_TARGET_ALREADY_IN_SET = 0xC01E0318, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDPN_PRESENT_PATH = 0xC01E0319, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_RECOMMENDED_VIDPN_TOPOLOGY = 0xC01E031A, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGESET = 0xC01E031B, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGE = 0xC01E031C, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_FREQUENCYRANGE_NOT_IN_SET = 0xC01E031D, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_FREQUENCYRANGE_ALREADY_IN_SET = 0xC01E031F, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_STALE_MODESET = 0xC01E0320, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITOR_SOURCEMODESET = 0xC01E0321, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITOR_SOURCE_MODE = 0xC01E0322, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_RECOMMENDED_FUNCTIONAL_VIDPN = 0xC01E0323, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_MODE_ID_MUST_BE_UNIQUE = 0xC01E0324, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_EMPTY_ADAPTER_MONITOR_MODE_SUPPORT_INTERSECTION = 0xC01E0325, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_VIDEO_PRESENT_TARGETS_LESS_THAN_SOURCES = 0xC01E0326, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_PATH_NOT_IN_TOPOLOGY = 0xC01E0327, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_ADAPTER_MUST_HAVE_AT_LEAST_ONE_SOURCE = 0xC01E0328, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_ADAPTER_MUST_HAVE_AT_LEAST_ONE_TARGET = 0xC01E0329, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITORDESCRIPTORSET = 0xC01E032A, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITORDESCRIPTOR = 0xC01E032B, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_MONITORDESCRIPTOR_NOT_IN_SET = 0xC01E032C, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_MONITORDESCRIPTOR_ALREADY_IN_SET = 0xC01E032D, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_MONITORDESCRIPTOR_ID_MUST_BE_UNIQUE = 0xC01E032E, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDPN_TARGET_SUBSET_TYPE = 0xC01E032F, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_RESOURCES_NOT_RELATED = 0xC01E0330, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_SOURCE_ID_MUST_BE_UNIQUE = 0xC01E0331, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_TARGET_ID_MUST_BE_UNIQUE = 0xC01E0332, + 
MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_AVAILABLE_VIDPN_TARGET = 0xC01E0333, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_MONITOR_COULD_NOT_BE_ASSOCIATED_WITH_ADAPTER = 0xC01E0334, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_VIDPNMGR = 0xC01E0335, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_ACTIVE_VIDPN = 0xC01E0336, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_STALE_VIDPN_TOPOLOGY = 0xC01E0337, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_MONITOR_NOT_CONNECTED = 0xC01E0338, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_SOURCE_NOT_IN_TOPOLOGY = 0xC01E0339, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_PRIMARYSURFACE_SIZE = 0xC01E033A, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VISIBLEREGION_SIZE = 0xC01E033B, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_STRIDE = 0xC01E033C, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_PIXELFORMAT = 0xC01E033D, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_COLORBASIS = 0xC01E033E, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_PIXELVALUEACCESSMODE = 0xC01E033F, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_TARGET_NOT_IN_TOPOLOGY = 0xC01E0340, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_DISPLAY_MODE_MANAGEMENT_SUPPORT = 0xC01E0341, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_VIDPN_SOURCE_IN_USE = 0xC01E0342, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_CANT_ACCESS_ACTIVE_VIDPN = 0xC01E0343, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_PATH_IMPORTANCE_ORDINAL = 0xC01E0344, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_PATH_CONTENT_GEOMETRY_TRANSFORMATION = 0xC01E0345, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_PATH_CONTENT_GEOMETRY_TRANSFORMATION_NOT_SUPPORTED = 0xC01E0346, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_GAMMA_RAMP = 0xC01E0347, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_GAMMA_RAMP_NOT_SUPPORTED = 0xC01E0348, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_MULTISAMPLING_NOT_SUPPORTED = 0xC01E0349, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_MODE_NOT_IN_MODESET = 0xC01E034A, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDPN_TOPOLOGY_RECOMMENDATION_REASON = 0xC01E034D, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_PATH_CONTENT_TYPE = 0xC01E034E, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_COPYPROTECTION_TYPE = 0xC01E034F, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_UNASSIGNED_MODESET_ALREADY_EXISTS = 0xC01E0350, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_SCANLINE_ORDERING = 0xC01E0352, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_TOPOLOGY_CHANGES_NOT_ALLOWED = 0xC01E0353, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_AVAILABLE_IMPORTANCE_ORDINALS = 0xC01E0354, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INCOMPATIBLE_PRIVATE_FORMAT = 0xC01E0355, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MODE_PRUNING_ALGORITHM = 0xC01E0356, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITOR_CAPABILITY_ORIGIN = 0xC01E0357, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGE_CONSTRAINT = 0xC01E0358, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_MAX_NUM_PATHS_REACHED = 0xC01E0359, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_CANCEL_VIDPN_TOPOLOGY_AUGMENTATION = 0xC01E035A, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_CLIENT_TYPE = 0xC01E035B, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_CLIENTVIDPN_NOT_SET = 0xC01E035C, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_SPECIFIED_CHILD_ALREADY_CONNECTED = 0xC01E0400, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_CHILD_DESCRIPTOR_NOT_SUPPORTED = 0xC01E0401, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_NOT_A_LINKED_ADAPTER = 0xC01E0430, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_LEADLINK_NOT_ENUMERATED = 0xC01E0431, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_CHAINLINKS_NOT_ENUMERATED = 0xC01E0432, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_ADAPTER_CHAIN_NOT_READY = 0xC01E0433, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_CHAINLINKS_NOT_STARTED = 0xC01E0434, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_CHAINLINKS_NOT_POWERED_ON = 0xC01E0435, + 
MD_NTSTATUS_WIN_STATUS_GRAPHICS_INCONSISTENT_DEVICE_LINK_STATE = 0xC01E0436, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_NOT_POST_DEVICE_DRIVER = 0xC01E0438, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_ADAPTER_ACCESS_NOT_EXCLUDED = 0xC01E043B, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_NOT_SUPPORTED = 0xC01E0500, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_COPP_NOT_SUPPORTED = 0xC01E0501, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_UAB_NOT_SUPPORTED = 0xC01E0502, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_INVALID_ENCRYPTED_PARAMETERS = 0xC01E0503, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_NO_PROTECTED_OUTPUTS_EXIST = 0xC01E0505, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_INTERNAL_ERROR = 0xC01E050B, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_INVALID_HANDLE = 0xC01E050C, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_PVP_INVALID_CERTIFICATE_LENGTH = 0xC01E050E, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_SPANNING_MODE_ENABLED = 0xC01E050F, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_THEATER_MODE_ENABLED = 0xC01E0510, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_PVP_HFS_FAILED = 0xC01E0511, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_INVALID_SRM = 0xC01E0512, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_HDCP = 0xC01E0513, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_ACP = 0xC01E0514, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_CGMSA = 0xC01E0515, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_HDCP_SRM_NEVER_SET = 0xC01E0516, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_RESOLUTION_TOO_HIGH = 0xC01E0517, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_ALL_HDCP_HARDWARE_ALREADY_IN_USE = 0xC01E0518, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_PROTECTED_OUTPUT_NO_LONGER_EXISTS = 0xC01E051A, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_PROTECTED_OUTPUT_DOES_NOT_HAVE_COPP_SEMANTICS = 0xC01E051C, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_INVALID_INFORMATION_REQUEST = 0xC01E051D, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_DRIVER_INTERNAL_ERROR = 0xC01E051E, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_PROTECTED_OUTPUT_DOES_NOT_HAVE_OPM_SEMANTICS = 0xC01E051F, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_SIGNALING_NOT_SUPPORTED = 0xC01E0520, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_INVALID_CONFIGURATION_REQUEST = 0xC01E0521, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_I2C_NOT_SUPPORTED = 0xC01E0580, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_I2C_DEVICE_DOES_NOT_EXIST = 0xC01E0581, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_I2C_ERROR_TRANSMITTING_DATA = 0xC01E0582, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_I2C_ERROR_RECEIVING_DATA = 0xC01E0583, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_DDCCI_VCP_NOT_SUPPORTED = 0xC01E0584, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_DDCCI_INVALID_DATA = 0xC01E0585, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_DDCCI_MONITOR_RETURNED_INVALID_TIMING_STATUS_BYTE = 0xC01E0586, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_DDCCI_INVALID_CAPABILITIES_STRING = 0xC01E0587, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_MCA_INTERNAL_ERROR = 0xC01E0588, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_DDCCI_INVALID_MESSAGE_COMMAND = 0xC01E0589, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_DDCCI_INVALID_MESSAGE_LENGTH = 0xC01E058A, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_DDCCI_INVALID_MESSAGE_CHECKSUM = 0xC01E058B, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_PHYSICAL_MONITOR_HANDLE = 0xC01E058C, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_MONITOR_NO_LONGER_EXISTS = 0xC01E058D, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_ONLY_CONSOLE_SESSION_SUPPORTED = 0xC01E05E0, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_DISPLAY_DEVICE_CORRESPONDS_TO_NAME = 0xC01E05E1, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_DISPLAY_DEVICE_NOT_ATTACHED_TO_DESKTOP = 0xC01E05E2, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_MIRRORING_DEVICES_NOT_SUPPORTED = 0xC01E05E3, + 
MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_POINTER = 0xC01E05E4, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_MONITORS_CORRESPOND_TO_DISPLAY_DEVICE = 0xC01E05E5, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_PARAMETER_ARRAY_TOO_SMALL = 0xC01E05E6, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_INTERNAL_ERROR = 0xC01E05E7, + MD_NTSTATUS_WIN_STATUS_GRAPHICS_SESSION_TYPE_CHANGE_IN_PROGRESS = 0xC01E05E8, + MD_NTSTATUS_WIN_STATUS_FVE_LOCKED_VOLUME = 0xC0210000, + MD_NTSTATUS_WIN_STATUS_FVE_NOT_ENCRYPTED = 0xC0210001, + MD_NTSTATUS_WIN_STATUS_FVE_BAD_INFORMATION = 0xC0210002, + MD_NTSTATUS_WIN_STATUS_FVE_TOO_SMALL = 0xC0210003, + MD_NTSTATUS_WIN_STATUS_FVE_FAILED_WRONG_FS = 0xC0210004, + MD_NTSTATUS_WIN_STATUS_FVE_BAD_PARTITION_SIZE = 0xC0210005, + MD_NTSTATUS_WIN_STATUS_FVE_FS_NOT_EXTENDED = 0xC0210006, + MD_NTSTATUS_WIN_STATUS_FVE_FS_MOUNTED = 0xC0210007, + MD_NTSTATUS_WIN_STATUS_FVE_NO_LICENSE = 0xC0210008, + MD_NTSTATUS_WIN_STATUS_FVE_ACTION_NOT_ALLOWED = 0xC0210009, + MD_NTSTATUS_WIN_STATUS_FVE_BAD_DATA = 0xC021000A, + MD_NTSTATUS_WIN_STATUS_FVE_VOLUME_NOT_BOUND = 0xC021000B, + MD_NTSTATUS_WIN_STATUS_FVE_NOT_DATA_VOLUME = 0xC021000C, + MD_NTSTATUS_WIN_STATUS_FVE_CONV_READ_ERROR = 0xC021000D, + MD_NTSTATUS_WIN_STATUS_FVE_CONV_WRITE_ERROR = 0xC021000E, + MD_NTSTATUS_WIN_STATUS_FVE_OVERLAPPED_UPDATE = 0xC021000F, + MD_NTSTATUS_WIN_STATUS_FVE_FAILED_SECTOR_SIZE = 0xC0210010, + MD_NTSTATUS_WIN_STATUS_FVE_FAILED_AUTHENTICATION = 0xC0210011, + MD_NTSTATUS_WIN_STATUS_FVE_NOT_OS_VOLUME = 0xC0210012, + MD_NTSTATUS_WIN_STATUS_FVE_KEYFILE_NOT_FOUND = 0xC0210013, + MD_NTSTATUS_WIN_STATUS_FVE_KEYFILE_INVALID = 0xC0210014, + MD_NTSTATUS_WIN_STATUS_FVE_KEYFILE_NO_VMK = 0xC0210015, + MD_NTSTATUS_WIN_STATUS_FVE_TPM_DISABLED = 0xC0210016, + MD_NTSTATUS_WIN_STATUS_FVE_TPM_SRK_AUTH_NOT_ZERO = 0xC0210017, + MD_NTSTATUS_WIN_STATUS_FVE_TPM_INVALID_PCR = 0xC0210018, + MD_NTSTATUS_WIN_STATUS_FVE_TPM_NO_VMK = 0xC0210019, + MD_NTSTATUS_WIN_STATUS_FVE_PIN_INVALID = 0xC021001A, + MD_NTSTATUS_WIN_STATUS_FVE_AUTH_INVALID_APPLICATION = 0xC021001B, + MD_NTSTATUS_WIN_STATUS_FVE_AUTH_INVALID_CONFIG = 0xC021001C, + MD_NTSTATUS_WIN_STATUS_FVE_DEBUGGER_ENABLED = 0xC021001D, + MD_NTSTATUS_WIN_STATUS_FVE_DRY_RUN_FAILED = 0xC021001E, + MD_NTSTATUS_WIN_STATUS_FVE_BAD_METADATA_POINTER = 0xC021001F, + MD_NTSTATUS_WIN_STATUS_FVE_OLD_METADATA_COPY = 0xC0210020, + MD_NTSTATUS_WIN_STATUS_FVE_REBOOT_REQUIRED = 0xC0210021, + MD_NTSTATUS_WIN_STATUS_FVE_RAW_ACCESS = 0xC0210022, + MD_NTSTATUS_WIN_STATUS_FVE_RAW_BLOCKED = 0xC0210023, + MD_NTSTATUS_WIN_STATUS_FVE_NO_AUTOUNLOCK_MASTER_KEY = 0xC0210024, + MD_NTSTATUS_WIN_STATUS_FVE_MOR_FAILED = 0xC0210025, + MD_NTSTATUS_WIN_STATUS_FVE_NO_FEATURE_LICENSE = 0xC0210026, + MD_NTSTATUS_WIN_STATUS_FVE_POLICY_USER_DISABLE_RDV_NOT_ALLOWED = 0xC0210027, + MD_NTSTATUS_WIN_STATUS_FVE_CONV_RECOVERY_FAILED = 0xC0210028, + MD_NTSTATUS_WIN_STATUS_FVE_VIRTUALIZED_SPACE_TOO_BIG = 0xC0210029, + MD_NTSTATUS_WIN_STATUS_FVE_INVALID_DATUM_TYPE = 0xC021002A, + MD_NTSTATUS_WIN_STATUS_FVE_VOLUME_TOO_SMALL = 0xC0210030, + MD_NTSTATUS_WIN_STATUS_FVE_ENH_PIN_INVALID = 0xC0210031, + MD_NTSTATUS_WIN_STATUS_FVE_FULL_ENCRYPTION_NOT_ALLOWED_ON_TP_STORAGE = 0xC0210032, + MD_NTSTATUS_WIN_STATUS_FVE_WIPE_NOT_ALLOWED_ON_TP_STORAGE = 0xC0210033, + MD_NTSTATUS_WIN_STATUS_FVE_NOT_ALLOWED_ON_CSV_STACK = 0xC0210034, + MD_NTSTATUS_WIN_STATUS_FVE_NOT_ALLOWED_ON_CLUSTER = 0xC0210035, + MD_NTSTATUS_WIN_STATUS_FVE_NOT_ALLOWED_TO_UPGRADE_WHILE_CONVERTING = 0xC0210036, + MD_NTSTATUS_WIN_STATUS_FVE_WIPE_CANCEL_NOT_APPLICABLE = 0xC0210037, + 
MD_NTSTATUS_WIN_STATUS_FVE_EDRIVE_DRY_RUN_FAILED = 0xC0210038, + MD_NTSTATUS_WIN_STATUS_FVE_SECUREBOOT_DISABLED = 0xC0210039, + MD_NTSTATUS_WIN_STATUS_FVE_SECUREBOOT_CONFIG_CHANGE = 0xC021003A, + MD_NTSTATUS_WIN_STATUS_FVE_DEVICE_LOCKEDOUT = 0xC021003B, + MD_NTSTATUS_WIN_STATUS_FVE_VOLUME_EXTEND_PREVENTS_EOW_DECRYPT = 0xC021003C, + MD_NTSTATUS_WIN_STATUS_FVE_NOT_DE_VOLUME = 0xC021003D, + MD_NTSTATUS_WIN_STATUS_FVE_PROTECTION_DISABLED = 0xC021003E, + MD_NTSTATUS_WIN_STATUS_FVE_PROTECTION_CANNOT_BE_DISABLED = 0xC021003F, + MD_NTSTATUS_WIN_STATUS_FWP_CALLOUT_NOT_FOUND = 0xC0220001, + MD_NTSTATUS_WIN_STATUS_FWP_CONDITION_NOT_FOUND = 0xC0220002, + MD_NTSTATUS_WIN_STATUS_FWP_FILTER_NOT_FOUND = 0xC0220003, + MD_NTSTATUS_WIN_STATUS_FWP_LAYER_NOT_FOUND = 0xC0220004, + MD_NTSTATUS_WIN_STATUS_FWP_PROVIDER_NOT_FOUND = 0xC0220005, + MD_NTSTATUS_WIN_STATUS_FWP_PROVIDER_CONTEXT_NOT_FOUND = 0xC0220006, + MD_NTSTATUS_WIN_STATUS_FWP_SUBLAYER_NOT_FOUND = 0xC0220007, + MD_NTSTATUS_WIN_STATUS_FWP_NOT_FOUND = 0xC0220008, + MD_NTSTATUS_WIN_STATUS_FWP_ALREADY_EXISTS = 0xC0220009, + MD_NTSTATUS_WIN_STATUS_FWP_IN_USE = 0xC022000A, + MD_NTSTATUS_WIN_STATUS_FWP_DYNAMIC_SESSION_IN_PROGRESS = 0xC022000B, + MD_NTSTATUS_WIN_STATUS_FWP_WRONG_SESSION = 0xC022000C, + MD_NTSTATUS_WIN_STATUS_FWP_NO_TXN_IN_PROGRESS = 0xC022000D, + MD_NTSTATUS_WIN_STATUS_FWP_TXN_IN_PROGRESS = 0xC022000E, + MD_NTSTATUS_WIN_STATUS_FWP_TXN_ABORTED = 0xC022000F, + MD_NTSTATUS_WIN_STATUS_FWP_SESSION_ABORTED = 0xC0220010, + MD_NTSTATUS_WIN_STATUS_FWP_INCOMPATIBLE_TXN = 0xC0220011, + MD_NTSTATUS_WIN_STATUS_FWP_TIMEOUT = 0xC0220012, + MD_NTSTATUS_WIN_STATUS_FWP_NET_EVENTS_DISABLED = 0xC0220013, + MD_NTSTATUS_WIN_STATUS_FWP_INCOMPATIBLE_LAYER = 0xC0220014, + MD_NTSTATUS_WIN_STATUS_FWP_KM_CLIENTS_ONLY = 0xC0220015, + MD_NTSTATUS_WIN_STATUS_FWP_LIFETIME_MISMATCH = 0xC0220016, + MD_NTSTATUS_WIN_STATUS_FWP_BUILTIN_OBJECT = 0xC0220017, + MD_NTSTATUS_WIN_STATUS_FWP_TOO_MANY_CALLOUTS = 0xC0220018, + MD_NTSTATUS_WIN_STATUS_FWP_NOTIFICATION_DROPPED = 0xC0220019, + MD_NTSTATUS_WIN_STATUS_FWP_TRAFFIC_MISMATCH = 0xC022001A, + MD_NTSTATUS_WIN_STATUS_FWP_INCOMPATIBLE_SA_STATE = 0xC022001B, + MD_NTSTATUS_WIN_STATUS_FWP_NULL_POINTER = 0xC022001C, + MD_NTSTATUS_WIN_STATUS_FWP_INVALID_ENUMERATOR = 0xC022001D, + MD_NTSTATUS_WIN_STATUS_FWP_INVALID_FLAGS = 0xC022001E, + MD_NTSTATUS_WIN_STATUS_FWP_INVALID_NET_MASK = 0xC022001F, + MD_NTSTATUS_WIN_STATUS_FWP_INVALID_RANGE = 0xC0220020, + MD_NTSTATUS_WIN_STATUS_FWP_INVALID_INTERVAL = 0xC0220021, + MD_NTSTATUS_WIN_STATUS_FWP_ZERO_LENGTH_ARRAY = 0xC0220022, + MD_NTSTATUS_WIN_STATUS_FWP_NULL_DISPLAY_NAME = 0xC0220023, + MD_NTSTATUS_WIN_STATUS_FWP_INVALID_ACTION_TYPE = 0xC0220024, + MD_NTSTATUS_WIN_STATUS_FWP_INVALID_WEIGHT = 0xC0220025, + MD_NTSTATUS_WIN_STATUS_FWP_MATCH_TYPE_MISMATCH = 0xC0220026, + MD_NTSTATUS_WIN_STATUS_FWP_TYPE_MISMATCH = 0xC0220027, + MD_NTSTATUS_WIN_STATUS_FWP_OUT_OF_BOUNDS = 0xC0220028, + MD_NTSTATUS_WIN_STATUS_FWP_RESERVED = 0xC0220029, + MD_NTSTATUS_WIN_STATUS_FWP_DUPLICATE_CONDITION = 0xC022002A, + MD_NTSTATUS_WIN_STATUS_FWP_DUPLICATE_KEYMOD = 0xC022002B, + MD_NTSTATUS_WIN_STATUS_FWP_ACTION_INCOMPATIBLE_WITH_LAYER = 0xC022002C, + MD_NTSTATUS_WIN_STATUS_FWP_ACTION_INCOMPATIBLE_WITH_SUBLAYER = 0xC022002D, + MD_NTSTATUS_WIN_STATUS_FWP_CONTEXT_INCOMPATIBLE_WITH_LAYER = 0xC022002E, + MD_NTSTATUS_WIN_STATUS_FWP_CONTEXT_INCOMPATIBLE_WITH_CALLOUT = 0xC022002F, + MD_NTSTATUS_WIN_STATUS_FWP_INCOMPATIBLE_AUTH_METHOD = 0xC0220030, + MD_NTSTATUS_WIN_STATUS_FWP_INCOMPATIBLE_DH_GROUP = 0xC0220031, + 
MD_NTSTATUS_WIN_STATUS_FWP_EM_NOT_SUPPORTED = 0xC0220032, + MD_NTSTATUS_WIN_STATUS_FWP_NEVER_MATCH = 0xC0220033, + MD_NTSTATUS_WIN_STATUS_FWP_PROVIDER_CONTEXT_MISMATCH = 0xC0220034, + MD_NTSTATUS_WIN_STATUS_FWP_INVALID_PARAMETER = 0xC0220035, + MD_NTSTATUS_WIN_STATUS_FWP_TOO_MANY_SUBLAYERS = 0xC0220036, + MD_NTSTATUS_WIN_STATUS_FWP_CALLOUT_NOTIFICATION_FAILED = 0xC0220037, + MD_NTSTATUS_WIN_STATUS_FWP_INVALID_AUTH_TRANSFORM = 0xC0220038, + MD_NTSTATUS_WIN_STATUS_FWP_INVALID_CIPHER_TRANSFORM = 0xC0220039, + MD_NTSTATUS_WIN_STATUS_FWP_INCOMPATIBLE_CIPHER_TRANSFORM = 0xC022003A, + MD_NTSTATUS_WIN_STATUS_FWP_INVALID_TRANSFORM_COMBINATION = 0xC022003B, + MD_NTSTATUS_WIN_STATUS_FWP_DUPLICATE_AUTH_METHOD = 0xC022003C, + MD_NTSTATUS_WIN_STATUS_FWP_INVALID_TUNNEL_ENDPOINT = 0xC022003D, + MD_NTSTATUS_WIN_STATUS_FWP_L2_DRIVER_NOT_READY = 0xC022003E, + MD_NTSTATUS_WIN_STATUS_FWP_KEY_DICTATOR_ALREADY_REGISTERED = 0xC022003F, + MD_NTSTATUS_WIN_STATUS_FWP_KEY_DICTATION_INVALID_KEYING_MATERIAL = 0xC0220040, + MD_NTSTATUS_WIN_STATUS_FWP_CONNECTIONS_DISABLED = 0xC0220041, + MD_NTSTATUS_WIN_STATUS_FWP_INVALID_DNS_NAME = 0xC0220042, + MD_NTSTATUS_WIN_STATUS_FWP_STILL_ON = 0xC0220043, + MD_NTSTATUS_WIN_STATUS_FWP_IKEEXT_NOT_RUNNING = 0xC0220044, + MD_NTSTATUS_WIN_STATUS_FWP_TCPIP_NOT_READY = 0xC0220100, + MD_NTSTATUS_WIN_STATUS_FWP_INJECT_HANDLE_CLOSING = 0xC0220101, + MD_NTSTATUS_WIN_STATUS_FWP_INJECT_HANDLE_STALE = 0xC0220102, + MD_NTSTATUS_WIN_STATUS_FWP_CANNOT_PEND = 0xC0220103, + MD_NTSTATUS_WIN_STATUS_FWP_DROP_NOICMP = 0xC0220104, + MD_NTSTATUS_WIN_STATUS_NDIS_CLOSING = 0xC0230002, + MD_NTSTATUS_WIN_STATUS_NDIS_BAD_VERSION = 0xC0230004, + MD_NTSTATUS_WIN_STATUS_NDIS_BAD_CHARACTERISTICS = 0xC0230005, + MD_NTSTATUS_WIN_STATUS_NDIS_ADAPTER_NOT_FOUND = 0xC0230006, + MD_NTSTATUS_WIN_STATUS_NDIS_OPEN_FAILED = 0xC0230007, + MD_NTSTATUS_WIN_STATUS_NDIS_DEVICE_FAILED = 0xC0230008, + MD_NTSTATUS_WIN_STATUS_NDIS_MULTICAST_FULL = 0xC0230009, + MD_NTSTATUS_WIN_STATUS_NDIS_MULTICAST_EXISTS = 0xC023000A, + MD_NTSTATUS_WIN_STATUS_NDIS_MULTICAST_NOT_FOUND = 0xC023000B, + MD_NTSTATUS_WIN_STATUS_NDIS_REQUEST_ABORTED = 0xC023000C, + MD_NTSTATUS_WIN_STATUS_NDIS_RESET_IN_PROGRESS = 0xC023000D, + MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_PACKET = 0xC023000F, + MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_DEVICE_REQUEST = 0xC0230010, + MD_NTSTATUS_WIN_STATUS_NDIS_ADAPTER_NOT_READY = 0xC0230011, + MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_LENGTH = 0xC0230014, + MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_DATA = 0xC0230015, + MD_NTSTATUS_WIN_STATUS_NDIS_BUFFER_TOO_SHORT = 0xC0230016, + MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_OID = 0xC0230017, + MD_NTSTATUS_WIN_STATUS_NDIS_ADAPTER_REMOVED = 0xC0230018, + MD_NTSTATUS_WIN_STATUS_NDIS_UNSUPPORTED_MEDIA = 0xC0230019, + MD_NTSTATUS_WIN_STATUS_NDIS_GROUP_ADDRESS_IN_USE = 0xC023001A, + MD_NTSTATUS_WIN_STATUS_NDIS_FILE_NOT_FOUND = 0xC023001B, + MD_NTSTATUS_WIN_STATUS_NDIS_ERROR_READING_FILE = 0xC023001C, + MD_NTSTATUS_WIN_STATUS_NDIS_ALREADY_MAPPED = 0xC023001D, + MD_NTSTATUS_WIN_STATUS_NDIS_RESOURCE_CONFLICT = 0xC023001E, + MD_NTSTATUS_WIN_STATUS_NDIS_MEDIA_DISCONNECTED = 0xC023001F, + MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_ADDRESS = 0xC0230022, + MD_NTSTATUS_WIN_STATUS_NDIS_PAUSED = 0xC023002A, + MD_NTSTATUS_WIN_STATUS_NDIS_INTERFACE_NOT_FOUND = 0xC023002B, + MD_NTSTATUS_WIN_STATUS_NDIS_UNSUPPORTED_REVISION = 0xC023002C, + MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_PORT = 0xC023002D, + MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_PORT_STATE = 0xC023002E, + MD_NTSTATUS_WIN_STATUS_NDIS_LOW_POWER_STATE = 0xC023002F, + 
MD_NTSTATUS_WIN_STATUS_NDIS_REINIT_REQUIRED = 0xC0230030, + MD_NTSTATUS_WIN_STATUS_NDIS_NOT_SUPPORTED = 0xC02300BB, + MD_NTSTATUS_WIN_STATUS_NDIS_OFFLOAD_POLICY = 0xC023100F, + MD_NTSTATUS_WIN_STATUS_NDIS_OFFLOAD_CONNECTION_REJECTED = 0xC0231012, + MD_NTSTATUS_WIN_STATUS_NDIS_OFFLOAD_PATH_REJECTED = 0xC0231013, + MD_NTSTATUS_WIN_STATUS_NDIS_DOT11_AUTO_CONFIG_ENABLED = 0xC0232000, + MD_NTSTATUS_WIN_STATUS_NDIS_DOT11_MEDIA_IN_USE = 0xC0232001, + MD_NTSTATUS_WIN_STATUS_NDIS_DOT11_POWER_STATE_INVALID = 0xC0232002, + MD_NTSTATUS_WIN_STATUS_NDIS_PM_WOL_PATTERN_LIST_FULL = 0xC0232003, + MD_NTSTATUS_WIN_STATUS_NDIS_PM_PROTOCOL_OFFLOAD_LIST_FULL = 0xC0232004, + MD_NTSTATUS_WIN_STATUS_TPM_ERROR_MASK = 0xC0290000, + MD_NTSTATUS_WIN_STATUS_TPM_AUTHFAIL = 0xC0290001, + MD_NTSTATUS_WIN_STATUS_TPM_BADINDEX = 0xC0290002, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_PARAMETER = 0xC0290003, + MD_NTSTATUS_WIN_STATUS_TPM_AUDITFAILURE = 0xC0290004, + MD_NTSTATUS_WIN_STATUS_TPM_CLEAR_DISABLED = 0xC0290005, + MD_NTSTATUS_WIN_STATUS_TPM_DEACTIVATED = 0xC0290006, + MD_NTSTATUS_WIN_STATUS_TPM_DISABLED = 0xC0290007, + MD_NTSTATUS_WIN_STATUS_TPM_DISABLED_CMD = 0xC0290008, + MD_NTSTATUS_WIN_STATUS_TPM_FAIL = 0xC0290009, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_ORDINAL = 0xC029000A, + MD_NTSTATUS_WIN_STATUS_TPM_INSTALL_DISABLED = 0xC029000B, + MD_NTSTATUS_WIN_STATUS_TPM_INVALID_KEYHANDLE = 0xC029000C, + MD_NTSTATUS_WIN_STATUS_TPM_KEYNOTFOUND = 0xC029000D, + MD_NTSTATUS_WIN_STATUS_TPM_INAPPROPRIATE_ENC = 0xC029000E, + MD_NTSTATUS_WIN_STATUS_TPM_MIGRATEFAIL = 0xC029000F, + MD_NTSTATUS_WIN_STATUS_TPM_INVALID_PCR_INFO = 0xC0290010, + MD_NTSTATUS_WIN_STATUS_TPM_NOSPACE = 0xC0290011, + MD_NTSTATUS_WIN_STATUS_TPM_NOSRK = 0xC0290012, + MD_NTSTATUS_WIN_STATUS_TPM_NOTSEALED_BLOB = 0xC0290013, + MD_NTSTATUS_WIN_STATUS_TPM_OWNER_SET = 0xC0290014, + MD_NTSTATUS_WIN_STATUS_TPM_RESOURCES = 0xC0290015, + MD_NTSTATUS_WIN_STATUS_TPM_SHORTRANDOM = 0xC0290016, + MD_NTSTATUS_WIN_STATUS_TPM_SIZE = 0xC0290017, + MD_NTSTATUS_WIN_STATUS_TPM_WRONGPCRVAL = 0xC0290018, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_PARAM_SIZE = 0xC0290019, + MD_NTSTATUS_WIN_STATUS_TPM_SHA_THREAD = 0xC029001A, + MD_NTSTATUS_WIN_STATUS_TPM_SHA_ERROR = 0xC029001B, + MD_NTSTATUS_WIN_STATUS_TPM_FAILEDSELFTEST = 0xC029001C, + MD_NTSTATUS_WIN_STATUS_TPM_AUTH2FAIL = 0xC029001D, + MD_NTSTATUS_WIN_STATUS_TPM_BADTAG = 0xC029001E, + MD_NTSTATUS_WIN_STATUS_TPM_IOERROR = 0xC029001F, + MD_NTSTATUS_WIN_STATUS_TPM_ENCRYPT_ERROR = 0xC0290020, + MD_NTSTATUS_WIN_STATUS_TPM_DECRYPT_ERROR = 0xC0290021, + MD_NTSTATUS_WIN_STATUS_TPM_INVALID_AUTHHANDLE = 0xC0290022, + MD_NTSTATUS_WIN_STATUS_TPM_NO_ENDORSEMENT = 0xC0290023, + MD_NTSTATUS_WIN_STATUS_TPM_INVALID_KEYUSAGE = 0xC0290024, + MD_NTSTATUS_WIN_STATUS_TPM_WRONG_ENTITYTYPE = 0xC0290025, + MD_NTSTATUS_WIN_STATUS_TPM_INVALID_POSTINIT = 0xC0290026, + MD_NTSTATUS_WIN_STATUS_TPM_INAPPROPRIATE_SIG = 0xC0290027, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_KEY_PROPERTY = 0xC0290028, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_MIGRATION = 0xC0290029, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_SCHEME = 0xC029002A, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_DATASIZE = 0xC029002B, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_MODE = 0xC029002C, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_PRESENCE = 0xC029002D, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_VERSION = 0xC029002E, + MD_NTSTATUS_WIN_STATUS_TPM_NO_WRAP_TRANSPORT = 0xC029002F, + MD_NTSTATUS_WIN_STATUS_TPM_AUDITFAIL_UNSUCCESSFUL = 0xC0290030, + MD_NTSTATUS_WIN_STATUS_TPM_AUDITFAIL_SUCCESSFUL = 0xC0290031, + MD_NTSTATUS_WIN_STATUS_TPM_NOTRESETABLE = 0xC0290032, + MD_NTSTATUS_WIN_STATUS_TPM_NOTLOCAL 
= 0xC0290033, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_TYPE = 0xC0290034, + MD_NTSTATUS_WIN_STATUS_TPM_INVALID_RESOURCE = 0xC0290035, + MD_NTSTATUS_WIN_STATUS_TPM_NOTFIPS = 0xC0290036, + MD_NTSTATUS_WIN_STATUS_TPM_INVALID_FAMILY = 0xC0290037, + MD_NTSTATUS_WIN_STATUS_TPM_NO_NV_PERMISSION = 0xC0290038, + MD_NTSTATUS_WIN_STATUS_TPM_REQUIRES_SIGN = 0xC0290039, + MD_NTSTATUS_WIN_STATUS_TPM_KEY_NOTSUPPORTED = 0xC029003A, + MD_NTSTATUS_WIN_STATUS_TPM_AUTH_CONFLICT = 0xC029003B, + MD_NTSTATUS_WIN_STATUS_TPM_AREA_LOCKED = 0xC029003C, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_LOCALITY = 0xC029003D, + MD_NTSTATUS_WIN_STATUS_TPM_READ_ONLY = 0xC029003E, + MD_NTSTATUS_WIN_STATUS_TPM_PER_NOWRITE = 0xC029003F, + MD_NTSTATUS_WIN_STATUS_TPM_FAMILYCOUNT = 0xC0290040, + MD_NTSTATUS_WIN_STATUS_TPM_WRITE_LOCKED = 0xC0290041, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_ATTRIBUTES = 0xC0290042, + MD_NTSTATUS_WIN_STATUS_TPM_INVALID_STRUCTURE = 0xC0290043, + MD_NTSTATUS_WIN_STATUS_TPM_KEY_OWNER_CONTROL = 0xC0290044, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_COUNTER = 0xC0290045, + MD_NTSTATUS_WIN_STATUS_TPM_NOT_FULLWRITE = 0xC0290046, + MD_NTSTATUS_WIN_STATUS_TPM_CONTEXT_GAP = 0xC0290047, + MD_NTSTATUS_WIN_STATUS_TPM_MAXNVWRITES = 0xC0290048, + MD_NTSTATUS_WIN_STATUS_TPM_NOOPERATOR = 0xC0290049, + MD_NTSTATUS_WIN_STATUS_TPM_RESOURCEMISSING = 0xC029004A, + MD_NTSTATUS_WIN_STATUS_TPM_DELEGATE_LOCK = 0xC029004B, + MD_NTSTATUS_WIN_STATUS_TPM_DELEGATE_FAMILY = 0xC029004C, + MD_NTSTATUS_WIN_STATUS_TPM_DELEGATE_ADMIN = 0xC029004D, + MD_NTSTATUS_WIN_STATUS_TPM_TRANSPORT_NOTEXCLUSIVE = 0xC029004E, + MD_NTSTATUS_WIN_STATUS_TPM_OWNER_CONTROL = 0xC029004F, + MD_NTSTATUS_WIN_STATUS_TPM_DAA_RESOURCES = 0xC0290050, + MD_NTSTATUS_WIN_STATUS_TPM_DAA_INPUT_DATA0 = 0xC0290051, + MD_NTSTATUS_WIN_STATUS_TPM_DAA_INPUT_DATA1 = 0xC0290052, + MD_NTSTATUS_WIN_STATUS_TPM_DAA_ISSUER_SETTINGS = 0xC0290053, + MD_NTSTATUS_WIN_STATUS_TPM_DAA_TPM_SETTINGS = 0xC0290054, + MD_NTSTATUS_WIN_STATUS_TPM_DAA_STAGE = 0xC0290055, + MD_NTSTATUS_WIN_STATUS_TPM_DAA_ISSUER_VALIDITY = 0xC0290056, + MD_NTSTATUS_WIN_STATUS_TPM_DAA_WRONG_W = 0xC0290057, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_HANDLE = 0xC0290058, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_DELEGATE = 0xC0290059, + MD_NTSTATUS_WIN_STATUS_TPM_BADCONTEXT = 0xC029005A, + MD_NTSTATUS_WIN_STATUS_TPM_TOOMANYCONTEXTS = 0xC029005B, + MD_NTSTATUS_WIN_STATUS_TPM_MA_TICKET_SIGNATURE = 0xC029005C, + MD_NTSTATUS_WIN_STATUS_TPM_MA_DESTINATION = 0xC029005D, + MD_NTSTATUS_WIN_STATUS_TPM_MA_SOURCE = 0xC029005E, + MD_NTSTATUS_WIN_STATUS_TPM_MA_AUTHORITY = 0xC029005F, + MD_NTSTATUS_WIN_STATUS_TPM_PERMANENTEK = 0xC0290061, + MD_NTSTATUS_WIN_STATUS_TPM_BAD_SIGNATURE = 0xC0290062, + MD_NTSTATUS_WIN_STATUS_TPM_NOCONTEXTSPACE = 0xC0290063, + MD_NTSTATUS_WIN_STATUS_TPM_COMMAND_BLOCKED = 0xC0290400, + MD_NTSTATUS_WIN_STATUS_TPM_INVALID_HANDLE = 0xC0290401, + MD_NTSTATUS_WIN_STATUS_TPM_DUPLICATE_VHANDLE = 0xC0290402, + MD_NTSTATUS_WIN_STATUS_TPM_EMBEDDED_COMMAND_BLOCKED = 0xC0290403, + MD_NTSTATUS_WIN_STATUS_TPM_EMBEDDED_COMMAND_UNSUPPORTED = 0xC0290404, + MD_NTSTATUS_WIN_STATUS_TPM_RETRY = 0xC0290800, + MD_NTSTATUS_WIN_STATUS_TPM_NEEDS_SELFTEST = 0xC0290801, + MD_NTSTATUS_WIN_STATUS_TPM_DOING_SELFTEST = 0xC0290802, + MD_NTSTATUS_WIN_STATUS_TPM_DEFEND_LOCK_RUNNING = 0xC0290803, + MD_NTSTATUS_WIN_STATUS_TPM_COMMAND_CANCELED = 0xC0291001, + MD_NTSTATUS_WIN_STATUS_TPM_TOO_MANY_CONTEXTS = 0xC0291002, + MD_NTSTATUS_WIN_STATUS_TPM_NOT_FOUND = 0xC0291003, + MD_NTSTATUS_WIN_STATUS_TPM_ACCESS_DENIED = 0xC0291004, + MD_NTSTATUS_WIN_STATUS_TPM_INSUFFICIENT_BUFFER = 0xC0291005, + 
MD_NTSTATUS_WIN_STATUS_TPM_PPI_FUNCTION_UNSUPPORTED = 0xC0291006, + MD_NTSTATUS_WIN_STATUS_PCP_ERROR_MASK = 0xC0292000, + MD_NTSTATUS_WIN_STATUS_PCP_DEVICE_NOT_READY = 0xC0292001, + MD_NTSTATUS_WIN_STATUS_PCP_INVALID_HANDLE = 0xC0292002, + MD_NTSTATUS_WIN_STATUS_PCP_INVALID_PARAMETER = 0xC0292003, + MD_NTSTATUS_WIN_STATUS_PCP_FLAG_NOT_SUPPORTED = 0xC0292004, + MD_NTSTATUS_WIN_STATUS_PCP_NOT_SUPPORTED = 0xC0292005, + MD_NTSTATUS_WIN_STATUS_PCP_BUFFER_TOO_SMALL = 0xC0292006, + MD_NTSTATUS_WIN_STATUS_PCP_INTERNAL_ERROR = 0xC0292007, + MD_NTSTATUS_WIN_STATUS_PCP_AUTHENTICATION_FAILED = 0xC0292008, + MD_NTSTATUS_WIN_STATUS_PCP_AUTHENTICATION_IGNORED = 0xC0292009, + MD_NTSTATUS_WIN_STATUS_PCP_POLICY_NOT_FOUND = 0xC029200A, + MD_NTSTATUS_WIN_STATUS_PCP_PROFILE_NOT_FOUND = 0xC029200B, + MD_NTSTATUS_WIN_STATUS_PCP_VALIDATION_FAILED = 0xC029200C, + MD_NTSTATUS_WIN_STATUS_PCP_DEVICE_NOT_FOUND = 0xC029200D, + MD_NTSTATUS_WIN_STATUS_HV_INVALID_HYPERCALL_CODE = 0xC0350002, + MD_NTSTATUS_WIN_STATUS_HV_INVALID_HYPERCALL_INPUT = 0xC0350003, + MD_NTSTATUS_WIN_STATUS_HV_INVALID_ALIGNMENT = 0xC0350004, + MD_NTSTATUS_WIN_STATUS_HV_INVALID_PARAMETER = 0xC0350005, + MD_NTSTATUS_WIN_STATUS_HV_ACCESS_DENIED = 0xC0350006, + MD_NTSTATUS_WIN_STATUS_HV_INVALID_PARTITION_STATE = 0xC0350007, + MD_NTSTATUS_WIN_STATUS_HV_OPERATION_DENIED = 0xC0350008, + MD_NTSTATUS_WIN_STATUS_HV_UNKNOWN_PROPERTY = 0xC0350009, + MD_NTSTATUS_WIN_STATUS_HV_PROPERTY_VALUE_OUT_OF_RANGE = 0xC035000A, + MD_NTSTATUS_WIN_STATUS_HV_INSUFFICIENT_MEMORY = 0xC035000B, + MD_NTSTATUS_WIN_STATUS_HV_PARTITION_TOO_DEEP = 0xC035000C, + MD_NTSTATUS_WIN_STATUS_HV_INVALID_PARTITION_ID = 0xC035000D, + MD_NTSTATUS_WIN_STATUS_HV_INVALID_VP_INDEX = 0xC035000E, + MD_NTSTATUS_WIN_STATUS_HV_INVALID_PORT_ID = 0xC0350011, + MD_NTSTATUS_WIN_STATUS_HV_INVALID_CONNECTION_ID = 0xC0350012, + MD_NTSTATUS_WIN_STATUS_HV_INSUFFICIENT_BUFFERS = 0xC0350013, + MD_NTSTATUS_WIN_STATUS_HV_NOT_ACKNOWLEDGED = 0xC0350014, + MD_NTSTATUS_WIN_STATUS_HV_ACKNOWLEDGED = 0xC0350016, + MD_NTSTATUS_WIN_STATUS_HV_INVALID_SAVE_RESTORE_STATE = 0xC0350017, + MD_NTSTATUS_WIN_STATUS_HV_INVALID_SYNIC_STATE = 0xC0350018, + MD_NTSTATUS_WIN_STATUS_HV_OBJECT_IN_USE = 0xC0350019, + MD_NTSTATUS_WIN_STATUS_HV_INVALID_PROXIMITY_DOMAIN_INFO = 0xC035001A, + MD_NTSTATUS_WIN_STATUS_HV_NO_DATA = 0xC035001B, + MD_NTSTATUS_WIN_STATUS_HV_INACTIVE = 0xC035001C, + MD_NTSTATUS_WIN_STATUS_HV_NO_RESOURCES = 0xC035001D, + MD_NTSTATUS_WIN_STATUS_HV_FEATURE_UNAVAILABLE = 0xC035001E, + MD_NTSTATUS_WIN_STATUS_HV_INSUFFICIENT_BUFFER = 0xC0350033, + MD_NTSTATUS_WIN_STATUS_HV_INSUFFICIENT_DEVICE_DOMAINS = 0xC0350038, + MD_NTSTATUS_WIN_STATUS_HV_INVALID_LP_INDEX = 0xC0350041, + MD_NTSTATUS_WIN_STATUS_HV_NOT_PRESENT = 0xC0351000, + MD_NTSTATUS_WIN_STATUS_IPSEC_BAD_SPI = 0xC0360001, + MD_NTSTATUS_WIN_STATUS_IPSEC_SA_LIFETIME_EXPIRED = 0xC0360002, + MD_NTSTATUS_WIN_STATUS_IPSEC_WRONG_SA = 0xC0360003, + MD_NTSTATUS_WIN_STATUS_IPSEC_REPLAY_CHECK_FAILED = 0xC0360004, + MD_NTSTATUS_WIN_STATUS_IPSEC_INVALID_PACKET = 0xC0360005, + MD_NTSTATUS_WIN_STATUS_IPSEC_INTEGRITY_CHECK_FAILED = 0xC0360006, + MD_NTSTATUS_WIN_STATUS_IPSEC_CLEAR_TEXT_DROP = 0xC0360007, + MD_NTSTATUS_WIN_STATUS_IPSEC_AUTH_FIREWALL_DROP = 0xC0360008, + MD_NTSTATUS_WIN_STATUS_IPSEC_THROTTLE_DROP = 0xC0360009, + MD_NTSTATUS_WIN_STATUS_IPSEC_DOSP_BLOCK = 0xC0368000, + MD_NTSTATUS_WIN_STATUS_IPSEC_DOSP_RECEIVED_MULTICAST = 0xC0368001, + MD_NTSTATUS_WIN_STATUS_IPSEC_DOSP_INVALID_PACKET = 0xC0368002, + MD_NTSTATUS_WIN_STATUS_IPSEC_DOSP_STATE_LOOKUP_FAILED = 0xC0368003, + 
MD_NTSTATUS_WIN_STATUS_IPSEC_DOSP_MAX_ENTRIES = 0xC0368004, + MD_NTSTATUS_WIN_STATUS_IPSEC_DOSP_KEYMOD_NOT_ALLOWED = 0xC0368005, + MD_NTSTATUS_WIN_STATUS_IPSEC_DOSP_MAX_PER_IP_RATELIMIT_QUEUES = 0xC0368006, + MD_NTSTATUS_WIN_STATUS_VID_DUPLICATE_HANDLER = 0xC0370001, + MD_NTSTATUS_WIN_STATUS_VID_TOO_MANY_HANDLERS = 0xC0370002, + MD_NTSTATUS_WIN_STATUS_VID_QUEUE_FULL = 0xC0370003, + MD_NTSTATUS_WIN_STATUS_VID_HANDLER_NOT_PRESENT = 0xC0370004, + MD_NTSTATUS_WIN_STATUS_VID_INVALID_OBJECT_NAME = 0xC0370005, + MD_NTSTATUS_WIN_STATUS_VID_PARTITION_NAME_TOO_LONG = 0xC0370006, + MD_NTSTATUS_WIN_STATUS_VID_MESSAGE_QUEUE_NAME_TOO_LONG = 0xC0370007, + MD_NTSTATUS_WIN_STATUS_VID_PARTITION_ALREADY_EXISTS = 0xC0370008, + MD_NTSTATUS_WIN_STATUS_VID_PARTITION_DOES_NOT_EXIST = 0xC0370009, + MD_NTSTATUS_WIN_STATUS_VID_PARTITION_NAME_NOT_FOUND = 0xC037000A, + MD_NTSTATUS_WIN_STATUS_VID_MESSAGE_QUEUE_ALREADY_EXISTS = 0xC037000B, + MD_NTSTATUS_WIN_STATUS_VID_EXCEEDED_MBP_ENTRY_MAP_LIMIT = 0xC037000C, + MD_NTSTATUS_WIN_STATUS_VID_MB_STILL_REFERENCED = 0xC037000D, + MD_NTSTATUS_WIN_STATUS_VID_CHILD_GPA_PAGE_SET_CORRUPTED = 0xC037000E, + MD_NTSTATUS_WIN_STATUS_VID_INVALID_NUMA_SETTINGS = 0xC037000F, + MD_NTSTATUS_WIN_STATUS_VID_INVALID_NUMA_NODE_INDEX = 0xC0370010, + MD_NTSTATUS_WIN_STATUS_VID_NOTIFICATION_QUEUE_ALREADY_ASSOCIATED = 0xC0370011, + MD_NTSTATUS_WIN_STATUS_VID_INVALID_MEMORY_BLOCK_HANDLE = 0xC0370012, + MD_NTSTATUS_WIN_STATUS_VID_PAGE_RANGE_OVERFLOW = 0xC0370013, + MD_NTSTATUS_WIN_STATUS_VID_INVALID_MESSAGE_QUEUE_HANDLE = 0xC0370014, + MD_NTSTATUS_WIN_STATUS_VID_INVALID_GPA_RANGE_HANDLE = 0xC0370015, + MD_NTSTATUS_WIN_STATUS_VID_NO_MEMORY_BLOCK_NOTIFICATION_QUEUE = 0xC0370016, + MD_NTSTATUS_WIN_STATUS_VID_MEMORY_BLOCK_LOCK_COUNT_EXCEEDED = 0xC0370017, + MD_NTSTATUS_WIN_STATUS_VID_INVALID_PPM_HANDLE = 0xC0370018, + MD_NTSTATUS_WIN_STATUS_VID_MBPS_ARE_LOCKED = 0xC0370019, + MD_NTSTATUS_WIN_STATUS_VID_MESSAGE_QUEUE_CLOSED = 0xC037001A, + MD_NTSTATUS_WIN_STATUS_VID_VIRTUAL_PROCESSOR_LIMIT_EXCEEDED = 0xC037001B, + MD_NTSTATUS_WIN_STATUS_VID_STOP_PENDING = 0xC037001C, + MD_NTSTATUS_WIN_STATUS_VID_INVALID_PROCESSOR_STATE = 0xC037001D, + MD_NTSTATUS_WIN_STATUS_VID_EXCEEDED_KM_CONTEXT_COUNT_LIMIT = 0xC037001E, + MD_NTSTATUS_WIN_STATUS_VID_KM_INTERFACE_ALREADY_INITIALIZED = 0xC037001F, + MD_NTSTATUS_WIN_STATUS_VID_MB_PROPERTY_ALREADY_SET_RESET = 0xC0370020, + MD_NTSTATUS_WIN_STATUS_VID_MMIO_RANGE_DESTROYED = 0xC0370021, + MD_NTSTATUS_WIN_STATUS_VID_INVALID_CHILD_GPA_PAGE_SET = 0xC0370022, + MD_NTSTATUS_WIN_STATUS_VID_RESERVE_PAGE_SET_IS_BEING_USED = 0xC0370023, + MD_NTSTATUS_WIN_STATUS_VID_RESERVE_PAGE_SET_TOO_SMALL = 0xC0370024, + MD_NTSTATUS_WIN_STATUS_VID_MBP_ALREADY_LOCKED_USING_RESERVED_PAGE = 0xC0370025, + MD_NTSTATUS_WIN_STATUS_VID_MBP_COUNT_EXCEEDED_LIMIT = 0xC0370026, + MD_NTSTATUS_WIN_STATUS_VID_SAVED_STATE_CORRUPT = 0xC0370027, + MD_NTSTATUS_WIN_STATUS_VID_SAVED_STATE_UNRECOGNIZED_ITEM = 0xC0370028, + MD_NTSTATUS_WIN_STATUS_VID_SAVED_STATE_INCOMPATIBLE = 0xC0370029, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DATABASE_FULL = 0xC0380001, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_CONFIGURATION_CORRUPTED = 0xC0380002, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_CONFIGURATION_NOT_IN_SYNC = 0xC0380003, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_CONFIG_UPDATE_FAILED = 0xC0380004, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_CONTAINS_NON_SIMPLE_VOLUME = 0xC0380005, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_DUPLICATE = 0xC0380006, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_DYNAMIC = 0xC0380007, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_ID_INVALID = 0xC0380008, 
+ MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_INVALID = 0xC0380009, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_LAST_VOTER = 0xC038000A, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_LAYOUT_INVALID = 0xC038000B, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_LAYOUT_NON_BASIC_BETWEEN_BASIC_PARTITIONS = 0xC038000C, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_LAYOUT_NOT_CYLINDER_ALIGNED = 0xC038000D, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_LAYOUT_PARTITIONS_TOO_SMALL = 0xC038000E, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_LAYOUT_PRIMARY_BETWEEN_LOGICAL_PARTITIONS = 0xC038000F, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_LAYOUT_TOO_MANY_PARTITIONS = 0xC0380010, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_MISSING = 0xC0380011, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_NOT_EMPTY = 0xC0380012, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_NOT_ENOUGH_SPACE = 0xC0380013, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_REVECTORING_FAILED = 0xC0380014, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_SECTOR_SIZE_INVALID = 0xC0380015, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_SET_NOT_CONTAINED = 0xC0380016, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_USED_BY_MULTIPLE_MEMBERS = 0xC0380017, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_USED_BY_MULTIPLE_PLEXES = 0xC0380018, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DYNAMIC_DISK_NOT_SUPPORTED = 0xC0380019, + MD_NTSTATUS_WIN_STATUS_VOLMGR_EXTENT_ALREADY_USED = 0xC038001A, + MD_NTSTATUS_WIN_STATUS_VOLMGR_EXTENT_NOT_CONTIGUOUS = 0xC038001B, + MD_NTSTATUS_WIN_STATUS_VOLMGR_EXTENT_NOT_IN_PUBLIC_REGION = 0xC038001C, + MD_NTSTATUS_WIN_STATUS_VOLMGR_EXTENT_NOT_SECTOR_ALIGNED = 0xC038001D, + MD_NTSTATUS_WIN_STATUS_VOLMGR_EXTENT_OVERLAPS_EBR_PARTITION = 0xC038001E, + MD_NTSTATUS_WIN_STATUS_VOLMGR_EXTENT_VOLUME_LENGTHS_DO_NOT_MATCH = 0xC038001F, + MD_NTSTATUS_WIN_STATUS_VOLMGR_FAULT_TOLERANT_NOT_SUPPORTED = 0xC0380020, + MD_NTSTATUS_WIN_STATUS_VOLMGR_INTERLEAVE_LENGTH_INVALID = 0xC0380021, + MD_NTSTATUS_WIN_STATUS_VOLMGR_MAXIMUM_REGISTERED_USERS = 0xC0380022, + MD_NTSTATUS_WIN_STATUS_VOLMGR_MEMBER_IN_SYNC = 0xC0380023, + MD_NTSTATUS_WIN_STATUS_VOLMGR_MEMBER_INDEX_DUPLICATE = 0xC0380024, + MD_NTSTATUS_WIN_STATUS_VOLMGR_MEMBER_INDEX_INVALID = 0xC0380025, + MD_NTSTATUS_WIN_STATUS_VOLMGR_MEMBER_MISSING = 0xC0380026, + MD_NTSTATUS_WIN_STATUS_VOLMGR_MEMBER_NOT_DETACHED = 0xC0380027, + MD_NTSTATUS_WIN_STATUS_VOLMGR_MEMBER_REGENERATING = 0xC0380028, + MD_NTSTATUS_WIN_STATUS_VOLMGR_ALL_DISKS_FAILED = 0xC0380029, + MD_NTSTATUS_WIN_STATUS_VOLMGR_NO_REGISTERED_USERS = 0xC038002A, + MD_NTSTATUS_WIN_STATUS_VOLMGR_NO_SUCH_USER = 0xC038002B, + MD_NTSTATUS_WIN_STATUS_VOLMGR_NOTIFICATION_RESET = 0xC038002C, + MD_NTSTATUS_WIN_STATUS_VOLMGR_NUMBER_OF_MEMBERS_INVALID = 0xC038002D, + MD_NTSTATUS_WIN_STATUS_VOLMGR_NUMBER_OF_PLEXES_INVALID = 0xC038002E, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_DUPLICATE = 0xC038002F, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_ID_INVALID = 0xC0380030, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_INVALID = 0xC0380031, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_NAME_INVALID = 0xC0380032, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_OFFLINE = 0xC0380033, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_HAS_QUORUM = 0xC0380034, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_WITHOUT_QUORUM = 0xC0380035, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PARTITION_STYLE_INVALID = 0xC0380036, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PARTITION_UPDATE_FAILED = 0xC0380037, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_IN_SYNC = 0xC0380038, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_INDEX_DUPLICATE = 0xC0380039, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_INDEX_INVALID = 0xC038003A, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_LAST_ACTIVE = 0xC038003B, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_MISSING = 
0xC038003C, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_REGENERATING = 0xC038003D, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_TYPE_INVALID = 0xC038003E, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_NOT_RAID5 = 0xC038003F, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_NOT_SIMPLE = 0xC0380040, + MD_NTSTATUS_WIN_STATUS_VOLMGR_STRUCTURE_SIZE_INVALID = 0xC0380041, + MD_NTSTATUS_WIN_STATUS_VOLMGR_TOO_MANY_NOTIFICATION_REQUESTS = 0xC0380042, + MD_NTSTATUS_WIN_STATUS_VOLMGR_TRANSACTION_IN_PROGRESS = 0xC0380043, + MD_NTSTATUS_WIN_STATUS_VOLMGR_UNEXPECTED_DISK_LAYOUT_CHANGE = 0xC0380044, + MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_CONTAINS_MISSING_DISK = 0xC0380045, + MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_ID_INVALID = 0xC0380046, + MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_LENGTH_INVALID = 0xC0380047, + MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_LENGTH_NOT_SECTOR_SIZE_MULTIPLE = 0xC0380048, + MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_NOT_MIRRORED = 0xC0380049, + MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_NOT_RETAINED = 0xC038004A, + MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_OFFLINE = 0xC038004B, + MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_RETAINED = 0xC038004C, + MD_NTSTATUS_WIN_STATUS_VOLMGR_NUMBER_OF_EXTENTS_INVALID = 0xC038004D, + MD_NTSTATUS_WIN_STATUS_VOLMGR_DIFFERENT_SECTOR_SIZE = 0xC038004E, + MD_NTSTATUS_WIN_STATUS_VOLMGR_BAD_BOOT_DISK = 0xC038004F, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_CONFIG_OFFLINE = 0xC0380050, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_CONFIG_ONLINE = 0xC0380051, + MD_NTSTATUS_WIN_STATUS_VOLMGR_NOT_PRIMARY_PACK = 0xC0380052, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_LOG_UPDATE_FAILED = 0xC0380053, + MD_NTSTATUS_WIN_STATUS_VOLMGR_NUMBER_OF_DISKS_IN_PLEX_INVALID = 0xC0380054, + MD_NTSTATUS_WIN_STATUS_VOLMGR_NUMBER_OF_DISKS_IN_MEMBER_INVALID = 0xC0380055, + MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_MIRRORED = 0xC0380056, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_NOT_SIMPLE_SPANNED = 0xC0380057, + MD_NTSTATUS_WIN_STATUS_VOLMGR_NO_VALID_LOG_COPIES = 0xC0380058, + MD_NTSTATUS_WIN_STATUS_VOLMGR_PRIMARY_PACK_PRESENT = 0xC0380059, + MD_NTSTATUS_WIN_STATUS_VOLMGR_NUMBER_OF_DISKS_INVALID = 0xC038005A, + MD_NTSTATUS_WIN_STATUS_VOLMGR_MIRROR_NOT_SUPPORTED = 0xC038005B, + MD_NTSTATUS_WIN_STATUS_VOLMGR_RAID5_NOT_SUPPORTED = 0xC038005C, + MD_NTSTATUS_WIN_STATUS_BCD_TOO_MANY_ELEMENTS = 0xC0390002, + MD_NTSTATUS_WIN_STATUS_VHD_DRIVE_FOOTER_MISSING = 0xC03A0001, + MD_NTSTATUS_WIN_STATUS_VHD_DRIVE_FOOTER_CHECKSUM_MISMATCH = 0xC03A0002, + MD_NTSTATUS_WIN_STATUS_VHD_DRIVE_FOOTER_CORRUPT = 0xC03A0003, + MD_NTSTATUS_WIN_STATUS_VHD_FORMAT_UNKNOWN = 0xC03A0004, + MD_NTSTATUS_WIN_STATUS_VHD_FORMAT_UNSUPPORTED_VERSION = 0xC03A0005, + MD_NTSTATUS_WIN_STATUS_VHD_SPARSE_HEADER_CHECKSUM_MISMATCH = 0xC03A0006, + MD_NTSTATUS_WIN_STATUS_VHD_SPARSE_HEADER_UNSUPPORTED_VERSION = 0xC03A0007, + MD_NTSTATUS_WIN_STATUS_VHD_SPARSE_HEADER_CORRUPT = 0xC03A0008, + MD_NTSTATUS_WIN_STATUS_VHD_BLOCK_ALLOCATION_FAILURE = 0xC03A0009, + MD_NTSTATUS_WIN_STATUS_VHD_BLOCK_ALLOCATION_TABLE_CORRUPT = 0xC03A000A, + MD_NTSTATUS_WIN_STATUS_VHD_INVALID_BLOCK_SIZE = 0xC03A000B, + MD_NTSTATUS_WIN_STATUS_VHD_BITMAP_MISMATCH = 0xC03A000C, + MD_NTSTATUS_WIN_STATUS_VHD_PARENT_VHD_NOT_FOUND = 0xC03A000D, + MD_NTSTATUS_WIN_STATUS_VHD_CHILD_PARENT_ID_MISMATCH = 0xC03A000E, + MD_NTSTATUS_WIN_STATUS_VHD_CHILD_PARENT_TIMESTAMP_MISMATCH = 0xC03A000F, + MD_NTSTATUS_WIN_STATUS_VHD_METADATA_READ_FAILURE = 0xC03A0010, + MD_NTSTATUS_WIN_STATUS_VHD_METADATA_WRITE_FAILURE = 0xC03A0011, + MD_NTSTATUS_WIN_STATUS_VHD_INVALID_SIZE = 0xC03A0012, + MD_NTSTATUS_WIN_STATUS_VHD_INVALID_FILE_SIZE = 0xC03A0013, + 
MD_NTSTATUS_WIN_STATUS_VIRTDISK_PROVIDER_NOT_FOUND = 0xC03A0014, + MD_NTSTATUS_WIN_STATUS_VIRTDISK_NOT_VIRTUAL_DISK = 0xC03A0015, + MD_NTSTATUS_WIN_STATUS_VHD_PARENT_VHD_ACCESS_DENIED = 0xC03A0016, + MD_NTSTATUS_WIN_STATUS_VHD_CHILD_PARENT_SIZE_MISMATCH = 0xC03A0017, + MD_NTSTATUS_WIN_STATUS_VHD_DIFFERENCING_CHAIN_CYCLE_DETECTED = 0xC03A0018, + MD_NTSTATUS_WIN_STATUS_VHD_DIFFERENCING_CHAIN_ERROR_IN_PARENT = 0xC03A0019, + MD_NTSTATUS_WIN_STATUS_VIRTUAL_DISK_LIMITATION = 0xC03A001A, + MD_NTSTATUS_WIN_STATUS_VHD_INVALID_TYPE = 0xC03A001B, + MD_NTSTATUS_WIN_STATUS_VHD_INVALID_STATE = 0xC03A001C, + MD_NTSTATUS_WIN_STATUS_VIRTDISK_UNSUPPORTED_DISK_SECTOR_SIZE = 0xC03A001D, + MD_NTSTATUS_WIN_STATUS_VIRTDISK_DISK_ALREADY_OWNED = 0xC03A001E, + MD_NTSTATUS_WIN_STATUS_VIRTDISK_DISK_ONLINE_AND_WRITABLE = 0xC03A001F, + MD_NTSTATUS_WIN_STATUS_CTLOG_TRACKING_NOT_INITIALIZED = 0xC03A0020, + MD_NTSTATUS_WIN_STATUS_CTLOG_LOGFILE_SIZE_EXCEEDED_MAXSIZE = 0xC03A0021, + MD_NTSTATUS_WIN_STATUS_CTLOG_VHD_CHANGED_OFFLINE = 0xC03A0022, + MD_NTSTATUS_WIN_STATUS_CTLOG_INVALID_TRACKING_STATE = 0xC03A0023, + MD_NTSTATUS_WIN_STATUS_CTLOG_INCONSISTENT_TRACKING_FILE = 0xC03A0024, + MD_NTSTATUS_WIN_STATUS_VHD_METADATA_FULL = 0xC03A0028, + MD_NTSTATUS_WIN_STATUS_RKF_KEY_NOT_FOUND = 0xC0400001, + MD_NTSTATUS_WIN_STATUS_RKF_DUPLICATE_KEY = 0xC0400002, + MD_NTSTATUS_WIN_STATUS_RKF_BLOB_FULL = 0xC0400003, + MD_NTSTATUS_WIN_STATUS_RKF_STORE_FULL = 0xC0400004, + MD_NTSTATUS_WIN_STATUS_RKF_FILE_BLOCKED = 0xC0400005, + MD_NTSTATUS_WIN_STATUS_RKF_ACTIVE_KEY = 0xC0400006, + MD_NTSTATUS_WIN_STATUS_RDBSS_RESTART_OPERATION = 0xC0410001, + MD_NTSTATUS_WIN_STATUS_RDBSS_CONTINUE_OPERATION = 0xC0410002, + MD_NTSTATUS_WIN_STATUS_RDBSS_POST_OPERATION = 0xC0410003, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_INVALID_HANDLE = 0xC0420001, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_READ_NOT_PERMITTED = 0xC0420002, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_WRITE_NOT_PERMITTED = 0xC0420003, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_INVALID_PDU = 0xC0420004, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_INSUFFICIENT_AUTHENTICATION = 0xC0420005, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_REQUEST_NOT_SUPPORTED = 0xC0420006, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_INVALID_OFFSET = 0xC0420007, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_INSUFFICIENT_AUTHORIZATION = 0xC0420008, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_PREPARE_QUEUE_FULL = 0xC0420009, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_ATTRIBUTE_NOT_FOUND = 0xC042000A, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_ATTRIBUTE_NOT_LONG = 0xC042000B, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_INSUFFICIENT_ENCRYPTION_KEY_SIZE = 0xC042000C, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_INVALID_ATTRIBUTE_VALUE_LENGTH = 0xC042000D, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_UNLIKELY = 0xC042000E, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_INSUFFICIENT_ENCRYPTION = 0xC042000F, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_UNSUPPORTED_GROUP_TYPE = 0xC0420010, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_INSUFFICIENT_RESOURCES = 0xC0420011, + MD_NTSTATUS_WIN_STATUS_BTH_ATT_UNKNOWN_ERROR = 0xC0421000, + MD_NTSTATUS_WIN_STATUS_SECUREBOOT_ROLLBACK_DETECTED = 0xC0430001, + MD_NTSTATUS_WIN_STATUS_SECUREBOOT_POLICY_VIOLATION = 0xC0430002, + MD_NTSTATUS_WIN_STATUS_SECUREBOOT_INVALID_POLICY = 0xC0430003, + MD_NTSTATUS_WIN_STATUS_SECUREBOOT_POLICY_PUBLISHER_NOT_FOUND = 0xC0430004, + MD_NTSTATUS_WIN_STATUS_SECUREBOOT_POLICY_NOT_SIGNED = 0xC0430005, + MD_NTSTATUS_WIN_STATUS_SECUREBOOT_FILE_REPLACED = 0xC0430007, + MD_NTSTATUS_WIN_STATUS_AUDIO_ENGINE_NODE_NOT_FOUND = 0xC0440001, + MD_NTSTATUS_WIN_STATUS_HDAUDIO_EMPTY_CONNECTION_LIST = 0xC0440002, + 
MD_NTSTATUS_WIN_STATUS_HDAUDIO_CONNECTION_LIST_NOT_SUPPORTED = 0xC0440003, + MD_NTSTATUS_WIN_STATUS_HDAUDIO_NO_LOGICAL_DEVICES_CREATED = 0xC0440004, + MD_NTSTATUS_WIN_STATUS_HDAUDIO_NULL_LINKED_LIST_ENTRY = 0xC0440005, + MD_NTSTATUS_WIN_STATUS_VOLSNAP_BOOTFILE_NOT_VALID = 0xC0500003, + MD_NTSTATUS_WIN_STATUS_IO_PREEMPTED = 0xC0510001, + MD_NTSTATUS_WIN_STATUS_SVHDX_ERROR_STORED = 0xC05C0000, + MD_NTSTATUS_WIN_STATUS_SVHDX_ERROR_NOT_AVAILABLE = 0xC05CFF00, + MD_NTSTATUS_WIN_STATUS_SVHDX_UNIT_ATTENTION_AVAILABLE = 0xC05CFF01, + MD_NTSTATUS_WIN_STATUS_SVHDX_UNIT_ATTENTION_CAPACITY_DATA_CHANGED = 0xC05CFF02, + MD_NTSTATUS_WIN_STATUS_SVHDX_UNIT_ATTENTION_RESERVATIONS_PREEMPTED = 0xC05CFF03, + MD_NTSTATUS_WIN_STATUS_SVHDX_UNIT_ATTENTION_RESERVATIONS_RELEASED = 0xC05CFF04, + MD_NTSTATUS_WIN_STATUS_SVHDX_UNIT_ATTENTION_REGISTRATIONS_PREEMPTED = 0xC05CFF05, + MD_NTSTATUS_WIN_STATUS_SVHDX_UNIT_ATTENTION_OPERATING_DEFINITION_CHANGED = 0xC05CFF06, + MD_NTSTATUS_WIN_STATUS_SVHDX_RESERVATION_CONFLICT = 0xC05CFF07, + MD_NTSTATUS_WIN_STATUS_SVHDX_WRONG_FILE_TYPE = 0xC05CFF08, + MD_NTSTATUS_WIN_STATUS_SVHDX_VERSION_MISMATCH = 0xC05CFF09, + MD_NTSTATUS_WIN_STATUS_VHD_SHARED = 0xC05CFF0A, + MD_NTSTATUS_WIN_STATUS_SPACES_RESILIENCY_TYPE_INVALID = 0xC0E70003, + MD_NTSTATUS_WIN_STATUS_SPACES_DRIVE_SECTOR_SIZE_INVALID = 0xC0E70004, + MD_NTSTATUS_WIN_STATUS_SPACES_INTERLEAVE_LENGTH_INVALID = 0xC0E70009, + MD_NTSTATUS_WIN_STATUS_SPACES_NUMBER_OF_COLUMNS_INVALID = 0xC0E7000A, + MD_NTSTATUS_WIN_STATUS_SPACES_NOT_ENOUGH_DRIVES = 0xC0E7000B +} MDNTStatusCodeWin; + +// These constants are defined in the MSDN documentation of +// the EXCEPTION_RECORD structure. +typedef enum { + MD_ACCESS_VIOLATION_WIN_READ = 0, + MD_ACCESS_VIOLATION_WIN_WRITE = 1, + MD_ACCESS_VIOLATION_WIN_EXEC = 8 +} MDAccessViolationTypeWin; + +// These constants are defined in the MSDN documentation of +// the EXCEPTION_RECORD structure. +typedef enum { + MD_IN_PAGE_ERROR_WIN_READ = 0, + MD_IN_PAGE_ERROR_WIN_WRITE = 1, + MD_IN_PAGE_ERROR_WIN_EXEC = 8 +} MDInPageErrorTypeWin; + +#endif /* GOOGLE_BREAKPAD_COMMON_MINIDUMP_EXCEPTION_WIN32_H__ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_format.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_format.h new file mode 100644 index 0000000000..17a5abba33 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_format.h @@ -0,0 +1,972 @@ +/* Copyright (c) 2006, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +/* minidump_format.h: A cross-platform reimplementation of minidump-related + * portions of DbgHelp.h from the Windows Platform SDK. + * + * (This is C99 source, please don't corrupt it with C++.) + * + * Structures that are defined by Microsoft to contain a zero-length array + * are instead defined here to contain an array with one element, as + * zero-length arrays are forbidden by standard C and C++. In these cases, + * *_minsize constants are provided to be used in place of sizeof. For a + * cleaner interface to these sizes when using C++, see minidump_size.h. + * + * These structures are also sufficient to populate minidump files. + * + * These definitions may be extended to support handling minidump files + * for other CPUs and other operating systems. + * + * Because precise data type sizes are crucial for this implementation to + * function properly and portably in terms of interoperability with minidumps + * produced by DbgHelp on Windows, a set of primitive types with known sizes + * are used as the basis of each structure defined by this file. DbgHelp + * on Windows is assumed to be the reference implementation; this file + * seeks to provide a cross-platform compatible implementation. To avoid + * collisions with the types and values defined and used by DbgHelp in the + * event that this implementation is used on Windows, each type and value + * defined here is given a new name, beginning with "MD". Names of the + * equivalent types and values in the Windows Platform SDK are given in + * comments. + * + * Author: Mark Mentovai */ + + +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_FORMAT_H__ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_FORMAT_H__ + +#include + +#include "google_breakpad/common/breakpad_types.h" + + +#if defined(_MSC_VER) +/* Disable "zero-sized array in struct/union" warnings when compiling in + * MSVC. DbgHelp.h does this too. */ +#pragma warning(push) +#pragma warning(disable:4200) +#endif /* _MSC_VER */ + + +/* + * guiddef.h + */ + +typedef struct { + uint32_t data1; + uint16_t data2; + uint16_t data3; + uint8_t data4[8]; +} MDGUID; /* GUID */ + + +/* + * WinNT.h + */ + +/* Non-x86 CPU identifiers found in the high 24 bits of + * (MDRawContext*).context_flags. These aren't used by Breakpad, but are + * defined here for reference, to avoid assigning values that conflict + * (although some values already conflict). */ +#define MD_CONTEXT_IA64 0x00080000 /* CONTEXT_IA64 */ +/* Additional values from winnt.h in the Windows CE 5.0 SDK: */ +#define MD_CONTEXT_SHX 0x000000c0 /* CONTEXT_SH4 (Super-H, includes SH3) */ +#define MD_CONTEXT_ALPHA 0x00020000 /* CONTEXT_ALPHA */ + +/* As of Windows 7 SP1, the number of flag bits has increased to + * include 0x40 (CONTEXT_XSTATE): + * http://msdn.microsoft.com/en-us/library/hh134238%28v=vs.85%29.aspx */ +#define MD_CONTEXT_CPU_MASK 0xffffff00 + + +/* This is a base type for MDRawContextX86 and MDRawContextPPC. This + * structure should never be allocated directly. 
The actual structure type + * can be determined by examining the context_flags field. */ +typedef struct { + uint32_t context_flags; +} MDRawContextBase; + +#include "minidump_cpu_amd64.h" +#include "minidump_cpu_arm.h" +#include "minidump_cpu_arm64.h" +#include "minidump_cpu_mips.h" +#include "minidump_cpu_ppc.h" +#include "minidump_cpu_ppc64.h" +#include "minidump_cpu_sparc.h" +#include "minidump_cpu_x86.h" + +/* + * WinVer.h + */ + + +typedef struct { + uint32_t signature; + uint32_t struct_version; + uint32_t file_version_hi; + uint32_t file_version_lo; + uint32_t product_version_hi; + uint32_t product_version_lo; + uint32_t file_flags_mask; /* Identifies valid bits in fileFlags */ + uint32_t file_flags; + uint32_t file_os; + uint32_t file_type; + uint32_t file_subtype; + uint32_t file_date_hi; + uint32_t file_date_lo; +} MDVSFixedFileInfo; /* VS_FIXEDFILEINFO */ + +/* For (MDVSFixedFileInfo).signature */ +#define MD_VSFIXEDFILEINFO_SIGNATURE 0xfeef04bd + /* VS_FFI_SIGNATURE */ + +/* For (MDVSFixedFileInfo).version */ +#define MD_VSFIXEDFILEINFO_VERSION 0x00010000 + /* VS_FFI_STRUCVERSION */ + +/* For (MDVSFixedFileInfo).file_flags_mask and + * (MDVSFixedFileInfo).file_flags */ +#define MD_VSFIXEDFILEINFO_FILE_FLAGS_DEBUG 0x00000001 + /* VS_FF_DEBUG */ +#define MD_VSFIXEDFILEINFO_FILE_FLAGS_PRERELEASE 0x00000002 + /* VS_FF_PRERELEASE */ +#define MD_VSFIXEDFILEINFO_FILE_FLAGS_PATCHED 0x00000004 + /* VS_FF_PATCHED */ +#define MD_VSFIXEDFILEINFO_FILE_FLAGS_PRIVATEBUILD 0x00000008 + /* VS_FF_PRIVATEBUILD */ +#define MD_VSFIXEDFILEINFO_FILE_FLAGS_INFOINFERRED 0x00000010 + /* VS_FF_INFOINFERRED */ +#define MD_VSFIXEDFILEINFO_FILE_FLAGS_SPECIALBUILD 0x00000020 + /* VS_FF_SPECIALBUILD */ + +/* For (MDVSFixedFileInfo).file_os: high 16 bits */ +#define MD_VSFIXEDFILEINFO_FILE_OS_UNKNOWN 0 /* VOS_UNKNOWN */ +#define MD_VSFIXEDFILEINFO_FILE_OS_DOS (1 << 16) /* VOS_DOS */ +#define MD_VSFIXEDFILEINFO_FILE_OS_OS216 (2 << 16) /* VOS_OS216 */ +#define MD_VSFIXEDFILEINFO_FILE_OS_OS232 (3 << 16) /* VOS_OS232 */ +#define MD_VSFIXEDFILEINFO_FILE_OS_NT (4 << 16) /* VOS_NT */ +#define MD_VSFIXEDFILEINFO_FILE_OS_WINCE (5 << 16) /* VOS_WINCE */ +/* Low 16 bits */ +#define MD_VSFIXEDFILEINFO_FILE_OS__BASE 0 /* VOS__BASE */ +#define MD_VSFIXEDFILEINFO_FILE_OS__WINDOWS16 1 /* VOS__WINDOWS16 */ +#define MD_VSFIXEDFILEINFO_FILE_OS__PM16 2 /* VOS__PM16 */ +#define MD_VSFIXEDFILEINFO_FILE_OS__PM32 3 /* VOS__PM32 */ +#define MD_VSFIXEDFILEINFO_FILE_OS__WINDOWS32 4 /* VOS__WINDOWS32 */ + +/* For (MDVSFixedFileInfo).file_type */ +#define MD_VSFIXEDFILEINFO_FILE_TYPE_UNKNOWN 0 /* VFT_UNKNOWN */ +#define MD_VSFIXEDFILEINFO_FILE_TYPE_APP 1 /* VFT_APP */ +#define MD_VSFIXEDFILEINFO_FILE_TYPE_DLL 2 /* VFT_DLL */ +#define MD_VSFIXEDFILEINFO_FILE_TYPE_DRV 3 /* VFT_DLL */ +#define MD_VSFIXEDFILEINFO_FILE_TYPE_FONT 4 /* VFT_FONT */ +#define MD_VSFIXEDFILEINFO_FILE_TYPE_VXD 5 /* VFT_VXD */ +#define MD_VSFIXEDFILEINFO_FILE_TYPE_STATIC_LIB 7 /* VFT_STATIC_LIB */ + +/* For (MDVSFixedFileInfo).file_subtype */ +#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_UNKNOWN 0 + /* VFT2_UNKNOWN */ +/* with file_type = MD_VSFIXEDFILEINFO_FILETYPE_DRV */ +#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_DRV_PRINTER 1 + /* VFT2_DRV_PRINTER */ +#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_DRV_KEYBOARD 2 + /* VFT2_DRV_KEYBOARD */ +#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_DRV_LANGUAGE 3 + /* VFT2_DRV_LANGUAGE */ +#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_DRV_DISPLAY 4 + /* VFT2_DRV_DISPLAY */ +#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_DRV_MOUSE 5 + /* VFT2_DRV_MOUSE */ 
+#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_DRV_NETWORK 6 + /* VFT2_DRV_NETWORK */ +#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_DRV_SYSTEM 7 + /* VFT2_DRV_SYSTEM */ +#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_DRV_INSTALLABLE 8 + /* VFT2_DRV_INSTALLABLE */ +#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_DRV_SOUND 9 + /* VFT2_DRV_SOUND */ +#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_DRV_COMM 10 + /* VFT2_DRV_COMM */ +#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_DRV_INPUTMETHOD 11 + /* VFT2_DRV_INPUTMETHOD */ +#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_DRV_VERSIONED_PRINTER 12 + /* VFT2_DRV_VERSIONED_PRINTER */ +/* with file_type = MD_VSFIXEDFILEINFO_FILETYPE_FONT */ +#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_FONT_RASTER 1 + /* VFT2_FONT_RASTER */ +#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_FONT_VECTOR 2 + /* VFT2_FONT_VECTOR */ +#define MD_VSFIXEDFILEINFO_FILE_SUBTYPE_FONT_TRUETYPE 3 + /* VFT2_FONT_TRUETYPE */ + + +/* + * DbgHelp.h + */ + + +/* An MDRVA is an offset into the minidump file. The beginning of the + * MDRawHeader is at offset 0. */ +typedef uint32_t MDRVA; /* RVA */ + +typedef struct { + uint32_t data_size; + MDRVA rva; +} MDLocationDescriptor; /* MINIDUMP_LOCATION_DESCRIPTOR */ + + +typedef struct { + /* The base address of the memory range on the host that produced the + * minidump. */ + uint64_t start_of_memory_range; + + MDLocationDescriptor memory; +} MDMemoryDescriptor; /* MINIDUMP_MEMORY_DESCRIPTOR */ + + +typedef struct { + uint32_t signature; + uint32_t version; + uint32_t stream_count; + MDRVA stream_directory_rva; /* A |stream_count|-sized array of + * MDRawDirectory structures. */ + uint32_t checksum; /* Can be 0. In fact, that's all that's + * been found in minidump files. */ + uint32_t time_date_stamp; /* time_t */ + uint64_t flags; +} MDRawHeader; /* MINIDUMP_HEADER */ + +/* For (MDRawHeader).signature and (MDRawHeader).version. Note that only the + * low 16 bits of (MDRawHeader).version are MD_HEADER_VERSION. Per the + * documentation, the high 16 bits are implementation-specific. */ +#define MD_HEADER_SIGNATURE 0x504d444d /* 'PMDM' */ + /* MINIDUMP_SIGNATURE */ +#define MD_HEADER_VERSION 0x0000a793 /* 42899 */ + /* MINIDUMP_VERSION */ + +/* For (MDRawHeader).flags: */ +typedef enum { + /* MD_NORMAL is the standard type of minidump. It includes full + * streams for the thread list, module list, exception, system info, + * and miscellaneous info. A memory list stream is also present, + * pointing to the same stack memory contained in the thread list, + * as well as a 256-byte region around the instruction address that + * was executing when the exception occurred. Stack memory is from + * 4 bytes below a thread's stack pointer up to the top of the + * memory region encompassing the stack. 
*/ + MD_NORMAL = 0x00000000, + MD_WITH_DATA_SEGS = 0x00000001, + MD_WITH_FULL_MEMORY = 0x00000002, + MD_WITH_HANDLE_DATA = 0x00000004, + MD_FILTER_MEMORY = 0x00000008, + MD_SCAN_MEMORY = 0x00000010, + MD_WITH_UNLOADED_MODULES = 0x00000020, + MD_WITH_INDIRECTLY_REFERENCED_MEMORY = 0x00000040, + MD_FILTER_MODULE_PATHS = 0x00000080, + MD_WITH_PROCESS_THREAD_DATA = 0x00000100, + MD_WITH_PRIVATE_READ_WRITE_MEMORY = 0x00000200, + MD_WITHOUT_OPTIONAL_DATA = 0x00000400, + MD_WITH_FULL_MEMORY_INFO = 0x00000800, + MD_WITH_THREAD_INFO = 0x00001000, + MD_WITH_CODE_SEGS = 0x00002000, + MD_WITHOUT_AUXILLIARY_SEGS = 0x00004000, + MD_WITH_FULL_AUXILLIARY_STATE = 0x00008000, + MD_WITH_PRIVATE_WRITE_COPY_MEMORY = 0x00010000, + MD_IGNORE_INACCESSIBLE_MEMORY = 0x00020000, + MD_WITH_TOKEN_INFORMATION = 0x00040000 +} MDType; /* MINIDUMP_TYPE */ + + +typedef struct { + uint32_t stream_type; + MDLocationDescriptor location; +} MDRawDirectory; /* MINIDUMP_DIRECTORY */ + +/* For (MDRawDirectory).stream_type */ +typedef enum { + MD_UNUSED_STREAM = 0, + MD_RESERVED_STREAM_0 = 1, + MD_RESERVED_STREAM_1 = 2, + MD_THREAD_LIST_STREAM = 3, /* MDRawThreadList */ + MD_MODULE_LIST_STREAM = 4, /* MDRawModuleList */ + MD_MEMORY_LIST_STREAM = 5, /* MDRawMemoryList */ + MD_EXCEPTION_STREAM = 6, /* MDRawExceptionStream */ + MD_SYSTEM_INFO_STREAM = 7, /* MDRawSystemInfo */ + MD_THREAD_EX_LIST_STREAM = 8, + MD_MEMORY_64_LIST_STREAM = 9, + MD_COMMENT_STREAM_A = 10, + MD_COMMENT_STREAM_W = 11, + MD_HANDLE_DATA_STREAM = 12, + MD_FUNCTION_TABLE_STREAM = 13, + MD_UNLOADED_MODULE_LIST_STREAM = 14, + MD_MISC_INFO_STREAM = 15, /* MDRawMiscInfo */ + MD_MEMORY_INFO_LIST_STREAM = 16, /* MDRawMemoryInfoList */ + MD_THREAD_INFO_LIST_STREAM = 17, + MD_HANDLE_OPERATION_LIST_STREAM = 18, + MD_LAST_RESERVED_STREAM = 0x0000ffff, + + /* Breakpad extension types. 0x4767 = "Gg" */ + MD_BREAKPAD_INFO_STREAM = 0x47670001, /* MDRawBreakpadInfo */ + MD_ASSERTION_INFO_STREAM = 0x47670002, /* MDRawAssertionInfo */ + /* These are additional minidump stream values which are specific to + * the linux breakpad implementation. 
*/ + MD_LINUX_CPU_INFO = 0x47670003, /* /proc/cpuinfo */ + MD_LINUX_PROC_STATUS = 0x47670004, /* /proc/$x/status */ + MD_LINUX_LSB_RELEASE = 0x47670005, /* /etc/lsb-release */ + MD_LINUX_CMD_LINE = 0x47670006, /* /proc/$x/cmdline */ + MD_LINUX_ENVIRON = 0x47670007, /* /proc/$x/environ */ + MD_LINUX_AUXV = 0x47670008, /* /proc/$x/auxv */ + MD_LINUX_MAPS = 0x47670009, /* /proc/$x/maps */ + MD_LINUX_DSO_DEBUG = 0x4767000A /* MDRawDebug{32,64} */ +} MDStreamType; /* MINIDUMP_STREAM_TYPE */ + + +typedef struct { + uint32_t length; /* Length of buffer in bytes (not characters), + * excluding 0-terminator */ + uint16_t buffer[1]; /* UTF-16-encoded, 0-terminated */ +} MDString; /* MINIDUMP_STRING */ + +static const size_t MDString_minsize = offsetof(MDString, buffer[0]); + + +typedef struct { + uint32_t thread_id; + uint32_t suspend_count; + uint32_t priority_class; + uint32_t priority; + uint64_t teb; /* Thread environment block */ + MDMemoryDescriptor stack; + MDLocationDescriptor thread_context; /* MDRawContext[CPU] */ +} MDRawThread; /* MINIDUMP_THREAD */ + + +typedef struct { + uint32_t number_of_threads; + MDRawThread threads[1]; +} MDRawThreadList; /* MINIDUMP_THREAD_LIST */ + +static const size_t MDRawThreadList_minsize = offsetof(MDRawThreadList, + threads[0]); + + +typedef struct { + uint64_t base_of_image; + uint32_t size_of_image; + uint32_t checksum; /* 0 if unknown */ + uint32_t time_date_stamp; /* time_t */ + MDRVA module_name_rva; /* MDString, pathname or filename */ + MDVSFixedFileInfo version_info; + + /* The next field stores a CodeView record and is populated when a module's + * debug information resides in a PDB file. It identifies the PDB file. */ + MDLocationDescriptor cv_record; + + /* The next field is populated when a module's debug information resides + * in a DBG file. It identifies the DBG file. This field is effectively + * obsolete with modules built by recent toolchains. */ + MDLocationDescriptor misc_record; + + /* Alignment problem: reserved0 and reserved1 are defined by the platform + * SDK as 64-bit quantities. However, that results in a structure whose + * alignment is unpredictable on different CPUs and ABIs. If the ABI + * specifies full alignment of 64-bit quantities in structures (as ppc + * does), there will be padding between miscRecord and reserved0. If + * 64-bit quantities can be aligned on 32-bit boundaries (as on x86), + * this padding will not exist. (Note that the structure up to this point + * contains 1 64-bit member followed by 21 32-bit members.) + * As a workaround, reserved0 and reserved1 are instead defined here as + * four 32-bit quantities. This should be harmless, as there are + * currently no known uses for these fields. */ + uint32_t reserved0[2]; + uint32_t reserved1[2]; +} MDRawModule; /* MINIDUMP_MODULE */ + +/* The inclusion of a 64-bit type in MINIDUMP_MODULE forces the struct to + * be tail-padded out to a multiple of 64 bits under some ABIs (such as PPC). + * This doesn't occur on systems that don't tail-pad in this manner. Define + * this macro to be the usable size of the MDRawModule struct, and use it in + * place of sizeof(MDRawModule). */ +#define MD_MODULE_SIZE 108 + + +/* (MDRawModule).cv_record can reference MDCVInfoPDB20 or MDCVInfoPDB70. + * Ref.: http://www.debuginfo.com/articles/debuginfomatch.html + * MDCVInfoPDB70 is the expected structure type with recent toolchains. 
*/ + +typedef struct { + uint32_t signature; + uint32_t offset; /* Offset to debug data (expect 0 in minidump) */ +} MDCVHeader; + +typedef struct { + MDCVHeader cv_header; + uint32_t signature; /* time_t debug information created */ + uint32_t age; /* revision of PDB file */ + uint8_t pdb_file_name[1]; /* Pathname or filename of PDB file */ +} MDCVInfoPDB20; + +static const size_t MDCVInfoPDB20_minsize = offsetof(MDCVInfoPDB20, + pdb_file_name[0]); + +#define MD_CVINFOPDB20_SIGNATURE 0x3031424e /* cvHeader.signature = '01BN' */ + +typedef struct { + uint32_t cv_signature; + MDGUID signature; /* GUID, identifies PDB file */ + uint32_t age; /* Identifies incremental changes to PDB file */ + uint8_t pdb_file_name[1]; /* Pathname or filename of PDB file, + * 0-terminated 8-bit character data (UTF-8?) */ +} MDCVInfoPDB70; + +static const size_t MDCVInfoPDB70_minsize = offsetof(MDCVInfoPDB70, + pdb_file_name[0]); + +#define MD_CVINFOPDB70_SIGNATURE 0x53445352 /* cvSignature = 'SDSR' */ + +typedef struct { + uint32_t data1[2]; + uint32_t data2; + uint32_t data3; + uint32_t data4; + uint32_t data5[3]; + uint8_t extra[2]; +} MDCVInfoELF; + +/* In addition to the two CodeView record formats above, used for linking + * to external pdb files, it is possible for debugging data to be carried + * directly in the CodeView record itself. These signature values will + * be found in the first 4 bytes of the CodeView record. Additional values + * not commonly experienced in the wild are given by "Microsoft Symbol and + * Type Information", http://www.x86.org/ftp/manuals/tools/sym.pdf, section + * 7.2. An in-depth description of the CodeView 4.1 format is given by + * "Undocumented Windows 2000 Secrets", Windows 2000 Debugging Support/ + * Microsoft Symbol File Internals/CodeView Subsections, + * http://www.rawol.com/features/undocumented/sbs-w2k-1-windows-2000-debugging-support.pdf + */ +#define MD_CVINFOCV41_SIGNATURE 0x3930424e /* '90BN', CodeView 4.10. */ +#define MD_CVINFOCV50_SIGNATURE 0x3131424e /* '11BN', CodeView 5.0, + * MS C7-format (/Z7). */ + +#define MD_CVINFOUNKNOWN_SIGNATURE 0xffffffff /* An unlikely value. */ + +/* (MDRawModule).miscRecord can reference MDImageDebugMisc. The Windows + * structure is actually defined in WinNT.h. This structure is effectively + * obsolete with modules built by recent toolchains. */ + +typedef struct { + uint32_t data_type; /* IMAGE_DEBUG_TYPE_*, not defined here because + * this debug record type is mostly obsolete. */ + uint32_t length; /* Length of entire MDImageDebugMisc structure */ + uint8_t unicode; /* True if data is multibyte */ + uint8_t reserved[3]; + uint8_t data[1]; +} MDImageDebugMisc; /* IMAGE_DEBUG_MISC */ + +static const size_t MDImageDebugMisc_minsize = offsetof(MDImageDebugMisc, + data[0]); + + +typedef struct { + uint32_t number_of_modules; + MDRawModule modules[1]; +} MDRawModuleList; /* MINIDUMP_MODULE_LIST */ + +static const size_t MDRawModuleList_minsize = offsetof(MDRawModuleList, + modules[0]); + + +typedef struct { + uint32_t number_of_memory_ranges; + MDMemoryDescriptor memory_ranges[1]; +} MDRawMemoryList; /* MINIDUMP_MEMORY_LIST */ + +static const size_t MDRawMemoryList_minsize = offsetof(MDRawMemoryList, + memory_ranges[0]); + + +#define MD_EXCEPTION_MAXIMUM_PARAMETERS 15 + +typedef struct { + uint32_t exception_code; /* Windows: MDExceptionCodeWin, + * Mac OS X: MDExceptionMac, + * Linux: MDExceptionCodeLinux. */ + uint32_t exception_flags; /* Windows: 1 if noncontinuable, + Mac OS X: MDExceptionCodeMac. 
*/ + uint64_t exception_record; /* Address (in the minidump-producing host's + * memory) of another MDException, for + * nested exceptions. */ + uint64_t exception_address; /* The address that caused the exception. + * Mac OS X: exception subcode (which is + * typically the address). */ + uint32_t number_parameters; /* Number of valid elements in + * exception_information. */ + uint32_t __align; + uint64_t exception_information[MD_EXCEPTION_MAXIMUM_PARAMETERS]; +} MDException; /* MINIDUMP_EXCEPTION */ + +#include "minidump_exception_linux.h" +#include "minidump_exception_mac.h" +#include "minidump_exception_ps3.h" +#include "minidump_exception_solaris.h" +#include "minidump_exception_win32.h" + +typedef struct { + uint32_t thread_id; /* Thread in which the exception + * occurred. Corresponds to + * (MDRawThread).thread_id. */ + uint32_t __align; + MDException exception_record; + MDLocationDescriptor thread_context; /* MDRawContext[CPU] */ +} MDRawExceptionStream; /* MINIDUMP_EXCEPTION_STREAM */ + + +typedef union { + struct { + uint32_t vendor_id[3]; /* cpuid 0: ebx, edx, ecx */ + uint32_t version_information; /* cpuid 1: eax */ + uint32_t feature_information; /* cpuid 1: edx */ + uint32_t amd_extended_cpu_features; /* cpuid 0x80000001, ebx */ + } x86_cpu_info; + struct { + uint32_t cpuid; + uint32_t elf_hwcaps; /* linux specific, 0 otherwise */ + } arm_cpu_info; + struct { + uint64_t processor_features[2]; + } other_cpu_info; +} MDCPUInformation; /* CPU_INFORMATION */ + +/* For (MDCPUInformation).arm_cpu_info.elf_hwcaps. + * This matches the Linux kernel definitions from */ +typedef enum { + MD_CPU_ARM_ELF_HWCAP_SWP = (1 << 0), + MD_CPU_ARM_ELF_HWCAP_HALF = (1 << 1), + MD_CPU_ARM_ELF_HWCAP_THUMB = (1 << 2), + MD_CPU_ARM_ELF_HWCAP_26BIT = (1 << 3), + MD_CPU_ARM_ELF_HWCAP_FAST_MULT = (1 << 4), + MD_CPU_ARM_ELF_HWCAP_FPA = (1 << 5), + MD_CPU_ARM_ELF_HWCAP_VFP = (1 << 6), + MD_CPU_ARM_ELF_HWCAP_EDSP = (1 << 7), + MD_CPU_ARM_ELF_HWCAP_JAVA = (1 << 8), + MD_CPU_ARM_ELF_HWCAP_IWMMXT = (1 << 9), + MD_CPU_ARM_ELF_HWCAP_CRUNCH = (1 << 10), + MD_CPU_ARM_ELF_HWCAP_THUMBEE = (1 << 11), + MD_CPU_ARM_ELF_HWCAP_NEON = (1 << 12), + MD_CPU_ARM_ELF_HWCAP_VFPv3 = (1 << 13), + MD_CPU_ARM_ELF_HWCAP_VFPv3D16 = (1 << 14), + MD_CPU_ARM_ELF_HWCAP_TLS = (1 << 15), + MD_CPU_ARM_ELF_HWCAP_VFPv4 = (1 << 16), + MD_CPU_ARM_ELF_HWCAP_IDIVA = (1 << 17), + MD_CPU_ARM_ELF_HWCAP_IDIVT = (1 << 18), +} MDCPUInformationARMElfHwCaps; + +typedef struct { + /* The next 3 fields and numberOfProcessors are from the SYSTEM_INFO + * structure as returned by GetSystemInfo */ + uint16_t processor_architecture; + uint16_t processor_level; /* x86: 5 = 586, 6 = 686, ... */ + /* ARM: 6 = ARMv6, 7 = ARMv7 ... */ + uint16_t processor_revision; /* x86: 0xMMSS, where MM=model, + * SS=stepping */ + /* ARM: 0 */ + + uint8_t number_of_processors; + uint8_t product_type; /* Windows: VER_NT_* from WinNT.h */ + + /* The next 5 fields are from the OSVERSIONINFO structure as returned + * by GetVersionEx */ + uint32_t major_version; + uint32_t minor_version; + uint32_t build_number; + uint32_t platform_id; + MDRVA csd_version_rva; /* MDString further identifying the + * host OS. + * Windows: name of the installed OS + * service pack. + * Mac OS X: the Apple OS build number + * (sw_vers -buildVersion). 
+ * Linux: uname -srvmo */ + + uint16_t suite_mask; /* Windows: VER_SUITE_* from WinNT.h */ + uint16_t reserved2; + + MDCPUInformation cpu; +} MDRawSystemInfo; /* MINIDUMP_SYSTEM_INFO */ + +/* For (MDRawSystemInfo).processor_architecture: */ +typedef enum { + MD_CPU_ARCHITECTURE_X86 = 0, /* PROCESSOR_ARCHITECTURE_INTEL */ + MD_CPU_ARCHITECTURE_MIPS = 1, /* PROCESSOR_ARCHITECTURE_MIPS */ + MD_CPU_ARCHITECTURE_ALPHA = 2, /* PROCESSOR_ARCHITECTURE_ALPHA */ + MD_CPU_ARCHITECTURE_PPC = 3, /* PROCESSOR_ARCHITECTURE_PPC */ + MD_CPU_ARCHITECTURE_SHX = 4, /* PROCESSOR_ARCHITECTURE_SHX + * (Super-H) */ + MD_CPU_ARCHITECTURE_ARM = 5, /* PROCESSOR_ARCHITECTURE_ARM */ + MD_CPU_ARCHITECTURE_IA64 = 6, /* PROCESSOR_ARCHITECTURE_IA64 */ + MD_CPU_ARCHITECTURE_ALPHA64 = 7, /* PROCESSOR_ARCHITECTURE_ALPHA64 */ + MD_CPU_ARCHITECTURE_MSIL = 8, /* PROCESSOR_ARCHITECTURE_MSIL + * (Microsoft Intermediate Language) */ + MD_CPU_ARCHITECTURE_AMD64 = 9, /* PROCESSOR_ARCHITECTURE_AMD64 */ + MD_CPU_ARCHITECTURE_X86_WIN64 = 10, + /* PROCESSOR_ARCHITECTURE_IA32_ON_WIN64 (WoW64) */ + MD_CPU_ARCHITECTURE_SPARC = 0x8001, /* Breakpad-defined value for SPARC */ + MD_CPU_ARCHITECTURE_PPC64 = 0x8002, /* Breakpad-defined value for PPC64 */ + MD_CPU_ARCHITECTURE_ARM64 = 0x8003, /* Breakpad-defined value for ARM64 */ + MD_CPU_ARCHITECTURE_UNKNOWN = 0xffff /* PROCESSOR_ARCHITECTURE_UNKNOWN */ +} MDCPUArchitecture; + +/* For (MDRawSystemInfo).platform_id: */ +typedef enum { + MD_OS_WIN32S = 0, /* VER_PLATFORM_WIN32s (Windows 3.1) */ + MD_OS_WIN32_WINDOWS = 1, /* VER_PLATFORM_WIN32_WINDOWS (Windows 95-98-Me) */ + MD_OS_WIN32_NT = 2, /* VER_PLATFORM_WIN32_NT (Windows NT, 2000+) */ + MD_OS_WIN32_CE = 3, /* VER_PLATFORM_WIN32_CE, VER_PLATFORM_WIN32_HH + * (Windows CE, Windows Mobile, "Handheld") */ + + /* The following values are Breakpad-defined. */ + MD_OS_UNIX = 0x8000, /* Generic Unix-ish */ + MD_OS_MAC_OS_X = 0x8101, /* Mac OS X/Darwin */ + MD_OS_IOS = 0x8102, /* iOS */ + MD_OS_LINUX = 0x8201, /* Linux */ + MD_OS_SOLARIS = 0x8202, /* Solaris */ + MD_OS_ANDROID = 0x8203, /* Android */ + MD_OS_PS3 = 0x8204, /* PS3 */ + MD_OS_NACL = 0x8205 /* Native Client (NaCl) */ +} MDOSPlatform; + +typedef struct { + uint16_t year; + uint16_t month; + uint16_t day_of_week; + uint16_t day; + uint16_t hour; + uint16_t minute; + uint16_t second; + uint16_t milliseconds; +} MDSystemTime; /* SYSTEMTIME */ + +typedef struct { + /* Required field. The bias is the difference, in minutes, between + * Coordinated Universal Time (UTC) and local time. + * Formula: UTC = local time + bias */ + int32_t bias; + /* A description for standard time. For example, "EST" could indicate Eastern + * Standard Time. In practice this contains the full time zone names. This + * string can be empty. */ + uint16_t standard_name[32]; /* UTF-16-encoded, 0-terminated */ + /* A MDSystemTime structure that contains a date and local time when the + * transition from daylight saving time to standard time occurs on this + * operating system. If the time zone does not support daylight saving time, + * the month member in the MDSystemTime structure is zero. */ + MDSystemTime standard_date; + /* The bias value to be used during local time translations that occur during + * standard time. */ + int32_t standard_bias; + /* A description for daylight saving time. For example, "PDT" could indicate + * Pacific Daylight Time. In practice this contains the full time zone names. + * This string can be empty. 
*/ + uint16_t daylight_name[32]; /* UTF-16-encoded, 0-terminated */ + /* A MDSystemTime structure that contains a date and local time when the + * transition from standard time to daylight saving time occurs on this + * operating system. If the time zone does not support daylight saving time, + * the month member in the MDSystemTime structure is zero.*/ + MDSystemTime daylight_date; + /* The bias value to be used during local time translations that occur during + * daylight saving time. */ + int32_t daylight_bias; +} MDTimeZoneInformation; /* TIME_ZONE_INFORMATION */ + +/* MAX_PATH from windef.h */ +#define MD_MAX_PATH 260 + +/* The miscellaneous information stream contains a variety + * of small pieces of information. A member is valid if + * it's within the available size and its corresponding + * bit is set. */ +typedef struct { + uint32_t size_of_info; /* Length of entire MDRawMiscInfo structure. */ + uint32_t flags1; + + /* The next field is only valid if flags1 contains + * MD_MISCINFO_FLAGS1_PROCESS_ID. */ + uint32_t process_id; + + /* The next 3 fields are only valid if flags1 contains + * MD_MISCINFO_FLAGS1_PROCESS_TIMES. */ + uint32_t process_create_time; /* time_t process started */ + uint32_t process_user_time; /* seconds of user CPU time */ + uint32_t process_kernel_time; /* seconds of kernel CPU time */ + + /* The following fields are not present in MINIDUMP_MISC_INFO but are + * in MINIDUMP_MISC_INFO_2. When this struct is populated, these values + * may not be set. Use flags1 and size_of_info to determine whether these + * values are present. These are only valid when flags1 contains + * MD_MISCINFO_FLAGS1_PROCESSOR_POWER_INFO. */ + uint32_t processor_max_mhz; + uint32_t processor_current_mhz; + uint32_t processor_mhz_limit; + uint32_t processor_max_idle_state; + uint32_t processor_current_idle_state; + + /* The following fields are not present in MINIDUMP_MISC_INFO_2 but are + * in MINIDUMP_MISC_INFO_3. When this struct is populated, these values + * may not be set. Use flags1 and size_of_info to determine whether these + * values are present. */ + + /* The following field is only valid if flags1 contains + * MD_MISCINFO_FLAGS1_PROCESS_INTEGRITY. */ + uint32_t process_integrity_level; + + /* The following field is only valid if flags1 contains + * MD_MISCINFO_FLAGS1_PROCESS_EXECUTE_FLAGS. */ + uint32_t process_execute_flags; + + /* The following field is only valid if flags1 contains + * MD_MISCINFO_FLAGS1_PROTECTED_PROCESS. */ + uint32_t protected_process; + + /* The following 2 fields are only valid if flags1 contains + * MD_MISCINFO_FLAGS1_TIMEZONE. */ + uint32_t time_zone_id; + MDTimeZoneInformation time_zone; + + /* The following fields are not present in MINIDUMP_MISC_INFO_3 but are + * in MINIDUMP_MISC_INFO_4. When this struct is populated, these values + * may not be set. Use flags1 and size_of_info to determine whether these + * values are present. */ + + /* The following 2 fields are only valid if flags1 contains + * MD_MISCINFO_FLAGS1_BUILDSTRING. 
*/ + uint16_t build_string[MD_MAX_PATH]; /* UTF-16-encoded, 0-terminated */ + uint16_t dbg_bld_str[40]; /* UTF-16-encoded, 0-terminated */ +} MDRawMiscInfo; /* MINIDUMP_MISC_INFO, MINIDUMP_MISC_INFO_2, + * MINIDUMP_MISC_INFO_3, MINIDUMP_MISC_INFO_4, + * MINIDUMP_MISC_INFO_N */ + +static const size_t MD_MISCINFO_SIZE = + offsetof(MDRawMiscInfo, processor_max_mhz); +static const size_t MD_MISCINFO2_SIZE = + offsetof(MDRawMiscInfo, process_integrity_level); +static const size_t MD_MISCINFO3_SIZE = + offsetof(MDRawMiscInfo, build_string[0]); +static const size_t MD_MISCINFO4_SIZE = sizeof(MDRawMiscInfo); + +/* For (MDRawMiscInfo).flags1. These values indicate which fields in the + * MDRawMiscInfoStructure are valid. */ +typedef enum { + MD_MISCINFO_FLAGS1_PROCESS_ID = 0x00000001, + /* MINIDUMP_MISC1_PROCESS_ID */ + MD_MISCINFO_FLAGS1_PROCESS_TIMES = 0x00000002, + /* MINIDUMP_MISC1_PROCESS_TIMES */ + MD_MISCINFO_FLAGS1_PROCESSOR_POWER_INFO = 0x00000004, + /* MINIDUMP_MISC1_PROCESSOR_POWER_INFO */ + MD_MISCINFO_FLAGS1_PROCESS_INTEGRITY = 0x00000010, + /* MINIDUMP_MISC3_PROCESS_INTEGRITY */ + MD_MISCINFO_FLAGS1_PROCESS_EXECUTE_FLAGS = 0x00000020, + /* MINIDUMP_MISC3_PROCESS_EXECUTE_FLAGS */ + MD_MISCINFO_FLAGS1_TIMEZONE = 0x00000040, + /* MINIDUMP_MISC3_TIMEZONE */ + MD_MISCINFO_FLAGS1_PROTECTED_PROCESS = 0x00000080, + /* MINIDUMP_MISC3_PROTECTED_PROCESS */ + MD_MISCINFO_FLAGS1_BUILDSTRING = 0x00000100, + /* MINIDUMP_MISC4_BUILDSTRING */ +} MDMiscInfoFlags1; + +/* + * Around DbgHelp version 6.0, the style of new LIST structures changed + * from including an array of length 1 at the end of the struct to + * represent the variable-length data to including explicit + * "size of header", "size of entry" and "number of entries" fields + * in the header, presumably to allow backwards-compatibly-extending + * the structures in the future. The actual list entries follow the + * header data directly in this case. + */ + +typedef struct { + uint32_t size_of_header; /* sizeof(MDRawMemoryInfoList) */ + uint32_t size_of_entry; /* sizeof(MDRawMemoryInfo) */ + uint64_t number_of_entries; +} MDRawMemoryInfoList; /* MINIDUMP_MEMORY_INFO_LIST */ + +typedef struct { + uint64_t base_address; /* Base address of a region of pages */ + uint64_t allocation_base; /* Base address of a range of pages + * within this region. 
*/ + uint32_t allocation_protection; /* Memory protection when this region + * was originally allocated: + * MDMemoryProtection */ + uint32_t __alignment1; + uint64_t region_size; + uint32_t state; /* MDMemoryState */ + uint32_t protection; /* MDMemoryProtection */ + uint32_t type; /* MDMemoryType */ + uint32_t __alignment2; +} MDRawMemoryInfo; /* MINIDUMP_MEMORY_INFO */ + +/* For (MDRawMemoryInfo).state */ +typedef enum { + MD_MEMORY_STATE_COMMIT = 0x1000, /* physical storage has been allocated */ + MD_MEMORY_STATE_RESERVE = 0x2000, /* reserved, but no physical storage */ + MD_MEMORY_STATE_FREE = 0x10000 /* available to be allocated */ +} MDMemoryState; + +/* For (MDRawMemoryInfo).allocation_protection and .protection */ +typedef enum { + MD_MEMORY_PROTECT_NOACCESS = 0x01, /* PAGE_NOACCESS */ + MD_MEMORY_PROTECT_READONLY = 0x02, /* PAGE_READONLY */ + MD_MEMORY_PROTECT_READWRITE = 0x04, /* PAGE_READWRITE */ + MD_MEMORY_PROTECT_WRITECOPY = 0x08, /* PAGE_WRITECOPY */ + MD_MEMORY_PROTECT_EXECUTE = 0x10, /* PAGE_EXECUTE */ + MD_MEMORY_PROTECT_EXECUTE_READ = 0x20, /* PAGE_EXECUTE_READ */ + MD_MEMORY_PROTECT_EXECUTE_READWRITE = 0x40, /* PAGE_EXECUTE_READWRITE */ + MD_MEMORY_PROTECT_EXECUTE_WRITECOPY = 0x80, /* PAGE_EXECUTE_WRITECOPY */ + /* These options can be combined with the previous flags. */ + MD_MEMORY_PROTECT_GUARD = 0x100, /* PAGE_GUARD */ + MD_MEMORY_PROTECT_NOCACHE = 0x200, /* PAGE_NOCACHE */ + MD_MEMORY_PROTECT_WRITECOMBINE = 0x400, /* PAGE_WRITECOMBINE */ +} MDMemoryProtection; + +/* Used to mask the mutually exclusive options from the combinable flags. */ +const uint32_t MD_MEMORY_PROTECTION_ACCESS_MASK = 0xFF; + +/* For (MDRawMemoryInfo).type */ +typedef enum { + MD_MEMORY_TYPE_PRIVATE = 0x20000, /* not shared by other processes */ + MD_MEMORY_TYPE_MAPPED = 0x40000, /* mapped into the view of a section */ + MD_MEMORY_TYPE_IMAGE = 0x1000000 /* mapped into the view of an image */ +} MDMemoryType; + +/* + * Breakpad extension types + */ + + +typedef struct { + /* validity is a bitmask with values from MDBreakpadInfoValidity, indicating + * which of the other fields in the structure are valid. */ + uint32_t validity; + + /* Thread ID of the handler thread. dump_thread_id should correspond to + * the thread_id of an MDRawThread in the minidump's MDRawThreadList if + * a dedicated thread in that list was used to produce the minidump. If + * the MDRawThreadList does not contain a dedicated thread used to produce + * the minidump, this field should be set to 0 and the validity field + * must not contain MD_BREAKPAD_INFO_VALID_DUMP_THREAD_ID. */ + uint32_t dump_thread_id; + + /* Thread ID of the thread that requested the minidump be produced. As + * with dump_thread_id, requesting_thread_id should correspond to the + * thread_id of an MDRawThread in the minidump's MDRawThreadList. For + * minidumps produced as a result of an exception, requesting_thread_id + * will be the same as the MDRawExceptionStream's thread_id field. For + * minidumps produced "manually" at the program's request, + * requesting_thread_id will indicate which thread caused the dump to be + * written. If the minidump was produced at the request of something + * other than a thread in the MDRawThreadList, this field should be set + * to 0 and the validity field must not contain + * MD_BREAKPAD_INFO_VALID_REQUESTING_THREAD_ID. */ + uint32_t requesting_thread_id; +} MDRawBreakpadInfo; + +/* For (MDRawBreakpadInfo).validity: */ +typedef enum { + /* When set, the dump_thread_id field is valid. 
*/ + MD_BREAKPAD_INFO_VALID_DUMP_THREAD_ID = 1 << 0, + + /* When set, the requesting_thread_id field is valid. */ + MD_BREAKPAD_INFO_VALID_REQUESTING_THREAD_ID = 1 << 1 +} MDBreakpadInfoValidity; + +typedef struct { + /* expression, function, and file are 0-terminated UTF-16 strings. They + * may be truncated if necessary, but should always be 0-terminated when + * written to a file. + * Fixed-length strings are used because MiniDumpWriteDump doesn't offer + * a way for user streams to point to arbitrary RVAs for strings. */ + uint16_t expression[128]; /* Assertion that failed... */ + uint16_t function[128]; /* ...within this function... */ + uint16_t file[128]; /* ...in this file... */ + uint32_t line; /* ...at this line. */ + uint32_t type; +} MDRawAssertionInfo; + +/* For (MDRawAssertionInfo).type: */ +typedef enum { + MD_ASSERTION_INFO_TYPE_UNKNOWN = 0, + + /* Used for assertions that would be raised by the MSVC CRT but are + * directed to an invalid parameter handler instead. */ + MD_ASSERTION_INFO_TYPE_INVALID_PARAMETER, + + /* Used for assertions that would be raised by the MSVC CRT but are + * directed to a pure virtual call handler instead. */ + MD_ASSERTION_INFO_TYPE_PURE_VIRTUAL_CALL +} MDAssertionInfoData; + +/* These structs are used to store the DSO debug data in Linux minidumps, + * which is necessary for converting minidumps to usable coredumps. + * Because of a historical accident, several fields are variably encoded + * according to client word size, so tools potentially need to support both. */ + +typedef struct { + uint32_t addr; + MDRVA name; + uint32_t ld; +} MDRawLinkMap32; + +typedef struct { + uint32_t version; + MDRVA map; /* array of MDRawLinkMap32 */ + uint32_t dso_count; + uint32_t brk; + uint32_t ldbase; + uint32_t dynamic; +} MDRawDebug32; + +typedef struct { + uint64_t addr; + MDRVA name; + uint64_t ld; +} MDRawLinkMap64; + +typedef struct { + uint32_t version; + MDRVA map; /* array of MDRawLinkMap64 */ + uint32_t dso_count; + uint64_t brk; + uint64_t ldbase; + uint64_t dynamic; +} MDRawDebug64; + +#if defined(_MSC_VER) +#pragma warning(pop) +#endif /* _MSC_VER */ + + +#endif /* GOOGLE_BREAKPAD_COMMON_MINIDUMP_FORMAT_H__ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_size.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_size.h new file mode 100644 index 0000000000..918544b662 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/common/minidump_size.h @@ -0,0 +1,107 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +// minidump_size.h: Provides a C++ template for programmatic access to +// the sizes of various types defined in minidump_format.h. +// +// Author: Mark Mentovai + +#ifndef GOOGLE_BREAKPAD_COMMON_MINIDUMP_SIZE_H__ +#define GOOGLE_BREAKPAD_COMMON_MINIDUMP_SIZE_H__ + +#include <sys/types.h> + +#include "google_breakpad/common/minidump_format.h" + +namespace google_breakpad { + +template<typename T> +class minidump_size { + public: + static size_t size() { return sizeof(T); } +}; + +// Explicit specializations for variable-length types. The size returned +// for these should be the size for an object without its variable-length +// section. + +template<> +class minidump_size<MDString> { + public: + static size_t size() { return MDString_minsize; } +}; + +template<> +class minidump_size<MDRawThreadList> { + public: + static size_t size() { return MDRawThreadList_minsize; } +}; + +template<> +class minidump_size<MDCVInfoPDB20> { + public: + static size_t size() { return MDCVInfoPDB20_minsize; } +}; + +template<> +class minidump_size<MDCVInfoPDB70> { + public: + static size_t size() { return MDCVInfoPDB70_minsize; } +}; + +template<> +class minidump_size<MDImageDebugMisc> { + public: + static size_t size() { return MDImageDebugMisc_minsize; } +}; + +template<> +class minidump_size<MDRawModuleList> { + public: + static size_t size() { return MDRawModuleList_minsize; } +}; + +template<> +class minidump_size<MDRawMemoryList> { + public: + static size_t size() { return MDRawMemoryList_minsize; } +}; + +// Explicit specialization for MDRawModule, for which sizeof may include +// tail-padding on some architectures but not others. + +template<> +class minidump_size<MDRawModule> { + public: + static size_t size() { return MD_MODULE_SIZE; } +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_COMMON_MINIDUMP_SIZE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/basic_source_line_resolver.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/basic_source_line_resolver.h new file mode 100644 index 0000000000..6bb6d8639a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/basic_source_line_resolver.h @@ -0,0 +1,144 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc.
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// basic_source_line_resolver.h: BasicSourceLineResolver is derived from +// SourceLineResolverBase, and is a concrete implementation of +// SourceLineResolverInterface, using address map files produced by a +// compatible writer, e.g. PDBSourceLineWriter. +// +// see "processor/source_line_resolver_base.h" +// and "source_line_resolver_interface.h" for more documentation. + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_BASIC_SOURCE_LINE_RESOLVER_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_BASIC_SOURCE_LINE_RESOLVER_H__ + +#include <map> +#include <string> + +#include "common/using_std_string.h" +#include "google_breakpad/processor/source_line_resolver_base.h" + +namespace google_breakpad { + +using std::map; + +class BasicSourceLineResolver : public SourceLineResolverBase { + public: + BasicSourceLineResolver(); + virtual ~BasicSourceLineResolver() { } + + using SourceLineResolverBase::LoadModule; + using SourceLineResolverBase::LoadModuleUsingMapBuffer; + using SourceLineResolverBase::LoadModuleUsingMemoryBuffer; + using SourceLineResolverBase::ShouldDeleteMemoryBufferAfterLoadModule; + using SourceLineResolverBase::UnloadModule; + using SourceLineResolverBase::HasModule; + using SourceLineResolverBase::IsModuleCorrupt; + using SourceLineResolverBase::FillSourceLineInfo; + using SourceLineResolverBase::FindWindowsFrameInfo; + using SourceLineResolverBase::FindCFIFrameInfo; + + private: + // friend declarations: + friend class BasicModuleFactory; + friend class ModuleComparer; + friend class ModuleSerializer; + template<class> friend class SimpleSerializer; + + // Function derives from SourceLineResolverBase::Function. + struct Function; + // Module implements SourceLineResolverBase::Module interface. + class Module; + + // Disallow unwanted copy ctor and assignment operator + BasicSourceLineResolver(const BasicSourceLineResolver&); + void operator=(const BasicSourceLineResolver&); +}; + +// Helper class, containing useful methods for parsing of Breakpad symbol files. +class SymbolParseHelper { + public: + // Parses a |file_line| declaration. Returns true on success. + // Format: FILE <id> <filename>. + // Notice, that this method modifies the input |file_line| which is why it + // can't be const. On success, <id>, and <filename> are stored in |*index|, + // and |*filename|. No allocation is done, |*filename| simply points inside + // |file_line|. + static bool ParseFile(char *file_line, // in + long *index, // out + char **filename); // out + + // Parses a |function_line| declaration. Returns true on success. + // Format: FUNC <address> <size> <stack_param_size> <name>
. + // Notice, that this method modifies the input |function_line| which is why it + // can't be const. On success,
<address>, <size>, <stack_param_size>, and <name> + // are stored in |*address|, |*size|, |*stack_param_size|, and |*name|. + // No allocation is done, |*name| simply points inside |function_line|. + static bool ParseFunction(char *function_line, // in + uint64_t *address, // out + uint64_t *size, // out + long *stack_param_size, // out + char **name); // out + + // Parses a |line| declaration. Returns true on success. + // Format: <address> <size> <line number> <source file id>
+ // Notice, that this method modifies the input |function_line| which is why + // it can't be const. On success,
<address>, <size>, <line number>, and <source file id> + // are stored in |*address|, |*size|, |*line_number|, and + // |*source_file|. + static bool ParseLine(char *line_line, // in + uint64_t *address, // out + uint64_t *size, // out + long *line_number, // out + long *source_file); // out + + // Parses a |public_line| declaration. Returns true on success. + // Format: PUBLIC <address> <stack_param_size> <name>
+ // Notice, that this method modifies the input |function_line| which is why + // it can't be const. On success,
<address>, <stack_param_size>, <name> + // are stored in |*address|, |*stack_param_size|, and |*name|. + // No allocation is done, |*name| simply points inside |public_line|. + static bool ParsePublicSymbol(char *public_line, // in + uint64_t *address, // out + long *stack_param_size, // out + char **name); // out + + private: + // Used for success checks after strtoull and strtol. + static bool IsValidAfterNumber(char *after_number); + + // Only allow static methods. + SymbolParseHelper(); + SymbolParseHelper(const SymbolParseHelper&); + void operator=(const SymbolParseHelper&); +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_BASIC_SOURCE_LINE_RESOLVER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/call_stack.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/call_stack.h new file mode 100644 index 0000000000..21f595e7b2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/call_stack.h @@ -0,0 +1,77 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// call_stack.h: A call stack comprised of stack frames. +// +// This class manages a vector of stack frames. It is used instead of +// exposing the vector directly to allow the CallStack to own StackFrame +// pointers without having to publicly export the linked_ptr class. A +// CallStack must be composed of pointers instead of objects to allow for +// CPU-specific StackFrame subclasses. +// +// By convention, the stack frame at index 0 is the innermost callee frame, +// and the frame at the highest index in a call stack is the outermost +// caller. CallStack only allows stacks to be built by pushing frames, +// beginning with the innermost callee frame.
+// +// Author: Mark Mentovai + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_CALL_STACK_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_CALL_STACK_H__ + +#include <vector> + +namespace google_breakpad { + +using std::vector; + +struct StackFrame; +template<typename T> class linked_ptr; + +class CallStack { + public: + CallStack() { Clear(); } + ~CallStack(); + + // Resets the CallStack to its initial empty state + void Clear(); + + const vector<StackFrame*>* frames() const { return &frames_; } + + private: + // Stackwalker is responsible for building the frames_ vector. + friend class Stackwalker; + + // Storage for pushed frames. + vector<StackFrame*> frames_; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCSSOR_CALL_STACK_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/code_module.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/code_module.h new file mode 100644 index 0000000000..4e8928243c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/code_module.h @@ -0,0 +1,94 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// code_module.h: Carries information about code modules that are loaded +// into a process. +// +// Author: Mark Mentovai + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_CODE_MODULE_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_CODE_MODULE_H__ + +#include <string> + +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" + +namespace google_breakpad { + +class CodeModule { + public: + virtual ~CodeModule() {} + + // The base address of this code module as it was loaded by the process. + // (uint64_t)-1 on error. + virtual uint64_t base_address() const = 0; + + // The size of the code module. 0 on error. + virtual uint64_t size() const = 0; + + // The path or file name that the code module was loaded from. Empty on + // error. + virtual string code_file() const = 0; + + // An identifying string used to discriminate between multiple versions and + // builds of the same code module.
This may contain a uuid, timestamp, + // version number, or any combination of this or other information, in an + // implementation-defined format. Empty on error. + virtual string code_identifier() const = 0; + + // The filename containing debugging information associated with the code + // module. If debugging information is stored in a file separate from the + // code module itself (as is the case when .pdb or .dSYM files are used), + // this will be different from code_file. If debugging information is + // stored in the code module itself (possibly prior to stripping), this + // will be the same as code_file. Empty on error. + virtual string debug_file() const = 0; + + // An identifying string similar to code_identifier, but identifies a + // specific version and build of the associated debug file. This may be + // the same as code_identifier when the debug_file and code_file are + // identical or when the same identifier is used to identify distinct + // debug and code files. + virtual string debug_identifier() const = 0; + + // A human-readable representation of the code module's version. Empty on + // error. + virtual string version() const = 0; + + // Creates a new copy of this CodeModule object, which the caller takes + // ownership of. The new CodeModule may be of a different concrete class + // than the CodeModule being copied, but will behave identically to the + // copied CodeModule as far as the CodeModule interface is concerned. + virtual const CodeModule* Copy() const = 0; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_CODE_MODULE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/code_modules.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/code_modules.h new file mode 100644 index 0000000000..a38579af6d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/code_modules.h @@ -0,0 +1,98 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
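Editorial aside, not part of the vendored Breakpad headers in this diff: the CodeModule interface above and the CodeModules container declared just below are typically consumed together. The sketch that follows relies only on the pure-virtual methods shown here; any concrete CodeModules implementation (for example Breakpad's BasicCodeModules in the processor library, which this change does not add) is assumed to be supplied by the caller.

#include <cstdio>

#include "google_breakpad/processor/code_module.h"
#include "google_breakpad/processor/code_modules.h"

// Prints one line per loaded module: base address, size, path, and the
// debug identifier used to match symbol files.
void PrintModules(const google_breakpad::CodeModules* modules) {
  if (!modules)
    return;
  for (unsigned int i = 0; i < modules->module_count(); ++i) {
    const google_breakpad::CodeModule* module = modules->GetModuleAtIndex(i);
    std::printf("0x%016llx %10llu %s (%s)\n",
                static_cast<unsigned long long>(module->base_address()),
                static_cast<unsigned long long>(module->size()),
                module->code_file().c_str(),
                module->debug_identifier().c_str());
  }
}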
+ +// code_modules.h: Contains all of the CodeModule objects that were loaded +// into a single process. +// +// Author: Mark Mentovai + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_CODE_MODULES_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_CODE_MODULES_H__ + +#include "google_breakpad/common/breakpad_types.h" + +namespace google_breakpad { + +class CodeModule; + +class CodeModules { + public: + virtual ~CodeModules() {} + + // The number of contained CodeModule objects. + virtual unsigned int module_count() const = 0; + + // Random access to modules. Returns the module whose code is present + // at the address indicated by |address|. If no module is present at this + // address, returns NULL. Ownership of the returned CodeModule is retained + // by the CodeModules object; pointers returned by this method are valid for + // comparison with pointers returned by the other Get methods. + virtual const CodeModule* GetModuleForAddress(uint64_t address) const = 0; + + // Returns the module corresponding to the main executable. If there is + // no main executable, returns NULL. Ownership of the returned CodeModule + // is retained by the CodeModules object; pointers returned by this method + // are valid for comparison with pointers returned by the other Get + // methods. + virtual const CodeModule* GetMainModule() const = 0; + + // Sequential access to modules. A sequence number of 0 corresponds to the + // module residing lowest in memory. If the sequence number is out of + // range, returns NULL. Ownership of the returned CodeModule is retained + // by the CodeModules object; pointers returned by this method are valid for + // comparison with pointers returned by the other Get methods. + virtual const CodeModule* GetModuleAtSequence( + unsigned int sequence) const = 0; + + // Sequential access to modules. This is similar to GetModuleAtSequence, + // except no ordering requirement is enforced. A CodeModules implementation + // may return CodeModule objects from GetModuleAtIndex in any order it + // wishes, provided that the order remain the same throughout the life of + // the CodeModules object. Typically, GetModuleAtIndex would be used by + // a caller to enumerate all CodeModule objects quickly when the enumeration + // does not require any ordering. If the index argument is out of range, + // returns NULL. Ownership of the returned CodeModule is retained by + // the CodeModules object; pointers returned by this method are valid for + // comparison with pointers returned by the other Get methods. + virtual const CodeModule* GetModuleAtIndex(unsigned int index) const = 0; + + // Creates a new copy of this CodeModules object, which the caller takes + // ownership of. The new object will also contain copies of the existing + // object's child CodeModule objects. The new CodeModules object may be of + // a different concrete class than the object being copied, but will behave + // identically to the copied object as far as the CodeModules and CodeModule + // interfaces are concerned, except that the order that GetModuleAtIndex + // returns objects in may differ between a copy and the original CodeModules + // object. 
+ virtual const CodeModules* Copy() const = 0; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_CODE_MODULES_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/dump_context.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/dump_context.h new file mode 100644 index 0000000000..f6238ffa57 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/dump_context.h @@ -0,0 +1,112 @@ +// Copyright (c) 2014 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// dump_context.h: A (mini/micro) dump CPU-specific context. + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_DUMP_CONTEXT_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_DUMP_CONTEXT_H__ + +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/dump_object.h" + +namespace google_breakpad { + +// DumpContext carries a CPU-specific MDRawContext structure, which contains CPU +// context such as register states. +class DumpContext : public DumpObject { + public: + virtual ~DumpContext(); + + // Returns an MD_CONTEXT_* value such as MD_CONTEXT_X86 or MD_CONTEXT_PPC + // identifying the CPU type that the context was collected from. The + // returned value will identify the CPU only, and will have any other + // MD_CONTEXT_* bits masked out. Returns 0 on failure. + uint32_t GetContextCPU() const; + + // Return the raw value of |context_flags_| + uint32_t GetContextFlags() const; + + // Returns raw CPU-specific context data for the named CPU type. If the + // context data does not match the CPU type or does not exist, returns NULL. 
+ const MDRawContextAMD64* GetContextAMD64() const; + const MDRawContextARM* GetContextARM() const; + const MDRawContextARM64* GetContextARM64() const; + const MDRawContextMIPS* GetContextMIPS() const; + const MDRawContextPPC* GetContextPPC() const; + const MDRawContextPPC64* GetContextPPC64() const; + const MDRawContextSPARC* GetContextSPARC() const; + const MDRawContextX86* GetContextX86() const; + + // A convenience method to get the instruction pointer out of the + // MDRawContext, since it varies per-CPU architecture. + bool GetInstructionPointer(uint64_t* ip) const; + + // Print a human-readable representation of the object to stdout. + void Print(); + + protected: + DumpContext(); + + // Sets row CPU-specific context data for the names CPU type. + void SetContextFlags(uint32_t context_flags); + void SetContextX86(MDRawContextX86* x86); + void SetContextPPC(MDRawContextPPC* ppc); + void SetContextPPC64(MDRawContextPPC64* ppc64); + void SetContextAMD64(MDRawContextAMD64* amd64); + void SetContextSPARC(MDRawContextSPARC* ctx_sparc); + void SetContextARM(MDRawContextARM* arm); + void SetContextARM64(MDRawContextARM64* arm64); + void SetContextMIPS(MDRawContextMIPS* ctx_mips); + + // Free the CPU-specific context structure. + void FreeContext(); + + private: + // The CPU-specific context structure. + union { + MDRawContextBase* base; + MDRawContextX86* x86; + MDRawContextPPC* ppc; + MDRawContextPPC64* ppc64; + MDRawContextAMD64* amd64; + // on Solaris SPARC, sparc is defined as a numeric constant, + // so variables can NOT be named as sparc + MDRawContextSPARC* ctx_sparc; + MDRawContextARM* arm; + MDRawContextARM64* arm64; + MDRawContextMIPS* ctx_mips; + } context_; + + // Store this separately because of the weirdo AMD64 context + uint32_t context_flags_; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_DUMP_CONTEXT_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/dump_object.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/dump_object.h new file mode 100644 index 0000000000..112f687f4c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/dump_object.h @@ -0,0 +1,53 @@ +// Copyright (c) 2014 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// dump_object.h: A base class for all mini/micro dump object. + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_DUMP_OBJECT_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_DUMP_OBJECT_H__ + +namespace google_breakpad { + +// DumpObject is the base of various mini/micro dump's objects. +class DumpObject { + public: + DumpObject(); + + bool valid() const { return valid_; } + + protected: + // DumpObjects are not valid when created. When a subclass populates its own + // fields, it can set valid_ to true. Accessors and mutators may wish to + // consider or alter the valid_ state as they interact with objects. + bool valid_; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_DUMP_OBJECT_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/exploitability.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/exploitability.h new file mode 100644 index 0000000000..67255a3ac3 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/exploitability.h @@ -0,0 +1,73 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// exploitability_engine.h: Generic exploitability engine. +// +// The Exploitability class is an abstract base class providing common +// generic methods that apply to exploitability engines for specific platforms. 
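Stepping back to dump_object.h above: the valid_ flag is the whole contract, so a subclass is expected to start out invalid and flip the flag only once its read step succeeds. A minimal sketch of that pattern, using a hypothetical subclass that is not part of this diff:

#include "google_breakpad/processor/dump_object.h"

// Hypothetical subclass illustrating the valid_ contract described above:
// objects start invalid and become valid only after a successful read.
class ExampleRecord : public google_breakpad::DumpObject {
 public:
  bool Read() {
    // ... populate fields from the dump here ...
    valid_ = true;  // flip only after everything was read successfully
    return valid_;
  }
};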
+// Specific implementations will extend this class by providing run +// methods to fill in the exploitability_ enumeration of the ProcessState +// for a crash. +// +// Author: Cris Neckar + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_EXPLOITABILITY_H_ +#define GOOGLE_BREAKPAD_PROCESSOR_EXPLOITABILITY_H_ + +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/processor/minidump.h" +#include "google_breakpad/processor/process_state.h" + +namespace google_breakpad { + +class Exploitability { + public: + virtual ~Exploitability() {} + + static Exploitability *ExploitabilityForPlatform(Minidump *dump, + ProcessState *process_state); + + ExploitabilityRating CheckExploitability(); + bool AddressIsAscii(uint64_t); + + protected: + Exploitability(Minidump *dump, + ProcessState *process_state); + + Minidump *dump_; + ProcessState *process_state_; + SystemInfo *system_info_; + + private: + virtual ExploitabilityRating CheckPlatformExploitability() = 0; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_EXPLOITABILITY_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/fast_source_line_resolver.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/fast_source_line_resolver.h new file mode 100644 index 0000000000..fdf9107766 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/fast_source_line_resolver.h @@ -0,0 +1,100 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// fast_source_line_resolver.h: FastSourceLineResolver is derived from +// SourceLineResolverBase, and is a concrete implementation of +// SourceLineResolverInterface. +// +// FastSourceLineResolver is a sibling class of BasicSourceLineResolver. The +// difference is FastSourceLineResolver loads a serialized memory chunk of data +// which can be used directly a Module without parsing or copying of underlying +// data. 
Therefore loading a symbol in FastSourceLineResolver is much faster
+// and more memory-efficient than BasicSourceLineResolver.
+//
+// See "source_line_resolver_base.h" and
+// "google_breakpad/source_line_resolver_interface.h" for more reference.
+//
+// Author: Siyang Xie (lambxsy@google.com)
+
+#ifndef GOOGLE_BREAKPAD_PROCESSOR_FAST_SOURCE_LINE_RESOLVER_H__
+#define GOOGLE_BREAKPAD_PROCESSOR_FAST_SOURCE_LINE_RESOLVER_H__
+
+#include <map>
+#include <string>
+
+#include "google_breakpad/processor/source_line_resolver_base.h"
+
+namespace google_breakpad {
+
+using std::map;
+
+class FastSourceLineResolver : public SourceLineResolverBase {
+ public:
+  FastSourceLineResolver();
+  virtual ~FastSourceLineResolver() { }
+
+  using SourceLineResolverBase::FillSourceLineInfo;
+  using SourceLineResolverBase::FindCFIFrameInfo;
+  using SourceLineResolverBase::FindWindowsFrameInfo;
+  using SourceLineResolverBase::HasModule;
+  using SourceLineResolverBase::IsModuleCorrupt;
+  using SourceLineResolverBase::LoadModule;
+  using SourceLineResolverBase::LoadModuleUsingMapBuffer;
+  using SourceLineResolverBase::LoadModuleUsingMemoryBuffer;
+  using SourceLineResolverBase::UnloadModule;
+
+ private:
+  // Friend declarations.
+  friend class ModuleComparer;
+  friend class ModuleSerializer;
+  friend class FastModuleFactory;
+
+  // Nested types that will derive from corresponding nested types defined in
+  // SourceLineResolverBase.
+  struct Line;
+  struct Function;
+  struct PublicSymbol;
+  class Module;
+
+  // Deserialize raw memory data to construct a WindowsFrameInfo object.
+  static WindowsFrameInfo CopyWFI(const char *raw_memory);
+
+  // FastSourceLineResolver requires the memory buffer stays alive during the
+  // lifetime of a corresponding module, therefore it needs to redefine this
+  // virtual method.
+  virtual bool ShouldDeleteMemoryBufferAfterLoadModule();
+
+  // Disallow unwanted copy ctor and assignment operator
+  FastSourceLineResolver(const FastSourceLineResolver&);
+  void operator=(const FastSourceLineResolver&);
+};
+
+}  // namespace google_breakpad
+
+#endif  // GOOGLE_BREAKPAD_PROCESSOR_FAST_SOURCE_LINE_RESOLVER_H__
diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/memory_region.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/memory_region.h
new file mode 100644
index 0000000000..30f88df490
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/memory_region.h
@@ -0,0 +1,79 @@
+// Copyright (c) 2010 Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// memory_region.h: Access to memory regions. +// +// A MemoryRegion provides virtual access to a range of memory. It is an +// abstraction allowing the actual source of memory to be independent of +// methods which need to access a virtual memory space. +// +// Author: Mark Mentovai + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_MEMORY_REGION_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_MEMORY_REGION_H__ + + +#include "google_breakpad/common/breakpad_types.h" + + +namespace google_breakpad { + + +class MemoryRegion { + public: + virtual ~MemoryRegion() {} + + // The base address of this memory region. + virtual uint64_t GetBase() const = 0; + + // The size of this memory region. + virtual uint32_t GetSize() const = 0; + + // Access to data of various sizes within the memory region. address + // is a pointer to read, and it must lie within the memory region as + // defined by its base address and size. The location pointed to by + // value is set to the value at address. Byte-swapping is performed + // if necessary so that the value is appropriate for the running + // program. Returns true on success. Fails and returns false if address + // is out of the region's bounds (after considering the width of value), + // or for other types of errors. + virtual bool GetMemoryAtAddress(uint64_t address, uint8_t* value) const = 0; + virtual bool GetMemoryAtAddress(uint64_t address, uint16_t* value) const = 0; + virtual bool GetMemoryAtAddress(uint64_t address, uint32_t* value) const = 0; + virtual bool GetMemoryAtAddress(uint64_t address, uint64_t* value) const = 0; + + // Print a human-readable representation of the object to stdout. + virtual void Print() const = 0; +}; + + +} // namespace google_breakpad + + +#endif // GOOGLE_BREAKPAD_PROCESSOR_MEMORY_REGION_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/microdump.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/microdump.h new file mode 100644 index 0000000000..abdaecb19a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/microdump.h @@ -0,0 +1,126 @@ +// Copyright (c) 2014 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
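Before moving on to the microdump reader, a short usage sketch for the MemoryRegion interface above: stack walkers call GetMemoryAtAddress with the pointer width they need, and the region takes care of bounds checks and byte swapping. The helper below is illustrative only; the region would come from a minidump or microdump.

#include <cstdint>

#include "google_breakpad/processor/memory_region.h"

// Illustrative helper: read a 64-bit candidate return address from |stack|.
// Returns false if |stack_pointer| is outside the captured region.
bool ReadReturnAddress(const google_breakpad::MemoryRegion& stack,
                       uint64_t stack_pointer,
                       uint64_t* return_address) {
  return stack.GetMemoryAtAddress(stack_pointer, return_address);
}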
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// microdump.h: A microdump reader. Microdump is a minified variant of a
+// minidump (see minidump.h for documentation) which contains the minimum
+// amount of information required to get a stack trace for the crashing thread.
+// The information contained in a microdump is:
+// - the crashing thread stack
+// - system information (os type / version)
+// - cpu context (state of the registers)
+// - list of mmaps
+
+#ifndef GOOGLE_BREAKPAD_PROCESSOR_MICRODUMP_H__
+#define GOOGLE_BREAKPAD_PROCESSOR_MICRODUMP_H__
+
+#include <string>
+#include <vector>
+
+#include "common/scoped_ptr.h"
+#include "common/using_std_string.h"
+#include "google_breakpad/processor/dump_context.h"
+#include "google_breakpad/processor/memory_region.h"
+#include "google_breakpad/processor/system_info.h"
+#include "processor/basic_code_modules.h"
+
+namespace google_breakpad {
+
+// MicrodumpModuleList contains all of the loaded code modules for a process
+// in the form of MicrodumpModules. It maintains a vector of these modules
+// and provides access to a code module corresponding to a specific address.
+class MicrodumpModules : public BasicCodeModules {
+ public:
+  // Takes over ownership of |module|.
+  void Add(const CodeModule* module);
+};
+
+// MicrodumpContext carries a CPU-specific context.
+// See dump_context.h for documentation.
+class MicrodumpContext : public DumpContext {
+ public:
+  virtual void SetContextARM(MDRawContextARM* arm);
+  virtual void SetContextARM64(MDRawContextARM64* arm64);
+};
+
+// This class provides access to microdump memory regions.
+// See memory_region.h for documentation.
+class MicrodumpMemoryRegion : public MemoryRegion {
+ public:
+  MicrodumpMemoryRegion();
+  virtual ~MicrodumpMemoryRegion() {}
+
+  // Set this region's address and contents. If we have placed an
+  // instance of this class in a test fixture class, individual tests
+  // can use this to provide the region's contents.
+  void Init(uint64_t base_address, const std::vector<uint8_t>& contents);
+
+  virtual uint64_t GetBase() const;
+  virtual uint32_t GetSize() const;
+
+  virtual bool GetMemoryAtAddress(uint64_t address, uint8_t* value) const;
+  virtual bool GetMemoryAtAddress(uint64_t address, uint16_t* value) const;
+  virtual bool GetMemoryAtAddress(uint64_t address, uint32_t* value) const;
+  virtual bool GetMemoryAtAddress(uint64_t address, uint64_t* value) const;
+
+  // Print a human-readable representation of the object to stdout.
+  virtual void Print() const;
+
+ private:
+  // Fetch a little-endian value from ADDRESS in contents_ whose size
+  // is BYTES, and store it in *VALUE. Returns true on success.
+  template<typename ValueType>
+  bool GetMemoryLittleEndian(uint64_t address, ValueType* value) const;
+
+  uint64_t base_address_;
+  std::vector<uint8_t> contents_;
+};
+
+// Microdump is the user's interface to a microdump file. It provides access to
+// the microdump's context, memory regions and modules.
+class Microdump {
+ public:
+  explicit Microdump(const string& contents);
+  virtual ~Microdump() {}
+
+  DumpContext* GetContext() { return context_.get(); }
+  MicrodumpMemoryRegion* GetMemory() { return stack_region_.get(); }
+  MicrodumpModules* GetModules() { return modules_.get(); }
+  SystemInfo* GetSystemInfo() { return system_info_.get(); }
+
+ private:
+  scoped_ptr<MicrodumpContext> context_;
+  scoped_ptr<MicrodumpMemoryRegion> stack_region_;
+  scoped_ptr<MicrodumpModules> modules_;
+  scoped_ptr<SystemInfo> system_info_;
+};
+
+}  // namespace google_breakpad
+
+#endif  // GOOGLE_BREAKPAD_PROCESSOR_MICRODUMP_H__
+
diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/microdump_processor.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/microdump_processor.h
new file mode 100644
index 0000000000..1322a01c76
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/microdump_processor.h
@@ -0,0 +1,63 @@
+// Copyright (c) 2014, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// The processor for microdump (a reduced dump containing only the state of the
+// crashing thread). See crbug.com/410294 for more info and design docs.
+
+#ifndef GOOGLE_BREAKPAD_PROCESSOR_MICRODUMP_PROCESSOR_H__
+#define GOOGLE_BREAKPAD_PROCESSOR_MICRODUMP_PROCESSOR_H__
+
+#include <string>
+
+#include "common/using_std_string.h"
+#include "google_breakpad/processor/process_result.h"
+
+namespace google_breakpad {
+
+class ProcessState;
+class StackFrameSymbolizer;
+
+class MicrodumpProcessor {
+ public:
+  // Initializes the MicrodumpProcessor with a stack frame symbolizer.
+  // Does not take ownership of frame_symbolizer, which must NOT be NULL.
+ explicit MicrodumpProcessor(StackFrameSymbolizer* frame_symbolizer); + + virtual ~MicrodumpProcessor(); + + // Processes the microdump contents and fills process_state with the result. + google_breakpad::ProcessResult Process(const string& microdump_contents, + ProcessState* process_state); + private: + StackFrameSymbolizer* frame_symbolizer_; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_MICRODUMP_PROCESSOR_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/minidump.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/minidump.h new file mode 100644 index 0000000000..51f15025c1 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/minidump.h @@ -0,0 +1,1030 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// minidump.h: A minidump reader. +// +// The basic structure of this module tracks the structure of the minidump +// file itself. At the top level, a minidump file is represented by a +// Minidump object. Like most other classes in this module, Minidump +// provides a Read method that initializes the object with information from +// the file. Most of the classes in this file are wrappers around the +// "raw" structures found in the minidump file itself, and defined in +// minidump_format.h. For example, each thread is represented by a +// MinidumpThread object, whose parameters are specified in an MDRawThread +// structure. A properly byte-swapped MDRawThread can be obtained from a +// MinidumpThread easily by calling its thread() method. +// +// Most of the module lazily reads only the portion of the minidump file +// necessary to fulfill the user's request. Calling Minidump::Read +// only reads the minidump's directory. The thread list is not read until +// it is needed, and even once it's read, the memory regions for each +// thread's stack aren't read until they're needed. 
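For the microdump classes declared above, a hedged sketch of how MicrodumpProcessor is typically driven; the StackFrameSymbolizer comes from elsewhere in the processor library and its construction is assumed here, as is the PROCESS_OK result code from process_result.h.

#include <string>

#include "google_breakpad/processor/microdump_processor.h"
#include "google_breakpad/processor/process_state.h"

// Illustrative driver: |contents| holds the raw microdump text;
// MicrodumpProcessor does not take ownership of |symbolizer|.
bool ProcessMicrodump(google_breakpad::StackFrameSymbolizer* symbolizer,
                      const std::string& contents,
                      google_breakpad::ProcessState* state) {
  google_breakpad::MicrodumpProcessor processor(symbolizer);
  return processor.Process(contents, state) == google_breakpad::PROCESS_OK;
}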
This strategy avoids
+// unnecessary file input, and allocating memory for data in which the user
+// has no interest. Note that although memory allocations for a typical
+// minidump file are not particularly large, it is possible for legitimate
+// minidumps to be sizable. A full-memory minidump, for example, contains
+// a snapshot of the entire mapped memory space. Even a normal minidump,
+// with stack memory only, can be large if, for example, the dump was
+// generated in response to a crash that occurred due to an infinite-
+// recursion bug that caused the stack's limits to be exceeded. Finally,
+// some users of this library will unfortunately find themselves in the
+// position of having to process potentially-hostile minidumps that might
+// attempt to cause problems by forcing the minidump processor to over-
+// allocate memory.
+//
+// Memory management in this module is based on a strict
+// you-don't-own-anything policy. The only object owned by the user is
+// the top-level Minidump object, the creation and destruction of which
+// must be the user's own responsibility. All other objects obtained
+// through interaction with this module are ultimately owned by the
+// Minidump object, and will be freed upon the Minidump object's destruction.
+// Because memory regions can potentially involve large allocations, a
+// FreeMemory method is provided by MinidumpMemoryRegion, allowing the user
+// to release data when it is no longer needed. Use of this method is
+// optional but recommended. If freed data is later required, it will
+// be read back in from the minidump file again.
+//
+// There is one exception to this memory management policy:
+// Minidump::ReadString will return a string object to the user, and the user
+// is responsible for its deletion.
+//
+// Author: Mark Mentovai
+
+#ifndef GOOGLE_BREAKPAD_PROCESSOR_MINIDUMP_H__
+#define GOOGLE_BREAKPAD_PROCESSOR_MINIDUMP_H__
+
+#ifndef _WIN32
+#include <unistd.h>
+#endif
+
+#include <fstream>
+#include <map>
+#include <string>
+#include <vector>
+
+#include "common/using_std_string.h"
+#include "google_breakpad/processor/code_module.h"
+#include "google_breakpad/processor/code_modules.h"
+#include "google_breakpad/processor/dump_context.h"
+#include "google_breakpad/processor/dump_object.h"
+#include "google_breakpad/processor/memory_region.h"
+
+
+namespace google_breakpad {
+
+
+using std::map;
+using std::vector;
+
+
+class Minidump;
+template<typename AddressType, typename EntryType> class RangeMap;
+
+
+// MinidumpObject is the base of all Minidump* objects except for Minidump
+// itself.
+class MinidumpObject : public DumpObject {
+ public:
+  virtual ~MinidumpObject() {}
+
+ protected:
+  explicit MinidumpObject(Minidump* minidump);
+
+  // Refers to the Minidump object that is the ultimate parent of this
+  // Some MinidumpObjects are owned by other MinidumpObjects, but at the
+  // root of the ownership tree is always a Minidump. The Minidump object
+  // is kept here for access to its seeking and reading facilities, and
+  // for access to data about the minidump file itself, such as whether
+  // it should be byte-swapped.
+  Minidump* minidump_;
+};
+
+
+// This class exists primarily to provide a virtual destructor in a base
+// class common to all objects that might be stored in
+// Minidump::mStreamObjects. Some object types will never be stored in
+// Minidump::mStreamObjects, but are represented as streams and adhere to the
+// same interface, and may be derived from this class.
+class MinidumpStream : public MinidumpObject { + public: + virtual ~MinidumpStream() {} + + protected: + explicit MinidumpStream(Minidump* minidump); + + private: + // Populate (and validate) the MinidumpStream. minidump_ is expected + // to be positioned at the beginning of the stream, so that the next + // read from the minidump will be at the beginning of the stream. + // expected_size should be set to the stream's length as contained in + // the MDRawDirectory record or other identifying record. A class + // that implements MinidumpStream can compare expected_size to a + // known size as an integrity check. + virtual bool Read(uint32_t expected_size) = 0; +}; + + +// MinidumpContext carries a CPU-specific MDRawContext structure, which +// contains CPU context such as register states. Each thread has its +// own context, and the exception record, if present, also has its own +// context. Note that if the exception record is present, the context it +// refers to is probably what the user wants to use for the exception +// thread, instead of that thread's own context. The exception thread's +// context (as opposed to the exception record's context) will contain +// context for the exception handler (which performs minidump generation), +// and not the context that caused the exception (which is probably what the +// user wants). +class MinidumpContext : public DumpContext { + public: + virtual ~MinidumpContext(); + + protected: + explicit MinidumpContext(Minidump* minidump); + + private: + friend class MinidumpThread; + friend class MinidumpException; + + bool Read(uint32_t expected_size); + + // If the minidump contains a SYSTEM_INFO_STREAM, makes sure that the + // system info stream gives an appropriate CPU type matching the context + // CPU type in context_cpu_type. Returns false if the CPU type does not + // match. Returns true if the CPU type matches or if the minidump does + // not contain a system info stream. + bool CheckAgainstSystemInfo(uint32_t context_cpu_type); + + // Refers to the Minidump object that is the ultimate parent of this + // Some MinidumpObjects are owned by other MinidumpObjects, but at the + // root of the ownership tree is always a Minidump. The Minidump object + // is kept here for access to its seeking and reading facilities, and + // for access to data about the minidump file itself, such as whether + // it should be byte-swapped. + Minidump* minidump_; +}; + + +// MinidumpMemoryRegion does not wrap any MDRaw structure, and only contains +// a reference to an MDMemoryDescriptor. This object is intended to wrap +// portions of a minidump file that contain memory dumps. In normal +// minidumps, each MinidumpThread owns a MinidumpMemoryRegion corresponding +// to the thread's stack memory. MinidumpMemoryList also gives access to +// memory regions in its list as MinidumpMemoryRegions. This class +// adheres to MemoryRegion so that it may be used as a data provider to +// the Stackwalker family of classes. +class MinidumpMemoryRegion : public MinidumpObject, + public MemoryRegion { + public: + virtual ~MinidumpMemoryRegion(); + + static void set_max_bytes(uint32_t max_bytes) { max_bytes_ = max_bytes; } + static uint32_t max_bytes() { return max_bytes_; } + + // Returns a pointer to the base of the memory region. Returns the + // cached value if available, otherwise, reads the minidump file and + // caches the memory region. + const uint8_t* GetMemory() const; + + // The address of the base of the memory region. 
+ uint64_t GetBase() const; + + // The size, in bytes, of the memory region. + uint32_t GetSize() const; + + // Frees the cached memory region, if cached. + void FreeMemory(); + + // Obtains the value of memory at the pointer specified by address. + bool GetMemoryAtAddress(uint64_t address, uint8_t* value) const; + bool GetMemoryAtAddress(uint64_t address, uint16_t* value) const; + bool GetMemoryAtAddress(uint64_t address, uint32_t* value) const; + bool GetMemoryAtAddress(uint64_t address, uint64_t* value) const; + + // Print a human-readable representation of the object to stdout. + void Print() const; + + protected: + explicit MinidumpMemoryRegion(Minidump* minidump); + + private: + friend class MinidumpThread; + friend class MinidumpMemoryList; + + // Identify the base address and size of the memory region, and the + // location it may be found in the minidump file. + void SetDescriptor(MDMemoryDescriptor* descriptor); + + // Implementation for GetMemoryAtAddress + template bool GetMemoryAtAddressInternal(uint64_t address, + T* value) const; + + // The largest memory region that will be read from a minidump. The + // default is 1MB. + static uint32_t max_bytes_; + + // Base address and size of the memory region, and its position in the + // minidump file. + MDMemoryDescriptor* descriptor_; + + // Cached memory. + mutable vector* memory_; +}; + + +// MinidumpThread contains information about a thread of execution, +// including a snapshot of the thread's stack and CPU context. For +// the thread that caused an exception, the context carried by +// MinidumpException is probably desired instead of the CPU context +// provided here. +// Note that a MinidumpThread may be valid() even if it does not +// contain a memory region or context. +class MinidumpThread : public MinidumpObject { + public: + virtual ~MinidumpThread(); + + const MDRawThread* thread() const { return valid_ ? &thread_ : NULL; } + // GetMemory may return NULL even if the MinidumpThread is valid, + // if the thread memory cannot be read. + virtual MinidumpMemoryRegion* GetMemory(); + // GetContext may return NULL even if the MinidumpThread is valid. + virtual MinidumpContext* GetContext(); + + // The thread ID is used to determine if a thread is the exception thread, + // so a special getter is provided to retrieve this data from the + // MDRawThread structure. Returns false if the thread ID cannot be + // determined. + virtual bool GetThreadID(uint32_t *thread_id) const; + + // Print a human-readable representation of the object to stdout. + void Print(); + + // Returns the start address of the thread stack memory region. Returns 0 if + // MinidumpThread is invalid. Note that this method can be called even when + // the thread memory cannot be read and GetMemory returns NULL. + virtual uint64_t GetStartOfStackMemoryRange() const; + + protected: + explicit MinidumpThread(Minidump* minidump); + + private: + // These objects are managed by MinidumpThreadList. + friend class MinidumpThreadList; + + // This works like MinidumpStream::Read, but is driven by + // MinidumpThreadList. No size checking is done, because + // MinidumpThreadList handles that directly. + bool Read(); + + MDRawThread thread_; + MinidumpMemoryRegion* memory_; + MinidumpContext* context_; +}; + + +// MinidumpThreadList contains all of the threads (as MinidumpThreads) in +// a process. 
+class MinidumpThreadList : public MinidumpStream { + public: + virtual ~MinidumpThreadList(); + + static void set_max_threads(uint32_t max_threads) { + max_threads_ = max_threads; + } + static uint32_t max_threads() { return max_threads_; } + + virtual unsigned int thread_count() const { + return valid_ ? thread_count_ : 0; + } + + // Sequential access to threads. + virtual MinidumpThread* GetThreadAtIndex(unsigned int index) const; + + // Random access to threads. + MinidumpThread* GetThreadByID(uint32_t thread_id); + + // Print a human-readable representation of the object to stdout. + void Print(); + + protected: + explicit MinidumpThreadList(Minidump* aMinidump); + + private: + friend class Minidump; + + typedef map IDToThreadMap; + typedef vector MinidumpThreads; + + static const uint32_t kStreamType = MD_THREAD_LIST_STREAM; + + bool Read(uint32_t aExpectedSize); + + // The largest number of threads that will be read from a minidump. The + // default is 256. + static uint32_t max_threads_; + + // Access to threads using the thread ID as the key. + IDToThreadMap id_to_thread_map_; + + // The list of threads. + MinidumpThreads* threads_; + uint32_t thread_count_; +}; + + +// MinidumpModule wraps MDRawModule, which contains information about loaded +// code modules. Access is provided to various data referenced indirectly +// by MDRawModule, such as the module's name and a specification for where +// to locate debugging information for the module. +class MinidumpModule : public MinidumpObject, + public CodeModule { + public: + virtual ~MinidumpModule(); + + static void set_max_cv_bytes(uint32_t max_cv_bytes) { + max_cv_bytes_ = max_cv_bytes; + } + static uint32_t max_cv_bytes() { return max_cv_bytes_; } + + static void set_max_misc_bytes(uint32_t max_misc_bytes) { + max_misc_bytes_ = max_misc_bytes; + } + static uint32_t max_misc_bytes() { return max_misc_bytes_; } + + const MDRawModule* module() const { return valid_ ? &module_ : NULL; } + + // CodeModule implementation + virtual uint64_t base_address() const { + return valid_ ? module_.base_of_image : static_cast(-1); + } + virtual uint64_t size() const { return valid_ ? module_.size_of_image : 0; } + virtual string code_file() const; + virtual string code_identifier() const; + virtual string debug_file() const; + virtual string debug_identifier() const; + virtual string version() const; + virtual const CodeModule* Copy() const; + + // The CodeView record, which contains information to locate the module's + // debugging information (pdb). This is returned as uint8_t* because + // the data can be of types MDCVInfoPDB20* or MDCVInfoPDB70*, or it may be + // of a type unknown to Breakpad, in which case the raw data will still be + // returned but no byte-swapping will have been performed. Check the + // record's signature in the first four bytes to differentiate between + // the various types. Current toolchains generate modules which carry + // MDCVInfoPDB70 by default. Returns a pointer to the CodeView record on + // success, and NULL on failure. On success, the optional |size| argument + // is set to the size of the CodeView record. + const uint8_t* GetCVRecord(uint32_t* size); + + // The miscellaneous debug record, which is obsolete. Current toolchains + // do not generate this type of debugging information (dbg), and this + // field is not expected to be present. Returns a pointer to the debugging + // record on success, and NULL on failure. On success, the optional |size| + // argument is set to the size of the debugging record. 
+ const MDImageDebugMisc* GetMiscRecord(uint32_t* size); + + // Print a human-readable representation of the object to stdout. + void Print(); + + private: + // These objects are managed by MinidumpModuleList. + friend class MinidumpModuleList; + + explicit MinidumpModule(Minidump* minidump); + + // This works like MinidumpStream::Read, but is driven by + // MinidumpModuleList. No size checking is done, because + // MinidumpModuleList handles that directly. + bool Read(); + + // Reads indirectly-referenced data, including the module name, CodeView + // record, and miscellaneous debugging record. This is necessary to allow + // MinidumpModuleList to fully construct MinidumpModule objects without + // requiring seeks to read a contiguous set of MinidumpModule objects. + // All auxiliary data should be available when Read is called, in order to + // allow the CodeModule getters to be const methods. + bool ReadAuxiliaryData(); + + // The largest number of bytes that will be read from a minidump for a + // CodeView record or miscellaneous debugging record, respectively. The + // default for each is 1024. + static uint32_t max_cv_bytes_; + static uint32_t max_misc_bytes_; + + // True after a successful Read. This is different from valid_, which is + // not set true until ReadAuxiliaryData also completes successfully. + // module_valid_ is only used by ReadAuxiliaryData and the functions it + // calls to determine whether the object is ready for auxiliary data to + // be read. + bool module_valid_; + + // True if debug info was read from the module. Certain modules + // may contain debug records in formats we don't support, + // so we can just set this to false to ignore them. + bool has_debug_info_; + + MDRawModule module_; + + // Cached module name. + const string* name_; + + // Cached CodeView record - this is MDCVInfoPDB20 or (likely) + // MDCVInfoPDB70, or possibly something else entirely. Stored as a uint8_t + // because the structure contains a variable-sized string and its exact + // size cannot be known until it is processed. + vector* cv_record_; + + // If cv_record_ is present, cv_record_signature_ contains a copy of the + // CodeView record's first four bytes, for ease of determinining the + // type of structure that cv_record_ contains. + uint32_t cv_record_signature_; + + // Cached MDImageDebugMisc (usually not present), stored as uint8_t + // because the structure contains a variable-sized string and its exact + // size cannot be known until it is processed. + vector* misc_record_; +}; + + +// MinidumpModuleList contains all of the loaded code modules for a process +// in the form of MinidumpModules. It maintains a map of these modules +// so that it may easily provide a code module corresponding to a specific +// address. +class MinidumpModuleList : public MinidumpStream, + public CodeModules { + public: + virtual ~MinidumpModuleList(); + + static void set_max_modules(uint32_t max_modules) { + max_modules_ = max_modules; + } + static uint32_t max_modules() { return max_modules_; } + + // CodeModules implementation. + virtual unsigned int module_count() const { + return valid_ ? 
module_count_ : 0; + } + virtual const MinidumpModule* GetModuleForAddress(uint64_t address) const; + virtual const MinidumpModule* GetMainModule() const; + virtual const MinidumpModule* GetModuleAtSequence( + unsigned int sequence) const; + virtual const MinidumpModule* GetModuleAtIndex(unsigned int index) const; + virtual const CodeModules* Copy() const; + + // Print a human-readable representation of the object to stdout. + void Print(); + + protected: + explicit MinidumpModuleList(Minidump* minidump); + + private: + friend class Minidump; + + typedef vector MinidumpModules; + + static const uint32_t kStreamType = MD_MODULE_LIST_STREAM; + + bool Read(uint32_t expected_size); + + // The largest number of modules that will be read from a minidump. The + // default is 1024. + static uint32_t max_modules_; + + // Access to modules using addresses as the key. + RangeMap *range_map_; + + MinidumpModules *modules_; + uint32_t module_count_; +}; + + +// MinidumpMemoryList corresponds to a minidump's MEMORY_LIST_STREAM stream, +// which references the snapshots of all of the memory regions contained +// within the minidump. For a normal minidump, this includes stack memory +// (also referenced by each MinidumpThread, in fact, the MDMemoryDescriptors +// here and in MDRawThread both point to exactly the same data in a +// minidump file, conserving space), as well as a 256-byte snapshot of memory +// surrounding the instruction pointer in the case of an exception. Other +// types of minidumps may contain significantly more memory regions. Full- +// memory minidumps contain all of a process' mapped memory. +class MinidumpMemoryList : public MinidumpStream { + public: + virtual ~MinidumpMemoryList(); + + static void set_max_regions(uint32_t max_regions) { + max_regions_ = max_regions; + } + static uint32_t max_regions() { return max_regions_; } + + unsigned int region_count() const { return valid_ ? region_count_ : 0; } + + // Sequential access to memory regions. + MinidumpMemoryRegion* GetMemoryRegionAtIndex(unsigned int index); + + // Random access to memory regions. Returns the region encompassing + // the address identified by address. + virtual MinidumpMemoryRegion* GetMemoryRegionForAddress(uint64_t address); + + // Print a human-readable representation of the object to stdout. + void Print(); + + private: + friend class Minidump; + friend class MockMinidumpMemoryList; + + typedef vector MemoryDescriptors; + typedef vector MemoryRegions; + + static const uint32_t kStreamType = MD_MEMORY_LIST_STREAM; + + explicit MinidumpMemoryList(Minidump* minidump); + + bool Read(uint32_t expected_size); + + // The largest number of memory regions that will be read from a minidump. + // The default is 256. + static uint32_t max_regions_; + + // Access to memory regions using addresses as the key. + RangeMap *range_map_; + + // The list of descriptors. This is maintained separately from the list + // of regions, because MemoryRegion doesn't own its MemoryDescriptor, it + // maintains a pointer to it. descriptors_ provides the storage for this + // purpose. + MemoryDescriptors *descriptors_; + + // The list of regions. + MemoryRegions *regions_; + uint32_t region_count_; +}; + + +// MinidumpException wraps MDRawExceptionStream, which contains information +// about the exception that caused the minidump to be generated, if the +// minidump was generated in an exception handler called as a result of an +// exception. 
It also provides access to a MinidumpContext object, which +// contains the CPU context for the exception thread at the time the exception +// occurred. +class MinidumpException : public MinidumpStream { + public: + virtual ~MinidumpException(); + + const MDRawExceptionStream* exception() const { + return valid_ ? &exception_ : NULL; + } + + // The thread ID is used to determine if a thread is the exception thread, + // so a special getter is provided to retrieve this data from the + // MDRawExceptionStream structure. Returns false if the thread ID cannot + // be determined. + bool GetThreadID(uint32_t *thread_id) const; + + MinidumpContext* GetContext(); + + // Print a human-readable representation of the object to stdout. + void Print(); + + private: + friend class Minidump; + + static const uint32_t kStreamType = MD_EXCEPTION_STREAM; + + explicit MinidumpException(Minidump* minidump); + + bool Read(uint32_t expected_size); + + MDRawExceptionStream exception_; + MinidumpContext* context_; +}; + +// MinidumpAssertion wraps MDRawAssertionInfo, which contains information +// about an assertion that caused the minidump to be generated. +class MinidumpAssertion : public MinidumpStream { + public: + virtual ~MinidumpAssertion(); + + const MDRawAssertionInfo* assertion() const { + return valid_ ? &assertion_ : NULL; + } + + string expression() const { + return valid_ ? expression_ : ""; + } + + string function() const { + return valid_ ? function_ : ""; + } + + string file() const { + return valid_ ? file_ : ""; + } + + // Print a human-readable representation of the object to stdout. + void Print(); + + private: + friend class Minidump; + + static const uint32_t kStreamType = MD_ASSERTION_INFO_STREAM; + + explicit MinidumpAssertion(Minidump* minidump); + + bool Read(uint32_t expected_size); + + MDRawAssertionInfo assertion_; + string expression_; + string function_; + string file_; +}; + + +// MinidumpSystemInfo wraps MDRawSystemInfo and provides information about +// the system on which the minidump was generated. See also MinidumpMiscInfo. +class MinidumpSystemInfo : public MinidumpStream { + public: + virtual ~MinidumpSystemInfo(); + + const MDRawSystemInfo* system_info() const { + return valid_ ? &system_info_ : NULL; + } + + // GetOS and GetCPU return textual representations of the operating system + // and CPU that produced the minidump. Unlike most other Minidump* methods, + // they return string objects, not weak pointers. Defined values for + // GetOS() are "mac", "windows", and "linux". Defined values for GetCPU + // are "x86" and "ppc". These methods return an empty string when their + // values are unknown. + string GetOS(); + string GetCPU(); + + // I don't know what CSD stands for, but this field is documented as + // returning a textual representation of the OS service pack. On other + // platforms, this provides additional information about an OS version + // level beyond major.minor.micro. Returns NULL if unknown. + const string* GetCSDVersion(); + + // If a CPU vendor string can be determined, returns a pointer to it, + // otherwise, returns NULL. CPU vendor strings can be determined from + // x86 CPUs with CPUID 0. + const string* GetCPUVendor(); + + // Print a human-readable representation of the object to stdout. + void Print(); + + protected: + explicit MinidumpSystemInfo(Minidump* minidump); + MDRawSystemInfo system_info_; + + // Textual representation of the OS service pack, for minidumps produced + // by MiniDumpWriteDump on Windows. 
+ const string* csd_version_; + + private: + friend class Minidump; + + static const uint32_t kStreamType = MD_SYSTEM_INFO_STREAM; + + bool Read(uint32_t expected_size); + + // A string identifying the CPU vendor, if known. + const string* cpu_vendor_; +}; + + +// MinidumpMiscInfo wraps MDRawMiscInfo and provides information about +// the process that generated the minidump, and optionally additional system +// information. See also MinidumpSystemInfo. +class MinidumpMiscInfo : public MinidumpStream { + public: + const MDRawMiscInfo* misc_info() const { + return valid_ ? &misc_info_ : NULL; + } + + // Print a human-readable representation of the object to stdout. + void Print(); + + private: + friend class Minidump; + friend class TestMinidumpMiscInfo; + + static const uint32_t kStreamType = MD_MISC_INFO_STREAM; + + explicit MinidumpMiscInfo(Minidump* minidump_); + + bool Read(uint32_t expected_size_); + + MDRawMiscInfo misc_info_; + + // Populated by Read. Contains the converted strings from the corresponding + // UTF-16 fields in misc_info_ + string standard_name_; + string daylight_name_; + string build_string_; + string dbg_bld_str_; +}; + + +// MinidumpBreakpadInfo wraps MDRawBreakpadInfo, which is an optional stream in +// a minidump that provides additional information about the process state +// at the time the minidump was generated. +class MinidumpBreakpadInfo : public MinidumpStream { + public: + const MDRawBreakpadInfo* breakpad_info() const { + return valid_ ? &breakpad_info_ : NULL; + } + + // These thread IDs are used to determine if threads deserve special + // treatment, so special getters are provided to retrieve this data from + // the MDRawBreakpadInfo structure. The getters return false if the thread + // IDs cannot be determined. + bool GetDumpThreadID(uint32_t *thread_id) const; + bool GetRequestingThreadID(uint32_t *thread_id) const; + + // Print a human-readable representation of the object to stdout. + void Print(); + + private: + friend class Minidump; + + static const uint32_t kStreamType = MD_BREAKPAD_INFO_STREAM; + + explicit MinidumpBreakpadInfo(Minidump* minidump_); + + bool Read(uint32_t expected_size_); + + MDRawBreakpadInfo breakpad_info_; +}; + +// MinidumpMemoryInfo wraps MDRawMemoryInfo, which provides information +// about mapped memory regions in a process, including their ranges +// and protection. +class MinidumpMemoryInfo : public MinidumpObject { + public: + const MDRawMemoryInfo* info() const { return valid_ ? &memory_info_ : NULL; } + + // The address of the base of the memory region. + uint64_t GetBase() const { return valid_ ? memory_info_.base_address : 0; } + + // The size, in bytes, of the memory region. + uint64_t GetSize() const { return valid_ ? memory_info_.region_size : 0; } + + // Return true if the memory protection allows execution. + bool IsExecutable() const; + + // Return true if the memory protection allows writing. + bool IsWritable() const; + + // Print a human-readable representation of the object to stdout. + void Print(); + + private: + // These objects are managed by MinidumpMemoryInfoList. + friend class MinidumpMemoryInfoList; + + explicit MinidumpMemoryInfo(Minidump* minidump); + + // This works like MinidumpStream::Read, but is driven by + // MinidumpMemoryInfoList. No size checking is done, because + // MinidumpMemoryInfoList handles that directly. 
+ bool Read(); + + MDRawMemoryInfo memory_info_; +}; + +// MinidumpMemoryInfoList contains a list of information about +// mapped memory regions for a process in the form of MDRawMemoryInfo. +// It maintains a map of these structures so that it may easily provide +// info corresponding to a specific address. +class MinidumpMemoryInfoList : public MinidumpStream { + public: + virtual ~MinidumpMemoryInfoList(); + + unsigned int info_count() const { return valid_ ? info_count_ : 0; } + + const MinidumpMemoryInfo* GetMemoryInfoForAddress(uint64_t address) const; + const MinidumpMemoryInfo* GetMemoryInfoAtIndex(unsigned int index) const; + + // Print a human-readable representation of the object to stdout. + void Print(); + + private: + friend class Minidump; + + typedef vector MinidumpMemoryInfos; + + static const uint32_t kStreamType = MD_MEMORY_INFO_LIST_STREAM; + + explicit MinidumpMemoryInfoList(Minidump* minidump); + + bool Read(uint32_t expected_size); + + // Access to memory info using addresses as the key. + RangeMap *range_map_; + + MinidumpMemoryInfos* infos_; + uint32_t info_count_; +}; + + +// Minidump is the user's interface to a minidump file. It wraps MDRawHeader +// and provides access to the minidump's top-level stream directory. +class Minidump { + public: + // path is the pathname of a file containing the minidump. + explicit Minidump(const string& path); + // input is an istream wrapping minidump data. Minidump holds a + // weak pointer to input, and the caller must ensure that the stream + // is valid as long as the Minidump object is. + explicit Minidump(std::istream& input); + + virtual ~Minidump(); + + // path may be empty if the minidump was not opened from a file + virtual string path() const { + return path_; + } + static void set_max_streams(uint32_t max_streams) { + max_streams_ = max_streams; + } + static uint32_t max_streams() { return max_streams_; } + + static void set_max_string_length(uint32_t max_string_length) { + max_string_length_ = max_string_length; + } + static uint32_t max_string_length() { return max_string_length_; } + + virtual const MDRawHeader* header() const { return valid_ ? &header_ : NULL; } + + // Reads the CPU information from the system info stream and generates the + // appropriate CPU flags. The returned context_cpu_flags are the same as + // if the CPU type bits were set in the context_flags of a context record. + // On success, context_cpu_flags will have the flags that identify the CPU. + // If a system info stream is missing, context_cpu_flags will be 0. + // Returns true if the current position in the stream was not changed. + // Returns false when the current location in the stream was changed and the + // attempt to restore the original position failed. + bool GetContextCPUFlagsFromSystemInfo(uint32_t* context_cpu_flags); + + // Reads the minidump file's header and top-level stream directory. + // The minidump is expected to be positioned at the beginning of the + // header. Read() sets up the stream list and map, and validates the + // Minidump object. + virtual bool Read(); + + // The next set of methods are stubs that call GetStream. They exist to + // force code generation of the templatized API within the module, and + // to avoid exposing an ugly API (GetStream needs to accept a garbage + // parameter). 
+ virtual MinidumpThreadList* GetThreadList(); + virtual MinidumpModuleList* GetModuleList(); + virtual MinidumpMemoryList* GetMemoryList(); + virtual MinidumpException* GetException(); + virtual MinidumpAssertion* GetAssertion(); + virtual MinidumpSystemInfo* GetSystemInfo(); + virtual MinidumpMiscInfo* GetMiscInfo(); + virtual MinidumpBreakpadInfo* GetBreakpadInfo(); + virtual MinidumpMemoryInfoList* GetMemoryInfoList(); + + // The next set of methods are provided for users who wish to access + // data in minidump files directly, while leveraging the rest of + // this class and related classes to handle the basic minidump + // structure and known stream types. + + unsigned int GetDirectoryEntryCount() const { + return valid_ ? header_.stream_count : 0; + } + const MDRawDirectory* GetDirectoryEntryAtIndex(unsigned int index) const; + + // The next 2 methods are lower-level I/O routines. They use fd_. + + // Reads count bytes from the minidump at the current position into + // the storage area pointed to by bytes. bytes must be of sufficient + // size. After the read, the file position is advanced by count. + bool ReadBytes(void* bytes, size_t count); + + // Sets the position of the minidump file to offset. + bool SeekSet(off_t offset); + + // Returns the current position of the minidump file. + off_t Tell(); + + // The next 2 methods are medium-level I/O routines. + + // ReadString returns a string which is owned by the caller! offset + // specifies the offset that a length-encoded string is stored at in the + // minidump file. + string* ReadString(off_t offset); + + // SeekToStreamType positions the file at the beginning of a stream + // identified by stream_type, and informs the caller of the stream's + // length by setting *stream_length. Because stream_map maps each stream + // type to only one stream in the file, this might mislead the user into + // thinking that the stream that this seeks to is the only stream with + // type stream_type. That can't happen for streams that these classes + // deal with directly, because they're only supposed to be present in the + // file singly, and that's verified when stream_map_ is built. Users who + // are looking for other stream types should be aware of this + // possibility, and consider using GetDirectoryEntryAtIndex (possibly + // with GetDirectoryEntryCount) if expecting multiple streams of the same + // type in a single minidump file. + bool SeekToStreamType(uint32_t stream_type, uint32_t* stream_length); + + bool swap() const { return valid_ ? swap_ : false; } + + // Print a human-readable representation of the object to stdout. + void Print(); + + private: + // MinidumpStreamInfo is used in the MinidumpStreamMap. It lets + // the Minidump object locate interesting streams quickly, and + // provides a convenient place to stash MinidumpStream objects. + struct MinidumpStreamInfo { + MinidumpStreamInfo() : stream_index(0), stream(NULL) {} + ~MinidumpStreamInfo() { delete stream; } + + // Index into the MinidumpDirectoryEntries vector + unsigned int stream_index; + + // Pointer to the stream if cached, or NULL if not yet populated + MinidumpStream* stream; + }; + + typedef vector MinidumpDirectoryEntries; + typedef map MinidumpStreamMap; + + template T* GetStream(T** stream); + + // Opens the minidump file, or if already open, seeks to the beginning. + bool Open(); + + // The largest number of top-level streams that will be read from a minidump. 
+ // Note that streams are only read (and only consume memory) as needed, + // when directed by the caller. The default is 128. + static uint32_t max_streams_; + + // The maximum length of a UTF-16 string that will be read from a minidump + // in 16-bit words. The default is 1024. UTF-16 strings are converted + // to UTF-8 when stored in memory, and each UTF-16 word will be represented + // by as many as 3 bytes in UTF-8. + static unsigned int max_string_length_; + + MDRawHeader header_; + + // The list of streams. + MinidumpDirectoryEntries* directory_; + + // Access to streams using the stream type as the key. + MinidumpStreamMap* stream_map_; + + // The pathname of the minidump file to process, set in the constructor. + // This may be empty if the minidump was opened directly from a stream. + const string path_; + + // The stream for all file I/O. Used by ReadBytes and SeekSet. + // Set based on the path in Open, or directly in the constructor. + std::istream* stream_; + + // swap_ is true if the minidump file should be byte-swapped. If the + // minidump was produced by a CPU that is other-endian than the CPU + // processing the minidump, this will be true. If the two CPUs are + // same-endian, this will be false. + bool swap_; + + // Validity of the Minidump structure, false immediately after + // construction or after a failed Read(); true following a successful + // Read(). + bool valid_; +}; + + +} // namespace google_breakpad + + +#endif // GOOGLE_BREAKPAD_PROCESSOR_MINIDUMP_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/minidump_processor.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/minidump_processor.h new file mode 100644 index 0000000000..d2c94e2b26 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/minidump_processor.h @@ -0,0 +1,141 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
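The Minidump class that closes the hunk above is the reader's entry point: construct it over a file path or stream, call Read() to parse the header and stream directory, then ask for typed streams. A minimal usage sketch, not part of this patch; the dump path is a placeholder:

    #include "google_breakpad/processor/minidump.h"

    int main() {
      google_breakpad::Minidump dump("/tmp/example.dmp");  // placeholder path
      if (!dump.Read())        // parses the header and the stream directory
        return 1;
      dump.Print();            // human-readable summary on stdout
      google_breakpad::MinidumpException* exception = dump.GetException();
      if (exception)           // NULL when the dump carries no exception stream
        exception->Print();
      return 0;
    }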
+ +#ifndef GOOGLE_BREAKPAD_PROCESSOR_MINIDUMP_PROCESSOR_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_MINIDUMP_PROCESSOR_H__ + +#include +#include + +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/processor/process_result.h" + +namespace google_breakpad { + +class Minidump; +class ProcessState; +class StackFrameSymbolizer; +class SourceLineResolverInterface; +class SymbolSupplier; +struct SystemInfo; + +class MinidumpProcessor { + public: + // Initializes this MinidumpProcessor. supplier should be an + // implementation of the SymbolSupplier abstract base class. + MinidumpProcessor(SymbolSupplier* supplier, + SourceLineResolverInterface* resolver); + + // Initializes the MinidumpProcessor with the option of + // enabling the exploitability framework to analyze dumps + // for probable security relevance. + MinidumpProcessor(SymbolSupplier* supplier, + SourceLineResolverInterface* resolver, + bool enable_exploitability); + + // Initializes the MinidumpProcessor with source line resolver helper, and + // the option of enabling the exploitability framework to analyze dumps + // for probable security relevance. + // Does not take ownership of resolver_helper, which must NOT be NULL. + MinidumpProcessor(StackFrameSymbolizer* stack_frame_symbolizer, + bool enable_exploitability); + + ~MinidumpProcessor(); + + // Processes the minidump file and fills process_state with the result. + ProcessResult Process(const string &minidump_file, + ProcessState* process_state); + + // Processes the minidump structure and fills process_state with the + // result. + ProcessResult Process(Minidump* minidump, + ProcessState* process_state); + // Populates the cpu_* fields of the |info| parameter with textual + // representations of the CPU type that the minidump in |dump| was + // produced on. Returns false if this information is not available in + // the minidump. + static bool GetCPUInfo(Minidump* dump, SystemInfo* info); + + // Populates the os_* fields of the |info| parameter with textual + // representations of the operating system that the minidump in |dump| + // was produced on. Returns false if this information is not available in + // the minidump. + static bool GetOSInfo(Minidump* dump, SystemInfo* info); + + // Populates the |process_create_time| parameter with the create time of the + // crashed process. Returns false if this information is not available in + // the minidump |dump|. + static bool GetProcessCreateTime(Minidump* dump, + uint32_t* process_create_time); + + // Returns a textual representation of the reason that a crash occurred, + // if the minidump in dump was produced as a result of a crash. Returns + // an empty string if this information cannot be determined. If address + // is non-NULL, it will be set to contain the address that caused the + // exception, if this information is available. This will be a code + // address when the crash was caused by problems such as illegal + // instructions or divisions by zero, or a data address when the crash + // was caused by a memory access violation. + static string GetCrashReason(Minidump* dump, uint64_t* address); + + // This function returns true if the passed-in error code is + // something unrecoverable(i.e. retry should not happen). For + // instance, if the minidump is corrupt, then it makes no sense to + // retry as we won't be able to glean additional information. 
+ // However, as an example of the other case, the symbol supplier can + // return an error code indicating it was 'interrupted', which can + // happen of the symbols are fetched from a remote store, and a + // retry might be successful later on. + // You should not call this method with PROCESS_OK! Test for + // that separately before calling this. + static bool IsErrorUnrecoverable(ProcessResult p) { + assert(p != PROCESS_OK); + return (p != PROCESS_SYMBOL_SUPPLIER_INTERRUPTED); + } + + // Returns a textual representation of an assertion included + // in the minidump. Returns an empty string if this information + // does not exist or cannot be determined. + static string GetAssertion(Minidump* dump); + + private: + StackFrameSymbolizer* frame_symbolizer_; + // Indicate whether resolver_helper_ is owned by this instance. + bool own_frame_symbolizer_; + + // This flag enables the exploitability scanner which attempts to + // guess how likely it is that the crash represents an exploitable + // memory corruption issue. + bool enable_exploitability_; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_MINIDUMP_PROCESSOR_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/process_result.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/process_result.h new file mode 100644 index 0000000000..15c7213e9b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/process_result.h @@ -0,0 +1,66 @@ +// Copyright (c) 2014, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_PROCESS_RESULT_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_PROCESS_RESULT_H__ + +namespace google_breakpad { + +// Return type for MinidumpProcessor or MicrodumpProcessor's Process() +enum ProcessResult { + PROCESS_OK, // The dump was processed + // successfully. + + PROCESS_ERROR_MINIDUMP_NOT_FOUND, // The minidump file was not + // found. + + PROCESS_ERROR_NO_MINIDUMP_HEADER, // The minidump file had no + // header. 
+ + PROCESS_ERROR_NO_THREAD_LIST, // The minidump file has no + // thread list. + + PROCESS_ERROR_GETTING_THREAD, // There was an error getting one + // thread's data from th dump. + + PROCESS_ERROR_GETTING_THREAD_ID, // There was an error getting a + // thread id from the thread's + // data. + + PROCESS_ERROR_DUPLICATE_REQUESTING_THREADS, // There was more than one + // requesting thread. + + PROCESS_SYMBOL_SUPPLIER_INTERRUPTED // The dump processing was + // interrupted by the + // SymbolSupplier(not fatal). +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_PROCESS_RESULT_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/process_state.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/process_state.h new file mode 100644 index 0000000000..ac3b603942 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/process_state.h @@ -0,0 +1,189 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// process_state.h: A snapshot of a process, in a fully-digested state. +// +// Author: Mark Mentovai + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_PROCESS_STATE_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_PROCESS_STATE_H__ + +#include +#include + +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/processor/system_info.h" +#include "google_breakpad/processor/minidump.h" + +namespace google_breakpad { + +using std::vector; + +class CallStack; +class CodeModules; + +enum ExploitabilityRating { + EXPLOITABILITY_HIGH, // The crash likely represents + // a exploitable memory corruption + // vulnerability. + + EXPLOITABILITY_MEDIUM, // The crash appears to corrupt + // memory in a way which may be + // exploitable in some situations. + + EXPLOITABLITY_MEDIUM = EXPLOITABILITY_MEDIUM, // an old misspelling + + EXPLOITABILITY_LOW, // The crash either does not corrupt + // memory directly or control over + // the affected data is limited. 
The + // issue may still be exploitable + // on certain platforms or situations. + + EXPLOITABILITY_INTERESTING, // The crash does not appear to be + // directly exploitable. However it + // represents a condition which should + // be further analyzed. + + EXPLOITABILITY_NONE, // The crash does not appear to represent + // an exploitable condition. + + EXPLOITABILITY_NOT_ANALYZED, // The crash was not analyzed for + // exploitability because the engine + // was disabled. + + EXPLOITABILITY_ERR_NOENGINE, // The supplied minidump's platform does + // not have a exploitability engine + // associated with it. + + EXPLOITABILITY_ERR_PROCESSING // An error occured within the + // exploitability engine and no rating + // was calculated. +}; + +class ProcessState { + public: + ProcessState() : modules_(NULL) { Clear(); } + ~ProcessState(); + + // Resets the ProcessState to its default values + void Clear(); + + // Accessors. See the data declarations below. + uint32_t time_date_stamp() const { return time_date_stamp_; } + uint32_t process_create_time() const { return process_create_time_; } + bool crashed() const { return crashed_; } + string crash_reason() const { return crash_reason_; } + uint64_t crash_address() const { return crash_address_; } + string assertion() const { return assertion_; } + int requesting_thread() const { return requesting_thread_; } + const vector* threads() const { return &threads_; } + const vector* thread_memory_regions() const { + return &thread_memory_regions_; + } + const SystemInfo* system_info() const { return &system_info_; } + const CodeModules* modules() const { return modules_; } + const vector* modules_without_symbols() const { + return &modules_without_symbols_; + } + const vector* modules_with_corrupt_symbols() const { + return &modules_with_corrupt_symbols_; + } + ExploitabilityRating exploitability() const { return exploitability_; } + + private: + // MinidumpProcessor and MicrodumpProcessor are responsible for building + // ProcessState objects. + friend class MinidumpProcessor; + friend class MicrodumpProcessor; + + // The time-date stamp of the minidump (time_t format) + uint32_t time_date_stamp_; + + // The time-date stamp when the process was created (time_t format) + uint32_t process_create_time_; + + // True if the process crashed, false if the dump was produced outside + // of an exception handler. + bool crashed_; + + // If the process crashed, the type of crash. OS- and possibly CPU- + // specific. For example, "EXCEPTION_ACCESS_VIOLATION" (Windows), + // "EXC_BAD_ACCESS / KERN_INVALID_ADDRESS" (Mac OS X), "SIGSEGV" + // (other Unix). + string crash_reason_; + + // If the process crashed, and if crash_reason implicates memory, + // the memory address that caused the crash. For data access errors, + // this will be the data address that caused the fault. For code errors, + // this will be the address of the instruction that caused the fault. + uint64_t crash_address_; + + // If there was an assertion that was hit, a textual representation + // of that assertion, possibly including the file and line at which + // it occurred. + string assertion_; + + // The index of the thread that requested a dump be written in the + // threads vector. If a dump was produced as a result of a crash, this + // will point to the thread that crashed. If the dump was produced as + // by user code without crashing, and the dump contains extended Breakpad + // information, this will point to the thread that requested the dump. 
+ // If the dump was not produced as a result of an exception and no + // extended Breakpad information is present, this field will be set to -1, + // indicating that the dump thread is not available. + int requesting_thread_; + + // Stacks for each thread (except possibly the exception handler + // thread) at the time of the crash. + vector threads_; + vector thread_memory_regions_; + + // OS and CPU information. + SystemInfo system_info_; + + // The modules that were loaded into the process represented by the + // ProcessState. + const CodeModules *modules_; + + // The modules that didn't have symbols when the report was processed. + vector modules_without_symbols_; + + // The modules that had corrupt symbols when the report was processed. + vector modules_with_corrupt_symbols_; + + // The exploitability rating as determined by the exploitability + // engine. When the exploitability engine is not enabled this + // defaults to EXPLOITABILITY_NONE. + ExploitabilityRating exploitability_; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_PROCESS_STATE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/source_line_resolver_base.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/source_line_resolver_base.h new file mode 100644 index 0000000000..c720b0c325 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/source_line_resolver_base.h @@ -0,0 +1,128 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// source_line_resolver_base.h: SourceLineResolverBase, an (incomplete) +// implementation of SourceLineResolverInterface. It serves as a common base +// class for concrete implementations: FastSourceLineResolver and +// BasicSourceLineResolver. It is designed for refactoring that removes +// code redundancy in the two concrete source line resolver classes. +// +// See "google_breakpad/processor/source_line_resolver_interface.h" for more +// documentation. 
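Taken together, the minidump_processor.h, process_result.h and process_state.h hunks above form the high-level processing API: hand a dump and a symbol source to MinidumpProcessor, check the ProcessResult, then read the digested ProcessState. A hedged sketch of that flow; BasicSourceLineResolver and SimpleSymbolSupplier live elsewhere in the Breakpad tree (they are not part of these hunks), and "./symbols" is a placeholder directory:

    #include <string>

    #include "google_breakpad/processor/basic_source_line_resolver.h"
    #include "google_breakpad/processor/minidump_processor.h"
    #include "google_breakpad/processor/process_state.h"
    #include "processor/simple_symbol_supplier.h"

    // Returns true if the dump was processed cleanly and records a crash.
    bool ProcessDump(const std::string& dump_path) {
      google_breakpad::SimpleSymbolSupplier supplier("./symbols");
      google_breakpad::BasicSourceLineResolver resolver;
      google_breakpad::MinidumpProcessor processor(&supplier, &resolver);

      google_breakpad::ProcessState state;
      if (processor.Process(dump_path, &state) != google_breakpad::PROCESS_OK)
        return false;
      // requesting_thread() is -1 when no crash or dump-request thread is known.
      return state.crashed() && state.requesting_thread() != -1;
    }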
+ +// Author: Siyang Xie (lambxsy@google.com) + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_SOURCE_LINE_RESOLVER_BASE_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_SOURCE_LINE_RESOLVER_BASE_H__ + +#include +#include +#include + +#include "google_breakpad/processor/source_line_resolver_interface.h" + +namespace google_breakpad { + +using std::map; +using std::set; + +// Forward declaration. +// ModuleFactory is a simple factory interface for creating a Module instance +// at run-time. +class ModuleFactory; + +class SourceLineResolverBase : public SourceLineResolverInterface { + public: + // Read the symbol_data from a file with given file_name. + // The part of code was originally in BasicSourceLineResolver::Module's + // LoadMap() method. + // Place dynamically allocated heap buffer in symbol_data. Caller has the + // ownership of the buffer, and should call delete [] to free the buffer. + static bool ReadSymbolFile(const string &file_name, + char **symbol_data, + size_t *symbol_data_size); + + protected: + // Users are not allowed create SourceLineResolverBase instance directly. + SourceLineResolverBase(ModuleFactory *module_factory); + virtual ~SourceLineResolverBase(); + + // Virtual methods inherited from SourceLineResolverInterface. + virtual bool LoadModule(const CodeModule *module, const string &map_file); + virtual bool LoadModuleUsingMapBuffer(const CodeModule *module, + const string &map_buffer); + virtual bool LoadModuleUsingMemoryBuffer(const CodeModule *module, + char *memory_buffer, + size_t memory_buffer_size); + virtual bool ShouldDeleteMemoryBufferAfterLoadModule(); + virtual void UnloadModule(const CodeModule *module); + virtual bool HasModule(const CodeModule *module); + virtual bool IsModuleCorrupt(const CodeModule *module); + virtual void FillSourceLineInfo(StackFrame *frame); + virtual WindowsFrameInfo *FindWindowsFrameInfo(const StackFrame *frame); + virtual CFIFrameInfo *FindCFIFrameInfo(const StackFrame *frame); + + // Nested structs and classes. + struct Line; + struct Function; + struct PublicSymbol; + struct CompareString { + bool operator()(const string &s1, const string &s2) const; + }; + // Module is an interface for an in-memory symbol file. + class Module; + class AutoFileCloser; + + // All of the modules that are loaded. + typedef map ModuleMap; + ModuleMap *modules_; + + // The loaded modules that were detecting to be corrupt during load. + typedef set ModuleSet; + ModuleSet *corrupt_modules_; + + // All of heap-allocated buffers that are owned locally by resolver. + typedef std::map MemoryMap; + MemoryMap *memory_buffers_; + + // Creates a concrete module at run-time. + ModuleFactory *module_factory_; + + private: + // ModuleFactory needs to have access to protected type Module. + friend class ModuleFactory; + + // Disallow unwanted copy ctor and assignment operator + SourceLineResolverBase(const SourceLineResolverBase&); + void operator=(const SourceLineResolverBase&); +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_SOURCE_LINE_RESOLVER_BASE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/source_line_resolver_interface.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/source_line_resolver_interface.h new file mode 100644 index 0000000000..a694bf2ea1 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/source_line_resolver_interface.h @@ -0,0 +1,117 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2010 Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Abstract interface to return function/file/line info for a memory address. + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_SOURCE_LINE_RESOLVER_INTERFACE_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_SOURCE_LINE_RESOLVER_INTERFACE_H__ + +#include + +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/processor/code_module.h" + +namespace google_breakpad { + +struct StackFrame; +struct WindowsFrameInfo; +class CFIFrameInfo; + +class SourceLineResolverInterface { + public: + typedef uint64_t MemAddr; + + virtual ~SourceLineResolverInterface() {} + + // Adds a module to this resolver, returning true on success. + // + // module should have at least the code_file, debug_file, + // and debug_identifier members populated. + // + // map_file should contain line/address mappings for this module. + virtual bool LoadModule(const CodeModule *module, + const string &map_file) = 0; + // Same as above, but takes the contents of a pre-read map buffer + virtual bool LoadModuleUsingMapBuffer(const CodeModule *module, + const string &map_buffer) = 0; + + // Add an interface to load symbol using C-String data instead of string. + // This is useful in the optimization design for avoiding unnecessary copying + // of symbol data, in order to improve memory efficiency. + // LoadModuleUsingMemoryBuffer() does NOT take ownership of memory_buffer. + // LoadModuleUsingMemoryBuffer() null terminates the passed in buffer, if + // the last character is not a null terminator. + virtual bool LoadModuleUsingMemoryBuffer(const CodeModule *module, + char *memory_buffer, + size_t memory_buffer_size) = 0; + + // Return true if the memory buffer should be deleted immediately after + // LoadModuleUsingMemoryBuffer(). Return false if the memory buffer has to be + // alive during the lifetime of the corresponding Module. + virtual bool ShouldDeleteMemoryBufferAfterLoadModule() = 0; + + // Request that the specified module be unloaded from this resolver. 
+ // A resolver may choose to ignore such a request. + virtual void UnloadModule(const CodeModule *module) = 0; + + // Returns true if the module has been loaded. + virtual bool HasModule(const CodeModule *module) = 0; + + // Returns true if the module has been loaded and it is corrupt. + virtual bool IsModuleCorrupt(const CodeModule *module) = 0; + + // Fills in the function_base, function_name, source_file_name, + // and source_line fields of the StackFrame. The instruction and + // module_name fields must already be filled in. + virtual void FillSourceLineInfo(StackFrame *frame) = 0; + + // If Windows stack walking information is available covering + // FRAME's instruction address, return a WindowsFrameInfo structure + // describing it. If the information is not available, returns NULL. + // A NULL return value does not indicate an error. The caller takes + // ownership of any returned WindowsFrameInfo object. + virtual WindowsFrameInfo *FindWindowsFrameInfo(const StackFrame *frame) = 0; + + // If CFI stack walking information is available covering ADDRESS, + // return a CFIFrameInfo structure describing it. If the information + // is not available, return NULL. The caller takes ownership of any + // returned CFIFrameInfo object. + virtual CFIFrameInfo *FindCFIFrameInfo(const StackFrame *frame) = 0; + + protected: + // SourceLineResolverInterface cannot be instantiated except by subclasses + SourceLineResolverInterface() {} +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_SOURCE_LINE_RESOLVER_INTERFACE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/stack_frame.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/stack_frame.h new file mode 100644 index 0000000000..b55eb9c756 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/stack_frame.h @@ -0,0 +1,144 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
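SourceLineResolverInterface, completed just above, is what the processor drives to turn raw addresses into function and line information. A hedged sketch of loading one module's .sym file (as produced by dump_syms) and symbolizing a frame; the helper name is invented here, the concrete resolver (for example BasicSourceLineResolver, defined elsewhere in the tree) is supplied by the caller, and StackFrame is the type introduced in the next hunk:

    #include <string>

    #include "google_breakpad/processor/source_line_resolver_interface.h"
    #include "google_breakpad/processor/stack_frame.h"

    // frame->instruction must already hold the code address being resolved.
    void SymbolizeFrame(google_breakpad::SourceLineResolverInterface* resolver,
                        const google_breakpad::CodeModule* module,
                        const std::string& map_file,
                        google_breakpad::StackFrame* frame) {
      if (!resolver->HasModule(module) && !resolver->LoadModule(module, map_file))
        return;                             // no symbols; leave the frame bare
      frame->module = module;               // FillSourceLineInfo reads this
      resolver->FillSourceLineInfo(frame);  // fills function_name, source_line, etc.
    }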
+ +#ifndef GOOGLE_BREAKPAD_PROCESSOR_STACK_FRAME_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_STACK_FRAME_H__ + +#include + +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" + +namespace google_breakpad { + +class CodeModule; + +struct StackFrame { + // Indicates how well the instruction pointer derived during + // stack walking is trusted. Since the stack walker can resort to + // stack scanning, it can wind up with dubious frames. + // In rough order of "trust metric". + enum FrameTrust { + FRAME_TRUST_NONE, // Unknown + FRAME_TRUST_SCAN, // Scanned the stack, found this + FRAME_TRUST_CFI_SCAN, // Found while scanning stack using call frame info + FRAME_TRUST_FP, // Derived from frame pointer + FRAME_TRUST_CFI, // Derived from call frame info + FRAME_TRUST_PREWALKED, // Explicitly provided by some external stack walker. + FRAME_TRUST_CONTEXT // Given as instruction pointer in a context + }; + + StackFrame() + : instruction(), + module(NULL), + function_name(), + function_base(), + source_file_name(), + source_line(), + source_line_base(), + trust(FRAME_TRUST_NONE) {} + virtual ~StackFrame() {} + + // Return a string describing how this stack frame was found + // by the stackwalker. + string trust_description() const { + switch (trust) { + case StackFrame::FRAME_TRUST_CONTEXT: + return "given as instruction pointer in context"; + case StackFrame::FRAME_TRUST_PREWALKED: + return "recovered by external stack walker"; + case StackFrame::FRAME_TRUST_CFI: + return "call frame info"; + case StackFrame::FRAME_TRUST_CFI_SCAN: + return "call frame info with scanning"; + case StackFrame::FRAME_TRUST_FP: + return "previous frame's frame pointer"; + case StackFrame::FRAME_TRUST_SCAN: + return "stack scanning"; + default: + return "unknown"; + } + }; + + // Return the actual return address, as saved on the stack or in a + // register. See the comments for 'instruction', below, for details. + virtual uint64_t ReturnAddress() const { return instruction; } + + // The program counter location as an absolute virtual address. + // + // - For the innermost called frame in a stack, this will be an exact + // program counter or instruction pointer value. + // + // - For all other frames, this address is within the instruction that + // caused execution to branch to this frame's callee (although it may + // not point to the exact beginning of that instruction). This ensures + // that, when we look up the source code location for this frame, we + // get the source location of the call, not of the point at which + // control will resume when the call returns, which may be on the next + // line. (If the compiler knows the callee never returns, it may even + // place the call instruction at the very end of the caller's machine + // code, such that the "return address" (which will never be used) + // immediately after the call instruction is in an entirely different + // function, perhaps even from a different source file.) + // + // On some architectures, the return address as saved on the stack or in + // a register is fine for looking up the point of the call. On others, it + // requires adjustment. ReturnAddress returns the address as saved by the + // machine. + uint64_t instruction; + + // The module in which the instruction resides. + const CodeModule *module; + + // The function name, may be omitted if debug symbols are not available. + string function_name; + + // The start address of the function, may be omitted if debug symbols + // are not available. 
+ uint64_t function_base; + + // The source file name, may be omitted if debug symbols are not available. + string source_file_name; + + // The (1-based) source line number, may be omitted if debug symbols are + // not available. + int source_line; + + // The start address of the source line, may be omitted if debug symbols + // are not available. + uint64_t source_line_base; + + // Amount of trust the stack walker has in the instruction pointer + // of this frame. + FrameTrust trust; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_STACK_FRAME_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/stack_frame_cpu.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/stack_frame_cpu.h new file mode 100644 index 0000000000..dc5d8ae673 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/stack_frame_cpu.h @@ -0,0 +1,405 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stack_frame_cpu.h: CPU-specific StackFrame extensions. +// +// These types extend the StackFrame structure to carry CPU-specific register +// state. They are defined in this header instead of stack_frame.h to +// avoid the need to include minidump_format.h when only the generic +// StackFrame type is needed. +// +// Author: Mark Mentovai + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_STACK_FRAME_CPU_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_STACK_FRAME_CPU_H__ + +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/stack_frame.h" + +namespace google_breakpad { + +struct WindowsFrameInfo; +class CFIFrameInfo; + +struct StackFrameX86 : public StackFrame { + // ContextValidity has one entry for each relevant hardware pointer + // register (%eip and %esp) and one entry for each general-purpose + // register. 
It's worthwhile having validity flags for caller-saves + // registers: they are valid in the youngest frame, and such a frame + // might save a callee-saves register in a caller-saves register, but + // SimpleCFIWalker won't touch registers unless they're marked as valid. + enum ContextValidity { + CONTEXT_VALID_NONE = 0, + CONTEXT_VALID_EIP = 1 << 0, + CONTEXT_VALID_ESP = 1 << 1, + CONTEXT_VALID_EBP = 1 << 2, + CONTEXT_VALID_EAX = 1 << 3, + CONTEXT_VALID_EBX = 1 << 4, + CONTEXT_VALID_ECX = 1 << 5, + CONTEXT_VALID_EDX = 1 << 6, + CONTEXT_VALID_ESI = 1 << 7, + CONTEXT_VALID_EDI = 1 << 8, + CONTEXT_VALID_ALL = -1 + }; + + StackFrameX86() + : context(), + context_validity(CONTEXT_VALID_NONE), + windows_frame_info(NULL), + cfi_frame_info(NULL) {} + ~StackFrameX86(); + + // Overriden to return the return address as saved on the stack. + virtual uint64_t ReturnAddress() const; + + // Register state. This is only fully valid for the topmost frame in a + // stack. In other frames, the values of nonvolatile registers may be + // present, given sufficient debugging information. Refer to + // context_validity. + MDRawContextX86 context; + + // context_validity is actually ContextValidity, but int is used because + // the OR operator doesn't work well with enumerated types. This indicates + // which fields in context are valid. + int context_validity; + + // Any stack walking information we found describing this.instruction. + // These may be NULL if there is no such information for that address. + WindowsFrameInfo *windows_frame_info; + CFIFrameInfo *cfi_frame_info; +}; + +struct StackFramePPC : public StackFrame { + // ContextValidity should eventually contain entries for the validity of + // other nonvolatile (callee-save) registers as in + // StackFrameX86::ContextValidity, but the ppc stackwalker doesn't currently + // locate registers other than the ones listed here. + enum ContextValidity { + CONTEXT_VALID_NONE = 0, + CONTEXT_VALID_SRR0 = 1 << 0, + CONTEXT_VALID_GPR1 = 1 << 1, + CONTEXT_VALID_ALL = -1 + }; + + StackFramePPC() : context(), context_validity(CONTEXT_VALID_NONE) {} + + // Register state. This is only fully valid for the topmost frame in a + // stack. In other frames, the values of nonvolatile registers may be + // present, given sufficient debugging information. Refer to + // context_validity. + MDRawContextPPC context; + + // context_validity is actually ContextValidity, but int is used because + // the OR operator doesn't work well with enumerated types. This indicates + // which fields in context are valid. + int context_validity; +}; + +struct StackFramePPC64 : public StackFrame { + // ContextValidity should eventually contain entries for the validity of + // other nonvolatile (callee-save) registers as in + // StackFrameX86::ContextValidity, but the ppc stackwalker doesn't currently + // locate registers other than the ones listed here. + enum ContextValidity { + CONTEXT_VALID_NONE = 0, + CONTEXT_VALID_SRR0 = 1 << 0, + CONTEXT_VALID_GPR1 = 1 << 1, + CONTEXT_VALID_ALL = -1 + }; + + StackFramePPC64() : context(), context_validity(CONTEXT_VALID_NONE) {} + + // Register state. This is only fully valid for the topmost frame in a + // stack. In other frames, the values of nonvolatile registers may be + // present, given sufficient debugging information. Refer to + // context_validity. + MDRawContextPPC64 context; + + // context_validity is actually ContextValidity, but int is used because + // the OR operator doesn't work well with enumerated types. 
This indicates + // which fields in context are valid. + int context_validity; +}; + +struct StackFrameAMD64 : public StackFrame { + // ContextValidity has one entry for each register that we might be able + // to recover. + enum ContextValidity { + CONTEXT_VALID_NONE = 0, + CONTEXT_VALID_RAX = 1 << 0, + CONTEXT_VALID_RDX = 1 << 1, + CONTEXT_VALID_RCX = 1 << 2, + CONTEXT_VALID_RBX = 1 << 3, + CONTEXT_VALID_RSI = 1 << 4, + CONTEXT_VALID_RDI = 1 << 5, + CONTEXT_VALID_RBP = 1 << 6, + CONTEXT_VALID_RSP = 1 << 7, + CONTEXT_VALID_R8 = 1 << 8, + CONTEXT_VALID_R9 = 1 << 9, + CONTEXT_VALID_R10 = 1 << 10, + CONTEXT_VALID_R11 = 1 << 11, + CONTEXT_VALID_R12 = 1 << 12, + CONTEXT_VALID_R13 = 1 << 13, + CONTEXT_VALID_R14 = 1 << 14, + CONTEXT_VALID_R15 = 1 << 15, + CONTEXT_VALID_RIP = 1 << 16, + CONTEXT_VALID_ALL = -1 + }; + + StackFrameAMD64() : context(), context_validity(CONTEXT_VALID_NONE) {} + + // Overriden to return the return address as saved on the stack. + virtual uint64_t ReturnAddress() const; + + // Register state. This is only fully valid for the topmost frame in a + // stack. In other frames, which registers are present depends on what + // debugging information we had available. Refer to context_validity. + MDRawContextAMD64 context; + + // For each register in context whose value has been recovered, we set + // the corresponding CONTEXT_VALID_ bit in context_validity. + // + // context_validity's type should actually be ContextValidity, but + // we use int instead because the bitwise inclusive or operator + // yields an int when applied to enum values, and C++ doesn't + // silently convert from ints to enums. + int context_validity; +}; + +struct StackFrameSPARC : public StackFrame { + // to be confirmed + enum ContextValidity { + CONTEXT_VALID_NONE = 0, + CONTEXT_VALID_PC = 1 << 0, + CONTEXT_VALID_SP = 1 << 1, + CONTEXT_VALID_FP = 1 << 2, + CONTEXT_VALID_ALL = -1 + }; + + StackFrameSPARC() : context(), context_validity(CONTEXT_VALID_NONE) {} + + // Register state. This is only fully valid for the topmost frame in a + // stack. In other frames, the values of nonvolatile registers may be + // present, given sufficient debugging information. Refer to + // context_validity. + MDRawContextSPARC context; + + // context_validity is actually ContextValidity, but int is used because + // the OR operator doesn't work well with enumerated types. This indicates + // which fields in context are valid. + int context_validity; +}; + +struct StackFrameARM : public StackFrame { + // A flag for each register we might know. + enum ContextValidity { + CONTEXT_VALID_NONE = 0, + CONTEXT_VALID_R0 = 1 << 0, + CONTEXT_VALID_R1 = 1 << 1, + CONTEXT_VALID_R2 = 1 << 2, + CONTEXT_VALID_R3 = 1 << 3, + CONTEXT_VALID_R4 = 1 << 4, + CONTEXT_VALID_R5 = 1 << 5, + CONTEXT_VALID_R6 = 1 << 6, + CONTEXT_VALID_R7 = 1 << 7, + CONTEXT_VALID_R8 = 1 << 8, + CONTEXT_VALID_R9 = 1 << 9, + CONTEXT_VALID_R10 = 1 << 10, + CONTEXT_VALID_R11 = 1 << 11, + CONTEXT_VALID_R12 = 1 << 12, + CONTEXT_VALID_R13 = 1 << 13, + CONTEXT_VALID_R14 = 1 << 14, + CONTEXT_VALID_R15 = 1 << 15, + CONTEXT_VALID_ALL = ~CONTEXT_VALID_NONE, + + // Aliases for registers with dedicated or conventional roles. + CONTEXT_VALID_FP = CONTEXT_VALID_R11, + CONTEXT_VALID_SP = CONTEXT_VALID_R13, + CONTEXT_VALID_LR = CONTEXT_VALID_R14, + CONTEXT_VALID_PC = CONTEXT_VALID_R15 + }; + + StackFrameARM() : context(), context_validity(CONTEXT_VALID_NONE) {} + + // Return the ContextValidity flag for register rN. 
+ static ContextValidity RegisterValidFlag(int n) { + return ContextValidity(1 << n); + } + + // Register state. This is only fully valid for the topmost frame in a + // stack. In other frames, the values of nonvolatile registers may be + // present, given sufficient debugging information. Refer to + // context_validity. + MDRawContextARM context; + + // For each register in context whose value has been recovered, we set + // the corresponding CONTEXT_VALID_ bit in context_validity. + // + // context_validity's type should actually be ContextValidity, but + // we use int instead because the bitwise inclusive or operator + // yields an int when applied to enum values, and C++ doesn't + // silently convert from ints to enums. + int context_validity; +}; + +struct StackFrameARM64 : public StackFrame { + // A flag for each register we might know. Note that we can't use an enum + // here as there are 33 values to represent. + static const uint64_t CONTEXT_VALID_NONE = 0; + static const uint64_t CONTEXT_VALID_X0 = 1ULL << 0; + static const uint64_t CONTEXT_VALID_X1 = 1ULL << 1; + static const uint64_t CONTEXT_VALID_X2 = 1ULL << 2; + static const uint64_t CONTEXT_VALID_X3 = 1ULL << 3; + static const uint64_t CONTEXT_VALID_X4 = 1ULL << 4; + static const uint64_t CONTEXT_VALID_X5 = 1ULL << 5; + static const uint64_t CONTEXT_VALID_X6 = 1ULL << 6; + static const uint64_t CONTEXT_VALID_X7 = 1ULL << 7; + static const uint64_t CONTEXT_VALID_X8 = 1ULL << 8; + static const uint64_t CONTEXT_VALID_X9 = 1ULL << 9; + static const uint64_t CONTEXT_VALID_X10 = 1ULL << 10; + static const uint64_t CONTEXT_VALID_X11 = 1ULL << 11; + static const uint64_t CONTEXT_VALID_X12 = 1ULL << 12; + static const uint64_t CONTEXT_VALID_X13 = 1ULL << 13; + static const uint64_t CONTEXT_VALID_X14 = 1ULL << 14; + static const uint64_t CONTEXT_VALID_X15 = 1ULL << 15; + static const uint64_t CONTEXT_VALID_X16 = 1ULL << 16; + static const uint64_t CONTEXT_VALID_X17 = 1ULL << 17; + static const uint64_t CONTEXT_VALID_X18 = 1ULL << 18; + static const uint64_t CONTEXT_VALID_X19 = 1ULL << 19; + static const uint64_t CONTEXT_VALID_X20 = 1ULL << 20; + static const uint64_t CONTEXT_VALID_X21 = 1ULL << 21; + static const uint64_t CONTEXT_VALID_X22 = 1ULL << 22; + static const uint64_t CONTEXT_VALID_X23 = 1ULL << 23; + static const uint64_t CONTEXT_VALID_X24 = 1ULL << 24; + static const uint64_t CONTEXT_VALID_X25 = 1ULL << 25; + static const uint64_t CONTEXT_VALID_X26 = 1ULL << 26; + static const uint64_t CONTEXT_VALID_X27 = 1ULL << 27; + static const uint64_t CONTEXT_VALID_X28 = 1ULL << 28; + static const uint64_t CONTEXT_VALID_X29 = 1ULL << 29; + static const uint64_t CONTEXT_VALID_X30 = 1ULL << 30; + static const uint64_t CONTEXT_VALID_X31 = 1ULL << 31; + static const uint64_t CONTEXT_VALID_X32 = 1ULL << 32; + static const uint64_t CONTEXT_VALID_ALL = ~CONTEXT_VALID_NONE; + + // Aliases for registers with dedicated or conventional roles. + static const uint64_t CONTEXT_VALID_FP = CONTEXT_VALID_X29; + static const uint64_t CONTEXT_VALID_LR = CONTEXT_VALID_X30; + static const uint64_t CONTEXT_VALID_SP = CONTEXT_VALID_X31; + static const uint64_t CONTEXT_VALID_PC = CONTEXT_VALID_X32; + + StackFrameARM64() : context(), + context_validity(CONTEXT_VALID_NONE) {} + + // Return the validity flag for register xN. + static uint64_t RegisterValidFlag(int n) { + return 1ULL << n; + } + + // Register state. This is only fully valid for the topmost frame in a + // stack. 
In other frames, the values of nonvolatile registers may be + // present, given sufficient debugging information. Refer to + // context_validity. + MDRawContextARM64 context; + + // For each register in context whose value has been recovered, we set + // the corresponding CONTEXT_VALID_ bit in context_validity. + uint64_t context_validity; +}; + +struct StackFrameMIPS : public StackFrame { + // MIPS callee save registers for o32 ABI (32bit registers) are: + // 1. $s0-$s7, + // 2. $sp, $fp + // 3. $f20-$f31 + // + // The register structure is available at + // http://en.wikipedia.org/wiki/MIPS_architecture#Compiler_register_usage + +#define INDEX_MIPS_REG_S0 MD_CONTEXT_MIPS_REG_S0 // 16 +#define INDEX_MIPS_REG_S7 MD_CONTEXT_MIPS_REG_S7 // 23 +#define INDEX_MIPS_REG_GP MD_CONTEXT_MIPS_REG_GP // 28 +#define INDEX_MIPS_REG_RA MD_CONTEXT_MIPS_REG_RA // 31 +#define INDEX_MIPS_REG_PC 34 +#define SHIFT_MIPS_REG_S0 0 +#define SHIFT_MIPS_REG_GP 8 +#define SHIFT_MIPS_REG_PC 12 + + enum ContextValidity { + CONTEXT_VALID_NONE = 0, + CONTEXT_VALID_S0 = 1 << 0, // $16 + CONTEXT_VALID_S1 = 1 << 1, // $17 + CONTEXT_VALID_S2 = 1 << 2, // $18 + CONTEXT_VALID_S3 = 1 << 3, // $19 + CONTEXT_VALID_S4 = 1 << 4, // $20 + CONTEXT_VALID_S5 = 1 << 5, // $21 + CONTEXT_VALID_S6 = 1 << 6, // $22 + CONTEXT_VALID_S7 = 1 << 7, // $23 + // GP is not calee-save for o32 abi. + CONTEXT_VALID_GP = 1 << 8, // $28 + CONTEXT_VALID_SP = 1 << 9, // $29 + CONTEXT_VALID_FP = 1 << 10, // $30 + CONTEXT_VALID_RA = 1 << 11, // $31 + CONTEXT_VALID_PC = 1 << 12, // $34 + CONTEXT_VALID_ALL = ~CONTEXT_VALID_NONE + }; + + // Return the ContextValidity flag for register rN. + static ContextValidity RegisterValidFlag(int n) { + if (n >= INDEX_MIPS_REG_S0 && n <= INDEX_MIPS_REG_S7) + return ContextValidity(1 << (n - INDEX_MIPS_REG_S0 + SHIFT_MIPS_REG_S0)); + else if (n >= INDEX_MIPS_REG_GP && n <= INDEX_MIPS_REG_RA) + return ContextValidity(1 << (n - INDEX_MIPS_REG_GP + SHIFT_MIPS_REG_GP)); + else if (n == INDEX_MIPS_REG_PC) + return ContextValidity(1 << SHIFT_MIPS_REG_PC); + + return CONTEXT_VALID_NONE; + } + + StackFrameMIPS() : context(), context_validity(CONTEXT_VALID_NONE) {} + + // Register state. This is only fully valid for the topmost frame in a + // stack. In other frames, which registers are present depends on what + // debugging information were available. Refer to 'context_validity' below. + MDRawContextMIPS context; + + // For each register in context whose value has been recovered, + // the corresponding CONTEXT_VALID_ bit in 'context_validity' is set. + // + // context_validity's type should actually be ContextValidity, but + // type int is used instead because the bitwise inclusive or operator + // yields an int when applied to enum values, and C++ doesn't + // silently convert from ints to enums. + int context_validity; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_STACK_FRAME_CPU_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/stack_frame_symbolizer.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/stack_frame_symbolizer.h new file mode 100644 index 0000000000..074907cb14 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/stack_frame_symbolizer.h @@ -0,0 +1,108 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2012 Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Helper class that encapsulates the logic of how symbol supplier interacts +// with source line resolver to fill stack frame information. + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_STACK_FRAME_SYMBOLIZER_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_STACK_FRAME_SYMBOLIZER_H__ + +#include +#include + +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/processor/code_module.h" + +namespace google_breakpad { +class CFIFrameInfo; +class CodeModules; +class SymbolSupplier; +class SourceLineResolverInterface; +struct StackFrame; +struct SystemInfo; +struct WindowsFrameInfo; + +class StackFrameSymbolizer { + public: + enum SymbolizerResult { + // Symbol data was found and successfully loaded in resolver. + // This does NOT guarantee source line info is found within symbol file. + kNoError, + // This indicates non-critical error, such as, no code module found for + // frame's instruction, no symbol file, or resolver failed to load symbol. + kError, + // This indicates error for which stack walk should be interrupted + // and retried in future. + kInterrupt, + // Symbol data was found and loaded in resolver however some corruptions + // were detected. + kWarningCorruptSymbols, + }; + + StackFrameSymbolizer(SymbolSupplier* supplier, + SourceLineResolverInterface* resolver); + + virtual ~StackFrameSymbolizer() { } + + // Encapsulate the step of resolving source line info for a stack frame. + // "frame" must not be NULL. + virtual SymbolizerResult FillSourceLineInfo(const CodeModules* modules, + const SystemInfo* system_info, + StackFrame* stack_frame); + + virtual WindowsFrameInfo* FindWindowsFrameInfo(const StackFrame* frame); + + virtual CFIFrameInfo* FindCFIFrameInfo(const StackFrame* frame); + + // Reset internal (locally owned) data as if the helper is re-instantiated. 
+ // A typical case is to call Reset() after processing an individual report + // before start to process next one, in order to reset internal information + // about missing symbols found so far. + virtual void Reset() { no_symbol_modules_.clear(); } + + // Returns true if there is valid implementation for stack symbolization. + virtual bool HasImplementation() { return resolver_ && supplier_; } + + SourceLineResolverInterface* resolver() { return resolver_; } + SymbolSupplier* supplier() { return supplier_; } + + protected: + SymbolSupplier* supplier_; + SourceLineResolverInterface* resolver_; + // A list of modules known to have symbols missing. This helps avoid + // repeated lookups for the missing symbols within one minidump. + std::set no_symbol_modules_; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_STACK_FRAME_SYMBOLIZER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/stackwalker.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/stackwalker.h new file mode 100644 index 0000000000..a1bd3e7fe8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/stackwalker.h @@ -0,0 +1,235 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker.h: Generic stackwalker. +// +// The Stackwalker class is an abstract base class providing common generic +// methods that apply to stacks from all systems. Specific implementations +// will extend this class by providing GetContextFrame and GetCallerFrame +// methods to fill in system-specific data in a StackFrame structure. +// Stackwalker assembles these StackFrame strucutres into a CallStack. 
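A hedged sketch of the flow described above: obtain a CPU-specific walker via StackwalkerForCPU (declared below) and call Walk to assemble a CallStack. Construction of the dump context, stack memory, module list and StackFrameSymbolizer is elided, and the variable names are assumptions:

    google_breakpad::Stackwalker* walker =
        google_breakpad::Stackwalker::StackwalkerForCPU(
            system_info, dump_context, stack_memory, modules, &symbolizer);
    google_breakpad::CallStack stack;
    std::vector<const google_breakpad::CodeModule*> without_symbols;
    std::vector<const google_breakpad::CodeModule*> corrupt_symbols;
    if (walker && walker->Walk(&stack, &without_symbols, &corrupt_symbols)) {
      // stack now holds the walked frames; the two vectors list modules
      // whose symbols were missing or corrupt (not owned by the caller).
    }
    delete walker;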
+// +// Author: Mark Mentovai + + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_STACKWALKER_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_STACKWALKER_H__ + +#include +#include +#include + +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/processor/code_modules.h" +#include "google_breakpad/processor/memory_region.h" +#include "google_breakpad/processor/stack_frame_symbolizer.h" + +namespace google_breakpad { + +class CallStack; +class DumpContext; +class StackFrameSymbolizer; + +using std::set; +using std::vector; + +class Stackwalker { + public: + virtual ~Stackwalker() {} + + // Populates the given CallStack by calling GetContextFrame and + // GetCallerFrame. The frames are further processed to fill all available + // data. Returns true if the stackwalk completed, or false if it was + // interrupted by SymbolSupplier::GetSymbolFile(). + // Upon return, |modules_without_symbols| will be populated with pointers to + // the code modules (CodeModule*) that DON'T have symbols. + // |modules_with_corrupt_symbols| will be populated with pointers to the + // modules which have corrupt symbols. |modules_without_symbols| and + // |modules_with_corrupt_symbols| DO NOT take ownership of the code modules. + // The lifetime of these code modules is the same as the lifetime of the + // CodeModules passed to the StackWalker constructor (which currently + // happens to be the lifetime of the Breakpad's ProcessingState object). + // There is a check for duplicate modules so no duplicates are expected. + bool Walk(CallStack* stack, + vector* modules_without_symbols, + vector* modules_with_corrupt_symbols); + + // Returns a new concrete subclass suitable for the CPU that a stack was + // generated on, according to the CPU type indicated by the context + // argument. If no suitable concrete subclass exists, returns NULL. + static Stackwalker* StackwalkerForCPU( + const SystemInfo* system_info, + DumpContext* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* resolver_helper); + + static void set_max_frames(uint32_t max_frames) { + max_frames_ = max_frames; + max_frames_set_ = true; + } + static uint32_t max_frames() { return max_frames_; } + + static void set_max_frames_scanned(uint32_t max_frames_scanned) { + max_frames_scanned_ = max_frames_scanned; + } + + protected: + // system_info identifies the operating system, NULL or empty if unknown. + // memory identifies a MemoryRegion that provides the stack memory + // for the stack to walk. modules, if non-NULL, is a CodeModules + // object that is used to look up which code module each stack frame is + // associated with. frame_symbolizer is a StackFrameSymbolizer object that + // encapsulates the logic of how source line resolver interacts with symbol + // supplier to symbolize stack frame and look up caller frame information + // (see stack_frame_symbolizer.h). + // frame_symbolizer MUST NOT be NULL (asserted). + Stackwalker(const SystemInfo* system_info, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* frame_symbolizer); + + // This can be used to filter out potential return addresses when + // the stack walker resorts to stack scanning. + // Returns true if any of: + // * This address is within a loaded module, but we don't have symbols + // for that module. + // * This address is within a loaded module for which we have symbols, + // and falls inside a function in that module. + // Returns false otherwise. 
+ bool InstructionAddressSeemsValid(uint64_t address); + + // The default number of words to search through on the stack + // for a return address. + static const int kRASearchWords; + + template + bool ScanForReturnAddress(InstructionType location_start, + InstructionType* location_found, + InstructionType* ip_found, + bool is_context_frame) { + // When searching for the caller of the context frame, + // allow the scanner to look farther down the stack. + const int search_words = is_context_frame ? + kRASearchWords * 4 : + kRASearchWords; + + return ScanForReturnAddress(location_start, location_found, ip_found, + search_words); + } + + // Scan the stack starting at location_start, looking for an address + // that looks like a valid instruction pointer. Addresses must + // 1) be contained in the current stack memory + // 2) pass the checks in InstructionAddressSeemsValid + // + // Returns true if a valid-looking instruction pointer was found. + // When returning true, sets location_found to the address at which + // the value was found, and ip_found to the value contained at that + // location in memory. + template + bool ScanForReturnAddress(InstructionType location_start, + InstructionType* location_found, + InstructionType* ip_found, + int searchwords) { + for (InstructionType location = location_start; + location <= location_start + searchwords * sizeof(InstructionType); + location += sizeof(InstructionType)) { + InstructionType ip; + if (!memory_->GetMemoryAtAddress(location, &ip)) + break; + + if (modules_ && modules_->GetModuleForAddress(ip) && + InstructionAddressSeemsValid(ip)) { + *ip_found = ip; + *location_found = location; + return true; + } + } + // nothing found + return false; + } + + // Information about the system that produced the minidump. Subclasses + // and the SymbolSupplier may find this information useful. + const SystemInfo* system_info_; + + // The stack memory to walk. Subclasses will require this region to + // get information from the stack. + MemoryRegion* memory_; + + // A list of modules, for populating each StackFrame's module information. + // This field is optional and may be NULL. + const CodeModules* modules_; + + protected: + // The StackFrameSymbolizer implementation. + StackFrameSymbolizer* frame_symbolizer_; + + private: + // Obtains the context frame, the innermost called procedure in a stack + // trace. Returns NULL on failure. GetContextFrame allocates a new + // StackFrame (or StackFrame subclass), ownership of which is taken by + // the caller. + virtual StackFrame* GetContextFrame() = 0; + + // Obtains a caller frame. Each call to GetCallerFrame should return the + // frame that called the last frame returned by GetContextFrame or + // GetCallerFrame. To aid this purpose, stack contains the CallStack + // made of frames that have already been walked. GetCallerFrame should + // return NULL on failure or when there are no more caller frames (when + // the end of the stack has been reached). GetCallerFrame allocates a new + // StackFrame (or StackFrame subclass), ownership of which is taken by + // the caller. |stack_scan_allowed| controls whether stack scanning is + // an allowable frame-recovery method, since it is desirable to be able to + // disable stack scanning in performance-critical use cases. + virtual StackFrame* GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed) = 0; + + // The maximum number of frames Stackwalker will walk through. + // This defaults to 1024 to prevent infinite loops. 
+ static uint32_t max_frames_; + + // Keep track of whether max_frames_ has been set by the user, since + // it affects whether or not an error message is printed in the case + // where an unwind got stopped by the limit. + static bool max_frames_set_; + + // The maximum number of stack-scanned and otherwise untrustworthy + // frames allowed. Stack-scanning can be expensive, so the option to + // disable or limit it is helpful in cases where unwind performance is + // important. This defaults to 1024, the same as max_frames_. + static uint32_t max_frames_scanned_; +}; + +} // namespace google_breakpad + + +#endif // GOOGLE_BREAKPAD_PROCESSOR_STACKWALKER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/symbol_supplier.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/symbol_supplier.h new file mode 100644 index 0000000000..a042081f3b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/symbol_supplier.h @@ -0,0 +1,99 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// The caller may implement the SymbolSupplier abstract base class +// to provide symbols for a given module. + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_SYMBOL_SUPPLIER_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_SYMBOL_SUPPLIER_H__ + +#include +#include "common/using_std_string.h" + +namespace google_breakpad { + +class CodeModule; +struct SystemInfo; + +class SymbolSupplier { + public: + // Result type for GetSymbolFile + enum SymbolResult { + // no symbols were found, but continue processing + NOT_FOUND, + + // symbols were found, and the path has been placed in symbol_file + FOUND, + + // stops processing the minidump immediately + INTERRUPT + }; + + virtual ~SymbolSupplier() {} + + // Retrieves the symbol file for the given CodeModule, placing the + // path in symbol_file if successful. system_info contains strings + // identifying the operating system and CPU; SymbolSupplier may use + // to help locate the symbol file. 
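A sketch of the extension point this header defines: a supplier that reports every module as having no symbols, so processing continues unsymbolized. The class name is invented; the override signatures mirror the pure virtual declarations in this header (the last three appear just below this point):

    using google_breakpad::CodeModule;
    using google_breakpad::SymbolSupplier;
    using google_breakpad::SystemInfo;

    class NullSymbolSupplier : public SymbolSupplier {
     public:
      virtual SymbolResult GetSymbolFile(const CodeModule* module,
                                         const SystemInfo* system_info,
                                         std::string* symbol_file) {
        return NOT_FOUND;  // continue processing without symbols
      }
      virtual SymbolResult GetSymbolFile(const CodeModule* module,
                                         const SystemInfo* system_info,
                                         std::string* symbol_file,
                                         std::string* symbol_data) {
        return NOT_FOUND;
      }
      virtual SymbolResult GetCStringSymbolData(const CodeModule* module,
                                                const SystemInfo* system_info,
                                                std::string* symbol_file,
                                                char** symbol_data,
                                                size_t* symbol_data_size) {
        return NOT_FOUND;
      }
      virtual void FreeSymbolData(const CodeModule* module) {}
    };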
system_info may be NULL or its + // fields may be empty if these values are unknown. symbol_file + // must be a pointer to a valid string + virtual SymbolResult GetSymbolFile(const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file) = 0; + // Same as above, except also places symbol data into symbol_data. + // If symbol_data is NULL, the data is not returned. + // TODO(nealsid) Once we have symbol data caching behavior implemented + // investigate making all symbol suppliers implement all methods, + // and make this pure virtual + virtual SymbolResult GetSymbolFile(const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file, + string *symbol_data) = 0; + + // Same as above, except allocates data buffer on heap and then places the + // symbol data into the buffer as C-string. + // SymbolSupplier is responsible for deleting the data buffer. After the call + // to GetCStringSymbolData(), the caller should call FreeSymbolData(const + // Module *module) once the data buffer is no longer needed. + // If symbol_data is not NULL, symbol supplier won't return FOUND unless it + // returns a valid buffer in symbol_data, e.g., returns INTERRUPT on memory + // allocation failure. + virtual SymbolResult GetCStringSymbolData(const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file, + char **symbol_data, + size_t *symbol_data_size) = 0; + + // Frees the data buffer allocated for the module in GetCStringSymbolData. + virtual void FreeSymbolData(const CodeModule *module) = 0; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_SYMBOL_SUPPLIER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/system_info.h b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/system_info.h new file mode 100644 index 0000000000..9583d9e89c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/google_breakpad/processor/system_info.h @@ -0,0 +1,98 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// system_info.h: Information about the system that was running a program +// when a crash report was produced. +// +// Author: Mark Mentovai + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_SYSTEM_INFO_H__ +#define GOOGLE_BREAKPAD_PROCESSOR_SYSTEM_INFO_H__ + +#include + +#include "common/using_std_string.h" + +namespace google_breakpad { + +struct SystemInfo { + public: + SystemInfo() : os(), os_short(), os_version(), cpu(), cpu_info(), + cpu_count(0) {} + + // Resets the SystemInfo object to its default values. + void Clear() { + os.clear(); + os_short.clear(); + os_version.clear(); + cpu.clear(); + cpu_info.clear(); + cpu_count = 0; + } + + // A string identifying the operating system, such as "Windows NT", + // "Mac OS X", or "Linux". If the information is present in the dump but + // its value is unknown, this field will contain a numeric value. If + // the information is not present in the dump, this field will be empty. + string os; + + // A short form of the os string, using lowercase letters and no spaces, + // suitable for use in a filesystem. Possible values include "windows", + // "mac", "linux" and "nacl". Empty if the information is not present + // in the dump or if the OS given by the dump is unknown. The values + // stored in this field should match those used by + // MinidumpSystemInfo::GetOS. + string os_short; + + // A string identifying the version of the operating system, such as + // "5.1.2600 Service Pack 2" or "10.4.8 8L2127". If the dump does not + // contain this information, this field will be empty. + string os_version; + + // A string identifying the basic CPU family, such as "x86" or "ppc". + // If this information is present in the dump but its value is unknown, + // this field will contain a numeric value. If the information is not + // present in the dump, this field will be empty. The values stored in + // this field should match those used by MinidumpSystemInfo::GetCPU. + string cpu; + + // A string further identifying the specific CPU, such as + // "GenuineIntel level 6 model 13 stepping 8". If the information is not + // present in the dump, or additional identifying information is not + // defined for the CPU family, this field will be empty. + string cpu_info; + + // The number of processors in the system. Will be greater than one for + // multi-core systems. + int cpu_count; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_SYSTEM_INFO_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/address_map-inl.h b/TMessagesProj/jni/third_party/breakpad/src/processor/address_map-inl.h new file mode 100644 index 0000000000..251c44781a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/address_map-inl.h @@ -0,0 +1,93 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// address_map-inl.h: Address map implementation. +// +// See address_map.h for documentation. +// +// Author: Mark Mentovai + +#ifndef PROCESSOR_ADDRESS_MAP_INL_H__ +#define PROCESSOR_ADDRESS_MAP_INL_H__ + +#include "processor/address_map.h" + +#include + +#include "processor/logging.h" + +namespace google_breakpad { + +template +bool AddressMap::Store(const AddressType &address, + const EntryType &entry) { + // Ensure that the specified address doesn't conflict with something already + // in the map. + if (map_.find(address) != map_.end()) { + BPLOG(INFO) << "Store failed, address " << HexString(address) << + " is already present"; + return false; + } + + map_.insert(MapValue(address, entry)); + return true; +} + +template +bool AddressMap::Retrieve( + const AddressType &address, + EntryType *entry, AddressType *entry_address) const { + BPLOG_IF(ERROR, !entry) << "AddressMap::Retrieve requires |entry|"; + assert(entry); + + // upper_bound gives the first element whose key is greater than address, + // but we want the first element whose key is less than or equal to address. + // Decrement the iterator to get there, but not if the upper_bound already + // points to the beginning of the map - in that case, address is lower than + // the lowest stored key, so return false. + MapConstIterator iterator = map_.upper_bound(address); + if (iterator == map_.begin()) + return false; + --iterator; + + *entry = iterator->second; + if (entry_address) + *entry_address = iterator->first; + + return true; +} + +template +void AddressMap::Clear() { + map_.clear(); +} + +} // namespace google_breakpad + +#endif // PROCESSOR_ADDRESS_MAP_INL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/address_map.h b/TMessagesProj/jni/third_party/breakpad/src/processor/address_map.h new file mode 100644 index 0000000000..2972cbb9f8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/address_map.h @@ -0,0 +1,85 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// address_map.h: Address maps. +// +// An address map contains a set of objects keyed by address. Objects are +// retrieved from the map by returning the object with the highest key less +// than or equal to the lookup key. +// +// Author: Mark Mentovai + +#ifndef PROCESSOR_ADDRESS_MAP_H__ +#define PROCESSOR_ADDRESS_MAP_H__ + +#include + +namespace google_breakpad { + +// Forward declarations (for later friend declarations). +template class AddressMapSerializer; + +template +class AddressMap { + public: + AddressMap() : map_() {} + + // Inserts an entry into the map. Returns false without storing the entry + // if an entry is already stored in the map at the same address as specified + // by the address argument. + bool Store(const AddressType &address, const EntryType &entry); + + // Locates the entry stored at the highest address less than or equal to + // the address argument. If there is no such range, returns false. The + // entry is returned in entry, which is a required argument. If + // entry_address is not NULL, it will be set to the address that the entry + // was stored at. + bool Retrieve(const AddressType &address, + EntryType *entry, AddressType *entry_address) const; + + // Empties the address map, restoring it to the same state as when it was + // initially created. + void Clear(); + + private: + friend class AddressMapSerializer; + friend class ModuleComparer; + + // Convenience types. + typedef std::map AddressToEntryMap; + typedef typename AddressToEntryMap::const_iterator MapConstIterator; + typedef typename AddressToEntryMap::value_type MapValue; + + // Maps the address of each entry to an EntryType. 
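A small usage sketch of the lookup rule documented above: Retrieve returns the entry stored at the highest address less than or equal to the query. The integer key and value types here are chosen only for illustration:

    #include "processor/address_map-inl.h"

    google_breakpad::AddressMap<unsigned int, int> ranges;
    ranges.Store(0x1000, 1);   // entry 1 begins at 0x1000
    ranges.Store(0x2000, 2);   // entry 2 begins at 0x2000
    int entry = 0;
    unsigned int entry_address = 0;
    if (ranges.Retrieve(0x17ff, &entry, &entry_address)) {
      // entry == 1 and entry_address == 0x1000: the stored address
      // nearest below 0x17ff wins.
    }
    ranges.Clear();  // back to the empty state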
+ AddressToEntryMap map_; +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_ADDRESS_MAP_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/address_map_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/address_map_unittest.cc new file mode 100644 index 0000000000..9b4095b16c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/address_map_unittest.cc @@ -0,0 +1,196 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// address_map_unittest.cc: Unit tests for AddressMap. +// +// Author: Mark Mentovai + +#include +#include + +#include "processor/address_map-inl.h" +#include "processor/linked_ptr.h" +#include "processor/logging.h" + +#define ASSERT_TRUE(condition) \ + if (!(condition)) { \ + fprintf(stderr, "FAIL: %s @ %s:%d\n", #condition, __FILE__, __LINE__); \ + return false; \ + } + +#define ASSERT_FALSE(condition) ASSERT_TRUE(!(condition)) + +#define ASSERT_EQ(e1, e2) ASSERT_TRUE((e1) == (e2)) + +namespace { + +using google_breakpad::AddressMap; +using google_breakpad::linked_ptr; + +// A CountedObject holds an int. A global (not thread safe!) count of +// allocated CountedObjects is maintained to help test memory management. +class CountedObject { + public: + explicit CountedObject(int id) : id_(id) { ++count_; } + ~CountedObject() { --count_; } + + static int count() { return count_; } + int id() const { return id_; } + + private: + static int count_; + int id_; +}; + +int CountedObject::count_; + +typedef int AddressType; +typedef AddressMap< AddressType, linked_ptr > TestMap; + +static bool DoAddressMapTest() { + ASSERT_EQ(CountedObject::count(), 0); + + TestMap test_map; + linked_ptr entry; + AddressType address; + + // Check that a new map is truly empty. + ASSERT_FALSE(test_map.Retrieve(0, &entry, &address)); + ASSERT_FALSE(test_map.Retrieve(INT_MIN, &entry, &address)); + ASSERT_FALSE(test_map.Retrieve(INT_MAX, &entry, &address)); + + // Check that Clear clears the map without leaking. 
+ ASSERT_EQ(CountedObject::count(), 0); + ASSERT_TRUE(test_map.Store(1, + linked_ptr(new CountedObject(0)))); + ASSERT_TRUE(test_map.Retrieve(1, &entry, &address)); + ASSERT_EQ(CountedObject::count(), 1); + test_map.Clear(); + ASSERT_EQ(CountedObject::count(), 1); // still holding entry in this scope + + // Check that a cleared map is truly empty. + ASSERT_FALSE(test_map.Retrieve(0, &entry, &address)); + ASSERT_FALSE(test_map.Retrieve(INT_MIN, &entry, &address)); + ASSERT_FALSE(test_map.Retrieve(INT_MAX, &entry, &address)); + + // Check a single-element map. + ASSERT_TRUE(test_map.Store(10, + linked_ptr(new CountedObject(1)))); + ASSERT_FALSE(test_map.Retrieve(9, &entry, &address)); + ASSERT_TRUE(test_map.Retrieve(10, &entry, &address)); + ASSERT_EQ(CountedObject::count(), 1); + ASSERT_EQ(entry->id(), 1); + ASSERT_EQ(address, 10); + ASSERT_TRUE(test_map.Retrieve(11, &entry, &address)); + ASSERT_TRUE(test_map.Retrieve(11, &entry, NULL)); // NULL ok here + + // Add some more elements. + ASSERT_TRUE(test_map.Store(5, + linked_ptr(new CountedObject(2)))); + ASSERT_EQ(CountedObject::count(), 2); + ASSERT_TRUE(test_map.Store(20, + linked_ptr(new CountedObject(3)))); + ASSERT_TRUE(test_map.Store(15, + linked_ptr(new CountedObject(4)))); + ASSERT_FALSE(test_map.Store(10, + linked_ptr(new CountedObject(5)))); // already in map + ASSERT_TRUE(test_map.Store(16, + linked_ptr(new CountedObject(6)))); + ASSERT_TRUE(test_map.Store(14, + linked_ptr(new CountedObject(7)))); + + // Nothing was stored with a key under 5. Don't use ASSERT inside loops + // because it won't show exactly which key/entry/address failed. + for (AddressType key = 0; key < 5; ++key) { + if (test_map.Retrieve(key, &entry, &address)) { + fprintf(stderr, + "FAIL: retrieve %d expected false observed true @ %s:%d\n", + key, __FILE__, __LINE__); + return false; + } + } + + // Check everything that was stored. + const int id_verify[] = { 0, 0, 0, 0, 0, // unused + 2, 2, 2, 2, 2, // 5 - 9 + 1, 1, 1, 1, 7, // 10 - 14 + 4, 6, 6, 6, 6, // 15 - 19 + 3, 3, 3, 3, 3, // 20 - 24 + 3, 3, 3, 3, 3 }; // 25 - 29 + const AddressType address_verify[] = { 0, 0, 0, 0, 0, // unused + 5, 5, 5, 5, 5, // 5 - 9 + 10, 10, 10, 10, 14, // 10 - 14 + 15, 16, 16, 16, 16, // 15 - 19 + 20, 20, 20, 20, 20, // 20 - 24 + 20, 20, 20, 20, 20 }; // 25 - 29 + + for (AddressType key = 5; key < 30; ++key) { + if (!test_map.Retrieve(key, &entry, &address)) { + fprintf(stderr, + "FAIL: retrieve %d expected true observed false @ %s:%d\n", + key, __FILE__, __LINE__); + return false; + } + if (entry->id() != id_verify[key]) { + fprintf(stderr, + "FAIL: retrieve %d expected entry %d observed %d @ %s:%d\n", + key, id_verify[key], entry->id(), __FILE__, __LINE__); + return false; + } + if (address != address_verify[key]) { + fprintf(stderr, + "FAIL: retrieve %d expected address %d observed %d @ %s:%d\n", + key, address_verify[key], address, __FILE__, __LINE__); + return false; + } + } + + // The stored objects should still be in the map. + ASSERT_EQ(CountedObject::count(), 6); + + return true; +} + +static bool RunTests() { + if (!DoAddressMapTest()) + return false; + + // Leak check. + ASSERT_EQ(CountedObject::count(), 0); + + return true; +} + +} // namespace + +int main(int argc, char **argv) { + BPLOG_INIT(&argc, &argv); + + return RunTests() ? 
0 : 1; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/basic_code_module.h b/TMessagesProj/jni/third_party/breakpad/src/processor/basic_code_module.h new file mode 100644 index 0000000000..3fe782bbea --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/basic_code_module.h @@ -0,0 +1,109 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// basic_code_module.h: Carries information about code modules that are loaded +// into a process. +// +// This is a basic concrete implementation of CodeModule. It cannot be +// instantiated directly, only based on other objects that implement +// the CodeModule interface. It exists to provide a CodeModule implementation +// a place to store information when the life of the original object (such as +// a MinidumpModule) cannot be guaranteed. +// +// Author: Mark Mentovai + +#ifndef PROCESSOR_BASIC_CODE_MODULE_H__ +#define PROCESSOR_BASIC_CODE_MODULE_H__ + +#include + +#include "common/using_std_string.h" +#include "google_breakpad/processor/code_module.h" + +namespace google_breakpad { + +class BasicCodeModule : public CodeModule { + public: + // Creates a new BasicCodeModule given any existing CodeModule + // implementation. This is useful to make a copy of the data relevant to + // the CodeModule interface without requiring all of the resources that + // other CodeModule implementations may require. 
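A hedged sketch of the copying idiom this class exists for, as described above: snapshotting a CodeModule whose original owner (for example a MinidumpModule) may not outlive the caller. Here some_module stands in for any existing CodeModule pointer, using the constructor declared just below:

    // Copy the interface-visible data so it outlives the source object.
    const google_breakpad::CodeModule* snapshot =
        new google_breakpad::BasicCodeModule(some_module);
    // ... later: snapshot->code_file(), snapshot->debug_identifier(), ...
    delete snapshot;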
+ explicit BasicCodeModule(const CodeModule *that) + : base_address_(that->base_address()), + size_(that->size()), + code_file_(that->code_file()), + code_identifier_(that->code_identifier()), + debug_file_(that->debug_file()), + debug_identifier_(that->debug_identifier()), + version_(that->version()) {} + + BasicCodeModule(uint64_t base_address, uint64_t size, + const string &code_file, + const string &code_identifier, + const string &debug_file, + const string &debug_identifier, + const string &version) + : base_address_(base_address), + size_(size), + code_file_(code_file), + code_identifier_(code_identifier), + debug_file_(debug_file), + debug_identifier_(debug_identifier), + version_(version) + {} + virtual ~BasicCodeModule() {} + + // See code_module.h for descriptions of these methods and the associated + // members. + virtual uint64_t base_address() const { return base_address_; } + virtual uint64_t size() const { return size_; } + virtual string code_file() const { return code_file_; } + virtual string code_identifier() const { return code_identifier_; } + virtual string debug_file() const { return debug_file_; } + virtual string debug_identifier() const { return debug_identifier_; } + virtual string version() const { return version_; } + virtual const CodeModule* Copy() const { return new BasicCodeModule(this); } + + private: + uint64_t base_address_; + uint64_t size_; + string code_file_; + string code_identifier_; + string debug_file_; + string debug_identifier_; + string version_; + + // Disallow copy constructor and assignment operator. + BasicCodeModule(const BasicCodeModule &that); + void operator=(const BasicCodeModule &that); +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_BASIC_CODE_MODULE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/basic_code_modules.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/basic_code_modules.cc new file mode 100644 index 0000000000..40b45a8bf8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/basic_code_modules.cc @@ -0,0 +1,128 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// basic_code_modules.cc: Contains all of the CodeModule objects that +// were loaded into a single process. +// +// See basic_code_modules.h for documentation. +// +// Author: Mark Mentovai + +#include "processor/basic_code_modules.h" + +#include + +#include "google_breakpad/processor/code_module.h" +#include "processor/linked_ptr.h" +#include "processor/logging.h" +#include "processor/range_map-inl.h" + +namespace google_breakpad { + +BasicCodeModules::BasicCodeModules(const CodeModules *that) + : main_address_(0), + map_(new RangeMap >()) { + BPLOG_IF(ERROR, !that) << "BasicCodeModules::BasicCodeModules requires " + "|that|"; + assert(that); + + const CodeModule *main_module = that->GetMainModule(); + if (main_module) + main_address_ = main_module->base_address(); + + unsigned int count = that->module_count(); + for (unsigned int module_sequence = 0; + module_sequence < count; + ++module_sequence) { + // Make a copy of the module and insert it into the map. Use + // GetModuleAtIndex because ordering is unimportant when slurping the + // entire list, and GetModuleAtIndex may be faster than + // GetModuleAtSequence. + linked_ptr module( + that->GetModuleAtIndex(module_sequence)->Copy()); + if (!map_->StoreRange(module->base_address(), module->size(), module)) { + BPLOG(ERROR) << "Module " << module->code_file() << + " could not be stored"; + } + } +} + +BasicCodeModules::BasicCodeModules() + : main_address_(0), + map_(new RangeMap >()) { +} + +BasicCodeModules::~BasicCodeModules() { + delete map_; +} + +unsigned int BasicCodeModules::module_count() const { + return map_->GetCount(); +} + +const CodeModule* BasicCodeModules::GetModuleForAddress( + uint64_t address) const { + linked_ptr module; + if (!map_->RetrieveRange(address, &module, NULL, NULL)) { + BPLOG(INFO) << "No module at " << HexString(address); + return NULL; + } + + return module.get(); +} + +const CodeModule* BasicCodeModules::GetMainModule() const { + return GetModuleForAddress(main_address_); +} + +const CodeModule* BasicCodeModules::GetModuleAtSequence( + unsigned int sequence) const { + linked_ptr module; + if (!map_->RetrieveRangeAtIndex(sequence, &module, NULL, NULL)) { + BPLOG(ERROR) << "RetrieveRangeAtIndex failed for sequence " << sequence; + return NULL; + } + + return module.get(); +} + +const CodeModule* BasicCodeModules::GetModuleAtIndex( + unsigned int index) const { + // This class stores everything in a RangeMap, without any more-efficient + // way to walk the list of CodeModule objects. Implement GetModuleAtIndex + // using GetModuleAtSequence, which meets all of the requirements, and + // in addition, guarantees ordering. 
+ return GetModuleAtSequence(index); +} + +const CodeModules* BasicCodeModules::Copy() const { + return new BasicCodeModules(this); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/basic_code_modules.h b/TMessagesProj/jni/third_party/breakpad/src/processor/basic_code_modules.h new file mode 100644 index 0000000000..ace569b8a7 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/basic_code_modules.h @@ -0,0 +1,88 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// basic_code_modules.h: Contains all of the CodeModule objects that +// were loaded into a single process. +// +// This is a basic concrete implementation of CodeModules. It cannot be +// instantiated directly, only based on other objects that implement +// the CodeModules interface. It exists to provide a CodeModules +// implementation a place to store information when the life of the original +// object (such as a MinidumpModuleList) cannot be guaranteed. +// +// Author: Mark Mentovai + +#ifndef PROCESSOR_BASIC_CODE_MODULES_H__ +#define PROCESSOR_BASIC_CODE_MODULES_H__ + +#include "google_breakpad/processor/code_modules.h" + +namespace google_breakpad { + +template class linked_ptr; +template class RangeMap; + +class BasicCodeModules : public CodeModules { + public: + // Creates a new BasicCodeModules object given any existing CodeModules + // implementation. This is useful to make a copy of the data relevant to + // the CodeModules and CodeModule interfaces without requiring all of the + // resources that other implementations may require. A copy will be + // made of each contained CodeModule using CodeModule::Copy. + explicit BasicCodeModules(const CodeModules *that); + + virtual ~BasicCodeModules(); + + // See code_modules.h for descriptions of these methods. 
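A brief usage sketch of the address-to-module lookup declared below (and implemented in the .cc above); modules is assumed to be a populated CodeModules implementation and crash_address an instruction address taken from the dump:

    const google_breakpad::CodeModule* module =
        modules->GetModuleForAddress(crash_address);
    if (module) {
      // module->code_file() names the binary containing the address;
      // a NULL result means no loaded module covers it.
    }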
+ virtual unsigned int module_count() const; + virtual const CodeModule* GetModuleForAddress(uint64_t address) const; + virtual const CodeModule* GetMainModule() const; + virtual const CodeModule* GetModuleAtSequence(unsigned int sequence) const; + virtual const CodeModule* GetModuleAtIndex(unsigned int index) const; + virtual const CodeModules* Copy() const; + + protected: + BasicCodeModules(); + + // The base address of the main module. + uint64_t main_address_; + + // The map used to contain each CodeModule, keyed by each CodeModule's + // address range. + RangeMap > *map_; + + private: + // Disallow copy constructor and assignment operator. + BasicCodeModules(const BasicCodeModules &that); + void operator=(const BasicCodeModules &that); +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_BASIC_CODE_MODULES_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/basic_source_line_resolver.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/basic_source_line_resolver.cc new file mode 100644 index 0000000000..62aa413831 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/basic_source_line_resolver.cc @@ -0,0 +1,609 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// basic_source_line_resolver.cc: BasicSourceLineResolver implementation. +// +// See basic_source_line_resolver.h and basic_source_line_resolver_types.h +// for documentation. 
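For context on what the parser below consumes, a tiny sample of Breakpad symbol text embedded as a C string; the module name, identifiers, addresses and line numbers are invented for illustration:

    // FILE, FUNC, source-line and PUBLIC records in the text format that
    // LoadMapFromMemory parses (MODULE/INFO lines are recognized but ignored).
    static const char kExampleSymbolText[] =
        "MODULE Linux arm64 0123456789ABCDEF0123456789ABCDEF0 libexample.so\n"
        "FILE 0 /src/example.cc\n"
        "FUNC 1130 2c 0 Example::Run()\n"
        "1130 8 42 0\n"        // address size line file-number
        "PUBLIC 1000 0 _start\n";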
+ +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "processor/basic_source_line_resolver_types.h" +#include "processor/module_factory.h" + +#include "processor/tokenize.h" + +using std::map; +using std::vector; +using std::make_pair; + +namespace google_breakpad { + +#ifdef _WIN32 +#define strtok_r strtok_s +#define strtoull _strtoui64 +#endif + +static const char *kWhitespace = " \r\n"; +static const int kMaxErrorsPrinted = 5; +static const int kMaxErrorsBeforeBailing = 100; + +BasicSourceLineResolver::BasicSourceLineResolver() : + SourceLineResolverBase(new BasicModuleFactory) { } + +// static +void BasicSourceLineResolver::Module::LogParseError( + const string &message, + int line_number, + int *num_errors) { + if (++(*num_errors) <= kMaxErrorsPrinted) { + if (line_number > 0) { + BPLOG(ERROR) << "Line " << line_number << ": " << message; + } else { + BPLOG(ERROR) << message; + } + } +} + +bool BasicSourceLineResolver::Module::LoadMapFromMemory( + char *memory_buffer, + size_t memory_buffer_size) { + linked_ptr cur_func; + int line_number = 0; + int num_errors = 0; + char *save_ptr; + + // If the length is 0, we can still pretend we have a symbol file. This is + // for scenarios that want to test symbol lookup, but don't necessarily care + // if certain modules do not have any information, like system libraries. + if (memory_buffer_size == 0) { + return true; + } + + // Make sure the last character is null terminator. + size_t last_null_terminator = memory_buffer_size - 1; + if (memory_buffer[last_null_terminator] != '\0') { + memory_buffer[last_null_terminator] = '\0'; + } + + // Skip any null terminators at the end of the memory buffer, and make sure + // there are no other null terminators in the middle of the memory buffer. + bool has_null_terminator_in_the_middle = false; + while (last_null_terminator > 0 && + memory_buffer[last_null_terminator - 1] == '\0') { + last_null_terminator--; + } + for (size_t i = 0; i < last_null_terminator; i++) { + if (memory_buffer[i] == '\0') { + memory_buffer[i] = '_'; + has_null_terminator_in_the_middle = true; + } + } + if (has_null_terminator_in_the_middle) { + LogParseError( + "Null terminator is not expected in the middle of the symbol data", + line_number, + &num_errors); + } + + char *buffer; + buffer = strtok_r(memory_buffer, "\r\n", &save_ptr); + + while (buffer != NULL) { + ++line_number; + + if (strncmp(buffer, "FILE ", 5) == 0) { + if (!ParseFile(buffer)) { + LogParseError("ParseFile on buffer failed", line_number, &num_errors); + } + } else if (strncmp(buffer, "STACK ", 6) == 0) { + if (!ParseStackInfo(buffer)) { + LogParseError("ParseStackInfo failed", line_number, &num_errors); + } + } else if (strncmp(buffer, "FUNC ", 5) == 0) { + cur_func.reset(ParseFunction(buffer)); + if (!cur_func.get()) { + LogParseError("ParseFunction failed", line_number, &num_errors); + } else { + // StoreRange will fail if the function has an invalid address or size. + // We'll silently ignore this, the function and any corresponding lines + // will be destroyed when cur_func is released. + functions_.StoreRange(cur_func->address, cur_func->size, cur_func); + } + } else if (strncmp(buffer, "PUBLIC ", 7) == 0) { + // Clear cur_func: public symbols don't contain line number information. 
+ cur_func.reset(); + + if (!ParsePublicSymbol(buffer)) { + LogParseError("ParsePublicSymbol failed", line_number, &num_errors); + } + } else if (strncmp(buffer, "MODULE ", 7) == 0) { + // Ignore these. They're not of any use to BasicSourceLineResolver, + // which is fed modules by a SymbolSupplier. These lines are present to + // aid other tools in properly placing symbol files so that they can + // be accessed by a SymbolSupplier. + // + // MODULE + } else if (strncmp(buffer, "INFO ", 5) == 0) { + // Ignore these as well, they're similarly just for housekeeping. + // + // INFO CODE_ID + } else { + if (!cur_func.get()) { + LogParseError("Found source line data without a function", + line_number, &num_errors); + } else { + Line *line = ParseLine(buffer); + if (!line) { + LogParseError("ParseLine failed", line_number, &num_errors); + } else { + cur_func->lines.StoreRange(line->address, line->size, + linked_ptr(line)); + } + } + } + if (num_errors > kMaxErrorsBeforeBailing) { + break; + } + buffer = strtok_r(NULL, "\r\n", &save_ptr); + } + is_corrupt_ = num_errors > 0; + return true; +} + +void BasicSourceLineResolver::Module::LookupAddress(StackFrame *frame) const { + MemAddr address = frame->instruction - frame->module->base_address(); + + // First, look for a FUNC record that covers address. Use + // RetrieveNearestRange instead of RetrieveRange so that, if there + // is no such function, we can use the next function to bound the + // extent of the PUBLIC symbol we find, below. This does mean we + // need to check that address indeed falls within the function we + // find; do the range comparison in an overflow-friendly way. + linked_ptr func; + linked_ptr public_symbol; + MemAddr function_base; + MemAddr function_size; + MemAddr public_address; + if (functions_.RetrieveNearestRange(address, &func, + &function_base, &function_size) && + address >= function_base && address - function_base < function_size) { + frame->function_name = func->name; + frame->function_base = frame->module->base_address() + function_base; + + linked_ptr line; + MemAddr line_base; + if (func->lines.RetrieveRange(address, &line, &line_base, NULL)) { + FileMap::const_iterator it = files_.find(line->source_file_id); + if (it != files_.end()) { + frame->source_file_name = files_.find(line->source_file_id)->second; + } + frame->source_line = line->line; + frame->source_line_base = frame->module->base_address() + line_base; + } + } else if (public_symbols_.Retrieve(address, + &public_symbol, &public_address) && + (!func.get() || public_address > function_base)) { + frame->function_name = public_symbol->name; + frame->function_base = frame->module->base_address() + public_address; + } +} + +WindowsFrameInfo *BasicSourceLineResolver::Module::FindWindowsFrameInfo( + const StackFrame *frame) const { + MemAddr address = frame->instruction - frame->module->base_address(); + scoped_ptr result(new WindowsFrameInfo()); + + // We only know about WindowsFrameInfo::STACK_INFO_FRAME_DATA and + // WindowsFrameInfo::STACK_INFO_FPO. Prefer them in this order. + // WindowsFrameInfo::STACK_INFO_FRAME_DATA is the newer type that + // includes its own program string. + // WindowsFrameInfo::STACK_INFO_FPO is the older type + // corresponding to the FPO_DATA struct. See stackwalker_x86.cc. 
+ linked_ptr frame_info; + if ((windows_frame_info_[WindowsFrameInfo::STACK_INFO_FRAME_DATA] + .RetrieveRange(address, &frame_info)) + || (windows_frame_info_[WindowsFrameInfo::STACK_INFO_FPO] + .RetrieveRange(address, &frame_info))) { + result->CopyFrom(*frame_info.get()); + return result.release(); + } + + // Even without a relevant STACK line, many functions contain + // information about how much space their parameters consume on the + // stack. Use RetrieveNearestRange instead of RetrieveRange, so that + // we can use the function to bound the extent of the PUBLIC symbol, + // below. However, this does mean we need to check that ADDRESS + // falls within the retrieved function's range; do the range + // comparison in an overflow-friendly way. + linked_ptr function; + MemAddr function_base, function_size; + if (functions_.RetrieveNearestRange(address, &function, + &function_base, &function_size) && + address >= function_base && address - function_base < function_size) { + result->parameter_size = function->parameter_size; + result->valid |= WindowsFrameInfo::VALID_PARAMETER_SIZE; + return result.release(); + } + + // PUBLIC symbols might have a parameter size. Use the function we + // found above to limit the range the public symbol covers. + linked_ptr public_symbol; + MemAddr public_address; + if (public_symbols_.Retrieve(address, &public_symbol, &public_address) && + (!function.get() || public_address > function_base)) { + result->parameter_size = public_symbol->parameter_size; + } + + return NULL; +} + +CFIFrameInfo *BasicSourceLineResolver::Module::FindCFIFrameInfo( + const StackFrame *frame) const { + MemAddr address = frame->instruction - frame->module->base_address(); + MemAddr initial_base, initial_size; + string initial_rules; + + // Find the initial rule whose range covers this address. That + // provides an initial set of register recovery rules. Then, walk + // forward from the initial rule's starting address to frame's + // instruction address, applying delta rules. + if (!cfi_initial_rules_.RetrieveRange(address, &initial_rules, + &initial_base, &initial_size)) { + return NULL; + } + + // Create a frame info structure, and populate it with the rules from + // the STACK CFI INIT record. + scoped_ptr rules(new CFIFrameInfo()); + if (!ParseCFIRuleSet(initial_rules, rules.get())) + return NULL; + + // Find the first delta rule that falls within the initial rule's range. + map::const_iterator delta = + cfi_delta_rules_.lower_bound(initial_base); + + // Apply delta rules up to and including the frame's address. 
+ while (delta != cfi_delta_rules_.end() && delta->first <= address) { + ParseCFIRuleSet(delta->second, rules.get()); + delta++; + } + + return rules.release(); +} + +bool BasicSourceLineResolver::Module::ParseFile(char *file_line) { + long index; + char *filename; + if (SymbolParseHelper::ParseFile(file_line, &index, &filename)) { + files_.insert(make_pair(index, string(filename))); + return true; + } + return false; +} + +BasicSourceLineResolver::Function* +BasicSourceLineResolver::Module::ParseFunction(char *function_line) { + uint64_t address; + uint64_t size; + long stack_param_size; + char *name; + if (SymbolParseHelper::ParseFunction(function_line, &address, &size, + &stack_param_size, &name)) { + return new Function(name, address, size, stack_param_size); + } + return NULL; +} + +BasicSourceLineResolver::Line* BasicSourceLineResolver::Module::ParseLine( + char *line_line) { + uint64_t address; + uint64_t size; + long line_number; + long source_file; + + if (SymbolParseHelper::ParseLine(line_line, &address, &size, &line_number, + &source_file)) { + return new Line(address, size, source_file, line_number); + } + return NULL; +} + +bool BasicSourceLineResolver::Module::ParsePublicSymbol(char *public_line) { + uint64_t address; + long stack_param_size; + char *name; + + if (SymbolParseHelper::ParsePublicSymbol(public_line, &address, + &stack_param_size, &name)) { + // A few public symbols show up with an address of 0. This has been seen + // in the dumped output of ntdll.pdb for symbols such as _CIlog, _CIpow, + // RtlDescribeChunkLZNT1, and RtlReserveChunkLZNT1. They would conflict + // with one another if they were allowed into the public_symbols_ map, + // but since the address is obviously invalid, gracefully accept them + // as input without putting them into the map. + if (address == 0) { + return true; + } + + linked_ptr symbol(new PublicSymbol(name, address, + stack_param_size)); + return public_symbols_.Store(address, symbol); + } + return false; +} + +bool BasicSourceLineResolver::Module::ParseStackInfo(char *stack_info_line) { + // Skip "STACK " prefix. + stack_info_line += 6; + + // Find the token indicating what sort of stack frame walking + // information this is. + while (*stack_info_line == ' ') + stack_info_line++; + const char *platform = stack_info_line; + while (!strchr(kWhitespace, *stack_info_line)) + stack_info_line++; + *stack_info_line++ = '\0'; + + // MSVC stack frame info. + if (strcmp(platform, "WIN") == 0) { + int type = 0; + uint64_t rva, code_size; + linked_ptr + stack_frame_info(WindowsFrameInfo::ParseFromString(stack_info_line, + type, + rva, + code_size)); + if (stack_frame_info == NULL) + return false; + + // TODO(mmentovai): I wanted to use StoreRange's return value as this + // method's return value, but MSVC infrequently outputs stack info that + // violates the containment rules. This happens with a section of code + // in strncpy_s in test_app.cc (testdata/minidump2). There, problem looks + // like this: + // STACK WIN 4 4242 1a a 0 ... (STACK WIN 4 base size prolog 0 ...) + // STACK WIN 4 4243 2e 9 0 ... + // ContainedRangeMap treats these two blocks as conflicting. In reality, + // when the prolog lengths are taken into account, the actual code of + // these blocks doesn't conflict. However, we can't take the prolog lengths + // into account directly here because we'd wind up with a different set + // of range conflicts when MSVC outputs stack info like this: + // STACK WIN 4 1040 73 33 0 ... + // STACK WIN 4 105a 59 19 0 ... 
+ // because in both of these entries, the beginning of the code after the + // prolog is at 0x1073, and the last byte of contained code is at 0x10b2. + // Perhaps we could get away with storing ranges by rva + prolog_size + // if ContainedRangeMap were modified to allow replacement of + // already-stored values. + + windows_frame_info_[type].StoreRange(rva, code_size, stack_frame_info); + return true; + } else if (strcmp(platform, "CFI") == 0) { + // DWARF CFI stack frame info + return ParseCFIFrameInfo(stack_info_line); + } else { + // Something unrecognized. + return false; + } +} + +bool BasicSourceLineResolver::Module::ParseCFIFrameInfo( + char *stack_info_line) { + char *cursor; + + // Is this an INIT record or a delta record? + char *init_or_address = strtok_r(stack_info_line, " \r\n", &cursor); + if (!init_or_address) + return false; + + if (strcmp(init_or_address, "INIT") == 0) { + // This record has the form "STACK INIT
". + char *address_field = strtok_r(NULL, " \r\n", &cursor); + if (!address_field) return false; + + char *size_field = strtok_r(NULL, " \r\n", &cursor); + if (!size_field) return false; + + char *initial_rules = strtok_r(NULL, "\r\n", &cursor); + if (!initial_rules) return false; + + MemAddr address = strtoul(address_field, NULL, 16); + MemAddr size = strtoul(size_field, NULL, 16); + cfi_initial_rules_.StoreRange(address, size, initial_rules); + return true; + } + + // This record has the form "STACK
". + char *address_field = init_or_address; + char *delta_rules = strtok_r(NULL, "\r\n", &cursor); + if (!delta_rules) return false; + MemAddr address = strtoul(address_field, NULL, 16); + cfi_delta_rules_[address] = delta_rules; + return true; +} + +// static +bool SymbolParseHelper::ParseFile(char *file_line, long *index, + char **filename) { + // FILE + assert(strncmp(file_line, "FILE ", 5) == 0); + file_line += 5; // skip prefix + + vector tokens; + if (!Tokenize(file_line, kWhitespace, 2, &tokens)) { + return false; + } + + char *after_number; + *index = strtol(tokens[0], &after_number, 10); + if (!IsValidAfterNumber(after_number) || *index < 0 || + *index == std::numeric_limits::max()) { + return false; + } + + *filename = tokens[1]; + if (!filename) { + return false; + } + + return true; +} + +// static +bool SymbolParseHelper::ParseFunction(char *function_line, uint64_t *address, + uint64_t *size, long *stack_param_size, + char **name) { + // FUNC
+ assert(strncmp(function_line, "FUNC ", 5) == 0); + function_line += 5; // skip prefix + + vector tokens; + if (!Tokenize(function_line, kWhitespace, 4, &tokens)) { + return false; + } + + char *after_number; + *address = strtoull(tokens[0], &after_number, 16); + if (!IsValidAfterNumber(after_number) || + *address == std::numeric_limits::max()) { + return false; + } + *size = strtoull(tokens[1], &after_number, 16); + if (!IsValidAfterNumber(after_number) || + *size == std::numeric_limits::max()) { + return false; + } + *stack_param_size = strtol(tokens[2], &after_number, 16); + if (!IsValidAfterNumber(after_number) || + *stack_param_size == std::numeric_limits::max() || + *stack_param_size < 0) { + return false; + } + *name = tokens[3]; + + return true; +} + +// static +bool SymbolParseHelper::ParseLine(char *line_line, uint64_t *address, + uint64_t *size, long *line_number, + long *source_file) { + //
+ vector tokens; + if (!Tokenize(line_line, kWhitespace, 4, &tokens)) { + return false; + } + + char *after_number; + *address = strtoull(tokens[0], &after_number, 16); + if (!IsValidAfterNumber(after_number) || + *address == std::numeric_limits::max()) { + return false; + } + *size = strtoull(tokens[1], &after_number, 16); + if (!IsValidAfterNumber(after_number) || + *size == std::numeric_limits::max()) { + return false; + } + *line_number = strtol(tokens[2], &after_number, 10); + if (!IsValidAfterNumber(after_number) || + *line_number == std::numeric_limits::max()) { + return false; + } + *source_file = strtol(tokens[3], &after_number, 10); + if (!IsValidAfterNumber(after_number) || *source_file < 0 || + *source_file == std::numeric_limits::max()) { + return false; + } + + // Valid line numbers normally start from 1, however there are functions that + // are associated with a source file but not associated with any line number + // (block helper function) and for such functions the symbol file contains 0 + // for the line numbers. Hence, 0 should be treated as a valid line number. + // For more information on block helper functions, please, take a look at: + // http://clang.llvm.org/docs/Block-ABI-Apple.html + if (*line_number < 0) { + return false; + } + + return true; +} + +// static +bool SymbolParseHelper::ParsePublicSymbol(char *public_line, + uint64_t *address, + long *stack_param_size, + char **name) { + // PUBLIC
+ assert(strncmp(public_line, "PUBLIC ", 7) == 0); + public_line += 7; // skip prefix + + vector tokens; + if (!Tokenize(public_line, kWhitespace, 3, &tokens)) { + return false; + } + + char *after_number; + *address = strtoull(tokens[0], &after_number, 16); + if (!IsValidAfterNumber(after_number) || + *address == std::numeric_limits::max()) { + return false; + } + *stack_param_size = strtol(tokens[1], &after_number, 16); + if (!IsValidAfterNumber(after_number) || + *stack_param_size == std::numeric_limits::max() || + *stack_param_size < 0) { + return false; + } + *name = tokens[2]; + + return true; +} + +// static +bool SymbolParseHelper::IsValidAfterNumber(char *after_number) { + if (after_number != NULL && strchr(kWhitespace, *after_number) != NULL) { + return true; + } + return false; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/basic_source_line_resolver_types.h b/TMessagesProj/jni/third_party/breakpad/src/processor/basic_source_line_resolver_types.h new file mode 100644 index 0000000000..a022bc0dbb --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/basic_source_line_resolver_types.h @@ -0,0 +1,177 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// basic_source_line_types.h: definition of nested classes/structs in +// BasicSourceLineResolver. 
It moves the definitions out of +// basic_source_line_resolver.cc, so that other classes could have access +// to these private nested types without including basic_source_line_resolver.cc +// +// Author: Siyang Xie (lambxsy@google.com) + +#ifndef PROCESSOR_BASIC_SOURCE_LINE_RESOLVER_TYPES_H__ +#define PROCESSOR_BASIC_SOURCE_LINE_RESOLVER_TYPES_H__ + +#include +#include + +#include "common/scoped_ptr.h" +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "processor/source_line_resolver_base_types.h" + +#include "processor/address_map-inl.h" +#include "processor/range_map-inl.h" +#include "processor/contained_range_map-inl.h" + +#include "processor/linked_ptr.h" +#include "google_breakpad/processor/stack_frame.h" +#include "processor/cfi_frame_info.h" +#include "processor/windows_frame_info.h" + +namespace google_breakpad { + +struct +BasicSourceLineResolver::Function : public SourceLineResolverBase::Function { + Function(const string &function_name, + MemAddr function_address, + MemAddr code_size, + int set_parameter_size) : Base(function_name, + function_address, + code_size, + set_parameter_size), + lines() { } + RangeMap< MemAddr, linked_ptr > lines; + private: + typedef SourceLineResolverBase::Function Base; +}; + + +class BasicSourceLineResolver::Module : public SourceLineResolverBase::Module { + public: + explicit Module(const string &name) : name_(name), is_corrupt_(false) { } + virtual ~Module() { } + + // Loads a map from the given buffer in char* type. + // Does NOT have ownership of memory_buffer. + // The passed in |memory buffer| is of size |memory_buffer_size|. If it is + // not null terminated, LoadMapFromMemory() will null terminate it by + // modifying the passed in buffer. + virtual bool LoadMapFromMemory(char *memory_buffer, + size_t memory_buffer_size); + + // Tells whether the loaded symbol data is corrupt. Return value is + // undefined, if the symbol data hasn't been loaded yet. + virtual bool IsCorrupt() const { return is_corrupt_; } + + // Looks up the given relative address, and fills the StackFrame struct + // with the result. + virtual void LookupAddress(StackFrame *frame) const; + + // If Windows stack walking information is available covering ADDRESS, + // return a WindowsFrameInfo structure describing it. If the information + // is not available, returns NULL. A NULL return value does not indicate + // an error. The caller takes ownership of any returned WindowsFrameInfo + // object. + virtual WindowsFrameInfo *FindWindowsFrameInfo(const StackFrame *frame) const; + + // If CFI stack walking information is available covering ADDRESS, + // return a CFIFrameInfo structure describing it. If the information + // is not available, return NULL. The caller takes ownership of any + // returned CFIFrameInfo object. + virtual CFIFrameInfo *FindCFIFrameInfo(const StackFrame *frame) const; + + private: + // Friend declarations. + friend class BasicSourceLineResolver; + friend class ModuleComparer; + friend class ModuleSerializer; + + typedef std::map FileMap; + + // Logs parse errors. |*num_errors| is increased every time LogParseError is + // called. + static void LogParseError( + const string &message, + int line_number, + int *num_errors); + + // Parses a file declaration + bool ParseFile(char *file_line); + + // Parses a function declaration, returning a new Function object. + Function* ParseFunction(char *function_line); + + // Parses a line declaration, returning a new Line object. 
+ Line* ParseLine(char *line_line); + + // Parses a PUBLIC symbol declaration, storing it in public_symbols_. + // Returns false if an error occurs. + bool ParsePublicSymbol(char *public_line); + + // Parses a STACK WIN or STACK CFI frame info declaration, storing + // it in the appropriate table. + bool ParseStackInfo(char *stack_info_line); + + // Parses a STACK CFI record, storing it in cfi_frame_info_. + bool ParseCFIFrameInfo(char *stack_info_line); + + string name_; + FileMap files_; + RangeMap< MemAddr, linked_ptr > functions_; + AddressMap< MemAddr, linked_ptr > public_symbols_; + bool is_corrupt_; + + // Each element in the array is a ContainedRangeMap for a type + // listed in WindowsFrameInfoTypes. These are split by type because + // there may be overlaps between maps of different types, but some + // information is only available as certain types. + ContainedRangeMap< MemAddr, linked_ptr > + windows_frame_info_[WindowsFrameInfo::STACK_INFO_LAST]; + + // DWARF CFI stack walking data. The Module stores the initial rule sets + // and rule deltas as strings, just as they appear in the symbol file: + // although the file may contain hundreds of thousands of STACK CFI + // records, walking a stack will only ever use a few of them, so it's + // best to delay parsing a record until it's actually needed. + + // STACK CFI INIT records: for each range, an initial set of register + // recovery rules. The RangeMap's itself gives the starting and ending + // addresses. + RangeMap cfi_initial_rules_; + + // STACK CFI records: at a given address, the changes to the register + // recovery rules that take effect at that address. The map key is the + // starting address; the ending address is the key of the next entry in + // this map, or the end of the range as given by the cfi_initial_rules_ + // entry (which FindCFIFrameInfo looks up first). + std::map cfi_delta_rules_; +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_BASIC_SOURCE_LINE_RESOLVER_TYPES_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/basic_source_line_resolver_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/basic_source_line_resolver_unittest.cc new file mode 100644 index 0000000000..7d4cd5c50b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/basic_source_line_resolver_unittest.cc @@ -0,0 +1,680 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include +#include + +#include + +#include "breakpad_googletest_includes.h" +#include "common/scoped_ptr.h" +#include "common/using_std_string.h" +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "google_breakpad/processor/code_module.h" +#include "google_breakpad/processor/stack_frame.h" +#include "google_breakpad/processor/memory_region.h" +#include "processor/linked_ptr.h" +#include "processor/logging.h" +#include "processor/windows_frame_info.h" +#include "processor/cfi_frame_info.h" + +namespace { + +using google_breakpad::BasicSourceLineResolver; +using google_breakpad::CFIFrameInfo; +using google_breakpad::CodeModule; +using google_breakpad::MemoryRegion; +using google_breakpad::StackFrame; +using google_breakpad::WindowsFrameInfo; +using google_breakpad::linked_ptr; +using google_breakpad::scoped_ptr; +using google_breakpad::SymbolParseHelper; + +class TestCodeModule : public CodeModule { + public: + TestCodeModule(string code_file) : code_file_(code_file) {} + virtual ~TestCodeModule() {} + + virtual uint64_t base_address() const { return 0; } + virtual uint64_t size() const { return 0xb000; } + virtual string code_file() const { return code_file_; } + virtual string code_identifier() const { return ""; } + virtual string debug_file() const { return ""; } + virtual string debug_identifier() const { return ""; } + virtual string version() const { return ""; } + virtual const CodeModule* Copy() const { + return new TestCodeModule(code_file_); + } + + private: + string code_file_; +}; + +// A mock memory region object, for use by the STACK CFI tests. +class MockMemoryRegion: public MemoryRegion { + uint64_t GetBase() const { return 0x10000; } + uint32_t GetSize() const { return 0x01000; } + bool GetMemoryAtAddress(uint64_t address, uint8_t *value) const { + *value = address & 0xff; + return true; + } + bool GetMemoryAtAddress(uint64_t address, uint16_t *value) const { + *value = address & 0xffff; + return true; + } + bool GetMemoryAtAddress(uint64_t address, uint32_t *value) const { + switch (address) { + case 0x10008: *value = 0x98ecadc3; break; // saved %ebx + case 0x1000c: *value = 0x878f7524; break; // saved %esi + case 0x10010: *value = 0x6312f9a5; break; // saved %edi + case 0x10014: *value = 0x10038; break; // caller's %ebp + case 0x10018: *value = 0xf6438648; break; // return address + default: *value = 0xdeadbeef; break; // junk + } + return true; + } + bool GetMemoryAtAddress(uint64_t address, uint64_t *value) const { + *value = address; + return true; + } + void Print() const { + assert(false); + } +}; + +// Verify that, for every association in ACTUAL, EXPECTED has the same +// association. (That is, ACTUAL's associations should be a subset of +// EXPECTED's.) Also verify that ACTUAL has associations for ".ra" and +// ".cfa". 
+static bool VerifyRegisters( + const char *file, int line, + const CFIFrameInfo::RegisterValueMap &expected, + const CFIFrameInfo::RegisterValueMap &actual) { + CFIFrameInfo::RegisterValueMap::const_iterator a; + a = actual.find(".cfa"); + if (a == actual.end()) + return false; + a = actual.find(".ra"); + if (a == actual.end()) + return false; + for (a = actual.begin(); a != actual.end(); a++) { + CFIFrameInfo::RegisterValueMap::const_iterator e = + expected.find(a->first); + if (e == expected.end()) { + fprintf(stderr, "%s:%d: unexpected register '%s' recovered, value 0x%x\n", + file, line, a->first.c_str(), a->second); + return false; + } + if (e->second != a->second) { + fprintf(stderr, + "%s:%d: register '%s' recovered value was 0x%x, expected 0x%x\n", + file, line, a->first.c_str(), a->second, e->second); + return false; + } + // Don't complain if this doesn't recover all registers. Although + // the DWARF spec says that unmentioned registers are undefined, + // GCC uses omission to mean that they are unchanged. + } + return true; +} + + +static bool VerifyEmpty(const StackFrame &frame) { + if (frame.function_name.empty() && + frame.source_file_name.empty() && + frame.source_line == 0) + return true; + return false; +} + +static void ClearSourceLineInfo(StackFrame *frame) { + frame->function_name.clear(); + frame->module = NULL; + frame->source_file_name.clear(); + frame->source_line = 0; +} + +class TestBasicSourceLineResolver : public ::testing::Test { +public: + void SetUp() { + testdata_dir = string(getenv("srcdir") ? getenv("srcdir") : ".") + + "/src/processor/testdata"; + } + + BasicSourceLineResolver resolver; + string testdata_dir; +}; + +TEST_F(TestBasicSourceLineResolver, TestLoadAndResolve) +{ + TestCodeModule module1("module1"); + ASSERT_TRUE(resolver.LoadModule(&module1, testdata_dir + "/module1.out")); + ASSERT_TRUE(resolver.HasModule(&module1)); + TestCodeModule module2("module2"); + ASSERT_TRUE(resolver.LoadModule(&module2, testdata_dir + "/module2.out")); + ASSERT_TRUE(resolver.HasModule(&module2)); + + + StackFrame frame; + scoped_ptr windows_frame_info; + scoped_ptr cfi_frame_info; + frame.instruction = 0x1000; + frame.module = NULL; + resolver.FillSourceLineInfo(&frame); + ASSERT_FALSE(frame.module); + ASSERT_TRUE(frame.function_name.empty()); + ASSERT_EQ(frame.function_base, 0U); + ASSERT_TRUE(frame.source_file_name.empty()); + ASSERT_EQ(frame.source_line, 0); + ASSERT_EQ(frame.source_line_base, 0U); + + frame.module = &module1; + resolver.FillSourceLineInfo(&frame); + ASSERT_EQ(frame.function_name, "Function1_1"); + ASSERT_TRUE(frame.module); + ASSERT_EQ(frame.module->code_file(), "module1"); + ASSERT_EQ(frame.function_base, 0x1000U); + ASSERT_EQ(frame.source_file_name, "file1_1.cc"); + ASSERT_EQ(frame.source_line, 44); + ASSERT_EQ(frame.source_line_base, 0x1000U); + windows_frame_info.reset(resolver.FindWindowsFrameInfo(&frame)); + ASSERT_TRUE(windows_frame_info.get()); + ASSERT_EQ(windows_frame_info->type_, WindowsFrameInfo::STACK_INFO_FRAME_DATA); + ASSERT_FALSE(windows_frame_info->allocates_base_pointer); + ASSERT_EQ(windows_frame_info->program_string, + "$eip 4 + ^ = $esp $ebp 8 + = $ebp $ebp ^ ="); + + ClearSourceLineInfo(&frame); + frame.instruction = 0x800; + frame.module = &module1; + resolver.FillSourceLineInfo(&frame); + ASSERT_TRUE(VerifyEmpty(frame)); + windows_frame_info.reset(resolver.FindWindowsFrameInfo(&frame)); + ASSERT_FALSE(windows_frame_info.get()); + + frame.instruction = 0x1280; + resolver.FillSourceLineInfo(&frame); + 
ASSERT_EQ(frame.function_name, "Function1_3"); + ASSERT_TRUE(frame.source_file_name.empty()); + ASSERT_EQ(frame.source_line, 0); + windows_frame_info.reset(resolver.FindWindowsFrameInfo(&frame)); + ASSERT_TRUE(windows_frame_info.get()); + ASSERT_EQ(windows_frame_info->type_, WindowsFrameInfo::STACK_INFO_UNKNOWN); + ASSERT_FALSE(windows_frame_info->allocates_base_pointer); + ASSERT_TRUE(windows_frame_info->program_string.empty()); + + frame.instruction = 0x1380; + resolver.FillSourceLineInfo(&frame); + ASSERT_EQ(frame.function_name, "Function1_4"); + ASSERT_TRUE(frame.source_file_name.empty()); + ASSERT_EQ(frame.source_line, 0); + windows_frame_info.reset(resolver.FindWindowsFrameInfo(&frame)); + ASSERT_EQ(windows_frame_info->type_, WindowsFrameInfo::STACK_INFO_FRAME_DATA); + ASSERT_TRUE(windows_frame_info.get()); + ASSERT_FALSE(windows_frame_info->allocates_base_pointer); + ASSERT_FALSE(windows_frame_info->program_string.empty()); + + frame.instruction = 0x2000; + windows_frame_info.reset(resolver.FindWindowsFrameInfo(&frame)); + ASSERT_FALSE(windows_frame_info.get()); + + // module1 has STACK CFI records covering 3d40..3def; + // module2 has STACK CFI records covering 3df0..3e9f; + // check that FindCFIFrameInfo doesn't claim to find any outside those ranges. + frame.instruction = 0x3d3f; + frame.module = &module1; + cfi_frame_info.reset(resolver.FindCFIFrameInfo(&frame)); + ASSERT_FALSE(cfi_frame_info.get()); + + frame.instruction = 0x3e9f; + frame.module = &module1; + cfi_frame_info.reset(resolver.FindCFIFrameInfo(&frame)); + ASSERT_FALSE(cfi_frame_info.get()); + + CFIFrameInfo::RegisterValueMap current_registers; + CFIFrameInfo::RegisterValueMap caller_registers; + CFIFrameInfo::RegisterValueMap expected_caller_registers; + MockMemoryRegion memory; + + // Regardless of which instruction evaluation takes place at, it + // should produce the same values for the caller's registers. 
+ expected_caller_registers[".cfa"] = 0x1001c; + expected_caller_registers[".ra"] = 0xf6438648; + expected_caller_registers["$ebp"] = 0x10038; + expected_caller_registers["$ebx"] = 0x98ecadc3; + expected_caller_registers["$esi"] = 0x878f7524; + expected_caller_registers["$edi"] = 0x6312f9a5; + + frame.instruction = 0x3d40; + frame.module = &module1; + current_registers.clear(); + current_registers["$esp"] = 0x10018; + current_registers["$ebp"] = 0x10038; + current_registers["$ebx"] = 0x98ecadc3; + current_registers["$esi"] = 0x878f7524; + current_registers["$edi"] = 0x6312f9a5; + cfi_frame_info.reset(resolver.FindCFIFrameInfo(&frame)); + ASSERT_TRUE(cfi_frame_info.get()); + ASSERT_TRUE(cfi_frame_info.get() + ->FindCallerRegs(current_registers, memory, + &caller_registers)); + ASSERT_TRUE(VerifyRegisters(__FILE__, __LINE__, + expected_caller_registers, caller_registers)); + + frame.instruction = 0x3d41; + current_registers["$esp"] = 0x10014; + cfi_frame_info.reset(resolver.FindCFIFrameInfo(&frame)); + ASSERT_TRUE(cfi_frame_info.get()); + ASSERT_TRUE(cfi_frame_info.get() + ->FindCallerRegs(current_registers, memory, + &caller_registers)); + ASSERT_TRUE(VerifyRegisters(__FILE__, __LINE__, + expected_caller_registers, caller_registers)); + + frame.instruction = 0x3d43; + current_registers["$ebp"] = 0x10014; + cfi_frame_info.reset(resolver.FindCFIFrameInfo(&frame)); + ASSERT_TRUE(cfi_frame_info.get()); + ASSERT_TRUE(cfi_frame_info.get() + ->FindCallerRegs(current_registers, memory, + &caller_registers)); + VerifyRegisters(__FILE__, __LINE__, + expected_caller_registers, caller_registers); + + frame.instruction = 0x3d54; + current_registers["$ebx"] = 0x6864f054U; + cfi_frame_info.reset(resolver.FindCFIFrameInfo(&frame)); + ASSERT_TRUE(cfi_frame_info.get()); + ASSERT_TRUE(cfi_frame_info.get() + ->FindCallerRegs(current_registers, memory, + &caller_registers)); + VerifyRegisters(__FILE__, __LINE__, + expected_caller_registers, caller_registers); + + frame.instruction = 0x3d5a; + current_registers["$esi"] = 0x6285f79aU; + cfi_frame_info.reset(resolver.FindCFIFrameInfo(&frame)); + ASSERT_TRUE(cfi_frame_info.get()); + ASSERT_TRUE(cfi_frame_info.get() + ->FindCallerRegs(current_registers, memory, + &caller_registers)); + VerifyRegisters(__FILE__, __LINE__, + expected_caller_registers, caller_registers); + + frame.instruction = 0x3d84; + current_registers["$edi"] = 0x64061449U; + cfi_frame_info.reset(resolver.FindCFIFrameInfo(&frame)); + ASSERT_TRUE(cfi_frame_info.get()); + ASSERT_TRUE(cfi_frame_info.get() + ->FindCallerRegs(current_registers, memory, + &caller_registers)); + VerifyRegisters(__FILE__, __LINE__, + expected_caller_registers, caller_registers); + + frame.instruction = 0x2900; + frame.module = &module1; + resolver.FillSourceLineInfo(&frame); + ASSERT_EQ(frame.function_name, string("PublicSymbol")); + + frame.instruction = 0x4000; + frame.module = &module1; + resolver.FillSourceLineInfo(&frame); + ASSERT_EQ(frame.function_name, string("LargeFunction")); + + frame.instruction = 0x2181; + frame.module = &module2; + resolver.FillSourceLineInfo(&frame); + ASSERT_EQ(frame.function_name, "Function2_2"); + ASSERT_EQ(frame.function_base, 0x2170U); + ASSERT_TRUE(frame.module); + ASSERT_EQ(frame.module->code_file(), "module2"); + ASSERT_EQ(frame.source_file_name, "file2_2.cc"); + ASSERT_EQ(frame.source_line, 21); + ASSERT_EQ(frame.source_line_base, 0x2180U); + windows_frame_info.reset(resolver.FindWindowsFrameInfo(&frame)); + ASSERT_TRUE(windows_frame_info.get()); + 
ASSERT_EQ(windows_frame_info->type_, WindowsFrameInfo::STACK_INFO_FRAME_DATA); + ASSERT_EQ(windows_frame_info->prolog_size, 1U); + + frame.instruction = 0x216f; + resolver.FillSourceLineInfo(&frame); + ASSERT_EQ(frame.function_name, "Public2_1"); + + ClearSourceLineInfo(&frame); + frame.instruction = 0x219f; + frame.module = &module2; + resolver.FillSourceLineInfo(&frame); + ASSERT_TRUE(frame.function_name.empty()); + + frame.instruction = 0x21a0; + frame.module = &module2; + resolver.FillSourceLineInfo(&frame); + ASSERT_EQ(frame.function_name, "Public2_2"); +} + +TEST_F(TestBasicSourceLineResolver, TestInvalidLoads) +{ + TestCodeModule module3("module3"); + ASSERT_TRUE(resolver.LoadModule(&module3, + testdata_dir + "/module3_bad.out")); + ASSERT_TRUE(resolver.HasModule(&module3)); + ASSERT_TRUE(resolver.IsModuleCorrupt(&module3)); + TestCodeModule module4("module4"); + ASSERT_TRUE(resolver.LoadModule(&module4, + testdata_dir + "/module4_bad.out")); + ASSERT_TRUE(resolver.HasModule(&module4)); + ASSERT_TRUE(resolver.IsModuleCorrupt(&module4)); + TestCodeModule module5("module5"); + ASSERT_FALSE(resolver.LoadModule(&module5, + testdata_dir + "/invalid-filename")); + ASSERT_FALSE(resolver.HasModule(&module5)); + TestCodeModule invalidmodule("invalid-module"); + ASSERT_FALSE(resolver.HasModule(&invalidmodule)); +} + +TEST_F(TestBasicSourceLineResolver, TestUnload) +{ + TestCodeModule module1("module1"); + ASSERT_FALSE(resolver.HasModule(&module1)); + ASSERT_TRUE(resolver.LoadModule(&module1, testdata_dir + "/module1.out")); + ASSERT_TRUE(resolver.HasModule(&module1)); + resolver.UnloadModule(&module1); + ASSERT_FALSE(resolver.HasModule(&module1)); + ASSERT_TRUE(resolver.LoadModule(&module1, testdata_dir + "/module1.out")); + ASSERT_TRUE(resolver.HasModule(&module1)); +} + +// Test parsing of valid FILE lines. The format is: +// FILE +TEST(SymbolParseHelper, ParseFileValid) { + long index; + char *filename; + + char kTestLine[] = "FILE 1 file name"; + ASSERT_TRUE(SymbolParseHelper::ParseFile(kTestLine, &index, &filename)); + EXPECT_EQ(1, index); + EXPECT_EQ("file name", string(filename)); + + // 0 is a valid index. + char kTestLine1[] = "FILE 0 file name"; + ASSERT_TRUE(SymbolParseHelper::ParseFile(kTestLine1, &index, &filename)); + EXPECT_EQ(0, index); + EXPECT_EQ("file name", string(filename)); +} + +// Test parsing of invalid FILE lines. The format is: +// FILE +TEST(SymbolParseHelper, ParseFileInvalid) { + long index; + char *filename; + + // Test missing file name. + char kTestLine[] = "FILE 1 "; + ASSERT_FALSE(SymbolParseHelper::ParseFile(kTestLine, &index, &filename)); + + // Test bad index. + char kTestLine1[] = "FILE x1 file name"; + ASSERT_FALSE(SymbolParseHelper::ParseFile(kTestLine1, &index, &filename)); + + // Test large index. + char kTestLine2[] = "FILE 123123123123123123123123 file name"; + ASSERT_FALSE(SymbolParseHelper::ParseFile(kTestLine2, &index, &filename)); + + // Test negative index. + char kTestLine3[] = "FILE -2 file name"; + ASSERT_FALSE(SymbolParseHelper::ParseFile(kTestLine3, &index, &filename)); +} + +// Test parsing of valid FUNC lines. The format is: +// FUNC
+TEST(SymbolParseHelper, ParseFunctionValid) { + uint64_t address; + uint64_t size; + long stack_param_size; + char *name; + + char kTestLine[] = "FUNC 1 2 3 function name"; + ASSERT_TRUE(SymbolParseHelper::ParseFunction(kTestLine, &address, &size, + &stack_param_size, &name)); + EXPECT_EQ(1ULL, address); + EXPECT_EQ(2ULL, size); + EXPECT_EQ(3, stack_param_size); + EXPECT_EQ("function name", string(name)); + + // Test hex address, size, and param size. + char kTestLine1[] = "FUNC a1 a2 a3 function name"; + ASSERT_TRUE(SymbolParseHelper::ParseFunction(kTestLine1, &address, &size, + &stack_param_size, &name)); + EXPECT_EQ(0xa1ULL, address); + EXPECT_EQ(0xa2ULL, size); + EXPECT_EQ(0xa3, stack_param_size); + EXPECT_EQ("function name", string(name)); + + char kTestLine2[] = "FUNC 0 0 0 function name"; + ASSERT_TRUE(SymbolParseHelper::ParseFunction(kTestLine2, &address, &size, + &stack_param_size, &name)); + EXPECT_EQ(0ULL, address); + EXPECT_EQ(0ULL, size); + EXPECT_EQ(0, stack_param_size); + EXPECT_EQ("function name", string(name)); +} + +// Test parsing of invalid FUNC lines. The format is: +// FUNC
+TEST(SymbolParseHelper, ParseFunctionInvalid) { + uint64_t address; + uint64_t size; + long stack_param_size; + char *name; + + // Test missing function name. + char kTestLine[] = "FUNC 1 2 3 "; + ASSERT_FALSE(SymbolParseHelper::ParseFunction(kTestLine, &address, &size, + &stack_param_size, &name)); + // Test bad address. + char kTestLine1[] = "FUNC 1z 2 3 function name"; + ASSERT_FALSE(SymbolParseHelper::ParseFunction(kTestLine1, &address, &size, + &stack_param_size, &name)); + // Test large address. + char kTestLine2[] = "FUNC 123123123123123123123123123 2 3 function name"; + ASSERT_FALSE(SymbolParseHelper::ParseFunction(kTestLine2, &address, &size, + &stack_param_size, &name)); + // Test bad size. + char kTestLine3[] = "FUNC 1 z2 3 function name"; + ASSERT_FALSE(SymbolParseHelper::ParseFunction(kTestLine3, &address, &size, + &stack_param_size, &name)); + // Test large size. + char kTestLine4[] = "FUNC 1 231231231231231231231231232 3 function name"; + ASSERT_FALSE(SymbolParseHelper::ParseFunction(kTestLine4, &address, &size, + &stack_param_size, &name)); + // Test bad param size. + char kTestLine5[] = "FUNC 1 2 3z function name"; + ASSERT_FALSE(SymbolParseHelper::ParseFunction(kTestLine5, &address, &size, + &stack_param_size, &name)); + // Test large param size. + char kTestLine6[] = "FUNC 1 2 312312312312312312312312323 function name"; + ASSERT_FALSE(SymbolParseHelper::ParseFunction(kTestLine6, &address, &size, + &stack_param_size, &name)); + // Negative param size. + char kTestLine7[] = "FUNC 1 2 -5 function name"; + ASSERT_FALSE(SymbolParseHelper::ParseFunction(kTestLine7, &address, &size, + &stack_param_size, &name)); +} + +// Test parsing of valid lines. The format is: +//
+TEST(SymbolParseHelper, ParseLineValid) { + uint64_t address; + uint64_t size; + long line_number; + long source_file; + + char kTestLine[] = "1 2 3 4"; + ASSERT_TRUE(SymbolParseHelper::ParseLine(kTestLine, &address, &size, + &line_number, &source_file)); + EXPECT_EQ(1ULL, address); + EXPECT_EQ(2ULL, size); + EXPECT_EQ(3, line_number); + EXPECT_EQ(4, source_file); + + // Test hex size and address. + char kTestLine1[] = "a1 a2 3 4 // some comment"; + ASSERT_TRUE(SymbolParseHelper::ParseLine(kTestLine1, &address, &size, + &line_number, &source_file)); + EXPECT_EQ(0xa1ULL, address); + EXPECT_EQ(0xa2ULL, size); + EXPECT_EQ(3, line_number); + EXPECT_EQ(4, source_file); + + // 0 is a valid line number. + char kTestLine2[] = "a1 a2 0 4 // some comment"; + ASSERT_TRUE(SymbolParseHelper::ParseLine(kTestLine2, &address, &size, + &line_number, &source_file)); + EXPECT_EQ(0xa1ULL, address); + EXPECT_EQ(0xa2ULL, size); + EXPECT_EQ(0, line_number); + EXPECT_EQ(4, source_file); +} + +// Test parsing of invalid lines. The format is: +//
+TEST(SymbolParseHelper, ParseLineInvalid) { + uint64_t address; + uint64_t size; + long line_number; + long source_file; + + // Test missing source file id. + char kTestLine[] = "1 2 3"; + ASSERT_FALSE(SymbolParseHelper::ParseLine(kTestLine, &address, &size, + &line_number, &source_file)); + // Test bad address. + char kTestLine1[] = "1z 2 3 4"; + ASSERT_FALSE(SymbolParseHelper::ParseLine(kTestLine1, &address, &size, + &line_number, &source_file)); + // Test large address. + char kTestLine2[] = "123123123123123123123123 2 3 4"; + ASSERT_FALSE(SymbolParseHelper::ParseLine(kTestLine2, &address, &size, + &line_number, &source_file)); + // Test bad size. + char kTestLine3[] = "1 z2 3 4"; + ASSERT_FALSE(SymbolParseHelper::ParseLine(kTestLine3, &address, &size, + &line_number, &source_file)); + // Test large size. + char kTestLine4[] = "1 123123123123123123123123 3 4"; + ASSERT_FALSE(SymbolParseHelper::ParseLine(kTestLine4, &address, &size, + &line_number, &source_file)); + // Test bad line number. + char kTestLine5[] = "1 2 z3 4"; + ASSERT_FALSE(SymbolParseHelper::ParseLine(kTestLine5, &address, &size, + &line_number, &source_file)); + // Test negative line number. + char kTestLine6[] = "1 2 -1 4"; + ASSERT_FALSE(SymbolParseHelper::ParseLine(kTestLine6, &address, &size, + &line_number, &source_file)); + // Test large line number. + char kTestLine7[] = "1 2 123123123123123123123 4"; + ASSERT_FALSE(SymbolParseHelper::ParseLine(kTestLine7, &address, &size, + &line_number, &source_file)); + // Test bad source file id. + char kTestLine8[] = "1 2 3 f"; + ASSERT_FALSE(SymbolParseHelper::ParseLine(kTestLine8, &address, &size, + &line_number, &source_file)); +} + +// Test parsing of valid PUBLIC lines. The format is: +// PUBLIC
+TEST(SymbolParseHelper, ParsePublicSymbolValid) { + uint64_t address; + long stack_param_size; + char *name; + + char kTestLine[] = "PUBLIC 1 2 3"; + ASSERT_TRUE(SymbolParseHelper::ParsePublicSymbol(kTestLine, &address, + &stack_param_size, &name)); + EXPECT_EQ(1ULL, address); + EXPECT_EQ(2, stack_param_size); + EXPECT_EQ("3", string(name)); + + // Test hex size and address. + char kTestLine1[] = "PUBLIC a1 a2 function name"; + ASSERT_TRUE(SymbolParseHelper::ParsePublicSymbol(kTestLine1, &address, + &stack_param_size, &name)); + EXPECT_EQ(0xa1ULL, address); + EXPECT_EQ(0xa2, stack_param_size); + EXPECT_EQ("function name", string(name)); + + // Test 0 is a valid address. + char kTestLine2[] = "PUBLIC 0 a2 function name"; + ASSERT_TRUE(SymbolParseHelper::ParsePublicSymbol(kTestLine2, &address, + &stack_param_size, &name)); + EXPECT_EQ(0ULL, address); + EXPECT_EQ(0xa2, stack_param_size); + EXPECT_EQ("function name", string(name)); +} + +// Test parsing of invalid PUBLIC lines. The format is: +// PUBLIC
+TEST(SymbolParseHelper, ParsePublicSymbolInvalid) { + uint64_t address; + long stack_param_size; + char *name; + + // Test missing source function name. + char kTestLine[] = "PUBLIC 1 2 "; + ASSERT_FALSE(SymbolParseHelper::ParsePublicSymbol(kTestLine, &address, + &stack_param_size, &name)); + // Test bad address. + char kTestLine1[] = "PUBLIC 1z 2 3"; + ASSERT_FALSE(SymbolParseHelper::ParsePublicSymbol(kTestLine1, &address, + &stack_param_size, &name)); + // Test large address. + char kTestLine2[] = "PUBLIC 123123123123123123123123 2 3"; + ASSERT_FALSE(SymbolParseHelper::ParsePublicSymbol(kTestLine2, &address, + &stack_param_size, &name)); + // Test bad param stack size. + char kTestLine3[] = "PUBLIC 1 z2 3"; + ASSERT_FALSE(SymbolParseHelper::ParsePublicSymbol(kTestLine3, &address, + &stack_param_size, &name)); + // Test large param stack size. + char kTestLine4[] = "PUBLIC 1 123123123123123123123123123 3"; + ASSERT_FALSE(SymbolParseHelper::ParsePublicSymbol(kTestLine4, &address, + &stack_param_size, &name)); + // Test negative param stack size. + char kTestLine5[] = "PUBLIC 1 -5 3"; + ASSERT_FALSE(SymbolParseHelper::ParsePublicSymbol(kTestLine5, &address, + &stack_param_size, &name)); +} + +} // namespace + +int main(int argc, char *argv[]) { + ::testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/binarystream.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/binarystream.cc new file mode 100644 index 0000000000..bf92225b54 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/binarystream.cc @@ -0,0 +1,124 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
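binarystream (declared in binarystream.h later in this patch) wraps a std::stringstream and defines a simple wire format: integers are written in network byte order and strings carry a 16-bit length prefix (truncated at 65535 characters). A minimal usage sketch follows, assuming the include path processor/binarystream.h and linking against the implementation below; the values and the 21-byte total mirror the ReadMixed and StreamByteLength unit tests that come after it.

// Usage sketch only; not part of the vendored sources.
#include <cassert>
#include <cstdint>
#include <string>

#include "processor/binarystream.h"  // assumed include path, matching the diff layout

int main() {
  google_breakpad::binarystream stream;

  // 1 + 2 + 4 + 8 bytes of integers, then 2 (length prefix) + 4 (payload) for "test".
  stream << (uint8_t)0x10 << (uint16_t)0x2020 << (uint32_t)0x30303030
         << (uint64_t)0x4040404040404040ULL << std::string("test");
  assert(stream.str().length() == 21);

  uint8_t u8 = 0;
  uint16_t u16 = 0;
  uint32_t u32 = 0;
  uint64_t u64 = 0;
  std::string s;
  stream >> u8 >> u16 >> u32 >> u64 >> s;  // get pointer is still at the start
  assert(!stream.eof());
  assert(u8 == 0x10 && u16 == 0x2020 && u32 == 0x30303030);
  assert(u64 == 0x4040404040404040ULL && s == "test");
  return 0;
}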
+ +#include +#include + +#include +#include + +#include "common/using_std_string.h" +#include "processor/binarystream.h" + +namespace google_breakpad { +using std::vector; + +binarystream &binarystream::operator>>(string &str) { + uint16_t length; + *this >> length; + if (eof()) + return *this; + if (length == 0) { + str.clear(); + return *this; + } + vector buffer(length); + stream_.read(&buffer[0], length); + if (!eof()) + str.assign(&buffer[0], length); + return *this; +} + +binarystream &binarystream::operator>>(uint8_t &u8) { + stream_.read((char *)&u8, 1); + return *this; +} + +binarystream &binarystream::operator>>(uint16_t &u16) { + uint16_t temp; + stream_.read((char *)&temp, 2); + if (!eof()) + u16 = ntohs(temp); + return *this; +} + +binarystream &binarystream::operator>>(uint32_t &u32) { + uint32_t temp; + stream_.read((char *)&temp, 4); + if (!eof()) + u32 = ntohl(temp); + return *this; +} + +binarystream &binarystream::operator>>(uint64_t &u64) { + uint32_t lower, upper; + *this >> lower >> upper; + if (!eof()) + u64 = static_cast(lower) | (static_cast(upper) << 32); + return *this; +} + +binarystream &binarystream::operator<<(const string &str) { + if (str.length() > USHRT_MAX) { + // truncate to 16-bit length + *this << static_cast(USHRT_MAX); + stream_.write(str.c_str(), USHRT_MAX); + } else { + *this << (uint16_t)(str.length() & 0xFFFF); + stream_.write(str.c_str(), str.length()); + } + return *this; +} + +binarystream &binarystream::operator<<(uint8_t u8) { + stream_.write((const char*)&u8, 1); + return *this; +} + +binarystream &binarystream::operator<<(uint16_t u16) { + u16 = htons(u16); + stream_.write((const char*)&u16, 2); + return *this; +} + +binarystream &binarystream::operator<<(uint32_t u32) { + u32 = htonl(u32); + stream_.write((const char*)&u32, 4); + return *this; +} + +binarystream &binarystream::operator<<(uint64_t u64) { + // write 64-bit ints as two 32-bit ints, so we can byte-swap them easily + uint32_t lower = static_cast(u64 & 0xFFFFFFFF); + uint32_t upper = static_cast(u64 >> 32); + *this << lower << upper; + return *this; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/binarystream.h b/TMessagesProj/jni/third_party/breakpad/src/processor/binarystream.h new file mode 100644 index 0000000000..172e09b92f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/binarystream.h @@ -0,0 +1,92 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// binarystream implements part of the std::iostream interface as a +// wrapper around std::stringstream to allow reading and writing strings +// and integers of known size. + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_BINARYSTREAM_H_ +#define GOOGLE_BREAKPAD_PROCESSOR_BINARYSTREAM_H_ + +#include +#include + +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" + +namespace google_breakpad { +using std::ios_base; +using std::ios; + +class binarystream { + public: + explicit binarystream(ios_base::openmode which = ios_base::out|ios_base::in) + : stream_(which) {} + explicit binarystream(const string &str, + ios_base::openmode which = ios_base::out|ios_base::in) + : stream_(str, which) {} + explicit binarystream(const char *str, size_t size, + ios_base::openmode which = ios_base::out|ios_base::in) + : stream_(string(str, size), which) {} + + binarystream &operator>>(string &str); + binarystream &operator>>(uint8_t &u8); + binarystream &operator>>(uint16_t &u16); + binarystream &operator>>(uint32_t &u32); + binarystream &operator>>(uint64_t &u64); + + // Note: strings are truncated at 65535 characters + binarystream &operator<<(const string &str); + binarystream &operator<<(uint8_t u8); + binarystream &operator<<(uint16_t u16); + binarystream &operator<<(uint32_t u32); + binarystream &operator<<(uint64_t u64); + + // Forward a few methods directly from the stream object + bool eof() const { return stream_.eof(); } + void clear() { stream_.clear(); } + string str() const { return stream_.str(); } + void str(const string &s) { stream_.str(s); } + + // Seek both read and write pointers to the beginning of the stream. + void rewind() { + stream_.seekg (0, ios::beg); + stream_.seekp (0, ios::beg); + // This is to clear all the error flags, since only the EOF flag is cleared + // with seekg(). + stream_.clear(); + } + + private: + std::stringstream stream_; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_BINARYSTREAM_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/binarystream_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/binarystream_unittest.cc new file mode 100644 index 0000000000..bf020edb91 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/binarystream_unittest.cc @@ -0,0 +1,432 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/using_std_string.h" +#include "processor/binarystream.h" + +namespace { +using std::ios_base; +using std::vector; +using google_breakpad::binarystream; + + +class BinaryStreamBasicTest : public ::testing::Test { +protected: + binarystream stream; +}; + +TEST_F(BinaryStreamBasicTest, ReadU8) { + uint8_t u8 = 0; + ASSERT_FALSE(stream.eof()); + stream >> u8; + ASSERT_TRUE(stream.eof()); + EXPECT_EQ(0U, u8); + stream.rewind(); + stream.clear(); + stream << (uint8_t)1; + ASSERT_FALSE(stream.eof()); + stream >> u8; + EXPECT_EQ(1, u8); + EXPECT_FALSE(stream.eof()); +} + +TEST_F(BinaryStreamBasicTest, ReadU16) { + uint16_t u16 = 0; + ASSERT_FALSE(stream.eof()); + stream >> u16; + ASSERT_TRUE(stream.eof()); + EXPECT_EQ(0U, u16); + stream.rewind(); + stream.clear(); + stream << (uint16_t)1; + ASSERT_FALSE(stream.eof()); + stream >> u16; + EXPECT_EQ(1, u16); + EXPECT_FALSE(stream.eof()); +} + +TEST_F(BinaryStreamBasicTest, ReadU32) { + uint32_t u32 = 0; + ASSERT_FALSE(stream.eof()); + stream >> u32; + ASSERT_TRUE(stream.eof()); + EXPECT_EQ(0U, u32); + stream.rewind(); + stream.clear(); + stream << (uint32_t)1; + ASSERT_FALSE(stream.eof()); + stream >> u32; + EXPECT_EQ(1U, u32); + EXPECT_FALSE(stream.eof()); +} + +TEST_F(BinaryStreamBasicTest, ReadU64) { + uint64_t u64 = 0; + ASSERT_FALSE(stream.eof()); + stream >> u64; + ASSERT_TRUE(stream.eof()); + EXPECT_EQ(0U, u64); + stream.rewind(); + stream.clear(); + stream << (uint64_t)1; + ASSERT_FALSE(stream.eof()); + stream >> u64; + EXPECT_EQ(1U, u64); + EXPECT_FALSE(stream.eof()); +} + +TEST_F(BinaryStreamBasicTest, ReadString) { + string s(""); + ASSERT_FALSE(stream.eof()); + stream >> s; + ASSERT_TRUE(stream.eof()); + EXPECT_EQ("", s); + // write an empty string to the stream, read it back + s = "abcd"; + stream.rewind(); + stream.clear(); + stream << string(""); + stream >> s; + EXPECT_EQ("", s); + EXPECT_FALSE(stream.eof()); + stream.rewind(); + stream.clear(); + stream << string("test"); + ASSERT_FALSE(stream.eof()); + stream >> s; + EXPECT_EQ("test", s); + EXPECT_FALSE(stream.eof()); +} + +TEST_F(BinaryStreamBasicTest, ReadEmptyString) { + string s("abc"); + stream << string(""); + stream >> s; + EXPECT_EQ("", s); +} + +TEST_F(BinaryStreamBasicTest, ReadMultiU8) { + const uint8_t ea = 0, eb = 100, ec = 200, ed = 0xFF; + uint8_t a, b, c, d, e; + stream << ea << eb << ec << ed; + stream >> a >> b >> c >> d; + ASSERT_FALSE(stream.eof()); + EXPECT_EQ(ea, a); + EXPECT_EQ(eb, b); + EXPECT_EQ(ec, c); + EXPECT_EQ(ed, d); + 
ASSERT_FALSE(stream.eof()); + e = 0; + stream >> e; + EXPECT_EQ(0U, e); + ASSERT_TRUE(stream.eof()); + // try reading all at once, including one past eof + stream.rewind(); + stream.clear(); + ASSERT_FALSE(stream.eof()); + a = b = c = d = e = 0; + stream << ea << eb << ec << ed; + stream >> a >> b >> c >> d >> e; + EXPECT_EQ(ea, a); + EXPECT_EQ(eb, b); + EXPECT_EQ(ec, c); + EXPECT_EQ(ed, d); + EXPECT_EQ(0U, e); + EXPECT_TRUE(stream.eof()); +} + +TEST_F(BinaryStreamBasicTest, ReadMultiU16) { + const uint16_t ea = 0, eb = 0x100, ec = 0x8000, ed = 0xFFFF; + uint16_t a, b, c, d, e; + stream << ea << eb << ec << ed; + stream >> a >> b >> c >> d; + ASSERT_FALSE(stream.eof()); + EXPECT_EQ(ea, a); + EXPECT_EQ(eb, b); + EXPECT_EQ(ec, c); + EXPECT_EQ(ed, d); + ASSERT_FALSE(stream.eof()); + e = 0; + stream >> e; + EXPECT_EQ(0U, e); + EXPECT_TRUE(stream.eof()); + // try reading all at once, including one past eof + stream.rewind(); + stream.clear(); + ASSERT_FALSE(stream.eof()); + a = b = c = d = e = 0; + stream << ea << eb << ec << ed; + stream >> a >> b >> c >> d >> e; + EXPECT_EQ(ea, a); + EXPECT_EQ(eb, b); + EXPECT_EQ(ec, c); + EXPECT_EQ(ed, d); + EXPECT_EQ(0U, e); + EXPECT_TRUE(stream.eof()); +} + +TEST_F(BinaryStreamBasicTest, ReadMultiU32) { + const uint32_t ea = 0, eb = 0x10000, ec = 0x8000000, ed = 0xFFFFFFFF; + uint32_t a, b, c, d, e; + stream << ea << eb << ec << ed; + stream >> a >> b >> c >> d; + ASSERT_FALSE(stream.eof()); + EXPECT_EQ(ea, a); + EXPECT_EQ(eb, b); + EXPECT_EQ(ec, c); + EXPECT_EQ(ed, d); + ASSERT_FALSE(stream.eof()); + e = 0; + stream >> e; + EXPECT_EQ(0U, e); + EXPECT_TRUE(stream.eof()); + // try reading all at once, including one past eof + stream.rewind(); + stream.clear(); + ASSERT_FALSE(stream.eof()); + a = b = c = d = e = 0; + stream << ea << eb << ec << ed; + stream >> a >> b >> c >> d >> e; + EXPECT_EQ(ea, a); + EXPECT_EQ(eb, b); + EXPECT_EQ(ec, c); + EXPECT_EQ(ed, d); + EXPECT_EQ(0U, e); + EXPECT_TRUE(stream.eof()); +} + +TEST_F(BinaryStreamBasicTest, ReadMultiU64) { + const uint64_t ea = 0, eb = 0x10000, ec = 0x100000000ULL, + ed = 0xFFFFFFFFFFFFFFFFULL; + uint64_t a, b, c, d, e; + stream << ea << eb << ec << ed; + stream >> a >> b >> c >> d; + ASSERT_FALSE(stream.eof()); + EXPECT_EQ(ea, a); + EXPECT_EQ(eb, b); + EXPECT_EQ(ec, c); + EXPECT_EQ(ed, d); + ASSERT_FALSE(stream.eof()); + e = 0; + stream >> e; + EXPECT_EQ(0U, e); + EXPECT_TRUE(stream.eof()); + // try reading all at once, including one past eof + stream.rewind(); + stream.clear(); + ASSERT_FALSE(stream.eof()); + a = b = c = d = e = 0; + stream << ea << eb << ec << ed; + stream >> a >> b >> c >> d >> e; + EXPECT_EQ(ea, a); + EXPECT_EQ(eb, b); + EXPECT_EQ(ec, c); + EXPECT_EQ(ed, d); + EXPECT_EQ(0U, e); + EXPECT_TRUE(stream.eof()); +} + +TEST_F(BinaryStreamBasicTest, ReadMixed) { + const uint8_t e8 = 0x10; + const uint16_t e16 = 0x2020; + const uint32_t e32 = 0x30303030; + const uint64_t e64 = 0x4040404040404040ULL; + const string es = "test"; + uint8_t u8 = 0; + uint16_t u16 = 0; + uint32_t u32 = 0; + uint64_t u64 = 0; + string s("test"); + stream << e8 << e16 << e32 << e64 << es; + stream >> u8 >> u16 >> u32 >> u64 >> s; + EXPECT_FALSE(stream.eof()); + EXPECT_EQ(e8, u8); + EXPECT_EQ(e16, u16); + EXPECT_EQ(e32, u32); + EXPECT_EQ(e64, u64); + EXPECT_EQ(es, s); +} + +TEST_F(BinaryStreamBasicTest, ReadStringMissing) { + // ensure that reading a string where only the length is present fails + uint16_t u16 = 8; + stream << u16; + stream.rewind(); + string s(""); + stream >> s; + EXPECT_EQ("", s); + 
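+  // The byte counts in these tests imply that a string is serialized as a
+  // 16-bit length followed by its raw bytes; here only the length was
+  // written, so the read yields an empty string and leaves the stream at
+  // eof.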
EXPECT_TRUE(stream.eof()); +} + +TEST_F(BinaryStreamBasicTest, ReadStringTruncated) { + // ensure that reading a string where not all the data is present fails + uint16_t u16 = 8; + stream << u16; + stream << (uint8_t)'t' << (uint8_t)'e' << (uint8_t)'s' << (uint8_t)'t'; + stream.rewind(); + string s(""); + stream >> s; + EXPECT_EQ("", s); + EXPECT_TRUE(stream.eof()); +} + +TEST_F(BinaryStreamBasicTest, StreamByteLength) { + // Test that the stream buffer contains the right amount of data + stream << (uint8_t)0 << (uint16_t)1 << (uint32_t)2 << (uint64_t)3 + << string("test"); + string s = stream.str(); + EXPECT_EQ(21U, s.length()); +} + +TEST_F(BinaryStreamBasicTest, AppendStreamResultsByteLength) { + // Test that appending the str() results from two streams + // gives the right byte length + binarystream stream2; + stream << (uint8_t)0 << (uint16_t)1; + stream2 << (uint32_t)0 << (uint64_t)2 + << string("test"); + string s = stream.str(); + string s2 = stream2.str(); + s.append(s2); + EXPECT_EQ(21U, s.length()); +} + +TEST_F(BinaryStreamBasicTest, StreamSetStr) { + const string es("test"); + stream << es; + binarystream stream2; + stream2.str(stream.str()); + string s; + stream2 >> s; + EXPECT_FALSE(stream2.eof()); + EXPECT_EQ("test", s); + s = ""; + stream2.str(stream.str()); + stream2.rewind(); + stream2 >> s; + EXPECT_FALSE(stream2.eof()); + EXPECT_EQ("test", s); +} + +class BinaryStreamU8Test : public ::testing::Test { +protected: + binarystream stream; + + void SetUp() { + stream << (uint8_t)1; + } +}; + +TEST_F(BinaryStreamU8Test, ReadU16) { + uint16_t u16 = 0; + ASSERT_FALSE(stream.eof()); + stream >> u16; + ASSERT_TRUE(stream.eof()); + EXPECT_EQ(0U, u16); +} + +TEST_F(BinaryStreamU8Test, ReadU32) { + uint32_t u32 = 0; + ASSERT_FALSE(stream.eof()); + stream >> u32; + ASSERT_TRUE(stream.eof()); + EXPECT_EQ(0U, u32); +} + +TEST_F(BinaryStreamU8Test, ReadU64) { + uint64_t u64 = 0; + ASSERT_FALSE(stream.eof()); + stream >> u64; + ASSERT_TRUE(stream.eof()); + EXPECT_EQ(0U, u64); +} + +TEST_F(BinaryStreamU8Test, ReadString) { + string s(""); + ASSERT_FALSE(stream.eof()); + stream >> s; + ASSERT_TRUE(stream.eof()); + EXPECT_EQ("", s); +} + + +TEST(BinaryStreamTest, InitWithData) { + const char *data = "abcd"; + binarystream stream(data); + uint8_t a, b, c, d; + stream >> a >> b >> c >> d; + ASSERT_FALSE(stream.eof()); + EXPECT_EQ('a', a); + EXPECT_EQ('b', b); + EXPECT_EQ('c', c); + EXPECT_EQ('d', d); +} + +TEST(BinaryStreamTest, InitWithDataLeadingNull) { + const char *data = "\0abcd"; + binarystream stream(data, 5); + uint8_t z, a, b, c, d; + stream >> z >> a >> b >> c >> d; + ASSERT_FALSE(stream.eof()); + EXPECT_EQ(0U, z); + EXPECT_EQ('a', a); + EXPECT_EQ('b', b); + EXPECT_EQ('c', c); + EXPECT_EQ('d', d); +} + +TEST(BinaryStreamTest, InitWithDataVector) { + vector data; + data.push_back('a'); + data.push_back('b'); + data.push_back('c'); + data.push_back('d'); + data.push_back('e'); + data.resize(4); + binarystream stream(&data[0], data.size()); + uint8_t a, b, c, d; + stream >> a >> b >> c >> d; + ASSERT_FALSE(stream.eof()); + EXPECT_EQ('a', a); + EXPECT_EQ('b', b); + EXPECT_EQ('c', c); + EXPECT_EQ('d', d); +} + +} // namespace + +int main(int argc, char *argv[]) { + ::testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/call_stack.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/call_stack.cc new file mode 100644 index 0000000000..e3276716c8 --- /dev/null +++ 
b/TMessagesProj/jni/third_party/breakpad/src/processor/call_stack.cc @@ -0,0 +1,53 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// call_stack.cc: A call stack comprised of stack frames. +// +// See call_stack.h for documentation. +// +// Author: Mark Mentovai + +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/stack_frame.h" + +namespace google_breakpad { + +CallStack::~CallStack() { + Clear(); +} + +void CallStack::Clear() { + for (vector::const_iterator iterator = frames_.begin(); + iterator != frames_.end(); + ++iterator) { + delete *iterator; + } +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/cfi_frame_info-inl.h b/TMessagesProj/jni/third_party/breakpad/src/processor/cfi_frame_info-inl.h new file mode 100644 index 0000000000..7e7af0af93 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/cfi_frame_info-inl.h @@ -0,0 +1,119 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// cfi_frame_info-inl.h: Definitions for cfi_frame_info.h inlined functions. + +#ifndef PROCESSOR_CFI_FRAME_INFO_INL_H_ +#define PROCESSOR_CFI_FRAME_INFO_INL_H_ + +#include + +namespace google_breakpad { + +template +bool SimpleCFIWalker::FindCallerRegisters( + const MemoryRegion &memory, + const CFIFrameInfo &cfi_frame_info, + const RawContextType &callee_context, + int callee_validity, + RawContextType *caller_context, + int *caller_validity) const { + typedef CFIFrameInfo::RegisterValueMap ValueMap; + ValueMap callee_registers; + ValueMap caller_registers; + // Just for brevity. + typename ValueMap::const_iterator caller_none = caller_registers.end(); + + // Populate callee_registers with register values from callee_context. + for (size_t i = 0; i < map_size_; i++) { + const RegisterSet &r = register_map_[i]; + if (callee_validity & r.validity_flag) + callee_registers[r.name] = callee_context.*r.context_member; + } + + // Apply the rules, and see what register values they yield. + if (!cfi_frame_info.FindCallerRegs(callee_registers, memory, + &caller_registers)) + return false; + + // Populate *caller_context with the values the rules placed in + // caller_registers. + memset(caller_context, 0xda, sizeof(*caller_context)); + *caller_validity = 0; + for (size_t i = 0; i < map_size_; i++) { + const RegisterSet &r = register_map_[i]; + typename ValueMap::const_iterator caller_entry; + + // Did the rules provide a value for this register by its name? + caller_entry = caller_registers.find(r.name); + if (caller_entry != caller_none) { + caller_context->*r.context_member = caller_entry->second; + *caller_validity |= r.validity_flag; + continue; + } + + // Did the rules provide a value for this register under its + // alternate name? + if (r.alternate_name) { + caller_entry = caller_registers.find(r.alternate_name); + if (caller_entry != caller_none) { + caller_context->*r.context_member = caller_entry->second; + *caller_validity |= r.validity_flag; + continue; + } + } + + // Is this a callee-saves register? The walker assumes that these + // still hold the caller's value if the CFI doesn't mention them. + // + // Note that other frame walkers may fail to recover callee-saves + // registers; for example, the x86 "traditional" strategy only + // recovers %eip, %esp, and %ebp, even though %ebx, %esi, and %edi + // are callee-saves, too. It is not correct to blindly set the + // valid bit for all callee-saves registers, without first + // checking its validity bit in the callee. + if (r.callee_saves && (callee_validity & r.validity_flag) != 0) { + caller_context->*r.context_member = callee_context.*r.context_member; + *caller_validity |= r.validity_flag; + continue; + } + + // Otherwise, the register's value is unknown. 
+ } + + return true; +} + +} // namespace google_breakpad + +#endif // PROCESSOR_CFI_FRAME_INFO_INL_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/cfi_frame_info.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/cfi_frame_info.cc new file mode 100644 index 0000000000..5106ba094a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/cfi_frame_info.cc @@ -0,0 +1,186 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// cfi_frame_info.cc: Implementation of CFIFrameInfo class. +// See cfi_frame_info.h for details. + +#include "processor/cfi_frame_info.h" + +#include + +#include + +#include "common/scoped_ptr.h" +#include "processor/postfix_evaluator-inl.h" + +namespace google_breakpad { + +#ifdef _WIN32 +#define strtok_r strtok_s +#endif + +template +bool CFIFrameInfo::FindCallerRegs(const RegisterValueMap ®isters, + const MemoryRegion &memory, + RegisterValueMap *caller_registers) const { + // If there are not rules for both .ra and .cfa in effect at this address, + // don't use this CFI data for stack walking. + if (cfa_rule_.empty() || ra_rule_.empty()) + return false; + + RegisterValueMap working; + PostfixEvaluator evaluator(&working, &memory); + + caller_registers->clear(); + + // First, compute the CFA. + V cfa; + working = registers; + if (!evaluator.EvaluateForValue(cfa_rule_, &cfa)) + return false; + + // Then, compute the return address. + V ra; + working = registers; + working[".cfa"] = cfa; + if (!evaluator.EvaluateForValue(ra_rule_, &ra)) + return false; + + // Now, compute values for all the registers register_rules_ mentions. 
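+  // For example, given a typical x86-style rule set (hypothetical values)
+  //     .cfa: $esp 8 +   .ra: .cfa 4 - ^   $ebp: .cfa 8 - ^
+  // the loop below evaluates the "$ebp" rule with ".cfa" already bound,
+  // dereferencing the word at cfa - 8 to recover the caller's saved %ebp.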
+ for (RuleMap::const_iterator it = register_rules_.begin(); + it != register_rules_.end(); it++) { + V value; + working = registers; + working[".cfa"] = cfa; + if (!evaluator.EvaluateForValue(it->second, &value)) + return false; + (*caller_registers)[it->first] = value; + } + + (*caller_registers)[".ra"] = ra; + (*caller_registers)[".cfa"] = cfa; + + return true; +} + +// Explicit instantiations for 32-bit and 64-bit architectures. +template bool CFIFrameInfo::FindCallerRegs( + const RegisterValueMap ®isters, + const MemoryRegion &memory, + RegisterValueMap *caller_registers) const; +template bool CFIFrameInfo::FindCallerRegs( + const RegisterValueMap ®isters, + const MemoryRegion &memory, + RegisterValueMap *caller_registers) const; + +string CFIFrameInfo::Serialize() const { + std::ostringstream stream; + + if (!cfa_rule_.empty()) { + stream << ".cfa: " << cfa_rule_; + } + if (!ra_rule_.empty()) { + if (static_cast(stream.tellp()) != 0) + stream << " "; + stream << ".ra: " << ra_rule_; + } + for (RuleMap::const_iterator iter = register_rules_.begin(); + iter != register_rules_.end(); + ++iter) { + if (static_cast(stream.tellp()) != 0) + stream << " "; + stream << iter->first << ": " << iter->second; + } + + return stream.str(); +} + +bool CFIRuleParser::Parse(const string &rule_set) { + size_t rule_set_len = rule_set.size(); + scoped_array working_copy(new char[rule_set_len + 1]); + memcpy(working_copy.get(), rule_set.data(), rule_set_len); + working_copy[rule_set_len] = '\0'; + + name_.clear(); + expression_.clear(); + + char *cursor; + static const char token_breaks[] = " \t\r\n"; + char *token = strtok_r(working_copy.get(), token_breaks, &cursor); + + for (;;) { + // End of rule set? + if (!token) return Report(); + + // Register/pseudoregister name? + size_t token_len = strlen(token); + if (token_len >= 1 && token[token_len - 1] == ':') { + // Names can't be empty. + if (token_len < 2) return false; + // If there is any pending content, report it. + if (!name_.empty() || !expression_.empty()) { + if (!Report()) return false; + } + name_.assign(token, token_len - 1); + expression_.clear(); + } else { + // Another expression component. + assert(token_len > 0); // strtok_r guarantees this, I think. + if (!expression_.empty()) + expression_ += ' '; + expression_ += token; + } + token = strtok_r(NULL, token_breaks, &cursor); + } +} + +bool CFIRuleParser::Report() { + if (name_.empty() || expression_.empty()) return false; + if (name_ == ".cfa") handler_->CFARule(expression_); + else if (name_ == ".ra") handler_->RARule(expression_); + else handler_->RegisterRule(name_, expression_); + return true; +} + +void CFIFrameInfoParseHandler::CFARule(const string &expression) { + frame_info_->SetCFARule(expression); +} + +void CFIFrameInfoParseHandler::RARule(const string &expression) { + frame_info_->SetRARule(expression); +} + +void CFIFrameInfoParseHandler::RegisterRule(const string &name, + const string &expression) { + frame_info_->SetRegisterRule(name, expression); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/cfi_frame_info.h b/TMessagesProj/jni/third_party/breakpad/src/processor/cfi_frame_info.h new file mode 100644 index 0000000000..bba2978ebd --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/cfi_frame_info.h @@ -0,0 +1,275 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2010, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// cfi_frame_info.h: Define the CFIFrameInfo class, which holds the +// set of 'STACK CFI'-derived register recovery rules that apply at a +// given instruction. + +#ifndef PROCESSOR_CFI_FRAME_INFO_H_ +#define PROCESSOR_CFI_FRAME_INFO_H_ + +#include +#include + +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" + +namespace google_breakpad { + +using std::map; + +class MemoryRegion; + +// A set of rules for recovering the calling frame's registers' +// values, when the PC is at a given address in the current frame's +// function. See the description of 'STACK CFI' records at: +// +// http://code.google.com/p/google-breakpad/wiki/SymbolFiles +// +// To prepare an instance of CFIFrameInfo for use at a given +// instruction, first populate it with the rules from the 'STACK CFI +// INIT' record that covers that instruction, and then apply the +// changes given by the 'STACK CFI' records up to our instruction's +// address. Then, use the FindCallerRegs member function to apply the +// rules to the callee frame's register values, yielding the caller +// frame's register values. +class CFIFrameInfo { + public: + // A map from register names onto values. + template class RegisterValueMap: + public map { }; + + // Set the expression for computing a call frame address, return + // address, or register's value. At least the CFA rule and the RA + // rule must be set before calling FindCallerRegs. + void SetCFARule(const string &expression) { cfa_rule_ = expression; } + void SetRARule(const string &expression) { ra_rule_ = expression; } + void SetRegisterRule(const string ®ister_name, const string &expression) { + register_rules_[register_name] = expression; + } + + // Compute the values of the calling frame's registers, according to + // this rule set. Use ValueType in expression evaluation; this + // should be uint32_t on machines with 32-bit addresses, or + // uint64_t on machines with 64-bit addresses. 
+ // + // Return true on success, false otherwise. + // + // MEMORY provides access to the contents of the stack. REGISTERS is + // a dictionary mapping the names of registers whose values are + // known in the current frame to their values. CALLER_REGISTERS is + // populated with the values of the recoverable registers in the + // frame that called the current frame. + // + // In addition, CALLER_REGISTERS[".ra"] will be the return address, + // and CALLER_REGISTERS[".cfa"] will be the call frame address. + // These may be helpful in computing the caller's PC and stack + // pointer, if their values are not explicitly specified. + template + bool FindCallerRegs(const RegisterValueMap ®isters, + const MemoryRegion &memory, + RegisterValueMap *caller_registers) const; + + // Serialize the rules in this object into a string in the format + // of STACK CFI records. + string Serialize() const; + + private: + + // A map from register names onto evaluation rules. + typedef map RuleMap; + + // In this type, a "postfix expression" is an expression of the sort + // interpreted by google_breakpad::PostfixEvaluator. + + // A postfix expression for computing the current frame's CFA (call + // frame address). The CFA is a reference address for the frame that + // remains unchanged throughout the frame's lifetime. You should + // evaluate this expression with a dictionary initially populated + // with the values of the current frame's known registers. + string cfa_rule_; + + // The following expressions should be evaluated with a dictionary + // initially populated with the values of the current frame's known + // registers, and with ".cfa" set to the result of evaluating the + // cfa_rule expression, above. + + // A postfix expression for computing the current frame's return + // address. + string ra_rule_; + + // For a register named REG, rules[REG] is a postfix expression + // which leaves the value of REG in the calling frame on the top of + // the stack. You should evaluate this expression + RuleMap register_rules_; +}; + +// A parser for STACK CFI-style rule sets. +// This may seem bureaucratic: there's no legitimate run-time reason +// to use a parser/handler pattern for this, as it's not a likely +// reuse boundary. But doing so makes finer-grained unit testing +// possible. +class CFIRuleParser { + public: + + class Handler { + public: + Handler() { } + virtual ~Handler() { } + + // The input specifies EXPRESSION as the CFA/RA computation rule. + virtual void CFARule(const string &expression) = 0; + virtual void RARule(const string &expression) = 0; + + // The input specifies EXPRESSION as the recovery rule for register NAME. + virtual void RegisterRule(const string &name, const string &expression) = 0; + }; + + // Construct a parser which feeds its results to HANDLER. + CFIRuleParser(Handler *handler) : handler_(handler) { } + + // Parse RULE_SET as a set of CFA computation and RA/register + // recovery rules, as appearing in STACK CFI records. Report the + // results of parsing by making the appropriate calls to handler_. + // Return true if parsing was successful, false otherwise. + bool Parse(const string &rule_set); + + private: + // Report any accumulated rule to handler_ + bool Report(); + + // The handler to which the parser reports its findings. + Handler *handler_; + + // Working data. + string name_, expression_; +}; + +// A handler for rule set parsing that populates a CFIFrameInfo with +// the results. 
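+// For instance (an illustrative sketch, not part of the upstream comments),
+// turning one STACK CFI rule string into a populated CFIFrameInfo looks
+// roughly like:
+//
+//   CFIFrameInfo frame_info;
+//   CFIFrameInfoParseHandler handler(&frame_info);
+//   CFIRuleParser parser(&handler);
+//   if (!parser.Parse(".cfa: $esp 4 + .ra: .cfa 4 - ^"))
+//     return false;  // malformed rule set
+//   // frame_info now carries a CFA rule and an RA rule.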
+class CFIFrameInfoParseHandler: public CFIRuleParser::Handler { + public: + // Populate FRAME_INFO with the results of parsing. + CFIFrameInfoParseHandler(CFIFrameInfo *frame_info) + : frame_info_(frame_info) { } + + void CFARule(const string &expression); + void RARule(const string &expression); + void RegisterRule(const string &name, const string &expression); + + private: + CFIFrameInfo *frame_info_; +}; + +// A utility class template for simple 'STACK CFI'-driven stack walkers. +// Given a CFIFrameInfo instance, a table describing the architecture's +// register set, and a context holding the last frame's registers, an +// instance of this class can populate a new context with the caller's +// registers. +// +// This class template doesn't use any internal knowledge of CFIFrameInfo +// or the other stack walking structures; it just uses the public interface +// of CFIFrameInfo to do the usual things. But the logic it handles should +// be common to many different architectures' stack walkers, so wrapping it +// up in a class should allow the walkers to share code. +// +// RegisterType should be the type of this architecture's registers, either +// uint32_t or uint64_t. RawContextType should be the raw context +// structure type for this architecture. +template +class SimpleCFIWalker { + public: + // A structure describing one architecture register. + struct RegisterSet { + // The register name, as it appears in STACK CFI rules. + const char *name; + + // An alternate name that the register's value might be found + // under in a register value dictionary, or NULL. When generating + // names, prefer NAME to this value. It's common to list ".cfa" as + // an alternative name for the stack pointer, and ".ra" as an + // alternative name for the instruction pointer. + const char *alternate_name; + + // True if the callee is expected to preserve the value of this + // register. If this flag is true for some register R, and the STACK + // CFI records provide no rule to recover R, then SimpleCFIWalker + // assumes that the callee has not changed R's value, and the caller's + // value for R is that currently in the callee's context. + bool callee_saves; + + // The ContextValidity flag representing the register's presence. + int validity_flag; + + // A pointer to the RawContextType member that holds the + // register's value. + RegisterType RawContextType::*context_member; + }; + + // Create a simple CFI-based frame walker, given a description of the + // architecture's register set. REGISTER_MAP is an array of + // RegisterSet structures; MAP_SIZE is the number of elements in the + // array. + SimpleCFIWalker(const RegisterSet *register_map, size_t map_size) + : register_map_(register_map), map_size_(map_size) { } + + // Compute the calling frame's raw context given the callee's raw + // context. + // + // Given: + // + // - MEMORY, holding the stack's contents, + // - CFI_FRAME_INFO, describing the called function, + // - CALLEE_CONTEXT, holding the called frame's registers, and + // - CALLEE_VALIDITY, indicating which registers in CALLEE_CONTEXT are valid, + // + // fill in CALLER_CONTEXT with the caller's register values, and set + // CALLER_VALIDITY to indicate which registers are valid in + // CALLER_CONTEXT. Return true on success, or false on failure. 
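+  // (Per the walker implementation, a caller register ends up marked valid
+  // only if a CFI rule recovered it, or if it is a callee-saves register
+  // whose value was valid in the callee and is passed through unchanged;
+  // everything else is left out of CALLER_VALIDITY.)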
+ bool FindCallerRegisters(const MemoryRegion &memory, + const CFIFrameInfo &cfi_frame_info, + const RawContextType &callee_context, + int callee_validity, + RawContextType *caller_context, + int *caller_validity) const; + + private: + const RegisterSet *register_map_; + size_t map_size_; +}; + +} // namespace google_breakpad + +#include "cfi_frame_info-inl.h" + +#endif // PROCESSOR_CFI_FRAME_INFO_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/cfi_frame_info_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/cfi_frame_info_unittest.cc new file mode 100644 index 0000000000..542b28492e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/cfi_frame_info_unittest.cc @@ -0,0 +1,546 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// cfi_frame_info_unittest.cc: Unit tests for CFIFrameInfo, +// CFIRuleParser, CFIFrameInfoParseHandler, and SimpleCFIWalker. + +#include + +#include "breakpad_googletest_includes.h" +#include "common/using_std_string.h" +#include "processor/cfi_frame_info.h" +#include "google_breakpad/processor/memory_region.h" + +using google_breakpad::CFIFrameInfo; +using google_breakpad::CFIFrameInfoParseHandler; +using google_breakpad::CFIRuleParser; +using google_breakpad::MemoryRegion; +using google_breakpad::SimpleCFIWalker; +using testing::_; +using testing::A; +using testing::AtMost; +using testing::DoAll; +using testing::Return; +using testing::SetArgumentPointee; +using testing::Test; + +class MockMemoryRegion: public MemoryRegion { + public: + MOCK_CONST_METHOD0(GetBase, uint64_t()); + MOCK_CONST_METHOD0(GetSize, uint32_t()); + MOCK_CONST_METHOD2(GetMemoryAtAddress, bool(uint64_t, uint8_t *)); + MOCK_CONST_METHOD2(GetMemoryAtAddress, bool(uint64_t, uint16_t *)); + MOCK_CONST_METHOD2(GetMemoryAtAddress, bool(uint64_t, uint32_t *)); + MOCK_CONST_METHOD2(GetMemoryAtAddress, bool(uint64_t, uint64_t *)); + MOCK_CONST_METHOD0(Print, void()); +}; + +// Handy definitions for all tests. 
+struct CFIFixture { + + // Set up the mock memory object to expect no references. + void ExpectNoMemoryReferences() { + EXPECT_CALL(memory, GetBase()).Times(0); + EXPECT_CALL(memory, GetSize()).Times(0); + EXPECT_CALL(memory, GetMemoryAtAddress(_, A())).Times(0); + EXPECT_CALL(memory, GetMemoryAtAddress(_, A())).Times(0); + EXPECT_CALL(memory, GetMemoryAtAddress(_, A())).Times(0); + EXPECT_CALL(memory, GetMemoryAtAddress(_, A())).Times(0); + } + + CFIFrameInfo cfi; + MockMemoryRegion memory; + CFIFrameInfo::RegisterValueMap registers, caller_registers; +}; + +class Simple: public CFIFixture, public Test { }; + +// FindCallerRegs should fail if no .cfa rule is provided. +TEST_F(Simple, NoCFA) { + ExpectNoMemoryReferences(); + + cfi.SetRARule("0"); + ASSERT_FALSE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); + ASSERT_EQ(".ra: 0", cfi.Serialize()); +} + +// FindCallerRegs should fail if no .ra rule is provided. +TEST_F(Simple, NoRA) { + ExpectNoMemoryReferences(); + + cfi.SetCFARule("0"); + ASSERT_FALSE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); + ASSERT_EQ(".cfa: 0", cfi.Serialize()); +} + +TEST_F(Simple, SetCFAAndRARule) { + ExpectNoMemoryReferences(); + + cfi.SetCFARule("330903416631436410"); + cfi.SetRARule("5870666104170902211"); + ASSERT_TRUE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); + ASSERT_EQ(2U, caller_registers.size()); + ASSERT_EQ(330903416631436410ULL, caller_registers[".cfa"]); + ASSERT_EQ(5870666104170902211ULL, caller_registers[".ra"]); + + ASSERT_EQ(".cfa: 330903416631436410 .ra: 5870666104170902211", + cfi.Serialize()); +} + +TEST_F(Simple, SetManyRules) { + ExpectNoMemoryReferences(); + + cfi.SetCFARule("$temp1 68737028 = $temp2 61072337 = $temp1 $temp2 -"); + cfi.SetRARule(".cfa 99804755 +"); + cfi.SetRegisterRule("register1", ".cfa 54370437 *"); + cfi.SetRegisterRule("vodkathumbscrewingly", "24076308 .cfa +"); + cfi.SetRegisterRule("pubvexingfjordschmaltzy", ".cfa 29801007 -"); + cfi.SetRegisterRule("uncopyrightables", "92642917 .cfa /"); + ASSERT_TRUE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); + ASSERT_EQ(6U, caller_registers.size()); + ASSERT_EQ(7664691U, caller_registers[".cfa"]); + ASSERT_EQ(107469446U, caller_registers[".ra"]); + ASSERT_EQ(416732599139967ULL, caller_registers["register1"]); + ASSERT_EQ(31740999U, caller_registers["vodkathumbscrewingly"]); + ASSERT_EQ(-22136316ULL, caller_registers["pubvexingfjordschmaltzy"]); + ASSERT_EQ(12U, caller_registers["uncopyrightables"]); + ASSERT_EQ(".cfa: $temp1 68737028 = $temp2 61072337 = $temp1 $temp2 - " + ".ra: .cfa 99804755 + " + "pubvexingfjordschmaltzy: .cfa 29801007 - " + "register1: .cfa 54370437 * " + "uncopyrightables: 92642917 .cfa / " + "vodkathumbscrewingly: 24076308 .cfa +", + cfi.Serialize()); +} + +TEST_F(Simple, RulesOverride) { + ExpectNoMemoryReferences(); + + cfi.SetCFARule("330903416631436410"); + cfi.SetRARule("5870666104170902211"); + cfi.SetCFARule("2828089117179001"); + ASSERT_TRUE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); + ASSERT_EQ(2U, caller_registers.size()); + ASSERT_EQ(2828089117179001ULL, caller_registers[".cfa"]); + ASSERT_EQ(5870666104170902211ULL, caller_registers[".ra"]); + ASSERT_EQ(".cfa: 2828089117179001 .ra: 5870666104170902211", + cfi.Serialize()); +} + +class Scope: public CFIFixture, public Test { }; + +// There should be no value for .cfa in scope when evaluating the CFA rule. 
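+// (FindCallerRegs evaluates the CFA rule first, against only the callee's
+// register values; ".cfa" is bound afterwards, for the benefit of the .ra
+// and register rules. The Scope tests below pin that ordering down.)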
+TEST_F(Scope, CFALacksCFA) { + ExpectNoMemoryReferences(); + + cfi.SetCFARule(".cfa"); + cfi.SetRARule("0"); + ASSERT_FALSE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); +} + +// There should be no value for .ra in scope when evaluating the CFA rule. +TEST_F(Scope, CFALacksRA) { + ExpectNoMemoryReferences(); + + cfi.SetCFARule(".ra"); + cfi.SetRARule("0"); + ASSERT_FALSE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); +} + +// The current frame's registers should be in scope when evaluating +// the CFA rule. +TEST_F(Scope, CFASeesCurrentRegs) { + ExpectNoMemoryReferences(); + + registers[".baraminology"] = 0x06a7bc63e4f13893ULL; + registers[".ornithorhynchus"] = 0x5e0bf850bafce9d2ULL; + cfi.SetCFARule(".baraminology .ornithorhynchus +"); + cfi.SetRARule("0"); + ASSERT_TRUE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); + ASSERT_EQ(2U, caller_registers.size()); + ASSERT_EQ(0x06a7bc63e4f13893ULL + 0x5e0bf850bafce9d2ULL, + caller_registers[".cfa"]); +} + +// .cfa should be in scope in the return address expression. +TEST_F(Scope, RASeesCFA) { + ExpectNoMemoryReferences(); + + cfi.SetCFARule("48364076"); + cfi.SetRARule(".cfa"); + ASSERT_TRUE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); + ASSERT_EQ(2U, caller_registers.size()); + ASSERT_EQ(48364076U, caller_registers[".ra"]); +} + +// There should be no value for .ra in scope when evaluating the CFA rule. +TEST_F(Scope, RALacksRA) { + ExpectNoMemoryReferences(); + + cfi.SetCFARule("0"); + cfi.SetRARule(".ra"); + ASSERT_FALSE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); +} + +// The current frame's registers should be in scope in the return +// address expression. +TEST_F(Scope, RASeesCurrentRegs) { + ExpectNoMemoryReferences(); + + registers["noachian"] = 0x54dc4a5d8e5eb503ULL; + cfi.SetCFARule("10359370"); + cfi.SetRARule("noachian"); + ASSERT_TRUE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); + ASSERT_EQ(2U, caller_registers.size()); + ASSERT_EQ(0x54dc4a5d8e5eb503ULL, caller_registers[".ra"]); +} + +// .cfa should be in scope for register rules. +TEST_F(Scope, RegistersSeeCFA) { + ExpectNoMemoryReferences(); + + cfi.SetCFARule("6515179"); + cfi.SetRARule(".cfa"); + cfi.SetRegisterRule("rogerian", ".cfa"); + ASSERT_TRUE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); + ASSERT_EQ(3U, caller_registers.size()); + ASSERT_EQ(6515179U, caller_registers["rogerian"]); +} + +// The return address should not be in scope for register rules. +TEST_F(Scope, RegsLackRA) { + ExpectNoMemoryReferences(); + + cfi.SetCFARule("42740329"); + cfi.SetRARule("27045204"); + cfi.SetRegisterRule("$r1", ".ra"); + ASSERT_FALSE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); +} + +// Register rules can see the current frame's register values. +TEST_F(Scope, RegsSeeRegs) { + ExpectNoMemoryReferences(); + + registers["$r1"] = 0x6ed3582c4bedb9adULL; + registers["$r2"] = 0xd27d9e742b8df6d0ULL; + cfi.SetCFARule("88239303"); + cfi.SetRARule("30503835"); + cfi.SetRegisterRule("$r1", "$r1 42175211 = $r2"); + cfi.SetRegisterRule("$r2", "$r2 21357221 = $r1"); + ASSERT_TRUE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); + ASSERT_EQ(4U, caller_registers.size()); + ASSERT_EQ(0xd27d9e742b8df6d0ULL, caller_registers["$r1"]); + ASSERT_EQ(0x6ed3582c4bedb9adULL, caller_registers["$r2"]); +} + +// Each rule's temporaries are separate. 
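+// (Each rule is evaluated against a fresh copy of the callee's register
+// map, so a $temp bound while computing .cfa is not visible to the .ra
+// rule.)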
+TEST_F(Scope, SeparateTempsRA) { + ExpectNoMemoryReferences(); + + cfi.SetCFARule("$temp1 76569129 = $temp1"); + cfi.SetRARule("0"); + ASSERT_TRUE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); + + cfi.SetCFARule("$temp1 76569129 = $temp1"); + cfi.SetRARule("$temp1"); + ASSERT_FALSE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); +} + +class MockCFIRuleParserHandler: public CFIRuleParser::Handler { + public: + MOCK_METHOD1(CFARule, void(const string &)); + MOCK_METHOD1(RARule, void(const string &)); + MOCK_METHOD2(RegisterRule, void(const string &, const string &)); +}; + +// A fixture class for testing CFIRuleParser. +class CFIParserFixture { + public: + CFIParserFixture() : parser(&mock_handler) { + // Expect no parsing results to be reported to mock_handler. Individual + // tests can override this. + EXPECT_CALL(mock_handler, CFARule(_)).Times(0); + EXPECT_CALL(mock_handler, RARule(_)).Times(0); + EXPECT_CALL(mock_handler, RegisterRule(_, _)).Times(0); + } + + MockCFIRuleParserHandler mock_handler; + CFIRuleParser parser; +}; + +class Parser: public CFIParserFixture, public Test { }; + +TEST_F(Parser, Empty) { + EXPECT_FALSE(parser.Parse("")); +} + +TEST_F(Parser, LoneColon) { + EXPECT_FALSE(parser.Parse(":")); +} + +TEST_F(Parser, CFANoExpr) { + EXPECT_FALSE(parser.Parse(".cfa:")); +} + +TEST_F(Parser, CFANoColonNoExpr) { + EXPECT_FALSE(parser.Parse(".cfa")); +} + +TEST_F(Parser, RANoExpr) { + EXPECT_FALSE(parser.Parse(".ra:")); +} + +TEST_F(Parser, RANoColonNoExpr) { + EXPECT_FALSE(parser.Parse(".ra")); +} + +TEST_F(Parser, RegNoExpr) { + EXPECT_FALSE(parser.Parse("reg:")); +} + +TEST_F(Parser, NoName) { + EXPECT_FALSE(parser.Parse("expr")); +} + +TEST_F(Parser, NoNameTwo) { + EXPECT_FALSE(parser.Parse("expr1 expr2")); +} + +TEST_F(Parser, StartsWithExpr) { + EXPECT_FALSE(parser.Parse("expr1 reg: expr2")); +} + +TEST_F(Parser, CFA) { + EXPECT_CALL(mock_handler, CFARule("spleen")).WillOnce(Return()); + EXPECT_TRUE(parser.Parse(".cfa: spleen")); +} + +TEST_F(Parser, RA) { + EXPECT_CALL(mock_handler, RARule("notoriety")).WillOnce(Return()); + EXPECT_TRUE(parser.Parse(".ra: notoriety")); +} + +TEST_F(Parser, Reg) { + EXPECT_CALL(mock_handler, RegisterRule("nemo", "mellifluous")) + .WillOnce(Return()); + EXPECT_TRUE(parser.Parse("nemo: mellifluous")); +} + +TEST_F(Parser, CFARARegs) { + EXPECT_CALL(mock_handler, CFARule("cfa expression")).WillOnce(Return()); + EXPECT_CALL(mock_handler, RARule("ra expression")).WillOnce(Return()); + EXPECT_CALL(mock_handler, RegisterRule("galba", "praetorian")) + .WillOnce(Return()); + EXPECT_CALL(mock_handler, RegisterRule("otho", "vitellius")) + .WillOnce(Return()); + EXPECT_TRUE(parser.Parse(".cfa: cfa expression .ra: ra expression " + "galba: praetorian otho: vitellius")); +} + +TEST_F(Parser, Whitespace) { + EXPECT_CALL(mock_handler, RegisterRule("r1", "r1 expression")) + .WillOnce(Return()); + EXPECT_CALL(mock_handler, RegisterRule("r2", "r2 expression")) + .WillOnce(Return()); + EXPECT_TRUE(parser.Parse(" r1:\tr1\nexpression \tr2:\t\rr2\r\n " + "expression \n")); +} + +TEST_F(Parser, WhitespaceLoneColon) { + EXPECT_FALSE(parser.Parse(" \n:\t ")); +} + +TEST_F(Parser, EmptyName) { + EXPECT_CALL(mock_handler, RegisterRule("reg", _)) + .Times(AtMost(1)) + .WillRepeatedly(Return()); + EXPECT_FALSE(parser.Parse("reg: expr1 : expr2")); +} + +TEST_F(Parser, RuleLoneColon) { + EXPECT_CALL(mock_handler, RegisterRule("r1", "expr")) + .Times(AtMost(1)) + .WillRepeatedly(Return()); + EXPECT_FALSE(parser.Parse(" r1: expr :")); +} + 
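+// An illustrative extra case (not from the upstream Breakpad suite): a
+// typical x86 rule string should split into a CFA rule and an RA rule, with
+// the postfix expressions passed through verbatim.
+TEST_F(Parser, CanonicalX86RuleSet) {
+  EXPECT_CALL(mock_handler, CFARule("$esp 4 +")).WillOnce(Return());
+  EXPECT_CALL(mock_handler, RARule(".cfa 4 - ^")).WillOnce(Return());
+  EXPECT_TRUE(parser.Parse(".cfa: $esp 4 + .ra: .cfa 4 - ^"));
+}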
+TEST_F(Parser, RegNoExprRule) { + EXPECT_CALL(mock_handler, RegisterRule("r1", "expr")) + .Times(AtMost(1)) + .WillRepeatedly(Return()); + EXPECT_FALSE(parser.Parse("r0: r1: expr")); +} + +class ParseHandlerFixture: public CFIFixture { + public: + ParseHandlerFixture() : CFIFixture(), handler(&cfi) { } + CFIFrameInfoParseHandler handler; +}; + +class ParseHandler: public ParseHandlerFixture, public Test { }; + +TEST_F(ParseHandler, CFARARule) { + handler.CFARule("reg-for-cfa"); + handler.RARule("reg-for-ra"); + registers["reg-for-cfa"] = 0x268a9a4a3821a797ULL; + registers["reg-for-ra"] = 0x6301b475b8b91c02ULL; + ASSERT_TRUE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); + ASSERT_EQ(0x268a9a4a3821a797ULL, caller_registers[".cfa"]); + ASSERT_EQ(0x6301b475b8b91c02ULL, caller_registers[".ra"]); +} + +TEST_F(ParseHandler, RegisterRules) { + handler.CFARule("reg-for-cfa"); + handler.RARule("reg-for-ra"); + handler.RegisterRule("reg1", "reg-for-reg1"); + handler.RegisterRule("reg2", "reg-for-reg2"); + registers["reg-for-cfa"] = 0x268a9a4a3821a797ULL; + registers["reg-for-ra"] = 0x6301b475b8b91c02ULL; + registers["reg-for-reg1"] = 0x06cde8e2ff062481ULL; + registers["reg-for-reg2"] = 0xff0c4f76403173e2ULL; + ASSERT_TRUE(cfi.FindCallerRegs(registers, memory, + &caller_registers)); + ASSERT_EQ(0x268a9a4a3821a797ULL, caller_registers[".cfa"]); + ASSERT_EQ(0x6301b475b8b91c02ULL, caller_registers[".ra"]); + ASSERT_EQ(0x06cde8e2ff062481ULL, caller_registers["reg1"]); + ASSERT_EQ(0xff0c4f76403173e2ULL, caller_registers["reg2"]); +} + +struct SimpleCFIWalkerFixture { + struct RawContext { + uint64_t r0, r1, r2, r3, r4, sp, pc; + }; + enum Validity { + R0_VALID = 0x01, + R1_VALID = 0x02, + R2_VALID = 0x04, + R3_VALID = 0x08, + R4_VALID = 0x10, + SP_VALID = 0x20, + PC_VALID = 0x40 + }; + typedef SimpleCFIWalker CFIWalker; + + SimpleCFIWalkerFixture() + : walker(register_map, + sizeof(register_map) / sizeof(register_map[0])) { } + + static CFIWalker::RegisterSet register_map[7]; + CFIFrameInfo call_frame_info; + CFIWalker walker; + MockMemoryRegion memory; + RawContext callee_context, caller_context; +}; + +SimpleCFIWalkerFixture::CFIWalker::RegisterSet +SimpleCFIWalkerFixture::register_map[7] = { + { "r0", NULL, true, R0_VALID, &RawContext::r0 }, + { "r1", NULL, true, R1_VALID, &RawContext::r1 }, + { "r2", NULL, false, R2_VALID, &RawContext::r2 }, + { "r3", NULL, false, R3_VALID, &RawContext::r3 }, + { "r4", NULL, true, R4_VALID, &RawContext::r4 }, + { "sp", ".cfa", true, SP_VALID, &RawContext::sp }, + { "pc", ".ra", true, PC_VALID, &RawContext::pc }, +}; + +class SimpleWalker: public SimpleCFIWalkerFixture, public Test { }; + +TEST_F(SimpleWalker, Walk) { + // Stack_top is the current stack pointer, pointing to the lowest + // address of a frame that looks like this (all 64-bit words): + // + // sp -> saved r0 + // garbage + // return address + // cfa -> + // + // r0 has been saved on the stack. + // r1 has been saved in r2. + // r2 and r3 are not recoverable. + // r4 is not recoverable, even though it is a callee-saves register. + // Some earlier frame's unwinder must have failed to recover it. + + uint64_t stack_top = 0x83254944b20d5512ULL; + + // Saved r0. + EXPECT_CALL(memory, + GetMemoryAtAddress(stack_top, A())) + .WillRepeatedly(DoAll(SetArgumentPointee<1>(0xdc1975eba8602302ULL), + Return(true))); + // Saved return address. 
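+  // With the "sp 24 +" CFA rule set below, .cfa == stack_top + 24, so the
+  // ".cfa 8 - ^" return-address rule reads the word at stack_top + 16
+  // mocked here (and the ".cfa 24 - ^" r0 rule reads stack_top itself).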
+ EXPECT_CALL(memory, + GetMemoryAtAddress(stack_top + 16, A())) + .WillRepeatedly(DoAll(SetArgumentPointee<1>(0xba5ad6d9acce28deULL), + Return(true))); + + call_frame_info.SetCFARule("sp 24 +"); + call_frame_info.SetRARule(".cfa 8 - ^"); + call_frame_info.SetRegisterRule("r0", ".cfa 24 - ^"); + call_frame_info.SetRegisterRule("r1", "r2"); + + callee_context.r0 = 0x94e030ca79edd119ULL; + callee_context.r1 = 0x937b4d7e95ce52d9ULL; + callee_context.r2 = 0x5fe0027416b8b62aULL; // caller's r1 + // callee_context.r3 is not valid in callee. + // callee_context.r4 is not valid in callee. + callee_context.sp = stack_top; + callee_context.pc = 0x25b21b224311d280ULL; + int callee_validity = R0_VALID | R1_VALID | R2_VALID | SP_VALID | PC_VALID; + + memset(&caller_context, 0, sizeof(caller_context)); + + int caller_validity; + EXPECT_TRUE(walker.FindCallerRegisters(memory, call_frame_info, + callee_context, callee_validity, + &caller_context, &caller_validity)); + EXPECT_EQ(R0_VALID | R1_VALID | SP_VALID | PC_VALID, caller_validity); + EXPECT_EQ(0xdc1975eba8602302ULL, caller_context.r0); + EXPECT_EQ(0x5fe0027416b8b62aULL, caller_context.r1); + EXPECT_EQ(stack_top + 24, caller_context.sp); + EXPECT_EQ(0xba5ad6d9acce28deULL, caller_context.pc); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/contained_range_map-inl.h b/TMessagesProj/jni/third_party/breakpad/src/processor/contained_range_map-inl.h new file mode 100644 index 0000000000..4c0ad41f94 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/contained_range_map-inl.h @@ -0,0 +1,197 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// contained_range_map-inl.h: Hierarchically-organized range map implementation. +// +// See contained_range_map.h for documentation. 
+// +// Author: Mark Mentovai + +#ifndef PROCESSOR_CONTAINED_RANGE_MAP_INL_H__ +#define PROCESSOR_CONTAINED_RANGE_MAP_INL_H__ + +#include "processor/contained_range_map.h" + +#include + +#include "processor/logging.h" + + +namespace google_breakpad { + + +template +ContainedRangeMap::~ContainedRangeMap() { + // Clear frees the children pointed to by the map, and frees the map itself. + Clear(); +} + + +template +bool ContainedRangeMap::StoreRange( + const AddressType &base, const AddressType &size, const EntryType &entry) { + AddressType high = base + size - 1; + + // Check for undersize or overflow. + if (size <= 0 || high < base) { + //TODO(nealsid) We are commenting this out in order to prevent + // excessive logging. We plan to move to better logging as this + // failure happens quite often and is expected(see comment in + // basic_source_line_resolver.cc:671). + // BPLOG(INFO) << "StoreRange failed, " << HexString(base) << "+" + // << HexString(size) << ", " << HexString(high); + return false; + } + + if (!map_) + map_ = new AddressToRangeMap(); + + MapIterator iterator_base = map_->lower_bound(base); + MapIterator iterator_high = map_->lower_bound(high); + MapIterator iterator_end = map_->end(); + + if (iterator_base == iterator_high && iterator_base != iterator_end && + base >= iterator_base->second->base_) { + // The new range is entirely within an existing child range. + + // If the new range's geometry is exactly equal to an existing child + // range's, it violates the containment rules, and an attempt to store + // it must fail. iterator_base->first contains the key, which was the + // containing child's high address. + if (iterator_base->second->base_ == base && iterator_base->first == high) { + // TODO(nealsid): See the TODO above on why this is commented out. +// BPLOG(INFO) << "StoreRange failed, identical range is already " +// "present: " << HexString(base) << "+" << HexString(size); + return false; + } + + // Pass the new range on to the child to attempt to store. + return iterator_base->second->StoreRange(base, size, entry); + } + + // iterator_high might refer to an irrelevant range: one whose base address + // is higher than the new range's high address. Set contains_high to true + // only if iterator_high refers to a range that is at least partially + // within the new range. + bool contains_high = iterator_high != iterator_end && + high >= iterator_high->second->base_; + + // If the new range encompasses any existing child ranges, it must do so + // fully. Partial containment isn't allowed. + if ((iterator_base != iterator_end && base > iterator_base->second->base_) || + (contains_high && high < iterator_high->first)) { + // TODO(mmentovai): Some symbol files will trip this check frequently + // on STACK lines. Too many messages will be produced. These are more + // suitable for a DEBUG channel than an INFO channel. + // BPLOG(INFO) << "StoreRange failed, new range partially contains " + // "existing range: " << HexString(base) << "+" << + // HexString(size); + return false; + } + + // When copying and erasing contained ranges, the "end" iterator needs to + // point one past the last item of the range to copy. If contains_high is + // false, the iterator's already in the right place; the increment is safe + // because contains_high can't be true if iterator_high == iterator_end. + if (contains_high) + ++iterator_high; + + // Optimization: if the iterators are equal, no child ranges would be + // moved. 
Create the new child range with a NULL map to conserve space + // in leaf nodes, of which there will be many. + AddressToRangeMap *child_map = NULL; + + if (iterator_base != iterator_high) { + // The children of this range that are contained by the new range must + // be transferred over to the new range. Create the new child range map + // and copy the pointers to range maps it should contain into it. + child_map = new AddressToRangeMap(iterator_base, iterator_high); + + // Remove the copied child pointers from this range's map of children. + map_->erase(iterator_base, iterator_high); + } + + // Store the new range in the map by its high address. Any children that + // the new child range contains were formerly children of this range but + // are now this range's grandchildren. Ownership of these is transferred + // to the new child range. + map_->insert(MapValue(high, + new ContainedRangeMap(base, entry, child_map))); + return true; +} + + +template +bool ContainedRangeMap::RetrieveRange( + const AddressType &address, EntryType *entry) const { + BPLOG_IF(ERROR, !entry) << "ContainedRangeMap::RetrieveRange requires " + "|entry|"; + assert(entry); + + // If nothing was ever stored, then there's nothing to retrieve. + if (!map_) + return false; + + // Get an iterator to the child range whose high address is equal to or + // greater than the supplied address. If the supplied address is higher + // than all of the high addresses in the range, then this range does not + // contain a child at address, so return false. If the supplied address + // is lower than the base address of the child range, then it is not within + // the child range, so return false. + MapConstIterator iterator = map_->lower_bound(address); + if (iterator == map_->end() || address < iterator->second->base_) + return false; + + // The child in iterator->second contains the specified address. Find out + // if it has a more-specific descendant that also contains it. If it does, + // it will set |entry| appropriately. If not, set |entry| to the child. + if (!iterator->second->RetrieveRange(address, entry)) + *entry = iterator->second->entry_; + + return true; +} + + +template +void ContainedRangeMap::Clear() { + if (map_) { + MapConstIterator end = map_->end(); + for (MapConstIterator child = map_->begin(); child != end; ++child) + delete child->second; + + delete map_; + map_ = NULL; + } +} + + +} // namespace google_breakpad + + +#endif // PROCESSOR_CONTAINED_RANGE_MAP_INL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/contained_range_map.h b/TMessagesProj/jni/third_party/breakpad/src/processor/contained_range_map.h new file mode 100644 index 0000000000..1015ae8cfd --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/contained_range_map.h @@ -0,0 +1,150 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// contained_range_map.h: Hierarchically-organized range maps. +// +// A contained range map is similar to a standard range map, except it allows +// objects to be organized hierarchically. A contained range map allows +// objects to contain other objects. It is not sensitive to the order that +// objects are added to the map: larger, more general, containing objects +// may be added either before or after smaller, more specific, contained +// ones. +// +// Contained range maps guarantee that each object may only contain smaller +// objects than itself, and that a parent object may only contain child +// objects located entirely within the parent's address space. Attempts +// to introduce objects (via StoreRange) that violate these rules will fail. +// Retrieval (via RetrieveRange) always returns the most specific (smallest) +// object that contains the address being queried. Note that while it is +// not possible to insert two objects into a map that have exactly the same +// geometry (base address and size), it is possible to completely mask a +// larger object by inserting smaller objects that entirely fill the larger +// object's address space. +// +// Internally, contained range maps are implemented as a tree. Each tree +// node except for the root node describes an object in the map. Each node +// maintains its list of children in a map similar to a standard range map, +// keyed by the highest address that each child occupies. Each node's +// children occupy address ranges entirely within the node. The root node +// is the only node directly accessible to the user, and represents the +// entire address space. +// +// Author: Mark Mentovai + +#ifndef PROCESSOR_CONTAINED_RANGE_MAP_H__ +#define PROCESSOR_CONTAINED_RANGE_MAP_H__ + + +#include + + +namespace google_breakpad { + +// Forward declarations (for later friend declarations of specialized template). +template class ContainedRangeMapSerializer; + +template +class ContainedRangeMap { + public: + // The default constructor creates a ContainedRangeMap with no geometry + // and no entry, and as such is only suitable for the root node of a + // ContainedRangeMap tree. + ContainedRangeMap() : base_(), entry_(), map_(NULL) {} + + ~ContainedRangeMap(); + + // Inserts a range into the map. If the new range is encompassed by + // an existing child range, the new range is passed into the child range's + // StoreRange method. If the new range encompasses any existing child + // ranges, those child ranges are moved to the new range, becoming + // grandchildren of this ContainedRangeMap. 
Returns false for a + // parameter error, or if the ContainedRangeMap hierarchy guarantees + // would be violated. + bool StoreRange(const AddressType &base, + const AddressType &size, + const EntryType &entry); + + // Retrieves the most specific (smallest) descendant range encompassing + // the specified address. This method will only return entries held by + // child ranges, and not the entry contained by |this|. This is necessary + // to support a sparsely-populated root range. If no descendant range + // encompasses the address, returns false. + bool RetrieveRange(const AddressType &address, EntryType *entry) const; + + // Removes all children. Note that Clear only removes descendants, + // leaving the node on which it is called intact. Because the only + // meaningful things contained by a root node are descendants, this + // is sufficient to restore an entire ContainedRangeMap to its initial + // empty state when called on the root node. + void Clear(); + + private: + friend class ContainedRangeMapSerializer; + friend class ModuleComparer; + + // AddressToRangeMap stores pointers. This makes reparenting simpler in + // StoreRange, because it doesn't need to copy entire objects. + typedef std::map AddressToRangeMap; + typedef typename AddressToRangeMap::const_iterator MapConstIterator; + typedef typename AddressToRangeMap::iterator MapIterator; + typedef typename AddressToRangeMap::value_type MapValue; + + // Creates a new ContainedRangeMap with the specified base address, entry, + // and initial child map, which may be NULL. This is only used internally + // by ContainedRangeMap when it creates a new child. + ContainedRangeMap(const AddressType &base, const EntryType &entry, + AddressToRangeMap *map) + : base_(base), entry_(entry), map_(map) {} + + // The base address of this range. The high address does not need to + // be stored, because it is used as the key to an object in its parent's + // map, and all ContainedRangeMaps except for the root range are contained + // within maps. The root range does not actually contain an entry, so its + // base_ field is meaningless, and the fact that it has no parent and thus + // no key is unimportant. For this reason, the base_ field should only be + // is accessed on child ContainedRangeMap objects, and never on |this|. + const AddressType base_; + + // The entry corresponding to this range. The root range does not + // actually contain an entry, so its entry_ field is meaningless. For + // this reason, the entry_ field should only be accessed on child + // ContainedRangeMap objects, and never on |this|. + const EntryType entry_; + + // The map containing child ranges, keyed by each child range's high + // address. This is a pointer to avoid allocating map structures for + // leaf nodes, where they are not needed. + AddressToRangeMap *map_; +}; + + +} // namespace google_breakpad + + +#endif // PROCESSOR_CONTAINED_RANGE_MAP_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/contained_range_map_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/contained_range_map_unittest.cc new file mode 100644 index 0000000000..e5910da0d5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/contained_range_map_unittest.cc @@ -0,0 +1,263 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. 
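For orientation, here is a minimal usage sketch of the ContainedRangeMap class declared above. It is illustrative only and not part of the patch; the instantiation ContainedRangeMap<unsigned int, int> and the concrete ranges are assumptions chosen to mirror the containment rules documented in the header and exercised by the unit test that follows.

#include <cassert>

#include "processor/contained_range_map-inl.h"

static void ContainedRangeMapSketch() {
  google_breakpad::ContainedRangeMap<unsigned int, int> crm;

  // A containing range and a child entirely inside it are both accepted.
  assert(crm.StoreRange(10, 10, 1));   // addresses 10..19
  assert(crm.StoreRange(12, 4, 2));    // addresses 12..15, nested in entry 1

  // A range that straddles an existing one violates containment and fails.
  assert(!crm.StoreRange(15, 10, 3));  // 15..24 partially overlaps 10..19

  // Retrieval returns the most specific (smallest) range holding the address.
  int entry = 0;
  assert(crm.RetrieveRange(13, &entry) && entry == 2);
  assert(crm.RetrieveRange(18, &entry) && entry == 1);
  assert(!crm.RetrieveRange(25, &entry));  // no stored range covers 25

  crm.Clear();  // removes all children; crm is reusable afterwards
}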
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// contained_range_map_unittest.cc: Unit tests for ContainedRangeMap +// +// Author: Mark Mentovai + +#include + +#include "processor/contained_range_map-inl.h" + +#include "processor/logging.h" + + +#define ASSERT_TRUE(condition) \ + if (!(condition)) { \ + fprintf(stderr, "FAIL: %s @ %s:%d\n", #condition, __FILE__, __LINE__); \ + return false; \ + } + +#define ASSERT_FALSE(condition) ASSERT_TRUE(!(condition)) + + +namespace { + + +using google_breakpad::ContainedRangeMap; + + +static bool RunTests() { + ContainedRangeMap crm; + + // First, do the StoreRange tests. This validates the containment + // rules. 
+ ASSERT_TRUE (crm.StoreRange(10, 10, 1)); + ASSERT_FALSE(crm.StoreRange(10, 10, 2)); // exactly equal to 1 + ASSERT_FALSE(crm.StoreRange(11, 10, 3)); // begins inside 1 and extends up + ASSERT_FALSE(crm.StoreRange( 9, 10, 4)); // begins below 1 and ends inside + ASSERT_TRUE (crm.StoreRange(11, 9, 5)); // contained by existing + ASSERT_TRUE (crm.StoreRange(12, 7, 6)); + ASSERT_TRUE (crm.StoreRange( 9, 12, 7)); // contains existing + ASSERT_TRUE (crm.StoreRange( 9, 13, 8)); + ASSERT_TRUE (crm.StoreRange( 8, 14, 9)); + ASSERT_TRUE (crm.StoreRange(30, 3, 10)); + ASSERT_TRUE (crm.StoreRange(33, 3, 11)); + ASSERT_TRUE (crm.StoreRange(30, 6, 12)); // storable but totally masked + ASSERT_TRUE (crm.StoreRange(40, 8, 13)); // will be totally masked + ASSERT_TRUE (crm.StoreRange(40, 4, 14)); + ASSERT_TRUE (crm.StoreRange(44, 4, 15)); + ASSERT_FALSE(crm.StoreRange(32, 10, 16)); // begins in #10, ends in #14 + ASSERT_FALSE(crm.StoreRange(50, 0, 17)); // zero length + ASSERT_TRUE (crm.StoreRange(50, 10, 18)); + ASSERT_TRUE (crm.StoreRange(50, 1, 19)); + ASSERT_TRUE (crm.StoreRange(59, 1, 20)); + ASSERT_TRUE (crm.StoreRange(60, 1, 21)); + ASSERT_TRUE (crm.StoreRange(69, 1, 22)); + ASSERT_TRUE (crm.StoreRange(60, 10, 23)); + ASSERT_TRUE (crm.StoreRange(68, 1, 24)); + ASSERT_TRUE (crm.StoreRange(61, 1, 25)); + ASSERT_TRUE (crm.StoreRange(61, 8, 26)); + ASSERT_FALSE(crm.StoreRange(59, 9, 27)); + ASSERT_FALSE(crm.StoreRange(59, 10, 28)); + ASSERT_FALSE(crm.StoreRange(59, 11, 29)); + ASSERT_TRUE (crm.StoreRange(70, 10, 30)); + ASSERT_TRUE (crm.StoreRange(74, 2, 31)); + ASSERT_TRUE (crm.StoreRange(77, 2, 32)); + ASSERT_FALSE(crm.StoreRange(72, 6, 33)); + ASSERT_TRUE (crm.StoreRange(80, 3, 34)); + ASSERT_TRUE (crm.StoreRange(81, 1, 35)); + ASSERT_TRUE (crm.StoreRange(82, 1, 36)); + ASSERT_TRUE (crm.StoreRange(83, 3, 37)); + ASSERT_TRUE (crm.StoreRange(84, 1, 38)); + ASSERT_TRUE (crm.StoreRange(83, 1, 39)); + ASSERT_TRUE (crm.StoreRange(86, 5, 40)); + ASSERT_TRUE (crm.StoreRange(88, 1, 41)); + ASSERT_TRUE (crm.StoreRange(90, 1, 42)); + ASSERT_TRUE (crm.StoreRange(86, 1, 43)); + ASSERT_TRUE (crm.StoreRange(87, 1, 44)); + ASSERT_TRUE (crm.StoreRange(89, 1, 45)); + ASSERT_TRUE (crm.StoreRange(87, 4, 46)); + ASSERT_TRUE (crm.StoreRange(87, 3, 47)); + ASSERT_FALSE(crm.StoreRange(86, 2, 48)); + + // Each element in test_data contains the expected result when calling + // RetrieveRange on an address. 
+ const int test_data[] = { + 0, // 0 + 0, // 1 + 0, // 2 + 0, // 3 + 0, // 4 + 0, // 5 + 0, // 6 + 0, // 7 + 9, // 8 + 7, // 9 + 1, // 10 + 5, // 11 + 6, // 12 + 6, // 13 + 6, // 14 + 6, // 15 + 6, // 16 + 6, // 17 + 6, // 18 + 5, // 19 + 7, // 20 + 8, // 21 + 0, // 22 + 0, // 23 + 0, // 24 + 0, // 25 + 0, // 26 + 0, // 27 + 0, // 28 + 0, // 29 + 10, // 30 + 10, // 31 + 10, // 32 + 11, // 33 + 11, // 34 + 11, // 35 + 0, // 36 + 0, // 37 + 0, // 38 + 0, // 39 + 14, // 40 + 14, // 41 + 14, // 42 + 14, // 43 + 15, // 44 + 15, // 45 + 15, // 46 + 15, // 47 + 0, // 48 + 0, // 49 + 19, // 50 + 18, // 51 + 18, // 52 + 18, // 53 + 18, // 54 + 18, // 55 + 18, // 56 + 18, // 57 + 18, // 58 + 20, // 59 + 21, // 60 + 25, // 61 + 26, // 62 + 26, // 63 + 26, // 64 + 26, // 65 + 26, // 66 + 26, // 67 + 24, // 68 + 22, // 69 + 30, // 70 + 30, // 71 + 30, // 72 + 30, // 73 + 31, // 74 + 31, // 75 + 30, // 76 + 32, // 77 + 32, // 78 + 30, // 79 + 34, // 80 + 35, // 81 + 36, // 82 + 39, // 83 + 38, // 84 + 37, // 85 + 43, // 86 + 44, // 87 + 41, // 88 + 45, // 89 + 42, // 90 + 0, // 91 + 0, // 92 + 0, // 93 + 0, // 94 + 0, // 95 + 0, // 96 + 0, // 97 + 0, // 98 + 0 // 99 + }; + unsigned int test_high = sizeof(test_data) / sizeof(int); + + // Now, do the RetrieveRange tests. This further validates that the + // objects were stored properly and that retrieval returns the correct + // object. + // If GENERATE_TEST_DATA is defined, instead of the retrieval tests, a + // new test_data array will be printed. Exercise caution when doing this. + // Be sure to verify the results manually! +#ifdef GENERATE_TEST_DATA + printf(" const int test_data[] = {\n"); +#endif // GENERATE_TEST_DATA + + for (unsigned int address = 0; address < test_high; ++address) { + int value; + if (!crm.RetrieveRange(address, &value)) + value = 0; + +#ifndef GENERATE_TEST_DATA + // Don't use ASSERT inside the loop because it won't show the failed + // |address|, and the line number will always be the same. That makes + // it difficult to figure out which test failed. + if (value != test_data[address]) { + fprintf(stderr, "FAIL: retrieve %d expected %d observed %d @ %s:%d\n", + address, test_data[address], value, __FILE__, __LINE__); + return false; + } +#else // !GENERATE_TEST_DATA + printf(" %d%c%s // %d\n", value, + address == test_high - 1 ? ' ' : ',', + value < 10 ? " " : "", + address); +#endif // !GENERATE_TEST_DATA + } + +#ifdef GENERATE_TEST_DATA + printf(" };\n"); +#endif // GENERATE_TEST_DATA + + return true; +} + + +} // namespace + + +int main(int argc, char **argv) { + BPLOG_INIT(&argc, &argv); + + return RunTests() ? 0 : 1; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/disassembler_x86.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/disassembler_x86.cc new file mode 100644 index 0000000000..9eba848d8c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/disassembler_x86.cc @@ -0,0 +1,241 @@ +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// disassembler_x86.cc: simple x86 disassembler. +// +// Provides single step disassembly of x86 bytecode and flags instructions +// that utilize known bad register values. +// +// Author: Cris Neckar + +#include "processor/disassembler_x86.h" + +#include +#include + +namespace google_breakpad { + +DisassemblerX86::DisassemblerX86(const uint8_t *bytecode, + uint32_t size, + uint32_t virtual_address) : + bytecode_(bytecode), + size_(size), + virtual_address_(virtual_address), + current_byte_offset_(0), + current_inst_offset_(0), + instr_valid_(false), + register_valid_(false), + pushed_bad_value_(false), + end_of_block_(false), + flags_(0) { + libdis::x86_init(libdis::opt_none, NULL, NULL); +} + +DisassemblerX86::~DisassemblerX86() { + if (instr_valid_) + libdis::x86_oplist_free(¤t_instr_); + + libdis::x86_cleanup(); +} + +uint32_t DisassemblerX86::NextInstruction() { + if (instr_valid_) + libdis::x86_oplist_free(¤t_instr_); + + if (current_byte_offset_ >= size_) { + instr_valid_ = false; + return 0; + } + uint32_t instr_size = 0; + instr_size = libdis::x86_disasm((unsigned char *)bytecode_, size_, + virtual_address_, current_byte_offset_, + ¤t_instr_); + if (instr_size == 0) { + instr_valid_ = false; + return 0; + } + + current_byte_offset_ += instr_size; + current_inst_offset_++; + instr_valid_ = libdis::x86_insn_is_valid(¤t_instr_); + if (!instr_valid_) + return 0; + + if (current_instr_.type == libdis::insn_return) + end_of_block_ = true; + libdis::x86_op_t *src = libdis::x86_get_src_operand(¤t_instr_); + libdis::x86_op_t *dest = libdis::x86_get_dest_operand(¤t_instr_); + + if (register_valid_) { + switch (current_instr_.group) { + // Flag branches based off of bad registers and calls that occur + // after pushing bad values. + case libdis::insn_controlflow: + switch (current_instr_.type) { + case libdis::insn_jmp: + case libdis::insn_jcc: + case libdis::insn_call: + case libdis::insn_callcc: + if (dest) { + switch (dest->type) { + case libdis::op_expression: + if (dest->data.expression.base.id == bad_register_.id) + flags_ |= DISX86_BAD_BRANCH_TARGET; + break; + case libdis::op_register: + if (dest->data.reg.id == bad_register_.id) + flags_ |= DISX86_BAD_BRANCH_TARGET; + break; + default: + if (pushed_bad_value_ && + (current_instr_.type == libdis::insn_call || + current_instr_.type == libdis::insn_callcc)) + flags_ |= DISX86_BAD_ARGUMENT_PASSED; + break; + } + } + break; + default: + break; + } + break; + + // Flag block data operations that use bad registers for src or dest. 
+ case libdis::insn_string: + if (dest && dest->type == libdis::op_expression && + dest->data.expression.base.id == bad_register_.id) + flags_ |= DISX86_BAD_BLOCK_WRITE; + if (src && src->type == libdis::op_expression && + src->data.expression.base.id == bad_register_.id) + flags_ |= DISX86_BAD_BLOCK_READ; + break; + + // Flag comparisons based on bad data. + case libdis::insn_comparison: + if ((dest && dest->type == libdis::op_expression && + dest->data.expression.base.id == bad_register_.id) || + (src && src->type == libdis::op_expression && + src->data.expression.base.id == bad_register_.id) || + (dest && dest->type == libdis::op_register && + dest->data.reg.id == bad_register_.id) || + (src && src->type == libdis::op_register && + src->data.reg.id == bad_register_.id)) + flags_ |= DISX86_BAD_COMPARISON; + break; + + // Flag any other instruction which derefs a bad register for + // src or dest. + default: + if (dest && dest->type == libdis::op_expression && + dest->data.expression.base.id == bad_register_.id) + flags_ |= DISX86_BAD_WRITE; + if (src && src->type == libdis::op_expression && + src->data.expression.base.id == bad_register_.id) + flags_ |= DISX86_BAD_READ; + break; + } + } + + // When a register is marked as tainted check if it is pushed. + // TODO(cdn): may also want to check for MOVs into EBP offsets. + if (register_valid_ && dest && current_instr_.type == libdis::insn_push) { + switch (dest->type) { + case libdis::op_expression: + if (dest->data.expression.base.id == bad_register_.id || + dest->data.expression.index.id == bad_register_.id) + pushed_bad_value_ = true; + break; + case libdis::op_register: + if (dest->data.reg.id == bad_register_.id) + pushed_bad_value_ = true; + break; + default: + break; + } + } + + // Check if a tainted register value is clobbered. + // For conditional MOVs and XCHGs assume that + // there is a hit. 
+ if (register_valid_) { + switch (current_instr_.type) { + case libdis::insn_xor: + if (src && src->type == libdis::op_register && + dest && dest->type == libdis::op_register && + src->data.reg.id == bad_register_.id && + src->data.reg.id == dest->data.reg.id) + register_valid_ = false; + break; + case libdis::insn_pop: + case libdis::insn_mov: + case libdis::insn_movcc: + if (dest && dest->type == libdis::op_register && + dest->data.reg.id == bad_register_.id) + register_valid_ = false; + break; + case libdis::insn_popregs: + register_valid_ = false; + break; + case libdis::insn_xchg: + case libdis::insn_xchgcc: + if (dest && dest->type == libdis::op_register && + src && src->type == libdis::op_register) { + if (dest->data.reg.id == bad_register_.id) + memcpy(&bad_register_, &src->data.reg, sizeof(libdis::x86_reg_t)); + else if (src->data.reg.id == bad_register_.id) + memcpy(&bad_register_, &dest->data.reg, sizeof(libdis::x86_reg_t)); + } + break; + default: + break; + } + } + + return instr_size; +} + +bool DisassemblerX86::setBadRead() { + if (!instr_valid_) + return false; + + libdis::x86_op_t *operand = libdis::x86_get_src_operand(¤t_instr_); + if (!operand || operand->type != libdis::op_expression) + return false; + + memcpy(&bad_register_, &operand->data.expression.base, + sizeof(libdis::x86_reg_t)); + register_valid_ = true; + return true; +} + +bool DisassemblerX86::setBadWrite() { + if (!instr_valid_) + return false; + + libdis::x86_op_t *operand = libdis::x86_get_dest_operand(¤t_instr_); + if (!operand || operand->type != libdis::op_expression) + return false; + + memcpy(&bad_register_, &operand->data.expression.base, + sizeof(libdis::x86_reg_t)); + register_valid_ = true; + return true; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/disassembler_x86.h b/TMessagesProj/jni/third_party/breakpad/src/processor/disassembler_x86.h new file mode 100644 index 0000000000..7106941072 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/disassembler_x86.h @@ -0,0 +1,127 @@ +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// disassembler_x86.h: Basic x86 bytecode disassembler +// +// Provides a simple disassembler which wraps libdisasm. This allows simple +// tests to be run against bytecode to test for various properties. +// +// Author: Cris Neckar + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_DISASSEMBLER_X86_H_ +#define GOOGLE_BREAKPAD_PROCESSOR_DISASSEMBLER_X86_H_ + +#include +#include + +#include "google_breakpad/common/breakpad_types.h" + +namespace libdis { +#include "third_party/libdisasm/libdis.h" +} + +namespace google_breakpad { + +enum { + DISX86_NONE = 0x0, + DISX86_BAD_BRANCH_TARGET = 0x1, + DISX86_BAD_ARGUMENT_PASSED = 0x2, + DISX86_BAD_WRITE = 0x4, + DISX86_BAD_BLOCK_WRITE = 0x8, + DISX86_BAD_READ = 0x10, + DISX86_BAD_BLOCK_READ = 0x20, + DISX86_BAD_COMPARISON = 0x40 +}; + +class DisassemblerX86 { + public: + // TODO(cdn): Modify this class to take a MemoryRegion instead of just + // a raw buffer. This will make it easier to use this on arbitrary + // minidumps without first copying out the code segment. + DisassemblerX86(const uint8_t *bytecode, uint32_t, uint32_t); + ~DisassemblerX86(); + + // This walks to the next instruction in the memory region and + // sets flags based on the type of instruction and previous state + // including any registers marked as bad through setBadRead() + // or setBadWrite(). This method can be called in a loop to + // disassemble until the end of a region. + uint32_t NextInstruction(); + + // Indicates whether the current disassembled instruction was valid. + bool currentInstructionValid() { return instr_valid_; } + + // Returns the current instruction as defined in libdis.h, + // or NULL if the current instruction is not valid. + const libdis::x86_insn_t* currentInstruction() { + return instr_valid_ ? ¤t_instr_ : NULL; + } + + // Returns the type of the current instruction as defined in libdis.h. + libdis::x86_insn_group currentInstructionGroup() { + return current_instr_.group; + } + + // Indicates whether a return instruction has been encountered. + bool endOfBlock() { return end_of_block_; } + + // The flags set so far for the disassembly. + uint16_t flags() { return flags_; } + + // This sets an indicator that the register used to determine + // src or dest for the current instruction is tainted. These can + // be used after examining the current instruction to indicate, + // for example that a bad read or write occurred and the pointer + // stored in the register is currently invalid. + bool setBadRead(); + bool setBadWrite(); + + protected: + const uint8_t *bytecode_; + uint32_t size_; + uint32_t virtual_address_; + uint32_t current_byte_offset_; + uint32_t current_inst_offset_; + + bool instr_valid_; + libdis::x86_insn_t current_instr_; + + // TODO(cdn): Maybe also track an expression's index register. + // ex: mov eax, [ebx + ecx]; ebx is base, ecx is index. 
+ bool register_valid_; + libdis::x86_reg_t bad_register_; + + bool pushed_bad_value_; + bool end_of_block_; + + uint16_t flags_; +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_DISASSEMBLER_X86_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/disassembler_x86_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/disassembler_x86_unittest.cc new file mode 100644 index 0000000000..352905f20d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/disassembler_x86_unittest.cc @@ -0,0 +1,233 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE + +#include + +#include "breakpad_googletest_includes.h" +#include "processor/disassembler_x86.h" +#include "third_party/libdisasm/libdis.h" + +namespace { + +using google_breakpad::DisassemblerX86; + +unsigned char just_return[] = "\xc3"; // retn + +unsigned char invalid_instruction[] = "\x00"; // invalid + +unsigned char read_eax_jmp_eax[] = + "\x8b\x18" // mov ebx, [eax]; + "\x33\xc9" // xor ebx, ebx; + "\xff\x20" // jmp eax; + "\xc3"; // retn; + +unsigned char write_eax_arg_to_call[] = + "\x89\xa8\x00\x02\x00\x00" // mov [eax+200], ebp; + "\xc1\xeb\x02" // shr ebx, 2; + "\x50" // push eax; + "\xe8\xd1\x24\x77\x88" // call something; + "\xc3"; // retn; + +unsigned char read_edi_stosb[] = + "\x8b\x07" // mov eax, [edi]; + "\x8b\xc8" // mov ecx, eax; + "\xf3\xaa" // rep stosb; + "\xc3"; // retn; + +unsigned char read_clobber_write[] = + "\x03\x18" // add ebx, [eax]; + "\x8b\xc1" // mov eax, ecx; + "\x89\x10" // mov [eax], edx; + "\xc3"; // retn; + +unsigned char read_xchg_write[] = + "\x03\x18" // add ebx, [eax]; + "\x91" // xchg eax, ecx; + "\x89\x18" // mov [eax], ebx; + "\x89\x11" // mov [ecx], edx; + "\xc3"; // retn; + +unsigned char read_cmp[] = + "\x03\x18" // add ebx, [eax]; + "\x83\xf8\x00" // cmp eax, 0; + "\x74\x04" // je +4; + "\xc3"; // retn; + +TEST(DisassemblerX86Test, SimpleReturnInstruction) { + DisassemblerX86 dis(just_return, sizeof(just_return)-1, 0); + 
EXPECT_EQ(1U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(0U, dis.flags()); + EXPECT_TRUE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_controlflow, dis.currentInstructionGroup()); + const libdis::x86_insn_t* instruction = dis.currentInstruction(); + EXPECT_EQ(libdis::insn_controlflow, instruction->group); + EXPECT_EQ(libdis::insn_return, instruction->type); + EXPECT_EQ(0U, dis.NextInstruction()); + EXPECT_FALSE(dis.currentInstructionValid()); + EXPECT_EQ(NULL, dis.currentInstruction()); +} + +TEST(DisassemblerX86Test, SimpleInvalidInstruction) { + DisassemblerX86 dis(invalid_instruction, sizeof(invalid_instruction)-1, 0); + EXPECT_EQ(0U, dis.NextInstruction()); + EXPECT_FALSE(dis.currentInstructionValid()); +} + +TEST(DisassemblerX86Test, BadReadLeadsToBranch) { + DisassemblerX86 dis(read_eax_jmp_eax, sizeof(read_eax_jmp_eax)-1, 0); + EXPECT_EQ(2U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(0U, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_move, dis.currentInstructionGroup()); + EXPECT_TRUE(dis.setBadRead()); + EXPECT_EQ(2U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(0U, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_logic, dis.currentInstructionGroup()); + EXPECT_EQ(2U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(google_breakpad::DISX86_BAD_BRANCH_TARGET, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_controlflow, dis.currentInstructionGroup()); +} + +TEST(DisassemblerX86Test, BadWriteLeadsToPushedArg) { + DisassemblerX86 dis(write_eax_arg_to_call, + sizeof(write_eax_arg_to_call)-1, 0); + EXPECT_EQ(6U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(0U, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_move, dis.currentInstructionGroup()); + EXPECT_TRUE(dis.setBadWrite()); + EXPECT_EQ(3U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(0U, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_arithmetic, dis.currentInstructionGroup()); + EXPECT_EQ(1U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(0U, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(5U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(google_breakpad::DISX86_BAD_ARGUMENT_PASSED, dis.flags()); + EXPECT_EQ(libdis::insn_controlflow, dis.currentInstructionGroup()); + EXPECT_FALSE(dis.endOfBlock()); +} + + +TEST(DisassemblerX86Test, BadReadLeadsToBlockWrite) { + DisassemblerX86 dis(read_edi_stosb, sizeof(read_edi_stosb)-1, 0); + EXPECT_EQ(2U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(0U, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_move, dis.currentInstructionGroup()); + EXPECT_TRUE(dis.setBadRead()); + EXPECT_EQ(2U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(0U, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_move, dis.currentInstructionGroup()); + EXPECT_EQ(2U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(google_breakpad::DISX86_BAD_BLOCK_WRITE, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_string, dis.currentInstructionGroup()); +} + +TEST(DisassemblerX86Test, BadReadClobberThenWrite) { + DisassemblerX86 
dis(read_clobber_write, sizeof(read_clobber_write)-1, 0); + EXPECT_EQ(2U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(0U, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_arithmetic, dis.currentInstructionGroup()); + EXPECT_TRUE(dis.setBadRead()); + EXPECT_EQ(2U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(0U, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_move, dis.currentInstructionGroup()); + EXPECT_EQ(2U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(0U, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_move, dis.currentInstructionGroup()); +} + +TEST(DisassemblerX86Test, BadReadXCHGThenWrite) { + DisassemblerX86 dis(read_xchg_write, sizeof(read_xchg_write)-1, 0); + EXPECT_EQ(2U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(0U, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_arithmetic, dis.currentInstructionGroup()); + EXPECT_TRUE(dis.setBadRead()); + EXPECT_EQ(1U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(0U, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_move, dis.currentInstructionGroup()); + EXPECT_EQ(2U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(0U, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_move, dis.currentInstructionGroup()); + EXPECT_EQ(2U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(google_breakpad::DISX86_BAD_WRITE, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_move, dis.currentInstructionGroup()); +} + +TEST(DisassemblerX86Test, BadReadThenCMP) { + DisassemblerX86 dis(read_cmp, sizeof(read_cmp)-1, 0); + EXPECT_EQ(2U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(0U, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_arithmetic, dis.currentInstructionGroup()); + EXPECT_TRUE(dis.setBadRead()); + EXPECT_EQ(3U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(google_breakpad::DISX86_BAD_COMPARISON, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_comparison, dis.currentInstructionGroup()); + EXPECT_EQ(2U, dis.NextInstruction()); + EXPECT_TRUE(dis.currentInstructionValid()); + EXPECT_EQ(google_breakpad::DISX86_BAD_COMPARISON, dis.flags()); + EXPECT_FALSE(dis.endOfBlock()); + EXPECT_EQ(libdis::insn_controlflow, dis.currentInstructionGroup()); +} +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/dump_context.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/dump_context.cc new file mode 100644 index 0000000000..cadf93f7ea --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/dump_context.cc @@ -0,0 +1,614 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. 
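Beyond the unit tests above, the typical caller-side pattern for DisassemblerX86 is a small driver loop: decode the faulting instruction, taint the register involved via setBadRead() or setBadWrite(), then keep stepping and read the accumulated DISX86_* flags. The sketch below is an assumption about how such a caller might look (the function name and the code/code_size/va inputs are hypothetical), not code from this patch.

#include <stdint.h>

#include "processor/disassembler_x86.h"

// Returns the DISX86_* flags observed after tainting the source operand of
// the first instruction in |code|, which is assumed to be the faulting one.
static uint16_t ScanForBadRead(const uint8_t* code, uint32_t code_size,
                               uint32_t va) {
  google_breakpad::DisassemblerX86 dis(code, code_size, va);

  // Decode the first instruction and mark the base register of its source
  // memory operand as holding a bad value (return value ignored here).
  if (dis.NextInstruction() == 0 || !dis.currentInstructionValid())
    return google_breakpad::DISX86_NONE;
  dis.setBadRead();

  // Step until the buffer is exhausted or a return ends the block; each step
  // may set flags such as DISX86_BAD_BRANCH_TARGET or DISX86_BAD_WRITE.
  while (!dis.endOfBlock() && dis.NextInstruction() > 0) {
    // Nothing to do per instruction in this sketch.
  }
  return dis.flags();
}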
+// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// dump_context.cc: A (mini/micro)dump context. +// +// See dump_context.h for documentation. + +#include "google_breakpad/processor/dump_context.h" + +#include +#include + +#ifdef _WIN32 +#include +#define PRIx64 "llx" +#define PRIx32 "lx" +#define snprintf _snprintf +#else // _WIN32 +#include +#endif // _WIN32 + +#include "processor/logging.h" + +namespace google_breakpad { + +DumpContext::DumpContext() : context_(), + context_flags_(0) { } + +DumpContext::~DumpContext() { + FreeContext(); +} + +uint32_t DumpContext::GetContextCPU() const { + if (!valid_) { + // Don't log a message, GetContextCPU can be legitimately called with + // valid_ false by FreeContext, which is called by Read. + return 0; + } + + return context_flags_ & MD_CONTEXT_CPU_MASK; +} + +uint32_t DumpContext::GetContextFlags() const { + return context_flags_; +} + +const MDRawContextX86* DumpContext::GetContextX86() const { + if (GetContextCPU() != MD_CONTEXT_X86) { + BPLOG(ERROR) << "DumpContext cannot get x86 context"; + return NULL; + } + + return context_.x86; +} + +const MDRawContextPPC* DumpContext::GetContextPPC() const { + if (GetContextCPU() != MD_CONTEXT_PPC) { + BPLOG(ERROR) << "DumpContext cannot get ppc context"; + return NULL; + } + + return context_.ppc; +} + +const MDRawContextPPC64* DumpContext::GetContextPPC64() const { + if (GetContextCPU() != MD_CONTEXT_PPC64) { + BPLOG(ERROR) << "DumpContext cannot get ppc64 context"; + return NULL; + } + + return context_.ppc64; +} + +const MDRawContextAMD64* DumpContext::GetContextAMD64() const { + if (GetContextCPU() != MD_CONTEXT_AMD64) { + BPLOG(ERROR) << "DumpContext cannot get amd64 context"; + return NULL; + } + + return context_.amd64; +} + +const MDRawContextSPARC* DumpContext::GetContextSPARC() const { + if (GetContextCPU() != MD_CONTEXT_SPARC) { + BPLOG(ERROR) << "DumpContext cannot get sparc context"; + return NULL; + } + + return context_.ctx_sparc; +} + +const MDRawContextARM* DumpContext::GetContextARM() const { + if (GetContextCPU() != MD_CONTEXT_ARM) { + BPLOG(ERROR) << "DumpContext cannot get arm context"; + return NULL; + } + + return context_.arm; +} + +const MDRawContextARM64* DumpContext::GetContextARM64() const { + if (GetContextCPU() != MD_CONTEXT_ARM64) { + BPLOG(ERROR) << "DumpContext cannot get arm64 context"; + return NULL; + } + + return context_.arm64; +} + +const MDRawContextMIPS* DumpContext::GetContextMIPS() const { + if (GetContextCPU() != MD_CONTEXT_MIPS) { + BPLOG(ERROR) << "DumpContext cannot get MIPS context"; + return NULL; + } + + return 
context_.ctx_mips; +} + +bool DumpContext::GetInstructionPointer(uint64_t* ip) const { + BPLOG_IF(ERROR, !ip) << "DumpContext::GetInstructionPointer requires |ip|"; + assert(ip); + *ip = 0; + + if (!valid_) { + BPLOG(ERROR) << "Invalid DumpContext for GetInstructionPointer"; + return false; + } + + switch (GetContextCPU()) { + case MD_CONTEXT_AMD64: + *ip = GetContextAMD64()->rip; + break; + case MD_CONTEXT_ARM: + *ip = GetContextARM()->iregs[MD_CONTEXT_ARM_REG_PC]; + break; + case MD_CONTEXT_ARM64: + *ip = GetContextARM64()->iregs[MD_CONTEXT_ARM64_REG_PC]; + break; + case MD_CONTEXT_PPC: + *ip = GetContextPPC()->srr0; + break; + case MD_CONTEXT_PPC64: + *ip = GetContextPPC64()->srr0; + break; + case MD_CONTEXT_SPARC: + *ip = GetContextSPARC()->pc; + break; + case MD_CONTEXT_X86: + *ip = GetContextX86()->eip; + break; + case MD_CONTEXT_MIPS: + *ip = GetContextMIPS()->epc; + break; + default: + // This should never happen. + BPLOG(ERROR) << "Unknown CPU architecture in GetInstructionPointer"; + return false; + } + return true; +} + +void DumpContext::SetContextFlags(uint32_t context_flags) { + context_flags_ = context_flags; +} + +void DumpContext::SetContextX86(MDRawContextX86* x86) { + context_.x86 = x86; +} + +void DumpContext::SetContextPPC(MDRawContextPPC* ppc) { + context_.ppc = ppc; +} + +void DumpContext::SetContextPPC64(MDRawContextPPC64* ppc64) { + context_.ppc64 = ppc64; +} + +void DumpContext::SetContextAMD64(MDRawContextAMD64* amd64) { + context_.amd64 = amd64; +} + +void DumpContext::SetContextSPARC(MDRawContextSPARC* ctx_sparc) { + context_.ctx_sparc = ctx_sparc; +} + +void DumpContext::SetContextARM(MDRawContextARM* arm) { + context_.arm = arm; +} + +void DumpContext::SetContextARM64(MDRawContextARM64* arm64) { + context_.arm64 = arm64; +} + +void DumpContext::SetContextMIPS(MDRawContextMIPS* ctx_mips) { + context_.ctx_mips = ctx_mips; +} + +void DumpContext::FreeContext() { + switch (GetContextCPU()) { + case MD_CONTEXT_X86: + delete context_.x86; + break; + + case MD_CONTEXT_PPC: + delete context_.ppc; + break; + + case MD_CONTEXT_PPC64: + delete context_.ppc64; + break; + + case MD_CONTEXT_AMD64: + delete context_.amd64; + break; + + case MD_CONTEXT_SPARC: + delete context_.ctx_sparc; + break; + + case MD_CONTEXT_ARM: + delete context_.arm; + break; + + case MD_CONTEXT_ARM64: + delete context_.arm64; + break; + + case MD_CONTEXT_MIPS: + delete context_.ctx_mips; + break; + + default: + // There is no context record (valid_ is false) or there's a + // context record for an unknown CPU (shouldn't happen, only known + // records are stored by Read). 
+ break; + } + + context_flags_ = 0; + context_.base = NULL; +} + +void DumpContext::Print() { + if (!valid_) { + BPLOG(ERROR) << "DumpContext cannot print invalid data"; + return; + } + + switch (GetContextCPU()) { + case MD_CONTEXT_X86: { + const MDRawContextX86* context_x86 = GetContextX86(); + printf("MDRawContextX86\n"); + printf(" context_flags = 0x%x\n", + context_x86->context_flags); + printf(" dr0 = 0x%x\n", context_x86->dr0); + printf(" dr1 = 0x%x\n", context_x86->dr1); + printf(" dr2 = 0x%x\n", context_x86->dr2); + printf(" dr3 = 0x%x\n", context_x86->dr3); + printf(" dr6 = 0x%x\n", context_x86->dr6); + printf(" dr7 = 0x%x\n", context_x86->dr7); + printf(" float_save.control_word = 0x%x\n", + context_x86->float_save.control_word); + printf(" float_save.status_word = 0x%x\n", + context_x86->float_save.status_word); + printf(" float_save.tag_word = 0x%x\n", + context_x86->float_save.tag_word); + printf(" float_save.error_offset = 0x%x\n", + context_x86->float_save.error_offset); + printf(" float_save.error_selector = 0x%x\n", + context_x86->float_save.error_selector); + printf(" float_save.data_offset = 0x%x\n", + context_x86->float_save.data_offset); + printf(" float_save.data_selector = 0x%x\n", + context_x86->float_save.data_selector); + printf(" float_save.register_area[%2d] = 0x", + MD_FLOATINGSAVEAREA_X86_REGISTERAREA_SIZE); + for (unsigned int register_index = 0; + register_index < MD_FLOATINGSAVEAREA_X86_REGISTERAREA_SIZE; + ++register_index) { + printf("%02x", context_x86->float_save.register_area[register_index]); + } + printf("\n"); + printf(" float_save.cr0_npx_state = 0x%x\n", + context_x86->float_save.cr0_npx_state); + printf(" gs = 0x%x\n", context_x86->gs); + printf(" fs = 0x%x\n", context_x86->fs); + printf(" es = 0x%x\n", context_x86->es); + printf(" ds = 0x%x\n", context_x86->ds); + printf(" edi = 0x%x\n", context_x86->edi); + printf(" esi = 0x%x\n", context_x86->esi); + printf(" ebx = 0x%x\n", context_x86->ebx); + printf(" edx = 0x%x\n", context_x86->edx); + printf(" ecx = 0x%x\n", context_x86->ecx); + printf(" eax = 0x%x\n", context_x86->eax); + printf(" ebp = 0x%x\n", context_x86->ebp); + printf(" eip = 0x%x\n", context_x86->eip); + printf(" cs = 0x%x\n", context_x86->cs); + printf(" eflags = 0x%x\n", context_x86->eflags); + printf(" esp = 0x%x\n", context_x86->esp); + printf(" ss = 0x%x\n", context_x86->ss); + printf(" extended_registers[%3d] = 0x", + MD_CONTEXT_X86_EXTENDED_REGISTERS_SIZE); + for (unsigned int register_index = 0; + register_index < MD_CONTEXT_X86_EXTENDED_REGISTERS_SIZE; + ++register_index) { + printf("%02x", context_x86->extended_registers[register_index]); + } + printf("\n\n"); + + break; + } + + case MD_CONTEXT_PPC: { + const MDRawContextPPC* context_ppc = GetContextPPC(); + printf("MDRawContextPPC\n"); + printf(" context_flags = 0x%x\n", + context_ppc->context_flags); + printf(" srr0 = 0x%x\n", context_ppc->srr0); + printf(" srr1 = 0x%x\n", context_ppc->srr1); + for (unsigned int gpr_index = 0; + gpr_index < MD_CONTEXT_PPC_GPR_COUNT; + ++gpr_index) { + printf(" gpr[%2d] = 0x%x\n", + gpr_index, context_ppc->gpr[gpr_index]); + } + printf(" cr = 0x%x\n", context_ppc->cr); + printf(" xer = 0x%x\n", context_ppc->xer); + printf(" lr = 0x%x\n", context_ppc->lr); + printf(" ctr = 0x%x\n", context_ppc->ctr); + printf(" mq = 0x%x\n", context_ppc->mq); + printf(" vrsave = 0x%x\n", context_ppc->vrsave); + for (unsigned int fpr_index = 0; + fpr_index < MD_FLOATINGSAVEAREA_PPC_FPR_COUNT; + ++fpr_index) { + printf(" float_save.fpregs[%2d] = 0x%" 
PRIx64 "\n", + fpr_index, context_ppc->float_save.fpregs[fpr_index]); + } + printf(" float_save.fpscr = 0x%x\n", + context_ppc->float_save.fpscr); + // TODO(mmentovai): print the 128-bit quantities in + // context_ppc->vector_save. This isn't done yet because printf + // doesn't support 128-bit quantities, and printing them using + // PRIx64 as two 64-bit quantities requires knowledge of the CPU's + // byte ordering. + printf(" vector_save.save_vrvalid = 0x%x\n", + context_ppc->vector_save.save_vrvalid); + printf("\n"); + + break; + } + + case MD_CONTEXT_PPC64: { + const MDRawContextPPC64* context_ppc64 = GetContextPPC64(); + printf("MDRawContextPPC64\n"); + printf(" context_flags = 0x%" PRIx64 "\n", + context_ppc64->context_flags); + printf(" srr0 = 0x%" PRIx64 "\n", + context_ppc64->srr0); + printf(" srr1 = 0x%" PRIx64 "\n", + context_ppc64->srr1); + for (unsigned int gpr_index = 0; + gpr_index < MD_CONTEXT_PPC64_GPR_COUNT; + ++gpr_index) { + printf(" gpr[%2d] = 0x%" PRIx64 "\n", + gpr_index, context_ppc64->gpr[gpr_index]); + } + printf(" cr = 0x%" PRIx64 "\n", context_ppc64->cr); + printf(" xer = 0x%" PRIx64 "\n", + context_ppc64->xer); + printf(" lr = 0x%" PRIx64 "\n", context_ppc64->lr); + printf(" ctr = 0x%" PRIx64 "\n", + context_ppc64->ctr); + printf(" vrsave = 0x%" PRIx64 "\n", + context_ppc64->vrsave); + for (unsigned int fpr_index = 0; + fpr_index < MD_FLOATINGSAVEAREA_PPC_FPR_COUNT; + ++fpr_index) { + printf(" float_save.fpregs[%2d] = 0x%" PRIx64 "\n", + fpr_index, context_ppc64->float_save.fpregs[fpr_index]); + } + printf(" float_save.fpscr = 0x%x\n", + context_ppc64->float_save.fpscr); + // TODO(mmentovai): print the 128-bit quantities in + // context_ppc64->vector_save. This isn't done yet because printf + // doesn't support 128-bit quantities, and printing them using + // PRIx64 as two 64-bit quantities requires knowledge of the CPU's + // byte ordering. 
+ printf(" vector_save.save_vrvalid = 0x%x\n", + context_ppc64->vector_save.save_vrvalid); + printf("\n"); + + break; + } + + case MD_CONTEXT_AMD64: { + const MDRawContextAMD64* context_amd64 = GetContextAMD64(); + printf("MDRawContextAMD64\n"); + printf(" p1_home = 0x%" PRIx64 "\n", + context_amd64->p1_home); + printf(" p2_home = 0x%" PRIx64 "\n", + context_amd64->p2_home); + printf(" p3_home = 0x%" PRIx64 "\n", + context_amd64->p3_home); + printf(" p4_home = 0x%" PRIx64 "\n", + context_amd64->p4_home); + printf(" p5_home = 0x%" PRIx64 "\n", + context_amd64->p5_home); + printf(" p6_home = 0x%" PRIx64 "\n", + context_amd64->p6_home); + printf(" context_flags = 0x%x\n", + context_amd64->context_flags); + printf(" mx_csr = 0x%x\n", + context_amd64->mx_csr); + printf(" cs = 0x%x\n", context_amd64->cs); + printf(" ds = 0x%x\n", context_amd64->ds); + printf(" es = 0x%x\n", context_amd64->es); + printf(" fs = 0x%x\n", context_amd64->fs); + printf(" gs = 0x%x\n", context_amd64->gs); + printf(" ss = 0x%x\n", context_amd64->ss); + printf(" eflags = 0x%x\n", context_amd64->eflags); + printf(" dr0 = 0x%" PRIx64 "\n", context_amd64->dr0); + printf(" dr1 = 0x%" PRIx64 "\n", context_amd64->dr1); + printf(" dr2 = 0x%" PRIx64 "\n", context_amd64->dr2); + printf(" dr3 = 0x%" PRIx64 "\n", context_amd64->dr3); + printf(" dr6 = 0x%" PRIx64 "\n", context_amd64->dr6); + printf(" dr7 = 0x%" PRIx64 "\n", context_amd64->dr7); + printf(" rax = 0x%" PRIx64 "\n", context_amd64->rax); + printf(" rcx = 0x%" PRIx64 "\n", context_amd64->rcx); + printf(" rdx = 0x%" PRIx64 "\n", context_amd64->rdx); + printf(" rbx = 0x%" PRIx64 "\n", context_amd64->rbx); + printf(" rsp = 0x%" PRIx64 "\n", context_amd64->rsp); + printf(" rbp = 0x%" PRIx64 "\n", context_amd64->rbp); + printf(" rsi = 0x%" PRIx64 "\n", context_amd64->rsi); + printf(" rdi = 0x%" PRIx64 "\n", context_amd64->rdi); + printf(" r8 = 0x%" PRIx64 "\n", context_amd64->r8); + printf(" r9 = 0x%" PRIx64 "\n", context_amd64->r9); + printf(" r10 = 0x%" PRIx64 "\n", context_amd64->r10); + printf(" r11 = 0x%" PRIx64 "\n", context_amd64->r11); + printf(" r12 = 0x%" PRIx64 "\n", context_amd64->r12); + printf(" r13 = 0x%" PRIx64 "\n", context_amd64->r13); + printf(" r14 = 0x%" PRIx64 "\n", context_amd64->r14); + printf(" r15 = 0x%" PRIx64 "\n", context_amd64->r15); + printf(" rip = 0x%" PRIx64 "\n", context_amd64->rip); + // TODO: print xmm, vector, debug registers + printf("\n"); + break; + } + + case MD_CONTEXT_SPARC: { + const MDRawContextSPARC* context_sparc = GetContextSPARC(); + printf("MDRawContextSPARC\n"); + printf(" context_flags = 0x%x\n", + context_sparc->context_flags); + for (unsigned int g_r_index = 0; + g_r_index < MD_CONTEXT_SPARC_GPR_COUNT; + ++g_r_index) { + printf(" g_r[%2d] = 0x%" PRIx64 "\n", + g_r_index, context_sparc->g_r[g_r_index]); + } + printf(" ccr = 0x%" PRIx64 "\n", context_sparc->ccr); + printf(" pc = 0x%" PRIx64 "\n", context_sparc->pc); + printf(" npc = 0x%" PRIx64 "\n", context_sparc->npc); + printf(" y = 0x%" PRIx64 "\n", context_sparc->y); + printf(" asi = 0x%" PRIx64 "\n", context_sparc->asi); + printf(" fprs = 0x%" PRIx64 "\n", context_sparc->fprs); + + for (unsigned int fpr_index = 0; + fpr_index < MD_FLOATINGSAVEAREA_SPARC_FPR_COUNT; + ++fpr_index) { + printf(" float_save.regs[%2d] = 0x%" PRIx64 "\n", + fpr_index, context_sparc->float_save.regs[fpr_index]); + } + printf(" float_save.filler = 0x%" PRIx64 "\n", + context_sparc->float_save.filler); + printf(" float_save.fsr = 0x%" PRIx64 "\n", + context_sparc->float_save.fsr); + break; + } + 
+ case MD_CONTEXT_ARM: { + const MDRawContextARM* context_arm = GetContextARM(); + printf("MDRawContextARM\n"); + printf(" context_flags = 0x%x\n", + context_arm->context_flags); + for (unsigned int ireg_index = 0; + ireg_index < MD_CONTEXT_ARM_GPR_COUNT; + ++ireg_index) { + printf(" iregs[%2d] = 0x%x\n", + ireg_index, context_arm->iregs[ireg_index]); + } + printf(" cpsr = 0x%x\n", context_arm->cpsr); + printf(" float_save.fpscr = 0x%" PRIx64 "\n", + context_arm->float_save.fpscr); + for (unsigned int fpr_index = 0; + fpr_index < MD_FLOATINGSAVEAREA_ARM_FPR_COUNT; + ++fpr_index) { + printf(" float_save.regs[%2d] = 0x%" PRIx64 "\n", + fpr_index, context_arm->float_save.regs[fpr_index]); + } + for (unsigned int fpe_index = 0; + fpe_index < MD_FLOATINGSAVEAREA_ARM_FPEXTRA_COUNT; + ++fpe_index) { + printf(" float_save.extra[%2d] = 0x%" PRIx32 "\n", + fpe_index, context_arm->float_save.extra[fpe_index]); + } + + break; + } + + case MD_CONTEXT_ARM64: { + const MDRawContextARM64* context_arm64 = GetContextARM64(); + printf("MDRawContextARM64\n"); + printf(" context_flags = 0x%" PRIx64 "\n", + context_arm64->context_flags); + for (unsigned int ireg_index = 0; + ireg_index < MD_CONTEXT_ARM64_GPR_COUNT; + ++ireg_index) { + printf(" iregs[%2d] = 0x%" PRIx64 "\n", + ireg_index, context_arm64->iregs[ireg_index]); + } + printf(" cpsr = 0x%x\n", context_arm64->cpsr); + printf(" float_save.fpsr = 0x%x\n", context_arm64->float_save.fpsr); + printf(" float_save.fpcr = 0x%x\n", context_arm64->float_save.fpcr); + + for (unsigned int freg_index = 0; + freg_index < MD_FLOATINGSAVEAREA_ARM64_FPR_COUNT; + ++freg_index) { + uint128_struct fp_value = context_arm64->float_save.regs[freg_index]; + printf(" float_save.regs[%2d] = 0x%" PRIx64 "%" PRIx64 "\n", + freg_index, fp_value.high, fp_value.low); + } + break; + } + + case MD_CONTEXT_MIPS: { + const MDRawContextMIPS* context_mips = GetContextMIPS(); + printf("MDRawContextMIPS\n"); + printf(" context_flags = 0x%x\n", + context_mips->context_flags); + for (int ireg_index = 0; + ireg_index < MD_CONTEXT_MIPS_GPR_COUNT; + ++ireg_index) { + printf(" iregs[%2d] = 0x%" PRIx64 "\n", + ireg_index, context_mips->iregs[ireg_index]); + } + printf(" mdhi = 0x%" PRIx64 "\n", + context_mips->mdhi); + printf(" mdlo = 0x%" PRIx64 "\n", + context_mips->mdhi); + for (int dsp_index = 0; + dsp_index < MD_CONTEXT_MIPS_DSP_COUNT; + ++dsp_index) { + printf(" hi[%1d] = 0x%" PRIx32 "\n", + dsp_index, context_mips->hi[dsp_index]); + printf(" lo[%1d] = 0x%" PRIx32 "\n", + dsp_index, context_mips->lo[dsp_index]); + } + printf(" dsp_control = 0x%" PRIx32 "\n", + context_mips->dsp_control); + printf(" epc = 0x%" PRIx64 "\n", + context_mips->epc); + printf(" badvaddr = 0x%" PRIx64 "\n", + context_mips->badvaddr); + printf(" status = 0x%" PRIx32 "\n", + context_mips->status); + printf(" cause = 0x%" PRIx32 "\n", + context_mips->cause); + + for (int fpr_index = 0; + fpr_index < MD_FLOATINGSAVEAREA_MIPS_FPR_COUNT; + ++fpr_index) { + printf(" float_save.regs[%2d] = 0x%" PRIx64 "\n", + fpr_index, context_mips->float_save.regs[fpr_index]); + } + printf(" float_save.fpcsr = 0x%" PRIx32 "\n", + context_mips->float_save.fpcsr); + printf(" float_save.fir = 0x%" PRIx32 "\n", + context_mips->float_save.fir); + break; + } + + default: { + break; + } + } +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/dump_object.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/dump_object.cc new file mode 100644 index 0000000000..2c82b200b8 --- /dev/null +++ 
b/TMessagesProj/jni/third_party/breakpad/src/processor/dump_object.cc @@ -0,0 +1,39 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// dump_object.cc: A base class for all mini/micro dump object. + +#include "google_breakpad/processor/dump_object.h" + +namespace google_breakpad { + +DumpObject::DumpObject() : valid_(false) { +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability.cc new file mode 100644 index 0000000000..384c499c05 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability.cc @@ -0,0 +1,110 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// exploitability_engine.cc: Generic exploitability engine. +// +// See exploitable_engine.h for documentation. +// +// Author: Cris Neckar + + +#include + +#include "common/scoped_ptr.h" +#include "google_breakpad/processor/exploitability.h" +#include "google_breakpad/processor/minidump.h" +#include "google_breakpad/processor/process_state.h" +#include "processor/exploitability_linux.h" +#include "processor/exploitability_win.h" +#include "processor/logging.h" + +namespace google_breakpad { + +Exploitability::Exploitability(Minidump *dump, + ProcessState *process_state) + : dump_(dump), + process_state_(process_state) {} + +ExploitabilityRating Exploitability::CheckExploitability() { + return CheckPlatformExploitability(); +} + +Exploitability *Exploitability::ExploitabilityForPlatform( + Minidump *dump, + ProcessState *process_state) { + Exploitability *platform_exploitability = NULL; + MinidumpSystemInfo *minidump_system_info = dump->GetSystemInfo(); + if (!minidump_system_info) + return NULL; + + const MDRawSystemInfo *raw_system_info = + minidump_system_info->system_info(); + if (!raw_system_info) + return NULL; + + switch (raw_system_info->platform_id) { + case MD_OS_WIN32_NT: + case MD_OS_WIN32_WINDOWS: { + platform_exploitability = new ExploitabilityWin(dump, process_state); + break; + } + case MD_OS_LINUX: { + platform_exploitability = new ExploitabilityLinux(dump, process_state); + break; + } + case MD_OS_MAC_OS_X: + case MD_OS_IOS: + case MD_OS_UNIX: + case MD_OS_SOLARIS: + case MD_OS_ANDROID: + case MD_OS_PS3: + default: { + platform_exploitability = NULL; + break; + } + } + + BPLOG_IF(ERROR, !platform_exploitability) << + "No Exploitability module for platform: " << + process_state->system_info()->os; + return platform_exploitability; +} + +bool Exploitability::AddressIsAscii(uint64_t address) { + for (int i = 0; i < 8; i++) { + uint8_t byte = (address >> (8*i)) & 0xff; + if ((byte >= ' ' && byte <= '~') || byte == 0) + continue; + return false; + } + return true; +} + +} // namespace google_breakpad + diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability_linux.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability_linux.cc new file mode 100644 index 0000000000..13ebad12dc --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability_linux.cc @@ -0,0 +1,86 @@ +// Copyright (c) 2013 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// exploitability_linux.cc: Linux specific exploitability engine. +// +// Provides a guess at the exploitability of the crash for the Linux +// platform given a minidump and process_state. +// +// Author: Matthew Riley + +#include "processor/exploitability_linux.h" + +#include "google_breakpad/processor/process_state.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/stack_frame.h" + +namespace { + +// This function in libc is called if the program was compiled with +// -fstack-protector and a function's stack canary changes. +const char kStackCheckFailureFunction[] = "__stack_chk_fail"; + +// This function in libc is called if the program was compiled with +// -D_FORTIFY_SOURCE=2, a function like strcpy() is called, and the runtime +// can determine that the call would overflow the target buffer. +const char kBoundsCheckFailureFunction[] = "__chk_fail"; + +} // namespace + +namespace google_breakpad { + +ExploitabilityLinux::ExploitabilityLinux(Minidump *dump, + ProcessState *process_state) + : Exploitability(dump, process_state) { } + +ExploitabilityRating ExploitabilityLinux::CheckPlatformExploitability() { + // Check the crashing thread for functions suggesting a buffer overflow or + // stack smash. + if (process_state_->requesting_thread() != -1) { + CallStack* crashing_thread = + process_state_->threads()->at(process_state_->requesting_thread()); + const vector& crashing_thread_frames = + *crashing_thread->frames(); + for (size_t i = 0; i < crashing_thread_frames.size(); ++i) { + if (crashing_thread_frames[i]->function_name == + kStackCheckFailureFunction) { + return EXPLOITABILITY_HIGH; + } + + if (crashing_thread_frames[i]->function_name == + kBoundsCheckFailureFunction) { + return EXPLOITABILITY_HIGH; + } + } + } + + return EXPLOITABILITY_NONE; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability_linux.h b/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability_linux.h new file mode 100644 index 0000000000..c63c04570d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability_linux.h @@ -0,0 +1,55 @@ +// Copyright (c) 2013 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// exploitability_linux.h: Linux specific exploitability engine. +// +// Provides a guess at the exploitability of the crash for the Linux +// platform given a minidump and process_state. +// +// Author: Matthew Riley + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_EXPLOITABILITY_LINUX_H_ +#define GOOGLE_BREAKPAD_PROCESSOR_EXPLOITABILITY_LINUX_H_ + +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/processor/exploitability.h" + +namespace google_breakpad { + +class ExploitabilityLinux : public Exploitability { + public: + ExploitabilityLinux(Minidump *dump, + ProcessState *process_state); + + virtual ExploitabilityRating CheckPlatformExploitability(); +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_EXPLOITABILITY_LINUX_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability_unittest.cc new file mode 100644 index 0000000000..72994d5a2b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability_unittest.cc @@ -0,0 +1,117 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE + +#include +#include + +#include + +#include "breakpad_googletest_includes.h" +#include "common/using_std_string.h" +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "google_breakpad/processor/minidump_processor.h" +#include "google_breakpad/processor/process_state.h" +#include "processor/simple_symbol_supplier.h" + +namespace { + +using google_breakpad::BasicSourceLineResolver; +using google_breakpad::MinidumpProcessor; +using google_breakpad::ProcessState; +using google_breakpad::SimpleSymbolSupplier; + +string TestDataDir() { + return string(getenv("srcdir") ? getenv("srcdir") : ".") + + "/src/processor/testdata"; +} + +// Find the given dump file in /src/processor/testdata, process it, +// and get the exploitability rating. Returns EXPLOITABILITY_ERR_PROCESSING +// if the crash dump can't be processed. +google_breakpad::ExploitabilityRating +ExploitabilityFor(const string& filename) { + SimpleSymbolSupplier supplier(TestDataDir() + "/symbols"); + BasicSourceLineResolver resolver; + MinidumpProcessor processor(&supplier, &resolver, true); + ProcessState state; + + string minidump_file = TestDataDir() + "/" + filename; + + if (processor.Process(minidump_file, &state) != + google_breakpad::PROCESS_OK) { + return google_breakpad::EXPLOITABILITY_ERR_PROCESSING; + } + + return state.exploitability(); +} + +TEST(ExploitabilityTest, TestWindowsEngine) { + ASSERT_EQ(google_breakpad::EXPLOITABILITY_HIGH, + ExploitabilityFor("ascii_read_av.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_HIGH, + ExploitabilityFor("ascii_read_av_block_write.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_HIGH, + ExploitabilityFor("ascii_read_av_clobber_write.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_HIGH, + ExploitabilityFor("ascii_read_av_conditional.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_HIGH, + ExploitabilityFor("ascii_read_av_then_jmp.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_HIGH, + ExploitabilityFor("ascii_read_av_xchg_write.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_HIGH, + ExploitabilityFor("ascii_write_av.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_HIGH, + ExploitabilityFor("ascii_write_av_arg_to_call.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_NONE, + ExploitabilityFor("null_read_av.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_NONE, + ExploitabilityFor("null_write_av.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_NONE, + ExploitabilityFor("stack_exhaustion.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_HIGH, + ExploitabilityFor("exec_av_on_stack.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_MEDIUM, + ExploitabilityFor("write_av_non_null.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_LOW, + ExploitabilityFor("read_av_non_null.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_LOW, + ExploitabilityFor("read_av_clobber_write.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_LOW, + ExploitabilityFor("read_av_conditional.dmp")); +} + 
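// The ExploitabilityFor() helper above is essentially the whole client-side
// recipe: construct MinidumpProcessor with exploitability analysis enabled,
// process the dump, then read the rating off the ProcessState. A minimal
// standalone sketch (hypothetical "crash.dmp" path, no symbol supplier; not
// from the upstream sources):
//
//   google_breakpad::BasicSourceLineResolver resolver;
//   google_breakpad::MinidumpProcessor processor(
//       NULL, &resolver, /* enable_exploitability= */ true);
//   google_breakpad::ProcessState state;
//   if (processor.Process("crash.dmp", &state) ==
//       google_breakpad::PROCESS_OK) {
//     printf("exploitability: %d\n", static_cast<int>(state.exploitability()));
//   }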
+TEST(ExploitabilityTest, TestLinuxEngine) { + ASSERT_EQ(google_breakpad::EXPLOITABILITY_NONE, + ExploitabilityFor("linux_null_read_av.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_HIGH, + ExploitabilityFor("linux_overflow.dmp")); + ASSERT_EQ(google_breakpad::EXPLOITABILITY_HIGH, + ExploitabilityFor("linux_stacksmash.dmp")); +} +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability_win.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability_win.cc new file mode 100644 index 0000000000..8a444eb2a4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability_win.cc @@ -0,0 +1,287 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// exploitability_win.cc: Windows specific exploitability engine. +// +// Provides a guess at the exploitability of the crash for the Windows +// platform given a minidump and process_state. +// +// Author: Cris Neckar + +#include + +#include "processor/exploitability_win.h" + +#include "common/scoped_ptr.h" +#include "google_breakpad/common/minidump_exception_win32.h" +#include "google_breakpad/processor/minidump.h" +#include "processor/disassembler_x86.h" +#include "processor/logging.h" + +#include "third_party/libdisasm/libdis.h" + +namespace google_breakpad { + +// The cutoff that we use to judge if and address is likely an offset +// from various interesting addresses. +static const uint64_t kProbableNullOffset = 4096; +static const uint64_t kProbableStackOffset = 8192; + +// The various cutoffs for the different ratings. +static const size_t kHighCutoff = 100; +static const size_t kMediumCutoff = 80; +static const size_t kLowCutoff = 50; +static const size_t kInterestingCutoff = 25; + +// Predefined incremental values for conditional weighting. 
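// (Illustrative reading of the cutoffs above and the bump values and checks
//  below, not an upstream comment: the weight is additive across independent
//  signals. A write access violation at a non-null address contributes
//  kHugeBump (90), which alone falls in the [kMediumCutoff, kHighCutoff)
//  band and yields a MEDIUM rating; if the faulting address is also
//  printable ASCII, AddressIsAscii() adds kMediumBump (50), for a total of
//  140 >= kHighCutoff and a HIGH rating.)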
+static const size_t kTinyBump = 5; +static const size_t kSmallBump = 20; +static const size_t kMediumBump = 50; +static const size_t kLargeBump = 70; +static const size_t kHugeBump = 90; + +// The maximum number of bytes to disassemble past the program counter. +static const size_t kDisassembleBytesBeyondPC = 2048; + +ExploitabilityWin::ExploitabilityWin(Minidump *dump, + ProcessState *process_state) + : Exploitability(dump, process_state) { } + +ExploitabilityRating ExploitabilityWin::CheckPlatformExploitability() { + MinidumpException *exception = dump_->GetException(); + if (!exception) { + BPLOG(INFO) << "Minidump does not have exception record."; + return EXPLOITABILITY_ERR_PROCESSING; + } + + const MDRawExceptionStream *raw_exception = exception->exception(); + if (!raw_exception) { + BPLOG(INFO) << "Could not obtain raw exception info."; + return EXPLOITABILITY_ERR_PROCESSING; + } + + const MinidumpContext *context = exception->GetContext(); + if (!context) { + BPLOG(INFO) << "Could not obtain exception context."; + return EXPLOITABILITY_ERR_PROCESSING; + } + + MinidumpMemoryList *memory_list = dump_->GetMemoryList(); + bool memory_available = true; + if (!memory_list) { + BPLOG(INFO) << "Minidump memory segments not available."; + memory_available = false; + } + uint64_t address = process_state_->crash_address(); + uint32_t exception_code = raw_exception->exception_record.exception_code; + + uint32_t exploitability_weight = 0; + + uint64_t stack_ptr = 0; + uint64_t instruction_ptr = 0; + + switch (context->GetContextCPU()) { + case MD_CONTEXT_X86: + stack_ptr = context->GetContextX86()->esp; + instruction_ptr = context->GetContextX86()->eip; + break; + case MD_CONTEXT_AMD64: + stack_ptr = context->GetContextAMD64()->rsp; + instruction_ptr = context->GetContextAMD64()->rip; + break; + default: + BPLOG(INFO) << "Unsupported architecture."; + return EXPLOITABILITY_ERR_PROCESSING; + } + + // Check if we are executing on the stack. + if (instruction_ptr <= (stack_ptr + kProbableStackOffset) && + instruction_ptr >= (stack_ptr - kProbableStackOffset)) + exploitability_weight += kHugeBump; + + switch (exception_code) { + // This is almost certainly recursion. + case MD_EXCEPTION_CODE_WIN_STACK_OVERFLOW: + exploitability_weight += kTinyBump; + break; + + // These exceptions tend to be benign and we can generally ignore them. + case MD_EXCEPTION_CODE_WIN_INTEGER_DIVIDE_BY_ZERO: + case MD_EXCEPTION_CODE_WIN_INTEGER_OVERFLOW: + case MD_EXCEPTION_CODE_WIN_FLOAT_DIVIDE_BY_ZERO: + case MD_EXCEPTION_CODE_WIN_FLOAT_INEXACT_RESULT: + case MD_EXCEPTION_CODE_WIN_FLOAT_OVERFLOW: + case MD_EXCEPTION_CODE_WIN_FLOAT_UNDERFLOW: + case MD_EXCEPTION_CODE_WIN_IN_PAGE_ERROR: + exploitability_weight += kTinyBump; + break; + + // These exceptions will typically mean that we have jumped where we + // shouldn't. + case MD_EXCEPTION_CODE_WIN_ILLEGAL_INSTRUCTION: + case MD_EXCEPTION_CODE_WIN_FLOAT_INVALID_OPERATION: + case MD_EXCEPTION_CODE_WIN_PRIVILEGED_INSTRUCTION: + exploitability_weight += kLargeBump; + break; + + // These represent bugs in exception handlers. 
+ case MD_EXCEPTION_CODE_WIN_INVALID_DISPOSITION: + case MD_EXCEPTION_CODE_WIN_NONCONTINUABLE_EXCEPTION: + exploitability_weight += kSmallBump; + break; + + case MD_EXCEPTION_CODE_WIN_HEAP_CORRUPTION: + case MD_EXCEPTION_CODE_WIN_STACK_BUFFER_OVERRUN: + exploitability_weight += kHugeBump; + break; + + case MD_EXCEPTION_CODE_WIN_GUARD_PAGE_VIOLATION: + exploitability_weight += kLargeBump; + break; + + case MD_EXCEPTION_CODE_WIN_ACCESS_VIOLATION: + bool near_null = (address <= kProbableNullOffset); + bool bad_read = false; + bool bad_write = false; + if (raw_exception->exception_record.number_parameters >= 1) { + MDAccessViolationTypeWin av_type = + static_cast + (raw_exception->exception_record.exception_information[0]); + switch (av_type) { + case MD_ACCESS_VIOLATION_WIN_READ: + bad_read = true; + if (near_null) + exploitability_weight += kSmallBump; + else + exploitability_weight += kMediumBump; + break; + case MD_ACCESS_VIOLATION_WIN_WRITE: + bad_write = true; + if (near_null) + exploitability_weight += kSmallBump; + else + exploitability_weight += kHugeBump; + break; + case MD_ACCESS_VIOLATION_WIN_EXEC: + if (near_null) + exploitability_weight += kSmallBump; + else + exploitability_weight += kHugeBump; + break; + default: + BPLOG(INFO) << "Unrecognized access violation type."; + return EXPLOITABILITY_ERR_PROCESSING; + break; + } + MinidumpMemoryRegion *instruction_region = 0; + if (memory_available) { + instruction_region = + memory_list->GetMemoryRegionForAddress(instruction_ptr); + } + if (!near_null && instruction_region && + context->GetContextCPU() == MD_CONTEXT_X86 && + (bad_read || bad_write)) { + // Perform checks related to memory around instruction pointer. + uint32_t memory_offset = + instruction_ptr - instruction_region->GetBase(); + uint32_t available_memory = + instruction_region->GetSize() - memory_offset; + available_memory = available_memory > kDisassembleBytesBeyondPC ? + kDisassembleBytesBeyondPC : available_memory; + if (available_memory) { + const uint8_t *raw_memory = + instruction_region->GetMemory() + memory_offset; + DisassemblerX86 disassembler(raw_memory, + available_memory, + instruction_ptr); + disassembler.NextInstruction(); + if (bad_read) + disassembler.setBadRead(); + else + disassembler.setBadWrite(); + if (disassembler.currentInstructionValid()) { + // Check if the faulting instruction falls into one of + // several interesting groups. + switch (disassembler.currentInstructionGroup()) { + case libdis::insn_controlflow: + exploitability_weight += kLargeBump; + break; + case libdis::insn_string: + exploitability_weight += kHugeBump; + break; + default: + break; + } + // Loop the disassembler through the code and check if it + // IDed any interesting conditions in the near future. + // Multiple flags may be set so treat each equally. 
+ while (disassembler.NextInstruction() && + disassembler.currentInstructionValid() && + !disassembler.endOfBlock()) + continue; + if (disassembler.flags() & DISX86_BAD_BRANCH_TARGET) + exploitability_weight += kLargeBump; + if (disassembler.flags() & DISX86_BAD_ARGUMENT_PASSED) + exploitability_weight += kTinyBump; + if (disassembler.flags() & DISX86_BAD_WRITE) + exploitability_weight += kMediumBump; + if (disassembler.flags() & DISX86_BAD_BLOCK_WRITE) + exploitability_weight += kMediumBump; + if (disassembler.flags() & DISX86_BAD_READ) + exploitability_weight += kTinyBump; + if (disassembler.flags() & DISX86_BAD_BLOCK_READ) + exploitability_weight += kTinyBump; + if (disassembler.flags() & DISX86_BAD_COMPARISON) + exploitability_weight += kTinyBump; + } + } + } + if (!near_null && AddressIsAscii(address)) + exploitability_weight += kMediumBump; + } else { + BPLOG(INFO) << "Access violation type parameter missing."; + return EXPLOITABILITY_ERR_PROCESSING; + } + } + + // Based on the calculated weight we return a simplified classification. + BPLOG(INFO) << "Calculated exploitability weight: " << exploitability_weight; + if (exploitability_weight >= kHighCutoff) + return EXPLOITABILITY_HIGH; + if (exploitability_weight >= kMediumCutoff) + return EXPLOITABLITY_MEDIUM; + if (exploitability_weight >= kLowCutoff) + return EXPLOITABILITY_LOW; + if (exploitability_weight >= kInterestingCutoff) + return EXPLOITABILITY_INTERESTING; + + return EXPLOITABILITY_NONE; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability_win.h b/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability_win.h new file mode 100644 index 0000000000..4e08aef030 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/exploitability_win.h @@ -0,0 +1,55 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// exploitability_win.h: Windows specific exploitability engine. 
+// +// Provides a guess at the exploitability of the crash for the Windows +// platform given a minidump and process_state. +// +// Author: Cris Neckar + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_EXPLOITABILITY_WIN_H_ +#define GOOGLE_BREAKPAD_PROCESSOR_EXPLOITABILITY_WIN_H_ + +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/processor/exploitability.h" + +namespace google_breakpad { + +class ExploitabilityWin : public Exploitability { + public: + ExploitabilityWin(Minidump *dump, + ProcessState *process_state); + + virtual ExploitabilityRating CheckPlatformExploitability(); +}; + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_EXPLOITABILITY_WIN_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/fast_source_line_resolver.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/fast_source_line_resolver.cc new file mode 100644 index 0000000000..4a3d000714 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/fast_source_line_resolver.cc @@ -0,0 +1,275 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// fast_source_line_resolver.cc: FastSourceLineResolver is a concrete class that +// implements SourceLineResolverInterface. Both FastSourceLineResolver and +// BasicSourceLineResolver inherit from SourceLineResolverBase class to reduce +// code redundancy. +// +// See fast_source_line_resolver.h and fast_source_line_resolver_types.h +// for more documentation. 
+// +// Author: Siyang Xie (lambxsy@google.com) + +#include "google_breakpad/processor/fast_source_line_resolver.h" +#include "processor/fast_source_line_resolver_types.h" + +#include +#include +#include + +#include "common/scoped_ptr.h" +#include "common/using_std_string.h" +#include "processor/module_factory.h" +#include "processor/simple_serializer-inl.h" + +using std::map; +using std::make_pair; + +namespace google_breakpad { + +FastSourceLineResolver::FastSourceLineResolver() + : SourceLineResolverBase(new FastModuleFactory) { } + +bool FastSourceLineResolver::ShouldDeleteMemoryBufferAfterLoadModule() { + return false; +} + +void FastSourceLineResolver::Module::LookupAddress(StackFrame *frame) const { + MemAddr address = frame->instruction - frame->module->base_address(); + + // First, look for a FUNC record that covers address. Use + // RetrieveNearestRange instead of RetrieveRange so that, if there + // is no such function, we can use the next function to bound the + // extent of the PUBLIC symbol we find, below. This does mean we + // need to check that address indeed falls within the function we + // find; do the range comparison in an overflow-friendly way. + scoped_ptr func(new Function); + const Function* func_ptr = 0; + scoped_ptr public_symbol(new PublicSymbol); + const PublicSymbol* public_symbol_ptr = 0; + MemAddr function_base; + MemAddr function_size; + MemAddr public_address; + + if (functions_.RetrieveNearestRange(address, func_ptr, + &function_base, &function_size) && + address >= function_base && address - function_base < function_size) { + func.get()->CopyFrom(func_ptr); + frame->function_name = func->name; + frame->function_base = frame->module->base_address() + function_base; + + scoped_ptr line(new Line); + const Line* line_ptr = 0; + MemAddr line_base; + if (func->lines.RetrieveRange(address, line_ptr, &line_base, NULL)) { + line.get()->CopyFrom(line_ptr); + FileMap::iterator it = files_.find(line->source_file_id); + if (it != files_.end()) { + frame->source_file_name = + files_.find(line->source_file_id).GetValuePtr(); + } + frame->source_line = line->line; + frame->source_line_base = frame->module->base_address() + line_base; + } + } else if (public_symbols_.Retrieve(address, + public_symbol_ptr, &public_address) && + (!func_ptr || public_address > function_base)) { + public_symbol.get()->CopyFrom(public_symbol_ptr); + frame->function_name = public_symbol->name; + frame->function_base = frame->module->base_address() + public_address; + } +} + +// WFI: WindowsFrameInfo. +// Returns a WFI object reading from a raw memory chunk of data +WindowsFrameInfo FastSourceLineResolver::CopyWFI(const char *raw) { + const WindowsFrameInfo::StackInfoTypes type = + static_cast( + *reinterpret_cast(raw)); + + // The first 8 bytes of int data are unused. + // They correspond to "StackInfoTypes type_;" and "int valid;" + // data member of WFI. 
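// Layout of the raw buffer as read below (byte offsets; inferred from the
// reads, not an upstream comment):
//   [0]  int32_t  type_                 [8]   uint32_t prolog_size
//   [4]  int32_t  valid                 [12]  uint32_t epilog_size
//   [16] uint32_t parameter_size        [20]  uint32_t saved_register_size
//   [24] uint32_t local_size            [28]  uint32_t max_stack_size
//   [32] char     allocates_base_pointer (non-zero means true)
//   [33] NUL-terminated program_string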
+ const uint32_t *para_uint32 = reinterpret_cast( + raw + 2 * sizeof(int32_t)); + + uint32_t prolog_size = para_uint32[0];; + uint32_t epilog_size = para_uint32[1]; + uint32_t parameter_size = para_uint32[2]; + uint32_t saved_register_size = para_uint32[3]; + uint32_t local_size = para_uint32[4]; + uint32_t max_stack_size = para_uint32[5]; + const char *boolean = reinterpret_cast(para_uint32 + 6); + bool allocates_base_pointer = (*boolean != 0); + string program_string = boolean + 1; + + return WindowsFrameInfo(type, + prolog_size, + epilog_size, + parameter_size, + saved_register_size, + local_size, + max_stack_size, + allocates_base_pointer, + program_string); +} + +// Loads a map from the given buffer in char* type. +// Does NOT take ownership of mem_buffer. +// In addition, treat mem_buffer as const char*. +bool FastSourceLineResolver::Module::LoadMapFromMemory( + char *memory_buffer, + size_t memory_buffer_size) { + if (!memory_buffer) return false; + + // Read the "is_corrupt" flag. + const char *mem_buffer = memory_buffer; + mem_buffer = SimpleSerializer::Read(mem_buffer, &is_corrupt_); + + const uint32_t *map_sizes = reinterpret_cast(mem_buffer); + + unsigned int header_size = kNumberMaps_ * sizeof(unsigned int); + + // offsets[]: an array of offset addresses (with respect to mem_buffer), + // for each "Static***Map" component of Module. + // "Static***Map": static version of std::map or map wrapper, i.e., StaticMap, + // StaticAddressMap, StaticContainedRangeMap, and StaticRangeMap. + unsigned int offsets[kNumberMaps_]; + offsets[0] = header_size; + for (int i = 1; i < kNumberMaps_; ++i) { + offsets[i] = offsets[i - 1] + map_sizes[i - 1]; + } + + // Use pointers to construct Static*Map data members in Module: + int map_id = 0; + files_ = StaticMap(mem_buffer + offsets[map_id++]); + functions_ = + StaticRangeMap(mem_buffer + offsets[map_id++]); + public_symbols_ = + StaticAddressMap(mem_buffer + offsets[map_id++]); + for (int i = 0; i < WindowsFrameInfo::STACK_INFO_LAST; ++i) + windows_frame_info_[i] = + StaticContainedRangeMap(mem_buffer + offsets[map_id++]); + + cfi_initial_rules_ = + StaticRangeMap(mem_buffer + offsets[map_id++]); + cfi_delta_rules_ = StaticMap(mem_buffer + offsets[map_id++]); + + return true; +} + +WindowsFrameInfo *FastSourceLineResolver::Module::FindWindowsFrameInfo( + const StackFrame *frame) const { + MemAddr address = frame->instruction - frame->module->base_address(); + scoped_ptr result(new WindowsFrameInfo()); + + // We only know about WindowsFrameInfo::STACK_INFO_FRAME_DATA and + // WindowsFrameInfo::STACK_INFO_FPO. Prefer them in this order. + // WindowsFrameInfo::STACK_INFO_FRAME_DATA is the newer type that + // includes its own program string. + // WindowsFrameInfo::STACK_INFO_FPO is the older type + // corresponding to the FPO_DATA struct. See stackwalker_x86.cc. + const char* frame_info_ptr; + if ((windows_frame_info_[WindowsFrameInfo::STACK_INFO_FRAME_DATA] + .RetrieveRange(address, frame_info_ptr)) + || (windows_frame_info_[WindowsFrameInfo::STACK_INFO_FPO] + .RetrieveRange(address, frame_info_ptr))) { + result->CopyFrom(CopyWFI(frame_info_ptr)); + return result.release(); + } + + // Even without a relevant STACK line, many functions contain + // information about how much space their parameters consume on the + // stack. Use RetrieveNearestRange instead of RetrieveRange, so that + // we can use the function to bound the extent of the PUBLIC symbol, + // below. 
However, this does mean we need to check that ADDRESS + // falls within the retrieved function's range; do the range + // comparison in an overflow-friendly way. + scoped_ptr function(new Function); + const Function* function_ptr = 0; + MemAddr function_base, function_size; + if (functions_.RetrieveNearestRange(address, function_ptr, + &function_base, &function_size) && + address >= function_base && address - function_base < function_size) { + function.get()->CopyFrom(function_ptr); + result->parameter_size = function->parameter_size; + result->valid |= WindowsFrameInfo::VALID_PARAMETER_SIZE; + return result.release(); + } + + // PUBLIC symbols might have a parameter size. Use the function we + // found above to limit the range the public symbol covers. + scoped_ptr public_symbol(new PublicSymbol); + const PublicSymbol* public_symbol_ptr = 0; + MemAddr public_address; + if (public_symbols_.Retrieve(address, public_symbol_ptr, &public_address) && + (!function_ptr || public_address > function_base)) { + public_symbol.get()->CopyFrom(public_symbol_ptr); + result->parameter_size = public_symbol->parameter_size; + } + + return NULL; +} + +CFIFrameInfo *FastSourceLineResolver::Module::FindCFIFrameInfo( + const StackFrame *frame) const { + MemAddr address = frame->instruction - frame->module->base_address(); + MemAddr initial_base, initial_size; + const char* initial_rules = NULL; + + // Find the initial rule whose range covers this address. That + // provides an initial set of register recovery rules. Then, walk + // forward from the initial rule's starting address to frame's + // instruction address, applying delta rules. + if (!cfi_initial_rules_.RetrieveRange(address, initial_rules, + &initial_base, &initial_size)) { + return NULL; + } + + // Create a frame info structure, and populate it with the rules from + // the STACK CFI INIT record. + scoped_ptr rules(new CFIFrameInfo()); + if (!ParseCFIRuleSet(initial_rules, rules.get())) + return NULL; + + // Find the first delta rule that falls within the initial rule's range. + StaticMap::iterator delta = + cfi_delta_rules_.lower_bound(initial_base); + + // Apply delta rules up to and including the frame's address. + while (delta != cfi_delta_rules_.end() && delta.GetKey() <= address) { + ParseCFIRuleSet(delta.GetValuePtr(), rules.get()); + delta++; + } + + return rules.release(); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/fast_source_line_resolver_types.h b/TMessagesProj/jni/third_party/breakpad/src/processor/fast_source_line_resolver_types.h new file mode 100644 index 0000000000..2c010470f5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/fast_source_line_resolver_types.h @@ -0,0 +1,185 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// fast_source_line_resolver_types.h: definition of nested classes/structs in +// FastSourceLineResolver. It moves the definitions out of +// fast_source_line_resolver.cc, so that other classes could have access +// to these private nested types without including fast_source_line_resolver.cc +// +// Author: lambxsy@google.com (Siyang Xie) + +#ifndef PROCESSOR_FAST_SOURCE_LINE_RESOLVER_TYPES_H__ +#define PROCESSOR_FAST_SOURCE_LINE_RESOLVER_TYPES_H__ + +#include "google_breakpad/processor/fast_source_line_resolver.h" +#include "processor/source_line_resolver_base_types.h" + +#include +#include + +#include "google_breakpad/processor/stack_frame.h" +#include "processor/cfi_frame_info.h" +#include "processor/static_address_map-inl.h" +#include "processor/static_contained_range_map-inl.h" +#include "processor/static_map.h" +#include "processor/static_range_map-inl.h" +#include "processor/windows_frame_info.h" + +namespace google_breakpad { + +struct FastSourceLineResolver::Line : public SourceLineResolverBase::Line { + void CopyFrom(const Line *line_ptr) { + const char *raw = reinterpret_cast(line_ptr); + CopyFrom(raw); + } + + // De-serialize the memory data of a Line. + void CopyFrom(const char *raw) { + address = *(reinterpret_cast(raw)); + size = *(reinterpret_cast(raw + sizeof(address))); + source_file_id = *(reinterpret_cast( + raw + 2 * sizeof(address))); + line = *(reinterpret_cast( + raw + 2 * sizeof(address) + sizeof(source_file_id))); + } +}; + +struct FastSourceLineResolver::Function : +public SourceLineResolverBase::Function { + void CopyFrom(const Function *func_ptr) { + const char *raw = reinterpret_cast(func_ptr); + CopyFrom(raw); + } + + // De-serialize the memory data of a Function. + void CopyFrom(const char *raw) { + size_t name_size = strlen(raw) + 1; + name = raw; + address = *(reinterpret_cast(raw + name_size)); + size = *(reinterpret_cast( + raw + name_size + sizeof(MemAddr))); + parameter_size = *(reinterpret_cast( + raw + name_size + 2 * sizeof(MemAddr))); + lines = StaticRangeMap( + raw + name_size + 2 * sizeof(MemAddr) + sizeof(int32_t)); + } + + StaticRangeMap lines; +}; + +struct FastSourceLineResolver::PublicSymbol : +public SourceLineResolverBase::PublicSymbol { + void CopyFrom(const PublicSymbol *public_symbol_ptr) { + const char *raw = reinterpret_cast(public_symbol_ptr); + CopyFrom(raw); + } + + // De-serialize the memory data of a PublicSymbol. 
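// As read below, a serialized PublicSymbol is laid out as a NUL-terminated
// name string, followed by a MemAddr address, followed by an int32_t
// parameter_size (offsets inferred from the reads, not an upstream comment).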
+ void CopyFrom(const char *raw) { + size_t name_size = strlen(raw) + 1; + name = raw; + address = *(reinterpret_cast(raw + name_size)); + parameter_size = *(reinterpret_cast( + raw + name_size + sizeof(MemAddr))); + } +}; + +class FastSourceLineResolver::Module: public SourceLineResolverBase::Module { + public: + explicit Module(const string &name) : name_(name), is_corrupt_(false) { } + virtual ~Module() { } + + // Looks up the given relative address, and fills the StackFrame struct + // with the result. + virtual void LookupAddress(StackFrame *frame) const; + + // Loads a map from the given buffer in char* type. + virtual bool LoadMapFromMemory(char *memory_buffer, + size_t memory_buffer_size); + + // Tells whether the loaded symbol data is corrupt. Return value is + // undefined, if the symbol data hasn't been loaded yet. + virtual bool IsCorrupt() const { return is_corrupt_; } + + // If Windows stack walking information is available covering ADDRESS, + // return a WindowsFrameInfo structure describing it. If the information + // is not available, returns NULL. A NULL return value does not indicate + // an error. The caller takes ownership of any returned WindowsFrameInfo + // object. + virtual WindowsFrameInfo *FindWindowsFrameInfo(const StackFrame *frame) const; + + // If CFI stack walking information is available covering ADDRESS, + // return a CFIFrameInfo structure describing it. If the information + // is not available, return NULL. The caller takes ownership of any + // returned CFIFrameInfo object. + virtual CFIFrameInfo *FindCFIFrameInfo(const StackFrame *frame) const; + + // Number of serialized map components of Module. + static const int kNumberMaps_ = 5 + WindowsFrameInfo::STACK_INFO_LAST; + + private: + friend class FastSourceLineResolver; + friend class ModuleComparer; + typedef StaticMap FileMap; + + string name_; + StaticMap files_; + StaticRangeMap functions_; + StaticAddressMap public_symbols_; + bool is_corrupt_; + + // Each element in the array is a ContainedRangeMap for a type + // listed in WindowsFrameInfoTypes. These are split by type because + // there may be overlaps between maps of different types, but some + // information is only available as certain types. + StaticContainedRangeMap + windows_frame_info_[WindowsFrameInfo::STACK_INFO_LAST]; + + // DWARF CFI stack walking data. The Module stores the initial rule sets + // and rule deltas as strings, just as they appear in the symbol file: + // although the file may contain hundreds of thousands of STACK CFI + // records, walking a stack will only ever use a few of them, so it's + // best to delay parsing a record until it's actually needed. + // + // STACK CFI INIT records: for each range, an initial set of register + // recovery rules. The RangeMap's itself gives the starting and ending + // addresses. + StaticRangeMap cfi_initial_rules_; + + // STACK CFI records: at a given address, the changes to the register + // recovery rules that take effect at that address. The map key is the + // starting address; the ending address is the key of the next entry in + // this map, or the end of the range as given by the cfi_initial_rules_ + // entry (which FindCFIFrameInfo looks up first). 
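// For illustration (hypothetical records in the symbol-file syntax, not
// taken from a real module): an initial rule set such as
//   STACK CFI INIT 3d40 2a .cfa: $esp 4 + .ra: .cfa 4 - ^
// followed by a delta record such as
//   STACK CFI 3d41 .cfa: $esp 8 + $ebx: .cfa 8 - ^
// means that for addresses at or after 3d41 (within the INIT range) the
// .cfa and $ebx rules are the updated ones, while .ra keeps its INIT rule.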
+ StaticMap cfi_delta_rules_; +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_FAST_SOURCE_LINE_RESOLVER_TYPES_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/fast_source_line_resolver_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/fast_source_line_resolver_unittest.cc new file mode 100644 index 0000000000..72632f84f9 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/fast_source_line_resolver_unittest.cc @@ -0,0 +1,489 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// fast_source_line_resolver_unittest.cc: Unit tests for FastSourceLineResolver. +// Two different approaches for testing fast source line resolver: +// First, use the same unit test data for basic source line resolver. +// Second, read data from symbol files, load them as basic modules, and then +// serialize them and load the serialized data as fast modules. Then compare +// modules to assure the fast module contains exactly the same data as +// basic module. 
+// +// Author: Siyang Xie (lambxsy@google.com) + +#include +#include + +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/using_std_string.h" +#include "google_breakpad/processor/code_module.h" +#include "google_breakpad/processor/stack_frame.h" +#include "google_breakpad/processor/memory_region.h" +#include "processor/logging.h" +#include "processor/module_serializer.h" +#include "processor/module_comparer.h" + +namespace { + +using google_breakpad::SourceLineResolverBase; +using google_breakpad::BasicSourceLineResolver; +using google_breakpad::FastSourceLineResolver; +using google_breakpad::ModuleSerializer; +using google_breakpad::ModuleComparer; +using google_breakpad::CFIFrameInfo; +using google_breakpad::CodeModule; +using google_breakpad::MemoryRegion; +using google_breakpad::StackFrame; +using google_breakpad::WindowsFrameInfo; +using google_breakpad::linked_ptr; +using google_breakpad::scoped_ptr; + +class TestCodeModule : public CodeModule { + public: + explicit TestCodeModule(string code_file) : code_file_(code_file) {} + virtual ~TestCodeModule() {} + + virtual uint64_t base_address() const { return 0; } + virtual uint64_t size() const { return 0xb000; } + virtual string code_file() const { return code_file_; } + virtual string code_identifier() const { return ""; } + virtual string debug_file() const { return ""; } + virtual string debug_identifier() const { return ""; } + virtual string version() const { return ""; } + virtual const CodeModule* Copy() const { + return new TestCodeModule(code_file_); + } + + private: + string code_file_; +}; + +// A mock memory region object, for use by the STACK CFI tests. +class MockMemoryRegion: public MemoryRegion { + uint64_t GetBase() const { return 0x10000; } + uint32_t GetSize() const { return 0x01000; } + bool GetMemoryAtAddress(uint64_t address, uint8_t *value) const { + *value = address & 0xff; + return true; + } + bool GetMemoryAtAddress(uint64_t address, uint16_t *value) const { + *value = address & 0xffff; + return true; + } + bool GetMemoryAtAddress(uint64_t address, uint32_t *value) const { + switch (address) { + case 0x10008: *value = 0x98ecadc3; break; // saved %ebx + case 0x1000c: *value = 0x878f7524; break; // saved %esi + case 0x10010: *value = 0x6312f9a5; break; // saved %edi + case 0x10014: *value = 0x10038; break; // caller's %ebp + case 0x10018: *value = 0xf6438648; break; // return address + default: *value = 0xdeadbeef; break; // junk + } + return true; + } + bool GetMemoryAtAddress(uint64_t address, uint64_t *value) const { + *value = address; + return true; + } + void Print() const { + assert(false); + } +}; + +// Verify that, for every association in ACTUAL, EXPECTED has the same +// association. (That is, ACTUAL's associations should be a subset of +// EXPECTED's.) Also verify that ACTUAL has associations for ".ra" and +// ".cfa". 
+static bool VerifyRegisters( + const char *file, int line, + const CFIFrameInfo::RegisterValueMap &expected, + const CFIFrameInfo::RegisterValueMap &actual) { + CFIFrameInfo::RegisterValueMap::const_iterator a; + a = actual.find(".cfa"); + if (a == actual.end()) + return false; + a = actual.find(".ra"); + if (a == actual.end()) + return false; + for (a = actual.begin(); a != actual.end(); a++) { + CFIFrameInfo::RegisterValueMap::const_iterator e = + expected.find(a->first); + if (e == expected.end()) { + fprintf(stderr, "%s:%d: unexpected register '%s' recovered, value 0x%x\n", + file, line, a->first.c_str(), a->second); + return false; + } + if (e->second != a->second) { + fprintf(stderr, + "%s:%d: register '%s' recovered value was 0x%x, expected 0x%x\n", + file, line, a->first.c_str(), a->second, e->second); + return false; + } + // Don't complain if this doesn't recover all registers. Although + // the DWARF spec says that unmentioned registers are undefined, + // GCC uses omission to mean that they are unchanged. + } + return true; +} + +static bool VerifyEmpty(const StackFrame &frame) { + if (frame.function_name.empty() && + frame.source_file_name.empty() && + frame.source_line == 0) + return true; + return false; +} + +static void ClearSourceLineInfo(StackFrame *frame) { + frame->function_name.clear(); + frame->module = NULL; + frame->source_file_name.clear(); + frame->source_line = 0; +} + +class TestFastSourceLineResolver : public ::testing::Test { + public: + void SetUp() { + testdata_dir = string(getenv("srcdir") ? getenv("srcdir") : ".") + + "/src/processor/testdata"; + } + + string symbol_file(int file_index) { + std::stringstream ss; + ss << testdata_dir << "/module" << file_index << ".out"; + return ss.str(); + } + + ModuleSerializer serializer; + BasicSourceLineResolver basic_resolver; + FastSourceLineResolver fast_resolver; + ModuleComparer module_comparer; + + string testdata_dir; +}; + +// Test adapted from basic_source_line_resolver_unittest. 
+TEST_F(TestFastSourceLineResolver, TestLoadAndResolve) { + TestCodeModule module1("module1"); + ASSERT_TRUE(basic_resolver.LoadModule(&module1, symbol_file(1))); + ASSERT_TRUE(basic_resolver.HasModule(&module1)); + // Convert module1 to fast_module: + ASSERT_TRUE(serializer.ConvertOneModule( + module1.code_file(), &basic_resolver, &fast_resolver)); + ASSERT_TRUE(fast_resolver.HasModule(&module1)); + + TestCodeModule module2("module2"); + ASSERT_TRUE(basic_resolver.LoadModule(&module2, symbol_file(2))); + ASSERT_TRUE(basic_resolver.HasModule(&module2)); + // Convert module2 to fast_module: + ASSERT_TRUE(serializer.ConvertOneModule( + module2.code_file(), &basic_resolver, &fast_resolver)); + ASSERT_TRUE(fast_resolver.HasModule(&module2)); + + StackFrame frame; + scoped_ptr windows_frame_info; + scoped_ptr cfi_frame_info; + frame.instruction = 0x1000; + frame.module = NULL; + fast_resolver.FillSourceLineInfo(&frame); + ASSERT_FALSE(frame.module); + ASSERT_TRUE(frame.function_name.empty()); + ASSERT_EQ(frame.function_base, 0U); + ASSERT_TRUE(frame.source_file_name.empty()); + ASSERT_EQ(frame.source_line, 0); + ASSERT_EQ(frame.source_line_base, 0U); + + frame.module = &module1; + fast_resolver.FillSourceLineInfo(&frame); + ASSERT_EQ(frame.function_name, "Function1_1"); + ASSERT_TRUE(frame.module); + ASSERT_EQ(frame.module->code_file(), "module1"); + ASSERT_EQ(frame.function_base, 0x1000U); + ASSERT_EQ(frame.source_file_name, "file1_1.cc"); + ASSERT_EQ(frame.source_line, 44); + ASSERT_EQ(frame.source_line_base, 0x1000U); + windows_frame_info.reset(fast_resolver.FindWindowsFrameInfo(&frame)); + ASSERT_TRUE(windows_frame_info.get()); + ASSERT_FALSE(windows_frame_info->allocates_base_pointer); + ASSERT_EQ(windows_frame_info->program_string, + "$eip 4 + ^ = $esp $ebp 8 + = $ebp $ebp ^ ="); + + ClearSourceLineInfo(&frame); + frame.instruction = 0x800; + frame.module = &module1; + fast_resolver.FillSourceLineInfo(&frame); + ASSERT_TRUE(VerifyEmpty(frame)); + windows_frame_info.reset(fast_resolver.FindWindowsFrameInfo(&frame)); + ASSERT_FALSE(windows_frame_info.get()); + + frame.instruction = 0x1280; + fast_resolver.FillSourceLineInfo(&frame); + ASSERT_EQ(frame.function_name, "Function1_3"); + ASSERT_TRUE(frame.source_file_name.empty()); + ASSERT_EQ(frame.source_line, 0); + windows_frame_info.reset(fast_resolver.FindWindowsFrameInfo(&frame)); + ASSERT_TRUE(windows_frame_info.get()); + ASSERT_EQ(windows_frame_info->type_, WindowsFrameInfo::STACK_INFO_UNKNOWN); + ASSERT_FALSE(windows_frame_info->allocates_base_pointer); + ASSERT_TRUE(windows_frame_info->program_string.empty()); + + frame.instruction = 0x1380; + fast_resolver.FillSourceLineInfo(&frame); + ASSERT_EQ(frame.function_name, "Function1_4"); + ASSERT_TRUE(frame.source_file_name.empty()); + ASSERT_EQ(frame.source_line, 0); + windows_frame_info.reset(fast_resolver.FindWindowsFrameInfo(&frame)); + ASSERT_TRUE(windows_frame_info.get()); + ASSERT_EQ(windows_frame_info->type_, WindowsFrameInfo::STACK_INFO_FRAME_DATA); + ASSERT_FALSE(windows_frame_info->allocates_base_pointer); + ASSERT_FALSE(windows_frame_info->program_string.empty()); + + frame.instruction = 0x2000; + windows_frame_info.reset(fast_resolver.FindWindowsFrameInfo(&frame)); + ASSERT_FALSE(windows_frame_info.get()); + + // module1 has STACK CFI records covering 3d40..3def; + // module2 has STACK CFI records covering 3df0..3e9f; + // check that FindCFIFrameInfo doesn't claim to find any outside those ranges. 
+ frame.instruction = 0x3d3f; + frame.module = &module1; + cfi_frame_info.reset(fast_resolver.FindCFIFrameInfo(&frame)); + ASSERT_FALSE(cfi_frame_info.get()); + + frame.instruction = 0x3e9f; + frame.module = &module1; + cfi_frame_info.reset(fast_resolver.FindCFIFrameInfo(&frame)); + ASSERT_FALSE(cfi_frame_info.get()); + + CFIFrameInfo::RegisterValueMap current_registers; + CFIFrameInfo::RegisterValueMap caller_registers; + CFIFrameInfo::RegisterValueMap expected_caller_registers; + MockMemoryRegion memory; + + // Regardless of which instruction evaluation takes place at, it + // should produce the same values for the caller's registers. + expected_caller_registers[".cfa"] = 0x1001c; + expected_caller_registers[".ra"] = 0xf6438648; + expected_caller_registers["$ebp"] = 0x10038; + expected_caller_registers["$ebx"] = 0x98ecadc3; + expected_caller_registers["$esi"] = 0x878f7524; + expected_caller_registers["$edi"] = 0x6312f9a5; + + frame.instruction = 0x3d40; + frame.module = &module1; + current_registers.clear(); + current_registers["$esp"] = 0x10018; + current_registers["$ebp"] = 0x10038; + current_registers["$ebx"] = 0x98ecadc3; + current_registers["$esi"] = 0x878f7524; + current_registers["$edi"] = 0x6312f9a5; + cfi_frame_info.reset(fast_resolver.FindCFIFrameInfo(&frame)); + ASSERT_TRUE(cfi_frame_info.get()); + ASSERT_TRUE(cfi_frame_info.get() + ->FindCallerRegs(current_registers, memory, + &caller_registers)); + ASSERT_TRUE(VerifyRegisters(__FILE__, __LINE__, + expected_caller_registers, caller_registers)); + + frame.instruction = 0x3d41; + current_registers["$esp"] = 0x10014; + cfi_frame_info.reset(fast_resolver.FindCFIFrameInfo(&frame)); + ASSERT_TRUE(cfi_frame_info.get()); + ASSERT_TRUE(cfi_frame_info.get() + ->FindCallerRegs(current_registers, memory, + &caller_registers)); + ASSERT_TRUE(VerifyRegisters(__FILE__, __LINE__, + expected_caller_registers, caller_registers)); + + frame.instruction = 0x3d43; + current_registers["$ebp"] = 0x10014; + cfi_frame_info.reset(fast_resolver.FindCFIFrameInfo(&frame)); + ASSERT_TRUE(cfi_frame_info.get()); + ASSERT_TRUE(cfi_frame_info.get() + ->FindCallerRegs(current_registers, memory, + &caller_registers)); + VerifyRegisters(__FILE__, __LINE__, + expected_caller_registers, caller_registers); + + frame.instruction = 0x3d54; + current_registers["$ebx"] = 0x6864f054U; + cfi_frame_info.reset(fast_resolver.FindCFIFrameInfo(&frame)); + ASSERT_TRUE(cfi_frame_info.get()); + ASSERT_TRUE(cfi_frame_info.get() + ->FindCallerRegs(current_registers, memory, + &caller_registers)); + VerifyRegisters(__FILE__, __LINE__, + expected_caller_registers, caller_registers); + + frame.instruction = 0x3d5a; + current_registers["$esi"] = 0x6285f79aU; + cfi_frame_info.reset(fast_resolver.FindCFIFrameInfo(&frame)); + ASSERT_TRUE(cfi_frame_info.get()); + ASSERT_TRUE(cfi_frame_info.get() + ->FindCallerRegs(current_registers, memory, + &caller_registers)); + VerifyRegisters(__FILE__, __LINE__, + expected_caller_registers, caller_registers); + + frame.instruction = 0x3d84; + current_registers["$edi"] = 0x64061449U; + cfi_frame_info.reset(fast_resolver.FindCFIFrameInfo(&frame)); + ASSERT_TRUE(cfi_frame_info.get()); + ASSERT_TRUE(cfi_frame_info.get() + ->FindCallerRegs(current_registers, memory, + &caller_registers)); + VerifyRegisters(__FILE__, __LINE__, + expected_caller_registers, caller_registers); + + frame.instruction = 0x2900; + frame.module = &module1; + fast_resolver.FillSourceLineInfo(&frame); + ASSERT_EQ(frame.function_name, string("PublicSymbol")); + + frame.instruction = 
0x4000; + frame.module = &module1; + fast_resolver.FillSourceLineInfo(&frame); + ASSERT_EQ(frame.function_name, string("LargeFunction")); + + frame.instruction = 0x2181; + frame.module = &module2; + fast_resolver.FillSourceLineInfo(&frame); + ASSERT_EQ(frame.function_name, "Function2_2"); + ASSERT_EQ(frame.function_base, 0x2170U); + ASSERT_TRUE(frame.module); + ASSERT_EQ(frame.module->code_file(), "module2"); + ASSERT_EQ(frame.source_file_name, "file2_2.cc"); + ASSERT_EQ(frame.source_line, 21); + ASSERT_EQ(frame.source_line_base, 0x2180U); + windows_frame_info.reset(fast_resolver.FindWindowsFrameInfo(&frame)); + ASSERT_TRUE(windows_frame_info.get()); + ASSERT_EQ(windows_frame_info->type_, WindowsFrameInfo::STACK_INFO_FRAME_DATA); + ASSERT_EQ(windows_frame_info->prolog_size, 1U); + + frame.instruction = 0x216f; + fast_resolver.FillSourceLineInfo(&frame); + ASSERT_EQ(frame.function_name, "Public2_1"); + + ClearSourceLineInfo(&frame); + frame.instruction = 0x219f; + frame.module = &module2; + fast_resolver.FillSourceLineInfo(&frame); + ASSERT_TRUE(frame.function_name.empty()); + + frame.instruction = 0x21a0; + frame.module = &module2; + fast_resolver.FillSourceLineInfo(&frame); + ASSERT_EQ(frame.function_name, "Public2_2"); +} + +TEST_F(TestFastSourceLineResolver, TestInvalidLoads) { + TestCodeModule module3("module3"); + ASSERT_TRUE(basic_resolver.LoadModule(&module3, + testdata_dir + "/module3_bad.out")); + ASSERT_TRUE(basic_resolver.HasModule(&module3)); + ASSERT_TRUE(basic_resolver.IsModuleCorrupt(&module3)); + // Convert module3 to fast_module: + ASSERT_TRUE(serializer.ConvertOneModule(module3.code_file(), + &basic_resolver, + &fast_resolver)); + ASSERT_TRUE(fast_resolver.HasModule(&module3)); + ASSERT_TRUE(fast_resolver.IsModuleCorrupt(&module3)); + + TestCodeModule module4("module4"); + ASSERT_TRUE(basic_resolver.LoadModule(&module4, + testdata_dir + "/module4_bad.out")); + ASSERT_TRUE(basic_resolver.HasModule(&module4)); + ASSERT_TRUE(basic_resolver.IsModuleCorrupt(&module4)); + // Convert module4 to fast_module: + ASSERT_TRUE(serializer.ConvertOneModule(module4.code_file(), + &basic_resolver, + &fast_resolver)); + ASSERT_TRUE(fast_resolver.HasModule(&module4)); + ASSERT_TRUE(fast_resolver.IsModuleCorrupt(&module4)); + + TestCodeModule module5("module5"); + ASSERT_FALSE(fast_resolver.LoadModule(&module5, + testdata_dir + "/invalid-filename")); + ASSERT_FALSE(fast_resolver.HasModule(&module5)); + + TestCodeModule invalidmodule("invalid-module"); + ASSERT_FALSE(fast_resolver.HasModule(&invalidmodule)); +} + +TEST_F(TestFastSourceLineResolver, TestUnload) { + TestCodeModule module1("module1"); + ASSERT_FALSE(basic_resolver.HasModule(&module1)); + + ASSERT_TRUE(basic_resolver.LoadModule(&module1, symbol_file(1))); + ASSERT_TRUE(basic_resolver.HasModule(&module1)); + // Convert module1 to fast_module. + ASSERT_TRUE(serializer.ConvertOneModule(module1.code_file(), + &basic_resolver, + &fast_resolver)); + ASSERT_TRUE(fast_resolver.HasModule(&module1)); + basic_resolver.UnloadModule(&module1); + fast_resolver.UnloadModule(&module1); + ASSERT_FALSE(fast_resolver.HasModule(&module1)); + + ASSERT_TRUE(basic_resolver.LoadModule(&module1, symbol_file(1))); + ASSERT_TRUE(basic_resolver.HasModule(&module1)); + // Convert module1 to fast_module. 
+ ASSERT_TRUE(serializer.ConvertOneModule(module1.code_file(), + &basic_resolver, + &fast_resolver)); + ASSERT_TRUE(fast_resolver.HasModule(&module1)); +} + +TEST_F(TestFastSourceLineResolver, CompareModule) { + char *symbol_data; + size_t symbol_data_size; + string symbol_data_string; + string filename; + + for (int module_index = 0; module_index < 3; ++module_index) { + std::stringstream ss; + ss << testdata_dir << "/module" << module_index << ".out"; + filename = ss.str(); + ASSERT_TRUE(SourceLineResolverBase::ReadSymbolFile( + symbol_file(module_index), &symbol_data, &symbol_data_size)); + symbol_data_string.assign(symbol_data, symbol_data_size); + delete [] symbol_data; + ASSERT_TRUE(module_comparer.Compare(symbol_data_string)); + } +} + +} // namespace + +int main(int argc, char *argv[]) { + ::testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/linked_ptr.h b/TMessagesProj/jni/third_party/breakpad/src/processor/linked_ptr.h new file mode 100644 index 0000000000..72fbba84a9 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/linked_ptr.h @@ -0,0 +1,193 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// A "smart" pointer type with reference tracking. Every pointer to a +// particular object is kept on a circular linked list. When the last pointer +// to an object is destroyed or reassigned, the object is deleted. +// +// Used properly, this deletes the object when the last reference goes away. +// There are several caveats: +// - Like all reference counting schemes, cycles lead to leaks. +// - Each smart pointer is actually two pointers (8 bytes instead of 4). +// - Every time a pointer is assigned, the entire list of pointers to that +// object is traversed. This class is therefore NOT SUITABLE when there +// will often be more than two or three pointers to a particular object. +// - References are only tracked as long as linked_ptr<> objects are copied. 
+// If a linked_ptr<> is converted to a raw pointer and back, BAD THINGS +// will happen (double deletion). +// +// A good use of this class is storing object references in STL containers. +// You can safely put linked_ptr<> in a vector<>. +// Other uses may not be as good. +// +// Note: If you use an incomplete type with linked_ptr<>, the class +// *containing* linked_ptr<> must have a constructor and destructor (even +// if they do nothing!). + +#ifndef PROCESSOR_LINKED_PTR_H__ +#define PROCESSOR_LINKED_PTR_H__ + +namespace google_breakpad { + +// This is used internally by all instances of linked_ptr<>. It needs to be +// a non-template class because different types of linked_ptr<> can refer to +// the same object (linked_ptr(obj) vs linked_ptr(obj)). +// So, it needs to be possible for different types of linked_ptr to participate +// in the same circular linked list, so we need a single class type here. +// +// DO NOT USE THIS CLASS DIRECTLY YOURSELF. Use linked_ptr. +class linked_ptr_internal { + public: + // Create a new circle that includes only this instance. + void join_new() { + next_ = this; + } + + // Join an existing circle. + void join(linked_ptr_internal const* ptr) { + linked_ptr_internal const* p = ptr; + while (p->next_ != ptr) p = p->next_; + p->next_ = this; + next_ = ptr; + } + + // Leave whatever circle we're part of. Returns true iff we were the + // last member of the circle. Once this is done, you can join() another. + bool depart() { + if (next_ == this) return true; + linked_ptr_internal const* p = next_; + while (p->next_ != this) p = p->next_; + p->next_ = next_; + return false; + } + + private: + mutable linked_ptr_internal const* next_; +}; + +template +class linked_ptr { + public: + typedef T element_type; + + // Take over ownership of a raw pointer. This should happen as soon as + // possible after the object is created. + explicit linked_ptr(T* ptr = NULL) { capture(ptr); } + ~linked_ptr() { depart(); } + + // Copy an existing linked_ptr<>, adding ourselves to the list of references. + template linked_ptr(linked_ptr const& ptr) { copy(&ptr); } + linked_ptr(linked_ptr const& ptr) { copy(&ptr); } + + // Assignment releases the old value and acquires the new. + template linked_ptr& operator=(linked_ptr const& ptr) { + depart(); + copy(&ptr); + return *this; + } + + linked_ptr& operator=(linked_ptr const& ptr) { + if (&ptr != this) { + depart(); + copy(&ptr); + } + return *this; + } + + // Smart pointer members. + void reset(T* ptr = NULL) { depart(); capture(ptr); } + T* get() const { return value_; } + T* operator->() const { return value_; } + T& operator*() const { return *value_; } + // Release ownership of the pointed object and returns it. + // Sole ownership by this linked_ptr object is required. 
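+  // For example (illustrative, with a hypothetical type Foo):
+  //   linked_ptr<Foo> p(new Foo);   // p is the only reference
+  //   Foo* raw = p.release();       // p gives up ownership; caller must delete raw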
+ T* release() { + link_.depart(); + T* v = value_; + value_ = NULL; + return v; + } + + bool operator==(T* p) const { return value_ == p; } + bool operator!=(T* p) const { return value_ != p; } + template + bool operator==(linked_ptr const& ptr) const { + return value_ == ptr.get(); + } + template + bool operator!=(linked_ptr const& ptr) const { + return value_ != ptr.get(); + } + + private: + template + friend class linked_ptr; + + T* value_; + linked_ptr_internal link_; + + void depart() { + if (link_.depart()) delete value_; + } + + void capture(T* ptr) { + value_ = ptr; + link_.join_new(); + } + + template void copy(linked_ptr const* ptr) { + value_ = ptr->get(); + if (value_) + link_.join(&ptr->link_); + else + link_.join_new(); + } +}; + +template inline +bool operator==(T* ptr, const linked_ptr& x) { + return ptr == x.get(); +} + +template inline +bool operator!=(T* ptr, const linked_ptr& x) { + return ptr != x.get(); +} + +// A function to convert T* into linked_ptr +// Doing e.g. make_linked_ptr(new FooBarBaz(arg)) is a shorter notation +// for linked_ptr >(new FooBarBaz(arg)) +template +linked_ptr make_linked_ptr(T* ptr) { + return linked_ptr(ptr); +} + +} // namespace google_breakpad + +#endif // PROCESSOR_LINKED_PTR_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/logging.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/logging.cc new file mode 100644 index 0000000000..8bb95a695a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/logging.cc @@ -0,0 +1,115 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// logging.cc: Breakpad logging +// +// See logging.h for documentation. 
+// +// Author: Mark Mentovai + +#include +#include +#include +#include +#include + +#include + +#include "common/using_std_string.h" +#include "processor/logging.h" +#include "processor/pathname_stripper.h" + +#ifdef _WIN32 +#define snprintf _snprintf +#endif + +namespace google_breakpad { + +LogStream::LogStream(std::ostream &stream, Severity severity, + const char *file, int line) + : stream_(stream) { + time_t clock; + time(&clock); + struct tm tm_struct; +#ifdef _WIN32 + localtime_s(&tm_struct, &clock); +#else + localtime_r(&clock, &tm_struct); +#endif + char time_string[20]; + strftime(time_string, sizeof(time_string), "%Y-%m-%d %H:%M:%S", &tm_struct); + + const char *severity_string = "UNKNOWN_SEVERITY"; + switch (severity) { + case SEVERITY_INFO: + severity_string = "INFO"; + break; + case SEVERITY_ERROR: + severity_string = "ERROR"; + break; + } + + stream_ << time_string << ": " << PathnameStripper::File(file) << ":" << + line << ": " << severity_string << ": "; +} + +LogStream::~LogStream() { + stream_ << std::endl; +} + +string HexString(uint32_t number) { + char buffer[11]; + snprintf(buffer, sizeof(buffer), "0x%x", number); + return string(buffer); +} + +string HexString(uint64_t number) { + char buffer[19]; + snprintf(buffer, sizeof(buffer), "0x%" PRIx64, number); + return string(buffer); +} + +string HexString(int number) { + char buffer[19]; + snprintf(buffer, sizeof(buffer), "0x%x", number); + return string(buffer); +} + +int ErrnoString(string *error_string) { + assert(error_string); + + // strerror isn't necessarily thread-safe. strerror_r would be preferrable, + // but GNU libc uses a nonstandard strerror_r by default, which returns a + // char* (rather than an int success indicator) and doesn't necessarily + // use the supplied buffer. + error_string->assign(strerror(errno)); + return errno; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/logging.h b/TMessagesProj/jni/third_party/breakpad/src/processor/logging.h new file mode 100644 index 0000000000..406fb67cf6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/logging.h @@ -0,0 +1,186 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// logging.h: Breakpad logging +// +// Breakpad itself uses Breakpad logging with statements of the form: +// BPLOG(severity) << "message"; +// severity may be INFO, ERROR, or other values defined in this file. +// +// BPLOG is an overridable macro so that users can customize Breakpad's +// logging. Left at the default, logging messages are sent to stderr along +// with a timestamp and the source code location that produced a message. +// The streams may be changed by redefining BPLOG_*_STREAM, the logging +// behavior may be changed by redefining BPLOG_*, and the entire logging +// system may be overridden by redefining BPLOG(severity). These +// redefinitions may be passed to the preprocessor as a command-line flag +// (-D). +// +// If an additional header is required to override Breakpad logging, it can +// be specified by the BP_LOGGING_INCLUDE macro. If defined, this header +// will #include the header specified by that macro. +// +// If any initialization is needed before logging, it can be performed by +// a function called through the BPLOG_INIT macro. Each main function of +// an executable program in the Breakpad processor library calls +// BPLOG_INIT(&argc, &argv); before any logging can be performed; define +// BPLOG_INIT appropriately if initialization is required. +// +// Author: Mark Mentovai + +#ifndef PROCESSOR_LOGGING_H__ +#define PROCESSOR_LOGGING_H__ + +#include +#include + +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" + +#ifdef BP_LOGGING_INCLUDE +#include BP_LOGGING_INCLUDE +#endif // BP_LOGGING_INCLUDE + +#ifndef THIRD_PARTY_BREAKPAD_GOOGLE_GLUE_LOGGING_H_ +namespace base_logging { + +// The open-source copy of logging.h has diverged from Google's internal copy +// (temporarily, at least). To support the transition to structured logging +// a definition for base_logging::LogMessage is needed, which is a ostream- +// like object for streaming arguments to construct a log message. +typedef std::ostream LogMessage; + +} // namespace base_logging +#endif // THIRD_PARTY_BREAKPAD_GOOGLE_GLUE_LOGGING_H_ + +namespace google_breakpad { + +// These are defined in Microsoft headers. +#ifdef SEVERITY_ERROR +#undef SEVERITY_ERROR +#endif + +#ifdef ERROR +#undef ERROR +#endif + +class LogStream { + public: + enum Severity { + SEVERITY_INFO, + SEVERITY_ERROR + }; + + // Begin logging a message to the stream identified by |stream|, at the + // indicated severity. The file and line parameters should be set so as to + // identify the line of source code that is producing a message. + LogStream(std::ostream &stream, Severity severity, + const char *file, int line); + + // Finish logging by printing a newline and flushing the output stream. 
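+  // For example, in "BPLOG(INFO) << "message";" the LogStream is a temporary,
+  // so the message is completed and flushed when the temporary is destroyed at
+  // the end of the full expression.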
+ ~LogStream(); + + template std::ostream& operator<<(const T &t) { + return stream_ << t; + } + + private: + std::ostream &stream_; + + // Disallow copy constructor and assignment operator + explicit LogStream(const LogStream &that); + void operator=(const LogStream &that); +}; + +// This class is used to explicitly ignore values in the conditional logging +// macros. This avoids compiler warnings like "value computed is not used" +// and "statement has no effect". +class LogMessageVoidify { + public: + LogMessageVoidify() {} + + // This has to be an operator with a precedence lower than << but higher + // than ?: + void operator&(base_logging::LogMessage &) {} +}; + +// Returns number formatted as a hexadecimal string, such as "0x7b". +string HexString(uint32_t number); +string HexString(uint64_t number); +string HexString(int number); + +// Returns the error code as set in the global errno variable, and sets +// error_string, a required argument, to a string describing that error +// code. +int ErrnoString(string *error_string); + +} // namespace google_breakpad + +#ifndef BPLOG_INIT +#define BPLOG_INIT(pargc, pargv) +#endif // BPLOG_INIT + +#define BPLOG_LAZY_STREAM(stream, condition) \ + !(condition) ? (void) 0 : \ + google_breakpad::LogMessageVoidify() & (BPLOG_ ## stream) + +#ifndef BPLOG_MINIMUM_SEVERITY +#define BPLOG_MINIMUM_SEVERITY SEVERITY_INFO +#endif + +#define BPLOG_LOG_IS_ON(severity) \ + ((google_breakpad::LogStream::SEVERITY_ ## severity) >= \ + (google_breakpad::LogStream::BPLOG_MINIMUM_SEVERITY)) + +#ifndef BPLOG +#define BPLOG(severity) BPLOG_LAZY_STREAM(severity, BPLOG_LOG_IS_ON(severity)) +#endif // BPLOG + +#ifndef BPLOG_INFO +#ifndef BPLOG_INFO_STREAM +#define BPLOG_INFO_STREAM std::clog +#endif // BPLOG_INFO_STREAM +#define BPLOG_INFO google_breakpad::LogStream(BPLOG_INFO_STREAM, \ + google_breakpad::LogStream::SEVERITY_INFO, \ + __FILE__, __LINE__) +#endif // BPLOG_INFO + +#ifndef BPLOG_ERROR +#ifndef BPLOG_ERROR_STREAM +#define BPLOG_ERROR_STREAM std::cerr +#endif // BPLOG_ERROR_STREAM +#define BPLOG_ERROR google_breakpad::LogStream(BPLOG_ERROR_STREAM, \ + google_breakpad::LogStream::SEVERITY_ERROR, \ + __FILE__, __LINE__) +#endif // BPLOG_ERROR + +#define BPLOG_IF(severity, condition) \ + BPLOG_LAZY_STREAM(severity, ((condition) && BPLOG_LOG_IS_ON(severity))) + +#endif // PROCESSOR_LOGGING_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/map_serializers-inl.h b/TMessagesProj/jni/third_party/breakpad/src/processor/map_serializers-inl.h new file mode 100644 index 0000000000..61c7bbd7c8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/map_serializers-inl.h @@ -0,0 +1,266 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// map_serializers_inl.h: implementation for serializing std::map and its +// wrapper classes. +// +// See map_serializers.h for documentation. +// +// Author: Siyang Xie (lambxsy@google.com) + +#ifndef PROCESSOR_MAP_SERIALIZERS_INL_H__ +#define PROCESSOR_MAP_SERIALIZERS_INL_H__ + +#include +#include + +#include "processor/map_serializers.h" +#include "processor/simple_serializer.h" + +#include "processor/address_map-inl.h" +#include "processor/range_map-inl.h" +#include "processor/contained_range_map-inl.h" + +#include "processor/logging.h" + +namespace google_breakpad { + +template +size_t StdMapSerializer::SizeOf( + const std::map &m) const { + size_t size = 0; + size_t header_size = (1 + m.size()) * sizeof(uint32_t); + size += header_size; + + typename std::map::const_iterator iter; + for (iter = m.begin(); iter != m.end(); ++iter) { + size += key_serializer_.SizeOf(iter->first); + size += value_serializer_.SizeOf(iter->second); + } + return size; +} + +template +char *StdMapSerializer::Write(const std::map &m, + char *dest) const { + if (!dest) { + BPLOG(ERROR) << "StdMapSerializer failed: write to NULL address."; + return NULL; + } + char *start_address = dest; + + // Write header: + // Number of nodes. + dest = SimpleSerializer::Write(m.size(), dest); + // Nodes offsets. + uint32_t *offsets = reinterpret_cast(dest); + dest += sizeof(uint32_t) * m.size(); + + char *key_address = dest; + dest += sizeof(Key) * m.size(); + + // Traverse map. + typename std::map::const_iterator iter; + int index = 0; + for (iter = m.begin(); iter != m.end(); ++iter, ++index) { + offsets[index] = static_cast(dest - start_address); + key_address = key_serializer_.Write(iter->first, key_address); + dest = value_serializer_.Write(iter->second, dest); + } + return dest; +} + +template +char *StdMapSerializer::Serialize( + const std::map &m, unsigned int *size) const { + // Compute size of memory to be allocated. + unsigned int size_to_alloc = SizeOf(m); + // Allocate memory. + char *serialized_data = new char[size_to_alloc]; + if (!serialized_data) { + BPLOG(INFO) << "StdMapSerializer memory allocation failed."; + if (size) *size = 0; + return NULL; + } + // Write serialized data into memory. + Write(m, serialized_data); + + if (size) *size = size_to_alloc; + return serialized_data; +} + +template +size_t RangeMapSerializer::SizeOf( + const RangeMap &m) const { + size_t size = 0; + size_t header_size = (1 + m.map_.size()) * sizeof(uint32_t); + size += header_size; + + typename std::map::const_iterator iter; + for (iter = m.map_.begin(); iter != m.map_.end(); ++iter) { + // Size of key (high address). + size += address_serializer_.SizeOf(iter->first); + // Size of base (low address). 
+ size += address_serializer_.SizeOf(iter->second.base()); + // Size of entry. + size += entry_serializer_.SizeOf(iter->second.entry()); + } + return size; +} + +template +char *RangeMapSerializer::Write( + const RangeMap &m, char *dest) const { + if (!dest) { + BPLOG(ERROR) << "RangeMapSerializer failed: write to NULL address."; + return NULL; + } + char *start_address = dest; + + // Write header: + // Number of nodes. + dest = SimpleSerializer::Write(m.map_.size(), dest); + // Nodes offsets. + uint32_t *offsets = reinterpret_cast(dest); + dest += sizeof(uint32_t) * m.map_.size(); + + char *key_address = dest; + dest += sizeof(Address) * m.map_.size(); + + // Traverse map. + typename std::map::const_iterator iter; + int index = 0; + for (iter = m.map_.begin(); iter != m.map_.end(); ++iter, ++index) { + offsets[index] = static_cast(dest - start_address); + key_address = address_serializer_.Write(iter->first, key_address); + dest = address_serializer_.Write(iter->second.base(), dest); + dest = entry_serializer_.Write(iter->second.entry(), dest); + } + return dest; +} + +template +char *RangeMapSerializer::Serialize( + const RangeMap &m, unsigned int *size) const { + // Compute size of memory to be allocated. + unsigned int size_to_alloc = SizeOf(m); + // Allocate memory. + char *serialized_data = new char[size_to_alloc]; + if (!serialized_data) { + BPLOG(INFO) << "RangeMapSerializer memory allocation failed."; + if (size) *size = 0; + return NULL; + } + + // Write serialized data into memory. + Write(m, serialized_data); + + if (size) *size = size_to_alloc; + return serialized_data; +} + + +template +size_t ContainedRangeMapSerializer::SizeOf( + const ContainedRangeMap *m) const { + size_t size = 0; + size_t header_size = addr_serializer_.SizeOf(m->base_) + + entry_serializer_.SizeOf(m->entry_) + + sizeof(uint32_t); + size += header_size; + // In case m.map_ == NULL, we treat it as an empty map: + size += sizeof(uint32_t); + if (m->map_) { + size += m->map_->size() * sizeof(uint32_t); + typename Map::const_iterator iter; + for (iter = m->map_->begin(); iter != m->map_->end(); ++iter) { + size += addr_serializer_.SizeOf(iter->first); + // Recursive calculation of size: + size += SizeOf(iter->second); + } + } + return size; +} + +template +char *ContainedRangeMapSerializer::Write( + const ContainedRangeMap *m, char *dest) const { + if (!dest) { + BPLOG(ERROR) << "StdMapSerializer failed: write to NULL address."; + return NULL; + } + dest = addr_serializer_.Write(m->base_, dest); + dest = SimpleSerializer::Write(entry_serializer_.SizeOf(m->entry_), + dest); + dest = entry_serializer_.Write(m->entry_, dest); + + // Write map<: + char *map_address = dest; + if (m->map_ == NULL) { + dest = SimpleSerializer::Write(0, dest); + } else { + dest = SimpleSerializer::Write(m->map_->size(), dest); + uint32_t *offsets = reinterpret_cast(dest); + dest += sizeof(uint32_t) * m->map_->size(); + + char *key_address = dest; + dest += sizeof(AddrType) * m->map_->size(); + + // Traverse map. + typename Map::const_iterator iter; + int index = 0; + for (iter = m->map_->begin(); iter != m->map_->end(); ++iter, ++index) { + offsets[index] = static_cast(dest - map_address); + key_address = addr_serializer_.Write(iter->first, key_address); + // Recursively write. + dest = Write(iter->second, dest); + } + } + return dest; +} + +template +char *ContainedRangeMapSerializer::Serialize( + const ContainedRangeMap *m, unsigned int *size) const { + unsigned int size_to_alloc = SizeOf(m); + // Allocating memory. 
+ char *serialized_data = new char[size_to_alloc]; + if (!serialized_data) { + BPLOG(INFO) << "ContainedRangeMapSerializer memory allocation failed."; + if (size) *size = 0; + return NULL; + } + Write(m, serialized_data); + if (size) *size = size_to_alloc; + return serialized_data; +} + +} // namespace google_breakpad + +#endif // PROCESSOR_MAP_SERIALIZERS_INL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/map_serializers.h b/TMessagesProj/jni/third_party/breakpad/src/processor/map_serializers.h new file mode 100644 index 0000000000..a0b9d3fd67 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/map_serializers.h @@ -0,0 +1,168 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// map_serializers.h: defines templates for serializing std::map and its +// wrappers: AddressMap, RangeMap, and ContainedRangeMap. +// +// Author: Siyang Xie (lambxsy@google.com) + + +#ifndef PROCESSOR_MAP_SERIALIZERS_H__ +#define PROCESSOR_MAP_SERIALIZERS_H__ + +#include +#include + +#include "processor/simple_serializer.h" + +#include "processor/address_map-inl.h" +#include "processor/range_map-inl.h" +#include "processor/contained_range_map-inl.h" + +namespace google_breakpad { + +// StdMapSerializer allocates memory and serializes an std::map instance into a +// chunk of memory data. +template +class StdMapSerializer { + public: + // Calculate the memory size of serialized data. + size_t SizeOf(const std::map &m) const; + + // Writes the serialized data to memory with start address = dest, + // and returns the "end" of data, i.e., return the address follow the final + // byte of data. + // NOTE: caller has to allocate enough memory before invoke Write() method. + char* Write(const std::map &m, char* dest) const; + + // Serializes a std::map object into a chunk of memory data with format + // described in "StaticMap.h" comment. + // Returns a pointer to the serialized data. If size != NULL, *size is set + // to the size of serialized data, i.e., SizeOf(m). 
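+  // For instance, serializing the map {1: 2, 3: 6} with 32-bit keys and values
+  // yields the 32-bit words 2 (node count), 20, 24 (node offsets), 1, 3 (keys),
+  // 2, 6 (values); see MapWithTwoElementsTestCase in map_serializers_unittest.cc.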
+ // Caller has the ownership of memory allocated as "new char[]". + char* Serialize(const std::map &m, unsigned int *size) const; + + private: + SimpleSerializer key_serializer_; + SimpleSerializer value_serializer_; +}; + +// AddressMapSerializer allocates memory and serializes an AddressMap into a +// chunk of memory data. +template +class AddressMapSerializer { + public: + // Calculate the memory size of serialized data. + size_t SizeOf(const AddressMap &m) const { + return std_map_serializer_.SizeOf(m.map_); + } + + // Write the serialized data to specified memory location. Return the "end" + // of data, i.e., return the address after the final byte of data. + // NOTE: caller has to allocate enough memory before invoke Write() method. + char* Write(const AddressMap &m, char *dest) const { + return std_map_serializer_.Write(m.map_, dest); + } + + // Serializes an AddressMap object into a chunk of memory data. + // Returns a pointer to the serialized data. If size != NULL, *size is set + // to the size of serialized data, i.e., SizeOf(m). + // Caller has the ownership of memory allocated as "new char[]". + char* Serialize(const AddressMap &m, unsigned int *size) const { + return std_map_serializer_.Serialize(m.map_, size); + } + + private: + // AddressMapSerializer is a simple wrapper of StdMapSerializer, just as + // AddressMap is a simple wrapper of std::map. + StdMapSerializer std_map_serializer_; +}; + +// RangeMapSerializer allocates memory and serializes a RangeMap instance into a +// chunk of memory data. +template +class RangeMapSerializer { + public: + // Calculate the memory size of serialized data. + size_t SizeOf(const RangeMap &m) const; + + // Write the serialized data to specified memory location. Return the "end" + // of data, i.e., return the address after the final byte of data. + // NOTE: caller has to allocate enough memory before invoke Write() method. + char* Write(const RangeMap &m, char* dest) const; + + // Serializes a RangeMap object into a chunk of memory data. + // Returns a pointer to the serialized data. If size != NULL, *size is set + // to the size of serialized data, i.e., SizeOf(m). + // Caller has the ownership of memory allocated as "new char[]". + char* Serialize(const RangeMap &m, unsigned int *size) const; + + private: + // Convenient type name for Range. + typedef typename RangeMap::Range Range; + + // Serializer for RangeMap's key and Range::base_. + SimpleSerializer
address_serializer_; + // Serializer for RangeMap::Range::entry_. + SimpleSerializer entry_serializer_; +}; + +// ContainedRangeMapSerializer allocates memory and serializes a +// ContainedRangeMap instance into a chunk of memory data. +template +class ContainedRangeMapSerializer { + public: + // Calculate the memory size of serialized data. + size_t SizeOf(const ContainedRangeMap *m) const; + + // Write the serialized data to specified memory location. Return the "end" + // of data, i.e., return the address after the final byte of data. + // NOTE: caller has to allocate enough memory before invoke Write() method. + char* Write(const ContainedRangeMap *m, + char* dest) const; + + // Serializes a ContainedRangeMap object into a chunk of memory data. + // Returns a pointer to the serialized data. If size != NULL, *size is set + // to the size of serialized data, i.e., SizeOf(m). + // Caller has the ownership of memory allocated as "new char[]". + char* Serialize(const ContainedRangeMap *m, + unsigned int *size) const; + + private: + // Convenient type name for the underlying map type. + typedef std::map*> Map; + + // Serializer for addresses and entries stored in ContainedRangeMap. + SimpleSerializer addr_serializer_; + SimpleSerializer entry_serializer_; +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_MAP_SERIALIZERS_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/map_serializers_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/map_serializers_unittest.cc new file mode 100644 index 0000000000..0d872ec2e2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/map_serializers_unittest.cc @@ -0,0 +1,386 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// map_serializers_unittest.cc: Unit tests for std::map serializer and +// std::map wrapper serializers. 
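+// The expected byte layouts in the tests below mirror the format produced by
+// the corresponding Write() implementations in map_serializers-inl.h.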
+// +// Author: Siyang Xie (lambxsy@google.com) + +#include +#include +#include +#include +#include +#include + +#include "breakpad_googletest_includes.h" +#include "map_serializers-inl.h" + +#include "processor/address_map-inl.h" +#include "processor/range_map-inl.h" +#include "processor/contained_range_map-inl.h" + +typedef int32_t AddrType; +typedef int32_t EntryType; + +class TestStdMapSerializer : public ::testing::Test { + protected: + void SetUp() { + serialized_size_ = 0; + serialized_data_ = NULL; + } + + void TearDown() { + delete [] serialized_data_; + } + + std::map std_map_; + google_breakpad::StdMapSerializer serializer_; + uint32_t serialized_size_; + char *serialized_data_; +}; + +TEST_F(TestStdMapSerializer, EmptyMapTestCase) { + const int32_t correct_data[] = { 0 }; + uint32_t correct_size = sizeof(correct_data); + + // std_map_ is empty. + serialized_data_ = serializer_.Serialize(std_map_, &serialized_size_); + + EXPECT_EQ(correct_size, serialized_size_); + EXPECT_EQ(memcmp(correct_data, serialized_data_, correct_size), 0); +} + +TEST_F(TestStdMapSerializer, MapWithTwoElementsTestCase) { + const int32_t correct_data[] = { + // # of nodes + 2, + // Offsets + 20, 24, + // Keys + 1, 3, + // Values + 2, 6 + }; + uint32_t correct_size = sizeof(correct_data); + + std_map_.insert(std::make_pair(1, 2)); + std_map_.insert(std::make_pair(3, 6)); + + serialized_data_ = serializer_.Serialize(std_map_, &serialized_size_); + + EXPECT_EQ(correct_size, serialized_size_); + EXPECT_EQ(memcmp(correct_data, serialized_data_, correct_size), 0); +} + +TEST_F(TestStdMapSerializer, MapWithFiveElementsTestCase) { + const int32_t correct_data[] = { + // # of nodes + 5, + // Offsets + 44, 48, 52, 56, 60, + // Keys + 1, 2, 3, 4, 5, + // Values + 11, 12, 13, 14, 15 + }; + uint32_t correct_size = sizeof(correct_data); + + for (int i = 1; i < 6; ++i) + std_map_.insert(std::make_pair(i, 10 + i)); + + serialized_data_ = serializer_.Serialize(std_map_, &serialized_size_); + + EXPECT_EQ(correct_size, serialized_size_); + EXPECT_EQ(memcmp(correct_data, serialized_data_, correct_size), 0); +} + +class TestAddressMapSerializer : public ::testing::Test { + protected: + void SetUp() { + serialized_size_ = 0; + serialized_data_ = 0; + } + + void TearDown() { + delete [] serialized_data_; + } + + google_breakpad::AddressMap address_map_; + google_breakpad::AddressMapSerializer serializer_; + uint32_t serialized_size_; + char *serialized_data_; +}; + +TEST_F(TestAddressMapSerializer, EmptyMapTestCase) { + const int32_t correct_data[] = { 0 }; + uint32_t correct_size = sizeof(correct_data); + + // std_map_ is empty. 
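+  // An empty map serializes to just the zero node count.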
+ serialized_data_ = serializer_.Serialize(address_map_, &serialized_size_); + + EXPECT_EQ(correct_size, serialized_size_); + EXPECT_EQ(memcmp(correct_data, serialized_data_, correct_size), 0); +} + +TEST_F(TestAddressMapSerializer, MapWithTwoElementsTestCase) { + const int32_t correct_data[] = { + // # of nodes + 2, + // Offsets + 20, 24, + // Keys + 1, 3, + // Values + 2, 6 + }; + uint32_t correct_size = sizeof(correct_data); + + address_map_.Store(1, 2); + address_map_.Store(3, 6); + + serialized_data_ = serializer_.Serialize(address_map_, &serialized_size_); + + EXPECT_EQ(correct_size, serialized_size_); + EXPECT_EQ(memcmp(correct_data, serialized_data_, correct_size), 0); +} + +TEST_F(TestAddressMapSerializer, MapWithFourElementsTestCase) { + const int32_t correct_data[] = { + // # of nodes + 4, + // Offsets + 36, 40, 44, 48, + // Keys + -6, -4, 8, 123, + // Values + 2, 3, 5, 8 + }; + uint32_t correct_size = sizeof(correct_data); + + address_map_.Store(-6, 2); + address_map_.Store(-4, 3); + address_map_.Store(8, 5); + address_map_.Store(123, 8); + + serialized_data_ = serializer_.Serialize(address_map_, &serialized_size_); + + EXPECT_EQ(correct_size, serialized_size_); + EXPECT_EQ(memcmp(correct_data, serialized_data_, correct_size), 0); +} + + +class TestRangeMapSerializer : public ::testing::Test { + protected: + void SetUp() { + serialized_size_ = 0; + serialized_data_ = 0; + } + + void TearDown() { + delete [] serialized_data_; + } + + google_breakpad::RangeMap range_map_; + google_breakpad::RangeMapSerializer serializer_; + uint32_t serialized_size_; + char *serialized_data_; +}; + +TEST_F(TestRangeMapSerializer, EmptyMapTestCase) { + const int32_t correct_data[] = { 0 }; + uint32_t correct_size = sizeof(correct_data); + + // range_map_ is empty. 
+ serialized_data_ = serializer_.Serialize(range_map_, &serialized_size_); + + EXPECT_EQ(correct_size, serialized_size_); + EXPECT_EQ(memcmp(correct_data, serialized_data_, correct_size), 0); +} + +TEST_F(TestRangeMapSerializer, MapWithOneRangeTestCase) { + const int32_t correct_data[] = { + // # of nodes + 1, + // Offsets + 12, + // Keys: high address + 10, + // Values: (low address, entry) pairs + 1, 6 + }; + uint32_t correct_size = sizeof(correct_data); + + range_map_.StoreRange(1, 10, 6); + + serialized_data_ = serializer_.Serialize(range_map_, &serialized_size_); + + EXPECT_EQ(correct_size, serialized_size_); + EXPECT_EQ(memcmp(correct_data, serialized_data_, correct_size), 0); +} + +TEST_F(TestRangeMapSerializer, MapWithThreeRangesTestCase) { + const int32_t correct_data[] = { + // # of nodes + 3, + // Offsets + 28, 36, 44, + // Keys: high address + 5, 9, 20, + // Values: (low address, entry) pairs + 2, 1, 6, 2, 10, 3 + }; + uint32_t correct_size = sizeof(correct_data); + + ASSERT_TRUE(range_map_.StoreRange(2, 4, 1)); + ASSERT_TRUE(range_map_.StoreRange(6, 4, 2)); + ASSERT_TRUE(range_map_.StoreRange(10, 11, 3)); + + serialized_data_ = serializer_.Serialize(range_map_, &serialized_size_); + + EXPECT_EQ(correct_size, serialized_size_); + EXPECT_EQ(memcmp(correct_data, serialized_data_, correct_size), 0); +} + + +class TestContainedRangeMapSerializer : public ::testing::Test { + protected: + void SetUp() { + serialized_size_ = 0; + serialized_data_ = 0; + } + + void TearDown() { + delete [] serialized_data_; + } + + google_breakpad::ContainedRangeMap crm_map_; + google_breakpad::ContainedRangeMapSerializer serializer_; + uint32_t serialized_size_; + char *serialized_data_; +}; + +TEST_F(TestContainedRangeMapSerializer, EmptyMapTestCase) { + const int32_t correct_data[] = { + 0, // base address of root + 4, // size of entry + 0, // entry stored at root + 0 // empty map stored at root + }; + uint32_t correct_size = sizeof(correct_data); + + // crm_map_ is empty. 
+ serialized_data_ = serializer_.Serialize(&crm_map_, &serialized_size_); + + EXPECT_EQ(correct_size, serialized_size_); + EXPECT_EQ(memcmp(correct_data, serialized_data_, correct_size), 0); +} + +TEST_F(TestContainedRangeMapSerializer, MapWithOneRangeTestCase) { + const int32_t correct_data[] = { + 0, // base address of root + 4, // size of entry + 0, // entry stored at root + // Map stored at root node: + 1, // # of nodes + 12, // offset + 9, // key + // value: a child ContainedRangeMap + 3, // base address of child CRM + 4, // size of entry + -1, // entry stored in child CRM + 0 // empty sub-map stored in child CRM + }; + uint32_t correct_size = sizeof(correct_data); + + crm_map_.StoreRange(3, 7, -1); + + serialized_data_ = serializer_.Serialize(&crm_map_, &serialized_size_); + + EXPECT_EQ(correct_size, serialized_size_); + EXPECT_EQ(memcmp(correct_data, serialized_data_, correct_size), 0); +} + +TEST_F(TestContainedRangeMapSerializer, MapWithTwoLevelsTestCase) { + // Tree structure of ranges: + // root level 0 + // | + // map + // / \ level 1: child1, child2 + // 2~8 10~20 + // | | + // map map + // / \ | + // 3~4 6~7 16-20 level 2: grandchild1, grandchild2, grandchild3 + + const int32_t correct_data[] = { + // root: base, entry_size, entry + 0, 4, 0, + // root's map: # of nodes, offset1, offset2, key1, key2 + 2, 20, 84, 8, 20, + // child1: base, entry_size, entry: + 2, 4, -1, + // child1's map: # of nodes, offset1, offset2, key1, key2 + 2, 20, 36, 4, 7, + // grandchild1: base, entry_size, entry, empty_map + 3, 4, -1, 0, + // grandchild2: base, entry_size, entry, empty_map + 6, 4, -1, 0, + // child2: base, entry_size, entry: + 10, 4, -1, + // child2's map: # of nodes, offset1, key1 + 1, 12, 20, + // grandchild3: base, entry_size, entry, empty_map + 16, 4, -1, 0 + }; + uint32_t correct_size = sizeof(correct_data); + + // Store child1. + ASSERT_TRUE(crm_map_.StoreRange(2, 7, -1)); + // Store child2. + ASSERT_TRUE(crm_map_.StoreRange(10, 11, -1)); + // Store grandchild1. + ASSERT_TRUE(crm_map_.StoreRange(3, 2, -1)); + // Store grandchild2. + ASSERT_TRUE(crm_map_.StoreRange(6, 2, -1)); + // Store grandchild3. + ASSERT_TRUE(crm_map_.StoreRange(16, 5, -1)); + + serialized_data_ = serializer_.Serialize(&crm_map_, &serialized_size_); + + EXPECT_EQ(correct_size, serialized_size_); + EXPECT_EQ(memcmp(correct_data, serialized_data_, correct_size), 0); +} + + +int main(int argc, char *argv[]) { + ::testing::InitGoogleTest(&argc, argv); + + return RUN_ALL_TESTS(); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/microdump.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/microdump.cc new file mode 100644 index 0000000000..bf62855293 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/microdump.cc @@ -0,0 +1,306 @@ +// Copyright (c) 2014 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// microdump.cc: A microdump reader. +// +// See microdump.h for documentation. + +#include "google_breakpad/processor/microdump.h" + +#include +#include + +#include +#include +#include +#include + +#include "google_breakpad/common/minidump_cpu_arm.h" +#include "google_breakpad/processor/code_module.h" +#include "processor/basic_code_module.h" +#include "processor/linked_ptr.h" +#include "processor/logging.h" +#include "processor/range_map-inl.h" + +namespace { +static const char kGoogleBreakpadKey[] = "google-breakpad"; +static const char kMicrodumpBegin[] = "-----BEGIN BREAKPAD MICRODUMP-----"; +static const char kMicrodumpEnd[] = "-----END BREAKPAD MICRODUMP-----"; +static const char kOsKey[] = ": O "; +static const char kCpuKey[] = ": C "; +static const char kMmapKey[] = ": M "; +static const char kStackKey[] = ": S "; +static const char kStackFirstLineKey[] = ": S 0 "; +static const char kArmArchitecture[] = "arm"; +static const char kArm64Architecture[] = "arm64"; + +template +T HexStrToL(const string& str) { + uint64_t res = 0; + std::istringstream ss(str); + ss >> std::hex >> res; + return static_cast(res); +} + +std::vector ParseHexBuf(const string& str) { + std::vector buf; + for (size_t i = 0; i < str.length(); i += 2) { + buf.push_back(HexStrToL(str.substr(i, 2))); + } + return buf; +} + +} // namespace + +namespace google_breakpad { + +// +// MicrodumpModules +// + +void MicrodumpModules::Add(const CodeModule* module) { + linked_ptr module_ptr(module); + if (!map_->StoreRange(module->base_address(), module->size(), module_ptr)) { + BPLOG(ERROR) << "Module " << module->code_file() << + " could not be stored"; + } +} + + +// +// MicrodumpContext +// + +void MicrodumpContext::SetContextARM(MDRawContextARM* arm) { + DumpContext::SetContextFlags(MD_CONTEXT_ARM); + DumpContext::SetContextARM(arm); + valid_ = true; +} + +void MicrodumpContext::SetContextARM64(MDRawContextARM64* arm64) { + DumpContext::SetContextFlags(MD_CONTEXT_ARM64); + DumpContext::SetContextARM64(arm64); + valid_ = true; +} + + +// +// MicrodumpMemoryRegion +// + +MicrodumpMemoryRegion::MicrodumpMemoryRegion() : base_address_(0) { } + +void MicrodumpMemoryRegion::Init(uint64_t base_address, + const std::vector& contents) { + base_address_ = base_address; + contents_ = contents; +} + +uint64_t MicrodumpMemoryRegion::GetBase() const { return base_address_; } + +uint32_t MicrodumpMemoryRegion::GetSize() const { return contents_.size(); } + +bool MicrodumpMemoryRegion::GetMemoryAtAddress(uint64_t address, + uint8_t* value) const { + return GetMemoryLittleEndian(address, 
value); +} + +bool MicrodumpMemoryRegion::GetMemoryAtAddress(uint64_t address, + uint16_t* value) const { + return GetMemoryLittleEndian(address, value); +} + +bool MicrodumpMemoryRegion::GetMemoryAtAddress(uint64_t address, + uint32_t* value) const { + return GetMemoryLittleEndian(address, value); +} + +bool MicrodumpMemoryRegion::GetMemoryAtAddress(uint64_t address, + uint64_t* value) const { + return GetMemoryLittleEndian(address, value); +} + +template +bool MicrodumpMemoryRegion::GetMemoryLittleEndian(uint64_t address, + ValueType* value) const { + if (address < base_address_ || + address - base_address_ + sizeof(ValueType) > contents_.size()) + return false; + ValueType v = 0; + uint64_t start = address - base_address_; + // The loop condition is odd, but it's correct for size_t. + for (size_t i = sizeof(ValueType) - 1; i < sizeof(ValueType); i--) + v = (v << 8) | static_cast(contents_[start + i]); + *value = v; + return true; +} + +void MicrodumpMemoryRegion::Print() const { + // Not reached, just needed to honor the base class contract. + assert(false); +} + +// +// Microdump +// +Microdump::Microdump(const string& contents) + : context_(new MicrodumpContext()), + stack_region_(new MicrodumpMemoryRegion()), + modules_(new MicrodumpModules()), + system_info_(new SystemInfo()) { + assert(!contents.empty()); + + bool in_microdump = false; + string line; + uint64_t stack_start = 0; + std::vector stack_content; + string arch; + + std::istringstream stream(contents); + while (std::getline(stream, line)) { + if (line.find(kGoogleBreakpadKey) == string::npos) { + continue; + } + if (line.find(kMicrodumpBegin) != string::npos) { + in_microdump = true; + continue; + } + if (line.find(kMicrodumpEnd) != string::npos) { + break; + } + + if (!in_microdump) { + continue; + } + + size_t pos; + if ((pos = line.find(kOsKey)) != string::npos) { + string os_str(line, pos + strlen(kOsKey)); + std::istringstream os_tokens(os_str); + string os_id; + string num_cpus; + string os_version; + // This reflect the actual HW arch and might not match the arch emulated + // for the execution (e.g., running a 32-bit binary on a 64-bit cpu). + string hw_arch; + + os_tokens >> os_id; + os_tokens >> arch; + os_tokens >> num_cpus; + os_tokens >> hw_arch; + std::getline(os_tokens, os_version); + os_version.erase(0, 1); // remove leading space. + + system_info_->cpu = hw_arch; + system_info_->cpu_count = HexStrToL(num_cpus); + system_info_->os_version = os_version; + + if (os_id == "L") { + system_info_->os = "Linux"; + system_info_->os_short = "linux"; + } else if (os_id == "A") { + system_info_->os = "Android"; + system_info_->os_short = "android"; + } + + // OS line also contains release and version for future use. + } else if ((pos = line.find(kStackKey)) != string::npos) { + if (line.find(kStackFirstLineKey) != string::npos) { + // The first line of the stack (S 0 stack header) provides the value of + // the stack pointer, the start address of the stack being dumped and + // the length of the stack. We could use it in future to double check + // that we received all the stack as expected. + continue; + } + string stack_str(line, pos + strlen(kStackKey)); + std::istringstream stack_tokens(stack_str); + string start_addr_str; + string raw_content; + stack_tokens >> start_addr_str; + stack_tokens >> raw_content; + uint64_t start_addr = HexStrToL(start_addr_str); + + if (stack_start != 0) { + // Verify that the stack chunks in the microdump are contiguous. 
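+        // (Each chunk line has the form "<prefix>: S <start_addr> <hex_bytes>";
+        // successive chunks are expected to directly follow the previous one.)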
+ assert(start_addr == stack_start + stack_content.size()); + } else { + stack_start = start_addr; + } + std::vector chunk = ParseHexBuf(raw_content); + stack_content.insert(stack_content.end(), chunk.begin(), chunk.end()); + + } else if ((pos = line.find(kCpuKey)) != string::npos) { + string cpu_state_str(line, pos + strlen(kCpuKey)); + std::vector cpu_state_raw = ParseHexBuf(cpu_state_str); + if (strcmp(arch.c_str(), kArmArchitecture) == 0) { + if (cpu_state_raw.size() != sizeof(MDRawContextARM)) { + std::cerr << "Malformed CPU context. Got " << cpu_state_raw.size() << + " bytes instead of " << sizeof(MDRawContextARM) << std::endl; + continue; + } + MDRawContextARM* arm = new MDRawContextARM(); + memcpy(arm, &cpu_state_raw[0], cpu_state_raw.size()); + context_->SetContextARM(arm); + } else if (strcmp(arch.c_str(), kArm64Architecture) == 0) { + if (cpu_state_raw.size() != sizeof(MDRawContextARM64)) { + std::cerr << "Malformed CPU context. Got " << cpu_state_raw.size() << + " bytes instead of " << sizeof(MDRawContextARM64) << std::endl; + continue; + } + MDRawContextARM64* arm = new MDRawContextARM64(); + memcpy(arm, &cpu_state_raw[0], cpu_state_raw.size()); + context_->SetContextARM64(arm); + } else { + std::cerr << "Unsupported architecture: " << arch << std::endl; + } + } else if ((pos = line.find(kMmapKey)) != string::npos) { + string mmap_line(line, pos + strlen(kMmapKey)); + std::istringstream mmap_tokens(mmap_line); + string addr, offset, size, identifier, filename; + mmap_tokens >> addr; + mmap_tokens >> offset; + mmap_tokens >> size; + mmap_tokens >> identifier; + mmap_tokens >> filename; + + modules_->Add(new BasicCodeModule( + HexStrToL(addr), // base_address + HexStrToL(size), // size + filename, // code_file + identifier, // code_identifier + filename, // debug_file + identifier, // debug_identifier + "")); // version + } + } + stack_region_->Init(stack_start, stack_content); +} + +} // namespace google_breakpad + diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_processor.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_processor.cc new file mode 100644 index 0000000000..366e3f30aa --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_processor.cc @@ -0,0 +1,100 @@ +// Copyright (c) 2014, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// microdump_processor.cc: A microdump processor. +// +// See microdump_processor.h for documentation. + +#include "google_breakpad/processor/microdump_processor.h" + +#include + +#include + +#include "common/using_std_string.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/microdump.h" +#include "google_breakpad/processor/process_state.h" +#include "google_breakpad/processor/stackwalker.h" +#include "google_breakpad/processor/stack_frame_symbolizer.h" +#include "processor/logging.h" + +namespace google_breakpad { + +MicrodumpProcessor::MicrodumpProcessor(StackFrameSymbolizer* frame_symbolizer) + : frame_symbolizer_(frame_symbolizer) { + assert(frame_symbolizer); +} + +MicrodumpProcessor::~MicrodumpProcessor() {} + +ProcessResult MicrodumpProcessor::Process(const string µdump_contents, + ProcessState* process_state) { + assert(process_state); + + process_state->Clear(); + + if (microdump_contents.empty()) { + BPLOG(ERROR) << "Microdump is empty."; + return PROCESS_ERROR_MINIDUMP_NOT_FOUND; + } + + Microdump microdump(microdump_contents); + process_state->modules_ = microdump.GetModules()->Copy(); + scoped_ptr stackwalker( + Stackwalker::StackwalkerForCPU( + &process_state->system_info_, + microdump.GetContext(), + microdump.GetMemory(), + process_state->modules_, + frame_symbolizer_)); + + scoped_ptr stack(new CallStack()); + if (stackwalker.get()) { + if (!stackwalker->Walk(stack.get(), + &process_state->modules_without_symbols_, + &process_state->modules_with_corrupt_symbols_)) { + BPLOG(INFO) << "Processing was interrupted."; + return PROCESS_SYMBOL_SUPPLIER_INTERRUPTED; + } + } else { + BPLOG(ERROR) << "No stackwalker found for microdump."; + return PROCESS_ERROR_NO_THREAD_LIST; + } + + process_state->threads_.push_back(stack.release()); + process_state->thread_memory_regions_.push_back(microdump.GetMemory()); + process_state->crashed_ = true; + process_state->requesting_thread_ = 0; + process_state->system_info_ = *microdump.GetSystemInfo(); + + return PROCESS_OK; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_processor_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_processor_unittest.cc new file mode 100644 index 0000000000..10e5351c07 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_processor_unittest.cc @@ -0,0 +1,197 @@ +// Copyright (c) 2014, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Unit test for MicrodumpProcessor. + +#include +#include +#include +#include + +#include "breakpad_googletest_includes.h" +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/microdump_processor.h" +#include "google_breakpad/processor/process_state.h" +#include "google_breakpad/processor/stack_frame.h" +#include "google_breakpad/processor/stack_frame_symbolizer.h" +#include "processor/simple_symbol_supplier.h" +#include "processor/stackwalker_unittest_utils.h" + +namespace { + +using google_breakpad::BasicSourceLineResolver; +using google_breakpad::MicrodumpProcessor; +using google_breakpad::ProcessState; +using google_breakpad::SimpleSymbolSupplier; +using google_breakpad::StackFrameSymbolizer; + +class MicrodumpProcessorTest : public ::testing::Test { + public: + MicrodumpProcessorTest() + : files_path_(string(getenv("srcdir") ? getenv("srcdir") : ".") + + "/src/processor/testdata/") { + } + + void ReadFile(const string& file_name, string* file_contents) { + assert(file_contents); + std::ifstream file_stream(file_name.c_str(), std::ios::in); + ASSERT_TRUE(file_stream.good()); + std::vector bytes; + file_stream.seekg(0, std::ios_base::end); + ASSERT_TRUE(file_stream.good()); + bytes.resize(file_stream.tellg()); + file_stream.seekg(0, std::ios_base::beg); + ASSERT_TRUE(file_stream.good()); + file_stream.read(&bytes[0], bytes.size()); + ASSERT_TRUE(file_stream.good()); + *file_contents = string(&bytes[0], bytes.size()); + } + + google_breakpad::ProcessResult ProcessMicrodump( + const string& symbols_file, + const string& microdump_contents, + ProcessState* state) { + SimpleSymbolSupplier supplier(symbols_file); + BasicSourceLineResolver resolver; + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + MicrodumpProcessor processor(&frame_symbolizer); + + return processor.Process(microdump_contents, state); + } + + void AnalyzeDump(const string& microdump_file_name, ProcessState* state, + bool omit_symbols) { + string symbols_file = omit_symbols ? 
"" : files_path_ + "symbols/microdump"; + string microdump_file_path = files_path_ + microdump_file_name; + string microdump_contents; + ReadFile(microdump_file_path, µdump_contents); + + google_breakpad::ProcessResult result = + ProcessMicrodump(symbols_file, microdump_contents, state); + + ASSERT_EQ(google_breakpad::PROCESS_OK, result); + ASSERT_TRUE(state->crashed()); + ASSERT_EQ(0, state->requesting_thread()); + ASSERT_EQ(1U, state->threads()->size()); + + ASSERT_EQ(2, state->system_info()->cpu_count); + ASSERT_EQ("android", state->system_info()->os_short); + ASSERT_EQ("Android", state->system_info()->os); + } + + string files_path_; +}; + +TEST_F(MicrodumpProcessorTest, TestProcess_Empty) { + ProcessState state; + google_breakpad::ProcessResult result = + ProcessMicrodump("", "", &state); + ASSERT_EQ(google_breakpad::PROCESS_ERROR_MINIDUMP_NOT_FOUND, result); +} + +TEST_F(MicrodumpProcessorTest, TestProcess_Invalid) { + ProcessState state; + google_breakpad::ProcessResult result = + ProcessMicrodump("", "This is not a valid microdump", &state); + ASSERT_EQ(google_breakpad::PROCESS_ERROR_NO_THREAD_LIST, result); +} + +TEST_F(MicrodumpProcessorTest, TestProcess_MissingSymbols) { + ProcessState state; + AnalyzeDump("microdump-arm64.dmp", &state, true /* omit_symbols */); + + ASSERT_EQ(8U, state.modules()->module_count()); + ASSERT_EQ("aarch64", state.system_info()->cpu); + ASSERT_EQ("OS 64 VERSION INFO", state.system_info()->os_version); + ASSERT_EQ(1U, state.threads()->size()); + ASSERT_EQ(12U, state.threads()->at(0)->frames()->size()); + + ASSERT_EQ("", + state.threads()->at(0)->frames()->at(0)->function_name); + ASSERT_EQ("", + state.threads()->at(0)->frames()->at(3)->function_name); +} + +TEST_F(MicrodumpProcessorTest, TestProcess_UnsupportedArch) { + string microdump_contents = + "W/google-breakpad(26491): -----BEGIN BREAKPAD MICRODUMP-----\n" + "W/google-breakpad(26491): O A \"unsupported-arch\"\n" + "W/google-breakpad(26491): S 0 A48BD840 A48BD000 00002000\n"; + + ProcessState state; + + google_breakpad::ProcessResult result = + ProcessMicrodump("", microdump_contents, &state); + + ASSERT_EQ(google_breakpad::PROCESS_ERROR_NO_THREAD_LIST, result); +} + +TEST_F(MicrodumpProcessorTest, TestProcessArm) { + ProcessState state; + AnalyzeDump("microdump-arm.dmp", &state, false /* omit_symbols */); + + ASSERT_EQ(6U, state.modules()->module_count()); + ASSERT_EQ("armv7l", state.system_info()->cpu); + ASSERT_EQ("OS VERSION INFO", state.system_info()->os_version); + ASSERT_EQ(8U, state.threads()->at(0)->frames()->size()); + ASSERT_EQ("MicrodumpWriterTest_Setup_Test::TestBody", + state.threads()->at(0)->frames()->at(0)->function_name); + ASSERT_EQ("testing::Test::Run", + state.threads()->at(0)->frames()->at(1)->function_name); + ASSERT_EQ("main", + state.threads()->at(0)->frames()->at(6)->function_name); + ASSERT_EQ("breakpad_unittests", + state.threads()->at(0)->frames()->at(6)->module->code_file()); +} + +TEST_F(MicrodumpProcessorTest, TestProcessArm64) { + ProcessState state; + AnalyzeDump("microdump-arm64.dmp", &state, false /* omit_symbols */); + + ASSERT_EQ(8U, state.modules()->module_count()); + ASSERT_EQ("aarch64", state.system_info()->cpu); + ASSERT_EQ("OS 64 VERSION INFO", state.system_info()->os_version); + ASSERT_EQ(9U, state.threads()->at(0)->frames()->size()); + ASSERT_EQ("MicrodumpWriterTest_Setup_Test::TestBody", + state.threads()->at(0)->frames()->at(0)->function_name); + ASSERT_EQ("testing::Test::Run", + state.threads()->at(0)->frames()->at(2)->function_name); + 
ASSERT_EQ("main", + state.threads()->at(0)->frames()->at(7)->function_name); + ASSERT_EQ("breakpad_unittests", + state.threads()->at(0)->frames()->at(7)->module->code_file()); +} + +} // namespace + +int main(int argc, char* argv[]) { + ::testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_stackwalk.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_stackwalk.cc new file mode 100644 index 0000000000..7ea80495a6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_stackwalk.cc @@ -0,0 +1,151 @@ +// Copyright (c) 2014 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// microdump_stackwalk.cc: Process a microdump with MicrodumpProcessor, printing +// the results, including stack traces. + +#include +#include + +#include +#include +#include + +#include "common/scoped_ptr.h" +#include "common/using_std_string.h" +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "google_breakpad/processor/microdump_processor.h" +#include "google_breakpad/processor/process_state.h" +#include "google_breakpad/processor/stack_frame_symbolizer.h" +#include "processor/logging.h" +#include "processor/simple_symbol_supplier.h" +#include "processor/stackwalk_common.h" + + +namespace { + +using google_breakpad::BasicSourceLineResolver; +using google_breakpad::MicrodumpProcessor; +using google_breakpad::ProcessResult; +using google_breakpad::ProcessState; +using google_breakpad::scoped_ptr; +using google_breakpad::SimpleSymbolSupplier; +using google_breakpad::StackFrameSymbolizer; + +// Processes |microdump_file| using MicrodumpProcessor. |symbol_path|, if +// non-empty, is the base directory of a symbol storage area, laid out in +// the format required by SimpleSymbolSupplier. If such a storage area +// is specified, it is made available for use by the MicrodumpProcessor. +// +// Returns the value of MicrodumpProcessor::Process. 
If processing succeeds, +// prints identifying OS and CPU information from the microdump, crash +// information and call stacks for the crashing thread. +// All information is printed to stdout. +int PrintMicrodumpProcess(const char* microdump_file, + const std::vector& symbol_paths, + bool machine_readable) { + std::ifstream file_stream(microdump_file); + std::vector bytes; + file_stream.seekg(0, std::ios_base::end); + bytes.resize(file_stream.tellg()); + file_stream.seekg(0, std::ios_base::beg); + file_stream.read(&bytes[0], bytes.size()); + string microdump_content(&bytes[0], bytes.size()); + + scoped_ptr symbol_supplier; + if (!symbol_paths.empty()) { + symbol_supplier.reset(new SimpleSymbolSupplier(symbol_paths)); + } + + BasicSourceLineResolver resolver; + StackFrameSymbolizer frame_symbolizer(symbol_supplier.get(), &resolver); + ProcessState process_state; + MicrodumpProcessor microdump_processor(&frame_symbolizer); + ProcessResult res = microdump_processor.Process(microdump_content, + &process_state); + + if (res == google_breakpad::PROCESS_OK) { + if (machine_readable) { + PrintProcessStateMachineReadable(process_state); + } else { + PrintProcessState(process_state, false, &resolver); + } + return 0; + } + + BPLOG(ERROR) << "MicrodumpProcessor::Process failed (code = " << res << ")"; + return 1; +} + +void usage(const char *program_name) { + fprintf(stderr, "usage: %s [-m] [symbol-path ...]\n" + " -m : Output in machine-readable format\n", + program_name); +} + +} // namespace + +int main(int argc, char** argv) { + BPLOG_INIT(&argc, &argv); + + if (argc < 2) { + usage(argv[0]); + return 1; + } + + const char* microdump_file; + bool machine_readable; + int symbol_path_arg; + + if (strcmp(argv[1], "-m") == 0) { + if (argc < 3) { + usage(argv[0]); + return 1; + } + + machine_readable = true; + microdump_file = argv[2]; + symbol_path_arg = 3; + } else { + machine_readable = false; + microdump_file = argv[1]; + symbol_path_arg = 2; + } + + // extra arguments are symbol paths + std::vector symbol_paths; + if (argc > symbol_path_arg) { + for (int argi = symbol_path_arg; argi < argc; ++argi) + symbol_paths.push_back(argv[argi]); + } + + return PrintMicrodumpProcess(microdump_file, + symbol_paths, + machine_readable); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_stackwalk_machine_readable_test b/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_stackwalk_machine_readable_test new file mode 100644 index 0000000000..fadec26456 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_stackwalk_machine_readable_test @@ -0,0 +1,43 @@ +#!/bin/sh + +# Copyright (c) 2014, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +source "${0%/*}/microdump_stackwalk_test_vars" # for MICRODUMP_SUPPORTED_ARCHS. +testdata_dir=$srcdir/src/processor/testdata + +set -e # Bail out with an error if any of the commands below fails. +for ARCH in $MICRODUMP_SUPPORTED_ARCHS; do + echo "Testing microdump_stackwalk -m for arch $ARCH" + ./src/processor/microdump_stackwalk -m $testdata_dir/microdump-${ARCH}.dmp \ + $testdata_dir/symbols/microdump | \ + tr -d '\015' | \ + diff -u $testdata_dir/microdump.stackwalk.machine_readable-${ARCH}.out - +done +exit 0 diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_stackwalk_test b/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_stackwalk_test new file mode 100644 index 0000000000..5a1f3d59fc --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_stackwalk_test @@ -0,0 +1,43 @@ +#!/bin/sh + +# Copyright (c) 2014, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +source "${0%/*}/microdump_stackwalk_test_vars" # for MICRODUMP_SUPPORTED_ARCHS. +testdata_dir=$srcdir/src/processor/testdata + +set -e # Bail out with an error if any of the commands below fails. 
+for ARCH in $MICRODUMP_SUPPORTED_ARCHS; do + echo "Testing microdump_stackwalk for arch $ARCH" + ./src/processor/microdump_stackwalk $testdata_dir/microdump-${ARCH}.dmp \ + $testdata_dir/symbols/microdump | \ + tr -d '\015' | \ + diff -u $testdata_dir/microdump.stackwalk-${ARCH}.out - +done +exit 0 diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_stackwalk_test_vars b/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_stackwalk_test_vars new file mode 100644 index 0000000000..a8b0e0df5a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/microdump_stackwalk_test_vars @@ -0,0 +1 @@ +MICRODUMP_SUPPORTED_ARCHS="arm arm64" diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/minidump.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump.cc new file mode 100644 index 0000000000..08f64652a3 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump.cc @@ -0,0 +1,4631 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// minidump.cc: A minidump reader. +// +// See minidump.h for documentation. +// +// Author: Mark Mentovai + +#include "google_breakpad/processor/minidump.h" + +#include +#include +#include +#include +#include +#include + +#ifdef _WIN32 +#include +#define PRIx64 "llx" +#define PRIx32 "lx" +#define snprintf _snprintf +#else // _WIN32 +#include +#endif // _WIN32 + +#include +#include +#include +#include +#include + +#include "processor/range_map-inl.h" + +#include "common/scoped_ptr.h" +#include "google_breakpad/processor/dump_context.h" +#include "processor/basic_code_module.h" +#include "processor/basic_code_modules.h" +#include "processor/logging.h" + +namespace google_breakpad { + + +using std::istream; +using std::ifstream; +using std::numeric_limits; +using std::vector; + +// Returns true iff |context_size| matches exactly one of the sizes of the +// various MDRawContext* types. 
+// TODO(blundell): This function can be removed once +// http://code.google.com/p/google-breakpad/issues/detail?id=550 is fixed. +static bool IsContextSizeUnique(uint32_t context_size) { + int num_matching_contexts = 0; + if (context_size == sizeof(MDRawContextX86)) + num_matching_contexts++; + if (context_size == sizeof(MDRawContextPPC)) + num_matching_contexts++; + if (context_size == sizeof(MDRawContextPPC64)) + num_matching_contexts++; + if (context_size == sizeof(MDRawContextAMD64)) + num_matching_contexts++; + if (context_size == sizeof(MDRawContextSPARC)) + num_matching_contexts++; + if (context_size == sizeof(MDRawContextARM)) + num_matching_contexts++; + if (context_size == sizeof(MDRawContextARM64)) + num_matching_contexts++; + if (context_size == sizeof(MDRawContextMIPS)) + num_matching_contexts++; + return num_matching_contexts == 1; +} + +// +// Swapping routines +// +// Inlining these doesn't increase code size significantly, and it saves +// a whole lot of unnecessary jumping back and forth. +// + + +// Swapping an 8-bit quantity is a no-op. This function is only provided +// to account for certain templatized operations that require swapping for +// wider types but handle uint8_t too +// (MinidumpMemoryRegion::GetMemoryAtAddressInternal). +static inline void Swap(uint8_t* value) { +} + + +// Optimization: don't need to AND the furthest right shift, because we're +// shifting an unsigned quantity. The standard requires zero-filling in this +// case. If the quantities were signed, a bitmask whould be needed for this +// right shift to avoid an arithmetic shift (which retains the sign bit). +// The furthest left shift never needs to be ANDed bitmask. + + +static inline void Swap(uint16_t* value) { + *value = (*value >> 8) | + (*value << 8); +} + + +static inline void Swap(uint32_t* value) { + *value = (*value >> 24) | + ((*value >> 8) & 0x0000ff00) | + ((*value << 8) & 0x00ff0000) | + (*value << 24); +} + + +static inline void Swap(uint64_t* value) { + uint32_t* value32 = reinterpret_cast(value); + Swap(&value32[0]); + Swap(&value32[1]); + uint32_t temp = value32[0]; + value32[0] = value32[1]; + value32[1] = temp; +} + + +// Given a pointer to a 128-bit int in the minidump data, set the "low" +// and "high" fields appropriately. +static void Normalize128(uint128_struct* value, bool is_big_endian) { + // The struct format is [high, low], so if the format is big-endian, + // the most significant bytes will already be in the high field. + if (!is_big_endian) { + uint64_t temp = value->low; + value->low = value->high; + value->high = temp; + } +} + +// This just swaps each int64 half of the 128-bit value. +// The value should also be normalized by calling Normalize128(). 
+static void Swap(uint128_struct* value) { + Swap(&value->low); + Swap(&value->high); +} + +// Swapping signed integers +static inline void Swap(int16_t* value) { + Swap(reinterpret_cast(value)); +} + +static inline void Swap(int32_t* value) { + Swap(reinterpret_cast(value)); +} + +static inline void Swap(int64_t* value) { + Swap(reinterpret_cast(value)); +} + + +static inline void Swap(MDLocationDescriptor* location_descriptor) { + Swap(&location_descriptor->data_size); + Swap(&location_descriptor->rva); +} + + +static inline void Swap(MDMemoryDescriptor* memory_descriptor) { + Swap(&memory_descriptor->start_of_memory_range); + Swap(&memory_descriptor->memory); +} + + +static inline void Swap(MDGUID* guid) { + Swap(&guid->data1); + Swap(&guid->data2); + Swap(&guid->data3); + // Don't swap guid->data4[] because it contains 8-bit quantities. +} + +static inline void Swap(MDSystemTime* system_time) { + Swap(&system_time->year); + Swap(&system_time->month); + Swap(&system_time->day_of_week); + Swap(&system_time->day); + Swap(&system_time->hour); + Swap(&system_time->minute); + Swap(&system_time->second); + Swap(&system_time->milliseconds); +} + +static inline void Swap(uint16_t* data, size_t size_in_bytes) { + size_t data_length = size_in_bytes / sizeof(data[0]); + for (size_t i = 0; i < data_length; i++) { + Swap(&data[i]); + } +} + +// +// Character conversion routines +// + + +// Standard wide-character conversion routines depend on the system's own +// idea of what width a wide character should be: some use 16 bits, and +// some use 32 bits. For the purposes of a minidump, wide strings are +// always represented with 16-bit UTF-16 chracters. iconv isn't available +// everywhere, and its interface varies where it is available. iconv also +// deals purely with char* pointers, so in addition to considering the swap +// parameter, a converter that uses iconv would also need to take the host +// CPU's endianness into consideration. It doesn't seems worth the trouble +// of making it a dependency when we don't care about anything but UTF-16. +static string* UTF16ToUTF8(const vector& in, + bool swap) { + scoped_ptr out(new string()); + + // Set the string's initial capacity to the number of UTF-16 characters, + // because the UTF-8 representation will always be at least this long. + // If the UTF-8 representation is longer, the string will grow dynamically. + out->reserve(in.size()); + + for (vector::const_iterator iterator = in.begin(); + iterator != in.end(); + ++iterator) { + // Get a 16-bit value from the input + uint16_t in_word = *iterator; + if (swap) + Swap(&in_word); + + // Convert the input value (in_word) into a Unicode code point (unichar). + uint32_t unichar; + if (in_word >= 0xdc00 && in_word <= 0xdcff) { + BPLOG(ERROR) << "UTF16ToUTF8 found low surrogate " << + HexString(in_word) << " without high"; + return NULL; + } else if (in_word >= 0xd800 && in_word <= 0xdbff) { + // High surrogate. + unichar = (in_word - 0xd7c0) << 10; + if (++iterator == in.end()) { + BPLOG(ERROR) << "UTF16ToUTF8 found high surrogate " << + HexString(in_word) << " at end of string"; + return NULL; + } + uint32_t high_word = in_word; + in_word = *iterator; + if (in_word < 0xdc00 || in_word > 0xdcff) { + BPLOG(ERROR) << "UTF16ToUTF8 found high surrogate " << + HexString(high_word) << " without low " << + HexString(in_word); + return NULL; + } + unichar |= in_word & 0x03ff; + } else { + // The ordinary case, a single non-surrogate Unicode character encoded + // as a single 16-bit value. 
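// Editorial aside (not part of the upstream sources): in the surrogate branch
// above, the (in_word - 0xd7c0) << 10 form folds the usual +0x10000 offset
// into the subtraction, since ((w1 - 0xd800) << 10) + 0x10000 ==
// (w1 - 0xd7c0) << 10. For example, U+1F600 is encoded as the pair
// 0xD83D 0xDE00, and ((0xD83D - 0xD7C0) << 10) | (0xDE00 & 0x03FF) == 0x1F600.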
+ unichar = in_word; + } + + // Convert the Unicode code point (unichar) into its UTF-8 representation, + // appending it to the out string. + if (unichar < 0x80) { + (*out) += static_cast(unichar); + } else if (unichar < 0x800) { + (*out) += 0xc0 | static_cast(unichar >> 6); + (*out) += 0x80 | static_cast(unichar & 0x3f); + } else if (unichar < 0x10000) { + (*out) += 0xe0 | static_cast(unichar >> 12); + (*out) += 0x80 | static_cast((unichar >> 6) & 0x3f); + (*out) += 0x80 | static_cast(unichar & 0x3f); + } else if (unichar < 0x200000) { + (*out) += 0xf0 | static_cast(unichar >> 18); + (*out) += 0x80 | static_cast((unichar >> 12) & 0x3f); + (*out) += 0x80 | static_cast((unichar >> 6) & 0x3f); + (*out) += 0x80 | static_cast(unichar & 0x3f); + } else { + BPLOG(ERROR) << "UTF16ToUTF8 cannot represent high value " << + HexString(unichar) << " in UTF-8"; + return NULL; + } + } + + return out.release(); +} + +// Return the smaller of the number of code units in the UTF-16 string, +// not including the terminating null word, or maxlen. +static size_t UTF16codeunits(const uint16_t *string, size_t maxlen) { + size_t count = 0; + while (count < maxlen && string[count] != 0) + count++; + return count; +} + +static inline void Swap(MDTimeZoneInformation* time_zone) { + Swap(&time_zone->bias); + // Skip time_zone->standard_name. No need to swap UTF-16 fields. + // The swap will be done as part of the conversion to UTF-8. + Swap(&time_zone->standard_date); + Swap(&time_zone->standard_bias); + // Skip time_zone->daylight_name. No need to swap UTF-16 fields. + // The swap will be done as part of the conversion to UTF-8. + Swap(&time_zone->daylight_date); + Swap(&time_zone->daylight_bias); +} + +static void ConvertUTF16BufferToUTF8String(const uint16_t* utf16_data, + size_t max_length_in_bytes, + string* utf8_result, + bool swap) { + // Since there is no explicit byte length for each string, use + // UTF16codeunits to calculate word length, then derive byte + // length from that. + size_t max_word_length = max_length_in_bytes / sizeof(utf16_data[0]); + size_t word_length = UTF16codeunits(utf16_data, max_word_length); + if (word_length > 0) { + size_t byte_length = word_length * sizeof(utf16_data[0]); + vector utf16_vector(word_length); + memcpy(&utf16_vector[0], &utf16_data[0], byte_length); + scoped_ptr temp(UTF16ToUTF8(utf16_vector, swap)); + if (temp.get()) { + utf8_result->assign(*temp); + } + } else { + utf8_result->clear(); + } +} + + +// For fields that may or may not be valid, PrintValueOrInvalid will print the +// string "(invalid)" if the field is not valid, and will print the value if +// the field is valid. The value is printed as hexadecimal or decimal. + +enum NumberFormat { + kNumberFormatDecimal, + kNumberFormatHexadecimal, +}; + +static void PrintValueOrInvalid(bool valid, + NumberFormat number_format, + uint32_t value) { + if (!valid) { + printf("(invalid)\n"); + } else if (number_format == kNumberFormatDecimal) { + printf("%d\n", value); + } else { + printf("0x%x\n", value); + } +} + +// Converts a time_t to a string showing the time in UTC. 
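// Editorial aside (not part of the upstream sources): the implementation below
// uses the reentrant gmtime_r (gmtime_s on Windows) rather than gmtime, so it
// is safe to call from multiple threads. The 20-byte buffer exactly fits the
// 19-character "%Y-%m-%d %H:%M:%S" rendering plus the terminating NUL; if
// strftime cannot fit the result it returns 0 and an empty string is returned
// instead. For example, TimeTToUTCString(0) yields "1970-01-01 00:00:00".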
+string TimeTToUTCString(time_t tt) { + struct tm timestruct; +#ifdef _WIN32 + gmtime_s(×truct, &tt); +#else + gmtime_r(&tt, ×truct); +#endif + + char timestr[20]; + int rv = strftime(timestr, 20, "%Y-%m-%d %H:%M:%S", ×truct); + if (rv == 0) { + return string(); + } + + return string(timestr); +} + + +// +// MinidumpObject +// + + +MinidumpObject::MinidumpObject(Minidump* minidump) + : DumpObject(), + minidump_(minidump) { +} + + +// +// MinidumpStream +// + + +MinidumpStream::MinidumpStream(Minidump* minidump) + : MinidumpObject(minidump) { +} + + +// +// MinidumpContext +// + + +MinidumpContext::MinidumpContext(Minidump* minidump) + : DumpContext(), + minidump_(minidump) { +} + +MinidumpContext::~MinidumpContext() { +} + +bool MinidumpContext::Read(uint32_t expected_size) { + valid_ = false; + + // Certain raw context types are currently assumed to have unique sizes. + if (!IsContextSizeUnique(sizeof(MDRawContextAMD64))) { + BPLOG(ERROR) << "sizeof(MDRawContextAMD64) cannot match the size of any " + << "other raw context"; + return false; + } + if (!IsContextSizeUnique(sizeof(MDRawContextPPC64))) { + BPLOG(ERROR) << "sizeof(MDRawContextPPC64) cannot match the size of any " + << "other raw context"; + return false; + } + if (!IsContextSizeUnique(sizeof(MDRawContextARM64))) { + BPLOG(ERROR) << "sizeof(MDRawContextARM64) cannot match the size of any " + << "other raw context"; + return false; + } + + FreeContext(); + + // First, figure out what type of CPU this context structure is for. + // For some reason, the AMD64 Context doesn't have context_flags + // at the beginning of the structure, so special case it here. + if (expected_size == sizeof(MDRawContextAMD64)) { + BPLOG(INFO) << "MinidumpContext: looks like AMD64 context"; + + scoped_ptr context_amd64(new MDRawContextAMD64()); + if (!minidump_->ReadBytes(context_amd64.get(), + sizeof(MDRawContextAMD64))) { + BPLOG(ERROR) << "MinidumpContext could not read amd64 context"; + return false; + } + + if (minidump_->swap()) + Swap(&context_amd64->context_flags); + + uint32_t cpu_type = context_amd64->context_flags & MD_CONTEXT_CPU_MASK; + if (cpu_type == 0) { + if (minidump_->GetContextCPUFlagsFromSystemInfo(&cpu_type)) { + context_amd64->context_flags |= cpu_type; + } else { + BPLOG(ERROR) << "Failed to preserve the current stream position"; + return false; + } + } + + if (cpu_type != MD_CONTEXT_AMD64) { + // TODO: Fall through to switch below. + // http://code.google.com/p/google-breakpad/issues/detail?id=550 + BPLOG(ERROR) << "MinidumpContext not actually amd64 context"; + return false; + } + + // Do this after reading the entire MDRawContext structure because + // GetSystemInfo may seek minidump to a new position. + if (!CheckAgainstSystemInfo(cpu_type)) { + BPLOG(ERROR) << "MinidumpContext amd64 does not match system info"; + return false; + } + + // Normalize the 128-bit types in the dump. + // Since this is AMD64, by definition, the values are little-endian. 
+ for (unsigned int vr_index = 0; + vr_index < MD_CONTEXT_AMD64_VR_COUNT; + ++vr_index) + Normalize128(&context_amd64->vector_register[vr_index], false); + + if (minidump_->swap()) { + Swap(&context_amd64->p1_home); + Swap(&context_amd64->p2_home); + Swap(&context_amd64->p3_home); + Swap(&context_amd64->p4_home); + Swap(&context_amd64->p5_home); + Swap(&context_amd64->p6_home); + // context_flags is already swapped + Swap(&context_amd64->mx_csr); + Swap(&context_amd64->cs); + Swap(&context_amd64->ds); + Swap(&context_amd64->es); + Swap(&context_amd64->fs); + Swap(&context_amd64->ss); + Swap(&context_amd64->eflags); + Swap(&context_amd64->dr0); + Swap(&context_amd64->dr1); + Swap(&context_amd64->dr2); + Swap(&context_amd64->dr3); + Swap(&context_amd64->dr6); + Swap(&context_amd64->dr7); + Swap(&context_amd64->rax); + Swap(&context_amd64->rcx); + Swap(&context_amd64->rdx); + Swap(&context_amd64->rbx); + Swap(&context_amd64->rsp); + Swap(&context_amd64->rbp); + Swap(&context_amd64->rsi); + Swap(&context_amd64->rdi); + Swap(&context_amd64->r8); + Swap(&context_amd64->r9); + Swap(&context_amd64->r10); + Swap(&context_amd64->r11); + Swap(&context_amd64->r12); + Swap(&context_amd64->r13); + Swap(&context_amd64->r14); + Swap(&context_amd64->r15); + Swap(&context_amd64->rip); + // FIXME: I'm not sure what actually determines + // which member of the union {flt_save, sse_registers} + // is valid. We're not currently using either, + // but it would be good to have them swapped properly. + + for (unsigned int vr_index = 0; + vr_index < MD_CONTEXT_AMD64_VR_COUNT; + ++vr_index) + Swap(&context_amd64->vector_register[vr_index]); + Swap(&context_amd64->vector_control); + Swap(&context_amd64->debug_control); + Swap(&context_amd64->last_branch_to_rip); + Swap(&context_amd64->last_branch_from_rip); + Swap(&context_amd64->last_exception_to_rip); + Swap(&context_amd64->last_exception_from_rip); + } + + SetContextFlags(context_amd64->context_flags); + + SetContextAMD64(context_amd64.release()); + } else if (expected_size == sizeof(MDRawContextPPC64)) { + // |context_flags| of MDRawContextPPC64 is 64 bits, but other MDRawContext + // in the else case have 32 bits |context_flags|, so special case it here. + uint64_t context_flags; + if (!minidump_->ReadBytes(&context_flags, sizeof(context_flags))) { + BPLOG(ERROR) << "MinidumpContext could not read context flags"; + return false; + } + if (minidump_->swap()) + Swap(&context_flags); + + uint32_t cpu_type = context_flags & MD_CONTEXT_CPU_MASK; + scoped_ptr context_ppc64(new MDRawContextPPC64()); + + if (cpu_type == 0) { + if (minidump_->GetContextCPUFlagsFromSystemInfo(&cpu_type)) { + context_ppc64->context_flags |= cpu_type; + } else { + BPLOG(ERROR) << "Failed to preserve the current stream position"; + return false; + } + } + + if (cpu_type != MD_CONTEXT_PPC64) { + // TODO: Fall through to switch below. + // http://code.google.com/p/google-breakpad/issues/detail?id=550 + BPLOG(ERROR) << "MinidumpContext not actually ppc64 context"; + return false; + } + + // Set the context_flags member, which has already been read, and + // read the rest of the structure beginning with the first member + // after context_flags. 
+ context_ppc64->context_flags = context_flags; + + size_t flags_size = sizeof(context_ppc64->context_flags); + uint8_t* context_after_flags = + reinterpret_cast(context_ppc64.get()) + flags_size; + if (!minidump_->ReadBytes(context_after_flags, + sizeof(MDRawContextPPC64) - flags_size)) { + BPLOG(ERROR) << "MinidumpContext could not read ppc64 context"; + return false; + } + + // Do this after reading the entire MDRawContext structure because + // GetSystemInfo may seek minidump to a new position. + if (!CheckAgainstSystemInfo(cpu_type)) { + BPLOG(ERROR) << "MinidumpContext ppc64 does not match system info"; + return false; + } + if (minidump_->swap()) { + // context_ppc64->context_flags was already swapped. + Swap(&context_ppc64->srr0); + Swap(&context_ppc64->srr1); + for (unsigned int gpr_index = 0; + gpr_index < MD_CONTEXT_PPC64_GPR_COUNT; + ++gpr_index) { + Swap(&context_ppc64->gpr[gpr_index]); + } + Swap(&context_ppc64->cr); + Swap(&context_ppc64->xer); + Swap(&context_ppc64->lr); + Swap(&context_ppc64->ctr); + Swap(&context_ppc64->vrsave); + for (unsigned int fpr_index = 0; + fpr_index < MD_FLOATINGSAVEAREA_PPC_FPR_COUNT; + ++fpr_index) { + Swap(&context_ppc64->float_save.fpregs[fpr_index]); + } + // Don't swap context_ppc64->float_save.fpscr_pad because it is only + // used for padding. + Swap(&context_ppc64->float_save.fpscr); + for (unsigned int vr_index = 0; + vr_index < MD_VECTORSAVEAREA_PPC_VR_COUNT; + ++vr_index) { + Normalize128(&context_ppc64->vector_save.save_vr[vr_index], true); + Swap(&context_ppc64->vector_save.save_vr[vr_index]); + } + Swap(&context_ppc64->vector_save.save_vscr); + // Don't swap the padding fields in vector_save. + Swap(&context_ppc64->vector_save.save_vrvalid); + } + + SetContextFlags(static_cast(context_ppc64->context_flags)); + + // Check for data loss when converting context flags from uint64_t into + // uint32_t + if (static_cast(GetContextFlags()) != + context_ppc64->context_flags) { + BPLOG(ERROR) << "Data loss detected when converting PPC64 context_flags"; + return false; + } + + SetContextPPC64(context_ppc64.release()); + } else if (expected_size == sizeof(MDRawContextARM64)) { + // |context_flags| of MDRawContextARM64 is 64 bits, but other MDRawContext + // in the else case have 32 bits |context_flags|, so special case it here. + uint64_t context_flags; + + BPLOG(INFO) << "MinidumpContext: looks like ARM64 context"; + + if (!minidump_->ReadBytes(&context_flags, sizeof(context_flags))) { + BPLOG(ERROR) << "MinidumpContext could not read context flags"; + return false; + } + if (minidump_->swap()) + Swap(&context_flags); + + scoped_ptr context_arm64(new MDRawContextARM64()); + + uint32_t cpu_type = context_flags & MD_CONTEXT_CPU_MASK; + if (cpu_type == 0) { + if (minidump_->GetContextCPUFlagsFromSystemInfo(&cpu_type)) { + context_arm64->context_flags |= cpu_type; + } else { + BPLOG(ERROR) << "Failed to preserve the current stream position"; + return false; + } + } + + if (cpu_type != MD_CONTEXT_ARM64) { + // TODO: Fall through to switch below. + // http://code.google.com/p/google-breakpad/issues/detail?id=550 + BPLOG(ERROR) << "MinidumpContext not actually arm64 context"; + return false; + } + + // Set the context_flags member, which has already been read, and + // read the rest of the structure beginning with the first member + // after context_flags. 
+ context_arm64->context_flags = context_flags; + + size_t flags_size = sizeof(context_arm64->context_flags); + uint8_t* context_after_flags = + reinterpret_cast(context_arm64.get()) + flags_size; + if (!minidump_->ReadBytes(context_after_flags, + sizeof(MDRawContextARM64) - flags_size)) { + BPLOG(ERROR) << "MinidumpContext could not read arm64 context"; + return false; + } + + // Do this after reading the entire MDRawContext structure because + // GetSystemInfo may seek minidump to a new position. + if (!CheckAgainstSystemInfo(cpu_type)) { + BPLOG(ERROR) << "MinidumpContext arm64 does not match system info"; + return false; + } + + if (minidump_->swap()) { + // context_arm64->context_flags was already swapped. + for (unsigned int ireg_index = 0; + ireg_index < MD_CONTEXT_ARM64_GPR_COUNT; + ++ireg_index) { + Swap(&context_arm64->iregs[ireg_index]); + } + Swap(&context_arm64->cpsr); + Swap(&context_arm64->float_save.fpsr); + Swap(&context_arm64->float_save.fpcr); + for (unsigned int fpr_index = 0; + fpr_index < MD_FLOATINGSAVEAREA_ARM64_FPR_COUNT; + ++fpr_index) { + // While ARM64 is bi-endian, iOS (currently the only platform + // for which ARM64 support has been brought up) uses ARM64 exclusively + // in little-endian mode. + Normalize128(&context_arm64->float_save.regs[fpr_index], false); + Swap(&context_arm64->float_save.regs[fpr_index]); + } + } + SetContextFlags(static_cast(context_arm64->context_flags)); + + // Check for data loss when converting context flags from uint64_t into + // uint32_t + if (static_cast(GetContextFlags()) != + context_arm64->context_flags) { + BPLOG(ERROR) << "Data loss detected when converting ARM64 context_flags"; + return false; + } + + SetContextARM64(context_arm64.release()); + } else { + uint32_t context_flags; + if (!minidump_->ReadBytes(&context_flags, sizeof(context_flags))) { + BPLOG(ERROR) << "MinidumpContext could not read context flags"; + return false; + } + if (minidump_->swap()) + Swap(&context_flags); + + uint32_t cpu_type = context_flags & MD_CONTEXT_CPU_MASK; + if (cpu_type == 0) { + // Unfortunately the flag for MD_CONTEXT_ARM that was taken + // from a Windows CE SDK header conflicts in practice with + // the CONTEXT_XSTATE flag. MD_CONTEXT_ARM has been renumbered, + // but handle dumps with the legacy value gracefully here. + if (context_flags & MD_CONTEXT_ARM_OLD) { + context_flags |= MD_CONTEXT_ARM; + context_flags &= ~MD_CONTEXT_ARM_OLD; + cpu_type = MD_CONTEXT_ARM; + } + } + + if (cpu_type == 0) { + if (minidump_->GetContextCPUFlagsFromSystemInfo(&cpu_type)) { + context_flags |= cpu_type; + } else { + BPLOG(ERROR) << "Failed to preserve the current stream position"; + return false; + } + } + + // Allocate the context structure for the correct CPU and fill it. The + // casts are slightly unorthodox, but it seems better to do that than to + // maintain a separate pointer for each type of CPU context structure + // when only one of them will be used. + switch (cpu_type) { + case MD_CONTEXT_X86: { + if (expected_size != sizeof(MDRawContextX86)) { + BPLOG(ERROR) << "MinidumpContext x86 size mismatch, " << + expected_size << " != " << sizeof(MDRawContextX86); + return false; + } + + scoped_ptr context_x86(new MDRawContextX86()); + + // Set the context_flags member, which has already been read, and + // read the rest of the structure beginning with the first member + // after context_flags. 
+ context_x86->context_flags = context_flags; + + size_t flags_size = sizeof(context_x86->context_flags); + uint8_t* context_after_flags = + reinterpret_cast(context_x86.get()) + flags_size; + if (!minidump_->ReadBytes(context_after_flags, + sizeof(MDRawContextX86) - flags_size)) { + BPLOG(ERROR) << "MinidumpContext could not read x86 context"; + return false; + } + + // Do this after reading the entire MDRawContext structure because + // GetSystemInfo may seek minidump to a new position. + if (!CheckAgainstSystemInfo(cpu_type)) { + BPLOG(ERROR) << "MinidumpContext x86 does not match system info"; + return false; + } + + if (minidump_->swap()) { + // context_x86->context_flags was already swapped. + Swap(&context_x86->dr0); + Swap(&context_x86->dr1); + Swap(&context_x86->dr2); + Swap(&context_x86->dr3); + Swap(&context_x86->dr6); + Swap(&context_x86->dr7); + Swap(&context_x86->float_save.control_word); + Swap(&context_x86->float_save.status_word); + Swap(&context_x86->float_save.tag_word); + Swap(&context_x86->float_save.error_offset); + Swap(&context_x86->float_save.error_selector); + Swap(&context_x86->float_save.data_offset); + Swap(&context_x86->float_save.data_selector); + // context_x86->float_save.register_area[] contains 8-bit quantities + // and does not need to be swapped. + Swap(&context_x86->float_save.cr0_npx_state); + Swap(&context_x86->gs); + Swap(&context_x86->fs); + Swap(&context_x86->es); + Swap(&context_x86->ds); + Swap(&context_x86->edi); + Swap(&context_x86->esi); + Swap(&context_x86->ebx); + Swap(&context_x86->edx); + Swap(&context_x86->ecx); + Swap(&context_x86->eax); + Swap(&context_x86->ebp); + Swap(&context_x86->eip); + Swap(&context_x86->cs); + Swap(&context_x86->eflags); + Swap(&context_x86->esp); + Swap(&context_x86->ss); + // context_x86->extended_registers[] contains 8-bit quantities and + // does not need to be swapped. + } + + SetContextX86(context_x86.release()); + + break; + } + + case MD_CONTEXT_PPC: { + if (expected_size != sizeof(MDRawContextPPC)) { + BPLOG(ERROR) << "MinidumpContext ppc size mismatch, " << + expected_size << " != " << sizeof(MDRawContextPPC); + return false; + } + + scoped_ptr context_ppc(new MDRawContextPPC()); + + // Set the context_flags member, which has already been read, and + // read the rest of the structure beginning with the first member + // after context_flags. + context_ppc->context_flags = context_flags; + + size_t flags_size = sizeof(context_ppc->context_flags); + uint8_t* context_after_flags = + reinterpret_cast(context_ppc.get()) + flags_size; + if (!minidump_->ReadBytes(context_after_flags, + sizeof(MDRawContextPPC) - flags_size)) { + BPLOG(ERROR) << "MinidumpContext could not read ppc context"; + return false; + } + + // Do this after reading the entire MDRawContext structure because + // GetSystemInfo may seek minidump to a new position. + if (!CheckAgainstSystemInfo(cpu_type)) { + BPLOG(ERROR) << "MinidumpContext ppc does not match system info"; + return false; + } + + // Normalize the 128-bit types in the dump. + // Since this is PowerPC, by definition, the values are big-endian. + for (unsigned int vr_index = 0; + vr_index < MD_VECTORSAVEAREA_PPC_VR_COUNT; + ++vr_index) { + Normalize128(&context_ppc->vector_save.save_vr[vr_index], true); + } + + if (minidump_->swap()) { + // context_ppc->context_flags was already swapped. 
+ Swap(&context_ppc->srr0); + Swap(&context_ppc->srr1); + for (unsigned int gpr_index = 0; + gpr_index < MD_CONTEXT_PPC_GPR_COUNT; + ++gpr_index) { + Swap(&context_ppc->gpr[gpr_index]); + } + Swap(&context_ppc->cr); + Swap(&context_ppc->xer); + Swap(&context_ppc->lr); + Swap(&context_ppc->ctr); + Swap(&context_ppc->mq); + Swap(&context_ppc->vrsave); + for (unsigned int fpr_index = 0; + fpr_index < MD_FLOATINGSAVEAREA_PPC_FPR_COUNT; + ++fpr_index) { + Swap(&context_ppc->float_save.fpregs[fpr_index]); + } + // Don't swap context_ppc->float_save.fpscr_pad because it is only + // used for padding. + Swap(&context_ppc->float_save.fpscr); + for (unsigned int vr_index = 0; + vr_index < MD_VECTORSAVEAREA_PPC_VR_COUNT; + ++vr_index) { + Swap(&context_ppc->vector_save.save_vr[vr_index]); + } + Swap(&context_ppc->vector_save.save_vscr); + // Don't swap the padding fields in vector_save. + Swap(&context_ppc->vector_save.save_vrvalid); + } + + SetContextPPC(context_ppc.release()); + + break; + } + + case MD_CONTEXT_SPARC: { + if (expected_size != sizeof(MDRawContextSPARC)) { + BPLOG(ERROR) << "MinidumpContext sparc size mismatch, " << + expected_size << " != " << sizeof(MDRawContextSPARC); + return false; + } + + scoped_ptr context_sparc(new MDRawContextSPARC()); + + // Set the context_flags member, which has already been read, and + // read the rest of the structure beginning with the first member + // after context_flags. + context_sparc->context_flags = context_flags; + + size_t flags_size = sizeof(context_sparc->context_flags); + uint8_t* context_after_flags = + reinterpret_cast(context_sparc.get()) + flags_size; + if (!minidump_->ReadBytes(context_after_flags, + sizeof(MDRawContextSPARC) - flags_size)) { + BPLOG(ERROR) << "MinidumpContext could not read sparc context"; + return false; + } + + // Do this after reading the entire MDRawContext structure because + // GetSystemInfo may seek minidump to a new position. + if (!CheckAgainstSystemInfo(cpu_type)) { + BPLOG(ERROR) << "MinidumpContext sparc does not match system info"; + return false; + } + + if (minidump_->swap()) { + // context_sparc->context_flags was already swapped. + for (unsigned int gpr_index = 0; + gpr_index < MD_CONTEXT_SPARC_GPR_COUNT; + ++gpr_index) { + Swap(&context_sparc->g_r[gpr_index]); + } + Swap(&context_sparc->ccr); + Swap(&context_sparc->pc); + Swap(&context_sparc->npc); + Swap(&context_sparc->y); + Swap(&context_sparc->asi); + Swap(&context_sparc->fprs); + for (unsigned int fpr_index = 0; + fpr_index < MD_FLOATINGSAVEAREA_SPARC_FPR_COUNT; + ++fpr_index) { + Swap(&context_sparc->float_save.regs[fpr_index]); + } + Swap(&context_sparc->float_save.filler); + Swap(&context_sparc->float_save.fsr); + } + SetContextSPARC(context_sparc.release()); + + break; + } + + case MD_CONTEXT_ARM: { + if (expected_size != sizeof(MDRawContextARM)) { + BPLOG(ERROR) << "MinidumpContext arm size mismatch, " << + expected_size << " != " << sizeof(MDRawContextARM); + return false; + } + + scoped_ptr context_arm(new MDRawContextARM()); + + // Set the context_flags member, which has already been read, and + // read the rest of the structure beginning with the first member + // after context_flags. 
+ context_arm->context_flags = context_flags; + + size_t flags_size = sizeof(context_arm->context_flags); + uint8_t* context_after_flags = + reinterpret_cast(context_arm.get()) + flags_size; + if (!minidump_->ReadBytes(context_after_flags, + sizeof(MDRawContextARM) - flags_size)) { + BPLOG(ERROR) << "MinidumpContext could not read arm context"; + return false; + } + + // Do this after reading the entire MDRawContext structure because + // GetSystemInfo may seek minidump to a new position. + if (!CheckAgainstSystemInfo(cpu_type)) { + BPLOG(ERROR) << "MinidumpContext arm does not match system info"; + return false; + } + + if (minidump_->swap()) { + // context_arm->context_flags was already swapped. + for (unsigned int ireg_index = 0; + ireg_index < MD_CONTEXT_ARM_GPR_COUNT; + ++ireg_index) { + Swap(&context_arm->iregs[ireg_index]); + } + Swap(&context_arm->cpsr); + Swap(&context_arm->float_save.fpscr); + for (unsigned int fpr_index = 0; + fpr_index < MD_FLOATINGSAVEAREA_ARM_FPR_COUNT; + ++fpr_index) { + Swap(&context_arm->float_save.regs[fpr_index]); + } + for (unsigned int fpe_index = 0; + fpe_index < MD_FLOATINGSAVEAREA_ARM_FPEXTRA_COUNT; + ++fpe_index) { + Swap(&context_arm->float_save.extra[fpe_index]); + } + } + SetContextARM(context_arm.release()); + + break; + } + + case MD_CONTEXT_MIPS: { + if (expected_size != sizeof(MDRawContextMIPS)) { + BPLOG(ERROR) << "MinidumpContext MIPS size mismatch, " + << expected_size + << " != " + << sizeof(MDRawContextMIPS); + return false; + } + + scoped_ptr context_mips(new MDRawContextMIPS()); + + // Set the context_flags member, which has already been read, and + // read the rest of the structure beginning with the first member + // after context_flags. + context_mips->context_flags = context_flags; + + size_t flags_size = sizeof(context_mips->context_flags); + uint8_t* context_after_flags = + reinterpret_cast(context_mips.get()) + flags_size; + if (!minidump_->ReadBytes(context_after_flags, + sizeof(MDRawContextMIPS) - flags_size)) { + BPLOG(ERROR) << "MinidumpContext could not read MIPS context"; + return false; + } + + // Do this after reading the entire MDRawContext structure because + // GetSystemInfo may seek minidump to a new position. + if (!CheckAgainstSystemInfo(cpu_type)) { + BPLOG(ERROR) << "MinidumpContext MIPS does not match system info"; + return false; + } + + if (minidump_->swap()) { + // context_mips->context_flags was already swapped. + for (int ireg_index = 0; + ireg_index < MD_CONTEXT_MIPS_GPR_COUNT; + ++ireg_index) { + Swap(&context_mips->iregs[ireg_index]); + } + Swap(&context_mips->mdhi); + Swap(&context_mips->mdlo); + for (int dsp_index = 0; + dsp_index < MD_CONTEXT_MIPS_DSP_COUNT; + ++dsp_index) { + Swap(&context_mips->hi[dsp_index]); + Swap(&context_mips->lo[dsp_index]); + } + Swap(&context_mips->dsp_control); + Swap(&context_mips->epc); + Swap(&context_mips->badvaddr); + Swap(&context_mips->status); + Swap(&context_mips->cause); + for (int fpr_index = 0; + fpr_index < MD_FLOATINGSAVEAREA_MIPS_FPR_COUNT; + ++fpr_index) { + Swap(&context_mips->float_save.regs[fpr_index]); + } + Swap(&context_mips->float_save.fpcsr); + Swap(&context_mips->float_save.fir); + } + SetContextMIPS(context_mips.release()); + + break; + } + + default: { + // Unknown context type - Don't log as an error yet. Let the + // caller work that out. 
+ BPLOG(INFO) << "MinidumpContext unknown context type " << + HexString(cpu_type); + return false; + break; + } + } + SetContextFlags(context_flags); + } + + valid_ = true; + return true; +} + +bool MinidumpContext::CheckAgainstSystemInfo(uint32_t context_cpu_type) { + // It's OK if the minidump doesn't contain an MD_SYSTEM_INFO_STREAM, + // as this function just implements a sanity check. + MinidumpSystemInfo* system_info = minidump_->GetSystemInfo(); + if (!system_info) { + BPLOG(INFO) << "MinidumpContext could not be compared against " + "MinidumpSystemInfo"; + return true; + } + + // If there is an MD_SYSTEM_INFO_STREAM, it should contain valid system info. + const MDRawSystemInfo* raw_system_info = system_info->system_info(); + if (!raw_system_info) { + BPLOG(INFO) << "MinidumpContext could not be compared against " + "MDRawSystemInfo"; + return false; + } + + MDCPUArchitecture system_info_cpu_type = static_cast( + raw_system_info->processor_architecture); + + // Compare the CPU type of the context record to the CPU type in the + // minidump's system info stream. + bool return_value = false; + switch (context_cpu_type) { + case MD_CONTEXT_X86: + if (system_info_cpu_type == MD_CPU_ARCHITECTURE_X86 || + system_info_cpu_type == MD_CPU_ARCHITECTURE_X86_WIN64 || + system_info_cpu_type == MD_CPU_ARCHITECTURE_AMD64) { + return_value = true; + } + break; + + case MD_CONTEXT_PPC: + if (system_info_cpu_type == MD_CPU_ARCHITECTURE_PPC) + return_value = true; + break; + + case MD_CONTEXT_PPC64: + if (system_info_cpu_type == MD_CPU_ARCHITECTURE_PPC64) + return_value = true; + break; + + case MD_CONTEXT_AMD64: + if (system_info_cpu_type == MD_CPU_ARCHITECTURE_AMD64) + return_value = true; + break; + + case MD_CONTEXT_SPARC: + if (system_info_cpu_type == MD_CPU_ARCHITECTURE_SPARC) + return_value = true; + break; + + case MD_CONTEXT_ARM: + if (system_info_cpu_type == MD_CPU_ARCHITECTURE_ARM) + return_value = true; + break; + + case MD_CONTEXT_ARM64: + if (system_info_cpu_type == MD_CPU_ARCHITECTURE_ARM64) + return_value = true; + break; + + case MD_CONTEXT_MIPS: + if (system_info_cpu_type == MD_CPU_ARCHITECTURE_MIPS) + return_value = true; + break; + } + + BPLOG_IF(ERROR, !return_value) << "MinidumpContext CPU " << + HexString(context_cpu_type) << + " wrong for MinidumpSystemInfo CPU " << + HexString(system_info_cpu_type); + + return return_value; +} + + +// +// MinidumpMemoryRegion +// + + +uint32_t MinidumpMemoryRegion::max_bytes_ = 1024 * 1024; // 1MB + + +MinidumpMemoryRegion::MinidumpMemoryRegion(Minidump* minidump) + : MinidumpObject(minidump), + descriptor_(NULL), + memory_(NULL) { +} + + +MinidumpMemoryRegion::~MinidumpMemoryRegion() { + delete memory_; +} + + +void MinidumpMemoryRegion::SetDescriptor(MDMemoryDescriptor* descriptor) { + descriptor_ = descriptor; + valid_ = descriptor && + descriptor_->memory.data_size <= + numeric_limits::max() - + descriptor_->start_of_memory_range; +} + + +const uint8_t* MinidumpMemoryRegion::GetMemory() const { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpMemoryRegion for GetMemory"; + return NULL; + } + + if (!memory_) { + if (descriptor_->memory.data_size == 0) { + BPLOG(ERROR) << "MinidumpMemoryRegion is empty"; + return NULL; + } + + if (!minidump_->SeekSet(descriptor_->memory.rva)) { + BPLOG(ERROR) << "MinidumpMemoryRegion could not seek to memory region"; + return NULL; + } + + if (descriptor_->memory.data_size > max_bytes_) { + BPLOG(ERROR) << "MinidumpMemoryRegion size " << + descriptor_->memory.data_size << " exceeds maximum " << + 
max_bytes_; + return NULL; + } + + scoped_ptr< vector > memory( + new vector(descriptor_->memory.data_size)); + + if (!minidump_->ReadBytes(&(*memory)[0], descriptor_->memory.data_size)) { + BPLOG(ERROR) << "MinidumpMemoryRegion could not read memory region"; + return NULL; + } + + memory_ = memory.release(); + } + + return &(*memory_)[0]; +} + + +uint64_t MinidumpMemoryRegion::GetBase() const { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpMemoryRegion for GetBase"; + return static_cast(-1); + } + + return descriptor_->start_of_memory_range; +} + + +uint32_t MinidumpMemoryRegion::GetSize() const { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpMemoryRegion for GetSize"; + return 0; + } + + return descriptor_->memory.data_size; +} + + +void MinidumpMemoryRegion::FreeMemory() { + delete memory_; + memory_ = NULL; +} + + +template +bool MinidumpMemoryRegion::GetMemoryAtAddressInternal(uint64_t address, + T* value) const { + BPLOG_IF(ERROR, !value) << "MinidumpMemoryRegion::GetMemoryAtAddressInternal " + "requires |value|"; + assert(value); + *value = 0; + + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpMemoryRegion for " + "GetMemoryAtAddressInternal"; + return false; + } + + // Common failure case + if (address < descriptor_->start_of_memory_range || + sizeof(T) > numeric_limits::max() - address || + address + sizeof(T) > descriptor_->start_of_memory_range + + descriptor_->memory.data_size) { + BPLOG(INFO) << "MinidumpMemoryRegion request out of range: " << + HexString(address) << "+" << sizeof(T) << "/" << + HexString(descriptor_->start_of_memory_range) << "+" << + HexString(descriptor_->memory.data_size); + return false; + } + + const uint8_t* memory = GetMemory(); + if (!memory) { + // GetMemory already logged a perfectly good message. + return false; + } + + // If the CPU requires memory accesses to be aligned, this can crash. + // x86 and ppc are able to cope, though. + *value = *reinterpret_cast( + &memory[address - descriptor_->start_of_memory_range]); + + if (minidump_->swap()) + Swap(value); + + return true; +} + + +bool MinidumpMemoryRegion::GetMemoryAtAddress(uint64_t address, + uint8_t* value) const { + return GetMemoryAtAddressInternal(address, value); +} + + +bool MinidumpMemoryRegion::GetMemoryAtAddress(uint64_t address, + uint16_t* value) const { + return GetMemoryAtAddressInternal(address, value); +} + + +bool MinidumpMemoryRegion::GetMemoryAtAddress(uint64_t address, + uint32_t* value) const { + return GetMemoryAtAddressInternal(address, value); +} + + +bool MinidumpMemoryRegion::GetMemoryAtAddress(uint64_t address, + uint64_t* value) const { + return GetMemoryAtAddressInternal(address, value); +} + + +void MinidumpMemoryRegion::Print() const { + if (!valid_) { + BPLOG(ERROR) << "MinidumpMemoryRegion cannot print invalid data"; + return; + } + + const uint8_t* memory = GetMemory(); + if (memory) { + printf("0x"); + for (unsigned int byte_index = 0; + byte_index < descriptor_->memory.data_size; + byte_index++) { + printf("%02x", memory[byte_index]); + } + printf("\n"); + } else { + printf("No memory\n"); + } +} + + +// +// MinidumpThread +// + + +MinidumpThread::MinidumpThread(Minidump* minidump) + : MinidumpObject(minidump), + thread_(), + memory_(NULL), + context_(NULL) { +} + + +MinidumpThread::~MinidumpThread() { + delete memory_; + delete context_; +} + + +bool MinidumpThread::Read() { + // Invalidate cached data. 
+ delete memory_; + memory_ = NULL; + delete context_; + context_ = NULL; + + valid_ = false; + + if (!minidump_->ReadBytes(&thread_, sizeof(thread_))) { + BPLOG(ERROR) << "MinidumpThread cannot read thread"; + return false; + } + + if (minidump_->swap()) { + Swap(&thread_.thread_id); + Swap(&thread_.suspend_count); + Swap(&thread_.priority_class); + Swap(&thread_.priority); + Swap(&thread_.teb); + Swap(&thread_.stack); + Swap(&thread_.thread_context); + } + + // Check for base + size overflow or undersize. + if (thread_.stack.memory.rva == 0 || + thread_.stack.memory.data_size == 0 || + thread_.stack.memory.data_size > numeric_limits::max() - + thread_.stack.start_of_memory_range) { + // This is ok, but log an error anyway. + BPLOG(ERROR) << "MinidumpThread has a memory region problem, " << + HexString(thread_.stack.start_of_memory_range) << "+" << + HexString(thread_.stack.memory.data_size) << + ", RVA 0x" << HexString(thread_.stack.memory.rva); + } else { + memory_ = new MinidumpMemoryRegion(minidump_); + memory_->SetDescriptor(&thread_.stack); + } + + valid_ = true; + return true; +} + +uint64_t MinidumpThread::GetStartOfStackMemoryRange() const { + if (!valid_) { + BPLOG(ERROR) << "GetStartOfStackMemoryRange: Invalid MinidumpThread"; + return 0; + } + + return thread_.stack.start_of_memory_range; +} + +MinidumpMemoryRegion* MinidumpThread::GetMemory() { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpThread for GetMemory"; + return NULL; + } + + return memory_; +} + + +MinidumpContext* MinidumpThread::GetContext() { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpThread for GetContext"; + return NULL; + } + + if (!context_) { + if (!minidump_->SeekSet(thread_.thread_context.rva)) { + BPLOG(ERROR) << "MinidumpThread cannot seek to context"; + return NULL; + } + + scoped_ptr context(new MinidumpContext(minidump_)); + + if (!context->Read(thread_.thread_context.data_size)) { + BPLOG(ERROR) << "MinidumpThread cannot read context"; + return NULL; + } + + context_ = context.release(); + } + + return context_; +} + + +bool MinidumpThread::GetThreadID(uint32_t *thread_id) const { + BPLOG_IF(ERROR, !thread_id) << "MinidumpThread::GetThreadID requires " + "|thread_id|"; + assert(thread_id); + *thread_id = 0; + + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpThread for GetThreadID"; + return false; + } + + *thread_id = thread_.thread_id; + return true; +} + + +void MinidumpThread::Print() { + if (!valid_) { + BPLOG(ERROR) << "MinidumpThread cannot print invalid data"; + return; + } + + printf("MDRawThread\n"); + printf(" thread_id = 0x%x\n", thread_.thread_id); + printf(" suspend_count = %d\n", thread_.suspend_count); + printf(" priority_class = 0x%x\n", thread_.priority_class); + printf(" priority = 0x%x\n", thread_.priority); + printf(" teb = 0x%" PRIx64 "\n", thread_.teb); + printf(" stack.start_of_memory_range = 0x%" PRIx64 "\n", + thread_.stack.start_of_memory_range); + printf(" stack.memory.data_size = 0x%x\n", + thread_.stack.memory.data_size); + printf(" stack.memory.rva = 0x%x\n", thread_.stack.memory.rva); + printf(" thread_context.data_size = 0x%x\n", + thread_.thread_context.data_size); + printf(" thread_context.rva = 0x%x\n", + thread_.thread_context.rva); + + MinidumpContext* context = GetContext(); + if (context) { + printf("\n"); + context->Print(); + } else { + printf(" (no context)\n"); + printf("\n"); + } + + MinidumpMemoryRegion* memory = GetMemory(); + if (memory) { + printf("Stack\n"); + memory->Print(); + } else { + printf("No stack\n"); + } + printf("\n"); +} 
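
Reviewer note, not part of the patch: the accessors above are how a consumer reaches a thread's register context and stack snapshot. The sketch below is illustrative only; it assumes a google_breakpad::Minidump object named `dump` that has already returned true from Read(), and the thread_count() accessor declared in breakpad's minidump.h.

    // Walk the threads of an already-read minidump (error handling omitted).
    MinidumpThreadList* threads = dump.GetThreadList();
    for (unsigned int i = 0; threads && i < threads->thread_count(); ++i) {
      MinidumpThread* thread = threads->GetThreadAtIndex(i);
      uint32_t thread_id = 0;
      if (!thread || !thread->GetThreadID(&thread_id))
        continue;
      MinidumpContext* context = thread->GetContext();    // register state, may be NULL
      MinidumpMemoryRegion* stack = thread->GetMemory();  // stack snapshot, may be NULL
      if (stack) {
        printf("thread 0x%x: stack at 0x%" PRIx64 ", %u bytes\n",
               thread_id, stack->GetBase(), stack->GetSize());
      }
      (void)context;
    }
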
+ + +// +// MinidumpThreadList +// + + +uint32_t MinidumpThreadList::max_threads_ = 4096; + + +MinidumpThreadList::MinidumpThreadList(Minidump* minidump) + : MinidumpStream(minidump), + id_to_thread_map_(), + threads_(NULL), + thread_count_(0) { +} + + +MinidumpThreadList::~MinidumpThreadList() { + delete threads_; +} + + +bool MinidumpThreadList::Read(uint32_t expected_size) { + // Invalidate cached data. + id_to_thread_map_.clear(); + delete threads_; + threads_ = NULL; + thread_count_ = 0; + + valid_ = false; + + uint32_t thread_count; + if (expected_size < sizeof(thread_count)) { + BPLOG(ERROR) << "MinidumpThreadList count size mismatch, " << + expected_size << " < " << sizeof(thread_count); + return false; + } + if (!minidump_->ReadBytes(&thread_count, sizeof(thread_count))) { + BPLOG(ERROR) << "MinidumpThreadList cannot read thread count"; + return false; + } + + if (minidump_->swap()) + Swap(&thread_count); + + if (thread_count > numeric_limits::max() / sizeof(MDRawThread)) { + BPLOG(ERROR) << "MinidumpThreadList thread count " << thread_count << + " would cause multiplication overflow"; + return false; + } + + if (expected_size != sizeof(thread_count) + + thread_count * sizeof(MDRawThread)) { + // may be padded with 4 bytes on 64bit ABIs for alignment + if (expected_size == sizeof(thread_count) + 4 + + thread_count * sizeof(MDRawThread)) { + uint32_t useless; + if (!minidump_->ReadBytes(&useless, 4)) { + BPLOG(ERROR) << "MinidumpThreadList cannot read threadlist padded " + "bytes"; + return false; + } + } else { + BPLOG(ERROR) << "MinidumpThreadList size mismatch, " << expected_size << + " != " << sizeof(thread_count) + + thread_count * sizeof(MDRawThread); + return false; + } + } + + + if (thread_count > max_threads_) { + BPLOG(ERROR) << "MinidumpThreadList count " << thread_count << + " exceeds maximum " << max_threads_; + return false; + } + + if (thread_count != 0) { + scoped_ptr threads( + new MinidumpThreads(thread_count, MinidumpThread(minidump_))); + + for (unsigned int thread_index = 0; + thread_index < thread_count; + ++thread_index) { + MinidumpThread* thread = &(*threads)[thread_index]; + + // Assume that the file offset is correct after the last read. + if (!thread->Read()) { + BPLOG(ERROR) << "MinidumpThreadList cannot read thread " << + thread_index << "/" << thread_count; + return false; + } + + uint32_t thread_id; + if (!thread->GetThreadID(&thread_id)) { + BPLOG(ERROR) << "MinidumpThreadList cannot get thread ID for thread " << + thread_index << "/" << thread_count; + return false; + } + + if (GetThreadByID(thread_id)) { + // Another thread with this ID is already in the list. Data error. + BPLOG(ERROR) << "MinidumpThreadList found multiple threads with ID " << + HexString(thread_id) << " at thread " << + thread_index << "/" << thread_count; + return false; + } + id_to_thread_map_[thread_id] = thread; + } + + threads_ = threads.release(); + } + + thread_count_ = thread_count; + + valid_ = true; + return true; +} + + +MinidumpThread* MinidumpThreadList::GetThreadAtIndex(unsigned int index) + const { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpThreadList for GetThreadAtIndex"; + return NULL; + } + + if (index >= thread_count_) { + BPLOG(ERROR) << "MinidumpThreadList index out of range: " << + index << "/" << thread_count_; + return NULL; + } + + return &(*threads_)[index]; +} + + +MinidumpThread* MinidumpThreadList::GetThreadByID(uint32_t thread_id) { + // Don't check valid_. Read calls this method before everything is + // validated. 
It is safe to not check valid_ here. + return id_to_thread_map_[thread_id]; +} + + +void MinidumpThreadList::Print() { + if (!valid_) { + BPLOG(ERROR) << "MinidumpThreadList cannot print invalid data"; + return; + } + + printf("MinidumpThreadList\n"); + printf(" thread_count = %d\n", thread_count_); + printf("\n"); + + for (unsigned int thread_index = 0; + thread_index < thread_count_; + ++thread_index) { + printf("thread[%d]\n", thread_index); + + (*threads_)[thread_index].Print(); + } +} + + +// +// MinidumpModule +// + + +uint32_t MinidumpModule::max_cv_bytes_ = 32768; +uint32_t MinidumpModule::max_misc_bytes_ = 32768; + + +MinidumpModule::MinidumpModule(Minidump* minidump) + : MinidumpObject(minidump), + module_valid_(false), + has_debug_info_(false), + module_(), + name_(NULL), + cv_record_(NULL), + cv_record_signature_(MD_CVINFOUNKNOWN_SIGNATURE), + misc_record_(NULL) { +} + + +MinidumpModule::~MinidumpModule() { + delete name_; + delete cv_record_; + delete misc_record_; +} + + +bool MinidumpModule::Read() { + // Invalidate cached data. + delete name_; + name_ = NULL; + delete cv_record_; + cv_record_ = NULL; + cv_record_signature_ = MD_CVINFOUNKNOWN_SIGNATURE; + delete misc_record_; + misc_record_ = NULL; + + module_valid_ = false; + has_debug_info_ = false; + valid_ = false; + + if (!minidump_->ReadBytes(&module_, MD_MODULE_SIZE)) { + BPLOG(ERROR) << "MinidumpModule cannot read module"; + return false; + } + + if (minidump_->swap()) { + Swap(&module_.base_of_image); + Swap(&module_.size_of_image); + Swap(&module_.checksum); + Swap(&module_.time_date_stamp); + Swap(&module_.module_name_rva); + Swap(&module_.version_info.signature); + Swap(&module_.version_info.struct_version); + Swap(&module_.version_info.file_version_hi); + Swap(&module_.version_info.file_version_lo); + Swap(&module_.version_info.product_version_hi); + Swap(&module_.version_info.product_version_lo); + Swap(&module_.version_info.file_flags_mask); + Swap(&module_.version_info.file_flags); + Swap(&module_.version_info.file_os); + Swap(&module_.version_info.file_type); + Swap(&module_.version_info.file_subtype); + Swap(&module_.version_info.file_date_hi); + Swap(&module_.version_info.file_date_lo); + Swap(&module_.cv_record); + Swap(&module_.misc_record); + // Don't swap reserved fields because their contents are unknown (as + // are their proper widths). + } + + // Check for base + size overflow or undersize. + if (module_.size_of_image == 0 || + module_.size_of_image > + numeric_limits::max() - module_.base_of_image) { + BPLOG(ERROR) << "MinidumpModule has a module problem, " << + HexString(module_.base_of_image) << "+" << + HexString(module_.size_of_image); + return false; + } + + module_valid_ = true; + return true; +} + + +bool MinidumpModule::ReadAuxiliaryData() { + if (!module_valid_) { + BPLOG(ERROR) << "Invalid MinidumpModule for ReadAuxiliaryData"; + return false; + } + + // Each module must have a name. + name_ = minidump_->ReadString(module_.module_name_rva); + if (!name_) { + BPLOG(ERROR) << "MinidumpModule could not read name"; + return false; + } + + // At this point, we have enough info for the module to be valid. + valid_ = true; + + // CodeView and miscellaneous debug records are only required if the + // module indicates that they exist. 
+ if (module_.cv_record.data_size && !GetCVRecord(NULL)) { + BPLOG(ERROR) << "MinidumpModule has no CodeView record, " + "but one was expected"; + return false; + } + + if (module_.misc_record.data_size && !GetMiscRecord(NULL)) { + BPLOG(ERROR) << "MinidumpModule has no miscellaneous debug record, " + "but one was expected"; + return false; + } + + has_debug_info_ = true; + return true; +} + + +string MinidumpModule::code_file() const { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpModule for code_file"; + return ""; + } + + return *name_; +} + + +string MinidumpModule::code_identifier() const { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpModule for code_identifier"; + return ""; + } + + if (!has_debug_info_) + return ""; + + MinidumpSystemInfo *minidump_system_info = minidump_->GetSystemInfo(); + if (!minidump_system_info) { + BPLOG(ERROR) << "MinidumpModule code_identifier requires " + "MinidumpSystemInfo"; + return ""; + } + + const MDRawSystemInfo *raw_system_info = minidump_system_info->system_info(); + if (!raw_system_info) { + BPLOG(ERROR) << "MinidumpModule code_identifier requires MDRawSystemInfo"; + return ""; + } + + string identifier; + + switch (raw_system_info->platform_id) { + case MD_OS_WIN32_NT: + case MD_OS_WIN32_WINDOWS: { + // Use the same format that the MS symbol server uses in filesystem + // hierarchies. + char identifier_string[17]; + snprintf(identifier_string, sizeof(identifier_string), "%08X%x", + module_.time_date_stamp, module_.size_of_image); + identifier = identifier_string; + break; + } + + case MD_OS_MAC_OS_X: + case MD_OS_IOS: + case MD_OS_SOLARIS: + case MD_OS_ANDROID: + case MD_OS_LINUX: + case MD_OS_NACL: + case MD_OS_PS3: { + // TODO(mmentovai): support uuid extension if present, otherwise fall + // back to version (from LC_ID_DYLIB?), otherwise fall back to something + // else. + identifier = "id"; + break; + } + + default: { + // Without knowing what OS generated the dump, we can't generate a good + // identifier. Return an empty string, signalling failure. + BPLOG(ERROR) << "MinidumpModule code_identifier requires known platform, " + "found " << HexString(raw_system_info->platform_id); + break; + } + } + + return identifier; +} + + +string MinidumpModule::debug_file() const { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpModule for debug_file"; + return ""; + } + + if (!has_debug_info_) + return ""; + + string file; + // Prefer the CodeView record if present. + if (cv_record_) { + if (cv_record_signature_ == MD_CVINFOPDB70_SIGNATURE) { + // It's actually an MDCVInfoPDB70 structure. + const MDCVInfoPDB70* cv_record_70 = + reinterpret_cast(&(*cv_record_)[0]); + assert(cv_record_70->cv_signature == MD_CVINFOPDB70_SIGNATURE); + + // GetCVRecord guarantees pdb_file_name is null-terminated. + file = reinterpret_cast(cv_record_70->pdb_file_name); + } else if (cv_record_signature_ == MD_CVINFOPDB20_SIGNATURE) { + // It's actually an MDCVInfoPDB20 structure. + const MDCVInfoPDB20* cv_record_20 = + reinterpret_cast(&(*cv_record_)[0]); + assert(cv_record_20->cv_header.signature == MD_CVINFOPDB20_SIGNATURE); + + // GetCVRecord guarantees pdb_file_name is null-terminated. + file = reinterpret_cast(cv_record_20->pdb_file_name); + } + + // If there's a CodeView record but it doesn't match a known signature, + // try the miscellaneous record. + } + + if (file.empty()) { + // No usable CodeView record. Try the miscellaneous debug record. 
+ if (misc_record_) { + const MDImageDebugMisc* misc_record = + reinterpret_cast(&(*misc_record_)[0]); + if (!misc_record->unicode) { + // If it's not Unicode, just stuff it into the string. It's unclear + // if misc_record->data is 0-terminated, so use an explicit size. + file = string( + reinterpret_cast(misc_record->data), + module_.misc_record.data_size - MDImageDebugMisc_minsize); + } else { + // There's a misc_record but it encodes the debug filename in UTF-16. + // (Actually, because miscellaneous records are so old, it's probably + // UCS-2.) Convert it to UTF-8 for congruity with the other strings + // that this method (and all other methods in the Minidump family) + // return. + + unsigned int bytes = + module_.misc_record.data_size - MDImageDebugMisc_minsize; + if (bytes % 2 == 0) { + unsigned int utf16_words = bytes / 2; + + // UTF16ToUTF8 expects a vector, so create a temporary one + // and copy the UTF-16 data into it. + vector string_utf16(utf16_words); + if (utf16_words) + memcpy(&string_utf16[0], &misc_record->data, bytes); + + // GetMiscRecord already byte-swapped the data[] field if it contains + // UTF-16, so pass false as the swap argument. + scoped_ptr new_file(UTF16ToUTF8(string_utf16, false)); + file = *new_file; + } + } + } + } + + // Relatively common case + BPLOG_IF(INFO, file.empty()) << "MinidumpModule could not determine " + "debug_file for " << *name_; + + return file; +} + + +string MinidumpModule::debug_identifier() const { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpModule for debug_identifier"; + return ""; + } + + if (!has_debug_info_) + return ""; + + string identifier; + + // Use the CodeView record if present. + if (cv_record_) { + if (cv_record_signature_ == MD_CVINFOPDB70_SIGNATURE) { + // It's actually an MDCVInfoPDB70 structure. + const MDCVInfoPDB70* cv_record_70 = + reinterpret_cast(&(*cv_record_)[0]); + assert(cv_record_70->cv_signature == MD_CVINFOPDB70_SIGNATURE); + + // Use the same format that the MS symbol server uses in filesystem + // hierarchies. + char identifier_string[41]; + snprintf(identifier_string, sizeof(identifier_string), + "%08X%04X%04X%02X%02X%02X%02X%02X%02X%02X%02X%x", + cv_record_70->signature.data1, + cv_record_70->signature.data2, + cv_record_70->signature.data3, + cv_record_70->signature.data4[0], + cv_record_70->signature.data4[1], + cv_record_70->signature.data4[2], + cv_record_70->signature.data4[3], + cv_record_70->signature.data4[4], + cv_record_70->signature.data4[5], + cv_record_70->signature.data4[6], + cv_record_70->signature.data4[7], + cv_record_70->age); + identifier = identifier_string; + } else if (cv_record_signature_ == MD_CVINFOPDB20_SIGNATURE) { + // It's actually an MDCVInfoPDB20 structure. + const MDCVInfoPDB20* cv_record_20 = + reinterpret_cast(&(*cv_record_)[0]); + assert(cv_record_20->cv_header.signature == MD_CVINFOPDB20_SIGNATURE); + + // Use the same format that the MS symbol server uses in filesystem + // hierarchies. + char identifier_string[17]; + snprintf(identifier_string, sizeof(identifier_string), + "%08X%x", cv_record_20->signature, cv_record_20->age); + identifier = identifier_string; + } + } + + // TODO(mmentovai): if there's no usable CodeView record, there might be a + // miscellaneous debug record. It only carries a filename, though, and no + // identifier. 
I'm not sure what the right thing to do for the identifier + // is in that case, but I don't expect to find many modules without a + // CodeView record (or some other Breakpad extension structure in place of + // a CodeView record). Treat it as an error (empty identifier) for now. + + // TODO(mmentovai): on the Mac, provide fallbacks as in code_identifier(). + + // Relatively common case + BPLOG_IF(INFO, identifier.empty()) << "MinidumpModule could not determine " + "debug_identifier for " << *name_; + + return identifier; +} + + +string MinidumpModule::version() const { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpModule for version"; + return ""; + } + + string version; + + if (module_.version_info.signature == MD_VSFIXEDFILEINFO_SIGNATURE && + module_.version_info.struct_version & MD_VSFIXEDFILEINFO_VERSION) { + char version_string[24]; + snprintf(version_string, sizeof(version_string), "%u.%u.%u.%u", + module_.version_info.file_version_hi >> 16, + module_.version_info.file_version_hi & 0xffff, + module_.version_info.file_version_lo >> 16, + module_.version_info.file_version_lo & 0xffff); + version = version_string; + } + + // TODO(mmentovai): possibly support other struct types in place of + // the one used with MD_VSFIXEDFILEINFO_SIGNATURE. We can possibly use + // a different structure that better represents versioning facilities on + // Mac OS X and Linux, instead of forcing them to adhere to the dotted + // quad of 16-bit ints that Windows uses. + + BPLOG_IF(INFO, version.empty()) << "MinidumpModule could not determine " + "version for " << *name_; + + return version; +} + + +const CodeModule* MinidumpModule::Copy() const { + return new BasicCodeModule(this); +} + + +const uint8_t* MinidumpModule::GetCVRecord(uint32_t* size) { + if (!module_valid_) { + BPLOG(ERROR) << "Invalid MinidumpModule for GetCVRecord"; + return NULL; + } + + if (!cv_record_) { + // This just guards against 0-sized CodeView records; more specific checks + // are used when the signature is checked against various structure types. + if (module_.cv_record.data_size == 0) { + return NULL; + } + + if (!minidump_->SeekSet(module_.cv_record.rva)) { + BPLOG(ERROR) << "MinidumpModule could not seek to CodeView record"; + return NULL; + } + + if (module_.cv_record.data_size > max_cv_bytes_) { + BPLOG(ERROR) << "MinidumpModule CodeView record size " << + module_.cv_record.data_size << " exceeds maximum " << + max_cv_bytes_; + return NULL; + } + + // Allocating something that will be accessed as MDCVInfoPDB70 or + // MDCVInfoPDB20 but is allocated as uint8_t[] can cause alignment + // problems. x86 and ppc are able to cope, though. This allocation + // style is needed because the MDCVInfoPDB70 or MDCVInfoPDB20 are + // variable-sized due to their pdb_file_name fields; these structures + // are not MDCVInfoPDB70_minsize or MDCVInfoPDB20_minsize and treating + // them as such would result in incomplete structures or overruns. 
+ scoped_ptr< vector > cv_record( + new vector(module_.cv_record.data_size)); + + if (!minidump_->ReadBytes(&(*cv_record)[0], module_.cv_record.data_size)) { + BPLOG(ERROR) << "MinidumpModule could not read CodeView record"; + return NULL; + } + + uint32_t signature = MD_CVINFOUNKNOWN_SIGNATURE; + if (module_.cv_record.data_size > sizeof(signature)) { + MDCVInfoPDB70* cv_record_signature = + reinterpret_cast(&(*cv_record)[0]); + signature = cv_record_signature->cv_signature; + if (minidump_->swap()) + Swap(&signature); + } + + if (signature == MD_CVINFOPDB70_SIGNATURE) { + // Now that the structure type is known, recheck the size. + if (MDCVInfoPDB70_minsize > module_.cv_record.data_size) { + BPLOG(ERROR) << "MinidumpModule CodeView7 record size mismatch, " << + MDCVInfoPDB70_minsize << " > " << + module_.cv_record.data_size; + return NULL; + } + + if (minidump_->swap()) { + MDCVInfoPDB70* cv_record_70 = + reinterpret_cast(&(*cv_record)[0]); + Swap(&cv_record_70->cv_signature); + Swap(&cv_record_70->signature); + Swap(&cv_record_70->age); + // Don't swap cv_record_70.pdb_file_name because it's an array of 8-bit + // quantities. (It's a path, is it UTF-8?) + } + + // The last field of either structure is null-terminated 8-bit character + // data. Ensure that it's null-terminated. + if ((*cv_record)[module_.cv_record.data_size - 1] != '\0') { + BPLOG(ERROR) << "MinidumpModule CodeView7 record string is not " + "0-terminated"; + return NULL; + } + } else if (signature == MD_CVINFOPDB20_SIGNATURE) { + // Now that the structure type is known, recheck the size. + if (MDCVInfoPDB20_minsize > module_.cv_record.data_size) { + BPLOG(ERROR) << "MinidumpModule CodeView2 record size mismatch, " << + MDCVInfoPDB20_minsize << " > " << + module_.cv_record.data_size; + return NULL; + } + if (minidump_->swap()) { + MDCVInfoPDB20* cv_record_20 = + reinterpret_cast(&(*cv_record)[0]); + Swap(&cv_record_20->cv_header.signature); + Swap(&cv_record_20->cv_header.offset); + Swap(&cv_record_20->signature); + Swap(&cv_record_20->age); + // Don't swap cv_record_20.pdb_file_name because it's an array of 8-bit + // quantities. (It's a path, is it UTF-8?) + } + + // The last field of either structure is null-terminated 8-bit character + // data. Ensure that it's null-terminated. + if ((*cv_record)[module_.cv_record.data_size - 1] != '\0') { + BPLOG(ERROR) << "MindumpModule CodeView2 record string is not " + "0-terminated"; + return NULL; + } + } + + // If the signature doesn't match something above, it's not something + // that Breakpad can presently handle directly. Because some modules in + // the wild contain such CodeView records as MD_CVINFOCV50_SIGNATURE, + // don't bail out here - allow the data to be returned to the user, + // although byte-swapping can't be done. + + // Store the vector type because that's how storage was allocated, but + // return it casted to uint8_t*. 
+ cv_record_ = cv_record.release(); + cv_record_signature_ = signature; + } + + if (size) + *size = module_.cv_record.data_size; + + return &(*cv_record_)[0]; +} + + +const MDImageDebugMisc* MinidumpModule::GetMiscRecord(uint32_t* size) { + if (!module_valid_) { + BPLOG(ERROR) << "Invalid MinidumpModule for GetMiscRecord"; + return NULL; + } + + if (!misc_record_) { + if (module_.misc_record.data_size == 0) { + return NULL; + } + + if (MDImageDebugMisc_minsize > module_.misc_record.data_size) { + BPLOG(ERROR) << "MinidumpModule miscellaneous debugging record " + "size mismatch, " << MDImageDebugMisc_minsize << " > " << + module_.misc_record.data_size; + return NULL; + } + + if (!minidump_->SeekSet(module_.misc_record.rva)) { + BPLOG(ERROR) << "MinidumpModule could not seek to miscellaneous " + "debugging record"; + return NULL; + } + + if (module_.misc_record.data_size > max_misc_bytes_) { + BPLOG(ERROR) << "MinidumpModule miscellaneous debugging record size " << + module_.misc_record.data_size << " exceeds maximum " << + max_misc_bytes_; + return NULL; + } + + // Allocating something that will be accessed as MDImageDebugMisc but + // is allocated as uint8_t[] can cause alignment problems. x86 and + // ppc are able to cope, though. This allocation style is needed + // because the MDImageDebugMisc is variable-sized due to its data field; + // this structure is not MDImageDebugMisc_minsize and treating it as such + // would result in an incomplete structure or an overrun. + scoped_ptr< vector > misc_record_mem( + new vector(module_.misc_record.data_size)); + MDImageDebugMisc* misc_record = + reinterpret_cast(&(*misc_record_mem)[0]); + + if (!minidump_->ReadBytes(misc_record, module_.misc_record.data_size)) { + BPLOG(ERROR) << "MinidumpModule could not read miscellaneous debugging " + "record"; + return NULL; + } + + if (minidump_->swap()) { + Swap(&misc_record->data_type); + Swap(&misc_record->length); + // Don't swap misc_record.unicode because it's an 8-bit quantity. + // Don't swap the reserved fields for the same reason, and because + // they don't contain any valid data. + if (misc_record->unicode) { + // There is a potential alignment problem, but shouldn't be a problem + // in practice due to the layout of MDImageDebugMisc. + uint16_t* data16 = reinterpret_cast(&(misc_record->data)); + unsigned int dataBytes = module_.misc_record.data_size - + MDImageDebugMisc_minsize; + Swap(data16, dataBytes); + } + } + + if (module_.misc_record.data_size != misc_record->length) { + BPLOG(ERROR) << "MinidumpModule miscellaneous debugging record data " + "size mismatch, " << module_.misc_record.data_size << + " != " << misc_record->length; + return NULL; + } + + // Store the vector type because that's how storage was allocated, but + // return it casted to MDImageDebugMisc*. 
+ misc_record_ = misc_record_mem.release(); + } + + if (size) + *size = module_.misc_record.data_size; + + return reinterpret_cast(&(*misc_record_)[0]); +} + + +void MinidumpModule::Print() { + if (!valid_) { + BPLOG(ERROR) << "MinidumpModule cannot print invalid data"; + return; + } + + printf("MDRawModule\n"); + printf(" base_of_image = 0x%" PRIx64 "\n", + module_.base_of_image); + printf(" size_of_image = 0x%x\n", + module_.size_of_image); + printf(" checksum = 0x%x\n", + module_.checksum); + printf(" time_date_stamp = 0x%x %s\n", + module_.time_date_stamp, + TimeTToUTCString(module_.time_date_stamp).c_str()); + printf(" module_name_rva = 0x%x\n", + module_.module_name_rva); + printf(" version_info.signature = 0x%x\n", + module_.version_info.signature); + printf(" version_info.struct_version = 0x%x\n", + module_.version_info.struct_version); + printf(" version_info.file_version = 0x%x:0x%x\n", + module_.version_info.file_version_hi, + module_.version_info.file_version_lo); + printf(" version_info.product_version = 0x%x:0x%x\n", + module_.version_info.product_version_hi, + module_.version_info.product_version_lo); + printf(" version_info.file_flags_mask = 0x%x\n", + module_.version_info.file_flags_mask); + printf(" version_info.file_flags = 0x%x\n", + module_.version_info.file_flags); + printf(" version_info.file_os = 0x%x\n", + module_.version_info.file_os); + printf(" version_info.file_type = 0x%x\n", + module_.version_info.file_type); + printf(" version_info.file_subtype = 0x%x\n", + module_.version_info.file_subtype); + printf(" version_info.file_date = 0x%x:0x%x\n", + module_.version_info.file_date_hi, + module_.version_info.file_date_lo); + printf(" cv_record.data_size = %d\n", + module_.cv_record.data_size); + printf(" cv_record.rva = 0x%x\n", + module_.cv_record.rva); + printf(" misc_record.data_size = %d\n", + module_.misc_record.data_size); + printf(" misc_record.rva = 0x%x\n", + module_.misc_record.rva); + + printf(" (code_file) = \"%s\"\n", code_file().c_str()); + printf(" (code_identifier) = \"%s\"\n", + code_identifier().c_str()); + + uint32_t cv_record_size; + const uint8_t *cv_record = GetCVRecord(&cv_record_size); + if (cv_record) { + if (cv_record_signature_ == MD_CVINFOPDB70_SIGNATURE) { + const MDCVInfoPDB70* cv_record_70 = + reinterpret_cast(cv_record); + assert(cv_record_70->cv_signature == MD_CVINFOPDB70_SIGNATURE); + + printf(" (cv_record).cv_signature = 0x%x\n", + cv_record_70->cv_signature); + printf(" (cv_record).signature = %08x-%04x-%04x-%02x%02x-", + cv_record_70->signature.data1, + cv_record_70->signature.data2, + cv_record_70->signature.data3, + cv_record_70->signature.data4[0], + cv_record_70->signature.data4[1]); + for (unsigned int guidIndex = 2; + guidIndex < 8; + ++guidIndex) { + printf("%02x", cv_record_70->signature.data4[guidIndex]); + } + printf("\n"); + printf(" (cv_record).age = %d\n", + cv_record_70->age); + printf(" (cv_record).pdb_file_name = \"%s\"\n", + cv_record_70->pdb_file_name); + } else if (cv_record_signature_ == MD_CVINFOPDB20_SIGNATURE) { + const MDCVInfoPDB20* cv_record_20 = + reinterpret_cast(cv_record); + assert(cv_record_20->cv_header.signature == MD_CVINFOPDB20_SIGNATURE); + + printf(" (cv_record).cv_header.signature = 0x%x\n", + cv_record_20->cv_header.signature); + printf(" (cv_record).cv_header.offset = 0x%x\n", + cv_record_20->cv_header.offset); + printf(" (cv_record).signature = 0x%x %s\n", + cv_record_20->signature, + TimeTToUTCString(cv_record_20->signature).c_str()); + printf(" (cv_record).age = %d\n", + 
cv_record_20->age); + printf(" (cv_record).pdb_file_name = \"%s\"\n", + cv_record_20->pdb_file_name); + } else { + printf(" (cv_record) = "); + for (unsigned int cv_byte_index = 0; + cv_byte_index < cv_record_size; + ++cv_byte_index) { + printf("%02x", cv_record[cv_byte_index]); + } + printf("\n"); + } + } else { + printf(" (cv_record) = (null)\n"); + } + + const MDImageDebugMisc* misc_record = GetMiscRecord(NULL); + if (misc_record) { + printf(" (misc_record).data_type = 0x%x\n", + misc_record->data_type); + printf(" (misc_record).length = 0x%x\n", + misc_record->length); + printf(" (misc_record).unicode = %d\n", + misc_record->unicode); + if (misc_record->unicode) { + string misc_record_data_utf8; + ConvertUTF16BufferToUTF8String( + reinterpret_cast(misc_record->data), + misc_record->length - offsetof(MDImageDebugMisc, data), + &misc_record_data_utf8, + false); // already swapped + printf(" (misc_record).data = \"%s\"\n", + misc_record_data_utf8.c_str()); + } else { + printf(" (misc_record).data = \"%s\"\n", + misc_record->data); + } + } else { + printf(" (misc_record) = (null)\n"); + } + + printf(" (debug_file) = \"%s\"\n", debug_file().c_str()); + printf(" (debug_identifier) = \"%s\"\n", + debug_identifier().c_str()); + printf(" (version) = \"%s\"\n", version().c_str()); + printf("\n"); +} + + +// +// MinidumpModuleList +// + + +uint32_t MinidumpModuleList::max_modules_ = 1024; + + +MinidumpModuleList::MinidumpModuleList(Minidump* minidump) + : MinidumpStream(minidump), + range_map_(new RangeMap()), + modules_(NULL), + module_count_(0) { +} + + +MinidumpModuleList::~MinidumpModuleList() { + delete range_map_; + delete modules_; +} + + +bool MinidumpModuleList::Read(uint32_t expected_size) { + // Invalidate cached data. + range_map_->Clear(); + delete modules_; + modules_ = NULL; + module_count_ = 0; + + valid_ = false; + + uint32_t module_count; + if (expected_size < sizeof(module_count)) { + BPLOG(ERROR) << "MinidumpModuleList count size mismatch, " << + expected_size << " < " << sizeof(module_count); + return false; + } + if (!minidump_->ReadBytes(&module_count, sizeof(module_count))) { + BPLOG(ERROR) << "MinidumpModuleList could not read module count"; + return false; + } + + if (minidump_->swap()) + Swap(&module_count); + + if (module_count > numeric_limits::max() / MD_MODULE_SIZE) { + BPLOG(ERROR) << "MinidumpModuleList module count " << module_count << + " would cause multiplication overflow"; + return false; + } + + if (expected_size != sizeof(module_count) + + module_count * MD_MODULE_SIZE) { + // may be padded with 4 bytes on 64bit ABIs for alignment + if (expected_size == sizeof(module_count) + 4 + + module_count * MD_MODULE_SIZE) { + uint32_t useless; + if (!minidump_->ReadBytes(&useless, 4)) { + BPLOG(ERROR) << "MinidumpModuleList cannot read modulelist padded " + "bytes"; + return false; + } + } else { + BPLOG(ERROR) << "MinidumpModuleList size mismatch, " << expected_size << + " != " << sizeof(module_count) + + module_count * MD_MODULE_SIZE; + return false; + } + } + + if (module_count > max_modules_) { + BPLOG(ERROR) << "MinidumpModuleList count " << module_count_ << + " exceeds maximum " << max_modules_; + return false; + } + + if (module_count != 0) { + scoped_ptr modules( + new MinidumpModules(module_count, MinidumpModule(minidump_))); + + for (unsigned int module_index = 0; + module_index < module_count; + ++module_index) { + MinidumpModule* module = &(*modules)[module_index]; + + // Assume that the file offset is correct after the last read. 
+ if (!module->Read()) { + BPLOG(ERROR) << "MinidumpModuleList could not read module " << + module_index << "/" << module_count; + return false; + } + } + + // Loop through the module list once more to read additional data and + // build the range map. This is done in a second pass because + // MinidumpModule::ReadAuxiliaryData seeks around, and if it were + // included in the loop above, additional seeks would be needed where + // none are now to read contiguous data. + for (unsigned int module_index = 0; + module_index < module_count; + ++module_index) { + MinidumpModule* module = &(*modules)[module_index]; + + // ReadAuxiliaryData fails if any data that the module indicates should + // exist is missing, but we treat some such cases as valid anyway. See + // issue #222: if a debugging record is of a format that's too large to + // handle, it shouldn't render the entire dump invalid. Check module + // validity before giving up. + if (!module->ReadAuxiliaryData() && !module->valid()) { + BPLOG(ERROR) << "MinidumpModuleList could not read required module " + "auxiliary data for module " << + module_index << "/" << module_count; + return false; + } + + // It is safe to use module->code_file() after successfully calling + // module->ReadAuxiliaryData or noting that the module is valid. + + uint64_t base_address = module->base_address(); + uint64_t module_size = module->size(); + if (base_address == static_cast(-1)) { + BPLOG(ERROR) << "MinidumpModuleList found bad base address " + "for module " << module_index << "/" << module_count << + ", " << module->code_file(); + return false; + } + + if (!range_map_->StoreRange(base_address, module_size, module_index)) { + // Android's shared memory implementation /dev/ashmem can contain + // duplicate entries for JITted code, so ignore these. + // TODO(wfh): Remove this code when Android is fixed. + // See https://crbug.com/439531 + const string kDevAshmem("/dev/ashmem/"); + if (module->code_file().compare( + 0, kDevAshmem.length(), kDevAshmem) != 0) { + BPLOG(ERROR) << "MinidumpModuleList could not store module " << + module_index << "/" << module_count << ", " << + module->code_file() << ", " << + HexString(base_address) << "+" << + HexString(module_size); + return false; + } else { + BPLOG(INFO) << "MinidumpModuleList ignoring overlapping module " << + module_index << "/" << module_count << ", " << + module->code_file() << ", " << + HexString(base_address) << "+" << + HexString(module_size); + } + } + } + + modules_ = modules.release(); + } + + module_count_ = module_count; + + valid_ = true; + return true; +} + + +const MinidumpModule* MinidumpModuleList::GetModuleForAddress( + uint64_t address) const { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpModuleList for GetModuleForAddress"; + return NULL; + } + + unsigned int module_index; + if (!range_map_->RetrieveRange(address, &module_index, NULL, NULL)) { + BPLOG(INFO) << "MinidumpModuleList has no module at " << + HexString(address); + return NULL; + } + + return GetModuleAtIndex(module_index); +} + + +const MinidumpModule* MinidumpModuleList::GetMainModule() const { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpModuleList for GetMainModule"; + return NULL; + } + + // The main code module is the first one present in a minidump file's + // MDRawModuleList. 
+ return GetModuleAtIndex(0); +} + + +const MinidumpModule* MinidumpModuleList::GetModuleAtSequence( + unsigned int sequence) const { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpModuleList for GetModuleAtSequence"; + return NULL; + } + + if (sequence >= module_count_) { + BPLOG(ERROR) << "MinidumpModuleList sequence out of range: " << + sequence << "/" << module_count_; + return NULL; + } + + unsigned int module_index; + if (!range_map_->RetrieveRangeAtIndex(sequence, &module_index, NULL, NULL)) { + BPLOG(ERROR) << "MinidumpModuleList has no module at sequence " << sequence; + return NULL; + } + + return GetModuleAtIndex(module_index); +} + + +const MinidumpModule* MinidumpModuleList::GetModuleAtIndex( + unsigned int index) const { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpModuleList for GetModuleAtIndex"; + return NULL; + } + + if (index >= module_count_) { + BPLOG(ERROR) << "MinidumpModuleList index out of range: " << + index << "/" << module_count_; + return NULL; + } + + return &(*modules_)[index]; +} + + +const CodeModules* MinidumpModuleList::Copy() const { + return new BasicCodeModules(this); +} + + +void MinidumpModuleList::Print() { + if (!valid_) { + BPLOG(ERROR) << "MinidumpModuleList cannot print invalid data"; + return; + } + + printf("MinidumpModuleList\n"); + printf(" module_count = %d\n", module_count_); + printf("\n"); + + for (unsigned int module_index = 0; + module_index < module_count_; + ++module_index) { + printf("module[%d]\n", module_index); + + (*modules_)[module_index].Print(); + } +} + + +// +// MinidumpMemoryList +// + + +uint32_t MinidumpMemoryList::max_regions_ = 4096; + + +MinidumpMemoryList::MinidumpMemoryList(Minidump* minidump) + : MinidumpStream(minidump), + range_map_(new RangeMap()), + descriptors_(NULL), + regions_(NULL), + region_count_(0) { +} + + +MinidumpMemoryList::~MinidumpMemoryList() { + delete range_map_; + delete descriptors_; + delete regions_; +} + + +bool MinidumpMemoryList::Read(uint32_t expected_size) { + // Invalidate cached data. 
+  delete descriptors_;
+  descriptors_ = NULL;
+  delete regions_;
+  regions_ = NULL;
+  range_map_->Clear();
+  region_count_ = 0;
+
+  valid_ = false;
+
+  uint32_t region_count;
+  if (expected_size < sizeof(region_count)) {
+    BPLOG(ERROR) << "MinidumpMemoryList count size mismatch, " <<
+                    expected_size << " < " << sizeof(region_count);
+    return false;
+  }
+  if (!minidump_->ReadBytes(&region_count, sizeof(region_count))) {
+    BPLOG(ERROR) << "MinidumpMemoryList could not read memory region count";
+    return false;
+  }
+
+  if (minidump_->swap())
+    Swap(&region_count);
+
+  if (region_count >
+          numeric_limits<uint32_t>::max() / sizeof(MDMemoryDescriptor)) {
+    BPLOG(ERROR) << "MinidumpMemoryList region count " << region_count <<
+                    " would cause multiplication overflow";
+    return false;
+  }
+
+  if (expected_size != sizeof(region_count) +
+                       region_count * sizeof(MDMemoryDescriptor)) {
+    // may be padded with 4 bytes on 64bit ABIs for alignment
+    if (expected_size == sizeof(region_count) + 4 +
+                         region_count * sizeof(MDMemoryDescriptor)) {
+      uint32_t useless;
+      if (!minidump_->ReadBytes(&useless, 4)) {
+        BPLOG(ERROR) << "MinidumpMemoryList cannot read memorylist padded "
+                        "bytes";
+        return false;
+      }
+    } else {
+      BPLOG(ERROR) << "MinidumpMemoryList size mismatch, " << expected_size <<
+                      " != " << sizeof(region_count) +
+                      region_count * sizeof(MDMemoryDescriptor);
+      return false;
+    }
+  }
+
+  if (region_count > max_regions_) {
+    BPLOG(ERROR) << "MinidumpMemoryList count " << region_count <<
+                    " exceeds maximum " << max_regions_;
+    return false;
+  }
+
+  if (region_count != 0) {
+    scoped_ptr<MemoryDescriptors> descriptors(
+        new MemoryDescriptors(region_count));
+
+    // Read the entire array in one fell swoop, instead of reading one entry
+    // at a time in the loop.
+    if (!minidump_->ReadBytes(&(*descriptors)[0],
+                              sizeof(MDMemoryDescriptor) * region_count)) {
+      BPLOG(ERROR) << "MinidumpMemoryList could not read memory region list";
+      return false;
+    }
+
+    scoped_ptr<MemoryRegions> regions(
+        new MemoryRegions(region_count, MinidumpMemoryRegion(minidump_)));
+
+    for (unsigned int region_index = 0;
+         region_index < region_count;
+         ++region_index) {
+      MDMemoryDescriptor* descriptor = &(*descriptors)[region_index];
+
+      if (minidump_->swap())
+        Swap(descriptor);
+
+      uint64_t base_address = descriptor->start_of_memory_range;
+      uint32_t region_size = descriptor->memory.data_size;
+
+      // Check for base + size overflow or undersize.
+      if (region_size == 0 ||
+          region_size > numeric_limits<uint64_t>::max() - base_address) {
+        BPLOG(ERROR) << "MinidumpMemoryList has a memory region problem, " <<
+                        " region " << region_index << "/" << region_count <<
+                        ", " << HexString(base_address) << "+" <<
+                        HexString(region_size);
+        return false;
+      }
+
+      if (!range_map_->StoreRange(base_address, region_size, region_index)) {
+        BPLOG(ERROR) << "MinidumpMemoryList could not store memory region " <<
+                        region_index << "/" << region_count << ", " <<
+                        HexString(base_address) << "+" <<
+                        HexString(region_size);
+        return false;
+      }
+
+      (*regions)[region_index].SetDescriptor(descriptor);
+    }
+
+    descriptors_ = descriptors.release();
+    regions_ = regions.release();
+  }
+
+  region_count_ = region_count;
+
+  valid_ = true;
+  return true;
+}
+
+
+MinidumpMemoryRegion* MinidumpMemoryList::GetMemoryRegionAtIndex(
+    unsigned int index) {
+  if (!valid_) {
+    BPLOG(ERROR) << "Invalid MinidumpMemoryList for GetMemoryRegionAtIndex";
+    return NULL;
+  }
+
+  if (index >= region_count_) {
+    BPLOG(ERROR) << "MinidumpMemoryList index out of range: " <<
+                    index << "/" << region_count_;
+    return NULL;
+  }
+
+  return &(*regions_)[index];
+}
+
+
+MinidumpMemoryRegion* MinidumpMemoryList::GetMemoryRegionForAddress(
+    uint64_t address) {
+  if (!valid_) {
+    BPLOG(ERROR) << "Invalid MinidumpMemoryList for GetMemoryRegionForAddress";
+    return NULL;
+  }
+
+  unsigned int region_index;
+  if (!range_map_->RetrieveRange(address, &region_index, NULL, NULL)) {
+    BPLOG(INFO) << "MinidumpMemoryList has no memory region at " <<
+                   HexString(address);
+    return NULL;
+  }
+
+  return GetMemoryRegionAtIndex(region_index);
+}
+
+
+void MinidumpMemoryList::Print() {
+  if (!valid_) {
+    BPLOG(ERROR) << "MinidumpMemoryList cannot print invalid data";
+    return;
+  }
+
+  printf("MinidumpMemoryList\n");
+  printf("  region_count = %d\n", region_count_);
+  printf("\n");
+
+  for (unsigned int region_index = 0;
+       region_index < region_count_;
+       ++region_index) {
+    MDMemoryDescriptor* descriptor = &(*descriptors_)[region_index];
+    printf("region[%d]\n", region_index);
+    printf("MDMemoryDescriptor\n");
+    printf("  start_of_memory_range = 0x%" PRIx64 "\n",
+           descriptor->start_of_memory_range);
+    printf("  memory.data_size      = 0x%x\n", descriptor->memory.data_size);
+    printf("  memory.rva            = 0x%x\n", descriptor->memory.rva);
+    MinidumpMemoryRegion* region = GetMemoryRegionAtIndex(region_index);
+    if (region) {
+      printf("Memory\n");
+      region->Print();
+    } else {
+      printf("No memory\n");
+    }
+    printf("\n");
+  }
+}
+
+
+//
+// MinidumpException
+//
+
+
+MinidumpException::MinidumpException(Minidump* minidump)
+    : MinidumpStream(minidump),
+      exception_(),
+      context_(NULL) {
+}
+
+
+MinidumpException::~MinidumpException() {
+  delete context_;
+}
+
+
+bool MinidumpException::Read(uint32_t expected_size) {
+  // Invalidate cached data.
+  delete context_;
+  context_ = NULL;
+
+  valid_ = false;
+
+  if (expected_size != sizeof(exception_)) {
+    BPLOG(ERROR) << "MinidumpException size mismatch, " << expected_size <<
+                    " != " << sizeof(exception_);
+    return false;
+  }
+
+  if (!minidump_->ReadBytes(&exception_, sizeof(exception_))) {
+    BPLOG(ERROR) << "MinidumpException cannot read exception";
+    return false;
+  }
+
+  if (minidump_->swap()) {
+    Swap(&exception_.thread_id);
+    // exception_.__align is for alignment only and does not need to be
+    // swapped.
+ Swap(&exception_.exception_record.exception_code); + Swap(&exception_.exception_record.exception_flags); + Swap(&exception_.exception_record.exception_record); + Swap(&exception_.exception_record.exception_address); + Swap(&exception_.exception_record.number_parameters); + // exception_.exception_record.__align is for alignment only and does not + // need to be swapped. + for (unsigned int parameter_index = 0; + parameter_index < MD_EXCEPTION_MAXIMUM_PARAMETERS; + ++parameter_index) { + Swap(&exception_.exception_record.exception_information[parameter_index]); + } + Swap(&exception_.thread_context); + } + + valid_ = true; + return true; +} + + +bool MinidumpException::GetThreadID(uint32_t *thread_id) const { + BPLOG_IF(ERROR, !thread_id) << "MinidumpException::GetThreadID requires " + "|thread_id|"; + assert(thread_id); + *thread_id = 0; + + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpException for GetThreadID"; + return false; + } + + *thread_id = exception_.thread_id; + return true; +} + + +MinidumpContext* MinidumpException::GetContext() { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpException for GetContext"; + return NULL; + } + + if (!context_) { + if (!minidump_->SeekSet(exception_.thread_context.rva)) { + BPLOG(ERROR) << "MinidumpException cannot seek to context"; + return NULL; + } + + scoped_ptr context(new MinidumpContext(minidump_)); + + // Don't log as an error if we can still fall back on the thread's context + // (which must be possible if we got this far.) + if (!context->Read(exception_.thread_context.data_size)) { + BPLOG(INFO) << "MinidumpException cannot read context"; + return NULL; + } + + context_ = context.release(); + } + + return context_; +} + + +void MinidumpException::Print() { + if (!valid_) { + BPLOG(ERROR) << "MinidumpException cannot print invalid data"; + return; + } + + printf("MDException\n"); + printf(" thread_id = 0x%x\n", + exception_.thread_id); + printf(" exception_record.exception_code = 0x%x\n", + exception_.exception_record.exception_code); + printf(" exception_record.exception_flags = 0x%x\n", + exception_.exception_record.exception_flags); + printf(" exception_record.exception_record = 0x%" PRIx64 "\n", + exception_.exception_record.exception_record); + printf(" exception_record.exception_address = 0x%" PRIx64 "\n", + exception_.exception_record.exception_address); + printf(" exception_record.number_parameters = %d\n", + exception_.exception_record.number_parameters); + for (unsigned int parameterIndex = 0; + parameterIndex < exception_.exception_record.number_parameters; + ++parameterIndex) { + printf(" exception_record.exception_information[%2d] = 0x%" PRIx64 "\n", + parameterIndex, + exception_.exception_record.exception_information[parameterIndex]); + } + printf(" thread_context.data_size = %d\n", + exception_.thread_context.data_size); + printf(" thread_context.rva = 0x%x\n", + exception_.thread_context.rva); + MinidumpContext* context = GetContext(); + if (context) { + printf("\n"); + context->Print(); + } else { + printf(" (no context)\n"); + printf("\n"); + } +} + +// +// MinidumpAssertion +// + + +MinidumpAssertion::MinidumpAssertion(Minidump* minidump) + : MinidumpStream(minidump), + assertion_(), + expression_(), + function_(), + file_() { +} + + +MinidumpAssertion::~MinidumpAssertion() { +} + + +bool MinidumpAssertion::Read(uint32_t expected_size) { + // Invalidate cached data. 
+ valid_ = false; + + if (expected_size != sizeof(assertion_)) { + BPLOG(ERROR) << "MinidumpAssertion size mismatch, " << expected_size << + " != " << sizeof(assertion_); + return false; + } + + if (!minidump_->ReadBytes(&assertion_, sizeof(assertion_))) { + BPLOG(ERROR) << "MinidumpAssertion cannot read assertion"; + return false; + } + + // Each of {expression, function, file} is a UTF-16 string, + // we'll convert them to UTF-8 for ease of use. + ConvertUTF16BufferToUTF8String(assertion_.expression, + sizeof(assertion_.expression), &expression_, + minidump_->swap()); + ConvertUTF16BufferToUTF8String(assertion_.function, + sizeof(assertion_.function), &function_, + minidump_->swap()); + ConvertUTF16BufferToUTF8String(assertion_.file, sizeof(assertion_.file), + &file_, minidump_->swap()); + + if (minidump_->swap()) { + Swap(&assertion_.line); + Swap(&assertion_.type); + } + + valid_ = true; + return true; +} + +void MinidumpAssertion::Print() { + if (!valid_) { + BPLOG(ERROR) << "MinidumpAssertion cannot print invalid data"; + return; + } + + printf("MDAssertion\n"); + printf(" expression = %s\n", + expression_.c_str()); + printf(" function = %s\n", + function_.c_str()); + printf(" file = %s\n", + file_.c_str()); + printf(" line = %u\n", + assertion_.line); + printf(" type = %u\n", + assertion_.type); + printf("\n"); +} + +// +// MinidumpSystemInfo +// + + +MinidumpSystemInfo::MinidumpSystemInfo(Minidump* minidump) + : MinidumpStream(minidump), + system_info_(), + csd_version_(NULL), + cpu_vendor_(NULL) { +} + + +MinidumpSystemInfo::~MinidumpSystemInfo() { + delete csd_version_; + delete cpu_vendor_; +} + + +bool MinidumpSystemInfo::Read(uint32_t expected_size) { + // Invalidate cached data. + delete csd_version_; + csd_version_ = NULL; + delete cpu_vendor_; + cpu_vendor_ = NULL; + + valid_ = false; + + if (expected_size != sizeof(system_info_)) { + BPLOG(ERROR) << "MinidumpSystemInfo size mismatch, " << expected_size << + " != " << sizeof(system_info_); + return false; + } + + if (!minidump_->ReadBytes(&system_info_, sizeof(system_info_))) { + BPLOG(ERROR) << "MinidumpSystemInfo cannot read system info"; + return false; + } + + if (minidump_->swap()) { + Swap(&system_info_.processor_architecture); + Swap(&system_info_.processor_level); + Swap(&system_info_.processor_revision); + // number_of_processors and product_type are 8-bit quantities and need no + // swapping. + Swap(&system_info_.major_version); + Swap(&system_info_.minor_version); + Swap(&system_info_.build_number); + Swap(&system_info_.platform_id); + Swap(&system_info_.csd_version_rva); + Swap(&system_info_.suite_mask); + // Don't swap the reserved2 field because its contents are unknown. 
+ + if (system_info_.processor_architecture == MD_CPU_ARCHITECTURE_X86 || + system_info_.processor_architecture == MD_CPU_ARCHITECTURE_X86_WIN64) { + for (unsigned int i = 0; i < 3; ++i) + Swap(&system_info_.cpu.x86_cpu_info.vendor_id[i]); + Swap(&system_info_.cpu.x86_cpu_info.version_information); + Swap(&system_info_.cpu.x86_cpu_info.feature_information); + Swap(&system_info_.cpu.x86_cpu_info.amd_extended_cpu_features); + } else { + for (unsigned int i = 0; i < 2; ++i) + Swap(&system_info_.cpu.other_cpu_info.processor_features[i]); + } + } + + valid_ = true; + return true; +} + + +string MinidumpSystemInfo::GetOS() { + string os; + + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpSystemInfo for GetOS"; + return os; + } + + switch (system_info_.platform_id) { + case MD_OS_WIN32_NT: + case MD_OS_WIN32_WINDOWS: + os = "windows"; + break; + + case MD_OS_MAC_OS_X: + os = "mac"; + break; + + case MD_OS_IOS: + os = "ios"; + break; + + case MD_OS_LINUX: + os = "linux"; + break; + + case MD_OS_SOLARIS: + os = "solaris"; + break; + + case MD_OS_ANDROID: + os = "android"; + break; + + case MD_OS_PS3: + os = "ps3"; + break; + + case MD_OS_NACL: + os = "nacl"; + break; + + default: + BPLOG(ERROR) << "MinidumpSystemInfo unknown OS for platform " << + HexString(system_info_.platform_id); + break; + } + + return os; +} + + +string MinidumpSystemInfo::GetCPU() { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpSystemInfo for GetCPU"; + return ""; + } + + string cpu; + + switch (system_info_.processor_architecture) { + case MD_CPU_ARCHITECTURE_X86: + case MD_CPU_ARCHITECTURE_X86_WIN64: + cpu = "x86"; + break; + + case MD_CPU_ARCHITECTURE_AMD64: + cpu = "x86-64"; + break; + + case MD_CPU_ARCHITECTURE_PPC: + cpu = "ppc"; + break; + + case MD_CPU_ARCHITECTURE_PPC64: + cpu = "ppc64"; + break; + + case MD_CPU_ARCHITECTURE_SPARC: + cpu = "sparc"; + break; + + case MD_CPU_ARCHITECTURE_ARM: + cpu = "arm"; + break; + + case MD_CPU_ARCHITECTURE_ARM64: + cpu = "arm64"; + break; + + default: + BPLOG(ERROR) << "MinidumpSystemInfo unknown CPU for architecture " << + HexString(system_info_.processor_architecture); + break; + } + + return cpu; +} + + +const string* MinidumpSystemInfo::GetCSDVersion() { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpSystemInfo for GetCSDVersion"; + return NULL; + } + + if (!csd_version_) + csd_version_ = minidump_->ReadString(system_info_.csd_version_rva); + + BPLOG_IF(ERROR, !csd_version_) << "MinidumpSystemInfo could not read " + "CSD version"; + + return csd_version_; +} + + +const string* MinidumpSystemInfo::GetCPUVendor() { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpSystemInfo for GetCPUVendor"; + return NULL; + } + + // CPU vendor information can only be determined from x86 minidumps. 
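+  // The three vendor_id words mirror the EBX, EDX and ECX registers returned
+  // by CPUID leaf 0; each word packs four ASCII characters in little-endian
+  // byte order. For example, for "GenuineIntel":
+  //   vendor_id[0] == 0x756e6547  ->  'G' 'e' 'n' 'u'
+  //   vendor_id[1] == 0x49656e69  ->  'i' 'n' 'e' 'I'
+  //   vendor_id[2] == 0x6c65746e  ->  'n' 't' 'e' 'l'
+  // which is exactly what the shift-and-mask sequence below reassembles.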
+ if (!cpu_vendor_ && + (system_info_.processor_architecture == MD_CPU_ARCHITECTURE_X86 || + system_info_.processor_architecture == MD_CPU_ARCHITECTURE_X86_WIN64)) { + char cpu_vendor_string[13]; + snprintf(cpu_vendor_string, sizeof(cpu_vendor_string), + "%c%c%c%c%c%c%c%c%c%c%c%c", + system_info_.cpu.x86_cpu_info.vendor_id[0] & 0xff, + (system_info_.cpu.x86_cpu_info.vendor_id[0] >> 8) & 0xff, + (system_info_.cpu.x86_cpu_info.vendor_id[0] >> 16) & 0xff, + (system_info_.cpu.x86_cpu_info.vendor_id[0] >> 24) & 0xff, + system_info_.cpu.x86_cpu_info.vendor_id[1] & 0xff, + (system_info_.cpu.x86_cpu_info.vendor_id[1] >> 8) & 0xff, + (system_info_.cpu.x86_cpu_info.vendor_id[1] >> 16) & 0xff, + (system_info_.cpu.x86_cpu_info.vendor_id[1] >> 24) & 0xff, + system_info_.cpu.x86_cpu_info.vendor_id[2] & 0xff, + (system_info_.cpu.x86_cpu_info.vendor_id[2] >> 8) & 0xff, + (system_info_.cpu.x86_cpu_info.vendor_id[2] >> 16) & 0xff, + (system_info_.cpu.x86_cpu_info.vendor_id[2] >> 24) & 0xff); + cpu_vendor_ = new string(cpu_vendor_string); + } + + return cpu_vendor_; +} + + +void MinidumpSystemInfo::Print() { + if (!valid_) { + BPLOG(ERROR) << "MinidumpSystemInfo cannot print invalid data"; + return; + } + + printf("MDRawSystemInfo\n"); + printf(" processor_architecture = 0x%x\n", + system_info_.processor_architecture); + printf(" processor_level = %d\n", + system_info_.processor_level); + printf(" processor_revision = 0x%x\n", + system_info_.processor_revision); + printf(" number_of_processors = %d\n", + system_info_.number_of_processors); + printf(" product_type = %d\n", + system_info_.product_type); + printf(" major_version = %d\n", + system_info_.major_version); + printf(" minor_version = %d\n", + system_info_.minor_version); + printf(" build_number = %d\n", + system_info_.build_number); + printf(" platform_id = 0x%x\n", + system_info_.platform_id); + printf(" csd_version_rva = 0x%x\n", + system_info_.csd_version_rva); + printf(" suite_mask = 0x%x\n", + system_info_.suite_mask); + if (system_info_.processor_architecture == MD_CPU_ARCHITECTURE_X86 || + system_info_.processor_architecture == MD_CPU_ARCHITECTURE_X86_WIN64) { + printf(" cpu.x86_cpu_info (valid):\n"); + } else { + printf(" cpu.x86_cpu_info (invalid):\n"); + } + for (unsigned int i = 0; i < 3; ++i) { + printf(" cpu.x86_cpu_info.vendor_id[%d] = 0x%x\n", + i, system_info_.cpu.x86_cpu_info.vendor_id[i]); + } + printf(" cpu.x86_cpu_info.version_information = 0x%x\n", + system_info_.cpu.x86_cpu_info.version_information); + printf(" cpu.x86_cpu_info.feature_information = 0x%x\n", + system_info_.cpu.x86_cpu_info.feature_information); + printf(" cpu.x86_cpu_info.amd_extended_cpu_features = 0x%x\n", + system_info_.cpu.x86_cpu_info.amd_extended_cpu_features); + if (system_info_.processor_architecture != MD_CPU_ARCHITECTURE_X86 && + system_info_.processor_architecture != MD_CPU_ARCHITECTURE_X86_WIN64) { + printf(" cpu.other_cpu_info (valid):\n"); + for (unsigned int i = 0; i < 2; ++i) { + printf(" cpu.other_cpu_info.processor_features[%d] = 0x%" PRIx64 "\n", + i, system_info_.cpu.other_cpu_info.processor_features[i]); + } + } + const string* csd_version = GetCSDVersion(); + if (csd_version) { + printf(" (csd_version) = \"%s\"\n", + csd_version->c_str()); + } else { + printf(" (csd_version) = (null)\n"); + } + const string* cpu_vendor = GetCPUVendor(); + if (cpu_vendor) { + printf(" (cpu_vendor) = \"%s\"\n", + cpu_vendor->c_str()); + } else { + printf(" (cpu_vendor) = (null)\n"); + } + printf("\n"); +} + + +// +// MinidumpMiscInfo +// + + 
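+// A minimal usage sketch for this stream (the dump path is illustrative
+// only; every call below is defined in this file):
+//
+//   Minidump dump("/tmp/example.dmp");
+//   if (dump.Read()) {
+//     MinidumpMiscInfo* misc_info = dump.GetMiscInfo();
+//     if (misc_info)
+//       misc_info->Print();  // process times, CPU speeds, time zone, ...
+//   }
+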
+MinidumpMiscInfo::MinidumpMiscInfo(Minidump* minidump) + : MinidumpStream(minidump), + misc_info_() { +} + + +bool MinidumpMiscInfo::Read(uint32_t expected_size) { + valid_ = false; + + if (expected_size != MD_MISCINFO_SIZE && + expected_size != MD_MISCINFO2_SIZE && + expected_size != MD_MISCINFO3_SIZE && + expected_size != MD_MISCINFO4_SIZE) { + BPLOG(ERROR) << "MinidumpMiscInfo size mismatch, " << expected_size + << " != " << MD_MISCINFO_SIZE << ", " << MD_MISCINFO2_SIZE + << ", " << MD_MISCINFO3_SIZE << ", " << MD_MISCINFO4_SIZE + << ")"; + return false; + } + + if (!minidump_->ReadBytes(&misc_info_, expected_size)) { + BPLOG(ERROR) << "MinidumpMiscInfo cannot read miscellaneous info"; + return false; + } + + if (minidump_->swap()) { + // Swap version 1 fields + Swap(&misc_info_.size_of_info); + Swap(&misc_info_.flags1); + Swap(&misc_info_.process_id); + Swap(&misc_info_.process_create_time); + Swap(&misc_info_.process_user_time); + Swap(&misc_info_.process_kernel_time); + if (misc_info_.size_of_info > MD_MISCINFO_SIZE) { + // Swap version 2 fields + Swap(&misc_info_.processor_max_mhz); + Swap(&misc_info_.processor_current_mhz); + Swap(&misc_info_.processor_mhz_limit); + Swap(&misc_info_.processor_max_idle_state); + Swap(&misc_info_.processor_current_idle_state); + } + if (misc_info_.size_of_info > MD_MISCINFO2_SIZE) { + // Swap version 3 fields + Swap(&misc_info_.process_integrity_level); + Swap(&misc_info_.process_execute_flags); + Swap(&misc_info_.protected_process); + Swap(&misc_info_.time_zone_id); + Swap(&misc_info_.time_zone); + } + if (misc_info_.size_of_info > MD_MISCINFO3_SIZE) { + // Swap version 4 fields. + // Do not swap UTF-16 strings. The swap is done as part of the + // conversion to UTF-8 (code follows below). + } + } + + if (expected_size != misc_info_.size_of_info) { + BPLOG(ERROR) << "MinidumpMiscInfo size mismatch, " << + expected_size << " != " << misc_info_.size_of_info; + return false; + } + + // Convert UTF-16 strings + if (misc_info_.size_of_info > MD_MISCINFO2_SIZE) { + // Convert UTF-16 strings in version 3 fields + ConvertUTF16BufferToUTF8String(misc_info_.time_zone.standard_name, + sizeof(misc_info_.time_zone.standard_name), + &standard_name_, minidump_->swap()); + ConvertUTF16BufferToUTF8String(misc_info_.time_zone.daylight_name, + sizeof(misc_info_.time_zone.daylight_name), + &daylight_name_, minidump_->swap()); + } + if (misc_info_.size_of_info > MD_MISCINFO3_SIZE) { + // Convert UTF-16 strings in version 4 fields + ConvertUTF16BufferToUTF8String(misc_info_.build_string, + sizeof(misc_info_.build_string), + &build_string_, minidump_->swap()); + ConvertUTF16BufferToUTF8String(misc_info_.dbg_bld_str, + sizeof(misc_info_.dbg_bld_str), + &dbg_bld_str_, minidump_->swap()); + } + + valid_ = true; + return true; +} + + +void MinidumpMiscInfo::Print() { + if (!valid_) { + BPLOG(ERROR) << "MinidumpMiscInfo cannot print invalid data"; + return; + } + + printf("MDRawMiscInfo\n"); + // Print version 1 fields + printf(" size_of_info = %d\n", misc_info_.size_of_info); + printf(" flags1 = 0x%x\n", misc_info_.flags1); + printf(" process_id = "); + PrintValueOrInvalid(misc_info_.flags1 & MD_MISCINFO_FLAGS1_PROCESS_ID, + kNumberFormatDecimal, misc_info_.process_id); + if (misc_info_.flags1 & MD_MISCINFO_FLAGS1_PROCESS_TIMES) { + printf(" process_create_time = 0x%x %s\n", + misc_info_.process_create_time, + TimeTToUTCString(misc_info_.process_create_time).c_str()); + } else { + printf(" process_create_time = (invalid)\n"); + } + printf(" process_user_time = "); + 
PrintValueOrInvalid(misc_info_.flags1 & MD_MISCINFO_FLAGS1_PROCESS_TIMES, + kNumberFormatDecimal, misc_info_.process_user_time); + printf(" process_kernel_time = "); + PrintValueOrInvalid(misc_info_.flags1 & MD_MISCINFO_FLAGS1_PROCESS_TIMES, + kNumberFormatDecimal, misc_info_.process_kernel_time); + if (misc_info_.size_of_info > MD_MISCINFO_SIZE) { + // Print version 2 fields + printf(" processor_max_mhz = "); + PrintValueOrInvalid(misc_info_.flags1 & + MD_MISCINFO_FLAGS1_PROCESSOR_POWER_INFO, + kNumberFormatDecimal, misc_info_.processor_max_mhz); + printf(" processor_current_mhz = "); + PrintValueOrInvalid(misc_info_.flags1 & + MD_MISCINFO_FLAGS1_PROCESSOR_POWER_INFO, + kNumberFormatDecimal, misc_info_.processor_current_mhz); + printf(" processor_mhz_limit = "); + PrintValueOrInvalid(misc_info_.flags1 & + MD_MISCINFO_FLAGS1_PROCESSOR_POWER_INFO, + kNumberFormatDecimal, misc_info_.processor_mhz_limit); + printf(" processor_max_idle_state = "); + PrintValueOrInvalid(misc_info_.flags1 & + MD_MISCINFO_FLAGS1_PROCESSOR_POWER_INFO, + kNumberFormatDecimal, + misc_info_.processor_max_idle_state); + printf(" processor_current_idle_state = "); + PrintValueOrInvalid(misc_info_.flags1 & + MD_MISCINFO_FLAGS1_PROCESSOR_POWER_INFO, + kNumberFormatDecimal, + misc_info_.processor_current_idle_state); + } + if (misc_info_.size_of_info > MD_MISCINFO2_SIZE) { + // Print version 3 fields + printf(" process_integrity_level = "); + PrintValueOrInvalid(misc_info_.flags1 & + MD_MISCINFO_FLAGS1_PROCESS_INTEGRITY, + kNumberFormatHexadecimal, + misc_info_.process_integrity_level); + printf(" process_execute_flags = "); + PrintValueOrInvalid(misc_info_.flags1 & + MD_MISCINFO_FLAGS1_PROCESS_EXECUTE_FLAGS, + kNumberFormatHexadecimal, + misc_info_.process_execute_flags); + printf(" protected_process = "); + PrintValueOrInvalid(misc_info_.flags1 & + MD_MISCINFO_FLAGS1_PROTECTED_PROCESS, + kNumberFormatDecimal, misc_info_.protected_process); + printf(" time_zone_id = "); + PrintValueOrInvalid(misc_info_.flags1 & MD_MISCINFO_FLAGS1_TIMEZONE, + kNumberFormatDecimal, misc_info_.time_zone_id); + if (misc_info_.flags1 & MD_MISCINFO_FLAGS1_TIMEZONE) { + printf(" time_zone.bias = %d\n", + misc_info_.time_zone.bias); + printf(" time_zone.standard_name = %s\n", standard_name_.c_str()); + printf(" time_zone.standard_date = " + "%04d-%02d-%02d (%d) %02d:%02d:%02d.%03d\n", + misc_info_.time_zone.standard_date.year, + misc_info_.time_zone.standard_date.month, + misc_info_.time_zone.standard_date.day, + misc_info_.time_zone.standard_date.day_of_week, + misc_info_.time_zone.standard_date.hour, + misc_info_.time_zone.standard_date.minute, + misc_info_.time_zone.standard_date.second, + misc_info_.time_zone.standard_date.milliseconds); + printf(" time_zone.standard_bias = %d\n", + misc_info_.time_zone.standard_bias); + printf(" time_zone.daylight_name = %s\n", daylight_name_.c_str()); + printf(" time_zone.daylight_date = " + "%04d-%02d-%02d (%d) %02d:%02d:%02d.%03d\n", + misc_info_.time_zone.daylight_date.year, + misc_info_.time_zone.daylight_date.month, + misc_info_.time_zone.daylight_date.day, + misc_info_.time_zone.daylight_date.day_of_week, + misc_info_.time_zone.daylight_date.hour, + misc_info_.time_zone.daylight_date.minute, + misc_info_.time_zone.daylight_date.second, + misc_info_.time_zone.daylight_date.milliseconds); + printf(" time_zone.daylight_bias = %d\n", + misc_info_.time_zone.daylight_bias); + } else { + printf(" time_zone.bias = (invalid)\n"); + printf(" time_zone.standard_name = (invalid)\n"); + printf(" 
time_zone.standard_date = (invalid)\n"); + printf(" time_zone.standard_bias = (invalid)\n"); + printf(" time_zone.daylight_name = (invalid)\n"); + printf(" time_zone.daylight_date = (invalid)\n"); + printf(" time_zone.daylight_bias = (invalid)\n"); + } + } + if (misc_info_.size_of_info > MD_MISCINFO3_SIZE) { + // Print version 4 fields + if (misc_info_.flags1 & MD_MISCINFO_FLAGS1_BUILDSTRING) { + printf(" build_string = %s\n", build_string_.c_str()); + printf(" dbg_bld_str = %s\n", dbg_bld_str_.c_str()); + } else { + printf(" build_string = (invalid)\n"); + printf(" dbg_bld_str = (invalid)\n"); + } + } + printf("\n"); +} + + +// +// MinidumpBreakpadInfo +// + + +MinidumpBreakpadInfo::MinidumpBreakpadInfo(Minidump* minidump) + : MinidumpStream(minidump), + breakpad_info_() { +} + + +bool MinidumpBreakpadInfo::Read(uint32_t expected_size) { + valid_ = false; + + if (expected_size != sizeof(breakpad_info_)) { + BPLOG(ERROR) << "MinidumpBreakpadInfo size mismatch, " << expected_size << + " != " << sizeof(breakpad_info_); + return false; + } + + if (!minidump_->ReadBytes(&breakpad_info_, sizeof(breakpad_info_))) { + BPLOG(ERROR) << "MinidumpBreakpadInfo cannot read Breakpad info"; + return false; + } + + if (minidump_->swap()) { + Swap(&breakpad_info_.validity); + Swap(&breakpad_info_.dump_thread_id); + Swap(&breakpad_info_.requesting_thread_id); + } + + valid_ = true; + return true; +} + + +bool MinidumpBreakpadInfo::GetDumpThreadID(uint32_t *thread_id) const { + BPLOG_IF(ERROR, !thread_id) << "MinidumpBreakpadInfo::GetDumpThreadID " + "requires |thread_id|"; + assert(thread_id); + *thread_id = 0; + + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpBreakpadInfo for GetDumpThreadID"; + return false; + } + + if (!(breakpad_info_.validity & MD_BREAKPAD_INFO_VALID_DUMP_THREAD_ID)) { + BPLOG(INFO) << "MinidumpBreakpadInfo has no dump thread"; + return false; + } + + *thread_id = breakpad_info_.dump_thread_id; + return true; +} + + +bool MinidumpBreakpadInfo::GetRequestingThreadID(uint32_t *thread_id) + const { + BPLOG_IF(ERROR, !thread_id) << "MinidumpBreakpadInfo::GetRequestingThreadID " + "requires |thread_id|"; + assert(thread_id); + *thread_id = 0; + + if (!thread_id || !valid_) { + BPLOG(ERROR) << "Invalid MinidumpBreakpadInfo for GetRequestingThreadID"; + return false; + } + + if (!(breakpad_info_.validity & + MD_BREAKPAD_INFO_VALID_REQUESTING_THREAD_ID)) { + BPLOG(INFO) << "MinidumpBreakpadInfo has no requesting thread"; + return false; + } + + *thread_id = breakpad_info_.requesting_thread_id; + return true; +} + + +void MinidumpBreakpadInfo::Print() { + if (!valid_) { + BPLOG(ERROR) << "MinidumpBreakpadInfo cannot print invalid data"; + return; + } + + printf("MDRawBreakpadInfo\n"); + printf(" validity = 0x%x\n", breakpad_info_.validity); + printf(" dump_thread_id = "); + PrintValueOrInvalid(breakpad_info_.validity & + MD_BREAKPAD_INFO_VALID_DUMP_THREAD_ID, + kNumberFormatHexadecimal, breakpad_info_.dump_thread_id); + printf(" requesting_thread_id = "); + PrintValueOrInvalid(breakpad_info_.validity & + MD_BREAKPAD_INFO_VALID_REQUESTING_THREAD_ID, + kNumberFormatHexadecimal, + breakpad_info_.requesting_thread_id); + + printf("\n"); +} + + +// +// MinidumpMemoryInfo +// + + +MinidumpMemoryInfo::MinidumpMemoryInfo(Minidump* minidump) + : MinidumpObject(minidump), + memory_info_() { +} + + +bool MinidumpMemoryInfo::IsExecutable() const { + uint32_t protection = + memory_info_.protection & MD_MEMORY_PROTECTION_ACCESS_MASK; + return protection == MD_MEMORY_PROTECT_EXECUTE || + protection == 
MD_MEMORY_PROTECT_EXECUTE_READ || + protection == MD_MEMORY_PROTECT_EXECUTE_READWRITE; +} + + +bool MinidumpMemoryInfo::IsWritable() const { + uint32_t protection = + memory_info_.protection & MD_MEMORY_PROTECTION_ACCESS_MASK; + return protection == MD_MEMORY_PROTECT_READWRITE || + protection == MD_MEMORY_PROTECT_WRITECOPY || + protection == MD_MEMORY_PROTECT_EXECUTE_READWRITE || + protection == MD_MEMORY_PROTECT_EXECUTE_WRITECOPY; +} + + +bool MinidumpMemoryInfo::Read() { + valid_ = false; + + if (!minidump_->ReadBytes(&memory_info_, sizeof(memory_info_))) { + BPLOG(ERROR) << "MinidumpMemoryInfo cannot read memory info"; + return false; + } + + if (minidump_->swap()) { + Swap(&memory_info_.base_address); + Swap(&memory_info_.allocation_base); + Swap(&memory_info_.allocation_protection); + Swap(&memory_info_.region_size); + Swap(&memory_info_.state); + Swap(&memory_info_.protection); + Swap(&memory_info_.type); + } + + // Check for base + size overflow or undersize. + if (memory_info_.region_size == 0 || + memory_info_.region_size > numeric_limits::max() - + memory_info_.base_address) { + BPLOG(ERROR) << "MinidumpMemoryInfo has a memory region problem, " << + HexString(memory_info_.base_address) << "+" << + HexString(memory_info_.region_size); + return false; + } + + valid_ = true; + return true; +} + + +void MinidumpMemoryInfo::Print() { + if (!valid_) { + BPLOG(ERROR) << "MinidumpMemoryInfo cannot print invalid data"; + return; + } + + printf("MDRawMemoryInfo\n"); + printf(" base_address = 0x%" PRIx64 "\n", + memory_info_.base_address); + printf(" allocation_base = 0x%" PRIx64 "\n", + memory_info_.allocation_base); + printf(" allocation_protection = 0x%x\n", + memory_info_.allocation_protection); + printf(" region_size = 0x%" PRIx64 "\n", memory_info_.region_size); + printf(" state = 0x%x\n", memory_info_.state); + printf(" protection = 0x%x\n", memory_info_.protection); + printf(" type = 0x%x\n", memory_info_.type); +} + + +// +// MinidumpMemoryInfoList +// + + +MinidumpMemoryInfoList::MinidumpMemoryInfoList(Minidump* minidump) + : MinidumpStream(minidump), + range_map_(new RangeMap()), + infos_(NULL), + info_count_(0) { +} + + +MinidumpMemoryInfoList::~MinidumpMemoryInfoList() { + delete range_map_; + delete infos_; +} + + +bool MinidumpMemoryInfoList::Read(uint32_t expected_size) { + // Invalidate cached data. + delete infos_; + infos_ = NULL; + range_map_->Clear(); + info_count_ = 0; + + valid_ = false; + + MDRawMemoryInfoList header; + if (expected_size < sizeof(MDRawMemoryInfoList)) { + BPLOG(ERROR) << "MinidumpMemoryInfoList header size mismatch, " << + expected_size << " < " << sizeof(MDRawMemoryInfoList); + return false; + } + if (!minidump_->ReadBytes(&header, sizeof(header))) { + BPLOG(ERROR) << "MinidumpMemoryInfoList could not read header"; + return false; + } + + if (minidump_->swap()) { + Swap(&header.size_of_header); + Swap(&header.size_of_entry); + Swap(&header.number_of_entries); + } + + // Sanity check that the header is the expected size. + // TODO(ted): could possibly handle this more gracefully, assuming + // that future versions of the structs would be backwards-compatible. + if (header.size_of_header != sizeof(MDRawMemoryInfoList)) { + BPLOG(ERROR) << "MinidumpMemoryInfoList header size mismatch, " << + header.size_of_header << " != " << + sizeof(MDRawMemoryInfoList); + return false; + } + + // Sanity check that the entries are the expected size. 
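+  // Each entry is read below with ReadBytes(sizeof(MDRawMemoryInfo)), and the
+  // loop assumes the file offset is already correct after the previous read,
+  // so an on-disk entry size that differs from the in-memory struct would
+  // desynchronize every subsequent entry.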
+ if (header.size_of_entry != sizeof(MDRawMemoryInfo)) { + BPLOG(ERROR) << "MinidumpMemoryInfoList entry size mismatch, " << + header.size_of_entry << " != " << + sizeof(MDRawMemoryInfo); + return false; + } + + if (header.number_of_entries > + numeric_limits::max() / sizeof(MDRawMemoryInfo)) { + BPLOG(ERROR) << "MinidumpMemoryInfoList info count " << + header.number_of_entries << + " would cause multiplication overflow"; + return false; + } + + if (expected_size != sizeof(MDRawMemoryInfoList) + + header.number_of_entries * sizeof(MDRawMemoryInfo)) { + BPLOG(ERROR) << "MinidumpMemoryInfoList size mismatch, " << expected_size << + " != " << sizeof(MDRawMemoryInfoList) + + header.number_of_entries * sizeof(MDRawMemoryInfo); + return false; + } + + // Check for data loss when converting header.number_of_entries from + // uint64_t into MinidumpMemoryInfos::size_type (uint32_t) + MinidumpMemoryInfos::size_type header_number_of_entries = + static_cast(header.number_of_entries); + if (static_cast(header_number_of_entries) != + header.number_of_entries) { + BPLOG(ERROR) << "Data loss detected when converting " + "the header's number_of_entries"; + return false; + } + + if (header.number_of_entries != 0) { + scoped_ptr infos( + new MinidumpMemoryInfos(header_number_of_entries, + MinidumpMemoryInfo(minidump_))); + + for (unsigned int index = 0; + index < header.number_of_entries; + ++index) { + MinidumpMemoryInfo* info = &(*infos)[index]; + + // Assume that the file offset is correct after the last read. + if (!info->Read()) { + BPLOG(ERROR) << "MinidumpMemoryInfoList cannot read info " << + index << "/" << header.number_of_entries; + return false; + } + + uint64_t base_address = info->GetBase(); + uint64_t region_size = info->GetSize(); + + if (!range_map_->StoreRange(base_address, region_size, index)) { + BPLOG(ERROR) << "MinidumpMemoryInfoList could not store" + " memory region " << + index << "/" << header.number_of_entries << ", " << + HexString(base_address) << "+" << + HexString(region_size); + return false; + } + } + + infos_ = infos.release(); + } + + info_count_ = header_number_of_entries; + + valid_ = true; + return true; +} + + +const MinidumpMemoryInfo* MinidumpMemoryInfoList::GetMemoryInfoAtIndex( + unsigned int index) const { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpMemoryInfoList for GetMemoryInfoAtIndex"; + return NULL; + } + + if (index >= info_count_) { + BPLOG(ERROR) << "MinidumpMemoryInfoList index out of range: " << + index << "/" << info_count_; + return NULL; + } + + return &(*infos_)[index]; +} + + +const MinidumpMemoryInfo* MinidumpMemoryInfoList::GetMemoryInfoForAddress( + uint64_t address) const { + if (!valid_) { + BPLOG(ERROR) << "Invalid MinidumpMemoryInfoList for" + " GetMemoryInfoForAddress"; + return NULL; + } + + unsigned int info_index; + if (!range_map_->RetrieveRange(address, &info_index, NULL, NULL)) { + BPLOG(INFO) << "MinidumpMemoryInfoList has no memory info at " << + HexString(address); + return NULL; + } + + return GetMemoryInfoAtIndex(info_index); +} + + +void MinidumpMemoryInfoList::Print() { + if (!valid_) { + BPLOG(ERROR) << "MinidumpMemoryInfoList cannot print invalid data"; + return; + } + + printf("MinidumpMemoryInfoList\n"); + printf(" info_count = %d\n", info_count_); + printf("\n"); + + for (unsigned int info_index = 0; + info_index < info_count_; + ++info_index) { + printf("info[%d]\n", info_index); + (*infos_)[info_index].Print(); + printf("\n"); + } +} + + +// +// Minidump +// + + +uint32_t Minidump::max_streams_ = 128; +unsigned 
int Minidump::max_string_length_ = 1024; + + +Minidump::Minidump(const string& path) + : header_(), + directory_(NULL), + stream_map_(new MinidumpStreamMap()), + path_(path), + stream_(NULL), + swap_(false), + valid_(false) { +} + +Minidump::Minidump(istream& stream) + : header_(), + directory_(NULL), + stream_map_(new MinidumpStreamMap()), + path_(), + stream_(&stream), + swap_(false), + valid_(false) { +} + +Minidump::~Minidump() { + if (stream_) { + BPLOG(INFO) << "Minidump closing minidump"; + } + if (!path_.empty()) { + delete stream_; + } + delete directory_; + delete stream_map_; +} + + +bool Minidump::Open() { + if (stream_ != NULL) { + BPLOG(INFO) << "Minidump reopening minidump " << path_; + + // The file is already open. Seek to the beginning, which is the position + // the file would be at if it were opened anew. + return SeekSet(0); + } + + stream_ = new ifstream(path_.c_str(), std::ios::in | std::ios::binary); + if (!stream_ || !stream_->good()) { + string error_string; + int error_code = ErrnoString(&error_string); + BPLOG(ERROR) << "Minidump could not open minidump " << path_ << + ", error " << error_code << ": " << error_string; + return false; + } + + BPLOG(INFO) << "Minidump opened minidump " << path_; + return true; +} + +bool Minidump::GetContextCPUFlagsFromSystemInfo(uint32_t *context_cpu_flags) { + // Initialize output parameters + *context_cpu_flags = 0; + + // Save the current stream position + off_t saved_position = Tell(); + if (saved_position == -1) { + // Failed to save the current stream position. + // Returns true because the current position of the stream is preserved. + return true; + } + + const MDRawSystemInfo* system_info = + GetSystemInfo() ? GetSystemInfo()->system_info() : NULL; + + if (system_info != NULL) { + switch (system_info->processor_architecture) { + case MD_CPU_ARCHITECTURE_X86: + *context_cpu_flags = MD_CONTEXT_X86; + break; + case MD_CPU_ARCHITECTURE_MIPS: + *context_cpu_flags = MD_CONTEXT_MIPS; + break; + case MD_CPU_ARCHITECTURE_ALPHA: + *context_cpu_flags = MD_CONTEXT_ALPHA; + break; + case MD_CPU_ARCHITECTURE_PPC: + *context_cpu_flags = MD_CONTEXT_PPC; + break; + case MD_CPU_ARCHITECTURE_PPC64: + *context_cpu_flags = MD_CONTEXT_PPC64; + break; + case MD_CPU_ARCHITECTURE_SHX: + *context_cpu_flags = MD_CONTEXT_SHX; + break; + case MD_CPU_ARCHITECTURE_ARM: + *context_cpu_flags = MD_CONTEXT_ARM; + break; + case MD_CPU_ARCHITECTURE_ARM64: + *context_cpu_flags = MD_CONTEXT_ARM64; + break; + case MD_CPU_ARCHITECTURE_IA64: + *context_cpu_flags = MD_CONTEXT_IA64; + break; + case MD_CPU_ARCHITECTURE_ALPHA64: + *context_cpu_flags = 0; + break; + case MD_CPU_ARCHITECTURE_MSIL: + *context_cpu_flags = 0; + break; + case MD_CPU_ARCHITECTURE_AMD64: + *context_cpu_flags = MD_CONTEXT_AMD64; + break; + case MD_CPU_ARCHITECTURE_X86_WIN64: + *context_cpu_flags = 0; + break; + case MD_CPU_ARCHITECTURE_SPARC: + *context_cpu_flags = MD_CONTEXT_SPARC; + break; + case MD_CPU_ARCHITECTURE_UNKNOWN: + *context_cpu_flags = 0; + break; + default: + *context_cpu_flags = 0; + break; + } + } + + // Restore position and return + return SeekSet(saved_position); +} + + +bool Minidump::Read() { + // Invalidate cached data. 
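+  // Read() drives the whole parse: (re)open the stream, read the MDRawHeader
+  // and detect byte order from its signature, then load the stream directory
+  // and index it by stream type for later GetStream() lookups. That starts
+  // with dropping any state left over from a previous Read() call.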
+ delete directory_; + directory_ = NULL; + stream_map_->clear(); + + valid_ = false; + + if (!Open()) { + BPLOG(ERROR) << "Minidump cannot open minidump"; + return false; + } + + if (!ReadBytes(&header_, sizeof(MDRawHeader))) { + BPLOG(ERROR) << "Minidump cannot read header"; + return false; + } + + if (header_.signature != MD_HEADER_SIGNATURE) { + // The file may be byte-swapped. Under the present architecture, these + // classes don't know or need to know what CPU (or endianness) the + // minidump was produced on in order to parse it. Use the signature as + // a byte order marker. + uint32_t signature_swapped = header_.signature; + Swap(&signature_swapped); + if (signature_swapped != MD_HEADER_SIGNATURE) { + // This isn't a minidump or a byte-swapped minidump. + BPLOG(ERROR) << "Minidump header signature mismatch: (" << + HexString(header_.signature) << ", " << + HexString(signature_swapped) << ") != " << + HexString(MD_HEADER_SIGNATURE); + return false; + } + swap_ = true; + } else { + // The file is not byte-swapped. Set swap_ false (it may have been true + // if the object is being reused?) + swap_ = false; + } + + BPLOG(INFO) << "Minidump " << (swap_ ? "" : "not ") << + "byte-swapping minidump"; + + if (swap_) { + Swap(&header_.signature); + Swap(&header_.version); + Swap(&header_.stream_count); + Swap(&header_.stream_directory_rva); + Swap(&header_.checksum); + Swap(&header_.time_date_stamp); + Swap(&header_.flags); + } + + // Version check. The high 16 bits of header_.version contain something + // else "implementation specific." + if ((header_.version & 0x0000ffff) != MD_HEADER_VERSION) { + BPLOG(ERROR) << "Minidump version mismatch: " << + HexString(header_.version & 0x0000ffff) << " != " << + HexString(MD_HEADER_VERSION); + return false; + } + + if (!SeekSet(header_.stream_directory_rva)) { + BPLOG(ERROR) << "Minidump cannot seek to stream directory"; + return false; + } + + if (header_.stream_count > max_streams_) { + BPLOG(ERROR) << "Minidump stream count " << header_.stream_count << + " exceeds maximum " << max_streams_; + return false; + } + + if (header_.stream_count != 0) { + scoped_ptr directory( + new MinidumpDirectoryEntries(header_.stream_count)); + + // Read the entire array in one fell swoop, instead of reading one entry + // at a time in the loop. + if (!ReadBytes(&(*directory)[0], + sizeof(MDRawDirectory) * header_.stream_count)) { + BPLOG(ERROR) << "Minidump cannot read stream directory"; + return false; + } + + for (unsigned int stream_index = 0; + stream_index < header_.stream_count; + ++stream_index) { + MDRawDirectory* directory_entry = &(*directory)[stream_index]; + + if (swap_) { + Swap(&directory_entry->stream_type); + Swap(&directory_entry->location); + } + + // Initialize the stream_map_ map, which speeds locating a stream by + // type. + unsigned int stream_type = directory_entry->stream_type; + switch (stream_type) { + case MD_THREAD_LIST_STREAM: + case MD_MODULE_LIST_STREAM: + case MD_MEMORY_LIST_STREAM: + case MD_EXCEPTION_STREAM: + case MD_SYSTEM_INFO_STREAM: + case MD_MISC_INFO_STREAM: + case MD_BREAKPAD_INFO_STREAM: { + if (stream_map_->find(stream_type) != stream_map_->end()) { + // Another stream with this type was already found. A minidump + // file should contain at most one of each of these stream types. 
+ BPLOG(ERROR) << "Minidump found multiple streams of type " << + stream_type << ", but can only deal with one"; + return false; + } + // Fall through to default + } + + default: { + // Overwrites for stream types other than those above, but it's + // expected to be the user's burden in that case. + (*stream_map_)[stream_type].stream_index = stream_index; + } + } + } + + directory_ = directory.release(); + } + + valid_ = true; + return true; +} + + +MinidumpThreadList* Minidump::GetThreadList() { + MinidumpThreadList* thread_list; + return GetStream(&thread_list); +} + + +MinidumpModuleList* Minidump::GetModuleList() { + MinidumpModuleList* module_list; + return GetStream(&module_list); +} + + +MinidumpMemoryList* Minidump::GetMemoryList() { + MinidumpMemoryList* memory_list; + return GetStream(&memory_list); +} + + +MinidumpException* Minidump::GetException() { + MinidumpException* exception; + return GetStream(&exception); +} + +MinidumpAssertion* Minidump::GetAssertion() { + MinidumpAssertion* assertion; + return GetStream(&assertion); +} + + +MinidumpSystemInfo* Minidump::GetSystemInfo() { + MinidumpSystemInfo* system_info; + return GetStream(&system_info); +} + + +MinidumpMiscInfo* Minidump::GetMiscInfo() { + MinidumpMiscInfo* misc_info; + return GetStream(&misc_info); +} + + +MinidumpBreakpadInfo* Minidump::GetBreakpadInfo() { + MinidumpBreakpadInfo* breakpad_info; + return GetStream(&breakpad_info); +} + +MinidumpMemoryInfoList* Minidump::GetMemoryInfoList() { + MinidumpMemoryInfoList* memory_info_list; + return GetStream(&memory_info_list); +} + +static const char* get_stream_name(uint32_t stream_type) { + switch (stream_type) { + case MD_UNUSED_STREAM: + return "MD_UNUSED_STREAM"; + case MD_RESERVED_STREAM_0: + return "MD_RESERVED_STREAM_0"; + case MD_RESERVED_STREAM_1: + return "MD_RESERVED_STREAM_1"; + case MD_THREAD_LIST_STREAM: + return "MD_THREAD_LIST_STREAM"; + case MD_MODULE_LIST_STREAM: + return "MD_MODULE_LIST_STREAM"; + case MD_MEMORY_LIST_STREAM: + return "MD_MEMORY_LIST_STREAM"; + case MD_EXCEPTION_STREAM: + return "MD_EXCEPTION_STREAM"; + case MD_SYSTEM_INFO_STREAM: + return "MD_SYSTEM_INFO_STREAM"; + case MD_THREAD_EX_LIST_STREAM: + return "MD_THREAD_EX_LIST_STREAM"; + case MD_MEMORY_64_LIST_STREAM: + return "MD_MEMORY_64_LIST_STREAM"; + case MD_COMMENT_STREAM_A: + return "MD_COMMENT_STREAM_A"; + case MD_COMMENT_STREAM_W: + return "MD_COMMENT_STREAM_W"; + case MD_HANDLE_DATA_STREAM: + return "MD_HANDLE_DATA_STREAM"; + case MD_FUNCTION_TABLE_STREAM: + return "MD_FUNCTION_TABLE_STREAM"; + case MD_UNLOADED_MODULE_LIST_STREAM: + return "MD_UNLOADED_MODULE_LIST_STREAM"; + case MD_MISC_INFO_STREAM: + return "MD_MISC_INFO_STREAM"; + case MD_MEMORY_INFO_LIST_STREAM: + return "MD_MEMORY_INFO_LIST_STREAM"; + case MD_THREAD_INFO_LIST_STREAM: + return "MD_THREAD_INFO_LIST_STREAM"; + case MD_HANDLE_OPERATION_LIST_STREAM: + return "MD_HANDLE_OPERATION_LIST_STREAM"; + case MD_LAST_RESERVED_STREAM: + return "MD_LAST_RESERVED_STREAM"; + case MD_BREAKPAD_INFO_STREAM: + return "MD_BREAKPAD_INFO_STREAM"; + case MD_ASSERTION_INFO_STREAM: + return "MD_ASSERTION_INFO_STREAM"; + case MD_LINUX_CPU_INFO: + return "MD_LINUX_CPU_INFO"; + case MD_LINUX_PROC_STATUS: + return "MD_LINUX_PROC_STATUS"; + case MD_LINUX_LSB_RELEASE: + return "MD_LINUX_LSB_RELEASE"; + case MD_LINUX_CMD_LINE: + return "MD_LINUX_CMD_LINE"; + case MD_LINUX_ENVIRON: + return "MD_LINUX_ENVIRON"; + case MD_LINUX_AUXV: + return "MD_LINUX_AUXV"; + case MD_LINUX_MAPS: + return "MD_LINUX_MAPS"; + case MD_LINUX_DSO_DEBUG: + return 
"MD_LINUX_DSO_DEBUG"; + default: + return "unknown"; + } +} + +void Minidump::Print() { + if (!valid_) { + BPLOG(ERROR) << "Minidump cannot print invalid data"; + return; + } + + printf("MDRawHeader\n"); + printf(" signature = 0x%x\n", header_.signature); + printf(" version = 0x%x\n", header_.version); + printf(" stream_count = %d\n", header_.stream_count); + printf(" stream_directory_rva = 0x%x\n", header_.stream_directory_rva); + printf(" checksum = 0x%x\n", header_.checksum); + printf(" time_date_stamp = 0x%x %s\n", + header_.time_date_stamp, + TimeTToUTCString(header_.time_date_stamp).c_str()); + printf(" flags = 0x%" PRIx64 "\n", header_.flags); + printf("\n"); + + for (unsigned int stream_index = 0; + stream_index < header_.stream_count; + ++stream_index) { + MDRawDirectory* directory_entry = &(*directory_)[stream_index]; + + printf("mDirectory[%d]\n", stream_index); + printf("MDRawDirectory\n"); + printf(" stream_type = 0x%x (%s)\n", directory_entry->stream_type, + get_stream_name(directory_entry->stream_type)); + printf(" location.data_size = %d\n", + directory_entry->location.data_size); + printf(" location.rva = 0x%x\n", directory_entry->location.rva); + printf("\n"); + } + + printf("Streams:\n"); + for (MinidumpStreamMap::const_iterator iterator = stream_map_->begin(); + iterator != stream_map_->end(); + ++iterator) { + uint32_t stream_type = iterator->first; + MinidumpStreamInfo info = iterator->second; + printf(" stream type 0x%x (%s) at index %d\n", stream_type, + get_stream_name(stream_type), + info.stream_index); + } + printf("\n"); +} + + +const MDRawDirectory* Minidump::GetDirectoryEntryAtIndex(unsigned int index) + const { + if (!valid_) { + BPLOG(ERROR) << "Invalid Minidump for GetDirectoryEntryAtIndex"; + return NULL; + } + + if (index >= header_.stream_count) { + BPLOG(ERROR) << "Minidump stream directory index out of range: " << + index << "/" << header_.stream_count; + return NULL; + } + + return &(*directory_)[index]; +} + + +bool Minidump::ReadBytes(void* bytes, size_t count) { + // Can't check valid_ because Read needs to call this method before + // validity can be determined. + if (!stream_) { + return false; + } + stream_->read(static_cast(bytes), count); + std::streamsize bytes_read = stream_->gcount(); + if (bytes_read == -1) { + string error_string; + int error_code = ErrnoString(&error_string); + BPLOG(ERROR) << "ReadBytes: error " << error_code << ": " << error_string; + return false; + } + + // Convert to size_t and check for data loss + size_t bytes_read_converted = static_cast(bytes_read); + if (static_cast(bytes_read_converted) != bytes_read) { + BPLOG(ERROR) << "ReadBytes: conversion data loss detected when converting " + << bytes_read << " to " << bytes_read_converted; + return false; + } + + if (bytes_read_converted != count) { + BPLOG(ERROR) << "ReadBytes: read " << bytes_read_converted << "/" << count; + return false; + } + + return true; +} + + +bool Minidump::SeekSet(off_t offset) { + // Can't check valid_ because Read needs to call this method before + // validity can be determined. 
+ if (!stream_) { + return false; + } + stream_->seekg(offset, std::ios_base::beg); + if (!stream_->good()) { + string error_string; + int error_code = ErrnoString(&error_string); + BPLOG(ERROR) << "SeekSet: error " << error_code << ": " << error_string; + return false; + } + return true; +} + +off_t Minidump::Tell() { + if (!valid_ || !stream_) { + return (off_t)-1; + } + + // Check for conversion data loss + std::streamoff std_streamoff = stream_->tellg(); + off_t rv = static_cast(std_streamoff); + if (static_cast(rv) == std_streamoff) { + return rv; + } else { + BPLOG(ERROR) << "Data loss detected"; + return (off_t)-1; + } +} + + +string* Minidump::ReadString(off_t offset) { + if (!valid_) { + BPLOG(ERROR) << "Invalid Minidump for ReadString"; + return NULL; + } + if (!SeekSet(offset)) { + BPLOG(ERROR) << "ReadString could not seek to string at offset " << offset; + return NULL; + } + + uint32_t bytes; + if (!ReadBytes(&bytes, sizeof(bytes))) { + BPLOG(ERROR) << "ReadString could not read string size at offset " << + offset; + return NULL; + } + if (swap_) + Swap(&bytes); + + if (bytes % 2 != 0) { + BPLOG(ERROR) << "ReadString found odd-sized " << bytes << + "-byte string at offset " << offset; + return NULL; + } + unsigned int utf16_words = bytes / 2; + + if (utf16_words > max_string_length_) { + BPLOG(ERROR) << "ReadString string length " << utf16_words << + " exceeds maximum " << max_string_length_ << + " at offset " << offset; + return NULL; + } + + vector string_utf16(utf16_words); + + if (utf16_words) { + if (!ReadBytes(&string_utf16[0], bytes)) { + BPLOG(ERROR) << "ReadString could not read " << bytes << + "-byte string at offset " << offset; + return NULL; + } + } + + return UTF16ToUTF8(string_utf16, swap_); +} + + +bool Minidump::SeekToStreamType(uint32_t stream_type, + uint32_t* stream_length) { + BPLOG_IF(ERROR, !stream_length) << "Minidump::SeekToStreamType requires " + "|stream_length|"; + assert(stream_length); + *stream_length = 0; + + if (!valid_) { + BPLOG(ERROR) << "Invalid Mindump for SeekToStreamType"; + return false; + } + + MinidumpStreamMap::const_iterator iterator = stream_map_->find(stream_type); + if (iterator == stream_map_->end()) { + // This stream type didn't exist in the directory. + BPLOG(INFO) << "SeekToStreamType: type " << stream_type << " not present"; + return false; + } + + MinidumpStreamInfo info = iterator->second; + if (info.stream_index >= header_.stream_count) { + BPLOG(ERROR) << "SeekToStreamType: type " << stream_type << + " out of range: " << + info.stream_index << "/" << header_.stream_count; + return false; + } + + MDRawDirectory* directory_entry = &(*directory_)[info.stream_index]; + if (!SeekSet(directory_entry->location.rva)) { + BPLOG(ERROR) << "SeekToStreamType could not seek to stream type " << + stream_type; + return false; + } + + *stream_length = directory_entry->location.data_size; + + return true; +} + + +template +T* Minidump::GetStream(T** stream) { + // stream is a garbage parameter that's present only to account for C++'s + // inability to overload a method based solely on its return type. 
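+  //
+  // A typical call site, as in the typed accessors above, looks like:
+  //   MinidumpThreadList* thread_list;
+  //   return GetStream(&thread_list);  // T is deduced as MinidumpThreadList
+  // The parsed stream is cached in stream_map_, so repeated lookups return
+  // the same object without re-reading the file.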
+ + const uint32_t stream_type = T::kStreamType; + + BPLOG_IF(ERROR, !stream) << "Minidump::GetStream type " << stream_type << + " requires |stream|"; + assert(stream); + *stream = NULL; + + if (!valid_) { + BPLOG(ERROR) << "Invalid Minidump for GetStream type " << stream_type; + return NULL; + } + + MinidumpStreamMap::iterator iterator = stream_map_->find(stream_type); + if (iterator == stream_map_->end()) { + // This stream type didn't exist in the directory. + BPLOG(INFO) << "GetStream: type " << stream_type << " not present"; + return NULL; + } + + // Get a pointer so that the stored stream field can be altered. + MinidumpStreamInfo* info = &iterator->second; + + if (info->stream) { + // This cast is safe because info.stream is only populated by this + // method, and there is a direct correlation between T and stream_type. + *stream = static_cast(info->stream); + return *stream; + } + + uint32_t stream_length; + if (!SeekToStreamType(stream_type, &stream_length)) { + BPLOG(ERROR) << "GetStream could not seek to stream type " << stream_type; + return NULL; + } + + scoped_ptr new_stream(new T(this)); + + if (!new_stream->Read(stream_length)) { + BPLOG(ERROR) << "GetStream could not read stream type " << stream_type; + return NULL; + } + + *stream = new_stream.release(); + info->stream = *stream; + return *stream; +} + + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_dump.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_dump.cc new file mode 100644 index 0000000000..343f044288 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_dump.cc @@ -0,0 +1,213 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// minidump_dump.cc: Print the contents of a minidump file in somewhat +// readable text. 
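+// A typical invocation (the file name is illustrative):
+//   minidump_dump crash.dmp
+// prints every recognized stream and then the raw Linux text streams
+// (command line, environment, /proc status, cpuinfo, maps) carried in the
+// dump.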
+// +// Author: Mark Mentovai + +#include +#include + +#include "common/scoped_ptr.h" +#include "google_breakpad/processor/minidump.h" +#include "processor/logging.h" + +namespace { + +using google_breakpad::Minidump; +using google_breakpad::MinidumpThreadList; +using google_breakpad::MinidumpModuleList; +using google_breakpad::MinidumpMemoryInfoList; +using google_breakpad::MinidumpMemoryList; +using google_breakpad::MinidumpException; +using google_breakpad::MinidumpAssertion; +using google_breakpad::MinidumpSystemInfo; +using google_breakpad::MinidumpMiscInfo; +using google_breakpad::MinidumpBreakpadInfo; + +static void DumpRawStream(Minidump *minidump, + uint32_t stream_type, + const char *stream_name, + int *errors) { + uint32_t length = 0; + if (!minidump->SeekToStreamType(stream_type, &length)) { + return; + } + + printf("Stream %s:\n", stream_name); + + if (length == 0) { + printf("\n"); + return; + } + std::vector contents(length); + if (!minidump->ReadBytes(&contents[0], length)) { + ++*errors; + BPLOG(ERROR) << "minidump.ReadBytes failed"; + return; + } + size_t current_offset = 0; + while (current_offset < length) { + size_t remaining = length - current_offset; + // Printf requires an int and direct casting from size_t results + // in compatibility warnings. + uint32_t int_remaining = remaining; + printf("%.*s", int_remaining, &contents[current_offset]); + char *next_null = reinterpret_cast( + memchr(&contents[current_offset], 0, remaining)); + if (next_null == NULL) + break; + printf("\\0\n"); + size_t null_offset = next_null - &contents[0]; + current_offset = null_offset + 1; + } + printf("\n\n"); +} + +static bool PrintMinidumpDump(const char *minidump_file) { + Minidump minidump(minidump_file); + if (!minidump.Read()) { + BPLOG(ERROR) << "minidump.Read() failed"; + return false; + } + minidump.Print(); + + int errors = 0; + + MinidumpThreadList *thread_list = minidump.GetThreadList(); + if (!thread_list) { + ++errors; + BPLOG(ERROR) << "minidump.GetThreadList() failed"; + } else { + thread_list->Print(); + } + + MinidumpModuleList *module_list = minidump.GetModuleList(); + if (!module_list) { + ++errors; + BPLOG(ERROR) << "minidump.GetModuleList() failed"; + } else { + module_list->Print(); + } + + MinidumpMemoryList *memory_list = minidump.GetMemoryList(); + if (!memory_list) { + ++errors; + BPLOG(ERROR) << "minidump.GetMemoryList() failed"; + } else { + memory_list->Print(); + } + + MinidumpException *exception = minidump.GetException(); + if (!exception) { + BPLOG(INFO) << "minidump.GetException() failed"; + } else { + exception->Print(); + } + + MinidumpAssertion *assertion = minidump.GetAssertion(); + if (!assertion) { + BPLOG(INFO) << "minidump.GetAssertion() failed"; + } else { + assertion->Print(); + } + + MinidumpSystemInfo *system_info = minidump.GetSystemInfo(); + if (!system_info) { + ++errors; + BPLOG(ERROR) << "minidump.GetSystemInfo() failed"; + } else { + system_info->Print(); + } + + MinidumpMiscInfo *misc_info = minidump.GetMiscInfo(); + if (!misc_info) { + ++errors; + BPLOG(ERROR) << "minidump.GetMiscInfo() failed"; + } else { + misc_info->Print(); + } + + MinidumpBreakpadInfo *breakpad_info = minidump.GetBreakpadInfo(); + if (!breakpad_info) { + // Breakpad info is optional, so don't treat this as an error. 
+ BPLOG(INFO) << "minidump.GetBreakpadInfo() failed"; + } else { + breakpad_info->Print(); + } + + MinidumpMemoryInfoList *memory_info_list = minidump.GetMemoryInfoList(); + if (!memory_info_list) { + ++errors; + BPLOG(ERROR) << "minidump.GetMemoryInfoList() failed"; + } else { + memory_info_list->Print(); + } + + DumpRawStream(&minidump, + MD_LINUX_CMD_LINE, + "MD_LINUX_CMD_LINE", + &errors); + DumpRawStream(&minidump, + MD_LINUX_ENVIRON, + "MD_LINUX_ENVIRON", + &errors); + DumpRawStream(&minidump, + MD_LINUX_LSB_RELEASE, + "MD_LINUX_LSB_RELEASE", + &errors); + DumpRawStream(&minidump, + MD_LINUX_PROC_STATUS, + "MD_LINUX_PROC_STATUS", + &errors); + DumpRawStream(&minidump, + MD_LINUX_CPU_INFO, + "MD_LINUX_CPU_INFO", + &errors); + DumpRawStream(&minidump, + MD_LINUX_MAPS, + "MD_LINUX_MAPS", + &errors); + + return errors == 0; +} + +} // namespace + +int main(int argc, char **argv) { + BPLOG_INIT(&argc, &argv); + + if (argc != 2) { + fprintf(stderr, "usage: %s \n", argv[0]); + return 1; + } + + return PrintMinidumpDump(argv[1]) ? 0 : 1; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_dump_test b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_dump_test new file mode 100644 index 0000000000..fb62ace735 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_dump_test @@ -0,0 +1,36 @@ +#!/bin/sh + +# Copyright (c) 2006, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +testdata_dir=$srcdir/src/processor/testdata +./src/processor/minidump_dump $testdata_dir/minidump2.dmp | \ + tr -d '\015' | \ + diff -u $testdata_dir/minidump2.dump.out - +exit $? diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_processor.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_processor.cc new file mode 100644 index 0000000000..71dedaba75 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_processor.cc @@ -0,0 +1,1536 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include "google_breakpad/processor/minidump_processor.h" + +#include +#include + +#include + +#include "common/scoped_ptr.h" +#include "common/using_std_string.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/minidump.h" +#include "google_breakpad/processor/process_state.h" +#include "google_breakpad/processor/exploitability.h" +#include "google_breakpad/processor/stack_frame_symbolizer.h" +#include "processor/logging.h" +#include "processor/stackwalker_x86.h" +#include "processor/symbolic_constants_win.h" + +namespace google_breakpad { + +MinidumpProcessor::MinidumpProcessor(SymbolSupplier *supplier, + SourceLineResolverInterface *resolver) + : frame_symbolizer_(new StackFrameSymbolizer(supplier, resolver)), + own_frame_symbolizer_(true), + enable_exploitability_(false) { +} + +MinidumpProcessor::MinidumpProcessor(SymbolSupplier *supplier, + SourceLineResolverInterface *resolver, + bool enable_exploitability) + : frame_symbolizer_(new StackFrameSymbolizer(supplier, resolver)), + own_frame_symbolizer_(true), + enable_exploitability_(enable_exploitability) { +} + +MinidumpProcessor::MinidumpProcessor(StackFrameSymbolizer *frame_symbolizer, + bool enable_exploitability) + : frame_symbolizer_(frame_symbolizer), + own_frame_symbolizer_(false), + enable_exploitability_(enable_exploitability) { + assert(frame_symbolizer_); +} + +MinidumpProcessor::~MinidumpProcessor() { + if (own_frame_symbolizer_) delete frame_symbolizer_; +} + +ProcessResult MinidumpProcessor::Process( + Minidump *dump, ProcessState *process_state) { + assert(dump); + assert(process_state); + + process_state->Clear(); + + const MDRawHeader *header = dump->header(); + if (!header) { + BPLOG(ERROR) << "Minidump " << dump->path() << " has no header"; + return PROCESS_ERROR_NO_MINIDUMP_HEADER; + } + process_state->time_date_stamp_ = header->time_date_stamp; + + bool has_process_create_time = + GetProcessCreateTime(dump, &process_state->process_create_time_); + + bool has_cpu_info = GetCPUInfo(dump, 
&process_state->system_info_); + bool has_os_info = GetOSInfo(dump, &process_state->system_info_); + + uint32_t dump_thread_id = 0; + bool has_dump_thread = false; + uint32_t requesting_thread_id = 0; + bool has_requesting_thread = false; + + MinidumpBreakpadInfo *breakpad_info = dump->GetBreakpadInfo(); + if (breakpad_info) { + has_dump_thread = breakpad_info->GetDumpThreadID(&dump_thread_id); + has_requesting_thread = + breakpad_info->GetRequestingThreadID(&requesting_thread_id); + } + + MinidumpException *exception = dump->GetException(); + if (exception) { + process_state->crashed_ = true; + has_requesting_thread = exception->GetThreadID(&requesting_thread_id); + + process_state->crash_reason_ = GetCrashReason( + dump, &process_state->crash_address_); + } + + // This will just return an empty string if it doesn't exist. + process_state->assertion_ = GetAssertion(dump); + + MinidumpModuleList *module_list = dump->GetModuleList(); + + // Put a copy of the module list into ProcessState object. This is not + // necessarily a MinidumpModuleList, but it adheres to the CodeModules + // interface, which is all that ProcessState needs to expose. + if (module_list) + process_state->modules_ = module_list->Copy(); + + MinidumpMemoryList *memory_list = dump->GetMemoryList(); + if (memory_list) { + BPLOG(INFO) << "Found " << memory_list->region_count() + << " memory regions."; + } + + MinidumpThreadList *threads = dump->GetThreadList(); + if (!threads) { + BPLOG(ERROR) << "Minidump " << dump->path() << " has no thread list"; + return PROCESS_ERROR_NO_THREAD_LIST; + } + + BPLOG(INFO) << "Minidump " << dump->path() << " has " << + (has_cpu_info ? "" : "no ") << "CPU info, " << + (has_os_info ? "" : "no ") << "OS info, " << + (breakpad_info != NULL ? "" : "no ") << "Breakpad info, " << + (exception != NULL ? "" : "no ") << "exception, " << + (module_list != NULL ? "" : "no ") << "module list, " << + (threads != NULL ? "" : "no ") << "thread list, " << + (has_dump_thread ? "" : "no ") << "dump thread, " << + (has_requesting_thread ? "" : "no ") << "requesting thread, and " << + (has_process_create_time ? "" : "no ") << "process create time"; + + bool interrupted = false; + bool found_requesting_thread = false; + unsigned int thread_count = threads->thread_count(); + + // Reset frame_symbolizer_ at the beginning of stackwalk for each minidump. + frame_symbolizer_->Reset(); + + for (unsigned int thread_index = 0; + thread_index < thread_count; + ++thread_index) { + char thread_string_buffer[64]; + snprintf(thread_string_buffer, sizeof(thread_string_buffer), "%d/%d", + thread_index, thread_count); + string thread_string = dump->path() + ":" + thread_string_buffer; + + MinidumpThread *thread = threads->GetThreadAtIndex(thread_index); + if (!thread) { + BPLOG(ERROR) << "Could not get thread for " << thread_string; + return PROCESS_ERROR_GETTING_THREAD; + } + + uint32_t thread_id; + if (!thread->GetThreadID(&thread_id)) { + BPLOG(ERROR) << "Could not get thread ID for " << thread_string; + return PROCESS_ERROR_GETTING_THREAD_ID; + } + + thread_string += " id " + HexString(thread_id); + BPLOG(INFO) << "Looking at thread " << thread_string; + + // If this thread is the thread that produced the minidump, don't process + // it. Because of the problems associated with a thread producing a + // dump of itself (when both its context and its stack are in flux), + // processing that stack wouldn't provide much useful data. 
+    if (has_dump_thread && thread_id == dump_thread_id) {
+      continue;
+    }
+
+    MinidumpContext *context = thread->GetContext();
+
+    if (has_requesting_thread && thread_id == requesting_thread_id) {
+      if (found_requesting_thread) {
+        // There can't be more than one requesting thread.
+        BPLOG(ERROR) << "Duplicate requesting thread: " << thread_string;
+        return PROCESS_ERROR_DUPLICATE_REQUESTING_THREADS;
+      }
+
+      // Use processed_state->threads_.size() instead of thread_index.
+      // thread_index points to the thread index in the minidump, which
+      // might be greater than the thread index in the threads vector if
+      // any of the minidump's threads are skipped and not placed into the
+      // processed threads vector. The thread vector's current size will
+      // be the index of the current thread when it's pushed into the
+      // vector.
+      process_state->requesting_thread_ = process_state->threads_.size();
+
+      found_requesting_thread = true;
+
+      if (process_state->crashed_) {
+        // Use the exception record's context for the crashed thread, instead
+        // of the thread's own context. For the crashed thread, the thread's
+        // own context is the state inside the exception handler. Using it
+        // would not result in the expected stack trace from the time of the
+        // crash. If the exception context is invalid, however, we fall back
+        // on the thread context.
+        MinidumpContext *ctx = exception->GetContext();
+        context = ctx ? ctx : thread->GetContext();
+      }
+    }
+
+    // If the memory region for the stack cannot be read using the RVA stored
+    // in the memory descriptor inside MINIDUMP_THREAD, try to locate and use
+    // a memory region (containing the stack) from the minidump memory list.
+    MinidumpMemoryRegion *thread_memory = thread->GetMemory();
+    if (!thread_memory && memory_list) {
+      uint64_t start_stack_memory_range = thread->GetStartOfStackMemoryRange();
+      if (start_stack_memory_range) {
+        thread_memory = memory_list->GetMemoryRegionForAddress(
+            start_stack_memory_range);
+      }
+    }
+    if (!thread_memory) {
+      BPLOG(ERROR) << "No memory region for " << thread_string;
+    }
+
+    // Use process_state->modules_ instead of module_list, because the
+    // |modules| argument will be used to populate the |module| fields in
+    // the returned StackFrame objects, which will be placed into the
+    // returned ProcessState object. module_list's lifetime is only as
+    // long as the Minidump object: it will be deleted when this function
+    // returns. process_state->modules_ is owned by the ProcessState object
+    // (just like the StackFrame objects), and is much more suitable for this
+    // task.
+    scoped_ptr<Stackwalker> stackwalker(
+        Stackwalker::StackwalkerForCPU(process_state->system_info(),
+                                       context,
+                                       thread_memory,
+                                       process_state->modules_,
+                                       frame_symbolizer_));
+
+    scoped_ptr<CallStack> stack(new CallStack());
+    if (stackwalker.get()) {
+      if (!stackwalker->Walk(stack.get(),
+                             &process_state->modules_without_symbols_,
+                             &process_state->modules_with_corrupt_symbols_)) {
+        BPLOG(INFO) << "Stackwalker interrupt (missing symbols?) at "
+                    << thread_string;
+        interrupted = true;
+      }
+    } else {
+      // Threads with missing CPU contexts will hit this, but
+      // don't abort processing the rest of the dump just for
+      // one bad thread.
+ BPLOG(ERROR) << "No stackwalker for " << thread_string; + } + process_state->threads_.push_back(stack.release()); + process_state->thread_memory_regions_.push_back(thread_memory); + } + + if (interrupted) { + BPLOG(INFO) << "Processing interrupted for " << dump->path(); + return PROCESS_SYMBOL_SUPPLIER_INTERRUPTED; + } + + // If a requesting thread was indicated, it must be present. + if (has_requesting_thread && !found_requesting_thread) { + // Don't mark as an error, but invalidate the requesting thread + BPLOG(ERROR) << "Minidump indicated requesting thread " << + HexString(requesting_thread_id) << ", not found in " << + dump->path(); + process_state->requesting_thread_ = -1; + } + + // Exploitability defaults to EXPLOITABILITY_NOT_ANALYZED + process_state->exploitability_ = EXPLOITABILITY_NOT_ANALYZED; + + // If an exploitability run was requested we perform the platform specific + // rating. + if (enable_exploitability_) { + scoped_ptr exploitability( + Exploitability::ExploitabilityForPlatform(dump, process_state)); + // The engine will be null if the platform is not supported + if (exploitability != NULL) { + process_state->exploitability_ = exploitability->CheckExploitability(); + } else { + process_state->exploitability_ = EXPLOITABILITY_ERR_NOENGINE; + } + } + + BPLOG(INFO) << "Processed " << dump->path(); + return PROCESS_OK; +} + +ProcessResult MinidumpProcessor::Process( + const string &minidump_file, ProcessState *process_state) { + BPLOG(INFO) << "Processing minidump in file " << minidump_file; + + Minidump dump(minidump_file); + if (!dump.Read()) { + BPLOG(ERROR) << "Minidump " << dump.path() << " could not be read"; + return PROCESS_ERROR_MINIDUMP_NOT_FOUND; + } + + return Process(&dump, process_state); +} + +// Returns the MDRawSystemInfo from a minidump, or NULL if system info is +// not available from the minidump. If system_info is non-NULL, it is used +// to pass back the MinidumpSystemInfo object. +static const MDRawSystemInfo* GetSystemInfo(Minidump *dump, + MinidumpSystemInfo **system_info) { + MinidumpSystemInfo *minidump_system_info = dump->GetSystemInfo(); + if (!minidump_system_info) + return NULL; + + if (system_info) + *system_info = minidump_system_info; + + return minidump_system_info->system_info(); +} + +// Extract CPU info string from ARM-specific MDRawSystemInfo structure. +// raw_info: pointer to source MDRawSystemInfo. +// cpu_info: address of target string, cpu info text will be appended to it. +static void GetARMCpuInfo(const MDRawSystemInfo* raw_info, + string* cpu_info) { + assert(raw_info != NULL && cpu_info != NULL); + + // Write ARM architecture version. 
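For a concrete reading of the decoding performed in the code that follows: the function first prints the architecture revision ("ARMv7" for processor_level 7), then splits the CPUID/MIDR value into an implementer byte and a masked part number before looking both up in small tables. A rough worked example, where the MIDR constant is a typical Cortex-A9 value assumed for illustration and not taken from this patch:

#include <stdint.h>
#include <stdio.h>

// Worked example of the masking used by GetARMCpuInfo below.
int main() {
  uint32_t cpuid = 0x410fc090;                 // assumed Cortex-A9 MIDR
  uint32_t vendor_id = (cpuid >> 24) & 0xff;   // 0x41 -> "ARM" in the vendor table
  uint32_t part_id = cpuid & 0xff00fff0;       // 0x4100c090 -> "Cortex-A9"
  printf("vendor 0x%x part 0x%x\n", (unsigned) vendor_id, (unsigned) part_id);
  return 0;
}

With processor_level 7, the resulting cpu_info string reads "ARMv7 ARM Cortex-A9", plus a comma-separated "features:" suffix when elf_hwcaps is non-zero.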
+ char cpu_string[32]; + snprintf(cpu_string, sizeof(cpu_string), "ARMv%d", + raw_info->processor_level); + cpu_info->append(cpu_string); + + // There is no good list of implementer id values, but the following + // pages provide some help: + // http://comments.gmane.org/gmane.linux.linaro.devel/6903 + // http://forum.xda-developers.com/archive/index.php/t-480226.html + const struct { + uint32_t id; + const char* name; + } vendors[] = { + { 0x41, "ARM" }, + { 0x51, "Qualcomm" }, + { 0x56, "Marvell" }, + { 0x69, "Intel/Marvell" }, + }; + const struct { + uint32_t id; + const char* name; + } parts[] = { + { 0x4100c050, "Cortex-A5" }, + { 0x4100c080, "Cortex-A8" }, + { 0x4100c090, "Cortex-A9" }, + { 0x4100c0f0, "Cortex-A15" }, + { 0x4100c140, "Cortex-R4" }, + { 0x4100c150, "Cortex-R5" }, + { 0x4100b360, "ARM1136" }, + { 0x4100b560, "ARM1156" }, + { 0x4100b760, "ARM1176" }, + { 0x4100b020, "ARM11-MPCore" }, + { 0x41009260, "ARM926" }, + { 0x41009460, "ARM946" }, + { 0x41009660, "ARM966" }, + { 0x510006f0, "Krait" }, + { 0x510000f0, "Scorpion" }, + }; + + const struct { + uint32_t hwcap; + const char* name; + } features[] = { + { MD_CPU_ARM_ELF_HWCAP_SWP, "swp" }, + { MD_CPU_ARM_ELF_HWCAP_HALF, "half" }, + { MD_CPU_ARM_ELF_HWCAP_THUMB, "thumb" }, + { MD_CPU_ARM_ELF_HWCAP_26BIT, "26bit" }, + { MD_CPU_ARM_ELF_HWCAP_FAST_MULT, "fastmult" }, + { MD_CPU_ARM_ELF_HWCAP_FPA, "fpa" }, + { MD_CPU_ARM_ELF_HWCAP_VFP, "vfpv2" }, + { MD_CPU_ARM_ELF_HWCAP_EDSP, "edsp" }, + { MD_CPU_ARM_ELF_HWCAP_JAVA, "java" }, + { MD_CPU_ARM_ELF_HWCAP_IWMMXT, "iwmmxt" }, + { MD_CPU_ARM_ELF_HWCAP_CRUNCH, "crunch" }, + { MD_CPU_ARM_ELF_HWCAP_THUMBEE, "thumbee" }, + { MD_CPU_ARM_ELF_HWCAP_NEON, "neon" }, + { MD_CPU_ARM_ELF_HWCAP_VFPv3, "vfpv3" }, + { MD_CPU_ARM_ELF_HWCAP_VFPv3D16, "vfpv3d16" }, + { MD_CPU_ARM_ELF_HWCAP_TLS, "tls" }, + { MD_CPU_ARM_ELF_HWCAP_VFPv4, "vfpv4" }, + { MD_CPU_ARM_ELF_HWCAP_IDIVA, "idiva" }, + { MD_CPU_ARM_ELF_HWCAP_IDIVT, "idivt" }, + }; + + uint32_t cpuid = raw_info->cpu.arm_cpu_info.cpuid; + if (cpuid != 0) { + // Extract vendor name from CPUID + const char* vendor = NULL; + uint32_t vendor_id = (cpuid >> 24) & 0xff; + for (size_t i = 0; i < sizeof(vendors)/sizeof(vendors[0]); ++i) { + if (vendors[i].id == vendor_id) { + vendor = vendors[i].name; + break; + } + } + cpu_info->append(" "); + if (vendor) { + cpu_info->append(vendor); + } else { + snprintf(cpu_string, sizeof(cpu_string), "vendor(0x%x)", vendor_id); + cpu_info->append(cpu_string); + } + + // Extract part name from CPUID + uint32_t part_id = (cpuid & 0xff00fff0); + const char* part = NULL; + for (size_t i = 0; i < sizeof(parts)/sizeof(parts[0]); ++i) { + if (parts[i].id == part_id) { + part = parts[i].name; + break; + } + } + cpu_info->append(" "); + if (part != NULL) { + cpu_info->append(part); + } else { + snprintf(cpu_string, sizeof(cpu_string), "part(0x%x)", part_id); + cpu_info->append(cpu_string); + } + } + uint32_t elf_hwcaps = raw_info->cpu.arm_cpu_info.elf_hwcaps; + if (elf_hwcaps != 0) { + cpu_info->append(" features: "); + const char* comma = ""; + for (size_t i = 0; i < sizeof(features)/sizeof(features[0]); ++i) { + if (elf_hwcaps & features[i].hwcap) { + cpu_info->append(comma); + cpu_info->append(features[i].name); + comma = ","; + } + } + } +} + +// static +bool MinidumpProcessor::GetCPUInfo(Minidump *dump, SystemInfo *info) { + assert(dump); + assert(info); + + info->cpu.clear(); + info->cpu_info.clear(); + + MinidumpSystemInfo *system_info; + const MDRawSystemInfo *raw_system_info = GetSystemInfo(dump, &system_info); + if 
(!raw_system_info) + return false; + + switch (raw_system_info->processor_architecture) { + case MD_CPU_ARCHITECTURE_X86: + case MD_CPU_ARCHITECTURE_AMD64: { + if (raw_system_info->processor_architecture == + MD_CPU_ARCHITECTURE_X86) + info->cpu = "x86"; + else + info->cpu = "amd64"; + + const string *cpu_vendor = system_info->GetCPUVendor(); + if (cpu_vendor) { + info->cpu_info = *cpu_vendor; + info->cpu_info.append(" "); + } + + char x86_info[36]; + snprintf(x86_info, sizeof(x86_info), "family %u model %u stepping %u", + raw_system_info->processor_level, + raw_system_info->processor_revision >> 8, + raw_system_info->processor_revision & 0xff); + info->cpu_info.append(x86_info); + break; + } + + case MD_CPU_ARCHITECTURE_PPC: { + info->cpu = "ppc"; + break; + } + + case MD_CPU_ARCHITECTURE_PPC64: { + info->cpu = "ppc64"; + break; + } + + case MD_CPU_ARCHITECTURE_SPARC: { + info->cpu = "sparc"; + break; + } + + case MD_CPU_ARCHITECTURE_ARM: { + info->cpu = "arm"; + GetARMCpuInfo(raw_system_info, &info->cpu_info); + break; + } + + case MD_CPU_ARCHITECTURE_ARM64: { + info->cpu = "arm64"; + break; + } + + case MD_CPU_ARCHITECTURE_MIPS: { + info->cpu = "mips"; + break; + } + + default: { + // Assign the numeric architecture ID into the CPU string. + char cpu_string[7]; + snprintf(cpu_string, sizeof(cpu_string), "0x%04x", + raw_system_info->processor_architecture); + info->cpu = cpu_string; + break; + } + } + + info->cpu_count = raw_system_info->number_of_processors; + + return true; +} + +// static +bool MinidumpProcessor::GetOSInfo(Minidump *dump, SystemInfo *info) { + assert(dump); + assert(info); + + info->os.clear(); + info->os_short.clear(); + info->os_version.clear(); + + MinidumpSystemInfo *system_info; + const MDRawSystemInfo *raw_system_info = GetSystemInfo(dump, &system_info); + if (!raw_system_info) + return false; + + info->os_short = system_info->GetOS(); + + switch (raw_system_info->platform_id) { + case MD_OS_WIN32_NT: { + info->os = "Windows NT"; + break; + } + + case MD_OS_WIN32_WINDOWS: { + info->os = "Windows"; + break; + } + + case MD_OS_MAC_OS_X: { + info->os = "Mac OS X"; + break; + } + + case MD_OS_IOS: { + info->os = "iOS"; + break; + } + + case MD_OS_LINUX: { + info->os = "Linux"; + break; + } + + case MD_OS_SOLARIS: { + info->os = "Solaris"; + break; + } + + case MD_OS_ANDROID: { + info->os = "Android"; + break; + } + + case MD_OS_PS3: { + info->os = "PS3"; + break; + } + + case MD_OS_NACL: { + info->os = "NaCl"; + break; + } + + default: { + // Assign the numeric platform ID into the OS string. 
+ char os_string[11]; + snprintf(os_string, sizeof(os_string), "0x%08x", + raw_system_info->platform_id); + info->os = os_string; + break; + } + } + + char os_version_string[33]; + snprintf(os_version_string, sizeof(os_version_string), "%u.%u.%u", + raw_system_info->major_version, + raw_system_info->minor_version, + raw_system_info->build_number); + info->os_version = os_version_string; + + const string *csd_version = system_info->GetCSDVersion(); + if (csd_version) { + info->os_version.append(" "); + info->os_version.append(*csd_version); + } + + return true; +} + +// static +bool MinidumpProcessor::GetProcessCreateTime(Minidump* dump, + uint32_t* process_create_time) { + assert(dump); + assert(process_create_time); + + *process_create_time = 0; + + MinidumpMiscInfo* minidump_misc_info = dump->GetMiscInfo(); + if (!minidump_misc_info) { + return false; + } + + const MDRawMiscInfo* md_raw_misc_info = minidump_misc_info->misc_info(); + if (!md_raw_misc_info) { + return false; + } + + if (!(md_raw_misc_info->flags1 & MD_MISCINFO_FLAGS1_PROCESS_TIMES)) { + return false; + } + + *process_create_time = md_raw_misc_info->process_create_time; + return true; +} + +// static +string MinidumpProcessor::GetCrashReason(Minidump *dump, uint64_t *address) { + MinidumpException *exception = dump->GetException(); + if (!exception) + return ""; + + const MDRawExceptionStream *raw_exception = exception->exception(); + if (!raw_exception) + return ""; + + if (address) + *address = raw_exception->exception_record.exception_address; + + // The reason value is OS-specific and possibly CPU-specific. Set up + // sensible numeric defaults for the reason string in case we can't + // map the codes to a string (because there's no system info, or because + // it's an unrecognized platform, or because it's an unrecognized code.) 
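To make that fallback format concrete: the code below first renders "<exception_code> / <exception_flags>" as two zero-padded hex words and only replaces that with a symbolic name when the platform and code are recognized. A tiny sketch of the fallback form, with made-up code/flags values:

#include <stdio.h>

// Illustration only; 0xdeadbeef stands in for an unrecognized exception code.
int main() {
  char reason_string[24];
  snprintf(reason_string, sizeof(reason_string), "0x%08x / 0x%08x",
           0xdeadbeefu, 0x00000000u);
  printf("%s\n", reason_string);  // prints "0xdeadbeef / 0x00000000"
  return 0;
}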
+ char reason_string[24]; + uint32_t exception_code = raw_exception->exception_record.exception_code; + uint32_t exception_flags = raw_exception->exception_record.exception_flags; + snprintf(reason_string, sizeof(reason_string), "0x%08x / 0x%08x", + exception_code, exception_flags); + string reason = reason_string; + + const MDRawSystemInfo *raw_system_info = GetSystemInfo(dump, NULL); + if (!raw_system_info) + return reason; + + switch (raw_system_info->platform_id) { + case MD_OS_MAC_OS_X: + case MD_OS_IOS: { + char flags_string[11]; + snprintf(flags_string, sizeof(flags_string), "0x%08x", exception_flags); + switch (exception_code) { + case MD_EXCEPTION_MAC_BAD_ACCESS: + reason = "EXC_BAD_ACCESS / "; + switch (exception_flags) { + case MD_EXCEPTION_CODE_MAC_INVALID_ADDRESS: + reason.append("KERN_INVALID_ADDRESS"); + break; + case MD_EXCEPTION_CODE_MAC_PROTECTION_FAILURE: + reason.append("KERN_PROTECTION_FAILURE"); + break; + case MD_EXCEPTION_CODE_MAC_NO_ACCESS: + reason.append("KERN_NO_ACCESS"); + break; + case MD_EXCEPTION_CODE_MAC_MEMORY_FAILURE: + reason.append("KERN_MEMORY_FAILURE"); + break; + case MD_EXCEPTION_CODE_MAC_MEMORY_ERROR: + reason.append("KERN_MEMORY_ERROR"); + break; + default: + // arm and ppc overlap + if (raw_system_info->processor_architecture == + MD_CPU_ARCHITECTURE_ARM || + raw_system_info->processor_architecture == + MD_CPU_ARCHITECTURE_ARM64) { + switch (exception_flags) { + case MD_EXCEPTION_CODE_MAC_ARM_DA_ALIGN: + reason.append("EXC_ARM_DA_ALIGN"); + break; + case MD_EXCEPTION_CODE_MAC_ARM_DA_DEBUG: + reason.append("EXC_ARM_DA_DEBUG"); + break; + default: + reason.append(flags_string); + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + } else if (raw_system_info->processor_architecture == + MD_CPU_ARCHITECTURE_PPC) { + switch (exception_flags) { + case MD_EXCEPTION_CODE_MAC_PPC_VM_PROT_READ: + reason.append("EXC_PPC_VM_PROT_READ"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_BADSPACE: + reason.append("EXC_PPC_BADSPACE"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_UNALIGNED: + reason.append("EXC_PPC_UNALIGNED"); + break; + default: + reason.append(flags_string); + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + } else { + reason.append(flags_string); + BPLOG(INFO) << "Unknown exception reason " << reason; + } + break; + } + break; + case MD_EXCEPTION_MAC_BAD_INSTRUCTION: + reason = "EXC_BAD_INSTRUCTION / "; + switch (raw_system_info->processor_architecture) { + case MD_CPU_ARCHITECTURE_ARM: + case MD_CPU_ARCHITECTURE_ARM64: { + switch (exception_flags) { + case MD_EXCEPTION_CODE_MAC_ARM_UNDEFINED: + reason.append("EXC_ARM_UNDEFINED"); + break; + default: + reason.append(flags_string); + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + break; + } + case MD_CPU_ARCHITECTURE_PPC: { + switch (exception_flags) { + case MD_EXCEPTION_CODE_MAC_PPC_INVALID_SYSCALL: + reason.append("EXC_PPC_INVALID_SYSCALL"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_UNIMPLEMENTED_INSTRUCTION: + reason.append("EXC_PPC_UNIPL_INST"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_PRIVILEGED_INSTRUCTION: + reason.append("EXC_PPC_PRIVINST"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_PRIVILEGED_REGISTER: + reason.append("EXC_PPC_PRIVREG"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_TRACE: + reason.append("EXC_PPC_TRACE"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_PERFORMANCE_MONITOR: + reason.append("EXC_PPC_PERFMON"); + break; + default: + reason.append(flags_string); + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } 
+ break; + } + case MD_CPU_ARCHITECTURE_X86: { + switch (exception_flags) { + case MD_EXCEPTION_CODE_MAC_X86_INVALID_OPERATION: + reason.append("EXC_I386_INVOP"); + break; + case MD_EXCEPTION_CODE_MAC_X86_INVALID_TASK_STATE_SEGMENT: + reason.append("EXC_INVTSSFLT"); + break; + case MD_EXCEPTION_CODE_MAC_X86_SEGMENT_NOT_PRESENT: + reason.append("EXC_SEGNPFLT"); + break; + case MD_EXCEPTION_CODE_MAC_X86_STACK_FAULT: + reason.append("EXC_STKFLT"); + break; + case MD_EXCEPTION_CODE_MAC_X86_GENERAL_PROTECTION_FAULT: + reason.append("EXC_GPFLT"); + break; + case MD_EXCEPTION_CODE_MAC_X86_ALIGNMENT_FAULT: + reason.append("EXC_ALIGNFLT"); + break; + default: + reason.append(flags_string); + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + break; + } + default: + reason.append(flags_string); + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + break; + case MD_EXCEPTION_MAC_ARITHMETIC: + reason = "EXC_ARITHMETIC / "; + switch (raw_system_info->processor_architecture) { + case MD_CPU_ARCHITECTURE_PPC: { + switch (exception_flags) { + case MD_EXCEPTION_CODE_MAC_PPC_OVERFLOW: + reason.append("EXC_PPC_OVERFLOW"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_ZERO_DIVIDE: + reason.append("EXC_PPC_ZERO_DIVIDE"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_FLOAT_INEXACT: + reason.append("EXC_FLT_INEXACT"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_FLOAT_ZERO_DIVIDE: + reason.append("EXC_PPC_FLT_ZERO_DIVIDE"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_FLOAT_UNDERFLOW: + reason.append("EXC_PPC_FLT_UNDERFLOW"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_FLOAT_OVERFLOW: + reason.append("EXC_PPC_FLT_OVERFLOW"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_FLOAT_NOT_A_NUMBER: + reason.append("EXC_PPC_FLT_NOT_A_NUMBER"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_NO_EMULATION: + reason.append("EXC_PPC_NOEMULATION"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_ALTIVEC_ASSIST: + reason.append("EXC_PPC_ALTIVECASSIST"); + default: + reason.append(flags_string); + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + break; + } + case MD_CPU_ARCHITECTURE_X86: { + switch (exception_flags) { + case MD_EXCEPTION_CODE_MAC_X86_DIV: + reason.append("EXC_I386_DIV"); + break; + case MD_EXCEPTION_CODE_MAC_X86_INTO: + reason.append("EXC_I386_INTO"); + break; + case MD_EXCEPTION_CODE_MAC_X86_NOEXT: + reason.append("EXC_I386_NOEXT"); + break; + case MD_EXCEPTION_CODE_MAC_X86_EXTOVR: + reason.append("EXC_I386_EXTOVR"); + break; + case MD_EXCEPTION_CODE_MAC_X86_EXTERR: + reason.append("EXC_I386_EXTERR"); + break; + case MD_EXCEPTION_CODE_MAC_X86_EMERR: + reason.append("EXC_I386_EMERR"); + break; + case MD_EXCEPTION_CODE_MAC_X86_BOUND: + reason.append("EXC_I386_BOUND"); + break; + case MD_EXCEPTION_CODE_MAC_X86_SSEEXTERR: + reason.append("EXC_I386_SSEEXTERR"); + break; + default: + reason.append(flags_string); + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + break; + } + default: + reason.append(flags_string); + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + break; + case MD_EXCEPTION_MAC_EMULATION: + reason = "EXC_EMULATION / "; + reason.append(flags_string); + break; + case MD_EXCEPTION_MAC_SOFTWARE: + reason = "EXC_SOFTWARE / "; + switch (exception_flags) { + case MD_EXCEPTION_CODE_MAC_ABORT: + reason.append("SIGABRT"); + break; + case MD_EXCEPTION_CODE_MAC_NS_EXCEPTION: + reason.append("UNCAUGHT_NS_EXCEPTION"); + break; + // These are ppc only but shouldn't be a problem as they're + // unused on x86 + case MD_EXCEPTION_CODE_MAC_PPC_TRAP: + 
reason.append("EXC_PPC_TRAP"); + break; + case MD_EXCEPTION_CODE_MAC_PPC_MIGRATE: + reason.append("EXC_PPC_MIGRATE"); + break; + default: + reason.append(flags_string); + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + break; + case MD_EXCEPTION_MAC_BREAKPOINT: + reason = "EXC_BREAKPOINT / "; + switch (raw_system_info->processor_architecture) { + case MD_CPU_ARCHITECTURE_ARM: + case MD_CPU_ARCHITECTURE_ARM64: { + switch (exception_flags) { + case MD_EXCEPTION_CODE_MAC_ARM_DA_ALIGN: + reason.append("EXC_ARM_DA_ALIGN"); + break; + case MD_EXCEPTION_CODE_MAC_ARM_DA_DEBUG: + reason.append("EXC_ARM_DA_DEBUG"); + break; + case MD_EXCEPTION_CODE_MAC_ARM_BREAKPOINT: + reason.append("EXC_ARM_BREAKPOINT"); + break; + default: + reason.append(flags_string); + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + break; + } + case MD_CPU_ARCHITECTURE_PPC: { + switch (exception_flags) { + case MD_EXCEPTION_CODE_MAC_PPC_BREAKPOINT: + reason.append("EXC_PPC_BREAKPOINT"); + break; + default: + reason.append(flags_string); + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + break; + } + case MD_CPU_ARCHITECTURE_X86: { + switch (exception_flags) { + case MD_EXCEPTION_CODE_MAC_X86_SGL: + reason.append("EXC_I386_SGL"); + break; + case MD_EXCEPTION_CODE_MAC_X86_BPT: + reason.append("EXC_I386_BPT"); + break; + default: + reason.append(flags_string); + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + break; + } + default: + reason.append(flags_string); + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + break; + case MD_EXCEPTION_MAC_SYSCALL: + reason = "EXC_SYSCALL / "; + reason.append(flags_string); + break; + case MD_EXCEPTION_MAC_MACH_SYSCALL: + reason = "EXC_MACH_SYSCALL / "; + reason.append(flags_string); + break; + case MD_EXCEPTION_MAC_RPC_ALERT: + reason = "EXC_RPC_ALERT / "; + reason.append(flags_string); + break; + } + break; + } + + case MD_OS_WIN32_NT: + case MD_OS_WIN32_WINDOWS: { + switch (exception_code) { + case MD_EXCEPTION_CODE_WIN_CONTROL_C: + reason = "DBG_CONTROL_C"; + break; + case MD_EXCEPTION_CODE_WIN_GUARD_PAGE_VIOLATION: + reason = "EXCEPTION_GUARD_PAGE"; + break; + case MD_EXCEPTION_CODE_WIN_DATATYPE_MISALIGNMENT: + reason = "EXCEPTION_DATATYPE_MISALIGNMENT"; + break; + case MD_EXCEPTION_CODE_WIN_BREAKPOINT: + reason = "EXCEPTION_BREAKPOINT"; + break; + case MD_EXCEPTION_CODE_WIN_SINGLE_STEP: + reason = "EXCEPTION_SINGLE_STEP"; + break; + case MD_EXCEPTION_CODE_WIN_ACCESS_VIOLATION: + // For EXCEPTION_ACCESS_VIOLATION, Windows puts the address that + // caused the fault in exception_information[1]. + // exception_information[0] is 0 if the violation was caused by + // an attempt to read data, 1 if it was an attempt to write data, + // and 8 if this was a data execution violation. + // This information is useful in addition to the code address, which + // will be present in the crash thread's instruction field anyway. 
+ if (raw_exception->exception_record.number_parameters >= 1) { + MDAccessViolationTypeWin av_type = + static_cast + (raw_exception->exception_record.exception_information[0]); + switch (av_type) { + case MD_ACCESS_VIOLATION_WIN_READ: + reason = "EXCEPTION_ACCESS_VIOLATION_READ"; + break; + case MD_ACCESS_VIOLATION_WIN_WRITE: + reason = "EXCEPTION_ACCESS_VIOLATION_WRITE"; + break; + case MD_ACCESS_VIOLATION_WIN_EXEC: + reason = "EXCEPTION_ACCESS_VIOLATION_EXEC"; + break; + default: + reason = "EXCEPTION_ACCESS_VIOLATION"; + break; + } + } else { + reason = "EXCEPTION_ACCESS_VIOLATION"; + } + if (address && + raw_exception->exception_record.number_parameters >= 2) { + *address = + raw_exception->exception_record.exception_information[1]; + } + break; + case MD_EXCEPTION_CODE_WIN_IN_PAGE_ERROR: + // For EXCEPTION_IN_PAGE_ERROR, Windows puts the address that + // caused the fault in exception_information[1]. + // exception_information[0] is 0 if the violation was caused by + // an attempt to read data, 1 if it was an attempt to write data, + // and 8 if this was a data execution violation. + // exception_information[2] contains the underlying NTSTATUS code, + // which is the explanation for why this error occured. + // This information is useful in addition to the code address, which + // will be present in the crash thread's instruction field anyway. + if (raw_exception->exception_record.number_parameters >= 1) { + MDInPageErrorTypeWin av_type = + static_cast + (raw_exception->exception_record.exception_information[0]); + switch (av_type) { + case MD_IN_PAGE_ERROR_WIN_READ: + reason = "EXCEPTION_IN_PAGE_ERROR_READ"; + break; + case MD_IN_PAGE_ERROR_WIN_WRITE: + reason = "EXCEPTION_IN_PAGE_ERROR_WRITE"; + break; + case MD_IN_PAGE_ERROR_WIN_EXEC: + reason = "EXCEPTION_IN_PAGE_ERROR_EXEC"; + break; + default: + reason = "EXCEPTION_IN_PAGE_ERROR"; + break; + } + } else { + reason = "EXCEPTION_IN_PAGE_ERROR"; + } + if (address && + raw_exception->exception_record.number_parameters >= 2) { + *address = + raw_exception->exception_record.exception_information[1]; + } + if (raw_exception->exception_record.number_parameters >= 3) { + uint32_t ntstatus = + static_cast + (raw_exception->exception_record.exception_information[2]); + reason.append(" / "); + reason.append(NTStatusToString(ntstatus)); + } + break; + case MD_EXCEPTION_CODE_WIN_INVALID_HANDLE: + reason = "EXCEPTION_INVALID_HANDLE"; + break; + case MD_EXCEPTION_CODE_WIN_ILLEGAL_INSTRUCTION: + reason = "EXCEPTION_ILLEGAL_INSTRUCTION"; + break; + case MD_EXCEPTION_CODE_WIN_NONCONTINUABLE_EXCEPTION: + reason = "EXCEPTION_NONCONTINUABLE_EXCEPTION"; + break; + case MD_EXCEPTION_CODE_WIN_INVALID_DISPOSITION: + reason = "EXCEPTION_INVALID_DISPOSITION"; + break; + case MD_EXCEPTION_CODE_WIN_ARRAY_BOUNDS_EXCEEDED: + reason = "EXCEPTION_BOUNDS_EXCEEDED"; + break; + case MD_EXCEPTION_CODE_WIN_FLOAT_DENORMAL_OPERAND: + reason = "EXCEPTION_FLT_DENORMAL_OPERAND"; + break; + case MD_EXCEPTION_CODE_WIN_FLOAT_DIVIDE_BY_ZERO: + reason = "EXCEPTION_FLT_DIVIDE_BY_ZERO"; + break; + case MD_EXCEPTION_CODE_WIN_FLOAT_INEXACT_RESULT: + reason = "EXCEPTION_FLT_INEXACT_RESULT"; + break; + case MD_EXCEPTION_CODE_WIN_FLOAT_INVALID_OPERATION: + reason = "EXCEPTION_FLT_INVALID_OPERATION"; + break; + case MD_EXCEPTION_CODE_WIN_FLOAT_OVERFLOW: + reason = "EXCEPTION_FLT_OVERFLOW"; + break; + case MD_EXCEPTION_CODE_WIN_FLOAT_STACK_CHECK: + reason = "EXCEPTION_FLT_STACK_CHECK"; + break; + case MD_EXCEPTION_CODE_WIN_FLOAT_UNDERFLOW: + reason = "EXCEPTION_FLT_UNDERFLOW"; + break; 
+ case MD_EXCEPTION_CODE_WIN_INTEGER_DIVIDE_BY_ZERO: + reason = "EXCEPTION_INT_DIVIDE_BY_ZERO"; + break; + case MD_EXCEPTION_CODE_WIN_INTEGER_OVERFLOW: + reason = "EXCEPTION_INT_OVERFLOW"; + break; + case MD_EXCEPTION_CODE_WIN_PRIVILEGED_INSTRUCTION: + reason = "EXCEPTION_PRIV_INSTRUCTION"; + break; + case MD_EXCEPTION_CODE_WIN_STACK_OVERFLOW: + reason = "EXCEPTION_STACK_OVERFLOW"; + break; + case MD_EXCEPTION_CODE_WIN_POSSIBLE_DEADLOCK: + reason = "EXCEPTION_POSSIBLE_DEADLOCK"; + break; + case MD_EXCEPTION_CODE_WIN_STACK_BUFFER_OVERRUN: + reason = "EXCEPTION_STACK_BUFFER_OVERRUN"; + break; + case MD_EXCEPTION_CODE_WIN_HEAP_CORRUPTION: + reason = "EXCEPTION_HEAP_CORRUPTION"; + break; + case MD_EXCEPTION_CODE_WIN_UNHANDLED_CPP_EXCEPTION: + reason = "Unhandled C++ Exception"; + break; + default: + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + break; + } + + case MD_OS_ANDROID: + case MD_OS_LINUX: { + switch (exception_code) { + case MD_EXCEPTION_CODE_LIN_SIGHUP: + reason = "SIGHUP"; + break; + case MD_EXCEPTION_CODE_LIN_SIGINT: + reason = "SIGINT"; + break; + case MD_EXCEPTION_CODE_LIN_SIGQUIT: + reason = "SIGQUIT"; + break; + case MD_EXCEPTION_CODE_LIN_SIGILL: + reason = "SIGILL"; + break; + case MD_EXCEPTION_CODE_LIN_SIGTRAP: + reason = "SIGTRAP"; + break; + case MD_EXCEPTION_CODE_LIN_SIGABRT: + reason = "SIGABRT"; + break; + case MD_EXCEPTION_CODE_LIN_SIGBUS: + reason = "SIGBUS"; + break; + case MD_EXCEPTION_CODE_LIN_SIGFPE: + reason = "SIGFPE"; + break; + case MD_EXCEPTION_CODE_LIN_SIGKILL: + reason = "SIGKILL"; + break; + case MD_EXCEPTION_CODE_LIN_SIGUSR1: + reason = "SIGUSR1"; + break; + case MD_EXCEPTION_CODE_LIN_SIGSEGV: + reason = "SIGSEGV"; + break; + case MD_EXCEPTION_CODE_LIN_SIGUSR2: + reason = "SIGUSR2"; + break; + case MD_EXCEPTION_CODE_LIN_SIGPIPE: + reason = "SIGPIPE"; + break; + case MD_EXCEPTION_CODE_LIN_SIGALRM: + reason = "SIGALRM"; + break; + case MD_EXCEPTION_CODE_LIN_SIGTERM: + reason = "SIGTERM"; + break; + case MD_EXCEPTION_CODE_LIN_SIGSTKFLT: + reason = "SIGSTKFLT"; + break; + case MD_EXCEPTION_CODE_LIN_SIGCHLD: + reason = "SIGCHLD"; + break; + case MD_EXCEPTION_CODE_LIN_SIGCONT: + reason = "SIGCONT"; + break; + case MD_EXCEPTION_CODE_LIN_SIGSTOP: + reason = "SIGSTOP"; + break; + case MD_EXCEPTION_CODE_LIN_SIGTSTP: + reason = "SIGTSTP"; + break; + case MD_EXCEPTION_CODE_LIN_SIGTTIN: + reason = "SIGTTIN"; + break; + case MD_EXCEPTION_CODE_LIN_SIGTTOU: + reason = "SIGTTOU"; + break; + case MD_EXCEPTION_CODE_LIN_SIGURG: + reason = "SIGURG"; + break; + case MD_EXCEPTION_CODE_LIN_SIGXCPU: + reason = "SIGXCPU"; + break; + case MD_EXCEPTION_CODE_LIN_SIGXFSZ: + reason = "SIGXFSZ"; + break; + case MD_EXCEPTION_CODE_LIN_SIGVTALRM: + reason = "SIGVTALRM"; + break; + case MD_EXCEPTION_CODE_LIN_SIGPROF: + reason = "SIGPROF"; + break; + case MD_EXCEPTION_CODE_LIN_SIGWINCH: + reason = "SIGWINCH"; + break; + case MD_EXCEPTION_CODE_LIN_SIGIO: + reason = "SIGIO"; + break; + case MD_EXCEPTION_CODE_LIN_SIGPWR: + reason = "SIGPWR"; + break; + case MD_EXCEPTION_CODE_LIN_SIGSYS: + reason = "SIGSYS"; + break; + case MD_EXCEPTION_CODE_LIN_DUMP_REQUESTED: + reason = "DUMP_REQUESTED"; + break; + default: + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + break; + } + + case MD_OS_SOLARIS: { + switch (exception_code) { + case MD_EXCEPTION_CODE_SOL_SIGHUP: + reason = "SIGHUP"; + break; + case MD_EXCEPTION_CODE_SOL_SIGINT: + reason = "SIGINT"; + break; + case MD_EXCEPTION_CODE_SOL_SIGQUIT: + reason = "SIGQUIT"; + break; + case 
MD_EXCEPTION_CODE_SOL_SIGILL: + reason = "SIGILL"; + break; + case MD_EXCEPTION_CODE_SOL_SIGTRAP: + reason = "SIGTRAP"; + break; + case MD_EXCEPTION_CODE_SOL_SIGIOT: + reason = "SIGIOT | SIGABRT"; + break; + case MD_EXCEPTION_CODE_SOL_SIGEMT: + reason = "SIGEMT"; + break; + case MD_EXCEPTION_CODE_SOL_SIGFPE: + reason = "SIGFPE"; + break; + case MD_EXCEPTION_CODE_SOL_SIGKILL: + reason = "SIGKILL"; + break; + case MD_EXCEPTION_CODE_SOL_SIGBUS: + reason = "SIGBUS"; + break; + case MD_EXCEPTION_CODE_SOL_SIGSEGV: + reason = "SIGSEGV"; + break; + case MD_EXCEPTION_CODE_SOL_SIGSYS: + reason = "SIGSYS"; + break; + case MD_EXCEPTION_CODE_SOL_SIGPIPE: + reason = "SIGPIPE"; + break; + case MD_EXCEPTION_CODE_SOL_SIGALRM: + reason = "SIGALRM"; + break; + case MD_EXCEPTION_CODE_SOL_SIGTERM: + reason = "SIGTERM"; + break; + case MD_EXCEPTION_CODE_SOL_SIGUSR1: + reason = "SIGUSR1"; + break; + case MD_EXCEPTION_CODE_SOL_SIGUSR2: + reason = "SIGUSR2"; + break; + case MD_EXCEPTION_CODE_SOL_SIGCLD: + reason = "SIGCLD | SIGCHLD"; + break; + case MD_EXCEPTION_CODE_SOL_SIGPWR: + reason = "SIGPWR"; + break; + case MD_EXCEPTION_CODE_SOL_SIGWINCH: + reason = "SIGWINCH"; + break; + case MD_EXCEPTION_CODE_SOL_SIGURG: + reason = "SIGURG"; + break; + case MD_EXCEPTION_CODE_SOL_SIGPOLL: + reason = "SIGPOLL | SIGIO"; + break; + case MD_EXCEPTION_CODE_SOL_SIGSTOP: + reason = "SIGSTOP"; + break; + case MD_EXCEPTION_CODE_SOL_SIGTSTP: + reason = "SIGTSTP"; + break; + case MD_EXCEPTION_CODE_SOL_SIGCONT: + reason = "SIGCONT"; + break; + case MD_EXCEPTION_CODE_SOL_SIGTTIN: + reason = "SIGTTIN"; + break; + case MD_EXCEPTION_CODE_SOL_SIGTTOU: + reason = "SIGTTOU"; + break; + case MD_EXCEPTION_CODE_SOL_SIGVTALRM: + reason = "SIGVTALRM"; + break; + case MD_EXCEPTION_CODE_SOL_SIGPROF: + reason = "SIGPROF"; + break; + case MD_EXCEPTION_CODE_SOL_SIGXCPU: + reason = "SIGXCPU"; + break; + case MD_EXCEPTION_CODE_SOL_SIGXFSZ: + reason = "SIGXFSZ"; + break; + case MD_EXCEPTION_CODE_SOL_SIGWAITING: + reason = "SIGWAITING"; + break; + case MD_EXCEPTION_CODE_SOL_SIGLWP: + reason = "SIGLWP"; + break; + case MD_EXCEPTION_CODE_SOL_SIGFREEZE: + reason = "SIGFREEZE"; + break; + case MD_EXCEPTION_CODE_SOL_SIGTHAW: + reason = "SIGTHAW"; + break; + case MD_EXCEPTION_CODE_SOL_SIGCANCEL: + reason = "SIGCANCEL"; + break; + case MD_EXCEPTION_CODE_SOL_SIGLOST: + reason = "SIGLOST"; + break; + case MD_EXCEPTION_CODE_SOL_SIGXRES: + reason = "SIGXRES"; + break; + case MD_EXCEPTION_CODE_SOL_SIGJVM1: + reason = "SIGJVM1"; + break; + case MD_EXCEPTION_CODE_SOL_SIGJVM2: + reason = "SIGJVM2"; + break; + default: + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + break; + } + + case MD_OS_PS3: { + switch (exception_code) { + case MD_EXCEPTION_CODE_PS3_UNKNOWN: + reason = "UNKNOWN"; + break; + case MD_EXCEPTION_CODE_PS3_TRAP_EXCEP: + reason = "TRAP_EXCEP"; + break; + case MD_EXCEPTION_CODE_PS3_PRIV_INSTR: + reason = "PRIV_INSTR"; + break; + case MD_EXCEPTION_CODE_PS3_ILLEGAL_INSTR: + reason = "ILLEGAL_INSTR"; + break; + case MD_EXCEPTION_CODE_PS3_INSTR_STORAGE: + reason = "INSTR_STORAGE"; + break; + case MD_EXCEPTION_CODE_PS3_INSTR_SEGMENT: + reason = "INSTR_SEGMENT"; + break; + case MD_EXCEPTION_CODE_PS3_DATA_STORAGE: + reason = "DATA_STORAGE"; + break; + case MD_EXCEPTION_CODE_PS3_DATA_SEGMENT: + reason = "DATA_SEGMENT"; + break; + case MD_EXCEPTION_CODE_PS3_FLOAT_POINT: + reason = "FLOAT_POINT"; + break; + case MD_EXCEPTION_CODE_PS3_DABR_MATCH: + reason = "DABR_MATCH"; + break; + case MD_EXCEPTION_CODE_PS3_ALIGN_EXCEP: + reason = 
"ALIGN_EXCEP"; + break; + case MD_EXCEPTION_CODE_PS3_MEMORY_ACCESS: + reason = "MEMORY_ACCESS"; + break; + case MD_EXCEPTION_CODE_PS3_COPRO_ALIGN: + reason = "COPRO_ALIGN"; + break; + case MD_EXCEPTION_CODE_PS3_COPRO_INVALID_COM: + reason = "COPRO_INVALID_COM"; + break; + case MD_EXCEPTION_CODE_PS3_COPRO_ERR: + reason = "COPRO_ERR"; + break; + case MD_EXCEPTION_CODE_PS3_COPRO_FIR: + reason = "COPRO_FIR"; + break; + case MD_EXCEPTION_CODE_PS3_COPRO_DATA_SEGMENT: + reason = "COPRO_DATA_SEGMENT"; + break; + case MD_EXCEPTION_CODE_PS3_COPRO_DATA_STORAGE: + reason = "COPRO_DATA_STORAGE"; + break; + case MD_EXCEPTION_CODE_PS3_COPRO_STOP_INSTR: + reason = "COPRO_STOP_INSTR"; + break; + case MD_EXCEPTION_CODE_PS3_COPRO_HALT_INSTR: + reason = "COPRO_HALT_INSTR"; + break; + case MD_EXCEPTION_CODE_PS3_COPRO_HALTINST_UNKNOWN: + reason = "COPRO_HALTINSTR_UNKNOWN"; + break; + case MD_EXCEPTION_CODE_PS3_COPRO_MEMORY_ACCESS: + reason = "COPRO_MEMORY_ACCESS"; + break; + case MD_EXCEPTION_CODE_PS3_GRAPHIC: + reason = "GRAPHIC"; + break; + default: + BPLOG(INFO) << "Unknown exception reason "<< reason; + break; + } + break; + } + + default: { + BPLOG(INFO) << "Unknown exception reason " << reason; + break; + } + } + + return reason; +} + +// static +string MinidumpProcessor::GetAssertion(Minidump *dump) { + MinidumpAssertion *assertion = dump->GetAssertion(); + if (!assertion) + return ""; + + const MDRawAssertionInfo *raw_assertion = assertion->assertion(); + if (!raw_assertion) + return ""; + + string assertion_string; + switch (raw_assertion->type) { + case MD_ASSERTION_INFO_TYPE_INVALID_PARAMETER: + assertion_string = "Invalid parameter passed to library function"; + break; + case MD_ASSERTION_INFO_TYPE_PURE_VIRTUAL_CALL: + assertion_string = "Pure virtual function called"; + break; + default: { + char assertion_type[32]; + snprintf(assertion_type, sizeof(assertion_type), + "0x%08x", raw_assertion->type); + assertion_string = "Unknown assertion type "; + assertion_string += assertion_type; + break; + } + } + + string expression = assertion->expression(); + if (!expression.empty()) { + assertion_string.append(" " + expression); + } + + string function = assertion->function(); + if (!function.empty()) { + assertion_string.append(" in function " + function); + } + + string file = assertion->file(); + if (!file.empty()) { + assertion_string.append(", in file " + file); + } + + if (raw_assertion->line != 0) { + char assertion_line[32]; + snprintf(assertion_line, sizeof(assertion_line), "%u", raw_assertion->line); + assertion_string.append(" at line "); + assertion_string.append(assertion_line); + } + + return assertion_string; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_processor_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_processor_unittest.cc new file mode 100644 index 0000000000..69e1f42e65 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_processor_unittest.cc @@ -0,0 +1,644 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Unit test for MinidumpProcessor. Uses a pre-generated minidump and +// corresponding symbol file, and checks the stack frames for correctness. + +#include + +#include +#include +#include +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/scoped_ptr.h" +#include "common/using_std_string.h" +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/code_module.h" +#include "google_breakpad/processor/code_modules.h" +#include "google_breakpad/processor/minidump.h" +#include "google_breakpad/processor/minidump_processor.h" +#include "google_breakpad/processor/process_state.h" +#include "google_breakpad/processor/stack_frame.h" +#include "google_breakpad/processor/symbol_supplier.h" +#include "processor/logging.h" +#include "processor/stackwalker_unittest_utils.h" + +using std::map; + +namespace google_breakpad { +class MockMinidump : public Minidump { + public: + MockMinidump() : Minidump("") { + } + + MOCK_METHOD0(Read, bool()); + MOCK_CONST_METHOD0(path, string()); + MOCK_CONST_METHOD0(header, const MDRawHeader*()); + MOCK_METHOD0(GetThreadList, MinidumpThreadList*()); + MOCK_METHOD0(GetSystemInfo, MinidumpSystemInfo*()); + MOCK_METHOD0(GetMiscInfo, MinidumpMiscInfo*()); + MOCK_METHOD0(GetBreakpadInfo, MinidumpBreakpadInfo*()); + MOCK_METHOD0(GetException, MinidumpException*()); + MOCK_METHOD0(GetAssertion, MinidumpAssertion*()); + MOCK_METHOD0(GetModuleList, MinidumpModuleList*()); + MOCK_METHOD0(GetMemoryList, MinidumpMemoryList*()); +}; + +class MockMinidumpThreadList : public MinidumpThreadList { + public: + MockMinidumpThreadList() : MinidumpThreadList(NULL) {} + + MOCK_CONST_METHOD0(thread_count, unsigned int()); + MOCK_CONST_METHOD1(GetThreadAtIndex, MinidumpThread*(unsigned int)); +}; + +class MockMinidumpMemoryList : public MinidumpMemoryList { + public: + MockMinidumpMemoryList() : MinidumpMemoryList(NULL) {} + + MOCK_METHOD1(GetMemoryRegionForAddress, MinidumpMemoryRegion*(uint64_t)); +}; + +class MockMinidumpThread : public MinidumpThread { + public: + MockMinidumpThread() : MinidumpThread(NULL) {} + + MOCK_CONST_METHOD1(GetThreadID, bool(uint32_t*)); + MOCK_METHOD0(GetContext, MinidumpContext*()); + MOCK_METHOD0(GetMemory, MinidumpMemoryRegion*()); + 
MOCK_CONST_METHOD0(GetStartOfStackMemoryRange, uint64_t()); +}; + +// This is crappy, but MinidumpProcessor really does want a +// MinidumpMemoryRegion. +class MockMinidumpMemoryRegion : public MinidumpMemoryRegion { + public: + MockMinidumpMemoryRegion(uint64_t base, const string& contents) : + MinidumpMemoryRegion(NULL) { + region_.Init(base, contents); + } + + uint64_t GetBase() const { return region_.GetBase(); } + uint32_t GetSize() const { return region_.GetSize(); } + + bool GetMemoryAtAddress(uint64_t address, uint8_t *value) const { + return region_.GetMemoryAtAddress(address, value); + } + bool GetMemoryAtAddress(uint64_t address, uint16_t *value) const { + return region_.GetMemoryAtAddress(address, value); + } + bool GetMemoryAtAddress(uint64_t address, uint32_t *value) const { + return region_.GetMemoryAtAddress(address, value); + } + bool GetMemoryAtAddress(uint64_t address, uint64_t *value) const { + return region_.GetMemoryAtAddress(address, value); + } + + MockMemoryRegion region_; +}; + +// A test miscelaneous info stream, just returns values from the +// MDRawMiscInfo fed to it. +class TestMinidumpMiscInfo : public MinidumpMiscInfo { + public: + explicit TestMinidumpMiscInfo(const MDRawMiscInfo& misc_info) : + MinidumpMiscInfo(NULL) { + valid_ = true; + misc_info_ = misc_info; + } +}; + +} // namespace google_breakpad + +namespace { + +using google_breakpad::BasicSourceLineResolver; +using google_breakpad::CallStack; +using google_breakpad::CodeModule; +using google_breakpad::MinidumpContext; +using google_breakpad::MinidumpMemoryRegion; +using google_breakpad::MinidumpMiscInfo; +using google_breakpad::MinidumpProcessor; +using google_breakpad::MinidumpSystemInfo; +using google_breakpad::MinidumpThreadList; +using google_breakpad::MinidumpThread; +using google_breakpad::MockMinidump; +using google_breakpad::MockMinidumpMemoryList; +using google_breakpad::MockMinidumpMemoryRegion; +using google_breakpad::MockMinidumpThread; +using google_breakpad::MockMinidumpThreadList; +using google_breakpad::ProcessState; +using google_breakpad::scoped_ptr; +using google_breakpad::SymbolSupplier; +using google_breakpad::SystemInfo; +using ::testing::_; +using ::testing::AnyNumber; +using ::testing::DoAll; +using ::testing::Mock; +using ::testing::Ne; +using ::testing::Property; +using ::testing::Return; +using ::testing::SetArgumentPointee; + +static const char *kSystemInfoOS = "Windows NT"; +static const char *kSystemInfoOSShort = "windows"; +static const char *kSystemInfoOSVersion = "5.1.2600 Service Pack 2"; +static const char *kSystemInfoCPU = "x86"; +static const char *kSystemInfoCPUInfo = + "GenuineIntel family 6 model 13 stepping 8"; + +#define ASSERT_TRUE_ABORT(cond) \ + if (!(cond)) { \ + fprintf(stderr, "FAILED: %s at %s:%d\n", #cond, __FILE__, __LINE__); \ + abort(); \ + } + +#define ASSERT_EQ_ABORT(e1, e2) ASSERT_TRUE_ABORT((e1) == (e2)) + +class TestSymbolSupplier : public SymbolSupplier { + public: + TestSymbolSupplier() : interrupt_(false) {} + + virtual SymbolResult GetSymbolFile(const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file); + + virtual SymbolResult GetSymbolFile(const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file, + string *symbol_data); + + virtual SymbolResult GetCStringSymbolData(const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file, + char **symbol_data, + size_t *symbol_data_size); + + virtual void FreeSymbolData(const CodeModule *module); + + // When set to true, causes 
the SymbolSupplier to return INTERRUPT + void set_interrupt(bool interrupt) { interrupt_ = interrupt; } + + private: + bool interrupt_; + map memory_buffers_; +}; + +SymbolSupplier::SymbolResult TestSymbolSupplier::GetSymbolFile( + const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file) { + ASSERT_TRUE_ABORT(module); + ASSERT_TRUE_ABORT(system_info); + ASSERT_EQ_ABORT(system_info->cpu, kSystemInfoCPU); + ASSERT_EQ_ABORT(system_info->cpu_info, kSystemInfoCPUInfo); + ASSERT_EQ_ABORT(system_info->os, kSystemInfoOS); + ASSERT_EQ_ABORT(system_info->os_short, kSystemInfoOSShort); + ASSERT_EQ_ABORT(system_info->os_version, kSystemInfoOSVersion); + + if (interrupt_) { + return INTERRUPT; + } + + if (module && module->code_file() == "c:\\test_app.exe") { + *symbol_file = string(getenv("srcdir") ? getenv("srcdir") : ".") + + "/src/processor/testdata/symbols/test_app.pdb/" + + module->debug_identifier() + + "/test_app.sym"; + return FOUND; + } + + return NOT_FOUND; +} + +SymbolSupplier::SymbolResult TestSymbolSupplier::GetSymbolFile( + const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file, + string *symbol_data) { + SymbolSupplier::SymbolResult s = GetSymbolFile(module, system_info, + symbol_file); + if (s == FOUND) { + std::ifstream in(symbol_file->c_str()); + std::getline(in, *symbol_data, string::traits_type::to_char_type( + string::traits_type::eof())); + in.close(); + } + + return s; +} + +SymbolSupplier::SymbolResult TestSymbolSupplier::GetCStringSymbolData( + const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file, + char **symbol_data, + size_t *symbol_data_size) { + string symbol_data_string; + SymbolSupplier::SymbolResult s = GetSymbolFile(module, + system_info, + symbol_file, + &symbol_data_string); + if (s == FOUND) { + *symbol_data_size = symbol_data_string.size() + 1; + *symbol_data = new char[*symbol_data_size]; + if (*symbol_data == NULL) { + BPLOG(ERROR) << "Memory allocation failed for module: " + << module->code_file() << " size: " << *symbol_data_size; + return INTERRUPT; + } + memcpy(*symbol_data, symbol_data_string.c_str(), symbol_data_string.size()); + (*symbol_data)[symbol_data_string.size()] = '\0'; + memory_buffers_.insert(make_pair(module->code_file(), *symbol_data)); + } + + return s; +} + +void TestSymbolSupplier::FreeSymbolData(const CodeModule *module) { + map::iterator it = memory_buffers_.find(module->code_file()); + if (it != memory_buffers_.end()) { + delete [] it->second; + memory_buffers_.erase(it); + } +} + +// A test system info stream, just returns values from the +// MDRawSystemInfo fed to it. +class TestMinidumpSystemInfo : public MinidumpSystemInfo { + public: + explicit TestMinidumpSystemInfo(MDRawSystemInfo info) : + MinidumpSystemInfo(NULL) { + valid_ = true; + system_info_ = info; + csd_version_ = new string(""); + } +}; + +// A test minidump context, just returns the MDRawContextX86 +// fed to it. 
+class TestMinidumpContext : public MinidumpContext { + public: + explicit TestMinidumpContext(const MDRawContextX86& context) : + MinidumpContext(NULL) { + valid_ = true; + SetContextX86(new MDRawContextX86(context)); + SetContextFlags(MD_CONTEXT_X86); + } +}; + +class MinidumpProcessorTest : public ::testing::Test { +}; + +TEST_F(MinidumpProcessorTest, TestCorruptMinidumps) { + MockMinidump dump; + TestSymbolSupplier supplier; + BasicSourceLineResolver resolver; + MinidumpProcessor processor(&supplier, &resolver); + ProcessState state; + + EXPECT_EQ(processor.Process("nonexistent minidump", &state), + google_breakpad::PROCESS_ERROR_MINIDUMP_NOT_FOUND); + + EXPECT_CALL(dump, path()).WillRepeatedly(Return("mock minidump")); + EXPECT_CALL(dump, Read()).WillRepeatedly(Return(true)); + + MDRawHeader fakeHeader; + fakeHeader.time_date_stamp = 0; + EXPECT_CALL(dump, header()). + WillOnce(Return(reinterpret_cast(NULL))). + WillRepeatedly(Return(&fakeHeader)); + + EXPECT_EQ(processor.Process(&dump, &state), + google_breakpad::PROCESS_ERROR_NO_MINIDUMP_HEADER); + + EXPECT_CALL(dump, GetThreadList()). + WillOnce(Return(reinterpret_cast(NULL))); + EXPECT_CALL(dump, GetSystemInfo()). + WillRepeatedly(Return(reinterpret_cast(NULL))); + + EXPECT_EQ(processor.Process(&dump, &state), + google_breakpad::PROCESS_ERROR_NO_THREAD_LIST); +} + +// This test case verifies that the symbol supplier is only consulted +// once per minidump per module. +TEST_F(MinidumpProcessorTest, TestSymbolSupplierLookupCounts) { + MockSymbolSupplier supplier; + BasicSourceLineResolver resolver; + MinidumpProcessor processor(&supplier, &resolver); + + string minidump_file = string(getenv("srcdir") ? getenv("srcdir") : ".") + + "/src/processor/testdata/minidump2.dmp"; + ProcessState state; + EXPECT_CALL(supplier, GetCStringSymbolData( + Property(&google_breakpad::CodeModule::code_file, + "c:\\test_app.exe"), + _, _, _, _)).WillOnce(Return(SymbolSupplier::NOT_FOUND)); + EXPECT_CALL(supplier, GetCStringSymbolData( + Property(&google_breakpad::CodeModule::code_file, + Ne("c:\\test_app.exe")), + _, _, _, _)).WillRepeatedly(Return(SymbolSupplier::NOT_FOUND)); + // Avoid GMOCK WARNING "Uninteresting mock function call - returning + // directly" for FreeSymbolData(). + EXPECT_CALL(supplier, FreeSymbolData(_)).Times(AnyNumber()); + ASSERT_EQ(processor.Process(minidump_file, &state), + google_breakpad::PROCESS_OK); + + ASSERT_TRUE(Mock::VerifyAndClearExpectations(&supplier)); + + // We need to verify that across minidumps, the processor will refetch + // symbol files, even with the same symbol supplier. + EXPECT_CALL(supplier, GetCStringSymbolData( + Property(&google_breakpad::CodeModule::code_file, + "c:\\test_app.exe"), + _, _, _, _)).WillOnce(Return(SymbolSupplier::NOT_FOUND)); + EXPECT_CALL(supplier, GetCStringSymbolData( + Property(&google_breakpad::CodeModule::code_file, + Ne("c:\\test_app.exe")), + _, _, _, _)).WillRepeatedly(Return(SymbolSupplier::NOT_FOUND)); + // Avoid GMOCK WARNING "Uninteresting mock function call - returning + // directly" for FreeSymbolData(). + EXPECT_CALL(supplier, FreeSymbolData(_)).Times(AnyNumber()); + ASSERT_EQ(processor.Process(minidump_file, &state), + google_breakpad::PROCESS_OK); +} + +TEST_F(MinidumpProcessorTest, TestBasicProcessing) { + TestSymbolSupplier supplier; + BasicSourceLineResolver resolver; + MinidumpProcessor processor(&supplier, &resolver); + + string minidump_file = string(getenv("srcdir") ? 
getenv("srcdir") : ".") + + "/src/processor/testdata/minidump2.dmp"; + + ProcessState state; + ASSERT_EQ(processor.Process(minidump_file, &state), + google_breakpad::PROCESS_OK); + ASSERT_EQ(state.system_info()->os, kSystemInfoOS); + ASSERT_EQ(state.system_info()->os_short, kSystemInfoOSShort); + ASSERT_EQ(state.system_info()->os_version, kSystemInfoOSVersion); + ASSERT_EQ(state.system_info()->cpu, kSystemInfoCPU); + ASSERT_EQ(state.system_info()->cpu_info, kSystemInfoCPUInfo); + ASSERT_TRUE(state.crashed()); + ASSERT_EQ(state.crash_reason(), "EXCEPTION_ACCESS_VIOLATION_WRITE"); + ASSERT_EQ(state.crash_address(), 0x45U); + ASSERT_EQ(state.threads()->size(), size_t(1)); + ASSERT_EQ(state.requesting_thread(), 0); + EXPECT_EQ(1171480435U, state.time_date_stamp()); + EXPECT_EQ(1171480435U, state.process_create_time()); + + CallStack *stack = state.threads()->at(0); + ASSERT_TRUE(stack); + ASSERT_EQ(stack->frames()->size(), 4U); + + ASSERT_TRUE(stack->frames()->at(0)->module); + ASSERT_EQ(stack->frames()->at(0)->module->base_address(), 0x400000U); + ASSERT_EQ(stack->frames()->at(0)->module->code_file(), "c:\\test_app.exe"); + ASSERT_EQ(stack->frames()->at(0)->function_name, + "`anonymous namespace'::CrashFunction"); + ASSERT_EQ(stack->frames()->at(0)->source_file_name, "c:\\test_app.cc"); + ASSERT_EQ(stack->frames()->at(0)->source_line, 58); + + ASSERT_TRUE(stack->frames()->at(1)->module); + ASSERT_EQ(stack->frames()->at(1)->module->base_address(), 0x400000U); + ASSERT_EQ(stack->frames()->at(1)->module->code_file(), "c:\\test_app.exe"); + ASSERT_EQ(stack->frames()->at(1)->function_name, "main"); + ASSERT_EQ(stack->frames()->at(1)->source_file_name, "c:\\test_app.cc"); + ASSERT_EQ(stack->frames()->at(1)->source_line, 65); + + // This comes from the CRT + ASSERT_TRUE(stack->frames()->at(2)->module); + ASSERT_EQ(stack->frames()->at(2)->module->base_address(), 0x400000U); + ASSERT_EQ(stack->frames()->at(2)->module->code_file(), "c:\\test_app.exe"); + ASSERT_EQ(stack->frames()->at(2)->function_name, "__tmainCRTStartup"); + ASSERT_EQ(stack->frames()->at(2)->source_file_name, + "f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c"); + ASSERT_EQ(stack->frames()->at(2)->source_line, 327); + + // No debug info available for kernel32.dll + ASSERT_TRUE(stack->frames()->at(3)->module); + ASSERT_EQ(stack->frames()->at(3)->module->base_address(), 0x7c800000U); + ASSERT_EQ(stack->frames()->at(3)->module->code_file(), + "C:\\WINDOWS\\system32\\kernel32.dll"); + ASSERT_TRUE(stack->frames()->at(3)->function_name.empty()); + ASSERT_TRUE(stack->frames()->at(3)->source_file_name.empty()); + ASSERT_EQ(stack->frames()->at(3)->source_line, 0); + + ASSERT_EQ(state.modules()->module_count(), 13U); + ASSERT_TRUE(state.modules()->GetMainModule()); + ASSERT_EQ(state.modules()->GetMainModule()->code_file(), "c:\\test_app.exe"); + ASSERT_FALSE(state.modules()->GetModuleForAddress(0)); + ASSERT_EQ(state.modules()->GetMainModule(), + state.modules()->GetModuleForAddress(0x400000)); + ASSERT_EQ(state.modules()->GetModuleForAddress(0x7c801234)->debug_file(), + "kernel32.pdb"); + ASSERT_EQ(state.modules()->GetModuleForAddress(0x77d43210)->version(), + "5.1.2600.2622"); + + // Test that disabled exploitability engine defaults to + // EXPLOITABILITY_NOT_ANALYZED. 
+ ASSERT_EQ(google_breakpad::EXPLOITABILITY_NOT_ANALYZED, + state.exploitability()); + + // Test that the symbol supplier can interrupt processing + state.Clear(); + supplier.set_interrupt(true); + ASSERT_EQ(processor.Process(minidump_file, &state), + google_breakpad::PROCESS_SYMBOL_SUPPLIER_INTERRUPTED); +} + +TEST_F(MinidumpProcessorTest, TestThreadMissingMemory) { + MockMinidump dump; + EXPECT_CALL(dump, path()).WillRepeatedly(Return("mock minidump")); + EXPECT_CALL(dump, Read()).WillRepeatedly(Return(true)); + + MDRawHeader fake_header; + fake_header.time_date_stamp = 0; + EXPECT_CALL(dump, header()).WillRepeatedly(Return(&fake_header)); + + MDRawSystemInfo raw_system_info; + memset(&raw_system_info, 0, sizeof(raw_system_info)); + raw_system_info.processor_architecture = MD_CPU_ARCHITECTURE_X86; + raw_system_info.platform_id = MD_OS_WIN32_NT; + TestMinidumpSystemInfo dump_system_info(raw_system_info); + + EXPECT_CALL(dump, GetSystemInfo()). + WillRepeatedly(Return(&dump_system_info)); + + MockMinidumpThreadList thread_list; + EXPECT_CALL(dump, GetThreadList()). + WillOnce(Return(&thread_list)); + + MockMinidumpMemoryList memory_list; + EXPECT_CALL(dump, GetMemoryList()). + WillOnce(Return(&memory_list)); + + // Return a thread missing stack memory. + MockMinidumpThread no_memory_thread; + EXPECT_CALL(no_memory_thread, GetThreadID(_)). + WillRepeatedly(DoAll(SetArgumentPointee<0>(1), + Return(true))); + EXPECT_CALL(no_memory_thread, GetMemory()). + WillRepeatedly(Return(reinterpret_cast(NULL))); + + const uint64_t kTestStartOfMemoryRange = 0x1234; + EXPECT_CALL(no_memory_thread, GetStartOfStackMemoryRange()). + WillRepeatedly(Return(kTestStartOfMemoryRange)); + EXPECT_CALL(memory_list, GetMemoryRegionForAddress(kTestStartOfMemoryRange)). + WillRepeatedly(Return(reinterpret_cast(NULL))); + + MDRawContextX86 no_memory_thread_raw_context; + memset(&no_memory_thread_raw_context, 0, + sizeof(no_memory_thread_raw_context)); + no_memory_thread_raw_context.context_flags = MD_CONTEXT_X86_FULL; + const uint32_t kExpectedEIP = 0xabcd1234; + no_memory_thread_raw_context.eip = kExpectedEIP; + TestMinidumpContext no_memory_thread_context(no_memory_thread_raw_context); + EXPECT_CALL(no_memory_thread, GetContext()). + WillRepeatedly(Return(&no_memory_thread_context)); + + EXPECT_CALL(thread_list, thread_count()). + WillRepeatedly(Return(1)); + EXPECT_CALL(thread_list, GetThreadAtIndex(0)). + WillOnce(Return(&no_memory_thread)); + + MinidumpProcessor processor(reinterpret_cast(NULL), NULL); + ProcessState state; + EXPECT_EQ(processor.Process(&dump, &state), + google_breakpad::PROCESS_OK); + + // Should have a single thread with a single frame in it. + ASSERT_EQ(1U, state.threads()->size()); + ASSERT_EQ(1U, state.threads()->at(0)->frames()->size()); + ASSERT_EQ(kExpectedEIP, state.threads()->at(0)->frames()->at(0)->instruction); +} + +TEST_F(MinidumpProcessorTest, GetProcessCreateTime) { + const uint32_t kProcessCreateTime = 2000; + const uint32_t kTimeDateStamp = 5000; + MockMinidump dump; + EXPECT_CALL(dump, path()).WillRepeatedly(Return("mock minidump")); + EXPECT_CALL(dump, Read()).WillRepeatedly(Return(true)); + + // Set time of crash. + MDRawHeader fake_header; + fake_header.time_date_stamp = kTimeDateStamp; + EXPECT_CALL(dump, header()).WillRepeatedly(Return(&fake_header)); + + // Set process create time. 
+ MDRawMiscInfo raw_misc_info; + memset(&raw_misc_info, 0, sizeof(raw_misc_info)); + raw_misc_info.process_create_time = kProcessCreateTime; + raw_misc_info.flags1 |= MD_MISCINFO_FLAGS1_PROCESS_TIMES; + google_breakpad::TestMinidumpMiscInfo dump_misc_info(raw_misc_info); + EXPECT_CALL(dump, GetMiscInfo()).WillRepeatedly(Return(&dump_misc_info)); + + // No threads + MockMinidumpThreadList thread_list; + EXPECT_CALL(dump, GetThreadList()).WillOnce(Return(&thread_list)); + EXPECT_CALL(thread_list, thread_count()).WillRepeatedly(Return(0)); + + MinidumpProcessor processor(reinterpret_cast(NULL), NULL); + ProcessState state; + EXPECT_EQ(google_breakpad::PROCESS_OK, processor.Process(&dump, &state)); + + // Verify the time stamps. + ASSERT_EQ(kTimeDateStamp, state.time_date_stamp()); + ASSERT_EQ(kProcessCreateTime, state.process_create_time()); +} + +TEST_F(MinidumpProcessorTest, TestThreadMissingContext) { + MockMinidump dump; + EXPECT_CALL(dump, path()).WillRepeatedly(Return("mock minidump")); + EXPECT_CALL(dump, Read()).WillRepeatedly(Return(true)); + + MDRawHeader fake_header; + fake_header.time_date_stamp = 0; + EXPECT_CALL(dump, header()).WillRepeatedly(Return(&fake_header)); + + MDRawSystemInfo raw_system_info; + memset(&raw_system_info, 0, sizeof(raw_system_info)); + raw_system_info.processor_architecture = MD_CPU_ARCHITECTURE_X86; + raw_system_info.platform_id = MD_OS_WIN32_NT; + TestMinidumpSystemInfo dump_system_info(raw_system_info); + + EXPECT_CALL(dump, GetSystemInfo()). + WillRepeatedly(Return(&dump_system_info)); + + MockMinidumpThreadList thread_list; + EXPECT_CALL(dump, GetThreadList()). + WillOnce(Return(&thread_list)); + + MockMinidumpMemoryList memory_list; + EXPECT_CALL(dump, GetMemoryList()). + WillOnce(Return(&memory_list)); + + // Return a thread missing a thread context. + MockMinidumpThread no_context_thread; + EXPECT_CALL(no_context_thread, GetThreadID(_)). + WillRepeatedly(DoAll(SetArgumentPointee<0>(1), + Return(true))); + EXPECT_CALL(no_context_thread, GetContext()). + WillRepeatedly(Return(reinterpret_cast(NULL))); + + // The memory contents don't really matter here, since it won't be used. + MockMinidumpMemoryRegion no_context_thread_memory(0x1234, "xxx"); + EXPECT_CALL(no_context_thread, GetMemory()). + WillRepeatedly(Return(&no_context_thread_memory)); + EXPECT_CALL(no_context_thread, GetStartOfStackMemoryRange()). + Times(0); + EXPECT_CALL(memory_list, GetMemoryRegionForAddress(_)). + Times(0); + + EXPECT_CALL(thread_list, thread_count()). + WillRepeatedly(Return(1)); + EXPECT_CALL(thread_list, GetThreadAtIndex(0)). + WillOnce(Return(&no_context_thread)); + + MinidumpProcessor processor(reinterpret_cast(NULL), NULL); + ProcessState state; + EXPECT_EQ(processor.Process(&dump, &state), + google_breakpad::PROCESS_OK); + + // Should have a single thread with zero frames. + ASSERT_EQ(1U, state.threads()->size()); + ASSERT_EQ(0U, state.threads()->at(0)->frames()->size()); +} + +} // namespace + +int main(int argc, char *argv[]) { + ::testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_stackwalk.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_stackwalk.cc new file mode 100644 index 0000000000..8f83969fef --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_stackwalk.cc @@ -0,0 +1,162 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. 
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// minidump_stackwalk.cc: Process a minidump with MinidumpProcessor, printing
+// the results, including stack traces.
+//
+// Author: Mark Mentovai
+
+#include <stdio.h>
+#include <string.h>
+
+#include <string>
+#include <vector>
+
+#include "common/scoped_ptr.h"
+#include "common/using_std_string.h"
+#include "google_breakpad/processor/basic_source_line_resolver.h"
+#include "google_breakpad/processor/minidump.h"
+#include "google_breakpad/processor/minidump_processor.h"
+#include "google_breakpad/processor/process_state.h"
+#include "processor/logging.h"
+#include "processor/simple_symbol_supplier.h"
+#include "processor/stackwalk_common.h"
+
+
+namespace {
+
+using google_breakpad::BasicSourceLineResolver;
+using google_breakpad::Minidump;
+using google_breakpad::MinidumpProcessor;
+using google_breakpad::ProcessState;
+using google_breakpad::SimpleSymbolSupplier;
+using google_breakpad::scoped_ptr;
+
+// Processes |minidump_file| using MinidumpProcessor.  |symbol_path|, if
+// non-empty, is the base directory of a symbol storage area, laid out in
+// the format required by SimpleSymbolSupplier.  If such a storage area
+// is specified, it is made available for use by the MinidumpProcessor.
+//
+// Returns the value of MinidumpProcessor::Process.  If processing succeeds,
+// prints identifying OS and CPU information from the minidump, crash
+// information if the minidump was produced as a result of a crash, and
+// call stacks for each thread contained in the minidump.  All information
+// is printed to stdout.
+bool PrintMinidumpProcess(const string &minidump_file,
+                          const std::vector<string> &symbol_paths,
+                          bool machine_readable,
+                          bool output_stack_contents) {
+  scoped_ptr<SimpleSymbolSupplier> symbol_supplier;
+  if (!symbol_paths.empty()) {
+    // TODO(mmentovai): check existence of symbol_path if specified?
+    symbol_supplier.reset(new SimpleSymbolSupplier(symbol_paths));
+  }
+
+  BasicSourceLineResolver resolver;
+  MinidumpProcessor minidump_processor(symbol_supplier.get(), &resolver);
+
+  // Process the minidump.
+  Minidump dump(minidump_file);
+  if (!dump.Read()) {
+    BPLOG(ERROR) << "Minidump " << dump.path() << " could not be read";
+    return false;
+  }
+  ProcessState process_state;
+  if (minidump_processor.Process(&dump, &process_state) !=
+      google_breakpad::PROCESS_OK) {
+    BPLOG(ERROR) << "MinidumpProcessor::Process failed";
+    return false;
+  }
+
+  if (machine_readable) {
+    PrintProcessStateMachineReadable(process_state);
+  } else {
+    PrintProcessState(process_state, output_stack_contents, &resolver);
+  }
+
+  return true;
+}
+
+void usage(const char *program_name) {
+  fprintf(stderr, "usage: %s [-m|-s] <minidump-file> [symbol-path ...]\n"
+          "    -m : Output in machine-readable format\n"
+          "    -s : Output stack contents\n",
+          program_name);
+}
+
+}  // namespace
+
+int main(int argc, char **argv) {
+  BPLOG_INIT(&argc, &argv);
+
+  if (argc < 2) {
+    usage(argv[0]);
+    return 1;
+  }
+
+  const char *minidump_file;
+  bool machine_readable = false;
+  bool output_stack_contents = false;
+  int symbol_path_arg;
+
+  if (strcmp(argv[1], "-m") == 0) {
+    if (argc < 3) {
+      usage(argv[0]);
+      return 1;
+    }
+
+    machine_readable = true;
+    minidump_file = argv[2];
+    symbol_path_arg = 3;
+  } else if (strcmp(argv[1], "-s") == 0) {
+    if (argc < 3) {
+      usage(argv[0]);
+      return 1;
+    }
+
+    output_stack_contents = true;
+    minidump_file = argv[2];
+    symbol_path_arg = 3;
+  } else {
+    minidump_file = argv[1];
+    symbol_path_arg = 2;
+  }
+
+  // extra arguments are symbol paths
+  std::vector<string> symbol_paths;
+  if (argc > symbol_path_arg) {
+    for (int argi = symbol_path_arg; argi < argc; ++argi)
+      symbol_paths.push_back(argv[argi]);
+  }
+
+  return PrintMinidumpProcess(minidump_file,
+                              symbol_paths,
+                              machine_readable,
+                              output_stack_contents) ? 0 : 1;
+}
diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_stackwalk_machine_readable_test b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_stackwalk_machine_readable_test
new file mode 100644
index 0000000000..2aadb2412f
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_stackwalk_machine_readable_test
@@ -0,0 +1,37 @@
+#!/bin/sh
+
+# Copyright (c) 2007, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +testdata_dir=$srcdir/src/processor/testdata +./src/processor/minidump_stackwalk -m $testdata_dir/minidump2.dmp \ + $testdata_dir/symbols | \ + tr -d '\015' | \ + diff -u $testdata_dir/minidump2.stackwalk.machine_readable.out - +exit $? diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_stackwalk_test b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_stackwalk_test new file mode 100644 index 0000000000..f97902791e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_stackwalk_test @@ -0,0 +1,37 @@ +#!/bin/sh + +# Copyright (c) 2006, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +testdata_dir=$srcdir/src/processor/testdata +./src/processor/minidump_stackwalk $testdata_dir/minidump2.dmp \ + $testdata_dir/symbols | \ + tr -d '\015' | \ + diff -u $testdata_dir/minidump2.stackwalk.out - +exit $? diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_unittest.cc new file mode 100644 index 0000000000..bb7dac642c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/minidump_unittest.cc @@ -0,0 +1,1266 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Unit test for Minidump. Uses a pre-generated minidump and +// verifies that certain streams are correct. + +#include +#include +#include +#include +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/using_std_string.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/minidump.h" +#include "processor/logging.h" +#include "processor/synth_minidump.h" + +namespace { + +using google_breakpad::Minidump; +using google_breakpad::MinidumpContext; +using google_breakpad::MinidumpException; +using google_breakpad::MinidumpMemoryInfo; +using google_breakpad::MinidumpMemoryInfoList; +using google_breakpad::MinidumpMemoryList; +using google_breakpad::MinidumpMemoryRegion; +using google_breakpad::MinidumpModule; +using google_breakpad::MinidumpModuleList; +using google_breakpad::MinidumpSystemInfo; +using google_breakpad::MinidumpThread; +using google_breakpad::MinidumpThreadList; +using google_breakpad::SynthMinidump::Context; +using google_breakpad::SynthMinidump::Dump; +using google_breakpad::SynthMinidump::Exception; +using google_breakpad::SynthMinidump::Memory; +using google_breakpad::SynthMinidump::Module; +using google_breakpad::SynthMinidump::Stream; +using google_breakpad::SynthMinidump::String; +using google_breakpad::SynthMinidump::SystemInfo; +using google_breakpad::SynthMinidump::Thread; +using google_breakpad::test_assembler::kBigEndian; +using google_breakpad::test_assembler::kLittleEndian; +using std::ifstream; +using std::istringstream; +using std::vector; +using ::testing::Return; + +class MinidumpTest : public ::testing::Test { +public: + void SetUp() { + minidump_file_ = string(getenv("srcdir") ? 
getenv("srcdir") : ".") + + "/src/processor/testdata/minidump2.dmp"; + } + string minidump_file_; +}; + +TEST_F(MinidumpTest, TestMinidumpFromFile) { + Minidump minidump(minidump_file_); + ASSERT_EQ(minidump.path(), minidump_file_); + ASSERT_TRUE(minidump.Read()); + const MDRawHeader* header = minidump.header(); + ASSERT_NE(header, (MDRawHeader*)NULL); + ASSERT_EQ(header->signature, uint32_t(MD_HEADER_SIGNATURE)); + //TODO: add more checks here +} + +TEST_F(MinidumpTest, TestMinidumpFromStream) { + // read minidump contents into memory, construct a stringstream around them + ifstream file_stream(minidump_file_.c_str(), std::ios::in); + ASSERT_TRUE(file_stream.good()); + vector bytes; + file_stream.seekg(0, std::ios_base::end); + ASSERT_TRUE(file_stream.good()); + bytes.resize(file_stream.tellg()); + file_stream.seekg(0, std::ios_base::beg); + ASSERT_TRUE(file_stream.good()); + file_stream.read(&bytes[0], bytes.size()); + ASSERT_TRUE(file_stream.good()); + string str(&bytes[0], bytes.size()); + istringstream stream(str); + ASSERT_TRUE(stream.good()); + + // now read minidump from stringstream + Minidump minidump(stream); + ASSERT_EQ(minidump.path(), ""); + ASSERT_TRUE(minidump.Read()); + const MDRawHeader* header = minidump.header(); + ASSERT_NE(header, (MDRawHeader*)NULL); + ASSERT_EQ(header->signature, uint32_t(MD_HEADER_SIGNATURE)); + //TODO: add more checks here +} + +TEST(Dump, ReadBackEmpty) { + Dump dump(0); + dump.Finish(); + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + istringstream stream(contents); + Minidump minidump(stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(0U, minidump.GetDirectoryEntryCount()); +} + +TEST(Dump, ReadBackEmptyBigEndian) { + Dump big_minidump(0, kBigEndian); + big_minidump.Finish(); + string contents; + ASSERT_TRUE(big_minidump.GetContents(&contents)); + istringstream stream(contents); + Minidump minidump(stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(0U, minidump.GetDirectoryEntryCount()); +} + +TEST(Dump, OneStream) { + Dump dump(0, kBigEndian); + Stream stream(dump, 0xfbb7fa2bU); + stream.Append("stream contents"); + dump.Add(&stream); + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(1U, minidump.GetDirectoryEntryCount()); + + const MDRawDirectory *dir = minidump.GetDirectoryEntryAtIndex(0); + ASSERT_TRUE(dir != NULL); + EXPECT_EQ(0xfbb7fa2bU, dir->stream_type); + + uint32_t stream_length; + ASSERT_TRUE(minidump.SeekToStreamType(0xfbb7fa2bU, &stream_length)); + ASSERT_EQ(15U, stream_length); + char stream_contents[15]; + ASSERT_TRUE(minidump.ReadBytes(stream_contents, sizeof(stream_contents))); + EXPECT_EQ(string("stream contents"), + string(stream_contents, sizeof(stream_contents))); + + EXPECT_FALSE(minidump.GetThreadList()); + EXPECT_FALSE(minidump.GetModuleList()); + EXPECT_FALSE(minidump.GetMemoryList()); + EXPECT_FALSE(minidump.GetException()); + EXPECT_FALSE(minidump.GetAssertion()); + EXPECT_FALSE(minidump.GetSystemInfo()); + EXPECT_FALSE(minidump.GetMiscInfo()); + EXPECT_FALSE(minidump.GetBreakpadInfo()); +} + +TEST(Dump, OneMemory) { + Dump dump(0, kBigEndian); + Memory memory(dump, 0x309d68010bd21b2cULL); + memory.Append("memory contents"); + dump.Add(&memory); + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + 
ASSERT_EQ(1U, minidump.GetDirectoryEntryCount()); + + const MDRawDirectory *dir = minidump.GetDirectoryEntryAtIndex(0); + ASSERT_TRUE(dir != NULL); + EXPECT_EQ((uint32_t) MD_MEMORY_LIST_STREAM, dir->stream_type); + + MinidumpMemoryList *memory_list = minidump.GetMemoryList(); + ASSERT_TRUE(memory_list != NULL); + ASSERT_EQ(1U, memory_list->region_count()); + + MinidumpMemoryRegion *region1 = memory_list->GetMemoryRegionAtIndex(0); + ASSERT_EQ(0x309d68010bd21b2cULL, region1->GetBase()); + ASSERT_EQ(15U, region1->GetSize()); + const uint8_t *region1_bytes = region1->GetMemory(); + ASSERT_TRUE(memcmp("memory contents", region1_bytes, 15) == 0); +} + +// One thread --- and its requisite entourage. +TEST(Dump, OneThread) { + Dump dump(0, kLittleEndian); + Memory stack(dump, 0x2326a0fa); + stack.Append("stack for thread"); + + MDRawContextX86 raw_context; + const uint32_t kExpectedEIP = 0x6913f540; + raw_context.context_flags = MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL; + raw_context.edi = 0x3ecba80d; + raw_context.esi = 0x382583b9; + raw_context.ebx = 0x7fccc03f; + raw_context.edx = 0xf62f8ec2; + raw_context.ecx = 0x46a6a6a8; + raw_context.eax = 0x6a5025e2; + raw_context.ebp = 0xd9fabb4a; + raw_context.eip = kExpectedEIP; + raw_context.cs = 0xbffe6eda; + raw_context.eflags = 0xb2ce1e2d; + raw_context.esp = 0x659caaa4; + raw_context.ss = 0x2e951ef7; + Context context(dump, raw_context); + + Thread thread(dump, 0xa898f11b, stack, context, + 0x9e39439f, 0x4abfc15f, 0xe499898a, 0x0d43e939dcfd0372ULL); + + dump.Add(&stack); + dump.Add(&context); + dump.Add(&thread); + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(2U, minidump.GetDirectoryEntryCount()); + + MinidumpMemoryList *md_memory_list = minidump.GetMemoryList(); + ASSERT_TRUE(md_memory_list != NULL); + ASSERT_EQ(1U, md_memory_list->region_count()); + + MinidumpMemoryRegion *md_region = md_memory_list->GetMemoryRegionAtIndex(0); + ASSERT_EQ(0x2326a0faU, md_region->GetBase()); + ASSERT_EQ(16U, md_region->GetSize()); + const uint8_t *region_bytes = md_region->GetMemory(); + ASSERT_TRUE(memcmp("stack for thread", region_bytes, 16) == 0); + + MinidumpThreadList *thread_list = minidump.GetThreadList(); + ASSERT_TRUE(thread_list != NULL); + ASSERT_EQ(1U, thread_list->thread_count()); + + MinidumpThread *md_thread = thread_list->GetThreadAtIndex(0); + ASSERT_TRUE(md_thread != NULL); + uint32_t thread_id; + ASSERT_TRUE(md_thread->GetThreadID(&thread_id)); + ASSERT_EQ(0xa898f11bU, thread_id); + MinidumpMemoryRegion *md_stack = md_thread->GetMemory(); + ASSERT_TRUE(md_stack != NULL); + ASSERT_EQ(0x2326a0faU, md_stack->GetBase()); + ASSERT_EQ(16U, md_stack->GetSize()); + const uint8_t *md_stack_bytes = md_stack->GetMemory(); + ASSERT_TRUE(memcmp("stack for thread", md_stack_bytes, 16) == 0); + + MinidumpContext *md_context = md_thread->GetContext(); + ASSERT_TRUE(md_context != NULL); + ASSERT_EQ((uint32_t) MD_CONTEXT_X86, md_context->GetContextCPU()); + + uint64_t eip; + ASSERT_TRUE(md_context->GetInstructionPointer(&eip)); + EXPECT_EQ(kExpectedEIP, eip); + + const MDRawContextX86 *md_raw_context = md_context->GetContextX86(); + ASSERT_TRUE(md_raw_context != NULL); + ASSERT_EQ((uint32_t) (MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL), + (md_raw_context->context_flags + & (MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL))); + EXPECT_EQ(0x3ecba80dU, raw_context.edi); + 
EXPECT_EQ(0x382583b9U, raw_context.esi); + EXPECT_EQ(0x7fccc03fU, raw_context.ebx); + EXPECT_EQ(0xf62f8ec2U, raw_context.edx); + EXPECT_EQ(0x46a6a6a8U, raw_context.ecx); + EXPECT_EQ(0x6a5025e2U, raw_context.eax); + EXPECT_EQ(0xd9fabb4aU, raw_context.ebp); + EXPECT_EQ(kExpectedEIP, raw_context.eip); + EXPECT_EQ(0xbffe6edaU, raw_context.cs); + EXPECT_EQ(0xb2ce1e2dU, raw_context.eflags); + EXPECT_EQ(0x659caaa4U, raw_context.esp); + EXPECT_EQ(0x2e951ef7U, raw_context.ss); +} + +TEST(Dump, ThreadMissingMemory) { + Dump dump(0, kLittleEndian); + Memory stack(dump, 0x2326a0fa); + // Stack has no contents. + + MDRawContextX86 raw_context; + memset(&raw_context, 0, sizeof(raw_context)); + raw_context.context_flags = MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL; + Context context(dump, raw_context); + + Thread thread(dump, 0xa898f11b, stack, context, + 0x9e39439f, 0x4abfc15f, 0xe499898a, 0x0d43e939dcfd0372ULL); + + dump.Add(&stack); + dump.Add(&context); + dump.Add(&thread); + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(2U, minidump.GetDirectoryEntryCount()); + + // This should succeed even though the thread has no stack memory. + MinidumpThreadList* thread_list = minidump.GetThreadList(); + ASSERT_TRUE(thread_list != NULL); + ASSERT_EQ(1U, thread_list->thread_count()); + + MinidumpThread* md_thread = thread_list->GetThreadAtIndex(0); + ASSERT_TRUE(md_thread != NULL); + + uint32_t thread_id; + ASSERT_TRUE(md_thread->GetThreadID(&thread_id)); + ASSERT_EQ(0xa898f11bU, thread_id); + + MinidumpContext* md_context = md_thread->GetContext(); + ASSERT_NE(reinterpret_cast(NULL), md_context); + + MinidumpMemoryRegion* md_stack = md_thread->GetMemory(); + ASSERT_EQ(reinterpret_cast(NULL), md_stack); +} + +TEST(Dump, ThreadMissingContext) { + Dump dump(0, kLittleEndian); + Memory stack(dump, 0x2326a0fa); + stack.Append("stack for thread"); + + // Context is empty. + Context context(dump); + + Thread thread(dump, 0xa898f11b, stack, context, + 0x9e39439f, 0x4abfc15f, 0xe499898a, 0x0d43e939dcfd0372ULL); + + dump.Add(&stack); + dump.Add(&context); + dump.Add(&thread); + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(2U, minidump.GetDirectoryEntryCount()); + + // This should succeed even though the thread has no stack memory. 
+ MinidumpThreadList* thread_list = minidump.GetThreadList(); + ASSERT_TRUE(thread_list != NULL); + ASSERT_EQ(1U, thread_list->thread_count()); + + MinidumpThread* md_thread = thread_list->GetThreadAtIndex(0); + ASSERT_TRUE(md_thread != NULL); + + uint32_t thread_id; + ASSERT_TRUE(md_thread->GetThreadID(&thread_id)); + ASSERT_EQ(0xa898f11bU, thread_id); + MinidumpMemoryRegion* md_stack = md_thread->GetMemory(); + ASSERT_NE(reinterpret_cast(NULL), md_stack); + + MinidumpContext* md_context = md_thread->GetContext(); + ASSERT_EQ(reinterpret_cast(NULL), md_context); +} + +TEST(Dump, OneModule) { + static const MDVSFixedFileInfo fixed_file_info = { + 0xb2fba33a, // signature + 0x33d7a728, // struct_version + 0x31afcb20, // file_version_hi + 0xe51cdab1, // file_version_lo + 0xd1ea6907, // product_version_hi + 0x03032857, // product_version_lo + 0x11bf71d7, // file_flags_mask + 0x5fb8cdbf, // file_flags + 0xe45d0d5d, // file_os + 0x107d9562, // file_type + 0x5a8844d4, // file_subtype + 0xa8d30b20, // file_date_hi + 0x651c3e4e // file_date_lo + }; + + Dump dump(0, kBigEndian); + String module_name(dump, "single module"); + Module module(dump, 0xa90206ca83eb2852ULL, 0xada542bd, + module_name, + 0xb1054d2a, + 0x34571371, + fixed_file_info, // from synth_minidump_unittest_data.h + NULL, NULL); + + dump.Add(&module); + dump.Add(&module_name); + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(1U, minidump.GetDirectoryEntryCount()); + + const MDRawDirectory *dir = minidump.GetDirectoryEntryAtIndex(0); + ASSERT_TRUE(dir != NULL); + EXPECT_EQ((uint32_t) MD_MODULE_LIST_STREAM, dir->stream_type); + + MinidumpModuleList *md_module_list = minidump.GetModuleList(); + ASSERT_TRUE(md_module_list != NULL); + ASSERT_EQ(1U, md_module_list->module_count()); + + const MinidumpModule *md_module = md_module_list->GetModuleAtIndex(0); + ASSERT_TRUE(md_module != NULL); + ASSERT_EQ(0xa90206ca83eb2852ULL, md_module->base_address()); + ASSERT_EQ(0xada542bd, md_module->size()); + ASSERT_EQ("single module", md_module->code_file()); + + const MDRawModule *md_raw_module = md_module->module(); + ASSERT_TRUE(md_raw_module != NULL); + ASSERT_EQ(0xb1054d2aU, md_raw_module->time_date_stamp); + ASSERT_EQ(0x34571371U, md_raw_module->checksum); + ASSERT_TRUE(memcmp(&md_raw_module->version_info, &fixed_file_info, + sizeof(fixed_file_info)) == 0); +} + +TEST(Dump, OneSystemInfo) { + Dump dump(0, kLittleEndian); + String csd_version(dump, "Petulant Pierogi"); + SystemInfo system_info(dump, SystemInfo::windows_x86, csd_version); + + dump.Add(&system_info); + dump.Add(&csd_version); + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(1U, minidump.GetDirectoryEntryCount()); + + const MDRawDirectory *dir = minidump.GetDirectoryEntryAtIndex(0); + ASSERT_TRUE(dir != NULL); + EXPECT_EQ((uint32_t) MD_SYSTEM_INFO_STREAM, dir->stream_type); + + MinidumpSystemInfo *md_system_info = minidump.GetSystemInfo(); + ASSERT_TRUE(md_system_info != NULL); + ASSERT_EQ("windows", md_system_info->GetOS()); + ASSERT_EQ("x86", md_system_info->GetCPU()); + ASSERT_EQ("Petulant Pierogi", *md_system_info->GetCSDVersion()); + ASSERT_EQ("GenuineIntel", *md_system_info->GetCPUVendor()); +} + +TEST(Dump, BigDump) { + Dump dump(0, kLittleEndian); + + // A SystemInfo 
stream. + String csd_version(dump, "Munificent Macaque"); + SystemInfo system_info(dump, SystemInfo::windows_x86, csd_version); + dump.Add(&csd_version); + dump.Add(&system_info); + + // Five threads! + Memory stack0(dump, 0x70b9ebfc); + stack0.Append("stack for thread zero"); + MDRawContextX86 raw_context0; + raw_context0.context_flags = MD_CONTEXT_X86_INTEGER; + raw_context0.eip = 0xaf0709e4; + Context context0(dump, raw_context0); + Thread thread0(dump, 0xbbef4432, stack0, context0, + 0xd0377e7b, 0xdb8eb0cf, 0xd73bc314, 0x09d357bac7f9a163ULL); + dump.Add(&stack0); + dump.Add(&context0); + dump.Add(&thread0); + + Memory stack1(dump, 0xf988cc45); + stack1.Append("stack for thread one"); + MDRawContextX86 raw_context1; + raw_context1.context_flags = MD_CONTEXT_X86_INTEGER; + raw_context1.eip = 0xe4f56f81; + Context context1(dump, raw_context1); + Thread thread1(dump, 0x657c3f58, stack1, context1, + 0xa68fa182, 0x6f3cf8dd, 0xe3a78ccf, 0x78cc84775e4534bbULL); + dump.Add(&stack1); + dump.Add(&context1); + dump.Add(&thread1); + + Memory stack2(dump, 0xc8a92e7c); + stack2.Append("stack for thread two"); + MDRawContextX86 raw_context2; + raw_context2.context_flags = MD_CONTEXT_X86_INTEGER; + raw_context2.eip = 0xb336a438; + Context context2(dump, raw_context2); + Thread thread2(dump, 0xdf4b8a71, stack2, context2, + 0x674c26b6, 0x445d7120, 0x7e700c56, 0xd89bf778e7793e17ULL); + dump.Add(&stack2); + dump.Add(&context2); + dump.Add(&thread2); + + Memory stack3(dump, 0x36d08e08); + stack3.Append("stack for thread three"); + MDRawContextX86 raw_context3; + raw_context3.context_flags = MD_CONTEXT_X86_INTEGER; + raw_context3.eip = 0xdf99a60c; + Context context3(dump, raw_context3); + Thread thread3(dump, 0x86e6c341, stack3, context3, + 0x32dc5c55, 0x17a2aba8, 0xe0cc75e7, 0xa46393994dae83aeULL); + dump.Add(&stack3); + dump.Add(&context3); + dump.Add(&thread3); + + Memory stack4(dump, 0x1e0ab4fa); + stack4.Append("stack for thread four"); + MDRawContextX86 raw_context4; + raw_context4.context_flags = MD_CONTEXT_X86_INTEGER; + raw_context4.eip = 0xaa646267; + Context context4(dump, raw_context4); + Thread thread4(dump, 0x261a28d4, stack4, context4, + 0x6ebd389e, 0xa0cd4759, 0x30168846, 0x164f650a0cf39d35ULL); + dump.Add(&stack4); + dump.Add(&context4); + dump.Add(&thread4); + + // Three modules! + String module1_name(dump, "module one"); + Module module1(dump, 0xeb77da57b5d4cbdaULL, 0x83cd5a37, module1_name); + dump.Add(&module1_name); + dump.Add(&module1); + + String module2_name(dump, "module two"); + Module module2(dump, 0x8675884adfe5ac90ULL, 0xb11e4ea3, module2_name); + dump.Add(&module2_name); + dump.Add(&module2); + + String module3_name(dump, "module three"); + Module module3(dump, 0x95fc1544da321b6cULL, 0x7c2bf081, module3_name); + dump.Add(&module3_name); + dump.Add(&module3); + + // Add one more memory region, on top of the five stacks. + Memory memory5(dump, 0x61979e828040e564ULL); + memory5.Append("contents of memory 5"); + dump.Add(&memory5); + + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(4U, minidump.GetDirectoryEntryCount()); + + // Check the threads. 
+ MinidumpThreadList *thread_list = minidump.GetThreadList(); + ASSERT_TRUE(thread_list != NULL); + ASSERT_EQ(5U, thread_list->thread_count()); + uint32_t thread_id; + ASSERT_TRUE(thread_list->GetThreadAtIndex(0)->GetThreadID(&thread_id)); + ASSERT_EQ(0xbbef4432U, thread_id); + ASSERT_EQ(0x70b9ebfcU, + thread_list->GetThreadAtIndex(0)->GetMemory()->GetBase()); + ASSERT_EQ(0xaf0709e4U, + thread_list->GetThreadAtIndex(0)->GetContext()->GetContextX86() + ->eip); + + ASSERT_TRUE(thread_list->GetThreadAtIndex(1)->GetThreadID(&thread_id)); + ASSERT_EQ(0x657c3f58U, thread_id); + ASSERT_EQ(0xf988cc45U, + thread_list->GetThreadAtIndex(1)->GetMemory()->GetBase()); + ASSERT_EQ(0xe4f56f81U, + thread_list->GetThreadAtIndex(1)->GetContext()->GetContextX86() + ->eip); + + ASSERT_TRUE(thread_list->GetThreadAtIndex(2)->GetThreadID(&thread_id)); + ASSERT_EQ(0xdf4b8a71U, thread_id); + ASSERT_EQ(0xc8a92e7cU, + thread_list->GetThreadAtIndex(2)->GetMemory()->GetBase()); + ASSERT_EQ(0xb336a438U, + thread_list->GetThreadAtIndex(2)->GetContext()->GetContextX86() + ->eip); + + ASSERT_TRUE(thread_list->GetThreadAtIndex(3)->GetThreadID(&thread_id)); + ASSERT_EQ(0x86e6c341U, thread_id); + ASSERT_EQ(0x36d08e08U, + thread_list->GetThreadAtIndex(3)->GetMemory()->GetBase()); + ASSERT_EQ(0xdf99a60cU, + thread_list->GetThreadAtIndex(3)->GetContext()->GetContextX86() + ->eip); + + ASSERT_TRUE(thread_list->GetThreadAtIndex(4)->GetThreadID(&thread_id)); + ASSERT_EQ(0x261a28d4U, thread_id); + ASSERT_EQ(0x1e0ab4faU, + thread_list->GetThreadAtIndex(4)->GetMemory()->GetBase()); + ASSERT_EQ(0xaa646267U, + thread_list->GetThreadAtIndex(4)->GetContext()->GetContextX86() + ->eip); + + // Check the modules. + MinidumpModuleList *md_module_list = minidump.GetModuleList(); + ASSERT_TRUE(md_module_list != NULL); + ASSERT_EQ(3U, md_module_list->module_count()); + EXPECT_EQ(0xeb77da57b5d4cbdaULL, + md_module_list->GetModuleAtIndex(0)->base_address()); + EXPECT_EQ(0x8675884adfe5ac90ULL, + md_module_list->GetModuleAtIndex(1)->base_address()); + EXPECT_EQ(0x95fc1544da321b6cULL, + md_module_list->GetModuleAtIndex(2)->base_address()); +} + +TEST(Dump, OneMemoryInfo) { + Dump dump(0, kBigEndian); + Stream stream(dump, MD_MEMORY_INFO_LIST_STREAM); + + // Add the MDRawMemoryInfoList header. + const uint64_t kNumberOfEntries = 1; + stream.D32(sizeof(MDRawMemoryInfoList)) // size_of_header + .D32(sizeof(MDRawMemoryInfo)) // size_of_entry + .D64(kNumberOfEntries); // number_of_entries + + + // Now add a MDRawMemoryInfo entry. 
+ const uint64_t kBaseAddress = 0x1000; + const uint64_t kRegionSize = 0x2000; + stream.D64(kBaseAddress) // base_address + .D64(kBaseAddress) // allocation_base + .D32(MD_MEMORY_PROTECT_EXECUTE_READWRITE) // allocation_protection + .D32(0) // __alignment1 + .D64(kRegionSize) // region_size + .D32(MD_MEMORY_STATE_COMMIT) // state + .D32(MD_MEMORY_PROTECT_EXECUTE_READWRITE) // protection + .D32(MD_MEMORY_TYPE_PRIVATE) // type + .D32(0); // __alignment2 + + dump.Add(&stream); + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(1U, minidump.GetDirectoryEntryCount()); + + const MDRawDirectory *dir = minidump.GetDirectoryEntryAtIndex(0); + ASSERT_TRUE(dir != NULL); + EXPECT_EQ((uint32_t) MD_MEMORY_INFO_LIST_STREAM, dir->stream_type); + + MinidumpMemoryInfoList *info_list = minidump.GetMemoryInfoList(); + ASSERT_TRUE(info_list != NULL); + ASSERT_EQ(1U, info_list->info_count()); + + const MinidumpMemoryInfo *info1 = info_list->GetMemoryInfoAtIndex(0); + ASSERT_EQ(kBaseAddress, info1->GetBase()); + ASSERT_EQ(kRegionSize, info1->GetSize()); + ASSERT_TRUE(info1->IsExecutable()); + ASSERT_TRUE(info1->IsWritable()); + + // Should get back the same memory region here. + const MinidumpMemoryInfo *info2 = + info_list->GetMemoryInfoForAddress(kBaseAddress + kRegionSize / 2); + ASSERT_EQ(kBaseAddress, info2->GetBase()); + ASSERT_EQ(kRegionSize, info2->GetSize()); +} + +TEST(Dump, OneExceptionX86) { + Dump dump(0, kLittleEndian); + + MDRawContextX86 raw_context; + raw_context.context_flags = MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL; + raw_context.edi = 0x3ecba80d; + raw_context.esi = 0x382583b9; + raw_context.ebx = 0x7fccc03f; + raw_context.edx = 0xf62f8ec2; + raw_context.ecx = 0x46a6a6a8; + raw_context.eax = 0x6a5025e2; + raw_context.ebp = 0xd9fabb4a; + raw_context.eip = 0x6913f540; + raw_context.cs = 0xbffe6eda; + raw_context.eflags = 0xb2ce1e2d; + raw_context.esp = 0x659caaa4; + raw_context.ss = 0x2e951ef7; + Context context(dump, raw_context); + + Exception exception(dump, context, + 0x1234abcd, // thread id + 0xdcba4321, // exception code + 0xf0e0d0c0, // exception flags + 0x0919a9b9c9d9e9f9ULL); // exception address + + dump.Add(&context); + dump.Add(&exception); + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(1U, minidump.GetDirectoryEntryCount()); + + MinidumpException *md_exception = minidump.GetException(); + ASSERT_TRUE(md_exception != NULL); + + uint32_t thread_id; + ASSERT_TRUE(md_exception->GetThreadID(&thread_id)); + ASSERT_EQ(0x1234abcdU, thread_id); + + const MDRawExceptionStream* raw_exception = md_exception->exception(); + ASSERT_TRUE(raw_exception != NULL); + EXPECT_EQ(0xdcba4321, raw_exception->exception_record.exception_code); + EXPECT_EQ(0xf0e0d0c0, raw_exception->exception_record.exception_flags); + EXPECT_EQ(0x0919a9b9c9d9e9f9ULL, + raw_exception->exception_record.exception_address); + + MinidumpContext *md_context = md_exception->GetContext(); + ASSERT_TRUE(md_context != NULL); + ASSERT_EQ((uint32_t) MD_CONTEXT_X86, md_context->GetContextCPU()); + const MDRawContextX86 *md_raw_context = md_context->GetContextX86(); + ASSERT_TRUE(md_raw_context != NULL); + ASSERT_EQ((uint32_t) (MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL), + (md_raw_context->context_flags + & 
(MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL))); + EXPECT_EQ(0x3ecba80dU, raw_context.edi); + EXPECT_EQ(0x382583b9U, raw_context.esi); + EXPECT_EQ(0x7fccc03fU, raw_context.ebx); + EXPECT_EQ(0xf62f8ec2U, raw_context.edx); + EXPECT_EQ(0x46a6a6a8U, raw_context.ecx); + EXPECT_EQ(0x6a5025e2U, raw_context.eax); + EXPECT_EQ(0xd9fabb4aU, raw_context.ebp); + EXPECT_EQ(0x6913f540U, raw_context.eip); + EXPECT_EQ(0xbffe6edaU, raw_context.cs); + EXPECT_EQ(0xb2ce1e2dU, raw_context.eflags); + EXPECT_EQ(0x659caaa4U, raw_context.esp); + EXPECT_EQ(0x2e951ef7U, raw_context.ss); +} + +TEST(Dump, OneExceptionX86XState) { + Dump dump(0, kLittleEndian); + + MDRawContextX86 raw_context; + raw_context.context_flags = MD_CONTEXT_X86_INTEGER | + MD_CONTEXT_X86_CONTROL | MD_CONTEXT_X86_XSTATE; + raw_context.edi = 0x3ecba80d; + raw_context.esi = 0x382583b9; + raw_context.ebx = 0x7fccc03f; + raw_context.edx = 0xf62f8ec2; + raw_context.ecx = 0x46a6a6a8; + raw_context.eax = 0x6a5025e2; + raw_context.ebp = 0xd9fabb4a; + raw_context.eip = 0x6913f540; + raw_context.cs = 0xbffe6eda; + raw_context.eflags = 0xb2ce1e2d; + raw_context.esp = 0x659caaa4; + raw_context.ss = 0x2e951ef7; + Context context(dump, raw_context); + + Exception exception(dump, context, + 0x1234abcd, // thread id + 0xdcba4321, // exception code + 0xf0e0d0c0, // exception flags + 0x0919a9b9c9d9e9f9ULL); // exception address + + dump.Add(&context); + dump.Add(&exception); + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(1U, minidump.GetDirectoryEntryCount()); + + MinidumpException *md_exception = minidump.GetException(); + ASSERT_TRUE(md_exception != NULL); + + uint32_t thread_id; + ASSERT_TRUE(md_exception->GetThreadID(&thread_id)); + ASSERT_EQ(0x1234abcdU, thread_id); + + const MDRawExceptionStream* raw_exception = md_exception->exception(); + ASSERT_TRUE(raw_exception != NULL); + EXPECT_EQ(0xdcba4321, raw_exception->exception_record.exception_code); + EXPECT_EQ(0xf0e0d0c0, raw_exception->exception_record.exception_flags); + EXPECT_EQ(0x0919a9b9c9d9e9f9ULL, + raw_exception->exception_record.exception_address); + + MinidumpContext *md_context = md_exception->GetContext(); + ASSERT_TRUE(md_context != NULL); + ASSERT_EQ((uint32_t) MD_CONTEXT_X86, md_context->GetContextCPU()); + const MDRawContextX86 *md_raw_context = md_context->GetContextX86(); + ASSERT_TRUE(md_raw_context != NULL); + ASSERT_EQ((uint32_t) (MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL), + (md_raw_context->context_flags + & (MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL))); + EXPECT_EQ(0x3ecba80dU, raw_context.edi); + EXPECT_EQ(0x382583b9U, raw_context.esi); + EXPECT_EQ(0x7fccc03fU, raw_context.ebx); + EXPECT_EQ(0xf62f8ec2U, raw_context.edx); + EXPECT_EQ(0x46a6a6a8U, raw_context.ecx); + EXPECT_EQ(0x6a5025e2U, raw_context.eax); + EXPECT_EQ(0xd9fabb4aU, raw_context.ebp); + EXPECT_EQ(0x6913f540U, raw_context.eip); + EXPECT_EQ(0xbffe6edaU, raw_context.cs); + EXPECT_EQ(0xb2ce1e2dU, raw_context.eflags); + EXPECT_EQ(0x659caaa4U, raw_context.esp); + EXPECT_EQ(0x2e951ef7U, raw_context.ss); +} + +// Testing that the CPU type can be loaded from a system info stream when +// the CPU flags are missing from the context_flags of an exception record +TEST(Dump, OneExceptionX86NoCPUFlags) { + Dump dump(0, kLittleEndian); + + MDRawContextX86 raw_context; + // Intentionally not setting CPU type in the context_flags + raw_context.context_flags = 0; 
+ raw_context.edi = 0x3ecba80d; + raw_context.esi = 0x382583b9; + raw_context.ebx = 0x7fccc03f; + raw_context.edx = 0xf62f8ec2; + raw_context.ecx = 0x46a6a6a8; + raw_context.eax = 0x6a5025e2; + raw_context.ebp = 0xd9fabb4a; + raw_context.eip = 0x6913f540; + raw_context.cs = 0xbffe6eda; + raw_context.eflags = 0xb2ce1e2d; + raw_context.esp = 0x659caaa4; + raw_context.ss = 0x2e951ef7; + Context context(dump, raw_context); + + Exception exception(dump, context, + 0x1234abcd, // thread id + 0xdcba4321, // exception code + 0xf0e0d0c0, // exception flags + 0x0919a9b9c9d9e9f9ULL); // exception address + + dump.Add(&context); + dump.Add(&exception); + + // Add system info. This is needed as an alternative source for CPU type + // information. Note, that the CPU flags were intentionally skipped from + // the context_flags and this alternative source is required. + String csd_version(dump, "Service Pack 2"); + SystemInfo system_info(dump, SystemInfo::windows_x86, csd_version); + dump.Add(&system_info); + dump.Add(&csd_version); + + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(2U, minidump.GetDirectoryEntryCount()); + + MinidumpException *md_exception = minidump.GetException(); + ASSERT_TRUE(md_exception != NULL); + + uint32_t thread_id; + ASSERT_TRUE(md_exception->GetThreadID(&thread_id)); + ASSERT_EQ(0x1234abcdU, thread_id); + + const MDRawExceptionStream* raw_exception = md_exception->exception(); + ASSERT_TRUE(raw_exception != NULL); + EXPECT_EQ(0xdcba4321, raw_exception->exception_record.exception_code); + EXPECT_EQ(0xf0e0d0c0, raw_exception->exception_record.exception_flags); + EXPECT_EQ(0x0919a9b9c9d9e9f9ULL, + raw_exception->exception_record.exception_address); + + MinidumpContext *md_context = md_exception->GetContext(); + ASSERT_TRUE(md_context != NULL); + + ASSERT_EQ((uint32_t) MD_CONTEXT_X86, md_context->GetContextCPU()); + const MDRawContextX86 *md_raw_context = md_context->GetContextX86(); + ASSERT_TRUE(md_raw_context != NULL); + + // Even though the CPU flags were missing from the context_flags, the + // GetContext call above is expected to load the missing CPU flags from the + // system info stream and set the CPU type bits in context_flags. + ASSERT_EQ((uint32_t) (MD_CONTEXT_X86), md_raw_context->context_flags); + + EXPECT_EQ(0x3ecba80dU, raw_context.edi); + EXPECT_EQ(0x382583b9U, raw_context.esi); + EXPECT_EQ(0x7fccc03fU, raw_context.ebx); + EXPECT_EQ(0xf62f8ec2U, raw_context.edx); + EXPECT_EQ(0x46a6a6a8U, raw_context.ecx); + EXPECT_EQ(0x6a5025e2U, raw_context.eax); + EXPECT_EQ(0xd9fabb4aU, raw_context.ebp); + EXPECT_EQ(0x6913f540U, raw_context.eip); + EXPECT_EQ(0xbffe6edaU, raw_context.cs); + EXPECT_EQ(0xb2ce1e2dU, raw_context.eflags); + EXPECT_EQ(0x659caaa4U, raw_context.esp); + EXPECT_EQ(0x2e951ef7U, raw_context.ss); +} + +// This test covers a scenario where a dump contains an exception but the +// context record of the exception is missing the CPU type information in its +// context_flags. The dump has no system info stream so it is imposible to +// deduce the CPU type, hence the context record is unusable. 
+TEST(Dump, OneExceptionX86NoCPUFlagsNoSystemInfo) { + Dump dump(0, kLittleEndian); + + MDRawContextX86 raw_context; + // Intentionally not setting CPU type in the context_flags + raw_context.context_flags = 0; + raw_context.edi = 0x3ecba80d; + raw_context.esi = 0x382583b9; + raw_context.ebx = 0x7fccc03f; + raw_context.edx = 0xf62f8ec2; + raw_context.ecx = 0x46a6a6a8; + raw_context.eax = 0x6a5025e2; + raw_context.ebp = 0xd9fabb4a; + raw_context.eip = 0x6913f540; + raw_context.cs = 0xbffe6eda; + raw_context.eflags = 0xb2ce1e2d; + raw_context.esp = 0x659caaa4; + raw_context.ss = 0x2e951ef7; + Context context(dump, raw_context); + + Exception exception(dump, context, + 0x1234abcd, // thread id + 0xdcba4321, // exception code + 0xf0e0d0c0, // exception flags + 0x0919a9b9c9d9e9f9ULL); // exception address + + dump.Add(&context); + dump.Add(&exception); + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(1U, minidump.GetDirectoryEntryCount()); + + MinidumpException *md_exception = minidump.GetException(); + ASSERT_TRUE(md_exception != NULL); + + uint32_t thread_id; + ASSERT_TRUE(md_exception->GetThreadID(&thread_id)); + ASSERT_EQ(0x1234abcdU, thread_id); + + const MDRawExceptionStream* raw_exception = md_exception->exception(); + ASSERT_TRUE(raw_exception != NULL); + EXPECT_EQ(0xdcba4321, raw_exception->exception_record.exception_code); + EXPECT_EQ(0xf0e0d0c0, raw_exception->exception_record.exception_flags); + EXPECT_EQ(0x0919a9b9c9d9e9f9ULL, + raw_exception->exception_record.exception_address); + + // The context record of the exception is unusable because the context_flags + // don't have CPU type information and at the same time the minidump lacks + // system info stream so it is impossible to deduce the CPU type. 
+ MinidumpContext *md_context = md_exception->GetContext(); + ASSERT_EQ(NULL, md_context); +} + +TEST(Dump, OneExceptionARM) { + Dump dump(0, kLittleEndian); + + MDRawContextARM raw_context; + raw_context.context_flags = MD_CONTEXT_ARM_INTEGER; + raw_context.iregs[0] = 0x3ecba80d; + raw_context.iregs[1] = 0x382583b9; + raw_context.iregs[2] = 0x7fccc03f; + raw_context.iregs[3] = 0xf62f8ec2; + raw_context.iregs[4] = 0x46a6a6a8; + raw_context.iregs[5] = 0x6a5025e2; + raw_context.iregs[6] = 0xd9fabb4a; + raw_context.iregs[7] = 0x6913f540; + raw_context.iregs[8] = 0xbffe6eda; + raw_context.iregs[9] = 0xb2ce1e2d; + raw_context.iregs[10] = 0x659caaa4; + raw_context.iregs[11] = 0xf0e0d0c0; + raw_context.iregs[12] = 0xa9b8c7d6; + raw_context.iregs[13] = 0x12345678; + raw_context.iregs[14] = 0xabcd1234; + raw_context.iregs[15] = 0x10203040; + raw_context.cpsr = 0x2e951ef7; + Context context(dump, raw_context); + + Exception exception(dump, context, + 0x1234abcd, // thread id + 0xdcba4321, // exception code + 0xf0e0d0c0, // exception flags + 0x0919a9b9c9d9e9f9ULL); // exception address + + dump.Add(&context); + dump.Add(&exception); + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(1U, minidump.GetDirectoryEntryCount()); + + MinidumpException *md_exception = minidump.GetException(); + ASSERT_TRUE(md_exception != NULL); + + uint32_t thread_id; + ASSERT_TRUE(md_exception->GetThreadID(&thread_id)); + ASSERT_EQ(0x1234abcdU, thread_id); + + const MDRawExceptionStream* raw_exception = md_exception->exception(); + ASSERT_TRUE(raw_exception != NULL); + EXPECT_EQ(0xdcba4321, raw_exception->exception_record.exception_code); + EXPECT_EQ(0xf0e0d0c0, raw_exception->exception_record.exception_flags); + EXPECT_EQ(0x0919a9b9c9d9e9f9ULL, + raw_exception->exception_record.exception_address); + + MinidumpContext *md_context = md_exception->GetContext(); + ASSERT_TRUE(md_context != NULL); + ASSERT_EQ((uint32_t) MD_CONTEXT_ARM, md_context->GetContextCPU()); + const MDRawContextARM *md_raw_context = md_context->GetContextARM(); + ASSERT_TRUE(md_raw_context != NULL); + ASSERT_EQ((uint32_t) MD_CONTEXT_ARM_INTEGER, + (md_raw_context->context_flags + & MD_CONTEXT_ARM_INTEGER)); + EXPECT_EQ(0x3ecba80dU, raw_context.iregs[0]); + EXPECT_EQ(0x382583b9U, raw_context.iregs[1]); + EXPECT_EQ(0x7fccc03fU, raw_context.iregs[2]); + EXPECT_EQ(0xf62f8ec2U, raw_context.iregs[3]); + EXPECT_EQ(0x46a6a6a8U, raw_context.iregs[4]); + EXPECT_EQ(0x6a5025e2U, raw_context.iregs[5]); + EXPECT_EQ(0xd9fabb4aU, raw_context.iregs[6]); + EXPECT_EQ(0x6913f540U, raw_context.iregs[7]); + EXPECT_EQ(0xbffe6edaU, raw_context.iregs[8]); + EXPECT_EQ(0xb2ce1e2dU, raw_context.iregs[9]); + EXPECT_EQ(0x659caaa4U, raw_context.iregs[10]); + EXPECT_EQ(0xf0e0d0c0U, raw_context.iregs[11]); + EXPECT_EQ(0xa9b8c7d6U, raw_context.iregs[12]); + EXPECT_EQ(0x12345678U, raw_context.iregs[13]); + EXPECT_EQ(0xabcd1234U, raw_context.iregs[14]); + EXPECT_EQ(0x10203040U, raw_context.iregs[15]); + EXPECT_EQ(0x2e951ef7U, raw_context.cpsr); +} + +TEST(Dump, OneExceptionARMOldFlags) { + Dump dump(0, kLittleEndian); + + MDRawContextARM raw_context; + // MD_CONTEXT_ARM_INTEGER, but with _OLD + raw_context.context_flags = MD_CONTEXT_ARM_OLD | 0x00000002; + raw_context.iregs[0] = 0x3ecba80d; + raw_context.iregs[1] = 0x382583b9; + raw_context.iregs[2] = 0x7fccc03f; + raw_context.iregs[3] = 0xf62f8ec2; + raw_context.iregs[4] = 
0x46a6a6a8; + raw_context.iregs[5] = 0x6a5025e2; + raw_context.iregs[6] = 0xd9fabb4a; + raw_context.iregs[7] = 0x6913f540; + raw_context.iregs[8] = 0xbffe6eda; + raw_context.iregs[9] = 0xb2ce1e2d; + raw_context.iregs[10] = 0x659caaa4; + raw_context.iregs[11] = 0xf0e0d0c0; + raw_context.iregs[12] = 0xa9b8c7d6; + raw_context.iregs[13] = 0x12345678; + raw_context.iregs[14] = 0xabcd1234; + raw_context.iregs[15] = 0x10203040; + raw_context.cpsr = 0x2e951ef7; + Context context(dump, raw_context); + + Exception exception(dump, context, + 0x1234abcd, // thread id + 0xdcba4321, // exception code + 0xf0e0d0c0, // exception flags + 0x0919a9b9c9d9e9f9ULL); // exception address + + dump.Add(&context); + dump.Add(&exception); + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(1U, minidump.GetDirectoryEntryCount()); + + MinidumpException *md_exception = minidump.GetException(); + ASSERT_TRUE(md_exception != NULL); + + uint32_t thread_id; + ASSERT_TRUE(md_exception->GetThreadID(&thread_id)); + ASSERT_EQ(0x1234abcdU, thread_id); + + const MDRawExceptionStream* raw_exception = md_exception->exception(); + ASSERT_TRUE(raw_exception != NULL); + EXPECT_EQ(0xdcba4321, raw_exception->exception_record.exception_code); + EXPECT_EQ(0xf0e0d0c0, raw_exception->exception_record.exception_flags); + EXPECT_EQ(0x0919a9b9c9d9e9f9ULL, + raw_exception->exception_record.exception_address); + + MinidumpContext *md_context = md_exception->GetContext(); + ASSERT_TRUE(md_context != NULL); + ASSERT_EQ((uint32_t) MD_CONTEXT_ARM, md_context->GetContextCPU()); + const MDRawContextARM *md_raw_context = md_context->GetContextARM(); + ASSERT_TRUE(md_raw_context != NULL); + ASSERT_EQ((uint32_t) MD_CONTEXT_ARM_INTEGER, + (md_raw_context->context_flags + & MD_CONTEXT_ARM_INTEGER)); + EXPECT_EQ(0x3ecba80dU, raw_context.iregs[0]); + EXPECT_EQ(0x382583b9U, raw_context.iregs[1]); + EXPECT_EQ(0x7fccc03fU, raw_context.iregs[2]); + EXPECT_EQ(0xf62f8ec2U, raw_context.iregs[3]); + EXPECT_EQ(0x46a6a6a8U, raw_context.iregs[4]); + EXPECT_EQ(0x6a5025e2U, raw_context.iregs[5]); + EXPECT_EQ(0xd9fabb4aU, raw_context.iregs[6]); + EXPECT_EQ(0x6913f540U, raw_context.iregs[7]); + EXPECT_EQ(0xbffe6edaU, raw_context.iregs[8]); + EXPECT_EQ(0xb2ce1e2dU, raw_context.iregs[9]); + EXPECT_EQ(0x659caaa4U, raw_context.iregs[10]); + EXPECT_EQ(0xf0e0d0c0U, raw_context.iregs[11]); + EXPECT_EQ(0xa9b8c7d6U, raw_context.iregs[12]); + EXPECT_EQ(0x12345678U, raw_context.iregs[13]); + EXPECT_EQ(0xabcd1234U, raw_context.iregs[14]); + EXPECT_EQ(0x10203040U, raw_context.iregs[15]); + EXPECT_EQ(0x2e951ef7U, raw_context.cpsr); +} + +TEST(Dump, OneExceptionMIPS) { + Dump dump(0, kLittleEndian); + + MDRawContextMIPS raw_context; + raw_context.context_flags = MD_CONTEXT_MIPS_INTEGER; + raw_context.iregs[0] = 0x3ecba80d; + raw_context.iregs[1] = 0x382583b9; + raw_context.iregs[2] = 0x7fccc03f; + raw_context.iregs[3] = 0xf62f8ec2; + raw_context.iregs[4] = 0x46a6a6a8; + raw_context.iregs[5] = 0x6a5025e2; + raw_context.iregs[6] = 0xd9fabb4a; + raw_context.iregs[7] = 0x6913f540; + raw_context.iregs[8] = 0xbffe6eda; + raw_context.iregs[9] = 0xb2ce1e2d; + raw_context.iregs[10] = 0x659caaa4; + raw_context.iregs[11] = 0xf0e0d0c0; + raw_context.iregs[12] = 0xa9b8c7d6; + raw_context.iregs[13] = 0x12345678; + raw_context.iregs[14] = 0xabcd1234; + raw_context.iregs[15] = 0x10203040; + raw_context.iregs[16] = 0xa80d3ecb; + 
raw_context.iregs[17] = 0x83b93825; + raw_context.iregs[18] = 0xc03f7fcc; + raw_context.iregs[19] = 0x8ec2f62f; + raw_context.iregs[20] = 0xa6a846a6; + raw_context.iregs[21] = 0x25e26a50; + raw_context.iregs[22] = 0xbb4ad9fa; + raw_context.iregs[23] = 0xf5406913; + raw_context.iregs[24] = 0x6edabffe; + raw_context.iregs[25] = 0x1e2db2ce; + raw_context.iregs[26] = 0xaaa4659c; + raw_context.iregs[27] = 0xd0c0f0e0; + raw_context.iregs[28] = 0xc7d6a9b8; + raw_context.iregs[29] = 0x56781234; + raw_context.iregs[30] = 0x1234abcd; + raw_context.iregs[31] = 0x30401020; + + Context context(dump, raw_context); + + Exception exception(dump, context, + 0x1234abcd, // Thread id. + 0xdcba4321, // Exception code. + 0xf0e0d0c0, // Exception flags. + 0x0919a9b9); // Exception address. + + dump.Add(&context); + dump.Add(&exception); + dump.Finish(); + + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + + istringstream minidump_stream(contents); + Minidump minidump(minidump_stream); + ASSERT_TRUE(minidump.Read()); + ASSERT_EQ(1U, minidump.GetDirectoryEntryCount()); + + MinidumpException *md_exception = minidump.GetException(); + ASSERT_TRUE(md_exception != NULL); + + uint32_t thread_id; + ASSERT_TRUE(md_exception->GetThreadID(&thread_id)); + ASSERT_EQ(0x1234abcdU, thread_id); + + const MDRawExceptionStream* raw_exception = md_exception->exception(); + ASSERT_TRUE(raw_exception != NULL); + EXPECT_EQ(0xdcba4321, raw_exception->exception_record.exception_code); + EXPECT_EQ(0xf0e0d0c0, raw_exception->exception_record.exception_flags); + EXPECT_EQ(0x0919a9b9U, + raw_exception->exception_record.exception_address); + + MinidumpContext* md_context = md_exception->GetContext(); + ASSERT_TRUE(md_context != NULL); + ASSERT_EQ((uint32_t) MD_CONTEXT_MIPS, md_context->GetContextCPU()); + const MDRawContextMIPS* md_raw_context = md_context->GetContextMIPS(); + ASSERT_TRUE(md_raw_context != NULL); + ASSERT_EQ((uint32_t) MD_CONTEXT_MIPS_INTEGER, + (md_raw_context->context_flags & MD_CONTEXT_MIPS_INTEGER)); + EXPECT_EQ(0x3ecba80dU, raw_context.iregs[0]); + EXPECT_EQ(0x382583b9U, raw_context.iregs[1]); + EXPECT_EQ(0x7fccc03fU, raw_context.iregs[2]); + EXPECT_EQ(0xf62f8ec2U, raw_context.iregs[3]); + EXPECT_EQ(0x46a6a6a8U, raw_context.iregs[4]); + EXPECT_EQ(0x6a5025e2U, raw_context.iregs[5]); + EXPECT_EQ(0xd9fabb4aU, raw_context.iregs[6]); + EXPECT_EQ(0x6913f540U, raw_context.iregs[7]); + EXPECT_EQ(0xbffe6edaU, raw_context.iregs[8]); + EXPECT_EQ(0xb2ce1e2dU, raw_context.iregs[9]); + EXPECT_EQ(0x659caaa4U, raw_context.iregs[10]); + EXPECT_EQ(0xf0e0d0c0U, raw_context.iregs[11]); + EXPECT_EQ(0xa9b8c7d6U, raw_context.iregs[12]); + EXPECT_EQ(0x12345678U, raw_context.iregs[13]); + EXPECT_EQ(0xabcd1234U, raw_context.iregs[14]); + EXPECT_EQ(0x10203040U, raw_context.iregs[15]); + EXPECT_EQ(0xa80d3ecbU, raw_context.iregs[16]); + EXPECT_EQ(0x83b93825U, raw_context.iregs[17]); + EXPECT_EQ(0xc03f7fccU, raw_context.iregs[18]); + EXPECT_EQ(0x8ec2f62fU, raw_context.iregs[19]); + EXPECT_EQ(0xa6a846a6U, raw_context.iregs[20]); + EXPECT_EQ(0x25e26a50U, raw_context.iregs[21]); + EXPECT_EQ(0xbb4ad9faU, raw_context.iregs[22]); + EXPECT_EQ(0xf5406913U, raw_context.iregs[23]); + EXPECT_EQ(0x6edabffeU, raw_context.iregs[24]); + EXPECT_EQ(0x1e2db2ceU, raw_context.iregs[25]); + EXPECT_EQ(0xaaa4659cU, raw_context.iregs[26]); + EXPECT_EQ(0xd0c0f0e0U, raw_context.iregs[27]); + EXPECT_EQ(0xc7d6a9b8U, raw_context.iregs[28]); + EXPECT_EQ(0x56781234U, raw_context.iregs[29]); + EXPECT_EQ(0x1234abcdU, raw_context.iregs[30]); + EXPECT_EQ(0x30401020U, 
raw_context.iregs[31]); +} + +} // namespace diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/module_comparer.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/module_comparer.cc new file mode 100644 index 0000000000..025ab883a3 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/module_comparer.cc @@ -0,0 +1,302 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// module_comparer.cc: ModuleComparer implementation. +// See module_comparer.h for documentation. +// +// Author: lambxsy@google.com (Siyang Xie) + +#include "processor/module_comparer.h" + +#include +#include + +#include "common/scoped_ptr.h" +#include "processor/basic_code_module.h" +#include "processor/logging.h" + +#define ASSERT_TRUE(condition) \ + if (!(condition)) { \ + BPLOG(ERROR) << "FAIL: " << #condition << " @ " \ + << __FILE__ << ":" << __LINE__; \ + return false; \ + } + +#define ASSERT_FALSE(condition) ASSERT_TRUE(!(condition)) + +namespace google_breakpad { + +bool ModuleComparer::Compare(const string &symbol_data) { + scoped_ptr basic_module(new BasicModule("test_module")); + scoped_ptr fast_module(new FastModule("test_module")); + + // Load symbol data into basic_module + scoped_array buffer(new char[symbol_data.size() + 1]); + memcpy(buffer.get(), symbol_data.c_str(), symbol_data.size()); + buffer.get()[symbol_data.size()] = '\0'; + ASSERT_TRUE(basic_module->LoadMapFromMemory(buffer.get(), + symbol_data.size() + 1)); + buffer.reset(); + + // Serialize BasicSourceLineResolver::Module. + unsigned int serialized_size = 0; + scoped_array serialized_data( + serializer_.Serialize(*(basic_module.get()), &serialized_size)); + ASSERT_TRUE(serialized_data.get()); + BPLOG(INFO) << "Serialized size = " << serialized_size << " Bytes"; + + // Load FastSourceLineResolver::Module using serialized data. 
+ ASSERT_TRUE(fast_module->LoadMapFromMemory(serialized_data.get(), + serialized_size)); + ASSERT_TRUE(fast_module->IsCorrupt() == basic_module->IsCorrupt()); + + // Compare FastSourceLineResolver::Module with + // BasicSourceLineResolver::Module. + ASSERT_TRUE(CompareModule(basic_module.get(), fast_module.get())); + + return true; +} + +// Traversal the content of module and do comparison +bool ModuleComparer::CompareModule(const BasicModule *basic_module, + const FastModule *fast_module) const { + // Compare name_. + ASSERT_TRUE(basic_module->name_ == fast_module->name_); + + // Compare files_: + { + BasicModule::FileMap::const_iterator iter1 = basic_module->files_.begin(); + FastModule::FileMap::iterator iter2 = fast_module->files_.begin(); + while (iter1 != basic_module->files_.end() + && iter2 != fast_module->files_.end()) { + ASSERT_TRUE(iter1->first == iter2.GetKey()); + string tmp(iter2.GetValuePtr()); + ASSERT_TRUE(iter1->second == tmp); + ++iter1; + ++iter2; + } + ASSERT_TRUE(iter1 == basic_module->files_.end()); + ASSERT_TRUE(iter2 == fast_module->files_.end()); + } + + // Compare functions_: + { + RangeMap >::MapConstIterator iter1; + StaticRangeMap::MapConstIterator iter2; + iter1 = basic_module->functions_.map_.begin(); + iter2 = fast_module->functions_.map_.begin(); + while (iter1 != basic_module->functions_.map_.end() + && iter2 != fast_module->functions_.map_.end()) { + ASSERT_TRUE(iter1->first == iter2.GetKey()); + ASSERT_TRUE(iter1->second.base() == iter2.GetValuePtr()->base()); + ASSERT_TRUE(CompareFunction( + iter1->second.entry().get(), iter2.GetValuePtr()->entryptr())); + ++iter1; + ++iter2; + } + ASSERT_TRUE(iter1 == basic_module->functions_.map_.end()); + ASSERT_TRUE(iter2 == fast_module->functions_.map_.end()); + } + + // Compare public_symbols_: + { + AddressMap >::MapConstIterator iter1; + StaticAddressMap::MapConstIterator iter2; + iter1 = basic_module->public_symbols_.map_.begin(); + iter2 = fast_module->public_symbols_.map_.begin(); + while (iter1 != basic_module->public_symbols_.map_.end() + && iter2 != fast_module->public_symbols_.map_.end()) { + ASSERT_TRUE(iter1->first == iter2.GetKey()); + ASSERT_TRUE(ComparePubSymbol( + iter1->second.get(), iter2.GetValuePtr())); + ++iter1; + ++iter2; + } + ASSERT_TRUE(iter1 == basic_module->public_symbols_.map_.end()); + ASSERT_TRUE(iter2 == fast_module->public_symbols_.map_.end()); + } + + // Compare windows_frame_info_[]: + for (int i = 0; i < WindowsFrameInfo::STACK_INFO_LAST; ++i) { + ASSERT_TRUE(CompareCRM(&(basic_module->windows_frame_info_[i]), + &(fast_module->windows_frame_info_[i]))); + } + + // Compare cfi_initial_rules_: + { + RangeMap::MapConstIterator iter1; + StaticRangeMap::MapConstIterator iter2; + iter1 = basic_module->cfi_initial_rules_.map_.begin(); + iter2 = fast_module->cfi_initial_rules_.map_.begin(); + while (iter1 != basic_module->cfi_initial_rules_.map_.end() + && iter2 != fast_module->cfi_initial_rules_.map_.end()) { + ASSERT_TRUE(iter1->first == iter2.GetKey()); + ASSERT_TRUE(iter1->second.base() == iter2.GetValuePtr()->base()); + string tmp(iter2.GetValuePtr()->entryptr()); + ASSERT_TRUE(iter1->second.entry() == tmp); + ++iter1; + ++iter2; + } + ASSERT_TRUE(iter1 == basic_module->cfi_initial_rules_.map_.end()); + ASSERT_TRUE(iter2 == fast_module->cfi_initial_rules_.map_.end()); + } + + // Compare cfi_delta_rules_: + { + map::const_iterator iter1; + StaticMap::iterator iter2; + iter1 = basic_module->cfi_delta_rules_.begin(); + iter2 = fast_module->cfi_delta_rules_.begin(); + while (iter1 != 
basic_module->cfi_delta_rules_.end() + && iter2 != fast_module->cfi_delta_rules_.end()) { + ASSERT_TRUE(iter1->first == iter2.GetKey()); + string tmp(iter2.GetValuePtr()); + ASSERT_TRUE(iter1->second == tmp); + ++iter1; + ++iter2; + } + ASSERT_TRUE(iter1 == basic_module->cfi_delta_rules_.end()); + ASSERT_TRUE(iter2 == fast_module->cfi_delta_rules_.end()); + } + + return true; +} + +bool ModuleComparer::CompareFunction(const BasicFunc *basic_func, + const FastFunc *fast_func_raw) const { + FastFunc* fast_func = new FastFunc(); + fast_func->CopyFrom(fast_func_raw); + ASSERT_TRUE(basic_func->name == fast_func->name); + ASSERT_TRUE(basic_func->address == fast_func->address); + ASSERT_TRUE(basic_func->size == fast_func->size); + + // compare range map of lines: + RangeMap >::MapConstIterator iter1; + StaticRangeMap::MapConstIterator iter2; + iter1 = basic_func->lines.map_.begin(); + iter2 = fast_func->lines.map_.begin(); + while (iter1 != basic_func->lines.map_.end() + && iter2 != fast_func->lines.map_.end()) { + ASSERT_TRUE(iter1->first == iter2.GetKey()); + ASSERT_TRUE(iter1->second.base() == iter2.GetValuePtr()->base()); + ASSERT_TRUE(CompareLine(iter1->second.entry().get(), + iter2.GetValuePtr()->entryptr())); + ++iter1; + ++iter2; + } + ASSERT_TRUE(iter1 == basic_func->lines.map_.end()); + ASSERT_TRUE(iter2 == fast_func->lines.map_.end()); + + delete fast_func; + return true; +} + +bool ModuleComparer::CompareLine(const BasicLine *basic_line, + const FastLine *fast_line_raw) const { + FastLine *fast_line = new FastLine; + fast_line->CopyFrom(fast_line_raw); + + ASSERT_TRUE(basic_line->address == fast_line->address); + ASSERT_TRUE(basic_line->size == fast_line->size); + ASSERT_TRUE(basic_line->source_file_id == fast_line->source_file_id); + ASSERT_TRUE(basic_line->line == fast_line->line); + + delete fast_line; + return true; +} + +bool ModuleComparer::ComparePubSymbol(const BasicPubSymbol* basic_ps, + const FastPubSymbol* fastps_raw) const { + FastPubSymbol *fast_ps = new FastPubSymbol; + fast_ps->CopyFrom(fastps_raw); + ASSERT_TRUE(basic_ps->name == fast_ps->name); + ASSERT_TRUE(basic_ps->address == fast_ps->address); + ASSERT_TRUE(basic_ps->parameter_size == fast_ps->parameter_size); + delete fast_ps; + return true; +} + +bool ModuleComparer::CompareWFI(const WindowsFrameInfo& wfi1, + const WindowsFrameInfo& wfi2) const { + ASSERT_TRUE(wfi1.type_ == wfi2.type_); + ASSERT_TRUE(wfi1.valid == wfi2.valid); + ASSERT_TRUE(wfi1.prolog_size == wfi2.prolog_size); + ASSERT_TRUE(wfi1.epilog_size == wfi2.epilog_size); + ASSERT_TRUE(wfi1.parameter_size == wfi2.parameter_size); + ASSERT_TRUE(wfi1.saved_register_size == wfi2.saved_register_size); + ASSERT_TRUE(wfi1.local_size == wfi2.local_size); + ASSERT_TRUE(wfi1.max_stack_size == wfi2.max_stack_size); + ASSERT_TRUE(wfi1.allocates_base_pointer == wfi2.allocates_base_pointer); + ASSERT_TRUE(wfi1.program_string == wfi2.program_string); + return true; +} + +// Compare ContainedRangeMap +bool ModuleComparer::CompareCRM( + const ContainedRangeMap >* basic_crm, + const StaticContainedRangeMap* fast_crm) const { + ASSERT_TRUE(basic_crm->base_ == fast_crm->base_); + + if (!basic_crm->entry_.get() || !fast_crm->entry_ptr_) { + // empty entry: + ASSERT_TRUE(!basic_crm->entry_.get() && !fast_crm->entry_ptr_); + } else { + WFI newwfi; + newwfi.CopyFrom(fast_resolver_->CopyWFI(fast_crm->entry_ptr_)); + ASSERT_TRUE(CompareWFI(*(basic_crm->entry_.get()), newwfi)); + } + + if ((!basic_crm->map_ || basic_crm->map_->empty()) + || fast_crm->map_.empty()) { + 
ASSERT_TRUE((!basic_crm->map_ || basic_crm->map_->empty())
+        && fast_crm->map_.empty());
+  } else {
+    ContainedRangeMap<MemAddr, linked_ptr<WFI> >::MapConstIterator iter1;
+    StaticContainedRangeMap<MemAddr, char>::MapConstIterator iter2;
+    iter1 = basic_crm->map_->begin();
+    iter2 = fast_crm->map_.begin();
+    while (iter1 != basic_crm->map_->end()
+        && iter2 != fast_crm->map_.end()) {
+      ASSERT_TRUE(iter1->first == iter2.GetKey());
+      StaticContainedRangeMap<MemAddr, char> *child =
+          new StaticContainedRangeMap<MemAddr, char>(
+              reinterpret_cast<const char*>(iter2.GetValuePtr()));
+      ASSERT_TRUE(CompareCRM(iter1->second, child));
+      delete child;
+      ++iter1;
+      ++iter2;
+    }
+    ASSERT_TRUE(iter1 == basic_crm->map_->end());
+    ASSERT_TRUE(iter2 == fast_crm->map_.end());
+  }
+
+  return true;
+}
+
+} // namespace google_breakpad
diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/module_comparer.h b/TMessagesProj/jni/third_party/breakpad/src/processor/module_comparer.h
new file mode 100644
index 0000000000..fcbd51775f
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/processor/module_comparer.h
@@ -0,0 +1,98 @@
+// Copyright (c) 2010, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// module_comparer.h: ModuleComparer reads the string format of a symbol file
+// and loads the symbols into both a BasicSourceLineResolver::Module and a
+// FastSourceLineResolver::Module. It then traverses both Modules and compares
+// their contents to verify the correctness of the new fast module.
+// ModuleComparer is a tool for verifying the correctness of a loaded
+// FastSourceLineResolver::Module instance, i.e., the in-memory representation
+// of a parsed symbol file. ModuleComparer should be used for testing purposes
+// only, e.g., in fast_source_line_resolver_unittest.
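+//
+// A minimal usage sketch (not part of the original Breakpad header; the
+// symbol-file path below is hypothetical), assuming a Breakpad .sym file
+// has already been read into a string:
+//
+//   #include <fstream>
+//   #include <sstream>
+//
+//   std::ifstream sym_stream("test_module.sym");   // hypothetical path
+//   std::ostringstream sym_contents;
+//   sym_contents << sym_stream.rdbuf();
+//
+//   google_breakpad::ModuleComparer comparer;
+//   // Compare() loads the symbol data into both resolver Modules and
+//   // verifies that they contain identical data.
+//   bool identical = comparer.Compare(sym_contents.str());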
+// +// Author: lambxsy@google.com (Siyang Xie) + +#ifndef PROCESSOR_MODULE_COMPARER_H__ +#define PROCESSOR_MODULE_COMPARER_H__ + +#include + +#include "processor/basic_source_line_resolver_types.h" +#include "processor/fast_source_line_resolver_types.h" +#include "processor/module_serializer.h" +#include "processor/windows_frame_info.h" + +namespace google_breakpad { + +class ModuleComparer { + public: + ModuleComparer(): fast_resolver_(new FastSourceLineResolver), + basic_resolver_(new BasicSourceLineResolver) { } + ~ModuleComparer() { + delete fast_resolver_; + delete basic_resolver_; + } + + // BasicSourceLineResolver loads its module using the symbol data, + // ModuleSerializer serialize the loaded module into a memory chunk, + // FastSourceLineResolver loads its module using the serialized memory chunk, + // Then, traverse both modules together and compare underlying data + // return true if both modules contain exactly same data. + bool Compare(const string &symbol_data); + + private: + typedef BasicSourceLineResolver::Module BasicModule; + typedef FastSourceLineResolver::Module FastModule; + typedef BasicSourceLineResolver::Function BasicFunc; + typedef FastSourceLineResolver::Function FastFunc; + typedef BasicSourceLineResolver::Line BasicLine; + typedef FastSourceLineResolver::Line FastLine; + typedef BasicSourceLineResolver::PublicSymbol BasicPubSymbol; + typedef FastSourceLineResolver::PublicSymbol FastPubSymbol; + typedef WindowsFrameInfo WFI; + + bool CompareModule(const BasicModule *oldmodule, + const FastModule *newmodule) const; + bool CompareFunction(const BasicFunc *oldfunc, const FastFunc *newfunc) const; + bool CompareLine(const BasicLine *oldline, const FastLine *newline) const; + bool ComparePubSymbol(const BasicPubSymbol*, const FastPubSymbol*) const; + bool CompareWFI(const WindowsFrameInfo&, const WindowsFrameInfo&) const; + + // Compare ContainedRangeMap + bool CompareCRM(const ContainedRangeMap >*, + const StaticContainedRangeMap*) const; + + FastSourceLineResolver *fast_resolver_; + BasicSourceLineResolver *basic_resolver_; + ModuleSerializer serializer_; +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_MODULE_COMPARER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/module_factory.h b/TMessagesProj/jni/third_party/breakpad/src/processor/module_factory.h new file mode 100644 index 0000000000..7aa7caa59d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/module_factory.h @@ -0,0 +1,72 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// module_factory.h: ModuleFactory a factory that provides +// an interface for creating a Module and deferring instantiation to subclasses +// BasicModuleFactory and FastModuleFactory. + +// Author: Siyang Xie (lambxsy@google.com) + +#ifndef PROCESSOR_MODULE_FACTORY_H__ +#define PROCESSOR_MODULE_FACTORY_H__ + +#include "processor/basic_source_line_resolver_types.h" +#include "processor/fast_source_line_resolver_types.h" +#include "processor/source_line_resolver_base_types.h" + +namespace google_breakpad { + +class ModuleFactory { + public: + virtual ~ModuleFactory() { }; + virtual SourceLineResolverBase::Module* CreateModule( + const string &name) const = 0; +}; + +class BasicModuleFactory : public ModuleFactory { + public: + virtual ~BasicModuleFactory() { } + virtual BasicSourceLineResolver::Module* CreateModule( + const string &name) const { + return new BasicSourceLineResolver::Module(name); + } +}; + +class FastModuleFactory : public ModuleFactory { + public: + virtual ~FastModuleFactory() { } + virtual FastSourceLineResolver::Module* CreateModule( + const string &name) const { + return new FastSourceLineResolver::Module(name); + } +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_MODULE_FACTORY_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/module_serializer.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/module_serializer.cc new file mode 100644 index 0000000000..6ac60c1fcf --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/module_serializer.cc @@ -0,0 +1,207 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// module_serializer.cc: ModuleSerializer implementation. +// +// See module_serializer.h for documentation. +// +// Author: Siyang Xie (lambxsy@google.com) + +#include "processor/module_serializer.h" + +#include +#include + +#include "processor/basic_code_module.h" +#include "processor/logging.h" + +namespace google_breakpad { + +// Definition of static member variable in SimplerSerializer, which +// is declared in file "simple_serializer-inl.h" +RangeMapSerializer< MemAddr, linked_ptr > +SimpleSerializer::range_map_serializer_; + +size_t ModuleSerializer::SizeOf(const BasicSourceLineResolver::Module &module) { + size_t total_size_alloc_ = 0; + + // Size of the "is_corrupt" flag. + total_size_alloc_ += SimpleSerializer::SizeOf(module.is_corrupt_); + + // Compute memory size for each map component in Module class. + int map_index = 0; + map_sizes_[map_index++] = files_serializer_.SizeOf(module.files_); + map_sizes_[map_index++] = functions_serializer_.SizeOf(module.functions_); + map_sizes_[map_index++] = pubsym_serializer_.SizeOf(module.public_symbols_); + for (int i = 0; i < WindowsFrameInfo::STACK_INFO_LAST; ++i) + map_sizes_[map_index++] = + wfi_serializer_.SizeOf(&(module.windows_frame_info_[i])); + map_sizes_[map_index++] = cfi_init_rules_serializer_.SizeOf( + module.cfi_initial_rules_); + map_sizes_[map_index++] = cfi_delta_rules_serializer_.SizeOf( + module.cfi_delta_rules_); + + // Header size. + total_size_alloc_ += kNumberMaps_ * sizeof(uint32_t); + + for (int i = 0; i < kNumberMaps_; ++i) { + total_size_alloc_ += map_sizes_[i]; + } + + // Extra one byte for null terminator for C-string copy safety. + total_size_alloc_ += SimpleSerializer::SizeOf(0); + + return total_size_alloc_; +} + +char *ModuleSerializer::Write(const BasicSourceLineResolver::Module &module, + char *dest) { + // Write the is_corrupt flag. + dest = SimpleSerializer::Write(module.is_corrupt_, dest); + // Write header. + memcpy(dest, map_sizes_, kNumberMaps_ * sizeof(uint32_t)); + dest += kNumberMaps_ * sizeof(uint32_t); + // Write each map. + dest = files_serializer_.Write(module.files_, dest); + dest = functions_serializer_.Write(module.functions_, dest); + dest = pubsym_serializer_.Write(module.public_symbols_, dest); + for (int i = 0; i < WindowsFrameInfo::STACK_INFO_LAST; ++i) + dest = wfi_serializer_.Write(&(module.windows_frame_info_[i]), dest); + dest = cfi_init_rules_serializer_.Write(module.cfi_initial_rules_, dest); + dest = cfi_delta_rules_serializer_.Write(module.cfi_delta_rules_, dest); + // Write a null terminator. + dest = SimpleSerializer::Write(0, dest); + return dest; +} + +char* ModuleSerializer::Serialize( + const BasicSourceLineResolver::Module &module, unsigned int *size) { + // Compute size of memory to allocate. + unsigned int size_to_alloc = SizeOf(module); + + // Allocate memory for serialized data. 
+ char *serialized_data = new char[size_to_alloc]; + if (!serialized_data) { + BPLOG(ERROR) << "ModuleSerializer: memory allocation failed, " + << "size to alloc: " << size_to_alloc; + if (size) *size = 0; + return NULL; + } + + // Write serialized data to allocated memory chunk. + char *end_address = Write(module, serialized_data); + // Verify the allocated memory size is equal to the size of data been written. + unsigned int size_written = + static_cast(end_address - serialized_data); + if (size_to_alloc != size_written) { + BPLOG(ERROR) << "size_to_alloc differs from size_written: " + << size_to_alloc << " vs " << size_written; + } + + // Set size and return the start address of memory chunk. + if (size) + *size = size_to_alloc; + return serialized_data; +} + +bool ModuleSerializer::SerializeModuleAndLoadIntoFastResolver( + const BasicSourceLineResolver::ModuleMap::const_iterator &iter, + FastSourceLineResolver *fast_resolver) { + BPLOG(INFO) << "Converting symbol " << iter->first.c_str(); + + // Cast SourceLineResolverBase::Module* to BasicSourceLineResolver::Module*. + BasicSourceLineResolver::Module* basic_module = + dynamic_cast(iter->second); + + unsigned int size = 0; + scoped_array symbol_data(Serialize(*basic_module, &size)); + if (!symbol_data.get()) { + BPLOG(ERROR) << "Serialization failed for module: " << basic_module->name_; + return false; + } + BPLOG(INFO) << "Serialized Symbol Size " << size; + + // Copy the data into string. + // Must pass string to LoadModuleUsingMapBuffer(), instead of passing char* to + // LoadModuleUsingMemoryBuffer(), becaused of data ownership/lifetime issue. + string symbol_data_string(symbol_data.get(), size); + symbol_data.reset(); + + scoped_ptr code_module( + new BasicCodeModule(0, 0, iter->first, "", "", "", "")); + + return fast_resolver->LoadModuleUsingMapBuffer(code_module.get(), + symbol_data_string); +} + +void ModuleSerializer::ConvertAllModules( + const BasicSourceLineResolver *basic_resolver, + FastSourceLineResolver *fast_resolver) { + // Check for NULL pointer. + if (!basic_resolver || !fast_resolver) + return; + + // Traverse module list in basic resolver. + BasicSourceLineResolver::ModuleMap::const_iterator iter; + iter = basic_resolver->modules_->begin(); + for (; iter != basic_resolver->modules_->end(); ++iter) + SerializeModuleAndLoadIntoFastResolver(iter, fast_resolver); +} + +bool ModuleSerializer::ConvertOneModule( + const string &moduleid, + const BasicSourceLineResolver *basic_resolver, + FastSourceLineResolver *fast_resolver) { + // Check for NULL pointer. 
+  if (!basic_resolver || !fast_resolver)
+    return false;
+
+  BasicSourceLineResolver::ModuleMap::const_iterator iter;
+  iter = basic_resolver->modules_->find(moduleid);
+  if (iter == basic_resolver->modules_->end())
+    return false;
+
+  return SerializeModuleAndLoadIntoFastResolver(iter, fast_resolver);
+}
+
+char* ModuleSerializer::SerializeSymbolFileData(
+    const string &symbol_data, unsigned int *size) {
+  scoped_ptr<BasicSourceLineResolver::Module> module(
+      new BasicSourceLineResolver::Module("no name"));
+  scoped_array<char> buffer(new char[symbol_data.size() + 1]);
+  memcpy(buffer.get(), symbol_data.c_str(), symbol_data.size());
+  buffer.get()[symbol_data.size()] = '\0';
+  if (!module->LoadMapFromMemory(buffer.get(), symbol_data.size() + 1)) {
+    return NULL;
+  }
+  buffer.reset(NULL);
+  return Serialize(*(module.get()), size);
+}
+
+} // namespace google_breakpad
diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/module_serializer.h b/TMessagesProj/jni/third_party/breakpad/src/processor/module_serializer.h
new file mode 100644
index 0000000000..effb009162
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/processor/module_serializer.h
@@ -0,0 +1,127 @@
+// Copyright (c) 2010, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// module_serializer.h: ModuleSerializer serializes a loaded symbol,
+// i.e., a loaded BasicSourceLineResolver::Module instance, into a memory
+// chunk of data. The serialized data can be read and loaded by
+// FastSourceLineResolver without CPU & memory-intensive parsing.
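+//
+// A minimal usage sketch (not part of the original Breakpad header), assuming
+// symbols have already been loaded into a BasicSourceLineResolver; the module
+// loading step is elided:
+//
+//   google_breakpad::BasicSourceLineResolver basic_resolver;
+//   google_breakpad::FastSourceLineResolver fast_resolver;
+//   // ... load one or more modules into basic_resolver ...
+//
+//   google_breakpad::ModuleSerializer serializer;
+//   // Serialize every module held by basic_resolver and load the serialized
+//   // data into fast_resolver for fast, parse-free symbol lookups.
+//   serializer.ConvertAllModules(&basic_resolver, &fast_resolver);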
+// +// Author: Siyang Xie (lambxsy@google.com) + +#ifndef PROCESSOR_MODULE_SERIALIZER_H__ +#define PROCESSOR_MODULE_SERIALIZER_H__ + +#include +#include + +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "google_breakpad/processor/fast_source_line_resolver.h" +#include "processor/basic_source_line_resolver_types.h" +#include "processor/fast_source_line_resolver_types.h" +#include "processor/linked_ptr.h" +#include "processor/map_serializers-inl.h" +#include "processor/simple_serializer-inl.h" +#include "processor/windows_frame_info.h" + +namespace google_breakpad { + +// ModuleSerializer serializes a loaded BasicSourceLineResolver::Module into a +// chunk of memory data. ModuleSerializer also provides interface to compute +// memory size of the serialized data, write serialized data directly into +// memory, convert ASCII format symbol data into serialized binary data, and +// convert loaded BasicSourceLineResolver::Module into +// FastSourceLineResolver::Module. +class ModuleSerializer { + public: + // Compute the size of memory required to serialize a module. Return the + // total size needed for serialization. + size_t SizeOf(const BasicSourceLineResolver::Module &module); + + // Write a module into an allocated memory chunk with required size. + // Return the "end" of data, i.e., the address after the final byte of data. + char* Write(const BasicSourceLineResolver::Module &module, char *dest); + + // Serializes a loaded Module object into a chunk of memory data and returns + // the address of memory chunk. If size != NULL, *size is set to the memory + // size allocated for the serialized data. + // Caller takes the ownership of the memory chunk (allocated on heap), and + // owner should call delete [] to free the memory after use. + char* Serialize(const BasicSourceLineResolver::Module &module, + unsigned int *size = NULL); + + // Given the string format symbol_data, produces a chunk of serialized data. + // Caller takes ownership of the serialized data (on heap), and owner should + // call delete [] to free the memory after use. + char* SerializeSymbolFileData(const string &symbol_data, + unsigned int *size = NULL); + + // Serializes one loaded module with given moduleid in the basic source line + // resolver, and loads the serialized data into the fast source line resolver. + // Return false if the basic source line doesn't have a module with the given + // moduleid. + bool ConvertOneModule(const string &moduleid, + const BasicSourceLineResolver *basic_resolver, + FastSourceLineResolver *fast_resolver); + + // Serializes all the loaded modules in a basic source line resolver, and + // loads the serialized data into a fast source line resolver. + void ConvertAllModules(const BasicSourceLineResolver *basic_resolver, + FastSourceLineResolver *fast_resolver); + + private: + // Convenient type names. + typedef BasicSourceLineResolver::Line Line; + typedef BasicSourceLineResolver::Function Function; + typedef BasicSourceLineResolver::PublicSymbol PublicSymbol; + + // Internal implementation for ConvertOneModule and ConvertAllModules methods. + bool SerializeModuleAndLoadIntoFastResolver( + const BasicSourceLineResolver::ModuleMap::const_iterator &iter, + FastSourceLineResolver *fast_resolver); + + // Number of Maps that Module class contains. + static const int32_t kNumberMaps_ = + FastSourceLineResolver::Module::kNumberMaps_; + + // Memory sizes required to serialize map components in Module. 
+ uint32_t map_sizes_[kNumberMaps_]; + + // Serializers for each individual map component in Module class. + StdMapSerializer files_serializer_; + RangeMapSerializer > functions_serializer_; + AddressMapSerializer > pubsym_serializer_; + ContainedRangeMapSerializer > wfi_serializer_; + RangeMapSerializer cfi_init_rules_serializer_; + StdMapSerializer cfi_delta_rules_serializer_; +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_MODULE_SERIALIZER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/pathname_stripper.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/pathname_stripper.cc new file mode 100644 index 0000000000..839287bdba --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/pathname_stripper.cc @@ -0,0 +1,56 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// pathname_stripper.cc: Manipulates pathnames into their component parts. +// +// See pathname_stripper.h for documentation. +// +// Author: Mark Mentovai + +#include "processor/pathname_stripper.h" + +namespace google_breakpad { + +// static +string PathnameStripper::File(const string &path) { + string::size_type slash = path.rfind('/'); + string::size_type backslash = path.rfind('\\'); + + string::size_type file_start = 0; + if (slash != string::npos && + (backslash == string::npos || slash > backslash)) { + file_start = slash + 1; + } else if (backslash != string::npos) { + file_start = backslash + 1; + } + + return path.substr(file_start); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/pathname_stripper.h b/TMessagesProj/jni/third_party/breakpad/src/processor/pathname_stripper.h new file mode 100644 index 0000000000..423ca0d05a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/pathname_stripper.h @@ -0,0 +1,53 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// pathname_stripper.h: Manipulates pathnames into their component parts. +// +// Author: Mark Mentovai + +#ifndef PROCESSOR_PATHNAME_STRIPPER_H__ +#define PROCESSOR_PATHNAME_STRIPPER_H__ + +#include + +#include "common/using_std_string.h" + +namespace google_breakpad { + +class PathnameStripper { + public: + // Given path, a pathname with components separated by slashes (/) or + // backslashes (\), returns the trailing component, without any separator. + // If path ends in a separator character, returns an empty string. + static string File(const string &path); +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_PATHNAME_STRIPPER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/pathname_stripper_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/pathname_stripper_unittest.cc new file mode 100644 index 0000000000..1bff4cb017 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/pathname_stripper_unittest.cc @@ -0,0 +1,87 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include + +#include "processor/pathname_stripper.h" +#include "processor/logging.h" + +#define ASSERT_TRUE(condition) \ + if (!(condition)) { \ + fprintf(stderr, "FAIL: %s @ %s:%d\n", #condition, __FILE__, __LINE__); \ + return false; \ + } + +#define ASSERT_EQ(e1, e2) ASSERT_TRUE((e1) == (e2)) + +namespace { + +using google_breakpad::PathnameStripper; + +static bool RunTests() { + ASSERT_EQ(PathnameStripper::File("/dir/file"), "file"); + ASSERT_EQ(PathnameStripper::File("\\dir\\file"), "file"); + ASSERT_EQ(PathnameStripper::File("/dir\\file"), "file"); + ASSERT_EQ(PathnameStripper::File("\\dir/file"), "file"); + ASSERT_EQ(PathnameStripper::File("dir/file"), "file"); + ASSERT_EQ(PathnameStripper::File("dir\\file"), "file"); + ASSERT_EQ(PathnameStripper::File("dir/\\file"), "file"); + ASSERT_EQ(PathnameStripper::File("dir\\/file"), "file"); + ASSERT_EQ(PathnameStripper::File("file"), "file"); + ASSERT_EQ(PathnameStripper::File("dir/"), ""); + ASSERT_EQ(PathnameStripper::File("dir\\"), ""); + ASSERT_EQ(PathnameStripper::File("dir/dir/"), ""); + ASSERT_EQ(PathnameStripper::File("dir\\dir\\"), ""); + ASSERT_EQ(PathnameStripper::File("dir1/dir2/file"), "file"); + ASSERT_EQ(PathnameStripper::File("dir1\\dir2\\file"), "file"); + ASSERT_EQ(PathnameStripper::File("dir1/dir2\\file"), "file"); + ASSERT_EQ(PathnameStripper::File("dir1\\dir2/file"), "file"); + ASSERT_EQ(PathnameStripper::File(""), ""); + ASSERT_EQ(PathnameStripper::File("1"), "1"); + ASSERT_EQ(PathnameStripper::File("1/2"), "2"); + ASSERT_EQ(PathnameStripper::File("1\\2"), "2"); + ASSERT_EQ(PathnameStripper::File("/1/2"), "2"); + ASSERT_EQ(PathnameStripper::File("\\1\\2"), "2"); + ASSERT_EQ(PathnameStripper::File("dir//file"), "file"); + ASSERT_EQ(PathnameStripper::File("dir\\\\file"), "file"); + ASSERT_EQ(PathnameStripper::File("/dir//file"), "file"); + ASSERT_EQ(PathnameStripper::File("\\dir\\\\file"), "file"); + ASSERT_EQ(PathnameStripper::File("c:\\dir\\file"), "file"); + ASSERT_EQ(PathnameStripper::File("c:\\dir\\file.ext"), "file.ext"); + + return true; +} + +} // namespace + +int main(int argc, char **argv) { + BPLOG_INIT(&argc, &argv); + + return RunTests() ? 0 : 1; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/postfix_evaluator-inl.h b/TMessagesProj/jni/third_party/breakpad/src/processor/postfix_evaluator-inl.h new file mode 100644 index 0000000000..d7dbeac205 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/postfix_evaluator-inl.h @@ -0,0 +1,363 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// postfix_evaluator-inl.h: Postfix (reverse Polish) notation expression +// evaluator. +// +// Documentation in postfix_evaluator.h. +// +// Author: Mark Mentovai + +#ifndef PROCESSOR_POSTFIX_EVALUATOR_INL_H__ +#define PROCESSOR_POSTFIX_EVALUATOR_INL_H__ + +#include "processor/postfix_evaluator.h" + +#include + +#include + +#include "google_breakpad/processor/memory_region.h" +#include "processor/logging.h" + +namespace google_breakpad { + +using std::istringstream; +using std::ostringstream; + + +// A small class used in Evaluate to make sure to clean up the stack +// before returning failure. +class AutoStackClearer { + public: + explicit AutoStackClearer(vector *stack) : stack_(stack) {} + ~AutoStackClearer() { stack_->clear(); } + + private: + vector *stack_; +}; + + +template +bool PostfixEvaluator::EvaluateToken( + const string &token, + const string &expression, + DictionaryValidityType *assigned) { + // There are enough binary operations that do exactly the same thing + // (other than the specific operation, of course) that it makes sense + // to share as much code as possible. + enum BinaryOperation { + BINARY_OP_NONE = 0, + BINARY_OP_ADD, + BINARY_OP_SUBTRACT, + BINARY_OP_MULTIPLY, + BINARY_OP_DIVIDE_QUOTIENT, + BINARY_OP_DIVIDE_MODULUS, + BINARY_OP_ALIGN + }; + + BinaryOperation operation = BINARY_OP_NONE; + if (token == "+") + operation = BINARY_OP_ADD; + else if (token == "-") + operation = BINARY_OP_SUBTRACT; + else if (token == "*") + operation = BINARY_OP_MULTIPLY; + else if (token == "/") + operation = BINARY_OP_DIVIDE_QUOTIENT; + else if (token == "%") + operation = BINARY_OP_DIVIDE_MODULUS; + else if (token == "@") + operation = BINARY_OP_ALIGN; + + if (operation != BINARY_OP_NONE) { + // Get the operands. + ValueType operand1 = ValueType(); + ValueType operand2 = ValueType(); + if (!PopValues(&operand1, &operand2)) { + BPLOG(ERROR) << "Could not PopValues to get two values for binary " + "operation " << token << ": " << expression; + return false; + } + + // Perform the operation. 
+ ValueType result; + switch (operation) { + case BINARY_OP_ADD: + result = operand1 + operand2; + break; + case BINARY_OP_SUBTRACT: + result = operand1 - operand2; + break; + case BINARY_OP_MULTIPLY: + result = operand1 * operand2; + break; + case BINARY_OP_DIVIDE_QUOTIENT: + result = operand1 / operand2; + break; + case BINARY_OP_DIVIDE_MODULUS: + result = operand1 % operand2; + break; + case BINARY_OP_ALIGN: + result = + operand1 & (static_cast(-1) ^ (operand2 - 1)); + break; + case BINARY_OP_NONE: + // This will not happen, but compilers will want a default or + // BINARY_OP_NONE case. + BPLOG(ERROR) << "Not reached!"; + return false; + break; + } + + // Save the result. + PushValue(result); + } else if (token == "^") { + // ^ for unary dereference. Can't dereference without memory. + if (!memory_) { + BPLOG(ERROR) << "Attempt to dereference without memory: " << + expression; + return false; + } + + ValueType address; + if (!PopValue(&address)) { + BPLOG(ERROR) << "Could not PopValue to get value to derefence: " << + expression; + return false; + } + + ValueType value; + if (!memory_->GetMemoryAtAddress(address, &value)) { + BPLOG(ERROR) << "Could not dereference memory at address " << + HexString(address) << ": " << expression; + return false; + } + + PushValue(value); + } else if (token == "=") { + // = for assignment. + ValueType value; + if (!PopValue(&value)) { + BPLOG(INFO) << "Could not PopValue to get value to assign: " << + expression; + return false; + } + + // Assignment is only meaningful when assigning into an identifier. + // The identifier must name a variable, not a constant. Variables + // begin with '$'. + string identifier; + if (PopValueOrIdentifier(NULL, &identifier) != POP_RESULT_IDENTIFIER) { + BPLOG(ERROR) << "PopValueOrIdentifier returned a value, but an " + "identifier is needed to assign " << + HexString(value) << ": " << expression; + return false; + } + if (identifier.empty() || identifier[0] != '$') { + BPLOG(ERROR) << "Can't assign " << HexString(value) << " to " << + identifier << ": " << expression; + return false; + } + + (*dictionary_)[identifier] = value; + if (assigned) + (*assigned)[identifier] = true; + } else { + // The token is not an operator, it's a literal value or an identifier. + // Push it onto the stack as-is. Use push_back instead of PushValue + // because PushValue pushes ValueType as a string, but token is already + // a string. + stack_.push_back(token); + } + return true; +} + +template +bool PostfixEvaluator::EvaluateInternal( + const string &expression, + DictionaryValidityType *assigned) { + // Tokenize, splitting on whitespace. + istringstream stream(expression); + string token; + while (stream >> token) { + // Normally, tokens are whitespace-separated, but occasionally, the + // assignment operator is smashed up against the next token, i.e. + // $T0 $ebp 128 + =$eip $T0 4 + ^ =$ebp $T0 ^ = + // This has been observed in program strings produced by MSVS 2010 in LTO + // mode. + if (token.size() > 1 && token[0] == '=') { + if (!EvaluateToken("=", expression, assigned)) { + return false; + } + + if (!EvaluateToken(token.substr(1), expression, assigned)) { + return false; + } + } else if (!EvaluateToken(token, expression, assigned)) { + return false; + } + } + + return true; +} + +template +bool PostfixEvaluator::Evaluate(const string &expression, + DictionaryValidityType *assigned) { + // Ensure that the stack is cleared before returning. 
+ AutoStackClearer clearer(&stack_); + + if (!EvaluateInternal(expression, assigned)) + return false; + + // If there's anything left on the stack, it indicates incomplete execution. + // This is a failure case. If the stack is empty, evalution was complete + // and successful. + if (stack_.empty()) + return true; + + BPLOG(ERROR) << "Incomplete execution: " << expression; + return false; +} + +template +bool PostfixEvaluator::EvaluateForValue(const string &expression, + ValueType *result) { + // Ensure that the stack is cleared before returning. + AutoStackClearer clearer(&stack_); + + if (!EvaluateInternal(expression, NULL)) + return false; + + // A successful execution should leave exactly one value on the stack. + if (stack_.size() != 1) { + BPLOG(ERROR) << "Expression yielded bad number of results: " + << "'" << expression << "'"; + return false; + } + + return PopValue(result); +} + +template +typename PostfixEvaluator::PopResult +PostfixEvaluator::PopValueOrIdentifier( + ValueType *value, string *identifier) { + // There needs to be at least one element on the stack to pop. + if (!stack_.size()) + return POP_RESULT_FAIL; + + string token = stack_.back(); + stack_.pop_back(); + + // First, try to treat the value as a literal. Literals may have leading + // '-' sign, and the entire remaining string must be parseable as + // ValueType. If this isn't possible, it can't be a literal, so treat it + // as an identifier instead. + // + // Some versions of the libstdc++, the GNU standard C++ library, have + // stream extractors for unsigned integer values that permit a leading + // '-' sign (6.0.13); others do not (6.0.9). Since we require it, we + // handle it explicitly here. + istringstream token_stream(token); + ValueType literal = ValueType(); + bool negative; + if (token_stream.peek() == '-') { + negative = true; + token_stream.get(); + } else { + negative = false; + } + if (token_stream >> literal && token_stream.peek() == EOF) { + if (value) { + *value = literal; + } + if (negative) + *value = -*value; + return POP_RESULT_VALUE; + } else { + if (identifier) { + *identifier = token; + } + return POP_RESULT_IDENTIFIER; + } +} + + +template +bool PostfixEvaluator::PopValue(ValueType *value) { + ValueType literal = ValueType(); + string token; + PopResult result; + if ((result = PopValueOrIdentifier(&literal, &token)) == POP_RESULT_FAIL) { + return false; + } else if (result == POP_RESULT_VALUE) { + // This is the easy case. + *value = literal; + } else { // result == POP_RESULT_IDENTIFIER + // There was an identifier at the top of the stack. Resolve it to a + // value by looking it up in the dictionary. + typename DictionaryType::const_iterator iterator = + dictionary_->find(token); + if (iterator == dictionary_->end()) { + // The identifier wasn't found in the dictionary. Don't imply any + // default value, just fail. 
+ BPLOG(INFO) << "Identifier " << token << " not in dictionary"; + return false; + } + + *value = iterator->second; + } + + return true; +} + + +template +bool PostfixEvaluator::PopValues(ValueType *value1, + ValueType *value2) { + return PopValue(value2) && PopValue(value1); +} + + +template +void PostfixEvaluator::PushValue(const ValueType &value) { + ostringstream token_stream; + token_stream << value; + stack_.push_back(token_stream.str()); +} + + +} // namespace google_breakpad + + +#endif // PROCESSOR_POSTFIX_EVALUATOR_INL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/postfix_evaluator.h b/TMessagesProj/jni/third_party/breakpad/src/processor/postfix_evaluator.h new file mode 100644 index 0000000000..94b66190d5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/postfix_evaluator.h @@ -0,0 +1,179 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// postfix_evaluator.h: Postfix (reverse Polish) notation expression evaluator. +// +// PostfixEvaluator evaluates an expression, using the expression itself +// in postfix (reverse Polish) notation and a dictionary mapping constants +// and variables to their values. The evaluator supports standard +// arithmetic operations, assignment into variables, and when an optional +// MemoryRange is provided, dereferencing. (Any unary key-to-value operation +// may be used with a MemoryRange implementation that returns the appropriate +// values, but PostfixEvaluator was written with dereferencing in mind.) +// +// The expression language is simple. Expressions are supplied as strings, +// with operands and operators delimited by whitespace. Operands may be +// either literal values suitable for ValueType, or constants or variables, +// which reference the dictionary. The supported binary operators are + +// (addition), - (subtraction), * (multiplication), / (quotient of division), +// % (modulus of division), and @ (data alignment). 
The alignment operator (@) +// accepts a value and an alignment size, and produces a result that is a +// multiple of the alignment size by truncating the input value. +// The unary ^ (dereference) operator is also provided. These operators +// allow any operand to be either a literal value, constant, or variable. +// Assignment (=) of any type of operand into a variable is also supported. +// +// The dictionary is provided as a map with string keys. Keys beginning +// with the '$' character are treated as variables. All other keys are +// treated as constants. Any results must be assigned into variables in the +// dictionary. These variables do not need to exist prior to calling +// Evaluate, unless used in an expression prior to being assigned to. The +// internal stack state is not made available after evaluation, and any +// values remaining on the stack are treated as evidence of incomplete +// execution and cause the evaluator to indicate failure. +// +// PostfixEvaluator is intended to support evaluation of "program strings" +// obtained from MSVC frame data debugging information in pdb files as +// returned by the DIA APIs. +// +// Author: Mark Mentovai + +#ifndef PROCESSOR_POSTFIX_EVALUATOR_H__ +#define PROCESSOR_POSTFIX_EVALUATOR_H__ + + +#include +#include +#include + +#include "common/using_std_string.h" + +namespace google_breakpad { + +using std::map; +using std::vector; + +class MemoryRegion; + +template +class PostfixEvaluator { + public: + typedef map DictionaryType; + typedef map DictionaryValidityType; + + // Create a PostfixEvaluator object that may be used (with Evaluate) on + // one or more expressions. PostfixEvaluator does not take ownership of + // either argument. |memory| may be NULL, in which case dereferencing + // (^) will not be supported. |dictionary| may be NULL, but evaluation + // will fail in that case unless set_dictionary is used before calling + // Evaluate. + PostfixEvaluator(DictionaryType *dictionary, const MemoryRegion *memory) + : dictionary_(dictionary), memory_(memory), stack_() {} + + // Evaluate the expression, starting with an empty stack. The results of + // execution will be stored in one (or more) variables in the dictionary. + // Returns false if any failures occur during execution, leaving + // variables in the dictionary in an indeterminate state. If assigned is + // non-NULL, any keys set in the dictionary as a result of evaluation + // will also be set to true in assigned, providing a way to determine if + // an expression modifies any of its input variables. + bool Evaluate(const string &expression, DictionaryValidityType *assigned); + + // Like Evaluate, but provides the value left on the stack to the + // caller. If evaluation succeeds and leaves exactly one value on + // the stack, pop that value, store it in *result, and return true. + // Otherwise, return false. + bool EvaluateForValue(const string &expression, ValueType *result); + + DictionaryType* dictionary() const { return dictionary_; } + + // Reset the dictionary. PostfixEvaluator does not take ownership. + void set_dictionary(DictionaryType *dictionary) {dictionary_ = dictionary; } + + private: + // Return values for PopValueOrIdentifier + enum PopResult { + POP_RESULT_FAIL = 0, + POP_RESULT_VALUE, + POP_RESULT_IDENTIFIER + }; + + // Retrieves the topmost literal value, constant, or variable from the + // stack. Returns POP_RESULT_VALUE if the topmost entry is a literal + // value, and sets |value| accordingly. 
Returns POP_RESULT_IDENTIFIER + // if the topmost entry is a constant or variable identifier, and sets + // |identifier| accordingly. Returns POP_RESULT_FAIL on failure, such + // as when the stack is empty. + PopResult PopValueOrIdentifier(ValueType *value, string *identifier); + + // Retrieves the topmost value on the stack. If the topmost entry is + // an identifier, the dictionary is queried for the identifier's value. + // Returns false on failure, such as when the stack is empty or when + // a nonexistent identifier is named. + bool PopValue(ValueType *value); + + // Retrieves the top two values on the stack, in the style of PopValue. + // value2 is popped before value1, so that value1 corresponds to the + // entry that was pushed prior to value2. Returns false on failure. + bool PopValues(ValueType *value1, ValueType *value2); + + // Pushes a new value onto the stack. + void PushValue(const ValueType &value); + + // Evaluate expression, updating *assigned if it is non-zero. Return + // true if evaluation completes successfully. Do not clear the stack + // upon successful evaluation. + bool EvaluateInternal(const string &expression, + DictionaryValidityType *assigned); + + bool EvaluateToken(const string &token, + const string &expression, + DictionaryValidityType *assigned); + + // The dictionary mapping constant and variable identifiers (strings) to + // values. Keys beginning with '$' are treated as variable names, and + // PostfixEvaluator is free to create and modify these keys. Weak pointer. + DictionaryType *dictionary_; + + // If non-NULL, the MemoryRegion used for dereference (^) operations. + // If NULL, dereferencing is unsupported and will fail. Weak pointer. + const MemoryRegion *memory_; + + // The stack contains state information as execution progresses. Values + // are pushed on to it as the expression string is read and as operations + // yield values; values are popped when used as operands to operators. + vector stack_; +}; + +} // namespace google_breakpad + + +#endif // PROCESSOR_POSTFIX_EVALUATOR_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/postfix_evaluator_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/postfix_evaluator_unittest.cc new file mode 100644 index 0000000000..f118982849 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/postfix_evaluator_unittest.cc @@ -0,0 +1,403 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// postfix_evaluator_unittest.cc: Unit tests for PostfixEvaluator. +// +// Author: Mark Mentovai + +#include +#include + +#include +#include + +#include "processor/postfix_evaluator-inl.h" + +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/processor/memory_region.h" +#include "processor/logging.h" + + +namespace { + + +using std::map; +using google_breakpad::MemoryRegion; +using google_breakpad::PostfixEvaluator; + + +// FakeMemoryRegion is used to test PostfixEvaluator's dereference (^) +// operator. The result of dereferencing a value is one greater than +// the value. +class FakeMemoryRegion : public MemoryRegion { + public: + virtual uint64_t GetBase() const { return 0; } + virtual uint32_t GetSize() const { return 0; } + virtual bool GetMemoryAtAddress(uint64_t address, uint8_t *value) const { + *value = address + 1; + return true; + } + virtual bool GetMemoryAtAddress(uint64_t address, uint16_t *value) const { + *value = address + 1; + return true; + } + virtual bool GetMemoryAtAddress(uint64_t address, uint32_t *value) const { + *value = address + 1; + return true; + } + virtual bool GetMemoryAtAddress(uint64_t address, uint64_t *value) const { + *value = address + 1; + return true; + } + virtual void Print() const { + assert(false); + } +}; + + +struct EvaluateTest { + // Expression passed to PostfixEvaluator::Evaluate. + const string expression; + + // True if the expression is expected to be evaluable, false if evaluation + // is expected to fail. + bool evaluable; +}; + + +struct EvaluateTestSet { + // The dictionary used for all tests in the set. + PostfixEvaluator::DictionaryType *dictionary; + + // The list of tests. + const EvaluateTest *evaluate_tests; + + // The number of tests. + unsigned int evaluate_test_count; + + // Identifiers and their expected values upon completion of the Evaluate + // tests in the set. + map *validate_data; +}; + + +struct EvaluateForValueTest { + // Expression passed to PostfixEvaluator::Evaluate. + const string expression; + + // True if the expression is expected to be evaluable, false if evaluation + // is expected to fail. + bool evaluable; + + // If evaluable, the value we expect it to yield. + unsigned int value; +}; + +static bool RunTests() { + // The first test set checks the basic operations and failure modes. + PostfixEvaluator::DictionaryType dictionary_0; + const EvaluateTest evaluate_tests_0[] = { + { "$rAdd 2 2 + =", true }, // $rAdd = 2 + 2 = 4 + { "$rAdd $rAdd 2 + =", true }, // $rAdd = $rAdd + 2 = 6 + { "$rAdd 2 $rAdd + =", true }, // $rAdd = 2 + $rAdd = 8 + { "99", false }, // put some junk on the stack... 
+ { "$rAdd2 2 2 + =", true }, // ...and make sure things still work + { "$rAdd2\t2\n2 + =", true }, // same but with different whitespace + { "$rAdd2 2 2 + = ", true }, // trailing whitespace + { " $rAdd2 2 2 + =", true }, // leading whitespace + { "$rAdd2 2 2 + =", true }, // extra whitespace + { "$T0 2 = +", false }, // too few operands for add + { "2 + =", false }, // too few operands for add + { "2 +", false }, // too few operands for add + { "+", false }, // too few operands for add + { "^", false }, // too few operands for dereference + { "=", false }, // too few operands for assignment + { "2 =", false }, // too few operands for assignment + { "2 2 + =", false }, // too few operands for assignment + { "2 2 =", false }, // can't assign into a literal + { "k 2 =", false }, // can't assign into a constant + { "2", false }, // leftover data on stack + { "2 2 +", false }, // leftover data on stack + { "$rAdd", false }, // leftover data on stack + { "0 $T1 0 0 + =", false }, // leftover data on stack + { "$T2 $T2 2 + =", false }, // can't operate on an undefined value + { "$rMul 9 6 * =", true }, // $rMul = 9 * 6 = 54 + { "$rSub 9 6 - =", true }, // $rSub = 9 - 6 = 3 + { "$rDivQ 9 6 / =", true }, // $rDivQ = 9 / 6 = 1 + { "$rDivM 9 6 % =", true }, // $rDivM = 9 % 6 = 3 + { "$rDeref 9 ^ =", true }, // $rDeref = ^9 = 10 (FakeMemoryRegion) + { "$rAlign 36 8 @ =", true }, // $rAlign = 36 @ 8 + { "$rAdd3 2 2 + =$rMul2 9 6 * =", true } // smashed-equals tokenization + }; + map validate_data_0; + validate_data_0["$rAdd"] = 8; + validate_data_0["$rAdd2"] = 4; + validate_data_0["$rSub"] = 3; + validate_data_0["$rMul"] = 54; + validate_data_0["$rDivQ"] = 1; + validate_data_0["$rDivM"] = 3; + validate_data_0["$rDeref"] = 10; + validate_data_0["$rAlign"] = 32; + validate_data_0["$rAdd3"] = 4; + validate_data_0["$rMul2"] = 54; + + // The second test set simulates a couple of MSVC program strings. + // The data is fudged a little bit because the tests use FakeMemoryRegion + // instead of a real stack snapshot, but the program strings are real and + // the implementation doesn't know or care that the data is not real. 
+ PostfixEvaluator::DictionaryType dictionary_1; + dictionary_1["$ebp"] = 0xbfff0010; + dictionary_1["$eip"] = 0x10000000; + dictionary_1["$esp"] = 0xbfff0000; + dictionary_1[".cbSavedRegs"] = 4; + dictionary_1[".cbParams"] = 4; + dictionary_1[".raSearchStart"] = 0xbfff0020; + const EvaluateTest evaluate_tests_1[] = { + { "$T0 $ebp = $eip $T0 4 + ^ = $ebp $T0 ^ = $esp $T0 8 + = " + "$L $T0 .cbSavedRegs - = $P $T0 8 + .cbParams + =", true }, + // Intermediate state: $T0 = 0xbfff0010, $eip = 0xbfff0015, + // $ebp = 0xbfff0011, $esp = 0xbfff0018, + // $L = 0xbfff000c, $P = 0xbfff001c + { "$T0 $ebp = $eip $T0 4 + ^ = $ebp $T0 ^ = $esp $T0 8 + = " + "$L $T0 .cbSavedRegs - = $P $T0 8 + .cbParams + = $ebx $T0 28 - ^ =", + true }, + // Intermediate state: $T0 = 0xbfff0011, $eip = 0xbfff0016, + // $ebp = 0xbfff0012, $esp = 0xbfff0019, + // $L = 0xbfff000d, $P = 0xbfff001d, + // $ebx = 0xbffefff6 + { "$T0 $ebp = $T2 $esp = $T1 .raSearchStart = $eip $T1 ^ = $ebp $T0 = " + "$esp $T1 4 + = $L $T0 .cbSavedRegs - = $P $T1 4 + .cbParams + = " + "$ebx $T0 28 - ^ =", + true } + }; + map validate_data_1; + validate_data_1["$T0"] = 0xbfff0012; + validate_data_1["$T1"] = 0xbfff0020; + validate_data_1["$T2"] = 0xbfff0019; + validate_data_1["$eip"] = 0xbfff0021; + validate_data_1["$ebp"] = 0xbfff0012; + validate_data_1["$esp"] = 0xbfff0024; + validate_data_1["$L"] = 0xbfff000e; + validate_data_1["$P"] = 0xbfff0028; + validate_data_1["$ebx"] = 0xbffefff7; + validate_data_1[".cbSavedRegs"] = 4; + validate_data_1[".cbParams"] = 4; + + EvaluateTestSet evaluate_test_sets[] = { + { &dictionary_0, evaluate_tests_0, + sizeof(evaluate_tests_0) / sizeof(EvaluateTest), &validate_data_0 }, + { &dictionary_1, evaluate_tests_1, + sizeof(evaluate_tests_1) / sizeof(EvaluateTest), &validate_data_1 }, + }; + + unsigned int evaluate_test_set_count = sizeof(evaluate_test_sets) / + sizeof(EvaluateTestSet); + + FakeMemoryRegion fake_memory; + PostfixEvaluator postfix_evaluator = + PostfixEvaluator(NULL, &fake_memory); + + for (unsigned int evaluate_test_set_index = 0; + evaluate_test_set_index < evaluate_test_set_count; + ++evaluate_test_set_index) { + EvaluateTestSet *evaluate_test_set = + &evaluate_test_sets[evaluate_test_set_index]; + const EvaluateTest *evaluate_tests = evaluate_test_set->evaluate_tests; + unsigned int evaluate_test_count = evaluate_test_set->evaluate_test_count; + + // The same dictionary will be used for each test in the set. Earlier + // tests can affect the state of the dictionary for later tests. + postfix_evaluator.set_dictionary(evaluate_test_set->dictionary); + + // Use a new validity dictionary for each test set. + PostfixEvaluator::DictionaryValidityType assigned; + + for (unsigned int evaluate_test_index = 0; + evaluate_test_index < evaluate_test_count; + ++evaluate_test_index) { + const EvaluateTest *evaluate_test = &evaluate_tests[evaluate_test_index]; + + // Do the test. + bool result = postfix_evaluator.Evaluate(evaluate_test->expression, + &assigned); + if (result != evaluate_test->evaluable) { + fprintf(stderr, "FAIL: evaluate set %d/%d, test %d/%d, " + "expression \"%s\", expected %s, observed %s\n", + evaluate_test_set_index, evaluate_test_set_count, + evaluate_test_index, evaluate_test_count, + evaluate_test->expression.c_str(), + evaluate_test->evaluable ? "evaluable" : "not evaluable", + result ? "evaluted" : "not evaluated"); + return false; + } + } + + // Validate the results. 
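    // Summary (not part of the upstream Breakpad sources): the loop below
    // checks two things for every entry in validate_data - the dictionary
    // must contain the expected value, and the "assigned" map must mark
    // '$'-prefixed identifiers (variables) as assigned while leaving plain
    // constants unmarked.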
+ for (map::const_iterator validate_iterator = + evaluate_test_set->validate_data->begin(); + validate_iterator != evaluate_test_set->validate_data->end(); + ++validate_iterator) { + const string identifier = validate_iterator->first; + unsigned int expected_value = validate_iterator->second; + + map::const_iterator dictionary_iterator = + evaluate_test_set->dictionary->find(identifier); + + // The identifier must exist in the dictionary. + if (dictionary_iterator == evaluate_test_set->dictionary->end()) { + fprintf(stderr, "FAIL: evaluate test set %d/%d, " + "validate identifier \"%s\", " + "expected %d, observed not found\n", + evaluate_test_set_index, evaluate_test_set_count, + identifier.c_str(), expected_value); + return false; + } + + // The value in the dictionary must be the same as the expected value. + unsigned int observed_value = dictionary_iterator->second; + if (expected_value != observed_value) { + fprintf(stderr, "FAIL: evaluate test set %d/%d, " + "validate identifier \"%s\", " + "expected %d, observed %d\n", + evaluate_test_set_index, evaluate_test_set_count, + identifier.c_str(), expected_value, observed_value); + return false; + } + + // The value must be set in the "assigned" dictionary if it was a + // variable. It must not have been assigned if it was a constant. + bool expected_assigned = identifier[0] == '$'; + bool observed_assigned = false; + PostfixEvaluator::DictionaryValidityType::const_iterator + iterator_assigned = assigned.find(identifier); + if (iterator_assigned != assigned.end()) { + observed_assigned = iterator_assigned->second; + } + if (expected_assigned != observed_assigned) { + fprintf(stderr, "FAIL: evaluate test set %d/%d, " + "validate assignment of \"%s\", " + "expected %d, observed %d\n", + evaluate_test_set_index, evaluate_test_set_count, + identifier.c_str(), expected_assigned, observed_assigned); + return false; + } + } + } + + // EvaluateForValue tests. 
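  // Worked example (not part of the upstream Breakpad sources): unlike
  // Evaluate, EvaluateForValue succeeds only when exactly one value is left
  // on the stack, and it returns that value rather than requiring an
  // assignment. With the dictionary set up below ($ebp = 0xbfff0010,
  // $esp = 0xbfff0000, $eip = 0x10000000), the expression
  //
  //   "$ebp $esp - $eip +"
  //
  // evaluates as (0xbfff0010 - 0xbfff0000) + 0x10000000 = 0x10000010, which
  // is the value the corresponding test case expects.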
+ PostfixEvaluator::DictionaryType dictionary_2; + dictionary_2["$ebp"] = 0xbfff0010; + dictionary_2["$eip"] = 0x10000000; + dictionary_2["$esp"] = 0xbfff0000; + dictionary_2[".cbSavedRegs"] = 4; + dictionary_2[".cbParams"] = 4; + dictionary_2[".raSearchStart"] = 0xbfff0020; + const EvaluateForValueTest evaluate_for_value_tests_2[] = { + { "28907223", true, 28907223 }, // simple constant + { "89854293 40010015 +", true, 89854293 + 40010015 }, // arithmetic + { "-870245 8769343 +", true, 7899098 }, // negative constants + { "$ebp $esp - $eip +", true, 0x10000010 }, // variable references + { "18929794 34015074", false, 0 }, // too many values + { "$ebp $ebp 4 - =", false, 0 }, // too few values + { "$new $eip = $new", true, 0x10000000 }, // make new variable + { "$new 4 +", true, 0x10000004 }, // see prior assignments + { ".cfa 42 = 10", false, 0 } // can't set constants + }; + const int evaluate_for_value_tests_2_size + = (sizeof (evaluate_for_value_tests_2) + / sizeof (evaluate_for_value_tests_2[0])); + map validate_data_2; + validate_data_2["$eip"] = 0x10000000; + validate_data_2["$ebp"] = 0xbfff000c; + validate_data_2["$esp"] = 0xbfff0000; + validate_data_2["$new"] = 0x10000000; + validate_data_2[".cbSavedRegs"] = 4; + validate_data_2[".cbParams"] = 4; + validate_data_2[".raSearchStart"] = 0xbfff0020; + + postfix_evaluator.set_dictionary(&dictionary_2); + for (int i = 0; i < evaluate_for_value_tests_2_size; i++) { + const EvaluateForValueTest *test = &evaluate_for_value_tests_2[i]; + unsigned int result; + if (postfix_evaluator.EvaluateForValue(test->expression, &result) + != test->evaluable) { + fprintf(stderr, "FAIL: evaluate for value test %d, " + "expected evaluation to %s, but it %s\n", + i, test->evaluable ? "succeed" : "fail", + test->evaluable ? "failed" : "succeeded"); + return false; + } + if (test->evaluable && result != test->value) { + fprintf(stderr, "FAIL: evaluate for value test %d, " + "expected value to be 0x%x, but it was 0x%x\n", + i, test->value, result); + return false; + } + } + + for (map::iterator v = validate_data_2.begin(); + v != validate_data_2.end(); v++) { + map::iterator a = dictionary_2.find(v->first); + if (a == dictionary_2.end()) { + fprintf(stderr, "FAIL: evaluate for value dictionary check: " + "expected dict[\"%s\"] to be 0x%x, but it was unset\n", + v->first.c_str(), v->second); + return false; + } else if (a->second != v->second) { + fprintf(stderr, "FAIL: evaluate for value dictionary check: " + "expected dict[\"%s\"] to be 0x%x, but it was 0x%x\n", + v->first.c_str(), v->second, a->second); + return false; + } + dictionary_2.erase(a); + } + + map::iterator remaining = dictionary_2.begin(); + if (remaining != dictionary_2.end()) { + fprintf(stderr, "FAIL: evaluation of test expressions put unexpected " + "values in dictionary:\n"); + for (; remaining != dictionary_2.end(); remaining++) + fprintf(stderr, " dict[\"%s\"] == 0x%x\n", + remaining->first.c_str(), remaining->second); + return false; + } + + return true; +} + + +} // namespace + + +int main(int argc, char **argv) { + BPLOG_INIT(&argc, &argv); + + return RunTests() ? 0 : 1; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/process_state.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/process_state.cc new file mode 100644 index 0000000000..5a5cd7f629 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/process_state.cc @@ -0,0 +1,69 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// process_state.cc: A snapshot of a process, in a fully-digested state. +// +// See process_state.h for documentation. +// +// Author: Mark Mentovai + +#include "google_breakpad/processor/process_state.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/code_modules.h" + +namespace google_breakpad { + +ProcessState::~ProcessState() { + Clear(); +} + +void ProcessState::Clear() { + time_date_stamp_ = 0; + process_create_time_ = 0; + crashed_ = false; + crash_reason_.clear(); + crash_address_ = 0; + assertion_.clear(); + requesting_thread_ = -1; + for (vector::const_iterator iterator = threads_.begin(); + iterator != threads_.end(); + ++iterator) { + delete *iterator; + } + threads_.clear(); + system_info_.Clear(); + // modules_without_symbols_ and modules_with_corrupt_symbols_ DO NOT own + // the underlying CodeModule pointers. Just clear the vectors. + modules_without_symbols_.clear(); + modules_with_corrupt_symbols_.clear(); + delete modules_; + modules_ = NULL; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/processor.gyp b/TMessagesProj/jni/third_party/breakpad/src/processor/processor.gyp new file mode 100644 index 0000000000..fb3d7cde80 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/processor.gyp @@ -0,0 +1,183 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +{ + 'includes': [ + 'processor_tools.gypi', + ], + 'targets': [ + { + 'target_name': 'processor', + 'type': 'static_library', + 'sources': [ + 'address_map-inl.h', + 'address_map.h', + 'basic_code_module.h', + 'basic_code_modules.cc', + 'basic_code_modules.h', + 'basic_source_line_resolver.cc', + 'basic_source_line_resolver_types.h', + 'binarystream.cc', + 'binarystream.h', + 'call_stack.cc', + 'cfi_frame_info-inl.h', + 'cfi_frame_info.cc', + 'cfi_frame_info.h', + 'contained_range_map-inl.h', + 'contained_range_map.h', + 'disassembler_x86.cc', + 'disassembler_x86.h', + 'dump_context.cc', + 'dump_object.cc', + 'exploitability.cc', + 'exploitability_linux.cc', + 'exploitability_linux.h', + 'exploitability_win.cc', + 'exploitability_win.h', + 'fast_source_line_resolver.cc', + 'fast_source_line_resolver_types.h', + 'linked_ptr.h', + 'logging.cc', + 'logging.h', + 'map_serializers-inl.h', + 'map_serializers.h', + 'microdump_processor.cc', + 'minidump.cc', + 'minidump_processor.cc', + 'module_comparer.cc', + 'module_comparer.h', + 'module_factory.h', + 'module_serializer.cc', + 'module_serializer.h', + 'pathname_stripper.cc', + 'pathname_stripper.h', + 'postfix_evaluator-inl.h', + 'postfix_evaluator.h', + 'process_state.cc', + 'range_map-inl.h', + 'range_map.h', + 'simple_serializer-inl.h', + 'simple_serializer.h', + 'simple_symbol_supplier.cc', + 'simple_symbol_supplier.h', + 'source_line_resolver_base.cc', + 'source_line_resolver_base_types.h', + 'stack_frame_cpu.cc', + 'stack_frame_symbolizer.cc', + 'stackwalk_common.cc', + 'stackwalk_common.h', + 'stackwalker.cc', + 'stackwalker_address_list.cc', + 'stackwalker_address_list.h', + 'stackwalker_amd64.cc', + 'stackwalker_amd64.h', + 'stackwalker_arm.cc', + 'stackwalker_arm.h', + 'stackwalker_arm64.cc', + 'stackwalker_arm64.h', + 'stackwalker_mips.cc', + 'stackwalker_mips.h', + 'stackwalker_ppc.cc', + 'stackwalker_ppc.h', + 'stackwalker_ppc64.cc', + 'stackwalker_ppc64.h', + 'stackwalker_selftest.cc', + 'stackwalker_sparc.cc', + 'stackwalker_sparc.h', + 'stackwalker_x86.cc', + 'stackwalker_x86.h', + 'static_address_map-inl.h', + 'static_address_map.h', + 'static_contained_range_map-inl.h', + 'static_contained_range_map.h', + 'static_map-inl.h', + 'static_map.h', + 'static_map_iterator-inl.h', + 'static_map_iterator.h', + 'static_range_map-inl.h', + 'static_range_map.h', + 'symbolic_constants_win.cc', + 'symbolic_constants_win.h', + 'synth_minidump.cc', + 'synth_minidump.h', + 'tokenize.cc', + 'tokenize.h', + 'windows_frame_info.h', + ], + 'include_dirs': [ + '..', + ], + 'dependencies': [ + 
'../common/common.gyp:common', + '../third_party/libdisasm/libdisasm.gyp:libdisasm', + ], + }, + { + 'target_name': 'processor_unittests', + 'type': 'executable', + 'sources': [ + 'address_map_unittest.cc', + 'basic_source_line_resolver_unittest.cc', + 'binarystream_unittest.cc', + 'cfi_frame_info_unittest.cc', + 'contained_range_map_unittest.cc', + 'disassembler_x86_unittest.cc', + 'exploitability_unittest.cc', + 'fast_source_line_resolver_unittest.cc', + 'map_serializers_unittest.cc', + 'microdump_processor_unittest.cc', + 'minidump_processor_unittest.cc', + 'minidump_unittest.cc', + 'pathname_stripper_unittest.cc', + 'postfix_evaluator_unittest.cc', + 'range_map_unittest.cc', + 'stackwalker_address_list_unittest.cc', + 'stackwalker_amd64_unittest.cc', + 'stackwalker_arm64_unittest.cc', + 'stackwalker_arm_unittest.cc', + 'stackwalker_mips_unittest.cc', + 'stackwalker_unittest_utils.h', + 'stackwalker_x86_unittest.cc', + 'static_address_map_unittest.cc', + 'static_contained_range_map_unittest.cc', + 'static_map_unittest.cc', + 'static_range_map_unittest.cc', + 'synth_minidump_unittest.cc', + 'synth_minidump_unittest_data.h', + ], + 'include_dirs': [ + '..', + ], + 'dependencies': [ + 'processor', + '../build/testing.gypi:gmock', + '../build/testing.gypi:gtest', + ], + }, + ], +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/processor_tools.gypi b/TMessagesProj/jni/third_party/breakpad/src/processor/processor_tools.gypi new file mode 100644 index 0000000000..ecb450d602 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/processor_tools.gypi @@ -0,0 +1,57 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
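# Note (not part of the upstream Breakpad sources): this .gypi declares the
# two command-line tools built on top of the processor library. minidump_dump
# prints the raw streams of a minidump; minidump_stackwalk symbolizes and
# prints its stack traces. Typical invocations, from the standard Breakpad
# workflow rather than from this patch:
#
#   minidump_stackwalk crash.dmp /path/to/symbols
#   minidump_dump crash.dmp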
+ +{ + 'target_defaults': { + 'include_dirs': [ + '..', + ], + }, + 'targets': [ + { + 'target_name': 'minidump_dump', + 'type': 'executable', + 'sources': [ + 'minidump_dump.cc', + ], + 'dependencies': [ + 'processor', + ], + }, + { + 'target_name': 'minidump_stackwalk', + 'type': 'executable', + 'sources': [ + 'minidump_stackwalk.cc', + ], + 'dependencies': [ + 'processor', + ], + }, + ], +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/range_map-inl.h b/TMessagesProj/jni/third_party/breakpad/src/processor/range_map-inl.h new file mode 100644 index 0000000000..55dae8396d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/range_map-inl.h @@ -0,0 +1,220 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// range_map-inl.h: Range map implementation. +// +// See range_map.h for documentation. +// +// Author: Mark Mentovai + +#ifndef PROCESSOR_RANGE_MAP_INL_H__ +#define PROCESSOR_RANGE_MAP_INL_H__ + + +#include + +#include "processor/range_map.h" +#include "processor/logging.h" + + +namespace google_breakpad { + + +template +bool RangeMap::StoreRange(const AddressType &base, + const AddressType &size, + const EntryType &entry) { + AddressType high = base + size - 1; + + // Check for undersize or overflow. + if (size <= 0 || high < base) { + // The processor will hit this case too frequently with common symbol + // files in the size == 0 case, which is more suited to a DEBUG channel. + // Filter those out since there's no DEBUG channel at the moment. + BPLOG_IF(INFO, size != 0) << "StoreRange failed, " << HexString(base) << + "+" << HexString(size) << ", " << + HexString(high); + return false; + } + + // Ensure that this range does not overlap with another one already in the + // map. + MapConstIterator iterator_base = map_.lower_bound(base); + MapConstIterator iterator_high = map_.lower_bound(high); + + if (iterator_base != iterator_high) { + // Some other range begins in the space used by this range. 
It may be + // contained within the space used by this range, or it may extend lower. + // Regardless, it is an error. + // The processor hits this case too frequently with common symbol files. + // This is most appropriate for a DEBUG channel, but since none exists now + // simply comment out this logging. + // + // AddressType other_base = iterator_base->second.base(); + // AddressType other_size = iterator_base->first - other_base + 1; + // BPLOG(INFO) << "StoreRange failed, an existing range is contained by or " + // "extends lower than the new range: new " << + // HexString(base) << "+" << HexString(size) << + // ", existing " << HexString(other_base) << "+" << + // HexString(other_size); + + return false; + } + + if (iterator_high != map_.end()) { + if (iterator_high->second.base() <= high) { + // The range above this one overlaps with this one. It may fully + // contain this range, or it may begin within this range and extend + // higher. Regardless, it's an error. + // The processor hits this case too frequently with common symbol files. + // This is most appropriate for a DEBUG channel, but since none exists now + // simply comment out this logging. + // + // AddressType other_base = iterator_high->second.base(); + // AddressType other_size = iterator_high->first - other_base + 1; + // BPLOG(INFO) << "StoreRange failed, an existing range contains or " + // "extends higher than the new range: new " << + // HexString(base) << "+" << HexString(size) << + // ", existing " << HexString(other_base) << "+" << + // HexString(other_size); + return false; + } + } + + // Store the range in the map by its high address, so that lower_bound can + // be used to quickly locate a range by address. + map_.insert(MapValue(high, Range(base, entry))); + return true; +} + + +template +bool RangeMap::RetrieveRange( + const AddressType &address, EntryType *entry, + AddressType *entry_base, AddressType *entry_size) const { + BPLOG_IF(ERROR, !entry) << "RangeMap::RetrieveRange requires |entry|"; + assert(entry); + + MapConstIterator iterator = map_.lower_bound(address); + if (iterator == map_.end()) + return false; + + // The map is keyed by the high address of each range, so |address| is + // guaranteed to be lower than the range's high address. If |range| is + // not directly preceded by another range, it's possible for address to + // be below the range's low address, though. When that happens, address + // references something not within any range, so return false. + if (address < iterator->second.base()) + return false; + + *entry = iterator->second.entry(); + if (entry_base) + *entry_base = iterator->second.base(); + if (entry_size) + *entry_size = iterator->first - iterator->second.base() + 1; + + return true; +} + + +template +bool RangeMap::RetrieveNearestRange( + const AddressType &address, EntryType *entry, + AddressType *entry_base, AddressType *entry_size) const { + BPLOG_IF(ERROR, !entry) << "RangeMap::RetrieveNearestRange requires |entry|"; + assert(entry); + + // If address is within a range, RetrieveRange can handle it. + if (RetrieveRange(address, entry, entry_base, entry_size)) + return true; + + // upper_bound gives the first element whose key is greater than address, + // but we want the first element whose key is less than or equal to address. + // Decrement the iterator to get there, but not if the upper_bound already + // points to the beginning of the map - in that case, address is lower than + // the lowest stored key, so return false. 
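  // Worked example (not part of the upstream Breakpad sources): with stored
  // ranges [0x10, 0x1f] and [0x30, 0x3f], keyed in map_ by their high
  // addresses 0x1f and 0x3f, a lookup at 0x25 falls in the gap between them.
  // upper_bound(0x25) points at the 0x3f entry; decrementing it yields the
  // 0x1f entry, so [0x10, 0x1f] is returned as the nearest range below the
  // address. A lookup at 0x05 finds upper_bound already at map_.begin(), so
  // the method returns false.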
+ MapConstIterator iterator = map_.upper_bound(address); + if (iterator == map_.begin()) + return false; + --iterator; + + *entry = iterator->second.entry(); + if (entry_base) + *entry_base = iterator->second.base(); + if (entry_size) + *entry_size = iterator->first - iterator->second.base() + 1; + + return true; +} + + +template +bool RangeMap::RetrieveRangeAtIndex( + int index, EntryType *entry, + AddressType *entry_base, AddressType *entry_size) const { + BPLOG_IF(ERROR, !entry) << "RangeMap::RetrieveRangeAtIndex requires |entry|"; + assert(entry); + + if (index >= GetCount()) { + BPLOG(ERROR) << "Index out of range: " << index << "/" << GetCount(); + return false; + } + + // Walk through the map. Although it's ordered, it's not a vector, so it + // can't be addressed directly by index. + MapConstIterator iterator = map_.begin(); + for (int this_index = 0; this_index < index; ++this_index) + ++iterator; + + *entry = iterator->second.entry(); + if (entry_base) + *entry_base = iterator->second.base(); + if (entry_size) + *entry_size = iterator->first - iterator->second.base() + 1; + + return true; +} + + +template +int RangeMap::GetCount() const { + return map_.size(); +} + + +template +void RangeMap::Clear() { + map_.clear(); +} + + +} // namespace google_breakpad + + +#endif // PROCESSOR_RANGE_MAP_INL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/range_map.h b/TMessagesProj/jni/third_party/breakpad/src/processor/range_map.h new file mode 100644 index 0000000000..2572e4927d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/range_map.h @@ -0,0 +1,132 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// range_map.h: Range maps. +// +// A range map associates a range of addresses with a specific object. This +// is useful when certain objects of variable size are located within an +// address space. 
The range map makes it simple to determine which object is +// associated with a specific address, which may be any address within the +// range associated with an object. +// +// Author: Mark Mentovai + +#ifndef PROCESSOR_RANGE_MAP_H__ +#define PROCESSOR_RANGE_MAP_H__ + + +#include + + +namespace google_breakpad { + +// Forward declarations (for later friend declarations of specialized template). +template class RangeMapSerializer; + +template +class RangeMap { + public: + RangeMap() : map_() {} + + // Inserts a range into the map. Returns false for a parameter error, + // or if the location of the range would conflict with a range already + // stored in the map. + bool StoreRange(const AddressType &base, + const AddressType &size, + const EntryType &entry); + + // Locates the range encompassing the supplied address. If there is + // no such range, returns false. entry_base and entry_size, if non-NULL, + // are set to the base and size of the entry's range. + bool RetrieveRange(const AddressType &address, EntryType *entry, + AddressType *entry_base, AddressType *entry_size) const; + + // Locates the range encompassing the supplied address, if one exists. + // If no range encompasses the supplied address, locates the nearest range + // to the supplied address that is lower than the address. Returns false + // if no range meets these criteria. entry_base and entry_size, if + // non-NULL, are set to the base and size of the entry's range. + bool RetrieveNearestRange(const AddressType &address, EntryType *entry, + AddressType *entry_base, AddressType *entry_size) + const; + + // Treating all ranges as a list ordered by the address spaces that they + // occupy, locates the range at the index specified by index. Returns + // false if index is larger than the number of ranges stored. entry_base + // and entry_size, if non-NULL, are set to the base and size of the entry's + // range. + // + // RetrieveRangeAtIndex is not optimized for speedy operation. + bool RetrieveRangeAtIndex(int index, EntryType *entry, + AddressType *entry_base, AddressType *entry_size) + const; + + // Returns the number of ranges stored in the RangeMap. + int GetCount() const; + + // Empties the range map, restoring it to the state it was when it was + // initially created. + void Clear(); + + private: + // Friend declarations. + friend class ModuleComparer; + friend class RangeMapSerializer; + + class Range { + public: + Range(const AddressType &base, const EntryType &entry) + : base_(base), entry_(entry) {} + + AddressType base() const { return base_; } + EntryType entry() const { return entry_; } + + private: + // The base address of the range. The high address does not need to + // be stored, because RangeMap uses it as the key to the map. + const AddressType base_; + + // The entry corresponding to a range. + const EntryType entry_; + }; + + // Convenience types. + typedef std::map AddressToRangeMap; + typedef typename AddressToRangeMap::const_iterator MapConstIterator; + typedef typename AddressToRangeMap::value_type MapValue; + + // Maps the high address of each range to a EntryType. 
+ AddressToRangeMap map_; +}; + + +} // namespace google_breakpad + + +#endif // PROCESSOR_RANGE_MAP_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/range_map_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/range_map_unittest.cc new file mode 100644 index 0000000000..bf9b7279ad --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/range_map_unittest.cc @@ -0,0 +1,552 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// range_map_unittest.cc: Unit tests for RangeMap +// +// Author: Mark Mentovai + + +#include +#include + +#include "processor/range_map-inl.h" + +#include "common/scoped_ptr.h" +#include "processor/linked_ptr.h" +#include "processor/logging.h" + +namespace { + + +using google_breakpad::linked_ptr; +using google_breakpad::scoped_ptr; +using google_breakpad::RangeMap; + + +// A CountedObject holds an int. A global (not thread safe!) count of +// allocated CountedObjects is maintained to help test memory management. +class CountedObject { + public: + explicit CountedObject(int id) : id_(id) { ++count_; } + ~CountedObject() { --count_; } + + static int count() { return count_; } + int id() const { return id_; } + + private: + static int count_; + int id_; +}; + +int CountedObject::count_; + + +typedef int AddressType; +typedef RangeMap< AddressType, linked_ptr > TestMap; + + +// RangeTest contains data to use for store and retrieve tests. See +// RunTests for descriptions of the tests. +struct RangeTest { + // Base address to use for test + AddressType address; + + // Size of range to use for test + AddressType size; + + // Unique ID of range - unstorable ranges must have unique IDs too + int id; + + // Whether this range is expected to be stored successfully or not + bool expect_storable; +}; + + +// A RangeTestSet encompasses multiple RangeTests, which are run in +// sequence on the same RangeMap. 
+struct RangeTestSet { + // An array of RangeTests + const RangeTest *range_tests; + + // The number of tests in the set + unsigned int range_test_count; +}; + + +// StoreTest uses the data in a RangeTest and calls StoreRange on the +// test RangeMap. It returns true if the expected result occurred, and +// false if something else happened. +static bool StoreTest(TestMap *range_map, const RangeTest *range_test) { + linked_ptr object(new CountedObject(range_test->id)); + bool stored = range_map->StoreRange(range_test->address, + range_test->size, + object); + + if (stored != range_test->expect_storable) { + fprintf(stderr, "FAILED: " + "StoreRange id %d, expected %s, observed %s\n", + range_test->id, + range_test->expect_storable ? "storable" : "not storable", + stored ? "stored" : "not stored"); + return false; + } + + return true; +} + + +// RetrieveTest uses the data in RangeTest and calls RetrieveRange on the +// test RangeMap. If it retrieves the expected value (which can be no +// map entry at the specified range,) it returns true, otherwise, it returns +// false. RetrieveTest will check the values around the base address and +// the high address of a range to guard against off-by-one errors. +static bool RetrieveTest(TestMap *range_map, const RangeTest *range_test) { + for (unsigned int side = 0; side <= 1; ++side) { + // When side == 0, check the low side (base address) of each range. + // When side == 1, check the high side (base + size) of each range. + + // Check one-less and one-greater than the target address in addition + // to the target address itself. + + // If the size of the range is only 1, don't check one greater than + // the base or one less than the high - for a successfully stored + // range, these tests would erroneously fail because the range is too + // small. + AddressType low_offset = -1; + AddressType high_offset = 1; + if (range_test->size == 1) { + if (!side) // When checking the low side, + high_offset = 0; // don't check one over the target. + else // When checking the high side, + low_offset = 0; // don't check one under the target. + } + + for (AddressType offset = low_offset; offset <= high_offset; ++offset) { + AddressType address = + offset + + (!side ? range_test->address : + range_test->address + range_test->size - 1); + + bool expected_result = false; // This is correct for tests not stored. + if (range_test->expect_storable) { + if (offset == 0) // When checking the target address, + expected_result = true; // test should always succeed. + else if (offset == -1) // When checking one below the target, + expected_result = side; // should fail low and succeed high. + else // When checking one above the target, + expected_result = !side; // should succeed low and fail high. + } + + linked_ptr object; + AddressType retrieved_base = AddressType(); + AddressType retrieved_size = AddressType(); + bool retrieved = range_map->RetrieveRange(address, &object, + &retrieved_base, + &retrieved_size); + + bool observed_result = retrieved && object->id() == range_test->id; + + if (observed_result != expected_result) { + fprintf(stderr, "FAILED: " + "RetrieveRange id %d, side %d, offset %d, " + "expected %s, observed %s\n", + range_test->id, + side, + offset, + expected_result ? "true" : "false", + observed_result ? "true" : "false"); + return false; + } + + // If a range was successfully retrieved, check that the returned + // bounds match the range as stored. 
+ if (observed_result == true && + (retrieved_base != range_test->address || + retrieved_size != range_test->size)) { + fprintf(stderr, "FAILED: " + "RetrieveRange id %d, side %d, offset %d, " + "expected base/size %d/%d, observed %d/%d\n", + range_test->id, + side, + offset, + range_test->address, range_test->size, + retrieved_base, retrieved_size); + return false; + } + + // Now, check RetrieveNearestRange. The nearest range is always + // expected to be different from the test range when checking one + // less than the low side. + bool expected_nearest = range_test->expect_storable; + if (!side && offset < 0) + expected_nearest = false; + + linked_ptr nearest_object; + AddressType nearest_base = AddressType(); + AddressType nearest_size = AddressType(); + bool retrieved_nearest = range_map->RetrieveNearestRange(address, + &nearest_object, + &nearest_base, + &nearest_size); + + // When checking one greater than the high side, RetrieveNearestRange + // should usually return the test range. When a different range begins + // at that address, though, then RetrieveNearestRange should return the + // range at the address instead of the test range. + if (side && offset > 0 && nearest_base == address) { + expected_nearest = false; + } + + bool observed_nearest = retrieved_nearest && + nearest_object->id() == range_test->id; + + if (observed_nearest != expected_nearest) { + fprintf(stderr, "FAILED: " + "RetrieveNearestRange id %d, side %d, offset %d, " + "expected %s, observed %s\n", + range_test->id, + side, + offset, + expected_nearest ? "true" : "false", + observed_nearest ? "true" : "false"); + return false; + } + + // If a range was successfully retrieved, check that the returned + // bounds match the range as stored. + if (expected_nearest && + (nearest_base != range_test->address || + nearest_size != range_test->size)) { + fprintf(stderr, "FAILED: " + "RetrieveNearestRange id %d, side %d, offset %d, " + "expected base/size %d/%d, observed %d/%d\n", + range_test->id, + side, + offset, + range_test->address, range_test->size, + nearest_base, nearest_size); + return false; + } + } + } + + return true; +} + + +// Test RetrieveRangeAtIndex, which is supposed to return objects in order +// according to their addresses. This test is performed by looping through +// the map, calling RetrieveRangeAtIndex for all possible indices in sequence, +// and verifying that each call returns a different object than the previous +// call, and that ranges are returned with increasing base addresses. Returns +// false if the test fails. +static bool RetrieveIndexTest(TestMap *range_map, int set) { + linked_ptr object; + CountedObject *last_object = NULL; + AddressType last_base = 0; + + int object_count = range_map->GetCount(); + for (int object_index = 0; object_index < object_count; ++object_index) { + AddressType base; + if (!range_map->RetrieveRangeAtIndex(object_index, &object, &base, NULL)) { + fprintf(stderr, "FAILED: RetrieveRangeAtIndex set %d index %d, " + "expected success, observed failure\n", + set, object_index); + return false; + } + + if (!object.get()) { + fprintf(stderr, "FAILED: RetrieveRangeAtIndex set %d index %d, " + "expected object, observed NULL\n", + set, object_index); + return false; + } + + // It's impossible to do these comparisons unless there's a previous + // object to compare against. + if (last_object) { + // The object must be different from the last one. 
+ if (object->id() == last_object->id()) { + fprintf(stderr, "FAILED: RetrieveRangeAtIndex set %d index %d, " + "expected different objects, observed same objects (%d)\n", + set, object_index, object->id()); + return false; + } + + // Each object must have a base greater than the previous object's base. + if (base <= last_base) { + fprintf(stderr, "FAILED: RetrieveRangeAtIndex set %d index %d, " + "expected different bases, observed same bases (%d)\n", + set, object_index, base); + return false; + } + } + + last_object = object.get(); + last_base = base; + } + + // Make sure that RetrieveRangeAtIndex doesn't allow lookups at indices that + // are too high. + if (range_map->RetrieveRangeAtIndex(object_count, &object, NULL, NULL)) { + fprintf(stderr, "FAILED: RetrieveRangeAtIndex set %d index %d (too large), " + "expected failure, observed success\n", + set, object_count); + return false; + } + + return true; +} + +// Additional RetriveAtIndex test to expose the bug in RetrieveRangeAtIndex(). +// Bug info: RetrieveRangeAtIndex() previously retrieves the high address of +// entry, however, it is supposed to retrieve the base address of entry as +// stated in the comment in range_map.h. +static bool RetriveAtIndexTest2() { + scoped_ptr range_map(new TestMap()); + + // Store ranges with base address = 2 * object_id: + const int range_size = 2; + for (int object_id = 0; object_id < 100; ++object_id) { + linked_ptr object(new CountedObject(object_id)); + int base_address = 2 * object_id; + range_map->StoreRange(base_address, range_size, object); + } + + linked_ptr object; + int object_count = range_map->GetCount(); + for (int object_index = 0; object_index < object_count; ++object_index) { + AddressType base; + if (!range_map->RetrieveRangeAtIndex(object_index, &object, &base, NULL)) { + fprintf(stderr, "FAILED: RetrieveAtIndexTest2 index %d, " + "expected success, observed failure\n", object_index); + return false; + } + + int expected_base = 2 * object->id(); + if (base != expected_base) { + fprintf(stderr, "FAILED: RetriveAtIndexTest2 index %d, " + "expected base %d, observed base %d", + object_index, expected_base, base); + return false; + } + } + + return true; +} + + +// RunTests runs a series of test sets. +static bool RunTests() { + // These tests will be run sequentially. The first set of tests exercises + // most functions of RangeTest, and verifies all of the bounds-checking. 
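  // Minimal usage sketch (not part of the upstream Breakpad sources), based
  // only on the interface declared in range_map.h:
  //
  //   RangeMap<int, int> map;
  //   int entry, base, size;
  //   map.StoreRange(10, 10, 1);                     // covers 10..19
  //   map.RetrieveRange(15, &entry, &base, &size);   // entry 1, base 10, size 10
  //   map.StoreRange(15, 10, 2);                     // rejected: overlaps 10..19
  //
  // The tables below exercise exactly these behaviors, plus the overflow and
  // zero-size checks performed by StoreRange.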
+ const RangeTest range_tests_0[] = { + { INT_MIN, 16, 1, true }, // lowest possible range + { -2, 5, 2, true }, // a range through zero + { INT_MAX - 9, 11, 3, false }, // tests anti-overflow + { INT_MAX - 9, 10, 4, true }, // highest possible range + { 5, 0, 5, false }, // tests anti-zero-size + { 5, 1, 6, true }, // smallest possible range + { -20, 15, 7, true }, // entirely negative + + { 10, 10, 10, true }, // causes the following tests to fail + { 9, 10, 11, false }, // one-less base, one-less high + { 9, 11, 12, false }, // one-less base, identical high + { 9, 12, 13, false }, // completely contains existing + { 10, 9, 14, false }, // identical base, one-less high + { 10, 10, 15, false }, // exactly identical to existing range + { 10, 11, 16, false }, // identical base, one-greater high + { 11, 8, 17, false }, // contained completely within + { 11, 9, 18, false }, // one-greater base, identical high + { 11, 10, 19, false }, // one-greater base, one-greater high + { 9, 2, 20, false }, // overlaps bottom by one + { 10, 1, 21, false }, // overlaps bottom by one, contained + { 19, 1, 22, false }, // overlaps top by one, contained + { 19, 2, 23, false }, // overlaps top by one + + { 9, 1, 24, true }, // directly below without overlap + { 20, 1, 25, true }, // directly above without overlap + + { 6, 3, 26, true }, // exactly between two ranges, gapless + { 7, 3, 27, false }, // tries to span two ranges + { 7, 5, 28, false }, // tries to span three ranges + { 4, 20, 29, false }, // tries to contain several ranges + + { 30, 50, 30, true }, + { 90, 25, 31, true }, + { 35, 65, 32, false }, // tries to span two noncontiguous + { 120, 10000, 33, true }, // > 8-bit + { 20000, 20000, 34, true }, // > 8-bit + { 0x10001, 0x10001, 35, true }, // > 16-bit + + { 27, -1, 36, false } // tests high < base + }; + + // Attempt to fill the entire space. The entire space must be filled with + // three stores because AddressType is signed for these tests, so RangeMap + // treats the size as signed and rejects sizes that appear to be negative. + // Even if these tests were run as unsigned, two stores would be needed + // to fill the space because the entire size of the space could only be + // described by using one more bit than would be present in AddressType. + const RangeTest range_tests_1[] = { + { INT_MIN, INT_MAX, 50, true }, // From INT_MIN to -2, inclusive + { -1, 2, 51, true }, // From -1 to 0, inclusive + { 1, INT_MAX, 52, true }, // From 1 to INT_MAX, inclusive + { INT_MIN, INT_MAX, 53, false }, // Can't fill the space twice + { -1, 2, 54, false }, + { 1, INT_MAX, 55, false }, + { -3, 6, 56, false }, // -3 to 2, inclusive - spans 3 ranges + }; + + // A light round of testing to verify that RetrieveRange does the right + // the right thing at the extremities of the range when nothing is stored + // there. Checks are forced without storing anything at the extremities + // by setting size = 0. + const RangeTest range_tests_2[] = { + { INT_MIN, 0, 100, false }, // makes RetrieveRange check low end + { -1, 3, 101, true }, + { INT_MAX, 0, 102, false }, // makes RetrieveRange check high end + }; + + // Similar to the previous test set, but with a couple of ranges closer + // to the extremities. + const RangeTest range_tests_3[] = { + { INT_MIN + 1, 1, 110, true }, + { INT_MAX - 1, 1, 111, true }, + { INT_MIN, 0, 112, false }, // makes RetrieveRange check low end + { INT_MAX, 0, 113, false } // makes RetrieveRange check high end + }; + + // The range map is cleared between sets of tests listed here. 
+ const RangeTestSet range_test_sets[] = { + { range_tests_0, sizeof(range_tests_0) / sizeof(RangeTest) }, + { range_tests_1, sizeof(range_tests_1) / sizeof(RangeTest) }, + { range_tests_2, sizeof(range_tests_2) / sizeof(RangeTest) }, + { range_tests_3, sizeof(range_tests_3) / sizeof(RangeTest) }, + { range_tests_0, sizeof(range_tests_0) / sizeof(RangeTest) } // Run again + }; + + // Maintain the range map in a pointer so that deletion can be meaningfully + // tested. + scoped_ptr range_map(new TestMap()); + + // Run all of the test sets in sequence. + unsigned int range_test_set_count = sizeof(range_test_sets) / + sizeof(RangeTestSet); + for (unsigned int range_test_set_index = 0; + range_test_set_index < range_test_set_count; + ++range_test_set_index) { + const RangeTest *range_tests = + range_test_sets[range_test_set_index].range_tests; + unsigned int range_test_count = + range_test_sets[range_test_set_index].range_test_count; + + // Run the StoreRange test, which validates StoreRange and initializes + // the RangeMap with data for the RetrieveRange test. + int stored_count = 0; // The number of ranges successfully stored + for (unsigned int range_test_index = 0; + range_test_index < range_test_count; + ++range_test_index) { + const RangeTest *range_test = &range_tests[range_test_index]; + if (!StoreTest(range_map.get(), range_test)) + return false; + + if (range_test->expect_storable) + ++stored_count; + } + + // There should be exactly one CountedObject for everything successfully + // stored in the RangeMap. + if (CountedObject::count() != stored_count) { + fprintf(stderr, "FAILED: " + "stored object counts don't match, expected %d, observed %d\n", + stored_count, + CountedObject::count()); + + return false; + } + + // The RangeMap's own count of objects should also match. + if (range_map->GetCount() != stored_count) { + fprintf(stderr, "FAILED: stored object count doesn't match GetCount, " + "expected %d, observed %d\n", + stored_count, range_map->GetCount()); + + return false; + } + + // Run the RetrieveRange test + for (unsigned int range_test_index = 0; + range_test_index < range_test_count; + ++range_test_index) { + const RangeTest *range_test = &range_tests[range_test_index]; + if (!RetrieveTest(range_map.get(), range_test)) + return false; + } + + if (!RetrieveIndexTest(range_map.get(), range_test_set_index)) + return false; + + // Clear the map between test sets. If this is the final test set, + // delete the map instead to test destruction. + if (range_test_set_index < range_test_set_count - 1) + range_map->Clear(); + else + range_map.reset(); + + // Test that all stored objects are freed when the RangeMap is cleared + // or deleted. + if (CountedObject::count() != 0) { + fprintf(stderr, "FAILED: " + "did not free all objects after %s, %d still allocated\n", + range_test_set_index < range_test_set_count - 1 ? "clear" + : "delete", + CountedObject::count()); + + return false; + } + } + + if (!RetriveAtIndexTest2()) { + fprintf(stderr, "FAILED: did not pass RetrieveAtIndexTest2()\n"); + return false; + } + + return true; +} + + +} // namespace + + +int main(int argc, char **argv) { + BPLOG_INIT(&argc, &argv); + + return RunTests() ? 
0 : 1; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/simple_serializer-inl.h b/TMessagesProj/jni/third_party/breakpad/src/processor/simple_serializer-inl.h new file mode 100644 index 0000000000..606bb3cead --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/simple_serializer-inl.h @@ -0,0 +1,260 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// simple_serializer-inl.h: template specializations for following types: +// bool, const char *(C-string), string, +// Line, Function, PublicSymbol, WindowsFrameInfo and their linked pointers. +// +// See simple_serializer.h for moredocumentation. +// +// Author: Siyang Xie (lambxsy@google.com) + +#ifndef PROCESSOR_SIMPLE_SERIALIZER_INL_H__ +#define PROCESSOR_SIMPLE_SERIALIZER_INL_H__ + +#include + +#include "processor/simple_serializer.h" +#include "map_serializers-inl.h" + +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "processor/basic_source_line_resolver_types.h" +#include "processor/linked_ptr.h" +#include "processor/windows_frame_info.h" + +namespace google_breakpad { + +// Specializations of SimpleSerializer: bool +template<> +class SimpleSerializer { + public: + static size_t SizeOf(bool boolean) { return 1; } + + static char *Write(bool boolean, char *dest) { + *dest = static_cast(boolean? 255 : 0); + return ++dest; + } + + static const char *Read(const char *source, bool *value) { + *value = ((*source) == 0 ? 
false : true); + return ++source; + } +}; + +// Specializations of SimpleSerializer: string +template<> +class SimpleSerializer { + public: + static size_t SizeOf(const string &str) { return str.size() + 1; } + + static char *Write(const string &str, char *dest) { + strcpy(dest, str.c_str()); + return dest + SizeOf(str); + } +}; + +// Specializations of SimpleSerializer: C-string +template<> +class SimpleSerializer { + public: + static size_t SizeOf(const char *cstring) { + return strlen(cstring) + 1; + } + + static char *Write(const char *cstring, char *dest) { + strcpy(dest, cstring); + return dest + SizeOf(cstring); + } +}; + +// Specializations of SimpleSerializer: Line +template<> +class SimpleSerializer { + typedef BasicSourceLineResolver::Line Line; + public: + static size_t SizeOf(const Line &line) { + return SimpleSerializer::SizeOf(line.address) + + SimpleSerializer::SizeOf(line.size) + + SimpleSerializer::SizeOf(line.source_file_id) + + SimpleSerializer::SizeOf(line.line); + } + static char *Write(const Line &line, char *dest) { + dest = SimpleSerializer::Write(line.address, dest); + dest = SimpleSerializer::Write(line.size, dest); + dest = SimpleSerializer::Write(line.source_file_id, dest); + dest = SimpleSerializer::Write(line.line, dest); + return dest; + } +}; + +// Specializations of SimpleSerializer: PublicSymbol +template<> +class SimpleSerializer { + typedef BasicSourceLineResolver::PublicSymbol PublicSymbol; + public: + static size_t SizeOf(const PublicSymbol &pubsymbol) { + return SimpleSerializer::SizeOf(pubsymbol.name) + + SimpleSerializer::SizeOf(pubsymbol.address) + + SimpleSerializer::SizeOf(pubsymbol.parameter_size); + } + static char *Write(const PublicSymbol &pubsymbol, char *dest) { + dest = SimpleSerializer::Write(pubsymbol.name, dest); + dest = SimpleSerializer::Write(pubsymbol.address, dest); + dest = SimpleSerializer::Write(pubsymbol.parameter_size, dest); + return dest; + } +}; + +// Specializations of SimpleSerializer: WindowsFrameInfo +template<> +class SimpleSerializer { + public: + static size_t SizeOf(const WindowsFrameInfo &wfi) { + unsigned int size = 0; + size += sizeof(int32_t); // wfi.type_ + size += SimpleSerializer::SizeOf(wfi.valid); + size += SimpleSerializer::SizeOf(wfi.prolog_size); + size += SimpleSerializer::SizeOf(wfi.epilog_size); + size += SimpleSerializer::SizeOf(wfi.parameter_size); + size += SimpleSerializer::SizeOf(wfi.saved_register_size); + size += SimpleSerializer::SizeOf(wfi.local_size); + size += SimpleSerializer::SizeOf(wfi.max_stack_size); + size += SimpleSerializer::SizeOf(wfi.allocates_base_pointer); + size += SimpleSerializer::SizeOf(wfi.program_string); + return size; + } + static char *Write(const WindowsFrameInfo &wfi, char *dest) { + dest = SimpleSerializer::Write( + static_cast(wfi.type_), dest); + dest = SimpleSerializer::Write(wfi.valid, dest); + dest = SimpleSerializer::Write(wfi.prolog_size, dest); + dest = SimpleSerializer::Write(wfi.epilog_size, dest); + dest = SimpleSerializer::Write(wfi.parameter_size, dest); + dest = SimpleSerializer::Write(wfi.saved_register_size, dest); + dest = SimpleSerializer::Write(wfi.local_size, dest); + dest = SimpleSerializer::Write(wfi.max_stack_size, dest); + dest = SimpleSerializer::Write(wfi.allocates_base_pointer, dest); + return SimpleSerializer::Write(wfi.program_string, dest); + } +}; + +// Specializations of SimpleSerializer: Linked_ptr version of +// Line, Function, PublicSymbol, WindowsFrameInfo. 
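Before the linked_ptr variants below, a quick sketch of how the SizeOf()/Write() pairs above are meant to be used together: measure first, then serialize into an exactly-sized buffer, with Write() returning the address one past the last byte written. The SerializeName() helper is hypothetical, and the include assumes Breakpad's src/ and src/processor/ directories are on the compiler's include path.

    #include <cassert>
    #include <string>
    #include "processor/simple_serializer-inl.h"

    // Hypothetical two-pass use of a SimpleSerializer specialization.
    static void SerializeName(const std::string &name) {
      using google_breakpad::SimpleSerializer;
      size_t size = SimpleSerializer<std::string>::SizeOf(name);  // length + trailing '\0'
      char *buffer = new char[size];
      char *end = SimpleSerializer<std::string>::Write(name, buffer);
      assert(end == buffer + size);
      delete[] buffer;
    }
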
+template<> +class SimpleSerializer< linked_ptr > { + typedef BasicSourceLineResolver::Line Line; + public: + static size_t SizeOf(const linked_ptr &lineptr) { + if (lineptr.get() == NULL) return 0; + return SimpleSerializer::SizeOf(*(lineptr.get())); + } + static char *Write(const linked_ptr &lineptr, char *dest) { + if (lineptr.get()) + dest = SimpleSerializer::Write(*(lineptr.get()), dest); + return dest; + } +}; + +template<> +class SimpleSerializer { + // Convenient type names. + typedef BasicSourceLineResolver::Function Function; + typedef BasicSourceLineResolver::Line Line; + public: + static size_t SizeOf(const Function &func) { + unsigned int size = 0; + size += SimpleSerializer::SizeOf(func.name); + size += SimpleSerializer::SizeOf(func.address); + size += SimpleSerializer::SizeOf(func.size); + size += SimpleSerializer::SizeOf(func.parameter_size); + size += range_map_serializer_.SizeOf(func.lines); + return size; + } + + static char *Write(const Function &func, char *dest) { + dest = SimpleSerializer::Write(func.name, dest); + dest = SimpleSerializer::Write(func.address, dest); + dest = SimpleSerializer::Write(func.size, dest); + dest = SimpleSerializer::Write(func.parameter_size, dest); + dest = range_map_serializer_.Write(func.lines, dest); + return dest; + } + private: + // This static member is defined in module_serializer.cc. + static RangeMapSerializer< MemAddr, linked_ptr > range_map_serializer_; +}; + +template<> +class SimpleSerializer< linked_ptr > { + typedef BasicSourceLineResolver::Function Function; + public: + static size_t SizeOf(const linked_ptr &func) { + if (!func.get()) return 0; + return SimpleSerializer::SizeOf(*(func.get())); + } + + static char *Write(const linked_ptr &func, char *dest) { + if (func.get()) + dest = SimpleSerializer::Write(*(func.get()), dest); + return dest; + } +}; + +template<> +class SimpleSerializer< linked_ptr > { + typedef BasicSourceLineResolver::PublicSymbol PublicSymbol; + public: + static size_t SizeOf(const linked_ptr &pubsymbol) { + if (pubsymbol.get() == NULL) return 0; + return SimpleSerializer::SizeOf(*(pubsymbol.get())); + } + static char *Write(const linked_ptr &pubsymbol, char *dest) { + if (pubsymbol.get()) + dest = SimpleSerializer::Write(*(pubsymbol.get()), dest); + return dest; + } +}; + +template<> +class SimpleSerializer< linked_ptr > { + public: + static size_t SizeOf(const linked_ptr &wfi) { + if (wfi.get() == NULL) return 0; + return SimpleSerializer::SizeOf(*(wfi.get())); + } + static char *Write(const linked_ptr &wfi, char *dest) { + if (wfi.get()) + dest = SimpleSerializer::Write(*(wfi.get()), dest); + return dest; + } +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_SIMPLE_SERIALIZER_INL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/simple_serializer.h b/TMessagesProj/jni/third_party/breakpad/src/processor/simple_serializer.h new file mode 100644 index 0000000000..275f51ce36 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/simple_serializer.h @@ -0,0 +1,63 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// simple_serializer.h: SimpleSerializer is a template for calculating size and +// writing to specific memory location for objects of primitive types, C-style +// string, string, breakpad types/structs etc. +// All specializations of SimpleSerializer template are defined in the +// "simple_serializer-inl.h" file. +// +// Author: Siyang Xie (lambxsy@google.com) + +#ifndef PROCESSOR_SIMPLE_SERIALIZER_H__ +#define PROCESSOR_SIMPLE_SERIALIZER_H__ + +#include "google_breakpad/common/breakpad_types.h" + +namespace google_breakpad { + +typedef uint64_t MemAddr; + +// Default implementation of SimpleSerializer template. +// Specializations are defined in "simple_serializer-inl.h". +template class SimpleSerializer { + public: + // Calculate and return the size of the 'item'. + static size_t SizeOf(const Type &item) { return sizeof(item); } + // Write 'item' to memory location 'dest', and return to the "end" address of + // data written, i.e., the address after the final byte written. + static char *Write(const Type &item, char *dest) { + new (dest) Type(item); + return dest + SizeOf(item); + } +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_SIMPLE_SERIALIZER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/simple_symbol_supplier.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/simple_symbol_supplier.cc new file mode 100644 index 0000000000..bc5ebb687b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/simple_symbol_supplier.cc @@ -0,0 +1,204 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// simple_symbol_supplier.cc: A simple SymbolSupplier implementation +// +// See simple_symbol_supplier.h for documentation. +// +// Author: Mark Mentovai + +#include "processor/simple_symbol_supplier.h" + +#include +#include +#include +#include + +#include +#include +#include + +#include "common/using_std_string.h" +#include "google_breakpad/processor/code_module.h" +#include "google_breakpad/processor/system_info.h" +#include "processor/logging.h" +#include "processor/pathname_stripper.h" + +namespace google_breakpad { + +static bool file_exists(const string &file_name) { + struct stat sb; + return stat(file_name.c_str(), &sb) == 0; +} + +SymbolSupplier::SymbolResult SimpleSymbolSupplier::GetSymbolFile( + const CodeModule *module, const SystemInfo *system_info, + string *symbol_file) { + BPLOG_IF(ERROR, !symbol_file) << "SimpleSymbolSupplier::GetSymbolFile " + "requires |symbol_file|"; + assert(symbol_file); + symbol_file->clear(); + + for (unsigned int path_index = 0; path_index < paths_.size(); ++path_index) { + SymbolResult result; + if ((result = GetSymbolFileAtPathFromRoot(module, system_info, + paths_[path_index], + symbol_file)) != NOT_FOUND) { + return result; + } + } + return NOT_FOUND; +} + +SymbolSupplier::SymbolResult SimpleSymbolSupplier::GetSymbolFile( + const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file, + string *symbol_data) { + assert(symbol_data); + symbol_data->clear(); + + SymbolSupplier::SymbolResult s = GetSymbolFile(module, system_info, + symbol_file); + if (s == FOUND) { + std::ifstream in(symbol_file->c_str()); + std::getline(in, *symbol_data, string::traits_type::to_char_type( + string::traits_type::eof())); + in.close(); + } + return s; +} + +SymbolSupplier::SymbolResult SimpleSymbolSupplier::GetCStringSymbolData( + const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file, + char **symbol_data, + size_t *symbol_data_size) { + assert(symbol_data); + assert(symbol_data_size); + + string symbol_data_string; + SymbolSupplier::SymbolResult s = + GetSymbolFile(module, system_info, symbol_file, &symbol_data_string); + + if (s == FOUND) { + *symbol_data_size = symbol_data_string.size() + 1; + *symbol_data = new char[*symbol_data_size]; + if (*symbol_data == NULL) { + BPLOG(ERROR) << "Memory allocation for size " << *symbol_data_size + << " failed"; + return INTERRUPT; + } + memcpy(*symbol_data, symbol_data_string.c_str(), symbol_data_string.size()); + (*symbol_data)[symbol_data_string.size()] = '\0'; + memory_buffers_.insert(make_pair(module->code_file(), *symbol_data)); + } + return s; +} + +void SimpleSymbolSupplier::FreeSymbolData(const CodeModule *module) { + if (!module) { + BPLOG(INFO) << "Cannot free symbol data 
buffer for NULL module"; + return; + } + + map::iterator it = memory_buffers_.find(module->code_file()); + if (it == memory_buffers_.end()) { + BPLOG(INFO) << "Cannot find symbol data buffer for module " + << module->code_file(); + return; + } + delete [] it->second; + memory_buffers_.erase(it); +} + +SymbolSupplier::SymbolResult SimpleSymbolSupplier::GetSymbolFileAtPathFromRoot( + const CodeModule *module, const SystemInfo *system_info, + const string &root_path, string *symbol_file) { + BPLOG_IF(ERROR, !symbol_file) << "SimpleSymbolSupplier::GetSymbolFileAtPath " + "requires |symbol_file|"; + assert(symbol_file); + symbol_file->clear(); + + if (!module) + return NOT_FOUND; + + // Start with the base path. + string path = root_path; + + // Append the debug (pdb) file name as a directory name. + path.append("/"); + string debug_file_name = PathnameStripper::File(module->debug_file()); + if (debug_file_name.empty()) { + BPLOG(ERROR) << "Can't construct symbol file path without debug_file " + "(code_file = " << + PathnameStripper::File(module->code_file()) << ")"; + return NOT_FOUND; + } + path.append(debug_file_name); + + // Append the identifier as a directory name. + path.append("/"); + string identifier = module->debug_identifier(); + if (identifier.empty()) { + BPLOG(ERROR) << "Can't construct symbol file path without debug_identifier " + "(code_file = " << + PathnameStripper::File(module->code_file()) << + ", debug_file = " << debug_file_name << ")"; + return NOT_FOUND; + } + path.append(identifier); + + // Transform the debug file name into one ending in .sym. If the existing + // name ends in .pdb, strip the .pdb. Otherwise, add .sym to the non-.pdb + // name. + path.append("/"); + string debug_file_extension; + if (debug_file_name.size() > 4) + debug_file_extension = debug_file_name.substr(debug_file_name.size() - 4); + std::transform(debug_file_extension.begin(), debug_file_extension.end(), + debug_file_extension.begin(), tolower); + if (debug_file_extension == ".pdb") { + path.append(debug_file_name.substr(0, debug_file_name.size() - 4)); + } else { + path.append(debug_file_name); + } + path.append(".sym"); + + if (!file_exists(path)) { + BPLOG(INFO) << "No symbol file at " << path; + return NOT_FOUND; + } + + *symbol_file = path; + return FOUND; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/simple_symbol_supplier.h b/TMessagesProj/jni/third_party/breakpad/src/processor/simple_symbol_supplier.h new file mode 100644 index 0000000000..0cde85cdcd --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/simple_symbol_supplier.h @@ -0,0 +1,140 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// simple_symbol_supplier.h: A simple SymbolSupplier implementation +// +// SimpleSymbolSupplier is a straightforward implementation of SymbolSupplier +// that stores symbol files in a filesystem tree. A SimpleSymbolSupplier is +// created with one or more base directories, which are the root paths for all +// symbol files. Each symbol file contained therein has a directory entry in +// the base directory with a name identical to the corresponding debugging +// file (pdb). Within each of these directories, there are subdirectories +// named for the debugging file's identifier. For recent pdb files, this is +// a concatenation of the pdb's uuid and age, presented in hexadecimal form, +// without any dashes or separators. The uuid is in uppercase hexadecimal +// and the age is in lowercase hexadecimal. Within that subdirectory, +// SimpleSymbolSupplier expects to find the symbol file, which is named +// identically to the debug file, but with a .sym extension. If the original +// debug file had a name ending in .pdb, the .pdb extension will be replaced +// with .sym. This sample hierarchy is rooted at the "symbols" base +// directory: +// +// symbols +// symbols/test_app.pdb +// symbols/test_app.pdb/63FE4780728D49379B9D7BB6460CB42A1 +// symbols/test_app.pdb/63FE4780728D49379B9D7BB6460CB42A1/test_app.sym +// symbols/kernel32.pdb +// symbols/kernel32.pdb/BCE8785C57B44245A669896B6A19B9542 +// symbols/kernel32.pdb/BCE8785C57B44245A669896B6A19B9542/kernel32.sym +// +// In this case, the uuid of test_app.pdb is +// 63fe4780-728d-4937-9b9d-7bb6460cb42a and its age is 1. +// +// This scheme was chosen to be roughly analogous to the way that +// symbol files may be accessed from Microsoft Symbol Server. A hierarchy +// used for Microsoft Symbol Server storage is usable as a hierarchy for +// SimpleSymbolServer, provided that the pdb files are transformed to dumped +// format using a tool such as dump_syms, and given a .sym extension. +// +// SimpleSymbolSupplier will iterate over all root paths searching for +// a symbol file existing in that path. +// +// SimpleSymbolSupplier supports any debugging file which can be identified +// by a CodeModule object's debug_file and debug_identifier accessors. The +// expected ultimate source of these CodeModule objects are MinidumpModule +// objects; it is this class that is responsible for assigning appropriate +// values for debug_file and debug_identifier. 
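In other words, the supplier looks for <root>/<debug_file>/<debug_identifier>/<debug_file with a trailing .pdb replaced by .sym>. A minimal standalone sketch of that path construction, using the sample values from the comment above; the SymbolPathFor() helper is only an illustration of the layout, not Breakpad code.

    #include <iostream>
    #include <string>

    // Builds <root>/<debug_file>/<debug_identifier>/<stem>.sym, mirroring the
    // directory layout described above.
    static std::string SymbolPathFor(const std::string &root,
                                     const std::string &debug_file,
                                     const std::string &debug_identifier) {
      std::string stem = debug_file;
      if (stem.size() > 4 && stem.compare(stem.size() - 4, 4, ".pdb") == 0)
        stem.erase(stem.size() - 4);  // strip a trailing ".pdb"
      return root + "/" + debug_file + "/" + debug_identifier + "/" + stem + ".sym";
    }

    int main() {
      // Prints: symbols/test_app.pdb/63FE4780728D49379B9D7BB6460CB42A1/test_app.sym
      std::cout << SymbolPathFor("symbols", "test_app.pdb",
                                 "63FE4780728D49379B9D7BB6460CB42A1")
                << std::endl;
      return 0;
    }
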
+// +// Author: Mark Mentovai + +#ifndef PROCESSOR_SIMPLE_SYMBOL_SUPPLIER_H__ +#define PROCESSOR_SIMPLE_SYMBOL_SUPPLIER_H__ + +#include +#include +#include + +#include "common/using_std_string.h" +#include "google_breakpad/processor/symbol_supplier.h" + +namespace google_breakpad { + +using std::map; +using std::vector; + +class CodeModule; + +class SimpleSymbolSupplier : public SymbolSupplier { + public: + // Creates a new SimpleSymbolSupplier, using path as the root path where + // symbols are stored. + explicit SimpleSymbolSupplier(const string &path) : paths_(1, path) {} + + // Creates a new SimpleSymbolSupplier, using paths as a list of root + // paths where symbols may be stored. + explicit SimpleSymbolSupplier(const vector &paths) : paths_(paths) {} + + virtual ~SimpleSymbolSupplier() {} + + // Returns the path to the symbol file for the given module. See the + // description above. + virtual SymbolResult GetSymbolFile(const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file); + + virtual SymbolResult GetSymbolFile(const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file, + string *symbol_data); + + // Allocates data buffer on heap and writes symbol data into buffer. + // Symbol supplier ALWAYS takes ownership of the data buffer. + virtual SymbolResult GetCStringSymbolData(const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file, + char **symbol_data, + size_t *symbol_data_size); + + // Free the data buffer allocated in the above GetCStringSymbolData(); + virtual void FreeSymbolData(const CodeModule *module); + + protected: + SymbolResult GetSymbolFileAtPathFromRoot(const CodeModule *module, + const SystemInfo *system_info, + const string &root_path, + string *symbol_file); + + private: + map memory_buffers_; + vector paths_; +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_SIMPLE_SYMBOL_SUPPLIER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/source_line_resolver_base.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/source_line_resolver_base.cc new file mode 100644 index 0000000000..6eff1f9918 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/source_line_resolver_base.cc @@ -0,0 +1,341 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// source_line_resolver_base.cc: Implementation of SourceLineResolverBase. +// +// See source_line_resolver_base.h and source_line_resolver_base_types.h for +// more documentation. +// +// Author: Siyang Xie (lambxsy@google.com) + +#include +#include +#include + +#include +#include + +#include "google_breakpad/processor/source_line_resolver_base.h" +#include "processor/source_line_resolver_base_types.h" +#include "processor/module_factory.h" + +using std::map; +using std::make_pair; + +namespace google_breakpad { + +SourceLineResolverBase::SourceLineResolverBase( + ModuleFactory *module_factory) + : modules_(new ModuleMap), + corrupt_modules_(new ModuleSet), + memory_buffers_(new MemoryMap), + module_factory_(module_factory) { +} + +SourceLineResolverBase::~SourceLineResolverBase() { + ModuleMap::iterator it; + // Iterate through ModuleMap and delete all loaded modules. + for (it = modules_->begin(); it != modules_->end(); ++it) { + // Delete individual module. + delete it->second; + } + // Delete the map of modules. + delete modules_; + modules_ = NULL; + + // Delete the set of corrupt modules. + delete corrupt_modules_; + corrupt_modules_ = NULL; + + MemoryMap::iterator iter = memory_buffers_->begin(); + for (; iter != memory_buffers_->end(); ++iter) { + delete [] iter->second; + } + // Delete the map of memory buffers. + delete memory_buffers_; + memory_buffers_ = NULL; + + delete module_factory_; + module_factory_ = NULL; +} + +bool SourceLineResolverBase::ReadSymbolFile(const string &map_file, + char **symbol_data, + size_t *symbol_data_size) { + if (symbol_data == NULL || symbol_data_size == NULL) { + BPLOG(ERROR) << "Could not Read file into Null memory pointer"; + return false; + } + + struct stat buf; + int error_code = stat(map_file.c_str(), &buf); + if (error_code == -1) { + string error_string; + error_code = ErrnoString(&error_string); + BPLOG(ERROR) << "Could not open " << map_file << + ", error " << error_code << ": " << error_string; + return false; + } + + off_t file_size = buf.st_size; + + // Allocate memory for file contents, plus a null terminator + // since we may use strtok() on the contents. 
+ *symbol_data_size = file_size + 1; + *symbol_data = new char[file_size + 1]; + + if (*symbol_data == NULL) { + BPLOG(ERROR) << "Could not allocate memory for " << map_file; + return false; + } + + BPLOG(INFO) << "Opening " << map_file; + + FILE *f = fopen(map_file.c_str(), "rt"); + if (!f) { + string error_string; + error_code = ErrnoString(&error_string); + BPLOG(ERROR) << "Could not open " << map_file << + ", error " << error_code << ": " << error_string; + delete [] (*symbol_data); + *symbol_data = NULL; + return false; + } + + AutoFileCloser closer(f); + + int items_read = 0; + + items_read = fread(*symbol_data, 1, file_size, f); + + if (items_read != file_size) { + string error_string; + error_code = ErrnoString(&error_string); + BPLOG(ERROR) << "Could not slurp " << map_file << + ", error " << error_code << ": " << error_string; + delete [] (*symbol_data); + *symbol_data = NULL; + return false; + } + + (*symbol_data)[file_size] = '\0'; + return true; +} + +bool SourceLineResolverBase::LoadModule(const CodeModule *module, + const string &map_file) { + if (module == NULL) + return false; + + // Make sure we don't already have a module with the given name. + if (modules_->find(module->code_file()) != modules_->end()) { + BPLOG(INFO) << "Symbols for module " << module->code_file() + << " already loaded"; + return false; + } + + BPLOG(INFO) << "Loading symbols for module " << module->code_file() + << " from " << map_file; + + char *memory_buffer; + size_t memory_buffer_size; + if (!ReadSymbolFile(map_file, &memory_buffer, &memory_buffer_size)) + return false; + + BPLOG(INFO) << "Read symbol file " << map_file << " succeeded"; + + bool load_result = LoadModuleUsingMemoryBuffer(module, memory_buffer, + memory_buffer_size); + + if (load_result && !ShouldDeleteMemoryBufferAfterLoadModule()) { + // memory_buffer has to stay alive as long as the module. + memory_buffers_->insert(make_pair(module->code_file(), memory_buffer)); + } else { + delete [] memory_buffer; + } + + return load_result; +} + +bool SourceLineResolverBase::LoadModuleUsingMapBuffer( + const CodeModule *module, const string &map_buffer) { + if (module == NULL) + return false; + + // Make sure we don't already have a module with the given name. + if (modules_->find(module->code_file()) != modules_->end()) { + BPLOG(INFO) << "Symbols for module " << module->code_file() + << " already loaded"; + return false; + } + + size_t memory_buffer_size = map_buffer.size() + 1; + char *memory_buffer = new char[memory_buffer_size]; + if (memory_buffer == NULL) { + BPLOG(ERROR) << "Could not allocate memory for " << module->code_file(); + return false; + } + + // Can't use strcpy, as the data may contain '\0's before the end. + memcpy(memory_buffer, map_buffer.c_str(), map_buffer.size()); + memory_buffer[map_buffer.size()] = '\0'; + + bool load_result = LoadModuleUsingMemoryBuffer(module, memory_buffer, + memory_buffer_size); + + if (load_result && !ShouldDeleteMemoryBufferAfterLoadModule()) { + // memory_buffer has to stay alive as long as the module. + memory_buffers_->insert(make_pair(module->code_file(), memory_buffer)); + } else { + delete [] memory_buffer; + } + + return load_result; +} + +bool SourceLineResolverBase::LoadModuleUsingMemoryBuffer( + const CodeModule *module, + char *memory_buffer, + size_t memory_buffer_size) { + if (!module) + return false; + + // Make sure we don't already have a module with the given name. 
+ if (modules_->find(module->code_file()) != modules_->end()) { + BPLOG(INFO) << "Symbols for module " << module->code_file() + << " already loaded"; + return false; + } + + BPLOG(INFO) << "Loading symbols for module " << module->code_file() + << " from memory buffer"; + + Module *basic_module = module_factory_->CreateModule(module->code_file()); + + // Ownership of memory is NOT transfered to Module::LoadMapFromMemory(). + if (!basic_module->LoadMapFromMemory(memory_buffer, memory_buffer_size)) { + BPLOG(ERROR) << "Too many error while parsing symbol data for module " + << module->code_file(); + // Returning false from here would be an indication that the symbols for + // this module are missing which would be wrong. Intentionally fall through + // and add the module to both the modules_ and the corrupt_modules_ lists. + assert(basic_module->IsCorrupt()); + } + + modules_->insert(make_pair(module->code_file(), basic_module)); + if (basic_module->IsCorrupt()) { + corrupt_modules_->insert(module->code_file()); + } + return true; +} + +bool SourceLineResolverBase::ShouldDeleteMemoryBufferAfterLoadModule() { + return true; +} + +void SourceLineResolverBase::UnloadModule(const CodeModule *code_module) { + if (!code_module) + return; + + ModuleMap::iterator mod_iter = modules_->find(code_module->code_file()); + if (mod_iter != modules_->end()) { + Module *symbol_module = mod_iter->second; + delete symbol_module; + corrupt_modules_->erase(mod_iter->first); + modules_->erase(mod_iter); + } + + if (ShouldDeleteMemoryBufferAfterLoadModule()) { + // No-op. Because we never store any memory buffers. + } else { + // There may be a buffer stored locally, we need to find and delete it. + MemoryMap::iterator iter = memory_buffers_->find(code_module->code_file()); + if (iter != memory_buffers_->end()) { + delete [] iter->second; + memory_buffers_->erase(iter); + } + } +} + +bool SourceLineResolverBase::HasModule(const CodeModule *module) { + if (!module) + return false; + return modules_->find(module->code_file()) != modules_->end(); +} + +bool SourceLineResolverBase::IsModuleCorrupt(const CodeModule *module) { + if (!module) + return false; + return corrupt_modules_->find(module->code_file()) != corrupt_modules_->end(); +} + +void SourceLineResolverBase::FillSourceLineInfo(StackFrame *frame) { + if (frame->module) { + ModuleMap::const_iterator it = modules_->find(frame->module->code_file()); + if (it != modules_->end()) { + it->second->LookupAddress(frame); + } + } +} + +WindowsFrameInfo *SourceLineResolverBase::FindWindowsFrameInfo( + const StackFrame *frame) { + if (frame->module) { + ModuleMap::const_iterator it = modules_->find(frame->module->code_file()); + if (it != modules_->end()) { + return it->second->FindWindowsFrameInfo(frame); + } + } + return NULL; +} + +CFIFrameInfo *SourceLineResolverBase::FindCFIFrameInfo( + const StackFrame *frame) { + if (frame->module) { + ModuleMap::const_iterator it = modules_->find(frame->module->code_file()); + if (it != modules_->end()) { + return it->second->FindCFIFrameInfo(frame); + } + } + return NULL; +} + +bool SourceLineResolverBase::CompareString::operator()( + const string &s1, const string &s2) const { + return strcmp(s1.c_str(), s2.c_str()) < 0; +} + +bool SourceLineResolverBase::Module::ParseCFIRuleSet( + const string &rule_set, CFIFrameInfo *frame_info) const { + CFIFrameInfoParseHandler handler(frame_info); + CFIRuleParser parser(&handler); + return parser.Parse(rule_set); +} + +} // namespace google_breakpad diff --git 
a/TMessagesProj/jni/third_party/breakpad/src/processor/source_line_resolver_base_types.h b/TMessagesProj/jni/third_party/breakpad/src/processor/source_line_resolver_base_types.h new file mode 100644 index 0000000000..4a9dfb3ced --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/source_line_resolver_base_types.h @@ -0,0 +1,158 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// source_line_resolver_base_types.h: definition of nested classes/structs in +// SourceLineResolverBase. It moves the definitions out of +// source_line_resolver_base.cc, so that other classes may have access +// to these private nested types without including source_line_resolver_base.cc +// In addition, Module is defined as a pure abstract class to be implemented by +// each concrete source line resolver class. +// +// See source_line_resolver_base.h for more documentation. 
+// +// Author: Siyang Xie (lambxsy@google.com) + +#include + +#include +#include + +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/processor/source_line_resolver_base.h" +#include "google_breakpad/processor/stack_frame.h" +#include "processor/cfi_frame_info.h" +#include "processor/windows_frame_info.h" + +#ifndef PROCESSOR_SOURCE_LINE_RESOLVER_BASE_TYPES_H__ +#define PROCESSOR_SOURCE_LINE_RESOLVER_BASE_TYPES_H__ + +namespace google_breakpad { + +class SourceLineResolverBase::AutoFileCloser { + public: + explicit AutoFileCloser(FILE *file) : file_(file) {} + ~AutoFileCloser() { + if (file_) + fclose(file_); + } + + private: + FILE *file_; +}; + +struct SourceLineResolverBase::Line { + Line() { } + Line(MemAddr addr, MemAddr code_size, int file_id, int source_line) + : address(addr) + , size(code_size) + , source_file_id(file_id) + , line(source_line) { } + + MemAddr address; + MemAddr size; + int32_t source_file_id; + int32_t line; +}; + +struct SourceLineResolverBase::Function { + Function() { } + Function(const string &function_name, + MemAddr function_address, + MemAddr code_size, + int set_parameter_size) + : name(function_name), address(function_address), size(code_size), + parameter_size(set_parameter_size) { } + + string name; + MemAddr address; + MemAddr size; + + // The size of parameters passed to this function on the stack. + int32_t parameter_size; +}; + +struct SourceLineResolverBase::PublicSymbol { + PublicSymbol() { } + PublicSymbol(const string& set_name, + MemAddr set_address, + int set_parameter_size) + : name(set_name), + address(set_address), + parameter_size(set_parameter_size) {} + + string name; + MemAddr address; + + // If the public symbol is used as a function entry point, parameter_size + // is set to the size of the parameters passed to the funciton on the + // stack, if known. + int32_t parameter_size; +}; + +class SourceLineResolverBase::Module { + public: + virtual ~Module() { }; + // Loads a map from the given buffer in char* type. + // Does NOT take ownership of memory_buffer (the caller, source line resolver, + // is the owner of memory_buffer). + // The passed in |memory buffer| is of size |memory_buffer_size|. If it is + // not null terminated, LoadMapFromMemory will null terminate it by modifying + // the passed in buffer. + virtual bool LoadMapFromMemory(char *memory_buffer, + size_t memory_buffer_size) = 0; + + // Tells whether the loaded symbol data is corrupt. Return value is + // undefined, if the symbol data hasn't been loaded yet. + virtual bool IsCorrupt() const = 0; + + // Looks up the given relative address, and fills the StackFrame struct + // with the result. + virtual void LookupAddress(StackFrame *frame) const = 0; + + // If Windows stack walking information is available covering ADDRESS, + // return a WindowsFrameInfo structure describing it. If the information + // is not available, returns NULL. A NULL return value does not indicate + // an error. The caller takes ownership of any returned WindowsFrameInfo + // object. + virtual WindowsFrameInfo * + FindWindowsFrameInfo(const StackFrame *frame) const = 0; + + // If CFI stack walking information is available covering ADDRESS, + // return a CFIFrameInfo structure describing it. If the information + // is not available, return NULL. The caller takes ownership of any + // returned CFIFrameInfo object. 
+ virtual CFIFrameInfo *FindCFIFrameInfo(const StackFrame *frame) const = 0; + protected: + virtual bool ParseCFIRuleSet(const string &rule_set, + CFIFrameInfo *frame_info) const; +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_SOURCE_LINE_RESOLVER_BASE_TYPES_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stack_frame_cpu.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stack_frame_cpu.cc new file mode 100644 index 0000000000..6175dc7f26 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stack_frame_cpu.cc @@ -0,0 +1,79 @@ +// Copyright 2013 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stack_frame_cpu.h: CPU-specific StackFrame extensions. +// +// See google_breakpad/processor/stack_frame_cpu.h for documentation. 
+// +// Author: Colin Blundell + +#include "google_breakpad/processor/stack_frame_cpu.h" + +namespace google_breakpad { + +const uint64_t StackFrameARM64::CONTEXT_VALID_X0; +const uint64_t StackFrameARM64::CONTEXT_VALID_X1; +const uint64_t StackFrameARM64::CONTEXT_VALID_X2; +const uint64_t StackFrameARM64::CONTEXT_VALID_X3; +const uint64_t StackFrameARM64::CONTEXT_VALID_X4; +const uint64_t StackFrameARM64::CONTEXT_VALID_X5; +const uint64_t StackFrameARM64::CONTEXT_VALID_X6; +const uint64_t StackFrameARM64::CONTEXT_VALID_X7; +const uint64_t StackFrameARM64::CONTEXT_VALID_X8; +const uint64_t StackFrameARM64::CONTEXT_VALID_X9; +const uint64_t StackFrameARM64::CONTEXT_VALID_X10; +const uint64_t StackFrameARM64::CONTEXT_VALID_X11; +const uint64_t StackFrameARM64::CONTEXT_VALID_X12; +const uint64_t StackFrameARM64::CONTEXT_VALID_X13; +const uint64_t StackFrameARM64::CONTEXT_VALID_X14; +const uint64_t StackFrameARM64::CONTEXT_VALID_X15; +const uint64_t StackFrameARM64::CONTEXT_VALID_X16; +const uint64_t StackFrameARM64::CONTEXT_VALID_X17; +const uint64_t StackFrameARM64::CONTEXT_VALID_X18; +const uint64_t StackFrameARM64::CONTEXT_VALID_X19; +const uint64_t StackFrameARM64::CONTEXT_VALID_X20; +const uint64_t StackFrameARM64::CONTEXT_VALID_X21; +const uint64_t StackFrameARM64::CONTEXT_VALID_X22; +const uint64_t StackFrameARM64::CONTEXT_VALID_X23; +const uint64_t StackFrameARM64::CONTEXT_VALID_X24; +const uint64_t StackFrameARM64::CONTEXT_VALID_X25; +const uint64_t StackFrameARM64::CONTEXT_VALID_X26; +const uint64_t StackFrameARM64::CONTEXT_VALID_X27; +const uint64_t StackFrameARM64::CONTEXT_VALID_X28; +const uint64_t StackFrameARM64::CONTEXT_VALID_X29; +const uint64_t StackFrameARM64::CONTEXT_VALID_X30; +const uint64_t StackFrameARM64::CONTEXT_VALID_X31; +const uint64_t StackFrameARM64::CONTEXT_VALID_X32; +const uint64_t StackFrameARM64::CONTEXT_VALID_FP; +const uint64_t StackFrameARM64::CONTEXT_VALID_LR; +const uint64_t StackFrameARM64::CONTEXT_VALID_SP; +const uint64_t StackFrameARM64::CONTEXT_VALID_PC; +const uint64_t StackFrameARM64::CONTEXT_VALID_ALL; + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stack_frame_symbolizer.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stack_frame_symbolizer.cc new file mode 100644 index 0000000000..5c8dbe5e13 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stack_frame_symbolizer.cc @@ -0,0 +1,138 @@ +// Copyright (c) 2012 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Implementation of StackFrameSymbolizer, which encapsulates the logic of how +// SourceLineResolverInterface interacts with SymbolSupplier to fill source +// line information in a stack frame, and also looks up WindowsFrameInfo or +// CFIFrameInfo for a stack frame. + +#include "google_breakpad/processor/stack_frame_symbolizer.h" + +#include + +#include "common/scoped_ptr.h" +#include "google_breakpad/processor/code_module.h" +#include "google_breakpad/processor/code_modules.h" +#include "google_breakpad/processor/source_line_resolver_interface.h" +#include "google_breakpad/processor/stack_frame.h" +#include "google_breakpad/processor/symbol_supplier.h" +#include "google_breakpad/processor/system_info.h" +#include "processor/linked_ptr.h" +#include "processor/logging.h" + +namespace google_breakpad { + +StackFrameSymbolizer::StackFrameSymbolizer( + SymbolSupplier* supplier, + SourceLineResolverInterface* resolver) : supplier_(supplier), + resolver_(resolver) { } + +StackFrameSymbolizer::SymbolizerResult StackFrameSymbolizer::FillSourceLineInfo( + const CodeModules* modules, + const SystemInfo* system_info, + StackFrame* frame) { + assert(frame); + + if (!modules) return kError; + const CodeModule* module = modules->GetModuleForAddress(frame->instruction); + if (!module) return kError; + frame->module = module; + + if (!resolver_) return kError; // no resolver. + // If module is known to have missing symbol file, return. + if (no_symbol_modules_.find(module->code_file()) != + no_symbol_modules_.end()) { + return kError; + } + + // If module is already loaded, go ahead to fill source line info and return. + if (resolver_->HasModule(frame->module)) { + resolver_->FillSourceLineInfo(frame); + return resolver_->IsModuleCorrupt(frame->module) ? + kWarningCorruptSymbols : kNoError; + } + + // Module needs to fetch symbol file. First check to see if supplier exists. + if (!supplier_) { + return kError; + } + + // Start fetching symbol from supplier. + string symbol_file; + char* symbol_data = NULL; + size_t symbol_data_size; + SymbolSupplier::SymbolResult symbol_result = supplier_->GetCStringSymbolData( + module, system_info, &symbol_file, &symbol_data, &symbol_data_size); + + switch (symbol_result) { + case SymbolSupplier::FOUND: { + bool load_success = resolver_->LoadModuleUsingMemoryBuffer( + frame->module, + symbol_data, + symbol_data_size); + if (resolver_->ShouldDeleteMemoryBufferAfterLoadModule()) { + supplier_->FreeSymbolData(module); + } + + if (load_success) { + resolver_->FillSourceLineInfo(frame); + return resolver_->IsModuleCorrupt(frame->module) ? 
+ kWarningCorruptSymbols : kNoError; + } else { + BPLOG(ERROR) << "Failed to load symbol file in resolver."; + no_symbol_modules_.insert(module->code_file()); + return kError; + } + } + + case SymbolSupplier::NOT_FOUND: + no_symbol_modules_.insert(module->code_file()); + return kError; + + case SymbolSupplier::INTERRUPT: + return kInterrupt; + + default: + BPLOG(ERROR) << "Unknown SymbolResult enum: " << symbol_result; + return kError; + } + return kError; +} + +WindowsFrameInfo* StackFrameSymbolizer::FindWindowsFrameInfo( + const StackFrame* frame) { + return resolver_ ? resolver_->FindWindowsFrameInfo(frame) : NULL; +} + +CFIFrameInfo* StackFrameSymbolizer::FindCFIFrameInfo( + const StackFrame* frame) { + return resolver_ ? resolver_->FindCFIFrameInfo(frame) : NULL; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalk_common.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalk_common.cc new file mode 100644 index 0000000000..3a6e17feca --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalk_common.cc @@ -0,0 +1,929 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalk_common.cc: Module shared by the {micro,mini}dump_stackwalck +// executables to print the content of dumps (w/ stack traces) on the console. 
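Taken together, the pieces added above are wired up roughly as follows: a SymbolSupplier locates symbol data, a SourceLineResolverInterface parses it, and StackFrameSymbolizer drives both to annotate a frame. A hedged sketch of that wiring; the SymbolizeOneFrame() helper and the "symbols" root directory are assumptions, and the modules/system_info/frame inputs are assumed to come from an already-processed dump.

    #include "google_breakpad/processor/basic_source_line_resolver.h"
    #include "google_breakpad/processor/stack_frame.h"
    #include "google_breakpad/processor/stack_frame_symbolizer.h"
    #include "processor/simple_symbol_supplier.h"

    // Hypothetical helper: fill source-line info for one frame using the
    // classes introduced in this patch.
    void SymbolizeOneFrame(const google_breakpad::CodeModules *modules,
                           const google_breakpad::SystemInfo *system_info,
                           google_breakpad::StackFrame *frame) {
      google_breakpad::SimpleSymbolSupplier supplier("symbols");  // root symbol dir
      google_breakpad::BasicSourceLineResolver resolver;
      google_breakpad::StackFrameSymbolizer symbolizer(&supplier, &resolver);

      // Returns kNoError, kWarningCorruptSymbols, kError or kInterrupt, as above.
      google_breakpad::StackFrameSymbolizer::SymbolizerResult result =
          symbolizer.FillSourceLineInfo(modules, system_info, frame);
      (void)result;
    }
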
+// +// Author: Mark Mentovai + +#include "processor/stackwalk_common.h" + +#include +#include +#include +#include + +#include +#include + +#include "common/using_std_string.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/code_module.h" +#include "google_breakpad/processor/code_modules.h" +#include "google_breakpad/processor/process_state.h" +#include "google_breakpad/processor/source_line_resolver_interface.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/logging.h" +#include "processor/pathname_stripper.h" + +namespace google_breakpad { + +namespace { + +using std::vector; + +// Separator character for machine readable output. +static const char kOutputSeparator = '|'; + +// PrintRegister prints a register's name and value to stdout. It will +// print four registers on a line. For the first register in a set, +// pass 0 for |start_col|. For registers in a set, pass the most recent +// return value of PrintRegister. +// The caller is responsible for printing the final newline after a set +// of registers is completely printed, regardless of the number of calls +// to PrintRegister. +static const int kMaxWidth = 80; // optimize for an 80-column terminal +static int PrintRegister(const char *name, uint32_t value, int start_col) { + char buffer[64]; + snprintf(buffer, sizeof(buffer), " %5s = 0x%08x", name, value); + + if (start_col + static_cast(strlen(buffer)) > kMaxWidth) { + start_col = 0; + printf("\n "); + } + fputs(buffer, stdout); + + return start_col + strlen(buffer); +} + +// PrintRegister64 does the same thing, but for 64-bit registers. +static int PrintRegister64(const char *name, uint64_t value, int start_col) { + char buffer[64]; + snprintf(buffer, sizeof(buffer), " %5s = 0x%016" PRIx64 , name, value); + + if (start_col + static_cast(strlen(buffer)) > kMaxWidth) { + start_col = 0; + printf("\n "); + } + fputs(buffer, stdout); + + return start_col + strlen(buffer); +} + +// StripSeparator takes a string |original| and returns a copy +// of the string with all occurences of |kOutputSeparator| removed. +static string StripSeparator(const string &original) { + string result = original; + string::size_type position = 0; + while ((position = result.find(kOutputSeparator, position)) != string::npos) { + result.erase(position, 1); + } + position = 0; + while ((position = result.find('\n', position)) != string::npos) { + result.erase(position, 1); + } + return result; +} + +// PrintStackContents prints the stack contents of the current frame to stdout. +static void PrintStackContents(const std::string &indent, + const StackFrame *frame, + const StackFrame *prev_frame, + const std::string &cpu, + const MemoryRegion *memory, + const CodeModules* modules, + SourceLineResolverInterface *resolver) { + // Find stack range. 
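// A usage sketch for the PrintRegister/PrintRegister64 helpers defined above
// (register names and values here are illustrative). The column protocol is:
// start a set with start_col == 0, feed each call's return value into the
// next call, and have the caller terminate the set with its own newline.
//
//   int col = 0;
//   col = PrintRegister("eip", 0x08048123u, col);
//   col = PrintRegister("esp", 0xbffff010u, col);
//   col = PrintRegister64("rip", 0x00007f0012345678ULL, col);
//   printf("\n");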
+ int word_length = 0; + uint64_t stack_begin = 0, stack_end = 0; + if (cpu == "x86") { + word_length = 4; + const StackFrameX86 *frame_x86 = static_cast(frame); + const StackFrameX86 *prev_frame_x86 = + static_cast(prev_frame); + if ((frame_x86->context_validity & StackFrameX86::CONTEXT_VALID_ESP) && + (prev_frame_x86->context_validity & StackFrameX86::CONTEXT_VALID_ESP)) { + stack_begin = frame_x86->context.esp; + stack_end = prev_frame_x86->context.esp; + } + } else if (cpu == "amd64") { + word_length = 8; + const StackFrameAMD64 *frame_amd64 = + static_cast(frame); + const StackFrameAMD64 *prev_frame_amd64 = + static_cast(prev_frame); + if ((frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_RSP) && + (prev_frame_amd64->context_validity & + StackFrameAMD64::CONTEXT_VALID_RSP)) { + stack_begin = frame_amd64->context.rsp; + stack_end = prev_frame_amd64->context.rsp; + } + } else if (cpu == "arm") { + word_length = 4; + const StackFrameARM *frame_arm = static_cast(frame); + const StackFrameARM *prev_frame_arm = + static_cast(prev_frame); + if ((frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_SP) && + (prev_frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_SP)) { + stack_begin = frame_arm->context.iregs[13]; + stack_end = prev_frame_arm->context.iregs[13]; + } + } else if (cpu == "arm64") { + word_length = 8; + const StackFrameARM64 *frame_arm64 = + static_cast(frame); + const StackFrameARM64 *prev_frame_arm64 = + static_cast(prev_frame); + if ((frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_SP) && + (prev_frame_arm64->context_validity & + StackFrameARM64::CONTEXT_VALID_SP)) { + stack_begin = frame_arm64->context.iregs[31]; + stack_end = prev_frame_arm64->context.iregs[31]; + } + } + if (!word_length || !stack_begin || !stack_end) + return; + + // Print stack contents. + printf("\n%sStack contents:", indent.c_str()); + for(uint64_t address = stack_begin; address < stack_end; ) { + // Print the start address of this row. + if (word_length == 4) + printf("\n%s %08x", indent.c_str(), static_cast(address)); + else + printf("\n%s %016" PRIx64, indent.c_str(), address); + + // Print data in hex. + const int kBytesPerRow = 16; + std::string data_as_string; + for (int i = 0; i < kBytesPerRow; ++i, ++address) { + uint8_t value = 0; + if (address < stack_end && + memory->GetMemoryAtAddress(address, &value)) { + printf(" %02x", value); + data_as_string.push_back(isprint(value) ? value : '.'); + } else { + printf(" "); + data_as_string.push_back(' '); + } + } + // Print data as string. + printf(" %s", data_as_string.c_str()); + } + + // Try to find instruction pointers from stack. + printf("\n%sPossible instruction pointers:\n", indent.c_str()); + for (uint64_t address = stack_begin; address < stack_end; + address += word_length) { + StackFrame pointee_frame; + + // Read a word (possible instruction pointer) from stack. + if (word_length == 4) { + uint32_t data32 = 0; + memory->GetMemoryAtAddress(address, &data32); + pointee_frame.instruction = data32; + } else { + uint64_t data64 = 0; + memory->GetMemoryAtAddress(address, &data64); + pointee_frame.instruction = data64; + } + pointee_frame.module = + modules->GetModuleForAddress(pointee_frame.instruction); + + // Try to look up the function name. + if (pointee_frame.module) + resolver->FillSourceLineInfo(&pointee_frame); + + // Print function name. 
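// The 16-bytes-per-row hex/printable format produced by the loop above can be
// reproduced on an ordinary buffer; a self-contained sketch (the function
// name, buffer, and starting address are made up, and no MemoryRegion is
// involved):
//
//   #include <cctype>
//   #include <cinttypes>
//   #include <cstdint>
//   #include <cstdio>
//
//   // Prints one row: address, up to 16 hex bytes, then printable characters.
//   void DumpRow(const uint8_t* data, size_t length, uint64_t address) {
//     printf("%016" PRIx64, address);
//     char ascii[17];
//     ascii[16] = '\0';
//     for (size_t i = 0; i < 16; ++i) {
//       if (i < length) {
//         printf(" %02x", data[i]);
//         ascii[i] = isprint(data[i]) ? static_cast<char>(data[i]) : '.';
//       } else {
//         printf("   ");
//         ascii[i] = ' ';
//       }
//     }
//     printf("  %s\n", ascii);
//   }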
+ if (!pointee_frame.function_name.empty()) { + if (word_length == 4) { + printf("%s *(0x%08x) = 0x%08x", indent.c_str(), + static_cast(address), + static_cast(pointee_frame.instruction)); + } else { + printf("%s *(0x%016" PRIx64 ") = 0x%016" PRIx64, + indent.c_str(), address, pointee_frame.instruction); + } + printf(" <%s> [%s : %d + 0x%" PRIx64 "]\n", + pointee_frame.function_name.c_str(), + PathnameStripper::File(pointee_frame.source_file_name).c_str(), + pointee_frame.source_line, + pointee_frame.instruction - pointee_frame.source_line_base); + } + } + printf("\n"); +} + +// PrintStack prints the call stack in |stack| to stdout, in a reasonably +// useful form. Module, function, and source file names are displayed if +// they are available. The code offset to the base code address of the +// source line, function, or module is printed, preferring them in that +// order. If no source line, function, or module information is available, +// an absolute code offset is printed. +// +// If |cpu| is a recognized CPU name, relevant register state for each stack +// frame printed is also output, if available. +static void PrintStack(const CallStack *stack, + const string &cpu, + bool output_stack_contents, + const MemoryRegion* memory, + const CodeModules* modules, + SourceLineResolverInterface* resolver) { + int frame_count = stack->frames()->size(); + if (frame_count == 0) { + printf(" \n"); + } + for (int frame_index = 0; frame_index < frame_count; ++frame_index) { + const StackFrame *frame = stack->frames()->at(frame_index); + printf("%2d ", frame_index); + + uint64_t instruction_address = frame->ReturnAddress(); + + if (frame->module) { + printf("%s", PathnameStripper::File(frame->module->code_file()).c_str()); + if (!frame->function_name.empty()) { + printf("!%s", frame->function_name.c_str()); + if (!frame->source_file_name.empty()) { + string source_file = PathnameStripper::File(frame->source_file_name); + printf(" [%s : %d + 0x%" PRIx64 "]", + source_file.c_str(), + frame->source_line, + instruction_address - frame->source_line_base); + } else { + printf(" + 0x%" PRIx64, instruction_address - frame->function_base); + } + } else { + printf(" + 0x%" PRIx64, + instruction_address - frame->module->base_address()); + } + } else { + printf("0x%" PRIx64, instruction_address); + } + printf("\n "); + + int sequence = 0; + if (cpu == "x86") { + const StackFrameX86 *frame_x86 = + reinterpret_cast(frame); + + if (frame_x86->context_validity & StackFrameX86::CONTEXT_VALID_EIP) + sequence = PrintRegister("eip", frame_x86->context.eip, sequence); + if (frame_x86->context_validity & StackFrameX86::CONTEXT_VALID_ESP) + sequence = PrintRegister("esp", frame_x86->context.esp, sequence); + if (frame_x86->context_validity & StackFrameX86::CONTEXT_VALID_EBP) + sequence = PrintRegister("ebp", frame_x86->context.ebp, sequence); + if (frame_x86->context_validity & StackFrameX86::CONTEXT_VALID_EBX) + sequence = PrintRegister("ebx", frame_x86->context.ebx, sequence); + if (frame_x86->context_validity & StackFrameX86::CONTEXT_VALID_ESI) + sequence = PrintRegister("esi", frame_x86->context.esi, sequence); + if (frame_x86->context_validity & StackFrameX86::CONTEXT_VALID_EDI) + sequence = PrintRegister("edi", frame_x86->context.edi, sequence); + if (frame_x86->context_validity == StackFrameX86::CONTEXT_VALID_ALL) { + sequence = PrintRegister("eax", frame_x86->context.eax, sequence); + sequence = PrintRegister("ecx", frame_x86->context.ecx, sequence); + sequence = PrintRegister("edx", frame_x86->context.edx, 
sequence); + sequence = PrintRegister("efl", frame_x86->context.eflags, sequence); + } + } else if (cpu == "ppc") { + const StackFramePPC *frame_ppc = + reinterpret_cast(frame); + + if (frame_ppc->context_validity & StackFramePPC::CONTEXT_VALID_SRR0) + sequence = PrintRegister("srr0", frame_ppc->context.srr0, sequence); + if (frame_ppc->context_validity & StackFramePPC::CONTEXT_VALID_GPR1) + sequence = PrintRegister("r1", frame_ppc->context.gpr[1], sequence); + } else if (cpu == "amd64") { + const StackFrameAMD64 *frame_amd64 = + reinterpret_cast(frame); + + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_RAX) + sequence = PrintRegister64("rax", frame_amd64->context.rax, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_RDX) + sequence = PrintRegister64("rdx", frame_amd64->context.rdx, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_RCX) + sequence = PrintRegister64("rcx", frame_amd64->context.rcx, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_RBX) + sequence = PrintRegister64("rbx", frame_amd64->context.rbx, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_RSI) + sequence = PrintRegister64("rsi", frame_amd64->context.rsi, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_RDI) + sequence = PrintRegister64("rdi", frame_amd64->context.rdi, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_RBP) + sequence = PrintRegister64("rbp", frame_amd64->context.rbp, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_RSP) + sequence = PrintRegister64("rsp", frame_amd64->context.rsp, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_R8) + sequence = PrintRegister64("r8", frame_amd64->context.r8, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_R9) + sequence = PrintRegister64("r9", frame_amd64->context.r9, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_R10) + sequence = PrintRegister64("r10", frame_amd64->context.r10, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_R11) + sequence = PrintRegister64("r11", frame_amd64->context.r11, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_R12) + sequence = PrintRegister64("r12", frame_amd64->context.r12, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_R13) + sequence = PrintRegister64("r13", frame_amd64->context.r13, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_R14) + sequence = PrintRegister64("r14", frame_amd64->context.r14, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_R15) + sequence = PrintRegister64("r15", frame_amd64->context.r15, sequence); + if (frame_amd64->context_validity & StackFrameAMD64::CONTEXT_VALID_RIP) + sequence = PrintRegister64("rip", frame_amd64->context.rip, sequence); + } else if (cpu == "sparc") { + const StackFrameSPARC *frame_sparc = + reinterpret_cast(frame); + + if (frame_sparc->context_validity & StackFrameSPARC::CONTEXT_VALID_SP) + sequence = PrintRegister("sp", frame_sparc->context.g_r[14], sequence); + if (frame_sparc->context_validity & StackFrameSPARC::CONTEXT_VALID_FP) + sequence = PrintRegister("fp", frame_sparc->context.g_r[30], sequence); + if (frame_sparc->context_validity & StackFrameSPARC::CONTEXT_VALID_PC) + sequence = PrintRegister("pc", 
frame_sparc->context.pc, sequence); + } else if (cpu == "arm") { + const StackFrameARM *frame_arm = + reinterpret_cast(frame); + + // Argument registers (caller-saves), which will likely only be valid + // for the youngest frame. + if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_R0) + sequence = PrintRegister("r0", frame_arm->context.iregs[0], sequence); + if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_R1) + sequence = PrintRegister("r1", frame_arm->context.iregs[1], sequence); + if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_R2) + sequence = PrintRegister("r2", frame_arm->context.iregs[2], sequence); + if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_R3) + sequence = PrintRegister("r3", frame_arm->context.iregs[3], sequence); + + // General-purpose callee-saves registers. + if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_R4) + sequence = PrintRegister("r4", frame_arm->context.iregs[4], sequence); + if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_R5) + sequence = PrintRegister("r5", frame_arm->context.iregs[5], sequence); + if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_R6) + sequence = PrintRegister("r6", frame_arm->context.iregs[6], sequence); + if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_R7) + sequence = PrintRegister("r7", frame_arm->context.iregs[7], sequence); + if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_R8) + sequence = PrintRegister("r8", frame_arm->context.iregs[8], sequence); + if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_R9) + sequence = PrintRegister("r9", frame_arm->context.iregs[9], sequence); + if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_R10) + sequence = PrintRegister("r10", frame_arm->context.iregs[10], sequence); + if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_R12) + sequence = PrintRegister("r12", frame_arm->context.iregs[12], sequence); + + // Registers with a dedicated or conventional purpose. 
+ if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_FP) + sequence = PrintRegister("fp", frame_arm->context.iregs[11], sequence); + if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_SP) + sequence = PrintRegister("sp", frame_arm->context.iregs[13], sequence); + if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_LR) + sequence = PrintRegister("lr", frame_arm->context.iregs[14], sequence); + if (frame_arm->context_validity & StackFrameARM::CONTEXT_VALID_PC) + sequence = PrintRegister("pc", frame_arm->context.iregs[15], sequence); + } else if (cpu == "arm64") { + const StackFrameARM64 *frame_arm64 = + reinterpret_cast(frame); + + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X0) { + sequence = + PrintRegister64("x0", frame_arm64->context.iregs[0], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X1) { + sequence = + PrintRegister64("x1", frame_arm64->context.iregs[1], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X2) { + sequence = + PrintRegister64("x2", frame_arm64->context.iregs[2], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X3) { + sequence = + PrintRegister64("x3", frame_arm64->context.iregs[3], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X4) { + sequence = + PrintRegister64("x4", frame_arm64->context.iregs[4], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X5) { + sequence = + PrintRegister64("x5", frame_arm64->context.iregs[5], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X6) { + sequence = + PrintRegister64("x6", frame_arm64->context.iregs[6], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X7) { + sequence = + PrintRegister64("x7", frame_arm64->context.iregs[7], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X8) { + sequence = + PrintRegister64("x8", frame_arm64->context.iregs[8], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X9) { + sequence = + PrintRegister64("x9", frame_arm64->context.iregs[9], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X10) { + sequence = + PrintRegister64("x10", frame_arm64->context.iregs[10], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X11) { + sequence = + PrintRegister64("x11", frame_arm64->context.iregs[11], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X12) { + sequence = + PrintRegister64("x12", frame_arm64->context.iregs[12], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X13) { + sequence = + PrintRegister64("x13", frame_arm64->context.iregs[13], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X14) { + sequence = + PrintRegister64("x14", frame_arm64->context.iregs[14], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X15) { + sequence = + PrintRegister64("x15", frame_arm64->context.iregs[15], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X16) { + sequence = + PrintRegister64("x16", frame_arm64->context.iregs[16], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X17) { + sequence = + PrintRegister64("x17", frame_arm64->context.iregs[17], sequence); + } + if 
(frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X18) { + sequence = + PrintRegister64("x18", frame_arm64->context.iregs[18], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X19) { + sequence = + PrintRegister64("x19", frame_arm64->context.iregs[19], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X20) { + sequence = + PrintRegister64("x20", frame_arm64->context.iregs[20], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X21) { + sequence = + PrintRegister64("x21", frame_arm64->context.iregs[21], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X22) { + sequence = + PrintRegister64("x22", frame_arm64->context.iregs[22], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X23) { + sequence = + PrintRegister64("x23", frame_arm64->context.iregs[23], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X24) { + sequence = + PrintRegister64("x24", frame_arm64->context.iregs[24], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X25) { + sequence = + PrintRegister64("x25", frame_arm64->context.iregs[25], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X26) { + sequence = + PrintRegister64("x26", frame_arm64->context.iregs[26], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X27) { + sequence = + PrintRegister64("x27", frame_arm64->context.iregs[27], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_X28) { + sequence = + PrintRegister64("x28", frame_arm64->context.iregs[28], sequence); + } + + // Registers with a dedicated or conventional purpose. 
+ if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_FP) { + sequence = + PrintRegister64("fp", frame_arm64->context.iregs[29], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_LR) { + sequence = + PrintRegister64("lr", frame_arm64->context.iregs[30], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_SP) { + sequence = + PrintRegister64("sp", frame_arm64->context.iregs[31], sequence); + } + if (frame_arm64->context_validity & StackFrameARM64::CONTEXT_VALID_PC) { + sequence = + PrintRegister64("pc", frame_arm64->context.iregs[32], sequence); + } + } else if (cpu == "mips") { + const StackFrameMIPS* frame_mips = + reinterpret_cast(frame); + + if (frame_mips->context_validity & StackFrameMIPS::CONTEXT_VALID_GP) + sequence = PrintRegister64("gp", + frame_mips->context.iregs[MD_CONTEXT_MIPS_REG_GP], + sequence); + if (frame_mips->context_validity & StackFrameMIPS::CONTEXT_VALID_SP) + sequence = PrintRegister64("sp", + frame_mips->context.iregs[MD_CONTEXT_MIPS_REG_SP], + sequence); + if (frame_mips->context_validity & StackFrameMIPS::CONTEXT_VALID_FP) + sequence = PrintRegister64("fp", + frame_mips->context.iregs[MD_CONTEXT_MIPS_REG_FP], + sequence); + if (frame_mips->context_validity & StackFrameMIPS::CONTEXT_VALID_RA) + sequence = PrintRegister64("ra", + frame_mips->context.iregs[MD_CONTEXT_MIPS_REG_RA], + sequence); + if (frame_mips->context_validity & StackFrameMIPS::CONTEXT_VALID_PC) + sequence = PrintRegister64("pc", frame_mips->context.epc, sequence); + + // Save registers s0-s7 + if (frame_mips->context_validity & StackFrameMIPS::CONTEXT_VALID_S0) + sequence = PrintRegister64("s0", + frame_mips->context.iregs[MD_CONTEXT_MIPS_REG_S0], + sequence); + if (frame_mips->context_validity & StackFrameMIPS::CONTEXT_VALID_S1) + sequence = PrintRegister64("s1", + frame_mips->context.iregs[MD_CONTEXT_MIPS_REG_S1], + sequence); + if (frame_mips->context_validity & StackFrameMIPS::CONTEXT_VALID_S2) + sequence = PrintRegister64("s2", + frame_mips->context.iregs[MD_CONTEXT_MIPS_REG_S2], + sequence); + if (frame_mips->context_validity & StackFrameMIPS::CONTEXT_VALID_S3) + sequence = PrintRegister64("s3", + frame_mips->context.iregs[MD_CONTEXT_MIPS_REG_S3], + sequence); + if (frame_mips->context_validity & StackFrameMIPS::CONTEXT_VALID_S4) + sequence = PrintRegister64("s4", + frame_mips->context.iregs[MD_CONTEXT_MIPS_REG_S4], + sequence); + if (frame_mips->context_validity & StackFrameMIPS::CONTEXT_VALID_S5) + sequence = PrintRegister64("s5", + frame_mips->context.iregs[MD_CONTEXT_MIPS_REG_S5], + sequence); + if (frame_mips->context_validity & StackFrameMIPS::CONTEXT_VALID_S6) + sequence = PrintRegister64("s6", + frame_mips->context.iregs[MD_CONTEXT_MIPS_REG_S6], + sequence); + if (frame_mips->context_validity & StackFrameMIPS::CONTEXT_VALID_S7) + sequence = PrintRegister64("s7", + frame_mips->context.iregs[MD_CONTEXT_MIPS_REG_S7], + sequence); + } + printf("\n Found by: %s\n", frame->trust_description().c_str()); + + // Print stack contents. 
+ if (output_stack_contents && frame_index + 1 < frame_count) { + const std::string indent(" "); + PrintStackContents(indent, frame, stack->frames()->at(frame_index + 1), + cpu, memory, modules, resolver); + } + } +} + +// PrintStackMachineReadable prints the call stack in |stack| to stdout, +// in the following machine readable pipe-delimited text format: +// thread number|frame number|module|function|source file|line|offset +// +// Module, function, source file, and source line may all be empty +// depending on availability. The code offset follows the same rules as +// PrintStack above. +static void PrintStackMachineReadable(int thread_num, const CallStack *stack) { + int frame_count = stack->frames()->size(); + for (int frame_index = 0; frame_index < frame_count; ++frame_index) { + const StackFrame *frame = stack->frames()->at(frame_index); + printf("%d%c%d%c", thread_num, kOutputSeparator, frame_index, + kOutputSeparator); + + uint64_t instruction_address = frame->ReturnAddress(); + + if (frame->module) { + assert(!frame->module->code_file().empty()); + printf("%s", StripSeparator(PathnameStripper::File( + frame->module->code_file())).c_str()); + if (!frame->function_name.empty()) { + printf("%c%s", kOutputSeparator, + StripSeparator(frame->function_name).c_str()); + if (!frame->source_file_name.empty()) { + printf("%c%s%c%d%c0x%" PRIx64, + kOutputSeparator, + StripSeparator(frame->source_file_name).c_str(), + kOutputSeparator, + frame->source_line, + kOutputSeparator, + instruction_address - frame->source_line_base); + } else { + printf("%c%c%c0x%" PRIx64, + kOutputSeparator, // empty source file + kOutputSeparator, // empty source line + kOutputSeparator, + instruction_address - frame->function_base); + } + } else { + printf("%c%c%c%c0x%" PRIx64, + kOutputSeparator, // empty function name + kOutputSeparator, // empty source file + kOutputSeparator, // empty source line + kOutputSeparator, + instruction_address - frame->module->base_address()); + } + } else { + // the printf before this prints a trailing separator for module name + printf("%c%c%c%c0x%" PRIx64, + kOutputSeparator, // empty function name + kOutputSeparator, // empty source file + kOutputSeparator, // empty source line + kOutputSeparator, + instruction_address); + } + printf("\n"); + } +} + +// ContainsModule checks whether a given |module| is in the vector +// |modules_without_symbols|. +static bool ContainsModule( + const vector *modules, + const CodeModule *module) { + assert(modules); + assert(module); + vector::const_iterator iter; + for (iter = modules->begin(); iter != modules->end(); ++iter) { + if (module->debug_file().compare((*iter)->debug_file()) == 0 && + module->debug_identifier().compare((*iter)->debug_identifier()) == 0) { + return true; + } + } + return false; +} + +// PrintModule prints a single |module| to stdout. +// |modules_without_symbols| should contain the list of modules that were +// confirmed to be missing their symbols during the stack walk. 
+static void PrintModule( + const CodeModule *module, + const vector *modules_without_symbols, + const vector *modules_with_corrupt_symbols, + uint64_t main_address) { + string symbol_issues; + if (ContainsModule(modules_without_symbols, module)) { + symbol_issues = " (WARNING: No symbols, " + + PathnameStripper::File(module->debug_file()) + ", " + + module->debug_identifier() + ")"; + } else if (ContainsModule(modules_with_corrupt_symbols, module)) { + symbol_issues = " (WARNING: Corrupt symbols, " + + PathnameStripper::File(module->debug_file()) + ", " + + module->debug_identifier() + ")"; + } + uint64_t base_address = module->base_address(); + printf("0x%08" PRIx64 " - 0x%08" PRIx64 " %s %s%s%s\n", + base_address, base_address + module->size() - 1, + PathnameStripper::File(module->code_file()).c_str(), + module->version().empty() ? "???" : module->version().c_str(), + main_address != 0 && base_address == main_address ? " (main)" : "", + symbol_issues.c_str()); +} + +// PrintModules prints the list of all loaded |modules| to stdout. +// |modules_without_symbols| should contain the list of modules that were +// confirmed to be missing their symbols during the stack walk. +static void PrintModules( + const CodeModules *modules, + const vector *modules_without_symbols, + const vector *modules_with_corrupt_symbols) { + if (!modules) + return; + + printf("\n"); + printf("Loaded modules:\n"); + + uint64_t main_address = 0; + const CodeModule *main_module = modules->GetMainModule(); + if (main_module) { + main_address = main_module->base_address(); + } + + unsigned int module_count = modules->module_count(); + for (unsigned int module_sequence = 0; + module_sequence < module_count; + ++module_sequence) { + const CodeModule *module = modules->GetModuleAtSequence(module_sequence); + PrintModule(module, modules_without_symbols, modules_with_corrupt_symbols, + main_address); + } +} + +// PrintModulesMachineReadable outputs a list of loaded modules, +// one per line, in the following machine-readable pipe-delimited +// text format: +// Module|{Module Filename}|{Version}|{Debug Filename}|{Debug Identifier}| +// {Base Address}|{Max Address}|{Main} +static void PrintModulesMachineReadable(const CodeModules *modules) { + if (!modules) + return; + + uint64_t main_address = 0; + const CodeModule *main_module = modules->GetMainModule(); + if (main_module) { + main_address = main_module->base_address(); + } + + unsigned int module_count = modules->module_count(); + for (unsigned int module_sequence = 0; + module_sequence < module_count; + ++module_sequence) { + const CodeModule *module = modules->GetModuleAtSequence(module_sequence); + uint64_t base_address = module->base_address(); + printf("Module%c%s%c%s%c%s%c%s%c0x%08" PRIx64 "%c0x%08" PRIx64 "%c%d\n", + kOutputSeparator, + StripSeparator(PathnameStripper::File(module->code_file())).c_str(), + kOutputSeparator, StripSeparator(module->version()).c_str(), + kOutputSeparator, + StripSeparator(PathnameStripper::File(module->debug_file())).c_str(), + kOutputSeparator, + StripSeparator(module->debug_identifier()).c_str(), + kOutputSeparator, base_address, + kOutputSeparator, base_address + module->size() - 1, + kOutputSeparator, + main_module != NULL && base_address == main_address ? 1 : 0); + } +} + +} // namespace + +void PrintProcessState(const ProcessState& process_state, + bool output_stack_contents, + SourceLineResolverInterface* resolver) { + // Print OS and CPU information. 
+ string cpu = process_state.system_info()->cpu; + string cpu_info = process_state.system_info()->cpu_info; + printf("Operating system: %s\n", process_state.system_info()->os.c_str()); + printf(" %s\n", + process_state.system_info()->os_version.c_str()); + printf("CPU: %s\n", cpu.c_str()); + if (!cpu_info.empty()) { + // This field is optional. + printf(" %s\n", cpu_info.c_str()); + } + printf(" %d CPU%s\n", + process_state.system_info()->cpu_count, + process_state.system_info()->cpu_count != 1 ? "s" : ""); + printf("\n"); + + // Print crash information. + if (process_state.crashed()) { + printf("Crash reason: %s\n", process_state.crash_reason().c_str()); + printf("Crash address: 0x%" PRIx64 "\n", process_state.crash_address()); + } else { + printf("No crash\n"); + } + + string assertion = process_state.assertion(); + if (!assertion.empty()) { + printf("Assertion: %s\n", assertion.c_str()); + } + + // Compute process uptime if the process creation and crash times are + // available in the dump. + if (process_state.time_date_stamp() != 0 && + process_state.process_create_time() != 0 && + process_state.time_date_stamp() >= process_state.process_create_time()) { + printf("Process uptime: %d seconds\n", + process_state.time_date_stamp() - + process_state.process_create_time()); + } else { + printf("Process uptime: not available\n"); + } + + // If the thread that requested the dump is known, print it first. + int requesting_thread = process_state.requesting_thread(); + if (requesting_thread != -1) { + printf("\n"); + printf("Thread %d (%s)\n", + requesting_thread, + process_state.crashed() ? "crashed" : + "requested dump, did not crash"); + PrintStack(process_state.threads()->at(requesting_thread), cpu, + output_stack_contents, + process_state.thread_memory_regions()->at(requesting_thread), + process_state.modules(), resolver); + } + + // Print all of the threads in the dump. + int thread_count = process_state.threads()->size(); + for (int thread_index = 0; thread_index < thread_count; ++thread_index) { + if (thread_index != requesting_thread) { + // Don't print the crash thread again, it was already printed. + printf("\n"); + printf("Thread %d\n", thread_index); + PrintStack(process_state.threads()->at(thread_index), cpu, + output_stack_contents, + process_state.thread_memory_regions()->at(thread_index), + process_state.modules(), resolver); + } + } + + PrintModules(process_state.modules(), + process_state.modules_without_symbols(), + process_state.modules_with_corrupt_symbols()); +} + +void PrintProcessStateMachineReadable(const ProcessState& process_state) { + // Print OS and CPU information. + // OS|{OS Name}|{OS Version} + // CPU|{CPU Name}|{CPU Info}|{Number of CPUs} + printf("OS%c%s%c%s\n", kOutputSeparator, + StripSeparator(process_state.system_info()->os).c_str(), + kOutputSeparator, + StripSeparator(process_state.system_info()->os_version).c_str()); + printf("CPU%c%s%c%s%c%d\n", kOutputSeparator, + StripSeparator(process_state.system_info()->cpu).c_str(), + kOutputSeparator, + // this may be empty + StripSeparator(process_state.system_info()->cpu_info).c_str(), + kOutputSeparator, + process_state.system_info()->cpu_count); + + int requesting_thread = process_state.requesting_thread(); + + // Print crash information. 
+ // Crash|{Crash Reason}|{Crash Address}|{Crashed Thread} + printf("Crash%c", kOutputSeparator); + if (process_state.crashed()) { + printf("%s%c0x%" PRIx64 "%c", + StripSeparator(process_state.crash_reason()).c_str(), + kOutputSeparator, process_state.crash_address(), kOutputSeparator); + } else { + // print assertion info, if available, in place of crash reason, + // instead of the unhelpful "No crash" + string assertion = process_state.assertion(); + if (!assertion.empty()) { + printf("%s%c%c", StripSeparator(assertion).c_str(), + kOutputSeparator, kOutputSeparator); + } else { + printf("No crash%c%c", kOutputSeparator, kOutputSeparator); + } + } + + if (requesting_thread != -1) { + printf("%d\n", requesting_thread); + } else { + printf("\n"); + } + + PrintModulesMachineReadable(process_state.modules()); + + // blank line to indicate start of threads + printf("\n"); + + // If the thread that requested the dump is known, print it first. + if (requesting_thread != -1) { + PrintStackMachineReadable(requesting_thread, + process_state.threads()->at(requesting_thread)); + } + + // Print all of the threads in the dump. + int thread_count = process_state.threads()->size(); + for (int thread_index = 0; thread_index < thread_count; ++thread_index) { + if (thread_index != requesting_thread) { + // Don't print the crash thread again, it was already printed. + PrintStackMachineReadable(thread_index, + process_state.threads()->at(thread_index)); + } + } +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalk_common.h b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalk_common.h new file mode 100644 index 0000000000..a74f7b6da1 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalk_common.h @@ -0,0 +1,49 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalk_common.cc: Module shared by the {micro,mini}dump_stackwalck +// executables to print the content of dumps (w/ stack traces) on the console. 
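// The two entry points declared below are typically selected by a
// command-line switch in the stackwalk tools. A usage sketch (the wrapper
// function and parameter names are illustrative; |state| is assumed to come
// from an already-processed dump and |resolver| is the resolver used during
// the walk):
//
//   void Report(const google_breakpad::ProcessState& state,
//               google_breakpad::SourceLineResolverInterface* resolver,
//               bool machine_readable,
//               bool dump_stack_contents) {
//     if (machine_readable)
//       google_breakpad::PrintProcessStateMachineReadable(state);
//     else
//       google_breakpad::PrintProcessState(state, dump_stack_contents,
//                                          resolver);
//   }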
+ + +#ifndef PROCESSOR_STACKWALK_COMMON_H__ +#define PROCESSOR_STACKWALK_COMMON_H__ + +namespace google_breakpad { + +class ProcessState; +class SourceLineResolverInterface; + +void PrintProcessStateMachineReadable(const ProcessState& process_state); +void PrintProcessState(const ProcessState& process_state, + bool output_stack_contents, + SourceLineResolverInterface* resolver); + +} // namespace google_breakpad + +#endif // PROCESSOR_STACKWALK_COMMON_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker.cc new file mode 100644 index 0000000000..424cf4c471 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker.cc @@ -0,0 +1,295 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker.cc: Generic stackwalker. +// +// See stackwalker.h for documentation. 
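// A typical driver for the generic stackwalker, sketched under the assumption
// that |system_info|, |context|, |memory| and |modules| have already been
// extracted from a dump (the variable names are illustrative, and the module
// vectors are assumed to hold const CodeModule* elements):
//
//   google_breakpad::StackFrameSymbolizer symbolizer(supplier, resolver);
//   google_breakpad::scoped_ptr<google_breakpad::Stackwalker> walker(
//       google_breakpad::Stackwalker::StackwalkerForCPU(
//           system_info, context, memory, modules, &symbolizer));
//   google_breakpad::CallStack stack;
//   std::vector<const google_breakpad::CodeModule*> without_symbols;
//   std::vector<const google_breakpad::CodeModule*> corrupt_symbols;
//   if (walker.get())
//     walker->Walk(&stack, &without_symbols, &corrupt_symbols);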
+// +// Author: Mark Mentovai + +#include "google_breakpad/processor/stackwalker.h" + +#include + +#include "common/scoped_ptr.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/code_module.h" +#include "google_breakpad/processor/code_modules.h" +#include "google_breakpad/processor/dump_context.h" +#include "google_breakpad/processor/stack_frame.h" +#include "google_breakpad/processor/stack_frame_symbolizer.h" +#include "google_breakpad/processor/system_info.h" +#include "processor/linked_ptr.h" +#include "processor/logging.h" +#include "processor/stackwalker_ppc.h" +#include "processor/stackwalker_ppc64.h" +#include "processor/stackwalker_sparc.h" +#include "processor/stackwalker_x86.h" +#include "processor/stackwalker_amd64.h" +#include "processor/stackwalker_arm.h" +#include "processor/stackwalker_arm64.h" +#include "processor/stackwalker_mips.h" + +namespace google_breakpad { + +const int Stackwalker::kRASearchWords = 30; + +uint32_t Stackwalker::max_frames_ = 1024; +bool Stackwalker::max_frames_set_ = false; + +uint32_t Stackwalker::max_frames_scanned_ = 1024; + +Stackwalker::Stackwalker(const SystemInfo* system_info, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* frame_symbolizer) + : system_info_(system_info), + memory_(memory), + modules_(modules), + frame_symbolizer_(frame_symbolizer) { + assert(frame_symbolizer_); +} + +void InsertSpecialAttentionModule( + StackFrameSymbolizer::SymbolizerResult symbolizer_result, + const CodeModule* module, + vector* modules) { + if (!module) { + return; + } + assert(symbolizer_result == StackFrameSymbolizer::kError || + symbolizer_result == StackFrameSymbolizer::kWarningCorruptSymbols); + bool found = false; + vector::iterator iter; + for (iter = modules->begin(); iter != modules->end(); ++iter) { + if (*iter == module) { + found = true; + break; + } + } + if (!found) { + BPLOG(INFO) << ((symbolizer_result == StackFrameSymbolizer::kError) ? + "Couldn't load symbols for: " : + "Detected corrupt symbols for: ") + << module->debug_file() << "|" << module->debug_identifier(); + modules->push_back(module); + } +} + +bool Stackwalker::Walk( + CallStack* stack, + vector* modules_without_symbols, + vector* modules_with_corrupt_symbols) { + BPLOG_IF(ERROR, !stack) << "Stackwalker::Walk requires |stack|"; + assert(stack); + stack->Clear(); + + BPLOG_IF(ERROR, !modules_without_symbols) << "Stackwalker::Walk requires " + << "|modules_without_symbols|"; + BPLOG_IF(ERROR, !modules_without_symbols) << "Stackwalker::Walk requires " + << "|modules_with_corrupt_symbols|"; + assert(modules_without_symbols); + assert(modules_with_corrupt_symbols); + + // Begin with the context frame, and keep getting callers until there are + // no more. + + // Keep track of the number of scanned or otherwise dubious frames seen + // so far, as the caller may have set a limit. + uint32_t scanned_frames = 0; + + // Take ownership of the pointer returned by GetContextFrame. + scoped_ptr frame(GetContextFrame()); + + while (frame.get()) { + // frame already contains a good frame with properly set instruction and + // frame_pointer fields. The frame structure comes from either the + // context frame (above) or a caller frame (below). + + // Resolve the module information, if a module map was provided. 
+ StackFrameSymbolizer::SymbolizerResult symbolizer_result = + frame_symbolizer_->FillSourceLineInfo(modules_, system_info_, + frame.get()); + switch (symbolizer_result) { + case StackFrameSymbolizer::kInterrupt: + BPLOG(INFO) << "Stack walk is interrupted."; + return false; + break; + case StackFrameSymbolizer::kError: + InsertSpecialAttentionModule(symbolizer_result, frame->module, + modules_without_symbols); + break; + case StackFrameSymbolizer::kWarningCorruptSymbols: + InsertSpecialAttentionModule(symbolizer_result, frame->module, + modules_with_corrupt_symbols); + break; + case StackFrameSymbolizer::kNoError: + break; + default: + assert(false); + break; + } + + // Keep track of the number of dubious frames so far. + switch (frame.get()->trust) { + case StackFrame::FRAME_TRUST_NONE: + case StackFrame::FRAME_TRUST_SCAN: + case StackFrame::FRAME_TRUST_CFI_SCAN: + scanned_frames++; + break; + default: + break; + } + + // Add the frame to the call stack. Relinquish the ownership claim + // over the frame, because the stack now owns it. + stack->frames_.push_back(frame.release()); + if (stack->frames_.size() > max_frames_) { + // Only emit an error message in the case where the limit + // reached is the default limit, not set by the user. + if (!max_frames_set_) + BPLOG(ERROR) << "The stack is over " << max_frames_ << " frames."; + break; + } + + // Get the next frame and take ownership. + bool stack_scan_allowed = scanned_frames < max_frames_scanned_; + frame.reset(GetCallerFrame(stack, stack_scan_allowed)); + } + + return true; +} + + +// static +Stackwalker* Stackwalker::StackwalkerForCPU( + const SystemInfo* system_info, + DumpContext* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* frame_symbolizer) { + if (!context) { + BPLOG(ERROR) << "Can't choose a stackwalker implementation without context"; + return NULL; + } + + Stackwalker* cpu_stackwalker = NULL; + + uint32_t cpu = context->GetContextCPU(); + switch (cpu) { + case MD_CONTEXT_X86: + cpu_stackwalker = new StackwalkerX86(system_info, + context->GetContextX86(), + memory, modules, frame_symbolizer); + break; + + case MD_CONTEXT_PPC: + cpu_stackwalker = new StackwalkerPPC(system_info, + context->GetContextPPC(), + memory, modules, frame_symbolizer); + break; + + case MD_CONTEXT_PPC64: + cpu_stackwalker = new StackwalkerPPC64(system_info, + context->GetContextPPC64(), + memory, modules, frame_symbolizer); + break; + + case MD_CONTEXT_AMD64: + cpu_stackwalker = new StackwalkerAMD64(system_info, + context->GetContextAMD64(), + memory, modules, frame_symbolizer); + break; + + case MD_CONTEXT_SPARC: + cpu_stackwalker = new StackwalkerSPARC(system_info, + context->GetContextSPARC(), + memory, modules, frame_symbolizer); + break; + + case MD_CONTEXT_MIPS: + cpu_stackwalker = new StackwalkerMIPS(system_info, + context->GetContextMIPS(), + memory, modules, frame_symbolizer); + break; + + case MD_CONTEXT_ARM: + { + int fp_register = -1; + if (system_info->os_short == "ios") + fp_register = MD_CONTEXT_ARM_REG_IOS_FP; + cpu_stackwalker = new StackwalkerARM(system_info, + context->GetContextARM(), + fp_register, memory, modules, + frame_symbolizer); + break; + } + + case MD_CONTEXT_ARM64: + cpu_stackwalker = new StackwalkerARM64(system_info, + context->GetContextARM64(), + memory, modules, + frame_symbolizer); + break; + } + + BPLOG_IF(ERROR, !cpu_stackwalker) << "Unknown CPU type " << HexString(cpu) << + ", can't choose a stackwalker " + "implementation"; + return cpu_stackwalker; +} + +bool 
Stackwalker::InstructionAddressSeemsValid(uint64_t address) { + StackFrame frame; + frame.instruction = address; + StackFrameSymbolizer::SymbolizerResult symbolizer_result = + frame_symbolizer_->FillSourceLineInfo(modules_, system_info_, &frame); + + if (!frame.module) { + // not inside any loaded module + return false; + } + + if (!frame_symbolizer_->HasImplementation()) { + // No valid implementation to symbolize stack frame, but the address is + // within a known module. + return true; + } + + if (symbolizer_result != StackFrameSymbolizer::kNoError && + symbolizer_result != StackFrameSymbolizer::kWarningCorruptSymbols) { + // Some error occurred during symbolization, but the address is within a + // known module + return true; + } + + return !frame.function_name.empty(); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_address_list.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_address_list.cc new file mode 100644 index 0000000000..e81fec282c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_address_list.cc @@ -0,0 +1,92 @@ +// Copyright (c) 2013 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_address_list.cc: a pseudo stack walker. +// +// See stackwalker_address_list.h for documentation. 
+// +// Author: Chris Hamilton + +#include + +#include + +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/memory_region.h" +#include "google_breakpad/processor/source_line_resolver_interface.h" +#include "google_breakpad/processor/stack_frame.h" +#include "processor/logging.h" +#include "processor/stackwalker_address_list.h" + +namespace google_breakpad { + +StackwalkerAddressList::StackwalkerAddressList( + const uint64_t* frames, + size_t frame_count, + const CodeModules* modules, + StackFrameSymbolizer* frame_symbolizer) + : Stackwalker(NULL, NULL, modules, frame_symbolizer), + frames_(frames), + frame_count_(frame_count) { + assert(frames); + assert(frame_symbolizer); +} + +StackFrame* StackwalkerAddressList::GetContextFrame() { + if (frame_count_ == 0) + return NULL; + + StackFrame* frame = new StackFrame(); + frame->instruction = frames_[0]; + frame->trust = StackFrame::FRAME_TRUST_PREWALKED; + return frame; +} + +StackFrame* StackwalkerAddressList::GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed) { + if (!stack) { + BPLOG(ERROR) << "Can't get caller frame without stack"; + return NULL; + } + + size_t frame_index = stack->frames()->size(); + + // There are no more frames to fetch. + if (frame_index >= frame_count_) + return NULL; + + // All frames have the highest level of trust because they were + // explicitly provided. + StackFrame* frame = new StackFrame(); + frame->instruction = frames_[frame_index]; + frame->trust = StackFrame::FRAME_TRUST_PREWALKED; + return frame; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_address_list.h b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_address_list.h new file mode 100644 index 0000000000..0f8c989efd --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_address_list.h @@ -0,0 +1,72 @@ +// Copyright (c) 2013 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +// stackwalker_address_list.h: a pseudo stackwalker. +// +// Doesn't actually walk a stack, rather initializes a CallStack given an +// explicit list of already walked return addresses. +// +// Author: Chris Hamilton + +#ifndef PROCESSOR_STACKWALKER_ADDRESS_LIST_H_ +#define PROCESSOR_STACKWALKER_ADDRESS_LIST_H_ + +#include "common/basictypes.h" +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/processor/stackwalker.h" + +namespace google_breakpad { + +class CodeModules; + +class StackwalkerAddressList : public Stackwalker { + public: + // Initializes this stack walker with an explicit set of frame addresses. + // |modules| and |frame_symbolizer| are passed directly through to the base + // Stackwalker constructor. + StackwalkerAddressList(const uint64_t* frames, + size_t frame_count, + const CodeModules* modules, + StackFrameSymbolizer* frame_symbolizer); + + private: + // Implementation of Stackwalker. + virtual StackFrame* GetContextFrame(); + virtual StackFrame* GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed); + + const uint64_t* frames_; + size_t frame_count_; + + DISALLOW_COPY_AND_ASSIGN(StackwalkerAddressList); +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_STACKWALKER_ADDRESS_LIST_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_address_list_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_address_list_unittest.cc new file mode 100644 index 0000000000..ab4e9c0880 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_address_list_unittest.cc @@ -0,0 +1,197 @@ +// Copyright (c) 2013, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_address_list_unittest.cc: Unit tests for the +// StackwalkerAddressList class. 
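// The basic pattern exercised by these tests: hand the walker an array of
// already-collected return addresses and let the symbolizer attribute each
// one to a module and function. A condensed sketch (the addresses, modules,
// supplier and resolver are placeholders; the fixtures below set them up
// for real):
//
//   const uint64_t frames[] = { 0x50003000, 0x40002000 };
//   google_breakpad::StackFrameSymbolizer symbolizer(&supplier, &resolver);
//   google_breakpad::StackwalkerAddressList walker(frames, 2, &modules,
//                                                  &symbolizer);
//   google_breakpad::CallStack stack;
//   std::vector<const google_breakpad::CodeModule*> without_symbols;
//   std::vector<const google_breakpad::CodeModule*> corrupt_symbols;
//   walker.Walk(&stack, &without_symbols, &corrupt_symbols);
//   // Every resulting frame carries StackFrame::FRAME_TRUST_PREWALKED.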
+// +// Author: Chris Hamilton + +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/using_std_string.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/code_module.h" +#include "google_breakpad/processor/source_line_resolver_interface.h" +#include "google_breakpad/processor/stack_frame.h" +#include "processor/stackwalker_unittest_utils.h" +#include "processor/stackwalker_address_list.h" + +using google_breakpad::BasicSourceLineResolver; +using google_breakpad::CallStack; +using google_breakpad::CodeModule; +using google_breakpad::StackFrameSymbolizer; +using google_breakpad::StackFrame; +using google_breakpad::Stackwalker; +using google_breakpad::StackwalkerAddressList; +using std::vector; +using testing::_; +using testing::AnyNumber; +using testing::Return; +using testing::SetArgumentPointee; + +#define arraysize(f) (sizeof(f) / sizeof(*f)) + +// Addresses and sizes of a couple dummy modules. +uint64_t kModule1Base = 0x40000000; +uint64_t kModule1Size = 0x10000; +uint64_t kModule2Base = 0x50000000; +uint64_t kModule2Size = 0x10000; + +// A handful of addresses that lie within the modules above. +const uint64_t kDummyFrames[] = { + 0x50003000, 0x50002000, 0x50001000, 0x40002000, 0x40001000 }; + +class StackwalkerAddressListTest : public testing::Test { + public: + StackwalkerAddressListTest() + : // Give the two modules reasonable standard locations and names + // for tests to play with. + module1(kModule1Base, kModule1Size, "module1", "version1"), + module2(kModule2Base, kModule2Size, "module2", "version2") { + // Create some modules with some stock debugging information. + modules.Add(&module1); + modules.Add(&module2); + + // By default, none of the modules have symbol info; call + // SetModuleSymbols to override this. + EXPECT_CALL(supplier, GetCStringSymbolData(_, _, _, _, _)) + .WillRepeatedly(Return(MockSymbolSupplier::NOT_FOUND)); + + // Avoid GMOCK WARNING "Uninteresting mock function call - returning + // directly" for FreeSymbolData(). + EXPECT_CALL(supplier, FreeSymbolData(_)).Times(AnyNumber()); + } + + // Set the Breakpad symbol information that supplier should return for + // MODULE to INFO. 
+ void SetModuleSymbols(MockCodeModule *module, const string &info) { + size_t buffer_size; + char *buffer = supplier.CopySymbolDataAndOwnTheCopy(info, &buffer_size); + EXPECT_CALL(supplier, GetCStringSymbolData(module, NULL, _, _, _)) + .WillRepeatedly(DoAll(SetArgumentPointee<3>(buffer), + SetArgumentPointee<4>(buffer_size), + Return(MockSymbolSupplier::FOUND))); + } + + void CheckCallStack(const CallStack& call_stack) { + const std::vector* frames = call_stack.frames(); + ASSERT_EQ(arraysize(kDummyFrames), frames->size()); + for (size_t i = 0; i < arraysize(kDummyFrames); ++i) { + ASSERT_EQ(kDummyFrames[i], frames->at(i)->instruction); + ASSERT_EQ(StackFrame::FRAME_TRUST_PREWALKED, frames->at(i)->trust); + } + ASSERT_EQ(static_cast(&module2), frames->at(0)->module); + ASSERT_EQ(static_cast(&module2), frames->at(1)->module); + ASSERT_EQ(static_cast(&module2), frames->at(2)->module); + ASSERT_EQ(static_cast(&module1), frames->at(3)->module); + ASSERT_EQ(static_cast(&module1), frames->at(4)->module); + } + + MockCodeModule module1; + MockCodeModule module2; + MockCodeModules modules; + MockSymbolSupplier supplier; + BasicSourceLineResolver resolver; +}; + +TEST_F(StackwalkerAddressListTest, ScanWithoutSymbols) { + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerAddressList walker(kDummyFrames, arraysize(kDummyFrames), + &modules, &frame_symbolizer); + + CallStack call_stack; + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + + // The stack starts in module2, so we expect that to be the first module + // found without symbols. + ASSERT_EQ(2U, modules_without_symbols.size()); + ASSERT_EQ("module2", modules_without_symbols[0]->debug_file()); + ASSERT_EQ("module1", modules_without_symbols[1]->debug_file()); + ASSERT_EQ(0u, modules_with_corrupt_symbols.size()); + + ASSERT_NO_FATAL_FAILURE(CheckCallStack(call_stack)); +} + +TEST_F(StackwalkerAddressListTest, ScanWithSymbols) { + // File : FILE number(dex) name + // Function: FUNC address(hex) size(hex) parameter_size(hex) name + // Line : address(hex) size(hex) line(dec) filenum(dec) + SetModuleSymbols(&module2, + "FILE 1 module2.cc\n" + "FUNC 3000 100 10 mod2func3\n" + "3000 10 1 1\n" + "FUNC 2000 200 10 mod2func2\n" + "FUNC 1000 300 10 mod2func1\n"); + SetModuleSymbols(&module1, + "FUNC 2000 200 10 mod1func2\n" + "FUNC 1000 300 10 mod1func1\n"); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerAddressList walker(kDummyFrames, arraysize(kDummyFrames), + &modules, &frame_symbolizer); + + CallStack call_stack; + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + + ASSERT_EQ(0u, modules_without_symbols.size()); + ASSERT_EQ(0u, modules_with_corrupt_symbols.size()); + + ASSERT_NO_FATAL_FAILURE(CheckCallStack(call_stack)); + + const std::vector* frames = call_stack.frames(); + + // We have full file/line information for the first function call. 
+ ASSERT_EQ("mod2func3", frames->at(0)->function_name); + ASSERT_EQ(0x50003000u, frames->at(0)->function_base); + ASSERT_EQ("module2.cc", frames->at(0)->source_file_name); + ASSERT_EQ(1, frames->at(0)->source_line); + ASSERT_EQ(0x50003000u, frames->at(0)->source_line_base); + + ASSERT_EQ("mod2func2", frames->at(1)->function_name); + ASSERT_EQ(0x50002000u, frames->at(1)->function_base); + + ASSERT_EQ("mod2func1", frames->at(2)->function_name); + ASSERT_EQ(0x50001000u, frames->at(2)->function_base); + + ASSERT_EQ("mod1func2", frames->at(3)->function_name); + ASSERT_EQ(0x40002000u, frames->at(3)->function_base); + + ASSERT_EQ("mod1func1", frames->at(4)->function_name); + ASSERT_EQ(0x40001000u, frames->at(4)->function_base); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_amd64.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_amd64.cc new file mode 100644 index 0000000000..f252a33b71 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_amd64.cc @@ -0,0 +1,307 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_amd64.cc: amd64-specific stackwalker. +// +// See stackwalker_amd64.h for documentation. +// +// Author: Mark Mentovai, Ted Mielczarek + +#include + +#include "common/scoped_ptr.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/memory_region.h" +#include "google_breakpad/processor/source_line_resolver_interface.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "google_breakpad/processor/system_info.h" +#include "processor/cfi_frame_info.h" +#include "processor/logging.h" +#include "processor/stackwalker_amd64.h" + +namespace google_breakpad { + + +const StackwalkerAMD64::CFIWalker::RegisterSet +StackwalkerAMD64::cfi_register_map_[] = { + // It may seem like $rip and $rsp are callee-saves, because the callee is + // responsible for having them restored upon return. 
But the callee_saves + // flags here really means that the walker should assume they're + // unchanged if the CFI doesn't mention them --- clearly wrong for $rip + // and $rsp. + { "$rax", NULL, false, + StackFrameAMD64::CONTEXT_VALID_RAX, &MDRawContextAMD64::rax }, + { "$rdx", NULL, false, + StackFrameAMD64::CONTEXT_VALID_RDX, &MDRawContextAMD64::rdx }, + { "$rcx", NULL, false, + StackFrameAMD64::CONTEXT_VALID_RCX, &MDRawContextAMD64::rcx }, + { "$rbx", NULL, true, + StackFrameAMD64::CONTEXT_VALID_RBX, &MDRawContextAMD64::rbx }, + { "$rsi", NULL, false, + StackFrameAMD64::CONTEXT_VALID_RSI, &MDRawContextAMD64::rsi }, + { "$rdi", NULL, false, + StackFrameAMD64::CONTEXT_VALID_RDI, &MDRawContextAMD64::rdi }, + { "$rbp", NULL, true, + StackFrameAMD64::CONTEXT_VALID_RBP, &MDRawContextAMD64::rbp }, + { "$rsp", ".cfa", false, + StackFrameAMD64::CONTEXT_VALID_RSP, &MDRawContextAMD64::rsp }, + { "$r8", NULL, false, + StackFrameAMD64::CONTEXT_VALID_R8, &MDRawContextAMD64::r8 }, + { "$r9", NULL, false, + StackFrameAMD64::CONTEXT_VALID_R9, &MDRawContextAMD64::r9 }, + { "$r10", NULL, false, + StackFrameAMD64::CONTEXT_VALID_R10, &MDRawContextAMD64::r10 }, + { "$r11", NULL, false, + StackFrameAMD64::CONTEXT_VALID_R11, &MDRawContextAMD64::r11 }, + { "$r12", NULL, true, + StackFrameAMD64::CONTEXT_VALID_R12, &MDRawContextAMD64::r12 }, + { "$r13", NULL, true, + StackFrameAMD64::CONTEXT_VALID_R13, &MDRawContextAMD64::r13 }, + { "$r14", NULL, true, + StackFrameAMD64::CONTEXT_VALID_R14, &MDRawContextAMD64::r14 }, + { "$r15", NULL, true, + StackFrameAMD64::CONTEXT_VALID_R15, &MDRawContextAMD64::r15 }, + { "$rip", ".ra", false, + StackFrameAMD64::CONTEXT_VALID_RIP, &MDRawContextAMD64::rip }, +}; + +StackwalkerAMD64::StackwalkerAMD64(const SystemInfo* system_info, + const MDRawContextAMD64* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* resolver_helper) + : Stackwalker(system_info, memory, modules, resolver_helper), + context_(context), + cfi_walker_(cfi_register_map_, + (sizeof(cfi_register_map_) / sizeof(cfi_register_map_[0]))) { +} + +uint64_t StackFrameAMD64::ReturnAddress() const { + assert(context_validity & StackFrameAMD64::CONTEXT_VALID_RIP); + return context.rip; +} + +StackFrame* StackwalkerAMD64::GetContextFrame() { + if (!context_) { + BPLOG(ERROR) << "Can't get context frame without context"; + return NULL; + } + + StackFrameAMD64* frame = new StackFrameAMD64(); + + // The instruction pointer is stored directly in a register, so pull it + // straight out of the CPU context structure. + frame->context = *context_; + frame->context_validity = StackFrameAMD64::CONTEXT_VALID_ALL; + frame->trust = StackFrame::FRAME_TRUST_CONTEXT; + frame->instruction = frame->context.rip; + + return frame; +} + +StackFrameAMD64* StackwalkerAMD64::GetCallerByCFIFrameInfo( + const vector &frames, + CFIFrameInfo* cfi_frame_info) { + StackFrameAMD64* last_frame = static_cast(frames.back()); + + scoped_ptr frame(new StackFrameAMD64()); + if (!cfi_walker_ + .FindCallerRegisters(*memory_, *cfi_frame_info, + last_frame->context, last_frame->context_validity, + &frame->context, &frame->context_validity)) + return NULL; + + // Make sure we recovered all the essentials. 
+ static const int essentials = (StackFrameAMD64::CONTEXT_VALID_RIP + | StackFrameAMD64::CONTEXT_VALID_RSP); + if ((frame->context_validity & essentials) != essentials) + return NULL; + + frame->trust = StackFrame::FRAME_TRUST_CFI; + return frame.release(); +} + +StackFrameAMD64* StackwalkerAMD64::GetCallerByFramePointerRecovery( + const vector& frames) { + StackFrameAMD64* last_frame = static_cast(frames.back()); + uint64_t last_rsp = last_frame->context.rsp; + uint64_t last_rbp = last_frame->context.rbp; + + // Assume the presence of a frame pointer. This is not mandated by the + // AMD64 ABI, c.f. section 3.2.2 footnote 7, though it is typical for + // compilers to still preserve the frame pointer and not treat %rbp as a + // general purpose register. + // + // With this assumption, the CALL instruction pushes the return address + // onto the stack and sets %rip to the procedure to enter. The procedure + // then establishes the stack frame with a prologue that PUSHes the current + // %rbp onto the stack, MOVes the current %rsp to %rbp, and then allocates + // space for any local variables. Using this procedure linking information, + // it is possible to locate frame information for the callee: + // + // %caller_rsp = *(%callee_rbp + 16) + // %caller_rip = *(%callee_rbp + 8) + // %caller_rbp = *(%callee_rbp) + + uint64_t caller_rip, caller_rbp; + if (memory_->GetMemoryAtAddress(last_rbp + 8, &caller_rip) && + memory_->GetMemoryAtAddress(last_rbp, &caller_rbp)) { + uint64_t caller_rsp = last_rbp + 16; + + // Simple sanity check that the stack is growing downwards as expected. + if (caller_rbp < last_rbp || caller_rsp < last_rsp) + return NULL; + + StackFrameAMD64* frame = new StackFrameAMD64(); + frame->trust = StackFrame::FRAME_TRUST_FP; + frame->context = last_frame->context; + frame->context.rip = caller_rip; + frame->context.rsp = caller_rsp; + frame->context.rbp = caller_rbp; + frame->context_validity = StackFrameAMD64::CONTEXT_VALID_RIP | + StackFrameAMD64::CONTEXT_VALID_RSP | + StackFrameAMD64::CONTEXT_VALID_RBP; + return frame; + } + + return NULL; +} + +StackFrameAMD64* StackwalkerAMD64::GetCallerByStackScan( + const vector &frames) { + StackFrameAMD64* last_frame = static_cast(frames.back()); + uint64_t last_rsp = last_frame->context.rsp; + uint64_t caller_rip_address, caller_rip; + + if (!ScanForReturnAddress(last_rsp, &caller_rip_address, &caller_rip, + frames.size() == 1 /* is_context_frame */)) { + // No plausible return address was found. + return NULL; + } + + // Create a new stack frame (ownership will be transferred to the caller) + // and fill it in. + StackFrameAMD64* frame = new StackFrameAMD64(); + + frame->trust = StackFrame::FRAME_TRUST_SCAN; + frame->context = last_frame->context; + frame->context.rip = caller_rip; + // The caller's %rsp is directly underneath the return address pushed by + // the call. + frame->context.rsp = caller_rip_address + 8; + frame->context_validity = StackFrameAMD64::CONTEXT_VALID_RIP | + StackFrameAMD64::CONTEXT_VALID_RSP; + + // Other unwinders give up if they don't have an %rbp value, so see if we + // can pass some plausible value on. + if (last_frame->context_validity & StackFrameAMD64::CONTEXT_VALID_RBP) { + // Functions typically push their caller's %rbp immediately upon entry, + // and then set %rbp to point to that. So if the callee's %rbp is + // pointing to the first word below the alleged return address, presume + // that the caller's %rbp is saved there. 
+ if (caller_rip_address - 8 == last_frame->context.rbp) { + uint64_t caller_rbp = 0; + if (memory_->GetMemoryAtAddress(last_frame->context.rbp, &caller_rbp) && + caller_rbp > caller_rip_address) { + frame->context.rbp = caller_rbp; + frame->context_validity |= StackFrameAMD64::CONTEXT_VALID_RBP; + } + } else if (last_frame->context.rbp >= caller_rip_address + 8) { + // If the callee's %rbp is plausible as a value for the caller's + // %rbp, presume that the callee left it unchanged. + frame->context.rbp = last_frame->context.rbp; + frame->context_validity |= StackFrameAMD64::CONTEXT_VALID_RBP; + } + } + + return frame; +} + +StackFrame* StackwalkerAMD64::GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed) { + if (!memory_ || !stack) { + BPLOG(ERROR) << "Can't get caller frame without memory or stack"; + return NULL; + } + + const vector &frames = *stack->frames(); + StackFrameAMD64* last_frame = static_cast(frames.back()); + scoped_ptr new_frame; + + // If we have DWARF CFI information, use it. + scoped_ptr cfi_frame_info( + frame_symbolizer_->FindCFIFrameInfo(last_frame)); + if (cfi_frame_info.get()) + new_frame.reset(GetCallerByCFIFrameInfo(frames, cfi_frame_info.get())); + + // If CFI was not available or failed, try using frame pointer recovery. + if (!new_frame.get()) { + new_frame.reset(GetCallerByFramePointerRecovery(frames)); + } + + // If all else fails, fall back to stack scanning. + if (stack_scan_allowed && !new_frame.get()) { + new_frame.reset(GetCallerByStackScan(frames)); + } + + // If nothing worked, tell the caller. + if (!new_frame.get()) + return NULL; + + if (system_info_->os_short == "nacl") { + // Apply constraints from Native Client's x86-64 sandbox. These + // registers have the 4GB-aligned sandbox base address (from r15) + // added to them, and only the bottom 32 bits are relevant for + // stack walking. + new_frame->context.rip = static_cast(new_frame->context.rip); + new_frame->context.rsp = static_cast(new_frame->context.rsp); + new_frame->context.rbp = static_cast(new_frame->context.rbp); + } + + // Treat an instruction address of 0 as end-of-stack. + if (new_frame->context.rip == 0) + return NULL; + + // If the new stack pointer is at a lower address than the old, then + // that's clearly incorrect. Treat this as end-of-stack to enforce + // progress and avoid infinite loops. + if (new_frame->context.rsp <= last_frame->context.rsp) + return NULL; + + // new_frame->context.rip is the return address, which is the instruction + // after the CALL that caused us to arrive at the callee. Set + // new_frame->instruction to one less than that, so it points within the + // CALL instruction. See StackFrame::instruction for details, and + // StackFrameAMD64::ReturnAddress. + new_frame->instruction = new_frame->context.rip - 1; + + return new_frame.release(); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_amd64.h b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_amd64.h new file mode 100644 index 0000000000..8f3dbd5280 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_amd64.h @@ -0,0 +1,108 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_amd64.h: amd64-specific stackwalker. +// +// Provides stack frames given amd64 register context and a memory region +// corresponding to a amd64 stack. +// +// Author: Mark Mentovai, Ted Mielczarek + + +#ifndef PROCESSOR_STACKWALKER_AMD64_H__ +#define PROCESSOR_STACKWALKER_AMD64_H__ + +#include + +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/stackwalker.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/cfi_frame_info.h" + +namespace google_breakpad { + +class CodeModules; + +class StackwalkerAMD64 : public Stackwalker { + public: + // context is a amd64 context object that gives access to amd64-specific + // register state corresponding to the innermost called frame to be + // included in the stack. The other arguments are passed directly through + // to the base Stackwalker constructor. + StackwalkerAMD64(const SystemInfo* system_info, + const MDRawContextAMD64* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* frame_symbolizer); + + private: + // A STACK CFI-driven frame walker for the AMD64 + typedef SimpleCFIWalker CFIWalker; + + // Implementation of Stackwalker, using amd64 context (stack pointer in %rsp, + // stack base in %rbp) and stack conventions (saved stack pointer at 0(%rbp)) + virtual StackFrame* GetContextFrame(); + virtual StackFrame* GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed); + + // Use cfi_frame_info (derived from STACK CFI records) to construct + // the frame that called frames.back(). The caller takes ownership + // of the returned frame. Return NULL on failure. + StackFrameAMD64* GetCallerByCFIFrameInfo(const vector &frames, + CFIFrameInfo* cfi_frame_info); + + // Assumes a traditional frame layout where the frame pointer has not been + // omitted. 
The expectation is that caller's %rbp is pushed to the stack + // after the return address of the callee, and that the callee's %rsp can + // be used to find the pushed %rbp. + // Caller owns the returned frame object. Returns NULL on failure. + StackFrameAMD64* GetCallerByFramePointerRecovery( + const vector& frames); + + // Scan the stack for plausible return addresses. The caller takes ownership + // of the returned frame. Return NULL on failure. + StackFrameAMD64* GetCallerByStackScan(const vector &frames); + + // Stores the CPU context corresponding to the innermost stack frame to + // be returned by GetContextFrame. + const MDRawContextAMD64* context_; + + // Our register map, for cfi_walker_. + static const CFIWalker::RegisterSet cfi_register_map_[]; + + // Our CFI frame walker. + const CFIWalker cfi_walker_; +}; + + +} // namespace google_breakpad + + +#endif // PROCESSOR_STACKWALKER_AMD64_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_amd64_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_amd64_unittest.cc new file mode 100644 index 0000000000..a54198bfd0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_amd64_unittest.cc @@ -0,0 +1,699 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// stackwalker_amd64_unittest.cc: Unit tests for StackwalkerAMD64 class. 
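// Before the tests, a minimal standalone illustration of the frame-pointer
// recovery they exercise (see GetCallerByFramePointerRecovery above): with a
// conventional prologue, the saved %rbp sits at [%rbp], the return address at
// [%rbp + 8], and the caller's %rsp is simply %rbp + 16 (no load), which is
// what the walker computes as last_rbp + 16. ToyMemory, RecoveredFrame and the
// function below are invented for illustration; this is not Breakpad code.
#include <cstdint>
#include <map>
#include <optional>

// Toy 64-bit memory: address -> value for the 8-byte slots we know about.
using ToyMemory = std::map<uint64_t, uint64_t>;

struct RecoveredFrame { uint64_t rip, rsp, rbp; };

std::optional<RecoveredFrame> RecoverByFramePointer(const ToyMemory& mem,
                                                    uint64_t callee_rbp,
                                                    uint64_t callee_rsp) {
  auto saved_rbp = mem.find(callee_rbp);        // caller's %rbp, pushed in the prologue
  auto return_addr = mem.find(callee_rbp + 8);  // pushed by the CALL instruction
  if (saved_rbp == mem.end() || return_addr == mem.end())
    return std::nullopt;  // stack memory not available
  RecoveredFrame caller{return_addr->second, callee_rbp + 16, saved_rbp->second};
  // The stack grows downward, so the caller's frame must sit at higher
  // addresses than the callee's; anything else is treated as end-of-stack.
  if (caller.rbp < callee_rbp || caller.rsp < callee_rsp)
    return std::nullopt;
  return caller;
}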
+ +#include +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/test_assembler.h" +#include "common/using_std_string.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/code_module.h" +#include "google_breakpad/processor/source_line_resolver_interface.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/stackwalker_unittest_utils.h" +#include "processor/stackwalker_amd64.h" + +using google_breakpad::BasicSourceLineResolver; +using google_breakpad::CallStack; +using google_breakpad::CodeModule; +using google_breakpad::StackFrameSymbolizer; +using google_breakpad::StackFrame; +using google_breakpad::StackFrameAMD64; +using google_breakpad::Stackwalker; +using google_breakpad::StackwalkerAMD64; +using google_breakpad::SystemInfo; +using google_breakpad::test_assembler::kLittleEndian; +using google_breakpad::test_assembler::Label; +using google_breakpad::test_assembler::Section; +using std::vector; +using testing::_; +using testing::AnyNumber; +using testing::Return; +using testing::SetArgumentPointee; +using testing::Test; + +class StackwalkerAMD64Fixture { + public: + StackwalkerAMD64Fixture() + : stack_section(kLittleEndian), + // Give the two modules reasonable standard locations and names + // for tests to play with. + module1(0x40000000c0000000ULL, 0x10000, "module1", "version1"), + module2(0x50000000b0000000ULL, 0x10000, "module2", "version2") { + // Identify the system as a Linux system. + system_info.os = "Linux"; + system_info.os_short = "linux"; + system_info.os_version = "Horrendous Hippo"; + system_info.cpu = "x86"; + system_info.cpu_info = ""; + + // Put distinctive values in the raw CPU context. + BrandContext(&raw_context); + + // Create some modules with some stock debugging information. + modules.Add(&module1); + modules.Add(&module2); + + // By default, none of the modules have symbol info; call + // SetModuleSymbols to override this. + EXPECT_CALL(supplier, GetCStringSymbolData(_, _, _, _, _)) + .WillRepeatedly(Return(MockSymbolSupplier::NOT_FOUND)); + + // Avoid GMOCK WARNING "Uninteresting mock function call - returning + // directly" for FreeSymbolData(). + EXPECT_CALL(supplier, FreeSymbolData(_)).Times(AnyNumber()); + + // Reset max_frames_scanned since it's static. + Stackwalker::set_max_frames_scanned(1024); + } + + // Set the Breakpad symbol information that supplier should return for + // MODULE to INFO. + void SetModuleSymbols(MockCodeModule *module, const string &info) { + size_t buffer_size; + char *buffer = supplier.CopySymbolDataAndOwnTheCopy(info, &buffer_size); + EXPECT_CALL(supplier, GetCStringSymbolData(module, &system_info, _, _, _)) + .WillRepeatedly(DoAll(SetArgumentPointee<3>(buffer), + SetArgumentPointee<4>(buffer_size), + Return(MockSymbolSupplier::FOUND))); + } + + // Populate stack_region with the contents of stack_section. Use + // stack_section.start() as the region's starting address. + void RegionFromSection() { + string contents; + ASSERT_TRUE(stack_section.GetContents(&contents)); + stack_region.Init(stack_section.start().Value(), contents); + } + + // Fill RAW_CONTEXT with pseudo-random data, for round-trip checking. 
+ void BrandContext(MDRawContextAMD64 *raw_context) { + uint8_t x = 173; + for (size_t i = 0; i < sizeof(*raw_context); i++) + reinterpret_cast(raw_context)[i] = (x += 17); + } + + SystemInfo system_info; + MDRawContextAMD64 raw_context; + Section stack_section; + MockMemoryRegion stack_region; + MockCodeModule module1; + MockCodeModule module2; + MockCodeModules modules; + MockSymbolSupplier supplier; + BasicSourceLineResolver resolver; + CallStack call_stack; + const vector *frames; +}; + +class GetContextFrame: public StackwalkerAMD64Fixture, public Test { }; + +class SanityCheck: public StackwalkerAMD64Fixture, public Test { }; + +TEST_F(SanityCheck, NoResolver) { + // There should be no references to the stack in this walk: we don't + // provide any call frame information, so trying to reconstruct the + // context frame's caller should fail. So there's no need for us to + // provide stack contents. + raw_context.rip = 0x40000000c0000200ULL; + raw_context.rbp = 0x8000000080000000ULL; + + StackFrameSymbolizer frame_symbolizer(NULL, NULL); + StackwalkerAMD64 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + // This should succeed even without a resolver or supplier. + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_GE(1U, frames->size()); + StackFrameAMD64 *frame = static_cast(frames->at(0)); + // Check that the values from the original raw context made it + // through to the context in the stack frame. + EXPECT_EQ(0, memcmp(&raw_context, &frame->context, sizeof(raw_context))); +} + +TEST_F(GetContextFrame, Simple) { + // There should be no references to the stack in this walk: we don't + // provide any call frame information, so trying to reconstruct the + // context frame's caller should fail. So there's no need for us to + // provide stack contents. + raw_context.rip = 0x40000000c0000200ULL; + raw_context.rbp = 0x8000000080000000ULL; + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerAMD64 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_GE(1U, frames->size()); + StackFrameAMD64 *frame = static_cast(frames->at(0)); + // Check that the values from the original raw context made it + // through to the context in the stack frame. + EXPECT_EQ(0, memcmp(&raw_context, &frame->context, sizeof(raw_context))); +} + +// The stackwalker should be able to produce the context frame even +// without stack memory present. 
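// That works because the context frame is synthesized purely from the register
// snapshot, with no memory reads, so the fixture can brand the raw context with
// pseudo-random bytes and memcmp it against the frame's copy afterwards. A
// compact standalone sketch of that brand-and-round-trip pattern; ToyContext,
// ToyContextFrame and the functions below are invented for illustration and
// are not the Breakpad API.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

struct ToyContext { uint64_t rip, rsp, rbp; };
struct ToyContextFrame { ToyContext context; uint64_t instruction; };

// Fill the context with a deterministic pseudo-random byte pattern so that any
// field dropped or reordered on the way through shows up in the memcmp below.
void Brand(ToyContext* ctx) {
  uint8_t x = 173;
  for (size_t i = 0; i < sizeof(*ctx); ++i)
    reinterpret_cast<uint8_t*>(ctx)[i] = (x += 17);
}

// The context (youngest) frame needs nothing but the register context itself.
ToyContextFrame MakeContextFrame(const ToyContext& ctx) {
  return ToyContextFrame{ctx, ctx.rip};
}

int main() {
  ToyContext raw;
  Brand(&raw);
  ToyContextFrame frame = MakeContextFrame(raw);
  assert(std::memcmp(&raw, &frame.context, sizeof(raw)) == 0);
  return 0;
}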
+TEST_F(GetContextFrame, NoStackMemory) { + raw_context.rip = 0x40000000c0000200ULL; + raw_context.rbp = 0x8000000080000000ULL; + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerAMD64 walker(&system_info, &raw_context, NULL, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_GE(1U, frames->size()); + StackFrameAMD64 *frame = static_cast(frames->at(0)); + // Check that the values from the original raw context made it + // through to the context in the stack frame. + EXPECT_EQ(0, memcmp(&raw_context, &frame->context, sizeof(raw_context))); +} + +class GetCallerFrame: public StackwalkerAMD64Fixture, public Test { }; + +TEST_F(GetCallerFrame, ScanWithoutSymbols) { + // When the stack walker resorts to scanning the stack, + // only addresses located within loaded modules are + // considered valid return addresses. + // Force scanning through three frames to ensure that the + // stack pointer is set properly in scan-recovered frames. + stack_section.start() = 0x8000000080000000ULL; + uint64_t return_address1 = 0x50000000b0000100ULL; + uint64_t return_address2 = 0x50000000b0000900ULL; + Label frame1_sp, frame2_sp, frame1_rbp; + stack_section + // frame 0 + .Append(16, 0) // space + + .D64(0x40000000b0000000ULL) // junk that's not + .D64(0x50000000d0000000ULL) // a return address + + .D64(return_address1) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(16, 0) // space + + .D64(0x40000000b0000000ULL) // more junk + .D64(0x50000000d0000000ULL) + + .Mark(&frame1_rbp) + .D64(stack_section.start()) // This is in the right place to be + // a saved rbp, but it's bogus, so + // we shouldn't report it. 
+ + .D64(return_address2) // actual return address + // frame 2 + .Mark(&frame2_sp) + .Append(32, 0); // end of stack + + RegionFromSection(); + + raw_context.rip = 0x40000000c0000200ULL; + raw_context.rbp = frame1_rbp.Value(); + raw_context.rsp = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerAMD64 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(2U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ("module2", modules_without_symbols[1]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(3U, frames->size()); + + StackFrameAMD64 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameAMD64::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); + + StackFrameAMD64 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame1->trust); + ASSERT_EQ((StackFrameAMD64::CONTEXT_VALID_RIP | + StackFrameAMD64::CONTEXT_VALID_RSP | + StackFrameAMD64::CONTEXT_VALID_RBP), + frame1->context_validity); + EXPECT_EQ(return_address1, frame1->context.rip); + EXPECT_EQ(frame1_sp.Value(), frame1->context.rsp); + EXPECT_EQ(frame1_rbp.Value(), frame1->context.rbp); + + StackFrameAMD64 *frame2 = static_cast(frames->at(2)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame2->trust); + ASSERT_EQ((StackFrameAMD64::CONTEXT_VALID_RIP | + StackFrameAMD64::CONTEXT_VALID_RSP), + frame2->context_validity); + EXPECT_EQ(return_address2, frame2->context.rip); + EXPECT_EQ(frame2_sp.Value(), frame2->context.rsp); +} + +TEST_F(GetCallerFrame, ScanWithFunctionSymbols) { + // During stack scanning, if a potential return address + // is located within a loaded module that has symbols, + // it is only considered a valid return address if it + // lies within a function's bounds. + stack_section.start() = 0x8000000080000000ULL; + uint64_t return_address = 0x50000000b0000110ULL; + Label frame1_sp, frame1_rbp; + + stack_section + // frame 0 + .Append(16, 0) // space + + .D64(0x40000000b0000000ULL) // junk that's not + .D64(0x50000000b0000000ULL) // a return address + + .D64(0x40000000c0001000ULL) // a couple of plausible addresses + .D64(0x50000000b000aaaaULL) // that are not within functions + + .D64(return_address) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(32, 0) // end of stack + .Mark(&frame1_rbp); + RegionFromSection(); + + raw_context.rip = 0x40000000c0000200ULL; + raw_context.rbp = frame1_rbp.Value(); + raw_context.rsp = stack_section.start().Value(); + + SetModuleSymbols(&module1, + // The youngest frame's function. + "FUNC 100 400 10 platypus\n"); + SetModuleSymbols(&module2, + // The calling frame's function. 
+ "FUNC 100 400 10 echidna\n"); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerAMD64 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + StackFrameAMD64 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameAMD64::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ("platypus", frame0->function_name); + EXPECT_EQ(0x40000000c0000100ULL, frame0->function_base); + + StackFrameAMD64 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame1->trust); + ASSERT_EQ((StackFrameAMD64::CONTEXT_VALID_RIP | + StackFrameAMD64::CONTEXT_VALID_RSP | + StackFrameAMD64::CONTEXT_VALID_RBP), + frame1->context_validity); + EXPECT_EQ(return_address, frame1->context.rip); + EXPECT_EQ(frame1_sp.Value(), frame1->context.rsp); + EXPECT_EQ(frame1_rbp.Value(), frame1->context.rbp); + EXPECT_EQ("echidna", frame1->function_name); + EXPECT_EQ(0x50000000b0000100ULL, frame1->function_base); +} + +// Test that set_max_frames_scanned prevents using stack scanning +// to find caller frames. +TEST_F(GetCallerFrame, ScanningNotAllowed) { + // When the stack walker resorts to scanning the stack, + // only addresses located within loaded modules are + // considered valid return addresses. + stack_section.start() = 0x8000000080000000ULL; + uint64_t return_address1 = 0x50000000b0000100ULL; + uint64_t return_address2 = 0x50000000b0000900ULL; + Label frame1_sp, frame2_sp, frame1_rbp; + stack_section + // frame 0 + .Append(16, 0) // space + + .D64(0x40000000b0000000ULL) // junk that's not + .D64(0x50000000d0000000ULL) // a return address + + .D64(return_address1) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(16, 0) // space + + .D64(0x40000000b0000000ULL) // more junk + .D64(0x50000000d0000000ULL) + + .Mark(&frame1_rbp) + .D64(stack_section.start()) // This is in the right place to be + // a saved rbp, but it's bogus, so + // we shouldn't report it. 
+ + .D64(return_address2) // actual return address + // frame 2 + .Mark(&frame2_sp) + .Append(32, 0); // end of stack + + RegionFromSection(); + + raw_context.rip = 0x40000000c0000200ULL; + raw_context.rbp = frame1_rbp.Value(); + raw_context.rsp = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerAMD64 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + Stackwalker::set_max_frames_scanned(0); + + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); + + StackFrameAMD64 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameAMD64::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); +} + +TEST_F(GetCallerFrame, CallerPushedRBP) { + // Functions typically push their %rbp upon entry and set %rbp pointing + // there. If stackwalking finds a plausible address for the next frame's + // %rbp directly below the return address, assume that it is indeed the + // next frame's %rbp. + stack_section.start() = 0x8000000080000000ULL; + uint64_t return_address = 0x50000000b0000110ULL; + Label frame0_rbp, frame1_sp, frame1_rbp; + + stack_section + // frame 0 + .Append(16, 0) // space + + .D64(0x40000000b0000000ULL) // junk that's not + .D64(0x50000000b0000000ULL) // a return address + + .D64(0x40000000c0001000ULL) // a couple of plausible addresses + .D64(0x50000000b000aaaaULL) // that are not within functions + + .Mark(&frame0_rbp) + .D64(frame1_rbp) // caller-pushed %rbp + .D64(return_address) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(32, 0) // body of frame1 + .Mark(&frame1_rbp); // end of stack + RegionFromSection(); + + raw_context.rip = 0x40000000c0000200ULL; + raw_context.rbp = frame0_rbp.Value(); + raw_context.rsp = stack_section.start().Value(); + + SetModuleSymbols(&module1, + // The youngest frame's function. + "FUNC 100 400 10 sasquatch\n"); + SetModuleSymbols(&module2, + // The calling frame's function. 
+ "FUNC 100 400 10 yeti\n"); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerAMD64 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + StackFrameAMD64 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameAMD64::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(frame0_rbp.Value(), frame0->context.rbp); + EXPECT_EQ("sasquatch", frame0->function_name); + EXPECT_EQ(0x40000000c0000100ULL, frame0->function_base); + + StackFrameAMD64 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_FP, frame1->trust); + ASSERT_EQ((StackFrameAMD64::CONTEXT_VALID_RIP | + StackFrameAMD64::CONTEXT_VALID_RSP | + StackFrameAMD64::CONTEXT_VALID_RBP), + frame1->context_validity); + EXPECT_EQ(return_address, frame1->context.rip); + EXPECT_EQ(frame1_sp.Value(), frame1->context.rsp); + EXPECT_EQ(frame1_rbp.Value(), frame1->context.rbp); + EXPECT_EQ("yeti", frame1->function_name); + EXPECT_EQ(0x50000000b0000100ULL, frame1->function_base); +} + +struct CFIFixture: public StackwalkerAMD64Fixture { + CFIFixture() { + // Provide a bunch of STACK CFI records; we'll walk to the caller + // from every point in this series, expecting to find the same set + // of register values. + SetModuleSymbols(&module1, + // The youngest frame's function. + "FUNC 4000 1000 10 enchiridion\n" + // Initially, just a return address. + "STACK CFI INIT 4000 100 .cfa: $rsp 8 + .ra: .cfa 8 - ^\n" + // Push %rbx. + "STACK CFI 4001 .cfa: $rsp 16 + $rbx: .cfa 16 - ^\n" + // Save %r12 in %rbx. Weird, but permitted. + "STACK CFI 4002 $r12: $rbx\n" + // Allocate frame space, and save %r13. + "STACK CFI 4003 .cfa: $rsp 40 + $r13: .cfa 32 - ^\n" + // Put the return address in %r13. + "STACK CFI 4005 .ra: $r13\n" + // Save %rbp, and use it as a frame pointer. + "STACK CFI 4006 .cfa: $rbp 16 + $rbp: .cfa 24 - ^\n" + + // The calling function. + "FUNC 5000 1000 10 epictetus\n" + // Mark it as end of stack. + "STACK CFI INIT 5000 1000 .cfa: $rsp .ra 0\n"); + + // Provide some distinctive values for the caller's registers. + expected.rsp = 0x8000000080000000ULL; + expected.rip = 0x40000000c0005510ULL; + expected.rbp = 0x68995b1de4700266ULL; + expected.rbx = 0x5a5beeb38de23be8ULL; + expected.r12 = 0xed1b02e8cc0fc79cULL; + expected.r13 = 0x1d20ad8acacbe930ULL; + expected.r14 = 0xe94cffc2f7adaa28ULL; + expected.r15 = 0xb638d17d8da413b5ULL; + + // By default, registers are unchanged. + raw_context = expected; + } + + // Walk the stack, using stack_section as the contents of the stack + // and raw_context as the current register values. (Set + // raw_context.rsp to the stack's starting address.) Expect two + // stack frames; in the older frame, expect the callee-saves + // registers to have values matching those in 'expected'. 
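// As a worked example of the postfix rules above: at module offset 0x4001 the
// rules in effect are ".cfa: $rsp 16 +" and "$rbx: .cfa 16 - ^", plus
// ".ra: .cfa 8 - ^" inherited from the INIT record ("^" means "load from
// memory", and $rsp itself recovers to the CFA per the register map in
// stackwalker_amd64.cc). The At4001 test below puts the saved %rbx at [rsp]
// and the return address at [rsp + 8]; evaluating the rules by hand in a
// standalone snippet (toy memory map, not Breakpad's CFI evaluator):
#include <cassert>
#include <cstdint>
#include <map>

int main() {
  // stack_section.start() ends up at expected.rsp - 16 in that test, so the
  // callee's %rsp points at the saved %rbx slot.
  const uint64_t rsp = 0x8000000080000000ULL - 16;
  const std::map<uint64_t, uint64_t> mem = {
      {rsp,     0x5a5beeb38de23be8ULL},  // saved %rbx
      {rsp + 8, 0x40000000c0005510ULL},  // return address
  };

  const uint64_t cfa        = rsp + 16;          // .cfa: $rsp 16 +
  const uint64_t caller_rip = mem.at(cfa - 8);   // .ra:  .cfa 8 - ^
  const uint64_t caller_rbx = mem.at(cfa - 16);  // $rbx: .cfa 16 - ^
  const uint64_t caller_rsp = cfa;               // $rsp recovers to the CFA

  assert(caller_rsp == 0x8000000080000000ULL);   // expected.rsp
  assert(caller_rip == 0x40000000c0005510ULL);   // expected.rip
  assert(caller_rbx == 0x5a5beeb38de23be8ULL);   // expected.rbx
  return 0;
}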
+ void CheckWalk() { + RegionFromSection(); + raw_context.rsp = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerAMD64 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + StackFrameAMD64 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameAMD64::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ("enchiridion", frame0->function_name); + EXPECT_EQ(0x40000000c0004000ULL, frame0->function_base); + + StackFrameAMD64 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame1->trust); + ASSERT_EQ((StackFrameAMD64::CONTEXT_VALID_RIP | + StackFrameAMD64::CONTEXT_VALID_RSP | + StackFrameAMD64::CONTEXT_VALID_RBP | + StackFrameAMD64::CONTEXT_VALID_RBX | + StackFrameAMD64::CONTEXT_VALID_R12 | + StackFrameAMD64::CONTEXT_VALID_R13 | + StackFrameAMD64::CONTEXT_VALID_R14 | + StackFrameAMD64::CONTEXT_VALID_R15), + frame1->context_validity); + EXPECT_EQ(expected.rip, frame1->context.rip); + EXPECT_EQ(expected.rsp, frame1->context.rsp); + EXPECT_EQ(expected.rbp, frame1->context.rbp); + EXPECT_EQ(expected.rbx, frame1->context.rbx); + EXPECT_EQ(expected.r12, frame1->context.r12); + EXPECT_EQ(expected.r13, frame1->context.r13); + EXPECT_EQ(expected.r14, frame1->context.r14); + EXPECT_EQ(expected.r15, frame1->context.r15); + EXPECT_EQ("epictetus", frame1->function_name); + } + + // The values we expect to find for the caller's registers. + MDRawContextAMD64 expected; +}; + +class CFI: public CFIFixture, public Test { }; + +TEST_F(CFI, At4000) { + Label frame1_rsp = expected.rsp; + stack_section + .D64(0x40000000c0005510ULL) // return address + .Mark(&frame1_rsp); // This effectively sets stack_section.start(). + raw_context.rip = 0x40000000c0004000ULL; + CheckWalk(); +} + +TEST_F(CFI, At4001) { + Label frame1_rsp = expected.rsp; + stack_section + .D64(0x5a5beeb38de23be8ULL) // saved %rbx + .D64(0x40000000c0005510ULL) // return address + .Mark(&frame1_rsp); // This effectively sets stack_section.start(). + raw_context.rip = 0x40000000c0004001ULL; + raw_context.rbx = 0xbe0487d2f9eafe29ULL; // callee's (distinct) %rbx value + CheckWalk(); +} + +TEST_F(CFI, At4002) { + Label frame1_rsp = expected.rsp; + stack_section + .D64(0x5a5beeb38de23be8ULL) // saved %rbx + .D64(0x40000000c0005510ULL) // return address + .Mark(&frame1_rsp); // This effectively sets stack_section.start(). + raw_context.rip = 0x40000000c0004002ULL; + raw_context.rbx = 0xed1b02e8cc0fc79cULL; // saved %r12 + raw_context.r12 = 0xb0118de918a4bceaULL; // callee's (distinct) %r12 value + CheckWalk(); +} + +TEST_F(CFI, At4003) { + Label frame1_rsp = expected.rsp; + stack_section + .D64(0x0e023828dffd4d81ULL) // garbage + .D64(0x1d20ad8acacbe930ULL) // saved %r13 + .D64(0x319e68b49e3ace0fULL) // garbage + .D64(0x5a5beeb38de23be8ULL) // saved %rbx + .D64(0x40000000c0005510ULL) // return address + .Mark(&frame1_rsp); // This effectively sets stack_section.start(). 
+ raw_context.rip = 0x40000000c0004003ULL; + raw_context.rbx = 0xed1b02e8cc0fc79cULL; // saved %r12 + raw_context.r12 = 0x89d04fa804c87a43ULL; // callee's (distinct) %r12 + raw_context.r13 = 0x5118e02cbdb24b03ULL; // callee's (distinct) %r13 + CheckWalk(); +} + +// The results here should be the same as those at module offset 0x4003. +TEST_F(CFI, At4004) { + Label frame1_rsp = expected.rsp; + stack_section + .D64(0x0e023828dffd4d81ULL) // garbage + .D64(0x1d20ad8acacbe930ULL) // saved %r13 + .D64(0x319e68b49e3ace0fULL) // garbage + .D64(0x5a5beeb38de23be8ULL) // saved %rbx + .D64(0x40000000c0005510ULL) // return address + .Mark(&frame1_rsp); // This effectively sets stack_section.start(). + raw_context.rip = 0x40000000c0004004ULL; + raw_context.rbx = 0xed1b02e8cc0fc79cULL; // saved %r12 + raw_context.r12 = 0x89d04fa804c87a43ULL; // callee's (distinct) %r12 + raw_context.r13 = 0x5118e02cbdb24b03ULL; // callee's (distinct) %r13 + CheckWalk(); +} + +TEST_F(CFI, At4005) { + Label frame1_rsp = expected.rsp; + stack_section + .D64(0x4b516dd035745953ULL) // garbage + .D64(0x1d20ad8acacbe930ULL) // saved %r13 + .D64(0xa6d445e16ae3d872ULL) // garbage + .D64(0x5a5beeb38de23be8ULL) // saved %rbx + .D64(0xaa95fa054aedfbaeULL) // garbage + .Mark(&frame1_rsp); // This effectively sets stack_section.start(). + raw_context.rip = 0x40000000c0004005ULL; + raw_context.rbx = 0xed1b02e8cc0fc79cULL; // saved %r12 + raw_context.r12 = 0x46b1b8868891b34aULL; // callee's %r12 + raw_context.r13 = 0x40000000c0005510ULL; // return address + CheckWalk(); +} + +TEST_F(CFI, At4006) { + Label frame0_rbp; + Label frame1_rsp = expected.rsp; + stack_section + .D64(0x043c6dfceb91aa34ULL) // garbage + .D64(0x1d20ad8acacbe930ULL) // saved %r13 + .D64(0x68995b1de4700266ULL) // saved %rbp + .Mark(&frame0_rbp) // frame pointer points here + .D64(0x5a5beeb38de23be8ULL) // saved %rbx + .D64(0xf015ee516ad89eabULL) // garbage + .Mark(&frame1_rsp); // This effectively sets stack_section.start(). + raw_context.rip = 0x40000000c0004006ULL; + raw_context.rbp = frame0_rbp.Value(); + raw_context.rbx = 0xed1b02e8cc0fc79cULL; // saved %r12 + raw_context.r12 = 0x26e007b341acfebdULL; // callee's %r12 + raw_context.r13 = 0x40000000c0005510ULL; // return address + CheckWalk(); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm.cc new file mode 100644 index 0000000000..e4fc58697d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm.cc @@ -0,0 +1,296 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_arm.cc: arm-specific stackwalker. +// +// See stackwalker_arm.h for documentation. +// +// Author: Mark Mentovai, Ted Mielczarek, Jim Blandy + +#include + +#include "common/scoped_ptr.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/memory_region.h" +#include "google_breakpad/processor/source_line_resolver_interface.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/cfi_frame_info.h" +#include "processor/logging.h" +#include "processor/stackwalker_arm.h" + +namespace google_breakpad { + + +StackwalkerARM::StackwalkerARM(const SystemInfo* system_info, + const MDRawContextARM* context, + int fp_register, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* resolver_helper) + : Stackwalker(system_info, memory, modules, resolver_helper), + context_(context), fp_register_(fp_register), + context_frame_validity_(StackFrameARM::CONTEXT_VALID_ALL) { } + + +StackFrame* StackwalkerARM::GetContextFrame() { + if (!context_) { + BPLOG(ERROR) << "Can't get context frame without context"; + return NULL; + } + + StackFrameARM* frame = new StackFrameARM(); + + // The instruction pointer is stored directly in a register (r15), so pull it + // straight out of the CPU context structure. + frame->context = *context_; + frame->context_validity = context_frame_validity_; + frame->trust = StackFrame::FRAME_TRUST_CONTEXT; + frame->instruction = frame->context.iregs[MD_CONTEXT_ARM_REG_PC]; + + return frame; +} + +StackFrameARM* StackwalkerARM::GetCallerByCFIFrameInfo( + const vector &frames, + CFIFrameInfo* cfi_frame_info) { + StackFrameARM* last_frame = static_cast(frames.back()); + + static const char* register_names[] = { + "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", + "r8", "r9", "r10", "r11", "r12", "sp", "lr", "pc", + "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", + "fps", "cpsr", + NULL + }; + + // Populate a dictionary with the valid register values in last_frame. + CFIFrameInfo::RegisterValueMap callee_registers; + for (int i = 0; register_names[i]; i++) + if (last_frame->context_validity & StackFrameARM::RegisterValidFlag(i)) + callee_registers[register_names[i]] = last_frame->context.iregs[i]; + + // Use the STACK CFI data to recover the caller's register values. + CFIFrameInfo::RegisterValueMap caller_registers; + if (!cfi_frame_info->FindCallerRegs(callee_registers, *memory_, + &caller_registers)) + return NULL; + + // Construct a new stack frame given the values the CFI recovered. 
+ scoped_ptr frame(new StackFrameARM()); + for (int i = 0; register_names[i]; i++) { + CFIFrameInfo::RegisterValueMap::iterator entry = + caller_registers.find(register_names[i]); + if (entry != caller_registers.end()) { + // We recovered the value of this register; fill the context with the + // value from caller_registers. + frame->context_validity |= StackFrameARM::RegisterValidFlag(i); + frame->context.iregs[i] = entry->second; + } else if (4 <= i && i <= 11 && (last_frame->context_validity & + StackFrameARM::RegisterValidFlag(i))) { + // If the STACK CFI data doesn't mention some callee-saves register, and + // it is valid in the callee, assume the callee has not yet changed it. + // Registers r4 through r11 are callee-saves, according to the Procedure + // Call Standard for the ARM Architecture, which the Linux ABI follows. + frame->context_validity |= StackFrameARM::RegisterValidFlag(i); + frame->context.iregs[i] = last_frame->context.iregs[i]; + } + } + // If the CFI doesn't recover the PC explicitly, then use .ra. + if (!(frame->context_validity & StackFrameARM::CONTEXT_VALID_PC)) { + CFIFrameInfo::RegisterValueMap::iterator entry = + caller_registers.find(".ra"); + if (entry != caller_registers.end()) { + if (fp_register_ == -1) { + frame->context_validity |= StackFrameARM::CONTEXT_VALID_PC; + frame->context.iregs[MD_CONTEXT_ARM_REG_PC] = entry->second; + } else { + // The CFI updated the link register and not the program counter. + // Handle getting the program counter from the link register. + frame->context_validity |= StackFrameARM::CONTEXT_VALID_PC; + frame->context_validity |= StackFrameARM::CONTEXT_VALID_LR; + frame->context.iregs[MD_CONTEXT_ARM_REG_LR] = entry->second; + frame->context.iregs[MD_CONTEXT_ARM_REG_PC] = + last_frame->context.iregs[MD_CONTEXT_ARM_REG_LR]; + } + } + } + // If the CFI doesn't recover the SP explicitly, then use .cfa. + if (!(frame->context_validity & StackFrameARM::CONTEXT_VALID_SP)) { + CFIFrameInfo::RegisterValueMap::iterator entry = + caller_registers.find(".cfa"); + if (entry != caller_registers.end()) { + frame->context_validity |= StackFrameARM::CONTEXT_VALID_SP; + frame->context.iregs[MD_CONTEXT_ARM_REG_SP] = entry->second; + } + } + + // If we didn't recover the PC and the SP, then the frame isn't very useful. + static const int essentials = (StackFrameARM::CONTEXT_VALID_SP + | StackFrameARM::CONTEXT_VALID_PC); + if ((frame->context_validity & essentials) != essentials) + return NULL; + + frame->trust = StackFrame::FRAME_TRUST_CFI; + return frame.release(); +} + +StackFrameARM* StackwalkerARM::GetCallerByStackScan( + const vector &frames) { + StackFrameARM* last_frame = static_cast(frames.back()); + uint32_t last_sp = last_frame->context.iregs[MD_CONTEXT_ARM_REG_SP]; + uint32_t caller_sp, caller_pc; + + if (!ScanForReturnAddress(last_sp, &caller_sp, &caller_pc, + frames.size() == 1 /* is_context_frame */)) { + // No plausible return address was found. + return NULL; + } + + // ScanForReturnAddress found a reasonable return address. Advance + // %sp to the location above the one where the return address was + // found. + caller_sp += 4; + + // Create a new stack frame (ownership will be transferred to the caller) + // and fill it in. 
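// A standalone illustration of the scan heuristic used here: walk up the stack
// one 32-bit word at a time and accept the first word that points into a loaded
// module, treating it as a plausible return address; the caller's SP is then
// the slot just above the hit (hence the caller_sp += 4 above). ToyRange,
// ScanHit and the function below are invented for illustration; Breakpad's
// ScanForReturnAddress additionally restricts hits to function bounds when
// symbols are available.
#include <cstdint>
#include <optional>
#include <vector>

struct ToyRange { uint32_t base, size; };            // a module's code range
struct ScanHit { uint32_t address, candidate_pc; };  // where found, and the value

// Assumes sp lies within [stack_base, stack_base + 4 * stack_words.size()).
std::optional<ScanHit> ScanStackForReturnAddress(
    const std::vector<uint32_t>& stack_words, uint32_t stack_base, uint32_t sp,
    const std::vector<ToyRange>& modules) {
  for (uint32_t addr = sp; addr < stack_base + 4 * stack_words.size(); addr += 4) {
    const uint32_t value = stack_words[(addr - stack_base) / 4];
    for (const ToyRange& m : modules)
      if (value >= m.base && value < m.base + m.size)
        return ScanHit{addr, value};  // caller SP would be addr + 4
  }
  return std::nullopt;
}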
+ StackFrameARM* frame = new StackFrameARM(); + + frame->trust = StackFrame::FRAME_TRUST_SCAN; + frame->context = last_frame->context; + frame->context.iregs[MD_CONTEXT_ARM_REG_PC] = caller_pc; + frame->context.iregs[MD_CONTEXT_ARM_REG_SP] = caller_sp; + frame->context_validity = StackFrameARM::CONTEXT_VALID_PC | + StackFrameARM::CONTEXT_VALID_SP; + + return frame; +} + +StackFrameARM* StackwalkerARM::GetCallerByFramePointer( + const vector &frames) { + StackFrameARM* last_frame = static_cast(frames.back()); + + if (!(last_frame->context_validity & + StackFrameARM::RegisterValidFlag(fp_register_))) { + return NULL; + } + + uint32_t last_fp = last_frame->context.iregs[fp_register_]; + + uint32_t caller_fp = 0; + if (last_fp && !memory_->GetMemoryAtAddress(last_fp, &caller_fp)) { + BPLOG(ERROR) << "Unable to read caller_fp from last_fp: 0x" + << std::hex << last_fp; + return NULL; + } + + uint32_t caller_lr = 0; + if (last_fp && !memory_->GetMemoryAtAddress(last_fp + 4, &caller_lr)) { + BPLOG(ERROR) << "Unable to read caller_lr from last_fp + 4: 0x" + << std::hex << (last_fp + 4); + return NULL; + } + + uint32_t caller_sp = last_fp ? last_fp + 8 : + last_frame->context.iregs[MD_CONTEXT_ARM_REG_SP]; + + // Create a new stack frame (ownership will be transferred to the caller) + // and fill it in. + StackFrameARM* frame = new StackFrameARM(); + + frame->trust = StackFrame::FRAME_TRUST_FP; + frame->context = last_frame->context; + frame->context.iregs[fp_register_] = caller_fp; + frame->context.iregs[MD_CONTEXT_ARM_REG_SP] = caller_sp; + frame->context.iregs[MD_CONTEXT_ARM_REG_PC] = + last_frame->context.iregs[MD_CONTEXT_ARM_REG_LR]; + frame->context.iregs[MD_CONTEXT_ARM_REG_LR] = caller_lr; + frame->context_validity = StackFrameARM::CONTEXT_VALID_PC | + StackFrameARM::CONTEXT_VALID_LR | + StackFrameARM::RegisterValidFlag(fp_register_) | + StackFrameARM::CONTEXT_VALID_SP; + return frame; +} + +StackFrame* StackwalkerARM::GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed) { + if (!memory_ || !stack) { + BPLOG(ERROR) << "Can't get caller frame without memory or stack"; + return NULL; + } + + const vector &frames = *stack->frames(); + StackFrameARM* last_frame = static_cast(frames.back()); + scoped_ptr frame; + + // See if there is DWARF call frame information covering this address. + scoped_ptr cfi_frame_info( + frame_symbolizer_->FindCFIFrameInfo(last_frame)); + if (cfi_frame_info.get()) + frame.reset(GetCallerByCFIFrameInfo(frames, cfi_frame_info.get())); + + // If CFI failed, or there wasn't CFI available, fall back + // to frame pointer, if this is configured. + if (fp_register_ >= 0 && !frame.get()) + frame.reset(GetCallerByFramePointer(frames)); + + // If everuthing failed, fall back to stack scanning. + if (stack_scan_allowed && !frame.get()) + frame.reset(GetCallerByStackScan(frames)); + + // If nothing worked, tell the caller. + if (!frame.get()) + return NULL; + + + // An instruction address of zero marks the end of the stack. + if (frame->context.iregs[MD_CONTEXT_ARM_REG_PC] == 0) + return NULL; + + // If the new stack pointer is at a lower address than the old, then + // that's clearly incorrect. Treat this as end-of-stack to enforce + // progress and avoid infinite loops. + if (frame->context.iregs[MD_CONTEXT_ARM_REG_SP] + < last_frame->context.iregs[MD_CONTEXT_ARM_REG_SP]) + return NULL; + + // The new frame's context's PC is the return address, which is one + // instruction past the instruction that caused us to arrive at the + // callee. 
Set new_frame->instruction to one less than the PC. This won't + // reference the beginning of the call instruction, but it's at least + // within it, which is sufficient to get the source line information to + // match up with the line that contains the function call. Callers that + // require the exact return address value may access + // frame->context.iregs[MD_CONTEXT_ARM_REG_PC]. + frame->instruction = frame->context.iregs[MD_CONTEXT_ARM_REG_PC] - 2; + + return frame.release(); +} + + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm.h b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm.h new file mode 100644 index 0000000000..9081a40cd0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm.h @@ -0,0 +1,107 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_arm.h: arm-specific stackwalker. +// +// Provides stack frames given arm register context and a memory region +// corresponding to an arm stack. +// +// Author: Mark Mentovai, Ted Mielczarek + + +#ifndef PROCESSOR_STACKWALKER_ARM_H__ +#define PROCESSOR_STACKWALKER_ARM_H__ + +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/stackwalker.h" + +namespace google_breakpad { + +class CodeModules; + +class StackwalkerARM : public Stackwalker { + public: + // context is an arm context object that gives access to arm-specific + // register state corresponding to the innermost called frame to be + // included in the stack. The other arguments are passed directly through + // to the base Stackwalker constructor. 
+ StackwalkerARM(const SystemInfo* system_info, + const MDRawContextARM* context, + int fp_register, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* frame_symbolizer); + + // Change the context validity mask of the frame returned by + // GetContextFrame to VALID. This is only for use by unit tests; the + // default behavior is correct for all application code. + void SetContextFrameValidity(int valid) { context_frame_validity_ = valid; } + + private: + // Implementation of Stackwalker, using arm context and stack conventions. + virtual StackFrame* GetContextFrame(); + virtual StackFrame* GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed); + + // Use cfi_frame_info (derived from STACK CFI records) to construct + // the frame that called frames.back(). The caller takes ownership + // of the returned frame. Return NULL on failure. + StackFrameARM* GetCallerByCFIFrameInfo(const vector &frames, + CFIFrameInfo* cfi_frame_info); + + // Use the frame pointer. The caller takes ownership of the returned frame. + // Return NULL on failure. + StackFrameARM* GetCallerByFramePointer(const vector &frames); + + // Scan the stack for plausible return addresses. The caller takes ownership + // of the returned frame. Return NULL on failure. + StackFrameARM* GetCallerByStackScan(const vector &frames); + + // Stores the CPU context corresponding to the youngest stack frame, to + // be returned by GetContextFrame. + const MDRawContextARM* context_; + + // The register to use a as frame pointer. The value is -1 if frame pointer + // cannot be used. + int fp_register_; + + // Validity mask for youngest stack frame. This is always + // CONTEXT_VALID_ALL in real use; it is only changeable for the sake of + // unit tests. + int context_frame_validity_; +}; + + +} // namespace google_breakpad + + +#endif // PROCESSOR_STACKWALKER_ARM_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm64.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm64.cc new file mode 100644 index 0000000000..31119a97e1 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm64.cc @@ -0,0 +1,278 @@ +// Copyright (c) 2013 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_arm64.cc: arm64-specific stackwalker. +// +// See stackwalker_arm64.h for documentation. +// +// Author: Mark Mentovai, Ted Mielczarek, Jim Blandy, Colin Blundell + +#include + +#include "common/scoped_ptr.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/memory_region.h" +#include "google_breakpad/processor/source_line_resolver_interface.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/cfi_frame_info.h" +#include "processor/logging.h" +#include "processor/stackwalker_arm64.h" + +namespace google_breakpad { + + +StackwalkerARM64::StackwalkerARM64(const SystemInfo* system_info, + const MDRawContextARM64* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* resolver_helper) + : Stackwalker(system_info, memory, modules, resolver_helper), + context_(context), + context_frame_validity_(StackFrameARM64::CONTEXT_VALID_ALL) { } + + +StackFrame* StackwalkerARM64::GetContextFrame() { + if (!context_) { + BPLOG(ERROR) << "Can't get context frame without context"; + return NULL; + } + + StackFrameARM64* frame = new StackFrameARM64(); + + // The instruction pointer is stored directly in a register (x32), so pull it + // straight out of the CPU context structure. + frame->context = *context_; + frame->context_validity = context_frame_validity_; + frame->trust = StackFrame::FRAME_TRUST_CONTEXT; + frame->instruction = frame->context.iregs[MD_CONTEXT_ARM64_REG_PC]; + + return frame; +} + +StackFrameARM64* StackwalkerARM64::GetCallerByCFIFrameInfo( + const vector &frames, + CFIFrameInfo* cfi_frame_info) { + StackFrameARM64* last_frame = static_cast(frames.back()); + + static const char* register_names[] = { + "x0", "x1", "x2", "x3", "x4", "x5", "x6", "x7", + "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", + "x16", "x17", "x18", "x19", "x20", "x21", "x22", "x23", + "x24", "x25", "x26", "x27", "x28", "x29", "x30", "sp", + "pc", NULL + }; + + // Populate a dictionary with the valid register values in last_frame. + CFIFrameInfo::RegisterValueMap callee_registers; + for (int i = 0; register_names[i]; i++) { + if (last_frame->context_validity & StackFrameARM64::RegisterValidFlag(i)) + callee_registers[register_names[i]] = last_frame->context.iregs[i]; + } + + // Use the STACK CFI data to recover the caller's register values. + CFIFrameInfo::RegisterValueMap caller_registers; + if (!cfi_frame_info->FindCallerRegs(callee_registers, *memory_, + &caller_registers)) { + return NULL; + } + // Construct a new stack frame given the values the CFI recovered. + scoped_ptr frame(new StackFrameARM64()); + for (int i = 0; register_names[i]; i++) { + CFIFrameInfo::RegisterValueMap::iterator entry = + caller_registers.find(register_names[i]); + if (entry != caller_registers.end()) { + // We recovered the value of this register; fill the context with the + // value from caller_registers. 
+ frame->context_validity |= StackFrameARM64::RegisterValidFlag(i); + frame->context.iregs[i] = entry->second; + } else if (19 <= i && i <= 29 && (last_frame->context_validity & + StackFrameARM64::RegisterValidFlag(i))) { + // If the STACK CFI data doesn't mention some callee-saves register, and + // it is valid in the callee, assume the callee has not yet changed it. + // Registers r19 through r29 are callee-saves, according to the Procedure + // Call Standard for the ARM AARCH64 Architecture, which the Linux ABI + // follows. + frame->context_validity |= StackFrameARM64::RegisterValidFlag(i); + frame->context.iregs[i] = last_frame->context.iregs[i]; + } + } + // If the CFI doesn't recover the PC explicitly, then use .ra. + if (!(frame->context_validity & StackFrameARM64::CONTEXT_VALID_PC)) { + CFIFrameInfo::RegisterValueMap::iterator entry = + caller_registers.find(".ra"); + if (entry != caller_registers.end()) { + frame->context_validity |= StackFrameARM64::CONTEXT_VALID_PC; + frame->context.iregs[MD_CONTEXT_ARM64_REG_PC] = entry->second; + } + } + // If the CFI doesn't recover the SP explicitly, then use .cfa. + if (!(frame->context_validity & StackFrameARM64::CONTEXT_VALID_SP)) { + CFIFrameInfo::RegisterValueMap::iterator entry = + caller_registers.find(".cfa"); + if (entry != caller_registers.end()) { + frame->context_validity |= StackFrameARM64::CONTEXT_VALID_SP; + frame->context.iregs[MD_CONTEXT_ARM64_REG_SP] = entry->second; + } + } + + // If we didn't recover the PC and the SP, then the frame isn't very useful. + static const uint64_t essentials = (StackFrameARM64::CONTEXT_VALID_SP + | StackFrameARM64::CONTEXT_VALID_PC); + if ((frame->context_validity & essentials) != essentials) + return NULL; + + frame->trust = StackFrame::FRAME_TRUST_CFI; + return frame.release(); +} + +StackFrameARM64* StackwalkerARM64::GetCallerByStackScan( + const vector &frames) { + StackFrameARM64* last_frame = static_cast(frames.back()); + uint64_t last_sp = last_frame->context.iregs[MD_CONTEXT_ARM64_REG_SP]; + uint64_t caller_sp, caller_pc; + + if (!ScanForReturnAddress(last_sp, &caller_sp, &caller_pc, + frames.size() == 1 /* is_context_frame */)) { + // No plausible return address was found. + return NULL; + } + + // ScanForReturnAddress found a reasonable return address. Advance + // %sp to the location above the one where the return address was + // found. + caller_sp += 8; + + // Create a new stack frame (ownership will be transferred to the caller) + // and fill it in. 
+ StackFrameARM64* frame = new StackFrameARM64(); + + frame->trust = StackFrame::FRAME_TRUST_SCAN; + frame->context = last_frame->context; + frame->context.iregs[MD_CONTEXT_ARM64_REG_PC] = caller_pc; + frame->context.iregs[MD_CONTEXT_ARM64_REG_SP] = caller_sp; + frame->context_validity = StackFrameARM64::CONTEXT_VALID_PC | + StackFrameARM64::CONTEXT_VALID_SP; + + return frame; +} + +StackFrameARM64* StackwalkerARM64::GetCallerByFramePointer( + const vector &frames) { + StackFrameARM64* last_frame = static_cast(frames.back()); + + uint64_t last_fp = last_frame->context.iregs[MD_CONTEXT_ARM64_REG_FP]; + + uint64_t caller_fp = 0; + if (last_fp && !memory_->GetMemoryAtAddress(last_fp, &caller_fp)) { + BPLOG(ERROR) << "Unable to read caller_fp from last_fp: 0x" + << std::hex << last_fp; + return NULL; + } + + uint64_t caller_lr = 0; + if (last_fp && !memory_->GetMemoryAtAddress(last_fp + 8, &caller_lr)) { + BPLOG(ERROR) << "Unable to read caller_lr from last_fp + 8: 0x" + << std::hex << (last_fp + 8); + return NULL; + } + + uint64_t caller_sp = last_fp ? last_fp + 16 : + last_frame->context.iregs[MD_CONTEXT_ARM64_REG_SP]; + + // Create a new stack frame (ownership will be transferred to the caller) + // and fill it in. + StackFrameARM64* frame = new StackFrameARM64(); + + frame->trust = StackFrame::FRAME_TRUST_FP; + frame->context = last_frame->context; + frame->context.iregs[MD_CONTEXT_ARM64_REG_FP] = caller_fp; + frame->context.iregs[MD_CONTEXT_ARM64_REG_SP] = caller_sp; + frame->context.iregs[MD_CONTEXT_ARM64_REG_PC] = + last_frame->context.iregs[MD_CONTEXT_ARM64_REG_LR]; + frame->context.iregs[MD_CONTEXT_ARM64_REG_LR] = caller_lr; + frame->context_validity = StackFrameARM64::CONTEXT_VALID_PC | + StackFrameARM64::CONTEXT_VALID_LR | + StackFrameARM64::CONTEXT_VALID_FP | + StackFrameARM64::CONTEXT_VALID_SP; + return frame; +} + +StackFrame* StackwalkerARM64::GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed) { + if (!memory_ || !stack) { + BPLOG(ERROR) << "Can't get caller frame without memory or stack"; + return NULL; + } + + const vector &frames = *stack->frames(); + StackFrameARM64* last_frame = static_cast(frames.back()); + scoped_ptr frame; + + // See if there is DWARF call frame information covering this address. + scoped_ptr cfi_frame_info( + frame_symbolizer_->FindCFIFrameInfo(last_frame)); + if (cfi_frame_info.get()) + frame.reset(GetCallerByCFIFrameInfo(frames, cfi_frame_info.get())); + + // If CFI failed, or there wasn't CFI available, fall back to frame pointer. + if (!frame.get()) + frame.reset(GetCallerByFramePointer(frames)); + + // If everything failed, fall back to stack scanning. + if (stack_scan_allowed && !frame.get()) + frame.reset(GetCallerByStackScan(frames)); + + // If nothing worked, tell the caller. + if (!frame.get()) + return NULL; + + // An instruction address of zero marks the end of the stack. + if (frame->context.iregs[MD_CONTEXT_ARM64_REG_PC] == 0) + return NULL; + + // If the new stack pointer is at a lower address than the old, then + // that's clearly incorrect. Treat this as end-of-stack to enforce + // progress and avoid infinite loops. + if (frame->context.iregs[MD_CONTEXT_ARM64_REG_SP] + < last_frame->context.iregs[MD_CONTEXT_ARM64_REG_SP]) + return NULL; + + // The new frame's context's PC is the return address, which is one + // instruction past the instruction that caused us to arrive at the callee. 
+ // ARM64 instructions have a uniform 4-byte encoding, so subtracting 4 off + // the return address gets back to the beginning of the call instruction. + // Callers that require the exact return address value may access + // frame->context.iregs[MD_CONTEXT_ARM64_REG_PC]. + frame->instruction = frame->context.iregs[MD_CONTEXT_ARM64_REG_PC] - 4; + + return frame.release(); +} + + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm64.h b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm64.h new file mode 100644 index 0000000000..121e824672 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm64.h @@ -0,0 +1,104 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2013 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_arm64.h: arm64-specific stackwalker. +// +// Provides stack frames given arm64 register context and a memory region +// corresponding to an arm64 stack. +// +// Author: Mark Mentovai, Ted Mielczarek, Colin Blundell + + +#ifndef PROCESSOR_STACKWALKER_ARM64_H__ +#define PROCESSOR_STACKWALKER_ARM64_H__ + +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/stackwalker.h" + +namespace google_breakpad { + +class CodeModules; + +class StackwalkerARM64 : public Stackwalker { + public: + // context is an arm64 context object that gives access to arm64-specific + // register state corresponding to the innermost called frame to be + // included in the stack. The other arguments are passed directly through + // to the base Stackwalker constructor. + StackwalkerARM64(const SystemInfo* system_info, + const MDRawContextARM64* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* frame_symbolizer); + + // Change the context validity mask of the frame returned by + // GetContextFrame to VALID. 
This is only for use by unit tests; the + // default behavior is correct for all application code. + void SetContextFrameValidity(uint64_t valid) { + context_frame_validity_ = valid; + } + + private: + // Implementation of Stackwalker, using arm64 context and stack conventions. + virtual StackFrame* GetContextFrame(); + virtual StackFrame* GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed); + + // Use cfi_frame_info (derived from STACK CFI records) to construct + // the frame that called frames.back(). The caller takes ownership + // of the returned frame. Return NULL on failure. + StackFrameARM64* GetCallerByCFIFrameInfo(const vector &frames, + CFIFrameInfo* cfi_frame_info); + + // Use the frame pointer. The caller takes ownership of the returned frame. + // Return NULL on failure. + StackFrameARM64* GetCallerByFramePointer(const vector &frames); + + // Scan the stack for plausible return addresses. The caller takes ownership + // of the returned frame. Return NULL on failure. + StackFrameARM64* GetCallerByStackScan(const vector &frames); + + // Stores the CPU context corresponding to the youngest stack frame, to + // be returned by GetContextFrame. + const MDRawContextARM64* context_; + + // Validity mask for youngest stack frame. This is always + // CONTEXT_VALID_ALL in real use; it is only changeable for the sake of + // unit tests. + uint64_t context_frame_validity_; +}; + + +} // namespace google_breakpad + + +#endif // PROCESSOR_STACKWALKER_ARM64_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm64_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm64_unittest.cc new file mode 100644 index 0000000000..dd617f691e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm64_unittest.cc @@ -0,0 +1,880 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// stackwalker_arm64_unittest.cc: Unit tests for StackwalkerARM64 class. 
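Editor's aside, not part of this patch: the fixtures below drive the walker through the same entry points a minidump processor would use. A minimal sketch under that assumption, using only types declared in the files added above (the fake memory, module and symbolizer setup is elided, and the function name is illustrative):

#include <vector>

#include "google_breakpad/processor/call_stack.h"
#include "google_breakpad/processor/stack_frame_cpu.h"
#include "processor/stackwalker_arm64.h"

using google_breakpad::CallStack;
using google_breakpad::CodeModule;
using google_breakpad::StackFrame;
using google_breakpad::StackwalkerARM64;

// Walks one arm64 stack and inspects how each caller frame was recovered.
bool WalkArm64Stack(const google_breakpad::SystemInfo* system_info,
                    const MDRawContextARM64* context,
                    google_breakpad::MemoryRegion* stack_memory,
                    const google_breakpad::CodeModules* modules,
                    google_breakpad::StackFrameSymbolizer* symbolizer) {
  StackwalkerARM64 walker(system_info, context, stack_memory, modules,
                          symbolizer);
  CallStack call_stack;
  std::vector<const CodeModule*> without_symbols, corrupt_symbols;
  if (!walker.Walk(&call_stack, &without_symbols, &corrupt_symbols))
    return false;
  for (const StackFrame* frame : *call_stack.frames()) {
    // frame->trust records how the frame was recovered: FRAME_TRUST_CONTEXT
    // for frame 0, then FRAME_TRUST_CFI, _FP or _SCAN for the callers.
    if (frame->trust == StackFrame::FRAME_TRUST_SCAN) {
      // Scanned frames are the least reliable; treat them with suspicion.
    }
  }
  return true;
}

The point the tests keep re-checking is that ordering: CFI when STACK CFI records cover the PC, then the frame-pointer chain, then scanning, with scanning suppressible via Stackwalker::set_max_frames_scanned.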
+ +#include +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/test_assembler.h" +#include "common/using_std_string.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/code_module.h" +#include "google_breakpad/processor/source_line_resolver_interface.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/stackwalker_unittest_utils.h" +#include "processor/stackwalker_arm64.h" +#include "processor/windows_frame_info.h" + +using google_breakpad::BasicSourceLineResolver; +using google_breakpad::CallStack; +using google_breakpad::CodeModule; +using google_breakpad::StackFrameSymbolizer; +using google_breakpad::StackFrame; +using google_breakpad::StackFrameARM64; +using google_breakpad::Stackwalker; +using google_breakpad::StackwalkerARM64; +using google_breakpad::SystemInfo; +using google_breakpad::WindowsFrameInfo; +using google_breakpad::test_assembler::kLittleEndian; +using google_breakpad::test_assembler::Label; +using google_breakpad::test_assembler::Section; +using std::vector; +using testing::_; +using testing::AnyNumber; +using testing::Return; +using testing::SetArgumentPointee; +using testing::Test; + +class StackwalkerARM64Fixture { + public: + StackwalkerARM64Fixture() + : stack_section(kLittleEndian), + // Give the two modules reasonable standard locations and names + // for tests to play with. + module1(0x40000000, 0x10000, "module1", "version1"), + module2(0x50000000, 0x10000, "module2", "version2") { + // Identify the system as an iOS system. + system_info.os = "iOS"; + system_info.os_short = "ios"; + system_info.cpu = "arm64"; + system_info.cpu_info = ""; + + // Put distinctive values in the raw CPU context. + BrandContext(&raw_context); + + // Create some modules with some stock debugging information. + modules.Add(&module1); + modules.Add(&module2); + + // By default, none of the modules have symbol info; call + // SetModuleSymbols to override this. + EXPECT_CALL(supplier, GetCStringSymbolData(_, _, _, _, _)) + .WillRepeatedly(Return(MockSymbolSupplier::NOT_FOUND)); + + // Avoid GMOCK WARNING "Uninteresting mock function call - returning + // directly" for FreeSymbolData(). + EXPECT_CALL(supplier, FreeSymbolData(_)).Times(AnyNumber()); + + // Reset max_frames_scanned since it's static. + Stackwalker::set_max_frames_scanned(1024); + } + + // Set the Breakpad symbol information that supplier should return for + // MODULE to INFO. + void SetModuleSymbols(MockCodeModule *module, const string &info) { + size_t buffer_size; + char *buffer = supplier.CopySymbolDataAndOwnTheCopy(info, &buffer_size); + EXPECT_CALL(supplier, GetCStringSymbolData(module, &system_info, _, _, _)) + .WillRepeatedly(DoAll(SetArgumentPointee<3>(buffer), + SetArgumentPointee<4>(buffer_size), + Return(MockSymbolSupplier::FOUND))); + } + + // Populate stack_region with the contents of stack_section. Use + // stack_section.start() as the region's starting address. + void RegionFromSection() { + string contents; + ASSERT_TRUE(stack_section.GetContents(&contents)); + stack_region.Init(stack_section.start().Value(), contents); + } + + // Fill RAW_CONTEXT with pseudo-random data, for round-trip checking. 
+ void BrandContext(MDRawContextARM64 *raw_context) { + uint8_t x = 173; + for (size_t i = 0; i < sizeof(*raw_context); i++) + reinterpret_cast(raw_context)[i] = (x += 17); + } + + SystemInfo system_info; + MDRawContextARM64 raw_context; + Section stack_section; + MockMemoryRegion stack_region; + MockCodeModule module1; + MockCodeModule module2; + MockCodeModules modules; + MockSymbolSupplier supplier; + BasicSourceLineResolver resolver; + CallStack call_stack; + const vector *frames; +}; + +class SanityCheck: public StackwalkerARM64Fixture, public Test { }; + +TEST_F(SanityCheck, NoResolver) { + // Since the context's frame pointer is garbage, the stack walk will end after + // the first frame. + StackFrameSymbolizer frame_symbolizer(NULL, NULL); + StackwalkerARM64 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + // This should succeed even without a resolver or supplier. + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); + StackFrameARM64 *frame = static_cast(frames->at(0)); + // Check that the values from the original raw context made it + // through to the context in the stack frame. + EXPECT_EQ(0, memcmp(&raw_context, &frame->context, sizeof(raw_context))); +} + +class GetContextFrame: public StackwalkerARM64Fixture, public Test { }; + +// The stackwalker should be able to produce the context frame even +// without stack memory present. +TEST_F(GetContextFrame, NoStackMemory) { + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM64 walker(&system_info, &raw_context, NULL, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); + StackFrameARM64 *frame = static_cast(frames->at(0)); + // Check that the values from the original raw context made it + // through to the context in the stack frame. + EXPECT_EQ(0, memcmp(&raw_context, &frame->context, sizeof(raw_context))); +} + +class GetCallerFrame: public StackwalkerARM64Fixture, public Test { }; + +TEST_F(GetCallerFrame, ScanWithoutSymbols) { + // When the stack walker resorts to scanning the stack, + // only addresses located within loaded modules are + // considered valid return addresses. + // Force scanning through three frames to ensure that the + // stack pointer is set properly in scan-recovered frames. 
+ stack_section.start() = 0x80000000; + uint64_t return_address1 = 0x50000100; + uint64_t return_address2 = 0x50000900; + Label frame1_sp, frame2_sp; + stack_section + // frame 0 + .Append(16, 0) // space + + .D64(0x40090000) // junk that's not + .D64(0x60000000) // a return address + + .D64(return_address1) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(16, 0) // space + + .D64(0xF0000000) // more junk + .D64(0x0000000D) + + .D64(return_address2) // actual return address + // frame 2 + .Mark(&frame2_sp) + .Append(64, 0); // end of stack + RegionFromSection(); + + raw_context.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x40005510; + raw_context.iregs[MD_CONTEXT_ARM64_REG_SP] = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM64 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(2U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ("module2", modules_without_symbols[1]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(3U, frames->size()); + + StackFrameARM64 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameARM64::CONTEXT_VALID_ALL, + frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); + + StackFrameARM64 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame1->trust); + ASSERT_EQ((StackFrameARM64::CONTEXT_VALID_PC | + StackFrameARM64::CONTEXT_VALID_SP), + frame1->context_validity); + EXPECT_EQ(return_address1, frame1->context.iregs[MD_CONTEXT_ARM64_REG_PC]); + EXPECT_EQ(frame1_sp.Value(), frame1->context.iregs[MD_CONTEXT_ARM64_REG_SP]); + + StackFrameARM64 *frame2 = static_cast(frames->at(2)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame2->trust); + ASSERT_EQ((StackFrameARM64::CONTEXT_VALID_PC | + StackFrameARM64::CONTEXT_VALID_SP), + frame2->context_validity); + EXPECT_EQ(return_address2, frame2->context.iregs[MD_CONTEXT_ARM64_REG_PC]); + EXPECT_EQ(frame2_sp.Value(), frame2->context.iregs[MD_CONTEXT_ARM64_REG_SP]); +} + +TEST_F(GetCallerFrame, ScanWithFunctionSymbols) { + // During stack scanning, if a potential return address + // is located within a loaded module that has symbols, + // it is only considered a valid return address if it + // lies within a function's bounds. + stack_section.start() = 0x80000000; + uint64_t return_address = 0x50000200; + Label frame1_sp; + + stack_section + // frame 0 + .Append(16, 0) // space + + .D64(0x40090000) // junk that's not + .D64(0x60000000) // a return address + + .D64(0x40001000) // a couple of plausible addresses + .D64(0x5000F000) // that are not within functions + + .D64(return_address) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(64, 0); // end of stack + RegionFromSection(); + + raw_context.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x40000200; + raw_context.iregs[MD_CONTEXT_ARM64_REG_SP] = stack_section.start().Value(); + + SetModuleSymbols(&module1, + // The youngest frame's function. + "FUNC 100 400 10 monotreme\n"); + SetModuleSymbols(&module2, + // The calling frame's function. 
+ "FUNC 100 400 10 marsupial\n"); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM64 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + StackFrameARM64 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameARM64::CONTEXT_VALID_ALL, + frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); + EXPECT_EQ("monotreme", frame0->function_name); + EXPECT_EQ(0x40000100ULL, frame0->function_base); + + StackFrameARM64 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame1->trust); + ASSERT_EQ((StackFrameARM64::CONTEXT_VALID_PC | + StackFrameARM64::CONTEXT_VALID_SP), + frame1->context_validity); + EXPECT_EQ(return_address, frame1->context.iregs[MD_CONTEXT_ARM64_REG_PC]); + EXPECT_EQ(frame1_sp.Value(), frame1->context.iregs[MD_CONTEXT_ARM64_REG_SP]); + EXPECT_EQ("marsupial", frame1->function_name); + EXPECT_EQ(0x50000100ULL, frame1->function_base); +} + +TEST_F(GetCallerFrame, ScanFirstFrame) { + // If the stackwalker resorts to stack scanning, it will scan much + // farther to find the caller of the context frame. + stack_section.start() = 0x80000000; + uint64_t return_address1 = 0x50000100; + uint64_t return_address2 = 0x50000900; + Label frame1_sp, frame2_sp; + stack_section + // frame 0 + .Append(32, 0) // space + + .D64(0x40090000) // junk that's not + .D64(0x60000000) // a return address + + .Append(96, 0) // more space + + .D64(return_address1) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(32, 0) // space + + .D64(0xF0000000) // more junk + .D64(0x0000000D) + + .Append(256, 0) // more space + + .D64(return_address2) // actual return address + // (won't be found) + // frame 2 + .Mark(&frame2_sp) + .Append(64, 0); // end of stack + RegionFromSection(); + + raw_context.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x40005510; + raw_context.iregs[MD_CONTEXT_ARM64_REG_SP] = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM64 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(2U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ("module2", modules_without_symbols[1]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + StackFrameARM64 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameARM64::CONTEXT_VALID_ALL, + frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); + + StackFrameARM64 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame1->trust); + ASSERT_EQ((StackFrameARM64::CONTEXT_VALID_PC | + StackFrameARM64::CONTEXT_VALID_SP), + frame1->context_validity); + 
EXPECT_EQ(return_address1, frame1->context.iregs[MD_CONTEXT_ARM64_REG_PC]); + EXPECT_EQ(frame1_sp.Value(), frame1->context.iregs[MD_CONTEXT_ARM64_REG_SP]); +} + +// Test that set_max_frames_scanned prevents using stack scanning +// to find caller frames. +TEST_F(GetCallerFrame, ScanningNotAllowed) { + // When the stack walker resorts to scanning the stack, + // only addresses located within loaded modules are + // considered valid return addresses. + stack_section.start() = 0x80000000; + uint64_t return_address1 = 0x50000100; + uint64_t return_address2 = 0x50000900; + Label frame1_sp, frame2_sp; + stack_section + // frame 0 + .Append(16, 0) // space + + .D64(0x40090000) // junk that's not + .D64(0x60000000) // a return address + + .D64(return_address1) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(16, 0) // space + + .D64(0xF0000000) // more junk + .D64(0x0000000D) + + .D64(return_address2) // actual return address + // frame 2 + .Mark(&frame2_sp) + .Append(64, 0); // end of stack + RegionFromSection(); + + raw_context.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x40005510; + raw_context.iregs[MD_CONTEXT_ARM64_REG_SP] = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM64 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + Stackwalker::set_max_frames_scanned(0); + + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); + + StackFrameARM64 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameARM64::CONTEXT_VALID_ALL, + frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); +} + +class GetFramesByFramePointer: public StackwalkerARM64Fixture, public Test { }; + +TEST_F(GetFramesByFramePointer, OnlyFramePointer) { + stack_section.start() = 0x80000000; + uint64_t return_address1 = 0x50000100; + uint64_t return_address2 = 0x50000900; + Label frame1_sp, frame2_sp; + Label frame1_fp, frame2_fp; + stack_section + // frame 0 + .Append(64, 0) // Whatever values on the stack. + .D64(0x0000000D) // junk that's not + .D64(0xF0000000) // a return address. + + .Mark(&frame1_fp) // Next fp will point to the next value. + .D64(frame2_fp) // Save current frame pointer. + .D64(return_address2) // Save current link register. + .Mark(&frame1_sp) + + // frame 1 + .Append(64, 0) // Whatever values on the stack. + .D64(0x0000000D) // junk that's not + .D64(0xF0000000) // a return address. + + .Mark(&frame2_fp) + .D64(0) + .D64(0) + .Mark(&frame2_sp) + + // frame 2 + .Append(64, 0) // Whatever values on the stack. + .D64(0x0000000D) // junk that's not + .D64(0xF0000000); // a return address. 
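// Editor's note, not part of the upstream test: the stack built above mimics
// the standard AArch64 frame record that GetCallerByFramePointer expects.
// Each frame pointer marks a two-slot pair {saved fp, saved lr}, so the
// walker reads the caller's fp at *fp, its lr at *(fp + 8), and places the
// caller's sp at fp + 16, which is exactly how frame1_fp and frame2_fp are
// laid out here.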
+ RegionFromSection(); + + + raw_context.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x40005510; + raw_context.iregs[MD_CONTEXT_ARM64_REG_LR] = return_address1; + raw_context.iregs[MD_CONTEXT_ARM64_REG_FP] = frame1_fp.Value(); + raw_context.iregs[MD_CONTEXT_ARM64_REG_SP] = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM64 walker(&system_info, &raw_context, + &stack_region, &modules, &frame_symbolizer); + + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(2U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ("module2", modules_without_symbols[1]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(3U, frames->size()); + + StackFrameARM64 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameARM64::CONTEXT_VALID_ALL, + frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); + + StackFrameARM64 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_FP, frame1->trust); + ASSERT_EQ((StackFrameARM64::CONTEXT_VALID_PC | + StackFrameARM64::CONTEXT_VALID_LR | + StackFrameARM64::CONTEXT_VALID_FP | + StackFrameARM64::CONTEXT_VALID_SP), + frame1->context_validity); + EXPECT_EQ(return_address1, frame1->context.iregs[MD_CONTEXT_ARM64_REG_PC]); + EXPECT_EQ(return_address2, frame1->context.iregs[MD_CONTEXT_ARM64_REG_LR]); + EXPECT_EQ(frame1_sp.Value(), frame1->context.iregs[MD_CONTEXT_ARM64_REG_SP]); + EXPECT_EQ(frame2_fp.Value(), + frame1->context.iregs[MD_CONTEXT_ARM64_REG_FP]); + + StackFrameARM64 *frame2 = static_cast(frames->at(2)); + EXPECT_EQ(StackFrame::FRAME_TRUST_FP, frame2->trust); + ASSERT_EQ((StackFrameARM64::CONTEXT_VALID_PC | + StackFrameARM64::CONTEXT_VALID_LR | + StackFrameARM64::CONTEXT_VALID_FP | + StackFrameARM64::CONTEXT_VALID_SP), + frame2->context_validity); + EXPECT_EQ(return_address2, frame2->context.iregs[MD_CONTEXT_ARM64_REG_PC]); + EXPECT_EQ(0U, frame2->context.iregs[MD_CONTEXT_ARM64_REG_LR]); + EXPECT_EQ(frame2_sp.Value(), frame2->context.iregs[MD_CONTEXT_ARM64_REG_SP]); + EXPECT_EQ(0U, frame2->context.iregs[MD_CONTEXT_ARM64_REG_FP]); +} + +struct CFIFixture: public StackwalkerARM64Fixture { + CFIFixture() { + // Provide a bunch of STACK CFI records; we'll walk to the caller + // from every point in this series, expecting to find the same set + // of register values. + SetModuleSymbols(&module1, + // The youngest frame's function. + "FUNC 4000 1000 10 enchiridion\n" + // Initially, nothing has been pushed on the stack, + // and the return address is still in the link + // register (x30). + "STACK CFI INIT 4000 100 .cfa: sp 0 + .ra: x30\n" + // Push x19, x20, the frame pointer and the link register. + "STACK CFI 4001 .cfa: sp 32 + .ra: .cfa -8 + ^" + " x19: .cfa -32 + ^ x20: .cfa -24 + ^ " + " x29: .cfa -16 + ^\n" + // Save x19..x22 in x0..x3: verify that we populate + // the youngest frame with all the values we have. + "STACK CFI 4002 x19: x0 x20: x1 x21: x2 x22: x3\n" + // Restore x19..x22. Save the non-callee-saves register x1. + "STACK CFI 4003 .cfa: sp 40 + x1: .cfa 40 - ^" + " x19: x19 x20: x20 x21: x21 x22: x22\n" + // Move the .cfa back eight bytes, to point at the return + // address, and restore the sp explicitly. 
+ "STACK CFI 4005 .cfa: sp 32 + x1: .cfa 32 - ^" + " x29: .cfa 8 - ^ .ra: .cfa ^ sp: .cfa 8 +\n" + // Recover the PC explicitly from a new stack slot; + // provide garbage for the .ra. + "STACK CFI 4006 .cfa: sp 40 + pc: .cfa 40 - ^\n" + + // The calling function. + "FUNC 5000 1000 10 epictetus\n" + // Mark it as end of stack. + "STACK CFI INIT 5000 1000 .cfa: 0 .ra: 0\n" + + // A function whose CFI makes the stack pointer + // go backwards. + "FUNC 6000 1000 20 palinal\n" + "STACK CFI INIT 6000 1000 .cfa: sp 8 - .ra: x30\n" + + // A function with CFI expressions that can't be + // evaluated. + "FUNC 7000 1000 20 rhetorical\n" + "STACK CFI INIT 7000 1000 .cfa: moot .ra: ambiguous\n"); + + // Provide some distinctive values for the caller's registers. + expected.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x0000000040005510L; + expected.iregs[MD_CONTEXT_ARM64_REG_SP] = 0x0000000080000000L; + expected.iregs[19] = 0x5e68b5d5b5d55e68L; + expected.iregs[20] = 0x34f3ebd1ebd134f3L; + expected.iregs[21] = 0x74bca31ea31e74bcL; + expected.iregs[22] = 0x16b32dcb2dcb16b3L; + expected.iregs[23] = 0x21372ada2ada2137L; + expected.iregs[24] = 0x557dbbbbbbbb557dL; + expected.iregs[25] = 0x8ca748bf48bf8ca7L; + expected.iregs[26] = 0x21f0ab46ab4621f0L; + expected.iregs[27] = 0x146732b732b71467L; + expected.iregs[28] = 0xa673645fa673645fL; + expected.iregs[MD_CONTEXT_ARM64_REG_FP] = 0xe11081128112e110L; + + // Expect CFI to recover all callee-saves registers. Since CFI is the + // only stack frame construction technique we have, aside from the + // context frame itself, there's no way for us to have a set of valid + // registers smaller than this. + expected_validity = (StackFrameARM64::CONTEXT_VALID_PC | + StackFrameARM64::CONTEXT_VALID_SP | + StackFrameARM64::CONTEXT_VALID_X19 | + StackFrameARM64::CONTEXT_VALID_X20 | + StackFrameARM64::CONTEXT_VALID_X21 | + StackFrameARM64::CONTEXT_VALID_X22 | + StackFrameARM64::CONTEXT_VALID_X23 | + StackFrameARM64::CONTEXT_VALID_X24 | + StackFrameARM64::CONTEXT_VALID_X25 | + StackFrameARM64::CONTEXT_VALID_X26 | + StackFrameARM64::CONTEXT_VALID_X27 | + StackFrameARM64::CONTEXT_VALID_X28 | + StackFrameARM64::CONTEXT_VALID_FP); + + // By default, context frames provide all registers, as normal. + context_frame_validity = StackFrameARM64::CONTEXT_VALID_ALL; + + // By default, registers are unchanged. + raw_context = expected; + } + + // Walk the stack, using stack_section as the contents of the stack + // and raw_context as the current register values. (Set the stack + // pointer to the stack's starting address.) Expect two stack + // frames; in the older frame, expect the callee-saves registers to + // have values matching those in 'expected'. 
+ void CheckWalk() { + RegionFromSection(); + raw_context.iregs[MD_CONTEXT_ARM64_REG_SP] = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM64 walker(&system_info, &raw_context, &stack_region, + &modules, &frame_symbolizer); + walker.SetContextFrameValidity(context_frame_validity); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + StackFrameARM64 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(context_frame_validity, frame0->context_validity); + EXPECT_EQ("enchiridion", frame0->function_name); + EXPECT_EQ(0x0000000040004000UL, frame0->function_base); + + StackFrameARM64 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame1->trust); + ASSERT_EQ(expected_validity, frame1->context_validity); + if (expected_validity & StackFrameARM64::CONTEXT_VALID_X1) + EXPECT_EQ(expected.iregs[1], frame1->context.iregs[1]); + if (expected_validity & StackFrameARM64::CONTEXT_VALID_X19) + EXPECT_EQ(expected.iregs[19], frame1->context.iregs[19]); + if (expected_validity & StackFrameARM64::CONTEXT_VALID_X20) + EXPECT_EQ(expected.iregs[20], frame1->context.iregs[20]); + if (expected_validity & StackFrameARM64::CONTEXT_VALID_X21) + EXPECT_EQ(expected.iregs[21], frame1->context.iregs[21]); + if (expected_validity & StackFrameARM64::CONTEXT_VALID_X22) + EXPECT_EQ(expected.iregs[22], frame1->context.iregs[22]); + if (expected_validity & StackFrameARM64::CONTEXT_VALID_X23) + EXPECT_EQ(expected.iregs[23], frame1->context.iregs[23]); + if (expected_validity & StackFrameARM64::CONTEXT_VALID_X24) + EXPECT_EQ(expected.iregs[24], frame1->context.iregs[24]); + if (expected_validity & StackFrameARM64::CONTEXT_VALID_X25) + EXPECT_EQ(expected.iregs[25], frame1->context.iregs[25]); + if (expected_validity & StackFrameARM64::CONTEXT_VALID_X26) + EXPECT_EQ(expected.iregs[26], frame1->context.iregs[26]); + if (expected_validity & StackFrameARM64::CONTEXT_VALID_X27) + EXPECT_EQ(expected.iregs[27], frame1->context.iregs[27]); + if (expected_validity & StackFrameARM64::CONTEXT_VALID_X28) + EXPECT_EQ(expected.iregs[28], frame1->context.iregs[28]); + if (expected_validity & StackFrameARM64::CONTEXT_VALID_FP) + EXPECT_EQ(expected.iregs[MD_CONTEXT_ARM64_REG_FP], + frame1->context.iregs[MD_CONTEXT_ARM64_REG_FP]); + + // We would never have gotten a frame in the first place if the SP + // and PC weren't valid or ->instruction weren't set. + EXPECT_EQ(expected.iregs[MD_CONTEXT_ARM64_REG_SP], + frame1->context.iregs[MD_CONTEXT_ARM64_REG_SP]); + EXPECT_EQ(expected.iregs[MD_CONTEXT_ARM64_REG_PC], + frame1->context.iregs[MD_CONTEXT_ARM64_REG_PC]); + EXPECT_EQ(expected.iregs[MD_CONTEXT_ARM64_REG_PC], + frame1->instruction + 4); + EXPECT_EQ("epictetus", frame1->function_name); + } + + // The values we expect to find for the caller's registers. + MDRawContextARM64 expected; + + // The validity mask for expected. + uint64_t expected_validity; + + // The validity mask to impose on the context frame. 
+ uint64_t context_frame_validity; +}; + +class CFI: public CFIFixture, public Test { }; + +TEST_F(CFI, At4000) { + stack_section.start() = expected.iregs[MD_CONTEXT_ARM64_REG_SP]; + raw_context.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x0000000040004000L; + raw_context.iregs[MD_CONTEXT_ARM64_REG_LR] = 0x0000000040005510L; + CheckWalk(); +} + +TEST_F(CFI, At4001) { + Label frame1_sp = expected.iregs[MD_CONTEXT_ARM64_REG_SP]; + stack_section + .D64(0x5e68b5d5b5d55e68L) // saved x19 + .D64(0x34f3ebd1ebd134f3L) // saved x20 + .D64(0xe11081128112e110L) // saved fp + .D64(0x0000000040005510L) // return address + .Mark(&frame1_sp); // This effectively sets stack_section.start(). + raw_context.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x0000000040004001L; + // distinct callee x19, x20 and fp + raw_context.iregs[19] = 0xadc9f635a635adc9L; + raw_context.iregs[20] = 0x623135ac35ac6231L; + raw_context.iregs[MD_CONTEXT_ARM64_REG_FP] = 0x5fc4be14be145fc4L; + CheckWalk(); +} + +// As above, but unwind from a context that has only the PC and SP. +TEST_F(CFI, At4001LimitedValidity) { + Label frame1_sp = expected.iregs[MD_CONTEXT_ARM64_REG_SP]; + stack_section + .D64(0x5e68b5d5b5d55e68L) // saved x19 + .D64(0x34f3ebd1ebd134f3L) // saved x20 + .D64(0xe11081128112e110L) // saved fp + .D64(0x0000000040005510L) // return address + .Mark(&frame1_sp); // This effectively sets stack_section.start(). + context_frame_validity = + StackFrameARM64::CONTEXT_VALID_PC | StackFrameARM64::CONTEXT_VALID_SP; + raw_context.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x0000000040004001L; + raw_context.iregs[MD_CONTEXT_ARM64_REG_FP] = 0x5fc4be14be145fc4L; + + expected_validity = (StackFrameARM64::CONTEXT_VALID_PC + | StackFrameARM64::CONTEXT_VALID_SP + | StackFrameARM64::CONTEXT_VALID_FP + | StackFrameARM64::CONTEXT_VALID_X19 + | StackFrameARM64::CONTEXT_VALID_X20); + CheckWalk(); +} + +TEST_F(CFI, At4002) { + Label frame1_sp = expected.iregs[MD_CONTEXT_ARM64_REG_SP]; + stack_section + .D64(0xff3dfb81fb81ff3dL) // no longer saved x19 + .D64(0x34f3ebd1ebd134f3L) // no longer saved x20 + .D64(0xe11081128112e110L) // saved fp + .D64(0x0000000040005510L) // return address + .Mark(&frame1_sp); // This effectively sets stack_section.start(). + raw_context.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x0000000040004002L; + raw_context.iregs[0] = 0x5e68b5d5b5d55e68L; // saved x19 + raw_context.iregs[1] = 0x34f3ebd1ebd134f3L; // saved x20 + raw_context.iregs[2] = 0x74bca31ea31e74bcL; // saved x21 + raw_context.iregs[3] = 0x16b32dcb2dcb16b3L; // saved x22 + raw_context.iregs[19] = 0xadc9f635a635adc9L; // distinct callee x19 + raw_context.iregs[20] = 0x623135ac35ac6231L; // distinct callee x20 + raw_context.iregs[21] = 0xac4543564356ac45L; // distinct callee x21 + raw_context.iregs[22] = 0x2561562f562f2561L; // distinct callee x22 + // distinct callee fp + raw_context.iregs[MD_CONTEXT_ARM64_REG_FP] = 0x5fc4be14be145fc4L; + CheckWalk(); +} + +TEST_F(CFI, At4003) { + Label frame1_sp = expected.iregs[MD_CONTEXT_ARM64_REG_SP]; + stack_section + .D64(0xdd5a48c848c8dd5aL) // saved x1 (even though it's not callee-saves) + .D64(0xff3dfb81fb81ff3dL) // no longer saved x19 + .D64(0x34f3ebd1ebd134f3L) // no longer saved x20 + .D64(0xe11081128112e110L) // saved fp + .D64(0x0000000040005510L) // return address + .Mark(&frame1_sp); // This effectively sets stack_section.start(). 
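// Editor's note, not part of the upstream test: with the rules in force at
// offset 4003 the CFA is sp + 40, i.e. frame1_sp, so the walker recovers x1
// from *(CFA - 40), x29 from *(CFA - 16) and the return address from
// *(CFA - 8) (the first, fourth and fifth slots pushed above), while x19 and
// x20 come straight from the callee's own registers via the
// "x19: x19 x20: x20" rules.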
+ raw_context.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x0000000040004003L; + // distinct callee x1 and fp + raw_context.iregs[1] = 0xfb756319fb756319L; + raw_context.iregs[MD_CONTEXT_ARM64_REG_FP] = 0x5fc4be14be145fc4L; + // caller's x1 + expected.iregs[1] = 0xdd5a48c848c8dd5aL; + expected_validity |= StackFrameARM64::CONTEXT_VALID_X1; + CheckWalk(); +} + +// We have no new rule at module offset 0x4004, so the results here should +// be the same as those at module offset 0x4003. +TEST_F(CFI, At4004) { + Label frame1_sp = expected.iregs[MD_CONTEXT_ARM64_REG_SP]; + stack_section + .D64(0xdd5a48c848c8dd5aL) // saved x1 (even though it's not callee-saves) + .D64(0xff3dfb81fb81ff3dL) // no longer saved x19 + .D64(0x34f3ebd1ebd134f3L) // no longer saved x20 + .D64(0xe11081128112e110L) // saved fp + .D64(0x0000000040005510L) // return address + .Mark(&frame1_sp); // This effectively sets stack_section.start(). + raw_context.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x0000000040004004L; + // distinct callee x1 and fp + raw_context.iregs[1] = 0xfb756319fb756319L; + raw_context.iregs[MD_CONTEXT_ARM64_REG_FP] = 0x5fc4be14be145fc4L; + // caller's x1 + expected.iregs[1] = 0xdd5a48c848c8dd5aL; + expected_validity |= StackFrameARM64::CONTEXT_VALID_X1; + CheckWalk(); +} + +// Here we move the .cfa, but provide an explicit rule to recover the SP, +// so again there should be no change in the registers recovered. +TEST_F(CFI, At4005) { + Label frame1_sp = expected.iregs[MD_CONTEXT_ARM64_REG_SP]; + stack_section + .D64(0xdd5a48c848c8dd5aL) // saved x1 (even though it's not callee-saves) + .D64(0xff3dfb81fb81ff3dL) // no longer saved x19 + .D64(0x34f3ebd1ebd134f3L) // no longer saved x20 + .D64(0xe11081128112e110L) // saved fp + .D64(0x0000000040005510L) // return address + .Mark(&frame1_sp); // This effectively sets stack_section.start(). + raw_context.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x0000000040004005L; + raw_context.iregs[1] = 0xfb756319fb756319L; // distinct callee x1 + expected.iregs[1] = 0xdd5a48c848c8dd5aL; // caller's x1 + expected_validity |= StackFrameARM64::CONTEXT_VALID_X1; + CheckWalk(); +} + +// Here we provide an explicit rule for the PC, and have the saved .ra be +// bogus. +TEST_F(CFI, At4006) { + Label frame1_sp = expected.iregs[MD_CONTEXT_ARM64_REG_SP]; + stack_section + .D64(0x0000000040005510L) // saved pc + .D64(0xdd5a48c848c8dd5aL) // saved x1 (even though it's not callee-saves) + .D64(0xff3dfb81fb81ff3dL) // no longer saved x19 + .D64(0x34f3ebd1ebd134f3L) // no longer saved x20 + .D64(0xe11081128112e110L) // saved fp + .D64(0xf8d157835783f8d1L) // .ra rule recovers this, which is garbage + .Mark(&frame1_sp); // This effectively sets stack_section.start(). + raw_context.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x0000000040004006L; + raw_context.iregs[1] = 0xfb756319fb756319L; // distinct callee x1 + expected.iregs[1] = 0xdd5a48c848c8dd5aL; // caller's x1 + expected_validity |= StackFrameARM64::CONTEXT_VALID_X1; + CheckWalk(); +} + +// Check that we reject rules that would cause the stack pointer to +// move in the wrong direction. 
+TEST_F(CFI, RejectBackwards) { + raw_context.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x0000000040006000L; + raw_context.iregs[MD_CONTEXT_ARM64_REG_SP] = 0x0000000080000000L; + raw_context.iregs[MD_CONTEXT_ARM64_REG_LR] = 0x0000000040005510L; + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM64 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); +} + +// Check that we reject rules whose expressions' evaluation fails. +TEST_F(CFI, RejectBadExpressions) { + raw_context.iregs[MD_CONTEXT_ARM64_REG_PC] = 0x0000000040007000L; + raw_context.iregs[MD_CONTEXT_ARM64_REG_SP] = 0x0000000080000000L; + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM64 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm_unittest.cc new file mode 100644 index 0000000000..c73322e6b6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_arm_unittest.cc @@ -0,0 +1,974 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// stackwalker_arm_unittest.cc: Unit tests for StackwalkerARM class. 
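Editor's aside, not part of this patch: the 32-bit walker differs from the arm64 one mainly in the extra fp_register constructor argument. A hedged construction sketch follows; the fake context/memory/module/symbolizer setup is elided, the function name is illustrative, and the remark about which register serves as frame pointer (commonly r11 under AAPCS, r7 on Apple's 32-bit ABI) is general ARM convention rather than anything this patch configures.

#include <vector>

#include "google_breakpad/processor/call_stack.h"
#include "processor/stackwalker_arm.h"

bool WalkArmStack(const google_breakpad::SystemInfo* system_info,
                  const MDRawContextARM* context,
                  google_breakpad::MemoryRegion* stack_memory,
                  const google_breakpad::CodeModules* modules,
                  google_breakpad::StackFrameSymbolizer* symbolizer,
                  google_breakpad::CallStack* call_stack) {
  // Passing -1, as the tests below do, disables GetCallerByFramePointer, so
  // only CFI and (when allowed) stack scanning are tried; a non-negative
  // register index turns on the frame-pointer fallback with 4-byte slots.
  google_breakpad::StackwalkerARM walker(system_info, context,
                                         /* fp_register */ -1,
                                         stack_memory, modules, symbolizer);
  std::vector<const google_breakpad::CodeModule*> no_syms, corrupt_syms;
  return walker.Walk(call_stack, &no_syms, &corrupt_syms);
}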
+ +#include +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/test_assembler.h" +#include "common/using_std_string.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/code_module.h" +#include "google_breakpad/processor/source_line_resolver_interface.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/stackwalker_unittest_utils.h" +#include "processor/stackwalker_arm.h" +#include "processor/windows_frame_info.h" + +using google_breakpad::BasicSourceLineResolver; +using google_breakpad::CallStack; +using google_breakpad::CodeModule; +using google_breakpad::StackFrameSymbolizer; +using google_breakpad::StackFrame; +using google_breakpad::StackFrameARM; +using google_breakpad::Stackwalker; +using google_breakpad::StackwalkerARM; +using google_breakpad::SystemInfo; +using google_breakpad::WindowsFrameInfo; +using google_breakpad::test_assembler::kLittleEndian; +using google_breakpad::test_assembler::Label; +using google_breakpad::test_assembler::Section; +using std::vector; +using testing::_; +using testing::AnyNumber; +using testing::Return; +using testing::SetArgumentPointee; +using testing::Test; + +class StackwalkerARMFixture { + public: + StackwalkerARMFixture() + : stack_section(kLittleEndian), + // Give the two modules reasonable standard locations and names + // for tests to play with. + module1(0x40000000, 0x10000, "module1", "version1"), + module2(0x50000000, 0x10000, "module2", "version2") { + // Identify the system as a Linux system. + system_info.os = "Linux"; + system_info.os_short = "linux"; + system_info.os_version = "Lugubrious Labrador"; + system_info.cpu = "arm"; + system_info.cpu_info = ""; + + // Put distinctive values in the raw CPU context. + BrandContext(&raw_context); + + // Create some modules with some stock debugging information. + modules.Add(&module1); + modules.Add(&module2); + + // By default, none of the modules have symbol info; call + // SetModuleSymbols to override this. + EXPECT_CALL(supplier, GetCStringSymbolData(_, _, _, _, _)) + .WillRepeatedly(Return(MockSymbolSupplier::NOT_FOUND)); + + // Avoid GMOCK WARNING "Uninteresting mock function call - returning + // directly" for FreeSymbolData(). + EXPECT_CALL(supplier, FreeSymbolData(_)).Times(AnyNumber()); + + // Reset max_frames_scanned since it's static. + Stackwalker::set_max_frames_scanned(1024); + } + + // Set the Breakpad symbol information that supplier should return for + // MODULE to INFO. + void SetModuleSymbols(MockCodeModule *module, const string &info) { + size_t buffer_size; + char *buffer = supplier.CopySymbolDataAndOwnTheCopy(info, &buffer_size); + EXPECT_CALL(supplier, GetCStringSymbolData(module, &system_info, _, _, _)) + .WillRepeatedly(DoAll(SetArgumentPointee<3>(buffer), + SetArgumentPointee<4>(buffer_size), + Return(MockSymbolSupplier::FOUND))); + } + + // Populate stack_region with the contents of stack_section. Use + // stack_section.start() as the region's starting address. + void RegionFromSection() { + string contents; + ASSERT_TRUE(stack_section.GetContents(&contents)); + stack_region.Init(stack_section.start().Value(), contents); + } + + // Fill RAW_CONTEXT with pseudo-random data, for round-trip checking. 
+ void BrandContext(MDRawContextARM *raw_context) { + uint8_t x = 173; + for (size_t i = 0; i < sizeof(*raw_context); i++) + reinterpret_cast(raw_context)[i] = (x += 17); + } + + SystemInfo system_info; + MDRawContextARM raw_context; + Section stack_section; + MockMemoryRegion stack_region; + MockCodeModule module1; + MockCodeModule module2; + MockCodeModules modules; + MockSymbolSupplier supplier; + BasicSourceLineResolver resolver; + CallStack call_stack; + const vector *frames; +}; + +class SanityCheck: public StackwalkerARMFixture, public Test { }; + +TEST_F(SanityCheck, NoResolver) { + // Since we have no call frame information, and all unwinding + // requires call frame information, the stack walk will end after + // the first frame. + StackFrameSymbolizer frame_symbolizer(NULL, NULL); + StackwalkerARM walker(&system_info, &raw_context, -1, &stack_region, &modules, + &frame_symbolizer); + // This should succeed even without a resolver or supplier. + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); + StackFrameARM *frame = static_cast(frames->at(0)); + // Check that the values from the original raw context made it + // through to the context in the stack frame. + EXPECT_EQ(0, memcmp(&raw_context, &frame->context, sizeof(raw_context))); +} + +class GetContextFrame: public StackwalkerARMFixture, public Test { }; + +TEST_F(GetContextFrame, Simple) { + // Since we have no call frame information, and all unwinding + // requires call frame information, the stack walk will end after + // the first frame. + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM walker(&system_info, &raw_context, -1, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); + StackFrameARM *frame = static_cast(frames->at(0)); + // Check that the values from the original raw context made it + // through to the context in the stack frame. + EXPECT_EQ(0, memcmp(&raw_context, &frame->context, sizeof(raw_context))); +} + +// The stackwalker should be able to produce the context frame even +// without stack memory present. +TEST_F(GetContextFrame, NoStackMemory) { + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM walker(&system_info, &raw_context, -1, NULL, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); + StackFrameARM *frame = static_cast(frames->at(0)); + // Check that the values from the original raw context made it + // through to the context in the stack frame. 
+ EXPECT_EQ(0, memcmp(&raw_context, &frame->context, sizeof(raw_context))); +} + +class GetCallerFrame: public StackwalkerARMFixture, public Test { }; + +TEST_F(GetCallerFrame, ScanWithoutSymbols) { + // When the stack walker resorts to scanning the stack, + // only addresses located within loaded modules are + // considered valid return addresses. + // Force scanning through three frames to ensure that the + // stack pointer is set properly in scan-recovered frames. + stack_section.start() = 0x80000000; + uint32_t return_address1 = 0x50000100; + uint32_t return_address2 = 0x50000900; + Label frame1_sp, frame2_sp; + stack_section + // frame 0 + .Append(16, 0) // space + + .D32(0x40090000) // junk that's not + .D32(0x60000000) // a return address + + .D32(return_address1) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(16, 0) // space + + .D32(0xF0000000) // more junk + .D32(0x0000000D) + + .D32(return_address2) // actual return address + // frame 2 + .Mark(&frame2_sp) + .Append(32, 0); // end of stack + RegionFromSection(); + + raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40005510; + raw_context.iregs[MD_CONTEXT_ARM_REG_SP] = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM walker(&system_info, &raw_context, -1, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(2U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ("module2", modules_without_symbols[1]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(3U, frames->size()); + + StackFrameARM *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameARM::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); + + StackFrameARM *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame1->trust); + ASSERT_EQ((StackFrameARM::CONTEXT_VALID_PC | + StackFrameARM::CONTEXT_VALID_SP), + frame1->context_validity); + EXPECT_EQ(return_address1, frame1->context.iregs[MD_CONTEXT_ARM_REG_PC]); + EXPECT_EQ(frame1_sp.Value(), frame1->context.iregs[MD_CONTEXT_ARM_REG_SP]); + + StackFrameARM *frame2 = static_cast(frames->at(2)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame2->trust); + ASSERT_EQ((StackFrameARM::CONTEXT_VALID_PC | + StackFrameARM::CONTEXT_VALID_SP), + frame2->context_validity); + EXPECT_EQ(return_address2, frame2->context.iregs[MD_CONTEXT_ARM_REG_PC]); + EXPECT_EQ(frame2_sp.Value(), frame2->context.iregs[MD_CONTEXT_ARM_REG_SP]); +} + +TEST_F(GetCallerFrame, ScanWithFunctionSymbols) { + // During stack scanning, if a potential return address + // is located within a loaded module that has symbols, + // it is only considered a valid return address if it + // lies within a function's bounds. 
+ stack_section.start() = 0x80000000; + uint32_t return_address = 0x50000200; + Label frame1_sp; + + stack_section + // frame 0 + .Append(16, 0) // space + + .D32(0x40090000) // junk that's not + .D32(0x60000000) // a return address + + .D32(0x40001000) // a couple of plausible addresses + .D32(0x5000F000) // that are not within functions + + .D32(return_address) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(32, 0); // end of stack + RegionFromSection(); + + raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40000200; + raw_context.iregs[MD_CONTEXT_ARM_REG_SP] = stack_section.start().Value(); + + SetModuleSymbols(&module1, + // The youngest frame's function. + "FUNC 100 400 10 monotreme\n"); + SetModuleSymbols(&module2, + // The calling frame's function. + "FUNC 100 400 10 marsupial\n"); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM walker(&system_info, &raw_context, -1, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + StackFrameARM *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameARM::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); + EXPECT_EQ("monotreme", frame0->function_name); + EXPECT_EQ(0x40000100U, frame0->function_base); + + StackFrameARM *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame1->trust); + ASSERT_EQ((StackFrameARM::CONTEXT_VALID_PC | + StackFrameARM::CONTEXT_VALID_SP), + frame1->context_validity); + EXPECT_EQ(return_address, frame1->context.iregs[MD_CONTEXT_ARM_REG_PC]); + EXPECT_EQ(frame1_sp.Value(), frame1->context.iregs[MD_CONTEXT_ARM_REG_SP]); + EXPECT_EQ("marsupial", frame1->function_name); + EXPECT_EQ(0x50000100U, frame1->function_base); +} + +TEST_F(GetCallerFrame, ScanFirstFrame) { + // If the stackwalker resorts to stack scanning, it will scan much + // farther to find the caller of the context frame. 
+ stack_section.start() = 0x80000000; + uint32_t return_address1 = 0x50000100; + uint32_t return_address2 = 0x50000900; + Label frame1_sp, frame2_sp; + stack_section + // frame 0 + .Append(32, 0) // space + + .D32(0x40090000) // junk that's not + .D32(0x60000000) // a return address + + .Append(96, 0) // more space + + .D32(return_address1) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(32, 0) // space + + .D32(0xF0000000) // more junk + .D32(0x0000000D) + + .Append(96, 0) // more space + + .D32(return_address2) // actual return address + // (won't be found) + // frame 2 + .Mark(&frame2_sp) + .Append(32, 0); // end of stack + RegionFromSection(); + + raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40005510; + raw_context.iregs[MD_CONTEXT_ARM_REG_SP] = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM walker(&system_info, &raw_context, -1, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(2U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ("module2", modules_without_symbols[1]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + StackFrameARM *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameARM::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); + + StackFrameARM *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame1->trust); + ASSERT_EQ((StackFrameARM::CONTEXT_VALID_PC | + StackFrameARM::CONTEXT_VALID_SP), + frame1->context_validity); + EXPECT_EQ(return_address1, frame1->context.iregs[MD_CONTEXT_ARM_REG_PC]); + EXPECT_EQ(frame1_sp.Value(), frame1->context.iregs[MD_CONTEXT_ARM_REG_SP]); +} + +// Test that set_max_frames_scanned prevents using stack scanning +// to find caller frames. +TEST_F(GetCallerFrame, ScanningNotAllowed) { + // When the stack walker resorts to scanning the stack, + // only addresses located within loaded modules are + // considered valid return addresses. 
+ stack_section.start() = 0x80000000; + uint32_t return_address1 = 0x50000100; + uint32_t return_address2 = 0x50000900; + Label frame1_sp, frame2_sp; + stack_section + // frame 0 + .Append(16, 0) // space + + .D32(0x40090000) // junk that's not + .D32(0x60000000) // a return address + + .D32(return_address1) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(16, 0) // space + + .D32(0xF0000000) // more junk + .D32(0x0000000D) + + .D32(return_address2) // actual return address + // frame 2 + .Mark(&frame2_sp) + .Append(32, 0); // end of stack + RegionFromSection(); + + raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40005510; + raw_context.iregs[MD_CONTEXT_ARM_REG_SP] = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM walker(&system_info, &raw_context, -1, &stack_region, &modules, + &frame_symbolizer); + Stackwalker::set_max_frames_scanned(0); + + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); + + StackFrameARM *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameARM::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); +} + +struct CFIFixture: public StackwalkerARMFixture { + CFIFixture() { + // Provide a bunch of STACK CFI records; we'll walk to the caller + // from every point in this series, expecting to find the same set + // of register values. + SetModuleSymbols(&module1, + // The youngest frame's function. + "FUNC 4000 1000 10 enchiridion\n" + // Initially, nothing has been pushed on the stack, + // and the return address is still in the link register. + "STACK CFI INIT 4000 100 .cfa: sp .ra: lr\n" + // Push r4, the frame pointer, and the link register. + "STACK CFI 4001 .cfa: sp 12 + r4: .cfa 12 - ^" + " r11: .cfa 8 - ^ .ra: .cfa 4 - ^\n" + // Save r4..r7 in r0..r3: verify that we populate + // the youngest frame with all the values we have. + "STACK CFI 4002 r4: r0 r5: r1 r6: r2 r7: r3\n" + // Restore r4..r7. Save the non-callee-saves register r1. + "STACK CFI 4003 .cfa: sp 16 + r1: .cfa 16 - ^" + " r4: r4 r5: r5 r6: r6 r7: r7\n" + // Move the .cfa back four bytes, to point at the return + // address, and restore the sp explicitly. + "STACK CFI 4005 .cfa: sp 12 + r1: .cfa 12 - ^" + " r11: .cfa 4 - ^ .ra: .cfa ^ sp: .cfa 4 +\n" + // Recover the PC explicitly from a new stack slot; + // provide garbage for the .ra. + "STACK CFI 4006 .cfa: sp 16 + pc: .cfa 16 - ^\n" + + // The calling function. + "FUNC 5000 1000 10 epictetus\n" + // Mark it as end of stack. + "STACK CFI INIT 5000 1000 .cfa: 0 .ra: 0\n" + + // A function whose CFI makes the stack pointer + // go backwards. + "FUNC 6000 1000 20 palinal\n" + "STACK CFI INIT 6000 1000 .cfa: sp 4 - .ra: lr\n" + + // A function with CFI expressions that can't be + // evaluated. + "FUNC 7000 1000 20 rhetorical\n" + "STACK CFI INIT 7000 1000 .cfa: moot .ra: ambiguous\n"); + + // Provide some distinctive values for the caller's registers. 
+ expected.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40005510; + expected.iregs[MD_CONTEXT_ARM_REG_SP] = 0x80000000; + expected.iregs[4] = 0xb5d55e68; + expected.iregs[5] = 0xebd134f3; + expected.iregs[6] = 0xa31e74bc; + expected.iregs[7] = 0x2dcb16b3; + expected.iregs[8] = 0x2ada2137; + expected.iregs[9] = 0xbbbb557d; + expected.iregs[10] = 0x48bf8ca7; + expected.iregs[MD_CONTEXT_ARM_REG_FP] = 0x8112e110; + + // Expect CFI to recover all callee-saves registers. Since CFI is the + // only stack frame construction technique we have, aside from the + // context frame itself, there's no way for us to have a set of valid + // registers smaller than this. + expected_validity = (StackFrameARM::CONTEXT_VALID_PC | + StackFrameARM::CONTEXT_VALID_SP | + StackFrameARM::CONTEXT_VALID_R4 | + StackFrameARM::CONTEXT_VALID_R5 | + StackFrameARM::CONTEXT_VALID_R6 | + StackFrameARM::CONTEXT_VALID_R7 | + StackFrameARM::CONTEXT_VALID_R8 | + StackFrameARM::CONTEXT_VALID_R9 | + StackFrameARM::CONTEXT_VALID_R10 | + StackFrameARM::CONTEXT_VALID_FP); + + // By default, context frames provide all registers, as normal. + context_frame_validity = StackFrameARM::CONTEXT_VALID_ALL; + + // By default, registers are unchanged. + raw_context = expected; + } + + // Walk the stack, using stack_section as the contents of the stack + // and raw_context as the current register values. (Set the stack + // pointer to the stack's starting address.) Expect two stack + // frames; in the older frame, expect the callee-saves registers to + // have values matching those in 'expected'. + void CheckWalk() { + RegionFromSection(); + raw_context.iregs[MD_CONTEXT_ARM_REG_SP] = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM walker(&system_info, &raw_context, -1, &stack_region, + &modules, &frame_symbolizer); + walker.SetContextFrameValidity(context_frame_validity); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + StackFrameARM *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(context_frame_validity, frame0->context_validity); + EXPECT_EQ("enchiridion", frame0->function_name); + EXPECT_EQ(0x40004000U, frame0->function_base); + + StackFrameARM *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame1->trust); + ASSERT_EQ(expected_validity, frame1->context_validity); + if (expected_validity & StackFrameARM::CONTEXT_VALID_R1) + EXPECT_EQ(expected.iregs[1], frame1->context.iregs[1]); + if (expected_validity & StackFrameARM::CONTEXT_VALID_R4) + EXPECT_EQ(expected.iregs[4], frame1->context.iregs[4]); + if (expected_validity & StackFrameARM::CONTEXT_VALID_R5) + EXPECT_EQ(expected.iregs[5], frame1->context.iregs[5]); + if (expected_validity & StackFrameARM::CONTEXT_VALID_R6) + EXPECT_EQ(expected.iregs[6], frame1->context.iregs[6]); + if (expected_validity & StackFrameARM::CONTEXT_VALID_R7) + EXPECT_EQ(expected.iregs[7], frame1->context.iregs[7]); + if (expected_validity & StackFrameARM::CONTEXT_VALID_R8) + EXPECT_EQ(expected.iregs[8], frame1->context.iregs[8]); + if (expected_validity & StackFrameARM::CONTEXT_VALID_R9) + EXPECT_EQ(expected.iregs[9], frame1->context.iregs[9]); + if (expected_validity & 
StackFrameARM::CONTEXT_VALID_R10) + EXPECT_EQ(expected.iregs[10], frame1->context.iregs[10]); + if (expected_validity & StackFrameARM::CONTEXT_VALID_FP) + EXPECT_EQ(expected.iregs[MD_CONTEXT_ARM_REG_FP], + frame1->context.iregs[MD_CONTEXT_ARM_REG_FP]); + + // We would never have gotten a frame in the first place if the SP + // and PC weren't valid or ->instruction weren't set. + EXPECT_EQ(expected.iregs[MD_CONTEXT_ARM_REG_SP], + frame1->context.iregs[MD_CONTEXT_ARM_REG_SP]); + EXPECT_EQ(expected.iregs[MD_CONTEXT_ARM_REG_PC], + frame1->context.iregs[MD_CONTEXT_ARM_REG_PC]); + EXPECT_EQ(expected.iregs[MD_CONTEXT_ARM_REG_PC], + frame1->instruction + 2); + EXPECT_EQ("epictetus", frame1->function_name); + } + + // The values we expect to find for the caller's registers. + MDRawContextARM expected; + + // The validity mask for expected. + int expected_validity; + + // The validity mask to impose on the context frame. + int context_frame_validity; +}; + +class CFI: public CFIFixture, public Test { }; + +TEST_F(CFI, At4000) { + stack_section.start() = expected.iregs[MD_CONTEXT_ARM_REG_SP]; + raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40004000; + raw_context.iregs[MD_CONTEXT_ARM_REG_LR] = 0x40005510; + CheckWalk(); +} + +TEST_F(CFI, At4001) { + Label frame1_sp = expected.iregs[MD_CONTEXT_ARM_REG_SP]; + stack_section + .D32(0xb5d55e68) // saved r4 + .D32(0x8112e110) // saved fp + .D32(0x40005510) // return address + .Mark(&frame1_sp); // This effectively sets stack_section.start(). + raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40004001; + raw_context.iregs[4] = 0x635adc9f; // distinct callee r4 + raw_context.iregs[MD_CONTEXT_ARM_REG_FP] = 0xbe145fc4; // distinct callee fp + CheckWalk(); +} + +// As above, but unwind from a context that has only the PC and SP. +TEST_F(CFI, At4001LimitedValidity) { + context_frame_validity = + StackFrameARM::CONTEXT_VALID_PC | StackFrameARM::CONTEXT_VALID_SP; + raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40004001; + raw_context.iregs[MD_CONTEXT_ARM_REG_FP] = 0xbe145fc4; // distinct callee fp + Label frame1_sp = expected.iregs[MD_CONTEXT_ARM_REG_SP]; + stack_section + .D32(0xb5d55e68) // saved r4 + .D32(0x8112e110) // saved fp + .D32(0x40005510) // return address + .Mark(&frame1_sp); // This effectively sets stack_section.start(). + expected_validity = (StackFrameARM::CONTEXT_VALID_PC + | StackFrameARM::CONTEXT_VALID_SP + | StackFrameARM::CONTEXT_VALID_FP + | StackFrameARM::CONTEXT_VALID_R4); + CheckWalk(); +} + +TEST_F(CFI, At4002) { + Label frame1_sp = expected.iregs[MD_CONTEXT_ARM_REG_SP]; + stack_section + .D32(0xfb81ff3d) // no longer saved r4 + .D32(0x8112e110) // saved fp + .D32(0x40005510) // return address + .Mark(&frame1_sp); // This effectively sets stack_section.start(). 
+ raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40004002; + raw_context.iregs[0] = 0xb5d55e68; // saved r4 + raw_context.iregs[1] = 0xebd134f3; // saved r5 + raw_context.iregs[2] = 0xa31e74bc; // saved r6 + raw_context.iregs[3] = 0x2dcb16b3; // saved r7 + raw_context.iregs[4] = 0xfdd35466; // distinct callee r4 + raw_context.iregs[5] = 0xf18c946c; // distinct callee r5 + raw_context.iregs[6] = 0xac2079e8; // distinct callee r6 + raw_context.iregs[7] = 0xa449829f; // distinct callee r7 + raw_context.iregs[MD_CONTEXT_ARM_REG_FP] = 0xbe145fc4; // distinct callee fp + CheckWalk(); +} + +TEST_F(CFI, At4003) { + Label frame1_sp = expected.iregs[MD_CONTEXT_ARM_REG_SP]; + stack_section + .D32(0x48c8dd5a) // saved r1 (even though it's not callee-saves) + .D32(0xcb78040e) // no longer saved r4 + .D32(0x8112e110) // saved fp + .D32(0x40005510) // return address + .Mark(&frame1_sp); // This effectively sets stack_section.start(). + raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40004003; + raw_context.iregs[1] = 0xfb756319; // distinct callee r1 + raw_context.iregs[MD_CONTEXT_ARM_REG_FP] = 0x0a2857ea; // distinct callee fp + expected.iregs[1] = 0x48c8dd5a; // caller's r1 + expected_validity |= StackFrameARM::CONTEXT_VALID_R1; + CheckWalk(); +} + +// We have no new rule at module offset 0x4004, so the results here should +// be the same as those at module offset 0x4003. +TEST_F(CFI, At4004) { + Label frame1_sp = expected.iregs[MD_CONTEXT_ARM_REG_SP]; + stack_section + .D32(0x48c8dd5a) // saved r1 (even though it's not callee-saves) + .D32(0xcb78040e) // no longer saved r4 + .D32(0x8112e110) // saved fp + .D32(0x40005510) // return address + .Mark(&frame1_sp); // This effectively sets stack_section.start(). + raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40004004; + raw_context.iregs[1] = 0xfb756319; // distinct callee r1 + expected.iregs[1] = 0x48c8dd5a; // caller's r1 + expected_validity |= StackFrameARM::CONTEXT_VALID_R1; + CheckWalk(); +} + +// Here we move the .cfa, but provide an explicit rule to recover the SP, +// so again there should be no change in the registers recovered. +TEST_F(CFI, At4005) { + Label frame1_sp = expected.iregs[MD_CONTEXT_ARM_REG_SP]; + stack_section + .D32(0x48c8dd5a) // saved r1 (even though it's not callee-saves) + .D32(0xf013f841) // no longer saved r4 + .D32(0x8112e110) // saved fp + .D32(0x40005510) // return address + .Mark(&frame1_sp); // This effectively sets stack_section.start(). + raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40004005; + raw_context.iregs[1] = 0xfb756319; // distinct callee r1 + expected.iregs[1] = 0x48c8dd5a; // caller's r1 + expected_validity |= StackFrameARM::CONTEXT_VALID_R1; + CheckWalk(); +} + +// Here we provide an explicit rule for the PC, and have the saved .ra be +// bogus. +TEST_F(CFI, At4006) { + Label frame1_sp = expected.iregs[MD_CONTEXT_ARM_REG_SP]; + stack_section + .D32(0x40005510) // saved pc + .D32(0x48c8dd5a) // saved r1 (even though it's not callee-saves) + .D32(0xf013f841) // no longer saved r4 + .D32(0x8112e110) // saved fp + .D32(0xf8d15783) // .ra rule recovers this, which is garbage + .Mark(&frame1_sp); // This effectively sets stack_section.start(). + raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40004006; + raw_context.iregs[1] = 0xfb756319; // callee's r1, different from caller's + expected.iregs[1] = 0x48c8dd5a; // caller's r1 + expected_validity |= StackFrameARM::CONTEXT_VALID_R1; + CheckWalk(); +} + +// Check that we reject rules that would cause the stack pointer to +// move in the wrong direction. 
+TEST_F(CFI, RejectBackwards) { + raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40006000; + raw_context.iregs[MD_CONTEXT_ARM_REG_SP] = 0x80000000; + raw_context.iregs[MD_CONTEXT_ARM_REG_LR] = 0x40005510; + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM walker(&system_info, &raw_context, -1, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); +} + +// Check that we reject rules whose expressions' evaluation fails. +TEST_F(CFI, RejectBadExpressions) { + raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40007000; + raw_context.iregs[MD_CONTEXT_ARM_REG_SP] = 0x80000000; + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM walker(&system_info, &raw_context, -1, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); +} + +class StackwalkerARMFixtureIOS : public StackwalkerARMFixture { + public: + StackwalkerARMFixtureIOS() { + system_info.os = "iOS"; + system_info.os_short = "ios"; + } +}; + +class GetFramesByFramePointer: public StackwalkerARMFixtureIOS, public Test { }; + +TEST_F(GetFramesByFramePointer, OnlyFramePointer) { + stack_section.start() = 0x80000000; + uint32_t return_address1 = 0x50000100; + uint32_t return_address2 = 0x50000900; + Label frame1_sp, frame2_sp; + Label frame1_fp, frame2_fp; + stack_section + // frame 0 + .Append(32, 0) // Whatever values on the stack. + .D32(0x0000000D) // junk that's not + .D32(0xF0000000) // a return address. + + .Mark(&frame1_fp) // Next fp will point to the next value. + .D32(frame2_fp) // Save current frame pointer. + .D32(return_address2) // Save current link register. + .Mark(&frame1_sp) + + // frame 1 + .Append(32, 0) // Whatever values on the stack. + .D32(0x0000000D) // junk that's not + .D32(0xF0000000) // a return address. + + .Mark(&frame2_fp) + .D32(0) + .D32(0) + .Mark(&frame2_sp) + + // frame 2 + .Append(32, 0) // Whatever values on the stack. + .D32(0x0000000D) // junk that's not + .D32(0xF0000000); // a return address. 
+ RegionFromSection(); + + + raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x40005510; + raw_context.iregs[MD_CONTEXT_ARM_REG_LR] = return_address1; + raw_context.iregs[MD_CONTEXT_ARM_REG_IOS_FP] = frame1_fp.Value(); + raw_context.iregs[MD_CONTEXT_ARM_REG_SP] = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM walker(&system_info, &raw_context, MD_CONTEXT_ARM_REG_IOS_FP, + &stack_region, &modules, &frame_symbolizer); + + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(2U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ("module2", modules_without_symbols[1]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(3U, frames->size()); + + StackFrameARM *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameARM::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); + + StackFrameARM *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_FP, frame1->trust); + ASSERT_EQ((StackFrameARM::CONTEXT_VALID_PC | + StackFrameARM::CONTEXT_VALID_LR | + StackFrameARM::RegisterValidFlag(MD_CONTEXT_ARM_REG_IOS_FP) | + StackFrameARM::CONTEXT_VALID_SP), + frame1->context_validity); + EXPECT_EQ(return_address1, frame1->context.iregs[MD_CONTEXT_ARM_REG_PC]); + EXPECT_EQ(return_address2, frame1->context.iregs[MD_CONTEXT_ARM_REG_LR]); + EXPECT_EQ(frame1_sp.Value(), frame1->context.iregs[MD_CONTEXT_ARM_REG_SP]); + EXPECT_EQ(frame2_fp.Value(), + frame1->context.iregs[MD_CONTEXT_ARM_REG_IOS_FP]); + + StackFrameARM *frame2 = static_cast(frames->at(2)); + EXPECT_EQ(StackFrame::FRAME_TRUST_FP, frame2->trust); + ASSERT_EQ((StackFrameARM::CONTEXT_VALID_PC | + StackFrameARM::CONTEXT_VALID_LR | + StackFrameARM::RegisterValidFlag(MD_CONTEXT_ARM_REG_IOS_FP) | + StackFrameARM::CONTEXT_VALID_SP), + frame2->context_validity); + EXPECT_EQ(return_address2, frame2->context.iregs[MD_CONTEXT_ARM_REG_PC]); + EXPECT_EQ(0U, frame2->context.iregs[MD_CONTEXT_ARM_REG_LR]); + EXPECT_EQ(frame2_sp.Value(), frame2->context.iregs[MD_CONTEXT_ARM_REG_SP]); + EXPECT_EQ(0U, frame2->context.iregs[MD_CONTEXT_ARM_REG_IOS_FP]); +} + +TEST_F(GetFramesByFramePointer, FramePointerAndCFI) { + // Provide the standatd STACK CFI records that is obtained when exmining an + // executable produced by XCode. + SetModuleSymbols(&module1, + // Adding a function in CFI. + "FUNC 4000 1000 10 enchiridion\n" + + "STACK CFI INIT 4000 100 .cfa: sp 0 + .ra: lr\n" + "STACK CFI 4001 .cfa: sp 8 + .ra: .cfa -4 + ^" + " r7: .cfa -8 + ^\n" + "STACK CFI 4002 .cfa: r7 8 +\n" + ); + + stack_section.start() = 0x80000000; + uint32_t return_address1 = 0x40004010; + uint32_t return_address2 = 0x50000900; + Label frame1_sp, frame2_sp; + Label frame1_fp, frame2_fp; + stack_section + // frame 0 + .Append(32, 0) // Whatever values on the stack. + .D32(0x0000000D) // junk that's not + .D32(0xF0000000) // a return address. + + .Mark(&frame1_fp) // Next fp will point to the next value. + .D32(frame2_fp) // Save current frame pointer. + .D32(return_address2) // Save current link register. + .Mark(&frame1_sp) + + // frame 1 + .Append(32, 0) // Whatever values on the stack. 
+ .D32(0x0000000D) // junk that's not + .D32(0xF0000000) // a return address. + + .Mark(&frame2_fp) + .D32(0) + .D32(0) + .Mark(&frame2_sp) + + // frame 2 + .Append(32, 0) // Whatever values on the stack. + .D32(0x0000000D) // junk that's not + .D32(0xF0000000); // a return address. + RegionFromSection(); + + + raw_context.iregs[MD_CONTEXT_ARM_REG_PC] = 0x50000400; + raw_context.iregs[MD_CONTEXT_ARM_REG_LR] = return_address1; + raw_context.iregs[MD_CONTEXT_ARM_REG_IOS_FP] = frame1_fp.Value(); + raw_context.iregs[MD_CONTEXT_ARM_REG_SP] = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerARM walker(&system_info, &raw_context, MD_CONTEXT_ARM_REG_IOS_FP, + &stack_region, &modules, &frame_symbolizer); + + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module2", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(3U, frames->size()); + + StackFrameARM *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameARM::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); + + StackFrameARM *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_FP, frame1->trust); + ASSERT_EQ((StackFrameARM::CONTEXT_VALID_PC | + StackFrameARM::CONTEXT_VALID_LR | + StackFrameARM::RegisterValidFlag(MD_CONTEXT_ARM_REG_IOS_FP) | + StackFrameARM::CONTEXT_VALID_SP), + frame1->context_validity); + EXPECT_EQ(return_address1, frame1->context.iregs[MD_CONTEXT_ARM_REG_PC]); + EXPECT_EQ(return_address2, frame1->context.iregs[MD_CONTEXT_ARM_REG_LR]); + EXPECT_EQ(frame1_sp.Value(), frame1->context.iregs[MD_CONTEXT_ARM_REG_SP]); + EXPECT_EQ(frame2_fp.Value(), + frame1->context.iregs[MD_CONTEXT_ARM_REG_IOS_FP]); + EXPECT_EQ("enchiridion", frame1->function_name); + EXPECT_EQ(0x40004000U, frame1->function_base); + + + StackFrameARM *frame2 = static_cast(frames->at(2)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame2->trust); + ASSERT_EQ((StackFrameARM::CONTEXT_VALID_PC | + StackFrameARM::CONTEXT_VALID_LR | + StackFrameARM::RegisterValidFlag(MD_CONTEXT_ARM_REG_IOS_FP) | + StackFrameARM::CONTEXT_VALID_SP), + frame2->context_validity); + EXPECT_EQ(return_address2, frame2->context.iregs[MD_CONTEXT_ARM_REG_PC]); + EXPECT_EQ(0U, frame2->context.iregs[MD_CONTEXT_ARM_REG_LR]); + EXPECT_EQ(frame2_sp.Value(), frame2->context.iregs[MD_CONTEXT_ARM_REG_SP]); + EXPECT_EQ(0U, frame2->context.iregs[MD_CONTEXT_ARM_REG_IOS_FP]); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_mips.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_mips.cc new file mode 100644 index 0000000000..7db3421923 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_mips.cc @@ -0,0 +1,300 @@ +// Copyright (c) 2013 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_mips.cc: MIPS-specific stackwalker. +// +// See stackwalker_mips.h for documentation. +// +// Author: Tata Elxsi + +#include "common/scoped_ptr.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/code_modules.h" +#include "google_breakpad/processor/memory_region.h" +#include "google_breakpad/processor/source_line_resolver_interface.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/cfi_frame_info.h" +#include "processor/logging.h" +#include "processor/postfix_evaluator-inl.h" +#include "processor/stackwalker_mips.h" +#include "processor/windows_frame_info.h" +#include "google_breakpad/common/minidump_cpu_mips.h" + +namespace google_breakpad { + +StackwalkerMIPS::StackwalkerMIPS(const SystemInfo* system_info, + const MDRawContextMIPS* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* resolver_helper) + : Stackwalker(system_info, memory, modules, resolver_helper), + context_(context) { + if (memory_ && memory_->GetBase() + memory_->GetSize() - 1 > 0xffffffff) { + BPLOG(ERROR) << "Memory out of range for stackwalking: " + << HexString(memory_->GetBase()) + << "+" + << HexString(memory_->GetSize()); + memory_ = NULL; + } +} + +StackFrame* StackwalkerMIPS::GetContextFrame() { + if (!context_) { + BPLOG(ERROR) << "Can't get context frame without context."; + return NULL; + } + + StackFrameMIPS* frame = new StackFrameMIPS(); + + // The instruction pointer is stored directly in a register, so pull it + // straight out of the CPU context structure. + frame->context = *context_; + frame->context_validity = StackFrameMIPS::CONTEXT_VALID_ALL; + frame->trust = StackFrame::FRAME_TRUST_CONTEXT; + frame->instruction = frame->context.epc; + + return frame; +} + +// Register names for mips. 
+static const char* const kRegisterNames[] = {
+  "$zero", "$at", "$v0", "$v1", "$a0", "$a1", "$a2", "$a3", "$t0", "$t1",
+  "$t2", "$t3", "$t4", "$t5", "$t6", "$t7", "$s0", "$s1", "$s2", "$s3",
+  "$s4", "$s5", "$s6", "$s7", "$t8", "$t9", "$k0", "$k1", "$gp", "$sp",
+  "$fp", "$ra", NULL
+  // TODO(gordanac): add floating-point save registers
+};
+
+StackFrameMIPS* StackwalkerMIPS::GetCallerByCFIFrameInfo(
+    const vector<StackFrame*>& frames,
+    CFIFrameInfo* cfi_frame_info) {
+  StackFrameMIPS* last_frame = static_cast<StackFrameMIPS*>(frames.back());
+
+  uint32_t sp = 0, pc = 0;
+
+  // Populate a dictionary with the valid register values in last_frame.
+  CFIFrameInfo::RegisterValueMap callee_registers;
+  // Use the STACK CFI data to recover the caller's register values.
+  CFIFrameInfo::RegisterValueMap caller_registers;
+
+  for (int i = 0; kRegisterNames[i]; ++i) {
+    caller_registers[kRegisterNames[i]] = last_frame->context.iregs[i];
+    callee_registers[kRegisterNames[i]] = last_frame->context.iregs[i];
+  }
+
+  if (!cfi_frame_info->FindCallerRegs(callee_registers, *memory_,
+                                      &caller_registers)) {
+    return NULL;
+  }
+
+  CFIFrameInfo::RegisterValueMap::const_iterator entry =
+      caller_registers.find(".cfa");
+
+  if (entry != caller_registers.end()) {
+    sp = entry->second;
+    caller_registers["$sp"] = entry->second;
+  }
+
+  entry = caller_registers.find(".ra");
+  if (entry != caller_registers.end()) {
+    caller_registers["$ra"] = entry->second;
+    pc = entry->second - 2 * sizeof(pc);
+  }
+  caller_registers["$pc"] = pc;
+  // Construct a new stack frame given the values the CFI recovered.
+  scoped_ptr<StackFrameMIPS> frame(new StackFrameMIPS());
+
+  for (int i = 0; kRegisterNames[i]; ++i) {
+    CFIFrameInfo::RegisterValueMap::const_iterator caller_entry =
+        caller_registers.find(kRegisterNames[i]);
+
+    if (caller_entry != caller_registers.end()) {
+      // The value of this register is recovered; fill the context with the
+      // value from caller_registers.
+      frame->context.iregs[i] = caller_entry->second;
+      frame->context_validity |= StackFrameMIPS::RegisterValidFlag(i);
+    } else if (((i >= INDEX_MIPS_REG_S0 && i <= INDEX_MIPS_REG_S7) ||
+                (i > INDEX_MIPS_REG_GP && i < INDEX_MIPS_REG_RA)) &&
+               (last_frame->context_validity &
+                StackFrameMIPS::RegisterValidFlag(i))) {
+      // If the STACK CFI data doesn't mention some callee-save register, and
+      // it is valid in the callee, assume the callee has not yet changed it.
+      // Callee-save registers according to the MIPS o32 ABI specification are:
+      // $s0 to $s7
+      // $sp, $s8
+      frame->context.iregs[i] = last_frame->context.iregs[i];
+      frame->context_validity |= StackFrameMIPS::RegisterValidFlag(i);
+    }
+  }
+
+  frame->context.epc = caller_registers["$pc"];
+  frame->instruction = caller_registers["$pc"];
+  frame->context_validity |= StackFrameMIPS::CONTEXT_VALID_PC;
+
+  frame->context.iregs[MD_CONTEXT_MIPS_REG_RA] = caller_registers["$ra"];
+  frame->context_validity |= StackFrameMIPS::CONTEXT_VALID_RA;
+
+  frame->trust = StackFrame::FRAME_TRUST_CFI;
+
+  return frame.release();
+}
+
+StackFrame* StackwalkerMIPS::GetCallerFrame(const CallStack* stack,
+                                            bool stack_scan_allowed) {
+  if (!memory_ || !stack) {
+    BPLOG(ERROR) << "Can't get caller frame without memory or stack";
+    return NULL;
+  }
+
+  const vector<StackFrame*>& frames = *stack->frames();
+  StackFrameMIPS* last_frame = static_cast<StackFrameMIPS*>(frames.back());
+  scoped_ptr<StackFrameMIPS> new_frame;
+
+  // See if there is DWARF call frame information covering this address.
+ scoped_ptr cfi_frame_info( + frame_symbolizer_->FindCFIFrameInfo(last_frame)); + if (cfi_frame_info.get()) + new_frame.reset(GetCallerByCFIFrameInfo(frames, cfi_frame_info.get())); + + // If caller frame is not found in CFI try analyzing the stack. + if (stack_scan_allowed && !new_frame.get()) { + new_frame.reset(GetCallerByStackScan(frames)); + } + + // If nothing worked, tell the caller. + if (!new_frame.get()) { + return NULL; + } + + // Treat an instruction address of 0 as end-of-stack. + if (new_frame->context.epc == 0) { + return NULL; + } + + // If the new stack pointer is at a lower address than the old, then + // that's clearly incorrect. Treat this as end-of-stack to enforce + // progress and avoid infinite loops. + if (new_frame->context.iregs[MD_CONTEXT_MIPS_REG_SP] <= + last_frame->context.iregs[MD_CONTEXT_MIPS_REG_SP]) { + return NULL; + } + + return new_frame.release(); +} + +StackFrameMIPS* StackwalkerMIPS::GetCallerByStackScan( + const vector& frames) { + const uint32_t kMaxFrameStackSize = 1024; + const uint32_t kMinArgsOnStack = 4; + + StackFrameMIPS* last_frame = static_cast(frames.back()); + + uint32_t last_sp = last_frame->context.iregs[MD_CONTEXT_MIPS_REG_SP]; + uint32_t caller_pc, caller_sp, caller_fp; + + // Return address cannot be obtained directly. + // Force stackwalking. + + // We cannot use frame pointer to get the return address. + // We'll scan the stack for a + // return address. This can happen if last_frame is executing code + // for a module for which we don't have symbols. + int count = kMaxFrameStackSize / sizeof(caller_pc); + + if (frames.size() > 1) { + // In case of mips32 ABI stack frame of a nonleaf function + // must have minimum stack frame assigned for 4 arguments (4 words). + // Move stack pointer for 4 words to avoid reporting non-existing frames + // for all frames except the topmost one. + // There is no way of knowing if topmost frame belongs to a leaf or + // a nonleaf function. + last_sp += kMinArgsOnStack * sizeof(caller_pc); + // Adjust 'count' so that return address is scanned only in limits + // of one stack frame. + count -= kMinArgsOnStack; + } + + do { + // Scanning for return address from stack pointer of the last frame. + if (!ScanForReturnAddress(last_sp, &caller_sp, &caller_pc, count)) { + // If we can't find an instruction pointer even with stack scanning, + // give up. + BPLOG(ERROR) << " ScanForReturnAddress failed "; + return NULL; + } + // Get $fp stored in the stack frame. + if (!memory_->GetMemoryAtAddress(caller_sp - sizeof(caller_pc), + &caller_fp)) { + BPLOG(INFO) << " GetMemoryAtAddress for fp failed " ; + return NULL; + } + + count = count - (caller_sp - last_sp) / sizeof(caller_pc); + // Now scan the next address in the stack. + last_sp = caller_sp + sizeof(caller_pc); + } while ((caller_fp - caller_sp >= kMaxFrameStackSize) && count > 0); + + if (!count) { + BPLOG(INFO) << " No frame found " ; + return NULL; + } + + // ScanForReturnAddress found a reasonable return address. Advance + // $sp to the location above the one where the return address was + // found. + caller_sp += sizeof(caller_pc); + // caller_pc is actually containing $ra value; + // $pc is two instructions before $ra, + // so the caller_pc needs to be decremented accordingly. + caller_pc -= 2 * sizeof(caller_pc); + + + // Create a new stack frame (ownership will be transferred to the caller) + // and fill it in. 
+ StackFrameMIPS* frame = new StackFrameMIPS(); + frame->trust = StackFrame::FRAME_TRUST_SCAN; + frame->context = last_frame->context; + frame->context.epc = caller_pc; + frame->context_validity |= StackFrameMIPS::CONTEXT_VALID_PC; + frame->instruction = caller_pc; + + frame->context.iregs[MD_CONTEXT_MIPS_REG_SP] = caller_sp; + frame->context_validity |= StackFrameMIPS::CONTEXT_VALID_SP; + frame->context.iregs[MD_CONTEXT_MIPS_REG_FP] = caller_fp; + frame->context_validity |= StackFrameMIPS::CONTEXT_VALID_FP; + + frame->context.iregs[MD_CONTEXT_MIPS_REG_RA] = + caller_pc + 2 * sizeof(caller_pc); + frame->context_validity |= StackFrameMIPS::CONTEXT_VALID_RA; + + return frame; +} + +} // namespace google_breakpad + diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_mips.h b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_mips.h new file mode 100644 index 0000000000..5f97791fb7 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_mips.h @@ -0,0 +1,85 @@ +// Copyright (c) 2013 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_mips.h: MIPS-specific stackwalker. +// +// Provides stack frames given MIPS register context and a memory region +// corresponding to a MIPSstack. +// +// Author: Tata Elxsi + +#ifndef PROCESSOR_STACKWALKER_MIPS_H__ +#define PROCESSOR_STACKWALKER_MIPS_H__ + +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/stackwalker.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/cfi_frame_info.h" + +namespace google_breakpad { + +class CodeModules; + +class StackwalkerMIPS : public Stackwalker { + public: + // Context is a MIPS context object that gives access to mips-specific + // register state corresponding to the innermost called frame to be + // included in the stack. The other arguments are passed directly + // through to the base Stackwalker constructor. 
+ StackwalkerMIPS(const SystemInfo* system_info, + const MDRawContextMIPS* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* frame_symbolizer); + + private: + // Implementation of Stackwalker, using mips context and stack conventions. + virtual StackFrame* GetContextFrame(); + virtual StackFrame* GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed); + + // Use cfi_frame_info (derived from STACK CFI records) to construct + // the frame that called frames.back(). The caller takes ownership + // of the returned frame. Return NULL on failure. + StackFrameMIPS* GetCallerByCFIFrameInfo(const vector& frames, + CFIFrameInfo* cfi_frame_info); + + // Scan the stack for plausible return address and frame pointer pair. + // The caller takes ownership of the returned frame. Return NULL on failure. + StackFrameMIPS* GetCallerByStackScan(const vector& frames); + + // Stores the CPU context corresponding to the innermost stack frame to + // be returned by GetContextFrame. + const MDRawContextMIPS* context_; +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_STACKWALKER_MIPS_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_mips_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_mips_unittest.cc new file mode 100644 index 0000000000..ed4be4f5b1 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_mips_unittest.cc @@ -0,0 +1,697 @@ +// Copyright (c) 2013, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Gordana Cmiljanovic + +// stackwalker_mips_unittest.cc: Unit tests for StackwalkerMIPS class. 
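[Editor's note, not part of the patch] One MIPS convention is worth calling out before the tests: both recovery paths in StackwalkerMIPS above set the caller's PC to the recovered return address minus two instruction words ("$pc is two instructions before $ra"), and the unit tests below assert exactly that offset, e.g. return_address1 - 2 * sizeof(return_address1). A minimal editorial sketch of the arithmetic, using a hypothetical helper name:

    // Editorial sketch (not part of the patch). On MIPS, $ra points past the
    // call instruction and its delay slot, so the walker reports the call
    // site as the return address minus two 4-byte instruction words.
    #include <cstdint>

    inline uint32_t CallSiteFromReturnAddress(uint32_t ra) {
      return ra - 2 * sizeof(uint32_t);  // e.g. 0x00400100 -> 0x004000f8
    }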
+ +#include +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/test_assembler.h" +#include "common/using_std_string.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/code_module.h" +#include "google_breakpad/processor/source_line_resolver_interface.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/stackwalker_unittest_utils.h" +#include "processor/stackwalker_mips.h" +#include "processor/windows_frame_info.h" + +using google_breakpad::BasicSourceLineResolver; +using google_breakpad::CallStack; +using google_breakpad::CodeModule; +using google_breakpad::StackFrameSymbolizer; +using google_breakpad::StackFrame; +using google_breakpad::StackFrameMIPS; +using google_breakpad::Stackwalker; +using google_breakpad::StackwalkerMIPS; +using google_breakpad::SystemInfo; +using google_breakpad::WindowsFrameInfo; +using google_breakpad::test_assembler::kLittleEndian; +using google_breakpad::test_assembler::Label; +using google_breakpad::test_assembler::Section; +using std::vector; +using testing::_; +using testing::AnyNumber; +using testing::Return; +using testing::SetArgumentPointee; +using testing::Test; + +class StackwalkerMIPSFixture { + public: + StackwalkerMIPSFixture() + : stack_section(kLittleEndian), + // Give the two modules reasonable standard locations and names + // for tests to play with. + module1(0x00400000, 0x10000, "module1", "version1"), + module2(0x00500000, 0x10000, "module2", "version2") { + // Identify the system as a Linux system. + system_info.os = "Linux"; + system_info.os_short = "linux"; + system_info.os_version = "Observant Opossum"; // Jealous Jellyfish + system_info.cpu = "mips"; + system_info.cpu_info = ""; + + // Put distinctive values in the raw CPU context. + BrandContext(&raw_context); + + // Create some modules with some stock debugging information. + modules.Add(&module1); + modules.Add(&module2); + + // By default, none of the modules have symbol info; call + // SetModuleSymbols to override this. + EXPECT_CALL(supplier, GetCStringSymbolData(_, _, _, _, _)) + .WillRepeatedly(Return(MockSymbolSupplier::NOT_FOUND)); + + // Avoid GMOCK WARNING "Uninteresting mock function call - returning + // directly" for FreeSymbolData(). + EXPECT_CALL(supplier, FreeSymbolData(_)).Times(AnyNumber()); + + // Reset max_frames_scanned since it's static. + Stackwalker::set_max_frames_scanned(1024); + } + + // Set the Breakpad symbol information that supplier should return for + // MODULE to INFO. + void SetModuleSymbols(MockCodeModule* module, const string& info) { + size_t buffer_size; + char* buffer = supplier.CopySymbolDataAndOwnTheCopy(info, &buffer_size); + EXPECT_CALL(supplier, GetCStringSymbolData(module, &system_info, _, _, _)) + .WillRepeatedly(DoAll(SetArgumentPointee<3>(buffer), + SetArgumentPointee<4>(buffer_size), + Return(MockSymbolSupplier::FOUND))); + } + + // Populate stack_region with the contents of stack_section. Use + // stack_section.start() as the region's starting address. + void RegionFromSection() { + string contents; + ASSERT_TRUE(stack_section.GetContents(&contents)); + stack_region.Init(stack_section.start().Value(), contents); + } + + // Fill RAW_CONTEXT with pseudo-random data, for round-trip checking. 
+ void BrandContext(MDRawContextMIPS* raw_context) { + uint8_t x = 173; + for (size_t i = 0; i < sizeof(*raw_context); ++i) + reinterpret_cast(raw_context)[i] = (x += 17); + } + + SystemInfo system_info; + MDRawContextMIPS raw_context; + Section stack_section; + MockMemoryRegion stack_region; + MockCodeModule module1; + MockCodeModule module2; + MockCodeModules modules; + MockSymbolSupplier supplier; + BasicSourceLineResolver resolver; + CallStack call_stack; + const vector* frames; +}; + +class SanityCheck: public StackwalkerMIPSFixture, public Test { }; + +TEST_F(SanityCheck, NoResolver) { + stack_section.start() = 0x80000000; + stack_section.D32(0).D32(0x0); + RegionFromSection(); + raw_context.epc = 0x00400020; + raw_context.iregs[MD_CONTEXT_MIPS_REG_SP] = 0x80000000; + + StackFrameSymbolizer frame_symbolizer(NULL, NULL); + StackwalkerMIPS walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + // This should succeed, even without a resolver or supplier. + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); + StackFrameMIPS* frame = static_cast(frames->at(0)); + // Check that the values from the original raw context made it + // through to the context in the stack frame. + EXPECT_EQ(0, memcmp(&raw_context, &frame->context, sizeof(raw_context))); +} + +class GetContextFrame: public StackwalkerMIPSFixture, public Test { }; + +TEST_F(GetContextFrame, Simple) { + stack_section.start() = 0x80000000; + stack_section.D32(0).D32(0x0); + RegionFromSection(); + raw_context.epc = 0x00400020; + raw_context.iregs[MD_CONTEXT_MIPS_REG_SP] = 0x80000000; + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerMIPS walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + StackFrameMIPS* frame = static_cast(frames->at(0)); + // Check that the values from the original raw context made it + // through to the context in the stack frame. + EXPECT_EQ(0, memcmp(&raw_context, &frame->context, sizeof(raw_context))); +} + +// The stackwalker should be able to produce the context frame even +// without stack memory present. 
+TEST_F(GetContextFrame, NoStackMemory) { + raw_context.epc = 0x00400020; + raw_context.iregs[MD_CONTEXT_MIPS_REG_SP] = 0x80000000; + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerMIPS walker(&system_info, &raw_context, NULL, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + StackFrameMIPS* frame = static_cast(frames->at(0)); + // Check that the values from the original raw context made it + // through to the context in the stack frame. + EXPECT_EQ(0, memcmp(&raw_context, &frame->context, sizeof(raw_context))); +} + +class GetCallerFrame: public StackwalkerMIPSFixture, public Test { }; + +TEST_F(GetCallerFrame, ScanWithoutSymbols) { + // When the stack walker resorts to scanning the stack, + // only addresses located within loaded modules are + // considered valid return addresses. + // Force scanning through three frames to ensure that the + // stack pointer is set properly in scan-recovered frames. + stack_section.start() = 0x80000000; + uint32_t return_address1 = 0x00400100; + uint32_t return_address2 = 0x00400900; + Label frame1_sp, frame2_sp; + stack_section + // frame 0 + .Append(16, 0) // space + + .D32(0x00490000) // junk that's not + .D32(0x00600000) // a return address + + .D32(frame1_sp) // stack pointer + .D32(return_address1) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(16, 0) // space + + .D32(0xF0000000) // more junk + .D32(0x0000000D) + + .D32(frame2_sp) // stack pointer + .D32(return_address2) // actual return address + // frame 2 + .Mark(&frame2_sp) + .Append(32, 0); // end of stack + RegionFromSection(); + + raw_context.epc = 0x00405510; + raw_context.iregs[MD_CONTEXT_MIPS_REG_SP] = stack_section.start().Value(); + raw_context.iregs[MD_CONTEXT_MIPS_REG_RA] = return_address1; + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerMIPS walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(3U, frames->size()); + + StackFrameMIPS* frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameMIPS::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); + + StackFrameMIPS* frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame1->trust); + ASSERT_EQ((StackFrameMIPS::CONTEXT_VALID_PC | + StackFrameMIPS::CONTEXT_VALID_SP | + StackFrameMIPS::CONTEXT_VALID_FP | + StackFrameMIPS::CONTEXT_VALID_RA), + frame1->context_validity); + EXPECT_EQ(return_address1 - 2 * sizeof(return_address1), frame1->context.epc); + EXPECT_EQ(frame1_sp.Value(), frame1->context.iregs[MD_CONTEXT_MIPS_REG_SP]); + + StackFrameMIPS* frame2 = static_cast(frames->at(2)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame2->trust); + 
ASSERT_EQ((StackFrameMIPS::CONTEXT_VALID_PC | + StackFrameMIPS::CONTEXT_VALID_SP | + StackFrameMIPS::CONTEXT_VALID_FP | + StackFrameMIPS::CONTEXT_VALID_RA), + frame2->context_validity); + EXPECT_EQ(return_address2 - 2 * sizeof(return_address2), frame2->context.epc); + EXPECT_EQ(frame2_sp.Value(), frame2->context.iregs[MD_CONTEXT_MIPS_REG_SP]); +} + +TEST_F(GetCallerFrame, ScanWithFunctionSymbols) { + // During stack scanning, if a potential return address + // is located within a loaded module that has symbols, + // it is only considered a valid return address if it + // lies within a function's bounds. + stack_section.start() = 0x80000000; + uint32_t return_address = 0x00500200; + Label frame1_sp; + stack_section + // frame 0 + .Append(16, 0) // space + + .D32(0x00490000) // junk that's not + .D32(0x00600000) // a return address + + .D32(0x00401000) // a couple of plausible addresses + .D32(0x0050F000) // that are not within functions + + .D32(frame1_sp) // stack pointer + .D32(return_address) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(32, 0); // end of stack + RegionFromSection(); + + raw_context.epc = 0x00400200; + raw_context.iregs[MD_CONTEXT_MIPS_REG_SP] = stack_section.start().Value(); + raw_context.iregs[MD_CONTEXT_MIPS_REG_RA] = return_address; + + SetModuleSymbols(&module1, + // The youngest frame's function. + "FUNC 100 400 10 monotreme\n"); + SetModuleSymbols(&module2, + // The calling frame's function. + "FUNC 100 400 10 marsupial\n"); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerMIPS walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + StackFrameMIPS* frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameMIPS::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); + EXPECT_EQ("monotreme", frame0->function_name); + EXPECT_EQ(0x00400100U, frame0->function_base); + + StackFrameMIPS* frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame1->trust); + ASSERT_EQ((StackFrameMIPS::CONTEXT_VALID_PC | + StackFrameMIPS::CONTEXT_VALID_SP | + StackFrameMIPS::CONTEXT_VALID_FP | + StackFrameMIPS::CONTEXT_VALID_RA), + frame1->context_validity); + EXPECT_EQ(return_address - 2 * sizeof(return_address), frame1->context.epc); + EXPECT_EQ(frame1_sp.Value(), frame1->context.iregs[MD_CONTEXT_MIPS_REG_SP]); + EXPECT_EQ("marsupial", frame1->function_name); + EXPECT_EQ(0x00500100U, frame1->function_base); +} + +TEST_F(GetCallerFrame, CheckStackFrameSizeLimit) { + // If the stackwalker resorts to stack scanning, it will scan only + // 1024 bytes of stack which correspondes to maximum size of stack frame. 
+ stack_section.start() = 0x80000000; + uint32_t return_address1 = 0x00500100; + uint32_t return_address2 = 0x00500900; + Label frame1_sp, frame2_sp; + stack_section + // frame 0 + .Append(32, 0) // space + + .D32(0x00490000) // junk that's not + .D32(0x00600000) // a return address + + .Append(96, 0) // more space + + .D32(frame1_sp) // stack pointer + .D32(return_address1) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(128 * 4, 0) // space + + .D32(0x00F00000) // more junk + .D32(0x0000000D) + + .Append(128 * 4, 0) // more space + + .D32(frame2_sp) // stack pointer + .D32(return_address2) // actual return address + // (won't be found) + // frame 2 + .Mark(&frame2_sp) + .Append(32, 0); // end of stack + RegionFromSection(); + + raw_context.epc = 0x00405510; + raw_context.iregs[MD_CONTEXT_MIPS_REG_SP] = stack_section.start().Value(); + raw_context.iregs[MD_CONTEXT_MIPS_REG_RA] = return_address1; + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerMIPS walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(2U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ("module2", modules_without_symbols[1]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + StackFrameMIPS* frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameMIPS::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); + + StackFrameMIPS* frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame1->trust); + ASSERT_EQ((StackFrameMIPS::CONTEXT_VALID_PC | + StackFrameMIPS::CONTEXT_VALID_SP | + StackFrameMIPS::CONTEXT_VALID_FP | + StackFrameMIPS::CONTEXT_VALID_RA), + frame1->context_validity); + EXPECT_EQ(return_address1 - 2 * sizeof(return_address1), frame1->context.epc); + EXPECT_EQ(frame1_sp.Value(), frame1->context.iregs[MD_CONTEXT_MIPS_REG_SP]); +} + +// Test that set_max_frames_scanned prevents using stack scanning +// to find caller frames. 
+TEST_F(GetCallerFrame, ScanningNotAllowed) { + // When the stack walker resorts to scanning the stack, + // only fixed number of frames are allowed to be scanned out from stack + stack_section.start() = 0x80000000; + uint32_t return_address1 = 0x00500100; + uint32_t return_address2 = 0x00500900; + Label frame1_sp, frame2_sp; + stack_section + // frame 0 + .Append(32, 0) // space + + .D32(0x00490000) // junk that's not + .D32(0x00600000) // a return address + + .Append(96, 0) // more space + + .D32(frame1_sp) // stack pointer + .D32(return_address1) // actual return address + // frame 1 + .Mark(&frame1_sp) + .Append(128 * 4, 0) // space + + .D32(0x00F00000) // more junk + .D32(0x0000000D) + + .Append(128 * 4, 0) // more space + + .D32(frame2_sp) // stack pointer + .D32(return_address2) // actual return address + // (won't be found) + // frame 2 + .Mark(&frame2_sp) + .Append(32, 0); // end of stack + RegionFromSection(); + + raw_context.epc = 0x00405510; + raw_context.iregs[MD_CONTEXT_MIPS_REG_SP] = stack_section.start().Value(); + raw_context.iregs[MD_CONTEXT_MIPS_REG_RA] = return_address1; + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerMIPS walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + Stackwalker::set_max_frames_scanned(0); + + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); + + StackFrameMIPS* frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameMIPS::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0, memcmp(&raw_context, &frame0->context, sizeof(raw_context))); +} + +struct CFIFixture: public StackwalkerMIPSFixture { + CFIFixture() { + // Provide some STACK CFI records; + SetModuleSymbols(&module1, + // The youngest frame's function. + "FUNC 4000 1000 0 enchiridion\n" + // Initially, nothing has been pushed on the stack, + // and the return address is still in the $ra register. + "STACK CFI INIT 4000 1000 .cfa: $sp 0 + .ra: $ra\n" + // Move stack pointer. + "STACK CFI 4004 .cfa: $sp 32 +\n" + // store $fp and ra + "STACK CFI 4008 $fp: .cfa -8 + ^ .ra: .cfa -4 + ^\n" + // restore $fp + "STACK CFI 400c .cfa: $fp 32 +\n" + // restore $sp + "STACK CFI 4018 .cfa: $sp 32 +\n" + + "STACK CFI 4020 $fp: $fp .cfa: $sp 0 + .ra: .ra\n" + + // The calling function. + "FUNC 5000 1000 0 epictetus\n" + // Initially, nothing has been pushed on the stack, + // and the return address is still in the $ra register. + "STACK CFI INIT 5000 1000 .cfa: $sp .ra: $ra\n" + // Mark it as end of stack. + "STACK CFI INIT 5000 8 .cfa: $sp 0 + .ra: $ra\n" + + // A function whose CFI makes the stack pointer + // go backwards. + "FUNC 6000 1000 20 palinal\n" + "STACK CFI INIT 6000 1000 .cfa: $sp 4 - .ra: $ra\n" + + // A function with CFI expressions that can't be + // evaluated. + "FUNC 7000 1000 20 rhetorical\n" + "STACK CFI INIT 7000 1000 .cfa: moot .ra: ambiguous\n" + ); + + // Provide some distinctive values for the caller's registers. 
+ expected.epc = 0x00405508; + expected.iregs[MD_CONTEXT_MIPS_REG_S0] = 0x0; + expected.iregs[MD_CONTEXT_MIPS_REG_S1] = 0x1; + expected.iregs[MD_CONTEXT_MIPS_REG_S2] = 0x2; + expected.iregs[MD_CONTEXT_MIPS_REG_S3] = 0x3; + expected.iregs[MD_CONTEXT_MIPS_REG_S4] = 0x4; + expected.iregs[MD_CONTEXT_MIPS_REG_S5] = 0x5; + expected.iregs[MD_CONTEXT_MIPS_REG_S6] = 0x6; + expected.iregs[MD_CONTEXT_MIPS_REG_S7] = 0x7; + expected.iregs[MD_CONTEXT_MIPS_REG_SP] = 0x80000000; + expected.iregs[MD_CONTEXT_MIPS_REG_FP] = 0x80000000; + expected.iregs[MD_CONTEXT_MIPS_REG_RA] = 0x00405510; + + // Expect CFI to recover all callee-save registers. Since CFI is the + // only stack frame construction technique we have, aside from the + // context frame itself, there's no way for us to have a set of valid + // registers smaller than this. + expected_validity = (StackFrameMIPS::CONTEXT_VALID_PC | + StackFrameMIPS::CONTEXT_VALID_S0 | + StackFrameMIPS::CONTEXT_VALID_S1 | + StackFrameMIPS::CONTEXT_VALID_S2 | + StackFrameMIPS::CONTEXT_VALID_S3 | + StackFrameMIPS::CONTEXT_VALID_S4 | + StackFrameMIPS::CONTEXT_VALID_S5 | + StackFrameMIPS::CONTEXT_VALID_S6 | + StackFrameMIPS::CONTEXT_VALID_S7 | + StackFrameMIPS::CONTEXT_VALID_SP | + StackFrameMIPS::CONTEXT_VALID_FP | + StackFrameMIPS::CONTEXT_VALID_RA); + + // By default, context frames provide all registers, as normal. + context_frame_validity = StackFrameMIPS::CONTEXT_VALID_ALL; + + // By default, registers are unchanged. + raw_context = expected; + } + + // Walk the stack, using stack_section as the contents of the stack + // and raw_context as the current register values. (Set the stack + // pointer to the stack's starting address.) Expect two stack + // frames; in the older frame, expect the callee-saves registers to + // have values matching those in 'expected'. 
+ void CheckWalk() { + RegionFromSection(); + raw_context.iregs[MD_CONTEXT_MIPS_REG_SP] = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerMIPS walker(&system_info, &raw_context, &stack_region, + &modules, &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + StackFrameMIPS* frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameMIPS::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ("enchiridion", frame0->function_name); + EXPECT_EQ(0x00404000U, frame0->function_base); + + StackFrameMIPS* frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame1->trust); + ASSERT_EQ(expected_validity, frame1->context_validity); + EXPECT_EQ(expected.iregs[MD_CONTEXT_MIPS_REG_S0], + frame1->context.iregs[MD_CONTEXT_MIPS_REG_S0]); + EXPECT_EQ(expected.iregs[MD_CONTEXT_MIPS_REG_S1], + frame1->context.iregs[MD_CONTEXT_MIPS_REG_S1]); + EXPECT_EQ(expected.iregs[MD_CONTEXT_MIPS_REG_S2], + frame1->context.iregs[MD_CONTEXT_MIPS_REG_S2]); + EXPECT_EQ(expected.iregs[MD_CONTEXT_MIPS_REG_S3], + frame1->context.iregs[MD_CONTEXT_MIPS_REG_S3]); + EXPECT_EQ(expected.iregs[MD_CONTEXT_MIPS_REG_S4], + frame1->context.iregs[MD_CONTEXT_MIPS_REG_S4]); + EXPECT_EQ(expected.iregs[MD_CONTEXT_MIPS_REG_S5], + frame1->context.iregs[MD_CONTEXT_MIPS_REG_S5]); + EXPECT_EQ(expected.iregs[MD_CONTEXT_MIPS_REG_S6], + frame1->context.iregs[MD_CONTEXT_MIPS_REG_S6]); + EXPECT_EQ(expected.iregs[MD_CONTEXT_MIPS_REG_S7], + frame1->context.iregs[MD_CONTEXT_MIPS_REG_S7]); + EXPECT_EQ(expected.iregs[MD_CONTEXT_MIPS_REG_FP], + frame1->context.iregs[MD_CONTEXT_MIPS_REG_FP]); + EXPECT_EQ(expected.iregs[MD_CONTEXT_MIPS_REG_RA], + frame1->context.iregs[MD_CONTEXT_MIPS_REG_RA]); + EXPECT_EQ(expected.iregs[MD_CONTEXT_MIPS_REG_SP], + frame1->context.iregs[MD_CONTEXT_MIPS_REG_SP]); + EXPECT_EQ(expected.epc, frame1->context.epc); + EXPECT_EQ(expected.epc, frame1->instruction); + EXPECT_EQ("epictetus", frame1->function_name); + EXPECT_EQ(0x00405000U, frame1->function_base); + } + + // The values we expect to find for the caller's registers. + MDRawContextMIPS expected; + + // The validity mask for expected. + int expected_validity; + + // The validity mask to impose on the context frame. + int context_frame_validity; +}; + +class CFI: public CFIFixture, public Test { }; + +// TODO(gordanac): add CFI tests + +TEST_F(CFI, At4004) { + Label frame1_sp = expected.iregs[MD_CONTEXT_MIPS_REG_SP]; + stack_section + // frame0 + .Append(24, 0) // space + .D32(frame1_sp) // stack pointer + .D32(0x00405510) // return address + .Mark(&frame1_sp); // This effectively sets stack_section.start(). + raw_context.epc = 0x00404004; + CheckWalk(); +} + +// Check that we reject rules that would cause the stack pointer to +// move in the wrong direction. 
+TEST_F(CFI, RejectBackwards) { + raw_context.epc = 0x40005000; + raw_context.iregs[MD_CONTEXT_MIPS_REG_SP] = 0x80000000; + raw_context.iregs[MD_CONTEXT_MIPS_REG_RA] = 0x00405510; + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerMIPS walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); +} + +// Check that we reject rules whose expressions' evaluation fails. +TEST_F(CFI, RejectBadExpressions) { + raw_context.epc = 0x00407000; + raw_context.iregs[MD_CONTEXT_MIPS_REG_SP] = 0x80000000; + raw_context.iregs[MD_CONTEXT_MIPS_REG_RA] = 0x00405510; + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerMIPS walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_ppc.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_ppc.cc new file mode 100644 index 0000000000..7e2088440b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_ppc.cc @@ -0,0 +1,146 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_ppc.cc: ppc-specific stackwalker. +// +// See stackwalker_ppc.h for documentation. 
+// +// Author: Mark Mentovai + + +#include "processor/stackwalker_ppc.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/memory_region.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/logging.h" + +namespace google_breakpad { + + +StackwalkerPPC::StackwalkerPPC(const SystemInfo* system_info, + const MDRawContextPPC* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* resolver_helper) + : Stackwalker(system_info, memory, modules, resolver_helper), + context_(context) { + if (memory_ && memory_->GetBase() + memory_->GetSize() - 1 > 0xffffffff) { + // This implementation only covers 32-bit ppc CPUs. The limits of the + // supplied stack are invalid. Mark memory_ = NULL, which will cause + // stackwalking to fail. + BPLOG(ERROR) << "Memory out of range for stackwalking: " << + HexString(memory_->GetBase()) << "+" << + HexString(memory_->GetSize()); + memory_ = NULL; + } +} + + +StackFrame* StackwalkerPPC::GetContextFrame() { + if (!context_) { + BPLOG(ERROR) << "Can't get context frame without context"; + return NULL; + } + + StackFramePPC* frame = new StackFramePPC(); + + // The instruction pointer is stored directly in a register, so pull it + // straight out of the CPU context structure. + frame->context = *context_; + frame->context_validity = StackFramePPC::CONTEXT_VALID_ALL; + frame->trust = StackFrame::FRAME_TRUST_CONTEXT; + frame->instruction = frame->context.srr0; + + return frame; +} + + +StackFrame* StackwalkerPPC::GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed) { + if (!memory_ || !stack) { + BPLOG(ERROR) << "Can't get caller frame without memory or stack"; + return NULL; + } + + // The instruction pointers for previous frames are saved on the stack. + // The typical ppc calling convention is for the called procedure to store + // its return address in the calling procedure's stack frame at 8(%r1), + // and to allocate its own stack frame by decrementing %r1 (the stack + // pointer) and saving the old value of %r1 at 0(%r1). Because the ppc has + // no hardware stack, there is no distinction between the stack pointer and + // frame pointer, and what is typically thought of as the frame pointer on + // an x86 is usually referred to as the stack pointer on a ppc. + + StackFramePPC* last_frame = static_cast( + stack->frames()->back()); + + // A caller frame must reside higher in memory than its callee frames. + // Anything else is an error, or an indication that we've reached the + // end of the stack. + uint32_t stack_pointer; + if (!memory_->GetMemoryAtAddress(last_frame->context.gpr[1], + &stack_pointer) || + stack_pointer <= last_frame->context.gpr[1]) { + return NULL; + } + + // Mac OS X/Darwin gives 1 as the return address from the bottom-most + // frame in a stack (a thread's entry point). I haven't found any + // documentation on this, but 0 or 1 would be bogus return addresses, + // so check for them here and return false (end of stack) when they're + // hit to avoid having a phantom frame. 
+ uint32_t instruction; + if (!memory_->GetMemoryAtAddress(stack_pointer + 8, &instruction) || + instruction <= 1) { + return NULL; + } + + StackFramePPC* frame = new StackFramePPC(); + + frame->context = last_frame->context; + frame->context.srr0 = instruction; + frame->context.gpr[1] = stack_pointer; + frame->context_validity = StackFramePPC::CONTEXT_VALID_SRR0 | + StackFramePPC::CONTEXT_VALID_GPR1; + frame->trust = StackFrame::FRAME_TRUST_FP; + + // frame->context.srr0 is the return address, which is one instruction + // past the branch that caused us to arrive at the callee. Set + // frame_ppc->instruction to four less than that. Since all ppc + // instructions are 4 bytes wide, this is the address of the branch + // instruction. This allows source line information to match up with the + // line that contains a function call. Callers that require the exact + // return address value may access the context.srr0 field of StackFramePPC. + frame->instruction = frame->context.srr0 - 4; + + return frame; +} + + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_ppc.h b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_ppc.h new file mode 100644 index 0000000000..012e5c32f9 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_ppc.h @@ -0,0 +1,79 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_ppc.h: ppc-specific stackwalker. +// +// Provides stack frames given ppc register context and a memory region +// corresponding to a ppc stack. 
+// +// Author: Mark Mentovai + + +#ifndef PROCESSOR_STACKWALKER_PPC_H__ +#define PROCESSOR_STACKWALKER_PPC_H__ + + +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/stackwalker.h" + +namespace google_breakpad { + +class CodeModules; + +class StackwalkerPPC : public Stackwalker { + public: + // context is a ppc context object that gives access to ppc-specific + // register state corresponding to the innermost called frame to be + // included in the stack. The other arguments are passed directly through + // to the base Stackwalker constructor. + StackwalkerPPC(const SystemInfo* system_info, + const MDRawContextPPC* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* frame_symbolizer); + + private: + // Implementation of Stackwalker, using ppc context (stack pointer in %r1, + // saved program counter in %srr0) and stack conventions (saved stack + // pointer at 0(%r1), return address at 8(0(%r1)). + virtual StackFrame* GetContextFrame(); + virtual StackFrame* GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed); + + // Stores the CPU context corresponding to the innermost stack frame to + // be returned by GetContextFrame. + const MDRawContextPPC* context_; +}; + + +} // namespace google_breakpad + + +#endif // PROCESSOR_STACKWALKER_PPC_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_ppc64.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_ppc64.cc new file mode 100644 index 0000000000..51c71fe561 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_ppc64.cc @@ -0,0 +1,137 @@ +// Copyright (c) 2013 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_ppc64.cc: ppc64-specific stackwalker. +// +// See stackwalker_ppc64.h for documentation. 
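Both walkers follow the frame chain described in the comments of the ppc walker above: the callee saved its caller's %r1 at 0(%r1), and the caller's return address sits at a fixed offset from that saved stack pointer (8 bytes on 32-bit ppc above, 16 bytes in the ppc64 walker that follows). The sketch below, which is not part of the patch, restates just that frame-chain step; the function and parameter names are illustrative assumptions.

// Illustrative sketch (not breakpad API): one step of the ppc/ppc64
// frame chain used by StackwalkerPPC::GetCallerFrame and, with a larger
// offset, by StackwalkerPPC64::GetCallerFrame below.
#include <stdint.h>

#include "google_breakpad/processor/memory_region.h"

// RegType is uint32_t for ppc and uint64_t for ppc64; kRAOffset is the
// offset of the saved return address within the caller's frame
// (8 for ppc, 16 for ppc64).
template <typename RegType, RegType kRAOffset>
static bool NextPpcFrame(const google_breakpad::MemoryRegion* memory,
                         RegType callee_sp,
                         RegType* caller_sp,
                         RegType* caller_pc) {
  // The callee stored the caller's %r1 at 0(%r1).
  if (!memory->GetMemoryAtAddress(callee_sp, caller_sp))
    return false;
  // A caller frame must reside higher in memory than its callee frame.
  if (*caller_sp <= callee_sp)
    return false;
  // Read the saved return address; 0 and 1 mark the bottom of the stack.
  if (!memory->GetMemoryAtAddress(*caller_sp + kRAOffset, caller_pc))
    return false;
  return *caller_pc > 1;
}
// e.g. NextPpcFrame<uint32_t, 8>(...) for ppc, NextPpcFrame<uint64_t, 16>(...) for ppc64.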
+ + +#include "processor/stackwalker_ppc64.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/memory_region.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/logging.h" + +#include + +namespace google_breakpad { + + +StackwalkerPPC64::StackwalkerPPC64(const SystemInfo* system_info, + const MDRawContextPPC64* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* resolver_helper) + : Stackwalker(system_info, memory, modules, resolver_helper), + context_(context) { +} + + +StackFrame* StackwalkerPPC64::GetContextFrame() { + if (!context_) { + BPLOG(ERROR) << "Can't get context frame without context"; + return NULL; + } + + StackFramePPC64* frame = new StackFramePPC64(); + + // The instruction pointer is stored directly in a register, so pull it + // straight out of the CPU context structure. + frame->context = *context_; + frame->context_validity = StackFramePPC64::CONTEXT_VALID_ALL; + frame->trust = StackFrame::FRAME_TRUST_CONTEXT; + frame->instruction = frame->context.srr0; + + return frame; +} + + +StackFrame* StackwalkerPPC64::GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed) { + if (!memory_ || !stack) { + BPLOG(ERROR) << "Can't get caller frame without memory or stack"; + return NULL; + } + + // The instruction pointers for previous frames are saved on the stack. + // The typical ppc64 calling convention is for the called procedure to store + // its return address in the calling procedure's stack frame at 8(%r1), + // and to allocate its own stack frame by decrementing %r1 (the stack + // pointer) and saving the old value of %r1 at 0(%r1). Because the ppc64 has + // no hardware stack, there is no distinction between the stack pointer and + // frame pointer, and what is typically thought of as the frame pointer on + // an x86 is usually referred to as the stack pointer on a ppc64. + + StackFramePPC64* last_frame = static_cast( + stack->frames()->back()); + + // A caller frame must reside higher in memory than its callee frames. + // Anything else is an error, or an indication that we've reached the + // end of the stack. + uint64_t stack_pointer; + if (!memory_->GetMemoryAtAddress(last_frame->context.gpr[1], + &stack_pointer) || + stack_pointer <= last_frame->context.gpr[1]) { + return NULL; + } + + // Mac OS X/Darwin gives 1 as the return address from the bottom-most + // frame in a stack (a thread's entry point). I haven't found any + // documentation on this, but 0 or 1 would be bogus return addresses, + // so check for them here and return false (end of stack) when they're + // hit to avoid having a phantom frame. + uint64_t instruction; + if (!memory_->GetMemoryAtAddress(stack_pointer + 16, &instruction) || + instruction <= 1) { + return NULL; + } + + StackFramePPC64* frame = new StackFramePPC64(); + + frame->context = last_frame->context; + frame->context.srr0 = instruction; + frame->context.gpr[1] = stack_pointer; + frame->context_validity = StackFramePPC64::CONTEXT_VALID_SRR0 | + StackFramePPC64::CONTEXT_VALID_GPR1; + frame->trust = StackFrame::FRAME_TRUST_FP; + + // frame->context.srr0 is the return address, which is one instruction + // past the branch that caused us to arrive at the callee. Set + // frame_ppc64->instruction to eight less than that. Since all ppc64 + // instructions are 8 bytes wide, this is the address of the branch + // instruction. This allows source line information to match up with the + // line that contains a function call. 
Callers that require the exact + // return address value may access the context.srr0 field of StackFramePPC64. + frame->instruction = frame->context.srr0 - 8; + + return frame; +} + + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_ppc64.h b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_ppc64.h new file mode 100644 index 0000000000..a406343af4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_ppc64.h @@ -0,0 +1,77 @@ +// Copyright (c) 2013 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_ppc64.h: ppc-specific stackwalker. +// +// Provides stack frames given ppc64 register context and a memory region +// corresponding to a ppc64 stack. + + +#ifndef PROCESSOR_STACKWALKER_PPC64_H__ +#define PROCESSOR_STACKWALKER_PPC64_H__ + + +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/stackwalker.h" + +namespace google_breakpad { + +class CodeModules; + +class StackwalkerPPC64 : public Stackwalker { + public: + // context is a ppc64 context object that gives access to ppc64-specific + // register state corresponding to the innermost called frame to be + // included in the stack. The other arguments are passed directly through + // to the base Stackwalker constructor. + StackwalkerPPC64(const SystemInfo* system_info, + const MDRawContextPPC64* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* frame_symbolizer); + + private: + // Implementation of Stackwalker, using ppc64 context (stack pointer in %r1, + // saved program counter in %srr0) and stack conventions (saved stack + // pointer at 0(%r1), return address at 8(0(%r1)). + virtual StackFrame* GetContextFrame(); + virtual StackFrame* GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed); + + // Stores the CPU context corresponding to the innermost stack frame to + // be returned by GetContextFrame. 
+ const MDRawContextPPC64* context_; +}; + + +} // namespace google_breakpad + + +#endif // PROCESSOR_STACKWALKER_PPC64_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_selftest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_selftest.cc new file mode 100644 index 0000000000..f692d4c4c0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_selftest.cc @@ -0,0 +1,433 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_selftest.cc: Tests StackwalkerX86 or StackwalkerPPC using the +// running process' stack as test data, if running on an x86 or ppc and +// compiled with gcc. This test is not enabled in the "make check" suite +// by default, because certain optimizations interfere with its proper +// operation. To turn it on, configure with --enable-selftest. +// +// Optimizations that cause problems: +// - stack frame reuse. The Recursor function here calls itself with +// |return Recursor|. When the caller's frame is reused, it will cause +// CountCallerFrames to correctly return the same number of frames +// in both the caller and callee. This is considered an unexpected +// condition in the test, which expects a callee to have one more +// caller frame in the stack than its caller. +// - frame pointer omission. Even with a stackwalker that understands +// this optimization, the code to harness debug information currently +// only exists to retrieve it from minidumps, not the current process. +// +// This test can also serve as a developmental and debugging aid if +// PRINT_STACKS is defined. 
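The invariant the selftest's header comment describes is simple: every level of Recursor should observe, via CountCallerFrames(), exactly one more caller frame than its parent observed, and frame-pointer omission or reuse of the caller's frame breaks that count. The toy sketch below, not part of the patch, restates only that counting discipline; the typedef and function names are illustrative and independent of the real walkers.

// Toy restatement of the selftest invariant (illustrative only).
// |count_frames| stands in for CountCallerFrames() in this sketch.
typedef unsigned int (*FrameCounter)();

static bool CheckRecursionInvariant(unsigned int depth,
                                    unsigned int parent_callers,
                                    FrameCounter count_frames) {
  unsigned int callers = count_frames();
  // Each call level must add exactly one caller frame; anything else
  // means frame reuse or frame-pointer omission confused the count.
  if (callers != parent_callers + 1)
    return false;
  if (depth == 0)
    return true;
  return CheckRecursionInvariant(depth - 1, callers, count_frames);
}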
+// +// Author: Mark Mentovai + +#include + +#include "processor/logging.h" + +#if defined(__i386) && !defined(__i386__) +#define __i386__ +#endif +#if defined(__sparc) && !defined(__sparc__) +#define __sparc__ +#endif + +#if (defined(__SUNPRO_CC) || defined(__GNUC__)) && \ + (defined(__i386__) || defined(__ppc__) || defined(__sparc__)) + + +#include + +#include "common/scoped_ptr.h" +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/code_module.h" +#include "google_breakpad/processor/memory_region.h" +#include "google_breakpad/processor/stack_frame.h" +#include "google_breakpad/processor/stack_frame_cpu.h" + +using google_breakpad::BasicSourceLineResolver; +using google_breakpad::CallStack; +using google_breakpad::CodeModule; +using google_breakpad::MemoryRegion; +using google_breakpad::scoped_ptr; +using google_breakpad::StackFrame; +using google_breakpad::StackFramePPC; +using google_breakpad::StackFrameX86; +using google_breakpad::StackFrameSPARC; + +#if defined(__i386__) +#include "processor/stackwalker_x86.h" +using google_breakpad::StackwalkerX86; +#elif defined(__ppc__) +#include "processor/stackwalker_ppc.h" +using google_breakpad::StackwalkerPPC; +#elif defined(__sparc__) +#include "processor/stackwalker_sparc.h" +using google_breakpad::StackwalkerSPARC; +#endif // __i386__ || __ppc__ || __sparc__ + +#define RECURSION_DEPTH 100 + + +// A simple MemoryRegion subclass that provides direct access to this +// process' memory space by pointer. +class SelfMemoryRegion : public MemoryRegion { + public: + virtual uint64_t GetBase() const { return 0; } + virtual uint32_t GetSize() const { return 0xffffffff; } + + bool GetMemoryAtAddress(uint64_t address, uint8_t* value) const { + return GetMemoryAtAddressInternal(address, value); } + bool GetMemoryAtAddress(uint64_t address, uint16_t* value) const { + return GetMemoryAtAddressInternal(address, value); } + bool GetMemoryAtAddress(uint64_t address, uint32_t* value) const { + return GetMemoryAtAddressInternal(address, value); } + bool GetMemoryAtAddress(uint64_t address, uint64_t* value) const { + return GetMemoryAtAddressInternal(address, value); } + void Print() const { + assert(false); + } + + private: + template bool GetMemoryAtAddressInternal(uint64_t address, + T* value) { + // Without knowing what addresses are actually mapped, just assume that + // everything low is not mapped. This helps the stackwalker catch the + // end of a stack when it tries to dereference a null or low pointer + // in an attempt to find the caller frame. Other unmapped accesses will + // cause the program to crash, but that would properly be a test failure. + if (address < 0x100) + return false; + + uint8_t* memory = 0; + *value = *reinterpret_cast(&memory[address]); + return true; + } +}; + + +#if defined(__GNUC__) + + +#if defined(__i386__) + +// GetEBP returns the current value of the %ebp register. Because it's +// implemented as a function, %ebp itself contains GetEBP's frame pointer +// and not the caller's frame pointer. Dereference %ebp to obtain the +// caller's frame pointer, which the compiler-generated preamble stored +// on the stack (provided frame pointers are not being omitted.) Because +// this function depends on the compiler-generated preamble, inlining is +// disabled. 
+static uint32_t GetEBP() __attribute__((noinline)); +static uint32_t GetEBP() { + uint32_t ebp; + __asm__ __volatile__( + "movl (%%ebp), %0" + : "=a" (ebp) + ); + return ebp; +} + + +// The caller's %esp is 8 higher than the value of %ebp in this function, +// assuming that it's not inlined and that the standard prolog is used. +// The CALL instruction places a 4-byte return address on the stack above +// the caller's %esp, and this function's prolog will save the caller's %ebp +// on the stack as well, for another 4 bytes, before storing %esp in %ebp. +static uint32_t GetESP() __attribute__((noinline)); +static uint32_t GetESP() { + uint32_t ebp; + __asm__ __volatile__( + "movl %%ebp, %0" + : "=a" (ebp) + ); + return ebp + 8; +} + + +// GetEIP returns the instruction pointer identifying the next instruction +// to execute after GetEIP returns. It obtains this information from the +// stack, where it was placed by the call instruction that called GetEIP. +// This function depends on frame pointers not being omitted. It is possible +// to write a pure asm version of this routine that has no compiler-generated +// preamble and uses %esp instead of %ebp; that would function in the +// absence of frame pointers. However, the simpler approach is used here +// because GetEBP and stackwalking necessarily depends on access to frame +// pointers. Because this function depends on a call instruction and the +// compiler-generated preamble, inlining is disabled. +static uint32_t GetEIP() __attribute__((noinline)); +static uint32_t GetEIP() { + uint32_t eip; + __asm__ __volatile__( + "movl 4(%%ebp), %0" + : "=a" (eip) + ); + return eip; +} + + +#elif defined(__ppc__) + + +// GetSP returns the current value of the %r1 register, which by convention, +// is the stack pointer on ppc. Because it's implemented as a function, +// %r1 itself contains GetSP's own stack pointer and not the caller's stack +// pointer. Dereference %r1 to obtain the caller's stack pointer, which the +// compiler-generated prolog stored on the stack. Because this function +// depends on the compiler-generated prolog, inlining is disabled. +static uint32_t GetSP() __attribute__((noinline)); +static uint32_t GetSP() { + uint32_t sp; + __asm__ __volatile__( + "lwz %0, 0(r1)" + : "=r" (sp) + ); + return sp; +} + + +// GetPC returns the program counter identifying the next instruction to +// execute after GetPC returns. It obtains this information from the +// link register, where it was placed by the branch instruction that called +// GetPC. Because this function depends on the caller's use of a branch +// instruction, inlining is disabled. +static uint32_t GetPC() __attribute__((noinline)); +static uint32_t GetPC() { + uint32_t lr; + __asm__ __volatile__( + "mflr %0" + : "=r" (lr) + ); + return lr; +} + + +#elif defined(__sparc__) + + +// GetSP returns the current value of the %sp/%o6/%g_r[14] register, which +// by convention, is the stack pointer on sparc. Because it's implemented +// as a function, %sp itself contains GetSP's own stack pointer and not +// the caller's stack pointer. Dereference to obtain the caller's stack +// pointer, which the compiler-generated prolog stored on the stack. +// Because this function depends on the compiler-generated prolog, inlining +// is disabled. +static uint32_t GetSP() __attribute__((noinline)); +static uint32_t GetSP() { + uint32_t sp; + __asm__ __volatile__( + "mov %%fp, %0" + : "=r" (sp) + ); + return sp; +} + +// GetFP returns the current value of the %fp register. 
Because it's +// implemented as a function, %fp itself contains GetFP's frame pointer +// and not the caller's frame pointer. Dereference %fp to obtain the +// caller's frame pointer, which the compiler-generated preamble stored +// on the stack (provided frame pointers are not being omitted.) Because +// this function depends on the compiler-generated preamble, inlining is +// disabled. +static uint32_t GetFP() __attribute__((noinline)); +static uint32_t GetFP() { + uint32_t fp; + __asm__ __volatile__( + "ld [%%fp+56], %0" + : "=r" (fp) + ); + return fp; +} + +// GetPC returns the program counter identifying the next instruction to +// execute after GetPC returns. It obtains this information from the +// link register, where it was placed by the branch instruction that called +// GetPC. Because this function depends on the caller's use of a branch +// instruction, inlining is disabled. +static uint32_t GetPC() __attribute__((noinline)); +static uint32_t GetPC() { + uint32_t pc; + __asm__ __volatile__( + "mov %%i7, %0" + : "=r" (pc) + ); + return pc + 8; +} + +#endif // __i386__ || __ppc__ || __sparc__ + +#elif defined(__SUNPRO_CC) + +#if defined(__i386__) +extern "C" { +extern uint32_t GetEIP(); +extern uint32_t GetEBP(); +extern uint32_t GetESP(); +} +#elif defined(__sparc__) +extern "C" { +extern uint32_t GetPC(); +extern uint32_t GetFP(); +extern uint32_t GetSP(); +} +#endif // __i386__ || __sparc__ + +#endif // __GNUC__ || __SUNPRO_CC + +// CountCallerFrames returns the number of stack frames beneath the function +// that called CountCallerFrames. Because this function's return value +// is dependent on the size of the stack beneath it, inlining is disabled, +// and any function that calls this should not be inlined either. +#if defined(__GNUC__) +static unsigned int CountCallerFrames() __attribute__((noinline)); +#elif defined(__SUNPRO_CC) +static unsigned int CountCallerFrames(); +#endif +static unsigned int CountCallerFrames() { + SelfMemoryRegion memory; + BasicSourceLineResolver resolver; + +#if defined(__i386__) + MDRawContextX86 context = MDRawContextX86(); + context.eip = GetEIP(); + context.ebp = GetEBP(); + context.esp = GetESP(); + + StackwalkerX86 stackwalker = StackwalkerX86(NULL, &context, &memory, NULL, + NULL, &resolver); +#elif defined(__ppc__) + MDRawContextPPC context = MDRawContextPPC(); + context.srr0 = GetPC(); + context.gpr[1] = GetSP(); + + StackwalkerPPC stackwalker = StackwalkerPPC(NULL, &context, &memory, NULL, + NULL, &resolver); +#elif defined(__sparc__) + MDRawContextSPARC context = MDRawContextSPARC(); + context.pc = GetPC(); + context.g_r[14] = GetSP(); + context.g_r[30] = GetFP(); + + StackwalkerSPARC stackwalker = StackwalkerSPARC(NULL, &context, &memory, + NULL, NULL, &resolver); +#endif // __i386__ || __ppc__ || __sparc__ + + CallStack stack; + vector modules_without_symbols; + stackwalker.Walk(&stack, &modules_without_symbols); + +#ifdef PRINT_STACKS + printf("\n"); + for (unsigned int frame_index = 0; + frame_index < stack.frames()->size(); + ++frame_index) { + StackFrame *frame = stack.frames()->at(frame_index); + printf("frame %-3d instruction = 0x%08" PRIx64, + frame_index, frame->instruction); +#if defined(__i386__) + StackFrameX86 *frame_x86 = reinterpret_cast(frame); + printf(" esp = 0x%08x ebp = 0x%08x\n", + frame_x86->context.esp, frame_x86->context.ebp); +#elif defined(__ppc__) + StackFramePPC *frame_ppc = reinterpret_cast(frame); + printf(" gpr[1] = 0x%08x\n", frame_ppc->context.gpr[1]); +#elif defined(__sparc__) + StackFrameSPARC 
*frame_sparc = reinterpret_cast(frame); + printf(" sp = 0x%08x fp = 0x%08x\n", + frame_sparc->context.g_r[14], frame_sparc->context.g_r[30]); +#endif // __i386__ || __ppc__ || __sparc__ + } +#endif // PRINT_STACKS + + // Subtract 1 because the caller wants the number of frames beneath + // itself. Because the caller called us, subract two for our frame and its + // frame, which are included in stack.size(). + return stack.frames()->size() - 2; +} + + +// Recursor verifies that the number stack frames beneath itself is one more +// than the number of stack frames beneath its parent. When depth frames +// have been reached, Recursor stops checking and returns success. If the +// frame count check fails at any depth, Recursor will stop and return false. +// Because this calls CountCallerFrames, inlining is disabled. +#if defined(__GNUC__) +static bool Recursor(unsigned int depth, unsigned int parent_callers) + __attribute__((noinline)); +#elif defined(__SUNPRO_CC) +static bool Recursor(unsigned int depth, unsigned int parent_callers); +#endif +static bool Recursor(unsigned int depth, unsigned int parent_callers) { + unsigned int callers = CountCallerFrames(); + if (callers != parent_callers + 1) + return false; + + if (depth) + return Recursor(depth - 1, callers); + + // depth == 0 + return true; +} + + +// Because this calls CountCallerFrames, inlining is disabled - but because +// it's main (and nobody calls it other than the entry point), it wouldn't +// be inlined anyway. +#if defined(__GNUC__) +int main(int argc, char** argv) __attribute__((noinline)); +#elif defined(__SUNPRO_CC) +int main(int argc, char** argv); +#endif +int main(int argc, char** argv) { + BPLOG_INIT(&argc, &argv); + + return Recursor(RECURSION_DEPTH, CountCallerFrames()) ? 0 : 1; +} + + +#else +// Not i386 or ppc or sparc? We can only test stacks we know how to walk. + + +int main(int argc, char **argv) { + BPLOG_INIT(&argc, &argv); + + // "make check" interprets an exit status of 77 to mean that the test is + // not supported. + BPLOG(ERROR) << "Selftest not supported here"; + return 77; +} + + +#endif // (__GNUC__ || __SUNPRO_CC) && (__i386__ || __ppc__ || __sparc__) diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_selftest_sol.s b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_selftest_sol.s new file mode 100644 index 0000000000..648b0499a1 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_selftest_sol.s @@ -0,0 +1,111 @@ +/* Copyright (c) 2007, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +/* stackwalker_selftest_sol.s + * On Solaris, the recommeded compiler is CC, so we can not use gcc inline + * asm, use this method instead. + * + * How to compile: as -P -L -D_ASM -D_STDC -K PIC -o \ + * src/processor/stackwalker_selftest_sol.o \ + * src/processor/stackwalker_selftest_sol.s + * + * Author: Michael Shang + */ + +#include + +#if defined(__i386) + + +ENTRY(GetEBP) + pushl %ebp + movl %esp,%ebp + subl $0x00000004,%esp + movl 0x00000000(%ebp),%eax + movl %eax,0xfffffffc(%ebp) + movl 0xfffffffc(%ebp),%eax + leave + ret +SET_SIZE(GetEBP) + +ENTRY(GetEIP) + pushl %ebp + movl %esp,%ebp + subl $0x00000004,%esp + movl 0x00000004(%ebp),%eax + movl %eax,0xfffffffc(%ebp) + movl 0xfffffffc(%ebp),%eax + leave + ret +SET_SIZE(GetEIP) + +ENTRY(GetESP) + pushl %ebp + movl %esp,%ebp + subl $0x00000004,%esp + movl %ebp,%eax + movl %eax,0xfffffffc(%ebp) + movl 0xfffffffc(%ebp),%eax + addl $0x00000008,%eax + leave + ret +SET_SIZE(GetESP) + + +#elif defined(__sparc) + + +ENTRY(GetPC) + save %sp, -120, %sp + mov %i7, %i4 + inccc 8, %i4 + mov %i4, %i0 + ret + restore +SET_SIZE(GetPC) + +ENTRY(GetSP) + save %sp, -120, %sp + mov %fp, %i4 + mov %i4, %i0 + ret + restore +SET_SIZE(GetSP) + +ENTRY(GetFP) + save %sp, -120, %sp + ld [%fp + 56], %g1 + mov %g1, %i0 + ret + restore +SET_SIZE(GetFP) + + +#endif // __i386 || __sparc diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_sparc.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_sparc.cc new file mode 100644 index 0000000000..ff2ea75a80 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_sparc.cc @@ -0,0 +1,139 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_sparc.cc: sparc-specific stackwalker. +// +// See stackwalker_sparc.h for documentation. +// +// Author: Michael Shang + + +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/memory_region.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/logging.h" +#include "processor/stackwalker_sparc.h" + +namespace google_breakpad { + + +StackwalkerSPARC::StackwalkerSPARC(const SystemInfo* system_info, + const MDRawContextSPARC* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* resolver_helper) + : Stackwalker(system_info, memory, modules, resolver_helper), + context_(context) { +} + + +StackFrame* StackwalkerSPARC::GetContextFrame() { + if (!context_) { + BPLOG(ERROR) << "Can't get context frame without context"; + return NULL; + } + + StackFrameSPARC* frame = new StackFrameSPARC(); + + // The instruction pointer is stored directly in a register, so pull it + // straight out of the CPU context structure. + frame->context = *context_; + frame->context_validity = StackFrameSPARC::CONTEXT_VALID_ALL; + frame->trust = StackFrame::FRAME_TRUST_CONTEXT; + frame->instruction = frame->context.pc; + + return frame; +} + + +StackFrame* StackwalkerSPARC::GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed) { + if (!memory_ || !stack) { + BPLOG(ERROR) << "Can't get caller frame without memory or stack"; + return NULL; + } + + StackFrameSPARC* last_frame = static_cast( + stack->frames()->back()); + + // new: caller + // old: callee + // %fp, %i6 and g_r[30] is the same, see minidump_format.h + // %sp, %o6 and g_r[14] is the same, see minidump_format.h + // %sp_new = %fp_old + // %fp_new = *(%fp_old + 32 + 32 - 8), where the callee's %i6 + // %pc_new = *(%fp_old + 32 + 32 - 4) + 8 + // which is callee's %i7 plus 8 + + // A caller frame must reside higher in memory than its callee frames. + // Anything else is an error, or an indication that we've reached the + // end of the stack. + uint64_t stack_pointer = last_frame->context.g_r[30]; + if (stack_pointer <= last_frame->context.g_r[14]) { + return NULL; + } + + uint32_t instruction; + if (!memory_->GetMemoryAtAddress(stack_pointer + 60, + &instruction) || instruction <= 1) { + return NULL; + } + + uint32_t stack_base; + if (!memory_->GetMemoryAtAddress(stack_pointer + 56, + &stack_base) || stack_base <= 1) { + return NULL; + } + + StackFrameSPARC* frame = new StackFrameSPARC(); + + frame->context = last_frame->context; + frame->context.g_r[14] = stack_pointer; + frame->context.g_r[30] = stack_base; + + // frame->context.pc is the return address, which is 2 instruction + // past the branch that caused us to arrive at the callee, which are + // a CALL instruction then a NOP instruction. + // frame_ppc->instruction to 8 less than that. Since all sparc + // instructions are 4 bytes wide, this is the address of the branch + // instruction. 
This allows source line information to match up with the + // line that contains a function call. Callers that require the exact + // return address value may access the %i7/g_r[31] field of StackFrameSPARC. + frame->context.pc = instruction + 8; + frame->instruction = instruction; + frame->context_validity = StackFrameSPARC::CONTEXT_VALID_PC | + StackFrameSPARC::CONTEXT_VALID_SP | + StackFrameSPARC::CONTEXT_VALID_FP; + frame->trust = StackFrame::FRAME_TRUST_FP; + + return frame; +} + + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_sparc.h b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_sparc.h new file mode 100644 index 0000000000..e8f2a38887 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_sparc.h @@ -0,0 +1,78 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_sparc.h: sparc-specific stackwalker. +// +// Provides stack frames given sparc register context and a memory region +// corresponding to an sparc stack. +// +// Author: Michael Shang + + +#ifndef PROCESSOR_STACKWALKER_SPARC_H__ +#define PROCESSOR_STACKWALKER_SPARC_H__ + + +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/stackwalker.h" + +namespace google_breakpad { + +class CodeModules; + +class StackwalkerSPARC : public Stackwalker { + public: + // context is a sparc context object that gives access to sparc-specific + // register state corresponding to the innermost called frame to be + // included in the stack. The other arguments are passed directly through + // to the base Stackwalker constructor. 
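+  // A minimal driving sequence, sketched after the x86 unit tests elsewhere
+  // in this patch (names such as supplier, resolver, context, memory and
+  // modules stand for whatever the caller has already extracted from the
+  // minidump):
+  //
+  //   StackFrameSymbolizer symbolizer(&supplier, &resolver);
+  //   StackwalkerSPARC walker(&system_info, &context, &memory, &modules,
+  //                           &symbolizer);
+  //   CallStack stack;
+  //   vector<const CodeModule*> without_symbols, corrupt_symbols;
+  //   walker.Walk(&stack, &without_symbols, &corrupt_symbols);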
+ StackwalkerSPARC(const SystemInfo* system_info, + const MDRawContextSPARC* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* frame_symbolizer); + + private: + // Implementation of Stackwalker, using sparc context (%fp, %sp, %pc) and + // stack conventions + virtual StackFrame* GetContextFrame(); + virtual StackFrame* GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed); + + // Stores the CPU context corresponding to the innermost stack frame to + // be returned by GetContextFrame. + const MDRawContextSPARC* context_; +}; + + +} // namespace google_breakpad + + +#endif // PROCESSOR_STACKWALKER_SPARC_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_unittest_utils.h b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_unittest_utils.h new file mode 100644 index 0000000000..73ceb199fe --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_unittest_utils.h @@ -0,0 +1,211 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// Mock classes for writing stackwalker tests, shared amongst architectures. + +#ifndef PROCESSOR_STACKWALKER_UNITTEST_UTILS_H_ +#define PROCESSOR_STACKWALKER_UNITTEST_UTILS_H_ + +#include +#include +#include +#include + +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/processor/code_module.h" +#include "google_breakpad/processor/code_modules.h" +#include "google_breakpad/processor/memory_region.h" +#include "google_breakpad/processor/symbol_supplier.h" +#include "google_breakpad/processor/system_info.h" + +class MockMemoryRegion: public google_breakpad::MemoryRegion { + public: + MockMemoryRegion(): base_address_(0) { } + + // Set this region's address and contents. If we have placed an + // instance of this class in a test fixture class, individual tests + // can use this to provide the region's contents. 
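+  // For example, the x86 unit tests below fill a test_assembler Section and
+  // then do (see RegionFromSection() in the fixture):
+  //
+  //   string contents;
+  //   stack_section.GetContents(&contents);
+  //   stack_region.Init(stack_section.start().Value(), contents);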
+ void Init(uint64_t base_address, const string &contents) { + base_address_ = base_address; + contents_ = contents; + } + + uint64_t GetBase() const { return base_address_; } + uint32_t GetSize() const { return contents_.size(); } + + bool GetMemoryAtAddress(uint64_t address, uint8_t *value) const { + return GetMemoryLittleEndian(address, value); + } + bool GetMemoryAtAddress(uint64_t address, uint16_t *value) const { + return GetMemoryLittleEndian(address, value); + } + bool GetMemoryAtAddress(uint64_t address, uint32_t *value) const { + return GetMemoryLittleEndian(address, value); + } + bool GetMemoryAtAddress(uint64_t address, uint64_t *value) const { + return GetMemoryLittleEndian(address, value); + } + void Print() const { + assert(false); + } + + private: + // Fetch a little-endian value from ADDRESS in contents_ whose size + // is BYTES, and store it in *VALUE. Return true on success. + template + bool GetMemoryLittleEndian(uint64_t address, ValueType *value) const { + if (address < base_address_ || + address - base_address_ + sizeof(ValueType) > contents_.size()) + return false; + ValueType v = 0; + int start = address - base_address_; + // The loop condition is odd, but it's correct for size_t. + for (size_t i = sizeof(ValueType) - 1; i < sizeof(ValueType); i--) + v = (v << 8) | static_cast(contents_[start + i]); + *value = v; + return true; + } + + uint64_t base_address_; + string contents_; +}; + +class MockCodeModule: public google_breakpad::CodeModule { + public: + MockCodeModule(uint64_t base_address, uint64_t size, + const string &code_file, const string &version) + : base_address_(base_address), size_(size), code_file_(code_file) { } + + uint64_t base_address() const { return base_address_; } + uint64_t size() const { return size_; } + string code_file() const { return code_file_; } + string code_identifier() const { return code_file_; } + string debug_file() const { return code_file_; } + string debug_identifier() const { return code_file_; } + string version() const { return version_; } + const google_breakpad::CodeModule *Copy() const { + abort(); // Tests won't use this. + } + + private: + uint64_t base_address_; + uint64_t size_; + string code_file_; + string version_; +}; + +class MockCodeModules: public google_breakpad::CodeModules { + public: + typedef google_breakpad::CodeModule CodeModule; + typedef google_breakpad::CodeModules CodeModules; + + void Add(const MockCodeModule *module) { + modules_.push_back(module); + } + + unsigned int module_count() const { return modules_.size(); } + + const CodeModule *GetModuleForAddress(uint64_t address) const { + for (ModuleVector::const_iterator i = modules_.begin(); + i != modules_.end(); i++) { + const MockCodeModule *module = *i; + if (module->base_address() <= address && + address - module->base_address() < module->size()) + return module; + } + return NULL; + }; + + const CodeModule *GetMainModule() const { return modules_[0]; } + + const CodeModule *GetModuleAtSequence(unsigned int sequence) const { + return modules_.at(sequence); + } + + const CodeModule *GetModuleAtIndex(unsigned int index) const { + return modules_.at(index); + } + + const CodeModules *Copy() const { abort(); } // Tests won't use this. 
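+  // Example wiring (a sketch; the base address, size and names below are the
+  // ones the x86 test fixture happens to use for its first module):
+  //
+  //   MockCodeModule module1(0x40000000, 0x10000, "module1", "version1");
+  //   MockCodeModules modules;
+  //   modules.Add(&module1);
+  //   modules.GetModuleForAddress(0x40001234);  // returns &module1
+  //   modules.GetModuleForAddress(0x60000000);  // returns NULL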
+ + private: + typedef std::vector ModuleVector; + ModuleVector modules_; +}; + +class MockSymbolSupplier: public google_breakpad::SymbolSupplier { + public: + typedef google_breakpad::CodeModule CodeModule; + typedef google_breakpad::SystemInfo SystemInfo; + MOCK_METHOD3(GetSymbolFile, SymbolResult(const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file)); + MOCK_METHOD4(GetSymbolFile, SymbolResult(const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file, + string *symbol_data)); + MOCK_METHOD5(GetCStringSymbolData, SymbolResult(const CodeModule *module, + const SystemInfo *system_info, + string *symbol_file, + char **symbol_data, + size_t *symbol_data_size)); + MOCK_METHOD1(FreeSymbolData, void(const CodeModule *module)); + + // Copies the passed string contents into a newly allocated buffer. + // The newly allocated buffer will be freed during destruction. + char* CopySymbolDataAndOwnTheCopy(const std::string &info, + size_t *symbol_data_size) { + *symbol_data_size = info.size() + 1; + char *symbol_data = new char[*symbol_data_size]; + memcpy(symbol_data, info.c_str(), info.size()); + symbol_data[info.size()] = '\0'; + symbol_data_to_free_.push_back(symbol_data); + return symbol_data; + } + + virtual ~MockSymbolSupplier() { + for (SymbolDataVector::const_iterator i = symbol_data_to_free_.begin(); + i != symbol_data_to_free_.end(); i++) { + char* symbol_data = *i; + delete [] symbol_data; + } + } + + private: + // List of symbol data to be freed upon destruction + typedef std::vector SymbolDataVector; + SymbolDataVector symbol_data_to_free_; +}; + +#endif // PROCESSOR_STACKWALKER_UNITTEST_UTILS_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_x86.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_x86.cc new file mode 100644 index 0000000000..29d98e4b8a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_x86.cc @@ -0,0 +1,672 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_x86.cc: x86-specific stackwalker. +// +// See stackwalker_x86.h for documentation. +// +// Author: Mark Mentovai + +#include +#include + +#include "common/scoped_ptr.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/code_modules.h" +#include "google_breakpad/processor/memory_region.h" +#include "google_breakpad/processor/source_line_resolver_interface.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/logging.h" +#include "processor/postfix_evaluator-inl.h" +#include "processor/stackwalker_x86.h" +#include "processor/windows_frame_info.h" +#include "processor/cfi_frame_info.h" + +namespace google_breakpad { + +// Max reasonable size for a single x86 frame is 128 KB. This value is used in +// a heuristic for recovering of the EBP chain after a scan for return address. +// This value is based on a stack frame size histogram built for a set of +// popular third party libraries which suggests that 99.5% of all frames are +// smaller than 128 KB. +static const uint32_t kMaxReasonableGapBetweenFrames = 128 * 1024; + +const StackwalkerX86::CFIWalker::RegisterSet +StackwalkerX86::cfi_register_map_[] = { + // It may seem like $eip and $esp are callee-saves, because (with Unix or + // cdecl calling conventions) the callee is responsible for having them + // restored upon return. But the callee_saves flags here really means + // that the walker should assume they're unchanged if the CFI doesn't + // mention them, which is clearly wrong for $eip and $esp. + { "$eip", ".ra", false, + StackFrameX86::CONTEXT_VALID_EIP, &MDRawContextX86::eip }, + { "$esp", ".cfa", false, + StackFrameX86::CONTEXT_VALID_ESP, &MDRawContextX86::esp }, + { "$ebp", NULL, true, + StackFrameX86::CONTEXT_VALID_EBP, &MDRawContextX86::ebp }, + { "$eax", NULL, false, + StackFrameX86::CONTEXT_VALID_EAX, &MDRawContextX86::eax }, + { "$ebx", NULL, true, + StackFrameX86::CONTEXT_VALID_EBX, &MDRawContextX86::ebx }, + { "$ecx", NULL, false, + StackFrameX86::CONTEXT_VALID_ECX, &MDRawContextX86::ecx }, + { "$edx", NULL, false, + StackFrameX86::CONTEXT_VALID_EDX, &MDRawContextX86::edx }, + { "$esi", NULL, true, + StackFrameX86::CONTEXT_VALID_ESI, &MDRawContextX86::esi }, + { "$edi", NULL, true, + StackFrameX86::CONTEXT_VALID_EDI, &MDRawContextX86::edi }, +}; + +StackwalkerX86::StackwalkerX86(const SystemInfo* system_info, + const MDRawContextX86* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* resolver_helper) + : Stackwalker(system_info, memory, modules, resolver_helper), + context_(context), + cfi_walker_(cfi_register_map_, + (sizeof(cfi_register_map_) / sizeof(cfi_register_map_[0]))) { + if (memory_ && memory_->GetBase() + memory_->GetSize() - 1 > 0xffffffff) { + // The x86 is a 32-bit CPU, the limits of the supplied stack are invalid. + // Mark memory_ = NULL, which will cause stackwalking to fail. 
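+    // For example (illustrative numbers only): a region with base 0xfffff000
+    // and size 0x2000 would end at 0x100000fff, which is not a representable
+    // 32-bit stack address, so the walker refuses to use it.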
+ BPLOG(ERROR) << "Memory out of range for stackwalking: " << + HexString(memory_->GetBase()) << "+" << + HexString(memory_->GetSize()); + memory_ = NULL; + } +} + +StackFrameX86::~StackFrameX86() { + if (windows_frame_info) + delete windows_frame_info; + windows_frame_info = NULL; + if (cfi_frame_info) + delete cfi_frame_info; + cfi_frame_info = NULL; +} + +uint64_t StackFrameX86::ReturnAddress() const { + assert(context_validity & StackFrameX86::CONTEXT_VALID_EIP); + return context.eip; +} + +StackFrame* StackwalkerX86::GetContextFrame() { + if (!context_) { + BPLOG(ERROR) << "Can't get context frame without context"; + return NULL; + } + + StackFrameX86* frame = new StackFrameX86(); + + // The instruction pointer is stored directly in a register, so pull it + // straight out of the CPU context structure. + frame->context = *context_; + frame->context_validity = StackFrameX86::CONTEXT_VALID_ALL; + frame->trust = StackFrame::FRAME_TRUST_CONTEXT; + frame->instruction = frame->context.eip; + + return frame; +} + +StackFrameX86* StackwalkerX86::GetCallerByWindowsFrameInfo( + const vector &frames, + WindowsFrameInfo* last_frame_info, + bool stack_scan_allowed) { + StackFrame::FrameTrust trust = StackFrame::FRAME_TRUST_NONE; + + StackFrameX86* last_frame = static_cast(frames.back()); + + // Save the stack walking info we found, in case we need it later to + // find the callee of the frame we're constructing now. + last_frame->windows_frame_info = last_frame_info; + + // This function only covers the full STACK WIN case. If + // last_frame_info is VALID_PARAMETER_SIZE-only, then we should + // assume the traditional frame format or use some other strategy. + if (last_frame_info->valid != WindowsFrameInfo::VALID_ALL) + return NULL; + + // This stackwalker sets each frame's %esp to its value immediately prior + // to the CALL into the callee. This means that %esp points to the last + // callee argument pushed onto the stack, which may not be where %esp points + // after the callee returns. Specifically, the value is correct for the + // cdecl calling convention, but not other conventions. The cdecl + // convention requires a caller to pop its callee's arguments from the + // stack after the callee returns. This is usually accomplished by adding + // the known size of the arguments to %esp. Other calling conventions, + // including stdcall, thiscall, and fastcall, require the callee to pop any + // parameters stored on the stack before returning. This is usually + // accomplished by using the RET n instruction, which pops n bytes off + // the stack after popping the return address. + // + // Because each frame's %esp will point to a location on the stack after + // callee arguments have been PUSHed, when locating things in a stack frame + // relative to %esp, the size of the arguments to the callee need to be + // taken into account. This seems a little bit unclean, but it's better + // than the alternative, which would need to take these same things into + // account, but only for cdecl functions. With this implementation, we get + // to be agnostic about each function's calling convention. Furthermore, + // this is how Windows debugging tools work, so it means that the %esp + // values produced by this stackwalker directly correspond to the %esp + // values you'll see there. + // + // If the last frame has no callee (because it's the context frame), just + // set the callee parameter size to 0: the stack pointer can't point to + // callee arguments because there's no callee. 
This is correct as long + // as the context wasn't captured while arguments were being pushed for + // a function call. Note that there may be functions whose parameter sizes + // are unknown, 0 is also used in that case. When that happens, it should + // be possible to walk to the next frame without reference to %esp. + + uint32_t last_frame_callee_parameter_size = 0; + int frames_already_walked = frames.size(); + if (frames_already_walked >= 2) { + const StackFrameX86* last_frame_callee + = static_cast(frames[frames_already_walked - 2]); + WindowsFrameInfo* last_frame_callee_info + = last_frame_callee->windows_frame_info; + if (last_frame_callee_info && + (last_frame_callee_info->valid + & WindowsFrameInfo::VALID_PARAMETER_SIZE)) { + last_frame_callee_parameter_size = + last_frame_callee_info->parameter_size; + } + } + + // Set up the dictionary for the PostfixEvaluator. %ebp and %esp are used + // in each program string, and their previous values are known, so set them + // here. + PostfixEvaluator::DictionaryType dictionary; + // Provide the current register values. + dictionary["$ebp"] = last_frame->context.ebp; + dictionary["$esp"] = last_frame->context.esp; + // Provide constants from the debug info for last_frame and its callee. + // .cbCalleeParams is a Breakpad extension that allows us to use the + // PostfixEvaluator engine when certain types of debugging information + // are present without having to write the constants into the program + // string as literals. + dictionary[".cbCalleeParams"] = last_frame_callee_parameter_size; + dictionary[".cbSavedRegs"] = last_frame_info->saved_register_size; + dictionary[".cbLocals"] = last_frame_info->local_size; + + uint32_t raSearchStart = last_frame->context.esp + + last_frame_callee_parameter_size + + last_frame_info->local_size + + last_frame_info->saved_register_size; + + uint32_t raSearchStartOld = raSearchStart; + uint32_t found = 0; // dummy value + // Scan up to three words above the calculated search value, in case + // the stack was aligned to a quadword boundary. + // + // TODO(ivan.penkov): Consider cleaning up the scan for return address that + // follows. The purpose of this scan is to adjust the .raSearchStart + // calculation (which is based on register %esp) in the cases where register + // %esp may have been aligned (up to a quadword). There are two problems + // with this approach: + // 1) In practice, 64 byte boundary alignment is seen which clearly can not + // be handled by a three word scan. + // 2) A search for a return address is "guesswork" by definition because + // the results will be different depending on what is left on the stack + // from previous executions. + // So, basically, the results from this scan should be ignored if other means + // for calculation of the value of .raSearchStart are available. + if (ScanForReturnAddress(raSearchStart, &raSearchStart, &found, 3) && + last_frame->trust == StackFrame::FRAME_TRUST_CONTEXT && + last_frame->windows_frame_info != NULL && + last_frame_info->type_ == WindowsFrameInfo::STACK_INFO_FPO && + raSearchStartOld == raSearchStart && + found == last_frame->context.eip) { + // The context frame represents an FPO-optimized Windows system call. + // On the top of the stack we have a pointer to the current instruction. + // This means that the callee has returned but the return address is still + // on the top of the stack which is very atypical situaltion. + // Skip one slot from the stack and do another scan in order to get the + // actual return address. 
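+    // Concretely (illustrative address): if the first scan found the current
+    // %eip at raSearchStart == 0x0012f000, the actual return address is
+    // expected one 32-bit slot higher, so the rescan starts at 0x0012f004.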
+ raSearchStart += 4; + ScanForReturnAddress(raSearchStart, &raSearchStart, &found, 3); + } + + dictionary[".cbParams"] = last_frame_info->parameter_size; + + // Decide what type of program string to use. The program string is in + // postfix notation and will be passed to PostfixEvaluator::Evaluate. + // Given the dictionary and the program string, it is possible to compute + // the return address and the values of other registers in the calling + // function. Because of bugs described below, the stack may need to be + // scanned for these values. The results of program string evaluation + // will be used to determine whether to scan for better values. + string program_string; + bool recover_ebp = true; + + trust = StackFrame::FRAME_TRUST_CFI; + if (!last_frame_info->program_string.empty()) { + // The FPO data has its own program string, which will tell us how to + // get to the caller frame, and may even fill in the values of + // nonvolatile registers and provide pointers to local variables and + // parameters. In some cases, particularly with program strings that use + // .raSearchStart, the stack may need to be scanned afterward. + program_string = last_frame_info->program_string; + } else if (last_frame_info->allocates_base_pointer) { + // The function corresponding to the last frame doesn't use the frame + // pointer for conventional purposes, but it does allocate a new + // frame pointer and use it for its own purposes. Its callee's + // information is still accessed relative to %esp, and the previous + // value of %ebp can be recovered from a location in its stack frame, + // within the saved-register area. + // + // Functions that fall into this category use the %ebp register for + // a purpose other than the frame pointer. They restore the caller's + // %ebp before returning. These functions create their stack frame + // after a CALL by decrementing the stack pointer in an amount + // sufficient to store local variables, and then PUSHing saved + // registers onto the stack. Arguments to a callee function, if any, + // are PUSHed after that. Walking up to the caller, therefore, + // can be done solely with calculations relative to the stack pointer + // (%esp). The return address is recovered from the memory location + // above the known sizes of the callee's parameters, saved registers, + // and locals. The caller's stack pointer (the value of %esp when + // the caller executed CALL) is the location immediately above the + // saved return address. The saved value of %ebp to be restored for + // the caller is at a known location in the saved-register area of + // the stack frame. + // + // For this type of frame, MSVC 14 (from Visual Studio 8/2005) in + // link-time code generation mode (/LTCG and /GL) can generate erroneous + // debugging data. The reported size of saved registers can be 0, + // which is clearly an error because these frames must, at the very + // least, save %ebp. For this reason, in addition to those given above + // about the use of .raSearchStart, the stack may need to be scanned + // for a better return address and a better frame pointer after the + // program string is evaluated. 
+ // + // %eip_new = *(%esp_old + callee_params + saved_regs + locals) + // %ebp_new = *(%esp_old + callee_params + saved_regs - 8) + // %esp_new = %esp_old + callee_params + saved_regs + locals + 4 + program_string = "$eip .raSearchStart ^ = " + "$ebp $esp .cbCalleeParams + .cbSavedRegs + 8 - ^ = " + "$esp .raSearchStart 4 + ="; + } else { + // The function corresponding to the last frame doesn't use %ebp at + // all. The callee frame is located relative to %esp. + // + // The called procedure's instruction pointer and stack pointer are + // recovered in the same way as the case above, except that no + // frame pointer (%ebp) is used at all, so it is not saved anywhere + // in the callee's stack frame and does not need to be recovered. + // Because %ebp wasn't used in the callee, whatever value it has + // is the value that it had in the caller, so it can be carried + // straight through without bringing its validity into question. + // + // Because of the use of .raSearchStart, the stack will possibly be + // examined to locate a better return address after program string + // evaluation. The stack will not be examined to locate a saved + // %ebp value, because these frames do not save (or use) %ebp. + // + // %eip_new = *(%esp_old + callee_params + saved_regs + locals) + // %esp_new = %esp_old + callee_params + saved_regs + locals + 4 + // %ebp_new = %ebp_old + program_string = "$eip .raSearchStart ^ = " + "$esp .raSearchStart 4 + ="; + recover_ebp = false; + } + + // Check for alignment operators in the program string. If alignment + // operators are found, then current %ebp must be valid and it is the only + // reliable data point that can be used for getting to the previous frame. + // E.g. the .raSearchStart calculation (above) is based on %esp and since + // %esp was aligned in the current frame (which is a lossy operation) the + // calculated value of .raSearchStart cannot be correct and should not be + // used. Instead .raSearchStart must be calculated based on %ebp. + // The code that follows assumes that .raSearchStart is supposed to point + // at the saved return address (ebp + 4). + // For some more details on this topic, take a look at the following thread: + // https://groups.google.com/forum/#!topic/google-breakpad-dev/ZP1FA9B1JjM + if ((StackFrameX86::CONTEXT_VALID_EBP & last_frame->context_validity) != 0 && + program_string.find('@') != string::npos) { + raSearchStart = last_frame->context.ebp + 4; + } + + // The difference between raSearch and raSearchStart is unknown, + // but making them the same seems to work well in practice. + dictionary[".raSearchStart"] = raSearchStart; + dictionary[".raSearch"] = raSearchStart; + + // Now crank it out, making sure that the program string set at least the + // two required variables. + PostfixEvaluator evaluator = + PostfixEvaluator(&dictionary, memory_); + PostfixEvaluator::DictionaryValidityType dictionary_validity; + if (!evaluator.Evaluate(program_string, &dictionary_validity) || + dictionary_validity.find("$eip") == dictionary_validity.end() || + dictionary_validity.find("$esp") == dictionary_validity.end()) { + // Program string evaluation failed. It may be that %eip is not somewhere + // with stack frame info, and %ebp is pointing to non-stack memory, so + // our evaluation couldn't succeed. We'll scan the stack for a return + // address. This can happen if the stack is in a module for which + // we don't have symbols, and that module is compiled without a + // frame pointer. 
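+    // ScanForReturnAddress, inherited from the Stackwalker base class, walks
+    // upward from the given start address one 32-bit word at a time and looks
+    // for a value that plausibly points into known code; on success it
+    // reports the candidate return address and the stack location where that
+    // candidate was found.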
+ uint32_t location_start = last_frame->context.esp; + uint32_t location, eip; + if (!stack_scan_allowed + || !ScanForReturnAddress(location_start, &location, &eip, + frames.size() == 1 /* is_context_frame */)) { + // if we can't find an instruction pointer even with stack scanning, + // give up. + return NULL; + } + + // This seems like a reasonable return address. Since program string + // evaluation failed, use it and set %esp to the location above the + // one where the return address was found. + dictionary["$eip"] = eip; + dictionary["$esp"] = location + 4; + trust = StackFrame::FRAME_TRUST_SCAN; + } + + // Since this stack frame did not use %ebp in a traditional way, + // locating the return address isn't entirely deterministic. In that + // case, the stack can be scanned to locate the return address. + // + // However, if program string evaluation resulted in both %eip and + // %ebp values of 0, trust that the end of the stack has been + // reached and don't scan for anything else. + if (dictionary["$eip"] != 0 || dictionary["$ebp"] != 0) { + int offset = 0; + + // This scan can only be done if a CodeModules object is available, to + // check that candidate return addresses are in fact inside a module. + // + // TODO(mmentovai): This ignores dynamically-generated code. One possible + // solution is to check the minidump's memory map to see if the candidate + // %eip value comes from a mapped executable page, although this would + // require dumps that contain MINIDUMP_MEMORY_INFO, which the Breakpad + // client doesn't currently write (it would need to call MiniDumpWriteDump + // with the MiniDumpWithFullMemoryInfo type bit set). Even given this + // ability, older OSes (pre-XP SP2) and CPUs (pre-P4) don't enforce + // an independent execute privilege on memory pages. + + uint32_t eip = dictionary["$eip"]; + if (modules_ && !modules_->GetModuleForAddress(eip)) { + // The instruction pointer at .raSearchStart was invalid, so start + // looking one 32-bit word above that location. + uint32_t location_start = dictionary[".raSearchStart"] + 4; + uint32_t location; + if (stack_scan_allowed + && ScanForReturnAddress(location_start, &location, &eip, + frames.size() == 1 /* is_context_frame */)) { + // This is a better return address that what program string + // evaluation found. Use it, and set %esp to the location above the + // one where the return address was found. + dictionary["$eip"] = eip; + dictionary["$esp"] = location + 4; + offset = location - location_start; + trust = StackFrame::FRAME_TRUST_CFI_SCAN; + } + } + + if (recover_ebp) { + // When trying to recover the previous value of the frame pointer (%ebp), + // start looking at the lowest possible address in the saved-register + // area, and look at the entire saved register area, increased by the + // size of |offset| to account for additional data that may be on the + // stack. The scan is performed from the highest possible address to + // the lowest, because the expectation is that the function's prolog + // would have saved %ebp early. + uint32_t ebp = dictionary["$ebp"]; + + // When a scan for return address is used, it is possible to skip one or + // more frames (when return address is not in a known module). 
One + // indication for skipped frames is when the value of %ebp is lower than + // the location of the return address on the stack + bool has_skipped_frames = + (trust != StackFrame::FRAME_TRUST_CFI && ebp <= raSearchStart + offset); + + uint32_t value; // throwaway variable to check pointer validity + if (has_skipped_frames || !memory_->GetMemoryAtAddress(ebp, &value)) { + int fp_search_bytes = last_frame_info->saved_register_size + offset; + uint32_t location_end = last_frame->context.esp + + last_frame_callee_parameter_size; + + for (uint32_t location = location_end + fp_search_bytes; + location >= location_end; + location -= 4) { + if (!memory_->GetMemoryAtAddress(location, &ebp)) + break; + + if (memory_->GetMemoryAtAddress(ebp, &value)) { + // The candidate value is a pointer to the same memory region + // (the stack). Prefer it as a recovered %ebp result. + dictionary["$ebp"] = ebp; + break; + } + } + } + } + } + + // Create a new stack frame (ownership will be transferred to the caller) + // and fill it in. + StackFrameX86* frame = new StackFrameX86(); + + frame->trust = trust; + frame->context = last_frame->context; + frame->context.eip = dictionary["$eip"]; + frame->context.esp = dictionary["$esp"]; + frame->context.ebp = dictionary["$ebp"]; + frame->context_validity = StackFrameX86::CONTEXT_VALID_EIP | + StackFrameX86::CONTEXT_VALID_ESP | + StackFrameX86::CONTEXT_VALID_EBP; + + // These are nonvolatile (callee-save) registers, and the program string + // may have filled them in. + if (dictionary_validity.find("$ebx") != dictionary_validity.end()) { + frame->context.ebx = dictionary["$ebx"]; + frame->context_validity |= StackFrameX86::CONTEXT_VALID_EBX; + } + if (dictionary_validity.find("$esi") != dictionary_validity.end()) { + frame->context.esi = dictionary["$esi"]; + frame->context_validity |= StackFrameX86::CONTEXT_VALID_ESI; + } + if (dictionary_validity.find("$edi") != dictionary_validity.end()) { + frame->context.edi = dictionary["$edi"]; + frame->context_validity |= StackFrameX86::CONTEXT_VALID_EDI; + } + + return frame; +} + +StackFrameX86* StackwalkerX86::GetCallerByCFIFrameInfo( + const vector &frames, + CFIFrameInfo* cfi_frame_info) { + StackFrameX86* last_frame = static_cast(frames.back()); + last_frame->cfi_frame_info = cfi_frame_info; + + scoped_ptr frame(new StackFrameX86()); + if (!cfi_walker_ + .FindCallerRegisters(*memory_, *cfi_frame_info, + last_frame->context, last_frame->context_validity, + &frame->context, &frame->context_validity)) + return NULL; + + // Make sure we recovered all the essentials. + static const int essentials = (StackFrameX86::CONTEXT_VALID_EIP + | StackFrameX86::CONTEXT_VALID_ESP + | StackFrameX86::CONTEXT_VALID_EBP); + if ((frame->context_validity & essentials) != essentials) + return NULL; + + frame->trust = StackFrame::FRAME_TRUST_CFI; + + return frame.release(); +} + +StackFrameX86* StackwalkerX86::GetCallerByEBPAtBase( + const vector &frames, + bool stack_scan_allowed) { + StackFrame::FrameTrust trust; + StackFrameX86* last_frame = static_cast(frames.back()); + uint32_t last_esp = last_frame->context.esp; + uint32_t last_ebp = last_frame->context.ebp; + + // Assume that the standard %ebp-using x86 calling convention is in + // use. + // + // The typical x86 calling convention, when frame pointers are present, + // is for the calling procedure to use CALL, which pushes the return + // address onto the stack and sets the instruction pointer (%eip) to + // the entry point of the called routine. 
The called routine then + // PUSHes the calling routine's frame pointer (%ebp) onto the stack + // before copying the stack pointer (%esp) to the frame pointer (%ebp). + // Therefore, the calling procedure's frame pointer is always available + // by dereferencing the called procedure's frame pointer, and the return + // address is always available at the memory location immediately above + // the address pointed to by the called procedure's frame pointer. The + // calling procedure's stack pointer (%esp) is 8 higher than the value + // of the called procedure's frame pointer at the time the calling + // procedure made the CALL: 4 bytes for the return address pushed by the + // CALL itself, and 4 bytes for the callee's PUSH of the caller's frame + // pointer. + // + // %eip_new = *(%ebp_old + 4) + // %esp_new = %ebp_old + 8 + // %ebp_new = *(%ebp_old) + + uint32_t caller_eip, caller_esp, caller_ebp; + + if (memory_->GetMemoryAtAddress(last_ebp + 4, &caller_eip) && + memory_->GetMemoryAtAddress(last_ebp, &caller_ebp)) { + caller_esp = last_ebp + 8; + trust = StackFrame::FRAME_TRUST_FP; + } else { + // We couldn't read the memory %ebp refers to. It may be that %ebp + // is pointing to non-stack memory. We'll scan the stack for a + // return address. This can happen if last_frame is executing code + // for a module for which we don't have symbols, and that module + // is compiled without a frame pointer. + if (!stack_scan_allowed + || !ScanForReturnAddress(last_esp, &caller_esp, &caller_eip, + frames.size() == 1 /* is_context_frame */)) { + // if we can't find an instruction pointer even with stack scanning, + // give up. + return NULL; + } + + // ScanForReturnAddress found a reasonable return address. Advance %esp to + // the location immediately above the one where the return address was + // found. + caller_esp += 4; + // Try to restore the %ebp chain. The caller %ebp should be stored at a + // location immediately below the one where the return address was found. + // A valid caller %ebp must be greater than the address where it is stored + // and the gap between the two adjacent frames should be reasonable. + uint32_t restored_ebp_chain = caller_esp - 8; + if (!memory_->GetMemoryAtAddress(restored_ebp_chain, &caller_ebp) || + caller_ebp <= restored_ebp_chain || + caller_ebp - restored_ebp_chain > kMaxReasonableGapBetweenFrames) { + // The restored %ebp chain doesn't appear to be valid. + // Assume that %ebp is unchanged. + caller_ebp = last_ebp; + } + + trust = StackFrame::FRAME_TRUST_SCAN; + } + + // Create a new stack frame (ownership will be transferred to the caller) + // and fill it in. + StackFrameX86* frame = new StackFrameX86(); + + frame->trust = trust; + frame->context = last_frame->context; + frame->context.eip = caller_eip; + frame->context.esp = caller_esp; + frame->context.ebp = caller_ebp; + frame->context_validity = StackFrameX86::CONTEXT_VALID_EIP | + StackFrameX86::CONTEXT_VALID_ESP | + StackFrameX86::CONTEXT_VALID_EBP; + + return frame; +} + +StackFrame* StackwalkerX86::GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed) { + if (!memory_ || !stack) { + BPLOG(ERROR) << "Can't get caller frame without memory or stack"; + return NULL; + } + + const vector &frames = *stack->frames(); + StackFrameX86* last_frame = static_cast(frames.back()); + scoped_ptr new_frame; + + // If the resolver has Windows stack walking information, use that. 
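+  // (Order of preference: STACK WIN / FPO data first, then DWARF CFI, then
+  // the traditional %ebp chain. Stack scanning is used only as a last
+  // resort, and only when stack_scan_allowed permits it.)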
+ WindowsFrameInfo* windows_frame_info + = frame_symbolizer_->FindWindowsFrameInfo(last_frame); + if (windows_frame_info) + new_frame.reset(GetCallerByWindowsFrameInfo(frames, windows_frame_info, + stack_scan_allowed)); + + // If the resolver has DWARF CFI information, use that. + if (!new_frame.get()) { + CFIFrameInfo* cfi_frame_info = + frame_symbolizer_->FindCFIFrameInfo(last_frame); + if (cfi_frame_info) + new_frame.reset(GetCallerByCFIFrameInfo(frames, cfi_frame_info)); + } + + // Otherwise, hope that the program was using a traditional frame structure. + if (!new_frame.get()) + new_frame.reset(GetCallerByEBPAtBase(frames, stack_scan_allowed)); + + // If nothing worked, tell the caller. + if (!new_frame.get()) + return NULL; + + // Treat an instruction address of 0 as end-of-stack. + if (new_frame->context.eip == 0) + return NULL; + + // If the new stack pointer is at a lower address than the old, then + // that's clearly incorrect. Treat this as end-of-stack to enforce + // progress and avoid infinite loops. + if (new_frame->context.esp <= last_frame->context.esp) + return NULL; + + // new_frame->context.eip is the return address, which is the instruction + // after the CALL that caused us to arrive at the callee. Set + // new_frame->instruction to one less than that, so it points within the + // CALL instruction. See StackFrame::instruction for details, and + // StackFrameAMD64::ReturnAddress. + new_frame->instruction = new_frame->context.eip - 1; + + return new_frame.release(); +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_x86.h b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_x86.h new file mode 100644 index 0000000000..0659a13bf4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_x86.h @@ -0,0 +1,117 @@ +// -*- mode: c++ -*- + +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// stackwalker_x86.h: x86-specific stackwalker. 
+// +// Provides stack frames given x86 register context and a memory region +// corresponding to an x86 stack. +// +// Author: Mark Mentovai + + +#ifndef PROCESSOR_STACKWALKER_X86_H__ +#define PROCESSOR_STACKWALKER_X86_H__ + +#include + +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/stackwalker.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/cfi_frame_info.h" + +namespace google_breakpad { + +class CodeModules; + + +class StackwalkerX86 : public Stackwalker { + public: + // context is an x86 context object that gives access to x86-specific + // register state corresponding to the innermost called frame to be + // included in the stack. The other arguments are passed directly through + // to the base Stackwalker constructor. + StackwalkerX86(const SystemInfo* system_info, + const MDRawContextX86* context, + MemoryRegion* memory, + const CodeModules* modules, + StackFrameSymbolizer* frame_symbolizer); + + private: + // A STACK CFI-driven frame walker for the X86. + typedef SimpleCFIWalker CFIWalker; + + // Implementation of Stackwalker, using x86 context (%ebp, %esp, %eip) and + // stack conventions (saved %ebp at [%ebp], saved %eip at 4[%ebp], or + // alternate conventions as guided by any WindowsFrameInfo available for the + // code in question.). + virtual StackFrame* GetContextFrame(); + virtual StackFrame* GetCallerFrame(const CallStack* stack, + bool stack_scan_allowed); + + // Use windows_frame_info (derived from STACK WIN and FUNC records) + // to construct the frame that called frames.back(). The caller + // takes ownership of the returned frame. Return NULL on failure. + StackFrameX86* GetCallerByWindowsFrameInfo( + const vector &frames, + WindowsFrameInfo* windows_frame_info, + bool stack_scan_allowed); + + // Use cfi_frame_info (derived from STACK CFI records) to construct + // the frame that called frames.back(). The caller takes ownership + // of the returned frame. Return NULL on failure. + StackFrameX86* GetCallerByCFIFrameInfo(const vector &frames, + CFIFrameInfo* cfi_frame_info); + + // Assuming a traditional frame layout --- where the caller's %ebp + // has been pushed just after the return address and the callee's + // %ebp points to the saved %ebp --- construct the frame that called + // frames.back(). The caller takes ownership of the returned frame. + // Return NULL on failure. + StackFrameX86* GetCallerByEBPAtBase(const vector &frames, + bool stack_scan_allowed); + + // Stores the CPU context corresponding to the innermost stack frame to + // be returned by GetContextFrame. + const MDRawContextX86* context_; + + // Our register map, for cfi_walker_. + static const CFIWalker::RegisterSet cfi_register_map_[]; + + // Our CFI frame walker. + const CFIWalker cfi_walker_; +}; + + +} // namespace google_breakpad + + +#endif // PROCESSOR_STACKWALKER_X86_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_x86_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_x86_unittest.cc new file mode 100644 index 0000000000..008b496b59 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/stackwalker_x86_unittest.cc @@ -0,0 +1,2128 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// stackwalker_x86_unittest.cc: Unit tests for StackwalkerX86 class. + +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/test_assembler.h" +#include "common/using_std_string.h" +#include "google_breakpad/common/minidump_format.h" +#include "google_breakpad/processor/basic_source_line_resolver.h" +#include "google_breakpad/processor/call_stack.h" +#include "google_breakpad/processor/code_module.h" +#include "google_breakpad/processor/source_line_resolver_interface.h" +#include "google_breakpad/processor/stack_frame_cpu.h" +#include "processor/stackwalker_unittest_utils.h" +#include "processor/stackwalker_x86.h" +#include "processor/windows_frame_info.h" + +using google_breakpad::BasicSourceLineResolver; +using google_breakpad::CallStack; +using google_breakpad::CodeModule; +using google_breakpad::StackFrameSymbolizer; +using google_breakpad::StackFrame; +using google_breakpad::StackFrameX86; +using google_breakpad::Stackwalker; +using google_breakpad::StackwalkerX86; +using google_breakpad::SystemInfo; +using google_breakpad::WindowsFrameInfo; +using google_breakpad::test_assembler::kLittleEndian; +using google_breakpad::test_assembler::Label; +using google_breakpad::test_assembler::Section; +using std::vector; +using testing::_; +using testing::AnyNumber; +using testing::Return; +using testing::SetArgumentPointee; +using testing::Test; + +class StackwalkerX86Fixture { + public: + StackwalkerX86Fixture() + : stack_section(kLittleEndian), + // Give the two modules reasonable standard locations and names + // for tests to play with. + module1(0x40000000, 0x10000, "module1", "version1"), + module2(0x50000000, 0x10000, "module2", "version2"), + module3(0x771d0000, 0x180000, "module3", "version3"), + module4(0x75f90000, 0x46000, "module4", "version4"), + module5(0x75730000, 0x110000, "module5", "version5"), + module6(0x647f0000, 0x1ba8000, "module6", "version6") { + // Identify the system as a Linux system. 
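+    // The concrete values below are arbitrary test data; system_info is only
+    // handed to the walker under test and to the mock SymbolSupplier
+    // expectations set up in SetModuleSymbols().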
+ system_info.os = "Linux"; + system_info.os_short = "linux"; + system_info.os_version = "Salacious Skink"; + system_info.cpu = "x86"; + system_info.cpu_info = ""; + + // Put distinctive values in the raw CPU context. + BrandContext(&raw_context); + + // Create some modules with some stock debugging information. + modules.Add(&module1); + modules.Add(&module2); + modules.Add(&module3); + modules.Add(&module4); + modules.Add(&module5); + modules.Add(&module6); + + // By default, none of the modules have symbol info; call + // SetModuleSymbols to override this. + EXPECT_CALL(supplier, GetCStringSymbolData(_, _, _, _, _)) + .WillRepeatedly(Return(MockSymbolSupplier::NOT_FOUND)); + + // Avoid GMOCK WARNING "Uninteresting mock function call - returning + // directly" for FreeSymbolData(). + EXPECT_CALL(supplier, FreeSymbolData(_)).Times(AnyNumber()); + + // Reset max_frames_scanned since it's static. + Stackwalker::set_max_frames_scanned(1024); + } + + // Set the Breakpad symbol information that supplier should return for + // MODULE to INFO. + void SetModuleSymbols(MockCodeModule *module, const string &info) { + size_t buffer_size; + char *buffer = supplier.CopySymbolDataAndOwnTheCopy(info, &buffer_size); + EXPECT_CALL(supplier, GetCStringSymbolData(module, &system_info, _, _, _)) + .WillRepeatedly(DoAll(SetArgumentPointee<3>(buffer), + SetArgumentPointee<4>(buffer_size), + Return(MockSymbolSupplier::FOUND))); + } + + // Populate stack_region with the contents of stack_section. Use + // stack_section.start() as the region's starting address. + void RegionFromSection() { + string contents; + ASSERT_TRUE(stack_section.GetContents(&contents)); + stack_region.Init(stack_section.start().Value(), contents); + } + + // Fill RAW_CONTEXT with pseudo-random data, for round-trip checking. + void BrandContext(MDRawContextX86 *raw_context) { + uint8_t x = 173; + for (size_t i = 0; i < sizeof(*raw_context); i++) + reinterpret_cast(raw_context)[i] = (x += 17); + } + + SystemInfo system_info; + MDRawContextX86 raw_context; + Section stack_section; + MockMemoryRegion stack_region; + MockCodeModule module1; + MockCodeModule module2; + MockCodeModule module3; + MockCodeModule module4; + MockCodeModule module5; + MockCodeModule module6; + MockCodeModules modules; + MockSymbolSupplier supplier; + BasicSourceLineResolver resolver; + CallStack call_stack; + const vector *frames; +}; + +class SanityCheck: public StackwalkerX86Fixture, public Test { }; + +TEST_F(SanityCheck, NoResolver) { + stack_section.start() = 0x80000000; + stack_section.D32(0).D32(0); // end-of-stack marker + RegionFromSection(); + raw_context.eip = 0x40000200; + raw_context.ebp = 0x80000000; + + StackFrameSymbolizer frame_symbolizer(NULL, NULL); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + // This should succeed, even without a resolver or supplier. + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + StackFrameX86 *frame = static_cast(frames->at(0)); + // Check that the values from the original raw context made it + // through to the context in the stack frame. 
+  EXPECT_EQ(0, memcmp(&raw_context, &frame->context, sizeof(raw_context)));
+}
+
+class GetContextFrame: public StackwalkerX86Fixture, public Test { };
+
+TEST_F(GetContextFrame, Simple) {
+  stack_section.start() = 0x80000000;
+  stack_section.D32(0).D32(0); // end-of-stack marker
+  RegionFromSection();
+  raw_context.eip = 0x40000200;
+  raw_context.ebp = 0x80000000;
+
+  StackFrameSymbolizer frame_symbolizer(&supplier, &resolver);
+  StackwalkerX86 walker(&system_info, &raw_context, &stack_region, &modules,
+                        &frame_symbolizer);
+  vector<const CodeModule*> modules_without_symbols;
+  vector<const CodeModule*> modules_with_corrupt_symbols;
+  ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols,
+                          &modules_with_corrupt_symbols));
+  ASSERT_EQ(1U, modules_without_symbols.size());
+  ASSERT_EQ("module1", modules_without_symbols[0]->debug_file());
+  ASSERT_EQ(0U, modules_with_corrupt_symbols.size());
+  frames = call_stack.frames();
+  StackFrameX86 *frame = static_cast<StackFrameX86*>(frames->at(0));
+  // Check that the values from the original raw context made it
+  // through to the context in the stack frame.
+  EXPECT_EQ(0, memcmp(&raw_context, &frame->context, sizeof(raw_context)));
+}
+
+// The stackwalker should be able to produce the context frame even
+// without stack memory present.
+TEST_F(GetContextFrame, NoStackMemory) {
+  raw_context.eip = 0x40000200;
+  raw_context.ebp = 0x80000000;
+
+  StackFrameSymbolizer frame_symbolizer(&supplier, &resolver);
+  StackwalkerX86 walker(&system_info, &raw_context, NULL, &modules,
+                        &frame_symbolizer);
+  vector<const CodeModule*> modules_without_symbols;
+  vector<const CodeModule*> modules_with_corrupt_symbols;
+  ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols,
+                          &modules_with_corrupt_symbols));
+  ASSERT_EQ(1U, modules_without_symbols.size());
+  ASSERT_EQ("module1", modules_without_symbols[0]->debug_file());
+  ASSERT_EQ(0U, modules_with_corrupt_symbols.size());
+  frames = call_stack.frames();
+  StackFrameX86 *frame = static_cast<StackFrameX86*>(frames->at(0));
+  // Check that the values from the original raw context made it
+  // through to the context in the stack frame.
+  EXPECT_EQ(0, memcmp(&raw_context, &frame->context, sizeof(raw_context)));
+}
+
+class GetCallerFrame: public StackwalkerX86Fixture, public Test {
+ protected:
+  void IPAddressIsNotInKnownModuleTestImpl(bool has_corrupt_symbols);
+};
+
+// Walk a traditional frame. A traditional frame saves the caller's
+// %ebp just below the return address, and has its own %ebp pointing
+// at the saved %ebp.
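The comment above describes the layout that the Traditional test below exercises. As a rough sketch of that convention only (not Breakpad's GetCallerByEBPAtBase, which additionally validates the recovered values before trusting them), one step of a frame-pointer walk could look like the following; X86CallerState, WalkOneTraditionalFrame, and read_word are illustrative names, with read_word standing in for a stack-memory read such as MemoryRegion::GetMemoryAtAddress.

    #include <stdint.h>

    // Caller registers recovered from one %ebp-chained frame.
    struct X86CallerState {
      uint32_t eip, esp, ebp;
    };

    // One step of a frame-pointer walk under the traditional convention:
    // saved %ebp at [%ebp], return address at [%ebp + 4], and the caller's
    // %esp just past the return address. read_word stands in for reading a
    // 32-bit word of stack memory and fails outside the stack region.
    bool WalkOneTraditionalFrame(uint32_t callee_ebp,
                                 bool (*read_word)(uint32_t address,
                                                   uint32_t* value),
                                 X86CallerState* caller) {
      uint32_t saved_ebp = 0, return_address = 0;
      if (!read_word(callee_ebp, &saved_ebp) ||
          !read_word(callee_ebp + 4, &return_address)) {
        return false;  // %ebp is bogus or the stack memory is missing
      }
      caller->ebp = saved_ebp;
      caller->eip = return_address;
      caller->esp = callee_ebp + 8;
      return true;
    }

Applied to the Traditional test's stack, reading at frame0_ebp and frame0_ebp + 4 yields the frame1 %ebp and %eip values that the FRAME_TRUST_FP assertions below check.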
+TEST_F(GetCallerFrame, Traditional) { + stack_section.start() = 0x80000000; + Label frame0_ebp, frame1_ebp; + stack_section + .Append(12, 0) // frame 0: space + .Mark(&frame0_ebp) // frame 0 %ebp points here + .D32(frame1_ebp) // frame 0: saved %ebp + .D32(0x40008679) // frame 0: return address + .Append(8, 0) // frame 1: space + .Mark(&frame1_ebp) // frame 1 %ebp points here + .D32(0) // frame 1: saved %ebp (stack end) + .D32(0); // frame 1: return address (stack end) + RegionFromSection(); + raw_context.eip = 0x4000c7a5; + raw_context.esp = stack_section.start().Value(); + raw_context.ebp = frame0_ebp.Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + { // To avoid reusing locals by mistake + StackFrameX86 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + EXPECT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0x4000c7a5U, frame0->instruction); + EXPECT_EQ(0x4000c7a5U, frame0->context.eip); + EXPECT_EQ(frame0_ebp.Value(), frame0->context.ebp); + EXPECT_EQ(NULL, frame0->windows_frame_info); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_FP, frame1->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP + | StackFrameX86::CONTEXT_VALID_ESP + | StackFrameX86::CONTEXT_VALID_EBP), + frame1->context_validity); + EXPECT_EQ(0x40008679U, frame1->instruction + 1); + EXPECT_EQ(0x40008679U, frame1->context.eip); + EXPECT_EQ(frame1_ebp.Value(), frame1->context.ebp); + EXPECT_EQ(NULL, frame1->windows_frame_info); + } +} + +// Walk a traditional frame, but use a bogus %ebp value, forcing a scan +// of the stack for something that looks like a return address. +TEST_F(GetCallerFrame, TraditionalScan) { + stack_section.start() = 0x80000000; + Label frame1_ebp; + Label frame1_esp; + stack_section + // frame 0 + .D32(0xf065dc76) // locals area: + .D32(0x46ee2167) // garbage that doesn't look like + .D32(0xbab023ec) // a return address + .D32(frame1_ebp) // saved %ebp (%ebp fails to point here, forcing scan) + .D32(0x4000129d) // return address + // frame 1 + .Mark(&frame1_esp) + .Append(8, 0) // space + .Mark(&frame1_ebp) // %ebp points here + .D32(0) // saved %ebp (stack end) + .D32(0); // return address (stack end) + + RegionFromSection(); + raw_context.eip = 0x4000f49d; + raw_context.esp = stack_section.start().Value(); + // Make the frame pointer bogus, to make the stackwalker scan the stack + // for something that looks like a return address. 
+ raw_context.ebp = 0xd43eed6e; + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + { // To avoid reusing locals by mistake + StackFrameX86 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0x4000f49dU, frame0->instruction); + EXPECT_EQ(0x4000f49dU, frame0->context.eip); + EXPECT_EQ(stack_section.start().Value(), frame0->context.esp); + EXPECT_EQ(0xd43eed6eU, frame0->context.ebp); + EXPECT_EQ(NULL, frame0->windows_frame_info); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame1->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP + | StackFrameX86::CONTEXT_VALID_ESP + | StackFrameX86::CONTEXT_VALID_EBP), + frame1->context_validity); + EXPECT_EQ(0x4000129dU, frame1->instruction + 1); + EXPECT_EQ(0x4000129dU, frame1->context.eip); + EXPECT_EQ(frame1_esp.Value(), frame1->context.esp); + EXPECT_EQ(frame1_ebp.Value(), frame1->context.ebp); + EXPECT_EQ(NULL, frame1->windows_frame_info); + } +} + +// Force scanning for a return address a long way down the stack +TEST_F(GetCallerFrame, TraditionalScanLongWay) { + stack_section.start() = 0x80000000; + Label frame1_ebp; + Label frame1_esp; + stack_section + // frame 0 + .D32(0xf065dc76) // locals area: + .D32(0x46ee2167) // garbage that doesn't look like + .D32(0xbab023ec) // a return address + .Append(20 * 4, 0) // a bunch of space + .D32(frame1_ebp) // saved %ebp (%ebp fails to point here, forcing scan) + .D32(0x4000129d) // return address + // frame 1 + .Mark(&frame1_esp) + .Append(8, 0) // space + .Mark(&frame1_ebp) // %ebp points here + .D32(0) // saved %ebp (stack end) + .D32(0); // return address (stack end) + + RegionFromSection(); + raw_context.eip = 0x4000f49d; + raw_context.esp = stack_section.start().Value(); + // Make the frame pointer bogus, to make the stackwalker scan the stack + // for something that looks like a return address. 
+ raw_context.ebp = 0xd43eed6e; + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + { // To avoid reusing locals by mistake + StackFrameX86 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0x4000f49dU, frame0->instruction); + EXPECT_EQ(0x4000f49dU, frame0->context.eip); + EXPECT_EQ(stack_section.start().Value(), frame0->context.esp); + EXPECT_EQ(0xd43eed6eU, frame0->context.ebp); + EXPECT_EQ(NULL, frame0->windows_frame_info); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame1->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP + | StackFrameX86::CONTEXT_VALID_ESP + | StackFrameX86::CONTEXT_VALID_EBP), + frame1->context_validity); + EXPECT_EQ(0x4000129dU, frame1->instruction + 1); + EXPECT_EQ(0x4000129dU, frame1->context.eip); + EXPECT_EQ(frame1_esp.Value(), frame1->context.esp); + EXPECT_EQ(frame1_ebp.Value(), frame1->context.ebp); + EXPECT_EQ(NULL, frame1->windows_frame_info); + } +} + +// Test that set_max_frames_scanned prevents using stack scanning +// to find caller frames. +TEST_F(GetCallerFrame, ScanningNotAllowed) { + stack_section.start() = 0x80000000; + Label frame1_ebp; + stack_section + // frame 0 + .D32(0xf065dc76) // locals area: + .D32(0x46ee2167) // garbage that doesn't look like + .D32(0xbab023ec) // a return address + .D32(frame1_ebp) // saved %ebp (%ebp fails to point here, forcing scan) + .D32(0x4000129d) // return address + // frame 1 + .Append(8, 0) // space + .Mark(&frame1_ebp) // %ebp points here + .D32(0) // saved %ebp (stack end) + .D32(0); // return address (stack end) + + RegionFromSection(); + raw_context.eip = 0x4000f49d; + raw_context.esp = stack_section.start().Value(); + // Make the frame pointer bogus, to make the stackwalker scan the stack + // for something that looks like a return address. 
+ raw_context.ebp = 0xd43eed6e; + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + Stackwalker::set_max_frames_scanned(0); + + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module1", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(1U, frames->size()); + + { // To avoid reusing locals by mistake + StackFrameX86 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0x4000f49dU, frame0->instruction); + EXPECT_EQ(0x4000f49dU, frame0->context.eip); + EXPECT_EQ(stack_section.start().Value(), frame0->context.esp); + EXPECT_EQ(0xd43eed6eU, frame0->context.ebp); + EXPECT_EQ(NULL, frame0->windows_frame_info); + } +} + +// Use Windows frame data (a "STACK WIN 4" record, from a +// FrameTypeFrameData DIA record) to walk a stack frame. +TEST_F(GetCallerFrame, WindowsFrameData) { + SetModuleSymbols(&module1, + "STACK WIN 4 aa85 176 0 0 4 10 4 0 1" + " $T2 $esp .cbSavedRegs + =" + " $T0 .raSearchStart =" + " $eip $T0 ^ =" + " $esp $T0 4 + =" + " $ebx $T2 4 - ^ =" + " $edi $T2 8 - ^ =" + " $esi $T2 12 - ^ =" + " $ebp $T2 16 - ^ =\n"); + Label frame1_esp, frame1_ebp; + stack_section.start() = 0x80000000; + stack_section + // frame 0 + .D32(frame1_ebp) // saved regs: %ebp + .D32(0xa7120d1a) // %esi + .D32(0x630891be) // %edi + .D32(0x9068a878) // %ebx + .D32(0xa08ea45f) // locals: unused + .D32(0x40001350) // return address + // frame 1 + .Mark(&frame1_esp) + .Append(12, 0) // empty space + .Mark(&frame1_ebp) + .D32(0) // saved %ebp (stack end) + .D32(0); // saved %eip (stack end) + + RegionFromSection(); + raw_context.eip = 0x4000aa85; + raw_context.esp = stack_section.start().Value(); + raw_context.ebp = 0xf052c1de; // should not be needed to walk frame + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + { // To avoid reusing locals by mistake + StackFrameX86 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0x4000aa85U, frame0->instruction); + EXPECT_EQ(0x4000aa85U, frame0->context.eip); + EXPECT_EQ(stack_section.start().Value(), frame0->context.esp); + EXPECT_EQ(0xf052c1deU, frame0->context.ebp); + EXPECT_TRUE(frame0->windows_frame_info != NULL); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame1->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP + | StackFrameX86::CONTEXT_VALID_ESP + | StackFrameX86::CONTEXT_VALID_EBP + | StackFrameX86::CONTEXT_VALID_EBX + | StackFrameX86::CONTEXT_VALID_ESI + | 
StackFrameX86::CONTEXT_VALID_EDI), + frame1->context_validity); + EXPECT_EQ(0x40001350U, frame1->instruction + 1); + EXPECT_EQ(0x40001350U, frame1->context.eip); + EXPECT_EQ(frame1_esp.Value(), frame1->context.esp); + EXPECT_EQ(frame1_ebp.Value(), frame1->context.ebp); + EXPECT_EQ(0x9068a878U, frame1->context.ebx); + EXPECT_EQ(0xa7120d1aU, frame1->context.esi); + EXPECT_EQ(0x630891beU, frame1->context.edi); + EXPECT_EQ(NULL, frame1->windows_frame_info); + } +} + +// Use Windows frame data (a "STACK WIN 4" record, from a +// FrameTypeFrameData DIA record) to walk a stack frame where the stack +// is aligned and we must search +TEST_F(GetCallerFrame, WindowsFrameDataAligned) { + SetModuleSymbols(&module1, + "STACK WIN 4 aa85 176 0 0 4 4 8 0 1" + " $T1 .raSearch =" + " $T0 $T1 4 - 8 @ =" + " $ebp $T1 4 - ^ =" + " $eip $T1 ^ =" + " $esp $T1 4 + ="); + Label frame0_esp, frame0_ebp; + Label frame1_esp, frame1_ebp; + stack_section.start() = 0x80000000; + stack_section + // frame 0 + .Mark(&frame0_esp) + .D32(0x0ffa0ffa) // unused saved register + .D32(0xdeaddead) // locals + .D32(0xbeefbeef) + .D32(0) // 8-byte alignment + .Mark(&frame0_ebp) + .D32(frame1_ebp) // saved %ebp + .D32(0x5000129d) // return address + // frame 1 + .Mark(&frame1_esp) + .D32(0x1) // parameter + .Mark(&frame1_ebp) + .D32(0) // saved %ebp (stack end) + .D32(0); // saved %eip (stack end) + + RegionFromSection(); + raw_context.eip = 0x4000aa85; + raw_context.esp = frame0_esp.Value(); + raw_context.ebp = frame0_ebp.Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(1U, modules_without_symbols.size()); + ASSERT_EQ("module2", modules_without_symbols[0]->debug_file()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + { // To avoid reusing locals by mistake + StackFrameX86 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0x4000aa85U, frame0->instruction); + EXPECT_EQ(0x4000aa85U, frame0->context.eip); + EXPECT_EQ(frame0_esp.Value(), frame0->context.esp); + EXPECT_EQ(frame0_ebp.Value(), frame0->context.ebp); + EXPECT_TRUE(frame0->windows_frame_info != NULL); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame1->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP + | StackFrameX86::CONTEXT_VALID_ESP + | StackFrameX86::CONTEXT_VALID_EBP), + frame1->context_validity); + EXPECT_EQ(0x5000129dU, frame1->instruction + 1); + EXPECT_EQ(0x5000129dU, frame1->context.eip); + EXPECT_EQ(frame1_esp.Value(), frame1->context.esp); + EXPECT_EQ(frame1_ebp.Value(), frame1->context.ebp); + EXPECT_EQ(NULL, frame1->windows_frame_info); + } +} + +// Use Windows frame data (a "STACK WIN 4" record, from a +// FrameTypeFrameData DIA record) to walk a frame, and depend on the +// parameter size from the callee as well. 
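The "STACK WIN" lines passed to SetModuleSymbols in the tests above and below are Breakpad symbol-file records whose numeric fields are positional hex values. A sketch of how this editor reads the field order; the struct and its names are illustrative, not Breakpad's own parser:

    #include <stdint.h>

    // Positional fields of a "STACK WIN" record, in the order they appear
    // after the keyword (all values hex).
    struct StackWinRecordSketch {
      int type;                   // 4 = FrameData, 0 = FPO
      uint32_t rva;               // start of the covered code range (module-relative)
      uint32_t code_size;
      uint32_t prologue_size;
      uint32_t epilogue_size;
      uint32_t parameter_size;    // bytes of parameters pushed by the caller
      uint32_t saved_register_size;
      uint32_t local_size;
      uint32_t max_stack_size;
      bool has_program_string;    // if 1, a postfix program string follows;
                                  // if 0, an allocates_base_pointer flag does
    };

    // Example, from module2's record in the test below:
    //   "STACK WIN 4 aa85 176 0 0 4 10 4 0 1 <program string>"
    // reads as: FrameData record for [0xaa85, 0xaa85 + 0x176), no prologue or
    // epilogue bytes, 4 bytes of parameters, 0x10 bytes of saved registers,
    // 4 bytes of locals, and a program string -- which is why the test expects
    // parameter_size == 4 rather than the bogus 0xbeef from the FUNC record.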
+TEST_F(GetCallerFrame, WindowsFrameDataParameterSize) { + SetModuleSymbols(&module1, "FUNC 1000 100 c module1::wheedle\n"); + SetModuleSymbols(&module2, + // Note bogus parameter size in FUNC record; the stack walker + // should prefer the STACK WIN record, and see '4' below. + "FUNC aa85 176 beef module2::whine\n" + "STACK WIN 4 aa85 176 0 0 4 10 4 0 1" + " $T2 $esp .cbLocals + .cbSavedRegs + =" + " $T0 .raSearchStart =" + " $eip $T0 ^ =" + " $esp $T0 4 + =" + " $ebp $T0 20 - ^ =" + " $ebx $T0 8 - ^ =\n"); + Label frame0_esp, frame0_ebp; + Label frame1_esp; + Label frame2_esp, frame2_ebp; + stack_section.start() = 0x80000000; + stack_section + // frame 0, in module1::wheedle. Traditional frame. + .Mark(&frame0_esp) + .Append(16, 0) // frame space + .Mark(&frame0_ebp) + .D32(0x6fa902e0) // saved %ebp. Not a frame pointer. + .D32(0x5000aa95) // return address, in module2::whine + // frame 1, in module2::whine. FrameData frame. + .Mark(&frame1_esp) + .D32(0xbaa0cb7a) // argument 3 passed to module1::wheedle + .D32(0xbdc92f9f) // argument 2 + .D32(0x0b1d8442) // argument 1 + .D32(frame2_ebp) // saved %ebp + .D32(0xb1b90a15) // unused + .D32(0xf18e072d) // unused + .D32(0x2558c7f3) // saved %ebx + .D32(0x0365e25e) // unused + .D32(0x2a179e38) // return address; $T0 points here + // frame 2, in no module + .Mark(&frame2_esp) + .Append(12, 0) // empty space + .Mark(&frame2_ebp) + .D32(0) // saved %ebp (stack end) + .D32(0); // saved %eip (stack end) + + RegionFromSection(); + raw_context.eip = 0x40001004; // in module1::wheedle + raw_context.esp = stack_section.start().Value(); + raw_context.ebp = frame0_ebp.Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(3U, frames->size()); + + { // To avoid reusing locals by mistake + StackFrameX86 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0x40001004U, frame0->instruction); + EXPECT_EQ(0x40001004U, frame0->context.eip); + EXPECT_EQ(frame0_esp.Value(), frame0->context.esp); + EXPECT_EQ(frame0_ebp.Value(), frame0->context.ebp); + EXPECT_EQ(&module1, frame0->module); + EXPECT_EQ("module1::wheedle", frame0->function_name); + EXPECT_EQ(0x40001000U, frame0->function_base); + // The FUNC record for module1::wheedle should have produced a + // WindowsFrameInfo structure with only the parameter size valid. 
+ ASSERT_TRUE(frame0->windows_frame_info != NULL); + EXPECT_EQ(WindowsFrameInfo::VALID_PARAMETER_SIZE, + frame0->windows_frame_info->valid); + EXPECT_EQ(WindowsFrameInfo::STACK_INFO_UNKNOWN, + frame0->windows_frame_info->type_); + EXPECT_EQ(12U, frame0->windows_frame_info->parameter_size); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_FP, frame1->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP + | StackFrameX86::CONTEXT_VALID_ESP + | StackFrameX86::CONTEXT_VALID_EBP), + frame1->context_validity); + EXPECT_EQ(0x5000aa95U, frame1->instruction + 1); + EXPECT_EQ(0x5000aa95U, frame1->context.eip); + EXPECT_EQ(frame1_esp.Value(), frame1->context.esp); + EXPECT_EQ(0x6fa902e0U, frame1->context.ebp); + EXPECT_EQ(&module2, frame1->module); + EXPECT_EQ("module2::whine", frame1->function_name); + EXPECT_EQ(0x5000aa85U, frame1->function_base); + ASSERT_TRUE(frame1->windows_frame_info != NULL); + EXPECT_EQ(WindowsFrameInfo::VALID_ALL, frame1->windows_frame_info->valid); + EXPECT_EQ(WindowsFrameInfo::STACK_INFO_FRAME_DATA, + frame1->windows_frame_info->type_); + // This should not see the 0xbeef parameter size from the FUNC + // record, but should instead see the STACK WIN record. + EXPECT_EQ(4U, frame1->windows_frame_info->parameter_size); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame2 = static_cast(frames->at(2)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame2->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP + | StackFrameX86::CONTEXT_VALID_ESP + | StackFrameX86::CONTEXT_VALID_EBP + | StackFrameX86::CONTEXT_VALID_EBX), + frame2->context_validity); + EXPECT_EQ(0x2a179e38U, frame2->instruction + 1); + EXPECT_EQ(0x2a179e38U, frame2->context.eip); + EXPECT_EQ(frame2_esp.Value(), frame2->context.esp); + EXPECT_EQ(frame2_ebp.Value(), frame2->context.ebp); + EXPECT_EQ(0x2558c7f3U, frame2->context.ebx); + EXPECT_EQ(NULL, frame2->module); + EXPECT_EQ(NULL, frame2->windows_frame_info); + } +} + +// Use Windows frame data (a "STACK WIN 4" record, from a +// FrameTypeFrameData DIA record) to walk a stack frame, where the +// expression fails to yield both an $eip and an $ebp value, and the stack +// walker must scan. +TEST_F(GetCallerFrame, WindowsFrameDataScan) { + SetModuleSymbols(&module1, + "STACK WIN 4 c8c 111 0 0 4 10 4 0 1 bad program string\n"); + // Mark frame 1's PC as the end of the stack. 
+ SetModuleSymbols(&module2, + "FUNC 7c38 accf 0 module2::function\n" + "STACK WIN 4 7c38 accf 0 0 4 10 4 0 1 $eip 0 = $ebp 0 =\n"); + Label frame1_esp; + stack_section.start() = 0x80000000; + stack_section + // frame 0 + .Append(16, 0x2a) // unused, garbage + .D32(0x50007ce9) // return address + // frame 1 + .Mark(&frame1_esp) + .Append(8, 0); // empty space + + RegionFromSection(); + raw_context.eip = 0x40000c9c; + raw_context.esp = stack_section.start().Value(); + raw_context.ebp = 0x2ae314cd; // should not be needed to walk frame + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + { // To avoid reusing locals by mistake + StackFrameX86 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0x40000c9cU, frame0->instruction); + EXPECT_EQ(0x40000c9cU, frame0->context.eip); + EXPECT_EQ(stack_section.start().Value(), frame0->context.esp); + EXPECT_EQ(0x2ae314cdU, frame0->context.ebp); + EXPECT_TRUE(frame0->windows_frame_info != NULL); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame1->trust); + // I'd argue that CONTEXT_VALID_EBP shouldn't be here, since the walker + // does not actually fetch the EBP after a scan (forcing the next frame + // to be scanned as well). But let's grandfather the existing behavior in + // for now. + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP + | StackFrameX86::CONTEXT_VALID_ESP + | StackFrameX86::CONTEXT_VALID_EBP), + frame1->context_validity); + EXPECT_EQ(0x50007ce9U, frame1->instruction + 1); + EXPECT_EQ(0x50007ce9U, frame1->context.eip); + EXPECT_EQ(frame1_esp.Value(), frame1->context.esp); + EXPECT_TRUE(frame1->windows_frame_info != NULL); + } +} + +// Use Windows frame data (a "STACK WIN 4" record, from a +// FrameTypeFrameData DIA record) to walk a stack frame, where the +// expression yields an $eip that falls outside of any module, and the +// stack walker must scan. +TEST_F(GetCallerFrame, WindowsFrameDataBadEIPScan) { + SetModuleSymbols(&module1, + "STACK WIN 4 6e6 e7 0 0 0 8 4 0 1" + // A traditional frame, actually. + " $eip $ebp 4 + ^ = $esp $ebp 8 + = $ebp $ebp ^ =\n"); + // Mark frame 1's PC as the end of the stack. + SetModuleSymbols(&module2, + "FUNC cfdb 8406 0 module2::function\n" + "STACK WIN 4 cfdb 8406 0 0 0 0 0 0 1 $eip 0 = $ebp 0 =\n"); + stack_section.start() = 0x80000000; + + // In this stack, the context's %ebp is pointing at the wrong place, so + // the stack walker needs to scan to find the return address, and then + // scan again to find the caller's saved %ebp. + Label frame0_ebp, frame1_ebp, frame1_esp; + stack_section + // frame 0 + .Append(8, 0x2a) // garbage + .Mark(&frame0_ebp) // frame 0 %ebp points here, but should point + // at *** below + // The STACK WIN record says that the following two values are + // frame 1's saved %ebp and return address, but the %ebp is wrong; + // they're garbage. The stack walker will scan for the right values. 
+ .D32(0x3d937b2b) // alleged to be frame 1's saved %ebp + .D32(0x17847f5b) // alleged to be frame 1's return address + .D32(frame1_ebp) // frame 1's real saved %ebp; scan will find + .D32(0x2b2b2b2b) // first word of realigned register save area + // *** frame 0 %ebp ought to be pointing here + .D32(0x2c2c2c2c) // realigned locals area + .D32(0x5000d000) // frame 1's real saved %eip; scan will find + // Frame 1, in module2::function. The STACK WIN record describes + // this as the oldest frame, without referring to its contents, so + // we needn't to provide any actual data here. + .Mark(&frame1_esp) + .Mark(&frame1_ebp) // frame 1 %ebp points here + // A dummy value for frame 1's %ebp to point at. The scan recognizes the + // saved %ebp because it points to a valid word in the stack memory region. + .D32(0x2d2d2d2d); + + RegionFromSection(); + raw_context.eip = 0x40000700; + raw_context.esp = stack_section.start().Value(); + raw_context.ebp = frame0_ebp.Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + { // To avoid reusing locals by mistake + StackFrameX86 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0x40000700U, frame0->instruction); + EXPECT_EQ(0x40000700U, frame0->context.eip); + EXPECT_EQ(stack_section.start().Value(), frame0->context.esp); + EXPECT_EQ(frame0_ebp.Value(), frame0->context.ebp); + EXPECT_TRUE(frame0->windows_frame_info != NULL); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI_SCAN, frame1->trust); + // I'd argue that CONTEXT_VALID_EBP shouldn't be here, since the + // walker does not actually fetch the EBP after a scan (forcing the + // next frame to be scanned as well). But let's grandfather the existing + // behavior in for now. + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP + | StackFrameX86::CONTEXT_VALID_ESP + | StackFrameX86::CONTEXT_VALID_EBP), + frame1->context_validity); + EXPECT_EQ(0x5000d000U, frame1->instruction + 1); + EXPECT_EQ(0x5000d000U, frame1->context.eip); + EXPECT_EQ(frame1_esp.Value(), frame1->context.esp); + EXPECT_EQ(frame1_ebp.Value(), frame1->context.ebp); + EXPECT_TRUE(frame1->windows_frame_info != NULL); + } +} + +// Use Windows FrameTypeFPO data to walk a stack frame for a function that +// does not modify %ebp from the value it had in the caller. +TEST_F(GetCallerFrame, WindowsFPOUnchangedEBP) { + SetModuleSymbols(&module1, + // Note bogus parameter size in FUNC record; the walker + // should prefer the STACK WIN record, and see the '8' below. + "FUNC e8a8 100 feeb module1::discombobulated\n" + "STACK WIN 0 e8a8 100 0 0 8 4 10 0 0 0\n"); + Label frame0_esp; + Label frame1_esp, frame1_ebp; + stack_section.start() = 0x80000000; + stack_section + // frame 0, in module1::wheedle. FrameTypeFPO (STACK WIN 0) frame. + .Mark(&frame0_esp) + // no outgoing parameters; this is the youngest frame. 
+ .D32(0x7c521352) // four bytes of saved registers + .Append(0x10, 0x42) // local area + .D32(0x40009b5b) // return address, in module1, no function + // frame 1, in module1, no function. + .Mark(&frame1_esp) + .D32(0xf60ea7fc) // junk + .Mark(&frame1_ebp) + .D32(0) // saved %ebp (stack end) + .D32(0); // saved %eip (stack end) + + RegionFromSection(); + raw_context.eip = 0x4000e8b8; // in module1::whine + raw_context.esp = stack_section.start().Value(); + // Frame pointer unchanged from caller. + raw_context.ebp = frame1_ebp.Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + { // To avoid reusing locals by mistake + StackFrameX86 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0x4000e8b8U, frame0->instruction); + EXPECT_EQ(0x4000e8b8U, frame0->context.eip); + EXPECT_EQ(frame0_esp.Value(), frame0->context.esp); + // unchanged from caller + EXPECT_EQ(frame1_ebp.Value(), frame0->context.ebp); + EXPECT_EQ(&module1, frame0->module); + EXPECT_EQ("module1::discombobulated", frame0->function_name); + EXPECT_EQ(0x4000e8a8U, frame0->function_base); + // The STACK WIN record for module1::discombobulated should have + // produced a fully populated WindowsFrameInfo structure. + ASSERT_TRUE(frame0->windows_frame_info != NULL); + EXPECT_EQ(WindowsFrameInfo::VALID_ALL, frame0->windows_frame_info->valid); + EXPECT_EQ(WindowsFrameInfo::STACK_INFO_FPO, + frame0->windows_frame_info->type_); + EXPECT_EQ(0x10U, frame0->windows_frame_info->local_size); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame1->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP + | StackFrameX86::CONTEXT_VALID_ESP + | StackFrameX86::CONTEXT_VALID_EBP), + frame1->context_validity); + EXPECT_EQ(0x40009b5bU, frame1->instruction + 1); + EXPECT_EQ(0x40009b5bU, frame1->context.eip); + EXPECT_EQ(frame1_esp.Value(), frame1->context.esp); + EXPECT_EQ(frame1_ebp.Value(), frame1->context.ebp); + EXPECT_EQ(&module1, frame1->module); + EXPECT_EQ("", frame1->function_name); + EXPECT_EQ(NULL, frame1->windows_frame_info); + } +} + +// Use Windows FrameTypeFPO data to walk a stack frame for a function +// that uses %ebp for its own purposes, saving the value it had in the +// caller in the standard place in the saved register area. +TEST_F(GetCallerFrame, WindowsFPOUsedEBP) { + SetModuleSymbols(&module1, + // Note bogus parameter size in FUNC record; the walker + // should prefer the STACK WIN record, and see the '8' below. + "FUNC 9aa8 e6 abbe module1::RaisedByTheAliens\n" + "STACK WIN 0 9aa8 e6 a 0 10 8 4 0 0 1\n"); + Label frame0_esp; + Label frame1_esp, frame1_ebp; + stack_section.start() = 0x80000000; + stack_section + // frame 0, in module1::wheedle. FrameTypeFPO (STACK WIN 0) frame. + .Mark(&frame0_esp) + // no outgoing parameters; this is the youngest frame. 
+ .D32(frame1_ebp) // saved register area: saved %ebp + .D32(0xb68bd5f9) // saved register area: something else + .D32(0xd25d05fc) // local area + .D32(0x4000debe) // return address, in module1, no function + // frame 1, in module1, no function. + .Mark(&frame1_esp) + .D32(0xf0c9a974) // junk + .Mark(&frame1_ebp) + .D32(0) // saved %ebp (stack end) + .D32(0); // saved %eip (stack end) + + RegionFromSection(); + raw_context.eip = 0x40009ab8; // in module1::RaisedByTheAliens + raw_context.esp = stack_section.start().Value(); + // RaisedByTheAliens uses %ebp for its own mysterious purposes. + raw_context.ebp = 0xecbdd1a5; + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + { // To avoid reusing locals by mistake + StackFrameX86 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0x40009ab8U, frame0->instruction); + EXPECT_EQ(0x40009ab8U, frame0->context.eip); + EXPECT_EQ(frame0_esp.Value(), frame0->context.esp); + EXPECT_EQ(0xecbdd1a5, frame0->context.ebp); + EXPECT_EQ(&module1, frame0->module); + EXPECT_EQ("module1::RaisedByTheAliens", frame0->function_name); + EXPECT_EQ(0x40009aa8U, frame0->function_base); + // The STACK WIN record for module1::RaisedByTheAliens should have + // produced a fully populated WindowsFrameInfo structure. + ASSERT_TRUE(frame0->windows_frame_info != NULL); + EXPECT_EQ(WindowsFrameInfo::VALID_ALL, frame0->windows_frame_info->valid); + EXPECT_EQ(WindowsFrameInfo::STACK_INFO_FPO, + frame0->windows_frame_info->type_); + EXPECT_EQ("", frame0->windows_frame_info->program_string); + EXPECT_TRUE(frame0->windows_frame_info->allocates_base_pointer); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame1->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP + | StackFrameX86::CONTEXT_VALID_ESP + | StackFrameX86::CONTEXT_VALID_EBP), + frame1->context_validity); + EXPECT_EQ(0x4000debeU, frame1->instruction + 1); + EXPECT_EQ(0x4000debeU, frame1->context.eip); + EXPECT_EQ(frame1_esp.Value(), frame1->context.esp); + EXPECT_EQ(frame1_ebp.Value(), frame1->context.ebp); + EXPECT_EQ(&module1, frame1->module); + EXPECT_EQ("", frame1->function_name); + EXPECT_EQ(NULL, frame1->windows_frame_info); + } +} + +// This is a regression unit test which covers a bug which has to do with +// FPO-optimized Windows system call stubs in the context frame. There is +// a more recent Windows system call dispatch mechanism which differs from +// the one which is being tested here. The newer system call dispatch +// mechanism creates an extra context frame (KiFastSystemCallRet). 
+TEST_F(GetCallerFrame, WindowsFPOSystemCall) { + SetModuleSymbols(&module3, // ntdll.dll + "PUBLIC 1f8ac c ZwWaitForSingleObject\n" + "STACK WIN 0 1f8ac 1b 0 0 c 0 0 0 0 0\n"); + SetModuleSymbols(&module4, // kernelbase.dll + "PUBLIC 109f9 c WaitForSingleObjectEx\n" + "PUBLIC 36590 0 _except_handler4\n" + "STACK WIN 4 109f9 df c 0 c c 48 0 1 $T0 $ebp = $eip " + "$T0 4 + ^ = $ebp $T0 ^ = $esp $T0 8 + = $L " + "$T0 .cbSavedRegs - = $P $T0 8 + .cbParams + =\n" + "STACK WIN 4 36590 154 17 0 10 0 14 0 1 $T0 $ebp = $eip " + "$T0 4 + ^ = $ebp $T0 ^ = $esp $T0 8 + = $L $T0 " + ".cbSavedRegs - = $P $T0 8 + .cbParams + =\n"); + SetModuleSymbols(&module5, // kernel32.dll + "PUBLIC 11136 8 WaitForSingleObject\n" + "PUBLIC 11151 c WaitForSingleObjectExImplementation\n" + "STACK WIN 4 11136 16 5 0 8 0 0 0 1 $T0 $ebp = $eip " + "$T0 4 + ^ = $ebp $T0 ^ = $esp $T0 8 + = $L " + "$T0 .cbSavedRegs - = $P $T0 8 + .cbParams + =\n" + "STACK WIN 4 11151 7a 5 0 c 0 0 0 1 $T0 $ebp = $eip " + "$T0 4 + ^ = $ebp $T0 ^ = $esp $T0 8 + = $L " + "$T0 .cbSavedRegs - = $P $T0 8 + .cbParams + =\n"); + SetModuleSymbols(&module6, // chrome.dll + "FILE 7038 some_file_name.h\n" + "FILE 839776 some_file_name.cc\n" + "FUNC 217fda 17 4 function_217fda\n" + "217fda 4 102 839776\n" + "FUNC 217ff1 a 4 function_217ff1\n" + "217ff1 0 594 7038\n" + "217ff1 a 596 7038\n" + "STACK WIN 0 217ff1 a 0 0 4 0 0 0 0 0\n"); + + Label frame0_esp, frame1_esp; + Label frame1_ebp, frame2_ebp, frame3_ebp; + stack_section.start() = 0x002ff290; + stack_section + .Mark(&frame0_esp) + .D32(0x771ef8c1) // EIP in frame 0 (system call) + .D32(0x75fa0a91) // return address of frame 0 + .Mark(&frame1_esp) + .D32(0x000017b0) // args to child + .D32(0x00000000) + .D32(0x002ff2d8) + .D32(0x88014a2e) + .D32(0x002ff364) + .D32(0x000017b0) + .D32(0x00000000) + .D32(0x00000024) + .D32(0x00000001) + .D32(0x00000000) + .D32(0x00000000) + .D32(0x00000000) + .D32(0x00000000) + .D32(0x00000000) + .D32(0x00000000) + .D32(0x00000000) + .D32(0x9e3b9800) + .D32(0xfffffff7) + .D32(0x00000000) + .D32(0x002ff2a4) + .D32(0x64a07ff1) // random value to be confused with a return address + .D32(0x002ff8dc) + .D32(0x75fc6590) // random value to be confused with a return address + .D32(0xfdd2c6ea) + .D32(0x00000000) + .Mark(&frame1_ebp) + .D32(frame2_ebp) // Child EBP + .D32(0x75741194) // return address of frame 1 + .D32(0x000017b0) // args to child + .D32(0x0036ee80) + .D32(0x00000000) + .D32(0x65bc7d14) + .Mark(&frame2_ebp) + .D32(frame3_ebp) // Child EBP + .D32(0x75741148) // return address of frame 2 + .D32(0x000017b0) // args to child + .D32(0x0036ee80) + .D32(0x00000000) + .Mark(&frame3_ebp) + .D32(0) // saved %ebp (stack end) + .D32(0); // saved %eip (stack end) + + RegionFromSection(); + raw_context.eip = 0x771ef8c1; // in ntdll::ZwWaitForSingleObject + raw_context.esp = stack_section.start().Value(); + ASSERT_TRUE(raw_context.esp == frame0_esp.Value()); + raw_context.ebp = frame1_ebp.Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + + ASSERT_EQ(4U, frames->size()); + + { // To avoid reusing locals by mistake + StackFrameX86 *frame0 = 
static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0x771ef8c1U, frame0->instruction); + EXPECT_EQ(0x771ef8c1U, frame0->context.eip); + EXPECT_EQ(frame0_esp.Value(), frame0->context.esp); + EXPECT_EQ(frame1_ebp.Value(), frame0->context.ebp); + EXPECT_EQ(&module3, frame0->module); + EXPECT_EQ("ZwWaitForSingleObject", frame0->function_name); + // The STACK WIN record for module3!ZwWaitForSingleObject should have + // produced a fully populated WindowsFrameInfo structure. + ASSERT_TRUE(frame0->windows_frame_info != NULL); + EXPECT_EQ(WindowsFrameInfo::VALID_ALL, frame0->windows_frame_info->valid); + EXPECT_EQ(WindowsFrameInfo::STACK_INFO_FPO, + frame0->windows_frame_info->type_); + EXPECT_EQ("", frame0->windows_frame_info->program_string); + EXPECT_FALSE(frame0->windows_frame_info->allocates_base_pointer); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame1->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP + | StackFrameX86::CONTEXT_VALID_ESP + | StackFrameX86::CONTEXT_VALID_EBP), + frame1->context_validity); + EXPECT_EQ(0x75fa0a91U, frame1->instruction + 1); + EXPECT_EQ(0x75fa0a91U, frame1->context.eip); + EXPECT_EQ(frame1_esp.Value(), frame1->context.esp); + EXPECT_EQ(frame1_ebp.Value(), frame1->context.ebp); + EXPECT_EQ(&module4, frame1->module); + EXPECT_EQ("WaitForSingleObjectEx", frame1->function_name); + // The STACK WIN record for module4!WaitForSingleObjectEx should have + // produced a fully populated WindowsFrameInfo structure. + ASSERT_TRUE(frame1->windows_frame_info != NULL); + EXPECT_EQ(WindowsFrameInfo::VALID_ALL, frame1->windows_frame_info->valid); + EXPECT_EQ(WindowsFrameInfo::STACK_INFO_FRAME_DATA, + frame1->windows_frame_info->type_); + EXPECT_EQ("$T0 $ebp = $eip $T0 4 + ^ = $ebp $T0 ^ = $esp $T0 8 + = $L " + "$T0 .cbSavedRegs - = $P $T0 8 + .cbParams + =", + frame1->windows_frame_info->program_string); + EXPECT_FALSE(frame1->windows_frame_info->allocates_base_pointer); + } +} + +// Scan the stack for a better return address and potentially skip frames +// when the calculated return address is not in a known module. 
Note, that +// the span of this scan is somewhat arbitrarily limited to 120 search words +// for the context frame and 30 search words (pointers) for the other frames: +// const int kRASearchWords = 30; +// This means that frames can be skipped only when their size is relatively +// small: smaller than 4 * kRASearchWords * sizeof(InstructionType) +TEST_F(GetCallerFrame, ReturnAddressIsNotInKnownModule) { + MockCodeModule msvcrt_dll(0x77be0000, 0x58000, "msvcrt.dll", "version1"); + SetModuleSymbols(&msvcrt_dll, // msvcrt.dll + "PUBLIC 38180 0 wcsstr\n" + "STACK WIN 4 38180 61 10 0 8 0 0 0 1 $T0 $ebp = $eip $T0 " + "4 + ^ = $ebp $T0 ^ = $esp $T0 8 + = $L $T0 .cbSavedRegs " + "- = $P $T0 4 + .cbParams + =\n"); + + MockCodeModule kernel32_dll(0x7c800000, 0x103000, "kernel32.dll", "version1"); + SetModuleSymbols(&kernel32_dll, // kernel32.dll + "PUBLIC efda 8 FindNextFileW\n" + "STACK WIN 4 efda 1bb c 0 8 8 3c 0 1 $T0 $ebp = $eip $T0 " + "4 + ^ = $ebp $T0 ^ = $esp $T0 8 + = $L $T0 .cbSavedRegs " + "- = $P $T0 4 + .cbParams + =\n"); + + MockCodeModule chrome_dll(0x1c30000, 0x28C8000, "chrome.dll", "version1"); + SetModuleSymbols(&chrome_dll, // chrome.dll + "FUNC e3cff 4af 0 file_util::FileEnumerator::Next()\n" + "e3cff 1a 711 2505\n" + "STACK WIN 4 e3cff 4af 20 0 4 c 94 0 1 $T1 .raSearch = " + "$T0 $T1 4 - 8 @ = $ebp $T1 4 - ^ = $eip $T1 ^ = $esp " + "$T1 4 + = $20 $T0 152 - ^ = $23 $T0 156 - ^ = $24 " + "$T0 160 - ^ =\n"); + + // Create some modules with some stock debugging information. + MockCodeModules local_modules; + local_modules.Add(&msvcrt_dll); + local_modules.Add(&kernel32_dll); + local_modules.Add(&chrome_dll); + + Label frame0_esp; + Label frame0_ebp; + Label frame1_ebp; + Label frame2_ebp; + Label frame3_ebp; + + stack_section.start() = 0x0932f2d0; + stack_section + .Mark(&frame0_esp) + .D32(0x0764e000) + .D32(0x0764e068) + .Mark(&frame0_ebp) + .D32(frame1_ebp) // Child EBP + .D32(0x001767a0) // return address of frame 0 + // Not in known module + .D32(0x0764e0c6) + .D32(0x001bb1b8) + .D32(0x0764e068) + .D32(0x00000003) + .D32(0x0764e068) + .D32(0x00000003) + .D32(0x07578828) + .D32(0x0764e000) + .D32(0x00000000) + .D32(0x001c0010) + .D32(0x0764e0c6) + .Mark(&frame1_ebp) + .D32(frame2_ebp) // Child EBP + .D32(0x7c80f10f) // return address of frame 1 + // inside kernel32!FindNextFileW + .D32(0x000008f8) + .D32(0x00000000) + .D32(0x00000000) + .D32(0x00000000) + .D32(0x0932f34c) + .D32(0x0764e000) + .D32(0x00001000) + .D32(0x00000000) + .D32(0x00000001) + .D32(0x00000000) + .D32(0x00000000) + .D32(0x0932f6a8) + .D32(0x00000000) + .D32(0x0932f6d8) + .D32(0x00000000) + .D32(0x000000d6) + .D32(0x0764e000) + .D32(0x7ff9a000) + .D32(0x0932f3fc) + .D32(0x00000001) + .D32(0x00000001) + .D32(0x07578828) + .D32(0x0000002e) + .D32(0x0932f340) + .D32(0x0932eef4) + .D32(0x0932ffdc) + .D32(0x7c839ad8) + .D32(0x7c80f0d8) + .D32(0x00000000) + .Mark(&frame2_ebp) + .D32(frame3_ebp) // Child EBP + .D32(0x01d13f91) // return address of frame 2 + // inside chrome_dll!file_util::FileEnumerator::Next + .D32(0x07578828) + .D32(0x0932f6ac) + .D32(0x0932f9c4) + .D32(0x0932f9b4) + .D32(0x00000000) + .D32(0x00000003) + .D32(0x0932f978) + .D32(0x01094330) + .D32(0x00000000) + .D32(0x00000001) + .D32(0x01094330) + .D32(0x00000000) + .D32(0x00000000) + .D32(0x07f30000) + .D32(0x01c3ba17) + .D32(0x08bab840) + .D32(0x07f31580) + .D32(0x00000000) + .D32(0x00000007) + .D32(0x0932f940) + .D32(0x0000002e) + .D32(0x0932f40c) + .D32(0x01d13b53) + .D32(0x0932f958) + .D32(0x00000001) + .D32(0x00000007) + .D32(0x0932f940) 
+ .D32(0x0000002e) + .D32(0x00000000) + .D32(0x0932f6ac) + .D32(0x01e13ef0) + .D32(0x00000001) + .D32(0x00000007) + .D32(0x0932f958) + .D32(0x08bab840) + .D32(0x0932f9b4) + .D32(0x00000000) + .D32(0x0932f9b4) + .D32(0x000000a7) + .D32(0x000000a7) + .D32(0x0932f998) + .D32(0x579627a2) + .Mark(&frame3_ebp) + .D32(0) // saved %ebp (stack end) + .D32(0); // saved %eip (stack end) + + RegionFromSection(); + raw_context.eip = 0x77c181cd; // inside msvcrt!wcsstr + raw_context.esp = frame0_esp.Value(); + raw_context.ebp = frame0_ebp.Value(); + // sanity + ASSERT_TRUE(raw_context.esp == stack_section.start().Value()); + ASSERT_TRUE(raw_context.ebp == stack_section.start().Value() + 8); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, + &local_modules, &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + + ASSERT_EQ(3U, frames->size()); + + { // To avoid reusing locals by mistake + StackFrameX86 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(0x77c181cdU, frame0->instruction); + EXPECT_EQ(0x77c181cdU, frame0->context.eip); + EXPECT_EQ(frame0_esp.Value(), frame0->context.esp); + EXPECT_EQ(frame0_ebp.Value(), frame0->context.ebp); + EXPECT_EQ(&msvcrt_dll, frame0->module); + EXPECT_EQ("wcsstr", frame0->function_name); + ASSERT_TRUE(frame0->windows_frame_info != NULL); + EXPECT_EQ(WindowsFrameInfo::VALID_ALL, frame0->windows_frame_info->valid); + EXPECT_EQ(WindowsFrameInfo::STACK_INFO_FRAME_DATA, + frame0->windows_frame_info->type_); + EXPECT_EQ("$T0 $ebp = $eip $T0 " + "4 + ^ = $ebp $T0 ^ = $esp $T0 8 + = $L $T0 .cbSavedRegs " + "- = $P $T0 4 + .cbParams + =", + frame0->windows_frame_info->program_string); + // It has program string, so allocates_base_pointer is not expected + EXPECT_FALSE(frame0->windows_frame_info->allocates_base_pointer); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI_SCAN, frame1->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP | + StackFrameX86::CONTEXT_VALID_ESP | + StackFrameX86::CONTEXT_VALID_EBP), + frame1->context_validity); + EXPECT_EQ(0x7c80f10fU, frame1->instruction + 1); + EXPECT_EQ(0x7c80f10fU, frame1->context.eip); + // frame 1 was skipped, so intead of frame1_ebp compare with frame2_ebp. 
+ EXPECT_EQ(frame2_ebp.Value(), frame1->context.ebp); + EXPECT_EQ(&kernel32_dll, frame1->module); + EXPECT_EQ("FindNextFileW", frame1->function_name); + ASSERT_TRUE(frame1->windows_frame_info != NULL); + EXPECT_EQ(WindowsFrameInfo::VALID_ALL, frame1->windows_frame_info->valid); + EXPECT_EQ(WindowsFrameInfo::STACK_INFO_FRAME_DATA, + frame1->windows_frame_info->type_); + EXPECT_EQ("$T0 $ebp = $eip $T0 " + "4 + ^ = $ebp $T0 ^ = $esp $T0 8 + = $L $T0 .cbSavedRegs " + "- = $P $T0 4 + .cbParams + =", + frame1->windows_frame_info->program_string); + EXPECT_FALSE(frame1->windows_frame_info->allocates_base_pointer); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame2 = static_cast(frames->at(2)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame2->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP | + StackFrameX86::CONTEXT_VALID_ESP | + StackFrameX86::CONTEXT_VALID_EBP), + frame2->context_validity); + EXPECT_EQ(0x01d13f91U, frame2->instruction + 1); + EXPECT_EQ(0x01d13f91U, frame2->context.eip); + // frame 1 was skipped, so intead of frame2_ebp compare with frame3_ebp. + EXPECT_EQ(frame3_ebp.Value(), frame2->context.ebp); + EXPECT_EQ(&chrome_dll, frame2->module); + EXPECT_EQ("file_util::FileEnumerator::Next()", frame2->function_name); + ASSERT_TRUE(frame2->windows_frame_info != NULL); + EXPECT_EQ(WindowsFrameInfo::VALID_ALL, frame2->windows_frame_info->valid); + EXPECT_EQ(WindowsFrameInfo::STACK_INFO_FRAME_DATA, + frame2->windows_frame_info->type_); + EXPECT_EQ("$T1 .raSearch = " + "$T0 $T1 4 - 8 @ = $ebp $T1 4 - ^ = $eip $T1 ^ = $esp " + "$T1 4 + = $20 $T0 152 - ^ = $23 $T0 156 - ^ = $24 " + "$T0 160 - ^ =", + frame2->windows_frame_info->program_string); + EXPECT_FALSE(frame2->windows_frame_info->allocates_base_pointer); + } +} + +// Test the .raSearchStart/.raSearch calculation when alignment operators are +// used in the program string. The current %ebp must be valid and it is the +// only reliable data point that can be used for that calculation. 
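The program strings in these STACK WIN records are postfix expressions: operands (decimal literals, $-registers, $T temporaries, and walker-seeded entries such as .raSearch or .cbSavedRegs) are pushed on a stack, '^' dereferences an address in stack memory, '=' assigns to the identifier beneath the value, and '@' aligns a value downward, which is the operator the comment above is concerned with. A simplified evaluator sketch follows; it is not Breakpad's PostfixEvaluator, and EvaluatePostfixSketch, TokenValue, and deref are illustrative names.

    #include <stdint.h>
    #include <stdlib.h>

    #include <map>
    #include <sstream>
    #include <string>
    #include <vector>

    typedef std::map<std::string, uint32_t> VariableMap;

    // Resolve a token to a value: either a variable already present in vars
    // (e.g. "$ebp", ".raSearch") or a decimal literal such as "4" or "64".
    static bool TokenValue(const std::string& tok, const VariableMap& vars,
                           uint32_t* value) {
      VariableMap::const_iterator it = vars.find(tok);
      if (it != vars.end()) {
        *value = it->second;
        return true;
      }
      char* end = NULL;
      unsigned long parsed = strtoul(tok.c_str(), &end, 10);
      if (end == tok.c_str() || *end != '\0') return false;
      *value = static_cast<uint32_t>(parsed);
      return true;
    }

    // Evaluate a postfix program string against vars. deref stands in for
    // reading a 32-bit word of stack memory (the '^' operator). Only the
    // operators these tests use are handled: + - ^ = and '@', which aligns
    // its left operand down to a multiple of its power-of-two right operand.
    bool EvaluatePostfixSketch(const std::string& program, VariableMap* vars,
                               bool (*deref)(uint32_t address, uint32_t* value)) {
      std::vector<std::string> stack;  // unresolved identifiers and literals
      std::istringstream tokens(program);
      std::string tok;
      while (tokens >> tok) {
        if (tok == "+" || tok == "-" || tok == "@") {
          uint32_t rhs, lhs;
          if (stack.size() < 2 || !TokenValue(stack.back(), *vars, &rhs))
            return false;
          stack.pop_back();
          if (!TokenValue(stack.back(), *vars, &lhs)) return false;
          stack.pop_back();
          uint32_t result = tok == "+" ? lhs + rhs
                          : tok == "-" ? lhs - rhs
                          : lhs & ~(rhs - 1);  // '@': align lhs down to rhs
          std::ostringstream formatted;
          formatted << result;
          stack.push_back(formatted.str());
        } else if (tok == "^") {  // unary dereference of stack memory
          uint32_t address, value;
          if (stack.empty() || !TokenValue(stack.back(), *vars, &address))
            return false;
          stack.pop_back();
          if (!deref(address, &value)) return false;
          std::ostringstream formatted;
          formatted << value;
          stack.push_back(formatted.str());
        } else if (tok == "=") {  // assign the popped value to the identifier
          uint32_t value;
          if (stack.size() < 2 || !TokenValue(stack.back(), *vars, &value))
            return false;
          stack.pop_back();
          (*vars)[stack.back()] = value;  // e.g. "$T0", "$eip", "$esp"
          stack.pop_back();
        } else {
          stack.push_back(tok);  // identifier or literal operand
        }
      }
      return stack.empty();  // a well-formed program consumes everything
    }

For example, with $T1 holding the .raSearch candidate, "$T0 $T1 4 - 64 @ =" sets $T0 to ($T1 - 4) rounded down to a multiple of 64, which is how the 64-byte-aligned frame base is recovered in the DoRunLoop record used by the test below.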
+TEST_F(GetCallerFrame, HandleAlignmentInProgramString) { + MockCodeModule chrome_dll(0x59630000, 0x19e3000, "chrome.dll", "version1"); + SetModuleSymbols(&chrome_dll, // chrome.dll + "FUNC 56422 50c 8 base::MessageLoop::RunTask" + "(base::PendingTask const &)\n" + "56422 e 458 4589\n" + "STACK WIN 4 56422 50c 11 0 8 c ac 0 1 $T1 .raSearch = $T0 " + "$T1 4 - 8 @ = $ebp $T1 4 - ^ = $eip $T1 ^ = $esp $T1 4 + = " + "$20 $T0 176 - ^ = $23 $T0 180 - ^ = $24 $T0 184 - ^ =\n" + "FUNC 55d34 34a 0 base::MessageLoop::DoWork()\n" + "55d34 11 596 4589\n" + "STACK WIN 4 55d34 34a 19 0 0 c 134 0 1 $T1 .raSearch = " + "$T0 $T1 4 - 8 @ = $ebp $T1 4 - ^ = $eip $T1 ^ = $esp " + "$T1 4 + = $20 $T0 312 - ^ = $23 $T0 316 - ^ = $24 $T0 " + "320 - ^ =\n" + "FUNC 55c39 fb 0 base::MessagePumpForIO::DoRunLoop()\n" + "55c39 d 518 19962\n" + "STACK WIN 4 55c39 fb d 0 0 c 34 0 1 $T1 .raSearch = $T0 " + "$T1 4 - 64 @ = $ebp $T1 4 - ^ = $eip $T1 ^ = $esp $T1 4 + " + "= $20 $T0 56 - ^ = $23 $T0 60 - ^ = $24 $T0 64 - ^ =\n" + "FUNC 55bf0 49 4 base::MessagePumpWin::Run(base::" + "MessagePump::Delegate *)\n" + "55bf0 49 48 4724\n" + "STACK WIN 4 55bf0 49 c 0 4 0 10 0 1 $T0 $ebp = $eip $T0 4 " + "+ ^ = $ebp $T0 ^ = $esp $T0 8 + =\n" + "FUNC 165d de 4 malloc\n" + "165d 6 119 54\n" + "STACK WIN 4 165d de d 0 4 8 0 0 1 $T1 .raSearch = $T0 " + "$T1 4 - 8 @ = $ebp $T1 4 - ^ = $eip $T1 ^ = $esp $T1 4 " + "+ = $23 $T0 4 - ^ = $24 $T0 8 - ^ =\n" + "FUNC 55ac9 79 0 base::MessageLoop::RunInternal()\n" + "55ac9 d 427 4589\n" + "STACK WIN 4 55ac9 79 d 0 0 8 10 0 1 $T1 .raSearch = $T0 " + "$T1 4 - 8 @ = $ebp $T1 4 - ^ = $eip $T1 ^ = $esp $T1 4 + = " + "$23 $T0 20 - ^ = $24 $T0 24 - ^ =\n"); + + // Create some modules with some stock debugging information. + MockCodeModules local_modules; + local_modules.Add(&chrome_dll); + + Label frame0_esp; + Label frame0_ebp; + Label frame1_esp; + Label frame1_ebp; + Label frame2_esp; + Label frame2_ebp; + Label frame3_esp; + Label frame3_ebp; + + stack_section.start() = 0x046bfc80; + stack_section + .D32(0) + .Mark(&frame0_esp) + .D32(0x01e235a0) + .D32(0x00000000) + .D32(0x01e9f580) + .D32(0x01e9f580) + .D32(0x00000020) + .D32(0x00000000) + .D32(0x00463674) + .D32(0x00000020) + .D32(0x00000000) + .D32(0x046bfcd8) + .D32(0x046bfcd8) + .D32(0x0001204b) + .D32(0x00000000) + .D32(0xfdddb523) + .D32(0x00000000) + .D32(0x00000007) + .D32(0x00000040) + .D32(0x00000000) + .D32(0x59631693) // chrome_59630000!malloc+0x36 + .D32(0x01e9f580) + .D32(0x01e9f580) + .D32(0x046bfcf8) + .D32(0x77da6704) // ntdll!NtSetIoCompletion+0xc + .D32(0x046bfd4c) + .D32(0x59685bec) // chrome_59630000!base::MessageLoop::StartHistogrammer.. + .D32(0x01e235a0) + + .Mark(&frame0_ebp) + .D32(frame1_ebp) // Child EBP .D32(0x046bfd0c) + .D32(0x59685c2e) // Return address in + // chrome_59630000!base::MessagePumpWin::Run+0x3e + .Mark(&frame1_esp) + .D32(0x01e75a90) + .D32(0x046bfd4c) + .D32(0x01e75a90) + .D32(0x00000000) + .D32(0x00000300) + .D32(0x00000001) + + .Mark(&frame1_ebp) + .D32(frame2_ebp) // Child EBP .D32(0x046bfd30) + .D32(0x59685b3c) // Return address in + // chrome_59630000!base::MessageLoop::RunInternal+0x73 + .Mark(&frame2_esp) + .D32(0x01e75a90) + .D32(0x00000000) + .D32(0x046bfd4c) + .D32(0x59658123) // chrome_59630000!std::deque.. 
+ .D32(0x046bfda0) + .D32(0x01e79d70) + .D32(0x046bfda0) + + .Mark(&frame2_ebp) // .D32(0x046bfd40) + .D32(0) // saved %ebp (stack end) + .D32(0); // saved %eip (stack end) + + RegionFromSection(); + raw_context.eip = 0x59685c46; // Context frame in + // base::MessagePumpForIO::DoRunLoop + raw_context.esp = frame0_esp.Value(); + raw_context.ebp = frame0_ebp.Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, + &local_modules, &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + + ASSERT_EQ(3U, frames->size()); + + { // To avoid reusing locals by mistake + StackFrameX86 *frame = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame->trust); + ASSERT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame->context_validity); + EXPECT_EQ("base::MessagePumpForIO::DoRunLoop()", frame->function_name); + EXPECT_EQ(0x59685c46U, frame->instruction); + EXPECT_EQ(0x59685c46U, frame->context.eip); + EXPECT_EQ(frame0_esp.Value(), frame->context.esp); + EXPECT_EQ(frame0_ebp.Value(), frame->context.ebp); + EXPECT_EQ(&chrome_dll, frame->module); + ASSERT_TRUE(frame->windows_frame_info != NULL); + EXPECT_EQ(WindowsFrameInfo::VALID_ALL, frame->windows_frame_info->valid); + EXPECT_EQ(WindowsFrameInfo::STACK_INFO_FRAME_DATA, + frame->windows_frame_info->type_); + EXPECT_EQ("$T1 .raSearch = $T0 " + "$T1 4 - 64 @ = $ebp $T1 4 - ^ = $eip $T1 ^ = $esp $T1 4 + " + "= $20 $T0 56 - ^ = $23 $T0 60 - ^ = $24 $T0 64 - ^ =", + frame->windows_frame_info->program_string); + EXPECT_FALSE(frame->windows_frame_info->allocates_base_pointer); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP | + StackFrameX86::CONTEXT_VALID_ESP | + StackFrameX86::CONTEXT_VALID_EBP), + frame->context_validity); + EXPECT_EQ("base::MessagePumpWin::Run(base::MessagePump::Delegate *)", + frame->function_name); + EXPECT_EQ(1500011566U, frame->instruction + 1); + EXPECT_EQ(1500011566U, frame->context.eip); + EXPECT_EQ(frame1_esp.Value(), frame->context.esp); + EXPECT_EQ(frame1_ebp.Value(), frame->context.ebp); + EXPECT_EQ(&chrome_dll, frame->module); + ASSERT_TRUE(frame->windows_frame_info != NULL); + EXPECT_EQ(WindowsFrameInfo::VALID_ALL, frame->windows_frame_info->valid); + EXPECT_EQ(WindowsFrameInfo::STACK_INFO_FRAME_DATA, + frame->windows_frame_info->type_); + EXPECT_EQ("$T0 $ebp = $eip $T0 4 + ^ = $ebp $T0 ^ = $esp $T0 8 + =", + frame->windows_frame_info->program_string); + EXPECT_FALSE(frame->windows_frame_info->allocates_base_pointer); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame = static_cast(frames->at(2)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP | + StackFrameX86::CONTEXT_VALID_ESP | + StackFrameX86::CONTEXT_VALID_EBP), + frame->context_validity); + EXPECT_EQ("base::MessageLoop::RunInternal()", frame->function_name); + EXPECT_EQ(1500011324U, frame->instruction + 1); + EXPECT_EQ(1500011324U, frame->context.eip); + EXPECT_EQ(frame2_esp.Value(), frame->context.esp); + EXPECT_EQ(frame2_ebp.Value(), frame->context.ebp); + 
EXPECT_EQ(&chrome_dll, frame->module); + ASSERT_TRUE(frame->windows_frame_info != NULL); + EXPECT_EQ(WindowsFrameInfo::VALID_ALL, frame->windows_frame_info->valid); + EXPECT_EQ(WindowsFrameInfo::STACK_INFO_FRAME_DATA, + frame->windows_frame_info->type_); + EXPECT_EQ("$T1 .raSearch = $T0 " + "$T1 4 - 8 @ = $ebp $T1 4 - ^ = $eip $T1 ^ = $esp $T1 4 + = " + "$23 $T0 20 - ^ = $24 $T0 24 - ^ =", + frame->windows_frame_info->program_string); + EXPECT_FALSE(frame->windows_frame_info->allocates_base_pointer); + } +} + +// Scan the stack for a return address and potentially skip frames when the +// current IP address is not in a known module. Note, that that the span of +// this scan is limited to 120 search words for the context frame and 30 +// search words (pointers) for the other frames: +// const int kRASearchWords = 30; +void GetCallerFrame::IPAddressIsNotInKnownModuleTestImpl( + bool has_corrupt_symbols) { + MockCodeModule remoting_core_dll(0x54080000, 0x501000, "remoting_core.dll", + "version1"); + string symbols_func_section = + "FUNC 137214 17d 10 PK11_Verify\n" + "FUNC 15c834 37 14 nsc_ECDSAVerifyStub\n" + "FUNC 1611d3 91 14 NSC_Verify\n" + "FUNC 162ff7 60 4 sftk_SessionFromHandle\n"; + string symbols_stack_section = + "STACK WIN 4 137214 17d 9 0 10 0 10 0 1 $T0 $ebp = " + "$eip $T0 4 + ^ = $ebp $T0 ^ = $esp $T0 8 + =\n" + "STACK WIN 4 15c834 37 6 0 14 0 18 0 1 $T0 $ebp = " + "$eip $T0 4 + ^ = $ebp $T0 ^ = $esp $T0 8 + =\n" + "STACK WIN 4 1611d3 91 7 0 14 0 8 0 1 $T0 $ebp = " + "$eip $T0 4 + ^ = $ebp $T0 ^ = $esp $T0 8 + =\n" + "STACK WIN 4 162ff7 60 5 0 4 0 0 0 1 $T0 $ebp = " + "$eip $T0 4 + ^ = $ebp $T0 ^ = $esp $T0 8 + =\n"; + + string symbols = symbols_func_section; + if (has_corrupt_symbols) { + symbols.append(string(1, '\0')); // null terminator in the middle + symbols.append("\n"); + symbols.append("FUNC 1234\n" // invalid FUNC records + "FUNNC 1234\n" + "STACK WIN 4 1234 234 23 " // invalid STACK record + "23423423 234 23 234 234 " + "234 23 234 23 234 234 " + "234 234 234\n"); + } + symbols.append(symbols_stack_section); + SetModuleSymbols(&remoting_core_dll, symbols); + + // Create some modules with some stock debugging information. 
+ MockCodeModules local_modules; + local_modules.Add(&remoting_core_dll); + + Label frame0_esp; + Label frame0_ebp; + Label frame1_ebp; + Label frame1_esp; + Label frame2_ebp; + Label frame2_esp; + Label frame3_ebp; + Label frame3_esp; + Label bogus_stack_location_1; + Label bogus_stack_location_2; + Label bogus_stack_location_3; + + stack_section.start() = 0x01a3ea28; + stack_section + .Mark(&frame0_esp) + .D32(bogus_stack_location_2) + .D32(bogus_stack_location_1) + .D32(0x042478e4) + .D32(bogus_stack_location_2) + .D32(0x00000000) + .D32(0x041f0420) + .D32(0x00000000) + .D32(0x00000000) + .D32(0x00000040) + .D32(0x00000001) + .D32(0x00b7e0d0) + .D32(0x00000000) + .D32(0x00000040) + .D32(0x00000001) + .D32(0x00b7f570) + .Mark(&bogus_stack_location_1) + .D32(0x00000000) + .D32(0x00000040) + .D32(0x00000008) + .D32(0x04289530) + .D32(0x00000000) + .D32(0x00000040) + .D32(0x00000008) + .D32(0x00b7e910) + .D32(0x00000000) + .D32(0x00000040) + .D32(0x00000008) + .D32(0x00b7d998) + .D32(0x00000000) + .D32(0x00000040) + .D32(0x00000008) + .D32(0x00b7dec0) + .Mark(&bogus_stack_location_2) + .D32(0x00000000) + .D32(0x00000040) + .D32(0x00000008) + .D32(0x04289428) + .D32(0x00000000) + .D32(0x00000040) + .D32(0x00000008) + .D32(0x00b7f258) + .Mark(&bogus_stack_location_3) + .D32(0x00000000) + .D32(0x041f3560) + .D32(0x00000041) + .D32(0x00000020) + .D32(0xffffffff) + .Mark(&frame0_ebp) + .D32(frame1_ebp) // Child %ebp + .D32(0x541dc866) // return address of frame 0 + // inside remoting_core!nsc_ECDSAVerifyStub+0x32 + .Mark(&frame1_esp) + .D32(0x04247860) + .D32(0x01a3eaec) + .D32(0x01a3eaf8) + .D32(0x541e304f) // remoting_core!sftk_SessionFromHandle+0x58 + .D32(0x0404c620) + .D32(0x00000040) + .D32(0x01a3eb2c) + .D32(0x01a3ec08) + .D32(0x00000014) + .Mark(&frame1_ebp) + .D32(frame2_ebp) // Child %ebp + .D32(0x541e1234) // return address of frame 1 + // inside remoting_core!NSC_Verify+0x61 + .Mark(&frame2_esp) + .D32(0x04247858) + .D32(0x0404c620) + .D32(0x00000040) + .D32(0x01a3ec08) + .D32(0x00000014) + .D32(0x01000005) + .D32(0x00b2f7a0) + .D32(0x041f0420) + .D32(0x041f3650) + .Mark(&frame2_ebp) + .D32(frame3_ebp) // Child %ebp + .D32(0x541b734d) // return address of frame 1 + // inside remoting_core!PK11_Verify+0x139 + .Mark(&frame3_esp) + .D32(0x01000005) + .D32(0x01a3ec08) + .D32(0x00000014) + .D32(0x0404c620) + .D32(0x00000040) + .D32(0x04073e00) + .D32(0x04073e00) + .D32(0x04247050) + .D32(0x00001041) + .D32(0x00000000) + .D32(0x00000000) + .D32(0x00000000) + .Mark(&frame3_ebp) + .D32(0) // saved %ebp (stack end) + .D32(0); // saved %eip (stack end) + + RegionFromSection(); + raw_context.eip = 0x4247860; // IP address not in known module + raw_context.ebp = 0x5420362d; // bogus + raw_context.esp = frame0_esp.Value(); + + // sanity + ASSERT_TRUE(raw_context.esp == stack_section.start().Value()); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, + &local_modules, &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + if (has_corrupt_symbols) { + ASSERT_EQ(1U, modules_with_corrupt_symbols.size()); + ASSERT_EQ("remoting_core.dll", + modules_with_corrupt_symbols[0]->debug_file()); + } else { + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + } + frames = call_stack.frames(); + + ASSERT_EQ(4U, frames->size()); + + { // To 
avoid reusing locals by mistake + StackFrameX86 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ(raw_context.eip, frame0->context.eip); + EXPECT_EQ(raw_context.ebp, frame0->context.ebp); + EXPECT_EQ(raw_context.esp, frame0->context.esp); + EXPECT_EQ(NULL, frame0->module); // IP not in known module + EXPECT_EQ("", frame0->function_name); + ASSERT_EQ(NULL, frame0->windows_frame_info); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame1->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP | + StackFrameX86::CONTEXT_VALID_ESP | + StackFrameX86::CONTEXT_VALID_EBP), + frame1->context_validity); + EXPECT_EQ(frame1_ebp.Value(), frame1->context.ebp); + EXPECT_EQ(frame1_esp.Value(), frame1->context.esp); + EXPECT_EQ(&remoting_core_dll, frame1->module); + EXPECT_EQ("nsc_ECDSAVerifyStub", frame1->function_name); + ASSERT_TRUE(frame1->windows_frame_info != NULL); + EXPECT_EQ(WindowsFrameInfo::VALID_ALL, frame1->windows_frame_info->valid); + EXPECT_EQ(WindowsFrameInfo::STACK_INFO_FRAME_DATA, + frame1->windows_frame_info->type_); + EXPECT_EQ("$T0 $ebp = $eip $T0 4 + ^ = $ebp $T0 ^ = $esp $T0 8 + =", + frame1->windows_frame_info->program_string); + EXPECT_FALSE(frame1->windows_frame_info->allocates_base_pointer); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame2 = static_cast(frames->at(2)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame2->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP | + StackFrameX86::CONTEXT_VALID_ESP | + StackFrameX86::CONTEXT_VALID_EBP), + frame2->context_validity); + EXPECT_EQ(frame2_ebp.Value(), frame2->context.ebp); + EXPECT_EQ(frame2_esp.Value(), frame2->context.esp); + EXPECT_EQ(&remoting_core_dll, frame2->module); + EXPECT_EQ("NSC_Verify", frame2->function_name); + ASSERT_TRUE(frame2->windows_frame_info != NULL); + EXPECT_EQ(WindowsFrameInfo::VALID_ALL, frame2->windows_frame_info->valid); + EXPECT_EQ(WindowsFrameInfo::STACK_INFO_FRAME_DATA, + frame2->windows_frame_info->type_); + EXPECT_EQ("$T0 $ebp = $eip $T0 4 + ^ = $ebp $T0 ^ = $esp $T0 8 + =", + frame2->windows_frame_info->program_string); + EXPECT_FALSE(frame2->windows_frame_info->allocates_base_pointer); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame3 = static_cast(frames->at(3)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame3->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP | + StackFrameX86::CONTEXT_VALID_ESP | + StackFrameX86::CONTEXT_VALID_EBP), + frame3->context_validity); + EXPECT_EQ(frame3_ebp.Value(), frame3->context.ebp); + EXPECT_EQ(frame3_esp.Value(), frame3->context.esp); + EXPECT_EQ(&remoting_core_dll, frame3->module); + EXPECT_EQ("PK11_Verify", frame3->function_name); + ASSERT_TRUE(frame3->windows_frame_info != NULL); + EXPECT_EQ(WindowsFrameInfo::VALID_ALL, frame3->windows_frame_info->valid); + EXPECT_EQ(WindowsFrameInfo::STACK_INFO_FRAME_DATA, + frame3->windows_frame_info->type_); + EXPECT_EQ("$T0 $ebp = $eip $T0 4 + ^ = $ebp $T0 ^ = $esp $T0 8 + =", + frame3->windows_frame_info->program_string); + EXPECT_FALSE(frame3->windows_frame_info->allocates_base_pointer); + } +} + +// Runs IPAddressIsNotInKnownModule test with good symbols +TEST_F(GetCallerFrame, IPAddressIsNotInKnownModule) { + IPAddressIsNotInKnownModuleTestImpl(false /* has_corrupt_modules */); +} + +// Runs IPAddressIsNotInKnownModule test with corrupt 
symbols +TEST_F(GetCallerFrame, IPAddressIsNotInKnownModule_CorruptSymbols) { + IPAddressIsNotInKnownModuleTestImpl(true /* has_corrupt_modules */); +} + +struct CFIFixture: public StackwalkerX86Fixture { + CFIFixture() { + // Provide a bunch of STACK CFI records; individual tests walk to the + // caller from every point in this series, expecting to find the same + // set of register values. + SetModuleSymbols(&module1, + // The youngest frame's function. + "FUNC 4000 1000 10 enchiridion\n" + // Initially, just a return address. + "STACK CFI INIT 4000 100 .cfa: $esp 4 + .ra: .cfa 4 - ^\n" + // Push %ebx. + "STACK CFI 4001 .cfa: $esp 8 + $ebx: .cfa 8 - ^\n" + // Move %esi into %ebx. Weird, but permitted. + "STACK CFI 4002 $esi: $ebx\n" + // Allocate frame space, and save %edi. + "STACK CFI 4003 .cfa: $esp 20 + $edi: .cfa 16 - ^\n" + // Put the return address in %edi. + "STACK CFI 4005 .ra: $edi\n" + // Save %ebp, and use it as a frame pointer. + "STACK CFI 4006 .cfa: $ebp 8 + $ebp: .cfa 12 - ^\n" + + // The calling function. + "FUNC 5000 1000 10 epictetus\n" + // Mark it as end of stack. + "STACK CFI INIT 5000 1000 .cfa: $esp .ra 0\n"); + + // Provide some distinctive values for the caller's registers. + expected.esp = 0x80000000; + expected.eip = 0x40005510; + expected.ebp = 0xc0d4aab9; + expected.ebx = 0x60f20ce6; + expected.esi = 0x53d1379d; + expected.edi = 0xafbae234; + + // By default, registers are unchanged. + raw_context = expected; + } + + // Walk the stack, using stack_section as the contents of the stack + // and raw_context as the current register values. (Set + // raw_context.esp to the stack's starting address.) Expect two + // stack frames; in the older frame, expect the callee-saves + // registers to have values matching those in 'expected'. 
+ void CheckWalk() { + RegionFromSection(); + raw_context.esp = stack_section.start().Value(); + + StackFrameSymbolizer frame_symbolizer(&supplier, &resolver); + StackwalkerX86 walker(&system_info, &raw_context, &stack_region, &modules, + &frame_symbolizer); + vector modules_without_symbols; + vector modules_with_corrupt_symbols; + ASSERT_TRUE(walker.Walk(&call_stack, &modules_without_symbols, + &modules_with_corrupt_symbols)); + ASSERT_EQ(0U, modules_without_symbols.size()); + ASSERT_EQ(0U, modules_with_corrupt_symbols.size()); + frames = call_stack.frames(); + ASSERT_EQ(2U, frames->size()); + + { // To avoid reusing locals by mistake + StackFrameX86 *frame0 = static_cast(frames->at(0)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CONTEXT, frame0->trust); + ASSERT_EQ(StackFrameX86::CONTEXT_VALID_ALL, frame0->context_validity); + EXPECT_EQ("enchiridion", frame0->function_name); + EXPECT_EQ(0x40004000U, frame0->function_base); + ASSERT_TRUE(frame0->windows_frame_info != NULL); + ASSERT_EQ(WindowsFrameInfo::VALID_PARAMETER_SIZE, + frame0->windows_frame_info->valid); + ASSERT_TRUE(frame0->cfi_frame_info != NULL); + } + + { // To avoid reusing locals by mistake + StackFrameX86 *frame1 = static_cast(frames->at(1)); + EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame1->trust); + ASSERT_EQ((StackFrameX86::CONTEXT_VALID_EIP | + StackFrameX86::CONTEXT_VALID_ESP | + StackFrameX86::CONTEXT_VALID_EBP | + StackFrameX86::CONTEXT_VALID_EBX | + StackFrameX86::CONTEXT_VALID_ESI | + StackFrameX86::CONTEXT_VALID_EDI), + frame1->context_validity); + EXPECT_EQ(expected.eip, frame1->context.eip); + EXPECT_EQ(expected.esp, frame1->context.esp); + EXPECT_EQ(expected.ebp, frame1->context.ebp); + EXPECT_EQ(expected.ebx, frame1->context.ebx); + EXPECT_EQ(expected.esi, frame1->context.esi); + EXPECT_EQ(expected.edi, frame1->context.edi); + EXPECT_EQ("epictetus", frame1->function_name); + } + } + + // The values the stack walker should find for the caller's registers. + MDRawContextX86 expected; +}; + +class CFI: public CFIFixture, public Test { }; + +TEST_F(CFI, At4000) { + Label frame1_esp = expected.esp; + stack_section + .D32(0x40005510) // return address + .Mark(&frame1_esp); // This effectively sets stack_section.start(). + raw_context.eip = 0x40004000; + CheckWalk(); +} + +TEST_F(CFI, At4001) { + Label frame1_esp = expected.esp; + stack_section + .D32(0x60f20ce6) // saved %ebx + .D32(0x40005510) // return address + .Mark(&frame1_esp); // This effectively sets stack_section.start(). + raw_context.eip = 0x40004001; + raw_context.ebx = 0x91aa9a8b; // callee's %ebx value + CheckWalk(); +} + +TEST_F(CFI, At4002) { + Label frame1_esp = expected.esp; + stack_section + .D32(0x60f20ce6) // saved %ebx + .D32(0x40005510) // return address + .Mark(&frame1_esp); // This effectively sets stack_section.start(). + raw_context.eip = 0x40004002; + raw_context.ebx = 0x53d1379d; // saved %esi + raw_context.esi = 0xa5c790ed; // callee's %esi value + CheckWalk(); +} + +TEST_F(CFI, At4003) { + Label frame1_esp = expected.esp; + stack_section + .D32(0x56ec3db7) // garbage + .D32(0xafbae234) // saved %edi + .D32(0x53d67131) // garbage + .D32(0x60f20ce6) // saved %ebx + .D32(0x40005510) // return address + .Mark(&frame1_esp); // This effectively sets stack_section.start(). + raw_context.eip = 0x40004003; + raw_context.ebx = 0x53d1379d; // saved %esi + raw_context.esi = 0xa97f229d; // callee's %esi + raw_context.edi = 0xb05cc997; // callee's %edi + CheckWalk(); +} + +// The results here should be the same as those at module offset +// 0x4003. 
+TEST_F(CFI, At4004) { + Label frame1_esp = expected.esp; + stack_section + .D32(0xe29782c2) // garbage + .D32(0xafbae234) // saved %edi + .D32(0x5ba29ce9) // garbage + .D32(0x60f20ce6) // saved %ebx + .D32(0x40005510) // return address + .Mark(&frame1_esp); // This effectively sets stack_section.start(). + raw_context.eip = 0x40004004; + raw_context.ebx = 0x53d1379d; // saved %esi + raw_context.esi = 0x0fb7dc4e; // callee's %esi + raw_context.edi = 0x993b4280; // callee's %edi + CheckWalk(); +} + +TEST_F(CFI, At4005) { + Label frame1_esp = expected.esp; + stack_section + .D32(0xe29782c2) // garbage + .D32(0xafbae234) // saved %edi + .D32(0x5ba29ce9) // garbage + .D32(0x60f20ce6) // saved %ebx + .D32(0x8036cc02) // garbage + .Mark(&frame1_esp); // This effectively sets stack_section.start(). + raw_context.eip = 0x40004005; + raw_context.ebx = 0x53d1379d; // saved %esi + raw_context.esi = 0x0fb7dc4e; // callee's %esi + raw_context.edi = 0x40005510; // return address + CheckWalk(); +} + +TEST_F(CFI, At4006) { + Label frame0_ebp; + Label frame1_esp = expected.esp; + stack_section + .D32(0xdcdd25cd) // garbage + .D32(0xafbae234) // saved %edi + .D32(0xc0d4aab9) // saved %ebp + .Mark(&frame0_ebp) // frame pointer points here + .D32(0x60f20ce6) // saved %ebx + .D32(0x8036cc02) // garbage + .Mark(&frame1_esp); // This effectively sets stack_section.start(). + raw_context.eip = 0x40004006; + raw_context.ebp = frame0_ebp.Value(); + raw_context.ebx = 0x53d1379d; // saved %esi + raw_context.esi = 0x743833c9; // callee's %esi + raw_context.edi = 0x40005510; // return address + CheckWalk(); +} + diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/static_address_map-inl.h b/TMessagesProj/jni/third_party/breakpad/src/processor/static_address_map-inl.h new file mode 100644 index 0000000000..67e07976e0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/static_address_map-inl.h @@ -0,0 +1,71 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +// static_address_map-inl.h: StaticAddressMap implementation. +// +// See static_address_map.h for documentation. +// +// Author: Siyang Xie (lambxsy@google.com) + +#ifndef PROCESSOR_STATIC_ADDRESS_MAP_INL_H__ +#define PROCESSOR_STATIC_ADDRESS_MAP_INL_H__ + +#include "processor/static_address_map.h" + +#include "processor/logging.h" + +namespace google_breakpad { + +template +bool StaticAddressMap::Retrieve( + const AddressType &address, + const EntryType *&entry, AddressType *entry_address) const { + + // upper_bound gives the first element whose key is greater than address, + // but we want the first element whose key is less than or equal to address. + // Decrement the iterator to get there, but not if the upper_bound already + // points to the beginning of the map - in that case, address is lower than + // the lowest stored key, so return false. + + MapConstIterator iterator = map_.upper_bound(address); + if (iterator == map_.begin()) + return false; + --iterator; + + entry = iterator.GetValuePtr(); + // Make sure AddressType is a copyable basic type + if (entry_address) + *entry_address = iterator.GetKey(); + + return true; +} + +} // namespace google_breakpad + +#endif // PROCESSOR_STATIC_ADDRESS_MAP_INL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/static_address_map.h b/TMessagesProj/jni/third_party/breakpad/src/processor/static_address_map.h new file mode 100644 index 0000000000..6bafc66750 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/static_address_map.h @@ -0,0 +1,78 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// static_address_map.h: StaticAddressMap. +// +// StaticAddressMap is a wrapper class of StaticMap, just as AddressMap wraps +// std::map. StaticAddressMap provides read-only Retrieve() operation, similar +// as AddressMap. 
However, the difference between StaticAddressMap and +// AddressMap is that StaticAddressMap does not support dynamic operation +// Store() due to the static nature of the underlying StaticMap. +// +// See address_map.h for reference. +// +// Author: Siyang Xie (lambxsy@google.com) + +#ifndef PROCESSOR_STATIC_ADDRESS_MAP_H__ +#define PROCESSOR_STATIC_ADDRESS_MAP_H__ + +#include "processor/static_map-inl.h" + +namespace google_breakpad { + +// AddressType MUST be a basic type, e.g.: integer types etc +// EntryType could be a complex type, so we retrieve its pointer instead. +template +class StaticAddressMap { + public: + StaticAddressMap(): map_() { } + explicit StaticAddressMap(const char *map_data): map_(map_data) { } + + // Locates the entry stored at the highest address less than or equal to + // the address argument. If there is no such range, returns false. The + // entry is returned in entry, which is a required argument. If + // entry_address is not NULL, it will be set to the address that the entry + // was stored at. + bool Retrieve(const AddressType &address, + const EntryType *&entry, AddressType *entry_address) const; + + private: + friend class ModuleComparer; + // Convenience types. + typedef StaticAddressMap* SelfPtr; + typedef StaticMap AddressToEntryMap; + typedef typename AddressToEntryMap::const_iterator MapConstIterator; + + AddressToEntryMap map_; +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_STATIC_ADDRESS_MAP_H__ + diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/static_address_map_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/static_address_map_unittest.cc new file mode 100644 index 0000000000..12c735cff0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/static_address_map_unittest.cc @@ -0,0 +1,236 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// static_address_map_unittest.cc: Unit tests for StaticAddressMap. 
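A minimal sketch of the lookup flow that the unit tests below exercise. The buffer is assumed to come from google_breakpad::AddressMapSerializer, mirroring the test setup; headers are omitted and all names here are illustrative:

    // Build a writable AddressMap, serialize it, then wrap the raw buffer.
    google_breakpad::AddressMap<int, std::string> writable_map;
    writable_map.Store(10, "entry stored at 10");
    writable_map.Store(20, "entry stored at 20");

    google_breakpad::AddressMapSerializer<int, std::string> serializer;
    char *raw_buffer = serializer.Serialize(writable_map, NULL);

    // Read-only lookup: 15 has no exact match, so the entry stored at the
    // highest address <= 15 (that is, 10) is returned.
    google_breakpad::StaticAddressMap<int, char> static_map(raw_buffer);
    const char *entry = NULL;
    int entry_address = 0;
    if (static_map.Retrieve(15, entry, &entry_address)) {
      // entry points at "entry stored at 10", entry_address == 10.
    }
    delete [] raw_buffer;  // The serialized buffer stays owned by the caller.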
+// +// Author: Siyang Xie (lambxsy@google.com) + +#include +#include +#include +#include +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/using_std_string.h" +#include "processor/address_map-inl.h" +#include "processor/static_address_map-inl.h" +#include "processor/simple_serializer-inl.h" +#include "map_serializers-inl.h" + +typedef google_breakpad::StaticAddressMap TestMap; +typedef google_breakpad::AddressMap AddrMap; + +class TestStaticAddressMap : public ::testing::Test { + protected: + void SetUp() { + for (int testcase = 0; testcase < kNumberTestCases; ++testcase) { + testdata[testcase] = new int[testsize[testcase]]; + } + + // Test data set0: NULL (empty map) + + // Test data set1: single element. + testdata[1][0] = 10; + + // Test data set2: six elements. + const int tempdata[] = {5, 10, 14, 15, 16, 20}; + for (int i = 0; i < testsize[2]; ++i) + testdata[2][i] = tempdata[i]; + + // Test data set3: + srand(time(NULL)); + for (int i = 0; i < testsize[3]; ++i) + testdata[3][i] = rand(); + + // Setup maps. + std::stringstream sstream; + for (int testcase = 0; testcase < kNumberTestCases; ++testcase) { + for (int data_item = 0; data_item < testsize[testcase]; ++data_item) { + sstream.clear(); + sstream << "test " << testdata[testcase][data_item]; + addr_map[testcase].Store(testdata[testcase][data_item], sstream.str()); + } + map_data[testcase] = serializer.Serialize(addr_map[testcase], NULL); + test_map[testcase] = TestMap(map_data[testcase]); + } + } + + void TearDown() { + for (int i = 0; i < kNumberTestCases; ++i) { + delete [] map_data[i]; + delete [] testdata[i]; + } + } + + void CompareRetrieveResult(int testcase, int target) { + int address; + int address_test; + string entry; + string entry_test; + const char *entry_cstring = NULL; + bool found; + bool found_test; + + found = addr_map[testcase].Retrieve(target, &entry, &address); + found_test = + test_map[testcase].Retrieve(target, entry_cstring, &address_test); + + ASSERT_EQ(found, found_test); + + if (found && found_test) { + ASSERT_EQ(address, address_test); + entry_test = entry_cstring; + ASSERT_EQ(entry, entry_test); + } + } + + void RetrieveTester(int testcase) { + int target; + target = INT_MIN; + CompareRetrieveResult(testcase, target); + target = INT_MAX; + CompareRetrieveResult(testcase, target); + + srand(time(0)); + for (int data_item = 0; data_item < testsize[testcase]; ++data_item) { + // Retrive (aka, search) for target address and compare results from + // AddressMap and StaticAddressMap. + + // First, assign the search target to be one of original testdata that is + // known to exist in the map. + target = testdata[testcase][data_item]; + CompareRetrieveResult(testcase, target); + // Then, add +2 / -1 bias to target value, in order to test searching for + // a target address not stored in the map. + target -= 1; + CompareRetrieveResult(testcase, target); + target += 3; + CompareRetrieveResult(testcase, target); + // Repeatedly test searching for random target addresses. 
+ target = rand(); + CompareRetrieveResult(testcase, target); + } + } + + // Test data sets: + static const int kNumberTestCases = 4; + static const int testsize[]; + int *testdata[kNumberTestCases]; + + AddrMap addr_map[kNumberTestCases]; + TestMap test_map[kNumberTestCases]; + char *map_data[kNumberTestCases]; + google_breakpad::AddressMapSerializer serializer; +}; + +const int TestStaticAddressMap::testsize[] = {0, 1, 6, 1000}; + +TEST_F(TestStaticAddressMap, TestEmptyMap) { + int testcase = 0; + int target; + target = INT_MIN; + CompareRetrieveResult(testcase, target); + target = INT_MAX; + CompareRetrieveResult(testcase, target); + for (int data_item = 0; data_item < testsize[testcase]; ++data_item) { + target = testdata[testcase][data_item]; + CompareRetrieveResult(testcase, target); + target -= 1; + CompareRetrieveResult(testcase, target); + target += 3; + CompareRetrieveResult(testcase, target); + target = rand(); + CompareRetrieveResult(testcase, target); + } +} + +TEST_F(TestStaticAddressMap, TestOneElementMap) { + int testcase = 1; + int target; + target = INT_MIN; + CompareRetrieveResult(testcase, target); + target = INT_MAX; + CompareRetrieveResult(testcase, target); + for (int data_item = 0; data_item < testsize[testcase]; ++data_item) { + target = testdata[testcase][data_item]; + CompareRetrieveResult(testcase, target); + target -= 1; + CompareRetrieveResult(testcase, target); + target += 3; + CompareRetrieveResult(testcase, target); + target = rand(); + CompareRetrieveResult(testcase, target); + } +} + +TEST_F(TestStaticAddressMap, TestSixElementsMap) { + int testcase = 2; + int target; + target = INT_MIN; + CompareRetrieveResult(testcase, target); + target = INT_MAX; + CompareRetrieveResult(testcase, target); + for (int data_item = 0; data_item < testsize[testcase]; ++data_item) { + target = testdata[testcase][data_item]; + CompareRetrieveResult(testcase, target); + target -= 1; + CompareRetrieveResult(testcase, target); + target += 3; + CompareRetrieveResult(testcase, target); + target = rand(); + CompareRetrieveResult(testcase, target); + } +} + +TEST_F(TestStaticAddressMap, Test1000RandomElementsMap) { + int testcase = 3; + int target; + target = INT_MIN; + CompareRetrieveResult(testcase, target); + target = INT_MAX; + CompareRetrieveResult(testcase, target); + for (int data_item = 0; data_item < testsize[testcase]; ++data_item) { + target = testdata[testcase][data_item]; + CompareRetrieveResult(testcase, target); + target -= 1; + CompareRetrieveResult(testcase, target); + target += 3; + CompareRetrieveResult(testcase, target); + target = rand(); + CompareRetrieveResult(testcase, target); + } +} + +int main(int argc, char *argv[]) { + ::testing::InitGoogleTest(&argc, argv); + + return RUN_ALL_TESTS(); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/static_contained_range_map-inl.h b/TMessagesProj/jni/third_party/breakpad/src/processor/static_contained_range_map-inl.h new file mode 100644 index 0000000000..777c762184 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/static_contained_range_map-inl.h @@ -0,0 +1,92 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// static_contained_range_map-inl.h: Hierarchically-organized range map, +// i.e., StaticContainedRangeMap implementation. +// +// See static_contained_range_map.h for documentation. +// +// Author: Siyang Xie (lambxsy@google.com) + +#ifndef PROCESSOR_STATIC_CONTAINED_RANGE_MAP_INL_H__ +#define PROCESSOR_STATIC_CONTAINED_RANGE_MAP_INL_H__ + +#include "processor/static_contained_range_map.h" +#include "processor/logging.h" + +namespace google_breakpad { + +template +StaticContainedRangeMap::StaticContainedRangeMap( + const char *base) + : base_(*(reinterpret_cast(base))), + entry_size_(*(reinterpret_cast(base + sizeof(base_)))), + entry_ptr_(reinterpret_cast( + base + sizeof(base_) + sizeof(entry_size_))), + map_(base + sizeof(base_) + sizeof(entry_size_) + entry_size_) { + if (entry_size_ == 0) + entry_ptr_ = NULL; +} + + +template +bool StaticContainedRangeMap::RetrieveRange( + const AddressType &address, const EntryType *&entry) const { + + // Get an iterator to the child range whose high address is equal to or + // greater than the supplied address. If the supplied address is higher + // than all of the high addresses in the range, then this range does not + // contain a child at address, so return false. If the supplied address + // is lower than the base address of the child range, then it is not within + // the child range, so return false. + MapConstIterator iterator = map_.lower_bound(address); + + if (iterator == map_.end()) + return false; + + const char *memory_child = + reinterpret_cast(iterator.GetValuePtr()); + + StaticContainedRangeMap child_map(memory_child); + + if (address < child_map.base_) + return false; + + // The child in iterator->second contains the specified address. Find out + // if it has a more-specific descendant that also contains it. If it does, + // it will set |entry| appropriately. If not, set |entry| to the child. 
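  // For intuition (example values only, not tied to any test): if range
  // 10..19 holds entry A and a nested range 12..14 holds entry B, then
  // RetrieveRange(13) recurses into A's child map, finds B, and returns B;
  // RetrieveRange(11) finds no nested child containing 11, so the fallback
  // below returns A's own entry_ptr_.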
+ if (!child_map.RetrieveRange(address, entry)) + entry = child_map.entry_ptr_; + + return true; +} + +} // namespace google_breakpad + +#endif // PROCESSOR_STATIC_CONTAINED_RANGE_MAP_INL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/static_contained_range_map.h b/TMessagesProj/jni/third_party/breakpad/src/processor/static_contained_range_map.h new file mode 100644 index 0000000000..6a9b8b7b6d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/static_contained_range_map.h @@ -0,0 +1,96 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// static_contained_range_map.h: StaticContainedRangeMap. +// +// StaticContainedRangeMap is similar to ContainedRangeMap. However, +// StaticContainedRangeMap wraps a StaticMap instead of std::map, and does not +// support dynamic operations like StoreRange(...). +// StaticContainedRangeMap provides same RetrieveRange(...) interfaces as +// ContainedRangeMap. +// +// Please see contained_range_map.h for more documentation. +// +// Author: Siyang Xie (lambxsy@google.com) + +#ifndef PROCESSOR_STATIC_CONTAINED_RANGE_MAP_H__ +#define PROCESSOR_STATIC_CONTAINED_RANGE_MAP_H__ + +#include "processor/static_map-inl.h" + +namespace google_breakpad { + +template +class StaticContainedRangeMap { + public: + StaticContainedRangeMap(): base_(), entry_size_(), entry_ptr_(), map_() { } + explicit StaticContainedRangeMap(const char *base); + + // Retrieves the most specific (smallest) descendant range encompassing + // the specified address. This method will only return entries held by + // child ranges, and not the entry contained by |this|. This is necessary + // to support a sparsely-populated root range. If no descendant range + // encompasses the address, returns false. + bool RetrieveRange(const AddressType &address, const EntryType *&entry) const; + + private: + friend class ModuleComparer; + // AddressToRangeMap stores pointers. 
This makes reparenting simpler in + // StoreRange, because it doesn't need to copy entire objects. + typedef StaticContainedRangeMap* SelfPtr; + typedef + StaticMap AddressToRangeMap; + typedef typename AddressToRangeMap::const_iterator MapConstIterator; + + // The base address of this range. The high address does not need to + // be stored, because it is used as the key to an object in its parent's + // map, and all ContainedRangeMaps except for the root range are contained + // within maps. The root range does not actually contain an entry, so its + // base_ field is meaningless, and the fact that it has no parent and thus + // no key is unimportant. For this reason, the base_ field should only be + // is accessed on child ContainedRangeMap objects, and never on |this|. + AddressType base_; + + // The entry corresponding to this range. The root range does not + // actually contain an entry, so its entry_ field is meaningless. For + // this reason, the entry_ field should only be accessed on child + // ContainedRangeMap objects, and never on |this|. + uint32_t entry_size_; + const EntryType *entry_ptr_; + + // The map containing child ranges, keyed by each child range's high + // address. This is a pointer to avoid allocating map structures for + // leaf nodes, where they are not needed. + AddressToRangeMap map_; +}; + +} // namespace google_breakpad + + +#endif // PROCESSOR_STATIC_CONTAINED_RANGE_MAP_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/static_contained_range_map_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/static_contained_range_map_unittest.cc new file mode 100644 index 0000000000..4ee47578e2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/static_contained_range_map_unittest.cc @@ -0,0 +1,320 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// static_contained_range_map_unittest.cc: Unit tests for +// StaticContainedRangeMap. 
+// +// Author: Siyang Xie (lambxsy@google.com) + +#include "breakpad_googletest_includes.h" +#include "common/scoped_ptr.h" +#include "processor/contained_range_map-inl.h" +#include "processor/static_contained_range_map-inl.h" +#include "processor/simple_serializer-inl.h" +#include "processor/map_serializers-inl.h" +#include "processor/logging.h" + +namespace { + +typedef google_breakpad::ContainedRangeMap CRMMap; +typedef google_breakpad::StaticContainedRangeMap TestMap; + +// Each element in test_data contains the expected result when calling +// RetrieveRange on an address. +const int test_data[] = { + 0, // 0 + 0, // 1 + 0, // 2 + 0, // 3 + 0, // 4 + 0, // 5 + 0, // 6 + 0, // 7 + 9, // 8 + 7, // 9 + 1, // 10 + 5, // 11 + 6, // 12 + 6, // 13 + 6, // 14 + 6, // 15 + 6, // 16 + 6, // 17 + 6, // 18 + 5, // 19 + 7, // 20 + 8, // 21 + 0, // 22 + 0, // 23 + 0, // 24 + 0, // 25 + 0, // 26 + 0, // 27 + 0, // 28 + 0, // 29 + 10, // 30 + 10, // 31 + 10, // 32 + 11, // 33 + 11, // 34 + 11, // 35 + 0, // 36 + 0, // 37 + 0, // 38 + 0, // 39 + 14, // 40 + 14, // 41 + 14, // 42 + 14, // 43 + 15, // 44 + 15, // 45 + 15, // 46 + 15, // 47 + 0, // 48 + 0, // 49 + 19, // 50 + 18, // 51 + 18, // 52 + 18, // 53 + 18, // 54 + 18, // 55 + 18, // 56 + 18, // 57 + 18, // 58 + 20, // 59 + 21, // 60 + 25, // 61 + 26, // 62 + 26, // 63 + 26, // 64 + 26, // 65 + 26, // 66 + 26, // 67 + 24, // 68 + 22, // 69 + 30, // 70 + 30, // 71 + 30, // 72 + 30, // 73 + 31, // 74 + 31, // 75 + 30, // 76 + 32, // 77 + 32, // 78 + 30, // 79 + 34, // 80 + 35, // 81 + 36, // 82 + 39, // 83 + 38, // 84 + 37, // 85 + 43, // 86 + 44, // 87 + 41, // 88 + 45, // 89 + 42, // 90 + 0, // 91 + 0, // 92 + 0, // 93 + 0, // 94 + 0, // 95 + 0, // 96 + 0, // 97 + 0, // 98 + 0 // 99 +}; + +} // namespace + +namespace google_breakpad { + +class TestStaticCRMMap : public ::testing::Test { + protected: + void SetUp(); + + // A referrence map for testing StaticCRMMap. + google_breakpad::ContainedRangeMap crm_map_; + + // Static version of crm_map using serialized data of crm_map. + // The goal of testing is to make sure TestMap provides same results for + // lookup operation(s) as CRMMap does. + google_breakpad::StaticContainedRangeMap test_map_; + + google_breakpad::ContainedRangeMapSerializer serializer_; + + scoped_array serialized_data_; +}; + +void TestStaticCRMMap::SetUp() { + // First, do the StoreRange tests. This validates the containment + // rules. + // We confirm the referrence map correctly stores data during setup. 
+ ASSERT_TRUE (crm_map_.StoreRange(10, 10, 1)); + ASSERT_FALSE(crm_map_.StoreRange(10, 10, 2)); // exactly equal to 1 + ASSERT_FALSE(crm_map_.StoreRange(11, 10, 3)); // begins inside 1 and extends up + ASSERT_FALSE(crm_map_.StoreRange( 9, 10, 4)); // begins below 1 and ends inside + ASSERT_TRUE (crm_map_.StoreRange(11, 9, 5)); // contained by existing + ASSERT_TRUE (crm_map_.StoreRange(12, 7, 6)); + ASSERT_TRUE (crm_map_.StoreRange( 9, 12, 7)); // contains existing + ASSERT_TRUE (crm_map_.StoreRange( 9, 13, 8)); + ASSERT_TRUE (crm_map_.StoreRange( 8, 14, 9)); + ASSERT_TRUE (crm_map_.StoreRange(30, 3, 10)); + ASSERT_TRUE (crm_map_.StoreRange(33, 3, 11)); + ASSERT_TRUE (crm_map_.StoreRange(30, 6, 12)); // storable but totally masked + ASSERT_TRUE (crm_map_.StoreRange(40, 8, 13)); // will be totally masked + ASSERT_TRUE (crm_map_.StoreRange(40, 4, 14)); + ASSERT_TRUE (crm_map_.StoreRange(44, 4, 15)); + ASSERT_FALSE(crm_map_.StoreRange(32, 10, 16)); // begins in #10, ends in #14 + ASSERT_FALSE(crm_map_.StoreRange(50, 0, 17)); // zero length + ASSERT_TRUE (crm_map_.StoreRange(50, 10, 18)); + ASSERT_TRUE (crm_map_.StoreRange(50, 1, 19)); + ASSERT_TRUE (crm_map_.StoreRange(59, 1, 20)); + ASSERT_TRUE (crm_map_.StoreRange(60, 1, 21)); + ASSERT_TRUE (crm_map_.StoreRange(69, 1, 22)); + ASSERT_TRUE (crm_map_.StoreRange(60, 10, 23)); + ASSERT_TRUE (crm_map_.StoreRange(68, 1, 24)); + ASSERT_TRUE (crm_map_.StoreRange(61, 1, 25)); + ASSERT_TRUE (crm_map_.StoreRange(61, 8, 26)); + ASSERT_FALSE(crm_map_.StoreRange(59, 9, 27)); + ASSERT_FALSE(crm_map_.StoreRange(59, 10, 28)); + ASSERT_FALSE(crm_map_.StoreRange(59, 11, 29)); + ASSERT_TRUE (crm_map_.StoreRange(70, 10, 30)); + ASSERT_TRUE (crm_map_.StoreRange(74, 2, 31)); + ASSERT_TRUE (crm_map_.StoreRange(77, 2, 32)); + ASSERT_FALSE(crm_map_.StoreRange(72, 6, 33)); + ASSERT_TRUE (crm_map_.StoreRange(80, 3, 34)); + ASSERT_TRUE (crm_map_.StoreRange(81, 1, 35)); + ASSERT_TRUE (crm_map_.StoreRange(82, 1, 36)); + ASSERT_TRUE (crm_map_.StoreRange(83, 3, 37)); + ASSERT_TRUE (crm_map_.StoreRange(84, 1, 38)); + ASSERT_TRUE (crm_map_.StoreRange(83, 1, 39)); + ASSERT_TRUE (crm_map_.StoreRange(86, 5, 40)); + ASSERT_TRUE (crm_map_.StoreRange(88, 1, 41)); + ASSERT_TRUE (crm_map_.StoreRange(90, 1, 42)); + ASSERT_TRUE (crm_map_.StoreRange(86, 1, 43)); + ASSERT_TRUE (crm_map_.StoreRange(87, 1, 44)); + ASSERT_TRUE (crm_map_.StoreRange(89, 1, 45)); + ASSERT_TRUE (crm_map_.StoreRange(87, 4, 46)); + ASSERT_TRUE (crm_map_.StoreRange(87, 3, 47)); + ASSERT_FALSE(crm_map_.StoreRange(86, 2, 48)); + + // Serialize crm_map to generate serialized data. + unsigned int size; + serialized_data_.reset(serializer_.Serialize(&crm_map_, &size)); + BPLOG(INFO) << "Serialized data size: " << size << " Bytes."; + + // Construct test_map_ from serialized data. 
+ test_map_ = TestMap(serialized_data_.get()); +} + +TEST_F(TestStaticCRMMap, TestEmptyMap) { + CRMMap empty_crm_map; + + unsigned int size; + scoped_array serialized_data; + serialized_data.reset(serializer_.Serialize(&empty_crm_map, &size)); + scoped_ptr test_map(new TestMap(serialized_data.get())); + + const unsigned int kCorrectSizeForEmptyMap = 16; + ASSERT_EQ(kCorrectSizeForEmptyMap, size); + + const int *entry_test; + ASSERT_FALSE(test_map->RetrieveRange(-1, entry_test)); + ASSERT_FALSE(test_map->RetrieveRange(0, entry_test)); + ASSERT_FALSE(test_map->RetrieveRange(10, entry_test)); +} + +TEST_F(TestStaticCRMMap, TestSingleElementMap) { + CRMMap crm_map; + // Test on one element: + int entry = 1; + crm_map.StoreRange(10, 10, entry); + + unsigned int size; + scoped_array serialized_data; + serialized_data.reset(serializer_.Serialize(&crm_map, &size)); + scoped_ptr test_map(new TestMap(serialized_data.get())); + + const unsigned int kCorrectSizeForSingleElementMap = 40; + ASSERT_EQ(kCorrectSizeForSingleElementMap, size); + + const int *entry_test; + ASSERT_FALSE(test_map->RetrieveRange(-1, entry_test)); + ASSERT_FALSE(test_map->RetrieveRange(0, entry_test)); + ASSERT_TRUE(test_map->RetrieveRange(10, entry_test)); + ASSERT_EQ(*entry_test, entry); + ASSERT_TRUE(test_map->RetrieveRange(13, entry_test)); + ASSERT_EQ(*entry_test, entry); +} + +TEST_F(TestStaticCRMMap, RunTestData) { + unsigned int test_high = sizeof(test_data) / sizeof(test_data[0]); + + // Now, do the RetrieveRange tests. This further validates that the + // objects were stored properly and that retrieval returns the correct + // object. + // If GENERATE_TEST_DATA is defined, instead of the retrieval tests, a + // new test_data array will be printed. Exercise caution when doing this. + // Be sure to verify the results manually! +#ifdef GENERATE_TEST_DATA + printf(" const int test_data[] = {\n"); +#endif // GENERATE_TEST_DATA + + for (unsigned int address = 0; address < test_high; ++address) { + const int *entryptr; + int value = 0; + if (test_map_.RetrieveRange(address, entryptr)) + value = *entryptr; + +#ifndef GENERATE_TEST_DATA + // Don't use ASSERT inside the loop because it won't show the failed + // |address|, and the line number will always be the same. That makes + // it difficult to figure out which test failed. + EXPECT_EQ(value, test_data[address]) << "FAIL: retrieve address " + << address; +#else // !GENERATE_TEST_DATA + printf(" %d%c%s // %d\n", value, + address == test_high - 1 ? ' ' : ',', + value < 10 ? " " : "", + address); +#endif // !GENERATE_TEST_DATA + } + +#ifdef GENERATE_TEST_DATA + printf(" };\n"); +#endif // GENERATE_TEST_DATA +} + +} // namespace google_breakpad + +int main(int argc, char *argv[]) { + ::testing::InitGoogleTest(&argc, argv); + + return RUN_ALL_TESTS(); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/static_map-inl.h b/TMessagesProj/jni/third_party/breakpad/src/processor/static_map-inl.h new file mode 100644 index 0000000000..e6aac6aba4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/static_map-inl.h @@ -0,0 +1,176 @@ +// Copyright 2010 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// static_map-inl.h: StaticMap implementation.
+//
+// See static_map.h for documentation.
+//
+// Author: Siyang Xie (lambxsy@google.com)
+
+
+#ifndef PROCESSOR_STATIC_MAP_INL_H__
+#define PROCESSOR_STATIC_MAP_INL_H__
+
+#include "processor/static_map.h"
+#include "processor/static_map_iterator-inl.h"
+#include "processor/logging.h"
+
+namespace google_breakpad {
+
+template<typename Key, typename Value, typename Compare>
+StaticMap<Key, Value, Compare>::StaticMap(const char* raw_data)
+    : raw_data_(raw_data),
+      compare_() {
+  // First 4 Bytes store the number of nodes.
+  num_nodes_ = *(reinterpret_cast<const int32_t*>(raw_data_));
+
+  offsets_ = reinterpret_cast<const uint32_t*>(
+      raw_data_ + sizeof(num_nodes_));
+
+  keys_ = reinterpret_cast<const Key*>(
+      raw_data_ + (1 + num_nodes_) * sizeof(uint32_t));
+}
+
+// find(), lower_bound() and upper_bound() implement binary search algorithm.
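The bound semantics these binary searches are expected to reproduce match std::map's; a self-contained reference illustration (std::map serves only as the model here, not the API being implemented):

    #include <cassert>
    #include <map>

    int main() {
      // Sorted keys 5, 10, 14 -- the same shape as a three-node StaticMap.
      std::map<int, int> model = {{5, 0}, {10, 1}, {14, 2}};
      assert(model.find(12) == model.end());         // find(): exact key only
      assert(model.lower_bound(10)->first == 10);    // first key >= 10
      assert(model.upper_bound(10)->first == 14);    // first key >  10
      assert(model.upper_bound(20) == model.end());  // no key > 20
      return 0;
    }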
+template<typename Key, typename Value, typename Compare>
+StaticMapIterator<Key, Value, Compare>
+StaticMap<Key, Value, Compare>::find(const Key &key) const {
+  int begin = 0;
+  int end = num_nodes_;
+  int middle;
+  int compare_result;
+  while (begin < end) {
+    middle = begin + (end - begin) / 2;
+    compare_result = compare_(key, GetKeyAtIndex(middle));
+    if (compare_result == 0)
+      return IteratorAtIndex(middle);
+    if (compare_result < 0) {
+      end = middle;
+    } else {
+      begin = middle + 1;
+    }
+  }
+  return this->end();
+}
+
+template<typename Key, typename Value, typename Compare>
+StaticMapIterator<Key, Value, Compare>
+StaticMap<Key, Value, Compare>::lower_bound(const Key &key) const {
+  int begin = 0;
+  int end = num_nodes_;
+  int middle;
+  int comp_result;
+  while (begin < end) {
+    middle = begin + (end - begin) / 2;
+    comp_result = compare_(key, GetKeyAtIndex(middle));
+    if (comp_result == 0)
+      return IteratorAtIndex(middle);
+    if (comp_result < 0) {
+      end = middle;
+    } else {
+      begin = middle + 1;
+    }
+  }
+  return IteratorAtIndex(begin);
+}
+
+template<typename Key, typename Value, typename Compare>
+StaticMapIterator<Key, Value, Compare>
+StaticMap<Key, Value, Compare>::upper_bound(const Key &key) const {
+  int begin = 0;
+  int end = num_nodes_;
+  int middle;
+  int compare_result;
+  while (begin < end) {
+    middle = begin + (end - begin) / 2;
+    compare_result = compare_(key, GetKeyAtIndex(middle));
+    if (compare_result == 0)
+      return IteratorAtIndex(middle + 1);
+    if (compare_result < 0) {
+      end = middle;
+    } else {
+      begin = middle + 1;
+    }
+  }
+  return IteratorAtIndex(begin);
+}
+
+template<typename Key, typename Value, typename Compare>
+bool StaticMap<Key, Value, Compare>::ValidateInMemoryStructure() const {
+  // check the number of nodes is non-negative:
+  if (!raw_data_) return false;
+  int32_t num_nodes = *(reinterpret_cast<const int32_t*>(raw_data_));
+  if (num_nodes < 0) {
+    BPLOG(INFO) << "StaticMap check failed: negative number of nodes";
+    return false;
+  }
+
+  int node_index = 0;
+  if (num_nodes_) {
+    uint64_t first_offset = sizeof(int32_t) * (num_nodes_ + 1) +
+                            sizeof(Key) * num_nodes_;
+    // Num_nodes_ is too large.
+    if (first_offset > 0xffffffffUL) {
+      BPLOG(INFO) << "StaticMap check failed: size exceeds limit";
+      return false;
+    }
+    if (offsets_[node_index] != static_cast<uint32_t>(first_offset)) {
+      BPLOG(INFO) << "StaticMap check failed: first node offset is incorrect";
+      return false;
+    }
+  }
+
+  for (node_index = 1; node_index < num_nodes_; ++node_index) {
+    // Check offsets[i] is strictly increasing:
+    if (offsets_[node_index] <= offsets_[node_index - 1]) {
+      BPLOG(INFO) << "StaticMap check failed: node offsets non-increasing";
+      return false;
+    }
+    // Check Key[i] is strictly increasing as no duplicate keys are allowed.
+    if (compare_(GetKeyAtIndex(node_index),
+                 GetKeyAtIndex(node_index - 1)) <= 0) {
+      BPLOG(INFO) << "StaticMap check failed: node keys non-increasing";
+      return false;
+    }
+  }
+  return true;
+}
+
+template<typename Key, typename Value, typename Compare>
+const Key StaticMap<Key, Value, Compare>::GetKeyAtIndex(int index) const {
+  if (index < 0 || index >= num_nodes_) {
+    BPLOG(ERROR) << "Key index out of range error";
+    // Key type is required to be primitive type. Return 0 if index is invalid.
+    return 0;
+  }
+  return keys_[index];
+}
+
+}  // namespace google_breakpad
+
+#endif  // PROCESSOR_STATIC_MAP_INL_H__
diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/static_map.h b/TMessagesProj/jni/third_party/breakpad/src/processor/static_map.h
new file mode 100644
index 0000000000..9723ab2a84
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/processor/static_map.h
@@ -0,0 +1,144 @@
+// Copyright 2010 Google Inc. All Rights Reserved.
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// static_map.h: StaticMap. +// +// StaticMap provides lookup interfaces and iterators similar as stl::map's. +// These lookup operations are purely Read-Only, thus memory +// allocation & deallocation is mostly avoided (intentionally). +// +// The chunk of memory should contain data with pre-defined pattern: +// **************** header *************** +// uint32 (4 bytes): number of nodes +// uint32 (4 bytes): address offset of node1's mapped_value +// uint32 (4 bytes): address offset of node2's mapped_value +// ... +// uint32 (4 bytes): address offset of nodeN's mapped_value +// +// ************* Key array ************ +// (X bytes): node1's key +// (X bytes): node2's key +// ... +// (X bytes): nodeN's key +// +// ************* Value array ********** +// (? bytes): node1's mapped_value +// (? bytes): node2's mapped_value +// ... +// (? bytes): nodeN's mapped_value +// +// REQUIREMENT: Key type MUST be primitive type or pointers so that: +// X = sizeof(typename Key); +// +// Note: since address offset is stored as uint32, user should keep in mind that +// StaticMap only supports up to 4GB size of memory data. + +// Author: Siyang Xie (lambxsy@google.com) + + +#ifndef PROCESSOR_STATIC_MAP_H__ +#define PROCESSOR_STATIC_MAP_H__ + +#include "processor/static_map_iterator-inl.h" + +namespace google_breakpad { + +// Default functor to compare keys. +template +class DefaultCompare { + public: + int operator()(const Key &k1, const Key &k2) const { + if (k1 < k2) return -1; + if (k1 == k2) return 0; + return 1; + } +}; + +template > +class StaticMap { + public: + typedef StaticMapIterator iterator; + typedef StaticMapIterator const_iterator; + + StaticMap() : raw_data_(0), + num_nodes_(0), + offsets_(0), + compare_() { } + + explicit StaticMap(const char* raw_data); + + inline bool empty() const { return num_nodes_ == 0; } + inline unsigned int size() const { return num_nodes_; } + + // Return iterators. 
+ inline iterator begin() const { return IteratorAtIndex(0); } + inline iterator last() const { return IteratorAtIndex(num_nodes_ - 1); } + inline iterator end() const { return IteratorAtIndex(num_nodes_); } + inline iterator IteratorAtIndex(int index) const { + return iterator(raw_data_, index); + } + + // Lookup operations. + iterator find(const Key &k) const; + + // lower_bound(k) searches in a sorted range for the first element that has a + // key not less than the argument k. + iterator lower_bound(const Key &k) const; + + // upper_bound(k) searches in a sorted range for the first element that has a + // key greater than the argument k. + iterator upper_bound(const Key &k) const; + + // Checks if the underlying memory data conforms to the predefined pattern: + // first check the number of nodes is non-negative, + // then check both offsets and keys are strictly increasing (sorted). + bool ValidateInMemoryStructure() const; + + private: + const Key GetKeyAtIndex(int i) const; + + // Start address of a raw memory chunk with serialized data. + const char* raw_data_; + + // Number of nodes in the static map. + int32_t num_nodes_; + + // Array of offset addresses for stored values. + // For example: + // address_of_i-th_node_value = raw_data_ + offsets_[i] + const uint32_t* offsets_; + + // keys_[i] = key of i_th node + const Key* keys_; + + Compare compare_; +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_STATIC_MAP_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/static_map_iterator-inl.h b/TMessagesProj/jni/third_party/breakpad/src/processor/static_map_iterator-inl.h new file mode 100644 index 0000000000..7a7db5ad93 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/static_map_iterator-inl.h @@ -0,0 +1,147 @@ +// Copyright 2010 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// static_map_iterator-inl.h: StaticMapIterator implementation. +// +// See static_map_iterator.h for documentation. 
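The lower_bound()/upper_bound() contracts documented above are the familiar std::map ones, so a plain std::map run (illustrative only; StaticMap itself needs a serialized buffer) shows what each lookup returns for a given key:

#include <iostream>
#include <map>

int main() {
  std::map<int, int> m = {{10, 1}, {20, 2}, {30, 3}};

  // find(): exact match or end(). Booleans print as 1/0.
  std::cout << (m.find(20) != m.end()) << "\n";         // 1 (found)
  std::cout << (m.find(25) != m.end()) << "\n";         // 0 (not found)

  // lower_bound(k): first element whose key is not less than k.
  std::cout << m.lower_bound(20)->first << "\n";        // 20
  std::cout << m.lower_bound(21)->first << "\n";        // 30

  // upper_bound(k): first element whose key is greater than k.
  std::cout << m.upper_bound(20)->first << "\n";        // 30
  std::cout << (m.upper_bound(30) == m.end()) << "\n";  // 1 (past the last key)
}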
+// +// Author: Siyang Xie (lambxsy@google.com) + +#ifndef PROCESSOR_STATIC_MAP_ITERATOR_INL_H__ +#define PROCESSOR_STATIC_MAP_ITERATOR_INL_H__ + +#include "processor/static_map_iterator.h" + +#include "processor/logging.h" + +namespace google_breakpad { + +template +StaticMapIterator::StaticMapIterator(const char* base, + const int &index): + index_(index), base_(base) { + // See static_map.h for documentation on + // bytes format of serialized StaticMap data. + num_nodes_ = *(reinterpret_cast(base_)); + offsets_ = reinterpret_cast(base_ + sizeof(num_nodes_)); + keys_ = reinterpret_cast( + base_ + (1 + num_nodes_) * sizeof(num_nodes_)); +} + +// Increment & Decrement operators: +template +StaticMapIterator& +StaticMapIterator::operator++() { + if (!IsValid()) { + BPLOG(ERROR) << "operator++ on invalid iterator"; + return *this; + } + if (++index_ > num_nodes_) index_ = num_nodes_; + return *this; +} + +template +StaticMapIterator +StaticMapIterator::operator++(int postfix_operator) { + if (!IsValid()) { + BPLOG(ERROR) << "operator++ on invalid iterator"; + return *this; + } + StaticMapIterator tmp = *this; + if (++index_ > num_nodes_) index_ = num_nodes_; + return tmp; +} + +template +StaticMapIterator& +StaticMapIterator::operator--() { + if (!IsValid()) { + BPLOG(ERROR) << "operator++ on invalid iterator"; + return *this; + } + + if (--index_ < 0) index_ = 0; + return *this; +} + +template +StaticMapIterator +StaticMapIterator::operator--(int postfix_operator) { + if (!IsValid()) { + BPLOG(ERROR) << "operator++ on invalid iterator"; + return *this; + } + StaticMapIterator tmp = *this; + + if (--index_ < 0) index_ = 0; + return tmp; +} + +template +const Key* StaticMapIterator::GetKeyPtr() const { + if (!IsValid()) { + BPLOG(ERROR) << "call GetKeyPtr() on invalid iterator"; + return NULL; + } + return &(keys_[index_]); +} + +template +const char* StaticMapIterator::GetValueRawPtr() const { + if (!IsValid()) { + BPLOG(ERROR) << "call GetValuePtr() on invalid iterator"; + return NULL; + } + return base_ + offsets_[index_]; +} + +template +bool StaticMapIterator::operator==( + const StaticMapIterator& x) const { + return base_ == x.base_ && index_ == x.index_; +} + +template +bool StaticMapIterator::operator!=( + const StaticMapIterator& x) const { + // Only need to compare base_ and index_. + // Other data members are auxiliary. + return base_ != x.base_ || index_ != x.index_; +} + +template +bool StaticMapIterator::IsValid() const { + if (!base_ || index_ < 0 || index_ > num_nodes_) + return false; + + return true; +} + +} // namespace google_breakpad + +#endif // PROCESSOR_STATIC_MAP_ITERATOR_INL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/static_map_iterator.h b/TMessagesProj/jni/third_party/breakpad/src/processor/static_map_iterator.h new file mode 100644 index 0000000000..1af8fff454 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/static_map_iterator.h @@ -0,0 +1,112 @@ +// Copyright 2010 Google Inc. All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. 
+// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// static_map_iterator.h: StaticMapIterator template class declaration. +// +// StaticMapIterator provides increment and decrement operators to iterate +// through a StaticMap map. It does not provide *, -> operators, user should +// use GetKeyPtr(), GetKey(), GetValuePtr() interfaces to retrieve data or +// pointer to data. StaticMapIterator is essentially a const_iterator. +// +// Author: Siyang Xie (lambxsy@google.com) + + +#ifndef PROCESSOR_STATIC_MAP_ITERATOR_H__ +#define PROCESSOR_STATIC_MAP_ITERATOR_H__ + +#include "google_breakpad/common/breakpad_types.h" + +namespace google_breakpad { + +// Forward declaration. +template class StaticMap; + +// StaticMapIterator does not support operator*() or operator->(), +// User should use GetKey(), GetKeyPtr(), GetValuePtr() instead; +template +class StaticMapIterator { + public: + // Constructors. + StaticMapIterator(): index_(-1), base_(NULL) { } + + // Increment & Decrement operators: + StaticMapIterator& operator++(); + StaticMapIterator operator++(int post_fix_operator); + + StaticMapIterator& operator--(); + StaticMapIterator operator--(int post_fix_operator); + + // Interface for retrieving data / pointer to data. + const Key* GetKeyPtr() const; + + // Run time error will occur if GetKey() is called on an invalid iterator. + inline const Key GetKey() const { return *GetKeyPtr(); } + + // return a raw memory pointer that points to the start address of value. + const char* GetValueRawPtr() const; + + // return a reinterpret-casted pointer to the value. + inline const Value* GetValuePtr() const { + return reinterpret_cast(GetValueRawPtr()); + } + + bool operator==(const StaticMapIterator& x) const; + bool operator!=(const StaticMapIterator& x) const; + + // Check if this iterator is valid. + // If iterator is invalid, user is forbidden to use ++/-- operator + // or interfaces for retrieving data / pointer to data. + bool IsValid() const; + + private: + friend class StaticMap; + + // Only StaticMap can call this constructor. + explicit StaticMapIterator(const char* base, const int32_t &index); + + // Index of node that the iterator is pointing to. + int32_t index_; + + // Beginning address of the serialized map data. + const char* base_; + + // Number of nodes in the map. Use it to identify end() iterator. + int32_t num_nodes_; + + // offsets_ is an array of offset addresses of mapped values. + // For example: + // address_of_i-th_node_value = base_ + offsets_[i] + const uint32_t* offsets_; + + // keys_[i] = key of i_th node. 
+ const Key* keys_; +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_STATIC_MAP_ITERATOR_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/static_map_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/static_map_unittest.cc new file mode 100644 index 0000000000..97b1e61a93 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/static_map_unittest.cc @@ -0,0 +1,386 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// static_map_unittest.cc: Unit tests for StaticMap. 
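Before the unit tests, the basic iteration idiom for the iterator interface declared above looks like the sketch below: GetKey() and GetValuePtr() stand in for operator* and operator->. It assumes buf already holds data in the serialized layout shown earlier, and it builds only inside the Breakpad source tree.

#include <iostream>
#include "processor/static_map-inl.h"

// Walk an int->int StaticMap backed by a caller-provided serialized buffer.
void DumpIntMap(const char* buf) {
  google_breakpad::StaticMap<int, int> map(buf);
  for (google_breakpad::StaticMap<int, int>::const_iterator it = map.begin();
       it != map.end(); ++it) {
    std::cout << it.GetKey() << " -> " << *(it.GetValuePtr()) << "\n";
  }
}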
+// +// Author: Siyang Xie (lambxsy@google.com) + +#include +#include + +#include "breakpad_googletest_includes.h" +#include "processor/static_map-inl.h" + + +typedef int ValueType; +typedef int KeyType; +typedef google_breakpad::StaticMap< KeyType, ValueType > TestMap; +typedef std::map< KeyType, ValueType > StdMap; + +template +class SimpleMapSerializer { + public: + static char* Serialize(const std::map &stdmap, + unsigned int* size = NULL) { + unsigned int size_per_node = + sizeof(uint32_t) + sizeof(Key) + sizeof(Value); + unsigned int memsize = sizeof(int32_t) + size_per_node * stdmap.size(); + if (size) *size = memsize; + + // Allocate memory for serialized data: + char* mem = reinterpret_cast(operator new(memsize)); + char* address = mem; + + // Writer the number of nodes: + new (address) uint32_t(static_cast(stdmap.size())); + address += sizeof(uint32_t); + + // Nodes' offset: + uint32_t* offsets = reinterpret_cast(address); + address += sizeof(uint32_t) * stdmap.size(); + + // Keys: + Key* keys = reinterpret_cast(address); + address += sizeof(Key) * stdmap.size(); + + // Traversing map: + typename std::map::const_iterator iter = stdmap.begin(); + for (int index = 0; iter != stdmap.end(); ++iter, ++index) { + offsets[index] = static_cast(address - mem); + keys[index] = iter->first; + new (address) Value(iter->second); + address += sizeof(Value); + } + return mem; + } +}; + + +class TestInvalidMap : public ::testing::Test { + protected: + void SetUp() { + memset(data, 0, kMemorySize); + } + + // 40 Bytes memory can hold a StaticMap with up to 3 nodes. + static const int kMemorySize = 40; + char data[kMemorySize]; + TestMap test_map; +}; + +TEST_F(TestInvalidMap, TestNegativeNumberNodes) { + memset(data, 0xff, sizeof(uint32_t)); // Set the number of nodes = -1 + test_map = TestMap(data); + ASSERT_FALSE(test_map.ValidateInMemoryStructure()); +} + +TEST_F(TestInvalidMap, TestWrongOffsets) { + uint32_t* header = reinterpret_cast(data); + const uint32_t kNumNodes = 2; + const uint32_t kHeaderOffset = + sizeof(uint32_t) + kNumNodes * (sizeof(uint32_t) + sizeof(KeyType)); + + header[0] = kNumNodes; + header[1] = kHeaderOffset + 3; // Wrong offset for first node + test_map = TestMap(data); + ASSERT_FALSE(test_map.ValidateInMemoryStructure()); + + header[1] = kHeaderOffset; // Correct offset for first node + header[2] = kHeaderOffset - 1; // Wrong offset for second node + test_map = TestMap(data); + ASSERT_FALSE(test_map.ValidateInMemoryStructure()); +} + +TEST_F(TestInvalidMap, TestUnSortedKeys) { + uint32_t* header = reinterpret_cast(data); + const uint32_t kNumNodes = 2; + const uint32_t kHeaderOffset = + sizeof(uint32_t) + kNumNodes * (sizeof(uint32_t) + sizeof(KeyType)); + header[0] = kNumNodes; + header[1] = kHeaderOffset; + header[2] = kHeaderOffset + sizeof(ValueType); + + KeyType* keys = reinterpret_cast( + data + (kNumNodes + 1) * sizeof(uint32_t)); + // Set keys in non-increasing order. + keys[0] = 10; + keys[1] = 7; + test_map = TestMap(data); + ASSERT_FALSE(test_map.ValidateInMemoryStructure()); +} + + +class TestValidMap : public ::testing::Test { + protected: + void SetUp() { + int testcase = 0; + + // Empty map. + map_data[testcase] = + serializer.Serialize(std_map[testcase], &size[testcase]); + test_map[testcase] = TestMap(map_data[testcase]); + ++testcase; + + // Single element. 
+ std_map[testcase].insert(std::make_pair(2, 8)); + map_data[testcase] = + serializer.Serialize(std_map[testcase], &size[testcase]); + test_map[testcase] = TestMap(map_data[testcase]); + ++testcase; + + // 100 elements. + for (int i = 0; i < 100; ++i) + std_map[testcase].insert(std::make_pair(i, 2 * i)); + map_data[testcase] = + serializer.Serialize(std_map[testcase], &size[testcase]); + test_map[testcase] = TestMap(map_data[testcase]); + ++testcase; + + // 1000 random elements. + for (int i = 0; i < 1000; ++i) + std_map[testcase].insert(std::make_pair(rand(), rand())); + map_data[testcase] = + serializer.Serialize(std_map[testcase], &size[testcase]); + test_map[testcase] = TestMap(map_data[testcase]); + + // Set correct size of memory allocation for each test case. + unsigned int size_per_node = + sizeof(uint32_t) + sizeof(KeyType) + sizeof(ValueType); + for (testcase = 0; testcase < kNumberTestCases; ++testcase) { + correct_size[testcase] = + sizeof(uint32_t) + std_map[testcase].size() * size_per_node; + } + } + + void TearDown() { + for (int i = 0;i < kNumberTestCases; ++i) + delete map_data[i]; + } + + + void IteratorTester(int test_case) { + // scan through: + iter_test = test_map[test_case].begin(); + iter_std = std_map[test_case].begin(); + + for (; iter_test != test_map[test_case].end() && + iter_std != std_map[test_case].end(); + ++iter_test, ++iter_std) { + ASSERT_EQ(iter_test.GetKey(), iter_std->first); + ASSERT_EQ(*(iter_test.GetValuePtr()), iter_std->second); + } + ASSERT_TRUE(iter_test == test_map[test_case].end() + && iter_std == std_map[test_case].end()); + + // Boundary testcase. + if (!std_map[test_case].empty()) { + // rear boundary case: + iter_test = test_map[test_case].end(); + iter_std = std_map[test_case].end(); + --iter_std; + --iter_test; + ASSERT_EQ(iter_test.GetKey(), iter_std->first); + ASSERT_EQ(*(iter_test.GetValuePtr()), iter_std->second); + + ++iter_test; + ++iter_std; + ASSERT_TRUE(iter_test == test_map[test_case].end()); + + --iter_test; + --iter_std; + ASSERT_TRUE(iter_test != test_map[test_case].end()); + ASSERT_TRUE(iter_test == test_map[test_case].last()); + ASSERT_EQ(iter_test.GetKey(), iter_std->first); + ASSERT_EQ(*(iter_test.GetValuePtr()), iter_std->second); + + // front boundary case: + iter_test = test_map[test_case].begin(); + --iter_test; + ASSERT_TRUE(iter_test == test_map[test_case].begin()); + } + } + + void CompareLookupResult(int test_case) { + bool found1 = (iter_test != test_map[test_case].end()); + bool found2 = (iter_std != std_map[test_case].end()); + ASSERT_EQ(found1, found2); + + if (found1 && found2) { + ASSERT_EQ(iter_test.GetKey(), iter_std->first); + ASSERT_EQ(*(iter_test.GetValuePtr()), iter_std->second); + } + } + + void FindTester(int test_case, const KeyType &key) { + iter_test = test_map[test_case].find(key); + iter_std = std_map[test_case].find(key); + CompareLookupResult(test_case); + } + + void LowerBoundTester(int test_case, const KeyType &key) { + iter_test = test_map[test_case].lower_bound(key); + iter_std = std_map[test_case].lower_bound(key); + CompareLookupResult(test_case); + } + + void UpperBoundTester(int test_case, const KeyType &key) { + iter_test = test_map[test_case].upper_bound(key); + iter_std = std_map[test_case].upper_bound(key); + CompareLookupResult(test_case); + } + + void LookupTester(int test_case) { + StdMap::const_iterator iter; + // Test find(): + for (iter = std_map[test_case].begin(); + iter != std_map[test_case].end(); + ++iter) { + FindTester(test_case, iter->first); + FindTester(test_case, 
iter->first + 1); + FindTester(test_case, iter->first - 1); + } + FindTester(test_case, INT_MIN); + FindTester(test_case, INT_MAX); + // random test: + for (int i = 0; i < rand()%5000 + 5000; ++i) + FindTester(test_case, rand()); + + // Test lower_bound(): + for (iter = std_map[test_case].begin(); + iter != std_map[test_case].end(); + ++iter) { + LowerBoundTester(test_case, iter->first); + LowerBoundTester(test_case, iter->first + 1); + LowerBoundTester(test_case, iter->first - 1); + } + LowerBoundTester(test_case, INT_MIN); + LowerBoundTester(test_case, INT_MAX); + // random test: + for (int i = 0; i < rand()%5000 + 5000; ++i) + LowerBoundTester(test_case, rand()); + + // Test upper_bound(): + for (iter = std_map[test_case].begin(); + iter != std_map[test_case].end(); + ++iter) { + UpperBoundTester(test_case, iter->first); + UpperBoundTester(test_case, iter->first + 1); + UpperBoundTester(test_case, iter->first - 1); + } + UpperBoundTester(test_case, INT_MIN); + UpperBoundTester(test_case, INT_MAX); + // random test: + for (int i = 0; i < rand()%5000 + 5000; ++i) + UpperBoundTester(test_case, rand()); + } + + static const int kNumberTestCases = 4; + StdMap std_map[kNumberTestCases]; + TestMap test_map[kNumberTestCases]; + TestMap::const_iterator iter_test; + StdMap::const_iterator iter_std; + char* map_data[kNumberTestCases]; + unsigned int size[kNumberTestCases]; + unsigned int correct_size[kNumberTestCases]; + SimpleMapSerializer serializer; +}; + +TEST_F(TestValidMap, TestEmptyMap) { + int test_case = 0; + // Assert memory size allocated during serialization is correct. + ASSERT_EQ(correct_size[test_case], size[test_case]); + + // Sanity check of serialized data: + ASSERT_TRUE(test_map[test_case].ValidateInMemoryStructure()); + ASSERT_EQ(std_map[test_case].empty(), test_map[test_case].empty()); + ASSERT_EQ(std_map[test_case].size(), test_map[test_case].size()); + + // Test Iterator. + IteratorTester(test_case); + + // Test lookup operations. + LookupTester(test_case); +} + +TEST_F(TestValidMap, TestSingleElement) { + int test_case = 1; + // Assert memory size allocated during serialization is correct. + ASSERT_EQ(correct_size[test_case], size[test_case]); + + // Sanity check of serialized data: + ASSERT_TRUE(test_map[test_case].ValidateInMemoryStructure()); + ASSERT_EQ(std_map[test_case].empty(), test_map[test_case].empty()); + ASSERT_EQ(std_map[test_case].size(), test_map[test_case].size()); + + // Test Iterator. + IteratorTester(test_case); + + // Test lookup operations. + LookupTester(test_case); +} + +TEST_F(TestValidMap, Test100Elements) { + int test_case = 2; + // Assert memory size allocated during serialization is correct. + ASSERT_EQ(correct_size[test_case], size[test_case]); + + // Sanity check of serialized data: + ASSERT_TRUE(test_map[test_case].ValidateInMemoryStructure()); + ASSERT_EQ(std_map[test_case].empty(), test_map[test_case].empty()); + ASSERT_EQ(std_map[test_case].size(), test_map[test_case].size()); + + // Test Iterator. + IteratorTester(test_case); + + // Test lookup operations. + LookupTester(test_case); +} + +TEST_F(TestValidMap, Test1000RandomElements) { + int test_case = 3; + // Assert memory size allocated during serialization is correct. + ASSERT_EQ(correct_size[test_case], size[test_case]); + + // Sanity check of serialized data: + ASSERT_TRUE(test_map[test_case].ValidateInMemoryStructure()); + ASSERT_EQ(std_map[test_case].empty(), test_map[test_case].empty()); + ASSERT_EQ(std_map[test_case].size(), test_map[test_case].size()); + + // Test Iterator. 
+ IteratorTester(test_case); + + // Test lookup operations. + LookupTester(test_case); +} + +int main(int argc, char *argv[]) { + ::testing::InitGoogleTest(&argc, argv); + + return RUN_ALL_TESTS(); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/static_range_map-inl.h b/TMessagesProj/jni/third_party/breakpad/src/processor/static_range_map-inl.h new file mode 100644 index 0000000000..f6cef1a9ee --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/static_range_map-inl.h @@ -0,0 +1,130 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// static_range_map-inl.h: StaticRangeMap implementation. +// +// See static_range_map.h for documentation. +// +// Author: Siyang Xie (lambxsy@google.com) + +#ifndef PROCESSOR_STATIC_RANGE_MAP_INL_H__ +#define PROCESSOR_STATIC_RANGE_MAP_INL_H__ + +#include "processor/static_range_map.h" +#include "processor/logging.h" + +namespace google_breakpad { + +template +bool StaticRangeMap::RetrieveRange( + const AddressType &address, const EntryType *&entry, + AddressType *entry_base, AddressType *entry_size) const { + MapConstIterator iterator = map_.lower_bound(address); + if (iterator == map_.end()) + return false; + + // The map is keyed by the high address of each range, so |address| is + // guaranteed to be lower than the range's high address. If |range| is + // not directly preceded by another range, it's possible for address to + // be below the range's low address, though. When that happens, address + // references something not within any range, so return false. 
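A std::map analogue of the convention just described (illustrative only, not the Breakpad type): ranges are keyed by their high address, so lower_bound(address) yields the single candidate range, and a base check rejects addresses that fall in a gap.

#include <iostream>
#include <map>

struct Range { int base; int id; };

bool RetrieveRange(const std::map<int, Range>& m, int address, int* id) {
  std::map<int, Range>::const_iterator it = m.lower_bound(address);
  if (it == m.end()) return false;               // above every stored range
  if (address < it->second.base) return false;   // falls in a gap below it
  *id = it->second.id;
  return true;
}

int main() {
  std::map<int, Range> m;
  m[19] = Range{10, 1};   // range [10, 19], keyed by its high address 19
  m[39] = Range{30, 2};   // range [30, 39]

  int id;
  std::cout << RetrieveRange(m, 15, &id) << " " << id << "\n";  // 1 1
  std::cout << RetrieveRange(m, 25, &id) << "\n";               // 0 (gap)
  std::cout << RetrieveRange(m, 40, &id) << "\n";               // 0 (past end)
}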
+ + const Range *range = iterator.GetValuePtr(); + + // Make sure AddressType and EntryType are copyable basic types + // e.g.: integer types, pointers etc + if (address < range->base()) + return false; + + entry = range->entryptr(); + if (entry_base) + *entry_base = range->base(); + if (entry_size) + *entry_size = iterator.GetKey() - range->base() + 1; + + return true; +} + + +template +bool StaticRangeMap::RetrieveNearestRange( + const AddressType &address, const EntryType *&entry, + AddressType *entry_base, AddressType *entry_size) const { + // If address is within a range, RetrieveRange can handle it. + if (RetrieveRange(address, entry, entry_base, entry_size)) + return true; + + // upper_bound gives the first element whose key is greater than address, + // but we want the first element whose key is less than or equal to address. + // Decrement the iterator to get there, but not if the upper_bound already + // points to the beginning of the map - in that case, address is lower than + // the lowest stored key, so return false. + + MapConstIterator iterator = map_.upper_bound(address); + if (iterator == map_.begin()) + return false; + --iterator; + + const Range *range = iterator.GetValuePtr(); + entry = range->entryptr(); + if (entry_base) + *entry_base = range->base(); + if (entry_size) + *entry_size = iterator.GetKey() - range->base() + 1; + + return true; +} + +template +bool StaticRangeMap::RetrieveRangeAtIndex( + int index, const EntryType *&entry, + AddressType *entry_base, AddressType *entry_size) const { + + if (index >= GetCount()) { + BPLOG(ERROR) << "Index out of range: " << index << "/" << GetCount(); + return false; + } + + MapConstIterator iterator = map_.IteratorAtIndex(index); + + const Range *range = iterator.GetValuePtr(); + + entry = range->entryptr(); + if (entry_base) + *entry_base = range->base(); + if (entry_size) + *entry_size = iterator.GetKey() - range->base() + 1; + + return true; +} + +} // namespace google_breakpad + + +#endif // PROCESSOR_STATIC_RANGE_MAP_INL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/static_range_map.h b/TMessagesProj/jni/third_party/breakpad/src/processor/static_range_map.h new file mode 100644 index 0000000000..91aabb0324 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/static_range_map.h @@ -0,0 +1,106 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// static_range_map.h: StaticRangeMap. +// +// StaticRangeMap is similar as RangeMap. However, StaticRangeMap wraps a +// StaticMap instead of std::map, and does not support dynamic operations like +// StoreRange(...). StaticRangeMap provides same Retrieve*() interfaces as +// RangeMap. Please see range_map.h for more documentation. +// +// Author: Siyang Xie (lambxsy@google.com) + +#ifndef PROCESSOR_STATIC_RANGE_MAP_H__ +#define PROCESSOR_STATIC_RANGE_MAP_H__ + + +#include "processor/static_map-inl.h" + +namespace google_breakpad { + +// AddressType is basic type, e.g.: integer types, pointers etc +// EntryType could be a complex type, so we retrieve its pointer instead. +template +class StaticRangeMap { + public: + StaticRangeMap(): map_() { } + explicit StaticRangeMap(const char *memory): map_(memory) { } + + // Locates the range encompassing the supplied address. If there is + // no such range, returns false. entry_base and entry_size, if non-NULL, + // are set to the base and size of the entry's range. + bool RetrieveRange(const AddressType &address, const EntryType *&entry, + AddressType *entry_base, AddressType *entry_size) const; + + // Locates the range encompassing the supplied address, if one exists. + // If no range encompasses the supplied address, locates the nearest range + // to the supplied address that is lower than the address. Returns false + // if no range meets these criteria. entry_base and entry_size, if + // non-NULL, are set to the base and size of the entry's range. + bool RetrieveNearestRange(const AddressType &address, const EntryType *&entry, + AddressType *entry_base, AddressType *entry_size) + const; + + // Treating all ranges as a list ordered by the address spaces that they + // occupy, locates the range at the index specified by index. Returns + // false if index is larger than the number of ranges stored. entry_base + // and entry_size, if non-NULL, are set to the base and size of the entry's + // range. + // + // RetrieveRangeAtIndex is not optimized for speedy operation. + bool RetrieveRangeAtIndex(int index, const EntryType *&entry, + AddressType *entry_base, AddressType *entry_size) + const; + + // Returns the number of ranges stored in the RangeMap. + inline int GetCount() const { return map_.size(); } + + private: + friend class ModuleComparer; + class Range { + public: + AddressType base() const { + return *(reinterpret_cast(this)); + } + const EntryType* entryptr() const { + return reinterpret_cast(this + sizeof(AddressType)); + } + }; + + // Convenience types. 
+ typedef StaticRangeMap* SelfPtr; + typedef StaticMap AddressToRangeMap; + typedef typename AddressToRangeMap::const_iterator MapConstIterator; + + AddressToRangeMap map_; +}; + +} // namespace google_breakpad + +#endif // PROCESSOR_STATIC_RANGE_MAP_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/static_range_map_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/static_range_map_unittest.cc new file mode 100644 index 0000000000..2821736224 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/static_range_map_unittest.cc @@ -0,0 +1,421 @@ +// Copyright (c) 2010 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// static_range_map_unittest.cc: Unit tests for StaticRangeMap. +// +// Author: Siyang Xie (lambxsy@google.com) + +#include "breakpad_googletest_includes.h" +#include "common/scoped_ptr.h" +#include "processor/range_map-inl.h" +#include "processor/static_range_map-inl.h" +#include "processor/simple_serializer-inl.h" +#include "processor/map_serializers-inl.h" +#include "processor/logging.h" + + +namespace { +// Types used for testing. +typedef int AddressType; +typedef int EntryType; +typedef google_breakpad::StaticRangeMap< AddressType, EntryType > TestMap; +typedef google_breakpad::RangeMap< AddressType, EntryType > RMap; + +// RangeTest contains data to use for store and retrieve tests. See +// RunTests for descriptions of the tests. +struct RangeTest { + // Base address to use for test + AddressType address; + + // Size of range to use for test + AddressType size; + + // Unique ID of range - unstorable ranges must have unique IDs too + EntryType id; + + // Whether this range is expected to be stored successfully or not + bool expect_storable; +}; + +// A RangeTestSet encompasses multiple RangeTests, which are run in +// sequence on the same RangeMap. 
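Putting the pieces together, the intended flow for the read-only map defined above is roughly: populate a dynamic RangeMap, serialize it to a flat buffer, then wrap that buffer in a StaticRangeMap for lookups. The sketch below is hypothetical, uses only types that ship with Breakpad (RangeMap, RangeMapSerializer, scoped_array), and builds only inside the Breakpad source tree.

#include <iostream>
#include "common/scoped_ptr.h"
#include "processor/range_map-inl.h"
#include "processor/map_serializers-inl.h"
#include "processor/static_range_map-inl.h"

int main() {
  google_breakpad::RangeMap<int, int> dynamic_map;
  dynamic_map.StoreRange(0x1000, 0x100, 7);  // range [0x1000, 0x10ff], entry 7

  // Serialize to a flat buffer and wrap it without copying.
  google_breakpad::RangeMapSerializer<int, int> serializer;
  google_breakpad::scoped_array<char> buffer(
      serializer.Serialize(dynamic_map, NULL));
  google_breakpad::StaticRangeMap<int, int> static_map(buffer.get());

  const int* entry;
  int base, size;
  if (static_map.RetrieveRange(0x1050, entry, &base, &size)) {
    // Prints the entry, then base and size in hex: 7 1000 100
    std::cout << *entry << " " << std::hex << base << " " << size << "\n";
  }
}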
+struct RangeTestSet { + // An array of RangeTests + const RangeTest* range_tests; + + // The number of tests in the set + unsigned int range_test_count; +}; + +// These tests will be run sequentially. The first set of tests exercises +// most functions of RangeTest, and verifies all of the bounds-checking. +const RangeTest range_tests_0[] = { + { INT_MIN, 16, 1, true }, // lowest possible range + { -2, 5, 2, true }, // a range through zero + { INT_MAX - 9, 11, 3, false }, // tests anti-overflow + { INT_MAX - 9, 10, 4, true }, // highest possible range + { 5, 0, 5, false }, // tests anti-zero-size + { 5, 1, 6, true }, // smallest possible range + { -20, 15, 7, true }, // entirely negative + + { 10, 10, 10, true }, // causes the following tests to fail + { 9, 10, 11, false }, // one-less base, one-less high + { 9, 11, 12, false }, // one-less base, identical high + { 9, 12, 13, false }, // completely contains existing + { 10, 9, 14, false }, // identical base, one-less high + { 10, 10, 15, false }, // exactly identical to existing range + { 10, 11, 16, false }, // identical base, one-greater high + { 11, 8, 17, false }, // contained completely within + { 11, 9, 18, false }, // one-greater base, identical high + { 11, 10, 19, false }, // one-greater base, one-greater high + { 9, 2, 20, false }, // overlaps bottom by one + { 10, 1, 21, false }, // overlaps bottom by one, contained + { 19, 1, 22, false }, // overlaps top by one, contained + { 19, 2, 23, false }, // overlaps top by one + + { 9, 1, 24, true }, // directly below without overlap + { 20, 1, 25, true }, // directly above without overlap + + { 6, 3, 26, true }, // exactly between two ranges, gapless + { 7, 3, 27, false }, // tries to span two ranges + { 7, 5, 28, false }, // tries to span three ranges + { 4, 20, 29, false }, // tries to contain several ranges + + { 30, 50, 30, true }, + { 90, 25, 31, true }, + { 35, 65, 32, false }, // tries to span two noncontiguous + { 120, 10000, 33, true }, // > 8-bit + { 20000, 20000, 34, true }, // > 8-bit + { 0x10001, 0x10001, 35, true }, // > 16-bit + + { 27, -1, 36, false } // tests high < base +}; + +// Attempt to fill the entire space. The entire space must be filled with +// three stores because AddressType is signed for these tests, so RangeMap +// treats the size as signed and rejects sizes that appear to be negative. +// Even if these tests were run as unsigned, two stores would be needed +// to fill the space because the entire size of the space could only be +// described by using one more bit than would be present in AddressType. +const RangeTest range_tests_1[] = { + { INT_MIN, INT_MAX, 50, true }, // From INT_MIN to -2, inclusive + { -1, 2, 51, true }, // From -1 to 0, inclusive + { 1, INT_MAX, 52, true }, // From 1 to INT_MAX, inclusive + { INT_MIN, INT_MAX, 53, false }, // Can't fill the space twice + { -1, 2, 54, false }, + { 1, INT_MAX, 55, false }, + { -3, 6, 56, false }, // -3 to 2, inclusive - spans 3 ranges +}; + +// A light round of testing to verify that RetrieveRange does the right +// the right thing at the extremities of the range when nothing is stored +// there. Checks are forced without storing anything at the extremities +// by setting size = 0. +const RangeTest range_tests_2[] = { + { INT_MIN, 0, 100, false }, // makes RetrieveRange check low end + { -1, 3, 101, true }, + { INT_MAX, 0, 102, false }, // makes RetrieveRange check high end +}; + +// Similar to the previous test set, but with a couple of ranges closer +// to the extremities. 
+const RangeTest range_tests_3[] = { + { INT_MIN + 1, 1, 110, true }, + { INT_MAX - 1, 1, 111, true }, + { INT_MIN, 0, 112, false }, // makes RetrieveRange check low end + { INT_MAX, 0, 113, false } // makes RetrieveRange check high end +}; + +// The range map is cleared between sets of tests listed here. +const RangeTestSet range_test_sets[] = { + { range_tests_0, sizeof(range_tests_0) / sizeof(RangeTest) }, + { range_tests_1, sizeof(range_tests_1) / sizeof(RangeTest) }, + { range_tests_2, sizeof(range_tests_2) / sizeof(RangeTest) }, + { range_tests_3, sizeof(range_tests_3) / sizeof(RangeTest) }, + { range_tests_0, sizeof(range_tests_0) / sizeof(RangeTest) } // Run again +}; + +} // namespace + +namespace google_breakpad { +class TestStaticRangeMap : public ::testing::Test { + protected: + void SetUp() { + kTestCasesCount_ = sizeof(range_test_sets) / sizeof(RangeTestSet); + } + + // StoreTest uses the data in a RangeTest and calls StoreRange on the + // test RangeMap. It returns true if the expected result occurred, and + // false if something else happened. + void StoreTest(RMap* range_map, const RangeTest* range_test); + + // RetrieveTest uses the data in RangeTest and calls RetrieveRange on the + // test RangeMap. If it retrieves the expected value (which can be no + // map entry at the specified range,) it returns true, otherwise, it returns + // false. RetrieveTest will check the values around the base address and + // the high address of a range to guard against off-by-one errors. + void RetrieveTest(TestMap* range_map, const RangeTest* range_test); + + // Test RetrieveRangeAtIndex, which is supposed to return objects in order + // according to their addresses. This test is performed by looping through + // the map, calling RetrieveRangeAtIndex for all possible indices in sequence, + // and verifying that each call returns a different object than the previous + // call, and that ranges are returned with increasing base addresses. Returns + // false if the test fails. + void RetrieveIndexTest(const TestMap* range_map, int set); + + void RunTestCase(int test_case); + + unsigned int kTestCasesCount_; + RangeMapSerializer serializer_; +}; + +void TestStaticRangeMap::StoreTest(RMap* range_map, + const RangeTest* range_test) { + bool stored = range_map->StoreRange(range_test->address, + range_test->size, + range_test->id); + EXPECT_EQ(stored, range_test->expect_storable) + << "StoreRange id " << range_test->id << "FAILED"; +} + +void TestStaticRangeMap::RetrieveTest(TestMap* range_map, + const RangeTest* range_test) { + for (unsigned int side = 0; side <= 1; ++side) { + // When side == 0, check the low side (base address) of each range. + // When side == 1, check the high side (base + size) of each range. + + // Check one-less and one-greater than the target address in addition + // to the target address itself. + + // If the size of the range is only 1, don't check one greater than + // the base or one less than the high - for a successfully stored + // range, these tests would erroneously fail because the range is too + // small. + AddressType low_offset = -1; + AddressType high_offset = 1; + if (range_test->size == 1) { + if (!side) // When checking the low side, + high_offset = 0; // don't check one over the target. + else // When checking the high side, + low_offset = 0; // don't check one under the target. + } + + for (AddressType offset = low_offset; offset <= high_offset; ++offset) { + AddressType address = + offset + + (!side ? 
range_test->address : + range_test->address + range_test->size - 1); + + bool expected_result = false; // This is correct for tests not stored. + if (range_test->expect_storable) { + if (offset == 0) // When checking the target address, + expected_result = true; // test should always succeed. + else if (offset == -1) // When checking one below the target, + expected_result = side; // should fail low and succeed high. + else // When checking one above the target, + expected_result = !side; // should succeed low and fail high. + } + + const EntryType* id; + AddressType retrieved_base; + AddressType retrieved_size; + bool retrieved = range_map->RetrieveRange(address, id, + &retrieved_base, + &retrieved_size); + + bool observed_result = retrieved && *id == range_test->id; + EXPECT_EQ(observed_result, expected_result) + << "RetrieveRange id " << range_test->id + << ", side " << side << ", offset " << offset << " FAILED."; + + // If a range was successfully retrieved, check that the returned + // bounds match the range as stored. + if (observed_result == true) { + EXPECT_EQ(retrieved_base, range_test->address) + << "RetrieveRange id " << range_test->id + << ", side " << side << ", offset " << offset << " FAILED."; + EXPECT_EQ(retrieved_size, range_test->size) + << "RetrieveRange id " << range_test->id + << ", side " << side << ", offset " << offset << " FAILED."; + } + + // Now, check RetrieveNearestRange. The nearest range is always + // expected to be different from the test range when checking one + // less than the low side. + bool expected_nearest = range_test->expect_storable; + if (!side && offset < 0) + expected_nearest = false; + + AddressType nearest_base; + AddressType nearest_size; + bool retrieved_nearest = range_map->RetrieveNearestRange(address, + id, + &nearest_base, + &nearest_size); + + // When checking one greater than the high side, RetrieveNearestRange + // should usually return the test range. When a different range begins + // at that address, though, then RetrieveNearestRange should return the + // range at the address instead of the test range. + if (side && offset > 0 && nearest_base == address) { + expected_nearest = false; + } + + bool observed_nearest = retrieved_nearest && + *id == range_test->id; + + EXPECT_EQ(observed_nearest, expected_nearest) + << "RetrieveRange id " << range_test->id + << ", side " << side << ", offset " << offset << " FAILED."; + + // If a range was successfully retrieved, check that the returned + // bounds match the range as stored. 
+ if (expected_nearest ==true) { + EXPECT_EQ(nearest_base, range_test->address) + << "RetrieveRange id " << range_test->id + << ", side " << side << ", offset " << offset << " FAILED."; + EXPECT_EQ(nearest_size, range_test->size) + << "RetrieveRange id " << range_test->id + << ", side " << side << ", offset " << offset << " FAILED."; + } + } + } +} + +void TestStaticRangeMap::RetrieveIndexTest(const TestMap* range_map, int set) { + AddressType last_base = 0; + const EntryType* last_entry = 0; + const EntryType* entry; + int object_count = range_map->GetCount(); + for (int object_index = 0; object_index < object_count; ++object_index) { + AddressType base; + ASSERT_TRUE(range_map->RetrieveRangeAtIndex(object_index, + entry, + &base, + NULL)) + << "FAILED: RetrieveRangeAtIndex set " << set + << " index " << object_index; + + ASSERT_TRUE(entry) << "FAILED: RetrieveRangeAtIndex set " << set + << " index " << object_index; + + // It's impossible to do these comparisons unless there's a previous + // object to compare against. + if (last_entry) { + // The object must be different from the last_entry one. + EXPECT_NE(*entry, *last_entry) << "FAILED: RetrieveRangeAtIndex set " + << set << " index " << object_index; + // Each object must have a base greater than the previous object's base. + EXPECT_GT(base, last_base) << "FAILED: RetrieveRangeAtIndex set " << set + << " index " << object_index; + } + last_entry = entry; + last_base = base; + } + + // Make sure that RetrieveRangeAtIndex doesn't allow lookups at indices that + // are too high. + ASSERT_FALSE(range_map->RetrieveRangeAtIndex( + object_count, entry, NULL, NULL)) << "FAILED: RetrieveRangeAtIndex set " + << set << " index " << object_count + << " (too large)"; +} + +// RunTests runs a series of test sets. +void TestStaticRangeMap::RunTestCase(int test_case) { + // Maintain the range map in a pointer so that deletion can be meaningfully + // tested. + scoped_ptr rmap(new RMap()); + + const RangeTest* range_tests = range_test_sets[test_case].range_tests; + unsigned int range_test_count = range_test_sets[test_case].range_test_count; + + // Run the StoreRange test, which validates StoreRange and initializes + // the RangeMap with data for the RetrieveRange test. + int stored_count = 0; // The number of ranges successfully stored + for (unsigned int range_test_index = 0; + range_test_index < range_test_count; + ++range_test_index) { + const RangeTest* range_test = &range_tests[range_test_index]; + StoreTest(rmap.get(), range_test); + + if (range_test->expect_storable) + ++stored_count; + } + + scoped_array memaddr(serializer_.Serialize(*rmap, NULL)); + scoped_ptr static_range_map(new TestMap(memaddr.get())); + + // The RangeMap's own count of objects should also match. 
+ EXPECT_EQ(static_range_map->GetCount(), stored_count); + + // Run the RetrieveRange test + for (unsigned int range_test_index = 0; + range_test_index < range_test_count; + ++range_test_index) { + const RangeTest* range_test = &range_tests[range_test_index]; + RetrieveTest(static_range_map.get(), range_test); + } + + RetrieveIndexTest(static_range_map.get(), test_case); +} + +TEST_F(TestStaticRangeMap, TestCase0) { + int test_case = 0; + RunTestCase(test_case); +} + +TEST_F(TestStaticRangeMap, TestCase1) { + int test_case = 1; + RunTestCase(test_case); +} + +TEST_F(TestStaticRangeMap, TestCase2) { + int test_case = 2; + RunTestCase(test_case); +} + +TEST_F(TestStaticRangeMap, TestCase3) { + int test_case = 3; + RunTestCase(test_case); +} + +TEST_F(TestStaticRangeMap, RunTestCase0Again) { + int test_case = 0; + RunTestCase(test_case); +} + +} // namespace google_breakpad + +int main(int argc, char *argv[]) { + ::testing::InitGoogleTest(&argc, argv); + + return RUN_ALL_TESTS(); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/symbolic_constants_win.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/symbolic_constants_win.cc new file mode 100644 index 0000000000..333aa04174 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/symbolic_constants_win.cc @@ -0,0 +1,6417 @@ +// Copyright (c) 2015 Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// ntstatus_reason_win.h: Windows NTSTATUS code to string. +// +// Provides a means to convert NTSTATUS codes to strings. 
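Hypothetical usage of the converter implemented below; it assumes only the NTStatusToString declaration from processor/symbolic_constants_win.h, and 0xC0000005 is the well-known NTSTATUS code for STATUS_ACCESS_VIOLATION.

#include <iostream>
#include "processor/symbolic_constants_win.h"

int main() {
  // Expected output: STATUS_ACCESS_VIOLATION
  std::cout << google_breakpad::NTStatusToString(0xC0000005) << "\n";
}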
+// +// Author: Ben Wagner + +#include +#include + +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/common/minidump_exception_win32.h" +#include "processor/symbolic_constants_win.h" + +namespace google_breakpad { + +std::string NTStatusToString(uint32_t ntstatus) { + std::string reason; + // The content of this switch was created from ntstatus.h in the 8.1 SDK with + // + // egrep '#define [A-Z_0-9]+\s+\(\(NTSTATUS\)0xC[0-9A-F]+L\)' ntstatus.h + // | tr -d '\r' + // | sed -r 's@#define ([A-Z_0-9]+)\s+\(\(NTSTATUS\)(0xC[0-9A-F]+)L\).*@\2 \1@' + // | sort + // | sed -r 's@(0xC[0-9A-F]+) ([A-Z_0-9]+)@ case MD_NTSTATUS_WIN_\2:\n reason = "\2";\n break;@' + // + // With easy copy to clipboard with + // | xclip -selection c # on linux + // | clip # on windows + // | pbcopy # on mac + // + // and then the default case added. + switch (ntstatus) { + case MD_NTSTATUS_WIN_STATUS_UNSUCCESSFUL: + reason = "STATUS_UNSUCCESSFUL"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_IMPLEMENTED: + reason = "STATUS_NOT_IMPLEMENTED"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_INFO_CLASS: + reason = "STATUS_INVALID_INFO_CLASS"; + break; + case MD_NTSTATUS_WIN_STATUS_INFO_LENGTH_MISMATCH: + reason = "STATUS_INFO_LENGTH_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_ACCESS_VIOLATION: + reason = "STATUS_ACCESS_VIOLATION"; + break; + case MD_NTSTATUS_WIN_STATUS_IN_PAGE_ERROR: + reason = "STATUS_IN_PAGE_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_PAGEFILE_QUOTA: + reason = "STATUS_PAGEFILE_QUOTA"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_HANDLE: + reason = "STATUS_INVALID_HANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_INITIAL_STACK: + reason = "STATUS_BAD_INITIAL_STACK"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_INITIAL_PC: + reason = "STATUS_BAD_INITIAL_PC"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_CID: + reason = "STATUS_INVALID_CID"; + break; + case MD_NTSTATUS_WIN_STATUS_TIMER_NOT_CANCELED: + reason = "STATUS_TIMER_NOT_CANCELED"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER: + reason = "STATUS_INVALID_PARAMETER"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_SUCH_DEVICE: + reason = "STATUS_NO_SUCH_DEVICE"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_SUCH_FILE: + reason = "STATUS_NO_SUCH_FILE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_DEVICE_REQUEST: + reason = "STATUS_INVALID_DEVICE_REQUEST"; + break; + case MD_NTSTATUS_WIN_STATUS_END_OF_FILE: + reason = "STATUS_END_OF_FILE"; + break; + case MD_NTSTATUS_WIN_STATUS_WRONG_VOLUME: + reason = "STATUS_WRONG_VOLUME"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_MEDIA_IN_DEVICE: + reason = "STATUS_NO_MEDIA_IN_DEVICE"; + break; + case MD_NTSTATUS_WIN_STATUS_UNRECOGNIZED_MEDIA: + reason = "STATUS_UNRECOGNIZED_MEDIA"; + break; + case MD_NTSTATUS_WIN_STATUS_NONEXISTENT_SECTOR: + reason = "STATUS_NONEXISTENT_SECTOR"; + break; + case MD_NTSTATUS_WIN_STATUS_MORE_PROCESSING_REQUIRED: + reason = "STATUS_MORE_PROCESSING_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_MEMORY: + reason = "STATUS_NO_MEMORY"; + break; + case MD_NTSTATUS_WIN_STATUS_CONFLICTING_ADDRESSES: + reason = "STATUS_CONFLICTING_ADDRESSES"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_MAPPED_VIEW: + reason = "STATUS_NOT_MAPPED_VIEW"; + break; + case MD_NTSTATUS_WIN_STATUS_UNABLE_TO_FREE_VM: + reason = "STATUS_UNABLE_TO_FREE_VM"; + break; + case MD_NTSTATUS_WIN_STATUS_UNABLE_TO_DELETE_SECTION: + reason = "STATUS_UNABLE_TO_DELETE_SECTION"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_SYSTEM_SERVICE: + reason = "STATUS_INVALID_SYSTEM_SERVICE"; + 
break; + case MD_NTSTATUS_WIN_STATUS_ILLEGAL_INSTRUCTION: + reason = "STATUS_ILLEGAL_INSTRUCTION"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_LOCK_SEQUENCE: + reason = "STATUS_INVALID_LOCK_SEQUENCE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_VIEW_SIZE: + reason = "STATUS_INVALID_VIEW_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_FILE_FOR_SECTION: + reason = "STATUS_INVALID_FILE_FOR_SECTION"; + break; + case MD_NTSTATUS_WIN_STATUS_ALREADY_COMMITTED: + reason = "STATUS_ALREADY_COMMITTED"; + break; + case MD_NTSTATUS_WIN_STATUS_ACCESS_DENIED: + reason = "STATUS_ACCESS_DENIED"; + break; + case MD_NTSTATUS_WIN_STATUS_BUFFER_TOO_SMALL: + reason = "STATUS_BUFFER_TOO_SMALL"; + break; + case MD_NTSTATUS_WIN_STATUS_OBJECT_TYPE_MISMATCH: + reason = "STATUS_OBJECT_TYPE_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_NONCONTINUABLE_EXCEPTION: + reason = "STATUS_NONCONTINUABLE_EXCEPTION"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_DISPOSITION: + reason = "STATUS_INVALID_DISPOSITION"; + break; + case MD_NTSTATUS_WIN_STATUS_UNWIND: + reason = "STATUS_UNWIND"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_STACK: + reason = "STATUS_BAD_STACK"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_UNWIND_TARGET: + reason = "STATUS_INVALID_UNWIND_TARGET"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_LOCKED: + reason = "STATUS_NOT_LOCKED"; + break; + case MD_NTSTATUS_WIN_STATUS_PARITY_ERROR: + reason = "STATUS_PARITY_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_UNABLE_TO_DECOMMIT_VM: + reason = "STATUS_UNABLE_TO_DECOMMIT_VM"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_COMMITTED: + reason = "STATUS_NOT_COMMITTED"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PORT_ATTRIBUTES: + reason = "STATUS_INVALID_PORT_ATTRIBUTES"; + break; + case MD_NTSTATUS_WIN_STATUS_PORT_MESSAGE_TOO_LONG: + reason = "STATUS_PORT_MESSAGE_TOO_LONG"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_MIX: + reason = "STATUS_INVALID_PARAMETER_MIX"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_QUOTA_LOWER: + reason = "STATUS_INVALID_QUOTA_LOWER"; + break; + case MD_NTSTATUS_WIN_STATUS_DISK_CORRUPT_ERROR: + reason = "STATUS_DISK_CORRUPT_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_OBJECT_NAME_INVALID: + reason = "STATUS_OBJECT_NAME_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_OBJECT_NAME_NOT_FOUND: + reason = "STATUS_OBJECT_NAME_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_OBJECT_NAME_COLLISION: + reason = "STATUS_OBJECT_NAME_COLLISION"; + break; + case MD_NTSTATUS_WIN_STATUS_PORT_DISCONNECTED: + reason = "STATUS_PORT_DISCONNECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_DEVICE_ALREADY_ATTACHED: + reason = "STATUS_DEVICE_ALREADY_ATTACHED"; + break; + case MD_NTSTATUS_WIN_STATUS_OBJECT_PATH_INVALID: + reason = "STATUS_OBJECT_PATH_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_OBJECT_PATH_NOT_FOUND: + reason = "STATUS_OBJECT_PATH_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_OBJECT_PATH_SYNTAX_BAD: + reason = "STATUS_OBJECT_PATH_SYNTAX_BAD"; + break; + case MD_NTSTATUS_WIN_STATUS_DATA_OVERRUN: + reason = "STATUS_DATA_OVERRUN"; + break; + case MD_NTSTATUS_WIN_STATUS_DATA_LATE_ERROR: + reason = "STATUS_DATA_LATE_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_DATA_ERROR: + reason = "STATUS_DATA_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_CRC_ERROR: + reason = "STATUS_CRC_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_SECTION_TOO_BIG: + reason = "STATUS_SECTION_TOO_BIG"; + break; + case MD_NTSTATUS_WIN_STATUS_PORT_CONNECTION_REFUSED: + reason = "STATUS_PORT_CONNECTION_REFUSED"; + break; + case 
MD_NTSTATUS_WIN_STATUS_INVALID_PORT_HANDLE: + reason = "STATUS_INVALID_PORT_HANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_SHARING_VIOLATION: + reason = "STATUS_SHARING_VIOLATION"; + break; + case MD_NTSTATUS_WIN_STATUS_QUOTA_EXCEEDED: + reason = "STATUS_QUOTA_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PAGE_PROTECTION: + reason = "STATUS_INVALID_PAGE_PROTECTION"; + break; + case MD_NTSTATUS_WIN_STATUS_MUTANT_NOT_OWNED: + reason = "STATUS_MUTANT_NOT_OWNED"; + break; + case MD_NTSTATUS_WIN_STATUS_SEMAPHORE_LIMIT_EXCEEDED: + reason = "STATUS_SEMAPHORE_LIMIT_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_PORT_ALREADY_SET: + reason = "STATUS_PORT_ALREADY_SET"; + break; + case MD_NTSTATUS_WIN_STATUS_SECTION_NOT_IMAGE: + reason = "STATUS_SECTION_NOT_IMAGE"; + break; + case MD_NTSTATUS_WIN_STATUS_SUSPEND_COUNT_EXCEEDED: + reason = "STATUS_SUSPEND_COUNT_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_THREAD_IS_TERMINATING: + reason = "STATUS_THREAD_IS_TERMINATING"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_WORKING_SET_LIMIT: + reason = "STATUS_BAD_WORKING_SET_LIMIT"; + break; + case MD_NTSTATUS_WIN_STATUS_INCOMPATIBLE_FILE_MAP: + reason = "STATUS_INCOMPATIBLE_FILE_MAP"; + break; + case MD_NTSTATUS_WIN_STATUS_SECTION_PROTECTION: + reason = "STATUS_SECTION_PROTECTION"; + break; + case MD_NTSTATUS_WIN_STATUS_EAS_NOT_SUPPORTED: + reason = "STATUS_EAS_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_EA_TOO_LARGE: + reason = "STATUS_EA_TOO_LARGE"; + break; + case MD_NTSTATUS_WIN_STATUS_NONEXISTENT_EA_ENTRY: + reason = "STATUS_NONEXISTENT_EA_ENTRY"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_EAS_ON_FILE: + reason = "STATUS_NO_EAS_ON_FILE"; + break; + case MD_NTSTATUS_WIN_STATUS_EA_CORRUPT_ERROR: + reason = "STATUS_EA_CORRUPT_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_LOCK_CONFLICT: + reason = "STATUS_FILE_LOCK_CONFLICT"; + break; + case MD_NTSTATUS_WIN_STATUS_LOCK_NOT_GRANTED: + reason = "STATUS_LOCK_NOT_GRANTED"; + break; + case MD_NTSTATUS_WIN_STATUS_DELETE_PENDING: + reason = "STATUS_DELETE_PENDING"; + break; + case MD_NTSTATUS_WIN_STATUS_CTL_FILE_NOT_SUPPORTED: + reason = "STATUS_CTL_FILE_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_UNKNOWN_REVISION: + reason = "STATUS_UNKNOWN_REVISION"; + break; + case MD_NTSTATUS_WIN_STATUS_REVISION_MISMATCH: + reason = "STATUS_REVISION_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_OWNER: + reason = "STATUS_INVALID_OWNER"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PRIMARY_GROUP: + reason = "STATUS_INVALID_PRIMARY_GROUP"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_IMPERSONATION_TOKEN: + reason = "STATUS_NO_IMPERSONATION_TOKEN"; + break; + case MD_NTSTATUS_WIN_STATUS_CANT_DISABLE_MANDATORY: + reason = "STATUS_CANT_DISABLE_MANDATORY"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_LOGON_SERVERS: + reason = "STATUS_NO_LOGON_SERVERS"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_SUCH_LOGON_SESSION: + reason = "STATUS_NO_SUCH_LOGON_SESSION"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_SUCH_PRIVILEGE: + reason = "STATUS_NO_SUCH_PRIVILEGE"; + break; + case MD_NTSTATUS_WIN_STATUS_PRIVILEGE_NOT_HELD: + reason = "STATUS_PRIVILEGE_NOT_HELD"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_ACCOUNT_NAME: + reason = "STATUS_INVALID_ACCOUNT_NAME"; + break; + case MD_NTSTATUS_WIN_STATUS_USER_EXISTS: + reason = "STATUS_USER_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_SUCH_USER: + reason = "STATUS_NO_SUCH_USER"; + break; + case MD_NTSTATUS_WIN_STATUS_GROUP_EXISTS: + reason = "STATUS_GROUP_EXISTS"; + break; + case 
MD_NTSTATUS_WIN_STATUS_NO_SUCH_GROUP: + reason = "STATUS_NO_SUCH_GROUP"; + break; + case MD_NTSTATUS_WIN_STATUS_MEMBER_IN_GROUP: + reason = "STATUS_MEMBER_IN_GROUP"; + break; + case MD_NTSTATUS_WIN_STATUS_MEMBER_NOT_IN_GROUP: + reason = "STATUS_MEMBER_NOT_IN_GROUP"; + break; + case MD_NTSTATUS_WIN_STATUS_LAST_ADMIN: + reason = "STATUS_LAST_ADMIN"; + break; + case MD_NTSTATUS_WIN_STATUS_WRONG_PASSWORD: + reason = "STATUS_WRONG_PASSWORD"; + break; + case MD_NTSTATUS_WIN_STATUS_ILL_FORMED_PASSWORD: + reason = "STATUS_ILL_FORMED_PASSWORD"; + break; + case MD_NTSTATUS_WIN_STATUS_PASSWORD_RESTRICTION: + reason = "STATUS_PASSWORD_RESTRICTION"; + break; + case MD_NTSTATUS_WIN_STATUS_LOGON_FAILURE: + reason = "STATUS_LOGON_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_ACCOUNT_RESTRICTION: + reason = "STATUS_ACCOUNT_RESTRICTION"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_LOGON_HOURS: + reason = "STATUS_INVALID_LOGON_HOURS"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_WORKSTATION: + reason = "STATUS_INVALID_WORKSTATION"; + break; + case MD_NTSTATUS_WIN_STATUS_PASSWORD_EXPIRED: + reason = "STATUS_PASSWORD_EXPIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_ACCOUNT_DISABLED: + reason = "STATUS_ACCOUNT_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_NONE_MAPPED: + reason = "STATUS_NONE_MAPPED"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_LUIDS_REQUESTED: + reason = "STATUS_TOO_MANY_LUIDS_REQUESTED"; + break; + case MD_NTSTATUS_WIN_STATUS_LUIDS_EXHAUSTED: + reason = "STATUS_LUIDS_EXHAUSTED"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_SUB_AUTHORITY: + reason = "STATUS_INVALID_SUB_AUTHORITY"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_ACL: + reason = "STATUS_INVALID_ACL"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_SID: + reason = "STATUS_INVALID_SID"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_SECURITY_DESCR: + reason = "STATUS_INVALID_SECURITY_DESCR"; + break; + case MD_NTSTATUS_WIN_STATUS_PROCEDURE_NOT_FOUND: + reason = "STATUS_PROCEDURE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_FORMAT: + reason = "STATUS_INVALID_IMAGE_FORMAT"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_TOKEN: + reason = "STATUS_NO_TOKEN"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_INHERITANCE_ACL: + reason = "STATUS_BAD_INHERITANCE_ACL"; + break; + case MD_NTSTATUS_WIN_STATUS_RANGE_NOT_LOCKED: + reason = "STATUS_RANGE_NOT_LOCKED"; + break; + case MD_NTSTATUS_WIN_STATUS_DISK_FULL: + reason = "STATUS_DISK_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_SERVER_DISABLED: + reason = "STATUS_SERVER_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_SERVER_NOT_DISABLED: + reason = "STATUS_SERVER_NOT_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_GUIDS_REQUESTED: + reason = "STATUS_TOO_MANY_GUIDS_REQUESTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GUIDS_EXHAUSTED: + reason = "STATUS_GUIDS_EXHAUSTED"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_ID_AUTHORITY: + reason = "STATUS_INVALID_ID_AUTHORITY"; + break; + case MD_NTSTATUS_WIN_STATUS_AGENTS_EXHAUSTED: + reason = "STATUS_AGENTS_EXHAUSTED"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_VOLUME_LABEL: + reason = "STATUS_INVALID_VOLUME_LABEL"; + break; + case MD_NTSTATUS_WIN_STATUS_SECTION_NOT_EXTENDED: + reason = "STATUS_SECTION_NOT_EXTENDED"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_MAPPED_DATA: + reason = "STATUS_NOT_MAPPED_DATA"; + break; + case MD_NTSTATUS_WIN_STATUS_RESOURCE_DATA_NOT_FOUND: + reason = "STATUS_RESOURCE_DATA_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_RESOURCE_TYPE_NOT_FOUND: + reason = 
"STATUS_RESOURCE_TYPE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_RESOURCE_NAME_NOT_FOUND: + reason = "STATUS_RESOURCE_NAME_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_ARRAY_BOUNDS_EXCEEDED: + reason = "STATUS_ARRAY_BOUNDS_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_FLOAT_DENORMAL_OPERAND: + reason = "STATUS_FLOAT_DENORMAL_OPERAND"; + break; + case MD_NTSTATUS_WIN_STATUS_FLOAT_DIVIDE_BY_ZERO: + reason = "STATUS_FLOAT_DIVIDE_BY_ZERO"; + break; + case MD_NTSTATUS_WIN_STATUS_FLOAT_INEXACT_RESULT: + reason = "STATUS_FLOAT_INEXACT_RESULT"; + break; + case MD_NTSTATUS_WIN_STATUS_FLOAT_INVALID_OPERATION: + reason = "STATUS_FLOAT_INVALID_OPERATION"; + break; + case MD_NTSTATUS_WIN_STATUS_FLOAT_OVERFLOW: + reason = "STATUS_FLOAT_OVERFLOW"; + break; + case MD_NTSTATUS_WIN_STATUS_FLOAT_STACK_CHECK: + reason = "STATUS_FLOAT_STACK_CHECK"; + break; + case MD_NTSTATUS_WIN_STATUS_FLOAT_UNDERFLOW: + reason = "STATUS_FLOAT_UNDERFLOW"; + break; + case MD_NTSTATUS_WIN_STATUS_INTEGER_DIVIDE_BY_ZERO: + reason = "STATUS_INTEGER_DIVIDE_BY_ZERO"; + break; + case MD_NTSTATUS_WIN_STATUS_INTEGER_OVERFLOW: + reason = "STATUS_INTEGER_OVERFLOW"; + break; + case MD_NTSTATUS_WIN_STATUS_PRIVILEGED_INSTRUCTION: + reason = "STATUS_PRIVILEGED_INSTRUCTION"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_PAGING_FILES: + reason = "STATUS_TOO_MANY_PAGING_FILES"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_INVALID: + reason = "STATUS_FILE_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_ALLOTTED_SPACE_EXCEEDED: + reason = "STATUS_ALLOTTED_SPACE_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_INSUFFICIENT_RESOURCES: + reason = "STATUS_INSUFFICIENT_RESOURCES"; + break; + case MD_NTSTATUS_WIN_STATUS_DFS_EXIT_PATH_FOUND: + reason = "STATUS_DFS_EXIT_PATH_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_DEVICE_DATA_ERROR: + reason = "STATUS_DEVICE_DATA_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_DEVICE_NOT_CONNECTED: + reason = "STATUS_DEVICE_NOT_CONNECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_DEVICE_POWER_FAILURE: + reason = "STATUS_DEVICE_POWER_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_FREE_VM_NOT_AT_BASE: + reason = "STATUS_FREE_VM_NOT_AT_BASE"; + break; + case MD_NTSTATUS_WIN_STATUS_MEMORY_NOT_ALLOCATED: + reason = "STATUS_MEMORY_NOT_ALLOCATED"; + break; + case MD_NTSTATUS_WIN_STATUS_WORKING_SET_QUOTA: + reason = "STATUS_WORKING_SET_QUOTA"; + break; + case MD_NTSTATUS_WIN_STATUS_MEDIA_WRITE_PROTECTED: + reason = "STATUS_MEDIA_WRITE_PROTECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_DEVICE_NOT_READY: + reason = "STATUS_DEVICE_NOT_READY"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_GROUP_ATTRIBUTES: + reason = "STATUS_INVALID_GROUP_ATTRIBUTES"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_IMPERSONATION_LEVEL: + reason = "STATUS_BAD_IMPERSONATION_LEVEL"; + break; + case MD_NTSTATUS_WIN_STATUS_CANT_OPEN_ANONYMOUS: + reason = "STATUS_CANT_OPEN_ANONYMOUS"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_VALIDATION_CLASS: + reason = "STATUS_BAD_VALIDATION_CLASS"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_TOKEN_TYPE: + reason = "STATUS_BAD_TOKEN_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_MASTER_BOOT_RECORD: + reason = "STATUS_BAD_MASTER_BOOT_RECORD"; + break; + case MD_NTSTATUS_WIN_STATUS_INSTRUCTION_MISALIGNMENT: + reason = "STATUS_INSTRUCTION_MISALIGNMENT"; + break; + case MD_NTSTATUS_WIN_STATUS_INSTANCE_NOT_AVAILABLE: + reason = "STATUS_INSTANCE_NOT_AVAILABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_PIPE_NOT_AVAILABLE: + reason = "STATUS_PIPE_NOT_AVAILABLE"; + break; + case 
MD_NTSTATUS_WIN_STATUS_INVALID_PIPE_STATE: + reason = "STATUS_INVALID_PIPE_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_PIPE_BUSY: + reason = "STATUS_PIPE_BUSY"; + break; + case MD_NTSTATUS_WIN_STATUS_ILLEGAL_FUNCTION: + reason = "STATUS_ILLEGAL_FUNCTION"; + break; + case MD_NTSTATUS_WIN_STATUS_PIPE_DISCONNECTED: + reason = "STATUS_PIPE_DISCONNECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_PIPE_CLOSING: + reason = "STATUS_PIPE_CLOSING"; + break; + case MD_NTSTATUS_WIN_STATUS_PIPE_CONNECTED: + reason = "STATUS_PIPE_CONNECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_PIPE_LISTENING: + reason = "STATUS_PIPE_LISTENING"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_READ_MODE: + reason = "STATUS_INVALID_READ_MODE"; + break; + case MD_NTSTATUS_WIN_STATUS_IO_TIMEOUT: + reason = "STATUS_IO_TIMEOUT"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_FORCED_CLOSED: + reason = "STATUS_FILE_FORCED_CLOSED"; + break; + case MD_NTSTATUS_WIN_STATUS_PROFILING_NOT_STARTED: + reason = "STATUS_PROFILING_NOT_STARTED"; + break; + case MD_NTSTATUS_WIN_STATUS_PROFILING_NOT_STOPPED: + reason = "STATUS_PROFILING_NOT_STOPPED"; + break; + case MD_NTSTATUS_WIN_STATUS_COULD_NOT_INTERPRET: + reason = "STATUS_COULD_NOT_INTERPRET"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_IS_A_DIRECTORY: + reason = "STATUS_FILE_IS_A_DIRECTORY"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_SUPPORTED: + reason = "STATUS_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_REMOTE_NOT_LISTENING: + reason = "STATUS_REMOTE_NOT_LISTENING"; + break; + case MD_NTSTATUS_WIN_STATUS_DUPLICATE_NAME: + reason = "STATUS_DUPLICATE_NAME"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_NETWORK_PATH: + reason = "STATUS_BAD_NETWORK_PATH"; + break; + case MD_NTSTATUS_WIN_STATUS_NETWORK_BUSY: + reason = "STATUS_NETWORK_BUSY"; + break; + case MD_NTSTATUS_WIN_STATUS_DEVICE_DOES_NOT_EXIST: + reason = "STATUS_DEVICE_DOES_NOT_EXIST"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_COMMANDS: + reason = "STATUS_TOO_MANY_COMMANDS"; + break; + case MD_NTSTATUS_WIN_STATUS_ADAPTER_HARDWARE_ERROR: + reason = "STATUS_ADAPTER_HARDWARE_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_NETWORK_RESPONSE: + reason = "STATUS_INVALID_NETWORK_RESPONSE"; + break; + case MD_NTSTATUS_WIN_STATUS_UNEXPECTED_NETWORK_ERROR: + reason = "STATUS_UNEXPECTED_NETWORK_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_REMOTE_ADAPTER: + reason = "STATUS_BAD_REMOTE_ADAPTER"; + break; + case MD_NTSTATUS_WIN_STATUS_PRINT_QUEUE_FULL: + reason = "STATUS_PRINT_QUEUE_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_SPOOL_SPACE: + reason = "STATUS_NO_SPOOL_SPACE"; + break; + case MD_NTSTATUS_WIN_STATUS_PRINT_CANCELLED: + reason = "STATUS_PRINT_CANCELLED"; + break; + case MD_NTSTATUS_WIN_STATUS_NETWORK_NAME_DELETED: + reason = "STATUS_NETWORK_NAME_DELETED"; + break; + case MD_NTSTATUS_WIN_STATUS_NETWORK_ACCESS_DENIED: + reason = "STATUS_NETWORK_ACCESS_DENIED"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_DEVICE_TYPE: + reason = "STATUS_BAD_DEVICE_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_NETWORK_NAME: + reason = "STATUS_BAD_NETWORK_NAME"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_NAMES: + reason = "STATUS_TOO_MANY_NAMES"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_SESSIONS: + reason = "STATUS_TOO_MANY_SESSIONS"; + break; + case MD_NTSTATUS_WIN_STATUS_SHARING_PAUSED: + reason = "STATUS_SHARING_PAUSED"; + break; + case MD_NTSTATUS_WIN_STATUS_REQUEST_NOT_ACCEPTED: + reason = "STATUS_REQUEST_NOT_ACCEPTED"; + break; + case MD_NTSTATUS_WIN_STATUS_REDIRECTOR_PAUSED: + reason = 
"STATUS_REDIRECTOR_PAUSED"; + break; + case MD_NTSTATUS_WIN_STATUS_NET_WRITE_FAULT: + reason = "STATUS_NET_WRITE_FAULT"; + break; + case MD_NTSTATUS_WIN_STATUS_PROFILING_AT_LIMIT: + reason = "STATUS_PROFILING_AT_LIMIT"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_SAME_DEVICE: + reason = "STATUS_NOT_SAME_DEVICE"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_RENAMED: + reason = "STATUS_FILE_RENAMED"; + break; + case MD_NTSTATUS_WIN_STATUS_VIRTUAL_CIRCUIT_CLOSED: + reason = "STATUS_VIRTUAL_CIRCUIT_CLOSED"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_SECURITY_ON_OBJECT: + reason = "STATUS_NO_SECURITY_ON_OBJECT"; + break; + case MD_NTSTATUS_WIN_STATUS_CANT_WAIT: + reason = "STATUS_CANT_WAIT"; + break; + case MD_NTSTATUS_WIN_STATUS_PIPE_EMPTY: + reason = "STATUS_PIPE_EMPTY"; + break; + case MD_NTSTATUS_WIN_STATUS_CANT_ACCESS_DOMAIN_INFO: + reason = "STATUS_CANT_ACCESS_DOMAIN_INFO"; + break; + case MD_NTSTATUS_WIN_STATUS_CANT_TERMINATE_SELF: + reason = "STATUS_CANT_TERMINATE_SELF"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_SERVER_STATE: + reason = "STATUS_INVALID_SERVER_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_DOMAIN_STATE: + reason = "STATUS_INVALID_DOMAIN_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_DOMAIN_ROLE: + reason = "STATUS_INVALID_DOMAIN_ROLE"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_SUCH_DOMAIN: + reason = "STATUS_NO_SUCH_DOMAIN"; + break; + case MD_NTSTATUS_WIN_STATUS_DOMAIN_EXISTS: + reason = "STATUS_DOMAIN_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_DOMAIN_LIMIT_EXCEEDED: + reason = "STATUS_DOMAIN_LIMIT_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_OPLOCK_NOT_GRANTED: + reason = "STATUS_OPLOCK_NOT_GRANTED"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_OPLOCK_PROTOCOL: + reason = "STATUS_INVALID_OPLOCK_PROTOCOL"; + break; + case MD_NTSTATUS_WIN_STATUS_INTERNAL_DB_CORRUPTION: + reason = "STATUS_INTERNAL_DB_CORRUPTION"; + break; + case MD_NTSTATUS_WIN_STATUS_INTERNAL_ERROR: + reason = "STATUS_INTERNAL_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_GENERIC_NOT_MAPPED: + reason = "STATUS_GENERIC_NOT_MAPPED"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_DESCRIPTOR_FORMAT: + reason = "STATUS_BAD_DESCRIPTOR_FORMAT"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_USER_BUFFER: + reason = "STATUS_INVALID_USER_BUFFER"; + break; + case MD_NTSTATUS_WIN_STATUS_UNEXPECTED_IO_ERROR: + reason = "STATUS_UNEXPECTED_IO_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_UNEXPECTED_MM_CREATE_ERR: + reason = "STATUS_UNEXPECTED_MM_CREATE_ERR"; + break; + case MD_NTSTATUS_WIN_STATUS_UNEXPECTED_MM_MAP_ERROR: + reason = "STATUS_UNEXPECTED_MM_MAP_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_UNEXPECTED_MM_EXTEND_ERR: + reason = "STATUS_UNEXPECTED_MM_EXTEND_ERR"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_LOGON_PROCESS: + reason = "STATUS_NOT_LOGON_PROCESS"; + break; + case MD_NTSTATUS_WIN_STATUS_LOGON_SESSION_EXISTS: + reason = "STATUS_LOGON_SESSION_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_1: + reason = "STATUS_INVALID_PARAMETER_1"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_2: + reason = "STATUS_INVALID_PARAMETER_2"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_3: + reason = "STATUS_INVALID_PARAMETER_3"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_4: + reason = "STATUS_INVALID_PARAMETER_4"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_5: + reason = "STATUS_INVALID_PARAMETER_5"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_6: + reason = "STATUS_INVALID_PARAMETER_6"; + break; + case 
MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_7: + reason = "STATUS_INVALID_PARAMETER_7"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_8: + reason = "STATUS_INVALID_PARAMETER_8"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_9: + reason = "STATUS_INVALID_PARAMETER_9"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_10: + reason = "STATUS_INVALID_PARAMETER_10"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_11: + reason = "STATUS_INVALID_PARAMETER_11"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PARAMETER_12: + reason = "STATUS_INVALID_PARAMETER_12"; + break; + case MD_NTSTATUS_WIN_STATUS_REDIRECTOR_NOT_STARTED: + reason = "STATUS_REDIRECTOR_NOT_STARTED"; + break; + case MD_NTSTATUS_WIN_STATUS_REDIRECTOR_STARTED: + reason = "STATUS_REDIRECTOR_STARTED"; + break; + case MD_NTSTATUS_WIN_STATUS_STACK_OVERFLOW: + reason = "STATUS_STACK_OVERFLOW"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_SUCH_PACKAGE: + reason = "STATUS_NO_SUCH_PACKAGE"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_FUNCTION_TABLE: + reason = "STATUS_BAD_FUNCTION_TABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_VARIABLE_NOT_FOUND: + reason = "STATUS_VARIABLE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_DIRECTORY_NOT_EMPTY: + reason = "STATUS_DIRECTORY_NOT_EMPTY"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_CORRUPT_ERROR: + reason = "STATUS_FILE_CORRUPT_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_A_DIRECTORY: + reason = "STATUS_NOT_A_DIRECTORY"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_LOGON_SESSION_STATE: + reason = "STATUS_BAD_LOGON_SESSION_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_LOGON_SESSION_COLLISION: + reason = "STATUS_LOGON_SESSION_COLLISION"; + break; + case MD_NTSTATUS_WIN_STATUS_NAME_TOO_LONG: + reason = "STATUS_NAME_TOO_LONG"; + break; + case MD_NTSTATUS_WIN_STATUS_FILES_OPEN: + reason = "STATUS_FILES_OPEN"; + break; + case MD_NTSTATUS_WIN_STATUS_CONNECTION_IN_USE: + reason = "STATUS_CONNECTION_IN_USE"; + break; + case MD_NTSTATUS_WIN_STATUS_MESSAGE_NOT_FOUND: + reason = "STATUS_MESSAGE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_PROCESS_IS_TERMINATING: + reason = "STATUS_PROCESS_IS_TERMINATING"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_LOGON_TYPE: + reason = "STATUS_INVALID_LOGON_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_GUID_TRANSLATION: + reason = "STATUS_NO_GUID_TRANSLATION"; + break; + case MD_NTSTATUS_WIN_STATUS_CANNOT_IMPERSONATE: + reason = "STATUS_CANNOT_IMPERSONATE"; + break; + case MD_NTSTATUS_WIN_STATUS_IMAGE_ALREADY_LOADED: + reason = "STATUS_IMAGE_ALREADY_LOADED"; + break; + case MD_NTSTATUS_WIN_STATUS_ABIOS_NOT_PRESENT: + reason = "STATUS_ABIOS_NOT_PRESENT"; + break; + case MD_NTSTATUS_WIN_STATUS_ABIOS_LID_NOT_EXIST: + reason = "STATUS_ABIOS_LID_NOT_EXIST"; + break; + case MD_NTSTATUS_WIN_STATUS_ABIOS_LID_ALREADY_OWNED: + reason = "STATUS_ABIOS_LID_ALREADY_OWNED"; + break; + case MD_NTSTATUS_WIN_STATUS_ABIOS_NOT_LID_OWNER: + reason = "STATUS_ABIOS_NOT_LID_OWNER"; + break; + case MD_NTSTATUS_WIN_STATUS_ABIOS_INVALID_COMMAND: + reason = "STATUS_ABIOS_INVALID_COMMAND"; + break; + case MD_NTSTATUS_WIN_STATUS_ABIOS_INVALID_LID: + reason = "STATUS_ABIOS_INVALID_LID"; + break; + case MD_NTSTATUS_WIN_STATUS_ABIOS_SELECTOR_NOT_AVAILABLE: + reason = "STATUS_ABIOS_SELECTOR_NOT_AVAILABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_ABIOS_INVALID_SELECTOR: + reason = "STATUS_ABIOS_INVALID_SELECTOR"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_LDT: + reason = "STATUS_NO_LDT"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_LDT_SIZE: + reason = 
"STATUS_INVALID_LDT_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_LDT_OFFSET: + reason = "STATUS_INVALID_LDT_OFFSET"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_LDT_DESCRIPTOR: + reason = "STATUS_INVALID_LDT_DESCRIPTOR"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_NE_FORMAT: + reason = "STATUS_INVALID_IMAGE_NE_FORMAT"; + break; + case MD_NTSTATUS_WIN_STATUS_RXACT_INVALID_STATE: + reason = "STATUS_RXACT_INVALID_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_RXACT_COMMIT_FAILURE: + reason = "STATUS_RXACT_COMMIT_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_MAPPED_FILE_SIZE_ZERO: + reason = "STATUS_MAPPED_FILE_SIZE_ZERO"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_OPENED_FILES: + reason = "STATUS_TOO_MANY_OPENED_FILES"; + break; + case MD_NTSTATUS_WIN_STATUS_CANCELLED: + reason = "STATUS_CANCELLED"; + break; + case MD_NTSTATUS_WIN_STATUS_CANNOT_DELETE: + reason = "STATUS_CANNOT_DELETE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_COMPUTER_NAME: + reason = "STATUS_INVALID_COMPUTER_NAME"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_DELETED: + reason = "STATUS_FILE_DELETED"; + break; + case MD_NTSTATUS_WIN_STATUS_SPECIAL_ACCOUNT: + reason = "STATUS_SPECIAL_ACCOUNT"; + break; + case MD_NTSTATUS_WIN_STATUS_SPECIAL_GROUP: + reason = "STATUS_SPECIAL_GROUP"; + break; + case MD_NTSTATUS_WIN_STATUS_SPECIAL_USER: + reason = "STATUS_SPECIAL_USER"; + break; + case MD_NTSTATUS_WIN_STATUS_MEMBERS_PRIMARY_GROUP: + reason = "STATUS_MEMBERS_PRIMARY_GROUP"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_CLOSED: + reason = "STATUS_FILE_CLOSED"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_THREADS: + reason = "STATUS_TOO_MANY_THREADS"; + break; + case MD_NTSTATUS_WIN_STATUS_THREAD_NOT_IN_PROCESS: + reason = "STATUS_THREAD_NOT_IN_PROCESS"; + break; + case MD_NTSTATUS_WIN_STATUS_TOKEN_ALREADY_IN_USE: + reason = "STATUS_TOKEN_ALREADY_IN_USE"; + break; + case MD_NTSTATUS_WIN_STATUS_PAGEFILE_QUOTA_EXCEEDED: + reason = "STATUS_PAGEFILE_QUOTA_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_COMMITMENT_LIMIT: + reason = "STATUS_COMMITMENT_LIMIT"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_LE_FORMAT: + reason = "STATUS_INVALID_IMAGE_LE_FORMAT"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_NOT_MZ: + reason = "STATUS_INVALID_IMAGE_NOT_MZ"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_PROTECT: + reason = "STATUS_INVALID_IMAGE_PROTECT"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_WIN_16: + reason = "STATUS_INVALID_IMAGE_WIN_16"; + break; + case MD_NTSTATUS_WIN_STATUS_LOGON_SERVER_CONFLICT: + reason = "STATUS_LOGON_SERVER_CONFLICT"; + break; + case MD_NTSTATUS_WIN_STATUS_TIME_DIFFERENCE_AT_DC: + reason = "STATUS_TIME_DIFFERENCE_AT_DC"; + break; + case MD_NTSTATUS_WIN_STATUS_SYNCHRONIZATION_REQUIRED: + reason = "STATUS_SYNCHRONIZATION_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_DLL_NOT_FOUND: + reason = "STATUS_DLL_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_OPEN_FAILED: + reason = "STATUS_OPEN_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_IO_PRIVILEGE_FAILED: + reason = "STATUS_IO_PRIVILEGE_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_ORDINAL_NOT_FOUND: + reason = "STATUS_ORDINAL_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_ENTRYPOINT_NOT_FOUND: + reason = "STATUS_ENTRYPOINT_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_CONTROL_C_EXIT: + reason = "STATUS_CONTROL_C_EXIT"; + break; + case MD_NTSTATUS_WIN_STATUS_LOCAL_DISCONNECT: + reason = "STATUS_LOCAL_DISCONNECT"; + break; + case MD_NTSTATUS_WIN_STATUS_REMOTE_DISCONNECT: + reason = 
"STATUS_REMOTE_DISCONNECT"; + break; + case MD_NTSTATUS_WIN_STATUS_REMOTE_RESOURCES: + reason = "STATUS_REMOTE_RESOURCES"; + break; + case MD_NTSTATUS_WIN_STATUS_LINK_FAILED: + reason = "STATUS_LINK_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_LINK_TIMEOUT: + reason = "STATUS_LINK_TIMEOUT"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_CONNECTION: + reason = "STATUS_INVALID_CONNECTION"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_ADDRESS: + reason = "STATUS_INVALID_ADDRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_DLL_INIT_FAILED: + reason = "STATUS_DLL_INIT_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_MISSING_SYSTEMFILE: + reason = "STATUS_MISSING_SYSTEMFILE"; + break; + case MD_NTSTATUS_WIN_STATUS_UNHANDLED_EXCEPTION: + reason = "STATUS_UNHANDLED_EXCEPTION"; + break; + case MD_NTSTATUS_WIN_STATUS_APP_INIT_FAILURE: + reason = "STATUS_APP_INIT_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_PAGEFILE_CREATE_FAILED: + reason = "STATUS_PAGEFILE_CREATE_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_PAGEFILE: + reason = "STATUS_NO_PAGEFILE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_LEVEL: + reason = "STATUS_INVALID_LEVEL"; + break; + case MD_NTSTATUS_WIN_STATUS_WRONG_PASSWORD_CORE: + reason = "STATUS_WRONG_PASSWORD_CORE"; + break; + case MD_NTSTATUS_WIN_STATUS_ILLEGAL_FLOAT_CONTEXT: + reason = "STATUS_ILLEGAL_FLOAT_CONTEXT"; + break; + case MD_NTSTATUS_WIN_STATUS_PIPE_BROKEN: + reason = "STATUS_PIPE_BROKEN"; + break; + case MD_NTSTATUS_WIN_STATUS_REGISTRY_CORRUPT: + reason = "STATUS_REGISTRY_CORRUPT"; + break; + case MD_NTSTATUS_WIN_STATUS_REGISTRY_IO_FAILED: + reason = "STATUS_REGISTRY_IO_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_EVENT_PAIR: + reason = "STATUS_NO_EVENT_PAIR"; + break; + case MD_NTSTATUS_WIN_STATUS_UNRECOGNIZED_VOLUME: + reason = "STATUS_UNRECOGNIZED_VOLUME"; + break; + case MD_NTSTATUS_WIN_STATUS_SERIAL_NO_DEVICE_INITED: + reason = "STATUS_SERIAL_NO_DEVICE_INITED"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_SUCH_ALIAS: + reason = "STATUS_NO_SUCH_ALIAS"; + break; + case MD_NTSTATUS_WIN_STATUS_MEMBER_NOT_IN_ALIAS: + reason = "STATUS_MEMBER_NOT_IN_ALIAS"; + break; + case MD_NTSTATUS_WIN_STATUS_MEMBER_IN_ALIAS: + reason = "STATUS_MEMBER_IN_ALIAS"; + break; + case MD_NTSTATUS_WIN_STATUS_ALIAS_EXISTS: + reason = "STATUS_ALIAS_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_LOGON_NOT_GRANTED: + reason = "STATUS_LOGON_NOT_GRANTED"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_SECRETS: + reason = "STATUS_TOO_MANY_SECRETS"; + break; + case MD_NTSTATUS_WIN_STATUS_SECRET_TOO_LONG: + reason = "STATUS_SECRET_TOO_LONG"; + break; + case MD_NTSTATUS_WIN_STATUS_INTERNAL_DB_ERROR: + reason = "STATUS_INTERNAL_DB_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_FULLSCREEN_MODE: + reason = "STATUS_FULLSCREEN_MODE"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_CONTEXT_IDS: + reason = "STATUS_TOO_MANY_CONTEXT_IDS"; + break; + case MD_NTSTATUS_WIN_STATUS_LOGON_TYPE_NOT_GRANTED: + reason = "STATUS_LOGON_TYPE_NOT_GRANTED"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_REGISTRY_FILE: + reason = "STATUS_NOT_REGISTRY_FILE"; + break; + case MD_NTSTATUS_WIN_STATUS_NT_CROSS_ENCRYPTION_REQUIRED: + reason = "STATUS_NT_CROSS_ENCRYPTION_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_DOMAIN_CTRLR_CONFIG_ERROR: + reason = "STATUS_DOMAIN_CTRLR_CONFIG_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_FT_MISSING_MEMBER: + reason = "STATUS_FT_MISSING_MEMBER"; + break; + case MD_NTSTATUS_WIN_STATUS_ILL_FORMED_SERVICE_ENTRY: + reason = "STATUS_ILL_FORMED_SERVICE_ENTRY"; + break; + case 
MD_NTSTATUS_WIN_STATUS_ILLEGAL_CHARACTER: + reason = "STATUS_ILLEGAL_CHARACTER"; + break; + case MD_NTSTATUS_WIN_STATUS_UNMAPPABLE_CHARACTER: + reason = "STATUS_UNMAPPABLE_CHARACTER"; + break; + case MD_NTSTATUS_WIN_STATUS_UNDEFINED_CHARACTER: + reason = "STATUS_UNDEFINED_CHARACTER"; + break; + case MD_NTSTATUS_WIN_STATUS_FLOPPY_VOLUME: + reason = "STATUS_FLOPPY_VOLUME"; + break; + case MD_NTSTATUS_WIN_STATUS_FLOPPY_ID_MARK_NOT_FOUND: + reason = "STATUS_FLOPPY_ID_MARK_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_FLOPPY_WRONG_CYLINDER: + reason = "STATUS_FLOPPY_WRONG_CYLINDER"; + break; + case MD_NTSTATUS_WIN_STATUS_FLOPPY_UNKNOWN_ERROR: + reason = "STATUS_FLOPPY_UNKNOWN_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_FLOPPY_BAD_REGISTERS: + reason = "STATUS_FLOPPY_BAD_REGISTERS"; + break; + case MD_NTSTATUS_WIN_STATUS_DISK_RECALIBRATE_FAILED: + reason = "STATUS_DISK_RECALIBRATE_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_DISK_OPERATION_FAILED: + reason = "STATUS_DISK_OPERATION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_DISK_RESET_FAILED: + reason = "STATUS_DISK_RESET_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_SHARED_IRQ_BUSY: + reason = "STATUS_SHARED_IRQ_BUSY"; + break; + case MD_NTSTATUS_WIN_STATUS_FT_ORPHANING: + reason = "STATUS_FT_ORPHANING"; + break; + case MD_NTSTATUS_WIN_STATUS_BIOS_FAILED_TO_CONNECT_INTERRUPT: + reason = "STATUS_BIOS_FAILED_TO_CONNECT_INTERRUPT"; + break; + case MD_NTSTATUS_WIN_STATUS_PARTITION_FAILURE: + reason = "STATUS_PARTITION_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_BLOCK_LENGTH: + reason = "STATUS_INVALID_BLOCK_LENGTH"; + break; + case MD_NTSTATUS_WIN_STATUS_DEVICE_NOT_PARTITIONED: + reason = "STATUS_DEVICE_NOT_PARTITIONED"; + break; + case MD_NTSTATUS_WIN_STATUS_UNABLE_TO_LOCK_MEDIA: + reason = "STATUS_UNABLE_TO_LOCK_MEDIA"; + break; + case MD_NTSTATUS_WIN_STATUS_UNABLE_TO_UNLOAD_MEDIA: + reason = "STATUS_UNABLE_TO_UNLOAD_MEDIA"; + break; + case MD_NTSTATUS_WIN_STATUS_EOM_OVERFLOW: + reason = "STATUS_EOM_OVERFLOW"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_MEDIA: + reason = "STATUS_NO_MEDIA"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_SUCH_MEMBER: + reason = "STATUS_NO_SUCH_MEMBER"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_MEMBER: + reason = "STATUS_INVALID_MEMBER"; + break; + case MD_NTSTATUS_WIN_STATUS_KEY_DELETED: + reason = "STATUS_KEY_DELETED"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_LOG_SPACE: + reason = "STATUS_NO_LOG_SPACE"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_SIDS: + reason = "STATUS_TOO_MANY_SIDS"; + break; + case MD_NTSTATUS_WIN_STATUS_LM_CROSS_ENCRYPTION_REQUIRED: + reason = "STATUS_LM_CROSS_ENCRYPTION_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_KEY_HAS_CHILDREN: + reason = "STATUS_KEY_HAS_CHILDREN"; + break; + case MD_NTSTATUS_WIN_STATUS_CHILD_MUST_BE_VOLATILE: + reason = "STATUS_CHILD_MUST_BE_VOLATILE"; + break; + case MD_NTSTATUS_WIN_STATUS_DEVICE_CONFIGURATION_ERROR: + reason = "STATUS_DEVICE_CONFIGURATION_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_DRIVER_INTERNAL_ERROR: + reason = "STATUS_DRIVER_INTERNAL_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_DEVICE_STATE: + reason = "STATUS_INVALID_DEVICE_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_IO_DEVICE_ERROR: + reason = "STATUS_IO_DEVICE_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_DEVICE_PROTOCOL_ERROR: + reason = "STATUS_DEVICE_PROTOCOL_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_BACKUP_CONTROLLER: + reason = "STATUS_BACKUP_CONTROLLER"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_FILE_FULL: + reason = 
"STATUS_LOG_FILE_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_LATE: + reason = "STATUS_TOO_LATE"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_TRUST_LSA_SECRET: + reason = "STATUS_NO_TRUST_LSA_SECRET"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_TRUST_SAM_ACCOUNT: + reason = "STATUS_NO_TRUST_SAM_ACCOUNT"; + break; + case MD_NTSTATUS_WIN_STATUS_TRUSTED_DOMAIN_FAILURE: + reason = "STATUS_TRUSTED_DOMAIN_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_TRUSTED_RELATIONSHIP_FAILURE: + reason = "STATUS_TRUSTED_RELATIONSHIP_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_EVENTLOG_FILE_CORRUPT: + reason = "STATUS_EVENTLOG_FILE_CORRUPT"; + break; + case MD_NTSTATUS_WIN_STATUS_EVENTLOG_CANT_START: + reason = "STATUS_EVENTLOG_CANT_START"; + break; + case MD_NTSTATUS_WIN_STATUS_TRUST_FAILURE: + reason = "STATUS_TRUST_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_MUTANT_LIMIT_EXCEEDED: + reason = "STATUS_MUTANT_LIMIT_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_NETLOGON_NOT_STARTED: + reason = "STATUS_NETLOGON_NOT_STARTED"; + break; + case MD_NTSTATUS_WIN_STATUS_ACCOUNT_EXPIRED: + reason = "STATUS_ACCOUNT_EXPIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_POSSIBLE_DEADLOCK: + reason = "STATUS_POSSIBLE_DEADLOCK"; + break; + case MD_NTSTATUS_WIN_STATUS_NETWORK_CREDENTIAL_CONFLICT: + reason = "STATUS_NETWORK_CREDENTIAL_CONFLICT"; + break; + case MD_NTSTATUS_WIN_STATUS_REMOTE_SESSION_LIMIT: + reason = "STATUS_REMOTE_SESSION_LIMIT"; + break; + case MD_NTSTATUS_WIN_STATUS_EVENTLOG_FILE_CHANGED: + reason = "STATUS_EVENTLOG_FILE_CHANGED"; + break; + case MD_NTSTATUS_WIN_STATUS_NOLOGON_INTERDOMAIN_TRUST_ACCOUNT: + reason = "STATUS_NOLOGON_INTERDOMAIN_TRUST_ACCOUNT"; + break; + case MD_NTSTATUS_WIN_STATUS_NOLOGON_WORKSTATION_TRUST_ACCOUNT: + reason = "STATUS_NOLOGON_WORKSTATION_TRUST_ACCOUNT"; + break; + case MD_NTSTATUS_WIN_STATUS_NOLOGON_SERVER_TRUST_ACCOUNT: + reason = "STATUS_NOLOGON_SERVER_TRUST_ACCOUNT"; + break; + case MD_NTSTATUS_WIN_STATUS_DOMAIN_TRUST_INCONSISTENT: + reason = "STATUS_DOMAIN_TRUST_INCONSISTENT"; + break; + case MD_NTSTATUS_WIN_STATUS_FS_DRIVER_REQUIRED: + reason = "STATUS_FS_DRIVER_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_IMAGE_ALREADY_LOADED_AS_DLL: + reason = "STATUS_IMAGE_ALREADY_LOADED_AS_DLL"; + break; + case MD_NTSTATUS_WIN_STATUS_INCOMPATIBLE_WITH_GLOBAL_SHORT_NAME_REGISTRY_SETTING: + reason = "STATUS_INCOMPATIBLE_WITH_GLOBAL_SHORT_NAME_REGISTRY_SETTING"; + break; + case MD_NTSTATUS_WIN_STATUS_SHORT_NAMES_NOT_ENABLED_ON_VOLUME: + reason = "STATUS_SHORT_NAMES_NOT_ENABLED_ON_VOLUME"; + break; + case MD_NTSTATUS_WIN_STATUS_SECURITY_STREAM_IS_INCONSISTENT: + reason = "STATUS_SECURITY_STREAM_IS_INCONSISTENT"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_LOCK_RANGE: + reason = "STATUS_INVALID_LOCK_RANGE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_ACE_CONDITION: + reason = "STATUS_INVALID_ACE_CONDITION"; + break; + case MD_NTSTATUS_WIN_STATUS_IMAGE_SUBSYSTEM_NOT_PRESENT: + reason = "STATUS_IMAGE_SUBSYSTEM_NOT_PRESENT"; + break; + case MD_NTSTATUS_WIN_STATUS_NOTIFICATION_GUID_ALREADY_DEFINED: + reason = "STATUS_NOTIFICATION_GUID_ALREADY_DEFINED"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_EXCEPTION_HANDLER: + reason = "STATUS_INVALID_EXCEPTION_HANDLER"; + break; + case MD_NTSTATUS_WIN_STATUS_DUPLICATE_PRIVILEGES: + reason = "STATUS_DUPLICATE_PRIVILEGES"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_ALLOWED_ON_SYSTEM_FILE: + reason = "STATUS_NOT_ALLOWED_ON_SYSTEM_FILE"; + break; + case MD_NTSTATUS_WIN_STATUS_REPAIR_NEEDED: + reason = "STATUS_REPAIR_NEEDED"; + 
break; + case MD_NTSTATUS_WIN_STATUS_QUOTA_NOT_ENABLED: + reason = "STATUS_QUOTA_NOT_ENABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_APPLICATION_PACKAGE: + reason = "STATUS_NO_APPLICATION_PACKAGE"; + break; + case MD_NTSTATUS_WIN_STATUS_NETWORK_OPEN_RESTRICTION: + reason = "STATUS_NETWORK_OPEN_RESTRICTION"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_USER_SESSION_KEY: + reason = "STATUS_NO_USER_SESSION_KEY"; + break; + case MD_NTSTATUS_WIN_STATUS_USER_SESSION_DELETED: + reason = "STATUS_USER_SESSION_DELETED"; + break; + case MD_NTSTATUS_WIN_STATUS_RESOURCE_LANG_NOT_FOUND: + reason = "STATUS_RESOURCE_LANG_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_INSUFF_SERVER_RESOURCES: + reason = "STATUS_INSUFF_SERVER_RESOURCES"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_BUFFER_SIZE: + reason = "STATUS_INVALID_BUFFER_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_ADDRESS_COMPONENT: + reason = "STATUS_INVALID_ADDRESS_COMPONENT"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_ADDRESS_WILDCARD: + reason = "STATUS_INVALID_ADDRESS_WILDCARD"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_ADDRESSES: + reason = "STATUS_TOO_MANY_ADDRESSES"; + break; + case MD_NTSTATUS_WIN_STATUS_ADDRESS_ALREADY_EXISTS: + reason = "STATUS_ADDRESS_ALREADY_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_ADDRESS_CLOSED: + reason = "STATUS_ADDRESS_CLOSED"; + break; + case MD_NTSTATUS_WIN_STATUS_CONNECTION_DISCONNECTED: + reason = "STATUS_CONNECTION_DISCONNECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_CONNECTION_RESET: + reason = "STATUS_CONNECTION_RESET"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_NODES: + reason = "STATUS_TOO_MANY_NODES"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_ABORTED: + reason = "STATUS_TRANSACTION_ABORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_TIMED_OUT: + reason = "STATUS_TRANSACTION_TIMED_OUT"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_NO_RELEASE: + reason = "STATUS_TRANSACTION_NO_RELEASE"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_NO_MATCH: + reason = "STATUS_TRANSACTION_NO_MATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_RESPONDED: + reason = "STATUS_TRANSACTION_RESPONDED"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_INVALID_ID: + reason = "STATUS_TRANSACTION_INVALID_ID"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_INVALID_TYPE: + reason = "STATUS_TRANSACTION_INVALID_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_SERVER_SESSION: + reason = "STATUS_NOT_SERVER_SESSION"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_CLIENT_SESSION: + reason = "STATUS_NOT_CLIENT_SESSION"; + break; + case MD_NTSTATUS_WIN_STATUS_CANNOT_LOAD_REGISTRY_FILE: + reason = "STATUS_CANNOT_LOAD_REGISTRY_FILE"; + break; + case MD_NTSTATUS_WIN_STATUS_DEBUG_ATTACH_FAILED: + reason = "STATUS_DEBUG_ATTACH_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_SYSTEM_PROCESS_TERMINATED: + reason = "STATUS_SYSTEM_PROCESS_TERMINATED"; + break; + case MD_NTSTATUS_WIN_STATUS_DATA_NOT_ACCEPTED: + reason = "STATUS_DATA_NOT_ACCEPTED"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_BROWSER_SERVERS_FOUND: + reason = "STATUS_NO_BROWSER_SERVERS_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_VDM_HARD_ERROR: + reason = "STATUS_VDM_HARD_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_DRIVER_CANCEL_TIMEOUT: + reason = "STATUS_DRIVER_CANCEL_TIMEOUT"; + break; + case MD_NTSTATUS_WIN_STATUS_REPLY_MESSAGE_MISMATCH: + reason = "STATUS_REPLY_MESSAGE_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_MAPPED_ALIGNMENT: + reason = "STATUS_MAPPED_ALIGNMENT"; + break; + case 
MD_NTSTATUS_WIN_STATUS_IMAGE_CHECKSUM_MISMATCH: + reason = "STATUS_IMAGE_CHECKSUM_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_LOST_WRITEBEHIND_DATA: + reason = "STATUS_LOST_WRITEBEHIND_DATA"; + break; + case MD_NTSTATUS_WIN_STATUS_CLIENT_SERVER_PARAMETERS_INVALID: + reason = "STATUS_CLIENT_SERVER_PARAMETERS_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_PASSWORD_MUST_CHANGE: + reason = "STATUS_PASSWORD_MUST_CHANGE"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_FOUND: + reason = "STATUS_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_TINY_STREAM: + reason = "STATUS_NOT_TINY_STREAM"; + break; + case MD_NTSTATUS_WIN_STATUS_RECOVERY_FAILURE: + reason = "STATUS_RECOVERY_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_STACK_OVERFLOW_READ: + reason = "STATUS_STACK_OVERFLOW_READ"; + break; + case MD_NTSTATUS_WIN_STATUS_FAIL_CHECK: + reason = "STATUS_FAIL_CHECK"; + break; + case MD_NTSTATUS_WIN_STATUS_DUPLICATE_OBJECTID: + reason = "STATUS_DUPLICATE_OBJECTID"; + break; + case MD_NTSTATUS_WIN_STATUS_OBJECTID_EXISTS: + reason = "STATUS_OBJECTID_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_CONVERT_TO_LARGE: + reason = "STATUS_CONVERT_TO_LARGE"; + break; + case MD_NTSTATUS_WIN_STATUS_RETRY: + reason = "STATUS_RETRY"; + break; + case MD_NTSTATUS_WIN_STATUS_FOUND_OUT_OF_SCOPE: + reason = "STATUS_FOUND_OUT_OF_SCOPE"; + break; + case MD_NTSTATUS_WIN_STATUS_ALLOCATE_BUCKET: + reason = "STATUS_ALLOCATE_BUCKET"; + break; + case MD_NTSTATUS_WIN_STATUS_PROPSET_NOT_FOUND: + reason = "STATUS_PROPSET_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_MARSHALL_OVERFLOW: + reason = "STATUS_MARSHALL_OVERFLOW"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_VARIANT: + reason = "STATUS_INVALID_VARIANT"; + break; + case MD_NTSTATUS_WIN_STATUS_DOMAIN_CONTROLLER_NOT_FOUND: + reason = "STATUS_DOMAIN_CONTROLLER_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_ACCOUNT_LOCKED_OUT: + reason = "STATUS_ACCOUNT_LOCKED_OUT"; + break; + case MD_NTSTATUS_WIN_STATUS_HANDLE_NOT_CLOSABLE: + reason = "STATUS_HANDLE_NOT_CLOSABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_CONNECTION_REFUSED: + reason = "STATUS_CONNECTION_REFUSED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRACEFUL_DISCONNECT: + reason = "STATUS_GRACEFUL_DISCONNECT"; + break; + case MD_NTSTATUS_WIN_STATUS_ADDRESS_ALREADY_ASSOCIATED: + reason = "STATUS_ADDRESS_ALREADY_ASSOCIATED"; + break; + case MD_NTSTATUS_WIN_STATUS_ADDRESS_NOT_ASSOCIATED: + reason = "STATUS_ADDRESS_NOT_ASSOCIATED"; + break; + case MD_NTSTATUS_WIN_STATUS_CONNECTION_INVALID: + reason = "STATUS_CONNECTION_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_CONNECTION_ACTIVE: + reason = "STATUS_CONNECTION_ACTIVE"; + break; + case MD_NTSTATUS_WIN_STATUS_NETWORK_UNREACHABLE: + reason = "STATUS_NETWORK_UNREACHABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_HOST_UNREACHABLE: + reason = "STATUS_HOST_UNREACHABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_PROTOCOL_UNREACHABLE: + reason = "STATUS_PROTOCOL_UNREACHABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_PORT_UNREACHABLE: + reason = "STATUS_PORT_UNREACHABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_REQUEST_ABORTED: + reason = "STATUS_REQUEST_ABORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_CONNECTION_ABORTED: + reason = "STATUS_CONNECTION_ABORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_COMPRESSION_BUFFER: + reason = "STATUS_BAD_COMPRESSION_BUFFER"; + break; + case MD_NTSTATUS_WIN_STATUS_USER_MAPPED_FILE: + reason = "STATUS_USER_MAPPED_FILE"; + break; + case MD_NTSTATUS_WIN_STATUS_AUDIT_FAILED: + reason = "STATUS_AUDIT_FAILED"; + break; + case 
MD_NTSTATUS_WIN_STATUS_TIMER_RESOLUTION_NOT_SET: + reason = "STATUS_TIMER_RESOLUTION_NOT_SET"; + break; + case MD_NTSTATUS_WIN_STATUS_CONNECTION_COUNT_LIMIT: + reason = "STATUS_CONNECTION_COUNT_LIMIT"; + break; + case MD_NTSTATUS_WIN_STATUS_LOGIN_TIME_RESTRICTION: + reason = "STATUS_LOGIN_TIME_RESTRICTION"; + break; + case MD_NTSTATUS_WIN_STATUS_LOGIN_WKSTA_RESTRICTION: + reason = "STATUS_LOGIN_WKSTA_RESTRICTION"; + break; + case MD_NTSTATUS_WIN_STATUS_IMAGE_MP_UP_MISMATCH: + reason = "STATUS_IMAGE_MP_UP_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_INSUFFICIENT_LOGON_INFO: + reason = "STATUS_INSUFFICIENT_LOGON_INFO"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_DLL_ENTRYPOINT: + reason = "STATUS_BAD_DLL_ENTRYPOINT"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_SERVICE_ENTRYPOINT: + reason = "STATUS_BAD_SERVICE_ENTRYPOINT"; + break; + case MD_NTSTATUS_WIN_STATUS_LPC_REPLY_LOST: + reason = "STATUS_LPC_REPLY_LOST"; + break; + case MD_NTSTATUS_WIN_STATUS_IP_ADDRESS_CONFLICT1: + reason = "STATUS_IP_ADDRESS_CONFLICT1"; + break; + case MD_NTSTATUS_WIN_STATUS_IP_ADDRESS_CONFLICT2: + reason = "STATUS_IP_ADDRESS_CONFLICT2"; + break; + case MD_NTSTATUS_WIN_STATUS_REGISTRY_QUOTA_LIMIT: + reason = "STATUS_REGISTRY_QUOTA_LIMIT"; + break; + case MD_NTSTATUS_WIN_STATUS_PATH_NOT_COVERED: + reason = "STATUS_PATH_NOT_COVERED"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_CALLBACK_ACTIVE: + reason = "STATUS_NO_CALLBACK_ACTIVE"; + break; + case MD_NTSTATUS_WIN_STATUS_LICENSE_QUOTA_EXCEEDED: + reason = "STATUS_LICENSE_QUOTA_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_PWD_TOO_SHORT: + reason = "STATUS_PWD_TOO_SHORT"; + break; + case MD_NTSTATUS_WIN_STATUS_PWD_TOO_RECENT: + reason = "STATUS_PWD_TOO_RECENT"; + break; + case MD_NTSTATUS_WIN_STATUS_PWD_HISTORY_CONFLICT: + reason = "STATUS_PWD_HISTORY_CONFLICT"; + break; + case MD_NTSTATUS_WIN_STATUS_PLUGPLAY_NO_DEVICE: + reason = "STATUS_PLUGPLAY_NO_DEVICE"; + break; + case MD_NTSTATUS_WIN_STATUS_UNSUPPORTED_COMPRESSION: + reason = "STATUS_UNSUPPORTED_COMPRESSION"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_HW_PROFILE: + reason = "STATUS_INVALID_HW_PROFILE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PLUGPLAY_DEVICE_PATH: + reason = "STATUS_INVALID_PLUGPLAY_DEVICE_PATH"; + break; + case MD_NTSTATUS_WIN_STATUS_DRIVER_ORDINAL_NOT_FOUND: + reason = "STATUS_DRIVER_ORDINAL_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_DRIVER_ENTRYPOINT_NOT_FOUND: + reason = "STATUS_DRIVER_ENTRYPOINT_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_RESOURCE_NOT_OWNED: + reason = "STATUS_RESOURCE_NOT_OWNED"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_LINKS: + reason = "STATUS_TOO_MANY_LINKS"; + break; + case MD_NTSTATUS_WIN_STATUS_QUOTA_LIST_INCONSISTENT: + reason = "STATUS_QUOTA_LIST_INCONSISTENT"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_IS_OFFLINE: + reason = "STATUS_FILE_IS_OFFLINE"; + break; + case MD_NTSTATUS_WIN_STATUS_EVALUATION_EXPIRATION: + reason = "STATUS_EVALUATION_EXPIRATION"; + break; + case MD_NTSTATUS_WIN_STATUS_ILLEGAL_DLL_RELOCATION: + reason = "STATUS_ILLEGAL_DLL_RELOCATION"; + break; + case MD_NTSTATUS_WIN_STATUS_LICENSE_VIOLATION: + reason = "STATUS_LICENSE_VIOLATION"; + break; + case MD_NTSTATUS_WIN_STATUS_DLL_INIT_FAILED_LOGOFF: + reason = "STATUS_DLL_INIT_FAILED_LOGOFF"; + break; + case MD_NTSTATUS_WIN_STATUS_DRIVER_UNABLE_TO_LOAD: + reason = "STATUS_DRIVER_UNABLE_TO_LOAD"; + break; + case MD_NTSTATUS_WIN_STATUS_DFS_UNAVAILABLE: + reason = "STATUS_DFS_UNAVAILABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLUME_DISMOUNTED: + reason = 
"STATUS_VOLUME_DISMOUNTED"; + break; + case MD_NTSTATUS_WIN_STATUS_WX86_INTERNAL_ERROR: + reason = "STATUS_WX86_INTERNAL_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_WX86_FLOAT_STACK_CHECK: + reason = "STATUS_WX86_FLOAT_STACK_CHECK"; + break; + case MD_NTSTATUS_WIN_STATUS_VALIDATE_CONTINUE: + reason = "STATUS_VALIDATE_CONTINUE"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_MATCH: + reason = "STATUS_NO_MATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_MORE_MATCHES: + reason = "STATUS_NO_MORE_MATCHES"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_A_REPARSE_POINT: + reason = "STATUS_NOT_A_REPARSE_POINT"; + break; + case MD_NTSTATUS_WIN_STATUS_IO_REPARSE_TAG_INVALID: + reason = "STATUS_IO_REPARSE_TAG_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_IO_REPARSE_TAG_MISMATCH: + reason = "STATUS_IO_REPARSE_TAG_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_IO_REPARSE_DATA_INVALID: + reason = "STATUS_IO_REPARSE_DATA_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_IO_REPARSE_TAG_NOT_HANDLED: + reason = "STATUS_IO_REPARSE_TAG_NOT_HANDLED"; + break; + case MD_NTSTATUS_WIN_STATUS_PWD_TOO_LONG: + reason = "STATUS_PWD_TOO_LONG"; + break; + case MD_NTSTATUS_WIN_STATUS_STOWED_EXCEPTION: + reason = "STATUS_STOWED_EXCEPTION"; + break; + case MD_NTSTATUS_WIN_STATUS_REPARSE_POINT_NOT_RESOLVED: + reason = "STATUS_REPARSE_POINT_NOT_RESOLVED"; + break; + case MD_NTSTATUS_WIN_STATUS_DIRECTORY_IS_A_REPARSE_POINT: + reason = "STATUS_DIRECTORY_IS_A_REPARSE_POINT"; + break; + case MD_NTSTATUS_WIN_STATUS_RANGE_LIST_CONFLICT: + reason = "STATUS_RANGE_LIST_CONFLICT"; + break; + case MD_NTSTATUS_WIN_STATUS_SOURCE_ELEMENT_EMPTY: + reason = "STATUS_SOURCE_ELEMENT_EMPTY"; + break; + case MD_NTSTATUS_WIN_STATUS_DESTINATION_ELEMENT_FULL: + reason = "STATUS_DESTINATION_ELEMENT_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_ILLEGAL_ELEMENT_ADDRESS: + reason = "STATUS_ILLEGAL_ELEMENT_ADDRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_MAGAZINE_NOT_PRESENT: + reason = "STATUS_MAGAZINE_NOT_PRESENT"; + break; + case MD_NTSTATUS_WIN_STATUS_REINITIALIZATION_NEEDED: + reason = "STATUS_REINITIALIZATION_NEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_ENCRYPTION_FAILED: + reason = "STATUS_ENCRYPTION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_DECRYPTION_FAILED: + reason = "STATUS_DECRYPTION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_RANGE_NOT_FOUND: + reason = "STATUS_RANGE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_RECOVERY_POLICY: + reason = "STATUS_NO_RECOVERY_POLICY"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_EFS: + reason = "STATUS_NO_EFS"; + break; + case MD_NTSTATUS_WIN_STATUS_WRONG_EFS: + reason = "STATUS_WRONG_EFS"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_USER_KEYS: + reason = "STATUS_NO_USER_KEYS"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_NOT_ENCRYPTED: + reason = "STATUS_FILE_NOT_ENCRYPTED"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_EXPORT_FORMAT: + reason = "STATUS_NOT_EXPORT_FORMAT"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_ENCRYPTED: + reason = "STATUS_FILE_ENCRYPTED"; + break; + case MD_NTSTATUS_WIN_STATUS_WMI_GUID_NOT_FOUND: + reason = "STATUS_WMI_GUID_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_WMI_INSTANCE_NOT_FOUND: + reason = "STATUS_WMI_INSTANCE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_WMI_ITEMID_NOT_FOUND: + reason = "STATUS_WMI_ITEMID_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_WMI_TRY_AGAIN: + reason = "STATUS_WMI_TRY_AGAIN"; + break; + case MD_NTSTATUS_WIN_STATUS_SHARED_POLICY: + reason = "STATUS_SHARED_POLICY"; + break; + case 
MD_NTSTATUS_WIN_STATUS_POLICY_OBJECT_NOT_FOUND: + reason = "STATUS_POLICY_OBJECT_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_POLICY_ONLY_IN_DS: + reason = "STATUS_POLICY_ONLY_IN_DS"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLUME_NOT_UPGRADED: + reason = "STATUS_VOLUME_NOT_UPGRADED"; + break; + case MD_NTSTATUS_WIN_STATUS_REMOTE_STORAGE_NOT_ACTIVE: + reason = "STATUS_REMOTE_STORAGE_NOT_ACTIVE"; + break; + case MD_NTSTATUS_WIN_STATUS_REMOTE_STORAGE_MEDIA_ERROR: + reason = "STATUS_REMOTE_STORAGE_MEDIA_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_TRACKING_SERVICE: + reason = "STATUS_NO_TRACKING_SERVICE"; + break; + case MD_NTSTATUS_WIN_STATUS_SERVER_SID_MISMATCH: + reason = "STATUS_SERVER_SID_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_NO_ATTRIBUTE_OR_VALUE: + reason = "STATUS_DS_NO_ATTRIBUTE_OR_VALUE"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_INVALID_ATTRIBUTE_SYNTAX: + reason = "STATUS_DS_INVALID_ATTRIBUTE_SYNTAX"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_ATTRIBUTE_TYPE_UNDEFINED: + reason = "STATUS_DS_ATTRIBUTE_TYPE_UNDEFINED"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_ATTRIBUTE_OR_VALUE_EXISTS: + reason = "STATUS_DS_ATTRIBUTE_OR_VALUE_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_BUSY: + reason = "STATUS_DS_BUSY"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_UNAVAILABLE: + reason = "STATUS_DS_UNAVAILABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_NO_RIDS_ALLOCATED: + reason = "STATUS_DS_NO_RIDS_ALLOCATED"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_NO_MORE_RIDS: + reason = "STATUS_DS_NO_MORE_RIDS"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_INCORRECT_ROLE_OWNER: + reason = "STATUS_DS_INCORRECT_ROLE_OWNER"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_RIDMGR_INIT_ERROR: + reason = "STATUS_DS_RIDMGR_INIT_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_OBJ_CLASS_VIOLATION: + reason = "STATUS_DS_OBJ_CLASS_VIOLATION"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_CANT_ON_NON_LEAF: + reason = "STATUS_DS_CANT_ON_NON_LEAF"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_CANT_ON_RDN: + reason = "STATUS_DS_CANT_ON_RDN"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_CANT_MOD_OBJ_CLASS: + reason = "STATUS_DS_CANT_MOD_OBJ_CLASS"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_CROSS_DOM_MOVE_FAILED: + reason = "STATUS_DS_CROSS_DOM_MOVE_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_GC_NOT_AVAILABLE: + reason = "STATUS_DS_GC_NOT_AVAILABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_DIRECTORY_SERVICE_REQUIRED: + reason = "STATUS_DIRECTORY_SERVICE_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_REPARSE_ATTRIBUTE_CONFLICT: + reason = "STATUS_REPARSE_ATTRIBUTE_CONFLICT"; + break; + case MD_NTSTATUS_WIN_STATUS_CANT_ENABLE_DENY_ONLY: + reason = "STATUS_CANT_ENABLE_DENY_ONLY"; + break; + case MD_NTSTATUS_WIN_STATUS_FLOAT_MULTIPLE_FAULTS: + reason = "STATUS_FLOAT_MULTIPLE_FAULTS"; + break; + case MD_NTSTATUS_WIN_STATUS_FLOAT_MULTIPLE_TRAPS: + reason = "STATUS_FLOAT_MULTIPLE_TRAPS"; + break; + case MD_NTSTATUS_WIN_STATUS_DEVICE_REMOVED: + reason = "STATUS_DEVICE_REMOVED"; + break; + case MD_NTSTATUS_WIN_STATUS_JOURNAL_DELETE_IN_PROGRESS: + reason = "STATUS_JOURNAL_DELETE_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_JOURNAL_NOT_ACTIVE: + reason = "STATUS_JOURNAL_NOT_ACTIVE"; + break; + case MD_NTSTATUS_WIN_STATUS_NOINTERFACE: + reason = "STATUS_NOINTERFACE"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_RIDMGR_DISABLED: + reason = "STATUS_DS_RIDMGR_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_ADMIN_LIMIT_EXCEEDED: + reason = "STATUS_DS_ADMIN_LIMIT_EXCEEDED"; + break; + case 
MD_NTSTATUS_WIN_STATUS_DRIVER_FAILED_SLEEP: + reason = "STATUS_DRIVER_FAILED_SLEEP"; + break; + case MD_NTSTATUS_WIN_STATUS_MUTUAL_AUTHENTICATION_FAILED: + reason = "STATUS_MUTUAL_AUTHENTICATION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_CORRUPT_SYSTEM_FILE: + reason = "STATUS_CORRUPT_SYSTEM_FILE"; + break; + case MD_NTSTATUS_WIN_STATUS_DATATYPE_MISALIGNMENT_ERROR: + reason = "STATUS_DATATYPE_MISALIGNMENT_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_WMI_READ_ONLY: + reason = "STATUS_WMI_READ_ONLY"; + break; + case MD_NTSTATUS_WIN_STATUS_WMI_SET_FAILURE: + reason = "STATUS_WMI_SET_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_COMMITMENT_MINIMUM: + reason = "STATUS_COMMITMENT_MINIMUM"; + break; + case MD_NTSTATUS_WIN_STATUS_REG_NAT_CONSUMPTION: + reason = "STATUS_REG_NAT_CONSUMPTION"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSPORT_FULL: + reason = "STATUS_TRANSPORT_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_SAM_INIT_FAILURE: + reason = "STATUS_DS_SAM_INIT_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_ONLY_IF_CONNECTED: + reason = "STATUS_ONLY_IF_CONNECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_SENSITIVE_GROUP_VIOLATION: + reason = "STATUS_DS_SENSITIVE_GROUP_VIOLATION"; + break; + case MD_NTSTATUS_WIN_STATUS_PNP_RESTART_ENUMERATION: + reason = "STATUS_PNP_RESTART_ENUMERATION"; + break; + case MD_NTSTATUS_WIN_STATUS_JOURNAL_ENTRY_DELETED: + reason = "STATUS_JOURNAL_ENTRY_DELETED"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_CANT_MOD_PRIMARYGROUPID: + reason = "STATUS_DS_CANT_MOD_PRIMARYGROUPID"; + break; + case MD_NTSTATUS_WIN_STATUS_SYSTEM_IMAGE_BAD_SIGNATURE: + reason = "STATUS_SYSTEM_IMAGE_BAD_SIGNATURE"; + break; + case MD_NTSTATUS_WIN_STATUS_PNP_REBOOT_REQUIRED: + reason = "STATUS_PNP_REBOOT_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_POWER_STATE_INVALID: + reason = "STATUS_POWER_STATE_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_INVALID_GROUP_TYPE: + reason = "STATUS_DS_INVALID_GROUP_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_NO_NEST_GLOBALGROUP_IN_MIXEDDOMAIN: + reason = "STATUS_DS_NO_NEST_GLOBALGROUP_IN_MIXEDDOMAIN"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_NO_NEST_LOCALGROUP_IN_MIXEDDOMAIN: + reason = "STATUS_DS_NO_NEST_LOCALGROUP_IN_MIXEDDOMAIN"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_GLOBAL_CANT_HAVE_LOCAL_MEMBER: + reason = "STATUS_DS_GLOBAL_CANT_HAVE_LOCAL_MEMBER"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_GLOBAL_CANT_HAVE_UNIVERSAL_MEMBER: + reason = "STATUS_DS_GLOBAL_CANT_HAVE_UNIVERSAL_MEMBER"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_UNIVERSAL_CANT_HAVE_LOCAL_MEMBER: + reason = "STATUS_DS_UNIVERSAL_CANT_HAVE_LOCAL_MEMBER"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_GLOBAL_CANT_HAVE_CROSSDOMAIN_MEMBER: + reason = "STATUS_DS_GLOBAL_CANT_HAVE_CROSSDOMAIN_MEMBER"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_LOCAL_CANT_HAVE_CROSSDOMAIN_LOCAL_MEMBER: + reason = "STATUS_DS_LOCAL_CANT_HAVE_CROSSDOMAIN_LOCAL_MEMBER"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_HAVE_PRIMARY_MEMBERS: + reason = "STATUS_DS_HAVE_PRIMARY_MEMBERS"; + break; + case MD_NTSTATUS_WIN_STATUS_WMI_NOT_SUPPORTED: + reason = "STATUS_WMI_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_INSUFFICIENT_POWER: + reason = "STATUS_INSUFFICIENT_POWER"; + break; + case MD_NTSTATUS_WIN_STATUS_SAM_NEED_BOOTKEY_PASSWORD: + reason = "STATUS_SAM_NEED_BOOTKEY_PASSWORD"; + break; + case MD_NTSTATUS_WIN_STATUS_SAM_NEED_BOOTKEY_FLOPPY: + reason = "STATUS_SAM_NEED_BOOTKEY_FLOPPY"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_CANT_START: + reason = "STATUS_DS_CANT_START"; + break; + case 
MD_NTSTATUS_WIN_STATUS_DS_INIT_FAILURE: + reason = "STATUS_DS_INIT_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_SAM_INIT_FAILURE: + reason = "STATUS_SAM_INIT_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_GC_REQUIRED: + reason = "STATUS_DS_GC_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_LOCAL_MEMBER_OF_LOCAL_ONLY: + reason = "STATUS_DS_LOCAL_MEMBER_OF_LOCAL_ONLY"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_NO_FPO_IN_UNIVERSAL_GROUPS: + reason = "STATUS_DS_NO_FPO_IN_UNIVERSAL_GROUPS"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED: + reason = "STATUS_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_MULTIPLE_FAULT_VIOLATION: + reason = "STATUS_MULTIPLE_FAULT_VIOLATION"; + break; + case MD_NTSTATUS_WIN_STATUS_CURRENT_DOMAIN_NOT_ALLOWED: + reason = "STATUS_CURRENT_DOMAIN_NOT_ALLOWED"; + break; + case MD_NTSTATUS_WIN_STATUS_CANNOT_MAKE: + reason = "STATUS_CANNOT_MAKE"; + break; + case MD_NTSTATUS_WIN_STATUS_SYSTEM_SHUTDOWN: + reason = "STATUS_SYSTEM_SHUTDOWN"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_INIT_FAILURE_CONSOLE: + reason = "STATUS_DS_INIT_FAILURE_CONSOLE"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_SAM_INIT_FAILURE_CONSOLE: + reason = "STATUS_DS_SAM_INIT_FAILURE_CONSOLE"; + break; + case MD_NTSTATUS_WIN_STATUS_UNFINISHED_CONTEXT_DELETED: + reason = "STATUS_UNFINISHED_CONTEXT_DELETED"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_TGT_REPLY: + reason = "STATUS_NO_TGT_REPLY"; + break; + case MD_NTSTATUS_WIN_STATUS_OBJECTID_NOT_FOUND: + reason = "STATUS_OBJECTID_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_IP_ADDRESSES: + reason = "STATUS_NO_IP_ADDRESSES"; + break; + case MD_NTSTATUS_WIN_STATUS_WRONG_CREDENTIAL_HANDLE: + reason = "STATUS_WRONG_CREDENTIAL_HANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_CRYPTO_SYSTEM_INVALID: + reason = "STATUS_CRYPTO_SYSTEM_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_MAX_REFERRALS_EXCEEDED: + reason = "STATUS_MAX_REFERRALS_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_MUST_BE_KDC: + reason = "STATUS_MUST_BE_KDC"; + break; + case MD_NTSTATUS_WIN_STATUS_STRONG_CRYPTO_NOT_SUPPORTED: + reason = "STATUS_STRONG_CRYPTO_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_PRINCIPALS: + reason = "STATUS_TOO_MANY_PRINCIPALS"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_PA_DATA: + reason = "STATUS_NO_PA_DATA"; + break; + case MD_NTSTATUS_WIN_STATUS_PKINIT_NAME_MISMATCH: + reason = "STATUS_PKINIT_NAME_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_SMARTCARD_LOGON_REQUIRED: + reason = "STATUS_SMARTCARD_LOGON_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_KDC_INVALID_REQUEST: + reason = "STATUS_KDC_INVALID_REQUEST"; + break; + case MD_NTSTATUS_WIN_STATUS_KDC_UNABLE_TO_REFER: + reason = "STATUS_KDC_UNABLE_TO_REFER"; + break; + case MD_NTSTATUS_WIN_STATUS_KDC_UNKNOWN_ETYPE: + reason = "STATUS_KDC_UNKNOWN_ETYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_SHUTDOWN_IN_PROGRESS: + reason = "STATUS_SHUTDOWN_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_SERVER_SHUTDOWN_IN_PROGRESS: + reason = "STATUS_SERVER_SHUTDOWN_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_SUPPORTED_ON_SBS: + reason = "STATUS_NOT_SUPPORTED_ON_SBS"; + break; + case MD_NTSTATUS_WIN_STATUS_WMI_GUID_DISCONNECTED: + reason = "STATUS_WMI_GUID_DISCONNECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_WMI_ALREADY_DISABLED: + reason = "STATUS_WMI_ALREADY_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_WMI_ALREADY_ENABLED: + reason = "STATUS_WMI_ALREADY_ENABLED"; + break; + case 
MD_NTSTATUS_WIN_STATUS_MFT_TOO_FRAGMENTED: + reason = "STATUS_MFT_TOO_FRAGMENTED"; + break; + case MD_NTSTATUS_WIN_STATUS_COPY_PROTECTION_FAILURE: + reason = "STATUS_COPY_PROTECTION_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_CSS_AUTHENTICATION_FAILURE: + reason = "STATUS_CSS_AUTHENTICATION_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_CSS_KEY_NOT_PRESENT: + reason = "STATUS_CSS_KEY_NOT_PRESENT"; + break; + case MD_NTSTATUS_WIN_STATUS_CSS_KEY_NOT_ESTABLISHED: + reason = "STATUS_CSS_KEY_NOT_ESTABLISHED"; + break; + case MD_NTSTATUS_WIN_STATUS_CSS_SCRAMBLED_SECTOR: + reason = "STATUS_CSS_SCRAMBLED_SECTOR"; + break; + case MD_NTSTATUS_WIN_STATUS_CSS_REGION_MISMATCH: + reason = "STATUS_CSS_REGION_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_CSS_RESETS_EXHAUSTED: + reason = "STATUS_CSS_RESETS_EXHAUSTED"; + break; + case MD_NTSTATUS_WIN_STATUS_PASSWORD_CHANGE_REQUIRED: + reason = "STATUS_PASSWORD_CHANGE_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_PKINIT_FAILURE: + reason = "STATUS_PKINIT_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_SMARTCARD_SUBSYSTEM_FAILURE: + reason = "STATUS_SMARTCARD_SUBSYSTEM_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_KERB_KEY: + reason = "STATUS_NO_KERB_KEY"; + break; + case MD_NTSTATUS_WIN_STATUS_HOST_DOWN: + reason = "STATUS_HOST_DOWN"; + break; + case MD_NTSTATUS_WIN_STATUS_UNSUPPORTED_PREAUTH: + reason = "STATUS_UNSUPPORTED_PREAUTH"; + break; + case MD_NTSTATUS_WIN_STATUS_EFS_ALG_BLOB_TOO_BIG: + reason = "STATUS_EFS_ALG_BLOB_TOO_BIG"; + break; + case MD_NTSTATUS_WIN_STATUS_PORT_NOT_SET: + reason = "STATUS_PORT_NOT_SET"; + break; + case MD_NTSTATUS_WIN_STATUS_DEBUGGER_INACTIVE: + reason = "STATUS_DEBUGGER_INACTIVE"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_VERSION_CHECK_FAILURE: + reason = "STATUS_DS_VERSION_CHECK_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_AUDITING_DISABLED: + reason = "STATUS_AUDITING_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_PRENT4_MACHINE_ACCOUNT: + reason = "STATUS_PRENT4_MACHINE_ACCOUNT"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_AG_CANT_HAVE_UNIVERSAL_MEMBER: + reason = "STATUS_DS_AG_CANT_HAVE_UNIVERSAL_MEMBER"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_WIN_32: + reason = "STATUS_INVALID_IMAGE_WIN_32"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_WIN_64: + reason = "STATUS_INVALID_IMAGE_WIN_64"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_BINDINGS: + reason = "STATUS_BAD_BINDINGS"; + break; + case MD_NTSTATUS_WIN_STATUS_NETWORK_SESSION_EXPIRED: + reason = "STATUS_NETWORK_SESSION_EXPIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_APPHELP_BLOCK: + reason = "STATUS_APPHELP_BLOCK"; + break; + case MD_NTSTATUS_WIN_STATUS_ALL_SIDS_FILTERED: + reason = "STATUS_ALL_SIDS_FILTERED"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_SAFE_MODE_DRIVER: + reason = "STATUS_NOT_SAFE_MODE_DRIVER"; + break; + case MD_NTSTATUS_WIN_STATUS_ACCESS_DISABLED_BY_POLICY_DEFAULT: + reason = "STATUS_ACCESS_DISABLED_BY_POLICY_DEFAULT"; + break; + case MD_NTSTATUS_WIN_STATUS_ACCESS_DISABLED_BY_POLICY_PATH: + reason = "STATUS_ACCESS_DISABLED_BY_POLICY_PATH"; + break; + case MD_NTSTATUS_WIN_STATUS_ACCESS_DISABLED_BY_POLICY_PUBLISHER: + reason = "STATUS_ACCESS_DISABLED_BY_POLICY_PUBLISHER"; + break; + case MD_NTSTATUS_WIN_STATUS_ACCESS_DISABLED_BY_POLICY_OTHER: + reason = "STATUS_ACCESS_DISABLED_BY_POLICY_OTHER"; + break; + case MD_NTSTATUS_WIN_STATUS_FAILED_DRIVER_ENTRY: + reason = "STATUS_FAILED_DRIVER_ENTRY"; + break; + case MD_NTSTATUS_WIN_STATUS_DEVICE_ENUMERATION_ERROR: + reason = "STATUS_DEVICE_ENUMERATION_ERROR"; + 
break; + case MD_NTSTATUS_WIN_STATUS_MOUNT_POINT_NOT_RESOLVED: + reason = "STATUS_MOUNT_POINT_NOT_RESOLVED"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_DEVICE_OBJECT_PARAMETER: + reason = "STATUS_INVALID_DEVICE_OBJECT_PARAMETER"; + break; + case MD_NTSTATUS_WIN_STATUS_MCA_OCCURED: + reason = "STATUS_MCA_OCCURED"; + break; + case MD_NTSTATUS_WIN_STATUS_DRIVER_BLOCKED_CRITICAL: + reason = "STATUS_DRIVER_BLOCKED_CRITICAL"; + break; + case MD_NTSTATUS_WIN_STATUS_DRIVER_BLOCKED: + reason = "STATUS_DRIVER_BLOCKED"; + break; + case MD_NTSTATUS_WIN_STATUS_DRIVER_DATABASE_ERROR: + reason = "STATUS_DRIVER_DATABASE_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_SYSTEM_HIVE_TOO_LARGE: + reason = "STATUS_SYSTEM_HIVE_TOO_LARGE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_IMPORT_OF_NON_DLL: + reason = "STATUS_INVALID_IMPORT_OF_NON_DLL"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_SECRETS: + reason = "STATUS_NO_SECRETS"; + break; + case MD_NTSTATUS_WIN_STATUS_ACCESS_DISABLED_NO_SAFER_UI_BY_POLICY: + reason = "STATUS_ACCESS_DISABLED_NO_SAFER_UI_BY_POLICY"; + break; + case MD_NTSTATUS_WIN_STATUS_FAILED_STACK_SWITCH: + reason = "STATUS_FAILED_STACK_SWITCH"; + break; + case MD_NTSTATUS_WIN_STATUS_HEAP_CORRUPTION: + reason = "STATUS_HEAP_CORRUPTION"; + break; + case MD_NTSTATUS_WIN_STATUS_SMARTCARD_WRONG_PIN: + reason = "STATUS_SMARTCARD_WRONG_PIN"; + break; + case MD_NTSTATUS_WIN_STATUS_SMARTCARD_CARD_BLOCKED: + reason = "STATUS_SMARTCARD_CARD_BLOCKED"; + break; + case MD_NTSTATUS_WIN_STATUS_SMARTCARD_CARD_NOT_AUTHENTICATED: + reason = "STATUS_SMARTCARD_CARD_NOT_AUTHENTICATED"; + break; + case MD_NTSTATUS_WIN_STATUS_SMARTCARD_NO_CARD: + reason = "STATUS_SMARTCARD_NO_CARD"; + break; + case MD_NTSTATUS_WIN_STATUS_SMARTCARD_NO_KEY_CONTAINER: + reason = "STATUS_SMARTCARD_NO_KEY_CONTAINER"; + break; + case MD_NTSTATUS_WIN_STATUS_SMARTCARD_NO_CERTIFICATE: + reason = "STATUS_SMARTCARD_NO_CERTIFICATE"; + break; + case MD_NTSTATUS_WIN_STATUS_SMARTCARD_NO_KEYSET: + reason = "STATUS_SMARTCARD_NO_KEYSET"; + break; + case MD_NTSTATUS_WIN_STATUS_SMARTCARD_IO_ERROR: + reason = "STATUS_SMARTCARD_IO_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_DOWNGRADE_DETECTED: + reason = "STATUS_DOWNGRADE_DETECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_SMARTCARD_CERT_REVOKED: + reason = "STATUS_SMARTCARD_CERT_REVOKED"; + break; + case MD_NTSTATUS_WIN_STATUS_ISSUING_CA_UNTRUSTED: + reason = "STATUS_ISSUING_CA_UNTRUSTED"; + break; + case MD_NTSTATUS_WIN_STATUS_REVOCATION_OFFLINE_C: + reason = "STATUS_REVOCATION_OFFLINE_C"; + break; + case MD_NTSTATUS_WIN_STATUS_PKINIT_CLIENT_FAILURE: + reason = "STATUS_PKINIT_CLIENT_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_SMARTCARD_CERT_EXPIRED: + reason = "STATUS_SMARTCARD_CERT_EXPIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_DRIVER_FAILED_PRIOR_UNLOAD: + reason = "STATUS_DRIVER_FAILED_PRIOR_UNLOAD"; + break; + case MD_NTSTATUS_WIN_STATUS_SMARTCARD_SILENT_CONTEXT: + reason = "STATUS_SMARTCARD_SILENT_CONTEXT"; + break; + case MD_NTSTATUS_WIN_STATUS_PER_USER_TRUST_QUOTA_EXCEEDED: + reason = "STATUS_PER_USER_TRUST_QUOTA_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_ALL_USER_TRUST_QUOTA_EXCEEDED: + reason = "STATUS_ALL_USER_TRUST_QUOTA_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_USER_DELETE_TRUST_QUOTA_EXCEEDED: + reason = "STATUS_USER_DELETE_TRUST_QUOTA_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_NAME_NOT_UNIQUE: + reason = "STATUS_DS_NAME_NOT_UNIQUE"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_DUPLICATE_ID_FOUND: + reason = "STATUS_DS_DUPLICATE_ID_FOUND"; + break; + case 
MD_NTSTATUS_WIN_STATUS_DS_GROUP_CONVERSION_ERROR: + reason = "STATUS_DS_GROUP_CONVERSION_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLSNAP_PREPARE_HIBERNATE: + reason = "STATUS_VOLSNAP_PREPARE_HIBERNATE"; + break; + case MD_NTSTATUS_WIN_STATUS_USER2USER_REQUIRED: + reason = "STATUS_USER2USER_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_STACK_BUFFER_OVERRUN: + reason = "STATUS_STACK_BUFFER_OVERRUN"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_S4U_PROT_SUPPORT: + reason = "STATUS_NO_S4U_PROT_SUPPORT"; + break; + case MD_NTSTATUS_WIN_STATUS_CROSSREALM_DELEGATION_FAILURE: + reason = "STATUS_CROSSREALM_DELEGATION_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_REVOCATION_OFFLINE_KDC: + reason = "STATUS_REVOCATION_OFFLINE_KDC"; + break; + case MD_NTSTATUS_WIN_STATUS_ISSUING_CA_UNTRUSTED_KDC: + reason = "STATUS_ISSUING_CA_UNTRUSTED_KDC"; + break; + case MD_NTSTATUS_WIN_STATUS_KDC_CERT_EXPIRED: + reason = "STATUS_KDC_CERT_EXPIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_KDC_CERT_REVOKED: + reason = "STATUS_KDC_CERT_REVOKED"; + break; + case MD_NTSTATUS_WIN_STATUS_PARAMETER_QUOTA_EXCEEDED: + reason = "STATUS_PARAMETER_QUOTA_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_HIBERNATION_FAILURE: + reason = "STATUS_HIBERNATION_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_DELAY_LOAD_FAILED: + reason = "STATUS_DELAY_LOAD_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_AUTHENTICATION_FIREWALL_FAILED: + reason = "STATUS_AUTHENTICATION_FIREWALL_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_VDM_DISALLOWED: + reason = "STATUS_VDM_DISALLOWED"; + break; + case MD_NTSTATUS_WIN_STATUS_HUNG_DISPLAY_DRIVER_THREAD: + reason = "STATUS_HUNG_DISPLAY_DRIVER_THREAD"; + break; + case MD_NTSTATUS_WIN_STATUS_INSUFFICIENT_RESOURCE_FOR_SPECIFIED_SHARED_SECTION_SIZE: + reason = "STATUS_INSUFFICIENT_RESOURCE_FOR_SPECIFIED_SHARED_SECTION_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_CRUNTIME_PARAMETER: + reason = "STATUS_INVALID_CRUNTIME_PARAMETER"; + break; + case MD_NTSTATUS_WIN_STATUS_NTLM_BLOCKED: + reason = "STATUS_NTLM_BLOCKED"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_SRC_SID_EXISTS_IN_FOREST: + reason = "STATUS_DS_SRC_SID_EXISTS_IN_FOREST"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_DOMAIN_NAME_EXISTS_IN_FOREST: + reason = "STATUS_DS_DOMAIN_NAME_EXISTS_IN_FOREST"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_FLAT_NAME_EXISTS_IN_FOREST: + reason = "STATUS_DS_FLAT_NAME_EXISTS_IN_FOREST"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_USER_PRINCIPAL_NAME: + reason = "STATUS_INVALID_USER_PRINCIPAL_NAME"; + break; + case MD_NTSTATUS_WIN_STATUS_FATAL_USER_CALLBACK_EXCEPTION: + reason = "STATUS_FATAL_USER_CALLBACK_EXCEPTION"; + break; + case MD_NTSTATUS_WIN_STATUS_ASSERTION_FAILURE: + reason = "STATUS_ASSERTION_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_VERIFIER_STOP: + reason = "STATUS_VERIFIER_STOP"; + break; + case MD_NTSTATUS_WIN_STATUS_CALLBACK_POP_STACK: + reason = "STATUS_CALLBACK_POP_STACK"; + break; + case MD_NTSTATUS_WIN_STATUS_INCOMPATIBLE_DRIVER_BLOCKED: + reason = "STATUS_INCOMPATIBLE_DRIVER_BLOCKED"; + break; + case MD_NTSTATUS_WIN_STATUS_HIVE_UNLOADED: + reason = "STATUS_HIVE_UNLOADED"; + break; + case MD_NTSTATUS_WIN_STATUS_COMPRESSION_DISABLED: + reason = "STATUS_COMPRESSION_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_SYSTEM_LIMITATION: + reason = "STATUS_FILE_SYSTEM_LIMITATION"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_IMAGE_HASH: + reason = "STATUS_INVALID_IMAGE_HASH"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_CAPABLE: + reason = "STATUS_NOT_CAPABLE"; + break; + 
case MD_NTSTATUS_WIN_STATUS_REQUEST_OUT_OF_SEQUENCE: + reason = "STATUS_REQUEST_OUT_OF_SEQUENCE"; + break; + case MD_NTSTATUS_WIN_STATUS_IMPLEMENTATION_LIMIT: + reason = "STATUS_IMPLEMENTATION_LIMIT"; + break; + case MD_NTSTATUS_WIN_STATUS_ELEVATION_REQUIRED: + reason = "STATUS_ELEVATION_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_SECURITY_CONTEXT: + reason = "STATUS_NO_SECURITY_CONTEXT"; + break; + case MD_NTSTATUS_WIN_STATUS_PKU2U_CERT_FAILURE: + reason = "STATUS_PKU2U_CERT_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_BEYOND_VDL: + reason = "STATUS_BEYOND_VDL"; + break; + case MD_NTSTATUS_WIN_STATUS_ENCOUNTERED_WRITE_IN_PROGRESS: + reason = "STATUS_ENCOUNTERED_WRITE_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_PTE_CHANGED: + reason = "STATUS_PTE_CHANGED"; + break; + case MD_NTSTATUS_WIN_STATUS_PURGE_FAILED: + reason = "STATUS_PURGE_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_CRED_REQUIRES_CONFIRMATION: + reason = "STATUS_CRED_REQUIRES_CONFIRMATION"; + break; + case MD_NTSTATUS_WIN_STATUS_CS_ENCRYPTION_INVALID_SERVER_RESPONSE: + reason = "STATUS_CS_ENCRYPTION_INVALID_SERVER_RESPONSE"; + break; + case MD_NTSTATUS_WIN_STATUS_CS_ENCRYPTION_UNSUPPORTED_SERVER: + reason = "STATUS_CS_ENCRYPTION_UNSUPPORTED_SERVER"; + break; + case MD_NTSTATUS_WIN_STATUS_CS_ENCRYPTION_EXISTING_ENCRYPTED_FILE: + reason = "STATUS_CS_ENCRYPTION_EXISTING_ENCRYPTED_FILE"; + break; + case MD_NTSTATUS_WIN_STATUS_CS_ENCRYPTION_NEW_ENCRYPTED_FILE: + reason = "STATUS_CS_ENCRYPTION_NEW_ENCRYPTED_FILE"; + break; + case MD_NTSTATUS_WIN_STATUS_CS_ENCRYPTION_FILE_NOT_CSE: + reason = "STATUS_CS_ENCRYPTION_FILE_NOT_CSE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_LABEL: + reason = "STATUS_INVALID_LABEL"; + break; + case MD_NTSTATUS_WIN_STATUS_DRIVER_PROCESS_TERMINATED: + reason = "STATUS_DRIVER_PROCESS_TERMINATED"; + break; + case MD_NTSTATUS_WIN_STATUS_AMBIGUOUS_SYSTEM_DEVICE: + reason = "STATUS_AMBIGUOUS_SYSTEM_DEVICE"; + break; + case MD_NTSTATUS_WIN_STATUS_SYSTEM_DEVICE_NOT_FOUND: + reason = "STATUS_SYSTEM_DEVICE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_RESTART_BOOT_APPLICATION: + reason = "STATUS_RESTART_BOOT_APPLICATION"; + break; + case MD_NTSTATUS_WIN_STATUS_INSUFFICIENT_NVRAM_RESOURCES: + reason = "STATUS_INSUFFICIENT_NVRAM_RESOURCES"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_SESSION: + reason = "STATUS_INVALID_SESSION"; + break; + case MD_NTSTATUS_WIN_STATUS_THREAD_ALREADY_IN_SESSION: + reason = "STATUS_THREAD_ALREADY_IN_SESSION"; + break; + case MD_NTSTATUS_WIN_STATUS_THREAD_NOT_IN_SESSION: + reason = "STATUS_THREAD_NOT_IN_SESSION"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_WEIGHT: + reason = "STATUS_INVALID_WEIGHT"; + break; + case MD_NTSTATUS_WIN_STATUS_REQUEST_PAUSED: + reason = "STATUS_REQUEST_PAUSED"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_RANGES_PROCESSED: + reason = "STATUS_NO_RANGES_PROCESSED"; + break; + case MD_NTSTATUS_WIN_STATUS_DISK_RESOURCES_EXHAUSTED: + reason = "STATUS_DISK_RESOURCES_EXHAUSTED"; + break; + case MD_NTSTATUS_WIN_STATUS_NEEDS_REMEDIATION: + reason = "STATUS_NEEDS_REMEDIATION"; + break; + case MD_NTSTATUS_WIN_STATUS_DEVICE_FEATURE_NOT_SUPPORTED: + reason = "STATUS_DEVICE_FEATURE_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_DEVICE_UNREACHABLE: + reason = "STATUS_DEVICE_UNREACHABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_TOKEN: + reason = "STATUS_INVALID_TOKEN"; + break; + case MD_NTSTATUS_WIN_STATUS_SERVER_UNAVAILABLE: + reason = "STATUS_SERVER_UNAVAILABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_NOT_AVAILABLE: + 
reason = "STATUS_FILE_NOT_AVAILABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_DEVICE_INSUFFICIENT_RESOURCES: + reason = "STATUS_DEVICE_INSUFFICIENT_RESOURCES"; + break; + case MD_NTSTATUS_WIN_STATUS_PACKAGE_UPDATING: + reason = "STATUS_PACKAGE_UPDATING"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_READ_FROM_COPY: + reason = "STATUS_NOT_READ_FROM_COPY"; + break; + case MD_NTSTATUS_WIN_STATUS_FT_WRITE_FAILURE: + reason = "STATUS_FT_WRITE_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_FT_DI_SCAN_REQUIRED: + reason = "STATUS_FT_DI_SCAN_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_OBJECT_NOT_EXTERNALLY_BACKED: + reason = "STATUS_OBJECT_NOT_EXTERNALLY_BACKED"; + break; + case MD_NTSTATUS_WIN_STATUS_EXTERNAL_BACKING_PROVIDER_UNKNOWN: + reason = "STATUS_EXTERNAL_BACKING_PROVIDER_UNKNOWN"; + break; + case MD_NTSTATUS_WIN_STATUS_DATA_CHECKSUM_ERROR: + reason = "STATUS_DATA_CHECKSUM_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_INTERMIXED_KERNEL_EA_OPERATION: + reason = "STATUS_INTERMIXED_KERNEL_EA_OPERATION"; + break; + case MD_NTSTATUS_WIN_STATUS_TRIM_READ_ZERO_NOT_SUPPORTED: + reason = "STATUS_TRIM_READ_ZERO_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_TOO_MANY_SEGMENT_DESCRIPTORS: + reason = "STATUS_TOO_MANY_SEGMENT_DESCRIPTORS"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_OFFSET_ALIGNMENT: + reason = "STATUS_INVALID_OFFSET_ALIGNMENT"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_FIELD_IN_PARAMETER_LIST: + reason = "STATUS_INVALID_FIELD_IN_PARAMETER_LIST"; + break; + case MD_NTSTATUS_WIN_STATUS_OPERATION_IN_PROGRESS: + reason = "STATUS_OPERATION_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_INITIATOR_TARGET_PATH: + reason = "STATUS_INVALID_INITIATOR_TARGET_PATH"; + break; + case MD_NTSTATUS_WIN_STATUS_SCRUB_DATA_DISABLED: + reason = "STATUS_SCRUB_DATA_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_REDUNDANT_STORAGE: + reason = "STATUS_NOT_REDUNDANT_STORAGE"; + break; + case MD_NTSTATUS_WIN_STATUS_RESIDENT_FILE_NOT_SUPPORTED: + reason = "STATUS_RESIDENT_FILE_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_COMPRESSED_FILE_NOT_SUPPORTED: + reason = "STATUS_COMPRESSED_FILE_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_DIRECTORY_NOT_SUPPORTED: + reason = "STATUS_DIRECTORY_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_IO_OPERATION_TIMEOUT: + reason = "STATUS_IO_OPERATION_TIMEOUT"; + break; + case MD_NTSTATUS_WIN_STATUS_SYSTEM_NEEDS_REMEDIATION: + reason = "STATUS_SYSTEM_NEEDS_REMEDIATION"; + break; + case MD_NTSTATUS_WIN_STATUS_APPX_INTEGRITY_FAILURE_CLR_NGEN: + reason = "STATUS_APPX_INTEGRITY_FAILURE_CLR_NGEN"; + break; + case MD_NTSTATUS_WIN_STATUS_SHARE_UNAVAILABLE: + reason = "STATUS_SHARE_UNAVAILABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_APISET_NOT_HOSTED: + reason = "STATUS_APISET_NOT_HOSTED"; + break; + case MD_NTSTATUS_WIN_STATUS_APISET_NOT_PRESENT: + reason = "STATUS_APISET_NOT_PRESENT"; + break; + case MD_NTSTATUS_WIN_STATUS_DEVICE_HARDWARE_ERROR: + reason = "STATUS_DEVICE_HARDWARE_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_TASK_NAME: + reason = "STATUS_INVALID_TASK_NAME"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_TASK_INDEX: + reason = "STATUS_INVALID_TASK_INDEX"; + break; + case MD_NTSTATUS_WIN_STATUS_THREAD_ALREADY_IN_TASK: + reason = "STATUS_THREAD_ALREADY_IN_TASK"; + break; + case MD_NTSTATUS_WIN_STATUS_CALLBACK_BYPASS: + reason = "STATUS_CALLBACK_BYPASS"; + break; + case MD_NTSTATUS_WIN_STATUS_UNDEFINED_SCOPE: + reason = "STATUS_UNDEFINED_SCOPE"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_CAP: + 
reason = "STATUS_INVALID_CAP"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_GUI_PROCESS: + reason = "STATUS_NOT_GUI_PROCESS"; + break; + case MD_NTSTATUS_WIN_STATUS_FAIL_FAST_EXCEPTION: + reason = "STATUS_FAIL_FAST_EXCEPTION"; + break; + case MD_NTSTATUS_WIN_STATUS_IMAGE_CERT_REVOKED: + reason = "STATUS_IMAGE_CERT_REVOKED"; + break; + case MD_NTSTATUS_WIN_STATUS_DYNAMIC_CODE_BLOCKED: + reason = "STATUS_DYNAMIC_CODE_BLOCKED"; + break; + case MD_NTSTATUS_WIN_STATUS_PORT_CLOSED: + reason = "STATUS_PORT_CLOSED"; + break; + case MD_NTSTATUS_WIN_STATUS_MESSAGE_LOST: + reason = "STATUS_MESSAGE_LOST"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_MESSAGE: + reason = "STATUS_INVALID_MESSAGE"; + break; + case MD_NTSTATUS_WIN_STATUS_REQUEST_CANCELED: + reason = "STATUS_REQUEST_CANCELED"; + break; + case MD_NTSTATUS_WIN_STATUS_RECURSIVE_DISPATCH: + reason = "STATUS_RECURSIVE_DISPATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_LPC_RECEIVE_BUFFER_EXPECTED: + reason = "STATUS_LPC_RECEIVE_BUFFER_EXPECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_LPC_INVALID_CONNECTION_USAGE: + reason = "STATUS_LPC_INVALID_CONNECTION_USAGE"; + break; + case MD_NTSTATUS_WIN_STATUS_LPC_REQUESTS_NOT_ALLOWED: + reason = "STATUS_LPC_REQUESTS_NOT_ALLOWED"; + break; + case MD_NTSTATUS_WIN_STATUS_RESOURCE_IN_USE: + reason = "STATUS_RESOURCE_IN_USE"; + break; + case MD_NTSTATUS_WIN_STATUS_HARDWARE_MEMORY_ERROR: + reason = "STATUS_HARDWARE_MEMORY_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_THREADPOOL_HANDLE_EXCEPTION: + reason = "STATUS_THREADPOOL_HANDLE_EXCEPTION"; + break; + case MD_NTSTATUS_WIN_STATUS_THREADPOOL_SET_EVENT_ON_COMPLETION_FAILED: + reason = "STATUS_THREADPOOL_SET_EVENT_ON_COMPLETION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_THREADPOOL_RELEASE_SEMAPHORE_ON_COMPLETION_FAILED: + reason = "STATUS_THREADPOOL_RELEASE_SEMAPHORE_ON_COMPLETION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_THREADPOOL_RELEASE_MUTEX_ON_COMPLETION_FAILED: + reason = "STATUS_THREADPOOL_RELEASE_MUTEX_ON_COMPLETION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_THREADPOOL_FREE_LIBRARY_ON_COMPLETION_FAILED: + reason = "STATUS_THREADPOOL_FREE_LIBRARY_ON_COMPLETION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_THREADPOOL_RELEASED_DURING_OPERATION: + reason = "STATUS_THREADPOOL_RELEASED_DURING_OPERATION"; + break; + case MD_NTSTATUS_WIN_STATUS_CALLBACK_RETURNED_WHILE_IMPERSONATING: + reason = "STATUS_CALLBACK_RETURNED_WHILE_IMPERSONATING"; + break; + case MD_NTSTATUS_WIN_STATUS_APC_RETURNED_WHILE_IMPERSONATING: + reason = "STATUS_APC_RETURNED_WHILE_IMPERSONATING"; + break; + case MD_NTSTATUS_WIN_STATUS_PROCESS_IS_PROTECTED: + reason = "STATUS_PROCESS_IS_PROTECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_MCA_EXCEPTION: + reason = "STATUS_MCA_EXCEPTION"; + break; + case MD_NTSTATUS_WIN_STATUS_CERTIFICATE_MAPPING_NOT_UNIQUE: + reason = "STATUS_CERTIFICATE_MAPPING_NOT_UNIQUE"; + break; + case MD_NTSTATUS_WIN_STATUS_SYMLINK_CLASS_DISABLED: + reason = "STATUS_SYMLINK_CLASS_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_IDN_NORMALIZATION: + reason = "STATUS_INVALID_IDN_NORMALIZATION"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_UNICODE_TRANSLATION: + reason = "STATUS_NO_UNICODE_TRANSLATION"; + break; + case MD_NTSTATUS_WIN_STATUS_ALREADY_REGISTERED: + reason = "STATUS_ALREADY_REGISTERED"; + break; + case MD_NTSTATUS_WIN_STATUS_CONTEXT_MISMATCH: + reason = "STATUS_CONTEXT_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_PORT_ALREADY_HAS_COMPLETION_LIST: + reason = "STATUS_PORT_ALREADY_HAS_COMPLETION_LIST"; + break; + case 
MD_NTSTATUS_WIN_STATUS_CALLBACK_RETURNED_THREAD_PRIORITY: + reason = "STATUS_CALLBACK_RETURNED_THREAD_PRIORITY"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_THREAD: + reason = "STATUS_INVALID_THREAD"; + break; + case MD_NTSTATUS_WIN_STATUS_CALLBACK_RETURNED_TRANSACTION: + reason = "STATUS_CALLBACK_RETURNED_TRANSACTION"; + break; + case MD_NTSTATUS_WIN_STATUS_CALLBACK_RETURNED_LDR_LOCK: + reason = "STATUS_CALLBACK_RETURNED_LDR_LOCK"; + break; + case MD_NTSTATUS_WIN_STATUS_CALLBACK_RETURNED_LANG: + reason = "STATUS_CALLBACK_RETURNED_LANG"; + break; + case MD_NTSTATUS_WIN_STATUS_CALLBACK_RETURNED_PRI_BACK: + reason = "STATUS_CALLBACK_RETURNED_PRI_BACK"; + break; + case MD_NTSTATUS_WIN_STATUS_CALLBACK_RETURNED_THREAD_AFFINITY: + reason = "STATUS_CALLBACK_RETURNED_THREAD_AFFINITY"; + break; + case MD_NTSTATUS_WIN_STATUS_DISK_REPAIR_DISABLED: + reason = "STATUS_DISK_REPAIR_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_DOMAIN_RENAME_IN_PROGRESS: + reason = "STATUS_DS_DOMAIN_RENAME_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_DISK_QUOTA_EXCEEDED: + reason = "STATUS_DISK_QUOTA_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_CONTENT_BLOCKED: + reason = "STATUS_CONTENT_BLOCKED"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_CLUSTERS: + reason = "STATUS_BAD_CLUSTERS"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLUME_DIRTY: + reason = "STATUS_VOLUME_DIRTY"; + break; + case MD_NTSTATUS_WIN_STATUS_DISK_REPAIR_UNSUCCESSFUL: + reason = "STATUS_DISK_REPAIR_UNSUCCESSFUL"; + break; + case MD_NTSTATUS_WIN_STATUS_CORRUPT_LOG_OVERFULL: + reason = "STATUS_CORRUPT_LOG_OVERFULL"; + break; + case MD_NTSTATUS_WIN_STATUS_CORRUPT_LOG_CORRUPTED: + reason = "STATUS_CORRUPT_LOG_CORRUPTED"; + break; + case MD_NTSTATUS_WIN_STATUS_CORRUPT_LOG_UNAVAILABLE: + reason = "STATUS_CORRUPT_LOG_UNAVAILABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_CORRUPT_LOG_DELETED_FULL: + reason = "STATUS_CORRUPT_LOG_DELETED_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_CORRUPT_LOG_CLEARED: + reason = "STATUS_CORRUPT_LOG_CLEARED"; + break; + case MD_NTSTATUS_WIN_STATUS_ORPHAN_NAME_EXHAUSTED: + reason = "STATUS_ORPHAN_NAME_EXHAUSTED"; + break; + case MD_NTSTATUS_WIN_STATUS_PROACTIVE_SCAN_IN_PROGRESS: + reason = "STATUS_PROACTIVE_SCAN_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_ENCRYPTED_IO_NOT_POSSIBLE: + reason = "STATUS_ENCRYPTED_IO_NOT_POSSIBLE"; + break; + case MD_NTSTATUS_WIN_STATUS_CORRUPT_LOG_UPLEVEL_RECORDS: + reason = "STATUS_CORRUPT_LOG_UPLEVEL_RECORDS"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_CHECKED_OUT: + reason = "STATUS_FILE_CHECKED_OUT"; + break; + case MD_NTSTATUS_WIN_STATUS_CHECKOUT_REQUIRED: + reason = "STATUS_CHECKOUT_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_FILE_TYPE: + reason = "STATUS_BAD_FILE_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_TOO_LARGE: + reason = "STATUS_FILE_TOO_LARGE"; + break; + case MD_NTSTATUS_WIN_STATUS_FORMS_AUTH_REQUIRED: + reason = "STATUS_FORMS_AUTH_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_VIRUS_INFECTED: + reason = "STATUS_VIRUS_INFECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_VIRUS_DELETED: + reason = "STATUS_VIRUS_DELETED"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_MCFG_TABLE: + reason = "STATUS_BAD_MCFG_TABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_CANNOT_BREAK_OPLOCK: + reason = "STATUS_CANNOT_BREAK_OPLOCK"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_KEY: + reason = "STATUS_BAD_KEY"; + break; + case MD_NTSTATUS_WIN_STATUS_BAD_DATA: + reason = "STATUS_BAD_DATA"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_KEY: + reason = "STATUS_NO_KEY"; + 
break; + case MD_NTSTATUS_WIN_STATUS_FILE_HANDLE_REVOKED: + reason = "STATUS_FILE_HANDLE_REVOKED"; + break; + case MD_NTSTATUS_WIN_STATUS_WOW_ASSERTION: + reason = "STATUS_WOW_ASSERTION"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_SIGNATURE: + reason = "STATUS_INVALID_SIGNATURE"; + break; + case MD_NTSTATUS_WIN_STATUS_HMAC_NOT_SUPPORTED: + reason = "STATUS_HMAC_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_AUTH_TAG_MISMATCH: + reason = "STATUS_AUTH_TAG_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_STATE_TRANSITION: + reason = "STATUS_INVALID_STATE_TRANSITION"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_KERNEL_INFO_VERSION: + reason = "STATUS_INVALID_KERNEL_INFO_VERSION"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PEP_INFO_VERSION: + reason = "STATUS_INVALID_PEP_INFO_VERSION"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_QUEUE_OVERFLOW: + reason = "STATUS_IPSEC_QUEUE_OVERFLOW"; + break; + case MD_NTSTATUS_WIN_STATUS_ND_QUEUE_OVERFLOW: + reason = "STATUS_ND_QUEUE_OVERFLOW"; + break; + case MD_NTSTATUS_WIN_STATUS_HOPLIMIT_EXCEEDED: + reason = "STATUS_HOPLIMIT_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_PROTOCOL_NOT_SUPPORTED: + reason = "STATUS_PROTOCOL_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_FASTPATH_REJECTED: + reason = "STATUS_FASTPATH_REJECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOST_WRITEBEHIND_DATA_NETWORK_DISCONNECTED: + reason = "STATUS_LOST_WRITEBEHIND_DATA_NETWORK_DISCONNECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOST_WRITEBEHIND_DATA_NETWORK_SERVER_ERROR: + reason = "STATUS_LOST_WRITEBEHIND_DATA_NETWORK_SERVER_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_LOST_WRITEBEHIND_DATA_LOCAL_DISK_ERROR: + reason = "STATUS_LOST_WRITEBEHIND_DATA_LOCAL_DISK_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_XML_PARSE_ERROR: + reason = "STATUS_XML_PARSE_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_XMLDSIG_ERROR: + reason = "STATUS_XMLDSIG_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_WRONG_COMPARTMENT: + reason = "STATUS_WRONG_COMPARTMENT"; + break; + case MD_NTSTATUS_WIN_STATUS_AUTHIP_FAILURE: + reason = "STATUS_AUTHIP_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_OID_MAPPED_GROUP_CANT_HAVE_MEMBERS: + reason = "STATUS_DS_OID_MAPPED_GROUP_CANT_HAVE_MEMBERS"; + break; + case MD_NTSTATUS_WIN_STATUS_DS_OID_NOT_FOUND: + reason = "STATUS_DS_OID_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_INCORRECT_ACCOUNT_TYPE: + reason = "STATUS_INCORRECT_ACCOUNT_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_HASH_NOT_SUPPORTED: + reason = "STATUS_HASH_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_HASH_NOT_PRESENT: + reason = "STATUS_HASH_NOT_PRESENT"; + break; + case MD_NTSTATUS_WIN_STATUS_SECONDARY_IC_PROVIDER_NOT_REGISTERED: + reason = "STATUS_SECONDARY_IC_PROVIDER_NOT_REGISTERED"; + break; + case MD_NTSTATUS_WIN_STATUS_GPIO_CLIENT_INFORMATION_INVALID: + reason = "STATUS_GPIO_CLIENT_INFORMATION_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_GPIO_VERSION_NOT_SUPPORTED: + reason = "STATUS_GPIO_VERSION_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GPIO_INVALID_REGISTRATION_PACKET: + reason = "STATUS_GPIO_INVALID_REGISTRATION_PACKET"; + break; + case MD_NTSTATUS_WIN_STATUS_GPIO_OPERATION_DENIED: + reason = "STATUS_GPIO_OPERATION_DENIED"; + break; + case MD_NTSTATUS_WIN_STATUS_GPIO_INCOMPATIBLE_CONNECT_MODE: + reason = "STATUS_GPIO_INCOMPATIBLE_CONNECT_MODE"; + break; + case MD_NTSTATUS_WIN_STATUS_CANNOT_SWITCH_RUNLEVEL: + reason = "STATUS_CANNOT_SWITCH_RUNLEVEL"; + break; + case 
MD_NTSTATUS_WIN_STATUS_INVALID_RUNLEVEL_SETTING: + reason = "STATUS_INVALID_RUNLEVEL_SETTING"; + break; + case MD_NTSTATUS_WIN_STATUS_RUNLEVEL_SWITCH_TIMEOUT: + reason = "STATUS_RUNLEVEL_SWITCH_TIMEOUT"; + break; + case MD_NTSTATUS_WIN_STATUS_RUNLEVEL_SWITCH_AGENT_TIMEOUT: + reason = "STATUS_RUNLEVEL_SWITCH_AGENT_TIMEOUT"; + break; + case MD_NTSTATUS_WIN_STATUS_RUNLEVEL_SWITCH_IN_PROGRESS: + reason = "STATUS_RUNLEVEL_SWITCH_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_APPCONTAINER: + reason = "STATUS_NOT_APPCONTAINER"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_SUPPORTED_IN_APPCONTAINER: + reason = "STATUS_NOT_SUPPORTED_IN_APPCONTAINER"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_PACKAGE_SID_LENGTH: + reason = "STATUS_INVALID_PACKAGE_SID_LENGTH"; + break; + case MD_NTSTATUS_WIN_STATUS_APP_DATA_NOT_FOUND: + reason = "STATUS_APP_DATA_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_APP_DATA_EXPIRED: + reason = "STATUS_APP_DATA_EXPIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_APP_DATA_CORRUPT: + reason = "STATUS_APP_DATA_CORRUPT"; + break; + case MD_NTSTATUS_WIN_STATUS_APP_DATA_LIMIT_EXCEEDED: + reason = "STATUS_APP_DATA_LIMIT_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_APP_DATA_REBOOT_REQUIRED: + reason = "STATUS_APP_DATA_REBOOT_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_OFFLOAD_READ_FLT_NOT_SUPPORTED: + reason = "STATUS_OFFLOAD_READ_FLT_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_OFFLOAD_WRITE_FLT_NOT_SUPPORTED: + reason = "STATUS_OFFLOAD_WRITE_FLT_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_OFFLOAD_READ_FILE_NOT_SUPPORTED: + reason = "STATUS_OFFLOAD_READ_FILE_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_OFFLOAD_WRITE_FILE_NOT_SUPPORTED: + reason = "STATUS_OFFLOAD_WRITE_FILE_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_DBG_NO_STATE_CHANGE: + reason = "DBG_NO_STATE_CHANGE"; + break; + case MD_NTSTATUS_WIN_DBG_APP_NOT_IDLE: + reason = "DBG_APP_NOT_IDLE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_STRING_BINDING: + reason = "RPC_NT_INVALID_STRING_BINDING"; + break; + case MD_NTSTATUS_WIN_RPC_NT_WRONG_KIND_OF_BINDING: + reason = "RPC_NT_WRONG_KIND_OF_BINDING"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_BINDING: + reason = "RPC_NT_INVALID_BINDING"; + break; + case MD_NTSTATUS_WIN_RPC_NT_PROTSEQ_NOT_SUPPORTED: + reason = "RPC_NT_PROTSEQ_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_RPC_PROTSEQ: + reason = "RPC_NT_INVALID_RPC_PROTSEQ"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_STRING_UUID: + reason = "RPC_NT_INVALID_STRING_UUID"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_ENDPOINT_FORMAT: + reason = "RPC_NT_INVALID_ENDPOINT_FORMAT"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_NET_ADDR: + reason = "RPC_NT_INVALID_NET_ADDR"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NO_ENDPOINT_FOUND: + reason = "RPC_NT_NO_ENDPOINT_FOUND"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_TIMEOUT: + reason = "RPC_NT_INVALID_TIMEOUT"; + break; + case MD_NTSTATUS_WIN_RPC_NT_OBJECT_NOT_FOUND: + reason = "RPC_NT_OBJECT_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_RPC_NT_ALREADY_REGISTERED: + reason = "RPC_NT_ALREADY_REGISTERED"; + break; + case MD_NTSTATUS_WIN_RPC_NT_TYPE_ALREADY_REGISTERED: + reason = "RPC_NT_TYPE_ALREADY_REGISTERED"; + break; + case MD_NTSTATUS_WIN_RPC_NT_ALREADY_LISTENING: + reason = "RPC_NT_ALREADY_LISTENING"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NO_PROTSEQS_REGISTERED: + reason = "RPC_NT_NO_PROTSEQS_REGISTERED"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NOT_LISTENING: + reason = "RPC_NT_NOT_LISTENING"; + 
break; + case MD_NTSTATUS_WIN_RPC_NT_UNKNOWN_MGR_TYPE: + reason = "RPC_NT_UNKNOWN_MGR_TYPE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_UNKNOWN_IF: + reason = "RPC_NT_UNKNOWN_IF"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NO_BINDINGS: + reason = "RPC_NT_NO_BINDINGS"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NO_PROTSEQS: + reason = "RPC_NT_NO_PROTSEQS"; + break; + case MD_NTSTATUS_WIN_RPC_NT_CANT_CREATE_ENDPOINT: + reason = "RPC_NT_CANT_CREATE_ENDPOINT"; + break; + case MD_NTSTATUS_WIN_RPC_NT_OUT_OF_RESOURCES: + reason = "RPC_NT_OUT_OF_RESOURCES"; + break; + case MD_NTSTATUS_WIN_RPC_NT_SERVER_UNAVAILABLE: + reason = "RPC_NT_SERVER_UNAVAILABLE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_SERVER_TOO_BUSY: + reason = "RPC_NT_SERVER_TOO_BUSY"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_NETWORK_OPTIONS: + reason = "RPC_NT_INVALID_NETWORK_OPTIONS"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NO_CALL_ACTIVE: + reason = "RPC_NT_NO_CALL_ACTIVE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_CALL_FAILED: + reason = "RPC_NT_CALL_FAILED"; + break; + case MD_NTSTATUS_WIN_RPC_NT_CALL_FAILED_DNE: + reason = "RPC_NT_CALL_FAILED_DNE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_PROTOCOL_ERROR: + reason = "RPC_NT_PROTOCOL_ERROR"; + break; + case MD_NTSTATUS_WIN_RPC_NT_UNSUPPORTED_TRANS_SYN: + reason = "RPC_NT_UNSUPPORTED_TRANS_SYN"; + break; + case MD_NTSTATUS_WIN_RPC_NT_UNSUPPORTED_TYPE: + reason = "RPC_NT_UNSUPPORTED_TYPE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_TAG: + reason = "RPC_NT_INVALID_TAG"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_BOUND: + reason = "RPC_NT_INVALID_BOUND"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NO_ENTRY_NAME: + reason = "RPC_NT_NO_ENTRY_NAME"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_NAME_SYNTAX: + reason = "RPC_NT_INVALID_NAME_SYNTAX"; + break; + case MD_NTSTATUS_WIN_RPC_NT_UNSUPPORTED_NAME_SYNTAX: + reason = "RPC_NT_UNSUPPORTED_NAME_SYNTAX"; + break; + case MD_NTSTATUS_WIN_RPC_NT_UUID_NO_ADDRESS: + reason = "RPC_NT_UUID_NO_ADDRESS"; + break; + case MD_NTSTATUS_WIN_RPC_NT_DUPLICATE_ENDPOINT: + reason = "RPC_NT_DUPLICATE_ENDPOINT"; + break; + case MD_NTSTATUS_WIN_RPC_NT_UNKNOWN_AUTHN_TYPE: + reason = "RPC_NT_UNKNOWN_AUTHN_TYPE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_MAX_CALLS_TOO_SMALL: + reason = "RPC_NT_MAX_CALLS_TOO_SMALL"; + break; + case MD_NTSTATUS_WIN_RPC_NT_STRING_TOO_LONG: + reason = "RPC_NT_STRING_TOO_LONG"; + break; + case MD_NTSTATUS_WIN_RPC_NT_PROTSEQ_NOT_FOUND: + reason = "RPC_NT_PROTSEQ_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_RPC_NT_PROCNUM_OUT_OF_RANGE: + reason = "RPC_NT_PROCNUM_OUT_OF_RANGE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_BINDING_HAS_NO_AUTH: + reason = "RPC_NT_BINDING_HAS_NO_AUTH"; + break; + case MD_NTSTATUS_WIN_RPC_NT_UNKNOWN_AUTHN_SERVICE: + reason = "RPC_NT_UNKNOWN_AUTHN_SERVICE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_UNKNOWN_AUTHN_LEVEL: + reason = "RPC_NT_UNKNOWN_AUTHN_LEVEL"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_AUTH_IDENTITY: + reason = "RPC_NT_INVALID_AUTH_IDENTITY"; + break; + case MD_NTSTATUS_WIN_RPC_NT_UNKNOWN_AUTHZ_SERVICE: + reason = "RPC_NT_UNKNOWN_AUTHZ_SERVICE"; + break; + case MD_NTSTATUS_WIN_EPT_NT_INVALID_ENTRY: + reason = "EPT_NT_INVALID_ENTRY"; + break; + case MD_NTSTATUS_WIN_EPT_NT_CANT_PERFORM_OP: + reason = "EPT_NT_CANT_PERFORM_OP"; + break; + case MD_NTSTATUS_WIN_EPT_NT_NOT_REGISTERED: + reason = "EPT_NT_NOT_REGISTERED"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NOTHING_TO_EXPORT: + reason = "RPC_NT_NOTHING_TO_EXPORT"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INCOMPLETE_NAME: + reason = "RPC_NT_INCOMPLETE_NAME"; + break; + 
case MD_NTSTATUS_WIN_RPC_NT_INVALID_VERS_OPTION: + reason = "RPC_NT_INVALID_VERS_OPTION"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NO_MORE_MEMBERS: + reason = "RPC_NT_NO_MORE_MEMBERS"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NOT_ALL_OBJS_UNEXPORTED: + reason = "RPC_NT_NOT_ALL_OBJS_UNEXPORTED"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INTERFACE_NOT_FOUND: + reason = "RPC_NT_INTERFACE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_RPC_NT_ENTRY_ALREADY_EXISTS: + reason = "RPC_NT_ENTRY_ALREADY_EXISTS"; + break; + case MD_NTSTATUS_WIN_RPC_NT_ENTRY_NOT_FOUND: + reason = "RPC_NT_ENTRY_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NAME_SERVICE_UNAVAILABLE: + reason = "RPC_NT_NAME_SERVICE_UNAVAILABLE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_NAF_ID: + reason = "RPC_NT_INVALID_NAF_ID"; + break; + case MD_NTSTATUS_WIN_RPC_NT_CANNOT_SUPPORT: + reason = "RPC_NT_CANNOT_SUPPORT"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NO_CONTEXT_AVAILABLE: + reason = "RPC_NT_NO_CONTEXT_AVAILABLE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INTERNAL_ERROR: + reason = "RPC_NT_INTERNAL_ERROR"; + break; + case MD_NTSTATUS_WIN_RPC_NT_ZERO_DIVIDE: + reason = "RPC_NT_ZERO_DIVIDE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_ADDRESS_ERROR: + reason = "RPC_NT_ADDRESS_ERROR"; + break; + case MD_NTSTATUS_WIN_RPC_NT_FP_DIV_ZERO: + reason = "RPC_NT_FP_DIV_ZERO"; + break; + case MD_NTSTATUS_WIN_RPC_NT_FP_UNDERFLOW: + reason = "RPC_NT_FP_UNDERFLOW"; + break; + case MD_NTSTATUS_WIN_RPC_NT_FP_OVERFLOW: + reason = "RPC_NT_FP_OVERFLOW"; + break; + case MD_NTSTATUS_WIN_RPC_NT_CALL_IN_PROGRESS: + reason = "RPC_NT_CALL_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NO_MORE_BINDINGS: + reason = "RPC_NT_NO_MORE_BINDINGS"; + break; + case MD_NTSTATUS_WIN_RPC_NT_GROUP_MEMBER_NOT_FOUND: + reason = "RPC_NT_GROUP_MEMBER_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_EPT_NT_CANT_CREATE: + reason = "EPT_NT_CANT_CREATE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_OBJECT: + reason = "RPC_NT_INVALID_OBJECT"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NO_INTERFACES: + reason = "RPC_NT_NO_INTERFACES"; + break; + case MD_NTSTATUS_WIN_RPC_NT_CALL_CANCELLED: + reason = "RPC_NT_CALL_CANCELLED"; + break; + case MD_NTSTATUS_WIN_RPC_NT_BINDING_INCOMPLETE: + reason = "RPC_NT_BINDING_INCOMPLETE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_COMM_FAILURE: + reason = "RPC_NT_COMM_FAILURE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_UNSUPPORTED_AUTHN_LEVEL: + reason = "RPC_NT_UNSUPPORTED_AUTHN_LEVEL"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NO_PRINC_NAME: + reason = "RPC_NT_NO_PRINC_NAME"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NOT_RPC_ERROR: + reason = "RPC_NT_NOT_RPC_ERROR"; + break; + case MD_NTSTATUS_WIN_RPC_NT_SEC_PKG_ERROR: + reason = "RPC_NT_SEC_PKG_ERROR"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NOT_CANCELLED: + reason = "RPC_NT_NOT_CANCELLED"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_ASYNC_HANDLE: + reason = "RPC_NT_INVALID_ASYNC_HANDLE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_ASYNC_CALL: + reason = "RPC_NT_INVALID_ASYNC_CALL"; + break; + case MD_NTSTATUS_WIN_RPC_NT_PROXY_ACCESS_DENIED: + reason = "RPC_NT_PROXY_ACCESS_DENIED"; + break; + case MD_NTSTATUS_WIN_RPC_NT_COOKIE_AUTH_FAILED: + reason = "RPC_NT_COOKIE_AUTH_FAILED"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NO_MORE_ENTRIES: + reason = "RPC_NT_NO_MORE_ENTRIES"; + break; + case MD_NTSTATUS_WIN_RPC_NT_SS_CHAR_TRANS_OPEN_FAIL: + reason = "RPC_NT_SS_CHAR_TRANS_OPEN_FAIL"; + break; + case MD_NTSTATUS_WIN_RPC_NT_SS_CHAR_TRANS_SHORT_FILE: + reason = "RPC_NT_SS_CHAR_TRANS_SHORT_FILE"; + break; + case 
MD_NTSTATUS_WIN_RPC_NT_SS_IN_NULL_CONTEXT: + reason = "RPC_NT_SS_IN_NULL_CONTEXT"; + break; + case MD_NTSTATUS_WIN_RPC_NT_SS_CONTEXT_MISMATCH: + reason = "RPC_NT_SS_CONTEXT_MISMATCH"; + break; + case MD_NTSTATUS_WIN_RPC_NT_SS_CONTEXT_DAMAGED: + reason = "RPC_NT_SS_CONTEXT_DAMAGED"; + break; + case MD_NTSTATUS_WIN_RPC_NT_SS_HANDLES_MISMATCH: + reason = "RPC_NT_SS_HANDLES_MISMATCH"; + break; + case MD_NTSTATUS_WIN_RPC_NT_SS_CANNOT_GET_CALL_HANDLE: + reason = "RPC_NT_SS_CANNOT_GET_CALL_HANDLE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_NULL_REF_POINTER: + reason = "RPC_NT_NULL_REF_POINTER"; + break; + case MD_NTSTATUS_WIN_RPC_NT_ENUM_VALUE_OUT_OF_RANGE: + reason = "RPC_NT_ENUM_VALUE_OUT_OF_RANGE"; + break; + case MD_NTSTATUS_WIN_RPC_NT_BYTE_COUNT_TOO_SMALL: + reason = "RPC_NT_BYTE_COUNT_TOO_SMALL"; + break; + case MD_NTSTATUS_WIN_RPC_NT_BAD_STUB_DATA: + reason = "RPC_NT_BAD_STUB_DATA"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_ES_ACTION: + reason = "RPC_NT_INVALID_ES_ACTION"; + break; + case MD_NTSTATUS_WIN_RPC_NT_WRONG_ES_VERSION: + reason = "RPC_NT_WRONG_ES_VERSION"; + break; + case MD_NTSTATUS_WIN_RPC_NT_WRONG_STUB_VERSION: + reason = "RPC_NT_WRONG_STUB_VERSION"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_PIPE_OBJECT: + reason = "RPC_NT_INVALID_PIPE_OBJECT"; + break; + case MD_NTSTATUS_WIN_RPC_NT_INVALID_PIPE_OPERATION: + reason = "RPC_NT_INVALID_PIPE_OPERATION"; + break; + case MD_NTSTATUS_WIN_RPC_NT_WRONG_PIPE_VERSION: + reason = "RPC_NT_WRONG_PIPE_VERSION"; + break; + case MD_NTSTATUS_WIN_RPC_NT_PIPE_CLOSED: + reason = "RPC_NT_PIPE_CLOSED"; + break; + case MD_NTSTATUS_WIN_RPC_NT_PIPE_DISCIPLINE_ERROR: + reason = "RPC_NT_PIPE_DISCIPLINE_ERROR"; + break; + case MD_NTSTATUS_WIN_RPC_NT_PIPE_EMPTY: + reason = "RPC_NT_PIPE_EMPTY"; + break; + case MD_NTSTATUS_WIN_STATUS_PNP_BAD_MPS_TABLE: + reason = "STATUS_PNP_BAD_MPS_TABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_PNP_TRANSLATION_FAILED: + reason = "STATUS_PNP_TRANSLATION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_PNP_IRQ_TRANSLATION_FAILED: + reason = "STATUS_PNP_IRQ_TRANSLATION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_PNP_INVALID_ID: + reason = "STATUS_PNP_INVALID_ID"; + break; + case MD_NTSTATUS_WIN_STATUS_IO_REISSUE_AS_CACHED: + reason = "STATUS_IO_REISSUE_AS_CACHED"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_WINSTATION_NAME_INVALID: + reason = "STATUS_CTX_WINSTATION_NAME_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_INVALID_PD: + reason = "STATUS_CTX_INVALID_PD"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_PD_NOT_FOUND: + reason = "STATUS_CTX_PD_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_CLOSE_PENDING: + reason = "STATUS_CTX_CLOSE_PENDING"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_NO_OUTBUF: + reason = "STATUS_CTX_NO_OUTBUF"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_MODEM_INF_NOT_FOUND: + reason = "STATUS_CTX_MODEM_INF_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_INVALID_MODEMNAME: + reason = "STATUS_CTX_INVALID_MODEMNAME"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_RESPONSE_ERROR: + reason = "STATUS_CTX_RESPONSE_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_MODEM_RESPONSE_TIMEOUT: + reason = "STATUS_CTX_MODEM_RESPONSE_TIMEOUT"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_MODEM_RESPONSE_NO_CARRIER: + reason = "STATUS_CTX_MODEM_RESPONSE_NO_CARRIER"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_MODEM_RESPONSE_NO_DIALTONE: + reason = "STATUS_CTX_MODEM_RESPONSE_NO_DIALTONE"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_MODEM_RESPONSE_BUSY: + reason = "STATUS_CTX_MODEM_RESPONSE_BUSY"; + 
break; + case MD_NTSTATUS_WIN_STATUS_CTX_MODEM_RESPONSE_VOICE: + reason = "STATUS_CTX_MODEM_RESPONSE_VOICE"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_TD_ERROR: + reason = "STATUS_CTX_TD_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_LICENSE_CLIENT_INVALID: + reason = "STATUS_CTX_LICENSE_CLIENT_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_LICENSE_NOT_AVAILABLE: + reason = "STATUS_CTX_LICENSE_NOT_AVAILABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_LICENSE_EXPIRED: + reason = "STATUS_CTX_LICENSE_EXPIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_WINSTATION_NOT_FOUND: + reason = "STATUS_CTX_WINSTATION_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_WINSTATION_NAME_COLLISION: + reason = "STATUS_CTX_WINSTATION_NAME_COLLISION"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_WINSTATION_BUSY: + reason = "STATUS_CTX_WINSTATION_BUSY"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_BAD_VIDEO_MODE: + reason = "STATUS_CTX_BAD_VIDEO_MODE"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_GRAPHICS_INVALID: + reason = "STATUS_CTX_GRAPHICS_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_NOT_CONSOLE: + reason = "STATUS_CTX_NOT_CONSOLE"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_CLIENT_QUERY_TIMEOUT: + reason = "STATUS_CTX_CLIENT_QUERY_TIMEOUT"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_CONSOLE_DISCONNECT: + reason = "STATUS_CTX_CONSOLE_DISCONNECT"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_CONSOLE_CONNECT: + reason = "STATUS_CTX_CONSOLE_CONNECT"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_SHADOW_DENIED: + reason = "STATUS_CTX_SHADOW_DENIED"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_WINSTATION_ACCESS_DENIED: + reason = "STATUS_CTX_WINSTATION_ACCESS_DENIED"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_INVALID_WD: + reason = "STATUS_CTX_INVALID_WD"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_WD_NOT_FOUND: + reason = "STATUS_CTX_WD_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_SHADOW_INVALID: + reason = "STATUS_CTX_SHADOW_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_SHADOW_DISABLED: + reason = "STATUS_CTX_SHADOW_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_RDP_PROTOCOL_ERROR: + reason = "STATUS_RDP_PROTOCOL_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_CLIENT_LICENSE_NOT_SET: + reason = "STATUS_CTX_CLIENT_LICENSE_NOT_SET"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_CLIENT_LICENSE_IN_USE: + reason = "STATUS_CTX_CLIENT_LICENSE_IN_USE"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_SHADOW_ENDED_BY_MODE_CHANGE: + reason = "STATUS_CTX_SHADOW_ENDED_BY_MODE_CHANGE"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_SHADOW_NOT_RUNNING: + reason = "STATUS_CTX_SHADOW_NOT_RUNNING"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_LOGON_DISABLED: + reason = "STATUS_CTX_LOGON_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_CTX_SECURITY_LAYER_ERROR: + reason = "STATUS_CTX_SECURITY_LAYER_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_TS_INCOMPATIBLE_SESSIONS: + reason = "STATUS_TS_INCOMPATIBLE_SESSIONS"; + break; + case MD_NTSTATUS_WIN_STATUS_TS_VIDEO_SUBSYSTEM_ERROR: + reason = "STATUS_TS_VIDEO_SUBSYSTEM_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_MUI_FILE_NOT_FOUND: + reason = "STATUS_MUI_FILE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_MUI_INVALID_FILE: + reason = "STATUS_MUI_INVALID_FILE"; + break; + case MD_NTSTATUS_WIN_STATUS_MUI_INVALID_RC_CONFIG: + reason = "STATUS_MUI_INVALID_RC_CONFIG"; + break; + case MD_NTSTATUS_WIN_STATUS_MUI_INVALID_LOCALE_NAME: + reason = "STATUS_MUI_INVALID_LOCALE_NAME"; + break; + case MD_NTSTATUS_WIN_STATUS_MUI_INVALID_ULTIMATEFALLBACK_NAME: + 
reason = "STATUS_MUI_INVALID_ULTIMATEFALLBACK_NAME"; + break; + case MD_NTSTATUS_WIN_STATUS_MUI_FILE_NOT_LOADED: + reason = "STATUS_MUI_FILE_NOT_LOADED"; + break; + case MD_NTSTATUS_WIN_STATUS_RESOURCE_ENUM_USER_STOP: + reason = "STATUS_RESOURCE_ENUM_USER_STOP"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_INVALID_NODE: + reason = "STATUS_CLUSTER_INVALID_NODE"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_EXISTS: + reason = "STATUS_CLUSTER_NODE_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_JOIN_IN_PROGRESS: + reason = "STATUS_CLUSTER_JOIN_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_NOT_FOUND: + reason = "STATUS_CLUSTER_NODE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_LOCAL_NODE_NOT_FOUND: + reason = "STATUS_CLUSTER_LOCAL_NODE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NETWORK_EXISTS: + reason = "STATUS_CLUSTER_NETWORK_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NETWORK_NOT_FOUND: + reason = "STATUS_CLUSTER_NETWORK_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NETINTERFACE_EXISTS: + reason = "STATUS_CLUSTER_NETINTERFACE_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NETINTERFACE_NOT_FOUND: + reason = "STATUS_CLUSTER_NETINTERFACE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_INVALID_REQUEST: + reason = "STATUS_CLUSTER_INVALID_REQUEST"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_INVALID_NETWORK_PROVIDER: + reason = "STATUS_CLUSTER_INVALID_NETWORK_PROVIDER"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_DOWN: + reason = "STATUS_CLUSTER_NODE_DOWN"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_UNREACHABLE: + reason = "STATUS_CLUSTER_NODE_UNREACHABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_NOT_MEMBER: + reason = "STATUS_CLUSTER_NODE_NOT_MEMBER"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_JOIN_NOT_IN_PROGRESS: + reason = "STATUS_CLUSTER_JOIN_NOT_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_INVALID_NETWORK: + reason = "STATUS_CLUSTER_INVALID_NETWORK"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NO_NET_ADAPTERS: + reason = "STATUS_CLUSTER_NO_NET_ADAPTERS"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_UP: + reason = "STATUS_CLUSTER_NODE_UP"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_PAUSED: + reason = "STATUS_CLUSTER_NODE_PAUSED"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NODE_NOT_PAUSED: + reason = "STATUS_CLUSTER_NODE_NOT_PAUSED"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NO_SECURITY_CONTEXT: + reason = "STATUS_CLUSTER_NO_SECURITY_CONTEXT"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NETWORK_NOT_INTERNAL: + reason = "STATUS_CLUSTER_NETWORK_NOT_INTERNAL"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_POISONED: + reason = "STATUS_CLUSTER_POISONED"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_NON_CSV_PATH: + reason = "STATUS_CLUSTER_NON_CSV_PATH"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_VOLUME_NOT_LOCAL: + reason = "STATUS_CLUSTER_CSV_VOLUME_NOT_LOCAL"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_READ_OPLOCK_BREAK_IN_PROGRESS: + reason = "STATUS_CLUSTER_CSV_READ_OPLOCK_BREAK_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_AUTO_PAUSE_ERROR: + reason = "STATUS_CLUSTER_CSV_AUTO_PAUSE_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_REDIRECTED: + reason = "STATUS_CLUSTER_CSV_REDIRECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_NOT_REDIRECTED: + reason = "STATUS_CLUSTER_CSV_NOT_REDIRECTED"; + break; + case 
MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_VOLUME_DRAINING: + reason = "STATUS_CLUSTER_CSV_VOLUME_DRAINING"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_SNAPSHOT_CREATION_IN_PROGRESS: + reason = "STATUS_CLUSTER_CSV_SNAPSHOT_CREATION_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_CLUSTER_CSV_VOLUME_DRAINING_SUCCEEDED_DOWNLEVEL: + reason = "STATUS_CLUSTER_CSV_VOLUME_DRAINING_SUCCEEDED_DOWNLEVEL"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_OPCODE: + reason = "STATUS_ACPI_INVALID_OPCODE"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_STACK_OVERFLOW: + reason = "STATUS_ACPI_STACK_OVERFLOW"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_ASSERT_FAILED: + reason = "STATUS_ACPI_ASSERT_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_INDEX: + reason = "STATUS_ACPI_INVALID_INDEX"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_ARGUMENT: + reason = "STATUS_ACPI_INVALID_ARGUMENT"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_FATAL: + reason = "STATUS_ACPI_FATAL"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_SUPERNAME: + reason = "STATUS_ACPI_INVALID_SUPERNAME"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_ARGTYPE: + reason = "STATUS_ACPI_INVALID_ARGTYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_OBJTYPE: + reason = "STATUS_ACPI_INVALID_OBJTYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_TARGETTYPE: + reason = "STATUS_ACPI_INVALID_TARGETTYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_INCORRECT_ARGUMENT_COUNT: + reason = "STATUS_ACPI_INCORRECT_ARGUMENT_COUNT"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_ADDRESS_NOT_MAPPED: + reason = "STATUS_ACPI_ADDRESS_NOT_MAPPED"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_EVENTTYPE: + reason = "STATUS_ACPI_INVALID_EVENTTYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_HANDLER_COLLISION: + reason = "STATUS_ACPI_HANDLER_COLLISION"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_DATA: + reason = "STATUS_ACPI_INVALID_DATA"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_REGION: + reason = "STATUS_ACPI_INVALID_REGION"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_ACCESS_SIZE: + reason = "STATUS_ACPI_INVALID_ACCESS_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_ACQUIRE_GLOBAL_LOCK: + reason = "STATUS_ACPI_ACQUIRE_GLOBAL_LOCK"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_ALREADY_INITIALIZED: + reason = "STATUS_ACPI_ALREADY_INITIALIZED"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_NOT_INITIALIZED: + reason = "STATUS_ACPI_NOT_INITIALIZED"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_MUTEX_LEVEL: + reason = "STATUS_ACPI_INVALID_MUTEX_LEVEL"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_MUTEX_NOT_OWNED: + reason = "STATUS_ACPI_MUTEX_NOT_OWNED"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_MUTEX_NOT_OWNER: + reason = "STATUS_ACPI_MUTEX_NOT_OWNER"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_RS_ACCESS: + reason = "STATUS_ACPI_RS_ACCESS"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_INVALID_TABLE: + reason = "STATUS_ACPI_INVALID_TABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_REG_HANDLER_FAILED: + reason = "STATUS_ACPI_REG_HANDLER_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_ACPI_POWER_REQUEST_FAILED: + reason = "STATUS_ACPI_POWER_REQUEST_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_SECTION_NOT_FOUND: + reason = "STATUS_SXS_SECTION_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_CANT_GEN_ACTCTX: + reason = "STATUS_SXS_CANT_GEN_ACTCTX"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_INVALID_ACTCTXDATA_FORMAT: + reason = "STATUS_SXS_INVALID_ACTCTXDATA_FORMAT"; + 
break; + case MD_NTSTATUS_WIN_STATUS_SXS_ASSEMBLY_NOT_FOUND: + reason = "STATUS_SXS_ASSEMBLY_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_MANIFEST_FORMAT_ERROR: + reason = "STATUS_SXS_MANIFEST_FORMAT_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_MANIFEST_PARSE_ERROR: + reason = "STATUS_SXS_MANIFEST_PARSE_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_ACTIVATION_CONTEXT_DISABLED: + reason = "STATUS_SXS_ACTIVATION_CONTEXT_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_KEY_NOT_FOUND: + reason = "STATUS_SXS_KEY_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_VERSION_CONFLICT: + reason = "STATUS_SXS_VERSION_CONFLICT"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_WRONG_SECTION_TYPE: + reason = "STATUS_SXS_WRONG_SECTION_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_THREAD_QUERIES_DISABLED: + reason = "STATUS_SXS_THREAD_QUERIES_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_ASSEMBLY_MISSING: + reason = "STATUS_SXS_ASSEMBLY_MISSING"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_PROCESS_DEFAULT_ALREADY_SET: + reason = "STATUS_SXS_PROCESS_DEFAULT_ALREADY_SET"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_EARLY_DEACTIVATION: + reason = "STATUS_SXS_EARLY_DEACTIVATION"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_INVALID_DEACTIVATION: + reason = "STATUS_SXS_INVALID_DEACTIVATION"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_MULTIPLE_DEACTIVATION: + reason = "STATUS_SXS_MULTIPLE_DEACTIVATION"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_SYSTEM_DEFAULT_ACTIVATION_CONTEXT_EMPTY: + reason = "STATUS_SXS_SYSTEM_DEFAULT_ACTIVATION_CONTEXT_EMPTY"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_PROCESS_TERMINATION_REQUESTED: + reason = "STATUS_SXS_PROCESS_TERMINATION_REQUESTED"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_CORRUPT_ACTIVATION_STACK: + reason = "STATUS_SXS_CORRUPT_ACTIVATION_STACK"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_CORRUPTION: + reason = "STATUS_SXS_CORRUPTION"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_INVALID_IDENTITY_ATTRIBUTE_VALUE: + reason = "STATUS_SXS_INVALID_IDENTITY_ATTRIBUTE_VALUE"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_INVALID_IDENTITY_ATTRIBUTE_NAME: + reason = "STATUS_SXS_INVALID_IDENTITY_ATTRIBUTE_NAME"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_IDENTITY_DUPLICATE_ATTRIBUTE: + reason = "STATUS_SXS_IDENTITY_DUPLICATE_ATTRIBUTE"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_IDENTITY_PARSE_ERROR: + reason = "STATUS_SXS_IDENTITY_PARSE_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_COMPONENT_STORE_CORRUPT: + reason = "STATUS_SXS_COMPONENT_STORE_CORRUPT"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_FILE_HASH_MISMATCH: + reason = "STATUS_SXS_FILE_HASH_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_MANIFEST_IDENTITY_SAME_BUT_CONTENTS_DIFFERENT: + reason = "STATUS_SXS_MANIFEST_IDENTITY_SAME_BUT_CONTENTS_DIFFERENT"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_IDENTITIES_DIFFERENT: + reason = "STATUS_SXS_IDENTITIES_DIFFERENT"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_ASSEMBLY_IS_NOT_A_DEPLOYMENT: + reason = "STATUS_SXS_ASSEMBLY_IS_NOT_A_DEPLOYMENT"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_FILE_NOT_PART_OF_ASSEMBLY: + reason = "STATUS_SXS_FILE_NOT_PART_OF_ASSEMBLY"; + break; + case MD_NTSTATUS_WIN_STATUS_ADVANCED_INSTALLER_FAILED: + reason = "STATUS_ADVANCED_INSTALLER_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_XML_ENCODING_MISMATCH: + reason = "STATUS_XML_ENCODING_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_MANIFEST_TOO_BIG: + reason = "STATUS_SXS_MANIFEST_TOO_BIG"; + break; + case 
MD_NTSTATUS_WIN_STATUS_SXS_SETTING_NOT_REGISTERED: + reason = "STATUS_SXS_SETTING_NOT_REGISTERED"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_TRANSACTION_CLOSURE_INCOMPLETE: + reason = "STATUS_SXS_TRANSACTION_CLOSURE_INCOMPLETE"; + break; + case MD_NTSTATUS_WIN_STATUS_SMI_PRIMITIVE_INSTALLER_FAILED: + reason = "STATUS_SMI_PRIMITIVE_INSTALLER_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_GENERIC_COMMAND_FAILED: + reason = "STATUS_GENERIC_COMMAND_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_SXS_FILE_HASH_MISSING: + reason = "STATUS_SXS_FILE_HASH_MISSING"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTIONAL_CONFLICT: + reason = "STATUS_TRANSACTIONAL_CONFLICT"; + break; + case MD_NTSTATUS_WIN_STATUS_INVALID_TRANSACTION: + reason = "STATUS_INVALID_TRANSACTION"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_NOT_ACTIVE: + reason = "STATUS_TRANSACTION_NOT_ACTIVE"; + break; + case MD_NTSTATUS_WIN_STATUS_TM_INITIALIZATION_FAILED: + reason = "STATUS_TM_INITIALIZATION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_RM_NOT_ACTIVE: + reason = "STATUS_RM_NOT_ACTIVE"; + break; + case MD_NTSTATUS_WIN_STATUS_RM_METADATA_CORRUPT: + reason = "STATUS_RM_METADATA_CORRUPT"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_NOT_JOINED: + reason = "STATUS_TRANSACTION_NOT_JOINED"; + break; + case MD_NTSTATUS_WIN_STATUS_DIRECTORY_NOT_RM: + reason = "STATUS_DIRECTORY_NOT_RM"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTIONS_UNSUPPORTED_REMOTE: + reason = "STATUS_TRANSACTIONS_UNSUPPORTED_REMOTE"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_RESIZE_INVALID_SIZE: + reason = "STATUS_LOG_RESIZE_INVALID_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_REMOTE_FILE_VERSION_MISMATCH: + reason = "STATUS_REMOTE_FILE_VERSION_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_CRM_PROTOCOL_ALREADY_EXISTS: + reason = "STATUS_CRM_PROTOCOL_ALREADY_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_PROPAGATION_FAILED: + reason = "STATUS_TRANSACTION_PROPAGATION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_CRM_PROTOCOL_NOT_FOUND: + reason = "STATUS_CRM_PROTOCOL_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_SUPERIOR_EXISTS: + reason = "STATUS_TRANSACTION_SUPERIOR_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_REQUEST_NOT_VALID: + reason = "STATUS_TRANSACTION_REQUEST_NOT_VALID"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_NOT_REQUESTED: + reason = "STATUS_TRANSACTION_NOT_REQUESTED"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_ALREADY_ABORTED: + reason = "STATUS_TRANSACTION_ALREADY_ABORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_ALREADY_COMMITTED: + reason = "STATUS_TRANSACTION_ALREADY_COMMITTED"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_INVALID_MARSHALL_BUFFER: + reason = "STATUS_TRANSACTION_INVALID_MARSHALL_BUFFER"; + break; + case MD_NTSTATUS_WIN_STATUS_CURRENT_TRANSACTION_NOT_VALID: + reason = "STATUS_CURRENT_TRANSACTION_NOT_VALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_GROWTH_FAILED: + reason = "STATUS_LOG_GROWTH_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_OBJECT_NO_LONGER_EXISTS: + reason = "STATUS_OBJECT_NO_LONGER_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_STREAM_MINIVERSION_NOT_FOUND: + reason = "STATUS_STREAM_MINIVERSION_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_STREAM_MINIVERSION_NOT_VALID: + reason = "STATUS_STREAM_MINIVERSION_NOT_VALID"; + break; + case MD_NTSTATUS_WIN_STATUS_MINIVERSION_INACCESSIBLE_FROM_SPECIFIED_TRANSACTION: + reason = "STATUS_MINIVERSION_INACCESSIBLE_FROM_SPECIFIED_TRANSACTION"; + break; + case 
MD_NTSTATUS_WIN_STATUS_CANT_OPEN_MINIVERSION_WITH_MODIFY_INTENT: + reason = "STATUS_CANT_OPEN_MINIVERSION_WITH_MODIFY_INTENT"; + break; + case MD_NTSTATUS_WIN_STATUS_CANT_CREATE_MORE_STREAM_MINIVERSIONS: + reason = "STATUS_CANT_CREATE_MORE_STREAM_MINIVERSIONS"; + break; + case MD_NTSTATUS_WIN_STATUS_HANDLE_NO_LONGER_VALID: + reason = "STATUS_HANDLE_NO_LONGER_VALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_CORRUPTION_DETECTED: + reason = "STATUS_LOG_CORRUPTION_DETECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_RM_DISCONNECTED: + reason = "STATUS_RM_DISCONNECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_ENLISTMENT_NOT_SUPERIOR: + reason = "STATUS_ENLISTMENT_NOT_SUPERIOR"; + break; + case MD_NTSTATUS_WIN_STATUS_FILE_IDENTITY_NOT_PERSISTENT: + reason = "STATUS_FILE_IDENTITY_NOT_PERSISTENT"; + break; + case MD_NTSTATUS_WIN_STATUS_CANT_BREAK_TRANSACTIONAL_DEPENDENCY: + reason = "STATUS_CANT_BREAK_TRANSACTIONAL_DEPENDENCY"; + break; + case MD_NTSTATUS_WIN_STATUS_CANT_CROSS_RM_BOUNDARY: + reason = "STATUS_CANT_CROSS_RM_BOUNDARY"; + break; + case MD_NTSTATUS_WIN_STATUS_TXF_DIR_NOT_EMPTY: + reason = "STATUS_TXF_DIR_NOT_EMPTY"; + break; + case MD_NTSTATUS_WIN_STATUS_INDOUBT_TRANSACTIONS_EXIST: + reason = "STATUS_INDOUBT_TRANSACTIONS_EXIST"; + break; + case MD_NTSTATUS_WIN_STATUS_TM_VOLATILE: + reason = "STATUS_TM_VOLATILE"; + break; + case MD_NTSTATUS_WIN_STATUS_ROLLBACK_TIMER_EXPIRED: + reason = "STATUS_ROLLBACK_TIMER_EXPIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_TXF_ATTRIBUTE_CORRUPT: + reason = "STATUS_TXF_ATTRIBUTE_CORRUPT"; + break; + case MD_NTSTATUS_WIN_STATUS_EFS_NOT_ALLOWED_IN_TRANSACTION: + reason = "STATUS_EFS_NOT_ALLOWED_IN_TRANSACTION"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTIONAL_OPEN_NOT_ALLOWED: + reason = "STATUS_TRANSACTIONAL_OPEN_NOT_ALLOWED"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTED_MAPPING_UNSUPPORTED_REMOTE: + reason = "STATUS_TRANSACTED_MAPPING_UNSUPPORTED_REMOTE"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_REQUIRED_PROMOTION: + reason = "STATUS_TRANSACTION_REQUIRED_PROMOTION"; + break; + case MD_NTSTATUS_WIN_STATUS_CANNOT_EXECUTE_FILE_IN_TRANSACTION: + reason = "STATUS_CANNOT_EXECUTE_FILE_IN_TRANSACTION"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTIONS_NOT_FROZEN: + reason = "STATUS_TRANSACTIONS_NOT_FROZEN"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_FREEZE_IN_PROGRESS: + reason = "STATUS_TRANSACTION_FREEZE_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_NOT_SNAPSHOT_VOLUME: + reason = "STATUS_NOT_SNAPSHOT_VOLUME"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_SAVEPOINT_WITH_OPEN_FILES: + reason = "STATUS_NO_SAVEPOINT_WITH_OPEN_FILES"; + break; + case MD_NTSTATUS_WIN_STATUS_SPARSE_NOT_ALLOWED_IN_TRANSACTION: + reason = "STATUS_SPARSE_NOT_ALLOWED_IN_TRANSACTION"; + break; + case MD_NTSTATUS_WIN_STATUS_TM_IDENTITY_MISMATCH: + reason = "STATUS_TM_IDENTITY_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_FLOATED_SECTION: + reason = "STATUS_FLOATED_SECTION"; + break; + case MD_NTSTATUS_WIN_STATUS_CANNOT_ACCEPT_TRANSACTED_WORK: + reason = "STATUS_CANNOT_ACCEPT_TRANSACTED_WORK"; + break; + case MD_NTSTATUS_WIN_STATUS_CANNOT_ABORT_TRANSACTIONS: + reason = "STATUS_CANNOT_ABORT_TRANSACTIONS"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_NOT_FOUND: + reason = "STATUS_TRANSACTION_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_RESOURCEMANAGER_NOT_FOUND: + reason = "STATUS_RESOURCEMANAGER_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_ENLISTMENT_NOT_FOUND: + reason = "STATUS_ENLISTMENT_NOT_FOUND"; + break; + case 
MD_NTSTATUS_WIN_STATUS_TRANSACTIONMANAGER_NOT_FOUND: + reason = "STATUS_TRANSACTIONMANAGER_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTIONMANAGER_NOT_ONLINE: + reason = "STATUS_TRANSACTIONMANAGER_NOT_ONLINE"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTIONMANAGER_RECOVERY_NAME_COLLISION: + reason = "STATUS_TRANSACTIONMANAGER_RECOVERY_NAME_COLLISION"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_NOT_ROOT: + reason = "STATUS_TRANSACTION_NOT_ROOT"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_OBJECT_EXPIRED: + reason = "STATUS_TRANSACTION_OBJECT_EXPIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_COMPRESSION_NOT_ALLOWED_IN_TRANSACTION: + reason = "STATUS_COMPRESSION_NOT_ALLOWED_IN_TRANSACTION"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_RESPONSE_NOT_ENLISTED: + reason = "STATUS_TRANSACTION_RESPONSE_NOT_ENLISTED"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_RECORD_TOO_LONG: + reason = "STATUS_TRANSACTION_RECORD_TOO_LONG"; + break; + case MD_NTSTATUS_WIN_STATUS_NO_LINK_TRACKING_IN_TRANSACTION: + reason = "STATUS_NO_LINK_TRACKING_IN_TRANSACTION"; + break; + case MD_NTSTATUS_WIN_STATUS_OPERATION_NOT_SUPPORTED_IN_TRANSACTION: + reason = "STATUS_OPERATION_NOT_SUPPORTED_IN_TRANSACTION"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_INTEGRITY_VIOLATED: + reason = "STATUS_TRANSACTION_INTEGRITY_VIOLATED"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTIONMANAGER_IDENTITY_MISMATCH: + reason = "STATUS_TRANSACTIONMANAGER_IDENTITY_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_RM_CANNOT_BE_FROZEN_FOR_SNAPSHOT: + reason = "STATUS_RM_CANNOT_BE_FROZEN_FOR_SNAPSHOT"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_MUST_WRITETHROUGH: + reason = "STATUS_TRANSACTION_MUST_WRITETHROUGH"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_NO_SUPERIOR: + reason = "STATUS_TRANSACTION_NO_SUPERIOR"; + break; + case MD_NTSTATUS_WIN_STATUS_EXPIRED_HANDLE: + reason = "STATUS_EXPIRED_HANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_TRANSACTION_NOT_ENLISTED: + reason = "STATUS_TRANSACTION_NOT_ENLISTED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_SECTOR_INVALID: + reason = "STATUS_LOG_SECTOR_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_SECTOR_PARITY_INVALID: + reason = "STATUS_LOG_SECTOR_PARITY_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_SECTOR_REMAPPED: + reason = "STATUS_LOG_SECTOR_REMAPPED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_BLOCK_INCOMPLETE: + reason = "STATUS_LOG_BLOCK_INCOMPLETE"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_INVALID_RANGE: + reason = "STATUS_LOG_INVALID_RANGE"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_BLOCKS_EXHAUSTED: + reason = "STATUS_LOG_BLOCKS_EXHAUSTED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_READ_CONTEXT_INVALID: + reason = "STATUS_LOG_READ_CONTEXT_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_RESTART_INVALID: + reason = "STATUS_LOG_RESTART_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_BLOCK_VERSION: + reason = "STATUS_LOG_BLOCK_VERSION"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_BLOCK_INVALID: + reason = "STATUS_LOG_BLOCK_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_READ_MODE_INVALID: + reason = "STATUS_LOG_READ_MODE_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_METADATA_CORRUPT: + reason = "STATUS_LOG_METADATA_CORRUPT"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_METADATA_INVALID: + reason = "STATUS_LOG_METADATA_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_METADATA_INCONSISTENT: + reason = "STATUS_LOG_METADATA_INCONSISTENT"; + break; + case 
MD_NTSTATUS_WIN_STATUS_LOG_RESERVATION_INVALID: + reason = "STATUS_LOG_RESERVATION_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_CANT_DELETE: + reason = "STATUS_LOG_CANT_DELETE"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_CONTAINER_LIMIT_EXCEEDED: + reason = "STATUS_LOG_CONTAINER_LIMIT_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_START_OF_LOG: + reason = "STATUS_LOG_START_OF_LOG"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_POLICY_ALREADY_INSTALLED: + reason = "STATUS_LOG_POLICY_ALREADY_INSTALLED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_POLICY_NOT_INSTALLED: + reason = "STATUS_LOG_POLICY_NOT_INSTALLED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_POLICY_INVALID: + reason = "STATUS_LOG_POLICY_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_POLICY_CONFLICT: + reason = "STATUS_LOG_POLICY_CONFLICT"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_PINNED_ARCHIVE_TAIL: + reason = "STATUS_LOG_PINNED_ARCHIVE_TAIL"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_RECORD_NONEXISTENT: + reason = "STATUS_LOG_RECORD_NONEXISTENT"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_RECORDS_RESERVED_INVALID: + reason = "STATUS_LOG_RECORDS_RESERVED_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_SPACE_RESERVED_INVALID: + reason = "STATUS_LOG_SPACE_RESERVED_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_TAIL_INVALID: + reason = "STATUS_LOG_TAIL_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_FULL: + reason = "STATUS_LOG_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_MULTIPLEXED: + reason = "STATUS_LOG_MULTIPLEXED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_DEDICATED: + reason = "STATUS_LOG_DEDICATED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_ARCHIVE_NOT_IN_PROGRESS: + reason = "STATUS_LOG_ARCHIVE_NOT_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_ARCHIVE_IN_PROGRESS: + reason = "STATUS_LOG_ARCHIVE_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_EPHEMERAL: + reason = "STATUS_LOG_EPHEMERAL"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_NOT_ENOUGH_CONTAINERS: + reason = "STATUS_LOG_NOT_ENOUGH_CONTAINERS"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_CLIENT_ALREADY_REGISTERED: + reason = "STATUS_LOG_CLIENT_ALREADY_REGISTERED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_CLIENT_NOT_REGISTERED: + reason = "STATUS_LOG_CLIENT_NOT_REGISTERED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_FULL_HANDLER_IN_PROGRESS: + reason = "STATUS_LOG_FULL_HANDLER_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_CONTAINER_READ_FAILED: + reason = "STATUS_LOG_CONTAINER_READ_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_CONTAINER_WRITE_FAILED: + reason = "STATUS_LOG_CONTAINER_WRITE_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_CONTAINER_OPEN_FAILED: + reason = "STATUS_LOG_CONTAINER_OPEN_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_CONTAINER_STATE_INVALID: + reason = "STATUS_LOG_CONTAINER_STATE_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_STATE_INVALID: + reason = "STATUS_LOG_STATE_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_PINNED: + reason = "STATUS_LOG_PINNED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_METADATA_FLUSH_FAILED: + reason = "STATUS_LOG_METADATA_FLUSH_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_INCONSISTENT_SECURITY: + reason = "STATUS_LOG_INCONSISTENT_SECURITY"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_APPENDED_FLUSH_FAILED: + reason = "STATUS_LOG_APPENDED_FLUSH_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_LOG_PINNED_RESERVATION: + reason = "STATUS_LOG_PINNED_RESERVATION"; + break; + case 
MD_NTSTATUS_WIN_STATUS_VIDEO_HUNG_DISPLAY_DRIVER_THREAD: + reason = "STATUS_VIDEO_HUNG_DISPLAY_DRIVER_THREAD"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_NO_HANDLER_DEFINED: + reason = "STATUS_FLT_NO_HANDLER_DEFINED"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_CONTEXT_ALREADY_DEFINED: + reason = "STATUS_FLT_CONTEXT_ALREADY_DEFINED"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_INVALID_ASYNCHRONOUS_REQUEST: + reason = "STATUS_FLT_INVALID_ASYNCHRONOUS_REQUEST"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_DISALLOW_FAST_IO: + reason = "STATUS_FLT_DISALLOW_FAST_IO"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_INVALID_NAME_REQUEST: + reason = "STATUS_FLT_INVALID_NAME_REQUEST"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_NOT_SAFE_TO_POST_OPERATION: + reason = "STATUS_FLT_NOT_SAFE_TO_POST_OPERATION"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_NOT_INITIALIZED: + reason = "STATUS_FLT_NOT_INITIALIZED"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_FILTER_NOT_READY: + reason = "STATUS_FLT_FILTER_NOT_READY"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_POST_OPERATION_CLEANUP: + reason = "STATUS_FLT_POST_OPERATION_CLEANUP"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_INTERNAL_ERROR: + reason = "STATUS_FLT_INTERNAL_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_DELETING_OBJECT: + reason = "STATUS_FLT_DELETING_OBJECT"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_MUST_BE_NONPAGED_POOL: + reason = "STATUS_FLT_MUST_BE_NONPAGED_POOL"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_DUPLICATE_ENTRY: + reason = "STATUS_FLT_DUPLICATE_ENTRY"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_CBDQ_DISABLED: + reason = "STATUS_FLT_CBDQ_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_DO_NOT_ATTACH: + reason = "STATUS_FLT_DO_NOT_ATTACH"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_DO_NOT_DETACH: + reason = "STATUS_FLT_DO_NOT_DETACH"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_INSTANCE_ALTITUDE_COLLISION: + reason = "STATUS_FLT_INSTANCE_ALTITUDE_COLLISION"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_INSTANCE_NAME_COLLISION: + reason = "STATUS_FLT_INSTANCE_NAME_COLLISION"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_FILTER_NOT_FOUND: + reason = "STATUS_FLT_FILTER_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_VOLUME_NOT_FOUND: + reason = "STATUS_FLT_VOLUME_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_INSTANCE_NOT_FOUND: + reason = "STATUS_FLT_INSTANCE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_CONTEXT_ALLOCATION_NOT_FOUND: + reason = "STATUS_FLT_CONTEXT_ALLOCATION_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_INVALID_CONTEXT_REGISTRATION: + reason = "STATUS_FLT_INVALID_CONTEXT_REGISTRATION"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_NAME_CACHE_MISS: + reason = "STATUS_FLT_NAME_CACHE_MISS"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_NO_DEVICE_OBJECT: + reason = "STATUS_FLT_NO_DEVICE_OBJECT"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_VOLUME_ALREADY_MOUNTED: + reason = "STATUS_FLT_VOLUME_ALREADY_MOUNTED"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_ALREADY_ENLISTED: + reason = "STATUS_FLT_ALREADY_ENLISTED"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_CONTEXT_ALREADY_LINKED: + reason = "STATUS_FLT_CONTEXT_ALREADY_LINKED"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_NO_WAITER_FOR_REPLY: + reason = "STATUS_FLT_NO_WAITER_FOR_REPLY"; + break; + case MD_NTSTATUS_WIN_STATUS_FLT_REGISTRATION_BUSY: + reason = "STATUS_FLT_REGISTRATION_BUSY"; + break; + case MD_NTSTATUS_WIN_STATUS_MONITOR_NO_DESCRIPTOR: + reason = "STATUS_MONITOR_NO_DESCRIPTOR"; + break; + case 
MD_NTSTATUS_WIN_STATUS_MONITOR_UNKNOWN_DESCRIPTOR_FORMAT: + reason = "STATUS_MONITOR_UNKNOWN_DESCRIPTOR_FORMAT"; + break; + case MD_NTSTATUS_WIN_STATUS_MONITOR_INVALID_DESCRIPTOR_CHECKSUM: + reason = "STATUS_MONITOR_INVALID_DESCRIPTOR_CHECKSUM"; + break; + case MD_NTSTATUS_WIN_STATUS_MONITOR_INVALID_STANDARD_TIMING_BLOCK: + reason = "STATUS_MONITOR_INVALID_STANDARD_TIMING_BLOCK"; + break; + case MD_NTSTATUS_WIN_STATUS_MONITOR_WMI_DATABLOCK_REGISTRATION_FAILED: + reason = "STATUS_MONITOR_WMI_DATABLOCK_REGISTRATION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_MONITOR_INVALID_SERIAL_NUMBER_MONDSC_BLOCK: + reason = "STATUS_MONITOR_INVALID_SERIAL_NUMBER_MONDSC_BLOCK"; + break; + case MD_NTSTATUS_WIN_STATUS_MONITOR_INVALID_USER_FRIENDLY_MONDSC_BLOCK: + reason = "STATUS_MONITOR_INVALID_USER_FRIENDLY_MONDSC_BLOCK"; + break; + case MD_NTSTATUS_WIN_STATUS_MONITOR_NO_MORE_DESCRIPTOR_DATA: + reason = "STATUS_MONITOR_NO_MORE_DESCRIPTOR_DATA"; + break; + case MD_NTSTATUS_WIN_STATUS_MONITOR_INVALID_DETAILED_TIMING_BLOCK: + reason = "STATUS_MONITOR_INVALID_DETAILED_TIMING_BLOCK"; + break; + case MD_NTSTATUS_WIN_STATUS_MONITOR_INVALID_MANUFACTURE_DATE: + reason = "STATUS_MONITOR_INVALID_MANUFACTURE_DATE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_NOT_EXCLUSIVE_MODE_OWNER: + reason = "STATUS_GRAPHICS_NOT_EXCLUSIVE_MODE_OWNER"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INSUFFICIENT_DMA_BUFFER: + reason = "STATUS_GRAPHICS_INSUFFICIENT_DMA_BUFFER"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_DISPLAY_ADAPTER: + reason = "STATUS_GRAPHICS_INVALID_DISPLAY_ADAPTER"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_ADAPTER_WAS_RESET: + reason = "STATUS_GRAPHICS_ADAPTER_WAS_RESET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_DRIVER_MODEL: + reason = "STATUS_GRAPHICS_INVALID_DRIVER_MODEL"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_PRESENT_MODE_CHANGED: + reason = "STATUS_GRAPHICS_PRESENT_MODE_CHANGED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_PRESENT_OCCLUDED: + reason = "STATUS_GRAPHICS_PRESENT_OCCLUDED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_PRESENT_DENIED: + reason = "STATUS_GRAPHICS_PRESENT_DENIED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_CANNOTCOLORCONVERT: + reason = "STATUS_GRAPHICS_CANNOTCOLORCONVERT"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_DRIVER_MISMATCH: + reason = "STATUS_GRAPHICS_DRIVER_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_PRESENT_REDIRECTION_DISABLED: + reason = "STATUS_GRAPHICS_PRESENT_REDIRECTION_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_PRESENT_UNOCCLUDED: + reason = "STATUS_GRAPHICS_PRESENT_UNOCCLUDED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_WINDOWDC_NOT_AVAILABLE: + reason = "STATUS_GRAPHICS_WINDOWDC_NOT_AVAILABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_WINDOWLESS_PRESENT_DISABLED: + reason = "STATUS_GRAPHICS_WINDOWLESS_PRESENT_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_VIDEO_MEMORY: + reason = "STATUS_GRAPHICS_NO_VIDEO_MEMORY"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_CANT_LOCK_MEMORY: + reason = "STATUS_GRAPHICS_CANT_LOCK_MEMORY"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_ALLOCATION_BUSY: + reason = "STATUS_GRAPHICS_ALLOCATION_BUSY"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_TOO_MANY_REFERENCES: + reason = "STATUS_GRAPHICS_TOO_MANY_REFERENCES"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_TRY_AGAIN_LATER: + reason = "STATUS_GRAPHICS_TRY_AGAIN_LATER"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_TRY_AGAIN_NOW: + reason = 
"STATUS_GRAPHICS_TRY_AGAIN_NOW"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_ALLOCATION_INVALID: + reason = "STATUS_GRAPHICS_ALLOCATION_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_UNSWIZZLING_APERTURE_UNAVAILABLE: + reason = "STATUS_GRAPHICS_UNSWIZZLING_APERTURE_UNAVAILABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_UNSWIZZLING_APERTURE_UNSUPPORTED: + reason = "STATUS_GRAPHICS_UNSWIZZLING_APERTURE_UNSUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_CANT_EVICT_PINNED_ALLOCATION: + reason = "STATUS_GRAPHICS_CANT_EVICT_PINNED_ALLOCATION"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_ALLOCATION_USAGE: + reason = "STATUS_GRAPHICS_INVALID_ALLOCATION_USAGE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_CANT_RENDER_LOCKED_ALLOCATION: + reason = "STATUS_GRAPHICS_CANT_RENDER_LOCKED_ALLOCATION"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_ALLOCATION_CLOSED: + reason = "STATUS_GRAPHICS_ALLOCATION_CLOSED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_ALLOCATION_INSTANCE: + reason = "STATUS_GRAPHICS_INVALID_ALLOCATION_INSTANCE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_ALLOCATION_HANDLE: + reason = "STATUS_GRAPHICS_INVALID_ALLOCATION_HANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_WRONG_ALLOCATION_DEVICE: + reason = "STATUS_GRAPHICS_WRONG_ALLOCATION_DEVICE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_ALLOCATION_CONTENT_LOST: + reason = "STATUS_GRAPHICS_ALLOCATION_CONTENT_LOST"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_GPU_EXCEPTION_ON_DEVICE: + reason = "STATUS_GRAPHICS_GPU_EXCEPTION_ON_DEVICE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDPN_TOPOLOGY: + reason = "STATUS_GRAPHICS_INVALID_VIDPN_TOPOLOGY"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_VIDPN_TOPOLOGY_NOT_SUPPORTED: + reason = "STATUS_GRAPHICS_VIDPN_TOPOLOGY_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_VIDPN_TOPOLOGY_CURRENTLY_NOT_SUPPORTED: + reason = "STATUS_GRAPHICS_VIDPN_TOPOLOGY_CURRENTLY_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDPN: + reason = "STATUS_GRAPHICS_INVALID_VIDPN"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_SOURCE: + reason = "STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_SOURCE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_TARGET: + reason = "STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_TARGET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_VIDPN_MODALITY_NOT_SUPPORTED: + reason = "STATUS_GRAPHICS_VIDPN_MODALITY_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDPN_SOURCEMODESET: + reason = "STATUS_GRAPHICS_INVALID_VIDPN_SOURCEMODESET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDPN_TARGETMODESET: + reason = "STATUS_GRAPHICS_INVALID_VIDPN_TARGETMODESET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_FREQUENCY: + reason = "STATUS_GRAPHICS_INVALID_FREQUENCY"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_ACTIVE_REGION: + reason = "STATUS_GRAPHICS_INVALID_ACTIVE_REGION"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_TOTAL_REGION: + reason = "STATUS_GRAPHICS_INVALID_TOTAL_REGION"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_SOURCE_MODE: + reason = "STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_SOURCE_MODE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_TARGET_MODE: + reason = "STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_TARGET_MODE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_PINNED_MODE_MUST_REMAIN_IN_SET: + 
reason = "STATUS_GRAPHICS_PINNED_MODE_MUST_REMAIN_IN_SET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_PATH_ALREADY_IN_TOPOLOGY: + reason = "STATUS_GRAPHICS_PATH_ALREADY_IN_TOPOLOGY"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_MODE_ALREADY_IN_MODESET: + reason = "STATUS_GRAPHICS_MODE_ALREADY_IN_MODESET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDEOPRESENTSOURCESET: + reason = "STATUS_GRAPHICS_INVALID_VIDEOPRESENTSOURCESET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDEOPRESENTTARGETSET: + reason = "STATUS_GRAPHICS_INVALID_VIDEOPRESENTTARGETSET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_SOURCE_ALREADY_IN_SET: + reason = "STATUS_GRAPHICS_SOURCE_ALREADY_IN_SET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_TARGET_ALREADY_IN_SET: + reason = "STATUS_GRAPHICS_TARGET_ALREADY_IN_SET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDPN_PRESENT_PATH: + reason = "STATUS_GRAPHICS_INVALID_VIDPN_PRESENT_PATH"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_RECOMMENDED_VIDPN_TOPOLOGY: + reason = "STATUS_GRAPHICS_NO_RECOMMENDED_VIDPN_TOPOLOGY"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGESET: + reason = "STATUS_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGESET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGE: + reason = "STATUS_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_FREQUENCYRANGE_NOT_IN_SET: + reason = "STATUS_GRAPHICS_FREQUENCYRANGE_NOT_IN_SET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_FREQUENCYRANGE_ALREADY_IN_SET: + reason = "STATUS_GRAPHICS_FREQUENCYRANGE_ALREADY_IN_SET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_STALE_MODESET: + reason = "STATUS_GRAPHICS_STALE_MODESET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITOR_SOURCEMODESET: + reason = "STATUS_GRAPHICS_INVALID_MONITOR_SOURCEMODESET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITOR_SOURCE_MODE: + reason = "STATUS_GRAPHICS_INVALID_MONITOR_SOURCE_MODE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_RECOMMENDED_FUNCTIONAL_VIDPN: + reason = "STATUS_GRAPHICS_NO_RECOMMENDED_FUNCTIONAL_VIDPN"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_MODE_ID_MUST_BE_UNIQUE: + reason = "STATUS_GRAPHICS_MODE_ID_MUST_BE_UNIQUE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_EMPTY_ADAPTER_MONITOR_MODE_SUPPORT_INTERSECTION: + reason = "STATUS_GRAPHICS_EMPTY_ADAPTER_MONITOR_MODE_SUPPORT_INTERSECTION"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_VIDEO_PRESENT_TARGETS_LESS_THAN_SOURCES: + reason = "STATUS_GRAPHICS_VIDEO_PRESENT_TARGETS_LESS_THAN_SOURCES"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_PATH_NOT_IN_TOPOLOGY: + reason = "STATUS_GRAPHICS_PATH_NOT_IN_TOPOLOGY"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_ADAPTER_MUST_HAVE_AT_LEAST_ONE_SOURCE: + reason = "STATUS_GRAPHICS_ADAPTER_MUST_HAVE_AT_LEAST_ONE_SOURCE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_ADAPTER_MUST_HAVE_AT_LEAST_ONE_TARGET: + reason = "STATUS_GRAPHICS_ADAPTER_MUST_HAVE_AT_LEAST_ONE_TARGET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITORDESCRIPTORSET: + reason = "STATUS_GRAPHICS_INVALID_MONITORDESCRIPTORSET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITORDESCRIPTOR: + reason = "STATUS_GRAPHICS_INVALID_MONITORDESCRIPTOR"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_MONITORDESCRIPTOR_NOT_IN_SET: + reason = "STATUS_GRAPHICS_MONITORDESCRIPTOR_NOT_IN_SET"; + break; + case 
MD_NTSTATUS_WIN_STATUS_GRAPHICS_MONITORDESCRIPTOR_ALREADY_IN_SET: + reason = "STATUS_GRAPHICS_MONITORDESCRIPTOR_ALREADY_IN_SET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_MONITORDESCRIPTOR_ID_MUST_BE_UNIQUE: + reason = "STATUS_GRAPHICS_MONITORDESCRIPTOR_ID_MUST_BE_UNIQUE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDPN_TARGET_SUBSET_TYPE: + reason = "STATUS_GRAPHICS_INVALID_VIDPN_TARGET_SUBSET_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_RESOURCES_NOT_RELATED: + reason = "STATUS_GRAPHICS_RESOURCES_NOT_RELATED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_SOURCE_ID_MUST_BE_UNIQUE: + reason = "STATUS_GRAPHICS_SOURCE_ID_MUST_BE_UNIQUE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_TARGET_ID_MUST_BE_UNIQUE: + reason = "STATUS_GRAPHICS_TARGET_ID_MUST_BE_UNIQUE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_AVAILABLE_VIDPN_TARGET: + reason = "STATUS_GRAPHICS_NO_AVAILABLE_VIDPN_TARGET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_MONITOR_COULD_NOT_BE_ASSOCIATED_WITH_ADAPTER: + reason = "STATUS_GRAPHICS_MONITOR_COULD_NOT_BE_ASSOCIATED_WITH_ADAPTER"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_VIDPNMGR: + reason = "STATUS_GRAPHICS_NO_VIDPNMGR"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_ACTIVE_VIDPN: + reason = "STATUS_GRAPHICS_NO_ACTIVE_VIDPN"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_STALE_VIDPN_TOPOLOGY: + reason = "STATUS_GRAPHICS_STALE_VIDPN_TOPOLOGY"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_MONITOR_NOT_CONNECTED: + reason = "STATUS_GRAPHICS_MONITOR_NOT_CONNECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_SOURCE_NOT_IN_TOPOLOGY: + reason = "STATUS_GRAPHICS_SOURCE_NOT_IN_TOPOLOGY"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_PRIMARYSURFACE_SIZE: + reason = "STATUS_GRAPHICS_INVALID_PRIMARYSURFACE_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VISIBLEREGION_SIZE: + reason = "STATUS_GRAPHICS_INVALID_VISIBLEREGION_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_STRIDE: + reason = "STATUS_GRAPHICS_INVALID_STRIDE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_PIXELFORMAT: + reason = "STATUS_GRAPHICS_INVALID_PIXELFORMAT"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_COLORBASIS: + reason = "STATUS_GRAPHICS_INVALID_COLORBASIS"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_PIXELVALUEACCESSMODE: + reason = "STATUS_GRAPHICS_INVALID_PIXELVALUEACCESSMODE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_TARGET_NOT_IN_TOPOLOGY: + reason = "STATUS_GRAPHICS_TARGET_NOT_IN_TOPOLOGY"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_DISPLAY_MODE_MANAGEMENT_SUPPORT: + reason = "STATUS_GRAPHICS_NO_DISPLAY_MODE_MANAGEMENT_SUPPORT"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_VIDPN_SOURCE_IN_USE: + reason = "STATUS_GRAPHICS_VIDPN_SOURCE_IN_USE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_CANT_ACCESS_ACTIVE_VIDPN: + reason = "STATUS_GRAPHICS_CANT_ACCESS_ACTIVE_VIDPN"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_PATH_IMPORTANCE_ORDINAL: + reason = "STATUS_GRAPHICS_INVALID_PATH_IMPORTANCE_ORDINAL"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_PATH_CONTENT_GEOMETRY_TRANSFORMATION: + reason = "STATUS_GRAPHICS_INVALID_PATH_CONTENT_GEOMETRY_TRANSFORMATION"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_PATH_CONTENT_GEOMETRY_TRANSFORMATION_NOT_SUPPORTED: + reason = "STATUS_GRAPHICS_PATH_CONTENT_GEOMETRY_TRANSFORMATION_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_GAMMA_RAMP: + reason = 
"STATUS_GRAPHICS_INVALID_GAMMA_RAMP"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_GAMMA_RAMP_NOT_SUPPORTED: + reason = "STATUS_GRAPHICS_GAMMA_RAMP_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_MULTISAMPLING_NOT_SUPPORTED: + reason = "STATUS_GRAPHICS_MULTISAMPLING_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_MODE_NOT_IN_MODESET: + reason = "STATUS_GRAPHICS_MODE_NOT_IN_MODESET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_VIDPN_TOPOLOGY_RECOMMENDATION_REASON: + reason = "STATUS_GRAPHICS_INVALID_VIDPN_TOPOLOGY_RECOMMENDATION_REASON"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_PATH_CONTENT_TYPE: + reason = "STATUS_GRAPHICS_INVALID_PATH_CONTENT_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_COPYPROTECTION_TYPE: + reason = "STATUS_GRAPHICS_INVALID_COPYPROTECTION_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_UNASSIGNED_MODESET_ALREADY_EXISTS: + reason = "STATUS_GRAPHICS_UNASSIGNED_MODESET_ALREADY_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_SCANLINE_ORDERING: + reason = "STATUS_GRAPHICS_INVALID_SCANLINE_ORDERING"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_TOPOLOGY_CHANGES_NOT_ALLOWED: + reason = "STATUS_GRAPHICS_TOPOLOGY_CHANGES_NOT_ALLOWED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_AVAILABLE_IMPORTANCE_ORDINALS: + reason = "STATUS_GRAPHICS_NO_AVAILABLE_IMPORTANCE_ORDINALS"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INCOMPATIBLE_PRIVATE_FORMAT: + reason = "STATUS_GRAPHICS_INCOMPATIBLE_PRIVATE_FORMAT"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MODE_PRUNING_ALGORITHM: + reason = "STATUS_GRAPHICS_INVALID_MODE_PRUNING_ALGORITHM"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITOR_CAPABILITY_ORIGIN: + reason = "STATUS_GRAPHICS_INVALID_MONITOR_CAPABILITY_ORIGIN"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGE_CONSTRAINT: + reason = "STATUS_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGE_CONSTRAINT"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_MAX_NUM_PATHS_REACHED: + reason = "STATUS_GRAPHICS_MAX_NUM_PATHS_REACHED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_CANCEL_VIDPN_TOPOLOGY_AUGMENTATION: + reason = "STATUS_GRAPHICS_CANCEL_VIDPN_TOPOLOGY_AUGMENTATION"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_CLIENT_TYPE: + reason = "STATUS_GRAPHICS_INVALID_CLIENT_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_CLIENTVIDPN_NOT_SET: + reason = "STATUS_GRAPHICS_CLIENTVIDPN_NOT_SET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_SPECIFIED_CHILD_ALREADY_CONNECTED: + reason = "STATUS_GRAPHICS_SPECIFIED_CHILD_ALREADY_CONNECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_CHILD_DESCRIPTOR_NOT_SUPPORTED: + reason = "STATUS_GRAPHICS_CHILD_DESCRIPTOR_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_NOT_A_LINKED_ADAPTER: + reason = "STATUS_GRAPHICS_NOT_A_LINKED_ADAPTER"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_LEADLINK_NOT_ENUMERATED: + reason = "STATUS_GRAPHICS_LEADLINK_NOT_ENUMERATED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_CHAINLINKS_NOT_ENUMERATED: + reason = "STATUS_GRAPHICS_CHAINLINKS_NOT_ENUMERATED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_ADAPTER_CHAIN_NOT_READY: + reason = "STATUS_GRAPHICS_ADAPTER_CHAIN_NOT_READY"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_CHAINLINKS_NOT_STARTED: + reason = "STATUS_GRAPHICS_CHAINLINKS_NOT_STARTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_CHAINLINKS_NOT_POWERED_ON: + reason = 
"STATUS_GRAPHICS_CHAINLINKS_NOT_POWERED_ON"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INCONSISTENT_DEVICE_LINK_STATE: + reason = "STATUS_GRAPHICS_INCONSISTENT_DEVICE_LINK_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_NOT_POST_DEVICE_DRIVER: + reason = "STATUS_GRAPHICS_NOT_POST_DEVICE_DRIVER"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_ADAPTER_ACCESS_NOT_EXCLUDED: + reason = "STATUS_GRAPHICS_ADAPTER_ACCESS_NOT_EXCLUDED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_NOT_SUPPORTED: + reason = "STATUS_GRAPHICS_OPM_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_COPP_NOT_SUPPORTED: + reason = "STATUS_GRAPHICS_COPP_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_UAB_NOT_SUPPORTED: + reason = "STATUS_GRAPHICS_UAB_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_INVALID_ENCRYPTED_PARAMETERS: + reason = "STATUS_GRAPHICS_OPM_INVALID_ENCRYPTED_PARAMETERS"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_NO_PROTECTED_OUTPUTS_EXIST: + reason = "STATUS_GRAPHICS_OPM_NO_PROTECTED_OUTPUTS_EXIST"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_INTERNAL_ERROR: + reason = "STATUS_GRAPHICS_OPM_INTERNAL_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_INVALID_HANDLE: + reason = "STATUS_GRAPHICS_OPM_INVALID_HANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_PVP_INVALID_CERTIFICATE_LENGTH: + reason = "STATUS_GRAPHICS_PVP_INVALID_CERTIFICATE_LENGTH"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_SPANNING_MODE_ENABLED: + reason = "STATUS_GRAPHICS_OPM_SPANNING_MODE_ENABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_THEATER_MODE_ENABLED: + reason = "STATUS_GRAPHICS_OPM_THEATER_MODE_ENABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_PVP_HFS_FAILED: + reason = "STATUS_GRAPHICS_PVP_HFS_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_INVALID_SRM: + reason = "STATUS_GRAPHICS_OPM_INVALID_SRM"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_HDCP: + reason = "STATUS_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_HDCP"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_ACP: + reason = "STATUS_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_ACP"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_CGMSA: + reason = "STATUS_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_CGMSA"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_HDCP_SRM_NEVER_SET: + reason = "STATUS_GRAPHICS_OPM_HDCP_SRM_NEVER_SET"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_RESOLUTION_TOO_HIGH: + reason = "STATUS_GRAPHICS_OPM_RESOLUTION_TOO_HIGH"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_ALL_HDCP_HARDWARE_ALREADY_IN_USE: + reason = "STATUS_GRAPHICS_OPM_ALL_HDCP_HARDWARE_ALREADY_IN_USE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_PROTECTED_OUTPUT_NO_LONGER_EXISTS: + reason = "STATUS_GRAPHICS_OPM_PROTECTED_OUTPUT_NO_LONGER_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_PROTECTED_OUTPUT_DOES_NOT_HAVE_COPP_SEMANTICS: + reason = "STATUS_GRAPHICS_OPM_PROTECTED_OUTPUT_DOES_NOT_HAVE_COPP_SEMANTICS"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_INVALID_INFORMATION_REQUEST: + reason = "STATUS_GRAPHICS_OPM_INVALID_INFORMATION_REQUEST"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_DRIVER_INTERNAL_ERROR: + reason = "STATUS_GRAPHICS_OPM_DRIVER_INTERNAL_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_PROTECTED_OUTPUT_DOES_NOT_HAVE_OPM_SEMANTICS: + reason = 
"STATUS_GRAPHICS_OPM_PROTECTED_OUTPUT_DOES_NOT_HAVE_OPM_SEMANTICS"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_SIGNALING_NOT_SUPPORTED: + reason = "STATUS_GRAPHICS_OPM_SIGNALING_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_OPM_INVALID_CONFIGURATION_REQUEST: + reason = "STATUS_GRAPHICS_OPM_INVALID_CONFIGURATION_REQUEST"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_I2C_NOT_SUPPORTED: + reason = "STATUS_GRAPHICS_I2C_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_I2C_DEVICE_DOES_NOT_EXIST: + reason = "STATUS_GRAPHICS_I2C_DEVICE_DOES_NOT_EXIST"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_I2C_ERROR_TRANSMITTING_DATA: + reason = "STATUS_GRAPHICS_I2C_ERROR_TRANSMITTING_DATA"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_I2C_ERROR_RECEIVING_DATA: + reason = "STATUS_GRAPHICS_I2C_ERROR_RECEIVING_DATA"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_DDCCI_VCP_NOT_SUPPORTED: + reason = "STATUS_GRAPHICS_DDCCI_VCP_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_DDCCI_INVALID_DATA: + reason = "STATUS_GRAPHICS_DDCCI_INVALID_DATA"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_DDCCI_MONITOR_RETURNED_INVALID_TIMING_STATUS_BYTE: + reason = "STATUS_GRAPHICS_DDCCI_MONITOR_RETURNED_INVALID_TIMING_STATUS_BYTE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_DDCCI_INVALID_CAPABILITIES_STRING: + reason = "STATUS_GRAPHICS_DDCCI_INVALID_CAPABILITIES_STRING"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_MCA_INTERNAL_ERROR: + reason = "STATUS_GRAPHICS_MCA_INTERNAL_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_DDCCI_INVALID_MESSAGE_COMMAND: + reason = "STATUS_GRAPHICS_DDCCI_INVALID_MESSAGE_COMMAND"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_DDCCI_INVALID_MESSAGE_LENGTH: + reason = "STATUS_GRAPHICS_DDCCI_INVALID_MESSAGE_LENGTH"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_DDCCI_INVALID_MESSAGE_CHECKSUM: + reason = "STATUS_GRAPHICS_DDCCI_INVALID_MESSAGE_CHECKSUM"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_PHYSICAL_MONITOR_HANDLE: + reason = "STATUS_GRAPHICS_INVALID_PHYSICAL_MONITOR_HANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_MONITOR_NO_LONGER_EXISTS: + reason = "STATUS_GRAPHICS_MONITOR_NO_LONGER_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_ONLY_CONSOLE_SESSION_SUPPORTED: + reason = "STATUS_GRAPHICS_ONLY_CONSOLE_SESSION_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_DISPLAY_DEVICE_CORRESPONDS_TO_NAME: + reason = "STATUS_GRAPHICS_NO_DISPLAY_DEVICE_CORRESPONDS_TO_NAME"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_DISPLAY_DEVICE_NOT_ATTACHED_TO_DESKTOP: + reason = "STATUS_GRAPHICS_DISPLAY_DEVICE_NOT_ATTACHED_TO_DESKTOP"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_MIRRORING_DEVICES_NOT_SUPPORTED: + reason = "STATUS_GRAPHICS_MIRRORING_DEVICES_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INVALID_POINTER: + reason = "STATUS_GRAPHICS_INVALID_POINTER"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_NO_MONITORS_CORRESPOND_TO_DISPLAY_DEVICE: + reason = "STATUS_GRAPHICS_NO_MONITORS_CORRESPOND_TO_DISPLAY_DEVICE"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_PARAMETER_ARRAY_TOO_SMALL: + reason = "STATUS_GRAPHICS_PARAMETER_ARRAY_TOO_SMALL"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_INTERNAL_ERROR: + reason = "STATUS_GRAPHICS_INTERNAL_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_GRAPHICS_SESSION_TYPE_CHANGE_IN_PROGRESS: + reason = "STATUS_GRAPHICS_SESSION_TYPE_CHANGE_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_LOCKED_VOLUME: + reason 
= "STATUS_FVE_LOCKED_VOLUME"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_NOT_ENCRYPTED: + reason = "STATUS_FVE_NOT_ENCRYPTED"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_BAD_INFORMATION: + reason = "STATUS_FVE_BAD_INFORMATION"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_TOO_SMALL: + reason = "STATUS_FVE_TOO_SMALL"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_FAILED_WRONG_FS: + reason = "STATUS_FVE_FAILED_WRONG_FS"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_BAD_PARTITION_SIZE: + reason = "STATUS_FVE_BAD_PARTITION_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_FS_NOT_EXTENDED: + reason = "STATUS_FVE_FS_NOT_EXTENDED"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_FS_MOUNTED: + reason = "STATUS_FVE_FS_MOUNTED"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_NO_LICENSE: + reason = "STATUS_FVE_NO_LICENSE"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_ACTION_NOT_ALLOWED: + reason = "STATUS_FVE_ACTION_NOT_ALLOWED"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_BAD_DATA: + reason = "STATUS_FVE_BAD_DATA"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_VOLUME_NOT_BOUND: + reason = "STATUS_FVE_VOLUME_NOT_BOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_NOT_DATA_VOLUME: + reason = "STATUS_FVE_NOT_DATA_VOLUME"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_CONV_READ_ERROR: + reason = "STATUS_FVE_CONV_READ_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_CONV_WRITE_ERROR: + reason = "STATUS_FVE_CONV_WRITE_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_OVERLAPPED_UPDATE: + reason = "STATUS_FVE_OVERLAPPED_UPDATE"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_FAILED_SECTOR_SIZE: + reason = "STATUS_FVE_FAILED_SECTOR_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_FAILED_AUTHENTICATION: + reason = "STATUS_FVE_FAILED_AUTHENTICATION"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_NOT_OS_VOLUME: + reason = "STATUS_FVE_NOT_OS_VOLUME"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_KEYFILE_NOT_FOUND: + reason = "STATUS_FVE_KEYFILE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_KEYFILE_INVALID: + reason = "STATUS_FVE_KEYFILE_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_KEYFILE_NO_VMK: + reason = "STATUS_FVE_KEYFILE_NO_VMK"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_TPM_DISABLED: + reason = "STATUS_FVE_TPM_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_TPM_SRK_AUTH_NOT_ZERO: + reason = "STATUS_FVE_TPM_SRK_AUTH_NOT_ZERO"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_TPM_INVALID_PCR: + reason = "STATUS_FVE_TPM_INVALID_PCR"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_TPM_NO_VMK: + reason = "STATUS_FVE_TPM_NO_VMK"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_PIN_INVALID: + reason = "STATUS_FVE_PIN_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_AUTH_INVALID_APPLICATION: + reason = "STATUS_FVE_AUTH_INVALID_APPLICATION"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_AUTH_INVALID_CONFIG: + reason = "STATUS_FVE_AUTH_INVALID_CONFIG"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_DEBUGGER_ENABLED: + reason = "STATUS_FVE_DEBUGGER_ENABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_DRY_RUN_FAILED: + reason = "STATUS_FVE_DRY_RUN_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_BAD_METADATA_POINTER: + reason = "STATUS_FVE_BAD_METADATA_POINTER"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_OLD_METADATA_COPY: + reason = "STATUS_FVE_OLD_METADATA_COPY"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_REBOOT_REQUIRED: + reason = "STATUS_FVE_REBOOT_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_RAW_ACCESS: + reason = "STATUS_FVE_RAW_ACCESS"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_RAW_BLOCKED: + reason = 
"STATUS_FVE_RAW_BLOCKED"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_NO_AUTOUNLOCK_MASTER_KEY: + reason = "STATUS_FVE_NO_AUTOUNLOCK_MASTER_KEY"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_MOR_FAILED: + reason = "STATUS_FVE_MOR_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_NO_FEATURE_LICENSE: + reason = "STATUS_FVE_NO_FEATURE_LICENSE"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_POLICY_USER_DISABLE_RDV_NOT_ALLOWED: + reason = "STATUS_FVE_POLICY_USER_DISABLE_RDV_NOT_ALLOWED"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_CONV_RECOVERY_FAILED: + reason = "STATUS_FVE_CONV_RECOVERY_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_VIRTUALIZED_SPACE_TOO_BIG: + reason = "STATUS_FVE_VIRTUALIZED_SPACE_TOO_BIG"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_INVALID_DATUM_TYPE: + reason = "STATUS_FVE_INVALID_DATUM_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_VOLUME_TOO_SMALL: + reason = "STATUS_FVE_VOLUME_TOO_SMALL"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_ENH_PIN_INVALID: + reason = "STATUS_FVE_ENH_PIN_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_FULL_ENCRYPTION_NOT_ALLOWED_ON_TP_STORAGE: + reason = "STATUS_FVE_FULL_ENCRYPTION_NOT_ALLOWED_ON_TP_STORAGE"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_WIPE_NOT_ALLOWED_ON_TP_STORAGE: + reason = "STATUS_FVE_WIPE_NOT_ALLOWED_ON_TP_STORAGE"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_NOT_ALLOWED_ON_CSV_STACK: + reason = "STATUS_FVE_NOT_ALLOWED_ON_CSV_STACK"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_NOT_ALLOWED_ON_CLUSTER: + reason = "STATUS_FVE_NOT_ALLOWED_ON_CLUSTER"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_NOT_ALLOWED_TO_UPGRADE_WHILE_CONVERTING: + reason = "STATUS_FVE_NOT_ALLOWED_TO_UPGRADE_WHILE_CONVERTING"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_WIPE_CANCEL_NOT_APPLICABLE: + reason = "STATUS_FVE_WIPE_CANCEL_NOT_APPLICABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_EDRIVE_DRY_RUN_FAILED: + reason = "STATUS_FVE_EDRIVE_DRY_RUN_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_SECUREBOOT_DISABLED: + reason = "STATUS_FVE_SECUREBOOT_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_SECUREBOOT_CONFIG_CHANGE: + reason = "STATUS_FVE_SECUREBOOT_CONFIG_CHANGE"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_DEVICE_LOCKEDOUT: + reason = "STATUS_FVE_DEVICE_LOCKEDOUT"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_VOLUME_EXTEND_PREVENTS_EOW_DECRYPT: + reason = "STATUS_FVE_VOLUME_EXTEND_PREVENTS_EOW_DECRYPT"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_NOT_DE_VOLUME: + reason = "STATUS_FVE_NOT_DE_VOLUME"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_PROTECTION_DISABLED: + reason = "STATUS_FVE_PROTECTION_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_FVE_PROTECTION_CANNOT_BE_DISABLED: + reason = "STATUS_FVE_PROTECTION_CANNOT_BE_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_CALLOUT_NOT_FOUND: + reason = "STATUS_FWP_CALLOUT_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_CONDITION_NOT_FOUND: + reason = "STATUS_FWP_CONDITION_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_FILTER_NOT_FOUND: + reason = "STATUS_FWP_FILTER_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_LAYER_NOT_FOUND: + reason = "STATUS_FWP_LAYER_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_PROVIDER_NOT_FOUND: + reason = "STATUS_FWP_PROVIDER_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_PROVIDER_CONTEXT_NOT_FOUND: + reason = "STATUS_FWP_PROVIDER_CONTEXT_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_SUBLAYER_NOT_FOUND: + reason = "STATUS_FWP_SUBLAYER_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_NOT_FOUND: + reason = 
"STATUS_FWP_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_ALREADY_EXISTS: + reason = "STATUS_FWP_ALREADY_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_IN_USE: + reason = "STATUS_FWP_IN_USE"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_DYNAMIC_SESSION_IN_PROGRESS: + reason = "STATUS_FWP_DYNAMIC_SESSION_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_WRONG_SESSION: + reason = "STATUS_FWP_WRONG_SESSION"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_NO_TXN_IN_PROGRESS: + reason = "STATUS_FWP_NO_TXN_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_TXN_IN_PROGRESS: + reason = "STATUS_FWP_TXN_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_TXN_ABORTED: + reason = "STATUS_FWP_TXN_ABORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_SESSION_ABORTED: + reason = "STATUS_FWP_SESSION_ABORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INCOMPATIBLE_TXN: + reason = "STATUS_FWP_INCOMPATIBLE_TXN"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_TIMEOUT: + reason = "STATUS_FWP_TIMEOUT"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_NET_EVENTS_DISABLED: + reason = "STATUS_FWP_NET_EVENTS_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INCOMPATIBLE_LAYER: + reason = "STATUS_FWP_INCOMPATIBLE_LAYER"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_KM_CLIENTS_ONLY: + reason = "STATUS_FWP_KM_CLIENTS_ONLY"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_LIFETIME_MISMATCH: + reason = "STATUS_FWP_LIFETIME_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_BUILTIN_OBJECT: + reason = "STATUS_FWP_BUILTIN_OBJECT"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_TOO_MANY_CALLOUTS: + reason = "STATUS_FWP_TOO_MANY_CALLOUTS"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_NOTIFICATION_DROPPED: + reason = "STATUS_FWP_NOTIFICATION_DROPPED"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_TRAFFIC_MISMATCH: + reason = "STATUS_FWP_TRAFFIC_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INCOMPATIBLE_SA_STATE: + reason = "STATUS_FWP_INCOMPATIBLE_SA_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_NULL_POINTER: + reason = "STATUS_FWP_NULL_POINTER"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INVALID_ENUMERATOR: + reason = "STATUS_FWP_INVALID_ENUMERATOR"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INVALID_FLAGS: + reason = "STATUS_FWP_INVALID_FLAGS"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INVALID_NET_MASK: + reason = "STATUS_FWP_INVALID_NET_MASK"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INVALID_RANGE: + reason = "STATUS_FWP_INVALID_RANGE"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INVALID_INTERVAL: + reason = "STATUS_FWP_INVALID_INTERVAL"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_ZERO_LENGTH_ARRAY: + reason = "STATUS_FWP_ZERO_LENGTH_ARRAY"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_NULL_DISPLAY_NAME: + reason = "STATUS_FWP_NULL_DISPLAY_NAME"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INVALID_ACTION_TYPE: + reason = "STATUS_FWP_INVALID_ACTION_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INVALID_WEIGHT: + reason = "STATUS_FWP_INVALID_WEIGHT"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_MATCH_TYPE_MISMATCH: + reason = "STATUS_FWP_MATCH_TYPE_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_TYPE_MISMATCH: + reason = "STATUS_FWP_TYPE_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_OUT_OF_BOUNDS: + reason = "STATUS_FWP_OUT_OF_BOUNDS"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_RESERVED: + reason = "STATUS_FWP_RESERVED"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_DUPLICATE_CONDITION: + reason = "STATUS_FWP_DUPLICATE_CONDITION"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_DUPLICATE_KEYMOD: + 
reason = "STATUS_FWP_DUPLICATE_KEYMOD"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_ACTION_INCOMPATIBLE_WITH_LAYER: + reason = "STATUS_FWP_ACTION_INCOMPATIBLE_WITH_LAYER"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_ACTION_INCOMPATIBLE_WITH_SUBLAYER: + reason = "STATUS_FWP_ACTION_INCOMPATIBLE_WITH_SUBLAYER"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_CONTEXT_INCOMPATIBLE_WITH_LAYER: + reason = "STATUS_FWP_CONTEXT_INCOMPATIBLE_WITH_LAYER"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_CONTEXT_INCOMPATIBLE_WITH_CALLOUT: + reason = "STATUS_FWP_CONTEXT_INCOMPATIBLE_WITH_CALLOUT"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INCOMPATIBLE_AUTH_METHOD: + reason = "STATUS_FWP_INCOMPATIBLE_AUTH_METHOD"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INCOMPATIBLE_DH_GROUP: + reason = "STATUS_FWP_INCOMPATIBLE_DH_GROUP"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_EM_NOT_SUPPORTED: + reason = "STATUS_FWP_EM_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_NEVER_MATCH: + reason = "STATUS_FWP_NEVER_MATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_PROVIDER_CONTEXT_MISMATCH: + reason = "STATUS_FWP_PROVIDER_CONTEXT_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INVALID_PARAMETER: + reason = "STATUS_FWP_INVALID_PARAMETER"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_TOO_MANY_SUBLAYERS: + reason = "STATUS_FWP_TOO_MANY_SUBLAYERS"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_CALLOUT_NOTIFICATION_FAILED: + reason = "STATUS_FWP_CALLOUT_NOTIFICATION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INVALID_AUTH_TRANSFORM: + reason = "STATUS_FWP_INVALID_AUTH_TRANSFORM"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INVALID_CIPHER_TRANSFORM: + reason = "STATUS_FWP_INVALID_CIPHER_TRANSFORM"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INCOMPATIBLE_CIPHER_TRANSFORM: + reason = "STATUS_FWP_INCOMPATIBLE_CIPHER_TRANSFORM"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INVALID_TRANSFORM_COMBINATION: + reason = "STATUS_FWP_INVALID_TRANSFORM_COMBINATION"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_DUPLICATE_AUTH_METHOD: + reason = "STATUS_FWP_DUPLICATE_AUTH_METHOD"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INVALID_TUNNEL_ENDPOINT: + reason = "STATUS_FWP_INVALID_TUNNEL_ENDPOINT"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_L2_DRIVER_NOT_READY: + reason = "STATUS_FWP_L2_DRIVER_NOT_READY"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_KEY_DICTATOR_ALREADY_REGISTERED: + reason = "STATUS_FWP_KEY_DICTATOR_ALREADY_REGISTERED"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_KEY_DICTATION_INVALID_KEYING_MATERIAL: + reason = "STATUS_FWP_KEY_DICTATION_INVALID_KEYING_MATERIAL"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_CONNECTIONS_DISABLED: + reason = "STATUS_FWP_CONNECTIONS_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INVALID_DNS_NAME: + reason = "STATUS_FWP_INVALID_DNS_NAME"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_STILL_ON: + reason = "STATUS_FWP_STILL_ON"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_IKEEXT_NOT_RUNNING: + reason = "STATUS_FWP_IKEEXT_NOT_RUNNING"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_TCPIP_NOT_READY: + reason = "STATUS_FWP_TCPIP_NOT_READY"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INJECT_HANDLE_CLOSING: + reason = "STATUS_FWP_INJECT_HANDLE_CLOSING"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_INJECT_HANDLE_STALE: + reason = "STATUS_FWP_INJECT_HANDLE_STALE"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_CANNOT_PEND: + reason = "STATUS_FWP_CANNOT_PEND"; + break; + case MD_NTSTATUS_WIN_STATUS_FWP_DROP_NOICMP: + reason = "STATUS_FWP_DROP_NOICMP"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_CLOSING: + reason = 
"STATUS_NDIS_CLOSING"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_BAD_VERSION: + reason = "STATUS_NDIS_BAD_VERSION"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_BAD_CHARACTERISTICS: + reason = "STATUS_NDIS_BAD_CHARACTERISTICS"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_ADAPTER_NOT_FOUND: + reason = "STATUS_NDIS_ADAPTER_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_OPEN_FAILED: + reason = "STATUS_NDIS_OPEN_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_DEVICE_FAILED: + reason = "STATUS_NDIS_DEVICE_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_MULTICAST_FULL: + reason = "STATUS_NDIS_MULTICAST_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_MULTICAST_EXISTS: + reason = "STATUS_NDIS_MULTICAST_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_MULTICAST_NOT_FOUND: + reason = "STATUS_NDIS_MULTICAST_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_REQUEST_ABORTED: + reason = "STATUS_NDIS_REQUEST_ABORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_RESET_IN_PROGRESS: + reason = "STATUS_NDIS_RESET_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_PACKET: + reason = "STATUS_NDIS_INVALID_PACKET"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_DEVICE_REQUEST: + reason = "STATUS_NDIS_INVALID_DEVICE_REQUEST"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_ADAPTER_NOT_READY: + reason = "STATUS_NDIS_ADAPTER_NOT_READY"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_LENGTH: + reason = "STATUS_NDIS_INVALID_LENGTH"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_DATA: + reason = "STATUS_NDIS_INVALID_DATA"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_BUFFER_TOO_SHORT: + reason = "STATUS_NDIS_BUFFER_TOO_SHORT"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_OID: + reason = "STATUS_NDIS_INVALID_OID"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_ADAPTER_REMOVED: + reason = "STATUS_NDIS_ADAPTER_REMOVED"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_UNSUPPORTED_MEDIA: + reason = "STATUS_NDIS_UNSUPPORTED_MEDIA"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_GROUP_ADDRESS_IN_USE: + reason = "STATUS_NDIS_GROUP_ADDRESS_IN_USE"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_FILE_NOT_FOUND: + reason = "STATUS_NDIS_FILE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_ERROR_READING_FILE: + reason = "STATUS_NDIS_ERROR_READING_FILE"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_ALREADY_MAPPED: + reason = "STATUS_NDIS_ALREADY_MAPPED"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_RESOURCE_CONFLICT: + reason = "STATUS_NDIS_RESOURCE_CONFLICT"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_MEDIA_DISCONNECTED: + reason = "STATUS_NDIS_MEDIA_DISCONNECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_ADDRESS: + reason = "STATUS_NDIS_INVALID_ADDRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_PAUSED: + reason = "STATUS_NDIS_PAUSED"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_INTERFACE_NOT_FOUND: + reason = "STATUS_NDIS_INTERFACE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_UNSUPPORTED_REVISION: + reason = "STATUS_NDIS_UNSUPPORTED_REVISION"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_PORT: + reason = "STATUS_NDIS_INVALID_PORT"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_INVALID_PORT_STATE: + reason = "STATUS_NDIS_INVALID_PORT_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_LOW_POWER_STATE: + reason = "STATUS_NDIS_LOW_POWER_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_REINIT_REQUIRED: + reason = "STATUS_NDIS_REINIT_REQUIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_NOT_SUPPORTED: + reason = "STATUS_NDIS_NOT_SUPPORTED"; + 
break; + case MD_NTSTATUS_WIN_STATUS_NDIS_OFFLOAD_POLICY: + reason = "STATUS_NDIS_OFFLOAD_POLICY"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_OFFLOAD_CONNECTION_REJECTED: + reason = "STATUS_NDIS_OFFLOAD_CONNECTION_REJECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_OFFLOAD_PATH_REJECTED: + reason = "STATUS_NDIS_OFFLOAD_PATH_REJECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_DOT11_AUTO_CONFIG_ENABLED: + reason = "STATUS_NDIS_DOT11_AUTO_CONFIG_ENABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_DOT11_MEDIA_IN_USE: + reason = "STATUS_NDIS_DOT11_MEDIA_IN_USE"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_DOT11_POWER_STATE_INVALID: + reason = "STATUS_NDIS_DOT11_POWER_STATE_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_PM_WOL_PATTERN_LIST_FULL: + reason = "STATUS_NDIS_PM_WOL_PATTERN_LIST_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_NDIS_PM_PROTOCOL_OFFLOAD_LIST_FULL: + reason = "STATUS_NDIS_PM_PROTOCOL_OFFLOAD_LIST_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_ERROR_MASK: + reason = "STATUS_TPM_ERROR_MASK"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_AUTHFAIL: + reason = "STATUS_TPM_AUTHFAIL"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BADINDEX: + reason = "STATUS_TPM_BADINDEX"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_PARAMETER: + reason = "STATUS_TPM_BAD_PARAMETER"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_AUDITFAILURE: + reason = "STATUS_TPM_AUDITFAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_CLEAR_DISABLED: + reason = "STATUS_TPM_CLEAR_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DEACTIVATED: + reason = "STATUS_TPM_DEACTIVATED"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DISABLED: + reason = "STATUS_TPM_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DISABLED_CMD: + reason = "STATUS_TPM_DISABLED_CMD"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_FAIL: + reason = "STATUS_TPM_FAIL"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_ORDINAL: + reason = "STATUS_TPM_BAD_ORDINAL"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_INSTALL_DISABLED: + reason = "STATUS_TPM_INSTALL_DISABLED"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_INVALID_KEYHANDLE: + reason = "STATUS_TPM_INVALID_KEYHANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_KEYNOTFOUND: + reason = "STATUS_TPM_KEYNOTFOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_INAPPROPRIATE_ENC: + reason = "STATUS_TPM_INAPPROPRIATE_ENC"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_MIGRATEFAIL: + reason = "STATUS_TPM_MIGRATEFAIL"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_INVALID_PCR_INFO: + reason = "STATUS_TPM_INVALID_PCR_INFO"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_NOSPACE: + reason = "STATUS_TPM_NOSPACE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_NOSRK: + reason = "STATUS_TPM_NOSRK"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_NOTSEALED_BLOB: + reason = "STATUS_TPM_NOTSEALED_BLOB"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_OWNER_SET: + reason = "STATUS_TPM_OWNER_SET"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_RESOURCES: + reason = "STATUS_TPM_RESOURCES"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_SHORTRANDOM: + reason = "STATUS_TPM_SHORTRANDOM"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_SIZE: + reason = "STATUS_TPM_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_WRONGPCRVAL: + reason = "STATUS_TPM_WRONGPCRVAL"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_PARAM_SIZE: + reason = "STATUS_TPM_BAD_PARAM_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_SHA_THREAD: + reason = "STATUS_TPM_SHA_THREAD"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_SHA_ERROR: + reason = "STATUS_TPM_SHA_ERROR"; + break; + 
case MD_NTSTATUS_WIN_STATUS_TPM_FAILEDSELFTEST: + reason = "STATUS_TPM_FAILEDSELFTEST"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_AUTH2FAIL: + reason = "STATUS_TPM_AUTH2FAIL"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BADTAG: + reason = "STATUS_TPM_BADTAG"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_IOERROR: + reason = "STATUS_TPM_IOERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_ENCRYPT_ERROR: + reason = "STATUS_TPM_ENCRYPT_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DECRYPT_ERROR: + reason = "STATUS_TPM_DECRYPT_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_INVALID_AUTHHANDLE: + reason = "STATUS_TPM_INVALID_AUTHHANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_NO_ENDORSEMENT: + reason = "STATUS_TPM_NO_ENDORSEMENT"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_INVALID_KEYUSAGE: + reason = "STATUS_TPM_INVALID_KEYUSAGE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_WRONG_ENTITYTYPE: + reason = "STATUS_TPM_WRONG_ENTITYTYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_INVALID_POSTINIT: + reason = "STATUS_TPM_INVALID_POSTINIT"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_INAPPROPRIATE_SIG: + reason = "STATUS_TPM_INAPPROPRIATE_SIG"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_KEY_PROPERTY: + reason = "STATUS_TPM_BAD_KEY_PROPERTY"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_MIGRATION: + reason = "STATUS_TPM_BAD_MIGRATION"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_SCHEME: + reason = "STATUS_TPM_BAD_SCHEME"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_DATASIZE: + reason = "STATUS_TPM_BAD_DATASIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_MODE: + reason = "STATUS_TPM_BAD_MODE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_PRESENCE: + reason = "STATUS_TPM_BAD_PRESENCE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_VERSION: + reason = "STATUS_TPM_BAD_VERSION"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_NO_WRAP_TRANSPORT: + reason = "STATUS_TPM_NO_WRAP_TRANSPORT"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_AUDITFAIL_UNSUCCESSFUL: + reason = "STATUS_TPM_AUDITFAIL_UNSUCCESSFUL"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_AUDITFAIL_SUCCESSFUL: + reason = "STATUS_TPM_AUDITFAIL_SUCCESSFUL"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_NOTRESETABLE: + reason = "STATUS_TPM_NOTRESETABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_NOTLOCAL: + reason = "STATUS_TPM_NOTLOCAL"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_TYPE: + reason = "STATUS_TPM_BAD_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_INVALID_RESOURCE: + reason = "STATUS_TPM_INVALID_RESOURCE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_NOTFIPS: + reason = "STATUS_TPM_NOTFIPS"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_INVALID_FAMILY: + reason = "STATUS_TPM_INVALID_FAMILY"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_NO_NV_PERMISSION: + reason = "STATUS_TPM_NO_NV_PERMISSION"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_REQUIRES_SIGN: + reason = "STATUS_TPM_REQUIRES_SIGN"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_KEY_NOTSUPPORTED: + reason = "STATUS_TPM_KEY_NOTSUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_AUTH_CONFLICT: + reason = "STATUS_TPM_AUTH_CONFLICT"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_AREA_LOCKED: + reason = "STATUS_TPM_AREA_LOCKED"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_LOCALITY: + reason = "STATUS_TPM_BAD_LOCALITY"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_READ_ONLY: + reason = "STATUS_TPM_READ_ONLY"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_PER_NOWRITE: + reason = "STATUS_TPM_PER_NOWRITE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_FAMILYCOUNT: + reason = 
"STATUS_TPM_FAMILYCOUNT"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_WRITE_LOCKED: + reason = "STATUS_TPM_WRITE_LOCKED"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_ATTRIBUTES: + reason = "STATUS_TPM_BAD_ATTRIBUTES"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_INVALID_STRUCTURE: + reason = "STATUS_TPM_INVALID_STRUCTURE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_KEY_OWNER_CONTROL: + reason = "STATUS_TPM_KEY_OWNER_CONTROL"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_COUNTER: + reason = "STATUS_TPM_BAD_COUNTER"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_NOT_FULLWRITE: + reason = "STATUS_TPM_NOT_FULLWRITE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_CONTEXT_GAP: + reason = "STATUS_TPM_CONTEXT_GAP"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_MAXNVWRITES: + reason = "STATUS_TPM_MAXNVWRITES"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_NOOPERATOR: + reason = "STATUS_TPM_NOOPERATOR"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_RESOURCEMISSING: + reason = "STATUS_TPM_RESOURCEMISSING"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DELEGATE_LOCK: + reason = "STATUS_TPM_DELEGATE_LOCK"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DELEGATE_FAMILY: + reason = "STATUS_TPM_DELEGATE_FAMILY"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DELEGATE_ADMIN: + reason = "STATUS_TPM_DELEGATE_ADMIN"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_TRANSPORT_NOTEXCLUSIVE: + reason = "STATUS_TPM_TRANSPORT_NOTEXCLUSIVE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_OWNER_CONTROL: + reason = "STATUS_TPM_OWNER_CONTROL"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DAA_RESOURCES: + reason = "STATUS_TPM_DAA_RESOURCES"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DAA_INPUT_DATA0: + reason = "STATUS_TPM_DAA_INPUT_DATA0"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DAA_INPUT_DATA1: + reason = "STATUS_TPM_DAA_INPUT_DATA1"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DAA_ISSUER_SETTINGS: + reason = "STATUS_TPM_DAA_ISSUER_SETTINGS"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DAA_TPM_SETTINGS: + reason = "STATUS_TPM_DAA_TPM_SETTINGS"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DAA_STAGE: + reason = "STATUS_TPM_DAA_STAGE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DAA_ISSUER_VALIDITY: + reason = "STATUS_TPM_DAA_ISSUER_VALIDITY"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DAA_WRONG_W: + reason = "STATUS_TPM_DAA_WRONG_W"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_HANDLE: + reason = "STATUS_TPM_BAD_HANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_DELEGATE: + reason = "STATUS_TPM_BAD_DELEGATE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BADCONTEXT: + reason = "STATUS_TPM_BADCONTEXT"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_TOOMANYCONTEXTS: + reason = "STATUS_TPM_TOOMANYCONTEXTS"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_MA_TICKET_SIGNATURE: + reason = "STATUS_TPM_MA_TICKET_SIGNATURE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_MA_DESTINATION: + reason = "STATUS_TPM_MA_DESTINATION"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_MA_SOURCE: + reason = "STATUS_TPM_MA_SOURCE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_MA_AUTHORITY: + reason = "STATUS_TPM_MA_AUTHORITY"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_PERMANENTEK: + reason = "STATUS_TPM_PERMANENTEK"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_BAD_SIGNATURE: + reason = "STATUS_TPM_BAD_SIGNATURE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_NOCONTEXTSPACE: + reason = "STATUS_TPM_NOCONTEXTSPACE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_COMMAND_BLOCKED: + reason = "STATUS_TPM_COMMAND_BLOCKED"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_INVALID_HANDLE: + reason = 
"STATUS_TPM_INVALID_HANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DUPLICATE_VHANDLE: + reason = "STATUS_TPM_DUPLICATE_VHANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_EMBEDDED_COMMAND_BLOCKED: + reason = "STATUS_TPM_EMBEDDED_COMMAND_BLOCKED"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_EMBEDDED_COMMAND_UNSUPPORTED: + reason = "STATUS_TPM_EMBEDDED_COMMAND_UNSUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_RETRY: + reason = "STATUS_TPM_RETRY"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_NEEDS_SELFTEST: + reason = "STATUS_TPM_NEEDS_SELFTEST"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DOING_SELFTEST: + reason = "STATUS_TPM_DOING_SELFTEST"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_DEFEND_LOCK_RUNNING: + reason = "STATUS_TPM_DEFEND_LOCK_RUNNING"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_COMMAND_CANCELED: + reason = "STATUS_TPM_COMMAND_CANCELED"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_TOO_MANY_CONTEXTS: + reason = "STATUS_TPM_TOO_MANY_CONTEXTS"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_NOT_FOUND: + reason = "STATUS_TPM_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_ACCESS_DENIED: + reason = "STATUS_TPM_ACCESS_DENIED"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_INSUFFICIENT_BUFFER: + reason = "STATUS_TPM_INSUFFICIENT_BUFFER"; + break; + case MD_NTSTATUS_WIN_STATUS_TPM_PPI_FUNCTION_UNSUPPORTED: + reason = "STATUS_TPM_PPI_FUNCTION_UNSUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_PCP_ERROR_MASK: + reason = "STATUS_PCP_ERROR_MASK"; + break; + case MD_NTSTATUS_WIN_STATUS_PCP_DEVICE_NOT_READY: + reason = "STATUS_PCP_DEVICE_NOT_READY"; + break; + case MD_NTSTATUS_WIN_STATUS_PCP_INVALID_HANDLE: + reason = "STATUS_PCP_INVALID_HANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_PCP_INVALID_PARAMETER: + reason = "STATUS_PCP_INVALID_PARAMETER"; + break; + case MD_NTSTATUS_WIN_STATUS_PCP_FLAG_NOT_SUPPORTED: + reason = "STATUS_PCP_FLAG_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_PCP_NOT_SUPPORTED: + reason = "STATUS_PCP_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_PCP_BUFFER_TOO_SMALL: + reason = "STATUS_PCP_BUFFER_TOO_SMALL"; + break; + case MD_NTSTATUS_WIN_STATUS_PCP_INTERNAL_ERROR: + reason = "STATUS_PCP_INTERNAL_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_PCP_AUTHENTICATION_FAILED: + reason = "STATUS_PCP_AUTHENTICATION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_PCP_AUTHENTICATION_IGNORED: + reason = "STATUS_PCP_AUTHENTICATION_IGNORED"; + break; + case MD_NTSTATUS_WIN_STATUS_PCP_POLICY_NOT_FOUND: + reason = "STATUS_PCP_POLICY_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_PCP_PROFILE_NOT_FOUND: + reason = "STATUS_PCP_PROFILE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_PCP_VALIDATION_FAILED: + reason = "STATUS_PCP_VALIDATION_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_PCP_DEVICE_NOT_FOUND: + reason = "STATUS_PCP_DEVICE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INVALID_HYPERCALL_CODE: + reason = "STATUS_HV_INVALID_HYPERCALL_CODE"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INVALID_HYPERCALL_INPUT: + reason = "STATUS_HV_INVALID_HYPERCALL_INPUT"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INVALID_ALIGNMENT: + reason = "STATUS_HV_INVALID_ALIGNMENT"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INVALID_PARAMETER: + reason = "STATUS_HV_INVALID_PARAMETER"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_ACCESS_DENIED: + reason = "STATUS_HV_ACCESS_DENIED"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INVALID_PARTITION_STATE: + reason = "STATUS_HV_INVALID_PARTITION_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_OPERATION_DENIED: + reason = 
"STATUS_HV_OPERATION_DENIED"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_UNKNOWN_PROPERTY: + reason = "STATUS_HV_UNKNOWN_PROPERTY"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_PROPERTY_VALUE_OUT_OF_RANGE: + reason = "STATUS_HV_PROPERTY_VALUE_OUT_OF_RANGE"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INSUFFICIENT_MEMORY: + reason = "STATUS_HV_INSUFFICIENT_MEMORY"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_PARTITION_TOO_DEEP: + reason = "STATUS_HV_PARTITION_TOO_DEEP"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INVALID_PARTITION_ID: + reason = "STATUS_HV_INVALID_PARTITION_ID"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INVALID_VP_INDEX: + reason = "STATUS_HV_INVALID_VP_INDEX"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INVALID_PORT_ID: + reason = "STATUS_HV_INVALID_PORT_ID"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INVALID_CONNECTION_ID: + reason = "STATUS_HV_INVALID_CONNECTION_ID"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INSUFFICIENT_BUFFERS: + reason = "STATUS_HV_INSUFFICIENT_BUFFERS"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_NOT_ACKNOWLEDGED: + reason = "STATUS_HV_NOT_ACKNOWLEDGED"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_ACKNOWLEDGED: + reason = "STATUS_HV_ACKNOWLEDGED"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INVALID_SAVE_RESTORE_STATE: + reason = "STATUS_HV_INVALID_SAVE_RESTORE_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INVALID_SYNIC_STATE: + reason = "STATUS_HV_INVALID_SYNIC_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_OBJECT_IN_USE: + reason = "STATUS_HV_OBJECT_IN_USE"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INVALID_PROXIMITY_DOMAIN_INFO: + reason = "STATUS_HV_INVALID_PROXIMITY_DOMAIN_INFO"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_NO_DATA: + reason = "STATUS_HV_NO_DATA"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INACTIVE: + reason = "STATUS_HV_INACTIVE"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_NO_RESOURCES: + reason = "STATUS_HV_NO_RESOURCES"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_FEATURE_UNAVAILABLE: + reason = "STATUS_HV_FEATURE_UNAVAILABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INSUFFICIENT_BUFFER: + reason = "STATUS_HV_INSUFFICIENT_BUFFER"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INSUFFICIENT_DEVICE_DOMAINS: + reason = "STATUS_HV_INSUFFICIENT_DEVICE_DOMAINS"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_INVALID_LP_INDEX: + reason = "STATUS_HV_INVALID_LP_INDEX"; + break; + case MD_NTSTATUS_WIN_STATUS_HV_NOT_PRESENT: + reason = "STATUS_HV_NOT_PRESENT"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_BAD_SPI: + reason = "STATUS_IPSEC_BAD_SPI"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_SA_LIFETIME_EXPIRED: + reason = "STATUS_IPSEC_SA_LIFETIME_EXPIRED"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_WRONG_SA: + reason = "STATUS_IPSEC_WRONG_SA"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_REPLAY_CHECK_FAILED: + reason = "STATUS_IPSEC_REPLAY_CHECK_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_INVALID_PACKET: + reason = "STATUS_IPSEC_INVALID_PACKET"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_INTEGRITY_CHECK_FAILED: + reason = "STATUS_IPSEC_INTEGRITY_CHECK_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_CLEAR_TEXT_DROP: + reason = "STATUS_IPSEC_CLEAR_TEXT_DROP"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_AUTH_FIREWALL_DROP: + reason = "STATUS_IPSEC_AUTH_FIREWALL_DROP"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_THROTTLE_DROP: + reason = "STATUS_IPSEC_THROTTLE_DROP"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_DOSP_BLOCK: + reason = "STATUS_IPSEC_DOSP_BLOCK"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_DOSP_RECEIVED_MULTICAST: + reason = 
"STATUS_IPSEC_DOSP_RECEIVED_MULTICAST"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_DOSP_INVALID_PACKET: + reason = "STATUS_IPSEC_DOSP_INVALID_PACKET"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_DOSP_STATE_LOOKUP_FAILED: + reason = "STATUS_IPSEC_DOSP_STATE_LOOKUP_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_DOSP_MAX_ENTRIES: + reason = "STATUS_IPSEC_DOSP_MAX_ENTRIES"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_DOSP_KEYMOD_NOT_ALLOWED: + reason = "STATUS_IPSEC_DOSP_KEYMOD_NOT_ALLOWED"; + break; + case MD_NTSTATUS_WIN_STATUS_IPSEC_DOSP_MAX_PER_IP_RATELIMIT_QUEUES: + reason = "STATUS_IPSEC_DOSP_MAX_PER_IP_RATELIMIT_QUEUES"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_DUPLICATE_HANDLER: + reason = "STATUS_VID_DUPLICATE_HANDLER"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_TOO_MANY_HANDLERS: + reason = "STATUS_VID_TOO_MANY_HANDLERS"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_QUEUE_FULL: + reason = "STATUS_VID_QUEUE_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_HANDLER_NOT_PRESENT: + reason = "STATUS_VID_HANDLER_NOT_PRESENT"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_INVALID_OBJECT_NAME: + reason = "STATUS_VID_INVALID_OBJECT_NAME"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_PARTITION_NAME_TOO_LONG: + reason = "STATUS_VID_PARTITION_NAME_TOO_LONG"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_MESSAGE_QUEUE_NAME_TOO_LONG: + reason = "STATUS_VID_MESSAGE_QUEUE_NAME_TOO_LONG"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_PARTITION_ALREADY_EXISTS: + reason = "STATUS_VID_PARTITION_ALREADY_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_PARTITION_DOES_NOT_EXIST: + reason = "STATUS_VID_PARTITION_DOES_NOT_EXIST"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_PARTITION_NAME_NOT_FOUND: + reason = "STATUS_VID_PARTITION_NAME_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_MESSAGE_QUEUE_ALREADY_EXISTS: + reason = "STATUS_VID_MESSAGE_QUEUE_ALREADY_EXISTS"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_EXCEEDED_MBP_ENTRY_MAP_LIMIT: + reason = "STATUS_VID_EXCEEDED_MBP_ENTRY_MAP_LIMIT"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_MB_STILL_REFERENCED: + reason = "STATUS_VID_MB_STILL_REFERENCED"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_CHILD_GPA_PAGE_SET_CORRUPTED: + reason = "STATUS_VID_CHILD_GPA_PAGE_SET_CORRUPTED"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_INVALID_NUMA_SETTINGS: + reason = "STATUS_VID_INVALID_NUMA_SETTINGS"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_INVALID_NUMA_NODE_INDEX: + reason = "STATUS_VID_INVALID_NUMA_NODE_INDEX"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_NOTIFICATION_QUEUE_ALREADY_ASSOCIATED: + reason = "STATUS_VID_NOTIFICATION_QUEUE_ALREADY_ASSOCIATED"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_INVALID_MEMORY_BLOCK_HANDLE: + reason = "STATUS_VID_INVALID_MEMORY_BLOCK_HANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_PAGE_RANGE_OVERFLOW: + reason = "STATUS_VID_PAGE_RANGE_OVERFLOW"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_INVALID_MESSAGE_QUEUE_HANDLE: + reason = "STATUS_VID_INVALID_MESSAGE_QUEUE_HANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_INVALID_GPA_RANGE_HANDLE: + reason = "STATUS_VID_INVALID_GPA_RANGE_HANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_NO_MEMORY_BLOCK_NOTIFICATION_QUEUE: + reason = "STATUS_VID_NO_MEMORY_BLOCK_NOTIFICATION_QUEUE"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_MEMORY_BLOCK_LOCK_COUNT_EXCEEDED: + reason = "STATUS_VID_MEMORY_BLOCK_LOCK_COUNT_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_INVALID_PPM_HANDLE: + reason = "STATUS_VID_INVALID_PPM_HANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_MBPS_ARE_LOCKED: + 
reason = "STATUS_VID_MBPS_ARE_LOCKED"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_MESSAGE_QUEUE_CLOSED: + reason = "STATUS_VID_MESSAGE_QUEUE_CLOSED"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_VIRTUAL_PROCESSOR_LIMIT_EXCEEDED: + reason = "STATUS_VID_VIRTUAL_PROCESSOR_LIMIT_EXCEEDED"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_STOP_PENDING: + reason = "STATUS_VID_STOP_PENDING"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_INVALID_PROCESSOR_STATE: + reason = "STATUS_VID_INVALID_PROCESSOR_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_EXCEEDED_KM_CONTEXT_COUNT_LIMIT: + reason = "STATUS_VID_EXCEEDED_KM_CONTEXT_COUNT_LIMIT"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_KM_INTERFACE_ALREADY_INITIALIZED: + reason = "STATUS_VID_KM_INTERFACE_ALREADY_INITIALIZED"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_MB_PROPERTY_ALREADY_SET_RESET: + reason = "STATUS_VID_MB_PROPERTY_ALREADY_SET_RESET"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_MMIO_RANGE_DESTROYED: + reason = "STATUS_VID_MMIO_RANGE_DESTROYED"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_INVALID_CHILD_GPA_PAGE_SET: + reason = "STATUS_VID_INVALID_CHILD_GPA_PAGE_SET"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_RESERVE_PAGE_SET_IS_BEING_USED: + reason = "STATUS_VID_RESERVE_PAGE_SET_IS_BEING_USED"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_RESERVE_PAGE_SET_TOO_SMALL: + reason = "STATUS_VID_RESERVE_PAGE_SET_TOO_SMALL"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_MBP_ALREADY_LOCKED_USING_RESERVED_PAGE: + reason = "STATUS_VID_MBP_ALREADY_LOCKED_USING_RESERVED_PAGE"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_MBP_COUNT_EXCEEDED_LIMIT: + reason = "STATUS_VID_MBP_COUNT_EXCEEDED_LIMIT"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_SAVED_STATE_CORRUPT: + reason = "STATUS_VID_SAVED_STATE_CORRUPT"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_SAVED_STATE_UNRECOGNIZED_ITEM: + reason = "STATUS_VID_SAVED_STATE_UNRECOGNIZED_ITEM"; + break; + case MD_NTSTATUS_WIN_STATUS_VID_SAVED_STATE_INCOMPATIBLE: + reason = "STATUS_VID_SAVED_STATE_INCOMPATIBLE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DATABASE_FULL: + reason = "STATUS_VOLMGR_DATABASE_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_CONFIGURATION_CORRUPTED: + reason = "STATUS_VOLMGR_DISK_CONFIGURATION_CORRUPTED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_CONFIGURATION_NOT_IN_SYNC: + reason = "STATUS_VOLMGR_DISK_CONFIGURATION_NOT_IN_SYNC"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_CONFIG_UPDATE_FAILED: + reason = "STATUS_VOLMGR_PACK_CONFIG_UPDATE_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_CONTAINS_NON_SIMPLE_VOLUME: + reason = "STATUS_VOLMGR_DISK_CONTAINS_NON_SIMPLE_VOLUME"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_DUPLICATE: + reason = "STATUS_VOLMGR_DISK_DUPLICATE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_DYNAMIC: + reason = "STATUS_VOLMGR_DISK_DYNAMIC"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_ID_INVALID: + reason = "STATUS_VOLMGR_DISK_ID_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_INVALID: + reason = "STATUS_VOLMGR_DISK_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_LAST_VOTER: + reason = "STATUS_VOLMGR_DISK_LAST_VOTER"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_LAYOUT_INVALID: + reason = "STATUS_VOLMGR_DISK_LAYOUT_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_LAYOUT_NON_BASIC_BETWEEN_BASIC_PARTITIONS: + reason = "STATUS_VOLMGR_DISK_LAYOUT_NON_BASIC_BETWEEN_BASIC_PARTITIONS"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_LAYOUT_NOT_CYLINDER_ALIGNED: + reason = 
"STATUS_VOLMGR_DISK_LAYOUT_NOT_CYLINDER_ALIGNED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_LAYOUT_PARTITIONS_TOO_SMALL: + reason = "STATUS_VOLMGR_DISK_LAYOUT_PARTITIONS_TOO_SMALL"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_LAYOUT_PRIMARY_BETWEEN_LOGICAL_PARTITIONS: + reason = "STATUS_VOLMGR_DISK_LAYOUT_PRIMARY_BETWEEN_LOGICAL_PARTITIONS"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_LAYOUT_TOO_MANY_PARTITIONS: + reason = "STATUS_VOLMGR_DISK_LAYOUT_TOO_MANY_PARTITIONS"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_MISSING: + reason = "STATUS_VOLMGR_DISK_MISSING"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_NOT_EMPTY: + reason = "STATUS_VOLMGR_DISK_NOT_EMPTY"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_NOT_ENOUGH_SPACE: + reason = "STATUS_VOLMGR_DISK_NOT_ENOUGH_SPACE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_REVECTORING_FAILED: + reason = "STATUS_VOLMGR_DISK_REVECTORING_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_SECTOR_SIZE_INVALID: + reason = "STATUS_VOLMGR_DISK_SECTOR_SIZE_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_SET_NOT_CONTAINED: + reason = "STATUS_VOLMGR_DISK_SET_NOT_CONTAINED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_USED_BY_MULTIPLE_MEMBERS: + reason = "STATUS_VOLMGR_DISK_USED_BY_MULTIPLE_MEMBERS"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DISK_USED_BY_MULTIPLE_PLEXES: + reason = "STATUS_VOLMGR_DISK_USED_BY_MULTIPLE_PLEXES"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DYNAMIC_DISK_NOT_SUPPORTED: + reason = "STATUS_VOLMGR_DYNAMIC_DISK_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_EXTENT_ALREADY_USED: + reason = "STATUS_VOLMGR_EXTENT_ALREADY_USED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_EXTENT_NOT_CONTIGUOUS: + reason = "STATUS_VOLMGR_EXTENT_NOT_CONTIGUOUS"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_EXTENT_NOT_IN_PUBLIC_REGION: + reason = "STATUS_VOLMGR_EXTENT_NOT_IN_PUBLIC_REGION"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_EXTENT_NOT_SECTOR_ALIGNED: + reason = "STATUS_VOLMGR_EXTENT_NOT_SECTOR_ALIGNED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_EXTENT_OVERLAPS_EBR_PARTITION: + reason = "STATUS_VOLMGR_EXTENT_OVERLAPS_EBR_PARTITION"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_EXTENT_VOLUME_LENGTHS_DO_NOT_MATCH: + reason = "STATUS_VOLMGR_EXTENT_VOLUME_LENGTHS_DO_NOT_MATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_FAULT_TOLERANT_NOT_SUPPORTED: + reason = "STATUS_VOLMGR_FAULT_TOLERANT_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_INTERLEAVE_LENGTH_INVALID: + reason = "STATUS_VOLMGR_INTERLEAVE_LENGTH_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_MAXIMUM_REGISTERED_USERS: + reason = "STATUS_VOLMGR_MAXIMUM_REGISTERED_USERS"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_MEMBER_IN_SYNC: + reason = "STATUS_VOLMGR_MEMBER_IN_SYNC"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_MEMBER_INDEX_DUPLICATE: + reason = "STATUS_VOLMGR_MEMBER_INDEX_DUPLICATE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_MEMBER_INDEX_INVALID: + reason = "STATUS_VOLMGR_MEMBER_INDEX_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_MEMBER_MISSING: + reason = "STATUS_VOLMGR_MEMBER_MISSING"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_MEMBER_NOT_DETACHED: + reason = "STATUS_VOLMGR_MEMBER_NOT_DETACHED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_MEMBER_REGENERATING: + reason = "STATUS_VOLMGR_MEMBER_REGENERATING"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_ALL_DISKS_FAILED: + reason = "STATUS_VOLMGR_ALL_DISKS_FAILED"; + break; + case 
MD_NTSTATUS_WIN_STATUS_VOLMGR_NO_REGISTERED_USERS: + reason = "STATUS_VOLMGR_NO_REGISTERED_USERS"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_NO_SUCH_USER: + reason = "STATUS_VOLMGR_NO_SUCH_USER"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_NOTIFICATION_RESET: + reason = "STATUS_VOLMGR_NOTIFICATION_RESET"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_NUMBER_OF_MEMBERS_INVALID: + reason = "STATUS_VOLMGR_NUMBER_OF_MEMBERS_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_NUMBER_OF_PLEXES_INVALID: + reason = "STATUS_VOLMGR_NUMBER_OF_PLEXES_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_DUPLICATE: + reason = "STATUS_VOLMGR_PACK_DUPLICATE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_ID_INVALID: + reason = "STATUS_VOLMGR_PACK_ID_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_INVALID: + reason = "STATUS_VOLMGR_PACK_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_NAME_INVALID: + reason = "STATUS_VOLMGR_PACK_NAME_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_OFFLINE: + reason = "STATUS_VOLMGR_PACK_OFFLINE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_HAS_QUORUM: + reason = "STATUS_VOLMGR_PACK_HAS_QUORUM"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_WITHOUT_QUORUM: + reason = "STATUS_VOLMGR_PACK_WITHOUT_QUORUM"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PARTITION_STYLE_INVALID: + reason = "STATUS_VOLMGR_PARTITION_STYLE_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PARTITION_UPDATE_FAILED: + reason = "STATUS_VOLMGR_PARTITION_UPDATE_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_IN_SYNC: + reason = "STATUS_VOLMGR_PLEX_IN_SYNC"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_INDEX_DUPLICATE: + reason = "STATUS_VOLMGR_PLEX_INDEX_DUPLICATE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_INDEX_INVALID: + reason = "STATUS_VOLMGR_PLEX_INDEX_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_LAST_ACTIVE: + reason = "STATUS_VOLMGR_PLEX_LAST_ACTIVE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_MISSING: + reason = "STATUS_VOLMGR_PLEX_MISSING"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_REGENERATING: + reason = "STATUS_VOLMGR_PLEX_REGENERATING"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_TYPE_INVALID: + reason = "STATUS_VOLMGR_PLEX_TYPE_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_NOT_RAID5: + reason = "STATUS_VOLMGR_PLEX_NOT_RAID5"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_NOT_SIMPLE: + reason = "STATUS_VOLMGR_PLEX_NOT_SIMPLE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_STRUCTURE_SIZE_INVALID: + reason = "STATUS_VOLMGR_STRUCTURE_SIZE_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_TOO_MANY_NOTIFICATION_REQUESTS: + reason = "STATUS_VOLMGR_TOO_MANY_NOTIFICATION_REQUESTS"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_TRANSACTION_IN_PROGRESS: + reason = "STATUS_VOLMGR_TRANSACTION_IN_PROGRESS"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_UNEXPECTED_DISK_LAYOUT_CHANGE: + reason = "STATUS_VOLMGR_UNEXPECTED_DISK_LAYOUT_CHANGE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_CONTAINS_MISSING_DISK: + reason = "STATUS_VOLMGR_VOLUME_CONTAINS_MISSING_DISK"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_ID_INVALID: + reason = "STATUS_VOLMGR_VOLUME_ID_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_LENGTH_INVALID: + reason = "STATUS_VOLMGR_VOLUME_LENGTH_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_LENGTH_NOT_SECTOR_SIZE_MULTIPLE: + reason = 
"STATUS_VOLMGR_VOLUME_LENGTH_NOT_SECTOR_SIZE_MULTIPLE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_NOT_MIRRORED: + reason = "STATUS_VOLMGR_VOLUME_NOT_MIRRORED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_NOT_RETAINED: + reason = "STATUS_VOLMGR_VOLUME_NOT_RETAINED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_OFFLINE: + reason = "STATUS_VOLMGR_VOLUME_OFFLINE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_RETAINED: + reason = "STATUS_VOLMGR_VOLUME_RETAINED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_NUMBER_OF_EXTENTS_INVALID: + reason = "STATUS_VOLMGR_NUMBER_OF_EXTENTS_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_DIFFERENT_SECTOR_SIZE: + reason = "STATUS_VOLMGR_DIFFERENT_SECTOR_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_BAD_BOOT_DISK: + reason = "STATUS_VOLMGR_BAD_BOOT_DISK"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_CONFIG_OFFLINE: + reason = "STATUS_VOLMGR_PACK_CONFIG_OFFLINE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_CONFIG_ONLINE: + reason = "STATUS_VOLMGR_PACK_CONFIG_ONLINE"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_NOT_PRIMARY_PACK: + reason = "STATUS_VOLMGR_NOT_PRIMARY_PACK"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PACK_LOG_UPDATE_FAILED: + reason = "STATUS_VOLMGR_PACK_LOG_UPDATE_FAILED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_NUMBER_OF_DISKS_IN_PLEX_INVALID: + reason = "STATUS_VOLMGR_NUMBER_OF_DISKS_IN_PLEX_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_NUMBER_OF_DISKS_IN_MEMBER_INVALID: + reason = "STATUS_VOLMGR_NUMBER_OF_DISKS_IN_MEMBER_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_VOLUME_MIRRORED: + reason = "STATUS_VOLMGR_VOLUME_MIRRORED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PLEX_NOT_SIMPLE_SPANNED: + reason = "STATUS_VOLMGR_PLEX_NOT_SIMPLE_SPANNED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_NO_VALID_LOG_COPIES: + reason = "STATUS_VOLMGR_NO_VALID_LOG_COPIES"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_PRIMARY_PACK_PRESENT: + reason = "STATUS_VOLMGR_PRIMARY_PACK_PRESENT"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_NUMBER_OF_DISKS_INVALID: + reason = "STATUS_VOLMGR_NUMBER_OF_DISKS_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_MIRROR_NOT_SUPPORTED: + reason = "STATUS_VOLMGR_MIRROR_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLMGR_RAID5_NOT_SUPPORTED: + reason = "STATUS_VOLMGR_RAID5_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_BCD_TOO_MANY_ELEMENTS: + reason = "STATUS_BCD_TOO_MANY_ELEMENTS"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_DRIVE_FOOTER_MISSING: + reason = "STATUS_VHD_DRIVE_FOOTER_MISSING"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_DRIVE_FOOTER_CHECKSUM_MISMATCH: + reason = "STATUS_VHD_DRIVE_FOOTER_CHECKSUM_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_DRIVE_FOOTER_CORRUPT: + reason = "STATUS_VHD_DRIVE_FOOTER_CORRUPT"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_FORMAT_UNKNOWN: + reason = "STATUS_VHD_FORMAT_UNKNOWN"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_FORMAT_UNSUPPORTED_VERSION: + reason = "STATUS_VHD_FORMAT_UNSUPPORTED_VERSION"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_SPARSE_HEADER_CHECKSUM_MISMATCH: + reason = "STATUS_VHD_SPARSE_HEADER_CHECKSUM_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_SPARSE_HEADER_UNSUPPORTED_VERSION: + reason = "STATUS_VHD_SPARSE_HEADER_UNSUPPORTED_VERSION"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_SPARSE_HEADER_CORRUPT: + reason = "STATUS_VHD_SPARSE_HEADER_CORRUPT"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_BLOCK_ALLOCATION_FAILURE: + reason = 
"STATUS_VHD_BLOCK_ALLOCATION_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_BLOCK_ALLOCATION_TABLE_CORRUPT: + reason = "STATUS_VHD_BLOCK_ALLOCATION_TABLE_CORRUPT"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_INVALID_BLOCK_SIZE: + reason = "STATUS_VHD_INVALID_BLOCK_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_BITMAP_MISMATCH: + reason = "STATUS_VHD_BITMAP_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_PARENT_VHD_NOT_FOUND: + reason = "STATUS_VHD_PARENT_VHD_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_CHILD_PARENT_ID_MISMATCH: + reason = "STATUS_VHD_CHILD_PARENT_ID_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_CHILD_PARENT_TIMESTAMP_MISMATCH: + reason = "STATUS_VHD_CHILD_PARENT_TIMESTAMP_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_METADATA_READ_FAILURE: + reason = "STATUS_VHD_METADATA_READ_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_METADATA_WRITE_FAILURE: + reason = "STATUS_VHD_METADATA_WRITE_FAILURE"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_INVALID_SIZE: + reason = "STATUS_VHD_INVALID_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_INVALID_FILE_SIZE: + reason = "STATUS_VHD_INVALID_FILE_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_VIRTDISK_PROVIDER_NOT_FOUND: + reason = "STATUS_VIRTDISK_PROVIDER_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_VIRTDISK_NOT_VIRTUAL_DISK: + reason = "STATUS_VIRTDISK_NOT_VIRTUAL_DISK"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_PARENT_VHD_ACCESS_DENIED: + reason = "STATUS_VHD_PARENT_VHD_ACCESS_DENIED"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_CHILD_PARENT_SIZE_MISMATCH: + reason = "STATUS_VHD_CHILD_PARENT_SIZE_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_DIFFERENCING_CHAIN_CYCLE_DETECTED: + reason = "STATUS_VHD_DIFFERENCING_CHAIN_CYCLE_DETECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_DIFFERENCING_CHAIN_ERROR_IN_PARENT: + reason = "STATUS_VHD_DIFFERENCING_CHAIN_ERROR_IN_PARENT"; + break; + case MD_NTSTATUS_WIN_STATUS_VIRTUAL_DISK_LIMITATION: + reason = "STATUS_VIRTUAL_DISK_LIMITATION"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_INVALID_TYPE: + reason = "STATUS_VHD_INVALID_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_INVALID_STATE: + reason = "STATUS_VHD_INVALID_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_VIRTDISK_UNSUPPORTED_DISK_SECTOR_SIZE: + reason = "STATUS_VIRTDISK_UNSUPPORTED_DISK_SECTOR_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_VIRTDISK_DISK_ALREADY_OWNED: + reason = "STATUS_VIRTDISK_DISK_ALREADY_OWNED"; + break; + case MD_NTSTATUS_WIN_STATUS_VIRTDISK_DISK_ONLINE_AND_WRITABLE: + reason = "STATUS_VIRTDISK_DISK_ONLINE_AND_WRITABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_CTLOG_TRACKING_NOT_INITIALIZED: + reason = "STATUS_CTLOG_TRACKING_NOT_INITIALIZED"; + break; + case MD_NTSTATUS_WIN_STATUS_CTLOG_LOGFILE_SIZE_EXCEEDED_MAXSIZE: + reason = "STATUS_CTLOG_LOGFILE_SIZE_EXCEEDED_MAXSIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_CTLOG_VHD_CHANGED_OFFLINE: + reason = "STATUS_CTLOG_VHD_CHANGED_OFFLINE"; + break; + case MD_NTSTATUS_WIN_STATUS_CTLOG_INVALID_TRACKING_STATE: + reason = "STATUS_CTLOG_INVALID_TRACKING_STATE"; + break; + case MD_NTSTATUS_WIN_STATUS_CTLOG_INCONSISTENT_TRACKING_FILE: + reason = "STATUS_CTLOG_INCONSISTENT_TRACKING_FILE"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_METADATA_FULL: + reason = "STATUS_VHD_METADATA_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_RKF_KEY_NOT_FOUND: + reason = "STATUS_RKF_KEY_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_RKF_DUPLICATE_KEY: + reason = "STATUS_RKF_DUPLICATE_KEY"; + break; + case 
MD_NTSTATUS_WIN_STATUS_RKF_BLOB_FULL: + reason = "STATUS_RKF_BLOB_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_RKF_STORE_FULL: + reason = "STATUS_RKF_STORE_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_RKF_FILE_BLOCKED: + reason = "STATUS_RKF_FILE_BLOCKED"; + break; + case MD_NTSTATUS_WIN_STATUS_RKF_ACTIVE_KEY: + reason = "STATUS_RKF_ACTIVE_KEY"; + break; + case MD_NTSTATUS_WIN_STATUS_RDBSS_RESTART_OPERATION: + reason = "STATUS_RDBSS_RESTART_OPERATION"; + break; + case MD_NTSTATUS_WIN_STATUS_RDBSS_CONTINUE_OPERATION: + reason = "STATUS_RDBSS_CONTINUE_OPERATION"; + break; + case MD_NTSTATUS_WIN_STATUS_RDBSS_POST_OPERATION: + reason = "STATUS_RDBSS_POST_OPERATION"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_INVALID_HANDLE: + reason = "STATUS_BTH_ATT_INVALID_HANDLE"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_READ_NOT_PERMITTED: + reason = "STATUS_BTH_ATT_READ_NOT_PERMITTED"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_WRITE_NOT_PERMITTED: + reason = "STATUS_BTH_ATT_WRITE_NOT_PERMITTED"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_INVALID_PDU: + reason = "STATUS_BTH_ATT_INVALID_PDU"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_INSUFFICIENT_AUTHENTICATION: + reason = "STATUS_BTH_ATT_INSUFFICIENT_AUTHENTICATION"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_REQUEST_NOT_SUPPORTED: + reason = "STATUS_BTH_ATT_REQUEST_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_INVALID_OFFSET: + reason = "STATUS_BTH_ATT_INVALID_OFFSET"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_INSUFFICIENT_AUTHORIZATION: + reason = "STATUS_BTH_ATT_INSUFFICIENT_AUTHORIZATION"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_PREPARE_QUEUE_FULL: + reason = "STATUS_BTH_ATT_PREPARE_QUEUE_FULL"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_ATTRIBUTE_NOT_FOUND: + reason = "STATUS_BTH_ATT_ATTRIBUTE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_ATTRIBUTE_NOT_LONG: + reason = "STATUS_BTH_ATT_ATTRIBUTE_NOT_LONG"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_INSUFFICIENT_ENCRYPTION_KEY_SIZE: + reason = "STATUS_BTH_ATT_INSUFFICIENT_ENCRYPTION_KEY_SIZE"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_INVALID_ATTRIBUTE_VALUE_LENGTH: + reason = "STATUS_BTH_ATT_INVALID_ATTRIBUTE_VALUE_LENGTH"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_UNLIKELY: + reason = "STATUS_BTH_ATT_UNLIKELY"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_INSUFFICIENT_ENCRYPTION: + reason = "STATUS_BTH_ATT_INSUFFICIENT_ENCRYPTION"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_UNSUPPORTED_GROUP_TYPE: + reason = "STATUS_BTH_ATT_UNSUPPORTED_GROUP_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_INSUFFICIENT_RESOURCES: + reason = "STATUS_BTH_ATT_INSUFFICIENT_RESOURCES"; + break; + case MD_NTSTATUS_WIN_STATUS_BTH_ATT_UNKNOWN_ERROR: + reason = "STATUS_BTH_ATT_UNKNOWN_ERROR"; + break; + case MD_NTSTATUS_WIN_STATUS_SECUREBOOT_ROLLBACK_DETECTED: + reason = "STATUS_SECUREBOOT_ROLLBACK_DETECTED"; + break; + case MD_NTSTATUS_WIN_STATUS_SECUREBOOT_POLICY_VIOLATION: + reason = "STATUS_SECUREBOOT_POLICY_VIOLATION"; + break; + case MD_NTSTATUS_WIN_STATUS_SECUREBOOT_INVALID_POLICY: + reason = "STATUS_SECUREBOOT_INVALID_POLICY"; + break; + case MD_NTSTATUS_WIN_STATUS_SECUREBOOT_POLICY_PUBLISHER_NOT_FOUND: + reason = "STATUS_SECUREBOOT_POLICY_PUBLISHER_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_SECUREBOOT_POLICY_NOT_SIGNED: + reason = "STATUS_SECUREBOOT_POLICY_NOT_SIGNED"; + break; + case MD_NTSTATUS_WIN_STATUS_SECUREBOOT_FILE_REPLACED: + reason = "STATUS_SECUREBOOT_FILE_REPLACED"; + break; + case 
MD_NTSTATUS_WIN_STATUS_AUDIO_ENGINE_NODE_NOT_FOUND: + reason = "STATUS_AUDIO_ENGINE_NODE_NOT_FOUND"; + break; + case MD_NTSTATUS_WIN_STATUS_HDAUDIO_EMPTY_CONNECTION_LIST: + reason = "STATUS_HDAUDIO_EMPTY_CONNECTION_LIST"; + break; + case MD_NTSTATUS_WIN_STATUS_HDAUDIO_CONNECTION_LIST_NOT_SUPPORTED: + reason = "STATUS_HDAUDIO_CONNECTION_LIST_NOT_SUPPORTED"; + break; + case MD_NTSTATUS_WIN_STATUS_HDAUDIO_NO_LOGICAL_DEVICES_CREATED: + reason = "STATUS_HDAUDIO_NO_LOGICAL_DEVICES_CREATED"; + break; + case MD_NTSTATUS_WIN_STATUS_HDAUDIO_NULL_LINKED_LIST_ENTRY: + reason = "STATUS_HDAUDIO_NULL_LINKED_LIST_ENTRY"; + break; + case MD_NTSTATUS_WIN_STATUS_VOLSNAP_BOOTFILE_NOT_VALID: + reason = "STATUS_VOLSNAP_BOOTFILE_NOT_VALID"; + break; + case MD_NTSTATUS_WIN_STATUS_IO_PREEMPTED: + reason = "STATUS_IO_PREEMPTED"; + break; + case MD_NTSTATUS_WIN_STATUS_SVHDX_ERROR_STORED: + reason = "STATUS_SVHDX_ERROR_STORED"; + break; + case MD_NTSTATUS_WIN_STATUS_SVHDX_ERROR_NOT_AVAILABLE: + reason = "STATUS_SVHDX_ERROR_NOT_AVAILABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_SVHDX_UNIT_ATTENTION_AVAILABLE: + reason = "STATUS_SVHDX_UNIT_ATTENTION_AVAILABLE"; + break; + case MD_NTSTATUS_WIN_STATUS_SVHDX_UNIT_ATTENTION_CAPACITY_DATA_CHANGED: + reason = "STATUS_SVHDX_UNIT_ATTENTION_CAPACITY_DATA_CHANGED"; + break; + case MD_NTSTATUS_WIN_STATUS_SVHDX_UNIT_ATTENTION_RESERVATIONS_PREEMPTED: + reason = "STATUS_SVHDX_UNIT_ATTENTION_RESERVATIONS_PREEMPTED"; + break; + case MD_NTSTATUS_WIN_STATUS_SVHDX_UNIT_ATTENTION_RESERVATIONS_RELEASED: + reason = "STATUS_SVHDX_UNIT_ATTENTION_RESERVATIONS_RELEASED"; + break; + case MD_NTSTATUS_WIN_STATUS_SVHDX_UNIT_ATTENTION_REGISTRATIONS_PREEMPTED: + reason = "STATUS_SVHDX_UNIT_ATTENTION_REGISTRATIONS_PREEMPTED"; + break; + case MD_NTSTATUS_WIN_STATUS_SVHDX_UNIT_ATTENTION_OPERATING_DEFINITION_CHANGED: + reason = "STATUS_SVHDX_UNIT_ATTENTION_OPERATING_DEFINITION_CHANGED"; + break; + case MD_NTSTATUS_WIN_STATUS_SVHDX_RESERVATION_CONFLICT: + reason = "STATUS_SVHDX_RESERVATION_CONFLICT"; + break; + case MD_NTSTATUS_WIN_STATUS_SVHDX_WRONG_FILE_TYPE: + reason = "STATUS_SVHDX_WRONG_FILE_TYPE"; + break; + case MD_NTSTATUS_WIN_STATUS_SVHDX_VERSION_MISMATCH: + reason = "STATUS_SVHDX_VERSION_MISMATCH"; + break; + case MD_NTSTATUS_WIN_STATUS_VHD_SHARED: + reason = "STATUS_VHD_SHARED"; + break; + case MD_NTSTATUS_WIN_STATUS_SPACES_RESILIENCY_TYPE_INVALID: + reason = "STATUS_SPACES_RESILIENCY_TYPE_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_SPACES_DRIVE_SECTOR_SIZE_INVALID: + reason = "STATUS_SPACES_DRIVE_SECTOR_SIZE_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_SPACES_INTERLEAVE_LENGTH_INVALID: + reason = "STATUS_SPACES_INTERLEAVE_LENGTH_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_SPACES_NUMBER_OF_COLUMNS_INVALID: + reason = "STATUS_SPACES_NUMBER_OF_COLUMNS_INVALID"; + break; + case MD_NTSTATUS_WIN_STATUS_SPACES_NOT_ENOUGH_DRIVES: + reason = "STATUS_SPACES_NOT_ENOUGH_DRIVES"; + break; + default: { + char reason_string[11]; + std::snprintf(reason_string, sizeof(reason_string), "0x%08x", ntstatus); + reason = reason_string; + break; + } + } + return reason; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/symbolic_constants_win.h b/TMessagesProj/jni/third_party/breakpad/src/processor/symbolic_constants_win.h new file mode 100644 index 0000000000..c05c91698c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/symbolic_constants_win.h @@ -0,0 +1,50 @@ +// Copyright (c) 2015 Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// ntstatus_reason_win.h: Windows NTSTATUS code to string. +// +// Provides a means to convert NTSTATUS codes to strings. +// +// Author: Ben Wagner + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_SYMBOLIC_CONSTANTS_WIN_H_ +#define GOOGLE_BREAKPAD_PROCESSOR_SYMBOLIC_CONSTANTS_WIN_H_ + +#include <string> + +#include "google_breakpad/common/breakpad_types.h" + +namespace google_breakpad { + +/* Converts a NTSTATUS code to a reason string. */ +std::string NTStatusToString(uint32_t ntstatus); + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_SYMBOLIC_CONSTANTS_WIN_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/synth_minidump.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/synth_minidump.cc new file mode 100644 index 0000000000..2cfbb08886 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/synth_minidump.cc @@ -0,0 +1,391 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// synth_minidump.cc: Implementation of SynthMinidump. See synth_minidump.h + +#include "processor/synth_minidump.h" + +namespace google_breakpad { + +namespace SynthMinidump { + +Section::Section(const Dump &dump) + : test_assembler::Section(dump.endianness()) { } + +void Section::CiteLocationIn(test_assembler::Section *section) const { + if (this) + (*section).D32(size_).D32(file_offset_); + else + (*section).D32(0).D32(0); +} + +void Stream::CiteStreamIn(test_assembler::Section *section) const { + section->D32(type_); + CiteLocationIn(section); +} + +SystemInfo::SystemInfo(const Dump &dump, + const MDRawSystemInfo &system_info, + const String &csd_version) + : Stream(dump, MD_SYSTEM_INFO_STREAM) { + D16(system_info.processor_architecture); + D16(system_info.processor_level); + D16(system_info.processor_revision); + D8(system_info.number_of_processors); + D8(system_info.product_type); + D32(system_info.major_version); + D32(system_info.minor_version); + D32(system_info.build_number); + D32(system_info.platform_id); + csd_version.CiteStringIn(this); + D16(system_info.suite_mask); + D16(system_info.reserved2); // Well, why not? + + // MDCPUInformation cpu; + if (system_info.processor_architecture == MD_CPU_ARCHITECTURE_X86) { + D32(system_info.cpu.x86_cpu_info.vendor_id[0]); + D32(system_info.cpu.x86_cpu_info.vendor_id[1]); + D32(system_info.cpu.x86_cpu_info.vendor_id[2]); + D32(system_info.cpu.x86_cpu_info.version_information); + D32(system_info.cpu.x86_cpu_info.feature_information); + D32(system_info.cpu.x86_cpu_info.amd_extended_cpu_features); + } else if (system_info.processor_architecture == MD_CPU_ARCHITECTURE_ARM) { + D32(system_info.cpu.arm_cpu_info.cpuid); + D32(system_info.cpu.arm_cpu_info.elf_hwcaps); + } else { + D64(system_info.cpu.other_cpu_info.processor_features[0]); + D64(system_info.cpu.other_cpu_info.processor_features[1]); + } +} + +const MDRawSystemInfo SystemInfo::windows_x86 = { + MD_CPU_ARCHITECTURE_X86, // processor_architecture + 6, // processor_level + 0xd08, // processor_revision + 1, // number_of_processors + 1, // product_type + 5, // major_version + 1, // minor_version + 2600, // build_number + 2, // platform_id + 0xdeadbeef, // csd_version_rva + 0x100, // suite_mask + 0, // reserved2 + { // cpu + { // x86_cpu_info + { 0x756e6547, 0x49656e69, 0x6c65746e }, // vendor_id + 0x6d8, // version_information + 0xafe9fbff, // feature_information + 0xffffffff // amd_extended_cpu_features + } + } +}; + +const string SystemInfo::windows_x86_csd_version = "Service Pack 2"; + +String::String(const Dump &dump, const string &contents) : Section(dump) { + D32(contents.size() * 2); + for (string::const_iterator i = contents.begin(); i != contents.end(); i++) + D16(*i); +} + +void String::CiteStringIn(test_assembler::Section *section) const { + section->D32(file_offset_); +} + +void Memory::CiteMemoryIn(test_assembler::Section *section) const { + section->D64(address_); + CiteLocationIn(section); +} + 
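
The String constructor just above stores a minidump string as a 32-bit byte count followed by each character widened to a 16-bit code unit, and CiteStringIn later cites the string by file offset alone. As a standalone illustration (the helper below is not part of breakpad; it merely mirrors, under a little-endian assumption, the bytes those D32/D16 calls append):

```cpp
// Hypothetical standalone helper, not part of this change: it reproduces by
// hand the bytes that String(dump, contents) appends through D32() and D16().
#include <cstdint>
#include <cstdio>
#include <string>
#include <vector>

static std::vector<uint8_t> EncodeMinidumpString(const std::string& contents) {
  std::vector<uint8_t> out;
  const uint32_t byte_len = static_cast<uint32_t>(contents.size()) * 2;
  for (int i = 0; i < 4; ++i)                  // D32(contents.size() * 2)
    out.push_back(static_cast<uint8_t>((byte_len >> (8 * i)) & 0xff));
  for (unsigned char c : contents) {           // D16(*i) for each character
    out.push_back(c);                          // low byte: the character
    out.push_back(0);                          // high byte: zero (UTF-16)
  }
  return out;
}

int main() {
  for (uint8_t b : EncodeMinidumpString("Hi"))
    std::printf("%02x ", static_cast<unsigned>(b));  // 04 00 00 00 48 00 69 00
  std::printf("\n");
  return 0;
}
```
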
+Context::Context(const Dump &dump, const MDRawContextX86 &context) + : Section(dump) { + // The caller should have properly set the CPU type flag. + // The high 24 bits identify the CPU. Note that context records with no CPU + // type information can be valid (e.g. produced by ::RtlCaptureContext). + assert(((context.context_flags & MD_CONTEXT_CPU_MASK) == 0) || + (context.context_flags & MD_CONTEXT_X86)); + // It doesn't make sense to store x86 registers in big-endian form. + assert(dump.endianness() == kLittleEndian); + D32(context.context_flags); + D32(context.dr0); + D32(context.dr1); + D32(context.dr2); + D32(context.dr3); + D32(context.dr6); + D32(context.dr7); + D32(context.float_save.control_word); + D32(context.float_save.status_word); + D32(context.float_save.tag_word); + D32(context.float_save.error_offset); + D32(context.float_save.error_selector); + D32(context.float_save.data_offset); + D32(context.float_save.data_selector); + // context.float_save.register_area[] contains 8-bit quantities and + // does not need to be swapped. + Append(context.float_save.register_area, + sizeof(context.float_save.register_area)); + D32(context.float_save.cr0_npx_state); + D32(context.gs); + D32(context.fs); + D32(context.es); + D32(context.ds); + D32(context.edi); + D32(context.esi); + D32(context.ebx); + D32(context.edx); + D32(context.ecx); + D32(context.eax); + D32(context.ebp); + D32(context.eip); + D32(context.cs); + D32(context.eflags); + D32(context.esp); + D32(context.ss); + // context.extended_registers[] contains 8-bit quantities and does + // not need to be swapped. + Append(context.extended_registers, sizeof(context.extended_registers)); + assert(Size() == sizeof(MDRawContextX86)); +} + +Context::Context(const Dump &dump, const MDRawContextARM &context) + : Section(dump) { + // The caller should have properly set the CPU type flag. + assert((context.context_flags & MD_CONTEXT_ARM) || + (context.context_flags & MD_CONTEXT_ARM_OLD)); + // It doesn't make sense to store ARM registers in big-endian form. + assert(dump.endianness() == kLittleEndian); + D32(context.context_flags); + for (int i = 0; i < MD_CONTEXT_ARM_GPR_COUNT; ++i) + D32(context.iregs[i]); + D32(context.cpsr); + D64(context.float_save.fpscr); + for (int i = 0; i < MD_FLOATINGSAVEAREA_ARM_FPR_COUNT; ++i) + D64(context.float_save.regs[i]); + for (int i = 0; i < MD_FLOATINGSAVEAREA_ARM_FPEXTRA_COUNT; ++i) + D32(context.float_save.extra[i]); + assert(Size() == sizeof(MDRawContextARM)); +} + +Context::Context(const Dump &dump, const MDRawContextMIPS &context) + : Section(dump) { + // The caller should have properly set the CPU type flag. 
+ assert(context.context_flags & MD_CONTEXT_MIPS); + D32(context.context_flags); + D32(context._pad0); + + for (int i = 0; i < MD_CONTEXT_MIPS_GPR_COUNT; ++i) + D64(context.iregs[i]); + + D64(context.mdhi); + D64(context.mdlo); + + for (int i = 0; i < MD_CONTEXT_MIPS_DSP_COUNT; ++i) + D32(context.hi[i]); + + for (int i = 0; i < MD_CONTEXT_MIPS_DSP_COUNT; ++i) + D32(context.lo[i]); + + D32(context.dsp_control); + D32(context._pad1); + + D64(context.epc); + D64(context.badvaddr); + D32(context.status); + D32(context.cause); + + for (int i = 0; i < MD_FLOATINGSAVEAREA_MIPS_FPR_COUNT; ++i) + D64(context.float_save.regs[i]); + + D32(context.float_save.fpcsr); + D32(context.float_save.fir); + + assert(Size() == sizeof(MDRawContextMIPS)); +} + +Thread::Thread(const Dump &dump, + uint32_t thread_id, const Memory &stack, const Context &context, + uint32_t suspend_count, uint32_t priority_class, + uint32_t priority, uint64_t teb) : Section(dump) { + D32(thread_id); + D32(suspend_count); + D32(priority_class); + D32(priority); + D64(teb); + stack.CiteMemoryIn(this); + context.CiteLocationIn(this); + assert(Size() == sizeof(MDRawThread)); +} + +Module::Module(const Dump &dump, + uint64_t base_of_image, + uint32_t size_of_image, + const String &name, + uint32_t time_date_stamp, + uint32_t checksum, + const MDVSFixedFileInfo &version_info, + const Section *cv_record, + const Section *misc_record) : Section(dump) { + D64(base_of_image); + D32(size_of_image); + D32(checksum); + D32(time_date_stamp); + name.CiteStringIn(this); + D32(version_info.signature); + D32(version_info.struct_version); + D32(version_info.file_version_hi); + D32(version_info.file_version_lo); + D32(version_info.product_version_hi); + D32(version_info.product_version_lo); + D32(version_info.file_flags_mask); + D32(version_info.file_flags); + D32(version_info.file_os); + D32(version_info.file_type); + D32(version_info.file_subtype); + D32(version_info.file_date_hi); + D32(version_info.file_date_lo); + cv_record->CiteLocationIn(this); + misc_record->CiteLocationIn(this); + D64(0).D64(0); +} + +const MDVSFixedFileInfo Module::stock_version_info = { + MD_VSFIXEDFILEINFO_SIGNATURE, // signature + MD_VSFIXEDFILEINFO_VERSION, // struct_version + 0x11111111, // file_version_hi + 0x22222222, // file_version_lo + 0x33333333, // product_version_hi + 0x44444444, // product_version_lo + MD_VSFIXEDFILEINFO_FILE_FLAGS_DEBUG, // file_flags_mask + MD_VSFIXEDFILEINFO_FILE_FLAGS_DEBUG, // file_flags + MD_VSFIXEDFILEINFO_FILE_OS_NT | MD_VSFIXEDFILEINFO_FILE_OS__WINDOWS32, + // file_os + MD_VSFIXEDFILEINFO_FILE_TYPE_APP, // file_type + MD_VSFIXEDFILEINFO_FILE_SUBTYPE_UNKNOWN, // file_subtype + 0, // file_date_hi + 0 // file_date_lo +}; + +Exception::Exception(const Dump &dump, + const Context &context, + uint32_t thread_id, + uint32_t exception_code, + uint32_t exception_flags, + uint64_t exception_address) + : Stream(dump, MD_EXCEPTION_STREAM) { + D32(thread_id); + D32(0); // __align + D32(exception_code); + D32(exception_flags); + D64(0); // exception_record + D64(exception_address); + D32(0); // number_parameters + D32(0); // __align + for (int i = 0; i < MD_EXCEPTION_MAXIMUM_PARAMETERS; ++i) + D64(0); // exception_information + context.CiteLocationIn(this); + assert(Size() == sizeof(MDRawExceptionStream)); +} + +Dump::Dump(uint64_t flags, + Endianness endianness, + uint32_t version, + uint32_t date_time_stamp) + : test_assembler::Section(endianness), + file_start_(0), + stream_directory_(*this), + stream_count_(0), + thread_list_(*this, 
MD_THREAD_LIST_STREAM), + module_list_(*this, MD_MODULE_LIST_STREAM), + memory_list_(*this, MD_MEMORY_LIST_STREAM) + { + D32(MD_HEADER_SIGNATURE); + D32(version); + D32(stream_count_label_); + D32(stream_directory_rva_); + D32(0); + D32(date_time_stamp); + D64(flags); + assert(Size() == sizeof(MDRawHeader)); +} + +Dump &Dump::Add(SynthMinidump::Section *section) { + section->Finish(file_start_ + Size()); + Append(*section); + return *this; +} + +Dump &Dump::Add(Stream *stream) { + Add(static_cast(stream)); + stream->CiteStreamIn(&stream_directory_); + stream_count_++; + return *this; +} + +Dump &Dump::Add(Memory *memory) { + // Add the memory contents themselves to the file. + Add(static_cast(memory)); + + // The memory list is a list of MDMemoryDescriptors, not of actual + // memory elements. Produce a descriptor, and add that to the list. + SynthMinidump::Section descriptor(*this); + memory->CiteMemoryIn(&descriptor); + memory_list_.Add(&descriptor); + return *this; +} + +Dump &Dump::Add(Thread *thread) { + thread_list_.Add(thread); + return *this; +} + +Dump &Dump::Add(Module *module) { + module_list_.Add(module); + return *this; +} + +void Dump::Finish() { + if (!thread_list_.Empty()) Add(&thread_list_); + if (!module_list_.Empty()) Add(&module_list_); + if (!memory_list_.Empty()) Add(&memory_list_); + + // Create the stream directory. We don't use + // stream_directory_.Finish here, because the stream directory isn't + // cited using a location descriptor; rather, the Minidump header + // has the stream count and MDRVA. + stream_count_label_ = stream_count_; + stream_directory_rva_ = file_start_ + Size(); + Append(static_cast(stream_directory_)); +} + +} // namespace SynthMinidump + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/synth_minidump.h b/TMessagesProj/jni/third_party/breakpad/src/processor/synth_minidump.h new file mode 100644 index 0000000000..8dac8784e5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/synth_minidump.h @@ -0,0 +1,372 @@ +// -*- mode: C++ -*- + +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// synth_minidump.h: Interface to SynthMinidump: fake minidump generator. +// +// We treat a minidump file as the concatenation of a bunch of +// test_assembler::Sections. The file header, stream directory, +// streams, memory regions, strings, and so on --- each is a Section +// that eventually gets appended to the minidump. Dump, Memory, +// Context, Thread, and so on all inherit from test_assembler::Section. +// For example: +// +// using google_breakpad::test_assembler::kLittleEndian; +// using google_breakpad::SynthMinidump::Context; +// using google_breakpad::SynthMinidump::Dump; +// using google_breakpad::SynthMinidump::Memory; +// using google_breakpad::SynthMinidump::Thread; +// +// Dump minidump(MD_NORMAL, kLittleEndian); +// +// Memory stack1(minidump, 0x569eb0a9); +// ... build contents of stack1 with test_assembler::Section functions ... +// +// MDRawContextX86 x86_context1; +// x86_context1.context_flags = MD_CONTEXT_X86; +// x86_context1.eip = 0x7c90eb94; +// x86_context1.esp = 0x569eb0a9; +// x86_context1.ebp = x86_context1.esp + something appropriate; +// Context context1(minidump, x86_context1); +// +// Thread thread1(minidump, 0xe4a4821d, stack1, context1); +// +// minidump.Add(&stack1); +// minidump.Add(&context1); +// minidump.Add(&thread1); +// minidump.Finish(); +// +// string contents; +// EXPECT_TRUE(minidump.GetContents(&contents)); +// // contents now holds the bytes of a minidump file +// +// Because the test_assembler classes let us write Label references to +// sections before the Labels' values are known, this gives us +// flexibility in how we put the dump together: minidump pieces can +// hold the file offsets of other minidump pieces before the +// referents' positions have been decided. As long as everything has +// been placed by the time we call dump.GetContents to obtain the +// bytes, all the Labels' values will be known, and everything will +// get patched up appropriately. +// +// The dump.Add(thing) functions append THINGS's contents to the +// minidump, but they also do two other things: +// +// - dump.Add(thing) invokes thing->Finish, which tells *thing the +// offset within the file at which it was placed, and allows *thing +// to do any final content generation. +// +// - If THING is something which should receive an entry in some sort +// of list or directory, then dump.Add(THING) automatically creates +// the appropriate directory or list entry. Streams must appear in +// the stream directory; memory ranges should be listed in the +// memory list; threads should be placed in the thread list; and so +// on. +// +// By convention, Section subclass constructors that take references +// to other Sections do not take care of 'Add'ing their arguments to +// the dump. For example, although the Thread constructor takes +// references to a Memory and a Context, it does not add them to the +// dump on the caller's behalf. 
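// As a hedged, concrete variant of the sketch above (register values and IDs
// are arbitrary; it assumes the gtest setup and using-declarations from
// synth_minidump_unittest.cc later in this patch):
//
//   Dump dump(0, kLittleEndian);
//
//   Memory stack(dump, 0x569eb0a9);
//   stack.Append("fake stack contents");
//
//   MDRawContextX86 raw;
//   memset(&raw, 0, sizeof(raw));            // zero everything we don't set
//   raw.context_flags = MD_CONTEXT_X86;
//   raw.eip = 0x7c90eb94;
//   raw.esp = 0x569eb0a9;
//   Context context(dump, raw);
//
//   Thread thread(dump, 0xe4a4821d, stack, context);
//
//   dump.Add(&stack);                        // the Thread only *cites* these,
//   dump.Add(&context);                      // so each one is still Add'ed here
//   dump.Add(&thread);
//   dump.Finish();
//
//   string contents;
//   EXPECT_TRUE(dump.GetContents(&contents));  // bytes of a complete minidump
//
// As the snippet shows, the constructor never Adds its arguments for us.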
Rather, the caller is responsible for +// 'Add'ing every section they create. This allows Sections to be +// cited from more than one place; for example, Memory ranges are +// cited both from Thread objects (as their stack contents) and by the +// memory list stream. +// +// If you forget to Add some Section, the Dump::GetContents call will +// fail, as the test_assembler::Labels used to cite the Section's +// contents from elsewhere will still be undefined. +#ifndef PROCESSOR_SYNTH_MINIDUMP_H_ +#define PROCESSOR_SYNTH_MINIDUMP_H_ + +#include + +#include +#include + +#include "common/test_assembler.h" +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" +#include "google_breakpad/common/minidump_format.h" + +namespace google_breakpad { + +namespace SynthMinidump { + +using test_assembler::Endianness; +using test_assembler::kBigEndian; +using test_assembler::kLittleEndian; +using test_assembler::kUnsetEndian; +using test_assembler::Label; + +class Dump; +class Memory; +class String; + +// A test_assembler::Section which will be appended to a minidump. +class Section: public test_assembler::Section { + public: + explicit Section(const Dump &dump); + + // Append an MDLocationDescriptor referring to this section to SECTION. + // If 'this' is NULL, append a descriptor with a zero length and MDRVA. + // + // (I couldn't find the language in the C++ standard that says that + // invoking member functions of a NULL pointer to a class type is + // bad, if such language exists. Having this function handle NULL + // 'this' is convenient, but if it causes trouble, it's not hard to + // do differently.) + void CiteLocationIn(test_assembler::Section *section) const; + + // Note that this section's contents are complete, and that it has + // been placed in the minidump file at OFFSET. The 'Add' member + // functions call the Finish member function of the object being + // added for you; if you are 'Add'ing this section, you needn't Finish it. + virtual void Finish(const Label &offset) { + file_offset_ = offset; size_ = Size(); + } + + protected: + // This section's size and offset within the minidump file. + Label file_offset_, size_; +}; + +// A stream within a minidump file. 'Add'ing a stream to a minidump +// creates an entry for it in the minidump's stream directory. +class Stream: public Section { + public: + // Create a stream of type TYPE. You can append whatever contents + // you like to this stream using the test_assembler::Section methods. + Stream(const Dump &dump, uint32_t type) : Section(dump), type_(type) { } + + // Append an MDRawDirectory referring to this stream to SECTION. + void CiteStreamIn(test_assembler::Section *section) const; + + private: + // The type of this stream. + uint32_t type_; +}; + +class SystemInfo: public Stream { + public: + // Create an MD_SYSTEM_INFO_STREAM stream belonging to DUMP holding + // an MDRawSystem info structure initialized with the values from + // SYSTEM_INFO, except that the csd_version field is replaced with + // the file offset of the string CSD_VERSION, which can be 'Add'ed + // to the dump at the desired location. + // + // Remember that you are still responsible for 'Add'ing CSD_VERSION + // to the dump yourself. + SystemInfo(const Dump &dump, + const MDRawSystemInfo &system_info, + const String &csd_version); + + // Stock MDRawSystemInfo information and associated strings, for + // writing tests. 
+ static const MDRawSystemInfo windows_x86; + static const string windows_x86_csd_version; +}; + +// An MDString: a string preceded by a 32-bit length. +class String: public Section { + public: + String(const Dump &dump, const string &value); + + // Append an MDRVA referring to this string to SECTION. + void CiteStringIn(test_assembler::Section *section) const; +}; + +// A range of memory contents. 'Add'ing a memory range to a minidump +// creates n entry for it in the minidump's memory list. By +// convention, the 'start', 'Here', and 'Mark' member functions refer +// to memory addresses. +class Memory: public Section { + public: + Memory(const Dump &dump, uint64_t address) + : Section(dump), address_(address) { start() = address; } + + // Append an MDMemoryDescriptor referring to this memory range to SECTION. + void CiteMemoryIn(test_assembler::Section *section) const; + + private: + // The process address from which these memory contents were taken. + // Shouldn't this be a Label? + uint64_t address_; +}; + +class Context: public Section { + public: + // Create a context belonging to DUMP whose contents are a copy of CONTEXT. + Context(const Dump &dump, const MDRawContextX86 &context); + Context(const Dump &dump, const MDRawContextARM &context); + Context(const Dump &dump, const MDRawContextMIPS &context); + // Add an empty context to the dump. + Context(const Dump &dump) : Section(dump) {} + // Add constructors for other architectures here. Remember to byteswap. +}; + +class Thread: public Section { + public: + // Create a thread belonging to DUMP with the given values, citing + // STACK and CONTEXT (which you must Add to the dump separately). + Thread(const Dump &dump, + uint32_t thread_id, + const Memory &stack, + const Context &context, + uint32_t suspend_count = 0, + uint32_t priority_class = 0, + uint32_t priority = 0, + uint64_t teb = 0); +}; + +class Module: public Section { + public: + // Create a module with the given values. Note that CV_RECORD and + // MISC_RECORD can be NULL, in which case the corresponding location + // descriptior in the minidump will have a length of zero. + Module(const Dump &dump, + uint64_t base_of_image, + uint32_t size_of_image, + const String &name, + uint32_t time_date_stamp = 1262805309, + uint32_t checksum = 0, + const MDVSFixedFileInfo &version_info = Module::stock_version_info, + const Section *cv_record = NULL, + const Section *misc_record = NULL); + + private: + // A standard MDVSFixedFileInfo structure to use as a default for + // minidumps. There's no reason to make users write out all this crap + // over and over. + static const MDVSFixedFileInfo stock_version_info; +}; + +class Exception : public Stream { +public: + Exception(const Dump &dump, + const Context &context, + uint32_t thread_id = 0, + uint32_t exception_code = 0, + uint32_t exception_flags = 0, + uint64_t exception_address = 0); +}; + +// A list of entries starting with a 32-bit count, like a memory list +// or a thread list. +template +class List: public Stream { + public: + List(const Dump &dump, uint32_t type) : Stream(dump, type), count_(0) { + D32(count_label_); + } + + // Add ELEMENT to this list. + void Add(Element *element) { + element->Finish(file_offset_ + Size()); + Append(*element); + count_++; + } + + // Return true if this List is empty, false otherwise. + bool Empty() { return count_ == 0; } + + // Finish up the contents of this section, mark it as having been + // placed at OFFSET. 
+ virtual void Finish(const Label &offset) { + Stream::Finish(offset); + count_label_ = count_; + } + + private: + size_t count_; + Label count_label_; +}; + +class Dump: public test_assembler::Section { + public: + + // Create a test_assembler::Section containing a minidump file whose + // header uses the given values. ENDIANNESS determines the + // endianness of the signature; we set this section's default + // endianness by this. + Dump(uint64_t flags, + Endianness endianness = kLittleEndian, + uint32_t version = MD_HEADER_VERSION, + uint32_t date_time_stamp = 1262805309); + + // The following functions call OBJECT->Finish(), and append the + // contents of OBJECT to this minidump. They also record OBJECT in + // whatever directory or list is appropriate for its type. The + // stream directory, memory list, thread list, and module list are + // accumulated this way. + Dump &Add(SynthMinidump::Section *object); // simply append data + Dump &Add(Stream *object); // append, record in stream directory + Dump &Add(Memory *object); // append, record in memory list + Dump &Add(Thread *object); // append, record in thread list + Dump &Add(Module *object); // append, record in module list + + // Complete the construction of the minidump, given the Add calls + // we've seen up to this point. After this call, this Dump's + // contents are complete, all labels should be defined if everything + // Cited has been Added, and you may call GetContents on it. + void Finish(); + + private: + // A label representing the start of the minidump file. + Label file_start_; + + // The stream directory. We construct this incrementally from + // Add(Stream *) calls. + SynthMinidump::Section stream_directory_; // The directory's contents. + size_t stream_count_; // The number of streams so far. + Label stream_count_label_; // Cited in file header. + Label stream_directory_rva_; // The directory's file offset. + + // This minidump's thread list. We construct this incrementally from + // Add(Thread *) calls. + List thread_list_; + + // This minidump's module list. We construct this incrementally from + // Add(Module *) calls. + List module_list_; + + // This minidump's memory list. We construct this incrementally from + // Add(Memory *) calls. This is actually a list of MDMemoryDescriptors, + // not memory ranges --- thus the odd type. + List memory_list_; +}; + +} // namespace SynthMinidump + +} // namespace google_breakpad + +#endif // PROCESSOR_SYNTH_MINIDUMP_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/synth_minidump_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/synth_minidump_unittest.cc new file mode 100644 index 0000000000..8835b44933 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/synth_minidump_unittest.cc @@ -0,0 +1,336 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Original author: Jim Blandy + +// synth_minidump_unittest.cc: Unit tests for google_breakpad::SynthMinidump +// classes. + +#include +#include + +#include "breakpad_googletest_includes.h" +#include "common/using_std_string.h" +#include "google_breakpad/common/minidump_format.h" +#include "processor/synth_minidump.h" +#include "processor/synth_minidump_unittest_data.h" + +using google_breakpad::SynthMinidump::Context; +using google_breakpad::SynthMinidump::Dump; +using google_breakpad::SynthMinidump::Exception; +using google_breakpad::SynthMinidump::List; +using google_breakpad::SynthMinidump::Memory; +using google_breakpad::SynthMinidump::Module; +using google_breakpad::SynthMinidump::Section; +using google_breakpad::SynthMinidump::Stream; +using google_breakpad::SynthMinidump::String; +using google_breakpad::SynthMinidump::SystemInfo; +using google_breakpad::test_assembler::kBigEndian; +using google_breakpad::test_assembler::kLittleEndian; +using google_breakpad::test_assembler::Label; + +TEST(Section, Simple) { + Dump dump(0); + Section section(dump); + section.L32(0x12345678); + section.Finish(0); + string contents; + ASSERT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("\x78\x56\x34\x12", 4), contents); +} + +TEST(Section, CiteLocationIn) { + Dump dump(0, kBigEndian); + Section section1(dump), section2(dump); + section1.Append("order"); + section2.Append("mayhem"); + section2.Finish(0x32287ec2); + section2.CiteLocationIn(§ion1); + string contents; + ASSERT_TRUE(section1.GetContents(&contents)); + string expected("order\0\0\0\x06\x32\x28\x7e\xc2", 13); + EXPECT_EQ(expected, contents); +} + +TEST(Stream, CiteStreamIn) { + Dump dump(0, kLittleEndian); + Stream stream(dump, 0x40cae2b3); + Section section(dump); + stream.Append("stream contents"); + section.Append("section contents"); + stream.Finish(0x41424344); + stream.CiteStreamIn(§ion); + string contents; + ASSERT_TRUE(section.GetContents(&contents)); + string expected("section contents" + "\xb3\xe2\xca\x40" + "\x0f\0\0\0" + "\x44\x43\x42\x41", + 16 + 4 + 4 + 4); + EXPECT_EQ(expected, contents); +} + +TEST(Memory, CiteMemoryIn) { + Dump dump(0, kBigEndian); + Memory memory(dump, 0x76d010874ab019f9ULL); + Section section(dump); + memory.Append("memory contents"); + section.Append("section contents"); + memory.Finish(0x51525354); + memory.CiteMemoryIn(§ion); + string contents; + ASSERT_TRUE(section.GetContents(&contents)); + string expected("section contents" + "\x76\xd0\x10\x87\x4a\xb0\x19\xf9" + "\0\0\0\x0f" + "\x51\x52\x53\x54", + 16 + 8 + 4 + 4); + EXPECT_EQ(contents, 
expected); +} + +TEST(Memory, Here) { + Dump dump(0, kBigEndian); + Memory memory(dump, 0x89979731eb060ed4ULL); + memory.Append(1729, 42); + Label l = memory.Here(); + ASSERT_EQ(0x89979731eb060ed4ULL + 1729, l.Value()); +} + +TEST(Context, X86) { + Dump dump(0, kLittleEndian); + assert(x86_raw_context.context_flags & MD_CONTEXT_X86); + Context context(dump, x86_raw_context); + string contents; + ASSERT_TRUE(context.GetContents(&contents)); + EXPECT_EQ(sizeof(x86_expected_contents), contents.size()); + EXPECT_TRUE(memcmp(contents.data(), x86_expected_contents, contents.size()) + == 0); +} + +TEST(Context, ARM) { + Dump dump(0, kLittleEndian); + assert(arm_raw_context.context_flags & MD_CONTEXT_ARM); + Context context(dump, arm_raw_context); + string contents; + ASSERT_TRUE(context.GetContents(&contents)); + EXPECT_EQ(sizeof(arm_expected_contents), contents.size()); + EXPECT_TRUE(memcmp(contents.data(), arm_expected_contents, contents.size()) + == 0); +} + +TEST(ContextDeathTest, X86BadFlags) { + Dump dump(0, kLittleEndian); + MDRawContextX86 raw; + raw.context_flags = MD_CONTEXT_AMD64; + ASSERT_DEATH(Context context(dump, raw);, + "context\\.context_flags & (0x[0-9a-f]+|MD_CONTEXT_X86)"); +} + +TEST(ContextDeathTest, X86BadEndianness) { + Dump dump(0, kBigEndian); + MDRawContextX86 raw; + raw.context_flags = MD_CONTEXT_X86; + ASSERT_DEATH(Context context(dump, raw);, + "dump\\.endianness\\(\\) == kLittleEndian"); +} + +TEST(Thread, Simple) { + Dump dump(0, kLittleEndian); + Context context(dump, x86_raw_context); + context.Finish(0x8665da0c); + Memory stack(dump, 0xaad55a93cc3c0efcULL); + stack.Append("stack contents"); + stack.Finish(0xe08cdbd1); + google_breakpad::SynthMinidump::Thread thread( + dump, 0x3d7ec360, stack, context, + 0x3593f44d, // suspend count + 0xab352b82, // priority class + 0x2753d838, // priority + 0xeb2de4be3f29e3e9ULL); // thread environment block + string contents; + ASSERT_TRUE(thread.GetContents(&contents)); + static const uint8_t expected_bytes[] = { + 0x60, 0xc3, 0x7e, 0x3d, // thread id + 0x4d, 0xf4, 0x93, 0x35, // suspend count + 0x82, 0x2b, 0x35, 0xab, // priority class + 0x38, 0xd8, 0x53, 0x27, // priority + 0xe9, 0xe3, 0x29, 0x3f, 0xbe, 0xe4, 0x2d, 0xeb, // thread environment block + 0xfc, 0x0e, 0x3c, 0xcc, 0x93, 0x5a, 0xd5, 0xaa, // stack address + 0x0e, 0x00, 0x00, 0x00, // stack size + 0xd1, 0xdb, 0x8c, 0xe0, // stack MDRVA + 0xcc, 0x02, 0x00, 0x00, // context size + 0x0c, 0xda, 0x65, 0x86 // context MDRVA + }; + EXPECT_EQ(sizeof(expected_bytes), contents.size()); + EXPECT_TRUE(memcmp(contents.data(), expected_bytes, contents.size()) == 0); +} + +TEST(Exception, Simple) { + Dump dump(0, kLittleEndian); + Context context(dump, x86_raw_context); + context.Finish(0x8665da0c); + + Exception exception(dump, context, + 0x1234abcd, // thread id + 0xdcba4321, // exception code + 0xf0e0d0c0, // exception flags + 0x0919a9b9c9d9e9f9ULL); // exception address + string contents; + ASSERT_TRUE(exception.GetContents(&contents)); + static const uint8_t expected_bytes[] = { + 0xcd, 0xab, 0x34, 0x12, // thread id + 0x00, 0x00, 0x00, 0x00, // __align + 0x21, 0x43, 0xba, 0xdc, // exception code + 0xc0, 0xd0, 0xe0, 0xf0, // exception flags + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exception record + 0xf9, 0xe9, 0xd9, 0xc9, 0xb9, 0xa9, 0x19, 0x09, // exception address + 0x00, 0x00, 0x00, 0x00, // number parameters + 0x00, 0x00, 0x00, 0x00, // __align + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exception_information + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, // exception_information + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exception_information + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exception_information + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exception_information + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exception_information + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exception_information + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exception_information + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exception_information + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exception_information + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exception_information + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exception_information + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exception_information + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exception_information + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exception_information + 0xcc, 0x02, 0x00, 0x00, // context size + 0x0c, 0xda, 0x65, 0x86 // context MDRVA + }; + EXPECT_EQ(sizeof(expected_bytes), contents.size()); + EXPECT_TRUE(memcmp(contents.data(), expected_bytes, contents.size()) == 0); +} + +TEST(String, Simple) { + Dump dump(0, kBigEndian); + String s(dump, "All mimsy were the borogoves"); + string contents; + ASSERT_TRUE(s.GetContents(&contents)); + static const char expected[] = + "\x00\x00\x00\x38\0A\0l\0l\0 \0m\0i\0m\0s\0y\0 \0w\0e\0r\0e" + "\0 \0t\0h\0e\0 \0b\0o\0r\0o\0g\0o\0v\0e\0s"; + string expected_string(expected, sizeof(expected) - 1); + EXPECT_EQ(expected_string, contents); +} + +TEST(String, CiteStringIn) { + Dump dump(0, kLittleEndian); + String s(dump, "and the mome wraths outgrabe"); + Section section(dump); + section.Append("initial"); + s.CiteStringIn(§ion); + s.Finish(0xdc2bb469); + string contents; + ASSERT_TRUE(section.GetContents(&contents)); + EXPECT_EQ(string("initial\x69\xb4\x2b\xdc", 7 + 4), contents); +} + +TEST(List, Empty) { + Dump dump(0, kBigEndian); + List
list(dump, 0x2442779c); + EXPECT_TRUE(list.Empty()); + list.Finish(0x84e09808); + string contents; + ASSERT_TRUE(list.GetContents(&contents)); + EXPECT_EQ(string("\0\0\0\0", 4), contents); +} + +TEST(List, Two) { + Dump dump(0, kBigEndian); + List
list(dump, 0x26c9f498); + Section section1(dump); + section1.Append("section one contents"); + EXPECT_TRUE(list.Empty()); + list.Add(§ion1); + EXPECT_FALSE(list.Empty()); + Section section2(dump); + section2.Append("section two contents"); + list.Add(§ion2); + list.Finish(0x1e5bb60e); + string contents; + ASSERT_TRUE(list.GetContents(&contents)); + EXPECT_EQ(string("\0\0\0\x02section one contentssection two contents", 44), + contents); +} + +TEST(Dump, Header) { + Dump dump(0x9f738b33685cc84cULL, kLittleEndian, 0xb3817faf, 0x2c741c0a); + dump.Finish(); + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + ASSERT_EQ(string("\x4d\x44\x4d\x50" // signature + "\xaf\x7f\x81\xb3" // version + "\0\0\0\0" // stream count + "\x20\0\0\0" // directory RVA (could be anything) + "\0\0\0\0" // checksum + "\x0a\x1c\x74\x2c" // time_date_stamp + "\x4c\xc8\x5c\x68\x33\x8b\x73\x9f", // flags + 32), + contents); +} + +TEST(Dump, HeaderBigEndian) { + Dump dump(0x206ce3cc6fb8e0f0ULL, kBigEndian, 0x161693e2, 0x35667744); + dump.Finish(); + string contents; + ASSERT_TRUE(dump.GetContents(&contents)); + ASSERT_EQ(string("\x50\x4d\x44\x4d" // signature + "\x16\x16\x93\xe2" // version + "\0\0\0\0" // stream count + "\0\0\0\x20" // directory RVA (could be anything) + "\0\0\0\0" // checksum + "\x35\x66\x77\x44" // time_date_stamp + "\x20\x6c\xe3\xcc\x6f\xb8\xe0\xf0", // flags + 32), + contents); +} + +TEST(Dump, OneSection) { + Dump dump(0, kLittleEndian); + Section section(dump); + section.Append("section contents"); + dump.Add(§ion); + dump.Finish(); + string dump_contents; + // Just check for undefined labels; don't worry about the contents. + ASSERT_TRUE(dump.GetContents(&dump_contents)); + + Section referencing_section(dump); + section.CiteLocationIn(&referencing_section); + string contents; + ASSERT_TRUE(referencing_section.GetContents(&contents)); + ASSERT_EQ(string("\x10\0\0\0\x20\0\0\0", 8), contents); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/synth_minidump_unittest_data.h b/TMessagesProj/jni/third_party/breakpad/src/processor/synth_minidump_unittest_data.h new file mode 100644 index 0000000000..3403372e6c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/synth_minidump_unittest_data.h @@ -0,0 +1,418 @@ +// -*- mode: C++ -*- + +// Not copyrightable: random test data. +// synth_minidump_unittest_data.h: verbose test data for SynthMinidump tests. 
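// A hedged aside on how the "expected contents" arrays in this header are
// derived: they are simply the corresponding raw context structs re-serialized
// with the Dump's endianness by the D32/D64/Append calls in synth_minidump.cc.
// For instance (gtest-style, reusing the unittest's includes and usings):
//
//   Dump dump(0, kLittleEndian);
//   Section section(dump);
//   section.D32(0xded5d71b);       // x86_raw_context.context_flags, below
//   section.Finish(0);
//   string contents;
//   ASSERT_TRUE(section.GetContents(&contents));
//   EXPECT_EQ(string("\x1b\xd7\xd5\xde", 4), contents);
//                                  // ...the first four bytes of
//                                  // x86_expected_contents, below.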
+ +#ifndef PROCESSOR_SYNTH_MINIDUMP_UNITTEST_DATA_H_ +#define PROCESSOR_SYNTH_MINIDUMP_UNITTEST_DATA_H_ + +#include "google_breakpad/common/minidump_format.h" + +static const MDRawContextX86 x86_raw_context = { + 0xded5d71b, // context_flags + 0x9fdb432e, // dr0 + 0x26b7a81a, // dr1 + 0xcac7e348, // dr2 + 0xcf99ec09, // dr3 + 0x7dc8c2cd, // dr6 + 0x21deb880, // dr7 + + // float_save + { + 0x8a5d2bb0, // control_word + 0x0286c4c9, // status_word + 0xf1feea21, // tag_word + 0xb2d40576, // error_offset + 0x48146cde, // error_selector + 0x983f9b21, // data_offset + 0x475be12c, // data_selector + + // register_area + { + 0xd9, 0x04, 0x20, 0x6b, 0x88, 0x3a, 0x3f, 0xd5, + 0x59, 0x7a, 0xa9, 0xeb, 0xd0, 0x5c, 0xdf, 0xfe, + 0xad, 0xdd, 0x4a, 0x8b, 0x10, 0xcc, 0x9a, 0x33, + 0xcb, 0xb6, 0xf7, 0x86, 0xcd, 0x69, 0x25, 0xae, + 0x25, 0xe5, 0x7a, 0xa1, 0x8f, 0xb2, 0x84, 0xd9, + 0xf7, 0x2d, 0x8a, 0xa1, 0x80, 0x81, 0x7f, 0x67, + 0x07, 0xa8, 0x23, 0xf1, 0x8c, 0xdc, 0xd8, 0x04, + 0x8b, 0x9d, 0xb1, 0xcd, 0x61, 0x0c, 0x9c, 0x69, + 0xc7, 0x8d, 0x17, 0xb6, 0xe5, 0x0b, 0x94, 0xf7, + 0x78, 0x9b, 0x63, 0x49, 0xba, 0xfc, 0x08, 0x4d + }, + + 0x84c53a90, // cr0_npx_state + }, + + 0x79f71e76, // gs + 0x8107bd25, // fs + 0x452d2921, // es + 0x87ec2875, // ds + 0xf8bb73f5, // edi + 0xa63ebb88, // esi + 0x95d35ebe, // ebx + 0x17aa2456, // edx + 0x135fa208, // ecx + 0x500615e6, // eax + 0x66d14205, // ebp + 0x000719a5, // eip + 0x477b481b, // cs + 0x8684dfba, // eflags + 0xe33ccddf, // esp + 0xc0e65d33, // ss + + // extended_registers + { + 0x68, 0x63, 0xdf, 0x50, 0xf7, 0x3b, 0xe8, 0xe5, + 0xcb, 0xd6, 0x66, 0x60, 0xe5, 0xa3, 0x58, 0xb3, + 0x6f, 0x34, 0xca, 0x02, 0x9b, 0x5f, 0xd0, 0x41, + 0xbd, 0xc5, 0x2d, 0xf8, 0xff, 0x15, 0xa2, 0xd0, + 0xe3, 0x2b, 0x3b, 0x8a, 0x9f, 0xc3, 0x9e, 0x28, + 0x0a, 0xc2, 0xac, 0x3b, 0x67, 0x37, 0x01, 0xfd, + 0xc3, 0xaf, 0x60, 0xf6, 0x2c, 0x4f, 0xa9, 0x52, + 0x92, 0xe5, 0x28, 0xde, 0x34, 0xb6, 0x2e, 0x44, + 0x15, 0xa4, 0xb6, 0xe4, 0xc9, 0x1a, 0x14, 0xb9, + 0x51, 0x33, 0x3c, 0xe0, 0xc7, 0x94, 0xf0, 0xf7, + 0x78, 0xdd, 0xe5, 0xca, 0xb7, 0xa6, 0xe0, 0x14, + 0xa6, 0x03, 0xab, 0x77, 0xad, 0xbd, 0xd2, 0x53, + 0x3d, 0x07, 0xe7, 0xaf, 0x90, 0x44, 0x71, 0xbe, + 0x0c, 0xdf, 0x2b, 0x97, 0x40, 0x48, 0xd5, 0xf9, + 0x62, 0x03, 0x91, 0x84, 0xd6, 0xdd, 0x29, 0x97, + 0x35, 0x02, 0xfb, 0x59, 0x97, 0xb0, 0xec, 0xa9, + 0x39, 0x6f, 0x81, 0x71, 0x2a, 0xf0, 0xe7, 0x2c, + 0x4e, 0x93, 0x90, 0xcb, 0x67, 0x69, 0xde, 0xd7, + 0x68, 0x3b, 0x0f, 0x69, 0xa8, 0xf4, 0xa8, 0x83, + 0x42, 0x80, 0x47, 0x65, 0x7a, 0xc9, 0x19, 0x5d, + 0xcb, 0x43, 0xa5, 0xff, 0xf8, 0x9e, 0x62, 0xf4, + 0xe2, 0x6c, 0xcc, 0x17, 0x55, 0x7c, 0x0d, 0x5c, + 0x8d, 0x16, 0x01, 0xd7, 0x3a, 0x0c, 0xf4, 0x7f, + 0x71, 0xdc, 0x48, 0xe9, 0x4b, 0xfe, 0x1a, 0xd0, + 0x04, 0x15, 0x33, 0xec, 0x78, 0xc6, 0x7e, 0xde, + 0x7c, 0x23, 0x18, 0x8d, 0x8f, 0xc2, 0x74, 0xc1, + 0x48, 0xcd, 0x5d, 0xee, 0xee, 0x81, 0x9e, 0x49, + 0x47, 0x8a, 0xf8, 0x61, 0xa3, 0x9c, 0x81, 0x96, + 0xbe, 0x2b, 0x5e, 0xbc, 0xcd, 0x34, 0x0a, 0x2a, + 0x3b, 0x8b, 0x7d, 0xa1, 0xf2, 0x8d, 0xb4, 0x51, + 0x9e, 0x14, 0x78, 0xa3, 0x58, 0x65, 0x2d, 0xd6, + 0x50, 0x40, 0x36, 0x32, 0x31, 0xd4, 0x3e, 0xc2, + 0xe0, 0x87, 0x1c, 0x05, 0x95, 0x80, 0x84, 0x24, + 0x08, 0x6f, 0x5b, 0xc7, 0xe1, 0x1d, 0xd5, 0xa3, + 0x94, 0x44, 0xa1, 0x7c, 0xd8, 0x4b, 0x86, 0xd2, + 0xc6, 0xa9, 0xf3, 0xe2, 0x4d, 0x6e, 0x1f, 0x0e, + 0xf2, 0xf5, 0x71, 0xf9, 0x71, 0x05, 0x24, 0xc9, + 0xc1, 0xe8, 0x91, 0x42, 0x61, 0x86, 0x57, 0x68, + 0xd9, 0xc9, 0x1d, 0xd5, 0x5a, 0xe9, 0xba, 0xe6, + 0x15, 0x8f, 0x87, 0xbd, 0x62, 0x56, 0xed, 0xda, + 0xc2, 0xa5, 0xd5, 0x39, 
0xac, 0x05, 0x10, 0x14, + 0x4a, 0xe7, 0xe7, 0x3c, 0x3f, 0xb7, 0xbb, 0xed, + 0x01, 0x6e, 0xcd, 0xee, 0x81, 0xb4, 0x62, 0xf4, + 0x62, 0x16, 0xff, 0x20, 0xb4, 0xf0, 0xbc, 0xff, + 0x7d, 0xd9, 0xcf, 0x95, 0x30, 0x27, 0xe0, 0x2f, + 0x98, 0x53, 0x80, 0x15, 0x13, 0xef, 0x44, 0x58, + 0x12, 0x16, 0xdb, 0x11, 0xef, 0x73, 0x51, 0xcd, + 0x42, 0x3f, 0x98, 0x6c, 0xc9, 0x68, 0xc3, 0xf4, + 0x5b, 0x0f, 0x5d, 0x77, 0xed, 0xdf, 0x0f, 0xff, + 0xb8, 0x69, 0x98, 0x50, 0x77, 0x7a, 0xe8, 0x90, + 0x27, 0x46, 0x10, 0xd2, 0xb5, 0x00, 0x3b, 0x36, + 0x43, 0x6d, 0x67, 0x41, 0x20, 0x3a, 0x32, 0xe0, + 0x2e, 0x5a, 0xfb, 0x4e, 0x4f, 0xa4, 0xf7, 0xc2, + 0xe6, 0x81, 0x1a, 0x51, 0xa8, 0x7c, 0xd4, 0x60, + 0x7c, 0x45, 0xe2, 0xba, 0x5b, 0x42, 0xf3, 0xbf, + 0x28, 0xaa, 0xf2, 0x90, 0xe4, 0x94, 0xdd, 0xaa, + 0x22, 0xd3, 0x71, 0x33, 0xa1, 0x01, 0x43, 0x0e, + 0xfa, 0x46, 0xd2, 0x6e, 0x55, 0x5e, 0x49, 0xeb, + 0x94, 0xf0, 0xb0, 0xb1, 0x2e, 0xf2, 0x3d, 0x6c, + 0x00, 0x5e, 0x01, 0x56, 0x3b, 0xfd, 0x5b, 0xa1, + 0x2f, 0x63, 0x1d, 0xbf, 0xf9, 0xd8, 0x13, 0xf7, + 0x4d, 0xb7, 0x1e, 0x3d, 0x98, 0xd2, 0xee, 0xb8, + 0x48, 0xc8, 0x5b, 0x91, 0x0f, 0x54, 0x9e, 0x26, + 0xb2, 0xc7, 0x3a, 0x6c, 0x8a, 0x35, 0xe1, 0xba + } +}; + +static const uint8_t x86_expected_contents[] = { + 0x1b, 0xd7, 0xd5, 0xde, + 0x2e, 0x43, 0xdb, 0x9f, + 0x1a, 0xa8, 0xb7, 0x26, + 0x48, 0xe3, 0xc7, 0xca, + 0x09, 0xec, 0x99, 0xcf, + 0xcd, 0xc2, 0xc8, 0x7d, + 0x80, 0xb8, 0xde, 0x21, + 0xb0, 0x2b, 0x5d, 0x8a, + 0xc9, 0xc4, 0x86, 0x02, + 0x21, 0xea, 0xfe, 0xf1, + 0x76, 0x05, 0xd4, 0xb2, + 0xde, 0x6c, 0x14, 0x48, + 0x21, 0x9b, 0x3f, 0x98, + 0x2c, 0xe1, 0x5b, 0x47, + + // float_save.register_area --- unswapped + 0xd9, 0x04, 0x20, 0x6b, 0x88, 0x3a, 0x3f, 0xd5, + 0x59, 0x7a, 0xa9, 0xeb, 0xd0, 0x5c, 0xdf, 0xfe, + 0xad, 0xdd, 0x4a, 0x8b, 0x10, 0xcc, 0x9a, 0x33, + 0xcb, 0xb6, 0xf7, 0x86, 0xcd, 0x69, 0x25, 0xae, + 0x25, 0xe5, 0x7a, 0xa1, 0x8f, 0xb2, 0x84, 0xd9, + 0xf7, 0x2d, 0x8a, 0xa1, 0x80, 0x81, 0x7f, 0x67, + 0x07, 0xa8, 0x23, 0xf1, 0x8c, 0xdc, 0xd8, 0x04, + 0x8b, 0x9d, 0xb1, 0xcd, 0x61, 0x0c, 0x9c, 0x69, + 0xc7, 0x8d, 0x17, 0xb6, 0xe5, 0x0b, 0x94, 0xf7, + 0x78, 0x9b, 0x63, 0x49, 0xba, 0xfc, 0x08, 0x4d, + + 0x90, 0x3a, 0xc5, 0x84, + 0x76, 0x1e, 0xf7, 0x79, + 0x25, 0xbd, 0x07, 0x81, + 0x21, 0x29, 0x2d, 0x45, + 0x75, 0x28, 0xec, 0x87, + 0xf5, 0x73, 0xbb, 0xf8, + 0x88, 0xbb, 0x3e, 0xa6, + 0xbe, 0x5e, 0xd3, 0x95, + 0x56, 0x24, 0xaa, 0x17, + 0x08, 0xa2, 0x5f, 0x13, + 0xe6, 0x15, 0x06, 0x50, + 0x05, 0x42, 0xd1, 0x66, + 0xa5, 0x19, 0x07, 0x00, + 0x1b, 0x48, 0x7b, 0x47, + 0xba, 0xdf, 0x84, 0x86, + 0xdf, 0xcd, 0x3c, 0xe3, + 0x33, 0x5d, 0xe6, 0xc0, + + // extended_registers --- unswapped + 0x68, 0x63, 0xdf, 0x50, 0xf7, 0x3b, 0xe8, 0xe5, + 0xcb, 0xd6, 0x66, 0x60, 0xe5, 0xa3, 0x58, 0xb3, + 0x6f, 0x34, 0xca, 0x02, 0x9b, 0x5f, 0xd0, 0x41, + 0xbd, 0xc5, 0x2d, 0xf8, 0xff, 0x15, 0xa2, 0xd0, + 0xe3, 0x2b, 0x3b, 0x8a, 0x9f, 0xc3, 0x9e, 0x28, + 0x0a, 0xc2, 0xac, 0x3b, 0x67, 0x37, 0x01, 0xfd, + 0xc3, 0xaf, 0x60, 0xf6, 0x2c, 0x4f, 0xa9, 0x52, + 0x92, 0xe5, 0x28, 0xde, 0x34, 0xb6, 0x2e, 0x44, + 0x15, 0xa4, 0xb6, 0xe4, 0xc9, 0x1a, 0x14, 0xb9, + 0x51, 0x33, 0x3c, 0xe0, 0xc7, 0x94, 0xf0, 0xf7, + 0x78, 0xdd, 0xe5, 0xca, 0xb7, 0xa6, 0xe0, 0x14, + 0xa6, 0x03, 0xab, 0x77, 0xad, 0xbd, 0xd2, 0x53, + 0x3d, 0x07, 0xe7, 0xaf, 0x90, 0x44, 0x71, 0xbe, + 0x0c, 0xdf, 0x2b, 0x97, 0x40, 0x48, 0xd5, 0xf9, + 0x62, 0x03, 0x91, 0x84, 0xd6, 0xdd, 0x29, 0x97, + 0x35, 0x02, 0xfb, 0x59, 0x97, 0xb0, 0xec, 0xa9, + 0x39, 0x6f, 0x81, 0x71, 0x2a, 0xf0, 0xe7, 0x2c, + 0x4e, 0x93, 0x90, 0xcb, 0x67, 0x69, 0xde, 0xd7, + 0x68, 0x3b, 0x0f, 0x69, 
0xa8, 0xf4, 0xa8, 0x83, + 0x42, 0x80, 0x47, 0x65, 0x7a, 0xc9, 0x19, 0x5d, + 0xcb, 0x43, 0xa5, 0xff, 0xf8, 0x9e, 0x62, 0xf4, + 0xe2, 0x6c, 0xcc, 0x17, 0x55, 0x7c, 0x0d, 0x5c, + 0x8d, 0x16, 0x01, 0xd7, 0x3a, 0x0c, 0xf4, 0x7f, + 0x71, 0xdc, 0x48, 0xe9, 0x4b, 0xfe, 0x1a, 0xd0, + 0x04, 0x15, 0x33, 0xec, 0x78, 0xc6, 0x7e, 0xde, + 0x7c, 0x23, 0x18, 0x8d, 0x8f, 0xc2, 0x74, 0xc1, + 0x48, 0xcd, 0x5d, 0xee, 0xee, 0x81, 0x9e, 0x49, + 0x47, 0x8a, 0xf8, 0x61, 0xa3, 0x9c, 0x81, 0x96, + 0xbe, 0x2b, 0x5e, 0xbc, 0xcd, 0x34, 0x0a, 0x2a, + 0x3b, 0x8b, 0x7d, 0xa1, 0xf2, 0x8d, 0xb4, 0x51, + 0x9e, 0x14, 0x78, 0xa3, 0x58, 0x65, 0x2d, 0xd6, + 0x50, 0x40, 0x36, 0x32, 0x31, 0xd4, 0x3e, 0xc2, + 0xe0, 0x87, 0x1c, 0x05, 0x95, 0x80, 0x84, 0x24, + 0x08, 0x6f, 0x5b, 0xc7, 0xe1, 0x1d, 0xd5, 0xa3, + 0x94, 0x44, 0xa1, 0x7c, 0xd8, 0x4b, 0x86, 0xd2, + 0xc6, 0xa9, 0xf3, 0xe2, 0x4d, 0x6e, 0x1f, 0x0e, + 0xf2, 0xf5, 0x71, 0xf9, 0x71, 0x05, 0x24, 0xc9, + 0xc1, 0xe8, 0x91, 0x42, 0x61, 0x86, 0x57, 0x68, + 0xd9, 0xc9, 0x1d, 0xd5, 0x5a, 0xe9, 0xba, 0xe6, + 0x15, 0x8f, 0x87, 0xbd, 0x62, 0x56, 0xed, 0xda, + 0xc2, 0xa5, 0xd5, 0x39, 0xac, 0x05, 0x10, 0x14, + 0x4a, 0xe7, 0xe7, 0x3c, 0x3f, 0xb7, 0xbb, 0xed, + 0x01, 0x6e, 0xcd, 0xee, 0x81, 0xb4, 0x62, 0xf4, + 0x62, 0x16, 0xff, 0x20, 0xb4, 0xf0, 0xbc, 0xff, + 0x7d, 0xd9, 0xcf, 0x95, 0x30, 0x27, 0xe0, 0x2f, + 0x98, 0x53, 0x80, 0x15, 0x13, 0xef, 0x44, 0x58, + 0x12, 0x16, 0xdb, 0x11, 0xef, 0x73, 0x51, 0xcd, + 0x42, 0x3f, 0x98, 0x6c, 0xc9, 0x68, 0xc3, 0xf4, + 0x5b, 0x0f, 0x5d, 0x77, 0xed, 0xdf, 0x0f, 0xff, + 0xb8, 0x69, 0x98, 0x50, 0x77, 0x7a, 0xe8, 0x90, + 0x27, 0x46, 0x10, 0xd2, 0xb5, 0x00, 0x3b, 0x36, + 0x43, 0x6d, 0x67, 0x41, 0x20, 0x3a, 0x32, 0xe0, + 0x2e, 0x5a, 0xfb, 0x4e, 0x4f, 0xa4, 0xf7, 0xc2, + 0xe6, 0x81, 0x1a, 0x51, 0xa8, 0x7c, 0xd4, 0x60, + 0x7c, 0x45, 0xe2, 0xba, 0x5b, 0x42, 0xf3, 0xbf, + 0x28, 0xaa, 0xf2, 0x90, 0xe4, 0x94, 0xdd, 0xaa, + 0x22, 0xd3, 0x71, 0x33, 0xa1, 0x01, 0x43, 0x0e, + 0xfa, 0x46, 0xd2, 0x6e, 0x55, 0x5e, 0x49, 0xeb, + 0x94, 0xf0, 0xb0, 0xb1, 0x2e, 0xf2, 0x3d, 0x6c, + 0x00, 0x5e, 0x01, 0x56, 0x3b, 0xfd, 0x5b, 0xa1, + 0x2f, 0x63, 0x1d, 0xbf, 0xf9, 0xd8, 0x13, 0xf7, + 0x4d, 0xb7, 0x1e, 0x3d, 0x98, 0xd2, 0xee, 0xb8, + 0x48, 0xc8, 0x5b, 0x91, 0x0f, 0x54, 0x9e, 0x26, + 0xb2, 0xc7, 0x3a, 0x6c, 0x8a, 0x35, 0xe1, 0xba +}; + +static const MDRawContextARM arm_raw_context = { + // context_flags + 0x591b9e6a, + // iregs + { + 0xa21594de, + 0x820d8a25, + 0xc4e133b2, + 0x173a1c02, + 0x105fb175, + 0xe871793f, + 0x5def70b3, + 0xcee3a623, + 0x7b3aa9b8, + 0x52518537, + 0x627012c5, + 0x22723dcc, + 0x16fcc971, + 0x20988bcb, + 0xf1ab806b, + 0x99d5fc03, + }, + // cpsr + 0xb70df511, + // float_save + { + // fpscr + 0xa1e1f7ce1077e6b5ULL, + // regs + { + 0xbcb8d002eed7fbdeULL, + 0x4dd26a43b96ae97fULL, + 0x8eec22db8b31741cULL, + 0xfd634bd7c5ad66a0ULL, + 0x1681da0daeb3debeULL, + 0x474a32bdf72d0b71ULL, + 0xcaf464f8b1044834ULL, + 0xcaa6592ae5c7582aULL, + 0x4ee46889d877c3dbULL, + 0xf8930cf301645cf5ULL, + 0x4da7e9ebba27f7c7ULL, + 0x69a7b02761944da3ULL, + 0x2cda2b2e78195c06ULL, + 0x66b227ab9b460a42ULL, + 0x7e77e49e52ee0849ULL, + 0xd62cd9663e76f255ULL, + 0xe9370f082451514bULL, + 0x50a1c674dd1b6029ULL, + 0x405db4575829eac4ULL, + 0x67b948764649eee7ULL, + 0x93731885419229d4ULL, + 0xdb0338bad72a4ce7ULL, + 0xa0a451f996fca4c8ULL, + 0xb4508ea668400a45ULL, + 0xbff28c5c7a142423ULL, + 0x4f31b42b96f3a431ULL, + 0x2ce6789d4ea1ff37ULL, + 0xfa150b52e4f82a3cULL, + 0xe9ec40449e6ed4f3ULL, + 0x5ceca87836fe2251ULL, + 0x66f50de463ee238cULL, + 0x42823efcd59ab511ULL, + }, + // extra + { + 0xe9e14cd2, + 0x865bb640, + 
0x9f3f0b3e, + 0x94a71c52, + 0x3c012f19, + 0x6436637c, + 0x46ccedcb, + 0x7b341be7, + } + } +}; + +static const uint8_t arm_expected_contents[] = { + 0x6a, 0x9e, 0x1b, 0x59, + 0xde, 0x94, 0x15, 0xa2, + 0x25, 0x8a, 0x0d, 0x82, + 0xb2, 0x33, 0xe1, 0xc4, + 0x02, 0x1c, 0x3a, 0x17, + 0x75, 0xb1, 0x5f, 0x10, + 0x3f, 0x79, 0x71, 0xe8, + 0xb3, 0x70, 0xef, 0x5d, + 0x23, 0xa6, 0xe3, 0xce, + 0xb8, 0xa9, 0x3a, 0x7b, + 0x37, 0x85, 0x51, 0x52, + 0xc5, 0x12, 0x70, 0x62, + 0xcc, 0x3d, 0x72, 0x22, + 0x71, 0xc9, 0xfc, 0x16, + 0xcb, 0x8b, 0x98, 0x20, + 0x6b, 0x80, 0xab, 0xf1, + 0x03, 0xfc, 0xd5, 0x99, + 0x11, 0xf5, 0x0d, 0xb7, + 0xb5, 0xe6, 0x77, 0x10, + 0xce, 0xf7, 0xe1, 0xa1, + 0xde, 0xfb, 0xd7, 0xee, + 0x02, 0xd0, 0xb8, 0xbc, + 0x7f, 0xe9, 0x6a, 0xb9, + 0x43, 0x6a, 0xd2, 0x4d, + 0x1c, 0x74, 0x31, 0x8b, + 0xdb, 0x22, 0xec, 0x8e, + 0xa0, 0x66, 0xad, 0xc5, + 0xd7, 0x4b, 0x63, 0xfd, + 0xbe, 0xde, 0xb3, 0xae, + 0x0d, 0xda, 0x81, 0x16, + 0x71, 0x0b, 0x2d, 0xf7, + 0xbd, 0x32, 0x4a, 0x47, + 0x34, 0x48, 0x04, 0xb1, + 0xf8, 0x64, 0xf4, 0xca, + 0x2a, 0x58, 0xc7, 0xe5, + 0x2a, 0x59, 0xa6, 0xca, + 0xdb, 0xc3, 0x77, 0xd8, + 0x89, 0x68, 0xe4, 0x4e, + 0xf5, 0x5c, 0x64, 0x01, + 0xf3, 0x0c, 0x93, 0xf8, + 0xc7, 0xf7, 0x27, 0xba, + 0xeb, 0xe9, 0xa7, 0x4d, + 0xa3, 0x4d, 0x94, 0x61, + 0x27, 0xb0, 0xa7, 0x69, + 0x06, 0x5c, 0x19, 0x78, + 0x2e, 0x2b, 0xda, 0x2c, + 0x42, 0x0a, 0x46, 0x9b, + 0xab, 0x27, 0xb2, 0x66, + 0x49, 0x08, 0xee, 0x52, + 0x9e, 0xe4, 0x77, 0x7e, + 0x55, 0xf2, 0x76, 0x3e, + 0x66, 0xd9, 0x2c, 0xd6, + 0x4b, 0x51, 0x51, 0x24, + 0x08, 0x0f, 0x37, 0xe9, + 0x29, 0x60, 0x1b, 0xdd, + 0x74, 0xc6, 0xa1, 0x50, + 0xc4, 0xea, 0x29, 0x58, + 0x57, 0xb4, 0x5d, 0x40, + 0xe7, 0xee, 0x49, 0x46, + 0x76, 0x48, 0xb9, 0x67, + 0xd4, 0x29, 0x92, 0x41, + 0x85, 0x18, 0x73, 0x93, + 0xe7, 0x4c, 0x2a, 0xd7, + 0xba, 0x38, 0x03, 0xdb, + 0xc8, 0xa4, 0xfc, 0x96, + 0xf9, 0x51, 0xa4, 0xa0, + 0x45, 0x0a, 0x40, 0x68, + 0xa6, 0x8e, 0x50, 0xb4, + 0x23, 0x24, 0x14, 0x7a, + 0x5c, 0x8c, 0xf2, 0xbf, + 0x31, 0xa4, 0xf3, 0x96, + 0x2b, 0xb4, 0x31, 0x4f, + 0x37, 0xff, 0xa1, 0x4e, + 0x9d, 0x78, 0xe6, 0x2c, + 0x3c, 0x2a, 0xf8, 0xe4, + 0x52, 0x0b, 0x15, 0xfa, + 0xf3, 0xd4, 0x6e, 0x9e, + 0x44, 0x40, 0xec, 0xe9, + 0x51, 0x22, 0xfe, 0x36, + 0x78, 0xa8, 0xec, 0x5c, + 0x8c, 0x23, 0xee, 0x63, + 0xe4, 0x0d, 0xf5, 0x66, + 0x11, 0xb5, 0x9a, 0xd5, + 0xfc, 0x3e, 0x82, 0x42, + 0xd2, 0x4c, 0xe1, 0xe9, + 0x40, 0xb6, 0x5b, 0x86, + 0x3e, 0x0b, 0x3f, 0x9f, + 0x52, 0x1c, 0xa7, 0x94, + 0x19, 0x2f, 0x01, 0x3c, + 0x7c, 0x63, 0x36, 0x64, + 0xcb, 0xed, 0xcc, 0x46, + 0xe7, 0x1b, 0x34, 0x7b +}; + +#endif // PROCESSOR_SYNTH_MINIDUMP_UNITTEST_DATA_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/tokenize.cc b/TMessagesProj/jni/third_party/breakpad/src/processor/tokenize.cc new file mode 100644 index 0000000000..f468120c0c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/tokenize.cc @@ -0,0 +1,79 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include + +#include +#include + +#include "common/using_std_string.h" + +namespace google_breakpad { + +#ifdef _WIN32 +#define strtok_r strtok_s +#endif + +using std::vector; + +bool Tokenize(char *line, + const char *separators, + int max_tokens, + vector *tokens) { + tokens->clear(); + tokens->reserve(max_tokens); + + int remaining = max_tokens; + + // Split tokens on the separator character. + // strip them out before exhausting max_tokens. + char *save_ptr; + char *token = strtok_r(line, separators, &save_ptr); + while (token && --remaining > 0) { + tokens->push_back(token); + if (remaining > 1) + token = strtok_r(NULL, separators, &save_ptr); + } + + // If there's anything left, just add it as a single token. + if (remaining == 0 && (token = strtok_r(NULL, "\r\n", &save_ptr))) { + tokens->push_back(token); + } + + return tokens->size() == static_cast(max_tokens); +} + +void StringToVector(const string &str, vector &vec) { + vec.resize(str.length() + 1); + std::copy(str.begin(), str.end(), + vec.begin()); + vec[str.length()] = '\0'; +} + +} // namespace google_breakpad diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/tokenize.h b/TMessagesProj/jni/third_party/breakpad/src/processor/tokenize.h new file mode 100644 index 0000000000..9ff571d5c5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/tokenize.h @@ -0,0 +1,63 @@ +// Copyright (c) 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Implements a Tokenize function for splitting up strings. + +#ifndef GOOGLE_BREAKPAD_PROCESSOR_TOKENIZE_H_ +#define GOOGLE_BREAKPAD_PROCESSOR_TOKENIZE_H_ + +#include +#include + +#include "common/using_std_string.h" + +namespace google_breakpad { + +// Splits line into at most max_tokens tokens, separated by any of the +// characters in separators and placing them in the tokens vector. +// line is a 0-terminated string that optionally ends with a newline +// character or combination, which will be removed. +// If more tokens than max_tokens are present, the final token is placed +// into the vector without splitting it up at all. This modifies line as +// a side effect. Returns true if exactly max_tokens tokens are returned, +// and false if fewer are returned. This is not considered a failure of +// Tokenize, but may be treated as a failure if the caller expects an +// exact, as opposed to maximum, number of tokens. + +bool Tokenize(char *line, + const char *separators, + int max_tokens, + std::vector *tokens); +// For convenience, since you need a char* to pass to Tokenize. +// You can call StringToVector on a string, and use &vec[0]. +void StringToVector(const string &str, std::vector &vec); + +} // namespace google_breakpad + +#endif // GOOGLE_BREAKPAD_PROCESSOR_TOKENIZE_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/processor/windows_frame_info.h b/TMessagesProj/jni/third_party/breakpad/src/processor/windows_frame_info.h new file mode 100644 index 0000000000..c92c610c7d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/processor/windows_frame_info.h @@ -0,0 +1,209 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
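// Usage sketch for the Tokenize/StringToVector helpers declared in tokenize.h
// above (illustrative only; the record text and the token count are made up):
//
//   string line = "MODULE windows x86 A1B2C3D4 app.pdb";
//   std::vector<char> buffer;
//   StringToVector(line, buffer);        // writable, NUL-terminated copy
//   std::vector<char*> tokens;
//   // Ask for at most 4 tokens: everything past the third separator is
//   // left glued together in the final slot rather than split further.
//   bool exact = Tokenize(&buffer[0], " \r\n", 4, &tokens);
//   // exact == true; tokens[3] == "A1B2C3D4 app.pdb"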
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// windows_frame_info.h: Holds debugging information about a stack frame. +// +// This structure is specific to Windows debugging information obtained +// from pdb files using the DIA API. +// +// Author: Mark Mentovai + + +#ifndef PROCESSOR_WINDOWS_FRAME_INFO_H__ +#define PROCESSOR_WINDOWS_FRAME_INFO_H__ + +#include +#include + +#include +#include + +#include "common/using_std_string.h" +#include "google_breakpad/common/breakpad_types.h" +#include "processor/logging.h" +#include "processor/tokenize.h" + +namespace google_breakpad { + +#ifdef _WIN32 +#define strtoull _strtoui64 +#endif + +struct WindowsFrameInfo { + public: + enum Validity { + VALID_NONE = 0, + VALID_PARAMETER_SIZE = 1, + VALID_ALL = -1 + }; + + // The types for stack_info_. This is equivalent to MS DIA's + // StackFrameTypeEnum. Each identifies a different type of frame + // information, although all are represented in the symbol file in the + // same format. These are used as indices to the stack_info_ array. + enum StackInfoTypes { + STACK_INFO_FPO = 0, + STACK_INFO_TRAP, // not used here + STACK_INFO_TSS, // not used here + STACK_INFO_STANDARD, + STACK_INFO_FRAME_DATA, + STACK_INFO_LAST, // must be the last sequentially-numbered item + STACK_INFO_UNKNOWN = -1 + }; + + WindowsFrameInfo() : type_(STACK_INFO_UNKNOWN), + valid(VALID_NONE), + prolog_size(0), + epilog_size(0), + parameter_size(0), + saved_register_size(0), + local_size(0), + max_stack_size(0), + allocates_base_pointer(0), + program_string() {} + + WindowsFrameInfo(StackInfoTypes type, + uint32_t set_prolog_size, + uint32_t set_epilog_size, + uint32_t set_parameter_size, + uint32_t set_saved_register_size, + uint32_t set_local_size, + uint32_t set_max_stack_size, + int set_allocates_base_pointer, + const string set_program_string) + : type_(type), + valid(VALID_ALL), + prolog_size(set_prolog_size), + epilog_size(set_epilog_size), + parameter_size(set_parameter_size), + saved_register_size(set_saved_register_size), + local_size(set_local_size), + max_stack_size(set_max_stack_size), + allocates_base_pointer(set_allocates_base_pointer), + program_string(set_program_string) {} + + // Parse a textual serialization of a WindowsFrameInfo object from + // a string. Returns NULL if parsing fails, or a new object + // otherwise. type, rva and code_size are present in the STACK line, + // but not the StackFrameInfo structure, so return them as outparams. 
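// For illustration (hypothetical record; the numeric fields are hex): a
// symbol-file line such as
//
//   STACK WIN 4 2170 14 3 0 4 0 0 0 1 $T0 $ebp = $eip $T0 4 + ^ =
//
// would reach this function with the leading "STACK WIN " already stripped:
//
//   int type;
//   uint64_t rva, code_size;
//   WindowsFrameInfo *info = WindowsFrameInfo::ParseFromString(
//       "4 2170 14 3 0 4 0 0 0 1 $T0 $ebp = $eip $T0 4 + ^ =",
//       type, rva, code_size);
//   // type == STACK_INFO_FRAME_DATA, rva == 0x2170, code_size == 0x14,
//   // info->prolog_size == 3, info->parameter_size == 4,
//   // info->program_string == "$T0 $ebp = $eip $T0 4 + ^ ="
//   delete info;   // ParseFromString returns a newly allocated object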
+ static WindowsFrameInfo *ParseFromString(const string string, + int &type, + uint64_t &rva, + uint64_t &code_size) { + // The format of a STACK WIN record is documented at: + // + // http://code.google.com/p/google-breakpad/wiki/SymbolFiles + + std::vector buffer; + StringToVector(string, buffer); + std::vector tokens; + if (!Tokenize(&buffer[0], " \r\n", 11, &tokens)) + return NULL; + + type = strtol(tokens[0], NULL, 16); + if (type < 0 || type > STACK_INFO_LAST - 1) + return NULL; + + rva = strtoull(tokens[1], NULL, 16); + code_size = strtoull(tokens[2], NULL, 16); + uint32_t prolog_size = strtoul(tokens[3], NULL, 16); + uint32_t epilog_size = strtoul(tokens[4], NULL, 16); + uint32_t parameter_size = strtoul(tokens[5], NULL, 16); + uint32_t saved_register_size = strtoul(tokens[6], NULL, 16); + uint32_t local_size = strtoul(tokens[7], NULL, 16); + uint32_t max_stack_size = strtoul(tokens[8], NULL, 16); + int has_program_string = strtoul(tokens[9], NULL, 16); + + const char *program_string = ""; + int allocates_base_pointer = 0; + if (has_program_string) { + program_string = tokens[10]; + } else { + allocates_base_pointer = strtoul(tokens[10], NULL, 16); + } + + return new WindowsFrameInfo(static_cast(type), + prolog_size, + epilog_size, + parameter_size, + saved_register_size, + local_size, + max_stack_size, + allocates_base_pointer, + program_string); + } + + // CopyFrom makes "this" WindowsFrameInfo object identical to "that". + void CopyFrom(const WindowsFrameInfo &that) { + type_ = that.type_; + valid = that.valid; + prolog_size = that.prolog_size; + epilog_size = that.epilog_size; + parameter_size = that.parameter_size; + saved_register_size = that.saved_register_size; + local_size = that.local_size; + max_stack_size = that.max_stack_size; + allocates_base_pointer = that.allocates_base_pointer; + program_string = that.program_string; + } + + // Clears the WindowsFrameInfo object so that users will see it as though + // it contains no information. + void Clear() { + type_ = STACK_INFO_UNKNOWN; + valid = VALID_NONE; + program_string.erase(); + } + + StackInfoTypes type_; + + // Identifies which fields in the structure are valid. This is of + // type Validity, but it is defined as an int because it's not + // possible to OR values into an enumerated type. Users must check + // this field before using any other. + int valid; + + // These values come from IDiaFrameData. + uint32_t prolog_size; + uint32_t epilog_size; + uint32_t parameter_size; + uint32_t saved_register_size; + uint32_t local_size; + uint32_t max_stack_size; + + // Only one of allocates_base_pointer or program_string will be valid. + // If program_string is empty, use allocates_base_pointer. + bool allocates_base_pointer; + string program_string; +}; + +} // namespace google_breakpad + + +#endif // PROCESSOR_WINDOWS_FRAME_INFO_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/COPYING b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/COPYING new file mode 100644 index 0000000000..610fbdb07f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/COPYING @@ -0,0 +1,22 @@ +COPYRIGHT AND PERMISSION NOTICE + +Copyright (c) 1996 - 2011, Daniel Stenberg, . + +All rights reserved. + +Permission to use, copy, modify, and distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright +notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE +OR OTHER DEALINGS IN THE SOFTWARE. + +Except as contained in this notice, the name of a copyright holder shall not +be used in advertising or otherwise to promote the sale, use or other dealings +in this Software without prior written authorization of the copyright holder. + diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/curl.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/curl.h new file mode 100644 index 0000000000..0d80936f75 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/curl.h @@ -0,0 +1,1936 @@ +#ifndef __CURL_CURL_H +#define __CURL_CURL_H +/*************************************************************************** + * _ _ ____ _ + * Project ___| | | | _ \| | + * / __| | | | |_) | | + * | (__| |_| | _ <| |___ + * \___|\___/|_| \_\_____| + * + * Copyright (C) 1998 - 2009, Daniel Stenberg, , et al. + * + * This software is licensed as described in the file COPYING, which + * you should have received as part of this distribution. The terms + * are also available at http://curl.haxx.se/docs/copyright.html. + * + * You may opt to use, copy, modify, merge, publish, distribute and/or sell + * copies of the Software, and permit persons to whom the Software is + * furnished to do so, under the terms of the COPYING file. + * + * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY + * KIND, either express or implied. + * + * $Id: curl.h,v 1.396 2009-10-16 13:30:31 yangtse Exp $ + ***************************************************************************/ + +/* + * If you have libcurl problems, all docs and details are found here: + * http://curl.haxx.se/libcurl/ + * + * curl-library mailing list subscription and unsubscription web interface: + * http://cool.haxx.se/mailman/listinfo/curl-library/ + */ + +/* + * Leading 'curl' path on the 'curlbuild.h' include statement is + * required to properly allow building outside of the source tree, + * due to the fact that in this case 'curlbuild.h' is generated in + * a subdirectory of the build tree while 'curl.h actually remains + * in a subdirectory of the source tree. + */ + +#include "third_party/curl/curlver.h" /* libcurl version defines */ +#include "third_party/curl/curlbuild.h" /* libcurl build definitions */ +#include "third_party/curl/curlrules.h" /* libcurl rules enforcement */ + +/* + * Define WIN32 when build target is Win32 API + */ + +#if (defined(_WIN32) || defined(__WIN32__)) && \ + !defined(WIN32) && !defined(__SYMBIAN32__) +#define WIN32 +#endif + +#include +#include + +/* The include stuff here below is mainly for time_t! */ +#include +#include + +#if defined(WIN32) && !defined(_WIN32_WCE) && !defined(__GNUC__) && \ + !defined(__CYGWIN__) || defined(__MINGW32__) +#if !(defined(_WINSOCKAPI_) || defined(_WINSOCK_H)) +/* The check above prevents the winsock2 inclusion if winsock.h already was + included, since they can't co-exist without problems */ +#include +#include +#endif +#else + +/* HP-UX systems version 9, 10 and 11 lack sys/select.h and so does oldish + libc5-based Linux systems. 
Only include it on system that are known to
+ require it! */
+#if defined(_AIX) || defined(__NOVELL_LIBC__) || defined(__NetBSD__) || \
+ defined(__minix) || defined(__SYMBIAN32__) || defined(__INTEGRITY) || \
+ defined(__ANDROID__)
+#include <sys/select.h>
+#endif
+
+#ifndef _WIN32_WCE
+#include <sys/socket.h>
+#endif
+#if !defined(WIN32) && !defined(__WATCOMC__) && !defined(__VXWORKS__)
+#include <sys/time.h>
+#endif
+#include <sys/types.h>
+#endif
+
+#ifdef __BEOS__
+#include <support/SupportDefs.h>
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef void CURL;
+
+/*
+ * Decorate exportable functions for Win32 and Symbian OS DLL linking.
+ * This avoids using a .def file for building libcurl.dll.
+ */
+#if (defined(WIN32) || defined(_WIN32) || defined(__SYMBIAN32__)) && \
+ !defined(CURL_STATICLIB)
+#if defined(BUILDING_LIBCURL)
+#define CURL_EXTERN __declspec(dllexport)
+#else
+#define CURL_EXTERN __declspec(dllimport)
+#endif
+#else
+
+#ifdef CURL_HIDDEN_SYMBOLS
+/*
+ * This definition is used to make external definitions visible in the
+ * shared library when symbols are hidden by default. It makes no
+ * difference when compiling applications whether this is set or not,
+ * only when compiling the library.
+ */
+#define CURL_EXTERN CURL_EXTERN_SYMBOL
+#else
+#define CURL_EXTERN
+#endif
+#endif
+
+#ifndef curl_socket_typedef
+/* socket typedef */
+#ifdef WIN32
+typedef SOCKET curl_socket_t;
+#define CURL_SOCKET_BAD INVALID_SOCKET
+#else
+typedef int curl_socket_t;
+#define CURL_SOCKET_BAD -1
+#endif
+#define curl_socket_typedef
+#endif /* curl_socket_typedef */
+
+struct curl_httppost {
+ struct curl_httppost *next; /* next entry in the list */
+ char *name; /* pointer to allocated name */
+ long namelength; /* length of name length */
+ char *contents; /* pointer to allocated data contents */
+ long contentslength; /* length of contents field */
+ char *buffer; /* pointer to allocated buffer contents */
+ long bufferlength; /* length of buffer field */
+ char *contenttype; /* Content-Type */
+ struct curl_slist* contentheader; /* list of extra headers for this form */
+ struct curl_httppost *more; /* if one field name has more than one
+ file, this link should link to following
+ files */
+ long flags; /* as defined below */
+#define HTTPPOST_FILENAME (1<<0) /* specified content is a file name */
+#define HTTPPOST_READFILE (1<<1) /* specified content is a file name */
+#define HTTPPOST_PTRNAME (1<<2) /* name is only stored pointer
+ do not free in formfree */
+#define HTTPPOST_PTRCONTENTS (1<<3) /* contents is only stored pointer
+ do not free in formfree */
+#define HTTPPOST_BUFFER (1<<4) /* upload file from buffer */
+#define HTTPPOST_PTRBUFFER (1<<5) /* upload file from pointer contents */
+#define HTTPPOST_CALLBACK (1<<6) /* upload file contents by using the
+ regular read callback to get the data
+ and pass the given pointer as custom
+ pointer */
+
+ char *showfilename; /* The file name to show. If not set, the
+ actual file name will be used (if this
+ is a file part) */
+ void *userp; /* custom pointer used for
+ HTTPPOST_CALLBACK posts */
+};
+
+typedef int (*curl_progress_callback)(void *clientp,
+ double dltotal,
+ double dlnow,
+ double ultotal,
+ double ulnow);
+
+#ifndef CURL_MAX_WRITE_SIZE
+ /* Tests have proven that 20K is a very bad buffer size for uploads on
+ Windows, while 16K for some odd reason performed a lot better.
+ We do the ifndef check to allow this value to easier be changed at build
+ time for those who feel adventurous.
*/ +#define CURL_MAX_WRITE_SIZE 16384 +#endif + +#ifndef CURL_MAX_HTTP_HEADER +/* The only reason to have a max limit for this is to avoid the risk of a bad + server feeding libcurl with a never-ending header that will cause reallocs + infinitely */ +#define CURL_MAX_HTTP_HEADER (100*1024) +#endif + + +/* This is a magic return code for the write callback that, when returned, + will signal libcurl to pause receiving on the current transfer. */ +#define CURL_WRITEFUNC_PAUSE 0x10000001 +typedef size_t (*curl_write_callback)(char *buffer, + size_t size, + size_t nitems, + void *outstream); + +/* These are the return codes for the seek callbacks */ +#define CURL_SEEKFUNC_OK 0 +#define CURL_SEEKFUNC_FAIL 1 /* fail the entire transfer */ +#define CURL_SEEKFUNC_CANTSEEK 2 /* tell libcurl seeking can't be done, so + libcurl might try other means instead */ +typedef int (*curl_seek_callback)(void *instream, + curl_off_t offset, + int origin); /* 'whence' */ + +/* This is a return code for the read callback that, when returned, will + signal libcurl to immediately abort the current transfer. */ +#define CURL_READFUNC_ABORT 0x10000000 +/* This is a return code for the read callback that, when returned, will + signal libcurl to pause sending data on the current transfer. */ +#define CURL_READFUNC_PAUSE 0x10000001 + +typedef size_t (*curl_read_callback)(char *buffer, + size_t size, + size_t nitems, + void *instream); + +typedef enum { + CURLSOCKTYPE_IPCXN, /* socket created for a specific IP connection */ + CURLSOCKTYPE_LAST /* never use */ +} curlsocktype; + +typedef int (*curl_sockopt_callback)(void *clientp, + curl_socket_t curlfd, + curlsocktype purpose); + +struct curl_sockaddr { + int family; + int socktype; + int protocol; + unsigned int addrlen; /* addrlen was a socklen_t type before 7.18.0 but it + turned really ugly and painful on the systems that + lack this type */ + struct sockaddr addr; +}; + +typedef curl_socket_t +(*curl_opensocket_callback)(void *clientp, + curlsocktype purpose, + struct curl_sockaddr *address); + +#ifndef CURL_NO_OLDIES + /* not used since 7.10.8, will be removed in a future release */ +typedef int (*curl_passwd_callback)(void *clientp, + const char *prompt, + char *buffer, + int buflen); +#endif + +typedef enum { + CURLIOE_OK, /* I/O operation successful */ + CURLIOE_UNKNOWNCMD, /* command was unknown to callback */ + CURLIOE_FAILRESTART, /* failed to restart the read */ + CURLIOE_LAST /* never use */ +} curlioerr; + +typedef enum { + CURLIOCMD_NOP, /* no operation */ + CURLIOCMD_RESTARTREAD, /* restart the read stream from start */ + CURLIOCMD_LAST /* never use */ +} curliocmd; + +typedef curlioerr (*curl_ioctl_callback)(CURL *handle, + int cmd, + void *clientp); + +/* + * The following typedef's are signatures of malloc, free, realloc, strdup and + * calloc respectively. Function pointers of these types can be passed to the + * curl_global_init_mem() function to set user defined memory management + * callback routines. 
+ */ +typedef void *(*curl_malloc_callback)(size_t size); +typedef void (*curl_free_callback)(void *ptr); +typedef void *(*curl_realloc_callback)(void *ptr, size_t size); +typedef char *(*curl_strdup_callback)(const char *str); +typedef void *(*curl_calloc_callback)(size_t nmemb, size_t size); + +/* the kind of data that is passed to information_callback*/ +typedef enum { + CURLINFO_TEXT = 0, + CURLINFO_HEADER_IN, /* 1 */ + CURLINFO_HEADER_OUT, /* 2 */ + CURLINFO_DATA_IN, /* 3 */ + CURLINFO_DATA_OUT, /* 4 */ + CURLINFO_SSL_DATA_IN, /* 5 */ + CURLINFO_SSL_DATA_OUT, /* 6 */ + CURLINFO_END +} curl_infotype; + +typedef int (*curl_debug_callback) + (CURL *handle, /* the handle/transfer this concerns */ + curl_infotype type, /* what kind of data */ + char *data, /* points to the data */ + size_t size, /* size of the data pointed to */ + void *userptr); /* whatever the user please */ + +/* All possible error codes from all sorts of curl functions. Future versions + may return other values, stay prepared. + + Always add new return codes last. Never *EVER* remove any. The return + codes must remain the same! + */ + +typedef enum { + CURLE_OK = 0, + CURLE_UNSUPPORTED_PROTOCOL, /* 1 */ + CURLE_FAILED_INIT, /* 2 */ + CURLE_URL_MALFORMAT, /* 3 */ + CURLE_OBSOLETE4, /* 4 - NOT USED */ + CURLE_COULDNT_RESOLVE_PROXY, /* 5 */ + CURLE_COULDNT_RESOLVE_HOST, /* 6 */ + CURLE_COULDNT_CONNECT, /* 7 */ + CURLE_FTP_WEIRD_SERVER_REPLY, /* 8 */ + CURLE_REMOTE_ACCESS_DENIED, /* 9 a service was denied by the server + due to lack of access - when login fails + this is not returned. */ + CURLE_OBSOLETE10, /* 10 - NOT USED */ + CURLE_FTP_WEIRD_PASS_REPLY, /* 11 */ + CURLE_OBSOLETE12, /* 12 - NOT USED */ + CURLE_FTP_WEIRD_PASV_REPLY, /* 13 */ + CURLE_FTP_WEIRD_227_FORMAT, /* 14 */ + CURLE_FTP_CANT_GET_HOST, /* 15 */ + CURLE_OBSOLETE16, /* 16 - NOT USED */ + CURLE_FTP_COULDNT_SET_TYPE, /* 17 */ + CURLE_PARTIAL_FILE, /* 18 */ + CURLE_FTP_COULDNT_RETR_FILE, /* 19 */ + CURLE_OBSOLETE20, /* 20 - NOT USED */ + CURLE_QUOTE_ERROR, /* 21 - quote command failure */ + CURLE_HTTP_RETURNED_ERROR, /* 22 */ + CURLE_WRITE_ERROR, /* 23 */ + CURLE_OBSOLETE24, /* 24 - NOT USED */ + CURLE_UPLOAD_FAILED, /* 25 - failed upload "command" */ + CURLE_READ_ERROR, /* 26 - couldn't open/read from file */ + CURLE_OUT_OF_MEMORY, /* 27 */ + /* Note: CURLE_OUT_OF_MEMORY may sometimes indicate a conversion error + instead of a memory allocation error if CURL_DOES_CONVERSIONS + is defined + */ + CURLE_OPERATION_TIMEDOUT, /* 28 - the timeout time was reached */ + CURLE_OBSOLETE29, /* 29 - NOT USED */ + CURLE_FTP_PORT_FAILED, /* 30 - FTP PORT operation failed */ + CURLE_FTP_COULDNT_USE_REST, /* 31 - the REST command failed */ + CURLE_OBSOLETE32, /* 32 - NOT USED */ + CURLE_RANGE_ERROR, /* 33 - RANGE "command" didn't work */ + CURLE_HTTP_POST_ERROR, /* 34 */ + CURLE_SSL_CONNECT_ERROR, /* 35 - wrong when connecting with SSL */ + CURLE_BAD_DOWNLOAD_RESUME, /* 36 - couldn't resume download */ + CURLE_FILE_COULDNT_READ_FILE, /* 37 */ + CURLE_LDAP_CANNOT_BIND, /* 38 */ + CURLE_LDAP_SEARCH_FAILED, /* 39 */ + CURLE_OBSOLETE40, /* 40 - NOT USED */ + CURLE_FUNCTION_NOT_FOUND, /* 41 */ + CURLE_ABORTED_BY_CALLBACK, /* 42 */ + CURLE_BAD_FUNCTION_ARGUMENT, /* 43 */ + CURLE_OBSOLETE44, /* 44 - NOT USED */ + CURLE_INTERFACE_FAILED, /* 45 - CURLOPT_INTERFACE failed */ + CURLE_OBSOLETE46, /* 46 - NOT USED */ + CURLE_TOO_MANY_REDIRECTS , /* 47 - catch endless re-direct loops */ + CURLE_UNKNOWN_TELNET_OPTION, /* 48 - User specified an unknown option */ + 
CURLE_TELNET_OPTION_SYNTAX , /* 49 - Malformed telnet option */ + CURLE_OBSOLETE50, /* 50 - NOT USED */ + CURLE_PEER_FAILED_VERIFICATION, /* 51 - peer's certificate or fingerprint + wasn't verified fine */ + CURLE_GOT_NOTHING, /* 52 - when this is a specific error */ + CURLE_SSL_ENGINE_NOTFOUND, /* 53 - SSL crypto engine not found */ + CURLE_SSL_ENGINE_SETFAILED, /* 54 - can not set SSL crypto engine as + default */ + CURLE_SEND_ERROR, /* 55 - failed sending network data */ + CURLE_RECV_ERROR, /* 56 - failure in receiving network data */ + CURLE_OBSOLETE57, /* 57 - NOT IN USE */ + CURLE_SSL_CERTPROBLEM, /* 58 - problem with the local certificate */ + CURLE_SSL_CIPHER, /* 59 - couldn't use specified cipher */ + CURLE_SSL_CACERT, /* 60 - problem with the CA cert (path?) */ + CURLE_BAD_CONTENT_ENCODING, /* 61 - Unrecognized transfer encoding */ + CURLE_LDAP_INVALID_URL, /* 62 - Invalid LDAP URL */ + CURLE_FILESIZE_EXCEEDED, /* 63 - Maximum file size exceeded */ + CURLE_USE_SSL_FAILED, /* 64 - Requested FTP SSL level failed */ + CURLE_SEND_FAIL_REWIND, /* 65 - Sending the data requires a rewind + that failed */ + CURLE_SSL_ENGINE_INITFAILED, /* 66 - failed to initialise ENGINE */ + CURLE_LOGIN_DENIED, /* 67 - user, password or similar was not + accepted and we failed to login */ + CURLE_TFTP_NOTFOUND, /* 68 - file not found on server */ + CURLE_TFTP_PERM, /* 69 - permission problem on server */ + CURLE_REMOTE_DISK_FULL, /* 70 - out of disk space on server */ + CURLE_TFTP_ILLEGAL, /* 71 - Illegal TFTP operation */ + CURLE_TFTP_UNKNOWNID, /* 72 - Unknown transfer ID */ + CURLE_REMOTE_FILE_EXISTS, /* 73 - File already exists */ + CURLE_TFTP_NOSUCHUSER, /* 74 - No such user */ + CURLE_CONV_FAILED, /* 75 - conversion failed */ + CURLE_CONV_REQD, /* 76 - caller must register conversion + callbacks using curl_easy_setopt options + CURLOPT_CONV_FROM_NETWORK_FUNCTION, + CURLOPT_CONV_TO_NETWORK_FUNCTION, and + CURLOPT_CONV_FROM_UTF8_FUNCTION */ + CURLE_SSL_CACERT_BADFILE, /* 77 - could not load CACERT file, missing + or wrong format */ + CURLE_REMOTE_FILE_NOT_FOUND, /* 78 - remote file not found */ + CURLE_SSH, /* 79 - error from the SSH layer, somewhat + generic so the error message will be of + interest when this has happened */ + + CURLE_SSL_SHUTDOWN_FAILED, /* 80 - Failed to shut down the SSL + connection */ + CURLE_AGAIN, /* 81 - socket is not ready for send/recv, + wait till it's ready and try again (Added + in 7.18.2) */ + CURLE_SSL_CRL_BADFILE, /* 82 - could not load CRL file, missing or + wrong format (Added in 7.19.0) */ + CURLE_SSL_ISSUER_ERROR, /* 83 - Issuer check failed. (Added in + 7.19.0) */ + CURL_LAST /* never use! */ +} CURLcode; + +#ifndef CURL_NO_OLDIES /* define this to test if your app builds with all + the obsolete stuff removed! */ + +/* Backwards compatibility with older names */ + +/* The following were added in 7.17.1 */ +/* These are scheduled to disappear by 2009 */ +#define CURLE_SSL_PEER_CERTIFICATE CURLE_PEER_FAILED_VERIFICATION + +/* The following were added in 7.17.0 */ +/* These are scheduled to disappear by 2009 */ +#define CURLE_OBSOLETE CURLE_OBSOLETE50 /* noone should be using this! 
*/ +#define CURLE_BAD_PASSWORD_ENTERED CURLE_OBSOLETE46 +#define CURLE_BAD_CALLING_ORDER CURLE_OBSOLETE44 +#define CURLE_FTP_USER_PASSWORD_INCORRECT CURLE_OBSOLETE10 +#define CURLE_FTP_CANT_RECONNECT CURLE_OBSOLETE16 +#define CURLE_FTP_COULDNT_GET_SIZE CURLE_OBSOLETE32 +#define CURLE_FTP_COULDNT_SET_ASCII CURLE_OBSOLETE29 +#define CURLE_FTP_WEIRD_USER_REPLY CURLE_OBSOLETE12 +#define CURLE_FTP_WRITE_ERROR CURLE_OBSOLETE20 +#define CURLE_LIBRARY_NOT_FOUND CURLE_OBSOLETE40 +#define CURLE_MALFORMAT_USER CURLE_OBSOLETE24 +#define CURLE_SHARE_IN_USE CURLE_OBSOLETE57 +#define CURLE_URL_MALFORMAT_USER CURLE_OBSOLETE4 + +#define CURLE_FTP_ACCESS_DENIED CURLE_REMOTE_ACCESS_DENIED +#define CURLE_FTP_COULDNT_SET_BINARY CURLE_FTP_COULDNT_SET_TYPE +#define CURLE_FTP_QUOTE_ERROR CURLE_QUOTE_ERROR +#define CURLE_TFTP_DISKFULL CURLE_REMOTE_DISK_FULL +#define CURLE_TFTP_EXISTS CURLE_REMOTE_FILE_EXISTS +#define CURLE_HTTP_RANGE_ERROR CURLE_RANGE_ERROR +#define CURLE_FTP_SSL_FAILED CURLE_USE_SSL_FAILED + +/* The following were added earlier */ + +#define CURLE_OPERATION_TIMEOUTED CURLE_OPERATION_TIMEDOUT + +#define CURLE_HTTP_NOT_FOUND CURLE_HTTP_RETURNED_ERROR +#define CURLE_HTTP_PORT_FAILED CURLE_INTERFACE_FAILED +#define CURLE_FTP_COULDNT_STOR_FILE CURLE_UPLOAD_FAILED + +#define CURLE_FTP_PARTIAL_FILE CURLE_PARTIAL_FILE +#define CURLE_FTP_BAD_DOWNLOAD_RESUME CURLE_BAD_DOWNLOAD_RESUME + +/* This was the error code 50 in 7.7.3 and a few earlier versions, this + is no longer used by libcurl but is instead #defined here only to not + make programs break */ +#define CURLE_ALREADY_COMPLETE 99999 + +#endif /*!CURL_NO_OLDIES*/ + +/* This prototype applies to all conversion callbacks */ +typedef CURLcode (*curl_conv_callback)(char *buffer, size_t length); + +typedef CURLcode (*curl_ssl_ctx_callback)(CURL *curl, /* easy handle */ + void *ssl_ctx, /* actually an + OpenSSL SSL_CTX */ + void *userptr); + +typedef enum { + CURLPROXY_HTTP = 0, /* added in 7.10, new in 7.19.4 default is to use + CONNECT HTTP/1.1 */ + CURLPROXY_HTTP_1_0 = 1, /* added in 7.19.4, force to use CONNECT + HTTP/1.0 */ + CURLPROXY_SOCKS4 = 4, /* support added in 7.15.2, enum existed already + in 7.10 */ + CURLPROXY_SOCKS5 = 5, /* added in 7.10 */ + CURLPROXY_SOCKS4A = 6, /* added in 7.18.0 */ + CURLPROXY_SOCKS5_HOSTNAME = 7 /* Use the SOCKS5 protocol but pass along the + host name rather than the IP address. 
added + in 7.18.0 */ +} curl_proxytype; /* this enum was added in 7.10 */ + +#define CURLAUTH_NONE 0 /* nothing */ +#define CURLAUTH_BASIC (1<<0) /* Basic (default) */ +#define CURLAUTH_DIGEST (1<<1) /* Digest */ +#define CURLAUTH_GSSNEGOTIATE (1<<2) /* GSS-Negotiate */ +#define CURLAUTH_NTLM (1<<3) /* NTLM */ +#define CURLAUTH_DIGEST_IE (1<<4) /* Digest with IE flavour */ +#define CURLAUTH_ANY (~CURLAUTH_DIGEST_IE) /* all fine types set */ +#define CURLAUTH_ANYSAFE (~(CURLAUTH_BASIC|CURLAUTH_DIGEST_IE)) + +#define CURLSSH_AUTH_ANY ~0 /* all types supported by the server */ +#define CURLSSH_AUTH_NONE 0 /* none allowed, silly but complete */ +#define CURLSSH_AUTH_PUBLICKEY (1<<0) /* public/private key files */ +#define CURLSSH_AUTH_PASSWORD (1<<1) /* password */ +#define CURLSSH_AUTH_HOST (1<<2) /* host key files */ +#define CURLSSH_AUTH_KEYBOARD (1<<3) /* keyboard interactive */ +#define CURLSSH_AUTH_DEFAULT CURLSSH_AUTH_ANY + +#define CURL_ERROR_SIZE 256 + +struct curl_khkey { + const char *key; /* points to a zero-terminated string encoded with base64 + if len is zero, otherwise to the "raw" data */ + size_t len; + enum type { + CURLKHTYPE_UNKNOWN, + CURLKHTYPE_RSA1, + CURLKHTYPE_RSA, + CURLKHTYPE_DSS + } keytype; +}; + +/* this is the set of return values expected from the curl_sshkeycallback + callback */ +enum curl_khstat { + CURLKHSTAT_FINE_ADD_TO_FILE, + CURLKHSTAT_FINE, + CURLKHSTAT_REJECT, /* reject the connection, return an error */ + CURLKHSTAT_DEFER, /* do not accept it, but we can't answer right now so + this causes a CURLE_DEFER error but otherwise the + connection will be left intact etc */ + CURLKHSTAT_LAST /* not for use, only a marker for last-in-list */ +}; + +/* this is the set of status codes pass in to the callback */ +enum curl_khmatch { + CURLKHMATCH_OK, /* match */ + CURLKHMATCH_MISMATCH, /* host found, key mismatch! */ + CURLKHMATCH_MISSING, /* no matching host/key found */ + CURLKHMATCH_LAST /* not for use, only a marker for last-in-list */ +}; + +typedef int + (*curl_sshkeycallback) (CURL *easy, /* easy handle */ + const struct curl_khkey *knownkey, /* known */ + const struct curl_khkey *foundkey, /* found */ + enum curl_khmatch, /* libcurl's view on the keys */ + void *clientp); /* custom pointer passed from app */ + +/* parameter for the CURLOPT_USE_SSL option */ +typedef enum { + CURLUSESSL_NONE, /* do not attempt to use SSL */ + CURLUSESSL_TRY, /* try using SSL, proceed anyway otherwise */ + CURLUSESSL_CONTROL, /* SSL for the control connection or fail */ + CURLUSESSL_ALL, /* SSL for all communication or fail */ + CURLUSESSL_LAST /* not an option, never use */ +} curl_usessl; + +#ifndef CURL_NO_OLDIES /* define this to test if your app builds with all + the obsolete stuff removed! 
*/ + +/* Backwards compatibility with older names */ +/* These are scheduled to disappear by 2009 */ + +#define CURLFTPSSL_NONE CURLUSESSL_NONE +#define CURLFTPSSL_TRY CURLUSESSL_TRY +#define CURLFTPSSL_CONTROL CURLUSESSL_CONTROL +#define CURLFTPSSL_ALL CURLUSESSL_ALL +#define CURLFTPSSL_LAST CURLUSESSL_LAST +#define curl_ftpssl curl_usessl +#endif /*!CURL_NO_OLDIES*/ + +/* parameter for the CURLOPT_FTP_SSL_CCC option */ +typedef enum { + CURLFTPSSL_CCC_NONE, /* do not send CCC */ + CURLFTPSSL_CCC_PASSIVE, /* Let the server initiate the shutdown */ + CURLFTPSSL_CCC_ACTIVE, /* Initiate the shutdown */ + CURLFTPSSL_CCC_LAST /* not an option, never use */ +} curl_ftpccc; + +/* parameter for the CURLOPT_FTPSSLAUTH option */ +typedef enum { + CURLFTPAUTH_DEFAULT, /* let libcurl decide */ + CURLFTPAUTH_SSL, /* use "AUTH SSL" */ + CURLFTPAUTH_TLS, /* use "AUTH TLS" */ + CURLFTPAUTH_LAST /* not an option, never use */ +} curl_ftpauth; + +/* parameter for the CURLOPT_FTP_CREATE_MISSING_DIRS option */ +typedef enum { + CURLFTP_CREATE_DIR_NONE, /* do NOT create missing dirs! */ + CURLFTP_CREATE_DIR, /* (FTP/SFTP) if CWD fails, try MKD and then CWD + again if MKD succeeded, for SFTP this does + similar magic */ + CURLFTP_CREATE_DIR_RETRY, /* (FTP only) if CWD fails, try MKD and then CWD + again even if MKD failed! */ + CURLFTP_CREATE_DIR_LAST /* not an option, never use */ +} curl_ftpcreatedir; + +/* parameter for the CURLOPT_FTP_FILEMETHOD option */ +typedef enum { + CURLFTPMETHOD_DEFAULT, /* let libcurl pick */ + CURLFTPMETHOD_MULTICWD, /* single CWD operation for each path part */ + CURLFTPMETHOD_NOCWD, /* no CWD at all */ + CURLFTPMETHOD_SINGLECWD, /* one CWD to full dir, then work on file */ + CURLFTPMETHOD_LAST /* not an option, never use */ +} curl_ftpmethod; + +/* CURLPROTO_ defines are for the CURLOPT_*PROTOCOLS options */ +#define CURLPROTO_HTTP (1<<0) +#define CURLPROTO_HTTPS (1<<1) +#define CURLPROTO_FTP (1<<2) +#define CURLPROTO_FTPS (1<<3) +#define CURLPROTO_SCP (1<<4) +#define CURLPROTO_SFTP (1<<5) +#define CURLPROTO_TELNET (1<<6) +#define CURLPROTO_LDAP (1<<7) +#define CURLPROTO_LDAPS (1<<8) +#define CURLPROTO_DICT (1<<9) +#define CURLPROTO_FILE (1<<10) +#define CURLPROTO_TFTP (1<<11) +#define CURLPROTO_ALL (~0) /* enable everything */ + +/* long may be 32 or 64 bits, but we should never depend on anything else + but 32 */ +#define CURLOPTTYPE_LONG 0 +#define CURLOPTTYPE_OBJECTPOINT 10000 +#define CURLOPTTYPE_FUNCTIONPOINT 20000 +#define CURLOPTTYPE_OFF_T 30000 + +/* name is uppercase CURLOPT_, + type is one of the defined CURLOPTTYPE_ + number is unique identifier */ +#ifdef CINIT +#undef CINIT +#endif + +#ifdef CURL_ISOCPP +#define CINIT(name,type,number) CURLOPT_ ## name = CURLOPTTYPE_ ## type + number +#else +/* The macro "##" is ISO C, we assume pre-ISO C doesn't support it. */ +#define LONG CURLOPTTYPE_LONG +#define OBJECTPOINT CURLOPTTYPE_OBJECTPOINT +#define FUNCTIONPOINT CURLOPTTYPE_FUNCTIONPOINT +#define OFF_T CURLOPTTYPE_OFF_T +#define CINIT(name,type,number) CURLOPT_/**/name = type + number +#endif + +/* + * This macro-mania below setups the CURLOPT_[what] enum, to be used with + * curl_easy_setopt(). The first argument in the CINIT() macro is the [what] + * word. + */ + +typedef enum { + /* This is the FILE * or void * the regular output should be written to. */ + CINIT(FILE, OBJECTPOINT, 1), + + /* The full URL to get/put */ + CINIT(URL, OBJECTPOINT, 2), + + /* Port number to connect to, if other than default. */ + CINIT(PORT, LONG, 3), + + /* Name of proxy to use. 
*/ + CINIT(PROXY, OBJECTPOINT, 4), + + /* "name:password" to use when fetching. */ + CINIT(USERPWD, OBJECTPOINT, 5), + + /* "name:password" to use with proxy. */ + CINIT(PROXYUSERPWD, OBJECTPOINT, 6), + + /* Range to get, specified as an ASCII string. */ + CINIT(RANGE, OBJECTPOINT, 7), + + /* not used */ + + /* Specified file stream to upload from (use as input): */ + CINIT(INFILE, OBJECTPOINT, 9), + + /* Buffer to receive error messages in, must be at least CURL_ERROR_SIZE + * bytes big. If this is not used, error messages go to stderr instead: */ + CINIT(ERRORBUFFER, OBJECTPOINT, 10), + + /* Function that will be called to store the output (instead of fwrite). The + * parameters will use fwrite() syntax, make sure to follow them. */ + CINIT(WRITEFUNCTION, FUNCTIONPOINT, 11), + + /* Function that will be called to read the input (instead of fread). The + * parameters will use fread() syntax, make sure to follow them. */ + CINIT(READFUNCTION, FUNCTIONPOINT, 12), + + /* Time-out the read operation after this amount of seconds */ + CINIT(TIMEOUT, LONG, 13), + + /* If the CURLOPT_INFILE is used, this can be used to inform libcurl about + * how large the file being sent really is. That allows better error + * checking and better verifies that the upload was successful. -1 means + * unknown size. + * + * For large file support, there is also a _LARGE version of the key + * which takes an off_t type, allowing platforms with larger off_t + * sizes to handle larger files. See below for INFILESIZE_LARGE. + */ + CINIT(INFILESIZE, LONG, 14), + + /* POST static input fields. */ + CINIT(POSTFIELDS, OBJECTPOINT, 15), + + /* Set the referrer page (needed by some CGIs) */ + CINIT(REFERER, OBJECTPOINT, 16), + + /* Set the FTP PORT string (interface name, named or numerical IP address) + Use i.e '-' to use default address. */ + CINIT(FTPPORT, OBJECTPOINT, 17), + + /* Set the User-Agent string (examined by some CGIs) */ + CINIT(USERAGENT, OBJECTPOINT, 18), + + /* If the download receives less than "low speed limit" bytes/second + * during "low speed time" seconds, the operations is aborted. + * You could i.e if you have a pretty high speed connection, abort if + * it is less than 2000 bytes/sec during 20 seconds. + */ + + /* Set the "low speed limit" */ + CINIT(LOW_SPEED_LIMIT, LONG, 19), + + /* Set the "low speed time" */ + CINIT(LOW_SPEED_TIME, LONG, 20), + + /* Set the continuation offset. + * + * Note there is also a _LARGE version of this key which uses + * off_t types, allowing for large file offsets on platforms which + * use larger-than-32-bit off_t's. Look below for RESUME_FROM_LARGE. + */ + CINIT(RESUME_FROM, LONG, 21), + + /* Set cookie in request: */ + CINIT(COOKIE, OBJECTPOINT, 22), + + /* This points to a linked list of headers, struct curl_slist kind */ + CINIT(HTTPHEADER, OBJECTPOINT, 23), + + /* This points to a linked list of post entries, struct curl_httppost */ + CINIT(HTTPPOST, OBJECTPOINT, 24), + + /* name of the file keeping your private SSL-certificate */ + CINIT(SSLCERT, OBJECTPOINT, 25), + + /* password for the SSL or SSH private key */ + CINIT(KEYPASSWD, OBJECTPOINT, 26), + + /* send TYPE parameter? 
*/ + CINIT(CRLF, LONG, 27), + + /* send linked-list of QUOTE commands */ + CINIT(QUOTE, OBJECTPOINT, 28), + + /* send FILE * or void * to store headers to, if you use a callback it + is simply passed to the callback unmodified */ + CINIT(WRITEHEADER, OBJECTPOINT, 29), + + /* point to a file to read the initial cookies from, also enables + "cookie awareness" */ + CINIT(COOKIEFILE, OBJECTPOINT, 31), + + /* What version to specifically try to use. + See CURL_SSLVERSION defines below. */ + CINIT(SSLVERSION, LONG, 32), + + /* What kind of HTTP time condition to use, see defines */ + CINIT(TIMECONDITION, LONG, 33), + + /* Time to use with the above condition. Specified in number of seconds + since 1 Jan 1970 */ + CINIT(TIMEVALUE, LONG, 34), + + /* 35 = OBSOLETE */ + + /* Custom request, for customizing the get command like + HTTP: DELETE, TRACE and others + FTP: to use a different list command + */ + CINIT(CUSTOMREQUEST, OBJECTPOINT, 36), + + /* HTTP request, for odd commands like DELETE, TRACE and others */ + CINIT(STDERR, OBJECTPOINT, 37), + + /* 38 is not used */ + + /* send linked-list of post-transfer QUOTE commands */ + CINIT(POSTQUOTE, OBJECTPOINT, 39), + + /* Pass a pointer to string of the output using full variable-replacement + as described elsewhere. */ + CINIT(WRITEINFO, OBJECTPOINT, 40), + + CINIT(VERBOSE, LONG, 41), /* talk a lot */ + CINIT(HEADER, LONG, 42), /* throw the header out too */ + CINIT(NOPROGRESS, LONG, 43), /* shut off the progress meter */ + CINIT(NOBODY, LONG, 44), /* use HEAD to get http document */ + CINIT(FAILONERROR, LONG, 45), /* no output on http error codes >= 300 */ + CINIT(UPLOAD, LONG, 46), /* this is an upload */ + CINIT(POST, LONG, 47), /* HTTP POST method */ + CINIT(DIRLISTONLY, LONG, 48), /* return bare names when listing directories */ + + CINIT(APPEND, LONG, 50), /* Append instead of overwrite on upload! */ + + /* Specify whether to read the user+password from the .netrc or the URL. + * This must be one of the CURL_NETRC_* enums below. */ + CINIT(NETRC, LONG, 51), + + CINIT(FOLLOWLOCATION, LONG, 52), /* use Location: Luke! */ + + CINIT(TRANSFERTEXT, LONG, 53), /* transfer data in text/ASCII format */ + CINIT(PUT, LONG, 54), /* HTTP PUT */ + + /* 55 = OBSOLETE */ + + /* Function that will be called instead of the internal progress display + * function. This function should be defined as the curl_progress_callback + * prototype defines. */ + CINIT(PROGRESSFUNCTION, FUNCTIONPOINT, 56), + + /* Data passed to the progress callback */ + CINIT(PROGRESSDATA, OBJECTPOINT, 57), + + /* We want the referrer field set automatically when following locations */ + CINIT(AUTOREFERER, LONG, 58), + + /* Port of the proxy, can be set in the proxy string as well with: + "[host]:[port]" */ + CINIT(PROXYPORT, LONG, 59), + + /* size of the POST input data, if strlen() is not good to use */ + CINIT(POSTFIELDSIZE, LONG, 60), + + /* tunnel non-http operations through a HTTP proxy */ + CINIT(HTTPPROXYTUNNEL, LONG, 61), + + /* Set the interface string to use as outgoing network interface */ + CINIT(INTERFACE, OBJECTPOINT, 62), + + /* Set the krb4/5 security level, this also enables krb4/5 awareness. This + * is a string, 'clear', 'safe', 'confidential' or 'private'. If the string + * is set but doesn't match one of these, 'private' will be used. */ + CINIT(KRBLEVEL, OBJECTPOINT, 63), + + /* Set if we should verify the peer in ssl handshake, set 1 to verify. 
*/ + CINIT(SSL_VERIFYPEER, LONG, 64), + + /* The CApath or CAfile used to validate the peer certificate + this option is used only if SSL_VERIFYPEER is true */ + CINIT(CAINFO, OBJECTPOINT, 65), + + /* 66 = OBSOLETE */ + /* 67 = OBSOLETE */ + + /* Maximum number of http redirects to follow */ + CINIT(MAXREDIRS, LONG, 68), + + /* Pass a long set to 1 to get the date of the requested document (if + possible)! Pass a zero to shut it off. */ + CINIT(FILETIME, LONG, 69), + + /* This points to a linked list of telnet options */ + CINIT(TELNETOPTIONS, OBJECTPOINT, 70), + + /* Max amount of cached alive connections */ + CINIT(MAXCONNECTS, LONG, 71), + + /* What policy to use when closing connections when the cache is filled + up */ + CINIT(CLOSEPOLICY, LONG, 72), + + /* 73 = OBSOLETE */ + + /* Set to explicitly use a new connection for the upcoming transfer. + Do not use this unless you're absolutely sure of this, as it makes the + operation slower and is less friendly for the network. */ + CINIT(FRESH_CONNECT, LONG, 74), + + /* Set to explicitly forbid the upcoming transfer's connection to be re-used + when done. Do not use this unless you're absolutely sure of this, as it + makes the operation slower and is less friendly for the network. */ + CINIT(FORBID_REUSE, LONG, 75), + + /* Set to a file name that contains random data for libcurl to use to + seed the random engine when doing SSL connects. */ + CINIT(RANDOM_FILE, OBJECTPOINT, 76), + + /* Set to the Entropy Gathering Daemon socket pathname */ + CINIT(EGDSOCKET, OBJECTPOINT, 77), + + /* Time-out connect operations after this amount of seconds, if connects + are OK within this time, then fine... This only aborts the connect + phase. [Only works on unix-style/SIGALRM operating systems] */ + CINIT(CONNECTTIMEOUT, LONG, 78), + + /* Function that will be called to store headers (instead of fwrite). The + * parameters will use fwrite() syntax, make sure to follow them. */ + CINIT(HEADERFUNCTION, FUNCTIONPOINT, 79), + + /* Set this to force the HTTP request to get back to GET. Only really usable + if POST, PUT or a custom request have been used first. + */ + CINIT(HTTPGET, LONG, 80), + + /* Set if we should verify the Common name from the peer certificate in ssl + * handshake, set 1 to check existence, 2 to ensure that it matches the + * provided hostname. */ + CINIT(SSL_VERIFYHOST, LONG, 81), + + /* Specify which file name to write all known cookies in after completed + operation. Set file name to "-" (dash) to make it go to stdout. */ + CINIT(COOKIEJAR, OBJECTPOINT, 82), + + /* Specify which SSL ciphers to use */ + CINIT(SSL_CIPHER_LIST, OBJECTPOINT, 83), + + /* Specify which HTTP version to use! This must be set to one of the + CURL_HTTP_VERSION* enums set below. */ + CINIT(HTTP_VERSION, LONG, 84), + + /* Specifically switch on or off the FTP engine's use of the EPSV command. By + default, that one will always be attempted before the more traditional + PASV command. */ + CINIT(FTP_USE_EPSV, LONG, 85), + + /* type of the file keeping your SSL-certificate ("DER", "PEM", "ENG") */ + CINIT(SSLCERTTYPE, OBJECTPOINT, 86), + + /* name of the file keeping your private SSL-key */ + CINIT(SSLKEY, OBJECTPOINT, 87), + + /* type of the file keeping your private SSL-key ("DER", "PEM", "ENG") */ + CINIT(SSLKEYTYPE, OBJECTPOINT, 88), + + /* crypto engine for the SSL-sub system */ + CINIT(SSLENGINE, OBJECTPOINT, 89), + + /* set the crypto engine for the SSL-sub system as default + the param has no meaning... 
+ */ + CINIT(SSLENGINE_DEFAULT, LONG, 90), + + /* Non-zero value means to use the global dns cache */ + CINIT(DNS_USE_GLOBAL_CACHE, LONG, 91), /* To become OBSOLETE soon */ + + /* DNS cache timeout */ + CINIT(DNS_CACHE_TIMEOUT, LONG, 92), + + /* send linked-list of pre-transfer QUOTE commands */ + CINIT(PREQUOTE, OBJECTPOINT, 93), + + /* set the debug function */ + CINIT(DEBUGFUNCTION, FUNCTIONPOINT, 94), + + /* set the data for the debug function */ + CINIT(DEBUGDATA, OBJECTPOINT, 95), + + /* mark this as start of a cookie session */ + CINIT(COOKIESESSION, LONG, 96), + + /* The CApath directory used to validate the peer certificate + this option is used only if SSL_VERIFYPEER is true */ + CINIT(CAPATH, OBJECTPOINT, 97), + + /* Instruct libcurl to use a smaller receive buffer */ + CINIT(BUFFERSIZE, LONG, 98), + + /* Instruct libcurl to not use any signal/alarm handlers, even when using + timeouts. This option is useful for multi-threaded applications. + See libcurl-the-guide for more background information. */ + CINIT(NOSIGNAL, LONG, 99), + + /* Provide a CURLShare for mutexing non-ts data */ + CINIT(SHARE, OBJECTPOINT, 100), + + /* indicates type of proxy. accepted values are CURLPROXY_HTTP (default), + CURLPROXY_SOCKS4, CURLPROXY_SOCKS4A and CURLPROXY_SOCKS5. */ + CINIT(PROXYTYPE, LONG, 101), + + /* Set the Accept-Encoding string. Use this to tell a server you would like + the response to be compressed. */ + CINIT(ENCODING, OBJECTPOINT, 102), + + /* Set pointer to private data */ + CINIT(PRIVATE, OBJECTPOINT, 103), + + /* Set aliases for HTTP 200 in the HTTP Response header */ + CINIT(HTTP200ALIASES, OBJECTPOINT, 104), + + /* Continue to send authentication (user+password) when following locations, + even when hostname changed. This can potentially send off the name + and password to whatever host the server decides. */ + CINIT(UNRESTRICTED_AUTH, LONG, 105), + + /* Specifically switch on or off the FTP engine's use of the EPRT command ( it + also disables the LPRT attempt). By default, those ones will always be + attempted before the good old traditional PORT command. */ + CINIT(FTP_USE_EPRT, LONG, 106), + + /* Set this to a bitmask value to enable the particular authentications + methods you like. Use this in combination with CURLOPT_USERPWD. + Note that setting multiple bits may cause extra network round-trips. */ + CINIT(HTTPAUTH, LONG, 107), + + /* Set the ssl context callback function, currently only for OpenSSL ssl_ctx + in second argument. The function must be matching the + curl_ssl_ctx_callback proto. */ + CINIT(SSL_CTX_FUNCTION, FUNCTIONPOINT, 108), + + /* Set the userdata for the ssl context callback function's third + argument */ + CINIT(SSL_CTX_DATA, OBJECTPOINT, 109), + + /* FTP Option that causes missing dirs to be created on the remote server. + In 7.19.4 we introduced the convenience enums for this option using the + CURLFTP_CREATE_DIR prefix. + */ + CINIT(FTP_CREATE_MISSING_DIRS, LONG, 110), + + /* Set this to a bitmask value to enable the particular authentications + methods you like. Use this in combination with CURLOPT_PROXYUSERPWD. + Note that setting multiple bits may cause extra network round-trips. */ + CINIT(PROXYAUTH, LONG, 111), + + /* FTP option that changes the timeout, in seconds, associated with + getting a response. This is different from transfer timeout time and + essentially places a demand on the FTP server to acknowledge commands + in a timely manner. 
*/ + CINIT(FTP_RESPONSE_TIMEOUT, LONG, 112), + + /* Set this option to one of the CURL_IPRESOLVE_* defines (see below) to + tell libcurl to resolve names to those IP versions only. This only has + affect on systems with support for more than one, i.e IPv4 _and_ IPv6. */ + CINIT(IPRESOLVE, LONG, 113), + + /* Set this option to limit the size of a file that will be downloaded from + an HTTP or FTP server. + + Note there is also _LARGE version which adds large file support for + platforms which have larger off_t sizes. See MAXFILESIZE_LARGE below. */ + CINIT(MAXFILESIZE, LONG, 114), + + /* See the comment for INFILESIZE above, but in short, specifies + * the size of the file being uploaded. -1 means unknown. + */ + CINIT(INFILESIZE_LARGE, OFF_T, 115), + + /* Sets the continuation offset. There is also a LONG version of this; + * look above for RESUME_FROM. + */ + CINIT(RESUME_FROM_LARGE, OFF_T, 116), + + /* Sets the maximum size of data that will be downloaded from + * an HTTP or FTP server. See MAXFILESIZE above for the LONG version. + */ + CINIT(MAXFILESIZE_LARGE, OFF_T, 117), + + /* Set this option to the file name of your .netrc file you want libcurl + to parse (using the CURLOPT_NETRC option). If not set, libcurl will do + a poor attempt to find the user's home directory and check for a .netrc + file in there. */ + CINIT(NETRC_FILE, OBJECTPOINT, 118), + + /* Enable SSL/TLS for FTP, pick one of: + CURLFTPSSL_TRY - try using SSL, proceed anyway otherwise + CURLFTPSSL_CONTROL - SSL for the control connection or fail + CURLFTPSSL_ALL - SSL for all communication or fail + */ + CINIT(USE_SSL, LONG, 119), + + /* The _LARGE version of the standard POSTFIELDSIZE option */ + CINIT(POSTFIELDSIZE_LARGE, OFF_T, 120), + + /* Enable/disable the TCP Nagle algorithm */ + CINIT(TCP_NODELAY, LONG, 121), + + /* 122 OBSOLETE, used in 7.12.3. Gone in 7.13.0 */ + /* 123 OBSOLETE. Gone in 7.16.0 */ + /* 124 OBSOLETE, used in 7.12.3. Gone in 7.13.0 */ + /* 125 OBSOLETE, used in 7.12.3. Gone in 7.13.0 */ + /* 126 OBSOLETE, used in 7.12.3. Gone in 7.13.0 */ + /* 127 OBSOLETE. Gone in 7.16.0 */ + /* 128 OBSOLETE. Gone in 7.16.0 */ + + /* When FTP over SSL/TLS is selected (with CURLOPT_USE_SSL), this option + can be used to change libcurl's default action which is to first try + "AUTH SSL" and then "AUTH TLS" in this order, and proceed when a OK + response has been received. + + Available parameters are: + CURLFTPAUTH_DEFAULT - let libcurl decide + CURLFTPAUTH_SSL - try "AUTH SSL" first, then TLS + CURLFTPAUTH_TLS - try "AUTH TLS" first, then SSL + */ + CINIT(FTPSSLAUTH, LONG, 129), + + CINIT(IOCTLFUNCTION, FUNCTIONPOINT, 130), + CINIT(IOCTLDATA, OBJECTPOINT, 131), + + /* 132 OBSOLETE. Gone in 7.16.0 */ + /* 133 OBSOLETE. Gone in 7.16.0 */ + + /* zero terminated string for pass on to the FTP server when asked for + "account" info */ + CINIT(FTP_ACCOUNT, OBJECTPOINT, 134), + + /* feed cookies into cookie engine */ + CINIT(COOKIELIST, OBJECTPOINT, 135), + + /* ignore Content-Length */ + CINIT(IGNORE_CONTENT_LENGTH, LONG, 136), + + /* Set to non-zero to skip the IP address received in a 227 PASV FTP server + response. Typically used for FTP-SSL purposes but is not restricted to + that. libcurl will then instead use the same IP address it used for the + control connection. */ + CINIT(FTP_SKIP_PASV_IP, LONG, 137), + + /* Select "file method" to use when doing FTP, see the curl_ftpmethod + above. 
*/ + CINIT(FTP_FILEMETHOD, LONG, 138), + + /* Local port number to bind the socket to */ + CINIT(LOCALPORT, LONG, 139), + + /* Number of ports to try, including the first one set with LOCALPORT. + Thus, setting it to 1 will make no additional attempts but the first. + */ + CINIT(LOCALPORTRANGE, LONG, 140), + + /* no transfer, set up connection and let application use the socket by + extracting it with CURLINFO_LASTSOCKET */ + CINIT(CONNECT_ONLY, LONG, 141), + + /* Function that will be called to convert from the + network encoding (instead of using the iconv calls in libcurl) */ + CINIT(CONV_FROM_NETWORK_FUNCTION, FUNCTIONPOINT, 142), + + /* Function that will be called to convert to the + network encoding (instead of using the iconv calls in libcurl) */ + CINIT(CONV_TO_NETWORK_FUNCTION, FUNCTIONPOINT, 143), + + /* Function that will be called to convert from UTF8 + (instead of using the iconv calls in libcurl) + Note that this is used only for SSL certificate processing */ + CINIT(CONV_FROM_UTF8_FUNCTION, FUNCTIONPOINT, 144), + + /* if the connection proceeds too quickly then need to slow it down */ + /* limit-rate: maximum number of bytes per second to send or receive */ + CINIT(MAX_SEND_SPEED_LARGE, OFF_T, 145), + CINIT(MAX_RECV_SPEED_LARGE, OFF_T, 146), + + /* Pointer to command string to send if USER/PASS fails. */ + CINIT(FTP_ALTERNATIVE_TO_USER, OBJECTPOINT, 147), + + /* callback function for setting socket options */ + CINIT(SOCKOPTFUNCTION, FUNCTIONPOINT, 148), + CINIT(SOCKOPTDATA, OBJECTPOINT, 149), + + /* set to 0 to disable session ID re-use for this transfer, default is + enabled (== 1) */ + CINIT(SSL_SESSIONID_CACHE, LONG, 150), + + /* allowed SSH authentication methods */ + CINIT(SSH_AUTH_TYPES, LONG, 151), + + /* Used by scp/sftp to do public/private key authentication */ + CINIT(SSH_PUBLIC_KEYFILE, OBJECTPOINT, 152), + CINIT(SSH_PRIVATE_KEYFILE, OBJECTPOINT, 153), + + /* Send CCC (Clear Command Channel) after authentication */ + CINIT(FTP_SSL_CCC, LONG, 154), + + /* Same as TIMEOUT and CONNECTTIMEOUT, but with ms resolution */ + CINIT(TIMEOUT_MS, LONG, 155), + CINIT(CONNECTTIMEOUT_MS, LONG, 156), + + /* set to zero to disable the libcurl's decoding and thus pass the raw body + data to the application even when it is encoded/compressed */ + CINIT(HTTP_TRANSFER_DECODING, LONG, 157), + CINIT(HTTP_CONTENT_DECODING, LONG, 158), + + /* Permission used when creating new files and directories on the remote + server for protocols that support it, SFTP/SCP/FILE */ + CINIT(NEW_FILE_PERMS, LONG, 159), + CINIT(NEW_DIRECTORY_PERMS, LONG, 160), + + /* Set the behaviour of POST when redirecting. Values must be set to one + of CURL_REDIR* defines below. This used to be called CURLOPT_POST301 */ + CINIT(POSTREDIR, LONG, 161), + + /* used by scp/sftp to verify the host's public key */ + CINIT(SSH_HOST_PUBLIC_KEY_MD5, OBJECTPOINT, 162), + + /* Callback function for opening socket (instead of socket(2)). Optionally, + callback is able change the address or refuse to connect returning + CURL_SOCKET_BAD. The callback should have type + curl_opensocket_callback */ + CINIT(OPENSOCKETFUNCTION, FUNCTIONPOINT, 163), + CINIT(OPENSOCKETDATA, OBJECTPOINT, 164), + + /* POST volatile input fields. 
*/ + CINIT(COPYPOSTFIELDS, OBJECTPOINT, 165), + + /* set transfer mode (;type=) when doing FTP via an HTTP proxy */ + CINIT(PROXY_TRANSFER_MODE, LONG, 166), + + /* Callback function for seeking in the input stream */ + CINIT(SEEKFUNCTION, FUNCTIONPOINT, 167), + CINIT(SEEKDATA, OBJECTPOINT, 168), + + /* CRL file */ + CINIT(CRLFILE, OBJECTPOINT, 169), + + /* Issuer certificate */ + CINIT(ISSUERCERT, OBJECTPOINT, 170), + + /* (IPv6) Address scope */ + CINIT(ADDRESS_SCOPE, LONG, 171), + + /* Collect certificate chain info and allow it to get retrievable with + CURLINFO_CERTINFO after the transfer is complete. (Unfortunately) only + working with OpenSSL-powered builds. */ + CINIT(CERTINFO, LONG, 172), + + /* "name" and "pwd" to use when fetching. */ + CINIT(USERNAME, OBJECTPOINT, 173), + CINIT(PASSWORD, OBJECTPOINT, 174), + + /* "name" and "pwd" to use with Proxy when fetching. */ + CINIT(PROXYUSERNAME, OBJECTPOINT, 175), + CINIT(PROXYPASSWORD, OBJECTPOINT, 176), + + /* Comma separated list of hostnames defining no-proxy zones. These should + match both hostnames directly, and hostnames within a domain. For + example, local.com will match local.com and www.local.com, but NOT + notlocal.com or www.notlocal.com. For compatibility with other + implementations of this, .local.com will be considered to be the same as + local.com. A single * is the only valid wildcard, and effectively + disables the use of proxy. */ + CINIT(NOPROXY, OBJECTPOINT, 177), + + /* block size for TFTP transfers */ + CINIT(TFTP_BLKSIZE, LONG, 178), + + /* Socks Service */ + CINIT(SOCKS5_GSSAPI_SERVICE, OBJECTPOINT, 179), + + /* Socks Service */ + CINIT(SOCKS5_GSSAPI_NEC, LONG, 180), + + /* set the bitmask for the protocols that are allowed to be used for the + transfer, which thus helps the app which takes URLs from users or other + external inputs and want to restrict what protocol(s) to deal + with. Defaults to CURLPROTO_ALL. */ + CINIT(PROTOCOLS, LONG, 181), + + /* set the bitmask for the protocols that libcurl is allowed to follow to, + as a subset of the CURLOPT_PROTOCOLS ones. That means the protocol needs + to be set in both bitmasks to be allowed to get redirected to. Defaults + to all protocols except FILE and SCP. */ + CINIT(REDIR_PROTOCOLS, LONG, 182), + + /* set the SSH knownhost file name to use */ + CINIT(SSH_KNOWNHOSTS, OBJECTPOINT, 183), + + /* set the SSH host key callback, must point to a curl_sshkeycallback + function */ + CINIT(SSH_KEYFUNCTION, FUNCTIONPOINT, 184), + + /* set the SSH host key callback custom pointer */ + CINIT(SSH_KEYDATA, OBJECTPOINT, 185), + + CURLOPT_LASTENTRY /* the last unused */ +} CURLoption; + +#ifndef CURL_NO_OLDIES /* define this to test if your app builds with all + the obsolete stuff removed! 
*/ + +/* Backwards compatibility with older names */ +/* These are scheduled to disappear by 2011 */ + +/* This was added in version 7.19.1 */ +#define CURLOPT_POST301 CURLOPT_POSTREDIR + +/* These are scheduled to disappear by 2009 */ + +/* The following were added in 7.17.0 */ +#define CURLOPT_SSLKEYPASSWD CURLOPT_KEYPASSWD +#define CURLOPT_FTPAPPEND CURLOPT_APPEND +#define CURLOPT_FTPLISTONLY CURLOPT_DIRLISTONLY +#define CURLOPT_FTP_SSL CURLOPT_USE_SSL + +/* The following were added earlier */ + +#define CURLOPT_SSLCERTPASSWD CURLOPT_KEYPASSWD +#define CURLOPT_KRB4LEVEL CURLOPT_KRBLEVEL + +#else +/* This is set if CURL_NO_OLDIES is defined at compile-time */ +#undef CURLOPT_DNS_USE_GLOBAL_CACHE /* soon obsolete */ +#endif + + + /* Below here follows defines for the CURLOPT_IPRESOLVE option. If a host + name resolves addresses using more than one IP protocol version, this + option might be handy to force libcurl to use a specific IP version. */ +#define CURL_IPRESOLVE_WHATEVER 0 /* default, resolves addresses to all IP + versions that your system allows */ +#define CURL_IPRESOLVE_V4 1 /* resolve to ipv4 addresses */ +#define CURL_IPRESOLVE_V6 2 /* resolve to ipv6 addresses */ + + /* three convenient "aliases" that follow the name scheme better */ +#define CURLOPT_WRITEDATA CURLOPT_FILE +#define CURLOPT_READDATA CURLOPT_INFILE +#define CURLOPT_HEADERDATA CURLOPT_WRITEHEADER + + /* These enums are for use with the CURLOPT_HTTP_VERSION option. */ +enum { + CURL_HTTP_VERSION_NONE, /* setting this means we don't care, and that we'd + like the library to choose the best possible + for us! */ + CURL_HTTP_VERSION_1_0, /* please use HTTP 1.0 in the request */ + CURL_HTTP_VERSION_1_1, /* please use HTTP 1.1 in the request */ + + CURL_HTTP_VERSION_LAST /* *ILLEGAL* http version */ +}; + + /* These enums are for use with the CURLOPT_NETRC option. */ +enum CURL_NETRC_OPTION { + CURL_NETRC_IGNORED, /* The .netrc will never be read. + * This is the default. */ + CURL_NETRC_OPTIONAL, /* A user:password in the URL will be preferred + * to one in the .netrc. */ + CURL_NETRC_REQUIRED, /* A user:password in the URL will be ignored. + * Unless one is set programmatically, the .netrc + * will be queried. */ + CURL_NETRC_LAST +}; + +enum { + CURL_SSLVERSION_DEFAULT, + CURL_SSLVERSION_TLSv1, + CURL_SSLVERSION_SSLv2, + CURL_SSLVERSION_SSLv3, + + CURL_SSLVERSION_LAST /* never use, keep last */ +}; + +/* symbols to use with CURLOPT_POSTREDIR. + CURL_REDIR_POST_301 and CURL_REDIR_POST_302 can be bitwise ORed so that + CURL_REDIR_POST_301 | CURL_REDIR_POST_302 == CURL_REDIR_POST_ALL */ + +#define CURL_REDIR_GET_ALL 0 +#define CURL_REDIR_POST_301 1 +#define CURL_REDIR_POST_302 2 +#define CURL_REDIR_POST_ALL (CURL_REDIR_POST_301|CURL_REDIR_POST_302) + +typedef enum { + CURL_TIMECOND_NONE, + + CURL_TIMECOND_IFMODSINCE, + CURL_TIMECOND_IFUNMODSINCE, + CURL_TIMECOND_LASTMOD, + + CURL_TIMECOND_LAST +} curl_TimeCond; + + +/* curl_strequal() and curl_strnequal() are subject for removal in a future + libcurl, see lib/README.curlx for details */ +CURL_EXTERN int (curl_strequal)(const char *s1, const char *s2); +CURL_EXTERN int (curl_strnequal)(const char *s1, const char *s2, size_t n); + +/* name is uppercase CURLFORM_ */ +#ifdef CFINIT +#undef CFINIT +#endif + +#ifdef CURL_ISOCPP +#define CFINIT(name) CURLFORM_ ## name +#else +/* The macro "##" is ISO C, we assume pre-ISO C doesn't support it. 
*/ +#define CFINIT(name) CURLFORM_/**/name +#endif + +typedef enum { + CFINIT(NOTHING), /********* the first one is unused ************/ + + /* */ + CFINIT(COPYNAME), + CFINIT(PTRNAME), + CFINIT(NAMELENGTH), + CFINIT(COPYCONTENTS), + CFINIT(PTRCONTENTS), + CFINIT(CONTENTSLENGTH), + CFINIT(FILECONTENT), + CFINIT(ARRAY), + CFINIT(OBSOLETE), + CFINIT(FILE), + + CFINIT(BUFFER), + CFINIT(BUFFERPTR), + CFINIT(BUFFERLENGTH), + + CFINIT(CONTENTTYPE), + CFINIT(CONTENTHEADER), + CFINIT(FILENAME), + CFINIT(END), + CFINIT(OBSOLETE2), + + CFINIT(STREAM), + + CURLFORM_LASTENTRY /* the last unused */ +} CURLformoption; + +#undef CFINIT /* done */ + +/* structure to be used as parameter for CURLFORM_ARRAY */ +struct curl_forms { + CURLformoption option; + const char *value; +}; + +/* use this for multipart formpost building */ +/* Returns code for curl_formadd() + * + * Returns: + * CURL_FORMADD_OK on success + * CURL_FORMADD_MEMORY if the FormInfo allocation fails + * CURL_FORMADD_OPTION_TWICE if one option is given twice for one Form + * CURL_FORMADD_NULL if a null pointer was given for a char + * CURL_FORMADD_MEMORY if the allocation of a FormInfo struct failed + * CURL_FORMADD_UNKNOWN_OPTION if an unknown option was used + * CURL_FORMADD_INCOMPLETE if the some FormInfo is not complete (or error) + * CURL_FORMADD_MEMORY if a curl_httppost struct cannot be allocated + * CURL_FORMADD_MEMORY if some allocation for string copying failed. + * CURL_FORMADD_ILLEGAL_ARRAY if an illegal option is used in an array + * + ***************************************************************************/ +typedef enum { + CURL_FORMADD_OK, /* first, no error */ + + CURL_FORMADD_MEMORY, + CURL_FORMADD_OPTION_TWICE, + CURL_FORMADD_NULL, + CURL_FORMADD_UNKNOWN_OPTION, + CURL_FORMADD_INCOMPLETE, + CURL_FORMADD_ILLEGAL_ARRAY, + CURL_FORMADD_DISABLED, /* libcurl was built with this disabled */ + + CURL_FORMADD_LAST /* last */ +} CURLFORMcode; + +/* + * NAME curl_formadd() + * + * DESCRIPTION + * + * Pretty advanced function for building multi-part formposts. Each invoke + * adds one part that together construct a full post. Then use + * CURLOPT_HTTPPOST to send it off to libcurl. + */ +CURL_EXTERN CURLFORMcode curl_formadd(struct curl_httppost **httppost, + struct curl_httppost **last_post, + ...); + +/* + * callback function for curl_formget() + * The void *arg pointer will be the one passed as second argument to + * curl_formget(). + * The character buffer passed to it must not be freed. + * Should return the buffer length passed to it as the argument "len" on + * success. + */ +typedef size_t (*curl_formget_callback)(void *arg, const char *buf, size_t len); + +/* + * NAME curl_formget() + * + * DESCRIPTION + * + * Serialize a curl_httppost struct built with curl_formadd(). + * Accepts a void pointer as second argument which will be passed to + * the curl_formget_callback function. + * Returns 0 on success. + */ +CURL_EXTERN int curl_formget(struct curl_httppost *form, void *arg, + curl_formget_callback append); +/* + * NAME curl_formfree() + * + * DESCRIPTION + * + * Free a multipart formpost previously built with curl_formadd(). + */ +CURL_EXTERN void curl_formfree(struct curl_httppost *form); + +/* + * NAME curl_getenv() + * + * DESCRIPTION + * + * Returns a malloc()'ed string that MUST be curl_free()ed after usage is + * complete. 
DEPRECATED - see lib/README.curlx + */ +CURL_EXTERN char *curl_getenv(const char *variable); + +/* + * NAME curl_version() + * + * DESCRIPTION + * + * Returns a static ascii string of the libcurl version. + */ +CURL_EXTERN char *curl_version(void); + +/* + * NAME curl_easy_escape() + * + * DESCRIPTION + * + * Escapes URL strings (converts all letters consider illegal in URLs to their + * %XX versions). This function returns a new allocated string or NULL if an + * error occurred. + */ +CURL_EXTERN char *curl_easy_escape(CURL *handle, + const char *string, + int length); + +/* the previous version: */ +CURL_EXTERN char *curl_escape(const char *string, + int length); + + +/* + * NAME curl_easy_unescape() + * + * DESCRIPTION + * + * Unescapes URL encoding in strings (converts all %XX codes to their 8bit + * versions). This function returns a new allocated string or NULL if an error + * occurred. + * Conversion Note: On non-ASCII platforms the ASCII %XX codes are + * converted into the host encoding. + */ +CURL_EXTERN char *curl_easy_unescape(CURL *handle, + const char *string, + int length, + int *outlength); + +/* the previous version */ +CURL_EXTERN char *curl_unescape(const char *string, + int length); + +/* + * NAME curl_free() + * + * DESCRIPTION + * + * Provided for de-allocation in the same translation unit that did the + * allocation. Added in libcurl 7.10 + */ +CURL_EXTERN void curl_free(void *p); + +/* + * NAME curl_global_init() + * + * DESCRIPTION + * + * curl_global_init() should be invoked exactly once for each application that + * uses libcurl and before any call of other libcurl functions. + * + * This function is not thread-safe! + */ +CURL_EXTERN CURLcode curl_global_init(long flags); + +/* + * NAME curl_global_init_mem() + * + * DESCRIPTION + * + * curl_global_init() or curl_global_init_mem() should be invoked exactly once + * for each application that uses libcurl. This function can be used to + * initialize libcurl and set user defined memory management callback + * functions. Users can implement memory management routines to check for + * memory leaks, check for mis-use of the curl library etc. User registered + * callback routines with be invoked by this library instead of the system + * memory management routines like malloc, free etc. + */ +CURL_EXTERN CURLcode curl_global_init_mem(long flags, + curl_malloc_callback m, + curl_free_callback f, + curl_realloc_callback r, + curl_strdup_callback s, + curl_calloc_callback c); + +/* + * NAME curl_global_cleanup() + * + * DESCRIPTION + * + * curl_global_cleanup() should be invoked exactly once for each application + * that uses libcurl + */ +CURL_EXTERN void curl_global_cleanup(void); + +/* linked-list structure for the CURLOPT_QUOTE option (and other) */ +struct curl_slist { + char *data; + struct curl_slist *next; +}; + +/* + * NAME curl_slist_append() + * + * DESCRIPTION + * + * Appends a string to a linked list. If no list exists, it will be created + * first. Returns the new list, after appending. + */ +CURL_EXTERN struct curl_slist *curl_slist_append(struct curl_slist *, + const char *); + +/* + * NAME curl_slist_free_all() + * + * DESCRIPTION + * + * free a previously built curl_slist. + */ +CURL_EXTERN void curl_slist_free_all(struct curl_slist *); + +/* + * NAME curl_getdate() + * + * DESCRIPTION + * + * Returns the time, in seconds since 1 Jan 1970 of the time string given in + * the first argument. The time argument in the second parameter is unused + * and should be set to NULL. 
+ */ +CURL_EXTERN time_t curl_getdate(const char *p, const time_t *unused); + +/* info about the certificate chain, only for OpenSSL builds. Asked + for with CURLOPT_CERTINFO / CURLINFO_CERTINFO */ +struct curl_certinfo { + int num_of_certs; /* number of certificates with information */ + struct curl_slist **certinfo; /* for each index in this array, there's a + linked list with textual information in the + format "name: value" */ +}; + +#define CURLINFO_STRING 0x100000 +#define CURLINFO_LONG 0x200000 +#define CURLINFO_DOUBLE 0x300000 +#define CURLINFO_SLIST 0x400000 +#define CURLINFO_MASK 0x0fffff +#define CURLINFO_TYPEMASK 0xf00000 + +typedef enum { + CURLINFO_NONE, /* first, never use this */ + CURLINFO_EFFECTIVE_URL = CURLINFO_STRING + 1, + CURLINFO_RESPONSE_CODE = CURLINFO_LONG + 2, + CURLINFO_TOTAL_TIME = CURLINFO_DOUBLE + 3, + CURLINFO_NAMELOOKUP_TIME = CURLINFO_DOUBLE + 4, + CURLINFO_CONNECT_TIME = CURLINFO_DOUBLE + 5, + CURLINFO_PRETRANSFER_TIME = CURLINFO_DOUBLE + 6, + CURLINFO_SIZE_UPLOAD = CURLINFO_DOUBLE + 7, + CURLINFO_SIZE_DOWNLOAD = CURLINFO_DOUBLE + 8, + CURLINFO_SPEED_DOWNLOAD = CURLINFO_DOUBLE + 9, + CURLINFO_SPEED_UPLOAD = CURLINFO_DOUBLE + 10, + CURLINFO_HEADER_SIZE = CURLINFO_LONG + 11, + CURLINFO_REQUEST_SIZE = CURLINFO_LONG + 12, + CURLINFO_SSL_VERIFYRESULT = CURLINFO_LONG + 13, + CURLINFO_FILETIME = CURLINFO_LONG + 14, + CURLINFO_CONTENT_LENGTH_DOWNLOAD = CURLINFO_DOUBLE + 15, + CURLINFO_CONTENT_LENGTH_UPLOAD = CURLINFO_DOUBLE + 16, + CURLINFO_STARTTRANSFER_TIME = CURLINFO_DOUBLE + 17, + CURLINFO_CONTENT_TYPE = CURLINFO_STRING + 18, + CURLINFO_REDIRECT_TIME = CURLINFO_DOUBLE + 19, + CURLINFO_REDIRECT_COUNT = CURLINFO_LONG + 20, + CURLINFO_PRIVATE = CURLINFO_STRING + 21, + CURLINFO_HTTP_CONNECTCODE = CURLINFO_LONG + 22, + CURLINFO_HTTPAUTH_AVAIL = CURLINFO_LONG + 23, + CURLINFO_PROXYAUTH_AVAIL = CURLINFO_LONG + 24, + CURLINFO_OS_ERRNO = CURLINFO_LONG + 25, + CURLINFO_NUM_CONNECTS = CURLINFO_LONG + 26, + CURLINFO_SSL_ENGINES = CURLINFO_SLIST + 27, + CURLINFO_COOKIELIST = CURLINFO_SLIST + 28, + CURLINFO_LASTSOCKET = CURLINFO_LONG + 29, + CURLINFO_FTP_ENTRY_PATH = CURLINFO_STRING + 30, + CURLINFO_REDIRECT_URL = CURLINFO_STRING + 31, + CURLINFO_PRIMARY_IP = CURLINFO_STRING + 32, + CURLINFO_APPCONNECT_TIME = CURLINFO_DOUBLE + 33, + CURLINFO_CERTINFO = CURLINFO_SLIST + 34, + CURLINFO_CONDITION_UNMET = CURLINFO_LONG + 35, + /* Fill in new entries below here! */ + + CURLINFO_LASTONE = 35 +} CURLINFO; + +/* CURLINFO_RESPONSE_CODE is the new name for the option previously known as + CURLINFO_HTTP_CODE */ +#define CURLINFO_HTTP_CODE CURLINFO_RESPONSE_CODE + +typedef enum { + CURLCLOSEPOLICY_NONE, /* first, never use this */ + + CURLCLOSEPOLICY_OLDEST, + CURLCLOSEPOLICY_LEAST_RECENTLY_USED, + CURLCLOSEPOLICY_LEAST_TRAFFIC, + CURLCLOSEPOLICY_SLOWEST, + CURLCLOSEPOLICY_CALLBACK, + + CURLCLOSEPOLICY_LAST /* last, never use this */ +} curl_closepolicy; + +#define CURL_GLOBAL_SSL (1<<0) +#define CURL_GLOBAL_WIN32 (1<<1) +#define CURL_GLOBAL_ALL (CURL_GLOBAL_SSL|CURL_GLOBAL_WIN32) +#define CURL_GLOBAL_NOTHING 0 +#define CURL_GLOBAL_DEFAULT CURL_GLOBAL_ALL + + +/***************************************************************************** + * Setup defines, protos etc for the sharing stuff. + */ + +/* Different data locks for a single share */ +typedef enum { + CURL_LOCK_DATA_NONE = 0, + /* CURL_LOCK_DATA_SHARE is used internally to say that + * the locking is just made to change the internal state of the share + * itself. 
+ */ + CURL_LOCK_DATA_SHARE, + CURL_LOCK_DATA_COOKIE, + CURL_LOCK_DATA_DNS, + CURL_LOCK_DATA_SSL_SESSION, + CURL_LOCK_DATA_CONNECT, + CURL_LOCK_DATA_LAST +} curl_lock_data; + +/* Different lock access types */ +typedef enum { + CURL_LOCK_ACCESS_NONE = 0, /* unspecified action */ + CURL_LOCK_ACCESS_SHARED = 1, /* for read perhaps */ + CURL_LOCK_ACCESS_SINGLE = 2, /* for write perhaps */ + CURL_LOCK_ACCESS_LAST /* never use */ +} curl_lock_access; + +typedef void (*curl_lock_function)(CURL *handle, + curl_lock_data data, + curl_lock_access locktype, + void *userptr); +typedef void (*curl_unlock_function)(CURL *handle, + curl_lock_data data, + void *userptr); + +typedef void CURLSH; + +typedef enum { + CURLSHE_OK, /* all is fine */ + CURLSHE_BAD_OPTION, /* 1 */ + CURLSHE_IN_USE, /* 2 */ + CURLSHE_INVALID, /* 3 */ + CURLSHE_NOMEM, /* out of memory */ + CURLSHE_LAST /* never use */ +} CURLSHcode; + +typedef enum { + CURLSHOPT_NONE, /* don't use */ + CURLSHOPT_SHARE, /* specify a data type to share */ + CURLSHOPT_UNSHARE, /* specify which data type to stop sharing */ + CURLSHOPT_LOCKFUNC, /* pass in a 'curl_lock_function' pointer */ + CURLSHOPT_UNLOCKFUNC, /* pass in a 'curl_unlock_function' pointer */ + CURLSHOPT_USERDATA, /* pass in a user data pointer used in the lock/unlock + callback functions */ + CURLSHOPT_LAST /* never use */ +} CURLSHoption; + +CURL_EXTERN CURLSH *curl_share_init(void); +CURL_EXTERN CURLSHcode curl_share_setopt(CURLSH *, CURLSHoption option, ...); +CURL_EXTERN CURLSHcode curl_share_cleanup(CURLSH *); + +/**************************************************************************** + * Structures for querying information about the curl library at runtime. + */ + +typedef enum { + CURLVERSION_FIRST, + CURLVERSION_SECOND, + CURLVERSION_THIRD, + CURLVERSION_FOURTH, + CURLVERSION_LAST /* never actually use this */ +} CURLversion; + +/* The 'CURLVERSION_NOW' is the symbolic name meant to be used by + basically all programs ever that want to get version information. It is + meant to be a built-in version number for what kind of struct the caller + expects. If the struct ever changes, we redefine the NOW to another enum + from above. 
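A sketch of the share interface declared above (not part of the vendored header): one share object lets several easy handles reuse DNS entries and cookies. The URL is a placeholder, and since this runs single-threaded no CURLSHOPT_LOCKFUNC/UNLOCKFUNC callbacks are installed:

#include <curl/curl.h>

int main(void)
{
  CURLSH *share;
  CURL *easy;

  curl_global_init(CURL_GLOBAL_ALL);

  share = curl_share_init();
  curl_share_setopt(share, CURLSHOPT_SHARE, CURL_LOCK_DATA_DNS);
  curl_share_setopt(share, CURLSHOPT_SHARE, CURL_LOCK_DATA_COOKIE);

  easy = curl_easy_init();
  curl_easy_setopt(easy, CURLOPT_SHARE, share);
  curl_easy_setopt(easy, CURLOPT_URL, "http://example.com/");
  curl_easy_perform(easy);

  curl_easy_cleanup(easy);    /* clean up easy handles before the share */
  curl_share_cleanup(share);
  curl_global_cleanup();
  return 0;
}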
*/ +#define CURLVERSION_NOW CURLVERSION_FOURTH + +typedef struct { + CURLversion age; /* age of the returned struct */ + const char *version; /* LIBCURL_VERSION */ + unsigned int version_num; /* LIBCURL_VERSION_NUM */ + const char *host; /* OS/host/cpu/machine when configured */ + int features; /* bitmask, see defines below */ + const char *ssl_version; /* human readable string */ + long ssl_version_num; /* not used anymore, always 0 */ + const char *libz_version; /* human readable string */ + /* protocols is terminated by an entry with a NULL protoname */ + const char * const *protocols; + + /* The fields below this were added in CURLVERSION_SECOND */ + const char *ares; + int ares_num; + + /* This field was added in CURLVERSION_THIRD */ + const char *libidn; + + /* These field were added in CURLVERSION_FOURTH */ + + /* Same as '_libiconv_version' if built with HAVE_ICONV */ + int iconv_ver_num; + + const char *libssh_version; /* human readable string */ + +} curl_version_info_data; + +#define CURL_VERSION_IPV6 (1<<0) /* IPv6-enabled */ +#define CURL_VERSION_KERBEROS4 (1<<1) /* kerberos auth is supported */ +#define CURL_VERSION_SSL (1<<2) /* SSL options are present */ +#define CURL_VERSION_LIBZ (1<<3) /* libz features are present */ +#define CURL_VERSION_NTLM (1<<4) /* NTLM auth is supported */ +#define CURL_VERSION_GSSNEGOTIATE (1<<5) /* Negotiate auth support */ +#define CURL_VERSION_DEBUG (1<<6) /* built with debug capabilities */ +#define CURL_VERSION_ASYNCHDNS (1<<7) /* asynchronous dns resolves */ +#define CURL_VERSION_SPNEGO (1<<8) /* SPNEGO auth */ +#define CURL_VERSION_LARGEFILE (1<<9) /* supports files bigger than 2GB */ +#define CURL_VERSION_IDN (1<<10) /* International Domain Names support */ +#define CURL_VERSION_SSPI (1<<11) /* SSPI is supported */ +#define CURL_VERSION_CONV (1<<12) /* character conversions supported */ +#define CURL_VERSION_CURLDEBUG (1<<13) /* debug memory tracking supported */ + +/* + * NAME curl_version_info() + * + * DESCRIPTION + * + * This function returns a pointer to a static copy of the version info + * struct. See above. + */ +CURL_EXTERN curl_version_info_data *curl_version_info(CURLversion); + +/* + * NAME curl_easy_strerror() + * + * DESCRIPTION + * + * The curl_easy_strerror function may be used to turn a CURLcode value + * into the equivalent human readable error string. This is useful + * for printing meaningful error messages. + */ +CURL_EXTERN const char *curl_easy_strerror(CURLcode); + +/* + * NAME curl_share_strerror() + * + * DESCRIPTION + * + * The curl_share_strerror function may be used to turn a CURLSHcode value + * into the equivalent human readable error string. This is useful + * for printing meaningful error messages. + */ +CURL_EXTERN const char *curl_share_strerror(CURLSHcode); + +/* + * NAME curl_easy_pause() + * + * DESCRIPTION + * + * The curl_easy_pause function pauses or unpauses transfers. Select the new + * state by setting the bitmask, use the convenience defines below. + * + */ +CURL_EXTERN CURLcode curl_easy_pause(CURL *handle, int bitmask); + +#define CURLPAUSE_RECV (1<<0) +#define CURLPAUSE_RECV_CONT (0) + +#define CURLPAUSE_SEND (1<<2) +#define CURLPAUSE_SEND_CONT (0) + +#define CURLPAUSE_ALL (CURLPAUSE_RECV|CURLPAUSE_SEND) +#define CURLPAUSE_CONT (CURLPAUSE_RECV_CONT|CURLPAUSE_SEND_CONT) + +#ifdef __cplusplus +} +#endif + +/* unfortunately, the easy.h and multi.h include files need options and info + stuff before they can be included! 
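A short sketch (not part of the vendored header) that queries the struct and feature bits defined above via curl_version_info():

#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  /* the returned struct is static; do not free it */
  curl_version_info_data *info = curl_version_info(CURLVERSION_NOW);

  printf("libcurl %s on %s\n", info->version, info->host);
  if(info->features & CURL_VERSION_SSL)
    printf("  SSL: %s\n", info->ssl_version);
  if(info->features & CURL_VERSION_LIBZ)
    printf("  zlib: %s\n", info->libz_version);
  if(info->features & CURL_VERSION_IPV6)
    printf("  IPv6 supported\n");
  return 0;
}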
*/ +#include "easy.h" /* nothing in curl is fun without the easy stuff */ +#include "multi.h" + +/* the typechecker doesn't work in C++ (yet) */ +#if defined(__GNUC__) && defined(__GNUC_MINOR__) && \ + ((__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) && \ + !defined(__cplusplus) && !defined(CURL_DISABLE_TYPECHECK) +#include "typecheck-gcc.h" +#else +#if defined(__STDC__) && (__STDC__ >= 1) +/* This preprocessor magic that replaces a call with the exact same call is + only done to make sure application authors pass exactly three arguments + to these functions. */ +#define curl_easy_setopt(handle,opt,param) curl_easy_setopt(handle,opt,param) +#define curl_easy_getinfo(handle,info,arg) curl_easy_getinfo(handle,info,arg) +#define curl_share_setopt(share,opt,param) curl_share_setopt(share,opt,param) +#define curl_multi_setopt(handle,opt,param) curl_multi_setopt(handle,opt,param) +#endif /* __STDC__ >= 1 */ +#endif /* gcc >= 4.3 && !__cplusplus */ + +#endif /* __CURL_CURL_H */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/curlbuild.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/curlbuild.h new file mode 100644 index 0000000000..b0a53e6c98 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/curlbuild.h @@ -0,0 +1,202 @@ +/* include/curl/curlbuild.h. Generated from curlbuild.h.in by configure. */ +#ifndef __CURL_CURLBUILD_H +#define __CURL_CURLBUILD_H +/*************************************************************************** + * _ _ ____ _ + * Project ___| | | | _ \| | + * / __| | | | |_) | | + * | (__| |_| | _ <| |___ + * \___|\___/|_| \_\_____| + * + * Copyright (C) 1998 - 2009, Daniel Stenberg, , et al. + * + * This software is licensed as described in the file COPYING, which + * you should have received as part of this distribution. The terms + * are also available at http://curl.haxx.se/docs/copyright.html. + * + * You may opt to use, copy, modify, merge, publish, distribute and/or sell + * copies of the Software, and permit persons to whom the Software is + * furnished to do so, under the terms of the COPYING file. + * + * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY + * KIND, either express or implied. + * + * $Id: curlbuild.h.in,v 1.8 2009-04-29 15:15:38 yangtse Exp $ + ***************************************************************************/ + +/* ================================================================ */ +/* NOTES FOR CONFIGURE CAPABLE SYSTEMS */ +/* ================================================================ */ + +/* + * NOTE 1: + * ------- + * + * Nothing in this file is intended to be modified or adjusted by the + * curl library user nor by the curl library builder. + * + * If you think that something actually needs to be changed, adjusted + * or fixed in this file, then, report it on the libcurl development + * mailing list: http://cool.haxx.se/mailman/listinfo/curl-library/ + * + * This header file shall only export symbols which are 'curl' or 'CURL' + * prefixed, otherwise public name space would be polluted. 
+ * + * NOTE 2: + * ------- + * + * Right now you might be staring at file include/curl/curlbuild.h.in or + * at file include/curl/curlbuild.h, this is due to the following reason: + * + * On systems capable of running the configure script, the configure process + * will overwrite the distributed include/curl/curlbuild.h file with one that + * is suitable and specific to the library being configured and built, which + * is generated from the include/curl/curlbuild.h.in template file. + * + */ + +/* ================================================================ */ +/* DEFINITION OF THESE SYMBOLS SHALL NOT TAKE PLACE ANYWHERE ELSE */ +/* ================================================================ */ + +#ifdef CURL_SIZEOF_LONG +# error "CURL_SIZEOF_LONG shall not be defined except in curlbuild.h" + Error Compilation_aborted_CURL_SIZEOF_LONG_already_defined +#endif + +#ifdef CURL_TYPEOF_CURL_SOCKLEN_T +# error "CURL_TYPEOF_CURL_SOCKLEN_T shall not be defined except in curlbuild.h" + Error Compilation_aborted_CURL_TYPEOF_CURL_SOCKLEN_T_already_defined +#endif + +#ifdef CURL_SIZEOF_CURL_SOCKLEN_T +# error "CURL_SIZEOF_CURL_SOCKLEN_T shall not be defined except in curlbuild.h" + Error Compilation_aborted_CURL_SIZEOF_CURL_SOCKLEN_T_already_defined +#endif + +#ifdef CURL_TYPEOF_CURL_OFF_T +# error "CURL_TYPEOF_CURL_OFF_T shall not be defined except in curlbuild.h" + Error Compilation_aborted_CURL_TYPEOF_CURL_OFF_T_already_defined +#endif + +#ifdef CURL_FORMAT_CURL_OFF_T +# error "CURL_FORMAT_CURL_OFF_T shall not be defined except in curlbuild.h" + Error Compilation_aborted_CURL_FORMAT_CURL_OFF_T_already_defined +#endif + +#ifdef CURL_FORMAT_CURL_OFF_TU +# error "CURL_FORMAT_CURL_OFF_TU shall not be defined except in curlbuild.h" + Error Compilation_aborted_CURL_FORMAT_CURL_OFF_TU_already_defined +#endif + +#ifdef CURL_FORMAT_OFF_T +# error "CURL_FORMAT_OFF_T shall not be defined except in curlbuild.h" + Error Compilation_aborted_CURL_FORMAT_OFF_T_already_defined +#endif + +#ifdef CURL_SIZEOF_CURL_OFF_T +# error "CURL_SIZEOF_CURL_OFF_T shall not be defined except in curlbuild.h" + Error Compilation_aborted_CURL_SIZEOF_CURL_OFF_T_already_defined +#endif + +#ifdef CURL_SUFFIX_CURL_OFF_T +# error "CURL_SUFFIX_CURL_OFF_T shall not be defined except in curlbuild.h" + Error Compilation_aborted_CURL_SUFFIX_CURL_OFF_T_already_defined +#endif + +#ifdef CURL_SUFFIX_CURL_OFF_TU +# error "CURL_SUFFIX_CURL_OFF_TU shall not be defined except in curlbuild.h" + Error Compilation_aborted_CURL_SUFFIX_CURL_OFF_TU_already_defined +#endif + +/* ================================================================ */ +/* EXTERNAL INTERFACE SETTINGS FOR CONFIGURE CAPABLE SYSTEMS ONLY */ +/* ================================================================ */ + +/* Configure process defines this to 1 when it finds out that system */ +/* header file ws2tcpip.h must be included by the external interface. */ +/* #undef CURL_PULL_WS2TCPIP_H */ +#ifdef CURL_PULL_WS2TCPIP_H +# ifndef WIN32_LEAN_AND_MEAN +# define WIN32_LEAN_AND_MEAN +# endif +# include <windows.h> +# include <winsock2.h> +# include <ws2tcpip.h> +#endif + +/* Configure process defines this to 1 when it finds out that system */ +/* header file sys/types.h must be included by the external interface. */ +#define CURL_PULL_SYS_TYPES_H 1 +#ifdef CURL_PULL_SYS_TYPES_H +# include <sys/types.h> +#endif + +/* Configure process defines this to 1 when it finds out that system */ +/* header file stdint.h must be included by the external interface.
*/ +/* #undef CURL_PULL_STDINT_H */ +#ifdef CURL_PULL_STDINT_H +# include <stdint.h> +#endif + +/* Configure process defines this to 1 when it finds out that system */ +/* header file inttypes.h must be included by the external interface. */ +/* #undef CURL_PULL_INTTYPES_H */ +#ifdef CURL_PULL_INTTYPES_H +# include <inttypes.h> +#endif + +/* Configure process defines this to 1 when it finds out that system */ +/* header file sys/socket.h must be included by the external interface. */ +#define CURL_PULL_SYS_SOCKET_H 1 +#ifdef CURL_PULL_SYS_SOCKET_H +# include <sys/socket.h> +#endif + +/* The size of `long', as computed by sizeof. */ +#if defined(_M_X64) || (defined(__x86_64__) && !defined(__ILP32__)) || \ + defined(__aarch64__) || (defined(__mips__) && _MIPS_SIM == _ABI64) +#define CURL_SIZEOF_LONG 8 +#else +#define CURL_SIZEOF_LONG 4 +#endif + +/* Integral data type used for curl_socklen_t. */ +#define CURL_TYPEOF_CURL_SOCKLEN_T socklen_t + +/* The size of `curl_socklen_t', as computed by sizeof. */ +#define CURL_SIZEOF_CURL_SOCKLEN_T 4 + +/* Data type definition of curl_socklen_t. */ +typedef CURL_TYPEOF_CURL_SOCKLEN_T curl_socklen_t; + +/* Signed integral data type used for curl_off_t. */ +#if defined(_M_X64) || (defined(__x86_64__) && !defined(__ILP32__)) || \ + defined(__aarch64__) +#define CURL_TYPEOF_CURL_OFF_T long +#else +#define CURL_TYPEOF_CURL_OFF_T int64_t +#endif + +/* Data type definition of curl_off_t. */ +typedef CURL_TYPEOF_CURL_OFF_T curl_off_t; + +/* curl_off_t formatting string directive without "%" conversion specifier. */ +#define CURL_FORMAT_CURL_OFF_T "ld" + +/* unsigned curl_off_t formatting string without "%" conversion specifier. */ +#define CURL_FORMAT_CURL_OFF_TU "lu" + +/* curl_off_t formatting string directive with "%" conversion specifier. */ +#define CURL_FORMAT_OFF_T "%ld" + +/* The size of `curl_off_t', as computed by sizeof. */ +#define CURL_SIZEOF_CURL_OFF_T 8 + +/* curl_off_t constant suffix. */ +#define CURL_SUFFIX_CURL_OFF_T L + +/* unsigned curl_off_t constant suffix. */ +#define CURL_SUFFIX_CURL_OFF_TU UL + +#endif /* __CURL_CURLBUILD_H */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/curlrules.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/curlrules.h new file mode 100644 index 0000000000..abac4397d2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/curlrules.h @@ -0,0 +1,249 @@ +#ifndef __CURL_CURLRULES_H +#define __CURL_CURLRULES_H +/*************************************************************************** + * _ _ ____ _ + * Project ___| | | | _ \| | + * / __| | | | |_) | | + * | (__| |_| | _ <| |___ + * \___|\___/|_| \_\_____| + * + * Copyright (C) 1998 - 2009, Daniel Stenberg, <daniel@haxx.se>, et al. + * + * This software is licensed as described in the file COPYING, which + * you should have received as part of this distribution. The terms + * are also available at http://curl.haxx.se/docs/copyright.html. + * + * You may opt to use, copy, modify, merge, publish, distribute and/or sell + * copies of the Software, and permit persons to whom the Software is + * furnished to do so, under the terms of the COPYING file. + * + * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY + * KIND, either express or implied.
+ * + * $Id: curlrules.h,v 1.7 2009-10-27 16:56:20 yangtse Exp $ + ***************************************************************************/ + +/* ================================================================ */ +/* COMPILE TIME SANITY CHECKS */ +/* ================================================================ */ + +/* + * NOTE 1: + * ------- + * + * All checks done in this file are intentionally placed in a public + * header file which is pulled by curl/curl.h when an application is + * being built using an already built libcurl library. Additionally + * this file is also included and used when building the library. + * + * If compilation fails on this file it is certainly sure that the + * problem is elsewhere. It could be a problem in the curlbuild.h + * header file, or simply that you are using different compilation + * settings than those used to build the library. + * + * Nothing in this file is intended to be modified or adjusted by the + * curl library user nor by the curl library builder. + * + * Do not deactivate any check, these are done to make sure that the + * library is properly built and used. + * + * You can find further help on the libcurl development mailing list: + * http://cool.haxx.se/mailman/listinfo/curl-library/ + * + * NOTE 2 + * ------ + * + * Some of the following compile time checks are based on the fact + * that the dimension of a constant array can not be a negative one. + * In this way if the compile time verification fails, the compilation + * will fail issuing an error. The error description wording is compiler + * dependent but it will be quite similar to one of the following: + * + * "negative subscript or subscript is too large" + * "array must have at least one element" + * "-1 is an illegal array size" + * "size of array is negative" + * + * If you are building an application which tries to use an already + * built libcurl library and you are getting this kind of errors on + * this file, it is a clear indication that there is a mismatch between + * how the library was built and how you are trying to use it for your + * application. Your already compiled or binary library provider is the + * only one who can give you the details you need to properly use it. + */ + +/* + * Verify that some macros are actually defined. + */ + +#ifndef CURL_SIZEOF_LONG +# error "CURL_SIZEOF_LONG definition is missing!" + Error Compilation_aborted_CURL_SIZEOF_LONG_is_missing +#endif + +#ifndef CURL_TYPEOF_CURL_SOCKLEN_T +# error "CURL_TYPEOF_CURL_SOCKLEN_T definition is missing!" + Error Compilation_aborted_CURL_TYPEOF_CURL_SOCKLEN_T_is_missing +#endif + +#ifndef CURL_SIZEOF_CURL_SOCKLEN_T +# error "CURL_SIZEOF_CURL_SOCKLEN_T definition is missing!" + Error Compilation_aborted_CURL_SIZEOF_CURL_SOCKLEN_T_is_missing +#endif + +#ifndef CURL_TYPEOF_CURL_OFF_T +# error "CURL_TYPEOF_CURL_OFF_T definition is missing!" + Error Compilation_aborted_CURL_TYPEOF_CURL_OFF_T_is_missing +#endif + +#ifndef CURL_FORMAT_CURL_OFF_T +# error "CURL_FORMAT_CURL_OFF_T definition is missing!" + Error Compilation_aborted_CURL_FORMAT_CURL_OFF_T_is_missing +#endif + +#ifndef CURL_FORMAT_CURL_OFF_TU +# error "CURL_FORMAT_CURL_OFF_TU definition is missing!" + Error Compilation_aborted_CURL_FORMAT_CURL_OFF_TU_is_missing +#endif + +#ifndef CURL_FORMAT_OFF_T +# error "CURL_FORMAT_OFF_T definition is missing!" + Error Compilation_aborted_CURL_FORMAT_OFF_T_is_missing +#endif + +#ifndef CURL_SIZEOF_CURL_OFF_T +# error "CURL_SIZEOF_CURL_OFF_T definition is missing!" 
+ Error Compilation_aborted_CURL_SIZEOF_CURL_OFF_T_is_missing +#endif + +#ifndef CURL_SUFFIX_CURL_OFF_T +# error "CURL_SUFFIX_CURL_OFF_T definition is missing!" + Error Compilation_aborted_CURL_SUFFIX_CURL_OFF_T_is_missing +#endif + +#ifndef CURL_SUFFIX_CURL_OFF_TU +# error "CURL_SUFFIX_CURL_OFF_TU definition is missing!" + Error Compilation_aborted_CURL_SUFFIX_CURL_OFF_TU_is_missing +#endif + +/* + * Macros private to this header file. + */ + +#define CurlchkszEQ(t, s) sizeof(t) == s ? 1 : -1 + +#define CurlchkszGE(t1, t2) sizeof(t1) >= sizeof(t2) ? 1 : -1 + +/* + * Verify that the size previously defined and expected for long + * is the same as the one reported by sizeof() at compile time. + */ + +typedef char + __curl_rule_01__ + [CurlchkszEQ(long, CURL_SIZEOF_LONG)]; + +/* + * Verify that the size previously defined and expected for + * curl_off_t is actually the the same as the one reported + * by sizeof() at compile time. + */ + +typedef char + __curl_rule_02__ + [CurlchkszEQ(curl_off_t, CURL_SIZEOF_CURL_OFF_T)]; + +/* + * Verify at compile time that the size of curl_off_t as reported + * by sizeof() is greater or equal than the one reported for long + * for the current compilation. + */ + +typedef char + __curl_rule_03__ + [CurlchkszGE(curl_off_t, long)]; + +/* + * Verify that the size previously defined and expected for + * curl_socklen_t is actually the the same as the one reported + * by sizeof() at compile time. + */ + +typedef char + __curl_rule_04__ + [CurlchkszEQ(curl_socklen_t, CURL_SIZEOF_CURL_SOCKLEN_T)]; + +/* + * Verify at compile time that the size of curl_socklen_t as reported + * by sizeof() is greater or equal than the one reported for int for + * the current compilation. + */ + +typedef char + __curl_rule_05__ + [CurlchkszGE(curl_socklen_t, int)]; + +/* ================================================================ */ +/* EXTERNALLY AND INTERNALLY VISIBLE DEFINITIONS */ +/* ================================================================ */ + +/* + * CURL_ISOCPP and CURL_OFF_T_C definitions are done here in order to allow + * these to be visible and exported by the external libcurl interface API, + * while also making them visible to the library internals, simply including + * setup.h, without actually needing to include curl.h internally. + * If some day this section would grow big enough, all this should be moved + * to its own header file. + */ + +/* + * Figure out if we can use the ## preprocessor operator, which is supported + * by ISO/ANSI C and C++. Some compilers support it without setting __STDC__ + * or __cplusplus so we need to carefully check for them too. + */ + +#if defined(__STDC__) || defined(_MSC_VER) || defined(__cplusplus) || \ + defined(__HP_aCC) || defined(__BORLANDC__) || defined(__LCC__) || \ + defined(__POCC__) || defined(__SALFORDC__) || defined(__HIGHC__) || \ + defined(__ILEC400__) + /* This compiler is believed to have an ISO compatible preprocessor */ +#define CURL_ISOCPP +#else + /* This compiler is believed NOT to have an ISO compatible preprocessor */ +#undef CURL_ISOCPP +#endif + +/* + * Macros for minimum-width signed and unsigned curl_off_t integer constants. 
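The CURL_OFF_T_C()/CURL_OFF_TU_C() helpers defined just below, together with the CURL_FORMAT_CURL_OFF_T directive from curlbuild.h, are what applications use to spell and print curl_off_t values portably. A sketch (not part of the vendored header), assuming the curlbuild.h shipped alongside it:

#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  /* a constant wider than a 32-bit int, spelled with the proper suffix */
  curl_off_t limit = CURL_OFF_T_C(2147483648);

  printf("limit = %" CURL_FORMAT_CURL_OFF_T "\n", limit);
  return 0;
}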
+ */ + +#ifdef CURL_ISOCPP +# define __CURL_OFF_T_C_HELPER2(Val,Suffix) Val ## Suffix +#else +# define __CURL_OFF_T_C_HELPER2(Val,Suffix) Val/**/Suffix +#endif +#define __CURL_OFF_T_C_HELPER1(Val,Suffix) __CURL_OFF_T_C_HELPER2(Val,Suffix) +#define CURL_OFF_T_C(Val) __CURL_OFF_T_C_HELPER1(Val,CURL_SUFFIX_CURL_OFF_T) +#define CURL_OFF_TU_C(Val) __CURL_OFF_T_C_HELPER1(Val,CURL_SUFFIX_CURL_OFF_TU) + +/* + * Get rid of macros private to this header file. + */ + +#undef CurlchkszEQ +#undef CurlchkszGE + +/* + * Get rid of macros not intended to exist beyond this point. + */ + +#undef CURL_PULL_WS2TCPIP_H +#undef CURL_PULL_SYS_TYPES_H +#undef CURL_PULL_SYS_SOCKET_H +#undef CURL_PULL_STDINT_H +#undef CURL_PULL_INTTYPES_H + +#undef CURL_TYPEOF_CURL_SOCKLEN_T +#undef CURL_TYPEOF_CURL_OFF_T + +#endif /* __CURL_CURLRULES_H */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/curlver.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/curlver.h new file mode 100644 index 0000000000..afa85c15ad --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/curlver.h @@ -0,0 +1,70 @@ +#ifndef __CURL_CURLVER_H +#define __CURL_CURLVER_H +/*************************************************************************** + * _ _ ____ _ + * Project ___| | | | _ \| | + * / __| | | | |_) | | + * | (__| |_| | _ <| |___ + * \___|\___/|_| \_\_____| + * + * Copyright (C) 1998 - 2009, Daniel Stenberg, , et al. + * + * This software is licensed as described in the file COPYING, which + * you should have received as part of this distribution. The terms + * are also available at http://curl.haxx.se/docs/copyright.html. + * + * You may opt to use, copy, modify, merge, publish, distribute and/or sell + * copies of the Software, and permit persons to whom the Software is + * furnished to do so, under the terms of the COPYING file. + * + * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY + * KIND, either express or implied. + * + * $Id: curlver.h,v 1.48 2009-08-12 11:24:52 bagder Exp $ + ***************************************************************************/ + +/* This header file contains nothing but libcurl version info, generated by + a script at release-time. This was made its own header file in 7.11.2 */ + +/* This is the global package copyright */ +#define LIBCURL_COPYRIGHT "1996 - 2009 Daniel Stenberg, ." + +/* This is the version number of the libcurl package from which this header + file origins: */ +#define LIBCURL_VERSION "7.19.7" + +/* The numeric version number is also available "in parts" by using these + defines: */ +#define LIBCURL_VERSION_MAJOR 7 +#define LIBCURL_VERSION_MINOR 19 +#define LIBCURL_VERSION_PATCH 7 + +/* This is the numeric version of the libcurl version number, meant for easier + parsing and comparions by programs. The LIBCURL_VERSION_NUM define will + always follow this syntax: + + 0xXXYYZZ + + Where XX, YY and ZZ are the main version, release and patch numbers in + hexadecimal (using 8 bits each). All three numbers are always represented + using two digits. 1.2 would appear as "0x010200" while version 9.11.7 + appears as "0x090b07". + + This 6-digit (24 bits) hexadecimal number does not show pre-release number, + and it is always a greater number in a more recent release. It makes + comparisons with greater than and less than work. +*/ +#define LIBCURL_VERSION_NUM 0x071307 + +/* + * This is the date and time when the full source package was created. 
The + * timestamp is not stored in CVS, as the timestamp is properly set in the + * tarballs by the maketgz script. + * + * The format of the date should follow this template: + * + * "Mon Feb 12 11:35:33 UTC 2007" + */ +#define LIBCURL_TIMESTAMP "Wed Nov 4 12:34:59 UTC 2009" + +#endif /* __CURL_CURLVER_H */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/easy.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/easy.h new file mode 100644 index 0000000000..40449c3ec8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/easy.h @@ -0,0 +1,103 @@ +#ifndef __CURL_EASY_H +#define __CURL_EASY_H +/*************************************************************************** + * _ _ ____ _ + * Project ___| | | | _ \| | + * / __| | | | |_) | | + * | (__| |_| | _ <| |___ + * \___|\___/|_| \_\_____| + * + * Copyright (C) 1998 - 2008, Daniel Stenberg, , et al. + * + * This software is licensed as described in the file COPYING, which + * you should have received as part of this distribution. The terms + * are also available at http://curl.haxx.se/docs/copyright.html. + * + * You may opt to use, copy, modify, merge, publish, distribute and/or sell + * copies of the Software, and permit persons to whom the Software is + * furnished to do so, under the terms of the COPYING file. + * + * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY + * KIND, either express or implied. + * + * $Id: easy.h,v 1.14 2008-05-12 21:43:28 bagder Exp $ + ***************************************************************************/ +#ifdef __cplusplus +extern "C" { +#endif + +CURL_EXTERN CURL *curl_easy_init(void); +CURL_EXTERN CURLcode curl_easy_setopt(CURL *curl, CURLoption option, ...); +CURL_EXTERN CURLcode curl_easy_perform(CURL *curl); +CURL_EXTERN void curl_easy_cleanup(CURL *curl); + +/* + * NAME curl_easy_getinfo() + * + * DESCRIPTION + * + * Request internal information from the curl session with this function. The + * third argument MUST be a pointer to a long, a pointer to a char * or a + * pointer to a double (as the documentation describes elsewhere). The data + * pointed to will be filled in accordingly and can be relied upon only if the + * function returns CURLE_OK. This function is intended to get used *AFTER* a + * performed transfer, all results from this function are undefined until the + * transfer is completed. + */ +CURL_EXTERN CURLcode curl_easy_getinfo(CURL *curl, CURLINFO info, ...); + + +/* + * NAME curl_easy_duphandle() + * + * DESCRIPTION + * + * Creates a new curl session handle with the same options set for the handle + * passed in. Duplicating a handle could only be a matter of cloning data and + * options, internal state info and things like persistant connections cannot + * be transfered. It is useful in multithreaded applications when you can run + * curl_easy_duphandle() for each new thread to avoid a series of identical + * curl_easy_setopt() invokes in every thread. + */ +CURL_EXTERN CURL* curl_easy_duphandle(CURL *curl); + +/* + * NAME curl_easy_reset() + * + * DESCRIPTION + * + * Re-initializes a CURL handle to the default values. This puts back the + * handle to the same state as it was in when it was just created. + * + * It does keep: live connections, the Session ID cache, the DNS cache and the + * cookies. + */ +CURL_EXTERN void curl_easy_reset(CURL *curl); + +/* + * NAME curl_easy_recv() + * + * DESCRIPTION + * + * Receives data from the connected socket. 
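A sketch of the usual easy-interface sequence around curl_easy_getinfo() described above (not part of the vendored header); the URL is a placeholder and a working network connection is assumed:

#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  CURL *curl;
  CURLcode res;
  long code = 0;

  curl_global_init(CURL_GLOBAL_ALL);
  curl = curl_easy_init();
  curl_easy_setopt(curl, CURLOPT_URL, "http://example.com/");

  res = curl_easy_perform(curl);
  /* getinfo results are only defined after the transfer has run */
  if(res == CURLE_OK &&
     curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &code) == CURLE_OK)
    printf("HTTP response code: %ld\n", code);
  else
    fprintf(stderr, "transfer failed: %s\n", curl_easy_strerror(res));

  curl_easy_cleanup(curl);
  curl_global_cleanup();
  return 0;
}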
Use after successful + * curl_easy_perform() with CURLOPT_CONNECT_ONLY option. + */ +CURL_EXTERN CURLcode curl_easy_recv(CURL *curl, void *buffer, size_t buflen, + size_t *n); + +/* + * NAME curl_easy_send() + * + * DESCRIPTION + * + * Sends data over the connected socket. Use after successful + * curl_easy_perform() with CURLOPT_CONNECT_ONLY option. + */ +CURL_EXTERN CURLcode curl_easy_send(CURL *curl, const void *buffer, + size_t buflen, size_t *n); + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/mprintf.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/mprintf.h new file mode 100644 index 0000000000..d7202de170 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/mprintf.h @@ -0,0 +1,82 @@ +#ifndef __CURL_MPRINTF_H +#define __CURL_MPRINTF_H +/*************************************************************************** + * _ _ ____ _ + * Project ___| | | | _ \| | + * / __| | | | |_) | | + * | (__| |_| | _ <| |___ + * \___|\___/|_| \_\_____| + * + * Copyright (C) 1998 - 2006, Daniel Stenberg, <daniel@haxx.se>, et al. + * + * This software is licensed as described in the file COPYING, which + * you should have received as part of this distribution. The terms + * are also available at http://curl.haxx.se/docs/copyright.html. + * + * You may opt to use, copy, modify, merge, publish, distribute and/or sell + * copies of the Software, and permit persons to whom the Software is + * furnished to do so, under the terms of the COPYING file. + * + * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY + * KIND, either express or implied. + * + * $Id: mprintf.h,v 1.16 2008-05-20 10:21:50 patrickm Exp $ + ***************************************************************************/ + +#include <stdarg.h> +#include <stdio.h> /* needed for FILE */ + +#include "curl.h" + +#ifdef __cplusplus +extern "C" { +#endif + +CURL_EXTERN int curl_mprintf(const char *format, ...); +CURL_EXTERN int curl_mfprintf(FILE *fd, const char *format, ...); +CURL_EXTERN int curl_msprintf(char *buffer, const char *format, ...); +CURL_EXTERN int curl_msnprintf(char *buffer, size_t maxlength, + const char *format, ...); +CURL_EXTERN int curl_mvprintf(const char *format, va_list args); +CURL_EXTERN int curl_mvfprintf(FILE *fd, const char *format, va_list args); +CURL_EXTERN int curl_mvsprintf(char *buffer, const char *format, va_list args); +CURL_EXTERN int curl_mvsnprintf(char *buffer, size_t maxlength, + const char *format, va_list args); +CURL_EXTERN char *curl_maprintf(const char *format, ...); +CURL_EXTERN char *curl_mvaprintf(const char *format, va_list args); + +#ifdef _MPRINTF_REPLACE +# undef printf +# undef fprintf +# undef sprintf +# undef vsprintf +# undef snprintf +# undef vprintf +# undef vfprintf +# undef vsnprintf +# undef aprintf +# undef vaprintf +# define printf curl_mprintf +# define fprintf curl_mfprintf +#ifdef CURLDEBUG +/* When built with CURLDEBUG we define away the sprintf() functions since we + don't want internal code to be using them */ +# define sprintf sprintf_was_used +# define vsprintf vsprintf_was_used +#else +# define sprintf curl_msprintf +# define vsprintf curl_mvsprintf +#endif +# define snprintf curl_msnprintf +# define vprintf curl_mvprintf +# define vfprintf curl_mvfprintf +# define vsnprintf curl_mvsnprintf +# define aprintf curl_maprintf +# define vaprintf curl_mvaprintf +#endif + +#ifdef __cplusplus +} +#endif + +#endif /* __CURL_MPRINTF_H */ diff --git
a/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/multi.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/multi.h new file mode 100644 index 0000000000..153f7721c9 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/multi.h @@ -0,0 +1,346 @@ +#ifndef __CURL_MULTI_H +#define __CURL_MULTI_H +/*************************************************************************** + * _ _ ____ _ + * Project ___| | | | _ \| | + * / __| | | | |_) | | + * | (__| |_| | _ <| |___ + * \___|\___/|_| \_\_____| + * + * Copyright (C) 1998 - 2007, Daniel Stenberg, , et al. + * + * This software is licensed as described in the file COPYING, which + * you should have received as part of this distribution. The terms + * are also available at http://curl.haxx.se/docs/copyright.html. + * + * You may opt to use, copy, modify, merge, publish, distribute and/or sell + * copies of the Software, and permit persons to whom the Software is + * furnished to do so, under the terms of the COPYING file. + * + * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY + * KIND, either express or implied. + * + * $Id: multi.h,v 1.45 2008-05-20 10:21:50 patrickm Exp $ + ***************************************************************************/ +/* + This is an "external" header file. Don't give away any internals here! + + GOALS + + o Enable a "pull" interface. The application that uses libcurl decides where + and when to ask libcurl to get/send data. + + o Enable multiple simultaneous transfers in the same thread without making it + complicated for the application. + + o Enable the application to select() on its own file descriptors and curl's + file descriptors simultaneous easily. + +*/ + +/* + * This header file should not really need to include "curl.h" since curl.h + * itself includes this file and we expect user applications to do #include + * without the need for especially including multi.h. + * + * For some reason we added this include here at one point, and rather than to + * break existing (wrongly written) libcurl applications, we leave it as-is + * but with this warning attached. + */ +#include "curl.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef void CURLM; + +typedef enum { + CURLM_CALL_MULTI_PERFORM = -1, /* please call curl_multi_perform() or + curl_multi_socket*() soon */ + CURLM_OK, + CURLM_BAD_HANDLE, /* the passed-in handle is not a valid CURLM handle */ + CURLM_BAD_EASY_HANDLE, /* an easy handle was not good/valid */ + CURLM_OUT_OF_MEMORY, /* if you ever get this, you're in deep sh*t */ + CURLM_INTERNAL_ERROR, /* this is a libcurl bug */ + CURLM_BAD_SOCKET, /* the passed in socket argument did not match */ + CURLM_UNKNOWN_OPTION, /* curl_multi_setopt() with unsupported option */ + CURLM_LAST +} CURLMcode; + +/* just to make code nicer when using curl_multi_socket() you can now check + for CURLM_CALL_MULTI_SOCKET too in the same style it works for + curl_multi_perform() and CURLM_CALL_MULTI_PERFORM */ +#define CURLM_CALL_MULTI_SOCKET CURLM_CALL_MULTI_PERFORM + +typedef enum { + CURLMSG_NONE, /* first, not used */ + CURLMSG_DONE, /* This easy handle has completed. 
'result' contains + the CURLcode of the transfer */ + CURLMSG_LAST /* last, not used */ +} CURLMSG; + +struct CURLMsg { + CURLMSG msg; /* what this message means */ + CURL *easy_handle; /* the handle it concerns */ + union { + void *whatever; /* message-specific data */ + CURLcode result; /* return code for transfer */ + } data; +}; +typedef struct CURLMsg CURLMsg; + +/* + * Name: curl_multi_init() + * + * Desc: inititalize multi-style curl usage + * + * Returns: a new CURLM handle to use in all 'curl_multi' functions. + */ +CURL_EXTERN CURLM *curl_multi_init(void); + +/* + * Name: curl_multi_add_handle() + * + * Desc: add a standard curl handle to the multi stack + * + * Returns: CURLMcode type, general multi error code. + */ +CURL_EXTERN CURLMcode curl_multi_add_handle(CURLM *multi_handle, + CURL *curl_handle); + + /* + * Name: curl_multi_remove_handle() + * + * Desc: removes a curl handle from the multi stack again + * + * Returns: CURLMcode type, general multi error code. + */ +CURL_EXTERN CURLMcode curl_multi_remove_handle(CURLM *multi_handle, + CURL *curl_handle); + + /* + * Name: curl_multi_fdset() + * + * Desc: Ask curl for its fd_set sets. The app can use these to select() or + * poll() on. We want curl_multi_perform() called as soon as one of + * them are ready. + * + * Returns: CURLMcode type, general multi error code. + */ +CURL_EXTERN CURLMcode curl_multi_fdset(CURLM *multi_handle, + fd_set *read_fd_set, + fd_set *write_fd_set, + fd_set *exc_fd_set, + int *max_fd); + + /* + * Name: curl_multi_perform() + * + * Desc: When the app thinks there's data available for curl it calls this + * function to read/write whatever there is right now. This returns + * as soon as the reads and writes are done. This function does not + * require that there actually is data available for reading or that + * data can be written, it can be called just in case. It returns + * the number of handles that still transfer data in the second + * argument's integer-pointer. + * + * Returns: CURLMcode type, general multi error code. *NOTE* that this only + * returns errors etc regarding the whole multi stack. There might + * still have occurred problems on invidual transfers even when this + * returns OK. + */ +CURL_EXTERN CURLMcode curl_multi_perform(CURLM *multi_handle, + int *running_handles); + + /* + * Name: curl_multi_cleanup() + * + * Desc: Cleans up and removes a whole multi stack. It does not free or + * touch any individual easy handles in any way. We need to define + * in what state those handles will be if this function is called + * in the middle of a transfer. + * + * Returns: CURLMcode type, general multi error code. + */ +CURL_EXTERN CURLMcode curl_multi_cleanup(CURLM *multi_handle); + +/* + * Name: curl_multi_info_read() + * + * Desc: Ask the multi handle if there's any messages/informationals from + * the individual transfers. Messages include informationals such as + * error code from the transfer or just the fact that a transfer is + * completed. More details on these should be written down as well. + * + * Repeated calls to this function will return a new struct each + * time, until a special "end of msgs" struct is returned as a signal + * that there is no more to get at this point. + * + * The data the returned pointer points to will not survive calling + * curl_multi_cleanup(). + * + * The 'CURLMsg' struct is meant to be very simple and only contain + * very basic informations. 
If more involved information is wanted, + * we will provide the particular "transfer handle" in that struct + * and that should/could/would be used in subsequent + * curl_easy_getinfo() calls (or similar). The point being that we + * must never expose complex structs to applications, as then we'll + * undoubtably get backwards compatibility problems in the future. + * + * Returns: A pointer to a filled-in struct, or NULL if it failed or ran out + * of structs. It also writes the number of messages left in the + * queue (after this read) in the integer the second argument points + * to. + */ +CURL_EXTERN CURLMsg *curl_multi_info_read(CURLM *multi_handle, + int *msgs_in_queue); + +/* + * Name: curl_multi_strerror() + * + * Desc: The curl_multi_strerror function may be used to turn a CURLMcode + * value into the equivalent human readable error string. This is + * useful for printing meaningful error messages. + * + * Returns: A pointer to a zero-terminated error message. + */ +CURL_EXTERN const char *curl_multi_strerror(CURLMcode); + +/* + * Name: curl_multi_socket() and + * curl_multi_socket_all() + * + * Desc: An alternative version of curl_multi_perform() that allows the + * application to pass in one of the file descriptors that have been + * detected to have "action" on them and let libcurl perform. + * See man page for details. + */ +#define CURL_POLL_NONE 0 +#define CURL_POLL_IN 1 +#define CURL_POLL_OUT 2 +#define CURL_POLL_INOUT 3 +#define CURL_POLL_REMOVE 4 + +#define CURL_SOCKET_TIMEOUT CURL_SOCKET_BAD + +#define CURL_CSELECT_IN 0x01 +#define CURL_CSELECT_OUT 0x02 +#define CURL_CSELECT_ERR 0x04 + +typedef int (*curl_socket_callback)(CURL *easy, /* easy handle */ + curl_socket_t s, /* socket */ + int what, /* see above */ + void *userp, /* private callback + pointer */ + void *socketp); /* private socket + pointer */ +/* + * Name: curl_multi_timer_callback + * + * Desc: Called by libcurl whenever the library detects a change in the + * maximum number of milliseconds the app is allowed to wait before + * curl_multi_socket() or curl_multi_perform() must be called + * (to allow libcurl's timed events to take place). + * + * Returns: The callback should return zero. + */ +typedef int (*curl_multi_timer_callback)(CURLM *multi, /* multi handle */ + long timeout_ms, /* see above */ + void *userp); /* private callback + pointer */ + +CURL_EXTERN CURLMcode curl_multi_socket(CURLM *multi_handle, curl_socket_t s, + int *running_handles); + +CURL_EXTERN CURLMcode curl_multi_socket_action(CURLM *multi_handle, + curl_socket_t s, + int ev_bitmask, + int *running_handles); + +CURL_EXTERN CURLMcode curl_multi_socket_all(CURLM *multi_handle, + int *running_handles); + +#ifndef CURL_ALLOW_OLD_MULTI_SOCKET +/* This macro below was added in 7.16.3 to push users who recompile to use + the new curl_multi_socket_action() instead of the old curl_multi_socket() +*/ +#define curl_multi_socket(x,y,z) curl_multi_socket_action(x,y,0,z) +#endif + +/* + * Name: curl_multi_timeout() + * + * Desc: Returns the maximum number of milliseconds the app is allowed to + * wait before curl_multi_socket() or curl_multi_perform() must be + * called (to allow libcurl's timed events to take place). + * + * Returns: CURLM error code. 
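A sketch of the classic select()-driven loop built from curl_multi_fdset(), curl_multi_timeout() and curl_multi_perform() as described above (not part of the vendored header); one transfer, placeholder URL, error checks trimmed:

#include <sys/select.h>
#include <curl/curl.h>

int main(void)
{
  CURL *easy;
  CURLM *multi;
  int running = 1;

  curl_global_init(CURL_GLOBAL_ALL);
  easy = curl_easy_init();
  curl_easy_setopt(easy, CURLOPT_URL, "http://example.com/");

  multi = curl_multi_init();
  curl_multi_add_handle(multi, easy);

  while(running) {
    fd_set rd, wr, ex;
    int maxfd = -1;
    long timeout_ms = 1000;
    struct timeval tv;

    curl_multi_timeout(multi, &timeout_ms);
    if(timeout_ms < 0)
      timeout_ms = 1000;                /* no timer pending yet */
    tv.tv_sec = timeout_ms / 1000;
    tv.tv_usec = (timeout_ms % 1000) * 1000;

    FD_ZERO(&rd); FD_ZERO(&wr); FD_ZERO(&ex);
    curl_multi_fdset(multi, &rd, &wr, &ex, &maxfd);
    if(maxfd >= 0)
      select(maxfd + 1, &rd, &wr, &ex, &tv);

    /* keep calling perform while it asks for another round */
    while(curl_multi_perform(multi, &running) == CURLM_CALL_MULTI_PERFORM)
      ;
  }

  curl_multi_remove_handle(multi, easy);
  curl_easy_cleanup(easy);
  curl_multi_cleanup(multi);
  curl_global_cleanup();
  return 0;
}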
+ */ +CURL_EXTERN CURLMcode curl_multi_timeout(CURLM *multi_handle, + long *milliseconds); + +#undef CINIT /* re-using the same name as in curl.h */ + +#ifdef CURL_ISOCPP +#define CINIT(name,type,num) CURLMOPT_ ## name = CURLOPTTYPE_ ## type + num +#else +/* The macro "##" is ISO C, we assume pre-ISO C doesn't support it. */ +#define LONG CURLOPTTYPE_LONG +#define OBJECTPOINT CURLOPTTYPE_OBJECTPOINT +#define FUNCTIONPOINT CURLOPTTYPE_FUNCTIONPOINT +#define OFF_T CURLOPTTYPE_OFF_T +#define CINIT(name,type,number) CURLMOPT_/**/name = type + number +#endif + +typedef enum { + /* This is the socket callback function pointer */ + CINIT(SOCKETFUNCTION, FUNCTIONPOINT, 1), + + /* This is the argument passed to the socket callback */ + CINIT(SOCKETDATA, OBJECTPOINT, 2), + + /* set to 1 to enable pipelining for this multi handle */ + CINIT(PIPELINING, LONG, 3), + + /* This is the timer callback function pointer */ + CINIT(TIMERFUNCTION, FUNCTIONPOINT, 4), + + /* This is the argument passed to the timer callback */ + CINIT(TIMERDATA, OBJECTPOINT, 5), + + /* maximum number of entries in the connection cache */ + CINIT(MAXCONNECTS, LONG, 6), + + CURLMOPT_LASTENTRY /* the last unused */ +} CURLMoption; + + +/* + * Name: curl_multi_setopt() + * + * Desc: Sets options for the multi handle. + * + * Returns: CURLM error code. + */ +CURL_EXTERN CURLMcode curl_multi_setopt(CURLM *multi_handle, + CURLMoption option, ...); + + +/* + * Name: curl_multi_assign() + * + * Desc: This function sets an association in the multi handle between the + * given socket and a private pointer of the application. This is + * (only) useful for curl_multi_socket uses. + * + * Returns: CURLM error code. + */ +CURL_EXTERN CURLMcode curl_multi_assign(CURLM *multi_handle, + curl_socket_t sockfd, void *sockp); + +#ifdef __cplusplus +} /* end of extern "C" */ +#endif + +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/stdcheaders.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/stdcheaders.h new file mode 100644 index 0000000000..f739d7f9aa --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/stdcheaders.h @@ -0,0 +1,34 @@ +#ifndef __STDC_HEADERS_H +#define __STDC_HEADERS_H +/*************************************************************************** + * _ _ ____ _ + * Project ___| | | | _ \| | + * / __| | | | |_) | | + * | (__| |_| | _ <| |___ + * \___|\___/|_| \_\_____| + * + * Copyright (C) 1998 - 2009, Daniel Stenberg, , et al. + * + * This software is licensed as described in the file COPYING, which + * you should have received as part of this distribution. The terms + * are also available at http://curl.haxx.se/docs/copyright.html. + * + * You may opt to use, copy, modify, merge, publish, distribute and/or sell + * copies of the Software, and permit persons to whom the Software is + * furnished to do so, under the terms of the COPYING file. + * + * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY + * KIND, either express or implied. 
+ * + * $Id: stdcheaders.h,v 1.9 2009-05-18 12:25:45 yangtse Exp $ + ***************************************************************************/ + +#include <sys/types.h> + +size_t fread (void *, size_t, size_t, FILE *); +size_t fwrite (const void *, size_t, size_t, FILE *); + +int strcasecmp(const char *, const char *); +int strncasecmp(const char *, const char *, size_t); + +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/typecheck-gcc.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/typecheck-gcc.h new file mode 100644 index 0000000000..9788305819 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/typecheck-gcc.h @@ -0,0 +1,551 @@ +#ifndef __CURL_TYPECHECK_GCC_H +#define __CURL_TYPECHECK_GCC_H +/*************************************************************************** + * _ _ ____ _ + * Project ___| | | | _ \| | + * / __| | | | |_) | | + * | (__| |_| | _ <| |___ + * \___|\___/|_| \_\_____| + * + * Copyright (C) 1998 - 2009, Daniel Stenberg, <daniel@haxx.se>, et al. + * + * This software is licensed as described in the file COPYING, which + * you should have received as part of this distribution. The terms + * are also available at http://curl.haxx.se/docs/copyright.html. + * + * You may opt to use, copy, modify, merge, publish, distribute and/or sell + * copies of the Software, and permit persons to whom the Software is + * furnished to do so, under the terms of the COPYING file. + * + * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY + * KIND, either express or implied. + * + * $Id: typecheck-gcc.h,v 1.9 2009-01-25 23:26:31 bagder Exp $ + ***************************************************************************/ + +/* wraps curl_easy_setopt() with typechecking */ + +/* To add a new kind of warning, add an + * if(_curl_is_sometype_option(_curl_opt) && !
_curl_is_sometype(value)) + * _curl_easy_setopt_err_sometype(); + * block and define _curl_is_sometype_option, _curl_is_sometype and + * _curl_easy_setopt_err_sometype below + * + * To add an option that uses the same type as an existing option, you'll just + * need to extend the appropriate _curl_*_option macro + */ +#define curl_easy_setopt(handle, option, value) \ +__extension__ ({ \ + __typeof__ (option) _curl_opt = option; \ + if (__builtin_constant_p(_curl_opt)) { \ + if (_curl_is_long_option(_curl_opt) && !_curl_is_long(value)) \ + _curl_easy_setopt_err_long(); \ + if (_curl_is_off_t_option(_curl_opt) && !_curl_is_off_t(value)) \ + _curl_easy_setopt_err_curl_off_t(); \ + if (_curl_is_string_option(_curl_opt) && !_curl_is_string(value)) \ + _curl_easy_setopt_err_string(); \ + if (_curl_is_write_cb_option(_curl_opt) && !_curl_is_write_cb(value)) \ + _curl_easy_setopt_err_write_callback(); \ + if ((_curl_opt) == CURLOPT_READFUNCTION && !_curl_is_read_cb(value)) \ + _curl_easy_setopt_err_read_cb(); \ + if ((_curl_opt) == CURLOPT_IOCTLFUNCTION && !_curl_is_ioctl_cb(value)) \ + _curl_easy_setopt_err_ioctl_cb(); \ + if ((_curl_opt) == CURLOPT_SOCKOPTFUNCTION && !_curl_is_sockopt_cb(value))\ + _curl_easy_setopt_err_sockopt_cb(); \ + if ((_curl_opt) == CURLOPT_OPENSOCKETFUNCTION && \ + !_curl_is_opensocket_cb(value)) \ + _curl_easy_setopt_err_opensocket_cb(); \ + if ((_curl_opt) == CURLOPT_PROGRESSFUNCTION && \ + !_curl_is_progress_cb(value)) \ + _curl_easy_setopt_err_progress_cb(); \ + if ((_curl_opt) == CURLOPT_DEBUGFUNCTION && !_curl_is_debug_cb(value)) \ + _curl_easy_setopt_err_debug_cb(); \ + if ((_curl_opt) == CURLOPT_SSL_CTX_FUNCTION && \ + !_curl_is_ssl_ctx_cb(value)) \ + _curl_easy_setopt_err_ssl_ctx_cb(); \ + if (_curl_is_conv_cb_option(_curl_opt) && !_curl_is_conv_cb(value)) \ + _curl_easy_setopt_err_conv_cb(); \ + if ((_curl_opt) == CURLOPT_SEEKFUNCTION && !_curl_is_seek_cb(value)) \ + _curl_easy_setopt_err_seek_cb(); \ + if (_curl_is_cb_data_option(_curl_opt) && !_curl_is_cb_data(value)) \ + _curl_easy_setopt_err_cb_data(); \ + if ((_curl_opt) == CURLOPT_ERRORBUFFER && !_curl_is_error_buffer(value)) \ + _curl_easy_setopt_err_error_buffer(); \ + if ((_curl_opt) == CURLOPT_STDERR && !_curl_is_FILE(value)) \ + _curl_easy_setopt_err_FILE(); \ + if (_curl_is_postfields_option(_curl_opt) && !_curl_is_postfields(value)) \ + _curl_easy_setopt_err_postfields(); \ + if ((_curl_opt) == CURLOPT_HTTPPOST && \ + !_curl_is_arr((value), struct curl_httppost)) \ + _curl_easy_setopt_err_curl_httpost(); \ + if (_curl_is_slist_option(_curl_opt) && \ + !_curl_is_arr((value), struct curl_slist)) \ + _curl_easy_setopt_err_curl_slist(); \ + if ((_curl_opt) == CURLOPT_SHARE && !_curl_is_ptr((value), CURLSH)) \ + _curl_easy_setopt_err_CURLSH(); \ + } \ + curl_easy_setopt(handle, _curl_opt, value); \ +}) + +/* wraps curl_easy_getinfo() with typechecking */ +/* FIXME: don't allow const pointers */ +#define curl_easy_getinfo(handle, info, arg) \ +__extension__ ({ \ + __typeof__ (info) _curl_info = info; \ + if (__builtin_constant_p(_curl_info)) { \ + if (_curl_is_string_info(_curl_info) && !_curl_is_arr((arg), char *)) \ + _curl_easy_getinfo_err_string(); \ + if (_curl_is_long_info(_curl_info) && !_curl_is_arr((arg), long)) \ + _curl_easy_getinfo_err_long(); \ + if (_curl_is_double_info(_curl_info) && !_curl_is_arr((arg), double)) \ + _curl_easy_getinfo_err_double(); \ + if (_curl_is_slist_info(_curl_info) && \ + !_curl_is_arr((arg), struct curl_slist *)) \ + _curl_easy_getinfo_err_curl_slist(); \ + } \ + 
curl_easy_getinfo(handle, _curl_info, arg); \ +}) + +/* TODO: typechecking for curl_share_setopt() and curl_multi_setopt(), + * for now just make sure that the functions are called with three + * arguments + */ +#define curl_share_setopt(share,opt,param) curl_share_setopt(share,opt,param) +#define curl_multi_setopt(handle,opt,param) curl_multi_setopt(handle,opt,param) + + +/* the actual warnings, triggered by calling the _curl_easy_setopt_err* + * functions */ + +/* To define a new warning, use _CURL_WARNING(identifier, "message") */ +#define _CURL_WARNING(id, message) \ + static void __attribute__((warning(message))) __attribute__((unused)) \ + __attribute__((noinline)) id(void) { __asm__(""); } + +_CURL_WARNING(_curl_easy_setopt_err_long, + "curl_easy_setopt expects a long argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_curl_off_t, + "curl_easy_setopt expects a curl_off_t argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_string, + "curl_easy_setopt expects a string (char* or char[]) argument for this option" + ) +_CURL_WARNING(_curl_easy_setopt_err_write_callback, + "curl_easy_setopt expects a curl_write_callback argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_read_cb, + "curl_easy_setopt expects a curl_read_callback argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_ioctl_cb, + "curl_easy_setopt expects a curl_ioctl_callback argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_sockopt_cb, + "curl_easy_setopt expects a curl_sockopt_callback argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_opensocket_cb, + "curl_easy_setopt expects a curl_opensocket_callback argument for this option" + ) +_CURL_WARNING(_curl_easy_setopt_err_progress_cb, + "curl_easy_setopt expects a curl_progress_callback argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_debug_cb, + "curl_easy_setopt expects a curl_debug_callback argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_ssl_ctx_cb, + "curl_easy_setopt expects a curl_ssl_ctx_callback argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_conv_cb, + "curl_easy_setopt expects a curl_conv_callback argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_seek_cb, + "curl_easy_setopt expects a curl_seek_callback argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_cb_data, + "curl_easy_setopt expects a private data pointer as argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_error_buffer, + "curl_easy_setopt expects a char buffer of CURL_ERROR_SIZE as argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_FILE, + "curl_easy_setopt expects a FILE* argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_postfields, + "curl_easy_setopt expects a void* or char* argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_curl_httpost, + "curl_easy_setopt expects a struct curl_httppost* argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_curl_slist, + "curl_easy_setopt expects a struct curl_slist* argument for this option") +_CURL_WARNING(_curl_easy_setopt_err_CURLSH, + "curl_easy_setopt expects a CURLSH* argument for this option") + +_CURL_WARNING(_curl_easy_getinfo_err_string, + "curl_easy_getinfo expects a pointer to char * for this info") +_CURL_WARNING(_curl_easy_getinfo_err_long, + "curl_easy_getinfo expects a pointer to long for this info") +_CURL_WARNING(_curl_easy_getinfo_err_double, + "curl_easy_getinfo expects a pointer to double for this info") 
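What the typechecking macro is meant to catch, sketched from the caller's side (not part of the vendored header); with gcc >= 4.3 and optimization enabled, the commented-out call would be routed through _curl_easy_setopt_err_long() and emit its warning text at compile time:

#include <curl/curl.h>

int main(void)
{
  CURL *h = curl_easy_init();

  curl_easy_setopt(h, CURLOPT_TIMEOUT, 30L);       /* long expected: fine */
  /* curl_easy_setopt(h, CURLOPT_TIMEOUT, "30");      would trigger the
       "expects a long argument" warning defined above */
  curl_easy_setopt(h, CURLOPT_URL, "http://example.com/");
  curl_easy_cleanup(h);
  return 0;
}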
+_CURL_WARNING(_curl_easy_getinfo_err_curl_slist, + "curl_easy_getinfo expects a pointer to struct curl_slist * for this info") + +/* groups of curl_easy_setops options that take the same type of argument */ + +/* To add a new option to one of the groups, just add + * (option) == CURLOPT_SOMETHING + * to the or-expression. If the option takes a long or curl_off_t, you don't + * have to do anything + */ + +/* evaluates to true if option takes a long argument */ +#define _curl_is_long_option(option) \ + (0 < (option) && (option) < CURLOPTTYPE_OBJECTPOINT) + +#define _curl_is_off_t_option(option) \ + ((option) > CURLOPTTYPE_OFF_T) + +/* evaluates to true if option takes a char* argument */ +#define _curl_is_string_option(option) \ + ((option) == CURLOPT_URL || \ + (option) == CURLOPT_PROXY || \ + (option) == CURLOPT_INTERFACE || \ + (option) == CURLOPT_NETRC_FILE || \ + (option) == CURLOPT_USERPWD || \ + (option) == CURLOPT_USERNAME || \ + (option) == CURLOPT_PASSWORD || \ + (option) == CURLOPT_PROXYUSERPWD || \ + (option) == CURLOPT_PROXYUSERNAME || \ + (option) == CURLOPT_PROXYPASSWORD || \ + (option) == CURLOPT_NOPROXY || \ + (option) == CURLOPT_ENCODING || \ + (option) == CURLOPT_REFERER || \ + (option) == CURLOPT_USERAGENT || \ + (option) == CURLOPT_COOKIE || \ + (option) == CURLOPT_COOKIEFILE || \ + (option) == CURLOPT_COOKIEJAR || \ + (option) == CURLOPT_COOKIELIST || \ + (option) == CURLOPT_FTPPORT || \ + (option) == CURLOPT_FTP_ALTERNATIVE_TO_USER || \ + (option) == CURLOPT_FTP_ACCOUNT || \ + (option) == CURLOPT_RANGE || \ + (option) == CURLOPT_CUSTOMREQUEST || \ + (option) == CURLOPT_SSLCERT || \ + (option) == CURLOPT_SSLCERTTYPE || \ + (option) == CURLOPT_SSLKEY || \ + (option) == CURLOPT_SSLKEYTYPE || \ + (option) == CURLOPT_KEYPASSWD || \ + (option) == CURLOPT_SSLENGINE || \ + (option) == CURLOPT_CAINFO || \ + (option) == CURLOPT_CAPATH || \ + (option) == CURLOPT_RANDOM_FILE || \ + (option) == CURLOPT_EGDSOCKET || \ + (option) == CURLOPT_SSL_CIPHER_LIST || \ + (option) == CURLOPT_KRBLEVEL || \ + (option) == CURLOPT_SSH_HOST_PUBLIC_KEY_MD5 || \ + (option) == CURLOPT_SSH_PUBLIC_KEYFILE || \ + (option) == CURLOPT_SSH_PRIVATE_KEYFILE || \ + (option) == CURLOPT_CRLFILE || \ + (option) == CURLOPT_ISSUERCERT || \ + 0) + +/* evaluates to true if option takes a curl_write_callback argument */ +#define _curl_is_write_cb_option(option) \ + ((option) == CURLOPT_HEADERFUNCTION || \ + (option) == CURLOPT_WRITEFUNCTION) + +/* evaluates to true if option takes a curl_conv_callback argument */ +#define _curl_is_conv_cb_option(option) \ + ((option) == CURLOPT_CONV_TO_NETWORK_FUNCTION || \ + (option) == CURLOPT_CONV_FROM_NETWORK_FUNCTION || \ + (option) == CURLOPT_CONV_FROM_UTF8_FUNCTION) + +/* evaluates to true if option takes a data argument to pass to a callback */ +#define _curl_is_cb_data_option(option) \ + ((option) == CURLOPT_WRITEDATA || \ + (option) == CURLOPT_READDATA || \ + (option) == CURLOPT_IOCTLDATA || \ + (option) == CURLOPT_SOCKOPTDATA || \ + (option) == CURLOPT_OPENSOCKETDATA || \ + (option) == CURLOPT_PROGRESSDATA || \ + (option) == CURLOPT_WRITEHEADER || \ + (option) == CURLOPT_DEBUGDATA || \ + (option) == CURLOPT_SSL_CTX_DATA || \ + (option) == CURLOPT_SEEKDATA || \ + (option) == CURLOPT_PRIVATE || \ + 0) + +/* evaluates to true if option takes a POST data argument (void* or char*) */ +#define _curl_is_postfields_option(option) \ + ((option) == CURLOPT_POSTFIELDS || \ + (option) == CURLOPT_COPYPOSTFIELDS || \ + 0) + +/* evaluates to true if option takes a struct curl_slist * 
argument */ +#define _curl_is_slist_option(option) \ + ((option) == CURLOPT_HTTPHEADER || \ + (option) == CURLOPT_HTTP200ALIASES || \ + (option) == CURLOPT_QUOTE || \ + (option) == CURLOPT_POSTQUOTE || \ + (option) == CURLOPT_PREQUOTE || \ + (option) == CURLOPT_TELNETOPTIONS || \ + 0) + +/* groups of curl_easy_getinfo infos that take the same type of argument */ + +/* evaluates to true if info expects a pointer to char * argument */ +#define _curl_is_string_info(info) \ + (CURLINFO_STRING < (info) && (info) < CURLINFO_LONG) + +/* evaluates to true if info expects a pointer to long argument */ +#define _curl_is_long_info(info) \ + (CURLINFO_LONG < (info) && (info) < CURLINFO_DOUBLE) + +/* evaluates to true if info expects a pointer to double argument */ +#define _curl_is_double_info(info) \ + (CURLINFO_DOUBLE < (info) && (info) < CURLINFO_SLIST) + +/* true if info expects a pointer to struct curl_slist * argument */ +#define _curl_is_slist_info(info) \ + (CURLINFO_SLIST < (info)) + + +/* typecheck helpers -- check whether given expression has requested type*/ + +/* For pointers, you can use the _curl_is_ptr/_curl_is_arr macros, + * otherwise define a new macro. Search for __builtin_types_compatible_p + * in the GCC manual. + * NOTE: these macros MUST NOT EVALUATE their arguments! The argument is + * the actual expression passed to the curl_easy_setopt macro. This + * means that you can only apply the sizeof and __typeof__ operators, no + * == or whatsoever. + */ + +/* XXX: should evaluate to true iff expr is a pointer */ +#define _curl_is_any_ptr(expr) \ + (sizeof(expr) == sizeof(void*)) + +/* evaluates to true if expr is NULL */ +/* XXX: must not evaluate expr, so this check is not accurate */ +#define _curl_is_NULL(expr) \ + (__builtin_types_compatible_p(__typeof__(expr), __typeof__(NULL))) + +/* evaluates to true if expr is type*, const type* or NULL */ +#define _curl_is_ptr(expr, type) \ + (_curl_is_NULL(expr) || \ + __builtin_types_compatible_p(__typeof__(expr), type *) || \ + __builtin_types_compatible_p(__typeof__(expr), const type *)) + +/* evaluates to true if expr is one of type[], type*, NULL or const type* */ +#define _curl_is_arr(expr, type) \ + (_curl_is_ptr((expr), type) || \ + __builtin_types_compatible_p(__typeof__(expr), type [])) + +/* evaluates to true if expr is a string */ +#define _curl_is_string(expr) \ + (_curl_is_arr((expr), char) || \ + _curl_is_arr((expr), signed char) || \ + _curl_is_arr((expr), unsigned char)) + +/* evaluates to true if expr is a long (no matter the signedness) + * XXX: for now, int is also accepted (and therefore short and char, which + * are promoted to int when passed to a variadic function) */ +#define _curl_is_long(expr) \ + (__builtin_types_compatible_p(__typeof__(expr), long) || \ + __builtin_types_compatible_p(__typeof__(expr), signed long) || \ + __builtin_types_compatible_p(__typeof__(expr), unsigned long) || \ + __builtin_types_compatible_p(__typeof__(expr), int) || \ + __builtin_types_compatible_p(__typeof__(expr), signed int) || \ + __builtin_types_compatible_p(__typeof__(expr), unsigned int) || \ + __builtin_types_compatible_p(__typeof__(expr), short) || \ + __builtin_types_compatible_p(__typeof__(expr), signed short) || \ + __builtin_types_compatible_p(__typeof__(expr), unsigned short) || \ + __builtin_types_compatible_p(__typeof__(expr), char) || \ + __builtin_types_compatible_p(__typeof__(expr), signed char) || \ + __builtin_types_compatible_p(__typeof__(expr), unsigned char)) + +/* evaluates to true if expr is of type 
curl_off_t */ +#define _curl_is_off_t(expr) \ + (__builtin_types_compatible_p(__typeof__(expr), curl_off_t)) + +/* evaluates to true if expr is abuffer suitable for CURLOPT_ERRORBUFFER */ +/* XXX: also check size of an char[] array? */ +#define _curl_is_error_buffer(expr) \ + (__builtin_types_compatible_p(__typeof__(expr), char *) || \ + __builtin_types_compatible_p(__typeof__(expr), char[])) + +/* evaluates to true if expr is of type (const) void* or (const) FILE* */ +#if 0 +#define _curl_is_cb_data(expr) \ + (_curl_is_ptr((expr), void) || \ + _curl_is_ptr((expr), FILE)) +#else /* be less strict */ +#define _curl_is_cb_data(expr) \ + _curl_is_any_ptr(expr) +#endif + +/* evaluates to true if expr is of type FILE* */ +#define _curl_is_FILE(expr) \ + (__builtin_types_compatible_p(__typeof__(expr), FILE *)) + +/* evaluates to true if expr can be passed as POST data (void* or char*) */ +#define _curl_is_postfields(expr) \ + (_curl_is_ptr((expr), void) || \ + _curl_is_arr((expr), char)) + +/* FIXME: the whole callback checking is messy... + * The idea is to tolerate char vs. void and const vs. not const + * pointers in arguments at least + */ +/* helper: __builtin_types_compatible_p distinguishes between functions and + * function pointers, hide it */ +#define _curl_callback_compatible(func, type) \ + (__builtin_types_compatible_p(__typeof__(func), type) || \ + __builtin_types_compatible_p(__typeof__(func), type*)) + +/* evaluates to true if expr is of type curl_read_callback or "similar" */ +#define _curl_is_read_cb(expr) \ + (_curl_is_NULL(expr) || \ + __builtin_types_compatible_p(__typeof__(expr), __typeof__(fread)) || \ + __builtin_types_compatible_p(__typeof__(expr), curl_read_callback) || \ + _curl_callback_compatible((expr), _curl_read_callback1) || \ + _curl_callback_compatible((expr), _curl_read_callback2) || \ + _curl_callback_compatible((expr), _curl_read_callback3) || \ + _curl_callback_compatible((expr), _curl_read_callback4) || \ + _curl_callback_compatible((expr), _curl_read_callback5) || \ + _curl_callback_compatible((expr), _curl_read_callback6)) +typedef size_t (_curl_read_callback1)(char *, size_t, size_t, void*); +typedef size_t (_curl_read_callback2)(char *, size_t, size_t, const void*); +typedef size_t (_curl_read_callback3)(char *, size_t, size_t, FILE*); +typedef size_t (_curl_read_callback4)(void *, size_t, size_t, void*); +typedef size_t (_curl_read_callback5)(void *, size_t, size_t, const void*); +typedef size_t (_curl_read_callback6)(void *, size_t, size_t, FILE*); + +/* evaluates to true if expr is of type curl_write_callback or "similar" */ +#define _curl_is_write_cb(expr) \ + (_curl_is_read_cb(expr) || \ + __builtin_types_compatible_p(__typeof__(expr), __typeof__(fwrite)) || \ + __builtin_types_compatible_p(__typeof__(expr), curl_write_callback) || \ + _curl_callback_compatible((expr), _curl_write_callback1) || \ + _curl_callback_compatible((expr), _curl_write_callback2) || \ + _curl_callback_compatible((expr), _curl_write_callback3) || \ + _curl_callback_compatible((expr), _curl_write_callback4) || \ + _curl_callback_compatible((expr), _curl_write_callback5) || \ + _curl_callback_compatible((expr), _curl_write_callback6)) +typedef size_t (_curl_write_callback1)(const char *, size_t, size_t, void*); +typedef size_t (_curl_write_callback2)(const char *, size_t, size_t, + const void*); +typedef size_t (_curl_write_callback3)(const char *, size_t, size_t, FILE*); +typedef size_t (_curl_write_callback4)(const void *, size_t, size_t, void*); +typedef size_t 
(_curl_write_callback5)(const void *, size_t, size_t, + const void*); +typedef size_t (_curl_write_callback6)(const void *, size_t, size_t, FILE*); + +/* evaluates to true if expr is of type curl_ioctl_callback or "similar" */ +#define _curl_is_ioctl_cb(expr) \ + (_curl_is_NULL(expr) || \ + __builtin_types_compatible_p(__typeof__(expr), curl_ioctl_callback) || \ + _curl_callback_compatible((expr), _curl_ioctl_callback1) || \ + _curl_callback_compatible((expr), _curl_ioctl_callback2) || \ + _curl_callback_compatible((expr), _curl_ioctl_callback3) || \ + _curl_callback_compatible((expr), _curl_ioctl_callback4)) +typedef curlioerr (_curl_ioctl_callback1)(CURL *, int, void*); +typedef curlioerr (_curl_ioctl_callback2)(CURL *, int, const void*); +typedef curlioerr (_curl_ioctl_callback3)(CURL *, curliocmd, void*); +typedef curlioerr (_curl_ioctl_callback4)(CURL *, curliocmd, const void*); + +/* evaluates to true if expr is of type curl_sockopt_callback or "similar" */ +#define _curl_is_sockopt_cb(expr) \ + (_curl_is_NULL(expr) || \ + __builtin_types_compatible_p(__typeof__(expr), curl_sockopt_callback) || \ + _curl_callback_compatible((expr), _curl_sockopt_callback1) || \ + _curl_callback_compatible((expr), _curl_sockopt_callback2)) +typedef int (_curl_sockopt_callback1)(void *, curl_socket_t, curlsocktype); +typedef int (_curl_sockopt_callback2)(const void *, curl_socket_t, + curlsocktype); + +/* evaluates to true if expr is of type curl_opensocket_callback or "similar" */ +#define _curl_is_opensocket_cb(expr) \ + (_curl_is_NULL(expr) || \ + __builtin_types_compatible_p(__typeof__(expr), curl_opensocket_callback) ||\ + _curl_callback_compatible((expr), _curl_opensocket_callback1) || \ + _curl_callback_compatible((expr), _curl_opensocket_callback2) || \ + _curl_callback_compatible((expr), _curl_opensocket_callback3) || \ + _curl_callback_compatible((expr), _curl_opensocket_callback4)) +typedef curl_socket_t (_curl_opensocket_callback1) + (void *, curlsocktype, struct curl_sockaddr *); +typedef curl_socket_t (_curl_opensocket_callback2) + (void *, curlsocktype, const struct curl_sockaddr *); +typedef curl_socket_t (_curl_opensocket_callback3) + (const void *, curlsocktype, struct curl_sockaddr *); +typedef curl_socket_t (_curl_opensocket_callback4) + (const void *, curlsocktype, const struct curl_sockaddr *); + +/* evaluates to true if expr is of type curl_progress_callback or "similar" */ +#define _curl_is_progress_cb(expr) \ + (_curl_is_NULL(expr) || \ + __builtin_types_compatible_p(__typeof__(expr), curl_progress_callback) || \ + _curl_callback_compatible((expr), _curl_progress_callback1) || \ + _curl_callback_compatible((expr), _curl_progress_callback2)) +typedef int (_curl_progress_callback1)(void *, + double, double, double, double); +typedef int (_curl_progress_callback2)(const void *, + double, double, double, double); + +/* evaluates to true if expr is of type curl_debug_callback or "similar" */ +#define _curl_is_debug_cb(expr) \ + (_curl_is_NULL(expr) || \ + __builtin_types_compatible_p(__typeof__(expr), curl_debug_callback) || \ + _curl_callback_compatible((expr), _curl_debug_callback1) || \ + _curl_callback_compatible((expr), _curl_debug_callback2) || \ + _curl_callback_compatible((expr), _curl_debug_callback3) || \ + _curl_callback_compatible((expr), _curl_debug_callback4)) +typedef int (_curl_debug_callback1) (CURL *, + curl_infotype, char *, size_t, void *); +typedef int (_curl_debug_callback2) (CURL *, + curl_infotype, char *, size_t, const void *); +typedef int 
(_curl_debug_callback3) (CURL *, + curl_infotype, const char *, size_t, void *); +typedef int (_curl_debug_callback4) (CURL *, + curl_infotype, const char *, size_t, const void *); + +/* evaluates to true if expr is of type curl_ssl_ctx_callback or "similar" */ +/* this is getting even messier... */ +#define _curl_is_ssl_ctx_cb(expr) \ + (_curl_is_NULL(expr) || \ + __builtin_types_compatible_p(__typeof__(expr), curl_ssl_ctx_callback) || \ + _curl_callback_compatible((expr), _curl_ssl_ctx_callback1) || \ + _curl_callback_compatible((expr), _curl_ssl_ctx_callback2) || \ + _curl_callback_compatible((expr), _curl_ssl_ctx_callback3) || \ + _curl_callback_compatible((expr), _curl_ssl_ctx_callback4) || \ + _curl_callback_compatible((expr), _curl_ssl_ctx_callback5) || \ + _curl_callback_compatible((expr), _curl_ssl_ctx_callback6) || \ + _curl_callback_compatible((expr), _curl_ssl_ctx_callback7) || \ + _curl_callback_compatible((expr), _curl_ssl_ctx_callback8)) +typedef CURLcode (_curl_ssl_ctx_callback1)(CURL *, void *, void *); +typedef CURLcode (_curl_ssl_ctx_callback2)(CURL *, void *, const void *); +typedef CURLcode (_curl_ssl_ctx_callback3)(CURL *, const void *, void *); +typedef CURLcode (_curl_ssl_ctx_callback4)(CURL *, const void *, const void *); +#ifdef HEADER_SSL_H +/* hack: if we included OpenSSL's ssl.h, we know about SSL_CTX + * this will of course break if we're included before OpenSSL headers... + */ +typedef CURLcode (_curl_ssl_ctx_callback5)(CURL *, SSL_CTX, void *); +typedef CURLcode (_curl_ssl_ctx_callback6)(CURL *, SSL_CTX, const void *); +typedef CURLcode (_curl_ssl_ctx_callback7)(CURL *, const SSL_CTX, void *); +typedef CURLcode (_curl_ssl_ctx_callback8)(CURL *, const SSL_CTX, const void *); +#else +typedef _curl_ssl_ctx_callback1 _curl_ssl_ctx_callback5; +typedef _curl_ssl_ctx_callback1 _curl_ssl_ctx_callback6; +typedef _curl_ssl_ctx_callback1 _curl_ssl_ctx_callback7; +typedef _curl_ssl_ctx_callback1 _curl_ssl_ctx_callback8; +#endif + +/* evaluates to true if expr is of type curl_conv_callback or "similar" */ +#define _curl_is_conv_cb(expr) \ + (_curl_is_NULL(expr) || \ + __builtin_types_compatible_p(__typeof__(expr), curl_conv_callback) || \ + _curl_callback_compatible((expr), _curl_conv_callback1) || \ + _curl_callback_compatible((expr), _curl_conv_callback2) || \ + _curl_callback_compatible((expr), _curl_conv_callback3) || \ + _curl_callback_compatible((expr), _curl_conv_callback4)) +typedef CURLcode (*_curl_conv_callback1)(char *, size_t length); +typedef CURLcode (*_curl_conv_callback2)(const char *, size_t length); +typedef CURLcode (*_curl_conv_callback3)(void *, size_t length); +typedef CURLcode (*_curl_conv_callback4)(const void *, size_t length); + +/* evaluates to true if expr is of type curl_seek_callback or "similar" */ +#define _curl_is_seek_cb(expr) \ + (_curl_is_NULL(expr) || \ + __builtin_types_compatible_p(__typeof__(expr), curl_seek_callback) || \ + _curl_callback_compatible((expr), _curl_seek_callback1) || \ + _curl_callback_compatible((expr), _curl_seek_callback2)) +typedef CURLcode (*_curl_seek_callback1)(void *, curl_off_t, int); +typedef CURLcode (*_curl_seek_callback2)(const void *, curl_off_t, int); + + +#endif /* __CURL_TYPECHECK_GCC_H */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/types.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/types.h new file mode 100644 index 0000000000..d37d6ae9e1 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/curl/types.h @@ -0,0 +1 @@ +/* not used 
*/ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/libglog.pc.in b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/libglog.pc.in new file mode 100644 index 0000000000..ad2b0774d8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/libglog.pc.in @@ -0,0 +1,10 @@ +prefix=@prefix@ +exec_prefix=@exec_prefix@ +libdir=@libdir@ +includedir=@includedir@ + +Name: libglog +Description: Google Log (glog) C++ logging framework +Version: @VERSION@ +Libs: -L${libdir} -lglog +Cflags: -I${includedir} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/base/commandlineflags.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/base/commandlineflags.h new file mode 100644 index 0000000000..53d9485f55 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/base/commandlineflags.h @@ -0,0 +1,132 @@ +// Copyright (c) 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// --- +// This file is a compatibility layer that defines Google's version of +// command line flags that are used for configuration. +// +// We put flags into their own namespace. It is purposefully +// named in an opaque way that people should have trouble typing +// directly. The idea is that DEFINE puts the flag in the weird +// namespace, and DECLARE imports the flag from there into the +// current namespace. The net result is to force people to use +// DECLARE to get access to a flag, rather than saying +// extern bool FLAGS_logtostderr; +// or some such instead. We want this so we can put extra +// functionality (like sanity-checking) in DECLARE if we want, +// and make sure it is picked up everywhere. +// +// We also put the type of the variable in the namespace, so that +// people can't DECLARE_int32 something that they DEFINE_bool'd +// elsewhere. 
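A minimal usage sketch of the DECLARE_*/DEFINE_* fallback macros defined just below (hypothetical file and flag names, and assuming gflags is absent so the non-HAVE_LIB_GFLAGS branch is in effect):

// logging_module.cc -- the file that owns the flag.
#include "base/commandlineflags.h"
DEFINE_bool(verbose_teardown, false, "log extra detail while shutting down");

// consumer.cc -- any other file has to import the flag explicitly.
#include "base/commandlineflags.h"
DECLARE_bool(verbose_teardown);      // brings FLAGS_verbose_teardown into scope

void Teardown() {
  if (FLAGS_verbose_teardown) {
    // ... extra diagnostics ...
  }
}

// Writing "extern bool FLAGS_verbose_teardown;" instead of DECLARE_bool would
// compile but not link: the definition really lives inside
// FLAG__namespace_do_not_use_directly_use_DECLARE_bool_instead, which is the
// point of the opaque namespace.

The GLOG_DEFINE_* wrappers further down behave the same way but seed the default value from a "GLOG_"-prefixed environment variable via EnvToBool/EnvToInt/EnvToString; an explicitly passed flag still wins over the environment.

Separately, the typecheck-gcc.h header vendored a little earlier in this patch works by classifying the curl_easy_setopt/curl_easy_getinfo argument with __typeof__ and __builtin_types_compatible_p and, on a mismatch, calling a never-inlined dummy function marked __attribute__((warning(...))), so misuse surfaces as a compile-time warning whenever GCC can fold away the matching branch (in practice, with optimization enabled). A standalone sketch of the same shape, using std::is_same in place of the C-only __builtin_types_compatible_p and a made-up set_timeout() API:

#include <type_traits>

void set_timeout_impl(long ms);      // the real, unchecked entry point

// Dummy that exists only to carry the diagnostic; a correct call site never
// references it, so no warning is emitted for it.
static void __attribute__((warning("set_timeout expects a long argument")))
            __attribute__((unused)) __attribute__((noinline))
set_timeout_bad_argument(void) { __asm__(""); }

#define set_timeout(ms)                                                    \
  do {                                                                     \
    if (!std::is_same<std::decay_t<decltype(ms)>, long>::value)            \
      set_timeout_bad_argument();   /* referenced only for wrong types */  \
    set_timeout_impl(ms);                                                  \
  } while (0)

// set_timeout(750L);   // fine, no diagnostic
// set_timeout(0.75);   // still compiles (double narrows to long) but warns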
+#ifndef BASE_COMMANDLINEFLAGS_H__ +#define BASE_COMMANDLINEFLAGS_H__ + +#include "config.h" +#include +#include // for memchr +#include // for getenv + +#ifdef HAVE_LIB_GFLAGS + +#include + +#else + +#include "glog/logging.h" + +#define DECLARE_VARIABLE(type, name, tn) \ + namespace FLAG__namespace_do_not_use_directly_use_DECLARE_##tn##_instead { \ + extern GOOGLE_GLOG_DLL_DECL type FLAGS_##name; \ + } \ + using FLAG__namespace_do_not_use_directly_use_DECLARE_##tn##_instead::FLAGS_##name +#define DEFINE_VARIABLE(type, name, value, meaning, tn) \ + namespace FLAG__namespace_do_not_use_directly_use_DECLARE_##tn##_instead { \ + GOOGLE_GLOG_DLL_DECL type FLAGS_##name(value); \ + char FLAGS_no##name; \ + } \ + using FLAG__namespace_do_not_use_directly_use_DECLARE_##tn##_instead::FLAGS_##name + +// bool specialization +#define DECLARE_bool(name) \ + DECLARE_VARIABLE(bool, name, bool) +#define DEFINE_bool(name, value, meaning) \ + DEFINE_VARIABLE(bool, name, value, meaning, bool) + +// int32 specialization +#define DECLARE_int32(name) \ + DECLARE_VARIABLE(GOOGLE_NAMESPACE::int32, name, int32) +#define DEFINE_int32(name, value, meaning) \ + DEFINE_VARIABLE(GOOGLE_NAMESPACE::int32, name, value, meaning, int32) + +// Special case for string, because we have to specify the namespace +// std::string, which doesn't play nicely with our FLAG__namespace hackery. +#define DECLARE_string(name) \ + namespace FLAG__namespace_do_not_use_directly_use_DECLARE_string_instead { \ + extern GOOGLE_GLOG_DLL_DECL std::string FLAGS_##name; \ + } \ + using FLAG__namespace_do_not_use_directly_use_DECLARE_string_instead::FLAGS_##name +#define DEFINE_string(name, value, meaning) \ + namespace FLAG__namespace_do_not_use_directly_use_DECLARE_string_instead { \ + GOOGLE_GLOG_DLL_DECL std::string FLAGS_##name(value); \ + char FLAGS_no##name; \ + } \ + using FLAG__namespace_do_not_use_directly_use_DECLARE_string_instead::FLAGS_##name + +#endif // HAVE_LIB_GFLAGS + +// Define GLOG_DEFINE_* using DEFINE_* . By using these macros, we +// have GLOG_* environ variables even if we have gflags installed. +// +// If both an environment variable and a flag are specified, the value +// specified by a flag wins. E.g., if GLOG_v=0 and --v=1, the +// verbosity will be 1, not 0. + +#define GLOG_DEFINE_bool(name, value, meaning) \ + DEFINE_bool(name, EnvToBool("GLOG_" #name, value), meaning) + +#define GLOG_DEFINE_int32(name, value, meaning) \ + DEFINE_int32(name, EnvToInt("GLOG_" #name, value), meaning) + +#define GLOG_DEFINE_string(name, value, meaning) \ + DEFINE_string(name, EnvToString("GLOG_" #name, value), meaning) + +// These macros (could be functions, but I don't want to bother with a .cc +// file), make it easier to initialize flags from the environment. + +#define EnvToString(envname, dflt) \ + (!getenv(envname) ? (dflt) : getenv(envname)) + +#define EnvToBool(envname, dflt) \ + (!getenv(envname) ? (dflt) : memchr("tTyY1\0", getenv(envname)[0], 6) != NULL) + +#define EnvToInt(envname, dflt) \ + (!getenv(envname) ? (dflt) : strtol(getenv(envname), NULL, 10)) + +#endif // BASE_COMMANDLINEFLAGS_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/base/googleinit.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/base/googleinit.h new file mode 100644 index 0000000000..c907308e85 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/base/googleinit.h @@ -0,0 +1,51 @@ +// Copyright (c) 2008, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// --- +// Author: Jacob Hoffman-Andrews + +#ifndef _GOOGLEINIT_H +#define _GOOGLEINIT_H + +class GoogleInitializer { + public: + typedef void (*void_function)(void); + GoogleInitializer(const char* name, void_function f) { + f(); + } +}; + +#define REGISTER_MODULE_INITIALIZER(name, body) \ + namespace { \ + static void google_init_module_##name () { body; } \ + GoogleInitializer google_initializer_module_##name(#name, \ + google_init_module_##name); \ + } + +#endif /* _GOOGLEINIT_H */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/base/mutex.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/base/mutex.h new file mode 100644 index 0000000000..7ba88cb5a6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/base/mutex.h @@ -0,0 +1,325 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// --- +// Author: Craig Silverstein. +// +// A simple mutex wrapper, supporting locks and read-write locks. +// You should assume the locks are *not* re-entrant. +// +// To use: you should define the following macros in your configure.ac: +// ACX_PTHREAD +// AC_RWLOCK +// The latter is defined in ../autoconf. +// +// This class is meant to be internal-only and should be wrapped by an +// internal namespace. Before you use this module, please give the +// name of your internal namespace for this module. Or, if you want +// to expose it, you'll want to move it to the Google namespace. We +// cannot put this class in global namespace because there can be some +// problems when we have multiple versions of Mutex in each shared object. +// +// NOTE: by default, we have #ifdef'ed out the TryLock() method. +// This is for two reasons: +// 1) TryLock() under Windows is a bit annoying (it requires a +// #define to be defined very early). +// 2) TryLock() is broken for NO_THREADS mode, at least in NDEBUG +// mode. +// If you need TryLock(), and either these two caveats are not a +// problem for you, or you're willing to work around them, then +// feel free to #define GMUTEX_TRYLOCK, or to remove the #ifdefs +// in the code below. +// +// CYGWIN NOTE: Cygwin support for rwlock seems to be buggy: +// http://www.cygwin.com/ml/cygwin/2008-12/msg00017.html +// Because of that, we might as well use windows locks for +// cygwin. They seem to be more reliable than the cygwin pthreads layer. +// +// TRICKY IMPLEMENTATION NOTE: +// This class is designed to be safe to use during +// dynamic-initialization -- that is, by global constructors that are +// run before main() starts. The issue in this case is that +// dynamic-initialization happens in an unpredictable order, and it +// could be that someone else's dynamic initializer could call a +// function that tries to acquire this mutex -- but that all happens +// before this mutex's constructor has run. (This can happen even if +// the mutex and the function that uses the mutex are in the same .cc +// file.) Basically, because Mutex does non-trivial work in its +// constructor, it's not, in the naive implementation, safe to use +// before dynamic initialization has run on it. +// +// The solution used here is to pair the actual mutex primitive with a +// bool that is set to true when the mutex is dynamically initialized. +// (Before that it's false.) Then we modify all mutex routines to +// look at the bool, and not try to lock/unlock until the bool makes +// it to true (which happens after the Mutex constructor has run.) 
+// +// This works because before main() starts -- particularly, during +// dynamic initialization -- there are no threads, so a) it's ok that +// the mutex operations are a no-op, since we don't need locking then +// anyway; and b) we can be quite confident our bool won't change +// state between a call to Lock() and a call to Unlock() (that would +// require a global constructor in one translation unit to call Lock() +// and another global constructor in another translation unit to call +// Unlock() later, which is pretty perverse). +// +// That said, it's tricky, and can conceivably fail; it's safest to +// avoid trying to acquire a mutex in a global constructor, if you +// can. One way it can fail is that a really smart compiler might +// initialize the bool to true at static-initialization time (too +// early) rather than at dynamic-initialization time. To discourage +// that, we set is_safe_ to true in code (not the constructor +// colon-initializer) and set it to true via a function that always +// evaluates to true, but that the compiler can't know always +// evaluates to true. This should be good enough. + +#ifndef GOOGLE_MUTEX_H_ +#define GOOGLE_MUTEX_H_ + +#include "config.h" // to figure out pthreads support + +#if defined(NO_THREADS) + typedef int MutexType; // to keep a lock-count +#elif defined(_WIN32) || defined(__CYGWIN32__) || defined(__CYGWIN64__) +# define WIN32_LEAN_AND_MEAN // We only need minimal includes +# ifdef GMUTEX_TRYLOCK + // We need Windows NT or later for TryEnterCriticalSection(). If you + // don't need that functionality, you can remove these _WIN32_WINNT + // lines, and change TryLock() to assert(0) or something. +# ifndef _WIN32_WINNT +# define _WIN32_WINNT 0x0400 +# endif +# endif +// To avoid macro definition of ERROR. +# define NOGDI +// To avoid macro definition of min/max. +# define NOMINMAX +# include + typedef CRITICAL_SECTION MutexType; +#elif defined(HAVE_PTHREAD) && defined(HAVE_RWLOCK) + // Needed for pthread_rwlock_*. If it causes problems, you could take it + // out, but then you'd have to unset HAVE_RWLOCK (at least on linux -- it + // *does* cause problems for FreeBSD, or MacOSX, but isn't needed + // for locking there.) +# ifdef __linux__ +# define _XOPEN_SOURCE 500 // may be needed to get the rwlock calls +# endif +# include + typedef pthread_rwlock_t MutexType; +#elif defined(HAVE_PTHREAD) +# include + typedef pthread_mutex_t MutexType; +#else +# error Need to implement mutex.h for your architecture, or #define NO_THREADS +#endif + +// We need to include these header files after defining _XOPEN_SOURCE +// as they may define the _XOPEN_SOURCE macro. +#include +#include // for abort() + +#define MUTEX_NAMESPACE glog_internal_namespace_ + +namespace MUTEX_NAMESPACE { + +class Mutex { + public: + // Create a Mutex that is not held by anybody. This constructor is + // typically used for Mutexes allocated on the heap or the stack. + // See below for a recommendation for constructing global Mutex + // objects. + inline Mutex(); + + // Destructor + inline ~Mutex(); + + inline void Lock(); // Block if needed until free then acquire exclusively + inline void Unlock(); // Release a lock acquired via Lock() +#ifdef GMUTEX_TRYLOCK + inline bool TryLock(); // If free, Lock() and return true, else return false +#endif + // Note that on systems that don't support read-write locks, these may + // be implemented as synonyms to Lock() and Unlock(). 
So you can use + // these for efficiency, but don't use them anyplace where being able + // to do shared reads is necessary to avoid deadlock. + inline void ReaderLock(); // Block until free or shared then acquire a share + inline void ReaderUnlock(); // Release a read share of this Mutex + inline void WriterLock() { Lock(); } // Acquire an exclusive lock + inline void WriterUnlock() { Unlock(); } // Release a lock from WriterLock() + + // TODO(hamaji): Do nothing, implement correctly. + inline void AssertHeld() {} + + private: + MutexType mutex_; + // We want to make sure that the compiler sets is_safe_ to true only + // when we tell it to, and never makes assumptions is_safe_ is + // always true. volatile is the most reliable way to do that. + volatile bool is_safe_; + + inline void SetIsSafe() { is_safe_ = true; } + + // Catch the error of writing Mutex when intending MutexLock. + Mutex(Mutex* /*ignored*/) {} + // Disallow "evil" constructors + Mutex(const Mutex&); + void operator=(const Mutex&); +}; + +// Now the implementation of Mutex for various systems +#if defined(NO_THREADS) + +// When we don't have threads, we can be either reading or writing, +// but not both. We can have lots of readers at once (in no-threads +// mode, that's most likely to happen in recursive function calls), +// but only one writer. We represent this by having mutex_ be -1 when +// writing and a number > 0 when reading (and 0 when no lock is held). +// +// In debug mode, we assert these invariants, while in non-debug mode +// we do nothing, for efficiency. That's why everything is in an +// assert. + +Mutex::Mutex() : mutex_(0) { } +Mutex::~Mutex() { assert(mutex_ == 0); } +void Mutex::Lock() { assert(--mutex_ == -1); } +void Mutex::Unlock() { assert(mutex_++ == -1); } +#ifdef GMUTEX_TRYLOCK +bool Mutex::TryLock() { if (mutex_) return false; Lock(); return true; } +#endif +void Mutex::ReaderLock() { assert(++mutex_ > 0); } +void Mutex::ReaderUnlock() { assert(mutex_-- > 0); } + +#elif defined(_WIN32) || defined(__CYGWIN32__) || defined(__CYGWIN64__) + +Mutex::Mutex() { InitializeCriticalSection(&mutex_); SetIsSafe(); } +Mutex::~Mutex() { DeleteCriticalSection(&mutex_); } +void Mutex::Lock() { if (is_safe_) EnterCriticalSection(&mutex_); } +void Mutex::Unlock() { if (is_safe_) LeaveCriticalSection(&mutex_); } +#ifdef GMUTEX_TRYLOCK +bool Mutex::TryLock() { return is_safe_ ? + TryEnterCriticalSection(&mutex_) != 0 : true; } +#endif +void Mutex::ReaderLock() { Lock(); } // we don't have read-write locks +void Mutex::ReaderUnlock() { Unlock(); } + +#elif defined(HAVE_PTHREAD) && defined(HAVE_RWLOCK) + +#define SAFE_PTHREAD(fncall) do { /* run fncall if is_safe_ is true */ \ + if (is_safe_ && fncall(&mutex_) != 0) abort(); \ +} while (0) + +Mutex::Mutex() { + SetIsSafe(); + if (is_safe_ && pthread_rwlock_init(&mutex_, NULL) != 0) abort(); +} +Mutex::~Mutex() { SAFE_PTHREAD(pthread_rwlock_destroy); } +void Mutex::Lock() { SAFE_PTHREAD(pthread_rwlock_wrlock); } +void Mutex::Unlock() { SAFE_PTHREAD(pthread_rwlock_unlock); } +#ifdef GMUTEX_TRYLOCK +bool Mutex::TryLock() { return is_safe_ ? 
+ pthread_rwlock_trywrlock(&mutex_) == 0 : + true; } +#endif +void Mutex::ReaderLock() { SAFE_PTHREAD(pthread_rwlock_rdlock); } +void Mutex::ReaderUnlock() { SAFE_PTHREAD(pthread_rwlock_unlock); } +#undef SAFE_PTHREAD + +#elif defined(HAVE_PTHREAD) + +#define SAFE_PTHREAD(fncall) do { /* run fncall if is_safe_ is true */ \ + if (is_safe_ && fncall(&mutex_) != 0) abort(); \ +} while (0) + +Mutex::Mutex() { + SetIsSafe(); + if (is_safe_ && pthread_mutex_init(&mutex_, NULL) != 0) abort(); +} +Mutex::~Mutex() { SAFE_PTHREAD(pthread_mutex_destroy); } +void Mutex::Lock() { SAFE_PTHREAD(pthread_mutex_lock); } +void Mutex::Unlock() { SAFE_PTHREAD(pthread_mutex_unlock); } +#ifdef GMUTEX_TRYLOCK +bool Mutex::TryLock() { return is_safe_ ? + pthread_mutex_trylock(&mutex_) == 0 : true; } +#endif +void Mutex::ReaderLock() { Lock(); } +void Mutex::ReaderUnlock() { Unlock(); } +#undef SAFE_PTHREAD + +#endif + +// -------------------------------------------------------------------------- +// Some helper classes + +// MutexLock(mu) acquires mu when constructed and releases it when destroyed. +class MutexLock { + public: + explicit MutexLock(Mutex *mu) : mu_(mu) { mu_->Lock(); } + ~MutexLock() { mu_->Unlock(); } + private: + Mutex * const mu_; + // Disallow "evil" constructors + MutexLock(const MutexLock&); + void operator=(const MutexLock&); +}; + +// ReaderMutexLock and WriterMutexLock do the same, for rwlocks +class ReaderMutexLock { + public: + explicit ReaderMutexLock(Mutex *mu) : mu_(mu) { mu_->ReaderLock(); } + ~ReaderMutexLock() { mu_->ReaderUnlock(); } + private: + Mutex * const mu_; + // Disallow "evil" constructors + ReaderMutexLock(const ReaderMutexLock&); + void operator=(const ReaderMutexLock&); +}; + +class WriterMutexLock { + public: + explicit WriterMutexLock(Mutex *mu) : mu_(mu) { mu_->WriterLock(); } + ~WriterMutexLock() { mu_->WriterUnlock(); } + private: + Mutex * const mu_; + // Disallow "evil" constructors + WriterMutexLock(const WriterMutexLock&); + void operator=(const WriterMutexLock&); +}; + +// Catch bug where variable name is omitted, e.g. MutexLock (&mu); +#define MutexLock(x) COMPILE_ASSERT(0, mutex_lock_decl_missing_var_name) +#define ReaderMutexLock(x) COMPILE_ASSERT(0, rmutex_lock_decl_missing_var_name) +#define WriterMutexLock(x) COMPILE_ASSERT(0, wmutex_lock_decl_missing_var_name) + +} // namespace MUTEX_NAMESPACE + +using namespace MUTEX_NAMESPACE; + +#undef MUTEX_NAMESPACE + +#endif /* #define GOOGLE_MUTEX_H__ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/config.h.in b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/config.h.in new file mode 100644 index 0000000000..844a7fce5c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/config.h.in @@ -0,0 +1,165 @@ +/* src/config.h.in. Generated from configure.ac by autoheader. */ + +/* Namespace for Google classes */ +#undef GOOGLE_NAMESPACE + +/* Define if you have the `dladdr' function */ +#undef HAVE_DLADDR + +/* Define to 1 if you have the header file. */ +#undef HAVE_DLFCN_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_EXECINFO_H + +/* Define if you have the `fcntl' function */ +#undef HAVE_FCNTL + +/* Define to 1 if you have the header file. */ +#undef HAVE_GLOB_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_INTTYPES_H + +/* Define to 1 if you have the `pthread' library (-lpthread). */ +#undef HAVE_LIBPTHREAD + +/* Define to 1 if you have the header file. 
*/ +#undef HAVE_LIBUNWIND_H + +/* define if you have google gflags library */ +#undef HAVE_LIB_GFLAGS + +/* define if you have google gmock library */ +#undef HAVE_LIB_GMOCK + +/* define if you have google gtest library */ +#undef HAVE_LIB_GTEST + +/* define if you have libunwind */ +#undef HAVE_LIB_UNWIND + +/* Define to 1 if you have the header file. */ +#undef HAVE_MEMORY_H + +/* define if the compiler implements namespaces */ +#undef HAVE_NAMESPACES + +/* Define if you have POSIX threads libraries and header files. */ +#undef HAVE_PTHREAD + +/* Define to 1 if you have the header file. */ +#undef HAVE_PWD_H + +/* define if the compiler implements pthread_rwlock_* */ +#undef HAVE_RWLOCK + +/* Define if you have the `sigaltstack' function */ +#undef HAVE_SIGALTSTACK + +/* Define to 1 if you have the header file. */ +#undef HAVE_STDINT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STDLIB_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STRINGS_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STRING_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYSCALL_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYSLOG_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_STAT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_SYSCALL_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_TIME_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_TYPES_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_UCONTEXT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_UTSNAME_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_UCONTEXT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_UNISTD_H + +/* define if the compiler supports using expression for operator */ +#undef HAVE_USING_OPERATOR + +/* define if your compiler has __attribute__ */ +#undef HAVE___ATTRIBUTE__ + +/* define if your compiler has __builtin_expect */ +#undef HAVE___BUILTIN_EXPECT + +/* define if your compiler has __sync_val_compare_and_swap */ +#undef HAVE___SYNC_VAL_COMPARE_AND_SWAP + +/* Define to the sub-directory in which libtool stores uninstalled libraries. + */ +#undef LT_OBJDIR + +/* Name of package */ +#undef PACKAGE + +/* Define to the address where bug reports for this package should be sent. */ +#undef PACKAGE_BUGREPORT + +/* Define to the full name of this package. */ +#undef PACKAGE_NAME + +/* Define to the full name and version of this package. */ +#undef PACKAGE_STRING + +/* Define to the one symbol short name of this package. */ +#undef PACKAGE_TARNAME + +/* Define to the home page for this package. */ +#undef PACKAGE_URL + +/* Define to the version of this package. */ +#undef PACKAGE_VERSION + +/* How to access the PC from a struct ucontext */ +#undef PC_FROM_UCONTEXT + +/* Define to necessary symbol if this constant uses a non-standard name on + your system. */ +#undef PTHREAD_CREATE_JOINABLE + +/* The size of `void *', as computed by sizeof. */ +#undef SIZEOF_VOID_P + +/* Define to 1 if you have the ANSI C header files. 
*/ +#undef STDC_HEADERS + +/* the namespace where STL code like vector<> is defined */ +#undef STL_NAMESPACE + +/* location of source code */ +#undef TEST_SRC_DIR + +/* Version number of package */ +#undef VERSION + +/* Stops putting the code inside the Google namespace */ +#undef _END_GOOGLE_NAMESPACE_ + +/* Puts following code inside the Google namespace */ +#undef _START_GOOGLE_NAMESPACE_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/config_for_unittests.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/config_for_unittests.h new file mode 100644 index 0000000000..13ea8eab7a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/config_for_unittests.h @@ -0,0 +1,66 @@ +// Copyright (c) 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// --- +// All Rights Reserved. +// +// Author: Craig Silverstein +// Copied from google-perftools and modified by Shinichiro Hamaji +// +// This file is needed for windows -- unittests are not part of the +// glog dll, but still want to include config.h just like the +// dll does, so they can use internal tools and APIs for testing. +// +// The problem is that config.h declares GOOGLE_GLOG_DLL_DECL to be +// for exporting symbols, but the unittest needs to *import* symbols +// (since it's not the dll). +// +// The solution is to have this file, which is just like config.h but +// sets GOOGLE_GLOG_DLL_DECL to do a dllimport instead of a dllexport. +// +// The reason we need this extra GOOGLE_GLOG_DLL_DECL_FOR_UNITTESTS +// variable is in case people want to set GOOGLE_GLOG_DLL_DECL explicitly +// to something other than __declspec(dllexport). In that case, they +// may want to use something other than __declspec(dllimport) for the +// unittest case. For that, we allow folks to define both +// GOOGLE_GLOG_DLL_DECL and GOOGLE_GLOG_DLL_DECL_FOR_UNITTESTS explicitly. 
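Looking back at the mutex.h wrapper added above: the intended call pattern is the RAII MutexLock/ReaderMutexLock/WriterMutexLock helpers rather than hand-written Lock()/Unlock() pairs. A small sketch with hypothetical names, assuming the glog-internal "base/mutex.h" include path:

#include "base/mutex.h"

static Mutex g_counter_mu;       // protects g_counter
static int g_counter = 0;

int NextId() {
  MutexLock l(&g_counter_mu);    // acquired in the constructor...
  return ++g_counter;            // ...released in the destructor, even on early return
}

ReaderMutexLock and WriterMutexLock are the shared/exclusive flavours (they degrade to plain Lock()/Unlock() where rwlocks are unavailable), and the MutexLock(x)/ReaderMutexLock(x)/WriterMutexLock(x) macros at the bottom of the header turn the variable-name-omitted form, MutexLock(&g_counter_mu);, into a compile error, which is exactly the bug the header's own comment calls out.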
+// +// NOTE: This file is equivalent to config.h on non-windows systems, +// which never defined GOOGLE_GLOG_DLL_DECL_FOR_UNITTESTS and always +// define GOOGLE_GLOG_DLL_DECL to the empty string. + +#include "config.h" + +#undef GOOGLE_GLOG_DLL_DECL +#ifdef GOOGLE_GLOG_DLL_DECL_FOR_UNITTESTS +# define GOOGLE_GLOG_DLL_DECL GOOGLE_GLOG_DLL_DECL_FOR_UNITTESTS +#else +// if DLL_DECL_FOR_UNITTESTS isn't defined, use "" +# define GOOGLE_GLOG_DLL_DECL +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/demangle.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/demangle.cc new file mode 100644 index 0000000000..2fbb790036 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/demangle.cc @@ -0,0 +1,1307 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Satoru Takabayashi +// +// For reference check out: +// http://www.codesourcery.com/public/cxx-abi/abi.html#mangling +// +// Note that we only have partial C++0x support yet. + +#include // for NULL +#include "demangle.h" + +_START_GOOGLE_NAMESPACE_ + +typedef struct { + const char *abbrev; + const char *real_name; +} AbbrevPair; + +// List of operators from Itanium C++ ABI. +static const AbbrevPair kOperatorList[] = { + { "nw", "new" }, + { "na", "new[]" }, + { "dl", "delete" }, + { "da", "delete[]" }, + { "ps", "+" }, + { "ng", "-" }, + { "ad", "&" }, + { "de", "*" }, + { "co", "~" }, + { "pl", "+" }, + { "mi", "-" }, + { "ml", "*" }, + { "dv", "/" }, + { "rm", "%" }, + { "an", "&" }, + { "or", "|" }, + { "eo", "^" }, + { "aS", "=" }, + { "pL", "+=" }, + { "mI", "-=" }, + { "mL", "*=" }, + { "dV", "/=" }, + { "rM", "%=" }, + { "aN", "&=" }, + { "oR", "|=" }, + { "eO", "^=" }, + { "ls", "<<" }, + { "rs", ">>" }, + { "lS", "<<=" }, + { "rS", ">>=" }, + { "eq", "==" }, + { "ne", "!=" }, + { "lt", "<" }, + { "gt", ">" }, + { "le", "<=" }, + { "ge", ">=" }, + { "nt", "!" 
}, + { "aa", "&&" }, + { "oo", "||" }, + { "pp", "++" }, + { "mm", "--" }, + { "cm", "," }, + { "pm", "->*" }, + { "pt", "->" }, + { "cl", "()" }, + { "ix", "[]" }, + { "qu", "?" }, + { "st", "sizeof" }, + { "sz", "sizeof" }, + { NULL, NULL }, +}; + +// List of builtin types from Itanium C++ ABI. +static const AbbrevPair kBuiltinTypeList[] = { + { "v", "void" }, + { "w", "wchar_t" }, + { "b", "bool" }, + { "c", "char" }, + { "a", "signed char" }, + { "h", "unsigned char" }, + { "s", "short" }, + { "t", "unsigned short" }, + { "i", "int" }, + { "j", "unsigned int" }, + { "l", "long" }, + { "m", "unsigned long" }, + { "x", "long long" }, + { "y", "unsigned long long" }, + { "n", "__int128" }, + { "o", "unsigned __int128" }, + { "f", "float" }, + { "d", "double" }, + { "e", "long double" }, + { "g", "__float128" }, + { "z", "ellipsis" }, + { NULL, NULL } +}; + +// List of substitutions Itanium C++ ABI. +static const AbbrevPair kSubstitutionList[] = { + { "St", "" }, + { "Sa", "allocator" }, + { "Sb", "basic_string" }, + // std::basic_string,std::allocator > + { "Ss", "string"}, + // std::basic_istream > + { "Si", "istream" }, + // std::basic_ostream > + { "So", "ostream" }, + // std::basic_iostream > + { "Sd", "iostream" }, + { NULL, NULL } +}; + +// State needed for demangling. +typedef struct { + const char *mangled_cur; // Cursor of mangled name. + char *out_cur; // Cursor of output string. + const char *out_begin; // Beginning of output string. + const char *out_end; // End of output string. + const char *prev_name; // For constructors/destructors. + int prev_name_length; // For constructors/destructors. + short nest_level; // For nested names. + bool append; // Append flag. + bool overflowed; // True if output gets overflowed. +} State; + +// We don't use strlen() in libc since it's not guaranteed to be async +// signal safe. +static size_t StrLen(const char *str) { + size_t len = 0; + while (*str != '\0') { + ++str; + ++len; + } + return len; +} + +// Returns true if "str" has at least "n" characters remaining. +static bool AtLeastNumCharsRemaining(const char *str, int n) { + for (int i = 0; i < n; ++i) { + if (str == '\0') { + return false; + } + } + return true; +} + +// Returns true if "str" has "prefix" as a prefix. +static bool StrPrefix(const char *str, const char *prefix) { + size_t i = 0; + while (str[i] != '\0' && prefix[i] != '\0' && + str[i] == prefix[i]) { + ++i; + } + return prefix[i] == '\0'; // Consumed everything in "prefix". +} + +static void InitState(State *state, const char *mangled, + char *out, int out_size) { + state->mangled_cur = mangled; + state->out_cur = out; + state->out_begin = out; + state->out_end = out + out_size; + state->prev_name = NULL; + state->prev_name_length = -1; + state->nest_level = -1; + state->append = true; + state->overflowed = false; +} + +// Returns true and advances "mangled_cur" if we find "one_char_token" +// at "mangled_cur" position. It is assumed that "one_char_token" does +// not contain '\0'. +static bool ParseOneCharToken(State *state, const char one_char_token) { + if (state->mangled_cur[0] == one_char_token) { + ++state->mangled_cur; + return true; + } + return false; +} + +// Returns true and advances "mangled_cur" if we find "two_char_token" +// at "mangled_cur" position. It is assumed that "two_char_token" does +// not contain '\0'. 
+static bool ParseTwoCharToken(State *state, const char *two_char_token) { + if (state->mangled_cur[0] == two_char_token[0] && + state->mangled_cur[1] == two_char_token[1]) { + state->mangled_cur += 2; + return true; + } + return false; +} + +// Returns true and advances "mangled_cur" if we find any character in +// "char_class" at "mangled_cur" position. +static bool ParseCharClass(State *state, const char *char_class) { + if (state->mangled_cur == '\0') { + return false; + } + const char *p = char_class; + for (; *p != '\0'; ++p) { + if (state->mangled_cur[0] == *p) { + ++state->mangled_cur; + return true; + } + } + return false; +} + +// This function is used for handling an optional non-terminal. +static bool Optional(bool status) { + return true; +} + +// This function is used for handling + syntax. +typedef bool (*ParseFunc)(State *); +static bool OneOrMore(ParseFunc parse_func, State *state) { + if (parse_func(state)) { + while (parse_func(state)) { + } + return true; + } + return false; +} + +// This function is used for handling * syntax. The function +// always returns true and must be followed by a termination token or a +// terminating sequence not handled by parse_func (e.g. +// ParseOneCharToken(state, 'E')). +static bool ZeroOrMore(ParseFunc parse_func, State *state) { + while (parse_func(state)) { + } + return true; +} + +// Append "str" at "out_cur". If there is an overflow, "overflowed" +// is set to true for later use. The output string is ensured to +// always terminate with '\0' as long as there is no overflow. +static void Append(State *state, const char * const str, const int length) { + int i; + for (i = 0; i < length; ++i) { + if (state->out_cur + 1 < state->out_end) { // +1 for '\0' + *state->out_cur = str[i]; + ++state->out_cur; + } else { + state->overflowed = true; + break; + } + } + if (!state->overflowed) { + *state->out_cur = '\0'; // Terminate it with '\0' + } +} + +// We don't use equivalents in libc to avoid locale issues. +static bool IsLower(char c) { + return c >= 'a' && c <= 'z'; +} + +static bool IsAlpha(char c) { + return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z'); +} + +static bool IsDigit(char c) { + return c >= '0' && c <= '9'; +} + +// Returns true if "str" is a function clone suffix. These suffixes are used +// by GCC 4.5.x and later versions to indicate functions which have been +// cloned during optimization. We treat any sequence (.+.+)+ as +// a function clone suffix. +static bool IsFunctionCloneSuffix(const char *str) { + size_t i = 0; + while (str[i] != '\0') { + // Consume a single .+.+ sequence. + if (str[i] != '.' || !IsAlpha(str[i + 1])) { + return false; + } + i += 2; + while (IsAlpha(str[i])) { + ++i; + } + if (str[i] != '.' || !IsDigit(str[i + 1])) { + return false; + } + i += 2; + while (IsDigit(str[i])) { + ++i; + } + } + return true; // Consumed everything in "str". +} + +// Append "str" with some tweaks, iff "append" state is true. +// Returns true so that it can be placed in "if" conditions. +static void MaybeAppendWithLength(State *state, const char * const str, + const int length) { + if (state->append && length > 0) { + // Append a space if the output buffer ends with '<' and "str" + // starts with '<' to avoid <<<. + if (str[0] == '<' && state->out_begin < state->out_cur && + state->out_cur[-1] == '<') { + Append(state, " ", 1); + } + // Remember the last identifier name for ctors/dtors. 
+ if (IsAlpha(str[0]) || str[0] == '_') { + state->prev_name = state->out_cur; + state->prev_name_length = length; + } + Append(state, str, length); + } +} + +// A convenient wrapper arount MaybeAppendWithLength(). +static bool MaybeAppend(State *state, const char * const str) { + if (state->append) { + int length = StrLen(str); + MaybeAppendWithLength(state, str, length); + } + return true; +} + +// This function is used for handling nested names. +static bool EnterNestedName(State *state) { + state->nest_level = 0; + return true; +} + +// This function is used for handling nested names. +static bool LeaveNestedName(State *state, short prev_value) { + state->nest_level = prev_value; + return true; +} + +// Disable the append mode not to print function parameters, etc. +static bool DisableAppend(State *state) { + state->append = false; + return true; +} + +// Restore the append mode to the previous state. +static bool RestoreAppend(State *state, bool prev_value) { + state->append = prev_value; + return true; +} + +// Increase the nest level for nested names. +static void MaybeIncreaseNestLevel(State *state) { + if (state->nest_level > -1) { + ++state->nest_level; + } +} + +// Appends :: for nested names if necessary. +static void MaybeAppendSeparator(State *state) { + if (state->nest_level >= 1) { + MaybeAppend(state, "::"); + } +} + +// Cancel the last separator if necessary. +static void MaybeCancelLastSeparator(State *state) { + if (state->nest_level >= 1 && state->append && + state->out_begin <= state->out_cur - 2) { + state->out_cur -= 2; + *state->out_cur = '\0'; + } +} + +// Returns true if the identifier of the given length pointed to by +// "mangled_cur" is anonymous namespace. +static bool IdentifierIsAnonymousNamespace(State *state, int length) { + static const char anon_prefix[] = "_GLOBAL__N_"; + return (length > sizeof(anon_prefix) - 1 && // Should be longer. + StrPrefix(state->mangled_cur, anon_prefix)); +} + +// Forward declarations of our parsing functions. 
+static bool ParseMangledName(State *state); +static bool ParseEncoding(State *state); +static bool ParseName(State *state); +static bool ParseUnscopedName(State *state); +static bool ParseUnscopedTemplateName(State *state); +static bool ParseNestedName(State *state); +static bool ParsePrefix(State *state); +static bool ParseUnqualifiedName(State *state); +static bool ParseSourceName(State *state); +static bool ParseLocalSourceName(State *state); +static bool ParseNumber(State *state, int *number_out); +static bool ParseFloatNumber(State *state); +static bool ParseSeqId(State *state); +static bool ParseIdentifier(State *state, int length); +static bool ParseOperatorName(State *state); +static bool ParseSpecialName(State *state); +static bool ParseCallOffset(State *state); +static bool ParseNVOffset(State *state); +static bool ParseVOffset(State *state); +static bool ParseCtorDtorName(State *state); +static bool ParseType(State *state); +static bool ParseCVQualifiers(State *state); +static bool ParseBuiltinType(State *state); +static bool ParseFunctionType(State *state); +static bool ParseBareFunctionType(State *state); +static bool ParseClassEnumType(State *state); +static bool ParseArrayType(State *state); +static bool ParsePointerToMemberType(State *state); +static bool ParseTemplateParam(State *state); +static bool ParseTemplateTemplateParam(State *state); +static bool ParseTemplateArgs(State *state); +static bool ParseTemplateArg(State *state); +static bool ParseExpression(State *state); +static bool ParseExprPrimary(State *state); +static bool ParseLocalName(State *state); +static bool ParseDiscriminator(State *state); +static bool ParseSubstitution(State *state); + +// Implementation note: the following code is a straightforward +// translation of the Itanium C++ ABI defined in BNF with a couple of +// exceptions. +// +// - Support GNU extensions not defined in the Itanium C++ ABI +// - and are combined to avoid infinite loop +// - Reorder patterns to shorten the code +// - Reorder patterns to give greedier functions precedence +// We'll mark "Less greedy than" for these cases in the code +// +// Each parsing function changes the state and returns true on +// success. Otherwise, don't change the state and returns false. To +// ensure that the state isn't changed in the latter case, we save the +// original state before we call more than one parsing functions +// consecutively with &&, and restore the state if unsuccessful. See +// ParseEncoding() as an example of this convention. We follow the +// convention throughout the code. +// +// Originally we tried to do demangling without following the full ABI +// syntax but it turned out we needed to follow the full syntax to +// parse complicated cases like nested template arguments. Note that +// implementing a full-fledged demangler isn't trivial (libiberty's +// cp-demangle.c has +4300 lines). +// +// Note that (foo) in <(foo) ...> is a modifier to be ignored. 
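The convention spelled out just above, where every Parse* function either consumes input and returns true or leaves the State untouched and returns false, with multi-part rules snapshotting the State and rolling back, is the backbone of the whole demangler. A toy standalone illustration of that shape (not glog code):

struct ToyState {
  const char *cur;                 // cursor into the input
};

static bool ToyParseChar(ToyState *state, char c) {
  if (*state->cur == c) { ++state->cur; return true; }
  return false;                    // state unchanged on failure
}

// <pair> ::= 'A' 'B' -- two sub-parses chained with &&, so the original
// state is saved first and restored if the second half fails, the same
// pattern ParseEncoding() below uses.
static bool ToyParsePair(ToyState *state) {
  ToyState copy = *state;
  if (ToyParseChar(state, 'A') && ToyParseChar(state, 'B')) {
    return true;
  }
  *state = copy;                   // roll back the partial consumption of 'A'
  return false;
}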
+// +// Reference: +// - Itanium C++ ABI +// + +// ::= _Z +static bool ParseMangledName(State *state) { + return ParseTwoCharToken(state, "_Z") && ParseEncoding(state); +} + +// ::= <(function) name> +// ::= <(data) name> +// ::= +static bool ParseEncoding(State *state) { + State copy = *state; + if (ParseName(state) && ParseBareFunctionType(state)) { + return true; + } + *state = copy; + + if (ParseName(state) || ParseSpecialName(state)) { + return true; + } + return false; +} + +// ::= +// ::= +// ::= +// ::= +static bool ParseName(State *state) { + if (ParseNestedName(state) || ParseLocalName(state)) { + return true; + } + + State copy = *state; + if (ParseUnscopedTemplateName(state) && + ParseTemplateArgs(state)) { + return true; + } + *state = copy; + + // Less greedy than . + if (ParseUnscopedName(state)) { + return true; + } + return false; +} + +// ::= +// ::= St +static bool ParseUnscopedName(State *state) { + if (ParseUnqualifiedName(state)) { + return true; + } + + State copy = *state; + if (ParseTwoCharToken(state, "St") && + MaybeAppend(state, "std::") && + ParseUnqualifiedName(state)) { + return true; + } + *state = copy; + return false; +} + +// ::= +// ::= +static bool ParseUnscopedTemplateName(State *state) { + return ParseUnscopedName(state) || ParseSubstitution(state); +} + +// ::= N [] E +// ::= N [] E +static bool ParseNestedName(State *state) { + State copy = *state; + if (ParseOneCharToken(state, 'N') && + EnterNestedName(state) && + Optional(ParseCVQualifiers(state)) && + ParsePrefix(state) && + LeaveNestedName(state, copy.nest_level) && + ParseOneCharToken(state, 'E')) { + return true; + } + *state = copy; + return false; +} + +// This part is tricky. If we literally translate them to code, we'll +// end up infinite loop. Hence we merge them to avoid the case. +// +// ::= +// ::= +// ::= +// ::= +// ::= # empty +// ::= <(template) unqualified-name> +// ::= +// ::= +static bool ParsePrefix(State *state) { + bool has_something = false; + while (true) { + MaybeAppendSeparator(state); + if (ParseTemplateParam(state) || + ParseSubstitution(state) || + ParseUnscopedName(state)) { + has_something = true; + MaybeIncreaseNestLevel(state); + continue; + } + MaybeCancelLastSeparator(state); + if (has_something && ParseTemplateArgs(state)) { + return ParsePrefix(state); + } else { + break; + } + } + return true; +} + +// ::= +// ::= +// ::= +// ::= +static bool ParseUnqualifiedName(State *state) { + return (ParseOperatorName(state) || + ParseCtorDtorName(state) || + ParseSourceName(state) || + ParseLocalSourceName(state)); +} + +// ::= +static bool ParseSourceName(State *state) { + State copy = *state; + int length = -1; + if (ParseNumber(state, &length) && ParseIdentifier(state, length)) { + return true; + } + *state = copy; + return false; +} + +// ::= L [] +// +// References: +// http://gcc.gnu.org/bugzilla/show_bug.cgi?id=31775 +// http://gcc.gnu.org/viewcvs?view=rev&revision=124467 +static bool ParseLocalSourceName(State *state) { + State copy = *state; + if (ParseOneCharToken(state, 'L') && ParseSourceName(state) && + Optional(ParseDiscriminator(state))) { + return true; + } + *state = copy; + return false; +} + +// ::= [n] +// If "number_out" is non-null, then *number_out is set to the value of the +// parsed number on success. 
+static bool ParseNumber(State *state, int *number_out) { + int sign = 1; + if (ParseOneCharToken(state, 'n')) { + sign = -1; + } + const char *p = state->mangled_cur; + int number = 0; + for (;*p != '\0'; ++p) { + if (IsDigit(*p)) { + number = number * 10 + (*p - '0'); + } else { + break; + } + } + if (p != state->mangled_cur) { // Conversion succeeded. + state->mangled_cur = p; + if (number_out != NULL) { + *number_out = number * sign; + } + return true; + } + return false; +} + +// Floating-point literals are encoded using a fixed-length lowercase +// hexadecimal string. +static bool ParseFloatNumber(State *state) { + const char *p = state->mangled_cur; + for (;*p != '\0'; ++p) { + if (!IsDigit(*p) && !(*p >= 'a' && *p <= 'f')) { + break; + } + } + if (p != state->mangled_cur) { // Conversion succeeded. + state->mangled_cur = p; + return true; + } + return false; +} + +// The is a sequence number in base 36, +// using digits and upper case letters +static bool ParseSeqId(State *state) { + const char *p = state->mangled_cur; + for (;*p != '\0'; ++p) { + if (!IsDigit(*p) && !(*p >= 'A' && *p <= 'Z')) { + break; + } + } + if (p != state->mangled_cur) { // Conversion succeeded. + state->mangled_cur = p; + return true; + } + return false; +} + +// ::= (of given length) +static bool ParseIdentifier(State *state, int length) { + if (length == -1 || + !AtLeastNumCharsRemaining(state->mangled_cur, length)) { + return false; + } + if (IdentifierIsAnonymousNamespace(state, length)) { + MaybeAppend(state, "(anonymous namespace)"); + } else { + MaybeAppendWithLength(state, state->mangled_cur, length); + } + state->mangled_cur += length; + return true; +} + +// ::= nw, and other two letters cases +// ::= cv # (cast) +// ::= v # vendor extended operator +static bool ParseOperatorName(State *state) { + if (!AtLeastNumCharsRemaining(state->mangled_cur, 2)) { + return false; + } + // First check with "cv" (cast) case. + State copy = *state; + if (ParseTwoCharToken(state, "cv") && + MaybeAppend(state, "operator ") && + EnterNestedName(state) && + ParseType(state) && + LeaveNestedName(state, copy.nest_level)) { + return true; + } + *state = copy; + + // Then vendor extended operators. + if (ParseOneCharToken(state, 'v') && ParseCharClass(state, "0123456789") && + ParseSourceName(state)) { + return true; + } + *state = copy; + + // Other operator names should start with a lower alphabet followed + // by a lower/upper alphabet. + if (!(IsLower(state->mangled_cur[0]) && + IsAlpha(state->mangled_cur[1]))) { + return false; + } + // We may want to perform a binary search if we really need speed. + const AbbrevPair *p; + for (p = kOperatorList; p->abbrev != NULL; ++p) { + if (state->mangled_cur[0] == p->abbrev[0] && + state->mangled_cur[1] == p->abbrev[1]) { + MaybeAppend(state, "operator"); + if (IsLower(*p->real_name)) { // new, delete, etc. + MaybeAppend(state, " "); + } + MaybeAppend(state, p->real_name); + state->mangled_cur += 2; + return true; + } + } + return false; +} + +// ::= TV +// ::= TT +// ::= TI +// ::= TS +// ::= Tc <(base) encoding> +// ::= GV <(object) name> +// ::= T <(base) encoding> +// G++ extensions: +// ::= TC <(offset) number> _ <(base) type> +// ::= TF +// ::= TJ +// ::= GR +// ::= GA +// ::= Th <(base) encoding> +// ::= Tv <(base) encoding> +// +// Note: we don't care much about them since they don't appear in +// stack traces. The are special data. 
+static bool ParseSpecialName(State *state) { + State copy = *state; + if (ParseOneCharToken(state, 'T') && + ParseCharClass(state, "VTIS") && + ParseType(state)) { + return true; + } + *state = copy; + + if (ParseTwoCharToken(state, "Tc") && ParseCallOffset(state) && + ParseCallOffset(state) && ParseEncoding(state)) { + return true; + } + *state = copy; + + if (ParseTwoCharToken(state, "GV") && + ParseName(state)) { + return true; + } + *state = copy; + + if (ParseOneCharToken(state, 'T') && ParseCallOffset(state) && + ParseEncoding(state)) { + return true; + } + *state = copy; + + // G++ extensions + if (ParseTwoCharToken(state, "TC") && ParseType(state) && + ParseNumber(state, NULL) && ParseOneCharToken(state, '_') && + DisableAppend(state) && + ParseType(state)) { + RestoreAppend(state, copy.append); + return true; + } + *state = copy; + + if (ParseOneCharToken(state, 'T') && ParseCharClass(state, "FJ") && + ParseType(state)) { + return true; + } + *state = copy; + + if (ParseTwoCharToken(state, "GR") && ParseName(state)) { + return true; + } + *state = copy; + + if (ParseTwoCharToken(state, "GA") && ParseEncoding(state)) { + return true; + } + *state = copy; + + if (ParseOneCharToken(state, 'T') && ParseCharClass(state, "hv") && + ParseCallOffset(state) && ParseEncoding(state)) { + return true; + } + *state = copy; + return false; +} + +// ::= h _ +// ::= v _ +static bool ParseCallOffset(State *state) { + State copy = *state; + if (ParseOneCharToken(state, 'h') && + ParseNVOffset(state) && ParseOneCharToken(state, '_')) { + return true; + } + *state = copy; + + if (ParseOneCharToken(state, 'v') && + ParseVOffset(state) && ParseOneCharToken(state, '_')) { + return true; + } + *state = copy; + + return false; +} + +// ::= <(offset) number> +static bool ParseNVOffset(State *state) { + return ParseNumber(state, NULL); +} + +// ::= <(offset) number> _ <(virtual offset) number> +static bool ParseVOffset(State *state) { + State copy = *state; + if (ParseNumber(state, NULL) && ParseOneCharToken(state, '_') && + ParseNumber(state, NULL)) { + return true; + } + *state = copy; + return false; +} + +// ::= C1 | C2 | C3 +// ::= D0 | D1 | D2 +static bool ParseCtorDtorName(State *state) { + State copy = *state; + if (ParseOneCharToken(state, 'C') && + ParseCharClass(state, "123")) { + const char * const prev_name = state->prev_name; + const int prev_name_length = state->prev_name_length; + MaybeAppendWithLength(state, prev_name, prev_name_length); + return true; + } + *state = copy; + + if (ParseOneCharToken(state, 'D') && + ParseCharClass(state, "012")) { + const char * const prev_name = state->prev_name; + const int prev_name_length = state->prev_name_length; + MaybeAppend(state, "~"); + MaybeAppendWithLength(state, prev_name, prev_name_length); + return true; + } + *state = copy; + return false; +} + +// ::= +// ::= P # pointer-to +// ::= R # reference-to +// ::= O # rvalue reference-to (C++0x) +// ::= C # complex pair (C 2000) +// ::= G # imaginary (C 2000) +// ::= U # vendor extended type qualifier +// ::= +// ::= +// ::= +// ::= +// ::= +// ::= +// ::= +// ::= +// ::= Dp # pack expansion of (C++0x) +// ::= Dt E # decltype of an id-expression or class +// # member access (C++0x) +// ::= DT E # decltype of an expression (C++0x) +// +static bool ParseType(State *state) { + // We should check CV-qualifers, and PRGC things first. 
+ State copy = *state; + if (ParseCVQualifiers(state) && ParseType(state)) { + return true; + } + *state = copy; + + if (ParseCharClass(state, "OPRCG") && ParseType(state)) { + return true; + } + *state = copy; + + if (ParseTwoCharToken(state, "Dp") && ParseType(state)) { + return true; + } + *state = copy; + + if (ParseOneCharToken(state, 'D') && ParseCharClass(state, "tT") && + ParseExpression(state) && ParseOneCharToken(state, 'E')) { + return true; + } + *state = copy; + + if (ParseOneCharToken(state, 'U') && ParseSourceName(state) && + ParseType(state)) { + return true; + } + *state = copy; + + if (ParseBuiltinType(state) || + ParseFunctionType(state) || + ParseClassEnumType(state) || + ParseArrayType(state) || + ParsePointerToMemberType(state) || + ParseSubstitution(state)) { + return true; + } + + if (ParseTemplateTemplateParam(state) && + ParseTemplateArgs(state)) { + return true; + } + *state = copy; + + // Less greedy than . + if (ParseTemplateParam(state)) { + return true; + } + + return false; +} + +// ::= [r] [V] [K] +// We don't allow empty to avoid infinite loop in +// ParseType(). +static bool ParseCVQualifiers(State *state) { + int num_cv_qualifiers = 0; + num_cv_qualifiers += ParseOneCharToken(state, 'r'); + num_cv_qualifiers += ParseOneCharToken(state, 'V'); + num_cv_qualifiers += ParseOneCharToken(state, 'K'); + return num_cv_qualifiers > 0; +} + +// ::= v, etc. +// ::= u +static bool ParseBuiltinType(State *state) { + const AbbrevPair *p; + for (p = kBuiltinTypeList; p->abbrev != NULL; ++p) { + if (state->mangled_cur[0] == p->abbrev[0]) { + MaybeAppend(state, p->real_name); + ++state->mangled_cur; + return true; + } + } + + State copy = *state; + if (ParseOneCharToken(state, 'u') && ParseSourceName(state)) { + return true; + } + *state = copy; + return false; +} + +// ::= F [Y] E +static bool ParseFunctionType(State *state) { + State copy = *state; + if (ParseOneCharToken(state, 'F') && + Optional(ParseOneCharToken(state, 'Y')) && + ParseBareFunctionType(state) && ParseOneCharToken(state, 'E')) { + return true; + } + *state = copy; + return false; +} + +// ::= <(signature) type>+ +static bool ParseBareFunctionType(State *state) { + State copy = *state; + DisableAppend(state); + if (OneOrMore(ParseType, state)) { + RestoreAppend(state, copy.append); + MaybeAppend(state, "()"); + return true; + } + *state = copy; + return false; +} + +// ::= +static bool ParseClassEnumType(State *state) { + return ParseName(state); +} + +// ::= A <(positive dimension) number> _ <(element) type> +// ::= A [<(dimension) expression>] _ <(element) type> +static bool ParseArrayType(State *state) { + State copy = *state; + if (ParseOneCharToken(state, 'A') && ParseNumber(state, NULL) && + ParseOneCharToken(state, '_') && ParseType(state)) { + return true; + } + *state = copy; + + if (ParseOneCharToken(state, 'A') && Optional(ParseExpression(state)) && + ParseOneCharToken(state, '_') && ParseType(state)) { + return true; + } + *state = copy; + return false; +} + +// ::= M <(class) type> <(member) type> +static bool ParsePointerToMemberType(State *state) { + State copy = *state; + if (ParseOneCharToken(state, 'M') && ParseType(state) && + ParseType(state)) { + return true; + } + *state = copy; + return false; +} + +// ::= T_ +// ::= T _ +static bool ParseTemplateParam(State *state) { + if (ParseTwoCharToken(state, "T_")) { + MaybeAppend(state, "?"); // We don't support template substitutions. 
+ return true; + } + + State copy = *state; + if (ParseOneCharToken(state, 'T') && ParseNumber(state, NULL) && + ParseOneCharToken(state, '_')) { + MaybeAppend(state, "?"); // We don't support template substitutions. + return true; + } + *state = copy; + return false; +} + + +// ::= +// ::= +static bool ParseTemplateTemplateParam(State *state) { + return (ParseTemplateParam(state) || + ParseSubstitution(state)); +} + +// ::= I + E +static bool ParseTemplateArgs(State *state) { + State copy = *state; + DisableAppend(state); + if (ParseOneCharToken(state, 'I') && + OneOrMore(ParseTemplateArg, state) && + ParseOneCharToken(state, 'E')) { + RestoreAppend(state, copy.append); + MaybeAppend(state, "<>"); + return true; + } + *state = copy; + return false; +} + +// ::= +// ::= +// ::= I * E # argument pack +// ::= X E +static bool ParseTemplateArg(State *state) { + State copy = *state; + if (ParseOneCharToken(state, 'I') && + ZeroOrMore(ParseTemplateArg, state) && + ParseOneCharToken(state, 'E')) { + return true; + } + *state = copy; + + if (ParseType(state) || + ParseExprPrimary(state)) { + return true; + } + *state = copy; + + if (ParseOneCharToken(state, 'X') && ParseExpression(state) && + ParseOneCharToken(state, 'E')) { + return true; + } + *state = copy; + return false; +} + +// ::= +// ::= +// ::= +// ::= +// ::= +// +// ::= st +// ::= sr +// ::= sr +static bool ParseExpression(State *state) { + if (ParseTemplateParam(state) || ParseExprPrimary(state)) { + return true; + } + + State copy = *state; + if (ParseOperatorName(state) && + ParseExpression(state) && + ParseExpression(state) && + ParseExpression(state)) { + return true; + } + *state = copy; + + if (ParseOperatorName(state) && + ParseExpression(state) && + ParseExpression(state)) { + return true; + } + *state = copy; + + if (ParseOperatorName(state) && + ParseExpression(state)) { + return true; + } + *state = copy; + + if (ParseTwoCharToken(state, "st") && ParseType(state)) { + return true; + } + *state = copy; + + if (ParseTwoCharToken(state, "sr") && ParseType(state) && + ParseUnqualifiedName(state) && + ParseTemplateArgs(state)) { + return true; + } + *state = copy; + + if (ParseTwoCharToken(state, "sr") && ParseType(state) && + ParseUnqualifiedName(state)) { + return true; + } + *state = copy; + return false; +} + +// ::= L <(value) number> E +// ::= L <(value) float> E +// ::= L E +// // A bug in g++'s C++ ABI version 2 (-fabi-version=2). 
+// ::= LZ E +static bool ParseExprPrimary(State *state) { + State copy = *state; + if (ParseOneCharToken(state, 'L') && ParseType(state) && + ParseNumber(state, NULL) && + ParseOneCharToken(state, 'E')) { + return true; + } + *state = copy; + + if (ParseOneCharToken(state, 'L') && ParseType(state) && + ParseFloatNumber(state) && + ParseOneCharToken(state, 'E')) { + return true; + } + *state = copy; + + if (ParseOneCharToken(state, 'L') && ParseMangledName(state) && + ParseOneCharToken(state, 'E')) { + return true; + } + *state = copy; + + if (ParseTwoCharToken(state, "LZ") && ParseEncoding(state) && + ParseOneCharToken(state, 'E')) { + return true; + } + *state = copy; + + return false; +} + +// := Z <(function) encoding> E <(entity) name> +// [] +// := Z <(function) encoding> E s [] +static bool ParseLocalName(State *state) { + State copy = *state; + if (ParseOneCharToken(state, 'Z') && ParseEncoding(state) && + ParseOneCharToken(state, 'E') && MaybeAppend(state, "::") && + ParseName(state) && Optional(ParseDiscriminator(state))) { + return true; + } + *state = copy; + + if (ParseOneCharToken(state, 'Z') && ParseEncoding(state) && + ParseTwoCharToken(state, "Es") && Optional(ParseDiscriminator(state))) { + return true; + } + *state = copy; + return false; +} + +// := _ <(non-negative) number> +static bool ParseDiscriminator(State *state) { + State copy = *state; + if (ParseOneCharToken(state, '_') && ParseNumber(state, NULL)) { + return true; + } + *state = copy; + return false; +} + +// ::= S_ +// ::= S _ +// ::= St, etc. +static bool ParseSubstitution(State *state) { + if (ParseTwoCharToken(state, "S_")) { + MaybeAppend(state, "?"); // We don't support substitutions. + return true; + } + + State copy = *state; + if (ParseOneCharToken(state, 'S') && ParseSeqId(state) && + ParseOneCharToken(state, '_')) { + MaybeAppend(state, "?"); // We don't support substitutions. + return true; + } + *state = copy; + + // Expand abbreviations like "St" => "std". + if (ParseOneCharToken(state, 'S')) { + const AbbrevPair *p; + for (p = kSubstitutionList; p->abbrev != NULL; ++p) { + if (state->mangled_cur[0] == p->abbrev[1]) { + MaybeAppend(state, "std"); + if (p->real_name[0] != '\0') { + MaybeAppend(state, "::"); + MaybeAppend(state, p->real_name); + } + ++state->mangled_cur; + return true; + } + } + } + *state = copy; + return false; +} + +// Parse , optionally followed by either a function-clone suffix +// or version suffix. Returns true only if all of "mangled_cur" was consumed. +static bool ParseTopLevelMangledName(State *state) { + if (ParseMangledName(state)) { + if (state->mangled_cur[0] != '\0') { + // Drop trailing function clone suffix, if any. + if (IsFunctionCloneSuffix(state->mangled_cur)) { + return true; + } + // Append trailing version suffix if any. + // ex. _Z3foo@@GLIBCXX_3.4 + if (state->mangled_cur[0] == '@') { + MaybeAppend(state, state->mangled_cur); + return true; + } + return false; // Unconsumed suffix. + } + return true; + } + return false; +} + +// The demangler entry point. 
+bool Demangle(const char *mangled, char *out, int out_size) { + State state; + InitState(&state, mangled, out, out_size); + return ParseTopLevelMangledName(&state) && !state.overflowed; +} + +_END_GOOGLE_NAMESPACE_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/demangle.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/demangle.h new file mode 100644 index 0000000000..9c7591527c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/demangle.h @@ -0,0 +1,84 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Satoru Takabayashi +// +// An async-signal-safe and thread-safe demangler for Itanium C++ ABI +// (aka G++ V3 ABI). + +// The demangler is implemented to be used in async signal handlers to +// symbolize stack traces. We cannot use libstdc++'s +// abi::__cxa_demangle() in such signal handlers since it's not async +// signal safe (it uses malloc() internally). +// +// Note that this demangler doesn't support full demangling. More +// specifically, it doesn't print types of function parameters and +// types of template arguments. It just skips them. However, it's +// still very useful to extract basic information such as class, +// function, constructor, destructor, and operator names. +// +// See the implementation note in demangle.cc if you are interested. +// +// Example: +// +// | Mangled Name | The Demangler | abi::__cxa_demangle() +// |---------------|---------------|----------------------- +// | _Z1fv | f() | f() +// | _Z1fi | f() | f(int) +// | _Z3foo3bar | foo() | foo(bar) +// | _Z1fIiEvi | f<>() | void f(int) +// | _ZN1N1fE | N::f | N::f +// | _ZN3Foo3BarEv | Foo::Bar() | Foo::Bar() +// | _Zrm1XS_" | operator%() | operator%(X, X) +// | _ZN3FooC1Ev | Foo::Foo() | Foo::Foo() +// | _Z1fSs | f() | f(std::basic_string, +// | | | std::allocator >) +// +// See the unit test for more examples. +// +// Note: we might want to write demanglers for ABIs other than Itanium +// C++ ABI in the future. 
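[Editor's note] A rough usage sketch for the Demangle() entry point declared above (not part of the patch; the include path and the GOOGLE_NAMESPACE macro follow the unit test added further down). The output buffer is caller-provided and fixed-size; per the corner-case unit test in this patch, "foobar()" needs 9 bytes (8 characters plus the terminating NUL) and Demangle() returns false when out_size is too small:

#include <cstdio>
#include "demangle.h"  // declares GOOGLE_NAMESPACE::Demangle()

int main() {
  char out[64];
  if (GOOGLE_NAMESPACE::Demangle("_Z6foobarv", out, sizeof(out))) {
    std::printf("%s\n", out);   // expected: foobar()
  } else {
    std::printf("not a mangled C++ name\n");
  }
  return 0;
}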
+// + +#ifndef BASE_DEMANGLE_H_ +#define BASE_DEMANGLE_H_ + +#include "config.h" + +_START_GOOGLE_NAMESPACE_ + +// Demangle "mangled". On success, return true and write the +// demangled symbol name to "out". Otherwise, return false. +// "out" is modified even if demangling is unsuccessful. +bool Demangle(const char *mangled, char *out, int out_size); + +_END_GOOGLE_NAMESPACE_ + +#endif // BASE_DEMANGLE_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/demangle_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/demangle_unittest.cc new file mode 100644 index 0000000000..9d219e6531 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/demangle_unittest.cc @@ -0,0 +1,142 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Satoru Takabayashi +// +// Unit tests for functions in demangle.c. + +#include "utilities.h" + +#include +#include +#include +#include "glog/logging.h" +#include "demangle.h" +#include "googletest.h" +#include "config.h" + +GLOG_DEFINE_bool(demangle_filter, false, + "Run demangle_unittest in filter mode"); + +using namespace std; +using namespace GOOGLE_NAMESPACE; + +// A wrapper function for Demangle() to make the unit test simple. +static const char *DemangleIt(const char * const mangled) { + static char demangled[4096]; + if (Demangle(mangled, demangled, sizeof(demangled))) { + return demangled; + } else { + return mangled; + } +} + +// Test corner cases of bounary conditions. +TEST(Demangle, CornerCases) { + char tmp[10]; + EXPECT_TRUE(Demangle("_Z6foobarv", tmp, sizeof(tmp))); + // sizeof("foobar()") == 9 + EXPECT_STREQ("foobar()", tmp); + EXPECT_TRUE(Demangle("_Z6foobarv", tmp, 9)); + EXPECT_STREQ("foobar()", tmp); + EXPECT_FALSE(Demangle("_Z6foobarv", tmp, 8)); // Not enough. + EXPECT_FALSE(Demangle("_Z6foobarv", tmp, 1)); + EXPECT_FALSE(Demangle("_Z6foobarv", tmp, 0)); + EXPECT_FALSE(Demangle("_Z6foobarv", NULL, 0)); // Should not cause SEGV. 
+} + +// Test handling of functions suffixed with .clone.N, which is used by GCC +// 4.5.x, and .constprop.N and .isra.N, which are used by GCC 4.6.x. These +// suffixes are used to indicate functions which have been cloned during +// optimization. We ignore these suffixes. +TEST(Demangle, Clones) { + char tmp[20]; + EXPECT_TRUE(Demangle("_ZL3Foov", tmp, sizeof(tmp))); + EXPECT_STREQ("Foo()", tmp); + EXPECT_TRUE(Demangle("_ZL3Foov.clone.3", tmp, sizeof(tmp))); + EXPECT_STREQ("Foo()", tmp); + EXPECT_TRUE(Demangle("_ZL3Foov.constprop.80", tmp, sizeof(tmp))); + EXPECT_STREQ("Foo()", tmp); + EXPECT_TRUE(Demangle("_ZL3Foov.isra.18", tmp, sizeof(tmp))); + EXPECT_STREQ("Foo()", tmp); + EXPECT_TRUE(Demangle("_ZL3Foov.isra.2.constprop.18", tmp, sizeof(tmp))); + EXPECT_STREQ("Foo()", tmp); + // Invalid (truncated), should not demangle. + EXPECT_FALSE(Demangle("_ZL3Foov.clo", tmp, sizeof(tmp))); + // Invalid (.clone. not followed by number), should not demangle. + EXPECT_FALSE(Demangle("_ZL3Foov.clone.", tmp, sizeof(tmp))); + // Invalid (.clone. followed by non-number), should not demangle. + EXPECT_FALSE(Demangle("_ZL3Foov.clone.foo", tmp, sizeof(tmp))); + // Invalid (.constprop. not followed by number), should not demangle. + EXPECT_FALSE(Demangle("_ZL3Foov.isra.2.constprop.", tmp, sizeof(tmp))); +} + +TEST(Demangle, FromFile) { + string test_file = FLAGS_test_srcdir + "/src/demangle_unittest.txt"; + ifstream f(test_file.c_str()); // The file should exist. + EXPECT_FALSE(f.fail()); + + string line; + while (getline(f, line)) { + // Lines start with '#' are considered as comments. + if (line.empty() || line[0] == '#') { + continue; + } + // Each line should contain a mangled name and a demangled name + // separated by '\t'. Example: "_Z3foo\tfoo" + string::size_type tab_pos = line.find('\t'); + EXPECT_NE(string::npos, tab_pos); + string mangled = line.substr(0, tab_pos); + string demangled = line.substr(tab_pos + 1); + EXPECT_EQ(demangled, DemangleIt(mangled.c_str())); + } +} + +int main(int argc, char **argv) { +#ifdef HAVE_LIB_GFLAGS + ParseCommandLineFlags(&argc, &argv, true); +#endif + InitGoogleTest(&argc, argv); + + FLAGS_logtostderr = true; + InitGoogleLogging(argv[0]); + if (FLAGS_demangle_filter) { + // Read from cin and write to cout. + string line; + while (getline(cin, line, '\n')) { + cout << DemangleIt(line.c_str()) << endl; + } + return 0; + } else if (argc > 1) { + cout << DemangleIt(argv[1]) << endl; + return 0; + } else { + return RUN_ALL_TESTS(); + } +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/demangle_unittest.sh b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/demangle_unittest.sh new file mode 100644 index 0000000000..91deee2198 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/demangle_unittest.sh @@ -0,0 +1,95 @@ +#! /bin/sh +# +# Copyright (c) 2006, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Author: Satoru Takabayashi +# +# Unit tests for demangle.c with a real binary. + +set -e + +die () { + echo $1 + exit 1 +} + +BINDIR=".libs" +LIBGLOG="$BINDIR/libglog.so" + +DEMANGLER="$BINDIR/demangle_unittest" + +if test -e "$DEMANGLER"; then + # We need shared object. + export LD_LIBRARY_PATH=$BINDIR + export DYLD_LIBRARY_PATH=$BINDIR +else + # For windows + DEMANGLER="./demangle_unittest.exe" + if ! test -e "$DEMANGLER"; then + echo "We coundn't find demangle_unittest binary." + exit 1 + fi +fi + +# Extract C++ mangled symbols from libbase.so. +NM_OUTPUT="demangle.nm" +nm "$LIBGLOG" | perl -nle 'print $1 if /\s(_Z\S+$)/' > "$NM_OUTPUT" + +# Check if mangled symbols exist. If there are none, we quit. +# The binary is more likely compiled with GCC 2.95 or something old. +if ! grep --quiet '^_Z' "$NM_OUTPUT"; then + echo "PASS" + exit 0 +fi + +# Demangle the symbols using our demangler. +DM_OUTPUT="demangle.dm" +GLOG_demangle_filter=1 "$DEMANGLER" --demangle_filter < "$NM_OUTPUT" > "$DM_OUTPUT" + +# Calculate the numbers of lines. +NM_LINES=`wc -l "$NM_OUTPUT" | awk '{ print $1 }'` +DM_LINES=`wc -l "$DM_OUTPUT" | awk '{ print $1 }'` + +# Compare the numbers of lines. They must be the same. +if test "$NM_LINES" != "$DM_LINES"; then + die "$NM_OUTPUT and $DM_OUTPUT don't have the same numbers of lines" +fi + +# Check if mangled symbols exist. They must not exist. +if grep --quiet '^_Z' "$DM_OUTPUT"; then + MANGLED=`grep '^_Z' "$DM_OUTPUT" | wc -l | awk '{ print \$1 }'` + echo "Mangled symbols ($MANGLED out of $NM_LINES) found in $DM_OUTPUT:" + grep '^_Z' "$DM_OUTPUT" + die "Mangled symbols ($MANGLED out of $NM_LINES) found in $DM_OUTPUT" +fi + +# All C++ symbols are demangled successfully. +echo "PASS" +exit 0 diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/demangle_unittest.txt b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/demangle_unittest.txt new file mode 100644 index 0000000000..4e23c65b2d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/demangle_unittest.txt @@ -0,0 +1,137 @@ +# Test caces for demangle_unittest. Each line consists of a +# tab-separated pair of mangled and demangled symbol names. + +# Constructors and destructors. +_ZN3FooC1Ev Foo::Foo() +_ZN3FooD1Ev Foo::~Foo() +_ZNSoD0Ev std::ostream::~ostream() + +# G++ extensions. 
+_ZTCN10LogMessage9LogStreamE0_So LogMessage::LogStream +_ZTv0_n12_N10LogMessage9LogStreamD0Ev LogMessage::LogStream::~LogStream() +_ZThn4_N7icu_3_410UnicodeSetD0Ev icu_3_4::UnicodeSet::~UnicodeSet() + +# A bug in g++'s C++ ABI version 2 (-fabi-version=2). +_ZN7NSSInfoI5groupjjXadL_Z10getgrgid_rEELZ19nss_getgrgid_r_nameEEC1Ei NSSInfo<>::NSSInfo() + +# C linkage symbol names. Should keep them untouched. +main main +Demangle Demangle +_ZERO _ZERO + +# Cast operator. +_Zcviv operator int() +_ZN3foocviEv foo::operator int() + +# Versioned symbols. +_Z3Foo@GLIBCXX_3.4 Foo@GLIBCXX_3.4 +_Z3Foo@@GLIBCXX_3.4 Foo@@GLIBCXX_3.4 + +# Abbreviations. +_ZNSaE std::allocator +_ZNSbE std::basic_string +_ZNSdE std::iostream +_ZNSiE std::istream +_ZNSoE std::ostream +_ZNSsE std::string + +# Substitutions. We just replace them with ?. +_ZN3fooS_E foo::? +_ZN3foo3barS0_E foo::bar::? +_ZNcvT_IiEEv operator ?<>() + +# "<< <" case. +_ZlsI3fooE operator<< <> + +# Random things we found interesting. +_ZN3FooISt6vectorISsSaISsEEEclEv Foo<>::operator()() +_ZTI9Callback1IiE Callback1<> +_ZN7icu_3_47UMemorynwEj icu_3_4::UMemory::operator new() +_ZNSt6vectorIbE9push_backE std::vector<>::push_back +_ZNSt6vectorIbSaIbEE9push_backEb std::vector<>::push_back() +_ZlsRSoRK15PRIVATE_Counter operator<<() +_ZSt6fill_nIPPN9__gnu_cxx15_Hashtable_nodeISt4pairIKPKcjEEEjS8_ET_SA_T0_RKT1_ std::fill_n<>() +_ZZ3FoovE3Bar Foo()::Bar +_ZGVZ7UpTimervE8up_timer UpTimer()::up_timer + +# Test cases from gcc-4.1.0/libstdc++-v3/testsuite/demangle. +# Collected by: +# % grep verify_demangle **/*.cc | perl -nle 'print $1 if /"(_Z.*?)"/' | +# sort | uniq +# +# Note that the following symbols are invalid. +# That's why they are not demangled. +# - _ZNZN1N1fEiE1X1gE +# - _ZNZN1N1fEiE1X1gEv +# - _Z1xINiEE +_Z1fA37_iPS_ f() +_Z1fAszL_ZZNK1N1A1fEvE3foo_0E_i f() +_Z1fI1APS0_PKS0_EvT_T0_T1_PA4_S3_M1CS8_ f<>() +_Z1fI1XENT_1tES2_ f<>() +_Z1fI1XEvPVN1AIT_E1TE f<>() +_Z1fILi1ELc120EEv1AIXplT_cviLd4028ae147ae147aeEEE f<>() +_Z1fILi1ELc120EEv1AIXplT_cviLf3f800000EEE f<>() +_Z1fILi5E1AEvN1CIXqugtT_Li0ELi1ELi2EEE1qE f<>() +_Z1fILi5E1AEvN1CIXstN1T1tEEXszsrS2_1tEE1qE f<>() +_Z1fILi5EEvN1AIXcvimlT_Li22EEE1qE f<>() +_Z1fIiEvi f<>() +_Z1fKPFiiE f() +_Z1fM1AFivEPS0_ f() +_Z1fM1AKFivE f() +_Z1fM1AKFvvE f() +_Z1fPFPA1_ivE f() +_Z1fPFYPFiiEiE f() +_Z1fPFvvEM1SFvvE f() +_Z1fPKM1AFivE f() +_Z1fi f() +_Z1fv f() +_Z1jM1AFivEPS1_ j() +_Z1rM1GFivEMS_KFivES_M1HFivES1_4whatIKS_E5what2IS8_ES3_ r() +_Z1sPA37_iPS0_ s() +_Z1xINiEE _Z1xINiEE +_Z3absILi11EEvv abs<>() +_Z3foo3bar foo() +_Z3foo5Hello5WorldS0_S_ foo() +_Z3fooA30_A_i foo() +_Z3fooIA6_KiEvA9_KT_rVPrS4_ foo<>() +_Z3fooILi2EEvRAplT_Li1E_i foo<>() +_Z3fooIiFvdEiEvv foo<>() +_Z3fooPM2ABi foo() +_Z3fooc foo() +_Z3fooiPiPS_PS0_PS1_PS2_PS3_PS4_PS5_PS6_PS7_PS8_PS9_PSA_PSB_PSC_ foo() +_Z3kooPA28_A30_i koo() +_Z4makeI7FactoryiET_IT0_Ev make<>() +_Z5firstI3DuoEvS0_ first<>() +_Z5firstI3DuoEvT_ first<>() +_Z9hairyfuncM1YKFPVPFrPA2_PM1XKFKPA3_ilEPcEiE hairyfunc() +_ZGVN5libcw24_GLOBAL__N_cbll.cc0ZhUKa23compiler_bug_workaroundISt6vectorINS_13omanip_id_tctINS_5debug32memblk_types_manipulator_data_ctEEESaIS6_EEE3idsE libcw::(anonymous namespace)::compiler_bug_workaround<>::ids +_ZN12libcw_app_ct10add_optionIS_EEvMT_FvPKcES3_cS3_S3_ libcw_app_ct::add_option<>() +_ZN1AIfEcvT_IiEEv A<>::operator ?<>() +_ZN1N1TIiiE2mfES0_IddE N::T<>::mf() +_ZN1N1fE N::f +_ZN1f1fE f::f +_ZN3FooIA4_iE3barE Foo<>::bar +_ZN5Arena5levelE Arena::level +_ZN5StackIiiE5levelE Stack<>::level 
+_ZN5libcw5debug13cwprint_usingINS_9_private_12GlobalObjectEEENS0_17cwprint_using_tctIT_EERKS5_MS5_KFvRSt7ostreamE libcw::debug::cwprint_using<>() +_ZN6System5Sound4beepEv System::Sound::beep() +_ZNKSt14priority_queueIP27timer_event_request_base_ctSt5dequeIS1_SaIS1_EE13timer_greaterE3topEv std::priority_queue<>::top() +_ZNKSt15_Deque_iteratorIP15memory_block_stRKS1_PS2_EeqERKS5_ std::_Deque_iterator<>::operator==() +_ZNKSt17__normal_iteratorIPK6optionSt6vectorIS0_SaIS0_EEEmiERKS6_ std::__normal_iterator<>::operator-() +_ZNSbIcSt11char_traitsIcEN5libcw5debug27no_alloc_checking_allocatorEE12_S_constructIPcEES6_T_S7_RKS3_ std::basic_string<>::_S_construct<>() +_ZNSt13_Alloc_traitsISbIcSt18string_char_traitsIcEN5libcw5debug9_private_17allocator_adaptorIcSt24__default_alloc_templateILb0ELi327664EELb1EEEENS5_IS9_S7_Lb1EEEE15_S_instancelessE std::_Alloc_traits<>::_S_instanceless +_ZNSt3_In4wardE std::_In::ward +_ZNZN1N1fEiE1X1gE _ZNZN1N1fEiE1X1gE +_ZNZN1N1fEiE1X1gEv _ZNZN1N1fEiE1X1gEv +_ZSt1BISt1DIP1ARKS2_PS3_ES0_IS2_RS2_PS2_ES2_ET0_T_SB_SA_PT1_ std::B<>() +_ZSt5state std::state +_ZTI7a_class a_class +_ZZN1N1fEiE1p N::f()::p +_ZZN1N1fEiEs N::f() +_ZlsRK1XS1_ operator<<() +_ZlsRKU3fooU4bart1XS0_ operator<<() +_ZlsRKU3fooU4bart1XS2_ operator<<() +_ZlsRSoRKSs operator<<() +_ZngILi42EEvN1AIXplT_Li2EEE1TE operator-<>() +_ZplR1XS0_ operator+() +_Zrm1XS_ operator%() diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/glog/log_severity.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/glog/log_severity.h new file mode 100644 index 0000000000..17805fbadd --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/glog/log_severity.h @@ -0,0 +1,84 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
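[Editor's note] The tab-separated cases above spell out the demangler's deliberate limitations: std:: abbreviations are expanded, while substitutions and template arguments are collapsed to "?" and "<>" rather than fully demangled. A small table-driven check over a few of those entries might look like this sketch (assumed include path and GOOGLE_NAMESPACE macro as in the unit test; not part of the patch):

#include <cstdio>
#include <cstring>
#include "demangle.h"

struct Case { const char *mangled; const char *expected; };

int main() {
  static const Case kCases[] = {
    {"_ZNSsE", "std::string"},                             // abbreviation
    {"_ZN3fooS_E", "foo::?"},                              // substitution -> ?
    {"_Z1fIiEvi", "f<>()"},                                // template args -> <>
    {"_ZN6System5Sound4beepEv", "System::Sound::beep()"},
  };
  int failures = 0;
  for (const Case &c : kCases) {
    char out[256] = "";
    if (!GOOGLE_NAMESPACE::Demangle(c.mangled, out, sizeof(out)) ||
        std::strcmp(out, c.expected) != 0) {
      std::printf("FAIL: %s -> %s (want %s)\n", c.mangled, out, c.expected);
      ++failures;
    }
  }
  std::printf("%d failure(s)\n", failures);
  return failures == 0 ? 0 : 1;
}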
+ +#ifndef BASE_LOG_SEVERITY_H__ +#define BASE_LOG_SEVERITY_H__ + +// Annoying stuff for windows -- makes sure clients can import these functions +#ifndef GOOGLE_GLOG_DLL_DECL +# if defined(_WIN32) && !defined(__CYGWIN__) +# define GOOGLE_GLOG_DLL_DECL __declspec(dllimport) +# else +# define GOOGLE_GLOG_DLL_DECL +# endif +#endif + +// Variables of type LogSeverity are widely taken to lie in the range +// [0, NUM_SEVERITIES-1]. Be careful to preserve this assumption if +// you ever need to change their values or add a new severity. +typedef int LogSeverity; + +const int INFO = 0, WARNING = 1, ERROR = 2, FATAL = 3, NUM_SEVERITIES = 4; + +// DFATAL is FATAL in debug mode, ERROR in normal mode +#ifdef NDEBUG +#define DFATAL_LEVEL ERROR +#else +#define DFATAL_LEVEL FATAL +#endif + +extern GOOGLE_GLOG_DLL_DECL const char* const LogSeverityNames[NUM_SEVERITIES]; + +// NDEBUG usage helpers related to (RAW_)DCHECK: +// +// DEBUG_MODE is for small !NDEBUG uses like +// if (DEBUG_MODE) foo.CheckThatFoo(); +// instead of substantially more verbose +// #ifndef NDEBUG +// foo.CheckThatFoo(); +// #endif +// +// IF_DEBUG_MODE is for small !NDEBUG uses like +// IF_DEBUG_MODE( string error; ) +// DCHECK(Foo(&error)) << error; +// instead of substantially more verbose +// #ifndef NDEBUG +// string error; +// DCHECK(Foo(&error)) << error; +// #endif +// +#ifdef NDEBUG +enum { DEBUG_MODE = 0 }; +#define IF_DEBUG_MODE(x) +#else +enum { DEBUG_MODE = 1 }; +#define IF_DEBUG_MODE(x) x +#endif + +#endif // BASE_LOG_SEVERITY_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/glog/logging.h.in b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/glog/logging.h.in new file mode 100644 index 0000000000..4356552470 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/glog/logging.h.in @@ -0,0 +1,1506 @@ +// Copyright (c) 1999, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
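[Editor's note] log_severity.h above keeps severities as plain ints in [0, NUM_SEVERITIES) and provides DEBUG_MODE / IF_DEBUG_MODE so debug-only work can be written inline instead of wrapping it in #ifndef NDEBUG blocks. A brief sketch of those helpers (the "glog/log_severity.h" include path matches the one used by logging.h.in below; not part of the patch):

#include <cstdio>
#include "glog/log_severity.h"

static void CheckInvariants() { std::printf("invariants checked\n"); }

int main() {
  LogSeverity sev = WARNING;                  // just an int: 1
  std::printf("severity=%d of %d\n", sev, NUM_SEVERITIES);

  if (DEBUG_MODE) CheckInvariants();          // DEBUG_MODE is 0 or 1, no #ifndef needed
  IF_DEBUG_MODE(std::printf("debug-only diagnostics\n"));
  return 0;
}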
+// +// Author: Ray Sidney +// +// This file contains #include information about logging-related stuff. +// Pretty much everybody needs to #include this file so that they can +// log various happenings. +// +#ifndef _LOGGING_H_ +#define _LOGGING_H_ + +#include +#include +#include +#include +#if @ac_cv_have_unistd_h@ +# include +#endif +#ifdef __DEPRECATED +// Make GCC quiet. +# undef __DEPRECATED +# include +# define __DEPRECATED +#else +# include +#endif +#include + +// Annoying stuff for windows -- makes sure clients can import these functions +#ifndef GOOGLE_GLOG_DLL_DECL +# if defined(_WIN32) && !defined(__CYGWIN__) +# define GOOGLE_GLOG_DLL_DECL __declspec(dllimport) +# else +# define GOOGLE_GLOG_DLL_DECL +# endif +#endif + +// We care a lot about number of bits things take up. Unfortunately, +// systems define their bit-specific ints in a lot of different ways. +// We use our own way, and have a typedef to get there. +// Note: these commands below may look like "#if 1" or "#if 0", but +// that's because they were constructed that way at ./configure time. +// Look at logging.h.in to see how they're calculated (based on your config). +#if @ac_cv_have_stdint_h@ +#include // the normal place uint16_t is defined +#endif +#if @ac_cv_have_systypes_h@ +#include // the normal place u_int16_t is defined +#endif +#if @ac_cv_have_inttypes_h@ +#include // a third place for uint16_t or u_int16_t +#endif + +#if @ac_cv_have_libgflags@ +#include +#endif + +@ac_google_start_namespace@ + +#if @ac_cv_have_uint16_t@ // the C99 format +typedef int32_t int32; +typedef uint32_t uint32; +typedef int64_t int64; +typedef uint64_t uint64; +#elif @ac_cv_have_u_int16_t@ // the BSD format +typedef int32_t int32; +typedef u_int32_t uint32; +typedef int64_t int64; +typedef u_int64_t uint64; +#elif @ac_cv_have___uint16@ // the windows (vc7) format +typedef __int32 int32; +typedef unsigned __int32 uint32; +typedef __int64 int64; +typedef unsigned __int64 uint64; +#else +#error Do not know how to define a 32-bit integer quantity on your system +#endif + +@ac_google_end_namespace@ + +// The global value of GOOGLE_STRIP_LOG. All the messages logged to +// LOG(XXX) with severity less than GOOGLE_STRIP_LOG will not be displayed. +// If it can be determined at compile time that the message will not be +// printed, the statement will be compiled out. +// +// Example: to strip out all INFO and WARNING messages, use the value +// of 2 below. To make an exception for WARNING messages from a single +// file, add "#define GOOGLE_STRIP_LOG 1" to that file _before_ including +// base/logging.h +#ifndef GOOGLE_STRIP_LOG +#define GOOGLE_STRIP_LOG 0 +#endif + +// GCC can be told that a certain branch is not likely to be taken (for +// instance, a CHECK failure), and use that information in static analysis. +// Giving it this information can help it optimize for the common case in +// the absence of better information (ie. -fprofile-arcs). +// +#ifndef GOOGLE_PREDICT_BRANCH_NOT_TAKEN +#if @ac_cv_have___builtin_expect@ +#define GOOGLE_PREDICT_BRANCH_NOT_TAKEN(x) (__builtin_expect(x, 0)) +#else +#define GOOGLE_PREDICT_BRANCH_NOT_TAKEN(x) x +#endif +#endif + +// Make a bunch of macros for logging. The way to log things is to stream +// things to LOG(). 
E.g., +// +// LOG(INFO) << "Found " << num_cookies << " cookies"; +// +// You can capture log messages in a string, rather than reporting them +// immediately: +// +// vector errors; +// LOG_STRING(ERROR, &errors) << "Couldn't parse cookie #" << cookie_num; +// +// This pushes back the new error onto 'errors'; if given a NULL pointer, +// it reports the error via LOG(ERROR). +// +// You can also do conditional logging: +// +// LOG_IF(INFO, num_cookies > 10) << "Got lots of cookies"; +// +// You can also do occasional logging (log every n'th occurrence of an +// event): +// +// LOG_EVERY_N(INFO, 10) << "Got the " << google::COUNTER << "th cookie"; +// +// The above will cause log messages to be output on the 1st, 11th, 21st, ... +// times it is executed. Note that the special google::COUNTER value is used +// to identify which repetition is happening. +// +// You can also do occasional conditional logging (log every n'th +// occurrence of an event, when condition is satisfied): +// +// LOG_IF_EVERY_N(INFO, (size > 1024), 10) << "Got the " << google::COUNTER +// << "th big cookie"; +// +// You can log messages the first N times your code executes a line. E.g. +// +// LOG_FIRST_N(INFO, 20) << "Got the " << google::COUNTER << "th cookie"; +// +// Outputs log messages for the first 20 times it is executed. +// +// Analogous SYSLOG, SYSLOG_IF, and SYSLOG_EVERY_N macros are available. +// These log to syslog as well as to the normal logs. If you use these at +// all, you need to be aware that syslog can drastically reduce performance, +// especially if it is configured for remote logging! Don't use these +// unless you fully understand this and have a concrete need to use them. +// Even then, try to minimize your use of them. +// +// There are also "debug mode" logging macros like the ones above: +// +// DLOG(INFO) << "Found cookies"; +// +// DLOG_IF(INFO, num_cookies > 10) << "Got lots of cookies"; +// +// DLOG_EVERY_N(INFO, 10) << "Got the " << google::COUNTER << "th cookie"; +// +// All "debug mode" logging is compiled away to nothing for non-debug mode +// compiles. +// +// We also have +// +// LOG_ASSERT(assertion); +// DLOG_ASSERT(assertion); +// +// which is syntactic sugar for {,D}LOG_IF(FATAL, assert fails) << assertion; +// +// There are "verbose level" logging macros. They look like +// +// VLOG(1) << "I'm printed when you run the program with --v=1 or more"; +// VLOG(2) << "I'm printed when you run the program with --v=2 or more"; +// +// These always log at the INFO log level (when they log at all). +// The verbose logging can also be turned on module-by-module. For instance, +// --vmodule=mapreduce=2,file=1,gfs*=3 --v=0 +// will cause: +// a. VLOG(2) and lower messages to be printed from mapreduce.{h,cc} +// b. VLOG(1) and lower messages to be printed from file.{h,cc} +// c. VLOG(3) and lower messages to be printed from files prefixed with "gfs" +// d. VLOG(0) and lower messages to be printed from elsewhere +// +// The wildcarding functionality shown by (c) supports both '*' (match +// 0 or more characters) and '?' (match any single character) wildcards. +// +// There's also VLOG_IS_ON(n) "verbose level" condition macro. To be used as +// +// if (VLOG_IS_ON(2)) { +// // do some logging preparation and logging +// // that can't be accomplished with just VLOG(2) << ...; +// } +// +// There are also VLOG_IF, VLOG_EVERY_N and VLOG_IF_EVERY_N "verbose level" +// condition macros for sample cases, when some extra computation and +// preparation for logs is not needed. 
+// VLOG_IF(1, (size > 1024)) +// << "I'm printed when size is more than 1024 and when you run the " +// "program with --v=1 or more"; +// VLOG_EVERY_N(1, 10) +// << "I'm printed every 10th occurrence, and when you run the program " +// "with --v=1 or more. Present occurence is " << google::COUNTER; +// VLOG_IF_EVERY_N(1, (size > 1024), 10) +// << "I'm printed on every 10th occurence of case when size is more " +// " than 1024, when you run the program with --v=1 or more. "; +// "Present occurence is " << google::COUNTER; +// +// The supported severity levels for macros that allow you to specify one +// are (in increasing order of severity) INFO, WARNING, ERROR, and FATAL. +// Note that messages of a given severity are logged not only in the +// logfile for that severity, but also in all logfiles of lower severity. +// E.g., a message of severity FATAL will be logged to the logfiles of +// severity FATAL, ERROR, WARNING, and INFO. +// +// There is also the special severity of DFATAL, which logs FATAL in +// debug mode, ERROR in normal mode. +// +// Very important: logging a message at the FATAL severity level causes +// the program to terminate (after the message is logged). +// +// Unless otherwise specified, logs will be written to the filename +// "...log..", followed +// by the date, time, and pid (you can't prevent the date, time, and pid +// from being in the filename). +// +// The logging code takes two flags: +// --v=# set the verbose level +// --logtostderr log all the messages to stderr instead of to logfiles + +// LOG LINE PREFIX FORMAT +// +// Log lines have this form: +// +// Lmmdd hh:mm:ss.uuuuuu threadid file:line] msg... +// +// where the fields are defined as follows: +// +// L A single character, representing the log level +// (eg 'I' for INFO) +// mm The month (zero padded; ie May is '05') +// dd The day (zero padded) +// hh:mm:ss.uuuuuu Time in hours, minutes and fractional seconds +// threadid The space-padded thread ID as returned by GetTID() +// (this matches the PID on Linux) +// file The file name +// line The line number +// msg The user-supplied message +// +// Example: +// +// I1103 11:57:31.739339 24395 google.cc:2341] Command line: ./some_prog +// I1103 11:57:31.739403 24395 google.cc:2342] Process id 24395 +// +// NOTE: although the microseconds are useful for comparing events on +// a single machine, clocks on different machines may not be well +// synchronized. Hence, use caution when comparing the low bits of +// timestamps from different machines. + +#ifndef DECLARE_VARIABLE +#define MUST_UNDEF_GFLAGS_DECLARE_MACROS +#define DECLARE_VARIABLE(type, name, tn) \ + namespace FLAG__namespace_do_not_use_directly_use_DECLARE_##tn##_instead { \ + extern GOOGLE_GLOG_DLL_DECL type FLAGS_##name; \ + } \ + using FLAG__namespace_do_not_use_directly_use_DECLARE_##tn##_instead::FLAGS_##name + +// bool specialization +#define DECLARE_bool(name) \ + DECLARE_VARIABLE(bool, name, bool) + +// int32 specialization +#define DECLARE_int32(name) \ + DECLARE_VARIABLE(@ac_google_namespace@::int32, name, int32) + +// Special case for string, because we have to specify the namespace +// std::string, which doesn't play nicely with our FLAG__namespace hackery. 
+#define DECLARE_string(name) \ + namespace FLAG__namespace_do_not_use_directly_use_DECLARE_string_instead { \ + extern GOOGLE_GLOG_DLL_DECL std::string FLAGS_##name; \ + } \ + using FLAG__namespace_do_not_use_directly_use_DECLARE_string_instead::FLAGS_##name +#endif + +// Set whether log messages go to stderr instead of logfiles +DECLARE_bool(logtostderr); + +// Set whether log messages go to stderr in addition to logfiles. +DECLARE_bool(alsologtostderr); + +// Log messages at a level >= this flag are automatically sent to +// stderr in addition to log files. +DECLARE_int32(stderrthreshold); + +// Set whether the log prefix should be prepended to each line of output. +DECLARE_bool(log_prefix); + +// Log messages at a level <= this flag are buffered. +// Log messages at a higher level are flushed immediately. +DECLARE_int32(logbuflevel); + +// Sets the maximum number of seconds which logs may be buffered for. +DECLARE_int32(logbufsecs); + +// Log suppression level: messages logged at a lower level than this +// are suppressed. +DECLARE_int32(minloglevel); + +// If specified, logfiles are written into this directory instead of the +// default logging directory. +DECLARE_string(log_dir); + +// Sets the path of the directory into which to put additional links +// to the log files. +DECLARE_string(log_link); + +DECLARE_int32(v); // in vlog_is_on.cc + +// Sets the maximum log file size (in MB). +DECLARE_int32(max_log_size); + +// Sets whether to avoid logging to the disk if the disk is full. +DECLARE_bool(stop_logging_if_full_disk); + +#ifdef MUST_UNDEF_GFLAGS_DECLARE_MACROS +#undef MUST_UNDEF_GFLAGS_DECLARE_MACROS +#undef DECLARE_VARIABLE +#undef DECLARE_bool +#undef DECLARE_int32 +#undef DECLARE_string +#endif + +// Log messages below the GOOGLE_STRIP_LOG level will be compiled away for +// security reasons. See LOG(severtiy) below. + +// A few definitions of macros that don't generate much code. Since +// LOG(INFO) and its ilk are used all over our code, it's +// better to have compact code for these operations. 
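[Editor's note] A condensed version of the usage documented in the long header comment above, as a sketch rather than part of the patch (the "glog/logging.h" include name and the google:: namespace are assumptions; @ac_google_namespace@ is conventionally configured to google). Severities stream to LOG(), conditional and sampled logging use LOG_IF / LOG_EVERY_N, verbose logging uses VLOG(n), and CHECK aborts with FATAL when its condition is false, even in NDEBUG builds:

#include "glog/logging.h"

int main(int argc, char **argv) {
  (void)argc;
  FLAGS_logtostderr = true;           // same effect as --logtostderr
  google::InitGoogleLogging(argv[0]);

  int num_cookies = 12;
  LOG(INFO) << "Found " << num_cookies << " cookies";
  LOG_IF(WARNING, num_cookies > 10) << "Got lots of cookies";
  for (int i = 0; i < 30; ++i) {
    LOG_EVERY_N(INFO, 10) << "Got the " << google::COUNTER << "th cookie";
  }
  VLOG(1) << "printed only with --v=1 or more";
  CHECK(num_cookies >= 0) << "cookie count went negative";
  return 0;
}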
+ +#if GOOGLE_STRIP_LOG == 0 +#define COMPACT_GOOGLE_LOG_INFO @ac_google_namespace@::LogMessage( \ + __FILE__, __LINE__) +#define LOG_TO_STRING_INFO(message) @ac_google_namespace@::LogMessage( \ + __FILE__, __LINE__, @ac_google_namespace@::INFO, message) +#else +#define COMPACT_GOOGLE_LOG_INFO @ac_google_namespace@::NullStream() +#define LOG_TO_STRING_INFO(message) @ac_google_namespace@::NullStream() +#endif + +#if GOOGLE_STRIP_LOG <= 1 +#define COMPACT_GOOGLE_LOG_WARNING @ac_google_namespace@::LogMessage( \ + __FILE__, __LINE__, @ac_google_namespace@::WARNING) +#define LOG_TO_STRING_WARNING(message) @ac_google_namespace@::LogMessage( \ + __FILE__, __LINE__, @ac_google_namespace@::WARNING, message) +#else +#define COMPACT_GOOGLE_LOG_WARNING @ac_google_namespace@::NullStream() +#define LOG_TO_STRING_WARNING(message) @ac_google_namespace@::NullStream() +#endif + +#if GOOGLE_STRIP_LOG <= 2 +#define COMPACT_GOOGLE_LOG_ERROR @ac_google_namespace@::LogMessage( \ + __FILE__, __LINE__, @ac_google_namespace@::ERROR) +#define LOG_TO_STRING_ERROR(message) @ac_google_namespace@::LogMessage( \ + __FILE__, __LINE__, @ac_google_namespace@::ERROR, message) +#else +#define COMPACT_GOOGLE_LOG_ERROR @ac_google_namespace@::NullStream() +#define LOG_TO_STRING_ERROR(message) @ac_google_namespace@::NullStream() +#endif + +#if GOOGLE_STRIP_LOG <= 3 +#define COMPACT_GOOGLE_LOG_FATAL @ac_google_namespace@::LogMessageFatal( \ + __FILE__, __LINE__) +#define LOG_TO_STRING_FATAL(message) @ac_google_namespace@::LogMessage( \ + __FILE__, __LINE__, @ac_google_namespace@::FATAL, message) +#else +#define COMPACT_GOOGLE_LOG_FATAL @ac_google_namespace@::NullStreamFatal() +#define LOG_TO_STRING_FATAL(message) @ac_google_namespace@::NullStreamFatal() +#endif + +// For DFATAL, we want to use LogMessage (as opposed to +// LogMessageFatal), to be consistent with the original behavior. 
+#ifdef NDEBUG +#define COMPACT_GOOGLE_LOG_DFATAL COMPACT_GOOGLE_LOG_ERROR +#elif GOOGLE_STRIP_LOG <= 3 +#define COMPACT_GOOGLE_LOG_DFATAL @ac_google_namespace@::LogMessage( \ + __FILE__, __LINE__, @ac_google_namespace@::FATAL) +#else +#define COMPACT_GOOGLE_LOG_DFATAL @ac_google_namespace@::NullStreamFatal() +#endif + +#define GOOGLE_LOG_INFO(counter) @ac_google_namespace@::LogMessage(__FILE__, __LINE__, @ac_google_namespace@::INFO, counter, &@ac_google_namespace@::LogMessage::SendToLog) +#define SYSLOG_INFO(counter) \ + @ac_google_namespace@::LogMessage(__FILE__, __LINE__, @ac_google_namespace@::INFO, counter, \ + &@ac_google_namespace@::LogMessage::SendToSyslogAndLog) +#define GOOGLE_LOG_WARNING(counter) \ + @ac_google_namespace@::LogMessage(__FILE__, __LINE__, @ac_google_namespace@::WARNING, counter, \ + &@ac_google_namespace@::LogMessage::SendToLog) +#define SYSLOG_WARNING(counter) \ + @ac_google_namespace@::LogMessage(__FILE__, __LINE__, @ac_google_namespace@::WARNING, counter, \ + &@ac_google_namespace@::LogMessage::SendToSyslogAndLog) +#define GOOGLE_LOG_ERROR(counter) \ + @ac_google_namespace@::LogMessage(__FILE__, __LINE__, @ac_google_namespace@::ERROR, counter, \ + &@ac_google_namespace@::LogMessage::SendToLog) +#define SYSLOG_ERROR(counter) \ + @ac_google_namespace@::LogMessage(__FILE__, __LINE__, @ac_google_namespace@::ERROR, counter, \ + &@ac_google_namespace@::LogMessage::SendToSyslogAndLog) +#define GOOGLE_LOG_FATAL(counter) \ + @ac_google_namespace@::LogMessage(__FILE__, __LINE__, @ac_google_namespace@::FATAL, counter, \ + &@ac_google_namespace@::LogMessage::SendToLog) +#define SYSLOG_FATAL(counter) \ + @ac_google_namespace@::LogMessage(__FILE__, __LINE__, @ac_google_namespace@::FATAL, counter, \ + &@ac_google_namespace@::LogMessage::SendToSyslogAndLog) +#define GOOGLE_LOG_DFATAL(counter) \ + @ac_google_namespace@::LogMessage(__FILE__, __LINE__, @ac_google_namespace@::DFATAL_LEVEL, counter, \ + &@ac_google_namespace@::LogMessage::SendToLog) +#define SYSLOG_DFATAL(counter) \ + @ac_google_namespace@::LogMessage(__FILE__, __LINE__, @ac_google_namespace@::DFATAL_LEVEL, counter, \ + &@ac_google_namespace@::LogMessage::SendToSyslogAndLog) + +#if defined(WIN32) || defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__) || defined(__CYGWIN32__) +// A very useful logging macro to log windows errors: +#define LOG_SYSRESULT(result) \ + if (FAILED(result)) { \ + LPTSTR message = NULL; \ + LPTSTR msg = reinterpret_cast(&message); \ + DWORD message_length = FormatMessage(FORMAT_MESSAGE_ALLOCATE_BUFFER | \ + FORMAT_MESSAGE_FROM_SYSTEM, \ + 0, result, 0, msg, 100, NULL); \ + if (message_length > 0) { \ + @ac_google_namespace@::LogMessage(__FILE__, __LINE__, ERROR, 0, \ + &@ac_google_namespace@::LogMessage::SendToLog).stream() << message; \ + LocalFree(message); \ + } \ + } +#endif + +// We use the preprocessor's merging operator, "##", so that, e.g., +// LOG(INFO) becomes the token GOOGLE_LOG_INFO. There's some funny +// subtle difference between ostream member streaming functions (e.g., +// ostream::operator<<(int) and ostream non-member streaming functions +// (e.g., ::operator<<(ostream&, string&): it turns out that it's +// impossible to stream something like a string directly to an unnamed +// ostream. We employ a neat hack by calling the stream() member +// function of LogMessage which seems to avoid the problem. 
+#define LOG(severity) COMPACT_GOOGLE_LOG_ ## severity.stream()
+#define SYSLOG(severity) SYSLOG_ ## severity(0).stream()
+
+@ac_google_start_namespace@
+
+// They need the definitions of integer types.
+#include "glog/log_severity.h"
+#include "glog/vlog_is_on.h"
+
+// Initialize google's logging library. You will see the program name
+// specified by argv0 in log outputs.
+GOOGLE_GLOG_DLL_DECL void InitGoogleLogging(const char* argv0);
+
+// Shutdown google's logging library.
+GOOGLE_GLOG_DLL_DECL void ShutdownGoogleLogging();
+
+// Install a function which will be called after LOG(FATAL).
+GOOGLE_GLOG_DLL_DECL void InstallFailureFunction(void (*fail_func)());
+
+class LogSink;  // defined below
+
+// If a non-NULL sink pointer is given, we push this message to that sink.
+// For LOG_TO_SINK we then do normal LOG(severity) logging as well.
+// This is useful for capturing messages and passing/storing them
+// somewhere more specific than the global log of the process.
+// Argument types:
+//   LogSink* sink;
+//   LogSeverity severity;
+// The cast is to disambiguate NULL arguments.
+#define LOG_TO_SINK(sink, severity) \
+  @ac_google_namespace@::LogMessage( \
+      __FILE__, __LINE__, \
+      @ac_google_namespace@::severity, \
+      static_cast<@ac_google_namespace@::LogSink*>(sink), true).stream()
+#define LOG_TO_SINK_BUT_NOT_TO_LOGFILE(sink, severity) \
+  @ac_google_namespace@::LogMessage( \
+      __FILE__, __LINE__, \
+      @ac_google_namespace@::severity, \
+      static_cast<@ac_google_namespace@::LogSink*>(sink), false).stream()
+
+// If a non-NULL string pointer is given, we write this message to that string.
+// We then do normal LOG(severity) logging as well.
+// This is useful for capturing messages and storing them somewhere more
+// specific than the global log of the process.
+// Argument types:
+//   string* message;
+//   LogSeverity severity;
+// The cast is to disambiguate NULL arguments.
+// NOTE: LOG(severity) expands to LogMessage().stream() for the specified
+// severity.
+#define LOG_TO_STRING(severity, message) \
+  LOG_TO_STRING_##severity(static_cast<std::string*>(message)).stream()
+
+// If a non-NULL pointer is given, we push the message onto the end
+// of a vector of strings; otherwise, we report it with LOG(severity).
+// This is handy for capturing messages and perhaps passing them back
+// to the caller, rather than reporting them immediately.
+// Argument types:
+//   LogSeverity severity;
+//   vector<string> *outvec;
+// The cast is to disambiguate NULL arguments.
+#define LOG_STRING(severity, outvec) \
+  LOG_TO_STRING_##severity(static_cast<std::vector<std::string>*>(outvec)).stream()
+
+#define LOG_IF(severity, condition) \
+  !(condition) ? (void) 0 : @ac_google_namespace@::LogMessageVoidify() & LOG(severity)
+#define SYSLOG_IF(severity, condition) \
+  !(condition) ? (void) 0 : @ac_google_namespace@::LogMessageVoidify() & SYSLOG(severity)
+
+#define LOG_ASSERT(condition) \
+  LOG_IF(FATAL, !(condition)) << "Assert failed: " #condition
+#define SYSLOG_ASSERT(condition) \
+  SYSLOG_IF(FATAL, !(condition)) << "Assert failed: " #condition
+
+// CHECK dies with a fatal error if condition is not true. It is *not*
+// controlled by NDEBUG, so the check will be executed regardless of
+// compilation mode. Therefore, it is safe to do things like:
+//    CHECK(fp->Write(x) == 4)
+#define CHECK(condition) \
+      LOG_IF(FATAL, GOOGLE_PREDICT_BRANCH_NOT_TAKEN(!(condition))) \
+             << "Check failed: " #condition " "
+
+// A container for a string pointer which can be evaluated to a bool -
+// true iff the pointer is NULL.
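Editor's note: a short usage sketch of the conditional and capturing macros defined above, before the CheckOpString helper that follows; num_cookies and the message strings are placeholders.

    #include <string>
    #include <vector>
    #include <glog/logging.h>

    void HandleCookies(int num_cookies) {
      LOG_IF(WARNING, num_cookies > 10) << "Got " << num_cookies << " cookies";

      std::string captured;
      LOG_TO_STRING(ERROR, &captured) << "logged normally and also copied into 'captured'";

      std::vector<std::string> errors;
      LOG_STRING(ERROR, &errors) << "pushed onto 'errors' instead of being reported";

      CHECK(num_cookies >= 0) << "negative cookie count";
    }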
+struct CheckOpString {
+  CheckOpString(std::string* str) : str_(str) { }
+  // No destructor: if str_ is non-NULL, we're about to LOG(FATAL),
+  // so there's no point in cleaning up str_.
+  operator bool() const {
+    return GOOGLE_PREDICT_BRANCH_NOT_TAKEN(str_ != NULL);
+  }
+  std::string* str_;
+};
+
+// Function is overloaded for integral types to allow static const
+// integrals declared in classes and not defined to be used as arguments to
+// CHECK* macros. It's not encouraged though.
+template <class T>
+inline const T& GetReferenceableValue(const T& t) { return t; }
+inline char GetReferenceableValue(char t) { return t; }
+inline unsigned char GetReferenceableValue(unsigned char t) { return t; }
+inline signed char GetReferenceableValue(signed char t) { return t; }
+inline short GetReferenceableValue(short t) { return t; }
+inline unsigned short GetReferenceableValue(unsigned short t) { return t; }
+inline int GetReferenceableValue(int t) { return t; }
+inline unsigned int GetReferenceableValue(unsigned int t) { return t; }
+inline long GetReferenceableValue(long t) { return t; }
+inline unsigned long GetReferenceableValue(unsigned long t) { return t; }
+inline long long GetReferenceableValue(long long t) { return t; }
+inline unsigned long long GetReferenceableValue(unsigned long long t) {
+  return t;
+}
+
+// This is a dummy class to define the following operator.
+struct DummyClassToDefineOperator {};
+
+@ac_google_end_namespace@
+
+// Define global operator<< to declare using ::operator<<.
+// This declaration will allow us to use CHECK macros for user
+// defined classes which have operator<< (e.g., stl_logging.h).
+inline std::ostream& operator<<(
+    std::ostream& out, const google::DummyClassToDefineOperator&) {
+  return out;
+}
+
+@ac_google_start_namespace@
+
+// Build the error message string.
+template <class t1, class t2>
+std::string* MakeCheckOpString(const t1& v1, const t2& v2, const char* names) {
+  // It means that we cannot use stl_logging if the compiler doesn't
+  // support a using declaration for the streaming operator.
+  // TODO(hamaji): Figure out a way to fix.
+#if @ac_cv_cxx_using_operator@
+  using ::operator<<;
+#endif
+  std::strstream ss;
+  ss << names << " (" << v1 << " vs. " << v2 << ")";
+  return new std::string(ss.str(), ss.pcount());
+}
+
+// Helper functions for CHECK_OP macro.
+// The (int, int) specialization works around the issue that the compiler
+// will not instantiate the template version of the function on values of
+// unnamed enum type - see comment below.
+#define DEFINE_CHECK_OP_IMPL(name, op) \
+  template <class t1, class t2> \
+  inline std::string* Check##name##Impl(const t1& v1, const t2& v2, \
+                                        const char* names) { \
+    if (v1 op v2) return NULL; \
+    else return MakeCheckOpString(v1, v2, names); \
+  } \
+  inline std::string* Check##name##Impl(int v1, int v2, const char* names) { \
+    return Check##name##Impl<int, int>(v1, v2, names); \
+  }
+
+// Use _EQ, _NE, _LE, etc. in case the file including base/logging.h
+// provides its own #defines for the simpler names EQ, NE, LE, etc.
+// This happens if, for example, those are used as token names in a
+// yacc grammar.
+DEFINE_CHECK_OP_IMPL(_EQ, ==)
+DEFINE_CHECK_OP_IMPL(_NE, !=)
+DEFINE_CHECK_OP_IMPL(_LE, <=)
+DEFINE_CHECK_OP_IMPL(_LT, < )
+DEFINE_CHECK_OP_IMPL(_GE, >=)
+DEFINE_CHECK_OP_IMPL(_GT, > )
+#undef DEFINE_CHECK_OP_IMPL
+
+// Helper macro for binary operators.
+// Don't use this macro directly in your code, use CHECK_EQ et al below.
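Editor's note: a small sketch of why the GetReferenceableValue() overloads above exist, placed before the CHECK_OP_LOG helper that follows. A static const integral member declared in a class but never defined out of line (a hypothetical Limits class here) can still be passed to CHECK_LE and friends, because the value is taken by value rather than bound by reference.

    struct Limits {
      static const int kMaxRetries = 8;  // declaration only; no definition in any .cc file
    };

    void Retry(int attempt) {
      // Fine even though &Limits::kMaxRetries does not exist anywhere:
      CHECK_LE(attempt, Limits::kMaxRetries);
    }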
+
+#if defined(STATIC_ANALYSIS)
+// Only for static analysis tool to know that it is equivalent to assert
+#define CHECK_OP_LOG(name, op, val1, val2, log) CHECK((val1) op (val2))
+#elif !defined(NDEBUG)
+// In debug mode, avoid constructing CheckOpStrings if possible,
+// to reduce the overhead of CHECK statements by 2x.
+// Real DCHECK-heavy tests have seen 1.5x speedups.
+
+// The meaning of "string" might be different between now and
+// when this macro gets invoked (e.g., if someone is experimenting
+// with other string implementations that get defined after this
+// file is included). Save the current meaning now and use it
+// in the macro.
+typedef std::string _Check_string;
+#define CHECK_OP_LOG(name, op, val1, val2, log) \
+  while (@ac_google_namespace@::_Check_string* _result = \
+         @ac_google_namespace@::Check##name##Impl( \
+             @ac_google_namespace@::GetReferenceableValue(val1), \
+             @ac_google_namespace@::GetReferenceableValue(val2), \
+             #val1 " " #op " " #val2)) \
+    log(__FILE__, __LINE__, \
+        @ac_google_namespace@::CheckOpString(_result)).stream()
+#else
+// In optimized mode, use CheckOpString to hint to compiler that
+// the while condition is unlikely.
+#define CHECK_OP_LOG(name, op, val1, val2, log) \
+  while (@ac_google_namespace@::CheckOpString _result = \
+         @ac_google_namespace@::Check##name##Impl( \
+             @ac_google_namespace@::GetReferenceableValue(val1), \
+             @ac_google_namespace@::GetReferenceableValue(val2), \
+             #val1 " " #op " " #val2)) \
+    log(__FILE__, __LINE__, _result).stream()
+#endif  // STATIC_ANALYSIS, !NDEBUG
+
+#if GOOGLE_STRIP_LOG <= 3
+#define CHECK_OP(name, op, val1, val2) \
+  CHECK_OP_LOG(name, op, val1, val2, @ac_google_namespace@::LogMessageFatal)
+#else
+#define CHECK_OP(name, op, val1, val2) \
+  CHECK_OP_LOG(name, op, val1, val2, @ac_google_namespace@::NullStreamFatal)
+#endif  // STRIP_LOG <= 3
+
+// Equality/Inequality checks - compare two values, and log a FATAL message
+// including the two values when the result is not as expected. The values
+// must have operator<<(ostream, ...) defined.
+//
+// You may append to the error message like so:
+//   CHECK_NE(1, 2) << ": The world must be ending!";
+//
+// We are very careful to ensure that each argument is evaluated exactly
+// once, and that anything which is legal to pass as a function argument is
+// legal here. In particular, the arguments may be temporary expressions
+// which will end up being destroyed at the end of the apparent statement,
+// for example:
+//   CHECK_EQ(string("abc")[1], 'b');
+//
+// WARNING: These don't compile correctly if one of the arguments is a pointer
+// and the other is NULL. To work around this, simply static_cast NULL to the
+// type of the desired pointer.
+
+#define CHECK_EQ(val1, val2) CHECK_OP(_EQ, ==, val1, val2)
+#define CHECK_NE(val1, val2) CHECK_OP(_NE, !=, val1, val2)
+#define CHECK_LE(val1, val2) CHECK_OP(_LE, <=, val1, val2)
+#define CHECK_LT(val1, val2) CHECK_OP(_LT, < , val1, val2)
+#define CHECK_GE(val1, val2) CHECK_OP(_GE, >=, val1, val2)
+#define CHECK_GT(val1, val2) CHECK_OP(_GT, > , val1, val2)
+
+// Check that the input is non-NULL. This is very useful in constructor
+// initializer lists.
+
+#define CHECK_NOTNULL(val) \
+  @ac_google_namespace@::CheckNotNull(__FILE__, __LINE__, "'" #val "' Must be non NULL", (val))
+
+// Helper functions for string comparisons.
+// To avoid bloat, the definitions are in logging.cc.
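Editor's note: a brief sketch of the comparison checks declared above, including the NULL workaround from the warning and the value-returning behaviour of CHECK_NOTNULL; AllocateBuffer() is a hypothetical helper defined here only so the example is self-contained.

    #include <string>
    #include <glog/logging.h>

    char* AllocateBuffer() {          // hypothetical helper
      static char buf[64];
      return buf;
    }

    void Demo() {
      CHECK_EQ(std::string("abc")[1], 'b');
      CHECK_NE(1, 2) << ": The world must be ending!";

      // CHECK_NOTNULL returns its argument, so it can wrap an initializer.
      char* buf = CHECK_NOTNULL(AllocateBuffer());

      // Pointer-vs-NULL comparisons need the cast mentioned in the warning above.
      CHECK_NE(buf, static_cast<char*>(NULL));
    }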
+#define DECLARE_CHECK_STROP_IMPL(func, expected) \ + GOOGLE_GLOG_DLL_DECL std::string* Check##func##expected##Impl( \ + const char* s1, const char* s2, const char* names); +DECLARE_CHECK_STROP_IMPL(strcmp, true) +DECLARE_CHECK_STROP_IMPL(strcmp, false) +DECLARE_CHECK_STROP_IMPL(strcasecmp, true) +DECLARE_CHECK_STROP_IMPL(strcasecmp, false) +#undef DECLARE_CHECK_STROP_IMPL + +// Helper macro for string comparisons. +// Don't use this macro directly in your code, use CHECK_STREQ et al below. +#define CHECK_STROP(func, op, expected, s1, s2) \ + while (@ac_google_namespace@::CheckOpString _result = \ + @ac_google_namespace@::Check##func##expected##Impl((s1), (s2), \ + #s1 " " #op " " #s2)) \ + LOG(FATAL) << *_result.str_ + + +// String (char*) equality/inequality checks. +// CASE versions are case-insensitive. +// +// Note that "s1" and "s2" may be temporary strings which are destroyed +// by the compiler at the end of the current "full expression" +// (e.g. CHECK_STREQ(Foo().c_str(), Bar().c_str())). + +#define CHECK_STREQ(s1, s2) CHECK_STROP(strcmp, ==, true, s1, s2) +#define CHECK_STRNE(s1, s2) CHECK_STROP(strcmp, !=, false, s1, s2) +#define CHECK_STRCASEEQ(s1, s2) CHECK_STROP(strcasecmp, ==, true, s1, s2) +#define CHECK_STRCASENE(s1, s2) CHECK_STROP(strcasecmp, !=, false, s1, s2) + +#define CHECK_INDEX(I,A) CHECK(I < (sizeof(A)/sizeof(A[0]))) +#define CHECK_BOUND(B,A) CHECK(B <= (sizeof(A)/sizeof(A[0]))) + +#define CHECK_DOUBLE_EQ(val1, val2) \ + do { \ + CHECK_LE((val1), (val2)+0.000000000000001L); \ + CHECK_GE((val1), (val2)-0.000000000000001L); \ + } while (0) + +#define CHECK_NEAR(val1, val2, margin) \ + do { \ + CHECK_LE((val1), (val2)+(margin)); \ + CHECK_GE((val1), (val2)-(margin)); \ + } while (0) + +// perror()..googly style! +// +// PLOG() and PLOG_IF() and PCHECK() behave exactly like their LOG* and +// CHECK equivalents with the addition that they postpend a description +// of the current state of errno to their output lines. + +#define PLOG(severity) GOOGLE_PLOG(severity, 0).stream() + +#define GOOGLE_PLOG(severity, counter) \ + @ac_google_namespace@::ErrnoLogMessage( \ + __FILE__, __LINE__, @ac_google_namespace@::severity, counter, \ + &@ac_google_namespace@::LogMessage::SendToLog) + +#define PLOG_IF(severity, condition) \ + !(condition) ? (void) 0 : @ac_google_namespace@::LogMessageVoidify() & PLOG(severity) + +// A CHECK() macro that postpends errno if the condition is false. E.g. +// +// if (poll(fds, nfds, timeout) == -1) { PCHECK(errno == EINTR); ... } +#define PCHECK(condition) \ + PLOG_IF(FATAL, GOOGLE_PREDICT_BRANCH_NOT_TAKEN(!(condition))) \ + << "Check failed: " #condition " " + +// A CHECK() macro that lets you assert the success of a function that +// returns -1 and sets errno in case of an error. E.g. +// +// CHECK_ERR(mkdir(path, 0700)); +// +// or +// +// int fd = open(filename, flags); CHECK_ERR(fd) << ": open " << filename; +#define CHECK_ERR(invocation) \ +PLOG_IF(FATAL, GOOGLE_PREDICT_BRANCH_NOT_TAKEN((invocation) == -1)) \ + << #invocation + +// Use macro expansion to create, for each use of LOG_EVERY_N(), static +// variables with the __LINE__ expansion as part of the variable name. 
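Editor's note: a sketch combining the string checks and the errno-aware macros above, mirroring the examples given in the comments; the filename is a placeholder and POSIX open/close are assumed to be available.

    #include <fcntl.h>
    #include <unistd.h>
    #include <glog/logging.h>

    void ReadConfig(const char* filename) {
      CHECK_STRNE(filename, "");                 // strcmp-based, not pointer comparison
      CHECK_NEAR(0.1 + 0.2, 0.3, 1e-9);          // tolerance-based floating-point check

      int fd = open(filename, O_RDONLY);
      CHECK_ERR(fd) << ": open " << filename;    // fatal, with strerror(errno), if fd == -1
      PCHECK(close(fd) == 0) << "close failed";  // also appends the errno description
    }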
+#define LOG_EVERY_N_VARNAME(base, line) LOG_EVERY_N_VARNAME_CONCAT(base, line) +#define LOG_EVERY_N_VARNAME_CONCAT(base, line) base ## line + +#define LOG_OCCURRENCES LOG_EVERY_N_VARNAME(occurrences_, __LINE__) +#define LOG_OCCURRENCES_MOD_N LOG_EVERY_N_VARNAME(occurrences_mod_n_, __LINE__) + +#define SOME_KIND_OF_LOG_EVERY_N(severity, n, what_to_do) \ + static int LOG_OCCURRENCES = 0, LOG_OCCURRENCES_MOD_N = 0; \ + ++LOG_OCCURRENCES; \ + if (++LOG_OCCURRENCES_MOD_N > n) LOG_OCCURRENCES_MOD_N -= n; \ + if (LOG_OCCURRENCES_MOD_N == 1) \ + @ac_google_namespace@::LogMessage( \ + __FILE__, __LINE__, @ac_google_namespace@::severity, LOG_OCCURRENCES, \ + &what_to_do).stream() + +#define SOME_KIND_OF_LOG_IF_EVERY_N(severity, condition, n, what_to_do) \ + static int LOG_OCCURRENCES = 0, LOG_OCCURRENCES_MOD_N = 0; \ + ++LOG_OCCURRENCES; \ + if (condition && \ + ((LOG_OCCURRENCES_MOD_N=(LOG_OCCURRENCES_MOD_N + 1) % n) == (1 % n))) \ + @ac_google_namespace@::LogMessage( \ + __FILE__, __LINE__, @ac_google_namespace@::severity, LOG_OCCURRENCES, \ + &what_to_do).stream() + +#define SOME_KIND_OF_PLOG_EVERY_N(severity, n, what_to_do) \ + static int LOG_OCCURRENCES = 0, LOG_OCCURRENCES_MOD_N = 0; \ + ++LOG_OCCURRENCES; \ + if (++LOG_OCCURRENCES_MOD_N > n) LOG_OCCURRENCES_MOD_N -= n; \ + if (LOG_OCCURRENCES_MOD_N == 1) \ + @ac_google_namespace@::ErrnoLogMessage( \ + __FILE__, __LINE__, @ac_google_namespace@::severity, LOG_OCCURRENCES, \ + &what_to_do).stream() + +#define SOME_KIND_OF_LOG_FIRST_N(severity, n, what_to_do) \ + static int LOG_OCCURRENCES = 0; \ + if (LOG_OCCURRENCES <= n) \ + ++LOG_OCCURRENCES; \ + if (LOG_OCCURRENCES <= n) \ + @ac_google_namespace@::LogMessage( \ + __FILE__, __LINE__, @ac_google_namespace@::severity, LOG_OCCURRENCES, \ + &what_to_do).stream() + +namespace glog_internal_namespace_ { +template +struct CompileAssert { +}; +struct CrashReason; +} // namespace glog_internal_namespace_ + +#define GOOGLE_GLOG_COMPILE_ASSERT(expr, msg) \ + typedef @ac_google_namespace@::glog_internal_namespace_::CompileAssert<(bool(expr))> msg[bool(expr) ? 1 : -1] + +#define LOG_EVERY_N(severity, n) \ + GOOGLE_GLOG_COMPILE_ASSERT(@ac_google_namespace@::severity < \ + @ac_google_namespace@::NUM_SEVERITIES, \ + INVALID_REQUESTED_LOG_SEVERITY); \ + SOME_KIND_OF_LOG_EVERY_N(severity, (n), @ac_google_namespace@::LogMessage::SendToLog) + +#define SYSLOG_EVERY_N(severity, n) \ + SOME_KIND_OF_LOG_EVERY_N(severity, (n), @ac_google_namespace@::LogMessage::SendToSyslogAndLog) + +#define PLOG_EVERY_N(severity, n) \ + SOME_KIND_OF_PLOG_EVERY_N(severity, (n), @ac_google_namespace@::LogMessage::SendToLog) + +#define LOG_FIRST_N(severity, n) \ + SOME_KIND_OF_LOG_FIRST_N(severity, (n), @ac_google_namespace@::LogMessage::SendToLog) + +#define LOG_IF_EVERY_N(severity, condition, n) \ + SOME_KIND_OF_LOG_IF_EVERY_N(severity, (condition), (n), @ac_google_namespace@::LogMessage::SendToLog) + +// We want the special COUNTER value available for LOG_EVERY_X()'ed messages +enum PRIVATE_Counter {COUNTER}; + + +// Plus some debug-logging macros that get compiled to nothing for production + +#ifndef NDEBUG + +#define DLOG(severity) LOG(severity) +#define DVLOG(verboselevel) VLOG(verboselevel) +#define DLOG_IF(severity, condition) LOG_IF(severity, condition) +#define DLOG_EVERY_N(severity, n) LOG_EVERY_N(severity, n) +#define DLOG_IF_EVERY_N(severity, condition, n) \ + LOG_IF_EVERY_N(severity, condition, n) +#define DLOG_ASSERT(condition) LOG_ASSERT(condition) + +// debug-only checking. not executed in NDEBUG mode. 
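Editor's note: before the remaining debug-only aliases below, here is a sketch of the occasional-logging macros defined above; COUNTER streams the per-site occurrence count enabled by the PRIVATE_Counter enum, and the names and thresholds are illustrative only.

    void PollSensors(int packet_size) {
      LOG_EVERY_N(INFO, 10) << "Got the " << COUNTER << "th packet";
      LOG_IF_EVERY_N(WARNING, packet_size > 1024, 10) << "Large packet #" << COUNTER;
      LOG_FIRST_N(ERROR, 5) << "reported at most five times per process";
      PLOG_EVERY_N(ERROR, 100) << "errno-annotated, every 100th occurrence";
      DLOG(INFO) << "alias of LOG(INFO) in debug builds, compiled out under NDEBUG";
    }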
+#define DCHECK(condition) CHECK(condition) +#define DCHECK_EQ(val1, val2) CHECK_EQ(val1, val2) +#define DCHECK_NE(val1, val2) CHECK_NE(val1, val2) +#define DCHECK_LE(val1, val2) CHECK_LE(val1, val2) +#define DCHECK_LT(val1, val2) CHECK_LT(val1, val2) +#define DCHECK_GE(val1, val2) CHECK_GE(val1, val2) +#define DCHECK_GT(val1, val2) CHECK_GT(val1, val2) +#define DCHECK_NOTNULL(val) CHECK_NOTNULL(val) +#define DCHECK_STREQ(str1, str2) CHECK_STREQ(str1, str2) +#define DCHECK_STRCASEEQ(str1, str2) CHECK_STRCASEEQ(str1, str2) +#define DCHECK_STRNE(str1, str2) CHECK_STRNE(str1, str2) +#define DCHECK_STRCASENE(str1, str2) CHECK_STRCASENE(str1, str2) + +#else // NDEBUG + +#define DLOG(severity) \ + true ? (void) 0 : @ac_google_namespace@::LogMessageVoidify() & LOG(severity) + +#define DVLOG(verboselevel) \ + (true || !VLOG_IS_ON(verboselevel)) ?\ + (void) 0 : @ac_google_namespace@::LogMessageVoidify() & LOG(INFO) + +#define DLOG_IF(severity, condition) \ + (true || !(condition)) ? (void) 0 : @ac_google_namespace@::LogMessageVoidify() & LOG(severity) + +#define DLOG_EVERY_N(severity, n) \ + true ? (void) 0 : @ac_google_namespace@::LogMessageVoidify() & LOG(severity) + +#define DLOG_IF_EVERY_N(severity, condition, n) \ + (true || !(condition))? (void) 0 : @ac_google_namespace@::LogMessageVoidify() & LOG(severity) + +#define DLOG_ASSERT(condition) \ + true ? (void) 0 : LOG_ASSERT(condition) + +#define DCHECK(condition) \ + while (false) \ + CHECK(condition) + +#define DCHECK_EQ(val1, val2) \ + while (false) \ + CHECK_EQ(val1, val2) + +#define DCHECK_NE(val1, val2) \ + while (false) \ + CHECK_NE(val1, val2) + +#define DCHECK_LE(val1, val2) \ + while (false) \ + CHECK_LE(val1, val2) + +#define DCHECK_LT(val1, val2) \ + while (false) \ + CHECK_LT(val1, val2) + +#define DCHECK_GE(val1, val2) \ + while (false) \ + CHECK_GE(val1, val2) + +#define DCHECK_GT(val1, val2) \ + while (false) \ + CHECK_GT(val1, val2) + +#define DCHECK_NOTNULL(val) (val) + +#define DCHECK_STREQ(str1, str2) \ + while (false) \ + CHECK_STREQ(str1, str2) + +#define DCHECK_STRCASEEQ(str1, str2) \ + while (false) \ + CHECK_STRCASEEQ(str1, str2) + +#define DCHECK_STRNE(str1, str2) \ + while (false) \ + CHECK_STRNE(str1, str2) + +#define DCHECK_STRCASENE(str1, str2) \ + while (false) \ + CHECK_STRCASENE(str1, str2) + + +#endif // NDEBUG + +// Log only in verbose mode. + +#define VLOG(verboselevel) LOG_IF(INFO, VLOG_IS_ON(verboselevel)) + +#define VLOG_IF(verboselevel, condition) \ + LOG_IF(INFO, (condition) && VLOG_IS_ON(verboselevel)) + +#define VLOG_EVERY_N(verboselevel, n) \ + LOG_IF_EVERY_N(INFO, VLOG_IS_ON(verboselevel), n) + +#define VLOG_IF_EVERY_N(verboselevel, condition, n) \ + LOG_IF_EVERY_N(INFO, (condition) && VLOG_IS_ON(verboselevel), n) + +// +// This class more or less represents a particular log message. You +// create an instance of LogMessage and then stream stuff to it. +// When you finish streaming to it, ~LogMessage is called and the +// full message gets streamed to the appropriate destination. +// +// You shouldn't actually use LogMessage's constructor to log things, +// though. You should use the LOG() macro (and variants thereof) +// above. +class GOOGLE_GLOG_DLL_DECL LogMessage { +public: + enum { + // Passing kNoLogPrefix for the line number disables the + // log-message prefix. Useful for using the LogMessage + // infrastructure as a printing utility. See also the --log_prefix + // flag for controlling the log-message prefix on an + // application-wide basis. 
+ kNoLogPrefix = -1 + }; + + // LogStream inherit from non-DLL-exported class (std::ostrstream) + // and VC++ produces a warning for this situation. + // However, MSDN says "C4275 can be ignored in Microsoft Visual C++ + // 2005 if you are deriving from a type in the Standard C++ Library" + // http://msdn.microsoft.com/en-us/library/3tdb471s(VS.80).aspx + // Let's just ignore the warning. +#ifdef _MSC_VER +# pragma warning(disable: 4275) +#endif + class GOOGLE_GLOG_DLL_DECL LogStream : public std::ostrstream { +#ifdef _MSC_VER +# pragma warning(default: 4275) +#endif + public: + LogStream(char *buf, int len, int ctr) + : ostrstream(buf, len), + ctr_(ctr) { + self_ = this; + } + + int ctr() const { return ctr_; } + void set_ctr(int ctr) { ctr_ = ctr; } + LogStream* self() const { return self_; } + + private: + int ctr_; // Counter hack (for the LOG_EVERY_X() macro) + LogStream *self_; // Consistency check hack + }; + +public: + // icc 8 requires this typedef to avoid an internal compiler error. + typedef void (LogMessage::*SendMethod)(); + + LogMessage(const char* file, int line, LogSeverity severity, int ctr, + SendMethod send_method); + + // Two special constructors that generate reduced amounts of code at + // LOG call sites for common cases. + + // Used for LOG(INFO): Implied are: + // severity = INFO, ctr = 0, send_method = &LogMessage::SendToLog. + // + // Using this constructor instead of the more complex constructor above + // saves 19 bytes per call site. + LogMessage(const char* file, int line); + + // Used for LOG(severity) where severity != INFO. Implied + // are: ctr = 0, send_method = &LogMessage::SendToLog + // + // Using this constructor instead of the more complex constructor above + // saves 17 bytes per call site. + LogMessage(const char* file, int line, LogSeverity severity); + + // Constructor to log this message to a specified sink (if not NULL). + // Implied are: ctr = 0, send_method = &LogMessage::SendToSinkAndLog if + // also_send_to_log is true, send_method = &LogMessage::SendToSink otherwise. + LogMessage(const char* file, int line, LogSeverity severity, LogSink* sink, + bool also_send_to_log); + + // Constructor where we also give a vector pointer + // for storing the messages (if the pointer is not NULL). + // Implied are: ctr = 0, send_method = &LogMessage::SaveOrSendToLog. + LogMessage(const char* file, int line, LogSeverity severity, + std::vector* outvec); + + // Constructor where we also give a string pointer for storing the + // message (if the pointer is not NULL). Implied are: ctr = 0, + // send_method = &LogMessage::WriteToStringAndLog. + LogMessage(const char* file, int line, LogSeverity severity, + std::string* message); + + // A special constructor used for check failures + LogMessage(const char* file, int line, const CheckOpString& result); + + ~LogMessage(); + + // Flush a buffered message to the sink set in the constructor. Always + // called by the destructor, it may also be called from elsewhere if + // needed. Only the first call is actioned; any later ones are ignored. + void Flush(); + + // An arbitrary limit on the length of a single log message. This + // is so that streaming can be done more efficiently. 
+ static const size_t kMaxLogMessageLen; + + // Theses should not be called directly outside of logging.*, + // only passed as SendMethod arguments to other LogMessage methods: + void SendToLog(); // Actually dispatch to the logs + void SendToSyslogAndLog(); // Actually dispatch to syslog and the logs + + // Call abort() or similar to perform LOG(FATAL) crash. + static void Fail() @ac_cv___attribute___noreturn@; + + std::ostream& stream() { return *(data_->stream_); } + + int preserved_errno() const { return data_->preserved_errno_; } + + // Must be called without the log_mutex held. (L < log_mutex) + static int64 num_messages(int severity); + +private: + // Fully internal SendMethod cases: + void SendToSinkAndLog(); // Send to sink if provided and dispatch to the logs + void SendToSink(); // Send to sink if provided, do nothing otherwise. + + // Write to string if provided and dispatch to the logs. + void WriteToStringAndLog(); + + void SaveOrSendToLog(); // Save to stringvec if provided, else to logs + + void Init(const char* file, int line, LogSeverity severity, + void (LogMessage::*send_method)()); + + // Used to fill in crash information during LOG(FATAL) failures. + void RecordCrashReason(glog_internal_namespace_::CrashReason* reason); + + // Counts of messages sent at each priority: + static int64 num_messages_[NUM_SEVERITIES]; // under log_mutex + + // We keep the data in a separate struct so that each instance of + // LogMessage uses less stack space. + struct GOOGLE_GLOG_DLL_DECL LogMessageData { + LogMessageData() {}; + + int preserved_errno_; // preserved errno + char* buf_; + char* message_text_; // Complete message text (points to selected buffer) + LogStream* stream_alloc_; + LogStream* stream_; + char severity_; // What level is this LogMessage logged at? + int line_; // line number where logging call is. + void (LogMessage::*send_method_)(); // Call this in destructor to send + union { // At most one of these is used: union to keep the size low. + LogSink* sink_; // NULL or sink to send message to + std::vector* outvec_; // NULL or vector to push message onto + std::string* message_; // NULL or string to write message into + }; + time_t timestamp_; // Time of creation of LogMessage + struct ::tm tm_time_; // Time of creation of LogMessage + size_t num_prefix_chars_; // # of chars of prefix in this message + size_t num_chars_to_log_; // # of chars of msg to send to log + size_t num_chars_to_syslog_; // # of chars of msg to send to syslog + const char* basename_; // basename of file that called LOG + const char* fullname_; // fullname of file that called LOG + bool has_been_flushed_; // false => data has not been flushed + bool first_fatal_; // true => this was first fatal msg + + ~LogMessageData(); + private: + LogMessageData(const LogMessageData&); + void operator=(const LogMessageData&); + }; + + static LogMessageData fatal_msg_data_exclusive_; + static LogMessageData fatal_msg_data_shared_; + + LogMessageData* allocated_; + LogMessageData* data_; + + friend class LogDestination; + + LogMessage(const LogMessage&); + void operator=(const LogMessage&); +}; + +// This class happens to be thread-hostile because all instances share +// a single data buffer, but since it can only be created just before +// the process dies, we don't worry so much. 
+class GOOGLE_GLOG_DLL_DECL LogMessageFatal : public LogMessage {
+ public:
+  LogMessageFatal(const char* file, int line);
+  LogMessageFatal(const char* file, int line, const CheckOpString& result);
+  ~LogMessageFatal() @ac_cv___attribute___noreturn@;
+};
+
+// A non-macro interface to the log facility (useful
+// when the logging level is not a compile-time constant).
+inline void LogAtLevel(int const severity, std::string const &msg) {
+  LogMessage(__FILE__, __LINE__, severity).stream() << msg;
+}
+
+// A macro alternative of LogAtLevel. New code may want to use this
+// version since there are two advantages: 1. this version outputs the
+// file name and the line number where this macro is put like other
+// LOG macros, 2. this macro can be used as C++ stream.
+#define LOG_AT_LEVEL(severity) @ac_google_namespace@::LogMessage(__FILE__, __LINE__, severity).stream()
+
+// A small helper for CHECK_NOTNULL().
+template <typename T>
+T* CheckNotNull(const char *file, int line, const char *names, T* t) {
+  if (t == NULL) {
+    LogMessageFatal(file, line, new std::string(names));
+  }
+  return t;
+}
+
+// Allow folks to put a counter in the LOG_EVERY_X()'ed messages. This
+// only works if ostream is a LogStream. If the ostream is not a
+// LogStream you'll get an assert saying as much at runtime.
+GOOGLE_GLOG_DLL_DECL std::ostream& operator<<(std::ostream &os,
+                                              const PRIVATE_Counter&);
+
+
+// Derived class for PLOG*() above.
+class GOOGLE_GLOG_DLL_DECL ErrnoLogMessage : public LogMessage {
+ public:
+
+  ErrnoLogMessage(const char* file, int line, LogSeverity severity, int ctr,
+                  void (LogMessage::*send_method)());
+
+  // Postpends ": strerror(errno) [errno]".
+  ~ErrnoLogMessage();
+
+ private:
+  ErrnoLogMessage(const ErrnoLogMessage&);
+  void operator=(const ErrnoLogMessage&);
+};
+
+
+// This class is used to explicitly ignore values in the conditional
+// logging macros. This avoids compiler warnings like "value computed
+// is not used" and "statement has no effect".
+
+class GOOGLE_GLOG_DLL_DECL LogMessageVoidify {
+ public:
+  LogMessageVoidify() { }
+  // This has to be an operator with a precedence lower than << but
+  // higher than ?:
+  void operator&(std::ostream&) { }
+};
+
+
+// Flushes all log files that contain messages that are at least of
+// the specified severity level. Thread-safe.
+GOOGLE_GLOG_DLL_DECL void FlushLogFiles(LogSeverity min_severity);
+
+// Flushes all log files that contain messages that are at least of
+// the specified severity level. Thread-hostile because it ignores
+// locking -- used for catastrophic failures.
+GOOGLE_GLOG_DLL_DECL void FlushLogFilesUnsafe(LogSeverity min_severity);
+
+//
+// Set the destination to which a particular severity level of log
+// messages is sent. If base_filename is "", it means "don't log this
+// severity". Thread-safe.
+//
+GOOGLE_GLOG_DLL_DECL void SetLogDestination(LogSeverity severity,
+                                            const char* base_filename);
+
+//
+// Set the basename of the symlink to the latest log file at a given
+// severity. If symlink_basename is empty, do not make a symlink. If
+// you don't call this function, the symlink basename is the
+// invocation name of the program. Thread-safe.
+//
+GOOGLE_GLOG_DLL_DECL void SetLogSymlink(LogSeverity severity,
+                                        const char* symlink_basename);
+
+//
+// Used to send logs to some other kind of destination.
+// Users should subclass LogSink and override send to do whatever they want.
+// Implementations must be thread-safe because a shared instance will
+// be called from whichever thread ran the LOG(XXX) line.
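Editor's note: a sketch of the run-time-severity interfaces and destination setters declared above, placed before the LogSink class that follows; the paths are placeholders and the google namespace spelling assumes a stock glog configuration.

    #include <glog/logging.h>

    void ConfigureLogging(bool verbose) {
      google::SetLogDestination(google::INFO, "/tmp/myapp_info_");  // "" would disable this severity
      google::SetLogSymlink(google::INFO, "myapp");

      const int severity = verbose ? google::INFO : google::WARNING;
      LOG_AT_LEVEL(severity) << "severity chosen at run time, with file/line prefix";
      google::LogAtLevel(severity, "same idea via the inline function, without file/line");

      google::FlushLogFiles(google::INFO);
    }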
+class GOOGLE_GLOG_DLL_DECL LogSink {
+ public:
+  virtual ~LogSink();
+
+  // Sink's logging logic (message_len is such as to exclude '\n' at the end).
+  // This method can't use LOG() or CHECK() as logging system mutex(s) are held
+  // during this call.
+  virtual void send(LogSeverity severity, const char* full_filename,
+                    const char* base_filename, int line,
+                    const struct ::tm* tm_time,
+                    const char* message, size_t message_len) = 0;
+
+  // Redefine this to implement waiting for
+  // the sink's logging logic to complete.
+  // It will be called after each send() returns,
+  // but before that LogMessage exits or crashes.
+  // By default this function does nothing.
+  // Using this function one can implement complex logic for send()
+  // that itself involves logging; and do all this w/o causing deadlocks and
+  // inconsistent rearrangement of log messages.
+  // E.g. if a LogSink has thread-specific actions, the send() method
+  // can simply add the message to a queue and wake up another thread that
+  // handles real logging while itself making some LOG() calls;
+  // WaitTillSent() can be implemented to wait for that logic to complete.
+  // See our unittest for an example.
+  virtual void WaitTillSent();
+
+  // Returns the normal text output of the log message.
+  // Can be useful to implement send().
+  static std::string ToString(LogSeverity severity, const char* file, int line,
+                              const struct ::tm* tm_time,
+                              const char* message, size_t message_len);
+};
+
+// Add or remove a LogSink as a consumer of logging data. Thread-safe.
+GOOGLE_GLOG_DLL_DECL void AddLogSink(LogSink *destination);
+GOOGLE_GLOG_DLL_DECL void RemoveLogSink(LogSink *destination);
+
+//
+// Specify an "extension" added to the filename specified via
+// SetLogDestination. This applies to all severity levels. It's
+// often used to append the port we're listening on to the logfile
+// name. Thread-safe.
+//
+GOOGLE_GLOG_DLL_DECL void SetLogFilenameExtension(
+    const char* filename_extension);
+
+//
+// Make it so that all log messages of at least a particular severity
+// are logged to stderr (in addition to logging to the usual log
+// file(s)). Thread-safe.
+//
+GOOGLE_GLOG_DLL_DECL void SetStderrLogging(LogSeverity min_severity);
+
+//
+// Make it so that all log messages go only to stderr. Thread-safe.
+//
+GOOGLE_GLOG_DLL_DECL void LogToStderr();
+
+//
+// Make it so that all log messages of at least a particular severity are
+// logged via email to a list of addresses (in addition to logging to the
+// usual log file(s)). The list of addresses is just a string containing
+// the email addresses to send to (separated by spaces, say). Thread-safe.
+//
+GOOGLE_GLOG_DLL_DECL void SetEmailLogging(LogSeverity min_severity,
+                                          const char* addresses);
+
+// A simple function that sends email. dest is a comma-separated
+// list of addresses. Thread-safe.
+GOOGLE_GLOG_DLL_DECL bool SendEmail(const char *dest,
+                                    const char *subject, const char *body);
+
+GOOGLE_GLOG_DLL_DECL const std::vector<std::string>& GetLoggingDirectories();
+
+// For tests only: Clear the internal [cached] list of logging directories to
+// force a refresh the next time GetLoggingDirectories is called.
+// Thread-hostile.
+void TestOnly_ClearLoggingDirectoriesList();
+
+// Returns a set of existing temporary directories, which will be a
+// subset of the directories returned by GetLoggingDirectories().
+// Thread-safe.
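Editor's note: a minimal LogSink sketch against the interface above; it only re-emits the pre-formatted line on stderr and leaves WaitTillSent() at its default, so treat it as an illustration rather than a production sink. The google namespace and install-path include are assumed as before.

    #include <cstdio>
    #include <string>
    #include <glog/logging.h>

    class StderrMirrorSink : public google::LogSink {
     public:
      virtual void send(google::LogSeverity severity, const char* full_filename,
                        const char* base_filename, int line,
                        const struct ::tm* tm_time,
                        const char* message, size_t message_len) {
        // ToString() rebuilds the normal single-line form of the message.
        const std::string text =
            ToString(severity, base_filename, line, tm_time, message, message_len);
        fprintf(stderr, "[sink] %s\n", text.c_str());
      }
    };

    // Registration is explicit and symmetric:
    //   StderrMirrorSink sink;
    //   google::AddLogSink(&sink);
    //   ...
    //   google::RemoveLogSink(&sink);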
+GOOGLE_GLOG_DLL_DECL void GetExistingTempDirectories( + std::vector* list); + +// Print any fatal message again -- useful to call from signal handler +// so that the last thing in the output is the fatal message. +// Thread-hostile, but a race is unlikely. +GOOGLE_GLOG_DLL_DECL void ReprintFatalMessage(); + +// Truncate a log file that may be the append-only output of multiple +// processes and hence can't simply be renamed/reopened (typically a +// stdout/stderr). If the file "path" is > "limit" bytes, copy the +// last "keep" bytes to offset 0 and truncate the rest. Since we could +// be racing with other writers, this approach has the potential to +// lose very small amounts of data. For security, only follow symlinks +// if the path is /proc/self/fd/* +GOOGLE_GLOG_DLL_DECL void TruncateLogFile(const char *path, + int64 limit, int64 keep); + +// Truncate stdout and stderr if they are over the value specified by +// --max_log_size; keep the final 1MB. This function has the same +// race condition as TruncateLogFile. +GOOGLE_GLOG_DLL_DECL void TruncateStdoutStderr(); + +// Return the string representation of the provided LogSeverity level. +// Thread-safe. +GOOGLE_GLOG_DLL_DECL const char* GetLogSeverityName(LogSeverity severity); + +// --------------------------------------------------------------------- +// Implementation details that are not useful to most clients +// --------------------------------------------------------------------- + +// A Logger is the interface used by logging modules to emit entries +// to a log. A typical implementation will dump formatted data to a +// sequence of files. We also provide interfaces that will forward +// the data to another thread so that the invoker never blocks. +// Implementations should be thread-safe since the logging system +// will write to them from multiple threads. + +namespace base { + +class GOOGLE_GLOG_DLL_DECL Logger { + public: + virtual ~Logger(); + + // Writes "message[0,message_len-1]" corresponding to an event that + // occurred at "timestamp". If "force_flush" is true, the log file + // is flushed immediately. + // + // The input message has already been formatted as deemed + // appropriate by the higher level logging facility. For example, + // textual log messages already contain timestamps, and the + // file:linenumber header. + virtual void Write(bool force_flush, + time_t timestamp, + const char* message, + int message_len) = 0; + + // Flush any buffered messages + virtual void Flush() = 0; + + // Get the current LOG file size. + // The returned value is approximate since some + // logged data may not have been flushed to disk yet. + virtual uint32 LogSize() = 0; +}; + +// Get the logger for the specified severity level. The logger +// remains the property of the logging module and should not be +// deleted by the caller. Thread-safe. +extern GOOGLE_GLOG_DLL_DECL Logger* GetLogger(LogSeverity level); + +// Set the logger for the specified severity level. The logger +// becomes the property of the logging module and should not +// be deleted by the caller. Thread-safe. +extern GOOGLE_GLOG_DLL_DECL void SetLogger(LogSeverity level, Logger* logger); + +} + +// glibc has traditionally implemented two incompatible versions of +// strerror_r(). There is a poorly defined convention for picking the +// version that we want, but it is not clear whether it even works with +// all versions of glibc. 
+// So, instead, we provide this wrapper that automatically detects the +// version that is in use, and then implements POSIX semantics. +// N.B. In addition to what POSIX says, we also guarantee that "buf" will +// be set to an empty string, if this function failed. This means, in most +// cases, you do not need to check the error code and you can directly +// use the value of "buf". It will never have an undefined value. +GOOGLE_GLOG_DLL_DECL int posix_strerror_r(int err, char *buf, size_t len); + + +// A class for which we define operator<<, which does nothing. +class GOOGLE_GLOG_DLL_DECL NullStream : public LogMessage::LogStream { + public: + // Initialize the LogStream so the messages can be written somewhere + // (they'll never be actually displayed). This will be needed if a + // NullStream& is implicitly converted to LogStream&, in which case + // the overloaded NullStream::operator<< will not be invoked. + NullStream() : LogMessage::LogStream(message_buffer_, 1, 0) { } + NullStream(const char* /*file*/, int /*line*/, + const CheckOpString& /*result*/) : + LogMessage::LogStream(message_buffer_, 1, 0) { } + NullStream &stream() { return *this; } + private: + // A very short buffer for messages (which we discard anyway). This + // will be needed if NullStream& converted to LogStream& (e.g. as a + // result of a conditional expression). + char message_buffer_[2]; +}; + +// Do nothing. This operator is inline, allowing the message to be +// compiled away. The message will not be compiled away if we do +// something like (flag ? LOG(INFO) : LOG(ERROR)) << message; when +// SKIP_LOG=WARNING. In those cases, NullStream will be implicitly +// converted to LogStream and the message will be computed and then +// quietly discarded. +template +inline NullStream& operator<<(NullStream &str, const T &value) { return str; } + +// Similar to NullStream, but aborts the program (without stack +// trace), like LogMessageFatal. +class GOOGLE_GLOG_DLL_DECL NullStreamFatal : public NullStream { + public: + NullStreamFatal() { } + NullStreamFatal(const char* file, int line, const CheckOpString& result) : + NullStream(file, line, result) { } + @ac_cv___attribute___noreturn@ ~NullStreamFatal() { _exit(1); } +}; + +// Install a signal handler that will dump signal information and a stack +// trace when the program crashes on certain signals. We'll install the +// signal handler for the following signals. +// +// SIGSEGV, SIGILL, SIGFPE, SIGABRT, SIGBUS, and SIGTERM. +// +// By default, the signal handler will write the failure dump to the +// standard error. You can customize the destination by installing your +// own writer function by InstallFailureWriter() below. +// +// Note on threading: +// +// The function should be called before threads are created, if you want +// to use the failure signal handler for all threads. The stack trace +// will be shown only for the thread that receives the signal. In other +// words, stack traces of other threads won't be shown. +GOOGLE_GLOG_DLL_DECL void InstallFailureSignalHandler(); + +// Installs a function that is used for writing the failure dump. "data" +// is the pointer to the beginning of a message to be written, and "size" +// is the size of the message. You should not expect the data is +// terminated with '\0'. 
+GOOGLE_GLOG_DLL_DECL void InstallFailureWriter( + void (*writer)(const char* data, int size)); + +@ac_google_end_namespace@ + +#endif // _LOGGING_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/glog/raw_logging.h.in b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/glog/raw_logging.h.in new file mode 100644 index 0000000000..5378cd467b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/glog/raw_logging.h.in @@ -0,0 +1,185 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Maxim Lifantsev +// +// Thread-safe logging routines that do not allocate any memory or +// acquire any locks, and can therefore be used by low-level memory +// allocation and synchronization code. + +#ifndef BASE_RAW_LOGGING_H_ +#define BASE_RAW_LOGGING_H_ + +#include + +@ac_google_start_namespace@ + +#include "glog/log_severity.h" +#include "glog/vlog_is_on.h" + +// Annoying stuff for windows -- makes sure clients can import these functions +#ifndef GOOGLE_GLOG_DLL_DECL +# if defined(_WIN32) && !defined(__CYGWIN__) +# define GOOGLE_GLOG_DLL_DECL __declspec(dllimport) +# else +# define GOOGLE_GLOG_DLL_DECL +# endif +#endif + +// This is similar to LOG(severity) << format... and VLOG(level) << format.., +// but +// * it is to be used ONLY by low-level modules that can't use normal LOG() +// * it is desiged to be a low-level logger that does not allocate any +// memory and does not need any locks, hence: +// * it logs straight and ONLY to STDERR w/o buffering +// * it uses an explicit format and arguments list +// * it will silently chop off really long message strings +// Usage example: +// RAW_LOG(ERROR, "Failed foo with %i: %s", status, error); +// RAW_VLOG(3, "status is %i", status); +// These will print an almost standard log lines like this to stderr only: +// E0821 211317 file.cc:123] RAW: Failed foo with 22: bad_file +// I0821 211317 file.cc:142] RAW: status is 20 +#define RAW_LOG(severity, ...) 
\ + do { \ + switch (@ac_google_namespace@::severity) { \ + case 0: \ + RAW_LOG_INFO(__VA_ARGS__); \ + break; \ + case 1: \ + RAW_LOG_WARNING(__VA_ARGS__); \ + break; \ + case 2: \ + RAW_LOG_ERROR(__VA_ARGS__); \ + break; \ + case 3: \ + RAW_LOG_FATAL(__VA_ARGS__); \ + break; \ + default: \ + break; \ + } \ + } while (0) + +// The following STRIP_LOG testing is performed in the header file so that it's +// possible to completely compile out the logging code and the log messages. +#if STRIP_LOG == 0 +#define RAW_VLOG(verboselevel, ...) \ + do { \ + if (VLOG_IS_ON(verboselevel)) { \ + RAW_LOG_INFO(__VA_ARGS__); \ + } \ + } while (0) +#else +#define RAW_VLOG(verboselevel, ...) RawLogStub__(0, __VA_ARGS__) +#endif // STRIP_LOG == 0 + +#if STRIP_LOG == 0 +#define RAW_LOG_INFO(...) @ac_google_namespace@::RawLog__(@ac_google_namespace@::INFO, \ + __FILE__, __LINE__, __VA_ARGS__) +#else +#define RAW_LOG_INFO(...) @ac_google_namespace@::RawLogStub__(0, __VA_ARGS__) +#endif // STRIP_LOG == 0 + +#if STRIP_LOG <= 1 +#define RAW_LOG_WARNING(...) @ac_google_namespace@::RawLog__(@ac_google_namespace@::WARNING, \ + __FILE__, __LINE__, __VA_ARGS__) +#else +#define RAW_LOG_WARNING(...) @ac_google_namespace@::RawLogStub__(0, __VA_ARGS__) +#endif // STRIP_LOG <= 1 + +#if STRIP_LOG <= 2 +#define RAW_LOG_ERROR(...) @ac_google_namespace@::RawLog__(@ac_google_namespace@::ERROR, \ + __FILE__, __LINE__, __VA_ARGS__) +#else +#define RAW_LOG_ERROR(...) @ac_google_namespace@::RawLogStub__(0, __VA_ARGS__) +#endif // STRIP_LOG <= 2 + +#if STRIP_LOG <= 3 +#define RAW_LOG_FATAL(...) @ac_google_namespace@::RawLog__(@ac_google_namespace@::FATAL, \ + __FILE__, __LINE__, __VA_ARGS__) +#else +#define RAW_LOG_FATAL(...) \ + do { \ + @ac_google_namespace@::RawLogStub__(0, __VA_ARGS__); \ + exit(1); \ + } while (0) +#endif // STRIP_LOG <= 3 + +// Similar to CHECK(condition) << message, +// but for low-level modules: we use only RAW_LOG that does not allocate memory. +// We do not want to provide args list here to encourage this usage: +// if (!cond) RAW_LOG(FATAL, "foo ...", hard_to_compute_args); +// so that the args are not computed when not needed. +#define RAW_CHECK(condition, message) \ + do { \ + if (!(condition)) { \ + RAW_LOG(FATAL, "Check %s failed: %s", #condition, message); \ + } \ + } while (0) + +// Debug versions of RAW_LOG and RAW_CHECK +#ifndef NDEBUG + +#define RAW_DLOG(severity, ...) RAW_LOG(severity, __VA_ARGS__) +#define RAW_DCHECK(condition, message) RAW_CHECK(condition, message) + +#else // NDEBUG + +#define RAW_DLOG(severity, ...) \ + while (false) \ + RAW_LOG(severity, __VA_ARGS__) +#define RAW_DCHECK(condition, message) \ + while (false) \ + RAW_CHECK(condition, message) + +#endif // NDEBUG + +// Stub log function used to work around for unused variable warnings when +// building with STRIP_LOG > 0. +static inline void RawLogStub__(int ignored, ...) { +} + +// Helper function to implement RAW_LOG and RAW_VLOG +// Logs format... at "severity" level, reporting it +// as called from file:line. +// This does not allocate memory or acquire locks. +GOOGLE_GLOG_DLL_DECL void RawLog__(LogSeverity severity, + const char* file, + int line, + const char* format, ...) + @ac_cv___attribute___printf_4_5@; + +// Hack to propagate time information into this module so that +// this module does not have to directly call localtime_r(), +// which could allocate memory. 
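Editor's note: a short sketch of the async-signal-friendly macros above, for code such as a low-level allocator or a signal handler where ordinary LOG() must not be used; status and the message text are placeholders, and the installed header path is assumed.

    #include <glog/raw_logging.h>

    void OnLowLevelFailure(int status) {
      RAW_LOG(ERROR, "Failed foo with %i: %s", status, "bad_file");
      RAW_VLOG(3, "status is %i", status);
      RAW_CHECK(status != -1, "status must be initialized");
      RAW_DCHECK(status >= 0, "debug-only variant");
    }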
+GOOGLE_GLOG_DLL_DECL void RawLog__SetLastTime(const struct tm& t, int usecs); + +@ac_google_end_namespace@ + +#endif // BASE_RAW_LOGGING_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/glog/stl_logging.h.in b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/glog/stl_logging.h.in new file mode 100644 index 0000000000..f09e08718e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/glog/stl_logging.h.in @@ -0,0 +1,154 @@ +// Copyright (c) 2003, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Stream output operators for STL containers; to be used for logging *only*. +// Inclusion of this file lets you do: +// +// list x; +// LOG(INFO) << "data: " << x; +// vector v1, v2; +// CHECK_EQ(v1, v2); +// +// Note that if you want to use these operators from the non-global namespace, +// you may get an error since they are not in namespace std (and they are not +// in namespace std since that would result in undefined behavior). You may +// need to write +// +// using ::operator<<; +// +// to fix these errors. + +#ifndef UTIL_GTL_STL_LOGGING_INL_H_ +#define UTIL_GTL_STL_LOGGING_INL_H_ + +#if !@ac_cv_cxx_using_operator@ +# error We do not support stl_logging for this compiler +#endif + +#include +#include +#include +#include +#include +#include +#include + +#ifdef __GNUC__ +# include +# include +# include +#endif + +template +inline std::ostream& operator<<(std::ostream& out, + const std::pair& p) { + out << '(' << p.first << ", " << p.second << ')'; + return out; +} + +@ac_google_start_namespace@ + +template +inline void PrintSequence(std::ostream& out, Iter begin, Iter end) { + using ::operator<<; + // Output at most 100 elements -- appropriate if used for logging. 
+ for (int i = 0; begin != end && i < 100; ++i, ++begin) { + if (i > 0) out << ' '; + out << *begin; + } + if (begin != end) { + out << " ..."; + } +} + +@ac_google_end_namespace@ + +#define OUTPUT_TWO_ARG_CONTAINER(Sequence) \ +template \ +inline std::ostream& operator<<(std::ostream& out, \ + const Sequence& seq) { \ + @ac_google_namespace@::PrintSequence(out, seq.begin(), seq.end()); \ + return out; \ +} + +OUTPUT_TWO_ARG_CONTAINER(std::vector) +OUTPUT_TWO_ARG_CONTAINER(std::deque) +OUTPUT_TWO_ARG_CONTAINER(std::list) +#ifdef __GNUC__ +OUTPUT_TWO_ARG_CONTAINER(__gnu_cxx::slist) +#endif + +#undef OUTPUT_TWO_ARG_CONTAINER + +#define OUTPUT_THREE_ARG_CONTAINER(Sequence) \ +template \ +inline std::ostream& operator<<(std::ostream& out, \ + const Sequence& seq) { \ + @ac_google_namespace@::PrintSequence(out, seq.begin(), seq.end()); \ + return out; \ +} + +OUTPUT_THREE_ARG_CONTAINER(std::set) +OUTPUT_THREE_ARG_CONTAINER(std::multiset) + +#undef OUTPUT_THREE_ARG_CONTAINER + +#define OUTPUT_FOUR_ARG_CONTAINER(Sequence) \ +template \ +inline std::ostream& operator<<(std::ostream& out, \ + const Sequence& seq) { \ + @ac_google_namespace@::PrintSequence(out, seq.begin(), seq.end()); \ + return out; \ +} + +OUTPUT_FOUR_ARG_CONTAINER(std::map) +OUTPUT_FOUR_ARG_CONTAINER(std::multimap) +#ifdef __GNUC__ +OUTPUT_FOUR_ARG_CONTAINER(__gnu_cxx::hash_set) +OUTPUT_FOUR_ARG_CONTAINER(__gnu_cxx::hash_multiset) +#endif + +#undef OUTPUT_FOUR_ARG_CONTAINER + +#define OUTPUT_FIVE_ARG_CONTAINER(Sequence) \ +template \ +inline std::ostream& operator<<(std::ostream& out, \ + const Sequence& seq) { \ + @ac_google_namespace@::PrintSequence(out, seq.begin(), seq.end()); \ + return out; \ +} + +#ifdef __GNUC__ +OUTPUT_FIVE_ARG_CONTAINER(__gnu_cxx::hash_map) +OUTPUT_FIVE_ARG_CONTAINER(__gnu_cxx::hash_multimap) +#endif + +#undef OUTPUT_FIVE_ARG_CONTAINER + +#endif // UTIL_GTL_STL_LOGGING_INL_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/glog/vlog_is_on.h.in b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/glog/vlog_is_on.h.in new file mode 100644 index 0000000000..3f4c4a32a8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/glog/vlog_is_on.h.in @@ -0,0 +1,129 @@ +// Copyright (c) 1999, 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Ray Sidney and many others +// +// Defines the VLOG_IS_ON macro that controls the variable-verbosity +// conditional logging. +// +// It's used by VLOG and VLOG_IF in logging.h +// and by RAW_VLOG in raw_logging.h to trigger the logging. +// +// It can also be used directly e.g. like this: +// if (VLOG_IS_ON(2)) { +// // do some logging preparation and logging +// // that can't be accomplished e.g. via just VLOG(2) << ...; +// } +// +// The truth value that VLOG_IS_ON(level) returns is determined by +// the three verbosity level flags: +// --v= Gives the default maximal active V-logging level; +// 0 is the default. +// Normally positive values are used for V-logging levels. +// --vmodule= Gives the per-module maximal V-logging levels to override +// the value given by --v. +// E.g. "my_module=2,foo*=3" would change the logging level +// for all code in source files "my_module.*" and "foo*.*" +// ("-inl" suffixes are also disregarded for this matching). +// +// SetVLOGLevel helper function is provided to do limited dynamic control over +// V-logging by overriding the per-module settings given via --vmodule flag. +// +// CAVEAT: --vmodule functionality is not available in non gcc compilers. +// + +#ifndef BASE_VLOG_IS_ON_H_ +#define BASE_VLOG_IS_ON_H_ + +#include "glog/log_severity.h" + +// Annoying stuff for windows -- makes sure clients can import these functions +#ifndef GOOGLE_GLOG_DLL_DECL +# if defined(_WIN32) && !defined(__CYGWIN__) +# define GOOGLE_GLOG_DLL_DECL __declspec(dllimport) +# else +# define GOOGLE_GLOG_DLL_DECL +# endif +#endif + +#if defined(__GNUC__) +// We emit an anonymous static int* variable at every VLOG_IS_ON(n) site. +// (Normally) the first time every VLOG_IS_ON(n) site is hit, +// we determine what variable will dynamically control logging at this site: +// it's either FLAGS_v or an appropriate internal variable +// matching the current source file that represents results of +// parsing of --vmodule flag and/or SetVLOGLevel calls. +#define VLOG_IS_ON(verboselevel) \ + __extension__ \ + ({ static @ac_google_namespace@::int32* vlocal__ = &@ac_google_namespace@::kLogSiteUninitialized; \ + @ac_google_namespace@::int32 verbose_level__ = (verboselevel); \ + (*vlocal__ >= verbose_level__) && \ + ((vlocal__ != &@ac_google_namespace@::kLogSiteUninitialized) || \ + (@ac_google_namespace@::InitVLOG3__(&vlocal__, &FLAGS_v, \ + __FILE__, verbose_level__))); }) +#else +// GNU extensions not available, so we do not support --vmodule. +// Dynamic value of FLAGS_v always controls the logging level. +#define VLOG_IS_ON(verboselevel) (FLAGS_v >= (verboselevel)) +#endif + +// Set VLOG(_IS_ON) level for module_pattern to log_level. +// This lets us dynamically control what is normally set by the --vmodule flag. +// Returns the level that previously applied to module_pattern. 
+// NOTE: To change the log level for VLOG(_IS_ON) sites +// that have already executed after/during InitGoogleLogging, +// one needs to supply the exact --vmodule pattern that applied to them. +// (If no --vmodule pattern applied to them +// the value of FLAGS_v will continue to control them.) +extern GOOGLE_GLOG_DLL_DECL int SetVLOGLevel(const char* module_pattern, + int log_level); + +// Various declarations needed for VLOG_IS_ON above: ========================= + +// Special value used to indicate that a VLOG_IS_ON site has not been +// initialized. We make this a large value, so the common-case check +// of "*vlocal__ >= verbose_level__" in VLOG_IS_ON definition +// passes in such cases and InitVLOG3__ is then triggered. +extern @ac_google_namespace@::int32 kLogSiteUninitialized; + +// Helper routine which determines the logging info for a particalur VLOG site. +// site_flag is the address of the site-local pointer to the controlling +// verbosity level +// site_default is the default to use for *site_flag +// fname is the current source file name +// verbose_level is the argument to VLOG_IS_ON +// We will return the return value for VLOG_IS_ON +// and if possible set *site_flag appropriately. +extern GOOGLE_GLOG_DLL_DECL bool InitVLOG3__( + @ac_google_namespace@::int32** site_flag, + @ac_google_namespace@::int32* site_default, + const char* fname, + @ac_google_namespace@::int32 verbose_level); + +#endif // BASE_VLOG_IS_ON_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/googletest.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/googletest.h new file mode 100644 index 0000000000..c9e413e196 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/googletest.h @@ -0,0 +1,604 @@ +// Copyright (c) 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Shinichiro Hamaji +// (based on googletest: http://code.google.com/p/googletest/) + +#ifdef GOOGLETEST_H__ +#error You must not include this file twice. 
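A minimal sketch of the verbosity controls declared above, assuming glog has been initialized elsewhere (the function, messages, and module pattern are invented):

    #include <glog/logging.h>   // pulls in the generated vlog_is_on.h

    void DecodeFrame(int frame) {
      VLOG(1) << "decoding frame " << frame;   // emitted only when --v >= 1 or a --vmodule pattern matches
      if (VLOG_IS_ON(2)) {
        // expensive preparation that a bare VLOG(2) << ... could not express
        VLOG(2) << "frame " << frame << " histogram ready";
      }
    }

    // At runtime, override --vmodule for matching source files:
    // google::SetVLOGLevel("decode*", 2);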
+#endif +#define GOOGLETEST_H__ + +#include "utilities.h" + +#include +#include +#include + +#include +#include +#include +#include + +#include +#include + +#include +#include +#include +#ifdef HAVE_UNISTD_H +# include +#endif + +#include "base/commandlineflags.h" + +using std::map; +using std::string; +using std::vector; + +_START_GOOGLE_NAMESPACE_ + +extern GOOGLE_GLOG_DLL_DECL void (*g_logging_fail_func)(); + +_END_GOOGLE_NAMESPACE_ + +#undef GOOGLE_GLOG_DLL_DECL +#define GOOGLE_GLOG_DLL_DECL + +static inline string GetTempDir() { +#ifndef OS_WINDOWS + return "/tmp"; +#else + char tmp[MAX_PATH]; + GetTempPathA(MAX_PATH, tmp); + return tmp; +#endif +} + +#ifdef OS_WINDOWS +// The test will run in glog/vsproject/ +// (e.g., glog/vsproject/logging_unittest). +static const char TEST_SRC_DIR[] = "../.."; +#elif !defined(TEST_SRC_DIR) +# warning TEST_SRC_DIR should be defined in config.h +static const char TEST_SRC_DIR[] = "."; +#endif + +DEFINE_string(test_tmpdir, GetTempDir(), "Dir we use for temp files"); +DEFINE_string(test_srcdir, TEST_SRC_DIR, + "Source-dir root, needed to find glog_unittest_flagfile"); +DEFINE_bool(run_benchmark, false, "If true, run benchmarks"); +#ifdef NDEBUG +DEFINE_int32(benchmark_iters, 100000000, "Number of iterations per benchmark"); +#else +DEFINE_int32(benchmark_iters, 100000, "Number of iterations per benchmark"); +#endif + +#ifdef HAVE_LIB_GTEST +# include +// Use our ASSERT_DEATH implementation. +# undef ASSERT_DEATH +# undef ASSERT_DEBUG_DEATH +using testing::InitGoogleTest; +#else + +_START_GOOGLE_NAMESPACE_ + +void InitGoogleTest(int* argc, char** argv) {} + +// The following is some bare-bones testing infrastructure + +#define EXPECT_TRUE(cond) \ + do { \ + if (!(cond)) { \ + fprintf(stderr, "Check failed: %s\n", #cond); \ + exit(1); \ + } \ + } while (0) + +#define EXPECT_FALSE(cond) EXPECT_TRUE(!(cond)) + +#define EXPECT_OP(op, val1, val2) \ + do { \ + if (!((val1) op (val2))) { \ + fprintf(stderr, "Check failed: %s %s %s\n", #val1, #op, #val2); \ + exit(1); \ + } \ + } while (0) + +#define EXPECT_EQ(val1, val2) EXPECT_OP(==, val1, val2) +#define EXPECT_NE(val1, val2) EXPECT_OP(!=, val1, val2) +#define EXPECT_GT(val1, val2) EXPECT_OP(>, val1, val2) +#define EXPECT_LT(val1, val2) EXPECT_OP(<, val1, val2) + +#define EXPECT_NAN(arg) \ + do { \ + if (!isnan(arg)) { \ + fprintf(stderr, "Check failed: isnan(%s)\n", #arg); \ + exit(1); \ + } \ + } while (0) + +#define EXPECT_INF(arg) \ + do { \ + if (!isinf(arg)) { \ + fprintf(stderr, "Check failed: isinf(%s)\n", #arg); \ + exit(1); \ + } \ + } while (0) + +#define EXPECT_DOUBLE_EQ(val1, val2) \ + do { \ + if (((val1) < (val2) - 0.001 || (val1) > (val2) + 0.001)) { \ + fprintf(stderr, "Check failed: %s == %s\n", #val1, #val2); \ + exit(1); \ + } \ + } while (0) + +#define EXPECT_STREQ(val1, val2) \ + do { \ + if (strcmp((val1), (val2)) != 0) { \ + fprintf(stderr, "Check failed: streq(%s, %s)\n", #val1, #val2); \ + exit(1); \ + } \ + } while (0) + +vector g_testlist; // the tests to run + +#define TEST(a, b) \ + struct Test_##a##_##b { \ + Test_##a##_##b() { g_testlist.push_back(&Run); } \ + static void Run() { FlagSaver fs; RunTest(); } \ + static void RunTest(); \ + }; \ + static Test_##a##_##b g_test_##a##_##b; \ + void Test_##a##_##b::RunTest() + + +static inline int RUN_ALL_TESTS() { + vector::const_iterator it; + for (it = g_testlist.begin(); it != g_testlist.end(); ++it) { + (*it)(); + } + fprintf(stderr, "Passed %d tests\n\nPASS\n", (int)g_testlist.size()); + return 0; +} + +_END_GOOGLE_NAMESPACE_ + 
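To show how the bare-bones fallback above is exercised when HAVE_LIB_GTEST is not defined (the suite and test names are invented; glog's own unittests bring the google namespace into scope before doing this):

    TEST(MiniFramework, Arithmetic) {
      EXPECT_EQ(4, 2 + 2);
      EXPECT_LT(1, 2);
      EXPECT_STREQ("glog", "glog");
    }

    int main(int argc, char** argv) {
      InitGoogleTest(&argc, argv);   // a no-op stub in this fallback mode
      return RUN_ALL_TESTS();        // walks g_testlist and prints "PASS" on success
    }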
+#endif // ! HAVE_LIB_GTEST + +_START_GOOGLE_NAMESPACE_ + +static bool g_called_abort; +static jmp_buf g_jmp_buf; +static inline void CalledAbort() { + g_called_abort = true; + longjmp(g_jmp_buf, 1); +} + +#ifdef OS_WINDOWS +// TODO(hamaji): Death test somehow doesn't work in Windows. +#define ASSERT_DEATH(fn, msg) +#else +#define ASSERT_DEATH(fn, msg) \ + do { \ + g_called_abort = false; \ + /* in logging.cc */ \ + void (*original_logging_fail_func)() = g_logging_fail_func; \ + g_logging_fail_func = &CalledAbort; \ + if (!setjmp(g_jmp_buf)) fn; \ + /* set back to their default */ \ + g_logging_fail_func = original_logging_fail_func; \ + if (!g_called_abort) { \ + fprintf(stderr, "Function didn't die (%s): %s\n", msg, #fn); \ + exit(1); \ + } \ + } while (0) +#endif + +#ifdef NDEBUG +#define ASSERT_DEBUG_DEATH(fn, msg) +#else +#define ASSERT_DEBUG_DEATH(fn, msg) ASSERT_DEATH(fn, msg) +#endif // NDEBUG + +// Benchmark tools. + +#define BENCHMARK(n) static BenchmarkRegisterer __benchmark_ ## n (#n, &n); + +map g_benchlist; // the benchmarks to run + +class BenchmarkRegisterer { + public: + BenchmarkRegisterer(const char* name, void (*function)(int iters)) { + EXPECT_TRUE(g_benchlist.insert(std::make_pair(name, function)).second); + } +}; + +static inline void RunSpecifiedBenchmarks() { + if (!FLAGS_run_benchmark) { + return; + } + + int iter_cnt = FLAGS_benchmark_iters; + puts("Benchmark\tTime(ns)\tIterations"); + for (map::const_iterator iter = g_benchlist.begin(); + iter != g_benchlist.end(); + ++iter) { + clock_t start = clock(); + iter->second(iter_cnt); + double elapsed_ns = + ((double)clock() - start) / CLOCKS_PER_SEC * 1000*1000*1000; + printf("%s\t%8.2lf\t%10d\n", + iter->first.c_str(), elapsed_ns / iter_cnt, iter_cnt); + } + puts(""); +} + +// ---------------------------------------------------------------------- +// Golden file functions +// ---------------------------------------------------------------------- + +class CapturedStream { + public: + CapturedStream(int fd, const string & filename) : + fd_(fd), + uncaptured_fd_(-1), + filename_(filename) { + Capture(); + } + + ~CapturedStream() { + if (uncaptured_fd_ != -1) { + CHECK(close(uncaptured_fd_) != -1); + } + } + + // Start redirecting output to a file + void Capture() { + // Keep original stream for later + CHECK(uncaptured_fd_ == -1) << ", Stream " << fd_ << " already captured!"; + uncaptured_fd_ = dup(fd_); + CHECK(uncaptured_fd_ != -1); + + // Open file to save stream to + int cap_fd = open(filename_.c_str(), + O_CREAT | O_TRUNC | O_WRONLY, + S_IRUSR | S_IWUSR); + CHECK(cap_fd != -1); + + // Send stdout/stderr to this file + fflush(NULL); + CHECK(dup2(cap_fd, fd_) != -1); + CHECK(close(cap_fd) != -1); + } + + // Remove output redirection + void StopCapture() { + // Restore original stream + if (uncaptured_fd_ != -1) { + fflush(NULL); + CHECK(dup2(uncaptured_fd_, fd_) != -1); + } + } + + const string & filename() const { return filename_; } + + private: + int fd_; // file descriptor being captured + int uncaptured_fd_; // where the stream was originally being sent to + string filename_; // file where stream is being saved +}; +static CapturedStream * s_captured_streams[STDERR_FILENO+1]; +// Redirect a file descriptor to a file. 
+// fd - Should be STDOUT_FILENO or STDERR_FILENO +// filename - File where output should be stored +static inline void CaptureTestOutput(int fd, const string & filename) { + CHECK((fd == STDOUT_FILENO) || (fd == STDERR_FILENO)); + CHECK(s_captured_streams[fd] == NULL); + s_captured_streams[fd] = new CapturedStream(fd, filename); +} +static inline void CaptureTestStderr() { + CaptureTestOutput(STDERR_FILENO, FLAGS_test_tmpdir + "/captured.err"); +} +// Return the size (in bytes) of a file +static inline size_t GetFileSize(FILE * file) { + fseek(file, 0, SEEK_END); + return static_cast(ftell(file)); +} +// Read the entire content of a file as a string +static inline string ReadEntireFile(FILE * file) { + const size_t file_size = GetFileSize(file); + char * const buffer = new char[file_size]; + + size_t bytes_last_read = 0; // # of bytes read in the last fread() + size_t bytes_read = 0; // # of bytes read so far + + fseek(file, 0, SEEK_SET); + + // Keep reading the file until we cannot read further or the + // pre-determined file size is reached. + do { + bytes_last_read = fread(buffer+bytes_read, 1, file_size-bytes_read, file); + bytes_read += bytes_last_read; + } while (bytes_last_read > 0 && bytes_read < file_size); + + const string content = string(buffer, buffer+bytes_read); + delete[] buffer; + + return content; +} +// Get the captured stdout (when fd is STDOUT_FILENO) or stderr (when +// fd is STDERR_FILENO) as a string +static inline string GetCapturedTestOutput(int fd) { + CHECK(fd == STDOUT_FILENO || fd == STDERR_FILENO); + CapturedStream * const cap = s_captured_streams[fd]; + CHECK(cap) + << ": did you forget CaptureTestStdout() or CaptureTestStderr()?"; + + // Make sure everything is flushed. + cap->StopCapture(); + + // Read the captured file. + FILE * const file = fopen(cap->filename().c_str(), "r"); + const string content = ReadEntireFile(file); + fclose(file); + + delete cap; + s_captured_streams[fd] = NULL; + + return content; +} +// Get the captured stderr of a test as a string. +static inline string GetCapturedTestStderr() { + return GetCapturedTestOutput(STDERR_FILENO); +} + +// Check if the string is [IWEF](\d{4}|DATE) +static inline bool IsLoggingPrefix(const string& s) { + if (s.size() != 5) return false; + if (!strchr("IWEF", s[0])) return false; + for (int i = 1; i <= 4; ++i) { + if (!isdigit(s[i]) && s[i] != "DATE"[i-1]) return false; + } + return true; +} + +// Convert log output into normalized form. +// +// Example: +// I0102 030405 logging_unittest.cc:345] RAW: vlog -1 +// => IDATE TIME__ logging_unittest.cc:LINE] RAW: vlog -1 +static inline string MungeLine(const string& line) { + std::istringstream iss(line); + string before, logcode_date, time, thread_lineinfo; + iss >> logcode_date; + while (!IsLoggingPrefix(logcode_date)) { + before += " " + logcode_date; + if (!(iss >> logcode_date)) { + // We cannot find the header of log output. + return before; + } + } + if (!before.empty()) before += " "; + iss >> time; + iss >> thread_lineinfo; + CHECK(!thread_lineinfo.empty()); + if (thread_lineinfo[thread_lineinfo.size() - 1] != ']') { + // We found thread ID. 
+ string tmp; + iss >> tmp; + CHECK(!tmp.empty()); + CHECK_EQ(']', tmp[tmp.size() - 1]); + thread_lineinfo = "THREADID " + tmp; + } + size_t index = thread_lineinfo.find(':'); + CHECK_NE(string::npos, index); + thread_lineinfo = thread_lineinfo.substr(0, index+1) + "LINE]"; + string rest; + std::getline(iss, rest); + return (before + logcode_date[0] + "DATE TIME__ " + thread_lineinfo + + MungeLine(rest)); +} + +static inline void StringReplace(string* str, + const string& oldsub, + const string& newsub) { + size_t pos = str->find(oldsub); + if (pos != string::npos) { + str->replace(pos, oldsub.size(), newsub.c_str()); + } +} + +static inline string Munge(const string& filename) { + FILE* fp = fopen(filename.c_str(), "rb"); + CHECK(fp != NULL) << filename << ": couldn't open"; + char buf[4096]; + string result; + while (fgets(buf, 4095, fp)) { + string line = MungeLine(buf); + char null_str[256]; + sprintf(null_str, "%p", static_cast(NULL)); + StringReplace(&line, "__NULLP__", null_str); + // Remove 0x prefix produced by %p. VC++ doesn't put the prefix. + StringReplace(&line, " 0x", " "); + + char errmsg_buf[100]; + posix_strerror_r(0, errmsg_buf, sizeof(errmsg_buf)); + if (*errmsg_buf == '\0') { + // MacOSX 10.4 and FreeBSD return empty string for errno=0. + // In such case, the we need to remove an extra space. + StringReplace(&line, "__SUCCESS__ ", ""); + } else { + StringReplace(&line, "__SUCCESS__", errmsg_buf); + } + StringReplace(&line, "__ENOENT__", strerror(ENOENT)); + StringReplace(&line, "__EINTR__", strerror(EINTR)); + StringReplace(&line, "__ENXIO__", strerror(ENXIO)); + StringReplace(&line, "__ENOEXEC__", strerror(ENOEXEC)); + result += line + "\n"; + } + fclose(fp); + return result; +} + +static inline void WriteToFile(const string& body, const string& file) { + FILE* fp = fopen(file.c_str(), "wb"); + fwrite(body.data(), 1, body.size(), fp); + fclose(fp); +} + +static inline bool MungeAndDiffTestStderr(const string& golden_filename) { + CapturedStream* cap = s_captured_streams[STDERR_FILENO]; + CHECK(cap) << ": did you forget CaptureTestStderr()?"; + + cap->StopCapture(); + + // Run munge + const string captured = Munge(cap->filename()); + const string golden = Munge(golden_filename); + if (captured != golden) { + fprintf(stderr, + "Test with golden file failed. We'll try to show the diff:\n"); + string munged_golden = golden_filename + ".munged"; + WriteToFile(golden, munged_golden); + string munged_captured = cap->filename() + ".munged"; + WriteToFile(captured, munged_captured); + string diffcmd("diff -u " + munged_golden + " " + munged_captured); + if (system(diffcmd.c_str()) != 0) { + fprintf(stderr, "diff command was failed.\n"); + } + unlink(munged_golden.c_str()); + unlink(munged_captured.c_str()); + return false; + } + LOG(INFO) << "Diff was successful"; + return true; +} + +// Save flags used from logging_unittest.cc. 
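The capture helpers above are the basis of glog's golden-file tests; roughly, and with an illustrative golden-file path (one retrieval per capture in these sketches):

    // Style 1: grab the raw captured text.
    CaptureTestStderr();
    LOG(INFO) << "hello";
    const string raw = GetCapturedTestStderr();

    // Style 2: normalize timestamps, PIDs and line numbers, then diff against a golden file.
    CaptureTestStderr();
    LOG(WARNING) << "danger";
    EXPECT_TRUE(MungeAndDiffTestStderr(FLAGS_test_srcdir + "/src/logging_unittest.err"));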
+#ifndef HAVE_LIB_GFLAGS +struct FlagSaver { + FlagSaver() + : v_(FLAGS_v), + stderrthreshold_(FLAGS_stderrthreshold), + logtostderr_(FLAGS_logtostderr), + alsologtostderr_(FLAGS_alsologtostderr) {} + ~FlagSaver() { + FLAGS_v = v_; + FLAGS_stderrthreshold = stderrthreshold_; + FLAGS_logtostderr = logtostderr_; + FLAGS_alsologtostderr = alsologtostderr_; + } + int v_; + int stderrthreshold_; + bool logtostderr_; + bool alsologtostderr_; +}; +#endif + +class Thread { + public: + virtual ~Thread() {} + + void SetJoinable(bool joinable) {} +#if defined(OS_WINDOWS) || defined(OS_CYGWIN) + void Start() { + handle_ = CreateThread(NULL, + 0, + (LPTHREAD_START_ROUTINE)&Thread::InvokeThread, + (LPVOID)this, + 0, + &th_); + CHECK(handle_) << "CreateThread"; + } + void Join() { + WaitForSingleObject(handle_, INFINITE); + } +#elif defined(HAVE_PTHREAD) + void Start() { + pthread_create(&th_, NULL, &Thread::InvokeThread, this); + } + void Join() { + pthread_join(th_, NULL); + } +#else +# error No thread implementation. +#endif + + protected: + virtual void Run() = 0; + + private: + static void* InvokeThread(void* self) { + ((Thread*)self)->Run(); + return NULL; + } + +#if defined(OS_WINDOWS) || defined(OS_CYGWIN) + HANDLE handle_; + DWORD th_; +#else + pthread_t th_; +#endif +}; + +static inline void SleepForMilliseconds(int t) { +#ifndef OS_WINDOWS + usleep(t * 1000); +#else + Sleep(t); +#endif +} + +// Add hook for operator new to ensure there are no memory allocation. + +void (*g_new_hook)() = NULL; + +_END_GOOGLE_NAMESPACE_ + +void* operator new(size_t size) { + if (GOOGLE_NAMESPACE::g_new_hook) { + GOOGLE_NAMESPACE::g_new_hook(); + } + return malloc(size); +} + +void* operator new[](size_t size) { + return ::operator new(size); +} + +void operator delete(void* p) { + free(p); +} + +void operator delete[](void* p) { + ::operator delete(p); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging.cc new file mode 100644 index 0000000000..445d9f9e02 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging.cc @@ -0,0 +1,1808 @@ +// Copyright (c) 1999, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#define _GNU_SOURCE 1 // needed for O_NOFOLLOW and pread()/pwrite() + +#include "utilities.h" + +#include +#include +#include +#ifdef HAVE_UNISTD_H +# include // For _exit. +#endif +#include +#include +#include +#ifdef HAVE_SYS_UTSNAME_H +# include // For uname. +#endif +#include +#include +#include +#include +#include +#ifdef HAVE_PWD_H +# include +#endif +#ifdef HAVE_SYSLOG_H +# include +#endif +#include +#include // for errno +#include +#include "base/commandlineflags.h" // to get the program name +#include "glog/logging.h" +#include "glog/raw_logging.h" +#include "base/googleinit.h" + +#ifdef HAVE_STACKTRACE +# include "stacktrace.h" +#endif + +using std::string; +using std::vector; +using std::ostrstream; +using std::setw; +using std::setfill; +using std::hex; +using std::dec; +using std::min; +using std::ostream; +using std::ostringstream; +using std::strstream; + +// There is no thread annotation support. +#define EXCLUSIVE_LOCKS_REQUIRED(mu) + +static bool BoolFromEnv(const char *varname, bool defval) { + const char* const valstr = getenv(varname); + if (!valstr) { + return defval; + } + return memchr("tTyY1\0", valstr[0], 6) != NULL; +} + +GLOG_DEFINE_bool(logtostderr, BoolFromEnv("GOOGLE_LOGTOSTDERR", false), + "log messages go to stderr instead of logfiles"); +GLOG_DEFINE_bool(alsologtostderr, BoolFromEnv("GOOGLE_ALSOLOGTOSTDERR", false), + "log messages go to stderr in addition to logfiles"); +#ifdef OS_LINUX +GLOG_DEFINE_bool(drop_log_memory, true, "Drop in-memory buffers of log contents. " + "Logs can grow very quickly and they are rarely read before they " + "need to be evicted from memory. Instead, drop them from memory " + "as soon as they are flushed to disk."); +_START_GOOGLE_NAMESPACE_ +namespace logging { +static const int64 kPageSize = getpagesize(); +} +_END_GOOGLE_NAMESPACE_ +#endif + +// By default, errors (including fatal errors) get logged to stderr as +// well as the file. +// +// The default is ERROR instead of FATAL so that users can see problems +// when they run a program without having to look in another file. +DEFINE_int32(stderrthreshold, + GOOGLE_NAMESPACE::ERROR, + "log messages at or above this level are copied to stderr in " + "addition to logfiles. 
This flag obsoletes --alsologtostderr."); + +GLOG_DEFINE_string(alsologtoemail, "", + "log messages go to these email addresses " + "in addition to logfiles"); +GLOG_DEFINE_bool(log_prefix, true, + "Prepend the log prefix to the start of each log line"); +GLOG_DEFINE_int32(minloglevel, 0, "Messages logged at a lower level than this don't " + "actually get logged anywhere"); +GLOG_DEFINE_int32(logbuflevel, 0, + "Buffer log messages logged at this level or lower" + " (-1 means don't buffer; 0 means buffer INFO only;" + " ...)"); +GLOG_DEFINE_int32(logbufsecs, 30, + "Buffer log messages for at most this many seconds"); +GLOG_DEFINE_int32(logemaillevel, 999, + "Email log messages logged at this level or higher" + " (0 means email all; 3 means email FATAL only;" + " ...)"); +GLOG_DEFINE_string(logmailer, "/bin/mail", + "Mailer used to send logging email"); + +// Compute the default value for --log_dir +static const char* DefaultLogDir() { + const char* env; + env = getenv("GOOGLE_LOG_DIR"); + if (env != NULL && env[0] != '\0') { + return env; + } + env = getenv("TEST_TMPDIR"); + if (env != NULL && env[0] != '\0') { + return env; + } + return ""; +} + +GLOG_DEFINE_string(log_dir, DefaultLogDir(), + "If specified, logfiles are written into this directory instead " + "of the default logging directory."); +GLOG_DEFINE_string(log_link, "", "Put additional links to the log " + "files in this directory"); + +GLOG_DEFINE_int32(max_log_size, 1800, + "approx. maximum log file size (in MB). A value of 0 will " + "be silently overridden to 1."); + +GLOG_DEFINE_bool(stop_logging_if_full_disk, false, + "Stop attempting to log to disk if the disk is full."); + +GLOG_DEFINE_string(log_backtrace_at, "", + "Emit a backtrace when logging at file:linenum."); + +// TODO(hamaji): consider windows +#define PATH_SEPARATOR '/' + +static void GetHostName(string* hostname) { +#if defined(HAVE_SYS_UTSNAME_H) + struct utsname buf; + if (0 != uname(&buf)) { + // ensure null termination on failure + *buf.nodename = '\0'; + } + *hostname = buf.nodename; +#elif defined(OS_WINDOWS) + char buf[MAX_COMPUTERNAME_LENGTH + 1]; + DWORD len = MAX_COMPUTERNAME_LENGTH + 1; + if (GetComputerNameA(buf, &len)) { + *hostname = buf; + } else { + hostname->clear(); + } +#else +# warning There is no way to retrieve the host name. + *hostname = "(unknown)"; +#endif +} + +_START_GOOGLE_NAMESPACE_ + +// Safely get max_log_size, overriding to 1 if it somehow gets defined as 0 +static int32 MaxLogSize() { + return (FLAGS_max_log_size > 0 ? FLAGS_max_log_size : 1); +} + +// A mutex that allows only one thread to log at a time, to keep things from +// getting jumbled. Some other very uncommon logging operations (like +// changing the destination file for log messages of a given severity) also +// lock this mutex. Please be sure that anybody who might possibly need to +// lock it does so. +static Mutex log_mutex; + +// Number of messages sent at each severity. Under log_mutex. +int64 LogMessage::num_messages_[NUM_SEVERITIES] = {0, 0, 0, 0}; + +// Globally disable log writing (if disk is full) +static bool stop_writing = false; + +const char*const LogSeverityNames[NUM_SEVERITIES] = { + "INFO", "WARNING", "ERROR", "FATAL" +}; + +// Has the user called SetExitOnDFatal(true)? 
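The GLOG_DEFINE_* flags above are exposed as ordinary globals through logging.h, so callers typically tune them before initialization; a minimal sketch with an invented log directory:

    #include <glog/logging.h>

    int main(int argc, char** argv) {
      FLAGS_alsologtostderr = true;        // mirror log files to stderr
      FLAGS_minloglevel = 0;               // keep INFO and above
      FLAGS_logbufsecs = 0;                // flush every message immediately instead of every 30s
      FLAGS_log_dir = "/tmp/nagram-logs";  // overrides the GOOGLE_LOG_DIR / TEST_TMPDIR lookup above
      google::InitGoogleLogging(argv[0]);
      LOG(INFO) << "logging configured";
      return 0;
    }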
+static bool exit_on_dfatal = true; + +const char* GetLogSeverityName(LogSeverity severity) { + return LogSeverityNames[severity]; +} + +static bool SendEmailInternal(const char*dest, const char *subject, + const char*body, bool use_logging); + +base::Logger::~Logger() { +} + +namespace { + +// Encapsulates all file-system related state +class LogFileObject : public base::Logger { + public: + LogFileObject(LogSeverity severity, const char* base_filename); + ~LogFileObject(); + + virtual void Write(bool force_flush, // Should we force a flush here? + time_t timestamp, // Timestamp for this entry + const char* message, + int message_len); + + // Configuration options + void SetBasename(const char* basename); + void SetExtension(const char* ext); + void SetSymlinkBasename(const char* symlink_basename); + + // Normal flushing routine + virtual void Flush(); + + // It is the actual file length for the system loggers, + // i.e., INFO, ERROR, etc. + virtual uint32 LogSize() { + MutexLock l(&lock_); + return file_length_; + } + + // Internal flush routine. Exposed so that FlushLogFilesUnsafe() + // can avoid grabbing a lock. Usually Flush() calls it after + // acquiring lock_. + void FlushUnlocked(); + + private: + static const uint32 kRolloverAttemptFrequency = 0x20; + + Mutex lock_; + bool base_filename_selected_; + string base_filename_; + string symlink_basename_; + string filename_extension_; // option users can specify (eg to add port#) + FILE* file_; + LogSeverity severity_; + uint32 bytes_since_flush_; + uint32 file_length_; + unsigned int rollover_attempt_; + int64 next_flush_time_; // cycle count at which to flush log + + // Actually create a logfile using the value of base_filename_ and the + // supplied argument time_pid_string + // REQUIRES: lock_ is held + bool CreateLogfile(const char* time_pid_string); +}; + +} // namespace + +class LogDestination { + public: + friend class LogMessage; + friend void ReprintFatalMessage(); + friend base::Logger* base::GetLogger(LogSeverity); + friend void base::SetLogger(LogSeverity, base::Logger*); + + // These methods are just forwarded to by their global versions. + static void SetLogDestination(LogSeverity severity, + const char* base_filename); + static void SetLogSymlink(LogSeverity severity, + const char* symlink_basename); + static void AddLogSink(LogSink *destination); + static void RemoveLogSink(LogSink *destination); + static void SetLogFilenameExtension(const char* filename_extension); + static void SetStderrLogging(LogSeverity min_severity); + static void SetEmailLogging(LogSeverity min_severity, const char* addresses); + static void LogToStderr(); + // Flush all log files that are at least at the given severity level + static void FlushLogFiles(int min_severity); + static void FlushLogFilesUnsafe(int min_severity); + + // we set the maximum size of our packet to be 1400, the logic being + // to prevent fragmentation. + // Really this number is arbitrary. + static const int kNetworkBytes = 1400; + + static const string& hostname(); + + static void DeleteLogDestinations(); + + private: + LogDestination(LogSeverity severity, const char* base_filename); + ~LogDestination() { } + + // Take a log message of a particular severity and log it to stderr + // iff it's of a high enough severity to deserve it. + static void MaybeLogToStderr(LogSeverity severity, const char* message, + size_t len); + + // Take a log message of a particular severity and log it to email + // iff it's of a high enough severity to deserve it. 
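LogDestination above fans every message out to the registered LogSinks; a user-side sink, written against the public AddLogSink/RemoveLogSink wrappers in logging.h rather than this internal class, might look like this sketch:

    #include <cstdio>
    #include <glog/logging.h>

    class CopyToStderrSink : public google::LogSink {
     public:
      virtual void send(google::LogSeverity severity, const char* full_filename,
                        const char* base_filename, int line,
                        const struct ::tm* tm_time,
                        const char* message, size_t message_len) {
        fwrite(message, 1, message_len, stderr);  // message is not NUL-terminated; use message_len
        fputc('\n', stderr);
      }
    };

    // CopyToStderrSink sink;               // must outlive every message routed to it
    // google::AddLogSink(&sink);           // lands in LogDestination::sinks_
    // LOG(INFO) << "also delivered to the sink";
    // google::RemoveLogSink(&sink);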
+ static void MaybeLogToEmail(LogSeverity severity, const char* message, + size_t len); + // Take a log message of a particular severity and log it to a file + // iff the base filename is not "" (which means "don't log to me") + static void MaybeLogToLogfile(LogSeverity severity, + time_t timestamp, + const char* message, size_t len); + // Take a log message of a particular severity and log it to the file + // for that severity and also for all files with severity less than + // this severity. + static void LogToAllLogfiles(LogSeverity severity, + time_t timestamp, + const char* message, size_t len); + + // Send logging info to all registered sinks. + static void LogToSinks(LogSeverity severity, + const char *full_filename, + const char *base_filename, + int line, + const struct ::tm* tm_time, + const char* message, + size_t message_len); + + // Wait for all registered sinks via WaitTillSent + // including the optional one in "data". + static void WaitForSinks(LogMessage::LogMessageData* data); + + static LogDestination* log_destination(LogSeverity severity); + + LogFileObject fileobject_; + base::Logger* logger_; // Either &fileobject_, or wrapper around it + + static LogDestination* log_destinations_[NUM_SEVERITIES]; + static LogSeverity email_logging_severity_; + static string addresses_; + static string hostname_; + + // arbitrary global logging destinations. + static vector* sinks_; + + // Protects the vector sinks_, + // but not the LogSink objects its elements reference. + static Mutex sink_mutex_; + + // Disallow + LogDestination(const LogDestination&); + LogDestination& operator=(const LogDestination&); +}; + +// Errors do not get logged to email by default. +LogSeverity LogDestination::email_logging_severity_ = 99999; + +string LogDestination::addresses_; +string LogDestination::hostname_; + +vector* LogDestination::sinks_ = NULL; +Mutex LogDestination::sink_mutex_; + +/* static */ +const string& LogDestination::hostname() { + if (hostname_.empty()) { + GetHostName(&hostname_); + if (hostname_.empty()) { + hostname_ = "(unknown)"; + } + } + return hostname_; +} + +LogDestination::LogDestination(LogSeverity severity, + const char* base_filename) + : fileobject_(severity, base_filename), + logger_(&fileobject_) { +} + +inline void LogDestination::FlushLogFilesUnsafe(int min_severity) { + // assume we have the log_mutex or we simply don't care + // about it + for (int i = min_severity; i < NUM_SEVERITIES; i++) { + LogDestination* log = log_destination(i); + if (log != NULL) { + // Flush the base fileobject_ logger directly instead of going + // through any wrappers to reduce chance of deadlock. + log->fileobject_.FlushUnlocked(); + } + } +} + +inline void LogDestination::FlushLogFiles(int min_severity) { + // Prevent any subtle race conditions by wrapping a mutex lock around + // all this stuff. + MutexLock l(&log_mutex); + for (int i = min_severity; i < NUM_SEVERITIES; i++) { + LogDestination* log = log_destination(i); + if (log != NULL) { + log->logger_->Flush(); + } + } +} + +inline void LogDestination::SetLogDestination(LogSeverity severity, + const char* base_filename) { + assert(severity >= 0 && severity < NUM_SEVERITIES); + // Prevent any subtle race conditions by wrapping a mutex lock around + // all this stuff. 
+ MutexLock l(&log_mutex); + log_destination(severity)->fileobject_.SetBasename(base_filename); +} + +inline void LogDestination::SetLogSymlink(LogSeverity severity, + const char* symlink_basename) { + CHECK_GE(severity, 0); + CHECK_LT(severity, NUM_SEVERITIES); + MutexLock l(&log_mutex); + log_destination(severity)->fileobject_.SetSymlinkBasename(symlink_basename); +} + +inline void LogDestination::AddLogSink(LogSink *destination) { + // Prevent any subtle race conditions by wrapping a mutex lock around + // all this stuff. + MutexLock l(&sink_mutex_); + if (!sinks_) sinks_ = new vector; + sinks_->push_back(destination); +} + +inline void LogDestination::RemoveLogSink(LogSink *destination) { + // Prevent any subtle race conditions by wrapping a mutex lock around + // all this stuff. + MutexLock l(&sink_mutex_); + // This doesn't keep the sinks in order, but who cares? + if (sinks_) { + for (int i = sinks_->size() - 1; i >= 0; i--) { + if ((*sinks_)[i] == destination) { + (*sinks_)[i] = (*sinks_)[sinks_->size() - 1]; + sinks_->pop_back(); + break; + } + } + } +} + +inline void LogDestination::SetLogFilenameExtension(const char* ext) { + // Prevent any subtle race conditions by wrapping a mutex lock around + // all this stuff. + MutexLock l(&log_mutex); + for ( int severity = 0; severity < NUM_SEVERITIES; ++severity ) { + log_destination(severity)->fileobject_.SetExtension(ext); + } +} + +inline void LogDestination::SetStderrLogging(LogSeverity min_severity) { + assert(min_severity >= 0 && min_severity < NUM_SEVERITIES); + // Prevent any subtle race conditions by wrapping a mutex lock around + // all this stuff. + MutexLock l(&log_mutex); + FLAGS_stderrthreshold = min_severity; +} + +inline void LogDestination::LogToStderr() { + // *Don't* put this stuff in a mutex lock, since SetStderrLogging & + // SetLogDestination already do the locking! + SetStderrLogging(0); // thus everything is "also" logged to stderr + for ( int i = 0; i < NUM_SEVERITIES; ++i ) { + SetLogDestination(i, ""); // "" turns off logging to a logfile + } +} + +inline void LogDestination::SetEmailLogging(LogSeverity min_severity, + const char* addresses) { + assert(min_severity >= 0 && min_severity < NUM_SEVERITIES); + // Prevent any subtle race conditions by wrapping a mutex lock around + // all this stuff. + MutexLock l(&log_mutex); + LogDestination::email_logging_severity_ = min_severity; + LogDestination::addresses_ = addresses; +} + +static void WriteToStderr(const char* message, size_t len) { + // Avoid using cerr from this module since we may get called during + // exit code, and cerr may be partially or fully destroyed by then. 
+ fwrite(message, len, 1, stderr); +} + +inline void LogDestination::MaybeLogToStderr(LogSeverity severity, + const char* message, size_t len) { + if ((severity >= FLAGS_stderrthreshold) || FLAGS_alsologtostderr) { + WriteToStderr(message, len); +#ifdef OS_WINDOWS + // On Windows, also output to the debugger + ::OutputDebugStringA(string(message,len).c_str()); +#endif + } +} + + +inline void LogDestination::MaybeLogToEmail(LogSeverity severity, + const char* message, size_t len) { + if (severity >= email_logging_severity_ || + severity >= FLAGS_logemaillevel) { + string to(FLAGS_alsologtoemail); + if (!addresses_.empty()) { + if (!to.empty()) { + to += ","; + } + to += addresses_; + } + const string subject(string("[LOG] ") + LogSeverityNames[severity] + ": " + + glog_internal_namespace_::ProgramInvocationShortName()); + string body(hostname()); + body += "\n\n"; + body.append(message, len); + + // should NOT use SendEmail(). The caller of this function holds the + // log_mutex and SendEmail() calls LOG/VLOG which will block trying to + // acquire the log_mutex object. Use SendEmailInternal() and set + // use_logging to false. + SendEmailInternal(to.c_str(), subject.c_str(), body.c_str(), false); + } +} + + +inline void LogDestination::MaybeLogToLogfile(LogSeverity severity, + time_t timestamp, + const char* message, + size_t len) { + const bool should_flush = severity > FLAGS_logbuflevel; + LogDestination* destination = log_destination(severity); + destination->logger_->Write(should_flush, timestamp, message, len); +} + +inline void LogDestination::LogToAllLogfiles(LogSeverity severity, + time_t timestamp, + const char* message, + size_t len) { + + if ( FLAGS_logtostderr ) // global flag: never log to file + WriteToStderr(message, len); + else + for (int i = severity; i >= 0; --i) + LogDestination::MaybeLogToLogfile(i, timestamp, message, len); + +} + +inline void LogDestination::LogToSinks(LogSeverity severity, + const char *full_filename, + const char *base_filename, + int line, + const struct ::tm* tm_time, + const char* message, + size_t message_len) { + ReaderMutexLock l(&sink_mutex_); + if (sinks_) { + for (int i = sinks_->size() - 1; i >= 0; i--) { + (*sinks_)[i]->send(severity, full_filename, base_filename, + line, tm_time, message, message_len); + } + } +} + +inline void LogDestination::WaitForSinks(LogMessage::LogMessageData* data) { + ReaderMutexLock l(&sink_mutex_); + if (sinks_) { + for (int i = sinks_->size() - 1; i >= 0; i--) { + (*sinks_)[i]->WaitTillSent(); + } + } + const bool send_to_sink = + (data->send_method_ == &LogMessage::SendToSink) || + (data->send_method_ == &LogMessage::SendToSinkAndLog); + if (send_to_sink && data->sink_ != NULL) { + data->sink_->WaitTillSent(); + } +} + +LogDestination* LogDestination::log_destinations_[NUM_SEVERITIES]; + +inline LogDestination* LogDestination::log_destination(LogSeverity severity) { + assert(severity >=0 && severity < NUM_SEVERITIES); + if (!log_destinations_[severity]) { + log_destinations_[severity] = new LogDestination(severity, NULL); + } + return log_destinations_[severity]; +} + +void LogDestination::DeleteLogDestinations() { + for (int severity = 0; severity < NUM_SEVERITIES; ++severity) { + delete log_destinations_[severity]; + log_destinations_[severity] = NULL; + } +} + +namespace { + +LogFileObject::LogFileObject(LogSeverity severity, + const char* base_filename) + : base_filename_selected_(base_filename != NULL), + base_filename_((base_filename != NULL) ? 
base_filename : ""), + symlink_basename_(glog_internal_namespace_::ProgramInvocationShortName()), + filename_extension_(), + file_(NULL), + severity_(severity), + bytes_since_flush_(0), + file_length_(0), + rollover_attempt_(kRolloverAttemptFrequency-1), + next_flush_time_(0) { + assert(severity >= 0); + assert(severity < NUM_SEVERITIES); +} + +LogFileObject::~LogFileObject() { + MutexLock l(&lock_); + if (file_ != NULL) { + fclose(file_); + file_ = NULL; + } +} + +void LogFileObject::SetBasename(const char* basename) { + MutexLock l(&lock_); + base_filename_selected_ = true; + if (base_filename_ != basename) { + // Get rid of old log file since we are changing names + if (file_ != NULL) { + fclose(file_); + file_ = NULL; + rollover_attempt_ = kRolloverAttemptFrequency-1; + } + base_filename_ = basename; + } +} + +void LogFileObject::SetExtension(const char* ext) { + MutexLock l(&lock_); + if (filename_extension_ != ext) { + // Get rid of old log file since we are changing names + if (file_ != NULL) { + fclose(file_); + file_ = NULL; + rollover_attempt_ = kRolloverAttemptFrequency-1; + } + filename_extension_ = ext; + } +} + +void LogFileObject::SetSymlinkBasename(const char* symlink_basename) { + MutexLock l(&lock_); + symlink_basename_ = symlink_basename; +} + +void LogFileObject::Flush() { + MutexLock l(&lock_); + FlushUnlocked(); +} + +void LogFileObject::FlushUnlocked(){ + if (file_ != NULL) { + fflush(file_); + bytes_since_flush_ = 0; + } + // Figure out when we are due for another flush. + const int64 next = (FLAGS_logbufsecs + * static_cast(1000000)); // in usec + next_flush_time_ = CycleClock_Now() + UsecToCycles(next); +} + +bool LogFileObject::CreateLogfile(const char* time_pid_string) { + string string_filename = base_filename_+filename_extension_+ + time_pid_string; + const char* filename = string_filename.c_str(); + int fd = open(filename, O_WRONLY | O_CREAT | O_EXCL, 0664); + if (fd == -1) return false; +#ifdef HAVE_FCNTL + // Mark the file close-on-exec. We don't really care if this fails + fcntl(fd, F_SETFD, FD_CLOEXEC); +#endif + + file_ = fdopen(fd, "a"); // Make a FILE*. + if (file_ == NULL) { // Man, we're screwed! + close(fd); + unlink(filename); // Erase the half-baked evidence: an unusable log file + return false; + } + + // We try to create a symlink called ., + // which is easier to use. (Every time we create a new logfile, + // we destroy the old symlink and create a new one, so it always + // points to the latest logfile.) If it fails, we're sad but it's + // no error. + if (!symlink_basename_.empty()) { + // take directory from filename + const char* slash = strrchr(filename, PATH_SEPARATOR); + const string linkname = + symlink_basename_ + '.' + LogSeverityNames[severity_]; + string linkpath; + if ( slash ) linkpath = string(filename, slash-filename+1); // get dirname + linkpath += linkname; + unlink(linkpath.c_str()); // delete old one if it exists + + // We must have unistd.h. +#ifdef HAVE_UNISTD_H + // Make the symlink be relative (in the same dir) so that if the + // entire log directory gets relocated the link is still valid. + const char *linkdest = slash ? 
(slash + 1) : filename; + if (symlink(linkdest, linkpath.c_str()) != 0) { + // silently ignore failures + } + + // Make an additional link to the log file in a place specified by + // FLAGS_log_link, if indicated + if (!FLAGS_log_link.empty()) { + linkpath = FLAGS_log_link + "/" + linkname; + unlink(linkpath.c_str()); // delete old one if it exists + if (symlink(filename, linkpath.c_str()) != 0) { + // silently ignore failures + } + } +#endif + } + + return true; // Everything worked +} + +void LogFileObject::Write(bool force_flush, + time_t timestamp, + const char* message, + int message_len) { + MutexLock l(&lock_); + + // We don't log if the base_name_ is "" (which means "don't write") + if (base_filename_selected_ && base_filename_.empty()) { + return; + } + + if (static_cast(file_length_ >> 20) >= MaxLogSize() || + PidHasChanged()) { + if (file_ != NULL) fclose(file_); + file_ = NULL; + file_length_ = bytes_since_flush_ = 0; + rollover_attempt_ = kRolloverAttemptFrequency-1; + } + + // If there's no destination file, make one before outputting + if (file_ == NULL) { + // Try to rollover the log file every 32 log messages. The only time + // this could matter would be when we have trouble creating the log + // file. If that happens, we'll lose lots of log messages, of course! + if (++rollover_attempt_ != kRolloverAttemptFrequency) return; + rollover_attempt_ = 0; + + struct ::tm tm_time; + localtime_r(×tamp, &tm_time); + + // The logfile's filename will have the date/time & pid in it + char time_pid_string[256]; // More than enough chars for time, pid, \0 + ostrstream time_pid_stream(time_pid_string, sizeof(time_pid_string)); + time_pid_stream.fill('0'); + time_pid_stream << 1900+tm_time.tm_year + << setw(2) << 1+tm_time.tm_mon + << setw(2) << tm_time.tm_mday + << '-' + << setw(2) << tm_time.tm_hour + << setw(2) << tm_time.tm_min + << setw(2) << tm_time.tm_sec + << '.' + << GetMainThreadPid() + << '\0'; + + if (base_filename_selected_) { + if (!CreateLogfile(time_pid_string)) { + perror("Could not create log file"); + fprintf(stderr, "COULD NOT CREATE LOGFILE '%s'!\n", time_pid_string); + return; + } + } else { + // If no base filename for logs of this severity has been set, use a + // default base filename of + // "...log..". So + // logfiles will have names like + // webserver.examplehost.root.log.INFO.19990817-150000.4354, where + // 19990817 is a date (1999 August 17), 150000 is a time (15:00:00), + // and 4354 is the pid of the logging process. The date & time reflect + // when the file was created for output. + // + // Where does the file get put? Successively try the directories + // "/tmp", and "." + string stripped_filename( + glog_internal_namespace_::ProgramInvocationShortName()); + string hostname; + GetHostName(&hostname); + + string uidname = MyUserName(); + // We should not call CHECK() here because this function can be + // called after holding on to log_mutex. We don't want to + // attempt to hold on to the same mutex, and get into a + // deadlock. Simply use a name like invalid-user. + if (uidname.empty()) uidname = "invalid-user"; + + stripped_filename = stripped_filename+'.'+hostname+'.' + +uidname+".log." 
+ +LogSeverityNames[severity_]+'.'; + // We're going to (potentially) try to put logs in several different dirs + const vector & log_dirs = GetLoggingDirectories(); + + // Go through the list of dirs, and try to create the log file in each + // until we succeed or run out of options + bool success = false; + for (vector::const_iterator dir = log_dirs.begin(); + dir != log_dirs.end(); + ++dir) { + base_filename_ = *dir + "/" + stripped_filename; + if ( CreateLogfile(time_pid_string) ) { + success = true; + break; + } + } + // If we never succeeded, we have to give up + if ( success == false ) { + perror("Could not create logging file"); + fprintf(stderr, "COULD NOT CREATE A LOGGINGFILE %s!", time_pid_string); + return; + } + } + + // Write a header message into the log file + char file_header_string[512]; // Enough chars for time and binary info + ostrstream file_header_stream(file_header_string, + sizeof(file_header_string)); + file_header_stream.fill('0'); + file_header_stream << "Log file created at: " + << 1900+tm_time.tm_year << '/' + << setw(2) << 1+tm_time.tm_mon << '/' + << setw(2) << tm_time.tm_mday + << ' ' + << setw(2) << tm_time.tm_hour << ':' + << setw(2) << tm_time.tm_min << ':' + << setw(2) << tm_time.tm_sec << '\n' + << "Running on machine: " + << LogDestination::hostname() << '\n' + << "Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu " + << "threadid file:line] msg" << '\n' + << '\0'; + int header_len = strlen(file_header_string); + fwrite(file_header_string, 1, header_len, file_); + file_length_ += header_len; + bytes_since_flush_ += header_len; + } + + // Write to LOG file + if ( !stop_writing ) { + // fwrite() doesn't return an error when the disk is full, for + // messages that are less than 4096 bytes. When the disk is full, + // it returns the message length for messages that are less than + // 4096 bytes. fwrite() returns 4096 for message lengths that are + // greater than 4096, thereby indicating an error. + errno = 0; + fwrite(message, 1, message_len, file_); + if ( FLAGS_stop_logging_if_full_disk && + errno == ENOSPC ) { // disk full, stop writing to disk + stop_writing = true; // until the disk is + return; + } else { + file_length_ += message_len; + bytes_since_flush_ += message_len; + } + } else { + if ( CycleClock_Now() >= next_flush_time_ ) + stop_writing = false; // check to see if disk has free space. + return; // no need to flush + } + + // See important msgs *now*. Also, flush logs at least every 10^6 chars, + // or every "FLAGS_logbufsecs" seconds. + if ( force_flush || + (bytes_since_flush_ >= 1000000) || + (CycleClock_Now() >= next_flush_time_) ) { + FlushUnlocked(); +#ifdef OS_LINUX + if (FLAGS_drop_log_memory) { + if (file_length_ >= logging::kPageSize) { + // don't evict the most recent page + uint32 len = file_length_ & ~(logging::kPageSize - 1); + posix_fadvise(fileno(file_), 0, len, POSIX_FADV_DONTNEED); + } + } +#endif + } +} + +} // namespace + +// An arbitrary limit on the length of a single log message. This +// is so that streaming can be done more efficiently. +const size_t LogMessage::kMaxLogMessageLen = 30000; + +// Static log data space to avoid alloc failures in a LOG(FATAL) +// +// Since multiple threads may call LOG(FATAL), and we want to preserve +// the data from the first call, we allocate two sets of space. One +// for exclusive use by the first thread, and one for shared use by +// all other threads. 
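When the fallback naming that LogFileObject::Write builds above (program.host.user.log.SEVERITY.date-time.pid) is not wanted, the public helpers in logging.h set the base filename per severity; a sketch with invented paths:

    google::SetLogDestination(google::INFO,  "/tmp/myapp.INFO.");   // "" disables file output for a severity
    google::SetLogDestination(google::ERROR, "/tmp/myapp.ERROR.");
    google::SetLogFilenameExtension(".log");     // forwarded to LogFileObject::SetExtension above
    google::SetStderrLogging(google::WARNING);   // same effect as raising --stderrthreshold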
+static Mutex fatal_msg_lock; +static CrashReason crash_reason; +static bool fatal_msg_exclusive = true; +static char fatal_msg_buf_exclusive[LogMessage::kMaxLogMessageLen+1]; +static char fatal_msg_buf_shared[LogMessage::kMaxLogMessageLen+1]; +static LogMessage::LogStream fatal_msg_stream_exclusive( + fatal_msg_buf_exclusive, LogMessage::kMaxLogMessageLen, 0); +static LogMessage::LogStream fatal_msg_stream_shared( + fatal_msg_buf_shared, LogMessage::kMaxLogMessageLen, 0); +LogMessage::LogMessageData LogMessage::fatal_msg_data_exclusive_; +LogMessage::LogMessageData LogMessage::fatal_msg_data_shared_; + +LogMessage::LogMessageData::~LogMessageData() { + delete[] buf_; + delete stream_alloc_; +} + +LogMessage::LogMessage(const char* file, int line, LogSeverity severity, + int ctr, void (LogMessage::*send_method)()) { + Init(file, line, severity, send_method); + data_->stream_->set_ctr(ctr); +} + +LogMessage::LogMessage(const char* file, int line, + const CheckOpString& result) { + Init(file, line, FATAL, &LogMessage::SendToLog); + stream() << "Check failed: " << (*result.str_) << " "; +} + +LogMessage::LogMessage(const char* file, int line) { + Init(file, line, INFO, &LogMessage::SendToLog); +} + +LogMessage::LogMessage(const char* file, int line, LogSeverity severity) { + Init(file, line, severity, &LogMessage::SendToLog); +} + +LogMessage::LogMessage(const char* file, int line, LogSeverity severity, + LogSink* sink, bool also_send_to_log) { + Init(file, line, severity, also_send_to_log ? &LogMessage::SendToSinkAndLog : + &LogMessage::SendToSink); + data_->sink_ = sink; // override Init()'s setting to NULL +} + +LogMessage::LogMessage(const char* file, int line, LogSeverity severity, + vector *outvec) { + Init(file, line, severity, &LogMessage::SaveOrSendToLog); + data_->outvec_ = outvec; // override Init()'s setting to NULL +} + +LogMessage::LogMessage(const char* file, int line, LogSeverity severity, + string *message) { + Init(file, line, severity, &LogMessage::WriteToStringAndLog); + data_->message_ = message; // override Init()'s setting to NULL +} + +void LogMessage::Init(const char* file, + int line, + LogSeverity severity, + void (LogMessage::*send_method)()) { + allocated_ = NULL; + if (severity != FATAL || !exit_on_dfatal) { + allocated_ = new LogMessageData(); + data_ = allocated_; + data_->buf_ = new char[kMaxLogMessageLen+1]; + data_->message_text_ = data_->buf_; + data_->stream_alloc_ = + new LogStream(data_->message_text_, kMaxLogMessageLen, 0); + data_->stream_ = data_->stream_alloc_; + data_->first_fatal_ = false; + } else { + MutexLock l(&fatal_msg_lock); + if (fatal_msg_exclusive) { + fatal_msg_exclusive = false; + data_ = &fatal_msg_data_exclusive_; + data_->message_text_ = fatal_msg_buf_exclusive; + data_->stream_ = &fatal_msg_stream_exclusive; + data_->first_fatal_ = true; + } else { + data_ = &fatal_msg_data_shared_; + data_->message_text_ = fatal_msg_buf_shared; + data_->stream_ = &fatal_msg_stream_shared; + data_->first_fatal_ = false; + } + data_->stream_alloc_ = NULL; + } + + stream().fill('0'); + data_->preserved_errno_ = errno; + data_->severity_ = severity; + data_->line_ = line; + data_->send_method_ = send_method; + data_->sink_ = NULL; + data_->outvec_ = NULL; + WallTime now = WallTime_Now(); + data_->timestamp_ = static_cast(now); + localtime_r(&data_->timestamp_, &data_->tm_time_); + int usecs = static_cast((now - data_->timestamp_) * 1000000); + RawLog__SetLastTime(data_->tm_time_, usecs); + + data_->num_chars_to_log_ = 0; + data_->num_chars_to_syslog_ = 
0; + data_->basename_ = const_basename(file); + data_->fullname_ = file; + data_->has_been_flushed_ = false; + + // If specified, prepend a prefix to each line. For example: + // I1018 160715 f5d4fbb0 logging.cc:1153] + // (log level, GMT month, date, time, thread_id, file basename, line) + // We exclude the thread_id for the default thread. + if (FLAGS_log_prefix && (line != kNoLogPrefix)) { + stream() << LogSeverityNames[severity][0] + << setw(2) << 1+data_->tm_time_.tm_mon + << setw(2) << data_->tm_time_.tm_mday + << ' ' + << setw(2) << data_->tm_time_.tm_hour << ':' + << setw(2) << data_->tm_time_.tm_min << ':' + << setw(2) << data_->tm_time_.tm_sec << "." + << setw(6) << usecs + << ' ' + << setfill(' ') << setw(5) + << static_cast(GetTID()) << setfill('0') + << ' ' + << data_->basename_ << ':' << data_->line_ << "] "; + } + data_->num_prefix_chars_ = data_->stream_->pcount(); + + if (!FLAGS_log_backtrace_at.empty()) { + char fileline[128]; + snprintf(fileline, sizeof(fileline), "%s:%d", data_->basename_, line); +#ifdef HAVE_STACKTRACE + if (!strcmp(FLAGS_log_backtrace_at.c_str(), fileline)) { + string stacktrace; + DumpStackTraceToString(&stacktrace); + stream() << " (stacktrace:\n" << stacktrace << ") "; + } +#endif + } +} + +LogMessage::~LogMessage() { + Flush(); + delete allocated_; +} + +// Flush buffered message, called by the destructor, or any other function +// that needs to synchronize the log. +void LogMessage::Flush() { + if (data_->has_been_flushed_ || data_->severity_ < FLAGS_minloglevel) + return; + + data_->num_chars_to_log_ = data_->stream_->pcount(); + data_->num_chars_to_syslog_ = + data_->num_chars_to_log_ - data_->num_prefix_chars_; + + // Do we need to add a \n to the end of this message? + bool append_newline = + (data_->message_text_[data_->num_chars_to_log_-1] != '\n'); + char original_final_char = '\0'; + + // If we do need to add a \n, we'll do it by violating the memory of the + // ostrstream buffer. This is quick, and we'll make sure to undo our + // modification before anything else is done with the ostrstream. It + // would be preferable not to do things this way, but it seems to be + // the best way to deal with this. + if (append_newline) { + original_final_char = data_->message_text_[data_->num_chars_to_log_]; + data_->message_text_[data_->num_chars_to_log_++] = '\n'; + } + + // Prevent any subtle race conditions by wrapping a mutex lock around + // the actual logging action per se. + { + MutexLock l(&log_mutex); + (this->*(data_->send_method_))(); + ++num_messages_[static_cast(data_->severity_)]; + } + LogDestination::WaitForSinks(data_); + + if (append_newline) { + // Fix the ostrstream back how it was before we screwed with it. + // It's 99.44% certain that we don't need to worry about doing this. + data_->message_text_[data_->num_chars_to_log_-1] = original_final_char; + } + + // If errno was already set before we enter the logging call, we'll + // set it back to that value when we return from the logging call. + // It happens often that we log an error message after a syscall + // failure, which can potentially set the errno to some other + // values. We would like to preserve the original errno. + if (data_->preserved_errno_ != 0) { + errno = data_->preserved_errno_; + } + + // Note that this message is now safely logged. If we're asked to flush + // again, as a result of destruction, say, we'll do nothing on future calls. 
+ data_->has_been_flushed_ = true; +} + +// Copy of first FATAL log message so that we can print it out again +// after all the stack traces. To preserve legacy behavior, we don't +// use fatal_msg_buf_exclusive. +static time_t fatal_time; +static char fatal_message[256]; + +void ReprintFatalMessage() { + if (fatal_message[0]) { + const int n = strlen(fatal_message); + if (!FLAGS_logtostderr) { + // Also write to stderr + WriteToStderr(fatal_message, n); + } + LogDestination::LogToAllLogfiles(ERROR, fatal_time, fatal_message, n); + } +} + +// L >= log_mutex (callers must hold the log_mutex). +void LogMessage::SendToLog() EXCLUSIVE_LOCKS_REQUIRED(log_mutex) { + static bool already_warned_before_initgoogle = false; + + log_mutex.AssertHeld(); + + RAW_DCHECK(data_->num_chars_to_log_ > 0 && + data_->message_text_[data_->num_chars_to_log_-1] == '\n', ""); + + // Messages of a given severity get logged to lower severity logs, too + + if (!already_warned_before_initgoogle && !IsGoogleLoggingInitialized()) { + const char w[] = "WARNING: Logging before InitGoogleLogging() is " + "written to STDERR\n"; + WriteToStderr(w, strlen(w)); + already_warned_before_initgoogle = true; + } + + // global flag: never log to file if set. Also -- don't log to a + // file if we haven't parsed the command line flags to get the + // program name. + if (FLAGS_logtostderr || !IsGoogleLoggingInitialized()) { + WriteToStderr(data_->message_text_, data_->num_chars_to_log_); + + // this could be protected by a flag if necessary. + LogDestination::LogToSinks(data_->severity_, + data_->fullname_, data_->basename_, + data_->line_, &data_->tm_time_, + data_->message_text_ + data_->num_prefix_chars_, + (data_->num_chars_to_log_ - + data_->num_prefix_chars_ - 1)); + } else { + + // log this message to all log files of severity <= severity_ + LogDestination::LogToAllLogfiles(data_->severity_, data_->timestamp_, + data_->message_text_, + data_->num_chars_to_log_); + + LogDestination::MaybeLogToStderr(data_->severity_, data_->message_text_, + data_->num_chars_to_log_); + LogDestination::MaybeLogToEmail(data_->severity_, data_->message_text_, + data_->num_chars_to_log_); + LogDestination::LogToSinks(data_->severity_, + data_->fullname_, data_->basename_, + data_->line_, &data_->tm_time_, + data_->message_text_ + data_->num_prefix_chars_, + (data_->num_chars_to_log_ + - data_->num_prefix_chars_ - 1)); + // NOTE: -1 removes trailing \n + } + + // If we log a FATAL message, flush all the log destinations, then toss + // a signal for others to catch. We leave the logs in a state that + // someone else can use them (as long as they flush afterwards) + if (data_->severity_ == FATAL && exit_on_dfatal) { + if (data_->first_fatal_) { + // Store crash information so that it is accessible from within signal + // handlers that may be invoked later. + RecordCrashReason(&crash_reason); + SetCrashReason(&crash_reason); + + // Store shortened fatal message for other logs and GWQ status + const int copy = min(data_->num_chars_to_log_, + sizeof(fatal_message)-1); + memcpy(fatal_message, data_->message_text_, copy); + fatal_message[copy] = '\0'; + fatal_time = data_->timestamp_; + } + + if (!FLAGS_logtostderr) { + for (int i = 0; i < NUM_SEVERITIES; ++i) { + if ( LogDestination::log_destinations_[i] ) + LogDestination::log_destinations_[i]->logger_->Write(true, 0, "", 0); + } + } + + // release the lock that our caller (directly or indirectly) + // LogMessage::~LogMessage() grabbed so that signal handlers + // can use the logging facility. 
Alternately, we could add + // an entire unsafe logging interface to bypass locking + // for signal handlers but this seems simpler. + log_mutex.Unlock(); + LogDestination::WaitForSinks(data_); + + const char* message = "*** Check failure stack trace: ***\n"; + if (write(STDERR_FILENO, message, strlen(message)) < 0) { + // Ignore errors. + } + Fail(); + } +} + +void LogMessage::RecordCrashReason( + glog_internal_namespace_::CrashReason* reason) { + reason->filename = fatal_msg_data_exclusive_.fullname_; + reason->line_number = fatal_msg_data_exclusive_.line_; + reason->message = fatal_msg_buf_exclusive + + fatal_msg_data_exclusive_.num_prefix_chars_; +#ifdef HAVE_STACKTRACE + // Retrieve the stack trace, omitting the logging frames that got us here. + reason->depth = GetStackTrace(reason->stack, ARRAYSIZE(reason->stack), 4); +#else + reason->depth = 0; +#endif +} + +static void logging_fail() { +#if defined(_DEBUG) && defined(_MSC_VER) + // When debugging on windows, avoid the obnoxious dialog and make + // it possible to continue past a LOG(FATAL) in the debugger + _asm int 3 +#else + abort(); +#endif +} + +#ifdef HAVE___ATTRIBUTE__ +GOOGLE_GLOG_DLL_DECL +void (*g_logging_fail_func)() __attribute__((noreturn)) = &logging_fail; +#else +GOOGLE_GLOG_DLL_DECL void (*g_logging_fail_func)() = &logging_fail; +#endif + +void InstallFailureFunction(void (*fail_func)()) { + g_logging_fail_func = fail_func; +} + +void LogMessage::Fail() { + g_logging_fail_func(); +} + +// L >= log_mutex (callers must hold the log_mutex). +void LogMessage::SendToSink() EXCLUSIVE_LOCKS_REQUIRED(log_mutex) { + if (data_->sink_ != NULL) { + RAW_DCHECK(data_->num_chars_to_log_ > 0 && + data_->message_text_[data_->num_chars_to_log_-1] == '\n', ""); + data_->sink_->send(data_->severity_, data_->fullname_, data_->basename_, + data_->line_, &data_->tm_time_, + data_->message_text_ + data_->num_prefix_chars_, + (data_->num_chars_to_log_ - + data_->num_prefix_chars_ - 1)); + } +} + +// L >= log_mutex (callers must hold the log_mutex). +void LogMessage::SendToSinkAndLog() EXCLUSIVE_LOCKS_REQUIRED(log_mutex) { + SendToSink(); + SendToLog(); +} + +// L >= log_mutex (callers must hold the log_mutex). +void LogMessage::SaveOrSendToLog() EXCLUSIVE_LOCKS_REQUIRED(log_mutex) { + if (data_->outvec_ != NULL) { + RAW_DCHECK(data_->num_chars_to_log_ > 0 && + data_->message_text_[data_->num_chars_to_log_-1] == '\n', ""); + // Omit prefix of message and trailing newline when recording in outvec_. + const char *start = data_->message_text_ + data_->num_prefix_chars_; + int len = data_->num_chars_to_log_ - data_->num_prefix_chars_ - 1; + data_->outvec_->push_back(string(start, len)); + } else { + SendToLog(); + } +} + +void LogMessage::WriteToStringAndLog() EXCLUSIVE_LOCKS_REQUIRED(log_mutex) { + if (data_->message_ != NULL) { + RAW_DCHECK(data_->num_chars_to_log_ > 0 && + data_->message_text_[data_->num_chars_to_log_-1] == '\n', ""); + // Omit prefix of message and trailing newline when writing to message_. + const char *start = data_->message_text_ + data_->num_prefix_chars_; + int len = data_->num_chars_to_log_ - data_->num_prefix_chars_ - 1; + data_->message_->assign(start, len); + } + SendToLog(); +} + +// L >= log_mutex (callers must hold the log_mutex). 
+void LogMessage::SendToSyslogAndLog() { +#ifdef HAVE_SYSLOG_H + // Before any calls to syslog(), make a single call to openlog() + static bool openlog_already_called = false; + if (!openlog_already_called) { + openlog(glog_internal_namespace_::ProgramInvocationShortName(), + LOG_CONS | LOG_NDELAY | LOG_PID, + LOG_USER); + openlog_already_called = true; + } + + // This array maps Google severity levels to syslog levels + const int SEVERITY_TO_LEVEL[] = { LOG_INFO, LOG_WARNING, LOG_ERR, LOG_EMERG }; + syslog(LOG_USER | SEVERITY_TO_LEVEL[static_cast(data_->severity_)], "%.*s", + int(data_->num_chars_to_syslog_), + data_->message_text_ + data_->num_prefix_chars_); + SendToLog(); +#else + LOG(ERROR) << "No syslog support: message=" << data_->message_text_; +#endif +} + +base::Logger* base::GetLogger(LogSeverity severity) { + MutexLock l(&log_mutex); + return LogDestination::log_destination(severity)->logger_; +} + +void base::SetLogger(LogSeverity severity, base::Logger* logger) { + MutexLock l(&log_mutex); + LogDestination::log_destination(severity)->logger_ = logger; +} + +// L < log_mutex. Acquires and releases mutex_. +int64 LogMessage::num_messages(int severity) { + MutexLock l(&log_mutex); + return num_messages_[severity]; +} + +// Output the COUNTER value. This is only valid if ostream is a +// LogStream. +ostream& operator<<(ostream &os, const PRIVATE_Counter&) { + LogMessage::LogStream *log = dynamic_cast(&os); + CHECK(log == log->self()); + os << log->ctr(); + return os; +} + +ErrnoLogMessage::ErrnoLogMessage(const char* file, int line, + LogSeverity severity, int ctr, + void (LogMessage::*send_method)()) + : LogMessage(file, line, severity, ctr, send_method) { +} + +ErrnoLogMessage::~ErrnoLogMessage() { + // Don't access errno directly because it may have been altered + // while streaming the message. + char buf[100]; + posix_strerror_r(preserved_errno(), buf, sizeof(buf)); + stream() << ": " << buf << " [" << preserved_errno() << "]"; +} + +void FlushLogFiles(LogSeverity min_severity) { + LogDestination::FlushLogFiles(min_severity); +} + +void FlushLogFilesUnsafe(LogSeverity min_severity) { + LogDestination::FlushLogFilesUnsafe(min_severity); +} + +void SetLogDestination(LogSeverity severity, const char* base_filename) { + LogDestination::SetLogDestination(severity, base_filename); +} + +void SetLogSymlink(LogSeverity severity, const char* symlink_basename) { + LogDestination::SetLogSymlink(severity, symlink_basename); +} + +LogSink::~LogSink() { +} + +void LogSink::WaitTillSent() { + // noop default +} + +string LogSink::ToString(LogSeverity severity, const char* file, int line, + const struct ::tm* tm_time, + const char* message, size_t message_len) { + ostringstream stream(string(message, message_len)); + stream.fill('0'); + + // FIXME(jrvb): Updating this to use the correct value for usecs + // requires changing the signature for both this method and + // LogSink::send(). This change needs to be done in a separate CL + // so subclasses of LogSink can be updated at the same time. + int usecs = 0; + + stream << LogSeverityNames[severity][0] + << setw(2) << 1+tm_time->tm_mon + << setw(2) << tm_time->tm_mday + << ' ' + << setw(2) << tm_time->tm_hour << ':' + << setw(2) << tm_time->tm_min << ':' + << setw(2) << tm_time->tm_sec << '.' 
+ << setw(6) << usecs + << ' ' + << setfill(' ') << setw(5) << GetTID() << setfill('0') + << ' ' + << file << ':' << line << "] "; + + stream << string(message, message_len); + return stream.str(); +} + +void AddLogSink(LogSink *destination) { + LogDestination::AddLogSink(destination); +} + +void RemoveLogSink(LogSink *destination) { + LogDestination::RemoveLogSink(destination); +} + +void SetLogFilenameExtension(const char* ext) { + LogDestination::SetLogFilenameExtension(ext); +} + +void SetStderrLogging(LogSeverity min_severity) { + LogDestination::SetStderrLogging(min_severity); +} + +void SetEmailLogging(LogSeverity min_severity, const char* addresses) { + LogDestination::SetEmailLogging(min_severity, addresses); +} + +void LogToStderr() { + LogDestination::LogToStderr(); +} + +namespace base { +namespace internal { + +bool GetExitOnDFatal() { + MutexLock l(&log_mutex); + return exit_on_dfatal; +} + +// Determines whether we exit the program for a LOG(DFATAL) message in +// debug mode. It does this by skipping the call to Fail/FailQuietly. +// This is intended for testing only. +// +// This can have some effects on LOG(FATAL) as well. Failure messages +// are always allocated (rather than sharing a buffer), the crash +// reason is not recorded, the "gwq" status message is not updated, +// and the stack trace is not recorded. The LOG(FATAL) *will* still +// exit the program. Since this function is used only in testing, +// these differences are acceptable. +void SetExitOnDFatal(bool value) { + MutexLock l(&log_mutex); + exit_on_dfatal = value; +} + +} // namespace internal +} // namespace base + +// use_logging controls whether the logging functions LOG/VLOG are used +// to log errors. It should be set to false when the caller holds the +// log_mutex. +static bool SendEmailInternal(const char*dest, const char *subject, + const char*body, bool use_logging) { + if (dest && *dest) { + if ( use_logging ) { + VLOG(1) << "Trying to send TITLE:" << subject + << " BODY:" << body << " to " << dest; + } else { + fprintf(stderr, "Trying to send TITLE: %s BODY: %s to %s\n", + subject, body, dest); + } + + string cmd = + FLAGS_logmailer + " -s\"" + subject + "\" " + dest; + FILE* pipe = popen(cmd.c_str(), "w"); + if (pipe != NULL) { + // Add the body if we have one + if (body) + fwrite(body, sizeof(char), strlen(body), pipe); + bool ok = pclose(pipe) != -1; + if ( !ok ) { + if ( use_logging ) { + char buf[100]; + posix_strerror_r(errno, buf, sizeof(buf)); + LOG(ERROR) << "Problems sending mail to " << dest << ": " << buf; + } else { + char buf[100]; + posix_strerror_r(errno, buf, sizeof(buf)); + fprintf(stderr, "Problems sending mail to %s: %s\n", dest, buf); + } + } + return ok; + } else { + if ( use_logging ) { + LOG(ERROR) << "Unable to send mail to " << dest; + } else { + fprintf(stderr, "Unable to send mail to %s\n", dest); + } + } + } + return false; +} + +bool SendEmail(const char*dest, const char *subject, const char*body){ + return SendEmailInternal(dest, subject, body, true); +} + +static void GetTempDirectories(vector* list) { + list->clear(); +#ifdef OS_WINDOWS + // On windows we'll try to find a directory in this order: + // C:/Documents & Settings/whomever/TEMP (or whatever GetTempPath() is) + // C:/TMP/ + // C:/TEMP/ + // C:/WINDOWS/ or C:/WINNT/ + // . + char tmp[MAX_PATH]; + if (GetTempPathA(MAX_PATH, tmp)) + list->push_back(tmp); + list->push_back("C:\\tmp\\"); + list->push_back("C:\\temp\\"); +#else + // Directories, in order of preference. 
If we find a dir that + // exists, we stop adding other less-preferred dirs + const char * candidates[] = { + // Non-null only during unittest/regtest + getenv("TEST_TMPDIR"), + + // Explicitly-supplied temp dirs + getenv("TMPDIR"), getenv("TMP"), + + // If all else fails + "/tmp", + }; + + for (int i = 0; i < ARRAYSIZE(candidates); i++) { + const char *d = candidates[i]; + if (!d) continue; // Empty env var + + // Make sure we don't surprise anyone who's expecting a '/' + string dstr = d; + if (dstr[dstr.size() - 1] != '/') { + dstr += "/"; + } + list->push_back(dstr); + + struct stat statbuf; + if (!stat(d, &statbuf) && S_ISDIR(statbuf.st_mode)) { + // We found a dir that exists - we're done. + return; + } + } + +#endif +} + +static vector* logging_directories_list; + +const vector& GetLoggingDirectories() { + // Not strictly thread-safe but we're called early in InitGoogle(). + if (logging_directories_list == NULL) { + logging_directories_list = new vector; + + if ( !FLAGS_log_dir.empty() ) { + // A dir was specified, we should use it + logging_directories_list->push_back(FLAGS_log_dir.c_str()); + } else { + GetTempDirectories(logging_directories_list); +#ifdef OS_WINDOWS + char tmp[MAX_PATH]; + if (GetWindowsDirectoryA(tmp, MAX_PATH)) + logging_directories_list->push_back(tmp); + logging_directories_list->push_back(".\\"); +#else + logging_directories_list->push_back("./"); +#endif + } + } + return *logging_directories_list; +} + +void TestOnly_ClearLoggingDirectoriesList() { + fprintf(stderr, "TestOnly_ClearLoggingDirectoriesList should only be " + "called from test code.\n"); + delete logging_directories_list; + logging_directories_list = NULL; +} + +void GetExistingTempDirectories(vector* list) { + GetTempDirectories(list); + vector::iterator i_dir = list->begin(); + while( i_dir != list->end() ) { + // zero arg to access means test for existence; no constant + // defined on windows + if ( access(i_dir->c_str(), 0) ) { + i_dir = list->erase(i_dir); + } else { + ++i_dir; + } + } +} + +void TruncateLogFile(const char *path, int64 limit, int64 keep) { +#ifdef HAVE_UNISTD_H + struct stat statbuf; + const int kCopyBlockSize = 8 << 10; + char copybuf[kCopyBlockSize]; + int64 read_offset, write_offset; + // Don't follow symlinks unless they're our own fd symlinks in /proc + int flags = O_RDWR; + const char *procfd_prefix = "/proc/self/fd/"; + if (strncmp(procfd_prefix, path, strlen(procfd_prefix))) flags |= O_NOFOLLOW; + + int fd = open(path, flags); + if (fd == -1) { + if (errno == EFBIG) { + // The log file in question has got too big for us to open. The + // real fix for this would be to compile logging.cc (or probably + // all of base/...) with -D_FILE_OFFSET_BITS=64 but that's + // rather scary. 
+ // Instead just truncate the file to something we can manage + if (truncate(path, 0) == -1) { + PLOG(ERROR) << "Unable to truncate " << path; + } else { + LOG(ERROR) << "Truncated " << path << " due to EFBIG error"; + } + } else { + PLOG(ERROR) << "Unable to open " << path; + } + return; + } + + if (fstat(fd, &statbuf) == -1) { + PLOG(ERROR) << "Unable to fstat()"; + goto out_close_fd; + } + + // See if the path refers to a regular file bigger than the + // specified limit + if (!S_ISREG(statbuf.st_mode)) goto out_close_fd; + if (statbuf.st_size <= limit) goto out_close_fd; + if (statbuf.st_size <= keep) goto out_close_fd; + + // This log file is too large - we need to truncate it + LOG(INFO) << "Truncating " << path << " to " << keep << " bytes"; + + // Copy the last "keep" bytes of the file to the beginning of the file + read_offset = statbuf.st_size - keep; + write_offset = 0; + int bytesin, bytesout; + while ((bytesin = pread(fd, copybuf, sizeof(copybuf), read_offset)) > 0) { + bytesout = pwrite(fd, copybuf, bytesin, write_offset); + if (bytesout == -1) { + PLOG(ERROR) << "Unable to write to " << path; + break; + } else if (bytesout != bytesin) { + LOG(ERROR) << "Expected to write " << bytesin << ", wrote " << bytesout; + } + read_offset += bytesin; + write_offset += bytesout; + } + if (bytesin == -1) PLOG(ERROR) << "Unable to read from " << path; + + // Truncate the remainder of the file. If someone else writes to the + // end of the file after our last read() above, we lose their latest + // data. Too bad ... + if (ftruncate(fd, write_offset) == -1) { + PLOG(ERROR) << "Unable to truncate " << path; + } + + out_close_fd: + close(fd); +#else + LOG(ERROR) << "No log truncation support."; +#endif +} + +void TruncateStdoutStderr() { +#ifdef HAVE_UNISTD_H + int64 limit = MaxLogSize() << 20; + int64 keep = 1 << 20; + TruncateLogFile("/proc/self/fd/1", limit, keep); + TruncateLogFile("/proc/self/fd/2", limit, keep); +#else + LOG(ERROR) << "No log truncation support."; +#endif +} + + +// Helper functions for string comparisons. +#define DEFINE_CHECK_STROP_IMPL(name, func, expected) \ + string* Check##func##expected##Impl(const char* s1, const char* s2, \ + const char* names) { \ + bool equal = s1 == s2 || (s1 && s2 && !func(s1, s2)); \ + if (equal == expected) return NULL; \ + else { \ + strstream ss; \ + if (!s1) s1 = ""; \ + if (!s2) s2 = ""; \ + ss << #name " failed: " << names << " (" << s1 << " vs. " << s2 << ")"; \ + return new string(ss.str(), ss.pcount()); \ + } \ + } +DEFINE_CHECK_STROP_IMPL(CHECK_STREQ, strcmp, true) +DEFINE_CHECK_STROP_IMPL(CHECK_STRNE, strcmp, false) +DEFINE_CHECK_STROP_IMPL(CHECK_STRCASEEQ, strcasecmp, true) +DEFINE_CHECK_STROP_IMPL(CHECK_STRCASENE, strcasecmp, false) +#undef DEFINE_CHECK_STROP_IMPL + +int posix_strerror_r(int err, char *buf, size_t len) { + // Sanity check input parameters + if (buf == NULL || len <= 0) { + errno = EINVAL; + return -1; + } + + // Reset buf and errno, and try calling whatever version of strerror_r() + // is implemented by glibc + buf[0] = '\000'; + int old_errno = errno; + errno = 0; + char *rc = reinterpret_cast(strerror_r(err, buf, len)); + + // Both versions set errno on failure + if (errno) { + // Should already be there, but better safe than sorry + buf[0] = '\000'; + return -1; + } + errno = old_errno; + + // POSIX is vague about whether the string will be terminated, although + // is indirectly implies that typically ERANGE will be returned, instead + // of truncating the string. 
This is different from the GNU implementation. + // We play it safe by always terminating the string explicitly. + buf[len-1] = '\000'; + + // If the function succeeded, we can use its exit code to determine the + // semantics implemented by glibc + if (!rc) { + return 0; + } else { + // GNU semantics detected + if (rc == buf) { + return 0; + } else { + buf[0] = '\000'; +#if defined(OS_MACOSX) || defined(OS_FREEBSD) || defined(OS_OPENBSD) + if (reinterpret_cast(rc) < sys_nerr) { + // This means an error on MacOSX or FreeBSD. + return -1; + } +#endif + strncat(buf, rc, len-1); + return 0; + } + } +} + +LogMessageFatal::LogMessageFatal(const char* file, int line) : + LogMessage(file, line, FATAL) {} + +LogMessageFatal::LogMessageFatal(const char* file, int line, + const CheckOpString& result) : + LogMessage(file, line, result) {} + +LogMessageFatal::~LogMessageFatal() { + Flush(); + LogMessage::Fail(); +} + +void InitGoogleLogging(const char* argv0) { + glog_internal_namespace_::InitGoogleLoggingUtilities(argv0); +} + +void ShutdownGoogleLogging() { + glog_internal_namespace_::ShutdownGoogleLoggingUtilities(); + LogDestination::DeleteLogDestinations(); + delete logging_directories_list; + logging_directories_list = NULL; +} + +_END_GOOGLE_NAMESPACE_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_striplog_test.sh b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_striplog_test.sh new file mode 100644 index 0000000000..b9033b2427 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_striplog_test.sh @@ -0,0 +1,72 @@ +#! /bin/sh +# +# Copyright (c) 2007, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Author: Sergey Ioffe + +get_strings () { + if test -e ".libs/$1"; then + binary=".libs/$1" + elif test -e "$1.exe"; then + binary="$1.exe" + else + echo "We coundn't find $1 binary." 
+ exit 1 + fi + + strings -n 10 $binary | sort | awk '/TESTMESSAGE/ {printf "%s ", $2}' +} + +# Die if "$1" != "$2", print $3 as death reason +check_eq () { + if [ "$1" != "$2" ]; then + echo "Check failed: '$1' == '$2' ${3:+ ($3)}" + exit 1 + fi +} + +die () { + echo $1 + exit 1 +} + +# Check that the string literals are appropriately stripped. This will +# not be the case in debug mode. + +check_eq "`get_strings logging_striptest0`" "COND ERROR FATAL INFO WARNING " +check_eq "`get_strings logging_striptest2`" "COND ERROR FATAL " +check_eq "`get_strings logging_striptest10`" "" + +# Check that LOG(FATAL) aborts even for large STRIP_LOG + +./logging_striptest2 2>/dev/null && die "Did not abort for STRIP_LOG=2" +./logging_striptest10 2>/dev/null && die "Did not abort for STRIP_LOG=10" + +echo "PASS" diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_striptest10.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_striptest10.cc new file mode 100644 index 0000000000..f6e1078f39 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_striptest10.cc @@ -0,0 +1,35 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Sergey Ioffe + +#define GOOGLE_STRIP_LOG 10 + +// Include the actual test. +#include "logging_striptest_main.cc" diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_striptest2.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_striptest2.cc new file mode 100644 index 0000000000..a64685c9e5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_striptest2.cc @@ -0,0 +1,35 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Sergey Ioffe + +#define GOOGLE_STRIP_LOG 2 + +// Include the actual test. +#include "logging_striptest_main.cc" diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_striptest_main.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_striptest_main.cc new file mode 100644 index 0000000000..17a582f543 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_striptest_main.cc @@ -0,0 +1,68 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Sergey Ioffe + +// The common part of the striplog tests. + +#include +#include +#include +#include "glog/logging.h" +#include "base/commandlineflags.h" +#include "config.h" + +DECLARE_bool(logtostderr); + +using std::string; +using namespace GOOGLE_NAMESPACE; + +int CheckNoReturn(bool b) { + string s; + if (b) { + LOG(FATAL) << "Fatal"; + } else { + return 0; + } +} + +struct A { }; +std::ostream &operator<<(std::ostream &str, const A&) {return str;} + +int main(int argc, char* argv[]) { + FLAGS_logtostderr = true; + InitGoogleLogging(argv[0]); + LOG(INFO) << "TESTMESSAGE INFO"; + LOG(WARNING) << 2 << "something" << "TESTMESSAGE WARNING" + << 1 << 'c' << A() << std::endl; + LOG(ERROR) << "TESTMESSAGE ERROR"; + bool flag = true; + (flag ? LOG(INFO) : LOG(ERROR)) << "TESTMESSAGE COND"; + LOG(FATAL) << "TESTMESSAGE FATAL"; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_unittest.cc new file mode 100644 index 0000000000..5fc34d4a4e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_unittest.cc @@ -0,0 +1,1210 @@ +// Copyright (c) 2002, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// +// Author: Ray Sidney + +#include "config_for_unittests.h" +#include "utilities.h" + +#include +#ifdef HAVE_GLOB_H +# include +#endif +#include +#ifdef HAVE_UNISTD_H +# include +#endif + +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "base/commandlineflags.h" +#include "glog/logging.h" +#include "glog/raw_logging.h" +#include "googletest.h" + +DECLARE_string(log_backtrace_at); // logging.cc + +#ifdef HAVE_LIB_GFLAGS +#include +#endif + +#ifdef HAVE_LIB_GMOCK +#include +#include "mock-log.h" +// Introduce several symbols from gmock. +using testing::_; +using testing::AnyNumber; +using testing::HasSubstr; +using testing::AllOf; +using testing::StrNe; +using testing::StrictMock; +using testing::InitGoogleMock; +using GOOGLE_NAMESPACE::glog_testing::ScopedMockLog; +#endif + +using namespace std; +using namespace GOOGLE_NAMESPACE; + +// Some non-advertised functions that we want to test or use. +_START_GOOGLE_NAMESPACE_ +namespace base { +namespace internal { +bool GetExitOnDFatal(); +void SetExitOnDFatal(bool value); +} // namespace internal +} // namespace base +_END_GOOGLE_NAMESPACE_ + +static void TestLogging(bool check_counts); +static void TestRawLogging(); +static void LogWithLevels(int v, int severity, bool err, bool alsoerr); +static void TestLoggingLevels(); +static void TestLogString(); +static void TestLogSink(); +static void TestLogToString(); +static void TestLogSinkWaitTillSent(); +static void TestCHECK(); +static void TestDCHECK(); +static void TestSTREQ(); +static void TestBasename(); +static void TestSymlink(); +static void TestExtension(); +static void TestWrapper(); +static void TestErrno(); +static void TestTruncate(); + +static int x = -1; +static void BM_Check1(int n) { + while (n-- > 0) { + CHECK_GE(n, x); + CHECK_GE(n, x); + CHECK_GE(n, x); + CHECK_GE(n, x); + CHECK_GE(n, x); + CHECK_GE(n, x); + CHECK_GE(n, x); + CHECK_GE(n, x); + } +} +BENCHMARK(BM_Check1); + +static void CheckFailure(int a, int b, const char* file, int line, const char* msg); +static void BM_Check3(int n) { + while (n-- > 0) { + if (n < x) CheckFailure(n, x, __FILE__, __LINE__, "n < x"); + if (n < x) CheckFailure(n, x, __FILE__, __LINE__, "n < x"); + if (n < x) CheckFailure(n, x, __FILE__, __LINE__, "n < x"); + if (n < x) CheckFailure(n, x, __FILE__, __LINE__, "n < x"); + if (n < x) CheckFailure(n, x, __FILE__, __LINE__, "n < x"); + if (n < x) CheckFailure(n, x, __FILE__, __LINE__, "n < x"); + if (n < x) CheckFailure(n, x, __FILE__, __LINE__, "n < x"); + if (n < x) CheckFailure(n, x, __FILE__, __LINE__, "n < x"); + } +} +BENCHMARK(BM_Check3); + +static void BM_Check2(int n) { + if (n == 17) { + x = 5; + } + while (n-- > 0) { + CHECK(n >= x); + CHECK(n >= x); + CHECK(n >= x); + CHECK(n >= x); + CHECK(n >= x); + CHECK(n >= x); + CHECK(n >= x); + CHECK(n >= x); + } +} +BENCHMARK(BM_Check2); + +static void CheckFailure(int a, int b, const char* file, int line, const char* msg) { +} + +static void BM_logspeed(int n) { + while (n-- > 0) { + LOG(INFO) << "test message"; + } +} +BENCHMARK(BM_logspeed); + +static void BM_vlog(int n) { + while (n-- > 0) { + VLOG(1) << "test message"; + } +} +BENCHMARK(BM_vlog); + +int main(int argc, char **argv) { +#ifdef HAVE_LIB_GFLAGS + ParseCommandLineFlags(&argc, &argv, true); +#endif + + // Test some basics before InitGoogleLogging: + CaptureTestStderr(); + LogWithLevels(FLAGS_v, FLAGS_stderrthreshold, + FLAGS_logtostderr, FLAGS_alsologtostderr); + LogWithLevels(0, 0, 0, 0); // simulate "before global c-tors" + 
const string early_stderr = GetCapturedTestStderr(); + + InitGoogleLogging(argv[0]); + + RunSpecifiedBenchmarks(); + + FLAGS_logtostderr = true; + + InitGoogleTest(&argc, argv); +#ifdef HAVE_LIB_GMOCK + InitGoogleMock(&argc, argv); +#endif + + // so that death tests run before we use threads + CHECK_EQ(RUN_ALL_TESTS(), 0); + + CaptureTestStderr(); + + // re-emit early_stderr + LogMessage("dummy", LogMessage::kNoLogPrefix, INFO).stream() << early_stderr; + + TestLogging(true); + TestRawLogging(); + TestLoggingLevels(); + TestLogString(); + TestLogSink(); + TestLogToString(); + TestLogSinkWaitTillSent(); + TestCHECK(); + TestDCHECK(); + TestSTREQ(); + + // TODO: The golden test portion of this test is very flakey. + EXPECT_TRUE( + MungeAndDiffTestStderr(FLAGS_test_srcdir + "/src/logging_unittest.err")); + + FLAGS_logtostderr = false; + + TestBasename(); + TestSymlink(); + TestExtension(); + TestWrapper(); + TestErrno(); + TestTruncate(); + + ShutdownGoogleLogging(); + + fprintf(stdout, "PASS\n"); + return 0; +} + +void TestLogging(bool check_counts) { + int64 base_num_infos = LogMessage::num_messages(INFO); + int64 base_num_warning = LogMessage::num_messages(WARNING); + int64 base_num_errors = LogMessage::num_messages(ERROR); + + LOG(INFO) << string("foo ") << "bar " << 10 << ' ' << 3.4; + for ( int i = 0; i < 10; ++i ) { + int old_errno = errno; + errno = i; + PLOG_EVERY_N(ERROR, 2) << "Plog every 2, iteration " << COUNTER; + errno = old_errno; + + LOG_EVERY_N(ERROR, 3) << "Log every 3, iteration " << COUNTER << endl; + LOG_EVERY_N(ERROR, 4) << "Log every 4, iteration " << COUNTER << endl; + + LOG_IF_EVERY_N(WARNING, true, 5) << "Log if every 5, iteration " << COUNTER; + LOG_IF_EVERY_N(WARNING, false, 3) + << "Log if every 3, iteration " << COUNTER; + LOG_IF_EVERY_N(INFO, true, 1) << "Log if every 1, iteration " << COUNTER; + LOG_IF_EVERY_N(ERROR, (i < 3), 2) + << "Log if less than 3 every 2, iteration " << COUNTER; + } + LOG_IF(WARNING, true) << "log_if this"; + LOG_IF(WARNING, false) << "don't log_if this"; + + char s[] = "array"; + LOG(INFO) << s; + const char const_s[] = "const array"; + LOG(INFO) << const_s; + int j = 1000; + LOG(ERROR) << string("foo") << ' '<< j << ' ' << setw(10) << j << " " + << setw(1) << hex << j; + + LogMessage("foo", LogMessage::kNoLogPrefix, INFO).stream() << "no prefix"; + + if (check_counts) { + CHECK_EQ(base_num_infos + 14, LogMessage::num_messages(INFO)); + CHECK_EQ(base_num_warning + 3, LogMessage::num_messages(WARNING)); + CHECK_EQ(base_num_errors + 15, LogMessage::num_messages(ERROR)); + } +} + +static void NoAllocNewHook() { + CHECK(false) << "unexpected new"; +} + +struct NewHook { + NewHook() { + g_new_hook = &NoAllocNewHook; + } + ~NewHook() { + g_new_hook = NULL; + } +}; + +TEST(DeathNoAllocNewHook, logging) { + // tests that NewHook used below works + NewHook new_hook; + ASSERT_DEATH({ + new int; + }, "unexpected new"); +} + +void TestRawLogging() { + string* foo = new string("foo "); + string huge_str(50000, 'a'); + + FlagSaver saver; + + // Check that RAW loggging does not use mallocs. 
+ NewHook new_hook; + + RAW_LOG(INFO, "%s%s%d%c%f", foo->c_str(), "bar ", 10, ' ', 3.4); + char s[] = "array"; + RAW_LOG(WARNING, "%s", s); + const char const_s[] = "const array"; + RAW_LOG(INFO, "%s", const_s); + void* p = reinterpret_cast(0x12345678); + RAW_LOG(INFO, "ptr %p", p); + p = NULL; + RAW_LOG(INFO, "ptr %p", p); + int j = 1000; + RAW_LOG(ERROR, "%s%d%c%010d%s%1x", foo->c_str(), j, ' ', j, " ", j); + RAW_VLOG(0, "foo %d", j); + +#ifdef NDEBUG + RAW_LOG(INFO, "foo %d", j); // so that have same stderr to compare +#else + RAW_DLOG(INFO, "foo %d", j); // test RAW_DLOG in debug mode +#endif + + // test how long messages are chopped: + RAW_LOG(WARNING, "Huge string: %s", huge_str.c_str()); + RAW_VLOG(0, "Huge string: %s", huge_str.c_str()); + + FLAGS_v = 0; + RAW_LOG(INFO, "log"); + RAW_VLOG(0, "vlog 0 on"); + RAW_VLOG(1, "vlog 1 off"); + RAW_VLOG(2, "vlog 2 off"); + RAW_VLOG(3, "vlog 3 off"); + FLAGS_v = 2; + RAW_LOG(INFO, "log"); + RAW_VLOG(1, "vlog 1 on"); + RAW_VLOG(2, "vlog 2 on"); + RAW_VLOG(3, "vlog 3 off"); + +#ifdef NDEBUG + RAW_DCHECK(1 == 2, " RAW_DCHECK's shouldn't be compiled in normal mode"); +#endif + + RAW_CHECK(1 == 1, "should be ok"); + RAW_DCHECK(true, "should be ok"); + + delete foo; +} + +void LogWithLevels(int v, int severity, bool err, bool alsoerr) { + RAW_LOG(INFO, + "Test: v=%d stderrthreshold=%d logtostderr=%d alsologtostderr=%d", + v, severity, err, alsoerr); + + FlagSaver saver; + + FLAGS_v = v; + FLAGS_stderrthreshold = severity; + FLAGS_logtostderr = err; + FLAGS_alsologtostderr = alsoerr; + + RAW_VLOG(-1, "vlog -1"); + RAW_VLOG(0, "vlog 0"); + RAW_VLOG(1, "vlog 1"); + RAW_LOG(INFO, "log info"); + RAW_LOG(WARNING, "log warning"); + RAW_LOG(ERROR, "log error"); + + VLOG(-1) << "vlog -1"; + VLOG(0) << "vlog 0"; + VLOG(1) << "vlog 1"; + LOG(INFO) << "log info"; + LOG(WARNING) << "log warning"; + LOG(ERROR) << "log error"; + + VLOG_IF(-1, true) << "vlog_if -1"; + VLOG_IF(-1, false) << "don't vlog_if -1"; + VLOG_IF(0, true) << "vlog_if 0"; + VLOG_IF(0, false) << "don't vlog_if 0"; + VLOG_IF(1, true) << "vlog_if 1"; + VLOG_IF(1, false) << "don't vlog_if 1"; + LOG_IF(INFO, true) << "log_if info"; + LOG_IF(INFO, false) << "don't log_if info"; + LOG_IF(WARNING, true) << "log_if warning"; + LOG_IF(WARNING, false) << "don't log_if warning"; + LOG_IF(ERROR, true) << "log_if error"; + LOG_IF(ERROR, false) << "don't log_if error"; + + int c; + c = 1; VLOG_IF(100, c -= 2) << "vlog_if 100 expr"; EXPECT_EQ(c, -1); + c = 1; VLOG_IF(0, c -= 2) << "vlog_if 0 expr"; EXPECT_EQ(c, -1); + c = 1; LOG_IF(INFO, c -= 2) << "log_if info expr"; EXPECT_EQ(c, -1); + c = 1; LOG_IF(ERROR, c -= 2) << "log_if error expr"; EXPECT_EQ(c, -1); + c = 2; VLOG_IF(0, c -= 2) << "don't vlog_if 0 expr"; EXPECT_EQ(c, 0); + c = 2; LOG_IF(ERROR, c -= 2) << "don't log_if error expr"; EXPECT_EQ(c, 0); + + c = 3; LOG_IF_EVERY_N(INFO, c -= 4, 1) << "log_if info every 1 expr"; + EXPECT_EQ(c, -1); + c = 3; LOG_IF_EVERY_N(ERROR, c -= 4, 1) << "log_if error every 1 expr"; + EXPECT_EQ(c, -1); + c = 4; LOG_IF_EVERY_N(ERROR, c -= 4, 3) << "don't log_if info every 3 expr"; + EXPECT_EQ(c, 0); + c = 4; LOG_IF_EVERY_N(ERROR, c -= 4, 3) << "don't log_if error every 3 expr"; + EXPECT_EQ(c, 0); + c = 5; VLOG_IF_EVERY_N(0, c -= 4, 1) << "vlog_if 0 every 1 expr"; + EXPECT_EQ(c, 1); + c = 5; VLOG_IF_EVERY_N(100, c -= 4, 3) << "vlog_if 100 every 3 expr"; + EXPECT_EQ(c, 1); + c = 6; VLOG_IF_EVERY_N(0, c -= 6, 1) << "don't vlog_if 0 every 1 expr"; + EXPECT_EQ(c, 0); + c = 6; VLOG_IF_EVERY_N(100, c -= 6, 3) << "don't vlog_if 
100 every 1 expr"; + EXPECT_EQ(c, 0); +} + +void TestLoggingLevels() { + LogWithLevels(0, INFO, false, false); + LogWithLevels(1, INFO, false, false); + LogWithLevels(-1, INFO, false, false); + LogWithLevels(0, WARNING, false, false); + LogWithLevels(0, ERROR, false, false); + LogWithLevels(0, FATAL, false, false); + LogWithLevels(0, FATAL, true, false); + LogWithLevels(0, FATAL, false, true); + LogWithLevels(1, WARNING, false, false); + LogWithLevels(1, FATAL, false, true); +} + +TEST(DeathRawCHECK, logging) { + ASSERT_DEATH(RAW_CHECK(false, "failure 1"), + "RAW: Check false failed: failure 1"); + ASSERT_DEBUG_DEATH(RAW_DCHECK(1 == 2, "failure 2"), + "RAW: Check 1 == 2 failed: failure 2"); +} + +void TestLogString() { + vector errors; + vector *no_errors = NULL; + + LOG_STRING(INFO, &errors) << "LOG_STRING: " << "collected info"; + LOG_STRING(WARNING, &errors) << "LOG_STRING: " << "collected warning"; + LOG_STRING(ERROR, &errors) << "LOG_STRING: " << "collected error"; + + LOG_STRING(INFO, no_errors) << "LOG_STRING: " << "reported info"; + LOG_STRING(WARNING, no_errors) << "LOG_STRING: " << "reported warning"; + LOG_STRING(ERROR, NULL) << "LOG_STRING: " << "reported error"; + + for (size_t i = 0; i < errors.size(); ++i) { + LOG(INFO) << "Captured by LOG_STRING: " << errors[i]; + } +} + +void TestLogToString() { + string error; + string* no_error = NULL; + + LOG_TO_STRING(INFO, &error) << "LOG_TO_STRING: " << "collected info"; + LOG(INFO) << "Captured by LOG_TO_STRING: " << error; + LOG_TO_STRING(WARNING, &error) << "LOG_TO_STRING: " << "collected warning"; + LOG(INFO) << "Captured by LOG_TO_STRING: " << error; + LOG_TO_STRING(ERROR, &error) << "LOG_TO_STRING: " << "collected error"; + LOG(INFO) << "Captured by LOG_TO_STRING: " << error; + + LOG_TO_STRING(INFO, no_error) << "LOG_TO_STRING: " << "reported info"; + LOG_TO_STRING(WARNING, no_error) << "LOG_TO_STRING: " << "reported warning"; + LOG_TO_STRING(ERROR, NULL) << "LOG_TO_STRING: " << "reported error"; +} + +class TestLogSinkImpl : public LogSink { + public: + vector errors; + virtual void send(LogSeverity severity, const char* full_filename, + const char* base_filename, int line, + const struct tm* tm_time, + const char* message, size_t message_len) { + errors.push_back( + ToString(severity, base_filename, line, tm_time, message, message_len)); + } +}; + +void TestLogSink() { + TestLogSinkImpl sink; + LogSink *no_sink = NULL; + + LOG_TO_SINK(&sink, INFO) << "LOG_TO_SINK: " << "collected info"; + LOG_TO_SINK(&sink, WARNING) << "LOG_TO_SINK: " << "collected warning"; + LOG_TO_SINK(&sink, ERROR) << "LOG_TO_SINK: " << "collected error"; + + LOG_TO_SINK(no_sink, INFO) << "LOG_TO_SINK: " << "reported info"; + LOG_TO_SINK(no_sink, WARNING) << "LOG_TO_SINK: " << "reported warning"; + LOG_TO_SINK(NULL, ERROR) << "LOG_TO_SINK: " << "reported error"; + + LOG_TO_SINK_BUT_NOT_TO_LOGFILE(&sink, INFO) + << "LOG_TO_SINK_BUT_NOT_TO_LOGFILE: " << "collected info"; + LOG_TO_SINK_BUT_NOT_TO_LOGFILE(&sink, WARNING) + << "LOG_TO_SINK_BUT_NOT_TO_LOGFILE: " << "collected warning"; + LOG_TO_SINK_BUT_NOT_TO_LOGFILE(&sink, ERROR) + << "LOG_TO_SINK_BUT_NOT_TO_LOGFILE: " << "collected error"; + + LOG_TO_SINK_BUT_NOT_TO_LOGFILE(no_sink, INFO) + << "LOG_TO_SINK_BUT_NOT_TO_LOGFILE: " << "thrashed info"; + LOG_TO_SINK_BUT_NOT_TO_LOGFILE(no_sink, WARNING) + << "LOG_TO_SINK_BUT_NOT_TO_LOGFILE: " << "thrashed warning"; + LOG_TO_SINK_BUT_NOT_TO_LOGFILE(NULL, ERROR) + << "LOG_TO_SINK_BUT_NOT_TO_LOGFILE: " << "thrashed error"; + + LOG(INFO) << "Captured by LOG_TO_SINK:"; 
+ for (size_t i = 0; i < sink.errors.size(); ++i) { + LogMessage("foo", LogMessage::kNoLogPrefix, INFO).stream() + << sink.errors[i]; + } +} + +// For testing using CHECK*() on anonymous enums. +enum { + CASE_A, + CASE_B +}; + +void TestCHECK() { + // Tests using CHECK*() on int values. + CHECK(1 == 1); + CHECK_EQ(1, 1); + CHECK_NE(1, 2); + CHECK_GE(1, 1); + CHECK_GE(2, 1); + CHECK_LE(1, 1); + CHECK_LE(1, 2); + CHECK_GT(2, 1); + CHECK_LT(1, 2); + + // Tests using CHECK*() on anonymous enums. + // Apple's GCC doesn't like this. +#if !defined(OS_MACOSX) + CHECK_EQ(CASE_A, CASE_A); + CHECK_NE(CASE_A, CASE_B); + CHECK_GE(CASE_A, CASE_A); + CHECK_GE(CASE_B, CASE_A); + CHECK_LE(CASE_A, CASE_A); + CHECK_LE(CASE_A, CASE_B); + CHECK_GT(CASE_B, CASE_A); + CHECK_LT(CASE_A, CASE_B); +#endif +} + +void TestDCHECK() { +#ifdef NDEBUG + DCHECK( 1 == 2 ) << " DCHECK's shouldn't be compiled in normal mode"; +#endif + DCHECK( 1 == 1 ); + DCHECK_EQ(1, 1); + DCHECK_NE(1, 2); + DCHECK_GE(1, 1); + DCHECK_GE(2, 1); + DCHECK_LE(1, 1); + DCHECK_LE(1, 2); + DCHECK_GT(2, 1); + DCHECK_LT(1, 2); + + auto_ptr sptr(new int64); + int64* ptr = DCHECK_NOTNULL(sptr.get()); + CHECK_EQ(ptr, sptr.get()); +} + +void TestSTREQ() { + CHECK_STREQ("this", "this"); + CHECK_STREQ(NULL, NULL); + CHECK_STRCASEEQ("this", "tHiS"); + CHECK_STRCASEEQ(NULL, NULL); + CHECK_STRNE("this", "tHiS"); + CHECK_STRNE("this", NULL); + CHECK_STRCASENE("this", "that"); + CHECK_STRCASENE(NULL, "that"); + CHECK_STREQ((string("a")+"b").c_str(), "ab"); + CHECK_STREQ(string("test").c_str(), + (string("te") + string("st")).c_str()); +} + +TEST(DeathSTREQ, logging) { + ASSERT_DEATH(CHECK_STREQ(NULL, "this"), ""); + ASSERT_DEATH(CHECK_STREQ("this", "siht"), ""); + ASSERT_DEATH(CHECK_STRCASEEQ(NULL, "siht"), ""); + ASSERT_DEATH(CHECK_STRCASEEQ("this", "siht"), ""); + ASSERT_DEATH(CHECK_STRNE(NULL, NULL), ""); + ASSERT_DEATH(CHECK_STRNE("this", "this"), ""); + ASSERT_DEATH(CHECK_STREQ((string("a")+"b").c_str(), "abc"), ""); +} + +TEST(CheckNOTNULL, Simple) { + int64 t; + void *ptr = static_cast(&t); + void *ref = CHECK_NOTNULL(ptr); + EXPECT_EQ(ptr, ref); + CHECK_NOTNULL(reinterpret_cast(ptr)); + CHECK_NOTNULL(reinterpret_cast(ptr)); + CHECK_NOTNULL(reinterpret_cast(ptr)); + CHECK_NOTNULL(reinterpret_cast(ptr)); +} + +TEST(DeathCheckNN, Simple) { + ASSERT_DEATH(CHECK_NOTNULL(static_cast(NULL)), ""); +} + +// Get list of file names that match pattern +static void GetFiles(const string& pattern, vector* files) { + files->clear(); +#if defined(HAVE_GLOB_H) + glob_t g; + const int r = glob(pattern.c_str(), 0, NULL, &g); + CHECK((r == 0) || (r == GLOB_NOMATCH)) << ": error matching " << pattern; + for (int i = 0; i < g.gl_pathc; i++) { + files->push_back(string(g.gl_pathv[i])); + } + globfree(&g); +#elif defined(OS_WINDOWS) + WIN32_FIND_DATAA data; + HANDLE handle = FindFirstFileA(pattern.c_str(), &data); + size_t index = pattern.rfind('\\'); + if (index == string::npos) { + LOG(FATAL) << "No directory separator."; + } + const string dirname = pattern.substr(0, index + 1); + if (FAILED(handle)) { + // Finding no files is OK. + return; + } + do { + files->push_back(dirname + data.cFileName); + } while (FindNextFileA(handle, &data)); + LOG_SYSRESULT(FindClose(handle)); +#else +# error There is no way to do glob. 
+#endif +} + +// Delete files patching pattern +static void DeleteFiles(const string& pattern) { + vector files; + GetFiles(pattern, &files); + for (size_t i = 0; i < files.size(); i++) { + CHECK(unlink(files[i].c_str()) == 0) << ": " << strerror(errno); + } +} + +static void CheckFile(const string& name, const string& expected_string) { + vector files; + GetFiles(name + "*", &files); + CHECK_EQ(files.size(), 1); + + FILE* file = fopen(files[0].c_str(), "r"); + CHECK(file != NULL) << ": could not open " << files[0]; + char buf[1000]; + while (fgets(buf, sizeof(buf), file) != NULL) { + if (strstr(buf, expected_string.c_str()) != NULL) { + fclose(file); + return; + } + } + fclose(file); + LOG(FATAL) << "Did not find " << expected_string << " in " << files[0]; +} + +static void TestBasename() { + fprintf(stderr, "==== Test setting log file basename\n"); + const string dest = FLAGS_test_tmpdir + "/logging_test_basename"; + DeleteFiles(dest + "*"); + + SetLogDestination(INFO, dest.c_str()); + LOG(INFO) << "message to new base"; + FlushLogFiles(INFO); + + CheckFile(dest, "message to new base"); + + // Release file handle for the destination file to unlock the file in Windows. + LogToStderr(); + DeleteFiles(dest + "*"); +} + +static void TestSymlink() { +#ifndef OS_WINDOWS + fprintf(stderr, "==== Test setting log file symlink\n"); + string dest = FLAGS_test_tmpdir + "/logging_test_symlink"; + string sym = FLAGS_test_tmpdir + "/symlinkbase"; + DeleteFiles(dest + "*"); + DeleteFiles(sym + "*"); + + SetLogSymlink(INFO, "symlinkbase"); + SetLogDestination(INFO, dest.c_str()); + LOG(INFO) << "message to new symlink"; + FlushLogFiles(INFO); + CheckFile(sym, "message to new symlink"); + + DeleteFiles(dest + "*"); + DeleteFiles(sym + "*"); +#endif +} + +static void TestExtension() { + fprintf(stderr, "==== Test setting log file extension\n"); + string dest = FLAGS_test_tmpdir + "/logging_test_extension"; + DeleteFiles(dest + "*"); + + SetLogDestination(INFO, dest.c_str()); + SetLogFilenameExtension("specialextension"); + LOG(INFO) << "message to new extension"; + FlushLogFiles(INFO); + CheckFile(dest, "message to new extension"); + + // Check that file name ends with extension + vector filenames; + GetFiles(dest + "*", &filenames); + CHECK_EQ(filenames.size(), 1); + CHECK(strstr(filenames[0].c_str(), "specialextension") != NULL); + + // Release file handle for the destination file to unlock the file in Windows. 
+ LogToStderr(); + DeleteFiles(dest + "*"); +} + +struct MyLogger : public base::Logger { + string data; + + virtual void Write(bool should_flush, + time_t timestamp, + const char* message, + int length) { + data.append(message, length); + } + + virtual void Flush() { } + + virtual uint32 LogSize() { return data.length(); } +}; + +static void TestWrapper() { + fprintf(stderr, "==== Test log wrapper\n"); + + MyLogger my_logger; + base::Logger* old_logger = base::GetLogger(INFO); + base::SetLogger(INFO, &my_logger); + LOG(INFO) << "Send to wrapped logger"; + FlushLogFiles(INFO); + base::SetLogger(INFO, old_logger); + + CHECK(strstr(my_logger.data.c_str(), "Send to wrapped logger") != NULL); +} + +static void TestErrno() { + fprintf(stderr, "==== Test errno preservation\n"); + + errno = ENOENT; + TestLogging(false); + CHECK_EQ(errno, ENOENT); +} + +static void TestOneTruncate(const char *path, int64 limit, int64 keep, + int64 dsize, int64 ksize, int64 expect) { + int fd; + CHECK_ERR(fd = open(path, O_RDWR | O_CREAT | O_TRUNC, 0600)); + + const char *discardstr = "DISCARDME!", *keepstr = "KEEPME!"; + + // Fill the file with the requested data; first discard data, then kept data + int64 written = 0; + while (written < dsize) { + int bytes = min(dsize - written, strlen(discardstr)); + CHECK_ERR(write(fd, discardstr, bytes)); + written += bytes; + } + written = 0; + while (written < ksize) { + int bytes = min(ksize - written, strlen(keepstr)); + CHECK_ERR(write(fd, keepstr, bytes)); + written += bytes; + } + + TruncateLogFile(path, limit, keep); + + // File should now be shorter + struct stat statbuf; + CHECK_ERR(fstat(fd, &statbuf)); + CHECK_EQ(statbuf.st_size, expect); + CHECK_ERR(lseek(fd, 0, SEEK_SET)); + + // File should contain the suffix of the original file + int buf_size = statbuf.st_size + 1; + char* buf = new char[buf_size]; + memset(buf, 0, sizeof(buf)); + CHECK_ERR(read(fd, buf, buf_size)); + + const char *p = buf; + int64 checked = 0; + while (checked < expect) { + int bytes = min(expect - checked, strlen(keepstr)); + CHECK(!memcmp(p, keepstr, bytes)); + checked += bytes; + } + close(fd); + delete[] buf; +} + +static void TestTruncate() { +#ifdef HAVE_UNISTD_H + fprintf(stderr, "==== Test log truncation\n"); + string path = FLAGS_test_tmpdir + "/truncatefile"; + + // Test on a small file + TestOneTruncate(path.c_str(), 10, 10, 10, 10, 10); + + // And a big file (multiple blocks to copy) + TestOneTruncate(path.c_str(), 2<<20, 4<<10, 3<<20, 4<<10, 4<<10); + + // Check edge-case limits + TestOneTruncate(path.c_str(), 10, 20, 0, 20, 20); + TestOneTruncate(path.c_str(), 10, 0, 0, 0, 0); + TestOneTruncate(path.c_str(), 10, 50, 0, 10, 10); + TestOneTruncate(path.c_str(), 50, 100, 0, 30, 30); + + // MacOSX 10.4 doesn't fail in this case. + // Windows doesn't have symlink. + // Let's just ignore this test for these cases. +#if !defined(OS_MACOSX) && !defined(OS_WINDOWS) + // Through a symlink should fail to truncate + string linkname = path + ".link"; + unlink(linkname.c_str()); + CHECK_ERR(symlink(path.c_str(), linkname.c_str())); + TestOneTruncate(linkname.c_str(), 10, 10, 0, 30, 30); +#endif + + // The /proc/self path makes sense only for linux. 
+#if defined(OS_LINUX) + // Through an open fd symlink should work + int fd; + CHECK_ERR(fd = open(path.c_str(), O_APPEND | O_WRONLY)); + char fdpath[64]; + snprintf(fdpath, sizeof(fdpath), "/proc/self/fd/%d", fd); + TestOneTruncate(fdpath, 10, 10, 10, 10, 10); +#endif + +#endif +} + +_START_GOOGLE_NAMESPACE_ +namespace glog_internal_namespace_ { +extern // in logging.cc +bool SafeFNMatch_(const char* pattern, size_t patt_len, + const char* str, size_t str_len); +} // namespace glog_internal_namespace_ +using glog_internal_namespace_::SafeFNMatch_; +_END_GOOGLE_NAMESPACE_ + +static bool WrapSafeFNMatch(string pattern, string str) { + pattern += "abc"; + str += "defgh"; + return SafeFNMatch_(pattern.data(), pattern.size() - 3, + str.data(), str.size() - 5); +} + +TEST(SafeFNMatch, logging) { + CHECK(WrapSafeFNMatch("foo", "foo")); + CHECK(!WrapSafeFNMatch("foo", "bar")); + CHECK(!WrapSafeFNMatch("foo", "fo")); + CHECK(!WrapSafeFNMatch("foo", "foo2")); + CHECK(WrapSafeFNMatch("bar/foo.ext", "bar/foo.ext")); + CHECK(WrapSafeFNMatch("*ba*r/fo*o.ext*", "bar/foo.ext")); + CHECK(!WrapSafeFNMatch("bar/foo.ext", "bar/baz.ext")); + CHECK(!WrapSafeFNMatch("bar/foo.ext", "bar/foo")); + CHECK(!WrapSafeFNMatch("bar/foo.ext", "bar/foo.ext.zip")); + CHECK(WrapSafeFNMatch("ba?/*.ext", "bar/foo.ext")); + CHECK(WrapSafeFNMatch("ba?/*.ext", "baZ/FOO.ext")); + CHECK(!WrapSafeFNMatch("ba?/*.ext", "barr/foo.ext")); + CHECK(!WrapSafeFNMatch("ba?/*.ext", "bar/foo.ext2")); + CHECK(WrapSafeFNMatch("ba?/*", "bar/foo.ext2")); + CHECK(WrapSafeFNMatch("ba?/*", "bar/")); + CHECK(!WrapSafeFNMatch("ba?/?", "bar/")); + CHECK(!WrapSafeFNMatch("ba?/*", "bar")); +} + +// TestWaitingLogSink will save messages here +// No lock: Accessed only by TestLogSinkWriter thread +// and after its demise by its creator. +static vector global_messages; + +// helper for TestWaitingLogSink below. +// Thread that does the logic of TestWaitingLogSink +// It's free to use LOG() itself. +class TestLogSinkWriter : public Thread { + public: + + TestLogSinkWriter() : should_exit_(false) { + SetJoinable(true); + Start(); + } + + // Just buffer it (can't use LOG() here). + void Buffer(const string& message) { + mutex_.Lock(); + RAW_LOG(INFO, "Buffering"); + messages_.push(message); + mutex_.Unlock(); + RAW_LOG(INFO, "Buffered"); + } + + // Wait for the buffer to clear (can't use LOG() here). + void Wait() { + RAW_LOG(INFO, "Waiting"); + mutex_.Lock(); + while (!NoWork()) { + mutex_.Unlock(); + SleepForMilliseconds(1); + mutex_.Lock(); + } + RAW_LOG(INFO, "Waited"); + mutex_.Unlock(); + } + + // Trigger thread exit. + void Stop() { + MutexLock l(&mutex_); + should_exit_ = true; + } + + private: + + // helpers --------------- + + // For creating a "Condition". + bool NoWork() { return messages_.empty(); } + bool HaveWork() { return !messages_.empty() || should_exit_; } + + // Thread body; CAN use LOG() here! + virtual void Run() { + while (1) { + mutex_.Lock(); + while (!HaveWork()) { + mutex_.Unlock(); + SleepForMilliseconds(1); + mutex_.Lock(); + } + if (should_exit_ && messages_.empty()) { + mutex_.Unlock(); + break; + } + // Give the main thread time to log its message, + // so that we get a reliable log capture to compare to golden file. + // Same for the other sleep below. + SleepForMilliseconds(20); + RAW_LOG(INFO, "Sink got a messages"); // only RAW_LOG under mutex_ here + string message = messages_.front(); + messages_.pop(); + // Normally this would be some more real/involved logging logic + // where LOG() usage can't be eliminated, + // e.g. 
pushing the message over with an RPC: + int messages_left = messages_.size(); + mutex_.Unlock(); + SleepForMilliseconds(20); + // May not use LOG while holding mutex_, because Buffer() + // acquires mutex_, and Buffer is called from LOG(), + // which has its own internal mutex: + // LOG()->LogToSinks()->TestWaitingLogSink::send()->Buffer() + LOG(INFO) << "Sink is sending out a message: " << message; + LOG(INFO) << "Have " << messages_left << " left"; + global_messages.push_back(message); + } + } + + // data --------------- + + Mutex mutex_; + bool should_exit_; + queue messages_; // messages to be logged +}; + +// A log sink that exercises WaitTillSent: +// it pushes data to a buffer and wakes up another thread to do the logging +// (that other thread can than use LOG() itself), +class TestWaitingLogSink : public LogSink { + public: + + TestWaitingLogSink() { + tid_ = pthread_self(); // for thread-specific behavior + AddLogSink(this); + } + ~TestWaitingLogSink() { + RemoveLogSink(this); + writer_.Stop(); + writer_.Join(); + } + + // (re)define LogSink interface + + virtual void send(LogSeverity severity, const char* full_filename, + const char* base_filename, int line, + const struct tm* tm_time, + const char* message, size_t message_len) { + // Push it to Writer thread if we are the original logging thread. + // Note: Something like ThreadLocalLogSink is a better choice + // to do thread-specific LogSink logic for real. + if (pthread_equal(tid_, pthread_self())) { + writer_.Buffer(ToString(severity, base_filename, line, + tm_time, message, message_len)); + } + } + virtual void WaitTillSent() { + // Wait for Writer thread if we are the original logging thread. + if (pthread_equal(tid_, pthread_self())) writer_.Wait(); + } + + private: + + pthread_t tid_; + TestLogSinkWriter writer_; +}; + +// Check that LogSink::WaitTillSent can be used in the advertised way. +// We also do golden-stderr comparison. +static void TestLogSinkWaitTillSent() { + { TestWaitingLogSink sink; + // Sleeps give the sink threads time to do all their work, + // so that we get a reliable log capture to compare to the golden file. + LOG(INFO) << "Message 1"; + SleepForMilliseconds(60); + LOG(ERROR) << "Message 2"; + SleepForMilliseconds(60); + LOG(WARNING) << "Message 3"; + SleepForMilliseconds(60); + } + for (size_t i = 0; i < global_messages.size(); ++i) { + LOG(INFO) << "Sink capture: " << global_messages[i]; + } + CHECK_EQ(global_messages.size(), 3); +} + +TEST(Strerror, logging) { + int errcode = EINTR; + char *msg = strdup(strerror(errcode)); + int buf_size = strlen(msg) + 1; + char *buf = new char[buf_size]; + CHECK_EQ(posix_strerror_r(errcode, NULL, 0), -1); + buf[0] = 'A'; + CHECK_EQ(posix_strerror_r(errcode, buf, 0), -1); + CHECK_EQ(buf[0], 'A'); + CHECK_EQ(posix_strerror_r(errcode, NULL, buf_size), -1); +#if defined(OS_MACOSX) || defined(OS_FREEBSD) || defined(OS_OPENBSD) + // MacOSX or FreeBSD considers this case is an error since there is + // no enough space. + CHECK_EQ(posix_strerror_r(errcode, buf, 1), -1); +#else + CHECK_EQ(posix_strerror_r(errcode, buf, 1), 0); +#endif + CHECK_STREQ(buf, ""); + CHECK_EQ(posix_strerror_r(errcode, buf, buf_size), 0); + CHECK_STREQ(buf, msg); + free(msg); + delete[] buf; +} + +// Simple routines to look at the sizes of generated code for LOG(FATAL) and +// CHECK(..) 
via objdump +void MyFatal() { + LOG(FATAL) << "Failed"; +} +void MyCheck(bool a, bool b) { + CHECK_EQ(a, b); +} + +#ifdef HAVE_LIB_GMOCK + +TEST(DVLog, Basic) { + ScopedMockLog log; + +#if NDEBUG + // We are expecting that nothing is logged. + EXPECT_CALL(log, Log(_, _, _)).Times(0); +#else + EXPECT_CALL(log, Log(INFO, __FILE__, "debug log")); +#endif + + FLAGS_v = 1; + DVLOG(1) << "debug log"; +} + +TEST(DVLog, V0) { + ScopedMockLog log; + + // We are expecting that nothing is logged. + EXPECT_CALL(log, Log(_, _, _)).Times(0); + + FLAGS_v = 0; + DVLOG(1) << "debug log"; +} + +TEST(LogAtLevel, Basic) { + ScopedMockLog log; + + // The function version outputs "logging.h" as a file name. + EXPECT_CALL(log, Log(WARNING, StrNe(__FILE__), "function version")); + EXPECT_CALL(log, Log(INFO, __FILE__, "macro version")); + + int severity = WARNING; + LogAtLevel(severity, "function version"); + + severity = INFO; + // We can use the macro version as a C++ stream. + LOG_AT_LEVEL(severity) << "macro" << ' ' << "version"; +} + +TEST(TestExitOnDFatal, ToBeOrNotToBe) { + // Check the default setting... + EXPECT_TRUE(base::internal::GetExitOnDFatal()); + + // Turn off... + base::internal::SetExitOnDFatal(false); + EXPECT_FALSE(base::internal::GetExitOnDFatal()); + + // We don't die. + { + ScopedMockLog log; + //EXPECT_CALL(log, Log(_, _, _)).Times(AnyNumber()); + // LOG(DFATAL) has severity FATAL if debugging, but is + // downgraded to ERROR if not debugging. + const LogSeverity severity = +#ifdef NDEBUG + ERROR; +#else + FATAL; +#endif + EXPECT_CALL(log, Log(severity, __FILE__, "This should not be fatal")); + LOG(DFATAL) << "This should not be fatal"; + } + + // Turn back on... + base::internal::SetExitOnDFatal(true); + EXPECT_TRUE(base::internal::GetExitOnDFatal()); + +#ifdef GTEST_HAS_DEATH_TEST + // Death comes on little cats' feet. + EXPECT_DEBUG_DEATH({ + LOG(DFATAL) << "This should be fatal in debug mode"; + }, "This should be fatal in debug mode"); +#endif +} + +#ifdef HAVE_STACKTRACE + +static void BacktraceAtHelper() { + LOG(INFO) << "Not me"; + +// The vertical spacing of the next 3 lines is significant. + LOG(INFO) << "Backtrace me"; +} +static int kBacktraceAtLine = __LINE__ - 2; // The line of the LOG(INFO) above + +TEST(LogBacktraceAt, DoesNotBacktraceWhenDisabled) { + StrictMock log; + + FLAGS_log_backtrace_at = ""; + + EXPECT_CALL(log, Log(_, _, "Backtrace me")); + EXPECT_CALL(log, Log(_, _, "Not me")); + + BacktraceAtHelper(); +} + +TEST(LogBacktraceAt, DoesBacktraceAtRightLineWhenEnabled) { + StrictMock log; + + char where[100]; + snprintf(where, 100, "%s:%d", const_basename(__FILE__), kBacktraceAtLine); + FLAGS_log_backtrace_at = where; + + // The LOG at the specified line should include a stacktrace which includes + // the name of the containing function, followed by the log message. + // We use HasSubstr()s instead of ContainsRegex() for environments + // which don't have regexp. + EXPECT_CALL(log, Log(_, _, AllOf(HasSubstr("stacktrace:"), + HasSubstr("BacktraceAtHelper"), + HasSubstr("main"), + HasSubstr("Backtrace me")))); + // Other LOGs should not include a backtrace. 
+ EXPECT_CALL(log, Log(_, _, "Not me")); + + BacktraceAtHelper(); +} + +#endif // HAVE_STACKTRACE + +#endif // HAVE_LIB_GMOCK + +struct UserDefinedClass { + bool operator==(const UserDefinedClass& rhs) const { return true; } +}; + +inline ostream& operator<<(ostream& out, const UserDefinedClass& u) { + out << "OK"; + return out; +} + +TEST(UserDefinedClass, logging) { + UserDefinedClass u; + vector buf; + LOG_STRING(INFO, &buf) << u; + CHECK_EQ(1, buf.size()); + CHECK(buf[0].find("OK") != string::npos); + + // We must be able to compile this. + CHECK_EQ(u, u); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_unittest.err b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_unittest.err new file mode 100644 index 0000000000..4f80bf5d72 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/logging_unittest.err @@ -0,0 +1,305 @@ +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Test: v=0 stderrthreshold=2 logtostderr=0 alsologtostderr=0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log error +WARNING: Logging before InitGoogleLogging() is written to STDERR +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log_if warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info every 1 expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Test: v=0 stderrthreshold=0 logtostderr=0 alsologtostderr=0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log_if warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 expr +IDATE TIME__ THREADID 
logging_unittest.cc:LINE] log_if info expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info every 1 expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] foo bar 10 3.4 +EDATE TIME__ THREADID logging_unittest.cc:LINE] Plog every 2, iteration 1: __SUCCESS__ [0] +EDATE TIME__ THREADID logging_unittest.cc:LINE] Log every 3, iteration 1 +EDATE TIME__ THREADID logging_unittest.cc:LINE] Log every 4, iteration 1 +WDATE TIME__ THREADID logging_unittest.cc:LINE] Log if every 5, iteration 1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] Log if every 1, iteration 1 +EDATE TIME__ THREADID logging_unittest.cc:LINE] Log if less than 3 every 2, iteration 1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] Log if every 1, iteration 2 +EDATE TIME__ THREADID logging_unittest.cc:LINE] Plog every 2, iteration 3: __ENOENT__ [2] +IDATE TIME__ THREADID logging_unittest.cc:LINE] Log if every 1, iteration 3 +EDATE TIME__ THREADID logging_unittest.cc:LINE] Log if less than 3 every 2, iteration 3 +EDATE TIME__ THREADID logging_unittest.cc:LINE] Log every 3, iteration 4 +IDATE TIME__ THREADID logging_unittest.cc:LINE] Log if every 1, iteration 4 +EDATE TIME__ THREADID logging_unittest.cc:LINE] Plog every 2, iteration 5: __EINTR__ [4] +EDATE TIME__ THREADID logging_unittest.cc:LINE] Log every 4, iteration 5 +IDATE TIME__ THREADID logging_unittest.cc:LINE] Log if every 1, iteration 5 +WDATE TIME__ THREADID logging_unittest.cc:LINE] Log if every 5, iteration 6 +IDATE TIME__ THREADID logging_unittest.cc:LINE] Log if every 1, iteration 6 +EDATE TIME__ THREADID logging_unittest.cc:LINE] Plog every 2, iteration 7: __ENXIO__ [6] +EDATE TIME__ THREADID logging_unittest.cc:LINE] Log every 3, iteration 7 +IDATE TIME__ THREADID logging_unittest.cc:LINE] Log if every 1, iteration 7 +IDATE TIME__ THREADID logging_unittest.cc:LINE] Log if every 1, iteration 8 +EDATE TIME__ THREADID logging_unittest.cc:LINE] Plog every 2, iteration 9: __ENOEXEC__ [8] +EDATE TIME__ THREADID logging_unittest.cc:LINE] Log every 4, iteration 9 +IDATE TIME__ THREADID logging_unittest.cc:LINE] Log if every 1, iteration 9 +EDATE TIME__ THREADID logging_unittest.cc:LINE] Log every 3, iteration 10 +IDATE TIME__ THREADID logging_unittest.cc:LINE] Log if every 1, iteration 10 +WDATE TIME__ THREADID logging_unittest.cc:LINE] log_if this +IDATE TIME__ THREADID logging_unittest.cc:LINE] array +IDATE TIME__ THREADID logging_unittest.cc:LINE] const array +EDATE TIME__ THREADID logging_unittest.cc:LINE] foo 1000 0000001000 3e8 +no prefix +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: foo bar 10 3.400000 +WDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: array +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: const array +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: ptr 0x12345678 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: ptr __NULLP__ +EDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: foo 1000 0000001000 3e8 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: foo 1000 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: foo 1000 +WDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: RAW_LOG ERROR: The Message was too long! +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: RAW_LOG ERROR: The Message was too long! 
+IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog 0 on +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog 1 on +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog 2 on +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Test: v=0 stderrthreshold=0 logtostderr=0 alsologtostderr=0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log_if warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info every 1 expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Test: v=1 stderrthreshold=0 logtostderr=0 alsologtostderr=0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog 1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog 1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log_if warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info every 1 expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Test: v=-1 stderrthreshold=0 logtostderr=0 alsologtostderr=0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog -1 
+IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log_if warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info every 1 expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Test: v=0 stderrthreshold=1 logtostderr=0 alsologtostderr=0 +WDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log error +WDATE TIME__ THREADID logging_unittest.cc:LINE] log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log error +WDATE TIME__ THREADID logging_unittest.cc:LINE] log_if warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Test: v=0 stderrthreshold=2 logtostderr=0 alsologtostderr=0 +EDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log error +EDATE TIME__ THREADID logging_unittest.cc:LINE] log error +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Test: v=0 stderrthreshold=3 logtostderr=0 alsologtostderr=0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Test: v=0 stderrthreshold=3 logtostderr=1 alsologtostderr=0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log_if warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info every 1 expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error every 1 expr +IDATE TIME__ THREADID 
logging_unittest.cc:LINE] vlog_if 0 every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Test: v=0 stderrthreshold=3 logtostderr=0 alsologtostderr=1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log_if warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info every 1 expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Test: v=1 stderrthreshold=1 logtostderr=0 alsologtostderr=0 +WDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log error +WDATE TIME__ THREADID logging_unittest.cc:LINE] log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log error +WDATE TIME__ THREADID logging_unittest.cc:LINE] log_if warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Test: v=1 stderrthreshold=3 logtostderr=0 alsologtostderr=1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: vlog 1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: log error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog 1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if -1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info +WDATE TIME__ THREADID logging_unittest.cc:LINE] log_if warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] log_if info expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error expr +IDATE TIME__ THREADID 
logging_unittest.cc:LINE] log_if info every 1 expr +EDATE TIME__ THREADID logging_unittest.cc:LINE] log_if error every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] vlog_if 0 every 1 expr +IDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_STRING: reported info +WDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_STRING: reported warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_STRING: reported error +IDATE TIME__ THREADID logging_unittest.cc:LINE] Captured by LOG_STRING: LOG_STRING: collected info +IDATE TIME__ THREADID logging_unittest.cc:LINE] Captured by LOG_STRING: LOG_STRING: collected warning +IDATE TIME__ THREADID logging_unittest.cc:LINE] Captured by LOG_STRING: LOG_STRING: collected error +IDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_SINK: collected info +WDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_SINK: collected warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_SINK: collected error +IDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_SINK: reported info +WDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_SINK: reported warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_SINK: reported error +IDATE TIME__ THREADID logging_unittest.cc:LINE] Captured by LOG_TO_SINK: +IDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_SINK: collected info +WDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_SINK: collected warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_SINK: collected error +IDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_SINK_BUT_NOT_TO_LOGFILE: collected info +WDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_SINK_BUT_NOT_TO_LOGFILE: collected warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_SINK_BUT_NOT_TO_LOGFILE: collected error +IDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_STRING: collected info +IDATE TIME__ THREADID logging_unittest.cc:LINE] Captured by LOG_TO_STRING: LOG_TO_STRING: collected info +WDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_STRING: collected warning +IDATE TIME__ THREADID logging_unittest.cc:LINE] Captured by LOG_TO_STRING: LOG_TO_STRING: collected warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_STRING: collected error +IDATE TIME__ THREADID logging_unittest.cc:LINE] Captured by LOG_TO_STRING: LOG_TO_STRING: collected error +IDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_STRING: reported info +WDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_STRING: reported warning +EDATE TIME__ THREADID logging_unittest.cc:LINE] LOG_TO_STRING: reported error +IDATE TIME__ THREADID logging_unittest.cc:LINE] Message 1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Buffering +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Buffered +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Waiting +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Sink got a messages +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Waited +IDATE TIME__ THREADID logging_unittest.cc:LINE] Sink is sending out a message: IDATE TIME__ THREADID logging_unittest.cc:LINE] Message 1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] Have 0 left +EDATE TIME__ THREADID logging_unittest.cc:LINE] Message 2 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Buffering +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Buffered +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Waiting +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Sink got a messages +IDATE TIME__ THREADID logging_unittest.cc:LINE] 
RAW: Waited +IDATE TIME__ THREADID logging_unittest.cc:LINE] Sink is sending out a message: EDATE TIME__ THREADID logging_unittest.cc:LINE] Message 2 +IDATE TIME__ THREADID logging_unittest.cc:LINE] Have 0 left +WDATE TIME__ THREADID logging_unittest.cc:LINE] Message 3 +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Buffering +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Buffered +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Waiting +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Sink got a messages +IDATE TIME__ THREADID logging_unittest.cc:LINE] RAW: Waited +IDATE TIME__ THREADID logging_unittest.cc:LINE] Sink is sending out a message: WDATE TIME__ THREADID logging_unittest.cc:LINE] Message 3 +IDATE TIME__ THREADID logging_unittest.cc:LINE] Have 0 left +IDATE TIME__ THREADID logging_unittest.cc:LINE] Sink capture: IDATE TIME__ THREADID logging_unittest.cc:LINE] Message 1 +IDATE TIME__ THREADID logging_unittest.cc:LINE] Sink capture: EDATE TIME__ THREADID logging_unittest.cc:LINE] Message 2 +IDATE TIME__ THREADID logging_unittest.cc:LINE] Sink capture: WDATE TIME__ THREADID logging_unittest.cc:LINE] Message 3 diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/mock-log.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/mock-log.h new file mode 100644 index 0000000000..5b21811504 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/mock-log.h @@ -0,0 +1,155 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Zhanyong Wan +// +// Defines the ScopedMockLog class (using Google C++ Mocking +// Framework), which is convenient for testing code that uses LOG(). + +#ifndef GLOG_SRC_MOCK_LOG_H_ +#define GLOG_SRC_MOCK_LOG_H_ + +// For GOOGLE_NAMESPACE. This must go first so we get _XOPEN_SOURCE. 
+#include "utilities.h" + +#include + +#include + +#include "glog/logging.h" + +_START_GOOGLE_NAMESPACE_ +namespace glog_testing { + +// A ScopedMockLog object intercepts LOG() messages issued during its +// lifespan. Using this together with Google C++ Mocking Framework, +// it's very easy to test how a piece of code calls LOG(). The +// typical usage: +// +// TEST(FooTest, LogsCorrectly) { +// ScopedMockLog log; +// +// // We expect the WARNING "Something bad!" exactly twice. +// EXPECT_CALL(log, Log(WARNING, _, "Something bad!")) +// .Times(2); +// +// // We allow foo.cc to call LOG(INFO) any number of times. +// EXPECT_CALL(log, Log(INFO, HasSubstr("/foo.cc"), _)) +// .Times(AnyNumber()); +// +// Foo(); // Exercises the code under test. +// } +class ScopedMockLog : public GOOGLE_NAMESPACE::LogSink { + public: + // When a ScopedMockLog object is constructed, it starts to + // intercept logs. + ScopedMockLog() { AddLogSink(this); } + + // When the object is destructed, it stops intercepting logs. + virtual ~ScopedMockLog() { RemoveLogSink(this); } + + // Implements the mock method: + // + // void Log(LogSeverity severity, const string& file_path, + // const string& message); + // + // The second argument to Send() is the full path of the source file + // in which the LOG() was issued. + // + // Note, that in a multi-threaded environment, all LOG() messages from a + // single thread will be handled in sequence, but that cannot be guaranteed + // for messages from different threads. In fact, if the same or multiple + // expectations are matched on two threads concurrently, their actions will + // be executed concurrently as well and may interleave. + MOCK_METHOD3(Log, void(GOOGLE_NAMESPACE::LogSeverity severity, + const std::string& file_path, + const std::string& message)); + + private: + // Implements the send() virtual function in class LogSink. + // Whenever a LOG() statement is executed, this function will be + // invoked with information presented in the LOG(). + // + // The method argument list is long and carries much information a + // test usually doesn't care about, so we trim the list before + // forwarding the call to Log(), which is much easier to use in + // tests. + // + // We still cannot call Log() directly, as it may invoke other LOG() + // messages, either due to Invoke, or due to an error logged in + // Google C++ Mocking Framework code, which would trigger a deadlock + // since a lock is held during send(). + // + // Hence, we save the message for WaitTillSent() which will be called after + // the lock on send() is released, and we'll call Log() inside + // WaitTillSent(). Since while a single send() call may be running at a + // time, multiple WaitTillSent() calls (along with the one send() call) may + // be running simultaneously, we ensure thread-safety of the exchange between + // send() and WaitTillSent(), and that for each message, LOG(), send(), + // WaitTillSent() and Log() are executed in the same thread. + virtual void send(GOOGLE_NAMESPACE::LogSeverity severity, + const char* full_filename, + const char* base_filename, int line, const tm* tm_time, + const char* message, size_t message_len) { + // We are only interested in the log severity, full file name, and + // log message. + message_info_.severity = severity; + message_info_.file_path = full_filename; + message_info_.message = std::string(message, message_len); + } + + // Implements the WaitTillSent() virtual function in class LogSink. 
+ // It will be executed after send() and after the global logging lock is + // released, so calls within it (or rather within the Log() method called + // within) may also issue LOG() statements. + // + // LOG(), send(), WaitTillSent() and Log() will occur in the same thread for + // a given log message. + virtual void WaitTillSent() { + // First, and very importantly, we save a copy of the message being + // processed before calling Log(), since Log() may indirectly call send() + // and WaitTillSent() in the same thread again. + MessageInfo message_info = message_info_; + Log(message_info.severity, message_info.file_path, message_info.message); + } + + // All relevant information about a logged message that needs to be passed + // from send() to WaitTillSent(). + struct MessageInfo { + GOOGLE_NAMESPACE::LogSeverity severity; + std::string file_path; + std::string message; + }; + MessageInfo message_info_; +}; + +} // namespace glog_testing +_END_GOOGLE_NAMESPACE_ + +#endif // GLOG_SRC_MOCK_LOG_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/mock-log_test.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/mock-log_test.cc new file mode 100644 index 0000000000..7d58a307c2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/mock-log_test.cc @@ -0,0 +1,106 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Zhanyong Wan + +// Tests the ScopedMockLog class. + +#include "mock-log.h" + +#include + +#include +#include + +namespace { + +using GOOGLE_NAMESPACE::INFO; +using GOOGLE_NAMESPACE::WARNING; +using GOOGLE_NAMESPACE::ERROR; +using GOOGLE_NAMESPACE::glog_testing::ScopedMockLog; +using std::string; +using testing::_; +using testing::HasSubstr; +using testing::InSequence; +using testing::InvokeWithoutArgs; + +// Tests that ScopedMockLog intercepts LOG()s when it's alive. 
+TEST(ScopedMockLogTest, InterceptsLog) { + ScopedMockLog log; + + InSequence s; + EXPECT_CALL(log, Log(WARNING, HasSubstr("/mock-log_test.cc"), "Fishy.")); + EXPECT_CALL(log, Log(INFO, _, "Working...")) + .Times(2); + EXPECT_CALL(log, Log(ERROR, _, "Bad!!")); + + LOG(WARNING) << "Fishy."; + LOG(INFO) << "Working..."; + LOG(INFO) << "Working..."; + LOG(ERROR) << "Bad!!"; +} + +void LogBranch() { + LOG(INFO) << "Logging a branch..."; +} + +void LogTree() { + LOG(INFO) << "Logging the whole tree..."; +} + +void LogForest() { + LOG(INFO) << "Logging the entire forest."; + LOG(INFO) << "Logging the entire forest.."; + LOG(INFO) << "Logging the entire forest..."; +} + +// The purpose of the following test is to verify that intercepting logging +// continues to work properly if a LOG statement is executed within the scope +// of a mocked call. +TEST(ScopedMockLogTest, LogDuringIntercept) { + ScopedMockLog log; + InSequence s; + EXPECT_CALL(log, Log(INFO, __FILE__, "Logging a branch...")) + .WillOnce(InvokeWithoutArgs(LogTree)); + EXPECT_CALL(log, Log(INFO, __FILE__, "Logging the whole tree...")) + .WillOnce(InvokeWithoutArgs(LogForest)); + EXPECT_CALL(log, Log(INFO, __FILE__, "Logging the entire forest.")); + EXPECT_CALL(log, Log(INFO, __FILE__, "Logging the entire forest..")); + EXPECT_CALL(log, Log(INFO, __FILE__, "Logging the entire forest...")); + LogBranch(); +} + +} // namespace + +int main(int argc, char **argv) { + GOOGLE_NAMESPACE::InitGoogleLogging(argv[0]); + testing::InitGoogleMock(&argc, argv); + + return RUN_ALL_TESTS(); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/raw_logging.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/raw_logging.cc new file mode 100644 index 0000000000..50c6a71994 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/raw_logging.cc @@ -0,0 +1,172 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// +// Author: Maxim Lifantsev +// +// logging_unittest.cc covers the functionality herein + +#include "utilities.h" + +#include +#include +#include +#ifdef HAVE_UNISTD_H +# include // for close() and write() +#endif +#include // for open() +#include +#include "config.h" +#include "glog/logging.h" // To pick up flag settings etc. +#include "glog/raw_logging.h" +#include "base/commandlineflags.h" + +#ifdef HAVE_STACKTRACE +# include "stacktrace.h" +#endif + +#if defined(HAVE_SYSCALL_H) +#include // for syscall() +#elif defined(HAVE_SYS_SYSCALL_H) +#include // for syscall() +#endif +#ifdef HAVE_UNISTD_H +# include +#endif + +#if defined(HAVE_SYSCALL_H) || defined(HAVE_SYS_SYSCALL_H) +# define safe_write(fd, s, len) syscall(SYS_write, fd, s, len) +#else + // Not so safe, but what can you do? +# define safe_write(fd, s, len) write(fd, s, len) +#endif + +_START_GOOGLE_NAMESPACE_ + +// Data for RawLog__ below. We simply pick up the latest +// time data created by a normal log message to avoid calling +// localtime_r which can allocate memory. +static struct ::tm last_tm_time_for_raw_log; +static int last_usecs_for_raw_log; + +void RawLog__SetLastTime(const struct ::tm& t, int usecs) { + memcpy(&last_tm_time_for_raw_log, &t, sizeof(last_tm_time_for_raw_log)); + last_usecs_for_raw_log = usecs; +} + +// CAVEAT: vsnprintf called from *DoRawLog below has some (exotic) code paths +// that invoke malloc() and getenv() that might acquire some locks. +// If this becomes a problem we should reimplement a subset of vsnprintf +// that does not need locks and malloc. + +// Helper for RawLog__ below. +// *DoRawLog writes to *buf of *size and move them past the written portion. +// It returns true iff there was no overflow or error. +static bool DoRawLog(char** buf, int* size, const char* format, ...) { + va_list ap; + va_start(ap, format); + int n = vsnprintf(*buf, *size, format, ap); + va_end(ap); + if (n < 0 || n > *size) return false; + *size -= n; + *buf += n; + return true; +} + +// Helper for RawLog__ below. +inline static bool VADoRawLog(char** buf, int* size, + const char* format, va_list ap) { + int n = vsnprintf(*buf, *size, format, ap); + if (n < 0 || n > *size) return false; + *size -= n; + *buf += n; + return true; +} + +static const int kLogBufSize = 3000; +static bool crashed = false; +static CrashReason crash_reason; +static char crash_buf[kLogBufSize + 1] = { 0 }; // Will end in '\0' + +void RawLog__(LogSeverity severity, const char* file, int line, + const char* format, ...) 
{ + if (!(FLAGS_logtostderr || severity >= FLAGS_stderrthreshold || + FLAGS_alsologtostderr || !IsGoogleLoggingInitialized())) { + return; // this stderr log message is suppressed + } + // can't call localtime_r here: it can allocate + struct ::tm& t = last_tm_time_for_raw_log; + char buffer[kLogBufSize]; + char* buf = buffer; + int size = sizeof(buffer); + + // NOTE: this format should match the specification in base/logging.h + DoRawLog(&buf, &size, "%c%02d%02d %02d:%02d:%02d.%06d %5u %s:%d] RAW: ", + LogSeverityNames[severity][0], + 1 + t.tm_mon, t.tm_mday, t.tm_hour, t.tm_min, t.tm_sec, + last_usecs_for_raw_log, + static_cast(GetTID()), + const_basename(const_cast(file)), line); + + // Record the position and size of the buffer after the prefix + const char* msg_start = buf; + const int msg_size = size; + + va_list ap; + va_start(ap, format); + bool no_chop = VADoRawLog(&buf, &size, format, ap); + va_end(ap); + if (no_chop) { + DoRawLog(&buf, &size, "\n"); + } else { + DoRawLog(&buf, &size, "RAW_LOG ERROR: The Message was too long!\n"); + } + // We make a raw syscall to write directly to the stderr file descriptor, + // avoiding FILE buffering (to avoid invoking malloc()), and bypassing + // libc (to side-step any libc interception). + // We write just once to avoid races with other invocations of RawLog__. + safe_write(STDERR_FILENO, buffer, strlen(buffer)); + if (severity == FATAL) { + if (!sync_val_compare_and_swap(&crashed, false, true)) { + crash_reason.filename = file; + crash_reason.line_number = line; + memcpy(crash_buf, msg_start, msg_size); // Don't include prefix + crash_reason.message = crash_buf; +#ifdef HAVE_STACKTRACE + crash_reason.depth = + GetStackTrace(crash_reason.stack, ARRAYSIZE(crash_reason.stack), 1); +#else + crash_reason.depth = 0; +#endif + SetCrashReason(&crash_reason); + } + LogMessage::Fail(); // abort() + } +} + +_END_GOOGLE_NAMESPACE_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/signalhandler.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/signalhandler.cc new file mode 100644 index 0000000000..7c8fe57426 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/signalhandler.cc @@ -0,0 +1,350 @@ +// Copyright (c) 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Satoru Takabayashi +// +// Implementation of InstallFailureSignalHandler(). + +#include "utilities.h" +#include "stacktrace.h" +#include "symbolize.h" +#include "glog/logging.h" + +#include +#include +#ifdef HAVE_UCONTEXT_H +# include +#endif +#ifdef HAVE_SYS_UCONTEXT_H +# include +#endif +#include + +_START_GOOGLE_NAMESPACE_ + +namespace { + +// We'll install the failure signal handler for these signals. We could +// use strsignal() to get signal names, but we don't use it to avoid +// introducing yet another #ifdef complication. +// +// The list should be synced with the comment in signalhandler.h. +const struct { + int number; + const char *name; +} kFailureSignals[] = { + { SIGSEGV, "SIGSEGV" }, + { SIGILL, "SIGILL" }, + { SIGFPE, "SIGFPE" }, + { SIGABRT, "SIGABRT" }, + { SIGBUS, "SIGBUS" }, + { SIGTERM, "SIGTERM" }, +}; + +// Returns the program counter from signal context, NULL if unknown. +void* GetPC(void* ucontext_in_void) { +#if (defined(HAVE_UCONTEXT_H) || defined(HAVE_SYS_UCONTEXT_H)) && defined(PC_FROM_UCONTEXT) + if (ucontext_in_void != NULL) { + ucontext_t *context = reinterpret_cast(ucontext_in_void); + return (void*)context->PC_FROM_UCONTEXT; + } +#endif + return NULL; +} + +// The class is used for formatting error messages. We don't use printf() +// as it's not async signal safe. +class MinimalFormatter { + public: + MinimalFormatter(char *buffer, int size) + : buffer_(buffer), + cursor_(buffer), + end_(buffer + size) { + } + + // Returns the number of bytes written in the buffer. + int num_bytes_written() const { return cursor_ - buffer_; } + + // Appends string from "str" and updates the internal cursor. + void AppendString(const char* str) { + int i = 0; + while (str[i] != '\0' && cursor_ + i < end_) { + cursor_[i] = str[i]; + ++i; + } + cursor_ += i; + } + + // Formats "number" in "radix" and updates the internal cursor. + // Lowercase letters are used for 'a' - 'z'. + void AppendUint64(uint64 number, int radix) { + int i = 0; + while (cursor_ + i < end_) { + const int tmp = number % radix; + number /= radix; + cursor_[i] = (tmp < 10 ? '0' + tmp : 'a' + tmp - 10); + ++i; + if (number == 0) { + break; + } + } + // Reverse the bytes written. + std::reverse(cursor_, cursor_ + i); + cursor_ += i; + } + + // Formats "number" as hexadecimal number, and updates the internal + // cursor. Padding will be added in front if needed. + void AppendHexWithPadding(uint64 number, int width) { + char* start = cursor_; + AppendString("0x"); + AppendUint64(number, 16); + // Move to right and add padding in front if needed. + if (cursor_ < start + width) { + const int64 delta = start + width - cursor_; + std::copy(start, cursor_, start + delta); + std::fill(start, start + delta, ' '); + cursor_ = start + width; + } + } + + private: + char *buffer_; + char *cursor_; + const char * const end_; +}; + +// Writes the given data with the size to the standard error. 
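+// Note: the writer below uses plain write(2) on STDERR_FILENO rather than
+// stdio because write(2) is async-signal-safe; fprintf and friends may
+// allocate or take locks, which must be avoided inside a signal handler.
+// Illustrative sketch only (not part of the upstream sources) of a custom
+// writer that could be swapped in through InstallFailureWriter(); kCrashLogFd
+// is a hypothetical file descriptor assumed to be opened at startup:
+//
+//   void WriteToCrashLogFd(const char* data, int size) {
+//     if (write(kCrashLogFd, data, size) < 0) {
+//       // Ignore errors; nothing safe can be done here.
+//     }
+//   }
+//   // at startup: InstallFailureWriter(WriteToCrashLogFd);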
+void WriteToStderr(const char* data, int size) { + if (write(STDERR_FILENO, data, size) < 0) { + // Ignore errors. + } +} + +// The writer function can be changed by InstallFailureWriter(). +void (*g_failure_writer)(const char* data, int size) = WriteToStderr; + +// Dumps time information. We don't dump human-readable time information +// as localtime() is not guaranteed to be async signal safe. +void DumpTimeInfo() { + time_t time_in_sec = time(NULL); + char buf[256]; // Big enough for time info. + MinimalFormatter formatter(buf, sizeof(buf)); + formatter.AppendString("*** Aborted at "); + formatter.AppendUint64(time_in_sec, 10); + formatter.AppendString(" (unix time)"); + formatter.AppendString(" try \"date -d @"); + formatter.AppendUint64(time_in_sec, 10); + formatter.AppendString("\" if you are using GNU date ***\n"); + g_failure_writer(buf, formatter.num_bytes_written()); +} + +// Dumps information about the signal to STDERR. +void DumpSignalInfo(int signal_number, siginfo_t *siginfo) { + // Get the signal name. + const char* signal_name = NULL; + for (int i = 0; i < ARRAYSIZE(kFailureSignals); ++i) { + if (signal_number == kFailureSignals[i].number) { + signal_name = kFailureSignals[i].name; + } + } + + char buf[256]; // Big enough for signal info. + MinimalFormatter formatter(buf, sizeof(buf)); + + formatter.AppendString("*** "); + if (signal_name) { + formatter.AppendString(signal_name); + } else { + // Use the signal number if the name is unknown. The signal name + // should be known, but just in case. + formatter.AppendString("Signal "); + formatter.AppendUint64(signal_number, 10); + } + formatter.AppendString(" (@0x"); + formatter.AppendUint64(reinterpret_cast(siginfo->si_addr), 16); + formatter.AppendString(")"); + formatter.AppendString(" received by PID "); + formatter.AppendUint64(getpid(), 10); + formatter.AppendString(" (TID 0x"); + // We assume pthread_t is an integral number or a pointer, rather + // than a complex struct. In some environments, pthread_self() + // returns an uint64 but in some other environments pthread_self() + // returns a pointer. Hence we use C-style cast here, rather than + // reinterpret/static_cast, to support both types of environments. + formatter.AppendUint64((uintptr_t)pthread_self(), 16); + formatter.AppendString(") "); + // Only linux has the PID of the signal sender in si_pid. +#ifdef OS_LINUX + formatter.AppendString("from PID "); + formatter.AppendUint64(siginfo->si_pid, 10); + formatter.AppendString("; "); +#endif + formatter.AppendString("stack trace: ***\n"); + g_failure_writer(buf, formatter.num_bytes_written()); +} + +// Dumps information about the stack frame to STDERR. +void DumpStackFrameInfo(const char* prefix, void* pc) { + // Get the symbol name. + const char *symbol = "(unknown)"; + char symbolized[1024]; // Big enough for a sane symbol. + // Symbolizes the previous address of pc because pc may be in the + // next function. + if (Symbolize(reinterpret_cast(pc) - 1, + symbolized, sizeof(symbolized))) { + symbol = symbolized; + } + + char buf[1024]; // Big enough for stack frame info. + MinimalFormatter formatter(buf, sizeof(buf)); + + formatter.AppendString(prefix); + formatter.AppendString("@ "); + const int width = 2 * sizeof(void*) + 2; // + 2 for "0x". + formatter.AppendHexWithPadding(reinterpret_cast(pc), width); + formatter.AppendString(" "); + formatter.AppendString(symbol); + formatter.AppendString("\n"); + g_failure_writer(buf, formatter.num_bytes_written()); +} + +// Invoke the default signal handler. 
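+// The function below uses the usual "restore and re-raise" pattern: the
+// disposition is reset to SIG_DFL and the signal is delivered again, so the
+// default action (termination / core dump) still happens after the handler
+// has written its report. Roughly equivalent single-threaded sketch, for
+// illustration only:
+//
+//   signal(signal_number, SIG_DFL);   // drop our handler
+//   raise(signal_number);             // deliver the signal again, now fatal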
+void InvokeDefaultSignalHandler(int signal_number) { + struct sigaction sig_action; + memset(&sig_action, 0, sizeof(sig_action)); + sigemptyset(&sig_action.sa_mask); + sig_action.sa_handler = SIG_DFL; + sigaction(signal_number, &sig_action, NULL); + kill(getpid(), signal_number); +} + +// This variable is used for protecting FailureSignalHandler() from +// dumping stuff while another thread is doing it. Our policy is to let +// the first thread dump stuff and let other threads wait. +// See also comments in FailureSignalHandler(). +static pthread_t* g_entered_thread_id_pointer = NULL; + +// Dumps signal and stack frame information, and invokes the default +// signal handler once our job is done. +void FailureSignalHandler(int signal_number, + siginfo_t *signal_info, + void *ucontext) { + // First check if we've already entered the function. We use an atomic + // compare and swap operation for platforms that support it. For other + // platforms, we use a naive method that could lead to a subtle race. + + // We assume pthread_self() is async signal safe, though it's not + // officially guaranteed. + pthread_t my_thread_id = pthread_self(); + // NOTE: We could simply use pthread_t rather than pthread_t* for this, + // if pthread_self() is guaranteed to return non-zero value for thread + // ids, but there is no such guarantee. We need to distinguish if the + // old value (value returned from __sync_val_compare_and_swap) is + // different from the original value (in this case NULL). + pthread_t* old_thread_id_pointer = + glog_internal_namespace_::sync_val_compare_and_swap( + &g_entered_thread_id_pointer, + static_cast(NULL), + &my_thread_id); + if (old_thread_id_pointer != NULL) { + // We've already entered the signal handler. What should we do? + if (pthread_equal(my_thread_id, *g_entered_thread_id_pointer)) { + // It looks the current thread is reentering the signal handler. + // Something must be going wrong (maybe we are reentering by another + // type of signal?). Kill ourself by the default signal handler. + InvokeDefaultSignalHandler(signal_number); + } + // Another thread is dumping stuff. Let's wait until that thread + // finishes the job and kills the process. + while (true) { + sleep(1); + } + } + // This is the first time we enter the signal handler. We are going to + // do some interesting stuff from here. + // TODO(satorux): We might want to set timeout here using alarm(), but + // mixing alarm() and sleep() can be a bad idea. + + // First dump time info. + DumpTimeInfo(); + + // Get the program counter from ucontext. + void *pc = GetPC(ucontext); + DumpStackFrameInfo("PC: ", pc); + +#ifdef HAVE_STACKTRACE + // Get the stack traces. + void *stack[32]; + // +1 to exclude this function. + const int depth = GetStackTrace(stack, ARRAYSIZE(stack), 1); + DumpSignalInfo(signal_number, signal_info); + // Dump the stack traces. + for (int i = 0; i < depth; ++i) { + DumpStackFrameInfo(" ", stack[i]); + } +#endif + + // *** TRANSITION *** + // + // BEFORE this point, all code must be async-termination-safe! + // (See WARNING above.) + // + // AFTER this point, we do unsafe things, like using LOG()! + // The process could be terminated or hung at any time. We try to + // do more useful things first and riskier things later. + + // Flush the logs before we do anything in case 'anything' + // causes problems. + FlushLogFilesUnsafe(0); + + // Kill ourself by the default signal handler. 
+ InvokeDefaultSignalHandler(signal_number); +} + +} // namespace + +void InstallFailureSignalHandler() { + // Build the sigaction struct. + struct sigaction sig_action; + memset(&sig_action, 0, sizeof(sig_action)); + sigemptyset(&sig_action.sa_mask); + sig_action.sa_flags |= SA_SIGINFO; + sig_action.sa_sigaction = &FailureSignalHandler; + + for (int i = 0; i < ARRAYSIZE(kFailureSignals); ++i) { + CHECK_ERR(sigaction(kFailureSignals[i].number, &sig_action, NULL)); + } +} + +void InstallFailureWriter(void (*writer)(const char* data, int size)) { + g_failure_writer = writer; +} + +_END_GOOGLE_NAMESPACE_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/signalhandler_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/signalhandler_unittest.cc new file mode 100644 index 0000000000..1cd0fa010e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/signalhandler_unittest.cc @@ -0,0 +1,97 @@ +// Copyright (c) 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Satoru Takabayashi +// +// This is a helper binary for testing signalhandler.cc. The actual test +// is done in signalhandler_unittest.sh. + +#include "utilities.h" + +#include +#include +#include +#include +#include +#include "glog/logging.h" + +using namespace GOOGLE_NAMESPACE; + +void* DieInThread(void*) { + // We assume pthread_t is an integral number or a pointer, rather + // than a complex struct. In some environments, pthread_self() + // returns an uint64 but in some other environments pthread_self() + // returns a pointer. Hence we use C-style cast here, rather than + // reinterpret/static_cast, to support both types of environments. + fprintf(stderr, "0x%lx is dying\n", (long)pthread_self()); + // Use volatile to prevent from these to be optimized away. 
+ volatile int a = 0; + volatile int b = 1 / a; + fprintf(stderr, "We should have died: b=%d\n", b); + return NULL; +} + +void WriteToStdout(const char* data, int size) { + if (write(STDOUT_FILENO, data, size) < 0) { + // Ignore errors. + } +} + +int main(int argc, char **argv) { +#if defined(HAVE_STACKTRACE) && defined(HAVE_SYMBOLIZE) + InitGoogleLogging(argv[0]); +#ifdef HAVE_LIB_GFLAGS + ParseCommandLineFlags(&argc, &argv, true); +#endif + InstallFailureSignalHandler(); + const std::string command = argc > 1 ? argv[1] : "none"; + if (command == "segv") { + // We'll check if this is outputted. + LOG(INFO) << "create the log file"; + LOG(INFO) << "a message before segv"; + // We assume 0xDEAD is not writable. + int *a = (int*)0xDEAD; + *a = 0; + } else if (command == "loop") { + fprintf(stderr, "looping\n"); + while (true); + } else if (command == "die_in_thread") { + pthread_t thread; + pthread_create(&thread, NULL, &DieInThread, NULL); + pthread_join(thread, NULL); + } else if (command == "dump_to_stdout") { + InstallFailureWriter(WriteToStdout); + abort(); + } else { + // Tell the shell script + puts("OK"); + } +#endif + return 0; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/signalhandler_unittest.sh b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/signalhandler_unittest.sh new file mode 100644 index 0000000000..3b57b05d43 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/signalhandler_unittest.sh @@ -0,0 +1,131 @@ +#! /bin/sh +# +# Copyright (c) 2008, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Author: Satoru Takabayashi +# +# Unit tests for signalhandler.cc. + +die () { + echo $1 + exit 1 +} + +BINDIR=".libs" +LIBGLOG="$BINDIR/libglog.so" + +BINARY="$BINDIR/signalhandler_unittest" +LOG_INFO="./signalhandler_unittest.INFO" + +# Remove temporary files. +rm -f signalhandler.out* + +if test -e "$BINARY"; then + # We need shared object. 
+ export LD_LIBRARY_PATH=$BINDIR + export DYLD_LIBRARY_PATH=$BINDIR +else + # For windows + BINARY="./signalhandler_unittest.exe" + if ! test -e "$BINARY"; then + echo "We coundn't find demangle_unittest binary." + exit 1 + fi +fi + +if [ x`$BINARY` != 'xOK' ]; then + echo "PASS (No stacktrace support. We don't run this test.)" + exit 0 +fi + +# The PC cannot be obtained in signal handlers on PowerPC correctly. +# We just skip the test for PowerPC. +if [ x`uname -p` = x"powerpc" ]; then + echo "PASS (We don't test the signal handler on PowerPC.)" + exit 0 +fi + +# Test for a case the program kills itself by SIGSEGV. +GOOGLE_LOG_DIR=. $BINARY segv 2> signalhandler.out1 +for pattern in SIGSEGV 0xdead main "Aborted at [0-9]"; do + if ! grep --quiet "$pattern" signalhandler.out1; then + die "'$pattern' should appear in the output" + fi +done +if ! grep --quiet "a message before segv" $LOG_INFO; then + die "'a message before segv' should appear in the INFO log" +fi +rm -f $LOG_INFO + +# Test for a case the program is killed by this shell script. +# $! = the process id of the last command run in the background. +# $$ = the process id of this shell. +$BINARY loop 2> signalhandler.out2 & +# Wait until "looping" is written in the file. This indicates the program +# is ready to accept signals. +while true; do + if grep --quiet looping signalhandler.out2; then + break + fi +done +kill -TERM $! +wait $! + +from_pid='' +# Only linux has the process ID of the signal sender. +if [ x`uname` = "xLinux" ]; then + from_pid="from PID $$" +fi +for pattern in SIGTERM "by PID $!" "$from_pid" main "Aborted at [0-9]"; do + if ! grep --quiet "$pattern" signalhandler.out2; then + die "'$pattern' should appear in the output" + fi +done + +# Test for a case the program dies in a non-main thread. +$BINARY die_in_thread 2> signalhandler.out3 +EXPECTED_TID="`sed 's/ .*//' signalhandler.out3`" + +for pattern in SIGFPE DieInThread "TID $EXPECTED_TID" "Aborted at [0-9]"; do + if ! grep --quiet "$pattern" signalhandler.out3; then + die "'$pattern' should appear in the output" + fi +done + +# Test for a case the program installs a custom failure writer that writes +# stuff to stdout instead of stderr. +$BINARY dump_to_stdout 1> signalhandler.out4 +for pattern in SIGABRT main "Aborted at [0-9]"; do + if ! grep --quiet "$pattern" signalhandler.out4; then + die "'$pattern' should appear in the output" + fi +done + +echo PASS diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace.h new file mode 100644 index 0000000000..8c3e8fe8f8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace.h @@ -0,0 +1,60 @@ +// Copyright (c) 2000 - 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Routines to extract the current stack trace. These functions are +// thread-safe. + +#ifndef BASE_STACKTRACE_H_ +#define BASE_STACKTRACE_H_ + +#include "config.h" + +_START_GOOGLE_NAMESPACE_ + +// This is similar to the GetStackFrames routine, except that it returns +// the stack trace only, and not the stack frame sizes as well. +// Example: +// main() { foo(); } +// foo() { bar(); } +// bar() { +// void* result[10]; +// int depth = GetStackFrames(result, 10, 1); +// } +// +// This produces: +// result[0] foo +// result[1] main +// .... ... +// +// "result" must not be NULL. +extern int GetStackTrace(void** result, int max_depth, int skip_count); + +_END_GOOGLE_NAMESPACE_ + +#endif // BASE_STACKTRACE_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_generic-inl.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_generic-inl.h new file mode 100644 index 0000000000..fad81d3e3f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_generic-inl.h @@ -0,0 +1,59 @@ +// Copyright (c) 2000 - 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
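The stacktrace.h header above is the entire contract the per-architecture backends below have to satisfy: a single GetStackTrace(result, max_depth, skip_count) that fills result with return addresses. As a rough usage sketch (not part of the patch; it assumes the default google namespace that _START_GOOGLE_NAMESPACE_ expands to, and the printing is illustrative only), a caller wanting its own trace could do:

    #include <cstdio>
    #include "stacktrace.h"   // the header introduced above

    void PrintCurrentTrace() {
      void* frames[32];
      // skip_count = 1 drops PrintCurrentTrace itself, the same way
      // FailureSignalHandler() passes 1 to hide its own frame.
      const int depth = google::GetStackTrace(frames, 32, 1);
      for (int i = 0; i < depth; ++i) {
        std::printf("#%d  %p\n", i, frames[i]);
      }
    }

signalhandler.cc earlier in this patch uses the same call shape, GetStackTrace(stack, ARRAYSIZE(stack), 1), and then symbolizes each returned address.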
+// +// Portable implementation - just use glibc +// +// Note: The glibc implementation may cause a call to malloc. +// This can cause a deadlock in HeapProfiler. +#include +#include +#include "stacktrace.h" + +_START_GOOGLE_NAMESPACE_ + +// If you change this function, also change GetStackFrames below. +int GetStackTrace(void** result, int max_depth, int skip_count) { + static const int kStackLength = 64; + void * stack[kStackLength]; + int size; + + size = backtrace(stack, kStackLength); + skip_count++; // we want to skip the current frame as well + int result_count = size - skip_count; + if (result_count < 0) + result_count = 0; + if (result_count > max_depth) + result_count = max_depth; + for (int i = 0; i < result_count; i++) + result[i] = stack[i + skip_count]; + + return result_count; +} + +_END_GOOGLE_NAMESPACE_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_libunwind-inl.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_libunwind-inl.h new file mode 100644 index 0000000000..0dc14c6506 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_libunwind-inl.h @@ -0,0 +1,87 @@ +// Copyright (c) 2005 - 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Arun Sharma +// +// Produce stack trace using libunwind + +#include "utilities.h" + +extern "C" { +#define UNW_LOCAL_ONLY +#include +} +#include "glog/raw_logging.h" +#include "stacktrace.h" + +_START_GOOGLE_NAMESPACE_ + +// Sometimes, we can try to get a stack trace from within a stack +// trace, because libunwind can call mmap (maybe indirectly via an +// internal mmap based memory allocator), and that mmap gets trapped +// and causes a stack-trace request. If were to try to honor that +// recursive request, we'd end up with infinite recursion or deadlock. +// Luckily, it's safe to ignore those subsequent traces. In such +// cases, we return 0 to indicate the situation. 
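The guard that the next few lines implement with glog's sync_val_compare_and_swap() boils down to a single test-and-set on a flag. A hedged illustration of the same "return 0 on reentry" policy using standard C++11 atomics follows (the names here are invented; the patch itself keeps the compiler-builtin-based wrapper and the plain bool declared just below):

    #include <atomic>

    static std::atomic<bool> now_entering(false);  // illustrative stand-in for g_now_entering

    int GuardedGetStackTrace(void** result, int max_depth) {
      bool expected = false;
      // Someone (possibly this very thread, re-entered via a trapped mmap)
      // is already unwinding: report an empty trace instead of recursing.
      if (!now_entering.compare_exchange_strong(expected, true)) {
        return 0;
      }
      (void)result; (void)max_depth;   // the real libunwind cursor loop goes here
      int depth = 0;
      now_entering.store(false);
      return depth;
    }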
+static bool g_now_entering = false; + +// If you change this function, also change GetStackFrames below. +int GetStackTrace(void** result, int max_depth, int skip_count) { + void *ip; + int n = 0; + unw_cursor_t cursor; + unw_context_t uc; + + if (sync_val_compare_and_swap(&g_now_entering, false, true)) { + return 0; + } + + unw_getcontext(&uc); + RAW_CHECK(unw_init_local(&cursor, &uc) >= 0, "unw_init_local failed"); + skip_count++; // Do not include the "GetStackTrace" frame + + while (n < max_depth) { + int ret = unw_get_reg(&cursor, UNW_REG_IP, (unw_word_t *) &ip); + if (ret < 0) + break; + if (skip_count > 0) { + skip_count--; + } else { + result[n++] = ip; + } + ret = unw_step(&cursor); + if (ret <= 0) + break; + } + + g_now_entering = false; + return n; +} + +_END_GOOGLE_NAMESPACE_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_powerpc-inl.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_powerpc-inl.h new file mode 100644 index 0000000000..1090ddedbc --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_powerpc-inl.h @@ -0,0 +1,130 @@ +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Craig Silverstein +// +// Produce stack trace. I'm guessing (hoping!) the code is much like +// for x86. For apple machines, at least, it seems to be; see +// http://developer.apple.com/documentation/mac/runtimehtml/RTArch-59.html +// http://www.linux-foundation.org/spec/ELF/ppc64/PPC-elf64abi-1.9.html#STACK +// Linux has similar code: http://patchwork.ozlabs.org/linuxppc/patch?id=8882 + +#include +#include // for uintptr_t +#include "stacktrace.h" + +_START_GOOGLE_NAMESPACE_ + +// Given a pointer to a stack frame, locate and return the calling +// stackframe, or return NULL if no stackframe can be found. Perform sanity +// checks (the strictness of which is controlled by the boolean parameter +// "STRICT_UNWINDING") to reduce the chance that a bad pointer is returned. 
+template +static void **NextStackFrame(void **old_sp) { + void **new_sp = (void **) *old_sp; + + // Check that the transition from frame pointer old_sp to frame + // pointer new_sp isn't clearly bogus + if (STRICT_UNWINDING) { + // With the stack growing downwards, older stack frame must be + // at a greater address that the current one. + if (new_sp <= old_sp) return NULL; + // Assume stack frames larger than 100,000 bytes are bogus. + if ((uintptr_t)new_sp - (uintptr_t)old_sp > 100000) return NULL; + } else { + // In the non-strict mode, allow discontiguous stack frames. + // (alternate-signal-stacks for example). + if (new_sp == old_sp) return NULL; + // And allow frames upto about 1MB. + if ((new_sp > old_sp) + && ((uintptr_t)new_sp - (uintptr_t)old_sp > 1000000)) return NULL; + } + if ((uintptr_t)new_sp & (sizeof(void *) - 1)) return NULL; + return new_sp; +} + +// This ensures that GetStackTrace stes up the Link Register properly. +void StacktracePowerPCDummyFunction() __attribute__((noinline)); +void StacktracePowerPCDummyFunction() { __asm__ volatile(""); } + +// If you change this function, also change GetStackFrames below. +int GetStackTrace(void** result, int max_depth, int skip_count) { + void **sp; + // Apple OS X uses an old version of gnu as -- both Darwin 7.9.0 (Panther) + // and Darwin 8.8.1 (Tiger) use as 1.38. This means we have to use a + // different asm syntax. I don't know quite the best way to discriminate + // systems using the old as from the new one; I've gone with __APPLE__. +#ifdef __APPLE__ + __asm__ volatile ("mr %0,r1" : "=r" (sp)); +#else + __asm__ volatile ("mr %0,1" : "=r" (sp)); +#endif + + // On PowerPC, the "Link Register" or "Link Record" (LR), is a stack + // entry that holds the return address of the subroutine call (what + // instruction we run after our function finishes). This is the + // same as the stack-pointer of our parent routine, which is what we + // want here. While the compiler will always(?) set up LR for + // subroutine calls, it may not for leaf functions (such as this one). + // This routine forces the compiler (at least gcc) to push it anyway. + StacktracePowerPCDummyFunction(); + + // The LR save area is used by the callee, so the top entry is bogus. + skip_count++; + + int n = 0; + while (sp && n < max_depth) { + if (skip_count > 0) { + skip_count--; + } else { + // PowerPC has 3 main ABIs, which say where in the stack the + // Link Register is. For DARWIN and AIX (used by apple and + // linux ppc64), it's in sp[2]. For SYSV (used by linux ppc), + // it's in sp[1]. +#if defined(_CALL_AIX) || defined(_CALL_DARWIN) + result[n++] = *(sp+2); +#elif defined(_CALL_SYSV) + result[n++] = *(sp+1); +#elif defined(__APPLE__) || (defined(__linux) && defined(__PPC64__)) + // This check is in case the compiler doesn't define _CALL_AIX/etc. + result[n++] = *(sp+2); +#elif defined(__linux) + // This check is in case the compiler doesn't define _CALL_SYSV. + result[n++] = *(sp+1); +#else +#error Need to specify the PPC ABI for your archiecture. +#endif + } + // Use strict unwinding rules. 
+ sp = NextStackFrame(sp); + } + return n; +} + +_END_GOOGLE_NAMESPACE_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_unittest.cc new file mode 100644 index 0000000000..0acd2c9af3 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_unittest.cc @@ -0,0 +1,157 @@ +// Copyright (c) 2004, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include "utilities.h" + +#include +#include +#include "config.h" +#include "base/commandlineflags.h" +#include "glog/logging.h" +#include "stacktrace.h" + +#ifdef HAVE_EXECINFO_H +# include +#endif + +using namespace GOOGLE_NAMESPACE; + +#ifdef HAVE_STACKTRACE + +// Obtain a backtrace, verify that the expected callers are present in the +// backtrace, and maybe print the backtrace to stdout. + +//-----------------------------------------------------------------------// +void CheckStackTraceLeaf(); +void CheckStackTrace4(int i); +void CheckStackTrace3(int i); +void CheckStackTrace2(int i); +void CheckStackTrace1(int i); +void CheckStackTrace(int i); +//-----------------------------------------------------------------------// + +// The sequence of functions whose return addresses we expect to see in the +// backtrace. +const int BACKTRACE_STEPS = 6; +void * expected_stack[BACKTRACE_STEPS] = { + (void *) &CheckStackTraceLeaf, + (void *) &CheckStackTrace4, + (void *) &CheckStackTrace3, + (void *) &CheckStackTrace2, + (void *) &CheckStackTrace1, + (void *) &CheckStackTrace, +}; + +// Depending on the architecture/compiler/libraries, (not sure which) +// the current function may or may not appear in the backtrace. 
+// For gcc-2: +// +// stack[0] is ret addr within CheckStackTrace4 +// stack[1] is ret addr within CheckStackTrace3 +// stack[2] is ret addr within CheckStackTrace2 +// stack[3] is ret addr within CheckStackTrace1 +// stack[4] is ret addr within CheckStackTrace +// +// For gcc3-k8: +// +// stack[0] is ret addr within CheckStackTraceLeaf +// stack[1] is ret addr within CheckStackTrace4 +// ... +// stack[5] is ret addr within CheckStackTrace + +//-----------------------------------------------------------------------// + +const int kMaxFnLen = 0x40; // assume relevant functions are only this long + +void CheckRetAddrIsInFunction( void * ret_addr, void * function_start_addr) +{ + CHECK_GE(ret_addr, function_start_addr); + CHECK_LE(ret_addr, (void *) ((char *) function_start_addr + kMaxFnLen)); +} + +//-----------------------------------------------------------------------// + +void CheckStackTraceLeaf(void) { + const int STACK_LEN = 10; + void *stack[STACK_LEN]; + int size; + + size = GetStackTrace(stack, STACK_LEN, 0); + printf("Obtained %d stack frames.\n", size); + CHECK_LE(size, STACK_LEN); + + if (1) { +#ifdef HAVE_EXECINFO_H + char **strings = backtrace_symbols(stack, size); + printf("Obtained %d stack frames.\n", size); + for (int i = 0; i < size; i++) + printf("%s %p\n", strings[i], stack[i]); + printf("CheckStackTrace() addr: %p\n", &CheckStackTrace); + free(strings); +#endif + } + for (int i = 0; i < BACKTRACE_STEPS; i++) { + printf("Backtrace %d: expected: %p..%p actual: %p ... ", + i, expected_stack[i], + reinterpret_cast(expected_stack[i]) + kMaxFnLen, stack[i]); + CheckRetAddrIsInFunction(stack[i], expected_stack[i]); + printf("OK\n"); + } + + // Check if the second stacktrace returns the same size. + CHECK_EQ(size, GetStackTrace(stack, STACK_LEN, 0)); +} + +//-----------------------------------------------------------------------// + +/* Dummy functions to make the backtrace more interesting. */ +void CheckStackTrace4(int i) { for (int j = i; j >= 0; j--) CheckStackTraceLeaf(); } +void CheckStackTrace3(int i) { for (int j = i; j >= 0; j--) CheckStackTrace4(j); } +void CheckStackTrace2(int i) { for (int j = i; j >= 0; j--) CheckStackTrace3(j); } +void CheckStackTrace1(int i) { for (int j = i; j >= 0; j--) CheckStackTrace2(j); } +void CheckStackTrace(int i) { for (int j = i; j >= 0; j--) CheckStackTrace1(j); } + +//-----------------------------------------------------------------------// + +int main(int argc, char ** argv) { + FLAGS_logtostderr = true; + InitGoogleLogging(argv[0]); + + CheckStackTrace(0); + + printf("PASS\n"); + return 0; +} + +#else +int main() { + printf("PASS (no stacktrace support)\n"); + return 0; +} +#endif // HAVE_STACKTRACE diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_x86-inl.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_x86-inl.h new file mode 100644 index 0000000000..cfd31f783e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_x86-inl.h @@ -0,0 +1,139 @@ +// Copyright (c) 2000 - 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Produce stack trace + +#include // for uintptr_t + +#include "utilities.h" // for OS_* macros + +#if !defined(OS_WINDOWS) +#include +#include +#endif + +#include // for NULL +#include "stacktrace.h" + +_START_GOOGLE_NAMESPACE_ + +// Given a pointer to a stack frame, locate and return the calling +// stackframe, or return NULL if no stackframe can be found. Perform sanity +// checks (the strictness of which is controlled by the boolean parameter +// "STRICT_UNWINDING") to reduce the chance that a bad pointer is returned. +template +static void **NextStackFrame(void **old_sp) { + void **new_sp = (void **) *old_sp; + + // Check that the transition from frame pointer old_sp to frame + // pointer new_sp isn't clearly bogus + if (STRICT_UNWINDING) { + // With the stack growing downwards, older stack frame must be + // at a greater address that the current one. + if (new_sp <= old_sp) return NULL; + // Assume stack frames larger than 100,000 bytes are bogus. + if ((uintptr_t)new_sp - (uintptr_t)old_sp > 100000) return NULL; + } else { + // In the non-strict mode, allow discontiguous stack frames. + // (alternate-signal-stacks for example). + if (new_sp == old_sp) return NULL; + // And allow frames upto about 1MB. + if ((new_sp > old_sp) + && ((uintptr_t)new_sp - (uintptr_t)old_sp > 1000000)) return NULL; + } + if ((uintptr_t)new_sp & (sizeof(void *) - 1)) return NULL; +#ifdef __i386__ + // On 64-bit machines, the stack pointer can be very close to + // 0xffffffff, so we explicitly check for a pointer into the + // last two pages in the address space + if ((uintptr_t)new_sp >= 0xffffe000) return NULL; +#endif +#if !defined(OS_WINDOWS) + if (!STRICT_UNWINDING) { + // Lax sanity checks cause a crash in 32-bit tcmalloc/crash_reason_test + // on AMD-based machines with VDSO-enabled kernels. + // Make an extra sanity check to insure new_sp is readable. + // Note: NextStackFrame() is only called while the program + // is already on its last leg, so it's ok to be slow here. + static int page_size = getpagesize(); + void *new_sp_aligned = (void *)((uintptr_t)new_sp & ~(page_size - 1)); + if (msync(new_sp_aligned, page_size, MS_ASYNC) == -1) + return NULL; + } +#endif + return new_sp; +} + +// If you change this function, also change GetStackFrames below. 
+int GetStackTrace(void** result, int max_depth, int skip_count) { + void **sp; +#ifdef __i386__ + // Stack frame format: + // sp[0] pointer to previous frame + // sp[1] caller address + // sp[2] first argument + // ... + sp = (void **)&result - 2; +#endif + +#ifdef __x86_64__ + // __builtin_frame_address(0) can return the wrong address on gcc-4.1.0-k8 + unsigned long rbp; + // Move the value of the register %rbp into the local variable rbp. + // We need 'volatile' to prevent this instruction from getting moved + // around during optimization to before function prologue is done. + // An alternative way to achieve this + // would be (before this __asm__ instruction) to call Noop() defined as + // static void Noop() __attribute__ ((noinline)); // prevent inlining + // static void Noop() { asm(""); } // prevent optimizing-away + __asm__ volatile ("mov %%rbp, %0" : "=r" (rbp)); + // Arguments are passed in registers on x86-64, so we can't just + // offset from &result + sp = (void **) rbp; +#endif + + int n = 0; + while (sp && n < max_depth) { + if (*(sp+1) == (void *)0) { + // In 64-bit code, we often see a frame that + // points to itself and has a return address of 0. + break; + } + if (skip_count > 0) { + skip_count--; + } else { + result[n++] = *(sp+1); + } + // Use strict unwinding rules. + sp = NextStackFrame(sp); + } + return n; +} + +_END_GOOGLE_NAMESPACE_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_x86_64-inl.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_x86_64-inl.h new file mode 100644 index 0000000000..f7d1dca85b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stacktrace_x86_64-inl.h @@ -0,0 +1,105 @@ +// Copyright (c) 2005 - 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
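The x86 walker above leans entirely on the classic frame-pointer chain: sp[0] holds the caller's saved frame pointer and sp[1] the return address. A small illustration of that layout (only meaningful when frame pointers are kept, i.e. without -fomit-frame-pointer, which is the same assumption the walker makes; the function name is invented):

    #include <cstdio>

    void DumpTwoFrames() {
      // __builtin_frame_address(0) is this function's own frame pointer.
      void** sp = static_cast<void**>(__builtin_frame_address(0));
      for (int i = 0; i < 2 && sp != NULL; ++i) {
        std::printf("frame %d: return address %p\n", i, sp[1]);
        sp = static_cast<void**>(sp[0]);   // hop to the caller's frame
      }
    }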
+// +// Author: Arun Sharma +// +// Produce stack trace using libgcc + +extern "C" { +#include // for NULL +#include // ABI defined unwinder +} +#include "stacktrace.h" + +_START_GOOGLE_NAMESPACE_ + +typedef struct { + void **result; + int max_depth; + int skip_count; + int count; +} trace_arg_t; + + +// Workaround for the malloc() in _Unwind_Backtrace() issue. +static _Unwind_Reason_Code nop_backtrace(struct _Unwind_Context *uc, void *opq) { + return _URC_NO_REASON; +} + + +// This code is not considered ready to run until +// static initializers run so that we are guaranteed +// that any malloc-related initialization is done. +static bool ready_to_run = false; +class StackTraceInit { + public: + StackTraceInit() { + // Extra call to force initialization + _Unwind_Backtrace(nop_backtrace, NULL); + ready_to_run = true; + } +}; + +static StackTraceInit module_initializer; // Force initialization + +static _Unwind_Reason_Code GetOneFrame(struct _Unwind_Context *uc, void *opq) { + trace_arg_t *targ = (trace_arg_t *) opq; + + if (targ->skip_count > 0) { + targ->skip_count--; + } else { + targ->result[targ->count++] = (void *) _Unwind_GetIP(uc); + } + + if (targ->count == targ->max_depth) + return _URC_END_OF_STACK; + + return _URC_NO_REASON; +} + +// If you change this function, also change GetStackFrames below. +int GetStackTrace(void** result, int max_depth, int skip_count) { + if (!ready_to_run) + return 0; + + trace_arg_t targ; + + skip_count += 1; // Do not include the "GetStackTrace" frame + + targ.result = result; + targ.max_depth = max_depth; + targ.skip_count = skip_count; + targ.count = 0; + + _Unwind_Backtrace(GetOneFrame, &targ); + + return targ.count; +} + +_END_GOOGLE_NAMESPACE_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stl_logging_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stl_logging_unittest.cc new file mode 100644 index 0000000000..0ed4695da4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/stl_logging_unittest.cc @@ -0,0 +1,191 @@ +// Copyright (c) 2003, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include "utilities.h" +#include "config.h" + +#ifdef HAVE_USING_OPERATOR + +#include "glog/stl_logging.h" + +#include +#include +#include +#include +#include + +#ifdef __GNUC__ +# include +# include +#endif + +#include "glog/logging.h" +#include "googletest.h" + +using namespace std; +#ifdef __GNUC__ +using namespace __gnu_cxx; +#endif + +struct user_hash { + size_t operator()(int x) const { return x; } +}; + +void TestSTLLogging() { + { + // Test a sequence. + vector v; + v.push_back(10); + v.push_back(20); + v.push_back(30); + char ss_buf[1000]; + ostrstream ss(ss_buf, sizeof(ss_buf)); + // Just ostrstream s1; leaks heap. + ss << v << ends; + CHECK_STREQ(ss.str(), "10 20 30"); + vector copied_v(v); + CHECK_EQ(v, copied_v); // This must compile. + } + + { + // Test a sorted pair associative container. + map< int, string > m; + m[20] = "twenty"; + m[10] = "ten"; + m[30] = "thirty"; + char ss_buf[1000]; + ostrstream ss(ss_buf, sizeof(ss_buf)); + ss << m << ends; + CHECK_STREQ(ss.str(), "(10, ten) (20, twenty) (30, thirty)"); + map< int, string > copied_m(m); + CHECK_EQ(m, copied_m); // This must compile. + } + +#ifdef __GNUC__ + { + // Test a hashed simple associative container. + hash_set hs; + hs.insert(10); + hs.insert(20); + hs.insert(30); + char ss_buf[1000]; + ostrstream ss(ss_buf, sizeof(ss_buf)); + ss << hs << ends; + CHECK_STREQ(ss.str(), "10 20 30"); + hash_set copied_hs(hs); + CHECK_EQ(hs, copied_hs); // This must compile. + } +#endif + +#ifdef __GNUC__ + { + // Test a hashed pair associative container. + hash_map hm; + hm[10] = "ten"; + hm[20] = "twenty"; + hm[30] = "thirty"; + char ss_buf[1000]; + ostrstream ss(ss_buf, sizeof(ss_buf)); + ss << hm << ends; + CHECK_STREQ(ss.str(), "(10, ten) (20, twenty) (30, thirty)"); + hash_map copied_hm(hm); + CHECK_EQ(hm, copied_hm); // this must compile + } +#endif + + { + // Test a long sequence. + vector v; + string expected; + for (int i = 0; i < 100; i++) { + v.push_back(i); + if (i > 0) expected += ' '; + char buf[256]; + sprintf(buf, "%d", i); + expected += buf; + } + v.push_back(100); + expected += " ..."; + char ss_buf[1000]; + ostrstream ss(ss_buf, sizeof(ss_buf)); + ss << v << ends; + CHECK_STREQ(ss.str(), expected.c_str()); + } + + { + // Test a sorted pair associative container. + // Use a non-default comparison functor. + map< int, string, greater > m; + m[20] = "twenty"; + m[10] = "ten"; + m[30] = "thirty"; + char ss_buf[1000]; + ostrstream ss(ss_buf, sizeof(ss_buf)); + ss << m << ends; + CHECK_STREQ(ss.str(), "(30, thirty) (20, twenty) (10, ten)"); + map< int, string, greater > copied_m(m); + CHECK_EQ(m, copied_m); // This must compile. + } + +#ifdef __GNUC__ + { + // Test a hashed simple associative container. + // Use a user defined hash function. 
+ hash_set hs; + hs.insert(10); + hs.insert(20); + hs.insert(30); + char ss_buf[1000]; + ostrstream ss(ss_buf, sizeof(ss_buf)); + ss << hs << ends; + CHECK_STREQ(ss.str(), "10 20 30"); + hash_set copied_hs(hs); + CHECK_EQ(hs, copied_hs); // This must compile. + } +#endif +} + +int main(int argc, char** argv) { + TestSTLLogging(); + std::cout << "PASS\n"; + return 0; +} + +#else + +#include + +int main(int argc, char** argv) { + std::cout << "We don't support stl_logging for this compiler.\n" + << "(we need compiler support of 'using ::operator<<' " + << "for this feature.)\n"; + return 0; +} + +#endif // HAVE_USING_OPERATOR diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/symbolize.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/symbolize.cc new file mode 100644 index 0000000000..3465de6c6f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/symbolize.cc @@ -0,0 +1,681 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Satoru Takabayashi +// Stack-footprint reduction work done by Raksit Ashok +// +// Implementation note: +// +// We don't use heaps but only use stacks. We want to reduce the +// stack consumption so that the symbolizer can run on small stacks. +// +// Here are some numbers collected with GCC 4.1.0 on x86: +// - sizeof(Elf32_Sym) = 16 +// - sizeof(Elf32_Shdr) = 40 +// - sizeof(Elf64_Sym) = 24 +// - sizeof(Elf64_Shdr) = 64 +// +// This implementation is intended to be async-signal-safe but uses +// some functions which are not guaranteed to be so, such as memchr() +// and memmove(). We assume they are async-signal-safe. +// + +#include "utilities.h" + +#if defined(HAVE_SYMBOLIZE) + +#include + +#include "symbolize.h" +#include "demangle.h" + +_START_GOOGLE_NAMESPACE_ + +// We don't use assert() since it's not guaranteed to be +// async-signal-safe. Instead we define a minimal assertion +// macro. So far, we don't need pretty printing for __FILE__, etc. 
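All of the machinery in this file backs a single entry point, Symbolize(pc, out, out_size), which is what DumpStackFrameInfo() in signalhandler.cc calls for every frame. A minimal usage sketch (assuming the default google namespace and a caller-owned buffer, as the failure handler uses; the surrounding function name is invented):

    #include <cstdio>
    #include "symbolize.h"

    void PrintCallerName() {
      char name[1024];                          // fixed buffer: no heap allocation
      void* pc = __builtin_return_address(0);
      // Look up pc - 1, since the return address may already point
      // into the next function (the same trick DumpStackFrameInfo uses).
      if (google::Symbolize(static_cast<char*>(pc) - 1, name, sizeof(name))) {
        std::printf("called from %s\n", name);
      } else {
        std::printf("called from (unknown) %p\n", pc);
      }
    }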
+ +// A wrapper for abort() to make it callable in ? :. +static int AssertFail() { + abort(); + return 0; // Should not reach. +} + +#define SAFE_ASSERT(expr) ((expr) ? 0 : AssertFail()) + +static SymbolizeCallback g_symbolize_callback = NULL; +void InstallSymbolizeCallback(SymbolizeCallback callback) { + g_symbolize_callback = callback; +} + +// This function wraps the Demangle function to provide an interface +// where the input symbol is demangled in-place. +// To keep stack consumption low, we would like this function to not +// get inlined. +static ATTRIBUTE_NOINLINE void DemangleInplace(char *out, int out_size) { + char demangled[256]; // Big enough for sane demangled symbols. + if (Demangle(out, demangled, sizeof(demangled))) { + // Demangling succeeded. Copy to out if the space allows. + int len = strlen(demangled); + if (len + 1 <= out_size) { // +1 for '\0'. + SAFE_ASSERT(len < sizeof(demangled)); + memmove(out, demangled, len + 1); + } + } +} + +_END_GOOGLE_NAMESPACE_ + +#if defined(__ELF__) + +#include +#include +#include +#include +#include +#include // For ElfW() macro. +#include +#include +#include +#include +#include +#include +#include +#include + +#include "symbolize.h" +#include "config.h" +#include "glog/raw_logging.h" + +// Re-runs fn until it doesn't cause EINTR. +#define NO_INTR(fn) do {} while ((fn) < 0 && errno == EINTR) + +_START_GOOGLE_NAMESPACE_ + +// Read up to "count" bytes from file descriptor "fd" into the buffer +// starting at "buf" while handling short reads and EINTR. On +// success, return the number of bytes read. Otherwise, return -1. +static ssize_t ReadPersistent(const int fd, void *buf, const size_t count) { + SAFE_ASSERT(fd >= 0); + SAFE_ASSERT(count >= 0 && count <= std::numeric_limits::max()); + char *buf0 = reinterpret_cast(buf); + ssize_t num_bytes = 0; + while (num_bytes < count) { + ssize_t len; + NO_INTR(len = read(fd, buf0 + num_bytes, count - num_bytes)); + if (len < 0) { // There was an error other than EINTR. + return -1; + } + if (len == 0) { // Reached EOF. + break; + } + num_bytes += len; + } + SAFE_ASSERT(num_bytes <= count); + return num_bytes; +} + +// Read up to "count" bytes from "offset" in the file pointed by file +// descriptor "fd" into the buffer starting at "buf". On success, +// return the number of bytes read. Otherwise, return -1. +static ssize_t ReadFromOffset(const int fd, void *buf, + const size_t count, const off_t offset) { + off_t off = lseek(fd, offset, SEEK_SET); + if (off == (off_t)-1) { + return -1; + } + return ReadPersistent(fd, buf, count); +} + +// Try reading exactly "count" bytes from "offset" bytes in a file +// pointed by "fd" into the buffer starting at "buf" while handling +// short reads and EINTR. On success, return true. Otherwise, return +// false. +static bool ReadFromOffsetExact(const int fd, void *buf, + const size_t count, const off_t offset) { + ssize_t len = ReadFromOffset(fd, buf, count, offset); + return len == count; +} + +// Returns elf_header.e_type if the file pointed by fd is an ELF binary. +static int FileGetElfType(const int fd) { + ElfW(Ehdr) elf_header; + if (!ReadFromOffsetExact(fd, &elf_header, sizeof(elf_header), 0)) { + return -1; + } + if (memcmp(elf_header.e_ident, ELFMAG, SELFMAG) != 0) { + return -1; + } + return elf_header.e_type; +} + +// Read the section headers in the given ELF binary, and if a section +// of the specified type is found, set the output to this section header +// and return true. Otherwise, return false. 
+// To keep stack consumption low, we would like this function to not get +// inlined. +static ATTRIBUTE_NOINLINE bool +GetSectionHeaderByType(const int fd, ElfW(Half) sh_num, const off_t sh_offset, + ElfW(Word) type, ElfW(Shdr) *out) { + // Read at most 16 section headers at a time to save read calls. + ElfW(Shdr) buf[16]; + for (int i = 0; i < sh_num;) { + const ssize_t num_bytes_left = (sh_num - i) * sizeof(buf[0]); + const ssize_t num_bytes_to_read = + (sizeof(buf) > num_bytes_left) ? num_bytes_left : sizeof(buf); + const ssize_t len = ReadFromOffset(fd, buf, num_bytes_to_read, + sh_offset + i * sizeof(buf[0])); + SAFE_ASSERT(len % sizeof(buf[0]) == 0); + const ssize_t num_headers_in_buf = len / sizeof(buf[0]); + SAFE_ASSERT(num_headers_in_buf <= sizeof(buf) / sizeof(buf[0])); + for (int j = 0; j < num_headers_in_buf; ++j) { + if (buf[j].sh_type == type) { + *out = buf[j]; + return true; + } + } + i += num_headers_in_buf; + } + return false; +} + +// There is no particular reason to limit section name to 63 characters, +// but there has (as yet) been no need for anything longer either. +const int kMaxSectionNameLen = 64; + +// name_len should include terminating '\0'. +bool GetSectionHeaderByName(int fd, const char *name, size_t name_len, + ElfW(Shdr) *out) { + ElfW(Ehdr) elf_header; + if (!ReadFromOffsetExact(fd, &elf_header, sizeof(elf_header), 0)) { + return false; + } + + ElfW(Shdr) shstrtab; + off_t shstrtab_offset = (elf_header.e_shoff + + elf_header.e_shentsize * elf_header.e_shstrndx); + if (!ReadFromOffsetExact(fd, &shstrtab, sizeof(shstrtab), shstrtab_offset)) { + return false; + } + + for (int i = 0; i < elf_header.e_shnum; ++i) { + off_t section_header_offset = (elf_header.e_shoff + + elf_header.e_shentsize * i); + if (!ReadFromOffsetExact(fd, out, sizeof(*out), section_header_offset)) { + return false; + } + char header_name[kMaxSectionNameLen]; + if (sizeof(header_name) < name_len) { + RAW_LOG(WARNING, "Section name '%s' is too long (%"PRIuS"); " + "section will not be found (even if present).", name, name_len); + // No point in even trying. + return false; + } + off_t name_offset = shstrtab.sh_offset + out->sh_name; + ssize_t n_read = ReadFromOffset(fd, &header_name, name_len, name_offset); + if (n_read == -1) { + return false; + } else if (n_read != name_len) { + // Short read -- name could be at end of file. + continue; + } + if (memcmp(header_name, name, name_len) == 0) { + return true; + } + } + return false; +} + +// Read a symbol table and look for the symbol containing the +// pc. Iterate over symbols in a symbol table and look for the symbol +// containing "pc". On success, return true and write the symbol name +// to out. Otherwise, return false. +// To keep stack consumption low, we would like this function to not get +// inlined. +static ATTRIBUTE_NOINLINE bool +FindSymbol(uint64_t pc, const int fd, char *out, int out_size, + uint64_t symbol_offset, const ElfW(Shdr) *strtab, + const ElfW(Shdr) *symtab) { + if (symtab == NULL) { + return false; + } + const int num_symbols = symtab->sh_size / symtab->sh_entsize; + for (int i = 0; i < num_symbols;) { + off_t offset = symtab->sh_offset + i * symtab->sh_entsize; + + // If we are reading Elf64_Sym's, we want to limit this array to + // 32 elements (to keep stack consumption low), otherwise we can + // have a 64 element Elf32_Sym array. +#if __WORDSIZE == 64 +#define NUM_SYMBOLS 32 +#else +#define NUM_SYMBOLS 64 +#endif + + // Read at most NUM_SYMBOLS symbols at once to save read() calls. 
+ ElfW(Sym) buf[NUM_SYMBOLS]; + const ssize_t len = ReadFromOffset(fd, &buf, sizeof(buf), offset); + SAFE_ASSERT(len % sizeof(buf[0]) == 0); + const ssize_t num_symbols_in_buf = len / sizeof(buf[0]); + SAFE_ASSERT(num_symbols_in_buf <= sizeof(buf)/sizeof(buf[0])); + for (int j = 0; j < num_symbols_in_buf; ++j) { + const ElfW(Sym)& symbol = buf[j]; + uint64_t start_address = symbol.st_value; + start_address += symbol_offset; + uint64_t end_address = start_address + symbol.st_size; + if (symbol.st_value != 0 && // Skip null value symbols. + symbol.st_shndx != 0 && // Skip undefined symbols. + start_address <= pc && pc < end_address) { + ssize_t len1 = ReadFromOffset(fd, out, out_size, + strtab->sh_offset + symbol.st_name); + if (len1 <= 0 || memchr(out, '\0', out_size) == NULL) { + return false; + } + return true; // Obtained the symbol name. + } + } + i += num_symbols_in_buf; + } + return false; +} + +// Get the symbol name of "pc" from the file pointed by "fd". Process +// both regular and dynamic symbol tables if necessary. On success, +// write the symbol name to "out" and return true. Otherwise, return +// false. +static bool GetSymbolFromObjectFile(const int fd, uint64_t pc, + char *out, int out_size, + uint64_t map_start_address) { + // Read the ELF header. + ElfW(Ehdr) elf_header; + if (!ReadFromOffsetExact(fd, &elf_header, sizeof(elf_header), 0)) { + return false; + } + + uint64_t symbol_offset = 0; + if (elf_header.e_type == ET_DYN) { // DSO needs offset adjustment. + symbol_offset = map_start_address; + } + + ElfW(Shdr) symtab, strtab; + + // Consult a regular symbol table first. + if (!GetSectionHeaderByType(fd, elf_header.e_shnum, elf_header.e_shoff, + SHT_SYMTAB, &symtab)) { + return false; + } + if (!ReadFromOffsetExact(fd, &strtab, sizeof(strtab), elf_header.e_shoff + + symtab.sh_link * sizeof(symtab))) { + return false; + } + if (FindSymbol(pc, fd, out, out_size, symbol_offset, + &strtab, &symtab)) { + return true; // Found the symbol in a regular symbol table. + } + + // If the symbol is not found, then consult a dynamic symbol table. + if (!GetSectionHeaderByType(fd, elf_header.e_shnum, elf_header.e_shoff, + SHT_DYNSYM, &symtab)) { + return false; + } + if (!ReadFromOffsetExact(fd, &strtab, sizeof(strtab), elf_header.e_shoff + + symtab.sh_link * sizeof(symtab))) { + return false; + } + if (FindSymbol(pc, fd, out, out_size, symbol_offset, + &strtab, &symtab)) { + return true; // Found the symbol in a dynamic symbol table. + } + + return false; +} + +namespace { +// Thin wrapper around a file descriptor so that the file descriptor +// gets closed for sure. +struct FileDescriptor { + const int fd_; + explicit FileDescriptor(int fd) : fd_(fd) {} + ~FileDescriptor() { + if (fd_ >= 0) { + NO_INTR(close(fd_)); + } + } + int get() { return fd_; } + + private: + explicit FileDescriptor(const FileDescriptor&); + void operator=(const FileDescriptor&); +}; + +// Helper class for reading lines from file. +// +// Note: we don't use ProcMapsIterator since the object is big (it has +// a 5k array member) and uses async-unsafe functions such as sscanf() +// and snprintf(). +class LineReader { + public: + explicit LineReader(int fd, char *buf, int buf_len) : fd_(fd), + buf_(buf), buf_len_(buf_len), bol_(buf), eol_(buf), eod_(buf) { + } + + // Read '\n'-terminated line from file. On success, modify "bol" + // and "eol", then return true. Otherwise, return false. + // + // Note: if the last line doesn't end with '\n', the line will be + // dropped. 
It's an intentional behavior to make the code simple. + bool ReadLine(const char **bol, const char **eol) { + if (BufferIsEmpty()) { // First time. + const ssize_t num_bytes = ReadPersistent(fd_, buf_, buf_len_); + if (num_bytes <= 0) { // EOF or error. + return false; + } + eod_ = buf_ + num_bytes; + bol_ = buf_; + } else { + bol_ = eol_ + 1; // Advance to the next line in the buffer. + SAFE_ASSERT(bol_ <= eod_); // "bol_" can point to "eod_". + if (!HasCompleteLine()) { + const int incomplete_line_length = eod_ - bol_; + // Move the trailing incomplete line to the beginning. + memmove(buf_, bol_, incomplete_line_length); + // Read text from file and append it. + char * const append_pos = buf_ + incomplete_line_length; + const int capacity_left = buf_len_ - incomplete_line_length; + const ssize_t num_bytes = ReadPersistent(fd_, append_pos, + capacity_left); + if (num_bytes <= 0) { // EOF or error. + return false; + } + eod_ = append_pos + num_bytes; + bol_ = buf_; + } + } + eol_ = FindLineFeed(); + if (eol_ == NULL) { // '\n' not found. Malformed line. + return false; + } + *eol_ = '\0'; // Replace '\n' with '\0'. + + *bol = bol_; + *eol = eol_; + return true; + } + + // Beginning of line. + const char *bol() { + return bol_; + } + + // End of line. + const char *eol() { + return eol_; + } + + private: + explicit LineReader(const LineReader&); + void operator=(const LineReader&); + + char *FindLineFeed() { + return reinterpret_cast(memchr(bol_, '\n', eod_ - bol_)); + } + + bool BufferIsEmpty() { + return buf_ == eod_; + } + + bool HasCompleteLine() { + return !BufferIsEmpty() && FindLineFeed() != NULL; + } + + const int fd_; + char * const buf_; + const int buf_len_; + char *bol_; + char *eol_; + const char *eod_; // End of data in "buf_". +}; +} // namespace + +// Place the hex number read from "start" into "*hex". The pointer to +// the first non-hex character or "end" is returned. +static char *GetHex(const char *start, const char *end, uint64_t *hex) { + *hex = 0; + const char *p; + for (p = start; p < end; ++p) { + int ch = *p; + if ((ch >= '0' && ch <= '9') || + (ch >= 'A' && ch <= 'F') || (ch >= 'a' && ch <= 'f')) { + *hex = (*hex << 4) | (ch < 'A' ? ch - '0' : (ch & 0xF) + 9); + } else { // Encountered the first non-hex character. + break; + } + } + SAFE_ASSERT(p <= end); + return const_cast(p); +} + +// Search for the object file (from /proc/self/maps) that contains +// the specified pc. If found, open this file and return the file handle, +// and also set start_address to the start address of where this object +// file is mapped to in memory. Otherwise, return -1. +static ATTRIBUTE_NOINLINE int +OpenObjectFileContainingPcAndGetStartAddress(uint64_t pc, + uint64_t &start_address) { + int object_fd; + + // Open /proc/self/maps. + int maps_fd; + NO_INTR(maps_fd = open("/proc/self/maps", O_RDONLY)); + FileDescriptor wrapped_maps_fd(maps_fd); + if (wrapped_maps_fd.get() < 0) { + return -1; + } + + // Iterate over maps and look for the map containing the pc. Then + // look into the symbol tables inside. + char buf[1024]; // Big enough for line of sane /proc/self/maps + LineReader reader(wrapped_maps_fd.get(), buf, sizeof(buf)); + while (true) { + const char *cursor; + const char *eol; + if (!reader.ReadLine(&cursor, &eol)) { // EOF or malformed line. + return -1; + } + + // Start parsing line in /proc/self/maps. 
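GetHex() above is the only parsing primitive the maps scanner needs. A standalone sketch of the same digit handling, applied to a /proc/self/maps-style address range (sample input and names are ours):

#include <cstdint>
#include <cstdio>
#include <cctype>

static const char* ParseHex(const char* p, const char* end, uint64_t* out) {
  *out = 0;
  for (; p < end && isxdigit((unsigned char)*p); ++p) {
    int ch = *p;
    // '0'..'9' map directly; 'a'..'f'/'A'..'F' map via the low nibble + 9.
    *out = (*out << 4) | (ch <= '9' ? ch - '0' : (ch & 0xF) + 9);
  }
  return p;  // first non-hex character (or end)
}

int main() {
  const char line[] = "08048000-0804c000 r-xp";
  const char* end = line + sizeof(line) - 1;
  uint64_t start = 0, stop = 0;
  const char* p = ParseHex(line, end, &start);
  if (*p == '-') p = ParseHex(p + 1, end, &stop);
  printf("start=%#llx end=%#llx\n",
         (unsigned long long)start, (unsigned long long)stop);
  return 0;
}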
Here is an example: + // + // 08048000-0804c000 r-xp 00000000 08:01 2142121 /bin/cat + // + // We want start address (08048000), end address (0804c000), flags + // (r-xp) and file name (/bin/cat). + + // Read start address. + cursor = GetHex(cursor, eol, &start_address); + if (cursor == eol || *cursor != '-') { + return -1; // Malformed line. + } + ++cursor; // Skip '-'. + + // Read end address. + uint64_t end_address; + cursor = GetHex(cursor, eol, &end_address); + if (cursor == eol || *cursor != ' ') { + return -1; // Malformed line. + } + ++cursor; // Skip ' '. + + // Check start and end addresses. + if (!(start_address <= pc && pc < end_address)) { + continue; // We skip this map. PC isn't in this map. + } + + // Read flags. Skip flags until we encounter a space or eol. + const char * const flags_start = cursor; + while (cursor < eol && *cursor != ' ') { + ++cursor; + } + // We expect at least four letters for flags (ex. "r-xp"). + if (cursor == eol || cursor < flags_start + 4) { + return -1; // Malformed line. + } + + // Check flags. We are only interested in "r-x" maps. + if (memcmp(flags_start, "r-x", 3) != 0) { // Not a "r-x" map. + continue; // We skip this map. + } + ++cursor; // Skip ' '. + + // Skip to file name. "cursor" now points to file offset. We need to + // skip at least three spaces for file offset, dev, and inode. + int num_spaces = 0; + while (cursor < eol) { + if (*cursor == ' ') { + ++num_spaces; + } else if (num_spaces >= 3) { + // The first non-space character after skipping three spaces + // is the beginning of the file name. + break; + } + ++cursor; + } + if (cursor == eol) { + return -1; // Malformed line. + } + + // Finally, "cursor" now points to file name of our interest. + NO_INTR(object_fd = open(cursor, O_RDONLY)); + if (object_fd < 0) { + return -1; + } + return object_fd; + } +} + +// The implementation of our symbolization routine. If it +// successfully finds the symbol containing "pc" and obtains the +// symbol name, returns true and write the symbol name to "out". +// Otherwise, returns false. If Callback function is installed via +// InstallSymbolizeCallback(), the function is also called in this function, +// and "out" is used as its output. +// To keep stack consumption low, we would like this function to not +// get inlined. +static ATTRIBUTE_NOINLINE bool SymbolizeAndDemangle(void *pc, char *out, + int out_size) { + uint64_t pc0 = reinterpret_cast(pc); + uint64_t start_address = 0; + + int object_fd = OpenObjectFileContainingPcAndGetStartAddress(pc0, + start_address); + if (object_fd == -1) { + return false; + } + FileDescriptor wrapped_object_fd(object_fd); + int elf_type = FileGetElfType(wrapped_object_fd.get()); + if (elf_type == -1) { + return false; + } + if (g_symbolize_callback) { + // Run the call back if it's installed. + // Note: relocation (and much of the rest of this code) will be + // wrong for prelinked shared libraries and PIE executables. + uint64 relocation = (elf_type == ET_DYN) ? start_address : 0; + int num_bytes_written = g_symbolize_callback(wrapped_object_fd.get(), + pc, out, out_size, + relocation); + if (num_bytes_written > 0) { + out += num_bytes_written; + out_size -= num_bytes_written; + } + } + if (!GetSymbolFromObjectFile(wrapped_object_fd.get(), pc0, + out, out_size, start_address)) { + return false; + } + + // Symbolization succeeded. Now we try to demangle the symbol. 
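For comparison, the same /proc/self/maps fields can be pulled out with sscanf when async-signal-safety is not a concern. This is only an illustrative sketch using the sample line from the comment above; the handler-safe code above deliberately avoids sscanf.

#include <cinttypes>
#include <cstdio>

int main() {
  const char* line =
      "08048000-0804c000 r-xp 00000000 08:01 2142121    /bin/cat";
  uint64_t start = 0, end = 0;
  char perms[5] = {0};
  char path[256] = {0};
  // Fields: start-end perms offset dev inode path
  if (sscanf(line, "%" SCNx64 "-%" SCNx64 " %4s %*x %*s %*d %255s",
             &start, &end, perms, path) == 4) {
    printf("start=%#" PRIx64 " end=%#" PRIx64 " perms=%s file=%s\n",
           start, end, perms, path);
  }
  return 0;
}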
+ DemangleInplace(out, out_size); + return true; +} + +_END_GOOGLE_NAMESPACE_ + +#elif defined(OS_MACOSX) && defined(HAVE_DLADDR) + +#include +#include + +_START_GOOGLE_NAMESPACE_ + +static ATTRIBUTE_NOINLINE bool SymbolizeAndDemangle(void *pc, char *out, + int out_size) { + Dl_info info; + if (dladdr(pc, &info)) { + if (strlen(info.dli_sname) < out_size) { + strcpy(out, info.dli_sname); + // Symbolization succeeded. Now we try to demangle the symbol. + DemangleInplace(out, out_size); + return true; + } + } + return false; +} + +_END_GOOGLE_NAMESPACE_ + +#else +# error BUG: HAVE_SYMBOLIZE was wrongly set +#endif + +_START_GOOGLE_NAMESPACE_ + +bool Symbolize(void *pc, char *out, int out_size) { + SAFE_ASSERT(out_size >= 0); + return SymbolizeAndDemangle(pc, out, out_size); +} + +_END_GOOGLE_NAMESPACE_ + +#else /* HAVE_SYMBOLIZE */ + +#include + +#include "config.h" + +_START_GOOGLE_NAMESPACE_ + +// TODO: Support other environments. +bool Symbolize(void *pc, char *out, int out_size) { + assert(0); + return false; +} + +_END_GOOGLE_NAMESPACE_ + +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/symbolize.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/symbolize.h new file mode 100644 index 0000000000..1ebe4dd94a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/symbolize.h @@ -0,0 +1,116 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Satoru Takabayashi +// +// This library provides Symbolize() function that symbolizes program +// counters to their corresponding symbol names on linux platforms. +// This library has a minimal implementation of an ELF symbol table +// reader (i.e. it doesn't depend on libelf, etc.). +// +// The algorithm used in Symbolize() is as follows. +// +// 1. Go through a list of maps in /proc/self/maps and find the map +// containing the program counter. +// +// 2. Open the mapped file and find a regular symbol table inside. 
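The dladdr() branch above can be exercised on its own. A hedged sketch of ours that resolves a libc function pointer; it assumes Linux (link with -ldl on older glibc) or macOS:

#include <dlfcn.h>
#include <cstdio>

int main() {
  Dl_info info;
  // Take the address of a function from a shared library (printf) so
  // dladdr has a dynamic symbol to resolve.
  if (dladdr((void*)&printf, &info) && info.dli_sname != NULL) {
    printf("symbol: %s in %s\n", info.dli_sname, info.dli_fname);
  } else {
    printf("dladdr could not resolve the address\n");
  }
  return 0;
}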
+// Iterate over symbols in the symbol table and look for the symbol +// containing the program counter. If such a symbol is found, +// obtain the symbol name, and demangle the symbol if possible. +// If the symbol isn't found in the regular symbol table (binary is +// stripped), try the same thing with a dynamic symbol table. +// +// Note that Symbolize() is originally implemented to be used in +// FailureSignalHandler() in base/google.cc. Hence it doesn't use +// malloc() and other unsafe operations. It should be both +// thread-safe and async-signal-safe. + +#ifndef BASE_SYMBOLIZE_H_ +#define BASE_SYMBOLIZE_H_ + +#include "utilities.h" +#include "config.h" +#include "glog/logging.h" + +#ifdef HAVE_SYMBOLIZE + +#if defined(__ELF__) // defined by gcc on Linux +#include +#include // For ElfW() macro. + +// If there is no ElfW macro, let's define it by ourself. +#ifndef ElfW +# if SIZEOF_VOID_P == 4 +# define ElfW(type) Elf32_##type +# elif SIZEOF_VOID_P == 8 +# define ElfW(type) Elf64_##type +# else +# error "Unknown sizeof(void *)" +# endif +#endif + +_START_GOOGLE_NAMESPACE_ + +// Gets the section header for the given name, if it exists. Returns true on +// success. Otherwise, returns false. +bool GetSectionHeaderByName(int fd, const char *name, size_t name_len, + ElfW(Shdr) *out); + +_END_GOOGLE_NAMESPACE_ + +#endif /* __ELF__ */ + +_START_GOOGLE_NAMESPACE_ + +// Installs a callback function, which will be called right before a symbol name +// is printed. The callback is intended to be used for showing a file name and a +// line number preceding a symbol name. +// "fd" is a file descriptor of the object file containing the program +// counter "pc". The callback function should write output to "out" +// and return the size of the output written. On error, the callback +// function should return -1. +typedef int (*SymbolizeCallback)(int fd, void *pc, char *out, size_t out_size, + uint64 relocation); +void InstallSymbolizeCallback(SymbolizeCallback callback); + +_END_GOOGLE_NAMESPACE_ + +#endif + +_START_GOOGLE_NAMESPACE_ + +// Symbolizes a program counter. On success, returns true and write the +// symbol name to "out". The symbol name is demangled if possible +// (supports symbols generated by GCC 3.x or newer). Otherwise, +// returns false. +bool Symbolize(void *pc, char *out, int out_size); + +_END_GOOGLE_NAMESPACE_ + +#endif // BASE_SYMBOLIZE_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/symbolize_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/symbolize_unittest.cc new file mode 100644 index 0000000000..f25909d124 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/symbolize_unittest.cc @@ -0,0 +1,365 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
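The ElfW() fallback above simply picks the Elf32_*/Elf64_* type family from the pointer size. A compile-time sketch of the same idea; the macro name is ours so it cannot clash with the real one:

#include <elf.h>
#include <cstdio>

#if __SIZEOF_POINTER__ == 4
# define MY_ELFW(type) Elf32_##type   // 32-bit ELF types
#elif __SIZEOF_POINTER__ == 8
# define MY_ELFW(type) Elf64_##type   // 64-bit ELF types
#else
# error "Unknown pointer size"
#endif

int main() {
  printf("ELF header is %zu bytes on this target\n", sizeof(MY_ELFW(Ehdr)));
  return 0;
}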
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Satoru Takabayashi +// +// Unit tests for functions in symbolize.cc. + +#include "utilities.h" + +#include +#include + +#include "glog/logging.h" +#include "symbolize.h" +#include "googletest.h" +#include "config.h" + +using namespace std; +using namespace GOOGLE_NAMESPACE; + +#if defined(HAVE_STACKTRACE) && defined(__ELF__) + +#define always_inline + +// This unit tests make sense only with GCC. +// Uses lots of GCC specific features. +#if defined(__GNUC__) && !defined(__OPENCC__) +# if __GNUC__ >= 4 +# define TEST_WITH_MODERN_GCC +# if __i386__ // always_inline isn't supported for x86_64 with GCC 4.1.0. +# undef always_inline +# define always_inline __attribute__((always_inline)) +# define HAVE_ALWAYS_INLINE +# endif // __i386__ +# else +# endif // __GNUC__ >= 4 +# if defined(__i386__) || defined(__x86_64__) +# define TEST_X86_32_AND_64 1 +# endif // defined(__i386__) || defined(__x86_64__) +#endif + +// A wrapper function for Symbolize() to make the unit test simple. +static const char *TrySymbolize(void *pc) { + static char symbol[4096]; + if (Symbolize(pc, symbol, sizeof(symbol))) { + return symbol; + } else { + return NULL; + } +} + +// Make them C linkage to avoid mangled names. +extern "C" { +void nonstatic_func() { + volatile int a = 0; + ++a; +} + +static void static_func() { + volatile int a = 0; + ++a; +} +} + +TEST(Symbolize, Symbolize) { + // We do C-style cast since GCC 2.95.3 doesn't allow + // reinterpret_cast(&func). + + // Compilers should give us pointers to them. + EXPECT_STREQ("nonstatic_func", TrySymbolize((void *)(&nonstatic_func))); + EXPECT_STREQ("static_func", TrySymbolize((void *)(&static_func))); + + EXPECT_TRUE(NULL == TrySymbolize(NULL)); +} + +struct Foo { + static void func(int x); +}; + +void ATTRIBUTE_NOINLINE Foo::func(int x) { + volatile int a = x; + ++a; +} + +// With a modern GCC, Symbolize() should return demangled symbol +// names. Function parameters should be omitted. +#ifdef TEST_WITH_MODERN_GCC +TEST(Symbolize, SymbolizeWithDemangling) { + Foo::func(100); + EXPECT_STREQ("Foo::func()", TrySymbolize((void *)(&Foo::func))); +} +#endif + +// Tests that verify that Symbolize footprint is within some limit. + +// To measure the stack footprint of the Symbolize function, we create +// a signal handler (for SIGUSR1 say) that calls the Symbolize function +// on an alternate stack. This alternate stack is initialized to some +// known pattern (0x55, 0x55, 0x55, ...). We then self-send this signal, +// and after the signal handler returns, look at the alternate stack +// buffer to see what portion has been touched. +// +// This trick gives us the the stack footprint of the signal handler. 
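"Demangling" in the SymbolizeWithDemangling test above means turning a mangled linker name back into C++ syntax. A sketch using the GNU ABI helper directly; glog itself ships its own Demangle(), which additionally drops the parameter list:

#include <cxxabi.h>
#include <cstdio>
#include <cstdlib>

int main() {
  const char* mangled = "_ZN3Foo4funcEi";   // mangled form of Foo::func(int)
  int status = 0;
  char* demangled = abi::__cxa_demangle(mangled, NULL, NULL, &status);
  if (status == 0 && demangled != NULL) {
    printf("%s -> %s\n", mangled, demangled);
    free(demangled);  // __cxa_demangle allocates with malloc
  }
  return 0;
}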
+// But the signal handler, even before the call to Symbolize, consumes +// some stack already. We however only want the stack usage of the +// Symbolize function. To measure this accurately, we install two signal +// handlers: one that does nothing and just returns, and another that +// calls Symbolize. The difference between the stack consumption of these +// two signals handlers should give us the Symbolize stack foorprint. + +static void *g_pc_to_symbolize; +static char g_symbolize_buffer[4096]; +static char *g_symbolize_result; + +static void EmptySignalHandler(int signo) {} + +static void SymbolizeSignalHandler(int signo) { + if (Symbolize(g_pc_to_symbolize, g_symbolize_buffer, + sizeof(g_symbolize_buffer))) { + g_symbolize_result = g_symbolize_buffer; + } else { + g_symbolize_result = NULL; + } +} + +const int kAlternateStackSize = 8096; +const char kAlternateStackFillValue = 0x55; + +// These helper functions look at the alternate stack buffer, and figure +// out what portion of this buffer has been touched - this is the stack +// consumption of the signal handler running on this alternate stack. +static ATTRIBUTE_NOINLINE bool StackGrowsDown(int *x) { + int y; + return &y < x; +} +static int GetStackConsumption(const char* alt_stack) { + int x; + if (StackGrowsDown(&x)) { + for (int i = 0; i < kAlternateStackSize; i++) { + if (alt_stack[i] != kAlternateStackFillValue) { + return (kAlternateStackSize - i); + } + } + } else { + for (int i = (kAlternateStackSize - 1); i >= 0; i--) { + if (alt_stack[i] != kAlternateStackFillValue) { + return i; + } + } + } + return -1; +} + +#ifdef HAVE_SIGALTSTACK + +// Call Symbolize and figure out the stack footprint of this call. +static const char *SymbolizeStackConsumption(void *pc, int *stack_consumed) { + + g_pc_to_symbolize = pc; + + // The alt-signal-stack cannot be heap allocated because there is a + // bug in glibc-2.2 where some signal handler setup code looks at the + // current stack pointer to figure out what thread is currently running. + // Therefore, the alternate stack must be allocated from the main stack + // itself. + char altstack[kAlternateStackSize]; + memset(altstack, kAlternateStackFillValue, kAlternateStackSize); + + // Set up the alt-signal-stack (and save the older one). + stack_t sigstk; + memset(&sigstk, 0, sizeof(stack_t)); + stack_t old_sigstk; + sigstk.ss_sp = altstack; + sigstk.ss_size = kAlternateStackSize; + sigstk.ss_flags = 0; + CHECK_ERR(sigaltstack(&sigstk, &old_sigstk)); + + // Set up SIGUSR1 and SIGUSR2 signal handlers (and save the older ones). + struct sigaction sa; + memset(&sa, 0, sizeof(struct sigaction)); + struct sigaction old_sa1, old_sa2; + sigemptyset(&sa.sa_mask); + sa.sa_flags = SA_ONSTACK; + + // SIGUSR1 maps to EmptySignalHandler. + sa.sa_handler = EmptySignalHandler; + CHECK_ERR(sigaction(SIGUSR1, &sa, &old_sa1)); + + // SIGUSR2 maps to SymbolizeSignalHanlder. + sa.sa_handler = SymbolizeSignalHandler; + CHECK_ERR(sigaction(SIGUSR2, &sa, &old_sa2)); + + // Send SIGUSR1 signal and measure the stack consumption of the empty + // signal handler. + CHECK_ERR(kill(getpid(), SIGUSR1)); + int stack_consumption1 = GetStackConsumption(altstack); + + // Send SIGUSR2 signal and measure the stack consumption of the symbolize + // signal handler. + CHECK_ERR(kill(getpid(), SIGUSR2)); + int stack_consumption2 = GetStackConsumption(altstack); + + // The difference between the two stack consumption values is the + // stack footprint of the Symbolize function. 
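The measurement trick described above, in a compact standalone form of ours: fill an alternate signal stack with a known byte, run a handler on it via SA_ONSTACK, then count how many bytes were overwritten. It assumes a downward-growing stack and that 8 KiB is a large enough alternate stack, which holds on common Linux targets.

#include <csignal>
#include <cstring>
#include <cstdio>

static const int kAltStackSize = 8192;
static const char kFill = 0x55;
static char g_altstack[kAltStackSize];

static void Handler(int) {
  volatile char scratch[512];  // deliberately consume some stack
  for (unsigned i = 0; i < sizeof(scratch); ++i) scratch[i] = 0;
}

static int TouchedBytes() {
  // With a downward-growing stack, the first non-fill byte from the
  // bottom marks the deepest point the handler reached.
  for (int i = 0; i < kAltStackSize; ++i) {
    if (g_altstack[i] != kFill) return kAltStackSize - i;
  }
  return 0;
}

int main() {
  memset(g_altstack, kFill, sizeof(g_altstack));

  stack_t ss;
  memset(&ss, 0, sizeof(ss));
  ss.ss_sp = g_altstack;
  ss.ss_size = sizeof(g_altstack);
  if (sigaltstack(&ss, NULL) != 0) return 1;

  struct sigaction sa;
  memset(&sa, 0, sizeof(sa));
  sa.sa_handler = Handler;
  sa.sa_flags = SA_ONSTACK;    // run the handler on the alternate stack
  sigemptyset(&sa.sa_mask);
  if (sigaction(SIGUSR1, &sa, NULL) != 0) return 1;

  raise(SIGUSR1);
  printf("handler touched roughly %d bytes of the alternate stack\n",
         TouchedBytes());
  return 0;
}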
+ if (stack_consumption1 != -1 && stack_consumption2 != -1) { + *stack_consumed = stack_consumption2 - stack_consumption1; + } else { + *stack_consumed = -1; + } + + // Log the stack consumption values. + LOG(INFO) << "Stack consumption of empty signal handler: " + << stack_consumption1; + LOG(INFO) << "Stack consumption of symbolize signal handler: " + << stack_consumption2; + LOG(INFO) << "Stack consumption of Symbolize: " << *stack_consumed; + + // Now restore the old alt-signal-stack and signal handlers. + CHECK_ERR(sigaltstack(&old_sigstk, NULL)); + CHECK_ERR(sigaction(SIGUSR1, &old_sa1, NULL)); + CHECK_ERR(sigaction(SIGUSR2, &old_sa2, NULL)); + + return g_symbolize_result; +} + +// Symbolize stack consumption should be within 2kB. +const int kStackConsumptionUpperLimit = 2048; + +TEST(Symbolize, SymbolizeStackConsumption) { + int stack_consumed; + const char* symbol; + + symbol = SymbolizeStackConsumption((void *)(&nonstatic_func), + &stack_consumed); + EXPECT_STREQ("nonstatic_func", symbol); + EXPECT_GT(stack_consumed, 0); + EXPECT_LT(stack_consumed, kStackConsumptionUpperLimit); + + symbol = SymbolizeStackConsumption((void *)(&static_func), + &stack_consumed); + EXPECT_STREQ("static_func", symbol); + EXPECT_GT(stack_consumed, 0); + EXPECT_LT(stack_consumed, kStackConsumptionUpperLimit); +} + +#ifdef TEST_WITH_MODERN_GCC +TEST(Symbolize, SymbolizeWithDemanglingStackConsumption) { + Foo::func(100); + int stack_consumed; + const char* symbol; + + symbol = SymbolizeStackConsumption((void *)(&Foo::func), &stack_consumed); + + EXPECT_STREQ("Foo::func()", symbol); + EXPECT_GT(stack_consumed, 0); + EXPECT_LT(stack_consumed, kStackConsumptionUpperLimit); +} +#endif + +#endif // HAVE_SIGALTSTACK + +// x86 specific tests. Uses some inline assembler. +extern "C" { +inline void* always_inline inline_func() { + register void *pc = NULL; +#ifdef TEST_X86_32_AND_64 + __asm__ __volatile__("call 1f; 1: pop %0" : "=r"(pc)); +#endif + return pc; +} + +void* ATTRIBUTE_NOINLINE non_inline_func() { + register void *pc = NULL; +#ifdef TEST_X86_32_AND_64 + __asm__ __volatile__("call 1f; 1: pop %0" : "=r"(pc)); +#endif + return pc; +} + +void ATTRIBUTE_NOINLINE TestWithPCInsideNonInlineFunction() { +#if defined(TEST_X86_32_AND_64) && defined(HAVE_ATTRIBUTE_NOINLINE) + void *pc = non_inline_func(); + const char *symbol = TrySymbolize(pc); + CHECK(symbol != NULL); + CHECK_STREQ(symbol, "non_inline_func"); + cout << "Test case TestWithPCInsideNonInlineFunction passed." << endl; +#endif +} + +void ATTRIBUTE_NOINLINE TestWithPCInsideInlineFunction() { +#if defined(TEST_X86_32_AND_64) && defined(HAVE_ALWAYS_INLINE) + void *pc = inline_func(); // Must be inlined. + const char *symbol = TrySymbolize(pc); + CHECK(symbol != NULL); + CHECK_STREQ(symbol, __FUNCTION__); + cout << "Test case TestWithPCInsideInlineFunction passed." << endl; +#endif +} +} + +// Test with a return address. +void ATTRIBUTE_NOINLINE TestWithReturnAddress() { +#if defined(HAVE_ATTRIBUTE_NOINLINE) + void *return_address = __builtin_return_address(0); + const char *symbol = TrySymbolize(return_address); + CHECK(symbol != NULL); + CHECK_STREQ(symbol, "main"); + cout << "Test case TestWithReturnAddress passed." << endl; +#endif +} + +int main(int argc, char **argv) { + FLAGS_logtostderr = true; + InitGoogleLogging(argv[0]); + InitGoogleTest(&argc, argv); +#ifdef HAVE_SYMBOLIZE + // We don't want to get affected by the callback interface, that may be + // used to install some callback function at InitGoogle() time. 
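The TestWithReturnAddress case above relies on __builtin_return_address(0). A small GCC/Clang-only sketch of ours with the same idea, symbolizing the captured address with dladdr() where a dynamic symbol happens to be available (main often is not, unless the binary is linked with -rdynamic):

#include <dlfcn.h>
#include <cstdio>

__attribute__((noinline)) static void* CallerAddress() {
  return __builtin_return_address(0);
}

int main() {
  void* ret = CallerAddress();  // an address inside main()
  Dl_info info;
  if (dladdr(ret, &info) && info.dli_sname != NULL) {
    printf("return address %p is inside %s\n", ret, info.dli_sname);
  } else {
    printf("return address %p (no dynamic symbol for the caller)\n", ret);
  }
  return 0;
}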
+ InstallSymbolizeCallback(NULL); + + TestWithPCInsideInlineFunction(); + TestWithPCInsideNonInlineFunction(); + TestWithReturnAddress(); + return RUN_ALL_TESTS(); +#else + return 0; +#endif +} + +#else +int main() { +#ifdef HAVE_SYMBOLIZE + printf("PASS (no symbolize_unittest support)\n"); +#else + printf("PASS (no symbolize support)\n"); +#endif + return 0; +} +#endif // HAVE_STACKTRACE diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/utilities.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/utilities.cc new file mode 100644 index 0000000000..52b802a27f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/utilities.cc @@ -0,0 +1,346 @@ +// Copyright (c) 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Shinichiro Hamaji + +#include "utilities.h" + +#include +#include + +#include +#ifdef HAVE_SYS_TIME_H +# include +#endif +#include +#if defined(HAVE_SYSCALL_H) +#include // for syscall() +#elif defined(HAVE_SYS_SYSCALL_H) +#include // for syscall() +#endif +#ifdef HAVE_SYSLOG_H +# include +#endif + +#include "base/googleinit.h" + +using std::string; + +_START_GOOGLE_NAMESPACE_ + +static const char* g_program_invocation_short_name = NULL; +static pthread_t g_main_thread_id; + +_END_GOOGLE_NAMESPACE_ + +// The following APIs are all internal. +#ifdef HAVE_STACKTRACE + +#include "stacktrace.h" +#include "symbolize.h" +#include "base/commandlineflags.h" + +GLOG_DEFINE_bool(symbolize_stacktrace, true, + "Symbolize the stack trace in the tombstone"); + +_START_GOOGLE_NAMESPACE_ + +typedef void DebugWriter(const char*, void*); + +// The %p field width for printf() functions is two characters per byte. +// For some environments, add two extra bytes for the leading "0x". +static const int kPrintfPointerFieldWidth = 2 + 2 * sizeof(void*); + +static void DebugWriteToStderr(const char* data, void *unused) { + // This one is signal-safe. + if (write(STDERR_FILENO, data, strlen(data)) < 0) { + // Ignore errors. 
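The DebugWriter callback shape above lets the same dump routine target either stderr or an in-memory string. A minimal standalone sketch of ours:

#include <unistd.h>
#include <cstring>
#include <cstdio>
#include <string>

typedef void Writer(const char* data, void* arg);

static void WriteToStderr(const char* data, void* /*unused*/) {
  // write() is async-signal-safe, unlike fprintf().
  if (write(STDERR_FILENO, data, strlen(data)) < 0) { /* ignore errors */ }
}

static void AppendToString(const char* data, void* arg) {
  static_cast<std::string*>(arg)->append(data);
}

static void Dump(Writer* writerfn, void* arg) {
  writerfn("line one\n", arg);
  writerfn("line two\n", arg);
}

int main() {
  Dump(WriteToStderr, NULL);          // straight to stderr
  std::string out;
  Dump(AppendToString, &out);         // captured in a string
  printf("captured %zu bytes\n", out.size());
  return 0;
}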
+ } +} + +void DebugWriteToString(const char* data, void *arg) { + reinterpret_cast(arg)->append(data); +} + +#ifdef HAVE_SYMBOLIZE +// Print a program counter and its symbol name. +static void DumpPCAndSymbol(DebugWriter *writerfn, void *arg, void *pc, + const char * const prefix) { + char tmp[1024]; + const char *symbol = "(unknown)"; + // Symbolizes the previous address of pc because pc may be in the + // next function. The overrun happens when the function ends with + // a call to a function annotated noreturn (e.g. CHECK). + if (Symbolize(reinterpret_cast(pc) - 1, tmp, sizeof(tmp))) { + symbol = tmp; + } + char buf[1024]; + snprintf(buf, sizeof(buf), "%s@ %*p %s\n", + prefix, kPrintfPointerFieldWidth, pc, symbol); + writerfn(buf, arg); +} +#endif + +static void DumpPC(DebugWriter *writerfn, void *arg, void *pc, + const char * const prefix) { + char buf[100]; + snprintf(buf, sizeof(buf), "%s@ %*p\n", + prefix, kPrintfPointerFieldWidth, pc); + writerfn(buf, arg); +} + +// Dump current stack trace as directed by writerfn +static void DumpStackTrace(int skip_count, DebugWriter *writerfn, void *arg) { + // Print stack trace + void* stack[32]; + int depth = GetStackTrace(stack, ARRAYSIZE(stack), skip_count+1); + for (int i = 0; i < depth; i++) { +#if defined(HAVE_SYMBOLIZE) + if (FLAGS_symbolize_stacktrace) { + DumpPCAndSymbol(writerfn, arg, stack[i], " "); + } else { + DumpPC(writerfn, arg, stack[i], " "); + } +#else + DumpPC(writerfn, arg, stack[i], " "); +#endif + } +} + +static void DumpStackTraceAndExit() { + DumpStackTrace(1, DebugWriteToStderr, NULL); + + // Set the default signal handler for SIGABRT, to avoid invoking our + // own signal handler installed by InstallFailedSignalHandler(). + struct sigaction sig_action; + memset(&sig_action, 0, sizeof(sig_action)); + sigemptyset(&sig_action.sa_mask); + sig_action.sa_handler = SIG_DFL; + sigaction(SIGABRT, &sig_action, NULL); + + abort(); +} + +_END_GOOGLE_NAMESPACE_ + +#endif // HAVE_STACKTRACE + +_START_GOOGLE_NAMESPACE_ + +namespace glog_internal_namespace_ { + +const char* ProgramInvocationShortName() { + if (g_program_invocation_short_name != NULL) { + return g_program_invocation_short_name; + } else { + // TODO(hamaji): Use /proc/self/cmdline and so? + return "UNKNOWN"; + } +} + +bool IsGoogleLoggingInitialized() { + return g_program_invocation_short_name != NULL; +} + +bool is_default_thread() { + if (g_program_invocation_short_name == NULL) { + // InitGoogleLogging() not yet called, so unlikely to be in a different + // thread + return true; + } else { + return pthread_equal(pthread_self(), g_main_thread_id); + } +} + +#ifdef OS_WINDOWS +struct timeval { + long tv_sec, tv_usec; +}; + +// Based on: http://www.google.com/codesearch/p?hl=en#dR3YEbitojA/os_win32.c&q=GetSystemTimeAsFileTime%20license:bsd +// See COPYING for copyright information. +static int gettimeofday(struct timeval *tv, void* tz) { +#define EPOCHFILETIME (116444736000000000ULL) + FILETIME ft; + LARGE_INTEGER li; + uint64 tt; + + GetSystemTimeAsFileTime(&ft); + li.LowPart = ft.dwLowDateTime; + li.HighPart = ft.dwHighDateTime; + tt = (li.QuadPart - EPOCHFILETIME) / 10; + tv->tv_sec = tt / 1000000; + tv->tv_usec = tt % 1000000; + + return 0; +} +#endif + +int64 CycleClock_Now() { + // TODO(hamaji): temporary impementation - it might be too slow. 
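The "%*p" formatting used by DumpPC above pads every pointer to the same width (two hex characters per byte, plus room for a leading "0x"). A quick sketch:

#include <cstdio>

static const int kPtrWidth = 2 + 2 * sizeof(void*);

int main() {
  int x = 0;
  char buf[64];
  // '*' pulls the field width from the argument list.
  snprintf(buf, sizeof(buf), "@ %*p %s\n", kPtrWidth, (void*)&x, "some_symbol");
  fputs(buf, stdout);
  return 0;
}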
+ struct timeval tv; + gettimeofday(&tv, NULL); + return static_cast(tv.tv_sec) * 1000000 + tv.tv_usec; +} + +int64 UsecToCycles(int64 usec) { + return usec; +} + +WallTime WallTime_Now() { + // Now, cycle clock is retuning microseconds since the epoch. + return CycleClock_Now() * 0.000001; +} + +static int32 g_main_thread_pid = getpid(); +int32 GetMainThreadPid() { + return g_main_thread_pid; +} + +bool PidHasChanged() { + int32 pid = getpid(); + if (g_main_thread_pid == pid) { + return false; + } + g_main_thread_pid = pid; + return true; +} + +pid_t GetTID() { + // On Linux and FreeBSD, we try to use gettid(). +#if defined OS_LINUX || defined OS_FREEBSD || defined OS_MACOSX +#ifndef __NR_gettid +#ifdef OS_MACOSX +#define __NR_gettid SYS_gettid +#elif ! defined __i386__ +#error "Must define __NR_gettid for non-x86 platforms" +#else +#define __NR_gettid 224 +#endif +#endif + static bool lacks_gettid = false; + if (!lacks_gettid) { + pid_t tid = syscall(__NR_gettid); + if (tid != -1) { + return tid; + } + // Technically, this variable has to be volatile, but there is a small + // performance penalty in accessing volatile variables and there should + // not be any serious adverse effect if a thread does not immediately see + // the value change to "true". + lacks_gettid = true; + } +#endif // OS_LINUX || OS_FREEBSD + + // If gettid() could not be used, we use one of the following. +#if defined OS_LINUX + return getpid(); // Linux: getpid returns thread ID when gettid is absent +#elif defined OS_WINDOWS || defined OS_CYGWIN + return GetCurrentThreadId(); +#else + // If none of the techniques above worked, we use pthread_self(). + return (pid_t)(uintptr_t)pthread_self(); +#endif +} + +const char* const_basename(const char* filepath) { + const char* base = strrchr(filepath, '/'); +#ifdef OS_WINDOWS // Look for either path separator in Windows + if (!base) + base = strrchr(filepath, '\\'); +#endif + return base ? (base+1) : filepath; +} + +static string g_my_user_name; +const string& MyUserName() { + return g_my_user_name; +} +static void MyUserNameInitializer() { + // TODO(hamaji): Probably this is not portable. +#if defined(OS_WINDOWS) + const char* user = getenv("USERNAME"); +#else + const char* user = getenv("USER"); +#endif + if (user != NULL) { + g_my_user_name = user; + } else { + g_my_user_name = "invalid-user"; + } +} +REGISTER_MODULE_INITIALIZER(utilities, MyUserNameInitializer()); + +#ifdef HAVE_STACKTRACE +void DumpStackTraceToString(string* stacktrace) { + DumpStackTrace(1, DebugWriteToString, stacktrace); +} +#endif + +// We use an atomic operation to prevent problems with calling CrashReason +// from inside the Mutex implementation (potentially through RAW_CHECK). +static const CrashReason* g_reason = 0; + +void SetCrashReason(const CrashReason* r) { + sync_val_compare_and_swap(&g_reason, + reinterpret_cast(0), + r); +} + +void InitGoogleLoggingUtilities(const char* argv0) { + CHECK(!IsGoogleLoggingInitialized()) + << "You called InitGoogleLogging() twice!"; + const char* slash = strrchr(argv0, '/'); +#ifdef OS_WINDOWS + if (!slash) slash = strrchr(argv0, '\\'); +#endif + g_program_invocation_short_name = slash ? 
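On Linux, the GetTID() path above boils down to the raw gettid system call; glibc only gained a gettid() wrapper in version 2.30, which is why the code calls syscall() directly. A standalone sketch:

#include <sys/syscall.h>
#include <unistd.h>
#include <cstdio>

int main() {
  pid_t tid = (pid_t)syscall(SYS_gettid);
  printf("pid=%d tid=%d (equal in a single-threaded process)\n",
         (int)getpid(), (int)tid);
  return 0;
}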
slash + 1 : argv0; + g_main_thread_id = pthread_self(); + +#ifdef HAVE_STACKTRACE + InstallFailureFunction(&DumpStackTraceAndExit); +#endif +} + +void ShutdownGoogleLoggingUtilities() { + CHECK(IsGoogleLoggingInitialized()) + << "You called ShutdownGoogleLogging() without calling InitGoogleLogging() first!"; +#ifdef HAVE_SYSLOG_H + closelog(); +#endif +} + +} // namespace glog_internal_namespace_ + +_END_GOOGLE_NAMESPACE_ + +// Make an implementation of stacktrace compiled. +#ifdef STACKTRACE_H +# include STACKTRACE_H +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/utilities.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/utilities.h new file mode 100644 index 0000000000..42785dc0f5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/utilities.h @@ -0,0 +1,226 @@ +// Copyright (c) 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Shinichiro Hamaji +// +// Define utilties for glog internal usage. + +#ifndef UTILITIES_H__ +#define UTILITIES_H__ + +#if defined(WIN32) || defined(_WIN32) || defined(__WIN32__) +# define OS_WINDOWS +#elif defined(__CYGWIN__) || defined(__CYGWIN32__) +# define OS_CYGWIN +#elif defined(linux) || defined(__linux) || defined(__linux__) +# define OS_LINUX +#elif defined(macintosh) || defined(__APPLE__) || defined(__APPLE_CC__) +# define OS_MACOSX +#elif defined(__FreeBSD__) +# define OS_FREEBSD +#elif defined(__NetBSD__) +# define OS_NETBSD +#elif defined(__OpenBSD__) +# define OS_OPENBSD +#else +// TODO(hamaji): Add other platforms. 
+#endif + +// printf macros for size_t, in the style of inttypes.h +#ifdef _LP64 +#define __PRIS_PREFIX "z" +#else +#define __PRIS_PREFIX +#endif + +// Use these macros after a % in a printf format string +// to get correct 32/64 bit behavior, like this: +// size_t size = records.size(); +// printf("%"PRIuS"\n", size); + +#define PRIdS __PRIS_PREFIX "d" +#define PRIxS __PRIS_PREFIX "x" +#define PRIuS __PRIS_PREFIX "u" +#define PRIXS __PRIS_PREFIX "X" +#define PRIoS __PRIS_PREFIX "o" + +#include "base/mutex.h" // This must go first so we get _XOPEN_SOURCE + +#include + +#if defined(OS_WINDOWS) +# include "port.h" +#endif + +#include "config.h" +#include "glog/logging.h" + +// There are three different ways we can try to get the stack trace: +// +// 1) The libunwind library. This is still in development, and as a +// separate library adds a new dependency, but doesn't need a frame +// pointer. It also doesn't call malloc. +// +// 2) Our hand-coded stack-unwinder. This depends on a certain stack +// layout, which is used by gcc (and those systems using a +// gcc-compatible ABI) on x86 systems, at least since gcc 2.95. +// It uses the frame pointer to do its work. +// +// 3) The gdb unwinder -- also the one used by the c++ exception code. +// It's obviously well-tested, but has a fatal flaw: it can call +// malloc() from the unwinder. This is a problem because we're +// trying to use the unwinder to instrument malloc(). +// +// Note: if you add a new implementation here, make sure it works +// correctly when GetStackTrace() is called with max_depth == 0. +// Some code may do that. + +#if defined(HAVE_LIB_UNWIND) +# define STACKTRACE_H "stacktrace_libunwind-inl.h" +#elif !defined(NO_FRAME_POINTER) +# if defined(__i386__) && __GNUC__ >= 2 +# define STACKTRACE_H "stacktrace_x86-inl.h" +# elif defined(__x86_64__) && __GNUC__ >= 2 +# define STACKTRACE_H "stacktrace_x86_64-inl.h" +# elif (defined(__ppc__) || defined(__PPC__)) && __GNUC__ >= 2 +# define STACKTRACE_H "stacktrace_powerpc-inl.h" +# endif +#endif + +#if !defined(STACKTRACE_H) && defined(HAVE_EXECINFO_H) +# define STACKTRACE_H "stacktrace_generic-inl.h" +#endif + +#if defined(STACKTRACE_H) +# define HAVE_STACKTRACE +#endif + +// defined by gcc +#if defined(__ELF__) && defined(OS_LINUX) +# define HAVE_SYMBOLIZE +#elif defined(OS_MACOSX) && defined(HAVE_DLADDR) +// Use dladdr to symbolize. +# define HAVE_SYMBOLIZE +#endif + +#ifndef ARRAYSIZE +// There is a better way, but this is good enough for our purpose. +# define ARRAYSIZE(a) (sizeof(a) / sizeof(*(a))) +#endif + +_START_GOOGLE_NAMESPACE_ + +namespace glog_internal_namespace_ { + +#ifdef HAVE___ATTRIBUTE__ +# define ATTRIBUTE_NOINLINE __attribute__ ((noinline)) +# define HAVE_ATTRIBUTE_NOINLINE +#else +# define ATTRIBUTE_NOINLINE +#endif + +const char* ProgramInvocationShortName(); + +bool IsGoogleLoggingInitialized(); + +bool is_default_thread(); + +int64 CycleClock_Now(); + +int64 UsecToCycles(int64 usec); + +typedef double WallTime; +WallTime WallTime_Now(); + +int32 GetMainThreadPid(); +bool PidHasChanged(); + +pid_t GetTID(); + +const std::string& MyUserName(); + +// Get the part of filepath after the last path separator. +// (Doesn't modify filepath, contrary to basename() in libgen.h.) +const char* const_basename(const char* filepath); + +// Wrapper of __sync_val_compare_and_swap. If the GCC extension isn't +// defined, we try the CPU specific logics (we only support x86 and +// x86_64 for now) first, then use a naive implementation, which has a +// race condition. 
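The PRIuS-style macros above predate the widespread availability of "%zu". A sketch of their use; the macro names here are ours to avoid clashes, and note the spaces around the macro, which C++11's user-defined-literal rules effectively require between adjacent string literals and macros:

#include <cstdio>
#include <vector>

#ifdef _LP64
# define MY_PRIS_PREFIX "z"
#else
# define MY_PRIS_PREFIX
#endif
#define MY_PRIuS MY_PRIS_PREFIX "u"

int main() {
  std::vector<int> records(42);
  size_t size = records.size();
  printf("%" MY_PRIuS "\n", size);   // on modern toolchains, "%zu" suffices
  return 0;
}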
+template +inline T sync_val_compare_and_swap(T* ptr, T oldval, T newval) { +#if defined(HAVE___SYNC_VAL_COMPARE_AND_SWAP) + return __sync_val_compare_and_swap(ptr, oldval, newval); +#elif defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__)) + T ret; + __asm__ __volatile__("lock; cmpxchg %1, (%2);" + :"=a"(ret) + // GCC may produces %sil or %dil for + // constraint "r", but some of apple's gas + // dosn't know the 8 bit registers. + // We use "q" to avoid these registers. + :"q"(newval), "q"(ptr), "a"(oldval) + :"memory", "cc"); + return ret; +#else + T ret = *ptr; + if (ret == oldval) { + *ptr = newval; + } + return ret; +#endif +} + +void DumpStackTraceToString(std::string* stacktrace); + +struct CrashReason { + CrashReason() : filename(0), line_number(0), message(0), depth(0) {} + + const char* filename; + int line_number; + const char* message; + + // We'll also store a bit of stack trace context at the time of crash as + // it may not be available later on. + void* stack[32]; + int depth; +}; + +void SetCrashReason(const CrashReason* r); + +void InitGoogleLoggingUtilities(const char* argv0); +void ShutdownGoogleLoggingUtilities(); + +} // namespace glog_internal_namespace_ + +_END_GOOGLE_NAMESPACE_ + +using namespace GOOGLE_NAMESPACE::glog_internal_namespace_; + +#endif // UTILITIES_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/utilities_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/utilities_unittest.cc new file mode 100644 index 0000000000..7b796190e3 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/utilities_unittest.cc @@ -0,0 +1,54 @@ +// Copyright (c) 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
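SetCrashReason() above uses compare-and-swap so that only the first recorded crash reason ever wins. A small sketch of that "first writer wins" idiom using the same GCC builtin:

#include <cstdio>

static const char* g_reason = 0;

// Returns true if this call installed the reason (i.e. it was first).
static bool SetOnce(const char* r) {
  // CAS returns the old value; 0 means the slot was empty and we won.
  return __sync_val_compare_and_swap(&g_reason, (const char*)0, r) == 0;
}

int main() {
  printf("first:  %d\n", SetOnce("out of memory"));  // installs, prints 1
  printf("second: %d\n", SetOnce("disk full"));      // loses the race, prints 0
  printf("reason: %s\n", g_reason);                  // still "out of memory"
  return 0;
}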
+// +// Author: Shinichiro Hamaji + +#include "utilities.h" +#include "googletest.h" +#include "glog/logging.h" + +using namespace GOOGLE_NAMESPACE; + +TEST(utilities, sync_val_compare_and_swap) { + bool now_entering = false; + EXPECT_FALSE(sync_val_compare_and_swap(&now_entering, false, true)); + EXPECT_TRUE(sync_val_compare_and_swap(&now_entering, false, true)); + EXPECT_TRUE(sync_val_compare_and_swap(&now_entering, false, true)); +} + +TEST(utilities, InitGoogleLoggingDeathTest) { + ASSERT_DEATH(InitGoogleLogging("foobar"), ""); +} + +int main(int argc, char **argv) { + InitGoogleLogging(argv[0]); + InitGoogleTest(&argc, argv); + + CHECK_EQ(RUN_ALL_TESTS(), 0); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/vlog_is_on.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/vlog_is_on.cc new file mode 100644 index 0000000000..ee0e412f66 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/vlog_is_on.cc @@ -0,0 +1,249 @@ +// Copyright (c) 1999, 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Ray Sidney and many others +// +// Broken out from logging.cc by Soren Lassen +// logging_unittest.cc covers the functionality herein + +#include "utilities.h" + +#include +#include +#include +#include +#include +#include "base/commandlineflags.h" +#include "glog/logging.h" +#include "glog/raw_logging.h" +#include "base/googleinit.h" + +// glog doesn't have annotation +#define ANNOTATE_BENIGN_RACE(address, description) + +using std::string; + +GLOG_DEFINE_int32(v, 0, "Show all VLOG(m) messages for m <= this." +" Overridable by --vmodule."); + +GLOG_DEFINE_string(vmodule, "", "per-module verbose level." +" Argument is a comma-separated list of =." +" is a glob pattern, matched against the filename base" +" (that is, name ignoring .cc/.h./-inl.h)." 
+" overrides any value given by --v."); + +_START_GOOGLE_NAMESPACE_ + +namespace glog_internal_namespace_ { + +// Implementation of fnmatch that does not need 0-termination +// of arguments and does not allocate any memory, +// but we only support "*" and "?" wildcards, not the "[...]" patterns. +// It's not a static function for the unittest. +GOOGLE_GLOG_DLL_DECL bool SafeFNMatch_(const char* pattern, + size_t patt_len, + const char* str, + size_t str_len) { + int p = 0; + int s = 0; + while (1) { + if (p == patt_len && s == str_len) return true; + if (p == patt_len) return false; + if (s == str_len) return p+1 == patt_len && pattern[p] == '*'; + if (pattern[p] == str[s] || pattern[p] == '?') { + p += 1; + s += 1; + continue; + } + if (pattern[p] == '*') { + if (p+1 == patt_len) return true; + do { + if (SafeFNMatch_(pattern+(p+1), patt_len-(p+1), str+s, str_len-s)) { + return true; + } + s += 1; + } while (s != str_len); + return false; + } + return false; + } +} + +} // namespace glog_internal_namespace_ + +using glog_internal_namespace_::SafeFNMatch_; + +int32 kLogSiteUninitialized = 1000; + +// List of per-module log levels from FLAGS_vmodule. +// Once created each element is never deleted/modified +// except for the vlog_level: other threads will read VModuleInfo blobs +// w/o locks and we'll store pointers to vlog_level at VLOG locations +// that will never go away. +// We can't use an STL struct here as we wouldn't know +// when it's safe to delete/update it: other threads need to use it w/o locks. +struct VModuleInfo { + string module_pattern; + mutable int32 vlog_level; // Conceptually this is an AtomicWord, but it's + // too much work to use AtomicWord type here + // w/o much actual benefit. + const VModuleInfo* next; +}; + +// This protects the following global variables. +static Mutex vmodule_lock; +// Pointer to head of the VModuleInfo list. +// It's a map from module pattern to logging level for those module(s). +static VModuleInfo* vmodule_list = 0; +// Boolean initialization flag. +static bool inited_vmodule = false; + +// L >= vmodule_lock. +static void VLOG2Initializer() { + vmodule_lock.AssertHeld(); + // Can now parse --vmodule flag and initialize mapping of module-specific + // logging levels. + inited_vmodule = false; + const char* vmodule = FLAGS_vmodule.c_str(); + const char* sep; + VModuleInfo* head = NULL; + VModuleInfo* tail = NULL; + while ((sep = strchr(vmodule, '=')) != NULL) { + string pattern(vmodule, sep - vmodule); + int module_level; + if (sscanf(sep, "=%d", &module_level) == 1) { + VModuleInfo* info = new VModuleInfo; + info->module_pattern = pattern; + info->vlog_level = module_level; + if (head) tail->next = info; + else head = info; + tail = info; + } + // Skip past this entry + vmodule = strchr(sep, ','); + if (vmodule == NULL) break; + vmodule++; // Skip past "," + } + if (head) { // Put them into the list at the head: + tail->next = vmodule_list; + vmodule_list = head; + } + inited_vmodule = true; +} + +// This can be called very early, so we use SpinLock and RAW_VLOG here. 
+int SetVLOGLevel(const char* module_pattern, int log_level) { + int result = FLAGS_v; + int const pattern_len = strlen(module_pattern); + bool found = false; + MutexLock l(&vmodule_lock); // protect whole read-modify-write + for (const VModuleInfo* info = vmodule_list; + info != NULL; info = info->next) { + if (info->module_pattern == module_pattern) { + if (!found) { + result = info->vlog_level; + found = true; + } + info->vlog_level = log_level; + } else if (!found && + SafeFNMatch_(info->module_pattern.c_str(), + info->module_pattern.size(), + module_pattern, pattern_len)) { + result = info->vlog_level; + found = true; + } + } + if (!found) { + VModuleInfo* info = new VModuleInfo; + info->module_pattern = module_pattern; + info->vlog_level = log_level; + info->next = vmodule_list; + vmodule_list = info; + } + RAW_VLOG(1, "Set VLOG level for \"%s\" to %d", module_pattern, log_level); + return result; +} + +// NOTE: Individual VLOG statements cache the integer log level pointers. +// NOTE: This function must not allocate memory or require any locks. +bool InitVLOG3__(int32** site_flag, int32* site_default, + const char* fname, int32 verbose_level) { + MutexLock l(&vmodule_lock); + bool read_vmodule_flag = inited_vmodule; + if (!read_vmodule_flag) { + VLOG2Initializer(); + } + + // protect the errno global in case someone writes: + // VLOG(..) << "The last error was " << strerror(errno) + int old_errno = errno; + + // site_default normally points to FLAGS_v + int32* site_flag_value = site_default; + + // Get basename for file + const char* base = strrchr(fname, '/'); + base = base ? (base+1) : fname; + const char* base_end = strchr(base, '.'); + size_t base_length = base_end ? (base_end - base) : strlen(base); + + // Trim out trailing "-inl" if any + if (base_length >= 4 && (memcmp(base+base_length-4, "-inl", 4) == 0)) { + base_length -= 4; + } + + // TODO: Trim out _unittest suffix? Perhaps it is better to have + // the extra control and just leave it there. + + // find target in vector of modules, replace site_flag_value with + // a module-specific verbose level, if any. + for (const VModuleInfo* info = vmodule_list; + info != NULL; info = info->next) { + if (SafeFNMatch_(info->module_pattern.c_str(), info->module_pattern.size(), + base, base_length)) { + site_flag_value = &info->vlog_level; + // value at info->vlog_level is now what controls + // the VLOG at the caller site forever + break; + } + } + + // Cache the vlog value pointer if --vmodule flag has been parsed. + ANNOTATE_BENIGN_RACE(site_flag, + "*site_flag may be written by several threads," + " but the value will be the same"); + if (read_vmodule_flag) *site_flag = site_flag_value; + + // restore the errno in case something recoverable went wrong during + // the initialization of the VLOG mechanism (see above note "protect the..") + errno = old_errno; + return *site_flag_value >= verbose_level; +} + +_END_GOOGLE_NAMESPACE_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/config.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/config.h new file mode 100644 index 0000000000..114762e846 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/config.h @@ -0,0 +1,136 @@ +/* src/config.h.in. Generated from configure.ac by autoheader. */ + +/* Namespace for Google classes */ +#define GOOGLE_NAMESPACE google + +/* Define if you have the `dladdr' function */ +#undef HAVE_DLADDR + +/* Define to 1 if you have the header file. 
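InitVLOG3__ above normalizes a source path to its module name before matching it against --vmodule patterns: keep only the basename, drop the extension, and trim a trailing "-inl". A sketch of just that normalization, using std::string for brevity (which the allocation-averse real code avoids):

#include <cstdio>
#include <cstring>
#include <string>

static std::string ModuleName(const char* fname) {
  const char* base = strrchr(fname, '/');
  base = base ? base + 1 : fname;
  const char* dot = strchr(base, '.');
  size_t len = dot ? (size_t)(dot - base) : strlen(base);
  if (len >= 4 && memcmp(base + len - 4, "-inl", 4) == 0) len -= 4;
  return std::string(base, len);
}

int main() {
  printf("%s\n", ModuleName("src/stacktrace_x86-inl.h").c_str());  // stacktrace_x86
  printf("%s\n", ModuleName("src/vlog_is_on.cc").c_str());         // vlog_is_on
  return 0;
}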
*/ +#undef HAVE_DLFCN_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_EXECINFO_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_INTTYPES_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_LIBUNWIND_H + +/* define if you have google gflags library */ +#undef HAVE_LIB_GFLAGS + +/* define if you have libunwind */ +#undef HAVE_LIB_UNWIND + +/* Define to 1 if you have the header file. */ +#undef HAVE_MEMORY_H + +/* define if the compiler implements namespaces */ +#undef HAVE_NAMESPACES + +/* Define if you have POSIX threads libraries and header files. */ +#undef HAVE_PTHREAD + +/* define if the compiler implements pthread_rwlock_* */ +#undef HAVE_RWLOCK + +/* Define if you have the `sigaltstack' function */ +#undef HAVE_SIGALTSTACK + +/* Define to 1 if you have the header file. */ +#undef HAVE_STDINT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STDLIB_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STRINGS_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STRING_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYSCALL_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_STAT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_SYSCALL_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_TYPES_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_UCONTEXT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_UNISTD_H + +/* define if the compiler supports using expression for operator */ +#undef HAVE_USING_OPERATOR + +/* define if your compiler has __attribute__ */ +#undef HAVE___ATTRIBUTE__ + +/* define if your compiler has __builtin_expect */ +#undef HAVE___BUILTIN_EXPECT + +/* define if your compiler has __sync_val_compare_and_swap */ +#undef HAVE___SYNC_VAL_COMPARE_AND_SWAP + +/* Name of package */ +#undef PACKAGE + +/* Define to the address where bug reports for this package should be sent. */ +#undef PACKAGE_BUGREPORT + +/* Define to the full name of this package. */ +#undef PACKAGE_NAME + +/* Define to the full name and version of this package. */ +#undef PACKAGE_STRING + +/* Define to the one symbol short name of this package. */ +#undef PACKAGE_TARNAME + +/* Define to the version of this package. */ +#undef PACKAGE_VERSION + +/* How to access the PC from a struct ucontext */ +#undef PC_FROM_UCONTEXT + +/* Define to necessary symbol if this constant uses a non-standard name on + your system. */ +#undef PTHREAD_CREATE_JOINABLE + +/* The size of `void *', as computed by sizeof. */ +#undef SIZEOF_VOID_P + +/* Define to 1 if you have the ANSI C header files. */ +#undef STDC_HEADERS + +/* the namespace where STL code like vector<> is defined */ +#undef STL_NAMESPACE + +/* Version number of package */ +#undef VERSION + +/* Stops putting the code inside the Google namespace */ +#define _END_GOOGLE_NAMESPACE_ } + +/* Puts following code inside the Google namespace */ +#define _START_GOOGLE_NAMESPACE_ namespace google { + +/* Always the empty-string on non-windows systems. On windows, should be + "__declspec(dllexport)". This way, when we compile the dll, we export our + functions/classes. It's safe to define this here because config.h is only + used internally, to compile the DLL, and every DLL source file #includes + "config.h" before anything else. 
*/ +#ifndef GOOGLE_GLOG_DLL_DECL +# define GOOGLE_GLOG_IS_A_DLL 1 /* not set if you're statically linking */ +# define GOOGLE_GLOG_DLL_DECL __declspec(dllexport) +# define GOOGLE_GLOG_DLL_DECL_FOR_UNITTESTS __declspec(dllimport) +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/glog/log_severity.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/glog/log_severity.h new file mode 100644 index 0000000000..5e7d09effb --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/glog/log_severity.h @@ -0,0 +1,88 @@ +// This file is automatically generated from src/glog/log_severity.h +// using src/windows/preprocess.sh. +// DO NOT EDIT! + +// Copyright (c) 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef BASE_LOG_SEVERITY_H__ +#define BASE_LOG_SEVERITY_H__ + +// Annoying stuff for windows -- makes sure clients can import these functions +#ifndef GOOGLE_GLOG_DLL_DECL +# if defined(_WIN32) && !defined(__CYGWIN__) +# define GOOGLE_GLOG_DLL_DECL __declspec(dllimport) +# else +# define GOOGLE_GLOG_DLL_DECL +# endif +#endif + +// Variables of type LogSeverity are widely taken to lie in the range +// [0, NUM_SEVERITIES-1]. Be careful to preserve this assumption if +// you ever need to change their values or add a new severity. 
+typedef int LogSeverity; + +const int INFO = 0, WARNING = 1, ERROR = 2, FATAL = 3, NUM_SEVERITIES = 4; + +// DFATAL is FATAL in debug mode, ERROR in normal mode +#ifdef NDEBUG +#define DFATAL_LEVEL ERROR +#else +#define DFATAL_LEVEL FATAL +#endif + +extern GOOGLE_GLOG_DLL_DECL const char* const LogSeverityNames[NUM_SEVERITIES]; + +// NDEBUG usage helpers related to (RAW_)DCHECK: +// +// DEBUG_MODE is for small !NDEBUG uses like +// if (DEBUG_MODE) foo.CheckThatFoo(); +// instead of substantially more verbose +// #ifndef NDEBUG +// foo.CheckThatFoo(); +// #endif +// +// IF_DEBUG_MODE is for small !NDEBUG uses like +// IF_DEBUG_MODE( string error; ) +// DCHECK(Foo(&error)) << error; +// instead of substantially more verbose +// #ifndef NDEBUG +// string error; +// DCHECK(Foo(&error)) << error; +// #endif +// +#ifdef NDEBUG +enum { DEBUG_MODE = 0 }; +#define IF_DEBUG_MODE(x) +#else +enum { DEBUG_MODE = 1 }; +#define IF_DEBUG_MODE(x) x +#endif + +#endif // BASE_LOG_SEVERITY_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/glog/logging.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/glog/logging.h new file mode 100644 index 0000000000..7a6df74f49 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/glog/logging.h @@ -0,0 +1,1510 @@ +// This file is automatically generated from src/glog/logging.h.in +// using src/windows/preprocess.sh. +// DO NOT EDIT! + +// Copyright (c) 1999, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Ray Sidney +// +// This file contains #include information about logging-related stuff. +// Pretty much everybody needs to #include this file so that they can +// log various happenings. +// +#ifndef _LOGGING_H_ +#define _LOGGING_H_ + +#include +#include +#include +#include +#if 0 +# include +#endif +#ifdef __DEPRECATED +// Make GCC quiet. 
+# undef __DEPRECATED +# include +# define __DEPRECATED +#else +# include +#endif +#include + +// Annoying stuff for windows -- makes sure clients can import these functions +#ifndef GOOGLE_GLOG_DLL_DECL +# if defined(_WIN32) && !defined(__CYGWIN__) +# define GOOGLE_GLOG_DLL_DECL __declspec(dllimport) +# else +# define GOOGLE_GLOG_DLL_DECL +# endif +#endif + +// We care a lot about number of bits things take up. Unfortunately, +// systems define their bit-specific ints in a lot of different ways. +// We use our own way, and have a typedef to get there. +// Note: these commands below may look like "#if 1" or "#if 0", but +// that's because they were constructed that way at ./configure time. +// Look at logging.h.in to see how they're calculated (based on your config). +#if 0 +#include // the normal place uint16_t is defined +#endif +#if 0 +#include // the normal place u_int16_t is defined +#endif +#if 0 +#include // a third place for uint16_t or u_int16_t +#endif + +#if 0 +#include +#endif + +namespace google { + +#if 0 // the C99 format +typedef int32_t int32; +typedef uint32_t uint32; +typedef int64_t int64; +typedef uint64_t uint64; +#elif 0 // the BSD format +typedef int32_t int32; +typedef u_int32_t uint32; +typedef int64_t int64; +typedef u_int64_t uint64; +#elif 1 // the windows (vc7) format +typedef __int32 int32; +typedef unsigned __int32 uint32; +typedef __int64 int64; +typedef unsigned __int64 uint64; +#else +#error Do not know how to define a 32-bit integer quantity on your system +#endif + +} + +// The global value of GOOGLE_STRIP_LOG. All the messages logged to +// LOG(XXX) with severity less than GOOGLE_STRIP_LOG will not be displayed. +// If it can be determined at compile time that the message will not be +// printed, the statement will be compiled out. +// +// Example: to strip out all INFO and WARNING messages, use the value +// of 2 below. To make an exception for WARNING messages from a single +// file, add "#define GOOGLE_STRIP_LOG 1" to that file _before_ including +// base/logging.h +#ifndef GOOGLE_STRIP_LOG +#define GOOGLE_STRIP_LOG 0 +#endif + +// GCC can be told that a certain branch is not likely to be taken (for +// instance, a CHECK failure), and use that information in static analysis. +// Giving it this information can help it optimize for the common case in +// the absence of better information (ie. -fprofile-arcs). +// +#ifndef GOOGLE_PREDICT_BRANCH_NOT_TAKEN +#if 0 +#define GOOGLE_PREDICT_BRANCH_NOT_TAKEN(x) (__builtin_expect(x, 0)) +#else +#define GOOGLE_PREDICT_BRANCH_NOT_TAKEN(x) x +#endif +#endif + +// Make a bunch of macros for logging. The way to log things is to stream +// things to LOG(). E.g., +// +// LOG(INFO) << "Found " << num_cookies << " cookies"; +// +// You can capture log messages in a string, rather than reporting them +// immediately: +// +// vector errors; +// LOG_STRING(ERROR, &errors) << "Couldn't parse cookie #" << cookie_num; +// +// This pushes back the new error onto 'errors'; if given a NULL pointer, +// it reports the error via LOG(ERROR). +// +// You can also do conditional logging: +// +// LOG_IF(INFO, num_cookies > 10) << "Got lots of cookies"; +// +// You can also do occasional logging (log every n'th occurrence of an +// event): +// +// LOG_EVERY_N(INFO, 10) << "Got the " << COUNTER << "th cookie"; +// +// The above will cause log messages to be output on the 1st, 11th, 21st, ... +// times it is executed. Note that the special COUNTER value is used to +// identify which repetition is happening. 
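A short usage sketch of the streaming, capturing and sampled-logging macros documented above, assuming this header is included; num_cookies, errors and the enclosing function are illustrative placeholders, not part of this patch:

    #include <string>
    #include <vector>

    void CountCookies(int num_cookies) {
      LOG(INFO) << "Found " << num_cookies << " cookies";

      // Capture instead of emitting immediately; a NULL vector would fall
      // back to plain LOG(ERROR).
      std::vector<std::string> errors;
      LOG_STRING(ERROR, &errors) << "Couldn't parse cookie #" << num_cookies;

      LOG_IF(INFO, num_cookies > 10) << "Got lots of cookies";
      LOG_EVERY_N(INFO, 10) << "Got the " << google::COUNTER << "th cookie";
    }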
+// +// You can also do occasional conditional logging (log every n'th +// occurrence of an event, when condition is satisfied): +// +// LOG_IF_EVERY_N(INFO, (size > 1024), 10) << "Got the " << COUNTER +// << "th big cookie"; +// +// You can log messages the first N times your code executes a line. E.g. +// +// LOG_FIRST_N(INFO, 20) << "Got the " << COUNTER << "th cookie"; +// +// Outputs log messages for the first 20 times it is executed. +// +// Analogous SYSLOG, SYSLOG_IF, and SYSLOG_EVERY_N macros are available. +// These log to syslog as well as to the normal logs. If you use these at +// all, you need to be aware that syslog can drastically reduce performance, +// especially if it is configured for remote logging! Don't use these +// unless you fully understand this and have a concrete need to use them. +// Even then, try to minimize your use of them. +// +// There are also "debug mode" logging macros like the ones above: +// +// DLOG(INFO) << "Found cookies"; +// +// DLOG_IF(INFO, num_cookies > 10) << "Got lots of cookies"; +// +// DLOG_EVERY_N(INFO, 10) << "Got the " << COUNTER << "th cookie"; +// +// All "debug mode" logging is compiled away to nothing for non-debug mode +// compiles. +// +// We also have +// +// LOG_ASSERT(assertion); +// DLOG_ASSERT(assertion); +// +// which is syntactic sugar for {,D}LOG_IF(FATAL, assert fails) << assertion; +// +// There are "verbose level" logging macros. They look like +// +// VLOG(1) << "I'm printed when you run the program with --v=1 or more"; +// VLOG(2) << "I'm printed when you run the program with --v=2 or more"; +// +// These always log at the INFO log level (when they log at all). +// The verbose logging can also be turned on module-by-module. For instance, +// --vmodule=mapreduce=2,file=1,gfs*=3 --v=0 +// will cause: +// a. VLOG(2) and lower messages to be printed from mapreduce.{h,cc} +// b. VLOG(1) and lower messages to be printed from file.{h,cc} +// c. VLOG(3) and lower messages to be printed from files prefixed with "gfs" +// d. VLOG(0) and lower messages to be printed from elsewhere +// +// The wildcarding functionality shown by (c) supports both '*' (match +// 0 or more characters) and '?' (match any single character) wildcards. +// +// There's also VLOG_IS_ON(n) "verbose level" condition macro. To be used as +// +// if (VLOG_IS_ON(2)) { +// // do some logging preparation and logging +// // that can't be accomplished with just VLOG(2) << ...; +// } +// +// There are also VLOG_IF, VLOG_EVERY_N and VLOG_IF_EVERY_N "verbose level" +// condition macros for sample cases, when some extra computation and +// preparation for logs is not needed. +// VLOG_IF(1, (size > 1024)) +// << "I'm printed when size is more than 1024 and when you run the " +// "program with --v=1 or more"; +// VLOG_EVERY_N(1, 10) +// << "I'm printed every 10th occurrence, and when you run the program " +// "with --v=1 or more. Present occurence is " << COUNTER; +// VLOG_IF_EVERY_N(1, (size > 1024), 10) +// << "I'm printed on every 10th occurence of case when size is more " +// " than 1024, when you run the program with --v=1 or more. "; +// "Present occurence is " << COUNTER; +// +// The supported severity levels for macros that allow you to specify one +// are (in increasing order of severity) INFO, WARNING, ERROR, and FATAL. +// Note that messages of a given severity are logged not only in the +// logfile for that severity, but also in all logfiles of lower severity. 
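The --vmodule matching described above is what the SetVLOGLevel()/InitVLOG3__() code near the top of this patch implements; a small sketch of the caller side, with illustrative names:

    // e.g. run the program with:  --v=0 --vmodule=gfs*=3,handler=1
    void HandleRequest(size_t size) {
      VLOG(1) << "printed when this file's effective verbosity is >= 1";
      VLOG_IF(1, size > 1024) << "large request: " << size << " bytes";
      if (VLOG_IS_ON(2)) {
        // More expensive preparation, only done at verbosity >= 2.
        VLOG(2) << "verbose details for request of " << size << " bytes";
      }
    }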
+// E.g., a message of severity FATAL will be logged to the logfiles of +// severity FATAL, ERROR, WARNING, and INFO. +// +// There is also the special severity of DFATAL, which logs FATAL in +// debug mode, ERROR in normal mode. +// +// Very important: logging a message at the FATAL severity level causes +// the program to terminate (after the message is logged). +// +// Unless otherwise specified, logs will be written to the filename +// "...log..", followed +// by the date, time, and pid (you can't prevent the date, time, and pid +// from being in the filename). +// +// The logging code takes two flags: +// --v=# set the verbose level +// --logtostderr log all the messages to stderr instead of to logfiles + +// LOG LINE PREFIX FORMAT +// +// Log lines have this form: +// +// Lmmdd hh:mm:ss.uuuuuu threadid file:line] msg... +// +// where the fields are defined as follows: +// +// L A single character, representing the log level +// (eg 'I' for INFO) +// mm The month (zero padded; ie May is '05') +// dd The day (zero padded) +// hh:mm:ss.uuuuuu Time in hours, minutes and fractional seconds +// threadid The space-padded thread ID as returned by GetTID() +// (this matches the PID on Linux) +// file The file name +// line The line number +// msg The user-supplied message +// +// Example: +// +// I1103 11:57:31.739339 24395 google.cc:2341] Command line: ./some_prog +// I1103 11:57:31.739403 24395 google.cc:2342] Process id 24395 +// +// NOTE: although the microseconds are useful for comparing events on +// a single machine, clocks on different machines may not be well +// synchronized. Hence, use caution when comparing the low bits of +// timestamps from different machines. + +#ifndef DECLARE_VARIABLE +#define MUST_UNDEF_GFLAGS_DECLARE_MACROS +#define DECLARE_VARIABLE(type, name, tn) \ + namespace FLAG__namespace_do_not_use_directly_use_DECLARE_##tn##_instead { \ + extern GOOGLE_GLOG_DLL_DECL type FLAGS_##name; \ + } \ + using FLAG__namespace_do_not_use_directly_use_DECLARE_##tn##_instead::FLAGS_##name + +// bool specialization +#define DECLARE_bool(name) \ + DECLARE_VARIABLE(bool, name, bool) + +// int32 specialization +#define DECLARE_int32(name) \ + DECLARE_VARIABLE(google::int32, name, int32) + +// Special case for string, because we have to specify the namespace +// std::string, which doesn't play nicely with our FLAG__namespace hackery. +#define DECLARE_string(name) \ + namespace FLAG__namespace_do_not_use_directly_use_DECLARE_string_instead { \ + extern GOOGLE_GLOG_DLL_DECL std::string FLAGS_##name; \ + } \ + using FLAG__namespace_do_not_use_directly_use_DECLARE_string_instead::FLAGS_##name +#endif + +// Set whether log messages go to stderr instead of logfiles +DECLARE_bool(logtostderr); + +// Set whether log messages go to stderr in addition to logfiles. +DECLARE_bool(alsologtostderr); + +// Log messages at a level >= this flag are automatically sent to +// stderr in addition to log files. +DECLARE_int32(stderrthreshold); + +// Set whether the log prefix should be prepended to each line of output. +DECLARE_bool(log_prefix); + +// Log messages at a level <= this flag are buffered. +// Log messages at a higher level are flushed immediately. +DECLARE_int32(logbuflevel); + +// Sets the maximum number of seconds which logs may be buffered for. +DECLARE_int32(logbufsecs); + +// Log suppression level: messages logged at a lower level than this +// are suppressed. 
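The FLAGS_* globals declared above (and below) can be assigned directly before logging is initialized instead of being passed on the command line; a minimal sketch, with arbitrary example values:

    // Early in main(), before InitGoogleLogging():
    FLAGS_logtostderr = true;   // write to stderr, not to log files
    FLAGS_log_prefix  = true;   // keep the "I1103 11:57:31..." line prefix
    FLAGS_logbufsecs  = 0;      // flush buffered low-severity lines at once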
+DECLARE_int32(minloglevel); + +// If specified, logfiles are written into this directory instead of the +// default logging directory. +DECLARE_string(log_dir); + +// Sets the path of the directory into which to put additional links +// to the log files. +DECLARE_string(log_link); + +DECLARE_int32(v); // in vlog_is_on.cc + +// Sets the maximum log file size (in MB). +DECLARE_int32(max_log_size); + +// Sets whether to avoid logging to the disk if the disk is full. +DECLARE_bool(stop_logging_if_full_disk); + +#ifdef MUST_UNDEF_GFLAGS_DECLARE_MACROS +#undef MUST_UNDEF_GFLAGS_DECLARE_MACROS +#undef DECLARE_VARIABLE +#undef DECLARE_bool +#undef DECLARE_int32 +#undef DECLARE_string +#endif + +// Log messages below the GOOGLE_STRIP_LOG level will be compiled away for +// security reasons. See LOG(severtiy) below. + +// A few definitions of macros that don't generate much code. Since +// LOG(INFO) and its ilk are used all over our code, it's +// better to have compact code for these operations. + +#if GOOGLE_STRIP_LOG == 0 +#define COMPACT_GOOGLE_LOG_INFO google::LogMessage( \ + __FILE__, __LINE__) +#define LOG_TO_STRING_INFO(message) google::LogMessage( \ + __FILE__, __LINE__, google::INFO, message) +#else +#define COMPACT_GOOGLE_LOG_INFO google::NullStream() +#define LOG_TO_STRING_INFO(message) google::NullStream() +#endif + +#if GOOGLE_STRIP_LOG <= 1 +#define COMPACT_GOOGLE_LOG_WARNING google::LogMessage( \ + __FILE__, __LINE__, google::WARNING) +#define LOG_TO_STRING_WARNING(message) google::LogMessage( \ + __FILE__, __LINE__, google::WARNING, message) +#else +#define COMPACT_GOOGLE_LOG_WARNING google::NullStream() +#define LOG_TO_STRING_WARNING(message) google::NullStream() +#endif + +#if GOOGLE_STRIP_LOG <= 2 +#define COMPACT_GOOGLE_LOG_ERROR google::LogMessage( \ + __FILE__, __LINE__, google::ERROR) +#define LOG_TO_STRING_ERROR(message) google::LogMessage( \ + __FILE__, __LINE__, google::ERROR, message) +#else +#define COMPACT_GOOGLE_LOG_ERROR google::NullStream() +#define LOG_TO_STRING_ERROR(message) google::NullStream() +#endif + +#if GOOGLE_STRIP_LOG <= 3 +#define COMPACT_GOOGLE_LOG_FATAL google::LogMessageFatal( \ + __FILE__, __LINE__) +#define LOG_TO_STRING_FATAL(message) google::LogMessage( \ + __FILE__, __LINE__, google::FATAL, message) +#else +#define COMPACT_GOOGLE_LOG_FATAL google::NullStreamFatal() +#define LOG_TO_STRING_FATAL(message) google::NullStreamFatal() +#endif + +// For DFATAL, we want to use LogMessage (as opposed to +// LogMessageFatal), to be consistent with the original behavior. 
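The GOOGLE_STRIP_LOG gating used in the macros above is driven per translation unit; a sketch (a value of 1 strips INFO only, 2 strips INFO and WARNING, as explained earlier in this header; the include path is an assumption):

    // At the very top of one .cc file, before this header is pulled in:
    #define GOOGLE_STRIP_LOG 1        // compile away LOG(INFO) in this file only
    #include "glog/logging.h"

    void Quiet() {
      LOG(INFO)    << "compiled out in this file";
      LOG(WARNING) << "still emitted";
    }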
+#ifdef NDEBUG +#define COMPACT_GOOGLE_LOG_DFATAL COMPACT_GOOGLE_LOG_ERROR +#elif GOOGLE_STRIP_LOG <= 3 +#define COMPACT_GOOGLE_LOG_DFATAL google::LogMessage( \ + __FILE__, __LINE__, google::FATAL) +#else +#define COMPACT_GOOGLE_LOG_DFATAL google::NullStreamFatal() +#endif + +#define GOOGLE_LOG_INFO(counter) google::LogMessage(__FILE__, __LINE__, google::INFO, counter, &google::LogMessage::SendToLog) +#define SYSLOG_INFO(counter) \ + google::LogMessage(__FILE__, __LINE__, google::INFO, counter, \ + &google::LogMessage::SendToSyslogAndLog) +#define GOOGLE_LOG_WARNING(counter) \ + google::LogMessage(__FILE__, __LINE__, google::WARNING, counter, \ + &google::LogMessage::SendToLog) +#define SYSLOG_WARNING(counter) \ + google::LogMessage(__FILE__, __LINE__, google::WARNING, counter, \ + &google::LogMessage::SendToSyslogAndLog) +#define GOOGLE_LOG_ERROR(counter) \ + google::LogMessage(__FILE__, __LINE__, google::ERROR, counter, \ + &google::LogMessage::SendToLog) +#define SYSLOG_ERROR(counter) \ + google::LogMessage(__FILE__, __LINE__, google::ERROR, counter, \ + &google::LogMessage::SendToSyslogAndLog) +#define GOOGLE_LOG_FATAL(counter) \ + google::LogMessage(__FILE__, __LINE__, google::FATAL, counter, \ + &google::LogMessage::SendToLog) +#define SYSLOG_FATAL(counter) \ + google::LogMessage(__FILE__, __LINE__, google::FATAL, counter, \ + &google::LogMessage::SendToSyslogAndLog) +#define GOOGLE_LOG_DFATAL(counter) \ + google::LogMessage(__FILE__, __LINE__, google::DFATAL_LEVEL, counter, \ + &google::LogMessage::SendToLog) +#define SYSLOG_DFATAL(counter) \ + google::LogMessage(__FILE__, __LINE__, google::DFATAL_LEVEL, counter, \ + &google::LogMessage::SendToSyslogAndLog) + +#if defined(WIN32) || defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__) || defined(__CYGWIN32__) +// A very useful logging macro to log windows errors: +#define LOG_SYSRESULT(result) \ + if (FAILED(result)) { \ + LPTSTR message = NULL; \ + LPTSTR msg = reinterpret_cast(&message); \ + DWORD message_length = FormatMessage(FORMAT_MESSAGE_ALLOCATE_BUFFER | \ + FORMAT_MESSAGE_FROM_SYSTEM, \ + 0, result, 0, msg, 100, NULL); \ + if (message_length > 0) { \ + google::LogMessage(__FILE__, __LINE__, ERROR, 0, \ + &google::LogMessage::SendToLog).stream() << message; \ + LocalFree(message); \ + } \ + } +#endif + +// We use the preprocessor's merging operator, "##", so that, e.g., +// LOG(INFO) becomes the token GOOGLE_LOG_INFO. There's some funny +// subtle difference between ostream member streaming functions (e.g., +// ostream::operator<<(int) and ostream non-member streaming functions +// (e.g., ::operator<<(ostream&, string&): it turns out that it's +// impossible to stream something like a string directly to an unnamed +// ostream. We employ a neat hack by calling the stream() member +// function of LogMessage which seems to avoid the problem. +#define LOG(severity) COMPACT_GOOGLE_LOG_ ## severity.stream() +#define SYSLOG(severity) SYSLOG_ ## severity(0).stream() + +namespace google { + +// They need the definitions of integer types. +#include "glog/log_severity.h" +#include "glog/vlog_is_on.h" + +// Initialize google's logging library. You will see the program name +// specified by argv0 in log outputs. +GOOGLE_GLOG_DLL_DECL void InitGoogleLogging(const char* argv0); + +// Shutdown google's logging library. +GOOGLE_GLOG_DLL_DECL void ShutdownGoogleLogging(); + +// Install a function which will be called after LOG(FATAL). 
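For orientation, a minimal program wiring up the entry points declared above; the include path depends on how this third_party tree is integrated into a build, so treat it as an assumption:

    #include "glog/logging.h"

    int main(int argc, char* argv[]) {
      google::InitGoogleLogging(argv[0]);   // argv[0] shows up in log output
      LOG(INFO) << "started with " << (argc - 1) << " argument(s)";
      google::ShutdownGoogleLogging();
      return 0;
    }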
+GOOGLE_GLOG_DLL_DECL void InstallFailureFunction(void (*fail_func)()); + +class LogSink; // defined below + +// If a non-NULL sink pointer is given, we push this message to that sink. +// For LOG_TO_SINK we then do normal LOG(severity) logging as well. +// This is useful for capturing messages and passing/storing them +// somewhere more specific than the global log of the process. +// Argument types: +// LogSink* sink; +// LogSeverity severity; +// The cast is to disambiguate NULL arguments. +#define LOG_TO_SINK(sink, severity) \ + google::LogMessage( \ + __FILE__, __LINE__, \ + google::severity, \ + static_cast(sink), true).stream() +#define LOG_TO_SINK_BUT_NOT_TO_LOGFILE(sink, severity) \ + google::LogMessage( \ + __FILE__, __LINE__, \ + google::severity, \ + static_cast(sink), false).stream() + +// If a non-NULL string pointer is given, we write this message to that string. +// We then do normal LOG(severity) logging as well. +// This is useful for capturing messages and storing them somewhere more +// specific than the global log of the process. +// Argument types: +// string* message; +// LogSeverity severity; +// The cast is to disambiguate NULL arguments. +// NOTE: LOG(severity) expands to LogMessage().stream() for the specified +// severity. +#define LOG_TO_STRING(severity, message) \ + LOG_TO_STRING_##severity(static_cast(message)).stream() + +// If a non-NULL pointer is given, we push the message onto the end +// of a vector of strings; otherwise, we report it with LOG(severity). +// This is handy for capturing messages and perhaps passing them back +// to the caller, rather than reporting them immediately. +// Argument types: +// LogSeverity severity; +// vector *outvec; +// The cast is to disambiguate NULL arguments. +#define LOG_STRING(severity, outvec) \ + LOG_TO_STRING_##severity(static_cast*>(outvec)).stream() + +#define LOG_IF(severity, condition) \ + !(condition) ? (void) 0 : google::LogMessageVoidify() & LOG(severity) +#define SYSLOG_IF(severity, condition) \ + !(condition) ? (void) 0 : google::LogMessageVoidify() & SYSLOG(severity) + +#define LOG_ASSERT(condition) \ + LOG_IF(FATAL, !(condition)) << "Assert failed: " #condition +#define SYSLOG_ASSERT(condition) \ + SYSLOG_IF(FATAL, !(condition)) << "Assert failed: " #condition + +// CHECK dies with a fatal error if condition is not true. It is *not* +// controlled by NDEBUG, so the check will be executed regardless of +// compilation mode. Therefore, it is safe to do things like: +// CHECK(fp->Write(x) == 4) +#define CHECK(condition) \ + LOG_IF(FATAL, GOOGLE_PREDICT_BRANCH_NOT_TAKEN(!(condition))) \ + << "Check failed: " #condition " " + +// A container for a string pointer which can be evaluated to a bool - +// true iff the pointer is NULL. +struct CheckOpString { + CheckOpString(std::string* str) : str_(str) { } + // No destructor: if str_ is non-NULL, we're about to LOG(FATAL), + // so there's no point in cleaning up str_. + operator bool() const { + return GOOGLE_PREDICT_BRANCH_NOT_TAKEN(str_ != NULL); + } + std::string* str_; +}; + +// Function is overloaded for integral types to allow static const +// integrals declared in classes and not defined to be used as arguments to +// CHECK* macros. It's not encouraged though. 
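A small sketch of the always-on checks declared above (the file-writing helper is made up for illustration):

    #include <cstdio>
    #include <string>

    void SaveMarker(FILE* fp) {
      CHECK(fp != NULL) << "marker file not open";
      // CHECK is evaluated in every build mode, so keeping the side effect
      // inside the condition is safe (unlike with DCHECK).
      CHECK(fputs("done\n", fp) >= 0) << "write failed";

      std::string captured;
      LOG_TO_STRING(ERROR, &captured) << "kept in 'captured' and also logged";
    }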
+template +inline const T& GetReferenceableValue(const T& t) { return t; } +inline char GetReferenceableValue(char t) { return t; } +inline unsigned char GetReferenceableValue(unsigned char t) { return t; } +inline signed char GetReferenceableValue(signed char t) { return t; } +inline short GetReferenceableValue(short t) { return t; } +inline unsigned short GetReferenceableValue(unsigned short t) { return t; } +inline int GetReferenceableValue(int t) { return t; } +inline unsigned int GetReferenceableValue(unsigned int t) { return t; } +inline long GetReferenceableValue(long t) { return t; } +inline unsigned long GetReferenceableValue(unsigned long t) { return t; } +inline long long GetReferenceableValue(long long t) { return t; } +inline unsigned long long GetReferenceableValue(unsigned long long t) { + return t; +} + +// This is a dummy class to define the following operator. +struct DummyClassToDefineOperator {}; + +} + +// Define global operator<< to declare using ::operator<<. +// This declaration will allow use to use CHECK macros for user +// defined classes which have operator<< (e.g., stl_logging.h). +inline std::ostream& operator<<( + std::ostream& out, const google::DummyClassToDefineOperator&) { + return out; +} + +namespace google { + +// Build the error message string. +template +std::string* MakeCheckOpString(const t1& v1, const t2& v2, const char* names) { + // It means that we cannot use stl_logging if compiler doesn't + // support using expression for operator. + // TODO(hamaji): Figure out a way to fix. +#if 1 + using ::operator<<; +#endif + std::strstream ss; + ss << names << " (" << v1 << " vs. " << v2 << ")"; + return new std::string(ss.str(), ss.pcount()); +} + +// Helper functions for CHECK_OP macro. +// The (int, int) specialization works around the issue that the compiler +// will not instantiate the template version of the function on values of +// unnamed enum type - see comment below. +#define DEFINE_CHECK_OP_IMPL(name, op) \ + template \ + inline std::string* Check##name##Impl(const t1& v1, const t2& v2, \ + const char* names) { \ + if (v1 op v2) return NULL; \ + else return MakeCheckOpString(v1, v2, names); \ + } \ + inline std::string* Check##name##Impl(int v1, int v2, const char* names) { \ + return Check##name##Impl(v1, v2, names); \ + } + +// Use _EQ, _NE, _LE, etc. in case the file including base/logging.h +// provides its own #defines for the simpler names EQ, NE, LE, etc. +// This happens if, for example, those are used as token names in a +// yacc grammar. +DEFINE_CHECK_OP_IMPL(_EQ, ==) +DEFINE_CHECK_OP_IMPL(_NE, !=) +DEFINE_CHECK_OP_IMPL(_LE, <=) +DEFINE_CHECK_OP_IMPL(_LT, < ) +DEFINE_CHECK_OP_IMPL(_GE, >=) +DEFINE_CHECK_OP_IMPL(_GT, > ) +#undef DEFINE_CHECK_OP_IMPL + +// Helper macro for binary operators. +// Don't use this macro directly in your code, use CHECK_EQ et al below. + +#if defined(STATIC_ANALYSIS) +// Only for static analysis tool to know that it is equivalent to assert +#define CHECK_OP_LOG(name, op, val1, val2, log) CHECK((val1) op (val2)) +#elif !defined(NDEBUG) +// In debug mode, avoid constructing CheckOpStrings if possible, +// to reduce the overhead of CHECK statments by 2x. +// Real DCHECK-heavy tests have seen 1.5x speedups. + +// The meaning of "string" might be different between now and +// when this macro gets invoked (e.g., if someone is experimenting +// with other string implementations that get defined after this +// file is included). Save the current meaning now and use it +// in the macro. 
+typedef std::string _Check_string; +#define CHECK_OP_LOG(name, op, val1, val2, log) \ + while (google::_Check_string* _result = \ + google::Check##name##Impl( \ + google::GetReferenceableValue(val1), \ + google::GetReferenceableValue(val2), \ + #val1 " " #op " " #val2)) \ + log(__FILE__, __LINE__, \ + google::CheckOpString(_result)).stream() +#else +// In optimized mode, use CheckOpString to hint to compiler that +// the while condition is unlikely. +#define CHECK_OP_LOG(name, op, val1, val2, log) \ + while (google::CheckOpString _result = \ + google::Check##name##Impl( \ + google::GetReferenceableValue(val1), \ + google::GetReferenceableValue(val2), \ + #val1 " " #op " " #val2)) \ + log(__FILE__, __LINE__, _result).stream() +#endif // STATIC_ANALYSIS, !NDEBUG + +#if GOOGLE_STRIP_LOG <= 3 +#define CHECK_OP(name, op, val1, val2) \ + CHECK_OP_LOG(name, op, val1, val2, google::LogMessageFatal) +#else +#define CHECK_OP(name, op, val1, val2) \ + CHECK_OP_LOG(name, op, val1, val2, google::NullStreamFatal) +#endif // STRIP_LOG <= 3 + +// Equality/Inequality checks - compare two values, and log a FATAL message +// including the two values when the result is not as expected. The values +// must have operator<<(ostream, ...) defined. +// +// You may append to the error message like so: +// CHECK_NE(1, 2) << ": The world must be ending!"; +// +// We are very careful to ensure that each argument is evaluated exactly +// once, and that anything which is legal to pass as a function argument is +// legal here. In particular, the arguments may be temporary expressions +// which will end up being destroyed at the end of the apparent statement, +// for example: +// CHECK_EQ(string("abc")[1], 'b'); +// +// WARNING: These don't compile correctly if one of the arguments is a pointer +// and the other is NULL. To work around this, simply static_cast NULL to the +// type of the desired pointer. + +#define CHECK_EQ(val1, val2) CHECK_OP(_EQ, ==, val1, val2) +#define CHECK_NE(val1, val2) CHECK_OP(_NE, !=, val1, val2) +#define CHECK_LE(val1, val2) CHECK_OP(_LE, <=, val1, val2) +#define CHECK_LT(val1, val2) CHECK_OP(_LT, < , val1, val2) +#define CHECK_GE(val1, val2) CHECK_OP(_GE, >=, val1, val2) +#define CHECK_GT(val1, val2) CHECK_OP(_GT, > , val1, val2) + +// Check that the input is non NULL. This very useful in constructor +// initializer lists. + +#define CHECK_NOTNULL(val) \ + google::CheckNotNull(__FILE__, __LINE__, "'" #val "' Must be non NULL", (val)) + +// Helper functions for string comparisons. +// To avoid bloat, the definitions are in logging.cc. +#define DECLARE_CHECK_STROP_IMPL(func, expected) \ + GOOGLE_GLOG_DLL_DECL std::string* Check##func##expected##Impl( \ + const char* s1, const char* s2, const char* names); +DECLARE_CHECK_STROP_IMPL(strcmp, true) +DECLARE_CHECK_STROP_IMPL(strcmp, false) +DECLARE_CHECK_STROP_IMPL(strcasecmp, true) +DECLARE_CHECK_STROP_IMPL(strcasecmp, false) +#undef DECLARE_CHECK_STROP_IMPL + +// Helper macro for string comparisons. +// Don't use this macro directly in your code, use CHECK_STREQ et al below. +#define CHECK_STROP(func, op, expected, s1, s2) \ + while (google::CheckOpString _result = \ + google::Check##func##expected##Impl((s1), (s2), \ + #s1 " " #op " " #s2)) \ + LOG(FATAL) << *_result.str_ + + +// String (char*) equality/inequality checks. +// CASE versions are case-insensitive. +// +// Note that "s1" and "s2" may be temporary strings which are destroyed +// by the compiler at the end of the current "full expression" +// (e.g. 
CHECK_STREQ(Foo().c_str(), Bar().c_str())). + +#define CHECK_STREQ(s1, s2) CHECK_STROP(strcmp, ==, true, s1, s2) +#define CHECK_STRNE(s1, s2) CHECK_STROP(strcmp, !=, false, s1, s2) +#define CHECK_STRCASEEQ(s1, s2) CHECK_STROP(strcasecmp, ==, true, s1, s2) +#define CHECK_STRCASENE(s1, s2) CHECK_STROP(strcasecmp, !=, false, s1, s2) + +#define CHECK_INDEX(I,A) CHECK(I < (sizeof(A)/sizeof(A[0]))) +#define CHECK_BOUND(B,A) CHECK(B <= (sizeof(A)/sizeof(A[0]))) + +#define CHECK_DOUBLE_EQ(val1, val2) \ + do { \ + CHECK_LE((val1), (val2)+0.000000000000001L); \ + CHECK_GE((val1), (val2)-0.000000000000001L); \ + } while (0) + +#define CHECK_NEAR(val1, val2, margin) \ + do { \ + CHECK_LE((val1), (val2)+(margin)); \ + CHECK_GE((val1), (val2)-(margin)); \ + } while (0) + +// perror()..googly style! +// +// PLOG() and PLOG_IF() and PCHECK() behave exactly like their LOG* and +// CHECK equivalents with the addition that they postpend a description +// of the current state of errno to their output lines. + +#define PLOG(severity) GOOGLE_PLOG(severity, 0).stream() + +#define GOOGLE_PLOG(severity, counter) \ + google::ErrnoLogMessage( \ + __FILE__, __LINE__, google::severity, counter, \ + &google::LogMessage::SendToLog) + +#define PLOG_IF(severity, condition) \ + !(condition) ? (void) 0 : google::LogMessageVoidify() & PLOG(severity) + +// A CHECK() macro that postpends errno if the condition is false. E.g. +// +// if (poll(fds, nfds, timeout) == -1) { PCHECK(errno == EINTR); ... } +#define PCHECK(condition) \ + PLOG_IF(FATAL, GOOGLE_PREDICT_BRANCH_NOT_TAKEN(!(condition))) \ + << "Check failed: " #condition " " + +// A CHECK() macro that lets you assert the success of a function that +// returns -1 and sets errno in case of an error. E.g. +// +// CHECK_ERR(mkdir(path, 0700)); +// +// or +// +// int fd = open(filename, flags); CHECK_ERR(fd) << ": open " << filename; +#define CHECK_ERR(invocation) \ +PLOG_IF(FATAL, GOOGLE_PREDICT_BRANCH_NOT_TAKEN((invocation) == -1)) \ + << #invocation + +// Use macro expansion to create, for each use of LOG_EVERY_N(), static +// variables with the __LINE__ expansion as part of the variable name. 
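The comparison and errno-aware checks above in one illustrative snippet (the names and the file being removed are placeholders):

    #include <cstdio>
    #include <string>

    void CheckResult(const std::string& name, double measured,
                     int n_read, int n_expected) {
      CHECK_EQ(n_read, n_expected) << ": short read from " << name;
      CHECK_STRNE(name.c_str(), "");          // char* helpers, case-sensitive
      CHECK_NEAR(measured, 1.0, 0.01);        // |measured - 1.0| <= 0.01
      PCHECK(remove(name.c_str()) == 0);      // errno description is appended on failure
    }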
+#define LOG_EVERY_N_VARNAME(base, line) LOG_EVERY_N_VARNAME_CONCAT(base, line) +#define LOG_EVERY_N_VARNAME_CONCAT(base, line) base ## line + +#define LOG_OCCURRENCES LOG_EVERY_N_VARNAME(occurrences_, __LINE__) +#define LOG_OCCURRENCES_MOD_N LOG_EVERY_N_VARNAME(occurrences_mod_n_, __LINE__) + +#define SOME_KIND_OF_LOG_EVERY_N(severity, n, what_to_do) \ + static int LOG_OCCURRENCES = 0, LOG_OCCURRENCES_MOD_N = 0; \ + ++LOG_OCCURRENCES; \ + if (++LOG_OCCURRENCES_MOD_N > n) LOG_OCCURRENCES_MOD_N -= n; \ + if (LOG_OCCURRENCES_MOD_N == 1) \ + google::LogMessage( \ + __FILE__, __LINE__, google::severity, LOG_OCCURRENCES, \ + &what_to_do).stream() + +#define SOME_KIND_OF_LOG_IF_EVERY_N(severity, condition, n, what_to_do) \ + static int LOG_OCCURRENCES = 0, LOG_OCCURRENCES_MOD_N = 0; \ + ++LOG_OCCURRENCES; \ + if (condition && \ + ((LOG_OCCURRENCES_MOD_N=(LOG_OCCURRENCES_MOD_N + 1) % n) == (1 % n))) \ + google::LogMessage( \ + __FILE__, __LINE__, google::severity, LOG_OCCURRENCES, \ + &what_to_do).stream() + +#define SOME_KIND_OF_PLOG_EVERY_N(severity, n, what_to_do) \ + static int LOG_OCCURRENCES = 0, LOG_OCCURRENCES_MOD_N = 0; \ + ++LOG_OCCURRENCES; \ + if (++LOG_OCCURRENCES_MOD_N > n) LOG_OCCURRENCES_MOD_N -= n; \ + if (LOG_OCCURRENCES_MOD_N == 1) \ + google::ErrnoLogMessage( \ + __FILE__, __LINE__, google::severity, LOG_OCCURRENCES, \ + &what_to_do).stream() + +#define SOME_KIND_OF_LOG_FIRST_N(severity, n, what_to_do) \ + static int LOG_OCCURRENCES = 0; \ + if (LOG_OCCURRENCES <= n) \ + ++LOG_OCCURRENCES; \ + if (LOG_OCCURRENCES <= n) \ + google::LogMessage( \ + __FILE__, __LINE__, google::severity, LOG_OCCURRENCES, \ + &what_to_do).stream() + +namespace glog_internal_namespace_ { +template +struct CompileAssert { +}; +struct CrashReason; +} // namespace glog_internal_namespace_ + +#define GOOGLE_GLOG_COMPILE_ASSERT(expr, msg) \ + typedef google::glog_internal_namespace_::CompileAssert<(bool(expr))> msg[bool(expr) ? 1 : -1] + +#define LOG_EVERY_N(severity, n) \ + GOOGLE_GLOG_COMPILE_ASSERT(google::severity < \ + google::NUM_SEVERITIES, \ + INVALID_REQUESTED_LOG_SEVERITY); \ + SOME_KIND_OF_LOG_EVERY_N(severity, (n), google::LogMessage::SendToLog) + +#define SYSLOG_EVERY_N(severity, n) \ + SOME_KIND_OF_LOG_EVERY_N(severity, (n), google::LogMessage::SendToSyslogAndLog) + +#define PLOG_EVERY_N(severity, n) \ + SOME_KIND_OF_PLOG_EVERY_N(severity, (n), google::LogMessage::SendToLog) + +#define LOG_FIRST_N(severity, n) \ + SOME_KIND_OF_LOG_FIRST_N(severity, (n), google::LogMessage::SendToLog) + +#define LOG_IF_EVERY_N(severity, condition, n) \ + SOME_KIND_OF_LOG_IF_EVERY_N(severity, (condition), (n), google::LogMessage::SendToLog) + +// We want the special COUNTER value available for LOG_EVERY_X()'ed messages +enum PRIVATE_Counter {COUNTER}; + + +// Plus some debug-logging macros that get compiled to nothing for production + +#ifndef NDEBUG + +#define DLOG(severity) LOG(severity) +#define DVLOG(verboselevel) VLOG(verboselevel) +#define DLOG_IF(severity, condition) LOG_IF(severity, condition) +#define DLOG_EVERY_N(severity, n) LOG_EVERY_N(severity, n) +#define DLOG_IF_EVERY_N(severity, condition, n) \ + LOG_IF_EVERY_N(severity, condition, n) +#define DLOG_ASSERT(condition) LOG_ASSERT(condition) + +// debug-only checking. not executed in NDEBUG mode. 
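How the debug-only variants above behave, sketched with made-up names; everything here compiles to nothing under NDEBUG except LOG_FIRST_N:

    void Tick(int cache_misses) {
      DLOG(INFO) << "per-tick detail, free in release builds";
      DLOG_IF(WARNING, cache_misses > 100) << "cache misses: " << cache_misses;
      DVLOG(2) << "needs a debug build *and* --v=2 or more";
      LOG_FIRST_N(INFO, 5) << "logged only the first 5 times; this is #" << google::COUNTER;
    }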
+#define DCHECK(condition) CHECK(condition) +#define DCHECK_EQ(val1, val2) CHECK_EQ(val1, val2) +#define DCHECK_NE(val1, val2) CHECK_NE(val1, val2) +#define DCHECK_LE(val1, val2) CHECK_LE(val1, val2) +#define DCHECK_LT(val1, val2) CHECK_LT(val1, val2) +#define DCHECK_GE(val1, val2) CHECK_GE(val1, val2) +#define DCHECK_GT(val1, val2) CHECK_GT(val1, val2) +#define DCHECK_NOTNULL(val) CHECK_NOTNULL(val) +#define DCHECK_STREQ(str1, str2) CHECK_STREQ(str1, str2) +#define DCHECK_STRCASEEQ(str1, str2) CHECK_STRCASEEQ(str1, str2) +#define DCHECK_STRNE(str1, str2) CHECK_STRNE(str1, str2) +#define DCHECK_STRCASENE(str1, str2) CHECK_STRCASENE(str1, str2) + +#else // NDEBUG + +#define DLOG(severity) \ + true ? (void) 0 : google::LogMessageVoidify() & LOG(severity) + +#define DVLOG(verboselevel) \ + (true || !VLOG_IS_ON(verboselevel)) ?\ + (void) 0 : google::LogMessageVoidify() & LOG(INFO) + +#define DLOG_IF(severity, condition) \ + (true || !(condition)) ? (void) 0 : google::LogMessageVoidify() & LOG(severity) + +#define DLOG_EVERY_N(severity, n) \ + true ? (void) 0 : google::LogMessageVoidify() & LOG(severity) + +#define DLOG_IF_EVERY_N(severity, condition, n) \ + (true || !(condition))? (void) 0 : google::LogMessageVoidify() & LOG(severity) + +#define DLOG_ASSERT(condition) \ + true ? (void) 0 : LOG_ASSERT(condition) + +#define DCHECK(condition) \ + while (false) \ + CHECK(condition) + +#define DCHECK_EQ(val1, val2) \ + while (false) \ + CHECK_EQ(val1, val2) + +#define DCHECK_NE(val1, val2) \ + while (false) \ + CHECK_NE(val1, val2) + +#define DCHECK_LE(val1, val2) \ + while (false) \ + CHECK_LE(val1, val2) + +#define DCHECK_LT(val1, val2) \ + while (false) \ + CHECK_LT(val1, val2) + +#define DCHECK_GE(val1, val2) \ + while (false) \ + CHECK_GE(val1, val2) + +#define DCHECK_GT(val1, val2) \ + while (false) \ + CHECK_GT(val1, val2) + +#define DCHECK_NOTNULL(val) (val) + +#define DCHECK_STREQ(str1, str2) \ + while (false) \ + CHECK_STREQ(str1, str2) + +#define DCHECK_STRCASEEQ(str1, str2) \ + while (false) \ + CHECK_STRCASEEQ(str1, str2) + +#define DCHECK_STRNE(str1, str2) \ + while (false) \ + CHECK_STRNE(str1, str2) + +#define DCHECK_STRCASENE(str1, str2) \ + while (false) \ + CHECK_STRCASENE(str1, str2) + + +#endif // NDEBUG + +// Log only in verbose mode. + +#define VLOG(verboselevel) LOG_IF(INFO, VLOG_IS_ON(verboselevel)) + +#define VLOG_IF(verboselevel, condition) \ + LOG_IF(INFO, (condition) && VLOG_IS_ON(verboselevel)) + +#define VLOG_EVERY_N(verboselevel, n) \ + LOG_IF_EVERY_N(INFO, VLOG_IS_ON(verboselevel), n) + +#define VLOG_IF_EVERY_N(verboselevel, condition, n) \ + LOG_IF_EVERY_N(INFO, (condition) && VLOG_IS_ON(verboselevel), n) + +// +// This class more or less represents a particular log message. You +// create an instance of LogMessage and then stream stuff to it. +// When you finish streaming to it, ~LogMessage is called and the +// full message gets streamed to the appropriate destination. +// +// You shouldn't actually use LogMessage's constructor to log things, +// though. You should use the LOG() macro (and variants thereof) +// above. +class GOOGLE_GLOG_DLL_DECL LogMessage { +public: + enum { + // Passing kNoLogPrefix for the line number disables the + // log-message prefix. Useful for using the LogMessage + // infrastructure as a printing utility. See also the --log_prefix + // flag for controlling the log-message prefix on an + // application-wide basis. 
+ kNoLogPrefix = -1 + }; + + // LogStream inherit from non-DLL-exported class (std::ostrstream) + // and VC++ produces a warning for this situation. + // However, MSDN says "C4275 can be ignored in Microsoft Visual C++ + // 2005 if you are deriving from a type in the Standard C++ Library" + // http://msdn.microsoft.com/en-us/library/3tdb471s(VS.80).aspx + // Let's just ignore the warning. +#ifdef _MSC_VER +# pragma warning(disable: 4275) +#endif + class GOOGLE_GLOG_DLL_DECL LogStream : public std::ostrstream { +#ifdef _MSC_VER +# pragma warning(default: 4275) +#endif + public: + LogStream(char *buf, int len, int ctr) + : ostrstream(buf, len), + ctr_(ctr) { + self_ = this; + } + + int ctr() const { return ctr_; } + void set_ctr(int ctr) { ctr_ = ctr; } + LogStream* self() const { return self_; } + + private: + int ctr_; // Counter hack (for the LOG_EVERY_X() macro) + LogStream *self_; // Consistency check hack + }; + +public: + // icc 8 requires this typedef to avoid an internal compiler error. + typedef void (LogMessage::*SendMethod)(); + + LogMessage(const char* file, int line, LogSeverity severity, int ctr, + SendMethod send_method); + + // Two special constructors that generate reduced amounts of code at + // LOG call sites for common cases. + + // Used for LOG(INFO): Implied are: + // severity = INFO, ctr = 0, send_method = &LogMessage::SendToLog. + // + // Using this constructor instead of the more complex constructor above + // saves 19 bytes per call site. + LogMessage(const char* file, int line); + + // Used for LOG(severity) where severity != INFO. Implied + // are: ctr = 0, send_method = &LogMessage::SendToLog + // + // Using this constructor instead of the more complex constructor above + // saves 17 bytes per call site. + LogMessage(const char* file, int line, LogSeverity severity); + + // Constructor to log this message to a specified sink (if not NULL). + // Implied are: ctr = 0, send_method = &LogMessage::SendToSinkAndLog if + // also_send_to_log is true, send_method = &LogMessage::SendToSink otherwise. + LogMessage(const char* file, int line, LogSeverity severity, LogSink* sink, + bool also_send_to_log); + + // Constructor where we also give a vector pointer + // for storing the messages (if the pointer is not NULL). + // Implied are: ctr = 0, send_method = &LogMessage::SaveOrSendToLog. + LogMessage(const char* file, int line, LogSeverity severity, + std::vector* outvec); + + // Constructor where we also give a string pointer for storing the + // message (if the pointer is not NULL). Implied are: ctr = 0, + // send_method = &LogMessage::WriteToStringAndLog. + LogMessage(const char* file, int line, LogSeverity severity, + std::string* message); + + // A special constructor used for check failures + LogMessage(const char* file, int line, const CheckOpString& result); + + ~LogMessage(); + + // Flush a buffered message to the sink set in the constructor. Always + // called by the destructor, it may also be called from elsewhere if + // needed. Only the first call is actioned; any later ones are ignored. + void Flush(); + + // An arbitrary limit on the length of a single log message. This + // is so that streaming can be done more efficiently. 
+ static const size_t kMaxLogMessageLen; + + // Theses should not be called directly outside of logging.*, + // only passed as SendMethod arguments to other LogMessage methods: + void SendToLog(); // Actually dispatch to the logs + void SendToSyslogAndLog(); // Actually dispatch to syslog and the logs + + // Call abort() or similar to perform LOG(FATAL) crash. + static void Fail() ; + + std::ostream& stream() { return *(data_->stream_); } + + int preserved_errno() const { return data_->preserved_errno_; } + + // Must be called without the log_mutex held. (L < log_mutex) + static int64 num_messages(int severity); + +private: + // Fully internal SendMethod cases: + void SendToSinkAndLog(); // Send to sink if provided and dispatch to the logs + void SendToSink(); // Send to sink if provided, do nothing otherwise. + + // Write to string if provided and dispatch to the logs. + void WriteToStringAndLog(); + + void SaveOrSendToLog(); // Save to stringvec if provided, else to logs + + void Init(const char* file, int line, LogSeverity severity, + void (LogMessage::*send_method)()); + + // Used to fill in crash information during LOG(FATAL) failures. + void RecordCrashReason(glog_internal_namespace_::CrashReason* reason); + + // Counts of messages sent at each priority: + static int64 num_messages_[NUM_SEVERITIES]; // under log_mutex + + // We keep the data in a separate struct so that each instance of + // LogMessage uses less stack space. + struct GOOGLE_GLOG_DLL_DECL LogMessageData { + LogMessageData() {}; + + int preserved_errno_; // preserved errno + char* buf_; + char* message_text_; // Complete message text (points to selected buffer) + LogStream* stream_alloc_; + LogStream* stream_; + char severity_; // What level is this LogMessage logged at? + int line_; // line number where logging call is. + void (LogMessage::*send_method_)(); // Call this in destructor to send + union { // At most one of these is used: union to keep the size low. + LogSink* sink_; // NULL or sink to send message to + std::vector* outvec_; // NULL or vector to push message onto + std::string* message_; // NULL or string to write message into + }; + time_t timestamp_; // Time of creation of LogMessage + struct ::tm tm_time_; // Time of creation of LogMessage + size_t num_prefix_chars_; // # of chars of prefix in this message + size_t num_chars_to_log_; // # of chars of msg to send to log + size_t num_chars_to_syslog_; // # of chars of msg to send to syslog + const char* basename_; // basename of file that called LOG + const char* fullname_; // fullname of file that called LOG + bool has_been_flushed_; // false => data has not been flushed + bool first_fatal_; // true => this was first fatal msg + + ~LogMessageData(); + private: + LogMessageData(const LogMessageData&); + void operator=(const LogMessageData&); + }; + + static LogMessageData fatal_msg_data_exclusive_; + static LogMessageData fatal_msg_data_shared_; + + LogMessageData* allocated_; + LogMessageData* data_; + + friend class LogDestination; + + LogMessage(const LogMessage&); + void operator=(const LogMessage&); +}; + +// This class happens to be thread-hostile because all instances share +// a single data buffer, but since it can only be created just before +// the process dies, we don't worry so much. 
+class GOOGLE_GLOG_DLL_DECL LogMessageFatal : public LogMessage { + public: + LogMessageFatal(const char* file, int line); + LogMessageFatal(const char* file, int line, const CheckOpString& result); + ~LogMessageFatal() ; +}; + +// A non-macro interface to the log facility; (useful +// when the logging level is not a compile-time constant). +inline void LogAtLevel(int const severity, std::string const &msg) { + LogMessage(__FILE__, __LINE__, severity).stream() << msg; +} + +// A macro alternative of LogAtLevel. New code may want to use this +// version since there are two advantages: 1. this version outputs the +// file name and the line number where this macro is put like other +// LOG macros, 2. this macro can be used as C++ stream. +#define LOG_AT_LEVEL(severity) google::LogMessage(__FILE__, __LINE__, severity).stream() + +// A small helper for CHECK_NOTNULL(). +template +T* CheckNotNull(const char *file, int line, const char *names, T* t) { + if (t == NULL) { + LogMessageFatal(file, line, new std::string(names)); + } + return t; +} + +// Allow folks to put a counter in the LOG_EVERY_X()'ed messages. This +// only works if ostream is a LogStream. If the ostream is not a +// LogStream you'll get an assert saying as much at runtime. +GOOGLE_GLOG_DLL_DECL std::ostream& operator<<(std::ostream &os, + const PRIVATE_Counter&); + + +// Derived class for PLOG*() above. +class GOOGLE_GLOG_DLL_DECL ErrnoLogMessage : public LogMessage { + public: + + ErrnoLogMessage(const char* file, int line, LogSeverity severity, int ctr, + void (LogMessage::*send_method)()); + + // Postpends ": strerror(errno) [errno]". + ~ErrnoLogMessage(); + + private: + ErrnoLogMessage(const ErrnoLogMessage&); + void operator=(const ErrnoLogMessage&); +}; + + +// This class is used to explicitly ignore values in the conditional +// logging macros. This avoids compiler warnings like "value computed +// is not used" and "statement has no effect". + +class GOOGLE_GLOG_DLL_DECL LogMessageVoidify { + public: + LogMessageVoidify() { } + // This has to be an operator with a precedence lower than << but + // higher than ?: + void operator&(std::ostream&) { } +}; + + +// Flushes all log files that contains messages that are at least of +// the specified severity level. Thread-safe. +GOOGLE_GLOG_DLL_DECL void FlushLogFiles(LogSeverity min_severity); + +// Flushes all log files that contains messages that are at least of +// the specified severity level. Thread-hostile because it ignores +// locking -- used for catastrophic failures. +GOOGLE_GLOG_DLL_DECL void FlushLogFilesUnsafe(LogSeverity min_severity); + +// +// Set the destination to which a particular severity level of log +// messages is sent. If base_filename is "", it means "don't log this +// severity". Thread-safe. +// +GOOGLE_GLOG_DLL_DECL void SetLogDestination(LogSeverity severity, + const char* base_filename); + +// +// Set the basename of the symlink to the latest log file at a given +// severity. If symlink_basename is empty, do not make a symlink. If +// you don't call this function, the symlink basename is the +// invocation name of the program. Thread-safe. +// +GOOGLE_GLOG_DLL_DECL void SetLogSymlink(LogSeverity severity, + const char* symlink_basename); + +// +// Used to send logs to some other kind of destination +// Users should subclass LogSink and override send to do whatever they want. +// Implementations must be thread-safe because a shared instance will +// be called from whichever thread ran the LOG(XXX) line. 
+class GOOGLE_GLOG_DLL_DECL LogSink { + public: + virtual ~LogSink(); + + // Sink's logging logic (message_len is such as to exclude '\n' at the end). + // This method can't use LOG() or CHECK() as logging system mutex(s) are held + // during this call. + virtual void send(LogSeverity severity, const char* full_filename, + const char* base_filename, int line, + const struct ::tm* tm_time, + const char* message, size_t message_len) = 0; + + // Redefine this to implement waiting for + // the sink's logging logic to complete. + // It will be called after each send() returns, + // but before that LogMessage exits or crashes. + // By default this function does nothing. + // Using this function one can implement complex logic for send() + // that itself involves logging; and do all this w/o causing deadlocks and + // inconsistent rearrangement of log messages. + // E.g. if a LogSink has thread-specific actions, the send() method + // can simply add the message to a queue and wake up another thread that + // handles real logging while itself making some LOG() calls; + // WaitTillSent() can be implemented to wait for that logic to complete. + // See our unittest for an example. + virtual void WaitTillSent(); + + // Returns the normal text output of the log message. + // Can be useful to implement send(). + static std::string ToString(LogSeverity severity, const char* file, int line, + const struct ::tm* tm_time, + const char* message, size_t message_len); +}; + +// Add or remove a LogSink as a consumer of logging data. Thread-safe. +GOOGLE_GLOG_DLL_DECL void AddLogSink(LogSink *destination); +GOOGLE_GLOG_DLL_DECL void RemoveLogSink(LogSink *destination); + +// +// Specify an "extension" added to the filename specified via +// SetLogDestination. This applies to all severity levels. It's +// often used to append the port we're listening on to the logfile +// name. Thread-safe. +// +GOOGLE_GLOG_DLL_DECL void SetLogFilenameExtension( + const char* filename_extension); + +// +// Make it so that all log messages of at least a particular severity +// are logged to stderr (in addition to logging to the usual log +// file(s)). Thread-safe. +// +GOOGLE_GLOG_DLL_DECL void SetStderrLogging(LogSeverity min_severity); + +// +// Make it so that all log messages go only to stderr. Thread-safe. +// +GOOGLE_GLOG_DLL_DECL void LogToStderr(); + +// +// Make it so that all log messages of at least a particular severity are +// logged via email to a list of addresses (in addition to logging to the +// usual log file(s)). The list of addresses is just a string containing +// the email addresses to send to (separated by spaces, say). Thread-safe. +// +GOOGLE_GLOG_DLL_DECL void SetEmailLogging(LogSeverity min_severity, + const char* addresses); + +// A simple function that sends email. dest is a commma-separated +// list of addressess. Thread-safe. +GOOGLE_GLOG_DLL_DECL bool SendEmail(const char *dest, + const char *subject, const char *body); + +GOOGLE_GLOG_DLL_DECL const std::vector& GetLoggingDirectories(); + +// For tests only: Clear the internal [cached] list of logging directories to +// force a refresh the next time GetLoggingDirectories is called. +// Thread-hostile. +void TestOnly_ClearLoggingDirectoriesList(); + +// Returns a set of existing temporary directories, which will be a +// subset of the directories returned by GetLogginDirectories(). +// Thread-safe. 
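A sketch of a custom sink wired in through AddLogSink(); the class name and what send() does with each message are invented for illustration:

    #include <cstdio>

    class StderrCopySink : public google::LogSink {
     public:
      virtual void send(google::LogSeverity severity, const char* /*full_filename*/,
                        const char* base_filename, int line,
                        const struct ::tm* /*tm_time*/,
                        const char* message, size_t message_len) {
        // Must not use LOG()/CHECK() here: the logging mutexes are held.
        fprintf(stderr, "[%s] %s:%d %.*s\n",
                google::GetLogSeverityName(severity), base_filename, line,
                static_cast<int>(message_len), message);
      }
    };

    // During startup:        static StderrCopySink sink;
    //                        google::AddLogSink(&sink);
    // Before shutting down:  google::RemoveLogSink(&sink);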
+GOOGLE_GLOG_DLL_DECL void GetExistingTempDirectories( + std::vector* list); + +// Print any fatal message again -- useful to call from signal handler +// so that the last thing in the output is the fatal message. +// Thread-hostile, but a race is unlikely. +GOOGLE_GLOG_DLL_DECL void ReprintFatalMessage(); + +// Truncate a log file that may be the append-only output of multiple +// processes and hence can't simply be renamed/reopened (typically a +// stdout/stderr). If the file "path" is > "limit" bytes, copy the +// last "keep" bytes to offset 0 and truncate the rest. Since we could +// be racing with other writers, this approach has the potential to +// lose very small amounts of data. For security, only follow symlinks +// if the path is /proc/self/fd/* +GOOGLE_GLOG_DLL_DECL void TruncateLogFile(const char *path, + int64 limit, int64 keep); + +// Truncate stdout and stderr if they are over the value specified by +// --max_log_size; keep the final 1MB. This function has the same +// race condition as TruncateLogFile. +GOOGLE_GLOG_DLL_DECL void TruncateStdoutStderr(); + +// Return the string representation of the provided LogSeverity level. +// Thread-safe. +GOOGLE_GLOG_DLL_DECL const char* GetLogSeverityName(LogSeverity severity); + +// --------------------------------------------------------------------- +// Implementation details that are not useful to most clients +// --------------------------------------------------------------------- + +// A Logger is the interface used by logging modules to emit entries +// to a log. A typical implementation will dump formatted data to a +// sequence of files. We also provide interfaces that will forward +// the data to another thread so that the invoker never blocks. +// Implementations should be thread-safe since the logging system +// will write to them from multiple threads. + +namespace base { + +class GOOGLE_GLOG_DLL_DECL Logger { + public: + virtual ~Logger(); + + // Writes "message[0,message_len-1]" corresponding to an event that + // occurred at "timestamp". If "force_flush" is true, the log file + // is flushed immediately. + // + // The input message has already been formatted as deemed + // appropriate by the higher level logging facility. For example, + // textual log messages already contain timestamps, and the + // file:linenumber header. + virtual void Write(bool force_flush, + time_t timestamp, + const char* message, + int message_len) = 0; + + // Flush any buffered messages + virtual void Flush() = 0; + + // Get the current LOG file size. + // The returned value is approximate since some + // logged data may not have been flushed to disk yet. + virtual uint32 LogSize() = 0; +}; + +// Get the logger for the specified severity level. The logger +// remains the property of the logging module and should not be +// deleted by the caller. Thread-safe. +extern GOOGLE_GLOG_DLL_DECL Logger* GetLogger(LogSeverity level); + +// Set the logger for the specified severity level. The logger +// becomes the property of the logging module and should not +// be deleted by the caller. Thread-safe. +extern GOOGLE_GLOG_DLL_DECL void SetLogger(LogSeverity level, Logger* logger); + +} + +// glibc has traditionally implemented two incompatible versions of +// strerror_r(). There is a poorly defined convention for picking the +// version that we want, but it is not clear whether it even works with +// all versions of glibc. 
+// So, instead, we provide this wrapper that automatically detects the +// version that is in use, and then implements POSIX semantics. +// N.B. In addition to what POSIX says, we also guarantee that "buf" will +// be set to an empty string, if this function failed. This means, in most +// cases, you do not need to check the error code and you can directly +// use the value of "buf". It will never have an undefined value. +GOOGLE_GLOG_DLL_DECL int posix_strerror_r(int err, char *buf, size_t len); + + +// A class for which we define operator<<, which does nothing. +class GOOGLE_GLOG_DLL_DECL NullStream : public LogMessage::LogStream { + public: + // Initialize the LogStream so the messages can be written somewhere + // (they'll never be actually displayed). This will be needed if a + // NullStream& is implicitly converted to LogStream&, in which case + // the overloaded NullStream::operator<< will not be invoked. + NullStream() : LogMessage::LogStream(message_buffer_, 1, 0) { } + NullStream(const char* /*file*/, int /*line*/, + const CheckOpString& /*result*/) : + LogMessage::LogStream(message_buffer_, 1, 0) { } + NullStream &stream() { return *this; } + private: + // A very short buffer for messages (which we discard anyway). This + // will be needed if NullStream& converted to LogStream& (e.g. as a + // result of a conditional expression). + char message_buffer_[2]; +}; + +// Do nothing. This operator is inline, allowing the message to be +// compiled away. The message will not be compiled away if we do +// something like (flag ? LOG(INFO) : LOG(ERROR)) << message; when +// SKIP_LOG=WARNING. In those cases, NullStream will be implicitly +// converted to LogStream and the message will be computed and then +// quietly discarded. +template +inline NullStream& operator<<(NullStream &str, const T &value) { return str; } + +// Similar to NullStream, but aborts the program (without stack +// trace), like LogMessageFatal. +class GOOGLE_GLOG_DLL_DECL NullStreamFatal : public NullStream { + public: + NullStreamFatal() { } + NullStreamFatal(const char* file, int line, const CheckOpString& result) : + NullStream(file, line, result) { } + ~NullStreamFatal() { _exit(1); } +}; + +// Install a signal handler that will dump signal information and a stack +// trace when the program crashes on certain signals. We'll install the +// signal handler for the following signals. +// +// SIGSEGV, SIGILL, SIGFPE, SIGABRT, SIGBUS, and SIGTERM. +// +// By default, the signal handler will write the failure dump to the +// standard error. You can customize the destination by installing your +// own writer function by InstallFailureWriter() below. +// +// Note on threading: +// +// The function should be called before threads are created, if you want +// to use the failure signal handler for all threads. The stack trace +// will be shown only for the thread that receives the signal. In other +// words, stack traces of other threads won't be shown. +GOOGLE_GLOG_DLL_DECL void InstallFailureSignalHandler(); + +// Installs a function that is used for writing the failure dump. "data" +// is the pointer to the beginning of a message to be written, and "size" +// is the size of the message. You should not expect the data is +// terminated with '\0'. 
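To make the crash-handling contract above concrete, here is a hedged sketch (the writer name and the use of POSIX write() are illustrative assumptions, not prescribed by this patch): install the signal handler early, and optionally supply a writer that honours the length because the data is not NUL-terminated.

#include <unistd.h>
#include <glog/logging.h>

// Failure writer sketch: forward the dump verbatim; "data" is not
// guaranteed to be NUL-terminated, so always use "size".
static void CrashDumpWriter(const char* data, int size) {
  write(STDERR_FILENO, data, size);
}

int main(int, char* argv[]) {
  google::InitGoogleLogging(argv[0]);
  google::InstallFailureSignalHandler();     // before any threads are created
  google::InstallFailureWriter(CrashDumpWriter);
  // ... a SIGSEGV/SIGABRT/etc. now prints signal info and a stack trace first.
  return 0;
}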
+GOOGLE_GLOG_DLL_DECL void InstallFailureWriter( + void (*writer)(const char* data, int size)); + +} + +#endif // _LOGGING_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/glog/raw_logging.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/glog/raw_logging.h new file mode 100644 index 0000000000..c81e67bf99 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/glog/raw_logging.h @@ -0,0 +1,189 @@ +// This file is automatically generated from src/glog/raw_logging.h.in +// using src/windows/preprocess.sh. +// DO NOT EDIT! + +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Maxim Lifantsev +// +// Thread-safe logging routines that do not allocate any memory or +// acquire any locks, and can therefore be used by low-level memory +// allocation and synchronization code. + +#ifndef BASE_RAW_LOGGING_H_ +#define BASE_RAW_LOGGING_H_ + +#include + +namespace google { + +#include "glog/log_severity.h" +#include "glog/vlog_is_on.h" + +// Annoying stuff for windows -- makes sure clients can import these functions +#ifndef GOOGLE_GLOG_DLL_DECL +# if defined(_WIN32) && !defined(__CYGWIN__) +# define GOOGLE_GLOG_DLL_DECL __declspec(dllimport) +# else +# define GOOGLE_GLOG_DLL_DECL +# endif +#endif + +// This is similar to LOG(severity) << format... 
and VLOG(level) << format.., +// but +// * it is to be used ONLY by low-level modules that can't use normal LOG() +// * it is desiged to be a low-level logger that does not allocate any +// memory and does not need any locks, hence: +// * it logs straight and ONLY to STDERR w/o buffering +// * it uses an explicit format and arguments list +// * it will silently chop off really long message strings +// Usage example: +// RAW_LOG(ERROR, "Failed foo with %i: %s", status, error); +// RAW_VLOG(3, "status is %i", status); +// These will print an almost standard log lines like this to stderr only: +// E0821 211317 file.cc:123] RAW: Failed foo with 22: bad_file +// I0821 211317 file.cc:142] RAW: status is 20 +#define RAW_LOG(severity, ...) \ + do { \ + switch (google::severity) { \ + case 0: \ + RAW_LOG_INFO(__VA_ARGS__); \ + break; \ + case 1: \ + RAW_LOG_WARNING(__VA_ARGS__); \ + break; \ + case 2: \ + RAW_LOG_ERROR(__VA_ARGS__); \ + break; \ + case 3: \ + RAW_LOG_FATAL(__VA_ARGS__); \ + break; \ + default: \ + break; \ + } \ + } while (0) + +// The following STRIP_LOG testing is performed in the header file so that it's +// possible to completely compile out the logging code and the log messages. +#if STRIP_LOG == 0 +#define RAW_VLOG(verboselevel, ...) \ + do { \ + if (VLOG_IS_ON(verboselevel)) { \ + RAW_LOG_INFO(__VA_ARGS__); \ + } \ + } while (0) +#else +#define RAW_VLOG(verboselevel, ...) RawLogStub__(0, __VA_ARGS__) +#endif // STRIP_LOG == 0 + +#if STRIP_LOG == 0 +#define RAW_LOG_INFO(...) google::RawLog__(google::INFO, \ + __FILE__, __LINE__, __VA_ARGS__) +#else +#define RAW_LOG_INFO(...) google::RawLogStub__(0, __VA_ARGS__) +#endif // STRIP_LOG == 0 + +#if STRIP_LOG <= 1 +#define RAW_LOG_WARNING(...) google::RawLog__(google::WARNING, \ + __FILE__, __LINE__, __VA_ARGS__) +#else +#define RAW_LOG_WARNING(...) google::RawLogStub__(0, __VA_ARGS__) +#endif // STRIP_LOG <= 1 + +#if STRIP_LOG <= 2 +#define RAW_LOG_ERROR(...) google::RawLog__(google::ERROR, \ + __FILE__, __LINE__, __VA_ARGS__) +#else +#define RAW_LOG_ERROR(...) google::RawLogStub__(0, __VA_ARGS__) +#endif // STRIP_LOG <= 2 + +#if STRIP_LOG <= 3 +#define RAW_LOG_FATAL(...) google::RawLog__(google::FATAL, \ + __FILE__, __LINE__, __VA_ARGS__) +#else +#define RAW_LOG_FATAL(...) \ + do { \ + google::RawLogStub__(0, __VA_ARGS__); \ + exit(1); \ + } while (0) +#endif // STRIP_LOG <= 3 + +// Similar to CHECK(condition) << message, +// but for low-level modules: we use only RAW_LOG that does not allocate memory. +// We do not want to provide args list here to encourage this usage: +// if (!cond) RAW_LOG(FATAL, "foo ...", hard_to_compute_args); +// so that the args are not computed when not needed. +#define RAW_CHECK(condition, message) \ + do { \ + if (!(condition)) { \ + RAW_LOG(FATAL, "Check %s failed: %s", #condition, message); \ + } \ + } while (0) + +// Debug versions of RAW_LOG and RAW_CHECK +#ifndef NDEBUG + +#define RAW_DLOG(severity, ...) RAW_LOG(severity, __VA_ARGS__) +#define RAW_DCHECK(condition, message) RAW_CHECK(condition, message) + +#else // NDEBUG + +#define RAW_DLOG(severity, ...) \ + while (false) \ + RAW_LOG(severity, __VA_ARGS__) +#define RAW_DCHECK(condition, message) \ + while (false) \ + RAW_CHECK(condition, message) + +#endif // NDEBUG + +// Stub log function used to work around for unused variable warnings when +// building with STRIP_LOG > 0. +static inline void RawLogStub__(int ignored, ...) { +} + +// Helper function to implement RAW_LOG and RAW_VLOG +// Logs format... 
at "severity" level, reporting it +// as called from file:line. +// This does not allocate memory or acquire locks. +GOOGLE_GLOG_DLL_DECL void RawLog__(LogSeverity severity, + const char* file, + int line, + const char* format, ...) + ; + +// Hack to propagate time information into this module so that +// this module does not have to directly call localtime_r(), +// which could allocate memory. +GOOGLE_GLOG_DLL_DECL void RawLog__SetLastTime(const struct tm& t, int usecs); + +} + +#endif // BASE_RAW_LOGGING_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/glog/stl_logging.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/glog/stl_logging.h new file mode 100644 index 0000000000..d76f6c18e2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/glog/stl_logging.h @@ -0,0 +1,158 @@ +// This file is automatically generated from src/glog/stl_logging.h.in +// using src/windows/preprocess.sh. +// DO NOT EDIT! + +// Copyright (c) 2003, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Stream output operators for STL containers; to be used for logging *only*. +// Inclusion of this file lets you do: +// +// list x; +// LOG(INFO) << "data: " << x; +// vector v1, v2; +// CHECK_EQ(v1, v2); +// +// Note that if you want to use these operators from the non-global namespace, +// you may get an error since they are not in namespace std (and they are not +// in namespace std since that would result in undefined behavior). You may +// need to write +// +// using ::operator<<; +// +// to fix these errors. 
+ +#ifndef UTIL_GTL_STL_LOGGING_INL_H_ +#define UTIL_GTL_STL_LOGGING_INL_H_ + +#if !1 +# error We do not support stl_logging for this compiler +#endif + +#include +#include +#include +#include +#include +#include +#include + +#ifdef __GNUC__ +# include +# include +# include +#endif + +template +inline std::ostream& operator<<(std::ostream& out, + const std::pair& p) { + out << '(' << p.first << ", " << p.second << ')'; + return out; +} + +namespace google { + +template +inline void PrintSequence(std::ostream& out, Iter begin, Iter end) { + using ::operator<<; + // Output at most 100 elements -- appropriate if used for logging. + for (int i = 0; begin != end && i < 100; ++i, ++begin) { + if (i > 0) out << ' '; + out << *begin; + } + if (begin != end) { + out << " ..."; + } +} + +} + +#define OUTPUT_TWO_ARG_CONTAINER(Sequence) \ +template \ +inline std::ostream& operator<<(std::ostream& out, \ + const Sequence& seq) { \ + google::PrintSequence(out, seq.begin(), seq.end()); \ + return out; \ +} + +OUTPUT_TWO_ARG_CONTAINER(std::vector) +OUTPUT_TWO_ARG_CONTAINER(std::deque) +OUTPUT_TWO_ARG_CONTAINER(std::list) +#ifdef __GNUC__ +OUTPUT_TWO_ARG_CONTAINER(__gnu_cxx::slist) +#endif + +#undef OUTPUT_TWO_ARG_CONTAINER + +#define OUTPUT_THREE_ARG_CONTAINER(Sequence) \ +template \ +inline std::ostream& operator<<(std::ostream& out, \ + const Sequence& seq) { \ + google::PrintSequence(out, seq.begin(), seq.end()); \ + return out; \ +} + +OUTPUT_THREE_ARG_CONTAINER(std::set) +OUTPUT_THREE_ARG_CONTAINER(std::multiset) + +#undef OUTPUT_THREE_ARG_CONTAINER + +#define OUTPUT_FOUR_ARG_CONTAINER(Sequence) \ +template \ +inline std::ostream& operator<<(std::ostream& out, \ + const Sequence& seq) { \ + google::PrintSequence(out, seq.begin(), seq.end()); \ + return out; \ +} + +OUTPUT_FOUR_ARG_CONTAINER(std::map) +OUTPUT_FOUR_ARG_CONTAINER(std::multimap) +#ifdef __GNUC__ +OUTPUT_FOUR_ARG_CONTAINER(__gnu_cxx::hash_set) +OUTPUT_FOUR_ARG_CONTAINER(__gnu_cxx::hash_multiset) +#endif + +#undef OUTPUT_FOUR_ARG_CONTAINER + +#define OUTPUT_FIVE_ARG_CONTAINER(Sequence) \ +template \ +inline std::ostream& operator<<(std::ostream& out, \ + const Sequence& seq) { \ + google::PrintSequence(out, seq.begin(), seq.end()); \ + return out; \ +} + +#ifdef __GNUC__ +OUTPUT_FIVE_ARG_CONTAINER(__gnu_cxx::hash_map) +OUTPUT_FIVE_ARG_CONTAINER(__gnu_cxx::hash_multimap) +#endif + +#undef OUTPUT_FIVE_ARG_CONTAINER + +#endif // UTIL_GTL_STL_LOGGING_INL_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/glog/vlog_is_on.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/glog/vlog_is_on.h new file mode 100644 index 0000000000..409a4011b3 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/glog/vlog_is_on.h @@ -0,0 +1,133 @@ +// This file is automatically generated from src/glog/vlog_is_on.h.in +// using src/windows/preprocess.sh. +// DO NOT EDIT! + +// Copyright (c) 1999, 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. 
+// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: Ray Sidney and many others +// +// Defines the VLOG_IS_ON macro that controls the variable-verbosity +// conditional logging. +// +// It's used by VLOG and VLOG_IF in logging.h +// and by RAW_VLOG in raw_logging.h to trigger the logging. +// +// It can also be used directly e.g. like this: +// if (VLOG_IS_ON(2)) { +// // do some logging preparation and logging +// // that can't be accomplished e.g. via just VLOG(2) << ...; +// } +// +// The truth value that VLOG_IS_ON(level) returns is determined by +// the three verbosity level flags: +// --v= Gives the default maximal active V-logging level; +// 0 is the default. +// Normally positive values are used for V-logging levels. +// --vmodule= Gives the per-module maximal V-logging levels to override +// the value given by --v. +// E.g. "my_module=2,foo*=3" would change the logging level +// for all code in source files "my_module.*" and "foo*.*" +// ("-inl" suffixes are also disregarded for this matching). +// +// SetVLOGLevel helper function is provided to do limited dynamic control over +// V-logging by overriding the per-module settings given via --vmodule flag. +// +// CAVEAT: --vmodule functionality is not available in non gcc compilers. +// + +#ifndef BASE_VLOG_IS_ON_H_ +#define BASE_VLOG_IS_ON_H_ + +#include "glog/log_severity.h" + +// Annoying stuff for windows -- makes sure clients can import these functions +#ifndef GOOGLE_GLOG_DLL_DECL +# if defined(_WIN32) && !defined(__CYGWIN__) +# define GOOGLE_GLOG_DLL_DECL __declspec(dllimport) +# else +# define GOOGLE_GLOG_DLL_DECL +# endif +#endif + +#if defined(__GNUC__) +// We emit an anonymous static int* variable at every VLOG_IS_ON(n) site. +// (Normally) the first time every VLOG_IS_ON(n) site is hit, +// we determine what variable will dynamically control logging at this site: +// it's either FLAGS_v or an appropriate internal variable +// matching the current source file that represents results of +// parsing of --vmodule flag and/or SetVLOGLevel calls. +#define VLOG_IS_ON(verboselevel) \ + __extension__ \ + ({ static google::int32* vlocal__ = &google::kLogSiteUninitialized; \ + google::int32 verbose_level__ = (verboselevel); \ + (*vlocal__ >= verbose_level__) && \ + ((vlocal__ != &google::kLogSiteUninitialized) || \ + (google::InitVLOG3__(&vlocal__, &FLAGS_v, \ + __FILE__, verbose_level__))); }) +#else +// GNU extensions not available, so we do not support --vmodule. +// Dynamic value of FLAGS_v always controls the logging level. 
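As a small hedged illustration of the pattern documented above (the module name "my_module" and the verbosity levels are examples only, not part of this patch):

#include <glog/logging.h>

void MaybeDumpState() {
  if (VLOG_IS_ON(2)) {
    // Expensive preparation that should only happen when --v=2 (or a
    // matching --vmodule pattern) is in effect.
    VLOG(2) << "detailed state follows ...";
  }
}

// Rough runtime equivalent of passing --vmodule=my_module=3:
//   google::SetVLOGLevel("my_module", 3);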
+#define VLOG_IS_ON(verboselevel) (FLAGS_v >= (verboselevel)) +#endif + +// Set VLOG(_IS_ON) level for module_pattern to log_level. +// This lets us dynamically control what is normally set by the --vmodule flag. +// Returns the level that previously applied to module_pattern. +// NOTE: To change the log level for VLOG(_IS_ON) sites +// that have already executed after/during InitGoogleLogging, +// one needs to supply the exact --vmodule pattern that applied to them. +// (If no --vmodule pattern applied to them +// the value of FLAGS_v will continue to control them.) +extern GOOGLE_GLOG_DLL_DECL int SetVLOGLevel(const char* module_pattern, + int log_level); + +// Various declarations needed for VLOG_IS_ON above: ========================= + +// Special value used to indicate that a VLOG_IS_ON site has not been +// initialized. We make this a large value, so the common-case check +// of "*vlocal__ >= verbose_level__" in VLOG_IS_ON definition +// passes in such cases and InitVLOG3__ is then triggered. +extern google::int32 kLogSiteUninitialized; + +// Helper routine which determines the logging info for a particalur VLOG site. +// site_flag is the address of the site-local pointer to the controlling +// verbosity level +// site_default is the default to use for *site_flag +// fname is the current source file name +// verbose_level is the argument to VLOG_IS_ON +// We will return the return value for VLOG_IS_ON +// and if possible set *site_flag appropriately. +extern GOOGLE_GLOG_DLL_DECL bool InitVLOG3__( + google::int32** site_flag, + google::int32* site_default, + const char* fname, + google::int32 verbose_level); + +#endif // BASE_VLOG_IS_ON_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/port.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/port.cc new file mode 100644 index 0000000000..bfa6e70afb --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/port.cc @@ -0,0 +1,64 @@ +/* Copyright (c) 2008, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * --- + * Author: Craig Silverstein + * Copied from google-perftools and modified by Shinichiro Hamaji + */ + +#ifndef _WIN32 +# error You should only be including windows/port.cc in a windows environment! +#endif + +#include "config.h" +#include // for va_list, va_start, va_end +#include // for strstr() +#include +#include +#include +#include "port.h" + +using std::string; +using std::vector; + +// These call the windows _vsnprintf, but always NUL-terminate. +int safe_vsnprintf(char *str, size_t size, const char *format, va_list ap) { + if (size == 0) // not even room for a \0? + return -1; // not what C99 says to do, but what windows does + str[size-1] = '\0'; + return _vsnprintf(str, size-1, format, ap); +} + +int snprintf(char *str, size_t size, const char *format, ...) { + va_list ap; + va_start(ap, format); + const int r = vsnprintf(str, size, format, ap); + va_end(ap); + return r; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/port.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/port.h new file mode 100644 index 0000000000..d093bf5d34 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/port.h @@ -0,0 +1,149 @@ +/* Copyright (c) 2008, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ * + * --- + * Author: Craig Silverstein + * Copied from google-perftools and modified by Shinichiro Hamaji + * + * These are some portability typedefs and defines to make it a bit + * easier to compile this code under VC++. + * + * Several of these are taken from glib: + * http://developer.gnome.org/doc/API/glib/glib-windows-compatability-functions.html + */ + +#ifndef CTEMPLATE_WINDOWS_PORT_H_ +#define CTEMPLATE_WINDOWS_PORT_H_ + +#include "config.h" + +#ifdef _WIN32 + +#define WIN32_LEAN_AND_MEAN /* We always want minimal includes */ +#include +#include /* for gethostname */ +#include /* because we so often use open/close/etc */ +#include /* for _getcwd() */ +#include /* for _getpid() */ +#include /* read in vsnprintf decl. before redifining it */ +#include /* template_dictionary.cc uses va_copy */ +#include /* for _strnicmp(), strerror_s() */ +#include /* for localtime_s() */ +/* Note: the C++ #includes are all together at the bottom. This file is + * used by both C and C++ code, so we put all the C++ together. + */ + +/* 4244: otherwise we get problems when substracting two size_t's to an int + * 4251: it's complaining about a private struct I've chosen not to dllexport + * 4355: we use this in a constructor, but we do it safely + * 4715: for some reason VC++ stopped realizing you can't return after abort() + * 4800: we know we're casting ints/char*'s to bools, and we're ok with that + * 4996: Yes, we're ok using "unsafe" functions like fopen() and strerror() + */ +#pragma warning(disable:4244 4251 4355 4715 4800 4996) + +/* file I/O */ +#define PATH_MAX 1024 +#define access _access +#define getcwd _getcwd +#define open _open +#define read _read +#define write _write +#define lseek _lseek +#define close _close +#define popen _popen +#define pclose _pclose +#define R_OK 04 /* read-only (for access()) */ +#define S_ISDIR(m) (((m) & _S_IFMT) == _S_IFDIR) +#ifndef __MINGW32__ +enum { STDIN_FILENO = 0, STDOUT_FILENO = 1, STDERR_FILENO = 2 }; +#endif +#define S_IRUSR S_IREAD +#define S_IWUSR S_IWRITE + +/* Not quite as lightweight as a hard-link, but more than good enough for us. */ +#define link(oldpath, newpath) CopyFileA(oldpath, newpath, false) + +#define strcasecmp _stricmp +#define strncasecmp _strnicmp + +/* In windows-land, hash<> is called hash_compare<> (from xhash.h) */ +#define hash hash_compare + +/* Sleep is in ms, on windows */ +#define sleep(secs) Sleep((secs) * 1000) + +/* We can't just use _vsnprintf and _snprintf as drop-in-replacements, + * because they don't always NUL-terminate. :-( We also can't use the + * name vsnprintf, since windows defines that (but not snprintf (!)). + */ +extern int snprintf(char *str, size_t size, + const char *format, ...); +extern int safe_vsnprintf(char *str, size_t size, + const char *format, va_list ap); +#define vsnprintf(str, size, format, ap) safe_vsnprintf(str, size, format, ap) +#define va_copy(dst, src) (dst) = (src) + +/* Windows doesn't support specifying the number of buckets as a + * hash_map constructor arg, so we leave this blank. + */ +#define CTEMPLATE_SMALL_HASHTABLE + +#define DEFAULT_TEMPLATE_ROOTDIR ".." + +// ----------------------------------- SYSTEM/PROCESS +typedef int pid_t; +#define getpid _getpid + +// ----------------------------------- THREADS +typedef DWORD pthread_t; +typedef DWORD pthread_key_t; +typedef LONG pthread_once_t; +enum { PTHREAD_ONCE_INIT = 0 }; // important that this be 0! 
for SpinLock +#define pthread_self GetCurrentThreadId +#define pthread_equal(pthread_t_1, pthread_t_2) ((pthread_t_1)==(pthread_t_2)) + +inline struct tm* localtime_r(const time_t* timep, struct tm* result) { + localtime_s(result, timep); + return result; +} + +inline char* strerror_r(int errnum, char* buf, size_t buflen) { + strerror_s(buf, buflen, errnum); + return buf; +} + +#ifndef __cplusplus +/* I don't see how to get inlining for C code in MSVC. Ah well. */ +#define inline +#endif + +#endif /* _WIN32 */ + +#endif /* CTEMPLATE_WINDOWS_PORT_H_ */ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/preprocess.sh b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/preprocess.sh new file mode 100644 index 0000000000..ea4352e8e3 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/glog/src/windows/preprocess.sh @@ -0,0 +1,118 @@ +#!/bin/sh + +# Copyright (c) 2008, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# --- +# Author: Craig Silverstein +# Copied from google-perftools and modified by Shinichiro Hamaji +# +# This script is meant to be run at distribution-generation time, for +# instance by autogen.sh. It does some of the work configure would +# normally do, for windows systems. In particular, it expands all the +# @...@ variables found in .in files, and puts them here, in the windows +# directory. +# +# This script should be run before any new release. + +if [ -z "$1" ]; then + echo "USAGE: $0 " + exit 1 +fi + +DLLDEF_MACRO_NAME="GLOG_DLL_DECL" + +# The text we put in every .h files we create. As a courtesy, we'll +# include a helpful comment for windows users as to how to use +# GLOG_DLL_DECL. Apparently sed expands \n into a newline. Good! 
+DLLDEF_DEFINES="\ +// NOTE: if you are statically linking the template library into your binary\n\ +// (rather than using the template .dll), set '/D $DLLDEF_MACRO_NAME='\n\ +// as a compiler flag in your project file to turn off the dllimports.\n\ +#ifndef $DLLDEF_MACRO_NAME\n\ +# define $DLLDEF_MACRO_NAME __declspec(dllimport)\n\ +#endif" + +# Read all the windows config info into variables +# In order for the 'set' to take, this requires putting all in a subshell. +( + while read define varname value; do + [ "$define" != "#define" ] && continue + eval "$varname='$value'" + done + + # Process all the .in files in the "glog" subdirectory + mkdir -p "$1/windows/glog" + for file in `echo "$1"/glog/*.in`; do + echo "Processing $file" + outfile="$1/windows/glog/`basename $file .in`" + + echo "\ +// This file is automatically generated from $file +// using src/windows/preprocess.sh. +// DO NOT EDIT! +" > "$outfile" + # Besides replacing @...@, we also need to turn on dllimport + # We also need to replace hash by hash_compare (annoying we hard-code :-( ) + sed -e "s!@ac_windows_dllexport@!$DLLDEF_MACRO_NAME!g" \ + -e "s!@ac_windows_dllexport_defines@!$DLLDEF_DEFINES!g" \ + -e "s!@ac_cv_cxx_hash_map@!$HASH_MAP_H!g" \ + -e "s!@ac_cv_cxx_hash_namespace@!$HASH_NAMESPACE!g" \ + -e "s!@ac_cv_cxx_hash_set@!$HASH_SET_H!g" \ + -e "s!@ac_cv_have_stdint_h@!0!g" \ + -e "s!@ac_cv_have_systypes_h@!0!g" \ + -e "s!@ac_cv_have_inttypes_h@!0!g" \ + -e "s!@ac_cv_have_unistd_h@!0!g" \ + -e "s!@ac_cv_have_uint16_t@!0!g" \ + -e "s!@ac_cv_have_u_int16_t@!0!g" \ + -e "s!@ac_cv_have___uint16@!1!g" \ + -e "s!@ac_cv_have_libgflags@!0!g" \ + -e "s!@ac_cv_have___builtin_expect@!0!g" \ + -e "s!@ac_cv_cxx_using_operator@!1!g" \ + -e "s!@ac_cv___attribute___noreturn@!!g" \ + -e "s!@ac_cv___attribute___printf_4_5@!!g" \ + -e "s!@ac_google_attribute@!${HAVE___ATTRIBUTE__:-0}!g" \ + -e "s!@ac_google_end_namespace@!$_END_GOOGLE_NAMESPACE_!g" \ + -e "s!@ac_google_namespace@!$GOOGLE_NAMESPACE!g" \ + -e "s!@ac_google_start_namespace@!$_START_GOOGLE_NAMESPACE_!g" \ + -e "s!@ac_htmlparser_namespace@!$HTMLPARSER_NAMESPACE!g" \ + -e "s!\\bhash\\b!hash_compare!g" \ + "$file" >> "$outfile" + done +) < "$1/windows/config.h" + +# log_severity.h isn't a .in file. +echo "\ +// This file is automatically generated from $1/glog/log_severity.h +// using src/windows/preprocess.sh. +// DO NOT EDIT! +" > "$1/windows/glog/log_severity.h" +cat "$1/glog/log_severity.h" >> "$1/windows/glog/log_severity.h" + +echo "DONE" diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/Makefile.am b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/Makefile.am new file mode 100644 index 0000000000..bd3129e1ad --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/Makefile.am @@ -0,0 +1,43 @@ +include_HEADERS = libdis.h +lib_LTLIBRARIES = libdisasm.la +libdisasm_la_SOURCES = \ + ia32_implicit.c \ + ia32_implicit.h \ + ia32_insn.c \ + ia32_insn.h \ + ia32_invariant.c \ + ia32_invariant.h \ + ia32_modrm.c \ + ia32_modrm.h \ + ia32_opcode_tables.c \ + ia32_opcode_tables.h \ + ia32_operand.c \ + ia32_operand.h \ + ia32_reg.c \ + ia32_reg.h \ + ia32_settings.c \ + ia32_settings.h \ + libdis.h \ + qword.h \ + x86_disasm.c \ + x86_format.c \ + x86_imm.c \ + x86_imm.h \ + x86_insn.c \ + x86_misc.c \ + x86_operand_list.c \ + x86_operand_list.h + +# Cheat to get non-autoconf swig into tarball, +# even if it doesn't build by default. 
+EXTRA_DIST = \ +swig/Makefile \ +swig/libdisasm.i \ +swig/libdisasm_oop.i \ +swig/python/Makefile-swig \ +swig/perl/Makefile-swig \ +swig/perl/Makefile.PL \ +swig/ruby/Makefile-swig \ +swig/ruby/extconf.rb \ +swig/tcl/Makefile-swig \ +swig/README diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/TODO b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/TODO new file mode 100644 index 0000000000..148addf9b8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/TODO @@ -0,0 +1,43 @@ +x86_format.c +------------ +intel: jmpf -> jmp, callf -> call +att: jmpf -> ljmp, callf -> lcall + +opcode table +------------ +finish typing instructions +fix flag clear/set/toggle types + +ix64 stuff +---------- +document output file formats in web page +features doc: register aliases, implicit operands, stack mods, +ring0 flags, eflags, cpu model/isa + +ia32_handle_* implementation + +fix operand 0F C2 +CMPPS + +* sysenter, sysexit as CALL types -- preceded by MSR writes +* SYSENTER/SYSEXIT stack : overwrites SS, ESP +* stos, cmps, scas, movs, ins, outs, lods -> OP_PTR +* OP_SIZE in implicit operands +* use OP_SIZE to choose reg sizes! + +DONE?? : +implicit operands: provide action ? +e.g. add/inc for stach, write, etc +replace table numbers in opcodes.dat with +#defines for table names + +replace 0 with INSN_INVALID [or maybe FF for imnvalid and 00 for Not Applicable */ +no wait that is only for prefix tables -- n/p + +if ( prefx) only use if insn != invalid + +these should cover all the wacky disasm exceptions + +for the rep one we can chet, match only a 0x90 + +todo: privilege | ring diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_implicit.c b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_implicit.c new file mode 100644 index 0000000000..8b075d2ee0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_implicit.c @@ -0,0 +1,422 @@ +#include + +#include "ia32_implicit.h" +#include "ia32_insn.h" +#include "ia32_reg.h" +#include "x86_operand_list.h" + +/* Conventions: Register operands which are aliases of another register + * operand (e.g. AX in one operand and AL in another) assume that the + * operands are different registers and that alias tracking will resolve + * data flow. 
This means that something like + * mov ax, al + * would have 'write only' access for AX and 'read only' access for AL, + * even though both AL and AX are read and written */ +typedef struct { + uint32_t type; + uint32_t operand; +} op_implicit_list_t; + +static op_implicit_list_t list_aaa[] = + /* 37 : AAA : rw AL */ + /* 3F : AAS : rw AL */ + {{ OP_R | OP_W, REG_BYTE_OFFSET }, {0}}; /* aaa */ + +static op_implicit_list_t list_aad[] = + /* D5 0A, D5 (ib) : AAD : rw AX */ + /* D4 0A, D4 (ib) : AAM : rw AX */ + {{ OP_R | OP_W, REG_WORD_OFFSET }, {0}}; /* aad */ + +static op_implicit_list_t list_call[] = + /* E8, FF, 9A, FF : CALL : rw ESP, rw EIP */ + /* C2, C3, CA, CB : RET : rw ESP, rw EIP */ + {{ OP_R | OP_W, REG_EIP_INDEX }, + { OP_R | OP_W, REG_ESP_INDEX }, {0}}; /* call, ret */ + +static op_implicit_list_t list_cbw[] = + /* 98 : CBW : r AL, rw AX */ + {{ OP_R | OP_W, REG_WORD_OFFSET }, + { OP_R, REG_BYTE_OFFSET}, {0}}; /* cbw */ + +static op_implicit_list_t list_cwde[] = + /* 98 : CWDE : r AX, rw EAX */ + {{ OP_R | OP_W, REG_DWORD_OFFSET }, + { OP_R, REG_WORD_OFFSET }, {0}}; /* cwde */ + +static op_implicit_list_t list_clts[] = + /* 0F 06 : CLTS : rw CR0 */ + {{ OP_R | OP_W, REG_CTRL_OFFSET}, {0}}; /* clts */ + +static op_implicit_list_t list_cmpxchg[] = + /* 0F B0 : CMPXCHG : rw AL */ + {{ OP_R | OP_W, REG_BYTE_OFFSET }, {0}}; /* cmpxchg */ + +static op_implicit_list_t list_cmpxchgb[] = + /* 0F B1 : CMPXCHG : rw EAX */ + {{ OP_R | OP_W, REG_DWORD_OFFSET }, {0}}; /* cmpxchg */ + +static op_implicit_list_t list_cmpxchg8b[] = + /* 0F C7 : CMPXCHG8B : rw EDX, rw EAX, r ECX, r EBX */ + {{ OP_R | OP_W, REG_DWORD_OFFSET }, + { OP_R | OP_W, REG_DWORD_OFFSET + 2 }, + { OP_R, REG_DWORD_OFFSET + 1 }, + { OP_R, REG_DWORD_OFFSET + 3 }, {0}}; /* cmpxchg8b */ + +static op_implicit_list_t list_cpuid[] = + /* 0F A2 : CPUID : rw EAX, w EBX, w ECX, w EDX */ + {{ OP_R | OP_W, REG_DWORD_OFFSET }, + { OP_W, REG_DWORD_OFFSET + 1 }, + { OP_W, REG_DWORD_OFFSET + 2 }, + { OP_W, REG_DWORD_OFFSET + 3 }, {0}}; /* cpuid */ + +static op_implicit_list_t list_cwd[] = + /* 99 : CWD/CWQ : rw EAX, w EDX */ + {{ OP_R | OP_W, REG_DWORD_OFFSET }, + { OP_W, REG_DWORD_OFFSET + 2 }, {0}}; /* cwd */ + +static op_implicit_list_t list_daa[] = + /* 27 : DAA : rw AL */ + /* 2F : DAS : rw AL */ + {{ OP_R | OP_W, REG_BYTE_OFFSET }, {0}}; /* daa */ + +static op_implicit_list_t list_idiv[] = + /* F6 : DIV, IDIV : r AX, w AL, w AH */ + /* FIXED: first op was EAX, not Aw. TODO: verify! 
*/ + {{ OP_R, REG_WORD_OFFSET }, + { OP_W, REG_BYTE_OFFSET }, + { OP_W, REG_BYTE_OFFSET + 4 }, {0}}; /* div */ + +static op_implicit_list_t list_div[] = + /* F7 : DIV, IDIV : rw EDX, rw EAX */ + {{ OP_R | OP_W, REG_DWORD_OFFSET + 2 }, + { OP_R | OP_W, REG_DWORD_OFFSET }, {0}}; /* div */ + +static op_implicit_list_t list_enter[] = + /* C8 : ENTER : rw ESP w EBP */ + {{ OP_R | OP_W, REG_DWORD_OFFSET + 4 }, + { OP_R, REG_DWORD_OFFSET + 5 }, {0}}; /* enter */ + +static op_implicit_list_t list_f2xm1[] = + /* D9 F0 : F2XM1 : rw ST(0) */ + /* D9 E1 : FABS : rw ST(0) */ + /* D9 E0 : FCHS : rw ST(0) */ + /* D9 FF : FCOS : rw ST(0)*/ + /* D8, DA : FDIV : rw ST(0) */ + /* D8, DA : FDIVR : rw ST(0) */ + /* D9 F2 : FPTAN : rw ST(0) */ + /* D9 FC : FRNDINT : rw ST(0) */ + /* D9 FB : FSINCOS : rw ST(0) */ + /* D9 FE : FSIN : rw ST(0) */ + /* D9 FA : FSQRT : rw ST(0) */ + /* D9 F4 : FXTRACT : rw ST(0) */ + {{ OP_R | OP_W, REG_FPU_OFFSET }, {0}}; /* f2xm1 */ + +static op_implicit_list_t list_fcom[] = + /* D8, DC, DE D9 : FCOM : r ST(0) */ + /* DE, DA : FICOM : r ST(0) */ + /* DF, D8 : FIST : r ST(0) */ + /* D9 E4 : FTST : r ST(0) */ + /* D9 E5 : FXAM : r ST(0) */ + {{ OP_R, REG_FPU_OFFSET }, {0}}; /* fcom */ + +static op_implicit_list_t list_fpatan[] = + /* D9 F3 : FPATAN : r ST(0), rw ST(1) */ + {{ OP_R, REG_FPU_OFFSET }, {0}}; /* fpatan */ + +static op_implicit_list_t list_fprem[] = + /* D9 F8, D9 F5 : FPREM : rw ST(0) r ST(1) */ + /* D9 FD : FSCALE : rw ST(0), r ST(1) */ + {{ OP_R | OP_W, REG_FPU_OFFSET }, + { OP_R, REG_FPU_OFFSET + 1 }, {0}}; /* fprem */ + +static op_implicit_list_t list_faddp[] = + /* DE C1 : FADDP : r ST(0), rw ST(1) */ + /* DE E9 : FSUBP : r ST(0), rw ST(1) */ + /* D9 F1 : FYL2X : r ST(0), rw ST(1) */ + /* D9 F9 : FYL2XP1 : r ST(0), rw ST(1) */ + {{ OP_R, REG_FPU_OFFSET }, + { OP_R | OP_W, REG_FPU_OFFSET + 1 }, {0}}; /* faddp */ + +static op_implicit_list_t list_fucompp[] = + /* DA E9 : FUCOMPP : r ST(0), r ST(1) */ + {{ OP_R, REG_FPU_OFFSET }, + { OP_R, REG_FPU_OFFSET + 1 }, {0}}; /* fucompp */ + +static op_implicit_list_t list_imul[] = + /* F6 : IMUL : r AL, w AX */ + /* F6 : MUL : r AL, w AX */ + {{ OP_R, REG_BYTE_OFFSET }, + { OP_W, REG_WORD_OFFSET }, {0}}; /* imul */ + +static op_implicit_list_t list_mul[] = + /* F7 : IMUL : rw EAX, w EDX */ + /* F7 : MUL : rw EAX, w EDX */ + {{ OP_R | OP_W, REG_DWORD_OFFSET }, + { OP_W, REG_DWORD_OFFSET + 2 }, {0}}; /* imul */ + +static op_implicit_list_t list_lahf[] = + /* 9F : LAHF : r EFLAGS, w AH */ + {{ OP_R, REG_FLAGS_INDEX }, + { OP_W, REG_BYTE_OFFSET + 4 }, {0}}; /* lahf */ + +static op_implicit_list_t list_ldmxcsr[] = + /* 0F AE : LDMXCSR : w MXCSR SSE Control Status Reg */ + {{ OP_W, REG_MXCSG_INDEX }, {0}}; /* ldmxcsr */ + +static op_implicit_list_t list_leave[] = + /* C9 : LEAVE : rw ESP, w EBP */ + {{ OP_R | OP_W, REG_ESP_INDEX }, + { OP_W, REG_DWORD_OFFSET + 5 }, {0}}; /* leave */ + +static op_implicit_list_t list_lgdt[] = + /* 0F 01 : LGDT : w GDTR */ + {{ OP_W, REG_GDTR_INDEX }, {0}}; /* lgdt */ + +static op_implicit_list_t list_lidt[] = + /* 0F 01 : LIDT : w IDTR */ + {{ OP_W, REG_IDTR_INDEX }, {0}}; /* lidt */ + +static op_implicit_list_t list_lldt[] = + /* 0F 00 : LLDT : w LDTR */ + {{ OP_W, REG_LDTR_INDEX }, {0}}; /* lldt */ + +static op_implicit_list_t list_lmsw[] = + /* 0F 01 : LMSW : w CR0 */ + {{ OP_W, REG_CTRL_OFFSET }, {0}}; /* lmsw */ + +static op_implicit_list_t list_loop[] = + /* E0, E1, E2 : LOOP : rw ECX */ + {{ OP_R | OP_W, REG_DWORD_OFFSET + 1 }, {0}};/* loop */ + +static op_implicit_list_t list_ltr[] = + /* 0F 
00 : LTR : w Task Register */ + {{ OP_W, REG_TR_INDEX }, {0}}; /* ltr */ + +static op_implicit_list_t list_pop[] = + /* 8F, 58, 1F, 07, 17, 0F A1, 0F A9 : POP : rw ESP */ + /* FF, 50, 6A, 68, 0E, 16, 1E, 06, 0F A0, 0F A8 : PUSH : rw ESP */ + {{ OP_R | OP_W, REG_ESP_INDEX }, {0}}; /* pop, push */ + +static op_implicit_list_t list_popad[] = + /* 61 : POPAD : rw esp, w edi esi ebp ebx edx ecx eax */ + {{ OP_R | OP_W, REG_ESP_INDEX }, + { OP_W, REG_DWORD_OFFSET + 7 }, + { OP_W, REG_DWORD_OFFSET + 6 }, + { OP_W, REG_DWORD_OFFSET + 5 }, + { OP_W, REG_DWORD_OFFSET + 3 }, + { OP_W, REG_DWORD_OFFSET + 2 }, + { OP_W, REG_DWORD_OFFSET + 1 }, + { OP_W, REG_DWORD_OFFSET }, {0}}; /* popad */ + +static op_implicit_list_t list_popfd[] = + /* 9D : POPFD : rw esp, w eflags */ + {{ OP_R | OP_W, REG_ESP_INDEX }, + { OP_W, REG_FLAGS_INDEX }, {0}}; /* popfd */ + +static op_implicit_list_t list_pushad[] = + /* FF, 50, 6A, 68, 0E, 16, 1E, 06, 0F A0, 0F A8 : PUSH : rw ESP */ + /* 60 : PUSHAD : rw esp, r eax ecx edx ebx esp ebp esi edi */ + {{ OP_R | OP_W, REG_ESP_INDEX }, + { OP_R, REG_DWORD_OFFSET }, + { OP_R, REG_DWORD_OFFSET + 1 }, + { OP_R, REG_DWORD_OFFSET + 2 }, + { OP_R, REG_DWORD_OFFSET + 3 }, + { OP_R, REG_DWORD_OFFSET + 5 }, + { OP_R, REG_DWORD_OFFSET + 6 }, + { OP_R, REG_DWORD_OFFSET + 7 }, {0}}; /* pushad */ + +static op_implicit_list_t list_pushfd[] = + /* 9C : PUSHFD : rw esp, r eflags */ + {{ OP_R | OP_W, REG_ESP_INDEX }, + { OP_R, REG_FLAGS_INDEX }, {0}}; /* pushfd */ + +static op_implicit_list_t list_rdmsr[] = + /* 0F 32 : RDMSR : r ECX, w EDX, w EAX */ + {{ OP_R, REG_DWORD_OFFSET + 1 }, + { OP_W, REG_DWORD_OFFSET + 2 }, + { OP_W, REG_DWORD_OFFSET }, {0}}; /* rdmsr */ + +static op_implicit_list_t list_rdpmc[] = + /* 0F 33 : RDPMC : r ECX, w EDX, w EAX */ + {{ OP_R, REG_DWORD_OFFSET + 1 }, + { OP_W, REG_DWORD_OFFSET + 2 }, + { OP_W, REG_DWORD_OFFSET }, {0}}; /* rdpmc */ + +static op_implicit_list_t list_rdtsc[] = + /* 0F 31 : RDTSC : rw EDX, rw EAX */ + {{ OP_R | OP_W, REG_DWORD_OFFSET + 2 }, + { OP_R | OP_W, REG_DWORD_OFFSET }, {0}}; /* rdtsc */ + +static op_implicit_list_t list_rep[] = + /* F3, F2 ... : REP : rw ECX */ + {{ OP_R | OP_W, REG_DWORD_OFFSET + 1 }, {0}};/* rep */ + +static op_implicit_list_t list_rsm[] = + /* 0F AA : RSM : r CR4, r CR0 */ + {{ OP_R, REG_CTRL_OFFSET + 4 }, + { OP_R, REG_CTRL_OFFSET }, {0}}; /* rsm */ + +static op_implicit_list_t list_sahf[] = + /* 9E : SAHF : r ah, rw eflags (set SF ZF AF PF CF) */ + {{ OP_R, REG_DWORD_OFFSET }, {0}}; /* sahf */ + +static op_implicit_list_t list_sgdt[] = + /* 0F : SGDT : r gdtr */ + /* TODO: finish this! */ + {{ OP_R, REG_DWORD_OFFSET }, {0}}; /* sgdt */ + +static op_implicit_list_t list_sidt[] = + /* 0F : SIDT : r idtr */ + /* TODO: finish this! */ + {{ OP_R, REG_DWORD_OFFSET }, {0}}; /* sidt */ + +static op_implicit_list_t list_sldt[] = + /* 0F : SLDT : r ldtr */ + /* TODO: finish this! */ + {{ OP_R, REG_DWORD_OFFSET }, {0}}; /* sldt */ + +static op_implicit_list_t list_smsw[] = + /* 0F : SMSW : r CR0 */ + /* TODO: finish this! */ + {{ OP_R, REG_DWORD_OFFSET }, {0}}; /* smsw */ + +static op_implicit_list_t list_stmxcsr[] = + /* 0F AE : STMXCSR : r MXCSR */ + /* TODO: finish this! */ + {{ OP_R, REG_DWORD_OFFSET }, {0}}; /* stmxcsr */ + +static op_implicit_list_t list_str[] = + /* 0F 00 : STR : r TR (task register) */ + /* TODO: finish this! 
*/ + {{ OP_R, REG_DWORD_OFFSET }, {0}}; /* str */ + +static op_implicit_list_t list_sysenter[] = + /* 0F 34 : SYSENTER : w cs, w eip, w ss, w esp, r CR0, w eflags + * r sysenter_cs_msr, sysenter_esp_msr, sysenter_eip_msr */ + /* TODO: finish this! */ + {{ OP_R, REG_DWORD_OFFSET }, {0}}; /* sysenter */ + +static op_implicit_list_t list_sysexit[] = + /* 0F 35 : SYSEXIT : r edx, r ecx, w cs, w eip, w ss, w esp + * r sysenter_cs_msr */ + /* TODO: finish this! */ + {{ OP_R, REG_DWORD_OFFSET }, {0}}; /* sysexit */ + +static op_implicit_list_t list_wrmsr[] = + /* 0F 30 : WRMST : r edx, r eax, r ecx */ + /* TODO: finish this! */ + {{ OP_R, REG_DWORD_OFFSET }, {0}}; /* wrmsr */ + +static op_implicit_list_t list_xlat[] = + /* D7 : XLAT : rw al r ebx (ptr) */ + /* TODO: finish this! */ + {{ OP_R, REG_DWORD_OFFSET }, {0}}; /* xlat */ +/* TODO: + * monitor 0f 01 c8 eax OP_R ecx OP_R edx OP_R + * mwait 0f 01 c9 eax OP_R ecx OP_R + */ +static op_implicit_list_t list_monitor[] = + {{ OP_R, REG_DWORD_OFFSET }, {0}}; /* monitor */ +static op_implicit_list_t list_mwait[] = + {{ OP_R, REG_DWORD_OFFSET }, {0}}; /* mwait */ + +op_implicit_list_t *op_implicit_list[] = { + /* This is a list of implicit operands which are read/written by + * various x86 instructions. Note that modifications to the stack + * register are mentioned here, but that additional information on + * the effect an instruction has on the stack is contained in the + * x86_insn_t 'stack_mod' and 'stack_mod_val' fields. Use of the + * eflags register, i.e. setting, clearing, and testing flags, is + * not recorded here but rather in the flags_set and flags_tested + * fields of the x86_insn_t.*/ + NULL, + list_aaa, list_aad, list_call, list_cbw, /* 1 - 4 */ + list_cwde, list_clts, list_cmpxchg, list_cmpxchgb, /* 5 - 8 */ + list_cmpxchg8b, list_cpuid, list_cwd, list_daa, /* 9 - 12 */ + list_idiv, list_div, list_enter, list_f2xm1, /* 13 - 16 */ + list_fcom, list_fpatan, list_fprem, list_faddp, /* 17 - 20 */ + list_fucompp, list_imul, list_mul, list_lahf, /* 21 - 24 */ + list_ldmxcsr, list_leave, list_lgdt, list_lidt, /* 25 - 28 */ + list_lldt, list_lmsw, list_loop, list_ltr, /* 29 - 32 */ + list_pop, list_popad, list_popfd, list_pushad, /* 33 - 36 */ + list_pushfd, list_rdmsr, list_rdpmc, list_rdtsc, /* 37 - 40 */ + /* NOTE: 'REP' is a hack since it is a prefix: if its position + * in the table changes, then change IDX_IMPLICIT_REP in the .h */ + list_rep, list_rsm, list_sahf, list_sgdt, /* 41 - 44 */ + list_sidt, list_sldt, list_smsw, list_stmxcsr, /* 45 - 48 */ + list_str, list_sysenter, list_sysexit, list_wrmsr, /* 49 - 52 */ + list_xlat, list_monitor, list_mwait, /* 53 - 55*/ + NULL /* end of list */ + }; + +#define LAST_IMPL_IDX 55 + +static void handle_impl_reg( x86_op_t *op, uint32_t val ) { + x86_reg_t *reg = &op->data.reg; + op->type = op_register; + ia32_handle_register( reg, (unsigned int) val ); + switch (reg->size) { + case 1: + op->datatype = op_byte; break; + case 2: + op->datatype = op_word; break; + case 4: + op->datatype = op_dword; break; + case 8: + op->datatype = op_qword; break; + case 10: + op->datatype = op_extreal; break; + case 16: + op->datatype = op_dqword; break; + } + return; +} + +/* 'impl_idx' is the value from the opcode table: between 1 and LAST_IMPL_IDX */ +/* returns number of operands added */ +unsigned int ia32_insn_implicit_ops( x86_insn_t *insn, unsigned int impl_idx ) { + op_implicit_list_t *list; + x86_op_t *op; + unsigned int num = 0; + + if (! 
impl_idx || impl_idx > LAST_IMPL_IDX ) { + return 0; + } + + for ( list = op_implicit_list[impl_idx]; list->type; list++, num++ ) { + enum x86_op_access access = (enum x86_op_access) OP_PERM(list->type); + enum x86_op_flags flags = (enum x86_op_flags) (OP_FLAGS(list->type) >> 12); + + op = NULL; + /* In some cases (MUL), EAX is an implicit operand hardcoded in + * the instruction without being explicitly listed in assembly. + * For this situation, find the hardcoded operand and add the + * implied flag rather than adding a new implicit operand. */ + x86_oplist_t * existing; + if (ia32_true_register_id(list->operand) == REG_DWORD_OFFSET) { + for ( existing = insn->operands; existing; existing = existing->next ) { + if (existing->op.type == op_register && + existing->op.data.reg.id == list->operand) { + op = &existing->op; + break; + } + } + } + if (!op) { + op = x86_operand_new( insn ); + /* all implicit operands are registers */ + handle_impl_reg( op, list->operand ); + /* decrement the 'explicit count' incremented by default in + * x86_operand_new */ + insn->explicit_count = insn->explicit_count -1; + } + if (!op) { + return num; /* gah! return early */ + } + op->access |= access; + op->flags |= flags; + op->flags |= op_implied; + } + + return num; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_implicit.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_implicit.h new file mode 100644 index 0000000000..0002b28b9b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_implicit.h @@ -0,0 +1,13 @@ +#ifndef IA32_IMPLICIT_H +#define IA32_IMPLICIT_H + +#include "libdis.h" + +/* OK, this is a hack to deal with prefixes having implicit operands... + * thought I had removed all the old hackishness ;( */ + +#define IDX_IMPLICIT_REP 41 /* change this if the table changes! */ + +unsigned int ia32_insn_implicit_ops( x86_insn_t *insn, unsigned int impl_idx ); + +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_insn.c b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_insn.c new file mode 100644 index 0000000000..cc277608bf --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_insn.c @@ -0,0 +1,623 @@ +#include +#include +#include +#include "qword.h" + +#include "ia32_insn.h" +#include "ia32_opcode_tables.h" + +#include "ia32_reg.h" +#include "ia32_operand.h" +#include "ia32_implicit.h" +#include "ia32_settings.h" + +#include "libdis.h" + +extern ia32_table_desc_t ia32_tables[]; +extern ia32_settings_t ia32_settings; + +#define IS_SP( op ) (op->type == op_register && \ + (op->data.reg.id == REG_ESP_INDEX || \ + op->data.reg.alias == REG_ESP_INDEX) ) +#define IS_IMM( op ) (op->type == op_immediate ) + +#ifdef WIN32 +# define INLINE +#else +# define INLINE inline +#endif + +/* for calculating stack modification based on an operand */ +static INLINE int32_t long_from_operand( x86_op_t *op ) { + + if (! IS_IMM(op) ) { + return 0L; + } + + switch ( op->datatype ) { + case op_byte: + return (int32_t) op->data.sbyte; + case op_word: + return (int32_t) op->data.sword; + case op_qword: + return (int32_t) op->data.sqword; + case op_dword: + return op->data.sdword; + default: + /* these are not used in stack insn */ + break; + } + + return 0L; +} + + +/* determine what this insn does to the stack */ +static void ia32_stack_mod(x86_insn_t *insn) { + x86_op_t *dest, *src = NULL; + + if (! insn || ! 
insn->operands ) { + return; + } + + dest = &insn->operands->op; + if ( dest ) { + src = &insn->operands->next->op; + } + + insn->stack_mod = 0; + insn->stack_mod_val = 0; + + switch ( insn->type ) { + case insn_call: + case insn_callcc: + insn->stack_mod = 1; + insn->stack_mod_val = insn->addr_size * -1; + break; + case insn_push: + insn->stack_mod = 1; + insn->stack_mod_val = insn->addr_size * -1; + break; + case insn_return: + insn->stack_mod = 1; + insn->stack_mod_val = insn->addr_size; + case insn_int: case insn_intcc: + case insn_iret: + break; + case insn_pop: + insn->stack_mod = 1; + if (! IS_SP( dest ) ) { + insn->stack_mod_val = insn->op_size; + } /* else we don't know the stack change in a pop esp */ + break; + case insn_enter: + insn->stack_mod = 1; + insn->stack_mod_val = 0; /* TODO : FIX */ + break; + case insn_leave: + insn->stack_mod = 1; + insn->stack_mod_val = 0; /* TODO : FIX */ + break; + case insn_pushregs: + insn->stack_mod = 1; + insn->stack_mod_val = 0; /* TODO : FIX */ + break; + case insn_popregs: + insn->stack_mod = 1; + insn->stack_mod_val = 0; /* TODO : FIX */ + break; + case insn_pushflags: + insn->stack_mod = 1; + insn->stack_mod_val = 0; /* TODO : FIX */ + break; + case insn_popflags: + insn->stack_mod = 1; + insn->stack_mod_val = 0; /* TODO : FIX */ + break; + case insn_add: + if ( IS_SP( dest ) ) { + insn->stack_mod = 1; + insn->stack_mod_val = long_from_operand( src ); + } + break; + case insn_sub: + if ( IS_SP( dest ) ) { + insn->stack_mod = 1; + insn->stack_mod_val = long_from_operand( src ); + insn->stack_mod_val *= -1; + } + break; + case insn_inc: + if ( IS_SP( dest ) ) { + insn->stack_mod = 1; + insn->stack_mod_val = 1; + } + break; + case insn_dec: + if ( IS_SP( dest ) ) { + insn->stack_mod = 1; + insn->stack_mod_val = 1; + } + break; + case insn_mov: case insn_movcc: + case insn_xchg: case insn_xchgcc: + case insn_mul: case insn_div: + case insn_shl: case insn_shr: + case insn_rol: case insn_ror: + case insn_and: case insn_or: + case insn_not: case insn_neg: + case insn_xor: + if ( IS_SP( dest ) ) { + insn->stack_mod = 1; + } + break; + default: + break; + } + if (! strcmp("enter", insn->mnemonic) ) { + insn->stack_mod = 1; + } else if (! 
strcmp("leave", insn->mnemonic) ) { + insn->stack_mod = 1; + } + + /* for mov, etc we return 0 -- unknown stack mod */ + + return; +} + +/* get the cpu details for this insn from cpu flags int */ +static void ia32_handle_cpu( x86_insn_t *insn, unsigned int cpu ) { + insn->cpu = (enum x86_insn_cpu) CPU_MODEL(cpu); + insn->isa = (enum x86_insn_isa) (ISA_SUBSET(cpu)) >> 16; + return; +} + +/* handle mnemonic type and group */ +static void ia32_handle_mnemtype(x86_insn_t *insn, unsigned int mnemtype) { + unsigned int type = mnemtype & ~INS_FLAG_MASK; + insn->group = (enum x86_insn_group) (INS_GROUP(type)) >> 12; + insn->type = (enum x86_insn_type) INS_TYPE(type); + + return; +} + +static void ia32_handle_notes(x86_insn_t *insn, unsigned int notes) { + insn->note = (enum x86_insn_note) notes; + return; +} + +static void ia32_handle_eflags( x86_insn_t *insn, unsigned int eflags) { + unsigned int flags; + + /* handle flags effected */ + flags = INS_FLAGS_TEST(eflags); + /* handle weird OR cases */ + /* these are either JLE (ZF | SF<>OF) or JBE (CF | ZF) */ + if (flags & INS_TEST_OR) { + flags &= ~INS_TEST_OR; + if ( flags & INS_TEST_ZERO ) { + flags &= ~INS_TEST_ZERO; + if ( flags & INS_TEST_CARRY ) { + flags &= ~INS_TEST_CARRY ; + flags |= (int)insn_carry_or_zero_set; + } else if ( flags & INS_TEST_SFNEOF ) { + flags &= ~INS_TEST_SFNEOF; + flags |= (int)insn_zero_set_or_sign_ne_oflow; + } + } + } + insn->flags_tested = (enum x86_flag_status) flags; + + insn->flags_set = (enum x86_flag_status) INS_FLAGS_SET(eflags) >> 16; + + return; +} + +static void ia32_handle_prefix( x86_insn_t *insn, unsigned int prefixes ) { + + insn->prefix = (enum x86_insn_prefix) prefixes & PREFIX_MASK; // >> 20; + if (! (insn->prefix & PREFIX_PRINT_MASK) ) { + /* no printable prefixes */ + insn->prefix = insn_no_prefix; + } + + /* concat all prefix strings */ + if ( (unsigned int)insn->prefix & PREFIX_LOCK ) { + strncat(insn->prefix_string, "lock ", 32 - + strlen(insn->prefix_string)); + } + + if ( (unsigned int)insn->prefix & PREFIX_REPNZ ) { + strncat(insn->prefix_string, "repnz ", 32 - + strlen(insn->prefix_string)); + } else if ( (unsigned int)insn->prefix & PREFIX_REPZ ) { + strncat(insn->prefix_string, "repz ", 32 - + strlen(insn->prefix_string)); + } + + return; +} + + +static void reg_32_to_16( x86_op_t *op, x86_insn_t *insn, void *arg ) { + + /* if this is a 32-bit register and it is a general register ... */ + if ( op->type == op_register && op->data.reg.size == 4 && + (op->data.reg.type & reg_gen) ) { + /* WORD registers are 8 indices off from DWORD registers */ + ia32_handle_register( &(op->data.reg), + op->data.reg.id + 8 ); + } +} + +static void handle_insn_metadata( x86_insn_t *insn, ia32_insn_t *raw_insn ) { + ia32_handle_mnemtype( insn, raw_insn->mnem_flag ); + ia32_handle_notes( insn, raw_insn->notes ); + ia32_handle_eflags( insn, raw_insn->flags_effected ); + ia32_handle_cpu( insn, raw_insn->cpu ); + ia32_stack_mod( insn ); +} + +static size_t ia32_decode_insn( unsigned char *buf, size_t buf_len, + ia32_insn_t *raw_insn, x86_insn_t *insn, + unsigned int prefixes ) { + size_t size, op_size; + unsigned char modrm; + + /* this should never happen, but just in case... */ + if ( raw_insn->mnem_flag == INS_INVALID ) { + return 0; + } + + if (ia32_settings.options & opt_16_bit) { + insn->op_size = ( prefixes & PREFIX_OP_SIZE ) ? 4 : 2; + insn->addr_size = ( prefixes & PREFIX_ADDR_SIZE ) ? 4 : 2; + } else { + insn->op_size = ( prefixes & PREFIX_OP_SIZE ) ? 
2 : 4; + insn->addr_size = ( prefixes & PREFIX_ADDR_SIZE ) ? 2 : 4; + } + + + /* ++++ 1. Copy mnemonic and mnemonic-flags to CODE struct */ + if ((ia32_settings.options & opt_att_mnemonics) && raw_insn->mnemonic_att[0]) { + strncpy( insn->mnemonic, raw_insn->mnemonic_att, 16 ); + } + else { + strncpy( insn->mnemonic, raw_insn->mnemonic, 16 ); + } + ia32_handle_prefix( insn, prefixes ); + + handle_insn_metadata( insn, raw_insn ); + + /* prefetch the next byte in case it is a modr/m byte -- saves + * worrying about whether the 'mod/rm' operand or the 'reg' operand + * occurs first */ + modrm = GET_BYTE( buf, buf_len ); + + /* ++++ 2. Decode Explicit Operands */ + /* Intel uses up to 3 explicit operands in its instructions; + * the first is 'dest', the second is 'src', and the third + * is an additional source value (usually an immediate value, + * e.g. in the MUL instructions). These three explicit operands + * are encoded in the opcode tables, even if they are not used + * by the instruction. Additional implicit operands are stored + * in a supplemental table and are handled later. */ + + op_size = ia32_decode_operand( buf, buf_len, insn, raw_insn->dest, + raw_insn->dest_flag, prefixes, modrm ); + /* advance buffer, increase size if necessary */ + buf += op_size; + buf_len -= op_size; + size = op_size; + + op_size = ia32_decode_operand( buf, buf_len, insn, raw_insn->src, + raw_insn->src_flag, prefixes, modrm ); + buf += op_size; + buf_len -= op_size; + size += op_size; + + op_size = ia32_decode_operand( buf, buf_len, insn, raw_insn->aux, + raw_insn->aux_flag, prefixes, modrm ); + size += op_size; + + + /* ++++ 3. Decode Implicit Operands */ + /* apply implicit operands */ + ia32_insn_implicit_ops( insn, raw_insn->implicit_ops ); + /* we have one small inelegant hack here, to deal with + * the two prefixes that have implicit operands. If Intel + * adds more, we'll change the algorithm to suit :) */ + if ( (prefixes & PREFIX_REPZ) || (prefixes & PREFIX_REPNZ) ) { + ia32_insn_implicit_ops( insn, IDX_IMPLICIT_REP ); + } + + + /* 16-bit hack: foreach operand, if 32-bit reg, make 16-bit reg */ + if ( insn->op_size == 2 ) { + x86_operand_foreach( insn, reg_32_to_16, NULL, op_any ); + } + + return size; +} + + +/* convenience routine */ +#define USES_MOD_RM(flag) \ + (flag == ADDRMETH_E || flag == ADDRMETH_M || flag == ADDRMETH_Q || \ + flag == ADDRMETH_W || flag == ADDRMETH_R) + +static int uses_modrm_flag( unsigned int flag ) { + unsigned int meth; + if ( flag == ARG_NONE ) { + return 0; + } + meth = (flag & ADDRMETH_MASK); + if ( USES_MOD_RM(meth) ) { + return 1; + } + + return 0; +} + +/* This routine performs the actual byte-by-byte opcode table lookup. + * Originally it was pretty simple: get a byte, adjust it to a proper + * index into the table, then check the table row at that index to + * determine what to do next. But is anything that simple with Intel? + * This is now a huge, convoluted mess, mostly of bitter comments. 
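The lookup the comment above introduces boils down to: adjust the byte with the table's shift and mask, range-check it against minlim/maxlim, and index the row. A self-contained version of that arithmetic; the 8-row table is made up for illustration (it happens to mirror the classic 0x80 /digit group), and the shift of 3 with mask 0x07 is the reg-field extraction implied by the "00-FF -> 00-07" extension tables described later.

/* Sketch: the shift/mask/min/max lookup used for sparse opcode tables.
 * The table contents are illustrative, not libdisasm's real tables. */
#include <stdio.h>

typedef struct { const char *mnemonic; } entry_t;

typedef struct {
    const entry_t *table;
    unsigned char  shift;   /* 3 for ModR/M "/digit" extension tables, 0 otherwise */
    unsigned char  mask;    /* 0x07 for extensions, 0xFF for full one-byte tables */
    unsigned char  minlim;  /* first index covered by a partial table */
    unsigned char  maxlim;  /* last index covered */
} table_desc_t;

static const entry_t demo_rows[8] = {
    {"add"}, {"or"}, {"adc"}, {"sbb"}, {"and"}, {"sub"}, {"xor"}, {"cmp"}
};
static const table_desc_t demo = { demo_rows, 3, 0x07, 0, 7 };

static const entry_t *lookup(const table_desc_t *t, unsigned char byte) {
    unsigned char idx = (unsigned char)((byte >> t->shift) & t->mask);
    if (idx < t->minlim || idx > t->maxlim)
        return NULL;                    /* out of range for a partial table */
    return &t->table[idx - t->minlim];  /* adjust to an offset from entry 0 */
}

int main(void) {
    const entry_t *e = lookup(&demo, 0xE8);   /* reg field is 101 -> row 5 */
    printf("%s\n", e ? e->mnemonic : "invalid");   /* prints "sub" */
    return 0;
}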
*/ +/* buf: pointer to next byte to read from stream + * buf_len: length of buf + * table: index of table to use for lookups + * raw_insn: output pointer that receives opcode definition + * prefixes: output integer that is encoded with prefixes in insn + * returns : number of bytes consumed from stream during lookup */ +size_t ia32_table_lookup( unsigned char *buf, size_t buf_len, + unsigned int table, ia32_insn_t **raw_insn, + unsigned int *prefixes ) { + unsigned char *next, op = buf[0]; /* byte value -- 'opcode' */ + size_t size = 1, sub_size = 0, next_len; + ia32_table_desc_t *table_desc; + unsigned int subtable, prefix = 0, recurse_table = 0; + + table_desc = &ia32_tables[table]; + + op = GET_BYTE( buf, buf_len ); + + if ( table_desc->type == tbl_fpu && op > table_desc->maxlim) { + /* one of the fucking FPU tables out of the 00-BH range */ + /* OK,. this is a bit of a hack -- the proper way would + * have been to use subtables in the 00-BF FPU opcode tables, + * but that is rather wasteful of space... */ + table_desc = &ia32_tables[table +1]; + } + + /* PERFORM TABLE LOOKUP */ + + /* ModR/M trick: shift extension bits into lowest bits of byte */ + /* Note: non-ModR/M tables have a shift value of 0 */ + op >>= table_desc->shift; + + /* ModR/M trick: mask out high bits to turn extension into an index */ + /* Note: non-ModR/M tables have a mask value of 0xFF */ + op &= table_desc->mask; + + + /* Sparse table trick: check that byte is <= max value */ + /* Note: full (256-entry) tables have a maxlim of 155 */ + if ( op > table_desc->maxlim ) { + /* this is a partial table, truncated at the tail, + and op is out of range! */ + return INVALID_INSN; + } + + /* Sparse table trick: check that byte is >= min value */ + /* Note: full (256-entry) tables have a minlim of 0 */ + if ( table_desc->minlim > op ) { + /* this is a partial table, truncated at the head, + and op is out of range! */ + return INVALID_INSN; + } + /* adjust op to be an offset from table index 0 */ + op -= table_desc->minlim; + + /* Yay! 'op' is now fully adjusted to be an index into 'table' */ + *raw_insn = &(table_desc->table[op]); + //printf("BYTE %X TABLE %d OP %X\n", buf[0], table, op ); + + if ( (*raw_insn)->mnem_flag & INS_FLAG_PREFIX ) { + prefix = (*raw_insn)->mnem_flag & PREFIX_MASK; + } + + + /* handle escape to a multibyte/coproc/extension/etc table */ + /* NOTE: if insn is a prefix and has a subtable, then we + * only recurse if this is the first prefix byte -- + * that is, if *prefixes is 0. + * NOTE also that suffix tables are handled later */ + subtable = (*raw_insn)->table; + + if ( subtable && ia32_tables[subtable].type != tbl_suffix && + (! prefix || ! 
*prefixes) ) { + + if ( ia32_tables[subtable].type == tbl_ext_ext || + ia32_tables[subtable].type == tbl_fpu_ext ) { + /* opcode extension: reuse current byte in buffer */ + next = buf; + next_len = buf_len; + } else { + /* "normal" opcode: advance to next byte in buffer */ + if ( buf_len > 1 ) { + next = &buf[1]; + next_len = buf_len - 1; + } + else { + // buffer is truncated + return INVALID_INSN; + } + } + /* we encountered a multibyte opcode: recurse using the + * table specified in the opcode definition */ + sub_size = ia32_table_lookup( next, next_len, subtable, + raw_insn, prefixes ); + + /* SSE/prefix hack: if the original opcode def was a + * prefix that specified a subtable, and the subtable + * lookup returned a valid insn, then we have encountered + * an SSE opcode definition; otherwise, we pretend we + * never did the subtable lookup, and deal with the + * prefix normally later */ + if ( prefix && ( sub_size == INVALID_INSN || + INS_TYPE((*raw_insn)->mnem_flag) == INS_INVALID ) ) { + /* this is a prefix, not an SSE insn : + * lookup next byte in main table, + * subsize will be reset during the + * main table lookup */ + recurse_table = 1; + } else { + /* this is either a subtable (two-byte) insn + * or an invalid insn: either way, set prefix + * to NULL and end the opcode lookup */ + prefix = 0; + // short-circuit lookup on invalid insn + if (sub_size == INVALID_INSN) return INVALID_INSN; + } + } else if ( prefix ) { + recurse_table = 1; + } + + /* by default, we assume that we have the opcode definition, + * and there is no need to recurse on the same table, but + * if we do then a prefix was encountered... */ + if ( recurse_table ) { + /* this must have been a prefix: use the same table for + * lookup of the next byte */ + sub_size = ia32_table_lookup( &buf[1], buf_len - 1, table, + raw_insn, prefixes ); + + // short-circuit lookup on invalid insn + if (sub_size == INVALID_INSN) return INVALID_INSN; + + /* a bit of a hack for branch hints */ + if ( prefix & BRANCH_HINT_MASK ) { + if ( INS_GROUP((*raw_insn)->mnem_flag) == INS_EXEC ) { + /* segment override prefixes are invalid for + * all branch instructions, so delete them */ + prefix &= ~PREFIX_REG_MASK; + } else { + prefix &= ~BRANCH_HINT_MASK; + } + } + + /* apply prefix to instruction */ + + /* TODO: implement something enforcing prefix groups */ + (*prefixes) |= prefix; + } + + /* if this lookup was in a ModR/M table, then an opcode byte is + * NOT consumed: subtract accordingly. 
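The branch-hint fixup above relies on one prefix byte having two readings: 0x2E, for instance, is tagged PREFIX_CS | PREFIX_NOTTAKEN in the main table, and the instruction group decides which reading survives. A small sketch of that disambiguation; the constants are copied from the PREFIX_* definitions in ia32_insn.h, and the group enum is a stand-in for the INS_EXEC check.

/* Sketch: resolving a segment-override byte that doubles as a branch hint. */
#include <stdio.h>

#define PREFIX_CS        0x0100
#define PREFIX_NOTTAKEN  0x2000
#define PREFIX_REG_MASK  0x0F00
#define BRANCH_HINT_MASK 0x3000

enum group { GRP_EXEC, GRP_OTHER };   /* stand-in for INS_GROUP() == INS_EXEC */

static unsigned int resolve_prefix(unsigned int pfx, enum group grp) {
    if (pfx & BRANCH_HINT_MASK) {
        if (grp == GRP_EXEC)
            pfx &= ~PREFIX_REG_MASK;   /* branch: keep the hint, drop the override */
        else
            pfx &= ~BRANCH_HINT_MASK;  /* non-branch: it really was a segment override */
    }
    return pfx;
}

int main(void) {
    unsigned int p = PREFIX_CS | PREFIX_NOTTAKEN;   /* a 0x2E byte was seen */
    printf("%#x %#x\n", resolve_prefix(p, GRP_EXEC), resolve_prefix(p, GRP_OTHER));
    /* prints 0x2000 0x100: hint only for a branch, cs override otherwise */
    return 0;
}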
NOTE that if none of the + * operands used the ModR/M, then we need to consume the byte + * here, but ONLY in the 'top-level' opcode extension table */ + + if ( table_desc->type == tbl_ext_ext ) { + /* extensions-to-extensions never consume a byte */ + --size; + } else if ( (table_desc->type == tbl_extension || + table_desc->type == tbl_fpu || + table_desc->type == tbl_fpu_ext ) && + /* extensions that have an operand encoded in ModR/M + * never consume a byte */ + (uses_modrm_flag((*raw_insn)->dest_flag) || + uses_modrm_flag((*raw_insn)->src_flag) ) ) { + --size; + } + + size += sub_size; + + return size; +} + +static size_t handle_insn_suffix( unsigned char *buf, size_t buf_len, + ia32_insn_t *raw_insn, x86_insn_t * insn ) { + ia32_insn_t *sfx_insn; + size_t size; + unsigned int prefixes = 0; + + size = ia32_table_lookup( buf, buf_len, raw_insn->table, &sfx_insn, + &prefixes ); + if (size == INVALID_INSN || sfx_insn->mnem_flag == INS_INVALID ) { + return 0; + } + + strncpy( insn->mnemonic, sfx_insn->mnemonic, 16 ); + handle_insn_metadata( insn, sfx_insn ); + + return 1; +} + +/* invalid instructions are handled by returning 0 [error] from the + * function, setting the size of the insn to 1 byte, and copying + * the byte at the start of the invalid insn into the x86_insn_t. + * if the caller is saving the x86_insn_t for invalid instructions, + * instead of discarding them, this will maintain a consistent + * address space in the x86_insn_ts */ + +/* this function is called by the controlling disassembler, so its name and + * calling convention cannot be changed */ +/* buf points to the loc of the current opcode (start of the + * instruction) in the instruction stream. The instruction + * stream is assumed to be a buffer of bytes read directly + * from the file for the purpose of disassembly; a mem-mapped + * file is ideal for * this. + * insn points to a code structure to be filled by instr_decode + * returns the size of the decoded instruction in bytes */ +size_t ia32_disasm_addr( unsigned char * buf, size_t buf_len, + x86_insn_t *insn ) { + ia32_insn_t *raw_insn = NULL; + unsigned int prefixes = 0; + size_t size, sfx_size; + + if ( (ia32_settings.options & opt_ignore_nulls) && buf_len > 3 && + !buf[0] && !buf[1] && !buf[2] && !buf[3]) { + /* IF IGNORE_NULLS is set AND + * first 4 bytes in the intruction stream are NULL + * THEN return 0 (END_OF_DISASSEMBLY) */ + /* TODO: set errno */ + MAKE_INVALID( insn, buf ); + return 0; /* 4 00 bytes in a row? This isn't code! */ + } + + /* Perform recursive table lookup starting with main table (0) */ + size = ia32_table_lookup(buf, buf_len, idx_Main, &raw_insn, &prefixes); + if ( size == INVALID_INSN || size > buf_len || raw_insn->mnem_flag == INS_INVALID ) { + MAKE_INVALID( insn, buf ); + /* TODO: set errno */ + return 0; + } + + /* We now have the opcode itself figured out: we can decode + * the rest of the instruction. */ + size += ia32_decode_insn( &buf[size], buf_len - size, raw_insn, insn, + prefixes ); + if ( raw_insn->mnem_flag & INS_FLAG_SUFFIX ) { + /* AMD 3DNow! suffix -- get proper operand type here */ + sfx_size = handle_insn_suffix( &buf[size], buf_len - size, + raw_insn, insn ); + if (! sfx_size ) { + /* TODO: set errno */ + MAKE_INVALID( insn, buf ); + return 0; + } + + size += sfx_size; + } + + if (! 
size ) { + /* invalid insn */ + MAKE_INVALID( insn, buf ); + return 0; + } + + + insn->size = size; + return size; /* return size of instruction in bytes */ +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_insn.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_insn.h new file mode 100644 index 0000000000..d3f36c3b20 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_insn.h @@ -0,0 +1,506 @@ +#ifndef IA32_INSN_H +#define IA32_INSN_H +/* this file contains the structure of opcode definitions and the + * constants they use */ + +#include +#include "libdis.h" + + +#define GET_BYTE( buf, buf_len ) buf_len ? *buf : 0 + +#define OP_SIZE_16 1 +#define OP_SIZE_32 2 +#define ADDR_SIZE_16 4 +#define ADDR_SIZE_32 8 + +#define MAX_INSTRUCTION_SIZE 20 + +/* invalid instructions are handled by returning 0 [error] from the + * function, setting the size of the insn to 1 byte, and copying + * the byte at the start of the invalid insn into the x86_insn_t. + * if the caller is saving the x86_insn_t for invalid instructions, + * instead of discarding them, this will maintain a consistent + * address space in the x86_insn_ts */ + +#define INVALID_INSN ((size_t) -1) /* return value for invalid insn */ +#define MAKE_INVALID( i, buf ) \ + strcpy( i->mnemonic, "invalid" ); \ + x86_oplist_free( i ); \ + i->size = 1; \ + i->group = insn_none; \ + i->type = insn_invalid; \ + memcpy( i->bytes, buf, 1 ); + + +size_t ia32_disasm_addr( unsigned char * buf, size_t buf_len, + x86_insn_t *insn); + + +/* --------------------------------------------------------- Table Lookup */ +/* IA32 Instruction defintion for ia32_opcodes.c */ +typedef struct { + unsigned int table; /* escape to this sub-table */ + unsigned int mnem_flag; /* Flags referring to mnemonic */ + unsigned int notes; /* Notes for this instruction */ + unsigned int dest_flag, src_flag, aux_flag; /* and for specific operands */ + unsigned int cpu; /* minimumCPU [AND with clocks?? */ + char mnemonic[16]; /* buffers for building instruction */ + char mnemonic_att[16]; /* at&t style mnemonic name */ + int32_t dest; + int32_t src; + int32_t aux; + unsigned int flags_effected; + unsigned int implicit_ops; /* implicit operands */ +} ia32_insn_t; + + + +/* --------------------------------------------------------- Prefixes */ +/* Prefix Flags */ +/* Prefixes, same order as in the manual */ +/* had to reverse the values of the first three as they were entered into + * libdis.h incorrectly. 
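The convention spelled out in the comments above (return 0 for an invalid instruction, but record it as a 1-byte "invalid" insn that carries the offending byte) is what keeps a linear disassembly sweep aligned. A toy decoder showing the effect; the two recognized opcodes are arbitrary and this is not libdisasm's decoder.

/* Sketch: why an invalid opcode is recorded as a 1-byte "invalid" insn. */
#include <stdio.h>
#include <string.h>

typedef struct {
    char          mnemonic[16];
    unsigned int  size;
    unsigned char bytes[1];
} insn_t;

/* pretend only 0x90 (nop) and 0xC3 (ret) are known, both 1 byte */
static size_t decode(const unsigned char *buf, size_t len, insn_t *out) {
    if (len && buf[0] == 0x90) { strcpy(out->mnemonic, "nop"); out->size = 1; return 1; }
    if (len && buf[0] == 0xC3) { strcpy(out->mnemonic, "ret"); out->size = 1; return 1; }
    /* invalid: keep size 1 and the offending byte so the sweep stays aligned */
    strcpy(out->mnemonic, "invalid");
    out->size = 1;
    if (len) out->bytes[0] = buf[0];
    return 0;
}

int main(void) {
    const unsigned char code[] = { 0x90, 0xF1, 0xC3 };
    size_t pos = 0;
    while (pos < sizeof code) {
        insn_t i;
        decode(code + pos, sizeof code - pos, &i);
        printf("%02zx: %s\n", pos, i.mnemonic);
        pos += i.size;   /* advance 1 even on failure, per the convention above */
    }
    return 0;
}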
*/ +#define PREFIX_LOCK 0x0004 +#define PREFIX_REPNZ 0x0002 +#define PREFIX_REPZ 0x0001 +#define PREFIX_OP_SIZE 0x0010 +#define PREFIX_ADDR_SIZE 0x0020 +#define PREFIX_CS 0x0100 +#define PREFIX_SS 0x0200 +#define PREFIX_DS 0x0300 +#define PREFIX_ES 0x0400 +#define PREFIX_FS 0x0500 +#define PREFIX_GS 0x0600 +#define PREFIX_TAKEN 0x1000 /* branch taken */ +#define PREFIX_NOTTAKEN 0x2000 /* branch not taken */ +#define PREFIX_REG_MASK 0x0F00 +#define BRANCH_HINT_MASK 0x3000 +#define PREFIX_PRINT_MASK 0x000F /* printable prefixes */ +#define PREFIX_MASK 0xFFFF + +/* ---------------------------------------------------------- CPU Type */ + +#define cpu_8086 0x0001 +#define cpu_80286 0x0002 +#define cpu_80386 0x0003 +#define cpu_80387 0x0004 /* originally these were a co-proc */ +#define cpu_80486 0x0005 +#define cpu_PENTIUM 0x0006 +#define cpu_PENTPRO 0x0007 +#define cpu_PENTIUM2 0x0008 +#define cpu_PENTIUM3 0x0009 +#define cpu_PENTIUM4 0x000A +#define cpu_K6 0x0010 +#define cpu_K7 0x0020 +#define cpu_ATHLON 0x0030 +#define CPU_MODEL_MASK 0xFFFF +#define CPU_MODEL(cpu) (cpu & CPU_MODEL_MASK) +/* intel instruction subsets */ +#define isa_GP 0x10000 /* General Purpose Instructions */ +#define isa_FPU 0x20000 /* FPU instructions */ +#define isa_FPUMGT 0x30000 /* FPU/SIMD Management */ +#define isa_MMX 0x40000 /* MMX */ +#define isa_SSE1 0x50000 /* SSE */ +#define isa_SSE2 0x60000 /* SSE 2 */ +#define isa_SSE3 0x70000 /* SSE 3 */ +#define isa_3DNOW 0x80000 /* AMD 3d Now */ +#define isa_SYS 0x90000 /* System Instructions */ +#define ISA_SUBSET_MASK 0xFFFF0000 +#define ISA_SUBSET(isa) (isa & ISA_SUBSET_MASK) + + +/* ------------------------------------------------------ Operand Decoding */ +#define ARG_NONE 0 + +/* Using a mask allows us to store info such as OP_SIGNED in the + * operand flags field */ +#define OPFLAGS_MASK 0x0000FFFF + +/* Operand Addressing Methods, per intel manual */ +#define ADDRMETH_MASK 0x00FF0000 + +/* note: for instructions with implied operands, use no ADDRMETH */ +#define ADDRMETH_A 0x00010000 +#define ADDRMETH_C 0x00020000 +#define ADDRMETH_D 0x00030000 +#define ADDRMETH_E 0x00040000 +#define ADDRMETH_F 0x00050000 +#define ADDRMETH_G 0x00060000 +#define ADDRMETH_I 0x00070000 +#define ADDRMETH_J 0x00080000 +#define ADDRMETH_M 0x00090000 +#define ADDRMETH_O 0x000A0000 +#define ADDRMETH_P 0x000B0000 +#define ADDRMETH_Q 0x000C0000 +#define ADDRMETH_R 0x000D0000 +#define ADDRMETH_S 0x000E0000 +#define ADDRMETH_T 0x000F0000 +#define ADDRMETH_V 0x00100000 +#define ADDRMETH_W 0x00110000 +#define ADDRMETH_X 0x00120000 +#define ADDRMETH_Y 0x00130000 +#define ADDRMETH_RR 0x00140000 /* gen reg hard-coded in opcode */ +#define ADDRMETH_RS 0x00150000 /* seg reg hard-coded in opcode */ +#define ADDRMETH_RT 0x00160000 /* test reg hard-coded in opcode */ +#define ADDRMETH_RF 0x00170000 /* fpu reg hard-coded in opcode */ +#define ADDRMETH_II 0x00180000 /* immediate hard-coded in opcode */ +#define ADDRMETH_PP 0x00190000 /* mm reg ONLY in modr/m field */ +#define ADDRMETH_VV 0x001A0000 /* xmm reg ONLY in mod/rm field */ + +/* Operand Types, per intel manual */ +#define OPTYPE_MASK 0xFF000000 + +#define OPTYPE_a 0x01000000 /* BOUND: h:h or w:w */ +#define OPTYPE_b 0x02000000 /* byte */ +#define OPTYPE_c 0x03000000 /* byte or word */ +#define OPTYPE_d 0x04000000 /* word */ +#define OPTYPE_dq 0x05000000 /* qword */ +#define OPTYPE_p 0x06000000 /* 16:16 or 16:32 pointer */ +#define OPTYPE_pi 0x07000000 /* dword MMX reg */ +#define OPTYPE_ps 0x08000000 /* 128-bit single fp */ +#define OPTYPE_q 
0x09000000 /* dword */ +#define OPTYPE_s 0x0A000000 /* 6-byte descriptor */ +#define OPTYPE_ss 0x0B000000 /* scalar of 128-bit single fp */ +#define OPTYPE_si 0x0C000000 /* word general register */ +#define OPTYPE_v 0x0D000000 /* hword or word */ +#define OPTYPE_w 0x0E000000 /* hword */ +#define OPTYPE_m 0x0F000000 /* to handle LEA */ +#define OPTYPE_none 0xFF000000 /* no valid operand size, INVLPG */ + +/* custom ones for FPU instructions */ +#define OPTYPE_fs 0x10000000 /* pointer to single-real*/ +#define OPTYPE_fd 0x20000000 /* pointer to double real */ +#define OPTYPE_fe 0x30000000 /* pointer to extended real */ +#define OPTYPE_fb 0x40000000 /* pointer to packed BCD */ +#define OPTYPE_fv 0x50000000 /* pointer to FPU env: 14|28-bytes */ +#define OPTYPE_ft 0x60000000 /* pointer to FPU state: 94|108-bytes */ +#define OPTYPE_fx 0x70000000 /* pointer to FPU regs: 512 bites */ +#define OPTYPE_fp 0x80000000 /* general fpu register: dbl ext */ + +/* SSE2 operand types */ +#define OPTYPE_sd 0x90000000 /* scalar of 128-bit double fp */ +#define OPTYPE_pd 0xA0000000 /* 128-bit double fp */ + + + +/* ---------------------------------------------- Opcode Table Descriptions */ +/* the table type describes how to handle byte/size increments before + * and after lookup. Some tables re-use the current byte, others + * consume a byte only if the ModR/M encodes no operands, etc */ +enum ia32_tbl_type_id { + tbl_opcode = 0, /* standard opcode table: no surprises */ + tbl_prefix, /* Prefix Override, e.g. 66/F2/F3 */ + tbl_suffix, /* 3D Now style */ + tbl_extension, /* ModR/M extension: 00-FF -> 00-07 */ + tbl_ext_ext, /* extension of modr/m using R/M field */ + tbl_fpu, /* fpu table: 00-BF -> 00-0F */ + tbl_fpu_ext /* fpu extension : C0-FF -> 00-1F */ + }; + +/* How it works: + * Bytes are 'consumed' if the next table lookup requires that the byte + * pointer be advanced in the instruction stream. 'Does not consume' means + * that, when the lookup function recurses, the same byte it re-used in the + * new table. It also means that size is not decremented, for example when + * a ModR/M byte is used. Note that tbl_extension (ModR/M) instructions that + * do not increase the size of an insn with their operands have a forced + 3 size increase in the lookup algo. Weird, yes, confusing, yes, welcome + * to the Intel ISA. Another note: tbl_prefix is used as an override, so an + * empty insn in a prefix table causes the instruction in the original table + * to be used, rather than an invalid insn being generated. + * tbl_opcode uses current byte and consumes it + * tbl_prefix uses current byte but does not consume it + * tbl_suffix uses and consumes last byte in insn + * tbl_extension uses current byte but does not consume it + * tbl_ext_ext uses current byte but does not consume it + * tbl_fpu uses current byte and consumes it + * tbl_fpu_ext uses current byte but does not consume it + */ + +/* Convenience struct for opcode tables : these will be stored in a + * 'table of tables' so we can use a table index instead of a pointer */ +typedef struct { /* Assembly instruction tables */ + ia32_insn_t *table; /* Pointer to table of instruction encodings */ + enum ia32_tbl_type_id type; + unsigned char shift; /* amount to shift modrm byte */ + unsigned char mask; /* bit mask for look up */ + unsigned char minlim,maxlim; /* limits on min/max entries. 
*/ +} ia32_table_desc_t; + + +/* ---------------------------------------------- 'Cooked' Operand Type Info */ +/* Permissions: */ +#define OP_R 0x001 /* operand is READ */ +#define OP_W 0x002 /* operand is WRITTEN */ +#define OP_RW 0x003 /* (OP_R|OP_W): convenience macro */ +#define OP_X 0x004 /* operand is EXECUTED */ + +#define OP_PERM_MASK 0x0000007 /* perms are NOT mutually exclusive */ +#define OP_PERM( type ) (type & OP_PERM_MASK) + +/* Flags */ +#define OP_SIGNED 0x010 /* operand is signed */ + +#define OP_FLAG_MASK 0x0F0 /* mods are NOT mutually exclusive */ +#define OP_FLAGS( type ) (type & OP_FLAG_MASK) + +#define OP_REG_MASK 0x0000FFFF /* lower WORD is register ID */ +#define OP_REGTBL_MASK 0xFFFF0000 /* higher word is register type [gen/dbg] */ +#define OP_REGID( type ) (type & OP_REG_MASK) +#define OP_REGTYPE( type ) (type & OP_REGTBL_MASK) + +/* ------------------------------------------'Cooked' Instruction Type Info */ +/* high-bit opcode types/insn meta-types */ +#define INS_FLAG_PREFIX 0x10000000 /* insn is a prefix */ +#define INS_FLAG_SUFFIX 0x20000000 /* followed by a suffix byte */ +#define INS_FLAG_MASK 0xFF000000 + +/* insn notes */ +#define INS_NOTE_RING0 0x00000001 /* insn is privileged */ +#define INS_NOTE_SMM 0x00000002 /* Sys Mgt Mode only */ +#define INS_NOTE_SERIAL 0x00000004 /* serializes */ +#define INS_NOTE_NONSWAP 0x00000008 /* insn is not swapped in att format */ // could be separate field? +#define INS_NOTE_NOSUFFIX 0x00000010 /* insn has no size suffix in att format */ // could be separate field? +//#define INS_NOTE_NMI + +#define INS_INVALID 0 + +/* instruction groups */ +#define INS_EXEC 0x1000 +#define INS_ARITH 0x2000 +#define INS_LOGIC 0x3000 +#define INS_STACK 0x4000 +#define INS_COND 0x5000 +#define INS_LOAD 0x6000 +#define INS_ARRAY 0x7000 +#define INS_BIT 0x8000 +#define INS_FLAG 0x9000 +#define INS_FPU 0xA000 +#define INS_TRAPS 0xD000 +#define INS_SYSTEM 0xE000 +#define INS_OTHER 0xF000 + +#define INS_GROUP_MASK 0xF000 +#define INS_GROUP( type ) ( type & INS_GROUP_MASK ) + +/* INS_EXEC group */ +#define INS_BRANCH (INS_EXEC | 0x01) /* Unconditional branch */ +#define INS_BRANCHCC (INS_EXEC | 0x02) /* Conditional branch */ +#define INS_CALL (INS_EXEC | 0x03) /* Jump to subroutine */ +#define INS_CALLCC (INS_EXEC | 0x04) /* Jump to subroutine */ +#define INS_RET (INS_EXEC | 0x05) /* Return from subroutine */ + +/* INS_ARITH group */ +#define INS_ADD (INS_ARITH | 0x01) +#define INS_SUB (INS_ARITH | 0x02) +#define INS_MUL (INS_ARITH | 0x03) +#define INS_DIV (INS_ARITH | 0x04) +#define INS_INC (INS_ARITH | 0x05) /* increment */ +#define INS_DEC (INS_ARITH | 0x06) /* decrement */ +#define INS_SHL (INS_ARITH | 0x07) /* shift right */ +#define INS_SHR (INS_ARITH | 0x08) /* shift left */ +#define INS_ROL (INS_ARITH | 0x09) /* rotate left */ +#define INS_ROR (INS_ARITH | 0x0A) /* rotate right */ +#define INS_MIN (INS_ARITH | 0x0B) /* min func */ +#define INS_MAX (INS_ARITH | 0x0C) /* max func */ +#define INS_AVG (INS_ARITH | 0x0D) /* avg func */ +#define INS_FLR (INS_ARITH | 0x0E) /* floor func */ +#define INS_CEIL (INS_ARITH | 0x0F) /* ceiling func */ + +/* INS_LOGIC group */ +#define INS_AND (INS_LOGIC | 0x01) +#define INS_OR (INS_LOGIC | 0x02) +#define INS_XOR (INS_LOGIC | 0x03) +#define INS_NOT (INS_LOGIC | 0x04) +#define INS_NEG (INS_LOGIC | 0x05) +#define INS_NAND (INS_LOGIC | 0x06) + +/* INS_STACK group */ +#define INS_PUSH (INS_STACK | 0x01) +#define INS_POP (INS_STACK | 0x02) +#define INS_PUSHREGS (INS_STACK | 0x03) /* push register context */ 
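Each dest/src/aux flag in an opcode-table row is a single 32-bit word that packs the addressing method, the operand size class, and the access permissions; the masks defined above take it apart. A quick check with constants copied from this header, using the destination descriptor of "add Ev, Gv" (opcode 0x01 in the main table) as the example.

/* Sketch: unpacking one operand descriptor word with the masks from this header. */
#include <stdio.h>

#define OP_R          0x001
#define OP_W          0x002
#define OP_PERM_MASK  0x0000007
#define ADDRMETH_MASK 0x00FF0000
#define OPTYPE_MASK   0xFF000000
#define ADDRMETH_E    0x00040000   /* register or memory, chosen by ModR/M */
#define OPTYPE_v      0x0D000000   /* word or dword, per the operand-size attribute */

int main(void) {
    unsigned int desc = ADDRMETH_E | OPTYPE_v | OP_W | OP_R;
    printf("addrmeth %#x, optype %#x, perms %s%s\n",
           desc & ADDRMETH_MASK,
           desc & OPTYPE_MASK,
           (desc & OP_PERM_MASK) & OP_R ? "r" : "",
           (desc & OP_PERM_MASK) & OP_W ? "w" : "");
    /* prints: addrmeth 0x40000, optype 0xd000000, perms rw */
    return 0;
}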
+#define INS_POPREGS (INS_STACK | 0x04) /* pop register context */ +#define INS_PUSHFLAGS (INS_STACK | 0x05) /* push all flags */ +#define INS_POPFLAGS (INS_STACK | 0x06) /* pop all flags */ +#define INS_ENTER (INS_STACK | 0x07) /* enter stack frame */ +#define INS_LEAVE (INS_STACK | 0x08) /* leave stack frame */ + +/* INS_COND group */ +#define INS_TEST (INS_COND | 0x01) +#define INS_CMP (INS_COND | 0x02) + +/* INS_LOAD group */ +#define INS_MOV (INS_LOAD | 0x01) +#define INS_MOVCC (INS_LOAD | 0x02) +#define INS_XCHG (INS_LOAD | 0x03) +#define INS_XCHGCC (INS_LOAD | 0x04) +#define INS_CONV (INS_LOAD | 0x05) /* move and convert type */ + +/* INS_ARRAY group */ +#define INS_STRCMP (INS_ARRAY | 0x01) +#define INS_STRLOAD (INS_ARRAY | 0x02) +#define INS_STRMOV (INS_ARRAY | 0x03) +#define INS_STRSTOR (INS_ARRAY | 0x04) +#define INS_XLAT (INS_ARRAY | 0x05) + +/* INS_BIT group */ +#define INS_BITTEST (INS_BIT | 0x01) +#define INS_BITSET (INS_BIT | 0x02) +#define INS_BITCLR (INS_BIT | 0x03) + +/* INS_FLAG group */ +#define INS_CLEARCF (INS_FLAG | 0x01) /* clear Carry flag */ +#define INS_CLEARZF (INS_FLAG | 0x02) /* clear Zero flag */ +#define INS_CLEAROF (INS_FLAG | 0x03) /* clear Overflow flag */ +#define INS_CLEARDF (INS_FLAG | 0x04) /* clear Direction flag */ +#define INS_CLEARSF (INS_FLAG | 0x05) /* clear Sign flag */ +#define INS_CLEARPF (INS_FLAG | 0x06) /* clear Parity flag */ +#define INS_SETCF (INS_FLAG | 0x07) +#define INS_SETZF (INS_FLAG | 0x08) +#define INS_SETOF (INS_FLAG | 0x09) +#define INS_SETDF (INS_FLAG | 0x0A) +#define INS_SETSF (INS_FLAG | 0x0B) +#define INS_SETPF (INS_FLAG | 0x0C) +#define INS_TOGCF (INS_FLAG | 0x10) /* toggle */ +#define INS_TOGZF (INS_FLAG | 0x20) +#define INS_TOGOF (INS_FLAG | 0x30) +#define INS_TOGDF (INS_FLAG | 0x40) +#define INS_TOGSF (INS_FLAG | 0x50) +#define INS_TOGPF (INS_FLAG | 0x60) + +/* INS_FPU */ +#define INS_FMOV (INS_FPU | 0x1) +#define INS_FMOVCC (INS_FPU | 0x2) +#define INS_FNEG (INS_FPU | 0x3) +#define INS_FABS (INS_FPU | 0x4) +#define INS_FADD (INS_FPU | 0x5) +#define INS_FSUB (INS_FPU | 0x6) +#define INS_FMUL (INS_FPU | 0x7) +#define INS_FDIV (INS_FPU | 0x8) +#define INS_FSQRT (INS_FPU | 0x9) +#define INS_FCMP (INS_FPU | 0xA) +#define INS_FCOS (INS_FPU | 0xC) /* cosine */ +#define INS_FLDPI (INS_FPU | 0xD) /* load pi */ +#define INS_FLDZ (INS_FPU | 0xE) /* load 0 */ +#define INS_FTAN (INS_FPU | 0xF) /* tanget */ +#define INS_FSINE (INS_FPU | 0x10) /* sine */ +#define INS_FSYS (INS_FPU | 0x20) /* misc */ + +/* INS_TRAP */ +#define INS_TRAP (INS_TRAPS | 0x01) /* generate trap */ +#define INS_TRAPCC (INS_TRAPS | 0x02) /* conditional trap gen */ +#define INS_TRET (INS_TRAPS | 0x03) /* return from trap */ +#define INS_BOUNDS (INS_TRAPS | 0x04) /* gen bounds trap */ +#define INS_DEBUG (INS_TRAPS | 0x05) /* gen breakpoint trap */ +#define INS_TRACE (INS_TRAPS | 0x06) /* gen single step trap */ +#define INS_INVALIDOP (INS_TRAPS | 0x07) /* gen invalid insn */ +#define INS_OFLOW (INS_TRAPS | 0x08) /* gen overflow trap */ +#define INS_ICEBP (INS_TRAPS | 0x09) /* ICE breakpoint */ + +/* INS_SYSTEM */ +#define INS_HALT (INS_SYSTEM | 0x01) /* halt machine */ +#define INS_IN (INS_SYSTEM | 0x02) /* input form port */ +#define INS_OUT (INS_SYSTEM | 0x03) /* output to port */ +#define INS_CPUID (INS_SYSTEM | 0x04) /* identify cpu */ + +/* INS_OTHER */ +#define INS_NOP (INS_OTHER | 0x01) +#define INS_BCDCONV (INS_OTHER | 0x02) /* convert to/from BCD */ +#define INS_SZCONV (INS_OTHER | 0x03) /* convert size of operand */ +#define INS_SALC (INS_OTHER | 
0x04) /* set %al on carry */ +#define INS_UNKNOWN (INS_OTHER | 0x05) + + +#define INS_TYPE_MASK 0xFFFF +#define INS_TYPE( type ) ( type & INS_TYPE_MASK ) + + /* flags effected by instruction */ +#define INS_TEST_CARRY 0x01 /* carry */ +#define INS_TEST_ZERO 0x02 /* zero/equal */ +#define INS_TEST_OFLOW 0x04 /* overflow */ +#define INS_TEST_DIR 0x08 /* direction */ +#define INS_TEST_SIGN 0x10 /* negative */ +#define INS_TEST_PARITY 0x20 /* parity */ +#define INS_TEST_OR 0x40 /* used in jle */ +#define INS_TEST_NCARRY 0x100 /* ! carry */ +#define INS_TEST_NZERO 0x200 /* ! zero */ +#define INS_TEST_NOFLOW 0x400 /* ! oflow */ +#define INS_TEST_NDIR 0x800 /* ! dir */ +#define INS_TEST_NSIGN 0x100 /* ! sign */ +#define INS_TEST_NPARITY 0x2000 /* ! parity */ +/* SF == OF */ +#define INS_TEST_SFEQOF 0x4000 +/* SF != OF */ +#define INS_TEST_SFNEOF 0x8000 + +#define INS_TEST_ALL INS_TEST_CARRY | INS_TEST_ZERO | \ + INS_TEST_OFLOW | INS_TEST_SIGN | \ + INS_TEST_PARITY + +#define INS_SET_CARRY 0x010000 /* carry */ +#define INS_SET_ZERO 0x020000 /* zero/equal */ +#define INS_SET_OFLOW 0x040000 /* overflow */ +#define INS_SET_DIR 0x080000 /* direction */ +#define INS_SET_SIGN 0x100000 /* negative */ +#define INS_SET_PARITY 0x200000 /* parity */ +#define INS_SET_NCARRY 0x1000000 +#define INS_SET_NZERO 0x2000000 +#define INS_SET_NOFLOW 0x4000000 +#define INS_SET_NDIR 0x8000000 +#define INS_SET_NSIGN 0x10000000 +#define INS_SET_NPARITY 0x20000000 +#define INS_SET_SFEQOF 0x40000000 +#define INS_SET_SFNEOF 0x80000000 + +#define INS_SET_ALL INS_SET_CARRY | INS_SET_ZERO | \ + INS_SET_OFLOW | INS_SET_SIGN | \ + INS_SET_PARITY + +#define INS_TEST_MASK 0x0000FFFF +#define INS_FLAGS_TEST(x) (x & INS_TEST_MASK) +#define INS_SET_MASK 0xFFFF0000 +#define INS_FLAGS_SET(x) (x & INS_SET_MASK) + +#if 0 +/* TODO: actually start using these */ +#define X86_PAIR_NP 1 /* not pairable; execs in U */ +#define X86_PAIR_PU 2 /* pairable in U pipe */ +#define X86_PAIR_PV 3 /* pairable in V pipe */ +#define X86_PAIR_UV 4 /* pairable in UV pipe */ +#define X86_PAIR_FX 5 /* pairable with FXCH */ + +#define X86_EXEC_PORT_0 1 +#define X86_EXEC_PORT_1 2 +#define X86_EXEC_PORT_2 4 +#define X86_EXEC_PORT_3 8 +#define X86_EXEC_PORT_4 16 + +#define X86_EXEC_UNITS + +typedef struct { /* representation of an insn during decoding */ + uint32_t flags; /* runtime settings */ + /* instruction prefixes and other foolishness */ + uint32_t prefix; /* encoding of prefix */ + char prefix_str[16]; /* mnemonics for prefix */ + uint32_t branch_hint; /* gah! 
*/ + unsigned int cpu_ver; /* TODO: cpu version */ + unsigned int clocks; /* TODO: clock cycles: min/max */ + unsigned char last_prefix; + /* runtime intruction decoding helpers */ + unsigned char mode; /* 16, 32, 64 */ + unsigned char gen_regs; /* offset of default general reg set */ + unsigned char sz_operand; /* operand size for insn */ + unsigned char sz_address; /* address size for insn */ + unsigned char uops; /* uops per insn */ + unsigned char pairing; /* np,pu,pv.lv */ + unsigned char exec_unit; + unsigned char exec_port; + unsigned char latency; +} ia32_info_t; +#define MODE_32 0 /* default */ +#define MODE_16 1 +#define MODE_64 2 +#endif + +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_invariant.c b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_invariant.c new file mode 100644 index 0000000000..68ec153d27 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_invariant.c @@ -0,0 +1,313 @@ +#include +#include + +#include "ia32_invariant.h" +#include "ia32_insn.h" +#include "ia32_settings.h" + +extern ia32_table_desc_t *ia32_tables; +extern ia32_settings_t ia32_settings; + +extern size_t ia32_table_lookup( unsigned char *buf, size_t buf_len, + unsigned int table, ia32_insn_t **raw_insn, + unsigned int *prefixes ); + + +/* -------------------------------- ModR/M, SIB */ +/* Convenience flags */ +#define MODRM_EA 1 /* ModR/M is an effective addr */ +#define MODRM_reg 2 /* ModR/M is a register */ + +/* ModR/M flags */ +#define MODRM_RM_SIB 0x04 /* R/M == 100 */ +#define MODRM_RM_NOREG 0x05 /* R/B == 101 */ +/* if (MODRM.MOD_NODISP && MODRM.RM_NOREG) then just disp32 */ +#define MODRM_MOD_NODISP 0x00 /* mod == 00 */ +#define MODRM_MOD_DISP8 0x01 /* mod == 01 */ +#define MODRM_MOD_DISP32 0x02 /* mod == 10 */ +#define MODRM_MOD_NOEA 0x03 /* mod == 11 */ +/* 16-bit modrm flags */ +#define MOD16_MOD_NODISP 0 +#define MOD16_MOD_DISP8 1 +#define MOD16_MOD_DISP16 2 +#define MOD16_MOD_REG 3 + +#define MOD16_RM_BXSI 0 +#define MOD16_RM_BXDI 1 +#define MOD16_RM_BPSI 2 +#define MOD16_RM_BPDI 3 +#define MOD16_RM_SI 4 +#define MOD16_RM_DI 5 +#define MOD16_RM_BP 6 +#define MOD16_RM_BX 7 + +/* SIB flags */ +#define SIB_INDEX_NONE 0x04 +#define SIB_BASE_EBP 0x05 +#define SIB_SCALE_NOBASE 0x00 + +/* Convenience struct for modR/M bitfield */ +struct modRM_byte { + unsigned int mod : 2; + unsigned int reg : 3; + unsigned int rm : 3; +}; + +/* Convenience struct for SIB bitfield */ +struct SIB_byte { + unsigned int scale : 2; + unsigned int index : 3; + unsigned int base : 3; +}; + +#ifdef WIN32 +static void byte_decode(unsigned char b, struct modRM_byte *modrm) { +#else +static inline void byte_decode(unsigned char b, struct modRM_byte *modrm) { +#endif + /* generic bitfield-packing routine */ + + modrm->mod = b >> 6; /* top 2 bits */ + modrm->reg = (b & 56) >> 3; /* middle 3 bits */ + modrm->rm = b & 7; /* bottom 3 bits */ +} +static int ia32_invariant_modrm( unsigned char *in, unsigned char *out, + unsigned int mode_16, x86_invariant_op_t *op) { + struct modRM_byte modrm; + struct SIB_byte sib; + unsigned char *c, *cin; + unsigned short *s; + unsigned int *i; + int size = 0; /* modrm byte is already counted */ + + + byte_decode(*in, &modrm); /* get bitfields */ + + out[0] = in[0]; /* save modrm byte */ + cin = &in[1]; + c = &out[1]; + s = (unsigned short *)&out[1]; + i = (unsigned int *)&out[1]; + + op->type = op_expression; + op->flags |= op_pointer; + if ( ! 
mode_16 && modrm.rm == MODRM_RM_SIB && + modrm.mod != MODRM_MOD_NOEA ) { + size ++; + byte_decode(*cin, (struct modRM_byte *)(void*)&sib); + + out[1] = in[1]; /* save sib byte */ + cin = &in[2]; + c = &out[2]; + s = (unsigned short *)&out[2]; + i = (unsigned int *)&out[2]; + + if ( sib.base == SIB_BASE_EBP && ! modrm.mod ) { + /* disp 32 is variant! */ + memset( i, X86_WILDCARD_BYTE, 4 ); + size += 4; + } + } + + if (! modrm.mod && modrm.rm == 101) { + if ( mode_16 ) { /* straight RVA in disp */ + memset( s, X86_WILDCARD_BYTE, 2 ); + size += 2; + } else { + memset( i, X86_WILDCARD_BYTE, 2 ); + size += 4; + } + } else if (modrm.mod && modrm.mod < 3) { + if (modrm.mod == MODRM_MOD_DISP8) { /* offset in disp */ + *c = *cin; + size += 1; + } else if ( mode_16 ) { + *s = (* ((unsigned short *) cin)); + size += 2; + } else { + *i = (*((unsigned int *) cin)); + size += 4; + } + } else if ( modrm.mod == 3 ) { + op->type = op_register; + op->flags &= ~op_pointer; + } + + return (size); +} + + +static int ia32_decode_invariant( unsigned char *buf, size_t buf_len, + ia32_insn_t *t, unsigned char *out, + unsigned int prefixes, x86_invariant_t *inv) { + + unsigned int addr_size, op_size, mode_16; + unsigned int op_flags[3] = { t->dest_flag, t->src_flag, t->aux_flag }; + int x, type, bytes = 0, size = 0, modrm = 0; + + /* set addressing mode */ + if (ia32_settings.options & opt_16_bit) { + op_size = ( prefixes & PREFIX_OP_SIZE ) ? 4 : 2; + addr_size = ( prefixes & PREFIX_ADDR_SIZE ) ? 4 : 2; + mode_16 = ( prefixes & PREFIX_ADDR_SIZE ) ? 0 : 1; + } else { + op_size = ( prefixes & PREFIX_OP_SIZE ) ? 2 : 4; + addr_size = ( prefixes & PREFIX_ADDR_SIZE ) ? 2 : 4; + mode_16 = ( prefixes & PREFIX_ADDR_SIZE ) ? 1 : 0; + } + + for (x = 0; x < 3; x++) { + inv->operands[x].access = (enum x86_op_access) + OP_PERM(op_flags[x]); + inv->operands[x].flags = (enum x86_op_flags) + (OP_FLAGS(op_flags[x]) >> 12); + + switch (op_flags[x] & OPTYPE_MASK) { + case OPTYPE_c: + size = (op_size == 4) ? 2 : 1; + break; + case OPTYPE_a: case OPTYPE_v: + size = (op_size == 4) ? 4 : 2; + break; + case OPTYPE_p: + size = (op_size == 4) ? 6 : 4; + break; + case OPTYPE_b: + size = 1; + break; + case OPTYPE_w: + size = 2; + break; + case OPTYPE_d: case OPTYPE_fs: case OPTYPE_fd: + case OPTYPE_fe: case OPTYPE_fb: case OPTYPE_fv: + case OPTYPE_si: case OPTYPE_fx: + size = 4; + break; + case OPTYPE_s: + size = 6; + break; + case OPTYPE_q: case OPTYPE_pi: + size = 8; + break; + case OPTYPE_dq: case OPTYPE_ps: case OPTYPE_ss: + case OPTYPE_pd: case OPTYPE_sd: + size = 16; + break; + case OPTYPE_m: + size = (addr_size == 4) ? 
4 : 2; + break; + default: + break; + } + + type = op_flags[x] & ADDRMETH_MASK; + switch (type) { + case ADDRMETH_E: case ADDRMETH_M: case ADDRMETH_Q: + case ADDRMETH_R: case ADDRMETH_W: + modrm = 1; + bytes += ia32_invariant_modrm( buf, out, + mode_16, &inv->operands[x]); + break; + case ADDRMETH_C: case ADDRMETH_D: case ADDRMETH_G: + case ADDRMETH_P: case ADDRMETH_S: case ADDRMETH_T: + case ADDRMETH_V: + inv->operands[x].type = op_register; + modrm = 1; + break; + case ADDRMETH_A: case ADDRMETH_O: + /* pad with xF4's */ + memset( &out[bytes + modrm], X86_WILDCARD_BYTE, + size ); + bytes += size; + inv->operands[x].type = op_offset; + if ( type == ADDRMETH_O ) { + inv->operands[x].flags |= op_signed | + op_pointer; + } + break; + case ADDRMETH_I: case ADDRMETH_J: + /* grab imm value */ + if ((op_flags[x] & OPTYPE_MASK) == OPTYPE_v) { + /* assume this is an address */ + memset( &out[bytes + modrm], + X86_WILDCARD_BYTE, size ); + } else { + memcpy( &out[bytes + modrm], + &buf[bytes + modrm], size ); + } + + bytes += size; + if ( type == ADDRMETH_J ) { + if ( size == 1 ) { + inv->operands[x].type = + op_relative_near; + } else { + inv->operands[x].type = + op_relative_far; + } + inv->operands[x].flags |= op_signed; + } else { + inv->operands[x].type = op_immediate; + } + break; + case ADDRMETH_F: + inv->operands[x].type = op_register; + break; + case ADDRMETH_X: + inv->operands[x].flags |= op_signed | + op_pointer | op_ds_seg | op_string; + break; + case ADDRMETH_Y: + inv->operands[x].flags |= op_signed | + op_pointer | op_es_seg | op_string; + break; + case ADDRMETH_RR: + inv->operands[x].type = op_register; + break; + case ADDRMETH_II: + inv->operands[x].type = op_immediate; + break; + default: + inv->operands[x].type = op_unused; + break; + } + } + + return (bytes + modrm); +} + +size_t ia32_disasm_invariant( unsigned char * buf, size_t buf_len, + x86_invariant_t *inv ) { + ia32_insn_t *raw_insn = NULL; + unsigned int prefixes; + unsigned int type; + size_t size; + + /* Perform recursive table lookup starting with main table (0) */ + size = ia32_table_lookup( buf, buf_len, 0, &raw_insn, &prefixes ); + if ( size == INVALID_INSN || size > buf_len ) { + /* TODO: set errno */ + return 0; + } + + /* copy opcode bytes to buffer */ + memcpy( inv->bytes, buf, size ); + + /* set mnemonic type and group */ + type = raw_insn->mnem_flag & ~INS_FLAG_MASK; + inv->group = (enum x86_insn_group) (INS_GROUP(type)) >> 12; + inv->type = (enum x86_insn_type) INS_TYPE(type); + + /* handle operands */ + size += ia32_decode_invariant( buf + size, buf_len - size, raw_insn, + &buf[size - 1], prefixes, inv ); + + inv->size = size; + + return size; /* return size of instruction in bytes */ +} + +size_t ia32_disasm_size( unsigned char *buf, size_t buf_len ) { + x86_invariant_t inv = { {0} }; + return( ia32_disasm_invariant( buf, buf_len, &inv ) ); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_invariant.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_invariant.h new file mode 100644 index 0000000000..e1cea60e9d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_invariant.h @@ -0,0 +1,11 @@ +#ifndef IA32_INVARIANT_H +#define IA32_INVARIANT_H + +#include "libdis.h" + +size_t ia32_disasm_invariant( unsigned char *buf, size_t buf_len, + x86_invariant_t *inv); + +size_t ia32_disasm_size( unsigned char *buf, size_t buf_len ); + +#endif diff --git 
a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_modrm.c b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_modrm.c new file mode 100644 index 0000000000..b0fe2ed3d3 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_modrm.c @@ -0,0 +1,310 @@ +#include "ia32_modrm.h" +#include "ia32_reg.h" +#include "x86_imm.h" + +/* NOTE: when decoding ModR/M and SIB, we have to add 1 to all register + * values obtained from decoding the ModR/M or SIB byte, since they + * are encoded with eAX = 0 and the tables in ia32_reg.c use eAX = 1. + * ADDENDUM: this is only the case when the register value is used + * directly as an index into the register table, not when it is added to + * a genregs offset. */ + +/* -------------------------------- ModR/M, SIB */ +/* ModR/M flags */ +#define MODRM_RM_SIB 0x04 /* R/M == 100 */ +#define MODRM_RM_NOREG 0x05 /* R/B == 101 */ + +/* if (MODRM.MOD_NODISP && MODRM.RM_NOREG) then just disp32 */ +#define MODRM_MOD_NODISP 0x00 /* mod == 00 */ +#define MODRM_MOD_DISP8 0x01 /* mod == 01 */ +#define MODRM_MOD_DISP32 0x02 /* mod == 10 */ +#define MODRM_MOD_NOEA 0x03 /* mod == 11 */ + +/* 16-bit modrm flags */ +#define MOD16_MOD_NODISP 0 +#define MOD16_MOD_DISP8 1 +#define MOD16_MOD_DISP16 2 +#define MOD16_MOD_REG 3 + +#define MOD16_RM_BXSI 0 +#define MOD16_RM_BXDI 1 +#define MOD16_RM_BPSI 2 +#define MOD16_RM_BPDI 3 +#define MOD16_RM_SI 4 +#define MOD16_RM_DI 5 +#define MOD16_RM_BP 6 +#define MOD16_RM_BX 7 + +/* SIB flags */ +#define SIB_INDEX_NONE 0x04 +#define SIB_BASE_EBP 0x05 +#define SIB_SCALE_NOBASE 0x00 + +/* Convenience struct for modR/M bitfield */ +struct modRM_byte { + unsigned int mod : 2; + unsigned int reg : 3; + unsigned int rm : 3; +}; + +/* Convenience struct for SIB bitfield */ +struct SIB_byte { + unsigned int scale : 2; + unsigned int index : 3; + unsigned int base : 3; +}; + + +#if 0 +int modrm_rm[] = {0,1,2,3,MODRM_RM_SIB,MODRM_MOD_DISP32,6,7}; +int modrm_reg[] = {0, 1, 2, 3, 4, 5, 6, 7}; +int modrm_mod[] = {0, MODRM_MOD_DISP8, MODRM_MOD_DISP32, MODRM_MOD_NOEA}; +int sib_scl[] = {0, 2, 4, 8}; +int sib_idx[] = {0, 1, 2, 3, SIB_INDEX_NONE, 5, 6, 7 }; +int sib_bas[] = {0, 1, 2, 3, 4, SIB_SCALE_NOBASE, 6, 7 }; +#endif + +/* this is needed to replace x86_imm_signsized() which does not sign-extend + * to dest */ +static unsigned int imm32_signsized( unsigned char *buf, size_t buf_len, + int32_t *dest, unsigned int size ) { + if ( size > buf_len ) { + return 0; + } + + switch (size) { + case 1: + *dest = *((signed char *) buf); + break; + case 2: + *dest = *((signed short *) buf); + break; + case 4: + default: + *dest = *((signed int *) buf); + break; + } + + return size; +} + + + +static void byte_decode(unsigned char b, struct modRM_byte *modrm) { + /* generic bitfield-packing routine */ + + modrm->mod = b >> 6; /* top 2 bits */ + modrm->reg = (b & 56) >> 3; /* middle 3 bits */ + modrm->rm = b & 7; /* bottom 3 bits */ +} + + +static size_t sib_decode( unsigned char *buf, size_t buf_len, x86_ea_t *ea, + unsigned int mod ) { + /* set Address Expression fields (scale, index, base, disp) + * according to the contents of the SIB byte. 
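byte_decode is the whole trick for both ModR/M and SIB: a 2/3/3 bit split into mod/reg/rm (or scale/index/base). A standalone check of the arithmetic on one byte.

/* Sketch: the 2/3/3 bitfield split shared by ModR/M and SIB bytes. */
#include <stdio.h>

struct fields { unsigned mod, reg, rm; };   /* read as scale/index/base for SIB */

static struct fields split(unsigned char b) {
    struct fields f;
    f.mod = b >> 6;          /* top two bits */
    f.reg = (b >> 3) & 7;    /* middle three bits (the "/digit" in extension tables) */
    f.rm  = b & 7;           /* bottom three bits */
    return f;
}

int main(void) {
    struct fields f = split(0x94);
    /* mod=2 (disp32 form), reg=2, rm=4 (an SIB byte follows) */
    printf("mod=%u reg=%u rm=%u\n", f.mod, f.reg, f.rm);
    return 0;
}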
+ * b points to the SIB byte in the instruction-stream buffer; the + * byte after b[0] is therefore the byte after the SIB + * returns number of bytes 'used', including the SIB byte */ + size_t size = 1; /* start at 1 for SIB byte */ + struct SIB_byte sib; + + if ( buf_len < 1 ) { + return 0; + } + + byte_decode( *buf, (struct modRM_byte *)(void*)&sib ); /* get bit-fields */ + + if ( sib.base == SIB_BASE_EBP && ! mod ) { /* if base == 101 (ebp) */ + /* IF BASE == EBP, deal with exception */ + /* IF (ModR/M did not create a Disp */ + /* ... create a 32-bit Displacement */ + imm32_signsized( &buf[1], buf_len, &ea->disp, sizeof(int32_t)); + ea->disp_size = sizeof(int32_t); + ea->disp_sign = (ea->disp < 0) ? 1 : 0; + size += 4; /* add sizeof disp to count */ + + } else { + /* ELSE BASE refers to a General Register */ + ia32_handle_register( &ea->base, sib.base + 1 ); + } + + /* set scale to 1, 2, 4, 8 */ + ea->scale = 1 << sib.scale; + + if (sib.index != SIB_INDEX_NONE) { + /* IF INDEX is not 'ESP' (100) */ + ia32_handle_register( &ea->index, sib.index + 1 ); + } + + return (size); /* return number of bytes processed */ +} + +static size_t modrm_decode16( unsigned char *buf, unsigned int buf_len, + x86_op_t *op, struct modRM_byte *modrm ) { + /* 16-bit mode: hackish, but not as hackish as 32-bit mode ;) */ + size_t size = 1; /* # of bytes decoded [1 for modR/M byte] */ + x86_ea_t * ea = &op->data.expression; + + switch( modrm->rm ) { + case MOD16_RM_BXSI: + ia32_handle_register(&ea->base, REG_WORD_OFFSET + 3); + ia32_handle_register(&ea->index, REG_WORD_OFFSET + 6); + break; + case MOD16_RM_BXDI: + ia32_handle_register(&ea->base, REG_WORD_OFFSET + 3); + ia32_handle_register(&ea->index, REG_WORD_OFFSET + 7); + case MOD16_RM_BPSI: + op->flags |= op_ss_seg; + ia32_handle_register(&ea->base, REG_WORD_OFFSET + 5); + ia32_handle_register(&ea->index, REG_WORD_OFFSET + 6); + break; + case MOD16_RM_BPDI: + op->flags |= op_ss_seg; + ia32_handle_register(&ea->base, REG_WORD_OFFSET + 5); + ia32_handle_register(&ea->index, REG_WORD_OFFSET + 7); + break; + case MOD16_RM_SI: + ia32_handle_register(&ea->base, REG_WORD_OFFSET + 6); + break; + case MOD16_RM_DI: + ia32_handle_register(&ea->base, REG_WORD_OFFSET + 7); + break; + case MOD16_RM_BP: + if ( modrm->mod != MOD16_MOD_NODISP ) { + op->flags |= op_ss_seg; + ia32_handle_register(&ea->base, + REG_WORD_OFFSET + 5); + } + break; + case MOD16_RM_BX: + ia32_handle_register(&ea->base, REG_WORD_OFFSET + 3); + break; + } + + /* move to byte after ModR/M */ + ++buf; + --buf_len; + + if ( modrm->mod == MOD16_MOD_DISP8 ) { + imm32_signsized( buf, buf_len, &ea->disp, sizeof(char) ); + ea->disp_sign = (ea->disp < 0) ? 1 : 0; + ea->disp_size = sizeof(char); + size += sizeof(char); + } else if ( modrm->mod == MOD16_MOD_DISP16 ) { + imm32_signsized( buf, buf_len, &ea->disp, sizeof(short) ); + ea->disp_sign = (ea->disp < 0) ? 1 : 0; + ea->disp_size = sizeof(short); + size += sizeof(short); + } + + return size; +} + +/* TODO : Mark index modes + Use addressing mode flags to imply arrays (index), structure (disp), + two-dimensional arrays [disp + index], classes [ea reg], and so on. +*/ +size_t ia32_modrm_decode( unsigned char *buf, unsigned int buf_len, + x86_op_t *op, x86_insn_t *insn, size_t gen_regs ) { + /* create address expression and/or fill operand based on value of + * ModR/M byte. Calls sib_decode as appropriate. 
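Two special cases in sib_decode are worth keeping in mind: an index field of 100 means no index register, and a base field of 101 with mod 00 means no base register and a 32-bit displacement instead. A small sketch of that mapping; the ea struct here is invented for illustration, not libdisasm's x86_ea_t.

/* Sketch: turning decoded SIB fields into an address-expression shape. */
#include <stdio.h>

#define SIB_INDEX_NONE 0x04   /* index field 100: no index register */
#define SIB_BASE_EBP   0x05   /* base field 101: no base when mod == 00 */

struct ea { int has_base, base, has_index, index, scale, has_disp32; };

static struct ea sib_to_ea(unsigned scale, unsigned index, unsigned base,
                           unsigned mod) {
    struct ea ea = {0};
    ea.scale = 1 << scale;                 /* 00,01,10,11 -> x1,x2,x4,x8 */
    if (index != SIB_INDEX_NONE) { ea.has_index = 1; ea.index = (int)index; }
    if (base == SIB_BASE_EBP && mod == 0)
        ea.has_disp32 = 1;                 /* [index*scale + disp32] form */
    else { ea.has_base = 1; ea.base = (int)base; }
    return ea;
}

int main(void) {
    struct ea ea = sib_to_ea(2 /*x4*/, 1 /*ecx*/, 5 /*base field 101*/, 0 /*mod 00*/);
    printf("scale=%d index=%d base? %d disp32? %d\n",
           ea.scale, ea.index, ea.has_base, ea.has_disp32);
    /* prints: scale=4 index=1 base? 0 disp32? 1 */
    return 0;
}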
+ * flags specifies whether Reg or mod+R/M fields are being decoded + * returns the number of bytes in the instruction, including modR/M */ + struct modRM_byte modrm; + size_t size = 1; /* # of bytes decoded [1 for modR/M byte] */ + x86_ea_t * ea; + + + byte_decode(*buf, &modrm); /* get bitfields */ + + /* first, handle the case where the mod field is a register only */ + if ( modrm.mod == MODRM_MOD_NOEA ) { + op->type = op_register; + ia32_handle_register(&op->data.reg, modrm.rm + gen_regs); + /* increase insn size by 1 for modrm byte */ + return 1; + } + + /* then deal with cases where there is an effective address */ + ea = &op->data.expression; + op->type = op_expression; + op->flags |= op_pointer; + + if ( insn->addr_size == 2 ) { + /* gah! 16 bit mode! */ + return modrm_decode16( buf, buf_len, op, &modrm); + } + + /* move to byte after ModR/M */ + ++buf; + --buf_len; + + if (modrm.mod == MODRM_MOD_NODISP) { /* if mod == 00 */ + + /* IF MOD == No displacement, just Indirect Register */ + if (modrm.rm == MODRM_RM_NOREG) { /* if r/m == 101 */ + /* IF RM == No Register, just Displacement */ + /* This is an Intel Moronic Exception TM */ + imm32_signsized( buf, buf_len, &ea->disp, + sizeof(int32_t) ); + ea->disp_size = sizeof(int32_t); + ea->disp_sign = (ea->disp < 0) ? 1 : 0; + size += 4; /* add sizeof disp to count */ + + } else if (modrm.rm == MODRM_RM_SIB) { /* if r/m == 100 */ + /* ELSE IF an SIB byte is present */ + /* TODO: check for 0 retval */ + size += sib_decode( buf, buf_len, ea, modrm.mod); + /* move to byte after SIB for displacement */ + ++buf; + --buf_len; + } else { /* modR/M specifies base register */ + /* ELSE RM encodes a general register */ + ia32_handle_register( &ea->base, modrm.rm + 1 ); + } + } else { /* mod is 01 or 10 */ + if (modrm.rm == MODRM_RM_SIB) { /* rm == 100 */ + /* IF base is an AddrExpr specified by an SIB byte */ + /* TODO: check for 0 retval */ + size += sib_decode( buf, buf_len, ea, modrm.mod); + /* move to byte after SIB for displacement */ + ++buf; + --buf_len; + } else { + /* ELSE base is a general register */ + ia32_handle_register( &ea->base, modrm.rm + 1 ); + } + + /* ELSE mod + r/m specify a disp##[base] or disp##(SIB) */ + if (modrm.mod == MODRM_MOD_DISP8) { /* mod == 01 */ + /* If this is an 8-bit displacement */ + imm32_signsized( buf, buf_len, &ea->disp, + sizeof(char)); + ea->disp_size = sizeof(char); + ea->disp_sign = (ea->disp < 0) ? 1 : 0; + size += 1; /* add sizeof disp to count */ + + } else { + /* Displacement is dependent on address size */ + imm32_signsized( buf, buf_len, &ea->disp, + insn->addr_size); + ea->disp_size = insn->addr_size; + ea->disp_sign = (ea->disp < 0) ? 
1 : 0; + size += 4; + } + } + + return size; /* number of bytes found in instruction */ +} + +void ia32_reg_decode( unsigned char byte, x86_op_t *op, size_t gen_regs ) { + struct modRM_byte modrm; + byte_decode( byte, &modrm ); /* get bitfields */ + + /* set operand to register ID */ + op->type = op_register; + ia32_handle_register(&op->data.reg, modrm.reg + gen_regs); + + return; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_modrm.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_modrm.h new file mode 100644 index 0000000000..765cb0833b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_modrm.h @@ -0,0 +1,13 @@ +#ifndef IA32_MODRM_H +#define IA32_MODRM_H + +#include "libdis.h" +#include "ia32_insn.h" + +size_t ia32_modrm_decode( unsigned char *buf, unsigned int buf_len, + x86_op_t *op, x86_insn_t *insn, + size_t gen_regs ); + +void ia32_reg_decode( unsigned char byte, x86_op_t *op, size_t gen_regs ); + +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_opcode_tables.c b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_opcode_tables.c new file mode 100644 index 0000000000..ef97c7a351 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_opcode_tables.c @@ -0,0 +1,2939 @@ +#include "ia32_insn.h" + +#include "ia32_reg.h" + +#include "ia32_opcode_tables.h" + +static ia32_insn_t tbl_Main[] = { /* One-byte Opcodes */ + { 0, INS_ADD, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_G | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "add", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_ADD, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "add", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_ADD, 0, ADDRMETH_G | OPTYPE_b | OP_W | OP_R, ADDRMETH_E | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "add", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_ADD, 0, ADDRMETH_G | OPTYPE_v | OP_W | OP_R, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "add", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_ADD, 0, ADDRMETH_RR | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "add", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_ADD, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "add", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_PUSH, 0, ADDRMETH_RS | OPTYPE_w | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 0, 0, 0, 0 , 33 }, + { 0, INS_POP, 0, ADDRMETH_RS | OPTYPE_w | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pop", "", 0, 0, 0, 0 , 33 }, + { 0, INS_OR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_G | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "or", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_OR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "or", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_OR, 0, ADDRMETH_G | OPTYPE_b | OP_W | OP_R, ADDRMETH_E | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "or", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_OR, 0, ADDRMETH_G | OPTYPE_v | OP_W | OP_R, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "or", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_OR, 0, ADDRMETH_RR | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "or", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_OR, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ADDRMETH_I 
| OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "or", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_PUSH, 0, ADDRMETH_RS | OPTYPE_w | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 1, 0, 0, 0 , 33 }, + { idx_0F, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, +/* 0x10 */ + { 0, INS_ADD, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_G | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "adc", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_ADD, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "adc", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_ADD, 0, ADDRMETH_G | OPTYPE_b | OP_W | OP_R, ADDRMETH_E | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "adc", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_ADD, 0, ADDRMETH_G | OPTYPE_v | OP_W | OP_R, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "adc", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_ADD, 0, ADDRMETH_RR | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "adc", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_ADD, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "adc", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_PUSH, 0, ADDRMETH_RS | OPTYPE_w | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 2, 0, 0, 0 , 33 }, + { 0, INS_POP, 0, ADDRMETH_RS | OPTYPE_w | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pop", "", 2, 0, 0, 0 , 33 }, + { 0, INS_SUB, 0, ADDRMETH_E | OPTYPE_b | OP_SIGNED | OP_W | OP_R, ADDRMETH_G | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sbb", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_SUB, 0, ADDRMETH_E | OPTYPE_v | OP_SIGNED | OP_W | OP_R, ADDRMETH_G | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sbb", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_SUB, 0, ADDRMETH_G | OPTYPE_b | OP_W | OP_SIGNED | OP_R, ADDRMETH_E | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sbb", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_SUB, 0, ADDRMETH_G | OPTYPE_v | OP_SIGNED | OP_W | OP_R, ADDRMETH_E | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sbb", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_SUB, 0, ADDRMETH_RR | OPTYPE_b | OP_SIGNED | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sbb", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_SUB, 0, ADDRMETH_RR | OPTYPE_v | OP_SIGNED | OP_W | OP_R, ADDRMETH_I | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sbb", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_PUSH, 0, ADDRMETH_RS | OPTYPE_w | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 3, 0, 0, 0 , 33 }, + { 0, INS_POP, 0, ADDRMETH_RS | OPTYPE_w | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pop", "", 3, 0, 0, 0 , 33 }, +/* 0x20 */ + { 0, INS_AND, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_G | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "and", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_AND, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "and", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_AND, 0, ADDRMETH_G | OPTYPE_b | OP_W | OP_R, ADDRMETH_E | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "and", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_AND, 0, ADDRMETH_G | OPTYPE_v | OP_W | OP_R, ADDRMETH_E | OPTYPE_v | 
OP_R, ARG_NONE, cpu_80386 | isa_GP, "and", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_AND, 0, ADDRMETH_RR | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "and", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_AND, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "and", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_FLAG_PREFIX | PREFIX_ES, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_BCDCONV, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "daa", "", 0, 0, 0, INS_SET_SIGN|INS_SET_ZERO|INS_SET_CARRY|INS_SET_PARITY|INS_TEST_CARRY, 12 }, + { 0, INS_SUB, 0, ADDRMETH_E | OPTYPE_b | OP_SIGNED | OP_W | OP_R, ADDRMETH_G | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sub", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SUB, 0, ADDRMETH_E | OPTYPE_v | OP_SIGNED | OP_W | OP_R, ADDRMETH_G | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sub", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SUB, 0, ADDRMETH_G | OPTYPE_b | OP_SIGNED | OP_W | OP_R, ADDRMETH_E | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sub", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SUB, 0, ADDRMETH_G | OPTYPE_v | OP_SIGNED | OP_W | OP_R, ADDRMETH_E | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sub", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SUB, 0, ADDRMETH_RR | OPTYPE_b | OP_SIGNED | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sub", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SUB, 0, ADDRMETH_RR | OPTYPE_v | OP_SIGNED | OP_W | OP_R, ADDRMETH_I | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sub", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_FLAG_PREFIX | PREFIX_CS | PREFIX_NOTTAKEN, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_BCDCONV, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "das", "", 0, 0, 0, INS_SET_SIGN|INS_SET_ZERO|INS_SET_CARRY|INS_SET_PARITY|INS_TEST_CARRY, 0 }, +/* 0x30 */ + { 0, INS_XOR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_G | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xor", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_XOR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xor", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_XOR, 0, ADDRMETH_G | OPTYPE_b | OP_W | OP_R, ADDRMETH_E | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xor", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_XOR, 0, ADDRMETH_G | OPTYPE_v | OP_W | OP_R, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xor", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_XOR, 0, ADDRMETH_RR | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xor", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_XOR, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xor", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_FLAG_PREFIX | PREFIX_SS, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_BCDCONV, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "aaa", "", 0, 0, 0, INS_SET_CARRY, 1 }, + { 0, INS_CMP, 0, ADDRMETH_E | OPTYPE_b | OP_R, ADDRMETH_G | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "cmp", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_CMP, 0, ADDRMETH_E | OPTYPE_v | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "cmp", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_CMP, 0, ADDRMETH_G | OPTYPE_b 
| OP_R, ADDRMETH_E | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "cmp", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_CMP, 0, ADDRMETH_G | OPTYPE_v | OP_R, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "cmp", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_CMP, 0, ADDRMETH_RR | OPTYPE_b | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "cmp", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_CMP, 0, ADDRMETH_RR | OPTYPE_v | OP_R, ADDRMETH_I | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "cmp", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_FLAG_PREFIX | PREFIX_DS | PREFIX_TAKEN, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_BCDCONV, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "aas", "", 0, 0, 0, INS_SET_CARRY, 0 }, +/* 0x40 */ + { 0, INS_INC, 0, ADDRMETH_RR | OPTYPE_v | OP_R | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "inc", "", 0, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_INC, 0, ADDRMETH_RR | OPTYPE_v | OP_R | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "inc", "", 1, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_INC, 0, ADDRMETH_RR | OPTYPE_v | OP_R | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "inc", "", 2, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_INC, 0, ADDRMETH_RR | OPTYPE_v | OP_R | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "inc", "", 3, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_INC, 0, ADDRMETH_RR | OPTYPE_v | OP_R | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "inc", "", 4, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_INC, 0, ADDRMETH_RR | OPTYPE_v | OP_R | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "inc", "", 5, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_INC, 0, ADDRMETH_RR | OPTYPE_v | OP_R | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "inc", "", 6, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_INC, 0, ADDRMETH_RR | OPTYPE_v | OP_R | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "inc", "", 7, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_DEC, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "dec", "", 0, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_DEC, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "dec", "", 1, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_DEC, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "dec", "", 2, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_DEC, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "dec", "", 3, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_DEC, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "dec", "", 4, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_DEC, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "dec", "", 5, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_DEC, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "dec", "", 6, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_DEC, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, 
ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "dec", "", 7, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, +/* 0x50 */ + { 0, INS_PUSH, 0, ADDRMETH_RR | OPTYPE_v | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 0, 0, 0, 0 , 33 }, + { 0, INS_PUSH, 0, ADDRMETH_RR | OPTYPE_v | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 1, 0, 0, 0 , 33 }, + { 0, INS_PUSH, 0, ADDRMETH_RR | OPTYPE_v | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 2, 0, 0, 0 , 33 }, + { 0, INS_PUSH, 0, ADDRMETH_RR | OPTYPE_v | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 3, 0, 0, 0 , 33 }, + { 0, INS_PUSH, 0, ADDRMETH_RR | OPTYPE_v | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 4, 0, 0, 0 , 33 }, + { 0, INS_PUSH, 0, ADDRMETH_RR | OPTYPE_v | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 5, 0, 0, 0 , 33 }, + { 0, INS_PUSH, 0, ADDRMETH_RR | OPTYPE_v | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 6, 0, 0, 0 , 33 }, + { 0, INS_PUSH, 0, ADDRMETH_RR | OPTYPE_v | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 7, 0, 0, 0 , 33 }, + { 0, INS_POP, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pop", "", 0, 0, 0, 0 , 33 }, + { 0, INS_POP, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pop", "", 1, 0, 0, 0 , 33 }, + { 0, INS_POP, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pop", "", 2, 0, 0, 0 , 33 }, + { 0, INS_POP, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pop", "", 3, 0, 0, 0 , 33 }, + { 0, INS_POP, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pop", "", 4, 0, 0, 0 , 33 }, + { 0, INS_POP, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pop", "", 5, 0, 0, 0 , 33 }, + { 0, INS_POP, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pop", "", 6, 0, 0, 0 , 33 }, + { 0, INS_POP, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pop", "", 7, 0, 0, 0 , 33 }, +/* 0x60 */ + { 0, INS_PUSHREGS, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pusha", "", 0, 0, 0, 0 , 36 }, + { 0, INS_POPREGS, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "popa", "", 0, 0, 0, 0 , 34 }, + { 0, INS_BOUNDS, INS_NOTE_NONSWAP, ADDRMETH_G | OPTYPE_v | OP_R, ADDRMETH_M | OPTYPE_a | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "bound", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_w | OP_R | OP_W, ADDRMETH_G | OPTYPE_w | OP_R, ARG_NONE, cpu_80386 | isa_GP, "arpl", "", 0, 0, 0, INS_SET_ZERO, 0 }, + { 0, INS_FLAG_PREFIX | PREFIX_FS, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_FLAG_PREFIX | PREFIX_GS, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_66, INS_FLAG_PREFIX | PREFIX_OP_SIZE, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_FLAG_PREFIX | PREFIX_ADDR_SIZE, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_PUSH, 0, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 0, 0, 0, 0 , 33 }, + { 0, INS_MUL, 0, ADDRMETH_G | OPTYPE_v | OP_SIGNED | OP_R | OP_W, ADDRMETH_E | OPTYPE_v | OP_SIGNED | OP_R, ADDRMETH_I | OPTYPE_v | OP_SIGNED | OP_R, cpu_80386 | isa_GP, "imul", "", 0, 0, 0, INS_SET_OFLOW|INS_SET_CARRY, 0 }, + { 0, INS_PUSH, 0, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", 
"", 0, 0, 0, 0 , 33 }, + { 0, INS_MUL, 0, ADDRMETH_G | OPTYPE_v | OP_SIGNED | OP_R | OP_W, ADDRMETH_E | OPTYPE_v | OP_SIGNED | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, cpu_80386 | isa_GP, "imul", "", 0, 0, 0, INS_SET_OFLOW|INS_SET_CARRY, 0 }, + { 0, INS_IN, 0, ADDRMETH_Y | OPTYPE_b | OP_W, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "ins", "", 0, 2, 0, 0 , 0 }, + { 0, INS_IN, 0, ADDRMETH_Y | OPTYPE_v | OP_W, ADDRMETH_RR | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "ins", "", 0, 2, 0, 0 , 0 }, + { 0, INS_OUT, 0, ADDRMETH_RR | OPTYPE_b | OP_R, ADDRMETH_X | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "outs", "", 2, 0, 0, 0 , 0 }, + { 0, INS_OUT, 0, ADDRMETH_RR | OPTYPE_v | OP_R, ADDRMETH_X | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "outs", "", 2, 0, 0, 0 , 0 }, +/* 0x70 */ + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jo", "", 0, 0, 0, INS_TEST_OFLOW, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jno", "", 0, 0, 0, INS_TEST_NOFLOW, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jc", "", 0, 0, 0, INS_TEST_CARRY, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jnc", "", 0, 0, 0, INS_TEST_NCARRY, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jz", "", 0, 0, 0, INS_TEST_ZERO, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jnz", "", 0, 0, 0, INS_TEST_NZERO, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jbe", "", 0, 0, 0, INS_TEST_CARRY|INS_TEST_OR|INS_TEST_ZERO, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "ja", "", 0, 0, 0, INS_TEST_NCARRY|INS_TEST_NZERO, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "js", "", 0, 0, 0, INS_TEST_SIGN, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jns", "", 0, 0, 0, INS_TEST_NSIGN, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jpe", "", 0, 0, 0, INS_TEST_PARITY, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jpo", "", 0, 0, 0, INS_TEST_NPARITY, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jl", "", 0, 0, 0, INS_TEST_SFNEOF, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jge", "", 0, 0, 0, INS_TEST_SFEQOF, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jle", "", 0, 0, 0, INS_TEST_ZERO|INS_TEST_OR|INS_TEST_SFNEOF, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jg", "", 0, 0, 0, INS_TEST_NZERO|INS_TEST_SFEQOF, 0 }, +/* 0x80 */ + { idx_80, 0, 0, ADDRMETH_E | OPTYPE_b, ADDRMETH_I | OPTYPE_b, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_81, 0, 0, ADDRMETH_E | OPTYPE_v, ADDRMETH_I | OPTYPE_v, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_82, 0, 0, ADDRMETH_E | OPTYPE_b, ADDRMETH_I | OPTYPE_b, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_83, 0, 0, ADDRMETH_E | OPTYPE_v, ADDRMETH_I | OPTYPE_b, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + 
{ 0, INS_TEST, 0, ADDRMETH_E | OPTYPE_b | OP_R, ADDRMETH_G | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "test", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_TEST, 0, ADDRMETH_E | OPTYPE_v | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "test", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_XCHG, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_G | OPTYPE_b | OP_W | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xchg", "", 0, 0, 0, 0 , 0 }, + { 0, INS_XCHG, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_G | OPTYPE_v | OP_W | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xchg", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_E | OPTYPE_b | OP_W, ADDRMETH_G | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_E | OPTYPE_v | OP_W, ADDRMETH_G | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_G | OPTYPE_b | OP_W, ADDRMETH_E | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_E | OPTYPE_w | OP_W, ADDRMETH_S | OPTYPE_w | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_M | OPTYPE_m | OP_R, ARG_NONE, cpu_80386 | isa_GP, "lea", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_S | OPTYPE_w | OP_W, ADDRMETH_E | OPTYPE_w | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_POP, 0, ADDRMETH_E | OPTYPE_v | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pop", "", 0, 0, 0, 0 , 33 }, +/* 0x90 */ + { 0, INS_NOP, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "nop", "", 0, 0, 0, 0 , 0 }, + { 0, INS_XCHG, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xchg", "", 0, 1, 0, 0 , 0 }, + { 0, INS_XCHG, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xchg", "", 0, 2, 0, 0 , 0 }, + { 0, INS_XCHG, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xchg", "", 0, 3, 0, 0 , 0 }, + { 0, INS_XCHG, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xchg", "", 0, 4, 0, 0 , 0 }, + { 0, INS_XCHG, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xchg", "", 0, 5, 0, 0 , 0 }, + { 0, INS_XCHG, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xchg", "", 0, 6, 0, 0 , 0 }, + { 0, INS_XCHG, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xchg", "", 0, 7, 0, 0 , 0 }, + { 0, INS_SZCONV, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "cwde", "", 0, 0, 0, 0 , 5 }, + { 0, INS_SZCONV, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "cdq", "", 0, 0, 0, 0 , 11 }, + { 0, INS_CALL, 0, ADDRMETH_A | OPTYPE_p | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "callf", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "wait", "", 0, 0, 0, 0 , 0 }, + { 0, INS_PUSHFLAGS, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pushf", "", 0, 0, 0, 0 , 37 }, + { 0, INS_POPFLAGS, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "popf", "", 0, 0, 0, 0 , 35 }, + { 0, INS_MOV, 0, 
ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "sahf", "", 0, 0, 0, INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 43 }, + { 0, INS_MOV, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "lahf", "", 0, 0, 0, 0 , 24 }, +/* 0xa0 */ + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_b | OP_W, ADDRMETH_O | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ADDRMETH_O | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_O | OPTYPE_b | OP_W, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_O | OPTYPE_v | OP_W, ADDRMETH_RR | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_STRMOV, 0, ADDRMETH_Y | OPTYPE_b | OP_W, ADDRMETH_X | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "movs", "", 0, 0, 0, 0 , 0 }, + { 0, INS_STRMOV, 0, ADDRMETH_Y | OPTYPE_v | OP_W, ADDRMETH_X | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "movs", "", 0, 0, 0, 0 , 0 }, + { 0, INS_STRCMP, 0, ADDRMETH_Y | OPTYPE_b | OP_R, ADDRMETH_X | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "cmps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_STRCMP, 0, ADDRMETH_X | OPTYPE_v | OP_R, ADDRMETH_Y | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "cmps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_TEST, 0, ADDRMETH_RR | OPTYPE_b | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "test", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_TEST, 0, ADDRMETH_RR | OPTYPE_v | OP_R, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "test", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_STRSTOR, 0, ADDRMETH_Y | OPTYPE_b | OP_W, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "stos", "", 0, 0, 0, 0 , 0 }, + { 0, INS_STRSTOR, 0, ADDRMETH_Y | OPTYPE_v | OP_W, ADDRMETH_RR | OPTYPE_v |OP_R, ARG_NONE, cpu_80386 | isa_GP, "stos", "", 0, 0, 0, 0 , 0 }, + { 0, INS_STRLOAD, 0, ADDRMETH_RR | OPTYPE_b | OP_W, ADDRMETH_X| OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "lods", "", 0, 0, 0, 0 , 0 }, + { 0, INS_STRLOAD, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ADDRMETH_X| OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "lods", "", 0, 0, 0, 0 , 0 }, + { 0, INS_STRCMP, 0, ADDRMETH_RR | OPTYPE_b | OP_R, ADDRMETH_Y | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "scas", "", 0, 0, 0, 0 , 0 }, + { 0, INS_STRCMP, 0, ADDRMETH_RR | OPTYPE_v | OP_R, ADDRMETH_Y | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "scas", "", 0, 0, 0, 0 , 0 }, +/* 0xb0 */ + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_b | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_b | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 1, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_b | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 2, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_b | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 3, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_b | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 4, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_b | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 5, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_b | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 6, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_b 
| OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 7, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 1, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 2, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 3, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 4, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 5, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 6, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 7, 0, 0, 0 , 0 }, +/* 0xc0 */ + { idx_C0, 0, 0, ADDRMETH_E | OPTYPE_b, ADDRMETH_I | OPTYPE_b, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_C1, 0, 0, ADDRMETH_E | OPTYPE_v, ADDRMETH_I | OPTYPE_b, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_RET, 0, ADDRMETH_I | OPTYPE_w | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "ret", "", 0, 0, 0, 0 , 3 }, + { 0, INS_RET, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "ret", "", 0, 0, 0, 0 , 3 }, + { 0, INS_MOV, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_M | OPTYPE_p | OP_R, ARG_NONE, cpu_80386 | isa_GP, "les", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_M | OPTYPE_p | OP_R, ARG_NONE, cpu_80386 | isa_GP, "lds", "", 0, 0, 0, 0 , 0 }, + { idx_C6, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_C7, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ENTER, INS_NOTE_NONSWAP, ADDRMETH_I | OPTYPE_w | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "enter", "", 0, 0, 0, 0 , 15 }, + { 0, INS_LEAVE, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "leave", "", 0, 0, 0, 0 , 26 }, + { 0, INS_RET, 0, ADDRMETH_I | OPTYPE_w | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "retf", "lret", 0, 0, 0, 0 , 3 }, + { 0, INS_RET, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "retf", "lret", 0, 0, 0, 0 , 3 }, + { 0, INS_DEBUG, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "int3", "", 0, 0, 0, 0 , 0 }, + { 0, INS_TRAP, 0, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "int", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OFLOW, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "into", "", 0, 0, 0, INS_TEST_OFLOW, 0 }, + { 0, INS_TRET, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "iret", "", 0, 0, 0, INS_SET_ALL|INS_SET_DIR, 0 }, +/* 0xd0 */ + { idx_D0, 0, 0, ADDRMETH_E | OPTYPE_b, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 1, 0, 0 , 0 }, + { idx_D1, 0, 0, ADDRMETH_E | OPTYPE_v, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 1, 0, 0 , 0 }, + { idx_D2, 0, 0, ADDRMETH_E | OPTYPE_b, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 1, 0, 0 , 0 }, + { idx_D3, 0, 0, ADDRMETH_E | OPTYPE_v, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 1, 0, 0 , 0 }, + { 0, INS_BCDCONV, 0, 
ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "aam", "", 0, 0, 0, INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_BCDCONV, 0, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "aad", "", 0, 0, 0, INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 2 }, + { 0, INS_SALC, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "salc", "", 0, 0, 0, 0 , 0 }, + { 0, INS_XLAT, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "xlat", "", 0, 0, 0, 0 , 53 }, + { idx_D8, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_D9, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_DA, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_DB, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_DC, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_DD, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_DE, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_DF, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, +/* 0xe0 */ + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "loopnz", "", 0, 0, 0, INS_TEST_NZERO, 31 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "loopz", "", 0, 0, 0, INS_TEST_ZERO, 31 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "loop", "", 0, 0, 0, 0 , 31 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_b | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jcxz", "", 0, 0, 0, 0 , 31 }, + { 0, INS_IN, 0, ADDRMETH_RR | OPTYPE_b | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "in", "", 0, 0, 0, 0 , 0 }, + { 0, INS_IN, 0, ADDRMETH_RR | OPTYPE_b | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "in", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OUT, 0, ADDRMETH_I | OPTYPE_b | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "out", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OUT, 0, ADDRMETH_I | OPTYPE_b | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "out", "", 0, 0, 0, 0 , 0 }, + { 0, INS_CALL, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "call", "", 0, 0, 0, 0 , 3 }, + { 0, INS_BRANCH, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jmp", "", 0, 0, 0, 0 , 0 }, + { 0, INS_BRANCH, 0, ADDRMETH_A | OPTYPE_p | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jmp", "", 0, 0, 0, 0 , 0 }, + { 0, INS_BRANCH, 0, ADDRMETH_J | OPTYPE_b | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jmp", "", 0, 0, 0, 0 , 0 }, + { 0, INS_IN, 0, ADDRMETH_RR | OPTYPE_b| OP_W, ADDRMETH_RR | OPTYPE_w| OP_R, ARG_NONE, cpu_80386 | isa_GP, "in", "", 0, 2, 0, 0 , 0 }, + { 0, INS_IN, 0, ADDRMETH_RR | OPTYPE_v | OP_W, ADDRMETH_RR | OPTYPE_w| OP_R, ARG_NONE, cpu_80386 | isa_GP, "in", "", 0, 2, 0, 0 , 0 }, + { 0, INS_OUT, 0, ADDRMETH_RR | OPTYPE_w| OP_R, ADDRMETH_RR | OPTYPE_b| OP_R, ARG_NONE, cpu_80386 | isa_GP, "out", "", 2, 0, 0, 0 , 0 }, + { 0, INS_OUT, 0, ADDRMETH_RR | OPTYPE_w| OP_R, ADDRMETH_RR | OPTYPE_v| OP_R, ARG_NONE, cpu_80386 | isa_GP, "out", "", 2, 0, 0, 0 , 0 }, +/* 0xf0 */ + { 0, INS_FLAG_PREFIX | PREFIX_LOCK, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ICEBP, 
0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "icebp", "", 0, 0, 0, 0 , 0 }, + { idx_F2, INS_FLAG_PREFIX | PREFIX_REPNZ, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_F3, INS_FLAG_PREFIX | PREFIX_REPZ, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_HALT, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "hlt", "", 0, 0, 0, 0 , 0 }, + { 0, INS_TOGCF, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "cmc", "", 0, 0, 0, INS_SET_CARRY, 0 }, + { idx_F6, 0, 0, ADDRMETH_E | OPTYPE_b, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_F7, 0, 0, ADDRMETH_E | OPTYPE_v, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_CLEARCF, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "clc", "", 0, 0, 0, INS_SET_NCARRY, 0 }, + { 0, INS_SETCF, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "stc", "", 0, 0, 0, INS_SET_CARRY, 0 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "cli", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "sti", "", 0, 0, 0, 0 , 0 }, + { 0, INS_CLEARDF, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "cld", "", 0, 0, 0, INS_SET_NDIR, 0 }, + { 0, INS_SETDF, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "std", "", 0, 0, 0, INS_SET_DIR, 0 }, + { idx_FE, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_FF, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_66[] = { /* SIMD 66 one-byte Opcodes */ + { idx_660F, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_F2[] = { /* SIMD F2 one-byte Opcodes */ + { idx_F20F, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_F3[] = { /* SIMD F3 one-byte Opcodes */ + { idx_F30F, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 
0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, 
INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, 
ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pause", "", 0, 0, 0, 0, 0 } +}; + + +static ia32_insn_t tbl_0F[] = { /* Two-byte Opcodes */ + { idx_0F00, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { idx_0F01, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_w | OP_R, ARG_NONE, cpu_80386 | isa_GP, "lar", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_w | OP_R, ARG_NONE, cpu_80386 | isa_GP, "lsl", "", 0, 0, 0, INS_SET_ZERO, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "clts", "", 0, 0, 0, 0 , 6 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80486 | isa_GP, "invd", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80486 | isa_GP, "wbinvd", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_UNKNOWN, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTPRO | isa_GP, "ud2", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_b | OP_R, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "prefetch", "", 0, 0, 0, 0, 0 }, + { 
0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "femms", "", 0, 0, 0, 0, 0 }, + { idx_0F0F, INS_FLAG_SUFFIX, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_MOV, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "movups", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_W | OPTYPE_ps | OP_W, ADDRMETH_V | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "movups", "", 0, 0, 0, 0 , 0 }, + { idx_0F12, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, INS_NOTE_NOSUFFIX, ADDRMETH_V | OPTYPE_q | OP_W, ADDRMETH_W | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "movlps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "unpcklps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "unpckhps", "", 0, 0, 0, 0 , 0 }, + { idx_0F16, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_W | OPTYPE_q | OP_W, ADDRMETH_V | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "movhps", "", 0, 0, 0, 0 , 0 }, + { idx_0F18, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_R | OPTYPE_d | OP_W, ADDRMETH_C | OPTYPE_d | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_R | OPTYPE_d | OP_W, ADDRMETH_D | OPTYPE_d | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_C | OPTYPE_d | OP_W, ADDRMETH_R | OPTYPE_d | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_D | OPTYPE_d | OP_W, ADDRMETH_R | OPTYPE_d | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_R | OPTYPE_d | OP_W, ADDRMETH_T | OPTYPE_d | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_T | OPTYPE_d | OP_W, ADDRMETH_R | OPTYPE_d | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "movaps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_W | OPTYPE_ps | OP_W, ADDRMETH_V | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "movaps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "cvtpi2ps", "", 0, 0, 0, 0, 0 }, + { 0, INS_MOV, 0, ADDRMETH_W | OPTYPE_ps | OP_W, ADDRMETH_V | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | 
isa_GP, "movntps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_W | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "cvttps2pi", "", 0, 0, 0, 0, 0 }, + { 0, INS_MOV, 0, ADDRMETH_P | OPTYPE_q | OP_W , ADDRMETH_W | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "cvtps2pi", "", 0, 0, 0, 0, 0 }, + { 0, INS_OTHER, 0, ADDRMETH_V | OPTYPE_ss | OP_W, ADDRMETH_W | OPTYPE_ss | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "ucomiss", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_OTHER, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ss | OP_W, ARG_NONE, cpu_PENTIUM2 | isa_GP, "comiss", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_GP, "wrmsr", "", 0, 0, 0, 0 , 52 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_GP, "rdtsc", "", 0, 0, 0, 0 , 40 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_GP, "rdmsr", "", 0, 0, 0, 0 , 38 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTPRO | isa_GP, "rdpmc", "", 0, 0, 0, 0 , 39 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM2 | isa_GP, "sysenter", "", 0, 0, 0, 0 , 50 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM2 | isa_GP, "sysexit", "", 0, 0, 0, 0 , 51 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmovo", "", 0, 0, 0, INS_TEST_OFLOW, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmovno", "", 0, 0, 0, INS_TEST_NOFLOW, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmovc", "", 0, 0, 0, INS_TEST_CARRY, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmovnc", "", 0, 0, 0, INS_TEST_NCARRY, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmovz", "", 0, 0, 0, INS_TEST_ZERO, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmovnz", "", 0, 0, 0, INS_TEST_NZERO, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmovbe", "", 0, 0, 0, INS_TEST_CARRY|INS_TEST_OR|INS_TEST_ZERO, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmova", "", 0, 0, 0, INS_TEST_NZERO|INS_TEST_NCARRY, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmovs", "", 0, 0, 0, 
INS_TEST_SIGN, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmovns", "", 0, 0, 0, INS_TEST_NSIGN, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmovp", "", 0, 0, 0, INS_TEST_PARITY, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmovnp", "", 0, 0, 0, INS_TEST_NPARITY, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmovl", "", 0, 0, 0, INS_TEST_SFNEOF, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmovge", "", 0, 0, 0, INS_TEST_SFEQOF, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmovle", "", 0, 0, 0, INS_TEST_ZERO|INS_TEST_OR|INS_TEST_SFNEOF, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "cmovg", "", 0, 0, 0, INS_TEST_NZERO|INS_TEST_SFEQOF, 0 }, + { 0, INS_MOV, 0, ADDRMETH_G | OPTYPE_d | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "movmskps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ARITH, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "sqrtps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ARITH, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "rsqrtps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "rcpps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_AND, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "andps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_AND, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "andnps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OR, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "orps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_XOR, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "xorps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ADD, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "addps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MUL, 0, ADDRMETH_V | OPTYPE_ps | OP_R, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "mulps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_V | OPTYPE_pd, ADDRMETH_W | OPTYPE_q, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvtps2pd", "", 0, 0, 0, 0, 0 }, + { 0, INS_MOV, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvtdq2ps", "", 0, 0, 0, 0, 0 }, + { 0, INS_SUB, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "subps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ARITH, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "minps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_DIV, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "divps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ARITH, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "maxps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | 
OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "punpcklbw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "punpcklwd", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "punpckldq", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "packsswb", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pcmpgtb", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pcmpgtw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pcmpgtd", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "packuswb", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "punpckhbw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "punpckhwd", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "punpckhdq", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "packssdw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_P | OPTYPE_d | OP_W, ADDRMETH_E | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "movd", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "movq", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_PENTIUM2 | isa_GP, "pshufw", "", 0, 0, 0, 0, 0 }, + { idx_0F71, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_MMX, "", "", 0, 0, 0, 0 , 0 }, + { idx_0F72, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_MMX, "", "", 0, 0, 0, 0 , 0 }, + { idx_0F73, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_MMX, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pcmpeqb", "", 0, 0, 0, 0 , 0 }, + { 0, INS_CMP, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pcmpeqw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_CMP, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pcmpeqd", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_MMX, "emms", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", 
"", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_E | OPTYPE_d | OP_W, ADDRMETH_P | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "movd", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_Q | OPTYPE_q | OP_W, ADDRMETH_P | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "movq", "", 0, 0, 0, 0 , 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jo", "", 0, 0, 0, INS_TEST_OFLOW, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jno", "", 0, 0, 0, INS_TEST_NOFLOW, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jc", "", 0, 0, 0, INS_TEST_CARRY, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jnc", "", 0, 0, 0, INS_TEST_NCARRY, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jz", "", 0, 0, 0, INS_TEST_ZERO, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jnz", "", 0, 0, 0, INS_TEST_NZERO, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jbe", "", 0, 0, 0, INS_TEST_CARRY|INS_TEST_OR|INS_TEST_ZERO, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "ja", "", 0, 0, 0, INS_TEST_NCARRY|INS_TEST_NZERO, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "js", "", 0, 0, 0, INS_TEST_SIGN, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jns", "", 0, 0, 0, INS_TEST_NSIGN, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jpe", "", 0, 0, 0, INS_TEST_PARITY, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jpo", "", 0, 0, 0, INS_TEST_NPARITY, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jl", "", 0, 0, 0, INS_TEST_SFNEOF, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jge", "", 0, 0, 0, INS_TEST_SFEQOF, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jle", "", 0, 0, 0, INS_TEST_ZERO|INS_TEST_OR|INS_TEST_SFNEOF, 0 }, + { 0, INS_BRANCHCC, 0, ADDRMETH_J | OPTYPE_v | OP_X | OP_SIGNED, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jg", "", 0, 0, 0, INS_TEST_NZERO|INS_TEST_SFEQOF, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "seto", "", 0, 0, 0, INS_TEST_OFLOW, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "setno", "", 0, 0, 0, INS_TEST_OFLOW, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "setc", "", 0, 0, 0, INS_TEST_CARRY, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "setnc", "", 0, 0, 0, INS_TEST_NCARRY, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "setz", "", 0, 0, 0, INS_TEST_ZERO, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, 
ARG_NONE, cpu_80386 | isa_GP, "setnz", "", 0, 0, 0, INS_TEST_NZERO, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "setbe", "", 0, 0, 0, INS_TEST_CARRY|INS_TEST_OR|INS_TEST_ZERO, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "seta", "", 0, 0, 0, INS_TEST_NCARRY|INS_TEST_NZERO, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "sets", "", 0, 0, 0, INS_TEST_SIGN, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "setns", "", 0, 0, 0, INS_TEST_NSIGN, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "setpe", "", 0, 0, 0, INS_TEST_PARITY, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "setpo", "", 0, 0, 0, INS_TEST_NPARITY, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "setl", "", 0, 0, 0, INS_TEST_SFNEOF, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "setge", "", 0, 0, 0, INS_TEST_SFEQOF, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "setle", "", 0, 0, 0, INS_TEST_ZERO|INS_TEST_OR|INS_TEST_SFNEOF, 0 }, + { 0, INS_MOVCC, 0, ADDRMETH_E | OPTYPE_b | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "setg", "", 0, 0, 0, INS_TEST_NZERO|INS_TEST_SFEQOF, 0 }, + { 0, INS_PUSH, 0, ADDRMETH_RS | OPTYPE_w | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 4, 0, 0, 0 , 33 }, + { 0, INS_POP, 0, ADDRMETH_RS | OPTYPE_w | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pop", "", 4, 0, 0, 0 , 33 }, + { 0, INS_CPUID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_GP, "cpuid", "", 0, 0, 0, 0 , 10 }, + { 0, INS_BITTEST, 0, ADDRMETH_E | OPTYPE_v | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "bt", "", 0, 0, 0, INS_SET_CARRY, 0 }, + { 0, INS_SHL, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_80386 | isa_GP, "shld", "", 0, 0, 0, INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + //{ 0, INS_SHL, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ADDRMETH_I | OP_R | OPTYPE_b | ADDRMETH_RR, cpu_80386 | isa_GP, "shld", "", 0, 0, 1, INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_SHL, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ADDRMETH_RR | OP_R | OPTYPE_b, cpu_80386 | isa_GP, "shld", "", 0, 0, 1, INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_PUSH, 0, ADDRMETH_RS | OPTYPE_w | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 5, 0, 0, 0 , 33 }, + { 0, INS_POP, 0, ADDRMETH_RS | OPTYPE_w | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "pop", "", 5, 0, 0, 0 , 33 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "rsm", "", 0, 0, 0, INS_SET_ALL|INS_SET_DIR, 42 }, + { 0, INS_BITTEST, 0, ADDRMETH_E | OPTYPE_v | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "bts", "", 0, 0, 0, INS_SET_CARRY, 0 }, + { 0, INS_SHR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_80386 | isa_GP, "shrd", "", 0, 0, 0, 
INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_SHR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ADDRMETH_RR | OP_R | OPTYPE_b , cpu_80386 | isa_GP, "shrd", "", 0, 0, 1, INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { idx_0FAE, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM2 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MUL, 0, ADDRMETH_G | OPTYPE_v | OP_SIGNED | OP_R | OP_W, ADDRMETH_E | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "imul", "", 0, 0, 0, INS_SET_OFLOW|INS_SET_CARRY, }, + { 0, INS_XCHGCC, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_G | OPTYPE_b | OP_W, ARG_NONE, cpu_80486 | isa_GP, "cmpxchg", "", 0, 0, 0, INS_SET_ALL, 8 }, + { 0, INS_XCHGCC, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_G | OPTYPE_v | OP_W, ARG_NONE, cpu_80486 | isa_GP, "cmpxchg", "", 0, 0, 0, INS_SET_ALL, 7 }, + { 0, INS_MOV, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_M | OPTYPE_p | OP_W, ARG_NONE, cpu_80386 | isa_GP, "lss", "", 0, 0, 0, 0 , 0 }, + { 0, INS_BITTEST, 0, ADDRMETH_E | OPTYPE_v | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "btr", "", 0, 0, 0, INS_SET_CARRY, 0 }, + { 0, INS_MOV, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_M | OPTYPE_p | OP_W, ARG_NONE, cpu_80386 | isa_GP, "lfs", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_M | OPTYPE_p | OP_W, ARG_NONE, cpu_80386 | isa_GP, "lgs", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "movzx", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_w | OP_R, ARG_NONE, cpu_80386 | isa_GP, "movzx", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_UNKNOWN, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "ud1", "", 0, 0, 0, 0 , 0 }, + { idx_0FBA, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_BITTEST, 0, ADDRMETH_E | OPTYPE_v | OP_R, ADDRMETH_G | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "btc", "", 0, 0, 0, INS_SET_CARRY, 0 }, + { 0, INS_BITTEST, 0, ADDRMETH_G | OPTYPE_v | OP_R | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "bsf", "", 0, 0, 0, INS_SET_ZERO, 0 }, + { 0, INS_BITTEST, 0, ADDRMETH_G | OPTYPE_v | OP_R | OP_W, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "bsr", "", 0, 0, 0, INS_SET_ZERO, 0 }, + { 0, INS_MOV, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "movsx", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_G | OPTYPE_v | OP_W, ADDRMETH_E | OPTYPE_w | OP_R, ARG_NONE, cpu_80386 | isa_GP, "movsx", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ADD, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_G | OPTYPE_b | OP_W, ARG_NONE, cpu_80486 | isa_GP, "xadd", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_ADD, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_G | OPTYPE_v | OP_W, ARG_NONE, cpu_80486 | isa_GP, "xadd", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_CMP, 0, ADDRMETH_V | OPTYPE_ps | OP_R, ADDRMETH_W | OPTYPE_ps | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_PENTIUM4 | isa_GP, "cmpps", "", 0, 0, 0, 0, 0 }, + { 0, INS_MOV, 0, ADDRMETH_M | OPTYPE_d | OP_W, ADDRMETH_G | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movnti", "", 0, 0, 0, 0, 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_E | OPTYPE_d | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_PENTIUM2 | isa_GP, "pinsrw", "", 0, 0, 0, 0 , 0 
}, + { 0, INS_OTHER, 0, ADDRMETH_G | OPTYPE_d | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_PENTIUM2 | isa_GP, "pextrw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_V | OPTYPE_ps | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_PENTIUM2 | isa_GP, "shufps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_XCHGCC, 0, ADDRMETH_M | OPTYPE_q | OP_R | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_GP, "cmpxchg8b", "", 0, 0, 0, 0, 9 }, + { 0, INS_XCHG, 0, ADDRMETH_RR | OPTYPE_d | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80486 | isa_GP, "bswap", "", 0, 0, 0, 0 , 0 }, + { 0, INS_XCHG, 0, ADDRMETH_RR | OPTYPE_d | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80486 | isa_GP, "bswap", "", 1, 0, 0, 0 , 0 }, + { 0, INS_XCHG, 0, ADDRMETH_RR | OPTYPE_d | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80486 | isa_GP, "bswap", "", 2, 0, 0, 0 , 0 }, + { 0, INS_XCHG, 0, ADDRMETH_RR | OPTYPE_d | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80486 | isa_GP, "bswap", "", 3, 0, 0, 0 , 0 }, + { 0, INS_XCHG, 0, ADDRMETH_RR | OPTYPE_d | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80486 | isa_GP, "bswap", "", 4, 0, 0, 0 , 0 }, + { 0, INS_XCHG, 0, ADDRMETH_RR | OPTYPE_d | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80486 | isa_GP, "bswap", "", 5, 0, 0, 0 , 0 }, + { 0, INS_XCHG, 0, ADDRMETH_RR | OPTYPE_d | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80486 | isa_GP, "bswap", "", 6, 0, 0, 0 , 0 }, + { 0, INS_XCHG, 0, ADDRMETH_RR | OPTYPE_d | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80486 | isa_GP, "bswap", "", 7, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psrlw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psrld", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psrlq", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ADD, 0, ADDRMETH_P | OPTYPE_q | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "paddq", "", 0, 0, 0, 0, 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pmullw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_G | OPTYPE_d | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "pmovmskb", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psubusb", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psubusw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "pminub", "", 0, 0, 0, 0 , 0 }, + { 0, INS_AND, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pand", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ADD, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "paddusb", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ADD, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "paddusw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ARITH, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "pmaxub", "", 0, 0, 0, 
0 , 0 }, + { 0, INS_AND, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pandn", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "pavgb", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psraw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psrad", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "pavgw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MUL, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "pmulhuw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MUL, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pmulhw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, INS_NOTE_NOSUFFIX, ADDRMETH_W | OPTYPE_q | OP_W, ADDRMETH_V | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "movntq", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SUB, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psubsb", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SUB, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psubsw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ARITH, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "pminsw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OR, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "por", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ADD, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "paddsb", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ADD, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "paddsw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ARITH, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "pmaxsw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_XOR, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pxor", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psllw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pslld", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psllq", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MUL, 0, ADDRMETH_P | OPTYPE_q | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pmuludq", "", 0, 0, 0, 0, 0 }, + { 0, INS_ADD, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pmaddwd", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "psadbw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_P | OPTYPE_pi | OP_W, ADDRMETH_Q | OPTYPE_pi | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "maskmovq", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SUB, 0, ADDRMETH_P | 
OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psubb", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SUB, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psubw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SUB, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psubd", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SUB, 0, ADDRMETH_P | OPTYPE_q | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psubq", "", 0, 0, 0, 0, 0 }, + { 0, INS_ADD, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "paddb", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ADD, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "paddw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_ADD, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "paddd", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_660F[] = { /* SIMD 66 Two-byte Opcodes */ + { 0, INS_MOV, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movupd", "", 0, 0, 0, 0, 0 }, + { 0, INS_MOV, 0, ADDRMETH_W | OPTYPE_pd | OP_R, ADDRMETH_V | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movupd", "", 0, 0, 0, 0, 0 }, + { 0, INS_MOV, INS_NOTE_NOSUFFIX, ADDRMETH_V | OPTYPE_q | OP_R, ADDRMETH_M | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movlpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_MOV, INS_NOTE_NOSUFFIX, ADDRMETH_M | OPTYPE_q | OP_R, ADDRMETH_V | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movlpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "unpcklpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "unpckhpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_MOV, INS_NOTE_NOSUFFIX, ADDRMETH_V | OPTYPE_q | OP_R, ADDRMETH_M | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movhpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_MOV, INS_NOTE_NOSUFFIX, ADDRMETH_M | OPTYPE_q | OP_R, ADDRMETH_V | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movhpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 
0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_MOV, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movapd", "", 0, 0, 0, 0, 0 }, + { 0, INS_MOV, 0, ADDRMETH_W | OPTYPE_pd | OP_R, ADDRMETH_V | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movapd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvtpi2pd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_M | OPTYPE_pd | OP_R, ADDRMETH_V | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movntpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_P | OPTYPE_q | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvttpd2pi", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_P | OPTYPE_q | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvtpd2pi", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_sd | OP_R, ADDRMETH_W | OPTYPE_sd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "ucomisd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_sd | OP_R, ADDRMETH_W | OPTYPE_sd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "comisd", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, 
ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_G | OPTYPE_d | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movmskpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_FSQRT, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "sqrtpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_AND, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "andpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_AND, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "andnpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_OR, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "orpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_XOR, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "xorpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_ADD, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "addpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_MUL, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "mulpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ps | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvtpd2ps", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvtps2dq", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "subpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "minpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "divpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "maxpd", "", 0, 0, 0, 0, 0 }, + { 0, 
INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "punpcklbw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "punpcklwd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "punpckldq", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "packsswb", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pcmpgtb", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pcmpgtw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pcmpgtd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "packuswb", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "punpckhbw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "punpckhwd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "punpckhdq", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "packssdw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "punpcklqdq", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "punpckhqdq", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_d | OP_R, ADDRMETH_E | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movdqa", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_PENTIUM4 | isa_GP, "pshufd", "", 0, 0, 0, 0, 0 }, + { idx_660F71, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { idx_660F72, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { idx_660F73, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pcmpeqb", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pcmpeqw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pcmpeqd", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 
0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "haddpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "hsubpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_E | OPTYPE_d | OP_R, ADDRMETH_V | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_W | OPTYPE_dq | OP_R, ADDRMETH_V | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movdqa", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, 
ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, 
ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_PENTIUM4 | isa_GP, "cmppd", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_w | OP_R, ADDRMETH_E | OPTYPE_d | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_PENTIUM4 | isa_GP, "pinsrw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_G | OPTYPE_d | OP_R, ADDRMETH_W | OPTYPE_w | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_PENTIUM4 | isa_GP, "pextrw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_PENTIUM4 | isa_GP, "shufpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "addsubpd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psrlw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psrld", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psrlq", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "paddq", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pmullw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_W | OPTYPE_q | OP_R, ADDRMETH_V | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movq", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_G | OPTYPE_d | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pmovmskb", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psubusb", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | 
OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psubusw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pminub", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pand", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "paddusb", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "paddusw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pmaxub", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pandn", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pavgb", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psraw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psrad", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pavgw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pmulhuw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pmulhw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_pd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvttpd2dq", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_M | OPTYPE_dq | OP_R, ADDRMETH_V | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movntdq", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psubsb", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psubsw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pminsw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "por", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "paddsb", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "paddsw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pmaxsw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pxor", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, 
cpu_PENTIUM4 | isa_GP, "psllw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pslld", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psllq", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pmuludq", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "pmaddwd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psadbw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "maskmovdqu", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psubb", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psubw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psubd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "psubq", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "paddb", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "paddw", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "paddd", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 } +}; + + +static ia32_insn_t tbl_F20F[] = { /* SIMD F2 Two-byte Opcodes */ + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_sd | OP_R, ADDRMETH_W | OPTYPE_sd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movsd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_W | OPTYPE_sd | OP_R, ADDRMETH_V | OPTYPE_sd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movsd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_q | OP_R, ADDRMETH_W | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movddup", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, 
ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_sd | OP_R, ADDRMETH_E | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvtsi2sd", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_G | OPTYPE_d | OP_R, ADDRMETH_W | OPTYPE_sd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvttsd2si", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_G | OPTYPE_d | OP_R, ADDRMETH_W | OPTYPE_sd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvtsd2si", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 
0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_sd | OP_R, ADDRMETH_W | OPTYPE_sd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "sqrtsd", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_sd | OP_R, ADDRMETH_W | OPTYPE_sd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "addsd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_sd | OP_R, ADDRMETH_W | OPTYPE_sd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "mulsd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ss | OP_R, ADDRMETH_W | OPTYPE_sd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvtsd2ss", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_sd | OP_R, ADDRMETH_W | OPTYPE_sd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "subsd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_sd | OP_R, ADDRMETH_W | OPTYPE_sd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "minsd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | 
OPTYPE_sd | OP_R, ADDRMETH_W | OPTYPE_sd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "divsd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_sd | OP_R, ADDRMETH_W | OPTYPE_sd | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "maxsd", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_PENTIUM4 | isa_GP, "pshuflw", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ps | OP_R, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "haddps", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ps | OP_R, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "hsubps", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 
}, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, 
INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_sd | OP_R, ADDRMETH_W | OPTYPE_sd | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_PENTIUM4 | isa_GP, "cmpsd", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 
cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ps | OP_R, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "addsubps", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_P | OPTYPE_q | OP_R, ADDRMETH_W | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movdq2q", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_pd 
| OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvtpd2dq", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_M | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "lddqu", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 } +}; + + +static ia32_insn_t tbl_F30F[] = { /* SIMD F3 Two-byte Opcodes */ + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ss | OP_R, ADDRMETH_W | OPTYPE_ss | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movss", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_W | OPTYPE_ss | OP_R, ADDRMETH_V | OPTYPE_ss | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movss", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ps | OP_R, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movsldup", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ps | OP_R, ADDRMETH_W | OPTYPE_ps | OP_R, 
ARG_NONE, cpu_PENTIUM4 | isa_GP, "movshdup", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ss | OP_R, ADDRMETH_E | OPTYPE_d | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvtsi2ss", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_G | OPTYPE_d | OP_R, ADDRMETH_W | OPTYPE_ss | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvttss2si", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_G | OPTYPE_d | OP_R, ADDRMETH_W | OPTYPE_ss | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvtss2si", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, 
INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ss | OP_R, ADDRMETH_W | OPTYPE_ss | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "sqrtss", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ss | OP_R, ADDRMETH_W | OPTYPE_ss | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "rsqrtss", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ss | OP_R, ADDRMETH_W | OPTYPE_ss | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "rcpss", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ss | OP_R, ADDRMETH_W | OPTYPE_ss | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "addss", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | 
OPTYPE_ss | OP_R, ADDRMETH_W | OPTYPE_ss | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "mulss", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_sd | OP_R, ADDRMETH_W | OPTYPE_ss | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvtss2sd", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_ps | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvttps2dq", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ss | OP_R, ADDRMETH_W | OPTYPE_ss | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "subss", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ss | OP_R, ADDRMETH_W | OPTYPE_ss | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "minss", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ss | OP_R, ADDRMETH_W | OPTYPE_ss | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "divss", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ss | OP_R, ADDRMETH_W | OPTYPE_ss | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "maxss", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movdqu", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_W | OPTYPE_dq | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_PENTIUM4 | isa_GP, "pshufhw", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, 
INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_q | OP_R, ADDRMETH_W | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movq", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_W | OPTYPE_dq | OP_R, ADDRMETH_V | OPTYPE_dq | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movdqu", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, 
ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 
cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_ss | OP_R, ADDRMETH_W | OPTYPE_ss | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, cpu_PENTIUM4 | isa_GP, "cmpss", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_dq | OP_R, ADDRMETH_Q | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "movq2dq", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 
cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_UNKNOWN, 0, ADDRMETH_V | OPTYPE_pd | OP_R, ADDRMETH_W | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM4 | isa_GP, "cvtdq2pd", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "", "", 0, 0, 0, 0, 0 } +}; + + +static ia32_insn_t tbl_0F00[] = { /* Group 6 */ + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_w | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "sldt", "", 0, 0, 0, 0 , 46 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_w | 
OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "str", "", 0, 0, 0, 0 , 49 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_w | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "lldt", "", 0, 0, 0, 0 , 29 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_w | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "ltr", "", 0, 0, 0, 0 , 32 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_w | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "verr", "", 0, 0, 0, INS_SET_ZERO, 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_w | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "verw", "", 0, 0, 0, INS_SET_ZERO, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_0F01[] = { /* Group 7 */ + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_s | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "sgdt", "", 0, 0, 0, 0 , 44 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_s | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "sidt", "", 0, 0, 0, 0 , 45 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_s | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "lgdt", "", 0, 0, 0, 0 , 27 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_s | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "lidt", "", 0, 0, 0, 0 , 28 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_w | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "smsw", "", 0, 0, 0, 0 , 47 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_w | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "lmsw", "", 0, 0, 0, 0 , 30 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_none | OP_R, ARG_NONE, ARG_NONE, cpu_80486 | isa_GP, "invlpg", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_s | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "sgdt", "", 0, 0, 0, 0 , 44 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_s | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "sidt", "", 0, 0, 0, 0 , 45 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_s | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "lgdt", "", 0, 0, 0, 0 , 27 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_s | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "lidt", "", 0, 0, 0, 0 , 28 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_w | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "smsw", "", 0, 0, 0, 0 , 47 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_w | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "lmsw", "", 0, 0, 0, 0 , 30 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_none | OP_R, ARG_NONE, ARG_NONE, cpu_80486 | isa_GP, "invlpg", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_s | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "sgdt", "", 0, 0, 0, 0 , 44 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_s | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "sidt", "", 0, 0, 0, 0 , 45 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_s | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "lgdt", "", 0, 0, 0, 0 , 27 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_s | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "lidt", "", 0, 0, 0, 0 , 28 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_w | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "smsw", "", 0, 0, 0, 0 , 47 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_w | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "lmsw", "", 0, 0, 0, 0 , 30 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | 
OPTYPE_none | OP_R, ARG_NONE, ARG_NONE, cpu_80486 | isa_GP, "invlpg", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { idx_0F0111, 0, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_w | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "smsw", "", 0, 0, 0, 0 , 47 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_w | OP_W, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "lmsw", "", 0, 0, 0, 0 , 30 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_0F0111[] = { /* Monitor/MWait opcode */ + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "monitor", "", 0, 0, 0, 0, 54 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "mwait", "", 0, 0, 0, 0, 55 } +}; + + +static ia32_insn_t tbl_0F12[] = { /* Movlps Opcode */ + { 0, INS_MOV, INS_NOTE_NOSUFFIX, ADDRMETH_V | OPTYPE_q | OP_W, ADDRMETH_M | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "movlps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, INS_NOTE_NOSUFFIX, ADDRMETH_V | OPTYPE_q | OP_W, ADDRMETH_M | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "movlps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, INS_NOTE_NOSUFFIX, ADDRMETH_V | OPTYPE_q | OP_W, ADDRMETH_M | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "movlps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_V | OPTYPE_ps | OP_R | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R , ARG_NONE, cpu_PENTIUM4 | isa_GP, "movhlps", "", 0, 0, 0, 0, 0 } +}; + + +static ia32_insn_t tbl_0F16[] = { /* Movhps Opcode */ + { 0, INS_OTHER, 0, ADDRMETH_V | OPTYPE_q | OP_W, ADDRMETH_M | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "movhps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_V | OPTYPE_q | OP_W, ADDRMETH_M | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "movhps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_V | OPTYPE_q | OP_W, ADDRMETH_M | OPTYPE_q | OP_R, ARG_NONE, cpu_PENTIUM2 | isa_GP, "movhps", "", 0, 0, 0, 0 , 0 }, + { 0, INS_MOV, 0, ADDRMETH_V | OPTYPE_ps | OP_R | OP_W, ADDRMETH_W | OPTYPE_ps | OP_R , ARG_NONE, cpu_PENTIUM4 | isa_GP, "movlhps", "", 0, 0, 0, 0, 0 } +}; + + +static ia32_insn_t tbl_0F18[] = { /* Group 16 */ + { 0, INS_SYSTEM, 0, OP_W | OPTYPE_b | ADDRMETH_M, ARG_NONE, ARG_NONE, cpu_PENTIUM2 | isa_GP, "prefetchnta", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_RT | OPTYPE_d | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM2 | isa_GP, "prefetcht0", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_RT | OPTYPE_d | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM2 | isa_GP, "prefetcht1", "", 1, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_RT | OPTYPE_d | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM2 | isa_GP, "prefetcht2", "", 2, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, OP_W | OPTYPE_b | ADDRMETH_M, ARG_NONE, ARG_NONE, cpu_PENTIUM2 | isa_GP, "prefetchnta", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_RT | OPTYPE_d 
| OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM2 | isa_GP, "prefetcht0", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_RT | OPTYPE_d | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM2 | isa_GP, "prefetcht1", "", 1, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_RT | OPTYPE_d | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM2 | isa_GP, "prefetcht2", "", 2, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, OP_W | OPTYPE_b | ADDRMETH_M, ARG_NONE, ARG_NONE, cpu_PENTIUM2 | isa_GP, "prefetchnta", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_RT | OPTYPE_d | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM2 | isa_GP, "prefetcht0", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_RT | OPTYPE_d | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM2 | isa_GP, "prefetcht1", "", 1, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_RT | OPTYPE_d | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM2 | isa_GP, "prefetcht2", "", 2, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_0F71[] = { /* Group 12 */ + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psrlw", "", 0, 0, 0, 0 , 0 }, + { 0, 
INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psraw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psllw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_660F71[] = { /* Group 12 SSE */ + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_dq | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psrlw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_dq | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psraw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_dq | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psllw", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_0F72[] = { /* Group 13 */ + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, 
INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psrld", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psrad", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pslld", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_660F72[] = { /* Group 13 SSE */ + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, 
ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_W | OPTYPE_dq | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psrld", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_W | OPTYPE_dq | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psrad", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_W | OPTYPE_dq | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pslld", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_0F73[] = { /* Group 14 */ + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, 
ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psrlq", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_P | OPTYPE_q | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psllq", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_660F73[] = { /* Group 14 SSE */ + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_W | OPTYPE_dq | 
OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psrlq", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_W | OPTYPE_dq | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psrldq", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_W | OPTYPE_dq | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "psllq", "", 0, 0, 0, 0 , 0 }, + { 0, INS_OTHER, 0, ADDRMETH_W | OPTYPE_dq | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_PENTIUM | isa_MMX, "pslldq", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_0FAE[] = { /* Group 15 */ + { 0, INS_FPU, 0, ADDRMETH_E | OPTYPE_fx | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_MMX, "fxsave", "", 0, 0, 0, 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_E | OPTYPE_fx | OP_R, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_MMX, "fxrstor", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_d | OP_R, ARG_NONE, ARG_NONE, cpu_PENTIUM2, "ldmxcsr", "", 0, 0, 0, 0 , 25 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_d | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM2, "stmxcsr", "", 0, 0, 0 , 0, 48 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_b | OP_R, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "clflush", "", 0, 0, 0, 0, 0 }, + { 0, INS_FPU, 0, ADDRMETH_E | OPTYPE_fx | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_MMX, "fxsave", "", 0, 0, 0, 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_E | OPTYPE_fx | OP_R, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_MMX, "fxrstor", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_d | OP_R, ARG_NONE, ARG_NONE, cpu_PENTIUM2, "ldmxcsr", "", 0, 0, 0, 0 , 25 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_d | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM2, "stmxcsr", "", 0, 0, 0 , 0, 48 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_b | OP_R, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "clflush", "", 0, 0, 0, 0, 0 }, + { 0, INS_FPU, 0, ADDRMETH_E | OPTYPE_fx | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_MMX, "fxsave", "", 0, 0, 0, 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_E | OPTYPE_fx | OP_R, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_MMX, "fxrstor", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_d | OP_R, ARG_NONE, ARG_NONE, cpu_PENTIUM2, "ldmxcsr", "", 0, 0, 0, 0 , 25 }, + { 0, INS_SYSTEM, 0, ADDRMETH_E | OPTYPE_d | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM2, "stmxcsr", "", 0, 0, 0 , 0, 48 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ADDRMETH_M | OPTYPE_b | OP_R, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "clflush", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, 
ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "lfence", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "mfence", "", 0, 0, 0, 0 , 0 }, + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "sfence", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_0FBA[] = { /* Group 8 */ + { 0, INS_BITTEST, 0, ADDRMETH_E | OPTYPE_v | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "bt", "", 0, 0, 0, INS_SET_CARRY, 0 }, + { 0, INS_BITTEST, 0, ADDRMETH_E | OPTYPE_v | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "bts", "", 0, 0, 0, INS_SET_CARRY, 0 }, + { 0, INS_BITTEST, 0, ADDRMETH_E | OPTYPE_v | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "btr", "", 0, 0, 0, INS_SET_CARRY, 0 }, + { 0, INS_BITTEST, 0, ADDRMETH_E | OPTYPE_v | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "btc", "", 0, 0, 0 , INS_SET_CARRY, 0 } +}; + + +static ia32_insn_t tbl_0FC7[] = { /* Group 9 */ + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_XCHGCC, 0, ADDRMETH_M | OPTYPE_q | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_GP, "cmpxch8b", "", 0, 0, 0 , 0 , 9 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_XCHGCC, 0, ADDRMETH_M | OPTYPE_q | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_GP, "cmpxch8b", "", 0, 0, 0 , 0 , 9 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "", "", 0, 0, 0, 0 , 0 }, + { 0, INS_XCHGCC, 0, ADDRMETH_M | OPTYPE_q | OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM | isa_GP, "cmpxch8b", "", 0, 0, 0 , 0 , 9 } +}; + + +static ia32_insn_t tbl_0FB9[] = { /* Group 10 */ + { 0, INS_SYSTEM, 0, ARG_NONE, ARG_NONE, ARG_NONE, 0, "fxsave", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_C6[] = { /* Group 11a */ + { 0, INS_MOV, 0, ADDRMETH_E | OPTYPE_b | OP_W, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_C7[] = { /* Group 11b */ + { 0, INS_MOV, 0, ADDRMETH_E | OPTYPE_v | OP_W, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mov", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_80[] = { /* Group 1a */ + { 0, INS_ADD, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "add", "", 0, 0, 0, 
INS_SET_ALL, 0 }, + { 0, INS_OR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "or", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_ADD, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "adc", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_SUB, 0, ADDRMETH_E | OPTYPE_b | OP_SIGNED | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sbb", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_AND, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "and", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SUB, 0, ADDRMETH_E | OPTYPE_b | OP_SIGNED | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sub", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_XOR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xor", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_CMP, 0, ADDRMETH_E | OPTYPE_b | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "cmp", "", 0, 0, 0 , INS_SET_ALL, 0 } +}; + + +static ia32_insn_t tbl_81[] = { /* Group 1b */ + { 0, INS_ADD, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "add", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_OR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "or", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_ADD, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "adc", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_SUB, 0, ADDRMETH_E | OPTYPE_v | OP_SIGNED | OP_W | OP_R, ADDRMETH_I | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sbb", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_AND, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "and", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SUB, 0, ADDRMETH_E | OPTYPE_v | OP_SIGNED | OP_W | OP_R, ADDRMETH_I | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sub", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_XOR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xor", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_CMP, 0, ADDRMETH_E | OPTYPE_v | OP_R, ADDRMETH_I | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "cmp", "", 0, 0, 0 , INS_SET_ALL, 0 } +}; + + +static ia32_insn_t tbl_82[] = { /* Group 1c */ + { 0, INS_ADD, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "add", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_OR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "or", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_ADD, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "adc", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_SUB, 0, ADDRMETH_E | OPTYPE_b | OP_SIGNED | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sbb", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_AND, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "and", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SUB, 0, ADDRMETH_E | OPTYPE_b | OP_SIGNED | OP_W | OP_R, 
ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sub", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_XOR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xor", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_CMP, 0, ADDRMETH_E | OPTYPE_b | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "cmp", "", 0, 0, 0 , INS_SET_ALL, 0 } +}; + + +static ia32_insn_t tbl_83[] = { /* Group 1d */ + { 0, INS_ADD, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "add", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_OR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "or", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_ADD, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "adc", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_SUB, 0, ADDRMETH_E | OPTYPE_v | OP_SIGNED | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sbb", "", 0, 0, 0, INS_SET_ALL|INS_TEST_CARRY, 0 }, + { 0, INS_AND, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "and", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SUB, 0, ADDRMETH_E | OPTYPE_v | OP_SIGNED | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sub", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_XOR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "xor", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_CMP, 0, ADDRMETH_E | OPTYPE_v | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "cmp", "", 0, 0, 0 , INS_SET_ALL, 0 } +}; + + +static ia32_insn_t tbl_C0[] = { /* Group 2a */ + { 0, INS_ROL, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rol", "", 0, 0, 0, INS_SET_CARRY|INS_SET_OFLOW, 0 }, + { 0, INS_ROR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "ror", "", 0, 0, 0, INS_SET_CARRY|INS_SET_OFLOW, 0 }, + { 0, INS_ROL, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rcl", "", 0, 0, 0, INS_SET_CARRY|INS_SET_OFLOW|INS_TEST_CARRY, 0 }, + { 0, INS_ROR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rcr", "", 0, 0, 0, INS_SET_CARRY|INS_SET_OFLOW|INS_TEST_CARRY, 0 }, + { 0, INS_SHL, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "shl", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SHR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "shr", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SHL, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sal", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SHR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sar", "", 0, 0, 0 , INS_SET_ALL, 0 } +}; + + +static ia32_insn_t tbl_C1[] = { /* Group 2b */ + { 0, INS_ROL, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rol", "", 0, 0, 0, INS_SET_CARRY|INS_SET_OFLOW, 0 }, + { 0, INS_ROR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, 
ARG_NONE, cpu_80386 | isa_GP, "ror", "", 0, 0, 0, INS_SET_CARRY|INS_SET_OFLOW, 0 }, + { 0, INS_ROL, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rcl", "", 0, 0, 0, INS_SET_CARRY|INS_SET_OFLOW|INS_TEST_CARRY, 0 }, + { 0, INS_ROR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rcr", "", 0, 0, 0, INS_SET_CARRY|INS_SET_OFLOW|INS_TEST_CARRY, 0 }, + { 0, INS_SHL, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "shl", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SHR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "shr", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SHL, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sal", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_SHR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_I | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sar", "", 0, 0, 0 , INS_SET_ALL, 0 } +}; + + +static ia32_insn_t tbl_D0[] = { /* Group 2c */ + { 0, INS_ROL, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_II | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rol", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW, 0 }, + { 0, INS_ROR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_II | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "ror", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW, 0 }, + { 0, INS_ROL, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_II | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rcl", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW|INS_TEST_CARRY, 0 }, + { 0, INS_ROR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_II | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rcr", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW|INS_TEST_CARRY, 0 }, + { 0, INS_SHL, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_II | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "shl", "", 0, 1, 0, INS_SET_ALL, 0 }, + { 0, INS_SHR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_II | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "shr", "", 0, 1, 0, INS_SET_ALL, 0 }, + { 0, INS_SHL, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_II | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sal", "", 0, 1, 0, INS_SET_ALL, 0 }, + { 0, INS_SHR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_II | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sar", "", 0, 1, 0 , INS_SET_ALL, 0 } +}; + + +static ia32_insn_t tbl_D1[] = { /* Group 2d */ + { 0, INS_ROL, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_II | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rol", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW, 0 }, + { 0, INS_ROR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_II | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "ror", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW, 0 }, + { 0, INS_ROL, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_II | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rcl", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW|INS_TEST_CARRY, 0 }, + { 0, INS_ROR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_II | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rcr", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW|INS_TEST_CARRY, 0 }, + { 0, INS_SHL, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_II | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "shl", "", 0, 1, 0, INS_SET_ALL, 0 }, + { 0, INS_SHR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_II | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "shr", "", 0, 1, 0, INS_SET_ALL, 0 }, + { 0, 
INS_SHL, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_II | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sal", "", 0, 1, 0, INS_SET_ALL, 0 }, + { 0, INS_SHR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_II | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sar", "", 0, 1, 0 , INS_SET_ALL, 0 } +}; + + +static ia32_insn_t tbl_D2[] = { /* Group 2e */ + { 0, INS_ROL, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rol", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW, 0 }, + { 0, INS_ROR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "ror", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW, 0 }, + { 0, INS_ROL, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rcl", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW|INS_TEST_CARRY, 0 }, + { 0, INS_ROR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rcr", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW|INS_TEST_CARRY, 0 }, + { 0, INS_SHL, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "shl", "", 0, 1, 0, INS_SET_ALL, 0 }, + { 0, INS_SHR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "shr", "", 0, 1, 0, INS_SET_ALL, 0 }, + { 0, INS_SHL, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sal", "", 0, 1, 0, INS_SET_ALL, 0 }, + { 0, INS_SHR, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sar", "", 0, 1, 0 , INS_SET_ALL, 0 } +}; + + +static ia32_insn_t tbl_D3[] = { /* Group 2f */ + { 0, INS_ROL, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rol", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW, 0 }, + { 0, INS_ROR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "ror", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW, 0 }, + { 0, INS_ROL, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rcl", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW|INS_TEST_CARRY, 0 }, + { 0, INS_ROR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "rcr", "", 0, 1, 0, INS_SET_CARRY|INS_SET_OFLOW|INS_TEST_CARRY, 0 }, + { 0, INS_SHL, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "shl", "", 0, 1, 0, INS_SET_ALL, 0 }, + { 0, INS_SHR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "shr", "", 0, 1, 0, INS_SET_ALL, 0 }, + { 0, INS_SHL, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sal", "", 0, 1, 0, INS_SET_ALL, 0 }, + { 0, INS_SHR, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ADDRMETH_RR | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "sar", "", 0, 1, 0 , INS_SET_ALL, 0 } +}; + + +static ia32_insn_t tbl_F6[] = { /* Group 3a */ + { 0, INS_TEST, 0, ADDRMETH_E | OPTYPE_b | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "test", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_TEST, 0, ADDRMETH_E | OPTYPE_b | OP_R, ADDRMETH_I | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "test", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_NOT, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | 
isa_GP, "not", "", 0, 0, 0, 0 , 0 }, + { 0, INS_NEG, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "neg", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_MUL, 0, OPTYPE_b | ADDRMETH_RR | OP_W | OP_R, ADDRMETH_E | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mul", "", 0, 0, 0, INS_SET_OFLOW|INS_SET_CARRY, 22 }, + { 0, INS_MUL, 0, OPTYPE_b | ADDRMETH_RR | OP_W | OP_SIGNED | OP_R, ADDRMETH_E | OPTYPE_b | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "imul", "", 0, 0, 0, INS_SET_OFLOW|INS_SET_CARRY, 22 }, + { 0, INS_DIV, 0, ADDRMETH_RR | OPTYPE_b | OP_W | OP_R, ADDRMETH_E | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "div", "", 0, 0, 0, 0 , 13 }, + { 0, INS_DIV, 0, ADDRMETH_RR | OPTYPE_b | OP_W | OP_R, ADDRMETH_E | OPTYPE_b | OP_R, ARG_NONE, cpu_80386 | isa_GP, "idiv", "", 0, 0, 0 , 0 , 13 } +}; + + +static ia32_insn_t tbl_F7[] = { /* Group 3b */ + { 0, INS_TEST, 0, ADDRMETH_E | OPTYPE_v | OP_R, ADDRMETH_I | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "test", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_TEST, 0, ADDRMETH_E | OPTYPE_v | OP_R, ADDRMETH_I | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "test", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_NOT, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "not", "", 0, 0, 0, 0 , 0 }, + { 0, INS_NEG, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "neg", "", 0, 0, 0, INS_SET_ALL, 0 }, + { 0, INS_MUL, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "mul", "", 0, 0, 0, INS_SET_OFLOW|INS_SET_CARRY, 23 }, + { 0, INS_MUL, 0, ADDRMETH_RR | OPTYPE_v | OP_SIGNED | OP_W | OP_R, ADDRMETH_E | OPTYPE_v | OP_SIGNED | OP_R, ARG_NONE, cpu_80386 | isa_GP, "imul", "", 0, 0, 0, INS_SET_OFLOW|INS_SET_CARRY, 23 }, + { 0, INS_DIV, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "div", "", 0, 0, 0, 0 , 14 }, + { 0, INS_DIV, 0, ADDRMETH_RR | OPTYPE_v | OP_W | OP_R, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, cpu_80386 | isa_GP, "idiv", "", 0, 0, 0, 0 , 14 } +}; + + +static ia32_insn_t tbl_FE[] = { /* Group 4 */ + { 0, INS_INC, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "inc", "", 0, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_DEC, 0, ADDRMETH_E | OPTYPE_b | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "dec", "", 0, 0, 0 , INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 } +}; + + +static ia32_insn_t tbl_FF[] = { /* Group 5 */ + { 0, INS_INC, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "inc", "", 0, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_DEC, 0, ADDRMETH_E | OPTYPE_v | OP_W | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "dec", "", 0, 0, 0, INS_SET_OFLOW|INS_SET_SIGN|INS_SET_ZERO|INS_SET_PARITY, 0 }, + { 0, INS_CALL, 0, ADDRMETH_E | OPTYPE_v | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "call", "", 0, 0, 0, 0 , 3 }, + { 0, INS_CALL, 0, ADDRMETH_M | OPTYPE_p | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "call", "", 0, 0, 0, 0 , 0 }, + { 0, INS_BRANCH, 0, ADDRMETH_E | OPTYPE_v | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jmp", "", 0, 0, 0, 0 , 0 }, + { 0, INS_BRANCH, 0, ADDRMETH_M | OPTYPE_p | OP_X, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "jmp", "", 0, 0, 0, 0 , 0 }, + { 0, INS_PUSH, 0, ADDRMETH_E | OPTYPE_v | OP_R, ARG_NONE, ARG_NONE, cpu_80386 | isa_GP, "push", "", 0, 0, 0, 0 , 33 } 
+}; + + +static ia32_insn_t tbl_D8[] = { /* FPU D8 */ + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fs|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fs|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fs|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fcom", "", 0, 0, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 17 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fs|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fcomp", "", 0, 0, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fs|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fs|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fs|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fs|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 0, 0, 0 , 0 , 0 } +}; + + +static ia32_insn_t tbl_D8C0[] = { /* FPU D8 C0 */ + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 0, 1, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 0, 2, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 0, 3, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 0, 4, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 0, 5, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 0, 6, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 0, 7, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 0, 1, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 0, 2, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 0, 3, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 0, 4, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 0, 5, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 0, 6, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 0, 7, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, 
cpu_80387 | isa_FPU, "fcom", "", 0, 0, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 17 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcom", "", 0, 1, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 17 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcom", "", 0, 2, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 17 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcom", "", 0, 3, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 17 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcom", "", 0, 4, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 17 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcom", "", 0, 5, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 17 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcom", "", 0, 6, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 17 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcom", "", 0, 7, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 17 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomp", "", 0, 0, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomp", "", 0, 1, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomp", "", 0, 2, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomp", "", 0, 3, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomp", "", 0, 4, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomp", "", 0, 5, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomp", "", 0, 6, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomp", "", 0, 7, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 0, 1, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 0, 2, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 0, 3, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | 
isa_FPU, "fsub", "", 0, 4, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 0, 5, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 0, 6, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 0, 7, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 0, 1, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 0, 2, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 0, 3, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 0, 4, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 0, 5, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 0, 6, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 0, 7, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 0, 1, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 0, 2, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 0, 3, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 0, 4, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 0, 5, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 0, 6, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 0, 7, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 0, 1, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 0, 2, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 0, 3, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 0, 4, 0 , 0 , 0 }, + { 0, INS_FPU, 0, 
ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 0, 5, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 0, 6, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 0, 7, 0 , 0 , 0 } +}; + + +static ia32_insn_t tbl_D9[] = { /* FPU D9 */ + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fs|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fld", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fs|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fst", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fs|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fstp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fv|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fldenv", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_w|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fldcw", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fv|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fnstenv", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_w|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fnstcw", "", 0, 0, 0 , 0 , 0 } +}; + + +static ia32_insn_t tbl_D9C0[] = { /* FPU D9 C0 */ + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fld", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fld", "", 0, 1, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fld", "", 0, 2, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fld", "", 0, 3, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fld", "", 0, 4, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fld", "", 0, 5, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fld", "", 0, 6, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fld", "", 0, 7, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fxch", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fxch", "", 0, 1, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fxch", "", 0, 2, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fxch", "", 0, 3, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fxch", "", 0, 4, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fxch", "", 0, 5, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | 
OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fxch", "", 0, 6, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fxch", "", 0, 7, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fnop", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fchs", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fabs", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "ftst", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fxam", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fld1", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fldl2t", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fldl2e", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fldpi", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fldlg2", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fldln2", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fldz", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "f2xm1", "", 0, 0, 
0 , 0 , 16 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fyl2x", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fptan", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fpatan", "", 0, 0, 0 , 0 , 18 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fxtract", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fprem1", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fdecstp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fincstp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fprem", "", 0, 0, 0 , 0 , 19 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fyl2xp1", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fsqrt", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fsincos", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "frndint", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fscale", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fsin", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fcos", "", 0, 0, 0 , 0 , 0 } +}; + + +static ia32_insn_t tbl_DA[] = { /* FPU DA */ + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_d|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fiadd", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_d|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fimul", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_d|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "ficom", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_d|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "ficomp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_d|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fisub", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_d|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fisubr", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_d|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fidiv", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_d|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fidivr", "", 0, 0, 0 , 0 , 0 } +}; + + +static ia32_insn_t tbl_DAC0[] = { /* FPU DA C0 */ + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovb", "", 0, 0, 0 , INS_TEST_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovb", "", 0, 1, 0 , INS_TEST_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovb", "", 0, 2, 0 , INS_TEST_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovb", "", 0, 3, 0 , INS_TEST_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovb", "", 0, 4, 0 , INS_TEST_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovb", "", 0, 5, 0 
, INS_TEST_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovb", "", 0, 6, 0 , INS_TEST_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovb", "", 0, 7, 0 , INS_TEST_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmove", "", 0, 0, 0 , INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmove", "", 0, 1, 0 , INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmove", "", 0, 2, 0 , INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmove", "", 0, 3, 0 , INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmove", "", 0, 4, 0 , INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmove", "", 0, 5, 0 , INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmove", "", 0, 6, 0 , INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmove", "", 0, 7, 0 , INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovbe", "", 0, 0, 0 , INS_TEST_CARRY|INS_TEST_OR|INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovbe", "", 0, 1, 0 , INS_TEST_CARRY|INS_TEST_OR|INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovbe", "", 0, 2, 0 , INS_TEST_CARRY|INS_TEST_OR|INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovbe", "", 0, 3, 0 , INS_TEST_CARRY|INS_TEST_OR|INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovbe", "", 0, 4, 0 , INS_TEST_CARRY|INS_TEST_OR|INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovbe", "", 0, 5, 0 , INS_TEST_CARRY|INS_TEST_OR|INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovbe", "", 0, 6, 0 , INS_TEST_CARRY|INS_TEST_OR|INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovbe", "", 0, 7, 0 , INS_TEST_CARRY|INS_TEST_OR|INS_TEST_ZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovu", "", 0, 0, 0 , INS_TEST_PARITY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovu", "", 0, 1, 0 , INS_TEST_PARITY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, 
"fcmovu", "", 0, 2, 0 , INS_TEST_PARITY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovu", "", 0, 3, 0 , INS_TEST_PARITY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovu", "", 0, 4, 0 , INS_TEST_PARITY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovu", "", 0, 5, 0 , INS_TEST_PARITY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovu", "", 0, 6, 0 , INS_TEST_PARITY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcmovu", "", 0, 7, 0 , INS_TEST_PARITY, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fucompp", "", 0, 0, 0 , 0 , 21 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, 
"", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 } +}; + + +static ia32_insn_t tbl_DB[] = { /* FPU DB */ + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_d|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fild", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_d|OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "fisttp", "", 0, 0, 0, 0, 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_d|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fist", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_d|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fistp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fe|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fld", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fe|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fstp", "", 0, 0, 0 , 0 , 0 } +}; + + +static ia32_insn_t tbl_DBC0[] = { /* FPU DB C0 */ + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnb", "", 0, 0, 0 , INS_TEST_NCARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnb", "", 0, 1, 0 , INS_TEST_NCARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnb", "", 0, 2, 0 , INS_TEST_NCARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnb", "", 0, 3, 0 , INS_TEST_NCARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnb", "", 0, 4, 0 , INS_TEST_NCARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnb", "", 0, 5, 0 , INS_TEST_NCARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnb", "", 0, 6, 0 , INS_TEST_NCARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnb", "", 0, 7, 0 , INS_TEST_NCARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovne", "", 0, 0, 0 , INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovne", "", 0, 1, 0 , INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovne", "", 0, 2, 0 , INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovne", "", 0, 3, 0 , INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp 
| OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovne", "", 0, 4, 0 , INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovne", "", 0, 5, 0 , INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovne", "", 0, 6, 0 , INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovne", "", 0, 7, 0 , INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnbe", "", 0, 0, 0 , INS_TEST_NCARRY|INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnbe", "", 0, 1, 0 , INS_TEST_NCARRY|INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnbe", "", 0, 2, 0 , INS_TEST_NCARRY|INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnbe", "", 0, 3, 0 , INS_TEST_NCARRY|INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnbe", "", 0, 4, 0 , INS_TEST_NCARRY|INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnbe", "", 0, 5, 0 , INS_TEST_NCARRY|INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnbe", "", 0, 6, 0 , INS_TEST_NCARRY|INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnbe", "", 0, 7, 0 , INS_TEST_NCARRY|INS_TEST_NZERO, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnu", "", 0, 0, 0 , INS_TEST_NPARITY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnu", "", 0, 1, 0 , INS_TEST_NPARITY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnu", "", 0, 2, 0 , INS_TEST_NPARITY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnu", "", 0, 3, 0 , INS_TEST_NPARITY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnu", "", 0, 4, 0 , INS_TEST_NPARITY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnu", "", 0, 5, 0 , INS_TEST_NPARITY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnu", "", 0, 6, 0 , INS_TEST_NPARITY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcmovnu", "", 0, 7, 0 , INS_TEST_NPARITY, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, 
"fnclex", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fninit", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomi", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomi", "", 0, 1, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomi", "", 0, 2, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomi", "", 0, 3, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomi", "", 0, 4, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomi", "", 0, 5, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomi", "", 0, 6, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomi", "", 0, 7, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcomi", "", 0, 0, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0, }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcomi", "", 0, 1, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcomi", "", 0, 2, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcomi", "", 0, 3, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcomi", "", 0, 4, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcomi", "", 0, 5, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcomi", "", 0, 6, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_PENTPRO | isa_GP, "fcomi", "", 0, 7, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, 
ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 } +}; + + +static ia32_insn_t tbl_DC[] = { /* FPU DC */ + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fd|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fd|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fd|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fcom", "", 0, 0, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 17 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fd|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fcomp", "", 0, 0, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fd|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fd|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fd|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fd|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_DCC0[] = { /* FPU DC C0 */ + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 5, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fadd", "", 7, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | 
OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 5, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmul", "", 7, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 5, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubr", "", 7, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | 
isa_FPU, "fsub", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 5, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsub", "", 7, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 5, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivr", "", 7, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 5, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdiv", "", 7, 0, 0 , 0 , 0 } +}; + + +static ia32_insn_t tbl_DD[] = { /* FPU DD */ + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fd|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fld", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_q|OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "fisttp", "", 0, 0, 0, 0, 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fd|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fst", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fd|OP_W, ARG_NONE, 
ARG_NONE, cpu_80387 | isa_FPU, "fstp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_ft|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "frstor", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_ft|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fnsave", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_w|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fnstsw", "", 0, 0, 0 , 0 , 0 } +}; + + +static ia32_insn_t tbl_DDC0[] = { /* FPU DD C0 */ + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "ffree", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "ffree", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "ffree", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "ffree", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "ffree", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "ffree", "", 5, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "ffree", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "ffree", "", 7, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fst", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fst", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fst", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fst", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fst", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fst", "", 5, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fst", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fst", "", 7, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fstp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, 
cpu_80387 | isa_FPU, "fstp", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fstp", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fstp", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fstp", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fstp", "", 5, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fstp", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fstp", "", 7, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucom", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucom", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucom", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucom", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucom", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucom", "", 5, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucom", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucom", "", 7, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomp", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomp", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomp", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomp", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomp", "", 5, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomp", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomp", "", 7, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, 
INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 } +}; + + +static ia32_insn_t tbl_DE[] = { /* FPU DE */ + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_w|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fiadd", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_w|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fimul", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_w|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "ficom", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_w|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "ficomp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_w|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fisub", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_w|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fisubr", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_w|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fidiv", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_w|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fidivr", "", 0, 0, 0, 0 , 0 } +}; + + +static ia32_insn_t tbl_DEC0[] = { /* FPU DE C0 */ + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "faddp", "", 0, 0, 0 , 0 , 20 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "faddp", "", 1, 0, 0 , 0 , 20 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "faddp", "", 2, 0, 0 , 0 , 20 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "faddp", "", 3, 0, 0 , 0 , 20 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "faddp", "", 4, 0, 0 , 0 , 20 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "faddp", "", 5, 0, 0 , 0 , 20 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "faddp", "", 6, 0, 0 , 0 , 20 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "faddp", "", 7, 0, 0 , 0 , 20 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, 
"fmulp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmulp", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmulp", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmulp", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmulp", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmulp", "", 5, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmulp", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fmulp", "", 7, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fcompp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubrp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubrp", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubrp", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubrp", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubrp", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubrp", "", 5, 0, 0 , 0 , 0 }, 
+ { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubrp", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubrp", "", 7, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubp", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubp", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubp", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubp", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubp", "", 5, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubp", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fsubp", "", 7, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivrp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivrp", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivrp", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivrp", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivrp", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivrp", "", 5, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivrp", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivrp", "", 7, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivp", "", 1, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivp", "", 2, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivp", "", 3, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivp", "", 4, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivp", "", 5, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | 
OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivp", "", 6, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fdivp", "", 7, 0, 0 , 0 , 0 } +}; + + +static ia32_insn_t tbl_DF[] = { /* FPU DF */ + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_w|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fild", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_w|OP_W, ARG_NONE, ARG_NONE, cpu_PENTIUM4 | isa_GP, "fisttp", "", 0, 0, 0, 0, 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_w|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fist", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_w|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fistp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fb|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fbld", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_q|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fild", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_fb|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fbstp", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_M|OPTYPE_q|OP_W, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fistp", "", 0, 0, 0 , 0 , 0 } +}; + + +static ia32_insn_t tbl_DFC0[] = { /* FPU DF C0 */ + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 
0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RR | OPTYPE_w | OP_R, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "fnstsw", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomip", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomip", "", 0, 1, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomip", "", 0, 2, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomip", "", 0, 3, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomip", "", 0, 4, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomip", "", 0, 5, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomip", "", 0, 6, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fucomip", "", 0, 7, 0 , 0 , 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomip", "", 0, 0, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomip", "", 0, 1, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomip", "", 0, 2, 0 , 
INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomip", "", 0, 3, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomip", "", 0, 4, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomip", "", 0, 5, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomip", "", 0, 6, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_FPU, 0, ADDRMETH_RF | OPTYPE_fp | OP_W, ADDRMETH_RF | OPTYPE_fp | OP_R, ARG_NONE, cpu_80387 | isa_FPU, "fcomip", "", 0, 7, 0 , INS_SET_ZERO|INS_SET_PARITY|INS_SET_CARRY, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_80387 | isa_FPU, "", "", 0, 0, 0 , 0 , 0 } +}; + + +static ia32_insn_t tbl_0F0F[] = { /* 3D Now! 
0F Suffix */ + /* 00 */ { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_CONV, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pi2fd", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + /* 10 */ { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_CONV, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pf2id", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + /* 20 */ { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 
0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + /* 30 */ { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + /* 40 */ { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 
0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + /* 50 */ { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + /* 60 */ { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 
0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + /* 70 */ { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + /* 80 */ { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, 
INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_CMP, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pfcmpge", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_MIN, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pfmin", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_ARITH, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pfrcp", "", 0, 0, 0, 0, 0 }, + { 0, INS_ARITH, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pfrsqrt", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_SUB, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pfsub", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_ADD, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pfadd", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_CMP, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pfcmpgt", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_MAX, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pfmax", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_ARITH, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pfrcpit1", "", 0, 0, 0, 0, 0 }, + { 0, INS_ARITH, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pfrsqit1", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_SUB, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pfsubr", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, 
"", "", 0, 0, 0, 0, 0 }, + { 0, INS_ADD, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pfacc", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_CMP, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pfcmpeq", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_MUL, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pfmul", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_ARITH, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pfrcpit2", "", 0, 0, 0, 0, 0 }, + { 0, INS_MUL, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pmulhrw", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_INVALID, 0, ARG_NONE, ARG_NONE, ARG_NONE, cpu_K6 | isa_3DNOW, "", "", 0, 0, 0, 0, 0 }, + { 0, INS_AVG, 0, ADDRMETH_P | OPTYPE_pi | OP_R | OP_W, ADDRMETH_Q | OPTYPE_q |OP_R, ARG_NONE, cpu_K6 | isa_3DNOW, "pavgusb", "", 0, 0, 0, 0, 0 } +}; + + + +/* ================== Table of Opcode Tables ================== */ +ia32_table_desc_t ia32_tables[] = { + /* table, prefix table, type, shift, mask, min, max */ + { tbl_Main, tbl_opcode, 0x00, 0xFF, 0x00, 0xFF }, + { tbl_66, tbl_prefix, 0x00, 0xFF, 0x0F, 0x0F }, + { tbl_F2, tbl_prefix, 0x00, 0xFF, 0x0F, 0x0F }, + { tbl_F3, tbl_prefix, 0x00, 0xFF, 0x0F, 0x90 }, + { tbl_0F, tbl_opcode, 0x00, 0xFF, 0x00, 0xFF }, + /* 5 */ + { tbl_660F, tbl_prefix, 0x00, 0xFF, 0x10, 0xFF }, + { tbl_F20F, tbl_prefix, 0x00, 0xFF, 0x10, 0xFF }, + { tbl_F30F, tbl_prefix, 0x00, 0xFF, 0x10, 0xFF }, + { tbl_0F00, tbl_extension, 0x03, 0x07, 0x00, 0x07 }, + { tbl_0F01, tbl_extension, 0x03, 0x1F, 0x00, 0x1F }, + /* 10 */ + { tbl_0F0111, tbl_ext_ext, 0x00, 0x01, 0x00, 0x01 }, + { tbl_0F12, tbl_extension, 0x06, 0x03, 0x00, 0x03 }, + { tbl_0F16, tbl_extension, 0x06, 0x03, 0x00, 0x03 }, + { tbl_0F18, tbl_extension, 0x03, 0x1F, 0x00, 0x13 }, + { tbl_0F71, tbl_extension, 0x03, 0x1F, 0x00, 0x1F }, + /* 15 */ + { tbl_660F71, tbl_extension, 0x03, 0x1F, 0x00, 0x1F }, + { tbl_0F72, tbl_extension, 0x03, 0x1F, 0x00, 0x1F }, + { tbl_660F72, tbl_extension, 0x03, 0x1F, 0x00, 0x1F }, + { tbl_0F73, tbl_extension, 0x00, 0x00, 0x00, 0x00 }, + { tbl_660F73, tbl_extension, 0x03, 0x1F, 0x00, 0x1F }, + /* 20 */ + { tbl_0FAE, tbl_extension, 0x03, 0x1F, 0x00, 0x1F }, + { tbl_0FBA, tbl_extension, 0x03, 0x07, 0x04, 0x07 }, + { tbl_0FC7, tbl_extension, 0x03, 0x1F, 0x00, 0x11 }, + { tbl_0FB9, 
tbl_extension, 0x03, 0x07, 0x00, 0x00 }, + { tbl_C6, tbl_extension, 0x03, 0x07, 0x00, 0x00 }, + /* 25 */ + { tbl_C7, tbl_extension, 0x03, 0x07, 0x00, 0x00 }, + { tbl_80, tbl_extension, 0x03, 0x07, 0x00, 0x07 }, + { tbl_81, tbl_extension, 0x03, 0x07, 0x00, 0x07 }, + { tbl_82, tbl_extension, 0x03, 0x07, 0x00, 0x07 }, + { tbl_83, tbl_extension, 0x03, 0x07, 0x00, 0x07 }, + /* 30 */ + { tbl_C0, tbl_extension, 0x03, 0x07, 0x00, 0x07 }, + { tbl_C1, tbl_extension, 0x03, 0x07, 0x00, 0x07 }, + { tbl_D0, tbl_extension, 0x03, 0x07, 0x00, 0x07 }, + { tbl_D1, tbl_extension, 0x03, 0x07, 0x00, 0x07 }, + { tbl_D2, tbl_extension, 0x03, 0x07, 0x00, 0x07 }, + /* 35 */ + { tbl_D3, tbl_extension, 0x03, 0x07, 0x00, 0x07 }, + { tbl_F6, tbl_extension, 0x03, 0x07, 0x00, 0x07 }, + { tbl_F7, tbl_extension, 0x03, 0x07, 0x00, 0x07 }, + { tbl_FE, tbl_extension, 0x03, 0x07, 0x00, 0x01 }, + { tbl_FF, tbl_extension, 0x03, 0x07, 0x00, 0x06 }, + /* 40 */ + { tbl_D8, tbl_fpu, 0x03, 0x07, 0x00, 0xBF }, + { tbl_D8C0, tbl_fpu_ext, 0x00, 0xFF, 0xC0, 0xFF }, + { tbl_D9, tbl_fpu, 0x03, 0x07, 0x00, 0xBF }, + { tbl_D9C0, tbl_fpu_ext, 0x00, 0xFF, 0xC0, 0xFF }, + { tbl_DA, tbl_fpu, 0x03, 0x07, 0x00, 0xBF }, + /* 45 */ + { tbl_DAC0, tbl_fpu_ext, 0x00, 0xFF, 0xC0, 0xFF }, + { tbl_DB, tbl_fpu, 0x03, 0x07, 0x00, 0xBF }, + { tbl_DBC0, tbl_fpu_ext, 0x00, 0xFF, 0xC0, 0xFF }, + { tbl_DC, tbl_fpu, 0x03, 0x07, 0x00, 0xBF }, + { tbl_DCC0, tbl_fpu_ext, 0x00, 0xFF, 0xC0, 0xFF }, + /* 50 */ + { tbl_DD, tbl_fpu, 0x03, 0x07, 0x00, 0xBF }, + { tbl_DDC0, tbl_fpu_ext, 0x00, 0xFF, 0xC0, 0xFF }, + { tbl_DE, tbl_fpu, 0x03, 0x07, 0x00, 0xBF }, + { tbl_DEC0, tbl_fpu_ext, 0x00, 0xFF, 0xC0, 0xFF }, + { tbl_DF, tbl_fpu, 0x03, 0x07, 0x00, 0xBF }, + /* 55 */ + { tbl_DFC0, tbl_fpu_ext, 0x00, 0xFF, 0xC0, 0xFF }, + { tbl_0F0F, tbl_suffix, 0x00, 0xFF, 0x00, 0xBF } +}; +/* ia32_opcode_tables.h */ +/* Table index constants: +#define idx_Main 0 +#define idx_66 1 +#define idx_F2 2 +#define idx_F3 3 +#define idx_0F 4 +#define idx_660F 5 +#define idx_F20F 6 +#define idx_F30F 7 +#define idx_0F00 8 +#define idx_0F01 9 +#define idx_0F0111 10 +#define idx_0F12 11 +#define idx_0F16 12 +#define idx_0F18 13 +#define idx_0F71 14 +#define idx_660F71 15 +#define idx_0F72 16 +#define idx_660F72 17 +#define idx_0F73 18 +#define idx_660F73 19 +#define idx_0FAE 20 +#define idx_0FBA 21 +#define idx_0FC7 22 +#define idx_0FB9 23 +#define idx_C6 24 +#define idx_C7 25 +#define idx_80 26 +#define idx_81 27 +#define idx_82 28 +#define idx_83 29 +#define idx_C0 30 +#define idx_C1 31 +#define idx_D0 32 +#define idx_D1 33 +#define idx_D2 34 +#define idx_D3 35 +#define idx_F6 36 +#define idx_F7 37 +#define idx_FE 38 +#define idx_FF 39 +#define idx_D8 40 +#define idx_D8C0 41 +#define idx_D9 42 +#define idx_D9C0 43 +#define idx_DA 44 +#define idx_DAC0 45 +#define idx_DB 46 +#define idx_DBC0 47 +#define idx_DC 48 +#define idx_DCC0 49 +#define idx_DD 50 +#define idx_DDC0 51 +#define idx_DE 52 +#define idx_DEC0 53 +#define idx_DF 54 +#define idx_DFC0 55 +#define idx_0F0F 56 +*/ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_opcode_tables.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_opcode_tables.h new file mode 100644 index 0000000000..bbd4fae9ab --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_opcode_tables.h @@ -0,0 +1,57 @@ +#define idx_Main 0 +#define idx_66 1 +#define idx_F2 2 +#define idx_F3 3 +#define idx_0F 4 +#define idx_660F 5 +#define idx_F20F 6 +#define idx_F30F 7 +#define idx_0F00 8 
+#define idx_0F01 9 +#define idx_0F0111 10 +#define idx_0F12 11 +#define idx_0F16 12 +#define idx_0F18 13 +#define idx_0F71 14 +#define idx_660F71 15 +#define idx_0F72 16 +#define idx_660F72 17 +#define idx_0F73 18 +#define idx_660F73 19 +#define idx_0FAE 20 +#define idx_0FBA 21 +#define idx_0FC7 22 +#define idx_0FB9 23 +#define idx_C6 24 +#define idx_C7 25 +#define idx_80 26 +#define idx_81 27 +#define idx_82 28 +#define idx_83 29 +#define idx_C0 30 +#define idx_C1 31 +#define idx_D0 32 +#define idx_D1 33 +#define idx_D2 34 +#define idx_D3 35 +#define idx_F6 36 +#define idx_F7 37 +#define idx_FE 38 +#define idx_FF 39 +#define idx_D8 40 +#define idx_D8C0 41 +#define idx_D9 42 +#define idx_D9C0 43 +#define idx_DA 44 +#define idx_DAC0 45 +#define idx_DB 46 +#define idx_DBC0 47 +#define idx_DC 48 +#define idx_DCC0 49 +#define idx_DD 50 +#define idx_DDC0 51 +#define idx_DE 52 +#define idx_DEC0 53 +#define idx_DF 54 +#define idx_DFC0 55 +#define idx_0F0F 56 diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_operand.c b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_operand.c new file mode 100644 index 0000000000..8e7f16a0c0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_operand.c @@ -0,0 +1,425 @@ +#include +#include +#include + +#include "libdis.h" +#include "ia32_insn.h" +#include "ia32_operand.h" +#include "ia32_modrm.h" +#include "ia32_reg.h" +#include "x86_imm.h" +#include "x86_operand_list.h" + + + +/* apply segment override to memory operand in insn */ +static void apply_seg( x86_op_t *op, unsigned int prefixes ) { + if (! prefixes ) return; + + /* apply overrides from prefix */ + switch ( prefixes & PREFIX_REG_MASK ) { + case PREFIX_CS: + op->flags |= op_cs_seg; break; + case PREFIX_SS: + op->flags |= op_ss_seg; break; + case PREFIX_DS: + op->flags |= op_ds_seg; break; + case PREFIX_ES: + op->flags |= op_es_seg; break; + case PREFIX_FS: + op->flags |= op_fs_seg; break; + case PREFIX_GS: + op->flags |= op_gs_seg; break; + } + + return; +} + +static size_t decode_operand_value( unsigned char *buf, size_t buf_len, + x86_op_t *op, x86_insn_t *insn, + unsigned int addr_meth, size_t op_size, + unsigned int op_value, unsigned char modrm, + size_t gen_regs ) { + size_t size = 0; + + /* ++ Do Operand Addressing Method / Decode operand ++ */ + switch (addr_meth) { + /* This sets the operand Size based on the Intel Opcode Map + * (Vol 2, Appendix A). Letter encodings are from section + * A.1.1, 'Codes for Addressing Method' */ + + /* ---------------------- Addressing Method -------------- */ + /* Note that decoding mod ModR/M operand adjusts the size of + * the instruction, but decoding the reg operand does not. + * This should not cause any problems, as every 'reg' operand + * has an associated 'mod' operand. + * Goddamn-Intel-Note: + * Some Intel addressing methods [M, R] specify that modR/M + * byte may only refer to a memory address/may only refer to + * a register -- however Intel provides no clues on what to do + * if, say, the modR/M for an M opcode decodes to a register + * rather than a memory address ... returning 0 is out of the + * question, as this would be an Immediate or a RelOffset, so + * instead these modR/Ms are decoded with total disregard to + * the M, R constraints. */ + + /* MODRM -- mod operand. sets size to at least 1! 
*/ + case ADDRMETH_E: /* ModR/M present, Gen reg or memory */ + size = ia32_modrm_decode( buf, buf_len, op, insn, + gen_regs ); + break; + case ADDRMETH_M: /* ModR/M only refers to memory */ + size = ia32_modrm_decode( buf, buf_len, op, insn, + gen_regs ); + break; + case ADDRMETH_Q: /* ModR/M present, MMX or Memory */ + size = ia32_modrm_decode( buf, buf_len, op, insn, + REG_MMX_OFFSET ); + break; + case ADDRMETH_R: /* ModR/M mod == gen reg */ + size = ia32_modrm_decode( buf, buf_len, op, insn, + gen_regs ); + break; + case ADDRMETH_W: /* ModR/M present, mem or SIMD reg */ + size = ia32_modrm_decode( buf, buf_len, op, insn, + REG_SIMD_OFFSET ); + break; + + /* MODRM -- reg operand. does not effect size! */ + case ADDRMETH_C: /* ModR/M reg == control reg */ + ia32_reg_decode( modrm, op, REG_CTRL_OFFSET ); + break; + case ADDRMETH_D: /* ModR/M reg == debug reg */ + ia32_reg_decode( modrm, op, REG_DEBUG_OFFSET ); + break; + case ADDRMETH_G: /* ModR/M reg == gen-purpose reg */ + ia32_reg_decode( modrm, op, gen_regs ); + break; + case ADDRMETH_P: /* ModR/M reg == qword MMX reg */ + ia32_reg_decode( modrm, op, REG_MMX_OFFSET ); + break; + case ADDRMETH_S: /* ModR/M reg == segment reg */ + ia32_reg_decode( modrm, op, REG_SEG_OFFSET ); + break; + case ADDRMETH_T: /* ModR/M reg == test reg */ + ia32_reg_decode( modrm, op, REG_TEST_OFFSET ); + break; + case ADDRMETH_V: /* ModR/M reg == SIMD reg */ + ia32_reg_decode( modrm, op, REG_SIMD_OFFSET ); + break; + + /* No MODRM : note these set operand type explicitly */ + case ADDRMETH_A: /* No modR/M -- direct addr */ + op->type = op_absolute; + + /* segment:offset address used in far calls */ + x86_imm_sized( buf, buf_len, + &op->data.absolute.segment, 2 ); + if ( insn->addr_size == 4 ) { + x86_imm_sized( buf, buf_len, + &op->data.absolute.offset.off32, 4 ); + size = 6; + } else { + x86_imm_sized( buf, buf_len, + &op->data.absolute.offset.off16, 2 ); + size = 4; + } + + break; + case ADDRMETH_I: /* Immediate val */ + op->type = op_immediate; + /* if it ever becomes legal to have imm as dest and + * there is a src ModR/M operand, we are screwed! */ + if ( op->flags & op_signed ) { + x86_imm_signsized(buf, buf_len, &op->data.byte, + op_size); + } else { + x86_imm_sized(buf, buf_len, &op->data.byte, + op_size); + } + size = op_size; + break; + case ADDRMETH_J: /* Rel offset to add to IP [jmp] */ + /* this fills op->data.near_offset or + op->data.far_offset depending on the size of + the operand */ + op->flags |= op_signed; + if ( op_size == 1 ) { + /* one-byte near offset */ + op->type = op_relative_near; + x86_imm_signsized(buf, buf_len, + &op->data.relative_near, 1); + } else { + /* far offset...is this truly signed? */ + op->type = op_relative_far; + x86_imm_signsized(buf, buf_len, + &op->data.relative_far, op_size ); + } + size = op_size; + break; + case ADDRMETH_O: /* No ModR/M; op is word/dword offset */ + /* NOTE: these are actually RVAs not offsets to seg!! 
*/ + /* note bene: 'O' ADDR_METH uses addr_size to + determine operand size */ + op->type = op_offset; + op->flags |= op_pointer; + x86_imm_sized( buf, buf_len, &op->data.offset, + insn->addr_size ); + + size = insn->addr_size; + break; + + /* Hard-coded: these are specified in the insn definition */ + case ADDRMETH_F: /* EFLAGS register */ + op->type = op_register; + op->flags |= op_hardcode; + ia32_handle_register( &op->data.reg, REG_FLAGS_INDEX ); + break; + case ADDRMETH_X: /* Memory addressed by DS:SI [string] */ + op->type = op_expression; + op->flags |= op_hardcode; + op->flags |= op_ds_seg | op_pointer | op_string; + ia32_handle_register( &op->data.expression.base, + REG_DWORD_OFFSET + 6 ); + break; + case ADDRMETH_Y: /* Memory addressed by ES:DI [string] */ + op->type = op_expression; + op->flags |= op_hardcode; + op->flags |= op_es_seg | op_pointer | op_string; + ia32_handle_register( &op->data.expression.base, + REG_DWORD_OFFSET + 7 ); + break; + case ADDRMETH_RR: /* Gen Register hard-coded in opcode */ + op->type = op_register; + op->flags |= op_hardcode; + ia32_handle_register( &op->data.reg, + op_value + gen_regs ); + break; + case ADDRMETH_RS: /* Seg Register hard-coded in opcode */ + op->type = op_register; + op->flags |= op_hardcode; + ia32_handle_register( &op->data.reg, + op_value + REG_SEG_OFFSET ); + break; + case ADDRMETH_RF: /* FPU Register hard-coded in opcode */ + op->type = op_register; + op->flags |= op_hardcode; + ia32_handle_register( &op->data.reg, + op_value + REG_FPU_OFFSET ); + break; + case ADDRMETH_RT: /* TST Register hard-coded in opcode */ + op->type = op_register; + op->flags |= op_hardcode; + ia32_handle_register( &op->data.reg, + op_value + REG_TEST_OFFSET ); + break; + case ADDRMETH_II: /* Immediate hard-coded in opcode */ + op->type = op_immediate; + op->data.dword = op_value; + op->flags |= op_hardcode; + break; + + case 0: /* Operand is not used */ + default: + /* ignore -- operand not used in this insn */ + op->type = op_unused; /* this shouldn't happen! */ + break; + } + + return size; +} + +static size_t decode_operand_size( unsigned int op_type, x86_insn_t *insn, + x86_op_t *op ){ + size_t size; + + /* ++ Do Operand Type ++ */ + switch (op_type) { + /* This sets the operand Size based on the Intel Opcode Map + * (Vol 2, Appendix A). Letter encodings are from section + * A.1.2, 'Codes for Operand Type' */ + /* NOTE: in this routines, 'size' refers to the size + * of the operand in the raw (encoded) instruction; + * 'datatype' stores the actual size and datatype + * of the operand */ + + /* ------------------------ Operand Type ----------------- */ + case OPTYPE_c: /* byte or word [op size attr] */ + size = (insn->op_size == 4) ? 2 : 1; + op->datatype = (size == 4) ? op_word : op_byte; + break; + case OPTYPE_a: /* 2 word or 2 dword [op size attr] */ + /* pointer to a 16:16 or 32:32 BOUNDS operand */ + size = (insn->op_size == 4) ? 8 : 4; + op->datatype = (size == 4) ? op_bounds32 : op_bounds16; + break; + case OPTYPE_v: /* word or dword [op size attr] */ + size = (insn->op_size == 4) ? 4 : 2; + op->datatype = (size == 4) ? op_dword : op_word; + break; + case OPTYPE_p: /* 32/48-bit ptr [op size attr] */ + /* technically these flags are not accurate: the + * value s a 16:16 pointer or a 16:32 pointer, where + * the first '16' is a segment */ + size = (insn->addr_size == 4) ? 6 : 4; + op->datatype = (size == 4) ? 
op_descr32 : op_descr16; + break; + case OPTYPE_b: /* byte, ignore op-size */ + size = 1; + op->datatype = op_byte; + break; + case OPTYPE_w: /* word, ignore op-size */ + size = 2; + op->datatype = op_word; + break; + case OPTYPE_d: /* dword , ignore op-size */ + size = 4; + op->datatype = op_dword; + break; + case OPTYPE_s: /* 6-byte psuedo-descriptor */ + /* ptr to 6-byte value which is 32:16 in 32-bit + * mode, or 8:24:16 in 16-bit mode. The high byte + * is ignored in 16-bit mode. */ + size = 6; + op->datatype = (insn->addr_size == 4) ? + op_pdescr32 : op_pdescr16; + break; + case OPTYPE_q: /* qword, ignore op-size */ + size = 8; + op->datatype = op_qword; + break; + case OPTYPE_dq: /* d-qword, ignore op-size */ + size = 16; + op->datatype = op_dqword; + break; + case OPTYPE_ps: /* 128-bit FP data */ + size = 16; + /* really this is 4 packed SP FP values */ + op->datatype = op_ssimd; + break; + case OPTYPE_pd: /* 128-bit FP data */ + size = 16; + /* really this is 2 packed DP FP values */ + op->datatype = op_dsimd; + break; + case OPTYPE_ss: /* Scalar elem of 128-bit FP data */ + size = 16; + /* this only looks at the low dword (4 bytes) + * of the xmmm register passed as a param. + * This is a 16-byte register where only 4 bytes + * are used in the insn. Painful, ain't it? */ + op->datatype = op_sssimd; + break; + case OPTYPE_sd: /* Scalar elem of 128-bit FP data */ + size = 16; + /* this only looks at the low qword (8 bytes) + * of the xmmm register passed as a param. + * This is a 16-byte register where only 8 bytes + * are used in the insn. Painful, again... */ + op->datatype = op_sdsimd; + break; + case OPTYPE_pi: /* qword mmx register */ + size = 8; + op->datatype = op_qword; + break; + case OPTYPE_si: /* dword integer register */ + size = 4; + op->datatype = op_dword; + break; + case OPTYPE_fs: /* single-real */ + size = 4; + op->datatype = op_sreal; + break; + case OPTYPE_fd: /* double real */ + size = 8; + op->datatype = op_dreal; + break; + case OPTYPE_fe: /* extended real */ + size = 10; + op->datatype = op_extreal; + break; + case OPTYPE_fb: /* packed BCD */ + size = 10; + op->datatype = op_bcd; + break; + case OPTYPE_fv: /* pointer to FPU env: 14 or 28-bytes */ + size = (insn->addr_size == 4)? 28 : 14; + op->datatype = (size == 28)? op_fpuenv32: op_fpuenv16; + break; + case OPTYPE_ft: /* pointer to FPU env: 94 or 108 bytes */ + size = (insn->addr_size == 4)? 108 : 94; + op->datatype = (size == 108)? + op_fpustate32: op_fpustate16; + break; + case OPTYPE_fx: /* 512-byte register stack */ + size = 512; + op->datatype = op_fpregset; + break; + case OPTYPE_fp: /* floating point register */ + size = 10; /* double extended precision */ + op->datatype = op_fpreg; + break; + case OPTYPE_m: /* fake operand type used for "lea Gv, M" */ + size = insn->addr_size; + op->datatype = (size == 4) ? op_dword : op_word; + break; + case OPTYPE_none: /* handle weird instructions that have no encoding but use a dword datatype, like invlpg */ + size = 0; + op->datatype = op_none; + break; + case 0: + default: + size = insn->op_size; + op->datatype = (size == 4) ? 
op_dword : op_word; + break; + } + return size; +} + +size_t ia32_decode_operand( unsigned char *buf, size_t buf_len, + x86_insn_t *insn, unsigned int raw_op, + unsigned int raw_flags, unsigned int prefixes, + unsigned char modrm ) { + unsigned int addr_meth, op_type, op_size, gen_regs; + x86_op_t *op; + size_t size; + + /* ++ Yank optype and addr mode out of operand flags */ + addr_meth = raw_flags & ADDRMETH_MASK; + op_type = raw_flags & OPTYPE_MASK; + + if ( raw_flags == ARG_NONE ) { + /* operand is not used in this instruction */ + return 0; + } + + /* allocate a new operand */ + op = x86_operand_new( insn ); + + /* ++ Copy flags from opcode table to x86_insn_t */ + op->access = (enum x86_op_access) OP_PERM(raw_flags); + op->flags = (enum x86_op_flags) (OP_FLAGS(raw_flags) >> 12); + + /* Get size (for decoding) and datatype of operand */ + op_size = decode_operand_size(op_type, insn, op); + + /* override default register set based on Operand Type */ + /* this allows mixing of 8, 16, and 32 bit regs in insn */ + if (op_size == 1) { + gen_regs = REG_BYTE_OFFSET; + } else if (op_size == 2) { + gen_regs = REG_WORD_OFFSET; + } else { + gen_regs = REG_DWORD_OFFSET; + } + + size = decode_operand_value( buf, buf_len, op, insn, addr_meth, + op_size, raw_op, modrm, gen_regs ); + + /* if operand is an address, apply any segment override prefixes */ + if ( op->type == op_expression || op->type == op_offset ) { + apply_seg(op, prefixes); + } + + return size; /* return number of bytes in instruction */ +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_operand.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_operand.h new file mode 100644 index 0000000000..08c3074cd7 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_operand.h @@ -0,0 +1,11 @@ +#ifndef IA32_OPERAND_H +#define IA32_OPERAND_H + +#include "libdis.h" +#include "ia32_insn.h" + +size_t ia32_decode_operand( unsigned char *buf, size_t buf_len, + x86_insn_t *insn, unsigned int raw_op, + unsigned int raw_flags, unsigned int prefixes, + unsigned char modrm ); +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_reg.c b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_reg.c new file mode 100644 index 0000000000..f270c1f346 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_reg.c @@ -0,0 +1,234 @@ +#include +#include + +#include "ia32_reg.h" +#include "ia32_insn.h" + +#define NUM_X86_REGS 92 + +/* register sizes */ +#define REG_DWORD_SIZE 4 +#define REG_WORD_SIZE 2 +#define REG_BYTE_SIZE 1 +#define REG_MMX_SIZE 8 +#define REG_SIMD_SIZE 16 +#define REG_DEBUG_SIZE 4 +#define REG_CTRL_SIZE 4 +#define REG_TEST_SIZE 4 +#define REG_SEG_SIZE 2 +#define REG_FPU_SIZE 10 +#define REG_FLAGS_SIZE 4 +#define REG_FPCTRL_SIZE 2 +#define REG_FPSTATUS_SIZE 2 +#define REG_FPTAG_SIZE 2 +#define REG_EIP_SIZE 4 +#define REG_IP_SIZE 2 + +/* REGISTER ALIAS TABLE: + * + * NOTE: the MMX register mapping is fixed to the physical registers + * used by the FPU. The floating FP stack does not effect the location + * of the MMX registers, so this aliasing is not 100% accurate. 
+ * */ +static struct { + unsigned char alias; /* id of register this is an alias for */ + unsigned char shift; /* # of bits register must be shifted */ +} ia32_reg_aliases[] = { + { 0,0 }, + { REG_DWORD_OFFSET, 0 }, /* al : 1 */ + { REG_DWORD_OFFSET, 8 }, /* ah : 2 */ + { REG_DWORD_OFFSET, 0 }, /* ax : 3 */ + { REG_DWORD_OFFSET + 1, 0 }, /* cl : 4 */ + { REG_DWORD_OFFSET + 1, 8 }, /* ch : 5 */ + { REG_DWORD_OFFSET + 1, 0 }, /* cx : 6 */ + { REG_DWORD_OFFSET + 2, 0 }, /* dl : 7 */ + { REG_DWORD_OFFSET + 2, 8 }, /* dh : 8 */ + { REG_DWORD_OFFSET + 2, 0 }, /* dx : 9 */ + { REG_DWORD_OFFSET + 3, 0 }, /* bl : 10 */ + { REG_DWORD_OFFSET + 3, 8 }, /* bh : 11 */ + { REG_DWORD_OFFSET + 3, 0 }, /* bx : 12 */ + { REG_DWORD_OFFSET + 4, 0 }, /* sp : 13 */ + { REG_DWORD_OFFSET + 5, 0 }, /* bp : 14 */ + { REG_DWORD_OFFSET + 6, 0 }, /* si : 15 */ + { REG_DWORD_OFFSET + 7, 0 }, /* di : 16 */ + { REG_EIP_INDEX, 0 }, /* ip : 17 */ + { REG_FPU_OFFSET, 0 }, /* mm0 : 18 */ + { REG_FPU_OFFSET + 1, 0 }, /* mm1 : 19 */ + { REG_FPU_OFFSET + 2, 0 }, /* mm2 : 20 */ + { REG_FPU_OFFSET + 3, 0 }, /* mm3 : 21 */ + { REG_FPU_OFFSET + 4, 0 }, /* mm4 : 22 */ + { REG_FPU_OFFSET + 5, 0 }, /* mm5 : 23 */ + { REG_FPU_OFFSET + 6, 0 }, /* mm6 : 24 */ + { REG_FPU_OFFSET + 7, 0 } /* mm7 : 25 */ + }; + +/* REGISTER TABLE: size, type, and name of every register in the + * CPU. Does not include MSRs since the are, after all, + * model specific. */ +static struct { + unsigned int size; + enum x86_reg_type type; + unsigned int alias; + char mnemonic[8]; +} ia32_reg_table[NUM_X86_REGS + 2] = { + { 0, 0, 0, "" }, + /* REG_DWORD_OFFSET */ + { REG_DWORD_SIZE, reg_gen | reg_ret, 0, "eax" }, + { REG_DWORD_SIZE, reg_gen | reg_count, 0, "ecx" }, + { REG_DWORD_SIZE, reg_gen, 0, "edx" }, + { REG_DWORD_SIZE, reg_gen, 0, "ebx" }, + /* REG_ESP_INDEX */ + { REG_DWORD_SIZE, reg_gen | reg_sp, 0, "esp" }, + { REG_DWORD_SIZE, reg_gen | reg_fp, 0, "ebp" }, + { REG_DWORD_SIZE, reg_gen | reg_src, 0, "esi" }, + { REG_DWORD_SIZE, reg_gen | reg_dest, 0, "edi" }, + /* REG_WORD_OFFSET */ + { REG_WORD_SIZE, reg_gen | reg_ret, 3, "ax" }, + { REG_WORD_SIZE, reg_gen | reg_count, 6, "cx" }, + { REG_WORD_SIZE, reg_gen, 9, "dx" }, + { REG_WORD_SIZE, reg_gen, 12, "bx" }, + { REG_WORD_SIZE, reg_gen | reg_sp, 13, "sp" }, + { REG_WORD_SIZE, reg_gen | reg_fp, 14, "bp" }, + { REG_WORD_SIZE, reg_gen | reg_src, 15, "si" }, + { REG_WORD_SIZE, reg_gen | reg_dest, 16, "di" }, + /* REG_BYTE_OFFSET */ + { REG_BYTE_SIZE, reg_gen, 1, "al" }, + { REG_BYTE_SIZE, reg_gen, 4, "cl" }, + { REG_BYTE_SIZE, reg_gen, 7, "dl" }, + { REG_BYTE_SIZE, reg_gen, 10, "bl" }, + { REG_BYTE_SIZE, reg_gen, 2, "ah" }, + { REG_BYTE_SIZE, reg_gen, 5, "ch" }, + { REG_BYTE_SIZE, reg_gen, 8, "dh" }, + { REG_BYTE_SIZE, reg_gen, 11, "bh" }, + /* REG_MMX_OFFSET */ + { REG_MMX_SIZE, reg_simd, 18, "mm0" }, + { REG_MMX_SIZE, reg_simd, 19, "mm1" }, + { REG_MMX_SIZE, reg_simd, 20, "mm2" }, + { REG_MMX_SIZE, reg_simd, 21, "mm3" }, + { REG_MMX_SIZE, reg_simd, 22, "mm4" }, + { REG_MMX_SIZE, reg_simd, 23, "mm5" }, + { REG_MMX_SIZE, reg_simd, 24, "mm6" }, + { REG_MMX_SIZE, reg_simd, 25, "mm7" }, + /* REG_SIMD_OFFSET */ + { REG_SIMD_SIZE, reg_simd, 0, "xmm0" }, + { REG_SIMD_SIZE, reg_simd, 0, "xmm1" }, + { REG_SIMD_SIZE, reg_simd, 0, "xmm2" }, + { REG_SIMD_SIZE, reg_simd, 0, "xmm3" }, + { REG_SIMD_SIZE, reg_simd, 0, "xmm4" }, + { REG_SIMD_SIZE, reg_simd, 0, "xmm5" }, + { REG_SIMD_SIZE, reg_simd, 0, "xmm6" }, + { REG_SIMD_SIZE, reg_simd, 0, "xmm7" }, + /* REG_DEBUG_OFFSET */ + { REG_DEBUG_SIZE, reg_sys, 0, "dr0" }, + { 
REG_DEBUG_SIZE, reg_sys, 0, "dr1" }, + { REG_DEBUG_SIZE, reg_sys, 0, "dr2" }, + { REG_DEBUG_SIZE, reg_sys, 0, "dr3" }, + { REG_DEBUG_SIZE, reg_sys, 0, "dr4" }, + { REG_DEBUG_SIZE, reg_sys, 0, "dr5" }, + { REG_DEBUG_SIZE, reg_sys, 0, "dr6" }, + { REG_DEBUG_SIZE, reg_sys, 0, "dr7" }, + /* REG_CTRL_OFFSET */ + { REG_CTRL_SIZE, reg_sys, 0, "cr0" }, + { REG_CTRL_SIZE, reg_sys, 0, "cr1" }, + { REG_CTRL_SIZE, reg_sys, 0, "cr2" }, + { REG_CTRL_SIZE, reg_sys, 0, "cr3" }, + { REG_CTRL_SIZE, reg_sys, 0, "cr4" }, + { REG_CTRL_SIZE, reg_sys, 0, "cr5" }, + { REG_CTRL_SIZE, reg_sys, 0, "cr6" }, + { REG_CTRL_SIZE, reg_sys, 0, "cr7" }, + /* REG_TEST_OFFSET */ + { REG_TEST_SIZE, reg_sys, 0, "tr0" }, + { REG_TEST_SIZE, reg_sys, 0, "tr1" }, + { REG_TEST_SIZE, reg_sys, 0, "tr2" }, + { REG_TEST_SIZE, reg_sys, 0, "tr3" }, + { REG_TEST_SIZE, reg_sys, 0, "tr4" }, + { REG_TEST_SIZE, reg_sys, 0, "tr5" }, + { REG_TEST_SIZE, reg_sys, 0, "tr6" }, + { REG_TEST_SIZE, reg_sys, 0, "tr7" }, + /* REG_SEG_OFFSET */ + { REG_SEG_SIZE, reg_seg, 0, "es" }, + { REG_SEG_SIZE, reg_seg, 0, "cs" }, + { REG_SEG_SIZE, reg_seg, 0, "ss" }, + { REG_SEG_SIZE, reg_seg, 0, "ds" }, + { REG_SEG_SIZE, reg_seg, 0, "fs" }, + { REG_SEG_SIZE, reg_seg, 0, "gs" }, + /* REG_LDTR_INDEX */ + { REG_DWORD_SIZE, reg_sys, 0, "ldtr" }, + /* REG_GDTR_INDEX */ + { REG_DWORD_SIZE, reg_sys, 0, "gdtr" }, + /* REG_FPU_OFFSET */ + { REG_FPU_SIZE, reg_fpu, 0, "st(0)" }, + { REG_FPU_SIZE, reg_fpu, 0, "st(1)" }, + { REG_FPU_SIZE, reg_fpu, 0, "st(2)" }, + { REG_FPU_SIZE, reg_fpu, 0, "st(3)" }, + { REG_FPU_SIZE, reg_fpu, 0, "st(4)" }, + { REG_FPU_SIZE, reg_fpu, 0, "st(5)" }, + { REG_FPU_SIZE, reg_fpu, 0, "st(6)" }, + { REG_FPU_SIZE, reg_fpu, 0, "st(7)" }, + /* REG_FLAGS_INDEX : 81 */ + { REG_FLAGS_SIZE, reg_cond, 0, "eflags" }, + /* REG_FPCTRL_INDEX : 82*/ + { REG_FPCTRL_SIZE, reg_fpu | reg_sys, 0, "fpctrl" }, + /* REG_FPSTATUS_INDEX : 83*/ + { REG_FPSTATUS_SIZE, reg_fpu | reg_sys, 0, "fpstat" }, + /* REG_FPTAG_INDEX : 84 */ + { REG_FPTAG_SIZE, reg_fpu | reg_sys, 0, "fptag" }, + /* REG_EIP_INDEX : 85 */ + { REG_EIP_SIZE, reg_pc, 0, "eip" }, + /* REG_IP_INDEX : 86 */ + { REG_IP_SIZE, reg_pc, 17, "ip" }, + /* REG_IDTR_INDEX : 87 */ + { REG_DWORD_SIZE, reg_sys, 0, "idtr" }, + /* REG_MXCSG_INDEX : SSE Control Reg : 88 */ + { REG_DWORD_SIZE, reg_sys | reg_simd, 0, "mxcsr" }, + /* REG_TR_INDEX : Task Register : 89 */ + { 16 + 64, reg_sys, 0, "tr" }, + /* REG_CSMSR_INDEX : SYSENTER_CS_MSR : 90 */ + { REG_DWORD_SIZE, reg_sys, 0, "cs_msr" }, + /* REG_ESPMSR_INDEX : SYSENTER_ESP_MSR : 91 */ + { REG_DWORD_SIZE, reg_sys, 0, "esp_msr" }, + /* REG_EIPMSR_INDEX : SYSENTER_EIP_MSR : 92 */ + { REG_DWORD_SIZE, reg_sys, 0, "eip_msr" }, + { 0 } + }; + + +static size_t sz_regtable = NUM_X86_REGS + 1; + + +void ia32_handle_register( x86_reg_t *reg, size_t id ) { + unsigned int alias; + if (! id || id > sz_regtable ) { + return; + } + + memset( reg, 0, sizeof(x86_reg_t) ); + + strncpy( reg->name, ia32_reg_table[id].mnemonic, MAX_REGNAME ); + + reg->type = ia32_reg_table[id].type; + reg->size = ia32_reg_table[id].size; + + alias = ia32_reg_table[id].alias; + if ( alias ) { + reg->alias = ia32_reg_aliases[alias].alias; + reg->shift = ia32_reg_aliases[alias].shift; + } + reg->id = id; + + return; +} + +size_t ia32_true_register_id( size_t id ) { + size_t reg; + + if (! 
id || id > sz_regtable ) { + return 0; + } + + reg = id; + if (ia32_reg_table[reg].alias) { + reg = ia32_reg_aliases[ia32_reg_table[reg].alias].alias; + } + return reg; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_reg.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_reg.h new file mode 100644 index 0000000000..fbbc77a178 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_reg.h @@ -0,0 +1,41 @@ +#ifndef IA32_REG_H +#define IA32_REG_H + +#include /* for size_t */ +#include "libdis.h" /* for x86_reg_t */ + +/* NOTE these are used in opcode tables for hard-coded registers */ +#define REG_DWORD_OFFSET 1 /* 0 + 1 */ +#define REG_ECX_INDEX 2 /* 0 + 1 + 1 */ +#define REG_ESP_INDEX 5 /* 0 + 4 + 1 */ +#define REG_EBP_INDEX 6 /* 0 + 5 + 1 */ +#define REG_ESI_INDEX 7 /* 0 + 6 + 1 */ +#define REG_EDI_INDEX 8 /* 0 + 7 + 1 */ +#define REG_WORD_OFFSET 9 /* 1 * 8 + 1 */ +#define REG_BYTE_OFFSET 17 /* 2 * 8 + 1 */ +#define REG_MMX_OFFSET 25 /* 3 * 8 + 1 */ +#define REG_SIMD_OFFSET 33 /* 4 * 8 + 1 */ +#define REG_DEBUG_OFFSET 41 /* 5 * 8 + 1 */ +#define REG_CTRL_OFFSET 49 /* 6 * 8 + 1 */ +#define REG_TEST_OFFSET 57 /* 7 * 8 + 1 */ +#define REG_SEG_OFFSET 65 /* 8 * 8 + 1 */ +#define REG_LDTR_INDEX 71 /* 8 * 8 + 1 + 1 */ +#define REG_GDTR_INDEX 72 /* 8 * 8 + 2 + 1 */ +#define REG_FPU_OFFSET 73 /* 9 * 8 + 1 */ +#define REG_FLAGS_INDEX 81 /* 10 * 8 + 1 */ +#define REG_FPCTRL_INDEX 82 /* 10 * 8 + 1 + 1 */ +#define REG_FPSTATUS_INDEX 83 /* 10 * 8 + 2 + 1 */ +#define REG_FPTAG_INDEX 84 /* 10 * 8 + 3 + 1 */ +#define REG_EIP_INDEX 85 /* 10 * 8 + 4 + 1 */ +#define REG_IP_INDEX 86 /* 10 * 8 + 5 + 1 */ +#define REG_IDTR_INDEX 87 /* 10 * 8 + 6 + 1 */ +#define REG_MXCSG_INDEX 88 /* 10 * 8 + 7 + 1 */ +#define REG_TR_INDEX 89 /* 10 * 8 + 8 + 1 */ +#define REG_CSMSR_INDEX 90 /* 10 * 8 + 9 + 1 */ +#define REG_ESPMSR_INDEX 91 /* 10 * 8 + 10 + 1 */ +#define REG_EIPMSR_INDEX 92 /* 10 * 8 + 11 + 1 */ + +void ia32_handle_register( x86_reg_t *reg, size_t id ); +size_t ia32_true_register_id( size_t id ); + +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_settings.c b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_settings.c new file mode 100644 index 0000000000..b578e34488 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_settings.c @@ -0,0 +1,13 @@ +#include "libdis.h" +#include "ia32_settings.h" +#include "ia32_reg.h" +#include "ia32_insn.h" + +ia32_settings_t ia32_settings = { + 1, 0xF4, + MAX_INSTRUCTION_SIZE, + 4, 4, 8, 4, 8, + REG_ESP_INDEX, REG_EBP_INDEX, REG_EIP_INDEX, REG_FLAGS_INDEX, + REG_DWORD_OFFSET, REG_SEG_OFFSET, REG_FPU_OFFSET, + opt_none +}; diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_settings.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_settings.h new file mode 100644 index 0000000000..769c0e9fa0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/ia32_settings.h @@ -0,0 +1,27 @@ +#ifndef IA32_SETTINGS_H +#define IA32_SETTINGS_H + +#include "libdis.h" + +typedef struct { + /* options */ + unsigned char endian, /* 0 = big, 1 = little */ + wc_byte, /* wildcard byte */ + max_insn, /* max insn size */ + sz_addr, /* default address size */ + sz_oper, /* default operand size */ + sz_byte, /* # bits in byte */ + sz_word, /* # bytes in machine word */ + sz_dword; /* # bytes in machine dword */ + unsigned int 
id_sp_reg, /* id of stack pointer */ + id_fp_reg, /* id of frame pointer */ + id_ip_reg, /* id of instruction pointer */ + id_flag_reg, /* id of flags register */ + offset_gen_regs, /* start of general regs */ + offset_seg_regs, /* start of segment regs */ + offset_fpu_regs; /* start of floating point regs */ + /* user-controlled settings */ + enum x86_options options; +} ia32_settings_t; + +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/libdis.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/libdis.h new file mode 100644 index 0000000000..94103396f6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/libdis.h @@ -0,0 +1,836 @@ +#ifndef LIBDISASM_H +#define LIBDISASM_H + +#ifdef WIN32 +#include +#endif + +#include + +/* 'NEW" types + * __________________________________________________________________________*/ +#ifndef LIBDISASM_QWORD_H /* do not interfere with qword.h */ + #define LIBDISASM_QWORD_H + #ifdef _MSC_VER + typedef __int64 qword_t; + #else + typedef int64_t qword_t; + #endif +#endif + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* 'NEW" x86 API + * __________________________________________________________________________*/ + + +/* ========================================= Error Reporting */ +/* REPORT CODES + * These are passed to a reporter function passed at initialization. + * Each code determines the type of the argument passed to the reporter; + * this allows the report to recover from errors, or just log them. + */ +enum x86_report_codes { + report_disasm_bounds, /* RVA OUT OF BOUNDS : The disassembler could + not disassemble the supplied RVA as it is + out of the range of the buffer. The + application should store the address and + attempt to determine what section of the + binary it is in, then disassemble the + address from the bytes in that section. + data: uint32_t rva */ + report_insn_bounds, /* INSTRUCTION OUT OF BOUNDS: The disassembler + could not disassemble the instruction as + the instruction would require bytes beyond + the end of the current buffer. This usually + indicated garbage bytes at the end of a + buffer, or an incorrectly-sized buffer. + data: uint32_t rva */ + report_invalid_insn, /* INVALID INSTRUCTION: The disassembler could + not disassemble the instruction as it has an + invalid combination of opcodes and operands. + This will stop automated disassembly; the + application can restart the disassembly + after the invalid instruction. + data: uint32_t rva */ + report_unknown +}; + +/* 'arg' is optional arbitrary data provided by the code passing the + * callback -- for example, it could be 'this' or 'self' in OOP code. 
+ * 'code' is provided by libdisasm, it is one of the above + * 'data' is provided by libdisasm and is context-specific, per the enums */ +typedef void (*DISASM_REPORTER)( enum x86_report_codes code, + void *data, void *arg ); + + +/* x86_report_error : Call the register reporter to report an error */ +void x86_report_error( enum x86_report_codes code, void *data ); + +/* ========================================= Libdisasm Management Routines */ +enum x86_options { /* these can be ORed together */ + opt_none= 0, + opt_ignore_nulls=1, /* ignore sequences of > 4 NULL bytes */ + opt_16_bit=2, /* 16-bit/DOS disassembly */ + opt_att_mnemonics=4, /* use AT&T syntax names for alternate opcode mnemonics */ +}; + +/* management routines */ +/* 'arg' is caller-specific data which is passed as the first argument + * to the reporter callback routine */ +int x86_init( enum x86_options options, DISASM_REPORTER reporter, void *arg); +void x86_set_reporter( DISASM_REPORTER reporter, void *arg); +void x86_set_options( enum x86_options options ); +enum x86_options x86_get_options( void ); +int x86_cleanup(void); + + +/* ========================================= Instruction Representation */ +/* these defines are only intended for use in the array decl's */ +#define MAX_REGNAME 8 + +#define MAX_PREFIX_STR 32 +#define MAX_MNEM_STR 16 +#define MAX_INSN_SIZE 20 /* same as in i386.h */ +#define MAX_OP_STRING 32 /* max possible operand size in string form */ +#define MAX_OP_RAW_STRING 64 /* max possible operand size in raw form */ +#define MAX_OP_XML_STRING 256 /* max possible operand size in xml form */ +#define MAX_NUM_OPERANDS 8 /* max # implicit and explicit operands */ +/* in these, the '2 *' is arbitrary: the max # of operands should require + * more space than the rest of the insn */ +#define MAX_INSN_STRING 512 /* 2 * 8 * MAX_OP_STRING */ +#define MAX_INSN_RAW_STRING 1024 /* 2 * 8 * MAX_OP_RAW_STRING */ +#define MAX_INSN_XML_STRING 4096 /* 2 * 8 * MAX_OP_XML_STRING */ + +enum x86_reg_type { /* NOTE: these may be ORed together */ + reg_gen = 0x00001, /* general purpose */ + reg_in = 0x00002, /* incoming args, ala RISC */ + reg_out = 0x00004, /* args to calls, ala RISC */ + reg_local = 0x00008, /* local vars, ala RISC */ + reg_fpu = 0x00010, /* FPU data register */ + reg_seg = 0x00020, /* segment register */ + reg_simd = 0x00040, /* SIMD/MMX reg */ + reg_sys = 0x00080, /* restricted/system register */ + reg_sp = 0x00100, /* stack pointer */ + reg_fp = 0x00200, /* frame pointer */ + reg_pc = 0x00400, /* program counter */ + reg_retaddr = 0x00800, /* return addr for func */ + reg_cond = 0x01000, /* condition code / flags */ + reg_zero = 0x02000, /* zero register, ala RISC */ + reg_ret = 0x04000, /* return value */ + reg_src = 0x10000, /* array/rep source */ + reg_dest = 0x20000, /* array/rep destination */ + reg_count = 0x40000 /* array/rep/loop counter */ +}; + +/* x86_reg_t : an X86 CPU register */ +typedef struct { + char name[MAX_REGNAME]; + enum x86_reg_type type; /* what register is used for */ + unsigned int size; /* size of register in bytes */ + unsigned int id; /* register ID #, for quick compares */ + unsigned int alias; /* ID of reg this is an alias for */ + unsigned int shift; /* amount to shift aliased reg by */ +} x86_reg_t; + +/* x86_ea_t : an X86 effective address (address expression) */ +typedef struct { + unsigned int scale; /* scale factor */ + x86_reg_t index, base; /* index, base registers */ + int32_t disp; /* displacement */ + char disp_sign; /* is negative? 
1/0 */ + char disp_size; /* 0, 1, 2, 4 */ +} x86_ea_t; + +/* x86_absolute_t : an X86 segment:offset address (descriptor) */ +typedef struct { + unsigned short segment; /* loaded directly into CS */ + union { + unsigned short off16; /* loaded directly into IP */ + uint32_t off32; /* loaded directly into EIP */ + } offset; +} x86_absolute_t; + +enum x86_op_type { /* mutually exclusive */ + op_unused = 0, /* empty/unused operand: should never occur */ + op_register = 1, /* CPU register */ + op_immediate = 2, /* Immediate Value */ + op_relative_near = 3, /* Relative offset from IP */ + op_relative_far = 4, /* Relative offset from IP */ + op_absolute = 5, /* Absolute address (ptr16:32) */ + op_expression = 6, /* Address expression (scale/index/base/disp) */ + op_offset = 7, /* Offset from start of segment (m32) */ + op_unknown +}; + +#define x86_optype_is_address( optype ) \ + ( optype == op_absolute || optype == op_offset ) +#define x86_optype_is_relative( optype ) \ + ( optype == op_relative_near || optype == op_relative_far ) +#define x86_optype_is_memory( optype ) \ + ( optype > op_immediate && optype < op_unknown ) + +enum x86_op_datatype { /* these use Intel's lame terminology */ + op_byte = 1, /* 1 byte integer */ + op_word = 2, /* 2 byte integer */ + op_dword = 3, /* 4 byte integer */ + op_qword = 4, /* 8 byte integer */ + op_dqword = 5, /* 16 byte integer */ + op_sreal = 6, /* 4 byte real (single real) */ + op_dreal = 7, /* 8 byte real (double real) */ + op_extreal = 8, /* 10 byte real (extended real) */ + op_bcd = 9, /* 10 byte binary-coded decimal */ + op_ssimd = 10, /* 16 byte : 4 packed single FP (SIMD, MMX) */ + op_dsimd = 11, /* 16 byte : 2 packed double FP (SIMD, MMX) */ + op_sssimd = 12, /* 4 byte : scalar single FP (SIMD, MMX) */ + op_sdsimd = 13, /* 8 byte : scalar double FP (SIMD, MMX) */ + op_descr32 = 14, /* 6 byte Intel descriptor 2:4 */ + op_descr16 = 15, /* 4 byte Intel descriptor 2:2 */ + op_pdescr32 = 16, /* 6 byte Intel pseudo-descriptor 32:16 */ + op_pdescr16 = 17, /* 6 byte Intel pseudo-descriptor 8:24:16 */ + op_bounds16 = 18, /* signed 16:16 lower:upper bounds */ + op_bounds32 = 19, /* signed 32:32 lower:upper bounds */ + op_fpuenv16 = 20, /* 14 byte FPU control/environment data */ + op_fpuenv32 = 21, /* 28 byte FPU control/environment data */ + op_fpustate16 = 22, /* 94 byte FPU state (env & reg stack) */ + op_fpustate32 = 23, /* 108 byte FPU state (env & reg stack) */ + op_fpregset = 24, /* 512 bytes: register set */ + op_fpreg = 25, /* FPU register */ + op_none = 0xFF, /* operand without a datatype (INVLPG) */ +}; + +enum x86_op_access { /* ORed together */ + op_read = 1, + op_write = 2, + op_execute = 4 +}; + +enum x86_op_flags { /* ORed together, but segs are mutually exclusive */ + op_signed = 1, /* signed integer */ + op_string = 2, /* possible string or array */ + op_constant = 4, /* symbolic constant */ + op_pointer = 8, /* operand points to a memory address */ + op_sysref = 0x010, /* operand is a syscall number */ + op_implied = 0x020, /* operand is implicit in the insn */ + op_hardcode = 0x40, /* operand is hardcoded in insn definition */ + /* NOTE: an 'implied' operand is one which can be considered a side + * effect of the insn, e.g. %esp being modified by PUSH or POP. A + * 'hard-coded' operand is one which is specified in the instruction + * definition, e.g. %es:%edi in MOVSB or 1 in ROL Eb, 1. The difference + * is that hard-coded operands are printed by disassemblers and are + * required to re-assemble, while implicit operands are invisible. 
*/ + op_es_seg = 0x100, /* ES segment override */ + op_cs_seg = 0x200, /* CS segment override */ + op_ss_seg = 0x300, /* SS segment override */ + op_ds_seg = 0x400, /* DS segment override */ + op_fs_seg = 0x500, /* FS segment override */ + op_gs_seg = 0x600 /* GS segment override */ +}; + +/* x86_op_t : an X86 instruction operand */ +typedef struct { + enum x86_op_type type; /* operand type */ + enum x86_op_datatype datatype; /* operand size */ + enum x86_op_access access; /* operand access [RWX] */ + enum x86_op_flags flags; /* misc flags */ + union { + /* sizeof will have to work on these union members! */ + /* immediate values */ + char sbyte; + short sword; + int32_t sdword; + qword_t sqword; + unsigned char byte; + unsigned short word; + uint32_t dword; + qword_t qword; + float sreal; + double dreal; + /* misc large/non-native types */ + unsigned char extreal[10]; + unsigned char bcd[10]; + qword_t dqword[2]; + unsigned char simd[16]; + unsigned char fpuenv[28]; + /* offset from segment */ + uint32_t offset; + /* ID of CPU register */ + x86_reg_t reg; + /* offsets from current insn */ + char relative_near; + int32_t relative_far; + /* segment:offset */ + x86_absolute_t absolute; + /* effective address [expression] */ + x86_ea_t expression; + } data; + /* this is needed to make formatting operands more sane */ + void * insn; /* pointer to x86_insn_t owning operand */ +} x86_op_t; + +/* Linked list of x86_op_t; provided for manual traversal of the operand + * list in an insn. Users wishing to add operands to this list, e.g. to add + * implicit operands, should use x86_operand_new in x86_operand_list.h */ +typedef struct x86_operand_list { + x86_op_t op; + struct x86_operand_list *next; +} x86_oplist_t; + +enum x86_insn_group { + insn_none = 0, /* invalid instruction */ + insn_controlflow = 1, + insn_arithmetic = 2, + insn_logic = 3, + insn_stack = 4, + insn_comparison = 5, + insn_move = 6, + insn_string = 7, + insn_bit_manip = 8, + insn_flag_manip = 9, + insn_fpu = 10, + insn_interrupt = 13, + insn_system = 14, + insn_other = 15 +}; + +enum x86_insn_type { + insn_invalid = 0, /* invalid instruction */ + /* insn_controlflow */ + insn_jmp = 0x1001, + insn_jcc = 0x1002, + insn_call = 0x1003, + insn_callcc = 0x1004, + insn_return = 0x1005, + /* insn_arithmetic */ + insn_add = 0x2001, + insn_sub = 0x2002, + insn_mul = 0x2003, + insn_div = 0x2004, + insn_inc = 0x2005, + insn_dec = 0x2006, + insn_shl = 0x2007, + insn_shr = 0x2008, + insn_rol = 0x2009, + insn_ror = 0x200A, + /* insn_logic */ + insn_and = 0x3001, + insn_or = 0x3002, + insn_xor = 0x3003, + insn_not = 0x3004, + insn_neg = 0x3005, + /* insn_stack */ + insn_push = 0x4001, + insn_pop = 0x4002, + insn_pushregs = 0x4003, + insn_popregs = 0x4004, + insn_pushflags = 0x4005, + insn_popflags = 0x4006, + insn_enter = 0x4007, + insn_leave = 0x4008, + /* insn_comparison */ + insn_test = 0x5001, + insn_cmp = 0x5002, + /* insn_move */ + insn_mov = 0x6001, /* move */ + insn_movcc = 0x6002, /* conditional move */ + insn_xchg = 0x6003, /* exchange */ + insn_xchgcc = 0x6004, /* conditional exchange */ + /* insn_string */ + insn_strcmp = 0x7001, + insn_strload = 0x7002, + insn_strmov = 0x7003, + insn_strstore = 0x7004, + insn_translate = 0x7005, /* xlat */ + /* insn_bit_manip */ + insn_bittest = 0x8001, + insn_bitset = 0x8002, + insn_bitclear = 0x8003, + /* insn_flag_manip */ + insn_clear_carry = 0x9001, + insn_clear_zero = 0x9002, + insn_clear_oflow = 0x9003, + insn_clear_dir = 0x9004, + insn_clear_sign = 0x9005, + insn_clear_parity = 0x9006, + 
insn_set_carry = 0x9007, + insn_set_zero = 0x9008, + insn_set_oflow = 0x9009, + insn_set_dir = 0x900A, + insn_set_sign = 0x900B, + insn_set_parity = 0x900C, + insn_tog_carry = 0x9010, + insn_tog_zero = 0x9020, + insn_tog_oflow = 0x9030, + insn_tog_dir = 0x9040, + insn_tog_sign = 0x9050, + insn_tog_parity = 0x9060, + /* insn_fpu */ + insn_fmov = 0xA001, + insn_fmovcc = 0xA002, + insn_fneg = 0xA003, + insn_fabs = 0xA004, + insn_fadd = 0xA005, + insn_fsub = 0xA006, + insn_fmul = 0xA007, + insn_fdiv = 0xA008, + insn_fsqrt = 0xA009, + insn_fcmp = 0xA00A, + insn_fcos = 0xA00C, + insn_fldpi = 0xA00D, + insn_fldz = 0xA00E, + insn_ftan = 0xA00F, + insn_fsine = 0xA010, + insn_fsys = 0xA020, + /* insn_interrupt */ + insn_int = 0xD001, + insn_intcc = 0xD002, /* not present in x86 ISA */ + insn_iret = 0xD003, + insn_bound = 0xD004, + insn_debug = 0xD005, + insn_trace = 0xD006, + insn_invalid_op = 0xD007, + insn_oflow = 0xD008, + /* insn_system */ + insn_halt = 0xE001, + insn_in = 0xE002, /* input from port/bus */ + insn_out = 0xE003, /* output to port/bus */ + insn_cpuid = 0xE004, + /* insn_other */ + insn_nop = 0xF001, + insn_bcdconv = 0xF002, /* convert to or from BCD */ + insn_szconv = 0xF003 /* change size of operand */ +}; + +/* These flags specify special characteristics of the instruction, such as + * whether the inatruction is privileged or whether it serializes the + * pipeline. + * NOTE : These may not be accurate for all instructions; updates to the + * opcode tables have not been completed. */ +enum x86_insn_note { + insn_note_ring0 = 1, /* Only available in ring 0 */ + insn_note_smm = 2, /* "" in System Management Mode */ + insn_note_serial = 4, /* Serializing instruction */ + insn_note_nonswap = 8, /* Does not swap arguments in att-style formatting */ + insn_note_nosuffix = 16, /* Does not have size suffix in att-style formatting */ +}; + +/* This specifies what effects the instruction has on the %eflags register */ +enum x86_flag_status { + insn_carry_set = 0x1, /* CF */ + insn_zero_set = 0x2, /* ZF */ + insn_oflow_set = 0x4, /* OF */ + insn_dir_set = 0x8, /* DF */ + insn_sign_set = 0x10, /* SF */ + insn_parity_set = 0x20, /* PF */ + insn_carry_or_zero_set = 0x40, + insn_zero_set_or_sign_ne_oflow = 0x80, + insn_carry_clear = 0x100, + insn_zero_clear = 0x200, + insn_oflow_clear = 0x400, + insn_dir_clear = 0x800, + insn_sign_clear = 0x1000, + insn_parity_clear = 0x2000, + insn_sign_eq_oflow = 0x4000, + insn_sign_ne_oflow = 0x8000 +}; + +/* The CPU model in which the insturction first appeared; this can be used + * to mask out instructions appearing in earlier or later models or to + * check the portability of a binary. + * NOTE : These may not be accurate for all instructions; updates to the + * opcode tables have not been completed. */ +enum x86_insn_cpu { + cpu_8086 = 1, /* Intel */ + cpu_80286 = 2, + cpu_80386 = 3, + cpu_80387 = 4, + cpu_80486 = 5, + cpu_pentium = 6, + cpu_pentiumpro = 7, + cpu_pentium2 = 8, + cpu_pentium3 = 9, + cpu_pentium4 = 10, + cpu_k6 = 16, /* AMD */ + cpu_k7 = 32, + cpu_athlon = 48 +}; + +/* CPU ISA subsets: These are derived from the Instruction Groups in + * Intel Vol 1 Chapter 5; they represent subsets of the IA32 ISA but + * do not reflect the 'type' of the instruction in the same way that + * x86_insn_group does. In short, these are AMD/Intel's somewhat useless + * designations. + * NOTE : These may not be accurate for all instructions; updates to the + * opcode tables have not been completed. 
*/ +enum x86_insn_isa { + isa_gp = 1, /* general purpose */ + isa_fp = 2, /* floating point */ + isa_fpumgt = 3, /* FPU/SIMD management */ + isa_mmx = 4, /* Intel MMX */ + isa_sse1 = 5, /* Intel SSE SIMD */ + isa_sse2 = 6, /* Intel SSE2 SIMD */ + isa_sse3 = 7, /* Intel SSE3 SIMD */ + isa_3dnow = 8, /* AMD 3DNow! SIMD */ + isa_sys = 9 /* system instructions */ +}; + +enum x86_insn_prefix { + insn_no_prefix = 0, + insn_rep_zero = 1, /* REPZ and REPE */ + insn_rep_notzero = 2, /* REPNZ and REPNZ */ + insn_lock = 4 /* LOCK: */ +}; + +/* TODO: maybe provide insn_new/free(), and have disasm return new insn_t */ +/* x86_insn_t : an X86 instruction */ +typedef struct { + /* information about the instruction */ + uint32_t addr; /* load address */ + uint32_t offset; /* offset into file/buffer */ + enum x86_insn_group group; /* meta-type, e.g. INS_EXEC */ + enum x86_insn_type type; /* type, e.g. INS_BRANCH */ + enum x86_insn_note note; /* note, e.g. RING0 */ + unsigned char bytes[MAX_INSN_SIZE]; + unsigned char size; /* size of insn in bytes */ + /* 16/32-bit mode settings */ + unsigned char addr_size; /* default address size : 2 or 4 */ + unsigned char op_size; /* default operand size : 2 or 4 */ + /* CPU/instruction set */ + enum x86_insn_cpu cpu; + enum x86_insn_isa isa; + /* flags */ + enum x86_flag_status flags_set; /* flags set or tested by insn */ + enum x86_flag_status flags_tested; + /* stack */ + unsigned char stack_mod; /* 0 or 1 : is the stack modified? */ + int32_t stack_mod_val; /* val stack is modified by if known */ + + /* the instruction proper */ + enum x86_insn_prefix prefix; /* prefixes ORed together */ + char prefix_string[MAX_PREFIX_STR]; /* prefixes [might be truncated] */ + char mnemonic[MAX_MNEM_STR]; + x86_oplist_t *operands; /* list of explicit/implicit operands */ + size_t operand_count; /* total number of operands */ + size_t explicit_count; /* number of explicit operands */ + /* convenience fields for user */ + void *block; /* code block containing this insn */ + void *function; /* function containing this insn */ + int tag; /* tag the insn as seen/processed */ +} x86_insn_t; + + +/* returns 0 if an instruction is invalid, 1 if valid */ +int x86_insn_is_valid( x86_insn_t *insn ); + +/* DISASSEMBLY ROUTINES + * Canonical order of arguments is + * (buf, buf_len, buf_rva, offset, len, insn, func, arg, resolve_func) + * ...but of course all of these are not used at the same time. + */ + + +/* Function prototype for caller-supplied callback routine + * These callbacks are intended to process 'insn' further, e.g. by + * adding it to a linked list, database, etc */ +typedef void (*DISASM_CALLBACK)( x86_insn_t *insn, void * arg ); + +/* Function prototype for caller-supplied address resolver. + * This routine is used to determine the rva to disassemble next, given + * the 'dest' operand of a jump/call. This allows the caller to resolve + * jump/call targets stored in a register or on the stack, and also allows + * the caller to prevent endless loops by checking if an address has + * already been disassembled. If an address cannot be resolved from the + * operand, or if the address has already been disassembled, this routine + * should return -1; in all other cases the RVA to be disassembled next + * should be returned. */ +typedef int32_t (*DISASM_RESOLVER)( x86_op_t *op, x86_insn_t * current_insn, + void *arg ); + + +/* x86_disasm: Disassemble a single instruction from a buffer of bytes. + * Returns size of instruction in bytes. 
+ * Caller is responsible for calling x86_oplist_free() on + * a reused "insn" to avoid leaking memory when calling this + * function repeatedly. + * buf : Buffer of bytes to disassemble + * buf_len : Length of the buffer + * buf_rva : Load address of the start of the buffer + * offset : Offset in buffer to disassemble + * insn : Structure to fill with disassembled instruction + */ +unsigned int x86_disasm( unsigned char *buf, unsigned int buf_len, + uint32_t buf_rva, unsigned int offset, + x86_insn_t * insn ); + +/* x86_disasm_range: Sequential disassembly of a range of bytes in a buffer, + * invoking a callback function each time an instruction + * is successfully disassembled. The 'range' refers to the + * bytes between 'offset' and 'offset + len' in the buffer; + * 'len' is assumed to be less than the length of the buffer. + * Returns number of instructions processed. + * buf : Buffer of bytes to disassemble (e.g. .text section) + * buf_rva : Load address of buffer (e.g. ELF Virtual Address) + * offset : Offset in buffer to start disassembly at + * len : Number of bytes to disassemble + * func : Callback function to invoke (may be NULL) + * arg : Arbitrary data to pass to callback (may be NULL) + */ +unsigned int x86_disasm_range( unsigned char *buf, uint32_t buf_rva, + unsigned int offset, unsigned int len, + DISASM_CALLBACK func, void *arg ); + +/* x86_disasm_forward: Flow-of-execution disassembly of the bytes in a buffer, + * invoking a callback function each time an instruction + * is successfully disassembled. + * buf : Buffer to disassemble (e.g. .text section) + * buf_len : Number of bytes in buffer + * buf_rva : Load address of buffer (e.g. ELF Virtual Address) + * offset : Offset in buffer to start disassembly at (e.g. entry point) + * func : Callback function to invoke (may be NULL) + * arg : Arbitrary data to pass to callback (may be NULL) + * resolver: Caller-supplied address resolver. If no resolver is + * supplied, a default internal one is used -- however the + * internal resolver does NOT catch loops and could end up + * disassembling forever.. + * r_arg : Arbitrary data to pass to resolver (may be NULL) + */ +unsigned int x86_disasm_forward( unsigned char *buf, unsigned int buf_len, + uint32_t buf_rva, unsigned int offset, + DISASM_CALLBACK func, void *arg, + DISASM_RESOLVER resolver, void *r_arg ); + +/* Instruction operands: these are stored as a list of explicit and + * implicit operands. It is recommended that the 'foreach' routines + * be used to when examining operands for purposes of data flow analysis */ + +/* Operand FOREACH callback: 'arg' is an abritrary parameter passed to the + * foreach routine, 'insn' is the x86_insn_t whose operands are being + * iterated over, and 'op' is the current x86_op_t */ +typedef void (*x86_operand_fn)(x86_op_t *op, x86_insn_t *insn, void *arg); + +/* FOREACH types: these are used to limit the foreach results to + * operands which match a certain "type" (implicit or explicit) + * or which are accessed in certain ways (e.g. read or write). Note + * that this operates on the operand list of single instruction, so + * specifying the 'real' operand type (register, memory, etc) is not + * useful. Note also that by definition Execute Access implies Read + * Access and implies Not Write Access. + * The "type" (implicit or explicit) and the access method can + * be ORed together, e.g. 
op_wo | op_explicit */ +enum x86_op_foreach_type { + op_any = 0, /* ALL operands (explicit, implicit, rwx) */ + op_dest = 1, /* operands with Write access */ + op_src = 2, /* operands with Read access */ + op_ro = 3, /* operands with Read but not Write access */ + op_wo = 4, /* operands with Write but not Read access */ + op_xo = 5, /* operands with Execute access */ + op_rw = 6, /* operands with Read AND Write access */ + op_implicit = 0x10, /* operands that are implied by the opcode */ + op_explicit = 0x20 /* operands that are not side-effects */ +}; + + +/* free the operand list associated with an instruction -- useful for + * preventing memory leaks when free()ing an x86_insn_t */ +void x86_oplist_free( x86_insn_t *insn ); + +/* Operand foreach: invokes 'func' with 'insn' and 'arg' as arguments. The + * 'type' parameter is used to select only operands matching specific + * criteria. */ +int x86_operand_foreach( x86_insn_t *insn, x86_operand_fn func, void *arg, + enum x86_op_foreach_type type); + +/* convenience routine: returns count of operands matching 'type' */ +size_t x86_operand_count( x86_insn_t *insn, enum x86_op_foreach_type type ); + +/* accessor functions for the operands */ +x86_op_t * x86_operand_1st( x86_insn_t *insn ); +x86_op_t * x86_operand_2nd( x86_insn_t *insn ); +x86_op_t * x86_operand_3rd( x86_insn_t *insn ); + +/* these allow libdisasm 2.0 accessor functions to still be used */ +#define x86_get_dest_operand( insn ) x86_operand_1st( insn ) +#define x86_get_src_operand( insn ) x86_operand_2nd( insn ) +#define x86_get_imm_operand( insn ) x86_operand_3rd( insn ) + +/* get size of operand data in bytes */ +unsigned int x86_operand_size( x86_op_t *op ); + +/* Operand Convenience Routines: the following three routines are common + * operations on operands, intended to ease the burden of the programmer. */ + +/* Get Address: return the value of an offset operand, or the offset of + * a segment:offset absolute address */ +uint32_t x86_get_address( x86_insn_t *insn ); + +/* Get Relative Offset: return as a sign-extended int32_t the near or far + * relative offset operand, or 0 if there is none. There can be only one + * relaive offset operand in an instruction. */ +int32_t x86_get_rel_offset( x86_insn_t *insn ); + +/* Get Branch Target: return the x86_op_t containing the target of + * a jump or call operand, or NULL if there is no branch target. + * Internally, a 'branch target' is defined as any operand with + * Execute Access set. There can be only one branch target per instruction. */ +x86_op_t * x86_get_branch_target( x86_insn_t *insn ); + +/* Get Immediate: return the x86_op_t containing the immediate operand + * for this instruction, or NULL if there is no immediate operand. There + * can be only one immediate operand per instruction */ +x86_op_t * x86_get_imm( x86_insn_t *insn ); + +/* Get Raw Immediate Data: returns a pointer to the immediate data encoded + * in the instruction. This is useful for large data types [>32 bits] currently + * not supported by libdisasm, or for determining if the disassembler + * screwed up the conversion of the immediate data. Note that 'imm' in this + * context refers to immediate data encoded at the end of an instruction as + * detailed in the Intel Manual Vol II Chapter 2; it does not refer to the + * 'op_imm' operand (the third operand in instructions like 'mul' */ +unsigned char * x86_get_raw_imm( x86_insn_t *insn ); + + +/* More accessor fuctions, this time for user-defined info... 
*/ +/* set the address (usually RVA) of the insn */ +void x86_set_insn_addr( x86_insn_t *insn, uint32_t addr ); + +/* set the offset (usually offset into file) of the insn */ +void x86_set_insn_offset( x86_insn_t *insn, unsigned int offset ); + +/* set a pointer to the function owning the instruction. The + * type of 'func' is user-defined; libdisasm does not use the func field. */ +void x86_set_insn_function( x86_insn_t *insn, void * func ); + +/* set a pointer to the block of code owning the instruction. The + * type of 'block' is user-defined; libdisasm does not use the block field. */ +void x86_set_insn_block( x86_insn_t *insn, void * block ); + +/* instruction tagging: these routines allow the programmer to mark + * instructions as "seen" in a DFS, for example. libdisasm does not use + * the tag field.*/ +/* set insn->tag to 1 */ +void x86_tag_insn( x86_insn_t *insn ); +/* set insn->tag to 0 */ +void x86_untag_insn( x86_insn_t *insn ); +/* return insn->tag */ +int x86_insn_is_tagged( x86_insn_t *insn ); + + +/* Disassembly formats: + * AT&T is standard AS/GAS-style: "mnemonic\tsrc, dest, imm" + * Intel is standard MASM/NASM/TASM: "mnemonic\tdest,src, imm" + * Native is tab-delimited: "RVA\tbytes\tmnemonic\tdest\tsrc\timm" + * XML is your typical ... + * Raw is addr|offset|size|bytes|prefix... see libdisasm_formats.7 + */ +enum x86_asm_format { + unknown_syntax = 0, /* never use! */ + native_syntax, /* header: 35 bytes */ + intel_syntax, /* header: 23 bytes */ + att_syntax, /* header: 23 bytes */ + xml_syntax, /* header: 679 bytes */ + raw_syntax /* header: 172 bytes */ +}; + +/* format (sprintf) an operand into 'buf' using specified syntax */ +int x86_format_operand(x86_op_t *op, char *buf, int len, + enum x86_asm_format format); + +/* format (sprintf) an instruction mnemonic into 'buf' using specified syntax */ +int x86_format_mnemonic(x86_insn_t *insn, char *buf, int len, + enum x86_asm_format format); + +/* format (sprintf) an instruction into 'buf' using specified syntax; + * this includes formatting all operands */ +int x86_format_insn(x86_insn_t *insn, char *buf, int len, enum x86_asm_format); + +/* fill 'buf' with a description of the format's syntax */ +int x86_format_header( char *buf, int len, enum x86_asm_format format); + +/* Endianness of an x86 CPU : 0 is big, 1 is little; always returns 1 */ +unsigned int x86_endian(void); + +/* Default address and operand size in bytes */ +unsigned int x86_addr_size(void); +unsigned int x86_op_size(void); + +/* Size of a machine word in bytes */ +unsigned int x86_word_size(void); + +/* maximum size of a code instruction */ +#define x86_max_inst_size(x) x86_max_insn_size(x) +unsigned int x86_max_insn_size(void); + +/* register IDs of Stack, Frame, Instruction pointer and Flags register */ +unsigned int x86_sp_reg(void); +unsigned int x86_fp_reg(void); +unsigned int x86_ip_reg(void); +unsigned int x86_flag_reg(void); + +/* fill 'reg' struct with details of register 'id' */ +void x86_reg_from_id( unsigned int id, x86_reg_t * reg ); + +/* convenience macro demonstrating how to get an aliased register; proto is + * void x86_get_aliased_reg( x86_reg_t *alias_reg, x86_reg_t *output_reg ) + * where 'alias_reg' is a reg operand and 'output_reg' is filled with the + * register that the operand is an alias for */ +#define x86_get_aliased_reg( alias_reg, output_reg ) \ + x86_reg_from_id( alias_reg->alias, output_reg ) + + +/* ================================== Invariant Instruction Representation */ +/* Invariant instructions are used for 
generating binary signatures; + * the instruction is modified so that all variant bytes in an instruction + * are replaced with a wildcard byte. + * + * A 'variant byte' is one that is expected to be modified by either the + * static or the dynamic linker: for example, an address encoded in an + * instruction. + * + * By comparing the invariant representation of one instruction [or of a + * sequence of instructions] with the invariant representation of another, + * one determine whether the two invariant representations are from the same + * relocatable object [.o] file. Thus one can use binary signatures [which + * are just sequences of invariant instruction representations] to look for + * library routines which have been statically-linked into a binary. + * + * The invariant routines are faster and smaller than the disassembly + * routines; they can be used to determine the size of an instruction + * without all of the overhead of a full instruction disassembly. + */ + +/* This byte is used to replace variant bytes */ +#define X86_WILDCARD_BYTE 0xF4 + +typedef struct { + enum x86_op_type type; /* operand type */ + enum x86_op_datatype datatype; /* operand size */ + enum x86_op_access access; /* operand access [RWX] */ + enum x86_op_flags flags; /* misc flags */ +} x86_invariant_op_t; + +typedef struct { + unsigned char bytes[64]; /* invariant representation */ + unsigned int size; /* number of bytes in insn */ + enum x86_insn_group group; /* meta-type, e.g. INS_EXEC */ + enum x86_insn_type type; /* type, e.g. INS_BRANCH */ + x86_invariant_op_t operands[3]; /* operands: dest, src, imm */ +} x86_invariant_t; + + +/* return a version of the instruction with the variant bytes masked out */ +size_t x86_invariant_disasm( unsigned char *buf, int buf_len, + x86_invariant_t *inv ); +/* return the size in bytes of the intruction pointed to by 'buf'; + * this used x86_invariant_disasm since it faster than x86_disasm */ +size_t x86_size_disasm( unsigned char *buf, unsigned int buf_len ); + +#ifdef __cplusplus +} +#endif + + +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/libdisasm.gyp b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/libdisasm.gyp new file mode 100644 index 0000000000..c48ac824f2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/libdisasm.gyp @@ -0,0 +1,64 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +{ + 'targets': [ + { + 'target_name': 'libdisasm', + 'type': 'static_library', + 'sources': [ + 'ia32_implicit.c', + 'ia32_implicit.h', + 'ia32_insn.c', + 'ia32_insn.h', + 'ia32_invariant.c', + 'ia32_invariant.h', + 'ia32_modrm.c', + 'ia32_modrm.h', + 'ia32_opcode_tables.c', + 'ia32_opcode_tables.h', + 'ia32_operand.c', + 'ia32_operand.h', + 'ia32_reg.c', + 'ia32_reg.h', + 'ia32_settings.c', + 'ia32_settings.h', + 'libdis.h', + 'qword.h', + 'x86_disasm.c', + 'x86_format.c', + 'x86_imm.c', + 'x86_imm.h', + 'x86_insn.c', + 'x86_misc.c', + 'x86_operand_list.c', + 'x86_operand_list.h', + ], + }, + ], +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/qword.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/qword.h new file mode 100644 index 0000000000..5f0e803c93 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/qword.h @@ -0,0 +1,14 @@ +#ifndef LIBDISASM_QWORD_H +#define LIBDISASM_QWORD_H + +#include + +/* platform independent data types */ + +#ifdef _MSC_VER + typedef __int64 qword_t; +#else + typedef int64_t qword_t; +#endif + +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/Makefile b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/Makefile new file mode 100644 index 0000000000..44ef486b6a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/Makefile @@ -0,0 +1,70 @@ +# change these values if you need to +SWIG = swig # apt-get install swig ! +GCC = gcc + +CC_FLAGS = -c -fPIC +LD_FLAGS = -shared -L../.. 
-ldisasm + +BASE_NAME = x86disasm + +export INTERFACE_FILE BASE_NAME SWIG GCC CC_FLAGS LD_FLAGS + +#==================================================== +# TARGETS + +all: swig +dummy: swig swig-python swig-ruby swig-perl swig-tcl install uninstall clean + +swig: swig-python swig-perl +# swig-rub swig-tcl + +swig-python: + cd python && make -f Makefile-swig + +swig-ruby: + cd ruby && make -f Makefile-swig + +swig-perl: + cd perl && make -f Makefile-swig + +swig-tcl: + cd tcl && make -f Makefile-swig + +# ================================================================== +install: install-python install-perl +# install-ruby install-tcl + +install-python: + cd python && sudo make -f Makefile-swig install + +install-ruby: + cd ruby && sudo make -f Makefile-swig install + +install-perl: + cd perl && sudo make -f Makefile-swig install + +install-tcl: + cd tcl && sudo make -f Makefile-swig install + +# ================================================================== +uninstall: uninstall-python +#uninstall-ruby uninstall-perl uninstall-tcl + +uninstall-python: + cd python && sudo make -f Makefile-swig uninstall + +uninstall-ruby: + cd ruby && sudo make -f Makefile-swig uninstall + +uninstall-perl: + cd perl && sudo make -f Makefile-swig uninstall + +uninstall-tcl: + cd tcl && sudo make -f Makefile-swig uninstall + +# ================================================================== +clean: + cd python && make -f Makefile-swig clean + cd ruby && make -f Makefile-swig clean + cd perl && make -f Makefile-swig clean + cd tcl && make -f Makefile-swig clean diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/README b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/README new file mode 100644 index 0000000000..a9fa79ec2c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/README @@ -0,0 +1,128 @@ + Libdisasm SWIG README + +The SWIG utility (www.swig.org) can be used to generate + + +Building SWIG Modules +--------------------- + + make + make install + +Make and Install both build Python, Perl, Ruby, and Tcl modules. If you +do not have one of these languages installed, comment out the relevant +target in the main Makefile. + +Install uses 'sudo' to put files in the correct locations; if you +do not have sudo installed, change the install targets. + +The Module API +-------------- + +The OOP API +----------- + + +The Python Module +----------------- + +To test that the module loads: + + bash# python + >>> import x86disasm + >>> x86disasm.version_string() + '0.21-pre' + >>>^D + bash# + + >>> import x86disasm + >>> import array + >>> disasm = x86disasm.X86_Disasm( ) + >>> tgt = open( "/tmp/a.out", "rb" ) + >>> tgt.seek( 0, 2 ) + >>> size = tgt.tell() + >>> tgt.seek( 0, 0 ) + >>> buf = array.array( 'B' ) + >>> buf.fromfile( tgt, size ) + >>> tgt.close() + >>> data = x86disasm.byteArray( size ) + >>> for i in range( size ): + ... data[i] = buf.pop(0) + ... + >>> del buf + >>> del tgt + >>> insn = disasm.disasm( data, size - 1, 0, 0 ) + >>> insn.format( x86disasm.att_syntax ) + 'jg\t0x00000047' + >>> insn.format( x86disasm.raw_syntax ) + '0x00000000|0x00000000|2|7F 45 |||controlflow|jcc|jg|80386|General Purpose|||zero_clear sign_eq_oflow |0|0|relative|sbyte|00000047|' + >>> ops = insn.operand_list() + >>> node = ops.first() + >>> while node is not None: + ... s = node.op.format(x86disasm.raw_syntax) + ... print s + ... node = ops.next() + ... 
+ relative|sbyte|00000047| + + + + + + +The Perl Module +--------------- + +To test that the module loads: + + bash# perl + use x86disasm; + print x86disasm::version_string() . "\n"; + ^D + 0.21-pre + bash# + +The Ruby Module +--------------- + +To test that the module loads: + + bash# irb + irb(main):001:0> require 'x86disasm' + => true + irb(main):002:0> X86disasm.version_string() + => "0.21-pre" + irb(main):003:0> x = X86disasm::X86_Disasm.new + => # + irb(main):004:0> x.max_register_string() + => 8 + irb(main):003:0> ^D + bash# + +The Tcl Module +--------------- + +To test that the module loads: + + bash# tclsh + % load /usr/lib/tcl8.3/x86disasm.so X86disasm + % version_string + 0.21-pre + % ^D + bash# + + % x86_init 0 NULL NULL + OR + % x86disasm dis + _486b0708_p_x86disasm + % puts "[dis cget -last_error]" + 0 + + + + +The Interface Files +------------------- + + libdisasm.i -- interface file without shadow classes + libdisasm_oop.i -- interface file with shadow classes diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/libdisasm.i b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/libdisasm.i new file mode 100644 index 0000000000..ec12041755 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/libdisasm.i @@ -0,0 +1,508 @@ +%module x86disasm +%{ +#include "../../libdis.h" +#include "../../../config.h" +%} + +%rename(version_string) x86_version_string; +%include "../../libdis.h" +#include "../../../config.h" + +%inline %{ + const char * x86_version_string( void ) { + return PACKAGE_VERSION; + } +%} + +%rename(report_codes) x86_report_codes; +%rename(report_error) x86_report_error; +%rename(options) x86_options; +%rename(init) x86_init; +%rename(set_reporter) x86_set_reporter; +%rename(set_options) x86_set_options; +%rename(options) x86_get_options; +%rename(cleanup) x86_cleanup; +%rename(reg_type) x86_reg_type; +%rename(reg) x86_reg_t; +%rename(eaddr) x86_ea_t; +%rename(op_type) x86_op_type; +%rename(optype_is_address) x86_optype_is_address; +%rename(optype_is_relative) x86_optype_is_relative; +%rename(op_datatype) x86_op_datatype; +%rename(op_access) x86_op_access; +%rename(op_flags) x86_op_flags; +%rename(operand) x86_op_t; +%rename(insn_group) x86_insn_group; +%rename(insn_type) x86_insn_type; +%rename(insn_note) x86_insn_note ; +%rename(flag_status) x86_flag_status; +%rename(insn_cpu) x86_insn_cpu ; +%rename(insn_isa) x86_insn_isa ; +%rename(insn_prefix) x86_insn_prefix ; +%rename(insn) x86_insn_t; +%rename(insn_is_valid) x86_insn_is_valid; +%rename(i_disasm) x86_disasm; +%rename(i_disasm_range) x86_disasm_range; +%rename(i_disasm_forward) x86_disasm_forward; +%rename(insn_operand_count) x86_operand_count; +%rename(insn_operand_1st) x86_operand_1st; +%rename(insn_operand_2nd) x86_operand_2nd; +%rename(insn_operand_3rd) x86_operand_3rd; +%rename(insn_dest_operand) x86_get_dest_operand; +%rename(insn_src_operand) x86_get_src_operand; +%rename(insn_imm_operand) x86_get_imm_operand; +%rename(operand_size) x86_operand_size; +%rename(insn_rel_offset) x86_get_rel_offset; +%rename(insn_branch_target) x86_get_branch_target; +%rename(insn_imm) x86_get_imm; +%rename(insn_raw_imm) x86_get_raw_imm; +%rename(insn_set_addr) x86_set_insn_addr; +%rename(insn_set_offset) x86_set_insn_offset; +%rename(insn_set_function) x86_set_insn_function; +%rename(insn_set_block) x86_set_insn_block; +%rename(insn_tag) x86_tag_insn; +%rename(insn_untag) x86_untag_insn; +%rename(insn_is_tagged) x86_insn_is_tagged; 
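/* A hedged reading of the %rename block above and below, for orientation
 * rather than as upstream documentation: this is the flat interface file
 * (no shadow classes, per the README), and the renames strip the C-level
 * "x86_" prefix -- x86_insn_is_valid becomes insn_is_valid,
 * x86_get_branch_target becomes insn_branch_target, x86_format_insn
 * becomes insn_format, and so on.  From the generated Python module these
 * would presumably be reached as module-level calls such as
 * x86disasm.insn_is_valid(insn); that exact call line is illustrative and
 * not taken from the upstream docs. */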
+%rename(asm_format) x86_asm_format; +%rename(operand_format) x86_format_operand; +%rename(insn_format_mnemonic) x86_format_mnemonic; +%rename(insn_format) x86_format_insn; +%rename(header_format) x86_format_header; +%rename(endian) x86_endian; +%rename(size_default_address) x86_addr_size; +%rename(size_default_operand) x86_op_size; +%rename(size_machine_word) x86_word_size; +%rename(size_max_insn) x86_max_insn_size; +%rename(reg_sp) x86_sp_reg; +%rename(reg_fp) x86_fp_reg; +%rename(reg_ip) x86_ip_reg; +%rename(reg_from_id) x86_reg_from_id; +%rename(reg_from_alias) x86_get_aliased_reg; +%rename(invariant_op) x86_invariant_op_t; +%rename(invariant) x86_invariant_t; +%rename(disasm_invariant) x86_invariant_disasm; +%rename(disasm_size) x86_size_disasm; + +%include "carrays.i" + +%array_class( unsigned char, byteArray ); + + +%apply (unsigned char *STRING, int LENGTH) { + (unsigned char *buf, size_t buf_len) +}; + + +%newobject x86_op_copy; +%inline %{ + x86_op_t * x86_op_copy( x86_op_t * src ) { + x86_op_t *op; + + if (! src ) { + return NULL; + } + + op = (x86_op_t *) calloc( sizeof(x86_op_t), 1 ); + if ( op ) { + memcpy( op, src, sizeof(x86_op_t) ); + } + + return op; + } + + typedef struct x86_op_list_node { + x86_op_t *op; + struct x86_op_list_node *next, *prev; + } x86_op_list_node; + + typedef struct x86_op_list { + size_t count; + x86_op_list_node *head, *tail, *curr; + } x86_op_list; + + x86_op_list * x86_op_list_new () { + x86_op_list *list = (x86_op_list *) + calloc( sizeof(x86_op_list), 1 ); + list->count = 0; + return list; + } + + void x86_op_list_free(x86_op_list *list) { + x86_op_list_node *node, *next; + + node = list->head; + while ( node ) { + next = node->next; + /* free( node->insn ); */ + free( node ); + node = next; + } + + free( list ); + } + + x86_op_list_node * x86_op_list_first(x86_op_list *list) { + return list->head; + } + + x86_op_list_node * x86_op_list_last(x86_op_list *list) { + return list->tail; + } + + x86_op_list_node * x86_op_list_next(x86_op_list *list) { + if (! list->curr ) { + list->curr = list->head; + return list->head; + } + + list->curr = list->curr->next; + return list->curr; + } + + x86_op_list_node * x86_op_list_prev(x86_op_list *list) { + if (! list->curr ) { + list->curr = list->tail; + return list->tail; + } + + list->curr = list->curr->prev; + return list->curr; + } + +%} + +%newobject x86_op_list_append; + +%inline %{ + void x86_op_list_append( x86_op_list * list, x86_op_t *op ) { + x86_op_list_node *node = (x86_op_list_node *) + calloc( sizeof(x86_op_list_node) , 1 ); + if (! node ) { + return; + } + + list->count++; + if ( ! 
list->tail ) { + list->head = list->tail = node; + } else { + list->tail->next = node; + node->prev = list->tail; + list->tail = node; + } + + node->op = x86_op_copy( op ); + } + + x86_oplist_t * x86_op_list_node_copy( x86_oplist_t * list ) { + x86_oplist_t *ptr; + ptr = (x86_oplist_t *) calloc( sizeof(x86_oplist_t), 1 ); + if ( ptr ) { + memcpy( &ptr->op, &list->op, sizeof(x86_op_t) ); + } + + return ptr; + } + + x86_insn_t * x86_insn_new() { + x86_insn_t *insn = (x86_insn_t *) + calloc( sizeof(x86_insn_t), 1 ); + return insn; + } + + void x86_insn_free( x86_insn_t *insn ) { + x86_oplist_free( insn ); + free( insn ); + } +%} + +%newobject x86_insn_copy; + +%inline %{ + x86_insn_t * x86_insn_copy( x86_insn_t *src) { + x86_oplist_t *ptr, *list, *last = NULL; + x86_insn_t *insn = (x86_insn_t *) + calloc( sizeof(x86_insn_t), 1 ); + + if ( insn ) { + memcpy( insn, src, sizeof(x86_insn_t) ); + insn->operands = NULL; + insn->block = NULL; + insn->function = NULL; + + /* copy operand list */ + for ( list = src->operands; list; list = list->next ) { + ptr = x86_op_list_node_copy( list ); + + if (! ptr ) { + continue; + } + + if ( insn->operands ) { + last->next = ptr; + } else { + insn->operands = ptr; + } + last = ptr; + } + } + + return insn; + } + + x86_op_list * x86_insn_op_list( x86_insn_t *insn ) { + x86_oplist_t *list = insn->operands; + x86_op_list *op_list = x86_op_list_new(); + + for ( list = insn->operands; list; list = list->next ) { + x86_op_list_append( op_list, &list->op ); + } + + return op_list; + } + + typedef struct x86_insn_list_node { + x86_insn_t *insn; + struct x86_insn_list_node *next, *prev; + } x86_insn_list_node; + + typedef struct x86_insn_list { + size_t count; + x86_insn_list_node *head, *tail, *curr; + } x86_insn_list; + +%} + +%newobject x86_insn_list_new; + +%inline %{ + x86_insn_list * x86_insn_list_new () { + x86_insn_list *list = (x86_insn_list *) + calloc( sizeof(x86_insn_list), 1 ); + list->count = 0; + return list; + } + + void x86_insn_list_free( x86_insn_list * list ) { + x86_insn_list_node *node, *next; + + if (! list ) { + return; + } + + node = list->head; + while ( node ) { + next = node->next; + /* free( node->insn ); */ + free( node ); + node = next; + } + + free( list ); + } + + x86_insn_list_node * x86_insn_list_first( x86_insn_list *list ) { + if (! list ) { + return NULL; + } + return list->head; + } + + x86_insn_list_node * x86_insn_list_last( x86_insn_list *list ) { + if (! list ) { + return NULL; + } + return list->tail; + } + + x86_insn_list_node * x86_insn_list_next( x86_insn_list *list ) { + if (! list ) { + return NULL; + } + if (! list->curr ) { + list->curr = list->head; + return list->head; + } + + list->curr = list->curr->next; + return list->curr; + } + + x86_insn_list_node * x86_insn_list_prev( x86_insn_list *list ) { + if (! list ) { + return NULL; + } + if (! list->curr ) { + list->curr = list->tail; + return list->tail; + } + + list->curr = list->curr->prev; + return list->curr; + } + +%} + +%newobject x86_insn_list_append; + +%inline %{ + void x86_insn_list_append( x86_insn_list *list, x86_insn_t *insn ) { + x86_insn_list_node *node; + if (! list ) { + return; + } + + node = (x86_insn_list_node *) + calloc( sizeof(x86_insn_list_node) , 1 ); + + if (! node ) { + return; + } + + list->count++; + if ( ! 
list->tail ) { + list->head = list->tail = node; + } else { + list->tail->next = node; + node->prev = list->tail; + list->tail = node; + } + + node->insn = x86_insn_copy( insn ); + } + + typedef struct { + enum x86_report_codes last_error; + void * last_error_data; + void * disasm_callback; + void * disasm_resolver; + } x86disasm; + + void x86_default_reporter( enum x86_report_codes code, + void *data, void *arg ) { + x86disasm *dis = (x86disasm *) arg; + if ( dis ) { + dis->last_error = code; + dis->last_error_data = data; + } + } + + void x86_default_callback( x86_insn_t *insn, void *arg ) { + x86_insn_list *list = (x86_insn_list *) arg; + if ( list ) { + x86_insn_list_append( list, insn ); + } + } + + /* TODO: resolver stack, maybe a callback */ + long x86_default_resolver( x86_op_t *op, x86_insn_t *insn, void *arg ) { + x86disasm *dis = (x86disasm *) arg; + if ( dis ) { + //return dis->resolver( op, insn ); + return 0; + } + + return 0; + } + + +%} + +%newobject x86disasm_new; + +%inline %{ + x86disasm * x86disasm_new ( enum x86_options options ) { + x86disasm * dis = (x86disasm *) + calloc( sizeof( x86disasm ), 1 ); + x86_init( options, x86_default_reporter, dis ); + return dis; + } + + void x86disasm_free( x86disasm * dis ) { + x86_cleanup(); + free( dis ); + } +%} + +%newobject x86_disasm; + +%inline %{ + x86_insn_t * disasm( unsigned char *buf, size_t buf_len, + unsigned long buf_rva, unsigned int offset ) { + x86_insn_t *insn = calloc( sizeof( x86_insn_t ), 1 ); + x86_disasm( buf, buf_len, buf_rva, offset, insn ); + return insn; + } + + int disasm_range( unsigned char *buf, size_t buf_len, + unsigned long buf_rva, unsigned int offset, + unsigned int len ) { + + x86_insn_list *list = x86_insn_list_new(); + + if ( len > buf_len ) { + len = buf_len; + } + + return x86_disasm_range( buf, buf_rva, offset, len, + x86_default_callback, list ); + } + + int disasm_forward( unsigned char *buf, size_t buf_len, + unsigned long buf_rva, unsigned int offset ) { + x86_insn_list *list = x86_insn_list_new(); + + /* use default resolver: damn SWIG callbacks! 
*/ + return x86_disasm_forward( buf, buf_len, buf_rva, offset, + x86_default_callback, list, + x86_default_resolver, NULL ); + } + + size_t disasm_invariant( unsigned char *buf, size_t buf_len, + x86_invariant_t *inv ) { + return x86_invariant_disasm( buf, buf_len, inv ); + } + + size_t disasm_size( unsigned char *buf, size_t buf_len ) { + return x86_size_disasm( buf, buf_len ); + } + + int x86_max_operand_string( enum x86_asm_format format ) { + switch ( format ) { + case xml_syntax: + return MAX_OP_XML_STRING; + break; + case raw_syntax: + return MAX_OP_RAW_STRING; + break; + case native_syntax: + case intel_syntax: + case att_syntax: + case unknown_syntax: + default: + return MAX_OP_STRING; + break; + } + } + + + int x86_max_insn_string( enum x86_asm_format format ) { + switch ( format ) { + case xml_syntax: + return MAX_INSN_XML_STRING; + break; + case raw_syntax: + return MAX_INSN_RAW_STRING; + break; + case native_syntax: + case intel_syntax: + case att_syntax: + case unknown_syntax: + default: + return MAX_INSN_STRING; + break; + } + } + + int x86_max_num_operands( ) { return MAX_NUM_OPERANDS; } +%} + diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/libdisasm_oop.i b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/libdisasm_oop.i new file mode 100644 index 0000000000..973a47e27b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/libdisasm_oop.i @@ -0,0 +1,1114 @@ +%module x86disasm +%{ +#ifdef _MSC_VER + typedef __int64 qword; +#else + typedef long long qword; +#endif + +#include + +#define MAX_REGNAME 8 +#define MAX_PREFIX_STR 32 +#define MAX_MNEM_STR 16 +#define MAX_INSN_SIZE 20 +#define MAX_OP_STRING 32 +#define MAX_OP_RAW_STRING 64 +#define MAX_OP_XML_STRING 256 +#define MAX_NUM_OPERANDS 8 +#define MAX_INSN_STRING 512 +#define MAX_INSN_RAW_STRING 1024 +#define MAX_INSN_XML_STRING 4096 + +#include "../../../config.h" + + +const char * version_string( void ) { + return PACKAGE_VERSION; +} + +%} + +const char * version_string( void ); + +%rename(X86_Register) x86_reg_t; +%rename(X86_EAddr) x86_ea_t; +%rename(X86_Operand) x86_op_t; +//%rename(X86_OpList) x86_oplist_t; +%rename(X86_Insn) x86_insn_t; +%rename(X86_InvOperand) x86_invariant_op_t; +%rename(X86_Invariant) x86_invariant_t; + +%include "carrays.i" + +%array_class( unsigned char, byteArray ); + + +%apply (unsigned char *STRING, int LENGTH) { + (unsigned char *buf, size_t buf_len) +}; + + +%inline %{ + + +enum x86_asm_format { + unknown_syntax = 0, /* never use! 
*/ + native_syntax, /* header: 35 bytes */ + intel_syntax, /* header: 23 bytes */ + att_syntax, /* header: 23 bytes */ + xml_syntax, /* header: 679 bytes */ + raw_syntax /* header: 172 bytes */ +}; +%} + +/* ================================================================== */ +/* operand class */ +%inline %{ + enum x86_reg_type { + reg_gen = 0x00001, reg_in = 0x00002, + reg_out = 0x00004, reg_local = 0x00008, + reg_fpu = 0x00010, reg_seg = 0x00020, + reg_simd = 0x00040, reg_sys = 0x00080, + reg_sp = 0x00100, reg_fp = 0x00200, + reg_pc = 0x00400, reg_retaddr = 0x00800, + reg_cond = 0x01000, reg_zero = 0x02000, + reg_ret = 0x04000, reg_src = 0x10000, + reg_dest = 0x20000, reg_count = 0x40000 + }; + + typedef struct { + char name[MAX_REGNAME]; + enum x86_reg_type type; + unsigned int size; + unsigned int id; + unsigned int alias; + unsigned int shift; + } x86_reg_t; + + void x86_reg_from_id( unsigned int id, x86_reg_t * reg ); + + typedef struct { + unsigned int scale; + x86_reg_t index, base; + long disp; + char disp_sign; + char disp_size; + } x86_ea_t; + + enum x86_op_type { + op_unused = 0, + op_register = 1, + op_immediate = 2, + op_relative_near = 3, + op_relative_far = 4, + op_absolute = 5, + op_expression = 6, + op_offset = 7, + op_unknown + }; + + enum x86_op_datatype { + op_byte = 1, op_word = 2, + op_dword = 3, op_qword = 4, + op_dqword = 5, op_sreal = 6, + op_dreal = 7, op_extreal = 8, + op_bcd = 9, op_ssimd = 10, + op_dsimd = 11, op_sssimd = 12, + op_sdsimd = 13, op_descr32 = 14, + op_descr16 = 15, op_pdescr32 = 16, + op_pdescr16 = 17, op_fpuenv = 18, + op_fpregset = 19, + }; + + enum x86_op_access { + op_read = 1, + op_write = 2, + op_execute = 4 + }; + + enum x86_op_flags { + op_signed = 1, op_string = 2, + op_constant = 4, op_pointer = 8, + op_sysref = 0x010, op_implied = 0x020, + op_hardcode = 0x40, op_es_seg = 0x100, + op_cs_seg = 0x200, op_ss_seg = 0x300, + op_ds_seg = 0x400, op_fs_seg = 0x500, + op_gs_seg = 0x600 + }; + + typedef struct { + enum x86_op_type type; + enum x86_op_datatype datatype; + enum x86_op_access access; + enum x86_op_flags flags; + union { + char sbyte; + short sword; + long sdword; + qword sqword; + unsigned char byte; + unsigned short word; + unsigned long dword; + qword qword; + float sreal; + double dreal; + unsigned char extreal[10]; + unsigned char bcd[10]; + qword dqword[2]; + unsigned char simd[16]; + unsigned char fpuenv[28]; + void * address; + unsigned long offset; + x86_reg_t reg; + char relative_near; + long relative_far; + x86_ea_t expression; + } data; + void * insn; + } x86_op_t; + + unsigned int x86_operand_size( x86_op_t *op ); + + int x86_format_operand(x86_op_t *op, char *buf, int len, + enum x86_asm_format format); +%} + +%extend x86_reg_t{ + x86_reg_t * aliased_reg( ) { + x86_reg_t * reg = (x86_reg_t * ) + calloc( sizeof(x86_reg_t), 1 ); + x86_reg_from_id( self->id, reg ); + return reg; + } +} + +%extend x86_op_t{ + size_t size() { + return x86_operand_size( self ); + } + char * format( enum x86_asm_format format ) { + char *buf, *str; + size_t len; + + switch ( format ) { + case xml_syntax: + len = MAX_OP_XML_STRING; + break; + case raw_syntax: + len = MAX_OP_RAW_STRING; + break; + case native_syntax: + case intel_syntax: + case att_syntax: + case unknown_syntax: + default: + len = MAX_OP_STRING; + break; + } + + buf = (char * ) calloc( len + 1, 1 ); + x86_format_operand( self, buf, len, format ); + + /* drop buffer down to a reasonable size */ + str = strdup( buf ); + free(buf); + return str; + } + + int is_address( ) { + if ( 
self->type == op_absolute || + self->type == op_offset ) { + return 1; + } + + return 0; + } + + int is_relative( ) { + if ( self->type == op_relative_near || + self->type == op_relative_far ) { + return 1; + } + + return 0; + } + + %newobject copy; + x86_op_t * copy() { + x86_op_t *op = (x86_op_t *) calloc( sizeof(x86_op_t), 1 ); + + if ( op ) { + memcpy( op, self, sizeof(x86_op_t) ); + } + + return op; + } +} + +/* ================================================================== */ +/* operand list class */ +%inline %{ + typedef struct X86_OpListNode { + x86_op_t *op; + struct X86_OpListNode *next, *prev; + } X86_OpListNode; + + typedef struct X86_OpList { + size_t count; + X86_OpListNode *head, *tail, *curr; + } X86_OpList; +%} + +%extend X86_OpList { + X86_OpList () { + X86_OpList *list = (X86_OpList *) + calloc( sizeof(X86_OpList), 1 ); + list->count = 0; + return list; + } + + ~X86_OpList() { + X86_OpListNode *node, *next; + + node = self->head; + while ( node ) { + next = node->next; + /* free( node->insn ); */ + free( node ); + node = next; + } + + free( self ); + } + + X86_OpListNode * first() { + self->curr = self->head; + return self->head; + } + + X86_OpListNode * last() { + self->curr = self->tail; + return self->tail; + } + + X86_OpListNode * next() { + if (! self->curr ) { + self->curr = self->head; + return self->head; + } + + self->curr = self->curr->next; + return self->curr; + } + + X86_OpListNode * prev() { + if (! self->curr ) { + self->curr = self->tail; + return self->tail; + } + + self->curr = self->curr->prev; + return self->curr; + } + + %newobject append; + void append( x86_op_t *op ) { + X86_OpListNode *node = (X86_OpListNode *) + calloc( sizeof(X86_OpListNode) , 1 ); + if (! node ) { + return; + } + + self->count++; + if ( ! 
self->tail ) { + self->head = self->tail = node; + } else { + self->tail->next = node; + node->prev = self->tail; + self->tail = node; + } + + node->op = x86_op_t_copy( op ); + } +} + +%inline %{ + typedef struct x86_operand_list { + x86_op_t op; + struct x86_operand_list *next; + } x86_oplist_t; +%} + +%extend x86_oplist_t { + %newobject x86_oplist_node_copy; +} + +/* ================================================================== */ +/* instruction class */ +%inline %{ + x86_oplist_t * x86_oplist_node_copy( x86_oplist_t * list ) { + x86_oplist_t *ptr; + ptr = (x86_oplist_t *) calloc( sizeof(x86_oplist_t), 1 ); + if ( ptr ) { + memcpy( &ptr->op, &list->op, sizeof(x86_op_t) ); + } + + return ptr; + } + + enum x86_insn_group { + insn_none = 0, insn_controlflow = 1, + insn_arithmetic = 2, insn_logic = 3, + insn_stack = 4, insn_comparison = 5, + insn_move = 6, insn_string = 7, + insn_bit_manip = 8, insn_flag_manip = 9, + insn_fpu = 10, insn_interrupt = 13, + insn_system = 14, insn_other = 15 + }; + + enum x86_insn_type { + insn_invalid = 0, insn_jmp = 0x1001, + insn_jcc = 0x1002, insn_call = 0x1003, + insn_callcc = 0x1004, insn_return = 0x1005, + insn_add = 0x2001, insn_sub = 0x2002, + insn_mul = 0x2003, insn_div = 0x2004, + insn_inc = 0x2005, insn_dec = 0x2006, + insn_shl = 0x2007, insn_shr = 0x2008, + insn_rol = 0x2009, insn_ror = 0x200A, + insn_and = 0x3001, insn_or = 0x3002, + insn_xor = 0x3003, insn_not = 0x3004, + insn_neg = 0x3005, insn_push = 0x4001, + insn_pop = 0x4002, insn_pushregs = 0x4003, + insn_popregs = 0x4004, insn_pushflags = 0x4005, + insn_popflags = 0x4006, insn_enter = 0x4007, + insn_leave = 0x4008, insn_test = 0x5001, + insn_cmp = 0x5002, insn_mov = 0x6001, + insn_movcc = 0x6002, insn_xchg = 0x6003, + insn_xchgcc = 0x6004, insn_strcmp = 0x7001, + insn_strload = 0x7002, insn_strmov = 0x7003, + insn_strstore = 0x7004, insn_translate = 0x7005, + insn_bittest = 0x8001, insn_bitset = 0x8002, + insn_bitclear = 0x8003, insn_clear_carry = 0x9001, + insn_clear_zero = 0x9002, insn_clear_oflow = 0x9003, + insn_clear_dir = 0x9004, insn_clear_sign = 0x9005, + insn_clear_parity = 0x9006, insn_set_carry = 0x9007, + insn_set_zero = 0x9008, insn_set_oflow = 0x9009, + insn_set_dir = 0x900A, insn_set_sign = 0x900B, + insn_set_parity = 0x900C, insn_tog_carry = 0x9010, + insn_tog_zero = 0x9020, insn_tog_oflow = 0x9030, + insn_tog_dir = 0x9040, insn_tog_sign = 0x9050, + insn_tog_parity = 0x9060, insn_fmov = 0xA001, + insn_fmovcc = 0xA002, insn_fneg = 0xA003, + insn_fabs = 0xA004, insn_fadd = 0xA005, + insn_fsub = 0xA006, insn_fmul = 0xA007, + insn_fdiv = 0xA008, insn_fsqrt = 0xA009, + insn_fcmp = 0xA00A, insn_fcos = 0xA00C, + insn_fldpi = 0xA00D, insn_fldz = 0xA00E, + insn_ftan = 0xA00F, insn_fsine = 0xA010, + insn_fsys = 0xA020, insn_int = 0xD001, + insn_intcc = 0xD002, insn_iret = 0xD003, + insn_bound = 0xD004, insn_debug = 0xD005, + insn_trace = 0xD006, insn_invalid_op = 0xD007, + insn_oflow = 0xD008, insn_halt = 0xE001, + insn_in = 0xE002, insn_out = 0xE003, + insn_cpuid = 0xE004, insn_nop = 0xF001, + insn_bcdconv = 0xF002, insn_szconv = 0xF003 + }; + + enum x86_insn_note { + insn_note_ring0 = 1, + insn_note_smm = 2, + insn_note_serial = 4 + }; + + enum x86_flag_status { + insn_carry_set = 0x1, + insn_zero_set = 0x2, + insn_oflow_set = 0x4, + insn_dir_set = 0x8, + insn_sign_set = 0x10, + insn_parity_set = 0x20, + insn_carry_or_zero_set = 0x40, + insn_zero_set_or_sign_ne_oflow = 0x80, + insn_carry_clear = 0x100, + insn_zero_clear = 0x200, + insn_oflow_clear = 0x400, + insn_dir_clear = 
0x800, + insn_sign_clear = 0x1000, + insn_parity_clear = 0x2000, + insn_sign_eq_oflow = 0x4000, + insn_sign_ne_oflow = 0x8000 + }; + + enum x86_insn_cpu { + cpu_8086 = 1, cpu_80286 = 2, + cpu_80386 = 3, cpu_80387 = 4, + cpu_80486 = 5, cpu_pentium = 6, + cpu_pentiumpro = 7, cpu_pentium2 = 8, + cpu_pentium3 = 9, cpu_pentium4 = 10, + cpu_k6 = 16, cpu_k7 = 32, + cpu_athlon = 48 + }; + + enum x86_insn_isa { + isa_gp = 1, isa_fp = 2, + isa_fpumgt = 3, isa_mmx = 4, + isa_sse1 = 5, isa_sse2 = 6, + isa_sse3 = 7, isa_3dnow = 8, + isa_sys = 9 + }; + + enum x86_insn_prefix { + insn_no_prefix = 0, + insn_rep_zero = 1, + insn_rep_notzero = 2, + insn_lock = 4 + }; + + + typedef struct { + unsigned long addr; + unsigned long offset; + enum x86_insn_group group; + enum x86_insn_type type; + enum x86_insn_note note; + unsigned char bytes[MAX_INSN_SIZE]; + unsigned char size; + unsigned char addr_size; + unsigned char op_size; + enum x86_insn_cpu cpu; + enum x86_insn_isa isa; + enum x86_flag_status flags_set; + enum x86_flag_status flags_tested; + unsigned char stack_mod; + long stack_mod_val; + enum x86_insn_prefix prefix; + char prefix_string[MAX_PREFIX_STR]; + char mnemonic[MAX_MNEM_STR]; + x86_oplist_t *operands; + size_t operand_count; + size_t explicit_count; + void *block; + void *function; + int tag; + } x86_insn_t; + + typedef void (*x86_operand_fn)(x86_op_t *op, x86_insn_t *insn, + void *arg); + + enum x86_op_foreach_type { + op_any = 0, + op_dest = 1, + op_src = 2, + op_ro = 3, + op_wo = 4, + op_xo = 5, + op_rw = 6, + op_implicit = 0x10, + op_explicit = 0x20 + }; + + size_t x86_operand_count( x86_insn_t *insn, + enum x86_op_foreach_type type ); + x86_op_t * x86_operand_1st( x86_insn_t *insn ); + x86_op_t * x86_operand_2nd( x86_insn_t *insn ); + x86_op_t * x86_operand_3rd( x86_insn_t *insn ); + long x86_get_rel_offset( x86_insn_t *insn ); + x86_op_t * x86_get_branch_target( x86_insn_t *insn ); + x86_op_t * x86_get_imm( x86_insn_t *insn ); + unsigned char * x86_get_raw_imm( x86_insn_t *insn ); + void x86_set_insn_addr( x86_insn_t *insn, unsigned long addr ); + int x86_format_mnemonic(x86_insn_t *insn, char *buf, int len, + enum x86_asm_format format); + int x86_format_insn(x86_insn_t *insn, char *buf, int len, + enum x86_asm_format); + void x86_oplist_free( x86_insn_t *insn ); + int x86_insn_is_valid( x86_insn_t *insn ); +%} + +%extend x86_insn_t { + x86_insn_t() { + x86_insn_t *insn = (x86_insn_t *) + calloc( sizeof(x86_insn_t), 1 ); + return insn; + } + ~x86_insn_t() { + x86_oplist_free( self ); + free( self ); + } + + int is_valid( ) { + return x86_insn_is_valid( self ); + } + + x86_op_t * operand_1st() { + return x86_operand_1st( self ); + } + + x86_op_t * operand_2nd() { + return x86_operand_2nd( self ); + } + + x86_op_t * operand_3rd() { + return x86_operand_3rd( self ); + } + + x86_op_t * operand_dest() { + return x86_operand_1st( self ); + } + + x86_op_t * operand_src() { + return x86_operand_2nd( self ); + } + + size_t num_operands( enum x86_op_foreach_type type ) { + return x86_operand_count( self, type ); + } + + long rel_offset() { + return x86_get_rel_offset( self ); + } + + x86_op_t * branch_target() { + return x86_get_branch_target( self ); + } + + x86_op_t * imm() { + return x86_get_imm( self ); + } + + unsigned char * raw_imm() { + return x86_get_raw_imm( self ); + } + + %newobject format; + char * format( enum x86_asm_format format ) { + char *buf, *str; + size_t len; + + switch ( format ) { + case xml_syntax: + len = MAX_INSN_XML_STRING; + break; + case raw_syntax: + len = 
MAX_INSN_RAW_STRING; + break; + case native_syntax: + case intel_syntax: + case att_syntax: + case unknown_syntax: + default: + len = MAX_INSN_STRING; + break; + } + + buf = (char * ) calloc( len + 1, 1 ); + x86_format_insn( self, buf, len, format ); + + /* drop buffer down to a reasonable size */ + str = strdup( buf ); + free(buf); + return str; + } + + %newobject format_mnemonic; + char * format_mnemonic( enum x86_asm_format format ) { + char *buf, *str; + size_t len = MAX_MNEM_STR + MAX_PREFIX_STR + 4; + + buf = (char * ) calloc( len, 1 ); + x86_format_mnemonic( self, buf, len, format ); + + /* drop buffer down to a reasonable size */ + str = strdup( buf ); + free(buf); + + return str; + } + + %newobject copy; + x86_insn_t * copy() { + x86_oplist_t *ptr, *list, *last = NULL; + x86_insn_t *insn = (x86_insn_t *) + calloc( sizeof(x86_insn_t), 1 ); + + if ( insn ) { + memcpy( insn, self, sizeof(x86_insn_t) ); + insn->operands = NULL; + insn->block = NULL; + insn->function = NULL; + + /* copy operand list */ + for ( list = self->operands; list; list = list->next ) { + ptr = x86_oplist_node_copy( list ); + + if (! ptr ) { + continue; + } + + if ( insn->operands ) { + last->next = ptr; + } else { + insn->operands = ptr; + } + last = ptr; + } + } + + return insn; + } + + X86_OpList * operand_list( ) { + x86_oplist_t *list = self->operands; + X86_OpList *op_list = new_X86_OpList(); + + for ( list = self->operands; list; list = list->next ) { + X86_OpList_append( op_list, &list->op ); + } + + return op_list; + } +} + +/* ================================================================== */ +/* invariant instruction class */ +%inline %{ + #define X86_WILDCARD_BYTE 0xF4 + + typedef struct { + enum x86_op_type type; + enum x86_op_datatype datatype; + enum x86_op_access access; + enum x86_op_flags flags; + } x86_invariant_op_t; + + typedef struct { + unsigned char bytes[64]; + unsigned int size; + enum x86_insn_group group; + enum x86_insn_type type; + x86_invariant_op_t operands[3]; + } x86_invariant_t; +%} + +%extend x86_invariant_t { + + x86_invariant_t() { + x86_invariant_t *inv = (x86_invariant_t *) + calloc( sizeof(x86_invariant_t), 1 ); + return inv; + } + + ~x86_invariant_t() { + free( self ); + } +} + +/* ================================================================== */ +/* instruction list class */ +%inline %{ + typedef struct X86_InsnListNode { + x86_insn_t *insn; + struct X86_InsnListNode *next, *prev; + } X86_InsnListNode; + + typedef struct X86_InsnList { + size_t count; + X86_InsnListNode *head, *tail, *curr; + } X86_InsnList; +%} + +%extend X86_InsnList { + X86_InsnList () { + X86_InsnList *list = (X86_InsnList *) + calloc( sizeof(X86_InsnList), 1 ); + list->count = 0; + return list; + } + + ~X86_InsnList() { + X86_InsnListNode *node, *next; + + node = self->head; + while ( node ) { + next = node->next; + /* free( node->insn ); */ + free( node ); + node = next; + } + + free( self ); + } + + X86_InsnListNode * first() { return self->head; } + + X86_InsnListNode * last() { return self->tail; } + + X86_InsnListNode * next() { + if (! self->curr ) { + self->curr = self->head; + return self->head; + } + + self->curr = self->curr->next; + return self->curr; + } + + X86_InsnListNode * prev() { + if (! self->curr ) { + self->curr = self->tail; + return self->tail; + } + + self->curr = self->curr->prev; + return self->curr; + } + + %newobject append; + void append( x86_insn_t *insn ) { + X86_InsnListNode *node = (X86_InsnListNode *) + calloc( sizeof(X86_InsnListNode) , 1 ); + if (! 
node ) { + return; + } + + self->count++; + if ( ! self->tail ) { + self->head = self->tail = node; + } else { + self->tail->next = node; + node->prev = self->tail; + self->tail = node; + } + + node->insn = x86_insn_t_copy( insn ); + } +} + +/* ================================================================== */ +/* address table class */ +/* slight TODO */ + +/* ================================================================== */ +/* Main disassembler class */ +%inline %{ + + enum x86_options { + opt_none= 0, + opt_ignore_nulls=1, + opt_16_bit=2 + }; + enum x86_report_codes { + report_disasm_bounds, + report_insn_bounds, + report_invalid_insn, + report_unknown + }; + + + typedef struct { + enum x86_report_codes last_error; + void * last_error_data; + void * disasm_callback; + void * disasm_resolver; + } X86_Disasm; + + typedef void (*DISASM_REPORTER)( enum x86_report_codes code, + void *data, void *arg ); + typedef void (*DISASM_CALLBACK)( x86_insn_t *insn, void * arg ); + typedef long (*DISASM_RESOLVER)( x86_op_t *op, + x86_insn_t * current_insn, + void *arg ); + + void x86_report_error( enum x86_report_codes code, void *data ); + int x86_init( enum x86_options options, DISASM_REPORTER reporter, + void *arg); + void x86_set_reporter( DISASM_REPORTER reporter, void *arg); + void x86_set_options( enum x86_options options ); + enum x86_options x86_get_options( void ); + int x86_cleanup(void); + int x86_format_header( char *buf, int len, enum x86_asm_format format); + unsigned int x86_endian(void); + unsigned int x86_addr_size(void); + unsigned int x86_op_size(void); + unsigned int x86_word_size(void); + unsigned int x86_max_insn_size(void); + unsigned int x86_sp_reg(void); + unsigned int x86_fp_reg(void); + unsigned int x86_ip_reg(void); + size_t x86_invariant_disasm( unsigned char *buf, int buf_len, + x86_invariant_t *inv ); + size_t x86_size_disasm( unsigned char *buf, unsigned int buf_len ); + int x86_disasm( unsigned char *buf, unsigned int buf_len, + unsigned long buf_rva, unsigned int offset, + x86_insn_t * insn ); + int x86_disasm_range( unsigned char *buf, unsigned long buf_rva, + unsigned int offset, unsigned int len, + DISASM_CALLBACK func, void *arg ); + int x86_disasm_forward( unsigned char *buf, unsigned int buf_len, + unsigned long buf_rva, unsigned int offset, + DISASM_CALLBACK func, void *arg, + DISASM_RESOLVER resolver, void *r_arg ); + + void x86_default_reporter( enum x86_report_codes code, + void *data, void *arg ) { + X86_Disasm *dis = (X86_Disasm *) arg; + if ( dis ) { + dis->last_error = code; + dis->last_error_data = data; + } + } + + void x86_default_callback( x86_insn_t *insn, void *arg ) { + X86_InsnList *list = (X86_InsnList *) arg; + if ( list ) { + X86_InsnList_append( list, insn ); + } + } + + /* TODO: resolver stack, maybe a callback */ + long x86_default_resolver( x86_op_t *op, x86_insn_t *insn, void *arg ) { + X86_Disasm *dis = (X86_Disasm *) arg; + if ( dis ) { + //return dis->resolver( op, insn ); + return 0; + } + + return 0; + } + +%} + +%extend X86_Disasm { + + X86_Disasm( ) { + X86_Disasm * dis = (X86_Disasm *) + calloc( sizeof( X86_Disasm ), 1 ); + x86_init( opt_none, x86_default_reporter, dis ); + return dis; + } + + X86_Disasm( enum x86_options options ) { + X86_Disasm * dis = (X86_Disasm *) + calloc( sizeof( X86_Disasm ), 1 ); + x86_init( options, x86_default_reporter, dis ); + return dis; + } + + X86_Disasm( enum x86_options options, DISASM_REPORTER reporter ) { + X86_Disasm * dis = (X86_Disasm *) + calloc( sizeof( X86_Disasm ), 1 ); + 
x86_init( options, reporter, NULL ); + return dis; + } + + X86_Disasm( enum x86_options options, DISASM_REPORTER reporter, + void * arg ) { + X86_Disasm * dis = (X86_Disasm *) + calloc( sizeof( X86_Disasm ), 1 ); + x86_init( options, reporter, arg ); + return dis; + } + + ~X86_Disasm() { + x86_cleanup(); + free( self ); + } + + void set_options( enum x86_options options ) { + return x86_set_options( options ); + } + + enum x86_options options() { + return x86_get_options(); + } + + void set_callback( void * callback ) { + self->disasm_callback = callback; + } + + void set_resolver( void * callback ) { + self->disasm_resolver = callback; + } + + void report_error( enum x86_report_codes code ) { + x86_report_error( code, NULL ); + } + + %newobject disasm; + x86_insn_t * disasm( unsigned char *buf, size_t buf_len, + unsigned long buf_rva, unsigned int offset ) { + x86_insn_t *insn = calloc( sizeof( x86_insn_t ), 1 ); + x86_disasm( buf, buf_len, buf_rva, offset, insn ); + return insn; + } + + int disasm_range( unsigned char *buf, size_t buf_len, + unsigned long buf_rva, unsigned int offset, + unsigned int len ) { + + X86_InsnList *list = new_X86_InsnList(); + + if ( len > buf_len ) { + len = buf_len; + } + + return x86_disasm_range( buf, buf_rva, offset, len, + x86_default_callback, list ); + } + + int disasm_forward( unsigned char *buf, size_t buf_len, + unsigned long buf_rva, unsigned int offset ) { + X86_InsnList *list = new_X86_InsnList(); + + /* use default resolver: damn SWIG callbacks! */ + return x86_disasm_forward( buf, buf_len, buf_rva, offset, + x86_default_callback, list, + x86_default_resolver, NULL ); + } + + size_t disasm_invariant( unsigned char *buf, size_t buf_len, + x86_invariant_t *inv ) { + return x86_invariant_disasm( buf, buf_len, inv ); + } + + size_t disasm_size( unsigned char *buf, size_t buf_len ) { + return x86_size_disasm( buf, buf_len ); + } + + %newobject format_header; + char * format_header( enum x86_asm_format format) { + char *buf, *str; + size_t len; + + switch ( format ) { + /* these were obtained from x86_format.c */ + case xml_syntax: + len = 679; break; + case raw_syntax: + len = 172; break; + case native_syntax: + len = 35; break; + case intel_syntax: + len = 23; break; + case att_syntax: + len = 23; break; + case unknown_syntax: + default: + len = 23; break; + } + + buf = (char * ) calloc( len + 1, 1 ); + x86_format_header( buf, len, format ); + + return buf; + } + + unsigned int endian() { + return x86_endian(); + } + + unsigned int addr_size() { + return x86_addr_size(); + } + + unsigned int op_size() { + return x86_op_size(); + } + + unsigned int word_size() { + return x86_word_size(); + } + + unsigned int max_insn_size() { + return x86_max_insn_size(); + } + + unsigned int sp_reg() { + return x86_sp_reg(); + } + + unsigned int fp_reg() { + return x86_fp_reg(); + } + + unsigned int ip_reg() { + return x86_ip_reg(); + } + + %newobject reg_from_id; + x86_reg_t * reg_from_id( unsigned int id ) { + x86_reg_t * reg = calloc( sizeof(x86_reg_t), 1 ); + x86_reg_from_id( id, reg ); + return reg; + } + + unsigned char wildcard_byte() { return X86_WILDCARD_BYTE; } + + int max_register_string() { return MAX_REGNAME; } + + int max_prefix_string() { return MAX_PREFIX_STR; } + + int max_mnemonic_string() { return MAX_MNEM_STR; } + + int max_operand_string( enum x86_asm_format format ) { + switch ( format ) { + case xml_syntax: + return MAX_OP_XML_STRING; + break; + case raw_syntax: + return MAX_OP_RAW_STRING; + break; + case native_syntax: + case intel_syntax: + 
case att_syntax: + case unknown_syntax: + default: + return MAX_OP_STRING; + break; + } + } + + + int max_insn_string( enum x86_asm_format format ) { + switch ( format ) { + case xml_syntax: + return MAX_INSN_XML_STRING; + break; + case raw_syntax: + return MAX_INSN_RAW_STRING; + break; + case native_syntax: + case intel_syntax: + case att_syntax: + case unknown_syntax: + default: + return MAX_INSN_STRING; + break; + } + } + + int max_num_operands( ) { return MAX_NUM_OPERANDS; } +} + +/* python callback, per the manual */ +/*%typemap(python,in) PyObject *pyfunc { + if (!PyCallable_Check($source)) { + PyErr_SetString(PyExc_TypeError, "Need a callable object!"); + return NULL; + } + $target = $source; +}*/ + +/* python FILE * callback, per the manual */ +/* +%typemap(python,in) FILE * { + if (!PyFile_Check($source)) { + PyErr_SetString(PyExc_TypeError, "Need a file!"); + return NULL; + } + $target = PyFile_AsFile($source); +}*/ + + diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/perl/Makefile-swig b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/perl/Makefile-swig new file mode 100644 index 0000000000..9f3a645733 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/perl/Makefile-swig @@ -0,0 +1,65 @@ +ifndef BASE_NAME +BASE_NAME = x86disasm +endif + +ifndef SWIG +SWIG = swig # apt-get install swig ! +endif + +ifndef GCC +GCC = gcc +endif + +ifndef CC_FLAGS +CC_FLAGS = -c -fPIC +endif + +ifndef LD_FLAGS +LD_FLAGS = -shared -L.. -ldisasm +endif + +INTERFACE_FILE = libdisasm_oop.i + +SWIG_INTERFACE = ../$(INTERFACE_FILE) + +# PERL rules +PERL_MOD = blib/arch/auto/$(BASE_NAME)/$(BASE_NAME).so +PERL_SHADOW = $(BASE_NAME)_wrap.c +PERL_SWIG = $(BASE_NAME).pl +PERL_OBJ = $(BASE_NAME)_wrap.o +PERL_INC = `perl -e 'use Config; print $$Config{archlib};'`/CORE +PERL_CC_FLAGS = `perl -e 'use Config; print $$Config{ccflags};'` + +#==================================================== +# TARGETS + +all: swig-perl + +dummy: swig-perl install uninstall clean + +swig-perl: $(PERL_MOD) + +$(PERL_MOD): $(PERL_OBJ) + perl Makefile.PL + make + #$(GCC) $(LD_FLAGS) $(PERL_OBJ) -o $@ + +$(PERL_OBJ): $(PERL_SHADOW) + $(GCC) $(CC_FLAGS) $(PERL_CC_FLAGS) -I$(PERL_INC) -o $@ $< + +$(PERL_SHADOW): $(SWIG_INTERFACE) + swig -perl -shadow -o $(PERL_SHADOW) -outdir . 
$< + +# ================================================================== +install: $(PERL_MOD) + make install + +# ================================================================== +uninstall: + +# ================================================================== +clean: + rm $(PERL_MOD) $(PERL_OBJ) + rm $(PERL_SHADOW) + rm -rf Makefile blib pm_to_blib + diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/perl/Makefile.PL b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/perl/Makefile.PL new file mode 100644 index 0000000000..6e625df182 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/perl/Makefile.PL @@ -0,0 +1,7 @@ +use ExtUtils::MakeMaker; + +WriteMakefile( + 'NAME' => 'x86disasm', + 'LIBS' => ['-ldisasm'], + 'OBJECT' => 'x86disasm_wrap.o' +); diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/python/Makefile-swig b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/python/Makefile-swig new file mode 100644 index 0000000000..544681a13a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/python/Makefile-swig @@ -0,0 +1,64 @@ +ifndef BASE_NAME +BASE_NAME = x86disasm +endif + +ifndef SWIG +SWIG = swig # apt-get install swig ! +endif + +ifndef GCC +GCC = gcc +endif + +ifndef CC_FLAGS +CC_FLAGS = -c -fPIC +endif + +ifndef LD_FLAGS +LD_FLAGS = -shared -L.. -ldisasm +endif + +INTERFACE_FILE = libdisasm_oop.i + +SWIG_INTERFACE = ../$(INTERFACE_FILE) + +# PYTHON rules +PYTHON_MOD = $(BASE_NAME)-python.so +PYTHON_SHADOW = $(BASE_NAME)_wrap.c +PYTHON_SWIG = $(BASE_NAME).py +PYTHON_OBJ = $(BASE_NAME)_wrap.o +PYTHON_INC = `/bin/echo -e 'import sys\nprint sys.prefix + "/include/python" + sys.version[:3]' | python` +PYTHON_LIB = `/bin/echo -e 'import sys\nprint sys.prefix + "/lib/python" + sys.version[:3]' | python` +PYTHON_DEST = $(PYTHON_LIB)/lib-dynload/_$(BASE_NAME).so + +#==================================================== +# TARGETS + +all: swig-python + +dummy: swig-python install uninstall clean + +swig-python: $(PYTHON_MOD) + +$(PYTHON_MOD): $(PYTHON_OBJ) + $(GCC) $(LD_FLAGS) $(PYTHON_OBJ) -o $@ + +$(PYTHON_OBJ): $(PYTHON_SHADOW) + $(GCC) $(CC_FLAGS) -I$(PYTHON_INC) -I.. -o $@ $< + +$(PYTHON_SHADOW): $(SWIG_INTERFACE) + swig -python -shadow -o $(PYTHON_SHADOW) -outdir . $< + +# ================================================================== +install: $(PYTHON_MOD) + sudo cp $(PYTHON_MOD) $(PYTHON_DEST) + sudo cp $(PYTHON_SWIG) $(PYTHON_LIB) + +# ================================================================== +uninstall: + +# ================================================================== +clean: + rm $(PYTHON_MOD) $(PYTHON_SWIG) $(PYTHON_OBJ) + rm $(PYTHON_SHADOW) + diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/ruby/Makefile-swig b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/ruby/Makefile-swig new file mode 100644 index 0000000000..ee4800232c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/ruby/Makefile-swig @@ -0,0 +1,68 @@ +ifndef BASE_NAME +BASE_NAME = x86disasm +endif + +ifndef SWIG +SWIG = swig # apt-get install swig ! +endif + +ifndef GCC +GCC = gcc +endif + +ifndef CC_FLAGS +CC_FLAGS = -c -fPIC +endif + +ifndef LD_FLAGS +LD_FLAGS = -shared -L../.. -ldisasm +endif + +LIBDISASM_DIR = ../.. 
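# The per-language makefiles in this directory all encode the same
# three-step pipeline; as a hedged sketch (commands and flags taken from
# the Python rules above, with PYTHON_INC standing in for the target
# language's header path):
#
#   swig -python -shadow -o x86disasm_wrap.c -outdir . ../libdisasm_oop.i
#   gcc -c -fPIC -I$(PYTHON_INC) -I.. -o x86disasm_wrap.o x86disasm_wrap.c
#   gcc -shared -L.. -ldisasm x86disasm_wrap.o -o x86disasm-python.so
#
# i.e. SWIG generates a C wrapper from the interface file, the wrapper is
# compiled against the target language's headers, and the result is linked
# against libdisasm into a loadable module.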
+ +INTERFACE_FILE = libdisasm_oop.i + +SWIG_INTERFACE = ../$(INTERFACE_FILE) + +# RUBY rules +RUBY_MAKEFILE = Makefile +RUBY_MOD = $(BASE_NAME).so +RUBY_SHADOW = $(BASE_NAME)_wrap.c +#RUBY_SWIG = $(BASE_NAME).rb +RUBY_OBJ = $(BASE_NAME)_wrap.o +RUBY_INC = `ruby -e 'puts $$:.join("\n")' | tail -2 | head -1` +#RUBY_LIB = +#RUBY_DEST = + +#==================================================== +# TARGETS + +all: swig-ruby + +dummy: swig-ruby install uninstall clean + +swig-ruby: $(RUBY_MOD) + +$(RUBY_MOD): $(RUBY_MAKEFILE) + make + +$(RUBY_MAKEFILE): $(RUBY_OBJ) + ruby extconf.rb + +$(RUBY_OBJ):$(RUBY_SHADOW) + $(GCC) $(CC_FLAGS) -I$(RUBY_INC) -I.. -o $@ $< + +$(RUBY_SHADOW): $(SWIG_INTERFACE) + swig -ruby -o $(RUBY_SHADOW) -outdir . $< + +# ================================================================== +install: $(RUBY_MOD) + make install + +# ================================================================== +uninstall: + +# ================================================================== +clean: + make clean || true + rm $(RUBY_SHADOW) $(RUBY_MAKEFILE) $(RUBY_MOD) $(RUBY_OBJ) diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/ruby/extconf.rb b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/ruby/extconf.rb new file mode 100644 index 0000000000..4e74326435 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/ruby/extconf.rb @@ -0,0 +1,4 @@ +require 'mkmf' +find_library('disasm', 'x86_init', "/usr/local/lib", "../..") +create_makefile('x86disasm') + diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/tcl/Makefile-swig b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/tcl/Makefile-swig new file mode 100644 index 0000000000..5145a82935 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/swig/tcl/Makefile-swig @@ -0,0 +1,63 @@ +ifndef BASE_NAME +BASE_NAME = x86disasm +endif + +ifndef SWIG +SWIG = swig # apt-get install swig ! +endif + +ifndef GCC +GCC = gcc +endif + +ifndef CC_FLAGS +CC_FLAGS = -c -fPIC +endif + +ifndef LD_FLAGS +LD_FLAGS = -shared -L../.. -ldisasm +endif + +INTERFACE_FILE = libdisasm.i + +SWIG_INTERFACE = ../$(INTERFACE_FILE) + +# TCL rules +TCL_VERSION = 8.3 +TCL_MOD = $(BASE_NAME)-tcl.so +TCL_SHADOW = $(BASE_NAME)_wrap.c +TCL_OBJ = $(BASE_NAME)_wrap.o +TCL_INC = /usr/include/tcl$(TCL_VERSION) +TCL_LIB = /usr/lib/tcl$(TCL_VERSION) +TCL_DEST = $(TCL_LIB)/$(BASE_NAME).so + +#==================================================== +# TARGETS + +all: swig-tcl + +dummy: swig-tcl install uninstall clean + +swig-tcl: $(TCL_MOD) + +$(TCL_MOD): $(TCL_OBJ) + $(GCC) $(LD_FLAGS) $(TCL_OBJ) -o $@ + +$(TCL_OBJ): $(TCL_SHADOW) + $(GCC) $(CC_FLAGS) -I$(TCL_INC) -I.. -o $@ $< + +$(TCL_SHADOW): $(SWIG_INTERFACE) + swig -tcl -o $(TCL_SHADOW) -outdir . 
$< + +# ================================================================== +install: $(TCL_MOD) + sudo cp $(TCL_MOD) $(TCL_DEST) + +# ================================================================== +uninstall: + +# ================================================================== +clean: + rm $(TCL_MOD) $(TCL_SWIG) $(TCL_OBJ) + rm $(TCL_SHADOW) + diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_disasm.c b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_disasm.c new file mode 100644 index 0000000000..51a213a46e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_disasm.c @@ -0,0 +1,210 @@ +#include +#include +#include + +#include "libdis.h" +#include "ia32_insn.h" +#include "ia32_invariant.h" +#include "x86_operand_list.h" + + +#ifdef _MSC_VER + #define snprintf _snprintf + #define inline __inline +#endif + +unsigned int x86_disasm( unsigned char *buf, unsigned int buf_len, + uint32_t buf_rva, unsigned int offset, + x86_insn_t *insn ){ + int len, size; + unsigned char bytes[MAX_INSTRUCTION_SIZE]; + + if ( ! buf || ! insn || ! buf_len ) { + /* caller screwed up somehow */ + return 0; + } + + + /* ensure we are all NULLed up */ + memset( insn, 0, sizeof(x86_insn_t) ); + insn->addr = buf_rva + offset; + insn->offset = offset; + /* default to invalid insn */ + insn->type = insn_invalid; + insn->group = insn_none; + + if ( offset >= buf_len ) { + /* another caller screwup ;) */ + x86_report_error(report_disasm_bounds, (void*)(long)buf_rva+offset); + return 0; + } + + len = buf_len - offset; + + /* copy enough bytes for disassembly into buffer : this + * helps prevent buffer overruns at the end of a file */ + memset( bytes, 0, MAX_INSTRUCTION_SIZE ); + memcpy( bytes, &buf[offset], (len < MAX_INSTRUCTION_SIZE) ? len : + MAX_INSTRUCTION_SIZE ); + + /* actually do the disassembly */ + /* TODO: allow switching when more disassemblers are added */ + size = ia32_disasm_addr( bytes, len, insn); + + /* check and see if we had an invalid instruction */ + if (! size ) { + x86_report_error(report_invalid_insn, (void*)(long)buf_rva+offset ); + return 0; + } + + /* check if we overran the end of the buffer */ + if ( size > len ) { + x86_report_error( report_insn_bounds, (void*)(long)buf_rva + offset ); + MAKE_INVALID( insn, bytes ); + return 0; + } + + /* fill bytes field of insn */ + memcpy( insn->bytes, bytes, size ); + + return size; +} + +unsigned int x86_disasm_range( unsigned char *buf, uint32_t buf_rva, + unsigned int offset, unsigned int len, + DISASM_CALLBACK func, void *arg ) { + x86_insn_t insn; + unsigned int buf_len, size, count = 0, bytes = 0; + + /* buf_len is implied by the arguments */ + buf_len = len + offset; + + while ( bytes < len ) { + size = x86_disasm( buf, buf_len, buf_rva, offset + bytes, + &insn ); + if ( size ) { + /* invoke callback if it exists */ + if ( func ) { + (*func)( &insn, arg ); + } + bytes += size; + count ++; + } else { + /* error */ + bytes++; /* try next byte */ + } + + x86_oplist_free( &insn ); + } + + return( count ); +} + +static inline int follow_insn_dest( x86_insn_t *insn ) { + if ( insn->type == insn_jmp || insn->type == insn_jcc || + insn->type == insn_call || insn->type == insn_callcc ) { + return(1); + } + return(0); +} + +static inline int insn_doesnt_return( x86_insn_t *insn ) { + return( (insn->type == insn_jmp || insn->type == insn_return) ? 
1: 0 ); +} + +static int32_t internal_resolver( x86_op_t *op, x86_insn_t *insn ){ + int32_t next_addr = -1; + if ( x86_optype_is_address(op->type) ) { + next_addr = op->data.sdword; + } else if ( op->type == op_relative_near ) { + next_addr = insn->addr + insn->size + op->data.relative_near; + } else if ( op->type == op_relative_far ) { + next_addr = insn->addr + insn->size + op->data.relative_far; + } + return( next_addr ); +} + +unsigned int x86_disasm_forward( unsigned char *buf, unsigned int buf_len, + uint32_t buf_rva, unsigned int offset, + DISASM_CALLBACK func, void *arg, + DISASM_RESOLVER resolver, void *r_arg ){ + x86_insn_t insn; + x86_op_t *op; + int32_t next_addr; + uint32_t next_offset; + unsigned int size, count = 0, bytes = 0, cont = 1; + + while ( cont && bytes < buf_len ) { + size = x86_disasm( buf, buf_len, buf_rva, offset + bytes, + &insn ); + + if ( size ) { + /* invoke callback if it exists */ + if ( func ) { + (*func)( &insn, arg ); + } + bytes += size; + count ++; + } else { + /* error */ + bytes++; /* try next byte */ + } + + if ( follow_insn_dest(&insn) ) { + op = x86_get_dest_operand( &insn ); + next_addr = -1; + + /* if caller supplied a resolver, use it to determine + * the address to disassemble */ + if ( resolver ) { + next_addr = resolver(op, &insn, r_arg); + } else { + next_addr = internal_resolver(op, &insn); + } + + if (next_addr != -1 ) { + next_offset = next_addr - buf_rva; + /* if offset is in this buffer... */ + if ( (uint32_t)next_addr >= buf_rva && + next_offset < buf_len ) { + /* go ahead and disassemble */ + count += x86_disasm_forward( buf, + buf_len, + buf_rva, + next_offset, + func, arg, + resolver, r_arg ); + } else { + /* report unresolved address */ + x86_report_error( report_disasm_bounds, + (void*)(long)next_addr ); + } + } + } /* end follow_insn */ + + if ( insn_doesnt_return(&insn) ) { + /* stop disassembling */ + cont = 0; + } + + x86_oplist_free( &insn ); + } + return( count ); +} + +/* invariant instruction representation */ +size_t x86_invariant_disasm( unsigned char *buf, int buf_len, + x86_invariant_t *inv ){ + if (! buf || ! buf_len || ! inv ) { + return(0); + } + + return ia32_disasm_invariant(buf, buf_len, inv); +} +size_t x86_size_disasm( unsigned char *buf, unsigned int buf_len ) { + if (! buf || ! buf_len ) { + return(0); + } + + return ia32_disasm_size(buf, buf_len); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_format.c b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_format.c new file mode 100644 index 0000000000..0ec960dc8f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_format.c @@ -0,0 +1,1430 @@ +#include +#include +#include + +#include "libdis.h" +#include + +#ifdef _MSC_VER + #define snprintf _snprintf + #define inline __inline +#endif + + +/* + * concatenation macros. STRNCATF concatenates a format string, buf + * only with one argument. 
+ */ +#define STRNCAT( buf, str, len ) do { \ + int _i = strlen(str), _blen = strlen(buf), _len = len - 1; \ + if ( len ) { \ + strncat( buf, str, _len ); \ + if ( _len <= _i ) { \ + buf[_blen+_len] = '\0'; \ + len = 0; \ + } else { \ + len -= _i; \ + } \ + } \ +} while( 0 ) + +#define STRNCATF( buf, fmt, data, len ) do { \ + char _tmp[MAX_OP_STRING]; \ + \ + snprintf( _tmp, sizeof _tmp, fmt, data ); \ + STRNCAT( buf, _tmp, len ); \ +} while( 0 ) + + +#define PRINT_DISPLACEMENT( ea ) do { \ + if ( ea->disp_size && ea->disp ) { \ + if ( ea->disp_sign ) { \ + STRNCATF( buf, "-0x%" PRIX32, -ea->disp, len ); \ + } else { \ + STRNCATF( buf, "0x%" PRIX32, ea->disp, len ); \ + } \ + } \ +} while( 0 ) + +static const char *prefix_strings[] = { + "", /* no prefix */ + "repz ", /* the trailing spaces make it easy to prepend to mnemonic */ + "repnz ", + "lock ", + "branch delay " /* unused in x86 */ +}; + +static int format_insn_prefix_str( enum x86_insn_prefix prefix, char *buf, + int len ) { + + int len_orig = len; + + /* concat all prefix strings */ + if ( prefix & 1 ) { STRNCAT( buf, prefix_strings[1], len ); } + if ( prefix & 2 ) { STRNCAT( buf, prefix_strings[2], len ); } + if ( prefix & 4 ) { STRNCAT( buf, prefix_strings[3], len ); } + if ( prefix & 8 ) { STRNCAT( buf, prefix_strings[4], len ); } + + /* return the number of characters added */ + return (len_orig - len); +} + +/* + * sprint's an operand's data to string str. + */ +static void get_operand_data_str( x86_op_t *op, char *str, int len ){ + + if ( op->flags & op_signed ) { + switch ( op->datatype ) { + case op_byte: + snprintf( str, len, "%" PRId8, op->data.sbyte ); + return; + case op_word: + snprintf( str, len, "%" PRId16, op->data.sword ); + return; + case op_qword: + snprintf( str, len, "%" PRId64, op->data.sqword ); + return; + default: + snprintf( str, len, "%" PRId32, op->data.sdword ); + return; + } + } + + //else + switch ( op->datatype ) { + case op_byte: + snprintf( str, len, "0x%02" PRIX8, op->data.byte ); + return; + case op_word: + snprintf( str, len, "0x%04" PRIX16, op->data.word ); + return; + case op_qword: + snprintf( str, len, "0x%08" PRIX64,op->data.sqword ); + return; + default: + snprintf( str, len, "0x%08" PRIX32, op->data.dword ); + return; + } +} + +/* + * sprints register types to a string. the register types can be ORed + * together. + */ +static void get_operand_regtype_str( int regtype, char *str, int len ) +{ + static struct { + const char *name; + int value; + } operand_regtypes[] = { + {"reg_gen" , 0x00001}, + {"reg_in" , 0x00002}, + {"reg_out" , 0x00004}, + {"reg_local" , 0x00008}, + {"reg_fpu" , 0x00010}, + {"reg_seg" , 0x00020}, + {"reg_simd" , 0x00040}, + {"reg_sys" , 0x00080}, + {"reg_sp" , 0x00100}, + {"reg_fp" , 0x00200}, + {"reg_pc" , 0x00400}, + {"reg_retaddr", 0x00800}, + {"reg_cond" , 0x01000}, + {"reg_zero" , 0x02000}, + {"reg_ret" , 0x04000}, + {"reg_src" , 0x10000}, + {"reg_dest" , 0x20000}, + {"reg_count" , 0x40000}, + {NULL, 0}, //end + }; + + unsigned int i; + + memset( str, 0, len ); + + //go thru every type in the enum + for ( i = 0; operand_regtypes[i].name; i++ ) { + //skip if type is not set + if(! 
(regtype & operand_regtypes[i].value) ) + continue; + + //not the first time around + if( str[0] ) { + STRNCAT( str, " ", len ); + } + + STRNCAT(str, operand_regtypes[i].name, len ); + } +} + +static int format_expr( x86_ea_t *ea, char *buf, int len, + enum x86_asm_format format ) { + char str[MAX_OP_STRING]; + + if ( format == att_syntax ) { + if (ea->base.name[0] || ea->index.name[0] || ea->scale) { + PRINT_DISPLACEMENT(ea); + STRNCAT( buf, "(", len ); + + if ( ea->base.name[0]) { + STRNCATF( buf, "%%%s", ea->base.name, len ); + } + if ( ea->index.name[0]) { + STRNCATF( buf, ",%%%s", ea->index.name, len ); + if ( ea->scale > 1 ) { + STRNCATF( buf, ",%d", ea->scale, len ); + } + } + /* handle the syntactic exception */ + if ( ! ea->base.name[0] && + ! ea->index.name[0] ) { + STRNCATF( buf, ",%d", ea->scale, len ); + } + + STRNCAT( buf, ")", len ); + } else + STRNCATF( buf, "0x%" PRIX32, ea->disp, len ); + + } else if ( format == xml_syntax ){ + + if ( ea->base.name[0]) { + STRNCAT (buf, "\t\t\t\n", len); + + get_operand_regtype_str (ea->base.type, str, + sizeof str); + STRNCAT (buf, "\t\t\t\tbase.name, len); + STRNCATF (buf, "type=\"%s\" ", str, len); + STRNCATF (buf, "size=%d/>\n", ea->base.size, len); + + STRNCAT (buf, "\t\t\t\n", len); + } + + if ( ea->index.name[0]) { + STRNCAT (buf, "\t\t\t\n", len); + + get_operand_regtype_str (ea->index.type, str, + sizeof str); + + STRNCAT (buf, "\t\t\t\tindex.name, len); + STRNCATF (buf, "type=\"%s\" ", str, len); + STRNCATF (buf, "size=%d/>\n", ea->index.size, len); + + STRNCAT (buf, "\t\t\t\n", len); + } + + //scale + STRNCAT (buf, "\t\t\t\n", len); + STRNCAT (buf, "\t\t\t\t\n", ea->scale, len); + STRNCAT (buf, "\t\t\t\n", len); + + if ( ea->disp_size ) { + + STRNCAT (buf, "\t\t\t\n", len); + + if ( ea->disp_size > 1 && ! ea->disp_sign ) { + STRNCAT (buf, "\t\t\t\t
\n", ea->disp, + len); + } else { + STRNCAT (buf, "\t\t\t\t\n", ea->disp, len); + } + + STRNCAT (buf, "\t\t\t\n", len); + } + + } else if ( format == raw_syntax ) { + + PRINT_DISPLACEMENT(ea); + STRNCAT( buf, "(", len ); + + STRNCATF( buf, "%s,", ea->base.name, len ); + STRNCATF( buf, "%s,", ea->index.name, len ); + STRNCATF( buf, "%d", ea->scale, len ); + STRNCAT( buf, ")", len ); + + } else { + + STRNCAT( buf, "[", len ); + + if ( ea->base.name[0] ) { + STRNCAT( buf, ea->base.name, len ); + if ( ea->index.name[0] || + (ea->disp_size && ! ea->disp_sign) ) { + STRNCAT( buf, "+", len ); + } + } + if ( ea->index.name[0] ) { + STRNCAT( buf, ea->index.name, len ); + if ( ea->scale > 1 ) + { + STRNCATF( buf, "*%" PRId32, ea->scale, len ); + } + if ( ea->disp_size && ! ea->disp_sign ) + { + STRNCAT( buf, "+", len ); + } + } + + if ( ea->disp_size || (! ea->index.name[0] && + ! ea->base.name[0] ) ) + { + PRINT_DISPLACEMENT(ea); + } + + STRNCAT( buf, "]", len ); + } + + return( strlen(buf) ); +} + +static int format_seg( x86_op_t *op, char *buf, int len, + enum x86_asm_format format ) { + int len_orig = len; + const char *reg = ""; + + if (! op || ! buf || ! len || ! op->flags) { + return(0); + } + if ( op->type != op_offset && op->type != op_expression ){ + return(0); + } + if (! ((int) op->flags & 0xF00) ) { + return(0); + } + + switch (op->flags & 0xF00) { + case op_es_seg: reg = "es"; break; + case op_cs_seg: reg = "cs"; break; + case op_ss_seg: reg = "ss"; break; + case op_ds_seg: reg = "ds"; break; + case op_fs_seg: reg = "fs"; break; + case op_gs_seg: reg = "gs"; break; + default: + break; + } + + if (! reg[0] ) { + return( 0 ); + } + + switch( format ) { + case xml_syntax: + STRNCAT( buf, "\t\t\t\n", reg, len ); + break; + case att_syntax: + STRNCATF( buf, "%%%s:", reg, len ); + break; + + default: + STRNCATF( buf, "%s:", reg, len ); + break; + } + + return( len_orig - len ); /* return length of appended string */ +} + +static const char *get_operand_datatype_str( x86_op_t *op ){ + + static const char *types[] = { + "sbyte", /* 0 */ + "sword", + "sqword", + "sdword", + "sdqword", /* 4 */ + "byte", + "word", + "qword", + "dword", /* 8 */ + "dqword", + "sreal", + "dreal", + "extreal", /* 12 */ + "bcd", + "ssimd", + "dsimd", + "sssimd", /* 16 */ + "sdsimd", + "descr32", + "descr16", + "pdescr32", /* 20 */ + "pdescr16", + "bounds16", + "bounds32", + "fpu_env16", + "fpu_env32", /* 25 */ + "fpu_state16", + "fpu_state32", + "fp_reg_set" + }; + + /* handle signed values first */ + if ( op->flags & op_signed ) { + switch (op->datatype) { + case op_byte: return types[0]; + case op_word: return types[1]; + case op_qword: return types[2]; + case op_dqword: return types[4]; + default: return types[3]; + } + } + + switch (op->datatype) { + case op_byte: return types[5]; + case op_word: return types[6]; + case op_qword: return types[7]; + case op_dqword: return types[9]; + case op_sreal: return types[10]; + case op_dreal: return types[11]; + case op_extreal: return types[12]; + case op_bcd: return types[13]; + case op_ssimd: return types[14]; + case op_dsimd: return types[15]; + case op_sssimd: return types[16]; + case op_sdsimd: return types[17]; + case op_descr32: return types[18]; + case op_descr16: return types[19]; + case op_pdescr32: return types[20]; + case op_pdescr16: return types[21]; + case op_bounds16: return types[22]; + case op_bounds32: return types[23]; + case op_fpustate16: return types[24]; + case op_fpustate32: return types[25]; + case op_fpuenv16: return types[26]; + case op_fpuenv32: 
return types[27]; + case op_fpregset: return types[28]; + default: return types[8]; + } +} + +static int format_insn_eflags_str( enum x86_flag_status flags, char *buf, + int len) { + + static struct { + const char *name; + int value; + } insn_flags[] = { + { "carry_set ", 0x0001 }, + { "zero_set ", 0x0002 }, + { "oflow_set ", 0x0004 }, + { "dir_set ", 0x0008 }, + { "sign_set ", 0x0010 }, + { "parity_set ", 0x0020 }, + { "carry_or_zero_set ", 0x0040 }, + { "zero_set_or_sign_ne_oflow ", 0x0080 }, + { "carry_clear ", 0x0100 }, + { "zero_clear ", 0x0200 }, + { "oflow_clear ", 0x0400 }, + { "dir_clear ", 0x0800 }, + { "sign_clear ", 0x1000 }, + { "parity_clear ", 0x2000 }, + { "sign_eq_oflow ", 0x4000 }, + { "sign_ne_oflow ", 0x8000 }, + { NULL, 0x0000 }, //end + }; + + unsigned int i; + int len_orig = len; + + for (i = 0; insn_flags[i].name; i++) { + if (! (flags & insn_flags[i].value) ) + continue; + + STRNCAT( buf, insn_flags[i].name, len ); + } + + return( len_orig - len ); +} + +static const char *get_insn_group_str( enum x86_insn_group gp ) { + + static const char *types[] = { + "", // 0 + "controlflow",// 1 + "arithmetic", // 2 + "logic", // 3 + "stack", // 4 + "comparison", // 5 + "move", // 6 + "string", // 7 + "bit_manip", // 8 + "flag_manip", // 9 + "fpu", // 10 + "", // 11 + "", // 12 + "interrupt", // 13 + "system", // 14 + "other", // 15 + }; + + if ( gp > sizeof (types)/sizeof(types[0]) ) + return ""; + + return types[gp]; +} + +static const char *get_insn_type_str( enum x86_insn_type type ) { + + static struct { + const char *name; + int value; + } types[] = { + /* insn_controlflow */ + { "jmp", 0x1001 }, + { "jcc", 0x1002 }, + { "call", 0x1003 }, + { "callcc", 0x1004 }, + { "return", 0x1005 }, + { "loop", 0x1006 }, + /* insn_arithmetic */ + { "add", 0x2001 }, + { "sub", 0x2002 }, + { "mul", 0x2003 }, + { "div", 0x2004 }, + { "inc", 0x2005 }, + { "dec", 0x2006 }, + { "shl", 0x2007 }, + { "shr", 0x2008 }, + { "rol", 0x2009 }, + { "ror", 0x200A }, + /* insn_logic */ + { "and", 0x3001 }, + { "or", 0x3002 }, + { "xor", 0x3003 }, + { "not", 0x3004 }, + { "neg", 0x3005 }, + /* insn_stack */ + { "push", 0x4001 }, + { "pop", 0x4002 }, + { "pushregs", 0x4003 }, + { "popregs", 0x4004 }, + { "pushflags", 0x4005 }, + { "popflags", 0x4006 }, + { "enter", 0x4007 }, + { "leave", 0x4008 }, + /* insn_comparison */ + { "test", 0x5001 }, + { "cmp", 0x5002 }, + /* insn_move */ + { "mov", 0x6001 }, /* move */ + { "movcc", 0x6002 }, /* conditional move */ + { "xchg", 0x6003 }, /* exchange */ + { "xchgcc", 0x6004 }, /* conditional exchange */ + /* insn_string */ + { "strcmp", 0x7001 }, + { "strload", 0x7002 }, + { "strmov", 0x7003 }, + { "strstore", 0x7004 }, + { "translate", 0x7005 }, /* xlat */ + /* insn_bit_manip */ + { "bittest", 0x8001 }, + { "bitset", 0x8002 }, + { "bitclear", 0x8003 }, + /* insn_flag_manip */ + { "clear_carry", 0x9001 }, + { "clear_zero", 0x9002 }, + { "clear_oflow", 0x9003 }, + { "clear_dir", 0x9004 }, + { "clear_sign", 0x9005 }, + { "clear_parity", 0x9006 }, + { "set_carry", 0x9007 }, + { "set_zero", 0x9008 }, + { "set_oflow", 0x9009 }, + { "set_dir", 0x900A }, + { "set_sign", 0x900B }, + { "set_parity", 0x900C }, + { "tog_carry", 0x9010 }, + { "tog_zero", 0x9020 }, + { "tog_oflow", 0x9030 }, + { "tog_dir", 0x9040 }, + { "tog_sign", 0x9050 }, + { "tog_parity", 0x9060 }, + /* insn_fpu */ + { "fmov", 0xA001 }, + { "fmovcc", 0xA002 }, + { "fneg", 0xA003 }, + { "fabs", 0xA004 }, + { "fadd", 0xA005 }, + { "fsub", 0xA006 }, + { "fmul", 0xA007 }, + { "fdiv", 0xA008 }, + { "fsqrt", 
0xA009 }, + { "fcmp", 0xA00A }, + { "fcos", 0xA00C }, + { "fldpi", 0xA00D }, + { "fldz", 0xA00E }, + { "ftan", 0xA00F }, + { "fsine", 0xA010 }, + { "fsys", 0xA020 }, + /* insn_interrupt */ + { "int", 0xD001 }, + { "intcc", 0xD002 }, /* not present in x86 ISA */ + { "iret", 0xD003 }, + { "bound", 0xD004 }, + { "debug", 0xD005 }, + { "trace", 0xD006 }, + { "invalid_op", 0xD007 }, + { "oflow", 0xD008 }, + /* insn_system */ + { "halt", 0xE001 }, + { "in", 0xE002 }, /* input from port/bus */ + { "out", 0xE003 }, /* output to port/bus */ + { "cpuid", 0xE004 }, + /* insn_other */ + { "nop", 0xF001 }, + { "bcdconv", 0xF002 }, /* convert to or from BCD */ + { "szconv", 0xF003 }, /* change size of operand */ + { NULL, 0 }, //end + }; + + unsigned int i; + + //go thru every type in the enum + for ( i = 0; types[i].name; i++ ) { + if ( types[i].value == type ) + return types[i].name; + } + + return ""; +} + +static const char *get_insn_cpu_str( enum x86_insn_cpu cpu ) { + static const char *intel[] = { + "", // 0 + "8086", // 1 + "80286", // 2 + "80386", // 3 + "80387", // 4 + "80486", // 5 + "Pentium", // 6 + "Pentium Pro", // 7 + "Pentium 2", // 8 + "Pentium 3", // 9 + "Pentium 4" // 10 + }; + + if ( cpu < sizeof(intel)/sizeof(intel[0]) ) { + return intel[cpu]; + } else if ( cpu == 16 ) { + return "K6"; + } else if ( cpu == 32 ) { + return "K7"; + } else if ( cpu == 48 ) { + return "Athlon"; + } + + return ""; +} + +static const char *get_insn_isa_str( enum x86_insn_isa isa ) { + static const char *subset[] = { + NULL, // 0 + "General Purpose", // 1 + "Floating Point", // 2 + "FPU Management", // 3 + "MMX", // 4 + "SSE", // 5 + "SSE2", // 6 + "SSE3", // 7 + "3DNow!", // 8 + "System" // 9 + }; + + if ( isa > sizeof (subset)/sizeof(subset[0]) ) { + return ""; + } + + return subset[isa]; +} + +static int format_operand_att( x86_op_t *op, x86_insn_t *insn, char *buf, + int len){ + + char str[MAX_OP_STRING]; + + memset (str, 0, sizeof str); + + switch ( op->type ) { + case op_register: + STRNCATF( buf, "%%%s", op->data.reg.name, len ); + break; + + case op_immediate: + get_operand_data_str( op, str, sizeof str ); + STRNCATF( buf, "$%s", str, len ); + break; + + case op_relative_near: + STRNCATF( buf, "0x%08X", + (unsigned int)(op->data.sbyte + + insn->addr + insn->size), len ); + break; + + case op_relative_far: + if (op->datatype == op_word) { + STRNCATF( buf, "0x%08X", + (unsigned int)(op->data.sword + + insn->addr + insn->size), len ); + } else { + STRNCATF( buf, "0x%08X", + (unsigned int)(op->data.sdword + + insn->addr + insn->size), len ); + } + break; + + case op_absolute: + /* ATT uses the syntax $section, $offset */ + STRNCATF( buf, "$0x%04" PRIX16 ", ", op->data.absolute.segment, + len ); + if (op->datatype == op_descr16) { + STRNCATF( buf, "$0x%04" PRIX16, + op->data.absolute.offset.off16, len ); + } else { + STRNCATF( buf, "$0x%08" PRIX32, + op->data.absolute.offset.off32, len ); + } + break; + case op_offset: + /* ATT requires a '*' before JMP/CALL ops */ + if (insn->type == insn_jmp || insn->type == insn_call) + STRNCAT( buf, "*", len ); + + len -= format_seg( op, buf, len, att_syntax ); + STRNCATF( buf, "0x%08" PRIX32, op->data.sdword, len ); + break; + + case op_expression: + /* ATT requires a '*' before JMP/CALL ops */ + if (insn->type == insn_jmp || insn->type == insn_call) + STRNCAT( buf, "*", len ); + + len -= format_seg( op, buf, len, att_syntax ); + len -= format_expr( &op->data.expression, buf, len, + att_syntax ); + break; + case op_unused: + case op_unknown: + /* return 
0-truncated buffer */ + break; + } + + return ( strlen( buf ) ); +} + +static int format_operand_native( x86_op_t *op, x86_insn_t *insn, char *buf, + int len){ + + char str[MAX_OP_STRING]; + + switch (op->type) { + case op_register: + STRNCAT( buf, op->data.reg.name, len ); + break; + + case op_immediate: + get_operand_data_str( op, str, sizeof str ); + STRNCAT( buf, str, len ); + break; + + case op_relative_near: + STRNCATF( buf, "0x%08" PRIX32, + (unsigned int)(op->data.sbyte + + insn->addr + insn->size), len ); + break; + + case op_relative_far: + if ( op->datatype == op_word ) { + STRNCATF( buf, "0x%08" PRIX32, + (unsigned int)(op->data.sword + + insn->addr + insn->size), len ); + break; + } else { + STRNCATF( buf, "0x%08" PRIX32, op->data.sdword + + insn->addr + insn->size, len ); + } + break; + + case op_absolute: + STRNCATF( buf, "$0x%04" PRIX16 ":", op->data.absolute.segment, + len ); + if (op->datatype == op_descr16) { + STRNCATF( buf, "0x%04" PRIX16, + op->data.absolute.offset.off16, len ); + } else { + STRNCATF( buf, "0x%08" PRIX32, + op->data.absolute.offset.off32, len ); + } + break; + + case op_offset: + len -= format_seg( op, buf, len, native_syntax ); + STRNCATF( buf, "[0x%08" PRIX32 "]", op->data.sdword, len ); + break; + + case op_expression: + len -= format_seg( op, buf, len, native_syntax ); + len -= format_expr( &op->data.expression, buf, len, + native_syntax ); + break; + case op_unused: + case op_unknown: + /* return 0-truncated buffer */ + break; + } + + return( strlen( buf ) ); +} + +static int format_operand_xml( x86_op_t *op, x86_insn_t *insn, char *buf, + int len){ + + char str[MAX_OP_STRING] = "\0"; + + switch (op->type) { + case op_register: + + get_operand_regtype_str( op->data.reg.type, str, + sizeof str ); + + STRNCAT( buf, "\t\tdata.reg.name, len ); + STRNCATF( buf, "type=\"%s\" ", str, len ); + STRNCATF( buf, "size=%d/>\n", op->data.reg.size, len ); + break; + + case op_immediate: + + get_operand_data_str( op, str, sizeof str ); + + STRNCAT( buf, "\t\t\n", str, len ); + break; + + case op_relative_near: + STRNCAT( buf, "\t\t\n", + (unsigned int)(op->data.sbyte + + insn->addr + insn->size), len ); + break; + + case op_relative_far: + STRNCAT( buf, "\t\tdatatype == op_word) { + STRNCATF( buf, "value=\"0x%08" PRIX32 "\"/>\n", + (unsigned int)(op->data.sword + + insn->addr + insn->size), len); + break; + } else { + + STRNCATF( buf, "value=\"0x%08" PRIX32 "\"/>\n", + op->data.sdword + insn->addr + insn->size, + len ); + } + break; + + case op_absolute: + + STRNCATF( buf, + "\t\tdata.absolute.segment, len ); + + if (op->datatype == op_descr16) { + STRNCATF( buf, "offset=\"0x%04" PRIX16 "\">", + op->data.absolute.offset.off16, len ); + } else { + STRNCATF( buf, "offset=\"0x%08" PRIX32 "\">", + op->data.absolute.offset.off32, len ); + } + + STRNCAT( buf, "\t\t\n", len ); + break; + + case op_expression: + + + STRNCAT( buf, "\t\t\n", len ); + + len -= format_seg( op, buf, len, xml_syntax ); + len -= format_expr( &op->data.expression, buf, len, + xml_syntax ); + + STRNCAT( buf, "\t\t\n", len ); + break; + + case op_offset: + + STRNCAT( buf, "\t\t\n", len ); + + len -= format_seg( op, buf, len, xml_syntax ); + + STRNCAT( buf, "\t\t\t
\n", + op->data.sdword, len ); + STRNCAT( buf, "\t\t\n", len ); + break; + + case op_unused: + case op_unknown: + /* return 0-truncated buffer */ + break; + } + + return( strlen( buf ) ); +} + +static int format_operand_raw( x86_op_t *op, x86_insn_t *insn, char *buf, + int len){ + + char str[MAX_OP_RAW_STRING]; + const char *datatype = get_operand_datatype_str(op); + + switch (op->type) { + case op_register: + + get_operand_regtype_str( op->data.reg.type, str, + sizeof str ); + + STRNCAT( buf, "reg|", len ); + STRNCATF( buf, "%s|", datatype, len ); + STRNCATF( buf, "%s:", op->data.reg.name, len ); + STRNCATF( buf, "%s:", str, len ); + STRNCATF( buf, "%d|", op->data.reg.size, len ); + break; + + case op_immediate: + + get_operand_data_str( op, str, sizeof str ); + + STRNCAT( buf, "immediate|", len ); + STRNCATF( buf, "%s|", datatype, len ); + STRNCATF( buf, "%s|", str, len ); + break; + + case op_relative_near: + /* NOTE: in raw format, we print the + * relative offset, not the actual + * address of the jump target */ + + STRNCAT( buf, "relative|", len ); + STRNCATF( buf, "%s|", datatype, len ); + STRNCATF( buf, "%" PRId8 "|", op->data.sbyte, len ); + break; + + case op_relative_far: + + STRNCAT( buf, "relative|", len ); + STRNCATF( buf, "%s|", datatype, len ); + + if (op->datatype == op_word) { + STRNCATF( buf, "%" PRId16 "|", op->data.sword, len); + break; + } else { + STRNCATF( buf, "%" PRId32 "|", op->data.sdword, len ); + } + break; + + case op_absolute: + + STRNCAT( buf, "absolute_address|", len ); + STRNCATF( buf, "%s|", datatype, len ); + + STRNCATF( buf, "$0x%04" PRIX16 ":", op->data.absolute.segment, + len ); + if (op->datatype == op_descr16) { + STRNCATF( buf, "0x%04" PRIX16 "|", + op->data.absolute.offset.off16, len ); + } else { + STRNCATF( buf, "0x%08" PRIX32 "|", + op->data.absolute.offset.off32, len ); + } + + break; + + case op_expression: + + STRNCAT( buf, "address_expression|", len ); + STRNCATF( buf, "%s|", datatype, len ); + + len -= format_seg( op, buf, len, native_syntax ); + len -= format_expr( &op->data.expression, buf, len, + raw_syntax ); + + STRNCAT( buf, "|", len ); + break; + + case op_offset: + + STRNCAT( buf, "segment_offset|", len ); + STRNCATF( buf, "%s|", datatype, len ); + + len -= format_seg( op, buf, len, xml_syntax ); + + STRNCATF( buf, "%08" PRIX32 "|", op->data.sdword, len ); + break; + + case op_unused: + case op_unknown: + /* return 0-truncated buffer */ + break; + } + + return( strlen( buf ) ); +} + +int x86_format_operand( x86_op_t *op, char *buf, int len, + enum x86_asm_format format ){ + x86_insn_t *insn; + + if ( ! op || ! buf || len < 1 ) { + return(0); + } + + /* insn is stored in x86_op_t since .21-pre3 */ + insn = (x86_insn_t *) op->insn; + + memset( buf, 0, len ); + + switch ( format ) { + case att_syntax: + return format_operand_att( op, insn, buf, len ); + case xml_syntax: + return format_operand_xml( op, insn, buf, len ); + case raw_syntax: + return format_operand_raw( op, insn, buf, len ); + case native_syntax: + case intel_syntax: + default: + return format_operand_native( op, insn, buf, len ); + } +} + +#define is_imm_jmp(op) (op->type == op_absolute || \ + op->type == op_immediate || \ + op->type == op_offset) +#define is_memory_op(op) (op->type == op_absolute || \ + op->type == op_expression || \ + op->type == op_offset) + +static int format_att_mnemonic( x86_insn_t *insn, char *buf, int len) { + int size = 0; + const char *suffix; + + if (! insn || ! buf || ! 
len ) + return(0); + + memset( buf, 0, len ); + + /* do long jump/call prefix */ + if ( insn->type == insn_jmp || insn->type == insn_call ) { + if (! is_imm_jmp( x86_operand_1st(insn) ) || + (x86_operand_1st(insn))->datatype != op_byte ) { + /* far jump/call, use "l" prefix */ + STRNCAT( buf, "l", len ); + } + STRNCAT( buf, insn->mnemonic, len ); + + return ( strlen( buf ) ); + } + + /* do mnemonic */ + STRNCAT( buf, insn->mnemonic, len ); + + /* do suffixes for memory operands */ + if (!(insn->note & insn_note_nosuffix) && + (insn->group == insn_arithmetic || + insn->group == insn_logic || + insn->group == insn_move || + insn->group == insn_stack || + insn->group == insn_string || + insn->group == insn_comparison || + insn->type == insn_in || + insn->type == insn_out + )) { + if ( x86_operand_count( insn, op_explicit ) > 0 && + is_memory_op( x86_operand_1st(insn) ) ){ + size = x86_operand_size( x86_operand_1st( insn ) ); + } else if ( x86_operand_count( insn, op_explicit ) > 1 && + is_memory_op( x86_operand_2nd(insn) ) ){ + size = x86_operand_size( x86_operand_2nd( insn ) ); + } + } + + if ( size == 1 ) suffix = "b"; + else if ( size == 2 ) suffix = "w"; + else if ( size == 4 ) suffix = "l"; + else if ( size == 8 ) suffix = "q"; + else suffix = ""; + + STRNCAT( buf, suffix, len ); + return ( strlen( buf ) ); +} + +int x86_format_mnemonic(x86_insn_t *insn, char *buf, int len, + enum x86_asm_format format){ + char str[MAX_OP_STRING]; + + memset( buf, 0, len ); + STRNCAT( buf, insn->prefix_string, len ); + if ( format == att_syntax ) { + format_att_mnemonic( insn, str, sizeof str ); + STRNCAT( buf, str, len ); + } else { + STRNCAT( buf, insn->mnemonic, len ); + } + + return( strlen( buf ) ); +} + +struct op_string { char *buf; size_t len; }; + +static void format_op_raw( x86_op_t *op, x86_insn_t *insn, void *arg ) { + struct op_string * opstr = (struct op_string *) arg; + + format_operand_raw(op, insn, opstr->buf, opstr->len); +} + +static int format_insn_note(x86_insn_t *insn, char *buf, int len){ + char note[32] = {0}; + int len_orig = len, note_len = 32; + + if ( insn->note & insn_note_ring0 ) { + STRNCATF( note, "%s", "Ring0 ", note_len ); + } + if ( insn->note & insn_note_smm ) { + STRNCATF( note, "%s", "SMM ", note_len ); + } + if ( insn->note & insn_note_serial ) { + STRNCATF(note, "%s", "Serialize ", note_len ); + } + STRNCATF( buf, "%s|", note, len ); + + return( len_orig - len ); +} + +static int format_raw_insn( x86_insn_t *insn, char *buf, int len ){ + struct op_string opstr = { buf, len }; + int i; + + /* RAW style: + * ADDRESS|OFFSET|SIZE|BYTES| + * PREFIX|PREFIX_STRING|GROUP|TYPE|NOTES| + * MNEMONIC|CPU|ISA|FLAGS_SET|FLAGS_TESTED| + * STACK_MOD|STACK_MOD_VAL + * [|OP_TYPE|OP_DATATYPE|OP_ACCESS|OP_FLAGS|OP]* + * + * Register values are encoded as: + * NAME:TYPE:SIZE + * + * Effective addresses are encoded as: + * disp(base_reg,index_reg,scale) + */ + STRNCATF( buf, "0x%08" PRIX32 "|", insn->addr , len ); + STRNCATF( buf, "0x%08" PRIX32 "|", insn->offset, len ); + STRNCATF( buf, "%d|" , insn->size , len ); + + /* print bytes */ + for ( i = 0; i < insn->size; i++ ) { + STRNCATF( buf, "%02X ", insn->bytes[i], len ); + } + STRNCAT( buf, "|", len ); + + len -= format_insn_prefix_str( insn->prefix, buf, len ); + STRNCATF( buf, "|%s|", insn->prefix_string , len ); + STRNCATF( buf, "%s|", get_insn_group_str( insn->group ), len ); + STRNCATF( buf, "%s|", get_insn_type_str( insn->type ) , len ); + STRNCATF( buf, "%s|", insn->mnemonic , len ); + STRNCATF( buf, "%s|", get_insn_cpu_str( 
insn->cpu ) , len ); + STRNCATF( buf, "%s|", get_insn_isa_str( insn->isa ) , len ); + + /* insn note */ + len -= format_insn_note( insn, buf, len ); + + len -= format_insn_eflags_str( insn->flags_set, buf, len ); + STRNCAT( buf, "|", len ); + len -= format_insn_eflags_str( insn->flags_tested, buf, len ); + STRNCAT( buf, "|", len ); + STRNCATF( buf, "%d|", insn->stack_mod, len ); + STRNCATF( buf, "%" PRId32 "|", insn->stack_mod_val, len ); + + opstr.len = len; + x86_operand_foreach( insn, format_op_raw, &opstr, op_any ); + + return( strlen (buf) ); +} + +static int format_xml_insn( x86_insn_t *insn, char *buf, int len ) { + char str[MAX_OP_XML_STRING]; + int i; + + STRNCAT( buf, "\n", len ); + + STRNCATF( buf, "\t
addr, len ); + STRNCATF( buf, "offset=\"0x%08" PRIX32 "\" ", insn->offset, len ); + STRNCATF( buf, "size=%d bytes=\"", insn->size, len ); + + for ( i = 0; i < insn->size; i++ ) { + STRNCATF( buf, "%02X ", insn->bytes[i], len ); + } + STRNCAT( buf, "\"/>\n", len ); + + STRNCAT( buf, "\tprefix, buf, len ); + STRNCATF( buf, "\" string=\"%s\"/>\n", insn->prefix_string, len ); + + STRNCATF( buf, "\tgroup), len ); + STRNCATF( buf, "type=\"%s\" ", get_insn_type_str (insn->type), len ); + STRNCATF( buf, "string=\"%s\"/>\n", insn->mnemonic, len ); + + STRNCAT( buf, "\t\n", len ); + STRNCAT( buf, "\t\tflags_set, buf, len ); + STRNCAT( buf, "\"/>\n\t\n", len ); + + + STRNCAT( buf, "\t\n", len ); + STRNCAT( buf, "\t\tflags_tested, buf, len ); + STRNCAT( buf, "\"/>\n\t\n", len ); + + if ( x86_operand_1st( insn ) ) { + x86_format_operand( x86_operand_1st(insn), str, + sizeof str, xml_syntax); + STRNCAT( buf, "\t\n", len ); + STRNCAT( buf, str, len ); + STRNCAT( buf, "\t\n", len ); + } + + if ( x86_operand_2nd( insn ) ) { + x86_format_operand( x86_operand_2nd( insn ), str, + sizeof str, xml_syntax); + STRNCAT( buf, "\t\n", len ); + STRNCAT( buf, str, len ); + STRNCAT( buf, "\t\n", len ); + } + + if ( x86_operand_3rd( insn ) ) { + x86_format_operand( x86_operand_3rd(insn), str, + sizeof str, xml_syntax); + STRNCAT( buf, "\t\n", len ); + STRNCAT( buf, str, len ); + STRNCAT( buf, "\t\n", len ); + } + + STRNCAT( buf, "\n", len ); + + return strlen (buf); +} + +int x86_format_header( char *buf, int len, enum x86_asm_format format ) { + switch (format) { + case att_syntax: + snprintf( buf, len, "MNEMONIC\tSRC, DEST, IMM" ); + break; + case intel_syntax: + snprintf( buf, len, "MNEMONIC\tDEST, SRC, IMM" ); + break; + case native_syntax: + snprintf( buf, len, "ADDRESS\tBYTES\tMNEMONIC\t" + "DEST\tSRC\tIMM" ); + break; + case raw_syntax: + snprintf( buf, len, "ADDRESS|OFFSET|SIZE|BYTES|" + "PREFIX|PREFIX_STRING|GROUP|TYPE|NOTES|" + "MNEMONIC|CPU|ISA|FLAGS_SET|FLAGS_TESTED|" + "STACK_MOD|STACK_MOD_VAL" + "[|OP_TYPE|OP_DATATYPE|OP_ACCESS|OP_FLAGS|OP]*" + ); + break; + case xml_syntax: + snprintf( buf, len, + "" + "
" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "" + "
" + "" + "" + "" + "
" + "" + "" + "" + ); + break; + case unknown_syntax: + if ( len ) { + buf[0] = '\0'; + } + break; + } + + return( strlen(buf) ); +} + +int x86_format_insn( x86_insn_t *insn, char *buf, int len, + enum x86_asm_format format ){ + char str[MAX_OP_STRING]; + x86_op_t *src, *dst; + int i; + + memset(buf, 0, len); + if ( format == intel_syntax ) { + /* INTEL STYLE: mnemonic dest, src, imm */ + STRNCAT( buf, insn->prefix_string, len ); + STRNCAT( buf, insn->mnemonic, len ); + STRNCAT( buf, "\t", len ); + + /* dest */ + if ( (dst = x86_operand_1st( insn )) && !(dst->flags & op_implied) ) { + x86_format_operand( dst, str, MAX_OP_STRING, format); + STRNCAT( buf, str, len ); + } + + /* src */ + if ( (src = x86_operand_2nd( insn )) ) { + if ( !(dst->flags & op_implied) ) { + STRNCAT( buf, ", ", len ); + } + x86_format_operand( src, str, MAX_OP_STRING, format); + STRNCAT( buf, str, len ); + } + + /* imm */ + if ( x86_operand_3rd( insn )) { + STRNCAT( buf, ", ", len ); + x86_format_operand( x86_operand_3rd( insn ), + str, MAX_OP_STRING, format); + STRNCAT( buf, str, len ); + } + + } else if ( format == att_syntax ) { + /* ATT STYLE: mnemonic src, dest, imm */ + STRNCAT( buf, insn->prefix_string, len ); + format_att_mnemonic(insn, str, MAX_OP_STRING); + STRNCATF( buf, "%s\t", str, len); + + + /* not sure which is correct? sometimes GNU as requires + * an imm as the first operand, sometimes as the third... */ + /* imm */ + if ( x86_operand_3rd( insn ) ) { + x86_format_operand(x86_operand_3rd( insn ), + str, MAX_OP_STRING, format); + STRNCAT( buf, str, len ); + /* there is always 'dest' operand if there is 'src' */ + STRNCAT( buf, ", ", len ); + } + + if ( (insn->note & insn_note_nonswap ) == 0 ) { + /* regular AT&T style swap */ + src = x86_operand_2nd( insn ); + dst = x86_operand_1st( insn ); + } + else { + /* special-case instructions */ + src = x86_operand_1st( insn ); + dst = x86_operand_2nd( insn ); + } + + /* src */ + if ( src ) { + x86_format_operand(src, str, MAX_OP_STRING, format); + STRNCAT( buf, str, len ); + /* there is always 'dest' operand if there is 'src' */ + if ( dst && !(dst->flags & op_implied) ) { + STRNCAT( buf, ", ", len ); + } + } + + /* dest */ + if ( dst && !(dst->flags & op_implied) ) { + x86_format_operand( dst, str, MAX_OP_STRING, format); + STRNCAT( buf, str, len ); + } + + + } else if ( format == raw_syntax ) { + format_raw_insn( insn, buf, len ); + } else if ( format == xml_syntax ) { + format_xml_insn( insn, buf, len ); + } else { /* default to native */ + /* NATIVE style: RVA\tBYTES\tMNEMONIC\tOPERANDS */ + /* print address */ + STRNCATF( buf, "%08" PRIX32 "\t", insn->addr, len ); + + /* print bytes */ + for ( i = 0; i < insn->size; i++ ) { + STRNCATF( buf, "%02X ", insn->bytes[i], len ); + } + + STRNCAT( buf, "\t", len ); + + /* print mnemonic */ + STRNCAT( buf, insn->prefix_string, len ); + STRNCAT( buf, insn->mnemonic, len ); + STRNCAT( buf, "\t", len ); + + /* print operands */ + /* dest */ + if ( x86_operand_1st( insn ) ) { + x86_format_operand( x86_operand_1st( insn ), + str, MAX_OP_STRING, format); + STRNCATF( buf, "%s\t", str, len ); + } + + /* src */ + if ( x86_operand_2nd( insn ) ) { + x86_format_operand(x86_operand_2nd( insn ), + str, MAX_OP_STRING, format); + STRNCATF( buf, "%s\t", str, len ); + } + + /* imm */ + if ( x86_operand_3rd( insn )) { + x86_format_operand( x86_operand_3rd( insn ), + str, MAX_OP_STRING, format); + STRNCAT( buf, str, len ); + } + } + + return( strlen( buf ) ); +} + diff --git 
a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_imm.c b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_imm.c new file mode 100644 index 0000000000..cd59bfc9ab --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_imm.c @@ -0,0 +1,70 @@ +#include "qword.h" +#include "x86_imm.h" + +#include + +unsigned int x86_imm_signsized( unsigned char * buf, size_t buf_len, + void *dest, unsigned int size ) { + signed char *cp = (signed char *) dest; + signed short *sp = (signed short *) dest; + int32_t *lp = (int32_t *) dest; + qword_t *qp = (qword_t *) dest; + + if ( size > buf_len ) { + return 0; + } + + /* Copy 'size' bytes from *buf to *op + * return number of bytes copied */ + switch (size) { + case 1: /* BYTE */ + *cp = *((signed char *) buf); + break; + case 2: /* WORD */ + *sp = *((signed short *) buf); + break; + case 6: + case 8: /* QWORD */ + *qp = *((qword_t *) buf); + break; + case 4: /* DWORD */ + default: + *lp = *((int32_t *) buf); + break; + } + return (size); +} + +unsigned int x86_imm_sized( unsigned char * buf, size_t buf_len, void *dest, + unsigned int size ) { + unsigned char *cp = (unsigned char *) dest; + unsigned short *sp = (unsigned short *) dest; + uint32_t *lp = (uint32_t *) dest; + qword_t *qp = (qword_t *) dest; + + if ( size > buf_len ) { + return 0; + } + + /* Copy 'size' bytes from *buf to *op + * return number of bytes copied */ + switch (size) { + case 1: /* BYTE */ + *cp = *((unsigned char *) buf); + break; + case 2: /* WORD */ + *sp = *((unsigned short *) buf); + break; + case 6: + case 8: /* QWORD */ + *qp = *((qword_t *) buf); + break; + case 4: /* DWORD */ + default: + *lp = *((uint32_t *) buf); + break; + } + + return (size); +} + diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_imm.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_imm.h new file mode 100644 index 0000000000..fa35ff2de4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_imm.h @@ -0,0 +1,18 @@ +#ifndef x86_IMM_H +#define x86_IMM_H + +#include "./qword.h" +#include + +#ifdef WIN32 +#include +#endif + +/* these are in the global x86 namespace but are not a part of the + * official API */ +unsigned int x86_imm_sized( unsigned char *buf, size_t buf_len, void *dest, + unsigned int size ); + +unsigned int x86_imm_signsized( unsigned char *buf, size_t buf_len, void *dest, + unsigned int size ); +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_insn.c b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_insn.c new file mode 100644 index 0000000000..5649b89fb8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_insn.c @@ -0,0 +1,182 @@ +#include +#include + +#include "libdis.h" + +#ifdef _MSC_VER + #define snprintf _snprintf + #define inline __inline +#endif + +int x86_insn_is_valid( x86_insn_t *insn ) { + if ( insn && insn->type != insn_invalid && insn->size > 0 ) { + return 1; + } + + return 0; +} + +uint32_t x86_get_address( x86_insn_t *insn ) { + x86_oplist_t *op_lst; + if (! insn || ! 
insn->operands ) { + return 0; + } + + for (op_lst = insn->operands; op_lst; op_lst = op_lst->next ) { + if ( op_lst->op.type == op_offset ) { + return op_lst->op.data.offset; + } else if ( op_lst->op.type == op_absolute ) { + if ( op_lst->op.datatype == op_descr16 ) { + return (uint32_t) + op_lst->op.data.absolute.offset.off16; + } + return op_lst->op.data.absolute.offset.off32; + } + } + + return 0; +} + +int32_t x86_get_rel_offset( x86_insn_t *insn ) { + x86_oplist_t *op_lst; + if (! insn || ! insn->operands ) { + return 0; + } + + for (op_lst = insn->operands; op_lst; op_lst = op_lst->next ) { + if ( op_lst->op.type == op_relative_near ) { + return (int32_t) op_lst->op.data.relative_near; + } else if ( op_lst->op.type == op_relative_far ) { + return op_lst->op.data.relative_far; + } + } + + return 0; +} + +x86_op_t * x86_get_branch_target( x86_insn_t *insn ) { + x86_oplist_t *op_lst; + if (! insn || ! insn->operands ) { + return NULL; + } + + for (op_lst = insn->operands; op_lst; op_lst = op_lst->next ) { + if ( op_lst->op.access & op_execute ) { + return &(op_lst->op); + } + } + + return NULL; +} +x86_op_t * x86_get_imm( x86_insn_t *insn ) { + x86_oplist_t *op_lst; + if (! insn || ! insn->operands ) { + return NULL; + } + + for (op_lst = insn->operands; op_lst; op_lst = op_lst->next ) { + if ( op_lst->op.type == op_immediate ) { + return &(op_lst->op); + } + } + + return NULL; +} + +#define IS_PROPER_IMM( x ) \ + x->op.type == op_immediate && ! (x->op.flags & op_hardcode) + + +/* if there is an immediate value in the instruction, return a pointer to + * it */ +unsigned char * x86_get_raw_imm( x86_insn_t *insn ) { + int size, offset; + x86_op_t *op = NULL; + + if (! insn || ! insn->operands ) { + return(NULL); + } + + /* a bit inelegant, but oh well... */ + if ( IS_PROPER_IMM( insn->operands ) ) { + op = &insn->operands->op; + } else if ( insn->operands->next ) { + if ( IS_PROPER_IMM( insn->operands->next ) ) { + op = &insn->operands->next->op; + } else if ( insn->operands->next->next && + IS_PROPER_IMM( insn->operands->next->next ) ) { + op = &insn->operands->next->next->op; + } + } + + if (! 
op ) { + return( NULL ); + } + + /* immediate data is at the end of the insn */ + size = x86_operand_size( op ); + offset = insn->size - size; + return( &insn->bytes[offset] ); +} + + +unsigned int x86_operand_size( x86_op_t *op ) { + switch (op->datatype ) { + case op_byte: return 1; + case op_word: return 2; + case op_dword: return 4; + case op_qword: return 8; + case op_dqword: return 16; + case op_sreal: return 4; + case op_dreal: return 8; + case op_extreal: return 10; + case op_bcd: return 10; + case op_ssimd: return 16; + case op_dsimd: return 16; + case op_sssimd: return 4; + case op_sdsimd: return 8; + case op_descr32: return 6; + case op_descr16: return 4; + case op_pdescr32: return 6; + case op_pdescr16: return 6; + case op_bounds16: return 4; + case op_bounds32: return 8; + case op_fpuenv16: return 14; + case op_fpuenv32: return 28; + case op_fpustate16: return 94; + case op_fpustate32: return 108; + case op_fpregset: return 512; + case op_fpreg: return 10; + case op_none: return 0; + } + return(4); /* default size */ +} + +void x86_set_insn_addr( x86_insn_t *insn, uint32_t addr ) { + if ( insn ) insn->addr = addr; +} + +void x86_set_insn_offset( x86_insn_t *insn, unsigned int offset ){ + if ( insn ) insn->offset = offset; +} + +void x86_set_insn_function( x86_insn_t *insn, void * func ){ + if ( insn ) insn->function = func; +} + +void x86_set_insn_block( x86_insn_t *insn, void * block ){ + if ( insn ) insn->block = block; +} + +void x86_tag_insn( x86_insn_t *insn ){ + if ( insn ) insn->tag = 1; +} + +void x86_untag_insn( x86_insn_t *insn ){ + if ( insn ) insn->tag = 0; +} + +int x86_insn_is_tagged( x86_insn_t *insn ){ + return insn->tag; +} + diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_misc.c b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_misc.c new file mode 100644 index 0000000000..3d2dd0ae8b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_misc.c @@ -0,0 +1,71 @@ +#include +#include +#include + +#include "libdis.h" +#include "ia32_insn.h" +#include "ia32_reg.h" /* for ia32_reg wrapper */ +#include "ia32_settings.h" +extern ia32_settings_t ia32_settings; + +#ifdef _MSC_VER + #define snprintf _snprintf + #define inline __inline +#endif + + +/* =========================================================== INIT/TERM */ +static DISASM_REPORTER __x86_reporter_func = NULL; +static void * __x86_reporter_arg = NULL; + +int x86_init( enum x86_options options, DISASM_REPORTER reporter, void * arg ) +{ + ia32_settings.options = options; + __x86_reporter_func = reporter; + __x86_reporter_arg = arg; + + return 1; +} + +void x86_set_reporter( DISASM_REPORTER reporter, void * arg ) { + __x86_reporter_func = reporter; + __x86_reporter_arg = arg; +} + +void x86_set_options( enum x86_options options ){ + ia32_settings.options = options; +} + +enum x86_options x86_get_options( void ) { + return ia32_settings.options; +} + +int x86_cleanup( void ) +{ + return 1; +} + +/* =========================================================== ERRORS */ +void x86_report_error( enum x86_report_codes code, void *data ) { + if ( __x86_reporter_func ) { + (*__x86_reporter_func)(code, data, __x86_reporter_arg); + } +} + + +/* =========================================================== MISC */ +unsigned int x86_endian(void) { return ia32_settings.endian; } +unsigned int x86_addr_size(void) { return ia32_settings.sz_addr; } +unsigned int x86_op_size(void) { return ia32_settings.sz_oper; } +unsigned int 
x86_word_size(void) { return ia32_settings.sz_word; } +unsigned int x86_max_insn_size(void) { return ia32_settings.max_insn; } +unsigned int x86_sp_reg(void) { return ia32_settings.id_sp_reg; } +unsigned int x86_fp_reg(void) { return ia32_settings.id_fp_reg; } +unsigned int x86_ip_reg(void) { return ia32_settings.id_ip_reg; } +unsigned int x86_flag_reg(void) { return ia32_settings.id_flag_reg; } + +/* wrapper function to hide the IA32 register fn */ +void x86_reg_from_id( unsigned int id, x86_reg_t * reg ) { + ia32_handle_register( reg, id ); + return; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_operand_list.c b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_operand_list.c new file mode 100644 index 0000000000..95409e0698 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_operand_list.c @@ -0,0 +1,191 @@ +#include +#include "libdis.h" + + +static void x86_oplist_append( x86_insn_t *insn, x86_oplist_t *op ) { + x86_oplist_t *list; + + if (! insn ) { + return; + } + + list = insn->operands; + if (! list ) { + insn->operand_count = 1; + /* Note that we have no way of knowing if this is an + * exlicit operand or not, since the caller fills + * the x86_op_t after we return. We increase the + * explicit count automatically, and ia32_insn_implicit_ops + * decrements it */ + insn->explicit_count = 1; + insn->operands = op; + return; + } + + /* get to end of list */ + for ( ; list->next; list = list->next ) + ; + + insn->operand_count = insn->operand_count + 1; + insn->explicit_count = insn->explicit_count + 1; + list->next = op; + + return; +} + +x86_op_t * x86_operand_new( x86_insn_t *insn ) { + x86_oplist_t *op; + + if (! insn ) { + return(NULL); + } + op = calloc( sizeof(x86_oplist_t), 1 ); + op->op.insn = insn; + x86_oplist_append( insn, op ); + return( &(op->op) ); +} + +void x86_oplist_free( x86_insn_t *insn ) { + x86_oplist_t *op, *list; + + if (! insn ) { + return; + } + + for ( list = insn->operands; list; ) { + op = list; + list = list->next; + free(op); + } + + insn->operands = NULL; + insn->operand_count = 0; + insn->explicit_count = 0; + + return; +} + +/* ================================================== LIBDISASM API */ +/* these could probably just be #defines, but that means exposing the + enum... yet one more confusing thing in the API */ +int x86_operand_foreach( x86_insn_t *insn, x86_operand_fn func, void *arg, + enum x86_op_foreach_type type ){ + x86_oplist_t *list; + char explicit = 1, implicit = 1; + + if (! insn || ! func ) { + return 0; + } + + /* note: explicit and implicit can be ORed together to + * allow an "all" limited by access type, even though the + * user is stupid to do this since it is default behavior :) */ + if ( (type & op_explicit) && ! (type & op_implicit) ) { + implicit = 0; + } + if ( (type & op_implicit) && ! (type & op_explicit) ) { + explicit = 0; + } + + type = type & 0x0F; /* mask out explicit/implicit operands */ + + for ( list = insn->operands; list; list = list->next ) { + if (! implicit && (list->op.flags & op_implied) ) { + /* operand is implicit */ + continue; + } + + if (! explicit && ! (list->op.flags & op_implied) ) { + /* operand is not implicit */ + continue; + } + + switch ( type ) { + case op_any: + break; + case op_dest: + if (! (list->op.access & op_write) ) { + continue; + } + break; + case op_src: + if (! (list->op.access & op_read) ) { + continue; + } + break; + case op_ro: + if (! 
(list->op.access & op_read) || + (list->op.access & op_write ) ) { + continue; + } + break; + case op_wo: + if (! (list->op.access & op_write) || + (list->op.access & op_read ) ) { + continue; + } + break; + case op_xo: + if (! (list->op.access & op_execute) ) { + continue; + } + break; + case op_rw: + if (! (list->op.access & op_write) || + ! (list->op.access & op_read ) ) { + continue; + } + break; + case op_implicit: case op_explicit: /* make gcc happy */ + break; + } + /* any non-continue ends up here: invoke the callback */ + (*func)( &list->op, insn, arg ); + } + + return 1; +} + +static void count_operand( x86_op_t *op, x86_insn_t *insn, void *arg ) { + size_t * count = (size_t *) arg; + *count = *count + 1; +} + +size_t x86_operand_count( x86_insn_t *insn, enum x86_op_foreach_type type ) { + size_t count = 0; + + /* save us a list traversal for common counts... */ + if ( type == op_any ) { + return insn->operand_count; + } else if ( type == op_explicit ) { + return insn->explicit_count; + } + + x86_operand_foreach( insn, count_operand, &count, type ); + return count; +} + +/* accessor functions */ +x86_op_t * x86_operand_1st( x86_insn_t *insn ) { + if (! insn->explicit_count ) { + return NULL; + } + + return &(insn->operands->op); +} + +x86_op_t * x86_operand_2nd( x86_insn_t *insn ) { + if ( insn->explicit_count < 2 ) { + return NULL; + } + + return &(insn->operands->next->op); +} + +x86_op_t * x86_operand_3rd( x86_insn_t *insn ) { + if ( insn->explicit_count < 3 ) { + return NULL; + } + + return &(insn->operands->next->next->op); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_operand_list.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_operand_list.h new file mode 100644 index 0000000000..53668658ec --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/libdisasm/x86_operand_list.h @@ -0,0 +1,8 @@ +#ifndef X86_OPERAND_LIST_H +#define X86_OPERAND_LIST_H +#include "libdis.h" + + +x86_op_t * x86_operand_new( x86_insn_t *insn ); + +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/linux/include/gflags/gflags.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/linux/include/gflags/gflags.h new file mode 100644 index 0000000000..08a3b637e1 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/linux/include/gflags/gflags.h @@ -0,0 +1,533 @@ +// Copyright (c) 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Author: Ray Sidney
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags). Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"  // foo.h has a line "DECLARE_int32(start);"
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//    DECLARE_bool(verbose);  // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+//    Then, at the command-line:
+//    ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe. Here are the meanings we use:
+//
+//   thread-safe: it is safe for multiple threads to call this routine
+//     (or, when referring to a class, methods of this class)
+//     concurrently.
+//   thread-hostile: it is not safe for multiple threads to call this
+//     routine (or methods of this class) concurrently. In gflags,
+//     most thread-hostile routines are intended to be called early in,
+//     or even before, main() -- that is, before threads are spawned.
+//   thread-compatible: it is safe for multiple threads to read from
+//     this variable (when applied to variables), or to call const
+//     methods of this class (when applied to classes), as long as no
+//     other thread is writing to the variable or calling non-const
+//     methods of this class.
+
+#ifndef GOOGLE_GFLAGS_H_
+#define GOOGLE_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+// We care a lot about number of bits things take up. Unfortunately,
+// systems define their bit-specific ints in a lot of different ways.
+// We use our own way, and have a typedef to get there.
+// Note: these commands below may look like "#if 1" or "#if 0", but
+// that's because they were constructed that way at ./configure time.
+// Look at gflags.h.in to see how they're calculated (based on your config).
+#if 1
+#include <stdint.h>       // the normal place uint16_t is defined
+#endif
+#if 1
+#include <sys/types.h>    // the normal place u_int16_t is defined
+#endif
+#if 1
+#include <inttypes.h>     // a third place for uint16_t or u_int16_t
+#endif
+
+namespace google {
+
+#if 1      // the C99 format
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 1    // the BSD format
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 0    // the windows (vc7) format
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file. You may also find
+// it useful to register a validator with the flag. This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, InitGoogle will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)  // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+bool RegisterFlagValidator(const bool* flag,
+                           bool (*validate_fn)(const char*, bool));
+bool RegisterFlagValidator(const int32* flag,
+                           bool (*validate_fn)(const char*, int32));
+bool RegisterFlagValidator(const int64* flag,
+                           bool (*validate_fn)(const char*, int64));
+bool RegisterFlagValidator(const uint64* flag,
+                           bool (*validate_fn)(const char*, uint64));
+bool RegisterFlagValidator(const double* flag,
+                           bool (*validate_fn)(const char*, double));
+bool RegisterFlagValidator(const std::string* flag,
+                           bool (*validate_fn)(const char*, const std::string&));
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags. Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
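(Editorial sketch, not part of the patch: the comments above describe the DEFINE_*/validator workflow of this bundled gflags copy. A minimal client file might look roughly as follows, assuming "gflags/gflags.h" resolves to the header added here and that the flag name "port", the validator, and the usage text are purely illustrative.)

    // client.cc -- illustrative only; "port" and ValidatePort are hypothetical names.
    #include <stdio.h>
    #include "gflags/gflags.h"

    DEFINE_int32(port, 8080, "What port to listen on");

    // Validator registered at static-initialization time, as described above.
    static bool ValidatePort(const char* flagname, google::int32 value) {
      if (value > 0 && value < 32768) return true;  // value is ok
      printf("Invalid value for --%s: %d\n", flagname, (int)value);
      return false;
    }
    static const bool port_validator_ok =
        google::RegisterFlagValidator(&FLAGS_port, &ValidatePort);

    int main(int argc, char** argv) {
      google::SetUsageMessage("sketch of the bundled gflags API");
      google::ParseCommandLineFlags(&argc, &argv, true);  // strips flags from argv
      printf("port = %d\n", (int)FLAGS_port);
      return 0;
    }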
+
+struct CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on flag
+  bool is_default;            // true if the flag has default value
+};
+
+extern void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in commandlineflags_reporting.cc.
+extern void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetArgv(int argc, const char** argv);
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern const std::vector<std::string>& GetArgvs();  // all of argv as a vector
+extern const char* GetArgv();                       // all of argv as a string
+extern const char* GetArgv0();                      // only argv0
+extern uint32 GetArgvSum();                         // simple checksum of argv
+extern const char* ProgramInvocationName();         // argv0, or "UNKNOWN" if not set
+extern const char* ProgramInvocationShortName();    // basename(argv0)
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern const char* ProgramUsage();                  // string set by SetUsageMessage()
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro). But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern bool GetCommandLineFlagInfo(const char* name,
+                                   CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname. exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this. If the flag has not yet updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").
Returns a string +// describing the new value that the option has been set to. The +// return value API is not well-specified, so basically just depend on +// it to be empty if the setting failed for some reason -- the name is +// not a valid flag name, or the value is not a valid value -- and +// non-empty else. + +// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case) +extern std::string SetCommandLineOption(const char* name, const char* value); +extern std::string SetCommandLineOptionWithMode(const char* name, const char* value, + FlagSettingMode set_mode); + + +// -------------------------------------------------------------------- +// Saves the states (value, default value, whether the user has set +// the flag, registered validators, etc) of all flags, and restores +// them when the FlagSaver is destroyed. This is very useful in +// tests, say, when you want to let your tests change the flags, but +// make sure that they get reverted to the original states when your +// test is complete. +// +// Example usage: +// void TestFoo() { +// FlagSaver s1; +// FLAG_foo = false; +// FLAG_bar = "some value"; +// +// // test happens here. You can return at any time +// // without worrying about restoring the FLAG values. +// } +// +// Note: This class is marked with __attribute__((unused)) because all the +// work is done in the constructor and destructor, so in the standard +// usage example above, the compiler would complain that it's an +// unused variable. +// +// This class is thread-safe. + +class FlagSaver { + public: + FlagSaver(); + ~FlagSaver(); + + private: + class FlagSaverImpl* impl_; // we use pimpl here to keep API steady + + FlagSaver(const FlagSaver&); // no copying! + void operator=(const FlagSaver&); +} __attribute__ ((unused)); + +// -------------------------------------------------------------------- +// Some deprecated or hopefully-soon-to-be-deprecated functions. + +// This is often used for logging. TODO(csilvers): figure out a better way +extern std::string CommandlineFlagsIntoString(); +// Usually where this is used, a FlagSaver should be used instead. +extern bool ReadFlagsFromString(const std::string& flagfilecontents, + const char* prog_name, + bool errors_are_fatal); // uses SET_FLAGS_VALUE + +// These let you manually implement --flagfile functionality. +// DEPRECATED. +extern bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name); +extern bool SaveCommandFlags(); // actually defined in google.cc ! +extern bool ReadFromFlagsFile(const std::string& filename, const char* prog_name, + bool errors_are_fatal); // uses SET_FLAGS_VALUE + + +// -------------------------------------------------------------------- +// Useful routines for initializing flags from the environment. +// In each case, if 'varname' does not exist in the environment +// return defval. If 'varname' does exist but is not valid +// (e.g., not a number for an int32 flag), abort with an error. +// Otherwise, return the value. NOTE: for booleans, for true use +// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'. 
+ +extern bool BoolFromEnv(const char *varname, bool defval); +extern int32 Int32FromEnv(const char *varname, int32 defval); +extern int64 Int64FromEnv(const char *varname, int64 defval); +extern uint64 Uint64FromEnv(const char *varname, uint64 defval); +extern double DoubleFromEnv(const char *varname, double defval); +extern const char *StringFromEnv(const char *varname, const char *defval); + + +// -------------------------------------------------------------------- +// The next two functions parse commandlineflags from main(): + +// Set the "usage" message for this program. For example: +// string usage("This program does nothing. Sample usage:\n"); +// usage += argv[0] + " "; +// SetUsageMessage(usage); +// Do not include commandline flags in the usage: we do that for you! +// Thread-hostile; meant to be called before any threads are spawned. +extern void SetUsageMessage(const std::string& usage); + +// Looks for flags in argv and parses them. Rearranges argv to put +// flags first, or removes them entirely if remove_flags is true. +// If a flag is defined more than once in the command line or flag +// file, the last definition is used. +// See top-of-file for more details on this function. +#ifndef SWIG // In swig, use ParseCommandLineFlagsScript() instead. +extern uint32 ParseCommandLineFlags(int *argc, char*** argv, + bool remove_flags); +#endif + + +// Calls to ParseCommandLineNonHelpFlags and then to +// HandleCommandLineHelpFlags can be used instead of a call to +// ParseCommandLineFlags during initialization, in order to allow for +// changing default values for some FLAGS (via +// e.g. SetCommandLineOptionWithMode calls) between the time of +// command line parsing and the time of dumping help information for +// the flags as a result of command line parsing. +// If a flag is defined more than once in the command line or flag +// file, the last definition is used. +extern uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv, + bool remove_flags); +// This is actually defined in commandlineflags_reporting.cc. +// This function is misnamed (it also handles --version, etc.), but +// it's too late to change that now. :-( +extern void HandleCommandLineHelpFlags(); // in commandlineflags_reporting.cc + +// Allow command line reparsing. Disables the error normally +// generated when an unknown flag is found, since it may be found in a +// later parse. Thread-hostile; meant to be called before any threads +// are spawned. +extern void AllowCommandLineReparsing(); + +// Reparse the flags that have not yet been recognized. +// Only flags registered since the last parse will be recognized. +// Any flag value must be provided as part of the argument using "=", +// not as a separate command line argument that follows the flag argument. +// Intended for handling flags from dynamically loaded libraries, +// since their flags are not registered until they are loaded. +extern uint32 ReparseCommandLineNonHelpFlags(); + + +// -------------------------------------------------------------------- +// Now come the command line flag declaration/definition macros that +// will actually be used. They're kind of hairy. A major reason +// for this is initialization: we want people to be able to access +// variables in global constructors and have that not crash, even if +// their global constructor runs before the global constructor here. +// (Obviously, we can't guarantee the flags will have the correct +// default value in that case, but at least accessing them is safe.) 
+// The only way to do that is have flags point to a static buffer. +// So we make one, using a union to ensure proper alignment, and +// then use placement-new to actually set up the flag with the +// correct default value. In the same vein, we have to worry about +// flag access in global destructors, so FlagRegisterer has to be +// careful never to destroy the flag-values it constructs. +// +// Note that when we define a flag variable FLAGS_, we also +// preemptively define a junk variable, FLAGS_no. This is to +// cause a link-time error if someone tries to define 2 flags with +// names like "logging" and "nologging". We do this because a bool +// flag FLAG can be set from the command line to true with a "-FLAG" +// argument, and to false with a "-noFLAG" argument, and so this can +// potentially avert confusion. +// +// We also put flags into their own namespace. It is purposefully +// named in an opaque way that people should have trouble typing +// directly. The idea is that DEFINE puts the flag in the weird +// namespace, and DECLARE imports the flag from there into the current +// namespace. The net result is to force people to use DECLARE to get +// access to a flag, rather than saying "extern bool FLAGS_whatever;" +// or some such instead. We want this so we can put extra +// functionality (like sanity-checking) in DECLARE if we want, and +// make sure it is picked up everywhere. +// +// We also put the type of the variable in the namespace, so that +// people can't DECLARE_int32 something that they DEFINE_bool'd +// elsewhere. + +class FlagRegisterer { + public: + FlagRegisterer(const char* name, const char* type, + const char* help, const char* filename, + void* current_storage, void* defvalue_storage); +}; + +extern bool FlagsTypeWarn(const char *name); + +// If your application #defines STRIP_FLAG_HELP to a non-zero value +// before #including this file, we remove the help message from the +// binary file. This can reduce the size of the resulting binary +// somewhat, and may also be useful for security reasons. + +extern const char kStrippedFlagHelp[]; + +} + +#ifndef SWIG // In swig, ignore the main flag declarations + +#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0 +// Need this construct to avoid the 'defined but not used' warning. +#define MAYBE_STRIPPED_HELP(txt) (false ? (txt) : kStrippedFlagHelp) +#else +#define MAYBE_STRIPPED_HELP(txt) txt +#endif + +// Each command-line flag has two variables associated with it: one +// with the current value, and one with the default value. However, +// we have a third variable, which is where value is assigned; it's a +// constant. This guarantees that FLAG_##value is initialized at +// static initialization time (e.g. before program-start) rather than +// than global construction time (which is after program-start but +// before main), at least when 'value' is a compile-time constant. We +// use a small trick for the "default value" variable, and call it +// FLAGS_no. This serves the second purpose of assuring a +// compile error if someone tries to define a flag named no +// which is illegal (--foo and --nofoo both affect the "foo" flag). 
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype { \
+    static const type FLAGS_nono##name = value; \
+    type FLAGS_##name = FLAGS_nono##name; \
+    type FLAGS_no##name = FLAGS_nono##name; \
+    static ::google::FlagRegisterer o_##name( \
+      #name, #type, MAYBE_STRIPPED_HELP(help), __FILE__, \
+      &FLAGS_##name, &FLAGS_no##name); \
+  } \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  namespace fL##shorttype { \
+    extern type FLAGS_##name; \
+  } \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool. These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// compile-time assert (msg[-1] will give a compile-time error).
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+    (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template <typename From> double IsBoolFlag(const From& from);
+bool IsBoolFlag(bool from);
+}  // namespace fLB
+
+#define DECLARE_bool(name) DECLARE_VARIABLE(bool,B, name)
+#define DEFINE_bool(name,val,txt) \
+  namespace fLB { \
+    typedef CompileAssert FLAG_##name##_value_is_not_a_bool[ \
+      (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double)) ? 1 : -1]; \
+  } \
+  DEFINE_VARIABLE(bool,B, name, val, txt)
+
+#define DECLARE_int32(name) DECLARE_VARIABLE(::google::int32,I, name)
+#define DEFINE_int32(name,val,txt) DEFINE_VARIABLE(::google::int32,I, name, val, txt)
+
+#define DECLARE_int64(name) DECLARE_VARIABLE(::google::int64,I64, name)
+#define DEFINE_int64(name,val,txt) DEFINE_VARIABLE(::google::int64,I64, name, val, txt)
+
+#define DECLARE_uint64(name) DECLARE_VARIABLE(::google::uint64,U64, name)
+#define DEFINE_uint64(name,val,txt) DEFINE_VARIABLE(::google::uint64,U64, name, val, txt)
+
+#define DECLARE_double(name) DECLARE_VARIABLE(double,D, name)
+#define DEFINE_double(name,val,txt) DEFINE_VARIABLE(double,D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later). To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later. It's not perfect, but the best we can do.
+#define DECLARE_string(name) namespace fLS { extern std::string& FLAGS_##name; } \
+  using fLS::FLAGS_##name
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring. And we need a temporary place to put val
+// so we don't have to evaluate it twice. Two great needs that go
+// great together!
+// The weird 'using' + 'extern' inside the fLS namespace is to work around
+// an unknown compiler bug/issue with the gcc 4.2.1 on SUSE 10.
See +// http://code.google.com/p/google-gflags/issues/detail?id=20 +#define DEFINE_string(name, val, txt) \ + namespace fLS { \ + static union { void* align; char s[sizeof(std::string)]; } s_##name[2]; \ + const std::string* const FLAGS_no##name = new (s_##name[0].s) std::string(val); \ + static ::google::FlagRegisterer o_##name( \ + #name, "string", MAYBE_STRIPPED_HELP(txt), __FILE__, \ + s_##name[0].s, new (s_##name[1].s) std::string(*FLAGS_no##name)); \ + extern std::string& FLAGS_##name; \ + using fLS::FLAGS_##name; \ + std::string& FLAGS_##name = *(reinterpret_cast(s_##name[0].s)); \ + } \ + using fLS::FLAGS_##name + +#endif // SWIG + +#endif // GOOGLE_GFLAGS_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/linux/include/gflags/gflags_completions.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/linux/include/gflags/gflags_completions.h new file mode 100644 index 0000000000..9d9ce7a5f7 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/linux/include/gflags/gflags_completions.h @@ -0,0 +1,121 @@ +// Copyright (c) 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// --- +// Author: Dave Nicponski +// +// Implement helpful bash-style command line flag completions +// +// ** Functional API: +// HandleCommandLineCompletions() should be called early during +// program startup, but after command line flag code has been +// initialized, such as the beginning of HandleCommandLineHelpFlags(). +// It checks the value of the flag --tab_completion_word. If this +// flag is empty, nothing happens here. If it contains a string, +// however, then HandleCommandLineCompletions() will hijack the +// process, attempting to identify the intention behind this +// completion. Regardless of the outcome of this deduction, the +// process will be terminated, similar to --helpshort flag +// handling. +// +// ** Overview of Bash completions: +// Bash can be told to programatically determine completions for the +// current 'cursor word'. 
It does this by (in this case) invoking a +// command with some additional arguments identifying the command +// being executed, the word being completed, and the previous word +// (if any). Bash then expects a sequence of output lines to be +// printed to stdout. If these lines all contain a common prefix +// longer than the cursor word, bash will replace the cursor word +// with that common prefix, and display nothing. If there isn't such +// a common prefix, bash will display the lines in pages using 'more'. +// +// ** Strategy taken for command line completions: +// If we can deduce either the exact flag intended, or a common flag +// prefix, we'll output exactly that. Otherwise, if information +// must be displayed to the user, we'll take the opportunity to add +// some helpful information beyond just the flag name (specifically, +// we'll include the default flag value and as much of the flag's +// description as can fit on a single terminal line width, as specified +// by the flag --tab_completion_columns). Furthermore, we'll try to +// make bash order the output such that the most useful or relevent +// flags are the most likely to be shown at the top. +// +// ** Additional features: +// To assist in finding that one really useful flag, substring matching +// was implemented. Before pressing a to get completion for the +// current word, you can append one or more '?' to the flag to do +// substring matching. Here's the semantics: +// --foo Show me all flags with names prefixed by 'foo' +// --foo? Show me all flags with 'foo' somewhere in the name +// --foo?? Same as prior case, but also search in module +// definition path for 'foo' +// --foo??? Same as prior case, but also search in flag +// descriptions for 'foo' +// Finally, we'll trim the output to a relatively small number of +// flags to keep bash quiet about the verbosity of output. If one +// really wanted to see all possible matches, appending a '+' to the +// search word will force the exhaustive list of matches to be printed. +// +// ** How to have bash accept completions from a binary: +// Bash requires that it be informed about each command that programmatic +// completion should be enabled for. Example addition to a .bashrc +// file would be (your path to gflags_completions.sh file may differ): + +/* +$ complete -o bashdefault -o default -o nospace -C \ + '/usr/local/bin/gflags_completions.sh --tab_completion_columns $COLUMNS' \ + time env binary_name another_binary [...] +*/ + +// This would allow the following to work: +// $ /path/to/binary_name --vmodule +// Or: +// $ ./bin/path/another_binary --gfs_u +// (etc) +// +// Sadly, it appears that bash gives no easy way to force this behavior for +// all commands. That's where the "time" in the above example comes in. +// If you haven't specifically added a command to the list of completion +// supported commands, you can still get completions by prefixing the +// entire command with "env". +// $ env /some/brand/new/binary --vmod +// Assuming that "binary" is a newly compiled binary, this should still +// produce the expected completion output. 
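(Editorial sketch, not part of the patch: the comment above says HandleCommandLineCompletions() should run early in startup, e.g. at the beginning of HandleCommandLineHelpFlags(). A program wiring the two phases explicitly might look roughly like this, assuming it links against the bundled libgflags.a and that the include paths resolve to the headers added in this commit.)

    #include "gflags/gflags.h"
    #include "gflags/gflags_completions.h"

    int main(int argc, char** argv) {
      // Parse everything except the help-style flags first...
      google::ParseCommandLineNonHelpFlags(&argc, &argv, true);
      // ...let a --tab_completion_word request hijack and terminate the process...
      google::HandleCommandLineCompletions();
      // ...then fall through to normal --help/--version handling.
      google::HandleCommandLineHelpFlags();
      return 0;
    }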
+ + +#ifndef GOOGLE_GFLAGS_COMPLETIONS_H_ +#define GOOGLE_GFLAGS_COMPLETIONS_H_ + +namespace google { + +void HandleCommandLineCompletions(void); + +} + +#endif // GOOGLE_GFLAGS_COMPLETIONS_H_ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/linux/lib/gflags/libgflags.a b/TMessagesProj/jni/third_party/breakpad/src/third_party/linux/lib/gflags/libgflags.a new file mode 100644 index 0000000000..c0de874c96 Binary files /dev/null and b/TMessagesProj/jni/third_party/breakpad/src/third_party/linux/lib/gflags/libgflags.a differ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/linux/lib/glog/libglog.a b/TMessagesProj/jni/third_party/breakpad/src/third_party/linux/lib/glog/libglog.a new file mode 100644 index 0000000000..8d2afaef64 Binary files /dev/null and b/TMessagesProj/jni/third_party/breakpad/src/third_party/linux/lib/glog/libglog.a differ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/lss/linux_syscall_support.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/lss/linux_syscall_support.h new file mode 100644 index 0000000000..1fe0ae89e9 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/lss/linux_syscall_support.h @@ -0,0 +1,4029 @@ +/* Copyright (c) 2005-2011, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * --- + * Author: Markus Gutschke + */ + +/* This file includes Linux-specific support functions common to the + * coredumper and the thread lister; primarily, this is a collection + * of direct system calls, and a couple of symbols missing from + * standard header files. + * There are a few options that the including file can set to control + * the behavior of this file: + * + * SYS_CPLUSPLUS: + * The entire header file will normally be wrapped in 'extern "C" { }", + * making it suitable for compilation as both C and C++ source. If you + * do not want to do this, you can set the SYS_CPLUSPLUS macro to inhibit + * the wrapping. N.B. doing so will suppress inclusion of all prerequisite + * system header files, too. 
It is the caller's responsibility to provide + * the necessary definitions. + * + * SYS_ERRNO: + * All system calls will update "errno" unless overriden by setting the + * SYS_ERRNO macro prior to including this file. SYS_ERRNO should be + * an l-value. + * + * SYS_INLINE: + * New symbols will be defined "static inline", unless overridden by + * the SYS_INLINE macro. + * + * SYS_LINUX_SYSCALL_SUPPORT_H + * This macro is used to avoid multiple inclusions of this header file. + * If you need to include this file more than once, make sure to + * unset SYS_LINUX_SYSCALL_SUPPORT_H before each inclusion. + * + * SYS_PREFIX: + * New system calls will have a prefix of "sys_" unless overridden by + * the SYS_PREFIX macro. Valid values for this macro are [0..9] which + * results in prefixes "sys[0..9]_". It is also possible to set this + * macro to -1, which avoids all prefixes. + * + * SYS_SYSCALL_ENTRYPOINT: + * Some applications (such as sandboxes that filter system calls), need + * to be able to run custom-code each time a system call is made. If this + * macro is defined, it expands to the name of a "common" symbol. If + * this symbol is assigned a non-NULL pointer value, it is used as the + * address of the system call entrypoint. + * A pointer to this symbol can be obtained by calling + * get_syscall_entrypoint() + * + * This file defines a few internal symbols that all start with "LSS_". + * Do not access these symbols from outside this file. They are not part + * of the supported API. + */ +#ifndef SYS_LINUX_SYSCALL_SUPPORT_H +#define SYS_LINUX_SYSCALL_SUPPORT_H + +/* We currently only support x86-32, x86-64, ARM, MIPS, and PPC on Linux. + * Porting to other related platforms should not be difficult. + */ +#if (defined(__i386__) || defined(__x86_64__) || defined(__ARM_ARCH_3__) || \ + defined(__mips__) || defined(__PPC__) || defined(__ARM_EABI__) || \ + defined(__aarch64__)) \ + && (defined(__linux) || defined(__ANDROID__)) + +#ifndef SYS_CPLUSPLUS +#ifdef __cplusplus +/* Some system header files in older versions of gcc neglect to properly + * handle being included from C++. As it appears to be harmless to have + * multiple nested 'extern "C"' blocks, just add another one here. + */ +extern "C" { +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#ifdef __mips__ +/* Include definitions of the ABI currently in use. */ +#include +#endif +#endif + +/* The Android NDK's #defines these macros as aliases + * to their non-64 counterparts. To avoid naming conflict, remove them. */ +#ifdef __ANDROID__ + /* These are restored by the corresponding #pragma pop_macro near + * the end of this file. */ +# pragma push_macro("stat64") +# pragma push_macro("fstat64") +# pragma push_macro("lstat64") +# undef stat64 +# undef fstat64 +# undef lstat64 +#endif + +/* As glibc often provides subtly incompatible data structures (and implicit + * wrapper functions that convert them), we provide our own kernel data + * structures for use by the system calls. + * These structures have been developed by using Linux 2.6.23 headers for + * reference. Note though, we do not care about exact API compatibility + * with the kernel, and in fact the kernel often does not have a single + * API that works across architectures. Instead, we try to mimic the glibc + * API where reasonable, and only guarantee ABI compatibility with the + * kernel headers. 
+ * Most notably, here are a few changes that were made to the structures + * defined by kernel headers: + * + * - we only define structures, but not symbolic names for kernel data + * types. For the latter, we directly use the native C datatype + * (i.e. "unsigned" instead of "mode_t"). + * - in a few cases, it is possible to define identical structures for + * both 32bit (e.g. i386) and 64bit (e.g. x86-64) platforms by + * standardizing on the 64bit version of the data types. In particular, + * this means that we use "unsigned" where the 32bit headers say + * "unsigned long". + * - overall, we try to minimize the number of cases where we need to + * conditionally define different structures. + * - the "struct kernel_sigaction" class of structures have been + * modified to more closely mimic glibc's API by introducing an + * anonymous union for the function pointer. + * - a small number of field names had to have an underscore appended to + * them, because glibc defines a global macro by the same name. + */ + +/* include/linux/dirent.h */ +struct kernel_dirent64 { + unsigned long long d_ino; + long long d_off; + unsigned short d_reclen; + unsigned char d_type; + char d_name[256]; +}; + +/* include/linux/dirent.h */ +#if defined(__aarch64__) +// aarch64 only defines dirent64, just uses that for dirent too. +#define kernel_dirent kernel_dirent64 +#else +struct kernel_dirent { + long d_ino; + long d_off; + unsigned short d_reclen; + char d_name[256]; +}; +#endif + +/* include/linux/uio.h */ +struct kernel_iovec { + void *iov_base; + unsigned long iov_len; +}; + +/* include/linux/socket.h */ +struct kernel_msghdr { + void *msg_name; + int msg_namelen; + struct kernel_iovec*msg_iov; + unsigned long msg_iovlen; + void *msg_control; + unsigned long msg_controllen; + unsigned msg_flags; +}; + +/* include/asm-generic/poll.h */ +struct kernel_pollfd { + int fd; + short events; + short revents; +}; + +/* include/linux/resource.h */ +struct kernel_rlimit { + unsigned long rlim_cur; + unsigned long rlim_max; +}; + +/* include/linux/time.h */ +struct kernel_timespec { + long tv_sec; + long tv_nsec; +}; + +/* include/linux/time.h */ +struct kernel_timeval { + long tv_sec; + long tv_usec; +}; + +/* include/linux/resource.h */ +struct kernel_rusage { + struct kernel_timeval ru_utime; + struct kernel_timeval ru_stime; + long ru_maxrss; + long ru_ixrss; + long ru_idrss; + long ru_isrss; + long ru_minflt; + long ru_majflt; + long ru_nswap; + long ru_inblock; + long ru_oublock; + long ru_msgsnd; + long ru_msgrcv; + long ru_nsignals; + long ru_nvcsw; + long ru_nivcsw; +}; + +#if defined(__i386__) || defined(__ARM_EABI__) || defined(__ARM_ARCH_3__) \ + || defined(__PPC__) + +/* include/asm-{arm,i386,mips,ppc}/signal.h */ +struct kernel_old_sigaction { + union { + void (*sa_handler_)(int); + void (*sa_sigaction_)(int, siginfo_t *, void *); + }; + unsigned long sa_mask; + unsigned long sa_flags; + void (*sa_restorer)(void); +} __attribute__((packed,aligned(4))); +#elif (defined(__mips__) && _MIPS_SIM == _MIPS_SIM_ABI32) + #define kernel_old_sigaction kernel_sigaction +#elif defined(__aarch64__) + // No kernel_old_sigaction defined for arm64. +#endif + +/* Some kernel functions (e.g. sigaction() in 2.6.23) require that the + * exactly match the size of the signal set, even though the API was + * intended to be extensible. We define our own KERNEL_NSIG to deal with + * this. + * Please note that glibc provides signals [1.._NSIG-1], whereas the + * kernel (and this header) provides the range [1..KERNEL_NSIG]. 
The + * actual number of signals is obviously the same, but the constants + * differ by one. + */ +#ifdef __mips__ +#define KERNEL_NSIG 128 +#else +#define KERNEL_NSIG 64 +#endif + +/* include/asm-{arm,aarch64,i386,mips,x86_64}/signal.h */ +struct kernel_sigset_t { + unsigned long sig[(KERNEL_NSIG + 8*sizeof(unsigned long) - 1)/ + (8*sizeof(unsigned long))]; +}; + +/* include/asm-{arm,i386,mips,x86_64,ppc}/signal.h */ +struct kernel_sigaction { +#ifdef __mips__ + unsigned long sa_flags; + union { + void (*sa_handler_)(int); + void (*sa_sigaction_)(int, siginfo_t *, void *); + }; + struct kernel_sigset_t sa_mask; +#else + union { + void (*sa_handler_)(int); + void (*sa_sigaction_)(int, siginfo_t *, void *); + }; + unsigned long sa_flags; + void (*sa_restorer)(void); + struct kernel_sigset_t sa_mask; +#endif +}; + +/* include/linux/socket.h */ +struct kernel_sockaddr { + unsigned short sa_family; + char sa_data[14]; +}; + +/* include/asm-{arm,aarch64,i386,mips,ppc}/stat.h */ +#ifdef __mips__ +#if _MIPS_SIM == _MIPS_SIM_ABI64 +struct kernel_stat { +#else +struct kernel_stat64 { +#endif + unsigned st_dev; + unsigned __pad0[3]; + unsigned long long st_ino; + unsigned st_mode; + unsigned st_nlink; + unsigned st_uid; + unsigned st_gid; + unsigned st_rdev; + unsigned __pad1[3]; + long long st_size; + unsigned st_atime_; + unsigned st_atime_nsec_; + unsigned st_mtime_; + unsigned st_mtime_nsec_; + unsigned st_ctime_; + unsigned st_ctime_nsec_; + unsigned st_blksize; + unsigned __pad2; + unsigned long long st_blocks; +}; +#elif defined __PPC__ +struct kernel_stat64 { + unsigned long long st_dev; + unsigned long long st_ino; + unsigned st_mode; + unsigned st_nlink; + unsigned st_uid; + unsigned st_gid; + unsigned long long st_rdev; + unsigned short int __pad2; + long long st_size; + long st_blksize; + long long st_blocks; + long st_atime_; + unsigned long st_atime_nsec_; + long st_mtime_; + unsigned long st_mtime_nsec_; + long st_ctime_; + unsigned long st_ctime_nsec_; + unsigned long __unused4; + unsigned long __unused5; +}; +#else +struct kernel_stat64 { + unsigned long long st_dev; + unsigned char __pad0[4]; + unsigned __st_ino; + unsigned st_mode; + unsigned st_nlink; + unsigned st_uid; + unsigned st_gid; + unsigned long long st_rdev; + unsigned char __pad3[4]; + long long st_size; + unsigned st_blksize; + unsigned long long st_blocks; + unsigned st_atime_; + unsigned st_atime_nsec_; + unsigned st_mtime_; + unsigned st_mtime_nsec_; + unsigned st_ctime_; + unsigned st_ctime_nsec_; + unsigned long long st_ino; +}; +#endif + +/* include/asm-{arm,aarch64,i386,mips,x86_64,ppc}/stat.h */ +#if defined(__i386__) || defined(__ARM_ARCH_3__) || defined(__ARM_EABI__) +struct kernel_stat { + /* The kernel headers suggest that st_dev and st_rdev should be 32bit + * quantities encoding 12bit major and 20bit minor numbers in an interleaved + * format. In reality, we do not see useful data in the top bits. So, + * we'll leave the padding in here, until we find a better solution. 
+ */ + unsigned short st_dev; + short pad1; + unsigned st_ino; + unsigned short st_mode; + unsigned short st_nlink; + unsigned short st_uid; + unsigned short st_gid; + unsigned short st_rdev; + short pad2; + unsigned st_size; + unsigned st_blksize; + unsigned st_blocks; + unsigned st_atime_; + unsigned st_atime_nsec_; + unsigned st_mtime_; + unsigned st_mtime_nsec_; + unsigned st_ctime_; + unsigned st_ctime_nsec_; + unsigned __unused4; + unsigned __unused5; +}; +#elif defined(__x86_64__) +struct kernel_stat { + uint64_t st_dev; + uint64_t st_ino; + uint64_t st_nlink; + unsigned st_mode; + unsigned st_uid; + unsigned st_gid; + unsigned __pad0; + uint64_t st_rdev; + int64_t st_size; + int64_t st_blksize; + int64_t st_blocks; + uint64_t st_atime_; + uint64_t st_atime_nsec_; + uint64_t st_mtime_; + uint64_t st_mtime_nsec_; + uint64_t st_ctime_; + uint64_t st_ctime_nsec_; + int64_t __unused4[3]; +}; +#elif defined(__PPC__) +struct kernel_stat { + unsigned st_dev; + unsigned long st_ino; // ino_t + unsigned long st_mode; // mode_t + unsigned short st_nlink; // nlink_t + unsigned st_uid; // uid_t + unsigned st_gid; // gid_t + unsigned st_rdev; + long st_size; // off_t + unsigned long st_blksize; + unsigned long st_blocks; + unsigned long st_atime_; + unsigned long st_atime_nsec_; + unsigned long st_mtime_; + unsigned long st_mtime_nsec_; + unsigned long st_ctime_; + unsigned long st_ctime_nsec_; + unsigned long __unused4; + unsigned long __unused5; +}; +#elif (defined(__mips__) && _MIPS_SIM != _MIPS_SIM_ABI64) +struct kernel_stat { + unsigned st_dev; + int st_pad1[3]; + unsigned st_ino; + unsigned st_mode; + unsigned st_nlink; + unsigned st_uid; + unsigned st_gid; + unsigned st_rdev; + int st_pad2[2]; + long st_size; + int st_pad3; + long st_atime_; + long st_atime_nsec_; + long st_mtime_; + long st_mtime_nsec_; + long st_ctime_; + long st_ctime_nsec_; + int st_blksize; + int st_blocks; + int st_pad4[14]; +}; +#elif defined(__aarch64__) +struct kernel_stat { + unsigned long st_dev; + unsigned long st_ino; + unsigned int st_mode; + unsigned int st_nlink; + unsigned int st_uid; + unsigned int st_gid; + unsigned long st_rdev; + unsigned long __pad1; + long st_size; + int st_blksize; + int __pad2; + long st_blocks; + long st_atime_; + unsigned long st_atime_nsec_; + long st_mtime_; + unsigned long st_mtime_nsec_; + long st_ctime_; + unsigned long st_ctime_nsec_; + unsigned int __unused4; + unsigned int __unused5; +}; +#endif + +/* include/asm-{arm,aarch64,i386,mips,x86_64,ppc}/statfs.h */ +#ifdef __mips__ +#if _MIPS_SIM != _MIPS_SIM_ABI64 +struct kernel_statfs64 { + unsigned long f_type; + unsigned long f_bsize; + unsigned long f_frsize; + unsigned long __pad; + unsigned long long f_blocks; + unsigned long long f_bfree; + unsigned long long f_files; + unsigned long long f_ffree; + unsigned long long f_bavail; + struct { int val[2]; } f_fsid; + unsigned long f_namelen; + unsigned long f_spare[6]; +}; +#endif +#elif !defined(__x86_64__) +struct kernel_statfs64 { + unsigned long f_type; + unsigned long f_bsize; + unsigned long long f_blocks; + unsigned long long f_bfree; + unsigned long long f_bavail; + unsigned long long f_files; + unsigned long long f_ffree; + struct { int val[2]; } f_fsid; + unsigned long f_namelen; + unsigned long f_frsize; + unsigned long f_spare[5]; +}; +#endif + +/* include/asm-{arm,i386,mips,x86_64,ppc,generic}/statfs.h */ +#ifdef __mips__ +struct kernel_statfs { + long f_type; + long f_bsize; + long f_frsize; + long f_blocks; + long f_bfree; + long f_files; + long f_ffree; + 
long f_bavail; + struct { int val[2]; } f_fsid; + long f_namelen; + long f_spare[6]; +}; +#elif defined(__x86_64__) +struct kernel_statfs { + /* x86_64 actually defines all these fields as signed, whereas all other */ + /* platforms define them as unsigned. Leaving them at unsigned should not */ + /* cause any problems. Make sure these are 64-bit even on x32. */ + uint64_t f_type; + uint64_t f_bsize; + uint64_t f_blocks; + uint64_t f_bfree; + uint64_t f_bavail; + uint64_t f_files; + uint64_t f_ffree; + struct { int val[2]; } f_fsid; + uint64_t f_namelen; + uint64_t f_frsize; + uint64_t f_spare[5]; +}; +#else +struct kernel_statfs { + unsigned long f_type; + unsigned long f_bsize; + unsigned long f_blocks; + unsigned long f_bfree; + unsigned long f_bavail; + unsigned long f_files; + unsigned long f_ffree; + struct { int val[2]; } f_fsid; + unsigned long f_namelen; + unsigned long f_frsize; + unsigned long f_spare[5]; +}; +#endif + + +/* Definitions missing from the standard header files */ +#ifndef O_DIRECTORY +#if defined(__ARM_ARCH_3__) || defined(__ARM_EABI__) || defined(__aarch64__) +#define O_DIRECTORY 0040000 +#else +#define O_DIRECTORY 0200000 +#endif +#endif +#ifndef NT_PRXFPREG +#define NT_PRXFPREG 0x46e62b7f +#endif +#ifndef PTRACE_GETFPXREGS +#define PTRACE_GETFPXREGS ((enum __ptrace_request)18) +#endif +#ifndef PR_GET_DUMPABLE +#define PR_GET_DUMPABLE 3 +#endif +#ifndef PR_SET_DUMPABLE +#define PR_SET_DUMPABLE 4 +#endif +#ifndef PR_GET_SECCOMP +#define PR_GET_SECCOMP 21 +#endif +#ifndef PR_SET_SECCOMP +#define PR_SET_SECCOMP 22 +#endif +#ifndef AT_FDCWD +#define AT_FDCWD (-100) +#endif +#ifndef AT_SYMLINK_NOFOLLOW +#define AT_SYMLINK_NOFOLLOW 0x100 +#endif +#ifndef AT_REMOVEDIR +#define AT_REMOVEDIR 0x200 +#endif +#ifndef MREMAP_FIXED +#define MREMAP_FIXED 2 +#endif +#ifndef SA_RESTORER +#define SA_RESTORER 0x04000000 +#endif +#ifndef CPUCLOCK_PROF +#define CPUCLOCK_PROF 0 +#endif +#ifndef CPUCLOCK_VIRT +#define CPUCLOCK_VIRT 1 +#endif +#ifndef CPUCLOCK_SCHED +#define CPUCLOCK_SCHED 2 +#endif +#ifndef CPUCLOCK_PERTHREAD_MASK +#define CPUCLOCK_PERTHREAD_MASK 4 +#endif +#ifndef MAKE_PROCESS_CPUCLOCK +#define MAKE_PROCESS_CPUCLOCK(pid, clock) \ + ((~(int)(pid) << 3) | (int)(clock)) +#endif +#ifndef MAKE_THREAD_CPUCLOCK +#define MAKE_THREAD_CPUCLOCK(tid, clock) \ + ((~(int)(tid) << 3) | (int)((clock) | CPUCLOCK_PERTHREAD_MASK)) +#endif + +#ifndef FUTEX_WAIT +#define FUTEX_WAIT 0 +#endif +#ifndef FUTEX_WAKE +#define FUTEX_WAKE 1 +#endif +#ifndef FUTEX_FD +#define FUTEX_FD 2 +#endif +#ifndef FUTEX_REQUEUE +#define FUTEX_REQUEUE 3 +#endif +#ifndef FUTEX_CMP_REQUEUE +#define FUTEX_CMP_REQUEUE 4 +#endif +#ifndef FUTEX_WAKE_OP +#define FUTEX_WAKE_OP 5 +#endif +#ifndef FUTEX_LOCK_PI +#define FUTEX_LOCK_PI 6 +#endif +#ifndef FUTEX_UNLOCK_PI +#define FUTEX_UNLOCK_PI 7 +#endif +#ifndef FUTEX_TRYLOCK_PI +#define FUTEX_TRYLOCK_PI 8 +#endif +#ifndef FUTEX_PRIVATE_FLAG +#define FUTEX_PRIVATE_FLAG 128 +#endif +#ifndef FUTEX_CMD_MASK +#define FUTEX_CMD_MASK ~FUTEX_PRIVATE_FLAG +#endif +#ifndef FUTEX_WAIT_PRIVATE +#define FUTEX_WAIT_PRIVATE (FUTEX_WAIT | FUTEX_PRIVATE_FLAG) +#endif +#ifndef FUTEX_WAKE_PRIVATE +#define FUTEX_WAKE_PRIVATE (FUTEX_WAKE | FUTEX_PRIVATE_FLAG) +#endif +#ifndef FUTEX_REQUEUE_PRIVATE +#define FUTEX_REQUEUE_PRIVATE (FUTEX_REQUEUE | FUTEX_PRIVATE_FLAG) +#endif +#ifndef FUTEX_CMP_REQUEUE_PRIVATE +#define FUTEX_CMP_REQUEUE_PRIVATE (FUTEX_CMP_REQUEUE | FUTEX_PRIVATE_FLAG) +#endif +#ifndef FUTEX_WAKE_OP_PRIVATE +#define FUTEX_WAKE_OP_PRIVATE (FUTEX_WAKE_OP | FUTEX_PRIVATE_FLAG) 
+#endif +#ifndef FUTEX_LOCK_PI_PRIVATE +#define FUTEX_LOCK_PI_PRIVATE (FUTEX_LOCK_PI | FUTEX_PRIVATE_FLAG) +#endif +#ifndef FUTEX_UNLOCK_PI_PRIVATE +#define FUTEX_UNLOCK_PI_PRIVATE (FUTEX_UNLOCK_PI | FUTEX_PRIVATE_FLAG) +#endif +#ifndef FUTEX_TRYLOCK_PI_PRIVATE +#define FUTEX_TRYLOCK_PI_PRIVATE (FUTEX_TRYLOCK_PI | FUTEX_PRIVATE_FLAG) +#endif + + +#if defined(__x86_64__) +#ifndef ARCH_SET_GS +#define ARCH_SET_GS 0x1001 +#endif +#ifndef ARCH_GET_GS +#define ARCH_GET_GS 0x1004 +#endif +#endif + +#if defined(__i386__) +#ifndef __NR_quotactl +#define __NR_quotactl 131 +#endif +#ifndef __NR_setresuid +#define __NR_setresuid 164 +#define __NR_getresuid 165 +#define __NR_setresgid 170 +#define __NR_getresgid 171 +#endif +#ifndef __NR_rt_sigaction +#define __NR_rt_sigreturn 173 +#define __NR_rt_sigaction 174 +#define __NR_rt_sigprocmask 175 +#define __NR_rt_sigpending 176 +#define __NR_rt_sigsuspend 179 +#endif +#ifndef __NR_pread64 +#define __NR_pread64 180 +#endif +#ifndef __NR_pwrite64 +#define __NR_pwrite64 181 +#endif +#ifndef __NR_ugetrlimit +#define __NR_ugetrlimit 191 +#endif +#ifndef __NR_stat64 +#define __NR_stat64 195 +#endif +#ifndef __NR_fstat64 +#define __NR_fstat64 197 +#endif +#ifndef __NR_setresuid32 +#define __NR_setresuid32 208 +#define __NR_getresuid32 209 +#define __NR_setresgid32 210 +#define __NR_getresgid32 211 +#endif +#ifndef __NR_setfsuid32 +#define __NR_setfsuid32 215 +#define __NR_setfsgid32 216 +#endif +#ifndef __NR_getdents64 +#define __NR_getdents64 220 +#endif +#ifndef __NR_gettid +#define __NR_gettid 224 +#endif +#ifndef __NR_readahead +#define __NR_readahead 225 +#endif +#ifndef __NR_setxattr +#define __NR_setxattr 226 +#endif +#ifndef __NR_lsetxattr +#define __NR_lsetxattr 227 +#endif +#ifndef __NR_getxattr +#define __NR_getxattr 229 +#endif +#ifndef __NR_lgetxattr +#define __NR_lgetxattr 230 +#endif +#ifndef __NR_listxattr +#define __NR_listxattr 232 +#endif +#ifndef __NR_llistxattr +#define __NR_llistxattr 233 +#endif +#ifndef __NR_tkill +#define __NR_tkill 238 +#endif +#ifndef __NR_futex +#define __NR_futex 240 +#endif +#ifndef __NR_sched_setaffinity +#define __NR_sched_setaffinity 241 +#define __NR_sched_getaffinity 242 +#endif +#ifndef __NR_set_tid_address +#define __NR_set_tid_address 258 +#endif +#ifndef __NR_clock_gettime +#define __NR_clock_gettime 265 +#endif +#ifndef __NR_clock_getres +#define __NR_clock_getres 266 +#endif +#ifndef __NR_statfs64 +#define __NR_statfs64 268 +#endif +#ifndef __NR_fstatfs64 +#define __NR_fstatfs64 269 +#endif +#ifndef __NR_fadvise64_64 +#define __NR_fadvise64_64 272 +#endif +#ifndef __NR_ioprio_set +#define __NR_ioprio_set 289 +#endif +#ifndef __NR_ioprio_get +#define __NR_ioprio_get 290 +#endif +#ifndef __NR_openat +#define __NR_openat 295 +#endif +#ifndef __NR_fstatat64 +#define __NR_fstatat64 300 +#endif +#ifndef __NR_unlinkat +#define __NR_unlinkat 301 +#endif +#ifndef __NR_move_pages +#define __NR_move_pages 317 +#endif +#ifndef __NR_getcpu +#define __NR_getcpu 318 +#endif +#ifndef __NR_fallocate +#define __NR_fallocate 324 +#endif +/* End of i386 definitions */ +#elif defined(__ARM_ARCH_3__) || defined(__ARM_EABI__) +#ifndef __NR_setresuid +#define __NR_setresuid (__NR_SYSCALL_BASE + 164) +#define __NR_getresuid (__NR_SYSCALL_BASE + 165) +#define __NR_setresgid (__NR_SYSCALL_BASE + 170) +#define __NR_getresgid (__NR_SYSCALL_BASE + 171) +#endif +#ifndef __NR_rt_sigaction +#define __NR_rt_sigreturn (__NR_SYSCALL_BASE + 173) +#define __NR_rt_sigaction (__NR_SYSCALL_BASE + 174) +#define __NR_rt_sigprocmask 
(__NR_SYSCALL_BASE + 175) +#define __NR_rt_sigpending (__NR_SYSCALL_BASE + 176) +#define __NR_rt_sigsuspend (__NR_SYSCALL_BASE + 179) +#endif +#ifndef __NR_pread64 +#define __NR_pread64 (__NR_SYSCALL_BASE + 180) +#endif +#ifndef __NR_pwrite64 +#define __NR_pwrite64 (__NR_SYSCALL_BASE + 181) +#endif +#ifndef __NR_ugetrlimit +#define __NR_ugetrlimit (__NR_SYSCALL_BASE + 191) +#endif +#ifndef __NR_stat64 +#define __NR_stat64 (__NR_SYSCALL_BASE + 195) +#endif +#ifndef __NR_fstat64 +#define __NR_fstat64 (__NR_SYSCALL_BASE + 197) +#endif +#ifndef __NR_setresuid32 +#define __NR_setresuid32 (__NR_SYSCALL_BASE + 208) +#define __NR_getresuid32 (__NR_SYSCALL_BASE + 209) +#define __NR_setresgid32 (__NR_SYSCALL_BASE + 210) +#define __NR_getresgid32 (__NR_SYSCALL_BASE + 211) +#endif +#ifndef __NR_setfsuid32 +#define __NR_setfsuid32 (__NR_SYSCALL_BASE + 215) +#define __NR_setfsgid32 (__NR_SYSCALL_BASE + 216) +#endif +#ifndef __NR_getdents64 +#define __NR_getdents64 (__NR_SYSCALL_BASE + 217) +#endif +#ifndef __NR_gettid +#define __NR_gettid (__NR_SYSCALL_BASE + 224) +#endif +#ifndef __NR_readahead +#define __NR_readahead (__NR_SYSCALL_BASE + 225) +#endif +#ifndef __NR_setxattr +#define __NR_setxattr (__NR_SYSCALL_BASE + 226) +#endif +#ifndef __NR_lsetxattr +#define __NR_lsetxattr (__NR_SYSCALL_BASE + 227) +#endif +#ifndef __NR_getxattr +#define __NR_getxattr (__NR_SYSCALL_BASE + 229) +#endif +#ifndef __NR_lgetxattr +#define __NR_lgetxattr (__NR_SYSCALL_BASE + 230) +#endif +#ifndef __NR_listxattr +#define __NR_listxattr (__NR_SYSCALL_BASE + 232) +#endif +#ifndef __NR_llistxattr +#define __NR_llistxattr (__NR_SYSCALL_BASE + 233) +#endif +#ifndef __NR_tkill +#define __NR_tkill (__NR_SYSCALL_BASE + 238) +#endif +#ifndef __NR_futex +#define __NR_futex (__NR_SYSCALL_BASE + 240) +#endif +#ifndef __NR_sched_setaffinity +#define __NR_sched_setaffinity (__NR_SYSCALL_BASE + 241) +#define __NR_sched_getaffinity (__NR_SYSCALL_BASE + 242) +#endif +#ifndef __NR_set_tid_address +#define __NR_set_tid_address (__NR_SYSCALL_BASE + 256) +#endif +#ifndef __NR_clock_gettime +#define __NR_clock_gettime (__NR_SYSCALL_BASE + 263) +#endif +#ifndef __NR_clock_getres +#define __NR_clock_getres (__NR_SYSCALL_BASE + 264) +#endif +#ifndef __NR_statfs64 +#define __NR_statfs64 (__NR_SYSCALL_BASE + 266) +#endif +#ifndef __NR_fstatfs64 +#define __NR_fstatfs64 (__NR_SYSCALL_BASE + 267) +#endif +#ifndef __NR_ioprio_set +#define __NR_ioprio_set (__NR_SYSCALL_BASE + 314) +#endif +#ifndef __NR_ioprio_get +#define __NR_ioprio_get (__NR_SYSCALL_BASE + 315) +#endif +#ifndef __NR_move_pages +#define __NR_move_pages (__NR_SYSCALL_BASE + 344) +#endif +#ifndef __NR_getcpu +#define __NR_getcpu (__NR_SYSCALL_BASE + 345) +#endif +/* End of ARM 3/EABI definitions */ +#elif defined(__aarch64__) +#ifndef __NR_setxattr +#define __NR_setxattr 5 +#endif +#ifndef __NR_lsetxattr +#define __NR_lsetxattr 6 +#endif +#ifndef __NR_getxattr +#define __NR_getxattr 8 +#endif +#ifndef __NR_lgetxattr +#define __NR_lgetxattr 9 +#endif +#ifndef __NR_listxattr +#define __NR_listxattr 11 +#endif +#ifndef __NR_llistxattr +#define __NR_llistxattr 12 +#endif +#ifndef __NR_ioprio_set +#define __NR_ioprio_set 30 +#endif +#ifndef __NR_ioprio_get +#define __NR_ioprio_get 31 +#endif +#ifndef __NR_unlinkat +#define __NR_unlinkat 35 +#endif +#ifndef __NR_fallocate +#define __NR_fallocate 47 +#endif +#ifndef __NR_openat +#define __NR_openat 56 +#endif +#ifndef __NR_quotactl +#define __NR_quotactl 60 +#endif +#ifndef __NR_getdents64 +#define __NR_getdents64 61 +#endif +#ifndef 
__NR_getdents +#define __NR_getdents __NR_getdents64 +#endif +#ifndef __NR_pread64 +#define __NR_pread64 67 +#endif +#ifndef __NR_pwrite64 +#define __NR_pwrite64 68 +#endif +#ifndef __NR_ppoll +#define __NR_ppoll 73 +#endif +#ifndef __NR_readlinkat +#define __NR_readlinkat 78 +#endif +#ifndef __NR_newfstatat +#define __NR_newfstatat 79 +#endif +#ifndef __NR_set_tid_address +#define __NR_set_tid_address 96 +#endif +#ifndef __NR_futex +#define __NR_futex 98 +#endif +#ifndef __NR_clock_gettime +#define __NR_clock_gettime 113 +#endif +#ifndef __NR_clock_getres +#define __NR_clock_getres 114 +#endif +#ifndef __NR_sched_setaffinity +#define __NR_sched_setaffinity 122 +#define __NR_sched_getaffinity 123 +#endif +#ifndef __NR_tkill +#define __NR_tkill 130 +#endif +#ifndef __NR_setresuid +#define __NR_setresuid 147 +#define __NR_getresuid 148 +#define __NR_setresgid 149 +#define __NR_getresgid 150 +#endif +#ifndef __NR_gettid +#define __NR_gettid 178 +#endif +#ifndef __NR_readahead +#define __NR_readahead 213 +#endif +#ifndef __NR_fadvise64 +#define __NR_fadvise64 223 +#endif +#ifndef __NR_move_pages +#define __NR_move_pages 239 +#endif +/* End of aarch64 definitions */ +#elif defined(__x86_64__) +#ifndef __NR_pread64 +#define __NR_pread64 17 +#endif +#ifndef __NR_pwrite64 +#define __NR_pwrite64 18 +#endif +#ifndef __NR_setresuid +#define __NR_setresuid 117 +#define __NR_getresuid 118 +#define __NR_setresgid 119 +#define __NR_getresgid 120 +#endif +#ifndef __NR_quotactl +#define __NR_quotactl 179 +#endif +#ifndef __NR_gettid +#define __NR_gettid 186 +#endif +#ifndef __NR_readahead +#define __NR_readahead 187 +#endif +#ifndef __NR_setxattr +#define __NR_setxattr 188 +#endif +#ifndef __NR_lsetxattr +#define __NR_lsetxattr 189 +#endif +#ifndef __NR_getxattr +#define __NR_getxattr 191 +#endif +#ifndef __NR_lgetxattr +#define __NR_lgetxattr 192 +#endif +#ifndef __NR_listxattr +#define __NR_listxattr 194 +#endif +#ifndef __NR_llistxattr +#define __NR_llistxattr 195 +#endif +#ifndef __NR_tkill +#define __NR_tkill 200 +#endif +#ifndef __NR_futex +#define __NR_futex 202 +#endif +#ifndef __NR_sched_setaffinity +#define __NR_sched_setaffinity 203 +#define __NR_sched_getaffinity 204 +#endif +#ifndef __NR_getdents64 +#define __NR_getdents64 217 +#endif +#ifndef __NR_set_tid_address +#define __NR_set_tid_address 218 +#endif +#ifndef __NR_fadvise64 +#define __NR_fadvise64 221 +#endif +#ifndef __NR_clock_gettime +#define __NR_clock_gettime 228 +#endif +#ifndef __NR_clock_getres +#define __NR_clock_getres 229 +#endif +#ifndef __NR_ioprio_set +#define __NR_ioprio_set 251 +#endif +#ifndef __NR_ioprio_get +#define __NR_ioprio_get 252 +#endif +#ifndef __NR_openat +#define __NR_openat 257 +#endif +#ifndef __NR_newfstatat +#define __NR_newfstatat 262 +#endif +#ifndef __NR_unlinkat +#define __NR_unlinkat 263 +#endif +#ifndef __NR_move_pages +#define __NR_move_pages 279 +#endif +#ifndef __NR_fallocate +#define __NR_fallocate 285 +#endif +/* End of x86-64 definitions */ +#elif defined(__mips__) +#if _MIPS_SIM == _MIPS_SIM_ABI32 +#ifndef __NR_setresuid +#define __NR_setresuid (__NR_Linux + 185) +#define __NR_getresuid (__NR_Linux + 186) +#define __NR_setresgid (__NR_Linux + 190) +#define __NR_getresgid (__NR_Linux + 191) +#endif +#ifndef __NR_rt_sigaction +#define __NR_rt_sigreturn (__NR_Linux + 193) +#define __NR_rt_sigaction (__NR_Linux + 194) +#define __NR_rt_sigprocmask (__NR_Linux + 195) +#define __NR_rt_sigpending (__NR_Linux + 196) +#define __NR_rt_sigsuspend (__NR_Linux + 199) +#endif +#ifndef __NR_pread64 +#define 
__NR_pread64 (__NR_Linux + 200) +#endif +#ifndef __NR_pwrite64 +#define __NR_pwrite64 (__NR_Linux + 201) +#endif +#ifndef __NR_stat64 +#define __NR_stat64 (__NR_Linux + 213) +#endif +#ifndef __NR_fstat64 +#define __NR_fstat64 (__NR_Linux + 215) +#endif +#ifndef __NR_getdents64 +#define __NR_getdents64 (__NR_Linux + 219) +#endif +#ifndef __NR_gettid +#define __NR_gettid (__NR_Linux + 222) +#endif +#ifndef __NR_readahead +#define __NR_readahead (__NR_Linux + 223) +#endif +#ifndef __NR_setxattr +#define __NR_setxattr (__NR_Linux + 224) +#endif +#ifndef __NR_lsetxattr +#define __NR_lsetxattr (__NR_Linux + 225) +#endif +#ifndef __NR_getxattr +#define __NR_getxattr (__NR_Linux + 227) +#endif +#ifndef __NR_lgetxattr +#define __NR_lgetxattr (__NR_Linux + 228) +#endif +#ifndef __NR_listxattr +#define __NR_listxattr (__NR_Linux + 230) +#endif +#ifndef __NR_llistxattr +#define __NR_llistxattr (__NR_Linux + 231) +#endif +#ifndef __NR_tkill +#define __NR_tkill (__NR_Linux + 236) +#endif +#ifndef __NR_futex +#define __NR_futex (__NR_Linux + 238) +#endif +#ifndef __NR_sched_setaffinity +#define __NR_sched_setaffinity (__NR_Linux + 239) +#define __NR_sched_getaffinity (__NR_Linux + 240) +#endif +#ifndef __NR_set_tid_address +#define __NR_set_tid_address (__NR_Linux + 252) +#endif +#ifndef __NR_statfs64 +#define __NR_statfs64 (__NR_Linux + 255) +#endif +#ifndef __NR_fstatfs64 +#define __NR_fstatfs64 (__NR_Linux + 256) +#endif +#ifndef __NR_clock_gettime +#define __NR_clock_gettime (__NR_Linux + 263) +#endif +#ifndef __NR_clock_getres +#define __NR_clock_getres (__NR_Linux + 264) +#endif +#ifndef __NR_openat +#define __NR_openat (__NR_Linux + 288) +#endif +#ifndef __NR_fstatat +#define __NR_fstatat (__NR_Linux + 293) +#endif +#ifndef __NR_unlinkat +#define __NR_unlinkat (__NR_Linux + 294) +#endif +#ifndef __NR_move_pages +#define __NR_move_pages (__NR_Linux + 308) +#endif +#ifndef __NR_getcpu +#define __NR_getcpu (__NR_Linux + 312) +#endif +#ifndef __NR_ioprio_set +#define __NR_ioprio_set (__NR_Linux + 314) +#endif +#ifndef __NR_ioprio_get +#define __NR_ioprio_get (__NR_Linux + 315) +#endif +/* End of MIPS (old 32bit API) definitions */ +#elif _MIPS_SIM == _MIPS_SIM_ABI64 +#ifndef __NR_pread64 +#define __NR_pread64 (__NR_Linux + 16) +#endif +#ifndef __NR_pwrite64 +#define __NR_pwrite64 (__NR_Linux + 17) +#endif +#ifndef __NR_setresuid +#define __NR_setresuid (__NR_Linux + 115) +#define __NR_getresuid (__NR_Linux + 116) +#define __NR_setresgid (__NR_Linux + 117) +#define __NR_getresgid (__NR_Linux + 118) +#endif +#ifndef __NR_gettid +#define __NR_gettid (__NR_Linux + 178) +#endif +#ifndef __NR_readahead +#define __NR_readahead (__NR_Linux + 179) +#endif +#ifndef __NR_setxattr +#define __NR_setxattr (__NR_Linux + 180) +#endif +#ifndef __NR_lsetxattr +#define __NR_lsetxattr (__NR_Linux + 181) +#endif +#ifndef __NR_getxattr +#define __NR_getxattr (__NR_Linux + 183) +#endif +#ifndef __NR_lgetxattr +#define __NR_lgetxattr (__NR_Linux + 184) +#endif +#ifndef __NR_listxattr +#define __NR_listxattr (__NR_Linux + 186) +#endif +#ifndef __NR_llistxattr +#define __NR_llistxattr (__NR_Linux + 187) +#endif +#ifndef __NR_tkill +#define __NR_tkill (__NR_Linux + 192) +#endif +#ifndef __NR_futex +#define __NR_futex (__NR_Linux + 194) +#endif +#ifndef __NR_sched_setaffinity +#define __NR_sched_setaffinity (__NR_Linux + 195) +#define __NR_sched_getaffinity (__NR_Linux + 196) +#endif +#ifndef __NR_set_tid_address +#define __NR_set_tid_address (__NR_Linux + 212) +#endif +#ifndef __NR_clock_gettime +#define __NR_clock_gettime 
(__NR_Linux + 222) +#endif +#ifndef __NR_clock_getres +#define __NR_clock_getres (__NR_Linux + 223) +#endif +#ifndef __NR_openat +#define __NR_openat (__NR_Linux + 247) +#endif +#ifndef __NR_fstatat +#define __NR_fstatat (__NR_Linux + 252) +#endif +#ifndef __NR_unlinkat +#define __NR_unlinkat (__NR_Linux + 253) +#endif +#ifndef __NR_move_pages +#define __NR_move_pages (__NR_Linux + 267) +#endif +#ifndef __NR_getcpu +#define __NR_getcpu (__NR_Linux + 271) +#endif +#ifndef __NR_ioprio_set +#define __NR_ioprio_set (__NR_Linux + 273) +#endif +#ifndef __NR_ioprio_get +#define __NR_ioprio_get (__NR_Linux + 274) +#endif +/* End of MIPS (64bit API) definitions */ +#else +#ifndef __NR_setresuid +#define __NR_setresuid (__NR_Linux + 115) +#define __NR_getresuid (__NR_Linux + 116) +#define __NR_setresgid (__NR_Linux + 117) +#define __NR_getresgid (__NR_Linux + 118) +#endif +#ifndef __NR_gettid +#define __NR_gettid (__NR_Linux + 178) +#endif +#ifndef __NR_readahead +#define __NR_readahead (__NR_Linux + 179) +#endif +#ifndef __NR_setxattr +#define __NR_setxattr (__NR_Linux + 180) +#endif +#ifndef __NR_lsetxattr +#define __NR_lsetxattr (__NR_Linux + 181) +#endif +#ifndef __NR_getxattr +#define __NR_getxattr (__NR_Linux + 183) +#endif +#ifndef __NR_lgetxattr +#define __NR_lgetxattr (__NR_Linux + 184) +#endif +#ifndef __NR_listxattr +#define __NR_listxattr (__NR_Linux + 186) +#endif +#ifndef __NR_llistxattr +#define __NR_llistxattr (__NR_Linux + 187) +#endif +#ifndef __NR_tkill +#define __NR_tkill (__NR_Linux + 192) +#endif +#ifndef __NR_futex +#define __NR_futex (__NR_Linux + 194) +#endif +#ifndef __NR_sched_setaffinity +#define __NR_sched_setaffinity (__NR_Linux + 195) +#define __NR_sched_getaffinity (__NR_Linux + 196) +#endif +#ifndef __NR_set_tid_address +#define __NR_set_tid_address (__NR_Linux + 213) +#endif +#ifndef __NR_statfs64 +#define __NR_statfs64 (__NR_Linux + 217) +#endif +#ifndef __NR_fstatfs64 +#define __NR_fstatfs64 (__NR_Linux + 218) +#endif +#ifndef __NR_clock_gettime +#define __NR_clock_gettime (__NR_Linux + 226) +#endif +#ifndef __NR_clock_getres +#define __NR_clock_getres (__NR_Linux + 227) +#endif +#ifndef __NR_openat +#define __NR_openat (__NR_Linux + 251) +#endif +#ifndef __NR_fstatat +#define __NR_fstatat (__NR_Linux + 256) +#endif +#ifndef __NR_unlinkat +#define __NR_unlinkat (__NR_Linux + 257) +#endif +#ifndef __NR_move_pages +#define __NR_move_pages (__NR_Linux + 271) +#endif +#ifndef __NR_getcpu +#define __NR_getcpu (__NR_Linux + 275) +#endif +#ifndef __NR_ioprio_set +#define __NR_ioprio_set (__NR_Linux + 277) +#endif +#ifndef __NR_ioprio_get +#define __NR_ioprio_get (__NR_Linux + 278) +#endif +/* End of MIPS (new 32bit API) definitions */ +#endif +/* End of MIPS definitions */ +#elif defined(__PPC__) +#ifndef __NR_setfsuid +#define __NR_setfsuid 138 +#define __NR_setfsgid 139 +#endif +#ifndef __NR_setresuid +#define __NR_setresuid 164 +#define __NR_getresuid 165 +#define __NR_setresgid 169 +#define __NR_getresgid 170 +#endif +#ifndef __NR_rt_sigaction +#define __NR_rt_sigreturn 172 +#define __NR_rt_sigaction 173 +#define __NR_rt_sigprocmask 174 +#define __NR_rt_sigpending 175 +#define __NR_rt_sigsuspend 178 +#endif +#ifndef __NR_pread64 +#define __NR_pread64 179 +#endif +#ifndef __NR_pwrite64 +#define __NR_pwrite64 180 +#endif +#ifndef __NR_ugetrlimit +#define __NR_ugetrlimit 190 +#endif +#ifndef __NR_readahead +#define __NR_readahead 191 +#endif +#ifndef __NR_stat64 +#define __NR_stat64 195 +#endif +#ifndef __NR_fstat64 +#define __NR_fstat64 197 +#endif +#ifndef 
__NR_getdents64 +#define __NR_getdents64 202 +#endif +#ifndef __NR_gettid +#define __NR_gettid 207 +#endif +#ifndef __NR_tkill +#define __NR_tkill 208 +#endif +#ifndef __NR_setxattr +#define __NR_setxattr 209 +#endif +#ifndef __NR_lsetxattr +#define __NR_lsetxattr 210 +#endif +#ifndef __NR_getxattr +#define __NR_getxattr 212 +#endif +#ifndef __NR_lgetxattr +#define __NR_lgetxattr 213 +#endif +#ifndef __NR_listxattr +#define __NR_listxattr 215 +#endif +#ifndef __NR_llistxattr +#define __NR_llistxattr 216 +#endif +#ifndef __NR_futex +#define __NR_futex 221 +#endif +#ifndef __NR_sched_setaffinity +#define __NR_sched_setaffinity 222 +#define __NR_sched_getaffinity 223 +#endif +#ifndef __NR_set_tid_address +#define __NR_set_tid_address 232 +#endif +#ifndef __NR_clock_gettime +#define __NR_clock_gettime 246 +#endif +#ifndef __NR_clock_getres +#define __NR_clock_getres 247 +#endif +#ifndef __NR_statfs64 +#define __NR_statfs64 252 +#endif +#ifndef __NR_fstatfs64 +#define __NR_fstatfs64 253 +#endif +#ifndef __NR_fadvise64_64 +#define __NR_fadvise64_64 254 +#endif +#ifndef __NR_ioprio_set +#define __NR_ioprio_set 273 +#endif +#ifndef __NR_ioprio_get +#define __NR_ioprio_get 274 +#endif +#ifndef __NR_openat +#define __NR_openat 286 +#endif +#ifndef __NR_fstatat64 +#define __NR_fstatat64 291 +#endif +#ifndef __NR_unlinkat +#define __NR_unlinkat 292 +#endif +#ifndef __NR_move_pages +#define __NR_move_pages 301 +#endif +#ifndef __NR_getcpu +#define __NR_getcpu 302 +#endif +/* End of powerpc definitions */ +#endif + + +/* After forking, we must make sure to only call system calls. */ +#if defined(__BOUNDED_POINTERS__) + #error "Need to port invocations of syscalls for bounded ptrs" +#else + /* The core dumper and the thread lister get executed after threads + * have been suspended. As a consequence, we cannot call any functions + * that acquire locks. Unfortunately, libc wraps most system calls + * (e.g. in order to implement pthread_atfork, and to make calls + * cancellable), which means we cannot call these functions. Instead, + * we have to call syscall() directly. + */ + #undef LSS_ERRNO + #ifdef SYS_ERRNO + /* Allow the including file to override the location of errno. This can + * be useful when using clone() with the CLONE_VM option. + */ + #define LSS_ERRNO SYS_ERRNO + #else + #define LSS_ERRNO errno + #endif + + #undef LSS_INLINE + #ifdef SYS_INLINE + #define LSS_INLINE SYS_INLINE + #else + #define LSS_INLINE static inline + #endif + + /* Allow the including file to override the prefix used for all new + * system calls. By default, it will be set to "sys_".
+ */ + #undef LSS_NAME + #ifndef SYS_PREFIX + #define LSS_NAME(name) sys_##name + #elif defined(SYS_PREFIX) && SYS_PREFIX < 0 + #define LSS_NAME(name) name + #elif defined(SYS_PREFIX) && SYS_PREFIX == 0 + #define LSS_NAME(name) sys0_##name + #elif defined(SYS_PREFIX) && SYS_PREFIX == 1 + #define LSS_NAME(name) sys1_##name + #elif defined(SYS_PREFIX) && SYS_PREFIX == 2 + #define LSS_NAME(name) sys2_##name + #elif defined(SYS_PREFIX) && SYS_PREFIX == 3 + #define LSS_NAME(name) sys3_##name + #elif defined(SYS_PREFIX) && SYS_PREFIX == 4 + #define LSS_NAME(name) sys4_##name + #elif defined(SYS_PREFIX) && SYS_PREFIX == 5 + #define LSS_NAME(name) sys5_##name + #elif defined(SYS_PREFIX) && SYS_PREFIX == 6 + #define LSS_NAME(name) sys6_##name + #elif defined(SYS_PREFIX) && SYS_PREFIX == 7 + #define LSS_NAME(name) sys7_##name + #elif defined(SYS_PREFIX) && SYS_PREFIX == 8 + #define LSS_NAME(name) sys8_##name + #elif defined(SYS_PREFIX) && SYS_PREFIX == 9 + #define LSS_NAME(name) sys9_##name + #endif + + #undef LSS_RETURN + #if (defined(__i386__) || defined(__x86_64__) || defined(__ARM_ARCH_3__) \ + || defined(__ARM_EABI__) || defined(__aarch64__)) + /* Failing system calls return a negative result in the range of + * -1..-4095. These are "errno" values with the sign inverted. + */ + #define LSS_RETURN(type, res) \ + do { \ + if ((unsigned long)(res) >= (unsigned long)(-4095)) { \ + LSS_ERRNO = -(res); \ + res = -1; \ + } \ + return (type) (res); \ + } while (0) + #elif defined(__mips__) + /* On MIPS, failing system calls return -1, and set errno in a + * separate CPU register. + */ + #define LSS_RETURN(type, res, err) \ + do { \ + if (err) { \ + unsigned long __errnovalue = (res); \ + LSS_ERRNO = __errnovalue; \ + res = -1; \ + } \ + return (type) (res); \ + } while (0) + #elif defined(__PPC__) + /* On PPC, failing system calls return -1, and set errno in a + * separate CPU register. See linux/unistd.h. + */ + #define LSS_RETURN(type, res, err) \ + do { \ + if (err & 0x10000000 ) { \ + LSS_ERRNO = (res); \ + res = -1; \ + } \ + return (type) (res); \ + } while (0) + #endif + #if defined(__i386__) + /* In PIC mode (e.g. when building shared libraries), gcc for i386 + * reserves ebx. Unfortunately, most distribution ship with implementations + * of _syscallX() which clobber ebx. + * Also, most definitions of _syscallX() neglect to mark "memory" as being + * clobbered. This causes problems with compilers, that do a better job + * at optimizing across __asm__ calls. + * So, we just have to redefine all of the _syscallX() macros. 
+ */ + #undef LSS_ENTRYPOINT + #ifdef SYS_SYSCALL_ENTRYPOINT + static inline void (**LSS_NAME(get_syscall_entrypoint)(void))(void) { + void (**entrypoint)(void); + asm volatile(".bss\n" + ".align 8\n" + ".globl " SYS_SYSCALL_ENTRYPOINT "\n" + ".common " SYS_SYSCALL_ENTRYPOINT ",8,8\n" + ".previous\n" + /* This logically does 'lea "SYS_SYSCALL_ENTRYPOINT", %0' */ + "call 0f\n" + "0:pop %0\n" + "add $_GLOBAL_OFFSET_TABLE_+[.-0b], %0\n" + "mov " SYS_SYSCALL_ENTRYPOINT "@GOT(%0), %0\n" + : "=r"(entrypoint)); + return entrypoint; + } + + #define LSS_ENTRYPOINT ".bss\n" \ + ".align 8\n" \ + ".globl " SYS_SYSCALL_ENTRYPOINT "\n" \ + ".common " SYS_SYSCALL_ENTRYPOINT ",8,8\n" \ + ".previous\n" \ + /* Check the SYS_SYSCALL_ENTRYPOINT vector */ \ + "push %%eax\n" \ + "call 10000f\n" \ + "10000:pop %%eax\n" \ + "add $_GLOBAL_OFFSET_TABLE_+[.-10000b], %%eax\n" \ + "mov " SYS_SYSCALL_ENTRYPOINT \ + "@GOT(%%eax), %%eax\n" \ + "mov 0(%%eax), %%eax\n" \ + "test %%eax, %%eax\n" \ + "jz 10002f\n" \ + "push %%eax\n" \ + "call 10001f\n" \ + "10001:pop %%eax\n" \ + "add $(10003f-10001b), %%eax\n" \ + "xchg 4(%%esp), %%eax\n" \ + "ret\n" \ + "10002:pop %%eax\n" \ + "int $0x80\n" \ + "10003:\n" + #else + #define LSS_ENTRYPOINT "int $0x80\n" + #endif + #undef LSS_BODY + #define LSS_BODY(type,args...) \ + long __res; \ + __asm__ __volatile__("push %%ebx\n" \ + "movl %2,%%ebx\n" \ + LSS_ENTRYPOINT \ + "pop %%ebx" \ + args \ + : "esp", "memory"); \ + LSS_RETURN(type,__res) + #undef _syscall0 + #define _syscall0(type,name) \ + type LSS_NAME(name)(void) { \ + long __res; \ + __asm__ volatile(LSS_ENTRYPOINT \ + : "=a" (__res) \ + : "0" (__NR_##name) \ + : "esp", "memory"); \ + LSS_RETURN(type,__res); \ + } + #undef _syscall1 + #define _syscall1(type,name,type1,arg1) \ + type LSS_NAME(name)(type1 arg1) { \ + LSS_BODY(type, \ + : "=a" (__res) \ + : "0" (__NR_##name), "ri" ((long)(arg1))); \ + } + #undef _syscall2 + #define _syscall2(type,name,type1,arg1,type2,arg2) \ + type LSS_NAME(name)(type1 arg1,type2 arg2) { \ + LSS_BODY(type, \ + : "=a" (__res) \ + : "0" (__NR_##name),"ri" ((long)(arg1)), "c" ((long)(arg2))); \ + } + #undef _syscall3 + #define _syscall3(type,name,type1,arg1,type2,arg2,type3,arg3) \ + type LSS_NAME(name)(type1 arg1,type2 arg2,type3 arg3) { \ + LSS_BODY(type, \ + : "=a" (__res) \ + : "0" (__NR_##name), "ri" ((long)(arg1)), "c" ((long)(arg2)), \ + "d" ((long)(arg3))); \ + } + #undef _syscall4 + #define _syscall4(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4) { \ + LSS_BODY(type, \ + : "=a" (__res) \ + : "0" (__NR_##name), "ri" ((long)(arg1)), "c" ((long)(arg2)), \ + "d" ((long)(arg3)),"S" ((long)(arg4))); \ + } + #undef _syscall5 + #define _syscall5(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4, \ + type5,arg5) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5) { \ + long __res; \ + __asm__ __volatile__("push %%ebx\n" \ + "movl %2,%%ebx\n" \ + "movl %1,%%eax\n" \ + LSS_ENTRYPOINT \ + "pop %%ebx" \ + : "=a" (__res) \ + : "i" (__NR_##name), "ri" ((long)(arg1)), \ + "c" ((long)(arg2)), "d" ((long)(arg3)), \ + "S" ((long)(arg4)), "D" ((long)(arg5)) \ + : "esp", "memory"); \ + LSS_RETURN(type,__res); \ + } + #undef _syscall6 + #define _syscall6(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4, \ + type5,arg5,type6,arg6) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5, type6 arg6) { \ + long __res; \ + struct { long __a1; long __a6; } __s = 
{ (long)arg1, (long) arg6 }; \ + __asm__ __volatile__("push %%ebp\n" \ + "push %%ebx\n" \ + "movl 4(%2),%%ebp\n" \ + "movl 0(%2), %%ebx\n" \ + "movl %1,%%eax\n" \ + LSS_ENTRYPOINT \ + "pop %%ebx\n" \ + "pop %%ebp" \ + : "=a" (__res) \ + : "i" (__NR_##name), "0" ((long)(&__s)), \ + "c" ((long)(arg2)), "d" ((long)(arg3)), \ + "S" ((long)(arg4)), "D" ((long)(arg5)) \ + : "esp", "memory"); \ + LSS_RETURN(type,__res); \ + } + LSS_INLINE int LSS_NAME(clone)(int (*fn)(void *), void *child_stack, + int flags, void *arg, int *parent_tidptr, + void *newtls, int *child_tidptr) { + long __res; + __asm__ __volatile__(/* if (fn == NULL) + * return -EINVAL; + */ + "movl %3,%%ecx\n" + "jecxz 1f\n" + + /* if (child_stack == NULL) + * return -EINVAL; + */ + "movl %4,%%ecx\n" + "jecxz 1f\n" + + /* Set up alignment of the child stack: + * child_stack = (child_stack & ~0xF) - 20; + */ + "andl $-16,%%ecx\n" + "subl $20,%%ecx\n" + + /* Push "arg" and "fn" onto the stack that will be + * used by the child. + */ + "movl %6,%%eax\n" + "movl %%eax,4(%%ecx)\n" + "movl %3,%%eax\n" + "movl %%eax,(%%ecx)\n" + + /* %eax = syscall(%eax = __NR_clone, + * %ebx = flags, + * %ecx = child_stack, + * %edx = parent_tidptr, + * %esi = newtls, + * %edi = child_tidptr) + * Also, make sure that %ebx gets preserved as it is + * used in PIC mode. + */ + "movl %8,%%esi\n" + "movl %7,%%edx\n" + "movl %5,%%eax\n" + "movl %9,%%edi\n" + "pushl %%ebx\n" + "movl %%eax,%%ebx\n" + "movl %2,%%eax\n" + LSS_ENTRYPOINT + + /* In the parent: restore %ebx + * In the child: move "fn" into %ebx + */ + "popl %%ebx\n" + + /* if (%eax != 0) + * return %eax; + */ + "test %%eax,%%eax\n" + "jnz 1f\n" + + /* In the child, now. Terminate frame pointer chain. + */ + "movl $0,%%ebp\n" + + /* Call "fn". "arg" is already on the stack. + */ + "call *%%ebx\n" + + /* Call _exit(%ebx). Unfortunately older versions + * of gcc restrict the number of arguments that can + * be passed to asm(). So, we need to hard-code the + * system call number. + */ + "movl %%eax,%%ebx\n" + "movl $1,%%eax\n" + LSS_ENTRYPOINT + + /* Return to parent. + */ + "1:\n" + : "=a" (__res) + : "0"(-EINVAL), "i"(__NR_clone), + "m"(fn), "m"(child_stack), "m"(flags), "m"(arg), + "m"(parent_tidptr), "m"(newtls), "m"(child_tidptr) + : "esp", "memory", "ecx", "edx", "esi", "edi"); + LSS_RETURN(int, __res); + } + + #define __NR__fadvise64_64 __NR_fadvise64_64 + LSS_INLINE _syscall6(int, _fadvise64_64, int, fd, + unsigned, offset_lo, unsigned, offset_hi, + unsigned, len_lo, unsigned, len_hi, + int, advice) + + LSS_INLINE int LSS_NAME(fadvise64)(int fd, loff_t offset, + loff_t len, int advice) { + return LSS_NAME(_fadvise64_64)(fd, + (unsigned)offset, (unsigned)(offset >>32), + (unsigned)len, (unsigned)(len >> 32), + advice); + } + + #define __NR__fallocate __NR_fallocate + LSS_INLINE _syscall6(int, _fallocate, int, fd, + int, mode, + unsigned, offset_lo, unsigned, offset_hi, + unsigned, len_lo, unsigned, len_hi) + + LSS_INLINE int LSS_NAME(fallocate)(int fd, int mode, + loff_t offset, loff_t len) { + union { loff_t off; unsigned w[2]; } o = { offset }, l = { len }; + return LSS_NAME(_fallocate)(fd, mode, o.w[0], o.w[1], l.w[0], l.w[1]); + } + + LSS_INLINE _syscall1(int, set_thread_area, void *, u) + LSS_INLINE _syscall1(int, get_thread_area, void *, u) + + LSS_INLINE void (*LSS_NAME(restore_rt)(void))(void) { + /* On i386, the kernel does not know how to return from a signal + * handler. Instead, it relies on user space to provide a + * restorer function that calls the {rt_,}sigreturn() system call. 
+ * Unfortunately, we cannot just reference the glibc version of this + * function, as glibc goes out of its way to make it inaccessible. + */ + void (*res)(void); + __asm__ __volatile__("call 2f\n" + "0:.align 16\n" + "1:movl %1,%%eax\n" + LSS_ENTRYPOINT + "2:popl %0\n" + "addl $(1b-0b),%0\n" + : "=a" (res) + : "i" (__NR_rt_sigreturn)); + return res; + } + LSS_INLINE void (*LSS_NAME(restore)(void))(void) { + /* On i386, the kernel does not know how to return from a signal + * handler. Instead, it relies on user space to provide a + * restorer function that calls the {rt_,}sigreturn() system call. + * Unfortunately, we cannot just reference the glibc version of this + * function, as glibc goes out of its way to make it inaccessible. + */ + void (*res)(void); + __asm__ __volatile__("call 2f\n" + "0:.align 16\n" + "1:pop %%eax\n" + "movl %1,%%eax\n" + LSS_ENTRYPOINT + "2:popl %0\n" + "addl $(1b-0b),%0\n" + : "=a" (res) + : "i" (__NR_sigreturn)); + return res; + } + #elif defined(__x86_64__) + /* There are no known problems with any of the _syscallX() macros + * currently shipping for x86_64, but we still need to be able to define + * our own version so that we can override the location of the errno + * location (e.g. when using the clone() system call with the CLONE_VM + * option). + */ + #undef LSS_ENTRYPOINT + #ifdef SYS_SYSCALL_ENTRYPOINT + static inline void (**LSS_NAME(get_syscall_entrypoint)(void))(void) { + void (**entrypoint)(void); + asm volatile(".bss\n" + ".align 8\n" + ".globl " SYS_SYSCALL_ENTRYPOINT "\n" + ".common " SYS_SYSCALL_ENTRYPOINT ",8,8\n" + ".previous\n" + "mov " SYS_SYSCALL_ENTRYPOINT "@GOTPCREL(%%rip), %0\n" + : "=r"(entrypoint)); + return entrypoint; + } + + #define LSS_ENTRYPOINT \ + ".bss\n" \ + ".align 8\n" \ + ".globl " SYS_SYSCALL_ENTRYPOINT "\n" \ + ".common " SYS_SYSCALL_ENTRYPOINT ",8,8\n" \ + ".previous\n" \ + "mov " SYS_SYSCALL_ENTRYPOINT "@GOTPCREL(%%rip), %%rcx\n" \ + "mov 0(%%rcx), %%rcx\n" \ + "test %%rcx, %%rcx\n" \ + "jz 10001f\n" \ + "call *%%rcx\n" \ + "jmp 10002f\n" \ + "10001:syscall\n" \ + "10002:\n" + + #else + #define LSS_ENTRYPOINT "syscall\n" + #endif + + /* The x32 ABI has 32 bit longs, but the syscall interface is 64 bit. + * We need to explicitly cast to an unsigned 64 bit type to avoid implicit + * sign extension. We can't cast pointers directly because those are + * 32 bits, and gcc will dump ugly warnings about casting from a pointer + * to an integer of a different size. + */ + #undef LSS_SYSCALL_ARG + #define LSS_SYSCALL_ARG(a) ((uint64_t)(uintptr_t)(a)) + #undef _LSS_RETURN + #define _LSS_RETURN(type, res, cast) \ + do { \ + if ((uint64_t)(res) >= (uint64_t)(-4095)) { \ + LSS_ERRNO = -(res); \ + res = -1; \ + } \ + return (type)(cast)(res); \ + } while (0) + #undef LSS_RETURN + #define LSS_RETURN(type, res) _LSS_RETURN(type, res, uintptr_t) + + #undef _LSS_BODY + #define _LSS_BODY(nr, type, name, cast, ...) \ + long long __res; \ + __asm__ __volatile__(LSS_BODY_ASM##nr LSS_ENTRYPOINT \ + : "=a" (__res) \ + : "0" (__NR_##name) LSS_BODY_ARG##nr(__VA_ARGS__) \ + : LSS_BODY_CLOBBER##nr "r11", "rcx", "memory"); \ + _LSS_RETURN(type, __res, cast) + #undef LSS_BODY + #define LSS_BODY(nr, type, name, args...) 
\ + _LSS_BODY(nr, type, name, uintptr_t, ## args) + + #undef LSS_BODY_ASM0 + #undef LSS_BODY_ASM1 + #undef LSS_BODY_ASM2 + #undef LSS_BODY_ASM3 + #undef LSS_BODY_ASM4 + #undef LSS_BODY_ASM5 + #undef LSS_BODY_ASM6 + #define LSS_BODY_ASM0 + #define LSS_BODY_ASM1 LSS_BODY_ASM0 + #define LSS_BODY_ASM2 LSS_BODY_ASM1 + #define LSS_BODY_ASM3 LSS_BODY_ASM2 + #define LSS_BODY_ASM4 LSS_BODY_ASM3 "movq %5,%%r10;" + #define LSS_BODY_ASM5 LSS_BODY_ASM4 "movq %6,%%r8;" + #define LSS_BODY_ASM6 LSS_BODY_ASM5 "movq %7,%%r9;" + + #undef LSS_BODY_CLOBBER0 + #undef LSS_BODY_CLOBBER1 + #undef LSS_BODY_CLOBBER2 + #undef LSS_BODY_CLOBBER3 + #undef LSS_BODY_CLOBBER4 + #undef LSS_BODY_CLOBBER5 + #undef LSS_BODY_CLOBBER6 + #define LSS_BODY_CLOBBER0 + #define LSS_BODY_CLOBBER1 LSS_BODY_CLOBBER0 + #define LSS_BODY_CLOBBER2 LSS_BODY_CLOBBER1 + #define LSS_BODY_CLOBBER3 LSS_BODY_CLOBBER2 + #define LSS_BODY_CLOBBER4 LSS_BODY_CLOBBER3 "r10", + #define LSS_BODY_CLOBBER5 LSS_BODY_CLOBBER4 "r8", + #define LSS_BODY_CLOBBER6 LSS_BODY_CLOBBER5 "r9", + + #undef LSS_BODY_ARG0 + #undef LSS_BODY_ARG1 + #undef LSS_BODY_ARG2 + #undef LSS_BODY_ARG3 + #undef LSS_BODY_ARG4 + #undef LSS_BODY_ARG5 + #undef LSS_BODY_ARG6 + #define LSS_BODY_ARG0() + #define LSS_BODY_ARG1(arg1) \ + LSS_BODY_ARG0(), "D" (arg1) + #define LSS_BODY_ARG2(arg1, arg2) \ + LSS_BODY_ARG1(arg1), "S" (arg2) + #define LSS_BODY_ARG3(arg1, arg2, arg3) \ + LSS_BODY_ARG2(arg1, arg2), "d" (arg3) + #define LSS_BODY_ARG4(arg1, arg2, arg3, arg4) \ + LSS_BODY_ARG3(arg1, arg2, arg3), "r" (arg4) + #define LSS_BODY_ARG5(arg1, arg2, arg3, arg4, arg5) \ + LSS_BODY_ARG4(arg1, arg2, arg3, arg4), "r" (arg5) + #define LSS_BODY_ARG6(arg1, arg2, arg3, arg4, arg5, arg6) \ + LSS_BODY_ARG5(arg1, arg2, arg3, arg4, arg5), "r" (arg6) + + #undef _syscall0 + #define _syscall0(type,name) \ + type LSS_NAME(name)(void) { \ + LSS_BODY(0, type, name); \ + } + #undef _syscall1 + #define _syscall1(type,name,type1,arg1) \ + type LSS_NAME(name)(type1 arg1) { \ + LSS_BODY(1, type, name, LSS_SYSCALL_ARG(arg1)); \ + } + #undef _syscall2 + #define _syscall2(type,name,type1,arg1,type2,arg2) \ + type LSS_NAME(name)(type1 arg1, type2 arg2) { \ + LSS_BODY(2, type, name, LSS_SYSCALL_ARG(arg1), LSS_SYSCALL_ARG(arg2));\ + } + #undef _syscall3 + #define _syscall3(type,name,type1,arg1,type2,arg2,type3,arg3) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3) { \ + LSS_BODY(3, type, name, LSS_SYSCALL_ARG(arg1), LSS_SYSCALL_ARG(arg2), \ + LSS_SYSCALL_ARG(arg3)); \ + } + #undef _syscall4 + #define _syscall4(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4) { \ + LSS_BODY(4, type, name, LSS_SYSCALL_ARG(arg1), LSS_SYSCALL_ARG(arg2), \ + LSS_SYSCALL_ARG(arg3), LSS_SYSCALL_ARG(arg4));\ + } + #undef _syscall5 + #define _syscall5(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4, \ + type5,arg5) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5) { \ + LSS_BODY(5, type, name, LSS_SYSCALL_ARG(arg1), LSS_SYSCALL_ARG(arg2), \ + LSS_SYSCALL_ARG(arg3), LSS_SYSCALL_ARG(arg4), \ + LSS_SYSCALL_ARG(arg5)); \ + } + #undef _syscall6 + #define _syscall6(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4, \ + type5,arg5,type6,arg6) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5, type6 arg6) { \ + LSS_BODY(6, type, name, LSS_SYSCALL_ARG(arg1), LSS_SYSCALL_ARG(arg2), \ + LSS_SYSCALL_ARG(arg3), LSS_SYSCALL_ARG(arg4), \ + LSS_SYSCALL_ARG(arg5), LSS_SYSCALL_ARG(arg6));\ + } + 
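The -1..-4095 return convention that LSS_RETURN and _LSS_RETURN decode in this hunk can be illustrated in isolation; the sketch below is not part of the breakpad patch, and decode_raw_result is a hypothetical helper name used only for illustration.

#include <errno.h>

/* Illustration only -- not part of the vendored breakpad header above.
 * On the architectures this LSS_RETURN variant covers (i386, x86-64, ARM,
 * aarch64), a raw syscall reports failure by returning a negated errno in
 * the range -4095..-1; anything else is a valid result. This mirrors what
 * LSS_RETURN/_LSS_RETURN do with that raw value. */
static long decode_raw_result(long raw) {
  if ((unsigned long)raw >= (unsigned long)-4095L) {
    errno = (int)-raw;   /* e.g. raw == -EINVAL  ->  errno = EINVAL */
    return -1;
  }
  return raw;            /* success: pass the kernel's value through */
}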
LSS_INLINE int LSS_NAME(clone)(int (*fn)(void *), void *child_stack, + int flags, void *arg, int *parent_tidptr, + void *newtls, int *child_tidptr) { + long long __res; + { + __asm__ __volatile__(/* if (fn == NULL) + * return -EINVAL; + */ + "testq %4,%4\n" + "jz 1f\n" + + /* if (child_stack == NULL) + * return -EINVAL; + */ + "testq %5,%5\n" + "jz 1f\n" + + /* childstack -= 2*sizeof(void *); + */ + "subq $16,%5\n" + + /* Push "arg" and "fn" onto the stack that will be + * used by the child. + */ + "movq %7,8(%5)\n" + "movq %4,0(%5)\n" + + /* %rax = syscall(%rax = __NR_clone, + * %rdi = flags, + * %rsi = child_stack, + * %rdx = parent_tidptr, + * %r8 = new_tls, + * %r10 = child_tidptr) + */ + "movq %2,%%rax\n" + "movq %9,%%r8\n" + "movq %10,%%r10\n" + LSS_ENTRYPOINT + + /* if (%rax != 0) + * return; + */ + "testq %%rax,%%rax\n" + "jnz 1f\n" + + /* In the child. Terminate frame pointer chain. + */ + "xorq %%rbp,%%rbp\n" + + /* Call "fn(arg)". + */ + "popq %%rax\n" + "popq %%rdi\n" + "call *%%rax\n" + + /* Call _exit(%ebx). + */ + "movq %%rax,%%rdi\n" + "movq %3,%%rax\n" + LSS_ENTRYPOINT + + /* Return to parent. + */ + "1:\n" + : "=a" (__res) + : "0"(-EINVAL), "i"(__NR_clone), "i"(__NR_exit), + "r"(LSS_SYSCALL_ARG(fn)), + "S"(LSS_SYSCALL_ARG(child_stack)), + "D"(LSS_SYSCALL_ARG(flags)), + "r"(LSS_SYSCALL_ARG(arg)), + "d"(LSS_SYSCALL_ARG(parent_tidptr)), + "r"(LSS_SYSCALL_ARG(newtls)), + "r"(LSS_SYSCALL_ARG(child_tidptr)) + : "rsp", "memory", "r8", "r10", "r11", "rcx"); + } + LSS_RETURN(int, __res); + } + LSS_INLINE _syscall2(int, arch_prctl, int, c, void *, a) + + /* Need to make sure loff_t isn't truncated to 32-bits under x32. */ + LSS_INLINE int LSS_NAME(fadvise64)(int fd, loff_t offset, loff_t len, + int advice) { + LSS_BODY(4, int, fadvise64, LSS_SYSCALL_ARG(fd), (uint64_t)(offset), + (uint64_t)(len), LSS_SYSCALL_ARG(advice)); + } + + LSS_INLINE void (*LSS_NAME(restore_rt)(void))(void) { + /* On x86-64, the kernel does not know how to return from + * a signal handler. Instead, it relies on user space to provide a + * restorer function that calls the rt_sigreturn() system call. + * Unfortunately, we cannot just reference the glibc version of this + * function, as glibc goes out of its way to make it inaccessible. + */ + long long res; + __asm__ __volatile__("jmp 2f\n" + ".align 16\n" + "1:movq %1,%%rax\n" + LSS_ENTRYPOINT + "2:leaq 1b(%%rip),%0\n" + : "=r" (res) + : "i" (__NR_rt_sigreturn)); + return (void (*)(void))(uintptr_t)res; + } + #elif defined(__ARM_ARCH_3__) + /* Most definitions of _syscallX() neglect to mark "memory" as being + * clobbered. This causes problems with compilers, that do a better job + * at optimizing across __asm__ calls. + * So, we just have to redefine all of the _syscallX() macros. + */ + #undef LSS_REG + #define LSS_REG(r,a) register long __r##r __asm__("r"#r) = (long)a + #undef LSS_BODY + #define LSS_BODY(type,name,args...) 
\ + register long __res_r0 __asm__("r0"); \ + long __res; \ + __asm__ __volatile__ (__syscall(name) \ + : "=r"(__res_r0) : args : "lr", "memory"); \ + __res = __res_r0; \ + LSS_RETURN(type, __res) + #undef _syscall0 + #define _syscall0(type, name) \ + type LSS_NAME(name)(void) { \ + LSS_BODY(type, name); \ + } + #undef _syscall1 + #define _syscall1(type, name, type1, arg1) \ + type LSS_NAME(name)(type1 arg1) { \ + LSS_REG(0, arg1); LSS_BODY(type, name, "r"(__r0)); \ + } + #undef _syscall2 + #define _syscall2(type, name, type1, arg1, type2, arg2) \ + type LSS_NAME(name)(type1 arg1, type2 arg2) { \ + LSS_REG(0, arg1); LSS_REG(1, arg2); \ + LSS_BODY(type, name, "r"(__r0), "r"(__r1)); \ + } + #undef _syscall3 + #define _syscall3(type, name, type1, arg1, type2, arg2, type3, arg3) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3) { \ + LSS_REG(0, arg1); LSS_REG(1, arg2); LSS_REG(2, arg3); \ + LSS_BODY(type, name, "r"(__r0), "r"(__r1), "r"(__r2)); \ + } + #undef _syscall4 + #define _syscall4(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4) { \ + LSS_REG(0, arg1); LSS_REG(1, arg2); LSS_REG(2, arg3); \ + LSS_REG(3, arg4); \ + LSS_BODY(type, name, "r"(__r0), "r"(__r1), "r"(__r2), "r"(__r3)); \ + } + #undef _syscall5 + #define _syscall5(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4, \ + type5,arg5) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5) { \ + LSS_REG(0, arg1); LSS_REG(1, arg2); LSS_REG(2, arg3); \ + LSS_REG(3, arg4); LSS_REG(4, arg5); \ + LSS_BODY(type, name, "r"(__r0), "r"(__r1), "r"(__r2), "r"(__r3), \ + "r"(__r4)); \ + } + #undef _syscall6 + #define _syscall6(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4, \ + type5,arg5,type6,arg6) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5, type6 arg6) { \ + LSS_REG(0, arg1); LSS_REG(1, arg2); LSS_REG(2, arg3); \ + LSS_REG(3, arg4); LSS_REG(4, arg5); LSS_REG(5, arg6); \ + LSS_BODY(type, name, "r"(__r0), "r"(__r1), "r"(__r2), "r"(__r3), \ + "r"(__r4), "r"(__r5)); \ + } + LSS_INLINE int LSS_NAME(clone)(int (*fn)(void *), void *child_stack, + int flags, void *arg, int *parent_tidptr, + void *newtls, int *child_tidptr) { + long __res; + { + register int __flags __asm__("r0") = flags; + register void *__stack __asm__("r1") = child_stack; + register void *__ptid __asm__("r2") = parent_tidptr; + register void *__tls __asm__("r3") = newtls; + register int *__ctid __asm__("r4") = child_tidptr; + __asm__ __volatile__(/* if (fn == NULL || child_stack == NULL) + * return -EINVAL; + */ + "cmp %2,#0\n" + "cmpne %3,#0\n" + "moveq %0,%1\n" + "beq 1f\n" + + /* Push "arg" and "fn" onto the stack that will be + * used by the child. + */ + "str %5,[%3,#-4]!\n" + "str %2,[%3,#-4]!\n" + + /* %r0 = syscall(%r0 = flags, + * %r1 = child_stack, + * %r2 = parent_tidptr, + * %r3 = newtls, + * %r4 = child_tidptr) + */ + __syscall(clone)"\n" + + /* if (%r0 != 0) + * return %r0; + */ + "movs %0,r0\n" + "bne 1f\n" + + /* In the child, now. Call "fn(arg)". + */ + "ldr r0,[sp, #4]\n" + "mov lr,pc\n" + "ldr pc,[sp]\n" + + /* Call _exit(%r0). + */ + __syscall(exit)"\n" + "1:\n" + : "=r" (__res) + : "i"(-EINVAL), + "r"(fn), "r"(__stack), "r"(__flags), "r"(arg), + "r"(__ptid), "r"(__tls), "r"(__ctid) + : "cc", "lr", "memory"); + } + LSS_RETURN(int, __res); + } + #elif defined(__ARM_EABI__) + /* Most definitions of _syscallX() neglect to mark "memory" as being + * clobbered. 
This causes problems with compilers, that do a better job + * at optimizing across __asm__ calls. + * So, we just have to redefine all fo the _syscallX() macros. + */ + #undef LSS_REG + #define LSS_REG(r,a) register long __r##r __asm__("r"#r) = (long)a + #undef LSS_BODY + #define LSS_BODY(type,name,args...) \ + register long __res_r0 __asm__("r0"); \ + long __res; \ + __asm__ __volatile__ ("push {r7}\n" \ + "mov r7, %1\n" \ + "swi 0x0\n" \ + "pop {r7}\n" \ + : "=r"(__res_r0) \ + : "i"(__NR_##name) , ## args \ + : "lr", "memory"); \ + __res = __res_r0; \ + LSS_RETURN(type, __res) + #undef _syscall0 + #define _syscall0(type, name) \ + type LSS_NAME(name)(void) { \ + LSS_BODY(type, name); \ + } + #undef _syscall1 + #define _syscall1(type, name, type1, arg1) \ + type LSS_NAME(name)(type1 arg1) { \ + LSS_REG(0, arg1); LSS_BODY(type, name, "r"(__r0)); \ + } + #undef _syscall2 + #define _syscall2(type, name, type1, arg1, type2, arg2) \ + type LSS_NAME(name)(type1 arg1, type2 arg2) { \ + LSS_REG(0, arg1); LSS_REG(1, arg2); \ + LSS_BODY(type, name, "r"(__r0), "r"(__r1)); \ + } + #undef _syscall3 + #define _syscall3(type, name, type1, arg1, type2, arg2, type3, arg3) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3) { \ + LSS_REG(0, arg1); LSS_REG(1, arg2); LSS_REG(2, arg3); \ + LSS_BODY(type, name, "r"(__r0), "r"(__r1), "r"(__r2)); \ + } + #undef _syscall4 + #define _syscall4(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4) { \ + LSS_REG(0, arg1); LSS_REG(1, arg2); LSS_REG(2, arg3); \ + LSS_REG(3, arg4); \ + LSS_BODY(type, name, "r"(__r0), "r"(__r1), "r"(__r2), "r"(__r3)); \ + } + #undef _syscall5 + #define _syscall5(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4, \ + type5,arg5) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5) { \ + LSS_REG(0, arg1); LSS_REG(1, arg2); LSS_REG(2, arg3); \ + LSS_REG(3, arg4); LSS_REG(4, arg5); \ + LSS_BODY(type, name, "r"(__r0), "r"(__r1), "r"(__r2), "r"(__r3), \ + "r"(__r4)); \ + } + #undef _syscall6 + #define _syscall6(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4, \ + type5,arg5,type6,arg6) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5, type6 arg6) { \ + LSS_REG(0, arg1); LSS_REG(1, arg2); LSS_REG(2, arg3); \ + LSS_REG(3, arg4); LSS_REG(4, arg5); LSS_REG(5, arg6); \ + LSS_BODY(type, name, "r"(__r0), "r"(__r1), "r"(__r2), "r"(__r3), \ + "r"(__r4), "r"(__r5)); \ + } + LSS_INLINE int LSS_NAME(clone)(int (*fn)(void *), void *child_stack, + int flags, void *arg, int *parent_tidptr, + void *newtls, int *child_tidptr) { + long __res; + { + register int __flags __asm__("r0") = flags; + register void *__stack __asm__("r1") = child_stack; + register void *__ptid __asm__("r2") = parent_tidptr; + register void *__tls __asm__("r3") = newtls; + register int *__ctid __asm__("r4") = child_tidptr; + __asm__ __volatile__(/* if (fn == NULL || child_stack == NULL) + * return -EINVAL; + */ +#ifdef __thumb2__ + "push {r7}\n" +#endif + "cmp %2,#0\n" + "it ne\n" + "cmpne %3,#0\n" + "it eq\n" + "moveq %0,%1\n" + "beq 1f\n" + + /* Push "arg" and "fn" onto the stack that will be + * used by the child. + */ + "str %5,[%3,#-4]!\n" + "str %2,[%3,#-4]!\n" + + /* %r0 = syscall(%r0 = flags, + * %r1 = child_stack, + * %r2 = parent_tidptr, + * %r3 = newtls, + * %r4 = child_tidptr) + */ + "mov r7, %9\n" + "swi 0x0\n" + + /* if (%r0 != 0) + * return %r0; + */ + "movs %0,r0\n" + "bne 1f\n" + + /* In the child, now. 
Call "fn(arg)". + */ + "ldr r0,[sp, #4]\n" + + /* When compiling for Thumb-2 the "MOV LR,PC" here + * won't work because it loads PC+4 into LR, + * whereas the LDR is a 4-byte instruction. + * This results in the child thread always + * crashing with an "Illegal Instruction" when it + * returned into the middle of the LDR instruction + * The instruction sequence used instead was + * recommended by + * "https://wiki.edubuntu.org/ARM/Thumb2PortingHowto#Quick_Reference". + */ + #ifdef __thumb2__ + "ldr r7,[sp]\n" + "blx r7\n" + #else + "mov lr,pc\n" + "ldr pc,[sp]\n" + #endif + + /* Call _exit(%r0). + */ + "mov r7, %10\n" + "swi 0x0\n" + "1:\n" +#ifdef __thumb2__ + "pop {r7}" +#endif + : "=r" (__res) + : "i"(-EINVAL), + "r"(fn), "r"(__stack), "r"(__flags), "r"(arg), + "r"(__ptid), "r"(__tls), "r"(__ctid), + "i"(__NR_clone), "i"(__NR_exit) +#ifdef __thumb2__ + : "cc", "lr", "memory"); +#else + : "cc", "r7", "lr", "memory"); +#endif + } + LSS_RETURN(int, __res); + } + #elif defined(__aarch64__) + /* Most definitions of _syscallX() neglect to mark "memory" as being + * clobbered. This causes problems with compilers, that do a better job + * at optimizing across __asm__ calls. + * So, we just have to redefine all of the _syscallX() macros. + */ + #undef LSS_REG + #define LSS_REG(r,a) register int64_t __r##r __asm__("x"#r) = (int64_t)a + #undef LSS_BODY + #define LSS_BODY(type,name,args...) \ + register int64_t __res_x0 __asm__("x0"); \ + int64_t __res; \ + __asm__ __volatile__ ("mov x8, %1\n" \ + "svc 0x0\n" \ + : "=r"(__res_x0) \ + : "i"(__NR_##name) , ## args \ + : "x8", "memory"); \ + __res = __res_x0; \ + LSS_RETURN(type, __res) + #undef _syscall0 + #define _syscall0(type, name) \ + type LSS_NAME(name)(void) { \ + LSS_BODY(type, name); \ + } + #undef _syscall1 + #define _syscall1(type, name, type1, arg1) \ + type LSS_NAME(name)(type1 arg1) { \ + LSS_REG(0, arg1); LSS_BODY(type, name, "r"(__r0)); \ + } + #undef _syscall2 + #define _syscall2(type, name, type1, arg1, type2, arg2) \ + type LSS_NAME(name)(type1 arg1, type2 arg2) { \ + LSS_REG(0, arg1); LSS_REG(1, arg2); \ + LSS_BODY(type, name, "r"(__r0), "r"(__r1)); \ + } + #undef _syscall3 + #define _syscall3(type, name, type1, arg1, type2, arg2, type3, arg3) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3) { \ + LSS_REG(0, arg1); LSS_REG(1, arg2); LSS_REG(2, arg3); \ + LSS_BODY(type, name, "r"(__r0), "r"(__r1), "r"(__r2)); \ + } + #undef _syscall4 + #define _syscall4(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4) { \ + LSS_REG(0, arg1); LSS_REG(1, arg2); LSS_REG(2, arg3); \ + LSS_REG(3, arg4); \ + LSS_BODY(type, name, "r"(__r0), "r"(__r1), "r"(__r2), "r"(__r3)); \ + } + #undef _syscall5 + #define _syscall5(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4, \ + type5,arg5) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5) { \ + LSS_REG(0, arg1); LSS_REG(1, arg2); LSS_REG(2, arg3); \ + LSS_REG(3, arg4); LSS_REG(4, arg5); \ + LSS_BODY(type, name, "r"(__r0), "r"(__r1), "r"(__r2), "r"(__r3), \ + "r"(__r4)); \ + } + #undef _syscall6 + #define _syscall6(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4, \ + type5,arg5,type6,arg6) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5, type6 arg6) { \ + LSS_REG(0, arg1); LSS_REG(1, arg2); LSS_REG(2, arg3); \ + LSS_REG(3, arg4); LSS_REG(4, arg5); LSS_REG(5, arg6); \ + LSS_BODY(type, name, "r"(__r0), "r"(__r1), "r"(__r2), "r"(__r3), \ + 
"r"(__r4), "r"(__r5)); \ + } + + LSS_INLINE int LSS_NAME(clone)(int (*fn)(void *), void *child_stack, + int flags, void *arg, int *parent_tidptr, + void *newtls, int *child_tidptr) { + int64_t __res; + { + register uint64_t __flags __asm__("x0") = flags; + register void *__stack __asm__("x1") = child_stack; + register void *__ptid __asm__("x2") = parent_tidptr; + register void *__tls __asm__("x3") = newtls; + register int *__ctid __asm__("x4") = child_tidptr; + __asm__ __volatile__(/* Push "arg" and "fn" onto the stack that will be + * used by the child. + */ + "stp %1, %4, [%2, #-16]!\n" + + /* %x0 = syscall(%x0 = flags, + * %x1 = child_stack, + * %x2 = parent_tidptr, + * %x3 = newtls, + * %x4 = child_tidptr) + */ + "mov x8, %8\n" + "svc 0x0\n" + + /* if (%r0 != 0) + * return %r0; + */ + "mov %0, x0\n" + "cbnz x0, 1f\n" + + /* In the child, now. Call "fn(arg)". + */ + "ldp x1, x0, [sp], #16\n" + "blr x1\n" + + /* Call _exit(%r0). + */ + "mov x8, %9\n" + "svc 0x0\n" + "1:\n" + : "=r" (__res) + : "r"(fn), "r"(__stack), "r"(__flags), "r"(arg), + "r"(__ptid), "r"(__tls), "r"(__ctid), + "i"(__NR_clone), "i"(__NR_exit) + : "cc", "x8", "memory"); + } + LSS_RETURN(int, __res); + } + #elif defined(__mips__) + #undef LSS_REG + #define LSS_REG(r,a) register unsigned long __r##r __asm__("$"#r) = \ + (unsigned long)(a) + #undef LSS_BODY + #undef LSS_SYSCALL_CLOBBERS + #if _MIPS_SIM == _MIPS_SIM_ABI32 + #define LSS_SYSCALL_CLOBBERS "$1", "$3", "$8", "$9", "$10", \ + "$11", "$12", "$13", "$14", "$15", \ + "$24", "$25", "hi", "lo", "memory" + #else + #define LSS_SYSCALL_CLOBBERS "$1", "$3", "$10", "$11", "$12", \ + "$13", "$14", "$15", "$24", "$25", \ + "hi", "lo", "memory" + #endif + #define LSS_BODY(type,name,r7,...) \ + register unsigned long __v0 __asm__("$2") = __NR_##name; \ + __asm__ __volatile__ ("syscall\n" \ + : "+r"(__v0), r7 (__r7) \ + : "0"(__v0), ##__VA_ARGS__ \ + : LSS_SYSCALL_CLOBBERS); \ + LSS_RETURN(type, __v0, __r7) + #undef _syscall0 + #define _syscall0(type, name) \ + type LSS_NAME(name)(void) { \ + register unsigned long __r7 __asm__("$7"); \ + LSS_BODY(type, name, "=r"); \ + } + #undef _syscall1 + #define _syscall1(type, name, type1, arg1) \ + type LSS_NAME(name)(type1 arg1) { \ + register unsigned long __r7 __asm__("$7"); \ + LSS_REG(4, arg1); LSS_BODY(type, name, "=r", "r"(__r4)); \ + } + #undef _syscall2 + #define _syscall2(type, name, type1, arg1, type2, arg2) \ + type LSS_NAME(name)(type1 arg1, type2 arg2) { \ + register unsigned long __r7 __asm__("$7"); \ + LSS_REG(4, arg1); LSS_REG(5, arg2); \ + LSS_BODY(type, name, "=r", "r"(__r4), "r"(__r5)); \ + } + #undef _syscall3 + #define _syscall3(type, name, type1, arg1, type2, arg2, type3, arg3) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3) { \ + register unsigned long __r7 __asm__("$7"); \ + LSS_REG(4, arg1); LSS_REG(5, arg2); LSS_REG(6, arg3); \ + LSS_BODY(type, name, "=r", "r"(__r4), "r"(__r5), "r"(__r6)); \ + } + #undef _syscall4 + #define _syscall4(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4) { \ + LSS_REG(4, arg1); LSS_REG(5, arg2); LSS_REG(6, arg3); \ + LSS_REG(7, arg4); \ + LSS_BODY(type, name, "+r", "r"(__r4), "r"(__r5), "r"(__r6)); \ + } + #undef _syscall5 + #if _MIPS_SIM == _MIPS_SIM_ABI32 + /* The old 32bit MIPS system call API passes the fifth and sixth argument + * on the stack, whereas the new APIs use registers "r8" and "r9". 
+ */ + #define _syscall5(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4, \ + type5,arg5) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5) { \ + LSS_REG(4, arg1); LSS_REG(5, arg2); LSS_REG(6, arg3); \ + LSS_REG(7, arg4); \ + register unsigned long __v0 __asm__("$2") = __NR_##name; \ + __asm__ __volatile__ (".set noreorder\n" \ + "subu $29, 32\n" \ + "sw %5, 16($29)\n" \ + "syscall\n" \ + "addiu $29, 32\n" \ + ".set reorder\n" \ + : "+r"(__v0), "+r" (__r7) \ + : "r"(__r4), "r"(__r5), \ + "r"(__r6), "r" ((unsigned long)arg5) \ + : "$8", "$9", "$10", "$11", "$12", \ + "$13", "$14", "$15", "$24", "$25", \ + "memory"); \ + LSS_RETURN(type, __v0, __r7); \ + } + #else + #define _syscall5(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4, \ + type5,arg5) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5) { \ + LSS_REG(4, arg1); LSS_REG(5, arg2); LSS_REG(6, arg3); \ + LSS_REG(7, arg4); LSS_REG(8, arg5); \ + LSS_BODY(type, name, "+r", "r"(__r4), "r"(__r5), "r"(__r6), \ + "r"(__r8)); \ + } + #endif + #undef _syscall6 + #if _MIPS_SIM == _MIPS_SIM_ABI32 + /* The old 32bit MIPS system call API passes the fifth and sixth argument + * on the stack, whereas the new APIs use registers "r8" and "r9". + */ + #define _syscall6(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4, \ + type5,arg5,type6,arg6) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5, type6 arg6) { \ + LSS_REG(4, arg1); LSS_REG(5, arg2); LSS_REG(6, arg3); \ + LSS_REG(7, arg4); \ + register unsigned long __v0 __asm__("$2") = __NR_##name; \ + __asm__ __volatile__ (".set noreorder\n" \ + "subu $29, 32\n" \ + "sw %5, 16($29)\n" \ + "sw %6, 20($29)\n" \ + "syscall\n" \ + "addiu $29, 32\n" \ + ".set reorder\n" \ + : "+r"(__v0), "+r" (__r7) \ + : "r"(__r4), "r"(__r5), \ + "r"(__r6), "r" ((unsigned long)arg5), \ + "r" ((unsigned long)arg6) \ + : "$8", "$9", "$10", "$11", "$12", \ + "$13", "$14", "$15", "$24", "$25", \ + "memory"); \ + LSS_RETURN(type, __v0, __r7); \ + } + #else + #define _syscall6(type,name,type1,arg1,type2,arg2,type3,arg3,type4,arg4, \ + type5,arg5,type6,arg6) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5,type6 arg6) { \ + LSS_REG(4, arg1); LSS_REG(5, arg2); LSS_REG(6, arg3); \ + LSS_REG(7, arg4); LSS_REG(8, arg5); LSS_REG(9, arg6); \ + LSS_BODY(type, name, "+r", "r"(__r4), "r"(__r5), "r"(__r6), \ + "r"(__r8), "r"(__r9)); \ + } + #endif + LSS_INLINE int LSS_NAME(clone)(int (*fn)(void *), void *child_stack, + int flags, void *arg, int *parent_tidptr, + void *newtls, int *child_tidptr) { + register unsigned long __v0 __asm__("$2"); + register unsigned long __r7 __asm__("$7") = (unsigned long)newtls; + { + register int __flags __asm__("$4") = flags; + register void *__stack __asm__("$5") = child_stack; + register void *__ptid __asm__("$6") = parent_tidptr; + register int *__ctid __asm__("$8") = child_tidptr; + __asm__ __volatile__( + #if _MIPS_SIM == _MIPS_SIM_ABI32 && _MIPS_SZPTR == 32 + "subu $29,24\n" + #elif _MIPS_SIM == _MIPS_SIM_NABI32 + "sub $29,16\n" + #else + "dsubu $29,16\n" + #endif + + /* if (fn == NULL || child_stack == NULL) + * return -EINVAL; + */ + "li %0,%2\n" + "beqz %5,1f\n" + "beqz %6,1f\n" + + /* Push "arg" and "fn" onto the stack that will be + * used by the child. 
+ */ + #if _MIPS_SIM == _MIPS_SIM_ABI32 && _MIPS_SZPTR == 32 + "subu %6,32\n" + "sw %5,0(%6)\n" + "sw %8,4(%6)\n" + #elif _MIPS_SIM == _MIPS_SIM_NABI32 + "sub %6,32\n" + "sw %5,0(%6)\n" + "sw %8,8(%6)\n" + #else + "dsubu %6,32\n" + "sd %5,0(%6)\n" + "sd %8,8(%6)\n" + #endif + + /* $7 = syscall($4 = flags, + * $5 = child_stack, + * $6 = parent_tidptr, + * $7 = newtls, + * $8 = child_tidptr) + */ + "li $2,%3\n" + "syscall\n" + + /* if ($7 != 0) + * return $2; + */ + "bnez $7,1f\n" + "bnez $2,1f\n" + + /* In the child, now. Call "fn(arg)". + */ + #if _MIPS_SIM == _MIPS_SIM_ABI32 && _MIPS_SZPTR == 32 + "lw $25,0($29)\n" + "lw $4,4($29)\n" + #elif _MIPS_SIM == _MIPS_SIM_NABI32 + "lw $25,0($29)\n" + "lw $4,8($29)\n" + #else + "ld $25,0($29)\n" + "ld $4,8($29)\n" + #endif + "jalr $25\n" + + /* Call _exit($2) + */ + "move $4,$2\n" + "li $2,%4\n" + "syscall\n" + + "1:\n" + #if _MIPS_SIM == _MIPS_SIM_ABI32 && _MIPS_SZPTR == 32 + "addu $29, 24\n" + #elif _MIPS_SIM == _MIPS_SIM_NABI32 + "add $29, 16\n" + #else + "daddu $29,16\n" + #endif + : "+r" (__v0), "+r" (__r7) + : "i"(-EINVAL), "i"(__NR_clone), "i"(__NR_exit), + "r"(fn), "r"(__stack), "r"(__flags), "r"(arg), + "r"(__ptid), "r"(__r7), "r"(__ctid) + : "$9", "$10", "$11", "$12", "$13", "$14", "$15", + "$24", "$25", "memory"); + } + LSS_RETURN(int, __v0, __r7); + } + #elif defined (__PPC__) + #undef LSS_LOADARGS_0 + #define LSS_LOADARGS_0(name, dummy...) \ + __sc_0 = __NR_##name + #undef LSS_LOADARGS_1 + #define LSS_LOADARGS_1(name, arg1) \ + LSS_LOADARGS_0(name); \ + __sc_3 = (unsigned long) (arg1) + #undef LSS_LOADARGS_2 + #define LSS_LOADARGS_2(name, arg1, arg2) \ + LSS_LOADARGS_1(name, arg1); \ + __sc_4 = (unsigned long) (arg2) + #undef LSS_LOADARGS_3 + #define LSS_LOADARGS_3(name, arg1, arg2, arg3) \ + LSS_LOADARGS_2(name, arg1, arg2); \ + __sc_5 = (unsigned long) (arg3) + #undef LSS_LOADARGS_4 + #define LSS_LOADARGS_4(name, arg1, arg2, arg3, arg4) \ + LSS_LOADARGS_3(name, arg1, arg2, arg3); \ + __sc_6 = (unsigned long) (arg4) + #undef LSS_LOADARGS_5 + #define LSS_LOADARGS_5(name, arg1, arg2, arg3, arg4, arg5) \ + LSS_LOADARGS_4(name, arg1, arg2, arg3, arg4); \ + __sc_7 = (unsigned long) (arg5) + #undef LSS_LOADARGS_6 + #define LSS_LOADARGS_6(name, arg1, arg2, arg3, arg4, arg5, arg6) \ + LSS_LOADARGS_5(name, arg1, arg2, arg3, arg4, arg5); \ + __sc_8 = (unsigned long) (arg6) + #undef LSS_ASMINPUT_0 + #define LSS_ASMINPUT_0 "0" (__sc_0) + #undef LSS_ASMINPUT_1 + #define LSS_ASMINPUT_1 LSS_ASMINPUT_0, "1" (__sc_3) + #undef LSS_ASMINPUT_2 + #define LSS_ASMINPUT_2 LSS_ASMINPUT_1, "2" (__sc_4) + #undef LSS_ASMINPUT_3 + #define LSS_ASMINPUT_3 LSS_ASMINPUT_2, "3" (__sc_5) + #undef LSS_ASMINPUT_4 + #define LSS_ASMINPUT_4 LSS_ASMINPUT_3, "4" (__sc_6) + #undef LSS_ASMINPUT_5 + #define LSS_ASMINPUT_5 LSS_ASMINPUT_4, "5" (__sc_7) + #undef LSS_ASMINPUT_6 + #define LSS_ASMINPUT_6 LSS_ASMINPUT_5, "6" (__sc_8) + #undef LSS_BODY + #define LSS_BODY(nr, type, name, args...) 
\ + long __sc_ret, __sc_err; \ + { \ + register unsigned long __sc_0 __asm__ ("r0"); \ + register unsigned long __sc_3 __asm__ ("r3"); \ + register unsigned long __sc_4 __asm__ ("r4"); \ + register unsigned long __sc_5 __asm__ ("r5"); \ + register unsigned long __sc_6 __asm__ ("r6"); \ + register unsigned long __sc_7 __asm__ ("r7"); \ + register unsigned long __sc_8 __asm__ ("r8"); \ + \ + LSS_LOADARGS_##nr(name, args); \ + __asm__ __volatile__ \ + ("sc\n\t" \ + "mfcr %0" \ + : "=&r" (__sc_0), \ + "=&r" (__sc_3), "=&r" (__sc_4), \ + "=&r" (__sc_5), "=&r" (__sc_6), \ + "=&r" (__sc_7), "=&r" (__sc_8) \ + : LSS_ASMINPUT_##nr \ + : "cr0", "ctr", "memory", \ + "r9", "r10", "r11", "r12"); \ + __sc_ret = __sc_3; \ + __sc_err = __sc_0; \ + } \ + LSS_RETURN(type, __sc_ret, __sc_err) + #undef _syscall0 + #define _syscall0(type, name) \ + type LSS_NAME(name)(void) { \ + LSS_BODY(0, type, name); \ + } + #undef _syscall1 + #define _syscall1(type, name, type1, arg1) \ + type LSS_NAME(name)(type1 arg1) { \ + LSS_BODY(1, type, name, arg1); \ + } + #undef _syscall2 + #define _syscall2(type, name, type1, arg1, type2, arg2) \ + type LSS_NAME(name)(type1 arg1, type2 arg2) { \ + LSS_BODY(2, type, name, arg1, arg2); \ + } + #undef _syscall3 + #define _syscall3(type, name, type1, arg1, type2, arg2, type3, arg3) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3) { \ + LSS_BODY(3, type, name, arg1, arg2, arg3); \ + } + #undef _syscall4 + #define _syscall4(type, name, type1, arg1, type2, arg2, type3, arg3, \ + type4, arg4) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4) { \ + LSS_BODY(4, type, name, arg1, arg2, arg3, arg4); \ + } + #undef _syscall5 + #define _syscall5(type, name, type1, arg1, type2, arg2, type3, arg3, \ + type4, arg4, type5, arg5) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5) { \ + LSS_BODY(5, type, name, arg1, arg2, arg3, arg4, arg5); \ + } + #undef _syscall6 + #define _syscall6(type, name, type1, arg1, type2, arg2, type3, arg3, \ + type4, arg4, type5, arg5, type6, arg6) \ + type LSS_NAME(name)(type1 arg1, type2 arg2, type3 arg3, type4 arg4, \ + type5 arg5, type6 arg6) { \ + LSS_BODY(6, type, name, arg1, arg2, arg3, arg4, arg5, arg6); \ + } + /* clone function adapted from glibc 2.3.6 clone.S */ + /* TODO(csilvers): consider wrapping some args up in a struct, like we + * do for i386's _syscall6, so we can compile successfully on gcc 2.95 + */ + LSS_INLINE int LSS_NAME(clone)(int (*fn)(void *), void *child_stack, + int flags, void *arg, int *parent_tidptr, + void *newtls, int *child_tidptr) { + long __ret, __err; + { + register int (*__fn)(void *) __asm__ ("r8") = fn; + register void *__cstack __asm__ ("r4") = child_stack; + register int __flags __asm__ ("r3") = flags; + register void * __arg __asm__ ("r9") = arg; + register int * __ptidptr __asm__ ("r5") = parent_tidptr; + register void * __newtls __asm__ ("r6") = newtls; + register int * __ctidptr __asm__ ("r7") = child_tidptr; + __asm__ __volatile__( + /* check for fn == NULL + * and child_stack == NULL + */ + "cmpwi cr0, %6, 0\n\t" + "cmpwi cr1, %7, 0\n\t" + "cror cr0*4+eq, cr1*4+eq, cr0*4+eq\n\t" + "beq- cr0, 1f\n\t" + + /* set up stack frame for child */ + "clrrwi %7, %7, 4\n\t" + "li 0, 0\n\t" + "stwu 0, -16(%7)\n\t" + + /* fn, arg, child_stack are saved across the syscall: r28-30 */ + "mr 28, %6\n\t" + "mr 29, %7\n\t" + "mr 27, %9\n\t" + + /* syscall */ + "li 0, %4\n\t" + /* flags already in r3 + * child_stack already in r4 + * ptidptr already in r5 + * newtls 
already in r6 + * ctidptr already in r7 + */ + "sc\n\t" + + /* Test if syscall was successful */ + "cmpwi cr1, 3, 0\n\t" + "crandc cr1*4+eq, cr1*4+eq, cr0*4+so\n\t" + "bne- cr1, 1f\n\t" + + /* Do the function call */ + "mtctr 28\n\t" + "mr 3, 27\n\t" + "bctrl\n\t" + + /* Call _exit(r3) */ + "li 0, %5\n\t" + "sc\n\t" + + /* Return to parent */ + "1:\n" + "mfcr %1\n\t" + "mr %0, 3\n\t" + : "=r" (__ret), "=r" (__err) + : "0" (-1), "1" (EINVAL), + "i" (__NR_clone), "i" (__NR_exit), + "r" (__fn), "r" (__cstack), "r" (__flags), + "r" (__arg), "r" (__ptidptr), "r" (__newtls), + "r" (__ctidptr) + : "cr0", "cr1", "memory", "ctr", + "r0", "r29", "r27", "r28"); + } + LSS_RETURN(int, __ret, __err); + } + #endif + #define __NR__exit __NR_exit + #define __NR__gettid __NR_gettid + #define __NR__mremap __NR_mremap + LSS_INLINE _syscall1(void *, brk, void *, e) + LSS_INLINE _syscall1(int, chdir, const char *,p) + LSS_INLINE _syscall1(int, close, int, f) + LSS_INLINE _syscall2(int, clock_getres, int, c, + struct kernel_timespec*, t) + LSS_INLINE _syscall2(int, clock_gettime, int, c, + struct kernel_timespec*, t) + LSS_INLINE _syscall1(int, dup, int, f) + #if !defined(__aarch64__) + // The dup2 syscall has been deprecated on aarch64. We polyfill it below. + LSS_INLINE _syscall2(int, dup2, int, s, + int, d) + #endif + LSS_INLINE _syscall3(int, execve, const char*, f, + const char*const*,a,const char*const*, e) + LSS_INLINE _syscall1(int, _exit, int, e) + LSS_INLINE _syscall1(int, exit_group, int, e) + LSS_INLINE _syscall3(int, fcntl, int, f, + int, c, long, a) + #if !defined(__aarch64__) + // The fork syscall has been deprecated on aarch64. We polyfill it below. + LSS_INLINE _syscall0(pid_t, fork) + #endif + LSS_INLINE _syscall2(int, fstat, int, f, + struct kernel_stat*, b) + LSS_INLINE _syscall2(int, fstatfs, int, f, + struct kernel_statfs*, b) + #if defined(__x86_64__) + /* Need to make sure off_t isn't truncated to 32-bits under x32. */ + LSS_INLINE int LSS_NAME(ftruncate)(int f, off_t l) { + LSS_BODY(2, int, ftruncate, LSS_SYSCALL_ARG(f), (uint64_t)(l)); + } + #else + LSS_INLINE _syscall2(int, ftruncate, int, f, + off_t, l) + #endif + LSS_INLINE _syscall4(int, futex, int*, a, + int, o, int, v, + struct kernel_timespec*, t) + LSS_INLINE _syscall3(int, getdents, int, f, + struct kernel_dirent*, d, int, c) + LSS_INLINE _syscall3(int, getdents64, int, f, + struct kernel_dirent64*, d, int, c) + LSS_INLINE _syscall0(gid_t, getegid) + LSS_INLINE _syscall0(uid_t, geteuid) + #if !defined(__aarch64__) + // The getgprp syscall has been deprecated on aarch64. 
+ LSS_INLINE _syscall0(pid_t, getpgrp) + #endif + LSS_INLINE _syscall0(pid_t, getpid) + LSS_INLINE _syscall0(pid_t, getppid) + LSS_INLINE _syscall2(int, getpriority, int, a, + int, b) + LSS_INLINE _syscall3(int, getresgid, gid_t *, r, + gid_t *, e, gid_t *, s) + LSS_INLINE _syscall3(int, getresuid, uid_t *, r, + uid_t *, e, uid_t *, s) +#if !defined(__ARM_EABI__) + LSS_INLINE _syscall2(int, getrlimit, int, r, + struct kernel_rlimit*, l) +#endif + LSS_INLINE _syscall1(pid_t, getsid, pid_t, p) + LSS_INLINE _syscall0(pid_t, _gettid) + LSS_INLINE _syscall2(pid_t, gettimeofday, struct kernel_timeval*, t, + void*, tz) + LSS_INLINE _syscall5(int, setxattr, const char *,p, + const char *, n, const void *,v, + size_t, s, int, f) + LSS_INLINE _syscall5(int, lsetxattr, const char *,p, + const char *, n, const void *,v, + size_t, s, int, f) + LSS_INLINE _syscall4(ssize_t, getxattr, const char *,p, + const char *, n, void *, v, size_t, s) + LSS_INLINE _syscall4(ssize_t, lgetxattr, const char *,p, + const char *, n, void *, v, size_t, s) + LSS_INLINE _syscall3(ssize_t, listxattr, const char *,p, + char *, l, size_t, s) + LSS_INLINE _syscall3(ssize_t, llistxattr, const char *,p, + char *, l, size_t, s) + LSS_INLINE _syscall3(int, ioctl, int, d, + int, r, void *, a) + LSS_INLINE _syscall2(int, ioprio_get, int, which, + int, who) + LSS_INLINE _syscall3(int, ioprio_set, int, which, + int, who, int, ioprio) + LSS_INLINE _syscall2(int, kill, pid_t, p, + int, s) + #if defined(__x86_64__) + /* Need to make sure off_t isn't truncated to 32-bits under x32. */ + LSS_INLINE off_t LSS_NAME(lseek)(int f, off_t o, int w) { + _LSS_BODY(3, off_t, lseek, off_t, LSS_SYSCALL_ARG(f), (uint64_t)(o), + LSS_SYSCALL_ARG(w)); + } + #else + LSS_INLINE _syscall3(off_t, lseek, int, f, + off_t, o, int, w) + #endif + LSS_INLINE _syscall2(int, munmap, void*, s, + size_t, l) + LSS_INLINE _syscall6(long, move_pages, pid_t, p, + unsigned long, n, void **,g, int *, d, + int *, s, int, f) + LSS_INLINE _syscall3(int, mprotect, const void *,a, + size_t, l, int, p) + LSS_INLINE _syscall5(void*, _mremap, void*, o, + size_t, os, size_t, ns, + unsigned long, f, void *, a) + #if !defined(__aarch64__) + // The open and poll syscalls have been deprecated on aarch64. We polyfill + // them below. + LSS_INLINE _syscall3(int, open, const char*, p, + int, f, int, m) + LSS_INLINE _syscall3(int, poll, struct kernel_pollfd*, u, + unsigned int, n, int, t) + #endif + LSS_INLINE _syscall5(int, prctl, int, option, + unsigned long, arg2, + unsigned long, arg3, + unsigned long, arg4, + unsigned long, arg5) + LSS_INLINE _syscall4(long, ptrace, int, r, + pid_t, p, void *, a, void *, d) + #if defined(__NR_quotactl) + // Defined on x86_64 / i386 only + LSS_INLINE _syscall4(int, quotactl, int, cmd, const char *, special, + int, id, caddr_t, addr) + #endif + LSS_INLINE _syscall3(ssize_t, read, int, f, + void *, b, size_t, c) + #if !defined(__aarch64__) + // The readlink syscall has been deprecated on aarch64. We polyfill below. 
+ LSS_INLINE _syscall3(int, readlink, const char*, p, + char*, b, size_t, s) + #endif + LSS_INLINE _syscall4(int, rt_sigaction, int, s, + const struct kernel_sigaction*, a, + struct kernel_sigaction*, o, size_t, c) + LSS_INLINE _syscall2(int, rt_sigpending, struct kernel_sigset_t *, s, + size_t, c) + LSS_INLINE _syscall4(int, rt_sigprocmask, int, h, + const struct kernel_sigset_t*, s, + struct kernel_sigset_t*, o, size_t, c) + LSS_INLINE _syscall2(int, rt_sigsuspend, + const struct kernel_sigset_t*, s, size_t, c) + LSS_INLINE _syscall3(int, sched_getaffinity,pid_t, p, + unsigned int, l, unsigned long *, m) + LSS_INLINE _syscall3(int, sched_setaffinity,pid_t, p, + unsigned int, l, unsigned long *, m) + LSS_INLINE _syscall0(int, sched_yield) + LSS_INLINE _syscall1(long, set_tid_address, int *, t) + LSS_INLINE _syscall1(int, setfsgid, gid_t, g) + LSS_INLINE _syscall1(int, setfsuid, uid_t, u) + LSS_INLINE _syscall1(int, setuid, uid_t, u) + LSS_INLINE _syscall1(int, setgid, gid_t, g) + LSS_INLINE _syscall2(int, setpgid, pid_t, p, + pid_t, g) + LSS_INLINE _syscall3(int, setpriority, int, a, + int, b, int, p) + LSS_INLINE _syscall3(int, setresgid, gid_t, r, + gid_t, e, gid_t, s) + LSS_INLINE _syscall3(int, setresuid, uid_t, r, + uid_t, e, uid_t, s) + LSS_INLINE _syscall2(int, setrlimit, int, r, + const struct kernel_rlimit*, l) + LSS_INLINE _syscall0(pid_t, setsid) + LSS_INLINE _syscall2(int, sigaltstack, const stack_t*, s, + const stack_t*, o) + #if defined(__NR_sigreturn) + LSS_INLINE _syscall1(int, sigreturn, unsigned long, u) + #endif + #if !defined(__aarch64__) + // The stat syscall has been deprecated on aarch64. We polyfill it below. + LSS_INLINE _syscall2(int, stat, const char*, f, + struct kernel_stat*, b) + #endif + LSS_INLINE _syscall2(int, statfs, const char*, f, + struct kernel_statfs*, b) + LSS_INLINE _syscall3(int, tgkill, pid_t, p, + pid_t, t, int, s) + LSS_INLINE _syscall2(int, tkill, pid_t, p, + int, s) + #if !defined(__aarch64__) + // The unlink syscall has been deprecated on aarch64. We polyfill it below. + LSS_INLINE _syscall1(int, unlink, const char*, f) + #endif + LSS_INLINE _syscall3(ssize_t, write, int, f, + const void *, b, size_t, c) + LSS_INLINE _syscall3(ssize_t, writev, int, f, + const struct kernel_iovec*, v, size_t, c) + #if defined(__NR_getcpu) + LSS_INLINE _syscall3(long, getcpu, unsigned *, cpu, + unsigned *, node, void *, unused) + #endif + #if defined(__x86_64__) || \ + (defined(__mips__) && _MIPS_SIM != _MIPS_SIM_ABI32) + LSS_INLINE _syscall3(int, recvmsg, int, s, + struct kernel_msghdr*, m, int, f) + LSS_INLINE _syscall3(int, sendmsg, int, s, + const struct kernel_msghdr*, m, int, f) + LSS_INLINE _syscall6(int, sendto, int, s, + const void*, m, size_t, l, + int, f, + const struct kernel_sockaddr*, a, int, t) + LSS_INLINE _syscall2(int, shutdown, int, s, + int, h) + LSS_INLINE _syscall3(int, socket, int, d, + int, t, int, p) + LSS_INLINE _syscall4(int, socketpair, int, d, + int, t, int, p, int*, s) + #endif + #if defined(__x86_64__) + /* Need to make sure loff_t isn't truncated to 32-bits under x32. 
*/ + LSS_INLINE int LSS_NAME(fallocate)(int f, int mode, loff_t offset, + loff_t len) { + LSS_BODY(4, int, fallocate, LSS_SYSCALL_ARG(f), LSS_SYSCALL_ARG(mode), + (uint64_t)(offset), (uint64_t)(len)); + } + + LSS_INLINE int LSS_NAME(getresgid32)(gid_t *rgid, + gid_t *egid, + gid_t *sgid) { + return LSS_NAME(getresgid)(rgid, egid, sgid); + } + + LSS_INLINE int LSS_NAME(getresuid32)(uid_t *ruid, + uid_t *euid, + uid_t *suid) { + return LSS_NAME(getresuid)(ruid, euid, suid); + } + + /* Need to make sure __off64_t isn't truncated to 32-bits under x32. */ + LSS_INLINE void* LSS_NAME(mmap)(void *s, size_t l, int p, int f, int d, + int64_t o) { + LSS_BODY(6, void*, mmap, LSS_SYSCALL_ARG(s), LSS_SYSCALL_ARG(l), + LSS_SYSCALL_ARG(p), LSS_SYSCALL_ARG(f), + LSS_SYSCALL_ARG(d), (uint64_t)(o)); + } + + LSS_INLINE _syscall4(int, newfstatat, int, d, + const char *, p, + struct kernel_stat*, b, int, f) + + LSS_INLINE int LSS_NAME(setfsgid32)(gid_t gid) { + return LSS_NAME(setfsgid)(gid); + } + + LSS_INLINE int LSS_NAME(setfsuid32)(uid_t uid) { + return LSS_NAME(setfsuid)(uid); + } + + LSS_INLINE int LSS_NAME(setresgid32)(gid_t rgid, gid_t egid, gid_t sgid) { + return LSS_NAME(setresgid)(rgid, egid, sgid); + } + + LSS_INLINE int LSS_NAME(setresuid32)(uid_t ruid, uid_t euid, uid_t suid) { + return LSS_NAME(setresuid)(ruid, euid, suid); + } + + LSS_INLINE int LSS_NAME(sigaction)(int signum, + const struct kernel_sigaction *act, + struct kernel_sigaction *oldact) { + /* On x86_64, the kernel requires us to always set our own + * SA_RESTORER in order to be able to return from a signal handler. + * This function must have a "magic" signature that the "gdb" + * (and maybe the kernel?) can recognize. + */ + if (act != NULL && !(act->sa_flags & SA_RESTORER)) { + struct kernel_sigaction a = *act; + a.sa_flags |= SA_RESTORER; + a.sa_restorer = LSS_NAME(restore_rt)(); + return LSS_NAME(rt_sigaction)(signum, &a, oldact, + (KERNEL_NSIG+7)/8); + } else { + return LSS_NAME(rt_sigaction)(signum, act, oldact, + (KERNEL_NSIG+7)/8); + } + } + + LSS_INLINE int LSS_NAME(sigpending)(struct kernel_sigset_t *set) { + return LSS_NAME(rt_sigpending)(set, (KERNEL_NSIG+7)/8); + } + + LSS_INLINE int LSS_NAME(sigprocmask)(int how, + const struct kernel_sigset_t *set, + struct kernel_sigset_t *oldset) { + return LSS_NAME(rt_sigprocmask)(how, set, oldset, (KERNEL_NSIG+7)/8); + } + + LSS_INLINE int LSS_NAME(sigsuspend)(const struct kernel_sigset_t *set) { + return LSS_NAME(rt_sigsuspend)(set, (KERNEL_NSIG+7)/8); + } + #endif + #if defined(__x86_64__) || defined(__ARM_ARCH_3__) || \ + defined(__ARM_EABI__) || defined(__aarch64__) || \ + (defined(__mips__) && _MIPS_SIM != _MIPS_SIM_ABI32) + LSS_INLINE _syscall4(pid_t, wait4, pid_t, p, + int*, s, int, o, + struct kernel_rusage*, r) + + LSS_INLINE pid_t LSS_NAME(waitpid)(pid_t pid, int *status, int options){ + return LSS_NAME(wait4)(pid, status, options, 0); + } + #endif + #if defined(__i386__) || defined(__x86_64__) || defined(__aarch64__) + LSS_INLINE _syscall4(int, openat, int, d, const char *, p, int, f, int, m) + LSS_INLINE _syscall3(int, unlinkat, int, d, const char *, p, int, f) + #endif + #if defined(__i386__) || defined(__ARM_ARCH_3__) || defined(__ARM_EABI__) + #define __NR__getresgid32 __NR_getresgid32 + #define __NR__getresuid32 __NR_getresuid32 + #define __NR__setfsgid32 __NR_setfsgid32 + #define __NR__setfsuid32 __NR_setfsuid32 + #define __NR__setresgid32 __NR_setresgid32 + #define __NR__setresuid32 __NR_setresuid32 +#if defined(__ARM_EABI__) + LSS_INLINE _syscall2(int, 
ugetrlimit, int, r, + struct kernel_rlimit*, l) +#endif + LSS_INLINE _syscall3(int, _getresgid32, gid_t *, r, + gid_t *, e, gid_t *, s) + LSS_INLINE _syscall3(int, _getresuid32, uid_t *, r, + uid_t *, e, uid_t *, s) + LSS_INLINE _syscall1(int, _setfsgid32, gid_t, f) + LSS_INLINE _syscall1(int, _setfsuid32, uid_t, f) + LSS_INLINE _syscall3(int, _setresgid32, gid_t, r, + gid_t, e, gid_t, s) + LSS_INLINE _syscall3(int, _setresuid32, uid_t, r, + uid_t, e, uid_t, s) + + LSS_INLINE int LSS_NAME(getresgid32)(gid_t *rgid, + gid_t *egid, + gid_t *sgid) { + int rc; + if ((rc = LSS_NAME(_getresgid32)(rgid, egid, sgid)) < 0 && + LSS_ERRNO == ENOSYS) { + if ((rgid == NULL) || (egid == NULL) || (sgid == NULL)) { + return EFAULT; + } + // Clear the high bits first, since getresgid only sets 16 bits + *rgid = *egid = *sgid = 0; + rc = LSS_NAME(getresgid)(rgid, egid, sgid); + } + return rc; + } + + LSS_INLINE int LSS_NAME(getresuid32)(uid_t *ruid, + uid_t *euid, + uid_t *suid) { + int rc; + if ((rc = LSS_NAME(_getresuid32)(ruid, euid, suid)) < 0 && + LSS_ERRNO == ENOSYS) { + if ((ruid == NULL) || (euid == NULL) || (suid == NULL)) { + return EFAULT; + } + // Clear the high bits first, since getresuid only sets 16 bits + *ruid = *euid = *suid = 0; + rc = LSS_NAME(getresuid)(ruid, euid, suid); + } + return rc; + } + + LSS_INLINE int LSS_NAME(setfsgid32)(gid_t gid) { + int rc; + if ((rc = LSS_NAME(_setfsgid32)(gid)) < 0 && + LSS_ERRNO == ENOSYS) { + if ((unsigned int)gid & ~0xFFFFu) { + rc = EINVAL; + } else { + rc = LSS_NAME(setfsgid)(gid); + } + } + return rc; + } + + LSS_INLINE int LSS_NAME(setfsuid32)(uid_t uid) { + int rc; + if ((rc = LSS_NAME(_setfsuid32)(uid)) < 0 && + LSS_ERRNO == ENOSYS) { + if ((unsigned int)uid & ~0xFFFFu) { + rc = EINVAL; + } else { + rc = LSS_NAME(setfsuid)(uid); + } + } + return rc; + } + + LSS_INLINE int LSS_NAME(setresgid32)(gid_t rgid, gid_t egid, gid_t sgid) { + int rc; + if ((rc = LSS_NAME(_setresgid32)(rgid, egid, sgid)) < 0 && + LSS_ERRNO == ENOSYS) { + if ((unsigned int)rgid & ~0xFFFFu || + (unsigned int)egid & ~0xFFFFu || + (unsigned int)sgid & ~0xFFFFu) { + rc = EINVAL; + } else { + rc = LSS_NAME(setresgid)(rgid, egid, sgid); + } + } + return rc; + } + + LSS_INLINE int LSS_NAME(setresuid32)(uid_t ruid, uid_t euid, uid_t suid) { + int rc; + if ((rc = LSS_NAME(_setresuid32)(ruid, euid, suid)) < 0 && + LSS_ERRNO == ENOSYS) { + if ((unsigned int)ruid & ~0xFFFFu || + (unsigned int)euid & ~0xFFFFu || + (unsigned int)suid & ~0xFFFFu) { + rc = EINVAL; + } else { + rc = LSS_NAME(setresuid)(ruid, euid, suid); + } + } + return rc; + } + #endif + LSS_INLINE int LSS_NAME(sigemptyset)(struct kernel_sigset_t *set) { + memset(&set->sig, 0, sizeof(set->sig)); + return 0; + } + + LSS_INLINE int LSS_NAME(sigfillset)(struct kernel_sigset_t *set) { + memset(&set->sig, -1, sizeof(set->sig)); + return 0; + } + + LSS_INLINE int LSS_NAME(sigaddset)(struct kernel_sigset_t *set, + int signum) { + if (signum < 1 || signum > (int)(8*sizeof(set->sig))) { + LSS_ERRNO = EINVAL; + return -1; + } else { + set->sig[(signum - 1)/(8*sizeof(set->sig[0]))] + |= 1UL << ((signum - 1) % (8*sizeof(set->sig[0]))); + return 0; + } + } + + LSS_INLINE int LSS_NAME(sigdelset)(struct kernel_sigset_t *set, + int signum) { + if (signum < 1 || signum > (int)(8*sizeof(set->sig))) { + LSS_ERRNO = EINVAL; + return -1; + } else { + set->sig[(signum - 1)/(8*sizeof(set->sig[0]))] + &= ~(1UL << ((signum - 1) % (8*sizeof(set->sig[0])))); + return 0; + } + } + + LSS_INLINE int LSS_NAME(sigismember)(struct kernel_sigset_t *set, + 
int signum) { + if (signum < 1 || signum > (int)(8*sizeof(set->sig))) { + LSS_ERRNO = EINVAL; + return -1; + } else { + return !!(set->sig[(signum - 1)/(8*sizeof(set->sig[0]))] & + (1UL << ((signum - 1) % (8*sizeof(set->sig[0]))))); + } + } + #if defined(__i386__) || defined(__ARM_ARCH_3__) || \ + defined(__ARM_EABI__) || \ + (defined(__mips__) && _MIPS_SIM == _MIPS_SIM_ABI32) || defined(__PPC__) + #define __NR__sigaction __NR_sigaction + #define __NR__sigpending __NR_sigpending + #define __NR__sigprocmask __NR_sigprocmask + #define __NR__sigsuspend __NR_sigsuspend + #define __NR__socketcall __NR_socketcall + LSS_INLINE _syscall2(int, fstat64, int, f, + struct kernel_stat64 *, b) + LSS_INLINE _syscall5(int, _llseek, uint, fd, + unsigned long, hi, unsigned long, lo, + loff_t *, res, uint, wh) +#if !defined(__ARM_EABI__) + LSS_INLINE _syscall1(void*, mmap, void*, a) +#endif + LSS_INLINE _syscall6(void*, mmap2, void*, s, + size_t, l, int, p, + int, f, int, d, + off_t, o) + LSS_INLINE _syscall3(int, _sigaction, int, s, + const struct kernel_old_sigaction*, a, + struct kernel_old_sigaction*, o) + LSS_INLINE _syscall1(int, _sigpending, unsigned long*, s) + LSS_INLINE _syscall3(int, _sigprocmask, int, h, + const unsigned long*, s, + unsigned long*, o) + #ifdef __PPC__ + LSS_INLINE _syscall1(int, _sigsuspend, unsigned long, s) + #else + LSS_INLINE _syscall3(int, _sigsuspend, const void*, a, + int, b, + unsigned long, s) + #endif + LSS_INLINE _syscall2(int, stat64, const char *, p, + struct kernel_stat64 *, b) + + LSS_INLINE int LSS_NAME(sigaction)(int signum, + const struct kernel_sigaction *act, + struct kernel_sigaction *oldact) { + int old_errno = LSS_ERRNO; + int rc; + struct kernel_sigaction a; + if (act != NULL) { + a = *act; + #ifdef __i386__ + /* On i386, the kernel requires us to always set our own + * SA_RESTORER when using realtime signals. Otherwise, it does not + * know how to return from a signal handler. This function must have + * a "magic" signature that the "gdb" (and maybe the kernel?) can + * recognize. + * Apparently, a SA_RESTORER is implicitly set by the kernel, when + * using non-realtime signals. + * + * TODO: Test whether ARM needs a restorer + */ + if (!(a.sa_flags & SA_RESTORER)) { + a.sa_flags |= SA_RESTORER; + a.sa_restorer = (a.sa_flags & SA_SIGINFO) + ? LSS_NAME(restore_rt)() : LSS_NAME(restore)(); + } + #endif + } + rc = LSS_NAME(rt_sigaction)(signum, act ? 
&a : act, oldact, + (KERNEL_NSIG+7)/8); + if (rc < 0 && LSS_ERRNO == ENOSYS) { + struct kernel_old_sigaction oa, ooa, *ptr_a = &oa, *ptr_oa = &ooa; + if (!act) { + ptr_a = NULL; + } else { + oa.sa_handler_ = act->sa_handler_; + memcpy(&oa.sa_mask, &act->sa_mask, sizeof(oa.sa_mask)); + #ifndef __mips__ + oa.sa_restorer = act->sa_restorer; + #endif + oa.sa_flags = act->sa_flags; + } + if (!oldact) { + ptr_oa = NULL; + } + LSS_ERRNO = old_errno; + rc = LSS_NAME(_sigaction)(signum, ptr_a, ptr_oa); + if (rc == 0 && oldact) { + if (act) { + memcpy(oldact, act, sizeof(*act)); + } else { + memset(oldact, 0, sizeof(*oldact)); + } + oldact->sa_handler_ = ptr_oa->sa_handler_; + oldact->sa_flags = ptr_oa->sa_flags; + memcpy(&oldact->sa_mask, &ptr_oa->sa_mask, sizeof(ptr_oa->sa_mask)); + #ifndef __mips__ + oldact->sa_restorer = ptr_oa->sa_restorer; + #endif + } + } + return rc; + } + + LSS_INLINE int LSS_NAME(sigpending)(struct kernel_sigset_t *set) { + int old_errno = LSS_ERRNO; + int rc = LSS_NAME(rt_sigpending)(set, (KERNEL_NSIG+7)/8); + if (rc < 0 && LSS_ERRNO == ENOSYS) { + LSS_ERRNO = old_errno; + LSS_NAME(sigemptyset)(set); + rc = LSS_NAME(_sigpending)(&set->sig[0]); + } + return rc; + } + + LSS_INLINE int LSS_NAME(sigprocmask)(int how, + const struct kernel_sigset_t *set, + struct kernel_sigset_t *oldset) { + int olderrno = LSS_ERRNO; + int rc = LSS_NAME(rt_sigprocmask)(how, set, oldset, (KERNEL_NSIG+7)/8); + if (rc < 0 && LSS_ERRNO == ENOSYS) { + LSS_ERRNO = olderrno; + if (oldset) { + LSS_NAME(sigemptyset)(oldset); + } + rc = LSS_NAME(_sigprocmask)(how, + set ? &set->sig[0] : NULL, + oldset ? &oldset->sig[0] : NULL); + } + return rc; + } + + LSS_INLINE int LSS_NAME(sigsuspend)(const struct kernel_sigset_t *set) { + int olderrno = LSS_ERRNO; + int rc = LSS_NAME(rt_sigsuspend)(set, (KERNEL_NSIG+7)/8); + if (rc < 0 && LSS_ERRNO == ENOSYS) { + LSS_ERRNO = olderrno; + rc = LSS_NAME(_sigsuspend)( + #ifndef __PPC__ + set, 0, + #endif + set->sig[0]); + } + return rc; + } + #endif + #if defined(__PPC__) + #undef LSS_SC_LOADARGS_0 + #define LSS_SC_LOADARGS_0(dummy...) + #undef LSS_SC_LOADARGS_1 + #define LSS_SC_LOADARGS_1(arg1) \ + __sc_4 = (unsigned long) (arg1) + #undef LSS_SC_LOADARGS_2 + #define LSS_SC_LOADARGS_2(arg1, arg2) \ + LSS_SC_LOADARGS_1(arg1); \ + __sc_5 = (unsigned long) (arg2) + #undef LSS_SC_LOADARGS_3 + #define LSS_SC_LOADARGS_3(arg1, arg2, arg3) \ + LSS_SC_LOADARGS_2(arg1, arg2); \ + __sc_6 = (unsigned long) (arg3) + #undef LSS_SC_LOADARGS_4 + #define LSS_SC_LOADARGS_4(arg1, arg2, arg3, arg4) \ + LSS_SC_LOADARGS_3(arg1, arg2, arg3); \ + __sc_7 = (unsigned long) (arg4) + #undef LSS_SC_LOADARGS_5 + #define LSS_SC_LOADARGS_5(arg1, arg2, arg3, arg4, arg5) \ + LSS_SC_LOADARGS_4(arg1, arg2, arg3, arg4); \ + __sc_8 = (unsigned long) (arg5) + #undef LSS_SC_BODY + #define LSS_SC_BODY(nr, type, opt, args...) 
\ + long __sc_ret, __sc_err; \ + { \ + register unsigned long __sc_0 __asm__ ("r0") = __NR_socketcall; \ + register unsigned long __sc_3 __asm__ ("r3") = opt; \ + register unsigned long __sc_4 __asm__ ("r4"); \ + register unsigned long __sc_5 __asm__ ("r5"); \ + register unsigned long __sc_6 __asm__ ("r6"); \ + register unsigned long __sc_7 __asm__ ("r7"); \ + register unsigned long __sc_8 __asm__ ("r8"); \ + LSS_SC_LOADARGS_##nr(args); \ + __asm__ __volatile__ \ + ("stwu 1, -48(1)\n\t" \ + "stw 4, 20(1)\n\t" \ + "stw 5, 24(1)\n\t" \ + "stw 6, 28(1)\n\t" \ + "stw 7, 32(1)\n\t" \ + "stw 8, 36(1)\n\t" \ + "addi 4, 1, 20\n\t" \ + "sc\n\t" \ + "mfcr %0" \ + : "=&r" (__sc_0), \ + "=&r" (__sc_3), "=&r" (__sc_4), \ + "=&r" (__sc_5), "=&r" (__sc_6), \ + "=&r" (__sc_7), "=&r" (__sc_8) \ + : LSS_ASMINPUT_##nr \ + : "cr0", "ctr", "memory"); \ + __sc_ret = __sc_3; \ + __sc_err = __sc_0; \ + } \ + LSS_RETURN(type, __sc_ret, __sc_err) + + LSS_INLINE ssize_t LSS_NAME(recvmsg)(int s,struct kernel_msghdr *msg, + int flags){ + LSS_SC_BODY(3, ssize_t, 17, s, msg, flags); + } + + LSS_INLINE ssize_t LSS_NAME(sendmsg)(int s, + const struct kernel_msghdr *msg, + int flags) { + LSS_SC_BODY(3, ssize_t, 16, s, msg, flags); + } + + // TODO(csilvers): why is this ifdef'ed out? +#if 0 + LSS_INLINE ssize_t LSS_NAME(sendto)(int s, const void *buf, size_t len, + int flags, + const struct kernel_sockaddr *to, + unsigned int tolen) { + LSS_BODY(6, ssize_t, 11, s, buf, len, flags, to, tolen); + } +#endif + + LSS_INLINE int LSS_NAME(shutdown)(int s, int how) { + LSS_SC_BODY(2, int, 13, s, how); + } + + LSS_INLINE int LSS_NAME(socket)(int domain, int type, int protocol) { + LSS_SC_BODY(3, int, 1, domain, type, protocol); + } + + LSS_INLINE int LSS_NAME(socketpair)(int d, int type, int protocol, + int sv[2]) { + LSS_SC_BODY(4, int, 8, d, type, protocol, sv); + } + #endif + #if defined(__ARM_EABI__) || defined (__aarch64__) + LSS_INLINE _syscall3(ssize_t, recvmsg, int, s, struct kernel_msghdr*, msg, + int, flags) + LSS_INLINE _syscall3(ssize_t, sendmsg, int, s, const struct kernel_msghdr*, + msg, int, flags) + LSS_INLINE _syscall6(ssize_t, sendto, int, s, const void*, buf, size_t,len, + int, flags, const struct kernel_sockaddr*, to, + unsigned int, tolen) + LSS_INLINE _syscall2(int, shutdown, int, s, int, how) + LSS_INLINE _syscall3(int, socket, int, domain, int, type, int, protocol) + LSS_INLINE _syscall4(int, socketpair, int, d, int, type, int, protocol, + int*, sv) + #endif + #if defined(__i386__) || defined(__ARM_ARCH_3__) || \ + (defined(__mips__) && _MIPS_SIM == _MIPS_SIM_ABI32) + #define __NR__socketcall __NR_socketcall + LSS_INLINE _syscall2(int, _socketcall, int, c, + va_list, a) + LSS_INLINE int LSS_NAME(socketcall)(int op, ...) 
{ + int rc; + va_list ap; + va_start(ap, op); + rc = LSS_NAME(_socketcall)(op, ap); + va_end(ap); + return rc; + } + + LSS_INLINE ssize_t LSS_NAME(recvmsg)(int s,struct kernel_msghdr *msg, + int flags){ + return (ssize_t)LSS_NAME(socketcall)(17, s, msg, flags); + } + + LSS_INLINE ssize_t LSS_NAME(sendmsg)(int s, + const struct kernel_msghdr *msg, + int flags) { + return (ssize_t)LSS_NAME(socketcall)(16, s, msg, flags); + } + + LSS_INLINE ssize_t LSS_NAME(sendto)(int s, const void *buf, size_t len, + int flags, + const struct kernel_sockaddr *to, + unsigned int tolen) { + return (ssize_t)LSS_NAME(socketcall)(11, s, buf, len, flags, to, tolen); + } + + LSS_INLINE int LSS_NAME(shutdown)(int s, int how) { + return LSS_NAME(socketcall)(13, s, how); + } + + LSS_INLINE int LSS_NAME(socket)(int domain, int type, int protocol) { + return LSS_NAME(socketcall)(1, domain, type, protocol); + } + + LSS_INLINE int LSS_NAME(socketpair)(int d, int type, int protocol, + int sv[2]) { + return LSS_NAME(socketcall)(8, d, type, protocol, sv); + } + #endif + #if defined(__i386__) || defined(__PPC__) + LSS_INLINE _syscall4(int, fstatat64, int, d, + const char *, p, + struct kernel_stat64 *, b, int, f) + #endif + #if defined(__i386__) || defined(__PPC__) || \ + (defined(__mips__) && _MIPS_SIM == _MIPS_SIM_ABI32) + LSS_INLINE _syscall3(pid_t, waitpid, pid_t, p, + int*, s, int, o) + #endif + #if defined(__mips__) + /* sys_pipe() on MIPS has non-standard calling conventions, as it returns + * both file handles through CPU registers. + */ + LSS_INLINE int LSS_NAME(pipe)(int *p) { + register unsigned long __v0 __asm__("$2") = __NR_pipe; + register unsigned long __v1 __asm__("$3"); + register unsigned long __r7 __asm__("$7"); + __asm__ __volatile__ ("syscall\n" + : "+r"(__v0), "=r"(__v1), "=r" (__r7) + : "0"(__v0) + : "$8", "$9", "$10", "$11", "$12", + "$13", "$14", "$15", "$24", "$25", "memory"); + if (__r7) { + unsigned long __errnovalue = __v0; + LSS_ERRNO = __errnovalue; + return -1; + } else { + p[0] = __v0; + p[1] = __v1; + return 0; + } + } + #elif !defined(__aarch64__) + // The unlink syscall has been deprecated on aarch64. We polyfill it below. + LSS_INLINE _syscall1(int, pipe, int *, p) + #endif + /* TODO(csilvers): see if ppc can/should support this as well */ + #if defined(__i386__) || defined(__ARM_ARCH_3__) || \ + defined(__ARM_EABI__) || \ + (defined(__mips__) && _MIPS_SIM != _MIPS_SIM_ABI64) + #define __NR__statfs64 __NR_statfs64 + #define __NR__fstatfs64 __NR_fstatfs64 + LSS_INLINE _syscall3(int, _statfs64, const char*, p, + size_t, s,struct kernel_statfs64*, b) + LSS_INLINE _syscall3(int, _fstatfs64, int, f, + size_t, s,struct kernel_statfs64*, b) + LSS_INLINE int LSS_NAME(statfs64)(const char *p, + struct kernel_statfs64 *b) { + return LSS_NAME(_statfs64)(p, sizeof(*b), b); + } + LSS_INLINE int LSS_NAME(fstatfs64)(int f,struct kernel_statfs64 *b) { + return LSS_NAME(_fstatfs64)(f, sizeof(*b), b); + } + #endif + + LSS_INLINE int LSS_NAME(execv)(const char *path, const char *const argv[]) { + extern char **environ; + return LSS_NAME(execve)(path, argv, (const char *const *)environ); + } + + LSS_INLINE pid_t LSS_NAME(gettid)(void) { + pid_t tid = LSS_NAME(_gettid)(); + if (tid != -1) { + return tid; + } + return LSS_NAME(getpid)(); + } + + LSS_INLINE void *LSS_NAME(mremap)(void *old_address, size_t old_size, + size_t new_size, int flags, ...) 
{ + va_list ap; + void *new_address, *rc; + va_start(ap, flags); + new_address = va_arg(ap, void *); + rc = LSS_NAME(_mremap)(old_address, old_size, new_size, + flags, new_address); + va_end(ap); + return rc; + } + + LSS_INLINE int LSS_NAME(ptrace_detach)(pid_t pid) { + /* PTRACE_DETACH can sometimes forget to wake up the tracee and it + * then sends job control signals to the real parent, rather than to + * the tracer. We reduce the risk of this happening by starting a + * whole new time slice, and then quickly sending a SIGCONT signal + * right after detaching from the tracee. + * + * We use tkill to ensure that we only issue a wakeup for the thread being + * detached. Large multi threaded apps can take a long time in the kernel + * processing SIGCONT. + */ + int rc, err; + LSS_NAME(sched_yield)(); + rc = LSS_NAME(ptrace)(PTRACE_DETACH, pid, (void *)0, (void *)0); + err = LSS_ERRNO; + LSS_NAME(tkill)(pid, SIGCONT); + /* Old systems don't have tkill */ + if (LSS_ERRNO == ENOSYS) + LSS_NAME(kill)(pid, SIGCONT); + LSS_ERRNO = err; + return rc; + } + + LSS_INLINE int LSS_NAME(raise)(int sig) { + return LSS_NAME(kill)(LSS_NAME(getpid)(), sig); + } + + LSS_INLINE int LSS_NAME(setpgrp)(void) { + return LSS_NAME(setpgid)(0, 0); + } + + LSS_INLINE int LSS_NAME(sysconf)(int name) { + extern int __getpagesize(void); + switch (name) { + case _SC_OPEN_MAX: { + struct kernel_rlimit limit; +#if defined(__ARM_EABI__) + return LSS_NAME(ugetrlimit)(RLIMIT_NOFILE, &limit) < 0 + ? 8192 : limit.rlim_cur; +#else + return LSS_NAME(getrlimit)(RLIMIT_NOFILE, &limit) < 0 + ? 8192 : limit.rlim_cur; +#endif + } + case _SC_PAGESIZE: + return __getpagesize(); + default: + LSS_ERRNO = ENOSYS; + return -1; + } + } + #if defined(__x86_64__) + /* Need to make sure loff_t isn't truncated to 32-bits under x32. */ + LSS_INLINE ssize_t LSS_NAME(pread64)(int f, void *b, size_t c, loff_t o) { + LSS_BODY(4, ssize_t, pread64, LSS_SYSCALL_ARG(f), LSS_SYSCALL_ARG(b), + LSS_SYSCALL_ARG(c), (uint64_t)(o)); + } + + LSS_INLINE ssize_t LSS_NAME(pwrite64)(int f, const void *b, size_t c, + loff_t o) { + LSS_BODY(4, ssize_t, pwrite64, LSS_SYSCALL_ARG(f), LSS_SYSCALL_ARG(b), + LSS_SYSCALL_ARG(c), (uint64_t)(o)); + } + + LSS_INLINE int LSS_NAME(readahead)(int f, loff_t o, unsigned c) { + LSS_BODY(3, int, readahead, LSS_SYSCALL_ARG(f), (uint64_t)(o), + LSS_SYSCALL_ARG(c)); + } + #elif defined(__mips__) && _MIPS_SIM == _MIPS_SIM_ABI64 + LSS_INLINE _syscall4(ssize_t, pread64, int, f, + void *, b, size_t, c, + loff_t, o) + LSS_INLINE _syscall4(ssize_t, pwrite64, int, f, + const void *, b, size_t, c, + loff_t, o) + LSS_INLINE _syscall3(int, readahead, int, f, + loff_t, o, unsigned, c) + #else + #define __NR__pread64 __NR_pread64 + #define __NR__pwrite64 __NR_pwrite64 + #define __NR__readahead __NR_readahead + #if defined(__ARM_EABI__) || defined(__mips__) + /* On ARM and MIPS, a 64-bit parameter has to be in an even-odd register + * pair. Hence these calls ignore their fourth argument (r3) so that their + * fifth and sixth make such a pair (r4,r5). 
+ */ + #define LSS_LLARG_PAD 0, + LSS_INLINE _syscall6(ssize_t, _pread64, int, f, + void *, b, size_t, c, + unsigned, skip, unsigned, o1, unsigned, o2) + LSS_INLINE _syscall6(ssize_t, _pwrite64, int, f, + const void *, b, size_t, c, + unsigned, skip, unsigned, o1, unsigned, o2) + LSS_INLINE _syscall5(int, _readahead, int, f, + unsigned, skip, + unsigned, o1, unsigned, o2, size_t, c) + #else + #define LSS_LLARG_PAD + LSS_INLINE _syscall5(ssize_t, _pread64, int, f, + void *, b, size_t, c, unsigned, o1, + unsigned, o2) + LSS_INLINE _syscall5(ssize_t, _pwrite64, int, f, + const void *, b, size_t, c, unsigned, o1, + long, o2) + LSS_INLINE _syscall4(int, _readahead, int, f, + unsigned, o1, unsigned, o2, size_t, c) + #endif + /* We force 64bit-wide parameters onto the stack, then access each + * 32-bit component individually. This guarantees that we build the + * correct parameters independent of the native byte-order of the + * underlying architecture. + */ + LSS_INLINE ssize_t LSS_NAME(pread64)(int fd, void *buf, size_t count, + loff_t off) { + union { loff_t off; unsigned arg[2]; } o = { off }; + return LSS_NAME(_pread64)(fd, buf, count, + LSS_LLARG_PAD o.arg[0], o.arg[1]); + } + LSS_INLINE ssize_t LSS_NAME(pwrite64)(int fd, const void *buf, + size_t count, loff_t off) { + union { loff_t off; unsigned arg[2]; } o = { off }; + return LSS_NAME(_pwrite64)(fd, buf, count, + LSS_LLARG_PAD o.arg[0], o.arg[1]); + } + LSS_INLINE int LSS_NAME(readahead)(int fd, loff_t off, int len) { + union { loff_t off; unsigned arg[2]; } o = { off }; + return LSS_NAME(_readahead)(fd, LSS_LLARG_PAD o.arg[0], o.arg[1], len); + } + #endif +#endif + +#if defined(__aarch64__) + LSS_INLINE _syscall3(int, dup3, int, s, int, d, int, f) + LSS_INLINE _syscall6(void *, mmap, void *, addr, size_t, length, int, prot, + int, flags, int, fd, int64_t, offset) + LSS_INLINE _syscall4(int, newfstatat, int, dirfd, const char *, pathname, + struct kernel_stat *, buf, int, flags) + LSS_INLINE _syscall2(int, pipe2, int *, pipefd, int, flags) + LSS_INLINE _syscall5(int, ppoll, struct kernel_pollfd *, u, + unsigned int, n, const struct kernel_timespec *, t, + const kernel_sigset_t *, sigmask, size_t, s) + LSS_INLINE _syscall4(int, readlinkat, int, d, const char *, p, char *, b, + size_t, s) +#endif + +/* + * Polyfills for deprecated syscalls. 
+ */ + +#if defined(__aarch64__) + LSS_INLINE int LSS_NAME(dup2)(int s, int d) { + return LSS_NAME(dup3)(s, d, 0); + } + + LSS_INLINE int LSS_NAME(open)(const char *pathname, int flags, int mode) { + return LSS_NAME(openat)(AT_FDCWD, pathname, flags, mode); + } + + LSS_INLINE int LSS_NAME(unlink)(const char *pathname) { + return LSS_NAME(unlinkat)(AT_FDCWD, pathname, 0); + } + + LSS_INLINE int LSS_NAME(readlink)(const char *pathname, char *buffer, + size_t size) { + return LSS_NAME(readlinkat)(AT_FDCWD, pathname, buffer, size); + } + + LSS_INLINE pid_t LSS_NAME(pipe)(int *pipefd) { + return LSS_NAME(pipe2)(pipefd, 0); + } + + LSS_INLINE int LSS_NAME(poll)(struct kernel_pollfd *fds, unsigned int nfds, + int timeout) { + struct kernel_timespec timeout_ts; + struct kernel_timespec *timeout_ts_p = NULL; + + if (timeout >= 0) { + timeout_ts.tv_sec = timeout / 1000; + timeout_ts.tv_nsec = (timeout % 1000) * 1000000; + timeout_ts_p = &timeout_ts; + } + return LSS_NAME(ppoll)(fds, nfds, timeout_ts_p, NULL, 0); + } + + LSS_INLINE int LSS_NAME(stat)(const char *pathname, + struct kernel_stat *buf) { + return LSS_NAME(newfstatat)(AT_FDCWD, pathname, buf, 0); + } + + LSS_INLINE pid_t LSS_NAME(fork)(void) { + // No fork syscall on aarch64 - implement by means of the clone syscall. + // Note that this does not reset glibc's cached view of the PID/TID, so + // some glibc interfaces might go wrong in the forked subprocess. + int flags = SIGCHLD; + void *child_stack = NULL; + void *parent_tidptr = NULL; + void *newtls = NULL; + void *child_tidptr = NULL; + + LSS_REG(0, flags); + LSS_REG(1, child_stack); + LSS_REG(2, parent_tidptr); + LSS_REG(3, newtls); + LSS_REG(4, child_tidptr); + LSS_BODY(pid_t, clone, "r"(__r0), "r"(__r1), "r"(__r2), "r"(__r3), + "r"(__r4)); + } +#endif + +#ifdef __ANDROID__ + /* These restore the original values of these macros saved by the + * corresponding #pragma push_macro near the top of this file. */ +# pragma pop_macro("stat64") +# pragma pop_macro("fstat64") +# pragma pop_macro("lstat64") +#endif + +#if defined(__cplusplus) && !defined(SYS_CPLUSPLUS) +} +#endif + +#endif +#endif diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/Makefile.am b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/Makefile.am new file mode 100644 index 0000000000..327339a951 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/Makefile.am @@ -0,0 +1,363 @@ +## Process this file with automake to produce Makefile.in + +if HAVE_ZLIB +GZCHECKPROGRAMS = zcgzip zcgunzip +GZHEADERS = google/protobuf/io/gzip_stream.h +GZTESTS = google/protobuf/io/gzip_stream_unittest.sh +else +GZCHECKPROGRAMS = +GZHEADERS = +GZTESTS = +endif + +if GCC +# These are good warnings to turn on by default +NO_OPT_CXXFLAGS = $(PTHREAD_CFLAGS) -Wall -Wwrite-strings -Woverloaded-virtual -Wno-sign-compare +else +NO_OPT_CXXFLAGS = $(PTHREAD_CFLAGS) +endif + +AM_CXXFLAGS = $(NO_OPT_CXXFLAGS) $(PROTOBUF_OPT_FLAG) + +AM_LDFLAGS = $(PTHREAD_CFLAGS) + +# If I say "dist_include_DATA", automake complains that $(includedir) is not +# a "legitimate" directory for DATA. Screw you, automake. +protodir = $(includedir) +nobase_dist_proto_DATA = google/protobuf/descriptor.proto \ + google/protobuf/compiler/plugin.proto + +# Not sure why these don't get cleaned automatically. 
+clean-local: + rm -f *.loT + +CLEANFILES = $(protoc_outputs) unittest_proto_middleman \ + testzip.jar testzip.list testzip.proto testzip.zip + +MAINTAINERCLEANFILES = \ + Makefile.in + +nobase_include_HEADERS = \ + google/protobuf/stubs/common.h \ + google/protobuf/stubs/once.h \ + google/protobuf/descriptor.h \ + google/protobuf/descriptor.pb.h \ + google/protobuf/descriptor_database.h \ + google/protobuf/dynamic_message.h \ + google/protobuf/extension_set.h \ + google/protobuf/generated_message_util.h \ + google/protobuf/generated_message_reflection.h \ + google/protobuf/message.h \ + google/protobuf/message_lite.h \ + google/protobuf/reflection_ops.h \ + google/protobuf/repeated_field.h \ + google/protobuf/service.h \ + google/protobuf/text_format.h \ + google/protobuf/unknown_field_set.h \ + google/protobuf/wire_format.h \ + google/protobuf/wire_format_lite.h \ + google/protobuf/wire_format_lite_inl.h \ + google/protobuf/io/coded_stream.h \ + $(GZHEADERS) \ + google/protobuf/io/printer.h \ + google/protobuf/io/tokenizer.h \ + google/protobuf/io/zero_copy_stream.h \ + google/protobuf/io/zero_copy_stream_impl.h \ + google/protobuf/io/zero_copy_stream_impl_lite.h \ + google/protobuf/compiler/code_generator.h \ + google/protobuf/compiler/command_line_interface.h \ + google/protobuf/compiler/importer.h \ + google/protobuf/compiler/parser.h \ + google/protobuf/compiler/plugin.h \ + google/protobuf/compiler/plugin.pb.h \ + google/protobuf/compiler/cpp/cpp_generator.h \ + google/protobuf/compiler/java/java_generator.h \ + google/protobuf/compiler/python/python_generator.h + +lib_LTLIBRARIES = libprotobuf-lite.la libprotobuf.la libprotoc.la + +libprotobuf_lite_la_LIBADD = $(PTHREAD_LIBS) +libprotobuf_lite_la_LDFLAGS = -version-info 7:0:0 -export-dynamic -no-undefined +libprotobuf_lite_la_SOURCES = \ + google/protobuf/stubs/common.cc \ + google/protobuf/stubs/once.cc \ + google/protobuf/stubs/hash.h \ + google/protobuf/stubs/map-util.h \ + google/protobuf/stubs/stl_util-inl.h \ + google/protobuf/extension_set.cc \ + google/protobuf/generated_message_util.cc \ + google/protobuf/message_lite.cc \ + google/protobuf/repeated_field.cc \ + google/protobuf/wire_format_lite.cc \ + google/protobuf/io/coded_stream.cc \ + google/protobuf/io/coded_stream_inl.h \ + google/protobuf/io/zero_copy_stream.cc \ + google/protobuf/io/zero_copy_stream_impl_lite.cc + +libprotobuf_la_LIBADD = $(PTHREAD_LIBS) +libprotobuf_la_LDFLAGS = -version-info 7:0:0 -export-dynamic -no-undefined +libprotobuf_la_SOURCES = \ + $(libprotobuf_lite_la_SOURCES) \ + google/protobuf/stubs/strutil.cc \ + google/protobuf/stubs/strutil.h \ + google/protobuf/stubs/substitute.cc \ + google/protobuf/stubs/substitute.h \ + google/protobuf/stubs/structurally_valid.cc \ + google/protobuf/descriptor.cc \ + google/protobuf/descriptor.pb.cc \ + google/protobuf/descriptor_database.cc \ + google/protobuf/dynamic_message.cc \ + google/protobuf/extension_set_heavy.cc \ + google/protobuf/generated_message_reflection.cc \ + google/protobuf/message.cc \ + google/protobuf/reflection_ops.cc \ + google/protobuf/service.cc \ + google/protobuf/text_format.cc \ + google/protobuf/unknown_field_set.cc \ + google/protobuf/wire_format.cc \ + google/protobuf/io/gzip_stream.cc \ + google/protobuf/io/printer.cc \ + google/protobuf/io/tokenizer.cc \ + google/protobuf/io/zero_copy_stream_impl.cc \ + google/protobuf/compiler/importer.cc \ + google/protobuf/compiler/parser.cc + +libprotoc_la_LIBADD = $(PTHREAD_LIBS) libprotobuf.la +libprotoc_la_LDFLAGS = -version-info 7:0:0 
-export-dynamic -no-undefined +libprotoc_la_SOURCES = \ + google/protobuf/compiler/code_generator.cc \ + google/protobuf/compiler/command_line_interface.cc \ + google/protobuf/compiler/plugin.cc \ + google/protobuf/compiler/plugin.pb.cc \ + google/protobuf/compiler/subprocess.cc \ + google/protobuf/compiler/subprocess.h \ + google/protobuf/compiler/zip_writer.cc \ + google/protobuf/compiler/zip_writer.h \ + google/protobuf/compiler/cpp/cpp_enum.cc \ + google/protobuf/compiler/cpp/cpp_enum.h \ + google/protobuf/compiler/cpp/cpp_enum_field.cc \ + google/protobuf/compiler/cpp/cpp_enum_field.h \ + google/protobuf/compiler/cpp/cpp_extension.cc \ + google/protobuf/compiler/cpp/cpp_extension.h \ + google/protobuf/compiler/cpp/cpp_field.cc \ + google/protobuf/compiler/cpp/cpp_field.h \ + google/protobuf/compiler/cpp/cpp_file.cc \ + google/protobuf/compiler/cpp/cpp_file.h \ + google/protobuf/compiler/cpp/cpp_generator.cc \ + google/protobuf/compiler/cpp/cpp_helpers.cc \ + google/protobuf/compiler/cpp/cpp_helpers.h \ + google/protobuf/compiler/cpp/cpp_message.cc \ + google/protobuf/compiler/cpp/cpp_message.h \ + google/protobuf/compiler/cpp/cpp_message_field.cc \ + google/protobuf/compiler/cpp/cpp_message_field.h \ + google/protobuf/compiler/cpp/cpp_primitive_field.cc \ + google/protobuf/compiler/cpp/cpp_primitive_field.h \ + google/protobuf/compiler/cpp/cpp_service.cc \ + google/protobuf/compiler/cpp/cpp_service.h \ + google/protobuf/compiler/cpp/cpp_string_field.cc \ + google/protobuf/compiler/cpp/cpp_string_field.h \ + google/protobuf/compiler/java/java_enum.cc \ + google/protobuf/compiler/java/java_enum.h \ + google/protobuf/compiler/java/java_enum_field.cc \ + google/protobuf/compiler/java/java_enum_field.h \ + google/protobuf/compiler/java/java_extension.cc \ + google/protobuf/compiler/java/java_extension.h \ + google/protobuf/compiler/java/java_field.cc \ + google/protobuf/compiler/java/java_field.h \ + google/protobuf/compiler/java/java_file.cc \ + google/protobuf/compiler/java/java_file.h \ + google/protobuf/compiler/java/java_generator.cc \ + google/protobuf/compiler/java/java_helpers.cc \ + google/protobuf/compiler/java/java_helpers.h \ + google/protobuf/compiler/java/java_message.cc \ + google/protobuf/compiler/java/java_message.h \ + google/protobuf/compiler/java/java_message_field.cc \ + google/protobuf/compiler/java/java_message_field.h \ + google/protobuf/compiler/java/java_primitive_field.cc \ + google/protobuf/compiler/java/java_primitive_field.h \ + google/protobuf/compiler/java/java_service.cc \ + google/protobuf/compiler/java/java_service.h \ + google/protobuf/compiler/java/java_string_field.cc \ + google/protobuf/compiler/java/java_string_field.h \ + google/protobuf/compiler/python/python_generator.cc + +bin_PROGRAMS = protoc +protoc_LDADD = $(PTHREAD_LIBS) libprotobuf.la libprotoc.la +protoc_SOURCES = google/protobuf/compiler/main.cc + +# Tests ============================================================== + +protoc_inputs = \ + google/protobuf/unittest.proto \ + google/protobuf/unittest_empty.proto \ + google/protobuf/unittest_import.proto \ + google/protobuf/unittest_mset.proto \ + google/protobuf/unittest_optimize_for.proto \ + google/protobuf/unittest_embed_optimize_for.proto \ + google/protobuf/unittest_custom_options.proto \ + google/protobuf/unittest_lite.proto \ + google/protobuf/unittest_import_lite.proto \ + google/protobuf/unittest_lite_imports_nonlite.proto \ + google/protobuf/unittest_no_generic_services.proto \ + 
google/protobuf/compiler/cpp/cpp_test_bad_identifiers.proto + +EXTRA_DIST = \ + $(protoc_inputs) \ + solaris/libstdc++.la \ + google/protobuf/io/gzip_stream.h \ + google/protobuf/io/gzip_stream_unittest.sh \ + google/protobuf/testdata/golden_message \ + google/protobuf/testdata/golden_packed_fields_message \ + google/protobuf/testdata/text_format_unittest_data.txt \ + google/protobuf/testdata/text_format_unittest_extensions_data.txt \ + google/protobuf/package_info.h \ + google/protobuf/io/package_info.h \ + google/protobuf/compiler/package_info.h \ + google/protobuf/compiler/zip_output_unittest.sh \ + google/protobuf/unittest_enormous_descriptor.proto + +protoc_lite_outputs = \ + google/protobuf/unittest_lite.pb.cc \ + google/protobuf/unittest_lite.pb.h \ + google/protobuf/unittest_import_lite.pb.cc \ + google/protobuf/unittest_import_lite.pb.h + +protoc_outputs = \ + $(protoc_lite_outputs) \ + google/protobuf/unittest.pb.cc \ + google/protobuf/unittest.pb.h \ + google/protobuf/unittest_empty.pb.cc \ + google/protobuf/unittest_empty.pb.h \ + google/protobuf/unittest_import.pb.cc \ + google/protobuf/unittest_import.pb.h \ + google/protobuf/unittest_mset.pb.cc \ + google/protobuf/unittest_mset.pb.h \ + google/protobuf/unittest_optimize_for.pb.cc \ + google/protobuf/unittest_optimize_for.pb.h \ + google/protobuf/unittest_embed_optimize_for.pb.cc \ + google/protobuf/unittest_embed_optimize_for.pb.h \ + google/protobuf/unittest_custom_options.pb.cc \ + google/protobuf/unittest_custom_options.pb.h \ + google/protobuf/unittest_lite_imports_nonlite.pb.cc \ + google/protobuf/unittest_lite_imports_nonlite.pb.h \ + google/protobuf/unittest_no_generic_services.pb.cc \ + google/protobuf/unittest_no_generic_services.pb.h \ + google/protobuf/compiler/cpp/cpp_test_bad_identifiers.pb.cc \ + google/protobuf/compiler/cpp/cpp_test_bad_identifiers.pb.h + +BUILT_SOURCES = $(protoc_outputs) + +if USE_EXTERNAL_PROTOC + +unittest_proto_middleman: $(protoc_inputs) + $(PROTOC) -I$(srcdir) --cpp_out=. $^ + touch unittest_proto_middleman + +else + +# We have to cd to $(srcdir) before executing protoc because $(protoc_inputs) is +# relative to srcdir, which may not be the same as the current directory when +# building out-of-tree. +unittest_proto_middleman: protoc$(EXEEXT) $(protoc_inputs) + oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/protoc$(EXEEXT) -I. --cpp_out=$$oldpwd $(protoc_inputs) ) + touch unittest_proto_middleman + +endif + +$(protoc_outputs): unittest_proto_middleman + +COMMON_TEST_SOURCES = \ + google/protobuf/test_util.cc \ + google/protobuf/test_util.h \ + google/protobuf/testing/googletest.cc \ + google/protobuf/testing/googletest.h \ + google/protobuf/testing/file.cc \ + google/protobuf/testing/file.h + +check_PROGRAMS = protoc protobuf-test protobuf-lazy-descriptor-test \ + protobuf-lite-test test_plugin $(GZCHECKPROGRAMS) +protobuf_test_LDADD = $(PTHREAD_LIBS) libprotobuf.la libprotoc.la \ + $(top_builddir)/gtest/lib/libgtest.la \ + $(top_builddir)/gtest/lib/libgtest_main.la +protobuf_test_CPPFLAGS = -I$(top_srcdir)/gtest/include \ + -I$(top_builddir)/gtest/include +# Disable optimization for tests unless the user explicitly asked for it, +# since test_util.cc takes forever to compile with optimization (with GCC). +# See configure.ac for more info. 
+protobuf_test_CXXFLAGS = $(NO_OPT_CXXFLAGS) +protobuf_test_SOURCES = \ + google/protobuf/stubs/common_unittest.cc \ + google/protobuf/stubs/once_unittest.cc \ + google/protobuf/stubs/strutil_unittest.cc \ + google/protobuf/stubs/structurally_valid_unittest.cc \ + google/protobuf/descriptor_database_unittest.cc \ + google/protobuf/descriptor_unittest.cc \ + google/protobuf/dynamic_message_unittest.cc \ + google/protobuf/extension_set_unittest.cc \ + google/protobuf/generated_message_reflection_unittest.cc \ + google/protobuf/message_unittest.cc \ + google/protobuf/reflection_ops_unittest.cc \ + google/protobuf/repeated_field_unittest.cc \ + google/protobuf/text_format_unittest.cc \ + google/protobuf/unknown_field_set_unittest.cc \ + google/protobuf/wire_format_unittest.cc \ + google/protobuf/io/coded_stream_unittest.cc \ + google/protobuf/io/printer_unittest.cc \ + google/protobuf/io/tokenizer_unittest.cc \ + google/protobuf/io/zero_copy_stream_unittest.cc \ + google/protobuf/compiler/command_line_interface_unittest.cc \ + google/protobuf/compiler/importer_unittest.cc \ + google/protobuf/compiler/mock_code_generator.cc \ + google/protobuf/compiler/mock_code_generator.h \ + google/protobuf/compiler/parser_unittest.cc \ + google/protobuf/compiler/cpp/cpp_bootstrap_unittest.cc \ + google/protobuf/compiler/cpp/cpp_unittest.cc \ + google/protobuf/compiler/cpp/cpp_plugin_unittest.cc \ + google/protobuf/compiler/java/java_plugin_unittest.cc \ + google/protobuf/compiler/python/python_plugin_unittest.cc \ + $(COMMON_TEST_SOURCES) +nodist_protobuf_test_SOURCES = $(protoc_outputs) + +# Run cpp_unittest again with PROTOBUF_TEST_NO_DESCRIPTORS defined. +protobuf_lazy_descriptor_test_LDADD = $(PTHREAD_LIBS) libprotobuf.la \ + $(top_builddir)/gtest/lib/libgtest.la \ + $(top_builddir)/gtest/lib/libgtest_main.la +protobuf_lazy_descriptor_test_CPPFLAGS = -I$(top_srcdir)/gtest/include \ + -I$(top_builddir)/gtest/include \ + -DPROTOBUF_TEST_NO_DESCRIPTORS +protobuf_lazy_descriptor_test_CXXFLAGS = $(NO_OPT_CXXFLAGS) +protobuf_lazy_descriptor_test_SOURCES = \ + google/protobuf/compiler/cpp/cpp_unittest.cc \ + $(COMMON_TEST_SOURCES) +nodist_protobuf_lazy_descriptor_test_SOURCES = $(protoc_outputs) + +# Build lite_unittest separately, since it doesn't use gtest. +protobuf_lite_test_LDADD = $(PTHREAD_LIBS) libprotobuf-lite.la +protobuf_lite_test_CXXFLAGS = $(NO_OPT_CXXFLAGS) +protobuf_lite_test_SOURCES = \ + google/protobuf/lite_unittest.cc \ + google/protobuf/test_util_lite.cc \ + google/protobuf/test_util_lite.h +nodist_protobuf_lite_test_SOURCES = $(protoc_lite_outputs) + +# Test plugin binary. 
+test_plugin_LDADD = $(PTHREAD_LIBS) libprotobuf.la libprotoc.la \ + $(top_builddir)/gtest/lib/libgtest.la +test_plugin_CPPFLAGS = -I$(top_srcdir)/gtest/include \ + -I$(top_builddir)/gtest/include +test_plugin_SOURCES = \ + google/protobuf/compiler/mock_code_generator.cc \ + google/protobuf/testing/file.cc \ + google/protobuf/testing/file.h \ + google/protobuf/compiler/test_plugin.cc + +if HAVE_ZLIB +zcgzip_LDADD = $(PTHREAD_LIBS) libprotobuf.la +zcgzip_SOURCES = google/protobuf/testing/zcgzip.cc + +zcgunzip_LDADD = $(PTHREAD_LIBS) libprotobuf.la +zcgunzip_SOURCES = google/protobuf/testing/zcgunzip.cc +endif + +TESTS = protobuf-test protobuf-lazy-descriptor-test protobuf-lite-test \ + google/protobuf/compiler/zip_output_unittest.sh $(GZTESTS) diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/SEBS b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/SEBS new file mode 100644 index 0000000000..ba33c7327e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/SEBS @@ -0,0 +1,240 @@ +# **EXPERIMENTAL** +# +# See http://sebs.googlecode.com +# +# This is an experimental build definition file using the SEBS build system. +# I (Kenton Varda, maintainer of Protocol Buffers) happen to be the author of +# SEBS, though SEBS is not a Google project. I'm sticking this file in +# protobuf's SVN because that's the easiest place for me to put it, and it +# shouldn't harm anyone. This file is not included in the distribution. +# +# Currently, to use this file, you must generate config.h and put it at the +# top level of the source tree. + +_cpp = sebs.import_("//sebs/cpp.sebs") + +# ==================================================================== +# Public targets + +protobuf_lite = _cpp.Library( + name = "protobuf-lite", + srcs = [ "stubs/common.cc", + "stubs/once.cc", + "stubs/hash.cc", + "stubs/hash.h", + "stubs/map-util.h", + "stubs/stl_util-inl.h", + "extension_set.cc", + "generated_message_util.cc", + "message_lite.cc", + "repeated_field.cc", + "wire_format_lite.cc", + "io/coded_stream.cc", + "io/zero_copy_stream.cc", + "io/zero_copy_stream_impl_lite.cc" ], + deps = [ _cpp.SystemLibrary(name = "pthread") ]) + +protobuf = _cpp.Library( + name = "protobuf", + srcs = [ "stubs/strutil.cc", + "stubs/strutil.h", + "stubs/substitute.cc", + "stubs/substitute.h", + "stubs/structurally_valid.cc", + "descriptor.cc", + "descriptor.pb.cc", + "descriptor_database.cc", + "dynamic_message.cc", + "extension_set_heavy.cc", + "generated_message_reflection.cc", + "message.cc", + "reflection_ops.cc", + "service.cc", + "text_format.cc", + "unknown_field_set.cc", + "wire_format.cc", + "io/gzip_stream.cc", + "io/printer.cc", + "io/tokenizer.cc", + "io/zero_copy_stream_impl.cc", + "compiler/importer.cc", + "compiler/parser.cc" ], + deps = [ protobuf_lite, + _cpp.SystemLibrary(name = "z") ]) + +libprotoc = _cpp.Library( + name = "protoc", + srcs = [ "compiler/code_generator.cc", + "compiler/command_line_interface.cc", + "compiler/cpp/cpp_enum.cc", + "compiler/cpp/cpp_enum.h", + "compiler/cpp/cpp_enum_field.cc", + "compiler/cpp/cpp_enum_field.h", + "compiler/cpp/cpp_extension.cc", + "compiler/cpp/cpp_extension.h", + "compiler/cpp/cpp_field.cc", + "compiler/cpp/cpp_field.h", + "compiler/cpp/cpp_file.cc", + "compiler/cpp/cpp_file.h", + "compiler/cpp/cpp_generator.cc", + "compiler/cpp/cpp_helpers.cc", + "compiler/cpp/cpp_helpers.h", + "compiler/cpp/cpp_message.cc", + 
"compiler/cpp/cpp_message.h", + "compiler/cpp/cpp_message_field.cc", + "compiler/cpp/cpp_message_field.h", + "compiler/cpp/cpp_primitive_field.cc", + "compiler/cpp/cpp_primitive_field.h", + "compiler/cpp/cpp_service.cc", + "compiler/cpp/cpp_service.h", + "compiler/cpp/cpp_string_field.cc", + "compiler/cpp/cpp_string_field.h", + "compiler/java/java_enum.cc", + "compiler/java/java_enum.h", + "compiler/java/java_enum_field.cc", + "compiler/java/java_enum_field.h", + "compiler/java/java_extension.cc", + "compiler/java/java_extension.h", + "compiler/java/java_field.cc", + "compiler/java/java_field.h", + "compiler/java/java_file.cc", + "compiler/java/java_file.h", + "compiler/java/java_generator.cc", + "compiler/java/java_helpers.cc", + "compiler/java/java_helpers.h", + "compiler/java/java_message.cc", + "compiler/java/java_message.h", + "compiler/java/java_message_field.cc", + "compiler/java/java_message_field.h", + "compiler/java/java_primitive_field.cc", + "compiler/java/java_primitive_field.h", + "compiler/java/java_service.cc", + "compiler/java/java_service.h", + "compiler/python/python_generator.cc" ], + deps = [ protobuf ]) + +protoc = _cpp.Binary( + name = "protoc", + srcs = [ "compiler/main.cc" ], + deps = [ libprotoc ]) + +# ==================================================================== +# ProtobufLibrary rule class + +class ProtobufLibrary(sebs.Rule): + argument_spec = sebs.ArgumentSpec(srcs = [sebs.Artifact], + deps = ([sebs.Rule], []), + lite = (bool, False)) + + def _expand(self, args): + for dep in args.deps: + if not isinstance(dep, ProtobufLibrary): + raise sebs.DefinitionError( + "Dependency of ProtobufLibrary is not a ProtobufLibrary: %s" % dep) + + protoc.expand_once() + + # We must build protoc for the host configuration to allow cross-compiling. + host_protoc = self.context.configured_artifact(protoc.binary, "host") + + protoc_action = self.context.action(self, "protobuf") + protoc_args = [host_protoc, "-Isrc", "-Itmp", "-Iinclude","--cpp_out=tmp"] + + cpp_srcs = [] + for src in args.srcs: + protoc_args.append(src) + + # We cannot build .proto files from other packages because the .pb.cc + # and .pb.h files would be written to that package, and we aren't allowed + # to write to other packages. 
+ if self.context.local_filename(src) is None: + raise sebs.DefinitionError( + "Source file is not in this package: %s" % src) + + cc_artifact = self.context.derived_artifact(src, ".pb.cc", protoc_action) + header_artifact = self.context.derived_artifact( + src, ".pb.h", protoc_action) + + cpp_srcs.append(cc_artifact) + cpp_srcs.append(header_artifact) + + protoc_action.set_command( + sebs.SubprocessCommand(protoc_action, protoc_args, implicit = cpp_srcs)) + + deps = list(args.deps) + if args.lite: + deps.append(protobuf_lite) + else: + deps.append(protobuf) + + self.__cpp_library = _cpp.Library(srcs = cpp_srcs, deps = deps, + context = self.context) + self.__cpp_library.label = self.label + self.outputs = [] + + def as_cpp_library(self): + self.expand_once() + return self.__cpp_library + +# ==================================================================== +# Tests + +_lite_test_protos = ProtobufLibrary( + srcs = [ "unittest_lite.proto", + "unittest_import_lite.proto" ], + lite = True) +_test_protos = ProtobufLibrary( + srcs = [ "unittest.proto", + "unittest_empty.proto", + "unittest_import.proto", + "unittest_mset.proto", + "unittest_optimize_for.proto", + "unittest_embed_optimize_for.proto", + "unittest_custom_options.proto", + "unittest_lite_imports_nonlite.proto", + "compiler/cpp/cpp_test_bad_identifiers.proto" ], + deps = [ _lite_test_protos ]) + +_test_util = _cpp.Library( + name = "test_util", + srcs = [ "test_util.cc", + "test_util.h", + "testing/googletest.cc", + "testing/googletest.h", + "testing/file.cc", + "testing/file.h" ], + deps = [ protobuf, _test_protos, _cpp.SystemLibrary(name = "gtest")] ) + +protobuf_lite_test = _cpp.Test( + srcs = [ "lite_unittest.cc", + "test_util_lite.cc", + "test_util_lite.h" ], + deps = [ _lite_test_protos ]) + +protobuf_test = _cpp.Test( + srcs = [ "stubs/common_unittest.cc", + "stubs/once_unittest.cc", + "stubs/strutil_unittest.cc", + "stubs/structurally_valid_unittest.cc", + "descriptor_database_unittest.cc", + "descriptor_unittest.cc", + "dynamic_message_unittest.cc", + "extension_set_unittest.cc", + "generated_message_reflection_unittest.cc", + "message_unittest.cc", + "reflection_ops_unittest.cc", + "repeated_field_unittest.cc", + "text_format_unittest.cc", + "unknown_field_set_unittest.cc", + "wire_format_unittest.cc", + "io/coded_stream_unittest.cc", + "io/printer_unittest.cc", + "io/tokenizer_unittest.cc", + "io/zero_copy_stream_unittest.cc", + "compiler/command_line_interface_unittest.cc", + "compiler/importer_unittest.cc", + "compiler/parser_unittest.cc", + "compiler/cpp/cpp_bootstrap_unittest.cc", + "compiler/cpp/cpp_unittest.cc" ], + deps = [ protobuf, libprotoc, _test_util, + _cpp.SystemLibrary(name = "gtest_main") ]) diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/code_generator.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/code_generator.cc new file mode 100644 index 0000000000..455c239a93 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/code_generator.cc @@ -0,0 +1,80 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include + +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { + +CodeGenerator::~CodeGenerator() {} +GeneratorContext::~GeneratorContext() {} + +io::ZeroCopyOutputStream* GeneratorContext::OpenForInsert( + const string& filename, const string& insertion_point) { + GOOGLE_LOG(FATAL) << "This GeneratorContext does not support insertion."; + return NULL; // make compiler happy +} + +void GeneratorContext::ListParsedFiles( + vector* output) { + GOOGLE_LOG(FATAL) << "This GeneratorContext does not support ListParsedFiles"; +} + +// Parses a set of comma-delimited name/value pairs. +void ParseGeneratorParameter(const string& text, + vector >* output) { + vector parts; + SplitStringUsing(text, ",", &parts); + + for (int i = 0; i < parts.size(); i++) { + string::size_type equals_pos = parts[i].find_first_of('='); + pair value; + if (equals_pos == string::npos) { + value.first = parts[i]; + value.second = ""; + } else { + value.first = parts[i].substr(0, equals_pos); + value.second = parts[i].substr(equals_pos + 1); + } + output->push_back(value); + } +} + +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/code_generator.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/code_generator.h new file mode 100644 index 0000000000..252f68d1dc --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/code_generator.h @@ -0,0 +1,142 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Defines the abstract interface implemented by each of the language-specific +// code generators. + +#ifndef GOOGLE_PROTOBUF_COMPILER_CODE_GENERATOR_H__ +#define GOOGLE_PROTOBUF_COMPILER_CODE_GENERATOR_H__ + +#include +#include +#include +#include + +namespace google { +namespace protobuf { + +namespace io { class ZeroCopyOutputStream; } +class FileDescriptor; + +namespace compiler { + +// Defined in this file. +class CodeGenerator; +class GeneratorContext; + +// The abstract interface to a class which generates code implementing a +// particular proto file in a particular language. A number of these may +// be registered with CommandLineInterface to support various languages. +class LIBPROTOC_EXPORT CodeGenerator { + public: + inline CodeGenerator() {} + virtual ~CodeGenerator(); + + // Generates code for the given proto file, generating one or more files in + // the given output directory. + // + // A parameter to be passed to the generator can be specified on the + // command line. This is intended to be used by Java and similar languages + // to specify which specific class from the proto file is to be generated, + // though it could have other uses as well. It is empty if no parameter was + // given. + // + // Returns true if successful. Otherwise, sets *error to a description of + // the problem (e.g. "invalid parameter") and returns false. + virtual bool Generate(const FileDescriptor* file, + const string& parameter, + GeneratorContext* generator_context, + string* error) const = 0; + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(CodeGenerator); +}; + +// CodeGenerators generate one or more files in a given directory. This +// abstract interface represents the directory to which the CodeGenerator is +// to write and other information about the context in which the Generator +// runs. 
+class LIBPROTOC_EXPORT GeneratorContext { + public: + inline GeneratorContext() {} + virtual ~GeneratorContext(); + + // Opens the given file, truncating it if it exists, and returns a + // ZeroCopyOutputStream that writes to the file. The caller takes ownership + // of the returned object. This method never fails (a dummy stream will be + // returned instead). + // + // The filename given should be relative to the root of the source tree. + // E.g. the C++ generator, when generating code for "foo/bar.proto", will + // generate the files "foo/bar.pb.h" and "foo/bar.pb.cc"; note that + // "foo/" is included in these filenames. The filename is not allowed to + // contain "." or ".." components. + virtual io::ZeroCopyOutputStream* Open(const string& filename) = 0; + + // Creates a ZeroCopyOutputStream which will insert code into the given file + // at the given insertion point. See plugin.proto (plugin.pb.h) for more + // information on insertion points. The default implementation + // assert-fails -- it exists only for backwards-compatibility. + // + // WARNING: This feature is currently EXPERIMENTAL and is subject to change. + virtual io::ZeroCopyOutputStream* OpenForInsert( + const string& filename, const string& insertion_point); + + // Returns a vector of FileDescriptors for all the files being compiled + // in this run. Useful for languages, such as Go, that treat files + // differently when compiled as a set rather than individually. + virtual void ListParsedFiles(vector* output); + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(GeneratorContext); +}; + +// The type GeneratorContext was once called OutputDirectory. This typedef +// provides backward compatibility. +typedef GeneratorContext OutputDirectory; + +// Several code generators treat the parameter argument as holding a +// list of options separated by commas. This helper function parses +// a set of comma-delimited name/value pairs: e.g., +// "foo=bar,baz,qux=corge" +// parses to the pairs: +// ("foo", "bar"), ("baz", ""), ("qux", "corge") +extern void ParseGeneratorParameter(const string&, + vector >*); + +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_CODE_GENERATOR_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/command_line_interface.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/command_line_interface.cc new file mode 100644 index 0000000000..1c76994bdc --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/command_line_interface.cc @@ -0,0 +1,1357 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
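// Illustrative sketch (editor's example, not from this patch) of a minimal
// CodeGenerator built against the interface above. The class name
// "EchoGenerator" and the ".echo.txt" output suffix are hypothetical; the
// io::Printer usage assumes the standard protobuf Printer API.
#include <string>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/io/zero_copy_stream.h>
#include <google/protobuf/io/printer.h>
#include <google/protobuf/compiler/code_generator.h>

class EchoGenerator : public google::protobuf::compiler::CodeGenerator {
 public:
  virtual bool Generate(const google::protobuf::FileDescriptor* file,
                        const std::string& parameter,
                        google::protobuf::compiler::GeneratorContext* context,
                        std::string* error) const {
    // Open() hands back a stream the generator owns and must delete.
    google::protobuf::io::ZeroCopyOutputStream* out =
        context->Open(file->name() + ".echo.txt");
    {
      google::protobuf::io::Printer printer(out, '$');
      printer.Print("source: $file$\nparameter: $param$\n",
                    "file", file->name(), "param", parameter);
    }  // Printer flushes when it goes out of scope.
    delete out;
    return true;
  }
};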
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include + +#include +#include +#include +#include +#ifdef _MSC_VER +#include +#include +#else +#include +#endif +#include +#include +#include + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +namespace google { +namespace protobuf { +namespace compiler { + +#if defined(_WIN32) +#define mkdir(name, mode) mkdir(name) +#ifndef W_OK +#define W_OK 02 // not defined by MSVC for whatever reason +#endif +#ifndef F_OK +#define F_OK 00 // not defined by MSVC for whatever reason +#endif +#ifndef STDIN_FILENO +#define STDIN_FILENO 0 +#endif +#ifndef STDOUT_FILENO +#define STDOUT_FILENO 1 +#endif +#endif + +#ifndef O_BINARY +#ifdef _O_BINARY +#define O_BINARY _O_BINARY +#else +#define O_BINARY 0 // If this isn't defined, the platform doesn't need it. +#endif +#endif + +namespace { +#if defined(_WIN32) && !defined(__CYGWIN__) +static const char* kPathSeparator = ";"; +#else +static const char* kPathSeparator = ":"; +#endif + +// Returns true if the text looks like a Windows-style absolute path, starting +// with a drive letter. Example: "C:\foo". TODO(kenton): Share this with +// copy in importer.cc? +static bool IsWindowsAbsolutePath(const string& text) { +#if defined(_WIN32) || defined(__CYGWIN__) + return text.size() >= 3 && text[1] == ':' && + isalpha(text[0]) && + (text[2] == '/' || text[2] == '\\') && + text.find_last_of(':') == 1; +#else + return false; +#endif +} + +void SetFdToTextMode(int fd) { +#ifdef _WIN32 + if (_setmode(fd, _O_TEXT) == -1) { + // This should never happen, I think. + GOOGLE_LOG(WARNING) << "_setmode(" << fd << ", _O_TEXT): " << strerror(errno); + } +#endif + // (Text and binary are the same on non-Windows platforms.) +} + +void SetFdToBinaryMode(int fd) { +#ifdef _WIN32 + if (_setmode(fd, _O_BINARY) == -1) { + // This should never happen, I think. + GOOGLE_LOG(WARNING) << "_setmode(" << fd << ", _O_BINARY): " << strerror(errno); + } +#endif + // (Text and binary are the same on non-Windows platforms.) 
+} + +void AddTrailingSlash(string* path) { + if (!path->empty() && path->at(path->size() - 1) != '/') { + path->push_back('/'); + } +} + +bool VerifyDirectoryExists(const string& path) { + if (path.empty()) return true; + + if (access(path.c_str(), W_OK) == -1) { + cerr << path << ": " << strerror(errno) << endl; + return false; + } else { + return true; + } +} + +// Try to create the parent directory of the given file, creating the parent's +// parent if necessary, and so on. The full file name is actually +// (prefix + filename), but we assume |prefix| already exists and only create +// directories listed in |filename|. +bool TryCreateParentDirectory(const string& prefix, const string& filename) { + // Recursively create parent directories to the output file. + vector parts; + SplitStringUsing(filename, "/", &parts); + string path_so_far = prefix; + for (int i = 0; i < parts.size() - 1; i++) { + path_so_far += parts[i]; + if (mkdir(path_so_far.c_str(), 0777) != 0) { + if (errno != EEXIST) { + cerr << filename << ": while trying to create directory " + << path_so_far << ": " << strerror(errno) << endl; + return false; + } + } + path_so_far += '/'; + } + + return true; +} + +} // namespace + +// A MultiFileErrorCollector that prints errors to stderr. +class CommandLineInterface::ErrorPrinter : public MultiFileErrorCollector, + public io::ErrorCollector { + public: + ErrorPrinter(ErrorFormat format, DiskSourceTree *tree = NULL) + : format_(format), tree_(tree) {} + ~ErrorPrinter() {} + + // implements MultiFileErrorCollector ------------------------------ + void AddError(const string& filename, int line, int column, + const string& message) { + + // Print full path when running under MSVS + string dfile; + if (format_ == CommandLineInterface::ERROR_FORMAT_MSVS && + tree_ != NULL && + tree_->VirtualFileToDiskFile(filename, &dfile)) { + cerr << dfile; + } else { + cerr << filename; + } + + // Users typically expect 1-based line/column numbers, so we add 1 + // to each here. + if (line != -1) { + // Allow for both GCC- and Visual-Studio-compatible output. + switch (format_) { + case CommandLineInterface::ERROR_FORMAT_GCC: + cerr << ":" << (line + 1) << ":" << (column + 1); + break; + case CommandLineInterface::ERROR_FORMAT_MSVS: + cerr << "(" << (line + 1) << ") : error in column=" << (column + 1); + break; + } + } + + cerr << ": " << message << endl; + } + + // implements io::ErrorCollector ----------------------------------- + void AddError(int line, int column, const string& message) { + AddError("input", line, column, message); + } + + private: + const ErrorFormat format_; + DiskSourceTree *tree_; +}; + +// ------------------------------------------------------------------- + +// A GeneratorContext implementation that buffers files in memory, then dumps +// them all to disk on demand. +class CommandLineInterface::GeneratorContextImpl : public GeneratorContext { + public: + GeneratorContextImpl(const vector& parsed_files); + ~GeneratorContextImpl(); + + // Write all files in the directory to disk at the given output location, + // which must end in a '/'. + bool WriteAllToDisk(const string& prefix); + + // Write the contents of this directory to a ZIP-format archive with the + // given name. + bool WriteAllToZip(const string& filename); + + // Add a boilerplate META-INF/MANIFEST.MF file as required by the Java JAR + // format, unless one has already been written. 
+ void AddJarManifest(); + + // implements GeneratorContext -------------------------------------- + io::ZeroCopyOutputStream* Open(const string& filename); + io::ZeroCopyOutputStream* OpenForInsert( + const string& filename, const string& insertion_point); + void ListParsedFiles(vector* output) { + *output = parsed_files_; + } + + private: + friend class MemoryOutputStream; + + // map instead of hash_map so that files are written in order (good when + // writing zips). + map files_; + const vector& parsed_files_; + bool had_error_; +}; + +class CommandLineInterface::MemoryOutputStream + : public io::ZeroCopyOutputStream { + public: + MemoryOutputStream(GeneratorContextImpl* directory, const string& filename); + MemoryOutputStream(GeneratorContextImpl* directory, const string& filename, + const string& insertion_point); + virtual ~MemoryOutputStream(); + + // implements ZeroCopyOutputStream --------------------------------- + virtual bool Next(void** data, int* size) { return inner_->Next(data, size); } + virtual void BackUp(int count) { inner_->BackUp(count); } + virtual int64 ByteCount() const { return inner_->ByteCount(); } + + private: + // Where to insert the string when it's done. + GeneratorContextImpl* directory_; + string filename_; + string insertion_point_; + + // The string we're building. + string data_; + + // StringOutputStream writing to data_. + scoped_ptr inner_; +}; + +// ------------------------------------------------------------------- + +CommandLineInterface::GeneratorContextImpl::GeneratorContextImpl( + const vector& parsed_files) + : parsed_files_(parsed_files), + had_error_(false) { +} + +CommandLineInterface::GeneratorContextImpl::~GeneratorContextImpl() { + STLDeleteValues(&files_); +} + +bool CommandLineInterface::GeneratorContextImpl::WriteAllToDisk( + const string& prefix) { + if (had_error_) { + return false; + } + + if (!VerifyDirectoryExists(prefix)) { + return false; + } + + for (map::const_iterator iter = files_.begin(); + iter != files_.end(); ++iter) { + const string& relative_filename = iter->first; + const char* data = iter->second->data(); + int size = iter->second->size(); + + if (!TryCreateParentDirectory(prefix, relative_filename)) { + return false; + } + string filename = prefix + relative_filename; + + // Create the output file. + int file_descriptor; + do { + file_descriptor = + open(filename.c_str(), O_WRONLY | O_CREAT | O_TRUNC | O_BINARY, 0666); + } while (file_descriptor < 0 && errno == EINTR); + + if (file_descriptor < 0) { + int error = errno; + cerr << filename << ": " << strerror(error); + return false; + } + + // Write the file. + while (size > 0) { + int write_result; + do { + write_result = write(file_descriptor, data, size); + } while (write_result < 0 && errno == EINTR); + + if (write_result <= 0) { + // Write error. + + // FIXME(kenton): According to the man page, if write() returns zero, + // there was no error; write() simply did not write anything. It's + // unclear under what circumstances this might happen, but presumably + // errno won't be set in this case. I am confused as to how such an + // event should be handled. For now I'm treating it as an error, + // since retrying seems like it could lead to an infinite loop. I + // suspect this never actually happens anyway. + + if (write_result < 0) { + int error = errno; + cerr << filename << ": write: " << strerror(error); + } else { + cerr << filename << ": write() returned zero?" 
<< endl; + } + return false; + } + + data += write_result; + size -= write_result; + } + + if (close(file_descriptor) != 0) { + int error = errno; + cerr << filename << ": close: " << strerror(error); + return false; + } + } + + return true; +} + +bool CommandLineInterface::GeneratorContextImpl::WriteAllToZip( + const string& filename) { + if (had_error_) { + return false; + } + + // Create the output file. + int file_descriptor; + do { + file_descriptor = + open(filename.c_str(), O_WRONLY | O_CREAT | O_TRUNC | O_BINARY, 0666); + } while (file_descriptor < 0 && errno == EINTR); + + if (file_descriptor < 0) { + int error = errno; + cerr << filename << ": " << strerror(error); + return false; + } + + // Create the ZipWriter + io::FileOutputStream stream(file_descriptor); + ZipWriter zip_writer(&stream); + + for (map::const_iterator iter = files_.begin(); + iter != files_.end(); ++iter) { + zip_writer.Write(iter->first, *iter->second); + } + + zip_writer.WriteDirectory(); + + if (stream.GetErrno() != 0) { + cerr << filename << ": " << strerror(stream.GetErrno()) << endl; + } + + if (!stream.Close()) { + cerr << filename << ": " << strerror(stream.GetErrno()) << endl; + } + + return true; +} + +void CommandLineInterface::GeneratorContextImpl::AddJarManifest() { + string** map_slot = &files_["META-INF/MANIFEST.MF"]; + if (*map_slot == NULL) { + *map_slot = new string( + "Manifest-Version: 1.0\n" + "Created-By: 1.6.0 (protoc)\n" + "\n"); + } +} + +io::ZeroCopyOutputStream* CommandLineInterface::GeneratorContextImpl::Open( + const string& filename) { + return new MemoryOutputStream(this, filename); +} + +io::ZeroCopyOutputStream* +CommandLineInterface::GeneratorContextImpl::OpenForInsert( + const string& filename, const string& insertion_point) { + return new MemoryOutputStream(this, filename, insertion_point); +} + +// ------------------------------------------------------------------- + +CommandLineInterface::MemoryOutputStream::MemoryOutputStream( + GeneratorContextImpl* directory, const string& filename) + : directory_(directory), + filename_(filename), + inner_(new io::StringOutputStream(&data_)) { +} + +CommandLineInterface::MemoryOutputStream::MemoryOutputStream( + GeneratorContextImpl* directory, const string& filename, + const string& insertion_point) + : directory_(directory), + filename_(filename), + insertion_point_(insertion_point), + inner_(new io::StringOutputStream(&data_)) { +} + +CommandLineInterface::MemoryOutputStream::~MemoryOutputStream() { + // Make sure all data has been written. + inner_.reset(); + + // Insert into the directory. + string** map_slot = &directory_->files_[filename_]; + + if (insertion_point_.empty()) { + // This was just a regular Open(). + if (*map_slot != NULL) { + cerr << filename_ << ": Tried to write the same file twice." << endl; + directory_->had_error_ = true; + return; + } + + *map_slot = new string; + (*map_slot)->swap(data_); + } else { + // This was an OpenForInsert(). + + // If the data doens't end with a clean line break, add one. + if (!data_.empty() && data_[data_.size() - 1] != '\n') { + data_.push_back('\n'); + } + + // Find the file we are going to insert into. + if (*map_slot == NULL) { + cerr << filename_ << ": Tried to insert into file that doesn't exist." + << endl; + directory_->had_error_ = true; + return; + } + string* target = *map_slot; + + // Find the insertion point. 
+ string magic_string = strings::Substitute( + "@@protoc_insertion_point($0)", insertion_point_); + string::size_type pos = target->find(magic_string); + + if (pos == string::npos) { + cerr << filename_ << ": insertion point \"" << insertion_point_ + << "\" not found." << endl; + directory_->had_error_ = true; + return; + } + + // Seek backwards to the beginning of the line, which is where we will + // insert the data. Note that this has the effect of pushing the insertion + // point down, so the data is inserted before it. This is intentional + // because it means that multiple insertions at the same point will end + // up in the expected order in the final output. + pos = target->find_last_of('\n', pos); + if (pos == string::npos) { + // Insertion point is on the first line. + pos = 0; + } else { + // Advance to character after '\n'. + ++pos; + } + + // Extract indent. + string indent_(*target, pos, target->find_first_not_of(" \t", pos) - pos); + + if (indent_.empty()) { + // No indent. This makes things easier. + target->insert(pos, data_); + } else { + // Calculate how much space we need. + int indent_size = 0; + for (int i = 0; i < data_.size(); i++) { + if (data_[i] == '\n') indent_size += indent_.size(); + } + + // Make a hole for it. + target->insert(pos, data_.size() + indent_size, '\0'); + + // Now copy in the data. + string::size_type data_pos = 0; + char* target_ptr = string_as_array(target) + pos; + while (data_pos < data_.size()) { + // Copy indent. + memcpy(target_ptr, indent_.data(), indent_.size()); + target_ptr += indent_.size(); + + // Copy line from data_. + // We already guaranteed that data_ ends with a newline (above), so this + // search can't fail. + string::size_type line_length = + data_.find_first_of('\n', data_pos) + 1 - data_pos; + memcpy(target_ptr, data_.data() + data_pos, line_length); + target_ptr += line_length; + data_pos += line_length; + } + + GOOGLE_CHECK_EQ(target_ptr, + string_as_array(target) + pos + data_.size() + indent_size); + } + } +} + +// =================================================================== + +CommandLineInterface::CommandLineInterface() + : mode_(MODE_COMPILE), + error_format_(ERROR_FORMAT_GCC), + imports_in_descriptor_set_(false), + disallow_services_(false), + inputs_are_proto_path_relative_(false) {} +CommandLineInterface::~CommandLineInterface() {} + +void CommandLineInterface::RegisterGenerator(const string& flag_name, + CodeGenerator* generator, + const string& help_text) { + GeneratorInfo info; + info.generator = generator; + info.help_text = help_text; + generators_[flag_name] = info; +} + +void CommandLineInterface::AllowPlugins(const string& exe_name_prefix) { + plugin_prefix_ = exe_name_prefix; +} + +int CommandLineInterface::Run(int argc, const char* const argv[]) { + Clear(); + if (!ParseArguments(argc, argv)) return 1; + + // Set up the source tree. + DiskSourceTree source_tree; + for (int i = 0; i < proto_path_.size(); i++) { + source_tree.MapPath(proto_path_[i].first, proto_path_[i].second); + } + + // Map input files to virtual paths if necessary. + if (!inputs_are_proto_path_relative_) { + if (!MakeInputsBeProtoPathRelative(&source_tree)) { + return 1; + } + } + + // Allocate the Importer. + ErrorPrinter error_collector(error_format_, &source_tree); + Importer importer(&source_tree, &error_collector); + + vector parsed_files; + + // Parse each file. + for (int i = 0; i < input_files_.size(); i++) { + // Import the file. 
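// Illustrative sketch (editor's example, not from this patch) of the
// insertion-point mechanism handled above. Suppose an earlier generator
// emitted a hypothetical file "foo.pb.h" containing the marker line
//     // @@protoc_insertion_point(namespace_scope)
// A later generator can splice code in immediately above that marker:
#include <google/protobuf/compiler/code_generator.h>
#include <google/protobuf/io/zero_copy_stream.h>
#include <google/protobuf/io/printer.h>

void InsertHelper(google::protobuf::compiler::GeneratorContext* context) {
  google::protobuf::io::ZeroCopyOutputStream* out =
      context->OpenForInsert("foo.pb.h", "namespace_scope");
  {
    google::protobuf::io::Printer printer(out, '$');
    printer.Print("void ExtraHelper();\n");
  }
  // Deleting the stream commits the text; it is re-indented to match the
  // marker line and placed just before it, as implemented above.
  delete out;
}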
+ const FileDescriptor* parsed_file = importer.Import(input_files_[i]); + if (parsed_file == NULL) return 1; + parsed_files.push_back(parsed_file); + + // Enforce --disallow_services. + if (disallow_services_ && parsed_file->service_count() > 0) { + cerr << parsed_file->name() << ": This file contains services, but " + "--disallow_services was used." << endl; + return 1; + } + } + + // We construct a separate GeneratorContext for each output location. Note + // that two code generators may output to the same location, in which case + // they should share a single GeneratorContext so that OpenForInsert() works. + typedef hash_map GeneratorContextMap; + GeneratorContextMap output_directories; + + // Generate output. + if (mode_ == MODE_COMPILE) { + for (int i = 0; i < output_directives_.size(); i++) { + string output_location = output_directives_[i].output_location; + if (!HasSuffixString(output_location, ".zip") && + !HasSuffixString(output_location, ".jar")) { + AddTrailingSlash(&output_location); + } + GeneratorContextImpl** map_slot = &output_directories[output_location]; + + if (*map_slot == NULL) { + // First time we've seen this output location. + *map_slot = new GeneratorContextImpl(parsed_files); + } + + if (!GenerateOutput(parsed_files, output_directives_[i], *map_slot)) { + STLDeleteValues(&output_directories); + return 1; + } + } + } + + // Write all output to disk. + for (GeneratorContextMap::iterator iter = output_directories.begin(); + iter != output_directories.end(); ++iter) { + const string& location = iter->first; + GeneratorContextImpl* directory = iter->second; + if (HasSuffixString(location, "/")) { + if (!directory->WriteAllToDisk(location)) { + STLDeleteValues(&output_directories); + return 1; + } + } else { + if (HasSuffixString(location, ".jar")) { + directory->AddJarManifest(); + } + + if (!directory->WriteAllToZip(location)) { + STLDeleteValues(&output_directories); + return 1; + } + } + } + + STLDeleteValues(&output_directories); + + if (!descriptor_set_name_.empty()) { + if (!WriteDescriptorSet(parsed_files)) { + return 1; + } + } + + if (mode_ == MODE_ENCODE || mode_ == MODE_DECODE) { + if (codec_type_.empty()) { + // HACK: Define an EmptyMessage type to use for decoding. + DescriptorPool pool; + FileDescriptorProto file; + file.set_name("empty_message.proto"); + file.add_message_type()->set_name("EmptyMessage"); + GOOGLE_CHECK(pool.BuildFile(file) != NULL); + codec_type_ = "EmptyMessage"; + if (!EncodeOrDecode(&pool)) { + return 1; + } + } else { + if (!EncodeOrDecode(importer.pool())) { + return 1; + } + } + } + + return 0; +} + +void CommandLineInterface::Clear() { + // Clear all members that are set by Run(). Note that we must not clear + // members which are set by other methods before Run() is called. 
+ executable_name_.clear(); + proto_path_.clear(); + input_files_.clear(); + output_directives_.clear(); + codec_type_.clear(); + descriptor_set_name_.clear(); + + mode_ = MODE_COMPILE; + imports_in_descriptor_set_ = false; + disallow_services_ = false; +} + +bool CommandLineInterface::MakeInputsBeProtoPathRelative( + DiskSourceTree* source_tree) { + for (int i = 0; i < input_files_.size(); i++) { + string virtual_file, shadowing_disk_file; + switch (source_tree->DiskFileToVirtualFile( + input_files_[i], &virtual_file, &shadowing_disk_file)) { + case DiskSourceTree::SUCCESS: + input_files_[i] = virtual_file; + break; + case DiskSourceTree::SHADOWED: + cerr << input_files_[i] << ": Input is shadowed in the --proto_path " + "by \"" << shadowing_disk_file << "\". Either use the latter " + "file as your input or reorder the --proto_path so that the " + "former file's location comes first." << endl; + return false; + case DiskSourceTree::CANNOT_OPEN: + cerr << input_files_[i] << ": " << strerror(errno) << endl; + return false; + case DiskSourceTree::NO_MAPPING: + // First check if the file exists at all. + if (access(input_files_[i].c_str(), F_OK) < 0) { + // File does not even exist. + cerr << input_files_[i] << ": " << strerror(ENOENT) << endl; + } else { + cerr << input_files_[i] << ": File does not reside within any path " + "specified using --proto_path (or -I). You must specify a " + "--proto_path which encompasses this file. Note that the " + "proto_path must be an exact prefix of the .proto file " + "names -- protoc is too dumb to figure out when two paths " + "(e.g. absolute and relative) are equivalent (it's harder " + "than you think)." << endl; + } + return false; + } + } + + return true; +} + +bool CommandLineInterface::ParseArguments(int argc, const char* const argv[]) { + executable_name_ = argv[0]; + + // Iterate through all arguments and parse them. + for (int i = 1; i < argc; i++) { + string name, value; + + if (ParseArgument(argv[i], &name, &value)) { + // Returned true => Use the next argument as the flag value. + if (i + 1 == argc || argv[i+1][0] == '-') { + cerr << "Missing value for flag: " << name << endl; + if (name == "--decode") { + cerr << "To decode an unknown message, use --decode_raw." << endl; + } + return false; + } else { + ++i; + value = argv[i]; + } + } + + if (!InterpretArgument(name, value)) return false; + } + + // If no --proto_path was given, use the current working directory. + if (proto_path_.empty()) { + proto_path_.push_back(make_pair("", ".")); + } + + // Check some errror cases. + bool decoding_raw = (mode_ == MODE_DECODE) && codec_type_.empty(); + if (decoding_raw && !input_files_.empty()) { + cerr << "When using --decode_raw, no input files should be given." << endl; + return false; + } else if (!decoding_raw && input_files_.empty()) { + cerr << "Missing input file." << endl; + return false; + } + if (mode_ == MODE_COMPILE && output_directives_.empty() && + descriptor_set_name_.empty()) { + cerr << "Missing output directives." << endl; + return false; + } + if (imports_in_descriptor_set_ && descriptor_set_name_.empty()) { + cerr << "--include_imports only makes sense when combined with " + "--descriptor_set_out." << endl; + } + + return true; +} + +bool CommandLineInterface::ParseArgument(const char* arg, + string* name, string* value) { + bool parsed_value = false; + + if (arg[0] != '-') { + // Not a flag. 
+ name->clear(); + parsed_value = true; + *value = arg; + } else if (arg[1] == '-') { + // Two dashes: Multi-character name, with '=' separating name and + // value. + const char* equals_pos = strchr(arg, '='); + if (equals_pos != NULL) { + *name = string(arg, equals_pos - arg); + *value = equals_pos + 1; + parsed_value = true; + } else { + *name = arg; + } + } else { + // One dash: One-character name, all subsequent characters are the + // value. + if (arg[1] == '\0') { + // arg is just "-". We treat this as an input file, except that at + // present this will just lead to a "file not found" error. + name->clear(); + *value = arg; + parsed_value = true; + } else { + *name = string(arg, 2); + *value = arg + 2; + parsed_value = !value->empty(); + } + } + + // Need to return true iff the next arg should be used as the value for this + // one, false otherwise. + + if (parsed_value) { + // We already parsed a value for this flag. + return false; + } + + if (*name == "-h" || *name == "--help" || + *name == "--disallow_services" || + *name == "--include_imports" || + *name == "--version" || + *name == "--decode_raw") { + // HACK: These are the only flags that don't take a value. + // They probably should not be hard-coded like this but for now it's + // not worth doing better. + return false; + } + + // Next argument is the flag value. + return true; +} + +bool CommandLineInterface::InterpretArgument(const string& name, + const string& value) { + if (name.empty()) { + // Not a flag. Just a filename. + if (value.empty()) { + cerr << "You seem to have passed an empty string as one of the " + "arguments to " << executable_name_ << ". This is actually " + "sort of hard to do. Congrats. Unfortunately it is not valid " + "input so the program is going to die now." << endl; + return false; + } + + input_files_.push_back(value); + + } else if (name == "-I" || name == "--proto_path") { + // Java's -classpath (and some other languages) delimits path components + // with colons. Let's accept that syntax too just to make things more + // intuitive. + vector parts; + SplitStringUsing(value, kPathSeparator, &parts); + + for (int i = 0; i < parts.size(); i++) { + string virtual_path; + string disk_path; + + int equals_pos = parts[i].find_first_of('='); + if (equals_pos == string::npos) { + virtual_path = ""; + disk_path = parts[i]; + } else { + virtual_path = parts[i].substr(0, equals_pos); + disk_path = parts[i].substr(equals_pos + 1); + } + + if (disk_path.empty()) { + cerr << "--proto_path passed empty directory name. (Use \".\" for " + "current directory.)" << endl; + return false; + } + + // Make sure disk path exists, warn otherwise. + if (access(disk_path.c_str(), F_OK) < 0) { + cerr << disk_path << ": warning: directory does not exist." << endl; + } + + proto_path_.push_back(make_pair(virtual_path, disk_path)); + } + + } else if (name == "-o" || name == "--descriptor_set_out") { + if (!descriptor_set_name_.empty()) { + cerr << name << " may only be passed once." << endl; + return false; + } + if (value.empty()) { + cerr << name << " requires a non-empty value." << endl; + return false; + } + if (mode_ != MODE_COMPILE) { + cerr << "Cannot use --encode or --decode and generate descriptors at the " + "same time." << endl; + return false; + } + descriptor_set_name_ = value; + + } else if (name == "--include_imports") { + if (imports_in_descriptor_set_) { + cerr << name << " may only be passed once." 
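// Illustrative sketch (editor's example, not from this patch) of what the
// --proto_path handling above boils down to: each VIRTUAL=DISK component is
// eventually passed to DiskSourceTree::MapPath() in Run(). The directory
// names below are hypothetical.
#include <google/protobuf/compiler/importer.h>

void ExampleProtoPathMapping() {
  google::protobuf::compiler::DiskSourceTree tree;
  // Equivalent of "-Isrc": files under src/ are importable relative to src/.
  tree.MapPath("", "src");
  // Equivalent of "--proto_path=google/protobuf=src/google/protobuf":
  // an explicit virtual-to-disk prefix mapping.
  tree.MapPath("google/protobuf", "src/google/protobuf");
}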
<< endl; + return false; + } + imports_in_descriptor_set_ = true; + + } else if (name == "-h" || name == "--help") { + PrintHelpText(); + return false; // Exit without running compiler. + + } else if (name == "--version") { + if (!version_info_.empty()) { + cout << version_info_ << endl; + } + cout << "libprotoc " + << protobuf::internal::VersionString(GOOGLE_PROTOBUF_VERSION) + << endl; + return false; // Exit without running compiler. + + } else if (name == "--disallow_services") { + disallow_services_ = true; + + } else if (name == "--encode" || name == "--decode" || + name == "--decode_raw") { + if (mode_ != MODE_COMPILE) { + cerr << "Only one of --encode and --decode can be specified." << endl; + return false; + } + if (!output_directives_.empty() || !descriptor_set_name_.empty()) { + cerr << "Cannot use " << name + << " and generate code or descriptors at the same time." << endl; + return false; + } + + mode_ = (name == "--encode") ? MODE_ENCODE : MODE_DECODE; + + if (value.empty() && name != "--decode_raw") { + cerr << "Type name for " << name << " cannot be blank." << endl; + if (name == "--decode") { + cerr << "To decode an unknown message, use --decode_raw." << endl; + } + return false; + } else if (!value.empty() && name == "--decode_raw") { + cerr << "--decode_raw does not take a parameter." << endl; + return false; + } + + codec_type_ = value; + + } else if (name == "--error_format") { + if (value == "gcc") { + error_format_ = ERROR_FORMAT_GCC; + } else if (value == "msvs") { + error_format_ = ERROR_FORMAT_MSVS; + } else { + cerr << "Unknown error format: " << value << endl; + return false; + } + + } else if (name == "--plugin") { + if (plugin_prefix_.empty()) { + cerr << "This compiler does not support plugins." << endl; + return false; + } + + string name; + string path; + + string::size_type equals_pos = value.find_first_of('='); + if (equals_pos == string::npos) { + // Use the basename of the file. + string::size_type slash_pos = value.find_last_of('/'); + if (slash_pos == string::npos) { + name = value; + } else { + name = value.substr(slash_pos + 1); + } + path = value; + } else { + name = value.substr(0, equals_pos); + path = value.substr(equals_pos + 1); + } + + plugins_[name] = path; + + } else { + // Some other flag. Look it up in the generators list. + const GeneratorInfo* generator_info = FindOrNull(generators_, name); + if (generator_info == NULL && + (plugin_prefix_.empty() || !HasSuffixString(name, "_out"))) { + cerr << "Unknown flag: " << name << endl; + return false; + } + + // It's an output flag. Add it to the output directives. + if (mode_ != MODE_COMPILE) { + cerr << "Cannot use --encode or --decode and generate code at the " + "same time." << endl; + return false; + } + + OutputDirective directive; + directive.name = name; + if (generator_info == NULL) { + directive.generator = NULL; + } else { + directive.generator = generator_info->generator; + } + + // Split value at ':' to separate the generator parameter from the + // filename. However, avoid doing this if the colon is part of a valid + // Windows-style absolute path. 
+ string::size_type colon_pos = value.find_first_of(':'); + if (colon_pos == string::npos || IsWindowsAbsolutePath(value)) { + directive.output_location = value; + } else { + directive.parameter = value.substr(0, colon_pos); + directive.output_location = value.substr(colon_pos + 1); + } + + output_directives_.push_back(directive); + } + + return true; +} + +void CommandLineInterface::PrintHelpText() { + // Sorry for indentation here; line wrapping would be uglier. + cerr << +"Usage: " << executable_name_ << " [OPTION] PROTO_FILES\n" +"Parse PROTO_FILES and generate output based on the options given:\n" +" -IPATH, --proto_path=PATH Specify the directory in which to search for\n" +" imports. May be specified multiple times;\n" +" directories will be searched in order. If not\n" +" given, the current working directory is used.\n" +" --version Show version info and exit.\n" +" -h, --help Show this text and exit.\n" +" --encode=MESSAGE_TYPE Read a text-format message of the given type\n" +" from standard input and write it in binary\n" +" to standard output. The message type must\n" +" be defined in PROTO_FILES or their imports.\n" +" --decode=MESSAGE_TYPE Read a binary message of the given type from\n" +" standard input and write it in text format\n" +" to standard output. The message type must\n" +" be defined in PROTO_FILES or their imports.\n" +" --decode_raw Read an arbitrary protocol message from\n" +" standard input and write the raw tag/value\n" +" pairs in text format to standard output. No\n" +" PROTO_FILES should be given when using this\n" +" flag.\n" +" -oFILE, Writes a FileDescriptorSet (a protocol buffer,\n" +" --descriptor_set_out=FILE defined in descriptor.proto) containing all of\n" +" the input files to FILE.\n" +" --include_imports When using --descriptor_set_out, also include\n" +" all dependencies of the input files in the\n" +" set, so that the set is self-contained.\n" +" --error_format=FORMAT Set the format in which to print errors.\n" +" FORMAT may be 'gcc' (the default) or 'msvs'\n" +" (Microsoft Visual Studio format)." << endl; + if (!plugin_prefix_.empty()) { + cerr << +" --plugin=EXECUTABLE Specifies a plugin executable to use.\n" +" Normally, protoc searches the PATH for\n" +" plugins, but you may specify additional\n" +" executables not in the path using this flag.\n" +" Additionally, EXECUTABLE may be of the form\n" +" NAME=PATH, in which case the given plugin name\n" +" is mapped to the given executable even if\n" +" the executable's own name differs." << endl; + } + + for (GeneratorMap::iterator iter = generators_.begin(); + iter != generators_.end(); ++iter) { + // FIXME(kenton): If the text is long enough it will wrap, which is ugly, + // but fixing this nicely (e.g. splitting on spaces) is probably more + // trouble than it's worth. + cerr << " " << iter->first << "=OUT_DIR " + << string(19 - iter->first.size(), ' ') // Spaces for alignment. + << iter->second.help_text << endl; + } +} + +bool CommandLineInterface::GenerateOutput( + const vector& parsed_files, + const OutputDirective& output_directive, + GeneratorContext* generator_context) { + // Call the generator. + string error; + if (output_directive.generator == NULL) { + // This is a plugin. + GOOGLE_CHECK(HasPrefixString(output_directive.name, "--") && + HasSuffixString(output_directive.name, "_out")) + << "Bad name for plugin generator: " << output_directive.name; + + // Strip the "--" and "_out" and add the plugin prefix. 
+ string plugin_name = plugin_prefix_ + "gen-" + + output_directive.name.substr(2, output_directive.name.size() - 6); + + if (!GeneratePluginOutput(parsed_files, plugin_name, + output_directive.parameter, + generator_context, &error)) { + cerr << output_directive.name << ": " << error << endl; + return false; + } + } else { + // Regular generator. + for (int i = 0; i < parsed_files.size(); i++) { + if (!output_directive.generator->Generate( + parsed_files[i], output_directive.parameter, + generator_context, &error)) { + // Generator returned an error. + cerr << output_directive.name << ": " << parsed_files[i]->name() << ": " + << error << endl; + return false; + } + } + } + + return true; +} + +bool CommandLineInterface::GeneratePluginOutput( + const vector& parsed_files, + const string& plugin_name, + const string& parameter, + GeneratorContext* generator_context, + string* error) { + CodeGeneratorRequest request; + CodeGeneratorResponse response; + + // Build the request. + if (!parameter.empty()) { + request.set_parameter(parameter); + } + + set already_seen; + for (int i = 0; i < parsed_files.size(); i++) { + request.add_file_to_generate(parsed_files[i]->name()); + GetTransitiveDependencies(parsed_files[i], &already_seen, + request.mutable_proto_file()); + } + + // Invoke the plugin. + Subprocess subprocess; + + if (plugins_.count(plugin_name) > 0) { + subprocess.Start(plugins_[plugin_name], Subprocess::EXACT_NAME); + } else { + subprocess.Start(plugin_name, Subprocess::SEARCH_PATH); + } + + string communicate_error; + if (!subprocess.Communicate(request, &response, &communicate_error)) { + *error = strings::Substitute("$0: $1", plugin_name, communicate_error); + return false; + } + + // Write the files. We do this even if there was a generator error in order + // to match the behavior of a compiled-in generator. + scoped_ptr current_output; + for (int i = 0; i < response.file_size(); i++) { + const CodeGeneratorResponse::File& output_file = response.file(i); + + if (!output_file.insertion_point().empty()) { + // Open a file for insert. + // We reset current_output to NULL first so that the old file is closed + // before the new one is opened. + current_output.reset(); + current_output.reset(generator_context->OpenForInsert( + output_file.name(), output_file.insertion_point())); + } else if (!output_file.name().empty()) { + // Starting a new file. Open it. + // We reset current_output to NULL first so that the old file is closed + // before the new one is opened. + current_output.reset(); + current_output.reset(generator_context->Open(output_file.name())); + } else if (current_output == NULL) { + *error = strings::Substitute( + "$0: First file chunk returned by plugin did not specify a file name.", + plugin_name); + return false; + } + + // Use CodedOutputStream for convenience; otherwise we'd need to provide + // our own buffer-copying loop. + io::CodedOutputStream writer(current_output.get()); + writer.WriteString(output_file.content()); + } + + // Check for errors. + if (!response.error().empty()) { + // Generator returned an error. + *error = response.error(); + return false; + } + + return true; +} + +bool CommandLineInterface::EncodeOrDecode(const DescriptorPool* pool) { + // Look up the type. 
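// Illustrative sketch (editor's example, not from this patch) of the other
// end of the plugin protocol used by GeneratePluginOutput() above: the plugin
// executable reads a serialized CodeGeneratorRequest from stdin and writes a
// CodeGeneratorResponse to stdout. Assuming plugin_prefix_ is "protoc-", a
// "--demo_out" flag would resolve to an executable named "protoc-gen-demo";
// the ".demo.txt" output name is hypothetical.
#include <iostream>
#include <google/protobuf/compiler/plugin.pb.h>

int main() {
  // On Windows the caller puts both pipes into binary mode; a plugin should
  // do the same for its own stdin/stdout before parsing.
  google::protobuf::compiler::CodeGeneratorRequest request;
  if (!request.ParseFromIstream(&std::cin)) return 1;

  google::protobuf::compiler::CodeGeneratorResponse response;
  for (int i = 0; i < request.file_to_generate_size(); i++) {
    google::protobuf::compiler::CodeGeneratorResponse::File* file =
        response.add_file();
    file->set_name(request.file_to_generate(i) + ".demo.txt");
    file->set_content("generated from " + request.file_to_generate(i) + "\n");
  }
  return response.SerializeToOstream(&std::cout) ? 0 : 1;
}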
+ const Descriptor* type = pool->FindMessageTypeByName(codec_type_); + if (type == NULL) { + cerr << "Type not defined: " << codec_type_ << endl; + return false; + } + + DynamicMessageFactory dynamic_factory(pool); + scoped_ptr message(dynamic_factory.GetPrototype(type)->New()); + + if (mode_ == MODE_ENCODE) { + SetFdToTextMode(STDIN_FILENO); + SetFdToBinaryMode(STDOUT_FILENO); + } else { + SetFdToBinaryMode(STDIN_FILENO); + SetFdToTextMode(STDOUT_FILENO); + } + + io::FileInputStream in(STDIN_FILENO); + io::FileOutputStream out(STDOUT_FILENO); + + if (mode_ == MODE_ENCODE) { + // Input is text. + ErrorPrinter error_collector(error_format_); + TextFormat::Parser parser; + parser.RecordErrorsTo(&error_collector); + parser.AllowPartialMessage(true); + + if (!parser.Parse(&in, message.get())) { + cerr << "Failed to parse input." << endl; + return false; + } + } else { + // Input is binary. + if (!message->ParsePartialFromZeroCopyStream(&in)) { + cerr << "Failed to parse input." << endl; + return false; + } + } + + if (!message->IsInitialized()) { + cerr << "warning: Input message is missing required fields: " + << message->InitializationErrorString() << endl; + } + + if (mode_ == MODE_ENCODE) { + // Output is binary. + if (!message->SerializePartialToZeroCopyStream(&out)) { + cerr << "output: I/O error." << endl; + return false; + } + } else { + // Output is text. + if (!TextFormat::Print(*message, &out)) { + cerr << "output: I/O error." << endl; + return false; + } + } + + return true; +} + +bool CommandLineInterface::WriteDescriptorSet( + const vector parsed_files) { + FileDescriptorSet file_set; + + if (imports_in_descriptor_set_) { + set already_seen; + for (int i = 0; i < parsed_files.size(); i++) { + GetTransitiveDependencies( + parsed_files[i], &already_seen, file_set.mutable_file()); + } + } else { + for (int i = 0; i < parsed_files.size(); i++) { + parsed_files[i]->CopyTo(file_set.add_file()); + } + } + + int fd; + do { + fd = open(descriptor_set_name_.c_str(), + O_WRONLY | O_CREAT | O_TRUNC | O_BINARY, 0666); + } while (fd < 0 && errno == EINTR); + + if (fd < 0) { + perror(descriptor_set_name_.c_str()); + return false; + } + + io::FileOutputStream out(fd); + if (!file_set.SerializeToZeroCopyStream(&out)) { + cerr << descriptor_set_name_ << ": " << strerror(out.GetErrno()) << endl; + out.Close(); + return false; + } + if (!out.Close()) { + cerr << descriptor_set_name_ << ": " << strerror(out.GetErrno()) << endl; + return false; + } + + return true; +} + +void CommandLineInterface::GetTransitiveDependencies( + const FileDescriptor* file, + set* already_seen, + RepeatedPtrField* output) { + if (!already_seen->insert(file).second) { + // Already saw this file. Skip. + return; + } + + // Add all dependencies. + for (int i = 0; i < file->dependency_count(); i++) { + GetTransitiveDependencies(file->dependency(i), already_seen, output); + } + + // Add this file. 
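// Illustrative sketch (editor's example, not from this patch) of the dynamic
// round trip EncodeOrDecode() above performs, reduced to strings: resolve a
// type in a DescriptorPool, instantiate it through DynamicMessageFactory,
// parse text format, and re-serialize as binary. "example.Person" is a
// hypothetical fully-qualified type name.
#include <string>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/dynamic_message.h>
#include <google/protobuf/message.h>
#include <google/protobuf/text_format.h>

bool TextToBinary(const google::protobuf::DescriptorPool* pool,
                  const std::string& text, std::string* binary) {
  const google::protobuf::Descriptor* type =
      pool->FindMessageTypeByName("example.Person");
  if (type == NULL) return false;

  google::protobuf::DynamicMessageFactory factory(pool);
  google::protobuf::Message* message = factory.GetPrototype(type)->New();
  bool ok = google::protobuf::TextFormat::ParseFromString(text, message) &&
            message->SerializePartialToString(binary);
  delete message;
  return ok;
}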
+ file->CopyTo(output->Add()); +} + + +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/command_line_interface.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/command_line_interface.h new file mode 100644 index 0000000000..0b507d801f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/command_line_interface.h @@ -0,0 +1,318 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Implements the Protocol Compiler front-end such that it may be reused by +// custom compilers written to support other languages. + +#ifndef GOOGLE_PROTOBUF_COMPILER_COMMAND_LINE_INTERFACE_H__ +#define GOOGLE_PROTOBUF_COMPILER_COMMAND_LINE_INTERFACE_H__ + +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { + +class FileDescriptor; // descriptor.h +class DescriptorPool; // descriptor.h +class FileDescriptorProto; // descriptor.pb.h +template class RepeatedPtrField; // repeated_field.h + +namespace compiler { + +class CodeGenerator; // code_generator.h +class GeneratorContext; // code_generator.h +class DiskSourceTree; // importer.h + +// This class implements the command-line interface to the protocol compiler. +// It is designed to make it very easy to create a custom protocol compiler +// supporting the languages of your choice. 
For example, if you wanted to +// create a custom protocol compiler binary which includes both the regular +// C++ support plus support for your own custom output "Foo", you would +// write a class "FooGenerator" which implements the CodeGenerator interface, +// then write a main() procedure like this: +// +// int main(int argc, char* argv[]) { +// google::protobuf::compiler::CommandLineInterface cli; +// +// // Support generation of C++ source and headers. +// google::protobuf::compiler::cpp::CppGenerator cpp_generator; +// cli.RegisterGenerator("--cpp_out", &cpp_generator, +// "Generate C++ source and header."); +// +// // Support generation of Foo code. +// FooGenerator foo_generator; +// cli.RegisterGenerator("--foo_out", &foo_generator, +// "Generate Foo file."); +// +// return cli.Run(argc, argv); +// } +// +// The compiler is invoked with syntax like: +// protoc --cpp_out=outdir --foo_out=outdir --proto_path=src src/foo.proto +// +// For a full description of the command-line syntax, invoke it with --help. +class LIBPROTOC_EXPORT CommandLineInterface { + public: + CommandLineInterface(); + ~CommandLineInterface(); + + // Register a code generator for a language. + // + // Parameters: + // * flag_name: The command-line flag used to specify an output file of + // this type. The name must start with a '-'. If the name is longer + // than one letter, it must start with two '-'s. + // * generator: The CodeGenerator which will be called to generate files + // of this type. + // * help_text: Text describing this flag in the --help output. + // + // Some generators accept extra parameters. You can specify this parameter + // on the command-line by placing it before the output directory, separated + // by a colon: + // protoc --foo_out=enable_bar:outdir + // The text before the colon is passed to CodeGenerator::Generate() as the + // "parameter". + void RegisterGenerator(const string& flag_name, + CodeGenerator* generator, + const string& help_text); + + // Enables "plugins". In this mode, if a command-line flag ends with "_out" + // but does not match any registered generator, the compiler will attempt to + // find a "plugin" to implement the generator. Plugins are just executables. + // They should live somewhere in the PATH. + // + // The compiler determines the executable name to search for by concatenating + // exe_name_prefix with the unrecognized flag name, removing "_out". So, for + // example, if exe_name_prefix is "protoc-" and you pass the flag --foo_out, + // the compiler will try to run the program "protoc-foo". + // + // The plugin program should implement the following usage: + // plugin [--out=OUTDIR] [--parameter=PARAMETER] PROTO_FILES < DESCRIPTORS + // --out indicates the output directory (as passed to the --foo_out + // parameter); if omitted, the current directory should be used. --parameter + // gives the generator parameter, if any was provided. The PROTO_FILES list + // the .proto files which were given on the compiler command-line; these are + // the files for which the plugin is expected to generate output code. + // Finally, DESCRIPTORS is an encoded FileDescriptorSet (as defined in + // descriptor.proto). This is piped to the plugin's stdin. The set will + // include descriptors for all the files listed in PROTO_FILES as well as + // all files that they import. The plugin MUST NOT attempt to read the + // PROTO_FILES directly -- it must use the FileDescriptorSet. 
+ // + // The plugin should generate whatever files are necessary, as code generators + // normally do. It should write the names of all files it generates to + // stdout. The names should be relative to the output directory, NOT absolute + // names or relative to the current directory. If any errors occur, error + // messages should be written to stderr. If an error is fatal, the plugin + // should exit with a non-zero exit code. + void AllowPlugins(const string& exe_name_prefix); + + // Run the Protocol Compiler with the given command-line parameters. + // Returns the error code which should be returned by main(). + // + // It may not be safe to call Run() in a multi-threaded environment because + // it calls strerror(). I'm not sure why you'd want to do this anyway. + int Run(int argc, const char* const argv[]); + + // Call SetInputsAreCwdRelative(true) if the input files given on the command + // line should be interpreted relative to the proto import path specified + // using --proto_path or -I flags. Otherwise, input file names will be + // interpreted relative to the current working directory (or as absolute + // paths if they start with '/'), though they must still reside inside + // a directory given by --proto_path or the compiler will fail. The latter + // mode is generally more intuitive and easier to use, especially e.g. when + // defining implicit rules in Makefiles. + void SetInputsAreProtoPathRelative(bool enable) { + inputs_are_proto_path_relative_ = enable; + } + + // Provides some text which will be printed when the --version flag is + // used. The version of libprotoc will also be printed on the next line + // after this text. + void SetVersionInfo(const string& text) { + version_info_ = text; + } + + + private: + // ----------------------------------------------------------------- + + class ErrorPrinter; + class GeneratorContextImpl; + class MemoryOutputStream; + + // Clear state from previous Run(). + void Clear(); + + // Remaps each file in input_files_ so that it is relative to one of the + // directories in proto_path_. Returns false if an error occurred. This + // is only used if inputs_are_proto_path_relative_ is false. + bool MakeInputsBeProtoPathRelative( + DiskSourceTree* source_tree); + + // Parse all command-line arguments. + bool ParseArguments(int argc, const char* const argv[]); + + // Parses a command-line argument into a name/value pair. Returns + // true if the next argument in the argv should be used as the value, + // false otherwise. + // + // Exmaples: + // "-Isrc/protos" -> + // name = "-I", value = "src/protos" + // "--cpp_out=src/foo.pb2.cc" -> + // name = "--cpp_out", value = "src/foo.pb2.cc" + // "foo.proto" -> + // name = "", value = "foo.proto" + bool ParseArgument(const char* arg, string* name, string* value); + + // Interprets arguments parsed with ParseArgument. + bool InterpretArgument(const string& name, const string& value); + + // Print the --help text to stderr. + void PrintHelpText(); + + // Generate the given output file from the given input. + struct OutputDirective; // see below + bool GenerateOutput(const vector& parsed_files, + const OutputDirective& output_directive, + GeneratorContext* generator_context); + bool GeneratePluginOutput(const vector& parsed_files, + const string& plugin_name, + const string& parameter, + GeneratorContext* generator_context, + string* error); + + // Implements --encode and --decode. + bool EncodeOrDecode(const DescriptorPool* pool); + + // Implements the --descriptor_set_out option. 
+ bool WriteDescriptorSet(const vector parsed_files); + + // Get all transitive dependencies of the given file (including the file + // itself), adding them to the given list of FileDescriptorProtos. The + // protos will be ordered such that every file is listed before any file that + // depends on it, so that you can call DescriptorPool::BuildFile() on them + // in order. Any files in *already_seen will not be added, and each file + // added will be inserted into *already_seen. + static void GetTransitiveDependencies( + const FileDescriptor* file, + set* already_seen, + RepeatedPtrField* output); + + // ----------------------------------------------------------------- + + // The name of the executable as invoked (i.e. argv[0]). + string executable_name_; + + // Version info set with SetVersionInfo(). + string version_info_; + + // Map from flag names to registered generators. + struct GeneratorInfo { + CodeGenerator* generator; + string help_text; + }; + typedef map GeneratorMap; + GeneratorMap generators_; + + // See AllowPlugins(). If this is empty, plugins aren't allowed. + string plugin_prefix_; + + // Maps specific plugin names to files. When executing a plugin, this map + // is searched first to find the plugin executable. If not found here, the + // PATH (or other OS-specific search strategy) is searched. + map plugins_; + + // Stuff parsed from command line. + enum Mode { + MODE_COMPILE, // Normal mode: parse .proto files and compile them. + MODE_ENCODE, // --encode: read text from stdin, write binary to stdout. + MODE_DECODE // --decode: read binary from stdin, write text to stdout. + }; + + Mode mode_; + + enum ErrorFormat { + ERROR_FORMAT_GCC, // GCC error output format (default). + ERROR_FORMAT_MSVS // Visual Studio output (--error_format=msvs). + }; + + ErrorFormat error_format_; + + vector > proto_path_; // Search path for proto files. + vector input_files_; // Names of the input proto files. + + // output_directives_ lists all the files we are supposed to output and what + // generator to use for each. + struct OutputDirective { + string name; // E.g. "--foo_out" + CodeGenerator* generator; // NULL for plugins + string parameter; + string output_location; + }; + vector output_directives_; + + // When using --encode or --decode, this names the type we are encoding or + // decoding. (Empty string indicates --decode_raw.) + string codec_type_; + + // If --descriptor_set_out was given, this is the filename to which the + // FileDescriptorSet should be written. Otherwise, empty. + string descriptor_set_name_; + + // True if --include_imports was given, meaning that we should + // write all transitive dependencies to the DescriptorSet. Otherwise, only + // the .proto files listed on the command-line are added. + bool imports_in_descriptor_set_; + + // Was the --disallow_services flag used? + bool disallow_services_; + + // See SetInputsAreProtoPathRelative(). 
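// Illustrative sketch (editor's example, not from this patch) of consuming a
// file written via --descriptor_set_out, relying on the ordering guarantee
// documented above (dependencies precede dependents, so BuildFile() can be
// called in sequence). Assumes the set was produced with --include_imports;
// "descriptors.pb" is a hypothetical file name.
#include <fstream>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/descriptor.pb.h>

bool LoadDescriptorSet(const char* path,
                       google::protobuf::DescriptorPool* pool) {
  google::protobuf::FileDescriptorSet file_set;
  std::ifstream in(path, std::ios::binary);  // e.g. "descriptors.pb"
  if (!file_set.ParseFromIstream(&in)) return false;

  for (int i = 0; i < file_set.file_size(); i++) {
    if (pool->BuildFile(file_set.file(i)) == NULL) return false;
  }
  return true;
}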
+  // See SetInputsAreProtoPathRelative().
+  bool inputs_are_proto_path_relative_;
+
+  GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(CommandLineInterface);
+};
+
+}  // namespace compiler
+}  // namespace protobuf
+
+}  // namespace google
+#endif  // GOOGLE_PROTOBUF_COMPILER_COMMAND_LINE_INTERFACE_H__
diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/command_line_interface_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/command_line_interface_unittest.cc
new file mode 100644
index 0000000000..d5b3a1dcd8
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/command_line_interface_unittest.cc
@@ -0,0 +1,1452 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc. All rights reserved.
+// http://code.google.com/p/protobuf/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Author: kenton@google.com (Kenton Varda)
+// Based on original Protocol Buffers design by
+// Sanjay Ghemawat, Jeff Dean, and others.
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#ifdef _MSC_VER
+#include <io.h>
+#else
+#include <unistd.h>
+#endif
+#include <vector>
+
+#include <google/protobuf/descriptor.pb.h>
+#include <google/protobuf/descriptor.h>
+#include <google/protobuf/io/zero_copy_stream.h>
+#include <google/protobuf/compiler/command_line_interface.h>
+#include <google/protobuf/compiler/code_generator.h>
+#include <google/protobuf/compiler/mock_code_generator.h>
+#include <google/protobuf/compiler/subprocess.h>
+#include <google/protobuf/io/printer.h>
+#include <google/protobuf/unittest.pb.h>
+#include <google/protobuf/testing/file.h>
+#include <google/protobuf/stubs/strutil.h>
+#include <google/protobuf/stubs/substitute.h>
+
+#include <google/protobuf/testing/googletest.h>
+#include <gtest/gtest.h>
+
+namespace google {
+namespace protobuf {
+namespace compiler {
+
+#if defined(_WIN32)
+#ifndef STDIN_FILENO
+#define STDIN_FILENO 0
+#endif
+#ifndef STDOUT_FILENO
+#define STDOUT_FILENO 1
+#endif
+#ifndef F_OK
+#define F_OK 00  // not defined by MSVC for whatever reason
+#endif
+#endif
+
+namespace {
+
+class CommandLineInterfaceTest : public testing::Test {
+ protected:
+  virtual void SetUp();
+  virtual void TearDown();
+
+  // Runs the CommandLineInterface with the given command line. The
+  // command is automatically split on spaces, and the string "$tmpdir"
+  // is replaced with TestTempDir().
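The fixture's Run(), declared just below, is documented to split the command on spaces and substitute "$tmpdir" before handing argv to CommandLineInterface::Run(). A hedged sketch of that preprocessing using only the standard library (the test itself relies on the SplitStringUsing/StringReplace helpers):

```cpp
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Split on single spaces, then replace every "$tmpdir" occurrence, mirroring
// what the test fixture's Run() is documented to do.
std::vector<std::string> BuildArgs(const std::string& command,
                                   const std::string& tmpdir) {
  std::vector<std::string> args;
  std::istringstream in(command);
  std::string tok;
  while (std::getline(in, tok, ' ')) {
    std::string::size_type pos;
    while ((pos = tok.find("$tmpdir")) != std::string::npos) {
      tok.replace(pos, 7, tmpdir);  // 7 == strlen("$tmpdir")
    }
    args.push_back(tok);
  }
  return args;
}

int main() {
  std::vector<std::string> args = BuildArgs(
      "protocol_compiler --test_out=$tmpdir --proto_path=$tmpdir foo.proto",
      "/tmp/proto2_cli_test_temp");
  for (const std::string& a : args) std::cout << a << "\n";
  // The fixture would then hand args.size() and a const char** view of these
  // strings to CommandLineInterface::Run().
  return 0;
}
```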
+ void Run(const string& command); + + // ----------------------------------------------------------------- + // Methods to set up the test (called before Run()). + + class NullCodeGenerator; + + // Normally plugins are allowed for all tests. Call this to explicitly + // disable them. + void DisallowPlugins() { disallow_plugins_ = true; } + + // Create a temp file within temp_directory_ with the given name. + // The containing directory is also created if necessary. + void CreateTempFile(const string& name, const string& contents); + + // Create a subdirectory within temp_directory_. + void CreateTempDir(const string& name); + + void SetInputsAreProtoPathRelative(bool enable) { + cli_.SetInputsAreProtoPathRelative(enable); + } + + // ----------------------------------------------------------------- + // Methods to check the test results (called after Run()). + + // Checks that no text was written to stderr during Run(), and Run() + // returned 0. + void ExpectNoErrors(); + + // Checks that Run() returned non-zero and the stderr output is exactly + // the text given. expected_test may contain references to "$tmpdir", + // which will be replaced by the temporary directory path. + void ExpectErrorText(const string& expected_text); + + // Checks that Run() returned non-zero and the stderr contains the given + // substring. + void ExpectErrorSubstring(const string& expected_substring); + + // Returns true if ExpectErrorSubstring(expected_substring) would pass, but + // does not fail otherwise. + bool HasAlternateErrorSubstring(const string& expected_substring); + + // Checks that MockCodeGenerator::Generate() was called in the given + // context (or the generator in test_plugin.cc, which produces the same + // output). That is, this tests if the generator with the given name + // was called with the given parameter and proto file and produced the + // given output file. This is checked by reading the output file and + // checking that it contains the content that MockCodeGenerator would + // generate given these inputs. message_name is the name of the first + // message that appeared in the proto file; this is just to make extra + // sure that the correct file was parsed. + void ExpectGenerated(const string& generator_name, + const string& parameter, + const string& proto_name, + const string& message_name); + void ExpectGenerated(const string& generator_name, + const string& parameter, + const string& proto_name, + const string& message_name, + const string& output_directory); + void ExpectGeneratedWithMultipleInputs(const string& generator_name, + const string& all_proto_names, + const string& proto_name, + const string& message_name); + void ExpectGeneratedWithInsertions(const string& generator_name, + const string& parameter, + const string& insertions, + const string& proto_name, + const string& message_name); + + void ExpectNullCodeGeneratorCalled(const string& parameter); + + void ReadDescriptorSet(const string& filename, + FileDescriptorSet* descriptor_set); + + private: + // The object we are testing. + CommandLineInterface cli_; + + // Was DisallowPlugins() called? + bool disallow_plugins_; + + // We create a directory within TestTempDir() in order to add extra + // protection against accidentally deleting user files (since we recursively + // delete this directory during the test). This is the full path of that + // directory. + string temp_directory_; + + // The result of Run(). + int return_code_; + + // The captured stderr output. 
+ string error_text_; + + // Pointers which need to be deleted later. + vector mock_generators_to_delete_; + + NullCodeGenerator* null_generator_; +}; + +class CommandLineInterfaceTest::NullCodeGenerator : public CodeGenerator { + public: + NullCodeGenerator() : called_(false) {} + ~NullCodeGenerator() {} + + mutable bool called_; + mutable string parameter_; + + // implements CodeGenerator ---------------------------------------- + bool Generate(const FileDescriptor* file, + const string& parameter, + GeneratorContext* context, + string* error) const { + called_ = true; + parameter_ = parameter; + return true; + } +}; + +// =================================================================== + +void CommandLineInterfaceTest::SetUp() { + // Most of these tests were written before this option was added, so we + // run with the option on (which used to be the only way) except in certain + // tests where we turn it off. + cli_.SetInputsAreProtoPathRelative(true); + + temp_directory_ = TestTempDir() + "/proto2_cli_test_temp"; + + // If the temp directory already exists, it must be left over from a + // previous run. Delete it. + if (File::Exists(temp_directory_)) { + File::DeleteRecursively(temp_directory_, NULL, NULL); + } + + // Create the temp directory. + GOOGLE_CHECK(File::CreateDir(temp_directory_.c_str(), DEFAULT_FILE_MODE)); + + // Register generators. + CodeGenerator* generator = new MockCodeGenerator("test_generator"); + mock_generators_to_delete_.push_back(generator); + cli_.RegisterGenerator("--test_out", generator, "Test output."); + cli_.RegisterGenerator("-t", generator, "Test output."); + + generator = new MockCodeGenerator("alt_generator"); + mock_generators_to_delete_.push_back(generator); + cli_.RegisterGenerator("--alt_out", generator, "Alt output."); + + generator = null_generator_ = new NullCodeGenerator(); + mock_generators_to_delete_.push_back(generator); + cli_.RegisterGenerator("--null_out", generator, "Null output."); + + disallow_plugins_ = false; +} + +void CommandLineInterfaceTest::TearDown() { + // Delete the temp directory. + File::DeleteRecursively(temp_directory_, NULL, NULL); + + // Delete all the MockCodeGenerators. + for (int i = 0; i < mock_generators_to_delete_.size(); i++) { + delete mock_generators_to_delete_[i]; + } + mock_generators_to_delete_.clear(); +} + +void CommandLineInterfaceTest::Run(const string& command) { + vector args; + SplitStringUsing(command, " ", &args); + + if (!disallow_plugins_) { + cli_.AllowPlugins("prefix-"); + const char* possible_paths[] = { + // When building with shared libraries, libtool hides the real executable + // in .libs and puts a fake wrapper in the current directory. + // Unfortunately, due to an apparent bug on Cygwin/MinGW, if one program + // wrapped in this way (e.g. protobuf-tests.exe) tries to execute another + // program wrapped in this way (e.g. test_plugin.exe), the latter fails + // with error code 127 and no explanation message. Presumably the problem + // is that the wrapper for protobuf-tests.exe set some environment + // variables that confuse the wrapper for test_plugin.exe. Luckily, it + // turns out that if we simply invoke the wrapped test_plugin.exe + // directly, it works -- I guess the environment variables set by the + // protobuf-tests.exe wrapper happen to be correct for it too. So we do + // that. 
+ ".libs/test_plugin.exe", // Win32 w/autotool (Cygwin / MinGW) + "test_plugin.exe", // Other Win32 (MSVC) + "test_plugin", // Unix + }; + + string plugin_path; + + for (int i = 0; i < GOOGLE_ARRAYSIZE(possible_paths); i++) { + if (access(possible_paths[i], F_OK) == 0) { + plugin_path = possible_paths[i]; + break; + } + } + + if (plugin_path.empty()) { + GOOGLE_LOG(ERROR) + << "Plugin executable not found. Plugin tests are likely to fail."; + } else { + args.push_back("--plugin=prefix-gen-plug=" + plugin_path); + } + } + + scoped_array argv(new const char*[args.size()]); + + for (int i = 0; i < args.size(); i++) { + args[i] = StringReplace(args[i], "$tmpdir", temp_directory_, true); + argv[i] = args[i].c_str(); + } + + CaptureTestStderr(); + + return_code_ = cli_.Run(args.size(), argv.get()); + + error_text_ = GetCapturedTestStderr(); +} + +// ------------------------------------------------------------------- + +void CommandLineInterfaceTest::CreateTempFile( + const string& name, + const string& contents) { + // Create parent directory, if necessary. + string::size_type slash_pos = name.find_last_of('/'); + if (slash_pos != string::npos) { + string dir = name.substr(0, slash_pos); + File::RecursivelyCreateDir(temp_directory_ + "/" + dir, 0777); + } + + // Write file. + string full_name = temp_directory_ + "/" + name; + File::WriteStringToFileOrDie(contents, full_name); +} + +void CommandLineInterfaceTest::CreateTempDir(const string& name) { + File::RecursivelyCreateDir(temp_directory_ + "/" + name, 0777); +} + +// ------------------------------------------------------------------- + +void CommandLineInterfaceTest::ExpectNoErrors() { + EXPECT_EQ(0, return_code_); + EXPECT_EQ("", error_text_); +} + +void CommandLineInterfaceTest::ExpectErrorText(const string& expected_text) { + EXPECT_NE(0, return_code_); + EXPECT_EQ(StringReplace(expected_text, "$tmpdir", temp_directory_, true), + error_text_); +} + +void CommandLineInterfaceTest::ExpectErrorSubstring( + const string& expected_substring) { + EXPECT_NE(0, return_code_); + EXPECT_PRED_FORMAT2(testing::IsSubstring, expected_substring, error_text_); +} + +bool CommandLineInterfaceTest::HasAlternateErrorSubstring( + const string& expected_substring) { + EXPECT_NE(0, return_code_); + return error_text_.find(expected_substring) != string::npos; +} + +void CommandLineInterfaceTest::ExpectGenerated( + const string& generator_name, + const string& parameter, + const string& proto_name, + const string& message_name) { + MockCodeGenerator::ExpectGenerated( + generator_name, parameter, "", proto_name, message_name, proto_name, + temp_directory_); +} + +void CommandLineInterfaceTest::ExpectGenerated( + const string& generator_name, + const string& parameter, + const string& proto_name, + const string& message_name, + const string& output_directory) { + MockCodeGenerator::ExpectGenerated( + generator_name, parameter, "", proto_name, message_name, proto_name, + temp_directory_ + "/" + output_directory); +} + +void CommandLineInterfaceTest::ExpectGeneratedWithMultipleInputs( + const string& generator_name, + const string& all_proto_names, + const string& proto_name, + const string& message_name) { + MockCodeGenerator::ExpectGenerated( + generator_name, "", "", proto_name, message_name, + all_proto_names, + temp_directory_); +} + +void CommandLineInterfaceTest::ExpectGeneratedWithInsertions( + const string& generator_name, + const string& parameter, + const string& insertions, + const string& proto_name, + const string& message_name) { + 
MockCodeGenerator::ExpectGenerated( + generator_name, parameter, insertions, proto_name, message_name, + proto_name, temp_directory_); +} + +void CommandLineInterfaceTest::ExpectNullCodeGeneratorCalled( + const string& parameter) { + EXPECT_TRUE(null_generator_->called_); + EXPECT_EQ(parameter, null_generator_->parameter_); +} + +void CommandLineInterfaceTest::ReadDescriptorSet( + const string& filename, FileDescriptorSet* descriptor_set) { + string path = temp_directory_ + "/" + filename; + string file_contents; + if (!File::ReadFileToString(path, &file_contents)) { + FAIL() << "File not found: " << path; + } + if (!descriptor_set->ParseFromString(file_contents)) { + FAIL() << "Could not parse file contents: " << path; + } +} + +// =================================================================== + +TEST_F(CommandLineInterfaceTest, BasicOutput) { + // Test that the common case works. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectNoErrors(); + ExpectGenerated("test_generator", "", "foo.proto", "Foo"); +} + +TEST_F(CommandLineInterfaceTest, BasicPlugin) { + // Test that basic plugins work. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler --plug_out=$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectNoErrors(); + ExpectGenerated("test_plugin", "", "foo.proto", "Foo"); +} + +TEST_F(CommandLineInterfaceTest, GeneratorAndPlugin) { + // Invoke a generator and a plugin at the same time. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler --test_out=$tmpdir --plug_out=$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectNoErrors(); + ExpectGenerated("test_generator", "", "foo.proto", "Foo"); + ExpectGenerated("test_plugin", "", "foo.proto", "Foo"); +} + +TEST_F(CommandLineInterfaceTest, MultipleInputs) { + // Test parsing multiple input files. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + CreateTempFile("bar.proto", + "syntax = \"proto2\";\n" + "message Bar {}\n"); + + Run("protocol_compiler --test_out=$tmpdir --plug_out=$tmpdir " + "--proto_path=$tmpdir foo.proto bar.proto"); + + ExpectNoErrors(); + ExpectGeneratedWithMultipleInputs("test_generator", "foo.proto,bar.proto", + "foo.proto", "Foo"); + ExpectGeneratedWithMultipleInputs("test_generator", "foo.proto,bar.proto", + "bar.proto", "Bar"); + ExpectGeneratedWithMultipleInputs("test_plugin", "foo.proto,bar.proto", + "foo.proto", "Foo"); + ExpectGeneratedWithMultipleInputs("test_plugin", "foo.proto,bar.proto", + "bar.proto", "Bar"); +} + +TEST_F(CommandLineInterfaceTest, MultipleInputsWithImport) { + // Test parsing multiple input files with an import of a separate file. 
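The CreateTempFile() helper used by these tests (implemented earlier in the fixture) first creates any parent directories implied by the name, then writes the contents. A modern-C++ equivalent, with std::filesystem standing in for the File:: utilities, purely as an illustration:

```cpp
#include <filesystem>
#include <fstream>
#include <iostream>
#include <string>

namespace fs = std::filesystem;

// Create "<root>/<name>", creating intermediate directories as needed; the
// same shape as the fixture's CreateTempFile(), but with std::filesystem.
bool WriteTempFile(const fs::path& root, const std::string& name,
                   const std::string& contents) {
  fs::path full = root / name;
  std::error_code ec;
  fs::create_directories(full.parent_path(), ec);  // ok if it already exists
  if (ec) return false;
  std::ofstream out(full, std::ios::binary);
  out << contents;
  return static_cast<bool>(out);
}

int main() {
  fs::path root = fs::temp_directory_path() / "proto2_cli_demo";
  if (WriteTempFile(root, "bar/baz/foo.proto",
                    "syntax = \"proto2\";\nmessage Foo {}\n")) {
    std::cout << "wrote " << (root / "bar/baz/foo.proto") << "\n";
  }
  fs::remove_all(root);  // clean up, like TearDown() does
  return 0;
}
```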
+ + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + CreateTempFile("bar.proto", + "syntax = \"proto2\";\n" + "import \"baz.proto\";\n" + "message Bar {\n" + " optional Baz a = 1;\n" + "}\n"); + CreateTempFile("baz.proto", + "syntax = \"proto2\";\n" + "message Baz {}\n"); + + Run("protocol_compiler --test_out=$tmpdir --plug_out=$tmpdir " + "--proto_path=$tmpdir foo.proto bar.proto"); + + ExpectNoErrors(); + ExpectGeneratedWithMultipleInputs("test_generator", "foo.proto,bar.proto", + "foo.proto", "Foo"); + ExpectGeneratedWithMultipleInputs("test_generator", "foo.proto,bar.proto", + "bar.proto", "Bar"); + ExpectGeneratedWithMultipleInputs("test_plugin", "foo.proto,bar.proto", + "foo.proto", "Foo"); + ExpectGeneratedWithMultipleInputs("test_plugin", "foo.proto,bar.proto", + "bar.proto", "Bar"); +} + +TEST_F(CommandLineInterfaceTest, CreateDirectory) { + // Test that when we output to a sub-directory, it is created. + + CreateTempFile("bar/baz/foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + CreateTempDir("out"); + CreateTempDir("plugout"); + + Run("protocol_compiler --test_out=$tmpdir/out --plug_out=$tmpdir/plugout " + "--proto_path=$tmpdir bar/baz/foo.proto"); + + ExpectNoErrors(); + ExpectGenerated("test_generator", "", "bar/baz/foo.proto", "Foo", "out"); + ExpectGenerated("test_plugin", "", "bar/baz/foo.proto", "Foo", "plugout"); +} + +TEST_F(CommandLineInterfaceTest, GeneratorParameters) { + // Test that generator parameters are correctly parsed from the command line. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler --test_out=TestParameter:$tmpdir " + "--plug_out=TestPluginParameter:$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectNoErrors(); + ExpectGenerated("test_generator", "TestParameter", "foo.proto", "Foo"); + ExpectGenerated("test_plugin", "TestPluginParameter", "foo.proto", "Foo"); +} + +TEST_F(CommandLineInterfaceTest, Insert) { + // Test running a generator that inserts code into another's output. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler " + "--test_out=TestParameter:$tmpdir " + "--plug_out=TestPluginParameter:$tmpdir " + "--test_out=insert=test_generator,test_plugin:$tmpdir " + "--plug_out=insert=test_generator,test_plugin:$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectNoErrors(); + ExpectGeneratedWithInsertions( + "test_generator", "TestParameter", "test_generator,test_plugin", + "foo.proto", "Foo"); + ExpectGeneratedWithInsertions( + "test_plugin", "TestPluginParameter", "test_generator,test_plugin", + "foo.proto", "Foo"); +} + +#if defined(_WIN32) + +TEST_F(CommandLineInterfaceTest, WindowsOutputPath) { + // Test that the output path can be a Windows-style path. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n"); + + Run("protocol_compiler --null_out=C:\\ " + "--proto_path=$tmpdir foo.proto"); + + ExpectNoErrors(); + ExpectNullCodeGeneratorCalled(""); +} + +TEST_F(CommandLineInterfaceTest, WindowsOutputPathAndParameter) { + // Test that we can have a windows-style output path and a parameter. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n"); + + Run("protocol_compiler --null_out=bar:C:\\ " + "--proto_path=$tmpdir foo.proto"); + + ExpectNoErrors(); + ExpectNullCodeGeneratorCalled("bar"); +} + +TEST_F(CommandLineInterfaceTest, TrailingBackslash) { + // Test that the directories can end in backslashes. Some users claim this + // doesn't work on their system. 
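The --test_out=TestParameter:$tmpdir form exercised by the GeneratorParameters and Insert tests above follows the "<parameter>:<output_dir>" directive syntax. A simple sketch of that split; note that protoc's real parser also has to special-case Windows drive paths such as "C:\", as the WindowsOutputPathAndParameter test shows:

```cpp
#include <iostream>
#include <string>

// Illustration only: everything before the first ':' is the generator
// parameter, the remainder is the output location.
void SplitOutputDirective(const std::string& value,
                          std::string* parameter,
                          std::string* output_location) {
  std::string::size_type colon = value.find(':');
  if (colon == std::string::npos) {
    parameter->clear();
    *output_location = value;
  } else {
    *parameter = value.substr(0, colon);
    *output_location = value.substr(colon + 1);
  }
}

int main() {
  std::string param, dir;
  SplitOutputDirective("TestParameter:/tmp/out", &param, &dir);
  std::cout << "parameter=" << param << " dir=" << dir << "\n";
  SplitOutputDirective("/tmp/out", &param, &dir);
  std::cout << "parameter=" << param << " dir=" << dir << "\n";
  return 0;
}
```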
+ + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler --test_out=$tmpdir\\ " + "--proto_path=$tmpdir\\ foo.proto"); + + ExpectNoErrors(); + ExpectGenerated("test_generator", "", "foo.proto", "Foo"); +} + +#endif // defined(_WIN32) || defined(__CYGWIN__) + +TEST_F(CommandLineInterfaceTest, PathLookup) { + // Test that specifying multiple directories in the proto search path works. + + CreateTempFile("b/bar.proto", + "syntax = \"proto2\";\n" + "message Bar {}\n"); + CreateTempFile("a/foo.proto", + "syntax = \"proto2\";\n" + "import \"bar.proto\";\n" + "message Foo {\n" + " optional Bar a = 1;\n" + "}\n"); + CreateTempFile("b/foo.proto", "this should not be parsed\n"); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir/a --proto_path=$tmpdir/b foo.proto"); + + ExpectNoErrors(); + ExpectGenerated("test_generator", "", "foo.proto", "Foo"); +} + +TEST_F(CommandLineInterfaceTest, ColonDelimitedPath) { + // Same as PathLookup, but we provide the proto_path in a single flag. + + CreateTempFile("b/bar.proto", + "syntax = \"proto2\";\n" + "message Bar {}\n"); + CreateTempFile("a/foo.proto", + "syntax = \"proto2\";\n" + "import \"bar.proto\";\n" + "message Foo {\n" + " optional Bar a = 1;\n" + "}\n"); + CreateTempFile("b/foo.proto", "this should not be parsed\n"); + +#undef PATH_SEPARATOR +#if defined(_WIN32) +#define PATH_SEPARATOR ";" +#else +#define PATH_SEPARATOR ":" +#endif + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir/a"PATH_SEPARATOR"$tmpdir/b foo.proto"); + +#undef PATH_SEPARATOR + + ExpectNoErrors(); + ExpectGenerated("test_generator", "", "foo.proto", "Foo"); +} + +TEST_F(CommandLineInterfaceTest, NonRootMapping) { + // Test setting up a search path mapping a directory to a non-root location. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=bar=$tmpdir bar/foo.proto"); + + ExpectNoErrors(); + ExpectGenerated("test_generator", "", "bar/foo.proto", "Foo"); +} + +TEST_F(CommandLineInterfaceTest, MultipleGenerators) { + // Test that we can have multiple generators and use both in one invocation, + // each with a different output directory. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + // Create the "a" and "b" sub-directories. + CreateTempDir("a"); + CreateTempDir("b"); + + Run("protocol_compiler " + "--test_out=$tmpdir/a " + "--alt_out=$tmpdir/b " + "--proto_path=$tmpdir foo.proto"); + + ExpectNoErrors(); + ExpectGenerated("test_generator", "", "foo.proto", "Foo", "a"); + ExpectGenerated("alt_generator", "", "foo.proto", "Foo", "b"); +} + +TEST_F(CommandLineInterfaceTest, DisallowServicesNoServices) { + // Test that --disallow_services doesn't cause a problem when there are no + // services. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler --disallow_services --test_out=$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectNoErrors(); + ExpectGenerated("test_generator", "", "foo.proto", "Foo"); +} + +TEST_F(CommandLineInterfaceTest, DisallowServicesHasService) { + // Test that --disallow_services produces an error when there are services. 
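The ColonDelimitedPath test above builds its --proto_path with the platform's path-list separator (';' on Windows, ':' elsewhere). Splitting such a value back into individual search directories, with earlier entries taking precedence as in the PathLookup test, can be sketched like this:

```cpp
#include <iostream>
#include <string>
#include <vector>

#if defined(_WIN32)
static const char kPathSeparator = ';';
#else
static const char kPathSeparator = ':';
#endif

// Split a delimited --proto_path value into its component directories,
// preserving order (earlier directories win on lookup).
std::vector<std::string> SplitProtoPath(const std::string& value) {
  std::vector<std::string> dirs;
  std::string::size_type start = 0;
  while (start <= value.size()) {
    std::string::size_type end = value.find(kPathSeparator, start);
    if (end == std::string::npos) end = value.size();
    dirs.push_back(value.substr(start, end - start));
    start = end + 1;
  }
  return dirs;
}

int main() {
  std::string value = std::string("/tmp/a") + kPathSeparator + "/tmp/b";
  for (const std::string& d : SplitProtoPath(value)) std::cout << d << "\n";
  return 0;
}
```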
+ + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n" + "service Bar {}\n"); + + Run("protocol_compiler --disallow_services --test_out=$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectErrorSubstring("foo.proto: This file contains services"); +} + +TEST_F(CommandLineInterfaceTest, AllowServicesHasService) { + // Test that services work fine as long as --disallow_services is not used. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n" + "service Bar {}\n"); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectNoErrors(); + ExpectGenerated("test_generator", "", "foo.proto", "Foo"); +} + +TEST_F(CommandLineInterfaceTest, CwdRelativeInputs) { + // Test that we can accept working-directory-relative input files. + + SetInputsAreProtoPathRelative(false); + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir $tmpdir/foo.proto"); + + ExpectNoErrors(); + ExpectGenerated("test_generator", "", "foo.proto", "Foo"); +} + +TEST_F(CommandLineInterfaceTest, WriteDescriptorSet) { + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + CreateTempFile("bar.proto", + "syntax = \"proto2\";\n" + "import \"foo.proto\";\n" + "message Bar {\n" + " optional Foo foo = 1;\n" + "}\n"); + + Run("protocol_compiler --descriptor_set_out=$tmpdir/descriptor_set " + "--proto_path=$tmpdir bar.proto"); + + ExpectNoErrors(); + + FileDescriptorSet descriptor_set; + ReadDescriptorSet("descriptor_set", &descriptor_set); + if (HasFatalFailure()) return; + ASSERT_EQ(1, descriptor_set.file_size()); + EXPECT_EQ("bar.proto", descriptor_set.file(0).name()); +} + +TEST_F(CommandLineInterfaceTest, WriteTransitiveDescriptorSet) { + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + CreateTempFile("bar.proto", + "syntax = \"proto2\";\n" + "import \"foo.proto\";\n" + "message Bar {\n" + " optional Foo foo = 1;\n" + "}\n"); + + Run("protocol_compiler --descriptor_set_out=$tmpdir/descriptor_set " + "--include_imports --proto_path=$tmpdir bar.proto"); + + ExpectNoErrors(); + + FileDescriptorSet descriptor_set; + ReadDescriptorSet("descriptor_set", &descriptor_set); + if (HasFatalFailure()) return; + ASSERT_EQ(2, descriptor_set.file_size()); + if (descriptor_set.file(0).name() == "bar.proto") { + std::swap(descriptor_set.mutable_file()->mutable_data()[0], + descriptor_set.mutable_file()->mutable_data()[1]); + } + EXPECT_EQ("foo.proto", descriptor_set.file(0).name()); + EXPECT_EQ("bar.proto", descriptor_set.file(1).name()); +} + +// ------------------------------------------------------------------- + +TEST_F(CommandLineInterfaceTest, ParseErrors) { + // Test that parse errors are reported. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "badsyntax\n"); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectErrorText( + "foo.proto:2:1: Expected top-level statement (e.g. \"message\").\n"); +} + +TEST_F(CommandLineInterfaceTest, ParseErrorsMultipleFiles) { + // Test that parse errors are reported from multiple files. + + // We set up files such that foo.proto actually depends on bar.proto in + // two ways: Directly and through baz.proto. bar.proto's errors should + // only be reported once. 
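The WriteDescriptorSet tests above check the file produced by --descriptor_set_out. For context, this is roughly how a consumer reads that file back with the protobuf runtime; it is an illustration, not part of the test code:

```cpp
#include <fstream>
#include <iostream>

#include <google/protobuf/descriptor.pb.h>

// Parse a serialized FileDescriptorSet and list the FileDescriptorProtos it
// contains. With --include_imports, dependencies such as foo.proto appear
// alongside bar.proto.
int main(int argc, char** argv) {
  if (argc != 2) {
    std::cerr << "usage: dump_descriptor_set <file>\n";
    return 1;
  }
  std::ifstream in(argv[1], std::ios::binary);
  google::protobuf::FileDescriptorSet descriptor_set;
  if (!in || !descriptor_set.ParseFromIstream(&in)) {
    std::cerr << "could not parse " << argv[1] << "\n";
    return 1;
  }
  for (int i = 0; i < descriptor_set.file_size(); i++) {
    std::cout << descriptor_set.file(i).name() << "\n";  // e.g. "bar.proto"
  }
  return 0;
}
```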
+ CreateTempFile("bar.proto", + "syntax = \"proto2\";\n" + "badsyntax\n"); + CreateTempFile("baz.proto", + "syntax = \"proto2\";\n" + "import \"bar.proto\";\n"); + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "import \"bar.proto\";\n" + "import \"baz.proto\";\n"); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectErrorText( + "bar.proto:2:1: Expected top-level statement (e.g. \"message\").\n" + "baz.proto: Import \"bar.proto\" was not found or had errors.\n" + "foo.proto: Import \"bar.proto\" was not found or had errors.\n" + "foo.proto: Import \"baz.proto\" was not found or had errors.\n"); +} + +TEST_F(CommandLineInterfaceTest, InputNotFoundError) { + // Test what happens if the input file is not found. + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectErrorText( + "foo.proto: File not found.\n"); +} + +TEST_F(CommandLineInterfaceTest, CwdRelativeInputNotFoundError) { + // Test what happens when a working-directory-relative input file is not + // found. + + SetInputsAreProtoPathRelative(false); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir $tmpdir/foo.proto"); + + ExpectErrorText( + "$tmpdir/foo.proto: No such file or directory\n"); +} + +TEST_F(CommandLineInterfaceTest, CwdRelativeInputNotMappedError) { + // Test what happens when a working-directory-relative input file is not + // mapped to a virtual path. + + SetInputsAreProtoPathRelative(false); + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + // Create a directory called "bar" so that we can point --proto_path at it. + CreateTempFile("bar/dummy", ""); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir/bar $tmpdir/foo.proto"); + + ExpectErrorText( + "$tmpdir/foo.proto: File does not reside within any path " + "specified using --proto_path (or -I). You must specify a " + "--proto_path which encompasses this file. Note that the " + "proto_path must be an exact prefix of the .proto file " + "names -- protoc is too dumb to figure out when two paths " + "(e.g. absolute and relative) are equivalent (it's harder " + "than you think).\n"); +} + +TEST_F(CommandLineInterfaceTest, CwdRelativeInputNotFoundAndNotMappedError) { + // Check what happens if the input file is not found *and* is not mapped + // in the proto_path. + + SetInputsAreProtoPathRelative(false); + + // Create a directory called "bar" so that we can point --proto_path at it. + CreateTempFile("bar/dummy", ""); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir/bar $tmpdir/foo.proto"); + + ExpectErrorText( + "$tmpdir/foo.proto: No such file or directory\n"); +} + +TEST_F(CommandLineInterfaceTest, CwdRelativeInputShadowedError) { + // Test what happens when a working-directory-relative input file is shadowed + // by another file in the virtual path. + + SetInputsAreProtoPathRelative(false); + + CreateTempFile("foo/foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + CreateTempFile("bar/foo.proto", + "syntax = \"proto2\";\n" + "message Bar {}\n"); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir/foo --proto_path=$tmpdir/bar " + "$tmpdir/bar/foo.proto"); + + ExpectErrorText( + "$tmpdir/bar/foo.proto: Input is shadowed in the --proto_path " + "by \"$tmpdir/foo/foo.proto\". 
Either use the latter " + "file as your input or reorder the --proto_path so that the " + "former file's location comes first.\n"); +} + +TEST_F(CommandLineInterfaceTest, ProtoPathNotFoundError) { + // Test what happens if the input file is not found. + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir/foo foo.proto"); + + ExpectErrorText( + "$tmpdir/foo: warning: directory does not exist.\n" + "foo.proto: File not found.\n"); +} + +TEST_F(CommandLineInterfaceTest, MissingInputError) { + // Test that we get an error if no inputs are given. + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir"); + + ExpectErrorText("Missing input file.\n"); +} + +TEST_F(CommandLineInterfaceTest, MissingOutputError) { + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler --proto_path=$tmpdir foo.proto"); + + ExpectErrorText("Missing output directives.\n"); +} + +TEST_F(CommandLineInterfaceTest, OutputWriteError) { + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + string output_file = + MockCodeGenerator::GetOutputFileName("test_generator", "foo.proto"); + + // Create a directory blocking our output location. + CreateTempDir(output_file); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + // MockCodeGenerator no longer detects an error because we actually write to + // an in-memory location first, then dump to disk at the end. This is no + // big deal. + // ExpectErrorSubstring("MockCodeGenerator detected write error."); + +#if defined(_WIN32) && !defined(__CYGWIN__) + // Windows with MSVCRT.dll produces EPERM instead of EISDIR. + if (HasAlternateErrorSubstring(output_file + ": Permission denied")) { + return; + } +#endif + + ExpectErrorSubstring(output_file + ": Is a directory"); +} + +TEST_F(CommandLineInterfaceTest, PluginOutputWriteError) { + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + string output_file = + MockCodeGenerator::GetOutputFileName("test_plugin", "foo.proto"); + + // Create a directory blocking our output location. + CreateTempDir(output_file); + + Run("protocol_compiler --plug_out=$tmpdir " + "--proto_path=$tmpdir foo.proto"); + +#if defined(_WIN32) && !defined(__CYGWIN__) + // Windows with MSVCRT.dll produces EPERM instead of EISDIR. + if (HasAlternateErrorSubstring(output_file + ": Permission denied")) { + return; + } +#endif + + ExpectErrorSubstring(output_file + ": Is a directory"); +} + +TEST_F(CommandLineInterfaceTest, OutputDirectoryNotFoundError) { + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler --test_out=$tmpdir/nosuchdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectErrorSubstring("nosuchdir/: No such file or directory"); +} + +TEST_F(CommandLineInterfaceTest, PluginOutputDirectoryNotFoundError) { + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler --plug_out=$tmpdir/nosuchdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectErrorSubstring("nosuchdir/: No such file or directory"); +} + +TEST_F(CommandLineInterfaceTest, OutputDirectoryIsFileError) { + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler --test_out=$tmpdir/foo.proto " + "--proto_path=$tmpdir foo.proto"); + +#if defined(_WIN32) && !defined(__CYGWIN__) + // Windows with MSVCRT.dll produces EINVAL instead of ENOTDIR. 
+ if (HasAlternateErrorSubstring("foo.proto/: Invalid argument")) { + return; + } +#endif + + ExpectErrorSubstring("foo.proto/: Not a directory"); +} + +TEST_F(CommandLineInterfaceTest, GeneratorError) { + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message MockCodeGenerator_Error {}\n"); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectErrorSubstring( + "--test_out: foo.proto: Saw message type MockCodeGenerator_Error."); +} + +TEST_F(CommandLineInterfaceTest, GeneratorPluginError) { + // Test a generator plugin that returns an error. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message MockCodeGenerator_Error {}\n"); + + Run("protocol_compiler --plug_out=TestParameter:$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectErrorSubstring( + "--plug_out: foo.proto: Saw message type MockCodeGenerator_Error."); +} + +TEST_F(CommandLineInterfaceTest, GeneratorPluginFail) { + // Test a generator plugin that exits with an error code. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message MockCodeGenerator_Exit {}\n"); + + Run("protocol_compiler --plug_out=TestParameter:$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectErrorSubstring("Saw message type MockCodeGenerator_Exit."); + ExpectErrorSubstring( + "--plug_out: prefix-gen-plug: Plugin failed with status code 123."); +} + +TEST_F(CommandLineInterfaceTest, GeneratorPluginCrash) { + // Test a generator plugin that crashes. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message MockCodeGenerator_Abort {}\n"); + + Run("protocol_compiler --plug_out=TestParameter:$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectErrorSubstring("Saw message type MockCodeGenerator_Abort."); + +#ifdef _WIN32 + // Windows doesn't have signals. It looks like abort()ing causes the process + // to exit with status code 3, but let's not depend on the exact number here. + ExpectErrorSubstring( + "--plug_out: prefix-gen-plug: Plugin failed with status code"); +#else + // Don't depend on the exact signal number. + ExpectErrorSubstring( + "--plug_out: prefix-gen-plug: Plugin killed by signal"); +#endif +} + +TEST_F(CommandLineInterfaceTest, GeneratorPluginNotFound) { + // Test what happens if the plugin isn't found. + + CreateTempFile("error.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler --badplug_out=TestParameter:$tmpdir " + "--plugin=prefix-gen-badplug=no_such_file " + "--proto_path=$tmpdir error.proto"); + +#ifdef _WIN32 + ExpectErrorSubstring("--badplug_out: prefix-gen-badplug: " + + Subprocess::Win32ErrorMessage(ERROR_FILE_NOT_FOUND)); +#else + // Error written to stdout by child process after exec() fails. + ExpectErrorSubstring( + "no_such_file: program not found or is not executable"); + + // Error written by parent process when child fails. + ExpectErrorSubstring( + "--badplug_out: prefix-gen-badplug: Plugin failed with status code 1."); +#endif +} + +TEST_F(CommandLineInterfaceTest, GeneratorPluginNotAllowed) { + // Test what happens if plugins aren't allowed. 
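The GeneratorPluginNotFound test above passes --plugin=prefix-gen-badplug=no_such_file, which per the plugins_ comment in the header maps an explicit plugin name to an executable that is consulted before any PATH search. A sketch of that bookkeeping (illustration only):

```cpp
#include <iostream>
#include <map>
#include <string>

// Record one "--plugin=NAME=PATH" mapping; names not present in the map would
// fall back to the OS search path, as described for plugins_ in the header.
int main() {
  std::map<std::string, std::string> plugins;

  std::string flag_value = "prefix-gen-badplug=no_such_file";  // from the test
  std::string::size_type eq = flag_value.find('=');
  if (eq == std::string::npos) {
    std::cerr << "expected --plugin=NAME=PATH\n";
    return 1;
  }
  plugins[flag_value.substr(0, eq)] = flag_value.substr(eq + 1);

  // Resolving "--badplug_out" means looking up "prefix-gen-badplug" here first
  // (the tests register AllowPlugins("prefix-"), hence the prefix-gen- name).
  std::map<std::string, std::string>::const_iterator it =
      plugins.find("prefix-gen-badplug");
  if (it != plugins.end()) {
    std::cout << "would exec: " << it->second << "\n";  // "no_such_file"
  }
  return 0;
}
```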
+ + CreateTempFile("error.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + DisallowPlugins(); + Run("protocol_compiler --plug_out=TestParameter:$tmpdir " + "--proto_path=$tmpdir error.proto"); + + ExpectErrorSubstring("Unknown flag: --plug_out"); +} + +TEST_F(CommandLineInterfaceTest, HelpText) { + Run("test_exec_name --help"); + + ExpectErrorSubstring("Usage: test_exec_name "); + ExpectErrorSubstring("--test_out=OUT_DIR"); + ExpectErrorSubstring("Test output."); + ExpectErrorSubstring("--alt_out=OUT_DIR"); + ExpectErrorSubstring("Alt output."); +} + +TEST_F(CommandLineInterfaceTest, GccFormatErrors) { + // Test --error_format=gcc (which is the default, but we want to verify + // that it can be set explicitly). + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "badsyntax\n"); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir --error_format=gcc foo.proto"); + + ExpectErrorText( + "foo.proto:2:1: Expected top-level statement (e.g. \"message\").\n"); +} + +TEST_F(CommandLineInterfaceTest, MsvsFormatErrors) { + // Test --error_format=msvs + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "badsyntax\n"); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir --error_format=msvs foo.proto"); + + ExpectErrorText( + "$tmpdir/foo.proto(2) : error in column=1: Expected top-level statement " + "(e.g. \"message\").\n"); +} + +TEST_F(CommandLineInterfaceTest, InvalidErrorFormat) { + // Test --error_format=msvs + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "badsyntax\n"); + + Run("protocol_compiler --test_out=$tmpdir " + "--proto_path=$tmpdir --error_format=invalid foo.proto"); + + ExpectErrorText( + "Unknown error format: invalid\n"); +} + +// ------------------------------------------------------------------- +// Flag parsing tests + +TEST_F(CommandLineInterfaceTest, ParseSingleCharacterFlag) { + // Test that a single-character flag works. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler -t$tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectNoErrors(); + ExpectGenerated("test_generator", "", "foo.proto", "Foo"); +} + +TEST_F(CommandLineInterfaceTest, ParseSpaceDelimitedValue) { + // Test that separating the flag value with a space works. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler --test_out $tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectNoErrors(); + ExpectGenerated("test_generator", "", "foo.proto", "Foo"); +} + +TEST_F(CommandLineInterfaceTest, ParseSingleCharacterSpaceDelimitedValue) { + // Test that separating the flag value with a space works for + // single-character flags. + + CreateTempFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + Run("protocol_compiler -t $tmpdir " + "--proto_path=$tmpdir foo.proto"); + + ExpectNoErrors(); + ExpectGenerated("test_generator", "", "foo.proto", "Foo"); +} + +TEST_F(CommandLineInterfaceTest, MissingValueError) { + // Test that we get an error if a flag is missing its value. + + Run("protocol_compiler --test_out --proto_path=$tmpdir foo.proto"); + + ExpectErrorText("Missing value for flag: --test_out\n"); +} + +TEST_F(CommandLineInterfaceTest, MissingValueAtEndError) { + // Test that we get an error if the last argument is a flag requiring a + // value. 
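The GccFormatErrors and MsvsFormatErrors tests above pin down the two error layouts that --error_format selects between. Reproducing them as a small formatter, with 1-based line and column numbers as in the expected strings:

```cpp
#include <iostream>
#include <sstream>
#include <string>

// The two error layouts exercised by the GccFormatErrors / MsvsFormatErrors
// tests, rebuilt as a standalone formatter for illustration.
std::string FormatError(const std::string& format, const std::string& filename,
                        int line, int column, const std::string& message) {
  std::ostringstream out;
  if (format == "msvs") {
    // e.g. "foo.proto(2) : error in column=1: Expected top-level statement ..."
    out << filename << "(" << line << ") : error in column=" << column << ": "
        << message;
  } else {
    // gcc-style default: "foo.proto:2:1: Expected top-level statement ..."
    out << filename << ":" << line << ":" << column << ": " << message;
  }
  return out.str();
}

int main() {
  const std::string msg = "Expected top-level statement (e.g. \"message\").";
  std::cout << FormatError("gcc", "foo.proto", 2, 1, msg) << "\n";
  std::cout << FormatError("msvs", "foo.proto", 2, 1, msg) << "\n";
  return 0;
}
```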
+ + Run("protocol_compiler --test_out"); + + ExpectErrorText("Missing value for flag: --test_out\n"); +} + +// =================================================================== + +// Test for --encode and --decode. Note that it would be easier to do this +// test as a shell script, but we'd like to be able to run the test on +// platforms that don't have a Bourne-compatible shell available (especially +// Windows/MSVC). +class EncodeDecodeTest : public testing::Test { + protected: + virtual void SetUp() { + duped_stdin_ = dup(STDIN_FILENO); + } + + virtual void TearDown() { + dup2(duped_stdin_, STDIN_FILENO); + close(duped_stdin_); + } + + void RedirectStdinFromText(const string& input) { + string filename = TestTempDir() + "/test_stdin"; + File::WriteStringToFileOrDie(input, filename); + GOOGLE_CHECK(RedirectStdinFromFile(filename)); + } + + bool RedirectStdinFromFile(const string& filename) { + int fd = open(filename.c_str(), O_RDONLY); + if (fd < 0) return false; + dup2(fd, STDIN_FILENO); + close(fd); + return true; + } + + // Remove '\r' characters from text. + string StripCR(const string& text) { + string result; + + for (int i = 0; i < text.size(); i++) { + if (text[i] != '\r') { + result.push_back(text[i]); + } + } + + return result; + } + + enum Type { TEXT, BINARY }; + enum ReturnCode { SUCCESS, ERROR }; + + bool Run(const string& command) { + vector args; + args.push_back("protoc"); + SplitStringUsing(command, " ", &args); + args.push_back("--proto_path=" + TestSourceDir()); + + scoped_array argv(new const char*[args.size()]); + for (int i = 0; i < args.size(); i++) { + argv[i] = args[i].c_str(); + } + + CommandLineInterface cli; + cli.SetInputsAreProtoPathRelative(true); + + CaptureTestStdout(); + CaptureTestStderr(); + + int result = cli.Run(args.size(), argv.get()); + + captured_stdout_ = GetCapturedTestStdout(); + captured_stderr_ = GetCapturedTestStderr(); + + return result == 0; + } + + void ExpectStdoutMatchesBinaryFile(const string& filename) { + string expected_output; + ASSERT_TRUE(File::ReadFileToString(filename, &expected_output)); + + // Don't use EXPECT_EQ because we don't want to print raw binary data to + // stdout on failure. 
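EncodeDecodeTest drives --encode and --decode through redirected stdin/stdout. The same text-format/wire-format round trip can be shown in-process with the protobuf runtime; FileDescriptorProto is used below only because it is a generated message that ships with libprotobuf, whereas the tests use protobuf_unittest.TestAllTypes:

```cpp
#include <iostream>
#include <string>

#include <google/protobuf/descriptor.pb.h>
#include <google/protobuf/text_format.h>

int main() {
  google::protobuf::FileDescriptorProto proto;

  // "--encode" direction: parse text format, emit binary wire format.
  if (!google::protobuf::TextFormat::ParseFromString("name: \"foo.proto\"\n",
                                                     &proto)) {
    return 1;
  }
  std::string wire;
  proto.SerializeToString(&wire);

  // "--decode" direction: parse the wire format, print text format.
  google::protobuf::FileDescriptorProto decoded;
  decoded.ParseFromString(wire);
  std::string text;
  google::protobuf::TextFormat::PrintToString(decoded, &text);
  std::cout << text;  // prints: name: "foo.proto"
  return 0;
}
```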
+ EXPECT_TRUE(captured_stdout_ == expected_output); + } + + void ExpectStdoutMatchesTextFile(const string& filename) { + string expected_output; + ASSERT_TRUE(File::ReadFileToString(filename, &expected_output)); + + ExpectStdoutMatchesText(expected_output); + } + + void ExpectStdoutMatchesText(const string& expected_text) { + EXPECT_EQ(StripCR(expected_text), StripCR(captured_stdout_)); + } + + void ExpectStderrMatchesText(const string& expected_text) { + EXPECT_EQ(StripCR(expected_text), StripCR(captured_stderr_)); + } + + private: + int duped_stdin_; + string captured_stdout_; + string captured_stderr_; +}; + +TEST_F(EncodeDecodeTest, Encode) { + RedirectStdinFromFile(TestSourceDir() + + "/google/protobuf/testdata/text_format_unittest_data.txt"); + EXPECT_TRUE(Run("google/protobuf/unittest.proto " + "--encode=protobuf_unittest.TestAllTypes")); + ExpectStdoutMatchesBinaryFile(TestSourceDir() + + "/google/protobuf/testdata/golden_message"); + ExpectStderrMatchesText(""); +} + +TEST_F(EncodeDecodeTest, Decode) { + RedirectStdinFromFile(TestSourceDir() + + "/google/protobuf/testdata/golden_message"); + EXPECT_TRUE(Run("google/protobuf/unittest.proto " + "--decode=protobuf_unittest.TestAllTypes")); + ExpectStdoutMatchesTextFile(TestSourceDir() + + "/google/protobuf/testdata/text_format_unittest_data.txt"); + ExpectStderrMatchesText(""); +} + +TEST_F(EncodeDecodeTest, Partial) { + RedirectStdinFromText(""); + EXPECT_TRUE(Run("google/protobuf/unittest.proto " + "--encode=protobuf_unittest.TestRequired")); + ExpectStdoutMatchesText(""); + ExpectStderrMatchesText( + "warning: Input message is missing required fields: a, b, c\n"); +} + +TEST_F(EncodeDecodeTest, DecodeRaw) { + protobuf_unittest::TestAllTypes message; + message.set_optional_int32(123); + message.set_optional_string("foo"); + string data; + message.SerializeToString(&data); + + RedirectStdinFromText(data); + EXPECT_TRUE(Run("--decode_raw")); + ExpectStdoutMatchesText("1: 123\n" + "14: \"foo\"\n"); + ExpectStderrMatchesText(""); +} + +TEST_F(EncodeDecodeTest, UnknownType) { + EXPECT_FALSE(Run("google/protobuf/unittest.proto " + "--encode=NoSuchType")); + ExpectStdoutMatchesText(""); + ExpectStderrMatchesText("Type not defined: NoSuchType\n"); +} + +TEST_F(EncodeDecodeTest, ProtoParseError) { + EXPECT_FALSE(Run("google/protobuf/no_such_file.proto " + "--encode=NoSuchType")); + ExpectStdoutMatchesText(""); + ExpectStderrMatchesText( + "google/protobuf/no_such_file.proto: File not found.\n"); +} + +} // anonymous namespace + +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_bootstrap_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_bootstrap_unittest.cc new file mode 100644 index 0000000000..bcfa502073 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_bootstrap_unittest.cc @@ -0,0 +1,158 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This test insures that google/protobuf/descriptor.pb.{h,cc} match exactly +// what would be generated by the protocol compiler. These files are not +// generated automatically at build time because they are compiled into the +// protocol compiler itself. So, if they were auto-generated, you'd have a +// chicken-and-egg problem. +// +// If this test fails, run the script +// "generate_descriptor_proto.sh" and add +// descriptor.pb.{h,cc} to your changelist. + +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +namespace { + +class MockErrorCollector : public MultiFileErrorCollector { + public: + MockErrorCollector() {} + ~MockErrorCollector() {} + + string text_; + + // implements ErrorCollector --------------------------------------- + void AddError(const string& filename, int line, int column, + const string& message) { + strings::SubstituteAndAppend(&text_, "$0:$1:$2: $3\n", + filename, line, column, message); + } +}; + +class MockGeneratorContext : public GeneratorContext { + public: + MockGeneratorContext() {} + ~MockGeneratorContext() { + STLDeleteValues(&files_); + } + + void ExpectFileMatches(const string& virtual_filename, + const string& physical_filename) { + string* expected_contents = FindPtrOrNull(files_, virtual_filename); + ASSERT_TRUE(expected_contents != NULL) + << "Generator failed to generate file: " << virtual_filename; + + string actual_contents; + File::ReadFileToStringOrDie( + TestSourceDir() + "/" + physical_filename, + &actual_contents); + EXPECT_TRUE(actual_contents == *expected_contents) + << physical_filename << " needs to be regenerated. 
Please run " + "generate_descriptor_proto.sh and add this file " + "to your CL."; + } + + // implements GeneratorContext -------------------------------------- + + virtual io::ZeroCopyOutputStream* Open(const string& filename) { + string** map_slot = &files_[filename]; + if (*map_slot != NULL) delete *map_slot; + *map_slot = new string; + + return new io::StringOutputStream(*map_slot); + } + + private: + map files_; +}; + +TEST(BootstrapTest, GeneratedDescriptorMatches) { + MockErrorCollector error_collector; + DiskSourceTree source_tree; + source_tree.MapPath("", TestSourceDir()); + Importer importer(&source_tree, &error_collector); + const FileDescriptor* proto_file = + importer.Import("google/protobuf/descriptor.proto"); + const FileDescriptor* plugin_proto_file = + importer.Import("google/protobuf/compiler/plugin.proto"); + EXPECT_EQ("", error_collector.text_); + ASSERT_TRUE(proto_file != NULL); + ASSERT_TRUE(plugin_proto_file != NULL); + + CppGenerator generator; + MockGeneratorContext context; + string error; + string parameter; + parameter = "dllexport_decl=LIBPROTOBUF_EXPORT"; + ASSERT_TRUE(generator.Generate(proto_file, parameter, + &context, &error)); + parameter = "dllexport_decl=LIBPROTOC_EXPORT"; + ASSERT_TRUE(generator.Generate(plugin_proto_file, parameter, + &context, &error)); + + context.ExpectFileMatches("google/protobuf/descriptor.pb.h", + "google/protobuf/descriptor.pb.h"); + context.ExpectFileMatches("google/protobuf/descriptor.pb.cc", + "google/protobuf/descriptor.pb.cc"); + context.ExpectFileMatches("google/protobuf/compiler/plugin.pb.h", + "google/protobuf/compiler/plugin.pb.h"); + context.ExpectFileMatches("google/protobuf/compiler/plugin.pb.cc", + "google/protobuf/compiler/plugin.pb.cc"); +} + +} // namespace + +} // namespace cpp +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_enum.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_enum.cc new file mode 100644 index 0000000000..76d2b79853 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_enum.cc @@ -0,0 +1,258 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include + +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +EnumGenerator::EnumGenerator(const EnumDescriptor* descriptor, + const string& dllexport_decl) + : descriptor_(descriptor), + classname_(ClassName(descriptor, false)), + dllexport_decl_(dllexport_decl) { +} + +EnumGenerator::~EnumGenerator() {} + +void EnumGenerator::GenerateDefinition(io::Printer* printer) { + map vars; + vars["classname"] = classname_; + vars["short_name"] = descriptor_->name(); + + printer->Print(vars, "enum $classname$ {\n"); + printer->Indent(); + + const EnumValueDescriptor* min_value = descriptor_->value(0); + const EnumValueDescriptor* max_value = descriptor_->value(0); + + for (int i = 0; i < descriptor_->value_count(); i++) { + vars["name"] = descriptor_->value(i)->name(); + vars["number"] = SimpleItoa(descriptor_->value(i)->number()); + vars["prefix"] = (descriptor_->containing_type() == NULL) ? + "" : classname_ + "_"; + + if (i > 0) printer->Print(",\n"); + printer->Print(vars, "$prefix$$name$ = $number$"); + + if (descriptor_->value(i)->number() < min_value->number()) { + min_value = descriptor_->value(i); + } + if (descriptor_->value(i)->number() > max_value->number()) { + max_value = descriptor_->value(i); + } + } + + printer->Outdent(); + printer->Print("\n};\n"); + + vars["min_name"] = min_value->name(); + vars["max_name"] = max_value->name(); + + if (dllexport_decl_.empty()) { + vars["dllexport"] = ""; + } else { + vars["dllexport"] = dllexport_decl_ + " "; + } + + printer->Print(vars, + "$dllexport$bool $classname$_IsValid(int value);\n" + "const $classname$ $prefix$$short_name$_MIN = $prefix$$min_name$;\n" + "const $classname$ $prefix$$short_name$_MAX = $prefix$$max_name$;\n" + "const int $prefix$$short_name$_ARRAYSIZE = $prefix$$short_name$_MAX + 1;\n" + "\n"); + + if (HasDescriptorMethods(descriptor_->file())) { + printer->Print(vars, + "$dllexport$const ::google::protobuf::EnumDescriptor* $classname$_descriptor();\n"); + // The _Name and _Parse methods + printer->Print(vars, + "inline const ::std::string& $classname$_Name($classname$ value) {\n" + " return ::google::protobuf::internal::NameOfEnum(\n" + " $classname$_descriptor(), value);\n" + "}\n"); + printer->Print(vars, + "inline bool $classname$_Parse(\n" + " const ::std::string& name, $classname$* value) {\n" + " return ::google::protobuf::internal::ParseNamedEnum<$classname$>(\n" + " $classname$_descriptor(), name, value);\n" + "}\n"); + } +} + +void EnumGenerator:: +GenerateGetEnumDescriptorSpecializations(io::Printer* printer) { + if (HasDescriptorMethods(descriptor_->file())) { + printer->Print( + "template <>\n" + "inline const EnumDescriptor* GetEnumDescriptor< $classname$>() {\n" + " return $classname$_descriptor();\n" + "}\n", + "classname", ClassName(descriptor_, 
true)); + } +} + +void EnumGenerator::GenerateSymbolImports(io::Printer* printer) { + map vars; + vars["nested_name"] = descriptor_->name(); + vars["classname"] = classname_; + printer->Print(vars, "typedef $classname$ $nested_name$;\n"); + + for (int j = 0; j < descriptor_->value_count(); j++) { + vars["tag"] = descriptor_->value(j)->name(); + printer->Print(vars, + "static const $nested_name$ $tag$ = $classname$_$tag$;\n"); + } + + printer->Print(vars, + "static inline bool $nested_name$_IsValid(int value) {\n" + " return $classname$_IsValid(value);\n" + "}\n" + "static const $nested_name$ $nested_name$_MIN =\n" + " $classname$_$nested_name$_MIN;\n" + "static const $nested_name$ $nested_name$_MAX =\n" + " $classname$_$nested_name$_MAX;\n" + "static const int $nested_name$_ARRAYSIZE =\n" + " $classname$_$nested_name$_ARRAYSIZE;\n"); + + if (HasDescriptorMethods(descriptor_->file())) { + printer->Print(vars, + "static inline const ::google::protobuf::EnumDescriptor*\n" + "$nested_name$_descriptor() {\n" + " return $classname$_descriptor();\n" + "}\n"); + printer->Print(vars, + "static inline const ::std::string& $nested_name$_Name($nested_name$ value) {\n" + " return $classname$_Name(value);\n" + "}\n"); + printer->Print(vars, + "static inline bool $nested_name$_Parse(const ::std::string& name,\n" + " $nested_name$* value) {\n" + " return $classname$_Parse(name, value);\n" + "}\n"); + } +} + +void EnumGenerator::GenerateDescriptorInitializer( + io::Printer* printer, int index) { + map vars; + vars["classname"] = classname_; + vars["index"] = SimpleItoa(index); + + if (descriptor_->containing_type() == NULL) { + printer->Print(vars, + "$classname$_descriptor_ = file->enum_type($index$);\n"); + } else { + vars["parent"] = ClassName(descriptor_->containing_type(), false); + printer->Print(vars, + "$classname$_descriptor_ = $parent$_descriptor_->enum_type($index$);\n"); + } +} + +void EnumGenerator::GenerateMethods(io::Printer* printer) { + map vars; + vars["classname"] = classname_; + + if (HasDescriptorMethods(descriptor_->file())) { + printer->Print(vars, + "const ::google::protobuf::EnumDescriptor* $classname$_descriptor() {\n" + " protobuf_AssignDescriptorsOnce();\n" + " return $classname$_descriptor_;\n" + "}\n"); + } + + printer->Print(vars, + "bool $classname$_IsValid(int value) {\n" + " switch(value) {\n"); + + // Multiple values may have the same number. Make sure we only cover + // each number once by first constructing a set containing all valid + // numbers, then printing a case statement for each element. + + set numbers; + for (int j = 0; j < descriptor_->value_count(); j++) { + const EnumValueDescriptor* value = descriptor_->value(j); + numbers.insert(value->number()); + } + + for (set::iterator iter = numbers.begin(); + iter != numbers.end(); ++iter) { + printer->Print( + " case $number$:\n", + "number", SimpleItoa(*iter)); + } + + printer->Print(vars, + " return true;\n" + " default:\n" + " return false;\n" + " }\n" + "}\n" + "\n"); + + if (descriptor_->containing_type() != NULL) { + // We need to "define" the static constants which were declared in the + // header, to give the linker a place to put them. Or at least the C++ + // standard says we have to. MSVC actually insists tha we do _not_ define + // them again in the .cc file. 
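GenerateMethods() above collects enum numbers into a set so the emitted _IsValid switch covers each number exactly once, even when several names alias one value. Hand-written, the generated shape looks roughly like this; the enum itself is hypothetical:

```cpp
#include <iostream>

// Hand-written illustration of the code GenerateMethods() emits, for a
// hypothetical nested enum in which two names share a number (the reason the
// generator deduplicates numbers through a set before printing cases).
enum Foo_Corpus {
  Foo_Corpus_UNIVERSAL = 0,
  Foo_Corpus_WEB = 1,
  Foo_Corpus_WWW = 1,   // alias: same number as WEB
  Foo_Corpus_IMAGES = 2
};

bool Foo_Corpus_IsValid(int value) {
  switch (value) {      // each number appears exactly once
    case 0:
    case 1:
    case 2:
      return true;
    default:
      return false;
  }
}

int main() {
  std::cout << Foo_Corpus_IsValid(1) << " " << Foo_Corpus_IsValid(7) << "\n";
  return 0;
}
```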
+ printer->Print("#ifndef _MSC_VER\n"); + + vars["parent"] = ClassName(descriptor_->containing_type(), false); + vars["nested_name"] = descriptor_->name(); + for (int i = 0; i < descriptor_->value_count(); i++) { + vars["value"] = descriptor_->value(i)->name(); + printer->Print(vars, + "const $classname$ $parent$::$value$;\n"); + } + printer->Print(vars, + "const $classname$ $parent$::$nested_name$_MIN;\n" + "const $classname$ $parent$::$nested_name$_MAX;\n" + "const int $parent$::$nested_name$_ARRAYSIZE;\n"); + + printer->Print("#endif // _MSC_VER\n"); + } +} + +} // namespace cpp +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_enum.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_enum.h new file mode 100644 index 0000000000..58f7721ecd --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_enum.h @@ -0,0 +1,99 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifndef GOOGLE_PROTOBUF_COMPILER_CPP_ENUM_H__ +#define GOOGLE_PROTOBUF_COMPILER_CPP_ENUM_H__ + +#include +#include + +namespace google { +namespace protobuf { + namespace io { + class Printer; // printer.h + } +} + +namespace protobuf { +namespace compiler { +namespace cpp { + +class EnumGenerator { + public: + // See generator.cc for the meaning of dllexport_decl. + explicit EnumGenerator(const EnumDescriptor* descriptor, + const string& dllexport_decl); + ~EnumGenerator(); + + // Header stuff. + + // Generate header code defining the enum. 
This code should be placed + // within the enum's package namespace, but NOT within any class, even for + // nested enums. + void GenerateDefinition(io::Printer* printer); + + // Generate specialization of GetEnumDescriptor(). + // Precondition: in ::google::protobuf namespace. + void GenerateGetEnumDescriptorSpecializations(io::Printer* printer); + + // For enums nested within a message, generate code to import all the enum's + // symbols (e.g. the enum type name, all its values, etc.) into the class's + // namespace. This should be placed inside the class definition in the + // header. + void GenerateSymbolImports(io::Printer* printer); + + // Source file stuff. + + // Generate code that initializes the global variable storing the enum's + // descriptor. + void GenerateDescriptorInitializer(io::Printer* printer, int index); + + // Generate non-inline methods related to the enum, such as IsValidValue(). + // Goes in the .cc file. + void GenerateMethods(io::Printer* printer); + + private: + const EnumDescriptor* descriptor_; + string classname_; + string dllexport_decl_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(EnumGenerator); +}; + +} // namespace cpp +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_CPP_ENUM_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_enum_field.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_enum_field.cc new file mode 100644 index 0000000000..a369f41781 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_enum_field.cc @@ -0,0 +1,361 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
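For orientation, the enum generator added above expands its Print() templates into ordinary C++ declarations. A rough illustrative sketch, with a hypothetical .proto input that is not part of this change: for a top-level

    enum Level { LEVEL_DEBUG = 0; LEVEL_ERROR = 2; }

GenerateDefinition() would emit approximately

    enum Level {
      LEVEL_DEBUG = 0,
      LEVEL_ERROR = 2
    };
    bool Level_IsValid(int value);
    const Level Level_MIN = LEVEL_DEBUG;          // smallest declared number
    const Level Level_MAX = LEVEL_ERROR;          // largest declared number
    const int Level_ARRAYSIZE = Level_MAX + 1;

plus Level_descriptor(), Level_Name() and Level_Parse() helpers when descriptor methods are enabled, while GenerateMethods() fills in Level_IsValid() as a switch over the distinct declared numbers.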
+ +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +namespace { + +void SetEnumVariables(const FieldDescriptor* descriptor, + map* variables) { + SetCommonFieldVariables(descriptor, variables); + const EnumValueDescriptor* default_value = descriptor->default_value_enum(); + (*variables)["type"] = ClassName(descriptor->enum_type(), true); + (*variables)["default"] = SimpleItoa(default_value->number()); +} + +} // namespace + +// =================================================================== + +EnumFieldGenerator:: +EnumFieldGenerator(const FieldDescriptor* descriptor) + : descriptor_(descriptor) { + SetEnumVariables(descriptor, &variables_); +} + +EnumFieldGenerator::~EnumFieldGenerator() {} + +void EnumFieldGenerator:: +GeneratePrivateMembers(io::Printer* printer) const { + printer->Print(variables_, "int $name$_;\n"); +} + +void EnumFieldGenerator:: +GenerateAccessorDeclarations(io::Printer* printer) const { + printer->Print(variables_, + "inline $type$ $name$() const$deprecation$;\n" + "inline void set_$name$($type$ value)$deprecation$;\n"); +} + +void EnumFieldGenerator:: +GenerateInlineAccessorDefinitions(io::Printer* printer) const { + printer->Print(variables_, + "inline $type$ $classname$::$name$() const {\n" + " return static_cast< $type$ >($name$_);\n" + "}\n" + "inline void $classname$::set_$name$($type$ value) {\n" + " GOOGLE_DCHECK($type$_IsValid(value));\n" + " set_has_$name$();\n" + " $name$_ = value;\n" + "}\n"); +} + +void EnumFieldGenerator:: +GenerateClearingCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_ = $default$;\n"); +} + +void EnumFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + printer->Print(variables_, "set_$name$(from.$name$());\n"); +} + +void EnumFieldGenerator:: +GenerateSwappingCode(io::Printer* printer) const { + printer->Print(variables_, "std::swap($name$_, other->$name$_);\n"); +} + +void EnumFieldGenerator:: +GenerateConstructorCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_ = $default$;\n"); +} + +void EnumFieldGenerator:: +GenerateMergeFromCodedStream(io::Printer* printer) const { + printer->Print(variables_, + "int value;\n" + "DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<\n" + " int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(\n" + " input, &value)));\n" + "if ($type$_IsValid(value)) {\n" + " set_$name$(static_cast< $type$ >(value));\n"); + if (HasUnknownFields(descriptor_->file())) { + printer->Print(variables_, + "} else {\n" + " mutable_unknown_fields()->AddVarint($number$, value);\n"); + } + printer->Print(variables_, + "}\n"); +} + +void EnumFieldGenerator:: +GenerateSerializeWithCachedSizes(io::Printer* printer) const { + printer->Print(variables_, + "::google::protobuf::internal::WireFormatLite::WriteEnum(\n" + " $number$, this->$name$(), output);\n"); +} + +void EnumFieldGenerator:: +GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const { + printer->Print(variables_, + "target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray(\n" + " $number$, this->$name$(), target);\n"); +} + +void EnumFieldGenerator:: +GenerateByteSize(io::Printer* printer) const { + printer->Print(variables_, + "total_size += $tag_size$ +\n" + " 
::google::protobuf::internal::WireFormatLite::EnumSize(this->$name$());\n"); +} + +// =================================================================== + +RepeatedEnumFieldGenerator:: +RepeatedEnumFieldGenerator(const FieldDescriptor* descriptor) + : descriptor_(descriptor) { + SetEnumVariables(descriptor, &variables_); +} + +RepeatedEnumFieldGenerator::~RepeatedEnumFieldGenerator() {} + +void RepeatedEnumFieldGenerator:: +GeneratePrivateMembers(io::Printer* printer) const { + printer->Print(variables_, + "::google::protobuf::RepeatedField $name$_;\n"); + if (descriptor_->options().packed() && HasGeneratedMethods(descriptor_->file())) { + printer->Print(variables_, + "mutable int _$name$_cached_byte_size_;\n"); + } +} + +void RepeatedEnumFieldGenerator:: +GenerateAccessorDeclarations(io::Printer* printer) const { + printer->Print(variables_, + "inline $type$ $name$(int index) const$deprecation$;\n" + "inline void set_$name$(int index, $type$ value)$deprecation$;\n" + "inline void add_$name$($type$ value)$deprecation$;\n"); + printer->Print(variables_, + "inline const ::google::protobuf::RepeatedField& $name$() const$deprecation$;\n" + "inline ::google::protobuf::RepeatedField* mutable_$name$()$deprecation$;\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateInlineAccessorDefinitions(io::Printer* printer) const { + printer->Print(variables_, + "inline $type$ $classname$::$name$(int index) const {\n" + " return static_cast< $type$ >($name$_.Get(index));\n" + "}\n" + "inline void $classname$::set_$name$(int index, $type$ value) {\n" + " GOOGLE_DCHECK($type$_IsValid(value));\n" + " $name$_.Set(index, value);\n" + "}\n" + "inline void $classname$::add_$name$($type$ value) {\n" + " GOOGLE_DCHECK($type$_IsValid(value));\n" + " $name$_.Add(value);\n" + "}\n"); + printer->Print(variables_, + "inline const ::google::protobuf::RepeatedField&\n" + "$classname$::$name$() const {\n" + " return $name$_;\n" + "}\n" + "inline ::google::protobuf::RepeatedField*\n" + "$classname$::mutable_$name$() {\n" + " return &$name$_;\n" + "}\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateClearingCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_.Clear();\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_.MergeFrom(from.$name$_);\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateSwappingCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_.Swap(&other->$name$_);\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateConstructorCode(io::Printer* printer) const { + // Not needed for repeated fields. +} + +void RepeatedEnumFieldGenerator:: +GenerateMergeFromCodedStream(io::Printer* printer) const { + // Don't use ReadRepeatedPrimitive here so that the enum can be validated. 
+ printer->Print(variables_, + "int value;\n" + "DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<\n" + " int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(\n" + " input, &value)));\n" + "if ($type$_IsValid(value)) {\n" + " add_$name$(static_cast< $type$ >(value));\n"); + if (HasUnknownFields(descriptor_->file())) { + printer->Print(variables_, + "} else {\n" + " mutable_unknown_fields()->AddVarint($number$, value);\n"); + } + printer->Print("}\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateMergeFromCodedStreamWithPacking(io::Printer* printer) const { + if (!descriptor_->options().packed()) { + // We use a non-inlined implementation in this case, since this path will + // rarely be executed. + printer->Print(variables_, + "DO_((::google::protobuf::internal::WireFormatLite::ReadPackedEnumNoInline(\n" + " input,\n" + " &$type$_IsValid,\n" + " this->mutable_$name$())));\n"); + } else { + printer->Print(variables_, + "::google::protobuf::uint32 length;\n" + "DO_(input->ReadVarint32(&length));\n" + "::google::protobuf::io::CodedInputStream::Limit limit = " + "input->PushLimit(length);\n" + "while (input->BytesUntilLimit() > 0) {\n" + " int value;\n" + " DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<\n" + " int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(\n" + " input, &value)));\n" + " if ($type$_IsValid(value)) {\n" + " add_$name$(static_cast< $type$ >(value));\n" + " }\n" + "}\n" + "input->PopLimit(limit);\n"); + } +} + +void RepeatedEnumFieldGenerator:: +GenerateSerializeWithCachedSizes(io::Printer* printer) const { + if (descriptor_->options().packed()) { + // Write the tag and the size. + printer->Print(variables_, + "if (this->$name$_size() > 0) {\n" + " ::google::protobuf::internal::WireFormatLite::WriteTag(\n" + " $number$,\n" + " ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED,\n" + " output);\n" + " output->WriteVarint32(_$name$_cached_byte_size_);\n" + "}\n"); + } + printer->Print(variables_, + "for (int i = 0; i < this->$name$_size(); i++) {\n"); + if (descriptor_->options().packed()) { + printer->Print(variables_, + " ::google::protobuf::internal::WireFormatLite::WriteEnumNoTag(\n" + " this->$name$(i), output);\n"); + } else { + printer->Print(variables_, + " ::google::protobuf::internal::WireFormatLite::WriteEnum(\n" + " $number$, this->$name$(i), output);\n"); + } + printer->Print("}\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const { + if (descriptor_->options().packed()) { + // Write the tag and the size. 
+ printer->Print(variables_, + "if (this->$name$_size() > 0) {\n" + " target = ::google::protobuf::internal::WireFormatLite::WriteTagToArray(\n" + " $number$,\n" + " ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED,\n" + " target);\n" + " target = ::google::protobuf::io::CodedOutputStream::WriteVarint32ToArray(" + " _$name$_cached_byte_size_, target);\n" + "}\n"); + } + printer->Print(variables_, + "for (int i = 0; i < this->$name$_size(); i++) {\n"); + if (descriptor_->options().packed()) { + printer->Print(variables_, + " target = ::google::protobuf::internal::WireFormatLite::WriteEnumNoTagToArray(\n" + " this->$name$(i), target);\n"); + } else { + printer->Print(variables_, + " target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray(\n" + " $number$, this->$name$(i), target);\n"); + } + printer->Print("}\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateByteSize(io::Printer* printer) const { + printer->Print(variables_, + "{\n" + " int data_size = 0;\n"); + printer->Indent(); + printer->Print(variables_, + "for (int i = 0; i < this->$name$_size(); i++) {\n" + " data_size += ::google::protobuf::internal::WireFormatLite::EnumSize(\n" + " this->$name$(i));\n" + "}\n"); + + if (descriptor_->options().packed()) { + printer->Print(variables_, + "if (data_size > 0) {\n" + " total_size += $tag_size$ +\n" + " ::google::protobuf::internal::WireFormatLite::Int32Size(data_size);\n" + "}\n" + "_$name$_cached_byte_size_ = data_size;\n" + "total_size += data_size;\n"); + } else { + printer->Print(variables_, + "total_size += $tag_size$ * this->$name$_size() + data_size;\n"); + } + printer->Outdent(); + printer->Print("}\n"); +} + +} // namespace cpp +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_enum_field.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_enum_field.h new file mode 100644 index 0000000000..0793430c1d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_enum_field.h @@ -0,0 +1,103 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifndef GOOGLE_PROTOBUF_COMPILER_CPP_ENUM_FIELD_H__ +#define GOOGLE_PROTOBUF_COMPILER_CPP_ENUM_FIELD_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +class EnumFieldGenerator : public FieldGenerator { + public: + explicit EnumFieldGenerator(const FieldDescriptor* descriptor); + ~EnumFieldGenerator(); + + // implements FieldGenerator --------------------------------------- + void GeneratePrivateMembers(io::Printer* printer) const; + void GenerateAccessorDeclarations(io::Printer* printer) const; + void GenerateInlineAccessorDefinitions(io::Printer* printer) const; + void GenerateClearingCode(io::Printer* printer) const; + void GenerateMergingCode(io::Printer* printer) const; + void GenerateSwappingCode(io::Printer* printer) const; + void GenerateConstructorCode(io::Printer* printer) const; + void GenerateMergeFromCodedStream(io::Printer* printer) const; + void GenerateSerializeWithCachedSizes(io::Printer* printer) const; + void GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const; + void GenerateByteSize(io::Printer* printer) const; + + private: + const FieldDescriptor* descriptor_; + map variables_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(EnumFieldGenerator); +}; + +class RepeatedEnumFieldGenerator : public FieldGenerator { + public: + explicit RepeatedEnumFieldGenerator(const FieldDescriptor* descriptor); + ~RepeatedEnumFieldGenerator(); + + // implements FieldGenerator --------------------------------------- + void GeneratePrivateMembers(io::Printer* printer) const; + void GenerateAccessorDeclarations(io::Printer* printer) const; + void GenerateInlineAccessorDefinitions(io::Printer* printer) const; + void GenerateClearingCode(io::Printer* printer) const; + void GenerateMergingCode(io::Printer* printer) const; + void GenerateSwappingCode(io::Printer* printer) const; + void GenerateConstructorCode(io::Printer* printer) const; + void GenerateMergeFromCodedStream(io::Printer* printer) const; + void GenerateMergeFromCodedStreamWithPacking(io::Printer* printer) const; + void GenerateSerializeWithCachedSizes(io::Printer* printer) const; + void GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const; + void GenerateByteSize(io::Printer* printer) const; + + private: + const FieldDescriptor* descriptor_; + map variables_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RepeatedEnumFieldGenerator); +}; + +} // namespace cpp +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_CPP_ENUM_FIELD_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_extension.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_extension.cc new file mode 100644 index 
0000000000..658a7077bb --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_extension.cc @@ -0,0 +1,210 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +namespace { + +// Returns the fully-qualified class name of the message that this field +// extends. This function is used in the Google-internal code to handle some +// legacy cases. +string ExtendeeClassName(const FieldDescriptor* descriptor) { + const Descriptor* extendee = descriptor->containing_type(); + return ClassName(extendee, true); +} + +} // anonymous namespace + +ExtensionGenerator::ExtensionGenerator(const FieldDescriptor* descriptor, + const string& dllexport_decl) + : descriptor_(descriptor), + dllexport_decl_(dllexport_decl) { + // Construct type_traits_. 
+ if (descriptor_->is_repeated()) { + type_traits_ = "Repeated"; + } + + switch (descriptor_->cpp_type()) { + case FieldDescriptor::CPPTYPE_ENUM: + type_traits_.append("EnumTypeTraits< "); + type_traits_.append(ClassName(descriptor_->enum_type(), true)); + type_traits_.append(", "); + type_traits_.append(ClassName(descriptor_->enum_type(), true)); + type_traits_.append("_IsValid>"); + break; + case FieldDescriptor::CPPTYPE_STRING: + type_traits_.append("StringTypeTraits"); + break; + case FieldDescriptor::CPPTYPE_MESSAGE: + type_traits_.append("MessageTypeTraits< "); + type_traits_.append(ClassName(descriptor_->message_type(), true)); + type_traits_.append(" >"); + break; + default: + type_traits_.append("PrimitiveTypeTraits< "); + type_traits_.append(PrimitiveTypeName(descriptor_->cpp_type())); + type_traits_.append(" >"); + break; + } +} + +ExtensionGenerator::~ExtensionGenerator() {} + +void ExtensionGenerator::GenerateDeclaration(io::Printer* printer) { + map vars; + vars["extendee" ] = ExtendeeClassName(descriptor_); + vars["number" ] = SimpleItoa(descriptor_->number()); + vars["type_traits" ] = type_traits_; + vars["name" ] = descriptor_->name(); + vars["field_type" ] = SimpleItoa(static_cast(descriptor_->type())); + vars["packed" ] = descriptor_->options().packed() ? "true" : "false"; + vars["constant_name"] = FieldConstantName(descriptor_); + + // If this is a class member, it needs to be declared "static". Otherwise, + // it needs to be "extern". In the latter case, it also needs the DLL + // export/import specifier. + if (descriptor_->extension_scope() == NULL) { + vars["qualifier"] = "extern"; + if (!dllexport_decl_.empty()) { + vars["qualifier"] = dllexport_decl_ + " " + vars["qualifier"]; + } + } else { + vars["qualifier"] = "static"; + } + + printer->Print(vars, + "static const int $constant_name$ = $number$;\n" + "$qualifier$ ::google::protobuf::internal::ExtensionIdentifier< $extendee$,\n" + " ::google::protobuf::internal::$type_traits$, $field_type$, $packed$ >\n" + " $name$;\n" + ); + +} + +void ExtensionGenerator::GenerateDefinition(io::Printer* printer) { + // If this is a class member, it needs to be declared in its class scope. + string scope = (descriptor_->extension_scope() == NULL) ? "" : + ClassName(descriptor_->extension_scope(), false) + "::"; + string name = scope + descriptor_->name(); + + map vars; + vars["extendee" ] = ExtendeeClassName(descriptor_); + vars["type_traits" ] = type_traits_; + vars["name" ] = name; + vars["constant_name"] = FieldConstantName(descriptor_); + vars["default" ] = DefaultValue(descriptor_); + vars["field_type" ] = SimpleItoa(static_cast(descriptor_->type())); + vars["packed" ] = descriptor_->options().packed() ? "true" : "false"; + vars["scope" ] = scope; + + if (descriptor_->cpp_type() == FieldDescriptor::CPPTYPE_STRING) { + // We need to declare a global string which will contain the default value. + // We cannot declare it at class scope because that would require exposing + // it in the header which would be annoying for other reasons. So we + // replace :: with _ in the name and declare it as a global. + string global_name = StringReplace(name, "::", "_", true); + vars["global_name"] = global_name; + printer->Print(vars, + "const ::std::string $global_name$_default($default$);\n"); + + // Update the default to refer to the string global. + vars["default"] = global_name + "_default"; + } + + // Likewise, class members need to declare the field constant variable. 
+ if (descriptor_->extension_scope() != NULL) { + printer->Print(vars, + "#ifndef _MSC_VER\n" + "const int $scope$$constant_name$;\n" + "#endif\n"); + } + + printer->Print(vars, + "::google::protobuf::internal::ExtensionIdentifier< $extendee$,\n" + " ::google::protobuf::internal::$type_traits$, $field_type$, $packed$ >\n" + " $name$($constant_name$, $default$);\n"); +} + +void ExtensionGenerator::GenerateRegistration(io::Printer* printer) { + map vars; + vars["extendee" ] = ExtendeeClassName(descriptor_); + vars["number" ] = SimpleItoa(descriptor_->number()); + vars["field_type" ] = SimpleItoa(static_cast(descriptor_->type())); + vars["is_repeated"] = descriptor_->is_repeated() ? "true" : "false"; + vars["is_packed" ] = (descriptor_->is_repeated() && + descriptor_->options().packed()) + ? "true" : "false"; + + switch (descriptor_->cpp_type()) { + case FieldDescriptor::CPPTYPE_ENUM: + printer->Print(vars, + "::google::protobuf::internal::ExtensionSet::RegisterEnumExtension(\n" + " &$extendee$::default_instance(),\n" + " $number$, $field_type$, $is_repeated$, $is_packed$,\n"); + printer->Print( + " &$type$_IsValid);\n", + "type", ClassName(descriptor_->enum_type(), true)); + break; + case FieldDescriptor::CPPTYPE_MESSAGE: + printer->Print(vars, + "::google::protobuf::internal::ExtensionSet::RegisterMessageExtension(\n" + " &$extendee$::default_instance(),\n" + " $number$, $field_type$, $is_repeated$, $is_packed$,\n"); + printer->Print( + " &$type$::default_instance());\n", + "type", ClassName(descriptor_->message_type(), true)); + break; + default: + printer->Print(vars, + "::google::protobuf::internal::ExtensionSet::RegisterExtension(\n" + " &$extendee$::default_instance(),\n" + " $number$, $field_type$, $is_repeated$, $is_packed$);\n"); + break; + } +} + +} // namespace cpp +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_extension.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_extension.h new file mode 100644 index 0000000000..3068b09148 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_extension.h @@ -0,0 +1,85 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifndef GOOGLE_PROTOBUF_COMPILER_CPP_EXTENSION_H__ +#define GOOGLE_PROTOBUF_COMPILER_CPP_EXTENSION_H__ + +#include +#include + +namespace google { +namespace protobuf { + class FieldDescriptor; // descriptor.h + namespace io { + class Printer; // printer.h + } +} + +namespace protobuf { +namespace compiler { +namespace cpp { + +// Generates code for an extension, which may be within the scope of some +// message or may be at file scope. This is much simpler than FieldGenerator +// since extensions are just simple identifiers with interesting types. +class ExtensionGenerator { + public: + // See generator.cc for the meaning of dllexport_decl. + explicit ExtensionGenerator(const FieldDescriptor* descriptor, + const string& dllexport_decl); + ~ExtensionGenerator(); + + // Header stuff. + void GenerateDeclaration(io::Printer* printer); + + // Source file stuff. + void GenerateDefinition(io::Printer* printer); + + // Generate code to register the extension. + void GenerateRegistration(io::Printer* printer); + + private: + const FieldDescriptor* descriptor_; + string type_traits_; + string dllexport_decl_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ExtensionGenerator); +}; + +} // namespace cpp +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_CPP_MESSAGE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_field.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_field.cc new file mode 100644 index 0000000000..103cac4a9f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_field.cc @@ -0,0 +1,139 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
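As a rough sketch of what the extension generator above produces (the extendee Foo and extension bar below are hypothetical, not taken from this change): for a file-scope

    extend Foo { optional int32 bar = 123; }

GenerateDeclaration() emits approximately

    static const int kBarFieldNumber = 123;
    extern ::google::protobuf::internal::ExtensionIdentifier< ::Foo,
        ::google::protobuf::internal::PrimitiveTypeTraits< ::google::protobuf::int32 >, 5, false >
      bar;

GenerateDefinition() then pairs this with a definition passing the field number and default value, and GenerateRegistration() registers the extension against Foo::default_instance() in ExtensionSet.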
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +using internal::WireFormat; + +void SetCommonFieldVariables(const FieldDescriptor* descriptor, + map* variables) { + (*variables)["name"] = FieldName(descriptor); + (*variables)["index"] = SimpleItoa(descriptor->index()); + (*variables)["number"] = SimpleItoa(descriptor->number()); + (*variables)["classname"] = ClassName(FieldScope(descriptor), false); + (*variables)["declared_type"] = DeclaredTypeMethodName(descriptor->type()); + + (*variables)["tag_size"] = SimpleItoa( + WireFormat::TagSize(descriptor->number(), descriptor->type())); + (*variables)["deprecation"] = descriptor->options().deprecated() + ? " PROTOBUF_DEPRECATED" : ""; + +} + +FieldGenerator::~FieldGenerator() {} + +void FieldGenerator:: +GenerateMergeFromCodedStreamWithPacking(io::Printer* printer) const { + // Reaching here indicates a bug. Cases are: + // - This FieldGenerator should support packing, but this method should be + // overridden. + // - This FieldGenerator doesn't support packing, and this method should + // never have been called. + GOOGLE_LOG(FATAL) << "GenerateMergeFromCodedStreamWithPacking() " + << "called on field generator that does not support packing."; + +} + +FieldGeneratorMap::FieldGeneratorMap(const Descriptor* descriptor) + : descriptor_(descriptor), + field_generators_( + new scoped_ptr[descriptor->field_count()]) { + // Construct all the FieldGenerators. + for (int i = 0; i < descriptor->field_count(); i++) { + field_generators_[i].reset(MakeGenerator(descriptor->field(i))); + } +} + +FieldGenerator* FieldGeneratorMap::MakeGenerator(const FieldDescriptor* field) { + if (field->is_repeated()) { + switch (field->cpp_type()) { + case FieldDescriptor::CPPTYPE_MESSAGE: + return new RepeatedMessageFieldGenerator(field); + case FieldDescriptor::CPPTYPE_STRING: + switch (field->options().ctype()) { + default: // RepeatedStringFieldGenerator handles unknown ctypes. + case FieldOptions::STRING: + return new RepeatedStringFieldGenerator(field); + } + case FieldDescriptor::CPPTYPE_ENUM: + return new RepeatedEnumFieldGenerator(field); + default: + return new RepeatedPrimitiveFieldGenerator(field); + } + } else { + switch (field->cpp_type()) { + case FieldDescriptor::CPPTYPE_MESSAGE: + return new MessageFieldGenerator(field); + case FieldDescriptor::CPPTYPE_STRING: + switch (field->options().ctype()) { + default: // StringFieldGenerator handles unknown ctypes. 
+ case FieldOptions::STRING: + return new StringFieldGenerator(field); + } + case FieldDescriptor::CPPTYPE_ENUM: + return new EnumFieldGenerator(field); + default: + return new PrimitiveFieldGenerator(field); + } + } +} + +FieldGeneratorMap::~FieldGeneratorMap() {} + +const FieldGenerator& FieldGeneratorMap::get( + const FieldDescriptor* field) const { + GOOGLE_CHECK_EQ(field->containing_type(), descriptor_); + return *field_generators_[field->index()]; +} + + +} // namespace cpp +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_field.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_field.h new file mode 100644 index 0000000000..c303a3378f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_field.h @@ -0,0 +1,167 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifndef GOOGLE_PROTOBUF_COMPILER_CPP_FIELD_H__ +#define GOOGLE_PROTOBUF_COMPILER_CPP_FIELD_H__ + +#include +#include + +#include +#include + +namespace google { +namespace protobuf { + namespace io { + class Printer; // printer.h + } +} + +namespace protobuf { +namespace compiler { +namespace cpp { + +// Helper function: set variables in the map that are the same for all +// field code generators. +// ['name', 'index', 'number', 'classname', 'declared_type', 'tag_size', +// 'deprecation']. 
+void SetCommonFieldVariables(const FieldDescriptor* descriptor, + map* variables); + +class FieldGenerator { + public: + FieldGenerator() {} + virtual ~FieldGenerator(); + + // Generate lines of code declaring members fields of the message class + // needed to represent this field. These are placed inside the message + // class. + virtual void GeneratePrivateMembers(io::Printer* printer) const = 0; + + // Generate prototypes for all of the accessor functions related to this + // field. These are placed inside the class definition. + virtual void GenerateAccessorDeclarations(io::Printer* printer) const = 0; + + // Generate inline definitions of accessor functions for this field. + // These are placed inside the header after all class definitions. + virtual void GenerateInlineAccessorDefinitions( + io::Printer* printer) const = 0; + + // Generate definitions of accessors that aren't inlined. These are + // placed somewhere in the .cc file. + // Most field types don't need this, so the default implementation is empty. + virtual void GenerateNonInlineAccessorDefinitions( + io::Printer* printer) const {} + + // Generate lines of code (statements, not declarations) which clear the + // field. This is used to define the clear_$name$() method as well as + // the Clear() method for the whole message. + virtual void GenerateClearingCode(io::Printer* printer) const = 0; + + // Generate lines of code (statements, not declarations) which merges the + // contents of the field from the current message to the target message, + // which is stored in the generated code variable "from". + // This is used to fill in the MergeFrom method for the whole message. + // Details of this usage can be found in message.cc under the + // GenerateMergeFrom method. + virtual void GenerateMergingCode(io::Printer* printer) const = 0; + + // Generate lines of code (statements, not declarations) which swaps + // this field and the corresponding field of another message, which + // is stored in the generated code variable "other". This is used to + // define the Swap method. Details of usage can be found in + // message.cc under the GenerateSwap method. + virtual void GenerateSwappingCode(io::Printer* printer) const = 0; + + // Generate initialization code for private members declared by + // GeneratePrivateMembers(). These go into the message class's SharedCtor() + // method, invoked by each of the generated constructors. + virtual void GenerateConstructorCode(io::Printer* printer) const = 0; + + // Generate any code that needs to go in the class's SharedDtor() method, + // invoked by the destructor. + // Most field types don't need this, so the default implementation is empty. + virtual void GenerateDestructorCode(io::Printer* printer) const {} + + // Generate lines to decode this field, which will be placed inside the + // message's MergeFromCodedStream() method. + virtual void GenerateMergeFromCodedStream(io::Printer* printer) const = 0; + + // Generate lines to decode this field from a packed value, which will be + // placed inside the message's MergeFromCodedStream() method. + virtual void GenerateMergeFromCodedStreamWithPacking(io::Printer* printer) + const; + + // Generate lines to serialize this field, which are placed within the + // message's SerializeWithCachedSizes() method. 
+ virtual void GenerateSerializeWithCachedSizes(io::Printer* printer) const = 0; + + // Generate lines to serialize this field directly to the array "target", + // which are placed within the message's SerializeWithCachedSizesToArray() + // method. This must also advance "target" past the written bytes. + virtual void GenerateSerializeWithCachedSizesToArray( + io::Printer* printer) const = 0; + + // Generate lines to compute the serialized size of this field, which + // are placed in the message's ByteSize() method. + virtual void GenerateByteSize(io::Printer* printer) const = 0; + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FieldGenerator); +}; + +// Convenience class which constructs FieldGenerators for a Descriptor. +class FieldGeneratorMap { + public: + explicit FieldGeneratorMap(const Descriptor* descriptor); + ~FieldGeneratorMap(); + + const FieldGenerator& get(const FieldDescriptor* field) const; + + private: + const Descriptor* descriptor_; + scoped_array > field_generators_; + + static FieldGenerator* MakeGenerator(const FieldDescriptor* field); + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FieldGeneratorMap); +}; + + +} // namespace cpp +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_CPP_FIELD_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_file.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_file.cc new file mode 100644 index 0000000000..312ebc86d2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_file.cc @@ -0,0 +1,611 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
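To make the $variable$ placeholders used by these field generators concrete, here is a rough worked example of SetCommonFieldVariables() for a hypothetical field that is not part of this change: for optional int32 bar = 123; declared first in message Foo, the variables map would hold approximately

    // name          -> "bar"
    // index         -> "0"    (first field of Foo)
    // number        -> "123"
    // classname     -> "Foo"
    // declared_type -> "Int32"
    // tag_size      -> "2"    (the varint tag (123 << 3) | 0 = 984 needs two bytes)
    // deprecation   -> ""     (field not marked deprecated)

so a template line such as "inline void $classname$::set_$name$(...)" expands to "inline void Foo::set_bar(...)".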
+ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +// =================================================================== + +FileGenerator::FileGenerator(const FileDescriptor* file, + const string& dllexport_decl) + : file_(file), + message_generators_( + new scoped_ptr[file->message_type_count()]), + enum_generators_( + new scoped_ptr[file->enum_type_count()]), + service_generators_( + new scoped_ptr[file->service_count()]), + extension_generators_( + new scoped_ptr[file->extension_count()]), + dllexport_decl_(dllexport_decl) { + + for (int i = 0; i < file->message_type_count(); i++) { + message_generators_[i].reset( + new MessageGenerator(file->message_type(i), dllexport_decl)); + } + + for (int i = 0; i < file->enum_type_count(); i++) { + enum_generators_[i].reset( + new EnumGenerator(file->enum_type(i), dllexport_decl)); + } + + for (int i = 0; i < file->service_count(); i++) { + service_generators_[i].reset( + new ServiceGenerator(file->service(i), dllexport_decl)); + } + + for (int i = 0; i < file->extension_count(); i++) { + extension_generators_[i].reset( + new ExtensionGenerator(file->extension(i), dllexport_decl)); + } + + SplitStringUsing(file_->package(), ".", &package_parts_); +} + +FileGenerator::~FileGenerator() {} + +void FileGenerator::GenerateHeader(io::Printer* printer) { + string filename_identifier = FilenameIdentifier(file_->name()); + + // Generate top of header. + printer->Print( + "// Generated by the protocol buffer compiler. DO NOT EDIT!\n" + "// source: $filename$\n" + "\n" + "#ifndef PROTOBUF_$filename_identifier$__INCLUDED\n" + "#define PROTOBUF_$filename_identifier$__INCLUDED\n" + "\n" + "#include \n" + "\n", + "filename", file_->name(), + "filename_identifier", filename_identifier); + + printer->Print( + "#include \n" + "\n"); + + // Verify the protobuf library header version is compatible with the protoc + // version before going any further. + printer->Print( + "#if GOOGLE_PROTOBUF_VERSION < $min_header_version$\n" + "#error This file was generated by a newer version of protoc which is\n" + "#error incompatible with your Protocol Buffer headers. Please update\n" + "#error your headers.\n" + "#endif\n" + "#if $protoc_version$ < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION\n" + "#error This file was generated by an older version of protoc which is\n" + "#error incompatible with your Protocol Buffer headers. Please\n" + "#error regenerate this file with a newer version of protoc.\n" + "#endif\n" + "\n", + "min_header_version", + SimpleItoa(protobuf::internal::kMinHeaderVersionForProtoc), + "protoc_version", SimpleItoa(GOOGLE_PROTOBUF_VERSION)); + + // OK, it's now safe to #include other files. + printer->Print( + "#include \n" + "#include \n" + "#include \n"); + + if (HasDescriptorMethods(file_)) { + printer->Print( + "#include \n"); + } + + if (HasGenericServices(file_)) { + printer->Print( + "#include \n"); + } + + + for (int i = 0; i < file_->dependency_count(); i++) { + printer->Print( + "#include \"$dependency$.pb.h\"\n", + "dependency", StripProto(file_->dependency(i)->name())); + } + + printer->Print( + "// @@protoc_insertion_point(includes)\n"); + + // Open namespace. + GenerateNamespaceOpeners(printer); + + // Forward-declare the AddDescriptors, AssignDescriptors, and ShutdownFile + // functions, so that we can declare them to be friends of each class. 
+ printer->Print( + "\n" + "// Internal implementation detail -- do not call these.\n" + "void $dllexport_decl$ $adddescriptorsname$();\n", + "adddescriptorsname", GlobalAddDescriptorsName(file_->name()), + "dllexport_decl", dllexport_decl_); + + printer->Print( + // Note that we don't put dllexport_decl on these because they are only + // called by the .pb.cc file in which they are defined. + "void $assigndescriptorsname$();\n" + "void $shutdownfilename$();\n" + "\n", + "assigndescriptorsname", GlobalAssignDescriptorsName(file_->name()), + "shutdownfilename", GlobalShutdownFileName(file_->name())); + + // Generate forward declarations of classes. + for (int i = 0; i < file_->message_type_count(); i++) { + message_generators_[i]->GenerateForwardDeclaration(printer); + } + + printer->Print("\n"); + + // Generate enum definitions. + for (int i = 0; i < file_->message_type_count(); i++) { + message_generators_[i]->GenerateEnumDefinitions(printer); + } + for (int i = 0; i < file_->enum_type_count(); i++) { + enum_generators_[i]->GenerateDefinition(printer); + } + + printer->Print(kThickSeparator); + printer->Print("\n"); + + // Generate class definitions. + for (int i = 0; i < file_->message_type_count(); i++) { + if (i > 0) { + printer->Print("\n"); + printer->Print(kThinSeparator); + printer->Print("\n"); + } + message_generators_[i]->GenerateClassDefinition(printer); + } + + printer->Print("\n"); + printer->Print(kThickSeparator); + printer->Print("\n"); + + if (HasGenericServices(file_)) { + // Generate service definitions. + for (int i = 0; i < file_->service_count(); i++) { + if (i > 0) { + printer->Print("\n"); + printer->Print(kThinSeparator); + printer->Print("\n"); + } + service_generators_[i]->GenerateDeclarations(printer); + } + + printer->Print("\n"); + printer->Print(kThickSeparator); + printer->Print("\n"); + } + + // Declare extension identifiers. + for (int i = 0; i < file_->extension_count(); i++) { + extension_generators_[i]->GenerateDeclaration(printer); + } + + printer->Print("\n"); + printer->Print(kThickSeparator); + printer->Print("\n"); + + // Generate class inline methods. + for (int i = 0; i < file_->message_type_count(); i++) { + if (i > 0) { + printer->Print(kThinSeparator); + printer->Print("\n"); + } + message_generators_[i]->GenerateInlineMethods(printer); + } + + printer->Print( + "\n" + "// @@protoc_insertion_point(namespace_scope)\n"); + + // Close up namespace. + GenerateNamespaceClosers(printer); + + // Emit GetEnumDescriptor specializations into google::protobuf namespace: + if (HasDescriptorMethods(file_)) { + // The SWIG conditional is to avoid a null-pointer dereference + // (bug 1984964) in swig-1.3.21 resulting from the following syntax: + // namespace X { void Y(); } + // which appears in GetEnumDescriptor() specializations. 
+ printer->Print( + "\n" + "#ifndef SWIG\n" + "namespace google {\nnamespace protobuf {\n" + "\n"); + for (int i = 0; i < file_->message_type_count(); i++) { + message_generators_[i]->GenerateGetEnumDescriptorSpecializations(printer); + } + for (int i = 0; i < file_->enum_type_count(); i++) { + enum_generators_[i]->GenerateGetEnumDescriptorSpecializations(printer); + } + printer->Print( + "\n" + "} // namespace google\n} // namespace protobuf\n" + "#endif // SWIG\n"); + } + + printer->Print( + "\n" + "// @@protoc_insertion_point(global_scope)\n" + "\n"); + + printer->Print( + "#endif // PROTOBUF_$filename_identifier$__INCLUDED\n", + "filename_identifier", filename_identifier); +} + +void FileGenerator::GenerateSource(io::Printer* printer) { + printer->Print( + "// Generated by the protocol buffer compiler. DO NOT EDIT!\n" + "\n" + + // The generated code calls accessors that might be deprecated. We don't + // want the compiler to warn in generated code. + "#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION\n" + "#include \"$basename$.pb.h\"\n" + "\n" + "#include \n" // for swap() + "\n" + "#include \n" + "#include \n" + "#include \n", + "basename", StripProto(file_->name())); + + if (HasDescriptorMethods(file_)) { + printer->Print( + "#include \n" + "#include \n" + "#include \n"); + } + + printer->Print( + "// @@protoc_insertion_point(includes)\n"); + + GenerateNamespaceOpeners(printer); + + if (HasDescriptorMethods(file_)) { + printer->Print( + "\n" + "namespace {\n" + "\n"); + for (int i = 0; i < file_->message_type_count(); i++) { + message_generators_[i]->GenerateDescriptorDeclarations(printer); + } + for (int i = 0; i < file_->enum_type_count(); i++) { + printer->Print( + "const ::google::protobuf::EnumDescriptor* $name$_descriptor_ = NULL;\n", + "name", ClassName(file_->enum_type(i), false)); + } + + if (HasGenericServices(file_)) { + for (int i = 0; i < file_->service_count(); i++) { + printer->Print( + "const ::google::protobuf::ServiceDescriptor* $name$_descriptor_ = NULL;\n", + "name", file_->service(i)->name()); + } + } + + printer->Print( + "\n" + "} // namespace\n" + "\n"); + } + + // Define our externally-visible BuildDescriptors() function. (For the lite + // library, all this does is initialize default instances.) + GenerateBuildDescriptors(printer); + + // Generate enums. + for (int i = 0; i < file_->enum_type_count(); i++) { + enum_generators_[i]->GenerateMethods(printer); + } + + // Generate classes. + for (int i = 0; i < file_->message_type_count(); i++) { + printer->Print("\n"); + printer->Print(kThickSeparator); + printer->Print("\n"); + message_generators_[i]->GenerateClassMethods(printer); + } + + if (HasGenericServices(file_)) { + // Generate services. + for (int i = 0; i < file_->service_count(); i++) { + if (i == 0) printer->Print("\n"); + printer->Print(kThickSeparator); + printer->Print("\n"); + service_generators_[i]->GenerateImplementation(printer); + } + } + + // Define extensions. 
+ for (int i = 0; i < file_->extension_count(); i++) { + extension_generators_[i]->GenerateDefinition(printer); + } + + printer->Print( + "\n" + "// @@protoc_insertion_point(namespace_scope)\n"); + + GenerateNamespaceClosers(printer); + + printer->Print( + "\n" + "// @@protoc_insertion_point(global_scope)\n"); +} + +void FileGenerator::GenerateBuildDescriptors(io::Printer* printer) { + // AddDescriptors() is a file-level procedure which adds the encoded + // FileDescriptorProto for this .proto file to the global DescriptorPool + // for generated files (DescriptorPool::generated_pool()). It always runs + // at static initialization time, so all files will be registered before + // main() starts. This procedure also constructs default instances and + // registers extensions. + // + // Its sibling, AssignDescriptors(), actually pulls the compiled + // FileDescriptor from the DescriptorPool and uses it to populate all of + // the global variables which store pointers to the descriptor objects. + // It also constructs the reflection objects. It is called the first time + // anyone calls descriptor() or GetReflection() on one of the types defined + // in the file. + + // In optimize_for = LITE_RUNTIME mode, we don't generate AssignDescriptors() + // and we only use AddDescriptors() to allocate default instances. + if (HasDescriptorMethods(file_)) { + printer->Print( + "\n" + "void $assigndescriptorsname$() {\n", + "assigndescriptorsname", GlobalAssignDescriptorsName(file_->name())); + printer->Indent(); + + // Make sure the file has found its way into the pool. If a descriptor + // is requested *during* static init then AddDescriptors() may not have + // been called yet, so we call it manually. Note that it's fine if + // AddDescriptors() is called multiple times. + printer->Print( + "$adddescriptorsname$();\n", + "adddescriptorsname", GlobalAddDescriptorsName(file_->name())); + + // Get the file's descriptor from the pool. + printer->Print( + "const ::google::protobuf::FileDescriptor* file =\n" + " ::google::protobuf::DescriptorPool::generated_pool()->FindFileByName(\n" + " \"$filename$\");\n" + // Note that this GOOGLE_CHECK is necessary to prevent a warning about "file" + // being unused when compiling an empty .proto file. + "GOOGLE_CHECK(file != NULL);\n", + "filename", file_->name()); + + // Go through all the stuff defined in this file and generated code to + // assign the global descriptor pointers based on the file descriptor. + for (int i = 0; i < file_->message_type_count(); i++) { + message_generators_[i]->GenerateDescriptorInitializer(printer, i); + } + for (int i = 0; i < file_->enum_type_count(); i++) { + enum_generators_[i]->GenerateDescriptorInitializer(printer, i); + } + if (HasGenericServices(file_)) { + for (int i = 0; i < file_->service_count(); i++) { + service_generators_[i]->GenerateDescriptorInitializer(printer, i); + } + } + + printer->Outdent(); + printer->Print( + "}\n" + "\n"); + + // --------------------------------------------------------------- + + // protobuf_AssignDescriptorsOnce(): The first time it is called, calls + // AssignDescriptors(). All later times, waits for the first call to + // complete and then returns. 
+ printer->Print( + "namespace {\n" + "\n" + "GOOGLE_PROTOBUF_DECLARE_ONCE(protobuf_AssignDescriptors_once_);\n" + "inline void protobuf_AssignDescriptorsOnce() {\n" + " ::google::protobuf::GoogleOnceInit(&protobuf_AssignDescriptors_once_,\n" + " &$assigndescriptorsname$);\n" + "}\n" + "\n", + "assigndescriptorsname", GlobalAssignDescriptorsName(file_->name())); + + // protobuf_RegisterTypes(): Calls + // MessageFactory::InternalRegisterGeneratedType() for each message type. + printer->Print( + "void protobuf_RegisterTypes(const ::std::string&) {\n" + " protobuf_AssignDescriptorsOnce();\n"); + printer->Indent(); + + for (int i = 0; i < file_->message_type_count(); i++) { + message_generators_[i]->GenerateTypeRegistrations(printer); + } + + printer->Outdent(); + printer->Print( + "}\n" + "\n" + "} // namespace\n"); + } + + // ----------------------------------------------------------------- + + // ShutdownFile(): Deletes descriptors, default instances, etc. on shutdown. + printer->Print( + "\n" + "void $shutdownfilename$() {\n", + "shutdownfilename", GlobalShutdownFileName(file_->name())); + printer->Indent(); + + for (int i = 0; i < file_->message_type_count(); i++) { + message_generators_[i]->GenerateShutdownCode(printer); + } + + printer->Outdent(); + printer->Print( + "}\n"); + + // ----------------------------------------------------------------- + + // Now generate the AddDescriptors() function. + printer->Print( + "\n" + "void $adddescriptorsname$() {\n" + // We don't need any special synchronization here because this code is + // called at static init time before any threads exist. + " static bool already_here = false;\n" + " if (already_here) return;\n" + " already_here = true;\n" + " GOOGLE_PROTOBUF_VERIFY_VERSION;\n" + "\n", + "adddescriptorsname", GlobalAddDescriptorsName(file_->name())); + printer->Indent(); + + // Call the AddDescriptors() methods for all of our dependencies, to make + // sure they get added first. + for (int i = 0; i < file_->dependency_count(); i++) { + const FileDescriptor* dependency = file_->dependency(i); + // Print the namespace prefix for the dependency. + vector dependency_package_parts; + SplitStringUsing(dependency->package(), ".", &dependency_package_parts); + printer->Print("::"); + for (int i = 0; i < dependency_package_parts.size(); i++) { + printer->Print("$name$::", + "name", dependency_package_parts[i]); + } + // Call its AddDescriptors function. + printer->Print( + "$name$();\n", + "name", GlobalAddDescriptorsName(dependency->name())); + } + + if (HasDescriptorMethods(file_)) { + // Embed the descriptor. We simply serialize the entire FileDescriptorProto + // and embed it as a string literal, which is parsed and built into real + // descriptors at initialization time. + FileDescriptorProto file_proto; + file_->CopyTo(&file_proto); + string file_data; + file_proto.SerializeToString(&file_data); + + printer->Print( + "::google::protobuf::DescriptorPool::InternalAddGeneratedFile("); + + // Only write 40 bytes per line. + static const int kBytesPerLine = 40; + for (int i = 0; i < file_data.size(); i += kBytesPerLine) { + printer->Print("\n \"$data$\"", + "data", EscapeTrigraphs(CEscape(file_data.substr(i, kBytesPerLine)))); + } + printer->Print( + ", $size$);\n", + "size", SimpleItoa(file_data.size())); + + // Call MessageFactory::InternalRegisterGeneratedFile(). 
+ printer->Print( + "::google::protobuf::MessageFactory::InternalRegisterGeneratedFile(\n" + " \"$filename$\", &protobuf_RegisterTypes);\n", + "filename", file_->name()); + } + + // Allocate and initialize default instances. This can't be done lazily + // since default instances are returned by simple accessors and are used with + // extensions. Speaking of which, we also register extensions at this time. + for (int i = 0; i < file_->message_type_count(); i++) { + message_generators_[i]->GenerateDefaultInstanceAllocator(printer); + } + for (int i = 0; i < file_->extension_count(); i++) { + extension_generators_[i]->GenerateRegistration(printer); + } + for (int i = 0; i < file_->message_type_count(); i++) { + message_generators_[i]->GenerateDefaultInstanceInitializer(printer); + } + + printer->Print( + "::google::protobuf::internal::OnShutdown(&$shutdownfilename$);\n", + "shutdownfilename", GlobalShutdownFileName(file_->name())); + + printer->Outdent(); + + printer->Print( + "}\n" + "\n" + "// Force AddDescriptors() to be called at static initialization time.\n" + "struct StaticDescriptorInitializer_$filename$ {\n" + " StaticDescriptorInitializer_$filename$() {\n" + " $adddescriptorsname$();\n" + " }\n" + "} static_descriptor_initializer_$filename$_;\n" + "\n", + "adddescriptorsname", GlobalAddDescriptorsName(file_->name()), + "filename", FilenameIdentifier(file_->name())); +} + +void FileGenerator::GenerateNamespaceOpeners(io::Printer* printer) { + if (package_parts_.size() > 0) printer->Print("\n"); + + for (int i = 0; i < package_parts_.size(); i++) { + printer->Print("namespace $part$ {\n", + "part", package_parts_[i]); + } +} + +void FileGenerator::GenerateNamespaceClosers(io::Printer* printer) { + if (package_parts_.size() > 0) printer->Print("\n"); + + for (int i = package_parts_.size() - 1; i >= 0; i--) { + printer->Print("} // namespace $part$\n", + "part", package_parts_[i]); + } +} + +} // namespace cpp +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_file.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_file.h new file mode 100644 index 0000000000..b4e012857d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_file.h @@ -0,0 +1,98 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifndef GOOGLE_PROTOBUF_COMPILER_CPP_FILE_H__ +#define GOOGLE_PROTOBUF_COMPILER_CPP_FILE_H__ + +#include +#include +#include +#include + +namespace google { +namespace protobuf { + class FileDescriptor; // descriptor.h + namespace io { + class Printer; // printer.h + } +} + +namespace protobuf { +namespace compiler { +namespace cpp { + +class EnumGenerator; // enum.h +class MessageGenerator; // message.h +class ServiceGenerator; // service.h +class ExtensionGenerator; // extension.h + +class FileGenerator { + public: + // See generator.cc for the meaning of dllexport_decl. + explicit FileGenerator(const FileDescriptor* file, + const string& dllexport_decl); + ~FileGenerator(); + + void GenerateHeader(io::Printer* printer); + void GenerateSource(io::Printer* printer); + + private: + // Generate the BuildDescriptors() procedure, which builds all descriptors + // for types defined in the file. + void GenerateBuildDescriptors(io::Printer* printer); + + void GenerateNamespaceOpeners(io::Printer* printer); + void GenerateNamespaceClosers(io::Printer* printer); + + const FileDescriptor* file_; + + scoped_array > message_generators_; + scoped_array > enum_generators_; + scoped_array > service_generators_; + scoped_array > extension_generators_; + + // E.g. if the package is foo.bar, package_parts_ is {"foo", "bar"}. + vector package_parts_; + + string dllexport_decl_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FileGenerator); +}; + +} // namespace cpp +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_CPP_FILE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_generator.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_generator.cc new file mode 100644 index 0000000000..bb84e2ab46 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_generator.cc @@ -0,0 +1,122 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include + +#include +#include + +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +CppGenerator::CppGenerator() {} +CppGenerator::~CppGenerator() {} + +bool CppGenerator::Generate(const FileDescriptor* file, + const string& parameter, + GeneratorContext* generator_context, + string* error) const { + vector > options; + ParseGeneratorParameter(parameter, &options); + + // ----------------------------------------------------------------- + // parse generator options + + // TODO(kenton): If we ever have more options, we may want to create a + // class that encapsulates them which we can pass down to all the + // generator classes. Currently we pass dllexport_decl down to all of + // them via the constructors, but we don't want to have to add another + // constructor parameter for every option. + + // If the dllexport_decl option is passed to the compiler, we need to write + // it in front of every symbol that should be exported if this .proto is + // compiled into a Windows DLL. E.g., if the user invokes the protocol + // compiler as: + // protoc --cpp_out=dllexport_decl=FOO_EXPORT:outdir foo.proto + // then we'll define classes like this: + // class FOO_EXPORT Foo { + // ... + // } + // FOO_EXPORT is a macro which should expand to __declspec(dllexport) or + // __declspec(dllimport) depending on what is being compiled. + string dllexport_decl; + + for (int i = 0; i < options.size(); i++) { + if (options[i].first == "dllexport_decl") { + dllexport_decl = options[i].second; + } else { + *error = "Unknown generator option: " + options[i].first; + return false; + } + } + + // ----------------------------------------------------------------- + + + string basename = StripProto(file->name()); + basename.append(".pb"); + + FileGenerator file_generator(file, dllexport_decl); + + // Generate header. + { + scoped_ptr output( + generator_context->Open(basename + ".h")); + io::Printer printer(output.get(), '$'); + file_generator.GenerateHeader(&printer); + } + + // Generate cc file. 
+ { + scoped_ptr output( + generator_context->Open(basename + ".cc")); + io::Printer printer(output.get(), '$'); + file_generator.GenerateSource(&printer); + } + + return true; +} + +} // namespace cpp +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_generator.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_generator.h new file mode 100644 index 0000000000..a90e84d7b1 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_generator.h @@ -0,0 +1,72 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Generates C++ code for a given .proto file. + +#ifndef GOOGLE_PROTOBUF_COMPILER_CPP_GENERATOR_H__ +#define GOOGLE_PROTOBUF_COMPILER_CPP_GENERATOR_H__ + +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +// CodeGenerator implementation which generates a C++ source file and +// header. If you create your own protocol compiler binary and you want +// it to support C++ output, you can do so by registering an instance of this +// CodeGenerator with the CommandLineInterface in your main() function. 
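+//
+// A minimal usage sketch, assuming the CommandLineInterface API bundled with
+// this copy of protobuf (protoc's own main() follows the same pattern):
+//
+//   #include <google/protobuf/compiler/command_line_interface.h>
+//   #include <google/protobuf/compiler/cpp/cpp_generator.h>
+//
+//   int main(int argc, char* argv[]) {
+//     google::protobuf::compiler::CommandLineInterface cli;
+//     google::protobuf::compiler::cpp::CppGenerator cpp_generator;
+//     cli.RegisterGenerator("--cpp_out", &cpp_generator,
+//                           "Generate C++ header and source.");
+//     return cli.Run(argc, argv);
+//   }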
+class LIBPROTOC_EXPORT CppGenerator : public CodeGenerator { + public: + CppGenerator(); + ~CppGenerator(); + + // implements CodeGenerator ---------------------------------------- + bool Generate(const FileDescriptor* file, + const string& parameter, + GeneratorContext* generator_context, + string* error) const; + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(CppGenerator); +}; + +} // namespace cpp +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_CPP_GENERATOR_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_helpers.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_helpers.cc new file mode 100644 index 0000000000..25b05a85fd --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_helpers.cc @@ -0,0 +1,347 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
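+
+// Worked example (sketch) of the name mangling implemented at the bottom of
+// this file: FilenameIdentifier() keeps alphanumeric characters and replaces
+// every other character with '_' plus its hex code, and the Global*Name()
+// helpers prepend a fixed prefix. For a hypothetical foo/bar.proto:
+//
+//   FilenameIdentifier("foo/bar.proto")          == "foo_2fbar_2eproto"
+//   GlobalAddDescriptorsName("foo/bar.proto")    == "protobuf_AddDesc_foo_2fbar_2eproto"
+//   GlobalAssignDescriptorsName("foo/bar.proto") == "protobuf_AssignDesc_foo_2fbar_2eproto"
+//   GlobalShutdownFileName("foo/bar.proto")      == "protobuf_ShutdownFile_foo_2fbar_2eproto"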
+ +#include +#include +#include + +#include +#include +#include +#include + + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +namespace { + +string DotsToUnderscores(const string& name) { + return StringReplace(name, ".", "_", true); +} + +string DotsToColons(const string& name) { + return StringReplace(name, ".", "::", true); +} + +const char* const kKeywordList[] = { + "and", "and_eq", "asm", "auto", "bitand", "bitor", "bool", "break", "case", + "catch", "char", "class", "compl", "const", "const_cast", "continue", + "default", "delete", "do", "double", "dynamic_cast", "else", "enum", + "explicit", "extern", "false", "float", "for", "friend", "goto", "if", + "inline", "int", "long", "mutable", "namespace", "new", "not", "not_eq", + "operator", "or", "or_eq", "private", "protected", "public", "register", + "reinterpret_cast", "return", "short", "signed", "sizeof", "static", + "static_cast", "struct", "switch", "template", "this", "throw", "true", "try", + "typedef", "typeid", "typename", "union", "unsigned", "using", "virtual", + "void", "volatile", "wchar_t", "while", "xor", "xor_eq" +}; + +hash_set MakeKeywordsMap() { + hash_set result; + for (int i = 0; i < GOOGLE_ARRAYSIZE(kKeywordList); i++) { + result.insert(kKeywordList[i]); + } + return result; +} + +hash_set kKeywords = MakeKeywordsMap(); + +string UnderscoresToCamelCase(const string& input, bool cap_next_letter) { + string result; + // Note: I distrust ctype.h due to locales. + for (int i = 0; i < input.size(); i++) { + if ('a' <= input[i] && input[i] <= 'z') { + if (cap_next_letter) { + result += input[i] + ('A' - 'a'); + } else { + result += input[i]; + } + cap_next_letter = false; + } else if ('A' <= input[i] && input[i] <= 'Z') { + // Capital letters are left as-is. + result += input[i]; + cap_next_letter = false; + } else if ('0' <= input[i] && input[i] <= '9') { + result += input[i]; + cap_next_letter = true; + } else { + cap_next_letter = true; + } + } + return result; +} + +} // namespace + +const char kThickSeparator[] = + "// ===================================================================\n"; +const char kThinSeparator[] = + "// -------------------------------------------------------------------\n"; + +string ClassName(const Descriptor* descriptor, bool qualified) { + + // Find "outer", the descriptor of the top-level message in which + // "descriptor" is embedded. + const Descriptor* outer = descriptor; + while (outer->containing_type() != NULL) outer = outer->containing_type(); + + const string& outer_name = outer->full_name(); + string inner_name = descriptor->full_name().substr(outer_name.size()); + + if (qualified) { + return "::" + DotsToColons(outer_name) + DotsToUnderscores(inner_name); + } else { + return outer->name() + DotsToUnderscores(inner_name); + } +} + +string ClassName(const EnumDescriptor* enum_descriptor, bool qualified) { + if (enum_descriptor->containing_type() == NULL) { + if (qualified) { + return DotsToColons(enum_descriptor->full_name()); + } else { + return enum_descriptor->name(); + } + } else { + string result = ClassName(enum_descriptor->containing_type(), qualified); + result += '_'; + result += enum_descriptor->name(); + return result; + } +} + + +string SuperClassName(const Descriptor* descriptor) { + return HasDescriptorMethods(descriptor->file()) ? 
+ "::google::protobuf::Message" : "::google::protobuf::MessageLite"; +} + +string FieldName(const FieldDescriptor* field) { + string result = field->name(); + LowerString(&result); + if (kKeywords.count(result) > 0) { + result.append("_"); + } + return result; +} + +string FieldConstantName(const FieldDescriptor *field) { + string field_name = UnderscoresToCamelCase(field->name(), true); + string result = "k" + field_name + "FieldNumber"; + + if (!field->is_extension() && + field->containing_type()->FindFieldByCamelcaseName( + field->camelcase_name()) != field) { + // This field's camelcase name is not unique. As a hack, add the field + // number to the constant name. This makes the constant rather useless, + // but what can we do? + result += "_" + SimpleItoa(field->number()); + } + + return result; +} + +string FieldMessageTypeName(const FieldDescriptor* field) { + // Note: The Google-internal version of Protocol Buffers uses this function + // as a hook point for hacks to support legacy code. + return ClassName(field->message_type(), true); +} + +string StripProto(const string& filename) { + if (HasSuffixString(filename, ".protodevel")) { + return StripSuffixString(filename, ".protodevel"); + } else { + return StripSuffixString(filename, ".proto"); + } +} + +const char* PrimitiveTypeName(FieldDescriptor::CppType type) { + switch (type) { + case FieldDescriptor::CPPTYPE_INT32 : return "::google::protobuf::int32"; + case FieldDescriptor::CPPTYPE_INT64 : return "::google::protobuf::int64"; + case FieldDescriptor::CPPTYPE_UINT32 : return "::google::protobuf::uint32"; + case FieldDescriptor::CPPTYPE_UINT64 : return "::google::protobuf::uint64"; + case FieldDescriptor::CPPTYPE_DOUBLE : return "double"; + case FieldDescriptor::CPPTYPE_FLOAT : return "float"; + case FieldDescriptor::CPPTYPE_BOOL : return "bool"; + case FieldDescriptor::CPPTYPE_ENUM : return "int"; + case FieldDescriptor::CPPTYPE_STRING : return "::std::string"; + case FieldDescriptor::CPPTYPE_MESSAGE: return NULL; + + // No default because we want the compiler to complain if any new + // CppTypes are added. + } + + GOOGLE_LOG(FATAL) << "Can't get here."; + return NULL; +} + +const char* DeclaredTypeMethodName(FieldDescriptor::Type type) { + switch (type) { + case FieldDescriptor::TYPE_INT32 : return "Int32"; + case FieldDescriptor::TYPE_INT64 : return "Int64"; + case FieldDescriptor::TYPE_UINT32 : return "UInt32"; + case FieldDescriptor::TYPE_UINT64 : return "UInt64"; + case FieldDescriptor::TYPE_SINT32 : return "SInt32"; + case FieldDescriptor::TYPE_SINT64 : return "SInt64"; + case FieldDescriptor::TYPE_FIXED32 : return "Fixed32"; + case FieldDescriptor::TYPE_FIXED64 : return "Fixed64"; + case FieldDescriptor::TYPE_SFIXED32: return "SFixed32"; + case FieldDescriptor::TYPE_SFIXED64: return "SFixed64"; + case FieldDescriptor::TYPE_FLOAT : return "Float"; + case FieldDescriptor::TYPE_DOUBLE : return "Double"; + + case FieldDescriptor::TYPE_BOOL : return "Bool"; + case FieldDescriptor::TYPE_ENUM : return "Enum"; + + case FieldDescriptor::TYPE_STRING : return "String"; + case FieldDescriptor::TYPE_BYTES : return "Bytes"; + case FieldDescriptor::TYPE_GROUP : return "Group"; + case FieldDescriptor::TYPE_MESSAGE : return "Message"; + + // No default because we want the compiler to complain if any new + // types are added. 
+ } + GOOGLE_LOG(FATAL) << "Can't get here."; + return ""; +} + +string DefaultValue(const FieldDescriptor* field) { + switch (field->cpp_type()) { + case FieldDescriptor::CPPTYPE_INT32: + return SimpleItoa(field->default_value_int32()); + case FieldDescriptor::CPPTYPE_UINT32: + return SimpleItoa(field->default_value_uint32()) + "u"; + case FieldDescriptor::CPPTYPE_INT64: + return "GOOGLE_LONGLONG(" + SimpleItoa(field->default_value_int64()) + ")"; + case FieldDescriptor::CPPTYPE_UINT64: + return "GOOGLE_ULONGLONG(" + SimpleItoa(field->default_value_uint64())+ ")"; + case FieldDescriptor::CPPTYPE_DOUBLE: { + double value = field->default_value_double(); + if (value == numeric_limits::infinity()) { + return "::google::protobuf::internal::Infinity()"; + } else if (value == -numeric_limits::infinity()) { + return "-::google::protobuf::internal::Infinity()"; + } else if (value != value) { + return "::google::protobuf::internal::NaN()"; + } else { + return SimpleDtoa(value); + } + } + case FieldDescriptor::CPPTYPE_FLOAT: + { + float value = field->default_value_float(); + if (value == numeric_limits::infinity()) { + return "static_cast(::google::protobuf::internal::Infinity())"; + } else if (value == -numeric_limits::infinity()) { + return "static_cast(-::google::protobuf::internal::Infinity())"; + } else if (value != value) { + return "static_cast(::google::protobuf::internal::NaN())"; + } else { + string float_value = SimpleFtoa(value); + // If floating point value contains a period (.) or an exponent + // (either E or e), then append suffix 'f' to make it a float + // literal. + if (float_value.find_first_of(".eE") != string::npos) { + float_value.push_back('f'); + } + return float_value; + } + } + case FieldDescriptor::CPPTYPE_BOOL: + return field->default_value_bool() ? "true" : "false"; + case FieldDescriptor::CPPTYPE_ENUM: + // Lazy: Generate a static_cast because we don't have a helper function + // that constructs the full name of an enum value. + return strings::Substitute( + "static_cast< $0 >($1)", + ClassName(field->enum_type(), true), + field->default_value_enum()->number()); + case FieldDescriptor::CPPTYPE_STRING: + return "\"" + EscapeTrigraphs(CEscape(field->default_value_string())) + + "\""; + case FieldDescriptor::CPPTYPE_MESSAGE: + return FieldMessageTypeName(field) + "::default_instance()"; + } + // Can't actually get here; make compiler happy. (We could add a default + // case above but then we wouldn't get the nice compiler warning when a + // new type is added.) + GOOGLE_LOG(FATAL) << "Can't get here."; + return ""; +} + +// Convert a file name into a valid identifier. +string FilenameIdentifier(const string& filename) { + string result; + for (int i = 0; i < filename.size(); i++) { + if (ascii_isalnum(filename[i])) { + result.push_back(filename[i]); + } else { + // Not alphanumeric. To avoid any possibility of name conflicts we + // use the hex code for the character. + result.push_back('_'); + char buffer[kFastToBufferSize]; + result.append(FastHexToBuffer(static_cast(filename[i]), buffer)); + } + } + return result; +} + +// Return the name of the AddDescriptors() function for a given file. +string GlobalAddDescriptorsName(const string& filename) { + return "protobuf_AddDesc_" + FilenameIdentifier(filename); +} + +// Return the name of the AssignDescriptors() function for a given file. 
+string GlobalAssignDescriptorsName(const string& filename) { + return "protobuf_AssignDesc_" + FilenameIdentifier(filename); +} + +// Return the name of the ShutdownFile() function for a given file. +string GlobalShutdownFileName(const string& filename) { + return "protobuf_ShutdownFile_" + FilenameIdentifier(filename); +} + +// Escape C++ trigraphs by escaping question marks to \? +string EscapeTrigraphs(const string& to_escape) { + return StringReplace(to_escape, "?", "\\?", true); +} + +} // namespace cpp +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_helpers.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_helpers.h new file mode 100644 index 0000000000..b13d53beff --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_helpers.h @@ -0,0 +1,159 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifndef GOOGLE_PROTOBUF_COMPILER_CPP_HELPERS_H__ +#define GOOGLE_PROTOBUF_COMPILER_CPP_HELPERS_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +// Commonly-used separator comments. Thick is a line of '=', thin is a line +// of '-'. +extern const char kThickSeparator[]; +extern const char kThinSeparator[]; + +// Returns the non-nested type name for the given type. If "qualified" is +// true, prefix the type with the full namespace. 
For example, if you had: +// package foo.bar; +// message Baz { message Qux {} } +// Then the qualified ClassName for Qux would be: +// ::foo::bar::Baz_Qux +// While the non-qualified version would be: +// Baz_Qux +string ClassName(const Descriptor* descriptor, bool qualified); +string ClassName(const EnumDescriptor* enum_descriptor, bool qualified); + +string SuperClassName(const Descriptor* descriptor); + +// Get the (unqualified) name that should be used for this field in C++ code. +// The name is coerced to lower-case to emulate proto1 behavior. People +// should be using lowercase-with-underscores style for proto field names +// anyway, so normally this just returns field->name(). +string FieldName(const FieldDescriptor* field); + +// Get the unqualified name that should be used for a field's field +// number constant. +string FieldConstantName(const FieldDescriptor *field); + +// Returns the scope where the field was defined (for extensions, this is +// different from the message type to which the field applies). +inline const Descriptor* FieldScope(const FieldDescriptor* field) { + return field->is_extension() ? + field->extension_scope() : field->containing_type(); +} + +// Returns the fully-qualified type name field->message_type(). Usually this +// is just ClassName(field->message_type(), true); +string FieldMessageTypeName(const FieldDescriptor* field); + +// Strips ".proto" or ".protodevel" from the end of a filename. +string StripProto(const string& filename); + +// Get the C++ type name for a primitive type (e.g. "double", "::google::protobuf::int32", etc.). +// Note: non-built-in type names will be qualified, meaning they will start +// with a ::. If you are using the type as a template parameter, you will +// need to insure there is a space between the < and the ::, because the +// ridiculous C++ standard defines "<:" to be a synonym for "[". +const char* PrimitiveTypeName(FieldDescriptor::CppType type); + +// Get the declared type name in CamelCase format, as is used e.g. for the +// methods of WireFormat. For example, TYPE_INT32 becomes "Int32". +const char* DeclaredTypeMethodName(FieldDescriptor::Type type); + +// Get code that evaluates to the field's default value. +string DefaultValue(const FieldDescriptor* field); + +// Convert a file name into a valid identifier. +string FilenameIdentifier(const string& filename); + +// Return the name of the AddDescriptors() function for a given file. +string GlobalAddDescriptorsName(const string& filename); + +// Return the name of the AssignDescriptors() function for a given file. +string GlobalAssignDescriptorsName(const string& filename); + +// Return the name of the ShutdownFile() function for a given file. +string GlobalShutdownFileName(const string& filename); + +// Escape C++ trigraphs by escaping question marks to \? +string EscapeTrigraphs(const string& to_escape); + +// Do message classes in this file keep track of unknown fields? +inline bool HasUnknownFields(const FileDescriptor *file) { + return file->options().optimize_for() != FileOptions::LITE_RUNTIME; +} + +// Does this file have generated parsing, serialization, and other +// standard methods for which reflection-based fallback implementations exist? +inline bool HasGeneratedMethods(const FileDescriptor *file) { + return file->options().optimize_for() != FileOptions::CODE_SIZE; +} + +// Do message classes in this file have descriptor and refelction methods? 
+inline bool HasDescriptorMethods(const FileDescriptor *file) { + return file->options().optimize_for() != FileOptions::LITE_RUNTIME; +} + +// Should we generate generic services for this file? +inline bool HasGenericServices(const FileDescriptor *file) { + return file->service_count() > 0 && + file->options().optimize_for() != FileOptions::LITE_RUNTIME && + file->options().cc_generic_services(); +} + +// Should string fields in this file verify that their contents are UTF-8? +inline bool HasUtf8Verification(const FileDescriptor* file) { + return file->options().optimize_for() != FileOptions::LITE_RUNTIME; +} + +// Should we generate a separate, super-optimized code path for serializing to +// flat arrays? We don't do this in Lite mode because we'd rather reduce code +// size. +inline bool HasFastArraySerialization(const FileDescriptor* file) { + return file->options().optimize_for() == FileOptions::SPEED; +} + + +} // namespace cpp +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_CPP_HELPERS_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_message.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_message.cc new file mode 100644 index 0000000000..c4e6fb2c85 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_message.cc @@ -0,0 +1,1933 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
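+
+// Quick summary (sketch) of how the cpp_helpers.h predicates used throughout
+// this file map onto a .proto file's optimize_for option:
+//   SPEED (default)  - generated parse/serialize methods, full descriptors
+//                      and reflection, fast array serialization.
+//   CODE_SIZE        - descriptors and reflection kept, but generated
+//                      methods are replaced by reflection-based fallbacks.
+//   LITE_RUNTIME     - classes derive from MessageLite; no descriptors,
+//                      no UnknownFieldSet, no generic services.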
+ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +using internal::WireFormat; +using internal::WireFormatLite; + +namespace { + +void PrintFieldComment(io::Printer* printer, const FieldDescriptor* field) { + // Print the field's proto-syntax definition as a comment. We don't want to + // print group bodies so we cut off after the first line. + string def = field->DebugString(); + printer->Print("// $def$\n", + "def", def.substr(0, def.find_first_of('\n'))); +} + +struct FieldOrderingByNumber { + inline bool operator()(const FieldDescriptor* a, + const FieldDescriptor* b) const { + return a->number() < b->number(); + } +}; + +const char* kWireTypeNames[] = { + "VARINT", + "FIXED64", + "LENGTH_DELIMITED", + "START_GROUP", + "END_GROUP", + "FIXED32", +}; + +// Sort the fields of the given Descriptor by number into a new[]'d array +// and return it. +const FieldDescriptor** SortFieldsByNumber(const Descriptor* descriptor) { + const FieldDescriptor** fields = + new const FieldDescriptor*[descriptor->field_count()]; + for (int i = 0; i < descriptor->field_count(); i++) { + fields[i] = descriptor->field(i); + } + sort(fields, fields + descriptor->field_count(), + FieldOrderingByNumber()); + return fields; +} + +// Functor for sorting extension ranges by their "start" field number. +struct ExtensionRangeSorter { + bool operator()(const Descriptor::ExtensionRange* left, + const Descriptor::ExtensionRange* right) const { + return left->start < right->start; + } +}; + +// Returns true if the message type has any required fields. If it doesn't, +// we can optimize out calls to its IsInitialized() method. +// +// already_seen is used to avoid checking the same type multiple times +// (and also to protect against recursion). +static bool HasRequiredFields( + const Descriptor* type, + hash_set* already_seen) { + if (already_seen->count(type) > 0) { + // Since the first occurrence of a required field causes the whole + // function to return true, we can assume that if the type is already + // in the cache it didn't have any required fields. + return false; + } + already_seen->insert(type); + + // If the type has extensions, an extension with message type could contain + // required fields, so we have to be conservative and assume such an + // extension exists. + if (type->extension_range_count() > 0) return true; + + for (int i = 0; i < type->field_count(); i++) { + const FieldDescriptor* field = type->field(i); + if (field->is_required()) { + return true; + } + if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + if (HasRequiredFields(field->message_type(), already_seen)) { + return true; + } + } + } + + return false; +} + +static bool HasRequiredFields(const Descriptor* type) { + hash_set already_seen; + return HasRequiredFields(type, &already_seen); +} + +// This returns an estimate of the compiler's alignment for the field. This +// can't guarantee to be correct because the generated code could be compiled on +// different systems with different alignment rules. The estimates below assume +// 64-bit pointers. 
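+// For instance, on a typical LP64 target a bool member is byte-aligned; an
+// int32, uint32, enum or float member is 4-byte aligned; and an int64,
+// uint64, double, string, message or repeated field is treated as 8-byte
+// aligned, since strings and messages are stored behind pointers and
+// repeated fields use pointer-backed container objects.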
+int EstimateAlignmentSize(const FieldDescriptor* field) { + if (field == NULL) return 0; + if (field->is_repeated()) return 8; + switch (field->cpp_type()) { + case FieldDescriptor::CPPTYPE_BOOL: + return 1; + + case FieldDescriptor::CPPTYPE_INT32: + case FieldDescriptor::CPPTYPE_UINT32: + case FieldDescriptor::CPPTYPE_ENUM: + case FieldDescriptor::CPPTYPE_FLOAT: + return 4; + + case FieldDescriptor::CPPTYPE_INT64: + case FieldDescriptor::CPPTYPE_UINT64: + case FieldDescriptor::CPPTYPE_DOUBLE: + case FieldDescriptor::CPPTYPE_STRING: + case FieldDescriptor::CPPTYPE_MESSAGE: + return 8; + } + GOOGLE_LOG(FATAL) << "Can't get here."; + return -1; // Make compiler happy. +} + +// FieldGroup is just a helper for OptimizePadding below. It holds a vector of +// fields that are grouped together because they have compatible alignment, and +// a preferred location in the final field ordering. +class FieldGroup { + public: + FieldGroup() + : preferred_location_(0) {} + + // A group with a single field. + FieldGroup(float preferred_location, const FieldDescriptor* field) + : preferred_location_(preferred_location), + fields_(1, field) {} + + // Append the fields in 'other' to this group. + void Append(const FieldGroup& other) { + if (other.fields_.empty()) { + return; + } + // Preferred location is the average among all the fields, so we weight by + // the number of fields on each FieldGroup object. + preferred_location_ = + (preferred_location_ * fields_.size() + + (other.preferred_location_ * other.fields_.size())) / + (fields_.size() + other.fields_.size()); + fields_.insert(fields_.end(), other.fields_.begin(), other.fields_.end()); + } + + void SetPreferredLocation(float location) { preferred_location_ = location; } + const vector& fields() const { return fields_; } + + // FieldGroup objects sort by their preferred location. + bool operator<(const FieldGroup& other) const { + return preferred_location_ < other.preferred_location_; + } + + private: + // "preferred_location_" is an estimate of where this group should go in the + // final list of fields. We compute this by taking the average index of each + // field in this group in the original ordering of fields. This is very + // approximate, but should put this group close to where its member fields + // originally went. + float preferred_location_; + vector fields_; + // We rely on the default copy constructor and operator= so this type can be + // used in a vector. +}; + +// Reorder 'fields' so that if the fields are output into a c++ class in the new +// order, the alignment padding is minimized. We try to do this while keeping +// each field as close as possible to its original position so that we don't +// reduce cache locality much for function that access each field in order. +void OptimizePadding(vector* fields) { + // First divide fields into those that align to 1 byte, 4 bytes or 8 bytes. + vector aligned_to_1, aligned_to_4, aligned_to_8; + for (int i = 0; i < fields->size(); ++i) { + switch (EstimateAlignmentSize((*fields)[i])) { + case 1: aligned_to_1.push_back(FieldGroup(i, (*fields)[i])); break; + case 4: aligned_to_4.push_back(FieldGroup(i, (*fields)[i])); break; + case 8: aligned_to_8.push_back(FieldGroup(i, (*fields)[i])); break; + default: + GOOGLE_LOG(FATAL) << "Unknown alignment size."; + } + } + + // Now group fields aligned to 1 byte into sets of 4, and treat those like a + // single field aligned to 4 bytes. 
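+  // For example (sketch): with fields declared as {bool a; int32 b; bool c;
+  // int64 d; bool e}, {a, c, e} land in aligned_to_1 and are packed below
+  // into one 4-byte group, which is then paired with {b} into an 8-byte
+  // group and finally ordered against {d} by preferred location, yielding
+  // b, a, c, e, d -- roughly 16 bytes of field storage instead of 32.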
+ for (int i = 0; i < aligned_to_1.size(); i += 4) { + FieldGroup field_group; + for (int j = i; j < aligned_to_1.size() && j < i + 4; ++j) { + field_group.Append(aligned_to_1[j]); + } + aligned_to_4.push_back(field_group); + } + // Sort by preferred location to keep fields as close to their original + // location as possible. + sort(aligned_to_4.begin(), aligned_to_4.end()); + + // Now group fields aligned to 4 bytes (or the 4-field groups created above) + // into pairs, and treat those like a single field aligned to 8 bytes. + for (int i = 0; i < aligned_to_4.size(); i += 2) { + FieldGroup field_group; + for (int j = i; j < aligned_to_4.size() && j < i + 2; ++j) { + field_group.Append(aligned_to_4[j]); + } + if (i == aligned_to_4.size() - 1) { + // Move incomplete 4-byte block to the end. + field_group.SetPreferredLocation(fields->size() + 1); + } + aligned_to_8.push_back(field_group); + } + // Sort by preferred location to keep fields as close to their original + // location as possible. + sort(aligned_to_8.begin(), aligned_to_8.end()); + + // Now pull out all the FieldDescriptors in order. + fields->clear(); + for (int i = 0; i < aligned_to_8.size(); ++i) { + fields->insert(fields->end(), + aligned_to_8[i].fields().begin(), + aligned_to_8[i].fields().end()); + } +} + +} + +// =================================================================== + +MessageGenerator::MessageGenerator(const Descriptor* descriptor, + const string& dllexport_decl) + : descriptor_(descriptor), + classname_(ClassName(descriptor, false)), + dllexport_decl_(dllexport_decl), + field_generators_(descriptor), + nested_generators_(new scoped_ptr[ + descriptor->nested_type_count()]), + enum_generators_(new scoped_ptr[ + descriptor->enum_type_count()]), + extension_generators_(new scoped_ptr[ + descriptor->extension_count()]) { + + for (int i = 0; i < descriptor->nested_type_count(); i++) { + nested_generators_[i].reset( + new MessageGenerator(descriptor->nested_type(i), dllexport_decl)); + } + + for (int i = 0; i < descriptor->enum_type_count(); i++) { + enum_generators_[i].reset( + new EnumGenerator(descriptor->enum_type(i), dllexport_decl)); + } + + for (int i = 0; i < descriptor->extension_count(); i++) { + extension_generators_[i].reset( + new ExtensionGenerator(descriptor->extension(i), dllexport_decl)); + } +} + +MessageGenerator::~MessageGenerator() {} + +void MessageGenerator:: +GenerateForwardDeclaration(io::Printer* printer) { + printer->Print("class $classname$;\n", + "classname", classname_); + + for (int i = 0; i < descriptor_->nested_type_count(); i++) { + nested_generators_[i]->GenerateForwardDeclaration(printer); + } +} + +void MessageGenerator:: +GenerateEnumDefinitions(io::Printer* printer) { + for (int i = 0; i < descriptor_->nested_type_count(); i++) { + nested_generators_[i]->GenerateEnumDefinitions(printer); + } + + for (int i = 0; i < descriptor_->enum_type_count(); i++) { + enum_generators_[i]->GenerateDefinition(printer); + } +} + +void MessageGenerator:: +GenerateGetEnumDescriptorSpecializations(io::Printer* printer) { + for (int i = 0; i < descriptor_->nested_type_count(); i++) { + nested_generators_[i]->GenerateGetEnumDescriptorSpecializations(printer); + } + for (int i = 0; i < descriptor_->enum_type_count(); i++) { + enum_generators_[i]->GenerateGetEnumDescriptorSpecializations(printer); + } +} + +void MessageGenerator:: +GenerateFieldAccessorDeclarations(io::Printer* printer) { + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = 
descriptor_->field(i); + + PrintFieldComment(printer, field); + + map vars; + SetCommonFieldVariables(field, &vars); + vars["constant_name"] = FieldConstantName(field); + + if (field->is_repeated()) { + printer->Print(vars, "inline int $name$_size() const$deprecation$;\n"); + } else { + printer->Print(vars, "inline bool has_$name$() const$deprecation$;\n"); + } + + printer->Print(vars, "inline void clear_$name$()$deprecation$;\n"); + printer->Print(vars, "static const int $constant_name$ = $number$;\n"); + + // Generate type-specific accessor declarations. + field_generators_.get(field).GenerateAccessorDeclarations(printer); + + printer->Print("\n"); + } + + if (descriptor_->extension_range_count() > 0) { + // Generate accessors for extensions. We just call a macro located in + // extension_set.h since the accessors about 80 lines of static code. + printer->Print( + "GOOGLE_PROTOBUF_EXTENSION_ACCESSORS($classname$)\n", + "classname", classname_); + } +} + +void MessageGenerator:: +GenerateFieldAccessorDefinitions(io::Printer* printer) { + printer->Print("// $classname$\n\n", "classname", classname_); + + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + + PrintFieldComment(printer, field); + + map vars; + SetCommonFieldVariables(field, &vars); + + // Generate has_$name$() or $name$_size(). + if (field->is_repeated()) { + printer->Print(vars, + "inline int $classname$::$name$_size() const {\n" + " return $name$_.size();\n" + "}\n"); + } else { + // Singular field. + char buffer[kFastToBufferSize]; + vars["has_array_index"] = SimpleItoa(field->index() / 32); + vars["has_mask"] = FastHex32ToBuffer(1u << (field->index() % 32), buffer); + printer->Print(vars, + "inline bool $classname$::has_$name$() const {\n" + " return (_has_bits_[$has_array_index$] & 0x$has_mask$u) != 0;\n" + "}\n" + "inline void $classname$::set_has_$name$() {\n" + " _has_bits_[$has_array_index$] |= 0x$has_mask$u;\n" + "}\n" + "inline void $classname$::clear_has_$name$() {\n" + " _has_bits_[$has_array_index$] &= ~0x$has_mask$u;\n" + "}\n" + ); + } + + // Generate clear_$name$() + printer->Print(vars, + "inline void $classname$::clear_$name$() {\n"); + + printer->Indent(); + field_generators_.get(field).GenerateClearingCode(printer); + printer->Outdent(); + + if (!field->is_repeated()) { + printer->Print(vars, + " clear_has_$name$();\n"); + } + + printer->Print("}\n"); + + // Generate type-specific accessors. 
+ field_generators_.get(field).GenerateInlineAccessorDefinitions(printer); + + printer->Print("\n"); + } +} + +void MessageGenerator:: +GenerateClassDefinition(io::Printer* printer) { + for (int i = 0; i < descriptor_->nested_type_count(); i++) { + nested_generators_[i]->GenerateClassDefinition(printer); + printer->Print("\n"); + printer->Print(kThinSeparator); + printer->Print("\n"); + } + + map vars; + vars["classname"] = classname_; + vars["field_count"] = SimpleItoa(descriptor_->field_count()); + if (dllexport_decl_.empty()) { + vars["dllexport"] = ""; + } else { + vars["dllexport"] = dllexport_decl_ + " "; + } + vars["superclass"] = SuperClassName(descriptor_); + + printer->Print(vars, + "class $dllexport$$classname$ : public $superclass$ {\n" + " public:\n"); + printer->Indent(); + + printer->Print(vars, + "$classname$();\n" + "virtual ~$classname$();\n" + "\n" + "$classname$(const $classname$& from);\n" + "\n" + "inline $classname$& operator=(const $classname$& from) {\n" + " CopyFrom(from);\n" + " return *this;\n" + "}\n" + "\n"); + + if (HasUnknownFields(descriptor_->file())) { + printer->Print( + "inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const {\n" + " return _unknown_fields_;\n" + "}\n" + "\n" + "inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() {\n" + " return &_unknown_fields_;\n" + "}\n" + "\n"); + } + + // Only generate this member if it's not disabled. + if (HasDescriptorMethods(descriptor_->file()) && + !descriptor_->options().no_standard_descriptor_accessor()) { + printer->Print(vars, + "static const ::google::protobuf::Descriptor* descriptor();\n"); + } + + printer->Print(vars, + "static const $classname$& default_instance();\n" + "\n"); + + + printer->Print(vars, + "void Swap($classname$* other);\n" + "\n" + "// implements Message ----------------------------------------------\n" + "\n" + "$classname$* New() const;\n"); + + if (HasGeneratedMethods(descriptor_->file())) { + if (HasDescriptorMethods(descriptor_->file())) { + printer->Print(vars, + "void CopyFrom(const ::google::protobuf::Message& from);\n" + "void MergeFrom(const ::google::protobuf::Message& from);\n"); + } else { + printer->Print(vars, + "void CheckTypeAndMergeFrom(const ::google::protobuf::MessageLite& from);\n"); + } + + printer->Print(vars, + "void CopyFrom(const $classname$& from);\n" + "void MergeFrom(const $classname$& from);\n" + "void Clear();\n" + "bool IsInitialized() const;\n" + "\n" + "int ByteSize() const;\n" + "bool MergePartialFromCodedStream(\n" + " ::google::protobuf::io::CodedInputStream* input);\n" + "void SerializeWithCachedSizes(\n" + " ::google::protobuf::io::CodedOutputStream* output) const;\n"); + if (HasFastArraySerialization(descriptor_->file())) { + printer->Print( + "::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const;\n"); + } + } + + printer->Print(vars, + "int GetCachedSize() const { return _cached_size_; }\n" + "private:\n" + "void SharedCtor();\n" + "void SharedDtor();\n" + "void SetCachedSize(int size) const;\n" + "public:\n" + "\n"); + + if (HasDescriptorMethods(descriptor_->file())) { + printer->Print( + "::google::protobuf::Metadata GetMetadata() const;\n" + "\n"); + } else { + printer->Print( + "::std::string GetTypeName() const;\n" + "\n"); + } + + printer->Print( + "// nested types ----------------------------------------------------\n" + "\n"); + + // Import all nested message classes into this class's scope with typedefs. 
+ for (int i = 0; i < descriptor_->nested_type_count(); i++) { + const Descriptor* nested_type = descriptor_->nested_type(i); + printer->Print("typedef $nested_full_name$ $nested_name$;\n", + "nested_name", nested_type->name(), + "nested_full_name", ClassName(nested_type, false)); + } + + if (descriptor_->nested_type_count() > 0) { + printer->Print("\n"); + } + + // Import all nested enums and their values into this class's scope with + // typedefs and constants. + for (int i = 0; i < descriptor_->enum_type_count(); i++) { + enum_generators_[i]->GenerateSymbolImports(printer); + printer->Print("\n"); + } + + printer->Print( + "// accessors -------------------------------------------------------\n" + "\n"); + + // Generate accessor methods for all fields. + GenerateFieldAccessorDeclarations(printer); + + // Declare extension identifiers. + for (int i = 0; i < descriptor_->extension_count(); i++) { + extension_generators_[i]->GenerateDeclaration(printer); + } + + + printer->Print( + "// @@protoc_insertion_point(class_scope:$full_name$)\n", + "full_name", descriptor_->full_name()); + + // Generate private members. + printer->Outdent(); + printer->Print(" private:\n"); + printer->Indent(); + + for (int i = 0; i < descriptor_->field_count(); i++) { + if (!descriptor_->field(i)->is_repeated()) { + printer->Print( + "inline void set_has_$name$();\n", + "name", FieldName(descriptor_->field(i))); + printer->Print( + "inline void clear_has_$name$();\n", + "name", FieldName(descriptor_->field(i))); + } + } + printer->Print("\n"); + + // To minimize padding, data members are divided into three sections: + // (1) members assumed to align to 8 bytes + // (2) members corresponding to message fields, re-ordered to optimize + // alignment. + // (3) members assumed to align to 4 bytes. + + // Members assumed to align to 8 bytes: + + if (descriptor_->extension_range_count() > 0) { + printer->Print( + "::google::protobuf::internal::ExtensionSet _extensions_;\n" + "\n"); + } + + if (HasUnknownFields(descriptor_->file())) { + printer->Print( + "::google::protobuf::UnknownFieldSet _unknown_fields_;\n" + "\n"); + } + + // Field members: + + vector fields; + for (int i = 0; i < descriptor_->field_count(); i++) { + fields.push_back(descriptor_->field(i)); + } + OptimizePadding(&fields); + for (int i = 0; i < fields.size(); ++i) { + field_generators_.get(fields[i]).GeneratePrivateMembers(printer); + } + + // Members assumed to align to 4 bytes: + + // TODO(kenton): Make _cached_size_ an atomic when C++ supports it. + printer->Print( + "\n" + "mutable int _cached_size_;\n"); + + // Generate _has_bits_. + if (descriptor_->field_count() > 0) { + printer->Print(vars, + "::google::protobuf::uint32 _has_bits_[($field_count$ + 31) / 32];\n" + "\n"); + } else { + // Zero-size arrays aren't technically allowed, and MSVC in particular + // doesn't like them. We still need to declare these arrays to make + // other code compile. Since this is an uncommon case, we'll just declare + // them with size 1 and waste some space. Oh well. + printer->Print( + "::google::protobuf::uint32 _has_bits_[1];\n" + "\n"); + } + + // Declare AddDescriptors(), BuildDescriptors(), and ShutdownFile() as + // friends so that they can access private static variables like + // default_instance_ and reflection_. 
+ printer->Print( + "friend void $dllexport_decl$ $adddescriptorsname$();\n", + "dllexport_decl", dllexport_decl_, + "adddescriptorsname", + GlobalAddDescriptorsName(descriptor_->file()->name())); + printer->Print( + "friend void $assigndescriptorsname$();\n" + "friend void $shutdownfilename$();\n" + "\n", + "assigndescriptorsname", + GlobalAssignDescriptorsName(descriptor_->file()->name()), + "shutdownfilename", GlobalShutdownFileName(descriptor_->file()->name())); + + printer->Print( + "void InitAsDefaultInstance();\n" + "static $classname$* default_instance_;\n", + "classname", classname_); + + printer->Outdent(); + printer->Print(vars, "};"); +} + +void MessageGenerator:: +GenerateInlineMethods(io::Printer* printer) { + for (int i = 0; i < descriptor_->nested_type_count(); i++) { + nested_generators_[i]->GenerateInlineMethods(printer); + printer->Print(kThinSeparator); + printer->Print("\n"); + } + + GenerateFieldAccessorDefinitions(printer); +} + +void MessageGenerator:: +GenerateDescriptorDeclarations(io::Printer* printer) { + printer->Print( + "const ::google::protobuf::Descriptor* $name$_descriptor_ = NULL;\n" + "const ::google::protobuf::internal::GeneratedMessageReflection*\n" + " $name$_reflection_ = NULL;\n", + "name", classname_); + + for (int i = 0; i < descriptor_->nested_type_count(); i++) { + nested_generators_[i]->GenerateDescriptorDeclarations(printer); + } + + for (int i = 0; i < descriptor_->enum_type_count(); i++) { + printer->Print( + "const ::google::protobuf::EnumDescriptor* $name$_descriptor_ = NULL;\n", + "name", ClassName(descriptor_->enum_type(i), false)); + } +} + +void MessageGenerator:: +GenerateDescriptorInitializer(io::Printer* printer, int index) { + // TODO(kenton): Passing the index to this method is redundant; just use + // descriptor_->index() instead. + map vars; + vars["classname"] = classname_; + vars["index"] = SimpleItoa(index); + + // Obtain the descriptor from the parent's descriptor. + if (descriptor_->containing_type() == NULL) { + printer->Print(vars, + "$classname$_descriptor_ = file->message_type($index$);\n"); + } else { + vars["parent"] = ClassName(descriptor_->containing_type(), false); + printer->Print(vars, + "$classname$_descriptor_ = " + "$parent$_descriptor_->nested_type($index$);\n"); + } + + // Generate the offsets. + GenerateOffsets(printer); + + // Construct the reflection object. + printer->Print(vars, + "$classname$_reflection_ =\n" + " new ::google::protobuf::internal::GeneratedMessageReflection(\n" + " $classname$_descriptor_,\n" + " $classname$::default_instance_,\n" + " $classname$_offsets_,\n" + " GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET($classname$, _has_bits_[0]),\n" + " GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(" + "$classname$, _unknown_fields_),\n"); + if (descriptor_->extension_range_count() > 0) { + printer->Print(vars, + " GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(" + "$classname$, _extensions_),\n"); + } else { + // No extensions. + printer->Print(vars, + " -1,\n"); + } + printer->Print(vars, + " ::google::protobuf::DescriptorPool::generated_pool(),\n" + " ::google::protobuf::MessageFactory::generated_factory(),\n" + " sizeof($classname$));\n"); + + // Handle nested types. 
+ for (int i = 0; i < descriptor_->nested_type_count(); i++) { + nested_generators_[i]->GenerateDescriptorInitializer(printer, i); + } + + for (int i = 0; i < descriptor_->enum_type_count(); i++) { + enum_generators_[i]->GenerateDescriptorInitializer(printer, i); + } +} + +void MessageGenerator:: +GenerateTypeRegistrations(io::Printer* printer) { + // Register this message type with the message factory. + printer->Print( + "::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(\n" + " $classname$_descriptor_, &$classname$::default_instance());\n", + "classname", classname_); + + // Handle nested types. + for (int i = 0; i < descriptor_->nested_type_count(); i++) { + nested_generators_[i]->GenerateTypeRegistrations(printer); + } +} + +void MessageGenerator:: +GenerateDefaultInstanceAllocator(io::Printer* printer) { + // Construct the default instance. We can't call InitAsDefaultInstance() yet + // because we need to make sure all default instances that this one might + // depend on are constructed first. + printer->Print( + "$classname$::default_instance_ = new $classname$();\n", + "classname", classname_); + + // Handle nested types. + for (int i = 0; i < descriptor_->nested_type_count(); i++) { + nested_generators_[i]->GenerateDefaultInstanceAllocator(printer); + } + +} + +void MessageGenerator:: +GenerateDefaultInstanceInitializer(io::Printer* printer) { + printer->Print( + "$classname$::default_instance_->InitAsDefaultInstance();\n", + "classname", classname_); + + // Register extensions. + for (int i = 0; i < descriptor_->extension_count(); i++) { + extension_generators_[i]->GenerateRegistration(printer); + } + + // Handle nested types. + for (int i = 0; i < descriptor_->nested_type_count(); i++) { + nested_generators_[i]->GenerateDefaultInstanceInitializer(printer); + } +} + +void MessageGenerator:: +GenerateShutdownCode(io::Printer* printer) { + printer->Print( + "delete $classname$::default_instance_;\n", + "classname", classname_); + + if (HasDescriptorMethods(descriptor_->file())) { + printer->Print( + "delete $classname$_reflection_;\n", + "classname", classname_); + } + + // Handle nested types. + for (int i = 0; i < descriptor_->nested_type_count(); i++) { + nested_generators_[i]->GenerateShutdownCode(printer); + } +} + +void MessageGenerator:: +GenerateClassMethods(io::Printer* printer) { + for (int i = 0; i < descriptor_->enum_type_count(); i++) { + enum_generators_[i]->GenerateMethods(printer); + } + + for (int i = 0; i < descriptor_->nested_type_count(); i++) { + nested_generators_[i]->GenerateClassMethods(printer); + printer->Print("\n"); + printer->Print(kThinSeparator); + printer->Print("\n"); + } + + // Generate non-inline field definitions. + for (int i = 0; i < descriptor_->field_count(); i++) { + field_generators_.get(descriptor_->field(i)) + .GenerateNonInlineAccessorDefinitions(printer); + } + + // Generate field number constants. + printer->Print("#ifndef _MSC_VER\n"); + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor *field = descriptor_->field(i); + printer->Print( + "const int $classname$::$constant_name$;\n", + "classname", ClassName(FieldScope(field), false), + "constant_name", FieldConstantName(field)); + } + printer->Print( + "#endif // !_MSC_VER\n" + "\n"); + + // Define extension identifiers. 
+ for (int i = 0; i < descriptor_->extension_count(); i++) { + extension_generators_[i]->GenerateDefinition(printer); + } + + GenerateStructors(printer); + printer->Print("\n"); + + if (HasGeneratedMethods(descriptor_->file())) { + GenerateClear(printer); + printer->Print("\n"); + + GenerateMergeFromCodedStream(printer); + printer->Print("\n"); + + GenerateSerializeWithCachedSizes(printer); + printer->Print("\n"); + + if (HasFastArraySerialization(descriptor_->file())) { + GenerateSerializeWithCachedSizesToArray(printer); + printer->Print("\n"); + } + + GenerateByteSize(printer); + printer->Print("\n"); + + GenerateMergeFrom(printer); + printer->Print("\n"); + + GenerateCopyFrom(printer); + printer->Print("\n"); + + GenerateIsInitialized(printer); + printer->Print("\n"); + } + + GenerateSwap(printer); + printer->Print("\n"); + + if (HasDescriptorMethods(descriptor_->file())) { + printer->Print( + "::google::protobuf::Metadata $classname$::GetMetadata() const {\n" + " protobuf_AssignDescriptorsOnce();\n" + " ::google::protobuf::Metadata metadata;\n" + " metadata.descriptor = $classname$_descriptor_;\n" + " metadata.reflection = $classname$_reflection_;\n" + " return metadata;\n" + "}\n" + "\n", + "classname", classname_); + } else { + printer->Print( + "::std::string $classname$::GetTypeName() const {\n" + " return \"$type_name$\";\n" + "}\n" + "\n", + "classname", classname_, + "type_name", descriptor_->full_name()); + } + +} + +void MessageGenerator:: +GenerateOffsets(io::Printer* printer) { + printer->Print( + "static const int $classname$_offsets_[$field_count$] = {\n", + "classname", classname_, + "field_count", SimpleItoa(max(1, descriptor_->field_count()))); + printer->Indent(); + + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + printer->Print( + "GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET($classname$, $name$_),\n", + "classname", classname_, + "name", FieldName(field)); + } + + printer->Outdent(); + printer->Print("};\n"); +} + +void MessageGenerator:: +GenerateSharedConstructorCode(io::Printer* printer) { + printer->Print( + "void $classname$::SharedCtor() {\n", + "classname", classname_); + printer->Indent(); + + printer->Print( + "_cached_size_ = 0;\n"); + + for (int i = 0; i < descriptor_->field_count(); i++) { + field_generators_.get(descriptor_->field(i)) + .GenerateConstructorCode(printer); + } + + printer->Print( + "::memset(_has_bits_, 0, sizeof(_has_bits_));\n"); + + printer->Outdent(); + printer->Print("}\n\n"); +} + +void MessageGenerator:: +GenerateSharedDestructorCode(io::Printer* printer) { + printer->Print( + "void $classname$::SharedDtor() {\n", + "classname", classname_); + printer->Indent(); + // Write the destructors for each field. + for (int i = 0; i < descriptor_->field_count(); i++) { + field_generators_.get(descriptor_->field(i)) + .GenerateDestructorCode(printer); + } + + printer->Print( + "if (this != default_instance_) {\n"); + + // We need to delete all embedded messages. + // TODO(kenton): If we make unset messages point at default instances + // instead of NULL, then it would make sense to move this code into + // MessageFieldGenerator::GenerateDestructorCode(). 
+ for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + + if (!field->is_repeated() && + field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + printer->Print(" delete $name$_;\n", + "name", FieldName(field)); + } + } + + printer->Outdent(); + printer->Print( + " }\n" + "}\n" + "\n"); +} + +void MessageGenerator:: +GenerateStructors(io::Printer* printer) { + string superclass = SuperClassName(descriptor_); + + // Generate the default constructor. + printer->Print( + "$classname$::$classname$()\n" + " : $superclass$() {\n" + " SharedCtor();\n" + "}\n", + "classname", classname_, + "superclass", superclass); + + printer->Print( + "\n" + "void $classname$::InitAsDefaultInstance() {\n", + "classname", classname_); + + // The default instance needs all of its embedded message pointers + // cross-linked to other default instances. We can't do this initialization + // in the constructor because some other default instances may not have been + // constructed yet at that time. + // TODO(kenton): Maybe all message fields (even for non-default messages) + // should be initialized to point at default instances rather than NULL? + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + + if (!field->is_repeated() && + field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + printer->Print( + " $name$_ = const_cast< $type$*>(&$type$::default_instance());\n", + "name", FieldName(field), + "type", FieldMessageTypeName(field)); + } + } + printer->Print( + "}\n" + "\n"); + + // Generate the copy constructor. + printer->Print( + "$classname$::$classname$(const $classname$& from)\n" + " : $superclass$() {\n" + " SharedCtor();\n" + " MergeFrom(from);\n" + "}\n" + "\n", + "classname", classname_, + "superclass", superclass); + + // Generate the shared constructor code. + GenerateSharedConstructorCode(printer); + + // Generate the destructor. + printer->Print( + "$classname$::~$classname$() {\n" + " SharedDtor();\n" + "}\n" + "\n", + "classname", classname_); + + // Generate the shared destructor code. + GenerateSharedDestructorCode(printer); + + // Generate SetCachedSize. + printer->Print( + "void $classname$::SetCachedSize(int size) const {\n" + " GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();\n" + " _cached_size_ = size;\n" + " GOOGLE_SAFE_CONCURRENT_WRITES_END();\n" + "}\n", + "classname", classname_); + + // Only generate this member if it's not disabled. 
+ if (HasDescriptorMethods(descriptor_->file()) && + !descriptor_->options().no_standard_descriptor_accessor()) { + printer->Print( + "const ::google::protobuf::Descriptor* $classname$::descriptor() {\n" + " protobuf_AssignDescriptorsOnce();\n" + " return $classname$_descriptor_;\n" + "}\n" + "\n", + "classname", classname_, + "adddescriptorsname", + GlobalAddDescriptorsName(descriptor_->file()->name())); + } + + printer->Print( + "const $classname$& $classname$::default_instance() {\n" + " if (default_instance_ == NULL) $adddescriptorsname$();" + " return *default_instance_;\n" + "}\n" + "\n" + "$classname$* $classname$::default_instance_ = NULL;\n" + "\n" + "$classname$* $classname$::New() const {\n" + " return new $classname$;\n" + "}\n", + "classname", classname_, + "adddescriptorsname", + GlobalAddDescriptorsName(descriptor_->file()->name())); + +} + +void MessageGenerator:: +GenerateClear(io::Printer* printer) { + printer->Print("void $classname$::Clear() {\n", + "classname", classname_); + printer->Indent(); + + int last_index = -1; + + if (descriptor_->extension_range_count() > 0) { + printer->Print("_extensions_.Clear();\n"); + } + + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + + if (!field->is_repeated()) { + // We can use the fact that _has_bits_ is a giant bitfield to our + // advantage: We can check up to 32 bits at a time for equality to + // zero, and skip the whole range if so. This can improve the speed + // of Clear() for messages which contain a very large number of + // optional fields of which only a few are used at a time. Here, + // we've chosen to check 8 bits at a time rather than 32. + if (i / 8 != last_index / 8 || last_index < 0) { + if (last_index >= 0) { + printer->Outdent(); + printer->Print("}\n"); + } + printer->Print( + "if (_has_bits_[$index$ / 32] & (0xffu << ($index$ % 32))) {\n", + "index", SimpleItoa(field->index())); + printer->Indent(); + } + last_index = i; + + // It's faster to just overwrite primitive types, but we should + // only clear strings and messages if they were set. + // TODO(kenton): Let the CppFieldGenerator decide this somehow. + bool should_check_bit = + field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE || + field->cpp_type() == FieldDescriptor::CPPTYPE_STRING; + + if (should_check_bit) { + printer->Print( + "if (has_$name$()) {\n", + "name", FieldName(field)); + printer->Indent(); + } + + field_generators_.get(field).GenerateClearingCode(printer); + + if (should_check_bit) { + printer->Outdent(); + printer->Print("}\n"); + } + } + } + + if (last_index >= 0) { + printer->Outdent(); + printer->Print("}\n"); + } + + // Repeated fields don't use _has_bits_ so we clear them in a separate + // pass. + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + + if (field->is_repeated()) { + field_generators_.get(field).GenerateClearingCode(printer); + } + } + + printer->Print( + "::memset(_has_bits_, 0, sizeof(_has_bits_));\n"); + + if (HasUnknownFields(descriptor_->file())) { + printer->Print( + "mutable_unknown_fields()->Clear();\n"); + } + + printer->Outdent(); + printer->Print("}\n"); +} + +void MessageGenerator:: +GenerateSwap(io::Printer* printer) { + // Generate the Swap member function. 
+ printer->Print("void $classname$::Swap($classname$* other) {\n", + "classname", classname_); + printer->Indent(); + printer->Print("if (other != this) {\n"); + printer->Indent(); + + if (HasGeneratedMethods(descriptor_->file())) { + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + field_generators_.get(field).GenerateSwappingCode(printer); + } + + for (int i = 0; i < (descriptor_->field_count() + 31) / 32; ++i) { + printer->Print("std::swap(_has_bits_[$i$], other->_has_bits_[$i$]);\n", + "i", SimpleItoa(i)); + } + + if (HasUnknownFields(descriptor_->file())) { + printer->Print("_unknown_fields_.Swap(&other->_unknown_fields_);\n"); + } + printer->Print("std::swap(_cached_size_, other->_cached_size_);\n"); + if (descriptor_->extension_range_count() > 0) { + printer->Print("_extensions_.Swap(&other->_extensions_);\n"); + } + } else { + printer->Print("GetReflection()->Swap(this, other);"); + } + + printer->Outdent(); + printer->Print("}\n"); + printer->Outdent(); + printer->Print("}\n"); +} + +void MessageGenerator:: +GenerateMergeFrom(io::Printer* printer) { + if (HasDescriptorMethods(descriptor_->file())) { + // Generate the generalized MergeFrom (aka that which takes in the Message + // base class as a parameter). + printer->Print( + "void $classname$::MergeFrom(const ::google::protobuf::Message& from) {\n" + " GOOGLE_CHECK_NE(&from, this);\n", + "classname", classname_); + printer->Indent(); + + // Cast the message to the proper type. If we find that the message is + // *not* of the proper type, we can still call Merge via the reflection + // system, as the GOOGLE_CHECK above ensured that we have the same descriptor + // for each message. + printer->Print( + "const $classname$* source =\n" + " ::google::protobuf::internal::dynamic_cast_if_available(\n" + " &from);\n" + "if (source == NULL) {\n" + " ::google::protobuf::internal::ReflectionOps::Merge(from, this);\n" + "} else {\n" + " MergeFrom(*source);\n" + "}\n", + "classname", classname_); + + printer->Outdent(); + printer->Print("}\n\n"); + } else { + // Generate CheckTypeAndMergeFrom(). + printer->Print( + "void $classname$::CheckTypeAndMergeFrom(\n" + " const ::google::protobuf::MessageLite& from) {\n" + " MergeFrom(*::google::protobuf::down_cast(&from));\n" + "}\n" + "\n", + "classname", classname_); + } + + // Generate the class-specific MergeFrom, which avoids the GOOGLE_CHECK and cast. + printer->Print( + "void $classname$::MergeFrom(const $classname$& from) {\n" + " GOOGLE_CHECK_NE(&from, this);\n", + "classname", classname_); + printer->Indent(); + + // Merge Repeated fields. These fields do not require a + // check as we can simply iterate over them. + for (int i = 0; i < descriptor_->field_count(); ++i) { + const FieldDescriptor* field = descriptor_->field(i); + + if (field->is_repeated()) { + field_generators_.get(field).GenerateMergingCode(printer); + } + } + + // Merge Optional and Required fields (after a _has_bit check). + int last_index = -1; + + for (int i = 0; i < descriptor_->field_count(); ++i) { + const FieldDescriptor* field = descriptor_->field(i); + + if (!field->is_repeated()) { + // See above in GenerateClear for an explanation of this. 
+ if (i / 8 != last_index / 8 || last_index < 0) { + if (last_index >= 0) { + printer->Outdent(); + printer->Print("}\n"); + } + printer->Print( + "if (from._has_bits_[$index$ / 32] & (0xffu << ($index$ % 32))) {\n", + "index", SimpleItoa(field->index())); + printer->Indent(); + } + + last_index = i; + + printer->Print( + "if (from.has_$name$()) {\n", + "name", FieldName(field)); + printer->Indent(); + + field_generators_.get(field).GenerateMergingCode(printer); + + printer->Outdent(); + printer->Print("}\n"); + } + } + + if (last_index >= 0) { + printer->Outdent(); + printer->Print("}\n"); + } + + if (descriptor_->extension_range_count() > 0) { + printer->Print("_extensions_.MergeFrom(from._extensions_);\n"); + } + + if (HasUnknownFields(descriptor_->file())) { + printer->Print( + "mutable_unknown_fields()->MergeFrom(from.unknown_fields());\n"); + } + + printer->Outdent(); + printer->Print("}\n"); +} + +void MessageGenerator:: +GenerateCopyFrom(io::Printer* printer) { + if (HasDescriptorMethods(descriptor_->file())) { + // Generate the generalized CopyFrom (aka that which takes in the Message + // base class as a parameter). + printer->Print( + "void $classname$::CopyFrom(const ::google::protobuf::Message& from) {\n", + "classname", classname_); + printer->Indent(); + + printer->Print( + "if (&from == this) return;\n" + "Clear();\n" + "MergeFrom(from);\n"); + + printer->Outdent(); + printer->Print("}\n\n"); + } + + // Generate the class-specific CopyFrom. + printer->Print( + "void $classname$::CopyFrom(const $classname$& from) {\n", + "classname", classname_); + printer->Indent(); + + printer->Print( + "if (&from == this) return;\n" + "Clear();\n" + "MergeFrom(from);\n"); + + printer->Outdent(); + printer->Print("}\n"); +} + +void MessageGenerator:: +GenerateMergeFromCodedStream(io::Printer* printer) { + if (descriptor_->options().message_set_wire_format()) { + // Special-case MessageSet. + printer->Print( + "bool $classname$::MergePartialFromCodedStream(\n" + " ::google::protobuf::io::CodedInputStream* input) {\n" + " return _extensions_.ParseMessageSet(input, default_instance_,\n" + " mutable_unknown_fields());\n" + "}\n", + "classname", classname_); + return; + } + + printer->Print( + "bool $classname$::MergePartialFromCodedStream(\n" + " ::google::protobuf::io::CodedInputStream* input) {\n" + "#define DO_(EXPRESSION) if (!(EXPRESSION)) return false\n" + " ::google::protobuf::uint32 tag;\n" + " while ((tag = input->ReadTag()) != 0) {\n", + "classname", classname_); + + printer->Indent(); + printer->Indent(); + + if (descriptor_->field_count() > 0) { + // We don't even want to print the switch() if we have no fields because + // MSVC dislikes switch() statements that contain only a default value. + + // Note: If we just switched on the tag rather than the field number, we + // could avoid the need for the if() to check the wire type at the beginning + // of each case. However, this is actually a bit slower in practice as it + // creates a jump table that is 8x larger and sparser, and meanwhile the + // if()s are highly predictable. 
+ printer->Print( + "switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {\n"); + + printer->Indent(); + + scoped_array ordered_fields( + SortFieldsByNumber(descriptor_)); + + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = ordered_fields[i]; + + PrintFieldComment(printer, field); + + printer->Print( + "case $number$: {\n", + "number", SimpleItoa(field->number())); + printer->Indent(); + const FieldGenerator& field_generator = field_generators_.get(field); + + // Emit code to parse the common, expected case. + printer->Print( + "if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==\n" + " ::google::protobuf::internal::WireFormatLite::WIRETYPE_$wiretype$) {\n", + "wiretype", kWireTypeNames[WireFormat::WireTypeForField(field)]); + + if (i > 0 || (field->is_repeated() && !field->options().packed())) { + printer->Print( + " parse_$name$:\n", + "name", field->name()); + } + + printer->Indent(); + if (field->options().packed()) { + field_generator.GenerateMergeFromCodedStreamWithPacking(printer); + } else { + field_generator.GenerateMergeFromCodedStream(printer); + } + printer->Outdent(); + + // Emit code to parse unexpectedly packed or unpacked values. + if (field->is_packable() && field->options().packed()) { + printer->Print( + "} else if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag)\n" + " == ::google::protobuf::internal::WireFormatLite::\n" + " WIRETYPE_$wiretype$) {\n", + "wiretype", + kWireTypeNames[WireFormat::WireTypeForFieldType(field->type())]); + printer->Indent(); + field_generator.GenerateMergeFromCodedStream(printer); + printer->Outdent(); + } else if (field->is_packable() && !field->options().packed()) { + printer->Print( + "} else if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag)\n" + " == ::google::protobuf::internal::WireFormatLite::\n" + " WIRETYPE_LENGTH_DELIMITED) {\n"); + printer->Indent(); + field_generator.GenerateMergeFromCodedStreamWithPacking(printer); + printer->Outdent(); + } + + printer->Print( + "} else {\n" + " goto handle_uninterpreted;\n" + "}\n"); + + // switch() is slow since it can't be predicted well. Insert some if()s + // here that attempt to predict the next tag. + if (field->is_repeated() && !field->options().packed()) { + // Expect repeats of this field. + printer->Print( + "if (input->ExpectTag($tag$)) goto parse_$name$;\n", + "tag", SimpleItoa(WireFormat::MakeTag(field)), + "name", field->name()); + } + + if (i + 1 < descriptor_->field_count()) { + // Expect the next field in order. + const FieldDescriptor* next_field = ordered_fields[i + 1]; + printer->Print( + "if (input->ExpectTag($next_tag$)) goto parse_$next_name$;\n", + "next_tag", SimpleItoa(WireFormat::MakeTag(next_field)), + "next_name", next_field->name()); + } else { + // Expect EOF. + // TODO(kenton): Expect group end-tag? + printer->Print( + "if (input->ExpectAtEnd()) return true;\n"); + } + + printer->Print( + "break;\n"); + + printer->Outdent(); + printer->Print("}\n\n"); + } + + printer->Print( + "default: {\n" + "handle_uninterpreted:\n"); + printer->Indent(); + } + + // Is this an end-group tag? If so, this must be the end of the message. + printer->Print( + "if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==\n" + " ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {\n" + " return true;\n" + "}\n"); + + // Handle extension ranges. 
+  if (descriptor_->extension_range_count() > 0) {
+    printer->Print(
+      "if (");
+    for (int i = 0; i < descriptor_->extension_range_count(); i++) {
+      const Descriptor::ExtensionRange* range =
+        descriptor_->extension_range(i);
+      if (i > 0) printer->Print(" ||\n    ");
+
+      uint32 start_tag = WireFormatLite::MakeTag(
+        range->start, static_cast<WireFormatLite::WireType>(0));
+      uint32 end_tag = WireFormatLite::MakeTag(
+        range->end, static_cast<WireFormatLite::WireType>(0));
+
+      if (range->end > FieldDescriptor::kMaxNumber) {
+        printer->Print(
+          "($start$u <= tag)",
+          "start", SimpleItoa(start_tag));
+      } else {
+        printer->Print(
+          "($start$u <= tag && tag < $end$u)",
+          "start", SimpleItoa(start_tag),
+          "end", SimpleItoa(end_tag));
+      }
+    }
+    printer->Print(") {\n");
+    if (HasUnknownFields(descriptor_->file())) {
+      printer->Print(
+        "  DO_(_extensions_.ParseField(tag, input, default_instance_,\n"
+        "                              mutable_unknown_fields()));\n");
+    } else {
+      printer->Print(
+        "  DO_(_extensions_.ParseField(tag, input, default_instance_));\n");
+    }
+    printer->Print(
+      "  continue;\n"
+      "}\n");
+  }
+
+  // We really don't recognize this tag.  Skip it.
+  if (HasUnknownFields(descriptor_->file())) {
+    printer->Print(
+      "DO_(::google::protobuf::internal::WireFormat::SkipField(\n"
+      "      input, tag, mutable_unknown_fields()));\n");
+  } else {
+    printer->Print(
+      "DO_(::google::protobuf::internal::WireFormatLite::SkipField(input, tag));\n");
+  }
+
+  if (descriptor_->field_count() > 0) {
+    printer->Print("break;\n");
+    printer->Outdent();
+    printer->Print("}\n");    // default:
+    printer->Outdent();
+    printer->Print("}\n");    // switch
+  }
+
+  printer->Outdent();
+  printer->Outdent();
+  printer->Print(
+    "  }\n"                   // while
+    "  return true;\n"
+    "#undef DO_\n"
+    "}\n");
+}
+
+void MessageGenerator::GenerateSerializeOneField(
+    io::Printer* printer, const FieldDescriptor* field, bool to_array) {
+  PrintFieldComment(printer, field);
+
+  if (!field->is_repeated()) {
+    printer->Print(
+      "if (has_$name$()) {\n",
+      "name", FieldName(field));
+    printer->Indent();
+  }
+
+  if (to_array) {
+    field_generators_.get(field).GenerateSerializeWithCachedSizesToArray(
+        printer);
+  } else {
+    field_generators_.get(field).GenerateSerializeWithCachedSizes(printer);
+  }
+
+  if (!field->is_repeated()) {
+    printer->Outdent();
+    printer->Print("}\n");
+  }
+  printer->Print("\n");
+}
+
+void MessageGenerator::GenerateSerializeOneExtensionRange(
+    io::Printer* printer, const Descriptor::ExtensionRange* range,
+    bool to_array) {
+  map<string, string> vars;
+  vars["start"] = SimpleItoa(range->start);
+  vars["end"] = SimpleItoa(range->end);
+  printer->Print(vars,
+    "// Extension range [$start$, $end$)\n");
+  if (to_array) {
+    printer->Print(vars,
+      "target = _extensions_.SerializeWithCachedSizesToArray(\n"
+      "    $start$, $end$, target);\n\n");
+  } else {
+    printer->Print(vars,
+      "_extensions_.SerializeWithCachedSizes(\n"
+      "    $start$, $end$, output);\n\n");
+  }
+}
+
+void MessageGenerator::
+GenerateSerializeWithCachedSizes(io::Printer* printer) {
+  if (descriptor_->options().message_set_wire_format()) {
+    // Special-case MessageSet.
+    printer->Print(
+      "void $classname$::SerializeWithCachedSizes(\n"
+      "    ::google::protobuf::io::CodedOutputStream* output) const {\n"
+      "  _extensions_.SerializeMessageSetWithCachedSizes(output);\n",
+      "classname", classname_);
+    if (HasUnknownFields(descriptor_->file())) {
+      printer->Print(
+        "  ::google::protobuf::internal::WireFormat::SerializeUnknownMessageSetItems(\n"
+        "      unknown_fields(), output);\n");
+    }
+    printer->Print(
+      "}\n");
+    return;
+  }
+
+  printer->Print(
+    "void $classname$::SerializeWithCachedSizes(\n"
+    "    ::google::protobuf::io::CodedOutputStream* output) const {\n",
+    "classname", classname_);
+  printer->Indent();
+
+  GenerateSerializeWithCachedSizesBody(printer, false);
+
+  printer->Outdent();
+  printer->Print(
+    "}\n");
+}
+
+void MessageGenerator::
+GenerateSerializeWithCachedSizesToArray(io::Printer* printer) {
+  if (descriptor_->options().message_set_wire_format()) {
+    // Special-case MessageSet.
+    printer->Print(
+      "::google::protobuf::uint8* $classname$::SerializeWithCachedSizesToArray(\n"
+      "    ::google::protobuf::uint8* target) const {\n"
+      "  target =\n"
+      "      _extensions_.SerializeMessageSetWithCachedSizesToArray(target);\n",
+      "classname", classname_);
+    if (HasUnknownFields(descriptor_->file())) {
+      printer->Print(
+        "  target = ::google::protobuf::internal::WireFormat::\n"
+        "             SerializeUnknownMessageSetItemsToArray(\n"
+        "               unknown_fields(), target);\n");
+    }
+    printer->Print(
+      "  return target;\n"
+      "}\n");
+    return;
+  }
+
+  printer->Print(
+    "::google::protobuf::uint8* $classname$::SerializeWithCachedSizesToArray(\n"
+    "    ::google::protobuf::uint8* target) const {\n",
+    "classname", classname_);
+  printer->Indent();
+
+  GenerateSerializeWithCachedSizesBody(printer, true);
+
+  printer->Outdent();
+  printer->Print(
+    "  return target;\n"
+    "}\n");
+}
+
+void MessageGenerator::
+GenerateSerializeWithCachedSizesBody(io::Printer* printer, bool to_array) {
+  scoped_array<const FieldDescriptor*> ordered_fields(
+    SortFieldsByNumber(descriptor_));
+
+  vector<const Descriptor::ExtensionRange*> sorted_extensions;
+  for (int i = 0; i < descriptor_->extension_range_count(); ++i) {
+    sorted_extensions.push_back(descriptor_->extension_range(i));
+  }
+  sort(sorted_extensions.begin(), sorted_extensions.end(),
+       ExtensionRangeSorter());
+
+  // Merge the fields and the extension ranges, both sorted by field number.
+  int i, j;
+  for (i = 0, j = 0;
+       i < descriptor_->field_count() || j < sorted_extensions.size();
+       ) {
+    if (i == descriptor_->field_count()) {
+      GenerateSerializeOneExtensionRange(printer,
+                                         sorted_extensions[j++],
+                                         to_array);
+    } else if (j == sorted_extensions.size()) {
+      GenerateSerializeOneField(printer, ordered_fields[i++], to_array);
+    } else if (ordered_fields[i]->number() < sorted_extensions[j]->start) {
+      GenerateSerializeOneField(printer, ordered_fields[i++], to_array);
+    } else {
+      GenerateSerializeOneExtensionRange(printer,
+                                         sorted_extensions[j++],
+                                         to_array);
+    }
+  }
+
+  if (HasUnknownFields(descriptor_->file())) {
+    printer->Print("if (!unknown_fields().empty()) {\n");
+    printer->Indent();
+    if (to_array) {
+      printer->Print(
+        "target = "
+        "::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(\n"
+        "    unknown_fields(), target);\n");
+    } else {
+      printer->Print(
+        "::google::protobuf::internal::WireFormat::SerializeUnknownFields(\n"
+        "    unknown_fields(), output);\n");
+    }
+    printer->Outdent();
+
+    printer->Print(
+      "}\n");
+  }
+}
+
+void MessageGenerator::
+GenerateByteSize(io::Printer* printer) {
+  if (descriptor_->options().message_set_wire_format()) {
+    // Special-case MessageSet.
+ printer->Print( + "int $classname$::ByteSize() const {\n" + " int total_size = _extensions_.MessageSetByteSize();\n", + "classname", classname_); + if (HasUnknownFields(descriptor_->file())) { + printer->Print( + " total_size += ::google::protobuf::internal::WireFormat::\n" + " ComputeUnknownMessageSetItemsSize(unknown_fields());\n"); + } + printer->Print( + " GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();\n" + " _cached_size_ = total_size;\n" + " GOOGLE_SAFE_CONCURRENT_WRITES_END();\n" + " return total_size;\n" + "}\n"); + return; + } + + printer->Print( + "int $classname$::ByteSize() const {\n", + "classname", classname_); + printer->Indent(); + printer->Print( + "int total_size = 0;\n" + "\n"); + + int last_index = -1; + + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + + if (!field->is_repeated()) { + // See above in GenerateClear for an explanation of this. + // TODO(kenton): Share code? Unclear how to do so without + // over-engineering. + if ((i / 8) != (last_index / 8) || + last_index < 0) { + if (last_index >= 0) { + printer->Outdent(); + printer->Print("}\n"); + } + printer->Print( + "if (_has_bits_[$index$ / 32] & (0xffu << ($index$ % 32))) {\n", + "index", SimpleItoa(field->index())); + printer->Indent(); + } + last_index = i; + + PrintFieldComment(printer, field); + + printer->Print( + "if (has_$name$()) {\n", + "name", FieldName(field)); + printer->Indent(); + + field_generators_.get(field).GenerateByteSize(printer); + + printer->Outdent(); + printer->Print( + "}\n" + "\n"); + } + } + + if (last_index >= 0) { + printer->Outdent(); + printer->Print("}\n"); + } + + // Repeated fields don't use _has_bits_ so we count them in a separate + // pass. + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + + if (field->is_repeated()) { + PrintFieldComment(printer, field); + field_generators_.get(field).GenerateByteSize(printer); + printer->Print("\n"); + } + } + + if (descriptor_->extension_range_count() > 0) { + printer->Print( + "total_size += _extensions_.ByteSize();\n" + "\n"); + } + + if (HasUnknownFields(descriptor_->file())) { + printer->Print("if (!unknown_fields().empty()) {\n"); + printer->Indent(); + printer->Print( + "total_size +=\n" + " ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(\n" + " unknown_fields());\n"); + printer->Outdent(); + printer->Print("}\n"); + } + + // We update _cached_size_ even though this is a const method. In theory, + // this is not thread-compatible, because concurrent writes have undefined + // results. In practice, since any concurrent writes will be writing the + // exact same value, it works on all common processors. In a future version + // of C++, _cached_size_ should be made into an atomic. + printer->Print( + "GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();\n" + "_cached_size_ = total_size;\n" + "GOOGLE_SAFE_CONCURRENT_WRITES_END();\n" + "return total_size;\n"); + + printer->Outdent(); + printer->Print("}\n"); +} + +void MessageGenerator:: +GenerateIsInitialized(io::Printer* printer) { + printer->Print( + "bool $classname$::IsInitialized() const {\n", + "classname", classname_); + printer->Indent(); + + // Check that all required fields in this message are set. We can do this + // most efficiently by checking 32 "has bits" at a time. 
+ int has_bits_array_size = (descriptor_->field_count() + 31) / 32; + for (int i = 0; i < has_bits_array_size; i++) { + uint32 mask = 0; + for (int bit = 0; bit < 32; bit++) { + int index = i * 32 + bit; + if (index >= descriptor_->field_count()) break; + const FieldDescriptor* field = descriptor_->field(index); + + if (field->is_required()) { + mask |= 1 << bit; + } + } + + if (mask != 0) { + char buffer[kFastToBufferSize]; + printer->Print( + "if ((_has_bits_[$i$] & 0x$mask$) != 0x$mask$) return false;\n", + "i", SimpleItoa(i), + "mask", FastHex32ToBuffer(mask, buffer)); + } + } + + // Now check that all embedded messages are initialized. + printer->Print("\n"); + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE && + HasRequiredFields(field->message_type())) { + if (field->is_repeated()) { + printer->Print( + "for (int i = 0; i < $name$_size(); i++) {\n" + " if (!this->$name$(i).IsInitialized()) return false;\n" + "}\n", + "name", FieldName(field)); + } else { + printer->Print( + "if (has_$name$()) {\n" + " if (!this->$name$().IsInitialized()) return false;\n" + "}\n", + "name", FieldName(field)); + } + } + } + + if (descriptor_->extension_range_count() > 0) { + printer->Print( + "\n" + "if (!_extensions_.IsInitialized()) return false;"); + } + + printer->Outdent(); + printer->Print( + " return true;\n" + "}\n"); +} + + +} // namespace cpp +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_message.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_message.h new file mode 100644 index 0000000000..04778f6d1e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_message.h @@ -0,0 +1,170 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifndef GOOGLE_PROTOBUF_COMPILER_CPP_MESSAGE_H__ +#define GOOGLE_PROTOBUF_COMPILER_CPP_MESSAGE_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { + namespace io { + class Printer; // printer.h + } +} + +namespace protobuf { +namespace compiler { +namespace cpp { + +class EnumGenerator; // enum.h +class ExtensionGenerator; // extension.h + +class MessageGenerator { + public: + // See generator.cc for the meaning of dllexport_decl. + explicit MessageGenerator(const Descriptor* descriptor, + const string& dllexport_decl); + ~MessageGenerator(); + + // Header stuff. + + // Generate foward declarations for this class and all its nested types. + void GenerateForwardDeclaration(io::Printer* printer); + + // Generate definitions of all nested enums (must come before class + // definitions because those classes use the enums definitions). + void GenerateEnumDefinitions(io::Printer* printer); + + // Generate specializations of GetEnumDescriptor(). + // Precondition: in ::google::protobuf namespace. + void GenerateGetEnumDescriptorSpecializations(io::Printer* printer); + + // Generate definitions for this class and all its nested types. + void GenerateClassDefinition(io::Printer* printer); + + // Generate definitions of inline methods (placed at the end of the header + // file). + void GenerateInlineMethods(io::Printer* printer); + + // Source file stuff. + + // Generate code which declares all the global descriptor pointers which + // will be initialized by the methods below. + void GenerateDescriptorDeclarations(io::Printer* printer); + + // Generate code that initializes the global variable storing the message's + // descriptor. + void GenerateDescriptorInitializer(io::Printer* printer, int index); + + // Generate code that calls MessageFactory::InternalRegisterGeneratedMessage() + // for all types. + void GenerateTypeRegistrations(io::Printer* printer); + + // Generates code that allocates the message's default instance. + void GenerateDefaultInstanceAllocator(io::Printer* printer); + + // Generates code that initializes the message's default instance. This + // is separate from allocating because all default instances must be + // allocated before any can be initialized. + void GenerateDefaultInstanceInitializer(io::Printer* printer); + + // Generates code that should be run when ShutdownProtobufLibrary() is called, + // to delete all dynamically-allocated objects. + void GenerateShutdownCode(io::Printer* printer); + + // Generate all non-inline methods for this class. + void GenerateClassMethods(io::Printer* printer); + + private: + // Generate declarations and definitions of accessors for fields. + void GenerateFieldAccessorDeclarations(io::Printer* printer); + void GenerateFieldAccessorDefinitions(io::Printer* printer); + + // Generate the field offsets array. 
+ void GenerateOffsets(io::Printer* printer); + + // Generate constructors and destructor. + void GenerateStructors(io::Printer* printer); + + // The compiler typically generates multiple copies of each constructor and + // destructor: http://gcc.gnu.org/bugs.html#nonbugs_cxx + // Placing common code in a separate method reduces the generated code size. + // + // Generate the shared constructor code. + void GenerateSharedConstructorCode(io::Printer* printer); + // Generate the shared destructor code. + void GenerateSharedDestructorCode(io::Printer* printer); + + // Generate standard Message methods. + void GenerateClear(io::Printer* printer); + void GenerateMergeFromCodedStream(io::Printer* printer); + void GenerateSerializeWithCachedSizes(io::Printer* printer); + void GenerateSerializeWithCachedSizesToArray(io::Printer* printer); + void GenerateSerializeWithCachedSizesBody(io::Printer* printer, + bool to_array); + void GenerateByteSize(io::Printer* printer); + void GenerateMergeFrom(io::Printer* printer); + void GenerateCopyFrom(io::Printer* printer); + void GenerateSwap(io::Printer* printer); + void GenerateIsInitialized(io::Printer* printer); + + // Helpers for GenerateSerializeWithCachedSizes(). + void GenerateSerializeOneField(io::Printer* printer, + const FieldDescriptor* field, + bool unbounded); + void GenerateSerializeOneExtensionRange( + io::Printer* printer, const Descriptor::ExtensionRange* range, + bool unbounded); + + + const Descriptor* descriptor_; + string classname_; + string dllexport_decl_; + FieldGeneratorMap field_generators_; + scoped_array > nested_generators_; + scoped_array > enum_generators_; + scoped_array > extension_generators_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(MessageGenerator); +}; + +} // namespace cpp +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_CPP_MESSAGE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_message_field.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_message_field.cc new file mode 100644 index 0000000000..23e75b87d4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_message_field.cc @@ -0,0 +1,277 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +namespace { + +void SetMessageVariables(const FieldDescriptor* descriptor, + map* variables) { + SetCommonFieldVariables(descriptor, variables); + (*variables)["type"] = FieldMessageTypeName(descriptor); + (*variables)["stream_writer"] = (*variables)["declared_type"] + + (HasFastArraySerialization(descriptor->message_type()->file()) ? + "MaybeToArray" : + ""); +} + +} // namespace + +// =================================================================== + +MessageFieldGenerator:: +MessageFieldGenerator(const FieldDescriptor* descriptor) + : descriptor_(descriptor) { + SetMessageVariables(descriptor, &variables_); +} + +MessageFieldGenerator::~MessageFieldGenerator() {} + +void MessageFieldGenerator:: +GeneratePrivateMembers(io::Printer* printer) const { + printer->Print(variables_, "$type$* $name$_;\n"); +} + +void MessageFieldGenerator:: +GenerateAccessorDeclarations(io::Printer* printer) const { + printer->Print(variables_, + "inline const $type$& $name$() const$deprecation$;\n" + "inline $type$* mutable_$name$()$deprecation$;\n" + "inline $type$* release_$name$()$deprecation$;\n"); +} + +void MessageFieldGenerator:: +GenerateInlineAccessorDefinitions(io::Printer* printer) const { + printer->Print(variables_, + "inline const $type$& $classname$::$name$() const {\n" + " return $name$_ != NULL ? 
*$name$_ : *default_instance_->$name$_;\n" + "}\n" + "inline $type$* $classname$::mutable_$name$() {\n" + " set_has_$name$();\n" + " if ($name$_ == NULL) $name$_ = new $type$;\n" + " return $name$_;\n" + "}\n" + "inline $type$* $classname$::release_$name$() {\n" + " clear_has_$name$();\n" + " $type$* temp = $name$_;\n" + " $name$_ = NULL;\n" + " return temp;\n" + "}\n"); +} + +void MessageFieldGenerator:: +GenerateClearingCode(io::Printer* printer) const { + printer->Print(variables_, + "if ($name$_ != NULL) $name$_->$type$::Clear();\n"); +} + +void MessageFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + printer->Print(variables_, + "mutable_$name$()->$type$::MergeFrom(from.$name$());\n"); +} + +void MessageFieldGenerator:: +GenerateSwappingCode(io::Printer* printer) const { + printer->Print(variables_, "std::swap($name$_, other->$name$_);\n"); +} + +void MessageFieldGenerator:: +GenerateConstructorCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_ = NULL;\n"); +} + +void MessageFieldGenerator:: +GenerateMergeFromCodedStream(io::Printer* printer) const { + if (descriptor_->type() == FieldDescriptor::TYPE_MESSAGE) { + printer->Print(variables_, + "DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(\n" + " input, mutable_$name$()));\n"); + } else { + printer->Print(variables_, + "DO_(::google::protobuf::internal::WireFormatLite::ReadGroupNoVirtual(\n" + " $number$, input, mutable_$name$()));\n"); + } +} + +void MessageFieldGenerator:: +GenerateSerializeWithCachedSizes(io::Printer* printer) const { + printer->Print(variables_, + "::google::protobuf::internal::WireFormatLite::Write$stream_writer$(\n" + " $number$, this->$name$(), output);\n"); +} + +void MessageFieldGenerator:: +GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const { + printer->Print(variables_, + "target = ::google::protobuf::internal::WireFormatLite::\n" + " Write$declared_type$NoVirtualToArray(\n" + " $number$, this->$name$(), target);\n"); +} + +void MessageFieldGenerator:: +GenerateByteSize(io::Printer* printer) const { + printer->Print(variables_, + "total_size += $tag_size$ +\n" + " ::google::protobuf::internal::WireFormatLite::$declared_type$SizeNoVirtual(\n" + " this->$name$());\n"); +} + +// =================================================================== + +RepeatedMessageFieldGenerator:: +RepeatedMessageFieldGenerator(const FieldDescriptor* descriptor) + : descriptor_(descriptor) { + SetMessageVariables(descriptor, &variables_); +} + +RepeatedMessageFieldGenerator::~RepeatedMessageFieldGenerator() {} + +void RepeatedMessageFieldGenerator:: +GeneratePrivateMembers(io::Printer* printer) const { + printer->Print(variables_, + "::google::protobuf::RepeatedPtrField< $type$ > $name$_;\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateAccessorDeclarations(io::Printer* printer) const { + printer->Print(variables_, + "inline const $type$& $name$(int index) const$deprecation$;\n" + "inline $type$* mutable_$name$(int index)$deprecation$;\n" + "inline $type$* add_$name$()$deprecation$;\n"); + printer->Print(variables_, + "inline const ::google::protobuf::RepeatedPtrField< $type$ >&\n" + " $name$() const$deprecation$;\n" + "inline ::google::protobuf::RepeatedPtrField< $type$ >*\n" + " mutable_$name$()$deprecation$;\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateInlineAccessorDefinitions(io::Printer* printer) const { + printer->Print(variables_, + "inline const $type$& $classname$::$name$(int index) const {\n" + " return 
$name$_.Get(index);\n" + "}\n" + "inline $type$* $classname$::mutable_$name$(int index) {\n" + " return $name$_.Mutable(index);\n" + "}\n" + "inline $type$* $classname$::add_$name$() {\n" + " return $name$_.Add();\n" + "}\n"); + printer->Print(variables_, + "inline const ::google::protobuf::RepeatedPtrField< $type$ >&\n" + "$classname$::$name$() const {\n" + " return $name$_;\n" + "}\n" + "inline ::google::protobuf::RepeatedPtrField< $type$ >*\n" + "$classname$::mutable_$name$() {\n" + " return &$name$_;\n" + "}\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateClearingCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_.Clear();\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_.MergeFrom(from.$name$_);\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateSwappingCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_.Swap(&other->$name$_);\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateConstructorCode(io::Printer* printer) const { + // Not needed for repeated fields. +} + +void RepeatedMessageFieldGenerator:: +GenerateMergeFromCodedStream(io::Printer* printer) const { + if (descriptor_->type() == FieldDescriptor::TYPE_MESSAGE) { + printer->Print(variables_, + "DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(\n" + " input, add_$name$()));\n"); + } else { + printer->Print(variables_, + "DO_(::google::protobuf::internal::WireFormatLite::ReadGroupNoVirtual(\n" + " $number$, input, add_$name$()));\n"); + } +} + +void RepeatedMessageFieldGenerator:: +GenerateSerializeWithCachedSizes(io::Printer* printer) const { + printer->Print(variables_, + "for (int i = 0; i < this->$name$_size(); i++) {\n" + " ::google::protobuf::internal::WireFormatLite::Write$stream_writer$(\n" + " $number$, this->$name$(i), output);\n" + "}\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const { + printer->Print(variables_, + "for (int i = 0; i < this->$name$_size(); i++) {\n" + " target = ::google::protobuf::internal::WireFormatLite::\n" + " Write$declared_type$NoVirtualToArray(\n" + " $number$, this->$name$(i), target);\n" + "}\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateByteSize(io::Printer* printer) const { + printer->Print(variables_, + "total_size += $tag_size$ * this->$name$_size();\n" + "for (int i = 0; i < this->$name$_size(); i++) {\n" + " total_size +=\n" + " ::google::protobuf::internal::WireFormatLite::$declared_type$SizeNoVirtual(\n" + " this->$name$(i));\n" + "}\n"); +} + +} // namespace cpp +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_message_field.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_message_field.h new file mode 100644 index 0000000000..f5147278b4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_message_field.h @@ -0,0 +1,102 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// http://code.google.com/p/protobuf/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Author: kenton@google.com (Kenton Varda)
+//  Based on original Protocol Buffers design by
+//  Sanjay Ghemawat, Jeff Dean, and others.
+
+#ifndef GOOGLE_PROTOBUF_COMPILER_CPP_MESSAGE_FIELD_H__
+#define GOOGLE_PROTOBUF_COMPILER_CPP_MESSAGE_FIELD_H__
+
+#include <map>
+#include <string>
+#include <google/protobuf/compiler/cpp/cpp_field.h>
+
+namespace google {
+namespace protobuf {
+namespace compiler {
+namespace cpp {
+
+class MessageFieldGenerator : public FieldGenerator {
+ public:
+  explicit MessageFieldGenerator(const FieldDescriptor* descriptor);
+  ~MessageFieldGenerator();
+
+  // implements FieldGenerator ---------------------------------------
+  void GeneratePrivateMembers(io::Printer* printer) const;
+  void GenerateAccessorDeclarations(io::Printer* printer) const;
+  void GenerateInlineAccessorDefinitions(io::Printer* printer) const;
+  void GenerateClearingCode(io::Printer* printer) const;
+  void GenerateMergingCode(io::Printer* printer) const;
+  void GenerateSwappingCode(io::Printer* printer) const;
+  void GenerateConstructorCode(io::Printer* printer) const;
+  void GenerateMergeFromCodedStream(io::Printer* printer) const;
+  void GenerateSerializeWithCachedSizes(io::Printer* printer) const;
+  void GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const;
+  void GenerateByteSize(io::Printer* printer) const;
+
+ private:
+  const FieldDescriptor* descriptor_;
+  map<string, string> variables_;
+
+  GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(MessageFieldGenerator);
+};
+
+class RepeatedMessageFieldGenerator : public FieldGenerator {
+ public:
+  explicit RepeatedMessageFieldGenerator(const FieldDescriptor* descriptor);
+  ~RepeatedMessageFieldGenerator();
+
+  // implements FieldGenerator ---------------------------------------
+  void GeneratePrivateMembers(io::Printer* printer) const;
+  void GenerateAccessorDeclarations(io::Printer* printer) const;
+  void GenerateInlineAccessorDefinitions(io::Printer* printer) const;
+  void GenerateClearingCode(io::Printer* printer) const;
+  void GenerateMergingCode(io::Printer* printer) const;
+
void GenerateSwappingCode(io::Printer* printer) const; + void GenerateConstructorCode(io::Printer* printer) const; + void GenerateMergeFromCodedStream(io::Printer* printer) const; + void GenerateSerializeWithCachedSizes(io::Printer* printer) const; + void GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const; + void GenerateByteSize(io::Printer* printer) const; + + private: + const FieldDescriptor* descriptor_; + map variables_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RepeatedMessageFieldGenerator); +}; + +} // namespace cpp +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_CPP_MESSAGE_FIELD_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_plugin_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_plugin_unittest.cc new file mode 100644 index 0000000000..5c4aa4fbc5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_plugin_unittest.cc @@ -0,0 +1,121 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// +// TODO(kenton): Share code with the versions of this test in other languages? +// It seemed like parameterizing it would add more complexity than it is +// worth. 
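// ---------------------------------------------------------------------------
// Editor's illustrative sketch (not part of the vendored protobuf sources):
// the generators above assemble C++ by running io::Printer over template
// strings in which $classname$, $name$ and $type$ are looked up in the
// map<string, string> filled by SetMessageVariables().  The standalone program
// below mimics that substitution for one template taken from
// RepeatedMessageFieldGenerator::GenerateInlineAccessorDefinitions; the names
// Foo, Bar and bar are hypothetical, and expand() is not protobuf API.
#include <cstddef>
#include <iostream>
#include <map>
#include <string>

// Replace every $key$ placeholder with variables.at(key).
static std::string expand(const std::string& tmpl,
                          const std::map<std::string, std::string>& variables) {
  std::string out;
  std::size_t i = 0;
  while (i < tmpl.size()) {
    if (tmpl[i] != '$') { out += tmpl[i++]; continue; }
    std::size_t end = tmpl.find('$', i + 1);  // closing delimiter
    out += variables.at(tmpl.substr(i + 1, end - i - 1));
    i = end + 1;
  }
  return out;
}

int main() {
  const std::map<std::string, std::string> vars = {
      {"classname", "Foo"}, {"name", "bar"}, {"type", "Bar"}};
  std::cout << expand(
      "inline $type$* $classname$::add_$name$() {\n"
      "  return $name$_.Add();\n"
      "}\n",
      vars);
  // Prints:
  //   inline Bar* Foo::add_bar() {
  //     return bar_.Add();
  //   }
  return 0;
}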
+ +#include +#include +#include +#include + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { +namespace { + +class TestGenerator : public CodeGenerator { + public: + TestGenerator() {} + ~TestGenerator() {} + + virtual bool Generate(const FileDescriptor* file, + const string& parameter, + GeneratorContext* context, + string* error) const { + TryInsert("test.pb.h", "includes", context); + TryInsert("test.pb.h", "namespace_scope", context); + TryInsert("test.pb.h", "global_scope", context); + TryInsert("test.pb.h", "class_scope:foo.Bar", context); + TryInsert("test.pb.h", "class_scope:foo.Bar.Baz", context); + + TryInsert("test.pb.cc", "includes", context); + TryInsert("test.pb.cc", "namespace_scope", context); + TryInsert("test.pb.cc", "global_scope", context); + return true; + } + + void TryInsert(const string& filename, const string& insertion_point, + GeneratorContext* context) const { + scoped_ptr output( + context->OpenForInsert(filename, insertion_point)); + io::Printer printer(output.get(), '$'); + printer.Print("// inserted $name$\n", "name", insertion_point); + } +}; + +// This test verifies that all the expected insertion points exist. It does +// not verify that they are correctly-placed; that would require actually +// compiling the output which is a bit more than I care to do for this test. +TEST(CppPluginTest, PluginTest) { + File::WriteStringToFileOrDie( + "syntax = \"proto2\";\n" + "package foo;\n" + "message Bar {\n" + " message Baz {}\n" + "}\n", + TestTempDir() + "/test.proto"); + + google::protobuf::compiler::CommandLineInterface cli; + cli.SetInputsAreProtoPathRelative(true); + + CppGenerator cpp_generator; + TestGenerator test_generator; + cli.RegisterGenerator("--cpp_out", &cpp_generator, ""); + cli.RegisterGenerator("--test_out", &test_generator, ""); + + string proto_path = "-I" + TestTempDir(); + string cpp_out = "--cpp_out=" + TestTempDir(); + string test_out = "--test_out=" + TestTempDir(); + + const char* argv[] = { + "protoc", + proto_path.c_str(), + cpp_out.c_str(), + test_out.c_str(), + "test.proto" + }; + + EXPECT_EQ(0, cli.Run(5, argv)); +} + +} // namespace +} // namespace cpp +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_primitive_field.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_primitive_field.cc new file mode 100644 index 0000000000..5e8df0f497 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_primitive_field.cc @@ -0,0 +1,382 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
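// Editor's note (illustrative, not part of the vendored file): TryInsert()
// above exercises protoc's insertion-point mechanism.  A generated test.pb.h
// carries markers of the form
//
//   // @@protoc_insertion_point(namespace_scope)
//
// and a second generator that calls
// context->OpenForInsert("test.pb.h", "namespace_scope") receives a stream
// whose contents protoc splices in at that marker, so after the run the file
// would contain the "// inserted namespace_scope" line emitted by the Printer
// in TryInsert().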
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +using internal::WireFormatLite; + +namespace { + +// For encodings with fixed sizes, returns that size in bytes. Otherwise +// returns -1. +int FixedSize(FieldDescriptor::Type type) { + switch (type) { + case FieldDescriptor::TYPE_INT32 : return -1; + case FieldDescriptor::TYPE_INT64 : return -1; + case FieldDescriptor::TYPE_UINT32 : return -1; + case FieldDescriptor::TYPE_UINT64 : return -1; + case FieldDescriptor::TYPE_SINT32 : return -1; + case FieldDescriptor::TYPE_SINT64 : return -1; + case FieldDescriptor::TYPE_FIXED32 : return WireFormatLite::kFixed32Size; + case FieldDescriptor::TYPE_FIXED64 : return WireFormatLite::kFixed64Size; + case FieldDescriptor::TYPE_SFIXED32: return WireFormatLite::kSFixed32Size; + case FieldDescriptor::TYPE_SFIXED64: return WireFormatLite::kSFixed64Size; + case FieldDescriptor::TYPE_FLOAT : return WireFormatLite::kFloatSize; + case FieldDescriptor::TYPE_DOUBLE : return WireFormatLite::kDoubleSize; + + case FieldDescriptor::TYPE_BOOL : return WireFormatLite::kBoolSize; + case FieldDescriptor::TYPE_ENUM : return -1; + + case FieldDescriptor::TYPE_STRING : return -1; + case FieldDescriptor::TYPE_BYTES : return -1; + case FieldDescriptor::TYPE_GROUP : return -1; + case FieldDescriptor::TYPE_MESSAGE : return -1; + + // No default because we want the compiler to complain if any new + // types are added. 
+ } + GOOGLE_LOG(FATAL) << "Can't get here."; + return -1; +} + +void SetPrimitiveVariables(const FieldDescriptor* descriptor, + map* variables) { + SetCommonFieldVariables(descriptor, variables); + (*variables)["type"] = PrimitiveTypeName(descriptor->cpp_type()); + (*variables)["default"] = DefaultValue(descriptor); + (*variables)["tag"] = SimpleItoa(internal::WireFormat::MakeTag(descriptor)); + int fixed_size = FixedSize(descriptor->type()); + if (fixed_size != -1) { + (*variables)["fixed_size"] = SimpleItoa(fixed_size); + } + (*variables)["wire_format_field_type"] = + "::google::protobuf::internal::WireFormatLite::" + FieldDescriptorProto_Type_Name( + static_cast(descriptor->type())); +} + +} // namespace + +// =================================================================== + +PrimitiveFieldGenerator:: +PrimitiveFieldGenerator(const FieldDescriptor* descriptor) + : descriptor_(descriptor) { + SetPrimitiveVariables(descriptor, &variables_); +} + +PrimitiveFieldGenerator::~PrimitiveFieldGenerator() {} + +void PrimitiveFieldGenerator:: +GeneratePrivateMembers(io::Printer* printer) const { + printer->Print(variables_, "$type$ $name$_;\n"); +} + +void PrimitiveFieldGenerator:: +GenerateAccessorDeclarations(io::Printer* printer) const { + printer->Print(variables_, + "inline $type$ $name$() const$deprecation$;\n" + "inline void set_$name$($type$ value)$deprecation$;\n"); +} + +void PrimitiveFieldGenerator:: +GenerateInlineAccessorDefinitions(io::Printer* printer) const { + printer->Print(variables_, + "inline $type$ $classname$::$name$() const {\n" + " return $name$_;\n" + "}\n" + "inline void $classname$::set_$name$($type$ value) {\n" + " set_has_$name$();\n" + " $name$_ = value;\n" + "}\n"); +} + +void PrimitiveFieldGenerator:: +GenerateClearingCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_ = $default$;\n"); +} + +void PrimitiveFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + printer->Print(variables_, "set_$name$(from.$name$());\n"); +} + +void PrimitiveFieldGenerator:: +GenerateSwappingCode(io::Printer* printer) const { + printer->Print(variables_, "std::swap($name$_, other->$name$_);\n"); +} + +void PrimitiveFieldGenerator:: +GenerateConstructorCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_ = $default$;\n"); +} + +void PrimitiveFieldGenerator:: +GenerateMergeFromCodedStream(io::Printer* printer) const { + printer->Print(variables_, + "DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<\n" + " $type$, $wire_format_field_type$>(\n" + " input, &$name$_)));\n" + "set_has_$name$();\n"); +} + +void PrimitiveFieldGenerator:: +GenerateSerializeWithCachedSizes(io::Printer* printer) const { + printer->Print(variables_, + "::google::protobuf::internal::WireFormatLite::Write$declared_type$(" + "$number$, this->$name$(), output);\n"); +} + +void PrimitiveFieldGenerator:: +GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const { + printer->Print(variables_, + "target = ::google::protobuf::internal::WireFormatLite::Write$declared_type$ToArray(" + "$number$, this->$name$(), target);\n"); +} + +void PrimitiveFieldGenerator:: +GenerateByteSize(io::Printer* printer) const { + int fixed_size = FixedSize(descriptor_->type()); + if (fixed_size == -1) { + printer->Print(variables_, + "total_size += $tag_size$ +\n" + " ::google::protobuf::internal::WireFormatLite::$declared_type$Size(\n" + " this->$name$());\n"); + } else { + printer->Print(variables_, + "total_size += $tag_size$ + $fixed_size$;\n"); + } +} + 
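// Editor's worked example (hypothetical field, not part of the vendored file):
// for "optional fixed32 x = 2;" FixedSize() above returns
// WireFormatLite::kFixed32Size (4) and the tag of field 2 with wire type
// FIXED32 encodes in one byte, so GenerateByteSize() emits just
//
//   total_size += 1 + 4;
//
// For "optional int32 y = 3;" FixedSize() returns -1, so the emitted code has
// to compute the varint length at runtime:
//
//   total_size += 1 +
//     ::google::protobuf::internal::WireFormatLite::Int32Size(
//       this->y());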
+// =================================================================== + +RepeatedPrimitiveFieldGenerator:: +RepeatedPrimitiveFieldGenerator(const FieldDescriptor* descriptor) + : descriptor_(descriptor) { + SetPrimitiveVariables(descriptor, &variables_); + + if (descriptor->options().packed()) { + variables_["packed_reader"] = "ReadPackedPrimitive"; + variables_["repeated_reader"] = "ReadRepeatedPrimitiveNoInline"; + } else { + variables_["packed_reader"] = "ReadPackedPrimitiveNoInline"; + variables_["repeated_reader"] = "ReadRepeatedPrimitive"; + } +} + +RepeatedPrimitiveFieldGenerator::~RepeatedPrimitiveFieldGenerator() {} + +void RepeatedPrimitiveFieldGenerator:: +GeneratePrivateMembers(io::Printer* printer) const { + printer->Print(variables_, + "::google::protobuf::RepeatedField< $type$ > $name$_;\n"); + if (descriptor_->options().packed() && HasGeneratedMethods(descriptor_->file())) { + printer->Print(variables_, + "mutable int _$name$_cached_byte_size_;\n"); + } +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateAccessorDeclarations(io::Printer* printer) const { + printer->Print(variables_, + "inline $type$ $name$(int index) const$deprecation$;\n" + "inline void set_$name$(int index, $type$ value)$deprecation$;\n" + "inline void add_$name$($type$ value)$deprecation$;\n"); + printer->Print(variables_, + "inline const ::google::protobuf::RepeatedField< $type$ >&\n" + " $name$() const$deprecation$;\n" + "inline ::google::protobuf::RepeatedField< $type$ >*\n" + " mutable_$name$()$deprecation$;\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateInlineAccessorDefinitions(io::Printer* printer) const { + printer->Print(variables_, + "inline $type$ $classname$::$name$(int index) const {\n" + " return $name$_.Get(index);\n" + "}\n" + "inline void $classname$::set_$name$(int index, $type$ value) {\n" + " $name$_.Set(index, value);\n" + "}\n" + "inline void $classname$::add_$name$($type$ value) {\n" + " $name$_.Add(value);\n" + "}\n"); + printer->Print(variables_, + "inline const ::google::protobuf::RepeatedField< $type$ >&\n" + "$classname$::$name$() const {\n" + " return $name$_;\n" + "}\n" + "inline ::google::protobuf::RepeatedField< $type$ >*\n" + "$classname$::mutable_$name$() {\n" + " return &$name$_;\n" + "}\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateClearingCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_.Clear();\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_.MergeFrom(from.$name$_);\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateSwappingCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_.Swap(&other->$name$_);\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateConstructorCode(io::Printer* printer) const { + // Not needed for repeated fields. 
+} + +void RepeatedPrimitiveFieldGenerator:: +GenerateMergeFromCodedStream(io::Printer* printer) const { + printer->Print(variables_, + "DO_((::google::protobuf::internal::WireFormatLite::$repeated_reader$<\n" + " $type$, $wire_format_field_type$>(\n" + " $tag_size$, $tag$, input, this->mutable_$name$())));\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateMergeFromCodedStreamWithPacking(io::Printer* printer) const { + printer->Print(variables_, + "DO_((::google::protobuf::internal::WireFormatLite::$packed_reader$<\n" + " $type$, $wire_format_field_type$>(\n" + " input, this->mutable_$name$())));\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateSerializeWithCachedSizes(io::Printer* printer) const { + if (descriptor_->options().packed()) { + // Write the tag and the size. + printer->Print(variables_, + "if (this->$name$_size() > 0) {\n" + " ::google::protobuf::internal::WireFormatLite::WriteTag(" + "$number$, " + "::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED, " + "output);\n" + " output->WriteVarint32(_$name$_cached_byte_size_);\n" + "}\n"); + } + printer->Print(variables_, + "for (int i = 0; i < this->$name$_size(); i++) {\n"); + if (descriptor_->options().packed()) { + printer->Print(variables_, + " ::google::protobuf::internal::WireFormatLite::Write$declared_type$NoTag(\n" + " this->$name$(i), output);\n"); + } else { + printer->Print(variables_, + " ::google::protobuf::internal::WireFormatLite::Write$declared_type$(\n" + " $number$, this->$name$(i), output);\n"); + } + printer->Print("}\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const { + if (descriptor_->options().packed()) { + // Write the tag and the size. + printer->Print(variables_, + "if (this->$name$_size() > 0) {\n" + " target = ::google::protobuf::internal::WireFormatLite::WriteTagToArray(\n" + " $number$,\n" + " ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED,\n" + " target);\n" + " target = ::google::protobuf::io::CodedOutputStream::WriteVarint32ToArray(\n" + " _$name$_cached_byte_size_, target);\n" + "}\n"); + } + printer->Print(variables_, + "for (int i = 0; i < this->$name$_size(); i++) {\n"); + if (descriptor_->options().packed()) { + printer->Print(variables_, + " target = ::google::protobuf::internal::WireFormatLite::\n" + " Write$declared_type$NoTagToArray(this->$name$(i), target);\n"); + } else { + printer->Print(variables_, + " target = ::google::protobuf::internal::WireFormatLite::\n" + " Write$declared_type$ToArray($number$, this->$name$(i), target);\n"); + } + printer->Print("}\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateByteSize(io::Printer* printer) const { + printer->Print(variables_, + "{\n" + " int data_size = 0;\n"); + printer->Indent(); + int fixed_size = FixedSize(descriptor_->type()); + if (fixed_size == -1) { + printer->Print(variables_, + "for (int i = 0; i < this->$name$_size(); i++) {\n" + " data_size += ::google::protobuf::internal::WireFormatLite::\n" + " $declared_type$Size(this->$name$(i));\n" + "}\n"); + } else { + printer->Print(variables_, + "data_size = $fixed_size$ * this->$name$_size();\n"); + } + + if (descriptor_->options().packed()) { + printer->Print(variables_, + "if (data_size > 0) {\n" + " total_size += $tag_size$ +\n" + " ::google::protobuf::internal::WireFormatLite::Int32Size(data_size);\n" + "}\n" + "_$name$_cached_byte_size_ = data_size;\n" + "total_size += data_size;\n"); + } else { + printer->Print(variables_, + "total_size += 
$tag_size$ * this->$name$_size() + data_size;\n"); + } + printer->Outdent(); + printer->Print("}\n"); +} + +} // namespace cpp +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_primitive_field.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_primitive_field.h new file mode 100644 index 0000000000..8fcd74ae55 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_primitive_field.h @@ -0,0 +1,103 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
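// Editor's worked example (hypothetical field, not part of the vendored file):
// for "repeated int32 values = 4 [packed = true];" GeneratePrivateMembers()
// adds "mutable int _values_cached_byte_size_;", GenerateByteSize() stores the
// payload length there, and GenerateSerializeWithCachedSizes() expands roughly
// to:
//
//   if (this->values_size() > 0) {
//     ::google::protobuf::internal::WireFormatLite::WriteTag(4,
//       ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED,
//       output);
//     output->WriteVarint32(_values_cached_byte_size_);
//   }
//   for (int i = 0; i < this->values_size(); i++) {
//     ::google::protobuf::internal::WireFormatLite::WriteInt32NoTag(
//       this->values(i), output);
//   }
//
// The cached member is "mutable" because ByteSize() is const yet has to stash
// the size that this later serialization pass writes out.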
+
+#ifndef GOOGLE_PROTOBUF_COMPILER_CPP_PRIMITIVE_FIELD_H__
+#define GOOGLE_PROTOBUF_COMPILER_CPP_PRIMITIVE_FIELD_H__
+
+#include <map>
+#include <string>
+#include <google/protobuf/compiler/cpp/cpp_field.h>
+
+namespace google {
+namespace protobuf {
+namespace compiler {
+namespace cpp {
+
+class PrimitiveFieldGenerator : public FieldGenerator {
+ public:
+  explicit PrimitiveFieldGenerator(const FieldDescriptor* descriptor);
+  ~PrimitiveFieldGenerator();
+
+  // implements FieldGenerator ---------------------------------------
+  void GeneratePrivateMembers(io::Printer* printer) const;
+  void GenerateAccessorDeclarations(io::Printer* printer) const;
+  void GenerateInlineAccessorDefinitions(io::Printer* printer) const;
+  void GenerateClearingCode(io::Printer* printer) const;
+  void GenerateMergingCode(io::Printer* printer) const;
+  void GenerateSwappingCode(io::Printer* printer) const;
+  void GenerateConstructorCode(io::Printer* printer) const;
+  void GenerateMergeFromCodedStream(io::Printer* printer) const;
+  void GenerateSerializeWithCachedSizes(io::Printer* printer) const;
+  void GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const;
+  void GenerateByteSize(io::Printer* printer) const;
+
+ private:
+  const FieldDescriptor* descriptor_;
+  map<string, string> variables_;
+
+  GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(PrimitiveFieldGenerator);
+};
+
+class RepeatedPrimitiveFieldGenerator : public FieldGenerator {
+ public:
+  explicit RepeatedPrimitiveFieldGenerator(const FieldDescriptor* descriptor);
+  ~RepeatedPrimitiveFieldGenerator();
+
+  // implements FieldGenerator ---------------------------------------
+  void GeneratePrivateMembers(io::Printer* printer) const;
+  void GenerateAccessorDeclarations(io::Printer* printer) const;
+  void GenerateInlineAccessorDefinitions(io::Printer* printer) const;
+  void GenerateClearingCode(io::Printer* printer) const;
+  void GenerateMergingCode(io::Printer* printer) const;
+  void GenerateSwappingCode(io::Printer* printer) const;
+  void GenerateConstructorCode(io::Printer* printer) const;
+  void GenerateMergeFromCodedStream(io::Printer* printer) const;
+  void GenerateMergeFromCodedStreamWithPacking(io::Printer* printer) const;
+  void GenerateSerializeWithCachedSizes(io::Printer* printer) const;
+  void GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const;
+  void GenerateByteSize(io::Printer* printer) const;
+
+ private:
+  const FieldDescriptor* descriptor_;
+  map<string, string> variables_;
+
+  GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RepeatedPrimitiveFieldGenerator);
+};
+
+} // namespace cpp
+} // namespace compiler
+} // namespace protobuf
+
+} // namespace google
+#endif // GOOGLE_PROTOBUF_COMPILER_CPP_PRIMITIVE_FIELD_H__
diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_service.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_service.cc
new file mode 100644
index 0000000000..c282568354
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_service.cc
@@ -0,0 +1,334 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc. All rights reserved.
+// http://code.google.com/p/protobuf/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +ServiceGenerator::ServiceGenerator(const ServiceDescriptor* descriptor, + const string& dllexport_decl) + : descriptor_(descriptor) { + vars_["classname"] = descriptor_->name(); + vars_["full_name"] = descriptor_->full_name(); + if (dllexport_decl.empty()) { + vars_["dllexport"] = ""; + } else { + vars_["dllexport"] = dllexport_decl + " "; + } +} + +ServiceGenerator::~ServiceGenerator() {} + +void ServiceGenerator::GenerateDeclarations(io::Printer* printer) { + // Forward-declare the stub type. 
+ printer->Print(vars_, + "class $classname$_Stub;\n" + "\n"); + + GenerateInterface(printer); + GenerateStubDefinition(printer); +} + +void ServiceGenerator::GenerateInterface(io::Printer* printer) { + printer->Print(vars_, + "class $dllexport$$classname$ : public ::google::protobuf::Service {\n" + " protected:\n" + " // This class should be treated as an abstract interface.\n" + " inline $classname$() {};\n" + " public:\n" + " virtual ~$classname$();\n"); + printer->Indent(); + + printer->Print(vars_, + "\n" + "typedef $classname$_Stub Stub;\n" + "\n" + "static const ::google::protobuf::ServiceDescriptor* descriptor();\n" + "\n"); + + GenerateMethodSignatures(VIRTUAL, printer); + + printer->Print( + "\n" + "// implements Service ----------------------------------------------\n" + "\n" + "const ::google::protobuf::ServiceDescriptor* GetDescriptor();\n" + "void CallMethod(const ::google::protobuf::MethodDescriptor* method,\n" + " ::google::protobuf::RpcController* controller,\n" + " const ::google::protobuf::Message* request,\n" + " ::google::protobuf::Message* response,\n" + " ::google::protobuf::Closure* done);\n" + "const ::google::protobuf::Message& GetRequestPrototype(\n" + " const ::google::protobuf::MethodDescriptor* method) const;\n" + "const ::google::protobuf::Message& GetResponsePrototype(\n" + " const ::google::protobuf::MethodDescriptor* method) const;\n"); + + printer->Outdent(); + printer->Print(vars_, + "\n" + " private:\n" + " GOOGLE_DISALLOW_EVIL_CONSTRUCTORS($classname$);\n" + "};\n" + "\n"); +} + +void ServiceGenerator::GenerateStubDefinition(io::Printer* printer) { + printer->Print(vars_, + "class $dllexport$$classname$_Stub : public $classname$ {\n" + " public:\n"); + + printer->Indent(); + + printer->Print(vars_, + "$classname$_Stub(::google::protobuf::RpcChannel* channel);\n" + "$classname$_Stub(::google::protobuf::RpcChannel* channel,\n" + " ::google::protobuf::Service::ChannelOwnership ownership);\n" + "~$classname$_Stub();\n" + "\n" + "inline ::google::protobuf::RpcChannel* channel() { return channel_; }\n" + "\n" + "// implements $classname$ ------------------------------------------\n" + "\n"); + + GenerateMethodSignatures(NON_VIRTUAL, printer); + + printer->Outdent(); + printer->Print(vars_, + " private:\n" + " ::google::protobuf::RpcChannel* channel_;\n" + " bool owns_channel_;\n" + " GOOGLE_DISALLOW_EVIL_CONSTRUCTORS($classname$_Stub);\n" + "};\n" + "\n"); +} + +void ServiceGenerator::GenerateMethodSignatures( + VirtualOrNon virtual_or_non, io::Printer* printer) { + for (int i = 0; i < descriptor_->method_count(); i++) { + const MethodDescriptor* method = descriptor_->method(i); + map sub_vars; + sub_vars["name"] = method->name(); + sub_vars["input_type"] = ClassName(method->input_type(), true); + sub_vars["output_type"] = ClassName(method->output_type(), true); + sub_vars["virtual"] = virtual_or_non == VIRTUAL ? 
"virtual " : ""; + + printer->Print(sub_vars, + "$virtual$void $name$(::google::protobuf::RpcController* controller,\n" + " const $input_type$* request,\n" + " $output_type$* response,\n" + " ::google::protobuf::Closure* done);\n"); + } +} + +// =================================================================== + +void ServiceGenerator::GenerateDescriptorInitializer( + io::Printer* printer, int index) { + map vars; + vars["classname"] = descriptor_->name(); + vars["index"] = SimpleItoa(index); + + printer->Print(vars, + "$classname$_descriptor_ = file->service($index$);\n"); +} + +// =================================================================== + +void ServiceGenerator::GenerateImplementation(io::Printer* printer) { + printer->Print(vars_, + "$classname$::~$classname$() {}\n" + "\n" + "const ::google::protobuf::ServiceDescriptor* $classname$::descriptor() {\n" + " protobuf_AssignDescriptorsOnce();\n" + " return $classname$_descriptor_;\n" + "}\n" + "\n" + "const ::google::protobuf::ServiceDescriptor* $classname$::GetDescriptor() {\n" + " protobuf_AssignDescriptorsOnce();\n" + " return $classname$_descriptor_;\n" + "}\n" + "\n"); + + // Generate methods of the interface. + GenerateNotImplementedMethods(printer); + GenerateCallMethod(printer); + GenerateGetPrototype(REQUEST, printer); + GenerateGetPrototype(RESPONSE, printer); + + // Generate stub implementation. + printer->Print(vars_, + "$classname$_Stub::$classname$_Stub(::google::protobuf::RpcChannel* channel)\n" + " : channel_(channel), owns_channel_(false) {}\n" + "$classname$_Stub::$classname$_Stub(\n" + " ::google::protobuf::RpcChannel* channel,\n" + " ::google::protobuf::Service::ChannelOwnership ownership)\n" + " : channel_(channel),\n" + " owns_channel_(ownership == ::google::protobuf::Service::STUB_OWNS_CHANNEL) {}\n" + "$classname$_Stub::~$classname$_Stub() {\n" + " if (owns_channel_) delete channel_;\n" + "}\n" + "\n"); + + GenerateStubMethods(printer); +} + +void ServiceGenerator::GenerateNotImplementedMethods(io::Printer* printer) { + for (int i = 0; i < descriptor_->method_count(); i++) { + const MethodDescriptor* method = descriptor_->method(i); + map sub_vars; + sub_vars["classname"] = descriptor_->name(); + sub_vars["name"] = method->name(); + sub_vars["index"] = SimpleItoa(i); + sub_vars["input_type"] = ClassName(method->input_type(), true); + sub_vars["output_type"] = ClassName(method->output_type(), true); + + printer->Print(sub_vars, + "void $classname$::$name$(::google::protobuf::RpcController* controller,\n" + " const $input_type$*,\n" + " $output_type$*,\n" + " ::google::protobuf::Closure* done) {\n" + " controller->SetFailed(\"Method $name$() not implemented.\");\n" + " done->Run();\n" + "}\n" + "\n"); + } +} + +void ServiceGenerator::GenerateCallMethod(io::Printer* printer) { + printer->Print(vars_, + "void $classname$::CallMethod(const ::google::protobuf::MethodDescriptor* method,\n" + " ::google::protobuf::RpcController* controller,\n" + " const ::google::protobuf::Message* request,\n" + " ::google::protobuf::Message* response,\n" + " ::google::protobuf::Closure* done) {\n" + " GOOGLE_DCHECK_EQ(method->service(), $classname$_descriptor_);\n" + " switch(method->index()) {\n"); + + for (int i = 0; i < descriptor_->method_count(); i++) { + const MethodDescriptor* method = descriptor_->method(i); + map sub_vars; + sub_vars["name"] = method->name(); + sub_vars["index"] = SimpleItoa(i); + sub_vars["input_type"] = ClassName(method->input_type(), true); + sub_vars["output_type"] = 
ClassName(method->output_type(), true); + + // Note: down_cast does not work here because it only works on pointers, + // not references. + printer->Print(sub_vars, + " case $index$:\n" + " $name$(controller,\n" + " ::google::protobuf::down_cast(request),\n" + " ::google::protobuf::down_cast< $output_type$*>(response),\n" + " done);\n" + " break;\n"); + } + + printer->Print(vars_, + " default:\n" + " GOOGLE_LOG(FATAL) << \"Bad method index; this should never happen.\";\n" + " break;\n" + " }\n" + "}\n" + "\n"); +} + +void ServiceGenerator::GenerateGetPrototype(RequestOrResponse which, + io::Printer* printer) { + if (which == REQUEST) { + printer->Print(vars_, + "const ::google::protobuf::Message& $classname$::GetRequestPrototype(\n"); + } else { + printer->Print(vars_, + "const ::google::protobuf::Message& $classname$::GetResponsePrototype(\n"); + } + + printer->Print(vars_, + " const ::google::protobuf::MethodDescriptor* method) const {\n" + " GOOGLE_DCHECK_EQ(method->service(), descriptor());\n" + " switch(method->index()) {\n"); + + for (int i = 0; i < descriptor_->method_count(); i++) { + const MethodDescriptor* method = descriptor_->method(i); + const Descriptor* type = + (which == REQUEST) ? method->input_type() : method->output_type(); + + map sub_vars; + sub_vars["index"] = SimpleItoa(i); + sub_vars["type"] = ClassName(type, true); + + printer->Print(sub_vars, + " case $index$:\n" + " return $type$::default_instance();\n"); + } + + printer->Print(vars_, + " default:\n" + " GOOGLE_LOG(FATAL) << \"Bad method index; this should never happen.\";\n" + " return *reinterpret_cast< ::google::protobuf::Message*>(NULL);\n" + " }\n" + "}\n" + "\n"); +} + +void ServiceGenerator::GenerateStubMethods(io::Printer* printer) { + for (int i = 0; i < descriptor_->method_count(); i++) { + const MethodDescriptor* method = descriptor_->method(i); + map sub_vars; + sub_vars["classname"] = descriptor_->name(); + sub_vars["name"] = method->name(); + sub_vars["index"] = SimpleItoa(i); + sub_vars["input_type"] = ClassName(method->input_type(), true); + sub_vars["output_type"] = ClassName(method->output_type(), true); + + printer->Print(sub_vars, + "void $classname$_Stub::$name$(::google::protobuf::RpcController* controller,\n" + " const $input_type$* request,\n" + " $output_type$* response,\n" + " ::google::protobuf::Closure* done) {\n" + " channel_->CallMethod(descriptor()->method($index$),\n" + " controller, request, response, done);\n" + "}\n"); + } +} + +} // namespace cpp +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_service.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_service.h new file mode 100644 index 0000000000..10e9dd3cd2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_service.h @@ -0,0 +1,118 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
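// Editor's worked example (hypothetical service, not part of the vendored
// file): for "service SearchService { rpc Find(FindRequest) returns
// (FindReply); }" the CallMethod() template above expands roughly to:
//
//   void SearchService::CallMethod(const ::google::protobuf::MethodDescriptor* method,
//                                  ::google::protobuf::RpcController* controller,
//                                  const ::google::protobuf::Message* request,
//                                  ::google::protobuf::Message* response,
//                                  ::google::protobuf::Closure* done) {
//     GOOGLE_DCHECK_EQ(method->service(), SearchService_descriptor_);
//     switch(method->index()) {
//       case 0:
//         Find(controller,
//              ::google::protobuf::down_cast<const FindRequest*>(request),
//              ::google::protobuf::down_cast< FindReply*>(response),
//              done);
//         break;
//       default:
//         GOOGLE_LOG(FATAL) << "Bad method index; this should never happen.";
//         break;
//     }
//   }
//
// while each generated stub method simply forwards to
// channel_->CallMethod(descriptor()->method(0), controller, request, response, done).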
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifndef GOOGLE_PROTOBUF_COMPILER_CPP_SERVICE_H__ +#define GOOGLE_PROTOBUF_COMPILER_CPP_SERVICE_H__ + +#include +#include +#include +#include + +namespace google { +namespace protobuf { + namespace io { + class Printer; // printer.h + } +} + +namespace protobuf { +namespace compiler { +namespace cpp { + +class ServiceGenerator { + public: + // See generator.cc for the meaning of dllexport_decl. + explicit ServiceGenerator(const ServiceDescriptor* descriptor, + const string& dllexport_decl); + ~ServiceGenerator(); + + // Header stuff. + + // Generate the class definitions for the service's interface and the + // stub implementation. + void GenerateDeclarations(io::Printer* printer); + + // Source file stuff. + + // Generate code that initializes the global variable storing the service's + // descriptor. + void GenerateDescriptorInitializer(io::Printer* printer, int index); + + // Generate implementations of everything declared by GenerateDeclarations(). + void GenerateImplementation(io::Printer* printer); + + private: + enum RequestOrResponse { REQUEST, RESPONSE }; + enum VirtualOrNon { VIRTUAL, NON_VIRTUAL }; + + // Header stuff. + + // Generate the service abstract interface. + void GenerateInterface(io::Printer* printer); + + // Generate the stub class definition. + void GenerateStubDefinition(io::Printer* printer); + + // Prints signatures for all methods in the + void GenerateMethodSignatures(VirtualOrNon virtual_or_non, + io::Printer* printer); + + // Source file stuff. + + // Generate the default implementations of the service methods, which + // produce a "not implemented" error. + void GenerateNotImplementedMethods(io::Printer* printer); + + // Generate the CallMethod() method of the service. + void GenerateCallMethod(io::Printer* printer); + + // Generate the Get{Request,Response}Prototype() methods. + void GenerateGetPrototype(RequestOrResponse which, io::Printer* printer); + + // Generate the stub's implementations of the service methods. 
+ void GenerateStubMethods(io::Printer* printer); + + const ServiceDescriptor* descriptor_; + map vars_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ServiceGenerator); +}; + +} // namespace cpp +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_CPP_SERVICE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_string_field.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_string_field.cc new file mode 100644 index 0000000000..8d611b690b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_string_field.cc @@ -0,0 +1,453 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +namespace { + +void SetStringVariables(const FieldDescriptor* descriptor, + map* variables) { + SetCommonFieldVariables(descriptor, variables); + (*variables)["default"] = DefaultValue(descriptor); + (*variables)["default_variable"] = descriptor->default_value_string().empty() + ? "::google::protobuf::internal::kEmptyString" + : "_default_" + FieldName(descriptor) + "_"; + (*variables)["pointer_type"] = + descriptor->type() == FieldDescriptor::TYPE_BYTES ? 
"void" : "char"; +} + +} // namespace + +// =================================================================== + +StringFieldGenerator:: +StringFieldGenerator(const FieldDescriptor* descriptor) + : descriptor_(descriptor) { + SetStringVariables(descriptor, &variables_); +} + +StringFieldGenerator::~StringFieldGenerator() {} + +void StringFieldGenerator:: +GeneratePrivateMembers(io::Printer* printer) const { + printer->Print(variables_, "::std::string* $name$_;\n"); + if (!descriptor_->default_value_string().empty()) { + printer->Print(variables_, "static const ::std::string $default_variable$;\n"); + } +} + +void StringFieldGenerator:: +GenerateAccessorDeclarations(io::Printer* printer) const { + // If we're using StringFieldGenerator for a field with a ctype, it's + // because that ctype isn't actually implemented. In particular, this is + // true of ctype=CORD and ctype=STRING_PIECE in the open source release. + // We aren't releasing Cord because it has too many Google-specific + // dependencies and we aren't releasing StringPiece because it's hardly + // useful outside of Google and because it would get confusing to have + // multiple instances of the StringPiece class in different libraries (PCRE + // already includes it for their C++ bindings, which came from Google). + // + // In any case, we make all the accessors private while still actually + // using a string to represent the field internally. This way, we can + // guarantee that if we do ever implement the ctype, it won't break any + // existing users who might be -- for whatever reason -- already using .proto + // files that applied the ctype. The field can still be accessed via the + // reflection interface since the reflection interface is independent of + // the string's underlying representation. 
+ if (descriptor_->options().ctype() != FieldOptions::STRING) { + printer->Outdent(); + printer->Print( + " private:\n" + " // Hidden due to unknown ctype option.\n"); + printer->Indent(); + } + + printer->Print(variables_, + "inline const ::std::string& $name$() const$deprecation$;\n" + "inline void set_$name$(const ::std::string& value)$deprecation$;\n" + "inline void set_$name$(const char* value)$deprecation$;\n" + "inline void set_$name$(const $pointer_type$* value, size_t size)" + "$deprecation$;\n" + "inline ::std::string* mutable_$name$()$deprecation$;\n" + "inline ::std::string* release_$name$()$deprecation$;\n"); + + if (descriptor_->options().ctype() != FieldOptions::STRING) { + printer->Outdent(); + printer->Print(" public:\n"); + printer->Indent(); + } +} + +void StringFieldGenerator:: +GenerateInlineAccessorDefinitions(io::Printer* printer) const { + printer->Print(variables_, + "inline const ::std::string& $classname$::$name$() const {\n" + " return *$name$_;\n" + "}\n" + "inline void $classname$::set_$name$(const ::std::string& value) {\n" + " set_has_$name$();\n" + " if ($name$_ == &$default_variable$) {\n" + " $name$_ = new ::std::string;\n" + " }\n" + " $name$_->assign(value);\n" + "}\n" + "inline void $classname$::set_$name$(const char* value) {\n" + " set_has_$name$();\n" + " if ($name$_ == &$default_variable$) {\n" + " $name$_ = new ::std::string;\n" + " }\n" + " $name$_->assign(value);\n" + "}\n" + "inline " + "void $classname$::set_$name$(const $pointer_type$* value, size_t size) {\n" + " set_has_$name$();\n" + " if ($name$_ == &$default_variable$) {\n" + " $name$_ = new ::std::string;\n" + " }\n" + " $name$_->assign(reinterpret_cast(value), size);\n" + "}\n" + "inline ::std::string* $classname$::mutable_$name$() {\n" + " set_has_$name$();\n" + " if ($name$_ == &$default_variable$) {\n"); + if (descriptor_->default_value_string().empty()) { + printer->Print(variables_, + " $name$_ = new ::std::string;\n"); + } else { + printer->Print(variables_, + " $name$_ = new ::std::string($default_variable$);\n"); + } + printer->Print(variables_, + " }\n" + " return $name$_;\n" + "}\n" + "inline ::std::string* $classname$::release_$name$() {\n" + " clear_has_$name$();\n" + " if ($name$_ == &$default_variable$) {\n" + " return NULL;\n" + " } else {\n" + " ::std::string* temp = $name$_;\n" + " $name$_ = const_cast< ::std::string*>(&$default_variable$);\n" + " return temp;\n" + " }\n" + "}\n"); +} + +void StringFieldGenerator:: +GenerateNonInlineAccessorDefinitions(io::Printer* printer) const { + if (!descriptor_->default_value_string().empty()) { + printer->Print(variables_, + "const ::std::string $classname$::$default_variable$($default$);\n"); + } +} + +void StringFieldGenerator:: +GenerateClearingCode(io::Printer* printer) const { + if (descriptor_->default_value_string().empty()) { + printer->Print(variables_, + "if ($name$_ != &$default_variable$) {\n" + " $name$_->clear();\n" + "}\n"); + } else { + printer->Print(variables_, + "if ($name$_ != &$default_variable$) {\n" + " $name$_->assign($default_variable$);\n" + "}\n"); + } +} + +void StringFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + printer->Print(variables_, "set_$name$(from.$name$());\n"); +} + +void StringFieldGenerator:: +GenerateSwappingCode(io::Printer* printer) const { + printer->Print(variables_, "std::swap($name$_, other->$name$_);\n"); +} + +void StringFieldGenerator:: +GenerateConstructorCode(io::Printer* printer) const { + printer->Print(variables_, + "$name$_ = const_cast< 
::std::string*>(&$default_variable$);\n"); +} + +void StringFieldGenerator:: +GenerateDestructorCode(io::Printer* printer) const { + printer->Print(variables_, + "if ($name$_ != &$default_variable$) {\n" + " delete $name$_;\n" + "}\n"); +} + +void StringFieldGenerator:: +GenerateMergeFromCodedStream(io::Printer* printer) const { + printer->Print(variables_, + "DO_(::google::protobuf::internal::WireFormatLite::Read$declared_type$(\n" + " input, this->mutable_$name$()));\n"); + if (HasUtf8Verification(descriptor_->file()) && + descriptor_->type() == FieldDescriptor::TYPE_STRING) { + printer->Print(variables_, + "::google::protobuf::internal::WireFormat::VerifyUTF8String(\n" + " this->$name$().data(), this->$name$().length(),\n" + " ::google::protobuf::internal::WireFormat::PARSE);\n"); + } +} + +void StringFieldGenerator:: +GenerateSerializeWithCachedSizes(io::Printer* printer) const { + if (HasUtf8Verification(descriptor_->file()) && + descriptor_->type() == FieldDescriptor::TYPE_STRING) { + printer->Print(variables_, + "::google::protobuf::internal::WireFormat::VerifyUTF8String(\n" + " this->$name$().data(), this->$name$().length(),\n" + " ::google::protobuf::internal::WireFormat::SERIALIZE);\n"); + } + printer->Print(variables_, + "::google::protobuf::internal::WireFormatLite::Write$declared_type$(\n" + " $number$, this->$name$(), output);\n"); +} + +void StringFieldGenerator:: +GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const { + if (HasUtf8Verification(descriptor_->file()) && + descriptor_->type() == FieldDescriptor::TYPE_STRING) { + printer->Print(variables_, + "::google::protobuf::internal::WireFormat::VerifyUTF8String(\n" + " this->$name$().data(), this->$name$().length(),\n" + " ::google::protobuf::internal::WireFormat::SERIALIZE);\n"); + } + printer->Print(variables_, + "target =\n" + " ::google::protobuf::internal::WireFormatLite::Write$declared_type$ToArray(\n" + " $number$, this->$name$(), target);\n"); +} + +void StringFieldGenerator:: +GenerateByteSize(io::Printer* printer) const { + printer->Print(variables_, + "total_size += $tag_size$ +\n" + " ::google::protobuf::internal::WireFormatLite::$declared_type$Size(\n" + " this->$name$());\n"); +} + +// =================================================================== + +RepeatedStringFieldGenerator:: +RepeatedStringFieldGenerator(const FieldDescriptor* descriptor) + : descriptor_(descriptor) { + SetStringVariables(descriptor, &variables_); +} + +RepeatedStringFieldGenerator::~RepeatedStringFieldGenerator() {} + +void RepeatedStringFieldGenerator:: +GeneratePrivateMembers(io::Printer* printer) const { + printer->Print(variables_, + "::google::protobuf::RepeatedPtrField< ::std::string> $name$_;\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateAccessorDeclarations(io::Printer* printer) const { + // See comment above about unknown ctypes. 
+ if (descriptor_->options().ctype() != FieldOptions::STRING) { + printer->Outdent(); + printer->Print( + " private:\n" + " // Hidden due to unknown ctype option.\n"); + printer->Indent(); + } + + printer->Print(variables_, + "inline const ::std::string& $name$(int index) const$deprecation$;\n" + "inline ::std::string* mutable_$name$(int index)$deprecation$;\n" + "inline void set_$name$(int index, const ::std::string& value)$deprecation$;\n" + "inline void set_$name$(int index, const char* value)$deprecation$;\n" + "inline " + "void set_$name$(int index, const $pointer_type$* value, size_t size)" + "$deprecation$;\n" + "inline ::std::string* add_$name$()$deprecation$;\n" + "inline void add_$name$(const ::std::string& value)$deprecation$;\n" + "inline void add_$name$(const char* value)$deprecation$;\n" + "inline void add_$name$(const $pointer_type$* value, size_t size)" + "$deprecation$;\n"); + + printer->Print(variables_, + "inline const ::google::protobuf::RepeatedPtrField< ::std::string>& $name$() const" + "$deprecation$;\n" + "inline ::google::protobuf::RepeatedPtrField< ::std::string>* mutable_$name$()" + "$deprecation$;\n"); + + if (descriptor_->options().ctype() != FieldOptions::STRING) { + printer->Outdent(); + printer->Print(" public:\n"); + printer->Indent(); + } +} + +void RepeatedStringFieldGenerator:: +GenerateInlineAccessorDefinitions(io::Printer* printer) const { + printer->Print(variables_, + "inline const ::std::string& $classname$::$name$(int index) const {\n" + " return $name$_.Get(index);\n" + "}\n" + "inline ::std::string* $classname$::mutable_$name$(int index) {\n" + " return $name$_.Mutable(index);\n" + "}\n" + "inline void $classname$::set_$name$(int index, const ::std::string& value) {\n" + " $name$_.Mutable(index)->assign(value);\n" + "}\n" + "inline void $classname$::set_$name$(int index, const char* value) {\n" + " $name$_.Mutable(index)->assign(value);\n" + "}\n" + "inline void " + "$classname$::set_$name$" + "(int index, const $pointer_type$* value, size_t size) {\n" + " $name$_.Mutable(index)->assign(\n" + " reinterpret_cast(value), size);\n" + "}\n" + "inline ::std::string* $classname$::add_$name$() {\n" + " return $name$_.Add();\n" + "}\n" + "inline void $classname$::add_$name$(const ::std::string& value) {\n" + " $name$_.Add()->assign(value);\n" + "}\n" + "inline void $classname$::add_$name$(const char* value) {\n" + " $name$_.Add()->assign(value);\n" + "}\n" + "inline void " + "$classname$::add_$name$(const $pointer_type$* value, size_t size) {\n" + " $name$_.Add()->assign(reinterpret_cast(value), size);\n" + "}\n"); + printer->Print(variables_, + "inline const ::google::protobuf::RepeatedPtrField< ::std::string>&\n" + "$classname$::$name$() const {\n" + " return $name$_;\n" + "}\n" + "inline ::google::protobuf::RepeatedPtrField< ::std::string>*\n" + "$classname$::mutable_$name$() {\n" + " return &$name$_;\n" + "}\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateClearingCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_.Clear();\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_.MergeFrom(from.$name$_);\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateSwappingCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_.Swap(&other->$name$_);\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateConstructorCode(io::Printer* printer) const { + // Not needed for repeated fields. 
+} + +void RepeatedStringFieldGenerator:: +GenerateMergeFromCodedStream(io::Printer* printer) const { + printer->Print(variables_, + "DO_(::google::protobuf::internal::WireFormatLite::Read$declared_type$(\n" + " input, this->add_$name$()));\n"); + if (HasUtf8Verification(descriptor_->file()) && + descriptor_->type() == FieldDescriptor::TYPE_STRING) { + printer->Print(variables_, + "::google::protobuf::internal::WireFormat::VerifyUTF8String(\n" + " this->$name$(0).data(), this->$name$(0).length(),\n" + " ::google::protobuf::internal::WireFormat::PARSE);\n"); + } +} + +void RepeatedStringFieldGenerator:: +GenerateSerializeWithCachedSizes(io::Printer* printer) const { + printer->Print(variables_, + "for (int i = 0; i < this->$name$_size(); i++) {\n"); + if (HasUtf8Verification(descriptor_->file()) && + descriptor_->type() == FieldDescriptor::TYPE_STRING) { + printer->Print(variables_, + "::google::protobuf::internal::WireFormat::VerifyUTF8String(\n" + " this->$name$(i).data(), this->$name$(i).length(),\n" + " ::google::protobuf::internal::WireFormat::SERIALIZE);\n"); + } + printer->Print(variables_, + " ::google::protobuf::internal::WireFormatLite::Write$declared_type$(\n" + " $number$, this->$name$(i), output);\n" + "}\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const { + printer->Print(variables_, + "for (int i = 0; i < this->$name$_size(); i++) {\n"); + if (HasUtf8Verification(descriptor_->file()) && + descriptor_->type() == FieldDescriptor::TYPE_STRING) { + printer->Print(variables_, + " ::google::protobuf::internal::WireFormat::VerifyUTF8String(\n" + " this->$name$(i).data(), this->$name$(i).length(),\n" + " ::google::protobuf::internal::WireFormat::SERIALIZE);\n"); + } + printer->Print(variables_, + " target = ::google::protobuf::internal::WireFormatLite::\n" + " Write$declared_type$ToArray($number$, this->$name$(i), target);\n" + "}\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateByteSize(io::Printer* printer) const { + printer->Print(variables_, + "total_size += $tag_size$ * this->$name$_size();\n" + "for (int i = 0; i < this->$name$_size(); i++) {\n" + " total_size += ::google::protobuf::internal::WireFormatLite::$declared_type$Size(\n" + " this->$name$(i));\n" + "}\n"); +} + +} // namespace cpp +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_string_field.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_string_field.h new file mode 100644 index 0000000000..7f45107de6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_string_field.h @@ -0,0 +1,104 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. 
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Author: kenton@google.com (Kenton Varda)
+// Based on original Protocol Buffers design by
+// Sanjay Ghemawat, Jeff Dean, and others.
+
+#ifndef GOOGLE_PROTOBUF_COMPILER_CPP_STRING_FIELD_H__
+#define GOOGLE_PROTOBUF_COMPILER_CPP_STRING_FIELD_H__
+
+#include <map>
+#include <string>
+#include <google/protobuf/compiler/cpp/cpp_field.h>
+
+namespace google {
+namespace protobuf {
+namespace compiler {
+namespace cpp {
+
+class StringFieldGenerator : public FieldGenerator {
+ public:
+  explicit StringFieldGenerator(const FieldDescriptor* descriptor);
+  ~StringFieldGenerator();
+
+  // implements FieldGenerator ---------------------------------------
+  void GeneratePrivateMembers(io::Printer* printer) const;
+  void GenerateAccessorDeclarations(io::Printer* printer) const;
+  void GenerateInlineAccessorDefinitions(io::Printer* printer) const;
+  void GenerateNonInlineAccessorDefinitions(io::Printer* printer) const;
+  void GenerateClearingCode(io::Printer* printer) const;
+  void GenerateMergingCode(io::Printer* printer) const;
+  void GenerateSwappingCode(io::Printer* printer) const;
+  void GenerateConstructorCode(io::Printer* printer) const;
+  void GenerateDestructorCode(io::Printer* printer) const;
+  void GenerateMergeFromCodedStream(io::Printer* printer) const;
+  void GenerateSerializeWithCachedSizes(io::Printer* printer) const;
+  void GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const;
+  void GenerateByteSize(io::Printer* printer) const;
+
+ private:
+  const FieldDescriptor* descriptor_;
+  map<string, string> variables_;
+
+  GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(StringFieldGenerator);
+};
+
+class RepeatedStringFieldGenerator : public FieldGenerator {
+ public:
+  explicit RepeatedStringFieldGenerator(const FieldDescriptor* descriptor);
+  ~RepeatedStringFieldGenerator();
+
+  // implements FieldGenerator ---------------------------------------
+  void GeneratePrivateMembers(io::Printer* printer) const;
+  void GenerateAccessorDeclarations(io::Printer* printer) const;
+  void GenerateInlineAccessorDefinitions(io::Printer* printer) const;
+  void GenerateClearingCode(io::Printer* printer) const;
+  void GenerateMergingCode(io::Printer* printer) const;
+  void GenerateSwappingCode(io::Printer* printer) const;
+  void GenerateConstructorCode(io::Printer* printer) const;
+  void GenerateMergeFromCodedStream(io::Printer* printer) const;
+  void GenerateSerializeWithCachedSizes(io::Printer* printer) const;
+  void GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const;
+  void GenerateByteSize(io::Printer* printer) const;
+
+ private:
+  const FieldDescriptor* descriptor_;
+  map<string, string>
variables_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RepeatedStringFieldGenerator); +}; + +} // namespace cpp +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_CPP_STRING_FIELD_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_test_bad_identifiers.proto b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_test_bad_identifiers.proto new file mode 100644 index 0000000000..54d830fca2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_test_bad_identifiers.proto @@ -0,0 +1,113 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This file tests that various identifiers work as field and type names even +// though the same identifiers are used internally by the C++ code generator. + + +// Some generic_services option(s) added automatically. +// See: http://go/proto2-generic-services-default +option cc_generic_services = true; // auto-added + +// We don't put this in a package within proto2 because we need to make sure +// that the generated code doesn't depend on being in the proto2 namespace. +package protobuf_unittest; + +// Test that fields can have names like "input" and "i" which are also used +// internally by the code generator for local variables. 
+message TestConflictingSymbolNames { + message BuildDescriptors {} + message TypeTraits {} + + optional int32 input = 1; + optional int32 output = 2; + optional string length = 3; + repeated int32 i = 4; + repeated string new_element = 5 [ctype=STRING_PIECE]; + optional int32 total_size = 6; + optional int32 tag = 7; + + enum TestEnum { FOO = 1; } + message Data1 { repeated int32 data = 1; } + message Data2 { repeated TestEnum data = 1; } + message Data3 { repeated string data = 1; } + message Data4 { repeated Data4 data = 1; } + message Data5 { repeated string data = 1 [ctype=STRING_PIECE]; } + message Data6 { repeated string data = 1 [ctype=CORD]; } + + optional int32 source = 8; + optional int32 value = 9; + optional int32 file = 10; + optional int32 from = 11; + optional int32 handle_uninterpreted = 12; + repeated int32 index = 13; + optional int32 controller = 14; + optional int32 already_here = 15; + + optional uint32 uint32 = 16; + optional uint64 uint64 = 17; + optional string string = 18; + optional int32 memset = 19; + optional int32 int32 = 20; + optional int64 int64 = 21; + + optional uint32 cached_size = 22; + optional uint32 extensions = 23; + optional uint32 bit = 24; + optional uint32 bits = 25; + optional uint32 offsets = 26; + optional uint32 reflection = 27; + + message Cord {} + optional string some_cord = 28 [ctype=CORD]; + + message StringPiece {} + optional string some_string_piece = 29 [ctype=STRING_PIECE]; + + // Some keywords. + optional uint32 int = 30; + optional uint32 friend = 31; + + // The generator used to #define a macro called "DO" inside the .cc file. + message DO {} + optional DO do = 32; + + extensions 1000 to max; +} + +message DummyMessage {} + +service TestConflictingMethodNames { + rpc Closure(DummyMessage) returns (DummyMessage); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_unittest.cc new file mode 100644 index 0000000000..301a7ce677 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/cpp/cpp_unittest.cc @@ -0,0 +1,1281 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// To test the code generator, we actually use it to generate code for +// google/protobuf/unittest.proto, then test that. This means that we +// are actually testing the parser and other parts of the system at the same +// time, and that problems in the generator may show up as compile-time errors +// rather than unittest failures, which may be surprising. However, testing +// the output of the C++ generator directly would be very hard. We can't very +// well just check it against golden files since those files would have to be +// updated for any small change; such a test would be very brittle and probably +// not very helpful. What we really want to test is that the code compiles +// correctly and produces the interfaces we expect, which is why this test +// is written this way. + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace cpp { + +// Can't use an anonymous namespace here due to brokenness of Tru64 compiler. +namespace cpp_unittest { + + +class MockErrorCollector : public MultiFileErrorCollector { + public: + MockErrorCollector() {} + ~MockErrorCollector() {} + + string text_; + + // implements ErrorCollector --------------------------------------- + void AddError(const string& filename, int line, int column, + const string& message) { + strings::SubstituteAndAppend(&text_, "$0:$1:$2: $3\n", + filename, line, column, message); + } +}; + +#ifndef PROTOBUF_TEST_NO_DESCRIPTORS + +// Test that generated code has proper descriptors: +// Parse a descriptor directly (using google::protobuf::compiler::Importer) and +// compare it to the one that was produced by generated code. +TEST(GeneratedDescriptorTest, IdenticalDescriptors) { + const FileDescriptor* generated_descriptor = + unittest::TestAllTypes::descriptor()->file(); + + // Set up the Importer. + MockErrorCollector error_collector; + DiskSourceTree source_tree; + source_tree.MapPath("", TestSourceDir()); + Importer importer(&source_tree, &error_collector); + + // Import (parse) unittest.proto. + const FileDescriptor* parsed_descriptor = + importer.Import("google/protobuf/unittest.proto"); + EXPECT_EQ("", error_collector.text_); + ASSERT_TRUE(parsed_descriptor != NULL); + + // Test that descriptors are generated correctly by converting them to + // FileDescriptorProtos and comparing. 
+ FileDescriptorProto generated_decsriptor_proto, parsed_descriptor_proto; + generated_descriptor->CopyTo(&generated_decsriptor_proto); + parsed_descriptor->CopyTo(&parsed_descriptor_proto); + + EXPECT_EQ(parsed_descriptor_proto.DebugString(), + generated_decsriptor_proto.DebugString()); +} + +#endif // !PROTOBUF_TEST_NO_DESCRIPTORS + +// =================================================================== + +TEST(GeneratedMessageTest, Defaults) { + // Check that all default values are set correctly in the initial message. + unittest::TestAllTypes message; + + TestUtil::ExpectClear(message); + + // Messages should return pointers to default instances until first use. + // (This is not checked by ExpectClear() since it is not actually true after + // the fields have been set and then cleared.) + EXPECT_EQ(&unittest::TestAllTypes::OptionalGroup::default_instance(), + &message.optionalgroup()); + EXPECT_EQ(&unittest::TestAllTypes::NestedMessage::default_instance(), + &message.optional_nested_message()); + EXPECT_EQ(&unittest::ForeignMessage::default_instance(), + &message.optional_foreign_message()); + EXPECT_EQ(&unittest_import::ImportMessage::default_instance(), + &message.optional_import_message()); +} + +TEST(GeneratedMessageTest, FloatingPointDefaults) { + const unittest::TestExtremeDefaultValues& extreme_default = + unittest::TestExtremeDefaultValues::default_instance(); + + EXPECT_EQ(0.0f, extreme_default.zero_float()); + EXPECT_EQ(1.0f, extreme_default.one_float()); + EXPECT_EQ(1.5f, extreme_default.small_float()); + EXPECT_EQ(-1.0f, extreme_default.negative_one_float()); + EXPECT_EQ(-1.5f, extreme_default.negative_float()); + EXPECT_EQ(2.0e8f, extreme_default.large_float()); + EXPECT_EQ(-8e-28f, extreme_default.small_negative_float()); + EXPECT_EQ(numeric_limits::infinity(), + extreme_default.inf_double()); + EXPECT_EQ(-numeric_limits::infinity(), + extreme_default.neg_inf_double()); + EXPECT_TRUE(extreme_default.nan_double() != extreme_default.nan_double()); + EXPECT_EQ(numeric_limits::infinity(), + extreme_default.inf_float()); + EXPECT_EQ(-numeric_limits::infinity(), + extreme_default.neg_inf_float()); + EXPECT_TRUE(extreme_default.nan_float() != extreme_default.nan_float()); +} + +TEST(GeneratedMessageTest, Trigraph) { + const unittest::TestExtremeDefaultValues& extreme_default = + unittest::TestExtremeDefaultValues::default_instance(); + + EXPECT_EQ("? ? ?? ?? ??? ?\?/ ?\?-", extreme_default.cpp_trigraph()); +} + +TEST(GeneratedMessageTest, Accessors) { + // Set every field to a unique value then go back and check all those + // values. + unittest::TestAllTypes message; + + TestUtil::SetAllFields(&message); + TestUtil::ExpectAllFieldsSet(message); + + TestUtil::ModifyRepeatedFields(&message); + TestUtil::ExpectRepeatedFieldsModified(message); +} + +TEST(GeneratedMessageTest, MutableStringDefault) { + // mutable_foo() for a string should return a string initialized to its + // default value. + unittest::TestAllTypes message; + + EXPECT_EQ("hello", *message.mutable_default_string()); + + // Note that the first time we call mutable_foo(), we get a newly-allocated + // string, but if we clear it and call it again, we get the same object again. + // We should verify that it has its default value in both cases. + message.set_default_string("blah"); + message.Clear(); + + EXPECT_EQ("hello", *message.mutable_default_string()); +} + +TEST(GeneratedMessageTest, ReleaseString) { + // Check that release_foo() starts out NULL, and gives us a value + // that we can delete after it's been set. 
+ unittest::TestAllTypes message; + + EXPECT_EQ(NULL, message.release_default_string()); + EXPECT_FALSE(message.has_default_string()); + EXPECT_EQ("hello", message.default_string()); + + message.set_default_string("blah"); + EXPECT_TRUE(message.has_default_string()); + string* str = message.release_default_string(); + EXPECT_FALSE(message.has_default_string()); + ASSERT_TRUE(str != NULL); + EXPECT_EQ("blah", *str); + delete str; + + EXPECT_EQ(NULL, message.release_default_string()); + EXPECT_FALSE(message.has_default_string()); + EXPECT_EQ("hello", message.default_string()); +} + +TEST(GeneratedMessageTest, ReleaseMessage) { + // Check that release_foo() starts out NULL, and gives us a value + // that we can delete after it's been set. + unittest::TestAllTypes message; + + EXPECT_EQ(NULL, message.release_optional_nested_message()); + EXPECT_FALSE(message.has_optional_nested_message()); + + message.mutable_optional_nested_message()->set_bb(1); + unittest::TestAllTypes::NestedMessage* nest = + message.release_optional_nested_message(); + EXPECT_FALSE(message.has_optional_nested_message()); + ASSERT_TRUE(nest != NULL); + EXPECT_EQ(1, nest->bb()); + delete nest; + + EXPECT_EQ(NULL, message.release_optional_nested_message()); + EXPECT_FALSE(message.has_optional_nested_message()); +} + +TEST(GeneratedMessageTest, Clear) { + // Set every field to a unique value, clear the message, then check that + // it is cleared. + unittest::TestAllTypes message; + + TestUtil::SetAllFields(&message); + message.Clear(); + TestUtil::ExpectClear(message); + + // Unlike with the defaults test, we do NOT expect that requesting embedded + // messages will return a pointer to the default instance. Instead, they + // should return the objects that were created when mutable_blah() was + // called. + EXPECT_NE(&unittest::TestAllTypes::OptionalGroup::default_instance(), + &message.optionalgroup()); + EXPECT_NE(&unittest::TestAllTypes::NestedMessage::default_instance(), + &message.optional_nested_message()); + EXPECT_NE(&unittest::ForeignMessage::default_instance(), + &message.optional_foreign_message()); + EXPECT_NE(&unittest_import::ImportMessage::default_instance(), + &message.optional_import_message()); +} + +TEST(GeneratedMessageTest, EmbeddedNullsInBytesCharStar) { + unittest::TestAllTypes message; + + const char* value = "\0lalala\0\0"; + message.set_optional_bytes(value, 9); + ASSERT_EQ(9, message.optional_bytes().size()); + EXPECT_EQ(0, memcmp(value, message.optional_bytes().data(), 9)); + + message.add_repeated_bytes(value, 9); + ASSERT_EQ(9, message.repeated_bytes(0).size()); + EXPECT_EQ(0, memcmp(value, message.repeated_bytes(0).data(), 9)); +} + +TEST(GeneratedMessageTest, ClearOneField) { + // Set every field to a unique value, then clear one value and insure that + // only that one value is cleared. + unittest::TestAllTypes message; + + TestUtil::SetAllFields(&message); + int64 original_value = message.optional_int64(); + + // Clear the field and make sure it shows up as cleared. + message.clear_optional_int64(); + EXPECT_FALSE(message.has_optional_int64()); + EXPECT_EQ(0, message.optional_int64()); + + // Other adjacent fields should not be cleared. + EXPECT_TRUE(message.has_optional_int32()); + EXPECT_TRUE(message.has_optional_uint32()); + + // Make sure if we set it again, then all fields are set. 
+ message.set_optional_int64(original_value); + TestUtil::ExpectAllFieldsSet(message); +} + +TEST(GeneratedMessageTest, StringCharStarLength) { + // Verify that we can use a char*,length to set one of the string fields. + unittest::TestAllTypes message; + message.set_optional_string("abcdef", 3); + EXPECT_EQ("abc", message.optional_string()); + + // Verify that we can use a char*,length to add to a repeated string field. + message.add_repeated_string("abcdef", 3); + EXPECT_EQ(1, message.repeated_string_size()); + EXPECT_EQ("abc", message.repeated_string(0)); + + // Verify that we can use a char*,length to set a repeated string field. + message.set_repeated_string(0, "wxyz", 2); + EXPECT_EQ("wx", message.repeated_string(0)); +} + + +TEST(GeneratedMessageTest, CopyFrom) { + unittest::TestAllTypes message1, message2; + + TestUtil::SetAllFields(&message1); + message2.CopyFrom(message1); + TestUtil::ExpectAllFieldsSet(message2); + + // Copying from self should be a no-op. + message2.CopyFrom(message2); + TestUtil::ExpectAllFieldsSet(message2); +} + + +TEST(GeneratedMessageTest, SwapWithEmpty) { + unittest::TestAllTypes message1, message2; + TestUtil::SetAllFields(&message1); + + TestUtil::ExpectAllFieldsSet(message1); + TestUtil::ExpectClear(message2); + message1.Swap(&message2); + TestUtil::ExpectAllFieldsSet(message2); + TestUtil::ExpectClear(message1); +} + +TEST(GeneratedMessageTest, SwapWithSelf) { + unittest::TestAllTypes message; + TestUtil::SetAllFields(&message); + TestUtil::ExpectAllFieldsSet(message); + message.Swap(&message); + TestUtil::ExpectAllFieldsSet(message); +} + +TEST(GeneratedMessageTest, SwapWithOther) { + unittest::TestAllTypes message1, message2; + + message1.set_optional_int32(123); + message1.set_optional_string("abc"); + message1.mutable_optional_nested_message()->set_bb(1); + message1.set_optional_nested_enum(unittest::TestAllTypes::FOO); + message1.add_repeated_int32(1); + message1.add_repeated_int32(2); + message1.add_repeated_string("a"); + message1.add_repeated_string("b"); + message1.add_repeated_nested_message()->set_bb(7); + message1.add_repeated_nested_message()->set_bb(8); + message1.add_repeated_nested_enum(unittest::TestAllTypes::FOO); + message1.add_repeated_nested_enum(unittest::TestAllTypes::BAR); + + message2.set_optional_int32(456); + message2.set_optional_string("def"); + message2.mutable_optional_nested_message()->set_bb(2); + message2.set_optional_nested_enum(unittest::TestAllTypes::BAR); + message2.add_repeated_int32(3); + message2.add_repeated_string("c"); + message2.add_repeated_nested_message()->set_bb(9); + message2.add_repeated_nested_enum(unittest::TestAllTypes::BAZ); + + message1.Swap(&message2); + + EXPECT_EQ(456, message1.optional_int32()); + EXPECT_EQ("def", message1.optional_string()); + EXPECT_EQ(2, message1.optional_nested_message().bb()); + EXPECT_EQ(unittest::TestAllTypes::BAR, message1.optional_nested_enum()); + ASSERT_EQ(1, message1.repeated_int32_size()); + EXPECT_EQ(3, message1.repeated_int32(0)); + ASSERT_EQ(1, message1.repeated_string_size()); + EXPECT_EQ("c", message1.repeated_string(0)); + ASSERT_EQ(1, message1.repeated_nested_message_size()); + EXPECT_EQ(9, message1.repeated_nested_message(0).bb()); + ASSERT_EQ(1, message1.repeated_nested_enum_size()); + EXPECT_EQ(unittest::TestAllTypes::BAZ, message1.repeated_nested_enum(0)); + + EXPECT_EQ(123, message2.optional_int32()); + EXPECT_EQ("abc", message2.optional_string()); + EXPECT_EQ(1, message2.optional_nested_message().bb()); + EXPECT_EQ(unittest::TestAllTypes::FOO, 
message2.optional_nested_enum()); + ASSERT_EQ(2, message2.repeated_int32_size()); + EXPECT_EQ(1, message2.repeated_int32(0)); + EXPECT_EQ(2, message2.repeated_int32(1)); + ASSERT_EQ(2, message2.repeated_string_size()); + EXPECT_EQ("a", message2.repeated_string(0)); + EXPECT_EQ("b", message2.repeated_string(1)); + ASSERT_EQ(2, message2.repeated_nested_message_size()); + EXPECT_EQ(7, message2.repeated_nested_message(0).bb()); + EXPECT_EQ(8, message2.repeated_nested_message(1).bb()); + ASSERT_EQ(2, message2.repeated_nested_enum_size()); + EXPECT_EQ(unittest::TestAllTypes::FOO, message2.repeated_nested_enum(0)); + EXPECT_EQ(unittest::TestAllTypes::BAR, message2.repeated_nested_enum(1)); +} + +TEST(GeneratedMessageTest, CopyConstructor) { + unittest::TestAllTypes message1; + TestUtil::SetAllFields(&message1); + + unittest::TestAllTypes message2(message1); + TestUtil::ExpectAllFieldsSet(message2); +} + +TEST(GeneratedMessageTest, CopyAssignmentOperator) { + unittest::TestAllTypes message1; + TestUtil::SetAllFields(&message1); + + unittest::TestAllTypes message2; + message2 = message1; + TestUtil::ExpectAllFieldsSet(message2); + + // Make sure that self-assignment does something sane. + message2.operator=(message2); + TestUtil::ExpectAllFieldsSet(message2); +} + +TEST(GeneratedMessageTest, UpcastCopyFrom) { + // Test the CopyFrom method that takes in the generic const Message& + // parameter. + unittest::TestAllTypes message1, message2; + + TestUtil::SetAllFields(&message1); + + const Message* source = implicit_cast(&message1); + message2.CopyFrom(*source); + + TestUtil::ExpectAllFieldsSet(message2); +} + +#ifndef PROTOBUF_TEST_NO_DESCRIPTORS + +TEST(GeneratedMessageTest, DynamicMessageCopyFrom) { + // Test copying from a DynamicMessage, which must fall back to using + // reflection. + unittest::TestAllTypes message2; + + // Construct a new version of the dynamic message via the factory. + DynamicMessageFactory factory; + scoped_ptr message1; + message1.reset(factory.GetPrototype( + unittest::TestAllTypes::descriptor())->New()); + + TestUtil::ReflectionTester reflection_tester( + unittest::TestAllTypes::descriptor()); + reflection_tester.SetAllFieldsViaReflection(message1.get()); + + message2.CopyFrom(*message1); + + TestUtil::ExpectAllFieldsSet(message2); +} + +#endif // !PROTOBUF_TEST_NO_DESCRIPTORS + +TEST(GeneratedMessageTest, NonEmptyMergeFrom) { + // Test merging with a non-empty message. Code is a modified form + // of that found in google/protobuf/reflection_ops_unittest.cc. + unittest::TestAllTypes message1, message2; + + TestUtil::SetAllFields(&message1); + + // This field will test merging into an empty spot. + message2.set_optional_int32(message1.optional_int32()); + message1.clear_optional_int32(); + + // This tests overwriting. + message2.set_optional_string(message1.optional_string()); + message1.set_optional_string("something else"); + + // This tests concatenating. 
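Before the concatenation step just below, a compact restatement of the proto2 MergeFrom() semantics this test relies on, using a hypothetical message Person rather than one of the unittest types:

// message Person { optional string name = 1; repeated int32 id = 2; }  (assumed)
Person base, overlay;
base.set_name("old");
base.add_id(1);
overlay.set_name("new");
overlay.add_id(2);
base.MergeFrom(overlay);
// Singular fields that are set in `overlay` overwrite: base.name() == "new".
// Repeated fields concatenate: base.id_size() == 2 with elements {1, 2}.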
+ message2.add_repeated_int32(message1.repeated_int32(1)); + int32 i = message1.repeated_int32(0); + message1.clear_repeated_int32(); + message1.add_repeated_int32(i); + + message1.MergeFrom(message2); + + TestUtil::ExpectAllFieldsSet(message1); +} + +#ifdef GTEST_HAS_DEATH_TEST + +TEST(GeneratedMessageTest, MergeFromSelf) { + unittest::TestAllTypes message; + EXPECT_DEATH(message.MergeFrom(message), "&from"); + EXPECT_DEATH(message.MergeFrom(implicit_cast(message)), + "&from"); +} + +#endif // GTEST_HAS_DEATH_TEST + +// Test the generated SerializeWithCachedSizesToArray(), +TEST(GeneratedMessageTest, SerializationToArray) { + unittest::TestAllTypes message1, message2; + string data; + TestUtil::SetAllFields(&message1); + int size = message1.ByteSize(); + data.resize(size); + uint8* start = reinterpret_cast(string_as_array(&data)); + uint8* end = message1.SerializeWithCachedSizesToArray(start); + EXPECT_EQ(size, end - start); + EXPECT_TRUE(message2.ParseFromString(data)); + TestUtil::ExpectAllFieldsSet(message2); + +} + +TEST(GeneratedMessageTest, PackedFieldsSerializationToArray) { + unittest::TestPackedTypes packed_message1, packed_message2; + string packed_data; + TestUtil::SetPackedFields(&packed_message1); + int packed_size = packed_message1.ByteSize(); + packed_data.resize(packed_size); + uint8* start = reinterpret_cast(string_as_array(&packed_data)); + uint8* end = packed_message1.SerializeWithCachedSizesToArray(start); + EXPECT_EQ(packed_size, end - start); + EXPECT_TRUE(packed_message2.ParseFromString(packed_data)); + TestUtil::ExpectPackedFieldsSet(packed_message2); +} + +// Test the generated SerializeWithCachedSizes() by forcing the buffer to write +// one byte at a time. +TEST(GeneratedMessageTest, SerializationToStream) { + unittest::TestAllTypes message1, message2; + TestUtil::SetAllFields(&message1); + int size = message1.ByteSize(); + string data; + data.resize(size); + { + // Allow the output stream to buffer only one byte at a time. + io::ArrayOutputStream array_stream(string_as_array(&data), size, 1); + io::CodedOutputStream output_stream(&array_stream); + message1.SerializeWithCachedSizes(&output_stream); + EXPECT_FALSE(output_stream.HadError()); + EXPECT_EQ(size, output_stream.ByteCount()); + } + EXPECT_TRUE(message2.ParseFromString(data)); + TestUtil::ExpectAllFieldsSet(message2); + +} + +TEST(GeneratedMessageTest, PackedFieldsSerializationToStream) { + unittest::TestPackedTypes message1, message2; + TestUtil::SetPackedFields(&message1); + int size = message1.ByteSize(); + string data; + data.resize(size); + { + // Allow the output stream to buffer only one byte at a time. + io::ArrayOutputStream array_stream(string_as_array(&data), size, 1); + io::CodedOutputStream output_stream(&array_stream); + message1.SerializeWithCachedSizes(&output_stream); + EXPECT_FALSE(output_stream.HadError()); + EXPECT_EQ(size, output_stream.ByteCount()); + } + EXPECT_TRUE(message2.ParseFromString(data)); + TestUtil::ExpectPackedFieldsSet(message2); +} + + +TEST(GeneratedMessageTest, Required) { + // Test that IsInitialized() returns false if required fields are missing. + unittest::TestRequired message; + + EXPECT_FALSE(message.IsInitialized()); + message.set_a(1); + EXPECT_FALSE(message.IsInitialized()); + message.set_b(2); + EXPECT_FALSE(message.IsInitialized()); + message.set_c(3); + EXPECT_TRUE(message.IsInitialized()); +} + +TEST(GeneratedMessageTest, RequiredForeign) { + // Test that IsInitialized() returns false if required fields in nested + // messages are missing. 
+ unittest::TestRequiredForeign message; + + EXPECT_TRUE(message.IsInitialized()); + + message.mutable_optional_message(); + EXPECT_FALSE(message.IsInitialized()); + + message.mutable_optional_message()->set_a(1); + message.mutable_optional_message()->set_b(2); + message.mutable_optional_message()->set_c(3); + EXPECT_TRUE(message.IsInitialized()); + + message.add_repeated_message(); + EXPECT_FALSE(message.IsInitialized()); + + message.mutable_repeated_message(0)->set_a(1); + message.mutable_repeated_message(0)->set_b(2); + message.mutable_repeated_message(0)->set_c(3); + EXPECT_TRUE(message.IsInitialized()); +} + +TEST(GeneratedMessageTest, ForeignNested) { + // Test that TestAllTypes::NestedMessage can be embedded directly into + // another message. + unittest::TestForeignNested message; + + // If this compiles and runs without crashing, it must work. We have + // nothing more to test. + unittest::TestAllTypes::NestedMessage* nested = + message.mutable_foreign_nested(); + nested->set_bb(1); +} + +TEST(GeneratedMessageTest, ReallyLargeTagNumber) { + // Test that really large tag numbers don't break anything. + unittest::TestReallyLargeTagNumber message1, message2; + string data; + + // For the most part, if this compiles and runs then we're probably good. + // (The most likely cause for failure would be if something were attempting + // to allocate a lookup table of some sort using tag numbers as the index.) + // We'll try serializing just for fun. + message1.set_a(1234); + message1.set_bb(5678); + message1.SerializeToString(&data); + EXPECT_TRUE(message2.ParseFromString(data)); + EXPECT_EQ(1234, message2.a()); + EXPECT_EQ(5678, message2.bb()); +} + +TEST(GeneratedMessageTest, MutualRecursion) { + // Test that mutually-recursive message types work. + unittest::TestMutualRecursionA message; + unittest::TestMutualRecursionA* nested = message.mutable_bb()->mutable_a(); + unittest::TestMutualRecursionA* nested2 = nested->mutable_bb()->mutable_a(); + + // Again, if the above compiles and runs, that's all we really have to + // test, but just for run we'll check that the system didn't somehow come + // up with a pointer loop... + EXPECT_NE(&message, nested); + EXPECT_NE(&message, nested2); + EXPECT_NE(nested, nested2); +} + +TEST(GeneratedMessageTest, CamelCaseFieldNames) { + // This test is mainly checking that the following compiles, which verifies + // that the field names were coerced to lower-case. + // + // Protocol buffers standard style is to use lowercase-with-underscores for + // field names. Some old proto1 .protos unfortunately used camel-case field + // names. In proto1, these names were forced to lower-case. So, we do the + // same thing in proto2. 
+ + unittest::TestCamelCaseFieldNames message; + + message.set_primitivefield(2); + message.set_stringfield("foo"); + message.set_enumfield(unittest::FOREIGN_FOO); + message.mutable_messagefield()->set_c(6); + + message.add_repeatedprimitivefield(8); + message.add_repeatedstringfield("qux"); + message.add_repeatedenumfield(unittest::FOREIGN_BAR); + message.add_repeatedmessagefield()->set_c(15); + + EXPECT_EQ(2, message.primitivefield()); + EXPECT_EQ("foo", message.stringfield()); + EXPECT_EQ(unittest::FOREIGN_FOO, message.enumfield()); + EXPECT_EQ(6, message.messagefield().c()); + + EXPECT_EQ(8, message.repeatedprimitivefield(0)); + EXPECT_EQ("qux", message.repeatedstringfield(0)); + EXPECT_EQ(unittest::FOREIGN_BAR, message.repeatedenumfield(0)); + EXPECT_EQ(15, message.repeatedmessagefield(0).c()); +} + +TEST(GeneratedMessageTest, TestConflictingSymbolNames) { + // test_bad_identifiers.proto successfully compiled, then it works. The + // following is just a token usage to insure that the code is, in fact, + // being compiled and linked. + + protobuf_unittest::TestConflictingSymbolNames message; + message.set_uint32(1); + EXPECT_EQ(3, message.ByteSize()); + + message.set_friend_(5); + EXPECT_EQ(5, message.friend_()); +} + +#ifndef PROTOBUF_TEST_NO_DESCRIPTORS + +TEST(GeneratedMessageTest, TestOptimizedForSize) { + // We rely on the tests in reflection_ops_unittest and wire_format_unittest + // to really test that reflection-based methods work. Here we are mostly + // just making sure that TestOptimizedForSize actually builds and seems to + // function. + + protobuf_unittest::TestOptimizedForSize message, message2; + message.set_i(1); + message.mutable_msg()->set_c(2); + message2.CopyFrom(message); + EXPECT_EQ(1, message2.i()); + EXPECT_EQ(2, message2.msg().c()); +} + +TEST(GeneratedMessageTest, TestEmbedOptimizedForSize) { + // Verifies that something optimized for speed can contain something optimized + // for size. + + protobuf_unittest::TestEmbedOptimizedForSize message, message2; + message.mutable_optional_message()->set_i(1); + message.add_repeated_message()->mutable_msg()->set_c(2); + string data; + message.SerializeToString(&data); + ASSERT_TRUE(message2.ParseFromString(data)); + EXPECT_EQ(1, message2.optional_message().i()); + EXPECT_EQ(2, message2.repeated_message(0).msg().c()); +} + +TEST(GeneratedMessageTest, TestSpaceUsed) { + unittest::TestAllTypes message1; + // sizeof provides a lower bound on SpaceUsed(). + EXPECT_LE(sizeof(unittest::TestAllTypes), message1.SpaceUsed()); + const int empty_message_size = message1.SpaceUsed(); + + // Setting primitive types shouldn't affect the space used. + message1.set_optional_int32(123); + message1.set_optional_int64(12345); + message1.set_optional_uint32(123); + message1.set_optional_uint64(12345); + EXPECT_EQ(empty_message_size, message1.SpaceUsed()); + + // On some STL implementations, setting the string to a small value should + // only increase SpaceUsed() by the size of a string object, though this is + // not true everywhere. + message1.set_optional_string("abc"); + EXPECT_LE(empty_message_size + sizeof(string), message1.SpaceUsed()); + + // Setting a string to a value larger than the string object itself should + // increase SpaceUsed(), because it cannot store the value internally. 
+ message1.set_optional_string(string(sizeof(string) + 1, 'x')); + int min_expected_increase = message1.optional_string().capacity() + + sizeof(string); + EXPECT_LE(empty_message_size + min_expected_increase, + message1.SpaceUsed()); + + int previous_size = message1.SpaceUsed(); + // Adding an optional message should increase the size by the size of the + // nested message type. NestedMessage is simple enough (1 int field) that it + // is equal to sizeof(NestedMessage) + message1.mutable_optional_nested_message(); + ASSERT_EQ(sizeof(unittest::TestAllTypes::NestedMessage), + message1.optional_nested_message().SpaceUsed()); + EXPECT_EQ(previous_size + + sizeof(unittest::TestAllTypes::NestedMessage), + message1.SpaceUsed()); +} + +#endif // !PROTOBUF_TEST_NO_DESCRIPTORS + + +TEST(GeneratedMessageTest, FieldConstantValues) { + unittest::TestRequired message; + EXPECT_EQ(unittest::TestAllTypes_NestedMessage::kBbFieldNumber, 1); + EXPECT_EQ(unittest::TestAllTypes::kOptionalInt32FieldNumber, 1); + EXPECT_EQ(unittest::TestAllTypes::kOptionalgroupFieldNumber, 16); + EXPECT_EQ(unittest::TestAllTypes::kOptionalNestedMessageFieldNumber, 18); + EXPECT_EQ(unittest::TestAllTypes::kOptionalNestedEnumFieldNumber, 21); + EXPECT_EQ(unittest::TestAllTypes::kRepeatedInt32FieldNumber, 31); + EXPECT_EQ(unittest::TestAllTypes::kRepeatedgroupFieldNumber, 46); + EXPECT_EQ(unittest::TestAllTypes::kRepeatedNestedMessageFieldNumber, 48); + EXPECT_EQ(unittest::TestAllTypes::kRepeatedNestedEnumFieldNumber, 51); +} + +TEST(GeneratedMessageTest, ExtensionConstantValues) { + EXPECT_EQ(unittest::TestRequired::kSingleFieldNumber, 1000); + EXPECT_EQ(unittest::TestRequired::kMultiFieldNumber, 1001); + EXPECT_EQ(unittest::kOptionalInt32ExtensionFieldNumber, 1); + EXPECT_EQ(unittest::kOptionalgroupExtensionFieldNumber, 16); + EXPECT_EQ(unittest::kOptionalNestedMessageExtensionFieldNumber, 18); + EXPECT_EQ(unittest::kOptionalNestedEnumExtensionFieldNumber, 21); + EXPECT_EQ(unittest::kRepeatedInt32ExtensionFieldNumber, 31); + EXPECT_EQ(unittest::kRepeatedgroupExtensionFieldNumber, 46); + EXPECT_EQ(unittest::kRepeatedNestedMessageExtensionFieldNumber, 48); + EXPECT_EQ(unittest::kRepeatedNestedEnumExtensionFieldNumber, 51); +} + +// =================================================================== + +TEST(GeneratedEnumTest, EnumValuesAsSwitchCases) { + // Test that our nested enum values can be used as switch cases. This test + // doesn't actually do anything, the proof that it works is that it + // compiles. + int i =0; + unittest::TestAllTypes::NestedEnum a = unittest::TestAllTypes::BAR; + switch (a) { + case unittest::TestAllTypes::FOO: + i = 1; + break; + case unittest::TestAllTypes::BAR: + i = 2; + break; + case unittest::TestAllTypes::BAZ: + i = 3; + break; + // no default case: We want to make sure the compiler recognizes that + // all cases are covered. (GCC warns if you do not cover all cases of + // an enum in a switch.) + } + + // Token check just for fun. + EXPECT_EQ(2, i); +} + +TEST(GeneratedEnumTest, IsValidValue) { + // Test enum IsValidValue. + EXPECT_TRUE(unittest::TestAllTypes::NestedEnum_IsValid(1)); + EXPECT_TRUE(unittest::TestAllTypes::NestedEnum_IsValid(2)); + EXPECT_TRUE(unittest::TestAllTypes::NestedEnum_IsValid(3)); + + EXPECT_FALSE(unittest::TestAllTypes::NestedEnum_IsValid(0)); + EXPECT_FALSE(unittest::TestAllTypes::NestedEnum_IsValid(4)); + + // Make sure it also works when there are dups. 
+ EXPECT_TRUE(unittest::TestEnumWithDupValue_IsValid(1)); + EXPECT_TRUE(unittest::TestEnumWithDupValue_IsValid(2)); + EXPECT_TRUE(unittest::TestEnumWithDupValue_IsValid(3)); + + EXPECT_FALSE(unittest::TestEnumWithDupValue_IsValid(0)); + EXPECT_FALSE(unittest::TestEnumWithDupValue_IsValid(4)); +} + +TEST(GeneratedEnumTest, MinAndMax) { + EXPECT_EQ(unittest::TestAllTypes::FOO, + unittest::TestAllTypes::NestedEnum_MIN); + EXPECT_EQ(unittest::TestAllTypes::BAZ, + unittest::TestAllTypes::NestedEnum_MAX); + EXPECT_EQ(4, unittest::TestAllTypes::NestedEnum_ARRAYSIZE); + + EXPECT_EQ(unittest::FOREIGN_FOO, unittest::ForeignEnum_MIN); + EXPECT_EQ(unittest::FOREIGN_BAZ, unittest::ForeignEnum_MAX); + EXPECT_EQ(7, unittest::ForeignEnum_ARRAYSIZE); + + EXPECT_EQ(1, unittest::TestEnumWithDupValue_MIN); + EXPECT_EQ(3, unittest::TestEnumWithDupValue_MAX); + EXPECT_EQ(4, unittest::TestEnumWithDupValue_ARRAYSIZE); + + EXPECT_EQ(unittest::SPARSE_E, unittest::TestSparseEnum_MIN); + EXPECT_EQ(unittest::SPARSE_C, unittest::TestSparseEnum_MAX); + EXPECT_EQ(12589235, unittest::TestSparseEnum_ARRAYSIZE); + + // Make sure we can take the address of _MIN, _MAX and _ARRAYSIZE. + void* null_pointer = 0; // NULL may be integer-type, not pointer-type. + EXPECT_NE(null_pointer, &unittest::TestAllTypes::NestedEnum_MIN); + EXPECT_NE(null_pointer, &unittest::TestAllTypes::NestedEnum_MAX); + EXPECT_NE(null_pointer, &unittest::TestAllTypes::NestedEnum_ARRAYSIZE); + + EXPECT_NE(null_pointer, &unittest::ForeignEnum_MIN); + EXPECT_NE(null_pointer, &unittest::ForeignEnum_MAX); + EXPECT_NE(null_pointer, &unittest::ForeignEnum_ARRAYSIZE); + + // Make sure we can use _MIN, _MAX and _ARRAYSIZE as switch cases. + switch (unittest::SPARSE_A) { + case unittest::TestSparseEnum_MIN: + case unittest::TestSparseEnum_MAX: + case unittest::TestSparseEnum_ARRAYSIZE: + break; + default: + break; + } +} + +#ifndef PROTOBUF_TEST_NO_DESCRIPTORS + +TEST(GeneratedEnumTest, Name) { + // "Names" in the presence of dup values are a bit arbitrary. 
+ EXPECT_EQ("FOO1", unittest::TestEnumWithDupValue_Name(unittest::FOO1)); + EXPECT_EQ("FOO1", unittest::TestEnumWithDupValue_Name(unittest::FOO2)); + + EXPECT_EQ("SPARSE_A", unittest::TestSparseEnum_Name(unittest::SPARSE_A)); + EXPECT_EQ("SPARSE_B", unittest::TestSparseEnum_Name(unittest::SPARSE_B)); + EXPECT_EQ("SPARSE_C", unittest::TestSparseEnum_Name(unittest::SPARSE_C)); + EXPECT_EQ("SPARSE_D", unittest::TestSparseEnum_Name(unittest::SPARSE_D)); + EXPECT_EQ("SPARSE_E", unittest::TestSparseEnum_Name(unittest::SPARSE_E)); + EXPECT_EQ("SPARSE_F", unittest::TestSparseEnum_Name(unittest::SPARSE_F)); + EXPECT_EQ("SPARSE_G", unittest::TestSparseEnum_Name(unittest::SPARSE_G)); +} + +TEST(GeneratedEnumTest, Parse) { + unittest::TestEnumWithDupValue dup_value = unittest::FOO1; + EXPECT_TRUE(unittest::TestEnumWithDupValue_Parse("FOO1", &dup_value)); + EXPECT_EQ(unittest::FOO1, dup_value); + EXPECT_TRUE(unittest::TestEnumWithDupValue_Parse("FOO2", &dup_value)); + EXPECT_EQ(unittest::FOO2, dup_value); + EXPECT_FALSE(unittest::TestEnumWithDupValue_Parse("FOO", &dup_value)); +} + +TEST(GeneratedEnumTest, GetEnumDescriptor) { + EXPECT_EQ(unittest::TestAllTypes::NestedEnum_descriptor(), + GetEnumDescriptor()); + EXPECT_EQ(unittest::ForeignEnum_descriptor(), + GetEnumDescriptor()); + EXPECT_EQ(unittest::TestEnumWithDupValue_descriptor(), + GetEnumDescriptor()); + EXPECT_EQ(unittest::TestSparseEnum_descriptor(), + GetEnumDescriptor()); +} + +#endif // PROTOBUF_TEST_NO_DESCRIPTORS + +// =================================================================== + +#ifndef PROTOBUF_TEST_NO_DESCRIPTORS + +// Support code for testing services. +class GeneratedServiceTest : public testing::Test { + protected: + class MockTestService : public unittest::TestService { + public: + MockTestService() + : called_(false), + method_(""), + controller_(NULL), + request_(NULL), + response_(NULL), + done_(NULL) {} + + ~MockTestService() {} + + void Reset() { called_ = false; } + + // implements TestService ---------------------------------------- + + void Foo(RpcController* controller, + const unittest::FooRequest* request, + unittest::FooResponse* response, + Closure* done) { + ASSERT_FALSE(called_); + called_ = true; + method_ = "Foo"; + controller_ = controller; + request_ = request; + response_ = response; + done_ = done; + } + + void Bar(RpcController* controller, + const unittest::BarRequest* request, + unittest::BarResponse* response, + Closure* done) { + ASSERT_FALSE(called_); + called_ = true; + method_ = "Bar"; + controller_ = controller; + request_ = request; + response_ = response; + done_ = done; + } + + // --------------------------------------------------------------- + + bool called_; + string method_; + RpcController* controller_; + const Message* request_; + Message* response_; + Closure* done_; + }; + + class MockRpcChannel : public RpcChannel { + public: + MockRpcChannel() + : called_(false), + method_(NULL), + controller_(NULL), + request_(NULL), + response_(NULL), + done_(NULL), + destroyed_(NULL) {} + + ~MockRpcChannel() { + if (destroyed_ != NULL) *destroyed_ = true; + } + + void Reset() { called_ = false; } + + // implements TestService ---------------------------------------- + + void CallMethod(const MethodDescriptor* method, + RpcController* controller, + const Message* request, + Message* response, + Closure* done) { + ASSERT_FALSE(called_); + called_ = true; + method_ = method; + controller_ = controller; + request_ = request; + response_ = response; + done_ = done; + } + + // 
--------------------------------------------------------------- + + bool called_; + const MethodDescriptor* method_; + RpcController* controller_; + const Message* request_; + Message* response_; + Closure* done_; + bool* destroyed_; + }; + + class MockController : public RpcController { + public: + void Reset() { + ADD_FAILURE() << "Reset() not expected during this test."; + } + bool Failed() const { + ADD_FAILURE() << "Failed() not expected during this test."; + return false; + } + string ErrorText() const { + ADD_FAILURE() << "ErrorText() not expected during this test."; + return ""; + } + void StartCancel() { + ADD_FAILURE() << "StartCancel() not expected during this test."; + } + void SetFailed(const string& reason) { + ADD_FAILURE() << "SetFailed() not expected during this test."; + } + bool IsCanceled() const { + ADD_FAILURE() << "IsCanceled() not expected during this test."; + return false; + } + void NotifyOnCancel(Closure* callback) { + ADD_FAILURE() << "NotifyOnCancel() not expected during this test."; + } + }; + + GeneratedServiceTest() + : descriptor_(unittest::TestService::descriptor()), + foo_(descriptor_->FindMethodByName("Foo")), + bar_(descriptor_->FindMethodByName("Bar")), + stub_(&mock_channel_), + done_(NewPermanentCallback(&DoNothing)) {} + + virtual void SetUp() { + ASSERT_TRUE(foo_ != NULL); + ASSERT_TRUE(bar_ != NULL); + } + + const ServiceDescriptor* descriptor_; + const MethodDescriptor* foo_; + const MethodDescriptor* bar_; + + MockTestService mock_service_; + MockController mock_controller_; + + MockRpcChannel mock_channel_; + unittest::TestService::Stub stub_; + + // Just so we don't have to re-define these with every test. + unittest::FooRequest foo_request_; + unittest::FooResponse foo_response_; + unittest::BarRequest bar_request_; + unittest::BarResponse bar_response_; + scoped_ptr done_; +}; + +TEST_F(GeneratedServiceTest, GetDescriptor) { + // Test that GetDescriptor() works. + + EXPECT_EQ(descriptor_, mock_service_.GetDescriptor()); +} + +TEST_F(GeneratedServiceTest, GetChannel) { + EXPECT_EQ(&mock_channel_, stub_.channel()); +} + +TEST_F(GeneratedServiceTest, OwnsChannel) { + MockRpcChannel* channel = new MockRpcChannel; + bool destroyed = false; + channel->destroyed_ = &destroyed; + + { + unittest::TestService::Stub owning_stub(channel, + Service::STUB_OWNS_CHANNEL); + EXPECT_FALSE(destroyed); + } + + EXPECT_TRUE(destroyed); +} + +TEST_F(GeneratedServiceTest, CallMethod) { + // Test that CallMethod() works. + + // Call Foo() via CallMethod(). + mock_service_.CallMethod(foo_, &mock_controller_, + &foo_request_, &foo_response_, done_.get()); + + ASSERT_TRUE(mock_service_.called_); + + EXPECT_EQ("Foo" , mock_service_.method_ ); + EXPECT_EQ(&mock_controller_, mock_service_.controller_); + EXPECT_EQ(&foo_request_ , mock_service_.request_ ); + EXPECT_EQ(&foo_response_ , mock_service_.response_ ); + EXPECT_EQ(done_.get() , mock_service_.done_ ); + + // Try again, but call Bar() instead. + mock_service_.Reset(); + mock_service_.CallMethod(bar_, &mock_controller_, + &bar_request_, &bar_response_, done_.get()); + + ASSERT_TRUE(mock_service_.called_); + EXPECT_EQ("Bar", mock_service_.method_); +} + +TEST_F(GeneratedServiceTest, CallMethodTypeFailure) { + // Verify death if we call Foo() with Bar's message types. 
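Before the death-test body that follows, a brief sketch of the Closure machinery the harness above leans on (done_ is created with NewPermanentCallback(&DoNothing)); the handler name here is illustrative.

#include <google/protobuf/stubs/common.h>

void OnDone() { /* invoked when the call completes */ }

void ClosureSketch() {
  // NewCallback() closures delete themselves after Run(); NewPermanentCallback()
  // closures, like done_ above, survive Run() and must be deleted explicitly.
  google::protobuf::Closure* done = google::protobuf::NewCallback(&OnDone);
  done->Run();
}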
+ +#ifdef GTEST_HAS_DEATH_TEST // death tests do not work on Windows yet + EXPECT_DEBUG_DEATH( + mock_service_.CallMethod(foo_, &mock_controller_, + &foo_request_, &bar_response_, done_.get()), + "dynamic_cast"); + + mock_service_.Reset(); + EXPECT_DEBUG_DEATH( + mock_service_.CallMethod(foo_, &mock_controller_, + &bar_request_, &foo_response_, done_.get()), + "dynamic_cast"); +#endif // GTEST_HAS_DEATH_TEST +} + +TEST_F(GeneratedServiceTest, GetPrototypes) { + // Test Get{Request,Response}Prototype() methods. + + EXPECT_EQ(&unittest::FooRequest::default_instance(), + &mock_service_.GetRequestPrototype(foo_)); + EXPECT_EQ(&unittest::BarRequest::default_instance(), + &mock_service_.GetRequestPrototype(bar_)); + + EXPECT_EQ(&unittest::FooResponse::default_instance(), + &mock_service_.GetResponsePrototype(foo_)); + EXPECT_EQ(&unittest::BarResponse::default_instance(), + &mock_service_.GetResponsePrototype(bar_)); +} + +TEST_F(GeneratedServiceTest, Stub) { + // Test that the stub class works. + + // Call Foo() via the stub. + stub_.Foo(&mock_controller_, &foo_request_, &foo_response_, done_.get()); + + ASSERT_TRUE(mock_channel_.called_); + + EXPECT_EQ(foo_ , mock_channel_.method_ ); + EXPECT_EQ(&mock_controller_, mock_channel_.controller_); + EXPECT_EQ(&foo_request_ , mock_channel_.request_ ); + EXPECT_EQ(&foo_response_ , mock_channel_.response_ ); + EXPECT_EQ(done_.get() , mock_channel_.done_ ); + + // Call Bar() via the stub. + mock_channel_.Reset(); + stub_.Bar(&mock_controller_, &bar_request_, &bar_response_, done_.get()); + + ASSERT_TRUE(mock_channel_.called_); + EXPECT_EQ(bar_, mock_channel_.method_); +} + +TEST_F(GeneratedServiceTest, NotImplemented) { + // Test that failing to implement a method of a service causes it to fail + // with a "not implemented" error message. + + // A service which doesn't implement any methods. + class UnimplementedService : public unittest::TestService { + public: + UnimplementedService() {} + }; + + UnimplementedService unimplemented_service; + + // And a controller which expects to get a "not implemented" error. + class ExpectUnimplementedController : public MockController { + public: + ExpectUnimplementedController() : called_(false) {} + + void SetFailed(const string& reason) { + EXPECT_FALSE(called_); + called_ = true; + EXPECT_EQ("Method Foo() not implemented.", reason); + } + + bool called_; + }; + + ExpectUnimplementedController controller; + + // Call Foo. + unimplemented_service.Foo(&controller, &foo_request_, &foo_response_, + done_.get()); + + EXPECT_TRUE(controller.called_); +} + +} // namespace cpp_unittest +} // namespace cpp +} // namespace compiler + +namespace no_generic_services_test { + // Verify that no class called "TestService" was defined in + // unittest_no_generic_services.pb.h by defining a different type by the same + // name. If such a service was generated, this will not compile. + struct TestService { + int i; + }; +} + +namespace compiler { +namespace cpp { +namespace cpp_unittest { + +TEST_F(GeneratedServiceTest, NoGenericServices) { + // Verify that non-services in unittest_no_generic_services.proto were + // generated. + no_generic_services_test::TestMessage message; + message.set_a(1); + message.SetExtension(no_generic_services_test::test_extension, 123); + no_generic_services_test::TestEnum e = no_generic_services_test::FOO; + EXPECT_EQ(e, 1); + + // Verify that a ServiceDescriptor is generated for the service even if the + // class itself is not. 
+ const FileDescriptor* file = + no_generic_services_test::TestMessage::descriptor()->file(); + + ASSERT_EQ(1, file->service_count()); + EXPECT_EQ("TestService", file->service(0)->name()); + ASSERT_EQ(1, file->service(0)->method_count()); + EXPECT_EQ("Foo", file->service(0)->method(0)->name()); +} + +#endif // !PROTOBUF_TEST_NO_DESCRIPTORS + +// =================================================================== + +// This test must run last. It verifies that descriptors were or were not +// initialized depending on whether PROTOBUF_TEST_NO_DESCRIPTORS was defined. +// When this is defined, we skip all tests which are expected to trigger +// descriptor initialization. This verifies that everything else still works +// if descriptors are not initialized. +TEST(DescriptorInitializationTest, Initialized) { +#ifdef PROTOBUF_TEST_NO_DESCRIPTORS + bool should_have_descriptors = false; +#else + bool should_have_descriptors = true; +#endif + + EXPECT_EQ(should_have_descriptors, + DescriptorPool::generated_pool()->InternalIsFileLoaded( + "google/protobuf/unittest.proto")); +} + +} // namespace cpp_unittest + +} // namespace cpp +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/importer.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/importer.cc new file mode 100644 index 0000000000..422f759f2b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/importer.cc @@ -0,0 +1,459 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
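importer.cc implements the Importer and SourceTree machinery that cpp_unittest.cc above exercises via DiskSourceTree::MapPath() and Importer::Import(). A minimal standalone usage sketch; the directory mapping and the .proto file name are hypothetical.

#include <cstdio>
#include <string>
#include <google/protobuf/compiler/importer.h>

using google::protobuf::compiler::DiskSourceTree;
using google::protobuf::compiler::Importer;
using google::protobuf::compiler::MultiFileErrorCollector;

// Reports parse errors to stderr; Importer requires some MultiFileErrorCollector.
class StderrErrorCollector : public MultiFileErrorCollector {
 public:
  void AddError(const std::string& filename, int line, int column,
                const std::string& message) {
    fprintf(stderr, "%s:%d:%d: %s\n", filename.c_str(), line, column,
            message.c_str());
  }
};

int main() {
  DiskSourceTree source_tree;
  source_tree.MapPath("", "protos/");  // map the virtual root onto a local directory
  StderrErrorCollector errors;
  Importer importer(&source_tree, &errors);
  // Import() parses the file (and its imports) and returns NULL on failure.
  return importer.Import("example.proto") == NULL ? 1 : 0;
}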
+
+#ifdef _MSC_VER
+#include <io.h>
+#else
+#include <unistd.h>
+#endif
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+
+#include <algorithm>
+
+#include <google/protobuf/compiler/importer.h>
+
+#include <google/protobuf/compiler/parser.h>
+#include <google/protobuf/io/tokenizer.h>
+#include <google/protobuf/io/zero_copy_stream_impl.h>
+#include <google/protobuf/stubs/strutil.h>
+
+namespace google {
+namespace protobuf {
+namespace compiler {
+
+#ifdef _WIN32
+#ifndef F_OK
+#define F_OK 00  // not defined by MSVC for whatever reason
+#endif
+#include <ctype.h>
+#endif
+
+// Returns true if the text looks like a Windows-style absolute path, starting
+// with a drive letter.  Example:  "C:\foo".  TODO(kenton):  Share this with
+// copy in command_line_interface.cc?
+static bool IsWindowsAbsolutePath(const string& text) {
+#if defined(_WIN32) || defined(__CYGWIN__)
+  return text.size() >= 3 && text[1] == ':' &&
+         isalpha(text[0]) &&
+         (text[2] == '/' || text[2] == '\\') &&
+         text.find_last_of(':') == 1;
+#else
+  return false;
+#endif
+}
+
+MultiFileErrorCollector::~MultiFileErrorCollector() {}
+
+// This class serves two purposes:
+// - It implements the ErrorCollector interface (used by Tokenizer and Parser)
+//   in terms of MultiFileErrorCollector, using a particular filename.
+// - It lets us check if any errors have occurred.
+class SourceTreeDescriptorDatabase::SingleFileErrorCollector
+    : public io::ErrorCollector {
+ public:
+  SingleFileErrorCollector(const string& filename,
+                           MultiFileErrorCollector* multi_file_error_collector)
+    : filename_(filename),
+      multi_file_error_collector_(multi_file_error_collector),
+      had_errors_(false) {}
+  ~SingleFileErrorCollector() {}
+
+  bool had_errors() { return had_errors_; }
+
+  // implements ErrorCollector ---------------------------------------
+  void AddError(int line, int column, const string& message) {
+    if (multi_file_error_collector_ != NULL) {
+      multi_file_error_collector_->AddError(filename_, line, column, message);
+    }
+    had_errors_ = true;
+  }
+
+ private:
+  string filename_;
+  MultiFileErrorCollector* multi_file_error_collector_;
+  bool had_errors_;
+};
+
+// ===================================================================
+
+SourceTreeDescriptorDatabase::SourceTreeDescriptorDatabase(
+    SourceTree* source_tree)
+  : source_tree_(source_tree),
+    error_collector_(NULL),
+    using_validation_error_collector_(false),
+    validation_error_collector_(this) {}
+
+SourceTreeDescriptorDatabase::~SourceTreeDescriptorDatabase() {}
+
+bool SourceTreeDescriptorDatabase::FindFileByName(
+    const string& filename, FileDescriptorProto* output) {
+  scoped_ptr<io::ZeroCopyInputStream> input(source_tree_->Open(filename));
+  if (input == NULL) {
+    if (error_collector_ != NULL) {
+      error_collector_->AddError(filename, -1, 0, "File not found.");
+    }
+    return false;
+  }
+
+  // Set up the tokenizer and parser.
+  SingleFileErrorCollector file_error_collector(filename, error_collector_);
+  io::Tokenizer tokenizer(input.get(), &file_error_collector);
+
+  Parser parser;
+  if (error_collector_ != NULL) {
+    parser.RecordErrorsTo(&file_error_collector);
+  }
+  if (using_validation_error_collector_) {
+    parser.RecordSourceLocationsTo(&source_locations_);
+  }
+
+  // Parse it.
+ output->set_name(filename); + return parser.Parse(&tokenizer, output) && + !file_error_collector.had_errors(); +} + +bool SourceTreeDescriptorDatabase::FindFileContainingSymbol( + const string& symbol_name, FileDescriptorProto* output) { + return false; +} + +bool SourceTreeDescriptorDatabase::FindFileContainingExtension( + const string& containing_type, int field_number, + FileDescriptorProto* output) { + return false; +} + +// ------------------------------------------------------------------- + +SourceTreeDescriptorDatabase::ValidationErrorCollector:: +ValidationErrorCollector(SourceTreeDescriptorDatabase* owner) + : owner_(owner) {} + +SourceTreeDescriptorDatabase::ValidationErrorCollector:: +~ValidationErrorCollector() {} + +void SourceTreeDescriptorDatabase::ValidationErrorCollector::AddError( + const string& filename, + const string& element_name, + const Message* descriptor, + ErrorLocation location, + const string& message) { + if (owner_->error_collector_ == NULL) return; + + int line, column; + owner_->source_locations_.Find(descriptor, location, &line, &column); + owner_->error_collector_->AddError(filename, line, column, message); +} + +// =================================================================== + +Importer::Importer(SourceTree* source_tree, + MultiFileErrorCollector* error_collector) + : database_(source_tree), + pool_(&database_, database_.GetValidationErrorCollector()) { + database_.RecordErrorsTo(error_collector); +} + +Importer::~Importer() {} + +const FileDescriptor* Importer::Import(const string& filename) { + return pool_.FindFileByName(filename); +} + +// =================================================================== + +SourceTree::~SourceTree() {} + +DiskSourceTree::DiskSourceTree() {} + +DiskSourceTree::~DiskSourceTree() {} + +static inline char LastChar(const string& str) { + return str[str.size() - 1]; +} + +// Given a path, returns an equivalent path with these changes: +// - On Windows, any backslashes are replaced with forward slashes. +// - Any instances of the directory "." are removed. +// - Any consecutive '/'s are collapsed into a single slash. +// Note that the resulting string may be empty. +// +// TODO(kenton): It would be nice to handle "..", e.g. so that we can figure +// out that "foo/bar.proto" is inside "baz/../foo". However, if baz is a +// symlink or doesn't exist, then things get complicated, and we can't +// actually determine this without investigating the filesystem, probably +// in non-portable ways. So, we punt. +// +// TODO(kenton): It would be nice to use realpath() here except that it +// resolves symbolic links. This could cause problems if people place +// symbolic links in their source tree. For example, if you executed: +// protoc --proto_path=foo foo/bar/baz.proto +// then if foo/bar is a symbolic link, foo/bar/baz.proto will canonicalize +// to a path which does not appear to be under foo, and thus the compiler +// will complain that baz.proto is not inside the --proto_path. +static string CanonicalizePath(string path) { +#ifdef _WIN32 + // The Win32 API accepts forward slashes as a path delimiter even though + // backslashes are standard. Let's avoid confusion and use only forward + // slashes. + if (HasPrefixString(path, "\\\\")) { + // Avoid converting two leading backslashes. 
+ path = "\\\\" + StringReplace(path.substr(2), "\\", "/", true); + } else { + path = StringReplace(path, "\\", "/", true); + } +#endif + + vector parts; + vector canonical_parts; + SplitStringUsing(path, "/", &parts); // Note: Removes empty parts. + for (int i = 0; i < parts.size(); i++) { + if (parts[i] == ".") { + // Ignore. + } else { + canonical_parts.push_back(parts[i]); + } + } + string result = JoinStrings(canonical_parts, "/"); + if (!path.empty() && path[0] == '/') { + // Restore leading slash. + result = '/' + result; + } + if (!path.empty() && LastChar(path) == '/' && + !result.empty() && LastChar(result) != '/') { + // Restore trailing slash. + result += '/'; + } + return result; +} + +static inline bool ContainsParentReference(const string& path) { + return path == ".." || + HasPrefixString(path, "../") || + HasSuffixString(path, "/..") || + path.find("/../") != string::npos; +} + +// Maps a file from an old location to a new one. Typically, old_prefix is +// a virtual path and new_prefix is its corresponding disk path. Returns +// false if the filename did not start with old_prefix, otherwise replaces +// old_prefix with new_prefix and stores the result in *result. Examples: +// string result; +// assert(ApplyMapping("foo/bar", "", "baz", &result)); +// assert(result == "baz/foo/bar"); +// +// assert(ApplyMapping("foo/bar", "foo", "baz", &result)); +// assert(result == "baz/bar"); +// +// assert(ApplyMapping("foo", "foo", "bar", &result)); +// assert(result == "bar"); +// +// assert(!ApplyMapping("foo/bar", "baz", "qux", &result)); +// assert(!ApplyMapping("foo/bar", "baz", "qux", &result)); +// assert(!ApplyMapping("foobar", "foo", "baz", &result)); +static bool ApplyMapping(const string& filename, + const string& old_prefix, + const string& new_prefix, + string* result) { + if (old_prefix.empty()) { + // old_prefix matches any relative path. + if (ContainsParentReference(filename)) { + // We do not allow the file name to use "..". + return false; + } + if (HasPrefixString(filename, "/") || + IsWindowsAbsolutePath(filename)) { + // This is an absolute path, so it isn't matched by the empty string. + return false; + } + result->assign(new_prefix); + if (!result->empty()) result->push_back('/'); + result->append(filename); + return true; + } else if (HasPrefixString(filename, old_prefix)) { + // old_prefix is a prefix of the filename. Is it the whole filename? + if (filename.size() == old_prefix.size()) { + // Yep, it's an exact match. + *result = new_prefix; + return true; + } else { + // Not an exact match. Is the next character a '/'? Otherwise, + // this isn't actually a match at all. E.g. the prefix "foo/bar" + // does not match the filename "foo/barbaz". + int after_prefix_start = -1; + if (filename[old_prefix.size()] == '/') { + after_prefix_start = old_prefix.size() + 1; + } else if (filename[old_prefix.size() - 1] == '/') { + // old_prefix is never empty, and canonicalized paths never have + // consecutive '/' characters. + after_prefix_start = old_prefix.size(); + } + if (after_prefix_start != -1) { + // Yep. So the prefixes are directories and the filename is a file + // inside them. + string after_prefix = filename.substr(after_prefix_start); + if (ContainsParentReference(after_prefix)) { + // We do not allow the file name to use "..". 
+ return false; + } + result->assign(new_prefix); + if (!result->empty()) result->push_back('/'); + result->append(after_prefix); + return true; + } + } + } + + return false; +} + +void DiskSourceTree::MapPath(const string& virtual_path, + const string& disk_path) { + mappings_.push_back(Mapping(virtual_path, CanonicalizePath(disk_path))); +} + +DiskSourceTree::DiskFileToVirtualFileResult +DiskSourceTree::DiskFileToVirtualFile( + const string& disk_file, + string* virtual_file, + string* shadowing_disk_file) { + int mapping_index = -1; + string canonical_disk_file = CanonicalizePath(disk_file); + + for (int i = 0; i < mappings_.size(); i++) { + // Apply the mapping in reverse. + if (ApplyMapping(canonical_disk_file, mappings_[i].disk_path, + mappings_[i].virtual_path, virtual_file)) { + // Success. + mapping_index = i; + break; + } + } + + if (mapping_index == -1) { + return NO_MAPPING; + } + + // Iterate through all mappings with higher precedence and verify that none + // of them map this file to some other existing file. + for (int i = 0; i < mapping_index; i++) { + if (ApplyMapping(*virtual_file, mappings_[i].virtual_path, + mappings_[i].disk_path, shadowing_disk_file)) { + if (access(shadowing_disk_file->c_str(), F_OK) >= 0) { + // File exists. + return SHADOWED; + } + } + } + shadowing_disk_file->clear(); + + // Verify that we can open the file. Note that this also has the side-effect + // of verifying that we are not canonicalizing away any non-existent + // directories. + scoped_ptr stream(OpenDiskFile(disk_file)); + if (stream == NULL) { + return CANNOT_OPEN; + } + + return SUCCESS; +} + +bool DiskSourceTree::VirtualFileToDiskFile(const string& virtual_file, + string* disk_file) { + scoped_ptr stream(OpenVirtualFile(virtual_file, + disk_file)); + return stream != NULL; +} + +io::ZeroCopyInputStream* DiskSourceTree::Open(const string& filename) { + return OpenVirtualFile(filename, NULL); +} + +io::ZeroCopyInputStream* DiskSourceTree::OpenVirtualFile( + const string& virtual_file, + string* disk_file) { + if (virtual_file != CanonicalizePath(virtual_file) || + ContainsParentReference(virtual_file)) { + // We do not allow importing of paths containing things like ".." or + // consecutive slashes since the compiler expects files to be uniquely + // identified by file name. + return NULL; + } + + for (int i = 0; i < mappings_.size(); i++) { + string temp_disk_file; + if (ApplyMapping(virtual_file, mappings_[i].virtual_path, + mappings_[i].disk_path, &temp_disk_file)) { + io::ZeroCopyInputStream* stream = OpenDiskFile(temp_disk_file); + if (stream != NULL) { + if (disk_file != NULL) { + *disk_file = temp_disk_file; + } + return stream; + } + + if (errno == EACCES) { + // The file exists but is not readable. + // TODO(kenton): Find a way to report this more nicely. 
+ GOOGLE_LOG(WARNING) << "Read access is denied for file: " << temp_disk_file; + return NULL; + } + } + } + + return NULL; +} + +io::ZeroCopyInputStream* DiskSourceTree::OpenDiskFile( + const string& filename) { + int file_descriptor; + do { + file_descriptor = open(filename.c_str(), O_RDONLY); + } while (file_descriptor < 0 && errno == EINTR); + if (file_descriptor >= 0) { + io::FileInputStream* result = new io::FileInputStream(file_descriptor); + result->SetCloseOnDelete(true); + return result; + } else { + return NULL; + } +} + +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/importer.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/importer.h new file mode 100644 index 0000000000..7a2efc2976 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/importer.h @@ -0,0 +1,303 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This file is the public interface to the .proto file parser. + +#ifndef GOOGLE_PROTOBUF_COMPILER_IMPORTER_H__ +#define GOOGLE_PROTOBUF_COMPILER_IMPORTER_H__ + +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { + +namespace io { class ZeroCopyInputStream; } + +namespace compiler { + +// Defined in this file. +class Importer; +class MultiFileErrorCollector; +class SourceTree; +class DiskSourceTree; + +// TODO(kenton): Move all SourceTree stuff to a separate file? + +// An implementation of DescriptorDatabase which loads files from a SourceTree +// and parses them. 
+// +// Note: This class is not thread-safe since it maintains a table of source +// code locations for error reporting. However, when a DescriptorPool wraps +// a DescriptorDatabase, it uses mutex locking to make sure only one method +// of the database is called at a time, even if the DescriptorPool is used +// from multiple threads. Therefore, there is only a problem if you create +// multiple DescriptorPools wrapping the same SourceTreeDescriptorDatabase +// and use them from multiple threads. +// +// Note: This class does not implement FindFileContainingSymbol() or +// FindFileContainingExtension(); these will always return false. +class LIBPROTOBUF_EXPORT SourceTreeDescriptorDatabase : public DescriptorDatabase { + public: + SourceTreeDescriptorDatabase(SourceTree* source_tree); + ~SourceTreeDescriptorDatabase(); + + // Instructs the SourceTreeDescriptorDatabase to report any parse errors + // to the given MultiFileErrorCollector. This should be called before + // parsing. error_collector must remain valid until either this method + // is called again or the SourceTreeDescriptorDatabase is destroyed. + void RecordErrorsTo(MultiFileErrorCollector* error_collector) { + error_collector_ = error_collector; + } + + // Gets a DescriptorPool::ErrorCollector which records errors to the + // MultiFileErrorCollector specified with RecordErrorsTo(). This collector + // has the ability to determine exact line and column numbers of errors + // from the information given to it by the DescriptorPool. + DescriptorPool::ErrorCollector* GetValidationErrorCollector() { + using_validation_error_collector_ = true; + return &validation_error_collector_; + } + + // implements DescriptorDatabase ----------------------------------- + bool FindFileByName(const string& filename, FileDescriptorProto* output); + bool FindFileContainingSymbol(const string& symbol_name, + FileDescriptorProto* output); + bool FindFileContainingExtension(const string& containing_type, + int field_number, + FileDescriptorProto* output); + + private: + class SingleFileErrorCollector; + + SourceTree* source_tree_; + MultiFileErrorCollector* error_collector_; + + class LIBPROTOBUF_EXPORT ValidationErrorCollector : public DescriptorPool::ErrorCollector { + public: + ValidationErrorCollector(SourceTreeDescriptorDatabase* owner); + ~ValidationErrorCollector(); + + // implements ErrorCollector --------------------------------------- + void AddError(const string& filename, + const string& element_name, + const Message* descriptor, + ErrorLocation location, + const string& message); + + private: + SourceTreeDescriptorDatabase* owner_; + }; + friend class ValidationErrorCollector; + + bool using_validation_error_collector_; + SourceLocationTable source_locations_; + ValidationErrorCollector validation_error_collector_; +}; + +// Simple interface for parsing .proto files. This wraps the process +// of opening the file, parsing it with a Parser, recursively parsing all its +// imports, and then cross-linking the results to produce a FileDescriptor. +// +// This is really just a thin wrapper around SourceTreeDescriptorDatabase. +// You may find that SourceTreeDescriptorDatabase is more flexible. +// +// TODO(kenton): I feel like this class is not well-named. +class LIBPROTOBUF_EXPORT Importer { + public: + Importer(SourceTree* source_tree, + MultiFileErrorCollector* error_collector); + ~Importer(); + + // Import the given file and build a FileDescriptor representing it. 
If + // the file is already in the DescriptorPool, the existing FileDescriptor + // will be returned. The FileDescriptor is property of the DescriptorPool, + // and will remain valid until it is destroyed. If any errors occur, they + // will be reported using the error collector and Import() will return NULL. + // + // A particular Importer object will only report errors for a particular + // file once. All future attempts to import the same file will return NULL + // without reporting any errors. The idea is that you might want to import + // a lot of files without seeing the same errors over and over again. If + // you want to see errors for the same files repeatedly, you can use a + // separate Importer object to import each one (but use the same + // DescriptorPool so that they can be cross-linked). + const FileDescriptor* Import(const string& filename); + + // The DescriptorPool in which all imported FileDescriptors and their + // contents are stored. + inline const DescriptorPool* pool() const { + return &pool_; + } + + private: + SourceTreeDescriptorDatabase database_; + DescriptorPool pool_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Importer); +}; + +// If the importer encounters problems while trying to import the proto files, +// it reports them to a MultiFileErrorCollector. +class LIBPROTOBUF_EXPORT MultiFileErrorCollector { + public: + inline MultiFileErrorCollector() {} + virtual ~MultiFileErrorCollector(); + + // Line and column numbers are zero-based. A line number of -1 indicates + // an error with the entire file (e.g. "not found"). + virtual void AddError(const string& filename, int line, int column, + const string& message) = 0; + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(MultiFileErrorCollector); +}; + +// Abstract interface which represents a directory tree containing proto files. +// Used by the default implementation of Importer to resolve import statements +// Most users will probably want to use the DiskSourceTree implementation, +// below. +class LIBPROTOBUF_EXPORT SourceTree { + public: + inline SourceTree() {} + virtual ~SourceTree(); + + // Open the given file and return a stream that reads it, or NULL if not + // found. The caller takes ownership of the returned object. The filename + // must be a path relative to the root of the source tree and must not + // contain "." or ".." components. + virtual io::ZeroCopyInputStream* Open(const string& filename) = 0; + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(SourceTree); +}; + +// An implementation of SourceTree which loads files from locations on disk. +// Multiple mappings can be set up to map locations in the DiskSourceTree to +// locations in the physical filesystem. +class LIBPROTOBUF_EXPORT DiskSourceTree : public SourceTree { + public: + DiskSourceTree(); + ~DiskSourceTree(); + + // Map a path on disk to a location in the SourceTree. The path may be + // either a file or a directory. If it is a directory, the entire tree + // under it will be mapped to the given virtual location. To map a directory + // to the root of the source tree, pass an empty string for virtual_path. + // + // If multiple mapped paths apply when opening a file, they will be searched + // in order. For example, if you do: + // MapPath("bar", "foo/bar"); + // MapPath("", "baz"); + // and then you do: + // Open("bar/qux"); + // the DiskSourceTree will first try to open foo/bar/qux, then baz/bar/qux, + // returning the first one that opens successfuly. 
+ // + // disk_path may be an absolute path or relative to the current directory, + // just like a path you'd pass to open(). + void MapPath(const string& virtual_path, const string& disk_path); + + // Return type for DiskFileToVirtualFile(). + enum DiskFileToVirtualFileResult { + SUCCESS, + SHADOWED, + CANNOT_OPEN, + NO_MAPPING + }; + + // Given a path to a file on disk, find a virtual path mapping to that + // file. The first mapping created with MapPath() whose disk_path contains + // the filename is used. However, that virtual path may not actually be + // usable to open the given file. Possible return values are: + // * SUCCESS: The mapping was found. *virtual_file is filled in so that + // calling Open(*virtual_file) will open the file named by disk_file. + // * SHADOWED: A mapping was found, but using Open() to open this virtual + // path will end up returning some different file. This is because some + // other mapping with a higher precedence also matches this virtual path + // and maps it to a different file that exists on disk. *virtual_file + // is filled in as it would be in the SUCCESS case. *shadowing_disk_file + // is filled in with the disk path of the file which would be opened if + // you were to call Open(*virtual_file). + // * CANNOT_OPEN: The mapping was found and was not shadowed, but the + // file specified cannot be opened. When this value is returned, + // errno will indicate the reason the file cannot be opened. *virtual_file + // will be set to the virtual path as in the SUCCESS case, even though + // it is not useful. + // * NO_MAPPING: Indicates that no mapping was found which contains this + // file. + DiskFileToVirtualFileResult + DiskFileToVirtualFile(const string& disk_file, + string* virtual_file, + string* shadowing_disk_file); + + // Given a virtual path, find the path to the file on disk. + // Return true and update disk_file with the on-disk path if the file exists. + // Return false and leave disk_file untouched if the file doesn't exist. + bool VirtualFileToDiskFile(const string& virtual_file, string* disk_file); + + // implements SourceTree ------------------------------------------- + io::ZeroCopyInputStream* Open(const string& filename); + + private: + struct Mapping { + string virtual_path; + string disk_path; + + inline Mapping(const string& virtual_path, const string& disk_path) + : virtual_path(virtual_path), disk_path(disk_path) {} + }; + vector mappings_; + + // Like Open(), but returns the on-disk path in disk_file if disk_file is + // non-NULL and the file could be successfully opened. + io::ZeroCopyInputStream* OpenVirtualFile(const string& virtual_file, + string* disk_file); + + // Like Open() but given the actual on-disk path. + io::ZeroCopyInputStream* OpenDiskFile(const string& filename); + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(DiskSourceTree); +}; + +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_IMPORTER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/importer_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/importer_unittest.cc new file mode 100644 index 0000000000..56fad56ed3 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/importer_unittest.cc @@ -0,0 +1,600 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. 
All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { + +namespace { + +#define EXPECT_SUBSTRING(needle, haystack) \ + EXPECT_PRED_FORMAT2(testing::IsSubstring, (needle), (haystack)) + +class MockErrorCollector : public MultiFileErrorCollector { + public: + MockErrorCollector() {} + ~MockErrorCollector() {} + + string text_; + + // implements ErrorCollector --------------------------------------- + void AddError(const string& filename, int line, int column, + const string& message) { + strings::SubstituteAndAppend(&text_, "$0:$1:$2: $3\n", + filename, line, column, message); + } +}; + +// ------------------------------------------------------------------- + +// A dummy implementation of SourceTree backed by a simple map. 
+class MockSourceTree : public SourceTree { + public: + MockSourceTree() {} + ~MockSourceTree() {} + + void AddFile(const string& name, const char* contents) { + files_[name] = contents; + } + + // implements SourceTree ------------------------------------------- + io::ZeroCopyInputStream* Open(const string& filename) { + const char* contents = FindPtrOrNull(files_, filename); + if (contents == NULL) { + return NULL; + } else { + return new io::ArrayInputStream(contents, strlen(contents)); + } + } + + private: + hash_map files_; +}; + +// =================================================================== + +class ImporterTest : public testing::Test { + protected: + ImporterTest() + : importer_(&source_tree_, &error_collector_) {} + + void AddFile(const string& filename, const char* text) { + source_tree_.AddFile(filename, text); + } + + // Return the collected error text + string error() const { return error_collector_.text_; } + + MockErrorCollector error_collector_; + MockSourceTree source_tree_; + Importer importer_; +}; + +TEST_F(ImporterTest, Import) { + // Test normal importing. + AddFile("foo.proto", + "syntax = \"proto2\";\n" + "message Foo {}\n"); + + const FileDescriptor* file = importer_.Import("foo.proto"); + EXPECT_EQ("", error_collector_.text_); + ASSERT_TRUE(file != NULL); + + ASSERT_EQ(1, file->message_type_count()); + EXPECT_EQ("Foo", file->message_type(0)->name()); + + // Importing again should return same object. + EXPECT_EQ(file, importer_.Import("foo.proto")); +} + +TEST_F(ImporterTest, ImportNested) { + // Test that importing a file which imports another file works. + AddFile("foo.proto", + "syntax = \"proto2\";\n" + "import \"bar.proto\";\n" + "message Foo {\n" + " optional Bar bar = 1;\n" + "}\n"); + AddFile("bar.proto", + "syntax = \"proto2\";\n" + "message Bar {}\n"); + + // Note that both files are actually parsed by the first call to Import() + // here, since foo.proto imports bar.proto. The second call just returns + // the same ProtoFile for bar.proto which was constructed while importing + // foo.proto. We test that this is the case below by checking that bar + // is among foo's dependencies (by pointer). + const FileDescriptor* foo = importer_.Import("foo.proto"); + const FileDescriptor* bar = importer_.Import("bar.proto"); + EXPECT_EQ("", error_collector_.text_); + ASSERT_TRUE(foo != NULL); + ASSERT_TRUE(bar != NULL); + + // Check that foo's dependency is the same object as bar. + ASSERT_EQ(1, foo->dependency_count()); + EXPECT_EQ(bar, foo->dependency(0)); + + // Check that foo properly cross-links bar. + ASSERT_EQ(1, foo->message_type_count()); + ASSERT_EQ(1, bar->message_type_count()); + ASSERT_EQ(1, foo->message_type(0)->field_count()); + ASSERT_EQ(FieldDescriptor::TYPE_MESSAGE, + foo->message_type(0)->field(0)->type()); + EXPECT_EQ(bar->message_type(0), + foo->message_type(0)->field(0)->message_type()); +} + +TEST_F(ImporterTest, FileNotFound) { + // Error: Parsing a file that doesn't exist. + EXPECT_TRUE(importer_.Import("foo.proto") == NULL); + EXPECT_EQ( + "foo.proto:-1:0: File not found.\n", + error_collector_.text_); +} + +TEST_F(ImporterTest, ImportNotFound) { + // Error: Importing a file that doesn't exist. 
+ AddFile("foo.proto", + "syntax = \"proto2\";\n" + "import \"bar.proto\";\n"); + + EXPECT_TRUE(importer_.Import("foo.proto") == NULL); + EXPECT_EQ( + "bar.proto:-1:0: File not found.\n" + "foo.proto:-1:0: Import \"bar.proto\" was not found or had errors.\n", + error_collector_.text_); +} + +TEST_F(ImporterTest, RecursiveImport) { + // Error: Recursive import. + AddFile("recursive1.proto", + "syntax = \"proto2\";\n" + "import \"recursive2.proto\";\n"); + AddFile("recursive2.proto", + "syntax = \"proto2\";\n" + "import \"recursive1.proto\";\n"); + + EXPECT_TRUE(importer_.Import("recursive1.proto") == NULL); + EXPECT_EQ( + "recursive1.proto:-1:0: File recursively imports itself: recursive1.proto " + "-> recursive2.proto -> recursive1.proto\n" + "recursive2.proto:-1:0: Import \"recursive1.proto\" was not found " + "or had errors.\n" + "recursive1.proto:-1:0: Import \"recursive2.proto\" was not found " + "or had errors.\n", + error_collector_.text_); +} + +// TODO(sanjay): The MapField tests below more properly belong in +// descriptor_unittest, but are more convenient to test here. +TEST_F(ImporterTest, MapFieldValid) { + AddFile( + "map.proto", + "syntax = \"proto2\";\n" + "message Item {\n" + " required string key = 1;\n" + "}\n" + "message Map {\n" + " repeated Item items = 1 [experimental_map_key = \"key\"];\n" + "}\n" + ); + const FileDescriptor* file = importer_.Import("map.proto"); + ASSERT_TRUE(file != NULL) << error_collector_.text_; + EXPECT_EQ("", error_collector_.text_); + + // Check that Map::items points to Item::key + const Descriptor* item_type = file->FindMessageTypeByName("Item"); + ASSERT_TRUE(item_type != NULL); + const Descriptor* map_type = file->FindMessageTypeByName("Map"); + ASSERT_TRUE(map_type != NULL); + const FieldDescriptor* key_field = item_type->FindFieldByName("key"); + ASSERT_TRUE(key_field != NULL); + const FieldDescriptor* items_field = map_type->FindFieldByName("items"); + ASSERT_TRUE(items_field != NULL); + EXPECT_EQ(items_field->experimental_map_key(), key_field); +} + +TEST_F(ImporterTest, MapFieldNotRepeated) { + AddFile( + "map.proto", + "syntax = \"proto2\";\n" + "message Item {\n" + " required string key = 1;\n" + "}\n" + "message Map {\n" + " required Item items = 1 [experimental_map_key = \"key\"];\n" + "}\n" + ); + EXPECT_TRUE(importer_.Import("map.proto") == NULL); + EXPECT_SUBSTRING("only allowed for repeated fields", error()); +} + +TEST_F(ImporterTest, MapFieldNotMessageType) { + AddFile( + "map.proto", + "syntax = \"proto2\";\n" + "message Map {\n" + " repeated int32 items = 1 [experimental_map_key = \"key\"];\n" + "}\n" + ); + EXPECT_TRUE(importer_.Import("map.proto") == NULL); + EXPECT_SUBSTRING("only allowed for fields with a message type", error()); +} + +TEST_F(ImporterTest, MapFieldTypeNotFound) { + AddFile( + "map.proto", + "syntax = \"proto2\";\n" + "message Map {\n" + " repeated Unknown items = 1 [experimental_map_key = \"key\"];\n" + "}\n" + ); + EXPECT_TRUE(importer_.Import("map.proto") == NULL); + EXPECT_SUBSTRING("not defined", error()); +} + +TEST_F(ImporterTest, MapFieldKeyNotFound) { + AddFile( + "map.proto", + "syntax = \"proto2\";\n" + "message Item {\n" + " required string key = 1;\n" + "}\n" + "message Map {\n" + " repeated Item items = 1 [experimental_map_key = \"badkey\"];\n" + "}\n" + ); + EXPECT_TRUE(importer_.Import("map.proto") == NULL); + EXPECT_SUBSTRING("Could not find field", error()); +} + +TEST_F(ImporterTest, MapFieldKeyRepeated) { + AddFile( + "map.proto", + "syntax = \"proto2\";\n" + "message Item {\n" + " 
repeated string key = 1;\n" + "}\n" + "message Map {\n" + " repeated Item items = 1 [experimental_map_key = \"key\"];\n" + "}\n" + ); + EXPECT_TRUE(importer_.Import("map.proto") == NULL); + EXPECT_SUBSTRING("must not name a repeated field", error()); +} + +TEST_F(ImporterTest, MapFieldKeyNotScalar) { + AddFile( + "map.proto", + "syntax = \"proto2\";\n" + "message ItemKey { }\n" + "message Item {\n" + " required ItemKey key = 1;\n" + "}\n" + "message Map {\n" + " repeated Item items = 1 [experimental_map_key = \"key\"];\n" + "}\n" + ); + EXPECT_TRUE(importer_.Import("map.proto") == NULL); + EXPECT_SUBSTRING("must name a scalar or string", error()); +} + +// =================================================================== + +class DiskSourceTreeTest : public testing::Test { + protected: + virtual void SetUp() { + dirnames_.push_back(TestTempDir() + "/test_proto2_import_path_1"); + dirnames_.push_back(TestTempDir() + "/test_proto2_import_path_2"); + + for (int i = 0; i < dirnames_.size(); i++) { + if (File::Exists(dirnames_[i])) { + File::DeleteRecursively(dirnames_[i], NULL, NULL); + } + GOOGLE_CHECK(File::CreateDir(dirnames_[i].c_str(), DEFAULT_FILE_MODE)); + } + } + + virtual void TearDown() { + for (int i = 0; i < dirnames_.size(); i++) { + File::DeleteRecursively(dirnames_[i], NULL, NULL); + } + } + + void AddFile(const string& filename, const char* contents) { + File::WriteStringToFileOrDie(contents, filename); + } + + void AddSubdir(const string& dirname) { + GOOGLE_CHECK(File::CreateDir(dirname.c_str(), DEFAULT_FILE_MODE)); + } + + void ExpectFileContents(const string& filename, + const char* expected_contents) { + scoped_ptr input(source_tree_.Open(filename)); + + ASSERT_FALSE(input == NULL); + + // Read all the data from the file. + string file_contents; + const void* data; + int size; + while (input->Next(&data, &size)) { + file_contents.append(reinterpret_cast(data), size); + } + + EXPECT_EQ(expected_contents, file_contents); + } + + void ExpectFileNotFound(const string& filename) { + scoped_ptr input(source_tree_.Open(filename)); + EXPECT_TRUE(input == NULL); + } + + DiskSourceTree source_tree_; + + // Paths of two on-disk directories to use during the test. + vector dirnames_; +}; + +TEST_F(DiskSourceTreeTest, MapRoot) { + // Test opening a file in a directory that is mapped to the root of the + // source tree. + AddFile(dirnames_[0] + "/foo", "Hello World!"); + source_tree_.MapPath("", dirnames_[0]); + + ExpectFileContents("foo", "Hello World!"); + ExpectFileNotFound("bar"); +} + +TEST_F(DiskSourceTreeTest, MapDirectory) { + // Test opening a file in a directory that is mapped to somewhere other + // than the root of the source tree. + + AddFile(dirnames_[0] + "/foo", "Hello World!"); + source_tree_.MapPath("baz", dirnames_[0]); + + ExpectFileContents("baz/foo", "Hello World!"); + ExpectFileNotFound("baz/bar"); + ExpectFileNotFound("foo"); + ExpectFileNotFound("bar"); + + // Non-canonical file names should not work. + ExpectFileNotFound("baz//foo"); + ExpectFileNotFound("baz/../baz/foo"); + ExpectFileNotFound("baz/./foo"); + ExpectFileNotFound("baz/foo/"); +} + +TEST_F(DiskSourceTreeTest, NoParent) { + // Test that we cannot open files in a parent of a mapped directory. 
+ + AddFile(dirnames_[0] + "/foo", "Hello World!"); + AddSubdir(dirnames_[0] + "/bar"); + AddFile(dirnames_[0] + "/bar/baz", "Blah."); + source_tree_.MapPath("", dirnames_[0] + "/bar"); + + ExpectFileContents("baz", "Blah."); + ExpectFileNotFound("../foo"); + ExpectFileNotFound("../bar/baz"); +} + +TEST_F(DiskSourceTreeTest, MapFile) { + // Test opening a file that is mapped directly into the source tree. + + AddFile(dirnames_[0] + "/foo", "Hello World!"); + source_tree_.MapPath("foo", dirnames_[0] + "/foo"); + + ExpectFileContents("foo", "Hello World!"); + ExpectFileNotFound("bar"); +} + +TEST_F(DiskSourceTreeTest, SearchMultipleDirectories) { + // Test mapping and searching multiple directories. + + AddFile(dirnames_[0] + "/foo", "Hello World!"); + AddFile(dirnames_[1] + "/foo", "This file should be hidden."); + AddFile(dirnames_[1] + "/bar", "Goodbye World!"); + source_tree_.MapPath("", dirnames_[0]); + source_tree_.MapPath("", dirnames_[1]); + + ExpectFileContents("foo", "Hello World!"); + ExpectFileContents("bar", "Goodbye World!"); + ExpectFileNotFound("baz"); +} + +TEST_F(DiskSourceTreeTest, OrderingTrumpsSpecificity) { + // Test that directories are always searched in order, even when a latter + // directory is more-specific than a former one. + + // Create the "bar" directory so we can put a file in it. + ASSERT_TRUE(File::CreateDir((dirnames_[0] + "/bar").c_str(), + DEFAULT_FILE_MODE)); + + // Add files and map paths. + AddFile(dirnames_[0] + "/bar/foo", "Hello World!"); + AddFile(dirnames_[1] + "/foo", "This file should be hidden."); + source_tree_.MapPath("", dirnames_[0]); + source_tree_.MapPath("bar", dirnames_[1]); + + // Check. + ExpectFileContents("bar/foo", "Hello World!"); +} + +TEST_F(DiskSourceTreeTest, DiskFileToVirtualFile) { + // Test DiskFileToVirtualFile. + + AddFile(dirnames_[0] + "/foo", "Hello World!"); + AddFile(dirnames_[1] + "/foo", "This file should be hidden."); + source_tree_.MapPath("bar", dirnames_[0]); + source_tree_.MapPath("bar", dirnames_[1]); + + string virtual_file; + string shadowing_disk_file; + + EXPECT_EQ(DiskSourceTree::NO_MAPPING, + source_tree_.DiskFileToVirtualFile( + "/foo", &virtual_file, &shadowing_disk_file)); + + EXPECT_EQ(DiskSourceTree::SHADOWED, + source_tree_.DiskFileToVirtualFile( + dirnames_[1] + "/foo", &virtual_file, &shadowing_disk_file)); + EXPECT_EQ("bar/foo", virtual_file); + EXPECT_EQ(dirnames_[0] + "/foo", shadowing_disk_file); + + EXPECT_EQ(DiskSourceTree::CANNOT_OPEN, + source_tree_.DiskFileToVirtualFile( + dirnames_[1] + "/baz", &virtual_file, &shadowing_disk_file)); + EXPECT_EQ("bar/baz", virtual_file); + + EXPECT_EQ(DiskSourceTree::SUCCESS, + source_tree_.DiskFileToVirtualFile( + dirnames_[0] + "/foo", &virtual_file, &shadowing_disk_file)); + EXPECT_EQ("bar/foo", virtual_file); +} + +TEST_F(DiskSourceTreeTest, DiskFileToVirtualFileCanonicalization) { + // Test handling of "..", ".", etc. in DiskFileToVirtualFile(). + + source_tree_.MapPath("dir1", ".."); + source_tree_.MapPath("dir2", "../../foo"); + source_tree_.MapPath("dir3", "./foo/bar/."); + source_tree_.MapPath("dir4", "."); + source_tree_.MapPath("", "/qux"); + source_tree_.MapPath("dir5", "/quux/"); + + string virtual_file; + string shadowing_disk_file; + + // "../.." should not be considered to be under "..". + EXPECT_EQ(DiskSourceTree::NO_MAPPING, + source_tree_.DiskFileToVirtualFile( + "../../baz", &virtual_file, &shadowing_disk_file)); + + // "/foo" is not mapped (it should not be misintepreted as being under "."). 
+ EXPECT_EQ(DiskSourceTree::NO_MAPPING, + source_tree_.DiskFileToVirtualFile( + "/foo", &virtual_file, &shadowing_disk_file)); + +#ifdef WIN32 + // "C:\foo" is not mapped (it should not be misintepreted as being under "."). + EXPECT_EQ(DiskSourceTree::NO_MAPPING, + source_tree_.DiskFileToVirtualFile( + "C:\\foo", &virtual_file, &shadowing_disk_file)); +#endif // WIN32 + + // But "../baz" should be. + EXPECT_EQ(DiskSourceTree::CANNOT_OPEN, + source_tree_.DiskFileToVirtualFile( + "../baz", &virtual_file, &shadowing_disk_file)); + EXPECT_EQ("dir1/baz", virtual_file); + + // "../../foo/baz" is under "../../foo". + EXPECT_EQ(DiskSourceTree::CANNOT_OPEN, + source_tree_.DiskFileToVirtualFile( + "../../foo/baz", &virtual_file, &shadowing_disk_file)); + EXPECT_EQ("dir2/baz", virtual_file); + + // "foo/./bar/baz" is under "./foo/bar/.". + EXPECT_EQ(DiskSourceTree::CANNOT_OPEN, + source_tree_.DiskFileToVirtualFile( + "foo/bar/baz", &virtual_file, &shadowing_disk_file)); + EXPECT_EQ("dir3/baz", virtual_file); + + // "bar" is under ".". + EXPECT_EQ(DiskSourceTree::CANNOT_OPEN, + source_tree_.DiskFileToVirtualFile( + "bar", &virtual_file, &shadowing_disk_file)); + EXPECT_EQ("dir4/bar", virtual_file); + + // "/qux/baz" is under "/qux". + EXPECT_EQ(DiskSourceTree::CANNOT_OPEN, + source_tree_.DiskFileToVirtualFile( + "/qux/baz", &virtual_file, &shadowing_disk_file)); + EXPECT_EQ("baz", virtual_file); + + // "/quux/bar" is under "/quux". + EXPECT_EQ(DiskSourceTree::CANNOT_OPEN, + source_tree_.DiskFileToVirtualFile( + "/quux/bar", &virtual_file, &shadowing_disk_file)); + EXPECT_EQ("dir5/bar", virtual_file); +} + +TEST_F(DiskSourceTreeTest, VirtualFileToDiskFile) { + // Test VirtualFileToDiskFile. + + AddFile(dirnames_[0] + "/foo", "Hello World!"); + AddFile(dirnames_[1] + "/foo", "This file should be hidden."); + AddFile(dirnames_[1] + "/quux", "This file should not be hidden."); + source_tree_.MapPath("bar", dirnames_[0]); + source_tree_.MapPath("bar", dirnames_[1]); + + // Existent files, shadowed and non-shadowed case. + string disk_file; + EXPECT_TRUE(source_tree_.VirtualFileToDiskFile("bar/foo", &disk_file)); + EXPECT_EQ(dirnames_[0] + "/foo", disk_file); + EXPECT_TRUE(source_tree_.VirtualFileToDiskFile("bar/quux", &disk_file)); + EXPECT_EQ(dirnames_[1] + "/quux", disk_file); + + // Nonexistent file in existent directory and vice versa. + string not_touched = "not touched"; + EXPECT_FALSE(source_tree_.VirtualFileToDiskFile("bar/baz", ¬_touched)); + EXPECT_EQ("not touched", not_touched); + EXPECT_FALSE(source_tree_.VirtualFileToDiskFile("baz/foo", ¬_touched)); + EXPECT_EQ("not touched", not_touched); + + // Accept NULL as output parameter. + EXPECT_TRUE(source_tree_.VirtualFileToDiskFile("bar/foo", NULL)); + EXPECT_FALSE(source_tree_.VirtualFileToDiskFile("baz/foo", NULL)); +} + +} // namespace + +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_enum.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_enum.cc new file mode 100644 index 0000000000..9d7bcab64f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_enum.cc @@ -0,0 +1,243 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include + +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +EnumGenerator::EnumGenerator(const EnumDescriptor* descriptor) + : descriptor_(descriptor) { + for (int i = 0; i < descriptor_->value_count(); i++) { + const EnumValueDescriptor* value = descriptor_->value(i); + const EnumValueDescriptor* canonical_value = + descriptor_->FindValueByNumber(value->number()); + + if (value == canonical_value) { + canonical_values_.push_back(value); + } else { + Alias alias; + alias.value = value; + alias.canonical_value = canonical_value; + aliases_.push_back(alias); + } + } +} + +EnumGenerator::~EnumGenerator() {} + +void EnumGenerator::Generate(io::Printer* printer) { + if (HasDescriptorMethods(descriptor_)) { + printer->Print( + "public enum $classname$\n" + " implements com.google.protobuf.ProtocolMessageEnum {\n", + "classname", descriptor_->name()); + } else { + printer->Print( + "public enum $classname$\n" + " implements com.google.protobuf.Internal.EnumLite {\n", + "classname", descriptor_->name()); + } + printer->Indent(); + + for (int i = 0; i < canonical_values_.size(); i++) { + map vars; + vars["name"] = canonical_values_[i]->name(); + vars["index"] = SimpleItoa(canonical_values_[i]->index()); + vars["number"] = SimpleItoa(canonical_values_[i]->number()); + printer->Print(vars, + "$name$($index$, $number$),\n"); + } + + printer->Print( + ";\n" + "\n"); + + // ----------------------------------------------------------------- + + for (int i = 0; i < aliases_.size(); i++) { + map vars; + vars["classname"] = descriptor_->name(); + vars["name"] = aliases_[i].value->name(); + vars["canonical_name"] = aliases_[i].canonical_value->name(); + printer->Print(vars, + "public static final $classname$ $name$ = $canonical_name$;\n"); + } + + 
for (int i = 0; i < descriptor_->value_count(); i++) { + map vars; + vars["name"] = descriptor_->value(i)->name(); + vars["number"] = SimpleItoa(descriptor_->value(i)->number()); + printer->Print(vars, + "public static final int $name$_VALUE = $number$;\n"); + } + printer->Print("\n"); + + // ----------------------------------------------------------------- + + printer->Print( + "\n" + "public final int getNumber() { return value; }\n" + "\n" + "public static $classname$ valueOf(int value) {\n" + " switch (value) {\n", + "classname", descriptor_->name()); + printer->Indent(); + printer->Indent(); + + for (int i = 0; i < canonical_values_.size(); i++) { + printer->Print( + "case $number$: return $name$;\n", + "name", canonical_values_[i]->name(), + "number", SimpleItoa(canonical_values_[i]->number())); + } + + printer->Outdent(); + printer->Outdent(); + printer->Print( + " default: return null;\n" + " }\n" + "}\n" + "\n" + "public static com.google.protobuf.Internal.EnumLiteMap<$classname$>\n" + " internalGetValueMap() {\n" + " return internalValueMap;\n" + "}\n" + "private static com.google.protobuf.Internal.EnumLiteMap<$classname$>\n" + " internalValueMap =\n" + " new com.google.protobuf.Internal.EnumLiteMap<$classname$>() {\n" + " public $classname$ findValueByNumber(int number) {\n" + " return $classname$.valueOf(number);\n" + " }\n" + " };\n" + "\n", + "classname", descriptor_->name()); + + // ----------------------------------------------------------------- + // Reflection + + if (HasDescriptorMethods(descriptor_)) { + printer->Print( + "public final com.google.protobuf.Descriptors.EnumValueDescriptor\n" + " getValueDescriptor() {\n" + " return getDescriptor().getValues().get(index);\n" + "}\n" + "public final com.google.protobuf.Descriptors.EnumDescriptor\n" + " getDescriptorForType() {\n" + " return getDescriptor();\n" + "}\n" + "public static final com.google.protobuf.Descriptors.EnumDescriptor\n" + " getDescriptor() {\n"); + + // TODO(kenton): Cache statically? Note that we can't access descriptors + // at module init time because it wouldn't work with descriptor.proto, but + // we can cache the value the first time getDescriptor() is called. 
+ if (descriptor_->containing_type() == NULL) { + printer->Print( + " return $file$.getDescriptor().getEnumTypes().get($index$);\n", + "file", ClassName(descriptor_->file()), + "index", SimpleItoa(descriptor_->index())); + } else { + printer->Print( + " return $parent$.getDescriptor().getEnumTypes().get($index$);\n", + "parent", ClassName(descriptor_->containing_type()), + "index", SimpleItoa(descriptor_->index())); + } + + printer->Print( + "}\n" + "\n" + "private static final $classname$[] VALUES = {\n" + " ", + "classname", descriptor_->name()); + + for (int i = 0; i < descriptor_->value_count(); i++) { + printer->Print("$name$, ", + "name", descriptor_->value(i)->name()); + } + + printer->Print( + "\n" + "};\n" + "\n" + "public static $classname$ valueOf(\n" + " com.google.protobuf.Descriptors.EnumValueDescriptor desc) {\n" + " if (desc.getType() != getDescriptor()) {\n" + " throw new java.lang.IllegalArgumentException(\n" + " \"EnumValueDescriptor is not for this type.\");\n" + " }\n" + " return VALUES[desc.getIndex()];\n" + "}\n" + "\n", + "classname", descriptor_->name()); + + // index is only used for reflection; lite implementation does not need it + printer->Print("private final int index;\n"); + } + + // ----------------------------------------------------------------- + + printer->Print( + "private final int value;\n\n" + "private $classname$(int index, int value) {\n", + "classname", descriptor_->name()); + if (HasDescriptorMethods(descriptor_)) { + printer->Print(" this.index = index;\n"); + } + printer->Print( + " this.value = value;\n" + "}\n"); + + printer->Print( + "\n" + "// @@protoc_insertion_point(enum_scope:$full_name$)\n", + "full_name", descriptor_->full_name()); + + printer->Outdent(); + printer->Print("}\n\n"); +} + +} // namespace java +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_enum.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_enum.h new file mode 100644 index 0000000000..05ece1f13b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_enum.h @@ -0,0 +1,84 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifndef GOOGLE_PROTOBUF_COMPILER_JAVA_ENUM_H__ +#define GOOGLE_PROTOBUF_COMPILER_JAVA_ENUM_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { + namespace io { + class Printer; // printer.h + } +} + +namespace protobuf { +namespace compiler { +namespace java { + +class EnumGenerator { + public: + explicit EnumGenerator(const EnumDescriptor* descriptor); + ~EnumGenerator(); + + void Generate(io::Printer* printer); + + private: + const EnumDescriptor* descriptor_; + + // The proto language allows multiple enum constants to have the same numeric + // value. Java, however, does not allow multiple enum constants to be + // considered equivalent. We treat the first defined constant for any + // given numeric value as "canonical" and the rest as aliases of that + // canonical value. + vector canonical_values_; + + struct Alias { + const EnumValueDescriptor* value; + const EnumValueDescriptor* canonical_value; + }; + vector aliases_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(EnumGenerator); +}; + +} // namespace java +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_JAVA_ENUM_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_enum_field.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_enum_field.cc new file mode 100644 index 0000000000..72caa10b5d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_enum_field.cc @@ -0,0 +1,537 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include + +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +namespace { + +// TODO(kenton): Factor out a "SetCommonFieldVariables()" to get rid of +// repeat code between this and the other field types. +void SetEnumVariables(const FieldDescriptor* descriptor, + int messageBitIndex, + int builderBitIndex, + map* variables) { + (*variables)["name"] = + UnderscoresToCamelCase(descriptor); + (*variables)["capitalized_name"] = + UnderscoresToCapitalizedCamelCase(descriptor); + (*variables)["constant_name"] = FieldConstantName(descriptor); + (*variables)["number"] = SimpleItoa(descriptor->number()); + (*variables)["type"] = ClassName(descriptor->enum_type()); + (*variables)["default"] = DefaultValue(descriptor); + (*variables)["tag"] = SimpleItoa(internal::WireFormat::MakeTag(descriptor)); + (*variables)["tag_size"] = SimpleItoa( + internal::WireFormat::TagSize(descriptor->number(), GetType(descriptor))); + // TODO(birdo): Add @deprecated javadoc when generating javadoc is supported + // by the proto compiler + (*variables)["deprecation"] = descriptor->options().deprecated() + ? "@java.lang.Deprecated " : ""; + (*variables)["on_changed"] = + HasDescriptorMethods(descriptor->containing_type()) ? "onChanged();" : ""; + + // For singular messages and builders, one bit is used for the hasField bit. + (*variables)["get_has_field_bit_message"] = GenerateGetBit(messageBitIndex); + + (*variables)["get_has_field_bit_builder"] = GenerateGetBit(builderBitIndex); + (*variables)["set_has_field_bit_builder"] = GenerateSetBit(builderBitIndex); + (*variables)["clear_has_field_bit_builder"] = + GenerateClearBit(builderBitIndex); + + // For repated builders, one bit is used for whether the array is immutable. 
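
The has/set/clear bit variables prepared above expand into masks over a shared int (bitField0_ and friends in protobuf's generated Java). A stripped-down sketch of what one optional enum field's presence tracking compiles to, with an invented field name and the enum storage reduced to an int for brevity:

    // Sketch: presence tracking for one optional field via a shared bit field.
    // The mask corresponds to bit index 0.
    public class HasBitSketch {
      private int bitField0_;
      private int status_;   // stand-in for the enum field's storage

      public boolean hasStatus() {                        // $get_has_field_bit_*$
        return (bitField0_ & 0x00000001) == 0x00000001;
      }

      public HasBitSketch setStatus(int value) {
        bitField0_ |= 0x00000001;                         // $set_has_field_bit_builder$
        status_ = value;
        return this;
      }

      public HasBitSketch clearStatus() {
        bitField0_ &= ~0x00000001;                        // $clear_has_field_bit_builder$
        status_ = 0;
        return this;
      }
    }

GenerateGetBit/GenerateSetBit/GenerateClearBit (in the shared helper code) produce mask expressions of this shape for whatever bit index the field was assigned.
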
+ (*variables)["get_mutable_bit_builder"] = GenerateGetBit(builderBitIndex); + (*variables)["set_mutable_bit_builder"] = GenerateSetBit(builderBitIndex); + (*variables)["clear_mutable_bit_builder"] = GenerateClearBit(builderBitIndex); + + (*variables)["get_has_field_bit_from_local"] = + GenerateGetBitFromLocal(builderBitIndex); + (*variables)["set_has_field_bit_to_local"] = + GenerateSetBitToLocal(messageBitIndex); +} + +} // namespace + +// =================================================================== + +EnumFieldGenerator:: +EnumFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, + int builderBitIndex) + : descriptor_(descriptor), messageBitIndex_(messageBitIndex), + builderBitIndex_(builderBitIndex) { + SetEnumVariables(descriptor, messageBitIndex, builderBitIndex, &variables_); +} + +EnumFieldGenerator::~EnumFieldGenerator() {} + +int EnumFieldGenerator::GetNumBitsForMessage() const { + return 1; +} + +int EnumFieldGenerator::GetNumBitsForBuilder() const { + return 1; +} + +void EnumFieldGenerator:: +GenerateInterfaceMembers(io::Printer* printer) const { + printer->Print(variables_, + "$deprecation$boolean has$capitalized_name$();\n" + "$deprecation$$type$ get$capitalized_name$();\n"); +} + +void EnumFieldGenerator:: +GenerateMembers(io::Printer* printer) const { + printer->Print(variables_, + "private $type$ $name$_;\n" + "$deprecation$public boolean has$capitalized_name$() {\n" + " return $get_has_field_bit_message$;\n" + "}\n" + "$deprecation$public $type$ get$capitalized_name$() {\n" + " return $name$_;\n" + "}\n"); +} + +void EnumFieldGenerator:: +GenerateBuilderMembers(io::Printer* printer) const { + printer->Print(variables_, + "private $type$ $name$_ = $default$;\n" + "$deprecation$public boolean has$capitalized_name$() {\n" + " return $get_has_field_bit_builder$;\n" + "}\n" + "$deprecation$public $type$ get$capitalized_name$() {\n" + " return $name$_;\n" + "}\n" + "$deprecation$public Builder set$capitalized_name$($type$ value) {\n" + " if (value == null) {\n" + " throw new NullPointerException();\n" + " }\n" + " $set_has_field_bit_builder$;\n" + " $name$_ = value;\n" + " $on_changed$\n" + " return this;\n" + "}\n" + "$deprecation$public Builder clear$capitalized_name$() {\n" + " $clear_has_field_bit_builder$;\n" + " $name$_ = $default$;\n" + " $on_changed$\n" + " return this;\n" + "}\n"); +} + +void EnumFieldGenerator:: +GenerateFieldBuilderInitializationCode(io::Printer* printer) const { + // noop for enums +} + +void EnumFieldGenerator:: +GenerateInitializationCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_ = $default$;\n"); +} + +void EnumFieldGenerator:: +GenerateBuilderClearCode(io::Printer* printer) const { + printer->Print(variables_, + "$name$_ = $default$;\n" + "$clear_has_field_bit_builder$;\n"); +} + +void EnumFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + printer->Print(variables_, + "if (other.has$capitalized_name$()) {\n" + " set$capitalized_name$(other.get$capitalized_name$());\n" + "}\n"); +} + +void EnumFieldGenerator:: +GenerateBuildingCode(io::Printer* printer) const { + printer->Print(variables_, + "if ($get_has_field_bit_from_local$) {\n" + " $set_has_field_bit_to_local$;\n" + "}\n" + "result.$name$_ = $name$_;\n"); +} + +void EnumFieldGenerator:: +GenerateParsingCode(io::Printer* printer) const { + printer->Print(variables_, + "int rawValue = input.readEnum();\n" + "$type$ value = $type$.valueOf(rawValue);\n"); + if (HasUnknownFields(descriptor_->containing_type())) { + 
printer->Print(variables_, + "if (value == null) {\n" + " unknownFields.mergeVarintField($number$, rawValue);\n" + "} else {\n"); + } else { + printer->Print(variables_, + "if (value != null) {\n"); + } + printer->Print(variables_, + " $set_has_field_bit_builder$;\n" + " $name$_ = value;\n" + "}\n"); +} + +void EnumFieldGenerator:: +GenerateSerializationCode(io::Printer* printer) const { + printer->Print(variables_, + "if ($get_has_field_bit_message$) {\n" + " output.writeEnum($number$, $name$_.getNumber());\n" + "}\n"); +} + +void EnumFieldGenerator:: +GenerateSerializedSizeCode(io::Printer* printer) const { + printer->Print(variables_, + "if ($get_has_field_bit_message$) {\n" + " size += com.google.protobuf.CodedOutputStream\n" + " .computeEnumSize($number$, $name$_.getNumber());\n" + "}\n"); +} + +void EnumFieldGenerator:: +GenerateEqualsCode(io::Printer* printer) const { + printer->Print(variables_, + "result = result &&\n" + " (get$capitalized_name$() == other.get$capitalized_name$());\n"); +} + +void EnumFieldGenerator:: +GenerateHashCode(io::Printer* printer) const { + printer->Print(variables_, + "hash = (37 * hash) + $constant_name$;\n" + "hash = (53 * hash) + hashEnum(get$capitalized_name$());\n"); +} + +string EnumFieldGenerator::GetBoxedType() const { + return ClassName(descriptor_->enum_type()); +} + +// =================================================================== + +RepeatedEnumFieldGenerator:: +RepeatedEnumFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, + int builderBitIndex) + : descriptor_(descriptor), messageBitIndex_(messageBitIndex), + builderBitIndex_(builderBitIndex) { + SetEnumVariables(descriptor, messageBitIndex, builderBitIndex, &variables_); +} + +RepeatedEnumFieldGenerator::~RepeatedEnumFieldGenerator() {} + +int RepeatedEnumFieldGenerator::GetNumBitsForMessage() const { + return 0; +} + +int RepeatedEnumFieldGenerator::GetNumBitsForBuilder() const { + return 1; +} + +void RepeatedEnumFieldGenerator:: +GenerateInterfaceMembers(io::Printer* printer) const { + printer->Print(variables_, + "$deprecation$java.util.List<$type$> get$capitalized_name$List();\n" + "$deprecation$int get$capitalized_name$Count();\n" + "$deprecation$$type$ get$capitalized_name$(int index);\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateMembers(io::Printer* printer) const { + printer->Print(variables_, + "private java.util.List<$type$> $name$_;\n" + "$deprecation$public java.util.List<$type$> get$capitalized_name$List() {\n" + " return $name$_;\n" // note: unmodifiable list + "}\n" + "$deprecation$public int get$capitalized_name$Count() {\n" + " return $name$_.size();\n" + "}\n" + "$deprecation$public $type$ get$capitalized_name$(int index) {\n" + " return $name$_.get(index);\n" + "}\n"); + + if (descriptor_->options().packed() && + HasGeneratedMethods(descriptor_->containing_type())) { + printer->Print(variables_, + "private int $name$MemoizedSerializedSize;\n"); + } +} + +void RepeatedEnumFieldGenerator:: +GenerateBuilderMembers(io::Printer* printer) const { + printer->Print(variables_, + // One field is the list and the other field keeps track of whether the + // list is immutable. If it's immutable, the invariant is that it must + // either an instance of Collections.emptyList() or it's an ArrayList + // wrapped in a Collections.unmodifiableList() wrapper and nobody else has + // a refererence to the underlying ArrayList. This invariant allows us to + // share instances of lists between protocol buffers avoiding expensive + // memory allocations. 
Note, immutable is a strong guarantee here -- not + // just that the list cannot be modified via the reference but that the + // list can never be modified. + "private java.util.List<$type$> $name$_ =\n" + " java.util.Collections.emptyList();\n" + + "private void ensure$capitalized_name$IsMutable() {\n" + " if (!$get_mutable_bit_builder$) {\n" + " $name$_ = new java.util.ArrayList<$type$>($name$_);\n" + " $set_mutable_bit_builder$;\n" + " }\n" + "}\n" + + // Note: We return an unmodifiable list because otherwise the caller + // could hold on to the returned list and modify it after the message + // has been built, thus mutating the message which is supposed to be + // immutable. + "$deprecation$public java.util.List<$type$> get$capitalized_name$List() {\n" + " return java.util.Collections.unmodifiableList($name$_);\n" + "}\n" + "$deprecation$public int get$capitalized_name$Count() {\n" + " return $name$_.size();\n" + "}\n" + "$deprecation$public $type$ get$capitalized_name$(int index) {\n" + " return $name$_.get(index);\n" + "}\n" + "$deprecation$public Builder set$capitalized_name$(\n" + " int index, $type$ value) {\n" + " if (value == null) {\n" + " throw new NullPointerException();\n" + " }\n" + " ensure$capitalized_name$IsMutable();\n" + " $name$_.set(index, value);\n" + " $on_changed$\n" + " return this;\n" + "}\n" + "$deprecation$public Builder add$capitalized_name$($type$ value) {\n" + " if (value == null) {\n" + " throw new NullPointerException();\n" + " }\n" + " ensure$capitalized_name$IsMutable();\n" + " $name$_.add(value);\n" + " $on_changed$\n" + " return this;\n" + "}\n" + "$deprecation$public Builder addAll$capitalized_name$(\n" + " java.lang.Iterable values) {\n" + " ensure$capitalized_name$IsMutable();\n" + " super.addAll(values, $name$_);\n" + " $on_changed$\n" + " return this;\n" + "}\n" + "$deprecation$public Builder clear$capitalized_name$() {\n" + " $name$_ = java.util.Collections.emptyList();\n" + " $clear_mutable_bit_builder$;\n" + " $on_changed$\n" + " return this;\n" + "}\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateFieldBuilderInitializationCode(io::Printer* printer) const { + // noop for enums +} + +void RepeatedEnumFieldGenerator:: +GenerateInitializationCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_ = java.util.Collections.emptyList();\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateBuilderClearCode(io::Printer* printer) const { + printer->Print(variables_, + "$name$_ = java.util.Collections.emptyList();\n" + "$clear_mutable_bit_builder$;\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + // The code below does two optimizations: + // 1. If the other list is empty, there's nothing to do. This ensures we + // don't allocate a new array if we already have an immutable one. + // 2. If the other list is non-empty and our current list is empty, we can + // reuse the other list which is guaranteed to be immutable. + printer->Print(variables_, + "if (!other.$name$_.isEmpty()) {\n" + " if ($name$_.isEmpty()) {\n" + " $name$_ = other.$name$_;\n" + " $clear_mutable_bit_builder$;\n" + " } else {\n" + " ensure$capitalized_name$IsMutable();\n" + " $name$_.addAll(other.$name$_);\n" + " }\n" + " $on_changed$\n" + "}\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateBuildingCode(io::Printer* printer) const { + // The code below ensures that the result has an immutable list. If our + // list is immutable, we can just reuse it. If not, we make it immutable. 
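
The builder members above all enforce one invariant: the backing list is either Collections.emptyList(), an unmodifiable list that may be shared with built messages, or a private ArrayList that is safe to mutate, and the mutable bit records which case the builder is in. A condensed, self-contained sketch of that copy-on-first-write pattern (invented names, enum values standing in as plain ints):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    // Sketch of the copy-on-first-write list used by the generated builder.
    public class RepeatedEnumSketch {
      private boolean listIsMutable;                          // $get_mutable_bit_builder$
      private List<Integer> values_ = Collections.emptyList();

      private void ensureValuesIsMutable() {
        if (!listIsMutable) {
          values_ = new ArrayList<Integer>(values_);          // take a private copy
          listIsMutable = true;                               // $set_mutable_bit_builder$
        }
      }

      public RepeatedEnumSketch addValue(int value) {
        ensureValuesIsMutable();
        values_.add(value);
        return this;
      }

      // Callers only ever see an unmodifiable view, so a built message can keep
      // holding the same list instance without risk of later mutation.
      public List<Integer> getValuesList() {
        return Collections.unmodifiableList(values_);
      }

      // Equivalent of GenerateBuildingCode(): freeze the list and hand it over.
      public List<Integer> buildValues() {
        if (listIsMutable) {
          values_ = Collections.unmodifiableList(values_);
          listIsMutable = false;                              // $clear_mutable_bit_builder$
        }
        return values_;
      }
    }

The merging optimization above falls out of the same invariant: a builder whose own list is still empty can simply adopt the other message's already-immutable list instead of copying it.
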
+ printer->Print(variables_, + "if ($get_mutable_bit_builder$) {\n" + " $name$_ = java.util.Collections.unmodifiableList($name$_);\n" + " $clear_mutable_bit_builder$;\n" + "}\n" + "result.$name$_ = $name$_;\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateParsingCode(io::Printer* printer) const { + // Read and store the enum + printer->Print(variables_, + "int rawValue = input.readEnum();\n" + "$type$ value = $type$.valueOf(rawValue);\n"); + if (HasUnknownFields(descriptor_->containing_type())) { + printer->Print(variables_, + "if (value == null) {\n" + " unknownFields.mergeVarintField($number$, rawValue);\n" + "} else {\n"); + } else { + printer->Print(variables_, + "if (value != null) {\n"); + } + printer->Print(variables_, + " add$capitalized_name$(value);\n" + "}\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateParsingCodeFromPacked(io::Printer* printer) const { + // Wrap GenerateParsingCode's contents with a while loop. + + printer->Print(variables_, + "int length = input.readRawVarint32();\n" + "int oldLimit = input.pushLimit(length);\n" + "while(input.getBytesUntilLimit() > 0) {\n"); + printer->Indent(); + + GenerateParsingCode(printer); + + printer->Outdent(); + printer->Print(variables_, + "}\n" + "input.popLimit(oldLimit);\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateSerializationCode(io::Printer* printer) const { + if (descriptor_->options().packed()) { + printer->Print(variables_, + "if (get$capitalized_name$List().size() > 0) {\n" + " output.writeRawVarint32($tag$);\n" + " output.writeRawVarint32($name$MemoizedSerializedSize);\n" + "}\n" + "for (int i = 0; i < $name$_.size(); i++) {\n" + " output.writeEnumNoTag($name$_.get(i).getNumber());\n" + "}\n"); + } else { + printer->Print(variables_, + "for (int i = 0; i < $name$_.size(); i++) {\n" + " output.writeEnum($number$, $name$_.get(i).getNumber());\n" + "}\n"); + } +} + +void RepeatedEnumFieldGenerator:: +GenerateSerializedSizeCode(io::Printer* printer) const { + printer->Print(variables_, + "{\n" + " int dataSize = 0;\n"); + printer->Indent(); + + printer->Print(variables_, + "for (int i = 0; i < $name$_.size(); i++) {\n" + " dataSize += com.google.protobuf.CodedOutputStream\n" + " .computeEnumSizeNoTag($name$_.get(i).getNumber());\n" + "}\n"); + printer->Print( + "size += dataSize;\n"); + if (descriptor_->options().packed()) { + printer->Print(variables_, + "if (!get$capitalized_name$List().isEmpty()) {" + " size += $tag_size$;\n" + " size += com.google.protobuf.CodedOutputStream\n" + " .computeRawVarint32Size(dataSize);\n" + "}"); + } else { + printer->Print(variables_, + "size += $tag_size$ * $name$_.size();\n"); + } + + // cache the data size for packed fields. 
+ if (descriptor_->options().packed()) { + printer->Print(variables_, + "$name$MemoizedSerializedSize = dataSize;\n"); + } + + printer->Outdent(); + printer->Print("}\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateEqualsCode(io::Printer* printer) const { + printer->Print(variables_, + "result = result && get$capitalized_name$List()\n" + " .equals(other.get$capitalized_name$List());\n"); +} + +void RepeatedEnumFieldGenerator:: +GenerateHashCode(io::Printer* printer) const { + printer->Print(variables_, + "if (get$capitalized_name$Count() > 0) {\n" + " hash = (37 * hash) + $constant_name$;\n" + " hash = (53 * hash) + hashEnumList(get$capitalized_name$List());\n" + "}\n"); +} + +string RepeatedEnumFieldGenerator::GetBoxedType() const { + return ClassName(descriptor_->enum_type()); +} + +} // namespace java +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_enum_field.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_enum_field.h new file mode 100644 index 0000000000..0cad6be0ea --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_enum_field.h @@ -0,0 +1,121 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
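
For the packed case above, the wire layout is a single length-delimited record: the field's tag (wire type 2), a varint byte length, then the enum numbers back to back with no per-element tags; the size pass memoizes dataSize so the later serialization pass can write that length without recomputing it. A small self-contained sketch of the layout (invented names, non-negative numbers only for brevity):

    import java.io.ByteArrayOutputStream;

    // Sketch of the packed layout: tag, payload length, then untagged varints.
    public class PackedEnumSketch {
      private static void writeVarint(ByteArrayOutputStream out, int value) {
        while ((value & ~0x7F) != 0) {
          out.write((value & 0x7F) | 0x80);
          value >>>= 7;
        }
        out.write(value);
      }

      public static byte[] encodePacked(int fieldNumber, int[] enumNumbers) {
        ByteArrayOutputStream payload = new ByteArrayOutputStream();
        for (int n : enumNumbers) {
          writeVarint(payload, n);                 // writeEnumNoTag equivalent
        }
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        writeVarint(out, (fieldNumber << 3) | 2);  // tag: field number + wire type 2
        writeVarint(out, payload.size());          // the memoized dataSize
        byte[] bytes = payload.toByteArray();
        out.write(bytes, 0, bytes.length);
        return out.toByteArray();
      }
    }
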
+ +#ifndef GOOGLE_PROTOBUF_COMPILER_JAVA_ENUM_FIELD_H__ +#define GOOGLE_PROTOBUF_COMPILER_JAVA_ENUM_FIELD_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +class EnumFieldGenerator : public FieldGenerator { + public: + explicit EnumFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, int builderBitIndex); + ~EnumFieldGenerator(); + + // implements FieldGenerator --------------------------------------- + int GetNumBitsForMessage() const; + int GetNumBitsForBuilder() const; + void GenerateInterfaceMembers(io::Printer* printer) const; + void GenerateMembers(io::Printer* printer) const; + void GenerateBuilderMembers(io::Printer* printer) const; + void GenerateInitializationCode(io::Printer* printer) const; + void GenerateBuilderClearCode(io::Printer* printer) const; + void GenerateMergingCode(io::Printer* printer) const; + void GenerateBuildingCode(io::Printer* printer) const; + void GenerateParsingCode(io::Printer* printer) const; + void GenerateSerializationCode(io::Printer* printer) const; + void GenerateSerializedSizeCode(io::Printer* printer) const; + void GenerateFieldBuilderInitializationCode(io::Printer* printer) const; + void GenerateEqualsCode(io::Printer* printer) const; + void GenerateHashCode(io::Printer* printer) const; + + string GetBoxedType() const; + + private: + const FieldDescriptor* descriptor_; + map variables_; + const int messageBitIndex_; + const int builderBitIndex_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(EnumFieldGenerator); +}; + +class RepeatedEnumFieldGenerator : public FieldGenerator { + public: + explicit RepeatedEnumFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, int builderBitIndex); + ~RepeatedEnumFieldGenerator(); + + // implements FieldGenerator --------------------------------------- + int GetNumBitsForMessage() const; + int GetNumBitsForBuilder() const; + void GenerateInterfaceMembers(io::Printer* printer) const; + void GenerateMembers(io::Printer* printer) const; + void GenerateBuilderMembers(io::Printer* printer) const; + void GenerateInitializationCode(io::Printer* printer) const; + void GenerateBuilderClearCode(io::Printer* printer) const; + void GenerateMergingCode(io::Printer* printer) const; + void GenerateBuildingCode(io::Printer* printer) const; + void GenerateParsingCode(io::Printer* printer) const; + void GenerateParsingCodeFromPacked(io::Printer* printer) const; + void GenerateSerializationCode(io::Printer* printer) const; + void GenerateSerializedSizeCode(io::Printer* printer) const; + void GenerateFieldBuilderInitializationCode(io::Printer* printer) const; + void GenerateEqualsCode(io::Printer* printer) const; + void GenerateHashCode(io::Printer* printer) const; + + string GetBoxedType() const; + + private: + const FieldDescriptor* descriptor_; + map variables_; + const int messageBitIndex_; + const int builderBitIndex_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RepeatedEnumFieldGenerator); +}; + +} // namespace java +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_JAVA_ENUM_FIELD_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_extension.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_extension.cc new file mode 100644 index 0000000000..9b147c7766 --- /dev/null +++ 
b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_extension.cc @@ -0,0 +1,216 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +namespace { + +const char* TypeName(FieldDescriptor::Type field_type) { + switch (field_type) { + case FieldDescriptor::TYPE_INT32 : return "INT32"; + case FieldDescriptor::TYPE_UINT32 : return "UINT32"; + case FieldDescriptor::TYPE_SINT32 : return "SINT32"; + case FieldDescriptor::TYPE_FIXED32 : return "FIXED32"; + case FieldDescriptor::TYPE_SFIXED32: return "SFIXED32"; + case FieldDescriptor::TYPE_INT64 : return "INT64"; + case FieldDescriptor::TYPE_UINT64 : return "UINT64"; + case FieldDescriptor::TYPE_SINT64 : return "SINT64"; + case FieldDescriptor::TYPE_FIXED64 : return "FIXED64"; + case FieldDescriptor::TYPE_SFIXED64: return "SFIXED64"; + case FieldDescriptor::TYPE_FLOAT : return "FLOAT"; + case FieldDescriptor::TYPE_DOUBLE : return "DOUBLE"; + case FieldDescriptor::TYPE_BOOL : return "BOOL"; + case FieldDescriptor::TYPE_STRING : return "STRING"; + case FieldDescriptor::TYPE_BYTES : return "BYTES"; + case FieldDescriptor::TYPE_ENUM : return "ENUM"; + case FieldDescriptor::TYPE_GROUP : return "GROUP"; + case FieldDescriptor::TYPE_MESSAGE : return "MESSAGE"; + + // No default because we want the compiler to complain if any new + // types are added. 
+ } + + GOOGLE_LOG(FATAL) << "Can't get here."; + return NULL; +} + +} + +ExtensionGenerator::ExtensionGenerator(const FieldDescriptor* descriptor) + : descriptor_(descriptor) { + if (descriptor_->extension_scope() != NULL) { + scope_ = ClassName(descriptor_->extension_scope()); + } else { + scope_ = ClassName(descriptor_->file()); + } +} + +ExtensionGenerator::~ExtensionGenerator() {} + +// Initializes the vars referenced in the generated code templates. +void InitTemplateVars(const FieldDescriptor* descriptor, + const string& scope, + map* vars_pointer) { + map &vars = *vars_pointer; + vars["scope"] = scope; + vars["name"] = UnderscoresToCamelCase(descriptor); + vars["containing_type"] = ClassName(descriptor->containing_type()); + vars["number"] = SimpleItoa(descriptor->number()); + vars["constant_name"] = FieldConstantName(descriptor); + vars["index"] = SimpleItoa(descriptor->index()); + vars["default"] = + descriptor->is_repeated() ? "" : DefaultValue(descriptor); + vars["type_constant"] = TypeName(GetType(descriptor)); + vars["packed"] = descriptor->options().packed() ? "true" : "false"; + vars["enum_map"] = "null"; + vars["prototype"] = "null"; + + JavaType java_type = GetJavaType(descriptor); + string singular_type; + switch (java_type) { + case JAVATYPE_MESSAGE: + singular_type = ClassName(descriptor->message_type()); + vars["prototype"] = singular_type + ".getDefaultInstance()"; + break; + case JAVATYPE_ENUM: + singular_type = ClassName(descriptor->enum_type()); + vars["enum_map"] = singular_type + ".internalGetValueMap()"; + break; + default: + singular_type = BoxedPrimitiveTypeName(java_type); + break; + } + vars["type"] = descriptor->is_repeated() ? + "java.util.List<" + singular_type + ">" : singular_type; + vars["singular_type"] = singular_type; +} + +void ExtensionGenerator::Generate(io::Printer* printer) { + map vars; + InitTemplateVars(descriptor_, scope_, &vars); + printer->Print(vars, + "public static final int $constant_name$ = $number$;\n"); + + if (HasDescriptorMethods(descriptor_->file())) { + // Non-lite extensions + if (descriptor_->extension_scope() == NULL) { + // Non-nested + printer->Print( + vars, + "public static final\n" + " com.google.protobuf.GeneratedMessage.GeneratedExtension<\n" + " $containing_type$,\n" + " $type$> $name$ = com.google.protobuf.GeneratedMessage\n" + " .newFileScopedGeneratedExtension(\n" + " $singular_type$.class,\n" + " $prototype$);\n"); + } else { + // Nested + printer->Print( + vars, + "public static final\n" + " com.google.protobuf.GeneratedMessage.GeneratedExtension<\n" + " $containing_type$,\n" + " $type$> $name$ = com.google.protobuf.GeneratedMessage\n" + " .newMessageScopedGeneratedExtension(\n" + " $scope$.getDefaultInstance(),\n" + " $index$,\n" + " $singular_type$.class,\n" + " $prototype$);\n"); + } + } else { + // Lite extensions + if (descriptor_->is_repeated()) { + printer->Print( + vars, + "public static final\n" + " com.google.protobuf.GeneratedMessageLite.GeneratedExtension<\n" + " $containing_type$,\n" + " $type$> $name$ = com.google.protobuf.GeneratedMessageLite\n" + " .newRepeatedGeneratedExtension(\n" + " $containing_type$.getDefaultInstance(),\n" + " $prototype$,\n" + " $enum_map$,\n" + " $number$,\n" + " com.google.protobuf.WireFormat.FieldType.$type_constant$,\n" + " $packed$);\n"); + } else { + printer->Print( + vars, + "public static final\n" + " com.google.protobuf.GeneratedMessageLite.GeneratedExtension<\n" + " $containing_type$,\n" + " $type$> $name$ = com.google.protobuf.GeneratedMessageLite\n" + " 
.newSingularGeneratedExtension(\n" + " $containing_type$.getDefaultInstance(),\n" + " $default$,\n" + " $prototype$,\n" + " $enum_map$,\n" + " $number$,\n" + " com.google.protobuf.WireFormat.FieldType.$type_constant$);\n"); + } + } +} + +void ExtensionGenerator::GenerateNonNestedInitializationCode( + io::Printer* printer) { + if (descriptor_->extension_scope() == NULL && + HasDescriptorMethods(descriptor_->file())) { + // Only applies to non-nested, non-lite extensions. + printer->Print( + "$name$.internalInit(descriptor.getExtensions().get($index$));\n", + "name", UnderscoresToCamelCase(descriptor_), + "index", SimpleItoa(descriptor_->index())); + } +} + +void ExtensionGenerator::GenerateRegistrationCode(io::Printer* printer) { + printer->Print( + "registry.add($scope$.$name$);\n", + "scope", scope_, + "name", UnderscoresToCamelCase(descriptor_)); +} + +} // namespace java +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_extension.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_extension.h new file mode 100644 index 0000000000..009ed9ffac --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_extension.h @@ -0,0 +1,77 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
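
On the consumer side, the pieces generated above (the FIELD_NUMBER constant, the GeneratedExtension identifier, and the registry.add() line emitted by GenerateRegistrationCode()) are tied together through the file's registerAllExtensions() hook. A usage sketch, assuming protobuf-java on the classpath; the generated class and extension names in the comments are invented:

    import com.google.protobuf.ExtensionRegistry;

    // Sketch of the consumer side: a registry populated through the generated
    // registerAllExtensions() and passed to parseFrom(), so that extension field
    // numbers are decoded instead of being collected as unknown fields.
    public class ExtensionUsageSketch {
      public static ExtensionRegistry buildRegistry() {
        ExtensionRegistry registry = ExtensionRegistry.newInstance();
        // With a real generated file this would continue with:
        //   MyFileProtos.registerAllExtensions(registry);            // invented name
        //   MyMessage parsed = MyMessage.parseFrom(bytes, registry);  // invented name
        // which in turn runs the registry.add($scope$.$name$) lines produced by
        // GenerateRegistrationCode() above.
        return registry;
      }
    }
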
+ +#ifndef GOOGLE_PROTOBUF_COMPILER_JAVA_EXTENSION_H__ +#define GOOGLE_PROTOBUF_COMPILER_JAVA_EXTENSION_H__ + +#include + +#include + +namespace google { +namespace protobuf { + class FieldDescriptor; // descriptor.h + namespace io { + class Printer; // printer.h + } +} + +namespace protobuf { +namespace compiler { +namespace java { + +// Generates code for an extension, which may be within the scope of some +// message or may be at file scope. This is much simpler than FieldGenerator +// since extensions are just simple identifiers with interesting types. +class ExtensionGenerator { + public: + explicit ExtensionGenerator(const FieldDescriptor* descriptor); + ~ExtensionGenerator(); + + void Generate(io::Printer* printer); + void GenerateNonNestedInitializationCode(io::Printer* printer); + void GenerateRegistrationCode(io::Printer* printer); + + private: + const FieldDescriptor* descriptor_; + string scope_; + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ExtensionGenerator); +}; + +} // namespace java +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_JAVA_MESSAGE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_field.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_field.cc new file mode 100644 index 0000000000..c7d433c8ef --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_field.cc @@ -0,0 +1,137 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
+ +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +FieldGenerator::~FieldGenerator() {} + +void FieldGenerator::GenerateParsingCodeFromPacked(io::Printer* printer) const { + // Reaching here indicates a bug. Cases are: + // - This FieldGenerator should support packing, but this method should be + // overridden. + // - This FieldGenerator doesn't support packing, and this method should + // never have been called. + GOOGLE_LOG(FATAL) << "GenerateParsingCodeFromPacked() " + << "called on field generator that does not support packing."; +} + +FieldGeneratorMap::FieldGeneratorMap(const Descriptor* descriptor) + : descriptor_(descriptor), + field_generators_( + new scoped_ptr[descriptor->field_count()]), + extension_generators_( + new scoped_ptr[descriptor->extension_count()]) { + + // Construct all the FieldGenerators and assign them bit indices for their + // bit fields. + int messageBitIndex = 0; + int builderBitIndex = 0; + for (int i = 0; i < descriptor->field_count(); i++) { + FieldGenerator* generator = MakeGenerator(descriptor->field(i), + messageBitIndex, builderBitIndex); + field_generators_[i].reset(generator); + messageBitIndex += generator->GetNumBitsForMessage(); + builderBitIndex += generator->GetNumBitsForBuilder(); + } + for (int i = 0; i < descriptor->extension_count(); i++) { + FieldGenerator* generator = MakeGenerator(descriptor->extension(i), + messageBitIndex, builderBitIndex); + extension_generators_[i].reset(generator); + messageBitIndex += generator->GetNumBitsForMessage(); + builderBitIndex += generator->GetNumBitsForBuilder(); + } +} + +FieldGenerator* FieldGeneratorMap::MakeGenerator( + const FieldDescriptor* field, int messageBitIndex, int builderBitIndex) { + if (field->is_repeated()) { + switch (GetJavaType(field)) { + case JAVATYPE_MESSAGE: + return new RepeatedMessageFieldGenerator( + field, messageBitIndex, builderBitIndex); + case JAVATYPE_ENUM: + return new RepeatedEnumFieldGenerator( + field, messageBitIndex, builderBitIndex); + case JAVATYPE_STRING: + return new RepeatedStringFieldGenerator( + field, messageBitIndex, builderBitIndex); + default: + return new RepeatedPrimitiveFieldGenerator( + field, messageBitIndex, builderBitIndex); + } + } else { + switch (GetJavaType(field)) { + case JAVATYPE_MESSAGE: + return new MessageFieldGenerator( + field, messageBitIndex, builderBitIndex); + case JAVATYPE_ENUM: + return new EnumFieldGenerator( + field, messageBitIndex, builderBitIndex); + case JAVATYPE_STRING: + return new StringFieldGenerator( + field, messageBitIndex, builderBitIndex); + default: + return new PrimitiveFieldGenerator( + field, messageBitIndex, builderBitIndex); + } + } +} + +FieldGeneratorMap::~FieldGeneratorMap() {} + +const FieldGenerator& FieldGeneratorMap::get( + const FieldDescriptor* field) const { + GOOGLE_CHECK_EQ(field->containing_type(), descriptor_); + return *field_generators_[field->index()]; +} + +const FieldGenerator& FieldGeneratorMap::get_extension(int index) const { + return *extension_generators_[index]; +} + +} // namespace java +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_field.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_field.h new file mode 100644 index 0000000000..6097f357c5 --- /dev/null +++ 
b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_field.h @@ -0,0 +1,108 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
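
FieldGeneratorMap above is also where bit indices are budgeted: each field generator reports how many presence/mutability bits it needs for the message class and for the builder, and the running totals become positions inside shared bitField0_, bitField1_, ... ints. A sketch of the resulting builder-side layout for an invented message with two optional enum fields (a, b) and one repeated enum field (c):

    // Sketch of FieldGeneratorMap's bit budgeting:
    //   GetNumBitsForBuilder(): a -> 1, b -> 1, c -> 1  => builder bits 0, 1, 2
    //   GetNumBitsForMessage(): a -> 1, b -> 1, c -> 0  => message bits 0, 1
    // All of a builder's bits share one int until 32 are used, then the next int.
    public class BuilderBitsSketch {
      private int bitField0_;

      public boolean hasA()           { return (bitField0_ & 0x00000001) != 0; }
      public boolean hasB()           { return (bitField0_ & 0x00000002) != 0; }
      public boolean isCListMutable() { return (bitField0_ & 0x00000004) != 0; }

      public void setHasA()           { bitField0_ |= 0x00000001; }
      public void setHasB()           { bitField0_ |= 0x00000002; }
      public void setCListMutable()   { bitField0_ |= 0x00000004; }
    }
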
+ +#ifndef GOOGLE_PROTOBUF_COMPILER_JAVA_FIELD_H__ +#define GOOGLE_PROTOBUF_COMPILER_JAVA_FIELD_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { + namespace io { + class Printer; // printer.h + } +} + +namespace protobuf { +namespace compiler { +namespace java { + +class FieldGenerator { + public: + FieldGenerator() {} + virtual ~FieldGenerator(); + + virtual int GetNumBitsForMessage() const = 0; + virtual int GetNumBitsForBuilder() const = 0; + virtual void GenerateInterfaceMembers(io::Printer* printer) const = 0; + virtual void GenerateMembers(io::Printer* printer) const = 0; + virtual void GenerateBuilderMembers(io::Printer* printer) const = 0; + virtual void GenerateInitializationCode(io::Printer* printer) const = 0; + virtual void GenerateBuilderClearCode(io::Printer* printer) const = 0; + virtual void GenerateMergingCode(io::Printer* printer) const = 0; + virtual void GenerateBuildingCode(io::Printer* printer) const = 0; + virtual void GenerateParsingCode(io::Printer* printer) const = 0; + virtual void GenerateParsingCodeFromPacked(io::Printer* printer) const; + virtual void GenerateSerializationCode(io::Printer* printer) const = 0; + virtual void GenerateSerializedSizeCode(io::Printer* printer) const = 0; + virtual void GenerateFieldBuilderInitializationCode(io::Printer* printer) + const = 0; + + virtual void GenerateEqualsCode(io::Printer* printer) const = 0; + virtual void GenerateHashCode(io::Printer* printer) const = 0; + + virtual string GetBoxedType() const = 0; + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FieldGenerator); +}; + +// Convenience class which constructs FieldGenerators for a Descriptor. +class FieldGeneratorMap { + public: + explicit FieldGeneratorMap(const Descriptor* descriptor); + ~FieldGeneratorMap(); + + const FieldGenerator& get(const FieldDescriptor* field) const; + const FieldGenerator& get_extension(int index) const; + + private: + const Descriptor* descriptor_; + scoped_array > field_generators_; + scoped_array > extension_generators_; + + static FieldGenerator* MakeGenerator(const FieldDescriptor* field, + int messageBitIndex, int builderBitIndex); + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FieldGeneratorMap); +}; + +} // namespace java +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_JAVA_FIELD_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_file.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_file.cc new file mode 100644 index 0000000000..8968069f4f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_file.cc @@ -0,0 +1,428 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +namespace { + +// Recursively searches the given message to see if it contains any extensions. +bool UsesExtensions(const Message& message) { + const Reflection* reflection = message.GetReflection(); + + // We conservatively assume that unknown fields are extensions. + if (reflection->GetUnknownFields(message).field_count() > 0) return true; + + vector fields; + reflection->ListFields(message, &fields); + + for (int i = 0; i < fields.size(); i++) { + if (fields[i]->is_extension()) return true; + + if (GetJavaType(fields[i]) == JAVATYPE_MESSAGE) { + if (fields[i]->is_repeated()) { + int size = reflection->FieldSize(message, fields[i]); + for (int j = 0; j < size; j++) { + const Message& sub_message = + reflection->GetRepeatedMessage(message, fields[i], j); + if (UsesExtensions(sub_message)) return true; + } + } else { + const Message& sub_message = reflection->GetMessage(message, fields[i]); + if (UsesExtensions(sub_message)) return true; + } + } + } + + return false; +} + + +} // namespace + +FileGenerator::FileGenerator(const FileDescriptor* file) + : file_(file), + java_package_(FileJavaPackage(file)), + classname_(FileClassName(file)) { +} + +FileGenerator::~FileGenerator() {} + +bool FileGenerator::Validate(string* error) { + // Check that no class name matches the file's class name. This is a common + // problem that leads to Java compile errors that can be hard to understand. + // It's especially bad when using the java_multiple_files, since we would + // end up overwriting the outer class with one of the inner ones. 
+ + bool found_conflict = false; + for (int i = 0; i < file_->enum_type_count() && !found_conflict; i++) { + if (file_->enum_type(i)->name() == classname_) { + found_conflict = true; + } + } + for (int i = 0; i < file_->message_type_count() && !found_conflict; i++) { + if (file_->message_type(i)->name() == classname_) { + found_conflict = true; + } + } + for (int i = 0; i < file_->service_count() && !found_conflict; i++) { + if (file_->service(i)->name() == classname_) { + found_conflict = true; + } + } + + if (found_conflict) { + error->assign(file_->name()); + error->append( + ": Cannot generate Java output because the file's outer class name, \""); + error->append(classname_); + error->append( + "\", matches the name of one of the types declared inside it. " + "Please either rename the type or use the java_outer_classname " + "option to specify a different outer class name for the .proto file."); + return false; + } + + return true; +} + +void FileGenerator::Generate(io::Printer* printer) { + // We don't import anything because we refer to all classes by their + // fully-qualified names in the generated source. + printer->Print( + "// Generated by the protocol buffer compiler. DO NOT EDIT!\n" + "// source: $filename$\n" + "\n", + "filename", file_->name()); + if (!java_package_.empty()) { + printer->Print( + "package $package$;\n" + "\n", + "package", java_package_); + } + printer->Print( + "public final class $classname$ {\n" + " private $classname$() {}\n", + "classname", classname_); + printer->Indent(); + + // ----------------------------------------------------------------- + + printer->Print( + "public static void registerAllExtensions(\n" + " com.google.protobuf.ExtensionRegistry$lite$ registry) {\n", + "lite", HasDescriptorMethods(file_) ? "" : "Lite"); + + printer->Indent(); + + for (int i = 0; i < file_->extension_count(); i++) { + ExtensionGenerator(file_->extension(i)).GenerateRegistrationCode(printer); + } + + for (int i = 0; i < file_->message_type_count(); i++) { + MessageGenerator(file_->message_type(i)) + .GenerateExtensionRegistrationCode(printer); + } + + printer->Outdent(); + printer->Print( + "}\n"); + + // ----------------------------------------------------------------- + + if (!file_->options().java_multiple_files()) { + for (int i = 0; i < file_->enum_type_count(); i++) { + EnumGenerator(file_->enum_type(i)).Generate(printer); + } + for (int i = 0; i < file_->message_type_count(); i++) { + MessageGenerator messageGenerator(file_->message_type(i)); + messageGenerator.GenerateInterface(printer); + messageGenerator.Generate(printer); + } + if (HasGenericServices(file_)) { + for (int i = 0; i < file_->service_count(); i++) { + ServiceGenerator(file_->service(i)).Generate(printer); + } + } + } + + // Extensions must be generated in the outer class since they are values, + // not classes. + for (int i = 0; i < file_->extension_count(); i++) { + ExtensionGenerator(file_->extension(i)).Generate(printer); + } + + // Static variables. + for (int i = 0; i < file_->message_type_count(); i++) { + // TODO(kenton): Reuse MessageGenerator objects? + MessageGenerator(file_->message_type(i)).GenerateStaticVariables(printer); + } + + printer->Print("\n"); + + if (HasDescriptorMethods(file_)) { + GenerateEmbeddedDescriptor(printer); + } else { + printer->Print( + "static {\n"); + printer->Indent(); + + for (int i = 0; i < file_->message_type_count(); i++) { + // TODO(kenton): Reuse MessageGenerator objects? 
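
With java_multiple_files unset, everything Generate() emits above is nested inside a single outer class, which is exactly why Validate() refuses a top-level type whose name matches that class. Schematically, the generated file looks like this (invented names, bodies elided, protobuf-java assumed):

    // Sketch of the outer-class layout produced by FileGenerator::Generate()
    // for a hypothetical addressbook.proto without java_multiple_files.
    public final class AddressBookProtos {
      private AddressBookProtos() {}

      public static void registerAllExtensions(
          com.google.protobuf.ExtensionRegistry registry) {
        // one registry.add(...) call per file-scoped extension, plus a call into
        // each message's own extension registration code
      }

      public enum Level { /* nested file-level enum, as in the sketches above */ }

      // Each message class and its ...OrBuilder interface follow as nested types,
      // then services (when generic services are enabled), then the extension
      // identifiers and per-message static descriptor variables, and finally the
      // embedded-descriptor static block (descriptor mode) or a plain static
      // initializer (lite mode).
    }
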
+ MessageGenerator(file_->message_type(i)) + .GenerateStaticVariableInitializers(printer); + } + + printer->Outdent(); + printer->Print( + "}\n"); + } + + printer->Print( + "\n" + "// @@protoc_insertion_point(outer_class_scope)\n"); + + printer->Outdent(); + printer->Print("}\n"); +} + +void FileGenerator::GenerateEmbeddedDescriptor(io::Printer* printer) { + // Embed the descriptor. We simply serialize the entire FileDescriptorProto + // and embed it as a string literal, which is parsed and built into real + // descriptors at initialization time. We unfortunately have to put it in + // a string literal, not a byte array, because apparently using a literal + // byte array causes the Java compiler to generate *instructions* to + // initialize each and every byte of the array, e.g. as if you typed: + // b[0] = 123; b[1] = 456; b[2] = 789; + // This makes huge bytecode files and can easily hit the compiler's internal + // code size limits (error "code to large"). String literals are apparently + // embedded raw, which is what we want. + FileDescriptorProto file_proto; + file_->CopyTo(&file_proto); + + string file_data; + file_proto.SerializeToString(&file_data); + + printer->Print( + "public static com.google.protobuf.Descriptors.FileDescriptor\n" + " getDescriptor() {\n" + " return descriptor;\n" + "}\n" + "private static com.google.protobuf.Descriptors.FileDescriptor\n" + " descriptor;\n" + "static {\n" + " java.lang.String[] descriptorData = {\n"); + printer->Indent(); + printer->Indent(); + + // Only write 40 bytes per line. + static const int kBytesPerLine = 40; + for (int i = 0; i < file_data.size(); i += kBytesPerLine) { + if (i > 0) { + // Every 400 lines, start a new string literal, in order to avoid the + // 64k length limit. + if (i % 400 == 0) { + printer->Print(",\n"); + } else { + printer->Print(" +\n"); + } + } + printer->Print("\"$data$\"", + "data", CEscape(file_data.substr(i, kBytesPerLine))); + } + + printer->Outdent(); + printer->Print("\n};\n"); + + // ----------------------------------------------------------------- + // Create the InternalDescriptorAssigner. + + printer->Print( + "com.google.protobuf.Descriptors.FileDescriptor." + "InternalDescriptorAssigner assigner =\n" + " new com.google.protobuf.Descriptors.FileDescriptor." + "InternalDescriptorAssigner() {\n" + " public com.google.protobuf.ExtensionRegistry assignDescriptors(\n" + " com.google.protobuf.Descriptors.FileDescriptor root) {\n" + " descriptor = root;\n"); + + printer->Indent(); + printer->Indent(); + printer->Indent(); + + for (int i = 0; i < file_->message_type_count(); i++) { + // TODO(kenton): Reuse MessageGenerator objects? + MessageGenerator(file_->message_type(i)) + .GenerateStaticVariableInitializers(printer); + } + for (int i = 0; i < file_->extension_count(); i++) { + // TODO(kenton): Reuse ExtensionGenerator objects? + ExtensionGenerator(file_->extension(i)) + .GenerateNonNestedInitializationCode(printer); + } + + if (UsesExtensions(file_proto)) { + // Must construct an ExtensionRegistry containing all possible extensions + // and return it. 
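
The trade-off described above (string literals are embedded raw, while a literal byte array would be initialized element by element and trip the compiler's code-size limit) produces a static block of this general shape in the generated outer class. The sketch below uses invented data and names, but the protobuf 2.x runtime entry points are the ones the templates print:

    // Sketch of the descriptor-embedding block emitted by GenerateEmbeddedDescriptor().
    public final class DescriptorBlockSketch {
      public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
        return descriptor;
      }
      private static com.google.protobuf.Descriptors.FileDescriptor descriptor;

      static {
        // The serialized FileDescriptorProto, escaped into string chunks of at
        // most 40 bytes per line; a fresh literal starts every 400 lines so no
        // single constant exceeds the 64k class-file string limit.
        java.lang.String[] descriptorData = {
          "\n\021addressbook.proto" +
          "...remaining escaped descriptor bytes (placeholder)..."
        };
        com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
            new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
              public com.google.protobuf.ExtensionRegistry assignDescriptors(
                  com.google.protobuf.Descriptors.FileDescriptor root) {
                descriptor = root;
                // Per-message static variable initializers and non-nested
                // extension initialization run here; a populated
                // ExtensionRegistry is returned only if the descriptor data
                // itself uses extensions, otherwise null.
                return null;
              }
            };
        com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
            descriptorData,
            new com.google.protobuf.Descriptors.FileDescriptor[] { /* dependencies */ },
            assigner);
      }
    }
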
+ printer->Print( + "com.google.protobuf.ExtensionRegistry registry =\n" + " com.google.protobuf.ExtensionRegistry.newInstance();\n" + "registerAllExtensions(registry);\n"); + for (int i = 0; i < file_->dependency_count(); i++) { + if (ShouldIncludeDependency(file_->dependency(i))) { + printer->Print( + "$dependency$.registerAllExtensions(registry);\n", + "dependency", ClassName(file_->dependency(i))); + } + } + printer->Print( + "return registry;\n"); + } else { + printer->Print( + "return null;\n"); + } + + printer->Outdent(); + printer->Outdent(); + printer->Outdent(); + + printer->Print( + " }\n" + " };\n"); + + // ----------------------------------------------------------------- + // Invoke internalBuildGeneratedFileFrom() to build the file. + + printer->Print( + "com.google.protobuf.Descriptors.FileDescriptor\n" + " .internalBuildGeneratedFileFrom(descriptorData,\n" + " new com.google.protobuf.Descriptors.FileDescriptor[] {\n"); + + for (int i = 0; i < file_->dependency_count(); i++) { + if (ShouldIncludeDependency(file_->dependency(i))) { + printer->Print( + " $dependency$.getDescriptor(),\n", + "dependency", ClassName(file_->dependency(i))); + } + } + + printer->Print( + " }, assigner);\n"); + + printer->Outdent(); + printer->Print( + "}\n"); +} + +template +static void GenerateSibling(const string& package_dir, + const string& java_package, + const DescriptorClass* descriptor, + GeneratorContext* context, + vector* file_list, + const string& name_suffix, + void (GeneratorClass::*pfn)(io::Printer* printer)) { + string filename = package_dir + descriptor->name() + name_suffix + ".java"; + file_list->push_back(filename); + + scoped_ptr output(context->Open(filename)); + io::Printer printer(output.get(), '$'); + + printer.Print( + "// Generated by the protocol buffer compiler. 
DO NOT EDIT!\n" + "\n"); + if (!java_package.empty()) { + printer.Print( + "package $package$;\n" + "\n", + "package", java_package); + } + + GeneratorClass generator(descriptor); + (generator.*pfn)(&printer); +} + +void FileGenerator::GenerateSiblings(const string& package_dir, + GeneratorContext* context, + vector* file_list) { + if (file_->options().java_multiple_files()) { + for (int i = 0; i < file_->enum_type_count(); i++) { + GenerateSibling(package_dir, java_package_, + file_->enum_type(i), + context, file_list, "", + &EnumGenerator::Generate); + } + for (int i = 0; i < file_->message_type_count(); i++) { + GenerateSibling(package_dir, java_package_, + file_->message_type(i), + context, file_list, "OrBuilder", + &MessageGenerator::GenerateInterface); + GenerateSibling(package_dir, java_package_, + file_->message_type(i), + context, file_list, "", + &MessageGenerator::Generate); + } + if (HasGenericServices(file_)) { + for (int i = 0; i < file_->service_count(); i++) { + GenerateSibling(package_dir, java_package_, + file_->service(i), + context, file_list, "", + &ServiceGenerator::Generate); + } + } + } +} + +bool FileGenerator::ShouldIncludeDependency(const FileDescriptor* descriptor) { + return true; +} + +} // namespace java +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_file.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_file.h new file mode 100644 index 0000000000..5991146266 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_file.h @@ -0,0 +1,101 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
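
When java_multiple_files is set, GenerateSiblings() above instead writes each top-level enum, each message together with a sibling "OrBuilder" interface, and each service into its own .java file. The sibling interface for a message with one optional enum field is roughly the following sketch (invented names; Status refers to the enum from the earlier sketches, the has/get pair mirrors GenerateInterfaceMembers() in java_enum_field.cc, and the real interface additionally extends com.google.protobuf.MessageOrBuilder):

    // Sketch: the sibling FooOrBuilder.java written by GenerateSiblings()
    // when java_multiple_files = true.
    public interface FooOrBuilder {
      boolean hasStatus();
      Status getStatus();
    }
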
+ +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifndef GOOGLE_PROTOBUF_COMPILER_JAVA_FILE_H__ +#define GOOGLE_PROTOBUF_COMPILER_JAVA_FILE_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { + class FileDescriptor; // descriptor.h + namespace io { + class Printer; // printer.h + } + namespace compiler { + class GeneratorContext; // code_generator.h + } +} + +namespace protobuf { +namespace compiler { +namespace java { + +class FileGenerator { + public: + explicit FileGenerator(const FileDescriptor* file); + ~FileGenerator(); + + // Checks for problems that would otherwise lead to cryptic compile errors. + // Returns true if there are no problems, or writes an error description to + // the given string and returns false otherwise. + bool Validate(string* error); + + void Generate(io::Printer* printer); + + // If we aren't putting everything into one file, this will write all the + // files other than the outer file (i.e. one for each message, enum, and + // service type). + void GenerateSiblings(const string& package_dir, + GeneratorContext* generator_context, + vector* file_list); + + const string& java_package() { return java_package_; } + const string& classname() { return classname_; } + + + private: + // Returns whether the dependency should be included in the output file. + // Always returns true for opensource, but used internally at Google to help + // improve compatibility with version 1 of protocol buffers. + bool ShouldIncludeDependency(const FileDescriptor* descriptor); + + const FileDescriptor* file_; + string java_package_; + string classname_; + + + void GenerateEmbeddedDescriptor(io::Printer* printer); + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FileGenerator); +}; + +} // namespace java +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_JAVA_FILE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_generator.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_generator.cc new file mode 100644 index 0000000000..e6c79abcd1 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_generator.cc @@ -0,0 +1,128 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + + +JavaGenerator::JavaGenerator() {} +JavaGenerator::~JavaGenerator() {} + +bool JavaGenerator::Generate(const FileDescriptor* file, + const string& parameter, + GeneratorContext* context, + string* error) const { + // ----------------------------------------------------------------- + // parse generator options + + // Name a file where we will write a list of generated file names, one + // per line. + string output_list_file; + + + vector > options; + ParseGeneratorParameter(parameter, &options); + + for (int i = 0; i < options.size(); i++) { + if (options[i].first == "output_list_file") { + output_list_file = options[i].second; + } else { + *error = "Unknown generator option: " + options[i].first; + return false; + } + } + + // ----------------------------------------------------------------- + + + if (file->options().optimize_for() == FileOptions::LITE_RUNTIME && + file->options().java_generate_equals_and_hash()) { + *error = "The \"java_generate_equals_and_hash\" option is incompatible " + "with \"optimize_for = LITE_RUNTIME\". You must optimize for " + "SPEED or CODE_SIZE if you want to use this option."; + return false; + } + + FileGenerator file_generator(file); + if (!file_generator.Validate(error)) { + return false; + } + + string package_dir = JavaPackageToDir(file_generator.java_package()); + + vector all_files; + + string java_filename = package_dir; + java_filename += file_generator.classname(); + java_filename += ".java"; + all_files.push_back(java_filename); + + // Generate main java file. + scoped_ptr output( + context->Open(java_filename)); + io::Printer printer(output.get(), '$'); + file_generator.Generate(&printer); + + // Generate sibling files. + file_generator.GenerateSiblings(package_dir, context, &all_files); + + // Generate output list if requested. + if (!output_list_file.empty()) { + // Generate output list. This is just a simple text file placed in a + // deterministic location which lists the .java files being generated. 
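+    // (Illustrative only: the option is normally passed as a generator
+    // parameter on the protoc command line, e.g.
+    //   --java_out=output_list_file=java_sources.txt:out_dir
+    // where the list file name and output directory are placeholders.)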
+ scoped_ptr srclist_raw_output( + context->Open(output_list_file)); + io::Printer srclist_printer(srclist_raw_output.get(), '$'); + for (int i = 0; i < all_files.size(); i++) { + srclist_printer.Print("$filename$\n", "filename", all_files[i]); + } + } + + return true; +} + +} // namespace java +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_generator.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_generator.h new file mode 100644 index 0000000000..888b8d85e9 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_generator.h @@ -0,0 +1,72 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Generates Java code for a given .proto file. + +#ifndef GOOGLE_PROTOBUF_COMPILER_JAVA_GENERATOR_H__ +#define GOOGLE_PROTOBUF_COMPILER_JAVA_GENERATOR_H__ + +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +// CodeGenerator implementation which generates Java code. If you create your +// own protocol compiler binary and you want it to support Java output, you +// can do so by registering an instance of this CodeGenerator with the +// CommandLineInterface in your main() function. 
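+//
+// A minimal sketch of that registration, for illustration only (it is not
+// part of this header and assumes the usual protoc-style main() plus
+// includes of command_line_interface.h and java_generator.h):
+//
+//   int main(int argc, char* argv[]) {
+//     google::protobuf::compiler::CommandLineInterface cli;
+//     google::protobuf::compiler::java::JavaGenerator java_generator;
+//     cli.RegisterGenerator("--java_out", &java_generator,
+//                           "Generate Java source file.");
+//     return cli.Run(argc, argv);
+//   }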
+class LIBPROTOC_EXPORT JavaGenerator : public CodeGenerator { + public: + JavaGenerator(); + ~JavaGenerator(); + + // implements CodeGenerator ---------------------------------------- + bool Generate(const FileDescriptor* file, + const string& parameter, + GeneratorContext* context, + string* error) const; + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(JavaGenerator); +}; + +} // namespace java +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_JAVA_GENERATOR_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_helpers.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_helpers.cc new file mode 100644 index 0000000000..1b6f165310 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_helpers.cc @@ -0,0 +1,478 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include + +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +const char kThickSeparator[] = + "// ===================================================================\n"; +const char kThinSeparator[] = + "// -------------------------------------------------------------------\n"; + +namespace { + +const char* kDefaultPackage = ""; + +const string& FieldName(const FieldDescriptor* field) { + // Groups are hacky: The name of the field is just the lower-cased name + // of the group type. In Java, though, we would like to retain the original + // capitalization of the type name. 
+ if (GetType(field) == FieldDescriptor::TYPE_GROUP) { + return field->message_type()->name(); + } else { + return field->name(); + } +} + +string UnderscoresToCamelCaseImpl(const string& input, bool cap_next_letter) { + string result; + // Note: I distrust ctype.h due to locales. + for (int i = 0; i < input.size(); i++) { + if ('a' <= input[i] && input[i] <= 'z') { + if (cap_next_letter) { + result += input[i] + ('A' - 'a'); + } else { + result += input[i]; + } + cap_next_letter = false; + } else if ('A' <= input[i] && input[i] <= 'Z') { + if (i == 0 && !cap_next_letter) { + // Force first letter to lower-case unless explicitly told to + // capitalize it. + result += input[i] + ('a' - 'A'); + } else { + // Capital letters after the first are left as-is. + result += input[i]; + } + cap_next_letter = false; + } else if ('0' <= input[i] && input[i] <= '9') { + result += input[i]; + cap_next_letter = true; + } else { + cap_next_letter = true; + } + } + return result; +} + +} // namespace + +string UnderscoresToCamelCase(const FieldDescriptor* field) { + return UnderscoresToCamelCaseImpl(FieldName(field), false); +} + +string UnderscoresToCapitalizedCamelCase(const FieldDescriptor* field) { + return UnderscoresToCamelCaseImpl(FieldName(field), true); +} + +string UnderscoresToCamelCase(const MethodDescriptor* method) { + return UnderscoresToCamelCaseImpl(method->name(), false); +} + +string StripProto(const string& filename) { + if (HasSuffixString(filename, ".protodevel")) { + return StripSuffixString(filename, ".protodevel"); + } else { + return StripSuffixString(filename, ".proto"); + } +} + +string FileClassName(const FileDescriptor* file) { + if (file->options().has_java_outer_classname()) { + return file->options().java_outer_classname(); + } else { + string basename; + string::size_type last_slash = file->name().find_last_of('/'); + if (last_slash == string::npos) { + basename = file->name(); + } else { + basename = file->name().substr(last_slash + 1); + } + return UnderscoresToCamelCaseImpl(StripProto(basename), true); + } +} + +string FileJavaPackage(const FileDescriptor* file) { + string result; + + if (file->options().has_java_package()) { + result = file->options().java_package(); + } else { + result = kDefaultPackage; + if (!file->package().empty()) { + if (!result.empty()) result += '.'; + result += file->package(); + } + } + + + return result; +} + +string JavaPackageToDir(string package_name) { + string package_dir = + StringReplace(package_name, ".", "/", true); + if (!package_dir.empty()) package_dir += "/"; + return package_dir; +} + +string ToJavaName(const string& full_name, const FileDescriptor* file) { + string result; + if (file->options().java_multiple_files()) { + result = FileJavaPackage(file); + } else { + result = ClassName(file); + } + if (!result.empty()) { + result += '.'; + } + if (file->package().empty()) { + result += full_name; + } else { + // Strip the proto package from full_name since we've replaced it with + // the Java package. 
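+    // (For illustration: with proto package "foo.bar", a full_name of
+    // "foo.bar.Baz" contributes just "Baz" here; the names are examples.)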
+ result += full_name.substr(file->package().size() + 1); + } + return result; +} + +string ClassName(const FileDescriptor* descriptor) { + string result = FileJavaPackage(descriptor); + if (!result.empty()) result += '.'; + result += FileClassName(descriptor); + return result; +} + +string FieldConstantName(const FieldDescriptor *field) { + string name = field->name() + "_FIELD_NUMBER"; + UpperString(&name); + return name; +} + +FieldDescriptor::Type GetType(const FieldDescriptor* field) { + return field->type(); +} + +JavaType GetJavaType(const FieldDescriptor* field) { + switch (GetType(field)) { + case FieldDescriptor::TYPE_INT32: + case FieldDescriptor::TYPE_UINT32: + case FieldDescriptor::TYPE_SINT32: + case FieldDescriptor::TYPE_FIXED32: + case FieldDescriptor::TYPE_SFIXED32: + return JAVATYPE_INT; + + case FieldDescriptor::TYPE_INT64: + case FieldDescriptor::TYPE_UINT64: + case FieldDescriptor::TYPE_SINT64: + case FieldDescriptor::TYPE_FIXED64: + case FieldDescriptor::TYPE_SFIXED64: + return JAVATYPE_LONG; + + case FieldDescriptor::TYPE_FLOAT: + return JAVATYPE_FLOAT; + + case FieldDescriptor::TYPE_DOUBLE: + return JAVATYPE_DOUBLE; + + case FieldDescriptor::TYPE_BOOL: + return JAVATYPE_BOOLEAN; + + case FieldDescriptor::TYPE_STRING: + return JAVATYPE_STRING; + + case FieldDescriptor::TYPE_BYTES: + return JAVATYPE_BYTES; + + case FieldDescriptor::TYPE_ENUM: + return JAVATYPE_ENUM; + + case FieldDescriptor::TYPE_GROUP: + case FieldDescriptor::TYPE_MESSAGE: + return JAVATYPE_MESSAGE; + + // No default because we want the compiler to complain if any new + // types are added. + } + + GOOGLE_LOG(FATAL) << "Can't get here."; + return JAVATYPE_INT; +} + +const char* BoxedPrimitiveTypeName(JavaType type) { + switch (type) { + case JAVATYPE_INT : return "java.lang.Integer"; + case JAVATYPE_LONG : return "java.lang.Long"; + case JAVATYPE_FLOAT : return "java.lang.Float"; + case JAVATYPE_DOUBLE : return "java.lang.Double"; + case JAVATYPE_BOOLEAN: return "java.lang.Boolean"; + case JAVATYPE_STRING : return "java.lang.String"; + case JAVATYPE_BYTES : return "com.google.protobuf.ByteString"; + case JAVATYPE_ENUM : return NULL; + case JAVATYPE_MESSAGE: return NULL; + + // No default because we want the compiler to complain if any new + // JavaTypes are added. + } + + GOOGLE_LOG(FATAL) << "Can't get here."; + return NULL; +} + +bool AllAscii(const string& text) { + for (int i = 0; i < text.size(); i++) { + if ((text[i] & 0x80) != 0) { + return false; + } + } + return true; +} + +string DefaultValue(const FieldDescriptor* field) { + // Switch on CppType since we need to know which default_value_* method + // of FieldDescriptor to call. + switch (field->cpp_type()) { + case FieldDescriptor::CPPTYPE_INT32: + return SimpleItoa(field->default_value_int32()); + case FieldDescriptor::CPPTYPE_UINT32: + // Need to print as a signed int since Java has no unsigned. 
+ return SimpleItoa(static_cast(field->default_value_uint32())); + case FieldDescriptor::CPPTYPE_INT64: + return SimpleItoa(field->default_value_int64()) + "L"; + case FieldDescriptor::CPPTYPE_UINT64: + return SimpleItoa(static_cast(field->default_value_uint64())) + + "L"; + case FieldDescriptor::CPPTYPE_DOUBLE: { + double value = field->default_value_double(); + if (value == numeric_limits::infinity()) { + return "Double.POSITIVE_INFINITY"; + } else if (value == -numeric_limits::infinity()) { + return "Double.NEGATIVE_INFINITY"; + } else if (value != value) { + return "Double.NaN"; + } else { + return SimpleDtoa(value) + "D"; + } + } + case FieldDescriptor::CPPTYPE_FLOAT: { + float value = field->default_value_float(); + if (value == numeric_limits::infinity()) { + return "Float.POSITIVE_INFINITY"; + } else if (value == -numeric_limits::infinity()) { + return "Float.NEGATIVE_INFINITY"; + } else if (value != value) { + return "Float.NaN"; + } else { + return SimpleFtoa(value) + "F"; + } + } + case FieldDescriptor::CPPTYPE_BOOL: + return field->default_value_bool() ? "true" : "false"; + case FieldDescriptor::CPPTYPE_STRING: + if (GetType(field) == FieldDescriptor::TYPE_BYTES) { + if (field->has_default_value()) { + // See comments in Internal.java for gory details. + return strings::Substitute( + "com.google.protobuf.Internal.bytesDefaultValue(\"$0\")", + CEscape(field->default_value_string())); + } else { + return "com.google.protobuf.ByteString.EMPTY"; + } + } else { + if (AllAscii(field->default_value_string())) { + // All chars are ASCII. In this case CEscape() works fine. + return "\"" + CEscape(field->default_value_string()) + "\""; + } else { + // See comments in Internal.java for gory details. + return strings::Substitute( + "com.google.protobuf.Internal.stringDefaultValue(\"$0\")", + CEscape(field->default_value_string())); + } + } + + case FieldDescriptor::CPPTYPE_ENUM: + return ClassName(field->enum_type()) + "." + + field->default_value_enum()->name(); + + case FieldDescriptor::CPPTYPE_MESSAGE: + return ClassName(field->message_type()) + ".getDefaultInstance()"; + + // No default because we want the compiler to complain if any new + // types are added. + } + + GOOGLE_LOG(FATAL) << "Can't get here."; + return ""; +} + +bool IsDefaultValueJavaDefault(const FieldDescriptor* field) { + // Switch on CppType since we need to know which default_value_* method + // of FieldDescriptor to call. + switch (field->cpp_type()) { + case FieldDescriptor::CPPTYPE_INT32: + return field->default_value_int32() == 0; + case FieldDescriptor::CPPTYPE_UINT32: + return field->default_value_uint32() == 0; + case FieldDescriptor::CPPTYPE_INT64: + return field->default_value_int64() == 0L; + case FieldDescriptor::CPPTYPE_UINT64: + return field->default_value_uint64() == 0L; + case FieldDescriptor::CPPTYPE_DOUBLE: + return field->default_value_double() == 0.0; + case FieldDescriptor::CPPTYPE_FLOAT: + return field->default_value_float() == 0.0; + case FieldDescriptor::CPPTYPE_BOOL: + return field->default_value_bool() == false; + + case FieldDescriptor::CPPTYPE_STRING: + case FieldDescriptor::CPPTYPE_ENUM: + case FieldDescriptor::CPPTYPE_MESSAGE: + return false; + + // No default because we want the compiler to complain if any new + // types are added. 
+ } + + GOOGLE_LOG(FATAL) << "Can't get here."; + return false; +} + +const char* bit_masks[] = { + "0x00000001", + "0x00000002", + "0x00000004", + "0x00000008", + "0x00000010", + "0x00000020", + "0x00000040", + "0x00000080", + + "0x00000100", + "0x00000200", + "0x00000400", + "0x00000800", + "0x00001000", + "0x00002000", + "0x00004000", + "0x00008000", + + "0x00010000", + "0x00020000", + "0x00040000", + "0x00080000", + "0x00100000", + "0x00200000", + "0x00400000", + "0x00800000", + + "0x01000000", + "0x02000000", + "0x04000000", + "0x08000000", + "0x10000000", + "0x20000000", + "0x40000000", + "0x80000000", +}; + +string GetBitFieldName(int index) { + string varName = "bitField"; + varName += SimpleItoa(index); + varName += "_"; + return varName; +} + +string GetBitFieldNameForBit(int bitIndex) { + return GetBitFieldName(bitIndex / 32); +} + +string GenerateGetBit(int bitIndex) { + string varName = GetBitFieldNameForBit(bitIndex); + int bitInVarIndex = bitIndex % 32; + + string mask = bit_masks[bitInVarIndex]; + string result = "((" + varName + " & " + mask + ") == " + mask + ")"; + return result; +} + +string GenerateSetBit(int bitIndex) { + string varName = GetBitFieldNameForBit(bitIndex); + int bitInVarIndex = bitIndex % 32; + + string mask = bit_masks[bitInVarIndex]; + string result = varName + " |= " + mask; + return result; +} + +string GenerateClearBit(int bitIndex) { + string varName = GetBitFieldNameForBit(bitIndex); + int bitInVarIndex = bitIndex % 32; + + string mask = bit_masks[bitInVarIndex]; + string result = varName + " = (" + varName + " & ~" + mask + ")"; + return result; +} + +string GenerateGetBitFromLocal(int bitIndex) { + string varName = "from_" + GetBitFieldNameForBit(bitIndex); + int bitInVarIndex = bitIndex % 32; + + string mask = bit_masks[bitInVarIndex]; + string result = "((" + varName + " & " + mask + ") == " + mask + ")"; + return result; +} + +string GenerateSetBitToLocal(int bitIndex) { + string varName = "to_" + GetBitFieldNameForBit(bitIndex); + int bitInVarIndex = bitIndex % 32; + + string mask = bit_masks[bitInVarIndex]; + string result = varName + " |= " + mask; + return result; +} + +} // namespace java +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_helpers.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_helpers.h new file mode 100644 index 0000000000..4ae07f150f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_helpers.h @@ -0,0 +1,213 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifndef GOOGLE_PROTOBUF_COMPILER_JAVA_HELPERS_H__ +#define GOOGLE_PROTOBUF_COMPILER_JAVA_HELPERS_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +// Commonly-used separator comments. Thick is a line of '=', thin is a line +// of '-'. +extern const char kThickSeparator[]; +extern const char kThinSeparator[]; + +// Converts the field's name to camel-case, e.g. "foo_bar_baz" becomes +// "fooBarBaz" or "FooBarBaz", respectively. +string UnderscoresToCamelCase(const FieldDescriptor* field); +string UnderscoresToCapitalizedCamelCase(const FieldDescriptor* field); + +// Similar, but for method names. (Typically, this merely has the effect +// of lower-casing the first letter of the name.) +string UnderscoresToCamelCase(const MethodDescriptor* method); + +// Strips ".proto" or ".protodevel" from the end of a filename. +string StripProto(const string& filename); + +// Gets the unqualified class name for the file. Each .proto file becomes a +// single Java class, with all its contents nested in that class. +string FileClassName(const FileDescriptor* file); + +// Returns the file's Java package name. +string FileJavaPackage(const FileDescriptor* file); + +// Returns output directory for the given package name. +string JavaPackageToDir(string package_name); + +// Converts the given fully-qualified name in the proto namespace to its +// fully-qualified name in the Java namespace, given that it is in the given +// file. +string ToJavaName(const string& full_name, const FileDescriptor* file); + +// These return the fully-qualified class name corresponding to the given +// descriptor. +inline string ClassName(const Descriptor* descriptor) { + return ToJavaName(descriptor->full_name(), descriptor->file()); +} +inline string ClassName(const EnumDescriptor* descriptor) { + return ToJavaName(descriptor->full_name(), descriptor->file()); +} +inline string ClassName(const ServiceDescriptor* descriptor) { + return ToJavaName(descriptor->full_name(), descriptor->file()); +} +inline string ExtensionIdentifierName(const FieldDescriptor* descriptor) { + return ToJavaName(descriptor->full_name(), descriptor->file()); +} +string ClassName(const FileDescriptor* descriptor); + +// Get the unqualified name that should be used for a field's field +// number constant. +string FieldConstantName(const FieldDescriptor *field); + +// Returns the type of the FieldDescriptor. 
+// This does nothing interesting for the open source release, but is used for +// hacks that improve compatability with version 1 protocol buffers at Google. +FieldDescriptor::Type GetType(const FieldDescriptor* field); + +enum JavaType { + JAVATYPE_INT, + JAVATYPE_LONG, + JAVATYPE_FLOAT, + JAVATYPE_DOUBLE, + JAVATYPE_BOOLEAN, + JAVATYPE_STRING, + JAVATYPE_BYTES, + JAVATYPE_ENUM, + JAVATYPE_MESSAGE +}; + +JavaType GetJavaType(const FieldDescriptor* field); + +// Get the fully-qualified class name for a boxed primitive type, e.g. +// "java.lang.Integer" for JAVATYPE_INT. Returns NULL for enum and message +// types. +const char* BoxedPrimitiveTypeName(JavaType type); + +string DefaultValue(const FieldDescriptor* field); +bool IsDefaultValueJavaDefault(const FieldDescriptor* field); + +// Does this message class keep track of unknown fields? +inline bool HasUnknownFields(const Descriptor* descriptor) { + return descriptor->file()->options().optimize_for() != + FileOptions::LITE_RUNTIME; +} + +// Does this message class have generated parsing, serialization, and other +// standard methods for which reflection-based fallback implementations exist? +inline bool HasGeneratedMethods(const Descriptor* descriptor) { + return descriptor->file()->options().optimize_for() != + FileOptions::CODE_SIZE; +} + +// Does this message have specialized equals() and hashCode() methods? +inline bool HasEqualsAndHashCode(const Descriptor* descriptor) { + return descriptor->file()->options().java_generate_equals_and_hash(); +} + +// Does this message class have descriptor and reflection methods? +inline bool HasDescriptorMethods(const Descriptor* descriptor) { + return descriptor->file()->options().optimize_for() != + FileOptions::LITE_RUNTIME; +} +inline bool HasDescriptorMethods(const EnumDescriptor* descriptor) { + return descriptor->file()->options().optimize_for() != + FileOptions::LITE_RUNTIME; +} +inline bool HasDescriptorMethods(const FileDescriptor* descriptor) { + return descriptor->options().optimize_for() != + FileOptions::LITE_RUNTIME; +} + +inline bool HasNestedBuilders(const Descriptor* descriptor) { + // The proto-lite version doesn't support nested builders. + return descriptor->file()->options().optimize_for() != + FileOptions::LITE_RUNTIME; +} + +// Should we generate generic services for this file? +inline bool HasGenericServices(const FileDescriptor *file) { + return file->service_count() > 0 && + file->options().optimize_for() != FileOptions::LITE_RUNTIME && + file->options().java_generic_services(); +} + + +// Methods for shared bitfields. + +// Gets the name of the shared bitfield for the given index. +string GetBitFieldName(int index); + +// Gets the name of the shared bitfield for the given bit index. +// Effectively, GetBitFieldName(bitIndex / 32) +string GetBitFieldNameForBit(int bitIndex); + +// Generates the java code for the expression that returns the boolean value +// of the bit of the shared bitfields for the given bit index. +// Example: "((bitField1_ & 0x04) == 0x04)" +string GenerateGetBit(int bitIndex); + +// Generates the java code for the expression that sets the bit of the shared +// bitfields for the given bit index. +// Example: "bitField1_ = (bitField1_ | 0x04)" +string GenerateSetBit(int bitIndex); + +// Generates the java code for the expression that clears the bit of the shared +// bitfields for the given bit index. 
+// Example: "bitField1_ = (bitField1_ & ~0x04)" +string GenerateClearBit(int bitIndex); + +// Does the same as GenerateGetBit but operates on the bit field on a local +// variable. This is used by the builder to copy the value in the builder to +// the message. +// Example: "((from_bitField1_ & 0x04) == 0x04)" +string GenerateGetBitFromLocal(int bitIndex); + +// Does the same as GenerateSetBit but operates on the bit field on a local +// variable. This is used by the builder to copy the value in the builder to +// the message. +// Example: "to_bitField1_ = (to_bitField1_ | 0x04)" +string GenerateSetBitToLocal(int bitIndex); + +} // namespace java +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_JAVA_HELPERS_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_message.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_message.cc new file mode 100644 index 0000000000..4c087db54e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_message.cc @@ -0,0 +1,1287 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +using internal::WireFormat; +using internal::WireFormatLite; + +namespace { + +void PrintFieldComment(io::Printer* printer, const FieldDescriptor* field) { + // Print the field's proto-syntax definition as a comment. We don't want to + // print group bodies so we cut off after the first line. 
+ string def = field->DebugString(); + printer->Print("// $def$\n", + "def", def.substr(0, def.find_first_of('\n'))); +} + +struct FieldOrderingByNumber { + inline bool operator()(const FieldDescriptor* a, + const FieldDescriptor* b) const { + return a->number() < b->number(); + } +}; + +struct ExtensionRangeOrdering { + bool operator()(const Descriptor::ExtensionRange* a, + const Descriptor::ExtensionRange* b) const { + return a->start < b->start; + } +}; + +// Sort the fields of the given Descriptor by number into a new[]'d array +// and return it. +const FieldDescriptor** SortFieldsByNumber(const Descriptor* descriptor) { + const FieldDescriptor** fields = + new const FieldDescriptor*[descriptor->field_count()]; + for (int i = 0; i < descriptor->field_count(); i++) { + fields[i] = descriptor->field(i); + } + sort(fields, fields + descriptor->field_count(), + FieldOrderingByNumber()); + return fields; +} + +// Get an identifier that uniquely identifies this type within the file. +// This is used to declare static variables related to this type at the +// outermost file scope. +string UniqueFileScopeIdentifier(const Descriptor* descriptor) { + return "static_" + StringReplace(descriptor->full_name(), ".", "_", true); +} + +// Returns true if the message type has any required fields. If it doesn't, +// we can optimize out calls to its isInitialized() method. +// +// already_seen is used to avoid checking the same type multiple times +// (and also to protect against recursion). +static bool HasRequiredFields( + const Descriptor* type, + hash_set* already_seen) { + if (already_seen->count(type) > 0) { + // The type is already in cache. This means that either: + // a. The type has no required fields. + // b. We are in the midst of checking if the type has required fields, + // somewhere up the stack. In this case, we know that if the type + // has any required fields, they'll be found when we return to it, + // and the whole call to HasRequiredFields() will return true. + // Therefore, we don't have to check if this type has required fields + // here. + return false; + } + already_seen->insert(type); + + // If the type has extensions, an extension with message type could contain + // required fields, so we have to be conservative and assume such an + // extension exists. + if (type->extension_range_count() > 0) return true; + + for (int i = 0; i < type->field_count(); i++) { + const FieldDescriptor* field = type->field(i); + if (field->is_required()) { + return true; + } + if (GetJavaType(field) == JAVATYPE_MESSAGE) { + if (HasRequiredFields(field->message_type(), already_seen)) { + return true; + } + } + } + + return false; +} + +static bool HasRequiredFields(const Descriptor* type) { + hash_set already_seen; + return HasRequiredFields(type, &already_seen); +} + +} // namespace + +// =================================================================== + +MessageGenerator::MessageGenerator(const Descriptor* descriptor) + : descriptor_(descriptor), + field_generators_(descriptor) { +} + +MessageGenerator::~MessageGenerator() {} + +void MessageGenerator::GenerateStaticVariables(io::Printer* printer) { + if (HasDescriptorMethods(descriptor_)) { + // Because descriptor.proto (com.google.protobuf.DescriptorProtos) is + // used in the construction of descriptors, we have a tricky bootstrapping + // problem. To help control static initialization order, we make sure all + // descriptors and other static data that depends on them are members of + // the outermost class in the file. 
This way, they will be initialized in + // a deterministic order. + + map vars; + vars["identifier"] = UniqueFileScopeIdentifier(descriptor_); + vars["index"] = SimpleItoa(descriptor_->index()); + vars["classname"] = ClassName(descriptor_); + if (descriptor_->containing_type() != NULL) { + vars["parent"] = UniqueFileScopeIdentifier( + descriptor_->containing_type()); + } + if (descriptor_->file()->options().java_multiple_files()) { + // We can only make these package-private since the classes that use them + // are in separate files. + vars["private"] = ""; + } else { + vars["private"] = "private "; + } + + // The descriptor for this type. + printer->Print(vars, + "$private$static com.google.protobuf.Descriptors.Descriptor\n" + " internal_$identifier$_descriptor;\n"); + + // And the FieldAccessorTable. + printer->Print(vars, + "$private$static\n" + " com.google.protobuf.GeneratedMessage.FieldAccessorTable\n" + " internal_$identifier$_fieldAccessorTable;\n"); + } + + // Generate static members for all nested types. + for (int i = 0; i < descriptor_->nested_type_count(); i++) { + // TODO(kenton): Reuse MessageGenerator objects? + MessageGenerator(descriptor_->nested_type(i)) + .GenerateStaticVariables(printer); + } +} + +void MessageGenerator::GenerateStaticVariableInitializers( + io::Printer* printer) { + if (HasDescriptorMethods(descriptor_)) { + map vars; + vars["identifier"] = UniqueFileScopeIdentifier(descriptor_); + vars["index"] = SimpleItoa(descriptor_->index()); + vars["classname"] = ClassName(descriptor_); + if (descriptor_->containing_type() != NULL) { + vars["parent"] = UniqueFileScopeIdentifier( + descriptor_->containing_type()); + } + + // The descriptor for this type. + if (descriptor_->containing_type() == NULL) { + printer->Print(vars, + "internal_$identifier$_descriptor =\n" + " getDescriptor().getMessageTypes().get($index$);\n"); + } else { + printer->Print(vars, + "internal_$identifier$_descriptor =\n" + " internal_$parent$_descriptor.getNestedTypes().get($index$);\n"); + } + + // And the FieldAccessorTable. + printer->Print(vars, + "internal_$identifier$_fieldAccessorTable = new\n" + " com.google.protobuf.GeneratedMessage.FieldAccessorTable(\n" + " internal_$identifier$_descriptor,\n" + " new java.lang.String[] { "); + for (int i = 0; i < descriptor_->field_count(); i++) { + printer->Print( + "\"$field_name$\", ", + "field_name", + UnderscoresToCapitalizedCamelCase(descriptor_->field(i))); + } + printer->Print("},\n" + " $classname$.class,\n" + " $classname$.Builder.class);\n", + "classname", ClassName(descriptor_)); + } + + // Generate static member initializers for all nested types. + for (int i = 0; i < descriptor_->nested_type_count(); i++) { + // TODO(kenton): Reuse MessageGenerator objects? 
+ MessageGenerator(descriptor_->nested_type(i)) + .GenerateStaticVariableInitializers(printer); + } +} + +// =================================================================== + +void MessageGenerator::GenerateInterface(io::Printer* printer) { + + if (descriptor_->extension_range_count() > 0) { + if (HasDescriptorMethods(descriptor_)) { + printer->Print( + "public interface $classname$OrBuilder extends\n" + " com.google.protobuf.GeneratedMessage.\n" + " ExtendableMessageOrBuilder<$classname$> {\n", + "classname", descriptor_->name()); + } else { + printer->Print( + "public interface $classname$OrBuilder extends \n" + " com.google.protobuf.GeneratedMessageLite.\n" + " ExtendableMessageOrBuilder<$classname$> {\n", + "classname", descriptor_->name()); + } + } else { + if (HasDescriptorMethods(descriptor_)) { + printer->Print( + "public interface $classname$OrBuilder\n" + " extends com.google.protobuf.MessageOrBuilder {\n", + "classname", descriptor_->name()); + } else { + printer->Print( + "public interface $classname$OrBuilder\n" + " extends com.google.protobuf.MessageLiteOrBuilder {\n", + "classname", descriptor_->name()); + } + } + + printer->Indent(); + for (int i = 0; i < descriptor_->field_count(); i++) { + printer->Print("\n"); + PrintFieldComment(printer, descriptor_->field(i)); + field_generators_.get(descriptor_->field(i)) + .GenerateInterfaceMembers(printer); + } + printer->Outdent(); + + printer->Print("}\n"); +} + +// =================================================================== + +void MessageGenerator::Generate(io::Printer* printer) { + bool is_own_file = + descriptor_->containing_type() == NULL && + descriptor_->file()->options().java_multiple_files(); + + if (descriptor_->extension_range_count() > 0) { + if (HasDescriptorMethods(descriptor_)) { + printer->Print( + "public $static$ final class $classname$ extends\n" + " com.google.protobuf.GeneratedMessage.ExtendableMessage<\n" + " $classname$> implements $classname$OrBuilder {\n", + "static", is_own_file ? "" : "static", + "classname", descriptor_->name()); + } else { + printer->Print( + "public $static$ final class $classname$ extends\n" + " com.google.protobuf.GeneratedMessageLite.ExtendableMessage<\n" + " $classname$> implements $classname$OrBuilder {\n", + "static", is_own_file ? "" : "static", + "classname", descriptor_->name()); + } + } else { + if (HasDescriptorMethods(descriptor_)) { + printer->Print( + "public $static$ final class $classname$ extends\n" + " com.google.protobuf.GeneratedMessage\n" + " implements $classname$OrBuilder {\n", + "static", is_own_file ? "" : "static", + "classname", descriptor_->name()); + } else { + printer->Print( + "public $static$ final class $classname$ extends\n" + " com.google.protobuf.GeneratedMessageLite\n" + " implements $classname$OrBuilder {\n", + "static", is_own_file ? "" : "static", + "classname", descriptor_->name()); + } + } + printer->Indent(); + printer->Print( + "// Use $classname$.newBuilder() to construct.\n" + "private $classname$(Builder builder) {\n" + " super(builder);\n" + "}\n" + // Used when constructing the default instance, which cannot be initialized + // immediately because it may cyclically refer to other default instances. 
+ "private $classname$(boolean noInit) {}\n" + "\n" + "private static final $classname$ defaultInstance;\n" + "public static $classname$ getDefaultInstance() {\n" + " return defaultInstance;\n" + "}\n" + "\n" + "public $classname$ getDefaultInstanceForType() {\n" + " return defaultInstance;\n" + "}\n" + "\n", + "classname", descriptor_->name()); + + GenerateDescriptorMethods(printer); + + // Nested types + for (int i = 0; i < descriptor_->enum_type_count(); i++) { + EnumGenerator(descriptor_->enum_type(i)).Generate(printer); + } + + for (int i = 0; i < descriptor_->nested_type_count(); i++) { + MessageGenerator messageGenerator(descriptor_->nested_type(i)); + messageGenerator.GenerateInterface(printer); + messageGenerator.Generate(printer); + } + + // Integers for bit fields. + int totalBits = 0; + for (int i = 0; i < descriptor_->field_count(); i++) { + totalBits += field_generators_.get(descriptor_->field(i)) + .GetNumBitsForMessage(); + } + int totalInts = (totalBits + 31) / 32; + for (int i = 0; i < totalInts; i++) { + printer->Print("private int $bit_field_name$;\n", + "bit_field_name", GetBitFieldName(i)); + } + + // Fields + for (int i = 0; i < descriptor_->field_count(); i++) { + PrintFieldComment(printer, descriptor_->field(i)); + printer->Print("public static final int $constant_name$ = $number$;\n", + "constant_name", FieldConstantName(descriptor_->field(i)), + "number", SimpleItoa(descriptor_->field(i)->number())); + field_generators_.get(descriptor_->field(i)).GenerateMembers(printer); + printer->Print("\n"); + } + + // Called by the constructor, except in the case of the default instance, + // in which case this is called by static init code later on. + printer->Print("private void initFields() {\n"); + printer->Indent(); + for (int i = 0; i < descriptor_->field_count(); i++) { + field_generators_.get(descriptor_->field(i)) + .GenerateInitializationCode(printer); + } + printer->Outdent(); + printer->Print("}\n"); + + if (HasGeneratedMethods(descriptor_)) { + GenerateIsInitialized(printer, MEMOIZE); + GenerateMessageSerializationMethods(printer); + } + + if (HasEqualsAndHashCode(descriptor_)) { + GenerateEqualsAndHashCode(printer); + } + + GenerateParseFromMethods(printer); + GenerateBuilder(printer); + + // Carefully initialize the default instance in such a way that it doesn't + // conflict with other initialization. + printer->Print( + "\n" + "static {\n" + " defaultInstance = new $classname$(true);\n" + " defaultInstance.initFields();\n" + "}\n" + "\n" + "// @@protoc_insertion_point(class_scope:$full_name$)\n", + "classname", descriptor_->name(), + "full_name", descriptor_->full_name()); + + // Extensions must be declared after the defaultInstance is initialized + // because the defaultInstance is used by the extension to lazily retrieve + // the outer class's FileDescriptor. 
+ for (int i = 0; i < descriptor_->extension_count(); i++) { + ExtensionGenerator(descriptor_->extension(i)).Generate(printer); + } + + printer->Outdent(); + printer->Print("}\n\n"); +} + + +// =================================================================== + +void MessageGenerator:: +GenerateMessageSerializationMethods(io::Printer* printer) { + scoped_array sorted_fields( + SortFieldsByNumber(descriptor_)); + + vector sorted_extensions; + for (int i = 0; i < descriptor_->extension_range_count(); ++i) { + sorted_extensions.push_back(descriptor_->extension_range(i)); + } + sort(sorted_extensions.begin(), sorted_extensions.end(), + ExtensionRangeOrdering()); + + printer->Print( + "public void writeTo(com.google.protobuf.CodedOutputStream output)\n" + " throws java.io.IOException {\n"); + printer->Indent(); + // writeTo(CodedOutputStream output) might be invoked without + // getSerializedSize() ever being called, but we need the memoized + // sizes in case this message has packed fields. Rather than emit checks for + // each packed field, just call getSerializedSize() up front for all messages. + // In most cases, getSerializedSize() will have already been called anyway by + // one of the wrapper writeTo() methods, making this call cheap. + printer->Print( + "getSerializedSize();\n"); + + if (descriptor_->extension_range_count() > 0) { + if (descriptor_->options().message_set_wire_format()) { + printer->Print( + "com.google.protobuf.GeneratedMessage$lite$\n" + " .ExtendableMessage<$classname$>.ExtensionWriter extensionWriter =\n" + " newMessageSetExtensionWriter();\n", + "lite", HasDescriptorMethods(descriptor_) ? "" : "Lite", + "classname", ClassName(descriptor_)); + } else { + printer->Print( + "com.google.protobuf.GeneratedMessage$lite$\n" + " .ExtendableMessage<$classname$>.ExtensionWriter extensionWriter =\n" + " newExtensionWriter();\n", + "lite", HasDescriptorMethods(descriptor_) ? "" : "Lite", + "classname", ClassName(descriptor_)); + } + } + + // Merge the fields and the extension ranges, both sorted by field number. 
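+  // (A simple two-pointer merge: whichever sequence has the smaller next
+  // field number is emitted first, so output stays in field-number order.)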
+ for (int i = 0, j = 0; + i < descriptor_->field_count() || j < sorted_extensions.size(); + ) { + if (i == descriptor_->field_count()) { + GenerateSerializeOneExtensionRange(printer, sorted_extensions[j++]); + } else if (j == sorted_extensions.size()) { + GenerateSerializeOneField(printer, sorted_fields[i++]); + } else if (sorted_fields[i]->number() < sorted_extensions[j]->start) { + GenerateSerializeOneField(printer, sorted_fields[i++]); + } else { + GenerateSerializeOneExtensionRange(printer, sorted_extensions[j++]); + } + } + + if (HasUnknownFields(descriptor_)) { + if (descriptor_->options().message_set_wire_format()) { + printer->Print( + "getUnknownFields().writeAsMessageSetTo(output);\n"); + } else { + printer->Print( + "getUnknownFields().writeTo(output);\n"); + } + } + + printer->Outdent(); + printer->Print( + "}\n" + "\n" + "private int memoizedSerializedSize = -1;\n" + "public int getSerializedSize() {\n" + " int size = memoizedSerializedSize;\n" + " if (size != -1) return size;\n" + "\n" + " size = 0;\n"); + printer->Indent(); + + for (int i = 0; i < descriptor_->field_count(); i++) { + field_generators_.get(sorted_fields[i]).GenerateSerializedSizeCode(printer); + } + + if (descriptor_->extension_range_count() > 0) { + if (descriptor_->options().message_set_wire_format()) { + printer->Print( + "size += extensionsSerializedSizeAsMessageSet();\n"); + } else { + printer->Print( + "size += extensionsSerializedSize();\n"); + } + } + + if (HasUnknownFields(descriptor_)) { + if (descriptor_->options().message_set_wire_format()) { + printer->Print( + "size += getUnknownFields().getSerializedSizeAsMessageSet();\n"); + } else { + printer->Print( + "size += getUnknownFields().getSerializedSize();\n"); + } + } + + printer->Outdent(); + printer->Print( + " memoizedSerializedSize = size;\n" + " return size;\n" + "}\n" + "\n"); + + printer->Print( + "private static final long serialVersionUID = 0L;\n" + "@java.lang.Override\n" + "protected java.lang.Object writeReplace()\n" + " throws java.io.ObjectStreamException {\n" + " return super.writeReplace();\n" + "}\n" + "\n"); +} + +void MessageGenerator:: +GenerateParseFromMethods(io::Printer* printer) { + // Note: These are separate from GenerateMessageSerializationMethods() + // because they need to be generated even for messages that are optimized + // for code size. 
+ printer->Print( + "public static $classname$ parseFrom(\n" + " com.google.protobuf.ByteString data)\n" + " throws com.google.protobuf.InvalidProtocolBufferException {\n" + " return newBuilder().mergeFrom(data).buildParsed();\n" + "}\n" + "public static $classname$ parseFrom(\n" + " com.google.protobuf.ByteString data,\n" + " com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n" + " throws com.google.protobuf.InvalidProtocolBufferException {\n" + " return newBuilder().mergeFrom(data, extensionRegistry)\n" + " .buildParsed();\n" + "}\n" + "public static $classname$ parseFrom(byte[] data)\n" + " throws com.google.protobuf.InvalidProtocolBufferException {\n" + " return newBuilder().mergeFrom(data).buildParsed();\n" + "}\n" + "public static $classname$ parseFrom(\n" + " byte[] data,\n" + " com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n" + " throws com.google.protobuf.InvalidProtocolBufferException {\n" + " return newBuilder().mergeFrom(data, extensionRegistry)\n" + " .buildParsed();\n" + "}\n" + "public static $classname$ parseFrom(java.io.InputStream input)\n" + " throws java.io.IOException {\n" + " return newBuilder().mergeFrom(input).buildParsed();\n" + "}\n" + "public static $classname$ parseFrom(\n" + " java.io.InputStream input,\n" + " com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n" + " throws java.io.IOException {\n" + " return newBuilder().mergeFrom(input, extensionRegistry)\n" + " .buildParsed();\n" + "}\n" + "public static $classname$ parseDelimitedFrom(java.io.InputStream input)\n" + " throws java.io.IOException {\n" + " Builder builder = newBuilder();\n" + " if (builder.mergeDelimitedFrom(input)) {\n" + " return builder.buildParsed();\n" + " } else {\n" + " return null;\n" + " }\n" + "}\n" + "public static $classname$ parseDelimitedFrom(\n" + " java.io.InputStream input,\n" + " com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n" + " throws java.io.IOException {\n" + " Builder builder = newBuilder();\n" + " if (builder.mergeDelimitedFrom(input, extensionRegistry)) {\n" + " return builder.buildParsed();\n" + " } else {\n" + " return null;\n" + " }\n" + "}\n" + "public static $classname$ parseFrom(\n" + " com.google.protobuf.CodedInputStream input)\n" + " throws java.io.IOException {\n" + " return newBuilder().mergeFrom(input).buildParsed();\n" + "}\n" + "public static $classname$ parseFrom(\n" + " com.google.protobuf.CodedInputStream input,\n" + " com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n" + " throws java.io.IOException {\n" + " return newBuilder().mergeFrom(input, extensionRegistry)\n" + " .buildParsed();\n" + "}\n" + "\n", + "classname", ClassName(descriptor_)); +} + +void MessageGenerator::GenerateSerializeOneField( + io::Printer* printer, const FieldDescriptor* field) { + field_generators_.get(field).GenerateSerializationCode(printer); +} + +void MessageGenerator::GenerateSerializeOneExtensionRange( + io::Printer* printer, const Descriptor::ExtensionRange* range) { + printer->Print( + "extensionWriter.writeUntil($end$, output);\n", + "end", SimpleItoa(range->end)); +} + +// =================================================================== + +void MessageGenerator::GenerateBuilder(io::Printer* printer) { + printer->Print( + "public static Builder newBuilder() { return Builder.create(); }\n" + "public Builder newBuilderForType() { return newBuilder(); }\n" + "public static Builder newBuilder($classname$ prototype) {\n" + " return newBuilder().mergeFrom(prototype);\n" + "}\n" + "public Builder toBuilder() 
{ return newBuilder(this); }\n" + "\n", + "classname", ClassName(descriptor_)); + + if (HasNestedBuilders(descriptor_)) { + printer->Print( + "@java.lang.Override\n" + "protected Builder newBuilderForType(\n" + " com.google.protobuf.GeneratedMessage.BuilderParent parent) {\n" + " Builder builder = new Builder(parent);\n" + " return builder;\n" + "}\n"); + } + + if (descriptor_->extension_range_count() > 0) { + if (HasDescriptorMethods(descriptor_)) { + printer->Print( + "public static final class Builder extends\n" + " com.google.protobuf.GeneratedMessage.ExtendableBuilder<\n" + " $classname$, Builder> implements $classname$OrBuilder {\n", + "classname", ClassName(descriptor_)); + } else { + printer->Print( + "public static final class Builder extends\n" + " com.google.protobuf.GeneratedMessageLite.ExtendableBuilder<\n" + " $classname$, Builder> implements $classname$OrBuilder {\n", + "classname", ClassName(descriptor_)); + } + } else { + if (HasDescriptorMethods(descriptor_)) { + printer->Print( + "public static final class Builder extends\n" + " com.google.protobuf.GeneratedMessage.Builder\n" + " implements $classname$OrBuilder {\n", + "classname", ClassName(descriptor_)); + } else { + printer->Print( + "public static final class Builder extends\n" + " com.google.protobuf.GeneratedMessageLite.Builder<\n" + " $classname$, Builder>\n" + " implements $classname$OrBuilder {\n", + "classname", ClassName(descriptor_)); + } + } + printer->Indent(); + + GenerateDescriptorMethods(printer); + GenerateCommonBuilderMethods(printer); + + if (HasGeneratedMethods(descriptor_)) { + GenerateIsInitialized(printer, DONT_MEMOIZE); + GenerateBuilderParsingMethods(printer); + } + + // Integers for bit fields. + int totalBits = 0; + for (int i = 0; i < descriptor_->field_count(); i++) { + totalBits += field_generators_.get(descriptor_->field(i)) + .GetNumBitsForBuilder(); + } + int totalInts = (totalBits + 31) / 32; + for (int i = 0; i < totalInts; i++) { + printer->Print("private int $bit_field_name$;\n", + "bit_field_name", GetBitFieldName(i)); + } + + for (int i = 0; i < descriptor_->field_count(); i++) { + printer->Print("\n"); + PrintFieldComment(printer, descriptor_->field(i)); + field_generators_.get(descriptor_->field(i)) + .GenerateBuilderMembers(printer); + } + + printer->Print( + "\n" + "// @@protoc_insertion_point(builder_scope:$full_name$)\n", + "full_name", descriptor_->full_name()); + + printer->Outdent(); + printer->Print("}\n"); +} + +void MessageGenerator::GenerateDescriptorMethods(io::Printer* printer) { + if (HasDescriptorMethods(descriptor_)) { + printer->Print( + "public static final com.google.protobuf.Descriptors.Descriptor\n" + " getDescriptor() {\n" + " return $fileclass$.internal_$identifier$_descriptor;\n" + "}\n" + "\n" + "protected com.google.protobuf.GeneratedMessage.FieldAccessorTable\n" + " internalGetFieldAccessorTable() {\n" + " return $fileclass$.internal_$identifier$_fieldAccessorTable;\n" + "}\n" + "\n", + "fileclass", ClassName(descriptor_->file()), + "identifier", UniqueFileScopeIdentifier(descriptor_)); + } +} + +// =================================================================== + +void MessageGenerator::GenerateCommonBuilderMethods(io::Printer* printer) { + printer->Print( + "// Construct using $classname$.newBuilder()\n" + "private Builder() {\n" + " maybeForceBuilderInitialization();\n" + "}\n" + "\n", + "classname", ClassName(descriptor_)); + + if (HasDescriptorMethods(descriptor_)) { + printer->Print( + "private Builder(BuilderParent parent) {\n" + " 
super(parent);\n" + " maybeForceBuilderInitialization();\n" + "}\n", + "classname", ClassName(descriptor_)); + } + + + if (HasNestedBuilders(descriptor_)) { + printer->Print( + "private void maybeForceBuilderInitialization() {\n" + " if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {\n"); + + printer->Indent(); + printer->Indent(); + for (int i = 0; i < descriptor_->field_count(); i++) { + field_generators_.get(descriptor_->field(i)) + .GenerateFieldBuilderInitializationCode(printer); + } + printer->Outdent(); + printer->Outdent(); + + printer->Print( + " }\n" + "}\n"); + } else { + printer->Print( + "private void maybeForceBuilderInitialization() {\n" + "}\n"); + } + + printer->Print( + "private static Builder create() {\n" + " return new Builder();\n" + "}\n" + "\n" + "public Builder clear() {\n" + " super.clear();\n", + "classname", ClassName(descriptor_)); + + printer->Indent(); + + for (int i = 0; i < descriptor_->field_count(); i++) { + field_generators_.get(descriptor_->field(i)) + .GenerateBuilderClearCode(printer); + } + + printer->Outdent(); + + printer->Print( + " return this;\n" + "}\n" + "\n" + "public Builder clone() {\n" + " return create().mergeFrom(buildPartial());\n" + "}\n" + "\n", + "classname", ClassName(descriptor_)); + if (HasDescriptorMethods(descriptor_)) { + printer->Print( + "public com.google.protobuf.Descriptors.Descriptor\n" + " getDescriptorForType() {\n" + " return $classname$.getDescriptor();\n" + "}\n" + "\n", + "classname", ClassName(descriptor_)); + } + printer->Print( + "public $classname$ getDefaultInstanceForType() {\n" + " return $classname$.getDefaultInstance();\n" + "}\n" + "\n", + "classname", ClassName(descriptor_)); + + // ----------------------------------------------------------------- + + printer->Print( + "public $classname$ build() {\n" + " $classname$ result = buildPartial();\n" + " if (!result.isInitialized()) {\n" + " throw newUninitializedMessageException(result);\n" + " }\n" + " return result;\n" + "}\n" + "\n" + "private $classname$ buildParsed()\n" + " throws com.google.protobuf.InvalidProtocolBufferException {\n" + " $classname$ result = buildPartial();\n" + " if (!result.isInitialized()) {\n" + " throw newUninitializedMessageException(\n" + " result).asInvalidProtocolBufferException();\n" + " }\n" + " return result;\n" + "}\n" + "\n" + "public $classname$ buildPartial() {\n" + " $classname$ result = new $classname$(this);\n", + "classname", ClassName(descriptor_)); + + printer->Indent(); + + // Local vars for from and to bit fields to avoid accessing the builder and + // message over and over for these fields. Seems to provide a slight + // perforamance improvement in micro benchmark and this is also what proto1 + // code does. + int totalBuilderBits = 0; + int totalMessageBits = 0; + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldGenerator& field = field_generators_.get(descriptor_->field(i)); + totalBuilderBits += field.GetNumBitsForBuilder(); + totalMessageBits += field.GetNumBitsForMessage(); + } + int totalBuilderInts = (totalBuilderBits + 31) / 32; + int totalMessageInts = (totalMessageBits + 31) / 32; + for (int i = 0; i < totalBuilderInts; i++) { + printer->Print("int from_$bit_field_name$ = $bit_field_name$;\n", + "bit_field_name", GetBitFieldName(i)); + } + for (int i = 0; i < totalMessageInts; i++) { + printer->Print("int to_$bit_field_name$ = 0;\n", + "bit_field_name", GetBitFieldName(i)); + } + + // Output generation code for each field. 
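The builder methods emitted above split construction into build() and buildPartial(): buildPartial() always produces a message from whatever has been set, while build() additionally checks isInitialized() and throws. A minimal standalone sketch of that contract, assuming hypothetical Person/Builder names and a plain IllegalStateException in place of the generated newUninitializedMessageException:

    // Sketch only: real generated code extends com.google.protobuf.GeneratedMessage
    // and tracks presence with bit fields rather than null checks.
    final class Person {
        private final String name;   // "required" in the .proto sense
        private final String email;  // optional

        private Person(Builder b) {
            this.name = b.name;
            this.email = b.email;
        }

        boolean isInitialized() {
            return name != null;     // all required fields present
        }

        static final class Builder {
            private String name;
            private String email;

            Builder setName(String v)  { this.name = v;  return this; }
            Builder setEmail(String v) { this.email = v; return this; }

            // Never throws; may return a message missing required fields.
            Person buildPartial() {
                return new Person(this);
            }

            // Mirrors the generated build(): construct, validate, then return.
            Person build() {
                Person result = buildPartial();
                if (!result.isInitialized()) {
                    throw new IllegalStateException("missing required field: name");
                }
                return result;
            }
        }

        public static void main(String[] args) {
            Person partial = new Person.Builder().setEmail("a@b.c").buildPartial();
            System.out.println(partial.isInitialized());   // false, but no exception
            Person full = new Person.Builder().setName("Ada").build();
            System.out.println(full.isInitialized());      // true
        }
    }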
+ for (int i = 0; i < descriptor_->field_count(); i++) { + field_generators_.get(descriptor_->field(i)).GenerateBuildingCode(printer); + } + + // Copy the bit field results to the generated message + for (int i = 0; i < totalMessageInts; i++) { + printer->Print("result.$bit_field_name$ = to_$bit_field_name$;\n", + "bit_field_name", GetBitFieldName(i)); + } + + printer->Outdent(); + + if (HasDescriptorMethods(descriptor_)) { + printer->Print( + " onBuilt();\n"); + } + + printer->Print( + " return result;\n" + "}\n" + "\n", + "classname", ClassName(descriptor_)); + + // ----------------------------------------------------------------- + + if (HasGeneratedMethods(descriptor_)) { + // MergeFrom(Message other) requires the ability to distinguish the other + // messages type by its descriptor. + if (HasDescriptorMethods(descriptor_)) { + printer->Print( + "public Builder mergeFrom(com.google.protobuf.Message other) {\n" + " if (other instanceof $classname$) {\n" + " return mergeFrom(($classname$)other);\n" + " } else {\n" + " super.mergeFrom(other);\n" + " return this;\n" + " }\n" + "}\n" + "\n", + "classname", ClassName(descriptor_)); + } + + printer->Print( + "public Builder mergeFrom($classname$ other) {\n" + // Optimization: If other is the default instance, we know none of its + // fields are set so we can skip the merge. + " if (other == $classname$.getDefaultInstance()) return this;\n", + "classname", ClassName(descriptor_)); + printer->Indent(); + + for (int i = 0; i < descriptor_->field_count(); i++) { + field_generators_.get(descriptor_->field(i)).GenerateMergingCode(printer); + } + + printer->Outdent(); + + // if message type has extensions + if (descriptor_->extension_range_count() > 0) { + printer->Print( + " this.mergeExtensionFields(other);\n"); + } + + if (HasUnknownFields(descriptor_)) { + printer->Print( + " this.mergeUnknownFields(other.getUnknownFields());\n"); + } + + printer->Print( + " return this;\n" + "}\n" + "\n"); + } +} + +// =================================================================== + +void MessageGenerator::GenerateBuilderParsingMethods(io::Printer* printer) { + scoped_array sorted_fields( + SortFieldsByNumber(descriptor_)); + + printer->Print( + "public Builder mergeFrom(\n" + " com.google.protobuf.CodedInputStream input,\n" + " com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n" + " throws java.io.IOException {\n"); + printer->Indent(); + + if (HasUnknownFields(descriptor_)) { + printer->Print( + "com.google.protobuf.UnknownFieldSet.Builder unknownFields =\n" + " com.google.protobuf.UnknownFieldSet.newBuilder(\n" + " this.getUnknownFields());\n"); + } + + printer->Print( + "while (true) {\n"); + printer->Indent(); + + printer->Print( + "int tag = input.readTag();\n" + "switch (tag) {\n"); + printer->Indent(); + + if (HasUnknownFields(descriptor_)) { + printer->Print( + "case 0:\n" // zero signals EOF / limit reached + " this.setUnknownFields(unknownFields.build());\n" + " $on_changed$\n" + " return this;\n" + "default: {\n" + " if (!parseUnknownField(input, unknownFields,\n" + " extensionRegistry, tag)) {\n" + " this.setUnknownFields(unknownFields.build());\n" + " $on_changed$\n" + " return this;\n" // it's an endgroup tag + " }\n" + " break;\n" + "}\n", + "on_changed", HasDescriptorMethods(descriptor_) ? 
"onChanged();" : ""); + } else { + printer->Print( + "case 0:\n" // zero signals EOF / limit reached + " $on_changed$\n" + " return this;\n" + "default: {\n" + " if (!parseUnknownField(input, extensionRegistry, tag)) {\n" + " $on_changed$\n" + " return this;\n" // it's an endgroup tag + " }\n" + " break;\n" + "}\n", + "on_changed", HasDescriptorMethods(descriptor_) ? "onChanged();" : ""); + } + + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = sorted_fields[i]; + uint32 tag = WireFormatLite::MakeTag(field->number(), + WireFormat::WireTypeForFieldType(field->type())); + + printer->Print( + "case $tag$: {\n", + "tag", SimpleItoa(tag)); + printer->Indent(); + + field_generators_.get(field).GenerateParsingCode(printer); + + printer->Outdent(); + printer->Print( + " break;\n" + "}\n"); + + if (field->is_packable()) { + // To make packed = true wire compatible, we generate parsing code from a + // packed version of this field regardless of field->options().packed(). + uint32 packed_tag = WireFormatLite::MakeTag(field->number(), + WireFormatLite::WIRETYPE_LENGTH_DELIMITED); + printer->Print( + "case $tag$: {\n", + "tag", SimpleItoa(packed_tag)); + printer->Indent(); + + field_generators_.get(field).GenerateParsingCodeFromPacked(printer); + + printer->Outdent(); + printer->Print( + " break;\n" + "}\n"); + } + } + + printer->Outdent(); + printer->Outdent(); + printer->Outdent(); + printer->Print( + " }\n" // switch (tag) + " }\n" // while (true) + "}\n" + + "\n"); +} + +// =================================================================== + +void MessageGenerator::GenerateIsInitialized( + io::Printer* printer, UseMemoization useMemoization) { + bool memoization = useMemoization == MEMOIZE; + if (memoization) { + // Memoizes whether the protocol buffer is fully initialized (has all + // required fields). -1 means not yet computed. 0 means false and 1 means + // true. + printer->Print( + "private byte memoizedIsInitialized = -1;\n"); + } + printer->Print( + "public final boolean isInitialized() {\n"); + printer->Indent(); + + if (memoization) { + printer->Print( + "byte isInitialized = memoizedIsInitialized;\n" + "if (isInitialized != -1) return isInitialized == 1;\n" + "\n"); + } + + // Check that all required fields in this message are set. + // TODO(kenton): We can optimize this when we switch to putting all the + // "has" fields into a single bitfield. + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + + if (field->is_required()) { + printer->Print( + "if (!has$name$()) {\n" + " $memoize$\n" + " return false;\n" + "}\n", + "name", UnderscoresToCapitalizedCamelCase(field), + "memoize", memoization ? "memoizedIsInitialized = 0;" : ""); + } + } + + // Now check that all embedded messages are initialized. + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + if (GetJavaType(field) == JAVATYPE_MESSAGE && + HasRequiredFields(field->message_type())) { + switch (field->label()) { + case FieldDescriptor::LABEL_REQUIRED: + printer->Print( + "if (!get$name$().isInitialized()) {\n" + " $memoize$\n" + " return false;\n" + "}\n", + "type", ClassName(field->message_type()), + "name", UnderscoresToCapitalizedCamelCase(field), + "memoize", memoization ? 
"memoizedIsInitialized = 0;" : ""); + break; + case FieldDescriptor::LABEL_OPTIONAL: + printer->Print( + "if (has$name$()) {\n" + " if (!get$name$().isInitialized()) {\n" + " $memoize$\n" + " return false;\n" + " }\n" + "}\n", + "type", ClassName(field->message_type()), + "name", UnderscoresToCapitalizedCamelCase(field), + "memoize", memoization ? "memoizedIsInitialized = 0;" : ""); + break; + case FieldDescriptor::LABEL_REPEATED: + printer->Print( + "for (int i = 0; i < get$name$Count(); i++) {\n" + " if (!get$name$(i).isInitialized()) {\n" + " $memoize$\n" + " return false;\n" + " }\n" + "}\n", + "type", ClassName(field->message_type()), + "name", UnderscoresToCapitalizedCamelCase(field), + "memoize", memoization ? "memoizedIsInitialized = 0;" : ""); + break; + } + } + } + + if (descriptor_->extension_range_count() > 0) { + printer->Print( + "if (!extensionsAreInitialized()) {\n" + " $memoize$\n" + " return false;\n" + "}\n", + "memoize", memoization ? "memoizedIsInitialized = 0;" : ""); + } + + printer->Outdent(); + + if (memoization) { + printer->Print( + " memoizedIsInitialized = 1;\n"); + } + + printer->Print( + " return true;\n" + "}\n" + "\n"); +} + +// =================================================================== + +void MessageGenerator::GenerateEqualsAndHashCode(io::Printer* printer) { + printer->Print( + "@java.lang.Override\n" + "public boolean equals(final java.lang.Object obj) {\n"); + printer->Indent(); + printer->Print( + "if (obj == this) {\n" + " return true;\n" + "}\n" + "if (!(obj instanceof $classname$)) {\n" + " return super.equals(obj);\n" + "}\n" + "$classname$ other = ($classname$) obj;\n" + "\n", + "classname", ClassName(descriptor_)); + + printer->Print("boolean result = true;\n"); + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + if (!field->is_repeated()) { + printer->Print( + "result = result && (has$name$() == other.has$name$());\n" + "if (has$name$()) {\n", + "name", UnderscoresToCapitalizedCamelCase(field)); + printer->Indent(); + } + field_generators_.get(field).GenerateEqualsCode(printer); + if (!field->is_repeated()) { + printer->Outdent(); + printer->Print( + "}\n"); + } + } + if (HasDescriptorMethods(descriptor_)) { + printer->Print( + "result = result &&\n" + " getUnknownFields().equals(other.getUnknownFields());\n"); + if (descriptor_->extension_range_count() > 0) { + printer->Print( + "result = result &&\n" + " getExtensionFields().equals(other.getExtensionFields());\n"); + } + } + printer->Print( + "return result;\n"); + printer->Outdent(); + printer->Print( + "}\n" + "\n"); + + printer->Print( + "@java.lang.Override\n" + "public int hashCode() {\n"); + printer->Indent(); + printer->Print( + "int hash = 41;\n" + "hash = (19 * hash) + getDescriptorForType().hashCode();\n"); + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + if (!field->is_repeated()) { + printer->Print( + "if (has$name$()) {\n", + "name", UnderscoresToCapitalizedCamelCase(field)); + printer->Indent(); + } + field_generators_.get(field).GenerateHashCode(printer); + if (!field->is_repeated()) { + printer->Outdent(); + printer->Print("}\n"); + } + } + if (HasDescriptorMethods(descriptor_)) { + if (descriptor_->extension_range_count() > 0) { + printer->Print( + "hash = hashFields(hash, getExtensionFields());\n"); + } + } + printer->Print( + "hash = (29 * hash) + getUnknownFields().hashCode();\n" + "return hash;\n"); + printer->Outdent(); + 
printer->Print( + "}\n" + "\n"); +} + +// =================================================================== + +void MessageGenerator::GenerateExtensionRegistrationCode(io::Printer* printer) { + for (int i = 0; i < descriptor_->extension_count(); i++) { + ExtensionGenerator(descriptor_->extension(i)) + .GenerateRegistrationCode(printer); + } + + for (int i = 0; i < descriptor_->nested_type_count(); i++) { + MessageGenerator(descriptor_->nested_type(i)) + .GenerateExtensionRegistrationCode(printer); + } +} + +} // namespace java +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_message.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_message.h new file mode 100644 index 0000000000..4c6fbbe512 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_message.h @@ -0,0 +1,109 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifndef GOOGLE_PROTOBUF_COMPILER_JAVA_MESSAGE_H__ +#define GOOGLE_PROTOBUF_COMPILER_JAVA_MESSAGE_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { + namespace io { + class Printer; // printer.h + } +} + +namespace protobuf { +namespace compiler { +namespace java { + +class MessageGenerator { + public: + explicit MessageGenerator(const Descriptor* descriptor); + ~MessageGenerator(); + + // All static variables have to be declared at the top-level of the file + // so that we can control initialization order, which is important for + // DescriptorProto bootstrapping to work. 
+ void GenerateStaticVariables(io::Printer* printer); + + // Output code which initializes the static variables generated by + // GenerateStaticVariables(). + void GenerateStaticVariableInitializers(io::Printer* printer); + + // Generate the class itself. + void Generate(io::Printer* printer); + + // Generates the base interface that both the class and its builder implement + void GenerateInterface(io::Printer* printer); + + // Generate code to register all contained extensions with an + // ExtensionRegistry. + void GenerateExtensionRegistrationCode(io::Printer* printer); + + private: + enum UseMemoization { + MEMOIZE, + DONT_MEMOIZE + }; + + void GenerateMessageSerializationMethods(io::Printer* printer); + void GenerateParseFromMethods(io::Printer* printer); + void GenerateSerializeOneField(io::Printer* printer, + const FieldDescriptor* field); + void GenerateSerializeOneExtensionRange( + io::Printer* printer, const Descriptor::ExtensionRange* range); + + void GenerateBuilder(io::Printer* printer); + void GenerateCommonBuilderMethods(io::Printer* printer); + void GenerateDescriptorMethods(io::Printer* printer); + void GenerateBuilderParsingMethods(io::Printer* printer); + void GenerateIsInitialized(io::Printer* printer, + UseMemoization useMemoization); + void GenerateEqualsAndHashCode(io::Printer* printer); + + const Descriptor* descriptor_; + FieldGeneratorMap field_generators_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(MessageGenerator); +}; + +} // namespace java +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_JAVA_MESSAGE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_message_field.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_message_field.cc new file mode 100644 index 0000000000..251945afd5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_message_field.cc @@ -0,0 +1,884 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include + +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +namespace { + +// TODO(kenton): Factor out a "SetCommonFieldVariables()" to get rid of +// repeat code between this and the other field types. +void SetMessageVariables(const FieldDescriptor* descriptor, + int messageBitIndex, + int builderBitIndex, + map* variables) { + (*variables)["name"] = + UnderscoresToCamelCase(descriptor); + (*variables)["capitalized_name"] = + UnderscoresToCapitalizedCamelCase(descriptor); + (*variables)["constant_name"] = FieldConstantName(descriptor); + (*variables)["number"] = SimpleItoa(descriptor->number()); + (*variables)["type"] = ClassName(descriptor->message_type()); + (*variables)["group_or_message"] = + (GetType(descriptor) == FieldDescriptor::TYPE_GROUP) ? + "Group" : "Message"; + // TODO(birdo): Add @deprecated javadoc when generating javadoc is supported + // by the proto compiler + (*variables)["deprecation"] = descriptor->options().deprecated() + ? "@java.lang.Deprecated " : ""; + (*variables)["on_changed"] = + HasDescriptorMethods(descriptor->containing_type()) ? "onChanged();" : ""; + + // For singular messages and builders, one bit is used for the hasField bit. + (*variables)["get_has_field_bit_message"] = GenerateGetBit(messageBitIndex); + + (*variables)["get_has_field_bit_builder"] = GenerateGetBit(builderBitIndex); + (*variables)["set_has_field_bit_builder"] = GenerateSetBit(builderBitIndex); + (*variables)["clear_has_field_bit_builder"] = + GenerateClearBit(builderBitIndex); + + // For repated builders, one bit is used for whether the array is immutable. 
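SetMessageVariables above wires in GenerateGetBit/SetBit/ClearBit so that singular fields get a has-bit and repeated fields reuse the same slot as a "list is mutable" bit. Assuming those helpers expand to mask operations on an int such as bitField0_ (32 fields per int, with totalInts = (totalBits + 31) / 32 as computed in GenerateBuilder), the generated presence tracking behaves like this sketch; the field names are hypothetical:

    final class BitFieldDemo {
        private int bitField0_;
        private String name_ = "";
        private long id_;

        private static final int NAME_BIT = 0x00000001;
        private static final int ID_BIT   = 0x00000002;

        boolean hasName() { return (bitField0_ & NAME_BIT) == NAME_BIT; } // get_has_field_bit
        boolean hasId()   { return (bitField0_ & ID_BIT)   == ID_BIT;   }

        void setName(String v) { name_ = v; bitField0_ |= NAME_BIT; }     // set_has_field_bit
        void clearName()       { name_ = ""; bitField0_ &= ~NAME_BIT; }   // clear_has_field_bit
        void setId(long v)     { id_ = v; bitField0_ |= ID_BIT; }

        public static void main(String[] args) {
            BitFieldDemo m = new BitFieldDemo();
            System.out.println(m.hasName());                    // false
            m.setName("alice");
            m.setId(7L);
            System.out.println(m.hasName() + " " + m.hasId());  // true true
            m.clearName();
            System.out.println(m.hasName());                    // false again
        }
    }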
+ (*variables)["get_mutable_bit_builder"] = GenerateGetBit(builderBitIndex); + (*variables)["set_mutable_bit_builder"] = GenerateSetBit(builderBitIndex); + (*variables)["clear_mutable_bit_builder"] = GenerateClearBit(builderBitIndex); + + (*variables)["get_has_field_bit_from_local"] = + GenerateGetBitFromLocal(builderBitIndex); + (*variables)["set_has_field_bit_to_local"] = + GenerateSetBitToLocal(messageBitIndex); +} + +} // namespace + +// =================================================================== + +MessageFieldGenerator:: +MessageFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, + int builderBitIndex) + : descriptor_(descriptor), messageBitIndex_(messageBitIndex), + builderBitIndex_(builderBitIndex) { + SetMessageVariables(descriptor, messageBitIndex, builderBitIndex, + &variables_); +} + +MessageFieldGenerator::~MessageFieldGenerator() {} + +int MessageFieldGenerator::GetNumBitsForMessage() const { + return 1; +} + +int MessageFieldGenerator::GetNumBitsForBuilder() const { + return 1; +} + +void MessageFieldGenerator:: +GenerateInterfaceMembers(io::Printer* printer) const { + // TODO(jonp): In the future, consider having a method specific to the + // interface so that builders can choose dynamically to either return a + // message or a nested builder, so that asking for the interface doesn't + // cause a message to ever be built. + printer->Print(variables_, + "$deprecation$boolean has$capitalized_name$();\n" + "$deprecation$$type$ get$capitalized_name$();\n"); + + if (HasNestedBuilders(descriptor_->containing_type())) { + printer->Print(variables_, + "$deprecation$$type$OrBuilder get$capitalized_name$OrBuilder();\n"); + } +} + +void MessageFieldGenerator:: +GenerateMembers(io::Printer* printer) const { + printer->Print(variables_, + "private $type$ $name$_;\n" + "$deprecation$public boolean has$capitalized_name$() {\n" + " return $get_has_field_bit_message$;\n" + "}\n" + "$deprecation$public $type$ get$capitalized_name$() {\n" + " return $name$_;\n" + "}\n"); + + if (HasNestedBuilders(descriptor_->containing_type())) { + printer->Print(variables_, + "$deprecation$public $type$OrBuilder get$capitalized_name$OrBuilder() {\n" + " return $name$_;\n" + "}\n"); + } +} + +void MessageFieldGenerator::PrintNestedBuilderCondition( + io::Printer* printer, + const char* regular_case, + const char* nested_builder_case) const { + if (HasNestedBuilders(descriptor_->containing_type())) { + printer->Print(variables_, "if ($name$Builder_ == null) {\n"); + printer->Indent(); + printer->Print(variables_, regular_case); + printer->Outdent(); + printer->Print("} else {\n"); + printer->Indent(); + printer->Print(variables_, nested_builder_case); + printer->Outdent(); + printer->Print("}\n"); + } else { + printer->Print(variables_, regular_case); + } +} + +void MessageFieldGenerator::PrintNestedBuilderFunction( + io::Printer* printer, + const char* method_prototype, + const char* regular_case, + const char* nested_builder_case, + const char* trailing_code) const { + printer->Print(variables_, method_prototype); + printer->Print(" {\n"); + printer->Indent(); + PrintNestedBuilderCondition(printer, regular_case, nested_builder_case); + if (trailing_code != NULL) { + printer->Print(variables_, trailing_code); + } + printer->Outdent(); + printer->Print("}\n"); +} + +void MessageFieldGenerator:: +GenerateBuilderMembers(io::Printer* printer) const { + // When using nested-builders, the code initially works just like the + // non-nested builder case. 
It only creates a nested builder lazily on + // demand and then forever delegates to it after creation. + + printer->Print(variables_, + // Used when the builder is null. + "private $type$ $name$_ = $type$.getDefaultInstance();\n"); + + if (HasNestedBuilders(descriptor_->containing_type())) { + printer->Print(variables_, + // If this builder is non-null, it is used and the other fields are + // ignored. + "private com.google.protobuf.SingleFieldBuilder<\n" + " $type$, $type$.Builder, $type$OrBuilder> $name$Builder_;" + "\n"); + } + + // The comments above the methods below are based on a hypothetical + // field of type "Field" called "Field". + + // boolean hasField() + printer->Print(variables_, + "$deprecation$public boolean has$capitalized_name$() {\n" + " return $get_has_field_bit_builder$;\n" + "}\n"); + + // Field getField() + PrintNestedBuilderFunction(printer, + "$deprecation$public $type$ get$capitalized_name$()", + + "return $name$_;\n", + + "return $name$Builder_.getMessage();\n", + + NULL); + + // Field.Builder setField(Field value) + PrintNestedBuilderFunction(printer, + "$deprecation$public Builder set$capitalized_name$($type$ value)", + + "if (value == null) {\n" + " throw new NullPointerException();\n" + "}\n" + "$name$_ = value;\n" + "$on_changed$\n", + + "$name$Builder_.setMessage(value);\n", + + "$set_has_field_bit_builder$;\n" + "return this;\n"); + + // Field.Builder setField(Field.Builder builderForValue) + PrintNestedBuilderFunction(printer, + "$deprecation$public Builder set$capitalized_name$(\n" + " $type$.Builder builderForValue)", + + "$name$_ = builderForValue.build();\n" + "$on_changed$\n", + + "$name$Builder_.setMessage(builderForValue.build());\n", + + "$set_has_field_bit_builder$;\n" + "return this;\n"); + + // Field.Builder mergeField(Field value) + PrintNestedBuilderFunction(printer, + "$deprecation$public Builder merge$capitalized_name$($type$ value)", + + "if ($get_has_field_bit_builder$ &&\n" + " $name$_ != $type$.getDefaultInstance()) {\n" + " $name$_ =\n" + " $type$.newBuilder($name$_).mergeFrom(value).buildPartial();\n" + "} else {\n" + " $name$_ = value;\n" + "}\n" + "$on_changed$\n", + + "$name$Builder_.mergeFrom(value);\n", + + "$set_has_field_bit_builder$;\n" + "return this;\n"); + + // Field.Builder clearField() + PrintNestedBuilderFunction(printer, + "$deprecation$public Builder clear$capitalized_name$()", + + "$name$_ = $type$.getDefaultInstance();\n" + "$on_changed$\n", + + "$name$Builder_.clear();\n", + + "$clear_has_field_bit_builder$;\n" + "return this;\n"); + + if (HasNestedBuilders(descriptor_->containing_type())) { + printer->Print(variables_, + "$deprecation$public $type$.Builder get$capitalized_name$Builder() {\n" + " $set_has_field_bit_builder$;\n" + " $on_changed$\n" + " return get$capitalized_name$FieldBuilder().getBuilder();\n" + "}\n" + "$deprecation$public $type$OrBuilder get$capitalized_name$OrBuilder() {\n" + " if ($name$Builder_ != null) {\n" + " return $name$Builder_.getMessageOrBuilder();\n" + " } else {\n" + " return $name$_;\n" + " }\n" + "}\n" + "private com.google.protobuf.SingleFieldBuilder<\n" + " $type$, $type$.Builder, $type$OrBuilder> \n" + " get$capitalized_name$FieldBuilder() {\n" + " if ($name$Builder_ == null) {\n" + " $name$Builder_ = new com.google.protobuf.SingleFieldBuilder<\n" + " $type$, $type$.Builder, $type$OrBuilder>(\n" + " $name$_,\n" + " getParentForChildren(),\n" + " isClean());\n" + " $name$_ = null;\n" + " }\n" + " return $name$Builder_;\n" + "}\n"); + } +} + +void MessageFieldGenerator:: 
+GenerateFieldBuilderInitializationCode(io::Printer* printer) const { + printer->Print(variables_, + "get$capitalized_name$FieldBuilder();\n"); +} + + +void MessageFieldGenerator:: +GenerateInitializationCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_ = $type$.getDefaultInstance();\n"); +} + +void MessageFieldGenerator:: +GenerateBuilderClearCode(io::Printer* printer) const { + PrintNestedBuilderCondition(printer, + "$name$_ = $type$.getDefaultInstance();\n", + + "$name$Builder_.clear();\n"); + printer->Print(variables_, "$clear_has_field_bit_builder$;\n"); +} + +void MessageFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + printer->Print(variables_, + "if (other.has$capitalized_name$()) {\n" + " merge$capitalized_name$(other.get$capitalized_name$());\n" + "}\n"); +} + +void MessageFieldGenerator:: +GenerateBuildingCode(io::Printer* printer) const { + + printer->Print(variables_, + "if ($get_has_field_bit_from_local$) {\n" + " $set_has_field_bit_to_local$;\n" + "}\n"); + + PrintNestedBuilderCondition(printer, + "result.$name$_ = $name$_;\n", + + "result.$name$_ = $name$Builder_.build();\n"); +} + +void MessageFieldGenerator:: +GenerateParsingCode(io::Printer* printer) const { + printer->Print(variables_, + "$type$.Builder subBuilder = $type$.newBuilder();\n" + "if (has$capitalized_name$()) {\n" + " subBuilder.mergeFrom(get$capitalized_name$());\n" + "}\n"); + + if (GetType(descriptor_) == FieldDescriptor::TYPE_GROUP) { + printer->Print(variables_, + "input.readGroup($number$, subBuilder, extensionRegistry);\n"); + } else { + printer->Print(variables_, + "input.readMessage(subBuilder, extensionRegistry);\n"); + } + + printer->Print(variables_, + "set$capitalized_name$(subBuilder.buildPartial());\n"); +} + +void MessageFieldGenerator:: +GenerateSerializationCode(io::Printer* printer) const { + printer->Print(variables_, + "if ($get_has_field_bit_message$) {\n" + " output.write$group_or_message$($number$, $name$_);\n" + "}\n"); +} + +void MessageFieldGenerator:: +GenerateSerializedSizeCode(io::Printer* printer) const { + printer->Print(variables_, + "if ($get_has_field_bit_message$) {\n" + " size += com.google.protobuf.CodedOutputStream\n" + " .compute$group_or_message$Size($number$, $name$_);\n" + "}\n"); +} + +void MessageFieldGenerator:: +GenerateEqualsCode(io::Printer* printer) const { + printer->Print(variables_, + "result = result && get$capitalized_name$()\n" + " .equals(other.get$capitalized_name$());\n"); +} + +void MessageFieldGenerator:: +GenerateHashCode(io::Printer* printer) const { + printer->Print(variables_, + "hash = (37 * hash) + $constant_name$;\n" + "hash = (53 * hash) + get$capitalized_name$().hashCode();\n"); +} + +string MessageFieldGenerator::GetBoxedType() const { + return ClassName(descriptor_->message_type()); +} + +// =================================================================== + +RepeatedMessageFieldGenerator:: +RepeatedMessageFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, + int builderBitIndex) + : descriptor_(descriptor), messageBitIndex_(messageBitIndex), + builderBitIndex_(builderBitIndex) { + SetMessageVariables(descriptor, messageBitIndex, builderBitIndex, + &variables_); +} + +RepeatedMessageFieldGenerator::~RepeatedMessageFieldGenerator() {} + +int RepeatedMessageFieldGenerator::GetNumBitsForMessage() const { + return 0; +} + +int RepeatedMessageFieldGenerator::GetNumBitsForBuilder() const { + return 1; +} + +void RepeatedMessageFieldGenerator:: 
+GenerateInterfaceMembers(io::Printer* printer) const { + // TODO(jonp): In the future, consider having methods specific to the + // interface so that builders can choose dynamically to either return a + // message or a nested builder, so that asking for the interface doesn't + // cause a message to ever be built. + printer->Print(variables_, + "$deprecation$java.util.List<$type$> \n" + " get$capitalized_name$List();\n" + "$deprecation$$type$ get$capitalized_name$(int index);\n" + "$deprecation$int get$capitalized_name$Count();\n"); + if (HasNestedBuilders(descriptor_->containing_type())) { + printer->Print(variables_, + "$deprecation$java.util.List \n" + " get$capitalized_name$OrBuilderList();\n" + "$deprecation$$type$OrBuilder get$capitalized_name$OrBuilder(\n" + " int index);\n"); + } +} + +void RepeatedMessageFieldGenerator:: +GenerateMembers(io::Printer* printer) const { + printer->Print(variables_, + "private java.util.List<$type$> $name$_;\n" + "$deprecation$public java.util.List<$type$> get$capitalized_name$List() {\n" + " return $name$_;\n" // note: unmodifiable list + "}\n" + "$deprecation$public java.util.List \n" + " get$capitalized_name$OrBuilderList() {\n" + " return $name$_;\n" + "}\n" + "$deprecation$public int get$capitalized_name$Count() {\n" + " return $name$_.size();\n" + "}\n" + "$deprecation$public $type$ get$capitalized_name$(int index) {\n" + " return $name$_.get(index);\n" + "}\n" + "$deprecation$public $type$OrBuilder get$capitalized_name$OrBuilder(\n" + " int index) {\n" + " return $name$_.get(index);\n" + "}\n"); + +} + +void RepeatedMessageFieldGenerator::PrintNestedBuilderCondition( + io::Printer* printer, + const char* regular_case, + const char* nested_builder_case) const { + if (HasNestedBuilders(descriptor_->containing_type())) { + printer->Print(variables_, "if ($name$Builder_ == null) {\n"); + printer->Indent(); + printer->Print(variables_, regular_case); + printer->Outdent(); + printer->Print("} else {\n"); + printer->Indent(); + printer->Print(variables_, nested_builder_case); + printer->Outdent(); + printer->Print("}\n"); + } else { + printer->Print(variables_, regular_case); + } +} + +void RepeatedMessageFieldGenerator::PrintNestedBuilderFunction( + io::Printer* printer, + const char* method_prototype, + const char* regular_case, + const char* nested_builder_case, + const char* trailing_code) const { + printer->Print(variables_, method_prototype); + printer->Print(" {\n"); + printer->Indent(); + PrintNestedBuilderCondition(printer, regular_case, nested_builder_case); + if (trailing_code != NULL) { + printer->Print(variables_, trailing_code); + } + printer->Outdent(); + printer->Print("}\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateBuilderMembers(io::Printer* printer) const { + // When using nested-builders, the code initially works just like the + // non-nested builder case. It only creates a nested builder lazily on + // demand and then forever delegates to it after creation. + + printer->Print(variables_, + // Used when the builder is null. + // One field is the list and the other field keeps track of whether the + // list is immutable. If it's immutable, the invariant is that it must + // either an instance of Collections.emptyList() or it's an ArrayList + // wrapped in a Collections.unmodifiableList() wrapper and nobody else has + // a refererence to the underlying ArrayList. This invariant allows us to + // share instances of lists between protocol buffers avoiding expensive + // memory allocations. 
Note, immutable is a strong guarantee here -- not + // just that the list cannot be modified via the reference but that the + // list can never be modified. + "private java.util.List<$type$> $name$_ =\n" + " java.util.Collections.emptyList();\n" + + "private void ensure$capitalized_name$IsMutable() {\n" + " if (!$get_mutable_bit_builder$) {\n" + " $name$_ = new java.util.ArrayList<$type$>($name$_);\n" + " $set_mutable_bit_builder$;\n" + " }\n" + "}\n" + "\n"); + + if (HasNestedBuilders(descriptor_->containing_type())) { + printer->Print(variables_, + // If this builder is non-null, it is used and the other fields are + // ignored. + "private com.google.protobuf.RepeatedFieldBuilder<\n" + " $type$, $type$.Builder, $type$OrBuilder> $name$Builder_;\n" + "\n"); + } + + // The comments above the methods below are based on a hypothetical + // repeated field of type "Field" called "RepeatedField". + + // List getRepeatedFieldList() + PrintNestedBuilderFunction(printer, + "$deprecation$public java.util.List<$type$> get$capitalized_name$List()", + + "return java.util.Collections.unmodifiableList($name$_);\n", + "return $name$Builder_.getMessageList();\n", + + NULL); + + // int getRepeatedFieldCount() + PrintNestedBuilderFunction(printer, + "$deprecation$public int get$capitalized_name$Count()", + + "return $name$_.size();\n", + "return $name$Builder_.getCount();\n", + + NULL); + + // Field getRepeatedField(int index) + PrintNestedBuilderFunction(printer, + "$deprecation$public $type$ get$capitalized_name$(int index)", + + "return $name$_.get(index);\n", + + "return $name$Builder_.getMessage(index);\n", + + NULL); + + // Builder setRepeatedField(int index, Field value) + PrintNestedBuilderFunction(printer, + "$deprecation$public Builder set$capitalized_name$(\n" + " int index, $type$ value)", + "if (value == null) {\n" + " throw new NullPointerException();\n" + "}\n" + "ensure$capitalized_name$IsMutable();\n" + "$name$_.set(index, value);\n" + "$on_changed$\n", + "$name$Builder_.setMessage(index, value);\n", + "return this;\n"); + + // Builder setRepeatedField(int index, Field.Builder builderForValue) + PrintNestedBuilderFunction(printer, + "$deprecation$public Builder set$capitalized_name$(\n" + " int index, $type$.Builder builderForValue)", + + "ensure$capitalized_name$IsMutable();\n" + "$name$_.set(index, builderForValue.build());\n" + "$on_changed$\n", + + "$name$Builder_.setMessage(index, builderForValue.build());\n", + + "return this;\n"); + + // Builder addRepeatedField(Field value) + PrintNestedBuilderFunction(printer, + "$deprecation$public Builder add$capitalized_name$($type$ value)", + + "if (value == null) {\n" + " throw new NullPointerException();\n" + "}\n" + "ensure$capitalized_name$IsMutable();\n" + "$name$_.add(value);\n" + + "$on_changed$\n", + + "$name$Builder_.addMessage(value);\n", + + "return this;\n"); + + // Builder addRepeatedField(int index, Field value) + PrintNestedBuilderFunction(printer, + "$deprecation$public Builder add$capitalized_name$(\n" + " int index, $type$ value)", + + "if (value == null) {\n" + " throw new NullPointerException();\n" + "}\n" + "ensure$capitalized_name$IsMutable();\n" + "$name$_.add(index, value);\n" + "$on_changed$\n", + + "$name$Builder_.addMessage(index, value);\n", + + "return this;\n"); + + // Builder addRepeatedField(Field.Builder builderForValue) + PrintNestedBuilderFunction(printer, + "$deprecation$public Builder add$capitalized_name$(\n" + " $type$.Builder builderForValue)", + + "ensure$capitalized_name$IsMutable();\n" + 
"$name$_.add(builderForValue.build());\n" + "$on_changed$\n", + + "$name$Builder_.addMessage(builderForValue.build());\n", + + "return this;\n"); + + // Builder addRepeatedField(int index, Field.Builder builderForValue) + PrintNestedBuilderFunction(printer, + "$deprecation$public Builder add$capitalized_name$(\n" + " int index, $type$.Builder builderForValue)", + + "ensure$capitalized_name$IsMutable();\n" + "$name$_.add(index, builderForValue.build());\n" + "$on_changed$\n", + + "$name$Builder_.addMessage(index, builderForValue.build());\n", + + "return this;\n"); + + // Builder addAllRepeatedField(Iterable values) + PrintNestedBuilderFunction(printer, + "$deprecation$public Builder addAll$capitalized_name$(\n" + " java.lang.Iterable values)", + + "ensure$capitalized_name$IsMutable();\n" + "super.addAll(values, $name$_);\n" + "$on_changed$\n", + + "$name$Builder_.addAllMessages(values);\n", + + "return this;\n"); + + // Builder clearAllRepeatedField() + PrintNestedBuilderFunction(printer, + "$deprecation$public Builder clear$capitalized_name$()", + + "$name$_ = java.util.Collections.emptyList();\n" + "$clear_mutable_bit_builder$;\n" + "$on_changed$\n", + + "$name$Builder_.clear();\n", + + "return this;\n"); + + // Builder removeRepeatedField(int index) + PrintNestedBuilderFunction(printer, + "$deprecation$public Builder remove$capitalized_name$(int index)", + + "ensure$capitalized_name$IsMutable();\n" + "$name$_.remove(index);\n" + "$on_changed$\n", + + "$name$Builder_.remove(index);\n", + + "return this;\n"); + + if (HasNestedBuilders(descriptor_->containing_type())) { + printer->Print(variables_, + "$deprecation$public $type$.Builder get$capitalized_name$Builder(\n" + " int index) {\n" + " return get$capitalized_name$FieldBuilder().getBuilder(index);\n" + "}\n" + + "$deprecation$public $type$OrBuilder get$capitalized_name$OrBuilder(\n" + " int index) {\n" + " if ($name$Builder_ == null) {\n" + " return $name$_.get(index);" + " } else {\n" + " return $name$Builder_.getMessageOrBuilder(index);\n" + " }\n" + "}\n" + + "$deprecation$public java.util.List \n" + " get$capitalized_name$OrBuilderList() {\n" + " if ($name$Builder_ != null) {\n" + " return $name$Builder_.getMessageOrBuilderList();\n" + " } else {\n" + " return java.util.Collections.unmodifiableList($name$_);\n" + " }\n" + "}\n" + + "$deprecation$public $type$.Builder add$capitalized_name$Builder() {\n" + " return get$capitalized_name$FieldBuilder().addBuilder(\n" + " $type$.getDefaultInstance());\n" + "}\n" + "$deprecation$public $type$.Builder add$capitalized_name$Builder(\n" + " int index) {\n" + " return get$capitalized_name$FieldBuilder().addBuilder(\n" + " index, $type$.getDefaultInstance());\n" + "}\n" + "$deprecation$public java.util.List<$type$.Builder> \n" + " get$capitalized_name$BuilderList() {\n" + " return get$capitalized_name$FieldBuilder().getBuilderList();\n" + "}\n" + "private com.google.protobuf.RepeatedFieldBuilder<\n" + " $type$, $type$.Builder, $type$OrBuilder> \n" + " get$capitalized_name$FieldBuilder() {\n" + " if ($name$Builder_ == null) {\n" + " $name$Builder_ = new com.google.protobuf.RepeatedFieldBuilder<\n" + " $type$, $type$.Builder, $type$OrBuilder>(\n" + " $name$_,\n" + " $get_mutable_bit_builder$,\n" + " getParentForChildren(),\n" + " isClean());\n" + " $name$_ = null;\n" + " }\n" + " return $name$Builder_;\n" + "}\n"); + } +} + +void RepeatedMessageFieldGenerator:: +GenerateFieldBuilderInitializationCode(io::Printer* printer) const { + printer->Print(variables_, + 
"get$capitalized_name$FieldBuilder();\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateInitializationCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_ = java.util.Collections.emptyList();\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateBuilderClearCode(io::Printer* printer) const { + PrintNestedBuilderCondition(printer, + "$name$_ = java.util.Collections.emptyList();\n" + "$clear_mutable_bit_builder$;\n", + + "$name$Builder_.clear();\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + // The code below does two optimizations (non-nested builder case): + // 1. If the other list is empty, there's nothing to do. This ensures we + // don't allocate a new array if we already have an immutable one. + // 2. If the other list is non-empty and our current list is empty, we can + // reuse the other list which is guaranteed to be immutable. + PrintNestedBuilderCondition(printer, + "if (!other.$name$_.isEmpty()) {\n" + " if ($name$_.isEmpty()) {\n" + " $name$_ = other.$name$_;\n" + " $clear_mutable_bit_builder$;\n" + " } else {\n" + " ensure$capitalized_name$IsMutable();\n" + " $name$_.addAll(other.$name$_);\n" + " }\n" + " $on_changed$\n" + "}\n", + + "if (!other.$name$_.isEmpty()) {\n" + " if ($name$Builder_.isEmpty()) {\n" + " $name$Builder_.dispose();\n" + " $name$Builder_ = null;\n" + " $name$_ = other.$name$_;\n" + " $clear_mutable_bit_builder$;\n" + " $name$Builder_ = \n" + " com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?\n" + " get$capitalized_name$FieldBuilder() : null;\n" + " } else {\n" + " $name$Builder_.addAllMessages(other.$name$_);\n" + " }\n" + "}\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateBuildingCode(io::Printer* printer) const { + // The code below (non-nested builder case) ensures that the result has an + // immutable list. If our list is immutable, we can just reuse it. If not, + // we make it immutable. 
+ PrintNestedBuilderCondition(printer, + "if ($get_mutable_bit_builder$) {\n" + " $name$_ = java.util.Collections.unmodifiableList($name$_);\n" + " $clear_mutable_bit_builder$;\n" + "}\n" + "result.$name$_ = $name$_;\n", + + "result.$name$_ = $name$Builder_.build();\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateParsingCode(io::Printer* printer) const { + printer->Print(variables_, + "$type$.Builder subBuilder = $type$.newBuilder();\n"); + + if (GetType(descriptor_) == FieldDescriptor::TYPE_GROUP) { + printer->Print(variables_, + "input.readGroup($number$, subBuilder, extensionRegistry);\n"); + } else { + printer->Print(variables_, + "input.readMessage(subBuilder, extensionRegistry);\n"); + } + + printer->Print(variables_, + "add$capitalized_name$(subBuilder.buildPartial());\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateSerializationCode(io::Printer* printer) const { + printer->Print(variables_, + "for (int i = 0; i < $name$_.size(); i++) {\n" + " output.write$group_or_message$($number$, $name$_.get(i));\n" + "}\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateSerializedSizeCode(io::Printer* printer) const { + printer->Print(variables_, + "for (int i = 0; i < $name$_.size(); i++) {\n" + " size += com.google.protobuf.CodedOutputStream\n" + " .compute$group_or_message$Size($number$, $name$_.get(i));\n" + "}\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateEqualsCode(io::Printer* printer) const { + printer->Print(variables_, + "result = result && get$capitalized_name$List()\n" + " .equals(other.get$capitalized_name$List());\n"); +} + +void RepeatedMessageFieldGenerator:: +GenerateHashCode(io::Printer* printer) const { + printer->Print(variables_, + "if (get$capitalized_name$Count() > 0) {\n" + " hash = (37 * hash) + $constant_name$;\n" + " hash = (53 * hash) + get$capitalized_name$List().hashCode();\n" + "}\n"); +} + +string RepeatedMessageFieldGenerator::GetBoxedType() const { + return ClassName(descriptor_->message_type()); +} + +} // namespace java +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_message_field.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_message_field.h new file mode 100644 index 0000000000..2efbcd97b0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_message_field.h @@ -0,0 +1,134 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifndef GOOGLE_PROTOBUF_COMPILER_JAVA_MESSAGE_FIELD_H__ +#define GOOGLE_PROTOBUF_COMPILER_JAVA_MESSAGE_FIELD_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +class MessageFieldGenerator : public FieldGenerator { + public: + explicit MessageFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, int builderBitIndex); + ~MessageFieldGenerator(); + + // implements FieldGenerator --------------------------------------- + int GetNumBitsForMessage() const; + int GetNumBitsForBuilder() const; + void GenerateInterfaceMembers(io::Printer* printer) const; + void GenerateMembers(io::Printer* printer) const; + void GenerateBuilderMembers(io::Printer* printer) const; + void GenerateInitializationCode(io::Printer* printer) const; + void GenerateBuilderClearCode(io::Printer* printer) const; + void GenerateMergingCode(io::Printer* printer) const; + void GenerateBuildingCode(io::Printer* printer) const; + void GenerateParsingCode(io::Printer* printer) const; + void GenerateSerializationCode(io::Printer* printer) const; + void GenerateSerializedSizeCode(io::Printer* printer) const; + void GenerateFieldBuilderInitializationCode(io::Printer* printer) const; + void GenerateEqualsCode(io::Printer* printer) const; + void GenerateHashCode(io::Printer* printer) const; + + string GetBoxedType() const; + + private: + const FieldDescriptor* descriptor_; + map variables_; + const int messageBitIndex_; + const int builderBitIndex_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(MessageFieldGenerator); + + void PrintNestedBuilderCondition(io::Printer* printer, + const char* regular_case, const char* nested_builder_case) const; + void PrintNestedBuilderFunction(io::Printer* printer, + const char* method_prototype, const char* regular_case, + const char* nested_builder_case, + const char* trailing_code) const; +}; + +class RepeatedMessageFieldGenerator : public FieldGenerator { + public: + explicit RepeatedMessageFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, int builderBitIndex); + ~RepeatedMessageFieldGenerator(); + + // implements FieldGenerator --------------------------------------- + int GetNumBitsForMessage() const; + int GetNumBitsForBuilder() const; + void GenerateInterfaceMembers(io::Printer* printer) const; + void GenerateMembers(io::Printer* printer) const; + void GenerateBuilderMembers(io::Printer* printer) const; + void GenerateInitializationCode(io::Printer* printer) const; + void GenerateBuilderClearCode(io::Printer* printer) const; + void GenerateMergingCode(io::Printer* 
printer) const; + void GenerateBuildingCode(io::Printer* printer) const; + void GenerateParsingCode(io::Printer* printer) const; + void GenerateSerializationCode(io::Printer* printer) const; + void GenerateSerializedSizeCode(io::Printer* printer) const; + void GenerateFieldBuilderInitializationCode(io::Printer* printer) const; + void GenerateEqualsCode(io::Printer* printer) const; + void GenerateHashCode(io::Printer* printer) const; + + string GetBoxedType() const; + + private: + const FieldDescriptor* descriptor_; + map variables_; + const int messageBitIndex_; + const int builderBitIndex_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RepeatedMessageFieldGenerator); + + void PrintNestedBuilderCondition(io::Printer* printer, + const char* regular_case, const char* nested_builder_case) const; + void PrintNestedBuilderFunction(io::Printer* printer, + const char* method_prototype, const char* regular_case, + const char* nested_builder_case, + const char* trailing_code) const; +}; + +} // namespace java +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_JAVA_MESSAGE_FIELD_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_plugin_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_plugin_unittest.cc new file mode 100644 index 0000000000..ccc94c9dfe --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_plugin_unittest.cc @@ -0,0 +1,122 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// +// TODO(kenton): Share code with the versions of this test in other languages? +// It seemed like parameterizing it would add more complexity than it is +// worth. 
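The plugin test below only checks that the generator leaves named insertion points (outer_class_scope, class_scope:..., builder_scope:..., enum_scope:...) in the emitted file; TestGenerator opens each one and appends an "// inserted" comment. Roughly, and heavily elided, the expected Test.java has the shape sketched here; the exact placement of each marker is an assumption, apart from builder_scope, which the message generator above prints inside the Builder class:

    public final class Test {
      public static final class Bar {
        public static final class Baz {
          // @@protoc_insertion_point(class_scope:foo.Bar.Baz)
        }
        public static final class Builder {
          // @@protoc_insertion_point(builder_scope:foo.Bar)
        }
        // @@protoc_insertion_point(class_scope:foo.Bar)
      }
      public enum Qux {
        BLAH;
        // @@protoc_insertion_point(enum_scope:foo.Qux)
      }
      // @@protoc_insertion_point(outer_class_scope)
    }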
+ +#include +#include +#include +#include + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { +namespace { + +class TestGenerator : public CodeGenerator { + public: + TestGenerator() {} + ~TestGenerator() {} + + virtual bool Generate(const FileDescriptor* file, + const string& parameter, + GeneratorContext* context, + string* error) const { + string filename = "Test.java"; + TryInsert(filename, "outer_class_scope", context); + TryInsert(filename, "class_scope:foo.Bar", context); + TryInsert(filename, "class_scope:foo.Bar.Baz", context); + TryInsert(filename, "builder_scope:foo.Bar", context); + TryInsert(filename, "builder_scope:foo.Bar.Baz", context); + TryInsert(filename, "enum_scope:foo.Qux", context); + return true; + } + + void TryInsert(const string& filename, const string& insertion_point, + GeneratorContext* context) const { + scoped_ptr output( + context->OpenForInsert(filename, insertion_point)); + io::Printer printer(output.get(), '$'); + printer.Print("// inserted $name$\n", "name", insertion_point); + } +}; + +// This test verifies that all the expected insertion points exist. It does +// not verify that they are correctly-placed; that would require actually +// compiling the output which is a bit more than I care to do for this test. +TEST(JavaPluginTest, PluginTest) { + File::WriteStringToFileOrDie( + "syntax = \"proto2\";\n" + "package foo;\n" + "option java_package = \"\";\n" + "option java_outer_classname = \"Test\";\n" + "message Bar {\n" + " message Baz {}\n" + "}\n" + "enum Qux { BLAH = 1; }\n", + TestTempDir() + "/test.proto"); + + google::protobuf::compiler::CommandLineInterface cli; + cli.SetInputsAreProtoPathRelative(true); + + JavaGenerator java_generator; + TestGenerator test_generator; + cli.RegisterGenerator("--java_out", &java_generator, ""); + cli.RegisterGenerator("--test_out", &test_generator, ""); + + string proto_path = "-I" + TestTempDir(); + string java_out = "--java_out=" + TestTempDir(); + string test_out = "--test_out=" + TestTempDir(); + + const char* argv[] = { + "protoc", + proto_path.c_str(), + java_out.c_str(), + test_out.c_str(), + "test.proto" + }; + + EXPECT_EQ(0, cli.Run(5, argv)); +} + +} // namespace +} // namespace java +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_primitive_field.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_primitive_field.cc new file mode 100644 index 0000000000..712e047af1 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_primitive_field.cc @@ -0,0 +1,719 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include + +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +using internal::WireFormat; +using internal::WireFormatLite; + +namespace { + +const char* PrimitiveTypeName(JavaType type) { + switch (type) { + case JAVATYPE_INT : return "int"; + case JAVATYPE_LONG : return "long"; + case JAVATYPE_FLOAT : return "float"; + case JAVATYPE_DOUBLE : return "double"; + case JAVATYPE_BOOLEAN: return "boolean"; + case JAVATYPE_STRING : return "java.lang.String"; + case JAVATYPE_BYTES : return "com.google.protobuf.ByteString"; + case JAVATYPE_ENUM : return NULL; + case JAVATYPE_MESSAGE: return NULL; + + // No default because we want the compiler to complain if any new + // JavaTypes are added. + } + + GOOGLE_LOG(FATAL) << "Can't get here."; + return NULL; +} + +bool IsReferenceType(JavaType type) { + switch (type) { + case JAVATYPE_INT : return false; + case JAVATYPE_LONG : return false; + case JAVATYPE_FLOAT : return false; + case JAVATYPE_DOUBLE : return false; + case JAVATYPE_BOOLEAN: return false; + case JAVATYPE_STRING : return true; + case JAVATYPE_BYTES : return true; + case JAVATYPE_ENUM : return true; + case JAVATYPE_MESSAGE: return true; + + // No default because we want the compiler to complain if any new + // JavaTypes are added. 
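[Editor's note] IsReferenceType() above separates boxed/reference Java types (String, ByteString, enums, messages) from primitives; a little further down, SetPrimitiveVariables() uses it to decide whether the generated setter gets a $null_check$ block. A small hand-written sketch of the two setter shapes, with hypothetical class and field names:

    // Illustration of the two setter shapes: primitives cannot be null, so only
    // reference-typed fields get the null check the generator emits.
    public final class SetterShapes {
        private int id_;            // primitive field: set directly
        private String name_ = "";  // reference field: reject null

        public SetterShapes setId(int value) {
            id_ = value;
            return this;
        }

        public SetterShapes setName(String value) {
            if (value == null) {    // mirrors the generated $null_check$ block
                throw new NullPointerException();
            }
            name_ = value;
            return this;
        }

        public static void main(String[] args) {
            SetterShapes s = new SetterShapes().setId(7).setName("ok");
            System.out.println(s.name_ + " " + s.id_);
        }
    }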
+ } + + GOOGLE_LOG(FATAL) << "Can't get here."; + return false; +} + +const char* GetCapitalizedType(const FieldDescriptor* field) { + switch (GetType(field)) { + case FieldDescriptor::TYPE_INT32 : return "Int32" ; + case FieldDescriptor::TYPE_UINT32 : return "UInt32" ; + case FieldDescriptor::TYPE_SINT32 : return "SInt32" ; + case FieldDescriptor::TYPE_FIXED32 : return "Fixed32" ; + case FieldDescriptor::TYPE_SFIXED32: return "SFixed32"; + case FieldDescriptor::TYPE_INT64 : return "Int64" ; + case FieldDescriptor::TYPE_UINT64 : return "UInt64" ; + case FieldDescriptor::TYPE_SINT64 : return "SInt64" ; + case FieldDescriptor::TYPE_FIXED64 : return "Fixed64" ; + case FieldDescriptor::TYPE_SFIXED64: return "SFixed64"; + case FieldDescriptor::TYPE_FLOAT : return "Float" ; + case FieldDescriptor::TYPE_DOUBLE : return "Double" ; + case FieldDescriptor::TYPE_BOOL : return "Bool" ; + case FieldDescriptor::TYPE_STRING : return "String" ; + case FieldDescriptor::TYPE_BYTES : return "Bytes" ; + case FieldDescriptor::TYPE_ENUM : return "Enum" ; + case FieldDescriptor::TYPE_GROUP : return "Group" ; + case FieldDescriptor::TYPE_MESSAGE : return "Message" ; + + // No default because we want the compiler to complain if any new + // types are added. + } + + GOOGLE_LOG(FATAL) << "Can't get here."; + return NULL; +} + +// For encodings with fixed sizes, returns that size in bytes. Otherwise +// returns -1. +int FixedSize(FieldDescriptor::Type type) { + switch (type) { + case FieldDescriptor::TYPE_INT32 : return -1; + case FieldDescriptor::TYPE_INT64 : return -1; + case FieldDescriptor::TYPE_UINT32 : return -1; + case FieldDescriptor::TYPE_UINT64 : return -1; + case FieldDescriptor::TYPE_SINT32 : return -1; + case FieldDescriptor::TYPE_SINT64 : return -1; + case FieldDescriptor::TYPE_FIXED32 : return WireFormatLite::kFixed32Size; + case FieldDescriptor::TYPE_FIXED64 : return WireFormatLite::kFixed64Size; + case FieldDescriptor::TYPE_SFIXED32: return WireFormatLite::kSFixed32Size; + case FieldDescriptor::TYPE_SFIXED64: return WireFormatLite::kSFixed64Size; + case FieldDescriptor::TYPE_FLOAT : return WireFormatLite::kFloatSize; + case FieldDescriptor::TYPE_DOUBLE : return WireFormatLite::kDoubleSize; + + case FieldDescriptor::TYPE_BOOL : return WireFormatLite::kBoolSize; + case FieldDescriptor::TYPE_ENUM : return -1; + + case FieldDescriptor::TYPE_STRING : return -1; + case FieldDescriptor::TYPE_BYTES : return -1; + case FieldDescriptor::TYPE_GROUP : return -1; + case FieldDescriptor::TYPE_MESSAGE : return -1; + + // No default because we want the compiler to complain if any new + // types are added. 
+ } + GOOGLE_LOG(FATAL) << "Can't get here."; + return -1; +} + +void SetPrimitiveVariables(const FieldDescriptor* descriptor, + int messageBitIndex, + int builderBitIndex, + map* variables) { + (*variables)["name"] = + UnderscoresToCamelCase(descriptor); + (*variables)["capitalized_name"] = + UnderscoresToCapitalizedCamelCase(descriptor); + (*variables)["constant_name"] = FieldConstantName(descriptor); + (*variables)["number"] = SimpleItoa(descriptor->number()); + (*variables)["type"] = PrimitiveTypeName(GetJavaType(descriptor)); + (*variables)["boxed_type"] = BoxedPrimitiveTypeName(GetJavaType(descriptor)); + (*variables)["field_type"] = (*variables)["type"]; + (*variables)["field_list_type"] = "java.util.List<" + + (*variables)["boxed_type"] + ">"; + (*variables)["empty_list"] = "java.util.Collections.emptyList()"; + (*variables)["default"] = DefaultValue(descriptor); + (*variables)["default_init"] = IsDefaultValueJavaDefault(descriptor) ? + "" : ("= " + DefaultValue(descriptor)); + (*variables)["capitalized_type"] = GetCapitalizedType(descriptor); + (*variables)["tag"] = SimpleItoa(WireFormat::MakeTag(descriptor)); + (*variables)["tag_size"] = SimpleItoa( + WireFormat::TagSize(descriptor->number(), GetType(descriptor))); + if (IsReferenceType(GetJavaType(descriptor))) { + (*variables)["null_check"] = + " if (value == null) {\n" + " throw new NullPointerException();\n" + " }\n"; + } else { + (*variables)["null_check"] = ""; + } + // TODO(birdo): Add @deprecated javadoc when generating javadoc is supported + // by the proto compiler + (*variables)["deprecation"] = descriptor->options().deprecated() + ? "@java.lang.Deprecated " : ""; + int fixed_size = FixedSize(GetType(descriptor)); + if (fixed_size != -1) { + (*variables)["fixed_size"] = SimpleItoa(fixed_size); + } + (*variables)["on_changed"] = + HasDescriptorMethods(descriptor->containing_type()) ? "onChanged();" : ""; + + // For singular messages and builders, one bit is used for the hasField bit. + (*variables)["get_has_field_bit_message"] = GenerateGetBit(messageBitIndex); + + (*variables)["get_has_field_bit_builder"] = GenerateGetBit(builderBitIndex); + (*variables)["set_has_field_bit_builder"] = GenerateSetBit(builderBitIndex); + (*variables)["clear_has_field_bit_builder"] = + GenerateClearBit(builderBitIndex); + + // For repated builders, one bit is used for whether the array is immutable. 
+ (*variables)["get_mutable_bit_builder"] = GenerateGetBit(builderBitIndex); + (*variables)["set_mutable_bit_builder"] = GenerateSetBit(builderBitIndex); + (*variables)["clear_mutable_bit_builder"] = GenerateClearBit(builderBitIndex); + + (*variables)["get_has_field_bit_from_local"] = + GenerateGetBitFromLocal(builderBitIndex); + (*variables)["set_has_field_bit_to_local"] = + GenerateSetBitToLocal(messageBitIndex); +} + +} // namespace + +// =================================================================== + +PrimitiveFieldGenerator:: +PrimitiveFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, + int builderBitIndex) + : descriptor_(descriptor), messageBitIndex_(messageBitIndex), + builderBitIndex_(builderBitIndex) { + SetPrimitiveVariables(descriptor, messageBitIndex, builderBitIndex, + &variables_); +} + +PrimitiveFieldGenerator::~PrimitiveFieldGenerator() {} + +int PrimitiveFieldGenerator::GetNumBitsForMessage() const { + return 1; +} + +int PrimitiveFieldGenerator::GetNumBitsForBuilder() const { + return 1; +} + +void PrimitiveFieldGenerator:: +GenerateInterfaceMembers(io::Printer* printer) const { + printer->Print(variables_, + "$deprecation$boolean has$capitalized_name$();\n" + "$deprecation$$type$ get$capitalized_name$();\n"); +} + +void PrimitiveFieldGenerator:: +GenerateMembers(io::Printer* printer) const { + printer->Print(variables_, + "private $field_type$ $name$_;\n" + "$deprecation$public boolean has$capitalized_name$() {\n" + " return $get_has_field_bit_message$;\n" + "}\n"); + + printer->Print(variables_, + "$deprecation$public $type$ get$capitalized_name$() {\n" + " return $name$_;\n" + "}\n"); +} + +void PrimitiveFieldGenerator:: +GenerateBuilderMembers(io::Printer* printer) const { + printer->Print(variables_, + "private $field_type$ $name$_ $default_init$;\n" + "$deprecation$public boolean has$capitalized_name$() {\n" + " return $get_has_field_bit_builder$;\n" + "}\n"); + + printer->Print(variables_, + "$deprecation$public $type$ get$capitalized_name$() {\n" + " return $name$_;\n" + "}\n"); + + printer->Print(variables_, + "$deprecation$public Builder set$capitalized_name$($type$ value) {\n" + "$null_check$" + " $set_has_field_bit_builder$;\n" + " $name$_ = value;\n" + " $on_changed$\n" + " return this;\n" + "}\n" + "$deprecation$public Builder clear$capitalized_name$() {\n" + " $clear_has_field_bit_builder$;\n"); + JavaType type = GetJavaType(descriptor_); + if (type == JAVATYPE_STRING || type == JAVATYPE_BYTES) { + // The default value is not a simple literal so we want to avoid executing + // it multiple times. Instead, get the default out of the default instance. 
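[Editor's note] The builder members generated above track field presence through GenerateGetBit/SetBit/ClearBit, which expand to mask operations on a shared int of has-bits. A stand-alone sketch of roughly that shape (the class, field, and mask values here are illustrative, not generated output):

    // Rough illustration of the has-bit bookkeeping: one shared int holds a
    // presence bit per optional field, manipulated with bit masks.
    public final class HasBitSketch {
        private int bitField0_;
        private int foo_;

        public boolean hasFoo() {
            return (bitField0_ & 0x00000001) == 0x00000001;  // get_has_field_bit
        }

        public HasBitSketch setFoo(int value) {
            bitField0_ |= 0x00000001;                        // set_has_field_bit
            foo_ = value;
            return this;
        }

        public HasBitSketch clearFoo() {
            bitField0_ &= ~0x00000001;                       // clear_has_field_bit
            foo_ = 0;                                        // back to the default
            return this;
        }

        public static void main(String[] args) {
            HasBitSketch b = new HasBitSketch();
            System.out.println(b.hasFoo());              // false
            System.out.println(b.setFoo(5).hasFoo());    // true
            System.out.println(b.clearFoo().hasFoo());   // false
        }
    }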
+ printer->Print(variables_, + " $name$_ = getDefaultInstance().get$capitalized_name$();\n"); + } else { + printer->Print(variables_, + " $name$_ = $default$;\n"); + } + printer->Print(variables_, + " $on_changed$\n" + " return this;\n" + "}\n"); +} + +void PrimitiveFieldGenerator:: +GenerateFieldBuilderInitializationCode(io::Printer* printer) const { + // noop for primitives +} + +void PrimitiveFieldGenerator:: +GenerateInitializationCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_ = $default$;\n"); +} + +void PrimitiveFieldGenerator:: +GenerateBuilderClearCode(io::Printer* printer) const { + printer->Print(variables_, + "$name$_ = $default$;\n" + "$clear_has_field_bit_builder$;\n"); +} + +void PrimitiveFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + printer->Print(variables_, + "if (other.has$capitalized_name$()) {\n" + " set$capitalized_name$(other.get$capitalized_name$());\n" + "}\n"); +} + +void PrimitiveFieldGenerator:: +GenerateBuildingCode(io::Printer* printer) const { + printer->Print(variables_, + "if ($get_has_field_bit_from_local$) {\n" + " $set_has_field_bit_to_local$;\n" + "}\n" + "result.$name$_ = $name$_;\n"); +} + +void PrimitiveFieldGenerator:: +GenerateParsingCode(io::Printer* printer) const { + printer->Print(variables_, + "$set_has_field_bit_builder$;\n" + "$name$_ = input.read$capitalized_type$();\n"); +} + +void PrimitiveFieldGenerator:: +GenerateSerializationCode(io::Printer* printer) const { + printer->Print(variables_, + "if ($get_has_field_bit_message$) {\n" + " output.write$capitalized_type$($number$, $name$_);\n" + "}\n"); +} + +void PrimitiveFieldGenerator:: +GenerateSerializedSizeCode(io::Printer* printer) const { + printer->Print(variables_, + "if ($get_has_field_bit_message$) {\n" + " size += com.google.protobuf.CodedOutputStream\n" + " .compute$capitalized_type$Size($number$, $name$_);\n" + "}\n"); +} + +void PrimitiveFieldGenerator:: +GenerateEqualsCode(io::Printer* printer) const { + switch (GetJavaType(descriptor_)) { + case JAVATYPE_INT: + case JAVATYPE_LONG: + case JAVATYPE_BOOLEAN: + printer->Print(variables_, + "result = result && (get$capitalized_name$()\n" + " == other.get$capitalized_name$());\n"); + break; + + case JAVATYPE_FLOAT: + printer->Print(variables_, + "result = result && (Float.floatToIntBits(get$capitalized_name$())" + " == Float.floatToIntBits(other.get$capitalized_name$()));\n"); + break; + + case JAVATYPE_DOUBLE: + printer->Print(variables_, + "result = result && (Double.doubleToLongBits(get$capitalized_name$())" + " == Double.doubleToLongBits(other.get$capitalized_name$()));\n"); + break; + + case JAVATYPE_STRING: + case JAVATYPE_BYTES: + printer->Print(variables_, + "result = result && get$capitalized_name$()\n" + " .equals(other.get$capitalized_name$());\n"); + break; + + case JAVATYPE_ENUM: + case JAVATYPE_MESSAGE: + default: + GOOGLE_LOG(FATAL) << "Can't get here."; + break; + } +} + +void PrimitiveFieldGenerator:: +GenerateHashCode(io::Printer* printer) const { + printer->Print(variables_, + "hash = (37 * hash) + $constant_name$;\n"); + switch (GetJavaType(descriptor_)) { + case JAVATYPE_INT: + printer->Print(variables_, + "hash = (53 * hash) + get$capitalized_name$();\n"); + break; + + case JAVATYPE_LONG: + printer->Print(variables_, + "hash = (53 * hash) + hashLong(get$capitalized_name$());\n"); + break; + + case JAVATYPE_BOOLEAN: + printer->Print(variables_, + "hash = (53 * hash) + hashBoolean(get$capitalized_name$());\n"); + break; + + case JAVATYPE_FLOAT: + 
printer->Print(variables_, + "hash = (53 * hash) + Float.floatToIntBits(\n" + " get$capitalized_name$());\n"); + break; + + case JAVATYPE_DOUBLE: + printer->Print(variables_, + "hash = (53 * hash) + hashLong(\n" + " Double.doubleToLongBits(get$capitalized_name$()));\n"); + break; + + case JAVATYPE_STRING: + case JAVATYPE_BYTES: + printer->Print(variables_, + "hash = (53 * hash) + get$capitalized_name$().hashCode();\n"); + break; + + case JAVATYPE_ENUM: + case JAVATYPE_MESSAGE: + default: + GOOGLE_LOG(FATAL) << "Can't get here."; + break; + } +} + +string PrimitiveFieldGenerator::GetBoxedType() const { + return BoxedPrimitiveTypeName(GetJavaType(descriptor_)); +} + +// =================================================================== + +RepeatedPrimitiveFieldGenerator:: +RepeatedPrimitiveFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, + int builderBitIndex) + : descriptor_(descriptor), messageBitIndex_(messageBitIndex), + builderBitIndex_(builderBitIndex) { + SetPrimitiveVariables(descriptor, messageBitIndex, builderBitIndex, + &variables_); +} + +RepeatedPrimitiveFieldGenerator::~RepeatedPrimitiveFieldGenerator() {} + +int RepeatedPrimitiveFieldGenerator::GetNumBitsForMessage() const { + return 0; +} + +int RepeatedPrimitiveFieldGenerator::GetNumBitsForBuilder() const { + return 1; +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateInterfaceMembers(io::Printer* printer) const { + printer->Print(variables_, + "$deprecation$java.util.List<$boxed_type$> get$capitalized_name$List();\n" + "$deprecation$int get$capitalized_name$Count();\n" + "$deprecation$$type$ get$capitalized_name$(int index);\n"); +} + + +void RepeatedPrimitiveFieldGenerator:: +GenerateMembers(io::Printer* printer) const { + printer->Print(variables_, + "private $field_list_type$ $name$_;\n" + "$deprecation$public java.util.List<$boxed_type$>\n" + " get$capitalized_name$List() {\n" + " return $name$_;\n" // note: unmodifiable list + "}\n" + "$deprecation$public int get$capitalized_name$Count() {\n" + " return $name$_.size();\n" + "}\n" + "$deprecation$public $type$ get$capitalized_name$(int index) {\n" + " return $name$_.get(index);\n" + "}\n"); + + if (descriptor_->options().packed() && + HasGeneratedMethods(descriptor_->containing_type())) { + printer->Print(variables_, + "private int $name$MemoizedSerializedSize = -1;\n"); + } +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateBuilderMembers(io::Printer* printer) const { + // One field is the list and the bit field keeps track of whether the + // list is immutable. If it's immutable, the invariant is that it must + // either an instance of Collections.emptyList() or it's an ArrayList + // wrapped in a Collections.unmodifiableList() wrapper and nobody else has + // a refererence to the underlying ArrayList. This invariant allows us to + // share instances of lists between protocol buffers avoiding expensive + // memory allocations. Note, immutable is a strong guarantee here -- not + // just that the list cannot be modified via the reference but that the + // list can never be modified. 
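[Editor's note] The comment above describes the immutability invariant that lets builders share list instances and copy only when a mutation actually happens. A self-contained Java analogue of the ensure...IsMutable() pattern, using a boolean flag in place of the builder bit field; names are illustrative:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    // Copy-on-write repeated-field sketch: the list starts as a shared immutable
    // instance and is copied into a private ArrayList on the first write.
    public final class RepeatedFieldSketch {
        private List<Integer> values_ = Collections.emptyList();  // shared default
        private boolean valuesIsMutable_;   // stands in for the builder's mutable bit

        private void ensureValuesIsMutable() {
            if (!valuesIsMutable_) {
                values_ = new ArrayList<>(values_);   // copy only on first write
                valuesIsMutable_ = true;
            }
        }

        public RepeatedFieldSketch addValue(int value) {
            ensureValuesIsMutable();
            values_.add(value);
            return this;
        }

        public static void main(String[] args) {
            RepeatedFieldSketch b = new RepeatedFieldSketch();
            System.out.println(b.values_);                          // []
            System.out.println(b.addValue(1).addValue(2).values_);  // [1, 2]
        }
    }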
+ printer->Print(variables_, + "private $field_list_type$ $name$_ = $empty_list$;\n"); + + printer->Print(variables_, + "private void ensure$capitalized_name$IsMutable() {\n" + " if (!$get_mutable_bit_builder$) {\n" + " $name$_ = new java.util.ArrayList<$boxed_type$>($name$_);\n" + " $set_mutable_bit_builder$;\n" + " }\n" + "}\n"); + + // Note: We return an unmodifiable list because otherwise the caller + // could hold on to the returned list and modify it after the message + // has been built, thus mutating the message which is supposed to be + // immutable. + printer->Print(variables_, + "$deprecation$public java.util.List<$boxed_type$>\n" + " get$capitalized_name$List() {\n" + " return java.util.Collections.unmodifiableList($name$_);\n" + "}\n" + "$deprecation$public int get$capitalized_name$Count() {\n" + " return $name$_.size();\n" + "}\n" + "$deprecation$public $type$ get$capitalized_name$(int index) {\n" + " return $name$_.get(index);\n" + "}\n" + "$deprecation$public Builder set$capitalized_name$(\n" + " int index, $type$ value) {\n" + "$null_check$" + " ensure$capitalized_name$IsMutable();\n" + " $name$_.set(index, value);\n" + " $on_changed$\n" + " return this;\n" + "}\n" + "$deprecation$public Builder add$capitalized_name$($type$ value) {\n" + "$null_check$" + " ensure$capitalized_name$IsMutable();\n" + " $name$_.add(value);\n" + " $on_changed$\n" + " return this;\n" + "}\n" + "$deprecation$public Builder addAll$capitalized_name$(\n" + " java.lang.Iterable values) {\n" + " ensure$capitalized_name$IsMutable();\n" + " super.addAll(values, $name$_);\n" + " $on_changed$\n" + " return this;\n" + "}\n" + "$deprecation$public Builder clear$capitalized_name$() {\n" + " $name$_ = $empty_list$;\n" + " $clear_mutable_bit_builder$;\n" + " $on_changed$\n" + " return this;\n" + "}\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateFieldBuilderInitializationCode(io::Printer* printer) const { + // noop for primitives +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateInitializationCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_ = $empty_list$;\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateBuilderClearCode(io::Printer* printer) const { + printer->Print(variables_, + "$name$_ = $empty_list$;\n" + "$clear_mutable_bit_builder$;\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + // The code below does two optimizations: + // 1. If the other list is empty, there's nothing to do. This ensures we + // don't allocate a new array if we already have an immutable one. + // 2. If the other list is non-empty and our current list is empty, we can + // reuse the other list which is guaranteed to be immutable. + printer->Print(variables_, + "if (!other.$name$_.isEmpty()) {\n" + " if ($name$_.isEmpty()) {\n" + " $name$_ = other.$name$_;\n" + " $clear_mutable_bit_builder$;\n" + " } else {\n" + " ensure$capitalized_name$IsMutable();\n" + " $name$_.addAll(other.$name$_);\n" + " }\n" + " $on_changed$\n" + "}\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateBuildingCode(io::Printer* printer) const { + // The code below ensures that the result has an immutable list. If our + // list is immutable, we can just reuse it. If not, we make it immutable. 
+ printer->Print(variables_, + "if ($get_mutable_bit_builder$) {\n" + " $name$_ = java.util.Collections.unmodifiableList($name$_);\n" + " $clear_mutable_bit_builder$;\n" + "}\n" + "result.$name$_ = $name$_;\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateParsingCode(io::Printer* printer) const { + printer->Print(variables_, + "ensure$capitalized_name$IsMutable();\n" + "$name$_.add(input.read$capitalized_type$());\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateParsingCodeFromPacked(io::Printer* printer) const { + printer->Print(variables_, + "int length = input.readRawVarint32();\n" + "int limit = input.pushLimit(length);\n" + "while (input.getBytesUntilLimit() > 0) {\n" + " add$capitalized_name$(input.read$capitalized_type$());\n" + "}\n" + "input.popLimit(limit);\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateSerializationCode(io::Printer* printer) const { + if (descriptor_->options().packed()) { + printer->Print(variables_, + "if (get$capitalized_name$List().size() > 0) {\n" + " output.writeRawVarint32($tag$);\n" + " output.writeRawVarint32($name$MemoizedSerializedSize);\n" + "}\n" + "for (int i = 0; i < $name$_.size(); i++) {\n" + " output.write$capitalized_type$NoTag($name$_.get(i));\n" + "}\n"); + } else { + printer->Print(variables_, + "for (int i = 0; i < $name$_.size(); i++) {\n" + " output.write$capitalized_type$($number$, $name$_.get(i));\n" + "}\n"); + } +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateSerializedSizeCode(io::Printer* printer) const { + printer->Print(variables_, + "{\n" + " int dataSize = 0;\n"); + printer->Indent(); + + if (FixedSize(GetType(descriptor_)) == -1) { + printer->Print(variables_, + "for (int i = 0; i < $name$_.size(); i++) {\n" + " dataSize += com.google.protobuf.CodedOutputStream\n" + " .compute$capitalized_type$SizeNoTag($name$_.get(i));\n" + "}\n"); + } else { + printer->Print(variables_, + "dataSize = $fixed_size$ * get$capitalized_name$List().size();\n"); + } + + printer->Print( + "size += dataSize;\n"); + + if (descriptor_->options().packed()) { + printer->Print(variables_, + "if (!get$capitalized_name$List().isEmpty()) {\n" + " size += $tag_size$;\n" + " size += com.google.protobuf.CodedOutputStream\n" + " .computeInt32SizeNoTag(dataSize);\n" + "}\n"); + } else { + printer->Print(variables_, + "size += $tag_size$ * get$capitalized_name$List().size();\n"); + } + + // cache the data size for packed fields. 
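[Editor's note] The packed serialization path above writes one tag (wire type 2), then the payload size as a varint, then every element with no per-element tag; GenerateParsingCodeFromPacked mirrors this with a pushLimit/popLimit loop. A hand-rolled, self-contained Java sketch of that framing, with local varint helpers instead of the protobuf runtime:

    import java.io.ByteArrayOutputStream;

    // Packed wire-format sketch: tag, length, then back-to-back varint elements.
    public final class PackedSketch {
        static void writeVarint(ByteArrayOutputStream out, int value) {
            while ((value & ~0x7F) != 0) {
                out.write((value & 0x7F) | 0x80);
                value >>>= 7;
            }
            out.write(value);
        }

        static int[] readVarint(byte[] in, int pos) {    // returns {value, nextPos}
            int shift = 0, result = 0;
            while (true) {
                byte b = in[pos++];
                result |= (b & 0x7F) << shift;
                if ((b & 0x80) == 0) return new int[] {result, pos};
                shift += 7;
            }
        }

        public static void main(String[] args) {
            int fieldNumber = 4;
            int[] values = {3, 270, 86942};

            // Serialize: tag, payload size (the memoized dataSize), then elements.
            ByteArrayOutputStream payload = new ByteArrayOutputStream();
            for (int v : values) writeVarint(payload, v);
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            writeVarint(out, (fieldNumber << 3) | 2);    // wire type 2 = length-delimited
            writeVarint(out, payload.size());
            out.write(payload.toByteArray(), 0, payload.size());

            // Parse: read the length, then consume varints until the limit is
            // reached, mirroring the pushLimit()/getBytesUntilLimit() loop.
            byte[] buf = out.toByteArray();
            int[] r = readVarint(buf, 0);                // tag (ignored here)
            r = readVarint(buf, r[1]);                   // payload length
            int pos = r[1];
            int limit = pos + r[0];
            StringBuilder decoded = new StringBuilder();
            while (pos < limit) {
                r = readVarint(buf, pos);
                decoded.append(r[0]).append(' ');
                pos = r[1];
            }
            System.out.println(decoded.toString().trim());  // prints: 3 270 86942
        }
    }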
+ if (descriptor_->options().packed()) { + printer->Print(variables_, + "$name$MemoizedSerializedSize = dataSize;\n"); + } + + printer->Outdent(); + printer->Print("}\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateEqualsCode(io::Printer* printer) const { + printer->Print(variables_, + "result = result && get$capitalized_name$List()\n" + " .equals(other.get$capitalized_name$List());\n"); +} + +void RepeatedPrimitiveFieldGenerator:: +GenerateHashCode(io::Printer* printer) const { + printer->Print(variables_, + "if (get$capitalized_name$Count() > 0) {\n" + " hash = (37 * hash) + $constant_name$;\n" + " hash = (53 * hash) + get$capitalized_name$List().hashCode();\n" + "}\n"); +} + +string RepeatedPrimitiveFieldGenerator::GetBoxedType() const { + return BoxedPrimitiveTypeName(GetJavaType(descriptor_)); +} + +} // namespace java +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_primitive_field.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_primitive_field.h new file mode 100644 index 0000000000..7900fac52b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_primitive_field.h @@ -0,0 +1,121 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
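[Editor's note] The GenerateEqualsCode/GenerateHashCode bodies above compare floats and doubles bit-for-bit and mix each present field into the hash with the 37/53 multipliers. A stand-alone sketch of that scheme; hashLong below is a local stand-in for the runtime helper the generated code calls, and the seed value is arbitrary for this sketch:

    // Sketch of the generated hashing scheme: 37 * hash + field number constant,
    // then 53 * hash + the field's value hash, with bit-exact float handling.
    public final class HashSketch {
        static int hashLong(long n) { return (int) (n ^ (n >>> 32)); }

        static int messageHash(int fooFieldNumber, long foo,
                               int barFieldNumber, double bar) {
            int hash = 41;  // arbitrary seed for this sketch
            hash = (37 * hash) + fooFieldNumber;
            hash = (53 * hash) + hashLong(foo);
            hash = (37 * hash) + barFieldNumber;
            hash = (53 * hash) + hashLong(Double.doubleToLongBits(bar));
            return hash;
        }

        public static void main(String[] args) {
            // Bit-exact comparison distinguishes 0.0 from -0.0, unlike ==.
            System.out.println(0.0 == -0.0);                          // true
            System.out.println(Double.doubleToLongBits(0.0)
                    == Double.doubleToLongBits(-0.0));                // false
            System.out.println(messageHash(1, 42L, 2, 3.14));
        }
    }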
+ +#ifndef GOOGLE_PROTOBUF_COMPILER_JAVA_PRIMITIVE_FIELD_H__ +#define GOOGLE_PROTOBUF_COMPILER_JAVA_PRIMITIVE_FIELD_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +class PrimitiveFieldGenerator : public FieldGenerator { + public: + explicit PrimitiveFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, int builderBitIndex); + ~PrimitiveFieldGenerator(); + + // implements FieldGenerator --------------------------------------- + int GetNumBitsForMessage() const; + int GetNumBitsForBuilder() const; + void GenerateInterfaceMembers(io::Printer* printer) const; + void GenerateMembers(io::Printer* printer) const; + void GenerateBuilderMembers(io::Printer* printer) const; + void GenerateInitializationCode(io::Printer* printer) const; + void GenerateBuilderClearCode(io::Printer* printer) const; + void GenerateMergingCode(io::Printer* printer) const; + void GenerateBuildingCode(io::Printer* printer) const; + void GenerateParsingCode(io::Printer* printer) const; + void GenerateSerializationCode(io::Printer* printer) const; + void GenerateSerializedSizeCode(io::Printer* printer) const; + void GenerateFieldBuilderInitializationCode(io::Printer* printer) const; + void GenerateEqualsCode(io::Printer* printer) const; + void GenerateHashCode(io::Printer* printer) const; + + string GetBoxedType() const; + + private: + const FieldDescriptor* descriptor_; + map variables_; + const int messageBitIndex_; + const int builderBitIndex_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(PrimitiveFieldGenerator); +}; + +class RepeatedPrimitiveFieldGenerator : public FieldGenerator { + public: + explicit RepeatedPrimitiveFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, int builderBitIndex); + ~RepeatedPrimitiveFieldGenerator(); + + // implements FieldGenerator --------------------------------------- + int GetNumBitsForMessage() const; + int GetNumBitsForBuilder() const; + void GenerateInterfaceMembers(io::Printer* printer) const; + void GenerateMembers(io::Printer* printer) const; + void GenerateBuilderMembers(io::Printer* printer) const; + void GenerateInitializationCode(io::Printer* printer) const; + void GenerateBuilderClearCode(io::Printer* printer) const; + void GenerateMergingCode(io::Printer* printer) const; + void GenerateBuildingCode(io::Printer* printer) const; + void GenerateParsingCode(io::Printer* printer) const; + void GenerateParsingCodeFromPacked(io::Printer* printer) const; + void GenerateSerializationCode(io::Printer* printer) const; + void GenerateSerializedSizeCode(io::Printer* printer) const; + void GenerateFieldBuilderInitializationCode(io::Printer* printer) const; + void GenerateEqualsCode(io::Printer* printer) const; + void GenerateHashCode(io::Printer* printer) const; + + string GetBoxedType() const; + + private: + const FieldDescriptor* descriptor_; + map variables_; + const int messageBitIndex_; + const int builderBitIndex_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RepeatedPrimitiveFieldGenerator); +}; + +} // namespace java +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_JAVA_PRIMITIVE_FIELD_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_service.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_service.cc new file mode 100644 index 0000000000..1ae4f46166 --- /dev/null +++ 
b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_service.cc @@ -0,0 +1,444 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +ServiceGenerator::ServiceGenerator(const ServiceDescriptor* descriptor) + : descriptor_(descriptor) {} + +ServiceGenerator::~ServiceGenerator() {} + +void ServiceGenerator::Generate(io::Printer* printer) { + bool is_own_file = descriptor_->file()->options().java_multiple_files(); + printer->Print( + "public $static$ abstract class $classname$\n" + " implements com.google.protobuf.Service {\n", + "static", is_own_file ? "" : "static", + "classname", descriptor_->name()); + printer->Indent(); + + printer->Print( + "protected $classname$() {}\n\n", + "classname", descriptor_->name()); + + GenerateInterface(printer); + + GenerateNewReflectiveServiceMethod(printer); + GenerateNewReflectiveBlockingServiceMethod(printer); + + GenerateAbstractMethods(printer); + + // Generate getDescriptor() and getDescriptorForType(). + printer->Print( + "public static final\n" + " com.google.protobuf.Descriptors.ServiceDescriptor\n" + " getDescriptor() {\n" + " return $file$.getDescriptor().getServices().get($index$);\n" + "}\n", + "file", ClassName(descriptor_->file()), + "index", SimpleItoa(descriptor_->index())); + GenerateGetDescriptorForType(printer); + + // Generate more stuff. 
+ GenerateCallMethod(printer); + GenerateGetPrototype(REQUEST, printer); + GenerateGetPrototype(RESPONSE, printer); + GenerateStub(printer); + GenerateBlockingStub(printer); + + printer->Outdent(); + printer->Print("}\n\n"); +} + +void ServiceGenerator::GenerateGetDescriptorForType(io::Printer* printer) { + printer->Print( + "public final com.google.protobuf.Descriptors.ServiceDescriptor\n" + " getDescriptorForType() {\n" + " return getDescriptor();\n" + "}\n"); +} + +void ServiceGenerator::GenerateInterface(io::Printer* printer) { + printer->Print("public interface Interface {\n"); + printer->Indent(); + GenerateAbstractMethods(printer); + printer->Outdent(); + printer->Print("}\n\n"); +} + +void ServiceGenerator::GenerateNewReflectiveServiceMethod( + io::Printer* printer) { + printer->Print( + "public static com.google.protobuf.Service newReflectiveService(\n" + " final Interface impl) {\n" + " return new $classname$() {\n", + "classname", descriptor_->name()); + printer->Indent(); + printer->Indent(); + + for (int i = 0; i < descriptor_->method_count(); i++) { + const MethodDescriptor* method = descriptor_->method(i); + printer->Print("@java.lang.Override\n"); + GenerateMethodSignature(printer, method, IS_CONCRETE); + printer->Print( + " {\n" + " impl.$method$(controller, request, done);\n" + "}\n\n", + "method", UnderscoresToCamelCase(method)); + } + + printer->Outdent(); + printer->Print("};\n"); + printer->Outdent(); + printer->Print("}\n\n"); +} + +void ServiceGenerator::GenerateNewReflectiveBlockingServiceMethod( + io::Printer* printer) { + printer->Print( + "public static com.google.protobuf.BlockingService\n" + " newReflectiveBlockingService(final BlockingInterface impl) {\n" + " return new com.google.protobuf.BlockingService() {\n"); + printer->Indent(); + printer->Indent(); + + GenerateGetDescriptorForType(printer); + + GenerateCallBlockingMethod(printer); + GenerateGetPrototype(REQUEST, printer); + GenerateGetPrototype(RESPONSE, printer); + + printer->Outdent(); + printer->Print("};\n"); + printer->Outdent(); + printer->Print("}\n\n"); +} + +void ServiceGenerator::GenerateAbstractMethods(io::Printer* printer) { + for (int i = 0; i < descriptor_->method_count(); i++) { + const MethodDescriptor* method = descriptor_->method(i); + GenerateMethodSignature(printer, method, IS_ABSTRACT); + printer->Print(";\n\n"); + } +} + +void ServiceGenerator::GenerateCallMethod(io::Printer* printer) { + printer->Print( + "\n" + "public final void callMethod(\n" + " com.google.protobuf.Descriptors.MethodDescriptor method,\n" + " com.google.protobuf.RpcController controller,\n" + " com.google.protobuf.Message request,\n" + " com.google.protobuf.RpcCallback<\n" + " com.google.protobuf.Message> done) {\n" + " if (method.getService() != getDescriptor()) {\n" + " throw new java.lang.IllegalArgumentException(\n" + " \"Service.callMethod() given method descriptor for wrong \" +\n" + " \"service type.\");\n" + " }\n" + " switch(method.getIndex()) {\n"); + printer->Indent(); + printer->Indent(); + + for (int i = 0; i < descriptor_->method_count(); i++) { + const MethodDescriptor* method = descriptor_->method(i); + map vars; + vars["index"] = SimpleItoa(i); + vars["method"] = UnderscoresToCamelCase(method); + vars["input"] = ClassName(method->input_type()); + vars["output"] = ClassName(method->output_type()); + printer->Print(vars, + "case $index$:\n" + " this.$method$(controller, ($input$)request,\n" + " com.google.protobuf.RpcUtil.<$output$>specializeCallback(\n" + " done));\n" + " return;\n"); + } + + 
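[Editor's note] GenerateCallMethod above emits a switch over method.getIndex() that casts the untyped request and forwards it to the typed abstract method. A simplified, self-contained Java analogue of that index-based dispatch; the interfaces and message classes here are stand-ins, not the com.google.protobuf.Service API:

    // Simplified analogue of the generated callMethod() dispatch.
    public final class DispatchSketch {
        interface RpcCallback<T> { void run(T response); }

        static final class SearchRequest { final String query; SearchRequest(String q) { query = q; } }
        static final class SearchResponse { final String result; SearchResponse(String r) { result = r; } }

        abstract static class SearchService {
            abstract void search(SearchRequest request, RpcCallback<SearchResponse> done);

            // Untyped entry point, dispatching by method index like callMethod().
            final void callMethod(int methodIndex, Object request, RpcCallback<Object> done) {
                switch (methodIndex) {
                    case 0:
                        search((SearchRequest) request, response -> done.run(response));
                        return;
                    default:
                        throw new AssertionError("Can't get here.");
                }
            }
        }

        public static void main(String[] args) {
            SearchService impl = new SearchService() {
                @Override void search(SearchRequest request, RpcCallback<SearchResponse> done) {
                    done.run(new SearchResponse("results for " + request.query));
                }
            };
            impl.callMethod(0, new SearchRequest("protobuf"),
                    r -> System.out.println(((SearchResponse) r).result));
        }
    }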
printer->Print( + "default:\n" + " throw new java.lang.AssertionError(\"Can't get here.\");\n"); + + printer->Outdent(); + printer->Outdent(); + + printer->Print( + " }\n" + "}\n" + "\n"); +} + +void ServiceGenerator::GenerateCallBlockingMethod(io::Printer* printer) { + printer->Print( + "\n" + "public final com.google.protobuf.Message callBlockingMethod(\n" + " com.google.protobuf.Descriptors.MethodDescriptor method,\n" + " com.google.protobuf.RpcController controller,\n" + " com.google.protobuf.Message request)\n" + " throws com.google.protobuf.ServiceException {\n" + " if (method.getService() != getDescriptor()) {\n" + " throw new java.lang.IllegalArgumentException(\n" + " \"Service.callBlockingMethod() given method descriptor for \" +\n" + " \"wrong service type.\");\n" + " }\n" + " switch(method.getIndex()) {\n"); + printer->Indent(); + printer->Indent(); + + for (int i = 0; i < descriptor_->method_count(); i++) { + const MethodDescriptor* method = descriptor_->method(i); + map vars; + vars["index"] = SimpleItoa(i); + vars["method"] = UnderscoresToCamelCase(method); + vars["input"] = ClassName(method->input_type()); + vars["output"] = ClassName(method->output_type()); + printer->Print(vars, + "case $index$:\n" + " return impl.$method$(controller, ($input$)request);\n"); + } + + printer->Print( + "default:\n" + " throw new java.lang.AssertionError(\"Can't get here.\");\n"); + + printer->Outdent(); + printer->Outdent(); + + printer->Print( + " }\n" + "}\n" + "\n"); +} + +void ServiceGenerator::GenerateGetPrototype(RequestOrResponse which, + io::Printer* printer) { + /* + * TODO(cpovirk): The exception message says "Service.foo" when it may be + * "BlockingService.foo." Consider fixing. + */ + printer->Print( + "public final com.google.protobuf.Message\n" + " get$request_or_response$Prototype(\n" + " com.google.protobuf.Descriptors.MethodDescriptor method) {\n" + " if (method.getService() != getDescriptor()) {\n" + " throw new java.lang.IllegalArgumentException(\n" + " \"Service.get$request_or_response$Prototype() given method \" +\n" + " \"descriptor for wrong service type.\");\n" + " }\n" + " switch(method.getIndex()) {\n", + "request_or_response", (which == REQUEST) ? "Request" : "Response"); + printer->Indent(); + printer->Indent(); + + for (int i = 0; i < descriptor_->method_count(); i++) { + const MethodDescriptor* method = descriptor_->method(i); + map vars; + vars["index"] = SimpleItoa(i); + vars["type"] = ClassName( + (which == REQUEST) ? 
method->input_type() : method->output_type()); + printer->Print(vars, + "case $index$:\n" + " return $type$.getDefaultInstance();\n"); + } + + printer->Print( + "default:\n" + " throw new java.lang.AssertionError(\"Can't get here.\");\n"); + + printer->Outdent(); + printer->Outdent(); + + printer->Print( + " }\n" + "}\n" + "\n"); +} + +void ServiceGenerator::GenerateStub(io::Printer* printer) { + printer->Print( + "public static Stub newStub(\n" + " com.google.protobuf.RpcChannel channel) {\n" + " return new Stub(channel);\n" + "}\n" + "\n" + "public static final class Stub extends $classname$ implements Interface {" + "\n", + "classname", ClassName(descriptor_)); + printer->Indent(); + + printer->Print( + "private Stub(com.google.protobuf.RpcChannel channel) {\n" + " this.channel = channel;\n" + "}\n" + "\n" + "private final com.google.protobuf.RpcChannel channel;\n" + "\n" + "public com.google.protobuf.RpcChannel getChannel() {\n" + " return channel;\n" + "}\n"); + + for (int i = 0; i < descriptor_->method_count(); i++) { + const MethodDescriptor* method = descriptor_->method(i); + printer->Print("\n"); + GenerateMethodSignature(printer, method, IS_CONCRETE); + printer->Print(" {\n"); + printer->Indent(); + + map vars; + vars["index"] = SimpleItoa(i); + vars["output"] = ClassName(method->output_type()); + printer->Print(vars, + "channel.callMethod(\n" + " getDescriptor().getMethods().get($index$),\n" + " controller,\n" + " request,\n" + " $output$.getDefaultInstance(),\n" + " com.google.protobuf.RpcUtil.generalizeCallback(\n" + " done,\n" + " $output$.class,\n" + " $output$.getDefaultInstance()));\n"); + + printer->Outdent(); + printer->Print("}\n"); + } + + printer->Outdent(); + printer->Print( + "}\n" + "\n"); +} + +void ServiceGenerator::GenerateBlockingStub(io::Printer* printer) { + printer->Print( + "public static BlockingInterface newBlockingStub(\n" + " com.google.protobuf.BlockingRpcChannel channel) {\n" + " return new BlockingStub(channel);\n" + "}\n" + "\n"); + + printer->Print( + "public interface BlockingInterface {"); + printer->Indent(); + + for (int i = 0; i < descriptor_->method_count(); i++) { + const MethodDescriptor* method = descriptor_->method(i); + GenerateBlockingMethodSignature(printer, method); + printer->Print(";\n"); + } + + printer->Outdent(); + printer->Print( + "}\n" + "\n"); + + printer->Print( + "private static final class BlockingStub implements BlockingInterface {\n"); + printer->Indent(); + + printer->Print( + "private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {\n" + " this.channel = channel;\n" + "}\n" + "\n" + "private final com.google.protobuf.BlockingRpcChannel channel;\n"); + + for (int i = 0; i < descriptor_->method_count(); i++) { + const MethodDescriptor* method = descriptor_->method(i); + GenerateBlockingMethodSignature(printer, method); + printer->Print(" {\n"); + printer->Indent(); + + map vars; + vars["index"] = SimpleItoa(i); + vars["output"] = ClassName(method->output_type()); + printer->Print(vars, + "return ($output$) channel.callBlockingMethod(\n" + " getDescriptor().getMethods().get($index$),\n" + " controller,\n" + " request,\n" + " $output$.getDefaultInstance());\n"); + + printer->Outdent(); + printer->Print( + "}\n" + "\n"); + } + + printer->Outdent(); + printer->Print("}\n"); +} + +void ServiceGenerator::GenerateMethodSignature(io::Printer* printer, + const MethodDescriptor* method, + IsAbstract is_abstract) { + map vars; + vars["name"] = UnderscoresToCamelCase(method); + vars["input"] = 
ClassName(method->input_type()); + vars["output"] = ClassName(method->output_type()); + vars["abstract"] = (is_abstract == IS_ABSTRACT) ? "abstract" : ""; + printer->Print(vars, + "public $abstract$ void $name$(\n" + " com.google.protobuf.RpcController controller,\n" + " $input$ request,\n" + " com.google.protobuf.RpcCallback<$output$> done)"); +} + +void ServiceGenerator::GenerateBlockingMethodSignature( + io::Printer* printer, + const MethodDescriptor* method) { + map vars; + vars["method"] = UnderscoresToCamelCase(method); + vars["input"] = ClassName(method->input_type()); + vars["output"] = ClassName(method->output_type()); + printer->Print(vars, + "\n" + "public $output$ $method$(\n" + " com.google.protobuf.RpcController controller,\n" + " $input$ request)\n" + " throws com.google.protobuf.ServiceException"); +} + +} // namespace java +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_service.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_service.h new file mode 100644 index 0000000000..e07eebf75a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_service.h @@ -0,0 +1,113 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
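[Editor's note] GenerateBlockingStub above produces a stub whose typed methods simply forward (method descriptor, controller, request, response prototype) to a BlockingRpcChannel and cast the returned message. A toy, self-contained analogue of that delegation; BlockingChannel and the Echo types below are stand-ins, not protobuf runtime classes:

    // Stand-alone analogue of a generated BlockingStub: the typed method hands
    // the call to an untyped channel and casts the reply back.
    public final class BlockingStubSketch {
        interface BlockingChannel { Object callBlockingMethod(int methodIndex, Object request); }

        interface EchoBlockingInterface { String echo(String request); }

        static final class EchoBlockingStub implements EchoBlockingInterface {
            private final BlockingChannel channel;
            EchoBlockingStub(BlockingChannel channel) { this.channel = channel; }

            @Override public String echo(String request) {
                // Mirrors the generated "return ($output$) channel.callBlockingMethod(...)".
                return (String) channel.callBlockingMethod(0, request);
            }
        }

        public static void main(String[] args) {
            // A toy channel that "transports" the call in-process.
            BlockingChannel channel = (index, request) -> "echo[" + index + "]: " + request;
            EchoBlockingInterface stub = new EchoBlockingStub(channel);
            System.out.println(stub.echo("hello"));
        }
    }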
+ +#ifndef GOOGLE_PROTOBUF_COMPILER_JAVA_SERVICE_H__ +#define GOOGLE_PROTOBUF_COMPILER_JAVA_SERVICE_H__ + +#include +#include + +namespace google { +namespace protobuf { + namespace io { + class Printer; // printer.h + } +} + +namespace protobuf { +namespace compiler { +namespace java { + +class ServiceGenerator { + public: + explicit ServiceGenerator(const ServiceDescriptor* descriptor); + ~ServiceGenerator(); + + void Generate(io::Printer* printer); + + private: + + // Generate the getDescriptorForType() method. + void GenerateGetDescriptorForType(io::Printer* printer); + + // Generate a Java interface for the service. + void GenerateInterface(io::Printer* printer); + + // Generate newReflectiveService() method. + void GenerateNewReflectiveServiceMethod(io::Printer* printer); + + // Generate newReflectiveBlockingService() method. + void GenerateNewReflectiveBlockingServiceMethod(io::Printer* printer); + + // Generate abstract method declarations for all methods. + void GenerateAbstractMethods(io::Printer* printer); + + // Generate the implementation of Service.callMethod(). + void GenerateCallMethod(io::Printer* printer); + + // Generate the implementation of BlockingService.callBlockingMethod(). + void GenerateCallBlockingMethod(io::Printer* printer); + + // Generate the implementations of Service.get{Request,Response}Prototype(). + enum RequestOrResponse { REQUEST, RESPONSE }; + void GenerateGetPrototype(RequestOrResponse which, io::Printer* printer); + + // Generate a stub implementation of the service. + void GenerateStub(io::Printer* printer); + + // Generate a method signature, possibly abstract, without body or trailing + // semicolon. + enum IsAbstract { IS_ABSTRACT, IS_CONCRETE }; + void GenerateMethodSignature(io::Printer* printer, + const MethodDescriptor* method, + IsAbstract is_abstract); + + // Generate a blocking stub interface and implementation of the service. + void GenerateBlockingStub(io::Printer* printer); + + // Generate the method signature for one method of a blocking stub. + void GenerateBlockingMethodSignature(io::Printer* printer, + const MethodDescriptor* method); + + const ServiceDescriptor* descriptor_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ServiceGenerator); +}; + +} // namespace java +} // namespace compiler +} // namespace protobuf + +#endif // NET_PROTO2_COMPILER_JAVA_SERVICE_H__ +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_string_field.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_string_field.cc new file mode 100644 index 0000000000..222285bdb0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_string_field.cc @@ -0,0 +1,610 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. 
+// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Author: jonp@google.com (Jon Perlow) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include + +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +using internal::WireFormat; +using internal::WireFormatLite; + +namespace { + +void SetPrimitiveVariables(const FieldDescriptor* descriptor, + int messageBitIndex, + int builderBitIndex, + map* variables) { + (*variables)["name"] = + UnderscoresToCamelCase(descriptor); + (*variables)["capitalized_name"] = + UnderscoresToCapitalizedCamelCase(descriptor); + (*variables)["constant_name"] = FieldConstantName(descriptor); + (*variables)["number"] = SimpleItoa(descriptor->number()); + (*variables)["empty_list"] = "com.google.protobuf.LazyStringArrayList.EMPTY"; + + (*variables)["default"] = DefaultValue(descriptor); + (*variables)["default_init"] = ("= " + DefaultValue(descriptor)); + (*variables)["capitalized_type"] = "String"; + (*variables)["tag"] = SimpleItoa(WireFormat::MakeTag(descriptor)); + (*variables)["tag_size"] = SimpleItoa( + WireFormat::TagSize(descriptor->number(), GetType(descriptor))); + (*variables)["null_check"] = + " if (value == null) {\n" + " throw new NullPointerException();\n" + " }\n"; + + // TODO(birdo): Add @deprecated javadoc when generating javadoc is supported + // by the proto compiler + (*variables)["deprecation"] = descriptor->options().deprecated() + ? "@java.lang.Deprecated " : ""; + (*variables)["on_changed"] = + HasDescriptorMethods(descriptor->containing_type()) ? "onChanged();" : ""; + + // For singular messages and builders, one bit is used for the hasField bit. + (*variables)["get_has_field_bit_message"] = GenerateGetBit(messageBitIndex); + + (*variables)["get_has_field_bit_builder"] = GenerateGetBit(builderBitIndex); + (*variables)["set_has_field_bit_builder"] = GenerateSetBit(builderBitIndex); + (*variables)["clear_has_field_bit_builder"] = + GenerateClearBit(builderBitIndex); + + // For repated builders, one bit is used for whether the array is immutable. 
+ (*variables)["get_mutable_bit_builder"] = GenerateGetBit(builderBitIndex); + (*variables)["set_mutable_bit_builder"] = GenerateSetBit(builderBitIndex); + (*variables)["clear_mutable_bit_builder"] = GenerateClearBit(builderBitIndex); + + (*variables)["get_has_field_bit_from_local"] = + GenerateGetBitFromLocal(builderBitIndex); + (*variables)["set_has_field_bit_to_local"] = + GenerateSetBitToLocal(messageBitIndex); +} + +} // namespace + +// =================================================================== + +StringFieldGenerator:: +StringFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, + int builderBitIndex) + : descriptor_(descriptor), messageBitIndex_(messageBitIndex), + builderBitIndex_(builderBitIndex) { + SetPrimitiveVariables(descriptor, messageBitIndex, builderBitIndex, + &variables_); +} + +StringFieldGenerator::~StringFieldGenerator() {} + +int StringFieldGenerator::GetNumBitsForMessage() const { + return 1; +} + +int StringFieldGenerator::GetNumBitsForBuilder() const { + return 1; +} + +// A note about how strings are handled. This code used to just store a String +// in the Message. This had two issues: +// +// 1. It wouldn't roundtrip byte arrays that were not vaid UTF-8 encoded +// strings, but rather fields that were raw bytes incorrectly marked +// as strings in the proto file. This is common because in the proto1 +// syntax, string was the way to indicate bytes and C++ engineers can +// easily make this mistake without affecting the C++ API. By converting to +// strings immediately, some java code might corrupt these byte arrays as +// it passes through a java server even if the field was never accessed by +// application code. +// +// 2. There's a performance hit to converting between bytes and strings and +// it many cases, the field is never even read by the application code. This +// avoids unnecessary conversions in the common use cases. +// +// So now, the field for String is maintained as an Object reference which can +// either store a String or a ByteString. The code uses an instanceof check +// to see which one it has and converts to the other one if needed. It remembers +// the last value requested (in a thread safe manner) as this is most likely +// the one needed next. The thread safety is such that if two threads both +// convert the field because the changes made by each thread were not visible to +// the other, they may cause a conversion to happen more times than would +// otherwise be necessary. This was deemed better than adding synchronization +// overhead. It will not cause any corruption issues or affect the behavior of +// the API. The instanceof check is also highly optimized in the JVM and we +// decided it was better to reduce the memory overhead by not having two +// separate fields but rather use dynamic type checking. +// +// For single fields, the logic for this is done inside the generated code. For +// repeated fields, the logic is done in LazyStringArrayList and +// UnmodifiableLazyStringList. 
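[Editor's note] The long comment above explains that a string field is stored in a single Object slot that may hold either a String or a ByteString, decoding lazily and caching the String only when the bytes are valid UTF-8. A self-contained Java sketch of the same idea, with byte[] standing in for com.google.protobuf.ByteString and a round-trip check standing in for isValidUtf8():

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    // Lazy String/bytes representation sketch: one Object slot, decoded on first
    // use; invalid byte sequences are not cached so they round-trip unchanged.
    public final class LazyStringSketch {
        private Object name_ = "";   // either String or byte[]

        void setNameBytes(byte[] utf8) { name_ = utf8; }   // e.g. straight off the wire

        String getName() {
            Object ref = name_;
            if (ref instanceof String) {
                return (String) ref;
            }
            byte[] bytes = (byte[]) ref;
            String s = new String(bytes, StandardCharsets.UTF_8);
            // Cache only if decoding was lossless (stand-in for isValidUtf8()).
            if (Arrays.equals(bytes, s.getBytes(StandardCharsets.UTF_8))) {
                name_ = s;
            }
            return s;
        }

        byte[] getNameBytes() {
            Object ref = name_;
            if (ref instanceof String) {
                byte[] b = ((String) ref).getBytes(StandardCharsets.UTF_8);
                name_ = b;   // remember the representation requested last
                return b;
            }
            return (byte[]) ref;
        }

        public static void main(String[] args) {
            LazyStringSketch m = new LazyStringSketch();
            m.setNameBytes("héllo".getBytes(StandardCharsets.UTF_8));
            System.out.println(m.getName());               // decoded lazily on first access
            System.out.println(m.getNameBytes().length);   // 6 UTF-8 bytes
        }
    }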
+void StringFieldGenerator:: +GenerateInterfaceMembers(io::Printer* printer) const { + printer->Print(variables_, + "$deprecation$boolean has$capitalized_name$();\n" + "$deprecation$java.lang.String get$capitalized_name$();\n"); +} + +void StringFieldGenerator:: +GenerateMembers(io::Printer* printer) const { + printer->Print(variables_, + "private java.lang.Object $name$_;\n" + "$deprecation$public boolean has$capitalized_name$() {\n" + " return $get_has_field_bit_message$;\n" + "}\n"); + + printer->Print(variables_, + "$deprecation$public java.lang.String get$capitalized_name$() {\n" + " java.lang.Object ref = $name$_;\n" + " if (ref instanceof java.lang.String) {\n" + " return (java.lang.String) ref;\n" + " } else {\n" + " com.google.protobuf.ByteString bs = \n" + " (com.google.protobuf.ByteString) ref;\n" + " java.lang.String s = bs.toStringUtf8();\n" + " if (com.google.protobuf.Internal.isValidUtf8(bs)) {\n" + " $name$_ = s;\n" + " }\n" + " return s;\n" + " }\n" + "}\n" + "private com.google.protobuf.ByteString get$capitalized_name$Bytes() {\n" + " java.lang.Object ref = $name$_;\n" + " if (ref instanceof java.lang.String) {\n" + " com.google.protobuf.ByteString b = \n" + " com.google.protobuf.ByteString.copyFromUtf8(\n" + " (java.lang.String) ref);\n" + " $name$_ = b;\n" + " return b;\n" + " } else {\n" + " return (com.google.protobuf.ByteString) ref;\n" + " }\n" + "}\n"); +} + +void StringFieldGenerator:: +GenerateBuilderMembers(io::Printer* printer) const { + printer->Print(variables_, + "private java.lang.Object $name$_ $default_init$;\n" + "$deprecation$public boolean has$capitalized_name$() {\n" + " return $get_has_field_bit_builder$;\n" + "}\n"); + + printer->Print(variables_, + "$deprecation$public java.lang.String get$capitalized_name$() {\n" + " java.lang.Object ref = $name$_;\n" + " if (!(ref instanceof java.lang.String)) {\n" + " java.lang.String s = ((com.google.protobuf.ByteString) ref)\n" + " .toStringUtf8();\n" + " $name$_ = s;\n" + " return s;\n" + " } else {\n" + " return (java.lang.String) ref;\n" + " }\n" + "}\n"); + + printer->Print(variables_, + "$deprecation$public Builder set$capitalized_name$(\n" + " java.lang.String value) {\n" + "$null_check$" + " $set_has_field_bit_builder$;\n" + " $name$_ = value;\n" + " $on_changed$\n" + " return this;\n" + "}\n" + "$deprecation$public Builder clear$capitalized_name$() {\n" + " $clear_has_field_bit_builder$;\n"); + // The default value is not a simple literal so we want to avoid executing + // it multiple times. Instead, get the default out of the default instance. 
+ printer->Print(variables_, + " $name$_ = getDefaultInstance().get$capitalized_name$();\n"); + printer->Print(variables_, + " $on_changed$\n" + " return this;\n" + "}\n"); + + printer->Print(variables_, + "void set$capitalized_name$(com.google.protobuf.ByteString value) {\n" + " $set_has_field_bit_builder$;\n" + " $name$_ = value;\n" + " $on_changed$\n" + "}\n"); +} + +void StringFieldGenerator:: +GenerateFieldBuilderInitializationCode(io::Printer* printer) const { + // noop for primitives +} + +void StringFieldGenerator:: +GenerateInitializationCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_ = $default$;\n"); +} + +void StringFieldGenerator:: +GenerateBuilderClearCode(io::Printer* printer) const { + printer->Print(variables_, + "$name$_ = $default$;\n" + "$clear_has_field_bit_builder$;\n"); +} + +void StringFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + printer->Print(variables_, + "if (other.has$capitalized_name$()) {\n" + " set$capitalized_name$(other.get$capitalized_name$());\n" + "}\n"); +} + +void StringFieldGenerator:: +GenerateBuildingCode(io::Printer* printer) const { + printer->Print(variables_, + "if ($get_has_field_bit_from_local$) {\n" + " $set_has_field_bit_to_local$;\n" + "}\n" + "result.$name$_ = $name$_;\n"); +} + +void StringFieldGenerator:: +GenerateParsingCode(io::Printer* printer) const { + printer->Print(variables_, + "$set_has_field_bit_builder$;\n" + "$name$_ = input.readBytes();\n"); +} + +void StringFieldGenerator:: +GenerateSerializationCode(io::Printer* printer) const { + printer->Print(variables_, + "if ($get_has_field_bit_message$) {\n" + " output.writeBytes($number$, get$capitalized_name$Bytes());\n" + "}\n"); +} + +void StringFieldGenerator:: +GenerateSerializedSizeCode(io::Printer* printer) const { + printer->Print(variables_, + "if ($get_has_field_bit_message$) {\n" + " size += com.google.protobuf.CodedOutputStream\n" + " .computeBytesSize($number$, get$capitalized_name$Bytes());\n" + "}\n"); +} + +void StringFieldGenerator:: +GenerateEqualsCode(io::Printer* printer) const { + printer->Print(variables_, + "result = result && get$capitalized_name$()\n" + " .equals(other.get$capitalized_name$());\n"); +} + +void StringFieldGenerator:: +GenerateHashCode(io::Printer* printer) const { + printer->Print(variables_, + "hash = (37 * hash) + $constant_name$;\n"); + printer->Print(variables_, + "hash = (53 * hash) + get$capitalized_name$().hashCode();\n"); +} + +string StringFieldGenerator::GetBoxedType() const { + return "java.lang.String"; +} + + +// =================================================================== + +RepeatedStringFieldGenerator:: +RepeatedStringFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, + int builderBitIndex) + : descriptor_(descriptor), messageBitIndex_(messageBitIndex), + builderBitIndex_(builderBitIndex) { + SetPrimitiveVariables(descriptor, messageBitIndex, builderBitIndex, + &variables_); +} + +RepeatedStringFieldGenerator::~RepeatedStringFieldGenerator() {} + +int RepeatedStringFieldGenerator::GetNumBitsForMessage() const { + return 0; +} + +int RepeatedStringFieldGenerator::GetNumBitsForBuilder() const { + return 1; +} + +void RepeatedStringFieldGenerator:: +GenerateInterfaceMembers(io::Printer* printer) const { + printer->Print(variables_, + "$deprecation$java.util.List\n" + " get$capitalized_name$List();\n" + "$deprecation$int get$capitalized_name$Count();\n" + "$deprecation$java.lang.String get$capitalized_name$(int index);\n"); +} + + +void 
RepeatedStringFieldGenerator:: +GenerateMembers(io::Printer* printer) const { + printer->Print(variables_, + "private com.google.protobuf.LazyStringList $name$_;\n" + "$deprecation$public java.util.List\n" + " get$capitalized_name$List() {\n" + " return $name$_;\n" // note: unmodifiable list + "}\n" + "$deprecation$public int get$capitalized_name$Count() {\n" + " return $name$_.size();\n" + "}\n" + "$deprecation$public java.lang.String get$capitalized_name$(int index) {\n" + " return $name$_.get(index);\n" + "}\n"); + + if (descriptor_->options().packed() && + HasGeneratedMethods(descriptor_->containing_type())) { + printer->Print(variables_, + "private int $name$MemoizedSerializedSize = -1;\n"); + } +} + +void RepeatedStringFieldGenerator:: +GenerateBuilderMembers(io::Printer* printer) const { + // One field is the list and the bit field keeps track of whether the + // list is immutable. If it's immutable, the invariant is that it must + // either be an instance of Collections.emptyList() or it's an ArrayList + // wrapped in a Collections.unmodifiableList() wrapper and nobody else has + // a reference to the underlying ArrayList. This invariant allows us to + // share instances of lists between protocol buffers avoiding expensive + // memory allocations. Note, immutable is a strong guarantee here -- not + // just that the list cannot be modified via the reference but that the + // list can never be modified. + printer->Print(variables_, + "private com.google.protobuf.LazyStringList $name$_ = $empty_list$;\n"); + + printer->Print(variables_, + "private void ensure$capitalized_name$IsMutable() {\n" + " if (!$get_mutable_bit_builder$) {\n" + " $name$_ = new com.google.protobuf.LazyStringArrayList($name$_);\n" + " $set_mutable_bit_builder$;\n" + " }\n" + "}\n"); + + // Note: We return an unmodifiable list because otherwise the caller + // could hold on to the returned list and modify it after the message + // has been built, thus mutating the message which is supposed to be + // immutable.
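Before the generated accessors that follow, a hand-written Java sketch of the copy-on-write invariant just described; the class and method names are invented for illustration, and a boolean stands in for the real builder's mutable bit and com.google.protobuf.LazyStringArrayList:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// Sketch of the "mutable bit" invariant: values is either a shared immutable
// list (mutable == false) or an ArrayList owned exclusively by this builder.
final class RepeatedStringBuilderSketch {
  private List<String> values = Collections.emptyList();
  private boolean mutable = false;  // stands in for $get_mutable_bit_builder$

  private void ensureMutable() {
    if (!mutable) {
      values = new ArrayList<>(values);  // copy on first write
      mutable = true;
    }
  }

  RepeatedStringBuilderSketch add(String value) {
    ensureMutable();
    values.add(value);
    return this;
  }

  // Hand out a read-only view so callers cannot mutate a built message later.
  List<String> getList() {
    return Collections.unmodifiableList(values);
  }

  // "build": freeze the list, clear the mutable bit, and share the instance.
  List<String> build() {
    if (mutable) {
      values = Collections.unmodifiableList(values);
      mutable = false;
    }
    return values;
  }
}

After build(), the builder no longer holds a direct reference to the backing ArrayList, so a later add() copies again instead of mutating the list it already handed out; the generated building code further below follows the same reasoning.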
+ printer->Print(variables_, + "$deprecation$public java.util.List\n" + " get$capitalized_name$List() {\n" + " return java.util.Collections.unmodifiableList($name$_);\n" + "}\n" + "$deprecation$public int get$capitalized_name$Count() {\n" + " return $name$_.size();\n" + "}\n" + "$deprecation$public java.lang.String get$capitalized_name$(int index) {\n" + " return $name$_.get(index);\n" + "}\n" + "$deprecation$public Builder set$capitalized_name$(\n" + " int index, java.lang.String value) {\n" + "$null_check$" + " ensure$capitalized_name$IsMutable();\n" + " $name$_.set(index, value);\n" + " $on_changed$\n" + " return this;\n" + "}\n" + "$deprecation$public Builder add$capitalized_name$(\n" + " java.lang.String value) {\n" + "$null_check$" + " ensure$capitalized_name$IsMutable();\n" + " $name$_.add(value);\n" + " $on_changed$\n" + " return this;\n" + "}\n" + "$deprecation$public Builder addAll$capitalized_name$(\n" + " java.lang.Iterable values) {\n" + " ensure$capitalized_name$IsMutable();\n" + " super.addAll(values, $name$_);\n" + " $on_changed$\n" + " return this;\n" + "}\n" + "$deprecation$public Builder clear$capitalized_name$() {\n" + " $name$_ = $empty_list$;\n" + " $clear_mutable_bit_builder$;\n" + " $on_changed$\n" + " return this;\n" + "}\n"); + + printer->Print(variables_, + "void add$capitalized_name$(com.google.protobuf.ByteString value) {\n" + " ensure$capitalized_name$IsMutable();\n" + " $name$_.add(value);\n" + " $on_changed$\n" + "}\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateFieldBuilderInitializationCode(io::Printer* printer) const { + // noop for primitives +} + +void RepeatedStringFieldGenerator:: +GenerateInitializationCode(io::Printer* printer) const { + printer->Print(variables_, "$name$_ = $empty_list$;\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateBuilderClearCode(io::Printer* printer) const { + printer->Print(variables_, + "$name$_ = $empty_list$;\n" + "$clear_mutable_bit_builder$;\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateMergingCode(io::Printer* printer) const { + // The code below does two optimizations: + // 1. If the other list is empty, there's nothing to do. This ensures we + // don't allocate a new array if we already have an immutable one. + // 2. If the other list is non-empty and our current list is empty, we can + // reuse the other list which is guaranteed to be immutable. + printer->Print(variables_, + "if (!other.$name$_.isEmpty()) {\n" + " if ($name$_.isEmpty()) {\n" + " $name$_ = other.$name$_;\n" + " $clear_mutable_bit_builder$;\n" + " } else {\n" + " ensure$capitalized_name$IsMutable();\n" + " $name$_.addAll(other.$name$_);\n" + " }\n" + " $on_changed$\n" + "}\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateBuildingCode(io::Printer* printer) const { + // The code below ensures that the result has an immutable list. If our + // list is immutable, we can just reuse it. If not, we make it immutable. 
+ + printer->Print(variables_, + "if ($get_mutable_bit_builder$) {\n" + " $name$_ = new com.google.protobuf.UnmodifiableLazyStringList(\n" + " $name$_);\n" + " $clear_mutable_bit_builder$;\n" + "}\n" + "result.$name$_ = $name$_;\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateParsingCode(io::Printer* printer) const { + printer->Print(variables_, + "ensure$capitalized_name$IsMutable();\n" + "$name$_.add(input.readBytes());\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateParsingCodeFromPacked(io::Printer* printer) const { + printer->Print(variables_, + "int length = input.readRawVarint32();\n" + "int limit = input.pushLimit(length);\n" + "while (input.getBytesUntilLimit() > 0) {\n" + " add$capitalized_name$(input.read$capitalized_type$());\n" + "}\n" + "input.popLimit(limit);\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateSerializationCode(io::Printer* printer) const { + if (descriptor_->options().packed()) { + printer->Print(variables_, + "if (get$capitalized_name$List().size() > 0) {\n" + " output.writeRawVarint32($tag$);\n" + " output.writeRawVarint32($name$MemoizedSerializedSize);\n" + "}\n" + "for (int i = 0; i < $name$_.size(); i++) {\n" + " output.write$capitalized_type$NoTag($name$_.get(i));\n" + "}\n"); + } else { + printer->Print(variables_, + "for (int i = 0; i < $name$_.size(); i++) {\n" + " output.writeBytes($number$, $name$_.getByteString(i));\n" + "}\n"); + } +} + +void RepeatedStringFieldGenerator:: +GenerateSerializedSizeCode(io::Printer* printer) const { + printer->Print(variables_, + "{\n" + " int dataSize = 0;\n"); + printer->Indent(); + + printer->Print(variables_, + "for (int i = 0; i < $name$_.size(); i++) {\n" + " dataSize += com.google.protobuf.CodedOutputStream\n" + " .computeBytesSizeNoTag($name$_.getByteString(i));\n" + "}\n"); + + printer->Print( + "size += dataSize;\n"); + + if (descriptor_->options().packed()) { + printer->Print(variables_, + "if (!get$capitalized_name$List().isEmpty()) {\n" + " size += $tag_size$;\n" + " size += com.google.protobuf.CodedOutputStream\n" + " .computeInt32SizeNoTag(dataSize);\n" + "}\n"); + } else { + printer->Print(variables_, + "size += $tag_size$ * get$capitalized_name$List().size();\n"); + } + + // cache the data size for packed fields. 
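As a worked illustration of the length-delimited sizing recipe used above, here is a small, self-contained Java example comparing packed and unpacked sizes for a repeated varint field (the field number and values are made up; note that the wire format only allows packing for primitive numeric fields, so for string fields the unpacked branch is the one exercised in practice):

// Worked example of the dataSize / tag-overhead arithmetic used above.
final class PackedSizeExample {
  // Size of a value in unsigned varint encoding (7 payload bits per byte).
  static int varintSize(long v) {
    int size = 1;
    while ((v & ~0x7FL) != 0) { size++; v >>>= 7; }
    return size;
  }

  public static void main(String[] args) {
    int fieldNumber = 4;
    long[] values = {1, 150, 300_000};

    int dataSize = 0;
    for (long v : values) dataSize += varintSize(v);   // 1 + 2 + 3 = 6

    // Packed: one tag (wire type 2) + a length prefix + the payload.
    int tag = (fieldNumber << 3) | 2;
    int packed = varintSize(tag) + varintSize(dataSize) + dataSize;

    // Unpacked: one tag (wire type 0) per element + the payload.
    int unpacked = values.length * varintSize(fieldNumber << 3) + dataSize;

    System.out.println("packed=" + packed + " unpacked=" + unpacked); // 8 vs 9
  }
}

The generated code above follows the same recipe: sum the element payloads into dataSize, add the tag and length overhead once for packed fields or once per element otherwise, and memoize dataSize so the write pass can emit the length prefix without recomputing it (that caching continues just below).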
+ if (descriptor_->options().packed()) { + printer->Print(variables_, + "$name$MemoizedSerializedSize = dataSize;\n"); + } + + printer->Outdent(); + printer->Print("}\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateEqualsCode(io::Printer* printer) const { + printer->Print(variables_, + "result = result && get$capitalized_name$List()\n" + " .equals(other.get$capitalized_name$List());\n"); +} + +void RepeatedStringFieldGenerator:: +GenerateHashCode(io::Printer* printer) const { + printer->Print(variables_, + "if (get$capitalized_name$Count() > 0) {\n" + " hash = (37 * hash) + $constant_name$;\n" + " hash = (53 * hash) + get$capitalized_name$List().hashCode();\n" + "}\n"); +} + +string RepeatedStringFieldGenerator::GetBoxedType() const { + return "String"; +} + +} // namespace java +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_string_field.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_string_field.h new file mode 100644 index 0000000000..8cb414694a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/java/java_string_field.h @@ -0,0 +1,120 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Author: jonp@google.com (Jon Perlow) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
+ +#ifndef GOOGLE_PROTOBUF_COMPILER_JAVA_STRING_FIELD_H__ +#define GOOGLE_PROTOBUF_COMPILER_JAVA_STRING_FIELD_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace java { + +class StringFieldGenerator : public FieldGenerator { + public: + explicit StringFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, int builderBitIndex); + ~StringFieldGenerator(); + + // implements FieldGenerator --------------------------------------- + int GetNumBitsForMessage() const; + int GetNumBitsForBuilder() const; + void GenerateInterfaceMembers(io::Printer* printer) const; + void GenerateMembers(io::Printer* printer) const; + void GenerateBuilderMembers(io::Printer* printer) const; + void GenerateInitializationCode(io::Printer* printer) const; + void GenerateBuilderClearCode(io::Printer* printer) const; + void GenerateMergingCode(io::Printer* printer) const; + void GenerateBuildingCode(io::Printer* printer) const; + void GenerateParsingCode(io::Printer* printer) const; + void GenerateSerializationCode(io::Printer* printer) const; + void GenerateSerializedSizeCode(io::Printer* printer) const; + void GenerateFieldBuilderInitializationCode(io::Printer* printer) const; + void GenerateEqualsCode(io::Printer* printer) const; + void GenerateHashCode(io::Printer* printer) const; + string GetBoxedType() const; + + private: + const FieldDescriptor* descriptor_; + map variables_; + const int messageBitIndex_; + const int builderBitIndex_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(StringFieldGenerator); +}; + +class RepeatedStringFieldGenerator : public FieldGenerator { + public: + explicit RepeatedStringFieldGenerator(const FieldDescriptor* descriptor, + int messageBitIndex, int builderBitIndex); + ~RepeatedStringFieldGenerator(); + + // implements FieldGenerator --------------------------------------- + int GetNumBitsForMessage() const; + int GetNumBitsForBuilder() const; + void GenerateInterfaceMembers(io::Printer* printer) const; + void GenerateMembers(io::Printer* printer) const; + void GenerateBuilderMembers(io::Printer* printer) const; + void GenerateInitializationCode(io::Printer* printer) const; + void GenerateBuilderClearCode(io::Printer* printer) const; + void GenerateMergingCode(io::Printer* printer) const; + void GenerateBuildingCode(io::Printer* printer) const; + void GenerateParsingCode(io::Printer* printer) const; + void GenerateParsingCodeFromPacked(io::Printer* printer) const; + void GenerateSerializationCode(io::Printer* printer) const; + void GenerateSerializedSizeCode(io::Printer* printer) const; + void GenerateFieldBuilderInitializationCode(io::Printer* printer) const; + void GenerateEqualsCode(io::Printer* printer) const; + void GenerateHashCode(io::Printer* printer) const; + string GetBoxedType() const; + + private: + const FieldDescriptor* descriptor_; + map variables_; + const int messageBitIndex_; + const int builderBitIndex_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RepeatedStringFieldGenerator); +}; + +} // namespace java +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_JAVA_STRING_FIELD_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/main.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/main.cc new file mode 100644 index 0000000000..d9b0c3f9d4 --- /dev/null +++ 
b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/main.cc @@ -0,0 +1,61 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) + +#include +#include +#include +#include + + +int main(int argc, char* argv[]) { + + google::protobuf::compiler::CommandLineInterface cli; + cli.AllowPlugins("protoc-"); + + // Proto2 C++ + google::protobuf::compiler::cpp::CppGenerator cpp_generator; + cli.RegisterGenerator("--cpp_out", &cpp_generator, + "Generate C++ header and source."); + + // Proto2 Java + google::protobuf::compiler::java::JavaGenerator java_generator; + cli.RegisterGenerator("--java_out", &java_generator, + "Generate Java source file."); + + + // Proto2 Python + google::protobuf::compiler::python::Generator py_generator; + cli.RegisterGenerator("--python_out", &py_generator, + "Generate Python source file."); + + return cli.Run(argc, argv); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/mock_code_generator.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/mock_code_generator.cc new file mode 100644 index 0000000000..5b76af2577 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/mock_code_generator.cc @@ -0,0 +1,231 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) + +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { + +// Returns the list of the names of files in all_files in the form of a +// comma-separated string. +string CommaSeparatedList(const vector all_files) { + vector names; + for (int i = 0; i < all_files.size(); i++) { + names.push_back(all_files[i]->name()); + } + return JoinStrings(names, ","); +} + +static const char* kFirstInsertionPointName = "first_mock_insertion_point"; +static const char* kSecondInsertionPointName = "second_mock_insertion_point"; +static const char* kFirstInsertionPoint = + "# @@protoc_insertion_point(first_mock_insertion_point) is here\n"; +static const char* kSecondInsertionPoint = + " # @@protoc_insertion_point(second_mock_insertion_point) is here\n"; + +MockCodeGenerator::MockCodeGenerator(const string& name) + : name_(name) {} + +MockCodeGenerator::~MockCodeGenerator() {} + +void MockCodeGenerator::ExpectGenerated( + const string& name, + const string& parameter, + const string& insertions, + const string& file, + const string& first_message_name, + const string& first_parsed_file_name, + const string& output_directory) { + string content; + ASSERT_TRUE(File::ReadFileToString( + output_directory + "/" + GetOutputFileName(name, file), &content)); + + vector lines; + SplitStringUsing(content, "\n", &lines); + + while (!lines.empty() && lines.back().empty()) { + lines.pop_back(); + } + for (int i = 0; i < lines.size(); i++) { + lines[i] += "\n"; + } + + vector insertion_list; + if (!insertions.empty()) { + SplitStringUsing(insertions, ",", &insertion_list); + } + + ASSERT_EQ(lines.size(), 3 + insertion_list.size() * 2); + EXPECT_EQ(GetOutputFileContent(name, parameter, file, + first_parsed_file_name, first_message_name), + lines[0]); + + EXPECT_EQ(kFirstInsertionPoint, lines[1 + insertion_list.size()]); + EXPECT_EQ(kSecondInsertionPoint, lines[2 + insertion_list.size() * 2]); + + for (int i = 0; i < insertion_list.size(); i++) { + EXPECT_EQ(GetOutputFileContent(insertion_list[i], "first_insert", + file, file, first_message_name), + lines[1 + i]); + // Second insertion point is indented, so the inserted text should + // automatically be indented too. 
+ EXPECT_EQ(" " + GetOutputFileContent(insertion_list[i], "second_insert", + file, file, first_message_name), + lines[2 + insertion_list.size() + i]); + } +} + +bool MockCodeGenerator::Generate( + const FileDescriptor* file, + const string& parameter, + GeneratorContext* context, + string* error) const { + for (int i = 0; i < file->message_type_count(); i++) { + if (HasPrefixString(file->message_type(i)->name(), "MockCodeGenerator_")) { + string command = StripPrefixString(file->message_type(i)->name(), + "MockCodeGenerator_"); + if (command == "Error") { + *error = "Saw message type MockCodeGenerator_Error."; + return false; + } else if (command == "Exit") { + cerr << "Saw message type MockCodeGenerator_Exit." << endl; + exit(123); + } else if (command == "Abort") { + cerr << "Saw message type MockCodeGenerator_Abort." << endl; + abort(); + } else { + GOOGLE_LOG(FATAL) << "Unknown MockCodeGenerator command: " << command; + } + } + } + + if (HasPrefixString(parameter, "insert=")) { + vector insert_into; + SplitStringUsing(StripPrefixString(parameter, "insert="), + ",", &insert_into); + + for (int i = 0; i < insert_into.size(); i++) { + { + scoped_ptr output( + context->OpenForInsert( + GetOutputFileName(insert_into[i], file), + kFirstInsertionPointName)); + io::Printer printer(output.get(), '$'); + printer.PrintRaw(GetOutputFileContent(name_, "first_insert", + file, context)); + if (printer.failed()) { + *error = "MockCodeGenerator detected write error."; + return false; + } + } + + { + scoped_ptr output( + context->OpenForInsert( + GetOutputFileName(insert_into[i], file), + kSecondInsertionPointName)); + io::Printer printer(output.get(), '$'); + printer.PrintRaw(GetOutputFileContent(name_, "second_insert", + file, context)); + if (printer.failed()) { + *error = "MockCodeGenerator detected write error."; + return false; + } + } + } + } else { + scoped_ptr output( + context->Open(GetOutputFileName(name_, file))); + + io::Printer printer(output.get(), '$'); + printer.PrintRaw(GetOutputFileContent(name_, parameter, + file, context)); + printer.PrintRaw(kFirstInsertionPoint); + printer.PrintRaw(kSecondInsertionPoint); + + if (printer.failed()) { + *error = "MockCodeGenerator detected write error."; + return false; + } + } + + return true; +} + +string MockCodeGenerator::GetOutputFileName(const string& generator_name, + const FileDescriptor* file) { + return GetOutputFileName(generator_name, file->name()); +} + +string MockCodeGenerator::GetOutputFileName(const string& generator_name, + const string& file) { + return file + ".MockCodeGenerator." + generator_name; +} + +string MockCodeGenerator::GetOutputFileContent( + const string& generator_name, + const string& parameter, + const FileDescriptor* file, + GeneratorContext *context) { + vector all_files; + context->ListParsedFiles(&all_files); + return GetOutputFileContent( + generator_name, parameter, file->name(), + CommaSeparatedList(all_files), + file->message_type_count() > 0 ? 
+ file->message_type(0)->name() : "(none)"); +} + +string MockCodeGenerator::GetOutputFileContent( + const string& generator_name, + const string& parameter, + const string& file, + const string& parsed_file_list, + const string& first_message_name) { + return strings::Substitute("$0: $1, $2, $3, $4\n", + generator_name, parameter, file, + first_message_name, parsed_file_list); +} + +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/mock_code_generator.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/mock_code_generator.h new file mode 100644 index 0000000000..5c7942bda6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/mock_code_generator.h @@ -0,0 +1,113 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) + +#ifndef GOOGLE_PROTOBUF_COMPILER_MOCK_CODE_GENERATOR_H__ +#define GOOGLE_PROTOBUF_COMPILER_MOCK_CODE_GENERATOR_H__ + +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { + +// A mock CodeGenerator, used by command_line_interface_unittest. This is in +// its own file so that it can be used both directly and as a plugin. +// +// Generate() produces some output which can be checked by ExpectCalled(). The +// generator can run in a different process (e.g. a plugin). +// +// If the parameter is "insert=NAMES", the MockCodeGenerator will insert lines +// into the files generated by other MockCodeGenerators instead of creating +// its own file. NAMES is a comma-separated list of the names of those other +// MockCodeGenerators. 
+// +// MockCodeGenerator will also modify its behavior slightly if the input file +// contains a message type with one of the following names: +// MockCodeGenerator_Error: Causes Generate() to return false and set the +// error message to "Saw message type MockCodeGenerator_Error." +// MockCodeGenerator_Exit: Generate() prints "Saw message type +// MockCodeGenerator_Exit." to stderr and then calls exit(123). +// MockCodeGenerator_Abort: Generate() prints "Saw message type +// MockCodeGenerator_Abort." to stderr and then calls abort(). +class MockCodeGenerator : public CodeGenerator { + public: + MockCodeGenerator(const string& name); + virtual ~MockCodeGenerator(); + + // Expect (via gTest) that a MockCodeGenerator with the given name was called + // with the given parameters by inspecting the output location. + // + // |insertions| is a comma-separated list of names of MockCodeGenerators which + // should have inserted lines into this file. + // |parsed_file_list| is a comma-separated list of names of the files + // that are being compiled together in this run. + static void ExpectGenerated(const string& name, + const string& parameter, + const string& insertions, + const string& file, + const string& first_message_name, + const string& parsed_file_list, + const string& output_directory); + + // Get the name of the file which would be written by the given generator. + static string GetOutputFileName(const string& generator_name, + const FileDescriptor* file); + static string GetOutputFileName(const string& generator_name, + const string& file); + + // implements CodeGenerator ---------------------------------------- + + virtual bool Generate(const FileDescriptor* file, + const string& parameter, + GeneratorContext* context, + string* error) const; + + private: + string name_; + + static string GetOutputFileContent(const string& generator_name, + const string& parameter, + const FileDescriptor* file, + GeneratorContext *context); + static string GetOutputFileContent(const string& generator_name, + const string& parameter, + const string& file, + const string& parsed_file_list, + const string& first_message_name); +}; + +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_MOCK_CODE_GENERATOR_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/package_info.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/package_info.h new file mode 100644 index 0000000000..b897126742 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/package_info.h @@ -0,0 +1,64 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This file exists solely to document the google::protobuf::compiler namespace. +// It is not compiled into anything, but it may be read by an automated +// documentation generator. + +namespace google { + +namespace protobuf { + +// Implementation of the Protocol Buffer compiler. +// +// This package contains code for parsing .proto files and generating code +// based on them. There are two reasons you might be interested in this +// package: +// - You want to parse .proto files at runtime. In this case, you should +// look at importer.h. Since this functionality is widely useful, it is +// included in the libprotobuf base library; you do not have to link against +// libprotoc. +// - You want to write a custom protocol compiler which generates different +// kinds of code, e.g. code in a different language which is not supported +// by the official compiler. For this purpose, command_line_interface.h +// provides you with a complete compiler front-end, so all you need to do +// is write a custom implementation of CodeGenerator and a trivial main() +// function. You can even make your compiler support the official languages +// in addition to your own. Since this functionality is only useful to those +// writing custom compilers, it is in a separate library called "libprotoc" +// which you will have to link against. +namespace compiler {} + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/parser.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/parser.cc new file mode 100644 index 0000000000..34317b1fd9 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/parser.cc @@ -0,0 +1,1473 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Recursive descent FTW. + +#include +#include +#include + + +#include +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { + +using internal::WireFormat; + +namespace { + +typedef hash_map TypeNameMap; + +TypeNameMap MakeTypeNameTable() { + TypeNameMap result; + + result["double" ] = FieldDescriptorProto::TYPE_DOUBLE; + result["float" ] = FieldDescriptorProto::TYPE_FLOAT; + result["uint64" ] = FieldDescriptorProto::TYPE_UINT64; + result["fixed64" ] = FieldDescriptorProto::TYPE_FIXED64; + result["fixed32" ] = FieldDescriptorProto::TYPE_FIXED32; + result["bool" ] = FieldDescriptorProto::TYPE_BOOL; + result["string" ] = FieldDescriptorProto::TYPE_STRING; + result["group" ] = FieldDescriptorProto::TYPE_GROUP; + + result["bytes" ] = FieldDescriptorProto::TYPE_BYTES; + result["uint32" ] = FieldDescriptorProto::TYPE_UINT32; + result["sfixed32"] = FieldDescriptorProto::TYPE_SFIXED32; + result["sfixed64"] = FieldDescriptorProto::TYPE_SFIXED64; + result["int32" ] = FieldDescriptorProto::TYPE_INT32; + result["int64" ] = FieldDescriptorProto::TYPE_INT64; + result["sint32" ] = FieldDescriptorProto::TYPE_SINT32; + result["sint64" ] = FieldDescriptorProto::TYPE_SINT64; + + return result; +} + +const TypeNameMap kTypeNames = MakeTypeNameTable(); + +} // anonymous namespace + +// Makes code slightly more readable. The meaning of "DO(foo)" is +// "Execute foo and fail if it fails.", where failure is indicated by +// returning false. 
+#define DO(STATEMENT) if (STATEMENT) {} else return false + +// =================================================================== + +Parser::Parser() + : input_(NULL), + error_collector_(NULL), + source_location_table_(NULL), + had_errors_(false), + require_syntax_identifier_(false), + stop_after_syntax_identifier_(false) { +} + +Parser::~Parser() { +} + +// =================================================================== + +inline bool Parser::LookingAt(const char* text) { + return input_->current().text == text; +} + +inline bool Parser::LookingAtType(io::Tokenizer::TokenType token_type) { + return input_->current().type == token_type; +} + +inline bool Parser::AtEnd() { + return LookingAtType(io::Tokenizer::TYPE_END); +} + +bool Parser::TryConsume(const char* text) { + if (LookingAt(text)) { + input_->Next(); + return true; + } else { + return false; + } +} + +bool Parser::Consume(const char* text, const char* error) { + if (TryConsume(text)) { + return true; + } else { + AddError(error); + return false; + } +} + +bool Parser::Consume(const char* text) { + if (TryConsume(text)) { + return true; + } else { + AddError("Expected \"" + string(text) + "\"."); + return false; + } +} + +bool Parser::ConsumeIdentifier(string* output, const char* error) { + if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) { + *output = input_->current().text; + input_->Next(); + return true; + } else { + AddError(error); + return false; + } +} + +bool Parser::ConsumeInteger(int* output, const char* error) { + if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { + uint64 value = 0; + if (!io::Tokenizer::ParseInteger(input_->current().text, + kint32max, &value)) { + AddError("Integer out of range."); + // We still return true because we did, in fact, parse an integer. + } + *output = value; + input_->Next(); + return true; + } else { + AddError(error); + return false; + } +} + +bool Parser::ConsumeInteger64(uint64 max_value, uint64* output, + const char* error) { + if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { + if (!io::Tokenizer::ParseInteger(input_->current().text, max_value, + output)) { + AddError("Integer out of range."); + // We still return true because we did, in fact, parse an integer. + *output = 0; + } + input_->Next(); + return true; + } else { + AddError(error); + return false; + } +} + +bool Parser::ConsumeNumber(double* output, const char* error) { + if (LookingAtType(io::Tokenizer::TYPE_FLOAT)) { + *output = io::Tokenizer::ParseFloat(input_->current().text); + input_->Next(); + return true; + } else if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { + // Also accept integers. + uint64 value = 0; + if (!io::Tokenizer::ParseInteger(input_->current().text, + kuint64max, &value)) { + AddError("Integer out of range."); + // We still return true because we did, in fact, parse a number. + } + *output = value; + input_->Next(); + return true; + } else if (LookingAt("inf")) { + *output = numeric_limits::infinity(); + input_->Next(); + return true; + } else if (LookingAt("nan")) { + *output = numeric_limits::quiet_NaN(); + input_->Next(); + return true; + } else { + AddError(error); + return false; + } +} + +bool Parser::ConsumeString(string* output, const char* error) { + if (LookingAtType(io::Tokenizer::TYPE_STRING)) { + io::Tokenizer::ParseString(input_->current().text, output); + input_->Next(); + // Allow C++ like concatenation of adjacent string tokens. 
+ while (LookingAtType(io::Tokenizer::TYPE_STRING)) { + io::Tokenizer::ParseStringAppend(input_->current().text, output); + input_->Next(); + } + return true; + } else { + AddError(error); + return false; + } +} + +// ------------------------------------------------------------------- + +void Parser::AddError(int line, int column, const string& error) { + if (error_collector_ != NULL) { + error_collector_->AddError(line, column, error); + } + had_errors_ = true; +} + +void Parser::AddError(const string& error) { + AddError(input_->current().line, input_->current().column, error); +} + +// ------------------------------------------------------------------- + +Parser::LocationRecorder::LocationRecorder(Parser* parser) + : parser_(parser), + location_(parser_->source_code_info_->add_location()) { + location_->add_span(parser_->input_->current().line); + location_->add_span(parser_->input_->current().column); +} + +Parser::LocationRecorder::LocationRecorder(const LocationRecorder& parent) { + Init(parent); +} + +Parser::LocationRecorder::LocationRecorder(const LocationRecorder& parent, + int path1) { + Init(parent); + AddPath(path1); +} + +Parser::LocationRecorder::LocationRecorder(const LocationRecorder& parent, + int path1, int path2) { + Init(parent); + AddPath(path1); + AddPath(path2); +} + +void Parser::LocationRecorder::Init(const LocationRecorder& parent) { + parser_ = parent.parser_; + location_ = parser_->source_code_info_->add_location(); + location_->mutable_path()->CopyFrom(parent.location_->path()); + + location_->add_span(parser_->input_->current().line); + location_->add_span(parser_->input_->current().column); +} + +Parser::LocationRecorder::~LocationRecorder() { + if (location_->span_size() <= 2) { + EndAt(parser_->input_->previous()); + } +} + +void Parser::LocationRecorder::AddPath(int path_component) { + location_->add_path(path_component); +} + +void Parser::LocationRecorder::StartAt(const io::Tokenizer::Token& token) { + location_->set_span(0, token.line); + location_->set_span(1, token.column); +} + +void Parser::LocationRecorder::EndAt(const io::Tokenizer::Token& token) { + if (token.line != location_->span(0)) { + location_->add_span(token.line); + } + location_->add_span(token.end_column); +} + +void Parser::LocationRecorder::RecordLegacyLocation(const Message* descriptor, + DescriptorPool::ErrorCollector::ErrorLocation location) { + if (parser_->source_location_table_ != NULL) { + parser_->source_location_table_->Add( + descriptor, location, location_->span(0), location_->span(1)); + } +} + +// ------------------------------------------------------------------- + +void Parser::SkipStatement() { + while (true) { + if (AtEnd()) { + return; + } else if (LookingAtType(io::Tokenizer::TYPE_SYMBOL)) { + if (TryConsume(";")) { + return; + } else if (TryConsume("{")) { + SkipRestOfBlock(); + return; + } else if (LookingAt("}")) { + return; + } + } + input_->Next(); + } +} + +void Parser::SkipRestOfBlock() { + while (true) { + if (AtEnd()) { + return; + } else if (LookingAtType(io::Tokenizer::TYPE_SYMBOL)) { + if (TryConsume("}")) { + return; + } else if (TryConsume("{")) { + SkipRestOfBlock(); + } + } + input_->Next(); + } +} + +// =================================================================== + +bool Parser::Parse(io::Tokenizer* input, FileDescriptorProto* file) { + input_ = input; + had_errors_ = false; + syntax_identifier_.clear(); + + // Note that |file| could be NULL at this point if + // stop_after_syntax_identifier_ is true. 
So, we conservatively allocate + // SourceCodeInfo on the stack, then swap it into the FileDescriptorProto + // later on. + SourceCodeInfo source_code_info; + source_code_info_ = &source_code_info; + + if (LookingAtType(io::Tokenizer::TYPE_START)) { + // Advance to first token. + input_->Next(); + } + + { + LocationRecorder root_location(this); + + if (require_syntax_identifier_ || LookingAt("syntax")) { + if (!ParseSyntaxIdentifier()) { + // Don't attempt to parse the file if we didn't recognize the syntax + // identifier. + return false; + } + } else if (!stop_after_syntax_identifier_) { + syntax_identifier_ = "proto2"; + } + + if (stop_after_syntax_identifier_) return !had_errors_; + + // Repeatedly parse statements until we reach the end of the file. + while (!AtEnd()) { + if (!ParseTopLevelStatement(file, root_location)) { + // This statement failed to parse. Skip it, but keep looping to parse + // other statements. + SkipStatement(); + + if (LookingAt("}")) { + AddError("Unmatched \"}\"."); + input_->Next(); + } + } + } + } + + input_ = NULL; + source_code_info_ = NULL; + source_code_info.Swap(file->mutable_source_code_info()); + return !had_errors_; +} + +bool Parser::ParseSyntaxIdentifier() { + DO(Consume("syntax", "File must begin with 'syntax = \"proto2\";'.")); + DO(Consume("=")); + io::Tokenizer::Token syntax_token = input_->current(); + string syntax; + DO(ConsumeString(&syntax, "Expected syntax identifier.")); + DO(Consume(";")); + + syntax_identifier_ = syntax; + + if (syntax != "proto2" && !stop_after_syntax_identifier_) { + AddError(syntax_token.line, syntax_token.column, + "Unrecognized syntax identifier \"" + syntax + "\". This parser " + "only recognizes \"proto2\"."); + return false; + } + + return true; +} + +bool Parser::ParseTopLevelStatement(FileDescriptorProto* file, + const LocationRecorder& root_location) { + if (TryConsume(";")) { + // empty statement; ignore + return true; + } else if (LookingAt("message")) { + LocationRecorder location(root_location, + FileDescriptorProto::kMessageTypeFieldNumber, file->message_type_size()); + return ParseMessageDefinition(file->add_message_type(), location); + } else if (LookingAt("enum")) { + LocationRecorder location(root_location, + FileDescriptorProto::kEnumTypeFieldNumber, file->enum_type_size()); + return ParseEnumDefinition(file->add_enum_type(), location); + } else if (LookingAt("service")) { + LocationRecorder location(root_location, + FileDescriptorProto::kServiceFieldNumber, file->service_size()); + return ParseServiceDefinition(file->add_service(), location); + } else if (LookingAt("extend")) { + LocationRecorder location(root_location, + FileDescriptorProto::kExtensionFieldNumber); + return ParseExtend(file->mutable_extension(), + file->mutable_message_type(), + root_location, + FileDescriptorProto::kMessageTypeFieldNumber, + location); + } else if (LookingAt("import")) { + int index = file->dependency_size(); + return ParseImport(file->add_dependency(), root_location, index); + } else if (LookingAt("package")) { + return ParsePackage(file, root_location); + } else if (LookingAt("option")) { + LocationRecorder location(root_location, + FileDescriptorProto::kOptionsFieldNumber); + return ParseOption(file->mutable_options(), location); + } else { + AddError("Expected top-level statement (e.g. 
\"message\")."); + return false; + } +} + +// ------------------------------------------------------------------- +// Messages + +bool Parser::ParseMessageDefinition(DescriptorProto* message, + const LocationRecorder& message_location) { + DO(Consume("message")); + { + LocationRecorder location(message_location, + DescriptorProto::kNameFieldNumber); + location.RecordLegacyLocation( + message, DescriptorPool::ErrorCollector::NAME); + DO(ConsumeIdentifier(message->mutable_name(), "Expected message name.")); + } + DO(ParseMessageBlock(message, message_location)); + return true; +} + +bool Parser::ParseMessageBlock(DescriptorProto* message, + const LocationRecorder& message_location) { + DO(Consume("{")); + + while (!TryConsume("}")) { + if (AtEnd()) { + AddError("Reached end of input in message definition (missing '}')."); + return false; + } + + if (!ParseMessageStatement(message, message_location)) { + // This statement failed to parse. Skip it, but keep looping to parse + // other statements. + SkipStatement(); + } + } + + return true; +} + +bool Parser::ParseMessageStatement(DescriptorProto* message, + const LocationRecorder& message_location) { + if (TryConsume(";")) { + // empty statement; ignore + return true; + } else if (LookingAt("message")) { + LocationRecorder location(message_location, + DescriptorProto::kNestedTypeFieldNumber, + message->nested_type_size()); + return ParseMessageDefinition(message->add_nested_type(), location); + } else if (LookingAt("enum")) { + LocationRecorder location(message_location, + DescriptorProto::kEnumTypeFieldNumber, + message->enum_type_size()); + return ParseEnumDefinition(message->add_enum_type(), location); + } else if (LookingAt("extensions")) { + LocationRecorder location(message_location, + DescriptorProto::kExtensionRangeFieldNumber); + return ParseExtensions(message, location); + } else if (LookingAt("extend")) { + LocationRecorder location(message_location, + DescriptorProto::kExtensionFieldNumber); + return ParseExtend(message->mutable_extension(), + message->mutable_nested_type(), + message_location, + DescriptorProto::kNestedTypeFieldNumber, + location); + } else if (LookingAt("option")) { + LocationRecorder location(message_location, + DescriptorProto::kOptionsFieldNumber); + return ParseOption(message->mutable_options(), location); + } else { + LocationRecorder location(message_location, + DescriptorProto::kFieldFieldNumber, + message->field_size()); + return ParseMessageField(message->add_field(), + message->mutable_nested_type(), + message_location, + DescriptorProto::kNestedTypeFieldNumber, + location); + } +} + +bool Parser::ParseMessageField(FieldDescriptorProto* field, + RepeatedPtrField* messages, + const LocationRecorder& parent_location, + int location_field_number_for_nested_type, + const LocationRecorder& field_location) { + // Parse label and type. 
+ io::Tokenizer::Token label_token = input_->current(); + { + LocationRecorder location(field_location, + FieldDescriptorProto::kLabelFieldNumber); + FieldDescriptorProto::Label label; + DO(ParseLabel(&label)); + field->set_label(label); + } + + { + LocationRecorder location(field_location); // add path later + location.RecordLegacyLocation(field, DescriptorPool::ErrorCollector::TYPE); + + FieldDescriptorProto::Type type = FieldDescriptorProto::TYPE_INT32; + string type_name; + DO(ParseType(&type, &type_name)); + if (type_name.empty()) { + location.AddPath(FieldDescriptorProto::kTypeFieldNumber); + field->set_type(type); + } else { + location.AddPath(FieldDescriptorProto::kTypeNameFieldNumber); + field->set_type_name(type_name); + } + } + + // Parse name and '='. + io::Tokenizer::Token name_token = input_->current(); + { + LocationRecorder location(field_location, + FieldDescriptorProto::kNameFieldNumber); + location.RecordLegacyLocation(field, DescriptorPool::ErrorCollector::NAME); + DO(ConsumeIdentifier(field->mutable_name(), "Expected field name.")); + } + DO(Consume("=", "Missing field number.")); + + // Parse field number. + { + LocationRecorder location(field_location, + FieldDescriptorProto::kNumberFieldNumber); + location.RecordLegacyLocation( + field, DescriptorPool::ErrorCollector::NUMBER); + int number; + DO(ConsumeInteger(&number, "Expected field number.")); + field->set_number(number); + } + + // Parse options. + DO(ParseFieldOptions(field, field_location)); + + // Deal with groups. + if (field->has_type() && field->type() == FieldDescriptorProto::TYPE_GROUP) { + // Awkward: Since a group declares both a message type and a field, we + // have to create overlapping locations. + LocationRecorder group_location(parent_location); + group_location.StartAt(label_token); + group_location.AddPath(location_field_number_for_nested_type); + group_location.AddPath(messages->size()); + + DescriptorProto* group = messages->Add(); + group->set_name(field->name()); + + // Record name location to match the field name's location. + { + LocationRecorder location(group_location, + DescriptorProto::kNameFieldNumber); + location.StartAt(name_token); + location.EndAt(name_token); + location.RecordLegacyLocation( + group, DescriptorPool::ErrorCollector::NAME); + } + + // The field's type_name also comes from the name. Confusing! + { + LocationRecorder location(field_location, + FieldDescriptorProto::kTypeNameFieldNumber); + location.StartAt(name_token); + location.EndAt(name_token); + } + + // As a hack for backwards-compatibility, we force the group name to start + // with a capital letter and lower-case the field name. New code should + // not use groups; it should use nested messages. + if (group->name()[0] < 'A' || 'Z' < group->name()[0]) { + AddError(name_token.line, name_token.column, + "Group names must start with a capital letter."); + } + LowerString(field->mutable_name()); + + field->set_type_name(group->name()); + if (LookingAt("{")) { + DO(ParseMessageBlock(group, group_location)); + } else { + AddError("Missing group body."); + return false; + } + } else { + DO(Consume(";")); + } + + return true; +} + +bool Parser::ParseFieldOptions(FieldDescriptorProto* field, + const LocationRecorder& field_location) { + if (!LookingAt("[")) return true; + + LocationRecorder location(field_location, + FieldDescriptorProto::kOptionsFieldNumber); + + DO(Consume("[")); + + // Parse field options. 
+ do { + if (LookingAt("default")) { + // We intentionally pass field_location rather than location here, since + // the default value is not actually an option. + DO(ParseDefaultAssignment(field, field_location)); + } else { + DO(ParseOptionAssignment(field->mutable_options(), location)); + } + } while (TryConsume(",")); + + DO(Consume("]")); + return true; +} + +bool Parser::ParseDefaultAssignment(FieldDescriptorProto* field, + const LocationRecorder& field_location) { + if (field->has_default_value()) { + AddError("Already set option \"default\"."); + field->clear_default_value(); + } + + DO(Consume("default")); + DO(Consume("=")); + + LocationRecorder location(field_location, + FieldDescriptorProto::kDefaultValueFieldNumber); + location.RecordLegacyLocation( + field, DescriptorPool::ErrorCollector::DEFAULT_VALUE); + string* default_value = field->mutable_default_value(); + + if (!field->has_type()) { + // The field has a type name, but we don't know if it is a message or an + // enum yet. Assume an enum for now. + DO(ConsumeIdentifier(default_value, "Expected identifier.")); + return true; + } + + switch (field->type()) { + case FieldDescriptorProto::TYPE_INT32: + case FieldDescriptorProto::TYPE_INT64: + case FieldDescriptorProto::TYPE_SINT32: + case FieldDescriptorProto::TYPE_SINT64: + case FieldDescriptorProto::TYPE_SFIXED32: + case FieldDescriptorProto::TYPE_SFIXED64: { + uint64 max_value = kint64max; + if (field->type() == FieldDescriptorProto::TYPE_INT32 || + field->type() == FieldDescriptorProto::TYPE_SINT32 || + field->type() == FieldDescriptorProto::TYPE_SFIXED32) { + max_value = kint32max; + } + + // These types can be negative. + if (TryConsume("-")) { + default_value->append("-"); + // Two's complement always has one more negative value than positive. + ++max_value; + } + // Parse the integer to verify that it is not out-of-range. + uint64 value; + DO(ConsumeInteger64(max_value, &value, "Expected integer.")); + // And stringify it again. + default_value->append(SimpleItoa(value)); + break; + } + + case FieldDescriptorProto::TYPE_UINT32: + case FieldDescriptorProto::TYPE_UINT64: + case FieldDescriptorProto::TYPE_FIXED32: + case FieldDescriptorProto::TYPE_FIXED64: { + uint64 max_value = kuint64max; + if (field->type() == FieldDescriptorProto::TYPE_UINT32 || + field->type() == FieldDescriptorProto::TYPE_FIXED32) { + max_value = kuint32max; + } + + // Numeric, not negative. + if (TryConsume("-")) { + AddError("Unsigned field can't have negative default value."); + } + // Parse the integer to verify that it is not out-of-range. + uint64 value; + DO(ConsumeInteger64(max_value, &value, "Expected integer.")); + // And stringify it again. + default_value->append(SimpleItoa(value)); + break; + } + + case FieldDescriptorProto::TYPE_FLOAT: + case FieldDescriptorProto::TYPE_DOUBLE: + // These types can be negative. + if (TryConsume("-")) { + default_value->append("-"); + } + // Parse the integer because we have to convert hex integers to decimal + // floats. + double value; + DO(ConsumeNumber(&value, "Expected number.")); + // And stringify it again. 
+ default_value->append(SimpleDtoa(value)); + break; + + case FieldDescriptorProto::TYPE_BOOL: + if (TryConsume("true")) { + default_value->assign("true"); + } else if (TryConsume("false")) { + default_value->assign("false"); + } else { + AddError("Expected \"true\" or \"false\"."); + return false; + } + break; + + case FieldDescriptorProto::TYPE_STRING: + DO(ConsumeString(default_value, "Expected string.")); + break; + + case FieldDescriptorProto::TYPE_BYTES: + DO(ConsumeString(default_value, "Expected string.")); + *default_value = CEscape(*default_value); + break; + + case FieldDescriptorProto::TYPE_ENUM: + DO(ConsumeIdentifier(default_value, "Expected identifier.")); + break; + + case FieldDescriptorProto::TYPE_MESSAGE: + case FieldDescriptorProto::TYPE_GROUP: + AddError("Messages can't have default values."); + return false; + } + + return true; +} + +bool Parser::ParseOptionNamePart(UninterpretedOption* uninterpreted_option, + const LocationRecorder& part_location) { + UninterpretedOption::NamePart* name = uninterpreted_option->add_name(); + string identifier; // We parse identifiers into this string. + if (LookingAt("(")) { // This is an extension. + DO(Consume("(")); + + { + LocationRecorder location( + part_location, UninterpretedOption::NamePart::kNamePartFieldNumber); + // An extension name consists of dot-separated identifiers, and may begin + // with a dot. + if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) { + DO(ConsumeIdentifier(&identifier, "Expected identifier.")); + name->mutable_name_part()->append(identifier); + } + while (LookingAt(".")) { + DO(Consume(".")); + name->mutable_name_part()->append("."); + DO(ConsumeIdentifier(&identifier, "Expected identifier.")); + name->mutable_name_part()->append(identifier); + } + } + + DO(Consume(")")); + name->set_is_extension(true); + } else { // This is a regular field. + LocationRecorder location( + part_location, UninterpretedOption::NamePart::kNamePartFieldNumber); + DO(ConsumeIdentifier(&identifier, "Expected identifier.")); + name->mutable_name_part()->append(identifier); + name->set_is_extension(false); + } + return true; +} + +bool Parser::ParseUninterpretedBlock(string* value) { + // Note that enclosing braces are not added to *value. + DO(Consume("{")); + int brace_depth = 1; + while (!AtEnd()) { + if (LookingAt("{")) { + brace_depth++; + } else if (LookingAt("}")) { + brace_depth--; + if (brace_depth == 0) { + input_->Next(); + return true; + } + } + // TODO(sanjay): Interpret line/column numbers to preserve formatting + if (!value->empty()) value->push_back(' '); + value->append(input_->current().text); + input_->Next(); + } + AddError("Unexpected end of stream while parsing aggregate value."); + return false; +} + +// We don't interpret the option here. Instead we store it in an +// UninterpretedOption, to be interpreted later. +bool Parser::ParseOptionAssignment(Message* options, + const LocationRecorder& options_location) { + // Create an entry in the uninterpreted_option field. 
+ const FieldDescriptor* uninterpreted_option_field = options->GetDescriptor()-> + FindFieldByName("uninterpreted_option"); + GOOGLE_CHECK(uninterpreted_option_field != NULL) + << "No field named \"uninterpreted_option\" in the Options proto."; + + const Reflection* reflection = options->GetReflection(); + + LocationRecorder location( + options_location, uninterpreted_option_field->number(), + reflection->FieldSize(*options, uninterpreted_option_field)); + + UninterpretedOption* uninterpreted_option = down_cast( + options->GetReflection()->AddMessage(options, + uninterpreted_option_field)); + + // Parse dot-separated name. + { + LocationRecorder name_location(location, + UninterpretedOption::kNameFieldNumber); + name_location.RecordLegacyLocation( + uninterpreted_option, DescriptorPool::ErrorCollector::OPTION_NAME); + + { + LocationRecorder part_location(name_location, + uninterpreted_option->name_size()); + DO(ParseOptionNamePart(uninterpreted_option, part_location)); + } + + while (LookingAt(".")) { + DO(Consume(".")); + LocationRecorder part_location(name_location, + uninterpreted_option->name_size()); + DO(ParseOptionNamePart(uninterpreted_option, part_location)); + } + } + + DO(Consume("=")); + + LocationRecorder value_location(location); + value_location.RecordLegacyLocation( + uninterpreted_option, DescriptorPool::ErrorCollector::OPTION_VALUE); + + // All values are a single token, except for negative numbers, which consist + // of a single '-' symbol, followed by a positive number. + bool is_negative = TryConsume("-"); + + switch (input_->current().type) { + case io::Tokenizer::TYPE_START: + GOOGLE_LOG(FATAL) << "Trying to read value before any tokens have been read."; + return false; + + case io::Tokenizer::TYPE_END: + AddError("Unexpected end of stream while parsing option value."); + return false; + + case io::Tokenizer::TYPE_IDENTIFIER: { + value_location.AddPath(UninterpretedOption::kIdentifierValueFieldNumber); + if (is_negative) { + AddError("Invalid '-' symbol before identifier."); + return false; + } + string value; + DO(ConsumeIdentifier(&value, "Expected identifier.")); + uninterpreted_option->set_identifier_value(value); + break; + } + + case io::Tokenizer::TYPE_INTEGER: { + uint64 value; + uint64 max_value = + is_negative ? static_cast(kint64max) + 1 : kuint64max; + DO(ConsumeInteger64(max_value, &value, "Expected integer.")); + if (is_negative) { + value_location.AddPath( + UninterpretedOption::kNegativeIntValueFieldNumber); + uninterpreted_option->set_negative_int_value(-static_cast(value)); + } else { + value_location.AddPath( + UninterpretedOption::kPositiveIntValueFieldNumber); + uninterpreted_option->set_positive_int_value(value); + } + break; + } + + case io::Tokenizer::TYPE_FLOAT: { + value_location.AddPath(UninterpretedOption::kDoubleValueFieldNumber); + double value; + DO(ConsumeNumber(&value, "Expected number.")); + uninterpreted_option->set_double_value(is_negative ? 
-value : value); + break; + } + + case io::Tokenizer::TYPE_STRING: { + value_location.AddPath(UninterpretedOption::kStringValueFieldNumber); + if (is_negative) { + AddError("Invalid '-' symbol before string."); + return false; + } + string value; + DO(ConsumeString(&value, "Expected string.")); + uninterpreted_option->set_string_value(value); + break; + } + + case io::Tokenizer::TYPE_SYMBOL: + if (LookingAt("{")) { + value_location.AddPath(UninterpretedOption::kAggregateValueFieldNumber); + DO(ParseUninterpretedBlock( + uninterpreted_option->mutable_aggregate_value())); + } else { + AddError("Expected option value."); + return false; + } + break; + } + + return true; +} + +bool Parser::ParseExtensions(DescriptorProto* message, + const LocationRecorder& extensions_location) { + // Parse the declaration. + DO(Consume("extensions")); + + do { + // Note that kExtensionRangeFieldNumber was already pushed by the parent. + LocationRecorder location(extensions_location, + message->extension_range_size()); + + DescriptorProto::ExtensionRange* range = message->add_extension_range(); + location.RecordLegacyLocation( + range, DescriptorPool::ErrorCollector::NUMBER); + + int start, end; + io::Tokenizer::Token start_token; + + { + LocationRecorder start_location( + location, DescriptorProto::ExtensionRange::kStartFieldNumber); + start_token = input_->current(); + DO(ConsumeInteger(&start, "Expected field number range.")); + } + + if (TryConsume("to")) { + LocationRecorder end_location( + location, DescriptorProto::ExtensionRange::kEndFieldNumber); + if (TryConsume("max")) { + end = FieldDescriptor::kMaxNumber; + } else { + DO(ConsumeInteger(&end, "Expected integer.")); + } + } else { + LocationRecorder end_location( + location, DescriptorProto::ExtensionRange::kEndFieldNumber); + end_location.StartAt(start_token); + end_location.EndAt(start_token); + end = start; + } + + // Users like to specify inclusive ranges, but in code we like the end + // number to be exclusive. + ++end; + + range->set_start(start); + range->set_end(end); + } while (TryConsume(",")); + + DO(Consume(";")); + return true; +} + +bool Parser::ParseExtend(RepeatedPtrField* extensions, + RepeatedPtrField* messages, + const LocationRecorder& parent_location, + int location_field_number_for_nested_type, + const LocationRecorder& extend_location) { + DO(Consume("extend")); + + // Parse the extendee type. + io::Tokenizer::Token extendee_start = input_->current(); + string extendee; + DO(ParseUserDefinedType(&extendee)); + io::Tokenizer::Token extendee_end = input_->previous(); + + // Parse the block. + DO(Consume("{")); + + bool is_first = true; + + do { + if (AtEnd()) { + AddError("Reached end of input in extend definition (missing '}')."); + return false; + } + + // Note that kExtensionFieldNumber was already pushed by the parent. + LocationRecorder location(extend_location, extensions->size()); + + FieldDescriptorProto* field = extensions->Add(); + + { + LocationRecorder extendee_location( + location, FieldDescriptorProto::kExtendeeFieldNumber); + extendee_location.StartAt(extendee_start); + extendee_location.EndAt(extendee_end); + + if (is_first) { + extendee_location.RecordLegacyLocation( + field, DescriptorPool::ErrorCollector::EXTENDEE); + is_first = false; + } + } + + field->set_extendee(extendee); + + if (!ParseMessageField(field, messages, parent_location, + location_field_number_for_nested_type, + location)) { + // This statement failed to parse. Skip it, but keep looping to parse + // other statements. 
+ SkipStatement(); + } + } while(!TryConsume("}")); + + return true; +} + +// ------------------------------------------------------------------- +// Enums + +bool Parser::ParseEnumDefinition(EnumDescriptorProto* enum_type, + const LocationRecorder& enum_location) { + DO(Consume("enum")); + + { + LocationRecorder location(enum_location, + EnumDescriptorProto::kNameFieldNumber); + location.RecordLegacyLocation( + enum_type, DescriptorPool::ErrorCollector::NAME); + DO(ConsumeIdentifier(enum_type->mutable_name(), "Expected enum name.")); + } + + DO(ParseEnumBlock(enum_type, enum_location)); + return true; +} + +bool Parser::ParseEnumBlock(EnumDescriptorProto* enum_type, + const LocationRecorder& enum_location) { + DO(Consume("{")); + + while (!TryConsume("}")) { + if (AtEnd()) { + AddError("Reached end of input in enum definition (missing '}')."); + return false; + } + + if (!ParseEnumStatement(enum_type, enum_location)) { + // This statement failed to parse. Skip it, but keep looping to parse + // other statements. + SkipStatement(); + } + } + + return true; +} + +bool Parser::ParseEnumStatement(EnumDescriptorProto* enum_type, + const LocationRecorder& enum_location) { + if (TryConsume(";")) { + // empty statement; ignore + return true; + } else if (LookingAt("option")) { + LocationRecorder location(enum_location, + EnumDescriptorProto::kOptionsFieldNumber); + return ParseOption(enum_type->mutable_options(), location); + } else { + LocationRecorder location(enum_location, + EnumDescriptorProto::kValueFieldNumber, enum_type->value_size()); + return ParseEnumConstant(enum_type->add_value(), location); + } +} + +bool Parser::ParseEnumConstant(EnumValueDescriptorProto* enum_value, + const LocationRecorder& enum_value_location) { + // Parse name. + { + LocationRecorder location(enum_value_location, + EnumValueDescriptorProto::kNameFieldNumber); + location.RecordLegacyLocation( + enum_value, DescriptorPool::ErrorCollector::NAME); + DO(ConsumeIdentifier(enum_value->mutable_name(), + "Expected enum constant name.")); + } + + DO(Consume("=", "Missing numeric value for enum constant.")); + + // Parse value. 
+ { + LocationRecorder location( + enum_value_location, EnumValueDescriptorProto::kNumberFieldNumber); + location.RecordLegacyLocation( + enum_value, DescriptorPool::ErrorCollector::NUMBER); + + bool is_negative = TryConsume("-"); + int number; + DO(ConsumeInteger(&number, "Expected integer.")); + if (is_negative) number *= -1; + enum_value->set_number(number); + } + + DO(ParseEnumConstantOptions(enum_value, enum_value_location)); + + DO(Consume(";")); + + return true; +} + +bool Parser::ParseEnumConstantOptions( + EnumValueDescriptorProto* value, + const LocationRecorder& enum_value_location) { + if (!LookingAt("[")) return true; + + LocationRecorder location( + enum_value_location, EnumValueDescriptorProto::kOptionsFieldNumber); + + DO(Consume("[")); + + do { + DO(ParseOptionAssignment(value->mutable_options(), location)); + } while (TryConsume(",")); + + DO(Consume("]")); + return true; +} + +// ------------------------------------------------------------------- +// Services + +bool Parser::ParseServiceDefinition(ServiceDescriptorProto* service, + const LocationRecorder& service_location) { + DO(Consume("service")); + + { + LocationRecorder location(service_location, + ServiceDescriptorProto::kNameFieldNumber); + location.RecordLegacyLocation( + service, DescriptorPool::ErrorCollector::NAME); + DO(ConsumeIdentifier(service->mutable_name(), "Expected service name.")); + } + + DO(ParseServiceBlock(service, service_location)); + return true; +} + +bool Parser::ParseServiceBlock(ServiceDescriptorProto* service, + const LocationRecorder& service_location) { + DO(Consume("{")); + + while (!TryConsume("}")) { + if (AtEnd()) { + AddError("Reached end of input in service definition (missing '}')."); + return false; + } + + if (!ParseServiceStatement(service, service_location)) { + // This statement failed to parse. Skip it, but keep looping to parse + // other statements. + SkipStatement(); + } + } + + return true; +} + +bool Parser::ParseServiceStatement(ServiceDescriptorProto* service, + const LocationRecorder& service_location) { + if (TryConsume(";")) { + // empty statement; ignore + return true; + } else if (LookingAt("option")) { + LocationRecorder location( + service_location, ServiceDescriptorProto::kOptionsFieldNumber); + return ParseOption(service->mutable_options(), location); + } else { + LocationRecorder location(service_location, + ServiceDescriptorProto::kMethodFieldNumber, service->method_size()); + return ParseServiceMethod(service->add_method(), location); + } +} + +bool Parser::ParseServiceMethod(MethodDescriptorProto* method, + const LocationRecorder& method_location) { + DO(Consume("rpc")); + + { + LocationRecorder location(method_location, + MethodDescriptorProto::kNameFieldNumber); + location.RecordLegacyLocation( + method, DescriptorPool::ErrorCollector::NAME); + DO(ConsumeIdentifier(method->mutable_name(), "Expected method name.")); + } + + // Parse input type. + DO(Consume("(")); + { + LocationRecorder location(method_location, + MethodDescriptorProto::kInputTypeFieldNumber); + location.RecordLegacyLocation( + method, DescriptorPool::ErrorCollector::INPUT_TYPE); + DO(ParseUserDefinedType(method->mutable_input_type())); + } + DO(Consume(")")); + + // Parse output type. 
+ DO(Consume("returns")); + DO(Consume("(")); + { + LocationRecorder location(method_location, + MethodDescriptorProto::kOutputTypeFieldNumber); + location.RecordLegacyLocation( + method, DescriptorPool::ErrorCollector::OUTPUT_TYPE); + DO(ParseUserDefinedType(method->mutable_output_type())); + } + DO(Consume(")")); + + if (TryConsume("{")) { + // Options! + while (!TryConsume("}")) { + if (AtEnd()) { + AddError("Reached end of input in method options (missing '}')."); + return false; + } + + if (TryConsume(";")) { + // empty statement; ignore + } else { + LocationRecorder location(method_location, + MethodDescriptorProto::kOptionsFieldNumber); + if (!ParseOption(method->mutable_options(), location)) { + // This statement failed to parse. Skip it, but keep looping to + // parse other statements. + SkipStatement(); + } + } + } + } else { + DO(Consume(";")); + } + + return true; +} + +// ------------------------------------------------------------------- + +bool Parser::ParseLabel(FieldDescriptorProto::Label* label) { + if (TryConsume("optional")) { + *label = FieldDescriptorProto::LABEL_OPTIONAL; + return true; + } else if (TryConsume("repeated")) { + *label = FieldDescriptorProto::LABEL_REPEATED; + return true; + } else if (TryConsume("required")) { + *label = FieldDescriptorProto::LABEL_REQUIRED; + return true; + } else { + AddError("Expected \"required\", \"optional\", or \"repeated\"."); + // We can actually reasonably recover here by just assuming the user + // forgot the label altogether. + *label = FieldDescriptorProto::LABEL_OPTIONAL; + return true; + } +} + +bool Parser::ParseType(FieldDescriptorProto::Type* type, + string* type_name) { + TypeNameMap::const_iterator iter = kTypeNames.find(input_->current().text); + if (iter != kTypeNames.end()) { + *type = iter->second; + input_->Next(); + } else { + DO(ParseUserDefinedType(type_name)); + } + return true; +} + +bool Parser::ParseUserDefinedType(string* type_name) { + type_name->clear(); + + TypeNameMap::const_iterator iter = kTypeNames.find(input_->current().text); + if (iter != kTypeNames.end()) { + // Note: The only place enum types are allowed is for field types, but + // if we are parsing a field type then we would not get here because + // primitives are allowed there as well. So this error message doesn't + // need to account for enums. + AddError("Expected message type."); + + // Pretend to accept this type so that we can go on parsing. + *type_name = input_->current().text; + input_->Next(); + return true; + } + + // A leading "." means the name is fully-qualified. + if (TryConsume(".")) type_name->append("."); + + // Consume the first part of the name. + string identifier; + DO(ConsumeIdentifier(&identifier, "Expected type name.")); + type_name->append(identifier); + + // Consume more parts. + while (TryConsume(".")) { + type_name->append("."); + DO(ConsumeIdentifier(&identifier, "Expected identifier.")); + type_name->append(identifier); + } + + return true; +} + +// =================================================================== + +bool Parser::ParsePackage(FileDescriptorProto* file, + const LocationRecorder& root_location) { + if (file->has_package()) { + AddError("Multiple package definitions."); + // Don't append the new package to the old one. Just replace it. Not + // that it really matters since this is an error anyway. 
+ file->clear_package(); + } + + DO(Consume("package")); + + { + LocationRecorder location(root_location, + FileDescriptorProto::kPackageFieldNumber); + location.RecordLegacyLocation(file, DescriptorPool::ErrorCollector::NAME); + + while (true) { + string identifier; + DO(ConsumeIdentifier(&identifier, "Expected identifier.")); + file->mutable_package()->append(identifier); + if (!TryConsume(".")) break; + file->mutable_package()->append("."); + } + } + + DO(Consume(";")); + return true; +} + +bool Parser::ParseImport(string* import_filename, + const LocationRecorder& root_location, + int index) { + DO(Consume("import")); + { + LocationRecorder location(root_location, + FileDescriptorProto::kDependencyFieldNumber, + index); + DO(ConsumeString(import_filename, + "Expected a string naming the file to import.")); + } + DO(Consume(";")); + return true; +} + +bool Parser::ParseOption(Message* options, + const LocationRecorder& options_location) { + DO(Consume("option")); + DO(ParseOptionAssignment(options, options_location)); + DO(Consume(";")); + return true; +} + +// =================================================================== + +SourceLocationTable::SourceLocationTable() {} +SourceLocationTable::~SourceLocationTable() {} + +bool SourceLocationTable::Find( + const Message* descriptor, + DescriptorPool::ErrorCollector::ErrorLocation location, + int* line, int* column) const { + const pair* result = + FindOrNull(location_map_, make_pair(descriptor, location)); + if (result == NULL) { + *line = -1; + *column = 0; + return false; + } else { + *line = result->first; + *column = result->second; + return true; + } +} + +void SourceLocationTable::Add( + const Message* descriptor, + DescriptorPool::ErrorCollector::ErrorLocation location, + int line, int column) { + location_map_[make_pair(descriptor, location)] = make_pair(line, column); +} + +void SourceLocationTable::Clear() { + location_map_.clear(); +} + +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/parser.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/parser.h new file mode 100644 index 0000000000..4cc90a29af --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/parser.h @@ -0,0 +1,434 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Implements parsing of .proto files to FileDescriptorProtos. + +#ifndef GOOGLE_PROTOBUF_COMPILER_PARSER_H__ +#define GOOGLE_PROTOBUF_COMPILER_PARSER_H__ + +#include +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { class Message; } + +namespace protobuf { +namespace compiler { + +// Defined in this file. +class Parser; +class SourceLocationTable; + +// Implements parsing of protocol definitions (such as .proto files). +// +// Note that most users will be more interested in the Importer class. +// Parser is a lower-level class which simply converts a single .proto file +// to a FileDescriptorProto. It does not resolve import directives or perform +// many other kinds of validation needed to construct a complete +// FileDescriptor. +class LIBPROTOBUF_EXPORT Parser { + public: + Parser(); + ~Parser(); + + // Parse the entire input and construct a FileDescriptorProto representing + // it. Returns true if no errors occurred, false otherwise. + bool Parse(io::Tokenizer* input, FileDescriptorProto* file); + + // Optional features: + + // DEPRECATED: New code should use the SourceCodeInfo embedded in the + // FileDescriptorProto. + // + // Requests that locations of certain definitions be recorded to the given + // SourceLocationTable while parsing. This can be used to look up exact line + // and column numbers for errors reported by DescriptorPool during validation. + // Set to NULL (the default) to discard source location information. + void RecordSourceLocationsTo(SourceLocationTable* location_table) { + source_location_table_ = location_table; + } + + // Requests that errors be recorded to the given ErrorCollector while + // parsing. Set to NULL (the default) to discard error messages. + void RecordErrorsTo(io::ErrorCollector* error_collector) { + error_collector_ = error_collector; + } + + // Returns the identifier used in the "syntax = " declaration, if one was + // seen during the last call to Parse(), or the empty string otherwise. + const string& GetSyntaxIdentifier() { return syntax_identifier_; } + + // If set true, input files will be required to begin with a syntax + // identifier. Otherwise, files may omit this. If a syntax identifier + // is provided, it must be 'syntax = "proto2";' and must appear at the + // top of the file regardless of whether or not it was required.
+ void SetRequireSyntaxIdentifier(bool value) { + require_syntax_identifier_ = value; + } + + // Call SetStopAfterSyntaxIdentifier(true) to tell the parser to stop + // parsing as soon as it has seen the syntax identifier, or lack thereof. + // This is useful for quickly identifying the syntax of the file without + // parsing the whole thing. If this is enabled, no error will be recorded + // if the syntax identifier is something other than "proto2" (since + // presumably the caller intends to deal with that), but other kinds of + // errors (e.g. parse errors) will still be reported. When this is enabled, + // you may pass a NULL FileDescriptorProto to Parse(). + void SetStopAfterSyntaxIdentifier(bool value) { + stop_after_syntax_identifier_ = value; + } + + private: + // ================================================================= + // Error recovery helpers + + // Consume the rest of the current statement. This consumes tokens + // until it sees one of: + // ';' Consumes the token and returns. + // '{' Consumes the brace then calls SkipRestOfBlock(). + // '}' Returns without consuming. + // EOF Returns (can't consume). + // The Parser often calls SkipStatement() after encountering a syntax + // error. This allows it to go on parsing the following lines, allowing + // it to report more than just one error in the file. + void SkipStatement(); + + // Consume the rest of the current block, including nested blocks, + // ending after the closing '}' is encountered and consumed, or at EOF. + void SkipRestOfBlock(); + + // ----------------------------------------------------------------- + // Single-token consuming helpers + // + // These make parsing code more readable. + + // True if the current token is TYPE_END. + inline bool AtEnd(); + + // True if the next token matches the given text. + inline bool LookingAt(const char* text); + // True if the next token is of the given type. + inline bool LookingAtType(io::Tokenizer::TokenType token_type); + + // If the next token exactly matches the text given, consume it and return + // true. Otherwise, return false without logging an error. + bool TryConsume(const char* text); + + // These attempt to read some kind of token from the input. If successful, + // they return true. Otherwise they return false and add the given error + // to the error list. + + // Consume a token with the exact text given. + bool Consume(const char* text, const char* error); + // Same as above, but automatically generates the error "Expected \"text\".", + // where "text" is the expected token text. + bool Consume(const char* text); + // Consume a token of type IDENTIFIER and store its text in "output". + bool ConsumeIdentifier(string* output, const char* error); + // Consume an integer and store its value in "output". + bool ConsumeInteger(int* output, const char* error); + // Consume a 64-bit integer and store its value in "output". If the value + // is greater than max_value, an error will be reported. + bool ConsumeInteger64(uint64 max_value, uint64* output, const char* error); + // Consume a number and store its value in "output". This will accept + // tokens of either INTEGER or FLOAT type. + bool ConsumeNumber(double* output, const char* error); + // Consume a string literal and store its (unescaped) value in "output". + bool ConsumeString(string* output, const char* error); + + // ----------------------------------------------------------------- + // Error logging helpers + + // Invokes error_collector_->AddError(), if error_collector_ is not NULL. 
+ void AddError(int line, int column, const string& error); + + // Invokes error_collector_->AddError() with the line and column number + // of the current token. + void AddError(const string& error); + + // Records a location in the SourceCodeInfo.location table (see + // descriptor.proto). We use RAII to ensure that the start and end locations + // are recorded -- the constructor records the start location and the + // destructor records the end location. Since the parser is + // recursive-descent, this works out beautifully. + class LIBPROTOBUF_EXPORT LocationRecorder { + public: + // Construct the file's "root" location. + LocationRecorder(Parser* parser); + + // Construct a location that represents a declaration nested within the + // given parent. E.g. a field's location is nested within the location + // for a message type. The parent's path will be copied, so you should + // call AddPath() only to add the path components leading from the parent + // to the child (as opposed to leading from the root to the child). + LocationRecorder(const LocationRecorder& parent); + + // Convenience constructors that call AddPath() one or two times. + LocationRecorder(const LocationRecorder& parent, int path1); + LocationRecorder(const LocationRecorder& parent, int path1, int path2); + + ~LocationRecorder(); + + // Add a path component. See SourceCodeInfo.Location.path in + // descriptor.proto. + void AddPath(int path_component); + + // By default the location is considered to start at the current token at + // the time the LocationRecorder is created. StartAt() sets the start + // location to the given token instead. + void StartAt(const io::Tokenizer::Token& token); + + // By default the location is considered to end at the previous token at + // the time the LocationRecorder is destroyed. EndAt() sets the end + // location to the given token instead. + void EndAt(const io::Tokenizer::Token& token); + + // Records the start point of this location to the SourceLocationTable that + // was passed to RecordSourceLocationsTo(), if any. SourceLocationTable + // is an older way of keeping track of source locations which is still + // used in some places. + void RecordLegacyLocation(const Message* descriptor, + DescriptorPool::ErrorCollector::ErrorLocation location); + + private: + Parser* parser_; + SourceCodeInfo::Location* location_; + + void Init(const LocationRecorder& parent); + }; + + // ================================================================= + // Parsers for various language constructs + + // Parses the "syntax = \"proto2\";" line at the top of the file. Returns + // false if it failed to parse or if the syntax identifier was not + // recognized. + bool ParseSyntaxIdentifier(); + + // These methods parse various individual bits of code. They return + // false if they completely fail to parse the construct. In this case, + // it is probably necessary to skip the rest of the statement to recover. + // However, if these methods return true, it does NOT mean that there + // were no errors; only that there were no *syntax* errors. For instance, + // if a service method is defined using proper syntax but uses a primitive + // type as its input or output, ParseMethodField() still returns true + // and only reports the error by calling AddError(). In practice, this + // makes logic much simpler for the caller. + + // Parse a top-level message, enum, service, etc. 
+ bool ParseTopLevelStatement(FileDescriptorProto* file, + const LocationRecorder& root_location); + + // Parse various high-level language constructs. + bool ParseMessageDefinition(DescriptorProto* message, + const LocationRecorder& message_location); + bool ParseEnumDefinition(EnumDescriptorProto* enum_type, + const LocationRecorder& enum_location); + bool ParseServiceDefinition(ServiceDescriptorProto* service, + const LocationRecorder& service_location); + bool ParsePackage(FileDescriptorProto* file, + const LocationRecorder& root_location); + bool ParseImport(string* import_filename, + const LocationRecorder& root_location, + int index); + bool ParseOption(Message* options, + const LocationRecorder& options_location); + + // These methods parse the contents of a message, enum, or service type and + // add them to the given object. They consume the entire block including + // the beginning and ending brace. + bool ParseMessageBlock(DescriptorProto* message, + const LocationRecorder& message_location); + bool ParseEnumBlock(EnumDescriptorProto* enum_type, + const LocationRecorder& enum_location); + bool ParseServiceBlock(ServiceDescriptorProto* service, + const LocationRecorder& service_location); + + // Parse one statement within a message, enum, or service block, including + // final semicolon. + bool ParseMessageStatement(DescriptorProto* message, + const LocationRecorder& message_location); + bool ParseEnumStatement(EnumDescriptorProto* message, + const LocationRecorder& enum_location); + bool ParseServiceStatement(ServiceDescriptorProto* message, + const LocationRecorder& service_location); + + // Parse a field of a message. If the field is a group, its type will be + // added to "messages". + // + // parent_location and location_field_number_for_nested_type are needed when + // parsing groups -- we need to generate a nested message type within the + // parent and record its location accordingly. Since the parent could be + // either a FileDescriptorProto or a DescriptorProto, we must pass in the + // correct field number to use. + bool ParseMessageField(FieldDescriptorProto* field, + RepeatedPtrField<DescriptorProto>* messages, + const LocationRecorder& parent_location, + int location_field_number_for_nested_type, + const LocationRecorder& field_location); + + // Parse an "extensions" declaration. + bool ParseExtensions(DescriptorProto* message, + const LocationRecorder& extensions_location); + + // Parse an "extend" declaration. (See also comments for + // ParseMessageField().) + bool ParseExtend(RepeatedPtrField<FieldDescriptorProto>* extensions, + RepeatedPtrField<DescriptorProto>* messages, + const LocationRecorder& parent_location, + int location_field_number_for_nested_type, + const LocationRecorder& extend_location); + + // Parse a single enum value within an enum block. + bool ParseEnumConstant(EnumValueDescriptorProto* enum_value, + const LocationRecorder& enum_value_location); + + // Parse enum constant options, i.e. the list in square brackets at the end + // of the enum constant value definition. + bool ParseEnumConstantOptions(EnumValueDescriptorProto* value, + const LocationRecorder& enum_value_location); + + // Parse a single method within a service definition. + bool ParseServiceMethod(MethodDescriptorProto* method, + const LocationRecorder& method_location); + + // Parse "required", "optional", or "repeated" and fill in "label" + // with the value.
+ bool ParseLabel(FieldDescriptorProto::Label* label); + + // Parse a type name and fill in "type" (if it is a primitive) or + // "type_name" (if it is not) with the type parsed. + bool ParseType(FieldDescriptorProto::Type* type, + string* type_name); + // Parse a user-defined type and fill in "type_name" with the name. + // If a primitive type is named, it is treated as an error. + bool ParseUserDefinedType(string* type_name); + + // Parses field options, i.e. the stuff in square brackets at the end + // of a field definition. Also parses default value. + bool ParseFieldOptions(FieldDescriptorProto* field, + const LocationRecorder& field_location); + + // Parse the "default" option. This needs special handling because its + // type is the field's type. + bool ParseDefaultAssignment(FieldDescriptorProto* field, + const LocationRecorder& field_location); + + // Parse a single option name/value pair, e.g. "ctype = CORD". The name + // identifies a field of the given Message, and the value of that field + // is set to the parsed value. + bool ParseOptionAssignment(Message* options, + const LocationRecorder& options_location); + + // Parses a single part of a multipart option name. A multipart name consists + // of names separated by dots. Each name is either an identifier or a series + // of identifiers separated by dots and enclosed in parentheses. E.g., + // "foo.(bar.baz).qux". + bool ParseOptionNamePart(UninterpretedOption* uninterpreted_option, + const LocationRecorder& part_location); + + // Parses a string surrounded by balanced braces. Strips off the outer + // braces and stores the enclosed string in *value. + // E.g., + // { foo } *value gets 'foo' + // { foo { bar: box } } *value gets 'foo { bar: box }' + // {} *value gets '' + // + // REQUIRES: LookingAt("{") + // When finished successfully, we are looking at the first token past + // the ending brace. + bool ParseUninterpretedBlock(string* value); + + // ================================================================= + + io::Tokenizer* input_; + io::ErrorCollector* error_collector_; + SourceCodeInfo* source_code_info_; + SourceLocationTable* source_location_table_; // legacy + bool had_errors_; + bool require_syntax_identifier_; + bool stop_after_syntax_identifier_; + string syntax_identifier_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Parser); +}; + +// A table mapping (descriptor, ErrorLocation) pairs -- as reported by +// DescriptorPool when validating descriptors -- to line and column numbers +// within the original source code. +// +// This is semi-obsolete: FileDescriptorProto.source_code_info now contains +// far more complete information about source locations. However, as of this +// writing you still need to use SourceLocationTable when integrating with +// DescriptorPool. +class LIBPROTOBUF_EXPORT SourceLocationTable { + public: + SourceLocationTable(); + ~SourceLocationTable(); + + // Finds the precise location of the given error and fills in *line and + // *column with the line and column numbers. If not found, sets *line to + // -1 and *column to 0 (since line = -1 is used to mean "error has no exact + // location" in the ErrorCollector interface). Returns true if found, false + // otherwise. + bool Find(const Message* descriptor, + DescriptorPool::ErrorCollector::ErrorLocation location, + int* line, int* column) const; + + // Adds a location to the table. + void Add(const Message* descriptor, + DescriptorPool::ErrorCollector::ErrorLocation location, + int line, int column); + + // Clears the contents of the table. 
+ void Clear(); + + private: + typedef map< + pair<const Message*, DescriptorPool::ErrorCollector::ErrorLocation>, + pair<int, int> > LocationMap; + LocationMap location_map_; +}; + +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_PARSER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/parser_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/parser_unittest.cc new file mode 100644 index 0000000000..156c0dc3c8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/parser_unittest.cc @@ -0,0 +1,2122 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others.
+ +#include +#include +#include + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { + +namespace { + +class MockErrorCollector : public io::ErrorCollector { + public: + MockErrorCollector() {} + ~MockErrorCollector() {} + + string text_; + + // implements ErrorCollector --------------------------------------- + void AddError(int line, int column, const string& message) { + strings::SubstituteAndAppend(&text_, "$0:$1: $2\n", + line, column, message); + } +}; + +class MockValidationErrorCollector : public DescriptorPool::ErrorCollector { + public: + MockValidationErrorCollector(const SourceLocationTable& source_locations, + io::ErrorCollector* wrapped_collector) + : source_locations_(source_locations), + wrapped_collector_(wrapped_collector) {} + ~MockValidationErrorCollector() {} + + // implements ErrorCollector --------------------------------------- + void AddError(const string& filename, + const string& element_name, + const Message* descriptor, + ErrorLocation location, + const string& message) { + int line, column; + source_locations_.Find(descriptor, location, &line, &column); + wrapped_collector_->AddError(line, column, message); + } + + private: + const SourceLocationTable& source_locations_; + io::ErrorCollector* wrapped_collector_; +}; + +class ParserTest : public testing::Test { + protected: + ParserTest() + : require_syntax_identifier_(false) {} + + // Set up the parser to parse the given text. + void SetupParser(const char* text) { + raw_input_.reset(new io::ArrayInputStream(text, strlen(text))); + input_.reset(new io::Tokenizer(raw_input_.get(), &error_collector_)); + parser_.reset(new Parser()); + parser_->RecordErrorsTo(&error_collector_); + parser_->SetRequireSyntaxIdentifier(require_syntax_identifier_); + } + + // Parse the input and expect that the resulting FileDescriptorProto matches + // the given output. The output is a FileDescriptorProto in protocol buffer + // text format. + void ExpectParsesTo(const char* input, const char* output) { + SetupParser(input); + FileDescriptorProto actual, expected; + + parser_->Parse(input_.get(), &actual); + EXPECT_EQ(io::Tokenizer::TYPE_END, input_->current().type); + ASSERT_EQ("", error_collector_.text_); + + // We don't cover SourceCodeInfo in these tests. + actual.clear_source_code_info(); + + // Parse the ASCII representation in order to canonicalize it. We could + // just compare directly to actual.DebugString(), but that would require + // that the caller precisely match the formatting that DebugString() + // produces. + ASSERT_TRUE(TextFormat::ParseFromString(output, &expected)); + + // Compare by comparing debug strings. + // TODO(kenton): Use differencer, once it is available. + EXPECT_EQ(expected.DebugString(), actual.DebugString()); + } + + // Parse the text and expect that the given errors are reported. + void ExpectHasErrors(const char* text, const char* expected_errors) { + ExpectHasEarlyExitErrors(text, expected_errors); + EXPECT_EQ(io::Tokenizer::TYPE_END, input_->current().type); + } + + // Same as above but does not expect that the parser parses the complete + // input. 
+ void ExpectHasEarlyExitErrors(const char* text, const char* expected_errors) { + SetupParser(text); + FileDescriptorProto file; + parser_->Parse(input_.get(), &file); + EXPECT_EQ(expected_errors, error_collector_.text_); + } + + // Parse the text as a file and validate it (with a DescriptorPool), and + // expect that the validation step reports the given errors. + void ExpectHasValidationErrors(const char* text, + const char* expected_errors) { + SetupParser(text); + SourceLocationTable source_locations; + parser_->RecordSourceLocationsTo(&source_locations); + + FileDescriptorProto file; + file.set_name("foo.proto"); + parser_->Parse(input_.get(), &file); + EXPECT_EQ(io::Tokenizer::TYPE_END, input_->current().type); + ASSERT_EQ("", error_collector_.text_); + + MockValidationErrorCollector validation_error_collector( + source_locations, &error_collector_); + EXPECT_TRUE(pool_.BuildFileCollectingErrors( + file, &validation_error_collector) == NULL); + EXPECT_EQ(expected_errors, error_collector_.text_); + } + + MockErrorCollector error_collector_; + DescriptorPool pool_; + + scoped_ptr raw_input_; + scoped_ptr input_; + scoped_ptr parser_; + bool require_syntax_identifier_; +}; + +// =================================================================== + +TEST_F(ParserTest, StopAfterSyntaxIdentifier) { + SetupParser( + "// blah\n" + "syntax = \"foobar\";\n" + "this line will not be parsed\n"); + parser_->SetStopAfterSyntaxIdentifier(true); + EXPECT_TRUE(parser_->Parse(input_.get(), NULL)); + EXPECT_EQ("", error_collector_.text_); + EXPECT_EQ("foobar", parser_->GetSyntaxIdentifier()); +} + +TEST_F(ParserTest, StopAfterOmittedSyntaxIdentifier) { + SetupParser( + "// blah\n" + "this line will not be parsed\n"); + parser_->SetStopAfterSyntaxIdentifier(true); + EXPECT_TRUE(parser_->Parse(input_.get(), NULL)); + EXPECT_EQ("", error_collector_.text_); + EXPECT_EQ("", parser_->GetSyntaxIdentifier()); +} + +TEST_F(ParserTest, StopAfterSyntaxIdentifierWithErrors) { + SetupParser( + "// blah\n" + "syntax = error;\n"); + parser_->SetStopAfterSyntaxIdentifier(true); + EXPECT_FALSE(parser_->Parse(input_.get(), NULL)); + EXPECT_EQ("1:9: Expected syntax identifier.\n", error_collector_.text_); +} + +// =================================================================== + +typedef ParserTest ParseMessageTest; + +TEST_F(ParseMessageTest, SimpleMessage) { + ExpectParsesTo( + "message TestMessage {\n" + " required int32 foo = 1;\n" + "}\n", + + "message_type {" + " name: \"TestMessage\"" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_INT32 number:1 }" + "}"); +} + +TEST_F(ParseMessageTest, ImplicitSyntaxIdentifier) { + require_syntax_identifier_ = false; + ExpectParsesTo( + "message TestMessage {\n" + " required int32 foo = 1;\n" + "}\n", + + "message_type {" + " name: \"TestMessage\"" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_INT32 number:1 }" + "}"); + EXPECT_EQ("proto2", parser_->GetSyntaxIdentifier()); +} + +TEST_F(ParseMessageTest, ExplicitSyntaxIdentifier) { + ExpectParsesTo( + "syntax = \"proto2\";\n" + "message TestMessage {\n" + " required int32 foo = 1;\n" + "}\n", + + "message_type {" + " name: \"TestMessage\"" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_INT32 number:1 }" + "}"); + EXPECT_EQ("proto2", parser_->GetSyntaxIdentifier()); +} + +TEST_F(ParseMessageTest, ExplicitRequiredSyntaxIdentifier) { + require_syntax_identifier_ = true; + ExpectParsesTo( + "syntax = \"proto2\";\n" + "message TestMessage {\n" + " required int32 foo = 1;\n" + "}\n", + + "message_type {" 
+ " name: \"TestMessage\"" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_INT32 number:1 }" + "}"); + EXPECT_EQ("proto2", parser_->GetSyntaxIdentifier()); +} + +TEST_F(ParseMessageTest, SimpleFields) { + ExpectParsesTo( + "message TestMessage {\n" + " required int32 foo = 15;\n" + " optional int32 bar = 34;\n" + " repeated int32 baz = 3;\n" + "}\n", + + "message_type {" + " name: \"TestMessage\"" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_INT32 number:15 }" + " field { name:\"bar\" label:LABEL_OPTIONAL type:TYPE_INT32 number:34 }" + " field { name:\"baz\" label:LABEL_REPEATED type:TYPE_INT32 number:3 }" + "}"); +} + +TEST_F(ParseMessageTest, PrimitiveFieldTypes) { + ExpectParsesTo( + "message TestMessage {\n" + " required int32 foo = 1;\n" + " required int64 foo = 1;\n" + " required uint32 foo = 1;\n" + " required uint64 foo = 1;\n" + " required sint32 foo = 1;\n" + " required sint64 foo = 1;\n" + " required fixed32 foo = 1;\n" + " required fixed64 foo = 1;\n" + " required sfixed32 foo = 1;\n" + " required sfixed64 foo = 1;\n" + " required float foo = 1;\n" + " required double foo = 1;\n" + " required string foo = 1;\n" + " required bytes foo = 1;\n" + " required bool foo = 1;\n" + "}\n", + + "message_type {" + " name: \"TestMessage\"" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_INT32 number:1 }" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_INT64 number:1 }" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_UINT32 number:1 }" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_UINT64 number:1 }" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_SINT32 number:1 }" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_SINT64 number:1 }" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_FIXED32 number:1 }" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_FIXED64 number:1 }" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_SFIXED32 number:1 }" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_SFIXED64 number:1 }" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_FLOAT number:1 }" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_DOUBLE number:1 }" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_STRING number:1 }" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_BYTES number:1 }" + " field { name:\"foo\" label:LABEL_REQUIRED type:TYPE_BOOL number:1 }" + "}"); +} + +TEST_F(ParseMessageTest, FieldDefaults) { + ExpectParsesTo( + "message TestMessage {\n" + " required int32 foo = 1 [default= 1 ];\n" + " required int32 foo = 1 [default= -2 ];\n" + " required int64 foo = 1 [default= 3 ];\n" + " required int64 foo = 1 [default= -4 ];\n" + " required uint32 foo = 1 [default= 5 ];\n" + " required uint64 foo = 1 [default= 6 ];\n" + " required float foo = 1 [default= 7.5];\n" + " required float foo = 1 [default= -8.5];\n" + " required float foo = 1 [default= 9 ];\n" + " required double foo = 1 [default= 10.5];\n" + " required double foo = 1 [default=-11.5];\n" + " required double foo = 1 [default= 12 ];\n" + " required double foo = 1 [default= inf ];\n" + " required double foo = 1 [default=-inf ];\n" + " required double foo = 1 [default= nan ];\n" + " required string foo = 1 [default='13\\001'];\n" + " required string foo = 1 [default='a' \"b\" \n \"c\"];\n" + " required bytes foo = 1 [default='14\\002'];\n" + " required bytes foo = 1 [default='a' \"b\" \n 'c'];\n" + " required bool foo = 1 [default=true ];\n" + " required Foo foo = 1 [default=FOO ];\n" + + " required int32 foo = 1 [default= 
0x7FFFFFFF];\n" + " required int32 foo = 1 [default=-0x80000000];\n" + " required uint32 foo = 1 [default= 0xFFFFFFFF];\n" + " required int64 foo = 1 [default= 0x7FFFFFFFFFFFFFFF];\n" + " required int64 foo = 1 [default=-0x8000000000000000];\n" + " required uint64 foo = 1 [default= 0xFFFFFFFFFFFFFFFF];\n" + " required double foo = 1 [default= 0xabcd];\n" + "}\n", + +#define ETC "name:\"foo\" label:LABEL_REQUIRED number:1" + "message_type {" + " name: \"TestMessage\"" + " field { type:TYPE_INT32 default_value:\"1\" "ETC" }" + " field { type:TYPE_INT32 default_value:\"-2\" "ETC" }" + " field { type:TYPE_INT64 default_value:\"3\" "ETC" }" + " field { type:TYPE_INT64 default_value:\"-4\" "ETC" }" + " field { type:TYPE_UINT32 default_value:\"5\" "ETC" }" + " field { type:TYPE_UINT64 default_value:\"6\" "ETC" }" + " field { type:TYPE_FLOAT default_value:\"7.5\" "ETC" }" + " field { type:TYPE_FLOAT default_value:\"-8.5\" "ETC" }" + " field { type:TYPE_FLOAT default_value:\"9\" "ETC" }" + " field { type:TYPE_DOUBLE default_value:\"10.5\" "ETC" }" + " field { type:TYPE_DOUBLE default_value:\"-11.5\" "ETC" }" + " field { type:TYPE_DOUBLE default_value:\"12\" "ETC" }" + " field { type:TYPE_DOUBLE default_value:\"inf\" "ETC" }" + " field { type:TYPE_DOUBLE default_value:\"-inf\" "ETC" }" + " field { type:TYPE_DOUBLE default_value:\"nan\" "ETC" }" + " field { type:TYPE_STRING default_value:\"13\\001\" "ETC" }" + " field { type:TYPE_STRING default_value:\"abc\" "ETC" }" + " field { type:TYPE_BYTES default_value:\"14\\\\002\" "ETC" }" + " field { type:TYPE_BYTES default_value:\"abc\" "ETC" }" + " field { type:TYPE_BOOL default_value:\"true\" "ETC" }" + " field { type_name:\"Foo\" default_value:\"FOO\" "ETC" }" + + " field { type:TYPE_INT32 default_value:\"2147483647\" "ETC" }" + " field { type:TYPE_INT32 default_value:\"-2147483648\" "ETC" }" + " field { type:TYPE_UINT32 default_value:\"4294967295\" "ETC" }" + " field { type:TYPE_INT64 default_value:\"9223372036854775807\" "ETC" }" + " field { type:TYPE_INT64 default_value:\"-9223372036854775808\" "ETC" }" + " field { type:TYPE_UINT64 default_value:\"18446744073709551615\" "ETC" }" + " field { type:TYPE_DOUBLE default_value:\"43981\" "ETC" }" + "}"); +#undef ETC +} + +TEST_F(ParseMessageTest, FieldOptions) { + ExpectParsesTo( + "message TestMessage {\n" + " optional string foo = 1\n" + " [ctype=CORD, (foo)=7, foo.(.bar.baz).qux.quux.(corge)=-33, \n" + " (quux)=\"x\040y\", (baz.qux)=hey];\n" + "}\n", + + "message_type {" + " name: \"TestMessage\"" + " field { name: \"foo\" label: LABEL_OPTIONAL type: TYPE_STRING number: 1" + " options { uninterpreted_option: { name { name_part: \"ctype\" " + " is_extension: false } " + " identifier_value: \"CORD\" }" + " uninterpreted_option: { name { name_part: \"foo\" " + " is_extension: true } " + " positive_int_value: 7 }" + " uninterpreted_option: { name { name_part: \"foo\" " + " is_extension: false } " + " name { name_part: \".bar.baz\"" + " is_extension: true } " + " name { name_part: \"qux\" " + " is_extension: false } " + " name { name_part: \"quux\" " + " is_extension: false } " + " name { name_part: \"corge\" " + " is_extension: true } " + " negative_int_value: -33 }" + " uninterpreted_option: { name { name_part: \"quux\" " + " is_extension: true } " + " string_value: \"x y\" }" + " uninterpreted_option: { name { name_part: \"baz.qux\" " + " is_extension: true } " + " identifier_value: \"hey\" }" + " }" + " }" + "}"); +} + +TEST_F(ParseMessageTest, Group) { + ExpectParsesTo( + "message TestMessage {\n" + " 
optional group TestGroup = 1 {};\n" + "}\n", + + "message_type {" + " name: \"TestMessage\"" + " nested_type { name: \"TestGroup\" }" + " field { name:\"testgroup\" label:LABEL_OPTIONAL number:1" + " type:TYPE_GROUP type_name: \"TestGroup\" }" + "}"); +} + +TEST_F(ParseMessageTest, NestedMessage) { + ExpectParsesTo( + "message TestMessage {\n" + " message Nested {}\n" + " optional Nested test_nested = 1;\n" + "}\n", + + "message_type {" + " name: \"TestMessage\"" + " nested_type { name: \"Nested\" }" + " field { name:\"test_nested\" label:LABEL_OPTIONAL number:1" + " type_name: \"Nested\" }" + "}"); +} + +TEST_F(ParseMessageTest, NestedEnum) { + ExpectParsesTo( + "message TestMessage {\n" + " enum NestedEnum {}\n" + " optional NestedEnum test_enum = 1;\n" + "}\n", + + "message_type {" + " name: \"TestMessage\"" + " enum_type { name: \"NestedEnum\" }" + " field { name:\"test_enum\" label:LABEL_OPTIONAL number:1" + " type_name: \"NestedEnum\" }" + "}"); +} + +TEST_F(ParseMessageTest, ExtensionRange) { + ExpectParsesTo( + "message TestMessage {\n" + " extensions 10 to 19;\n" + " extensions 30 to max;\n" + "}\n", + + "message_type {" + " name: \"TestMessage\"" + " extension_range { start:10 end:20 }" + " extension_range { start:30 end:536870912 }" + "}"); +} + +TEST_F(ParseMessageTest, CompoundExtensionRange) { + ExpectParsesTo( + "message TestMessage {\n" + " extensions 2, 15, 9 to 11, 100 to max, 3;\n" + "}\n", + + "message_type {" + " name: \"TestMessage\"" + " extension_range { start:2 end:3 }" + " extension_range { start:15 end:16 }" + " extension_range { start:9 end:12 }" + " extension_range { start:100 end:536870912 }" + " extension_range { start:3 end:4 }" + "}"); +} + +TEST_F(ParseMessageTest, Extensions) { + ExpectParsesTo( + "extend Extendee1 { optional int32 foo = 12; }\n" + "extend Extendee2 { repeated TestMessage bar = 22; }\n", + + "extension { name:\"foo\" label:LABEL_OPTIONAL type:TYPE_INT32 number:12" + " extendee: \"Extendee1\" } " + "extension { name:\"bar\" label:LABEL_REPEATED number:22" + " type_name:\"TestMessage\" extendee: \"Extendee2\" }"); +} + +TEST_F(ParseMessageTest, ExtensionsInMessageScope) { + ExpectParsesTo( + "message TestMessage {\n" + " extend Extendee1 { optional int32 foo = 12; }\n" + " extend Extendee2 { repeated TestMessage bar = 22; }\n" + "}\n", + + "message_type {" + " name: \"TestMessage\"" + " extension { name:\"foo\" label:LABEL_OPTIONAL type:TYPE_INT32 number:12" + " extendee: \"Extendee1\" }" + " extension { name:\"bar\" label:LABEL_REPEATED number:22" + " type_name:\"TestMessage\" extendee: \"Extendee2\" }" + "}"); +} + +TEST_F(ParseMessageTest, MultipleExtensionsOneExtendee) { + ExpectParsesTo( + "extend Extendee1 {\n" + " optional int32 foo = 12;\n" + " repeated TestMessage bar = 22;\n" + "}\n", + + "extension { name:\"foo\" label:LABEL_OPTIONAL type:TYPE_INT32 number:12" + " extendee: \"Extendee1\" } " + "extension { name:\"bar\" label:LABEL_REPEATED number:22" + " type_name:\"TestMessage\" extendee: \"Extendee1\" }"); +} + +// =================================================================== + +typedef ParserTest ParseEnumTest; + +TEST_F(ParseEnumTest, SimpleEnum) { + ExpectParsesTo( + "enum TestEnum {\n" + " FOO = 0;\n" + "}\n", + + "enum_type {" + " name: \"TestEnum\"" + " value { name:\"FOO\" number:0 }" + "}"); +} + +TEST_F(ParseEnumTest, Values) { + ExpectParsesTo( + "enum TestEnum {\n" + " FOO = 13;\n" + " BAR = -10;\n" + " BAZ = 500;\n" + "}\n", + + "enum_type {" + " name: \"TestEnum\"" + " value { name:\"FOO\" number:13 }" + " 
value { name:\"BAR\" number:-10 }" + " value { name:\"BAZ\" number:500 }" + "}"); +} + +TEST_F(ParseEnumTest, ValueOptions) { + ExpectParsesTo( + "enum TestEnum {\n" + " FOO = 13;\n" + " BAR = -10 [ (something.text) = 'abc' ];\n" + " BAZ = 500 [ (something.text) = 'def', other = 1 ];\n" + "}\n", + + "enum_type {" + " name: \"TestEnum\"" + " value { name: \"FOO\" number: 13 }" + " value { name: \"BAR\" number: -10 " + " options { " + " uninterpreted_option { " + " name { name_part: \"something.text\" is_extension: true } " + " string_value: \"abc\" " + " } " + " } " + " } " + " value { name: \"BAZ\" number: 500 " + " options { " + " uninterpreted_option { " + " name { name_part: \"something.text\" is_extension: true } " + " string_value: \"def\" " + " } " + " uninterpreted_option { " + " name { name_part: \"other\" is_extension: false } " + " positive_int_value: 1 " + " } " + " } " + " } " + "}"); +} + +// =================================================================== + +typedef ParserTest ParseServiceTest; + +TEST_F(ParseServiceTest, SimpleService) { + ExpectParsesTo( + "service TestService {\n" + " rpc Foo(In) returns (Out);\n" + "}\n", + + "service {" + " name: \"TestService\"" + " method { name:\"Foo\" input_type:\"In\" output_type:\"Out\" }" + "}"); +} + +TEST_F(ParseServiceTest, Methods) { + ExpectParsesTo( + "service TestService {\n" + " rpc Foo(In1) returns (Out1);\n" + " rpc Bar(In2) returns (Out2);\n" + " rpc Baz(In3) returns (Out3);\n" + "}\n", + + "service {" + " name: \"TestService\"" + " method { name:\"Foo\" input_type:\"In1\" output_type:\"Out1\" }" + " method { name:\"Bar\" input_type:\"In2\" output_type:\"Out2\" }" + " method { name:\"Baz\" input_type:\"In3\" output_type:\"Out3\" }" + "}"); +} + +// =================================================================== +// imports and packages + +typedef ParserTest ParseMiscTest; + +TEST_F(ParseMiscTest, ParseImport) { + ExpectParsesTo( + "import \"foo/bar/baz.proto\";\n", + "dependency: \"foo/bar/baz.proto\""); +} + +TEST_F(ParseMiscTest, ParseMultipleImports) { + ExpectParsesTo( + "import \"foo.proto\";\n" + "import \"bar.proto\";\n" + "import \"baz.proto\";\n", + "dependency: \"foo.proto\"" + "dependency: \"bar.proto\"" + "dependency: \"baz.proto\""); +} + +TEST_F(ParseMiscTest, ParsePackage) { + ExpectParsesTo( + "package foo.bar.baz;\n", + "package: \"foo.bar.baz\""); +} + +TEST_F(ParseMiscTest, ParsePackageWithSpaces) { + ExpectParsesTo( + "package foo . bar. \n" + " baz;\n", + "package: \"foo.bar.baz\""); +} + +// =================================================================== +// options + +TEST_F(ParseMiscTest, ParseFileOptions) { + ExpectParsesTo( + "option java_package = \"com.google.foo\";\n" + "option optimize_for = CODE_SIZE;", + + "options {" + "uninterpreted_option { name { name_part: \"java_package\" " + " is_extension: false }" + " string_value: \"com.google.foo\"} " + "uninterpreted_option { name { name_part: \"optimize_for\" " + " is_extension: false }" + " identifier_value: \"CODE_SIZE\" } " + "}"); +} + +// =================================================================== +// Error tests +// +// There are a very large number of possible errors that the parser could +// report, so it's infeasible to test every single one of them. Instead, +// we test each unique call to AddError() in parser.h. This does not mean +// we are testing every possible error that Parser can generate because +// each variant of the Consume() helper only counts as one unique call to +// AddError(). 
+ +typedef ParserTest ParseErrorTest; + +TEST_F(ParseErrorTest, MissingSyntaxIdentifier) { + require_syntax_identifier_ = true; + ExpectHasEarlyExitErrors( + "message TestMessage {}", + "0:0: File must begin with 'syntax = \"proto2\";'.\n"); + EXPECT_EQ("", parser_->GetSyntaxIdentifier()); +} + +TEST_F(ParseErrorTest, UnknownSyntaxIdentifier) { + ExpectHasEarlyExitErrors( + "syntax = \"no_such_syntax\";", + "0:9: Unrecognized syntax identifier \"no_such_syntax\". This parser " + "only recognizes \"proto2\".\n"); + EXPECT_EQ("no_such_syntax", parser_->GetSyntaxIdentifier()); +} + +TEST_F(ParseErrorTest, SimpleSyntaxError) { + ExpectHasErrors( + "message TestMessage @#$ { blah }", + "0:20: Expected \"{\".\n"); + EXPECT_EQ("proto2", parser_->GetSyntaxIdentifier()); +} + +TEST_F(ParseErrorTest, ExpectedTopLevel) { + ExpectHasErrors( + "blah;", + "0:0: Expected top-level statement (e.g. \"message\").\n"); +} + +TEST_F(ParseErrorTest, UnmatchedCloseBrace) { + // This used to cause an infinite loop. Doh. + ExpectHasErrors( + "}", + "0:0: Expected top-level statement (e.g. \"message\").\n" + "0:0: Unmatched \"}\".\n"); +} + +// ------------------------------------------------------------------- +// Message errors + +TEST_F(ParseErrorTest, MessageMissingName) { + ExpectHasErrors( + "message {}", + "0:8: Expected message name.\n"); +} + +TEST_F(ParseErrorTest, MessageMissingBody) { + ExpectHasErrors( + "message TestMessage;", + "0:19: Expected \"{\".\n"); +} + +TEST_F(ParseErrorTest, EofInMessage) { + ExpectHasErrors( + "message TestMessage {", + "0:21: Reached end of input in message definition (missing '}').\n"); +} + +TEST_F(ParseErrorTest, MissingFieldNumber) { + ExpectHasErrors( + "message TestMessage {\n" + " optional int32 foo;\n" + "}\n", + "1:20: Missing field number.\n"); +} + +TEST_F(ParseErrorTest, ExpectedFieldNumber) { + ExpectHasErrors( + "message TestMessage {\n" + " optional int32 foo = ;\n" + "}\n", + "1:23: Expected field number.\n"); +} + +TEST_F(ParseErrorTest, FieldNumberOutOfRange) { + ExpectHasErrors( + "message TestMessage {\n" + " optional int32 foo = 0x100000000;\n" + "}\n", + "1:23: Integer out of range.\n"); +} + +TEST_F(ParseErrorTest, MissingLabel) { + ExpectHasErrors( + "message TestMessage {\n" + " int32 foo = 1;\n" + "}\n", + "1:2: Expected \"required\", \"optional\", or \"repeated\".\n"); +} + +TEST_F(ParseErrorTest, ExpectedOptionName) { + ExpectHasErrors( + "message TestMessage {\n" + " optional uint32 foo = 1 [];\n" + "}\n", + "1:27: Expected identifier.\n"); +} + +TEST_F(ParseErrorTest, NonExtensionOptionNameBeginningWithDot) { + ExpectHasErrors( + "message TestMessage {\n" + " optional uint32 foo = 1 [.foo=1];\n" + "}\n", + "1:27: Expected identifier.\n"); +} + +TEST_F(ParseErrorTest, DefaultValueTypeMismatch) { + ExpectHasErrors( + "message TestMessage {\n" + " optional uint32 foo = 1 [default=true];\n" + "}\n", + "1:35: Expected integer.\n"); +} + +TEST_F(ParseErrorTest, DefaultValueNotBoolean) { + ExpectHasErrors( + "message TestMessage {\n" + " optional bool foo = 1 [default=blah];\n" + "}\n", + "1:33: Expected \"true\" or \"false\".\n"); +} + +TEST_F(ParseErrorTest, DefaultValueNotString) { + ExpectHasErrors( + "message TestMessage {\n" + " optional string foo = 1 [default=1];\n" + "}\n", + "1:35: Expected string.\n"); +} + +TEST_F(ParseErrorTest, DefaultValueUnsignedNegative) { + ExpectHasErrors( + "message TestMessage {\n" + " optional uint32 foo = 1 [default=-1];\n" + "}\n", + "1:36: Unsigned field can't have negative default value.\n"); +} + 
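Both the ExpectParsesTo and the ExpectHasErrors style of check reduces to the same round trip: tokenize the .proto text, route the parser's errors to a collector such as the one sketched earlier, and parse into a FileDescriptorProto. A hedged, self-contained sketch of that round trip follows; the helper name is illustrative and anything beyond basic error collection is omitted.

#include <string>
#include <google/protobuf/compiler/parser.h>
#include <google/protobuf/descriptor.pb.h>
#include <google/protobuf/io/tokenizer.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>

// Parses .proto text into a FileDescriptorProto; returns false if any error
// was reported to the supplied collector.
bool ParseProtoText(const std::string& text,
                    google::protobuf::FileDescriptorProto* output,
                    google::protobuf::io::ErrorCollector* errors) {
  google::protobuf::io::ArrayInputStream raw_input(
      text.data(), static_cast<int>(text.size()));
  google::protobuf::io::Tokenizer tokenizer(&raw_input, errors);
  google::protobuf::compiler::Parser parser;
  parser.RecordErrorsTo(errors);            // parser errors go to the same collector
  return parser.Parse(&tokenizer, output);  // tokenizer errors are reported as they occur
}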
+TEST_F(ParseErrorTest, DefaultValueTooLarge) { + ExpectHasErrors( + "message TestMessage {\n" + " optional int32 foo = 1 [default= 0x80000000];\n" + " optional int32 foo = 1 [default=-0x80000001];\n" + " optional uint32 foo = 1 [default= 0x100000000];\n" + " optional int64 foo = 1 [default= 0x80000000000000000];\n" + " optional int64 foo = 1 [default=-0x80000000000000001];\n" + " optional uint64 foo = 1 [default= 0x100000000000000000];\n" + "}\n", + "1:36: Integer out of range.\n" + "2:36: Integer out of range.\n" + "3:36: Integer out of range.\n" + "4:36: Integer out of range.\n" + "5:36: Integer out of range.\n" + "6:36: Integer out of range.\n"); +} + +TEST_F(ParseErrorTest, DefaultValueMissing) { + ExpectHasErrors( + "message TestMessage {\n" + " optional uint32 foo = 1 [default=];\n" + "}\n", + "1:35: Expected integer.\n"); +} + +TEST_F(ParseErrorTest, DefaultValueForGroup) { + ExpectHasErrors( + "message TestMessage {\n" + " optional group Foo = 1 [default=blah] {}\n" + "}\n", + "1:34: Messages can't have default values.\n"); +} + +TEST_F(ParseErrorTest, DuplicateDefaultValue) { + ExpectHasErrors( + "message TestMessage {\n" + " optional uint32 foo = 1 [default=1,default=2];\n" + "}\n", + "1:37: Already set option \"default\".\n"); +} + +TEST_F(ParseErrorTest, GroupNotCapitalized) { + ExpectHasErrors( + "message TestMessage {\n" + " optional group foo = 1 {}\n" + "}\n", + "1:17: Group names must start with a capital letter.\n"); +} + +TEST_F(ParseErrorTest, GroupMissingBody) { + ExpectHasErrors( + "message TestMessage {\n" + " optional group Foo = 1;\n" + "}\n", + "1:24: Missing group body.\n"); +} + +TEST_F(ParseErrorTest, ExtendingPrimitive) { + ExpectHasErrors( + "extend int32 { optional string foo = 4; }\n", + "0:7: Expected message type.\n"); +} + +TEST_F(ParseErrorTest, ErrorInExtension) { + ExpectHasErrors( + "message Foo { extensions 100 to 199; }\n" + "extend Foo { optional string foo; }\n", + "1:32: Missing field number.\n"); +} + +TEST_F(ParseErrorTest, MultipleParseErrors) { + // When a statement has a parse error, the parser should be able to continue + // parsing at the next statement. 
+ ExpectHasErrors( + "message TestMessage {\n" + " optional int32 foo;\n" + " !invalid statement ending in a block { blah blah { blah } blah }\n" + " optional int32 bar = 3 {}\n" + "}\n", + "1:20: Missing field number.\n" + "2:2: Expected \"required\", \"optional\", or \"repeated\".\n" + "2:2: Expected type name.\n" + "3:25: Expected \";\".\n"); +} + +TEST_F(ParseErrorTest, EofInAggregateValue) { + ExpectHasErrors( + "option (fileopt) = { i:100\n", + "1:0: Unexpected end of stream while parsing aggregate value.\n"); +} + +// ------------------------------------------------------------------- +// Enum errors + +TEST_F(ParseErrorTest, EofInEnum) { + ExpectHasErrors( + "enum TestEnum {", + "0:15: Reached end of input in enum definition (missing '}').\n"); +} + +TEST_F(ParseErrorTest, EnumValueMissingNumber) { + ExpectHasErrors( + "enum TestEnum {\n" + " FOO;\n" + "}\n", + "1:5: Missing numeric value for enum constant.\n"); +} + +// ------------------------------------------------------------------- +// Service errors + +TEST_F(ParseErrorTest, EofInService) { + ExpectHasErrors( + "service TestService {", + "0:21: Reached end of input in service definition (missing '}').\n"); +} + +TEST_F(ParseErrorTest, ServiceMethodPrimitiveParams) { + ExpectHasErrors( + "service TestService {\n" + " rpc Foo(int32) returns (string);\n" + "}\n", + "1:10: Expected message type.\n" + "1:26: Expected message type.\n"); +} + +TEST_F(ParseErrorTest, EofInMethodOptions) { + ExpectHasErrors( + "service TestService {\n" + " rpc Foo(Bar) returns(Bar) {", + "1:29: Reached end of input in method options (missing '}').\n" + "1:29: Reached end of input in service definition (missing '}').\n"); +} + +TEST_F(ParseErrorTest, PrimitiveMethodInput) { + ExpectHasErrors( + "service TestService {\n" + " rpc Foo(int32) returns(Bar);\n" + "}\n", + "1:10: Expected message type.\n"); +} + +TEST_F(ParseErrorTest, MethodOptionTypeError) { + // This used to cause an infinite loop. + ExpectHasErrors( + "message Baz {}\n" + "service Foo {\n" + " rpc Bar(Baz) returns(Baz) { option invalid syntax; }\n" + "}\n", + "2:45: Expected \"=\".\n"); +} + +// ------------------------------------------------------------------- +// Import and package errors + +TEST_F(ParseErrorTest, ImportNotQuoted) { + ExpectHasErrors( + "import foo;\n", + "0:7: Expected a string naming the file to import.\n"); +} + +TEST_F(ParseErrorTest, MultiplePackagesInFile) { + ExpectHasErrors( + "package foo;\n" + "package bar;\n", + "1:0: Multiple package definitions.\n"); +} + +// =================================================================== +// Test that errors detected by DescriptorPool correctly report line and +// column numbers. We have one test for every call to RecordLocation() in +// parser.cc. + +typedef ParserTest ParserValidationErrorTest; + +TEST_F(ParserValidationErrorTest, PackageNameError) { + // Create another file which defines symbol "foo". + FileDescriptorProto other_file; + other_file.set_name("bar.proto"); + other_file.add_message_type()->set_name("foo"); + EXPECT_TRUE(pool_.BuildFile(other_file) != NULL); + + // Now try to define it as a package. 
+ ExpectHasValidationErrors( + "package foo.bar;", + "0:8: \"foo\" is already defined (as something other than a package) " + "in file \"bar.proto\".\n"); +} + +TEST_F(ParserValidationErrorTest, MessageNameError) { + ExpectHasValidationErrors( + "message Foo {}\n" + "message Foo {}\n", + "1:8: \"Foo\" is already defined.\n"); +} + +TEST_F(ParserValidationErrorTest, FieldNameError) { + ExpectHasValidationErrors( + "message Foo {\n" + " optional int32 bar = 1;\n" + " optional int32 bar = 2;\n" + "}\n", + "2:17: \"bar\" is already defined in \"Foo\".\n"); +} + +TEST_F(ParserValidationErrorTest, FieldTypeError) { + ExpectHasValidationErrors( + "message Foo {\n" + " optional Baz bar = 1;\n" + "}\n", + "1:11: \"Baz\" is not defined.\n"); +} + +TEST_F(ParserValidationErrorTest, FieldNumberError) { + ExpectHasValidationErrors( + "message Foo {\n" + " optional int32 bar = 0;\n" + "}\n", + "1:23: Field numbers must be positive integers.\n"); +} + +TEST_F(ParserValidationErrorTest, FieldExtendeeError) { + ExpectHasValidationErrors( + "extend Baz { optional int32 bar = 1; }\n", + "0:7: \"Baz\" is not defined.\n"); +} + +TEST_F(ParserValidationErrorTest, FieldDefaultValueError) { + ExpectHasValidationErrors( + "enum Baz { QUX = 1; }\n" + "message Foo {\n" + " optional Baz bar = 1 [default=NO_SUCH_VALUE];\n" + "}\n", + "2:32: Enum type \"Baz\" has no value named \"NO_SUCH_VALUE\".\n"); +} + +TEST_F(ParserValidationErrorTest, FileOptionNameError) { + ExpectHasValidationErrors( + "option foo = 5;", + "0:7: Option \"foo\" unknown.\n"); +} + +TEST_F(ParserValidationErrorTest, FileOptionValueError) { + ExpectHasValidationErrors( + "option java_outer_classname = 5;", + "0:30: Value must be quoted string for string option " + "\"google.protobuf.FileOptions.java_outer_classname\".\n"); +} + +TEST_F(ParserValidationErrorTest, FieldOptionNameError) { + ExpectHasValidationErrors( + "message Foo {\n" + " optional bool bar = 1 [foo=1];\n" + "}\n", + "1:25: Option \"foo\" unknown.\n"); +} + +TEST_F(ParserValidationErrorTest, FieldOptionValueError) { + ExpectHasValidationErrors( + "message Foo {\n" + " optional int32 bar = 1 [ctype=1];\n" + "}\n", + "1:32: Value must be identifier for enum-valued option " + "\"google.protobuf.FieldOptions.ctype\".\n"); +} + +TEST_F(ParserValidationErrorTest, ExtensionRangeNumberError) { + ExpectHasValidationErrors( + "message Foo {\n" + " extensions 0;\n" + "}\n", + "1:13: Extension numbers must be positive integers.\n"); +} + +TEST_F(ParserValidationErrorTest, EnumNameError) { + ExpectHasValidationErrors( + "enum Foo {A = 1;}\n" + "enum Foo {B = 1;}\n", + "1:5: \"Foo\" is already defined.\n"); +} + +TEST_F(ParserValidationErrorTest, EnumValueNameError) { + ExpectHasValidationErrors( + "enum Foo {\n" + " BAR = 1;\n" + " BAR = 1;\n" + "}\n", + "2:2: \"BAR\" is already defined.\n"); +} + +TEST_F(ParserValidationErrorTest, ServiceNameError) { + ExpectHasValidationErrors( + "service Foo {}\n" + "service Foo {}\n", + "1:8: \"Foo\" is already defined.\n"); +} + +TEST_F(ParserValidationErrorTest, MethodNameError) { + ExpectHasValidationErrors( + "message Baz {}\n" + "service Foo {\n" + " rpc Bar(Baz) returns(Baz);\n" + " rpc Bar(Baz) returns(Baz);\n" + "}\n", + "3:6: \"Bar\" is already defined in \"Foo\".\n"); +} + +TEST_F(ParserValidationErrorTest, MethodInputTypeError) { + ExpectHasValidationErrors( + "message Baz {}\n" + "service Foo {\n" + " rpc Bar(Qux) returns(Baz);\n" + "}\n", + "2:10: \"Qux\" is not defined.\n"); +} + +TEST_F(ParserValidationErrorTest, MethodOutputTypeError) { + 
ExpectHasValidationErrors( + "message Baz {}\n" + "service Foo {\n" + " rpc Bar(Baz) returns(Qux);\n" + "}\n", + "2:23: \"Qux\" is not defined.\n"); +} + +// =================================================================== +// Test that the output from FileDescriptor::DebugString() (and all other +// descriptor types) is parseable, and results in the same Descriptor +// definitions again after parsing (note, however, that the order of messages +// cannot be guaranteed to be the same) + +typedef ParserTest ParseDecriptorDebugTest; + +class CompareDescriptorNames { + public: + bool operator()(const DescriptorProto* left, const DescriptorProto* right) { + return left->name() < right->name(); + } +}; + +// Sorts nested DescriptorProtos of a DescriptorProto, by name. +void SortMessages(DescriptorProto *descriptor_proto) { + int size = descriptor_proto->nested_type_size(); + // recursively sort; we can't guarantee the order of nested messages either + for (int i = 0; i < size; ++i) { + SortMessages(descriptor_proto->mutable_nested_type(i)); + } + DescriptorProto **data = + descriptor_proto->mutable_nested_type()->mutable_data(); + sort(data, data + size, CompareDescriptorNames()); +} + +// Sorts DescriptorProtos belonging to a FileDescriptorProto, by name. +void SortMessages(FileDescriptorProto *file_descriptor_proto) { + int size = file_descriptor_proto->message_type_size(); + // recursively sort; we can't guarantee the order of nested messages either + for (int i = 0; i < size; ++i) { + SortMessages(file_descriptor_proto->mutable_message_type(i)); + } + DescriptorProto **data = + file_descriptor_proto->mutable_message_type()->mutable_data(); + sort(data, data + size, CompareDescriptorNames()); +} + +TEST_F(ParseDecriptorDebugTest, TestAllDescriptorTypes) { + const FileDescriptor* original_file = + protobuf_unittest::TestAllTypes::descriptor()->file(); + FileDescriptorProto expected; + original_file->CopyTo(&expected); + + // Get the DebugString of the unittest.proto FileDescriptor, which includes + // all other descriptor types + string debug_string = original_file->DebugString(); + + // Parse the debug string + SetupParser(debug_string.c_str()); + FileDescriptorProto parsed; + parser_->Parse(input_.get(), &parsed); + EXPECT_EQ(io::Tokenizer::TYPE_END, input_->current().type); + ASSERT_EQ("", error_collector_.text_); + + // We now have a FileDescriptorProto, but to compare with the expected we + // need to link to a FileDescriptor, then output back to a proto. We'll + // also need to give it the same name as the original. + parsed.set_name("google/protobuf/unittest.proto"); + // We need the imported dependency before we can build our parsed proto + const FileDescriptor* import = + protobuf_unittest_import::ImportMessage::descriptor()->file(); + FileDescriptorProto import_proto; + import->CopyTo(&import_proto); + ASSERT_TRUE(pool_.BuildFile(import_proto) != NULL); + const FileDescriptor* actual = pool_.BuildFile(parsed); + parsed.Clear(); + actual->CopyTo(&parsed); + ASSERT_TRUE(actual != NULL); + + // The messages might be in different orders, making them hard to compare. + // So, sort the messages in the descriptor protos (including nested messages, + // recursively).
+ SortMessages(&expected); + SortMessages(&parsed); + + // I really wanted to use StringDiff here for the debug output on fail, + // but the strings are too long for it, and if I increase its max size, + // we get a memory allocation failure :( + EXPECT_EQ(expected.DebugString(), parsed.DebugString()); +} + +// =================================================================== +// SourceCodeInfo tests. + +// Follows a path -- as defined by SourceCodeInfo.Location.path -- from a +// message to a particular sub-field. +// * If the target is itself a message, sets *output_message to point at it, +// *output_field to NULL, and *output_index to -1. +// * Otherwise, if the target is an element of a repeated field, sets +// *output_message to the containing message, *output_field to the descriptor +// of the field, and *output_index to the index of the element. +// * Otherwise, the target is a field (possibly a repeated field, but not any +// one element). Sets *output_message to the containing message, +// *output_field to the descriptor of the field, and *output_index to -1. +// Returns true if the path was valid, false otherwise. A gTest failure is +// recorded before returning false. +bool FollowPath(const Message& root, + const int* path_begin, const int* path_end, + const Message** output_message, + const FieldDescriptor** output_field, + int* output_index) { + if (path_begin == path_end) { + // Path refers to this whole message. + *output_message = &root; + *output_field = NULL; + *output_index = -1; + return true; + } + + const Descriptor* descriptor = root.GetDescriptor(); + const Reflection* reflection = root.GetReflection(); + + const FieldDescriptor* field = descriptor->FindFieldByNumber(*path_begin); + + if (field == NULL) { + ADD_FAILURE() << descriptor->name() << " has no field number: " + << *path_begin; + return false; + } + + ++path_begin; + + if (field->is_repeated()) { + if (path_begin == path_end) { + // Path refers to the whole repeated field. + *output_message = &root; + *output_field = field; + *output_index = -1; + return true; + } + + int index = *path_begin++; + int size = reflection->FieldSize(root, field); + + if (index >= size) { + ADD_FAILURE() << descriptor->name() << "." << field->name() + << " has size " << size << ", but path contained index: " + << index; + return false; + } + + if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + // Descend into child message. + const Message& child = reflection->GetRepeatedMessage(root, field, index); + return FollowPath(child, path_begin, path_end, + output_message, output_field, output_index); + } else if (path_begin == path_end) { + // Path refers to this element. + *output_message = &root; + *output_field = field; + *output_index = index; + return true; + } else { + ADD_FAILURE() << descriptor->name() << "." << field->name() + << " is not a message; cannot descend into it."; + return false; + } + } else { + if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + const Message& child = reflection->GetMessage(root, field); + return FollowPath(child, path_begin, path_end, + output_message, output_field, output_index); + } else if (path_begin == path_end) { + // Path refers to this field. + *output_message = &root; + *output_field = field; + *output_index = -1; + return true; + } else { + ADD_FAILURE() << descriptor->name() << "." << field->name() + << " is not a message; cannot descend into it."; + return false; + } + } +} + +// Split some text on line breaks. 
The line breaks are retained in the output, +// so each line (except the last) ends with a '\n', and the lines can be +// concatenated to produce the original text. +// +// I couldn't find the proper string utility function for this. Our +// split-on-delimiter functions don't include the delimiter in the output. +void SplitLines(const string& text, vector<string>* lines) { + string::size_type pos = 0; + + while (pos != string::npos) { + string::size_type last_pos = pos; + pos = text.find_first_of('\n', pos); + if (pos != string::npos) ++pos; + lines->push_back(text.substr(last_pos, pos - last_pos)); + } +} + +// Look for the given tags in the given text and construct a span (as defined +// by SourceCodeInfo.Location.span) from them. E.g. for text like: +// /*a*/message /*b*/Foo/*c*/ {}/*d*/ +// There are four tags: "a", "b", "c", and "d". The constructed span starts +// immediately after the start tag's trailing '/' and ends immediately before +// the end tag's leading '/'. +void MakeExpectedSpan(const vector<string>& lines, + const string& start_tag, const string& end_tag, + RepeatedField<int>* output) { + string start_comment = "/*" + start_tag + "*/"; + string end_comment = "/*" + end_tag + "*/"; + + int start_line = -1; + int start_column = -1; + for (int i = 0; i < lines.size(); i++) { + string::size_type pos = lines[i].find(start_comment); + if (pos != string::npos) { + start_line = i; + start_column = pos + start_comment.size(); + break; + } + } + ASSERT_NE(start_line, -1) + << "Tag \"" << start_comment << "\" not found in text."; + + int end_line = -1; + int end_column = -1; + for (int i = start_line; i < lines.size(); i++) { + string::size_type pos = lines[i].find(end_comment); + if (pos != string::npos) { + end_line = i; + end_column = pos; + break; + } + } + ASSERT_NE(end_line, -1) + << "Tag \"" << end_comment << "\" not found in text."; + + output->Add(start_line); + output->Add(start_column); + if (end_line != start_line) output->Add(end_line); + output->Add(end_column); +} + +// Check if two spans are equal. +bool CompareSpans(const RepeatedField<int>& span1, + const RepeatedField<int>& span2) { + if (span1.size() != span2.size()) return false; + for (int i = 0; i < span1.size(); i++) { + if (span1.Get(i) != span2.Get(i)) return false; + } + return true; +} + +// Test fixture for source info tests, which check that source locations are +// recorded correctly in FileDescriptorProto.source_code_info.location. +class SourceInfoTest : public ParserTest { + protected: + // The parsed file (initialized by Parse()). + FileDescriptorProto file_; + + // Parse the given text as a .proto file and populate the spans_ map with + // all the source location spans in its SourceCodeInfo table.
+ bool Parse(const char* text) { + SetupParser(text); + SplitLines(text, &lines_); + if (!parser_->Parse(input_.get(), &file_)) { + return false; + } + + const SourceCodeInfo& source_info = file_.source_code_info(); + for (int i = 0; i < source_info.location_size(); i++) { + const SourceCodeInfo::Location& location = source_info.location(i); + const Message* descriptor_proto = NULL; + const FieldDescriptor* field = NULL; + int index = 0; + if (!FollowPath(file_, location.path().begin(), location.path().end(), + &descriptor_proto, &field, &index)) { + return false; + } + + spans_.insert(make_pair(SpanKey(*descriptor_proto, field, index), + &location)); + } + + return true; + } + + virtual void TearDown() { + EXPECT_TRUE(spans_.empty()) + << "Forgot to call HasSpan() for:\n" + << spans_.begin()->second->DebugString(); + } + + // ----------------------------------------------------------------- + // HasSpan() checks that the span of source code delimited by the given + // tags (comments) correspond via the SourceCodeInfo table to the given + // part of the FileDescriptorProto. (If unclear, look at the actual tests; + // it should quickly become obvious.) + + bool HasSpan(const char* start_tag, const char* end_tag, + const Message& descriptor_proto) { + return HasSpan(start_tag, end_tag, descriptor_proto, NULL, -1); + } + + bool HasSpan(const char* start_tag, const char* end_tag, + const Message& descriptor_proto, const string& field_name) { + return HasSpan(start_tag, end_tag, descriptor_proto, field_name, -1); + } + + bool HasSpan(const char* start_tag, const char* end_tag, + const Message& descriptor_proto, const string& field_name, + int index) { + const FieldDescriptor* field = + descriptor_proto.GetDescriptor()->FindFieldByName(field_name); + if (field == NULL) { + ADD_FAILURE() << descriptor_proto.GetDescriptor()->name() + << " has no such field: " << field_name; + return false; + } + + return HasSpan(start_tag, end_tag, descriptor_proto, field, index); + } + + bool HasSpan(const Message& descriptor_proto) { + return HasSpan(NULL, NULL, descriptor_proto, NULL, -1); + } + + bool HasSpan(const Message& descriptor_proto, const string& field_name) { + return HasSpan(NULL, NULL, descriptor_proto, field_name, -1); + } + + bool HasSpan(const Message& descriptor_proto, const string& field_name, + int index) { + return HasSpan(NULL, NULL, descriptor_proto, field_name, index); + } + + bool HasSpan(const char* start_tag, const char* end_tag, + const Message& descriptor_proto, const FieldDescriptor* field, + int index) { + pair range = + spans_.equal_range(SpanKey(descriptor_proto, field, index)); + + if (start_tag == NULL) { + if (range.first == range.second) { + return false; + } else { + spans_.erase(range.first); + return true; + } + } else { + RepeatedField expected_span; + MakeExpectedSpan(lines_, start_tag, end_tag, &expected_span); + + for (SpanMap::iterator iter = range.first; iter != range.second; ++iter) { + if (CompareSpans(expected_span, iter->second->span())) { + spans_.erase(iter); + return true; + } + } + + return false; + } + } + + private: + struct SpanKey { + const Message* descriptor_proto; + const FieldDescriptor* field; + int index; + + inline SpanKey() {} + inline SpanKey(const Message& descriptor_proto, + const FieldDescriptor* field, + int index) + : descriptor_proto(&descriptor_proto), field(field), index(index) {} + + inline bool operator<(const SpanKey& other) const { + if (descriptor_proto < other.descriptor_proto) return true; + if (descriptor_proto > 
other.descriptor_proto) return false; + if (field < other.field) return true; + if (field > other.field) return false; + return index < other.index; + } + }; + + typedef multimap SpanMap; + SpanMap spans_; + vector lines_; +}; + +TEST_F(SourceInfoTest, BasicFileDecls) { + EXPECT_TRUE(Parse( + "/*a*/syntax = \"proto2\";\n" + "package /*b*/foo.bar/*c*/;\n" + "import /*d*/\"baz.proto\"/*e*/;\n" + "import /*f*/\"qux.proto\"/*g*/;/*h*/\n" + "// comment ignored\n")); + + EXPECT_TRUE(HasSpan("a", "h", file_)); + EXPECT_TRUE(HasSpan("b", "c", file_, "package")); + EXPECT_TRUE(HasSpan("d", "e", file_, "dependency", 0)); + EXPECT_TRUE(HasSpan("f", "g", file_, "dependency", 1)); +} + +TEST_F(SourceInfoTest, Messages) { + EXPECT_TRUE(Parse( + "/*a*/message /*b*/Foo/*c*/ {}/*d*/\n" + "/*e*/message /*f*/Bar/*g*/ {}/*h*/\n")); + + EXPECT_TRUE(HasSpan("a", "d", file_.message_type(0))); + EXPECT_TRUE(HasSpan("b", "c", file_.message_type(0), "name")); + EXPECT_TRUE(HasSpan("e", "h", file_.message_type(1))); + EXPECT_TRUE(HasSpan("f", "g", file_.message_type(1), "name")); + + // Ignore these. + EXPECT_TRUE(HasSpan(file_)); +} + +TEST_F(SourceInfoTest, Fields) { + EXPECT_TRUE(Parse( + "message Foo {\n" + " /*a*/optional/*b*/ /*c*/int32/*d*/ /*e*/bar/*f*/ = /*g*/1/*h*/;/*i*/\n" + " /*j*/repeated/*k*/ /*l*/X.Y/*m*/ /*n*/baz/*o*/ = /*p*/2/*q*/;/*r*/\n" + "}\n")); + + const FieldDescriptorProto& field1 = file_.message_type(0).field(0); + const FieldDescriptorProto& field2 = file_.message_type(0).field(1); + + EXPECT_TRUE(HasSpan("a", "i", field1)); + EXPECT_TRUE(HasSpan("a", "b", field1, "label")); + EXPECT_TRUE(HasSpan("c", "d", field1, "type")); + EXPECT_TRUE(HasSpan("e", "f", field1, "name")); + EXPECT_TRUE(HasSpan("g", "h", field1, "number")); + + EXPECT_TRUE(HasSpan("j", "r", field2)); + EXPECT_TRUE(HasSpan("j", "k", field2, "label")); + EXPECT_TRUE(HasSpan("l", "m", field2, "type_name")); + EXPECT_TRUE(HasSpan("n", "o", field2, "name")); + EXPECT_TRUE(HasSpan("p", "q", field2, "number")); + + // Ignore these. + EXPECT_TRUE(HasSpan(file_)); + EXPECT_TRUE(HasSpan(file_.message_type(0))); + EXPECT_TRUE(HasSpan(file_.message_type(0), "name")); +} + +TEST_F(SourceInfoTest, Extensions) { + EXPECT_TRUE(Parse( + "/*a*/extend /*b*/Foo/*c*/ {\n" + " /*d*/optional/*e*/ int32 bar = 1;/*f*/\n" + " /*g*/repeated/*h*/ X.Y baz = 2;/*i*/\n" + "}/*j*/\n" + "/*k*/extend /*l*/Bar/*m*/ {\n" + " /*n*/optional int32 qux = 1;/*o*/\n" + "}/*p*/\n")); + + const FieldDescriptorProto& field1 = file_.extension(0); + const FieldDescriptorProto& field2 = file_.extension(1); + const FieldDescriptorProto& field3 = file_.extension(2); + + EXPECT_TRUE(HasSpan("a", "j", file_, "extension")); + EXPECT_TRUE(HasSpan("k", "p", file_, "extension")); + + EXPECT_TRUE(HasSpan("d", "f", field1)); + EXPECT_TRUE(HasSpan("d", "e", field1, "label")); + EXPECT_TRUE(HasSpan("b", "c", field1, "extendee")); + + EXPECT_TRUE(HasSpan("g", "i", field2)); + EXPECT_TRUE(HasSpan("g", "h", field2, "label")); + EXPECT_TRUE(HasSpan("b", "c", field2, "extendee")); + + EXPECT_TRUE(HasSpan("n", "o", field3)); + EXPECT_TRUE(HasSpan("l", "m", field3, "extendee")); + + // Ignore these. 
+ EXPECT_TRUE(HasSpan(file_)); + EXPECT_TRUE(HasSpan(field1, "type")); + EXPECT_TRUE(HasSpan(field1, "name")); + EXPECT_TRUE(HasSpan(field1, "number")); + EXPECT_TRUE(HasSpan(field2, "type_name")); + EXPECT_TRUE(HasSpan(field2, "name")); + EXPECT_TRUE(HasSpan(field2, "number")); + EXPECT_TRUE(HasSpan(field3, "label")); + EXPECT_TRUE(HasSpan(field3, "type")); + EXPECT_TRUE(HasSpan(field3, "name")); + EXPECT_TRUE(HasSpan(field3, "number")); +} + +TEST_F(SourceInfoTest, NestedExtensions) { + EXPECT_TRUE(Parse( + "message Message {\n" + " /*a*/extend /*b*/Foo/*c*/ {\n" + " /*d*/optional/*e*/ int32 bar = 1;/*f*/\n" + " /*g*/repeated/*h*/ X.Y baz = 2;/*i*/\n" + " }/*j*/\n" + " /*k*/extend /*l*/Bar/*m*/ {\n" + " /*n*/optional int32 qux = 1;/*o*/\n" + " }/*p*/\n" + "}\n")); + + const FieldDescriptorProto& field1 = file_.message_type(0).extension(0); + const FieldDescriptorProto& field2 = file_.message_type(0).extension(1); + const FieldDescriptorProto& field3 = file_.message_type(0).extension(2); + + EXPECT_TRUE(HasSpan("a", "j", file_.message_type(0), "extension")); + EXPECT_TRUE(HasSpan("k", "p", file_.message_type(0), "extension")); + + EXPECT_TRUE(HasSpan("d", "f", field1)); + EXPECT_TRUE(HasSpan("d", "e", field1, "label")); + EXPECT_TRUE(HasSpan("b", "c", field1, "extendee")); + + EXPECT_TRUE(HasSpan("g", "i", field2)); + EXPECT_TRUE(HasSpan("g", "h", field2, "label")); + EXPECT_TRUE(HasSpan("b", "c", field2, "extendee")); + + EXPECT_TRUE(HasSpan("n", "o", field3)); + EXPECT_TRUE(HasSpan("l", "m", field3, "extendee")); + + // Ignore these. + EXPECT_TRUE(HasSpan(file_)); + EXPECT_TRUE(HasSpan(file_.message_type(0))); + EXPECT_TRUE(HasSpan(file_.message_type(0), "name")); + EXPECT_TRUE(HasSpan(field1, "type")); + EXPECT_TRUE(HasSpan(field1, "name")); + EXPECT_TRUE(HasSpan(field1, "number")); + EXPECT_TRUE(HasSpan(field2, "type_name")); + EXPECT_TRUE(HasSpan(field2, "name")); + EXPECT_TRUE(HasSpan(field2, "number")); + EXPECT_TRUE(HasSpan(field3, "label")); + EXPECT_TRUE(HasSpan(field3, "type")); + EXPECT_TRUE(HasSpan(field3, "name")); + EXPECT_TRUE(HasSpan(field3, "number")); +} + +TEST_F(SourceInfoTest, ExtensionRanges) { + EXPECT_TRUE(Parse( + "message Message {\n" + " /*a*/extensions /*b*/1/*c*/ to /*d*/4/*e*/, /*f*/6/*g*/;/*h*/\n" + " /*i*/extensions /*j*/8/*k*/ to /*l*/max/*m*/;/*n*/\n" + "}\n")); + + const DescriptorProto::ExtensionRange& range1 = + file_.message_type(0).extension_range(0); + const DescriptorProto::ExtensionRange& range2 = + file_.message_type(0).extension_range(1); + const DescriptorProto::ExtensionRange& range3 = + file_.message_type(0).extension_range(2); + + EXPECT_TRUE(HasSpan("a", "h", file_.message_type(0), "extension_range")); + EXPECT_TRUE(HasSpan("i", "n", file_.message_type(0), "extension_range")); + + EXPECT_TRUE(HasSpan("b", "e", range1)); + EXPECT_TRUE(HasSpan("b", "c", range1, "start")); + EXPECT_TRUE(HasSpan("d", "e", range1, "end")); + + EXPECT_TRUE(HasSpan("f", "g", range2)); + EXPECT_TRUE(HasSpan("f", "g", range2, "start")); + EXPECT_TRUE(HasSpan("f", "g", range2, "end")); + + EXPECT_TRUE(HasSpan("j", "m", range3)); + EXPECT_TRUE(HasSpan("j", "k", range3, "start")); + EXPECT_TRUE(HasSpan("l", "m", range3, "end")); + + // Ignore these. 
+ EXPECT_TRUE(HasSpan(file_)); + EXPECT_TRUE(HasSpan(file_.message_type(0))); + EXPECT_TRUE(HasSpan(file_.message_type(0), "name")); +} + +TEST_F(SourceInfoTest, NestedMessages) { + EXPECT_TRUE(Parse( + "message Foo {\n" + " /*a*/message /*b*/Bar/*c*/ {\n" + " /*d*/message /*e*/Baz/*f*/ {}/*g*/\n" + " }/*h*/\n" + " /*i*/message /*j*/Qux/*k*/ {}/*l*/\n" + "}\n")); + + const DescriptorProto& bar = file_.message_type(0).nested_type(0); + const DescriptorProto& baz = bar.nested_type(0); + const DescriptorProto& qux = file_.message_type(0).nested_type(1); + + EXPECT_TRUE(HasSpan("a", "h", bar)); + EXPECT_TRUE(HasSpan("b", "c", bar, "name")); + EXPECT_TRUE(HasSpan("d", "g", baz)); + EXPECT_TRUE(HasSpan("e", "f", baz, "name")); + EXPECT_TRUE(HasSpan("i", "l", qux)); + EXPECT_TRUE(HasSpan("j", "k", qux, "name")); + + // Ignore these. + EXPECT_TRUE(HasSpan(file_)); + EXPECT_TRUE(HasSpan(file_.message_type(0))); + EXPECT_TRUE(HasSpan(file_.message_type(0), "name")); +} + +TEST_F(SourceInfoTest, Groups) { + EXPECT_TRUE(Parse( + "message Foo {\n" + " message Bar {}\n" + " /*a*/optional/*b*/ /*c*/group/*d*/ /*e*/Baz/*f*/ = /*g*/1/*h*/ {\n" + " /*i*/message Qux {}/*j*/\n" + " }/*k*/\n" + "}\n")); + + const DescriptorProto& bar = file_.message_type(0).nested_type(0); + const DescriptorProto& baz = file_.message_type(0).nested_type(1); + const DescriptorProto& qux = baz.nested_type(0); + const FieldDescriptorProto& field = file_.message_type(0).field(0); + + EXPECT_TRUE(HasSpan("a", "k", field)); + EXPECT_TRUE(HasSpan("a", "b", field, "label")); + EXPECT_TRUE(HasSpan("c", "d", field, "type")); + EXPECT_TRUE(HasSpan("e", "f", field, "name")); + EXPECT_TRUE(HasSpan("e", "f", field, "type_name")); + EXPECT_TRUE(HasSpan("g", "h", field, "number")); + + EXPECT_TRUE(HasSpan("a", "k", baz)); + EXPECT_TRUE(HasSpan("e", "f", baz, "name")); + EXPECT_TRUE(HasSpan("i", "j", qux)); + + // Ignore these. + EXPECT_TRUE(HasSpan(file_)); + EXPECT_TRUE(HasSpan(file_.message_type(0))); + EXPECT_TRUE(HasSpan(file_.message_type(0), "name")); + EXPECT_TRUE(HasSpan(bar)); + EXPECT_TRUE(HasSpan(bar, "name")); + EXPECT_TRUE(HasSpan(qux, "name")); +} + +TEST_F(SourceInfoTest, Enums) { + EXPECT_TRUE(Parse( + "/*a*/enum /*b*/Foo/*c*/ {}/*d*/\n" + "/*e*/enum /*f*/Bar/*g*/ {}/*h*/\n")); + + EXPECT_TRUE(HasSpan("a", "d", file_.enum_type(0))); + EXPECT_TRUE(HasSpan("b", "c", file_.enum_type(0), "name")); + EXPECT_TRUE(HasSpan("e", "h", file_.enum_type(1))); + EXPECT_TRUE(HasSpan("f", "g", file_.enum_type(1), "name")); + + // Ignore these. + EXPECT_TRUE(HasSpan(file_)); +} + +TEST_F(SourceInfoTest, EnumValues) { + EXPECT_TRUE(Parse( + "enum Foo {\n" + " /*a*/BAR/*b*/ = /*c*/1/*d*/;/*e*/\n" + " /*f*/BAZ/*g*/ = /*h*/2/*i*/;/*j*/\n" + "}")); + + const EnumValueDescriptorProto& bar = file_.enum_type(0).value(0); + const EnumValueDescriptorProto& baz = file_.enum_type(0).value(1); + + EXPECT_TRUE(HasSpan("a", "e", bar)); + EXPECT_TRUE(HasSpan("a", "b", bar, "name")); + EXPECT_TRUE(HasSpan("c", "d", bar, "number")); + EXPECT_TRUE(HasSpan("f", "j", baz)); + EXPECT_TRUE(HasSpan("f", "g", baz, "name")); + EXPECT_TRUE(HasSpan("h", "i", baz, "number")); + + // Ignore these. 
+ EXPECT_TRUE(HasSpan(file_)); + EXPECT_TRUE(HasSpan(file_.enum_type(0))); + EXPECT_TRUE(HasSpan(file_.enum_type(0), "name")); +} + +TEST_F(SourceInfoTest, NestedEnums) { + EXPECT_TRUE(Parse( + "message Foo {\n" + " /*a*/enum /*b*/Bar/*c*/ {}/*d*/\n" + " /*e*/enum /*f*/Baz/*g*/ {}/*h*/\n" + "}\n")); + + const EnumDescriptorProto& bar = file_.message_type(0).enum_type(0); + const EnumDescriptorProto& baz = file_.message_type(0).enum_type(1); + + EXPECT_TRUE(HasSpan("a", "d", bar)); + EXPECT_TRUE(HasSpan("b", "c", bar, "name")); + EXPECT_TRUE(HasSpan("e", "h", baz)); + EXPECT_TRUE(HasSpan("f", "g", baz, "name")); + + // Ignore these. + EXPECT_TRUE(HasSpan(file_)); + EXPECT_TRUE(HasSpan(file_.message_type(0))); + EXPECT_TRUE(HasSpan(file_.message_type(0), "name")); +} + +TEST_F(SourceInfoTest, Services) { + EXPECT_TRUE(Parse( + "/*a*/service /*b*/Foo/*c*/ {}/*d*/\n" + "/*e*/service /*f*/Bar/*g*/ {}/*h*/\n")); + + EXPECT_TRUE(HasSpan("a", "d", file_.service(0))); + EXPECT_TRUE(HasSpan("b", "c", file_.service(0), "name")); + EXPECT_TRUE(HasSpan("e", "h", file_.service(1))); + EXPECT_TRUE(HasSpan("f", "g", file_.service(1), "name")); + + // Ignore these. + EXPECT_TRUE(HasSpan(file_)); +} + +TEST_F(SourceInfoTest, Methods) { + EXPECT_TRUE(Parse( + "service Foo {\n" + " /*a*/rpc /*b*/Bar/*c*/(/*d*/X/*e*/) returns(/*f*/Y/*g*/);/*h*/" + " /*i*/rpc /*j*/Baz/*k*/(/*l*/Z/*m*/) returns(/*n*/W/*o*/);/*p*/" + "}")); + + const MethodDescriptorProto& bar = file_.service(0).method(0); + const MethodDescriptorProto& baz = file_.service(0).method(1); + + EXPECT_TRUE(HasSpan("a", "h", bar)); + EXPECT_TRUE(HasSpan("b", "c", bar, "name")); + EXPECT_TRUE(HasSpan("d", "e", bar, "input_type")); + EXPECT_TRUE(HasSpan("f", "g", bar, "output_type")); + + EXPECT_TRUE(HasSpan("i", "p", baz)); + EXPECT_TRUE(HasSpan("j", "k", baz, "name")); + EXPECT_TRUE(HasSpan("l", "m", baz, "input_type")); + EXPECT_TRUE(HasSpan("n", "o", baz, "output_type")); + + // Ignore these. 
+ EXPECT_TRUE(HasSpan(file_)); + EXPECT_TRUE(HasSpan(file_.service(0))); + EXPECT_TRUE(HasSpan(file_.service(0), "name")); +} + +TEST_F(SourceInfoTest, Options) { + EXPECT_TRUE(Parse( + "/*a*/option /*b*/foo/*c*/./*d*/(/*e*/bar.baz/*f*/)/*g*/ = " + "/*h*/123/*i*/;/*j*/\n" + "/*k*/option qux = /*l*/-123/*m*/;/*n*/\n" + "/*o*/option corge = /*p*/abc/*q*/;/*r*/\n" + "/*s*/option grault = /*t*/'blah'/*u*/;/*v*/\n" + "/*w*/option garply = /*x*/{ yadda yadda }/*y*/;/*z*/\n" + "/*0*/option waldo = /*1*/123.0/*2*/;/*3*/\n" + )); + + const UninterpretedOption& option1 = file_.options().uninterpreted_option(0); + const UninterpretedOption& option2 = file_.options().uninterpreted_option(1); + const UninterpretedOption& option3 = file_.options().uninterpreted_option(2); + const UninterpretedOption& option4 = file_.options().uninterpreted_option(3); + const UninterpretedOption& option5 = file_.options().uninterpreted_option(4); + const UninterpretedOption& option6 = file_.options().uninterpreted_option(5); + + EXPECT_TRUE(HasSpan("a", "j", file_.options())); + EXPECT_TRUE(HasSpan("b", "i", option1)); + EXPECT_TRUE(HasSpan("b", "g", option1, "name")); + EXPECT_TRUE(HasSpan("b", "c", option1.name(0))); + EXPECT_TRUE(HasSpan("b", "c", option1.name(0), "name_part")); + EXPECT_TRUE(HasSpan("d", "g", option1.name(1))); + EXPECT_TRUE(HasSpan("e", "f", option1.name(1), "name_part")); + EXPECT_TRUE(HasSpan("h", "i", option1, "positive_int_value")); + + EXPECT_TRUE(HasSpan("k", "n", file_.options())); + EXPECT_TRUE(HasSpan("l", "m", option2, "negative_int_value")); + + EXPECT_TRUE(HasSpan("o", "r", file_.options())); + EXPECT_TRUE(HasSpan("p", "q", option3, "identifier_value")); + + EXPECT_TRUE(HasSpan("s", "v", file_.options())); + EXPECT_TRUE(HasSpan("t", "u", option4, "string_value")); + + EXPECT_TRUE(HasSpan("w", "z", file_.options())); + EXPECT_TRUE(HasSpan("x", "y", option5, "aggregate_value")); + + EXPECT_TRUE(HasSpan("0", "3", file_.options())); + EXPECT_TRUE(HasSpan("1", "2", option6, "double_value")); + + // Ignore these. + EXPECT_TRUE(HasSpan(file_)); + EXPECT_TRUE(HasSpan(option2)); + EXPECT_TRUE(HasSpan(option3)); + EXPECT_TRUE(HasSpan(option4)); + EXPECT_TRUE(HasSpan(option5)); + EXPECT_TRUE(HasSpan(option6)); + EXPECT_TRUE(HasSpan(option2, "name")); + EXPECT_TRUE(HasSpan(option3, "name")); + EXPECT_TRUE(HasSpan(option4, "name")); + EXPECT_TRUE(HasSpan(option5, "name")); + EXPECT_TRUE(HasSpan(option6, "name")); + EXPECT_TRUE(HasSpan(option2.name(0))); + EXPECT_TRUE(HasSpan(option3.name(0))); + EXPECT_TRUE(HasSpan(option4.name(0))); + EXPECT_TRUE(HasSpan(option5.name(0))); + EXPECT_TRUE(HasSpan(option6.name(0))); + EXPECT_TRUE(HasSpan(option2.name(0), "name_part")); + EXPECT_TRUE(HasSpan(option3.name(0), "name_part")); + EXPECT_TRUE(HasSpan(option4.name(0), "name_part")); + EXPECT_TRUE(HasSpan(option5.name(0), "name_part")); + EXPECT_TRUE(HasSpan(option6.name(0), "name_part")); +} + +TEST_F(SourceInfoTest, ScopedOptions) { + EXPECT_TRUE(Parse( + "message Foo {\n" + " /*a*/option mopt = 1;/*b*/\n" + "}\n" + "enum Bar {\n" + " /*c*/option eopt = 1;/*d*/\n" + "}\n" + "service Baz {\n" + " /*e*/option sopt = 1;/*f*/\n" + " rpc M(X) returns(Y) {\n" + " /*g*/option mopt = 1;/*h*/\n" + " }\n" + "}\n")); + + EXPECT_TRUE(HasSpan("a", "b", file_.message_type(0).options())); + EXPECT_TRUE(HasSpan("c", "d", file_.enum_type(0).options())); + EXPECT_TRUE(HasSpan("e", "f", file_.service(0).options())); + EXPECT_TRUE(HasSpan("g", "h", file_.service(0).method(0).options())); + + // Ignore these. 
+ EXPECT_TRUE(HasSpan(file_)); + EXPECT_TRUE(HasSpan(file_.message_type(0))); + EXPECT_TRUE(HasSpan(file_.message_type(0), "name")); + EXPECT_TRUE(HasSpan(file_.message_type(0).options() + .uninterpreted_option(0))); + EXPECT_TRUE(HasSpan(file_.message_type(0).options() + .uninterpreted_option(0), "name")); + EXPECT_TRUE(HasSpan(file_.message_type(0).options() + .uninterpreted_option(0).name(0))); + EXPECT_TRUE(HasSpan(file_.message_type(0).options() + .uninterpreted_option(0).name(0), "name_part")); + EXPECT_TRUE(HasSpan(file_.message_type(0).options() + .uninterpreted_option(0), "positive_int_value")); + EXPECT_TRUE(HasSpan(file_.enum_type(0))); + EXPECT_TRUE(HasSpan(file_.enum_type(0), "name")); + EXPECT_TRUE(HasSpan(file_.enum_type(0).options() + .uninterpreted_option(0))); + EXPECT_TRUE(HasSpan(file_.enum_type(0).options() + .uninterpreted_option(0), "name")); + EXPECT_TRUE(HasSpan(file_.enum_type(0).options() + .uninterpreted_option(0).name(0))); + EXPECT_TRUE(HasSpan(file_.enum_type(0).options() + .uninterpreted_option(0).name(0), "name_part")); + EXPECT_TRUE(HasSpan(file_.enum_type(0).options() + .uninterpreted_option(0), "positive_int_value")); + EXPECT_TRUE(HasSpan(file_.service(0))); + EXPECT_TRUE(HasSpan(file_.service(0), "name")); + EXPECT_TRUE(HasSpan(file_.service(0).method(0))); + EXPECT_TRUE(HasSpan(file_.service(0).options() + .uninterpreted_option(0))); + EXPECT_TRUE(HasSpan(file_.service(0).options() + .uninterpreted_option(0), "name")); + EXPECT_TRUE(HasSpan(file_.service(0).options() + .uninterpreted_option(0).name(0))); + EXPECT_TRUE(HasSpan(file_.service(0).options() + .uninterpreted_option(0).name(0), "name_part")); + EXPECT_TRUE(HasSpan(file_.service(0).options() + .uninterpreted_option(0), "positive_int_value")); + EXPECT_TRUE(HasSpan(file_.service(0).method(0), "name")); + EXPECT_TRUE(HasSpan(file_.service(0).method(0), "input_type")); + EXPECT_TRUE(HasSpan(file_.service(0).method(0), "output_type")); + EXPECT_TRUE(HasSpan(file_.service(0).method(0).options() + .uninterpreted_option(0))); + EXPECT_TRUE(HasSpan(file_.service(0).method(0).options() + .uninterpreted_option(0), "name")); + EXPECT_TRUE(HasSpan(file_.service(0).method(0).options() + .uninterpreted_option(0).name(0))); + EXPECT_TRUE(HasSpan(file_.service(0).method(0).options() + .uninterpreted_option(0).name(0), "name_part")); + EXPECT_TRUE(HasSpan(file_.service(0).method(0).options() + .uninterpreted_option(0), "positive_int_value")); +} + +TEST_F(SourceInfoTest, FieldOptions) { + // The actual "name = value" pairs are parsed by the same code as for + // top-level options so we won't re-test that -- just make sure that the + // syntax used for field options is understood. + EXPECT_TRUE(Parse( + "message Foo {" + " optional int32 bar = 1 " + "/*a*/[default=/*b*/123/*c*/,/*d*/opt1=123/*e*/," + "/*f*/opt2='hi'/*g*/]/*h*/;" + "}\n" + )); + + const FieldDescriptorProto& field = file_.message_type(0).field(0); + const UninterpretedOption& option1 = field.options().uninterpreted_option(0); + const UninterpretedOption& option2 = field.options().uninterpreted_option(1); + + EXPECT_TRUE(HasSpan("a", "h", field.options())); + EXPECT_TRUE(HasSpan("b", "c", field, "default_value")); + EXPECT_TRUE(HasSpan("d", "e", option1)); + EXPECT_TRUE(HasSpan("f", "g", option2)); + + // Ignore these. 
+ EXPECT_TRUE(HasSpan(file_)); + EXPECT_TRUE(HasSpan(file_.message_type(0))); + EXPECT_TRUE(HasSpan(file_.message_type(0), "name")); + EXPECT_TRUE(HasSpan(field)); + EXPECT_TRUE(HasSpan(field, "label")); + EXPECT_TRUE(HasSpan(field, "type")); + EXPECT_TRUE(HasSpan(field, "name")); + EXPECT_TRUE(HasSpan(field, "number")); + EXPECT_TRUE(HasSpan(option1, "name")); + EXPECT_TRUE(HasSpan(option2, "name")); + EXPECT_TRUE(HasSpan(option1.name(0))); + EXPECT_TRUE(HasSpan(option2.name(0))); + EXPECT_TRUE(HasSpan(option1.name(0), "name_part")); + EXPECT_TRUE(HasSpan(option2.name(0), "name_part")); + EXPECT_TRUE(HasSpan(option1, "positive_int_value")); + EXPECT_TRUE(HasSpan(option2, "string_value")); +} + +TEST_F(SourceInfoTest, EnumValueOptions) { + // The actual "name = value" pairs are parsed by the same code as for + // top-level options so we won't re-test that -- just make sure that the + // syntax used for enum options is understood. + EXPECT_TRUE(Parse( + "enum Foo {" + " BAR = 1 /*a*/[/*b*/opt1=123/*c*/,/*d*/opt2='hi'/*e*/]/*f*/;" + "}\n" + )); + + const EnumValueDescriptorProto& value = file_.enum_type(0).value(0); + const UninterpretedOption& option1 = value.options().uninterpreted_option(0); + const UninterpretedOption& option2 = value.options().uninterpreted_option(1); + + EXPECT_TRUE(HasSpan("a", "f", value.options())); + EXPECT_TRUE(HasSpan("b", "c", option1)); + EXPECT_TRUE(HasSpan("d", "e", option2)); + + // Ignore these. + EXPECT_TRUE(HasSpan(file_)); + EXPECT_TRUE(HasSpan(file_.enum_type(0))); + EXPECT_TRUE(HasSpan(file_.enum_type(0), "name")); + EXPECT_TRUE(HasSpan(value)); + EXPECT_TRUE(HasSpan(value, "name")); + EXPECT_TRUE(HasSpan(value, "number")); + EXPECT_TRUE(HasSpan(option1, "name")); + EXPECT_TRUE(HasSpan(option2, "name")); + EXPECT_TRUE(HasSpan(option1.name(0))); + EXPECT_TRUE(HasSpan(option2.name(0))); + EXPECT_TRUE(HasSpan(option1.name(0), "name_part")); + EXPECT_TRUE(HasSpan(option2.name(0), "name_part")); + EXPECT_TRUE(HasSpan(option1, "positive_int_value")); + EXPECT_TRUE(HasSpan(option2, "string_value")); +} + +// =================================================================== + +} // anonymous namespace + +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/plugin.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/plugin.cc new file mode 100644 index 0000000000..727f942166 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/plugin.cc @@ -0,0 +1,163 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) + +#include + +#include +#include + +#ifdef _WIN32 +#include +#include +#ifndef STDIN_FILENO +#define STDIN_FILENO 0 +#endif +#ifndef STDOUT_FILENO +#define STDOUT_FILENO 1 +#endif +#else +#include +#endif + +#include +#include +#include +#include +#include + + +namespace google { +namespace protobuf { +namespace compiler { + +class GeneratorResponseContext : public GeneratorContext { + public: + GeneratorResponseContext(CodeGeneratorResponse* response, + const vector& parsed_files) + : response_(response), + parsed_files_(parsed_files) {} + virtual ~GeneratorResponseContext() {} + + // implements GeneratorContext -------------------------------------- + + virtual io::ZeroCopyOutputStream* Open(const string& filename) { + CodeGeneratorResponse::File* file = response_->add_file(); + file->set_name(filename); + return new io::StringOutputStream(file->mutable_content()); + } + + virtual io::ZeroCopyOutputStream* OpenForInsert( + const string& filename, const string& insertion_point) { + CodeGeneratorResponse::File* file = response_->add_file(); + file->set_name(filename); + file->set_insertion_point(insertion_point); + return new io::StringOutputStream(file->mutable_content()); + } + + void ListParsedFiles(vector* output) { + *output = parsed_files_; + } + + private: + CodeGeneratorResponse* response_; + const vector& parsed_files_; +}; + +int PluginMain(int argc, char* argv[], const CodeGenerator* generator) { + + if (argc > 1) { + cerr << argv[0] << ": Unknown option: " << argv[1] << endl; + return 1; + } + +#ifdef _WIN32 + _setmode(STDIN_FILENO, _O_BINARY); + _setmode(STDOUT_FILENO, _O_BINARY); +#endif + + CodeGeneratorRequest request; + if (!request.ParseFromFileDescriptor(STDIN_FILENO)) { + cerr << argv[0] << ": protoc sent unparseable request to plugin." << endl; + return 1; + } + + DescriptorPool pool; + for (int i = 0; i < request.proto_file_size(); i++) { + const FileDescriptor* file = pool.BuildFile(request.proto_file(i)); + if (file == NULL) { + // BuildFile() already wrote an error message. 
+      return 1;
+    }
+  }
+
+  vector<const FileDescriptor*> parsed_files;
+  for (int i = 0; i < request.file_to_generate_size(); i++) {
+    parsed_files.push_back(pool.FindFileByName(request.file_to_generate(i)));
+    if (parsed_files.back() == NULL) {
+      cerr << argv[0] << ": protoc asked plugin to generate a file but "
+              "did not provide a descriptor for the file: "
+           << request.file_to_generate(i) << endl;
+      return 1;
+    }
+  }
+
+  CodeGeneratorResponse response;
+  GeneratorResponseContext context(&response, parsed_files);
+
+  for (int i = 0; i < parsed_files.size(); i++) {
+    const FileDescriptor* file = parsed_files[i];
+
+    string error;
+    bool succeeded = generator->Generate(
+        file, request.parameter(), &context, &error);
+
+    if (!succeeded && error.empty()) {
+      error = "Code generator returned false but provided no error "
+              "description.";
+    }
+    if (!error.empty()) {
+      response.set_error(file->name() + ": " + error);
+      break;
+    }
+  }
+
+  if (!response.SerializeToFileDescriptor(STDOUT_FILENO)) {
+    cerr << argv[0] << ": Error writing to stdout." << endl;
+    return 1;
+  }
+
+  return 0;
+}
+
+}  // namespace compiler
+}  // namespace protobuf
+}  // namespace google
diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/plugin.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/plugin.h
new file mode 100644
index 0000000000..7c40333210
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/plugin.h
@@ -0,0 +1,73 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc. All rights reserved.
+// http://code.google.com/p/protobuf/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Author: kenton@google.com (Kenton Varda)
+//
+// Front-end for protoc code generator plugins written in C++.
+// +// To implement a protoc plugin in C++, simply write an implementation of +// CodeGenerator, then create a main() function like: +// int main(int argc, char* argv[]) { +// MyCodeGenerator generator; +// return google::protobuf::compiler::PluginMain(argc, argv, &generator); +// } +// You must link your plugin against libprotobuf and libprotoc. +// +// To get protoc to use the plugin, do one of the following: +// * Place the plugin binary somewhere in the PATH and give it the name +// "protoc-gen-NAME" (replacing "NAME" with the name of your plugin). If you +// then invoke protoc with the parameter --NAME_out=OUT_DIR (again, replace +// "NAME" with your plugin's name), protoc will invoke your plugin to generate +// the output, which will be placed in OUT_DIR. +// * Place the plugin binary anywhere, with any name, and pass the --plugin +// parameter to protoc to direct it to your plugin like so: +// protoc --plugin=protoc-gen-NAME=path/to/mybinary --NAME_out=OUT_DIR +// On Windows, make sure to include the .exe suffix: +// protoc --plugin=protoc-gen-NAME=path/to/mybinary.exe --NAME_out=OUT_DIR + +#ifndef GOOGLE_PROTOBUF_COMPILER_PLUGIN_H__ +#define GOOGLE_PROTOBUF_COMPILER_PLUGIN_H__ + +#include + +namespace google { +namespace protobuf { +namespace compiler { + +class CodeGenerator; // code_generator.h + +// Implements main() for a protoc plugin exposing the given code generator. +LIBPROTOC_EXPORT int PluginMain(int argc, char* argv[], const CodeGenerator* generator); + +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_PLUGIN_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/plugin.pb.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/plugin.pb.cc new file mode 100644 index 0000000000..ad4b4deb0e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/plugin.pb.cc @@ -0,0 +1,1082 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
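The plugin.h header added above is the part of this vendored protobuf tree that third-party code is meant to program against: implement CodeGenerator, hand control to PluginMain(), and protoc drives the stdin/stdout CodeGeneratorRequest/CodeGeneratorResponse exchange that plugin.cc implements. The following is a minimal sketch of such a plugin, for illustration only; it is not part of this patch, and the EchoGenerator class, the ".echo.txt" suffix, and the generator name "echo" are invented here.

// Illustrative sketch only -- not part of the patch. A trivial protoc plugin
// built on the PluginMain() front-end declared in plugin.h above.
#include <string>

#include <google/protobuf/compiler/code_generator.h>
#include <google/protobuf/compiler/plugin.h>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/io/printer.h>
#include <google/protobuf/io/zero_copy_stream.h>

class EchoGenerator : public google::protobuf::compiler::CodeGenerator {
 public:
  virtual bool Generate(const google::protobuf::FileDescriptor* file,
                        const std::string& parameter,
                        google::protobuf::compiler::GeneratorContext* context,
                        std::string* error) const {
    // context->Open() returns a stream backed by a CodeGeneratorResponse.File;
    // the caller owns the stream and must delete it.
    google::protobuf::io::ZeroCopyOutputStream* out =
        context->Open(file->name() + ".echo.txt");
    {
      google::protobuf::io::Printer printer(out, '$');
      printer.Print("package: $pkg$\n", "pkg", file->package());
    }  // The Printer must be destroyed before its underlying stream.
    delete out;
    return true;
  }
};

int main(int argc, char* argv[]) {
  EchoGenerator generator;
  // PluginMain() reads a CodeGeneratorRequest from stdin and writes a
  // CodeGeneratorResponse to stdout, as plugin.cc above implements.
  return google::protobuf::compiler::PluginMain(argc, argv, &generator);
}

Built as an executable named protoc-gen-echo and placed on the PATH, it would be invoked as protoc --echo_out=OUT_DIR foo.proto, following the registration rules described in the plugin.h comment.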
+
+#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION
+#include "google/protobuf/compiler/plugin.pb.h"
+
+#include <algorithm>
+
+#include <google/protobuf/stubs/once.h>
+#include <google/protobuf/io/coded_stream.h>
+#include <google/protobuf/wire_format_lite_inl.h>
+#include <google/protobuf/descriptor.h>
+#include <google/protobuf/reflection_ops.h>
+#include <google/protobuf/wire_format.h>
+// @@protoc_insertion_point(includes)
+
+namespace google {
+namespace protobuf {
+namespace compiler {
+
+namespace {
+
+const ::google::protobuf::Descriptor* CodeGeneratorRequest_descriptor_ = NULL;
+const ::google::protobuf::internal::GeneratedMessageReflection*
+  CodeGeneratorRequest_reflection_ = NULL;
+const ::google::protobuf::Descriptor* CodeGeneratorResponse_descriptor_ = NULL;
+const ::google::protobuf::internal::GeneratedMessageReflection*
+  CodeGeneratorResponse_reflection_ = NULL;
+const ::google::protobuf::Descriptor* CodeGeneratorResponse_File_descriptor_ = NULL;
+const ::google::protobuf::internal::GeneratedMessageReflection*
+  CodeGeneratorResponse_File_reflection_ = NULL;
+
+}  // namespace
+
+
+void protobuf_AssignDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto() {
+  protobuf_AddDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto();
+  const ::google::protobuf::FileDescriptor* file =
+    ::google::protobuf::DescriptorPool::generated_pool()->FindFileByName(
+      "google/protobuf/compiler/plugin.proto");
+  GOOGLE_CHECK(file != NULL);
+  CodeGeneratorRequest_descriptor_ = file->message_type(0);
+  static const int CodeGeneratorRequest_offsets_[3] = {
+    GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CodeGeneratorRequest, file_to_generate_),
+    GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CodeGeneratorRequest, parameter_),
+    GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CodeGeneratorRequest, proto_file_),
+  };
+  CodeGeneratorRequest_reflection_ =
+    new ::google::protobuf::internal::GeneratedMessageReflection(
+      CodeGeneratorRequest_descriptor_,
+      CodeGeneratorRequest::default_instance_,
+      CodeGeneratorRequest_offsets_,
+      GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CodeGeneratorRequest, _has_bits_[0]),
+      GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CodeGeneratorRequest, _unknown_fields_),
+      -1,
+      ::google::protobuf::DescriptorPool::generated_pool(),
+      ::google::protobuf::MessageFactory::generated_factory(),
+      sizeof(CodeGeneratorRequest));
+  CodeGeneratorResponse_descriptor_ = file->message_type(1);
+  static const int CodeGeneratorResponse_offsets_[2] = {
+    GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CodeGeneratorResponse, error_),
+    GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CodeGeneratorResponse, file_),
+  };
+  CodeGeneratorResponse_reflection_ =
+    new ::google::protobuf::internal::GeneratedMessageReflection(
+      CodeGeneratorResponse_descriptor_,
+      CodeGeneratorResponse::default_instance_,
+      CodeGeneratorResponse_offsets_,
+      GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CodeGeneratorResponse, _has_bits_[0]),
+      GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CodeGeneratorResponse, _unknown_fields_),
+      -1,
+      ::google::protobuf::DescriptorPool::generated_pool(),
+      ::google::protobuf::MessageFactory::generated_factory(),
+      sizeof(CodeGeneratorResponse));
+  CodeGeneratorResponse_File_descriptor_ = CodeGeneratorResponse_descriptor_->nested_type(0);
+  static const int CodeGeneratorResponse_File_offsets_[3] = {
+    GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CodeGeneratorResponse_File, name_),
+    GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CodeGeneratorResponse_File, insertion_point_),
+    GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CodeGeneratorResponse_File, content_),
+  };
+  CodeGeneratorResponse_File_reflection_ =
+    new ::google::protobuf::internal::GeneratedMessageReflection(
+
CodeGeneratorResponse_File_descriptor_, + CodeGeneratorResponse_File::default_instance_, + CodeGeneratorResponse_File_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CodeGeneratorResponse_File, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CodeGeneratorResponse_File, _unknown_fields_), + -1, + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(CodeGeneratorResponse_File)); +} + +namespace { + +GOOGLE_PROTOBUF_DECLARE_ONCE(protobuf_AssignDescriptors_once_); +inline void protobuf_AssignDescriptorsOnce() { + ::google::protobuf::GoogleOnceInit(&protobuf_AssignDescriptors_once_, + &protobuf_AssignDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto); +} + +void protobuf_RegisterTypes(const ::std::string&) { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + CodeGeneratorRequest_descriptor_, &CodeGeneratorRequest::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + CodeGeneratorResponse_descriptor_, &CodeGeneratorResponse::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + CodeGeneratorResponse_File_descriptor_, &CodeGeneratorResponse_File::default_instance()); +} + +} // namespace + +void protobuf_ShutdownFile_google_2fprotobuf_2fcompiler_2fplugin_2eproto() { + delete CodeGeneratorRequest::default_instance_; + delete CodeGeneratorRequest_reflection_; + delete CodeGeneratorResponse::default_instance_; + delete CodeGeneratorResponse_reflection_; + delete CodeGeneratorResponse_File::default_instance_; + delete CodeGeneratorResponse_File_reflection_; +} + +void protobuf_AddDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto() { + static bool already_here = false; + if (already_here) return; + already_here = true; + GOOGLE_PROTOBUF_VERIFY_VERSION; + + ::google::protobuf::protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + ::google::protobuf::DescriptorPool::InternalAddGeneratedFile( + "\n%google/protobuf/compiler/plugin.proto\022" + "\030google.protobuf.compiler\032 google/protob" + "uf/descriptor.proto\"}\n\024CodeGeneratorRequ" + "est\022\030\n\020file_to_generate\030\001 \003(\t\022\021\n\tparamet" + "er\030\002 \001(\t\0228\n\nproto_file\030\017 \003(\0132$.google.pr" + "otobuf.FileDescriptorProto\"\252\001\n\025CodeGener" + "atorResponse\022\r\n\005error\030\001 \001(\t\022B\n\004file\030\017 \003(" + "\01324.google.protobuf.compiler.CodeGenerat" + "orResponse.File\032>\n\004File\022\014\n\004name\030\001 \001(\t\022\027\n" + "\017insertion_point\030\002 \001(\t\022\017\n\007content\030\017 \001(\t", 399); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedFile( + "google/protobuf/compiler/plugin.proto", &protobuf_RegisterTypes); + CodeGeneratorRequest::default_instance_ = new CodeGeneratorRequest(); + CodeGeneratorResponse::default_instance_ = new CodeGeneratorResponse(); + CodeGeneratorResponse_File::default_instance_ = new CodeGeneratorResponse_File(); + CodeGeneratorRequest::default_instance_->InitAsDefaultInstance(); + CodeGeneratorResponse::default_instance_->InitAsDefaultInstance(); + CodeGeneratorResponse_File::default_instance_->InitAsDefaultInstance(); + ::google::protobuf::internal::OnShutdown(&protobuf_ShutdownFile_google_2fprotobuf_2fcompiler_2fplugin_2eproto); +} + +// Force AddDescriptors() to be called at static initialization time. 
+struct StaticDescriptorInitializer_google_2fprotobuf_2fcompiler_2fplugin_2eproto { + StaticDescriptorInitializer_google_2fprotobuf_2fcompiler_2fplugin_2eproto() { + protobuf_AddDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); + } +} static_descriptor_initializer_google_2fprotobuf_2fcompiler_2fplugin_2eproto_; + + +// =================================================================== + +#ifndef _MSC_VER +const int CodeGeneratorRequest::kFileToGenerateFieldNumber; +const int CodeGeneratorRequest::kParameterFieldNumber; +const int CodeGeneratorRequest::kProtoFileFieldNumber; +#endif // !_MSC_VER + +CodeGeneratorRequest::CodeGeneratorRequest() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void CodeGeneratorRequest::InitAsDefaultInstance() { +} + +CodeGeneratorRequest::CodeGeneratorRequest(const CodeGeneratorRequest& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void CodeGeneratorRequest::SharedCtor() { + _cached_size_ = 0; + parameter_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +CodeGeneratorRequest::~CodeGeneratorRequest() { + SharedDtor(); +} + +void CodeGeneratorRequest::SharedDtor() { + if (parameter_ != &::google::protobuf::internal::kEmptyString) { + delete parameter_; + } + if (this != default_instance_) { + } +} + +void CodeGeneratorRequest::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* CodeGeneratorRequest::descriptor() { + protobuf_AssignDescriptorsOnce(); + return CodeGeneratorRequest_descriptor_; +} + +const CodeGeneratorRequest& CodeGeneratorRequest::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); return *default_instance_; +} + +CodeGeneratorRequest* CodeGeneratorRequest::default_instance_ = NULL; + +CodeGeneratorRequest* CodeGeneratorRequest::New() const { + return new CodeGeneratorRequest; +} + +void CodeGeneratorRequest::Clear() { + if (_has_bits_[1 / 32] & (0xffu << (1 % 32))) { + if (has_parameter()) { + if (parameter_ != &::google::protobuf::internal::kEmptyString) { + parameter_->clear(); + } + } + } + file_to_generate_.Clear(); + proto_file_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool CodeGeneratorRequest::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // repeated string file_to_generate = 1; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_file_to_generate: + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->add_file_to_generate())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->file_to_generate(0).data(), this->file_to_generate(0).length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(10)) goto parse_file_to_generate; + if (input->ExpectTag(18)) goto parse_parameter; + break; + } + + // optional string parameter = 2; + case 2: { + if 
(::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_parameter: + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_parameter())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->parameter().data(), this->parameter().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(122)) goto parse_proto_file; + break; + } + + // repeated .google.protobuf.FileDescriptorProto proto_file = 15; + case 15: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_proto_file: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_proto_file())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(122)) goto parse_proto_file; + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void CodeGeneratorRequest::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // repeated string file_to_generate = 1; + for (int i = 0; i < this->file_to_generate_size(); i++) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->file_to_generate(i).data(), this->file_to_generate(i).length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 1, this->file_to_generate(i), output); + } + + // optional string parameter = 2; + if (has_parameter()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->parameter().data(), this->parameter().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 2, this->parameter(), output); + } + + // repeated .google.protobuf.FileDescriptorProto proto_file = 15; + for (int i = 0; i < this->proto_file_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 15, this->proto_file(i), output); + } + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* CodeGeneratorRequest::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // repeated string file_to_generate = 1; + for (int i = 0; i < this->file_to_generate_size(); i++) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->file_to_generate(i).data(), this->file_to_generate(i).length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = ::google::protobuf::internal::WireFormatLite:: + WriteStringToArray(1, this->file_to_generate(i), target); + } + + // optional string parameter = 2; + if (has_parameter()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->parameter().data(), this->parameter().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 2, this->parameter(), target); + } + + // repeated 
.google.protobuf.FileDescriptorProto proto_file = 15; + for (int i = 0; i < this->proto_file_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 15, this->proto_file(i), target); + } + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int CodeGeneratorRequest::ByteSize() const { + int total_size = 0; + + if (_has_bits_[1 / 32] & (0xffu << (1 % 32))) { + // optional string parameter = 2; + if (has_parameter()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->parameter()); + } + + } + // repeated string file_to_generate = 1; + total_size += 1 * this->file_to_generate_size(); + for (int i = 0; i < this->file_to_generate_size(); i++) { + total_size += ::google::protobuf::internal::WireFormatLite::StringSize( + this->file_to_generate(i)); + } + + // repeated .google.protobuf.FileDescriptorProto proto_file = 15; + total_size += 1 * this->proto_file_size(); + for (int i = 0; i < this->proto_file_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->proto_file(i)); + } + + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void CodeGeneratorRequest::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const CodeGeneratorRequest* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void CodeGeneratorRequest::MergeFrom(const CodeGeneratorRequest& from) { + GOOGLE_CHECK_NE(&from, this); + file_to_generate_.MergeFrom(from.file_to_generate_); + proto_file_.MergeFrom(from.proto_file_); + if (from._has_bits_[1 / 32] & (0xffu << (1 % 32))) { + if (from.has_parameter()) { + set_parameter(from.parameter()); + } + } + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void CodeGeneratorRequest::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void CodeGeneratorRequest::CopyFrom(const CodeGeneratorRequest& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool CodeGeneratorRequest::IsInitialized() const { + + for (int i = 0; i < proto_file_size(); i++) { + if (!this->proto_file(i).IsInitialized()) return false; + } + return true; +} + +void CodeGeneratorRequest::Swap(CodeGeneratorRequest* other) { + if (other != this) { + file_to_generate_.Swap(&other->file_to_generate_); + std::swap(parameter_, other->parameter_); + proto_file_.Swap(&other->proto_file_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + } +} + +::google::protobuf::Metadata CodeGeneratorRequest::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = CodeGeneratorRequest_descriptor_; + metadata.reflection = CodeGeneratorRequest_reflection_; + return metadata; +} + + +// =================================================================== + +#ifndef _MSC_VER +const int 
CodeGeneratorResponse_File::kNameFieldNumber; +const int CodeGeneratorResponse_File::kInsertionPointFieldNumber; +const int CodeGeneratorResponse_File::kContentFieldNumber; +#endif // !_MSC_VER + +CodeGeneratorResponse_File::CodeGeneratorResponse_File() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void CodeGeneratorResponse_File::InitAsDefaultInstance() { +} + +CodeGeneratorResponse_File::CodeGeneratorResponse_File(const CodeGeneratorResponse_File& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void CodeGeneratorResponse_File::SharedCtor() { + _cached_size_ = 0; + name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + insertion_point_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + content_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +CodeGeneratorResponse_File::~CodeGeneratorResponse_File() { + SharedDtor(); +} + +void CodeGeneratorResponse_File::SharedDtor() { + if (name_ != &::google::protobuf::internal::kEmptyString) { + delete name_; + } + if (insertion_point_ != &::google::protobuf::internal::kEmptyString) { + delete insertion_point_; + } + if (content_ != &::google::protobuf::internal::kEmptyString) { + delete content_; + } + if (this != default_instance_) { + } +} + +void CodeGeneratorResponse_File::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* CodeGeneratorResponse_File::descriptor() { + protobuf_AssignDescriptorsOnce(); + return CodeGeneratorResponse_File_descriptor_; +} + +const CodeGeneratorResponse_File& CodeGeneratorResponse_File::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); return *default_instance_; +} + +CodeGeneratorResponse_File* CodeGeneratorResponse_File::default_instance_ = NULL; + +CodeGeneratorResponse_File* CodeGeneratorResponse_File::New() const { + return new CodeGeneratorResponse_File; +} + +void CodeGeneratorResponse_File::Clear() { + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (has_name()) { + if (name_ != &::google::protobuf::internal::kEmptyString) { + name_->clear(); + } + } + if (has_insertion_point()) { + if (insertion_point_ != &::google::protobuf::internal::kEmptyString) { + insertion_point_->clear(); + } + } + if (has_content()) { + if (content_ != &::google::protobuf::internal::kEmptyString) { + content_->clear(); + } + } + } + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool CodeGeneratorResponse_File::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional string name = 1; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_name())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(18)) 
goto parse_insertion_point; + break; + } + + // optional string insertion_point = 2; + case 2: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_insertion_point: + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_insertion_point())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->insertion_point().data(), this->insertion_point().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(122)) goto parse_content; + break; + } + + // optional string content = 15; + case 15: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_content: + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_content())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->content().data(), this->content().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void CodeGeneratorResponse_File::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 1, this->name(), output); + } + + // optional string insertion_point = 2; + if (has_insertion_point()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->insertion_point().data(), this->insertion_point().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 2, this->insertion_point(), output); + } + + // optional string content = 15; + if (has_content()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->content().data(), this->content().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 15, this->content(), output); + } + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* CodeGeneratorResponse_File::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->name(), target); + } + + // optional string insertion_point = 2; + if (has_insertion_point()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->insertion_point().data(), this->insertion_point().length(), + 
::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 2, this->insertion_point(), target); + } + + // optional string content = 15; + if (has_content()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->content().data(), this->content().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 15, this->content(), target); + } + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int CodeGeneratorResponse_File::ByteSize() const { + int total_size = 0; + + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + // optional string name = 1; + if (has_name()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->name()); + } + + // optional string insertion_point = 2; + if (has_insertion_point()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->insertion_point()); + } + + // optional string content = 15; + if (has_content()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->content()); + } + + } + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void CodeGeneratorResponse_File::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const CodeGeneratorResponse_File* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void CodeGeneratorResponse_File::MergeFrom(const CodeGeneratorResponse_File& from) { + GOOGLE_CHECK_NE(&from, this); + if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (from.has_name()) { + set_name(from.name()); + } + if (from.has_insertion_point()) { + set_insertion_point(from.insertion_point()); + } + if (from.has_content()) { + set_content(from.content()); + } + } + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void CodeGeneratorResponse_File::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void CodeGeneratorResponse_File::CopyFrom(const CodeGeneratorResponse_File& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool CodeGeneratorResponse_File::IsInitialized() const { + + return true; +} + +void CodeGeneratorResponse_File::Swap(CodeGeneratorResponse_File* other) { + if (other != this) { + std::swap(name_, other->name_); + std::swap(insertion_point_, other->insertion_point_); + std::swap(content_, other->content_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + } +} + +::google::protobuf::Metadata CodeGeneratorResponse_File::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = CodeGeneratorResponse_File_descriptor_; + metadata.reflection = CodeGeneratorResponse_File_reflection_; + return metadata; +} + + +// 
------------------------------------------------------------------- + +#ifndef _MSC_VER +const int CodeGeneratorResponse::kErrorFieldNumber; +const int CodeGeneratorResponse::kFileFieldNumber; +#endif // !_MSC_VER + +CodeGeneratorResponse::CodeGeneratorResponse() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void CodeGeneratorResponse::InitAsDefaultInstance() { +} + +CodeGeneratorResponse::CodeGeneratorResponse(const CodeGeneratorResponse& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void CodeGeneratorResponse::SharedCtor() { + _cached_size_ = 0; + error_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +CodeGeneratorResponse::~CodeGeneratorResponse() { + SharedDtor(); +} + +void CodeGeneratorResponse::SharedDtor() { + if (error_ != &::google::protobuf::internal::kEmptyString) { + delete error_; + } + if (this != default_instance_) { + } +} + +void CodeGeneratorResponse::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* CodeGeneratorResponse::descriptor() { + protobuf_AssignDescriptorsOnce(); + return CodeGeneratorResponse_descriptor_; +} + +const CodeGeneratorResponse& CodeGeneratorResponse::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); return *default_instance_; +} + +CodeGeneratorResponse* CodeGeneratorResponse::default_instance_ = NULL; + +CodeGeneratorResponse* CodeGeneratorResponse::New() const { + return new CodeGeneratorResponse; +} + +void CodeGeneratorResponse::Clear() { + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (has_error()) { + if (error_ != &::google::protobuf::internal::kEmptyString) { + error_->clear(); + } + } + } + file_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool CodeGeneratorResponse::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional string error = 1; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_error())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->error().data(), this->error().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(122)) goto parse_file; + break; + } + + // repeated .google.protobuf.compiler.CodeGeneratorResponse.File file = 15; + case 15: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_file: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_file())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(122)) goto parse_file; + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + 
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void CodeGeneratorResponse::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // optional string error = 1; + if (has_error()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->error().data(), this->error().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 1, this->error(), output); + } + + // repeated .google.protobuf.compiler.CodeGeneratorResponse.File file = 15; + for (int i = 0; i < this->file_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 15, this->file(i), output); + } + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* CodeGeneratorResponse::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // optional string error = 1; + if (has_error()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->error().data(), this->error().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->error(), target); + } + + // repeated .google.protobuf.compiler.CodeGeneratorResponse.File file = 15; + for (int i = 0; i < this->file_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 15, this->file(i), target); + } + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int CodeGeneratorResponse::ByteSize() const { + int total_size = 0; + + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + // optional string error = 1; + if (has_error()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->error()); + } + + } + // repeated .google.protobuf.compiler.CodeGeneratorResponse.File file = 15; + total_size += 1 * this->file_size(); + for (int i = 0; i < this->file_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->file(i)); + } + + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void CodeGeneratorResponse::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const CodeGeneratorResponse* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void CodeGeneratorResponse::MergeFrom(const CodeGeneratorResponse& from) { + GOOGLE_CHECK_NE(&from, this); + file_.MergeFrom(from.file_); + if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (from.has_error()) { + set_error(from.error()); + } + } + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void CodeGeneratorResponse::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) 
return;
+  Clear();
+  MergeFrom(from);
+}
+
+void CodeGeneratorResponse::CopyFrom(const CodeGeneratorResponse& from) {
+  if (&from == this) return;
+  Clear();
+  MergeFrom(from);
+}
+
+bool CodeGeneratorResponse::IsInitialized() const {
+
+  return true;
+}
+
+void CodeGeneratorResponse::Swap(CodeGeneratorResponse* other) {
+  if (other != this) {
+    std::swap(error_, other->error_);
+    file_.Swap(&other->file_);
+    std::swap(_has_bits_[0], other->_has_bits_[0]);
+    _unknown_fields_.Swap(&other->_unknown_fields_);
+    std::swap(_cached_size_, other->_cached_size_);
+  }
+}
+
+::google::protobuf::Metadata CodeGeneratorResponse::GetMetadata() const {
+  protobuf_AssignDescriptorsOnce();
+  ::google::protobuf::Metadata metadata;
+  metadata.descriptor = CodeGeneratorResponse_descriptor_;
+  metadata.reflection = CodeGeneratorResponse_reflection_;
+  return metadata;
+}
+
+
+// @@protoc_insertion_point(namespace_scope)
+
+}  // namespace compiler
+}  // namespace protobuf
+}  // namespace google
+
+// @@protoc_insertion_point(global_scope)
diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/plugin.pb.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/plugin.pb.h
new file mode 100644
index 0000000000..1cf8a55621
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/plugin.pb.h
@@ -0,0 +1,790 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: google/protobuf/compiler/plugin.proto
+
+#ifndef PROTOBUF_google_2fprotobuf_2fcompiler_2fplugin_2eproto__INCLUDED
+#define PROTOBUF_google_2fprotobuf_2fcompiler_2fplugin_2eproto__INCLUDED
+
+#include <string>
+
+#include <google/protobuf/stubs/common.h>
+
+#if GOOGLE_PROTOBUF_VERSION < 2004000
+#error This file was generated by a newer version of protoc which is
+#error incompatible with your Protocol Buffer headers. Please update
+#error your headers.
+#endif
+#if 2004002 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION
+#error This file was generated by an older version of protoc which is
+#error incompatible with your Protocol Buffer headers. Please
+#error regenerate this file with a newer version of protoc.
+#endif
+
+#include <google/protobuf/generated_message_util.h>
+#include <google/protobuf/repeated_field.h>
+#include <google/protobuf/extension_set.h>
+#include <google/protobuf/generated_message_reflection.h>
+#include "google/protobuf/descriptor.pb.h"
+// @@protoc_insertion_point(includes)
+
+namespace google {
+namespace protobuf {
+namespace compiler {
+
+// Internal implementation detail -- do not call these.
+void LIBPROTOC_EXPORT protobuf_AddDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); +void protobuf_AssignDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); +void protobuf_ShutdownFile_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); + +class CodeGeneratorRequest; +class CodeGeneratorResponse; +class CodeGeneratorResponse_File; + +// =================================================================== + +class LIBPROTOC_EXPORT CodeGeneratorRequest : public ::google::protobuf::Message { + public: + CodeGeneratorRequest(); + virtual ~CodeGeneratorRequest(); + + CodeGeneratorRequest(const CodeGeneratorRequest& from); + + inline CodeGeneratorRequest& operator=(const CodeGeneratorRequest& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const CodeGeneratorRequest& default_instance(); + + void Swap(CodeGeneratorRequest* other); + + // implements Message ---------------------------------------------- + + CodeGeneratorRequest* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const CodeGeneratorRequest& from); + void MergeFrom(const CodeGeneratorRequest& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated string file_to_generate = 1; + inline int file_to_generate_size() const; + inline void clear_file_to_generate(); + static const int kFileToGenerateFieldNumber = 1; + inline const ::std::string& file_to_generate(int index) const; + inline ::std::string* mutable_file_to_generate(int index); + inline void set_file_to_generate(int index, const ::std::string& value); + inline void set_file_to_generate(int index, const char* value); + inline void set_file_to_generate(int index, const char* value, size_t size); + inline ::std::string* add_file_to_generate(); + inline void add_file_to_generate(const ::std::string& value); + inline void add_file_to_generate(const char* value); + inline void add_file_to_generate(const char* value, size_t size); + inline const ::google::protobuf::RepeatedPtrField< ::std::string>& file_to_generate() const; + inline ::google::protobuf::RepeatedPtrField< ::std::string>* mutable_file_to_generate(); + + // optional string parameter = 2; + inline bool has_parameter() const; + inline void clear_parameter(); + static const int kParameterFieldNumber = 2; + inline const ::std::string& parameter() const; + inline void set_parameter(const ::std::string& value); + inline void set_parameter(const char* value); + inline void set_parameter(const char* value, size_t size); + inline ::std::string* mutable_parameter(); + inline ::std::string* 
release_parameter(); + + // repeated .google.protobuf.FileDescriptorProto proto_file = 15; + inline int proto_file_size() const; + inline void clear_proto_file(); + static const int kProtoFileFieldNumber = 15; + inline const ::google::protobuf::FileDescriptorProto& proto_file(int index) const; + inline ::google::protobuf::FileDescriptorProto* mutable_proto_file(int index); + inline ::google::protobuf::FileDescriptorProto* add_proto_file(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::FileDescriptorProto >& + proto_file() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::FileDescriptorProto >* + mutable_proto_file(); + + // @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorRequest) + private: + inline void set_has_parameter(); + inline void clear_has_parameter(); + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::google::protobuf::RepeatedPtrField< ::std::string> file_to_generate_; + ::std::string* parameter_; + ::google::protobuf::RepeatedPtrField< ::google::protobuf::FileDescriptorProto > proto_file_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(3 + 31) / 32]; + + friend void LIBPROTOC_EXPORT protobuf_AddDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); + + void InitAsDefaultInstance(); + static CodeGeneratorRequest* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOC_EXPORT CodeGeneratorResponse_File : public ::google::protobuf::Message { + public: + CodeGeneratorResponse_File(); + virtual ~CodeGeneratorResponse_File(); + + CodeGeneratorResponse_File(const CodeGeneratorResponse_File& from); + + inline CodeGeneratorResponse_File& operator=(const CodeGeneratorResponse_File& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const CodeGeneratorResponse_File& default_instance(); + + void Swap(CodeGeneratorResponse_File* other); + + // implements Message ---------------------------------------------- + + CodeGeneratorResponse_File* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const CodeGeneratorResponse_File& from); + void MergeFrom(const CodeGeneratorResponse_File& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // optional string name = 1; + inline bool has_name() const; + inline void clear_name(); + static 
const int kNameFieldNumber = 1; + inline const ::std::string& name() const; + inline void set_name(const ::std::string& value); + inline void set_name(const char* value); + inline void set_name(const char* value, size_t size); + inline ::std::string* mutable_name(); + inline ::std::string* release_name(); + + // optional string insertion_point = 2; + inline bool has_insertion_point() const; + inline void clear_insertion_point(); + static const int kInsertionPointFieldNumber = 2; + inline const ::std::string& insertion_point() const; + inline void set_insertion_point(const ::std::string& value); + inline void set_insertion_point(const char* value); + inline void set_insertion_point(const char* value, size_t size); + inline ::std::string* mutable_insertion_point(); + inline ::std::string* release_insertion_point(); + + // optional string content = 15; + inline bool has_content() const; + inline void clear_content(); + static const int kContentFieldNumber = 15; + inline const ::std::string& content() const; + inline void set_content(const ::std::string& value); + inline void set_content(const char* value); + inline void set_content(const char* value, size_t size); + inline ::std::string* mutable_content(); + inline ::std::string* release_content(); + + // @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse.File) + private: + inline void set_has_name(); + inline void clear_has_name(); + inline void set_has_insertion_point(); + inline void clear_has_insertion_point(); + inline void set_has_content(); + inline void clear_has_content(); + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::std::string* name_; + ::std::string* insertion_point_; + ::std::string* content_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(3 + 31) / 32]; + + friend void LIBPROTOC_EXPORT protobuf_AddDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); + + void InitAsDefaultInstance(); + static CodeGeneratorResponse_File* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOC_EXPORT CodeGeneratorResponse : public ::google::protobuf::Message { + public: + CodeGeneratorResponse(); + virtual ~CodeGeneratorResponse(); + + CodeGeneratorResponse(const CodeGeneratorResponse& from); + + inline CodeGeneratorResponse& operator=(const CodeGeneratorResponse& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const CodeGeneratorResponse& default_instance(); + + void Swap(CodeGeneratorResponse* other); + + // implements Message ---------------------------------------------- + + CodeGeneratorResponse* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const CodeGeneratorResponse& from); + void MergeFrom(const CodeGeneratorResponse& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* 
output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + typedef CodeGeneratorResponse_File File; + + // accessors ------------------------------------------------------- + + // optional string error = 1; + inline bool has_error() const; + inline void clear_error(); + static const int kErrorFieldNumber = 1; + inline const ::std::string& error() const; + inline void set_error(const ::std::string& value); + inline void set_error(const char* value); + inline void set_error(const char* value, size_t size); + inline ::std::string* mutable_error(); + inline ::std::string* release_error(); + + // repeated .google.protobuf.compiler.CodeGeneratorResponse.File file = 15; + inline int file_size() const; + inline void clear_file(); + static const int kFileFieldNumber = 15; + inline const ::google::protobuf::compiler::CodeGeneratorResponse_File& file(int index) const; + inline ::google::protobuf::compiler::CodeGeneratorResponse_File* mutable_file(int index); + inline ::google::protobuf::compiler::CodeGeneratorResponse_File* add_file(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::compiler::CodeGeneratorResponse_File >& + file() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::compiler::CodeGeneratorResponse_File >* + mutable_file(); + + // @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse) + private: + inline void set_has_error(); + inline void clear_has_error(); + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::std::string* error_; + ::google::protobuf::RepeatedPtrField< ::google::protobuf::compiler::CodeGeneratorResponse_File > file_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(2 + 31) / 32]; + + friend void LIBPROTOC_EXPORT protobuf_AddDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fcompiler_2fplugin_2eproto(); + + void InitAsDefaultInstance(); + static CodeGeneratorResponse* default_instance_; +}; +// =================================================================== + + +// =================================================================== + +// CodeGeneratorRequest + +// repeated string file_to_generate = 1; +inline int CodeGeneratorRequest::file_to_generate_size() const { + return file_to_generate_.size(); +} +inline void CodeGeneratorRequest::clear_file_to_generate() { + file_to_generate_.Clear(); +} +inline const ::std::string& CodeGeneratorRequest::file_to_generate(int index) const { + return file_to_generate_.Get(index); +} +inline ::std::string* CodeGeneratorRequest::mutable_file_to_generate(int index) { + return file_to_generate_.Mutable(index); +} +inline void CodeGeneratorRequest::set_file_to_generate(int index, const ::std::string& value) { + file_to_generate_.Mutable(index)->assign(value); +} +inline void CodeGeneratorRequest::set_file_to_generate(int index, const char* value) { + file_to_generate_.Mutable(index)->assign(value); +} +inline void CodeGeneratorRequest::set_file_to_generate(int index, const char* value, size_t size) { + file_to_generate_.Mutable(index)->assign( 
+ reinterpret_cast(value), size); +} +inline ::std::string* CodeGeneratorRequest::add_file_to_generate() { + return file_to_generate_.Add(); +} +inline void CodeGeneratorRequest::add_file_to_generate(const ::std::string& value) { + file_to_generate_.Add()->assign(value); +} +inline void CodeGeneratorRequest::add_file_to_generate(const char* value) { + file_to_generate_.Add()->assign(value); +} +inline void CodeGeneratorRequest::add_file_to_generate(const char* value, size_t size) { + file_to_generate_.Add()->assign(reinterpret_cast(value), size); +} +inline const ::google::protobuf::RepeatedPtrField< ::std::string>& +CodeGeneratorRequest::file_to_generate() const { + return file_to_generate_; +} +inline ::google::protobuf::RepeatedPtrField< ::std::string>* +CodeGeneratorRequest::mutable_file_to_generate() { + return &file_to_generate_; +} + +// optional string parameter = 2; +inline bool CodeGeneratorRequest::has_parameter() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +inline void CodeGeneratorRequest::set_has_parameter() { + _has_bits_[0] |= 0x00000002u; +} +inline void CodeGeneratorRequest::clear_has_parameter() { + _has_bits_[0] &= ~0x00000002u; +} +inline void CodeGeneratorRequest::clear_parameter() { + if (parameter_ != &::google::protobuf::internal::kEmptyString) { + parameter_->clear(); + } + clear_has_parameter(); +} +inline const ::std::string& CodeGeneratorRequest::parameter() const { + return *parameter_; +} +inline void CodeGeneratorRequest::set_parameter(const ::std::string& value) { + set_has_parameter(); + if (parameter_ == &::google::protobuf::internal::kEmptyString) { + parameter_ = new ::std::string; + } + parameter_->assign(value); +} +inline void CodeGeneratorRequest::set_parameter(const char* value) { + set_has_parameter(); + if (parameter_ == &::google::protobuf::internal::kEmptyString) { + parameter_ = new ::std::string; + } + parameter_->assign(value); +} +inline void CodeGeneratorRequest::set_parameter(const char* value, size_t size) { + set_has_parameter(); + if (parameter_ == &::google::protobuf::internal::kEmptyString) { + parameter_ = new ::std::string; + } + parameter_->assign(reinterpret_cast(value), size); +} +inline ::std::string* CodeGeneratorRequest::mutable_parameter() { + set_has_parameter(); + if (parameter_ == &::google::protobuf::internal::kEmptyString) { + parameter_ = new ::std::string; + } + return parameter_; +} +inline ::std::string* CodeGeneratorRequest::release_parameter() { + clear_has_parameter(); + if (parameter_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = parameter_; + parameter_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// repeated .google.protobuf.FileDescriptorProto proto_file = 15; +inline int CodeGeneratorRequest::proto_file_size() const { + return proto_file_.size(); +} +inline void CodeGeneratorRequest::clear_proto_file() { + proto_file_.Clear(); +} +inline const ::google::protobuf::FileDescriptorProto& CodeGeneratorRequest::proto_file(int index) const { + return proto_file_.Get(index); +} +inline ::google::protobuf::FileDescriptorProto* CodeGeneratorRequest::mutable_proto_file(int index) { + return proto_file_.Mutable(index); +} +inline ::google::protobuf::FileDescriptorProto* CodeGeneratorRequest::add_proto_file() { + return proto_file_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::FileDescriptorProto >& +CodeGeneratorRequest::proto_file() const { + return proto_file_; 
+} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::FileDescriptorProto >* +CodeGeneratorRequest::mutable_proto_file() { + return &proto_file_; +} + +// ------------------------------------------------------------------- + +// CodeGeneratorResponse_File + +// optional string name = 1; +inline bool CodeGeneratorResponse_File::has_name() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void CodeGeneratorResponse_File::set_has_name() { + _has_bits_[0] |= 0x00000001u; +} +inline void CodeGeneratorResponse_File::clear_has_name() { + _has_bits_[0] &= ~0x00000001u; +} +inline void CodeGeneratorResponse_File::clear_name() { + if (name_ != &::google::protobuf::internal::kEmptyString) { + name_->clear(); + } + clear_has_name(); +} +inline const ::std::string& CodeGeneratorResponse_File::name() const { + return *name_; +} +inline void CodeGeneratorResponse_File::set_name(const ::std::string& value) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(value); +} +inline void CodeGeneratorResponse_File::set_name(const char* value) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(value); +} +inline void CodeGeneratorResponse_File::set_name(const char* value, size_t size) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(reinterpret_cast(value), size); +} +inline ::std::string* CodeGeneratorResponse_File::mutable_name() { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + return name_; +} +inline ::std::string* CodeGeneratorResponse_File::release_name() { + clear_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = name_; + name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// optional string insertion_point = 2; +inline bool CodeGeneratorResponse_File::has_insertion_point() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +inline void CodeGeneratorResponse_File::set_has_insertion_point() { + _has_bits_[0] |= 0x00000002u; +} +inline void CodeGeneratorResponse_File::clear_has_insertion_point() { + _has_bits_[0] &= ~0x00000002u; +} +inline void CodeGeneratorResponse_File::clear_insertion_point() { + if (insertion_point_ != &::google::protobuf::internal::kEmptyString) { + insertion_point_->clear(); + } + clear_has_insertion_point(); +} +inline const ::std::string& CodeGeneratorResponse_File::insertion_point() const { + return *insertion_point_; +} +inline void CodeGeneratorResponse_File::set_insertion_point(const ::std::string& value) { + set_has_insertion_point(); + if (insertion_point_ == &::google::protobuf::internal::kEmptyString) { + insertion_point_ = new ::std::string; + } + insertion_point_->assign(value); +} +inline void CodeGeneratorResponse_File::set_insertion_point(const char* value) { + set_has_insertion_point(); + if (insertion_point_ == &::google::protobuf::internal::kEmptyString) { + insertion_point_ = new ::std::string; + } + insertion_point_->assign(value); +} +inline void CodeGeneratorResponse_File::set_insertion_point(const char* value, size_t size) { + set_has_insertion_point(); + if (insertion_point_ == &::google::protobuf::internal::kEmptyString) { + insertion_point_ = new ::std::string; + } + 
insertion_point_->assign(reinterpret_cast(value), size); +} +inline ::std::string* CodeGeneratorResponse_File::mutable_insertion_point() { + set_has_insertion_point(); + if (insertion_point_ == &::google::protobuf::internal::kEmptyString) { + insertion_point_ = new ::std::string; + } + return insertion_point_; +} +inline ::std::string* CodeGeneratorResponse_File::release_insertion_point() { + clear_has_insertion_point(); + if (insertion_point_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = insertion_point_; + insertion_point_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// optional string content = 15; +inline bool CodeGeneratorResponse_File::has_content() const { + return (_has_bits_[0] & 0x00000004u) != 0; +} +inline void CodeGeneratorResponse_File::set_has_content() { + _has_bits_[0] |= 0x00000004u; +} +inline void CodeGeneratorResponse_File::clear_has_content() { + _has_bits_[0] &= ~0x00000004u; +} +inline void CodeGeneratorResponse_File::clear_content() { + if (content_ != &::google::protobuf::internal::kEmptyString) { + content_->clear(); + } + clear_has_content(); +} +inline const ::std::string& CodeGeneratorResponse_File::content() const { + return *content_; +} +inline void CodeGeneratorResponse_File::set_content(const ::std::string& value) { + set_has_content(); + if (content_ == &::google::protobuf::internal::kEmptyString) { + content_ = new ::std::string; + } + content_->assign(value); +} +inline void CodeGeneratorResponse_File::set_content(const char* value) { + set_has_content(); + if (content_ == &::google::protobuf::internal::kEmptyString) { + content_ = new ::std::string; + } + content_->assign(value); +} +inline void CodeGeneratorResponse_File::set_content(const char* value, size_t size) { + set_has_content(); + if (content_ == &::google::protobuf::internal::kEmptyString) { + content_ = new ::std::string; + } + content_->assign(reinterpret_cast(value), size); +} +inline ::std::string* CodeGeneratorResponse_File::mutable_content() { + set_has_content(); + if (content_ == &::google::protobuf::internal::kEmptyString) { + content_ = new ::std::string; + } + return content_; +} +inline ::std::string* CodeGeneratorResponse_File::release_content() { + clear_has_content(); + if (content_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = content_; + content_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// ------------------------------------------------------------------- + +// CodeGeneratorResponse + +// optional string error = 1; +inline bool CodeGeneratorResponse::has_error() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void CodeGeneratorResponse::set_has_error() { + _has_bits_[0] |= 0x00000001u; +} +inline void CodeGeneratorResponse::clear_has_error() { + _has_bits_[0] &= ~0x00000001u; +} +inline void CodeGeneratorResponse::clear_error() { + if (error_ != &::google::protobuf::internal::kEmptyString) { + error_->clear(); + } + clear_has_error(); +} +inline const ::std::string& CodeGeneratorResponse::error() const { + return *error_; +} +inline void CodeGeneratorResponse::set_error(const ::std::string& value) { + set_has_error(); + if (error_ == &::google::protobuf::internal::kEmptyString) { + error_ = new ::std::string; + } + error_->assign(value); +} +inline void CodeGeneratorResponse::set_error(const char* value) { + set_has_error(); + if (error_ 
== &::google::protobuf::internal::kEmptyString) { + error_ = new ::std::string; + } + error_->assign(value); +} +inline void CodeGeneratorResponse::set_error(const char* value, size_t size) { + set_has_error(); + if (error_ == &::google::protobuf::internal::kEmptyString) { + error_ = new ::std::string; + } + error_->assign(reinterpret_cast(value), size); +} +inline ::std::string* CodeGeneratorResponse::mutable_error() { + set_has_error(); + if (error_ == &::google::protobuf::internal::kEmptyString) { + error_ = new ::std::string; + } + return error_; +} +inline ::std::string* CodeGeneratorResponse::release_error() { + clear_has_error(); + if (error_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = error_; + error_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// repeated .google.protobuf.compiler.CodeGeneratorResponse.File file = 15; +inline int CodeGeneratorResponse::file_size() const { + return file_.size(); +} +inline void CodeGeneratorResponse::clear_file() { + file_.Clear(); +} +inline const ::google::protobuf::compiler::CodeGeneratorResponse_File& CodeGeneratorResponse::file(int index) const { + return file_.Get(index); +} +inline ::google::protobuf::compiler::CodeGeneratorResponse_File* CodeGeneratorResponse::mutable_file(int index) { + return file_.Mutable(index); +} +inline ::google::protobuf::compiler::CodeGeneratorResponse_File* CodeGeneratorResponse::add_file() { + return file_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::compiler::CodeGeneratorResponse_File >& +CodeGeneratorResponse::file() const { + return file_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::compiler::CodeGeneratorResponse_File >* +CodeGeneratorResponse::mutable_file() { + return &file_; +} + + +// @@protoc_insertion_point(namespace_scope) + +} // namespace compiler +} // namespace protobuf +} // namespace google + +#ifndef SWIG +namespace google { +namespace protobuf { + + +} // namespace google +} // namespace protobuf +#endif // SWIG + +// @@protoc_insertion_point(global_scope) + +#endif // PROTOBUF_google_2fprotobuf_2fcompiler_2fplugin_2eproto__INCLUDED diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/plugin.proto b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/plugin.proto new file mode 100644 index 0000000000..651ed10c22 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/plugin.proto @@ -0,0 +1,145 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// +// WARNING: The plugin interface is currently EXPERIMENTAL and is subject to +// change. +// +// protoc (aka the Protocol Compiler) can be extended via plugins. A plugin is +// just a program that reads a CodeGeneratorRequest from stdin and writes a +// CodeGeneratorResponse to stdout. +// +// Plugins written using C++ can use google/protobuf/compiler/plugin.h instead +// of dealing with the raw protocol defined here. +// +// A plugin executable needs only to be placed somewhere in the path. The +// plugin should be named "protoc-gen-$NAME", and will then be used when the +// flag "--${NAME}_out" is passed to protoc. + +package google.protobuf.compiler; + +import "google/protobuf/descriptor.proto"; + +// An encoded CodeGeneratorRequest is written to the plugin's stdin. +message CodeGeneratorRequest { + // The .proto files that were explicitly listed on the command-line. The + // code generator should generate code only for these files. Each file's + // descriptor will be included in proto_file, below. + repeated string file_to_generate = 1; + + // The generator parameter passed on the command-line. + optional string parameter = 2; + + // FileDescriptorProtos for all files in files_to_generate and everything + // they import. The files will appear in topological order, so each file + // appears before any file that imports it. + // + // protoc guarantees that all proto_files will be written after + // the fields above, even though this is not technically guaranteed by the + // protobuf wire format. This theoretically could allow a plugin to stream + // in the FileDescriptorProtos and handle them one by one rather than read + // the entire set into memory at once. However, as of this writing, this + // is not similarly optimized on protoc's end -- it will store all fields in + // memory at once before sending them to the plugin. + repeated FileDescriptorProto proto_file = 15; +} + +// The plugin writes an encoded CodeGeneratorResponse to stdout. +message CodeGeneratorResponse { + // Error message. If non-empty, code generation failed. The plugin process + // should exit with status code zero even if it reports an error in this way. + // + // This should be used to indicate errors in .proto files which prevent the + // code generator from generating correct code. Errors which indicate a + // problem in protoc itself -- such as the input CodeGeneratorRequest being + // unparseable -- should be reported by writing a message to stderr and + // exiting with a non-zero status code. + optional string error = 1; + + // Represents a single generated file. + message File { + // The file name, relative to the output directory. The name must not + // contain "." 
or ".." components and must be relative, not be absolute (so, + // the file cannot lie outside the output directory). "/" must be used as + // the path separator, not "\". + // + // If the name is omitted, the content will be appended to the previous + // file. This allows the generator to break large files into small chunks, + // and allows the generated text to be streamed back to protoc so that large + // files need not reside completely in memory at one time. Note that as of + // this writing protoc does not optimize for this -- it will read the entire + // CodeGeneratorResponse before writing files to disk. + optional string name = 1; + + // If non-empty, indicates that the named file should already exist, and the + // content here is to be inserted into that file at a defined insertion + // point. This feature allows a code generator to extend the output + // produced by another code generator. The original generator may provide + // insertion points by placing special annotations in the file that look + // like: + // @@protoc_insertion_point(NAME) + // The annotation can have arbitrary text before and after it on the line, + // which allows it to be placed in a comment. NAME should be replaced with + // an identifier naming the point -- this is what other generators will use + // as the insertion_point. Code inserted at this point will be placed + // immediately above the line containing the insertion point (thus multiple + // insertions to the same point will come out in the order they were added). + // The double-@ is intended to make it unlikely that the generated code + // could contain things that look like insertion points by accident. + // + // For example, the C++ code generator places the following line in the + // .pb.h files that it generates: + // // @@protoc_insertion_point(namespace_scope) + // This line appears within the scope of the file's package namespace, but + // outside of any particular class. Another plugin can then specify the + // insertion_point "namespace_scope" to generate additional classes or + // other declarations that should be placed in this scope. + // + // Note that if the line containing the insertion point begins with + // whitespace, the same whitespace will be added to every line of the + // inserted text. This is useful for languages like Python, where + // indentation matters. In these languages, the insertion point comment + // should be indented the same amount as any inserted code will need to be + // in order to work correctly in that context. + // + // The code generator that generates the initial file and the one which + // inserts into it must both run as part of a single invocation of protoc. + // Code generators are executed in the order in which they appear on the + // command line. + // + // If |insertion_point| is present, |name| must also be present. + optional string insertion_point = 2; + + // The file contents. 
+ optional string content = 15; + } + repeated File file = 15; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/python/python_generator.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/python/python_generator.cc new file mode 100644 index 0000000000..9b1093780f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/python/python_generator.cc @@ -0,0 +1,1006 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: robinson@google.com (Will Robinson) +// +// This module outputs pure-Python protocol message classes that will +// largely be constructed at runtime via the metaclass in reflection.py. +// In other words, our job is basically to output a Python equivalent +// of the C++ *Descriptor objects, and fix up all circular references +// within these objects. +// +// Note that the runtime performance of protocol message classes created in +// this way is expected to be lousy. The plan is to create an alternate +// generator that outputs a Python/C extension module that lets +// performance-minded Python code leverage the fast C++ implementation +// directly. + +#include +#include +#include +#include +#include + +#include +#include + +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace python { + +namespace { + +// Returns a copy of |filename| with any trailing ".protodevel" or ".proto +// suffix stripped. +// TODO(robinson): Unify with copy in compiler/cpp/internal/helpers.cc. +string StripProto(const string& filename) { + const char* suffix = HasSuffixString(filename, ".protodevel") + ? 
".protodevel" : ".proto"; + return StripSuffixString(filename, suffix); +} + + +// Returns the Python module name expected for a given .proto filename. +string ModuleName(const string& filename) { + string basename = StripProto(filename); + StripString(&basename, "-", '_'); + StripString(&basename, "/", '.'); + return basename + "_pb2"; +} + + +// Returns the name of all containing types for descriptor, +// in order from outermost to innermost, followed by descriptor's +// own name. Each name is separated by |separator|. +template +string NamePrefixedWithNestedTypes(const DescriptorT& descriptor, + const string& separator) { + string name = descriptor.name(); + for (const Descriptor* current = descriptor.containing_type(); + current != NULL; current = current->containing_type()) { + name = current->name() + separator + name; + } + return name; +} + + +// Name of the class attribute where we store the Python +// descriptor.Descriptor instance for the generated class. +// Must stay consistent with the _DESCRIPTOR_KEY constant +// in proto2/public/reflection.py. +const char kDescriptorKey[] = "DESCRIPTOR"; + + +// Should we generate generic services for this file? +inline bool HasGenericServices(const FileDescriptor *file) { + return file->service_count() > 0 && + file->options().py_generic_services(); +} + + +// Prints the common boilerplate needed at the top of every .py +// file output by this generator. +void PrintTopBoilerplate( + io::Printer* printer, const FileDescriptor* file, bool descriptor_proto) { + // TODO(robinson): Allow parameterization of Python version? + printer->Print( + "# Generated by the protocol buffer compiler. DO NOT EDIT!\n" + "\n" + "from google.protobuf import descriptor\n" + "from google.protobuf import message\n" + "from google.protobuf import reflection\n"); + if (HasGenericServices(file)) { + printer->Print( + "from google.protobuf import service\n" + "from google.protobuf import service_reflection\n"); + } + + // Avoid circular imports if this module is descriptor_pb2. + if (!descriptor_proto) { + printer->Print( + "from google.protobuf import descriptor_pb2\n"); + } + printer->Print( + "# @@protoc_insertion_point(imports)\n"); + printer->Print("\n\n"); +} + + +// Returns a Python literal giving the default value for a field. +// If the field specifies no explicit default value, we'll return +// the default default value for the field type (zero for numbers, +// empty string for strings, empty list for repeated fields, and +// None for non-repeated, composite fields). +// +// TODO(robinson): Unify with code from +// //compiler/cpp/internal/primitive_field.cc +// //compiler/cpp/internal/enum_field.cc +// //compiler/cpp/internal/string_field.cc +string StringifyDefaultValue(const FieldDescriptor& field) { + if (field.is_repeated()) { + return "[]"; + } + + switch (field.cpp_type()) { + case FieldDescriptor::CPPTYPE_INT32: + return SimpleItoa(field.default_value_int32()); + case FieldDescriptor::CPPTYPE_UINT32: + return SimpleItoa(field.default_value_uint32()); + case FieldDescriptor::CPPTYPE_INT64: + return SimpleItoa(field.default_value_int64()); + case FieldDescriptor::CPPTYPE_UINT64: + return SimpleItoa(field.default_value_uint64()); + case FieldDescriptor::CPPTYPE_DOUBLE: { + double value = field.default_value_double(); + if (value == numeric_limits::infinity()) { + // Python pre-2.6 on Windows does not parse "inf" correctly. However, + // a numeric literal that is too big for a double will become infinity. 
+ return "1e10000"; + } else if (value == -numeric_limits::infinity()) { + // See above. + return "-1e10000"; + } else if (value != value) { + // infinity * 0 = nan + return "(1e10000 * 0)"; + } else { + return SimpleDtoa(value); + } + } + case FieldDescriptor::CPPTYPE_FLOAT: { + float value = field.default_value_float(); + if (value == numeric_limits::infinity()) { + // Python pre-2.6 on Windows does not parse "inf" correctly. However, + // a numeric literal that is too big for a double will become infinity. + return "1e10000"; + } else if (value == -numeric_limits::infinity()) { + // See above. + return "-1e10000"; + } else if (value != value) { + // infinity - infinity = nan + return "(1e10000 * 0)"; + } else { + return SimpleFtoa(value); + } + } + case FieldDescriptor::CPPTYPE_BOOL: + return field.default_value_bool() ? "True" : "False"; + case FieldDescriptor::CPPTYPE_ENUM: + return SimpleItoa(field.default_value_enum()->number()); + case FieldDescriptor::CPPTYPE_STRING: + if (field.type() == FieldDescriptor::TYPE_STRING) { + return "unicode(\"" + CEscape(field.default_value_string()) + + "\", \"utf-8\")"; + } else { + return "\"" + CEscape(field.default_value_string()) + "\""; + } + case FieldDescriptor::CPPTYPE_MESSAGE: + return "None"; + } + // (We could add a default case above but then we wouldn't get the nice + // compiler warning when a new type is added.) + GOOGLE_LOG(FATAL) << "Not reached."; + return ""; +} + + + +} // namespace + + +Generator::Generator() : file_(NULL) { +} + +Generator::~Generator() { +} + +bool Generator::Generate(const FileDescriptor* file, + const string& parameter, + GeneratorContext* context, + string* error) const { + + // Completely serialize all Generate() calls on this instance. The + // thread-safety constraints of the CodeGenerator interface aren't clear so + // just be as conservative as possible. It's easier to relax this later if + // we need to, but I doubt it will be an issue. + // TODO(kenton): The proper thing to do would be to allocate any state on + // the stack and use that, so that the Generator class itself does not need + // to have any mutable members. Then it is implicitly thread-safe. + MutexLock lock(&mutex_); + file_ = file; + string module_name = ModuleName(file->name()); + string filename = module_name; + StripString(&filename, ".", '/'); + filename += ".py"; + + FileDescriptorProto fdp; + file_->CopyTo(&fdp); + fdp.SerializeToString(&file_descriptor_serialized_); + + + scoped_ptr output(context->Open(filename)); + GOOGLE_CHECK(output.get()); + io::Printer printer(output.get(), '$'); + printer_ = &printer; + + PrintTopBoilerplate(printer_, file_, GeneratingDescriptorProto()); + PrintImports(); + PrintFileDescriptor(); + PrintTopLevelEnums(); + PrintTopLevelExtensions(); + PrintAllNestedEnumsInFile(); + PrintMessageDescriptors(); + FixForeignFieldsInDescriptors(); + PrintMessages(); + // We have to fix up the extensions after the message classes themselves, + // since they need to call static RegisterExtension() methods on these + // classes. + FixForeignFieldsInExtensions(); + if (HasGenericServices(file)) { + PrintServices(); + } + + printer.Print( + "# @@protoc_insertion_point(module_scope)\n"); + + return !printer.failed(); +} + +// Prints Python imports for all modules imported by |file|. 
+void Generator::PrintImports() const { + for (int i = 0; i < file_->dependency_count(); ++i) { + string module_name = ModuleName(file_->dependency(i)->name()); + printer_->Print("import $module$\n", "module", + module_name); + } + printer_->Print("\n"); +} + +// Prints the single file descriptor for this file. +void Generator::PrintFileDescriptor() const { + map m; + m["descriptor_name"] = kDescriptorKey; + m["name"] = file_->name(); + m["package"] = file_->package(); + const char file_descriptor_template[] = + "$descriptor_name$ = descriptor.FileDescriptor(\n" + " name='$name$',\n" + " package='$package$',\n"; + printer_->Print(m, file_descriptor_template); + printer_->Indent(); + printer_->Print( + "serialized_pb='$value$'", + "value", strings::CHexEscape(file_descriptor_serialized_)); + + // TODO(falk): Also print options and fix the message_type, enum_type, + // service and extension later in the generation. + + printer_->Outdent(); + printer_->Print(")\n"); + printer_->Print("\n"); +} + +// Prints descriptors and module-level constants for all top-level +// enums defined in |file|. +void Generator::PrintTopLevelEnums() const { + vector > top_level_enum_values; + for (int i = 0; i < file_->enum_type_count(); ++i) { + const EnumDescriptor& enum_descriptor = *file_->enum_type(i); + PrintEnum(enum_descriptor); + printer_->Print("\n"); + + for (int j = 0; j < enum_descriptor.value_count(); ++j) { + const EnumValueDescriptor& value_descriptor = *enum_descriptor.value(j); + top_level_enum_values.push_back( + make_pair(value_descriptor.name(), value_descriptor.number())); + } + } + + for (int i = 0; i < top_level_enum_values.size(); ++i) { + printer_->Print("$name$ = $value$\n", + "name", top_level_enum_values[i].first, + "value", SimpleItoa(top_level_enum_values[i].second)); + } + printer_->Print("\n"); +} + +// Prints all enums contained in all message types in |file|. +void Generator::PrintAllNestedEnumsInFile() const { + for (int i = 0; i < file_->message_type_count(); ++i) { + PrintNestedEnums(*file_->message_type(i)); + } +} + +// Prints a Python statement assigning the appropriate module-level +// enum name to a Python EnumDescriptor object equivalent to +// enum_descriptor. +void Generator::PrintEnum(const EnumDescriptor& enum_descriptor) const { + map m; + m["descriptor_name"] = ModuleLevelDescriptorName(enum_descriptor); + m["name"] = enum_descriptor.name(); + m["full_name"] = enum_descriptor.full_name(); + m["file"] = kDescriptorKey; + const char enum_descriptor_template[] = + "$descriptor_name$ = descriptor.EnumDescriptor(\n" + " name='$name$',\n" + " full_name='$full_name$',\n" + " filename=None,\n" + " file=$file$,\n" + " values=[\n"; + string options_string; + enum_descriptor.options().SerializeToString(&options_string); + printer_->Print(m, enum_descriptor_template); + printer_->Indent(); + printer_->Indent(); + for (int i = 0; i < enum_descriptor.value_count(); ++i) { + PrintEnumValueDescriptor(*enum_descriptor.value(i)); + printer_->Print(",\n"); + } + printer_->Outdent(); + printer_->Print("],\n"); + printer_->Print("containing_type=None,\n"); + printer_->Print("options=$options_value$,\n", + "options_value", + OptionsValue("EnumOptions", options_string)); + EnumDescriptorProto edp; + PrintSerializedPbInterval(enum_descriptor, edp); + printer_->Outdent(); + printer_->Print(")\n"); + printer_->Print("\n"); +} + +// Recursively prints enums in nested types within descriptor, then +// prints enums contained at the top level in descriptor. 
+void Generator::PrintNestedEnums(const Descriptor& descriptor) const { + for (int i = 0; i < descriptor.nested_type_count(); ++i) { + PrintNestedEnums(*descriptor.nested_type(i)); + } + + for (int i = 0; i < descriptor.enum_type_count(); ++i) { + PrintEnum(*descriptor.enum_type(i)); + } +} + +void Generator::PrintTopLevelExtensions() const { + const bool is_extension = true; + for (int i = 0; i < file_->extension_count(); ++i) { + const FieldDescriptor& extension_field = *file_->extension(i); + string constant_name = extension_field.name() + "_FIELD_NUMBER"; + UpperString(&constant_name); + printer_->Print("$constant_name$ = $number$\n", + "constant_name", constant_name, + "number", SimpleItoa(extension_field.number())); + printer_->Print("$name$ = ", "name", extension_field.name()); + PrintFieldDescriptor(extension_field, is_extension); + printer_->Print("\n"); + } + printer_->Print("\n"); +} + +// Prints Python equivalents of all Descriptors in |file|. +void Generator::PrintMessageDescriptors() const { + for (int i = 0; i < file_->message_type_count(); ++i) { + PrintDescriptor(*file_->message_type(i)); + printer_->Print("\n"); + } +} + +void Generator::PrintServices() const { + for (int i = 0; i < file_->service_count(); ++i) { + PrintServiceDescriptor(*file_->service(i)); + PrintServiceClass(*file_->service(i)); + PrintServiceStub(*file_->service(i)); + printer_->Print("\n"); + } +} + +void Generator::PrintServiceDescriptor( + const ServiceDescriptor& descriptor) const { + printer_->Print("\n"); + string service_name = ModuleLevelServiceDescriptorName(descriptor); + string options_string; + descriptor.options().SerializeToString(&options_string); + + printer_->Print( + "$service_name$ = descriptor.ServiceDescriptor(\n", + "service_name", service_name); + printer_->Indent(); + map m; + m["name"] = descriptor.name(); + m["full_name"] = descriptor.full_name(); + m["file"] = kDescriptorKey; + m["index"] = SimpleItoa(descriptor.index()); + m["options_value"] = OptionsValue("ServiceOptions", options_string); + const char required_function_arguments[] = + "name='$name$',\n" + "full_name='$full_name$',\n" + "file=$file$,\n" + "index=$index$,\n" + "options=$options_value$,\n"; + printer_->Print(m, required_function_arguments); + + ServiceDescriptorProto sdp; + PrintSerializedPbInterval(descriptor, sdp); + + printer_->Print("methods=[\n"); + for (int i = 0; i < descriptor.method_count(); ++i) { + const MethodDescriptor* method = descriptor.method(i); + string options_string; + method->options().SerializeToString(&options_string); + + m.clear(); + m["name"] = method->name(); + m["full_name"] = method->full_name(); + m["index"] = SimpleItoa(method->index()); + m["serialized_options"] = CEscape(options_string); + m["input_type"] = ModuleLevelDescriptorName(*(method->input_type())); + m["output_type"] = ModuleLevelDescriptorName(*(method->output_type())); + m["options_value"] = OptionsValue("MethodOptions", options_string); + printer_->Print("descriptor.MethodDescriptor(\n"); + printer_->Indent(); + printer_->Print( + m, + "name='$name$',\n" + "full_name='$full_name$',\n" + "index=$index$,\n" + "containing_service=None,\n" + "input_type=$input_type$,\n" + "output_type=$output_type$,\n" + "options=$options_value$,\n"); + printer_->Outdent(); + printer_->Print("),\n"); + } + + printer_->Outdent(); + printer_->Print("])\n\n"); +} + +void Generator::PrintServiceClass(const ServiceDescriptor& descriptor) const { + // Print the service. 
+ printer_->Print("class $class_name$(service.Service):\n", + "class_name", descriptor.name()); + printer_->Indent(); + printer_->Print( + "__metaclass__ = service_reflection.GeneratedServiceType\n" + "$descriptor_key$ = $descriptor_name$\n", + "descriptor_key", kDescriptorKey, + "descriptor_name", ModuleLevelServiceDescriptorName(descriptor)); + printer_->Outdent(); +} + +void Generator::PrintServiceStub(const ServiceDescriptor& descriptor) const { + // Print the service stub. + printer_->Print("class $class_name$_Stub($class_name$):\n", + "class_name", descriptor.name()); + printer_->Indent(); + printer_->Print( + "__metaclass__ = service_reflection.GeneratedServiceStubType\n" + "$descriptor_key$ = $descriptor_name$\n", + "descriptor_key", kDescriptorKey, + "descriptor_name", ModuleLevelServiceDescriptorName(descriptor)); + printer_->Outdent(); +} + +// Prints statement assigning ModuleLevelDescriptorName(message_descriptor) +// to a Python Descriptor object for message_descriptor. +// +// Mutually recursive with PrintNestedDescriptors(). +void Generator::PrintDescriptor(const Descriptor& message_descriptor) const { + PrintNestedDescriptors(message_descriptor); + + printer_->Print("\n"); + printer_->Print("$descriptor_name$ = descriptor.Descriptor(\n", + "descriptor_name", + ModuleLevelDescriptorName(message_descriptor)); + printer_->Indent(); + map m; + m["name"] = message_descriptor.name(); + m["full_name"] = message_descriptor.full_name(); + m["file"] = kDescriptorKey; + const char required_function_arguments[] = + "name='$name$',\n" + "full_name='$full_name$',\n" + "filename=None,\n" + "file=$file$,\n" + "containing_type=None,\n"; + printer_->Print(m, required_function_arguments); + PrintFieldsInDescriptor(message_descriptor); + PrintExtensionsInDescriptor(message_descriptor); + + // Nested types + printer_->Print("nested_types=["); + for (int i = 0; i < message_descriptor.nested_type_count(); ++i) { + const string nested_name = ModuleLevelDescriptorName( + *message_descriptor.nested_type(i)); + printer_->Print("$name$, ", "name", nested_name); + } + printer_->Print("],\n"); + + // Enum types + printer_->Print("enum_types=[\n"); + printer_->Indent(); + for (int i = 0; i < message_descriptor.enum_type_count(); ++i) { + const string descriptor_name = ModuleLevelDescriptorName( + *message_descriptor.enum_type(i)); + printer_->Print(descriptor_name.c_str()); + printer_->Print(",\n"); + } + printer_->Outdent(); + printer_->Print("],\n"); + string options_string; + message_descriptor.options().SerializeToString(&options_string); + printer_->Print( + "options=$options_value$,\n" + "is_extendable=$extendable$", + "options_value", OptionsValue("MessageOptions", options_string), + "extendable", message_descriptor.extension_range_count() > 0 ? + "True" : "False"); + printer_->Print(",\n"); + + // Extension ranges + printer_->Print("extension_ranges=["); + for (int i = 0; i < message_descriptor.extension_range_count(); ++i) { + const Descriptor::ExtensionRange* range = + message_descriptor.extension_range(i); + printer_->Print("($start$, $end$), ", + "start", SimpleItoa(range->start), + "end", SimpleItoa(range->end)); + } + printer_->Print("],\n"); + + // Serialization of proto + DescriptorProto edp; + PrintSerializedPbInterval(message_descriptor, edp); + + printer_->Outdent(); + printer_->Print(")\n"); +} + +// Prints Python Descriptor objects for all nested types contained in +// message_descriptor. +// +// Mutually recursive with PrintDescriptor(). 
+void Generator::PrintNestedDescriptors( + const Descriptor& containing_descriptor) const { + for (int i = 0; i < containing_descriptor.nested_type_count(); ++i) { + PrintDescriptor(*containing_descriptor.nested_type(i)); + } +} + +// Prints all messages in |file|. +void Generator::PrintMessages() const { + for (int i = 0; i < file_->message_type_count(); ++i) { + PrintMessage(*file_->message_type(i)); + printer_->Print("\n"); + } +} + +// Prints a Python class for the given message descriptor. We defer to the +// metaclass to do almost all of the work of actually creating a useful class. +// The purpose of this function and its many helper functions above is merely +// to output a Python version of the descriptors, which the metaclass in +// reflection.py will use to construct the meat of the class itself. +// +// Mutually recursive with PrintNestedMessages(). +void Generator::PrintMessage( + const Descriptor& message_descriptor) const { + printer_->Print("class $name$(message.Message):\n", "name", + message_descriptor.name()); + printer_->Indent(); + printer_->Print("__metaclass__ = reflection.GeneratedProtocolMessageType\n"); + PrintNestedMessages(message_descriptor); + map m; + m["descriptor_key"] = kDescriptorKey; + m["descriptor_name"] = ModuleLevelDescriptorName(message_descriptor); + printer_->Print(m, "$descriptor_key$ = $descriptor_name$\n"); + + printer_->Print( + "\n" + "# @@protoc_insertion_point(class_scope:$full_name$)\n", + "full_name", message_descriptor.full_name()); + + printer_->Outdent(); +} + +// Prints all nested messages within |containing_descriptor|. +// Mutually recursive with PrintMessage(). +void Generator::PrintNestedMessages( + const Descriptor& containing_descriptor) const { + for (int i = 0; i < containing_descriptor.nested_type_count(); ++i) { + printer_->Print("\n"); + PrintMessage(*containing_descriptor.nested_type(i)); + } +} + +// Recursively fixes foreign fields in all nested types in |descriptor|, then +// sets the message_type and enum_type of all message and enum fields to point +// to their respective descriptors. +// Args: +// descriptor: descriptor to print fields for. +// containing_descriptor: if descriptor is a nested type, this is its +// containing type, or NULL if this is a root/top-level type. 
+void Generator::FixForeignFieldsInDescriptor( + const Descriptor& descriptor, + const Descriptor* containing_descriptor) const { + for (int i = 0; i < descriptor.nested_type_count(); ++i) { + FixForeignFieldsInDescriptor(*descriptor.nested_type(i), &descriptor); + } + + for (int i = 0; i < descriptor.field_count(); ++i) { + const FieldDescriptor& field_descriptor = *descriptor.field(i); + FixForeignFieldsInField(&descriptor, field_descriptor, "fields_by_name"); + } + + FixContainingTypeInDescriptor(descriptor, containing_descriptor); + for (int i = 0; i < descriptor.enum_type_count(); ++i) { + const EnumDescriptor& enum_descriptor = *descriptor.enum_type(i); + FixContainingTypeInDescriptor(enum_descriptor, &descriptor); + } +} + +void Generator::AddMessageToFileDescriptor(const Descriptor& descriptor) const { + map m; + m["descriptor_name"] = kDescriptorKey; + m["message_name"] = descriptor.name(); + m["message_descriptor_name"] = ModuleLevelDescriptorName(descriptor); + const char file_descriptor_template[] = + "$descriptor_name$.message_types_by_name['$message_name$'] = " + "$message_descriptor_name$\n"; + printer_->Print(m, file_descriptor_template); +} + +// Sets any necessary message_type and enum_type attributes +// for the Python version of |field|. +// +// containing_type may be NULL, in which case this is a module-level field. +// +// python_dict_name is the name of the Python dict where we should +// look the field up in the containing type. (e.g., fields_by_name +// or extensions_by_name). We ignore python_dict_name if containing_type +// is NULL. +void Generator::FixForeignFieldsInField(const Descriptor* containing_type, + const FieldDescriptor& field, + const string& python_dict_name) const { + const string field_referencing_expression = FieldReferencingExpression( + containing_type, field, python_dict_name); + map m; + m["field_ref"] = field_referencing_expression; + const Descriptor* foreign_message_type = field.message_type(); + if (foreign_message_type) { + m["foreign_type"] = ModuleLevelDescriptorName(*foreign_message_type); + printer_->Print(m, "$field_ref$.message_type = $foreign_type$\n"); + } + const EnumDescriptor* enum_type = field.enum_type(); + if (enum_type) { + m["enum_type"] = ModuleLevelDescriptorName(*enum_type); + printer_->Print(m, "$field_ref$.enum_type = $enum_type$\n"); + } +} + +// Returns the module-level expression for the given FieldDescriptor. +// Only works for fields in the .proto file this Generator is generating for. +// +// containing_type may be NULL, in which case this is a module-level field. +// +// python_dict_name is the name of the Python dict where we should +// look the field up in the containing type. (e.g., fields_by_name +// or extensions_by_name). We ignore python_dict_name if containing_type +// is NULL. +string Generator::FieldReferencingExpression( + const Descriptor* containing_type, + const FieldDescriptor& field, + const string& python_dict_name) const { + // We should only ever be looking up fields in the current file. + // The only things we refer to from other files are message descriptors. + GOOGLE_CHECK_EQ(field.file(), file_) << field.file()->name() << " vs. " + << file_->name(); + if (!containing_type) { + return field.name(); + } + return strings::Substitute( + "$0.$1['$2']", + ModuleLevelDescriptorName(*containing_type), + python_dict_name, field.name()); +} + +// Prints containing_type for nested descriptors or enum descriptors. 
+template +void Generator::FixContainingTypeInDescriptor( + const DescriptorT& descriptor, + const Descriptor* containing_descriptor) const { + if (containing_descriptor != NULL) { + const string nested_name = ModuleLevelDescriptorName(descriptor); + const string parent_name = ModuleLevelDescriptorName( + *containing_descriptor); + printer_->Print( + "$nested_name$.containing_type = $parent_name$;\n", + "nested_name", nested_name, + "parent_name", parent_name); + } +} + +// Prints statements setting the message_type and enum_type fields in the +// Python descriptor objects we've already output in ths file. We must +// do this in a separate step due to circular references (otherwise, we'd +// just set everything in the initial assignment statements). +void Generator::FixForeignFieldsInDescriptors() const { + for (int i = 0; i < file_->message_type_count(); ++i) { + FixForeignFieldsInDescriptor(*file_->message_type(i), NULL); + } + for (int i = 0; i < file_->message_type_count(); ++i) { + AddMessageToFileDescriptor(*file_->message_type(i)); + } + printer_->Print("\n"); +} + +// We need to not only set any necessary message_type fields, but +// also need to call RegisterExtension() on each message we're +// extending. +void Generator::FixForeignFieldsInExtensions() const { + // Top-level extensions. + for (int i = 0; i < file_->extension_count(); ++i) { + FixForeignFieldsInExtension(*file_->extension(i)); + } + // Nested extensions. + for (int i = 0; i < file_->message_type_count(); ++i) { + FixForeignFieldsInNestedExtensions(*file_->message_type(i)); + } +} + +void Generator::FixForeignFieldsInExtension( + const FieldDescriptor& extension_field) const { + GOOGLE_CHECK(extension_field.is_extension()); + // extension_scope() will be NULL for top-level extensions, which is + // exactly what FixForeignFieldsInField() wants. + FixForeignFieldsInField(extension_field.extension_scope(), extension_field, + "extensions_by_name"); + + map m; + // Confusingly, for FieldDescriptors that happen to be extensions, + // containing_type() means "extended type." + // On the other hand, extension_scope() will give us what we normally + // mean by containing_type(). + m["extended_message_class"] = ModuleLevelMessageName( + *extension_field.containing_type()); + m["field"] = FieldReferencingExpression(extension_field.extension_scope(), + extension_field, + "extensions_by_name"); + printer_->Print(m, "$extended_message_class$.RegisterExtension($field$)\n"); +} + +void Generator::FixForeignFieldsInNestedExtensions( + const Descriptor& descriptor) const { + // Recursively fix up extensions in all nested types. + for (int i = 0; i < descriptor.nested_type_count(); ++i) { + FixForeignFieldsInNestedExtensions(*descriptor.nested_type(i)); + } + // Fix up extensions directly contained within this type. + for (int i = 0; i < descriptor.extension_count(); ++i) { + FixForeignFieldsInExtension(*descriptor.extension(i)); + } +} + +// Returns a Python expression that instantiates a Python EnumValueDescriptor +// object for the given C++ descriptor. +void Generator::PrintEnumValueDescriptor( + const EnumValueDescriptor& descriptor) const { + // TODO(robinson): Fix up EnumValueDescriptor "type" fields. + // More circular references. 
::sigh:: + string options_string; + descriptor.options().SerializeToString(&options_string); + map m; + m["name"] = descriptor.name(); + m["index"] = SimpleItoa(descriptor.index()); + m["number"] = SimpleItoa(descriptor.number()); + m["options"] = OptionsValue("EnumValueOptions", options_string); + printer_->Print( + m, + "descriptor.EnumValueDescriptor(\n" + " name='$name$', index=$index$, number=$number$,\n" + " options=$options$,\n" + " type=None)"); +} + +// Returns a Python expression that calls descriptor._ParseOptions using +// the given descriptor class name and serialized options protobuf string. +string Generator::OptionsValue( + const string& class_name, const string& serialized_options) const { + if (serialized_options.length() == 0 || GeneratingDescriptorProto()) { + return "None"; + } else { + string full_class_name = "descriptor_pb2." + class_name; + return "descriptor._ParseOptions(" + full_class_name + "(), '" + + CEscape(serialized_options)+ "')"; + } +} + +// Prints an expression for a Python FieldDescriptor for |field|. +void Generator::PrintFieldDescriptor( + const FieldDescriptor& field, bool is_extension) const { + string options_string; + field.options().SerializeToString(&options_string); + map m; + m["name"] = field.name(); + m["full_name"] = field.full_name(); + m["index"] = SimpleItoa(field.index()); + m["number"] = SimpleItoa(field.number()); + m["type"] = SimpleItoa(field.type()); + m["cpp_type"] = SimpleItoa(field.cpp_type()); + m["label"] = SimpleItoa(field.label()); + m["has_default_value"] = field.has_default_value() ? "True" : "False"; + m["default_value"] = StringifyDefaultValue(field); + m["is_extension"] = is_extension ? "True" : "False"; + m["options"] = OptionsValue("FieldOptions", options_string); + // We always set message_type and enum_type to None at this point, and then + // these fields in correctly after all referenced descriptors have been + // defined and/or imported (see FixForeignFieldsInDescriptors()). + const char field_descriptor_decl[] = + "descriptor.FieldDescriptor(\n" + " name='$name$', full_name='$full_name$', index=$index$,\n" + " number=$number$, type=$type$, cpp_type=$cpp_type$, label=$label$,\n" + " has_default_value=$has_default_value$, default_value=$default_value$,\n" + " message_type=None, enum_type=None, containing_type=None,\n" + " is_extension=$is_extension$, extension_scope=None,\n" + " options=$options$)"; + printer_->Print(m, field_descriptor_decl); +} + +// Helper for Print{Fields,Extensions}InDescriptor(). +void Generator::PrintFieldDescriptorsInDescriptor( + const Descriptor& message_descriptor, + bool is_extension, + const string& list_variable_name, + int (Descriptor::*CountFn)() const, + const FieldDescriptor* (Descriptor::*GetterFn)(int) const) const { + printer_->Print("$list$=[\n", "list", list_variable_name); + printer_->Indent(); + for (int i = 0; i < (message_descriptor.*CountFn)(); ++i) { + PrintFieldDescriptor(*(message_descriptor.*GetterFn)(i), + is_extension); + printer_->Print(",\n"); + } + printer_->Outdent(); + printer_->Print("],\n"); +} + +// Prints a statement assigning "fields" to a list of Python FieldDescriptors, +// one for each field present in message_descriptor. 
+void Generator::PrintFieldsInDescriptor( + const Descriptor& message_descriptor) const { + const bool is_extension = false; + PrintFieldDescriptorsInDescriptor( + message_descriptor, is_extension, "fields", + &Descriptor::field_count, &Descriptor::field); +} + +// Prints a statement assigning "extensions" to a list of Python +// FieldDescriptors, one for each extension present in message_descriptor. +void Generator::PrintExtensionsInDescriptor( + const Descriptor& message_descriptor) const { + const bool is_extension = true; + PrintFieldDescriptorsInDescriptor( + message_descriptor, is_extension, "extensions", + &Descriptor::extension_count, &Descriptor::extension); +} + +bool Generator::GeneratingDescriptorProto() const { + return file_->name() == "google/protobuf/descriptor.proto"; +} + +// Returns the unique Python module-level identifier given to a descriptor. +// This name is module-qualified iff the given descriptor describes an +// entity that doesn't come from the current file. +template +string Generator::ModuleLevelDescriptorName( + const DescriptorT& descriptor) const { + // FIXME(robinson): + // We currently don't worry about collisions with underscores in the type + // names, so these would collide in nasty ways if found in the same file: + // OuterProto.ProtoA.ProtoB + // OuterProto_ProtoA.ProtoB # Underscore instead of period. + // As would these: + // OuterProto.ProtoA_.ProtoB + // OuterProto.ProtoA._ProtoB # Leading vs. trailing underscore. + // (Contrived, but certainly possible). + // + // The C++ implementation doesn't guard against this either. Leaving + // it for now... + string name = NamePrefixedWithNestedTypes(descriptor, "_"); + UpperString(&name); + // Module-private for now. Easy to make public later; almost impossible + // to make private later. + name = "_" + name; + // We now have the name relative to its own module. Also qualify with + // the module name iff this descriptor is from a different .proto file. + if (descriptor.file() != file_) { + name = ModuleName(descriptor.file()->name()) + "." + name; + } + return name; +} + +// Returns the name of the message class itself, not the descriptor. +// Like ModuleLevelDescriptorName(), module-qualifies the name iff +// the given descriptor describes an entity that doesn't come from +// the current file. +string Generator::ModuleLevelMessageName(const Descriptor& descriptor) const { + string name = NamePrefixedWithNestedTypes(descriptor, "."); + if (descriptor.file() != file_) { + name = ModuleName(descriptor.file()->name()) + "." + name; + } + return name; +} + +// Returns the unique Python module-level identifier given to a service +// descriptor. +string Generator::ModuleLevelServiceDescriptorName( + const ServiceDescriptor& descriptor) const { + string name = descriptor.name(); + UpperString(&name); + name = "_" + name; + if (descriptor.file() != file_) { + name = ModuleName(descriptor.file()->name()) + "." + name; + } + return name; +} + +// Prints standard constructor arguments serialized_start and serialized_end. +// Args: +// descriptor: The cpp descriptor to have a serialized reference. 
+// proto: A proto +// Example printer output: +// serialized_start=41, +// serialized_end=43, +// +template +void Generator::PrintSerializedPbInterval( + const DescriptorT& descriptor, DescriptorProtoT& proto) const { + descriptor.CopyTo(&proto); + string sp; + proto.SerializeToString(&sp); + int offset = file_descriptor_serialized_.find(sp); + GOOGLE_CHECK_GE(offset, 0); + + printer_->Print("serialized_start=$serialized_start$,\n" + "serialized_end=$serialized_end$,\n", + "serialized_start", SimpleItoa(offset), + "serialized_end", SimpleItoa(offset + sp.size())); +} + +} // namespace python +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/python/python_generator.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/python/python_generator.h new file mode 100644 index 0000000000..84eaf8abd0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/python/python_generator.h @@ -0,0 +1,156 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: robinson@google.com (Will Robinson) +// +// Generates Python code for a given .proto file. + +#ifndef GOOGLE_PROTOBUF_COMPILER_PYTHON_GENERATOR_H__ +#define GOOGLE_PROTOBUF_COMPILER_PYTHON_GENERATOR_H__ + +#include + +#include +#include + +namespace google { +namespace protobuf { + +class Descriptor; +class EnumDescriptor; +class EnumValueDescriptor; +class FieldDescriptor; +class ServiceDescriptor; + +namespace io { class Printer; } + +namespace compiler { +namespace python { + +// CodeGenerator implementation for generated Python protocol buffer classes. 
+// If you create your own protocol compiler binary and you want it to support +// Python output, you can do so by registering an instance of this +// CodeGenerator with the CommandLineInterface in your main() function. +class LIBPROTOC_EXPORT Generator : public CodeGenerator { + public: + Generator(); + virtual ~Generator(); + + // CodeGenerator methods. + virtual bool Generate(const FileDescriptor* file, + const string& parameter, + GeneratorContext* generator_context, + string* error) const; + + private: + void PrintImports() const; + void PrintFileDescriptor() const; + void PrintTopLevelEnums() const; + void PrintAllNestedEnumsInFile() const; + void PrintNestedEnums(const Descriptor& descriptor) const; + void PrintEnum(const EnumDescriptor& enum_descriptor) const; + + void PrintTopLevelExtensions() const; + + void PrintFieldDescriptor( + const FieldDescriptor& field, bool is_extension) const; + void PrintFieldDescriptorsInDescriptor( + const Descriptor& message_descriptor, + bool is_extension, + const string& list_variable_name, + int (Descriptor::*CountFn)() const, + const FieldDescriptor* (Descriptor::*GetterFn)(int) const) const; + void PrintFieldsInDescriptor(const Descriptor& message_descriptor) const; + void PrintExtensionsInDescriptor(const Descriptor& message_descriptor) const; + void PrintMessageDescriptors() const; + void PrintDescriptor(const Descriptor& message_descriptor) const; + void PrintNestedDescriptors(const Descriptor& containing_descriptor) const; + + void PrintMessages() const; + void PrintMessage(const Descriptor& message_descriptor) const; + void PrintNestedMessages(const Descriptor& containing_descriptor) const; + + void FixForeignFieldsInDescriptors() const; + void FixForeignFieldsInDescriptor( + const Descriptor& descriptor, + const Descriptor* containing_descriptor) const; + void FixForeignFieldsInField(const Descriptor* containing_type, + const FieldDescriptor& field, + const string& python_dict_name) const; + void AddMessageToFileDescriptor(const Descriptor& descriptor) const; + string FieldReferencingExpression(const Descriptor* containing_type, + const FieldDescriptor& field, + const string& python_dict_name) const; + template + void FixContainingTypeInDescriptor( + const DescriptorT& descriptor, + const Descriptor* containing_descriptor) const; + + void FixForeignFieldsInExtensions() const; + void FixForeignFieldsInExtension( + const FieldDescriptor& extension_field) const; + void FixForeignFieldsInNestedExtensions(const Descriptor& descriptor) const; + + void PrintServices() const; + void PrintServiceDescriptor(const ServiceDescriptor& descriptor) const; + void PrintServiceClass(const ServiceDescriptor& descriptor) const; + void PrintServiceStub(const ServiceDescriptor& descriptor) const; + + void PrintEnumValueDescriptor(const EnumValueDescriptor& descriptor) const; + string OptionsValue(const string& class_name, + const string& serialized_options) const; + bool GeneratingDescriptorProto() const; + + template + string ModuleLevelDescriptorName(const DescriptorT& descriptor) const; + string ModuleLevelMessageName(const Descriptor& descriptor) const; + string ModuleLevelServiceDescriptorName( + const ServiceDescriptor& descriptor) const; + + template + void PrintSerializedPbInterval( + const DescriptorT& descriptor, DescriptorProtoT& proto) const; + + // Very coarse-grained lock to ensure that Generate() is reentrant. + // Guards file_, printer_ and file_descriptor_serialized_. 
+ mutable Mutex mutex_; + mutable const FileDescriptor* file_; // Set in Generate(). Under mutex_. + mutable string file_descriptor_serialized_; + mutable io::Printer* printer_; // Set in Generate(). Under mutex_. + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Generator); +}; + +} // namespace python +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_PYTHON_GENERATOR_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/python/python_plugin_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/python/python_plugin_unittest.cc new file mode 100644 index 0000000000..da619ad3e9 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/python/python_plugin_unittest.cc @@ -0,0 +1,116 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// +// TODO(kenton): Share code with the versions of this test in other languages? +// It seemed like parameterizing it would add more complexity than it is +// worth. 
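The class declaration above notes that a custom protocol compiler binary can support Python output by registering an instance of this Generator with the CommandLineInterface in main(). A minimal sketch of such a main(), assuming the in-tree protobuf include layout; the help string is arbitrary, and the unit test below wires up generators in the same way:

#include <google/protobuf/compiler/command_line_interface.h>
#include <google/protobuf/compiler/python/python_generator.h>

int main(int argc, char* argv[]) {
  google::protobuf::compiler::CommandLineInterface cli;
  google::protobuf::compiler::python::Generator python_generator;
  // Passing --python_out=DIR to the resulting binary emits *_pb2.py files.
  cli.RegisterGenerator("--python_out", &python_generator,
                        "Generate Python source file.");
  return cli.Run(argc, argv);
}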
+ +#include +#include +#include +#include + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { +namespace python { +namespace { + +class TestGenerator : public CodeGenerator { + public: + TestGenerator() {} + ~TestGenerator() {} + + virtual bool Generate(const FileDescriptor* file, + const string& parameter, + GeneratorContext* context, + string* error) const { + TryInsert("test_pb2.py", "imports", context); + TryInsert("test_pb2.py", "module_scope", context); + TryInsert("test_pb2.py", "class_scope:foo.Bar", context); + TryInsert("test_pb2.py", "class_scope:foo.Bar.Baz", context); + return true; + } + + void TryInsert(const string& filename, const string& insertion_point, + GeneratorContext* context) const { + scoped_ptr output( + context->OpenForInsert(filename, insertion_point)); + io::Printer printer(output.get(), '$'); + printer.Print("// inserted $name$\n", "name", insertion_point); + } +}; + +// This test verifies that all the expected insertion points exist. It does +// not verify that they are correctly-placed; that would require actually +// compiling the output which is a bit more than I care to do for this test. +TEST(PythonPluginTest, PluginTest) { + File::WriteStringToFileOrDie( + "syntax = \"proto2\";\n" + "package foo;\n" + "message Bar {\n" + " message Baz {}\n" + "}\n", + TestTempDir() + "/test.proto"); + + google::protobuf::compiler::CommandLineInterface cli; + cli.SetInputsAreProtoPathRelative(true); + + python::Generator python_generator; + TestGenerator test_generator; + cli.RegisterGenerator("--python_out", &python_generator, ""); + cli.RegisterGenerator("--test_out", &test_generator, ""); + + string proto_path = "-I" + TestTempDir(); + string python_out = "--python_out=" + TestTempDir(); + string test_out = "--test_out=" + TestTempDir(); + + const char* argv[] = { + "protoc", + proto_path.c_str(), + python_out.c_str(), + test_out.c_str(), + "test.proto" + }; + + EXPECT_EQ(0, cli.Run(5, argv)); +} + +} // namespace +} // namespace python +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/subprocess.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/subprocess.cc new file mode 100644 index 0000000000..5fb5d5cb7d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/subprocess.cc @@ -0,0 +1,460 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) + +#include + +#include + +#ifndef _WIN32 +#include +#include +#include +#include +#endif + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { + +#ifdef _WIN32 + +static void CloseHandleOrDie(HANDLE handle) { + if (!CloseHandle(handle)) { + GOOGLE_LOG(FATAL) << "CloseHandle: " + << Subprocess::Win32ErrorMessage(GetLastError()); + } +} + +Subprocess::Subprocess() + : process_start_error_(ERROR_SUCCESS), + child_handle_(NULL), child_stdin_(NULL), child_stdout_(NULL) {} + +Subprocess::~Subprocess() { + if (child_stdin_ != NULL) { + CloseHandleOrDie(child_stdin_); + } + if (child_stdout_ != NULL) { + CloseHandleOrDie(child_stdout_); + } +} + +void Subprocess::Start(const string& program, SearchMode search_mode) { + // Create the pipes. + HANDLE stdin_pipe_read; + HANDLE stdin_pipe_write; + HANDLE stdout_pipe_read; + HANDLE stdout_pipe_write; + + if (!CreatePipe(&stdin_pipe_read, &stdin_pipe_write, NULL, 0)) { + GOOGLE_LOG(FATAL) << "CreatePipe: " << Win32ErrorMessage(GetLastError()); + } + if (!CreatePipe(&stdout_pipe_read, &stdout_pipe_write, NULL, 0)) { + GOOGLE_LOG(FATAL) << "CreatePipe: " << Win32ErrorMessage(GetLastError()); + } + + // Make child side of the pipes inheritable. + if (!SetHandleInformation(stdin_pipe_read, + HANDLE_FLAG_INHERIT, HANDLE_FLAG_INHERIT)) { + GOOGLE_LOG(FATAL) << "SetHandleInformation: " + << Win32ErrorMessage(GetLastError()); + } + if (!SetHandleInformation(stdout_pipe_write, + HANDLE_FLAG_INHERIT, HANDLE_FLAG_INHERIT)) { + GOOGLE_LOG(FATAL) << "SetHandleInformation: " + << Win32ErrorMessage(GetLastError()); + } + + // Setup STARTUPINFO to redirect handles. + STARTUPINFOA startup_info; + ZeroMemory(&startup_info, sizeof(startup_info)); + startup_info.cb = sizeof(startup_info); + startup_info.dwFlags = STARTF_USESTDHANDLES; + startup_info.hStdInput = stdin_pipe_read; + startup_info.hStdOutput = stdout_pipe_write; + startup_info.hStdError = GetStdHandle(STD_ERROR_HANDLE); + + if (startup_info.hStdError == INVALID_HANDLE_VALUE) { + GOOGLE_LOG(FATAL) << "GetStdHandle: " + << Win32ErrorMessage(GetLastError()); + } + + // CreateProcess() mutates its second parameter. WTF? + char* name_copy = strdup(program.c_str()); + + // Create the process. + PROCESS_INFORMATION process_info; + + if (CreateProcessA((search_mode == SEARCH_PATH) ? NULL : program.c_str(), + (search_mode == SEARCH_PATH) ? name_copy : NULL, + NULL, // process security attributes + NULL, // thread security attributes + TRUE, // inherit handles? 
+ 0, // obscure creation flags + NULL, // environment (inherit from parent) + NULL, // current directory (inherit from parent) + &startup_info, + &process_info)) { + child_handle_ = process_info.hProcess; + CloseHandleOrDie(process_info.hThread); + child_stdin_ = stdin_pipe_write; + child_stdout_ = stdout_pipe_read; + } else { + process_start_error_ = GetLastError(); + CloseHandleOrDie(stdin_pipe_write); + CloseHandleOrDie(stdout_pipe_read); + } + + CloseHandleOrDie(stdin_pipe_read); + CloseHandleOrDie(stdout_pipe_write); + free(name_copy); +} + +bool Subprocess::Communicate(const Message& input, Message* output, + string* error) { + if (process_start_error_ != ERROR_SUCCESS) { + *error = Win32ErrorMessage(process_start_error_); + return false; + } + + GOOGLE_CHECK(child_handle_ != NULL) << "Must call Start() first."; + + string input_data = input.SerializeAsString(); + string output_data; + + int input_pos = 0; + + while (child_stdout_ != NULL) { + HANDLE handles[2]; + int handle_count = 0; + + if (child_stdin_ != NULL) { + handles[handle_count++] = child_stdin_; + } + if (child_stdout_ != NULL) { + handles[handle_count++] = child_stdout_; + } + + DWORD wait_result = + WaitForMultipleObjects(handle_count, handles, FALSE, INFINITE); + + HANDLE signaled_handle; + if (wait_result >= WAIT_OBJECT_0 && + wait_result < WAIT_OBJECT_0 + handle_count) { + signaled_handle = handles[wait_result - WAIT_OBJECT_0]; + } else if (wait_result == WAIT_FAILED) { + GOOGLE_LOG(FATAL) << "WaitForMultipleObjects: " + << Win32ErrorMessage(GetLastError()); + } else { + GOOGLE_LOG(FATAL) << "WaitForMultipleObjects: Unexpected return code: " + << wait_result; + } + + if (signaled_handle == child_stdin_) { + DWORD n; + if (!WriteFile(child_stdin_, + input_data.data() + input_pos, + input_data.size() - input_pos, + &n, NULL)) { + // Child closed pipe. Presumably it will report an error later. + // Pretend we're done for now. + input_pos = input_data.size(); + } else { + input_pos += n; + } + + if (input_pos == input_data.size()) { + // We're done writing. Close. + CloseHandleOrDie(child_stdin_); + child_stdin_ = NULL; + } + } else if (signaled_handle == child_stdout_) { + char buffer[4096]; + DWORD n; + + if (!ReadFile(child_stdout_, buffer, sizeof(buffer), &n, NULL)) { + // We're done reading. Close. + CloseHandleOrDie(child_stdout_); + child_stdout_ = NULL; + } else { + output_data.append(buffer, n); + } + } + } + + if (child_stdin_ != NULL) { + // Child did not finish reading input before it closed the output. + // Presumably it exited with an error. 
+ CloseHandleOrDie(child_stdin_); + child_stdin_ = NULL; + } + + DWORD wait_result = WaitForSingleObject(child_handle_, INFINITE); + + if (wait_result == WAIT_FAILED) { + GOOGLE_LOG(FATAL) << "WaitForSingleObject: " + << Win32ErrorMessage(GetLastError()); + } else if (wait_result != WAIT_OBJECT_0) { + GOOGLE_LOG(FATAL) << "WaitForSingleObject: Unexpected return code: " + << wait_result; + } + + DWORD exit_code; + if (!GetExitCodeProcess(child_handle_, &exit_code)) { + GOOGLE_LOG(FATAL) << "GetExitCodeProcess: " + << Win32ErrorMessage(GetLastError()); + } + + CloseHandleOrDie(child_handle_); + child_handle_ = NULL; + + if (exit_code != 0) { + *error = strings::Substitute( + "Plugin failed with status code $0.", exit_code); + return false; + } + + if (!output->ParseFromString(output_data)) { + *error = "Plugin output is unparseable: " + CEscape(output_data); + return false; + } + + return true; +} + +string Subprocess::Win32ErrorMessage(DWORD error_code) { + char* message; + + // WTF? + FormatMessage(FORMAT_MESSAGE_ALLOCATE_BUFFER | + FORMAT_MESSAGE_FROM_SYSTEM | + FORMAT_MESSAGE_IGNORE_INSERTS, + NULL, error_code, 0, + (LPTSTR)&message, // NOT A BUG! + 0, NULL); + + string result = message; + LocalFree(message); + return result; +} + +// =================================================================== + +#else // _WIN32 + +Subprocess::Subprocess() + : child_pid_(-1), child_stdin_(-1), child_stdout_(-1) {} + +Subprocess::~Subprocess() { + if (child_stdin_ != -1) { + close(child_stdin_); + } + if (child_stdout_ != -1) { + close(child_stdout_); + } +} + +void Subprocess::Start(const string& program, SearchMode search_mode) { + // Note that we assume that there are no other threads, thus we don't have to + // do crazy stuff like using socket pairs or avoiding libc locks. + + // [0] is read end, [1] is write end. + int stdin_pipe[2]; + int stdout_pipe[2]; + + pipe(stdin_pipe); + pipe(stdout_pipe); + + char* argv[2] = { strdup(program.c_str()), NULL }; + + child_pid_ = fork(); + if (child_pid_ == -1) { + GOOGLE_LOG(FATAL) << "fork: " << strerror(errno); + } else if (child_pid_ == 0) { + // We are the child. + dup2(stdin_pipe[0], STDIN_FILENO); + dup2(stdout_pipe[1], STDOUT_FILENO); + + close(stdin_pipe[0]); + close(stdin_pipe[1]); + close(stdout_pipe[0]); + close(stdout_pipe[1]); + + switch (search_mode) { + case SEARCH_PATH: + execvp(argv[0], argv); + break; + case EXACT_NAME: + execv(argv[0], argv); + break; + } + + // Write directly to STDERR_FILENO to avoid stdio code paths that may do + // stuff that is unsafe here. + write(STDERR_FILENO, argv[0], strlen(argv[0])); + const char* message = ": program not found or is not executable\n"; + write(STDERR_FILENO, message, strlen(message)); + + // Must use _exit() rather than exit() to avoid flushing output buffers + // that will also be flushed by the parent. + _exit(1); + } else { + free(argv[0]); + + close(stdin_pipe[0]); + close(stdout_pipe[1]); + + child_stdin_ = stdin_pipe[1]; + child_stdout_ = stdout_pipe[0]; + } +} + +bool Subprocess::Communicate(const Message& input, Message* output, + string* error) { + + GOOGLE_CHECK_NE(child_stdin_, -1) << "Must call Start() first."; + + // The "sighandler_t" typedef is GNU-specific, so define our own. + typedef void SignalHandler(int); + + // Make sure SIGPIPE is disabled so that if the child dies it doesn't kill us. 
+ SignalHandler* old_pipe_handler = signal(SIGPIPE, SIG_IGN); + + string input_data = input.SerializeAsString(); + string output_data; + + int input_pos = 0; + int max_fd = max(child_stdin_, child_stdout_); + + while (child_stdout_ != -1) { + fd_set read_fds; + fd_set write_fds; + FD_ZERO(&read_fds); + FD_ZERO(&write_fds); + if (child_stdout_ != -1) { + FD_SET(child_stdout_, &read_fds); + } + if (child_stdin_ != -1) { + FD_SET(child_stdin_, &write_fds); + } + + if (select(max_fd + 1, &read_fds, &write_fds, NULL, NULL) < 0) { + if (errno == EINTR) { + // Interrupted by signal. Try again. + continue; + } else { + GOOGLE_LOG(FATAL) << "select: " << strerror(errno); + } + } + + if (child_stdin_ != -1 && FD_ISSET(child_stdin_, &write_fds)) { + int n = write(child_stdin_, input_data.data() + input_pos, + input_data.size() - input_pos); + if (n < 0) { + // Child closed pipe. Presumably it will report an error later. + // Pretend we're done for now. + input_pos = input_data.size(); + } else { + input_pos += n; + } + + if (input_pos == input_data.size()) { + // We're done writing. Close. + close(child_stdin_); + child_stdin_ = -1; + } + } + + if (child_stdout_ != -1 && FD_ISSET(child_stdout_, &read_fds)) { + char buffer[4096]; + int n = read(child_stdout_, buffer, sizeof(buffer)); + + if (n > 0) { + output_data.append(buffer, n); + } else { + // We're done reading. Close. + close(child_stdout_); + child_stdout_ = -1; + } + } + } + + if (child_stdin_ != -1) { + // Child did not finish reading input before it closed the output. + // Presumably it exited with an error. + close(child_stdin_); + child_stdin_ = -1; + } + + int status; + while (waitpid(child_pid_, &status, 0) == -1) { + if (errno != EINTR) { + GOOGLE_LOG(FATAL) << "waitpid: " << strerror(errno); + } + } + + // Restore SIGPIPE handling. + signal(SIGPIPE, old_pipe_handler); + + if (WIFEXITED(status)) { + if (WEXITSTATUS(status) != 0) { + int error_code = WEXITSTATUS(status); + *error = strings::Substitute( + "Plugin failed with status code $0.", error_code); + return false; + } + } else if (WIFSIGNALED(status)) { + int signal = WTERMSIG(status); + *error = strings::Substitute( + "Plugin killed by signal $0.", signal); + return false; + } else { + *error = "Neither WEXITSTATUS nor WTERMSIG is true?"; + return false; + } + + if (!output->ParseFromString(output_data)) { + *error = "Plugin output is unparseable."; + return false; + } + + return true; +} + +#endif // !_WIN32 + +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/subprocess.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/subprocess.h new file mode 100644 index 0000000000..005649640e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/subprocess.h @@ -0,0 +1,108 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) + +#ifndef GOOGLE_PROTOBUF_COMPILER_SUBPROCESS_H__ +#define GOOGLE_PROTOBUF_COMPILER_SUBPROCESS_H__ + +#ifdef _WIN32 +#define WIN32_LEAN_AND_MEAN // right... +#include +#else // _WIN32 +#include +#include +#endif // !_WIN32 +#include + +#include + + +namespace google { +namespace protobuf { + +class Message; + +namespace compiler { + +// Utility class for launching sub-processes. +class LIBPROTOC_EXPORT Subprocess { + public: + Subprocess(); + ~Subprocess(); + + enum SearchMode { + SEARCH_PATH, // Use PATH environment variable. + EXACT_NAME // Program is an exact file name; don't use the PATH. + }; + + // Start the subprocess. Currently we don't provide a way to specify + // arguments as protoc plugins don't have any. + void Start(const string& program, SearchMode search_mode); + + // Serialize the input message and pipe it to the subprocess's stdin, then + // close the pipe. Meanwhile, read from the subprocess's stdout and parse + // the data into *output. All this is done carefully to avoid deadlocks. + // Returns true if successful. On any sort of error, returns false and sets + // *error to a description of the problem. + bool Communicate(const Message& input, Message* output, string* error); + +#ifdef _WIN32 + // Given an error code, returns a human-readable error message. This is + // defined here so that CommandLineInterface can share it. + static string Win32ErrorMessage(DWORD error_code); +#endif + + private: +#ifdef _WIN32 + DWORD process_start_error_; + HANDLE child_handle_; + + // The file handles for our end of the child's pipes. We close each and + // set it to NULL when no longer needed. + HANDLE child_stdin_; + HANDLE child_stdout_; + +#else // _WIN32 + pid_t child_pid_; + + // The file descriptors for our end of the child's pipes. We close each and + // set it to -1 when no longer needed. 
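A minimal sketch of the call pattern this class exists for: write a serialized request to the child's stdin and parse its stdout as the response, as the Communicate() comment above describes. It assumes the plugin protocol messages from google/protobuf/compiler/plugin.pb.h; the plugin name and input file are placeholders:

#include <google/protobuf/compiler/plugin.pb.h>
#include <google/protobuf/compiler/subprocess.h>
#include <iostream>
#include <string>

int main() {
  google::protobuf::compiler::CodeGeneratorRequest request;
  google::protobuf::compiler::CodeGeneratorResponse response;
  request.add_file_to_generate("test.proto");  // placeholder input

  google::protobuf::compiler::Subprocess subprocess;
  // SEARCH_PATH resolves the program through $PATH; the name is a placeholder.
  subprocess.Start("protoc-gen-test",
                   google::protobuf::compiler::Subprocess::SEARCH_PATH);

  std::string error;
  if (!subprocess.Communicate(request, &response, &error)) {
    std::cerr << "plugin failed: " << error << std::endl;
    return 1;
  }
  std::cout << "plugin produced " << response.file_size() << " file(s)"
            << std::endl;
  return 0;
}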
+ int child_stdin_; + int child_stdout_; + +#endif // !_WIN32 +}; + +} // namespace compiler +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_COMPILER_SUBPROCESS_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/test_plugin.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/test_plugin.cc new file mode 100644 index 0000000000..5cbbf3d9df --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/test_plugin.cc @@ -0,0 +1,51 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// +// This is a dummy code generator plugin used by +// command_line_interface_unittest. + +#include +#include +#include +#include +#include + +int main(int argc, char* argv[]) { +#ifdef _MSC_VER + // Don't print a silly message or stick a modal dialog box in my face, + // please. + _set_abort_behavior(0, ~0); +#endif // !_MSC_VER + + google::protobuf::compiler::MockCodeGenerator generator("test_plugin"); + return google::protobuf::compiler::PluginMain(argc, argv, &generator); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/zip_output_unittest.sh b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/zip_output_unittest.sh new file mode 100644 index 0000000000..3a024364da --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/zip_output_unittest.sh @@ -0,0 +1,91 @@ +#!/bin/sh +# +# Protocol Buffers - Google's data interchange format +# Copyright 2009 Google Inc. All rights reserved. 
+# http://code.google.com/p/protobuf/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Author: kenton@google.com (Kenton Varda) +# +# Test protoc's zip output mode. + +fail() { + echo "$@" >&2 + exit 1 +} + +TEST_TMPDIR=. +PROTOC=./protoc + +echo ' + syntax = "proto2"; + option java_multiple_files = true; + option java_package = "test.jar"; + option java_outer_classname = "Outer"; + message Foo {} + message Bar {} +' > $TEST_TMPDIR/testzip.proto + +$PROTOC \ + --cpp_out=$TEST_TMPDIR/testzip.zip --python_out=$TEST_TMPDIR/testzip.zip \ + --java_out=$TEST_TMPDIR/testzip.jar -I$TEST_TMPDIR testzip.proto \ + || fail 'protoc failed.' + +echo "Testing output to zip..." +if unzip -h > /dev/null; then + unzip -t $TEST_TMPDIR/testzip.zip > $TEST_TMPDIR/testzip.list || fail 'unzip failed.' + + grep 'testing: testzip\.pb\.cc *OK$' $TEST_TMPDIR/testzip.list > /dev/null \ + || fail 'testzip.pb.cc not found in output zip.' + grep 'testing: testzip\.pb\.h *OK$' $TEST_TMPDIR/testzip.list > /dev/null \ + || fail 'testzip.pb.h not found in output zip.' + grep 'testing: testzip_pb2\.py *OK$' $TEST_TMPDIR/testzip.list > /dev/null \ + || fail 'testzip_pb2.py not found in output zip.' + grep -i 'manifest' $TEST_TMPDIR/testzip.list > /dev/null \ + && fail 'Zip file contained manifest.' +else + echo "Warning: 'unzip' command not available. Skipping test." +fi + +echo "Testing output to jar..." +if jar c $TEST_TMPDIR/testzip.proto > /dev/null; then + jar tf $TEST_TMPDIR/testzip.jar > $TEST_TMPDIR/testzip.list || fail 'jar failed.' + + grep '^test/jar/Foo\.java$' $TEST_TMPDIR/testzip.list > /dev/null \ + || fail 'Foo.java not found in output jar.' + grep '^test/jar/Bar\.java$' $TEST_TMPDIR/testzip.list > /dev/null \ + || fail 'Bar.java not found in output jar.' + grep '^test/jar/Outer\.java$' $TEST_TMPDIR/testzip.list > /dev/null \ + || fail 'Outer.java not found in output jar.' + grep '^META-INF/MANIFEST\.MF$' $TEST_TMPDIR/testzip.list > /dev/null \ + || fail 'Manifest not found in output jar.' +else + echo "Warning: 'jar' command not available. Skipping test." 
+fi + +echo PASS diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/zip_writer.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/zip_writer.cc new file mode 100644 index 0000000000..65d7352705 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/zip_writer.cc @@ -0,0 +1,218 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: ambrose@google.com (Ambrose Feinstein), +// kenton@google.com (Kenton Varda) +// +// Based on http://www.pkware.com/documents/casestudies/APPNOTE.TXT + +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { + +static const uint32 kCRC32Table[256] = { + 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, 0x706af48f, + 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988, + 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91, 0x1db71064, 0x6ab020f2, + 0xf3b97148, 0x84be41de, 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, + 0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9, + 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172, + 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, 0x35b5a8fa, 0x42b2986c, + 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59, + 0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, + 0xcfba9599, 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924, + 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190, 0x01db7106, + 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433, + 0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, + 0x91646c97, 0xe6635c01, 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, + 0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950, + 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65, + 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, 0x4adfa541, 0x3dd895d7, + 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0, + 0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, + 0xbe0b1010, 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f, + 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, 0x2eb40d81, + 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a, + 0xead54739, 0x9dd277af, 0x04db2615, 0x73dc1683, 0xe3630b12, 0x94643b84, + 0x0d6d6a3e, 0x7a6a5aa8, 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, + 0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb, + 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc, + 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, 0xd6d6a3e8, 0xa1d1937e, + 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b, + 0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, + 0x316e8eef, 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236, + 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe, 0xb2bd0b28, + 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d, + 0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a, 0x9c0906a9, 0xeb0e363f, + 0x72076785, 0x05005713, 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, + 0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242, + 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777, + 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, 
0x8f659eff, 0xf862ae69, + 0x616bffd3, 0x166ccf45, 0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2, + 0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, + 0x40df0b66, 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9, + 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605, 0xcdd70693, + 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94, + 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d +}; + +static uint32 ComputeCRC32(const string &buf) { + uint32 x = ~0U; + for (int i = 0; i < buf.size(); ++i) { + unsigned char c = buf[i]; + x = kCRC32Table[(x ^ c) & 0xff] ^ (x >> 8); + } + return ~x; +} + +static void WriteShort(io::CodedOutputStream *out, uint16 val) { + uint8 p[2]; + p[0] = static_cast(val); + p[1] = static_cast(val >> 8); + out->WriteRaw(p, 2); +} + +ZipWriter::ZipWriter(io::ZeroCopyOutputStream* raw_output) + : raw_output_(raw_output) {} +ZipWriter::~ZipWriter() {} + +bool ZipWriter::Write(const string& filename, const string& contents) { + FileInfo info; + + info.name = filename; + uint16 filename_size = filename.size(); + info.offset = raw_output_->ByteCount(); + info.size = contents.size(); + info.crc32 = ComputeCRC32(contents); + + files_.push_back(info); + + // write file header + io::CodedOutputStream output(raw_output_); + output.WriteLittleEndian32(0x04034b50); // magic + WriteShort(&output, 10); // version needed to extract + WriteShort(&output, 0); // flags + WriteShort(&output, 0); // compression method: stored + WriteShort(&output, 0); // last modified time + WriteShort(&output, 0); // last modified date + output.WriteLittleEndian32(info.crc32); // crc-32 + output.WriteLittleEndian32(info.size); // compressed size + output.WriteLittleEndian32(info.size); // uncompressed size + WriteShort(&output, filename_size); // file name length + WriteShort(&output, 0); // extra field length + output.WriteString(filename); // file name + output.WriteString(contents); // file data + + return !output.HadError(); +} + +bool ZipWriter::WriteDirectory() { + uint16 num_entries = files_.size(); + uint32 dir_ofs = raw_output_->ByteCount(); + + // write central directory + io::CodedOutputStream output(raw_output_); + for (int i = 0; i < num_entries; ++i) { + const string &filename = files_[i].name; + uint16 filename_size = filename.size(); + uint32 crc32 = files_[i].crc32; + uint32 size = files_[i].size; + uint32 offset = files_[i].offset; + + output.WriteLittleEndian32(0x02014b50); // magic + WriteShort(&output, 10); // version made by + WriteShort(&output, 10); // version needed to extract + WriteShort(&output, 0); // flags + WriteShort(&output, 0); // compression method: stored + WriteShort(&output, 0); // last modified time + WriteShort(&output, 0); // last modified date + output.WriteLittleEndian32(crc32); // crc-32 + output.WriteLittleEndian32(size); // compressed size + output.WriteLittleEndian32(size); // uncompressed size + WriteShort(&output, filename_size); // file name length + WriteShort(&output, 0); // extra field length + WriteShort(&output, 0); // file comment length + WriteShort(&output, 0); // starting disk number + WriteShort(&output, 0); // internal file attributes + output.WriteLittleEndian32(0); // external file attributes + output.WriteLittleEndian32(offset); // local header offset + output.WriteString(filename); // file name + } + uint32 dir_len = output.ByteCount(); + + // write end of central directory marker + output.WriteLittleEndian32(0x06054b50); // magic + WriteShort(&output, 0); // disk number + 
WriteShort(&output, 0); // disk with start of central directory + WriteShort(&output, num_entries); // central directory entries (this disk) + WriteShort(&output, num_entries); // central directory entries (total) + output.WriteLittleEndian32(dir_len); // central directory byte size + output.WriteLittleEndian32(dir_ofs); // central directory offset + WriteShort(&output, 0); // comment length + + return output.HadError(); +} + +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/zip_writer.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/zip_writer.h new file mode 100644 index 0000000000..be73972a6d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/compiler/zip_writer.h @@ -0,0 +1,93 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace compiler { + +class ZipWriter { + public: + ZipWriter(io::ZeroCopyOutputStream* raw_output); + ~ZipWriter(); + + bool Write(const string& filename, const string& contents); + bool WriteDirectory(); + + private: + struct FileInfo { + string name; + uint32 offset; + uint32 size; + uint32 crc32; + }; + + io::ZeroCopyOutputStream* raw_output_; + vector files_; +}; + +} // namespace compiler +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor.cc new file mode 100644 index 0000000000..754b56283e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor.cc @@ -0,0 +1,4497 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
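The ZipWriter declared in zip_writer.h above writes one stored (uncompressed) entry per Write() call and finishes with WriteDirectory(); any io::ZeroCopyOutputStream can back it. A short usage sketch, assuming io::FileOutputStream and an output name chosen purely for illustration. For reference, the table-driven checksum in zip_writer.cc is the standard reflected CRC-32 (polynomial 0xEDB88320), whose check value for the string "123456789" is 0xCBF43926.

#include <fcntl.h>

#include <google/protobuf/compiler/zip_writer.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>

int main() {
  // Illustrative output path; entries are stored, not deflated.
  int fd = open("example_out.zip", O_WRONLY | O_CREAT | O_TRUNC, 0644);
  if (fd < 0) return 1;

  google::protobuf::io::FileOutputStream out(fd);
  google::protobuf::compiler::ZipWriter writer(&out);

  writer.Write("hello.txt", "hello zip\n");   // local file header + data
  writer.Write("dir/other.txt", "more\n");
  writer.WriteDirectory();                    // central directory + end record

  out.Close();                                // flushes and closes the fd
  return 0;
}

One detail worth checking: as transcribed above, Write() returns !output.HadError() while WriteDirectory() returns output.HadError(); callers that rely on the boolean result should confirm the intended convention against the upstream source.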
+ +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#undef PACKAGE // autoheader #defines this. :( + +namespace google { +namespace protobuf { + +const FieldDescriptor::CppType +FieldDescriptor::kTypeToCppTypeMap[MAX_TYPE + 1] = { + static_cast(0), // 0 is reserved for errors + + CPPTYPE_DOUBLE, // TYPE_DOUBLE + CPPTYPE_FLOAT, // TYPE_FLOAT + CPPTYPE_INT64, // TYPE_INT64 + CPPTYPE_UINT64, // TYPE_UINT64 + CPPTYPE_INT32, // TYPE_INT32 + CPPTYPE_UINT64, // TYPE_FIXED64 + CPPTYPE_UINT32, // TYPE_FIXED32 + CPPTYPE_BOOL, // TYPE_BOOL + CPPTYPE_STRING, // TYPE_STRING + CPPTYPE_MESSAGE, // TYPE_GROUP + CPPTYPE_MESSAGE, // TYPE_MESSAGE + CPPTYPE_STRING, // TYPE_BYTES + CPPTYPE_UINT32, // TYPE_UINT32 + CPPTYPE_ENUM, // TYPE_ENUM + CPPTYPE_INT32, // TYPE_SFIXED32 + CPPTYPE_INT64, // TYPE_SFIXED64 + CPPTYPE_INT32, // TYPE_SINT32 + CPPTYPE_INT64, // TYPE_SINT64 +}; + +const char * const FieldDescriptor::kTypeToName[MAX_TYPE + 1] = { + "ERROR", // 0 is reserved for errors + + "double", // TYPE_DOUBLE + "float", // TYPE_FLOAT + "int64", // TYPE_INT64 + "uint64", // TYPE_UINT64 + "int32", // TYPE_INT32 + "fixed64", // TYPE_FIXED64 + "fixed32", // TYPE_FIXED32 + "bool", // TYPE_BOOL + "string", // TYPE_STRING + "group", // TYPE_GROUP + "message", // TYPE_MESSAGE + "bytes", // TYPE_BYTES + "uint32", // TYPE_UINT32 + "enum", // TYPE_ENUM + "sfixed32", // TYPE_SFIXED32 + "sfixed64", // TYPE_SFIXED64 + "sint32", // TYPE_SINT32 + "sint64", // TYPE_SINT64 +}; + +const char * const FieldDescriptor::kLabelToName[MAX_LABEL + 1] = { + "ERROR", // 0 is reserved for errors + + "optional", // LABEL_OPTIONAL + "required", // LABEL_REQUIRED + "repeated", // LABEL_REPEATED +}; + +#ifndef _MSC_VER // MSVC doesn't need these and won't even accept them. +const int FieldDescriptor::kMaxNumber; +const int FieldDescriptor::kFirstReservedNumber; +const int FieldDescriptor::kLastReservedNumber; +#endif + +namespace { + +const string kEmptyString; + +string ToCamelCase(const string& input) { + bool capitalize_next = false; + string result; + result.reserve(input.size()); + + for (int i = 0; i < input.size(); i++) { + if (input[i] == '_') { + capitalize_next = true; + } else if (capitalize_next) { + // Note: I distrust ctype.h due to locales. + if ('a' <= input[i] && input[i] <= 'z') { + result.push_back(input[i] - 'a' + 'A'); + } else { + result.push_back(input[i]); + } + capitalize_next = false; + } else { + result.push_back(input[i]); + } + } + + // Lower-case the first letter. + if (!result.empty() && 'A' <= result[0] && result[0] <= 'Z') { + result[0] = result[0] - 'A' + 'a'; + } + + return result; +} + +// A DescriptorPool contains a bunch of hash_maps to implement the +// various Find*By*() methods. Since hashtable lookups are O(1), it's +// most efficient to construct a fixed set of large hash_maps used by +// all objects in the pool rather than construct one or more small +// hash_maps for each object. +// +// The keys to these hash_maps are (parent, name) or (parent, number) +// pairs. Unfortunately STL doesn't provide hash functions for pair<>, +// so we must invent our own. +// +// TODO(kenton): Use StringPiece rather than const char* in keys? 
It would +// be a lot cleaner but we'd just have to convert it back to const char* +// for the open source release. + +typedef pair PointerStringPair; + +struct PointerStringPairEqual { + inline bool operator()(const PointerStringPair& a, + const PointerStringPair& b) const { + return a.first == b.first && strcmp(a.second, b.second) == 0; + } +}; + +template +struct PointerIntegerPairHash { + size_t operator()(const PairType& p) const { + // FIXME(kenton): What is the best way to compute this hash? I have + // no idea! This seems a bit better than an XOR. + return reinterpret_cast(p.first) * ((1 << 16) - 1) + p.second; + } + + // Used only by MSVC and platforms where hash_map is not available. + static const size_t bucket_size = 4; + static const size_t min_buckets = 8; + inline bool operator()(const PairType& a, const PairType& b) const { + return a.first < b.first || + (a.first == b.first && a.second < b.second); + } +}; + +typedef pair DescriptorIntPair; +typedef pair EnumIntPair; + +struct PointerStringPairHash { + size_t operator()(const PointerStringPair& p) const { + // FIXME(kenton): What is the best way to compute this hash? I have + // no idea! This seems a bit better than an XOR. + hash cstring_hash; + return reinterpret_cast(p.first) * ((1 << 16) - 1) + + cstring_hash(p.second); + } + + // Used only by MSVC and platforms where hash_map is not available. + static const size_t bucket_size = 4; + static const size_t min_buckets = 8; + inline bool operator()(const PointerStringPair& a, + const PointerStringPair& b) const { + if (a.first < b.first) return true; + if (a.first > b.first) return false; + return strcmp(a.second, b.second) < 0; + } +}; + + +struct Symbol { + enum Type { + NULL_SYMBOL, MESSAGE, FIELD, ENUM, ENUM_VALUE, SERVICE, METHOD, PACKAGE + }; + Type type; + union { + const Descriptor* descriptor; + const FieldDescriptor* field_descriptor; + const EnumDescriptor* enum_descriptor; + const EnumValueDescriptor* enum_value_descriptor; + const ServiceDescriptor* service_descriptor; + const MethodDescriptor* method_descriptor; + const FileDescriptor* package_file_descriptor; + }; + + inline Symbol() : type(NULL_SYMBOL) { descriptor = NULL; } + inline bool IsNull() const { return type == NULL_SYMBOL; } + inline bool IsType() const { + return type == MESSAGE || type == ENUM; + } + inline bool IsAggregate() const { + return type == MESSAGE || type == PACKAGE + || type == ENUM || type == SERVICE; + } + +#define CONSTRUCTOR(TYPE, TYPE_CONSTANT, FIELD) \ + inline explicit Symbol(const TYPE* value) { \ + type = TYPE_CONSTANT; \ + this->FIELD = value; \ + } + + CONSTRUCTOR(Descriptor , MESSAGE , descriptor ) + CONSTRUCTOR(FieldDescriptor , FIELD , field_descriptor ) + CONSTRUCTOR(EnumDescriptor , ENUM , enum_descriptor ) + CONSTRUCTOR(EnumValueDescriptor, ENUM_VALUE, enum_value_descriptor ) + CONSTRUCTOR(ServiceDescriptor , SERVICE , service_descriptor ) + CONSTRUCTOR(MethodDescriptor , METHOD , method_descriptor ) + CONSTRUCTOR(FileDescriptor , PACKAGE , package_file_descriptor) +#undef CONSTRUCTOR + + const FileDescriptor* GetFile() const { + switch (type) { + case NULL_SYMBOL: return NULL; + case MESSAGE : return descriptor ->file(); + case FIELD : return field_descriptor ->file(); + case ENUM : return enum_descriptor ->file(); + case ENUM_VALUE : return enum_value_descriptor->type()->file(); + case SERVICE : return service_descriptor ->file(); + case METHOD : return method_descriptor ->service()->file(); + case PACKAGE : return package_file_descriptor; + } + return NULL; + } 
+}; + +const Symbol kNullSymbol; + +typedef hash_map, streq> + SymbolsByNameMap; +typedef hash_map + SymbolsByParentMap; +typedef hash_map, streq> + FilesByNameMap; +typedef hash_map + FieldsByNameMap; +typedef hash_map > + FieldsByNumberMap; +typedef hash_map > + EnumValuesByNumberMap; +// This is a map rather than a hash_map, since we use it to iterate +// through all the extensions that extend a given Descriptor, and an +// ordered data structure that implements lower_bound is convenient +// for that. +typedef map + ExtensionsGroupedByDescriptorMap; + +} // anonymous namespace + +// =================================================================== +// DescriptorPool::Tables + +class DescriptorPool::Tables { + public: + Tables(); + ~Tables(); + + // Checkpoint the state of the tables. Future calls to Rollback() will + // return the Tables to this state. This is used when building files, since + // some kinds of validation errors cannot be detected until the file's + // descriptors have already been added to the tables. BuildFile() calls + // Checkpoint() before it starts building and Rollback() if it encounters + // an error. + void Checkpoint(); + + // Roll back the Tables to the state of the last Checkpoint(), removing + // everything that was added after that point. + void Rollback(); + + // The stack of files which are currently being built. Used to detect + // cyclic dependencies when loading files from a DescriptorDatabase. Not + // used when fallback_database_ == NULL. + vector pending_files_; + + // A set of files which we have tried to load from the fallback database + // and encountered errors. We will not attempt to load them again. + // Not used when fallback_database_ == NULL. + hash_set known_bad_files_; + + // The set of descriptors for which we've already loaded the full + // set of extensions numbers from fallback_database_. + hash_set extensions_loaded_from_db_; + + // ----------------------------------------------------------------- + // Finding items. + + // Find symbols. This returns a null Symbol (symbol.IsNull() is true) + // if not found. + inline Symbol FindSymbol(const string& key) const; + + // This implements the body of DescriptorPool::Find*ByName(). It should + // really be a private method of DescriptorPool, but that would require + // declaring Symbol in descriptor.h, which would drag all kinds of other + // stuff into the header. Yay C++. + Symbol FindByNameHelper( + const DescriptorPool* pool, const string& name) const; + + // These return NULL if not found. + inline const FileDescriptor* FindFile(const string& key) const; + inline const FieldDescriptor* FindExtension(const Descriptor* extendee, + int number); + inline void FindAllExtensions(const Descriptor* extendee, + vector* out) const; + + // ----------------------------------------------------------------- + // Adding items. + + // These add items to the corresponding tables. They return false if + // the key already exists in the table. For AddSymbol(), the string passed + // in must be one that was constructed using AllocateString(), as it will + // be used as a key in the symbols_by_name_ map without copying. + bool AddSymbol(const string& full_name, Symbol symbol); + bool AddFile(const FileDescriptor* file); + bool AddExtension(const FieldDescriptor* field); + + // ----------------------------------------------------------------- + // Allocating memory. + + // Allocate an object which will be reclaimed when the pool is + // destroyed. 
+  // destroyed.  Note that the object's destructor will never be called,
+  // so its fields must be plain old data (primitive data types and
+  // pointers).  All of the descriptor types are such objects.
+  template<typename Type> Type* Allocate();
+
+  // Allocate an array of objects which will be reclaimed when the
+  // pool is destroyed.  Again, destructors are never called.
+  template<typename Type> Type* AllocateArray(int count);
+
+  // Allocate a string which will be destroyed when the pool is destroyed.
+  // The string is initialized to the given value for convenience.
+  string* AllocateString(const string& value);
+
+  // Allocate a protocol message object.  Some older versions of GCC have
+  // trouble understanding explicit template instantiations in some cases, so
+  // in those cases we have to pass a dummy pointer of the right type as the
+  // parameter instead of specifying the type explicitly.
+  template<typename Type> Type* AllocateMessage(Type* dummy = NULL);
+
+  // Allocate a FileDescriptorTables object.
+  FileDescriptorTables* AllocateFileTables();
+
+ private:
+  vector<string*> strings_;    // All strings in the pool.
+  vector<Message*> messages_;  // All messages in the pool.
+  vector<FileDescriptorTables*> file_tables_;  // All file tables in the pool.
+  vector<void*> allocations_;  // All other memory allocated in the pool.
+
+  SymbolsByNameMap      symbols_by_name_;
+  FilesByNameMap        files_by_name_;
+  ExtensionsGroupedByDescriptorMap extensions_;
+
+  int strings_before_checkpoint_;
+  int messages_before_checkpoint_;
+  int file_tables_before_checkpoint_;
+  int allocations_before_checkpoint_;
+  vector<const char*> symbols_after_checkpoint_;
+  vector<const char*> files_after_checkpoint_;
+  vector<DescriptorIntPair> extensions_after_checkpoint_;
+
+  // Allocate some bytes which will be reclaimed when the pool is
+  // destroyed.
+  void* AllocateBytes(int size);
+};
+
+// Contains tables specific to a particular file.  These tables are not
+// modified once the file has been constructed, so they need not be
+// protected by a mutex.  This makes operations that depend only on the
+// contents of a single file -- e.g. Descriptor::FindFieldByName() --
+// lock-free.
+//
+// For historical reasons, the definitions of the methods of
+// FileDescriptorTables and DescriptorPool::Tables are interleaved below.
+// These used to be a single class.
+class FileDescriptorTables {
+ public:
+  FileDescriptorTables();
+  ~FileDescriptorTables();
+
+  // Empty table, used with placeholder files.
+  static const FileDescriptorTables kEmpty;
+
+  // -----------------------------------------------------------------
+  // Finding items.
+
+  // Find symbols.  These return a null Symbol (symbol.IsNull() is true)
+  // if not found.
+  inline Symbol FindNestedSymbol(const void* parent,
+                                 const string& name) const;
+  inline Symbol FindNestedSymbolOfType(const void* parent,
+                                       const string& name,
+                                       const Symbol::Type type) const;
+
+  // These return NULL if not found.
+  inline const FieldDescriptor* FindFieldByNumber(
+    const Descriptor* parent, int number) const;
+  inline const FieldDescriptor* FindFieldByLowercaseName(
+    const void* parent, const string& lowercase_name) const;
+  inline const FieldDescriptor* FindFieldByCamelcaseName(
+    const void* parent, const string& camelcase_name) const;
+  inline const EnumValueDescriptor* FindEnumValueByNumber(
+    const EnumDescriptor* parent, int number) const;
+
+  // -----------------------------------------------------------------
+  // Adding items.
+
+  // These add items to the corresponding tables.  They return false if
+  // the key already exists in the table.
For AddAliasUnderParent(), the + // string passed in must be one that was constructed using AllocateString(), + // as it will be used as a key in the symbols_by_parent_ map without copying. + bool AddAliasUnderParent(const void* parent, const string& name, + Symbol symbol); + bool AddFieldByNumber(const FieldDescriptor* field); + bool AddEnumValueByNumber(const EnumValueDescriptor* value); + + // Adds the field to the lowercase_name and camelcase_name maps. Never + // fails because we allow duplicates; the first field by the name wins. + void AddFieldByStylizedNames(const FieldDescriptor* field); + + private: + SymbolsByParentMap symbols_by_parent_; + FieldsByNameMap fields_by_lowercase_name_; + FieldsByNameMap fields_by_camelcase_name_; + FieldsByNumberMap fields_by_number_; // Not including extensions. + EnumValuesByNumberMap enum_values_by_number_; +}; + +DescriptorPool::Tables::Tables() + : strings_before_checkpoint_(0), + messages_before_checkpoint_(0), + allocations_before_checkpoint_(0) {} + +DescriptorPool::Tables::~Tables() { + // Note that the deletion order is important, since the destructors of some + // messages may refer to objects in allocations_. + STLDeleteElements(&messages_); + for (int i = 0; i < allocations_.size(); i++) { + operator delete(allocations_[i]); + } + STLDeleteElements(&strings_); + STLDeleteElements(&file_tables_); +} + +FileDescriptorTables::FileDescriptorTables() {} +FileDescriptorTables::~FileDescriptorTables() {} + +const FileDescriptorTables FileDescriptorTables::kEmpty; + +void DescriptorPool::Tables::Checkpoint() { + strings_before_checkpoint_ = strings_.size(); + messages_before_checkpoint_ = messages_.size(); + file_tables_before_checkpoint_ = file_tables_.size(); + allocations_before_checkpoint_ = allocations_.size(); + + symbols_after_checkpoint_.clear(); + files_after_checkpoint_.clear(); + extensions_after_checkpoint_.clear(); +} + +void DescriptorPool::Tables::Rollback() { + for (int i = 0; i < symbols_after_checkpoint_.size(); i++) { + symbols_by_name_.erase(symbols_after_checkpoint_[i]); + } + for (int i = 0; i < files_after_checkpoint_.size(); i++) { + files_by_name_.erase(files_after_checkpoint_[i]); + } + for (int i = 0; i < extensions_after_checkpoint_.size(); i++) { + extensions_.erase(extensions_after_checkpoint_[i]); + } + + symbols_after_checkpoint_.clear(); + files_after_checkpoint_.clear(); + extensions_after_checkpoint_.clear(); + + STLDeleteContainerPointers( + strings_.begin() + strings_before_checkpoint_, strings_.end()); + STLDeleteContainerPointers( + messages_.begin() + messages_before_checkpoint_, messages_.end()); + STLDeleteContainerPointers( + file_tables_.begin() + file_tables_before_checkpoint_, file_tables_.end()); + for (int i = allocations_before_checkpoint_; i < allocations_.size(); i++) { + operator delete(allocations_[i]); + } + + strings_.resize(strings_before_checkpoint_); + messages_.resize(messages_before_checkpoint_); + file_tables_.resize(file_tables_before_checkpoint_); + allocations_.resize(allocations_before_checkpoint_); +} + +// ------------------------------------------------------------------- + +inline Symbol DescriptorPool::Tables::FindSymbol(const string& key) const { + const Symbol* result = FindOrNull(symbols_by_name_, key.c_str()); + if (result == NULL) { + return kNullSymbol; + } else { + return *result; + } +} + +inline Symbol FileDescriptorTables::FindNestedSymbol( + const void* parent, const string& name) const { + const Symbol* result = + FindOrNull(symbols_by_parent_, 
PointerStringPair(parent, name.c_str())); + if (result == NULL) { + return kNullSymbol; + } else { + return *result; + } +} + +inline Symbol FileDescriptorTables::FindNestedSymbolOfType( + const void* parent, const string& name, const Symbol::Type type) const { + Symbol result = FindNestedSymbol(parent, name); + if (result.type != type) return kNullSymbol; + return result; +} + +Symbol DescriptorPool::Tables::FindByNameHelper( + const DescriptorPool* pool, const string& name) const { + MutexLockMaybe lock(pool->mutex_); + Symbol result = FindSymbol(name); + + if (result.IsNull() && pool->underlay_ != NULL) { + // Symbol not found; check the underlay. + result = + pool->underlay_->tables_->FindByNameHelper(pool->underlay_, name); + } + + if (result.IsNull()) { + // Symbol still not found, so check fallback database. + if (pool->TryFindSymbolInFallbackDatabase(name)) { + result = FindSymbol(name); + } + } + + return result; +} + +inline const FileDescriptor* DescriptorPool::Tables::FindFile( + const string& key) const { + return FindPtrOrNull(files_by_name_, key.c_str()); +} + +inline const FieldDescriptor* FileDescriptorTables::FindFieldByNumber( + const Descriptor* parent, int number) const { + return FindPtrOrNull(fields_by_number_, make_pair(parent, number)); +} + +inline const FieldDescriptor* FileDescriptorTables::FindFieldByLowercaseName( + const void* parent, const string& lowercase_name) const { + return FindPtrOrNull(fields_by_lowercase_name_, + PointerStringPair(parent, lowercase_name.c_str())); +} + +inline const FieldDescriptor* FileDescriptorTables::FindFieldByCamelcaseName( + const void* parent, const string& camelcase_name) const { + return FindPtrOrNull(fields_by_camelcase_name_, + PointerStringPair(parent, camelcase_name.c_str())); +} + +inline const EnumValueDescriptor* FileDescriptorTables::FindEnumValueByNumber( + const EnumDescriptor* parent, int number) const { + return FindPtrOrNull(enum_values_by_number_, make_pair(parent, number)); +} + +inline const FieldDescriptor* DescriptorPool::Tables::FindExtension( + const Descriptor* extendee, int number) { + return FindPtrOrNull(extensions_, make_pair(extendee, number)); +} + +inline void DescriptorPool::Tables::FindAllExtensions( + const Descriptor* extendee, vector* out) const { + ExtensionsGroupedByDescriptorMap::const_iterator it = + extensions_.lower_bound(make_pair(extendee, 0)); + for (; it != extensions_.end() && it->first.first == extendee; ++it) { + out->push_back(it->second); + } +} + +// ------------------------------------------------------------------- + +bool DescriptorPool::Tables::AddSymbol( + const string& full_name, Symbol symbol) { + if (InsertIfNotPresent(&symbols_by_name_, full_name.c_str(), symbol)) { + symbols_after_checkpoint_.push_back(full_name.c_str()); + return true; + } else { + return false; + } +} + +bool FileDescriptorTables::AddAliasUnderParent( + const void* parent, const string& name, Symbol symbol) { + PointerStringPair by_parent_key(parent, name.c_str()); + return InsertIfNotPresent(&symbols_by_parent_, by_parent_key, symbol); +} + +bool DescriptorPool::Tables::AddFile(const FileDescriptor* file) { + if (InsertIfNotPresent(&files_by_name_, file->name().c_str(), file)) { + files_after_checkpoint_.push_back(file->name().c_str()); + return true; + } else { + return false; + } +} + +void FileDescriptorTables::AddFieldByStylizedNames( + const FieldDescriptor* field) { + const void* parent; + if (field->is_extension()) { + if (field->extension_scope() == NULL) { + parent = field->file(); + } 
else { + parent = field->extension_scope(); + } + } else { + parent = field->containing_type(); + } + + PointerStringPair lowercase_key(parent, field->lowercase_name().c_str()); + InsertIfNotPresent(&fields_by_lowercase_name_, lowercase_key, field); + + PointerStringPair camelcase_key(parent, field->camelcase_name().c_str()); + InsertIfNotPresent(&fields_by_camelcase_name_, camelcase_key, field); +} + +bool FileDescriptorTables::AddFieldByNumber(const FieldDescriptor* field) { + DescriptorIntPair key(field->containing_type(), field->number()); + return InsertIfNotPresent(&fields_by_number_, key, field); +} + +bool FileDescriptorTables::AddEnumValueByNumber( + const EnumValueDescriptor* value) { + EnumIntPair key(value->type(), value->number()); + return InsertIfNotPresent(&enum_values_by_number_, key, value); +} + +bool DescriptorPool::Tables::AddExtension(const FieldDescriptor* field) { + DescriptorIntPair key(field->containing_type(), field->number()); + if (InsertIfNotPresent(&extensions_, key, field)) { + extensions_after_checkpoint_.push_back(key); + return true; + } else { + return false; + } +} + +// ------------------------------------------------------------------- + +template +Type* DescriptorPool::Tables::Allocate() { + return reinterpret_cast(AllocateBytes(sizeof(Type))); +} + +template +Type* DescriptorPool::Tables::AllocateArray(int count) { + return reinterpret_cast(AllocateBytes(sizeof(Type) * count)); +} + +string* DescriptorPool::Tables::AllocateString(const string& value) { + string* result = new string(value); + strings_.push_back(result); + return result; +} + +template +Type* DescriptorPool::Tables::AllocateMessage(Type* dummy) { + Type* result = new Type; + messages_.push_back(result); + return result; +} + +FileDescriptorTables* DescriptorPool::Tables::AllocateFileTables() { + FileDescriptorTables* result = new FileDescriptorTables; + file_tables_.push_back(result); + return result; +} + +void* DescriptorPool::Tables::AllocateBytes(int size) { + // TODO(kenton): Would it be worthwhile to implement this in some more + // sophisticated way? Probably not for the open source release, but for + // internal use we could easily plug in one of our existing memory pool + // allocators... + if (size == 0) return NULL; + + void* result = operator new(size); + allocations_.push_back(result); + return result; +} + +// =================================================================== +// DescriptorPool + +DescriptorPool::ErrorCollector::~ErrorCollector() {} + +DescriptorPool::DescriptorPool() + : mutex_(NULL), + fallback_database_(NULL), + default_error_collector_(NULL), + underlay_(NULL), + tables_(new Tables), + enforce_dependencies_(true), + allow_unknown_(false) {} + +DescriptorPool::DescriptorPool(DescriptorDatabase* fallback_database, + ErrorCollector* error_collector) + : mutex_(new Mutex), + fallback_database_(fallback_database), + default_error_collector_(error_collector), + underlay_(NULL), + tables_(new Tables), + enforce_dependencies_(true), + allow_unknown_(false) { +} + +DescriptorPool::DescriptorPool(const DescriptorPool* underlay) + : mutex_(NULL), + fallback_database_(NULL), + default_error_collector_(NULL), + underlay_(underlay), + tables_(new Tables), + enforce_dependencies_(true), + allow_unknown_(false) {} + +DescriptorPool::~DescriptorPool() { + if (mutex_ != NULL) delete mutex_; +} + +// DescriptorPool::BuildFile() defined later. +// DescriptorPool::BuildFileCollectingErrors() defined later. 
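+
+// Rough usage sketch (example only; the file name, package, and message name
+// below are made up, not taken from this code): a standalone DescriptorPool
+// can be handed FileDescriptorProtos directly, which exercises the Tables
+// allocation and checkpoint/rollback machinery above.
+//
+//   FileDescriptorProto file_proto;
+//   file_proto.set_name("example.proto");
+//   file_proto.set_package("example");
+//   file_proto.add_message_type()->set_name("Foo");
+//
+//   DescriptorPool pool;
+//   const FileDescriptor* file = pool.BuildFile(file_proto);
+//   // On success, the new symbols are resolvable through the pool:
+//   const Descriptor* foo = pool.FindMessageTypeByName("example.Foo");
+//
+// If BuildFile() hits a validation error partway through, Rollback() discards
+// everything added since the matching Checkpoint(), so a failed build returns
+// NULL and leaves the pool's tables unchanged.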
+ +void DescriptorPool::InternalDontEnforceDependencies() { + enforce_dependencies_ = false; +} + +bool DescriptorPool::InternalIsFileLoaded(const string& filename) const { + MutexLockMaybe lock(mutex_); + return tables_->FindFile(filename) != NULL; +} + +// generated_pool ==================================================== + +namespace { + + +EncodedDescriptorDatabase* generated_database_ = NULL; +DescriptorPool* generated_pool_ = NULL; +GOOGLE_PROTOBUF_DECLARE_ONCE(generated_pool_init_); + +void DeleteGeneratedPool() { + delete generated_database_; + generated_database_ = NULL; + delete generated_pool_; + generated_pool_ = NULL; +} + +void InitGeneratedPool() { + generated_database_ = new EncodedDescriptorDatabase; + generated_pool_ = new DescriptorPool(generated_database_); + + internal::OnShutdown(&DeleteGeneratedPool); +} + +inline void InitGeneratedPoolOnce() { + ::google::protobuf::GoogleOnceInit(&generated_pool_init_, &InitGeneratedPool); +} + +} // anonymous namespace + +const DescriptorPool* DescriptorPool::generated_pool() { + InitGeneratedPoolOnce(); + return generated_pool_; +} + +DescriptorPool* DescriptorPool::internal_generated_pool() { + InitGeneratedPoolOnce(); + return generated_pool_; +} + +void DescriptorPool::InternalAddGeneratedFile( + const void* encoded_file_descriptor, int size) { + // So, this function is called in the process of initializing the + // descriptors for generated proto classes. Each generated .pb.cc file + // has an internal procedure called AddDescriptors() which is called at + // process startup, and that function calls this one in order to register + // the raw bytes of the FileDescriptorProto representing the file. + // + // We do not actually construct the descriptor objects right away. We just + // hang on to the bytes until they are actually needed. We actually construct + // the descriptor the first time one of the following things happens: + // * Someone calls a method like descriptor(), GetDescriptor(), or + // GetReflection() on the generated types, which requires returning the + // descriptor or an object based on it. + // * Someone looks up the descriptor in DescriptorPool::generated_pool(). + // + // Once one of these happens, the DescriptorPool actually parses the + // FileDescriptorProto and generates a FileDescriptor (and all its children) + // based on it. + // + // Note that FileDescriptorProto is itself a generated protocol message. + // Therefore, when we parse one, we have to be very careful to avoid using + // any descriptor-based operations, since this might cause infinite recursion + // or deadlock. + InitGeneratedPoolOnce(); + GOOGLE_CHECK(generated_database_->Add(encoded_file_descriptor, size)); +} + + +// Find*By* methods ================================================== + +// TODO(kenton): There's a lot of repeated code here, but I'm not sure if +// there's any good way to factor it out. Think about this some time when +// there's nothing more important to do (read: never). 
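+
+// For example (illustrative only; any generated type behaves the same way),
+// the first lookup through the generated pool is what actually triggers
+// parsing and cross-linking of the registered bytes:
+//
+//   const Descriptor* d = DescriptorPool::generated_pool()
+//       ->FindMessageTypeByName("google.protobuf.FileDescriptorProto");
+//   // descriptor.proto is built here, on first use, not during the
+//   // AddDescriptors() calls at static-initialization time.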
+ +const FileDescriptor* DescriptorPool::FindFileByName(const string& name) const { + MutexLockMaybe lock(mutex_); + const FileDescriptor* result = tables_->FindFile(name); + if (result != NULL) return result; + if (underlay_ != NULL) { + const FileDescriptor* result = underlay_->FindFileByName(name); + if (result != NULL) return result; + } + if (TryFindFileInFallbackDatabase(name)) { + const FileDescriptor* result = tables_->FindFile(name); + if (result != NULL) return result; + } + return NULL; +} + +const FileDescriptor* DescriptorPool::FindFileContainingSymbol( + const string& symbol_name) const { + MutexLockMaybe lock(mutex_); + Symbol result = tables_->FindSymbol(symbol_name); + if (!result.IsNull()) return result.GetFile(); + if (underlay_ != NULL) { + const FileDescriptor* result = + underlay_->FindFileContainingSymbol(symbol_name); + if (result != NULL) return result; + } + if (TryFindSymbolInFallbackDatabase(symbol_name)) { + Symbol result = tables_->FindSymbol(symbol_name); + if (!result.IsNull()) return result.GetFile(); + } + return NULL; +} + +const Descriptor* DescriptorPool::FindMessageTypeByName( + const string& name) const { + Symbol result = tables_->FindByNameHelper(this, name); + return (result.type == Symbol::MESSAGE) ? result.descriptor : NULL; +} + +const FieldDescriptor* DescriptorPool::FindFieldByName( + const string& name) const { + Symbol result = tables_->FindByNameHelper(this, name); + if (result.type == Symbol::FIELD && + !result.field_descriptor->is_extension()) { + return result.field_descriptor; + } else { + return NULL; + } +} + +const FieldDescriptor* DescriptorPool::FindExtensionByName( + const string& name) const { + Symbol result = tables_->FindByNameHelper(this, name); + if (result.type == Symbol::FIELD && + result.field_descriptor->is_extension()) { + return result.field_descriptor; + } else { + return NULL; + } +} + +const EnumDescriptor* DescriptorPool::FindEnumTypeByName( + const string& name) const { + Symbol result = tables_->FindByNameHelper(this, name); + return (result.type == Symbol::ENUM) ? result.enum_descriptor : NULL; +} + +const EnumValueDescriptor* DescriptorPool::FindEnumValueByName( + const string& name) const { + Symbol result = tables_->FindByNameHelper(this, name); + return (result.type == Symbol::ENUM_VALUE) ? + result.enum_value_descriptor : NULL; +} + +const ServiceDescriptor* DescriptorPool::FindServiceByName( + const string& name) const { + Symbol result = tables_->FindByNameHelper(this, name); + return (result.type == Symbol::SERVICE) ? result.service_descriptor : NULL; +} + +const MethodDescriptor* DescriptorPool::FindMethodByName( + const string& name) const { + Symbol result = tables_->FindByNameHelper(this, name); + return (result.type == Symbol::METHOD) ? 
result.method_descriptor : NULL; +} + +const FieldDescriptor* DescriptorPool::FindExtensionByNumber( + const Descriptor* extendee, int number) const { + MutexLockMaybe lock(mutex_); + const FieldDescriptor* result = tables_->FindExtension(extendee, number); + if (result != NULL) { + return result; + } + if (underlay_ != NULL) { + const FieldDescriptor* result = + underlay_->FindExtensionByNumber(extendee, number); + if (result != NULL) return result; + } + if (TryFindExtensionInFallbackDatabase(extendee, number)) { + const FieldDescriptor* result = tables_->FindExtension(extendee, number); + if (result != NULL) { + return result; + } + } + return NULL; +} + +void DescriptorPool::FindAllExtensions( + const Descriptor* extendee, vector* out) const { + MutexLockMaybe lock(mutex_); + + // Initialize tables_->extensions_ from the fallback database first + // (but do this only once per descriptor). + if (fallback_database_ != NULL && + tables_->extensions_loaded_from_db_.count(extendee) == 0) { + vector numbers; + if (fallback_database_->FindAllExtensionNumbers(extendee->full_name(), + &numbers)) { + for (int i = 0; i < numbers.size(); ++i) { + int number = numbers[i]; + if (tables_->FindExtension(extendee, number) == NULL) { + TryFindExtensionInFallbackDatabase(extendee, number); + } + } + tables_->extensions_loaded_from_db_.insert(extendee); + } + } + + tables_->FindAllExtensions(extendee, out); + if (underlay_ != NULL) { + underlay_->FindAllExtensions(extendee, out); + } +} + +// ------------------------------------------------------------------- + +const FieldDescriptor* +Descriptor::FindFieldByNumber(int key) const { + const FieldDescriptor* result = + file()->tables_->FindFieldByNumber(this, key); + if (result == NULL || result->is_extension()) { + return NULL; + } else { + return result; + } +} + +const FieldDescriptor* +Descriptor::FindFieldByLowercaseName(const string& key) const { + const FieldDescriptor* result = + file()->tables_->FindFieldByLowercaseName(this, key); + if (result == NULL || result->is_extension()) { + return NULL; + } else { + return result; + } +} + +const FieldDescriptor* +Descriptor::FindFieldByCamelcaseName(const string& key) const { + const FieldDescriptor* result = + file()->tables_->FindFieldByCamelcaseName(this, key); + if (result == NULL || result->is_extension()) { + return NULL; + } else { + return result; + } +} + +const FieldDescriptor* +Descriptor::FindFieldByName(const string& key) const { + Symbol result = + file()->tables_->FindNestedSymbolOfType(this, key, Symbol::FIELD); + if (!result.IsNull() && !result.field_descriptor->is_extension()) { + return result.field_descriptor; + } else { + return NULL; + } +} + +const FieldDescriptor* +Descriptor::FindExtensionByName(const string& key) const { + Symbol result = + file()->tables_->FindNestedSymbolOfType(this, key, Symbol::FIELD); + if (!result.IsNull() && result.field_descriptor->is_extension()) { + return result.field_descriptor; + } else { + return NULL; + } +} + +const FieldDescriptor* +Descriptor::FindExtensionByLowercaseName(const string& key) const { + const FieldDescriptor* result = + file()->tables_->FindFieldByLowercaseName(this, key); + if (result == NULL || !result->is_extension()) { + return NULL; + } else { + return result; + } +} + +const FieldDescriptor* +Descriptor::FindExtensionByCamelcaseName(const string& key) const { + const FieldDescriptor* result = + file()->tables_->FindFieldByCamelcaseName(this, key); + if (result == NULL || !result->is_extension()) { + return NULL; + } else { + 
return result; + } +} + +const Descriptor* +Descriptor::FindNestedTypeByName(const string& key) const { + Symbol result = + file()->tables_->FindNestedSymbolOfType(this, key, Symbol::MESSAGE); + if (!result.IsNull()) { + return result.descriptor; + } else { + return NULL; + } +} + +const EnumDescriptor* +Descriptor::FindEnumTypeByName(const string& key) const { + Symbol result = + file()->tables_->FindNestedSymbolOfType(this, key, Symbol::ENUM); + if (!result.IsNull()) { + return result.enum_descriptor; + } else { + return NULL; + } +} + +const EnumValueDescriptor* +Descriptor::FindEnumValueByName(const string& key) const { + Symbol result = + file()->tables_->FindNestedSymbolOfType(this, key, Symbol::ENUM_VALUE); + if (!result.IsNull()) { + return result.enum_value_descriptor; + } else { + return NULL; + } +} + +const EnumValueDescriptor* +EnumDescriptor::FindValueByName(const string& key) const { + Symbol result = + file()->tables_->FindNestedSymbolOfType(this, key, Symbol::ENUM_VALUE); + if (!result.IsNull()) { + return result.enum_value_descriptor; + } else { + return NULL; + } +} + +const EnumValueDescriptor* +EnumDescriptor::FindValueByNumber(int key) const { + return file()->tables_->FindEnumValueByNumber(this, key); +} + +const MethodDescriptor* +ServiceDescriptor::FindMethodByName(const string& key) const { + Symbol result = + file()->tables_->FindNestedSymbolOfType(this, key, Symbol::METHOD); + if (!result.IsNull()) { + return result.method_descriptor; + } else { + return NULL; + } +} + +const Descriptor* +FileDescriptor::FindMessageTypeByName(const string& key) const { + Symbol result = tables_->FindNestedSymbolOfType(this, key, Symbol::MESSAGE); + if (!result.IsNull()) { + return result.descriptor; + } else { + return NULL; + } +} + +const EnumDescriptor* +FileDescriptor::FindEnumTypeByName(const string& key) const { + Symbol result = tables_->FindNestedSymbolOfType(this, key, Symbol::ENUM); + if (!result.IsNull()) { + return result.enum_descriptor; + } else { + return NULL; + } +} + +const EnumValueDescriptor* +FileDescriptor::FindEnumValueByName(const string& key) const { + Symbol result = + tables_->FindNestedSymbolOfType(this, key, Symbol::ENUM_VALUE); + if (!result.IsNull()) { + return result.enum_value_descriptor; + } else { + return NULL; + } +} + +const ServiceDescriptor* +FileDescriptor::FindServiceByName(const string& key) const { + Symbol result = tables_->FindNestedSymbolOfType(this, key, Symbol::SERVICE); + if (!result.IsNull()) { + return result.service_descriptor; + } else { + return NULL; + } +} + +const FieldDescriptor* +FileDescriptor::FindExtensionByName(const string& key) const { + Symbol result = tables_->FindNestedSymbolOfType(this, key, Symbol::FIELD); + if (!result.IsNull() && result.field_descriptor->is_extension()) { + return result.field_descriptor; + } else { + return NULL; + } +} + +const FieldDescriptor* +FileDescriptor::FindExtensionByLowercaseName(const string& key) const { + const FieldDescriptor* result = tables_->FindFieldByLowercaseName(this, key); + if (result == NULL || !result->is_extension()) { + return NULL; + } else { + return result; + } +} + +const FieldDescriptor* +FileDescriptor::FindExtensionByCamelcaseName(const string& key) const { + const FieldDescriptor* result = tables_->FindFieldByCamelcaseName(this, key); + if (result == NULL || !result->is_extension()) { + return NULL; + } else { + return result; + } +} + +bool Descriptor::IsExtensionNumber(int number) const { + // Linear search should be fine because we don't expect a 
message to have + // more than a couple extension ranges. + for (int i = 0; i < extension_range_count(); i++) { + if (number >= extension_range(i)->start && + number < extension_range(i)->end) { + return true; + } + } + return false; +} + +// ------------------------------------------------------------------- + +bool DescriptorPool::TryFindFileInFallbackDatabase(const string& name) const { + if (fallback_database_ == NULL) return false; + + if (tables_->known_bad_files_.count(name) > 0) return false; + + FileDescriptorProto file_proto; + if (!fallback_database_->FindFileByName(name, &file_proto) || + BuildFileFromDatabase(file_proto) == NULL) { + tables_->known_bad_files_.insert(name); + return false; + } + + return true; +} + +bool DescriptorPool::TryFindSymbolInFallbackDatabase(const string& name) const { + if (fallback_database_ == NULL) return false; + + FileDescriptorProto file_proto; + if (!fallback_database_->FindFileContainingSymbol(name, &file_proto)) { + return false; + } + + if (tables_->FindFile(file_proto.name()) != NULL) { + // We've already loaded this file, and it apparently doesn't contain the + // symbol we're looking for. Some DescriptorDatabases return false + // positives. + return false; + } + + if (BuildFileFromDatabase(file_proto) == NULL) { + return false; + } + + return true; +} + +bool DescriptorPool::TryFindExtensionInFallbackDatabase( + const Descriptor* containing_type, int field_number) const { + if (fallback_database_ == NULL) return false; + + FileDescriptorProto file_proto; + if (!fallback_database_->FindFileContainingExtension( + containing_type->full_name(), field_number, &file_proto)) { + return false; + } + + if (tables_->FindFile(file_proto.name()) != NULL) { + // We've already loaded this file, and it apparently doesn't contain the + // extension we're looking for. Some DescriptorDatabases return false + // positives. + return false; + } + + if (BuildFileFromDatabase(file_proto) == NULL) { + return false; + } + + return true; +} + +// =================================================================== + +string FieldDescriptor::DefaultValueAsString(bool quote_string_type) const { + GOOGLE_CHECK(has_default_value()) << "No default value"; + switch (cpp_type()) { + case CPPTYPE_INT32: + return SimpleItoa(default_value_int32()); + break; + case CPPTYPE_INT64: + return SimpleItoa(default_value_int64()); + break; + case CPPTYPE_UINT32: + return SimpleItoa(default_value_uint32()); + break; + case CPPTYPE_UINT64: + return SimpleItoa(default_value_uint64()); + break; + case CPPTYPE_FLOAT: + return SimpleFtoa(default_value_float()); + break; + case CPPTYPE_DOUBLE: + return SimpleDtoa(default_value_double()); + break; + case CPPTYPE_BOOL: + return default_value_bool() ? 
"true" : "false"; + break; + case CPPTYPE_STRING: + if (quote_string_type) { + return "\"" + CEscape(default_value_string()) + "\""; + } else { + if (type() == TYPE_BYTES) { + return CEscape(default_value_string()); + } else { + return default_value_string(); + } + } + break; + case CPPTYPE_ENUM: + return default_value_enum()->name(); + break; + case CPPTYPE_MESSAGE: + GOOGLE_LOG(DFATAL) << "Messages can't have default values!"; + break; + } + GOOGLE_LOG(FATAL) << "Can't get here: failed to get default value as string"; + return ""; +} + +// CopyTo methods ==================================================== + +void FileDescriptor::CopyTo(FileDescriptorProto* proto) const { + proto->set_name(name()); + if (!package().empty()) proto->set_package(package()); + + for (int i = 0; i < dependency_count(); i++) { + proto->add_dependency(dependency(i)->name()); + } + + for (int i = 0; i < message_type_count(); i++) { + message_type(i)->CopyTo(proto->add_message_type()); + } + for (int i = 0; i < enum_type_count(); i++) { + enum_type(i)->CopyTo(proto->add_enum_type()); + } + for (int i = 0; i < service_count(); i++) { + service(i)->CopyTo(proto->add_service()); + } + for (int i = 0; i < extension_count(); i++) { + extension(i)->CopyTo(proto->add_extension()); + } + + if (&options() != &FileOptions::default_instance()) { + proto->mutable_options()->CopyFrom(options()); + } +} + +void Descriptor::CopyTo(DescriptorProto* proto) const { + proto->set_name(name()); + + for (int i = 0; i < field_count(); i++) { + field(i)->CopyTo(proto->add_field()); + } + for (int i = 0; i < nested_type_count(); i++) { + nested_type(i)->CopyTo(proto->add_nested_type()); + } + for (int i = 0; i < enum_type_count(); i++) { + enum_type(i)->CopyTo(proto->add_enum_type()); + } + for (int i = 0; i < extension_range_count(); i++) { + DescriptorProto::ExtensionRange* range = proto->add_extension_range(); + range->set_start(extension_range(i)->start); + range->set_end(extension_range(i)->end); + } + for (int i = 0; i < extension_count(); i++) { + extension(i)->CopyTo(proto->add_extension()); + } + + if (&options() != &MessageOptions::default_instance()) { + proto->mutable_options()->CopyFrom(options()); + } +} + +void FieldDescriptor::CopyTo(FieldDescriptorProto* proto) const { + proto->set_name(name()); + proto->set_number(number()); + + // Some compilers do not allow static_cast directly between two enum types, + // so we must cast to int first. + proto->set_label(static_cast( + implicit_cast(label()))); + proto->set_type(static_cast( + implicit_cast(type()))); + + if (is_extension()) { + if (!containing_type()->is_unqualified_placeholder_) { + proto->set_extendee("."); + } + proto->mutable_extendee()->append(containing_type()->full_name()); + } + + if (cpp_type() == CPPTYPE_MESSAGE) { + if (message_type()->is_placeholder_) { + // We don't actually know if the type is a message type. It could be + // an enum. 
+ proto->clear_type(); + } + + if (!message_type()->is_unqualified_placeholder_) { + proto->set_type_name("."); + } + proto->mutable_type_name()->append(message_type()->full_name()); + } else if (cpp_type() == CPPTYPE_ENUM) { + if (!enum_type()->is_unqualified_placeholder_) { + proto->set_type_name("."); + } + proto->mutable_type_name()->append(enum_type()->full_name()); + } + + if (has_default_value()) { + proto->set_default_value(DefaultValueAsString(false)); + } + + if (&options() != &FieldOptions::default_instance()) { + proto->mutable_options()->CopyFrom(options()); + } +} + +void EnumDescriptor::CopyTo(EnumDescriptorProto* proto) const { + proto->set_name(name()); + + for (int i = 0; i < value_count(); i++) { + value(i)->CopyTo(proto->add_value()); + } + + if (&options() != &EnumOptions::default_instance()) { + proto->mutable_options()->CopyFrom(options()); + } +} + +void EnumValueDescriptor::CopyTo(EnumValueDescriptorProto* proto) const { + proto->set_name(name()); + proto->set_number(number()); + + if (&options() != &EnumValueOptions::default_instance()) { + proto->mutable_options()->CopyFrom(options()); + } +} + +void ServiceDescriptor::CopyTo(ServiceDescriptorProto* proto) const { + proto->set_name(name()); + + for (int i = 0; i < method_count(); i++) { + method(i)->CopyTo(proto->add_method()); + } + + if (&options() != &ServiceOptions::default_instance()) { + proto->mutable_options()->CopyFrom(options()); + } +} + +void MethodDescriptor::CopyTo(MethodDescriptorProto* proto) const { + proto->set_name(name()); + + if (!input_type()->is_unqualified_placeholder_) { + proto->set_input_type("."); + } + proto->mutable_input_type()->append(input_type()->full_name()); + + if (!output_type()->is_unqualified_placeholder_) { + proto->set_output_type("."); + } + proto->mutable_output_type()->append(output_type()->full_name()); + + if (&options() != &MethodOptions::default_instance()) { + proto->mutable_options()->CopyFrom(options()); + } +} + +// DebugString methods =============================================== + +namespace { + +// Used by each of the option formatters. +bool RetrieveOptions(const Message &options, vector *option_entries) { + option_entries->clear(); + const Reflection* reflection = options.GetReflection(); + vector fields; + reflection->ListFields(options, &fields); + for (int i = 0; i < fields.size(); i++) { + // Doesn't make sense to have message type fields here + if (fields[i]->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + continue; + } + int count = 1; + bool repeated = false; + if (fields[i]->is_repeated()) { + count = reflection->FieldSize(options, fields[i]); + repeated = true; + } + for (int j = 0; j < count; j++) { + string fieldval; + TextFormat::PrintFieldValueToString(options, fields[i], + repeated ? count : -1, &fieldval); + option_entries->push_back(fields[i]->name() + " = " + fieldval); + } + } + return !option_entries->empty(); +} + +// Formats options that all appear together in brackets. Does not include +// brackets. 
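+// For example (the field name and number are illustrative), a field declared
+// as
+//
+//   repeated int32 samples = 4 [packed = true, deprecated = true];
+//
+// has its FieldOptions retrieved above as the entries "packed = true" and
+// "deprecated = true"; FormatBracketedOptions() joins them back into the
+// comma-separated bracketed list used by FieldDescriptor::DebugString().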
+bool FormatBracketedOptions(const Message &options, string *output) { + vector all_options; + if (RetrieveOptions(options, &all_options)) { + output->append(JoinStrings(all_options, ", ")); + } + return !all_options.empty(); +} + +// Formats options one per line +bool FormatLineOptions(int depth, const Message &options, string *output) { + string prefix(depth * 2, ' '); + vector all_options; + if (RetrieveOptions(options, &all_options)) { + for (int i = 0; i < all_options.size(); i++) { + strings::SubstituteAndAppend(output, "$0option $1;\n", + prefix, all_options[i]); + } + } + return !all_options.empty(); +} + +} // anonymous namespace + +string FileDescriptor::DebugString() const { + string contents = "syntax = \"proto2\";\n\n"; + + for (int i = 0; i < dependency_count(); i++) { + strings::SubstituteAndAppend(&contents, "import \"$0\";\n", + dependency(i)->name()); + } + + if (!package().empty()) { + strings::SubstituteAndAppend(&contents, "package $0;\n\n", package()); + } + + if (FormatLineOptions(0, options(), &contents)) { + contents.append("\n"); // add some space if we had options + } + + for (int i = 0; i < enum_type_count(); i++) { + enum_type(i)->DebugString(0, &contents); + contents.append("\n"); + } + + // Find all the 'group' type extensions; we will not output their nested + // definitions (those will be done with their group field descriptor). + set groups; + for (int i = 0; i < extension_count(); i++) { + if (extension(i)->type() == FieldDescriptor::TYPE_GROUP) { + groups.insert(extension(i)->message_type()); + } + } + + for (int i = 0; i < message_type_count(); i++) { + if (groups.count(message_type(i)) == 0) { + strings::SubstituteAndAppend(&contents, "message $0", + message_type(i)->name()); + message_type(i)->DebugString(0, &contents); + contents.append("\n"); + } + } + + for (int i = 0; i < service_count(); i++) { + service(i)->DebugString(&contents); + contents.append("\n"); + } + + const Descriptor* containing_type = NULL; + for (int i = 0; i < extension_count(); i++) { + if (extension(i)->containing_type() != containing_type) { + if (i > 0) contents.append("}\n\n"); + containing_type = extension(i)->containing_type(); + strings::SubstituteAndAppend(&contents, "extend .$0 {\n", + containing_type->full_name()); + } + extension(i)->DebugString(1, &contents); + } + if (extension_count() > 0) contents.append("}\n\n"); + + return contents; +} + +string Descriptor::DebugString() const { + string contents; + strings::SubstituteAndAppend(&contents, "message $0", name()); + DebugString(0, &contents); + return contents; +} + +void Descriptor::DebugString(int depth, string *contents) const { + string prefix(depth * 2, ' '); + ++depth; + contents->append(" {\n"); + + FormatLineOptions(depth, options(), contents); + + // Find all the 'group' types for fields and extensions; we will not output + // their nested definitions (those will be done with their group field + // descriptor). 
+ set groups; + for (int i = 0; i < field_count(); i++) { + if (field(i)->type() == FieldDescriptor::TYPE_GROUP) { + groups.insert(field(i)->message_type()); + } + } + for (int i = 0; i < extension_count(); i++) { + if (extension(i)->type() == FieldDescriptor::TYPE_GROUP) { + groups.insert(extension(i)->message_type()); + } + } + + for (int i = 0; i < nested_type_count(); i++) { + if (groups.count(nested_type(i)) == 0) { + strings::SubstituteAndAppend(contents, "$0 message $1", + prefix, nested_type(i)->name()); + nested_type(i)->DebugString(depth, contents); + } + } + for (int i = 0; i < enum_type_count(); i++) { + enum_type(i)->DebugString(depth, contents); + } + for (int i = 0; i < field_count(); i++) { + field(i)->DebugString(depth, contents); + } + + for (int i = 0; i < extension_range_count(); i++) { + strings::SubstituteAndAppend(contents, "$0 extensions $1 to $2;\n", + prefix, + extension_range(i)->start, + extension_range(i)->end - 1); + } + + // Group extensions by what they extend, so they can be printed out together. + const Descriptor* containing_type = NULL; + for (int i = 0; i < extension_count(); i++) { + if (extension(i)->containing_type() != containing_type) { + if (i > 0) strings::SubstituteAndAppend(contents, "$0 }\n", prefix); + containing_type = extension(i)->containing_type(); + strings::SubstituteAndAppend(contents, "$0 extend .$1 {\n", + prefix, containing_type->full_name()); + } + extension(i)->DebugString(depth + 1, contents); + } + if (extension_count() > 0) + strings::SubstituteAndAppend(contents, "$0 }\n", prefix); + + strings::SubstituteAndAppend(contents, "$0}\n", prefix); +} + +string FieldDescriptor::DebugString() const { + string contents; + int depth = 0; + if (is_extension()) { + strings::SubstituteAndAppend(&contents, "extend .$0 {\n", + containing_type()->full_name()); + depth = 1; + } + DebugString(depth, &contents); + if (is_extension()) { + contents.append("}\n"); + } + return contents; +} + +void FieldDescriptor::DebugString(int depth, string *contents) const { + string prefix(depth * 2, ' '); + string field_type; + switch (type()) { + case TYPE_MESSAGE: + field_type = "." + message_type()->full_name(); + break; + case TYPE_ENUM: + field_type = "." + enum_type()->full_name(); + break; + default: + field_type = kTypeToName[type()]; + } + + strings::SubstituteAndAppend(contents, "$0$1 $2 $3 = $4", + prefix, + kLabelToName[label()], + field_type, + type() == TYPE_GROUP ? message_type()->name() : + name(), + number()); + + bool bracketed = false; + if (has_default_value()) { + bracketed = true; + strings::SubstituteAndAppend(contents, " [default = $0", + DefaultValueAsString(true)); + } + + string formatted_options; + if (FormatBracketedOptions(options(), &formatted_options)) { + contents->append(bracketed ? 
", " : " ["); + bracketed = true; + contents->append(formatted_options); + } + + if (bracketed) { + contents->append("]"); + } + + if (type() == TYPE_GROUP) { + message_type()->DebugString(depth, contents); + } else { + contents->append(";\n"); + } +} + +string EnumDescriptor::DebugString() const { + string contents; + DebugString(0, &contents); + return contents; +} + +void EnumDescriptor::DebugString(int depth, string *contents) const { + string prefix(depth * 2, ' '); + ++depth; + strings::SubstituteAndAppend(contents, "$0enum $1 {\n", + prefix, name()); + + FormatLineOptions(depth, options(), contents); + + for (int i = 0; i < value_count(); i++) { + value(i)->DebugString(depth, contents); + } + strings::SubstituteAndAppend(contents, "$0}\n", prefix); +} + +string EnumValueDescriptor::DebugString() const { + string contents; + DebugString(0, &contents); + return contents; +} + +void EnumValueDescriptor::DebugString(int depth, string *contents) const { + string prefix(depth * 2, ' '); + strings::SubstituteAndAppend(contents, "$0$1 = $2", + prefix, name(), number()); + + string formatted_options; + if (FormatBracketedOptions(options(), &formatted_options)) { + strings::SubstituteAndAppend(contents, " [$0]", formatted_options); + } + contents->append(";\n"); +} + +string ServiceDescriptor::DebugString() const { + string contents; + DebugString(&contents); + return contents; +} + +void ServiceDescriptor::DebugString(string *contents) const { + strings::SubstituteAndAppend(contents, "service $0 {\n", name()); + + FormatLineOptions(1, options(), contents); + + for (int i = 0; i < method_count(); i++) { + method(i)->DebugString(1, contents); + } + + contents->append("}\n"); +} + +string MethodDescriptor::DebugString() const { + string contents; + DebugString(0, &contents); + return contents; +} + +void MethodDescriptor::DebugString(int depth, string *contents) const { + string prefix(depth * 2, ' '); + ++depth; + strings::SubstituteAndAppend(contents, "$0rpc $1(.$2) returns (.$3)", + prefix, name(), + input_type()->full_name(), + output_type()->full_name()); + + string formatted_options; + if (FormatLineOptions(depth, options(), &formatted_options)) { + strings::SubstituteAndAppend(contents, " {\n$0$1}\n", + formatted_options, prefix); + } else { + contents->append(";\n"); + } +} +// =================================================================== + +namespace { + +// Represents an options message to interpret. Extension names in the option +// name are respolved relative to name_scope. element_name and orig_opt are +// used only for error reporting (since the parser records locations against +// pointers in the original options, not the mutable copy). The Message must be +// one of the Options messages in descriptor.proto. 
+struct OptionsToInterpret { + OptionsToInterpret(const string& ns, + const string& el, + const Message* orig_opt, + Message* opt) + : name_scope(ns), + element_name(el), + original_options(orig_opt), + options(opt) { + } + string name_scope; + string element_name; + const Message* original_options; + Message* options; +}; + +} // namespace + +class DescriptorBuilder { + public: + DescriptorBuilder(const DescriptorPool* pool, + DescriptorPool::Tables* tables, + DescriptorPool::ErrorCollector* error_collector); + ~DescriptorBuilder(); + + const FileDescriptor* BuildFile(const FileDescriptorProto& proto); + + private: + friend class OptionInterpreter; + + const DescriptorPool* pool_; + DescriptorPool::Tables* tables_; // for convenience + DescriptorPool::ErrorCollector* error_collector_; + + // As we build descriptors we store copies of the options messages in + // them. We put pointers to those copies in this vector, as we build, so we + // can later (after cross-linking) interpret those options. + vector options_to_interpret_; + + bool had_errors_; + string filename_; + FileDescriptor* file_; + FileDescriptorTables* file_tables_; + + // If LookupSymbol() finds a symbol that is in a file which is not a declared + // dependency of this file, it will fail, but will set + // possible_undeclared_dependency_ to point at that file. This is only used + // by AddNotDefinedError() to report a more useful error message. + // possible_undeclared_dependency_name_ is the name of the symbol that was + // actually found in possible_undeclared_dependency_, which may be a parent + // of the symbol actually looked for. + const FileDescriptor* possible_undeclared_dependency_; + string possible_undeclared_dependency_name_; + + void AddError(const string& element_name, + const Message& descriptor, + DescriptorPool::ErrorCollector::ErrorLocation location, + const string& error); + + // Adds an error indicating that undefined_symbol was not defined. Must + // only be called after LookupSymbol() fails. + void AddNotDefinedError( + const string& element_name, + const Message& descriptor, + DescriptorPool::ErrorCollector::ErrorLocation location, + const string& undefined_symbol); + + // Silly helper which determines if the given file is in the given package. + // I.e., either file->package() == package_name or file->package() is a + // nested package within package_name. + bool IsInPackage(const FileDescriptor* file, const string& package_name); + + // Like tables_->FindSymbol(), but additionally: + // - Search the pool's underlay if not found in tables_. + // - Insure that the resulting Symbol is from one of the file's declared + // dependencies. + Symbol FindSymbol(const string& name); + + // Like FindSymbol() but does not require that the symbol is in one of the + // file's declared dependencies. + Symbol FindSymbolNotEnforcingDeps(const string& name); + + // Like FindSymbol(), but looks up the name relative to some other symbol + // name. This first searches siblings of relative_to, then siblings of its + // parents, etc. For example, LookupSymbol("foo.bar", "baz.qux.corge") makes + // the following calls, returning the first non-null result: + // FindSymbol("baz.qux.foo.bar"), FindSymbol("baz.foo.bar"), + // FindSymbol("foo.bar"). If AllowUnknownDependencies() has been called + // on the DescriptorPool, this will generate a placeholder type if + // the name is not found (unless the name itself is malformed). 
The + // placeholder_type parameter indicates what kind of placeholder should be + // constructed in this case. The resolve_mode parameter determines whether + // any symbol is returned, or only symbols that are types. Note, however, + // that LookupSymbol may still return a non-type symbol in LOOKUP_TYPES mode, + // if it believes that's all it could refer to. The caller should always + // check that it receives the type of symbol it was expecting. + enum PlaceholderType { + PLACEHOLDER_MESSAGE, + PLACEHOLDER_ENUM, + PLACEHOLDER_EXTENDABLE_MESSAGE + }; + enum ResolveMode { + LOOKUP_ALL, LOOKUP_TYPES + }; + Symbol LookupSymbol(const string& name, const string& relative_to, + PlaceholderType placeholder_type = PLACEHOLDER_MESSAGE, + ResolveMode resolve_mode = LOOKUP_ALL); + + // Like LookupSymbol() but will not return a placeholder even if + // AllowUnknownDependencies() has been used. + Symbol LookupSymbolNoPlaceholder(const string& name, + const string& relative_to, + ResolveMode resolve_mode = LOOKUP_ALL); + + // Creates a placeholder type suitable for return from LookupSymbol(). May + // return kNullSymbol if the name is not a valid type name. + Symbol NewPlaceholder(const string& name, PlaceholderType placeholder_type); + + // Creates a placeholder file. Never returns NULL. This is used when an + // import is not found and AllowUnknownDependencies() is enabled. + const FileDescriptor* NewPlaceholderFile(const string& name); + + // Calls tables_->AddSymbol() and records an error if it fails. Returns + // true if successful or false if failed, though most callers can ignore + // the return value since an error has already been recorded. + bool AddSymbol(const string& full_name, + const void* parent, const string& name, + const Message& proto, Symbol symbol); + + // Like AddSymbol(), but succeeds if the symbol is already defined as long + // as the existing definition is also a package (because it's OK to define + // the same package in two different files). Also adds all parents of the + // packgae to the symbol table (e.g. AddPackage("foo.bar", ...) will add + // "foo.bar" and "foo" to the table). + void AddPackage(const string& name, const Message& proto, + const FileDescriptor* file); + + // Checks that the symbol name contains only alphanumeric characters and + // underscores. Records an error otherwise. + void ValidateSymbolName(const string& name, const string& full_name, + const Message& proto); + + // Like ValidateSymbolName(), but the name is allowed to contain periods and + // an error is indicated by returning false (not recording the error). + bool ValidateQualifiedName(const string& name); + + // Used by BUILD_ARRAY macro (below) to avoid having to have the type + // specified as a macro parameter. + template + inline void AllocateArray(int size, Type** output) { + *output = tables_->AllocateArray(size); + } + + // Allocates a copy of orig_options in tables_ and stores it in the + // descriptor. Remembers its uninterpreted options, to be interpreted + // later. DescriptorT must be one of the Descriptor messages from + // descriptor.proto. + template void AllocateOptions( + const typename DescriptorT::OptionsType& orig_options, + DescriptorT* descriptor); + // Specialization for FileOptions. + void AllocateOptions(const FileOptions& orig_options, + FileDescriptor* descriptor); + + // Implementation for AllocateOptions(). Don't call this directly. 
+ template void AllocateOptionsImpl( + const string& name_scope, + const string& element_name, + const typename DescriptorT::OptionsType& orig_options, + DescriptorT* descriptor); + + // These methods all have the same signature for the sake of the BUILD_ARRAY + // macro, below. + void BuildMessage(const DescriptorProto& proto, + const Descriptor* parent, + Descriptor* result); + void BuildFieldOrExtension(const FieldDescriptorProto& proto, + const Descriptor* parent, + FieldDescriptor* result, + bool is_extension); + void BuildField(const FieldDescriptorProto& proto, + const Descriptor* parent, + FieldDescriptor* result) { + BuildFieldOrExtension(proto, parent, result, false); + } + void BuildExtension(const FieldDescriptorProto& proto, + const Descriptor* parent, + FieldDescriptor* result) { + BuildFieldOrExtension(proto, parent, result, true); + } + void BuildExtensionRange(const DescriptorProto::ExtensionRange& proto, + const Descriptor* parent, + Descriptor::ExtensionRange* result); + void BuildEnum(const EnumDescriptorProto& proto, + const Descriptor* parent, + EnumDescriptor* result); + void BuildEnumValue(const EnumValueDescriptorProto& proto, + const EnumDescriptor* parent, + EnumValueDescriptor* result); + void BuildService(const ServiceDescriptorProto& proto, + const void* dummy, + ServiceDescriptor* result); + void BuildMethod(const MethodDescriptorProto& proto, + const ServiceDescriptor* parent, + MethodDescriptor* result); + + // Must be run only after building. + // + // NOTE: Options will not be available during cross-linking, as they + // have not yet been interpreted. Defer any handling of options to the + // Validate*Options methods. + void CrossLinkFile(FileDescriptor* file, const FileDescriptorProto& proto); + void CrossLinkMessage(Descriptor* message, const DescriptorProto& proto); + void CrossLinkField(FieldDescriptor* field, + const FieldDescriptorProto& proto); + void CrossLinkEnum(EnumDescriptor* enum_type, + const EnumDescriptorProto& proto); + void CrossLinkEnumValue(EnumValueDescriptor* enum_value, + const EnumValueDescriptorProto& proto); + void CrossLinkService(ServiceDescriptor* service, + const ServiceDescriptorProto& proto); + void CrossLinkMethod(MethodDescriptor* method, + const MethodDescriptorProto& proto); + + // Must be run only after cross-linking. + void InterpretOptions(); + + // A helper class for interpreting options. + class OptionInterpreter { + public: + // Creates an interpreter that operates in the context of the pool of the + // specified builder, which must not be NULL. We don't take ownership of the + // builder. + explicit OptionInterpreter(DescriptorBuilder* builder); + + ~OptionInterpreter(); + + // Interprets the uninterpreted options in the specified Options message. + // On error, calls AddError() on the underlying builder and returns false. + // Otherwise returns true. + bool InterpretOptions(OptionsToInterpret* options_to_interpret); + + class AggregateOptionFinder; + + private: + // Interprets uninterpreted_option_ on the specified message, which + // must be the mutable copy of the original options message to which + // uninterpreted_option_ belongs. + bool InterpretSingleOption(Message* options); + + // Adds the uninterpreted_option to the given options message verbatim. + // Used when AllowUnknownDependencies() is in effect and we can't find + // the option's definition. 
+ void AddWithoutInterpreting(const UninterpretedOption& uninterpreted_option, + Message* options); + + // A recursive helper function that drills into the intermediate fields + // in unknown_fields to check if field innermost_field is set on the + // innermost message. Returns false and sets an error if so. + bool ExamineIfOptionIsSet( + vector::const_iterator intermediate_fields_iter, + vector::const_iterator intermediate_fields_end, + const FieldDescriptor* innermost_field, const string& debug_msg_name, + const UnknownFieldSet& unknown_fields); + + // Validates the value for the option field of the currently interpreted + // option and then sets it on the unknown_field. + bool SetOptionValue(const FieldDescriptor* option_field, + UnknownFieldSet* unknown_fields); + + // Parses an aggregate value for a CPPTYPE_MESSAGE option and + // saves it into *unknown_fields. + bool SetAggregateOption(const FieldDescriptor* option_field, + UnknownFieldSet* unknown_fields); + + // Convenience functions to set an int field the right way, depending on + // its wire type (a single int CppType can represent multiple wire types). + void SetInt32(int number, int32 value, FieldDescriptor::Type type, + UnknownFieldSet* unknown_fields); + void SetInt64(int number, int64 value, FieldDescriptor::Type type, + UnknownFieldSet* unknown_fields); + void SetUInt32(int number, uint32 value, FieldDescriptor::Type type, + UnknownFieldSet* unknown_fields); + void SetUInt64(int number, uint64 value, FieldDescriptor::Type type, + UnknownFieldSet* unknown_fields); + + // A helper function that adds an error at the specified location of the + // option we're currently interpreting, and returns false. + bool AddOptionError(DescriptorPool::ErrorCollector::ErrorLocation location, + const string& msg) { + builder_->AddError(options_to_interpret_->element_name, + *uninterpreted_option_, location, msg); + return false; + } + + // A helper function that adds an error at the location of the option name + // and returns false. + bool AddNameError(const string& msg) { + return AddOptionError(DescriptorPool::ErrorCollector::OPTION_NAME, msg); + } + + // A helper function that adds an error at the location of the option name + // and returns false. + bool AddValueError(const string& msg) { + return AddOptionError(DescriptorPool::ErrorCollector::OPTION_VALUE, msg); + } + + // We interpret against this builder's pool. Is never NULL. We don't own + // this pointer. + DescriptorBuilder* builder_; + + // The options we're currently interpreting, or NULL if we're not in a call + // to InterpretOptions. + const OptionsToInterpret* options_to_interpret_; + + // The option we're currently interpreting within options_to_interpret_, or + // NULL if we're not in a call to InterpretOptions(). This points to a + // submessage of the original option, not the mutable copy. Therefore we + // can use it to find locations recorded by the parser. + const UninterpretedOption* uninterpreted_option_; + + // Factory used to create the dynamic messages we need to parse + // any aggregate option values we encounter. + DynamicMessageFactory dynamic_factory_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(OptionInterpreter); + }; + + // Work-around for broken compilers: According to the C++ standard, + // OptionInterpreter should have access to the private members of any class + // which has declared DescriptorBuilder as a friend. Unfortunately some old + // versions of GCC and other compilers do not implement this correctly. 
So, + // we have to have these intermediate methods to provide access. We also + // redundantly declare OptionInterpreter a friend just to make things extra + // clear for these bad compilers. + friend class OptionInterpreter; + friend class OptionInterpreter::AggregateOptionFinder; + + static inline bool get_allow_unknown(const DescriptorPool* pool) { + return pool->allow_unknown_; + } + static inline bool get_is_placeholder(const Descriptor* descriptor) { + return descriptor->is_placeholder_; + } + static inline void assert_mutex_held(const DescriptorPool* pool) { + if (pool->mutex_ != NULL) { + pool->mutex_->AssertHeld(); + } + } + + // Must be run only after options have been interpreted. + // + // NOTE: Validation code must only reference the options in the mutable + // descriptors, which are the ones that have been interpreted. The const + // proto references are passed in only so they can be provided to calls to + // AddError(). Do not look at their options, which have not been interpreted. + void ValidateFileOptions(FileDescriptor* file, + const FileDescriptorProto& proto); + void ValidateMessageOptions(Descriptor* message, + const DescriptorProto& proto); + void ValidateFieldOptions(FieldDescriptor* field, + const FieldDescriptorProto& proto); + void ValidateEnumOptions(EnumDescriptor* enm, + const EnumDescriptorProto& proto); + void ValidateEnumValueOptions(EnumValueDescriptor* enum_value, + const EnumValueDescriptorProto& proto); + void ValidateServiceOptions(ServiceDescriptor* service, + const ServiceDescriptorProto& proto); + void ValidateMethodOptions(MethodDescriptor* method, + const MethodDescriptorProto& proto); + + void ValidateMapKey(FieldDescriptor* field, + const FieldDescriptorProto& proto); +}; + +const FileDescriptor* DescriptorPool::BuildFile( + const FileDescriptorProto& proto) { + GOOGLE_CHECK(fallback_database_ == NULL) + << "Cannot call BuildFile on a DescriptorPool that uses a " + "DescriptorDatabase. You must instead find a way to get your file " + "into the underlying database."; + GOOGLE_CHECK(mutex_ == NULL); // Implied by the above GOOGLE_CHECK. + return DescriptorBuilder(this, tables_.get(), NULL).BuildFile(proto); +} + +const FileDescriptor* DescriptorPool::BuildFileCollectingErrors( + const FileDescriptorProto& proto, + ErrorCollector* error_collector) { + GOOGLE_CHECK(fallback_database_ == NULL) + << "Cannot call BuildFile on a DescriptorPool that uses a " + "DescriptorDatabase. You must instead find a way to get your file " + "into the underlying database."; + GOOGLE_CHECK(mutex_ == NULL); // Implied by the above GOOGLE_CHECK. 
+ return DescriptorBuilder(this, tables_.get(), + error_collector).BuildFile(proto); +} + +const FileDescriptor* DescriptorPool::BuildFileFromDatabase( + const FileDescriptorProto& proto) const { + mutex_->AssertHeld(); + return DescriptorBuilder(this, tables_.get(), + default_error_collector_).BuildFile(proto); +} + +DescriptorBuilder::DescriptorBuilder( + const DescriptorPool* pool, + DescriptorPool::Tables* tables, + DescriptorPool::ErrorCollector* error_collector) + : pool_(pool), + tables_(tables), + error_collector_(error_collector), + had_errors_(false), + possible_undeclared_dependency_(NULL) {} + +DescriptorBuilder::~DescriptorBuilder() {} + +void DescriptorBuilder::AddError( + const string& element_name, + const Message& descriptor, + DescriptorPool::ErrorCollector::ErrorLocation location, + const string& error) { + if (error_collector_ == NULL) { + if (!had_errors_) { + GOOGLE_LOG(ERROR) << "Invalid proto descriptor for file \"" << filename_ + << "\":"; + } + GOOGLE_LOG(ERROR) << " " << element_name << ": " << error; + } else { + error_collector_->AddError(filename_, element_name, + &descriptor, location, error); + } + had_errors_ = true; +} + +void DescriptorBuilder::AddNotDefinedError( + const string& element_name, + const Message& descriptor, + DescriptorPool::ErrorCollector::ErrorLocation location, + const string& undefined_symbol) { + if (possible_undeclared_dependency_ == NULL) { + AddError(element_name, descriptor, location, + "\"" + undefined_symbol + "\" is not defined."); + } else { + AddError(element_name, descriptor, location, + "\"" + possible_undeclared_dependency_name_ + + "\" seems to be defined in \"" + + possible_undeclared_dependency_->name() + "\", which is not " + "imported by \"" + filename_ + "\". To use it here, please " + "add the necessary import."); + } +} + +bool DescriptorBuilder::IsInPackage(const FileDescriptor* file, + const string& package_name) { + return HasPrefixString(file->package(), package_name) && + (file->package().size() == package_name.size() || + file->package()[package_name.size()] == '.'); +} + +Symbol DescriptorBuilder::FindSymbolNotEnforcingDeps(const string& name) { + Symbol result; + + // We need to search our pool and all its underlays. + const DescriptorPool* pool = pool_; + while (true) { + // If we are looking at an underlay, we must lock its mutex_, since we are + // accessing the underlay's tables_ dircetly. + MutexLockMaybe lock((pool == pool_) ? NULL : pool->mutex_); + + // Note that we don't have to check fallback_database_ here because the + // symbol has to be in one of its file's direct dependencies, and we have + // already loaded those by the time we get here. + result = pool->tables_->FindSymbol(name); + if (!result.IsNull()) break; + if (pool->underlay_ == NULL) return kNullSymbol; + pool = pool->underlay_; + } + + return result; +} + +Symbol DescriptorBuilder::FindSymbol(const string& name) { + Symbol result = FindSymbolNotEnforcingDeps(name); + + if (!pool_->enforce_dependencies_) { + // Hack for CompilerUpgrader. + return result; + } + + // Only find symbols which were defined in this file or one of its + // dependencies. + const FileDescriptor* file = result.GetFile(); + if (file == file_) return result; + for (int i = 0; i < file_->dependency_count(); i++) { + if (file == file_->dependency(i)) return result; + } + + if (result.type == Symbol::PACKAGE) { + // Arg, this is overcomplicated. The symbol is a package name. It could + // be that the package was defined in multiple files. 
result.GetFile() + // returns the first file we saw that used this package. We've determined + // that that file is not a direct dependency of the file we are currently + // building, but it could be that some other file which *is* a direct + // dependency also defines the same package. We can't really rule out this + // symbol unless none of the dependencies define it. + if (IsInPackage(file_, name)) return result; + for (int i = 0; i < file_->dependency_count(); i++) { + // Note: A dependency may be NULL if it was not found or had errors. + if (file_->dependency(i) != NULL && + IsInPackage(file_->dependency(i), name)) { + return result; + } + } + } + + possible_undeclared_dependency_ = file; + possible_undeclared_dependency_name_ = name; + return kNullSymbol; +} + +Symbol DescriptorBuilder::LookupSymbolNoPlaceholder( + const string& name, const string& relative_to, ResolveMode resolve_mode) { + possible_undeclared_dependency_ = NULL; + + if (name.size() > 0 && name[0] == '.') { + // Fully-qualified name. + return FindSymbol(name.substr(1)); + } + + // If name is something like "Foo.Bar.baz", and symbols named "Foo" are + // defined in multiple parent scopes, we only want to find "Bar.baz" in the + // innermost one. E.g., the following should produce an error: + // message Bar { message Baz {} } + // message Foo { + // message Bar { + // } + // optional Bar.Baz baz = 1; + // } + // So, we look for just "Foo" first, then look for "Bar.baz" within it if + // found. + int name_dot_pos = name.find_first_of('.'); + string first_part_of_name; + if (name_dot_pos == string::npos) { + first_part_of_name = name; + } else { + first_part_of_name = name.substr(0, name_dot_pos); + } + + string scope_to_try(relative_to); + + while (true) { + // Chop off the last component of the scope. + string::size_type dot_pos = scope_to_try.find_last_of('.'); + if (dot_pos == string::npos) { + return FindSymbol(name); + } else { + scope_to_try.erase(dot_pos); + } + + // Append ".first_part_of_name" and try to find. + string::size_type old_size = scope_to_try.size(); + scope_to_try.append(1, '.'); + scope_to_try.append(first_part_of_name); + Symbol result = FindSymbol(scope_to_try); + if (!result.IsNull()) { + if (first_part_of_name.size() < name.size()) { + // name is a compound symbol, of which we only found the first part. + // Now try to look up the rest of it. + if (result.IsAggregate()) { + scope_to_try.append(name, first_part_of_name.size(), + name.size() - first_part_of_name.size()); + return FindSymbol(scope_to_try); + } else { + // We found a symbol but it's not an aggregate. Continue the loop. + } + } else { + if (resolve_mode == LOOKUP_TYPES && !result.IsType()) { + // We found a symbol but it's not a type. Continue the loop. + } else { + return result; + } + } + } + + // Not found. Remove the name so we can try again. + scope_to_try.erase(old_size); + } +} + +Symbol DescriptorBuilder::LookupSymbol( + const string& name, const string& relative_to, + PlaceholderType placeholder_type, ResolveMode resolve_mode) { + Symbol result = LookupSymbolNoPlaceholder( + name, relative_to, resolve_mode); + if (result.IsNull() && pool_->allow_unknown_) { + // Not found, but AllowUnknownDependencies() is enabled. Return a + // placeholder instead. + result = NewPlaceholder(name, placeholder_type); + } + return result; +} + +Symbol DescriptorBuilder::NewPlaceholder(const string& name, + PlaceholderType placeholder_type) { + // Compute names. 
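Not part of the patch: a simplified, self-contained sketch of the scope-walking idea behind LookupSymbolNoPlaceholder above. The real lookup anchors on the first component of the name before drilling into the rest; this sketch just tries the whole name against each enclosing scope, innermost first.

#include <iostream>
#include <set>
#include <string>

std::string Resolve(const std::set<std::string>& symbols,
                    const std::string& name, std::string scope) {
  if (!name.empty() && name[0] == '.') {             // already fully qualified
    return symbols.count(name.substr(1)) ? name.substr(1) : std::string();
  }
  while (true) {
    std::string candidate = scope.empty() ? name : scope + "." + name;
    if (symbols.count(candidate)) return candidate;
    std::string::size_type dot = scope.find_last_of('.');
    if (dot == std::string::npos) {
      if (scope.empty()) return std::string();       // nothing left to try
      scope.clear();                                 // fall back to file scope
    } else {
      scope.erase(dot);                              // drop innermost component
    }
  }
}

int main() {
  std::set<std::string> symbols = {"pkg.Outer", "pkg.Outer.Inner", "pkg.Other"};
  std::cout << Resolve(symbols, "Inner", "pkg.Outer") << "\n";  // pkg.Outer.Inner
  std::cout << Resolve(symbols, "Other", "pkg.Outer") << "\n";  // pkg.Other
  return 0;
}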
+  const string* placeholder_full_name;
+  const string* placeholder_name;
+  const string* placeholder_package;
+
+  if (!ValidateQualifiedName(name)) return kNullSymbol;
+  if (name[0] == '.') {
+    // Fully-qualified.
+    placeholder_full_name = tables_->AllocateString(name.substr(1));
+  } else {
+    placeholder_full_name = tables_->AllocateString(name);
+  }
+
+  string::size_type dotpos = placeholder_full_name->find_last_of('.');
+  if (dotpos != string::npos) {
+    placeholder_package = tables_->AllocateString(
+      placeholder_full_name->substr(0, dotpos));
+    placeholder_name = tables_->AllocateString(
+      placeholder_full_name->substr(dotpos + 1));
+  } else {
+    placeholder_package = &kEmptyString;
+    placeholder_name = placeholder_full_name;
+  }
+
+  // Create the placeholders.
+  FileDescriptor* placeholder_file = tables_->Allocate<FileDescriptor>();
+  memset(placeholder_file, 0, sizeof(*placeholder_file));
+
+  placeholder_file->name_ =
+    tables_->AllocateString(*placeholder_full_name + ".placeholder.proto");
+  placeholder_file->package_ = placeholder_package;
+  placeholder_file->pool_ = pool_;
+  placeholder_file->options_ = &FileOptions::default_instance();
+  placeholder_file->tables_ = &FileDescriptorTables::kEmpty;
+  // All other fields are zero or NULL.
+
+  if (placeholder_type == PLACEHOLDER_ENUM) {
+    placeholder_file->enum_type_count_ = 1;
+    placeholder_file->enum_types_ =
+      tables_->AllocateArray<EnumDescriptor>(1);
+
+    EnumDescriptor* placeholder_enum = &placeholder_file->enum_types_[0];
+    memset(placeholder_enum, 0, sizeof(*placeholder_enum));
+
+    placeholder_enum->full_name_ = placeholder_full_name;
+    placeholder_enum->name_ = placeholder_name;
+    placeholder_enum->file_ = placeholder_file;
+    placeholder_enum->options_ = &EnumOptions::default_instance();
+    placeholder_enum->is_placeholder_ = true;
+    placeholder_enum->is_unqualified_placeholder_ = (name[0] != '.');
+
+    // Enums must have at least one value.
+    placeholder_enum->value_count_ = 1;
+    placeholder_enum->values_ = tables_->AllocateArray<EnumValueDescriptor>(1);
+
+    EnumValueDescriptor* placeholder_value = &placeholder_enum->values_[0];
+    memset(placeholder_value, 0, sizeof(*placeholder_value));
+
+    placeholder_value->name_ = tables_->AllocateString("PLACEHOLDER_VALUE");
+    // Note that enum value names are siblings of their type, not children.
+    placeholder_value->full_name_ =
+      placeholder_package->empty() ? placeholder_value->name_ :
+      tables_->AllocateString(*placeholder_package + ".PLACEHOLDER_VALUE");
+
+    placeholder_value->number_ = 0;
+    placeholder_value->type_ = placeholder_enum;
+    placeholder_value->options_ = &EnumValueOptions::default_instance();
+
+    return Symbol(placeholder_enum);
+  } else {
+    placeholder_file->message_type_count_ = 1;
+    placeholder_file->message_types_ =
+      tables_->AllocateArray<Descriptor>(1);
+
+    Descriptor* placeholder_message = &placeholder_file->message_types_[0];
+    memset(placeholder_message, 0, sizeof(*placeholder_message));
+
+    placeholder_message->full_name_ = placeholder_full_name;
+    placeholder_message->name_ = placeholder_name;
+    placeholder_message->file_ = placeholder_file;
+    placeholder_message->options_ = &MessageOptions::default_instance();
+    placeholder_message->is_placeholder_ = true;
+    placeholder_message->is_unqualified_placeholder_ = (name[0] != '.');
+
+    if (placeholder_type == PLACEHOLDER_EXTENDABLE_MESSAGE) {
+      placeholder_message->extension_range_count_ = 1;
+      placeholder_message->extension_ranges_ =
+        tables_->AllocateArray<Descriptor::ExtensionRange>(1);
+      placeholder_message->extension_ranges_->start = 1;
+      // kMaxNumber + 1 because ExtensionRange::end is exclusive.
+ placeholder_message->extension_ranges_->end = + FieldDescriptor::kMaxNumber + 1; + } + + return Symbol(placeholder_message); + } +} + +const FileDescriptor* DescriptorBuilder::NewPlaceholderFile( + const string& name) { + FileDescriptor* placeholder = tables_->Allocate(); + memset(placeholder, 0, sizeof(*placeholder)); + + placeholder->name_ = tables_->AllocateString(name); + placeholder->package_ = &kEmptyString; + placeholder->pool_ = pool_; + placeholder->options_ = &FileOptions::default_instance(); + placeholder->tables_ = &FileDescriptorTables::kEmpty; + // All other fields are zero or NULL. + + return placeholder; +} + +bool DescriptorBuilder::AddSymbol( + const string& full_name, const void* parent, const string& name, + const Message& proto, Symbol symbol) { + // If the caller passed NULL for the parent, the symbol is at file scope. + // Use its file as the parent instead. + if (parent == NULL) parent = file_; + + if (tables_->AddSymbol(full_name, symbol)) { + if (!file_tables_->AddAliasUnderParent(parent, name, symbol)) { + GOOGLE_LOG(DFATAL) << "\"" << full_name << "\" not previously defined in " + "symbols_by_name_, but was defined in symbols_by_parent_; " + "this shouldn't be possible."; + return false; + } + return true; + } else { + const FileDescriptor* other_file = tables_->FindSymbol(full_name).GetFile(); + if (other_file == file_) { + string::size_type dot_pos = full_name.find_last_of('.'); + if (dot_pos == string::npos) { + AddError(full_name, proto, DescriptorPool::ErrorCollector::NAME, + "\"" + full_name + "\" is already defined."); + } else { + AddError(full_name, proto, DescriptorPool::ErrorCollector::NAME, + "\"" + full_name.substr(dot_pos + 1) + + "\" is already defined in \"" + + full_name.substr(0, dot_pos) + "\"."); + } + } else { + // Symbol seems to have been defined in a different file. + AddError(full_name, proto, DescriptorPool::ErrorCollector::NAME, + "\"" + full_name + "\" is already defined in file \"" + + other_file->name() + "\"."); + } + return false; + } +} + +void DescriptorBuilder::AddPackage( + const string& name, const Message& proto, const FileDescriptor* file) { + if (tables_->AddSymbol(name, Symbol(file))) { + // Success. Also add parent package, if any. + string::size_type dot_pos = name.find_last_of('.'); + if (dot_pos == string::npos) { + // No parents. + ValidateSymbolName(name, name, proto); + } else { + // Has parent. + string* parent_name = tables_->AllocateString(name.substr(0, dot_pos)); + AddPackage(*parent_name, proto, file); + ValidateSymbolName(name.substr(dot_pos + 1), name, proto); + } + } else { + Symbol existing_symbol = tables_->FindSymbol(name); + // It's OK to redefine a package. + if (existing_symbol.type != Symbol::PACKAGE) { + // Symbol seems to have been defined in a different file. + AddError(name, proto, DescriptorPool::ErrorCollector::NAME, + "\"" + name + "\" is already defined (as something other than " + "a package) in file \"" + existing_symbol.GetFile()->name() + + "\"."); + } + } +} + +void DescriptorBuilder::ValidateSymbolName( + const string& name, const string& full_name, const Message& proto) { + if (name.empty()) { + AddError(full_name, proto, DescriptorPool::ErrorCollector::NAME, + "Missing name."); + } else { + for (int i = 0; i < name.size(); i++) { + // I don't trust isalnum() due to locales. 
:( + if ((name[i] < 'a' || 'z' < name[i]) && + (name[i] < 'A' || 'Z' < name[i]) && + (name[i] < '0' || '9' < name[i]) && + (name[i] != '_')) { + AddError(full_name, proto, DescriptorPool::ErrorCollector::NAME, + "\"" + name + "\" is not a valid identifier."); + } + } + } +} + +bool DescriptorBuilder::ValidateQualifiedName(const string& name) { + bool last_was_period = false; + + for (int i = 0; i < name.size(); i++) { + // I don't trust isalnum() due to locales. :( + if (('a' <= name[i] && name[i] <= 'z') || + ('A' <= name[i] && name[i] <= 'Z') || + ('0' <= name[i] && name[i] <= '9') || + (name[i] == '_')) { + last_was_period = false; + } else if (name[i] == '.') { + if (last_was_period) return false; + last_was_period = true; + } else { + return false; + } + } + + return !name.empty() && !last_was_period; +} + +// ------------------------------------------------------------------- + +// This generic implementation is good for all descriptors except +// FileDescriptor. +template void DescriptorBuilder::AllocateOptions( + const typename DescriptorT::OptionsType& orig_options, + DescriptorT* descriptor) { + AllocateOptionsImpl(descriptor->full_name(), descriptor->full_name(), + orig_options, descriptor); +} + +// We specialize for FileDescriptor. +void DescriptorBuilder::AllocateOptions(const FileOptions& orig_options, + FileDescriptor* descriptor) { + // We add the dummy token so that LookupSymbol does the right thing. + AllocateOptionsImpl(descriptor->package() + ".dummy", descriptor->name(), + orig_options, descriptor); +} + +template void DescriptorBuilder::AllocateOptionsImpl( + const string& name_scope, + const string& element_name, + const typename DescriptorT::OptionsType& orig_options, + DescriptorT* descriptor) { + // We need to use a dummy pointer to work around a bug in older versions of + // GCC. Otherwise, the following two lines could be replaced with: + // typename DescriptorT::OptionsType* options = + // tables_->AllocateMessage(); + typename DescriptorT::OptionsType* const dummy = NULL; + typename DescriptorT::OptionsType* options = tables_->AllocateMessage(dummy); + options->CopyFrom(orig_options); + descriptor->options_ = options; + + // Don't add to options_to_interpret_ unless there were uninterpreted + // options. This not only avoids unnecessary work, but prevents a + // bootstrapping problem when building descriptors for descriptor.proto. + // descriptor.proto does not contain any uninterpreted options, but + // attempting to interpret options anyway will cause + // OptionsType::GetDescriptor() to be called which may then deadlock since + // we're still trying to build it. + if (options->uninterpreted_option_size() > 0) { + options_to_interpret_.push_back( + OptionsToInterpret(name_scope, element_name, &orig_options, options)); + } +} + + +// A common pattern: We want to convert a repeated field in the descriptor +// to an array of values, calling some method to build each value. +#define BUILD_ARRAY(INPUT, OUTPUT, NAME, METHOD, PARENT) \ + OUTPUT->NAME##_count_ = INPUT.NAME##_size(); \ + AllocateArray(INPUT.NAME##_size(), &OUTPUT->NAME##s_); \ + for (int i = 0; i < INPUT.NAME##_size(); i++) { \ + METHOD(INPUT.NAME(i), PARENT, OUTPUT->NAME##s_ + i); \ + } + +const FileDescriptor* DescriptorBuilder::BuildFile( + const FileDescriptorProto& proto) { + filename_ = proto.name(); + + // Check if the file already exists and is identical to the one being built. 
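Not part of the patch: a usage sketch against the stock libprotobuf API showing what the DescriptorPool::BuildFile() entry point seen earlier consumes and produces; the example.proto / demo.Ping names are invented for illustration.

#include <iostream>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/descriptor.pb.h>

int main() {
  google::protobuf::FileDescriptorProto file_proto;
  file_proto.set_name("example.proto");
  file_proto.set_package("demo");
  google::protobuf::DescriptorProto* msg = file_proto.add_message_type();
  msg->set_name("Ping");
  google::protobuf::FieldDescriptorProto* field = msg->add_field();
  field->set_name("id");
  field->set_number(1);
  field->set_label(google::protobuf::FieldDescriptorProto::LABEL_OPTIONAL);
  field->set_type(google::protobuf::FieldDescriptorProto::TYPE_INT32);

  google::protobuf::DescriptorPool pool;
  const google::protobuf::FileDescriptor* file = pool.BuildFile(file_proto);
  if (file == nullptr) {
    std::cerr << "BuildFile failed\n";
    return 1;
  }
  std::cout << file->message_type(0)->full_name() << "\n";  // demo.Ping
  return 0;
}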
+ // Note: This only works if the input is canonical -- that is, it + // fully-qualifies all type names, has no UninterpretedOptions, etc. + // This is fine, because this idempotency "feature" really only exists to + // accomodate one hack in the proto1->proto2 migration layer. + const FileDescriptor* existing_file = tables_->FindFile(filename_); + if (existing_file != NULL) { + // File already in pool. Compare the existing one to the input. + FileDescriptorProto existing_proto; + existing_file->CopyTo(&existing_proto); + if (existing_proto.SerializeAsString() == proto.SerializeAsString()) { + // They're identical. Return the existing descriptor. + return existing_file; + } + + // Not a match. The error will be detected and handled later. + } + + // Check to see if this file is already on the pending files list. + // TODO(kenton): Allow recursive imports? It may not work with some + // (most?) programming languages. E.g., in C++, a forward declaration + // of a type is not sufficient to allow it to be used even in a + // generated header file due to inlining. This could perhaps be + // worked around using tricks involving inserting #include statements + // mid-file, but that's pretty ugly, and I'm pretty sure there are + // some languages out there that do not allow recursive dependencies + // at all. + for (int i = 0; i < tables_->pending_files_.size(); i++) { + if (tables_->pending_files_[i] == proto.name()) { + string error_message("File recursively imports itself: "); + for (; i < tables_->pending_files_.size(); i++) { + error_message.append(tables_->pending_files_[i]); + error_message.append(" -> "); + } + error_message.append(proto.name()); + + AddError(proto.name(), proto, DescriptorPool::ErrorCollector::OTHER, + error_message); + return NULL; + } + } + + // If we have a fallback_database_, attempt to load all dependencies now, + // before checkpointing tables_. This avoids confusion with recursive + // checkpoints. + if (pool_->fallback_database_ != NULL) { + tables_->pending_files_.push_back(proto.name()); + for (int i = 0; i < proto.dependency_size(); i++) { + if (tables_->FindFile(proto.dependency(i)) == NULL && + (pool_->underlay_ == NULL || + pool_->underlay_->FindFileByName(proto.dependency(i)) == NULL)) { + // We don't care what this returns since we'll find out below anyway. + pool_->TryFindFileInFallbackDatabase(proto.dependency(i)); + } + } + tables_->pending_files_.pop_back(); + } + + // Checkpoint the tables so that we can roll back if something goes wrong. + tables_->Checkpoint(); + + FileDescriptor* result = tables_->Allocate(); + file_ = result; + + file_tables_ = tables_->AllocateFileTables(); + file_->tables_ = file_tables_; + + if (!proto.has_name()) { + AddError("", proto, DescriptorPool::ErrorCollector::OTHER, + "Missing field: FileDescriptorProto.name."); + } + + result->name_ = tables_->AllocateString(proto.name()); + if (proto.has_package()) { + result->package_ = tables_->AllocateString(proto.package()); + } else { + // We cannot rely on proto.package() returning a valid string if + // proto.has_package() is false, because we might be running at static + // initialization time, in which case default values have not yet been + // initialized. + result->package_ = tables_->AllocateString(""); + } + result->pool_ = pool_; + + // Add to tables. 
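Not part of the patch: a self-contained sketch of the pending-files check above, which turns an import cycle into a readable "a.proto -> b.proto -> a.proto" chain.

#include <iostream>
#include <string>
#include <vector>

std::string DescribeCycle(const std::vector<std::string>& pending,
                          const std::string& name) {
  for (std::size_t i = 0; i < pending.size(); i++) {
    if (pending[i] != name) continue;
    std::string error = "File recursively imports itself: ";
    for (std::size_t j = i; j < pending.size(); j++) {
      error += pending[j];
      error += " -> ";
    }
    return error + name;
  }
  return std::string();  // no cycle found
}

int main() {
  std::vector<std::string> pending = {"a.proto", "b.proto"};
  std::cout << DescribeCycle(pending, "a.proto") << "\n";
  return 0;
}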
+ if (!tables_->AddFile(result)) { + AddError(proto.name(), proto, DescriptorPool::ErrorCollector::OTHER, + "A file with this name is already in the pool."); + // Bail out early so that if this is actually the exact same file, we + // don't end up reporting that every single symbol is already defined. + tables_->Rollback(); + return NULL; + } + if (!result->package().empty()) { + AddPackage(result->package(), proto, result); + } + + // Make sure all dependencies are loaded. + set seen_dependencies; + result->dependency_count_ = proto.dependency_size(); + result->dependencies_ = + tables_->AllocateArray(proto.dependency_size()); + for (int i = 0; i < proto.dependency_size(); i++) { + if (!seen_dependencies.insert(proto.dependency(i)).second) { + AddError(proto.name(), proto, + DescriptorPool::ErrorCollector::OTHER, + "Import \"" + proto.dependency(i) + "\" was listed twice."); + } + + const FileDescriptor* dependency = tables_->FindFile(proto.dependency(i)); + if (dependency == NULL && pool_->underlay_ != NULL) { + dependency = pool_->underlay_->FindFileByName(proto.dependency(i)); + } + + if (dependency == NULL) { + if (pool_->allow_unknown_) { + dependency = NewPlaceholderFile(proto.dependency(i)); + } else { + string message; + if (pool_->fallback_database_ == NULL) { + message = "Import \"" + proto.dependency(i) + + "\" has not been loaded."; + } else { + message = "Import \"" + proto.dependency(i) + + "\" was not found or had errors."; + } + AddError(proto.name(), proto, + DescriptorPool::ErrorCollector::OTHER, + message); + } + } + + result->dependencies_[i] = dependency; + } + + // Convert children. + BUILD_ARRAY(proto, result, message_type, BuildMessage , NULL); + BUILD_ARRAY(proto, result, enum_type , BuildEnum , NULL); + BUILD_ARRAY(proto, result, service , BuildService , NULL); + BUILD_ARRAY(proto, result, extension , BuildExtension, NULL); + + // Copy options. + if (!proto.has_options()) { + result->options_ = NULL; // Will set to default_instance later. + } else { + AllocateOptions(proto.options(), result); + } + + // Note that the following steps must occur in exactly the specified order. + + // Cross-link. + CrossLinkFile(result, proto); + + // Interpret any remaining uninterpreted options gathered into + // options_to_interpret_ during descriptor building. Cross-linking has made + // extension options known, so all interpretations should now succeed. + if (!had_errors_) { + OptionInterpreter option_interpreter(this); + for (vector::iterator iter = + options_to_interpret_.begin(); + iter != options_to_interpret_.end(); ++iter) { + option_interpreter.InterpretOptions(&(*iter)); + } + options_to_interpret_.clear(); + } + + // Validate options. + if (!had_errors_) { + ValidateFileOptions(result, proto); + } + + if (had_errors_) { + tables_->Rollback(); + return NULL; + } else { + tables_->Checkpoint(); + return result; + } +} + +void DescriptorBuilder::BuildMessage(const DescriptorProto& proto, + const Descriptor* parent, + Descriptor* result) { + const string& scope = (parent == NULL) ? 
+ file_->package() : parent->full_name(); + string* full_name = tables_->AllocateString(scope); + if (!full_name->empty()) full_name->append(1, '.'); + full_name->append(proto.name()); + + ValidateSymbolName(proto.name(), *full_name, proto); + + result->name_ = tables_->AllocateString(proto.name()); + result->full_name_ = full_name; + result->file_ = file_; + result->containing_type_ = parent; + result->is_placeholder_ = false; + result->is_unqualified_placeholder_ = false; + + BUILD_ARRAY(proto, result, field , BuildField , result); + BUILD_ARRAY(proto, result, nested_type , BuildMessage , result); + BUILD_ARRAY(proto, result, enum_type , BuildEnum , result); + BUILD_ARRAY(proto, result, extension_range, BuildExtensionRange, result); + BUILD_ARRAY(proto, result, extension , BuildExtension , result); + + // Copy options. + if (!proto.has_options()) { + result->options_ = NULL; // Will set to default_instance later. + } else { + AllocateOptions(proto.options(), result); + } + + AddSymbol(result->full_name(), parent, result->name(), + proto, Symbol(result)); + + // Check that no fields have numbers in extension ranges. + for (int i = 0; i < result->field_count(); i++) { + const FieldDescriptor* field = result->field(i); + for (int j = 0; j < result->extension_range_count(); j++) { + const Descriptor::ExtensionRange* range = result->extension_range(j); + if (range->start <= field->number() && field->number() < range->end) { + AddError(field->full_name(), proto.extension_range(j), + DescriptorPool::ErrorCollector::NUMBER, + strings::Substitute( + "Extension range $0 to $1 includes field \"$2\" ($3).", + range->start, range->end - 1, + field->name(), field->number())); + } + } + } + + // Check that extension ranges don't overlap. + for (int i = 0; i < result->extension_range_count(); i++) { + const Descriptor::ExtensionRange* range1 = result->extension_range(i); + for (int j = i + 1; j < result->extension_range_count(); j++) { + const Descriptor::ExtensionRange* range2 = result->extension_range(j); + if (range1->end > range2->start && range2->end > range1->start) { + AddError(result->full_name(), proto.extension_range(j), + DescriptorPool::ErrorCollector::NUMBER, + strings::Substitute("Extension range $0 to $1 overlaps with " + "already-defined range $2 to $3.", + range2->start, range2->end - 1, + range1->start, range1->end - 1)); + } + } + } +} + +void DescriptorBuilder::BuildFieldOrExtension(const FieldDescriptorProto& proto, + const Descriptor* parent, + FieldDescriptor* result, + bool is_extension) { + const string& scope = (parent == NULL) ? + file_->package() : parent->full_name(); + string* full_name = tables_->AllocateString(scope); + if (!full_name->empty()) full_name->append(1, '.'); + full_name->append(proto.name()); + + ValidateSymbolName(proto.name(), *full_name, proto); + + result->name_ = tables_->AllocateString(proto.name()); + result->full_name_ = full_name; + result->file_ = file_; + result->number_ = proto.number(); + result->is_extension_ = is_extension; + + // If .proto files follow the style guide then the name should already be + // lower-cased. If that's the case we can just reuse the string we already + // allocated rather than allocate a new one. 
+  string lowercase_name(proto.name());
+  LowerString(&lowercase_name);
+  if (lowercase_name == proto.name()) {
+    result->lowercase_name_ = result->name_;
+  } else {
+    result->lowercase_name_ = tables_->AllocateString(lowercase_name);
+  }
+
+  // Don't bother with the above optimization for camel-case names since
+  // .proto files that follow the guide shouldn't be using names in this
+  // format, so the optimization wouldn't help much.
+  result->camelcase_name_ = tables_->AllocateString(ToCamelCase(proto.name()));
+
+  // Some compilers do not allow static_cast directly between two enum types,
+  // so we must cast to int first.
+  result->type_  = static_cast<FieldDescriptor::Type>(
+                     implicit_cast<int>(proto.type()));
+  result->label_ = static_cast<FieldDescriptor::Label>(
+                     implicit_cast<int>(proto.label()));
+
+  // Some of these may be filled in when cross-linking.
+  result->containing_type_ = NULL;
+  result->extension_scope_ = NULL;
+  result->experimental_map_key_ = NULL;
+  result->message_type_ = NULL;
+  result->enum_type_ = NULL;
+
+  result->has_default_value_ = proto.has_default_value();
+  if (proto.has_default_value() && result->is_repeated()) {
+    AddError(result->full_name(), proto,
+             DescriptorPool::ErrorCollector::DEFAULT_VALUE,
+             "Repeated fields can't have default values.");
+  }
+
+  if (proto.has_type()) {
+    if (proto.has_default_value()) {
+      char* end_pos = NULL;
+      switch (result->cpp_type()) {
+        case FieldDescriptor::CPPTYPE_INT32:
+          result->default_value_int32_ =
+            strtol(proto.default_value().c_str(), &end_pos, 0);
+          break;
+        case FieldDescriptor::CPPTYPE_INT64:
+          result->default_value_int64_ =
+            strto64(proto.default_value().c_str(), &end_pos, 0);
+          break;
+        case FieldDescriptor::CPPTYPE_UINT32:
+          result->default_value_uint32_ =
+            strtoul(proto.default_value().c_str(), &end_pos, 0);
+          break;
+        case FieldDescriptor::CPPTYPE_UINT64:
+          result->default_value_uint64_ =
+            strtou64(proto.default_value().c_str(), &end_pos, 0);
+          break;
+        case FieldDescriptor::CPPTYPE_FLOAT:
+          if (proto.default_value() == "inf") {
+            result->default_value_float_ = numeric_limits<float>::infinity();
+          } else if (proto.default_value() == "-inf") {
+            result->default_value_float_ = -numeric_limits<float>::infinity();
+          } else if (proto.default_value() == "nan") {
+            result->default_value_float_ = numeric_limits<float>::quiet_NaN();
+          } else {
+            result->default_value_float_ =
+              NoLocaleStrtod(proto.default_value().c_str(), &end_pos);
+          }
+          break;
+        case FieldDescriptor::CPPTYPE_DOUBLE:
+          if (proto.default_value() == "inf") {
+            result->default_value_double_ = numeric_limits<double>::infinity();
+          } else if (proto.default_value() == "-inf") {
+            result->default_value_double_ = -numeric_limits<double>::infinity();
+          } else if (proto.default_value() == "nan") {
+            result->default_value_double_ = numeric_limits<double>::quiet_NaN();
+          } else {
+            result->default_value_double_ =
+              NoLocaleStrtod(proto.default_value().c_str(), &end_pos);
+          }
+          break;
+        case FieldDescriptor::CPPTYPE_BOOL:
+          if (proto.default_value() == "true") {
+            result->default_value_bool_ = true;
+          } else if (proto.default_value() == "false") {
+            result->default_value_bool_ = false;
+          } else {
+            AddError(result->full_name(), proto,
+                     DescriptorPool::ErrorCollector::DEFAULT_VALUE,
+                     "Boolean default must be true or false.");
+          }
+          break;
+        case FieldDescriptor::CPPTYPE_ENUM:
+          // This will be filled in when cross-linking.
+ result->default_value_enum_ = NULL; + break; + case FieldDescriptor::CPPTYPE_STRING: + if (result->type() == FieldDescriptor::TYPE_BYTES) { + result->default_value_string_ = tables_->AllocateString( + UnescapeCEscapeString(proto.default_value())); + } else { + result->default_value_string_ = + tables_->AllocateString(proto.default_value()); + } + break; + case FieldDescriptor::CPPTYPE_MESSAGE: + AddError(result->full_name(), proto, + DescriptorPool::ErrorCollector::DEFAULT_VALUE, + "Messages can't have default values."); + result->has_default_value_ = false; + break; + } + + if (end_pos != NULL) { + // end_pos is only set non-NULL by the parsers for numeric types, above. + // This checks that the default was non-empty and had no extra junk + // after the end of the number. + if (proto.default_value().empty() || *end_pos != '\0') { + AddError(result->full_name(), proto, + DescriptorPool::ErrorCollector::DEFAULT_VALUE, + "Couldn't parse default value."); + } + } + } else { + // No explicit default value + switch (result->cpp_type()) { + case FieldDescriptor::CPPTYPE_INT32: + result->default_value_int32_ = 0; + break; + case FieldDescriptor::CPPTYPE_INT64: + result->default_value_int64_ = 0; + break; + case FieldDescriptor::CPPTYPE_UINT32: + result->default_value_uint32_ = 0; + break; + case FieldDescriptor::CPPTYPE_UINT64: + result->default_value_uint64_ = 0; + break; + case FieldDescriptor::CPPTYPE_FLOAT: + result->default_value_float_ = 0.0f; + break; + case FieldDescriptor::CPPTYPE_DOUBLE: + result->default_value_double_ = 0.0; + break; + case FieldDescriptor::CPPTYPE_BOOL: + result->default_value_bool_ = false; + break; + case FieldDescriptor::CPPTYPE_ENUM: + // This will be filled in when cross-linking. + result->default_value_enum_ = NULL; + break; + case FieldDescriptor::CPPTYPE_STRING: + result->default_value_string_ = &kEmptyString; + break; + case FieldDescriptor::CPPTYPE_MESSAGE: + break; + } + } + } + + if (result->number() <= 0) { + AddError(result->full_name(), proto, DescriptorPool::ErrorCollector::NUMBER, + "Field numbers must be positive integers."); + } else if (result->number() > FieldDescriptor::kMaxNumber) { + AddError(result->full_name(), proto, DescriptorPool::ErrorCollector::NUMBER, + strings::Substitute("Field numbers cannot be greater than $0.", + FieldDescriptor::kMaxNumber)); + } else if (result->number() >= FieldDescriptor::kFirstReservedNumber && + result->number() <= FieldDescriptor::kLastReservedNumber) { + AddError(result->full_name(), proto, DescriptorPool::ErrorCollector::NUMBER, + strings::Substitute( + "Field numbers $0 through $1 are reserved for the protocol " + "buffer library implementation.", + FieldDescriptor::kFirstReservedNumber, + FieldDescriptor::kLastReservedNumber)); + } + + if (is_extension) { + if (!proto.has_extendee()) { + AddError(result->full_name(), proto, + DescriptorPool::ErrorCollector::EXTENDEE, + "FieldDescriptorProto.extendee not set for extension field."); + } + + result->extension_scope_ = parent; + } else { + if (proto.has_extendee()) { + AddError(result->full_name(), proto, + DescriptorPool::ErrorCollector::EXTENDEE, + "FieldDescriptorProto.extendee set for non-extension field."); + } + + result->containing_type_ = parent; + } + + // Copy options. + if (!proto.has_options()) { + result->options_ = NULL; // Will set to default_instance later. 
+ } else { + AllocateOptions(proto.options(), result); + } + + AddSymbol(result->full_name(), parent, result->name(), + proto, Symbol(result)); +} + +void DescriptorBuilder::BuildExtensionRange( + const DescriptorProto::ExtensionRange& proto, + const Descriptor* parent, + Descriptor::ExtensionRange* result) { + result->start = proto.start(); + result->end = proto.end(); + if (result->start <= 0) { + AddError(parent->full_name(), proto, + DescriptorPool::ErrorCollector::NUMBER, + "Extension numbers must be positive integers."); + } + + if (result->end > FieldDescriptor::kMaxNumber + 1) { + AddError(parent->full_name(), proto, + DescriptorPool::ErrorCollector::NUMBER, + strings::Substitute("Extension numbers cannot be greater than $0.", + FieldDescriptor::kMaxNumber)); + } + + if (result->start >= result->end) { + AddError(parent->full_name(), proto, + DescriptorPool::ErrorCollector::NUMBER, + "Extension range end number must be greater than start number."); + } +} + +void DescriptorBuilder::BuildEnum(const EnumDescriptorProto& proto, + const Descriptor* parent, + EnumDescriptor* result) { + const string& scope = (parent == NULL) ? + file_->package() : parent->full_name(); + string* full_name = tables_->AllocateString(scope); + if (!full_name->empty()) full_name->append(1, '.'); + full_name->append(proto.name()); + + ValidateSymbolName(proto.name(), *full_name, proto); + + result->name_ = tables_->AllocateString(proto.name()); + result->full_name_ = full_name; + result->file_ = file_; + result->containing_type_ = parent; + result->is_placeholder_ = false; + result->is_unqualified_placeholder_ = false; + + if (proto.value_size() == 0) { + // We cannot allow enums with no values because this would mean there + // would be no valid default value for fields of this type. + AddError(result->full_name(), proto, + DescriptorPool::ErrorCollector::NAME, + "Enums must contain at least one value."); + } + + BUILD_ARRAY(proto, result, value, BuildEnumValue, result); + + // Copy options. + if (!proto.has_options()) { + result->options_ = NULL; // Will set to default_instance later. + } else { + AllocateOptions(proto.options(), result); + } + + AddSymbol(result->full_name(), parent, result->name(), + proto, Symbol(result)); +} + +void DescriptorBuilder::BuildEnumValue(const EnumValueDescriptorProto& proto, + const EnumDescriptor* parent, + EnumValueDescriptor* result) { + result->name_ = tables_->AllocateString(proto.name()); + result->number_ = proto.number(); + result->type_ = parent; + + // Note: full_name for enum values is a sibling to the parent's name, not a + // child of it. + string* full_name = tables_->AllocateString(*parent->full_name_); + full_name->resize(full_name->size() - parent->name_->size()); + full_name->append(*result->name_); + result->full_name_ = full_name; + + ValidateSymbolName(proto.name(), *full_name, proto); + + // Copy options. + if (!proto.has_options()) { + result->options_ = NULL; // Will set to default_instance later. + } else { + AllocateOptions(proto.options(), result); + } + + // Again, enum values are weird because we makes them appear as siblings + // of the enum type instead of children of it. So, we use + // parent->containing_type() as the value's parent. + bool added_to_outer_scope = + AddSymbol(result->full_name(), parent->containing_type(), result->name(), + proto, Symbol(result)); + + // However, we also want to be able to search for values within a single + // enum type, so we add it as a child of the enum type itself, too. 
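Not part of the patch: a tiny sketch of the sibling-name computation above, where an enum value's full name replaces the enum's own name at the end of the enum's full name (C++ scoping rules).

#include <iostream>
#include <string>

std::string EnumValueFullName(const std::string& enum_full_name,
                              const std::string& enum_name,
                              const std::string& value_name) {
  std::string full = enum_full_name;
  full.resize(full.size() - enum_name.size());  // keeps the trailing '.'
  return full + value_name;
}

int main() {
  // "demo.Color" + "RED" yields "demo.RED", not "demo.Color.RED".
  std::cout << EnumValueFullName("demo.Color", "Color", "RED") << "\n";
  return 0;
}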
+ // Note: This could fail, but if it does, the error has already been + // reported by the above AddSymbol() call, so we ignore the return code. + bool added_to_inner_scope = + file_tables_->AddAliasUnderParent(parent, result->name(), Symbol(result)); + + if (added_to_inner_scope && !added_to_outer_scope) { + // This value did not conflict with any values defined in the same enum, + // but it did conflict with some other symbol defined in the enum type's + // scope. Let's print an additional error to explain this. + string outer_scope; + if (parent->containing_type() == NULL) { + outer_scope = file_->package(); + } else { + outer_scope = parent->containing_type()->full_name(); + } + + if (outer_scope.empty()) { + outer_scope = "the global scope"; + } else { + outer_scope = "\"" + outer_scope + "\""; + } + + AddError(result->full_name(), proto, + DescriptorPool::ErrorCollector::NAME, + "Note that enum values use C++ scoping rules, meaning that " + "enum values are siblings of their type, not children of it. " + "Therefore, \"" + result->name() + "\" must be unique within " + + outer_scope + ", not just within \"" + parent->name() + "\"."); + } + + // An enum is allowed to define two numbers that refer to the same value. + // FindValueByNumber() should return the first such value, so we simply + // ignore AddEnumValueByNumber()'s return code. + file_tables_->AddEnumValueByNumber(result); +} + +void DescriptorBuilder::BuildService(const ServiceDescriptorProto& proto, + const void* dummy, + ServiceDescriptor* result) { + string* full_name = tables_->AllocateString(file_->package()); + if (!full_name->empty()) full_name->append(1, '.'); + full_name->append(proto.name()); + + ValidateSymbolName(proto.name(), *full_name, proto); + + result->name_ = tables_->AllocateString(proto.name()); + result->full_name_ = full_name; + result->file_ = file_; + + BUILD_ARRAY(proto, result, method, BuildMethod, result); + + // Copy options. + if (!proto.has_options()) { + result->options_ = NULL; // Will set to default_instance later. + } else { + AllocateOptions(proto.options(), result); + } + + AddSymbol(result->full_name(), NULL, result->name(), + proto, Symbol(result)); +} + +void DescriptorBuilder::BuildMethod(const MethodDescriptorProto& proto, + const ServiceDescriptor* parent, + MethodDescriptor* result) { + result->name_ = tables_->AllocateString(proto.name()); + result->service_ = parent; + + string* full_name = tables_->AllocateString(parent->full_name()); + full_name->append(1, '.'); + full_name->append(*result->name_); + result->full_name_ = full_name; + + ValidateSymbolName(proto.name(), *full_name, proto); + + // These will be filled in when cross-linking. + result->input_type_ = NULL; + result->output_type_ = NULL; + + // Copy options. + if (!proto.has_options()) { + result->options_ = NULL; // Will set to default_instance later. 
+ } else { + AllocateOptions(proto.options(), result); + } + + AddSymbol(result->full_name(), parent, result->name(), + proto, Symbol(result)); +} + +#undef BUILD_ARRAY + +// ------------------------------------------------------------------- + +void DescriptorBuilder::CrossLinkFile( + FileDescriptor* file, const FileDescriptorProto& proto) { + if (file->options_ == NULL) { + file->options_ = &FileOptions::default_instance(); + } + + for (int i = 0; i < file->message_type_count(); i++) { + CrossLinkMessage(&file->message_types_[i], proto.message_type(i)); + } + + for (int i = 0; i < file->extension_count(); i++) { + CrossLinkField(&file->extensions_[i], proto.extension(i)); + } + + for (int i = 0; i < file->enum_type_count(); i++) { + CrossLinkEnum(&file->enum_types_[i], proto.enum_type(i)); + } + + for (int i = 0; i < file->service_count(); i++) { + CrossLinkService(&file->services_[i], proto.service(i)); + } +} + +void DescriptorBuilder::CrossLinkMessage( + Descriptor* message, const DescriptorProto& proto) { + if (message->options_ == NULL) { + message->options_ = &MessageOptions::default_instance(); + } + + for (int i = 0; i < message->nested_type_count(); i++) { + CrossLinkMessage(&message->nested_types_[i], proto.nested_type(i)); + } + + for (int i = 0; i < message->enum_type_count(); i++) { + CrossLinkEnum(&message->enum_types_[i], proto.enum_type(i)); + } + + for (int i = 0; i < message->field_count(); i++) { + CrossLinkField(&message->fields_[i], proto.field(i)); + } + + for (int i = 0; i < message->extension_count(); i++) { + CrossLinkField(&message->extensions_[i], proto.extension(i)); + } +} + +void DescriptorBuilder::CrossLinkField( + FieldDescriptor* field, const FieldDescriptorProto& proto) { + if (field->options_ == NULL) { + field->options_ = &FieldOptions::default_instance(); + } + + if (proto.has_extendee()) { + Symbol extendee = LookupSymbol(proto.extendee(), field->full_name(), + PLACEHOLDER_EXTENDABLE_MESSAGE); + if (extendee.IsNull()) { + AddNotDefinedError(field->full_name(), proto, + DescriptorPool::ErrorCollector::EXTENDEE, + proto.extendee()); + return; + } else if (extendee.type != Symbol::MESSAGE) { + AddError(field->full_name(), proto, + DescriptorPool::ErrorCollector::EXTENDEE, + "\"" + proto.extendee() + "\" is not a message type."); + return; + } + field->containing_type_ = extendee.descriptor; + + if (!field->containing_type()->IsExtensionNumber(field->number())) { + AddError(field->full_name(), proto, + DescriptorPool::ErrorCollector::NUMBER, + strings::Substitute("\"$0\" does not declare $1 as an " + "extension number.", + field->containing_type()->full_name(), + field->number())); + } + } + + if (proto.has_type_name()) { + // Assume we are expecting a message type unless the proto contains some + // evidence that it expects an enum type. This only makes a difference if + // we end up creating a placeholder. + bool expecting_enum = (proto.type() == FieldDescriptorProto::TYPE_ENUM) || + proto.has_default_value(); + + Symbol type = + LookupSymbol(proto.type_name(), field->full_name(), + expecting_enum ? PLACEHOLDER_ENUM : PLACEHOLDER_MESSAGE, + LOOKUP_TYPES); + + if (type.IsNull()) { + AddNotDefinedError(field->full_name(), proto, + DescriptorPool::ErrorCollector::TYPE, + proto.type_name()); + return; + } + + if (!proto.has_type()) { + // Choose field type based on symbol. 
+ if (type.type == Symbol::MESSAGE) { + field->type_ = FieldDescriptor::TYPE_MESSAGE; + } else if (type.type == Symbol::ENUM) { + field->type_ = FieldDescriptor::TYPE_ENUM; + } else { + AddError(field->full_name(), proto, + DescriptorPool::ErrorCollector::TYPE, + "\"" + proto.type_name() + "\" is not a type."); + return; + } + } + + if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + if (type.type != Symbol::MESSAGE) { + AddError(field->full_name(), proto, + DescriptorPool::ErrorCollector::TYPE, + "\"" + proto.type_name() + "\" is not a message type."); + return; + } + field->message_type_ = type.descriptor; + + if (field->has_default_value()) { + AddError(field->full_name(), proto, + DescriptorPool::ErrorCollector::DEFAULT_VALUE, + "Messages can't have default values."); + } + } else if (field->cpp_type() == FieldDescriptor::CPPTYPE_ENUM) { + if (type.type != Symbol::ENUM) { + AddError(field->full_name(), proto, + DescriptorPool::ErrorCollector::TYPE, + "\"" + proto.type_name() + "\" is not an enum type."); + return; + } + field->enum_type_ = type.enum_descriptor; + + if (field->enum_type()->is_placeholder_) { + // We can't look up default values for placeholder types. We'll have + // to just drop them. + field->has_default_value_ = false; + } + + if (field->has_default_value()) { + // We can't just use field->enum_type()->FindValueByName() here + // because that locks the pool's mutex, which we have already locked + // at this point. + Symbol default_value = + LookupSymbolNoPlaceholder(proto.default_value(), + field->enum_type()->full_name()); + + if (default_value.type == Symbol::ENUM_VALUE && + default_value.enum_value_descriptor->type() == field->enum_type()) { + field->default_value_enum_ = default_value.enum_value_descriptor; + } else { + AddError(field->full_name(), proto, + DescriptorPool::ErrorCollector::DEFAULT_VALUE, + "Enum type \"" + field->enum_type()->full_name() + + "\" has no value named \"" + proto.default_value() + "\"."); + } + } else if (field->enum_type()->value_count() > 0) { + // All enums must have at least one value, or we would have reported + // an error elsewhere. We use the first defined value as the default + // if a default is not explicitly defined. + field->default_value_enum_ = field->enum_type()->value(0); + } + } else { + AddError(field->full_name(), proto, DescriptorPool::ErrorCollector::TYPE, + "Field with primitive type has type_name."); + } + } else { + if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE || + field->cpp_type() == FieldDescriptor::CPPTYPE_ENUM) { + AddError(field->full_name(), proto, DescriptorPool::ErrorCollector::TYPE, + "Field with message or enum type missing type_name."); + } + } + + // Add the field to the fields-by-number table. + // Note: We have to do this *after* cross-linking because extensions do not + // know their containing type until now. 
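Not part of the patch: a self-contained sketch of per-containing-type field-number bookkeeping, the same idea as the AddFieldByNumber() conflict check that follows.

#include <iostream>
#include <map>
#include <string>
#include <utility>

int main() {
  // Key: (containing type, field number) -> first field that claimed it.
  std::map<std::pair<std::string, int>, std::string> by_number;
  auto add = [&](const std::string& type, int number, const std::string& name) {
    auto inserted = by_number.insert({{type, number}, name});
    if (!inserted.second) {
      std::cout << "Field number " << number << " has already been used in \""
                << type << "\" by field \"" << inserted.first->second << "\".\n";
    }
  };
  add("demo.Ping", 1, "id");
  add("demo.Ping", 1, "timestamp");  // reported: number 1 already used by "id"
  add("demo.Pong", 1, "id");         // fine: different containing type
  return 0;
}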
+ if (!file_tables_->AddFieldByNumber(field)) { + const FieldDescriptor* conflicting_field = + file_tables_->FindFieldByNumber(field->containing_type(), + field->number()); + if (field->is_extension()) { + AddError(field->full_name(), proto, + DescriptorPool::ErrorCollector::NUMBER, + strings::Substitute("Extension number $0 has already been used " + "in \"$1\" by extension \"$2\".", + field->number(), + field->containing_type()->full_name(), + conflicting_field->full_name())); + } else { + AddError(field->full_name(), proto, + DescriptorPool::ErrorCollector::NUMBER, + strings::Substitute("Field number $0 has already been used in " + "\"$1\" by field \"$2\".", + field->number(), + field->containing_type()->full_name(), + conflicting_field->name())); + } + } + + if (field->is_extension()) { + // No need for error checking: if the extension number collided, + // we've already been informed of it by the if() above. + tables_->AddExtension(field); + } + + // Add the field to the lowercase-name and camelcase-name tables. + file_tables_->AddFieldByStylizedNames(field); +} + +void DescriptorBuilder::CrossLinkEnum( + EnumDescriptor* enum_type, const EnumDescriptorProto& proto) { + if (enum_type->options_ == NULL) { + enum_type->options_ = &EnumOptions::default_instance(); + } + + for (int i = 0; i < enum_type->value_count(); i++) { + CrossLinkEnumValue(&enum_type->values_[i], proto.value(i)); + } +} + +void DescriptorBuilder::CrossLinkEnumValue( + EnumValueDescriptor* enum_value, const EnumValueDescriptorProto& proto) { + if (enum_value->options_ == NULL) { + enum_value->options_ = &EnumValueOptions::default_instance(); + } +} + +void DescriptorBuilder::CrossLinkService( + ServiceDescriptor* service, const ServiceDescriptorProto& proto) { + if (service->options_ == NULL) { + service->options_ = &ServiceOptions::default_instance(); + } + + for (int i = 0; i < service->method_count(); i++) { + CrossLinkMethod(&service->methods_[i], proto.method(i)); + } +} + +void DescriptorBuilder::CrossLinkMethod( + MethodDescriptor* method, const MethodDescriptorProto& proto) { + if (method->options_ == NULL) { + method->options_ = &MethodOptions::default_instance(); + } + + Symbol input_type = LookupSymbol(proto.input_type(), method->full_name()); + if (input_type.IsNull()) { + AddNotDefinedError(method->full_name(), proto, + DescriptorPool::ErrorCollector::INPUT_TYPE, + proto.input_type()); + } else if (input_type.type != Symbol::MESSAGE) { + AddError(method->full_name(), proto, + DescriptorPool::ErrorCollector::INPUT_TYPE, + "\"" + proto.input_type() + "\" is not a message type."); + } else { + method->input_type_ = input_type.descriptor; + } + + Symbol output_type = LookupSymbol(proto.output_type(), method->full_name()); + if (output_type.IsNull()) { + AddNotDefinedError(method->full_name(), proto, + DescriptorPool::ErrorCollector::OUTPUT_TYPE, + proto.output_type()); + } else if (output_type.type != Symbol::MESSAGE) { + AddError(method->full_name(), proto, + DescriptorPool::ErrorCollector::OUTPUT_TYPE, + "\"" + proto.output_type() + "\" is not a message type."); + } else { + method->output_type_ = output_type.descriptor; + } +} + +// ------------------------------------------------------------------- + +#define VALIDATE_OPTIONS_FROM_ARRAY(descriptor, array_name, type) \ + for (int i = 0; i < descriptor->array_name##_count(); ++i) { \ + Validate##type##Options(descriptor->array_name##s_ + i, \ + proto.array_name(i)); \ + } + +// Determine if the file uses optimize_for = LITE_RUNTIME, being careful to +// 
avoid problems that exist at init time. +static bool IsLite(const FileDescriptor* file) { + // TODO(kenton): I don't even remember how many of these conditions are + // actually possible. I'm just being super-safe. + return file != NULL && + &file->options() != NULL && + &file->options() != &FileOptions::default_instance() && + file->options().optimize_for() == FileOptions::LITE_RUNTIME; +} + +void DescriptorBuilder::ValidateFileOptions(FileDescriptor* file, + const FileDescriptorProto& proto) { + VALIDATE_OPTIONS_FROM_ARRAY(file, message_type, Message); + VALIDATE_OPTIONS_FROM_ARRAY(file, enum_type, Enum); + VALIDATE_OPTIONS_FROM_ARRAY(file, service, Service); + VALIDATE_OPTIONS_FROM_ARRAY(file, extension, Field); + + // Lite files can only be imported by other Lite files. + if (!IsLite(file)) { + for (int i = 0; i < file->dependency_count(); i++) { + if (IsLite(file->dependency(i))) { + AddError( + file->name(), proto, + DescriptorPool::ErrorCollector::OTHER, + "Files that do not use optimize_for = LITE_RUNTIME cannot import " + "files which do use this option. This file is not lite, but it " + "imports \"" + file->dependency(i)->name() + "\" which is."); + break; + } + } + } +} + +void DescriptorBuilder::ValidateMessageOptions(Descriptor* message, + const DescriptorProto& proto) { + VALIDATE_OPTIONS_FROM_ARRAY(message, field, Field); + VALIDATE_OPTIONS_FROM_ARRAY(message, nested_type, Message); + VALIDATE_OPTIONS_FROM_ARRAY(message, enum_type, Enum); + VALIDATE_OPTIONS_FROM_ARRAY(message, extension, Field); +} + +void DescriptorBuilder::ValidateFieldOptions(FieldDescriptor* field, + const FieldDescriptorProto& proto) { + if (field->options().has_experimental_map_key()) { + ValidateMapKey(field, proto); + } + + // Only repeated primitive fields may be packed. + if (field->options().packed() && !field->is_packable()) { + AddError( + field->full_name(), proto, + DescriptorPool::ErrorCollector::TYPE, + "[packed = true] can only be specified for repeated primitive fields."); + } + + // Note: Default instance may not yet be initialized here, so we have to + // avoid reading from it. + if (field->containing_type_ != NULL && + &field->containing_type()->options() != + &MessageOptions::default_instance() && + field->containing_type()->options().message_set_wire_format()) { + if (field->is_extension()) { + if (!field->is_optional() || + field->type() != FieldDescriptor::TYPE_MESSAGE) { + AddError(field->full_name(), proto, + DescriptorPool::ErrorCollector::TYPE, + "Extensions of MessageSets must be optional messages."); + } + } else { + AddError(field->full_name(), proto, + DescriptorPool::ErrorCollector::NAME, + "MessageSets cannot have fields, only extensions."); + } + } + + // Lite extensions can only be of Lite types. + if (IsLite(field->file()) && + field->containing_type_ != NULL && + !IsLite(field->containing_type()->file())) { + AddError(field->full_name(), proto, + DescriptorPool::ErrorCollector::EXTENDEE, + "Extensions to non-lite types can only be declared in non-lite " + "files. Note that you cannot extend a non-lite type to contain " + "a lite type, but the reverse is allowed."); + } +} + +void DescriptorBuilder::ValidateEnumOptions(EnumDescriptor* enm, + const EnumDescriptorProto& proto) { + VALIDATE_OPTIONS_FROM_ARRAY(enm, value, EnumValue); +} + +void DescriptorBuilder::ValidateEnumValueOptions( + EnumValueDescriptor* enum_value, const EnumValueDescriptorProto& proto) { + // Nothing to do so far. 
+} +void DescriptorBuilder::ValidateServiceOptions(ServiceDescriptor* service, + const ServiceDescriptorProto& proto) { + if (IsLite(service->file()) && + (service->file()->options().cc_generic_services() || + service->file()->options().java_generic_services())) { + AddError(service->full_name(), proto, + DescriptorPool::ErrorCollector::NAME, + "Files with optimize_for = LITE_RUNTIME cannot define services " + "unless you set both options cc_generic_services and " + "java_generic_sevices to false."); + } + + VALIDATE_OPTIONS_FROM_ARRAY(service, method, Method); +} + +void DescriptorBuilder::ValidateMethodOptions(MethodDescriptor* method, + const MethodDescriptorProto& proto) { + // Nothing to do so far. +} + +void DescriptorBuilder::ValidateMapKey(FieldDescriptor* field, + const FieldDescriptorProto& proto) { + if (!field->is_repeated()) { + AddError(field->full_name(), proto, DescriptorPool::ErrorCollector::TYPE, + "map type is only allowed for repeated fields."); + return; + } + + if (field->cpp_type() != FieldDescriptor::CPPTYPE_MESSAGE) { + AddError(field->full_name(), proto, DescriptorPool::ErrorCollector::TYPE, + "map type is only allowed for fields with a message type."); + return; + } + + const Descriptor* item_type = field->message_type(); + if (item_type == NULL) { + AddError(field->full_name(), proto, DescriptorPool::ErrorCollector::TYPE, + "Could not find field type."); + return; + } + + // Find the field in item_type named by "experimental_map_key" + const string& key_name = field->options().experimental_map_key(); + const Symbol key_symbol = LookupSymbol( + key_name, + // We append ".key_name" to the containing type's name since + // LookupSymbol() searches for peers of the supplied name, not + // children of the supplied name. + item_type->full_name() + "." + key_name); + + if (key_symbol.IsNull() || key_symbol.field_descriptor->is_extension()) { + AddError(field->full_name(), proto, DescriptorPool::ErrorCollector::TYPE, + "Could not find field named \"" + key_name + "\" in type \"" + + item_type->full_name() + "\"."); + return; + } + const FieldDescriptor* key_field = key_symbol.field_descriptor; + + if (key_field->is_repeated()) { + AddError(field->full_name(), proto, DescriptorPool::ErrorCollector::TYPE, + "map_key must not name a repeated field."); + return; + } + + if (key_field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + AddError(field->full_name(), proto, DescriptorPool::ErrorCollector::TYPE, + "map key must name a scalar or string field."); + return; + } + + field->experimental_map_key_ = key_field; +} + +#undef VALIDATE_OPTIONS_FROM_ARRAY + +// ------------------------------------------------------------------- + +DescriptorBuilder::OptionInterpreter::OptionInterpreter( + DescriptorBuilder* builder) : builder_(builder) { + GOOGLE_CHECK(builder_); +} + +DescriptorBuilder::OptionInterpreter::~OptionInterpreter() { +} + +bool DescriptorBuilder::OptionInterpreter::InterpretOptions( + OptionsToInterpret* options_to_interpret) { + // Note that these may be in different pools, so we can't use the same + // descriptor and reflection objects on both. + Message* options = options_to_interpret->options; + const Message* original_options = options_to_interpret->original_options; + + bool failed = false; + options_to_interpret_ = options_to_interpret; + + // Find the uninterpreted_option field in the mutable copy of the options + // and clear them, since we're about to interpret them. 
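Not part of the patch: a self-contained sketch of how the name-part loop further below (in InterpretSingleOption) renders a custom option's name for error messages, wrapping extension parts in parentheses; the option names here are hypothetical.

#include <iostream>
#include <string>
#include <utility>
#include <vector>

int main() {
  // Each entry mirrors UninterpretedOption.NamePart: (name_part, is_extension).
  std::vector<std::pair<std::string, bool> > parts = {
      {"my_opt", true}, {"nested", false}};
  std::string debug_msg_name;
  for (std::size_t i = 0; i < parts.size(); i++) {
    if (!debug_msg_name.empty()) debug_msg_name += ".";
    debug_msg_name +=
        parts[i].second ? "(" + parts[i].first + ")" : parts[i].first;
  }
  std::cout << debug_msg_name << "\n";  // (my_opt).nested
  return 0;
}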
+ const FieldDescriptor* uninterpreted_options_field = + options->GetDescriptor()->FindFieldByName("uninterpreted_option"); + GOOGLE_CHECK(uninterpreted_options_field != NULL) + << "No field named \"uninterpreted_option\" in the Options proto."; + options->GetReflection()->ClearField(options, uninterpreted_options_field); + + // Find the uninterpreted_option field in the original options. + const FieldDescriptor* original_uninterpreted_options_field = + original_options->GetDescriptor()-> + FindFieldByName("uninterpreted_option"); + GOOGLE_CHECK(original_uninterpreted_options_field != NULL) + << "No field named \"uninterpreted_option\" in the Options proto."; + + const int num_uninterpreted_options = original_options->GetReflection()-> + FieldSize(*original_options, original_uninterpreted_options_field); + for (int i = 0; i < num_uninterpreted_options; ++i) { + uninterpreted_option_ = down_cast( + &original_options->GetReflection()->GetRepeatedMessage( + *original_options, original_uninterpreted_options_field, i)); + if (!InterpretSingleOption(options)) { + // Error already added by InterpretSingleOption(). + failed = true; + break; + } + } + // Reset these, so we don't have any dangling pointers. + uninterpreted_option_ = NULL; + options_to_interpret_ = NULL; + + if (!failed) { + // InterpretSingleOption() added the interpreted options in the + // UnknownFieldSet, in case the option isn't yet known to us. Now we + // serialize the options message and deserialize it back. That way, any + // option fields that we do happen to know about will get moved from the + // UnknownFieldSet into the real fields, and thus be available right away. + // If they are not known, that's OK too. They will get reparsed into the + // UnknownFieldSet and wait there until the message is parsed by something + // that does know about the options. + string buf; + options->AppendToString(&buf); + GOOGLE_CHECK(options->ParseFromString(buf)) + << "Protocol message serialized itself in invalid fashion."; + } + + return !failed; +} + +bool DescriptorBuilder::OptionInterpreter::InterpretSingleOption( + Message* options) { + // First do some basic validation. + if (uninterpreted_option_->name_size() == 0) { + // This should never happen unless the parser has gone seriously awry or + // someone has manually created the uninterpreted option badly. + return AddNameError("Option must have a name."); + } + if (uninterpreted_option_->name(0).name_part() == "uninterpreted_option") { + return AddNameError("Option must not use reserved name " + "\"uninterpreted_option\"."); + } + + const Descriptor* options_descriptor = NULL; + // Get the options message's descriptor from the builder's pool, so that we + // get the version that knows about any extension options declared in the + // file we're currently building. The descriptor should be there as long as + // the file we're building imported "google/protobuf/descriptors.proto". + + // Note that we use DescriptorBuilder::FindSymbol(), not + // DescriptorPool::FindMessageTypeByName() because we're already holding the + // pool's mutex, and the latter method locks it again. + Symbol symbol = builder_->FindSymbolNotEnforcingDeps( + options->GetDescriptor()->full_name()); + if (!symbol.IsNull() && symbol.type == Symbol::MESSAGE) { + options_descriptor = symbol.descriptor; + } else { + // The options message's descriptor was not in the builder's pool, so use + // the standard version from the generated pool. 
We're not holding the + // generated pool's mutex, so we can search it the straightforward way. + options_descriptor = options->GetDescriptor(); + } + GOOGLE_CHECK(options_descriptor); + + // We iterate over the name parts to drill into the submessages until we find + // the leaf field for the option. As we drill down we remember the current + // submessage's descriptor in |descriptor| and the next field in that + // submessage in |field|. We also track the fields we're drilling down + // through in |intermediate_fields|. As we go, we reconstruct the full option + // name in |debug_msg_name|, for use in error messages. + const Descriptor* descriptor = options_descriptor; + const FieldDescriptor* field = NULL; + vector intermediate_fields; + string debug_msg_name = ""; + + for (int i = 0; i < uninterpreted_option_->name_size(); ++i) { + const string& name_part = uninterpreted_option_->name(i).name_part(); + if (debug_msg_name.size() > 0) { + debug_msg_name += "."; + } + if (uninterpreted_option_->name(i).is_extension()) { + debug_msg_name += "(" + name_part + ")"; + // Search for the extension's descriptor as an extension in the builder's + // pool. Note that we use DescriptorBuilder::LookupSymbol(), not + // DescriptorPool::FindExtensionByName(), for two reasons: 1) It allows + // relative lookups, and 2) because we're already holding the pool's + // mutex, and the latter method locks it again. + Symbol symbol = builder_->LookupSymbol(name_part, + options_to_interpret_->name_scope); + if (!symbol.IsNull() && symbol.type == Symbol::FIELD) { + field = symbol.field_descriptor; + } + // If we don't find the field then the field's descriptor was not in the + // builder's pool, but there's no point in looking in the generated + // pool. We require that you import the file that defines any extensions + // you use, so they must be present in the builder's pool. + } else { + debug_msg_name += name_part; + // Search for the field's descriptor as a regular field. + field = descriptor->FindFieldByName(name_part); + } + + if (field == NULL) { + if (get_allow_unknown(builder_->pool_)) { + // We can't find the option, but AllowUnknownDependencies() is enabled, + // so we will just leave it as uninterpreted. + AddWithoutInterpreting(*uninterpreted_option_, options); + return true; + } else { + return AddNameError("Option \"" + debug_msg_name + "\" unknown."); + } + } else if (field->containing_type() != descriptor) { + if (get_is_placeholder(field->containing_type())) { + // The field is an extension of a placeholder type, so we can't + // reliably verify whether it is a valid extension to use here (e.g. + // we don't know if it is an extension of the correct *Options message, + // or if it has a valid field number, etc.). Just leave it as + // uninterpreted instead. + AddWithoutInterpreting(*uninterpreted_option_, options); + return true; + } else { + // This can only happen if, due to some insane misconfiguration of the + // pools, we find the options message in one pool but the field in + // another. This would probably imply a hefty bug somewhere. + return AddNameError("Option field \"" + debug_msg_name + + "\" is not a field or extension of message \"" + + descriptor->name() + "\"."); + } + } else if (field->is_repeated()) { + return AddNameError("Option field \"" + debug_msg_name + + "\" is repeated. 
Repeated options are not " + "supported."); + } else if (i < uninterpreted_option_->name_size() - 1) { + if (field->cpp_type() != FieldDescriptor::CPPTYPE_MESSAGE) { + return AddNameError("Option \"" + debug_msg_name + + "\" is an atomic type, not a message."); + } else { + // Drill down into the submessage. + intermediate_fields.push_back(field); + descriptor = field->message_type(); + } + } + } + + // We've found the leaf field. Now we use UnknownFieldSets to set its value + // on the options message. We do so because the message may not yet know + // about its extension fields, so we may not be able to set the fields + // directly. But the UnknownFieldSets will serialize to the same wire-format + // message, so reading that message back in once the extension fields are + // known will populate them correctly. + + // First see if the option is already set. + if (!ExamineIfOptionIsSet( + intermediate_fields.begin(), + intermediate_fields.end(), + field, debug_msg_name, + options->GetReflection()->GetUnknownFields(*options))) { + return false; // ExamineIfOptionIsSet() already added the error. + } + + + // First set the value on the UnknownFieldSet corresponding to the + // innermost message. + scoped_ptr unknown_fields(new UnknownFieldSet()); + if (!SetOptionValue(field, unknown_fields.get())) { + return false; // SetOptionValue() already added the error. + } + + // Now wrap the UnknownFieldSet with UnknownFieldSets corresponding to all + // the intermediate messages. + for (vector::reverse_iterator iter = + intermediate_fields.rbegin(); + iter != intermediate_fields.rend(); ++iter) { + scoped_ptr parent_unknown_fields(new UnknownFieldSet()); + switch ((*iter)->type()) { + case FieldDescriptor::TYPE_MESSAGE: { + io::StringOutputStream outstr( + parent_unknown_fields->AddLengthDelimited((*iter)->number())); + io::CodedOutputStream out(&outstr); + internal::WireFormat::SerializeUnknownFields(*unknown_fields, &out); + GOOGLE_CHECK(!out.HadError()) + << "Unexpected failure while serializing option submessage " + << debug_msg_name << "\"."; + break; + } + + case FieldDescriptor::TYPE_GROUP: { + parent_unknown_fields->AddGroup((*iter)->number()) + ->MergeFrom(*unknown_fields); + break; + } + + default: + GOOGLE_LOG(FATAL) << "Invalid wire type for CPPTYPE_MESSAGE: " + << (*iter)->type(); + return false; + } + unknown_fields.reset(parent_unknown_fields.release()); + } + + // Now merge the UnknownFieldSet corresponding to the top-level message into + // the options message. + options->GetReflection()->MutableUnknownFields(options)->MergeFrom( + *unknown_fields); + + return true; +} + +void DescriptorBuilder::OptionInterpreter::AddWithoutInterpreting( + const UninterpretedOption& uninterpreted_option, Message* options) { + const FieldDescriptor* field = + options->GetDescriptor()->FindFieldByName("uninterpreted_option"); + GOOGLE_CHECK(field != NULL); + + options->GetReflection()->AddMessage(options, field) + ->CopyFrom(uninterpreted_option); +} + +bool DescriptorBuilder::OptionInterpreter::ExamineIfOptionIsSet( + vector::const_iterator intermediate_fields_iter, + vector::const_iterator intermediate_fields_end, + const FieldDescriptor* innermost_field, const string& debug_msg_name, + const UnknownFieldSet& unknown_fields) { + // We do linear searches of the UnknownFieldSet and its sub-groups. This + // should be fine since it's unlikely that any one options structure will + // contain more than a handful of options. 
+ + if (intermediate_fields_iter == intermediate_fields_end) { + // We're at the innermost submessage. + for (int i = 0; i < unknown_fields.field_count(); i++) { + if (unknown_fields.field(i).number() == innermost_field->number()) { + return AddNameError("Option \"" + debug_msg_name + + "\" was already set."); + } + } + return true; + } + + for (int i = 0; i < unknown_fields.field_count(); i++) { + if (unknown_fields.field(i).number() == + (*intermediate_fields_iter)->number()) { + const UnknownField* unknown_field = &unknown_fields.field(i); + FieldDescriptor::Type type = (*intermediate_fields_iter)->type(); + // Recurse into the next submessage. + switch (type) { + case FieldDescriptor::TYPE_MESSAGE: + if (unknown_field->type() == UnknownField::TYPE_LENGTH_DELIMITED) { + UnknownFieldSet intermediate_unknown_fields; + if (intermediate_unknown_fields.ParseFromString( + unknown_field->length_delimited()) && + !ExamineIfOptionIsSet(intermediate_fields_iter + 1, + intermediate_fields_end, + innermost_field, debug_msg_name, + intermediate_unknown_fields)) { + return false; // Error already added. + } + } + break; + + case FieldDescriptor::TYPE_GROUP: + if (unknown_field->type() == UnknownField::TYPE_GROUP) { + if (!ExamineIfOptionIsSet(intermediate_fields_iter + 1, + intermediate_fields_end, + innermost_field, debug_msg_name, + unknown_field->group())) { + return false; // Error already added. + } + } + break; + + default: + GOOGLE_LOG(FATAL) << "Invalid wire type for CPPTYPE_MESSAGE: " << type; + return false; + } + } + } + return true; +} + +bool DescriptorBuilder::OptionInterpreter::SetOptionValue( + const FieldDescriptor* option_field, + UnknownFieldSet* unknown_fields) { + // We switch on the CppType to validate. + switch (option_field->cpp_type()) { + + case FieldDescriptor::CPPTYPE_INT32: + if (uninterpreted_option_->has_positive_int_value()) { + if (uninterpreted_option_->positive_int_value() > + static_cast(kint32max)) { + return AddValueError("Value out of range for int32 option \"" + + option_field->full_name() + "\"."); + } else { + SetInt32(option_field->number(), + uninterpreted_option_->positive_int_value(), + option_field->type(), unknown_fields); + } + } else if (uninterpreted_option_->has_negative_int_value()) { + if (uninterpreted_option_->negative_int_value() < + static_cast(kint32min)) { + return AddValueError("Value out of range for int32 option \"" + + option_field->full_name() + "\"."); + } else { + SetInt32(option_field->number(), + uninterpreted_option_->negative_int_value(), + option_field->type(), unknown_fields); + } + } else { + return AddValueError("Value must be integer for int32 option \"" + + option_field->full_name() + "\"."); + } + break; + + case FieldDescriptor::CPPTYPE_INT64: + if (uninterpreted_option_->has_positive_int_value()) { + if (uninterpreted_option_->positive_int_value() > + static_cast(kint64max)) { + return AddValueError("Value out of range for int64 option \"" + + option_field->full_name() + "\"."); + } else { + SetInt64(option_field->number(), + uninterpreted_option_->positive_int_value(), + option_field->type(), unknown_fields); + } + } else if (uninterpreted_option_->has_negative_int_value()) { + SetInt64(option_field->number(), + uninterpreted_option_->negative_int_value(), + option_field->type(), unknown_fields); + } else { + return AddValueError("Value must be integer for int64 option \"" + + option_field->full_name() + "\"."); + } + break; + + case FieldDescriptor::CPPTYPE_UINT32: + if (uninterpreted_option_->has_positive_int_value()) 
{ + if (uninterpreted_option_->positive_int_value() > kuint32max) { + return AddValueError("Value out of range for uint32 option \"" + + option_field->name() + "\"."); + } else { + SetUInt32(option_field->number(), + uninterpreted_option_->positive_int_value(), + option_field->type(), unknown_fields); + } + } else { + return AddValueError("Value must be non-negative integer for uint32 " + "option \"" + option_field->full_name() + "\"."); + } + break; + + case FieldDescriptor::CPPTYPE_UINT64: + if (uninterpreted_option_->has_positive_int_value()) { + SetUInt64(option_field->number(), + uninterpreted_option_->positive_int_value(), + option_field->type(), unknown_fields); + } else { + return AddValueError("Value must be non-negative integer for uint64 " + "option \"" + option_field->full_name() + "\"."); + } + break; + + case FieldDescriptor::CPPTYPE_FLOAT: { + float value; + if (uninterpreted_option_->has_double_value()) { + value = uninterpreted_option_->double_value(); + } else if (uninterpreted_option_->has_positive_int_value()) { + value = uninterpreted_option_->positive_int_value(); + } else if (uninterpreted_option_->has_negative_int_value()) { + value = uninterpreted_option_->negative_int_value(); + } else { + return AddValueError("Value must be number for float option \"" + + option_field->full_name() + "\"."); + } + unknown_fields->AddFixed32(option_field->number(), + google::protobuf::internal::WireFormatLite::EncodeFloat(value)); + break; + } + + case FieldDescriptor::CPPTYPE_DOUBLE: { + double value; + if (uninterpreted_option_->has_double_value()) { + value = uninterpreted_option_->double_value(); + } else if (uninterpreted_option_->has_positive_int_value()) { + value = uninterpreted_option_->positive_int_value(); + } else if (uninterpreted_option_->has_negative_int_value()) { + value = uninterpreted_option_->negative_int_value(); + } else { + return AddValueError("Value must be number for double option \"" + + option_field->full_name() + "\"."); + } + unknown_fields->AddFixed64(option_field->number(), + google::protobuf::internal::WireFormatLite::EncodeDouble(value)); + break; + } + + case FieldDescriptor::CPPTYPE_BOOL: + uint64 value; + if (!uninterpreted_option_->has_identifier_value()) { + return AddValueError("Value must be identifier for boolean option " + "\"" + option_field->full_name() + "\"."); + } + if (uninterpreted_option_->identifier_value() == "true") { + value = 1; + } else if (uninterpreted_option_->identifier_value() == "false") { + value = 0; + } else { + return AddValueError("Value must be \"true\" or \"false\" for boolean " + "option \"" + option_field->full_name() + "\"."); + } + unknown_fields->AddVarint(option_field->number(), value); + break; + + case FieldDescriptor::CPPTYPE_ENUM: { + if (!uninterpreted_option_->has_identifier_value()) { + return AddValueError("Value must be identifier for enum-valued option " + "\"" + option_field->full_name() + "\"."); + } + const EnumDescriptor* enum_type = option_field->enum_type(); + const string& value_name = uninterpreted_option_->identifier_value(); + const EnumValueDescriptor* enum_value = NULL; + + if (enum_type->file()->pool() != DescriptorPool::generated_pool()) { + // Note that the enum value's fully-qualified name is a sibling of the + // enum's name, not a child of it. 
+ string fully_qualified_name = enum_type->full_name(); + fully_qualified_name.resize(fully_qualified_name.size() - + enum_type->name().size()); + fully_qualified_name += value_name; + + // Search for the enum value's descriptor in the builder's pool. Note + // that we use DescriptorBuilder::FindSymbolNotEnforcingDeps(), not + // DescriptorPool::FindEnumValueByName() because we're already holding + // the pool's mutex, and the latter method locks it again. + Symbol symbol = + builder_->FindSymbolNotEnforcingDeps(fully_qualified_name); + if (!symbol.IsNull() && symbol.type == Symbol::ENUM_VALUE) { + if (symbol.enum_value_descriptor->type() != enum_type) { + return AddValueError("Enum type \"" + enum_type->full_name() + + "\" has no value named \"" + value_name + "\" for option \"" + + option_field->full_name() + + "\". This appears to be a value from a sibling type."); + } else { + enum_value = symbol.enum_value_descriptor; + } + } + } else { + // The enum type is in the generated pool, so we can search for the + // value there. + enum_value = enum_type->FindValueByName(value_name); + } + + if (enum_value == NULL) { + return AddValueError("Enum type \"" + + option_field->enum_type()->full_name() + + "\" has no value named \"" + value_name + "\" for " + "option \"" + option_field->full_name() + "\"."); + } else { + // Sign-extension is not a problem, since we cast directly from int32 to + // uint64, without first going through uint32. + unknown_fields->AddVarint(option_field->number(), + static_cast(static_cast(enum_value->number()))); + } + break; + } + + case FieldDescriptor::CPPTYPE_STRING: + if (!uninterpreted_option_->has_string_value()) { + return AddValueError("Value must be quoted string for string option " + "\"" + option_field->full_name() + "\"."); + } + // The string has already been unquoted and unescaped by the parser. + unknown_fields->AddLengthDelimited(option_field->number(), + uninterpreted_option_->string_value()); + break; + + case FieldDescriptor::CPPTYPE_MESSAGE: + if (!SetAggregateOption(option_field, unknown_fields)) { + return false; + } + break; + } + + return true; +} + +class DescriptorBuilder::OptionInterpreter::AggregateOptionFinder + : public TextFormat::Finder { + public: + DescriptorBuilder* builder_; + + virtual const FieldDescriptor* FindExtension( + Message* message, const string& name) const { + assert_mutex_held(builder_->pool_); + Symbol result = builder_->LookupSymbolNoPlaceholder( + name, message->GetDescriptor()->full_name()); + if (result.type == Symbol::FIELD && + result.field_descriptor->is_extension()) { + return result.field_descriptor; + } else { + return NULL; + } + } +}; + +// A custom error collector to record any text-format parsing errors +namespace { +class AggregateErrorCollector : public io::ErrorCollector { + public: + string error_; + + virtual void AddError(int line, int column, const string& message) { + if (!error_.empty()) { + error_ += "; "; + } + error_ += message; + } + + virtual void AddWarning(int line, int column, const string& message) { + // Ignore warnings + } +}; +} + +// We construct a dynamic message of the type corresponding to +// option_field, parse the supplied text-format string into this +// message, and serialize the resulting message to produce the value. 
+bool DescriptorBuilder::OptionInterpreter::SetAggregateOption( + const FieldDescriptor* option_field, + UnknownFieldSet* unknown_fields) { + if (!uninterpreted_option_->has_aggregate_value()) { + return AddValueError("Option \"" + option_field->full_name() + + "\" is a message. To set the entire message, use " + "syntax like \"" + option_field->name() + + " = { }\". " + "To set fields within it, use " + "syntax like \"" + option_field->name() + + ".foo = value\"."); + } + + const Descriptor* type = option_field->message_type(); + scoped_ptr dynamic(dynamic_factory_.GetPrototype(type)->New()); + GOOGLE_CHECK(dynamic.get() != NULL) + << "Could not create an instance of " << option_field->DebugString(); + + AggregateErrorCollector collector; + AggregateOptionFinder finder; + finder.builder_ = builder_; + TextFormat::Parser parser; + parser.RecordErrorsTo(&collector); + parser.SetFinder(&finder); + if (!parser.ParseFromString(uninterpreted_option_->aggregate_value(), + dynamic.get())) { + AddValueError("Error while parsing option value for \"" + + option_field->name() + "\": " + collector.error_); + return false; + } else { + string serial; + dynamic->SerializeToString(&serial); // Never fails + unknown_fields->AddLengthDelimited(option_field->number(), serial); + return true; + } +} + +void DescriptorBuilder::OptionInterpreter::SetInt32(int number, int32 value, + FieldDescriptor::Type type, UnknownFieldSet* unknown_fields) { + switch (type) { + case FieldDescriptor::TYPE_INT32: + unknown_fields->AddVarint(number, + static_cast(static_cast(value))); + break; + + case FieldDescriptor::TYPE_SFIXED32: + unknown_fields->AddFixed32(number, static_cast(value)); + break; + + case FieldDescriptor::TYPE_SINT32: + unknown_fields->AddVarint(number, + google::protobuf::internal::WireFormatLite::ZigZagEncode32(value)); + break; + + default: + GOOGLE_LOG(FATAL) << "Invalid wire type for CPPTYPE_INT32: " << type; + break; + } +} + +void DescriptorBuilder::OptionInterpreter::SetInt64(int number, int64 value, + FieldDescriptor::Type type, UnknownFieldSet* unknown_fields) { + switch (type) { + case FieldDescriptor::TYPE_INT64: + unknown_fields->AddVarint(number, static_cast(value)); + break; + + case FieldDescriptor::TYPE_SFIXED64: + unknown_fields->AddFixed64(number, static_cast(value)); + break; + + case FieldDescriptor::TYPE_SINT64: + unknown_fields->AddVarint(number, + google::protobuf::internal::WireFormatLite::ZigZagEncode64(value)); + break; + + default: + GOOGLE_LOG(FATAL) << "Invalid wire type for CPPTYPE_INT64: " << type; + break; + } +} + +void DescriptorBuilder::OptionInterpreter::SetUInt32(int number, uint32 value, + FieldDescriptor::Type type, UnknownFieldSet* unknown_fields) { + switch (type) { + case FieldDescriptor::TYPE_UINT32: + unknown_fields->AddVarint(number, static_cast(value)); + break; + + case FieldDescriptor::TYPE_FIXED32: + unknown_fields->AddFixed32(number, static_cast(value)); + break; + + default: + GOOGLE_LOG(FATAL) << "Invalid wire type for CPPTYPE_UINT32: " << type; + break; + } +} + +void DescriptorBuilder::OptionInterpreter::SetUInt64(int number, uint64 value, + FieldDescriptor::Type type, UnknownFieldSet* unknown_fields) { + switch (type) { + case FieldDescriptor::TYPE_UINT64: + unknown_fields->AddVarint(number, value); + break; + + case FieldDescriptor::TYPE_FIXED64: + unknown_fields->AddFixed64(number, value); + break; + + default: + GOOGLE_LOG(FATAL) << "Invalid wire type for CPPTYPE_UINT64: " << type; + break; + } +} + +} // namespace protobuf +} // namespace google 
diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor.h new file mode 100644 index 0000000000..7f87dd809c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor.h @@ -0,0 +1,1367 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This file contains classes which describe a type of protocol message. +// You can use a message's descriptor to learn at runtime what fields +// it contains and what the types of those fields are. The Message +// interface also allows you to dynamically access and modify individual +// fields by passing the FieldDescriptor of the field you are interested +// in. +// +// Most users will not care about descriptors, because they will write +// code specific to certain protocol types and will simply use the classes +// generated by the protocol compiler directly. Advanced users who want +// to operate on arbitrary types (not known at compile time) may want to +// read descriptors in order to learn about the contents of a message. +// A very small number of users will want to construct their own +// Descriptors, either because they are implementing Message manually or +// because they are writing something like the protocol compiler. +// +// For an example of how you might use descriptors, see the code example +// at the top of message.h. + +#ifndef GOOGLE_PROTOBUF_DESCRIPTOR_H__ +#define GOOGLE_PROTOBUF_DESCRIPTOR_H__ + +#include +#include +#include + + +namespace google { +namespace protobuf { + +// Defined in this file. 
+class Descriptor; +class FieldDescriptor; +class EnumDescriptor; +class EnumValueDescriptor; +class ServiceDescriptor; +class MethodDescriptor; +class FileDescriptor; +class DescriptorDatabase; +class DescriptorPool; + +// Defined in descriptor.proto +class DescriptorProto; +class FieldDescriptorProto; +class EnumDescriptorProto; +class EnumValueDescriptorProto; +class ServiceDescriptorProto; +class MethodDescriptorProto; +class FileDescriptorProto; +class MessageOptions; +class FieldOptions; +class EnumOptions; +class EnumValueOptions; +class ServiceOptions; +class MethodOptions; +class FileOptions; +class UninterpretedOption; + +// Defined in message.h +class Message; + +// Defined in descriptor.cc +class DescriptorBuilder; +class FileDescriptorTables; + +// Defined in unknown_field_set.h. +class UnknownField; + +// Describes a type of protocol message, or a particular group within a +// message. To obtain the Descriptor for a given message object, call +// Message::GetDescriptor(). Generated message classes also have a +// static method called descriptor() which returns the type's descriptor. +// Use DescriptorPool to construct your own descriptors. +class LIBPROTOBUF_EXPORT Descriptor { + public: + // The name of the message type, not including its scope. + const string& name() const; + + // The fully-qualified name of the message type, scope delimited by + // periods. For example, message type "Foo" which is declared in package + // "bar" has full name "bar.Foo". If a type "Baz" is nested within + // Foo, Baz's full_name is "bar.Foo.Baz". To get only the part that + // comes after the last '.', use name(). + const string& full_name() const; + + // Index of this descriptor within the file or containing type's message + // type array. + int index() const; + + // The .proto file in which this message type was defined. Never NULL. + const FileDescriptor* file() const; + + // If this Descriptor describes a nested type, this returns the type + // in which it is nested. Otherwise, returns NULL. + const Descriptor* containing_type() const; + + // Get options for this message type. These are specified in the .proto file + // by placing lines like "option foo = 1234;" in the message definition. + // Allowed options are defined by MessageOptions in + // google/protobuf/descriptor.proto, and any available extensions of that + // message. + const MessageOptions& options() const; + + // Write the contents of this Descriptor into the given DescriptorProto. + // The target DescriptorProto must be clear before calling this; if it + // isn't, the result may be garbage. + void CopyTo(DescriptorProto* proto) const; + + // Write the contents of this decriptor in a human-readable form. Output + // will be suitable for re-parsing. + string DebugString() const; + + // Field stuff ----------------------------------------------------- + + // The number of fields in this message type. + int field_count() const; + // Gets a field by index, where 0 <= index < field_count(). + // These are returned in the order they were defined in the .proto file. + const FieldDescriptor* field(int index) const; + + // Looks up a field by declared tag number. Returns NULL if no such field + // exists. + const FieldDescriptor* FindFieldByNumber(int number) const; + // Looks up a field by name. Returns NULL if no such field exists. + const FieldDescriptor* FindFieldByName(const string& name) const; + + // Looks up a field by lowercased name (as returned by lowercase_name()). 
+ // This lookup may be ambiguous if multiple field names differ only by case, + // in which case the field returned is chosen arbitrarily from the matches. + const FieldDescriptor* FindFieldByLowercaseName( + const string& lowercase_name) const; + + // Looks up a field by camel-case name (as returned by camelcase_name()). + // This lookup may be ambiguous if multiple field names differ in a way that + // leads them to have identical camel-case names, in which case the field + // returned is chosen arbitrarily from the matches. + const FieldDescriptor* FindFieldByCamelcaseName( + const string& camelcase_name) const; + + // Nested type stuff ----------------------------------------------- + + // The number of nested types in this message type. + int nested_type_count() const; + // Gets a nested type by index, where 0 <= index < nested_type_count(). + // These are returned in the order they were defined in the .proto file. + const Descriptor* nested_type(int index) const; + + // Looks up a nested type by name. Returns NULL if no such nested type + // exists. + const Descriptor* FindNestedTypeByName(const string& name) const; + + // Enum stuff ------------------------------------------------------ + + // The number of enum types in this message type. + int enum_type_count() const; + // Gets an enum type by index, where 0 <= index < enum_type_count(). + // These are returned in the order they were defined in the .proto file. + const EnumDescriptor* enum_type(int index) const; + + // Looks up an enum type by name. Returns NULL if no such enum type exists. + const EnumDescriptor* FindEnumTypeByName(const string& name) const; + + // Looks up an enum value by name, among all enum types in this message. + // Returns NULL if no such value exists. + const EnumValueDescriptor* FindEnumValueByName(const string& name) const; + + // Extensions ------------------------------------------------------ + + // A range of field numbers which are designated for third-party + // extensions. + struct ExtensionRange { + int start; // inclusive + int end; // exclusive + }; + + // The number of extension ranges in this message type. + int extension_range_count() const; + // Gets an extension range by index, where 0 <= index < + // extension_range_count(). These are returned in the order they were defined + // in the .proto file. + const ExtensionRange* extension_range(int index) const; + + // Returns true if the number is in one of the extension ranges. + bool IsExtensionNumber(int number) const; + + // The number of extensions -- extending *other* messages -- that were + // defined nested within this message type's scope. + int extension_count() const; + // Get an extension by index, where 0 <= index < extension_count(). + // These are returned in the order they were defined in the .proto file. + const FieldDescriptor* extension(int index) const; + + // Looks up a named extension (which extends some *other* message type) + // defined within this message type's scope. + const FieldDescriptor* FindExtensionByName(const string& name) const; + + // Similar to FindFieldByLowercaseName(), but finds extensions defined within + // this message type's scope. + const FieldDescriptor* FindExtensionByLowercaseName(const string& name) const; + + // Similar to FindFieldByCamelcaseName(), but finds extensions defined within + // this message type's scope. 
+ const FieldDescriptor* FindExtensionByCamelcaseName(const string& name) const; + + private: + typedef MessageOptions OptionsType; + + // Internal version of DebugString; controls the level of indenting for + // correct depth + void DebugString(int depth, string *contents) const; + + const string* name_; + const string* full_name_; + const FileDescriptor* file_; + const Descriptor* containing_type_; + const MessageOptions* options_; + + // True if this is a placeholder for an unknown type. + bool is_placeholder_; + // True if this is a placeholder and the type name wasn't fully-qualified. + bool is_unqualified_placeholder_; + + int field_count_; + FieldDescriptor* fields_; + int nested_type_count_; + Descriptor* nested_types_; + int enum_type_count_; + EnumDescriptor* enum_types_; + int extension_range_count_; + ExtensionRange* extension_ranges_; + int extension_count_; + FieldDescriptor* extensions_; + // IMPORTANT: If you add a new field, make sure to search for all instances + // of Allocate() and AllocateArray() in descriptor.cc + // and update them to initialize the field. + + // Must be constructed using DescriptorPool. + Descriptor() {} + friend class DescriptorBuilder; + friend class EnumDescriptor; + friend class FieldDescriptor; + friend class MethodDescriptor; + friend class FileDescriptor; + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Descriptor); +}; + +// Describes a single field of a message. To get the descriptor for a given +// field, first get the Descriptor for the message in which it is defined, +// then call Descriptor::FindFieldByName(). To get a FieldDescriptor for +// an extension, do one of the following: +// - Get the Descriptor or FileDescriptor for its containing scope, then +// call Descriptor::FindExtensionByName() or +// FileDescriptor::FindExtensionByName(). +// - Given a DescriptorPool, call DescriptorPool::FindExtensionByNumber(). +// - Given a Reflection for a message object, call +// Reflection::FindKnownExtensionByName() or +// Reflection::FindKnownExtensionByNumber(). +// Use DescriptorPool to construct your own descriptors. +class LIBPROTOBUF_EXPORT FieldDescriptor { + public: + // Identifies a field type. 0 is reserved for errors. The order is weird + // for historical reasons. Types 12 and up are new in proto2. + enum Type { + TYPE_DOUBLE = 1, // double, exactly eight bytes on the wire. + TYPE_FLOAT = 2, // float, exactly four bytes on the wire. + TYPE_INT64 = 3, // int64, varint on the wire. Negative numbers + // take 10 bytes. Use TYPE_SINT64 if negative + // values are likely. + TYPE_UINT64 = 4, // uint64, varint on the wire. + TYPE_INT32 = 5, // int32, varint on the wire. Negative numbers + // take 10 bytes. Use TYPE_SINT32 if negative + // values are likely. + TYPE_FIXED64 = 6, // uint64, exactly eight bytes on the wire. + TYPE_FIXED32 = 7, // uint32, exactly four bytes on the wire. + TYPE_BOOL = 8, // bool, varint on the wire. + TYPE_STRING = 9, // UTF-8 text. + TYPE_GROUP = 10, // Tag-delimited message. Deprecated. + TYPE_MESSAGE = 11, // Length-delimited message. + + TYPE_BYTES = 12, // Arbitrary byte array. + TYPE_UINT32 = 13, // uint32, varint on the wire + TYPE_ENUM = 14, // Enum, varint on the wire + TYPE_SFIXED32 = 15, // int32, exactly four bytes on the wire + TYPE_SFIXED64 = 16, // int64, exactly eight bytes on the wire + TYPE_SINT32 = 17, // int32, ZigZag-encoded varint on the wire + TYPE_SINT64 = 18, // int64, ZigZag-encoded varint on the wire + + MAX_TYPE = 18, // Constant useful for defining lookup tables + // indexed by Type. 
+ }; + + // Specifies the C++ data type used to represent the field. There is a + // fixed mapping from Type to CppType where each Type maps to exactly one + // CppType. 0 is reserved for errors. + enum CppType { + CPPTYPE_INT32 = 1, // TYPE_INT32, TYPE_SINT32, TYPE_SFIXED32 + CPPTYPE_INT64 = 2, // TYPE_INT64, TYPE_SINT64, TYPE_SFIXED64 + CPPTYPE_UINT32 = 3, // TYPE_UINT32, TYPE_FIXED32 + CPPTYPE_UINT64 = 4, // TYPE_UINT64, TYPE_FIXED64 + CPPTYPE_DOUBLE = 5, // TYPE_DOUBLE + CPPTYPE_FLOAT = 6, // TYPE_FLOAT + CPPTYPE_BOOL = 7, // TYPE_BOOL + CPPTYPE_ENUM = 8, // TYPE_ENUM + CPPTYPE_STRING = 9, // TYPE_STRING, TYPE_BYTES + CPPTYPE_MESSAGE = 10, // TYPE_MESSAGE, TYPE_GROUP + + MAX_CPPTYPE = 10, // Constant useful for defining lookup tables + // indexed by CppType. + }; + + // Identifies whether the field is optional, required, or repeated. 0 is + // reserved for errors. + enum Label { + LABEL_OPTIONAL = 1, // optional + LABEL_REQUIRED = 2, // required + LABEL_REPEATED = 3, // repeated + + MAX_LABEL = 3, // Constant useful for defining lookup tables + // indexed by Label. + }; + + // Valid field numbers are positive integers up to kMaxNumber. + static const int kMaxNumber = (1 << 29) - 1; + + // First field number reserved for the protocol buffer library implementation. + // Users may not declare fields that use reserved numbers. + static const int kFirstReservedNumber = 19000; + // Last field number reserved for the protocol buffer library implementation. + // Users may not declare fields that use reserved numbers. + static const int kLastReservedNumber = 19999; + + const string& name() const; // Name of this field within the message. + const string& full_name() const; // Fully-qualified name of the field. + const FileDescriptor* file() const;// File in which this field was defined. + bool is_extension() const; // Is this an extension field? + int number() const; // Declared tag number. + + // Same as name() except converted to lower-case. This (and especially the + // FindFieldByLowercaseName() method) can be useful when parsing formats + // which prefer to use lowercase naming style. (Although, technically + // field names should be lowercased anyway according to the protobuf style + // guide, so this only makes a difference when dealing with old .proto files + // which do not follow the guide.) + const string& lowercase_name() const; + + // Same as name() except converted to camel-case. In this conversion, any + // time an underscore appears in the name, it is removed and the next + // letter is capitalized. Furthermore, the first letter of the name is + // lower-cased. Examples: + // FooBar -> fooBar + // foo_bar -> fooBar + // fooBar -> fooBar + // This (and especially the FindFieldByCamelcaseName() method) can be useful + // when parsing formats which prefer to use camel-case naming style. + const string& camelcase_name() const; + + Type type() const; // Declared type of this field. + CppType cpp_type() const; // C++ type of this field. + Label label() const; // optional/required/repeated + + bool is_required() const; // shorthand for label() == LABEL_REQUIRED + bool is_optional() const; // shorthand for label() == LABEL_OPTIONAL + bool is_repeated() const; // shorthand for label() == LABEL_REPEATED + bool is_packable() const; // shorthand for is_repeated() && + // IsTypePackable(type()) + + // Index of this field within the message's field array, or the file or + // extension scope's extensions array. 
+ int index() const; + + // Does this field have an explicitly-declared default value? + bool has_default_value() const; + + // Get the field default value if cpp_type() == CPPTYPE_INT32. If no + // explicit default was defined, the default is 0. + int32 default_value_int32() const; + // Get the field default value if cpp_type() == CPPTYPE_INT64. If no + // explicit default was defined, the default is 0. + int64 default_value_int64() const; + // Get the field default value if cpp_type() == CPPTYPE_UINT32. If no + // explicit default was defined, the default is 0. + uint32 default_value_uint32() const; + // Get the field default value if cpp_type() == CPPTYPE_UINT64. If no + // explicit default was defined, the default is 0. + uint64 default_value_uint64() const; + // Get the field default value if cpp_type() == CPPTYPE_FLOAT. If no + // explicit default was defined, the default is 0.0. + float default_value_float() const; + // Get the field default value if cpp_type() == CPPTYPE_DOUBLE. If no + // explicit default was defined, the default is 0.0. + double default_value_double() const; + // Get the field default value if cpp_type() == CPPTYPE_BOOL. If no + // explicit default was defined, the default is false. + bool default_value_bool() const; + // Get the field default value if cpp_type() == CPPTYPE_ENUM. If no + // explicit default was defined, the default is the first value defined + // in the enum type (all enum types are required to have at least one value). + // This never returns NULL. + const EnumValueDescriptor* default_value_enum() const; + // Get the field default value if cpp_type() == CPPTYPE_STRING. If no + // explicit default was defined, the default is the empty string. + const string& default_value_string() const; + + // The Descriptor for the message of which this is a field. For extensions, + // this is the extended type. Never NULL. + const Descriptor* containing_type() const; + + // An extension may be declared within the scope of another message. If this + // field is an extension (is_extension() is true), then extension_scope() + // returns that message, or NULL if the extension was declared at global + // scope. If this is not an extension, extension_scope() is undefined (may + // assert-fail). + const Descriptor* extension_scope() const; + + // If type is TYPE_MESSAGE or TYPE_GROUP, returns a descriptor for the + // message or the group type. Otherwise, undefined. + const Descriptor* message_type() const; + // If type is TYPE_ENUM, returns a descriptor for the enum. Otherwise, + // undefined. + const EnumDescriptor* enum_type() const; + + // EXPERIMENTAL; DO NOT USE. + // If this field is a map field, experimental_map_key() is the field + // that is the key for this map. + // experimental_map_key()->containing_type() is the same as message_type(). + const FieldDescriptor* experimental_map_key() const; + + // Get the FieldOptions for this field. This includes things listed in + // square brackets after the field definition. E.g., the field: + // optional string text = 1 [ctype=CORD]; + // has the "ctype" option set. Allowed options are defined by FieldOptions + // in google/protobuf/descriptor.proto, and any available extensions of that + // message. + const FieldOptions& options() const; + + // See Descriptor::CopyTo(). + void CopyTo(FieldDescriptorProto* proto) const; + + // See Descriptor::DebugString(). + string DebugString() const; + + // Helper method to get the CppType for a particular Type. 
+ static CppType TypeToCppType(Type type); + + // Return true iff [packed = true] is valid for fields of this type. + static inline bool IsTypePackable(Type field_type); + + private: + typedef FieldOptions OptionsType; + + // See Descriptor::DebugString(). + void DebugString(int depth, string *contents) const; + + // formats the default value appropriately and returns it as a string. + // Must have a default value to call this. If quote_string_type is true, then + // types of CPPTYPE_STRING whill be surrounded by quotes and CEscaped. + string DefaultValueAsString(bool quote_string_type) const; + + const string* name_; + const string* full_name_; + const string* lowercase_name_; + const string* camelcase_name_; + const FileDescriptor* file_; + int number_; + Type type_; + Label label_; + bool is_extension_; + const Descriptor* containing_type_; + const Descriptor* extension_scope_; + const Descriptor* message_type_; + const EnumDescriptor* enum_type_; + const FieldDescriptor* experimental_map_key_; + const FieldOptions* options_; + // IMPORTANT: If you add a new field, make sure to search for all instances + // of Allocate() and AllocateArray() in + // descriptor.cc and update them to initialize the field. + + bool has_default_value_; + union { + int32 default_value_int32_; + int64 default_value_int64_; + uint32 default_value_uint32_; + uint64 default_value_uint64_; + float default_value_float_; + double default_value_double_; + bool default_value_bool_; + + const EnumValueDescriptor* default_value_enum_; + const string* default_value_string_; + }; + + static const CppType kTypeToCppTypeMap[MAX_TYPE + 1]; + + static const char * const kTypeToName[MAX_TYPE + 1]; + + static const char * const kLabelToName[MAX_LABEL + 1]; + + // Must be constructed using DescriptorPool. + FieldDescriptor() {} + friend class DescriptorBuilder; + friend class FileDescriptor; + friend class Descriptor; + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FieldDescriptor); +}; + +// Describes an enum type defined in a .proto file. To get the EnumDescriptor +// for a generated enum type, call TypeName_descriptor(). Use DescriptorPool +// to construct your own descriptors. +class LIBPROTOBUF_EXPORT EnumDescriptor { + public: + // The name of this enum type in the containing scope. + const string& name() const; + + // The fully-qualified name of the enum type, scope delimited by periods. + const string& full_name() const; + + // Index of this enum within the file or containing message's enum array. + int index() const; + + // The .proto file in which this enum type was defined. Never NULL. + const FileDescriptor* file() const; + + // The number of values for this EnumDescriptor. Guaranteed to be greater + // than zero. + int value_count() const; + // Gets a value by index, where 0 <= index < value_count(). + // These are returned in the order they were defined in the .proto file. + const EnumValueDescriptor* value(int index) const; + + // Looks up a value by name. Returns NULL if no such value exists. + const EnumValueDescriptor* FindValueByName(const string& name) const; + // Looks up a value by number. Returns NULL if no such value exists. If + // multiple values have this number, the first one defined is returned. + const EnumValueDescriptor* FindValueByNumber(int number) const; + + // If this enum type is nested in a message type, this is that message type. + // Otherwise, NULL. + const Descriptor* containing_type() const; + + // Get options for this enum type. 
These are specified in the .proto file by + // placing lines like "option foo = 1234;" in the enum definition. Allowed + // options are defined by EnumOptions in google/protobuf/descriptor.proto, + // and any available extensions of that message. + const EnumOptions& options() const; + + // See Descriptor::CopyTo(). + void CopyTo(EnumDescriptorProto* proto) const; + + // See Descriptor::DebugString(). + string DebugString() const; + + private: + typedef EnumOptions OptionsType; + + // See Descriptor::DebugString(). + void DebugString(int depth, string *contents) const; + + const string* name_; + const string* full_name_; + const FileDescriptor* file_; + const Descriptor* containing_type_; + const EnumOptions* options_; + + // True if this is a placeholder for an unknown type. + bool is_placeholder_; + // True if this is a placeholder and the type name wasn't fully-qualified. + bool is_unqualified_placeholder_; + + int value_count_; + EnumValueDescriptor* values_; + // IMPORTANT: If you add a new field, make sure to search for all instances + // of Allocate() and AllocateArray() in + // descriptor.cc and update them to initialize the field. + + // Must be constructed using DescriptorPool. + EnumDescriptor() {} + friend class DescriptorBuilder; + friend class Descriptor; + friend class FieldDescriptor; + friend class EnumValueDescriptor; + friend class FileDescriptor; + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(EnumDescriptor); +}; + +// Describes an individual enum constant of a particular type. To get the +// EnumValueDescriptor for a given enum value, first get the EnumDescriptor +// for its type, then use EnumDescriptor::FindValueByName() or +// EnumDescriptor::FindValueByNumber(). Use DescriptorPool to construct +// your own descriptors. +class LIBPROTOBUF_EXPORT EnumValueDescriptor { + public: + const string& name() const; // Name of this enum constant. + int index() const; // Index within the enums's Descriptor. + int number() const; // Numeric value of this enum constant. + + // The full_name of an enum value is a sibling symbol of the enum type. + // e.g. the full name of FieldDescriptorProto::TYPE_INT32 is actually + // "google.protobuf.FieldDescriptorProto.TYPE_INT32", NOT + // "google.protobuf.FieldDescriptorProto.Type.TYPE_INT32". This is to conform + // with C++ scoping rules for enums. + const string& full_name() const; + + // The type of this value. Never NULL. + const EnumDescriptor* type() const; + + // Get options for this enum value. These are specified in the .proto file + // by adding text like "[foo = 1234]" after an enum value definition. + // Allowed options are defined by EnumValueOptions in + // google/protobuf/descriptor.proto, and any available extensions of that + // message. + const EnumValueOptions& options() const; + + // See Descriptor::CopyTo(). + void CopyTo(EnumValueDescriptorProto* proto) const; + + // See Descriptor::DebugString(). + string DebugString() const; + + private: + typedef EnumValueOptions OptionsType; + + // See Descriptor::DebugString(). + void DebugString(int depth, string *contents) const; + + const string* name_; + const string* full_name_; + int number_; + const EnumDescriptor* type_; + const EnumValueOptions* options_; + // IMPORTANT: If you add a new field, make sure to search for all instances + // of Allocate() and AllocateArray() + // in descriptor.cc and update them to initialize the field. + + // Must be constructed using DescriptorPool. 
+ EnumValueDescriptor() {} + friend class DescriptorBuilder; + friend class EnumDescriptor; + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(EnumValueDescriptor); +}; + +// Describes an RPC service. To get the ServiceDescriptor for a service, +// call Service::GetDescriptor(). Generated service classes also have a +// static method called descriptor() which returns the type's +// ServiceDescriptor. Use DescriptorPool to construct your own descriptors. +class LIBPROTOBUF_EXPORT ServiceDescriptor { + public: + // The name of the service, not including its containing scope. + const string& name() const; + // The fully-qualified name of the service, scope delimited by periods. + const string& full_name() const; + // Index of this service within the file's services array. + int index() const; + + // The .proto file in which this service was defined. Never NULL. + const FileDescriptor* file() const; + + // Get options for this service type. These are specified in the .proto file + // by placing lines like "option foo = 1234;" in the service definition. + // Allowed options are defined by ServiceOptions in + // google/protobuf/descriptor.proto, and any available extensions of that + // message. + const ServiceOptions& options() const; + + // The number of methods this service defines. + int method_count() const; + // Gets a MethodDescriptor by index, where 0 <= index < method_count(). + // These are returned in the order they were defined in the .proto file. + const MethodDescriptor* method(int index) const; + + // Look up a MethodDescriptor by name. + const MethodDescriptor* FindMethodByName(const string& name) const; + + // See Descriptor::CopyTo(). + void CopyTo(ServiceDescriptorProto* proto) const; + + // See Descriptor::DebugString(). + string DebugString() const; + + private: + typedef ServiceOptions OptionsType; + + // See Descriptor::DebugString(). + void DebugString(string *contents) const; + + const string* name_; + const string* full_name_; + const FileDescriptor* file_; + const ServiceOptions* options_; + int method_count_; + MethodDescriptor* methods_; + // IMPORTANT: If you add a new field, make sure to search for all instances + // of Allocate() and AllocateArray() in + // descriptor.cc and update them to initialize the field. + + // Must be constructed using DescriptorPool. + ServiceDescriptor() {} + friend class DescriptorBuilder; + friend class FileDescriptor; + friend class MethodDescriptor; + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ServiceDescriptor); +}; + +// Describes an individual service method. To obtain a MethodDescriptor given +// a service, first get its ServiceDescriptor, then call +// ServiceDescriptor::FindMethodByName(). Use DescriptorPool to construct your +// own descriptors. +class LIBPROTOBUF_EXPORT MethodDescriptor { + public: + // Name of this method, not including containing scope. + const string& name() const; + // The fully-qualified name of the method, scope delimited by periods. + const string& full_name() const; + // Index within the service's Descriptor. + int index() const; + + // Gets the service to which this method belongs. Never NULL. + const ServiceDescriptor* service() const; + + // Gets the type of protocol message which this method accepts as input. + const Descriptor* input_type() const; + // Gets the type of protocol message which this message produces as output. + const Descriptor* output_type() const; + + // Get options for this method. 
These are specified in the .proto file by + // placing lines like "option foo = 1234;" in curly-braces after a method + // declaration. Allowed options are defined by MethodOptions in + // google/protobuf/descriptor.proto, and any available extensions of that + // message. + const MethodOptions& options() const; + + // See Descriptor::CopyTo(). + void CopyTo(MethodDescriptorProto* proto) const; + + // See Descriptor::DebugString(). + string DebugString() const; + + private: + typedef MethodOptions OptionsType; + + // See Descriptor::DebugString(). + void DebugString(int depth, string *contents) const; + + const string* name_; + const string* full_name_; + const ServiceDescriptor* service_; + const Descriptor* input_type_; + const Descriptor* output_type_; + const MethodOptions* options_; + // IMPORTANT: If you add a new field, make sure to search for all instances + // of Allocate() and AllocateArray() in + // descriptor.cc and update them to initialize the field. + + // Must be constructed using DescriptorPool. + MethodDescriptor() {} + friend class DescriptorBuilder; + friend class ServiceDescriptor; + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(MethodDescriptor); +}; + +// Describes a whole .proto file. To get the FileDescriptor for a compiled-in +// file, get the descriptor for something defined in that file and call +// descriptor->file(). Use DescriptorPool to construct your own descriptors. +class LIBPROTOBUF_EXPORT FileDescriptor { + public: + // The filename, relative to the source tree. + // e.g. "google/protobuf/descriptor.proto" + const string& name() const; + + // The package, e.g. "google.protobuf.compiler". + const string& package() const; + + // The DescriptorPool in which this FileDescriptor and all its contents were + // allocated. Never NULL. + const DescriptorPool* pool() const; + + // The number of files imported by this one. + int dependency_count() const; + // Gets an imported file by index, where 0 <= index < dependency_count(). + // These are returned in the order they were defined in the .proto file. + const FileDescriptor* dependency(int index) const; + + // Number of top-level message types defined in this file. (This does not + // include nested types.) + int message_type_count() const; + // Gets a top-level message type, where 0 <= index < message_type_count(). + // These are returned in the order they were defined in the .proto file. + const Descriptor* message_type(int index) const; + + // Number of top-level enum types defined in this file. (This does not + // include nested types.) + int enum_type_count() const; + // Gets a top-level enum type, where 0 <= index < enum_type_count(). + // These are returned in the order they were defined in the .proto file. + const EnumDescriptor* enum_type(int index) const; + + // Number of services defined in this file. + int service_count() const; + // Gets a service, where 0 <= index < service_count(). + // These are returned in the order they were defined in the .proto file. + const ServiceDescriptor* service(int index) const; + + // Number of extensions defined at file scope. (This does not include + // extensions nested within message types.) + int extension_count() const; + // Gets an extension's descriptor, where 0 <= index < extension_count(). + // These are returned in the order they were defined in the .proto file. + const FieldDescriptor* extension(int index) const; + + // Get options for this file. 
These are specified in the .proto file by + // placing lines like "option foo = 1234;" at the top level, outside of any + // other definitions. Allowed options are defined by FileOptions in + // google/protobuf/descriptor.proto, and any available extensions of that + // message. + const FileOptions& options() const; + + // Find a top-level message type by name. Returns NULL if not found. + const Descriptor* FindMessageTypeByName(const string& name) const; + // Find a top-level enum type by name. Returns NULL if not found. + const EnumDescriptor* FindEnumTypeByName(const string& name) const; + // Find an enum value defined in any top-level enum by name. Returns NULL if + // not found. + const EnumValueDescriptor* FindEnumValueByName(const string& name) const; + // Find a service definition by name. Returns NULL if not found. + const ServiceDescriptor* FindServiceByName(const string& name) const; + // Find a top-level extension definition by name. Returns NULL if not found. + const FieldDescriptor* FindExtensionByName(const string& name) const; + // Similar to FindExtensionByName(), but searches by lowercased-name. See + // Descriptor::FindFieldByLowercaseName(). + const FieldDescriptor* FindExtensionByLowercaseName(const string& name) const; + // Similar to FindExtensionByName(), but searches by camelcased-name. See + // Descriptor::FindFieldByCamelcaseName(). + const FieldDescriptor* FindExtensionByCamelcaseName(const string& name) const; + + // See Descriptor::CopyTo(). + void CopyTo(FileDescriptorProto* proto) const; + + // See Descriptor::DebugString(). + string DebugString() const; + + private: + typedef FileOptions OptionsType; + + const string* name_; + const string* package_; + const DescriptorPool* pool_; + int dependency_count_; + const FileDescriptor** dependencies_; + int message_type_count_; + Descriptor* message_types_; + int enum_type_count_; + EnumDescriptor* enum_types_; + int service_count_; + ServiceDescriptor* services_; + int extension_count_; + FieldDescriptor* extensions_; + const FileOptions* options_; + + const FileDescriptorTables* tables_; + // IMPORTANT: If you add a new field, make sure to search for all instances + // of Allocate() and AllocateArray() in + // descriptor.cc and update them to initialize the field. + + FileDescriptor() {} + friend class DescriptorBuilder; + friend class Descriptor; + friend class FieldDescriptor; + friend class EnumDescriptor; + friend class ServiceDescriptor; + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FileDescriptor); +}; + +// =================================================================== + +// Used to construct descriptors. +// +// Normally you won't want to build your own descriptors. Message classes +// constructed by the protocol compiler will provide them for you. However, +// if you are implementing Message on your own, or if you are writing a +// program which can operate on totally arbitrary types and needs to load +// them from some sort of database, you might need to. +// +// Since Descriptors are composed of a whole lot of cross-linked bits of +// data that would be a pain to put together manually, the +// DescriptorPool class is provided to make the process easier. It can +// take a FileDescriptorProto (defined in descriptor.proto), validate it, +// and convert it to a set of nicely cross-linked Descriptors. +// +// DescriptorPool also helps with memory management. Descriptors are +// composed of many objects containing static data and pointers to each +// other. 
In all likelihood, when it comes time to delete this data, +// you'll want to delete it all at once. In fact, it is not uncommon to +// have a whole pool of descriptors all cross-linked with each other which +// you wish to delete all at once. This class represents such a pool, and +// handles the memory management for you. +// +// You can also search for descriptors within a DescriptorPool by name, and +// extensions by number. +class LIBPROTOBUF_EXPORT DescriptorPool { + public: + // Create a normal, empty DescriptorPool. + DescriptorPool(); + + // Constructs a DescriptorPool that, when it can't find something among the + // descriptors already in the pool, looks for it in the given + // DescriptorDatabase. + // Notes: + // - If a DescriptorPool is constructed this way, its BuildFile*() methods + // must not be called (they will assert-fail). The only way to populate + // the pool with descriptors is to call the Find*By*() methods. + // - The Find*By*() methods may block the calling thread if the + // DescriptorDatabase blocks. This in turn means that parsing messages + // may block if they need to look up extensions. + // - The Find*By*() methods will use mutexes for thread-safety, thus making + // them slower even when they don't have to fall back to the database. + // In fact, even the Find*By*() methods of descriptor objects owned by + // this pool will be slower, since they will have to obtain locks too. + // - An ErrorCollector may optionally be given to collect validation errors + // in files loaded from the database. If not given, errors will be printed + // to GOOGLE_LOG(ERROR). Remember that files are built on-demand, so this + // ErrorCollector may be called from any thread that calls one of the + // Find*By*() methods. + class ErrorCollector; + explicit DescriptorPool(DescriptorDatabase* fallback_database, + ErrorCollector* error_collector = NULL); + + ~DescriptorPool(); + + // Get a pointer to the generated pool. Generated protocol message classes + // which are compiled into the binary will allocate their descriptors in + // this pool. Do not add your own descriptors to this pool. + static const DescriptorPool* generated_pool(); + + // Find a FileDescriptor in the pool by file name. Returns NULL if not + // found. + const FileDescriptor* FindFileByName(const string& name) const; + + // Find the FileDescriptor in the pool which defines the given symbol. + // If any of the Find*ByName() methods below would succeed, then this is + // equivalent to calling that method and calling the result's file() method. + // Otherwise this returns NULL. + const FileDescriptor* FindFileContainingSymbol( + const string& symbol_name) const; + + // Looking up descriptors ------------------------------------------ + // These find descriptors by fully-qualified name. These will find both + // top-level descriptors and nested descriptors. They return NULL if not + // found. + + const Descriptor* FindMessageTypeByName(const string& name) const; + const FieldDescriptor* FindFieldByName(const string& name) const; + const FieldDescriptor* FindExtensionByName(const string& name) const; + const EnumDescriptor* FindEnumTypeByName(const string& name) const; + const EnumValueDescriptor* FindEnumValueByName(const string& name) const; + const ServiceDescriptor* FindServiceByName(const string& name) const; + const MethodDescriptor* FindMethodByName(const string& name) const; + + // Finds an extension of the given type by number. 
The extendee must be
+  // a member of this DescriptorPool or one of its underlays.
+  const FieldDescriptor* FindExtensionByNumber(const Descriptor* extendee,
+                                               int number) const;
+
+  // Finds extensions of extendee. The extensions will be appended to
+  // out in an undefined order. Only extensions defined directly in
+  // this DescriptorPool or one of its underlays are guaranteed to be
+  // found: extensions defined in the fallback database might not be found
+  // depending on the database implementation.
+  void FindAllExtensions(const Descriptor* extendee,
+                         vector<const FieldDescriptor*>* out) const;
+
+  // Building descriptors --------------------------------------------
+
+  // When converting a FileDescriptorProto to a FileDescriptor, various
+  // errors might be detected in the input. The caller may handle these
+  // programmatically by implementing an ErrorCollector.
+  class LIBPROTOBUF_EXPORT ErrorCollector {
+   public:
+    inline ErrorCollector() {}
+    virtual ~ErrorCollector();
+
+    // These constants specify what exact part of the construct is broken.
+    // This is useful e.g. for mapping the error back to an exact location
+    // in a .proto file.
+    enum ErrorLocation {
+      NAME,           // the symbol name, or the package name for files
+      NUMBER,         // field or extension range number
+      TYPE,           // field type
+      EXTENDEE,       // field extendee
+      DEFAULT_VALUE,  // field default value
+      INPUT_TYPE,     // method input type
+      OUTPUT_TYPE,    // method output type
+      OPTION_NAME,    // name in assignment
+      OPTION_VALUE,   // value in option assignment
+      OTHER           // some other problem
+    };
+
+    // Reports an error in the FileDescriptorProto.
+    virtual void AddError(
+      const string& filename,      // File name in which the error occurred.
+      const string& element_name,  // Full name of the erroneous element.
+      const Message* descriptor,   // Descriptor of the erroneous element.
+      ErrorLocation location,      // One of the location constants, above.
+      const string& message        // Human-readable error message.
+      ) = 0;
+
+   private:
+    GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ErrorCollector);
+  };
+
+  // Convert the FileDescriptorProto to real descriptors and place them in
+  // this DescriptorPool. All dependencies of the file must already be in
+  // the pool. Returns the resulting FileDescriptor, or NULL if there were
+  // problems with the input (e.g. the message was invalid, or dependencies
+  // were missing). Details about the errors are written to GOOGLE_LOG(ERROR).
+  const FileDescriptor* BuildFile(const FileDescriptorProto& proto);
+
+  // Same as BuildFile() except errors are sent to the given ErrorCollector.
+  const FileDescriptor* BuildFileCollectingErrors(
+    const FileDescriptorProto& proto,
+    ErrorCollector* error_collector);
+
+  // By default, it is an error if a FileDescriptorProto contains references
+  // to types or other files that are not found in the DescriptorPool (or its
+  // backing DescriptorDatabase, if any). If you call
+  // AllowUnknownDependencies(), however, then unknown types and files
+  // will be replaced by placeholder descriptors. This can allow you to
+  // perform some useful operations with a .proto file even if you do not
+  // have access to other .proto files on which it depends. However, some
+  // heuristics must be used to fill in the gaps in information, and these
+  // can lead to descriptors which are inaccurate. For example, the
+  // DescriptorPool may be forced to guess whether an unknown type is a message
+  // or an enum, as well as what package it resides in. 
Furthermore, + // placeholder types will not be discoverable via FindMessageTypeByName() + // and similar methods, which could confuse some descriptor-based algorithms. + // Generally, the results of this option should only be relied upon for + // debugging purposes. + void AllowUnknownDependencies() { allow_unknown_ = true; } + + // Internal stuff -------------------------------------------------- + // These methods MUST NOT be called from outside the proto2 library. + // These methods may contain hidden pitfalls and may be removed in a + // future library version. + + // Create a DescriptorPool which is overlaid on top of some other pool. + // If you search for a descriptor in the overlay and it is not found, the + // underlay will be searched as a backup. If the underlay has its own + // underlay, that will be searched next, and so on. This also means that + // files built in the overlay will be cross-linked with the underlay's + // descriptors if necessary. The underlay remains property of the caller; + // it must remain valid for the lifetime of the newly-constructed pool. + // + // Example: Say you want to parse a .proto file at runtime in order to use + // its type with a DynamicMessage. Say this .proto file has dependencies, + // but you know that all the dependencies will be things that are already + // compiled into the binary. For ease of use, you'd like to load the types + // right out of generated_pool() rather than have to parse redundant copies + // of all these .protos and runtime. But, you don't want to add the parsed + // types directly into generated_pool(): this is not allowed, and would be + // bad design anyway. So, instead, you could use generated_pool() as an + // underlay for a new DescriptorPool in which you add only the new file. + // + // WARNING: Use of underlays can lead to many subtle gotchas. Instead, + // try to formulate what you want to do in terms of DescriptorDatabases. + explicit DescriptorPool(const DescriptorPool* underlay); + + // Called by generated classes at init time to add their descriptors to + // generated_pool. Do NOT call this in your own code! filename must be a + // permanent string (e.g. a string literal). + static void InternalAddGeneratedFile( + const void* encoded_file_descriptor, int size); + + + // For internal use only: Gets a non-const pointer to the generated pool. + // This is called at static-initialization time only, so thread-safety is + // not a concern. If both an underlay and a fallback database are present, + // the fallback database takes precedence. + static DescriptorPool* internal_generated_pool(); + + // For internal use only: Changes the behavior of BuildFile() such that it + // allows the file to make reference to message types declared in other files + // which it did not officially declare as dependencies. + void InternalDontEnforceDependencies(); + + // For internal use only. + void internal_set_underlay(const DescriptorPool* underlay) { + underlay_ = underlay; + } + + // For internal (unit test) use only: Returns true if a FileDescriptor has + // been constructed for the given file, false otherwise. Useful for testing + // lazy descriptor initialization behavior. 
+  bool InternalIsFileLoaded(const string& filename) const;
+
+ private:
+  friend class Descriptor;
+  friend class FieldDescriptor;
+  friend class EnumDescriptor;
+  friend class ServiceDescriptor;
+  friend class FileDescriptor;
+  friend class DescriptorBuilder;
+
+  // Tries to find something in the fallback database and link in the
+  // corresponding proto file. Returns true if successful, in which case
+  // the caller should search for the thing again. These are declared
+  // const because they are called by (semantically) const methods.
+  bool TryFindFileInFallbackDatabase(const string& name) const;
+  bool TryFindSymbolInFallbackDatabase(const string& name) const;
+  bool TryFindExtensionInFallbackDatabase(const Descriptor* containing_type,
+                                          int field_number) const;
+
+  // Like BuildFile() but called internally when the file has been loaded from
+  // fallback_database_. Declared const because it is called by (semantically)
+  // const methods.
+  const FileDescriptor* BuildFileFromDatabase(
+      const FileDescriptorProto& proto) const;
+
+  // If fallback_database_ is NULL, this is NULL. Otherwise, this is a mutex
+  // which must be locked while accessing tables_.
+  Mutex* mutex_;
+
+  // See constructor.
+  DescriptorDatabase* fallback_database_;
+  ErrorCollector* default_error_collector_;
+  const DescriptorPool* underlay_;
+
+  // This class contains a lot of hash maps with complicated types that
+  // we'd like to keep out of the header.
+  class Tables;
+  scoped_ptr<Tables> tables_;
+
+  bool enforce_dependencies_;
+  bool allow_unknown_;
+
+  GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(DescriptorPool);
+};
+
+// inline methods ====================================================
+
+// These macros make this repetitive code more readable.
+#define PROTOBUF_DEFINE_ACCESSOR(CLASS, FIELD, TYPE) \
+  inline TYPE CLASS::FIELD() const { return FIELD##_; }
+
+// String fields are stored as pointers but returned as const references.
+#define PROTOBUF_DEFINE_STRING_ACCESSOR(CLASS, FIELD) \
+  inline const string& CLASS::FIELD() const { return *FIELD##_; }
+
+// Arrays take an index parameter, obviously. 
+#define PROTOBUF_DEFINE_ARRAY_ACCESSOR(CLASS, FIELD, TYPE) \ + inline TYPE CLASS::FIELD(int index) const { return FIELD##s_ + index; } + +#define PROTOBUF_DEFINE_OPTIONS_ACCESSOR(CLASS, TYPE) \ + inline const TYPE& CLASS::options() const { return *options_; } + +PROTOBUF_DEFINE_STRING_ACCESSOR(Descriptor, name) +PROTOBUF_DEFINE_STRING_ACCESSOR(Descriptor, full_name) +PROTOBUF_DEFINE_ACCESSOR(Descriptor, file, const FileDescriptor*) +PROTOBUF_DEFINE_ACCESSOR(Descriptor, containing_type, const Descriptor*) + +PROTOBUF_DEFINE_ACCESSOR(Descriptor, field_count, int) +PROTOBUF_DEFINE_ACCESSOR(Descriptor, nested_type_count, int) +PROTOBUF_DEFINE_ACCESSOR(Descriptor, enum_type_count, int) + +PROTOBUF_DEFINE_ARRAY_ACCESSOR(Descriptor, field, const FieldDescriptor*) +PROTOBUF_DEFINE_ARRAY_ACCESSOR(Descriptor, nested_type, const Descriptor*) +PROTOBUF_DEFINE_ARRAY_ACCESSOR(Descriptor, enum_type, const EnumDescriptor*) + +PROTOBUF_DEFINE_ACCESSOR(Descriptor, extension_range_count, int) +PROTOBUF_DEFINE_ACCESSOR(Descriptor, extension_count, int) +PROTOBUF_DEFINE_ARRAY_ACCESSOR(Descriptor, extension_range, + const Descriptor::ExtensionRange*) +PROTOBUF_DEFINE_ARRAY_ACCESSOR(Descriptor, extension, + const FieldDescriptor*) +PROTOBUF_DEFINE_OPTIONS_ACCESSOR(Descriptor, MessageOptions); + +PROTOBUF_DEFINE_STRING_ACCESSOR(FieldDescriptor, name) +PROTOBUF_DEFINE_STRING_ACCESSOR(FieldDescriptor, full_name) +PROTOBUF_DEFINE_STRING_ACCESSOR(FieldDescriptor, lowercase_name) +PROTOBUF_DEFINE_STRING_ACCESSOR(FieldDescriptor, camelcase_name) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, file, const FileDescriptor*) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, number, int) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, is_extension, bool) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, type, FieldDescriptor::Type) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, label, FieldDescriptor::Label) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, containing_type, const Descriptor*) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, extension_scope, const Descriptor*) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, message_type, const Descriptor*) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, enum_type, const EnumDescriptor*) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, experimental_map_key, + const FieldDescriptor*) +PROTOBUF_DEFINE_OPTIONS_ACCESSOR(FieldDescriptor, FieldOptions); +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, has_default_value, bool) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, default_value_int32 , int32 ) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, default_value_int64 , int64 ) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, default_value_uint32, uint32) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, default_value_uint64, uint64) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, default_value_float , float ) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, default_value_double, double) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, default_value_bool , bool ) +PROTOBUF_DEFINE_ACCESSOR(FieldDescriptor, default_value_enum, + const EnumValueDescriptor*) +PROTOBUF_DEFINE_STRING_ACCESSOR(FieldDescriptor, default_value_string) + +PROTOBUF_DEFINE_STRING_ACCESSOR(EnumDescriptor, name) +PROTOBUF_DEFINE_STRING_ACCESSOR(EnumDescriptor, full_name) +PROTOBUF_DEFINE_ACCESSOR(EnumDescriptor, file, const FileDescriptor*) +PROTOBUF_DEFINE_ACCESSOR(EnumDescriptor, containing_type, const Descriptor*) +PROTOBUF_DEFINE_ACCESSOR(EnumDescriptor, value_count, int) +PROTOBUF_DEFINE_ARRAY_ACCESSOR(EnumDescriptor, value, + const EnumValueDescriptor*) +PROTOBUF_DEFINE_OPTIONS_ACCESSOR(EnumDescriptor, 
EnumOptions); + +PROTOBUF_DEFINE_STRING_ACCESSOR(EnumValueDescriptor, name) +PROTOBUF_DEFINE_STRING_ACCESSOR(EnumValueDescriptor, full_name) +PROTOBUF_DEFINE_ACCESSOR(EnumValueDescriptor, number, int) +PROTOBUF_DEFINE_ACCESSOR(EnumValueDescriptor, type, const EnumDescriptor*) +PROTOBUF_DEFINE_OPTIONS_ACCESSOR(EnumValueDescriptor, EnumValueOptions); + +PROTOBUF_DEFINE_STRING_ACCESSOR(ServiceDescriptor, name) +PROTOBUF_DEFINE_STRING_ACCESSOR(ServiceDescriptor, full_name) +PROTOBUF_DEFINE_ACCESSOR(ServiceDescriptor, file, const FileDescriptor*) +PROTOBUF_DEFINE_ACCESSOR(ServiceDescriptor, method_count, int) +PROTOBUF_DEFINE_ARRAY_ACCESSOR(ServiceDescriptor, method, + const MethodDescriptor*) +PROTOBUF_DEFINE_OPTIONS_ACCESSOR(ServiceDescriptor, ServiceOptions); + +PROTOBUF_DEFINE_STRING_ACCESSOR(MethodDescriptor, name) +PROTOBUF_DEFINE_STRING_ACCESSOR(MethodDescriptor, full_name) +PROTOBUF_DEFINE_ACCESSOR(MethodDescriptor, service, const ServiceDescriptor*) +PROTOBUF_DEFINE_ACCESSOR(MethodDescriptor, input_type, const Descriptor*) +PROTOBUF_DEFINE_ACCESSOR(MethodDescriptor, output_type, const Descriptor*) +PROTOBUF_DEFINE_OPTIONS_ACCESSOR(MethodDescriptor, MethodOptions); + +PROTOBUF_DEFINE_STRING_ACCESSOR(FileDescriptor, name) +PROTOBUF_DEFINE_STRING_ACCESSOR(FileDescriptor, package) +PROTOBUF_DEFINE_ACCESSOR(FileDescriptor, pool, const DescriptorPool*) +PROTOBUF_DEFINE_ACCESSOR(FileDescriptor, dependency_count, int) +PROTOBUF_DEFINE_ACCESSOR(FileDescriptor, message_type_count, int) +PROTOBUF_DEFINE_ACCESSOR(FileDescriptor, enum_type_count, int) +PROTOBUF_DEFINE_ACCESSOR(FileDescriptor, service_count, int) +PROTOBUF_DEFINE_ACCESSOR(FileDescriptor, extension_count, int) +PROTOBUF_DEFINE_OPTIONS_ACCESSOR(FileDescriptor, FileOptions); + +PROTOBUF_DEFINE_ARRAY_ACCESSOR(FileDescriptor, message_type, const Descriptor*) +PROTOBUF_DEFINE_ARRAY_ACCESSOR(FileDescriptor, enum_type, const EnumDescriptor*) +PROTOBUF_DEFINE_ARRAY_ACCESSOR(FileDescriptor, service, + const ServiceDescriptor*) +PROTOBUF_DEFINE_ARRAY_ACCESSOR(FileDescriptor, extension, + const FieldDescriptor*) + +#undef PROTOBUF_DEFINE_ACCESSOR +#undef PROTOBUF_DEFINE_STRING_ACCESSOR +#undef PROTOBUF_DEFINE_ARRAY_ACCESSOR + +// A few accessors differ from the macros... + +inline bool FieldDescriptor::is_required() const { + return label() == LABEL_REQUIRED; +} + +inline bool FieldDescriptor::is_optional() const { + return label() == LABEL_OPTIONAL; +} + +inline bool FieldDescriptor::is_repeated() const { + return label() == LABEL_REPEATED; +} + +inline bool FieldDescriptor::is_packable() const { + return is_repeated() && IsTypePackable(type()); +} + +// To save space, index() is computed by looking at the descriptor's position +// in the parent's array of children. 
+inline int FieldDescriptor::index() const { + if (!is_extension_) { + return this - containing_type_->fields_; + } else if (extension_scope_ != NULL) { + return this - extension_scope_->extensions_; + } else { + return this - file_->extensions_; + } +} + +inline int Descriptor::index() const { + if (containing_type_ == NULL) { + return this - file_->message_types_; + } else { + return this - containing_type_->nested_types_; + } +} + +inline int EnumDescriptor::index() const { + if (containing_type_ == NULL) { + return this - file_->enum_types_; + } else { + return this - containing_type_->enum_types_; + } +} + +inline int EnumValueDescriptor::index() const { + return this - type_->values_; +} + +inline int ServiceDescriptor::index() const { + return this - file_->services_; +} + +inline int MethodDescriptor::index() const { + return this - service_->methods_; +} + +inline FieldDescriptor::CppType FieldDescriptor::cpp_type() const { + return kTypeToCppTypeMap[type_]; +} + +inline FieldDescriptor::CppType FieldDescriptor::TypeToCppType(Type type) { + return kTypeToCppTypeMap[type]; +} + +inline bool FieldDescriptor::IsTypePackable(Type field_type) { + return (field_type != FieldDescriptor::TYPE_STRING && + field_type != FieldDescriptor::TYPE_GROUP && + field_type != FieldDescriptor::TYPE_MESSAGE && + field_type != FieldDescriptor::TYPE_BYTES); +} + +inline const FileDescriptor* FileDescriptor::dependency(int index) const { + return dependencies_[index]; +} + +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_DESCRIPTOR_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor.pb.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor.pb.cc new file mode 100644 index 0000000000..87303ef87b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor.pb.cc @@ -0,0 +1,7708 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
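The descriptor API declared in the header above can also be exercised at runtime: a FileDescriptorProto is filled in by hand, handed to a DescriptorPool for validation and cross-linking, and the resulting descriptors are then looked up by fully-qualified name. Below is a minimal sketch, assuming the vendored protobuf headers are on the include path; "demo.proto", "Ping", "id" and StderrErrorCollector are made-up names used only for illustration.

#include <cstdio>
#include <string>

#include <google/protobuf/descriptor.h>
#include <google/protobuf/descriptor.pb.h>

using namespace google::protobuf;

// Hypothetical collector that just prints validation errors to stderr,
// matching the pure-virtual AddError() signature declared above.
class StderrErrorCollector : public DescriptorPool::ErrorCollector {
 public:
  virtual void AddError(const std::string& filename,
                        const std::string& element_name,
                        const Message* /*descriptor*/,
                        ErrorLocation /*location*/,
                        const std::string& message) {
    fprintf(stderr, "%s: %s: %s\n", filename.c_str(), element_name.c_str(),
            message.c_str());
  }
};

int main() {
  // Describe a tiny file in memory:  message Ping { optional int32 id = 1; }
  FileDescriptorProto file_proto;
  file_proto.set_name("demo.proto");
  file_proto.set_package("demo");
  DescriptorProto* msg = file_proto.add_message_type();
  msg->set_name("Ping");
  FieldDescriptorProto* field = msg->add_field();
  field->set_name("id");
  field->set_number(1);
  field->set_label(FieldDescriptorProto::LABEL_OPTIONAL);
  field->set_type(FieldDescriptorProto::TYPE_INT32);

  // Validate and cross-link the proto into real descriptors.
  StderrErrorCollector errors;
  DescriptorPool pool;
  const FileDescriptor* file =
      pool.BuildFileCollectingErrors(file_proto, &errors);
  if (file == NULL) return 1;

  // Descriptors can now be looked up by fully-qualified name.
  const Descriptor* ping = pool.FindMessageTypeByName("demo.Ping");
  return (ping != NULL && ping->field_count() == 1) ? 0 : 1;
}
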
+ +#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION +#include "google/protobuf/descriptor.pb.h" + +#include + +#include +#include +#include +#include +#include +#include +// @@protoc_insertion_point(includes) + +namespace google { +namespace protobuf { + +namespace { + +const ::google::protobuf::Descriptor* FileDescriptorSet_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + FileDescriptorSet_reflection_ = NULL; +const ::google::protobuf::Descriptor* FileDescriptorProto_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + FileDescriptorProto_reflection_ = NULL; +const ::google::protobuf::Descriptor* DescriptorProto_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + DescriptorProto_reflection_ = NULL; +const ::google::protobuf::Descriptor* DescriptorProto_ExtensionRange_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + DescriptorProto_ExtensionRange_reflection_ = NULL; +const ::google::protobuf::Descriptor* FieldDescriptorProto_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + FieldDescriptorProto_reflection_ = NULL; +const ::google::protobuf::EnumDescriptor* FieldDescriptorProto_Type_descriptor_ = NULL; +const ::google::protobuf::EnumDescriptor* FieldDescriptorProto_Label_descriptor_ = NULL; +const ::google::protobuf::Descriptor* EnumDescriptorProto_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + EnumDescriptorProto_reflection_ = NULL; +const ::google::protobuf::Descriptor* EnumValueDescriptorProto_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + EnumValueDescriptorProto_reflection_ = NULL; +const ::google::protobuf::Descriptor* ServiceDescriptorProto_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + ServiceDescriptorProto_reflection_ = NULL; +const ::google::protobuf::Descriptor* MethodDescriptorProto_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + MethodDescriptorProto_reflection_ = NULL; +const ::google::protobuf::Descriptor* FileOptions_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + FileOptions_reflection_ = NULL; +const ::google::protobuf::EnumDescriptor* FileOptions_OptimizeMode_descriptor_ = NULL; +const ::google::protobuf::Descriptor* MessageOptions_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + MessageOptions_reflection_ = NULL; +const ::google::protobuf::Descriptor* FieldOptions_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + FieldOptions_reflection_ = NULL; +const ::google::protobuf::EnumDescriptor* FieldOptions_CType_descriptor_ = NULL; +const ::google::protobuf::Descriptor* EnumOptions_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + EnumOptions_reflection_ = NULL; +const ::google::protobuf::Descriptor* EnumValueOptions_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + EnumValueOptions_reflection_ = NULL; +const ::google::protobuf::Descriptor* ServiceOptions_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + ServiceOptions_reflection_ = NULL; +const ::google::protobuf::Descriptor* MethodOptions_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + MethodOptions_reflection_ = NULL; +const 
::google::protobuf::Descriptor* UninterpretedOption_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + UninterpretedOption_reflection_ = NULL; +const ::google::protobuf::Descriptor* UninterpretedOption_NamePart_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + UninterpretedOption_NamePart_reflection_ = NULL; +const ::google::protobuf::Descriptor* SourceCodeInfo_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + SourceCodeInfo_reflection_ = NULL; +const ::google::protobuf::Descriptor* SourceCodeInfo_Location_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + SourceCodeInfo_Location_reflection_ = NULL; + +} // namespace + + +void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto() { + protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + const ::google::protobuf::FileDescriptor* file = + ::google::protobuf::DescriptorPool::generated_pool()->FindFileByName( + "google/protobuf/descriptor.proto"); + GOOGLE_CHECK(file != NULL); + FileDescriptorSet_descriptor_ = file->message_type(0); + static const int FileDescriptorSet_offsets_[1] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileDescriptorSet, file_), + }; + FileDescriptorSet_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + FileDescriptorSet_descriptor_, + FileDescriptorSet::default_instance_, + FileDescriptorSet_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileDescriptorSet, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileDescriptorSet, _unknown_fields_), + -1, + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(FileDescriptorSet)); + FileDescriptorProto_descriptor_ = file->message_type(1); + static const int FileDescriptorProto_offsets_[9] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileDescriptorProto, name_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileDescriptorProto, package_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileDescriptorProto, dependency_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileDescriptorProto, message_type_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileDescriptorProto, enum_type_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileDescriptorProto, service_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileDescriptorProto, extension_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileDescriptorProto, options_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileDescriptorProto, source_code_info_), + }; + FileDescriptorProto_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + FileDescriptorProto_descriptor_, + FileDescriptorProto::default_instance_, + FileDescriptorProto_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileDescriptorProto, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileDescriptorProto, _unknown_fields_), + -1, + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(FileDescriptorProto)); + DescriptorProto_descriptor_ = file->message_type(2); + static const int DescriptorProto_offsets_[7] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DescriptorProto, name_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DescriptorProto, field_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DescriptorProto, extension_), + 
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DescriptorProto, nested_type_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DescriptorProto, enum_type_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DescriptorProto, extension_range_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DescriptorProto, options_), + }; + DescriptorProto_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + DescriptorProto_descriptor_, + DescriptorProto::default_instance_, + DescriptorProto_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DescriptorProto, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DescriptorProto, _unknown_fields_), + -1, + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(DescriptorProto)); + DescriptorProto_ExtensionRange_descriptor_ = DescriptorProto_descriptor_->nested_type(0); + static const int DescriptorProto_ExtensionRange_offsets_[2] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DescriptorProto_ExtensionRange, start_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DescriptorProto_ExtensionRange, end_), + }; + DescriptorProto_ExtensionRange_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + DescriptorProto_ExtensionRange_descriptor_, + DescriptorProto_ExtensionRange::default_instance_, + DescriptorProto_ExtensionRange_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DescriptorProto_ExtensionRange, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DescriptorProto_ExtensionRange, _unknown_fields_), + -1, + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(DescriptorProto_ExtensionRange)); + FieldDescriptorProto_descriptor_ = file->message_type(3); + static const int FieldDescriptorProto_offsets_[8] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldDescriptorProto, name_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldDescriptorProto, number_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldDescriptorProto, label_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldDescriptorProto, type_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldDescriptorProto, type_name_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldDescriptorProto, extendee_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldDescriptorProto, default_value_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldDescriptorProto, options_), + }; + FieldDescriptorProto_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + FieldDescriptorProto_descriptor_, + FieldDescriptorProto::default_instance_, + FieldDescriptorProto_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldDescriptorProto, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldDescriptorProto, _unknown_fields_), + -1, + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(FieldDescriptorProto)); + FieldDescriptorProto_Type_descriptor_ = FieldDescriptorProto_descriptor_->enum_type(0); + FieldDescriptorProto_Label_descriptor_ = FieldDescriptorProto_descriptor_->enum_type(1); + EnumDescriptorProto_descriptor_ = file->message_type(4); + static const int EnumDescriptorProto_offsets_[3] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumDescriptorProto, name_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumDescriptorProto, 
value_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumDescriptorProto, options_), + }; + EnumDescriptorProto_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + EnumDescriptorProto_descriptor_, + EnumDescriptorProto::default_instance_, + EnumDescriptorProto_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumDescriptorProto, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumDescriptorProto, _unknown_fields_), + -1, + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(EnumDescriptorProto)); + EnumValueDescriptorProto_descriptor_ = file->message_type(5); + static const int EnumValueDescriptorProto_offsets_[3] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumValueDescriptorProto, name_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumValueDescriptorProto, number_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumValueDescriptorProto, options_), + }; + EnumValueDescriptorProto_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + EnumValueDescriptorProto_descriptor_, + EnumValueDescriptorProto::default_instance_, + EnumValueDescriptorProto_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumValueDescriptorProto, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumValueDescriptorProto, _unknown_fields_), + -1, + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(EnumValueDescriptorProto)); + ServiceDescriptorProto_descriptor_ = file->message_type(6); + static const int ServiceDescriptorProto_offsets_[3] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ServiceDescriptorProto, name_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ServiceDescriptorProto, method_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ServiceDescriptorProto, options_), + }; + ServiceDescriptorProto_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + ServiceDescriptorProto_descriptor_, + ServiceDescriptorProto::default_instance_, + ServiceDescriptorProto_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ServiceDescriptorProto, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ServiceDescriptorProto, _unknown_fields_), + -1, + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(ServiceDescriptorProto)); + MethodDescriptorProto_descriptor_ = file->message_type(7); + static const int MethodDescriptorProto_offsets_[4] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MethodDescriptorProto, name_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MethodDescriptorProto, input_type_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MethodDescriptorProto, output_type_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MethodDescriptorProto, options_), + }; + MethodDescriptorProto_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + MethodDescriptorProto_descriptor_, + MethodDescriptorProto::default_instance_, + MethodDescriptorProto_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MethodDescriptorProto, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MethodDescriptorProto, _unknown_fields_), + -1, + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(MethodDescriptorProto)); + FileOptions_descriptor_ 
= file->message_type(8); + static const int FileOptions_offsets_[9] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileOptions, java_package_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileOptions, java_outer_classname_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileOptions, java_multiple_files_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileOptions, java_generate_equals_and_hash_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileOptions, optimize_for_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileOptions, cc_generic_services_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileOptions, java_generic_services_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileOptions, py_generic_services_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileOptions, uninterpreted_option_), + }; + FileOptions_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + FileOptions_descriptor_, + FileOptions::default_instance_, + FileOptions_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileOptions, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileOptions, _unknown_fields_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FileOptions, _extensions_), + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(FileOptions)); + FileOptions_OptimizeMode_descriptor_ = FileOptions_descriptor_->enum_type(0); + MessageOptions_descriptor_ = file->message_type(9); + static const int MessageOptions_offsets_[3] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MessageOptions, message_set_wire_format_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MessageOptions, no_standard_descriptor_accessor_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MessageOptions, uninterpreted_option_), + }; + MessageOptions_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + MessageOptions_descriptor_, + MessageOptions::default_instance_, + MessageOptions_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MessageOptions, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MessageOptions, _unknown_fields_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MessageOptions, _extensions_), + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(MessageOptions)); + FieldOptions_descriptor_ = file->message_type(10); + static const int FieldOptions_offsets_[5] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldOptions, ctype_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldOptions, packed_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldOptions, deprecated_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldOptions, experimental_map_key_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldOptions, uninterpreted_option_), + }; + FieldOptions_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + FieldOptions_descriptor_, + FieldOptions::default_instance_, + FieldOptions_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldOptions, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldOptions, _unknown_fields_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(FieldOptions, _extensions_), + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(FieldOptions)); + FieldOptions_CType_descriptor_ = 
FieldOptions_descriptor_->enum_type(0); + EnumOptions_descriptor_ = file->message_type(11); + static const int EnumOptions_offsets_[1] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumOptions, uninterpreted_option_), + }; + EnumOptions_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + EnumOptions_descriptor_, + EnumOptions::default_instance_, + EnumOptions_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumOptions, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumOptions, _unknown_fields_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumOptions, _extensions_), + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(EnumOptions)); + EnumValueOptions_descriptor_ = file->message_type(12); + static const int EnumValueOptions_offsets_[1] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumValueOptions, uninterpreted_option_), + }; + EnumValueOptions_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + EnumValueOptions_descriptor_, + EnumValueOptions::default_instance_, + EnumValueOptions_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumValueOptions, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumValueOptions, _unknown_fields_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EnumValueOptions, _extensions_), + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(EnumValueOptions)); + ServiceOptions_descriptor_ = file->message_type(13); + static const int ServiceOptions_offsets_[1] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ServiceOptions, uninterpreted_option_), + }; + ServiceOptions_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + ServiceOptions_descriptor_, + ServiceOptions::default_instance_, + ServiceOptions_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ServiceOptions, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ServiceOptions, _unknown_fields_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ServiceOptions, _extensions_), + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(ServiceOptions)); + MethodOptions_descriptor_ = file->message_type(14); + static const int MethodOptions_offsets_[1] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MethodOptions, uninterpreted_option_), + }; + MethodOptions_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + MethodOptions_descriptor_, + MethodOptions::default_instance_, + MethodOptions_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MethodOptions, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MethodOptions, _unknown_fields_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MethodOptions, _extensions_), + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(MethodOptions)); + UninterpretedOption_descriptor_ = file->message_type(15); + static const int UninterpretedOption_offsets_[7] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(UninterpretedOption, name_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(UninterpretedOption, identifier_value_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(UninterpretedOption, positive_int_value_), + 
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(UninterpretedOption, negative_int_value_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(UninterpretedOption, double_value_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(UninterpretedOption, string_value_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(UninterpretedOption, aggregate_value_), + }; + UninterpretedOption_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + UninterpretedOption_descriptor_, + UninterpretedOption::default_instance_, + UninterpretedOption_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(UninterpretedOption, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(UninterpretedOption, _unknown_fields_), + -1, + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(UninterpretedOption)); + UninterpretedOption_NamePart_descriptor_ = UninterpretedOption_descriptor_->nested_type(0); + static const int UninterpretedOption_NamePart_offsets_[2] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(UninterpretedOption_NamePart, name_part_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(UninterpretedOption_NamePart, is_extension_), + }; + UninterpretedOption_NamePart_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + UninterpretedOption_NamePart_descriptor_, + UninterpretedOption_NamePart::default_instance_, + UninterpretedOption_NamePart_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(UninterpretedOption_NamePart, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(UninterpretedOption_NamePart, _unknown_fields_), + -1, + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(UninterpretedOption_NamePart)); + SourceCodeInfo_descriptor_ = file->message_type(16); + static const int SourceCodeInfo_offsets_[1] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(SourceCodeInfo, location_), + }; + SourceCodeInfo_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + SourceCodeInfo_descriptor_, + SourceCodeInfo::default_instance_, + SourceCodeInfo_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(SourceCodeInfo, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(SourceCodeInfo, _unknown_fields_), + -1, + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(SourceCodeInfo)); + SourceCodeInfo_Location_descriptor_ = SourceCodeInfo_descriptor_->nested_type(0); + static const int SourceCodeInfo_Location_offsets_[2] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(SourceCodeInfo_Location, path_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(SourceCodeInfo_Location, span_), + }; + SourceCodeInfo_Location_reflection_ = + new ::google::protobuf::internal::GeneratedMessageReflection( + SourceCodeInfo_Location_descriptor_, + SourceCodeInfo_Location::default_instance_, + SourceCodeInfo_Location_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(SourceCodeInfo_Location, _has_bits_[0]), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(SourceCodeInfo_Location, _unknown_fields_), + -1, + ::google::protobuf::DescriptorPool::generated_pool(), + ::google::protobuf::MessageFactory::generated_factory(), + sizeof(SourceCodeInfo_Location)); +} + +namespace { + +GOOGLE_PROTOBUF_DECLARE_ONCE(protobuf_AssignDescriptors_once_); +inline void protobuf_AssignDescriptorsOnce() { + 
::google::protobuf::GoogleOnceInit(&protobuf_AssignDescriptors_once_, + &protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto); +} + +void protobuf_RegisterTypes(const ::std::string&) { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + FileDescriptorSet_descriptor_, &FileDescriptorSet::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + FileDescriptorProto_descriptor_, &FileDescriptorProto::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + DescriptorProto_descriptor_, &DescriptorProto::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + DescriptorProto_ExtensionRange_descriptor_, &DescriptorProto_ExtensionRange::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + FieldDescriptorProto_descriptor_, &FieldDescriptorProto::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + EnumDescriptorProto_descriptor_, &EnumDescriptorProto::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + EnumValueDescriptorProto_descriptor_, &EnumValueDescriptorProto::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + ServiceDescriptorProto_descriptor_, &ServiceDescriptorProto::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + MethodDescriptorProto_descriptor_, &MethodDescriptorProto::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + FileOptions_descriptor_, &FileOptions::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + MessageOptions_descriptor_, &MessageOptions::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + FieldOptions_descriptor_, &FieldOptions::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + EnumOptions_descriptor_, &EnumOptions::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + EnumValueOptions_descriptor_, &EnumValueOptions::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + ServiceOptions_descriptor_, &ServiceOptions::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + MethodOptions_descriptor_, &MethodOptions::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + UninterpretedOption_descriptor_, &UninterpretedOption::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + UninterpretedOption_NamePart_descriptor_, &UninterpretedOption_NamePart::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + SourceCodeInfo_descriptor_, &SourceCodeInfo::default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + SourceCodeInfo_Location_descriptor_, &SourceCodeInfo_Location::default_instance()); +} + +} // namespace + +void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto() { + delete FileDescriptorSet::default_instance_; + delete FileDescriptorSet_reflection_; + delete FileDescriptorProto::default_instance_; + delete FileDescriptorProto_reflection_; + delete DescriptorProto::default_instance_; + delete DescriptorProto_reflection_; + 
delete DescriptorProto_ExtensionRange::default_instance_; + delete DescriptorProto_ExtensionRange_reflection_; + delete FieldDescriptorProto::default_instance_; + delete FieldDescriptorProto_reflection_; + delete EnumDescriptorProto::default_instance_; + delete EnumDescriptorProto_reflection_; + delete EnumValueDescriptorProto::default_instance_; + delete EnumValueDescriptorProto_reflection_; + delete ServiceDescriptorProto::default_instance_; + delete ServiceDescriptorProto_reflection_; + delete MethodDescriptorProto::default_instance_; + delete MethodDescriptorProto_reflection_; + delete FileOptions::default_instance_; + delete FileOptions_reflection_; + delete MessageOptions::default_instance_; + delete MessageOptions_reflection_; + delete FieldOptions::default_instance_; + delete FieldOptions_reflection_; + delete EnumOptions::default_instance_; + delete EnumOptions_reflection_; + delete EnumValueOptions::default_instance_; + delete EnumValueOptions_reflection_; + delete ServiceOptions::default_instance_; + delete ServiceOptions_reflection_; + delete MethodOptions::default_instance_; + delete MethodOptions_reflection_; + delete UninterpretedOption::default_instance_; + delete UninterpretedOption_reflection_; + delete UninterpretedOption_NamePart::default_instance_; + delete UninterpretedOption_NamePart_reflection_; + delete SourceCodeInfo::default_instance_; + delete SourceCodeInfo_reflection_; + delete SourceCodeInfo_Location::default_instance_; + delete SourceCodeInfo_Location_reflection_; +} + +void protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto() { + static bool already_here = false; + if (already_here) return; + already_here = true; + GOOGLE_PROTOBUF_VERIFY_VERSION; + + ::google::protobuf::DescriptorPool::InternalAddGeneratedFile( + "\n google/protobuf/descriptor.proto\022\017goog" + "le.protobuf\"G\n\021FileDescriptorSet\0222\n\004file" + "\030\001 \003(\0132$.google.protobuf.FileDescriptorP" + "roto\"\227\003\n\023FileDescriptorProto\022\014\n\004name\030\001 \001" + "(\t\022\017\n\007package\030\002 \001(\t\022\022\n\ndependency\030\003 \003(\t\022" + "6\n\014message_type\030\004 \003(\0132 .google.protobuf." + "DescriptorProto\0227\n\tenum_type\030\005 \003(\0132$.goo" + "gle.protobuf.EnumDescriptorProto\0228\n\007serv" + "ice\030\006 \003(\0132\'.google.protobuf.ServiceDescr" + "iptorProto\0228\n\textension\030\007 \003(\0132%.google.p" + "rotobuf.FieldDescriptorProto\022-\n\007options\030" + "\010 \001(\0132\034.google.protobuf.FileOptions\0229\n\020s" + "ource_code_info\030\t \001(\0132\037.google.protobuf." + "SourceCodeInfo\"\251\003\n\017DescriptorProto\022\014\n\004na" + "me\030\001 \001(\t\0224\n\005field\030\002 \003(\0132%.google.protobu" + "f.FieldDescriptorProto\0228\n\textension\030\006 \003(" + "\0132%.google.protobuf.FieldDescriptorProto" + "\0225\n\013nested_type\030\003 \003(\0132 .google.protobuf." 
+ "DescriptorProto\0227\n\tenum_type\030\004 \003(\0132$.goo" + "gle.protobuf.EnumDescriptorProto\022H\n\017exte" + "nsion_range\030\005 \003(\0132/.google.protobuf.Desc" + "riptorProto.ExtensionRange\0220\n\007options\030\007 " + "\001(\0132\037.google.protobuf.MessageOptions\032,\n\016" + "ExtensionRange\022\r\n\005start\030\001 \001(\005\022\013\n\003end\030\002 \001" + "(\005\"\224\005\n\024FieldDescriptorProto\022\014\n\004name\030\001 \001(" + "\t\022\016\n\006number\030\003 \001(\005\022:\n\005label\030\004 \001(\0162+.googl" + "e.protobuf.FieldDescriptorProto.Label\0228\n" + "\004type\030\005 \001(\0162*.google.protobuf.FieldDescr" + "iptorProto.Type\022\021\n\ttype_name\030\006 \001(\t\022\020\n\010ex" + "tendee\030\002 \001(\t\022\025\n\rdefault_value\030\007 \001(\t\022.\n\007o" + "ptions\030\010 \001(\0132\035.google.protobuf.FieldOpti" + "ons\"\266\002\n\004Type\022\017\n\013TYPE_DOUBLE\020\001\022\016\n\nTYPE_FL" + "OAT\020\002\022\016\n\nTYPE_INT64\020\003\022\017\n\013TYPE_UINT64\020\004\022\016" + "\n\nTYPE_INT32\020\005\022\020\n\014TYPE_FIXED64\020\006\022\020\n\014TYPE" + "_FIXED32\020\007\022\r\n\tTYPE_BOOL\020\010\022\017\n\013TYPE_STRING" + "\020\t\022\016\n\nTYPE_GROUP\020\n\022\020\n\014TYPE_MESSAGE\020\013\022\016\n\n" + "TYPE_BYTES\020\014\022\017\n\013TYPE_UINT32\020\r\022\r\n\tTYPE_EN" + "UM\020\016\022\021\n\rTYPE_SFIXED32\020\017\022\021\n\rTYPE_SFIXED64" + "\020\020\022\017\n\013TYPE_SINT32\020\021\022\017\n\013TYPE_SINT64\020\022\"C\n\005" + "Label\022\022\n\016LABEL_OPTIONAL\020\001\022\022\n\016LABEL_REQUI" + "RED\020\002\022\022\n\016LABEL_REPEATED\020\003\"\214\001\n\023EnumDescri" + "ptorProto\022\014\n\004name\030\001 \001(\t\0228\n\005value\030\002 \003(\0132)" + ".google.protobuf.EnumValueDescriptorProt" + "o\022-\n\007options\030\003 \001(\0132\034.google.protobuf.Enu" + "mOptions\"l\n\030EnumValueDescriptorProto\022\014\n\004" + "name\030\001 \001(\t\022\016\n\006number\030\002 \001(\005\0222\n\007options\030\003 " + "\001(\0132!.google.protobuf.EnumValueOptions\"\220" + "\001\n\026ServiceDescriptorProto\022\014\n\004name\030\001 \001(\t\022" + "6\n\006method\030\002 \003(\0132&.google.protobuf.Method" + "DescriptorProto\0220\n\007options\030\003 \001(\0132\037.googl" + "e.protobuf.ServiceOptions\"\177\n\025MethodDescr" + "iptorProto\022\014\n\004name\030\001 \001(\t\022\022\n\ninput_type\030\002" + " \001(\t\022\023\n\013output_type\030\003 \001(\t\022/\n\007options\030\004 \001" + "(\0132\036.google.protobuf.MethodOptions\"\325\003\n\013F" + "ileOptions\022\024\n\014java_package\030\001 \001(\t\022\034\n\024java" + "_outer_classname\030\010 \001(\t\022\"\n\023java_multiple_" + "files\030\n \001(\010:\005false\022,\n\035java_generate_equa" + "ls_and_hash\030\024 \001(\010:\005false\022F\n\014optimize_for" + "\030\t \001(\0162).google.protobuf.FileOptions.Opt" + "imizeMode:\005SPEED\022\"\n\023cc_generic_services\030" + "\020 \001(\010:\005false\022$\n\025java_generic_services\030\021 " + "\001(\010:\005false\022\"\n\023py_generic_services\030\022 \001(\010:" + "\005false\022C\n\024uninterpreted_option\030\347\007 \003(\0132$." 
+ "google.protobuf.UninterpretedOption\":\n\014O" + "ptimizeMode\022\t\n\005SPEED\020\001\022\r\n\tCODE_SIZE\020\002\022\020\n" + "\014LITE_RUNTIME\020\003*\t\010\350\007\020\200\200\200\200\002\"\270\001\n\016MessageOp" + "tions\022&\n\027message_set_wire_format\030\001 \001(\010:\005" + "false\022.\n\037no_standard_descriptor_accessor" + "\030\002 \001(\010:\005false\022C\n\024uninterpreted_option\030\347\007" + " \003(\0132$.google.protobuf.UninterpretedOpti" + "on*\t\010\350\007\020\200\200\200\200\002\"\224\002\n\014FieldOptions\022:\n\005ctype\030" + "\001 \001(\0162#.google.protobuf.FieldOptions.CTy" + "pe:\006STRING\022\016\n\006packed\030\002 \001(\010\022\031\n\ndeprecated" + "\030\003 \001(\010:\005false\022\034\n\024experimental_map_key\030\t " + "\001(\t\022C\n\024uninterpreted_option\030\347\007 \003(\0132$.goo" + "gle.protobuf.UninterpretedOption\"/\n\005CTyp" + "e\022\n\n\006STRING\020\000\022\010\n\004CORD\020\001\022\020\n\014STRING_PIECE\020" + "\002*\t\010\350\007\020\200\200\200\200\002\"]\n\013EnumOptions\022C\n\024uninterpr" + "eted_option\030\347\007 \003(\0132$.google.protobuf.Uni" + "nterpretedOption*\t\010\350\007\020\200\200\200\200\002\"b\n\020EnumValue" + "Options\022C\n\024uninterpreted_option\030\347\007 \003(\0132$" + ".google.protobuf.UninterpretedOption*\t\010\350" + "\007\020\200\200\200\200\002\"`\n\016ServiceOptions\022C\n\024uninterpret" + "ed_option\030\347\007 \003(\0132$.google.protobuf.Unint" + "erpretedOption*\t\010\350\007\020\200\200\200\200\002\"_\n\rMethodOptio" + "ns\022C\n\024uninterpreted_option\030\347\007 \003(\0132$.goog" + "le.protobuf.UninterpretedOption*\t\010\350\007\020\200\200\200" + "\200\002\"\236\002\n\023UninterpretedOption\022;\n\004name\030\002 \003(\013" + "2-.google.protobuf.UninterpretedOption.N" + "amePart\022\030\n\020identifier_value\030\003 \001(\t\022\032\n\022pos" + "itive_int_value\030\004 \001(\004\022\032\n\022negative_int_va" + "lue\030\005 \001(\003\022\024\n\014double_value\030\006 \001(\001\022\024\n\014strin" + "g_value\030\007 \001(\014\022\027\n\017aggregate_value\030\010 \001(\t\0323" + "\n\010NamePart\022\021\n\tname_part\030\001 \002(\t\022\024\n\014is_exte" + "nsion\030\002 \002(\010\"|\n\016SourceCodeInfo\022:\n\010locatio" + "n\030\001 \003(\0132(.google.protobuf.SourceCodeInfo" + ".Location\032.\n\010Location\022\020\n\004path\030\001 \003(\005B\002\020\001\022" + "\020\n\004span\030\002 \003(\005B\002\020\001B)\n\023com.google.protobuf" + "B\020DescriptorProtosH\001", 3940); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedFile( + "google/protobuf/descriptor.proto", &protobuf_RegisterTypes); + FileDescriptorSet::default_instance_ = new FileDescriptorSet(); + FileDescriptorProto::default_instance_ = new FileDescriptorProto(); + DescriptorProto::default_instance_ = new DescriptorProto(); + DescriptorProto_ExtensionRange::default_instance_ = new DescriptorProto_ExtensionRange(); + FieldDescriptorProto::default_instance_ = new FieldDescriptorProto(); + EnumDescriptorProto::default_instance_ = new EnumDescriptorProto(); + EnumValueDescriptorProto::default_instance_ = new EnumValueDescriptorProto(); + ServiceDescriptorProto::default_instance_ = new ServiceDescriptorProto(); + MethodDescriptorProto::default_instance_ = new MethodDescriptorProto(); + FileOptions::default_instance_ = new FileOptions(); + MessageOptions::default_instance_ = new MessageOptions(); + FieldOptions::default_instance_ = new FieldOptions(); + EnumOptions::default_instance_ = new EnumOptions(); + 
+  EnumValueOptions::default_instance_ = new EnumValueOptions();
+  ServiceOptions::default_instance_ = new ServiceOptions();
+  MethodOptions::default_instance_ = new MethodOptions();
+  UninterpretedOption::default_instance_ = new UninterpretedOption();
+  UninterpretedOption_NamePart::default_instance_ = new UninterpretedOption_NamePart();
+  SourceCodeInfo::default_instance_ = new SourceCodeInfo();
+  SourceCodeInfo_Location::default_instance_ = new SourceCodeInfo_Location();
+  FileDescriptorSet::default_instance_->InitAsDefaultInstance();
+  FileDescriptorProto::default_instance_->InitAsDefaultInstance();
+  DescriptorProto::default_instance_->InitAsDefaultInstance();
+  DescriptorProto_ExtensionRange::default_instance_->InitAsDefaultInstance();
+  FieldDescriptorProto::default_instance_->InitAsDefaultInstance();
+  EnumDescriptorProto::default_instance_->InitAsDefaultInstance();
+  EnumValueDescriptorProto::default_instance_->InitAsDefaultInstance();
+  ServiceDescriptorProto::default_instance_->InitAsDefaultInstance();
+  MethodDescriptorProto::default_instance_->InitAsDefaultInstance();
+  FileOptions::default_instance_->InitAsDefaultInstance();
+  MessageOptions::default_instance_->InitAsDefaultInstance();
+  FieldOptions::default_instance_->InitAsDefaultInstance();
+  EnumOptions::default_instance_->InitAsDefaultInstance();
+  EnumValueOptions::default_instance_->InitAsDefaultInstance();
+  ServiceOptions::default_instance_->InitAsDefaultInstance();
+  MethodOptions::default_instance_->InitAsDefaultInstance();
+  UninterpretedOption::default_instance_->InitAsDefaultInstance();
+  UninterpretedOption_NamePart::default_instance_->InitAsDefaultInstance();
+  SourceCodeInfo::default_instance_->InitAsDefaultInstance();
+  SourceCodeInfo_Location::default_instance_->InitAsDefaultInstance();
+  ::google::protobuf::internal::OnShutdown(&protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto);
+}
+
+// Force AddDescriptors() to be called at static initialization time.
+struct StaticDescriptorInitializer_google_2fprotobuf_2fdescriptor_2eproto {
+  StaticDescriptorInitializer_google_2fprotobuf_2fdescriptor_2eproto() {
+    protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto();
+  }
+} static_descriptor_initializer_google_2fprotobuf_2fdescriptor_2eproto_;
+
+
+// ===================================================================
+
+#ifndef _MSC_VER
+const int FileDescriptorSet::kFileFieldNumber;
+#endif // !_MSC_VER
+
+FileDescriptorSet::FileDescriptorSet()
+  : ::google::protobuf::Message() {
+  SharedCtor();
+}
+
+void FileDescriptorSet::InitAsDefaultInstance() {
+}
+
+FileDescriptorSet::FileDescriptorSet(const FileDescriptorSet& from)
+  : ::google::protobuf::Message() {
+  SharedCtor();
+  MergeFrom(from);
+}
+
+void FileDescriptorSet::SharedCtor() {
+  _cached_size_ = 0;
+  ::memset(_has_bits_, 0, sizeof(_has_bits_));
+}
+
+FileDescriptorSet::~FileDescriptorSet() {
+  SharedDtor();
+}
+
+void FileDescriptorSet::SharedDtor() {
+  if (this != default_instance_) {
+  }
+}
+
+void FileDescriptorSet::SetCachedSize(int size) const {
+  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
+  _cached_size_ = size;
+  GOOGLE_SAFE_CONCURRENT_WRITES_END();
+}
+const ::google::protobuf::Descriptor* FileDescriptorSet::descriptor() {
+  protobuf_AssignDescriptorsOnce();
+  return FileDescriptorSet_descriptor_;
+}
+
+const FileDescriptorSet& FileDescriptorSet::default_instance() {
+  if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_;
+}
+
+FileDescriptorSet* FileDescriptorSet::default_instance_ = NULL;
+
+FileDescriptorSet* FileDescriptorSet::New() const {
+  return new FileDescriptorSet;
+}
+
+void FileDescriptorSet::Clear() {
+  file_.Clear();
+  ::memset(_has_bits_, 0, sizeof(_has_bits_));
+  mutable_unknown_fields()->Clear();
+}
+
+bool FileDescriptorSet::MergePartialFromCodedStream(
+    ::google::protobuf::io::CodedInputStream* input) {
+#define DO_(EXPRESSION) if (!(EXPRESSION)) return false
+  ::google::protobuf::uint32 tag;
+  while ((tag = input->ReadTag()) != 0) {
+    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
+      // repeated .google.protobuf.FileDescriptorProto file = 1;
+      case 1: {
+        if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
+            ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) {
+         parse_file:
+          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
+                input, add_file()));
+        } else {
+          goto handle_uninterpreted;
+        }
+        if (input->ExpectTag(10)) goto parse_file;
+        if (input->ExpectAtEnd()) return true;
+        break;
+      }
+
+      default: {
+      handle_uninterpreted:
+        if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
+            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
+          return true;
+        }
+        DO_(::google::protobuf::internal::WireFormat::SkipField(
+              input, tag, mutable_unknown_fields()));
+        break;
+      }
+    }
+  }
+  return true;
+#undef DO_
+}
+
+void FileDescriptorSet::SerializeWithCachedSizes(
+    ::google::protobuf::io::CodedOutputStream* output) const {
+  // repeated .google.protobuf.FileDescriptorProto file = 1;
+  for (int i = 0; i < this->file_size(); i++) {
+    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
+      1, this->file(i), output);
+  }
+
+  if (!unknown_fields().empty()) {
+    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
+        unknown_fields(), output);
+  }
+}
+
+::google::protobuf::uint8* FileDescriptorSet::SerializeWithCachedSizesToArray(
+    ::google::protobuf::uint8* target) const {
+  // repeated .google.protobuf.FileDescriptorProto file = 1;
+  for (int i = 0; i < this->file_size(); i++) {
+    target = ::google::protobuf::internal::WireFormatLite::
+      WriteMessageNoVirtualToArray(
+        1, this->file(i), target);
+  }
+
+  if (!unknown_fields().empty()) {
+    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
+        unknown_fields(), target);
+  }
+  return target;
+}
+
+int FileDescriptorSet::ByteSize() const {
+  int total_size = 0;
+
+  // repeated .google.protobuf.FileDescriptorProto file = 1;
+  total_size += 1 * this->file_size();
+  for (int i = 0; i < this->file_size(); i++) {
+    total_size +=
+      ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
+        this->file(i));
+  }
+
+  if (!unknown_fields().empty()) {
+    total_size +=
+      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
+        unknown_fields());
+  }
+  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
+  _cached_size_ = total_size;
+  GOOGLE_SAFE_CONCURRENT_WRITES_END();
+  return total_size;
+}
+
+void FileDescriptorSet::MergeFrom(const ::google::protobuf::Message& from) {
+  GOOGLE_CHECK_NE(&from, this);
+  const FileDescriptorSet* source =
+    ::google::protobuf::internal::dynamic_cast_if_available<const FileDescriptorSet*>(
+      &from);
+  if (source == NULL) {
+    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
+  } else {
+    MergeFrom(*source);
+  }
+}
+
+void FileDescriptorSet::MergeFrom(const FileDescriptorSet& from) {
+  GOOGLE_CHECK_NE(&from, this);
+  file_.MergeFrom(from.file_);
+  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
+}
+
+void FileDescriptorSet::CopyFrom(const ::google::protobuf::Message& from) {
+  if (&from == this) return;
+  Clear();
+  MergeFrom(from);
+}
+
+void FileDescriptorSet::CopyFrom(const FileDescriptorSet& from) {
+  if (&from == this) return;
+  Clear();
+  MergeFrom(from);
+}
+
+bool FileDescriptorSet::IsInitialized() const {
+
+  for (int i = 0; i < file_size(); i++) {
+    if (!this->file(i).IsInitialized()) return false;
+  }
+  return true;
+}
+
+void FileDescriptorSet::Swap(FileDescriptorSet* other) {
+  if (other != this) {
+    file_.Swap(&other->file_);
+    std::swap(_has_bits_[0], other->_has_bits_[0]);
+    _unknown_fields_.Swap(&other->_unknown_fields_);
+    std::swap(_cached_size_, other->_cached_size_);
+  }
+}
+
+::google::protobuf::Metadata FileDescriptorSet::GetMetadata() const {
+  protobuf_AssignDescriptorsOnce();
+  ::google::protobuf::Metadata metadata;
+  metadata.descriptor = FileDescriptorSet_descriptor_;
+  metadata.reflection = FileDescriptorSet_reflection_;
+  return metadata;
+}
+
+
+// ===================================================================
+
+#ifndef _MSC_VER
+const int FileDescriptorProto::kNameFieldNumber;
+const int FileDescriptorProto::kPackageFieldNumber;
+const int FileDescriptorProto::kDependencyFieldNumber;
+const int FileDescriptorProto::kMessageTypeFieldNumber;
+const int FileDescriptorProto::kEnumTypeFieldNumber;
+const int FileDescriptorProto::kServiceFieldNumber;
+const int FileDescriptorProto::kExtensionFieldNumber;
+const int FileDescriptorProto::kOptionsFieldNumber;
+const int FileDescriptorProto::kSourceCodeInfoFieldNumber;
+#endif // !_MSC_VER
+
+FileDescriptorProto::FileDescriptorProto()
+  : ::google::protobuf::Message() {
+  SharedCtor();
+}
+
+void FileDescriptorProto::InitAsDefaultInstance() {
+  options_ = const_cast< ::google::protobuf::FileOptions*>(&::google::protobuf::FileOptions::default_instance());
+  source_code_info_ = const_cast<
::google::protobuf::SourceCodeInfo*>(&::google::protobuf::SourceCodeInfo::default_instance()); +} + +FileDescriptorProto::FileDescriptorProto(const FileDescriptorProto& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void FileDescriptorProto::SharedCtor() { + _cached_size_ = 0; + name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + package_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + options_ = NULL; + source_code_info_ = NULL; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +FileDescriptorProto::~FileDescriptorProto() { + SharedDtor(); +} + +void FileDescriptorProto::SharedDtor() { + if (name_ != &::google::protobuf::internal::kEmptyString) { + delete name_; + } + if (package_ != &::google::protobuf::internal::kEmptyString) { + delete package_; + } + if (this != default_instance_) { + delete options_; + delete source_code_info_; + } +} + +void FileDescriptorProto::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* FileDescriptorProto::descriptor() { + protobuf_AssignDescriptorsOnce(); + return FileDescriptorProto_descriptor_; +} + +const FileDescriptorProto& FileDescriptorProto::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +FileDescriptorProto* FileDescriptorProto::default_instance_ = NULL; + +FileDescriptorProto* FileDescriptorProto::New() const { + return new FileDescriptorProto; +} + +void FileDescriptorProto::Clear() { + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (has_name()) { + if (name_ != &::google::protobuf::internal::kEmptyString) { + name_->clear(); + } + } + if (has_package()) { + if (package_ != &::google::protobuf::internal::kEmptyString) { + package_->clear(); + } + } + if (has_options()) { + if (options_ != NULL) options_->::google::protobuf::FileOptions::Clear(); + } + } + if (_has_bits_[8 / 32] & (0xffu << (8 % 32))) { + if (has_source_code_info()) { + if (source_code_info_ != NULL) source_code_info_->::google::protobuf::SourceCodeInfo::Clear(); + } + } + dependency_.Clear(); + message_type_.Clear(); + enum_type_.Clear(); + service_.Clear(); + extension_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool FileDescriptorProto::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional string name = 1; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_name())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(18)) goto parse_package; + break; + } + + // optional string package = 2; + case 2: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_package: + 
DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_package())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->package().data(), this->package().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(26)) goto parse_dependency; + break; + } + + // repeated string dependency = 3; + case 3: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_dependency: + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->add_dependency())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->dependency(0).data(), this->dependency(0).length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(26)) goto parse_dependency; + if (input->ExpectTag(34)) goto parse_message_type; + break; + } + + // repeated .google.protobuf.DescriptorProto message_type = 4; + case 4: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_message_type: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_message_type())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(34)) goto parse_message_type; + if (input->ExpectTag(42)) goto parse_enum_type; + break; + } + + // repeated .google.protobuf.EnumDescriptorProto enum_type = 5; + case 5: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_enum_type: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_enum_type())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(42)) goto parse_enum_type; + if (input->ExpectTag(50)) goto parse_service; + break; + } + + // repeated .google.protobuf.ServiceDescriptorProto service = 6; + case 6: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_service: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_service())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(50)) goto parse_service; + if (input->ExpectTag(58)) goto parse_extension; + break; + } + + // repeated .google.protobuf.FieldDescriptorProto extension = 7; + case 7: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_extension: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_extension())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(58)) goto parse_extension; + if (input->ExpectTag(66)) goto parse_options; + break; + } + + // optional .google.protobuf.FileOptions options = 8; + case 8: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_options: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, mutable_options())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(74)) goto parse_source_code_info; + break; + } + + // optional 
.google.protobuf.SourceCodeInfo source_code_info = 9; + case 9: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_source_code_info: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, mutable_source_code_info())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void FileDescriptorProto::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 1, this->name(), output); + } + + // optional string package = 2; + if (has_package()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->package().data(), this->package().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 2, this->package(), output); + } + + // repeated string dependency = 3; + for (int i = 0; i < this->dependency_size(); i++) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->dependency(i).data(), this->dependency(i).length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 3, this->dependency(i), output); + } + + // repeated .google.protobuf.DescriptorProto message_type = 4; + for (int i = 0; i < this->message_type_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 4, this->message_type(i), output); + } + + // repeated .google.protobuf.EnumDescriptorProto enum_type = 5; + for (int i = 0; i < this->enum_type_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 5, this->enum_type(i), output); + } + + // repeated .google.protobuf.ServiceDescriptorProto service = 6; + for (int i = 0; i < this->service_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 6, this->service(i), output); + } + + // repeated .google.protobuf.FieldDescriptorProto extension = 7; + for (int i = 0; i < this->extension_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 7, this->extension(i), output); + } + + // optional .google.protobuf.FileOptions options = 8; + if (has_options()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 8, this->options(), output); + } + + // optional .google.protobuf.SourceCodeInfo source_code_info = 9; + if (has_source_code_info()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 9, this->source_code_info(), output); + } + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* FileDescriptorProto::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // 
optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->name(), target); + } + + // optional string package = 2; + if (has_package()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->package().data(), this->package().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 2, this->package(), target); + } + + // repeated string dependency = 3; + for (int i = 0; i < this->dependency_size(); i++) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->dependency(i).data(), this->dependency(i).length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = ::google::protobuf::internal::WireFormatLite:: + WriteStringToArray(3, this->dependency(i), target); + } + + // repeated .google.protobuf.DescriptorProto message_type = 4; + for (int i = 0; i < this->message_type_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 4, this->message_type(i), target); + } + + // repeated .google.protobuf.EnumDescriptorProto enum_type = 5; + for (int i = 0; i < this->enum_type_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 5, this->enum_type(i), target); + } + + // repeated .google.protobuf.ServiceDescriptorProto service = 6; + for (int i = 0; i < this->service_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 6, this->service(i), target); + } + + // repeated .google.protobuf.FieldDescriptorProto extension = 7; + for (int i = 0; i < this->extension_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 7, this->extension(i), target); + } + + // optional .google.protobuf.FileOptions options = 8; + if (has_options()) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 8, this->options(), target); + } + + // optional .google.protobuf.SourceCodeInfo source_code_info = 9; + if (has_source_code_info()) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 9, this->source_code_info(), target); + } + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int FileDescriptorProto::ByteSize() const { + int total_size = 0; + + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + // optional string name = 1; + if (has_name()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->name()); + } + + // optional string package = 2; + if (has_package()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->package()); + } + + // optional .google.protobuf.FileOptions options = 8; + if (has_options()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->options()); + } + + } + if (_has_bits_[8 / 32] & (0xffu << (8 % 32))) { + // optional .google.protobuf.SourceCodeInfo source_code_info = 9; + if (has_source_code_info()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + 
this->source_code_info()); + } + + } + // repeated string dependency = 3; + total_size += 1 * this->dependency_size(); + for (int i = 0; i < this->dependency_size(); i++) { + total_size += ::google::protobuf::internal::WireFormatLite::StringSize( + this->dependency(i)); + } + + // repeated .google.protobuf.DescriptorProto message_type = 4; + total_size += 1 * this->message_type_size(); + for (int i = 0; i < this->message_type_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->message_type(i)); + } + + // repeated .google.protobuf.EnumDescriptorProto enum_type = 5; + total_size += 1 * this->enum_type_size(); + for (int i = 0; i < this->enum_type_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->enum_type(i)); + } + + // repeated .google.protobuf.ServiceDescriptorProto service = 6; + total_size += 1 * this->service_size(); + for (int i = 0; i < this->service_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->service(i)); + } + + // repeated .google.protobuf.FieldDescriptorProto extension = 7; + total_size += 1 * this->extension_size(); + for (int i = 0; i < this->extension_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->extension(i)); + } + + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void FileDescriptorProto::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const FileDescriptorProto* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void FileDescriptorProto::MergeFrom(const FileDescriptorProto& from) { + GOOGLE_CHECK_NE(&from, this); + dependency_.MergeFrom(from.dependency_); + message_type_.MergeFrom(from.message_type_); + enum_type_.MergeFrom(from.enum_type_); + service_.MergeFrom(from.service_); + extension_.MergeFrom(from.extension_); + if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (from.has_name()) { + set_name(from.name()); + } + if (from.has_package()) { + set_package(from.package()); + } + if (from.has_options()) { + mutable_options()->::google::protobuf::FileOptions::MergeFrom(from.options()); + } + } + if (from._has_bits_[8 / 32] & (0xffu << (8 % 32))) { + if (from.has_source_code_info()) { + mutable_source_code_info()->::google::protobuf::SourceCodeInfo::MergeFrom(from.source_code_info()); + } + } + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void FileDescriptorProto::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void FileDescriptorProto::CopyFrom(const FileDescriptorProto& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool FileDescriptorProto::IsInitialized() const { + + for (int i = 0; i < message_type_size(); i++) { + if (!this->message_type(i).IsInitialized()) return false; + } + for (int i = 0; i < enum_type_size(); i++) { + if (!this->enum_type(i).IsInitialized()) return false; + } + for (int i = 0; i < service_size(); i++) { + if (!this->service(i).IsInitialized()) return false; + } 
+ for (int i = 0; i < extension_size(); i++) { + if (!this->extension(i).IsInitialized()) return false; + } + if (has_options()) { + if (!this->options().IsInitialized()) return false; + } + return true; +} + +void FileDescriptorProto::Swap(FileDescriptorProto* other) { + if (other != this) { + std::swap(name_, other->name_); + std::swap(package_, other->package_); + dependency_.Swap(&other->dependency_); + message_type_.Swap(&other->message_type_); + enum_type_.Swap(&other->enum_type_); + service_.Swap(&other->service_); + extension_.Swap(&other->extension_); + std::swap(options_, other->options_); + std::swap(source_code_info_, other->source_code_info_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + } +} + +::google::protobuf::Metadata FileDescriptorProto::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = FileDescriptorProto_descriptor_; + metadata.reflection = FileDescriptorProto_reflection_; + return metadata; +} + + +// =================================================================== + +#ifndef _MSC_VER +const int DescriptorProto_ExtensionRange::kStartFieldNumber; +const int DescriptorProto_ExtensionRange::kEndFieldNumber; +#endif // !_MSC_VER + +DescriptorProto_ExtensionRange::DescriptorProto_ExtensionRange() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void DescriptorProto_ExtensionRange::InitAsDefaultInstance() { +} + +DescriptorProto_ExtensionRange::DescriptorProto_ExtensionRange(const DescriptorProto_ExtensionRange& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void DescriptorProto_ExtensionRange::SharedCtor() { + _cached_size_ = 0; + start_ = 0; + end_ = 0; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +DescriptorProto_ExtensionRange::~DescriptorProto_ExtensionRange() { + SharedDtor(); +} + +void DescriptorProto_ExtensionRange::SharedDtor() { + if (this != default_instance_) { + } +} + +void DescriptorProto_ExtensionRange::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* DescriptorProto_ExtensionRange::descriptor() { + protobuf_AssignDescriptorsOnce(); + return DescriptorProto_ExtensionRange_descriptor_; +} + +const DescriptorProto_ExtensionRange& DescriptorProto_ExtensionRange::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +DescriptorProto_ExtensionRange* DescriptorProto_ExtensionRange::default_instance_ = NULL; + +DescriptorProto_ExtensionRange* DescriptorProto_ExtensionRange::New() const { + return new DescriptorProto_ExtensionRange; +} + +void DescriptorProto_ExtensionRange::Clear() { + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + start_ = 0; + end_ = 0; + } + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool DescriptorProto_ExtensionRange::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional int32 start = 1; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + 
::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>( + input, &start_))); + set_has_start(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(16)) goto parse_end; + break; + } + + // optional int32 end = 2; + case 2: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_end: + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>( + input, &end_))); + set_has_end(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void DescriptorProto_ExtensionRange::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // optional int32 start = 1; + if (has_start()) { + ::google::protobuf::internal::WireFormatLite::WriteInt32(1, this->start(), output); + } + + // optional int32 end = 2; + if (has_end()) { + ::google::protobuf::internal::WireFormatLite::WriteInt32(2, this->end(), output); + } + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* DescriptorProto_ExtensionRange::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // optional int32 start = 1; + if (has_start()) { + target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(1, this->start(), target); + } + + // optional int32 end = 2; + if (has_end()) { + target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(2, this->end(), target); + } + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int DescriptorProto_ExtensionRange::ByteSize() const { + int total_size = 0; + + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + // optional int32 start = 1; + if (has_start()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::Int32Size( + this->start()); + } + + // optional int32 end = 2; + if (has_end()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::Int32Size( + this->end()); + } + + } + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void DescriptorProto_ExtensionRange::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const DescriptorProto_ExtensionRange* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void DescriptorProto_ExtensionRange::MergeFrom(const 
DescriptorProto_ExtensionRange& from) { + GOOGLE_CHECK_NE(&from, this); + if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (from.has_start()) { + set_start(from.start()); + } + if (from.has_end()) { + set_end(from.end()); + } + } + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void DescriptorProto_ExtensionRange::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void DescriptorProto_ExtensionRange::CopyFrom(const DescriptorProto_ExtensionRange& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool DescriptorProto_ExtensionRange::IsInitialized() const { + + return true; +} + +void DescriptorProto_ExtensionRange::Swap(DescriptorProto_ExtensionRange* other) { + if (other != this) { + std::swap(start_, other->start_); + std::swap(end_, other->end_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + } +} + +::google::protobuf::Metadata DescriptorProto_ExtensionRange::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = DescriptorProto_ExtensionRange_descriptor_; + metadata.reflection = DescriptorProto_ExtensionRange_reflection_; + return metadata; +} + + +// ------------------------------------------------------------------- + +#ifndef _MSC_VER +const int DescriptorProto::kNameFieldNumber; +const int DescriptorProto::kFieldFieldNumber; +const int DescriptorProto::kExtensionFieldNumber; +const int DescriptorProto::kNestedTypeFieldNumber; +const int DescriptorProto::kEnumTypeFieldNumber; +const int DescriptorProto::kExtensionRangeFieldNumber; +const int DescriptorProto::kOptionsFieldNumber; +#endif // !_MSC_VER + +DescriptorProto::DescriptorProto() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void DescriptorProto::InitAsDefaultInstance() { + options_ = const_cast< ::google::protobuf::MessageOptions*>(&::google::protobuf::MessageOptions::default_instance()); +} + +DescriptorProto::DescriptorProto(const DescriptorProto& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void DescriptorProto::SharedCtor() { + _cached_size_ = 0; + name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + options_ = NULL; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +DescriptorProto::~DescriptorProto() { + SharedDtor(); +} + +void DescriptorProto::SharedDtor() { + if (name_ != &::google::protobuf::internal::kEmptyString) { + delete name_; + } + if (this != default_instance_) { + delete options_; + } +} + +void DescriptorProto::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* DescriptorProto::descriptor() { + protobuf_AssignDescriptorsOnce(); + return DescriptorProto_descriptor_; +} + +const DescriptorProto& DescriptorProto::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +DescriptorProto* DescriptorProto::default_instance_ = NULL; + +DescriptorProto* DescriptorProto::New() const { + return new DescriptorProto; +} + +void DescriptorProto::Clear() { + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (has_name()) { + if (name_ != &::google::protobuf::internal::kEmptyString) { + name_->clear(); + } + } + if (has_options()) { + if 
(options_ != NULL) options_->::google::protobuf::MessageOptions::Clear(); + } + } + field_.Clear(); + extension_.Clear(); + nested_type_.Clear(); + enum_type_.Clear(); + extension_range_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool DescriptorProto::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional string name = 1; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_name())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(18)) goto parse_field; + break; + } + + // repeated .google.protobuf.FieldDescriptorProto field = 2; + case 2: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_field: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_field())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(18)) goto parse_field; + if (input->ExpectTag(26)) goto parse_nested_type; + break; + } + + // repeated .google.protobuf.DescriptorProto nested_type = 3; + case 3: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_nested_type: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_nested_type())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(26)) goto parse_nested_type; + if (input->ExpectTag(34)) goto parse_enum_type; + break; + } + + // repeated .google.protobuf.EnumDescriptorProto enum_type = 4; + case 4: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_enum_type: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_enum_type())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(34)) goto parse_enum_type; + if (input->ExpectTag(42)) goto parse_extension_range; + break; + } + + // repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; + case 5: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_extension_range: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_extension_range())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(42)) goto parse_extension_range; + if (input->ExpectTag(50)) goto parse_extension; + break; + } + + // repeated .google.protobuf.FieldDescriptorProto extension = 6; + case 6: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_extension: + 
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_extension())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(50)) goto parse_extension; + if (input->ExpectTag(58)) goto parse_options; + break; + } + + // optional .google.protobuf.MessageOptions options = 7; + case 7: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_options: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, mutable_options())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void DescriptorProto::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 1, this->name(), output); + } + + // repeated .google.protobuf.FieldDescriptorProto field = 2; + for (int i = 0; i < this->field_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 2, this->field(i), output); + } + + // repeated .google.protobuf.DescriptorProto nested_type = 3; + for (int i = 0; i < this->nested_type_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 3, this->nested_type(i), output); + } + + // repeated .google.protobuf.EnumDescriptorProto enum_type = 4; + for (int i = 0; i < this->enum_type_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 4, this->enum_type(i), output); + } + + // repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; + for (int i = 0; i < this->extension_range_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 5, this->extension_range(i), output); + } + + // repeated .google.protobuf.FieldDescriptorProto extension = 6; + for (int i = 0; i < this->extension_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 6, this->extension(i), output); + } + + // optional .google.protobuf.MessageOptions options = 7; + if (has_options()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 7, this->options(), output); + } + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* DescriptorProto::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->name(), target); + } + + // repeated .google.protobuf.FieldDescriptorProto field = 2; + for (int i = 0; i < this->field_size(); 
i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 2, this->field(i), target); + } + + // repeated .google.protobuf.DescriptorProto nested_type = 3; + for (int i = 0; i < this->nested_type_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 3, this->nested_type(i), target); + } + + // repeated .google.protobuf.EnumDescriptorProto enum_type = 4; + for (int i = 0; i < this->enum_type_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 4, this->enum_type(i), target); + } + + // repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; + for (int i = 0; i < this->extension_range_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 5, this->extension_range(i), target); + } + + // repeated .google.protobuf.FieldDescriptorProto extension = 6; + for (int i = 0; i < this->extension_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 6, this->extension(i), target); + } + + // optional .google.protobuf.MessageOptions options = 7; + if (has_options()) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 7, this->options(), target); + } + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int DescriptorProto::ByteSize() const { + int total_size = 0; + + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + // optional string name = 1; + if (has_name()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->name()); + } + + // optional .google.protobuf.MessageOptions options = 7; + if (has_options()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->options()); + } + + } + // repeated .google.protobuf.FieldDescriptorProto field = 2; + total_size += 1 * this->field_size(); + for (int i = 0; i < this->field_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->field(i)); + } + + // repeated .google.protobuf.FieldDescriptorProto extension = 6; + total_size += 1 * this->extension_size(); + for (int i = 0; i < this->extension_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->extension(i)); + } + + // repeated .google.protobuf.DescriptorProto nested_type = 3; + total_size += 1 * this->nested_type_size(); + for (int i = 0; i < this->nested_type_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->nested_type(i)); + } + + // repeated .google.protobuf.EnumDescriptorProto enum_type = 4; + total_size += 1 * this->enum_type_size(); + for (int i = 0; i < this->enum_type_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->enum_type(i)); + } + + // repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; + total_size += 1 * this->extension_range_size(); + for (int i = 0; i < this->extension_range_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->extension_range(i)); + } + + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + 
unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void DescriptorProto::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const DescriptorProto* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void DescriptorProto::MergeFrom(const DescriptorProto& from) { + GOOGLE_CHECK_NE(&from, this); + field_.MergeFrom(from.field_); + extension_.MergeFrom(from.extension_); + nested_type_.MergeFrom(from.nested_type_); + enum_type_.MergeFrom(from.enum_type_); + extension_range_.MergeFrom(from.extension_range_); + if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (from.has_name()) { + set_name(from.name()); + } + if (from.has_options()) { + mutable_options()->::google::protobuf::MessageOptions::MergeFrom(from.options()); + } + } + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void DescriptorProto::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void DescriptorProto::CopyFrom(const DescriptorProto& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool DescriptorProto::IsInitialized() const { + + for (int i = 0; i < field_size(); i++) { + if (!this->field(i).IsInitialized()) return false; + } + for (int i = 0; i < extension_size(); i++) { + if (!this->extension(i).IsInitialized()) return false; + } + for (int i = 0; i < nested_type_size(); i++) { + if (!this->nested_type(i).IsInitialized()) return false; + } + for (int i = 0; i < enum_type_size(); i++) { + if (!this->enum_type(i).IsInitialized()) return false; + } + if (has_options()) { + if (!this->options().IsInitialized()) return false; + } + return true; +} + +void DescriptorProto::Swap(DescriptorProto* other) { + if (other != this) { + std::swap(name_, other->name_); + field_.Swap(&other->field_); + extension_.Swap(&other->extension_); + nested_type_.Swap(&other->nested_type_); + enum_type_.Swap(&other->enum_type_); + extension_range_.Swap(&other->extension_range_); + std::swap(options_, other->options_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + } +} + +::google::protobuf::Metadata DescriptorProto::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = DescriptorProto_descriptor_; + metadata.reflection = DescriptorProto_reflection_; + return metadata; +} + + +// =================================================================== + +const ::google::protobuf::EnumDescriptor* FieldDescriptorProto_Type_descriptor() { + protobuf_AssignDescriptorsOnce(); + return FieldDescriptorProto_Type_descriptor_; +} +bool FieldDescriptorProto_Type_IsValid(int value) { + switch(value) { + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + case 7: + case 8: + case 9: + case 10: + case 11: + case 12: + case 13: + case 14: + case 15: + case 16: + case 17: + case 18: + return true; + default: + return false; + } +} + +#ifndef _MSC_VER +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_DOUBLE; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_FLOAT; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_INT64; +const FieldDescriptorProto_Type 
FieldDescriptorProto::TYPE_UINT64; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_INT32; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_FIXED64; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_FIXED32; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_BOOL; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_STRING; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_GROUP; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_MESSAGE; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_BYTES; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_UINT32; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_ENUM; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_SFIXED32; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_SFIXED64; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_SINT32; +const FieldDescriptorProto_Type FieldDescriptorProto::TYPE_SINT64; +const FieldDescriptorProto_Type FieldDescriptorProto::Type_MIN; +const FieldDescriptorProto_Type FieldDescriptorProto::Type_MAX; +const int FieldDescriptorProto::Type_ARRAYSIZE; +#endif // _MSC_VER +const ::google::protobuf::EnumDescriptor* FieldDescriptorProto_Label_descriptor() { + protobuf_AssignDescriptorsOnce(); + return FieldDescriptorProto_Label_descriptor_; +} +bool FieldDescriptorProto_Label_IsValid(int value) { + switch(value) { + case 1: + case 2: + case 3: + return true; + default: + return false; + } +} + +#ifndef _MSC_VER +const FieldDescriptorProto_Label FieldDescriptorProto::LABEL_OPTIONAL; +const FieldDescriptorProto_Label FieldDescriptorProto::LABEL_REQUIRED; +const FieldDescriptorProto_Label FieldDescriptorProto::LABEL_REPEATED; +const FieldDescriptorProto_Label FieldDescriptorProto::Label_MIN; +const FieldDescriptorProto_Label FieldDescriptorProto::Label_MAX; +const int FieldDescriptorProto::Label_ARRAYSIZE; +#endif // _MSC_VER +#ifndef _MSC_VER +const int FieldDescriptorProto::kNameFieldNumber; +const int FieldDescriptorProto::kNumberFieldNumber; +const int FieldDescriptorProto::kLabelFieldNumber; +const int FieldDescriptorProto::kTypeFieldNumber; +const int FieldDescriptorProto::kTypeNameFieldNumber; +const int FieldDescriptorProto::kExtendeeFieldNumber; +const int FieldDescriptorProto::kDefaultValueFieldNumber; +const int FieldDescriptorProto::kOptionsFieldNumber; +#endif // !_MSC_VER + +FieldDescriptorProto::FieldDescriptorProto() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void FieldDescriptorProto::InitAsDefaultInstance() { + options_ = const_cast< ::google::protobuf::FieldOptions*>(&::google::protobuf::FieldOptions::default_instance()); +} + +FieldDescriptorProto::FieldDescriptorProto(const FieldDescriptorProto& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void FieldDescriptorProto::SharedCtor() { + _cached_size_ = 0; + name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + number_ = 0; + label_ = 1; + type_ = 1; + type_name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + extendee_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + default_value_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + options_ = NULL; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +FieldDescriptorProto::~FieldDescriptorProto() { + SharedDtor(); +} + +void FieldDescriptorProto::SharedDtor() { + if (name_ != 
&::google::protobuf::internal::kEmptyString) { + delete name_; + } + if (type_name_ != &::google::protobuf::internal::kEmptyString) { + delete type_name_; + } + if (extendee_ != &::google::protobuf::internal::kEmptyString) { + delete extendee_; + } + if (default_value_ != &::google::protobuf::internal::kEmptyString) { + delete default_value_; + } + if (this != default_instance_) { + delete options_; + } +} + +void FieldDescriptorProto::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* FieldDescriptorProto::descriptor() { + protobuf_AssignDescriptorsOnce(); + return FieldDescriptorProto_descriptor_; +} + +const FieldDescriptorProto& FieldDescriptorProto::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +FieldDescriptorProto* FieldDescriptorProto::default_instance_ = NULL; + +FieldDescriptorProto* FieldDescriptorProto::New() const { + return new FieldDescriptorProto; +} + +void FieldDescriptorProto::Clear() { + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (has_name()) { + if (name_ != &::google::protobuf::internal::kEmptyString) { + name_->clear(); + } + } + number_ = 0; + label_ = 1; + type_ = 1; + if (has_type_name()) { + if (type_name_ != &::google::protobuf::internal::kEmptyString) { + type_name_->clear(); + } + } + if (has_extendee()) { + if (extendee_ != &::google::protobuf::internal::kEmptyString) { + extendee_->clear(); + } + } + if (has_default_value()) { + if (default_value_ != &::google::protobuf::internal::kEmptyString) { + default_value_->clear(); + } + } + if (has_options()) { + if (options_ != NULL) options_->::google::protobuf::FieldOptions::Clear(); + } + } + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool FieldDescriptorProto::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional string name = 1; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_name())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(18)) goto parse_extendee; + break; + } + + // optional string extendee = 2; + case 2: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_extendee: + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_extendee())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->extendee().data(), this->extendee().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(24)) goto parse_number; + break; + } + + // optional int32 number = 3; + case 3: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + 
::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_number: + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>( + input, &number_))); + set_has_number(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(32)) goto parse_label; + break; + } + + // optional .google.protobuf.FieldDescriptorProto.Label label = 4; + case 4: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_label: + int value; + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>( + input, &value))); + if (::google::protobuf::FieldDescriptorProto_Label_IsValid(value)) { + set_label(static_cast< ::google::protobuf::FieldDescriptorProto_Label >(value)); + } else { + mutable_unknown_fields()->AddVarint(4, value); + } + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(40)) goto parse_type; + break; + } + + // optional .google.protobuf.FieldDescriptorProto.Type type = 5; + case 5: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_type: + int value; + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>( + input, &value))); + if (::google::protobuf::FieldDescriptorProto_Type_IsValid(value)) { + set_type(static_cast< ::google::protobuf::FieldDescriptorProto_Type >(value)); + } else { + mutable_unknown_fields()->AddVarint(5, value); + } + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(50)) goto parse_type_name; + break; + } + + // optional string type_name = 6; + case 6: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_type_name: + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_type_name())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->type_name().data(), this->type_name().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(58)) goto parse_default_value; + break; + } + + // optional string default_value = 7; + case 7: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_default_value: + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_default_value())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->default_value().data(), this->default_value().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(66)) goto parse_options; + break; + } + + // optional .google.protobuf.FieldOptions options = 8; + case 8: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_options: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, mutable_options())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if 
(::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void FieldDescriptorProto::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 1, this->name(), output); + } + + // optional string extendee = 2; + if (has_extendee()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->extendee().data(), this->extendee().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 2, this->extendee(), output); + } + + // optional int32 number = 3; + if (has_number()) { + ::google::protobuf::internal::WireFormatLite::WriteInt32(3, this->number(), output); + } + + // optional .google.protobuf.FieldDescriptorProto.Label label = 4; + if (has_label()) { + ::google::protobuf::internal::WireFormatLite::WriteEnum( + 4, this->label(), output); + } + + // optional .google.protobuf.FieldDescriptorProto.Type type = 5; + if (has_type()) { + ::google::protobuf::internal::WireFormatLite::WriteEnum( + 5, this->type(), output); + } + + // optional string type_name = 6; + if (has_type_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->type_name().data(), this->type_name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 6, this->type_name(), output); + } + + // optional string default_value = 7; + if (has_default_value()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->default_value().data(), this->default_value().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 7, this->default_value(), output); + } + + // optional .google.protobuf.FieldOptions options = 8; + if (has_options()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 8, this->options(), output); + } + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* FieldDescriptorProto::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->name(), target); + } + + // optional string extendee = 2; + if (has_extendee()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->extendee().data(), this->extendee().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 2, this->extendee(), target); + } + + // optional int32 number = 3; + if (has_number()) { + target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(3, this->number(), 
target); + } + + // optional .google.protobuf.FieldDescriptorProto.Label label = 4; + if (has_label()) { + target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray( + 4, this->label(), target); + } + + // optional .google.protobuf.FieldDescriptorProto.Type type = 5; + if (has_type()) { + target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray( + 5, this->type(), target); + } + + // optional string type_name = 6; + if (has_type_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->type_name().data(), this->type_name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 6, this->type_name(), target); + } + + // optional string default_value = 7; + if (has_default_value()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->default_value().data(), this->default_value().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 7, this->default_value(), target); + } + + // optional .google.protobuf.FieldOptions options = 8; + if (has_options()) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 8, this->options(), target); + } + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int FieldDescriptorProto::ByteSize() const { + int total_size = 0; + + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + // optional string name = 1; + if (has_name()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->name()); + } + + // optional int32 number = 3; + if (has_number()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::Int32Size( + this->number()); + } + + // optional .google.protobuf.FieldDescriptorProto.Label label = 4; + if (has_label()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::EnumSize(this->label()); + } + + // optional .google.protobuf.FieldDescriptorProto.Type type = 5; + if (has_type()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::EnumSize(this->type()); + } + + // optional string type_name = 6; + if (has_type_name()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->type_name()); + } + + // optional string extendee = 2; + if (has_extendee()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->extendee()); + } + + // optional string default_value = 7; + if (has_default_value()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->default_value()); + } + + // optional .google.protobuf.FieldOptions options = 8; + if (has_options()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->options()); + } + + } + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void FieldDescriptorProto::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const FieldDescriptorProto* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if 
(source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void FieldDescriptorProto::MergeFrom(const FieldDescriptorProto& from) { + GOOGLE_CHECK_NE(&from, this); + if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (from.has_name()) { + set_name(from.name()); + } + if (from.has_number()) { + set_number(from.number()); + } + if (from.has_label()) { + set_label(from.label()); + } + if (from.has_type()) { + set_type(from.type()); + } + if (from.has_type_name()) { + set_type_name(from.type_name()); + } + if (from.has_extendee()) { + set_extendee(from.extendee()); + } + if (from.has_default_value()) { + set_default_value(from.default_value()); + } + if (from.has_options()) { + mutable_options()->::google::protobuf::FieldOptions::MergeFrom(from.options()); + } + } + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void FieldDescriptorProto::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void FieldDescriptorProto::CopyFrom(const FieldDescriptorProto& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool FieldDescriptorProto::IsInitialized() const { + + if (has_options()) { + if (!this->options().IsInitialized()) return false; + } + return true; +} + +void FieldDescriptorProto::Swap(FieldDescriptorProto* other) { + if (other != this) { + std::swap(name_, other->name_); + std::swap(number_, other->number_); + std::swap(label_, other->label_); + std::swap(type_, other->type_); + std::swap(type_name_, other->type_name_); + std::swap(extendee_, other->extendee_); + std::swap(default_value_, other->default_value_); + std::swap(options_, other->options_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + } +} + +::google::protobuf::Metadata FieldDescriptorProto::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = FieldDescriptorProto_descriptor_; + metadata.reflection = FieldDescriptorProto_reflection_; + return metadata; +} + + +// =================================================================== + +#ifndef _MSC_VER +const int EnumDescriptorProto::kNameFieldNumber; +const int EnumDescriptorProto::kValueFieldNumber; +const int EnumDescriptorProto::kOptionsFieldNumber; +#endif // !_MSC_VER + +EnumDescriptorProto::EnumDescriptorProto() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void EnumDescriptorProto::InitAsDefaultInstance() { + options_ = const_cast< ::google::protobuf::EnumOptions*>(&::google::protobuf::EnumOptions::default_instance()); +} + +EnumDescriptorProto::EnumDescriptorProto(const EnumDescriptorProto& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void EnumDescriptorProto::SharedCtor() { + _cached_size_ = 0; + name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + options_ = NULL; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +EnumDescriptorProto::~EnumDescriptorProto() { + SharedDtor(); +} + +void EnumDescriptorProto::SharedDtor() { + if (name_ != &::google::protobuf::internal::kEmptyString) { + delete name_; + } + if (this != default_instance_) { + delete options_; + } +} + +void EnumDescriptorProto::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const 
::google::protobuf::Descriptor* EnumDescriptorProto::descriptor() { + protobuf_AssignDescriptorsOnce(); + return EnumDescriptorProto_descriptor_; +} + +const EnumDescriptorProto& EnumDescriptorProto::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +EnumDescriptorProto* EnumDescriptorProto::default_instance_ = NULL; + +EnumDescriptorProto* EnumDescriptorProto::New() const { + return new EnumDescriptorProto; +} + +void EnumDescriptorProto::Clear() { + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (has_name()) { + if (name_ != &::google::protobuf::internal::kEmptyString) { + name_->clear(); + } + } + if (has_options()) { + if (options_ != NULL) options_->::google::protobuf::EnumOptions::Clear(); + } + } + value_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool EnumDescriptorProto::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional string name = 1; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_name())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(18)) goto parse_value; + break; + } + + // repeated .google.protobuf.EnumValueDescriptorProto value = 2; + case 2: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_value: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_value())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(18)) goto parse_value; + if (input->ExpectTag(26)) goto parse_options; + break; + } + + // optional .google.protobuf.EnumOptions options = 3; + case 3: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_options: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, mutable_options())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void EnumDescriptorProto::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 1, this->name(), output); + } + + // repeated 
.google.protobuf.EnumValueDescriptorProto value = 2; + for (int i = 0; i < this->value_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 2, this->value(i), output); + } + + // optional .google.protobuf.EnumOptions options = 3; + if (has_options()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 3, this->options(), output); + } + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* EnumDescriptorProto::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->name(), target); + } + + // repeated .google.protobuf.EnumValueDescriptorProto value = 2; + for (int i = 0; i < this->value_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 2, this->value(i), target); + } + + // optional .google.protobuf.EnumOptions options = 3; + if (has_options()) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 3, this->options(), target); + } + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int EnumDescriptorProto::ByteSize() const { + int total_size = 0; + + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + // optional string name = 1; + if (has_name()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->name()); + } + + // optional .google.protobuf.EnumOptions options = 3; + if (has_options()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->options()); + } + + } + // repeated .google.protobuf.EnumValueDescriptorProto value = 2; + total_size += 1 * this->value_size(); + for (int i = 0; i < this->value_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->value(i)); + } + + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void EnumDescriptorProto::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const EnumDescriptorProto* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void EnumDescriptorProto::MergeFrom(const EnumDescriptorProto& from) { + GOOGLE_CHECK_NE(&from, this); + value_.MergeFrom(from.value_); + if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (from.has_name()) { + set_name(from.name()); + } + if (from.has_options()) { + mutable_options()->::google::protobuf::EnumOptions::MergeFrom(from.options()); + } + } + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void EnumDescriptorProto::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + 
MergeFrom(from); +} + +void EnumDescriptorProto::CopyFrom(const EnumDescriptorProto& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool EnumDescriptorProto::IsInitialized() const { + + for (int i = 0; i < value_size(); i++) { + if (!this->value(i).IsInitialized()) return false; + } + if (has_options()) { + if (!this->options().IsInitialized()) return false; + } + return true; +} + +void EnumDescriptorProto::Swap(EnumDescriptorProto* other) { + if (other != this) { + std::swap(name_, other->name_); + value_.Swap(&other->value_); + std::swap(options_, other->options_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + } +} + +::google::protobuf::Metadata EnumDescriptorProto::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = EnumDescriptorProto_descriptor_; + metadata.reflection = EnumDescriptorProto_reflection_; + return metadata; +} + + +// =================================================================== + +#ifndef _MSC_VER +const int EnumValueDescriptorProto::kNameFieldNumber; +const int EnumValueDescriptorProto::kNumberFieldNumber; +const int EnumValueDescriptorProto::kOptionsFieldNumber; +#endif // !_MSC_VER + +EnumValueDescriptorProto::EnumValueDescriptorProto() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void EnumValueDescriptorProto::InitAsDefaultInstance() { + options_ = const_cast< ::google::protobuf::EnumValueOptions*>(&::google::protobuf::EnumValueOptions::default_instance()); +} + +EnumValueDescriptorProto::EnumValueDescriptorProto(const EnumValueDescriptorProto& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void EnumValueDescriptorProto::SharedCtor() { + _cached_size_ = 0; + name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + number_ = 0; + options_ = NULL; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +EnumValueDescriptorProto::~EnumValueDescriptorProto() { + SharedDtor(); +} + +void EnumValueDescriptorProto::SharedDtor() { + if (name_ != &::google::protobuf::internal::kEmptyString) { + delete name_; + } + if (this != default_instance_) { + delete options_; + } +} + +void EnumValueDescriptorProto::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* EnumValueDescriptorProto::descriptor() { + protobuf_AssignDescriptorsOnce(); + return EnumValueDescriptorProto_descriptor_; +} + +const EnumValueDescriptorProto& EnumValueDescriptorProto::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +EnumValueDescriptorProto* EnumValueDescriptorProto::default_instance_ = NULL; + +EnumValueDescriptorProto* EnumValueDescriptorProto::New() const { + return new EnumValueDescriptorProto; +} + +void EnumValueDescriptorProto::Clear() { + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (has_name()) { + if (name_ != &::google::protobuf::internal::kEmptyString) { + name_->clear(); + } + } + number_ = 0; + if (has_options()) { + if (options_ != NULL) options_->::google::protobuf::EnumValueOptions::Clear(); + } + } + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool EnumValueDescriptorProto::MergePartialFromCodedStream( + 
::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional string name = 1; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_name())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(16)) goto parse_number; + break; + } + + // optional int32 number = 2; + case 2: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_number: + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>( + input, &number_))); + set_has_number(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(26)) goto parse_options; + break; + } + + // optional .google.protobuf.EnumValueOptions options = 3; + case 3: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_options: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, mutable_options())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void EnumValueDescriptorProto::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 1, this->name(), output); + } + + // optional int32 number = 2; + if (has_number()) { + ::google::protobuf::internal::WireFormatLite::WriteInt32(2, this->number(), output); + } + + // optional .google.protobuf.EnumValueOptions options = 3; + if (has_options()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 3, this->options(), output); + } + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* EnumValueDescriptorProto::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->name(), target); + } + + // optional int32 number = 2; + if 
(has_number()) { + target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(2, this->number(), target); + } + + // optional .google.protobuf.EnumValueOptions options = 3; + if (has_options()) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 3, this->options(), target); + } + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int EnumValueDescriptorProto::ByteSize() const { + int total_size = 0; + + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + // optional string name = 1; + if (has_name()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->name()); + } + + // optional int32 number = 2; + if (has_number()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::Int32Size( + this->number()); + } + + // optional .google.protobuf.EnumValueOptions options = 3; + if (has_options()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->options()); + } + + } + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void EnumValueDescriptorProto::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const EnumValueDescriptorProto* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void EnumValueDescriptorProto::MergeFrom(const EnumValueDescriptorProto& from) { + GOOGLE_CHECK_NE(&from, this); + if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (from.has_name()) { + set_name(from.name()); + } + if (from.has_number()) { + set_number(from.number()); + } + if (from.has_options()) { + mutable_options()->::google::protobuf::EnumValueOptions::MergeFrom(from.options()); + } + } + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void EnumValueDescriptorProto::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void EnumValueDescriptorProto::CopyFrom(const EnumValueDescriptorProto& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool EnumValueDescriptorProto::IsInitialized() const { + + if (has_options()) { + if (!this->options().IsInitialized()) return false; + } + return true; +} + +void EnumValueDescriptorProto::Swap(EnumValueDescriptorProto* other) { + if (other != this) { + std::swap(name_, other->name_); + std::swap(number_, other->number_); + std::swap(options_, other->options_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + } +} + +::google::protobuf::Metadata EnumValueDescriptorProto::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = EnumValueDescriptorProto_descriptor_; + metadata.reflection = EnumValueDescriptorProto_reflection_; + return metadata; +} + + +// =================================================================== + +#ifndef _MSC_VER +const int ServiceDescriptorProto::kNameFieldNumber; +const int 
ServiceDescriptorProto::kMethodFieldNumber; +const int ServiceDescriptorProto::kOptionsFieldNumber; +#endif // !_MSC_VER + +ServiceDescriptorProto::ServiceDescriptorProto() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void ServiceDescriptorProto::InitAsDefaultInstance() { + options_ = const_cast< ::google::protobuf::ServiceOptions*>(&::google::protobuf::ServiceOptions::default_instance()); +} + +ServiceDescriptorProto::ServiceDescriptorProto(const ServiceDescriptorProto& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void ServiceDescriptorProto::SharedCtor() { + _cached_size_ = 0; + name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + options_ = NULL; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +ServiceDescriptorProto::~ServiceDescriptorProto() { + SharedDtor(); +} + +void ServiceDescriptorProto::SharedDtor() { + if (name_ != &::google::protobuf::internal::kEmptyString) { + delete name_; + } + if (this != default_instance_) { + delete options_; + } +} + +void ServiceDescriptorProto::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* ServiceDescriptorProto::descriptor() { + protobuf_AssignDescriptorsOnce(); + return ServiceDescriptorProto_descriptor_; +} + +const ServiceDescriptorProto& ServiceDescriptorProto::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +ServiceDescriptorProto* ServiceDescriptorProto::default_instance_ = NULL; + +ServiceDescriptorProto* ServiceDescriptorProto::New() const { + return new ServiceDescriptorProto; +} + +void ServiceDescriptorProto::Clear() { + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (has_name()) { + if (name_ != &::google::protobuf::internal::kEmptyString) { + name_->clear(); + } + } + if (has_options()) { + if (options_ != NULL) options_->::google::protobuf::ServiceOptions::Clear(); + } + } + method_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool ServiceDescriptorProto::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional string name = 1; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_name())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(18)) goto parse_method; + break; + } + + // repeated .google.protobuf.MethodDescriptorProto method = 2; + case 2: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_method: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_method())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(18)) goto parse_method; + if (input->ExpectTag(26)) goto parse_options; 
+ break; + } + + // optional .google.protobuf.ServiceOptions options = 3; + case 3: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_options: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, mutable_options())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void ServiceDescriptorProto::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 1, this->name(), output); + } + + // repeated .google.protobuf.MethodDescriptorProto method = 2; + for (int i = 0; i < this->method_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 2, this->method(i), output); + } + + // optional .google.protobuf.ServiceOptions options = 3; + if (has_options()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 3, this->options(), output); + } + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* ServiceDescriptorProto::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->name(), target); + } + + // repeated .google.protobuf.MethodDescriptorProto method = 2; + for (int i = 0; i < this->method_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 2, this->method(i), target); + } + + // optional .google.protobuf.ServiceOptions options = 3; + if (has_options()) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 3, this->options(), target); + } + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int ServiceDescriptorProto::ByteSize() const { + int total_size = 0; + + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + // optional string name = 1; + if (has_name()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->name()); + } + + // optional .google.protobuf.ServiceOptions options = 3; + if (has_options()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->options()); + } + + } + // repeated .google.protobuf.MethodDescriptorProto method = 2; + total_size += 1 * this->method_size(); + for (int i = 0; i < this->method_size(); i++) { + total_size += + 
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->method(i)); + } + + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void ServiceDescriptorProto::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const ServiceDescriptorProto* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void ServiceDescriptorProto::MergeFrom(const ServiceDescriptorProto& from) { + GOOGLE_CHECK_NE(&from, this); + method_.MergeFrom(from.method_); + if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (from.has_name()) { + set_name(from.name()); + } + if (from.has_options()) { + mutable_options()->::google::protobuf::ServiceOptions::MergeFrom(from.options()); + } + } + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void ServiceDescriptorProto::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void ServiceDescriptorProto::CopyFrom(const ServiceDescriptorProto& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool ServiceDescriptorProto::IsInitialized() const { + + for (int i = 0; i < method_size(); i++) { + if (!this->method(i).IsInitialized()) return false; + } + if (has_options()) { + if (!this->options().IsInitialized()) return false; + } + return true; +} + +void ServiceDescriptorProto::Swap(ServiceDescriptorProto* other) { + if (other != this) { + std::swap(name_, other->name_); + method_.Swap(&other->method_); + std::swap(options_, other->options_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + } +} + +::google::protobuf::Metadata ServiceDescriptorProto::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = ServiceDescriptorProto_descriptor_; + metadata.reflection = ServiceDescriptorProto_reflection_; + return metadata; +} + + +// =================================================================== + +#ifndef _MSC_VER +const int MethodDescriptorProto::kNameFieldNumber; +const int MethodDescriptorProto::kInputTypeFieldNumber; +const int MethodDescriptorProto::kOutputTypeFieldNumber; +const int MethodDescriptorProto::kOptionsFieldNumber; +#endif // !_MSC_VER + +MethodDescriptorProto::MethodDescriptorProto() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void MethodDescriptorProto::InitAsDefaultInstance() { + options_ = const_cast< ::google::protobuf::MethodOptions*>(&::google::protobuf::MethodOptions::default_instance()); +} + +MethodDescriptorProto::MethodDescriptorProto(const MethodDescriptorProto& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void MethodDescriptorProto::SharedCtor() { + _cached_size_ = 0; + name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + input_type_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + output_type_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + options_ = NULL; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + 
+MethodDescriptorProto::~MethodDescriptorProto() { + SharedDtor(); +} + +void MethodDescriptorProto::SharedDtor() { + if (name_ != &::google::protobuf::internal::kEmptyString) { + delete name_; + } + if (input_type_ != &::google::protobuf::internal::kEmptyString) { + delete input_type_; + } + if (output_type_ != &::google::protobuf::internal::kEmptyString) { + delete output_type_; + } + if (this != default_instance_) { + delete options_; + } +} + +void MethodDescriptorProto::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* MethodDescriptorProto::descriptor() { + protobuf_AssignDescriptorsOnce(); + return MethodDescriptorProto_descriptor_; +} + +const MethodDescriptorProto& MethodDescriptorProto::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +MethodDescriptorProto* MethodDescriptorProto::default_instance_ = NULL; + +MethodDescriptorProto* MethodDescriptorProto::New() const { + return new MethodDescriptorProto; +} + +void MethodDescriptorProto::Clear() { + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (has_name()) { + if (name_ != &::google::protobuf::internal::kEmptyString) { + name_->clear(); + } + } + if (has_input_type()) { + if (input_type_ != &::google::protobuf::internal::kEmptyString) { + input_type_->clear(); + } + } + if (has_output_type()) { + if (output_type_ != &::google::protobuf::internal::kEmptyString) { + output_type_->clear(); + } + } + if (has_options()) { + if (options_ != NULL) options_->::google::protobuf::MethodOptions::Clear(); + } + } + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool MethodDescriptorProto::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional string name = 1; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_name())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(18)) goto parse_input_type; + break; + } + + // optional string input_type = 2; + case 2: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_input_type: + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_input_type())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->input_type().data(), this->input_type().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(26)) goto parse_output_type; + break; + } + + // optional string output_type = 3; + case 3: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_output_type: + 
DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_output_type())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->output_type().data(), this->output_type().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(34)) goto parse_options; + break; + } + + // optional .google.protobuf.MethodOptions options = 4; + case 4: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_options: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, mutable_options())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void MethodDescriptorProto::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 1, this->name(), output); + } + + // optional string input_type = 2; + if (has_input_type()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->input_type().data(), this->input_type().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 2, this->input_type(), output); + } + + // optional string output_type = 3; + if (has_output_type()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->output_type().data(), this->output_type().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 3, this->output_type(), output); + } + + // optional .google.protobuf.MethodOptions options = 4; + if (has_options()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 4, this->options(), output); + } + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* MethodDescriptorProto::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // optional string name = 1; + if (has_name()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name().data(), this->name().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->name(), target); + } + + // optional string input_type = 2; + if (has_input_type()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->input_type().data(), this->input_type().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 2, this->input_type(), target); + } + + // optional string output_type = 3; + if (has_output_type()) { + 
::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->output_type().data(), this->output_type().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 3, this->output_type(), target); + } + + // optional .google.protobuf.MethodOptions options = 4; + if (has_options()) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 4, this->options(), target); + } + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int MethodDescriptorProto::ByteSize() const { + int total_size = 0; + + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + // optional string name = 1; + if (has_name()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->name()); + } + + // optional string input_type = 2; + if (has_input_type()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->input_type()); + } + + // optional string output_type = 3; + if (has_output_type()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->output_type()); + } + + // optional .google.protobuf.MethodOptions options = 4; + if (has_options()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->options()); + } + + } + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void MethodDescriptorProto::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const MethodDescriptorProto* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void MethodDescriptorProto::MergeFrom(const MethodDescriptorProto& from) { + GOOGLE_CHECK_NE(&from, this); + if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (from.has_name()) { + set_name(from.name()); + } + if (from.has_input_type()) { + set_input_type(from.input_type()); + } + if (from.has_output_type()) { + set_output_type(from.output_type()); + } + if (from.has_options()) { + mutable_options()->::google::protobuf::MethodOptions::MergeFrom(from.options()); + } + } + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void MethodDescriptorProto::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void MethodDescriptorProto::CopyFrom(const MethodDescriptorProto& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool MethodDescriptorProto::IsInitialized() const { + + if (has_options()) { + if (!this->options().IsInitialized()) return false; + } + return true; +} + +void MethodDescriptorProto::Swap(MethodDescriptorProto* other) { + if (other != this) { + std::swap(name_, other->name_); + std::swap(input_type_, other->input_type_); + std::swap(output_type_, other->output_type_); + std::swap(options_, other->options_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + } +} + 
+::google::protobuf::Metadata MethodDescriptorProto::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = MethodDescriptorProto_descriptor_; + metadata.reflection = MethodDescriptorProto_reflection_; + return metadata; +} + + +// =================================================================== + +const ::google::protobuf::EnumDescriptor* FileOptions_OptimizeMode_descriptor() { + protobuf_AssignDescriptorsOnce(); + return FileOptions_OptimizeMode_descriptor_; +} +bool FileOptions_OptimizeMode_IsValid(int value) { + switch(value) { + case 1: + case 2: + case 3: + return true; + default: + return false; + } +} + +#ifndef _MSC_VER +const FileOptions_OptimizeMode FileOptions::SPEED; +const FileOptions_OptimizeMode FileOptions::CODE_SIZE; +const FileOptions_OptimizeMode FileOptions::LITE_RUNTIME; +const FileOptions_OptimizeMode FileOptions::OptimizeMode_MIN; +const FileOptions_OptimizeMode FileOptions::OptimizeMode_MAX; +const int FileOptions::OptimizeMode_ARRAYSIZE; +#endif // _MSC_VER +#ifndef _MSC_VER +const int FileOptions::kJavaPackageFieldNumber; +const int FileOptions::kJavaOuterClassnameFieldNumber; +const int FileOptions::kJavaMultipleFilesFieldNumber; +const int FileOptions::kJavaGenerateEqualsAndHashFieldNumber; +const int FileOptions::kOptimizeForFieldNumber; +const int FileOptions::kCcGenericServicesFieldNumber; +const int FileOptions::kJavaGenericServicesFieldNumber; +const int FileOptions::kPyGenericServicesFieldNumber; +const int FileOptions::kUninterpretedOptionFieldNumber; +#endif // !_MSC_VER + +FileOptions::FileOptions() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void FileOptions::InitAsDefaultInstance() { +} + +FileOptions::FileOptions(const FileOptions& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void FileOptions::SharedCtor() { + _cached_size_ = 0; + java_package_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + java_outer_classname_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + java_multiple_files_ = false; + java_generate_equals_and_hash_ = false; + optimize_for_ = 1; + cc_generic_services_ = false; + java_generic_services_ = false; + py_generic_services_ = false; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +FileOptions::~FileOptions() { + SharedDtor(); +} + +void FileOptions::SharedDtor() { + if (java_package_ != &::google::protobuf::internal::kEmptyString) { + delete java_package_; + } + if (java_outer_classname_ != &::google::protobuf::internal::kEmptyString) { + delete java_outer_classname_; + } + if (this != default_instance_) { + } +} + +void FileOptions::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* FileOptions::descriptor() { + protobuf_AssignDescriptorsOnce(); + return FileOptions_descriptor_; +} + +const FileOptions& FileOptions::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +FileOptions* FileOptions::default_instance_ = NULL; + +FileOptions* FileOptions::New() const { + return new FileOptions; +} + +void FileOptions::Clear() { + _extensions_.Clear(); + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (has_java_package()) { + if (java_package_ != &::google::protobuf::internal::kEmptyString) { + java_package_->clear(); + } + } + if 
(has_java_outer_classname()) { + if (java_outer_classname_ != &::google::protobuf::internal::kEmptyString) { + java_outer_classname_->clear(); + } + } + java_multiple_files_ = false; + java_generate_equals_and_hash_ = false; + optimize_for_ = 1; + cc_generic_services_ = false; + java_generic_services_ = false; + py_generic_services_ = false; + } + uninterpreted_option_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool FileOptions::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional string java_package = 1; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_java_package())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->java_package().data(), this->java_package().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(66)) goto parse_java_outer_classname; + break; + } + + // optional string java_outer_classname = 8; + case 8: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_java_outer_classname: + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_java_outer_classname())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->java_outer_classname().data(), this->java_outer_classname().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(72)) goto parse_optimize_for; + break; + } + + // optional .google.protobuf.FileOptions.OptimizeMode optimize_for = 9 [default = SPEED]; + case 9: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_optimize_for: + int value; + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>( + input, &value))); + if (::google::protobuf::FileOptions_OptimizeMode_IsValid(value)) { + set_optimize_for(static_cast< ::google::protobuf::FileOptions_OptimizeMode >(value)); + } else { + mutable_unknown_fields()->AddVarint(9, value); + } + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(80)) goto parse_java_multiple_files; + break; + } + + // optional bool java_multiple_files = 10 [default = false]; + case 10: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_java_multiple_files: + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( + input, &java_multiple_files_))); + set_has_java_multiple_files(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(128)) goto parse_cc_generic_services; + break; + } + + // optional bool cc_generic_services = 16 [default = false]; + case 16: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + 
::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_cc_generic_services: + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( + input, &cc_generic_services_))); + set_has_cc_generic_services(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(136)) goto parse_java_generic_services; + break; + } + + // optional bool java_generic_services = 17 [default = false]; + case 17: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_java_generic_services: + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( + input, &java_generic_services_))); + set_has_java_generic_services(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(144)) goto parse_py_generic_services; + break; + } + + // optional bool py_generic_services = 18 [default = false]; + case 18: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_py_generic_services: + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( + input, &py_generic_services_))); + set_has_py_generic_services(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(160)) goto parse_java_generate_equals_and_hash; + break; + } + + // optional bool java_generate_equals_and_hash = 20 [default = false]; + case 20: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_java_generate_equals_and_hash: + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( + input, &java_generate_equals_and_hash_))); + set_has_java_generate_equals_and_hash(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(7994)) goto parse_uninterpreted_option; + break; + } + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + case 999: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_uninterpreted_option: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_uninterpreted_option())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(7994)) goto parse_uninterpreted_option; + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + if ((8000u <= tag)) { + DO_(_extensions_.ParseField(tag, input, default_instance_, + mutable_unknown_fields())); + continue; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void FileOptions::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // optional string java_package = 1; + if (has_java_package()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->java_package().data(), this->java_package().length(), + 
::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 1, this->java_package(), output); + } + + // optional string java_outer_classname = 8; + if (has_java_outer_classname()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->java_outer_classname().data(), this->java_outer_classname().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 8, this->java_outer_classname(), output); + } + + // optional .google.protobuf.FileOptions.OptimizeMode optimize_for = 9 [default = SPEED]; + if (has_optimize_for()) { + ::google::protobuf::internal::WireFormatLite::WriteEnum( + 9, this->optimize_for(), output); + } + + // optional bool java_multiple_files = 10 [default = false]; + if (has_java_multiple_files()) { + ::google::protobuf::internal::WireFormatLite::WriteBool(10, this->java_multiple_files(), output); + } + + // optional bool cc_generic_services = 16 [default = false]; + if (has_cc_generic_services()) { + ::google::protobuf::internal::WireFormatLite::WriteBool(16, this->cc_generic_services(), output); + } + + // optional bool java_generic_services = 17 [default = false]; + if (has_java_generic_services()) { + ::google::protobuf::internal::WireFormatLite::WriteBool(17, this->java_generic_services(), output); + } + + // optional bool py_generic_services = 18 [default = false]; + if (has_py_generic_services()) { + ::google::protobuf::internal::WireFormatLite::WriteBool(18, this->py_generic_services(), output); + } + + // optional bool java_generate_equals_and_hash = 20 [default = false]; + if (has_java_generate_equals_and_hash()) { + ::google::protobuf::internal::WireFormatLite::WriteBool(20, this->java_generate_equals_and_hash(), output); + } + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 999, this->uninterpreted_option(i), output); + } + + // Extension range [1000, 536870912) + _extensions_.SerializeWithCachedSizes( + 1000, 536870912, output); + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* FileOptions::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // optional string java_package = 1; + if (has_java_package()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->java_package().data(), this->java_package().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->java_package(), target); + } + + // optional string java_outer_classname = 8; + if (has_java_outer_classname()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->java_outer_classname().data(), this->java_outer_classname().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 8, this->java_outer_classname(), target); + } + + // optional .google.protobuf.FileOptions.OptimizeMode optimize_for = 9 [default = SPEED]; + if (has_optimize_for()) { + target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray( + 9, this->optimize_for(), target); + } + + // optional bool java_multiple_files = 10 [default = false]; + if 
(has_java_multiple_files()) { + target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(10, this->java_multiple_files(), target); + } + + // optional bool cc_generic_services = 16 [default = false]; + if (has_cc_generic_services()) { + target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(16, this->cc_generic_services(), target); + } + + // optional bool java_generic_services = 17 [default = false]; + if (has_java_generic_services()) { + target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(17, this->java_generic_services(), target); + } + + // optional bool py_generic_services = 18 [default = false]; + if (has_py_generic_services()) { + target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(18, this->py_generic_services(), target); + } + + // optional bool java_generate_equals_and_hash = 20 [default = false]; + if (has_java_generate_equals_and_hash()) { + target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(20, this->java_generate_equals_and_hash(), target); + } + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 999, this->uninterpreted_option(i), target); + } + + // Extension range [1000, 536870912) + target = _extensions_.SerializeWithCachedSizesToArray( + 1000, 536870912, target); + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int FileOptions::ByteSize() const { + int total_size = 0; + + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + // optional string java_package = 1; + if (has_java_package()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->java_package()); + } + + // optional string java_outer_classname = 8; + if (has_java_outer_classname()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->java_outer_classname()); + } + + // optional bool java_multiple_files = 10 [default = false]; + if (has_java_multiple_files()) { + total_size += 1 + 1; + } + + // optional bool java_generate_equals_and_hash = 20 [default = false]; + if (has_java_generate_equals_and_hash()) { + total_size += 2 + 1; + } + + // optional .google.protobuf.FileOptions.OptimizeMode optimize_for = 9 [default = SPEED]; + if (has_optimize_for()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::EnumSize(this->optimize_for()); + } + + // optional bool cc_generic_services = 16 [default = false]; + if (has_cc_generic_services()) { + total_size += 2 + 1; + } + + // optional bool java_generic_services = 17 [default = false]; + if (has_java_generic_services()) { + total_size += 2 + 1; + } + + // optional bool py_generic_services = 18 [default = false]; + if (has_py_generic_services()) { + total_size += 2 + 1; + } + + } + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + total_size += 2 * this->uninterpreted_option_size(); + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->uninterpreted_option(i)); + } + + total_size += _extensions_.ByteSize(); + + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + 
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void FileOptions::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const FileOptions* source = + ::google::protobuf::internal::dynamic_cast_if_available<const FileOptions*>( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void FileOptions::MergeFrom(const FileOptions& from) { + GOOGLE_CHECK_NE(&from, this); + uninterpreted_option_.MergeFrom(from.uninterpreted_option_); + if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (from.has_java_package()) { + set_java_package(from.java_package()); + } + if (from.has_java_outer_classname()) { + set_java_outer_classname(from.java_outer_classname()); + } + if (from.has_java_multiple_files()) { + set_java_multiple_files(from.java_multiple_files()); + } + if (from.has_java_generate_equals_and_hash()) { + set_java_generate_equals_and_hash(from.java_generate_equals_and_hash()); + } + if (from.has_optimize_for()) { + set_optimize_for(from.optimize_for()); + } + if (from.has_cc_generic_services()) { + set_cc_generic_services(from.cc_generic_services()); + } + if (from.has_java_generic_services()) { + set_java_generic_services(from.java_generic_services()); + } + if (from.has_py_generic_services()) { + set_py_generic_services(from.py_generic_services()); + } + } + _extensions_.MergeFrom(from._extensions_); + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void FileOptions::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void FileOptions::CopyFrom(const FileOptions& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool FileOptions::IsInitialized() const { + + for (int i = 0; i < uninterpreted_option_size(); i++) { + if (!this->uninterpreted_option(i).IsInitialized()) return false; + } + + if (!_extensions_.IsInitialized()) return false; return true; +} + +void FileOptions::Swap(FileOptions* other) { + if (other != this) { + std::swap(java_package_, other->java_package_); + std::swap(java_outer_classname_, other->java_outer_classname_); + std::swap(java_multiple_files_, other->java_multiple_files_); + std::swap(java_generate_equals_and_hash_, other->java_generate_equals_and_hash_); + std::swap(optimize_for_, other->optimize_for_); + std::swap(cc_generic_services_, other->cc_generic_services_); + std::swap(java_generic_services_, other->java_generic_services_); + std::swap(py_generic_services_, other->py_generic_services_); + uninterpreted_option_.Swap(&other->uninterpreted_option_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + _extensions_.Swap(&other->_extensions_); + } +} + +::google::protobuf::Metadata FileOptions::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = FileOptions_descriptor_; + metadata.reflection = FileOptions_reflection_; + return metadata; +} + + +// =================================================================== + +#ifndef _MSC_VER +const int MessageOptions::kMessageSetWireFormatFieldNumber; +const int MessageOptions::kNoStandardDescriptorAccessorFieldNumber; +const int MessageOptions::kUninterpretedOptionFieldNumber; +#endif // !_MSC_VER + +MessageOptions::MessageOptions() + : 
::google::protobuf::Message() { + SharedCtor(); +} + +void MessageOptions::InitAsDefaultInstance() { +} + +MessageOptions::MessageOptions(const MessageOptions& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void MessageOptions::SharedCtor() { + _cached_size_ = 0; + message_set_wire_format_ = false; + no_standard_descriptor_accessor_ = false; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +MessageOptions::~MessageOptions() { + SharedDtor(); +} + +void MessageOptions::SharedDtor() { + if (this != default_instance_) { + } +} + +void MessageOptions::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* MessageOptions::descriptor() { + protobuf_AssignDescriptorsOnce(); + return MessageOptions_descriptor_; +} + +const MessageOptions& MessageOptions::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +MessageOptions* MessageOptions::default_instance_ = NULL; + +MessageOptions* MessageOptions::New() const { + return new MessageOptions; +} + +void MessageOptions::Clear() { + _extensions_.Clear(); + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + message_set_wire_format_ = false; + no_standard_descriptor_accessor_ = false; + } + uninterpreted_option_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool MessageOptions::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional bool message_set_wire_format = 1 [default = false]; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( + input, &message_set_wire_format_))); + set_has_message_set_wire_format(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(16)) goto parse_no_standard_descriptor_accessor; + break; + } + + // optional bool no_standard_descriptor_accessor = 2 [default = false]; + case 2: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_no_standard_descriptor_accessor: + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( + input, &no_standard_descriptor_accessor_))); + set_has_no_standard_descriptor_accessor(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(7994)) goto parse_uninterpreted_option; + break; + } + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + case 999: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_uninterpreted_option: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_uninterpreted_option())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(7994)) goto parse_uninterpreted_option; + if (input->ExpectAtEnd()) return true; + 
break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + if ((8000u <= tag)) { + DO_(_extensions_.ParseField(tag, input, default_instance_, + mutable_unknown_fields())); + continue; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void MessageOptions::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // optional bool message_set_wire_format = 1 [default = false]; + if (has_message_set_wire_format()) { + ::google::protobuf::internal::WireFormatLite::WriteBool(1, this->message_set_wire_format(), output); + } + + // optional bool no_standard_descriptor_accessor = 2 [default = false]; + if (has_no_standard_descriptor_accessor()) { + ::google::protobuf::internal::WireFormatLite::WriteBool(2, this->no_standard_descriptor_accessor(), output); + } + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 999, this->uninterpreted_option(i), output); + } + + // Extension range [1000, 536870912) + _extensions_.SerializeWithCachedSizes( + 1000, 536870912, output); + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* MessageOptions::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // optional bool message_set_wire_format = 1 [default = false]; + if (has_message_set_wire_format()) { + target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(1, this->message_set_wire_format(), target); + } + + // optional bool no_standard_descriptor_accessor = 2 [default = false]; + if (has_no_standard_descriptor_accessor()) { + target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(2, this->no_standard_descriptor_accessor(), target); + } + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 999, this->uninterpreted_option(i), target); + } + + // Extension range [1000, 536870912) + target = _extensions_.SerializeWithCachedSizesToArray( + 1000, 536870912, target); + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int MessageOptions::ByteSize() const { + int total_size = 0; + + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + // optional bool message_set_wire_format = 1 [default = false]; + if (has_message_set_wire_format()) { + total_size += 1 + 1; + } + + // optional bool no_standard_descriptor_accessor = 2 [default = false]; + if (has_no_standard_descriptor_accessor()) { + total_size += 1 + 1; + } + + } + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + total_size += 2 * this->uninterpreted_option_size(); + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->uninterpreted_option(i)); + } + + total_size += _extensions_.ByteSize(); + + if 
(!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void MessageOptions::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const MessageOptions* source = + ::google::protobuf::internal::dynamic_cast_if_available<const MessageOptions*>( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void MessageOptions::MergeFrom(const MessageOptions& from) { + GOOGLE_CHECK_NE(&from, this); + uninterpreted_option_.MergeFrom(from.uninterpreted_option_); + if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (from.has_message_set_wire_format()) { + set_message_set_wire_format(from.message_set_wire_format()); + } + if (from.has_no_standard_descriptor_accessor()) { + set_no_standard_descriptor_accessor(from.no_standard_descriptor_accessor()); + } + } + _extensions_.MergeFrom(from._extensions_); + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void MessageOptions::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void MessageOptions::CopyFrom(const MessageOptions& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool MessageOptions::IsInitialized() const { + + for (int i = 0; i < uninterpreted_option_size(); i++) { + if (!this->uninterpreted_option(i).IsInitialized()) return false; + } + + if (!_extensions_.IsInitialized()) return false; return true; +} + +void MessageOptions::Swap(MessageOptions* other) { + if (other != this) { + std::swap(message_set_wire_format_, other->message_set_wire_format_); + std::swap(no_standard_descriptor_accessor_, other->no_standard_descriptor_accessor_); + uninterpreted_option_.Swap(&other->uninterpreted_option_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + _extensions_.Swap(&other->_extensions_); + } +} + +::google::protobuf::Metadata MessageOptions::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = MessageOptions_descriptor_; + metadata.reflection = MessageOptions_reflection_; + return metadata; +} + + +// =================================================================== + +const ::google::protobuf::EnumDescriptor* FieldOptions_CType_descriptor() { + protobuf_AssignDescriptorsOnce(); + return FieldOptions_CType_descriptor_; +} +bool FieldOptions_CType_IsValid(int value) { + switch(value) { + case 0: + case 1: + case 2: + return true; + default: + return false; + } +} + +#ifndef _MSC_VER +const FieldOptions_CType FieldOptions::STRING; +const FieldOptions_CType FieldOptions::CORD; +const FieldOptions_CType FieldOptions::STRING_PIECE; +const FieldOptions_CType FieldOptions::CType_MIN; +const FieldOptions_CType FieldOptions::CType_MAX; +const int FieldOptions::CType_ARRAYSIZE; +#endif // _MSC_VER +#ifndef _MSC_VER +const int FieldOptions::kCtypeFieldNumber; +const int FieldOptions::kPackedFieldNumber; +const int FieldOptions::kDeprecatedFieldNumber; +const int FieldOptions::kExperimentalMapKeyFieldNumber; +const int FieldOptions::kUninterpretedOptionFieldNumber; +#endif // !_MSC_VER + +FieldOptions::FieldOptions() + : ::google::protobuf::Message() { + 
SharedCtor(); +} + +void FieldOptions::InitAsDefaultInstance() { +} + +FieldOptions::FieldOptions(const FieldOptions& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void FieldOptions::SharedCtor() { + _cached_size_ = 0; + ctype_ = 0; + packed_ = false; + deprecated_ = false; + experimental_map_key_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +FieldOptions::~FieldOptions() { + SharedDtor(); +} + +void FieldOptions::SharedDtor() { + if (experimental_map_key_ != &::google::protobuf::internal::kEmptyString) { + delete experimental_map_key_; + } + if (this != default_instance_) { + } +} + +void FieldOptions::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* FieldOptions::descriptor() { + protobuf_AssignDescriptorsOnce(); + return FieldOptions_descriptor_; +} + +const FieldOptions& FieldOptions::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +FieldOptions* FieldOptions::default_instance_ = NULL; + +FieldOptions* FieldOptions::New() const { + return new FieldOptions; +} + +void FieldOptions::Clear() { + _extensions_.Clear(); + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + ctype_ = 0; + packed_ = false; + deprecated_ = false; + if (has_experimental_map_key()) { + if (experimental_map_key_ != &::google::protobuf::internal::kEmptyString) { + experimental_map_key_->clear(); + } + } + } + uninterpreted_option_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool FieldOptions::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional .google.protobuf.FieldOptions.CType ctype = 1 [default = STRING]; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + int value; + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>( + input, &value))); + if (::google::protobuf::FieldOptions_CType_IsValid(value)) { + set_ctype(static_cast< ::google::protobuf::FieldOptions_CType >(value)); + } else { + mutable_unknown_fields()->AddVarint(1, value); + } + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(16)) goto parse_packed; + break; + } + + // optional bool packed = 2; + case 2: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_packed: + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( + input, &packed_))); + set_has_packed(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(24)) goto parse_deprecated; + break; + } + + // optional bool deprecated = 3 [default = false]; + case 3: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_deprecated: + 
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( + input, &deprecated_))); + set_has_deprecated(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(74)) goto parse_experimental_map_key; + break; + } + + // optional string experimental_map_key = 9; + case 9: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_experimental_map_key: + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_experimental_map_key())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->experimental_map_key().data(), this->experimental_map_key().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(7994)) goto parse_uninterpreted_option; + break; + } + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + case 999: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_uninterpreted_option: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_uninterpreted_option())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(7994)) goto parse_uninterpreted_option; + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + if ((8000u <= tag)) { + DO_(_extensions_.ParseField(tag, input, default_instance_, + mutable_unknown_fields())); + continue; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void FieldOptions::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // optional .google.protobuf.FieldOptions.CType ctype = 1 [default = STRING]; + if (has_ctype()) { + ::google::protobuf::internal::WireFormatLite::WriteEnum( + 1, this->ctype(), output); + } + + // optional bool packed = 2; + if (has_packed()) { + ::google::protobuf::internal::WireFormatLite::WriteBool(2, this->packed(), output); + } + + // optional bool deprecated = 3 [default = false]; + if (has_deprecated()) { + ::google::protobuf::internal::WireFormatLite::WriteBool(3, this->deprecated(), output); + } + + // optional string experimental_map_key = 9; + if (has_experimental_map_key()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->experimental_map_key().data(), this->experimental_map_key().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 9, this->experimental_map_key(), output); + } + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 999, this->uninterpreted_option(i), output); + } + + // Extension range [1000, 536870912) + _extensions_.SerializeWithCachedSizes( + 1000, 536870912, output); + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + 
+::google::protobuf::uint8* FieldOptions::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // optional .google.protobuf.FieldOptions.CType ctype = 1 [default = STRING]; + if (has_ctype()) { + target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray( + 1, this->ctype(), target); + } + + // optional bool packed = 2; + if (has_packed()) { + target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(2, this->packed(), target); + } + + // optional bool deprecated = 3 [default = false]; + if (has_deprecated()) { + target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(3, this->deprecated(), target); + } + + // optional string experimental_map_key = 9; + if (has_experimental_map_key()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->experimental_map_key().data(), this->experimental_map_key().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 9, this->experimental_map_key(), target); + } + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 999, this->uninterpreted_option(i), target); + } + + // Extension range [1000, 536870912) + target = _extensions_.SerializeWithCachedSizesToArray( + 1000, 536870912, target); + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int FieldOptions::ByteSize() const { + int total_size = 0; + + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + // optional .google.protobuf.FieldOptions.CType ctype = 1 [default = STRING]; + if (has_ctype()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::EnumSize(this->ctype()); + } + + // optional bool packed = 2; + if (has_packed()) { + total_size += 1 + 1; + } + + // optional bool deprecated = 3 [default = false]; + if (has_deprecated()) { + total_size += 1 + 1; + } + + // optional string experimental_map_key = 9; + if (has_experimental_map_key()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->experimental_map_key()); + } + + } + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + total_size += 2 * this->uninterpreted_option_size(); + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->uninterpreted_option(i)); + } + + total_size += _extensions_.ByteSize(); + + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void FieldOptions::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const FieldOptions* source = + ::google::protobuf::internal::dynamic_cast_if_available<const FieldOptions*>( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void FieldOptions::MergeFrom(const FieldOptions& from) { + GOOGLE_CHECK_NE(&from, this); + uninterpreted_option_.MergeFrom(from.uninterpreted_option_); + if (from._has_bits_[0 / 32] 
& (0xffu << (0 % 32))) { + if (from.has_ctype()) { + set_ctype(from.ctype()); + } + if (from.has_packed()) { + set_packed(from.packed()); + } + if (from.has_deprecated()) { + set_deprecated(from.deprecated()); + } + if (from.has_experimental_map_key()) { + set_experimental_map_key(from.experimental_map_key()); + } + } + _extensions_.MergeFrom(from._extensions_); + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void FieldOptions::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void FieldOptions::CopyFrom(const FieldOptions& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool FieldOptions::IsInitialized() const { + + for (int i = 0; i < uninterpreted_option_size(); i++) { + if (!this->uninterpreted_option(i).IsInitialized()) return false; + } + + if (!_extensions_.IsInitialized()) return false; return true; +} + +void FieldOptions::Swap(FieldOptions* other) { + if (other != this) { + std::swap(ctype_, other->ctype_); + std::swap(packed_, other->packed_); + std::swap(deprecated_, other->deprecated_); + std::swap(experimental_map_key_, other->experimental_map_key_); + uninterpreted_option_.Swap(&other->uninterpreted_option_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + _extensions_.Swap(&other->_extensions_); + } +} + +::google::protobuf::Metadata FieldOptions::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = FieldOptions_descriptor_; + metadata.reflection = FieldOptions_reflection_; + return metadata; +} + + +// =================================================================== + +#ifndef _MSC_VER +const int EnumOptions::kUninterpretedOptionFieldNumber; +#endif // !_MSC_VER + +EnumOptions::EnumOptions() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void EnumOptions::InitAsDefaultInstance() { +} + +EnumOptions::EnumOptions(const EnumOptions& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void EnumOptions::SharedCtor() { + _cached_size_ = 0; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +EnumOptions::~EnumOptions() { + SharedDtor(); +} + +void EnumOptions::SharedDtor() { + if (this != default_instance_) { + } +} + +void EnumOptions::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* EnumOptions::descriptor() { + protobuf_AssignDescriptorsOnce(); + return EnumOptions_descriptor_; +} + +const EnumOptions& EnumOptions::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +EnumOptions* EnumOptions::default_instance_ = NULL; + +EnumOptions* EnumOptions::New() const { + return new EnumOptions; +} + +void EnumOptions::Clear() { + _extensions_.Clear(); + uninterpreted_option_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool EnumOptions::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // repeated .google.protobuf.UninterpretedOption 
uninterpreted_option = 999; + case 999: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_uninterpreted_option: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_uninterpreted_option())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(7994)) goto parse_uninterpreted_option; + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + if ((8000u <= tag)) { + DO_(_extensions_.ParseField(tag, input, default_instance_, + mutable_unknown_fields())); + continue; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void EnumOptions::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 999, this->uninterpreted_option(i), output); + } + + // Extension range [1000, 536870912) + _extensions_.SerializeWithCachedSizes( + 1000, 536870912, output); + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* EnumOptions::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 999, this->uninterpreted_option(i), target); + } + + // Extension range [1000, 536870912) + target = _extensions_.SerializeWithCachedSizesToArray( + 1000, 536870912, target); + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int EnumOptions::ByteSize() const { + int total_size = 0; + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + total_size += 2 * this->uninterpreted_option_size(); + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->uninterpreted_option(i)); + } + + total_size += _extensions_.ByteSize(); + + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void EnumOptions::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const EnumOptions* source = + ::google::protobuf::internal::dynamic_cast_if_available<const EnumOptions*>( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void EnumOptions::MergeFrom(const EnumOptions& from) { + GOOGLE_CHECK_NE(&from, this); + uninterpreted_option_.MergeFrom(from.uninterpreted_option_); + 
_extensions_.MergeFrom(from._extensions_); + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void EnumOptions::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void EnumOptions::CopyFrom(const EnumOptions& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool EnumOptions::IsInitialized() const { + + for (int i = 0; i < uninterpreted_option_size(); i++) { + if (!this->uninterpreted_option(i).IsInitialized()) return false; + } + + if (!_extensions_.IsInitialized()) return false; return true; +} + +void EnumOptions::Swap(EnumOptions* other) { + if (other != this) { + uninterpreted_option_.Swap(&other->uninterpreted_option_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + _extensions_.Swap(&other->_extensions_); + } +} + +::google::protobuf::Metadata EnumOptions::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = EnumOptions_descriptor_; + metadata.reflection = EnumOptions_reflection_; + return metadata; +} + + +// =================================================================== + +#ifndef _MSC_VER +const int EnumValueOptions::kUninterpretedOptionFieldNumber; +#endif // !_MSC_VER + +EnumValueOptions::EnumValueOptions() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void EnumValueOptions::InitAsDefaultInstance() { +} + +EnumValueOptions::EnumValueOptions(const EnumValueOptions& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void EnumValueOptions::SharedCtor() { + _cached_size_ = 0; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +EnumValueOptions::~EnumValueOptions() { + SharedDtor(); +} + +void EnumValueOptions::SharedDtor() { + if (this != default_instance_) { + } +} + +void EnumValueOptions::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* EnumValueOptions::descriptor() { + protobuf_AssignDescriptorsOnce(); + return EnumValueOptions_descriptor_; +} + +const EnumValueOptions& EnumValueOptions::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +EnumValueOptions* EnumValueOptions::default_instance_ = NULL; + +EnumValueOptions* EnumValueOptions::New() const { + return new EnumValueOptions; +} + +void EnumValueOptions::Clear() { + _extensions_.Clear(); + uninterpreted_option_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool EnumValueOptions::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + case 999: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_uninterpreted_option: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_uninterpreted_option())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(7994)) 
goto parse_uninterpreted_option; + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + if ((8000u <= tag)) { + DO_(_extensions_.ParseField(tag, input, default_instance_, + mutable_unknown_fields())); + continue; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void EnumValueOptions::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 999, this->uninterpreted_option(i), output); + } + + // Extension range [1000, 536870912) + _extensions_.SerializeWithCachedSizes( + 1000, 536870912, output); + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* EnumValueOptions::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 999, this->uninterpreted_option(i), target); + } + + // Extension range [1000, 536870912) + target = _extensions_.SerializeWithCachedSizesToArray( + 1000, 536870912, target); + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int EnumValueOptions::ByteSize() const { + int total_size = 0; + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + total_size += 2 * this->uninterpreted_option_size(); + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->uninterpreted_option(i)); + } + + total_size += _extensions_.ByteSize(); + + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void EnumValueOptions::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const EnumValueOptions* source = + ::google::protobuf::internal::dynamic_cast_if_available<const EnumValueOptions*>( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void EnumValueOptions::MergeFrom(const EnumValueOptions& from) { + GOOGLE_CHECK_NE(&from, this); + uninterpreted_option_.MergeFrom(from.uninterpreted_option_); + _extensions_.MergeFrom(from._extensions_); + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void EnumValueOptions::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void EnumValueOptions::CopyFrom(const EnumValueOptions& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool 
EnumValueOptions::IsInitialized() const { + + for (int i = 0; i < uninterpreted_option_size(); i++) { + if (!this->uninterpreted_option(i).IsInitialized()) return false; + } + + if (!_extensions_.IsInitialized()) return false; return true; +} + +void EnumValueOptions::Swap(EnumValueOptions* other) { + if (other != this) { + uninterpreted_option_.Swap(&other->uninterpreted_option_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + _extensions_.Swap(&other->_extensions_); + } +} + +::google::protobuf::Metadata EnumValueOptions::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = EnumValueOptions_descriptor_; + metadata.reflection = EnumValueOptions_reflection_; + return metadata; +} + + +// =================================================================== + +#ifndef _MSC_VER +const int ServiceOptions::kUninterpretedOptionFieldNumber; +#endif // !_MSC_VER + +ServiceOptions::ServiceOptions() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void ServiceOptions::InitAsDefaultInstance() { +} + +ServiceOptions::ServiceOptions(const ServiceOptions& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void ServiceOptions::SharedCtor() { + _cached_size_ = 0; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +ServiceOptions::~ServiceOptions() { + SharedDtor(); +} + +void ServiceOptions::SharedDtor() { + if (this != default_instance_) { + } +} + +void ServiceOptions::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* ServiceOptions::descriptor() { + protobuf_AssignDescriptorsOnce(); + return ServiceOptions_descriptor_; +} + +const ServiceOptions& ServiceOptions::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +ServiceOptions* ServiceOptions::default_instance_ = NULL; + +ServiceOptions* ServiceOptions::New() const { + return new ServiceOptions; +} + +void ServiceOptions::Clear() { + _extensions_.Clear(); + uninterpreted_option_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool ServiceOptions::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + case 999: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_uninterpreted_option: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_uninterpreted_option())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(7994)) goto parse_uninterpreted_option; + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + if ((8000u <= tag)) { + DO_(_extensions_.ParseField(tag, input, default_instance_, + 
mutable_unknown_fields())); + continue; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void ServiceOptions::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 999, this->uninterpreted_option(i), output); + } + + // Extension range [1000, 536870912) + _extensions_.SerializeWithCachedSizes( + 1000, 536870912, output); + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* ServiceOptions::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 999, this->uninterpreted_option(i), target); + } + + // Extension range [1000, 536870912) + target = _extensions_.SerializeWithCachedSizesToArray( + 1000, 536870912, target); + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int ServiceOptions::ByteSize() const { + int total_size = 0; + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + total_size += 2 * this->uninterpreted_option_size(); + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->uninterpreted_option(i)); + } + + total_size += _extensions_.ByteSize(); + + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void ServiceOptions::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const ServiceOptions* source = + ::google::protobuf::internal::dynamic_cast_if_available<const ServiceOptions*>( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void ServiceOptions::MergeFrom(const ServiceOptions& from) { + GOOGLE_CHECK_NE(&from, this); + uninterpreted_option_.MergeFrom(from.uninterpreted_option_); + _extensions_.MergeFrom(from._extensions_); + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void ServiceOptions::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void ServiceOptions::CopyFrom(const ServiceOptions& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool ServiceOptions::IsInitialized() const { + + for (int i = 0; i < uninterpreted_option_size(); i++) { + if (!this->uninterpreted_option(i).IsInitialized()) return false; + } + + if (!_extensions_.IsInitialized()) return false; return true; +} + +void ServiceOptions::Swap(ServiceOptions* other) { + if (other != this) { + uninterpreted_option_.Swap(&other->uninterpreted_option_); + std::swap(_has_bits_[0], 
other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + _extensions_.Swap(&other->_extensions_); + } +} + +::google::protobuf::Metadata ServiceOptions::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = ServiceOptions_descriptor_; + metadata.reflection = ServiceOptions_reflection_; + return metadata; +} + + +// =================================================================== + +#ifndef _MSC_VER +const int MethodOptions::kUninterpretedOptionFieldNumber; +#endif // !_MSC_VER + +MethodOptions::MethodOptions() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void MethodOptions::InitAsDefaultInstance() { +} + +MethodOptions::MethodOptions(const MethodOptions& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void MethodOptions::SharedCtor() { + _cached_size_ = 0; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +MethodOptions::~MethodOptions() { + SharedDtor(); +} + +void MethodOptions::SharedDtor() { + if (this != default_instance_) { + } +} + +void MethodOptions::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* MethodOptions::descriptor() { + protobuf_AssignDescriptorsOnce(); + return MethodOptions_descriptor_; +} + +const MethodOptions& MethodOptions::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +MethodOptions* MethodOptions::default_instance_ = NULL; + +MethodOptions* MethodOptions::New() const { + return new MethodOptions; +} + +void MethodOptions::Clear() { + _extensions_.Clear(); + uninterpreted_option_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool MethodOptions::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + case 999: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_uninterpreted_option: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_uninterpreted_option())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(7994)) goto parse_uninterpreted_option; + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + if ((8000u <= tag)) { + DO_(_extensions_.ParseField(tag, input, default_instance_, + mutable_unknown_fields())); + continue; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void MethodOptions::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + 
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 999, this->uninterpreted_option(i), output); + } + + // Extension range [1000, 536870912) + _extensions_.SerializeWithCachedSizes( + 1000, 536870912, output); + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* MethodOptions::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 999, this->uninterpreted_option(i), target); + } + + // Extension range [1000, 536870912) + target = _extensions_.SerializeWithCachedSizesToArray( + 1000, 536870912, target); + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int MethodOptions::ByteSize() const { + int total_size = 0; + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + total_size += 2 * this->uninterpreted_option_size(); + for (int i = 0; i < this->uninterpreted_option_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->uninterpreted_option(i)); + } + + total_size += _extensions_.ByteSize(); + + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void MethodOptions::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const MethodOptions* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void MethodOptions::MergeFrom(const MethodOptions& from) { + GOOGLE_CHECK_NE(&from, this); + uninterpreted_option_.MergeFrom(from.uninterpreted_option_); + _extensions_.MergeFrom(from._extensions_); + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void MethodOptions::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void MethodOptions::CopyFrom(const MethodOptions& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool MethodOptions::IsInitialized() const { + + for (int i = 0; i < uninterpreted_option_size(); i++) { + if (!this->uninterpreted_option(i).IsInitialized()) return false; + } + + if (!_extensions_.IsInitialized()) return false; return true; +} + +void MethodOptions::Swap(MethodOptions* other) { + if (other != this) { + uninterpreted_option_.Swap(&other->uninterpreted_option_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + _extensions_.Swap(&other->_extensions_); + } +} + +::google::protobuf::Metadata MethodOptions::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = MethodOptions_descriptor_; + metadata.reflection = MethodOptions_reflection_; + return metadata; +} + + +// 
=================================================================== + +#ifndef _MSC_VER +const int UninterpretedOption_NamePart::kNamePartFieldNumber; +const int UninterpretedOption_NamePart::kIsExtensionFieldNumber; +#endif // !_MSC_VER + +UninterpretedOption_NamePart::UninterpretedOption_NamePart() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void UninterpretedOption_NamePart::InitAsDefaultInstance() { +} + +UninterpretedOption_NamePart::UninterpretedOption_NamePart(const UninterpretedOption_NamePart& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void UninterpretedOption_NamePart::SharedCtor() { + _cached_size_ = 0; + name_part_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + is_extension_ = false; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +UninterpretedOption_NamePart::~UninterpretedOption_NamePart() { + SharedDtor(); +} + +void UninterpretedOption_NamePart::SharedDtor() { + if (name_part_ != &::google::protobuf::internal::kEmptyString) { + delete name_part_; + } + if (this != default_instance_) { + } +} + +void UninterpretedOption_NamePart::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* UninterpretedOption_NamePart::descriptor() { + protobuf_AssignDescriptorsOnce(); + return UninterpretedOption_NamePart_descriptor_; +} + +const UninterpretedOption_NamePart& UninterpretedOption_NamePart::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +UninterpretedOption_NamePart* UninterpretedOption_NamePart::default_instance_ = NULL; + +UninterpretedOption_NamePart* UninterpretedOption_NamePart::New() const { + return new UninterpretedOption_NamePart; +} + +void UninterpretedOption_NamePart::Clear() { + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (has_name_part()) { + if (name_part_ != &::google::protobuf::internal::kEmptyString) { + name_part_->clear(); + } + } + is_extension_ = false; + } + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool UninterpretedOption_NamePart::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // required string name_part = 1; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_name_part())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name_part().data(), this->name_part().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(16)) goto parse_is_extension; + break; + } + + // required bool is_extension = 2; + case 2: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_is_extension: + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( + input, &is_extension_))); + set_has_is_extension(); + } else 
{ + goto handle_uninterpreted; + } + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void UninterpretedOption_NamePart::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // required string name_part = 1; + if (has_name_part()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name_part().data(), this->name_part().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 1, this->name_part(), output); + } + + // required bool is_extension = 2; + if (has_is_extension()) { + ::google::protobuf::internal::WireFormatLite::WriteBool(2, this->is_extension(), output); + } + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* UninterpretedOption_NamePart::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // required string name_part = 1; + if (has_name_part()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->name_part().data(), this->name_part().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->name_part(), target); + } + + // required bool is_extension = 2; + if (has_is_extension()) { + target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(2, this->is_extension(), target); + } + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int UninterpretedOption_NamePart::ByteSize() const { + int total_size = 0; + + if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) { + // required string name_part = 1; + if (has_name_part()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->name_part()); + } + + // required bool is_extension = 2; + if (has_is_extension()) { + total_size += 1 + 1; + } + + } + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void UninterpretedOption_NamePart::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const UninterpretedOption_NamePart* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void UninterpretedOption_NamePart::MergeFrom(const UninterpretedOption_NamePart& from) { + GOOGLE_CHECK_NE(&from, this); + if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (from.has_name_part()) { + set_name_part(from.name_part()); + } + if (from.has_is_extension()) { + set_is_extension(from.is_extension()); + } + } + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void UninterpretedOption_NamePart::CopyFrom(const 
::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void UninterpretedOption_NamePart::CopyFrom(const UninterpretedOption_NamePart& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool UninterpretedOption_NamePart::IsInitialized() const { + if ((_has_bits_[0] & 0x00000003) != 0x00000003) return false; + + return true; +} + +void UninterpretedOption_NamePart::Swap(UninterpretedOption_NamePart* other) { + if (other != this) { + std::swap(name_part_, other->name_part_); + std::swap(is_extension_, other->is_extension_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + } +} + +::google::protobuf::Metadata UninterpretedOption_NamePart::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = UninterpretedOption_NamePart_descriptor_; + metadata.reflection = UninterpretedOption_NamePart_reflection_; + return metadata; +} + + +// ------------------------------------------------------------------- + +#ifndef _MSC_VER +const int UninterpretedOption::kNameFieldNumber; +const int UninterpretedOption::kIdentifierValueFieldNumber; +const int UninterpretedOption::kPositiveIntValueFieldNumber; +const int UninterpretedOption::kNegativeIntValueFieldNumber; +const int UninterpretedOption::kDoubleValueFieldNumber; +const int UninterpretedOption::kStringValueFieldNumber; +const int UninterpretedOption::kAggregateValueFieldNumber; +#endif // !_MSC_VER + +UninterpretedOption::UninterpretedOption() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void UninterpretedOption::InitAsDefaultInstance() { +} + +UninterpretedOption::UninterpretedOption(const UninterpretedOption& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void UninterpretedOption::SharedCtor() { + _cached_size_ = 0; + identifier_value_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + positive_int_value_ = GOOGLE_ULONGLONG(0); + negative_int_value_ = GOOGLE_LONGLONG(0); + double_value_ = 0; + string_value_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + aggregate_value_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +UninterpretedOption::~UninterpretedOption() { + SharedDtor(); +} + +void UninterpretedOption::SharedDtor() { + if (identifier_value_ != &::google::protobuf::internal::kEmptyString) { + delete identifier_value_; + } + if (string_value_ != &::google::protobuf::internal::kEmptyString) { + delete string_value_; + } + if (aggregate_value_ != &::google::protobuf::internal::kEmptyString) { + delete aggregate_value_; + } + if (this != default_instance_) { + } +} + +void UninterpretedOption::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* UninterpretedOption::descriptor() { + protobuf_AssignDescriptorsOnce(); + return UninterpretedOption_descriptor_; +} + +const UninterpretedOption& UninterpretedOption::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +UninterpretedOption* UninterpretedOption::default_instance_ = NULL; + +UninterpretedOption* UninterpretedOption::New() const { + return new 
UninterpretedOption; +} + +void UninterpretedOption::Clear() { + if (_has_bits_[1 / 32] & (0xffu << (1 % 32))) { + if (has_identifier_value()) { + if (identifier_value_ != &::google::protobuf::internal::kEmptyString) { + identifier_value_->clear(); + } + } + positive_int_value_ = GOOGLE_ULONGLONG(0); + negative_int_value_ = GOOGLE_LONGLONG(0); + double_value_ = 0; + if (has_string_value()) { + if (string_value_ != &::google::protobuf::internal::kEmptyString) { + string_value_->clear(); + } + } + if (has_aggregate_value()) { + if (aggregate_value_ != &::google::protobuf::internal::kEmptyString) { + aggregate_value_->clear(); + } + } + } + name_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool UninterpretedOption::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // repeated .google.protobuf.UninterpretedOption.NamePart name = 2; + case 2: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_name: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_name())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(18)) goto parse_name; + if (input->ExpectTag(26)) goto parse_identifier_value; + break; + } + + // optional string identifier_value = 3; + case 3: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_identifier_value: + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_identifier_value())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->identifier_value().data(), this->identifier_value().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(32)) goto parse_positive_int_value; + break; + } + + // optional uint64 positive_int_value = 4; + case 4: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_positive_int_value: + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + ::google::protobuf::uint64, ::google::protobuf::internal::WireFormatLite::TYPE_UINT64>( + input, &positive_int_value_))); + set_has_positive_int_value(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(40)) goto parse_negative_int_value; + break; + } + + // optional int64 negative_int_value = 5; + case 5: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) { + parse_negative_int_value: + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + ::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::TYPE_INT64>( + input, &negative_int_value_))); + set_has_negative_int_value(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(49)) goto parse_double_value; + break; + } + + // optional double double_value = 6; + case 6: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_FIXED64) { + 
parse_double_value: + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + double, ::google::protobuf::internal::WireFormatLite::TYPE_DOUBLE>( + input, &double_value_))); + set_has_double_value(); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(58)) goto parse_string_value; + break; + } + + // optional bytes string_value = 7; + case 7: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_string_value: + DO_(::google::protobuf::internal::WireFormatLite::ReadBytes( + input, this->mutable_string_value())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(66)) goto parse_aggregate_value; + break; + } + + // optional string aggregate_value = 8; + case 8: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_aggregate_value: + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_aggregate_value())); + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->aggregate_value().data(), this->aggregate_value().length(), + ::google::protobuf::internal::WireFormat::PARSE); + } else { + goto handle_uninterpreted; + } + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void UninterpretedOption::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // repeated .google.protobuf.UninterpretedOption.NamePart name = 2; + for (int i = 0; i < this->name_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 2, this->name(i), output); + } + + // optional string identifier_value = 3; + if (has_identifier_value()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->identifier_value().data(), this->identifier_value().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 3, this->identifier_value(), output); + } + + // optional uint64 positive_int_value = 4; + if (has_positive_int_value()) { + ::google::protobuf::internal::WireFormatLite::WriteUInt64(4, this->positive_int_value(), output); + } + + // optional int64 negative_int_value = 5; + if (has_negative_int_value()) { + ::google::protobuf::internal::WireFormatLite::WriteInt64(5, this->negative_int_value(), output); + } + + // optional double double_value = 6; + if (has_double_value()) { + ::google::protobuf::internal::WireFormatLite::WriteDouble(6, this->double_value(), output); + } + + // optional bytes string_value = 7; + if (has_string_value()) { + ::google::protobuf::internal::WireFormatLite::WriteBytes( + 7, this->string_value(), output); + } + + // optional string aggregate_value = 8; + if (has_aggregate_value()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->aggregate_value().data(), this->aggregate_value().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + ::google::protobuf::internal::WireFormatLite::WriteString( + 8, this->aggregate_value(), output); + } + + if (!unknown_fields().empty()) 
{ + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* UninterpretedOption::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // repeated .google.protobuf.UninterpretedOption.NamePart name = 2; + for (int i = 0; i < this->name_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 2, this->name(i), target); + } + + // optional string identifier_value = 3; + if (has_identifier_value()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->identifier_value().data(), this->identifier_value().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 3, this->identifier_value(), target); + } + + // optional uint64 positive_int_value = 4; + if (has_positive_int_value()) { + target = ::google::protobuf::internal::WireFormatLite::WriteUInt64ToArray(4, this->positive_int_value(), target); + } + + // optional int64 negative_int_value = 5; + if (has_negative_int_value()) { + target = ::google::protobuf::internal::WireFormatLite::WriteInt64ToArray(5, this->negative_int_value(), target); + } + + // optional double double_value = 6; + if (has_double_value()) { + target = ::google::protobuf::internal::WireFormatLite::WriteDoubleToArray(6, this->double_value(), target); + } + + // optional bytes string_value = 7; + if (has_string_value()) { + target = + ::google::protobuf::internal::WireFormatLite::WriteBytesToArray( + 7, this->string_value(), target); + } + + // optional string aggregate_value = 8; + if (has_aggregate_value()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8String( + this->aggregate_value().data(), this->aggregate_value().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 8, this->aggregate_value(), target); + } + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int UninterpretedOption::ByteSize() const { + int total_size = 0; + + if (_has_bits_[1 / 32] & (0xffu << (1 % 32))) { + // optional string identifier_value = 3; + if (has_identifier_value()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->identifier_value()); + } + + // optional uint64 positive_int_value = 4; + if (has_positive_int_value()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::UInt64Size( + this->positive_int_value()); + } + + // optional int64 negative_int_value = 5; + if (has_negative_int_value()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::Int64Size( + this->negative_int_value()); + } + + // optional double double_value = 6; + if (has_double_value()) { + total_size += 1 + 8; + } + + // optional bytes string_value = 7; + if (has_string_value()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::BytesSize( + this->string_value()); + } + + // optional string aggregate_value = 8; + if (has_aggregate_value()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->aggregate_value()); + } + + } + // repeated .google.protobuf.UninterpretedOption.NamePart name = 2; + total_size += 1 * this->name_size(); + for (int i = 0; i < this->name_size(); i++) { + total_size += + 
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->name(i)); + } + + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void UninterpretedOption::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const UninterpretedOption* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void UninterpretedOption::MergeFrom(const UninterpretedOption& from) { + GOOGLE_CHECK_NE(&from, this); + name_.MergeFrom(from.name_); + if (from._has_bits_[1 / 32] & (0xffu << (1 % 32))) { + if (from.has_identifier_value()) { + set_identifier_value(from.identifier_value()); + } + if (from.has_positive_int_value()) { + set_positive_int_value(from.positive_int_value()); + } + if (from.has_negative_int_value()) { + set_negative_int_value(from.negative_int_value()); + } + if (from.has_double_value()) { + set_double_value(from.double_value()); + } + if (from.has_string_value()) { + set_string_value(from.string_value()); + } + if (from.has_aggregate_value()) { + set_aggregate_value(from.aggregate_value()); + } + } + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void UninterpretedOption::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void UninterpretedOption::CopyFrom(const UninterpretedOption& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool UninterpretedOption::IsInitialized() const { + + for (int i = 0; i < name_size(); i++) { + if (!this->name(i).IsInitialized()) return false; + } + return true; +} + +void UninterpretedOption::Swap(UninterpretedOption* other) { + if (other != this) { + name_.Swap(&other->name_); + std::swap(identifier_value_, other->identifier_value_); + std::swap(positive_int_value_, other->positive_int_value_); + std::swap(negative_int_value_, other->negative_int_value_); + std::swap(double_value_, other->double_value_); + std::swap(string_value_, other->string_value_); + std::swap(aggregate_value_, other->aggregate_value_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + } +} + +::google::protobuf::Metadata UninterpretedOption::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = UninterpretedOption_descriptor_; + metadata.reflection = UninterpretedOption_reflection_; + return metadata; +} + + +// =================================================================== + +#ifndef _MSC_VER +const int SourceCodeInfo_Location::kPathFieldNumber; +const int SourceCodeInfo_Location::kSpanFieldNumber; +#endif // !_MSC_VER + +SourceCodeInfo_Location::SourceCodeInfo_Location() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void SourceCodeInfo_Location::InitAsDefaultInstance() { +} + +SourceCodeInfo_Location::SourceCodeInfo_Location(const SourceCodeInfo_Location& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void SourceCodeInfo_Location::SharedCtor() { + _cached_size_ = 0; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + 
+SourceCodeInfo_Location::~SourceCodeInfo_Location() { + SharedDtor(); +} + +void SourceCodeInfo_Location::SharedDtor() { + if (this != default_instance_) { + } +} + +void SourceCodeInfo_Location::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* SourceCodeInfo_Location::descriptor() { + protobuf_AssignDescriptorsOnce(); + return SourceCodeInfo_Location_descriptor_; +} + +const SourceCodeInfo_Location& SourceCodeInfo_Location::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +SourceCodeInfo_Location* SourceCodeInfo_Location::default_instance_ = NULL; + +SourceCodeInfo_Location* SourceCodeInfo_Location::New() const { + return new SourceCodeInfo_Location; +} + +void SourceCodeInfo_Location::Clear() { + path_.Clear(); + span_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool SourceCodeInfo_Location::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // repeated int32 path = 1 [packed = true]; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + DO_((::google::protobuf::internal::WireFormatLite::ReadPackedPrimitive< + ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>( + input, this->mutable_path()))); + } else if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) + == ::google::protobuf::internal::WireFormatLite:: + WIRETYPE_VARINT) { + DO_((::google::protobuf::internal::WireFormatLite::ReadRepeatedPrimitiveNoInline< + ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>( + 1, 10, input, this->mutable_path()))); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(18)) goto parse_span; + break; + } + + // repeated int32 span = 2 [packed = true]; + case 2: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_span: + DO_((::google::protobuf::internal::WireFormatLite::ReadPackedPrimitive< + ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>( + input, this->mutable_span()))); + } else if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) + == ::google::protobuf::internal::WireFormatLite:: + WIRETYPE_VARINT) { + DO_((::google::protobuf::internal::WireFormatLite::ReadRepeatedPrimitiveNoInline< + ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>( + 1, 18, input, this->mutable_span()))); + } else { + goto handle_uninterpreted; + } + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void SourceCodeInfo_Location::SerializeWithCachedSizes( 
+ ::google::protobuf::io::CodedOutputStream* output) const { + // repeated int32 path = 1 [packed = true]; + if (this->path_size() > 0) { + ::google::protobuf::internal::WireFormatLite::WriteTag(1, ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED, output); + output->WriteVarint32(_path_cached_byte_size_); + } + for (int i = 0; i < this->path_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteInt32NoTag( + this->path(i), output); + } + + // repeated int32 span = 2 [packed = true]; + if (this->span_size() > 0) { + ::google::protobuf::internal::WireFormatLite::WriteTag(2, ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED, output); + output->WriteVarint32(_span_cached_byte_size_); + } + for (int i = 0; i < this->span_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteInt32NoTag( + this->span(i), output); + } + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* SourceCodeInfo_Location::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // repeated int32 path = 1 [packed = true]; + if (this->path_size() > 0) { + target = ::google::protobuf::internal::WireFormatLite::WriteTagToArray( + 1, + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED, + target); + target = ::google::protobuf::io::CodedOutputStream::WriteVarint32ToArray( + _path_cached_byte_size_, target); + } + for (int i = 0; i < this->path_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteInt32NoTagToArray(this->path(i), target); + } + + // repeated int32 span = 2 [packed = true]; + if (this->span_size() > 0) { + target = ::google::protobuf::internal::WireFormatLite::WriteTagToArray( + 2, + ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED, + target); + target = ::google::protobuf::io::CodedOutputStream::WriteVarint32ToArray( + _span_cached_byte_size_, target); + } + for (int i = 0; i < this->span_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteInt32NoTagToArray(this->span(i), target); + } + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int SourceCodeInfo_Location::ByteSize() const { + int total_size = 0; + + // repeated int32 path = 1 [packed = true]; + { + int data_size = 0; + for (int i = 0; i < this->path_size(); i++) { + data_size += ::google::protobuf::internal::WireFormatLite:: + Int32Size(this->path(i)); + } + if (data_size > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::Int32Size(data_size); + } + _path_cached_byte_size_ = data_size; + total_size += data_size; + } + + // repeated int32 span = 2 [packed = true]; + { + int data_size = 0; + for (int i = 0; i < this->span_size(); i++) { + data_size += ::google::protobuf::internal::WireFormatLite:: + Int32Size(this->span(i)); + } + if (data_size > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::Int32Size(data_size); + } + _span_cached_byte_size_ = data_size; + total_size += data_size; + } + + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void 
SourceCodeInfo_Location::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const SourceCodeInfo_Location* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void SourceCodeInfo_Location::MergeFrom(const SourceCodeInfo_Location& from) { + GOOGLE_CHECK_NE(&from, this); + path_.MergeFrom(from.path_); + span_.MergeFrom(from.span_); + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void SourceCodeInfo_Location::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void SourceCodeInfo_Location::CopyFrom(const SourceCodeInfo_Location& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool SourceCodeInfo_Location::IsInitialized() const { + + return true; +} + +void SourceCodeInfo_Location::Swap(SourceCodeInfo_Location* other) { + if (other != this) { + path_.Swap(&other->path_); + span_.Swap(&other->span_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + } +} + +::google::protobuf::Metadata SourceCodeInfo_Location::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = SourceCodeInfo_Location_descriptor_; + metadata.reflection = SourceCodeInfo_Location_reflection_; + return metadata; +} + + +// ------------------------------------------------------------------- + +#ifndef _MSC_VER +const int SourceCodeInfo::kLocationFieldNumber; +#endif // !_MSC_VER + +SourceCodeInfo::SourceCodeInfo() + : ::google::protobuf::Message() { + SharedCtor(); +} + +void SourceCodeInfo::InitAsDefaultInstance() { +} + +SourceCodeInfo::SourceCodeInfo(const SourceCodeInfo& from) + : ::google::protobuf::Message() { + SharedCtor(); + MergeFrom(from); +} + +void SourceCodeInfo::SharedCtor() { + _cached_size_ = 0; + ::memset(_has_bits_, 0, sizeof(_has_bits_)); +} + +SourceCodeInfo::~SourceCodeInfo() { + SharedDtor(); +} + +void SourceCodeInfo::SharedDtor() { + if (this != default_instance_) { + } +} + +void SourceCodeInfo::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* SourceCodeInfo::descriptor() { + protobuf_AssignDescriptorsOnce(); + return SourceCodeInfo_descriptor_; +} + +const SourceCodeInfo& SourceCodeInfo::default_instance() { + if (default_instance_ == NULL) protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); return *default_instance_; +} + +SourceCodeInfo* SourceCodeInfo::default_instance_ = NULL; + +SourceCodeInfo* SourceCodeInfo::New() const { + return new SourceCodeInfo; +} + +void SourceCodeInfo::Clear() { + location_.Clear(); + ::memset(_has_bits_, 0, sizeof(_has_bits_)); + mutable_unknown_fields()->Clear(); +} + +bool SourceCodeInfo::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!(EXPRESSION)) return false + ::google::protobuf::uint32 tag; + while ((tag = input->ReadTag()) != 0) { + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // repeated .google.protobuf.SourceCodeInfo.Location location = 1; + case 1: { + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + 
::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + parse_location: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_location())); + } else { + goto handle_uninterpreted; + } + if (input->ExpectTag(10)) goto parse_location; + if (input->ExpectAtEnd()) return true; + break; + } + + default: { + handle_uninterpreted: + if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + return true; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } + return true; +#undef DO_ +} + +void SourceCodeInfo::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // repeated .google.protobuf.SourceCodeInfo.Location location = 1; + for (int i = 0; i < this->location_size(); i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 1, this->location(i), output); + } + + if (!unknown_fields().empty()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } +} + +::google::protobuf::uint8* SourceCodeInfo::SerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // repeated .google.protobuf.SourceCodeInfo.Location location = 1; + for (int i = 0; i < this->location_size(); i++) { + target = ::google::protobuf::internal::WireFormatLite:: + WriteMessageNoVirtualToArray( + 1, this->location(i), target); + } + + if (!unknown_fields().empty()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + return target; +} + +int SourceCodeInfo::ByteSize() const { + int total_size = 0; + + // repeated .google.protobuf.SourceCodeInfo.Location location = 1; + total_size += 1 * this->location_size(); + for (int i = 0; i < this->location_size(); i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->location(i)); + } + + if (!unknown_fields().empty()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = total_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void SourceCodeInfo::MergeFrom(const ::google::protobuf::Message& from) { + GOOGLE_CHECK_NE(&from, this); + const SourceCodeInfo* source = + ::google::protobuf::internal::dynamic_cast_if_available( + &from); + if (source == NULL) { + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + MergeFrom(*source); + } +} + +void SourceCodeInfo::MergeFrom(const SourceCodeInfo& from) { + GOOGLE_CHECK_NE(&from, this); + location_.MergeFrom(from.location_); + mutable_unknown_fields()->MergeFrom(from.unknown_fields()); +} + +void SourceCodeInfo::CopyFrom(const ::google::protobuf::Message& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void SourceCodeInfo::CopyFrom(const SourceCodeInfo& from) { + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool SourceCodeInfo::IsInitialized() const { + + return true; +} + +void SourceCodeInfo::Swap(SourceCodeInfo* other) { + if (other != this) { + location_.Swap(&other->location_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _unknown_fields_.Swap(&other->_unknown_fields_); + std::swap(_cached_size_, other->_cached_size_); + } +} + +::google::protobuf::Metadata 
SourceCodeInfo::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = SourceCodeInfo_descriptor_; + metadata.reflection = SourceCodeInfo_reflection_; + return metadata; +} + + +// @@protoc_insertion_point(namespace_scope) + +} // namespace protobuf +} // namespace google + +// @@protoc_insertion_point(global_scope) diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor.pb.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor.pb.h new file mode 100644 index 0000000000..267d0d3505 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor.pb.h @@ -0,0 +1,5223 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/protobuf/descriptor.proto + +#ifndef PROTOBUF_google_2fprotobuf_2fdescriptor_2eproto__INCLUDED +#define PROTOBUF_google_2fprotobuf_2fdescriptor_2eproto__INCLUDED + +#include <string> + +#include <google/protobuf/stubs/common.h> + +#if GOOGLE_PROTOBUF_VERSION < 2004000 +#error This file was generated by a newer version of protoc which is +#error incompatible with your Protocol Buffer headers. Please update +#error your headers. +#endif +#if 2004002 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION +#error This file was generated by an older version of protoc which is +#error incompatible with your Protocol Buffer headers. Please +#error regenerate this file with a newer version of protoc. +#endif + +#include <google/protobuf/generated_message_util.h> +#include <google/protobuf/repeated_field.h> +#include <google/protobuf/extension_set.h> +#include <google/protobuf/generated_message_reflection.h> +// @@protoc_insertion_point(includes) + +namespace google { +namespace protobuf { + +// Internal implementation detail -- do not call these. +void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); +void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); +void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + +class FileDescriptorSet; +class FileDescriptorProto; +class DescriptorProto; +class DescriptorProto_ExtensionRange; +class FieldDescriptorProto; +class EnumDescriptorProto; +class EnumValueDescriptorProto; +class ServiceDescriptorProto; +class MethodDescriptorProto; +class FileOptions; +class MessageOptions; +class FieldOptions; +class EnumOptions; +class EnumValueOptions; +class ServiceOptions; +class MethodOptions; +class UninterpretedOption; +class UninterpretedOption_NamePart; +class SourceCodeInfo; +class SourceCodeInfo_Location; + +enum FieldDescriptorProto_Type { + FieldDescriptorProto_Type_TYPE_DOUBLE = 1, + FieldDescriptorProto_Type_TYPE_FLOAT = 2, + FieldDescriptorProto_Type_TYPE_INT64 = 3, + FieldDescriptorProto_Type_TYPE_UINT64 = 4, + FieldDescriptorProto_Type_TYPE_INT32 = 5, + FieldDescriptorProto_Type_TYPE_FIXED64 = 6, + FieldDescriptorProto_Type_TYPE_FIXED32 = 7, + FieldDescriptorProto_Type_TYPE_BOOL = 8, + FieldDescriptorProto_Type_TYPE_STRING = 9, + FieldDescriptorProto_Type_TYPE_GROUP = 10, + FieldDescriptorProto_Type_TYPE_MESSAGE = 11, + FieldDescriptorProto_Type_TYPE_BYTES = 12, + FieldDescriptorProto_Type_TYPE_UINT32 = 13, + FieldDescriptorProto_Type_TYPE_ENUM = 14, + FieldDescriptorProto_Type_TYPE_SFIXED32 = 15, + FieldDescriptorProto_Type_TYPE_SFIXED64 = 16, + FieldDescriptorProto_Type_TYPE_SINT32 = 17, + FieldDescriptorProto_Type_TYPE_SINT64 = 18 +}; +LIBPROTOBUF_EXPORT bool FieldDescriptorProto_Type_IsValid(int value); +const FieldDescriptorProto_Type FieldDescriptorProto_Type_Type_MIN = FieldDescriptorProto_Type_TYPE_DOUBLE; +const
FieldDescriptorProto_Type FieldDescriptorProto_Type_Type_MAX = FieldDescriptorProto_Type_TYPE_SINT64; +const int FieldDescriptorProto_Type_Type_ARRAYSIZE = FieldDescriptorProto_Type_Type_MAX + 1; + +LIBPROTOBUF_EXPORT const ::google::protobuf::EnumDescriptor* FieldDescriptorProto_Type_descriptor(); +inline const ::std::string& FieldDescriptorProto_Type_Name(FieldDescriptorProto_Type value) { + return ::google::protobuf::internal::NameOfEnum( + FieldDescriptorProto_Type_descriptor(), value); +} +inline bool FieldDescriptorProto_Type_Parse( + const ::std::string& name, FieldDescriptorProto_Type* value) { + return ::google::protobuf::internal::ParseNamedEnum( + FieldDescriptorProto_Type_descriptor(), name, value); +} +enum FieldDescriptorProto_Label { + FieldDescriptorProto_Label_LABEL_OPTIONAL = 1, + FieldDescriptorProto_Label_LABEL_REQUIRED = 2, + FieldDescriptorProto_Label_LABEL_REPEATED = 3 +}; +LIBPROTOBUF_EXPORT bool FieldDescriptorProto_Label_IsValid(int value); +const FieldDescriptorProto_Label FieldDescriptorProto_Label_Label_MIN = FieldDescriptorProto_Label_LABEL_OPTIONAL; +const FieldDescriptorProto_Label FieldDescriptorProto_Label_Label_MAX = FieldDescriptorProto_Label_LABEL_REPEATED; +const int FieldDescriptorProto_Label_Label_ARRAYSIZE = FieldDescriptorProto_Label_Label_MAX + 1; + +LIBPROTOBUF_EXPORT const ::google::protobuf::EnumDescriptor* FieldDescriptorProto_Label_descriptor(); +inline const ::std::string& FieldDescriptorProto_Label_Name(FieldDescriptorProto_Label value) { + return ::google::protobuf::internal::NameOfEnum( + FieldDescriptorProto_Label_descriptor(), value); +} +inline bool FieldDescriptorProto_Label_Parse( + const ::std::string& name, FieldDescriptorProto_Label* value) { + return ::google::protobuf::internal::ParseNamedEnum( + FieldDescriptorProto_Label_descriptor(), name, value); +} +enum FileOptions_OptimizeMode { + FileOptions_OptimizeMode_SPEED = 1, + FileOptions_OptimizeMode_CODE_SIZE = 2, + FileOptions_OptimizeMode_LITE_RUNTIME = 3 +}; +LIBPROTOBUF_EXPORT bool FileOptions_OptimizeMode_IsValid(int value); +const FileOptions_OptimizeMode FileOptions_OptimizeMode_OptimizeMode_MIN = FileOptions_OptimizeMode_SPEED; +const FileOptions_OptimizeMode FileOptions_OptimizeMode_OptimizeMode_MAX = FileOptions_OptimizeMode_LITE_RUNTIME; +const int FileOptions_OptimizeMode_OptimizeMode_ARRAYSIZE = FileOptions_OptimizeMode_OptimizeMode_MAX + 1; + +LIBPROTOBUF_EXPORT const ::google::protobuf::EnumDescriptor* FileOptions_OptimizeMode_descriptor(); +inline const ::std::string& FileOptions_OptimizeMode_Name(FileOptions_OptimizeMode value) { + return ::google::protobuf::internal::NameOfEnum( + FileOptions_OptimizeMode_descriptor(), value); +} +inline bool FileOptions_OptimizeMode_Parse( + const ::std::string& name, FileOptions_OptimizeMode* value) { + return ::google::protobuf::internal::ParseNamedEnum( + FileOptions_OptimizeMode_descriptor(), name, value); +} +enum FieldOptions_CType { + FieldOptions_CType_STRING = 0, + FieldOptions_CType_CORD = 1, + FieldOptions_CType_STRING_PIECE = 2 +}; +LIBPROTOBUF_EXPORT bool FieldOptions_CType_IsValid(int value); +const FieldOptions_CType FieldOptions_CType_CType_MIN = FieldOptions_CType_STRING; +const FieldOptions_CType FieldOptions_CType_CType_MAX = FieldOptions_CType_STRING_PIECE; +const int FieldOptions_CType_CType_ARRAYSIZE = FieldOptions_CType_CType_MAX + 1; + +LIBPROTOBUF_EXPORT const ::google::protobuf::EnumDescriptor* FieldOptions_CType_descriptor(); +inline const ::std::string& FieldOptions_CType_Name(FieldOptions_CType 
value) { + return ::google::protobuf::internal::NameOfEnum( + FieldOptions_CType_descriptor(), value); +} +inline bool FieldOptions_CType_Parse( + const ::std::string& name, FieldOptions_CType* value) { + return ::google::protobuf::internal::ParseNamedEnum( + FieldOptions_CType_descriptor(), name, value); +} +// =================================================================== + +class LIBPROTOBUF_EXPORT FileDescriptorSet : public ::google::protobuf::Message { + public: + FileDescriptorSet(); + virtual ~FileDescriptorSet(); + + FileDescriptorSet(const FileDescriptorSet& from); + + inline FileDescriptorSet& operator=(const FileDescriptorSet& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const FileDescriptorSet& default_instance(); + + void Swap(FileDescriptorSet* other); + + // implements Message ---------------------------------------------- + + FileDescriptorSet* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const FileDescriptorSet& from); + void MergeFrom(const FileDescriptorSet& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated .google.protobuf.FileDescriptorProto file = 1; + inline int file_size() const; + inline void clear_file(); + static const int kFileFieldNumber = 1; + inline const ::google::protobuf::FileDescriptorProto& file(int index) const; + inline ::google::protobuf::FileDescriptorProto* mutable_file(int index); + inline ::google::protobuf::FileDescriptorProto* add_file(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::FileDescriptorProto >& + file() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::FileDescriptorProto >* + mutable_file(); + + // @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorSet) + private: + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::google::protobuf::RepeatedPtrField< ::google::protobuf::FileDescriptorProto > file_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(1 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static FileDescriptorSet* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT FileDescriptorProto : public ::google::protobuf::Message { + public: + 
FileDescriptorProto(); + virtual ~FileDescriptorProto(); + + FileDescriptorProto(const FileDescriptorProto& from); + + inline FileDescriptorProto& operator=(const FileDescriptorProto& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const FileDescriptorProto& default_instance(); + + void Swap(FileDescriptorProto* other); + + // implements Message ---------------------------------------------- + + FileDescriptorProto* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const FileDescriptorProto& from); + void MergeFrom(const FileDescriptorProto& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // optional string name = 1; + inline bool has_name() const; + inline void clear_name(); + static const int kNameFieldNumber = 1; + inline const ::std::string& name() const; + inline void set_name(const ::std::string& value); + inline void set_name(const char* value); + inline void set_name(const char* value, size_t size); + inline ::std::string* mutable_name(); + inline ::std::string* release_name(); + + // optional string package = 2; + inline bool has_package() const; + inline void clear_package(); + static const int kPackageFieldNumber = 2; + inline const ::std::string& package() const; + inline void set_package(const ::std::string& value); + inline void set_package(const char* value); + inline void set_package(const char* value, size_t size); + inline ::std::string* mutable_package(); + inline ::std::string* release_package(); + + // repeated string dependency = 3; + inline int dependency_size() const; + inline void clear_dependency(); + static const int kDependencyFieldNumber = 3; + inline const ::std::string& dependency(int index) const; + inline ::std::string* mutable_dependency(int index); + inline void set_dependency(int index, const ::std::string& value); + inline void set_dependency(int index, const char* value); + inline void set_dependency(int index, const char* value, size_t size); + inline ::std::string* add_dependency(); + inline void add_dependency(const ::std::string& value); + inline void add_dependency(const char* value); + inline void add_dependency(const char* value, size_t size); + inline const ::google::protobuf::RepeatedPtrField< ::std::string>& dependency() const; + inline ::google::protobuf::RepeatedPtrField< ::std::string>* mutable_dependency(); + + // repeated .google.protobuf.DescriptorProto message_type = 4; + inline int message_type_size() const; + inline void clear_message_type(); + static const int kMessageTypeFieldNumber = 4; + 
+  inline const ::google::protobuf::DescriptorProto& message_type(int index) const;
+  inline ::google::protobuf::DescriptorProto* mutable_message_type(int index);
+  inline ::google::protobuf::DescriptorProto* add_message_type();
+  inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::DescriptorProto >&
+      message_type() const;
+  inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::DescriptorProto >*
+      mutable_message_type();
+
+  // repeated .google.protobuf.EnumDescriptorProto enum_type = 5;
+  inline int enum_type_size() const;
+  inline void clear_enum_type();
+  static const int kEnumTypeFieldNumber = 5;
+  inline const ::google::protobuf::EnumDescriptorProto& enum_type(int index) const;
+  inline ::google::protobuf::EnumDescriptorProto* mutable_enum_type(int index);
+  inline ::google::protobuf::EnumDescriptorProto* add_enum_type();
+  inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::EnumDescriptorProto >&
+      enum_type() const;
+  inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::EnumDescriptorProto >*
+      mutable_enum_type();
+
+  // repeated .google.protobuf.ServiceDescriptorProto service = 6;
+  inline int service_size() const;
+  inline void clear_service();
+  static const int kServiceFieldNumber = 6;
+  inline const ::google::protobuf::ServiceDescriptorProto& service(int index) const;
+  inline ::google::protobuf::ServiceDescriptorProto* mutable_service(int index);
+  inline ::google::protobuf::ServiceDescriptorProto* add_service();
+  inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::ServiceDescriptorProto >&
+      service() const;
+  inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::ServiceDescriptorProto >*
+      mutable_service();
+
+  // repeated .google.protobuf.FieldDescriptorProto extension = 7;
+  inline int extension_size() const;
+  inline void clear_extension();
+  static const int kExtensionFieldNumber = 7;
+  inline const ::google::protobuf::FieldDescriptorProto& extension(int index) const;
+  inline ::google::protobuf::FieldDescriptorProto* mutable_extension(int index);
+  inline ::google::protobuf::FieldDescriptorProto* add_extension();
+  inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::FieldDescriptorProto >&
+      extension() const;
+  inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::FieldDescriptorProto >*
+      mutable_extension();
+
+  // optional .google.protobuf.FileOptions options = 8;
+  inline bool has_options() const;
+  inline void clear_options();
+  static const int kOptionsFieldNumber = 8;
+  inline const ::google::protobuf::FileOptions& options() const;
+  inline ::google::protobuf::FileOptions* mutable_options();
+  inline ::google::protobuf::FileOptions* release_options();
+
+  // optional .google.protobuf.SourceCodeInfo source_code_info = 9;
+  inline bool has_source_code_info() const;
+  inline void clear_source_code_info();
+  static const int kSourceCodeInfoFieldNumber = 9;
+  inline const ::google::protobuf::SourceCodeInfo& source_code_info() const;
+  inline ::google::protobuf::SourceCodeInfo* mutable_source_code_info();
+  inline ::google::protobuf::SourceCodeInfo* release_source_code_info();
+
+  // @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorProto)
+ private:
+  inline void set_has_name();
+  inline void clear_has_name();
+  inline void set_has_package();
+  inline void clear_has_package();
+  inline void set_has_options();
+  inline void clear_has_options();
+  inline void set_has_source_code_info();
+  inline void clear_has_source_code_info();
+
+  ::google::protobuf::UnknownFieldSet _unknown_fields_;
+
+  ::std::string* name_;
+  ::std::string* package_;
+  ::google::protobuf::RepeatedPtrField< ::std::string> dependency_;
+  ::google::protobuf::RepeatedPtrField< ::google::protobuf::DescriptorProto > message_type_;
+  ::google::protobuf::RepeatedPtrField< ::google::protobuf::EnumDescriptorProto > enum_type_;
+  ::google::protobuf::RepeatedPtrField< ::google::protobuf::ServiceDescriptorProto > service_;
+  ::google::protobuf::RepeatedPtrField< ::google::protobuf::FieldDescriptorProto > extension_;
+  ::google::protobuf::FileOptions* options_;
+  ::google::protobuf::SourceCodeInfo* source_code_info_;
+
+  mutable int _cached_size_;
+  ::google::protobuf::uint32 _has_bits_[(9 + 31) / 32];
+
+  friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto();
+  friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto();
+  friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto();
+
+  void InitAsDefaultInstance();
+  static FileDescriptorProto* default_instance_;
+};
+// -------------------------------------------------------------------
+
+class LIBPROTOBUF_EXPORT DescriptorProto_ExtensionRange : public ::google::protobuf::Message {
+ public:
+  DescriptorProto_ExtensionRange();
+  virtual ~DescriptorProto_ExtensionRange();
+
+  DescriptorProto_ExtensionRange(const DescriptorProto_ExtensionRange& from);
+
+  inline DescriptorProto_ExtensionRange& operator=(const DescriptorProto_ExtensionRange& from) {
+    CopyFrom(from);
+    return *this;
+  }
+
+  inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const {
+    return _unknown_fields_;
+  }
+
+  inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() {
+    return &_unknown_fields_;
+  }
+
+  static const ::google::protobuf::Descriptor* descriptor();
+  static const DescriptorProto_ExtensionRange& default_instance();
+
+  void Swap(DescriptorProto_ExtensionRange* other);
+
+  // implements Message ----------------------------------------------
+
+  DescriptorProto_ExtensionRange* New() const;
+  void CopyFrom(const ::google::protobuf::Message& from);
+  void MergeFrom(const ::google::protobuf::Message& from);
+  void CopyFrom(const DescriptorProto_ExtensionRange& from);
+  void MergeFrom(const DescriptorProto_ExtensionRange& from);
+  void Clear();
+  bool IsInitialized() const;
+
+  int ByteSize() const;
+  bool MergePartialFromCodedStream(
+      ::google::protobuf::io::CodedInputStream* input);
+  void SerializeWithCachedSizes(
+      ::google::protobuf::io::CodedOutputStream* output) const;
+  ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const;
+  int GetCachedSize() const { return _cached_size_; }
+  private:
+  void SharedCtor();
+  void SharedDtor();
+  void SetCachedSize(int size) const;
+  public:
+
+  ::google::protobuf::Metadata GetMetadata() const;
+
+  // nested types ----------------------------------------------------
+
+  // accessors -------------------------------------------------------
+
+  // optional int32 start = 1;
+  inline bool has_start() const;
+  inline void clear_start();
+  static const int kStartFieldNumber = 1;
+  inline ::google::protobuf::int32 start() const;
+  inline void set_start(::google::protobuf::int32 value);
+
+  // optional int32 end = 2;
+  inline bool has_end() const;
+  inline void clear_end();
+  static const int kEndFieldNumber = 2;
+  inline ::google::protobuf::int32 end() const;
+  inline void set_end(::google::protobuf::int32 value);
+
+  // @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto.ExtensionRange)
+ private:
+  inline void set_has_start();
+  inline void clear_has_start();
+  inline void set_has_end();
+  inline void clear_has_end();
+
+  ::google::protobuf::UnknownFieldSet _unknown_fields_;
+
+  ::google::protobuf::int32 start_;
+  ::google::protobuf::int32 end_;
+
+  mutable int _cached_size_;
+  ::google::protobuf::uint32 _has_bits_[(2 + 31) / 32];
+
+  friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto();
+  friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto();
+  friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto();
+
+  void InitAsDefaultInstance();
+  static DescriptorProto_ExtensionRange* default_instance_;
+};
+// -------------------------------------------------------------------
+
+class LIBPROTOBUF_EXPORT DescriptorProto : public ::google::protobuf::Message {
+ public:
+  DescriptorProto();
+  virtual ~DescriptorProto();
+
+  DescriptorProto(const DescriptorProto& from);
+
+  inline DescriptorProto& operator=(const DescriptorProto& from) {
+    CopyFrom(from);
+    return *this;
+  }
+
+  inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const {
+    return _unknown_fields_;
+  }
+
+  inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() {
+    return &_unknown_fields_;
+  }
+
+  static const ::google::protobuf::Descriptor* descriptor();
+  static const DescriptorProto& default_instance();
+
+  void Swap(DescriptorProto* other);
+
+  // implements Message ----------------------------------------------
+
+  DescriptorProto* New() const;
+  void CopyFrom(const ::google::protobuf::Message& from);
+  void MergeFrom(const ::google::protobuf::Message& from);
+  void CopyFrom(const DescriptorProto& from);
+  void MergeFrom(const DescriptorProto& from);
+  void Clear();
+  bool IsInitialized() const;
+
+  int ByteSize() const;
+  bool MergePartialFromCodedStream(
+      ::google::protobuf::io::CodedInputStream* input);
+  void SerializeWithCachedSizes(
+      ::google::protobuf::io::CodedOutputStream* output) const;
+  ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const;
+  int GetCachedSize() const { return _cached_size_; }
+  private:
+  void SharedCtor();
+  void SharedDtor();
+  void SetCachedSize(int size) const;
+  public:
+
+  ::google::protobuf::Metadata GetMetadata() const;
+
+  // nested types ----------------------------------------------------
+
+  typedef DescriptorProto_ExtensionRange ExtensionRange;
+
+  // accessors -------------------------------------------------------
+
+  // optional string name = 1;
+  inline bool has_name() const;
+  inline void clear_name();
+  static const int kNameFieldNumber = 1;
+  inline const ::std::string& name() const;
+  inline void set_name(const ::std::string& value);
+  inline void set_name(const char* value);
+  inline void set_name(const char* value, size_t size);
+  inline ::std::string* mutable_name();
+  inline ::std::string* release_name();
+
+  // repeated .google.protobuf.FieldDescriptorProto field = 2;
+  inline int field_size() const;
+  inline void clear_field();
+  static const int kFieldFieldNumber = 2;
+  inline const ::google::protobuf::FieldDescriptorProto& field(int index) const;
+  inline ::google::protobuf::FieldDescriptorProto* mutable_field(int index);
+  inline ::google::protobuf::FieldDescriptorProto* add_field();
+  inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::FieldDescriptorProto >&
+      field() const;
+  inline
::google::protobuf::RepeatedPtrField< ::google::protobuf::FieldDescriptorProto >* + mutable_field(); + + // repeated .google.protobuf.FieldDescriptorProto extension = 6; + inline int extension_size() const; + inline void clear_extension(); + static const int kExtensionFieldNumber = 6; + inline const ::google::protobuf::FieldDescriptorProto& extension(int index) const; + inline ::google::protobuf::FieldDescriptorProto* mutable_extension(int index); + inline ::google::protobuf::FieldDescriptorProto* add_extension(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::FieldDescriptorProto >& + extension() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::FieldDescriptorProto >* + mutable_extension(); + + // repeated .google.protobuf.DescriptorProto nested_type = 3; + inline int nested_type_size() const; + inline void clear_nested_type(); + static const int kNestedTypeFieldNumber = 3; + inline const ::google::protobuf::DescriptorProto& nested_type(int index) const; + inline ::google::protobuf::DescriptorProto* mutable_nested_type(int index); + inline ::google::protobuf::DescriptorProto* add_nested_type(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::DescriptorProto >& + nested_type() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::DescriptorProto >* + mutable_nested_type(); + + // repeated .google.protobuf.EnumDescriptorProto enum_type = 4; + inline int enum_type_size() const; + inline void clear_enum_type(); + static const int kEnumTypeFieldNumber = 4; + inline const ::google::protobuf::EnumDescriptorProto& enum_type(int index) const; + inline ::google::protobuf::EnumDescriptorProto* mutable_enum_type(int index); + inline ::google::protobuf::EnumDescriptorProto* add_enum_type(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::EnumDescriptorProto >& + enum_type() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::EnumDescriptorProto >* + mutable_enum_type(); + + // repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; + inline int extension_range_size() const; + inline void clear_extension_range(); + static const int kExtensionRangeFieldNumber = 5; + inline const ::google::protobuf::DescriptorProto_ExtensionRange& extension_range(int index) const; + inline ::google::protobuf::DescriptorProto_ExtensionRange* mutable_extension_range(int index); + inline ::google::protobuf::DescriptorProto_ExtensionRange* add_extension_range(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::DescriptorProto_ExtensionRange >& + extension_range() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::DescriptorProto_ExtensionRange >* + mutable_extension_range(); + + // optional .google.protobuf.MessageOptions options = 7; + inline bool has_options() const; + inline void clear_options(); + static const int kOptionsFieldNumber = 7; + inline const ::google::protobuf::MessageOptions& options() const; + inline ::google::protobuf::MessageOptions* mutable_options(); + inline ::google::protobuf::MessageOptions* release_options(); + + // @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto) + private: + inline void set_has_name(); + inline void clear_has_name(); + inline void set_has_options(); + inline void clear_has_options(); + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::std::string* name_; + ::google::protobuf::RepeatedPtrField< ::google::protobuf::FieldDescriptorProto > 
field_; + ::google::protobuf::RepeatedPtrField< ::google::protobuf::FieldDescriptorProto > extension_; + ::google::protobuf::RepeatedPtrField< ::google::protobuf::DescriptorProto > nested_type_; + ::google::protobuf::RepeatedPtrField< ::google::protobuf::EnumDescriptorProto > enum_type_; + ::google::protobuf::RepeatedPtrField< ::google::protobuf::DescriptorProto_ExtensionRange > extension_range_; + ::google::protobuf::MessageOptions* options_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(7 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static DescriptorProto* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT FieldDescriptorProto : public ::google::protobuf::Message { + public: + FieldDescriptorProto(); + virtual ~FieldDescriptorProto(); + + FieldDescriptorProto(const FieldDescriptorProto& from); + + inline FieldDescriptorProto& operator=(const FieldDescriptorProto& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const FieldDescriptorProto& default_instance(); + + void Swap(FieldDescriptorProto* other); + + // implements Message ---------------------------------------------- + + FieldDescriptorProto* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const FieldDescriptorProto& from); + void MergeFrom(const FieldDescriptorProto& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + typedef FieldDescriptorProto_Type Type; + static const Type TYPE_DOUBLE = FieldDescriptorProto_Type_TYPE_DOUBLE; + static const Type TYPE_FLOAT = FieldDescriptorProto_Type_TYPE_FLOAT; + static const Type TYPE_INT64 = FieldDescriptorProto_Type_TYPE_INT64; + static const Type TYPE_UINT64 = FieldDescriptorProto_Type_TYPE_UINT64; + static const Type TYPE_INT32 = FieldDescriptorProto_Type_TYPE_INT32; + static const Type TYPE_FIXED64 = FieldDescriptorProto_Type_TYPE_FIXED64; + static const Type TYPE_FIXED32 = FieldDescriptorProto_Type_TYPE_FIXED32; + static const Type TYPE_BOOL = FieldDescriptorProto_Type_TYPE_BOOL; + static const Type TYPE_STRING = FieldDescriptorProto_Type_TYPE_STRING; + static const Type TYPE_GROUP = FieldDescriptorProto_Type_TYPE_GROUP; + static const Type TYPE_MESSAGE = FieldDescriptorProto_Type_TYPE_MESSAGE; + static const Type TYPE_BYTES = FieldDescriptorProto_Type_TYPE_BYTES; + static const 
Type TYPE_UINT32 = FieldDescriptorProto_Type_TYPE_UINT32; + static const Type TYPE_ENUM = FieldDescriptorProto_Type_TYPE_ENUM; + static const Type TYPE_SFIXED32 = FieldDescriptorProto_Type_TYPE_SFIXED32; + static const Type TYPE_SFIXED64 = FieldDescriptorProto_Type_TYPE_SFIXED64; + static const Type TYPE_SINT32 = FieldDescriptorProto_Type_TYPE_SINT32; + static const Type TYPE_SINT64 = FieldDescriptorProto_Type_TYPE_SINT64; + static inline bool Type_IsValid(int value) { + return FieldDescriptorProto_Type_IsValid(value); + } + static const Type Type_MIN = + FieldDescriptorProto_Type_Type_MIN; + static const Type Type_MAX = + FieldDescriptorProto_Type_Type_MAX; + static const int Type_ARRAYSIZE = + FieldDescriptorProto_Type_Type_ARRAYSIZE; + static inline const ::google::protobuf::EnumDescriptor* + Type_descriptor() { + return FieldDescriptorProto_Type_descriptor(); + } + static inline const ::std::string& Type_Name(Type value) { + return FieldDescriptorProto_Type_Name(value); + } + static inline bool Type_Parse(const ::std::string& name, + Type* value) { + return FieldDescriptorProto_Type_Parse(name, value); + } + + typedef FieldDescriptorProto_Label Label; + static const Label LABEL_OPTIONAL = FieldDescriptorProto_Label_LABEL_OPTIONAL; + static const Label LABEL_REQUIRED = FieldDescriptorProto_Label_LABEL_REQUIRED; + static const Label LABEL_REPEATED = FieldDescriptorProto_Label_LABEL_REPEATED; + static inline bool Label_IsValid(int value) { + return FieldDescriptorProto_Label_IsValid(value); + } + static const Label Label_MIN = + FieldDescriptorProto_Label_Label_MIN; + static const Label Label_MAX = + FieldDescriptorProto_Label_Label_MAX; + static const int Label_ARRAYSIZE = + FieldDescriptorProto_Label_Label_ARRAYSIZE; + static inline const ::google::protobuf::EnumDescriptor* + Label_descriptor() { + return FieldDescriptorProto_Label_descriptor(); + } + static inline const ::std::string& Label_Name(Label value) { + return FieldDescriptorProto_Label_Name(value); + } + static inline bool Label_Parse(const ::std::string& name, + Label* value) { + return FieldDescriptorProto_Label_Parse(name, value); + } + + // accessors ------------------------------------------------------- + + // optional string name = 1; + inline bool has_name() const; + inline void clear_name(); + static const int kNameFieldNumber = 1; + inline const ::std::string& name() const; + inline void set_name(const ::std::string& value); + inline void set_name(const char* value); + inline void set_name(const char* value, size_t size); + inline ::std::string* mutable_name(); + inline ::std::string* release_name(); + + // optional int32 number = 3; + inline bool has_number() const; + inline void clear_number(); + static const int kNumberFieldNumber = 3; + inline ::google::protobuf::int32 number() const; + inline void set_number(::google::protobuf::int32 value); + + // optional .google.protobuf.FieldDescriptorProto.Label label = 4; + inline bool has_label() const; + inline void clear_label(); + static const int kLabelFieldNumber = 4; + inline ::google::protobuf::FieldDescriptorProto_Label label() const; + inline void set_label(::google::protobuf::FieldDescriptorProto_Label value); + + // optional .google.protobuf.FieldDescriptorProto.Type type = 5; + inline bool has_type() const; + inline void clear_type(); + static const int kTypeFieldNumber = 5; + inline ::google::protobuf::FieldDescriptorProto_Type type() const; + inline void set_type(::google::protobuf::FieldDescriptorProto_Type value); + + // optional string type_name = 6; + 
inline bool has_type_name() const; + inline void clear_type_name(); + static const int kTypeNameFieldNumber = 6; + inline const ::std::string& type_name() const; + inline void set_type_name(const ::std::string& value); + inline void set_type_name(const char* value); + inline void set_type_name(const char* value, size_t size); + inline ::std::string* mutable_type_name(); + inline ::std::string* release_type_name(); + + // optional string extendee = 2; + inline bool has_extendee() const; + inline void clear_extendee(); + static const int kExtendeeFieldNumber = 2; + inline const ::std::string& extendee() const; + inline void set_extendee(const ::std::string& value); + inline void set_extendee(const char* value); + inline void set_extendee(const char* value, size_t size); + inline ::std::string* mutable_extendee(); + inline ::std::string* release_extendee(); + + // optional string default_value = 7; + inline bool has_default_value() const; + inline void clear_default_value(); + static const int kDefaultValueFieldNumber = 7; + inline const ::std::string& default_value() const; + inline void set_default_value(const ::std::string& value); + inline void set_default_value(const char* value); + inline void set_default_value(const char* value, size_t size); + inline ::std::string* mutable_default_value(); + inline ::std::string* release_default_value(); + + // optional .google.protobuf.FieldOptions options = 8; + inline bool has_options() const; + inline void clear_options(); + static const int kOptionsFieldNumber = 8; + inline const ::google::protobuf::FieldOptions& options() const; + inline ::google::protobuf::FieldOptions* mutable_options(); + inline ::google::protobuf::FieldOptions* release_options(); + + // @@protoc_insertion_point(class_scope:google.protobuf.FieldDescriptorProto) + private: + inline void set_has_name(); + inline void clear_has_name(); + inline void set_has_number(); + inline void clear_has_number(); + inline void set_has_label(); + inline void clear_has_label(); + inline void set_has_type(); + inline void clear_has_type(); + inline void set_has_type_name(); + inline void clear_has_type_name(); + inline void set_has_extendee(); + inline void clear_has_extendee(); + inline void set_has_default_value(); + inline void clear_has_default_value(); + inline void set_has_options(); + inline void clear_has_options(); + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::std::string* name_; + ::google::protobuf::int32 number_; + int label_; + ::std::string* type_name_; + ::std::string* extendee_; + ::std::string* default_value_; + ::google::protobuf::FieldOptions* options_; + int type_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(8 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static FieldDescriptorProto* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT EnumDescriptorProto : public ::google::protobuf::Message { + public: + EnumDescriptorProto(); + virtual ~EnumDescriptorProto(); + + EnumDescriptorProto(const EnumDescriptorProto& from); + + inline EnumDescriptorProto& operator=(const EnumDescriptorProto& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + 
return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const EnumDescriptorProto& default_instance(); + + void Swap(EnumDescriptorProto* other); + + // implements Message ---------------------------------------------- + + EnumDescriptorProto* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const EnumDescriptorProto& from); + void MergeFrom(const EnumDescriptorProto& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // optional string name = 1; + inline bool has_name() const; + inline void clear_name(); + static const int kNameFieldNumber = 1; + inline const ::std::string& name() const; + inline void set_name(const ::std::string& value); + inline void set_name(const char* value); + inline void set_name(const char* value, size_t size); + inline ::std::string* mutable_name(); + inline ::std::string* release_name(); + + // repeated .google.protobuf.EnumValueDescriptorProto value = 2; + inline int value_size() const; + inline void clear_value(); + static const int kValueFieldNumber = 2; + inline const ::google::protobuf::EnumValueDescriptorProto& value(int index) const; + inline ::google::protobuf::EnumValueDescriptorProto* mutable_value(int index); + inline ::google::protobuf::EnumValueDescriptorProto* add_value(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::EnumValueDescriptorProto >& + value() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::EnumValueDescriptorProto >* + mutable_value(); + + // optional .google.protobuf.EnumOptions options = 3; + inline bool has_options() const; + inline void clear_options(); + static const int kOptionsFieldNumber = 3; + inline const ::google::protobuf::EnumOptions& options() const; + inline ::google::protobuf::EnumOptions* mutable_options(); + inline ::google::protobuf::EnumOptions* release_options(); + + // @@protoc_insertion_point(class_scope:google.protobuf.EnumDescriptorProto) + private: + inline void set_has_name(); + inline void clear_has_name(); + inline void set_has_options(); + inline void clear_has_options(); + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::std::string* name_; + ::google::protobuf::RepeatedPtrField< ::google::protobuf::EnumValueDescriptorProto > value_; + ::google::protobuf::EnumOptions* options_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(3 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void 
InitAsDefaultInstance(); + static EnumDescriptorProto* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT EnumValueDescriptorProto : public ::google::protobuf::Message { + public: + EnumValueDescriptorProto(); + virtual ~EnumValueDescriptorProto(); + + EnumValueDescriptorProto(const EnumValueDescriptorProto& from); + + inline EnumValueDescriptorProto& operator=(const EnumValueDescriptorProto& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const EnumValueDescriptorProto& default_instance(); + + void Swap(EnumValueDescriptorProto* other); + + // implements Message ---------------------------------------------- + + EnumValueDescriptorProto* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const EnumValueDescriptorProto& from); + void MergeFrom(const EnumValueDescriptorProto& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // optional string name = 1; + inline bool has_name() const; + inline void clear_name(); + static const int kNameFieldNumber = 1; + inline const ::std::string& name() const; + inline void set_name(const ::std::string& value); + inline void set_name(const char* value); + inline void set_name(const char* value, size_t size); + inline ::std::string* mutable_name(); + inline ::std::string* release_name(); + + // optional int32 number = 2; + inline bool has_number() const; + inline void clear_number(); + static const int kNumberFieldNumber = 2; + inline ::google::protobuf::int32 number() const; + inline void set_number(::google::protobuf::int32 value); + + // optional .google.protobuf.EnumValueOptions options = 3; + inline bool has_options() const; + inline void clear_options(); + static const int kOptionsFieldNumber = 3; + inline const ::google::protobuf::EnumValueOptions& options() const; + inline ::google::protobuf::EnumValueOptions* mutable_options(); + inline ::google::protobuf::EnumValueOptions* release_options(); + + // @@protoc_insertion_point(class_scope:google.protobuf.EnumValueDescriptorProto) + private: + inline void set_has_name(); + inline void clear_has_name(); + inline void set_has_number(); + inline void clear_has_number(); + inline void set_has_options(); + inline void clear_has_options(); + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::std::string* name_; + ::google::protobuf::EnumValueOptions* options_; + ::google::protobuf::int32 number_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(3 + 31) / 32]; + + friend 
void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static EnumValueDescriptorProto* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT ServiceDescriptorProto : public ::google::protobuf::Message { + public: + ServiceDescriptorProto(); + virtual ~ServiceDescriptorProto(); + + ServiceDescriptorProto(const ServiceDescriptorProto& from); + + inline ServiceDescriptorProto& operator=(const ServiceDescriptorProto& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const ServiceDescriptorProto& default_instance(); + + void Swap(ServiceDescriptorProto* other); + + // implements Message ---------------------------------------------- + + ServiceDescriptorProto* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const ServiceDescriptorProto& from); + void MergeFrom(const ServiceDescriptorProto& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // optional string name = 1; + inline bool has_name() const; + inline void clear_name(); + static const int kNameFieldNumber = 1; + inline const ::std::string& name() const; + inline void set_name(const ::std::string& value); + inline void set_name(const char* value); + inline void set_name(const char* value, size_t size); + inline ::std::string* mutable_name(); + inline ::std::string* release_name(); + + // repeated .google.protobuf.MethodDescriptorProto method = 2; + inline int method_size() const; + inline void clear_method(); + static const int kMethodFieldNumber = 2; + inline const ::google::protobuf::MethodDescriptorProto& method(int index) const; + inline ::google::protobuf::MethodDescriptorProto* mutable_method(int index); + inline ::google::protobuf::MethodDescriptorProto* add_method(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::MethodDescriptorProto >& + method() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::MethodDescriptorProto >* + mutable_method(); + + // optional .google.protobuf.ServiceOptions options = 3; + inline bool has_options() const; + inline void clear_options(); + static const int kOptionsFieldNumber = 3; + inline const ::google::protobuf::ServiceOptions& options() const; + inline ::google::protobuf::ServiceOptions* mutable_options(); + inline 
::google::protobuf::ServiceOptions* release_options(); + + // @@protoc_insertion_point(class_scope:google.protobuf.ServiceDescriptorProto) + private: + inline void set_has_name(); + inline void clear_has_name(); + inline void set_has_options(); + inline void clear_has_options(); + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::std::string* name_; + ::google::protobuf::RepeatedPtrField< ::google::protobuf::MethodDescriptorProto > method_; + ::google::protobuf::ServiceOptions* options_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(3 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static ServiceDescriptorProto* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT MethodDescriptorProto : public ::google::protobuf::Message { + public: + MethodDescriptorProto(); + virtual ~MethodDescriptorProto(); + + MethodDescriptorProto(const MethodDescriptorProto& from); + + inline MethodDescriptorProto& operator=(const MethodDescriptorProto& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const MethodDescriptorProto& default_instance(); + + void Swap(MethodDescriptorProto* other); + + // implements Message ---------------------------------------------- + + MethodDescriptorProto* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const MethodDescriptorProto& from); + void MergeFrom(const MethodDescriptorProto& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // optional string name = 1; + inline bool has_name() const; + inline void clear_name(); + static const int kNameFieldNumber = 1; + inline const ::std::string& name() const; + inline void set_name(const ::std::string& value); + inline void set_name(const char* value); + inline void set_name(const char* value, size_t size); + inline ::std::string* mutable_name(); + inline ::std::string* release_name(); + + // optional string input_type = 2; + inline bool has_input_type() const; + inline void clear_input_type(); + static const int kInputTypeFieldNumber = 2; + inline const ::std::string& input_type() const; + inline void set_input_type(const ::std::string& value); + inline void set_input_type(const char* value); + inline void set_input_type(const char* value, size_t 
size); + inline ::std::string* mutable_input_type(); + inline ::std::string* release_input_type(); + + // optional string output_type = 3; + inline bool has_output_type() const; + inline void clear_output_type(); + static const int kOutputTypeFieldNumber = 3; + inline const ::std::string& output_type() const; + inline void set_output_type(const ::std::string& value); + inline void set_output_type(const char* value); + inline void set_output_type(const char* value, size_t size); + inline ::std::string* mutable_output_type(); + inline ::std::string* release_output_type(); + + // optional .google.protobuf.MethodOptions options = 4; + inline bool has_options() const; + inline void clear_options(); + static const int kOptionsFieldNumber = 4; + inline const ::google::protobuf::MethodOptions& options() const; + inline ::google::protobuf::MethodOptions* mutable_options(); + inline ::google::protobuf::MethodOptions* release_options(); + + // @@protoc_insertion_point(class_scope:google.protobuf.MethodDescriptorProto) + private: + inline void set_has_name(); + inline void clear_has_name(); + inline void set_has_input_type(); + inline void clear_has_input_type(); + inline void set_has_output_type(); + inline void clear_has_output_type(); + inline void set_has_options(); + inline void clear_has_options(); + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::std::string* name_; + ::std::string* input_type_; + ::std::string* output_type_; + ::google::protobuf::MethodOptions* options_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(4 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static MethodDescriptorProto* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT FileOptions : public ::google::protobuf::Message { + public: + FileOptions(); + virtual ~FileOptions(); + + FileOptions(const FileOptions& from); + + inline FileOptions& operator=(const FileOptions& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const FileOptions& default_instance(); + + void Swap(FileOptions* other); + + // implements Message ---------------------------------------------- + + FileOptions* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const FileOptions& from); + void MergeFrom(const FileOptions& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types 
---------------------------------------------------- + + typedef FileOptions_OptimizeMode OptimizeMode; + static const OptimizeMode SPEED = FileOptions_OptimizeMode_SPEED; + static const OptimizeMode CODE_SIZE = FileOptions_OptimizeMode_CODE_SIZE; + static const OptimizeMode LITE_RUNTIME = FileOptions_OptimizeMode_LITE_RUNTIME; + static inline bool OptimizeMode_IsValid(int value) { + return FileOptions_OptimizeMode_IsValid(value); + } + static const OptimizeMode OptimizeMode_MIN = + FileOptions_OptimizeMode_OptimizeMode_MIN; + static const OptimizeMode OptimizeMode_MAX = + FileOptions_OptimizeMode_OptimizeMode_MAX; + static const int OptimizeMode_ARRAYSIZE = + FileOptions_OptimizeMode_OptimizeMode_ARRAYSIZE; + static inline const ::google::protobuf::EnumDescriptor* + OptimizeMode_descriptor() { + return FileOptions_OptimizeMode_descriptor(); + } + static inline const ::std::string& OptimizeMode_Name(OptimizeMode value) { + return FileOptions_OptimizeMode_Name(value); + } + static inline bool OptimizeMode_Parse(const ::std::string& name, + OptimizeMode* value) { + return FileOptions_OptimizeMode_Parse(name, value); + } + + // accessors ------------------------------------------------------- + + // optional string java_package = 1; + inline bool has_java_package() const; + inline void clear_java_package(); + static const int kJavaPackageFieldNumber = 1; + inline const ::std::string& java_package() const; + inline void set_java_package(const ::std::string& value); + inline void set_java_package(const char* value); + inline void set_java_package(const char* value, size_t size); + inline ::std::string* mutable_java_package(); + inline ::std::string* release_java_package(); + + // optional string java_outer_classname = 8; + inline bool has_java_outer_classname() const; + inline void clear_java_outer_classname(); + static const int kJavaOuterClassnameFieldNumber = 8; + inline const ::std::string& java_outer_classname() const; + inline void set_java_outer_classname(const ::std::string& value); + inline void set_java_outer_classname(const char* value); + inline void set_java_outer_classname(const char* value, size_t size); + inline ::std::string* mutable_java_outer_classname(); + inline ::std::string* release_java_outer_classname(); + + // optional bool java_multiple_files = 10 [default = false]; + inline bool has_java_multiple_files() const; + inline void clear_java_multiple_files(); + static const int kJavaMultipleFilesFieldNumber = 10; + inline bool java_multiple_files() const; + inline void set_java_multiple_files(bool value); + + // optional bool java_generate_equals_and_hash = 20 [default = false]; + inline bool has_java_generate_equals_and_hash() const; + inline void clear_java_generate_equals_and_hash(); + static const int kJavaGenerateEqualsAndHashFieldNumber = 20; + inline bool java_generate_equals_and_hash() const; + inline void set_java_generate_equals_and_hash(bool value); + + // optional .google.protobuf.FileOptions.OptimizeMode optimize_for = 9 [default = SPEED]; + inline bool has_optimize_for() const; + inline void clear_optimize_for(); + static const int kOptimizeForFieldNumber = 9; + inline ::google::protobuf::FileOptions_OptimizeMode optimize_for() const; + inline void set_optimize_for(::google::protobuf::FileOptions_OptimizeMode value); + + // optional bool cc_generic_services = 16 [default = false]; + inline bool has_cc_generic_services() const; + inline void clear_cc_generic_services(); + static const int kCcGenericServicesFieldNumber = 16; + inline bool cc_generic_services() 
const; + inline void set_cc_generic_services(bool value); + + // optional bool java_generic_services = 17 [default = false]; + inline bool has_java_generic_services() const; + inline void clear_java_generic_services(); + static const int kJavaGenericServicesFieldNumber = 17; + inline bool java_generic_services() const; + inline void set_java_generic_services(bool value); + + // optional bool py_generic_services = 18 [default = false]; + inline bool has_py_generic_services() const; + inline void clear_py_generic_services(); + static const int kPyGenericServicesFieldNumber = 18; + inline bool py_generic_services() const; + inline void set_py_generic_services(bool value); + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + inline int uninterpreted_option_size() const; + inline void clear_uninterpreted_option(); + static const int kUninterpretedOptionFieldNumber = 999; + inline const ::google::protobuf::UninterpretedOption& uninterpreted_option(int index) const; + inline ::google::protobuf::UninterpretedOption* mutable_uninterpreted_option(int index); + inline ::google::protobuf::UninterpretedOption* add_uninterpreted_option(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >& + uninterpreted_option() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >* + mutable_uninterpreted_option(); + + GOOGLE_PROTOBUF_EXTENSION_ACCESSORS(FileOptions) + // @@protoc_insertion_point(class_scope:google.protobuf.FileOptions) + private: + inline void set_has_java_package(); + inline void clear_has_java_package(); + inline void set_has_java_outer_classname(); + inline void clear_has_java_outer_classname(); + inline void set_has_java_multiple_files(); + inline void clear_has_java_multiple_files(); + inline void set_has_java_generate_equals_and_hash(); + inline void clear_has_java_generate_equals_and_hash(); + inline void set_has_optimize_for(); + inline void clear_has_optimize_for(); + inline void set_has_cc_generic_services(); + inline void clear_has_cc_generic_services(); + inline void set_has_java_generic_services(); + inline void clear_has_java_generic_services(); + inline void set_has_py_generic_services(); + inline void clear_has_py_generic_services(); + + ::google::protobuf::internal::ExtensionSet _extensions_; + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::std::string* java_package_; + ::std::string* java_outer_classname_; + int optimize_for_; + bool java_multiple_files_; + bool java_generate_equals_and_hash_; + bool cc_generic_services_; + bool java_generic_services_; + ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption > uninterpreted_option_; + bool py_generic_services_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(9 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static FileOptions* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT MessageOptions : public ::google::protobuf::Message { + public: + MessageOptions(); + virtual ~MessageOptions(); + + MessageOptions(const MessageOptions& from); + + inline MessageOptions& operator=(const MessageOptions& from) { + CopyFrom(from); + return *this; + } + + 
inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const MessageOptions& default_instance(); + + void Swap(MessageOptions* other); + + // implements Message ---------------------------------------------- + + MessageOptions* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const MessageOptions& from); + void MergeFrom(const MessageOptions& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // optional bool message_set_wire_format = 1 [default = false]; + inline bool has_message_set_wire_format() const; + inline void clear_message_set_wire_format(); + static const int kMessageSetWireFormatFieldNumber = 1; + inline bool message_set_wire_format() const; + inline void set_message_set_wire_format(bool value); + + // optional bool no_standard_descriptor_accessor = 2 [default = false]; + inline bool has_no_standard_descriptor_accessor() const; + inline void clear_no_standard_descriptor_accessor(); + static const int kNoStandardDescriptorAccessorFieldNumber = 2; + inline bool no_standard_descriptor_accessor() const; + inline void set_no_standard_descriptor_accessor(bool value); + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + inline int uninterpreted_option_size() const; + inline void clear_uninterpreted_option(); + static const int kUninterpretedOptionFieldNumber = 999; + inline const ::google::protobuf::UninterpretedOption& uninterpreted_option(int index) const; + inline ::google::protobuf::UninterpretedOption* mutable_uninterpreted_option(int index); + inline ::google::protobuf::UninterpretedOption* add_uninterpreted_option(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >& + uninterpreted_option() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >* + mutable_uninterpreted_option(); + + GOOGLE_PROTOBUF_EXTENSION_ACCESSORS(MessageOptions) + // @@protoc_insertion_point(class_scope:google.protobuf.MessageOptions) + private: + inline void set_has_message_set_wire_format(); + inline void clear_has_message_set_wire_format(); + inline void set_has_no_standard_descriptor_accessor(); + inline void clear_has_no_standard_descriptor_accessor(); + + ::google::protobuf::internal::ExtensionSet _extensions_; + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption > uninterpreted_option_; + bool message_set_wire_format_; + bool no_standard_descriptor_accessor_; + + mutable int _cached_size_; + ::google::protobuf::uint32 
_has_bits_[(3 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static MessageOptions* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT FieldOptions : public ::google::protobuf::Message { + public: + FieldOptions(); + virtual ~FieldOptions(); + + FieldOptions(const FieldOptions& from); + + inline FieldOptions& operator=(const FieldOptions& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const FieldOptions& default_instance(); + + void Swap(FieldOptions* other); + + // implements Message ---------------------------------------------- + + FieldOptions* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const FieldOptions& from); + void MergeFrom(const FieldOptions& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + typedef FieldOptions_CType CType; + static const CType STRING = FieldOptions_CType_STRING; + static const CType CORD = FieldOptions_CType_CORD; + static const CType STRING_PIECE = FieldOptions_CType_STRING_PIECE; + static inline bool CType_IsValid(int value) { + return FieldOptions_CType_IsValid(value); + } + static const CType CType_MIN = + FieldOptions_CType_CType_MIN; + static const CType CType_MAX = + FieldOptions_CType_CType_MAX; + static const int CType_ARRAYSIZE = + FieldOptions_CType_CType_ARRAYSIZE; + static inline const ::google::protobuf::EnumDescriptor* + CType_descriptor() { + return FieldOptions_CType_descriptor(); + } + static inline const ::std::string& CType_Name(CType value) { + return FieldOptions_CType_Name(value); + } + static inline bool CType_Parse(const ::std::string& name, + CType* value) { + return FieldOptions_CType_Parse(name, value); + } + + // accessors ------------------------------------------------------- + + // optional .google.protobuf.FieldOptions.CType ctype = 1 [default = STRING]; + inline bool has_ctype() const; + inline void clear_ctype(); + static const int kCtypeFieldNumber = 1; + inline ::google::protobuf::FieldOptions_CType ctype() const; + inline void set_ctype(::google::protobuf::FieldOptions_CType value); + + // optional bool packed = 2; + inline bool has_packed() const; + inline void clear_packed(); + static const int kPackedFieldNumber = 2; + inline bool packed() const; + inline void set_packed(bool value); + + // optional bool deprecated = 3 [default = 
false]; + inline bool has_deprecated() const; + inline void clear_deprecated(); + static const int kDeprecatedFieldNumber = 3; + inline bool deprecated() const; + inline void set_deprecated(bool value); + + // optional string experimental_map_key = 9; + inline bool has_experimental_map_key() const; + inline void clear_experimental_map_key(); + static const int kExperimentalMapKeyFieldNumber = 9; + inline const ::std::string& experimental_map_key() const; + inline void set_experimental_map_key(const ::std::string& value); + inline void set_experimental_map_key(const char* value); + inline void set_experimental_map_key(const char* value, size_t size); + inline ::std::string* mutable_experimental_map_key(); + inline ::std::string* release_experimental_map_key(); + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + inline int uninterpreted_option_size() const; + inline void clear_uninterpreted_option(); + static const int kUninterpretedOptionFieldNumber = 999; + inline const ::google::protobuf::UninterpretedOption& uninterpreted_option(int index) const; + inline ::google::protobuf::UninterpretedOption* mutable_uninterpreted_option(int index); + inline ::google::protobuf::UninterpretedOption* add_uninterpreted_option(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >& + uninterpreted_option() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >* + mutable_uninterpreted_option(); + + GOOGLE_PROTOBUF_EXTENSION_ACCESSORS(FieldOptions) + // @@protoc_insertion_point(class_scope:google.protobuf.FieldOptions) + private: + inline void set_has_ctype(); + inline void clear_has_ctype(); + inline void set_has_packed(); + inline void clear_has_packed(); + inline void set_has_deprecated(); + inline void clear_has_deprecated(); + inline void set_has_experimental_map_key(); + inline void clear_has_experimental_map_key(); + + ::google::protobuf::internal::ExtensionSet _extensions_; + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + int ctype_; + bool packed_; + bool deprecated_; + ::std::string* experimental_map_key_; + ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption > uninterpreted_option_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(5 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static FieldOptions* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT EnumOptions : public ::google::protobuf::Message { + public: + EnumOptions(); + virtual ~EnumOptions(); + + EnumOptions(const EnumOptions& from); + + inline EnumOptions& operator=(const EnumOptions& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const EnumOptions& default_instance(); + + void Swap(EnumOptions* other); + + // implements Message ---------------------------------------------- + + EnumOptions* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + 
void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const EnumOptions& from); + void MergeFrom(const EnumOptions& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + inline int uninterpreted_option_size() const; + inline void clear_uninterpreted_option(); + static const int kUninterpretedOptionFieldNumber = 999; + inline const ::google::protobuf::UninterpretedOption& uninterpreted_option(int index) const; + inline ::google::protobuf::UninterpretedOption* mutable_uninterpreted_option(int index); + inline ::google::protobuf::UninterpretedOption* add_uninterpreted_option(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >& + uninterpreted_option() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >* + mutable_uninterpreted_option(); + + GOOGLE_PROTOBUF_EXTENSION_ACCESSORS(EnumOptions) + // @@protoc_insertion_point(class_scope:google.protobuf.EnumOptions) + private: + + ::google::protobuf::internal::ExtensionSet _extensions_; + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption > uninterpreted_option_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(1 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static EnumOptions* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT EnumValueOptions : public ::google::protobuf::Message { + public: + EnumValueOptions(); + virtual ~EnumValueOptions(); + + EnumValueOptions(const EnumValueOptions& from); + + inline EnumValueOptions& operator=(const EnumValueOptions& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const EnumValueOptions& default_instance(); + + void Swap(EnumValueOptions* other); + + // implements Message ---------------------------------------------- + + EnumValueOptions* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const EnumValueOptions& from); + void MergeFrom(const EnumValueOptions& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool 
MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + inline int uninterpreted_option_size() const; + inline void clear_uninterpreted_option(); + static const int kUninterpretedOptionFieldNumber = 999; + inline const ::google::protobuf::UninterpretedOption& uninterpreted_option(int index) const; + inline ::google::protobuf::UninterpretedOption* mutable_uninterpreted_option(int index); + inline ::google::protobuf::UninterpretedOption* add_uninterpreted_option(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >& + uninterpreted_option() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >* + mutable_uninterpreted_option(); + + GOOGLE_PROTOBUF_EXTENSION_ACCESSORS(EnumValueOptions) + // @@protoc_insertion_point(class_scope:google.protobuf.EnumValueOptions) + private: + + ::google::protobuf::internal::ExtensionSet _extensions_; + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption > uninterpreted_option_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(1 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static EnumValueOptions* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT ServiceOptions : public ::google::protobuf::Message { + public: + ServiceOptions(); + virtual ~ServiceOptions(); + + ServiceOptions(const ServiceOptions& from); + + inline ServiceOptions& operator=(const ServiceOptions& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const ServiceOptions& default_instance(); + + void Swap(ServiceOptions* other); + + // implements Message ---------------------------------------------- + + ServiceOptions* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const ServiceOptions& from); + void MergeFrom(const ServiceOptions& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* 
SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + inline int uninterpreted_option_size() const; + inline void clear_uninterpreted_option(); + static const int kUninterpretedOptionFieldNumber = 999; + inline const ::google::protobuf::UninterpretedOption& uninterpreted_option(int index) const; + inline ::google::protobuf::UninterpretedOption* mutable_uninterpreted_option(int index); + inline ::google::protobuf::UninterpretedOption* add_uninterpreted_option(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >& + uninterpreted_option() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >* + mutable_uninterpreted_option(); + + GOOGLE_PROTOBUF_EXTENSION_ACCESSORS(ServiceOptions) + // @@protoc_insertion_point(class_scope:google.protobuf.ServiceOptions) + private: + + ::google::protobuf::internal::ExtensionSet _extensions_; + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption > uninterpreted_option_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(1 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static ServiceOptions* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT MethodOptions : public ::google::protobuf::Message { + public: + MethodOptions(); + virtual ~MethodOptions(); + + MethodOptions(const MethodOptions& from); + + inline MethodOptions& operator=(const MethodOptions& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const MethodOptions& default_instance(); + + void Swap(MethodOptions* other); + + // implements Message ---------------------------------------------- + + MethodOptions* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const MethodOptions& from); + void MergeFrom(const MethodOptions& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() 
const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; + inline int uninterpreted_option_size() const; + inline void clear_uninterpreted_option(); + static const int kUninterpretedOptionFieldNumber = 999; + inline const ::google::protobuf::UninterpretedOption& uninterpreted_option(int index) const; + inline ::google::protobuf::UninterpretedOption* mutable_uninterpreted_option(int index); + inline ::google::protobuf::UninterpretedOption* add_uninterpreted_option(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >& + uninterpreted_option() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >* + mutable_uninterpreted_option(); + + GOOGLE_PROTOBUF_EXTENSION_ACCESSORS(MethodOptions) + // @@protoc_insertion_point(class_scope:google.protobuf.MethodOptions) + private: + + ::google::protobuf::internal::ExtensionSet _extensions_; + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption > uninterpreted_option_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(1 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static MethodOptions* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT UninterpretedOption_NamePart : public ::google::protobuf::Message { + public: + UninterpretedOption_NamePart(); + virtual ~UninterpretedOption_NamePart(); + + UninterpretedOption_NamePart(const UninterpretedOption_NamePart& from); + + inline UninterpretedOption_NamePart& operator=(const UninterpretedOption_NamePart& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const UninterpretedOption_NamePart& default_instance(); + + void Swap(UninterpretedOption_NamePart* other); + + // implements Message ---------------------------------------------- + + UninterpretedOption_NamePart* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const UninterpretedOption_NamePart& from); + void MergeFrom(const UninterpretedOption_NamePart& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors 
------------------------------------------------------- + + // required string name_part = 1; + inline bool has_name_part() const; + inline void clear_name_part(); + static const int kNamePartFieldNumber = 1; + inline const ::std::string& name_part() const; + inline void set_name_part(const ::std::string& value); + inline void set_name_part(const char* value); + inline void set_name_part(const char* value, size_t size); + inline ::std::string* mutable_name_part(); + inline ::std::string* release_name_part(); + + // required bool is_extension = 2; + inline bool has_is_extension() const; + inline void clear_is_extension(); + static const int kIsExtensionFieldNumber = 2; + inline bool is_extension() const; + inline void set_is_extension(bool value); + + // @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption.NamePart) + private: + inline void set_has_name_part(); + inline void clear_has_name_part(); + inline void set_has_is_extension(); + inline void clear_has_is_extension(); + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::std::string* name_part_; + bool is_extension_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(2 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static UninterpretedOption_NamePart* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT UninterpretedOption : public ::google::protobuf::Message { + public: + UninterpretedOption(); + virtual ~UninterpretedOption(); + + UninterpretedOption(const UninterpretedOption& from); + + inline UninterpretedOption& operator=(const UninterpretedOption& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const UninterpretedOption& default_instance(); + + void Swap(UninterpretedOption* other); + + // implements Message ---------------------------------------------- + + UninterpretedOption* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const UninterpretedOption& from); + void MergeFrom(const UninterpretedOption& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + typedef UninterpretedOption_NamePart NamePart; + + // accessors ------------------------------------------------------- + + // repeated .google.protobuf.UninterpretedOption.NamePart name = 2; + inline int name_size() const; + inline void clear_name(); + 
static const int kNameFieldNumber = 2; + inline const ::google::protobuf::UninterpretedOption_NamePart& name(int index) const; + inline ::google::protobuf::UninterpretedOption_NamePart* mutable_name(int index); + inline ::google::protobuf::UninterpretedOption_NamePart* add_name(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption_NamePart >& + name() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption_NamePart >* + mutable_name(); + + // optional string identifier_value = 3; + inline bool has_identifier_value() const; + inline void clear_identifier_value(); + static const int kIdentifierValueFieldNumber = 3; + inline const ::std::string& identifier_value() const; + inline void set_identifier_value(const ::std::string& value); + inline void set_identifier_value(const char* value); + inline void set_identifier_value(const char* value, size_t size); + inline ::std::string* mutable_identifier_value(); + inline ::std::string* release_identifier_value(); + + // optional uint64 positive_int_value = 4; + inline bool has_positive_int_value() const; + inline void clear_positive_int_value(); + static const int kPositiveIntValueFieldNumber = 4; + inline ::google::protobuf::uint64 positive_int_value() const; + inline void set_positive_int_value(::google::protobuf::uint64 value); + + // optional int64 negative_int_value = 5; + inline bool has_negative_int_value() const; + inline void clear_negative_int_value(); + static const int kNegativeIntValueFieldNumber = 5; + inline ::google::protobuf::int64 negative_int_value() const; + inline void set_negative_int_value(::google::protobuf::int64 value); + + // optional double double_value = 6; + inline bool has_double_value() const; + inline void clear_double_value(); + static const int kDoubleValueFieldNumber = 6; + inline double double_value() const; + inline void set_double_value(double value); + + // optional bytes string_value = 7; + inline bool has_string_value() const; + inline void clear_string_value(); + static const int kStringValueFieldNumber = 7; + inline const ::std::string& string_value() const; + inline void set_string_value(const ::std::string& value); + inline void set_string_value(const char* value); + inline void set_string_value(const void* value, size_t size); + inline ::std::string* mutable_string_value(); + inline ::std::string* release_string_value(); + + // optional string aggregate_value = 8; + inline bool has_aggregate_value() const; + inline void clear_aggregate_value(); + static const int kAggregateValueFieldNumber = 8; + inline const ::std::string& aggregate_value() const; + inline void set_aggregate_value(const ::std::string& value); + inline void set_aggregate_value(const char* value); + inline void set_aggregate_value(const char* value, size_t size); + inline ::std::string* mutable_aggregate_value(); + inline ::std::string* release_aggregate_value(); + + // @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption) + private: + inline void set_has_identifier_value(); + inline void clear_has_identifier_value(); + inline void set_has_positive_int_value(); + inline void clear_has_positive_int_value(); + inline void set_has_negative_int_value(); + inline void clear_has_negative_int_value(); + inline void set_has_double_value(); + inline void clear_has_double_value(); + inline void set_has_string_value(); + inline void clear_has_string_value(); + inline void set_has_aggregate_value(); + inline void clear_has_aggregate_value(); + + 
::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption_NamePart > name_; + ::std::string* identifier_value_; + ::google::protobuf::uint64 positive_int_value_; + ::google::protobuf::int64 negative_int_value_; + double double_value_; + ::std::string* string_value_; + ::std::string* aggregate_value_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(7 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static UninterpretedOption* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT SourceCodeInfo_Location : public ::google::protobuf::Message { + public: + SourceCodeInfo_Location(); + virtual ~SourceCodeInfo_Location(); + + SourceCodeInfo_Location(const SourceCodeInfo_Location& from); + + inline SourceCodeInfo_Location& operator=(const SourceCodeInfo_Location& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const SourceCodeInfo_Location& default_instance(); + + void Swap(SourceCodeInfo_Location* other); + + // implements Message ---------------------------------------------- + + SourceCodeInfo_Location* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const SourceCodeInfo_Location& from); + void MergeFrom(const SourceCodeInfo_Location& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated int32 path = 1 [packed = true]; + inline int path_size() const; + inline void clear_path(); + static const int kPathFieldNumber = 1; + inline ::google::protobuf::int32 path(int index) const; + inline void set_path(int index, ::google::protobuf::int32 value); + inline void add_path(::google::protobuf::int32 value); + inline const ::google::protobuf::RepeatedField< ::google::protobuf::int32 >& + path() const; + inline ::google::protobuf::RepeatedField< ::google::protobuf::int32 >* + mutable_path(); + + // repeated int32 span = 2 [packed = true]; + inline int span_size() const; + inline void clear_span(); + static const int kSpanFieldNumber = 2; + inline ::google::protobuf::int32 span(int index) const; + inline void set_span(int index, ::google::protobuf::int32 value); + inline void add_span(::google::protobuf::int32 value); + inline const 
::google::protobuf::RepeatedField< ::google::protobuf::int32 >& + span() const; + inline ::google::protobuf::RepeatedField< ::google::protobuf::int32 >* + mutable_span(); + + // @@protoc_insertion_point(class_scope:google.protobuf.SourceCodeInfo.Location) + private: + + ::google::protobuf::UnknownFieldSet _unknown_fields_; + + ::google::protobuf::RepeatedField< ::google::protobuf::int32 > path_; + mutable int _path_cached_byte_size_; + ::google::protobuf::RepeatedField< ::google::protobuf::int32 > span_; + mutable int _span_cached_byte_size_; + + mutable int _cached_size_; + ::google::protobuf::uint32 _has_bits_[(2 + 31) / 32]; + + friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto(); + friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto(); + + void InitAsDefaultInstance(); + static SourceCodeInfo_Location* default_instance_; +}; +// ------------------------------------------------------------------- + +class LIBPROTOBUF_EXPORT SourceCodeInfo : public ::google::protobuf::Message { + public: + SourceCodeInfo(); + virtual ~SourceCodeInfo(); + + SourceCodeInfo(const SourceCodeInfo& from); + + inline SourceCodeInfo& operator=(const SourceCodeInfo& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _unknown_fields_; + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return &_unknown_fields_; + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const SourceCodeInfo& default_instance(); + + void Swap(SourceCodeInfo* other); + + // implements Message ---------------------------------------------- + + SourceCodeInfo* New() const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const SourceCodeInfo& from); + void MergeFrom(const SourceCodeInfo& from); + void Clear(); + bool IsInitialized() const; + + int ByteSize() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const; + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + typedef SourceCodeInfo_Location Location; + + // accessors ------------------------------------------------------- + + // repeated .google.protobuf.SourceCodeInfo.Location location = 1; + inline int location_size() const; + inline void clear_location(); + static const int kLocationFieldNumber = 1; + inline const ::google::protobuf::SourceCodeInfo_Location& location(int index) const; + inline ::google::protobuf::SourceCodeInfo_Location* mutable_location(int index); + inline ::google::protobuf::SourceCodeInfo_Location* add_location(); + inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::SourceCodeInfo_Location >& + location() const; + inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::SourceCodeInfo_Location >* + mutable_location(); + + // @@protoc_insertion_point(class_scope:google.protobuf.SourceCodeInfo) + private: + + 
  ::google::protobuf::UnknownFieldSet _unknown_fields_;
+
+  ::google::protobuf::RepeatedPtrField< ::google::protobuf::SourceCodeInfo_Location > location_;
+
+  mutable int _cached_size_;
+  ::google::protobuf::uint32 _has_bits_[(1 + 31) / 32];
+
+  friend void LIBPROTOBUF_EXPORT protobuf_AddDesc_google_2fprotobuf_2fdescriptor_2eproto();
+  friend void protobuf_AssignDesc_google_2fprotobuf_2fdescriptor_2eproto();
+  friend void protobuf_ShutdownFile_google_2fprotobuf_2fdescriptor_2eproto();
+
+  void InitAsDefaultInstance();
+  static SourceCodeInfo* default_instance_;
+};
+// ===================================================================
+
+
+// ===================================================================
+
+// FileDescriptorSet
+
+// repeated .google.protobuf.FileDescriptorProto file = 1;
+inline int FileDescriptorSet::file_size() const {
+  return file_.size();
+}
+inline void FileDescriptorSet::clear_file() {
+  file_.Clear();
+}
+inline const ::google::protobuf::FileDescriptorProto& FileDescriptorSet::file(int index) const {
+  return file_.Get(index);
+}
+inline ::google::protobuf::FileDescriptorProto* FileDescriptorSet::mutable_file(int index) {
+  return file_.Mutable(index);
+}
+inline ::google::protobuf::FileDescriptorProto* FileDescriptorSet::add_file() {
+  return file_.Add();
+}
+inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::FileDescriptorProto >&
+FileDescriptorSet::file() const {
+  return file_;
+}
+inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::FileDescriptorProto >*
+FileDescriptorSet::mutable_file() {
+  return &file_;
+}
+
+// -------------------------------------------------------------------
+
+// FileDescriptorProto
+
+// optional string name = 1;
+inline bool FileDescriptorProto::has_name() const {
+  return (_has_bits_[0] & 0x00000001u) != 0;
+}
+inline void FileDescriptorProto::set_has_name() {
+  _has_bits_[0] |= 0x00000001u;
+}
+inline void FileDescriptorProto::clear_has_name() {
+  _has_bits_[0] &= ~0x00000001u;
+}
+inline void FileDescriptorProto::clear_name() {
+  if (name_ != &::google::protobuf::internal::kEmptyString) {
+    name_->clear();
+  }
+  clear_has_name();
+}
+inline const ::std::string& FileDescriptorProto::name() const {
+  return *name_;
+}
+inline void FileDescriptorProto::set_name(const ::std::string& value) {
+  set_has_name();
+  if (name_ == &::google::protobuf::internal::kEmptyString) {
+    name_ = new ::std::string;
+  }
+  name_->assign(value);
+}
+inline void FileDescriptorProto::set_name(const char* value) {
+  set_has_name();
+  if (name_ == &::google::protobuf::internal::kEmptyString) {
+    name_ = new ::std::string;
+  }
+  name_->assign(value);
+}
+inline void FileDescriptorProto::set_name(const char* value, size_t size) {
+  set_has_name();
+  if (name_ == &::google::protobuf::internal::kEmptyString) {
+    name_ = new ::std::string;
+  }
+  name_->assign(reinterpret_cast<const char*>(value), size);
+}
+inline ::std::string* FileDescriptorProto::mutable_name() {
+  set_has_name();
+  if (name_ == &::google::protobuf::internal::kEmptyString) {
+    name_ = new ::std::string;
+  }
+  return name_;
+}
+inline ::std::string* FileDescriptorProto::release_name() {
+  clear_has_name();
+  if (name_ == &::google::protobuf::internal::kEmptyString) {
+    return NULL;
+  } else {
+    ::std::string* temp = name_;
+    name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString);
+    return temp;
+  }
+}
+
+// optional string package = 2;
+inline bool FileDescriptorProto::has_package() const {
+  return (_has_bits_[0] & 0x00000002u) != 0;
+}
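Note: the generated accessors above follow protoc's standard pattern for this protobuf generation: has_/set_/mutable_/release_ for optional string fields (with presence tracked in _has_bits_ and an internal kEmptyString sentinel shared until a field is first written) and size/Get/Add for repeated fields. A minimal usage sketch, not part of the diff and assuming only that the vendored libprotobuf headers are on the include path, might look like:

#include <google/protobuf/descriptor.pb.h>
#include <iostream>

int main() {
  google::protobuf::FileDescriptorSet set;

  // add_file() appends a new FileDescriptorProto and returns a mutable pointer.
  google::protobuf::FileDescriptorProto* file = set.add_file();
  file->set_name("example.proto");   // sets the has-bit and copies the string
  file->set_package("example.pkg");

  std::cout << set.file_size() << " file(s), first: "
            << set.file(0).name() << std::endl;

  // release_name() hands ownership of the heap string to the caller
  // and clears the has-bit.
  std::string* owned = file->release_name();
  std::cout << "has_name after release: " << file->has_name() << std::endl;
  delete owned;
  return 0;
}

The release_/has_ pairing reflects the design visible in the header: a field points at the shared kEmptyString until assigned, so release_name() returns NULL for an unset field and a caller-owned std::string otherwise.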
+inline void FileDescriptorProto::set_has_package() { + _has_bits_[0] |= 0x00000002u; +} +inline void FileDescriptorProto::clear_has_package() { + _has_bits_[0] &= ~0x00000002u; +} +inline void FileDescriptorProto::clear_package() { + if (package_ != &::google::protobuf::internal::kEmptyString) { + package_->clear(); + } + clear_has_package(); +} +inline const ::std::string& FileDescriptorProto::package() const { + return *package_; +} +inline void FileDescriptorProto::set_package(const ::std::string& value) { + set_has_package(); + if (package_ == &::google::protobuf::internal::kEmptyString) { + package_ = new ::std::string; + } + package_->assign(value); +} +inline void FileDescriptorProto::set_package(const char* value) { + set_has_package(); + if (package_ == &::google::protobuf::internal::kEmptyString) { + package_ = new ::std::string; + } + package_->assign(value); +} +inline void FileDescriptorProto::set_package(const char* value, size_t size) { + set_has_package(); + if (package_ == &::google::protobuf::internal::kEmptyString) { + package_ = new ::std::string; + } + package_->assign(reinterpret_cast(value), size); +} +inline ::std::string* FileDescriptorProto::mutable_package() { + set_has_package(); + if (package_ == &::google::protobuf::internal::kEmptyString) { + package_ = new ::std::string; + } + return package_; +} +inline ::std::string* FileDescriptorProto::release_package() { + clear_has_package(); + if (package_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = package_; + package_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// repeated string dependency = 3; +inline int FileDescriptorProto::dependency_size() const { + return dependency_.size(); +} +inline void FileDescriptorProto::clear_dependency() { + dependency_.Clear(); +} +inline const ::std::string& FileDescriptorProto::dependency(int index) const { + return dependency_.Get(index); +} +inline ::std::string* FileDescriptorProto::mutable_dependency(int index) { + return dependency_.Mutable(index); +} +inline void FileDescriptorProto::set_dependency(int index, const ::std::string& value) { + dependency_.Mutable(index)->assign(value); +} +inline void FileDescriptorProto::set_dependency(int index, const char* value) { + dependency_.Mutable(index)->assign(value); +} +inline void FileDescriptorProto::set_dependency(int index, const char* value, size_t size) { + dependency_.Mutable(index)->assign( + reinterpret_cast(value), size); +} +inline ::std::string* FileDescriptorProto::add_dependency() { + return dependency_.Add(); +} +inline void FileDescriptorProto::add_dependency(const ::std::string& value) { + dependency_.Add()->assign(value); +} +inline void FileDescriptorProto::add_dependency(const char* value) { + dependency_.Add()->assign(value); +} +inline void FileDescriptorProto::add_dependency(const char* value, size_t size) { + dependency_.Add()->assign(reinterpret_cast(value), size); +} +inline const ::google::protobuf::RepeatedPtrField< ::std::string>& +FileDescriptorProto::dependency() const { + return dependency_; +} +inline ::google::protobuf::RepeatedPtrField< ::std::string>* +FileDescriptorProto::mutable_dependency() { + return &dependency_; +} + +// repeated .google.protobuf.DescriptorProto message_type = 4; +inline int FileDescriptorProto::message_type_size() const { + return message_type_.size(); +} +inline void FileDescriptorProto::clear_message_type() { + message_type_.Clear(); +} +inline const 
::google::protobuf::DescriptorProto& FileDescriptorProto::message_type(int index) const { + return message_type_.Get(index); +} +inline ::google::protobuf::DescriptorProto* FileDescriptorProto::mutable_message_type(int index) { + return message_type_.Mutable(index); +} +inline ::google::protobuf::DescriptorProto* FileDescriptorProto::add_message_type() { + return message_type_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::DescriptorProto >& +FileDescriptorProto::message_type() const { + return message_type_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::DescriptorProto >* +FileDescriptorProto::mutable_message_type() { + return &message_type_; +} + +// repeated .google.protobuf.EnumDescriptorProto enum_type = 5; +inline int FileDescriptorProto::enum_type_size() const { + return enum_type_.size(); +} +inline void FileDescriptorProto::clear_enum_type() { + enum_type_.Clear(); +} +inline const ::google::protobuf::EnumDescriptorProto& FileDescriptorProto::enum_type(int index) const { + return enum_type_.Get(index); +} +inline ::google::protobuf::EnumDescriptorProto* FileDescriptorProto::mutable_enum_type(int index) { + return enum_type_.Mutable(index); +} +inline ::google::protobuf::EnumDescriptorProto* FileDescriptorProto::add_enum_type() { + return enum_type_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::EnumDescriptorProto >& +FileDescriptorProto::enum_type() const { + return enum_type_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::EnumDescriptorProto >* +FileDescriptorProto::mutable_enum_type() { + return &enum_type_; +} + +// repeated .google.protobuf.ServiceDescriptorProto service = 6; +inline int FileDescriptorProto::service_size() const { + return service_.size(); +} +inline void FileDescriptorProto::clear_service() { + service_.Clear(); +} +inline const ::google::protobuf::ServiceDescriptorProto& FileDescriptorProto::service(int index) const { + return service_.Get(index); +} +inline ::google::protobuf::ServiceDescriptorProto* FileDescriptorProto::mutable_service(int index) { + return service_.Mutable(index); +} +inline ::google::protobuf::ServiceDescriptorProto* FileDescriptorProto::add_service() { + return service_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::ServiceDescriptorProto >& +FileDescriptorProto::service() const { + return service_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::ServiceDescriptorProto >* +FileDescriptorProto::mutable_service() { + return &service_; +} + +// repeated .google.protobuf.FieldDescriptorProto extension = 7; +inline int FileDescriptorProto::extension_size() const { + return extension_.size(); +} +inline void FileDescriptorProto::clear_extension() { + extension_.Clear(); +} +inline const ::google::protobuf::FieldDescriptorProto& FileDescriptorProto::extension(int index) const { + return extension_.Get(index); +} +inline ::google::protobuf::FieldDescriptorProto* FileDescriptorProto::mutable_extension(int index) { + return extension_.Mutable(index); +} +inline ::google::protobuf::FieldDescriptorProto* FileDescriptorProto::add_extension() { + return extension_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::FieldDescriptorProto >& +FileDescriptorProto::extension() const { + return extension_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::FieldDescriptorProto >* +FileDescriptorProto::mutable_extension() { + return 
&extension_; +} + +// optional .google.protobuf.FileOptions options = 8; +inline bool FileDescriptorProto::has_options() const { + return (_has_bits_[0] & 0x00000080u) != 0; +} +inline void FileDescriptorProto::set_has_options() { + _has_bits_[0] |= 0x00000080u; +} +inline void FileDescriptorProto::clear_has_options() { + _has_bits_[0] &= ~0x00000080u; +} +inline void FileDescriptorProto::clear_options() { + if (options_ != NULL) options_->::google::protobuf::FileOptions::Clear(); + clear_has_options(); +} +inline const ::google::protobuf::FileOptions& FileDescriptorProto::options() const { + return options_ != NULL ? *options_ : *default_instance_->options_; +} +inline ::google::protobuf::FileOptions* FileDescriptorProto::mutable_options() { + set_has_options(); + if (options_ == NULL) options_ = new ::google::protobuf::FileOptions; + return options_; +} +inline ::google::protobuf::FileOptions* FileDescriptorProto::release_options() { + clear_has_options(); + ::google::protobuf::FileOptions* temp = options_; + options_ = NULL; + return temp; +} + +// optional .google.protobuf.SourceCodeInfo source_code_info = 9; +inline bool FileDescriptorProto::has_source_code_info() const { + return (_has_bits_[0] & 0x00000100u) != 0; +} +inline void FileDescriptorProto::set_has_source_code_info() { + _has_bits_[0] |= 0x00000100u; +} +inline void FileDescriptorProto::clear_has_source_code_info() { + _has_bits_[0] &= ~0x00000100u; +} +inline void FileDescriptorProto::clear_source_code_info() { + if (source_code_info_ != NULL) source_code_info_->::google::protobuf::SourceCodeInfo::Clear(); + clear_has_source_code_info(); +} +inline const ::google::protobuf::SourceCodeInfo& FileDescriptorProto::source_code_info() const { + return source_code_info_ != NULL ? *source_code_info_ : *default_instance_->source_code_info_; +} +inline ::google::protobuf::SourceCodeInfo* FileDescriptorProto::mutable_source_code_info() { + set_has_source_code_info(); + if (source_code_info_ == NULL) source_code_info_ = new ::google::protobuf::SourceCodeInfo; + return source_code_info_; +} +inline ::google::protobuf::SourceCodeInfo* FileDescriptorProto::release_source_code_info() { + clear_has_source_code_info(); + ::google::protobuf::SourceCodeInfo* temp = source_code_info_; + source_code_info_ = NULL; + return temp; +} + +// ------------------------------------------------------------------- + +// DescriptorProto_ExtensionRange + +// optional int32 start = 1; +inline bool DescriptorProto_ExtensionRange::has_start() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void DescriptorProto_ExtensionRange::set_has_start() { + _has_bits_[0] |= 0x00000001u; +} +inline void DescriptorProto_ExtensionRange::clear_has_start() { + _has_bits_[0] &= ~0x00000001u; +} +inline void DescriptorProto_ExtensionRange::clear_start() { + start_ = 0; + clear_has_start(); +} +inline ::google::protobuf::int32 DescriptorProto_ExtensionRange::start() const { + return start_; +} +inline void DescriptorProto_ExtensionRange::set_start(::google::protobuf::int32 value) { + set_has_start(); + start_ = value; +} + +// optional int32 end = 2; +inline bool DescriptorProto_ExtensionRange::has_end() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +inline void DescriptorProto_ExtensionRange::set_has_end() { + _has_bits_[0] |= 0x00000002u; +} +inline void DescriptorProto_ExtensionRange::clear_has_end() { + _has_bits_[0] &= ~0x00000002u; +} +inline void DescriptorProto_ExtensionRange::clear_end() { + end_ = 0; + clear_has_end(); +} +inline 
::google::protobuf::int32 DescriptorProto_ExtensionRange::end() const { + return end_; +} +inline void DescriptorProto_ExtensionRange::set_end(::google::protobuf::int32 value) { + set_has_end(); + end_ = value; +} + +// ------------------------------------------------------------------- + +// DescriptorProto + +// optional string name = 1; +inline bool DescriptorProto::has_name() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void DescriptorProto::set_has_name() { + _has_bits_[0] |= 0x00000001u; +} +inline void DescriptorProto::clear_has_name() { + _has_bits_[0] &= ~0x00000001u; +} +inline void DescriptorProto::clear_name() { + if (name_ != &::google::protobuf::internal::kEmptyString) { + name_->clear(); + } + clear_has_name(); +} +inline const ::std::string& DescriptorProto::name() const { + return *name_; +} +inline void DescriptorProto::set_name(const ::std::string& value) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(value); +} +inline void DescriptorProto::set_name(const char* value) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(value); +} +inline void DescriptorProto::set_name(const char* value, size_t size) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(reinterpret_cast(value), size); +} +inline ::std::string* DescriptorProto::mutable_name() { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + return name_; +} +inline ::std::string* DescriptorProto::release_name() { + clear_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = name_; + name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// repeated .google.protobuf.FieldDescriptorProto field = 2; +inline int DescriptorProto::field_size() const { + return field_.size(); +} +inline void DescriptorProto::clear_field() { + field_.Clear(); +} +inline const ::google::protobuf::FieldDescriptorProto& DescriptorProto::field(int index) const { + return field_.Get(index); +} +inline ::google::protobuf::FieldDescriptorProto* DescriptorProto::mutable_field(int index) { + return field_.Mutable(index); +} +inline ::google::protobuf::FieldDescriptorProto* DescriptorProto::add_field() { + return field_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::FieldDescriptorProto >& +DescriptorProto::field() const { + return field_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::FieldDescriptorProto >* +DescriptorProto::mutable_field() { + return &field_; +} + +// repeated .google.protobuf.FieldDescriptorProto extension = 6; +inline int DescriptorProto::extension_size() const { + return extension_.size(); +} +inline void DescriptorProto::clear_extension() { + extension_.Clear(); +} +inline const ::google::protobuf::FieldDescriptorProto& DescriptorProto::extension(int index) const { + return extension_.Get(index); +} +inline ::google::protobuf::FieldDescriptorProto* DescriptorProto::mutable_extension(int index) { + return extension_.Mutable(index); +} +inline ::google::protobuf::FieldDescriptorProto* DescriptorProto::add_extension() { + return extension_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< 
::google::protobuf::FieldDescriptorProto >& +DescriptorProto::extension() const { + return extension_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::FieldDescriptorProto >* +DescriptorProto::mutable_extension() { + return &extension_; +} + +// repeated .google.protobuf.DescriptorProto nested_type = 3; +inline int DescriptorProto::nested_type_size() const { + return nested_type_.size(); +} +inline void DescriptorProto::clear_nested_type() { + nested_type_.Clear(); +} +inline const ::google::protobuf::DescriptorProto& DescriptorProto::nested_type(int index) const { + return nested_type_.Get(index); +} +inline ::google::protobuf::DescriptorProto* DescriptorProto::mutable_nested_type(int index) { + return nested_type_.Mutable(index); +} +inline ::google::protobuf::DescriptorProto* DescriptorProto::add_nested_type() { + return nested_type_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::DescriptorProto >& +DescriptorProto::nested_type() const { + return nested_type_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::DescriptorProto >* +DescriptorProto::mutable_nested_type() { + return &nested_type_; +} + +// repeated .google.protobuf.EnumDescriptorProto enum_type = 4; +inline int DescriptorProto::enum_type_size() const { + return enum_type_.size(); +} +inline void DescriptorProto::clear_enum_type() { + enum_type_.Clear(); +} +inline const ::google::protobuf::EnumDescriptorProto& DescriptorProto::enum_type(int index) const { + return enum_type_.Get(index); +} +inline ::google::protobuf::EnumDescriptorProto* DescriptorProto::mutable_enum_type(int index) { + return enum_type_.Mutable(index); +} +inline ::google::protobuf::EnumDescriptorProto* DescriptorProto::add_enum_type() { + return enum_type_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::EnumDescriptorProto >& +DescriptorProto::enum_type() const { + return enum_type_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::EnumDescriptorProto >* +DescriptorProto::mutable_enum_type() { + return &enum_type_; +} + +// repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; +inline int DescriptorProto::extension_range_size() const { + return extension_range_.size(); +} +inline void DescriptorProto::clear_extension_range() { + extension_range_.Clear(); +} +inline const ::google::protobuf::DescriptorProto_ExtensionRange& DescriptorProto::extension_range(int index) const { + return extension_range_.Get(index); +} +inline ::google::protobuf::DescriptorProto_ExtensionRange* DescriptorProto::mutable_extension_range(int index) { + return extension_range_.Mutable(index); +} +inline ::google::protobuf::DescriptorProto_ExtensionRange* DescriptorProto::add_extension_range() { + return extension_range_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::DescriptorProto_ExtensionRange >& +DescriptorProto::extension_range() const { + return extension_range_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::DescriptorProto_ExtensionRange >* +DescriptorProto::mutable_extension_range() { + return &extension_range_; +} + +// optional .google.protobuf.MessageOptions options = 7; +inline bool DescriptorProto::has_options() const { + return (_has_bits_[0] & 0x00000040u) != 0; +} +inline void DescriptorProto::set_has_options() { + _has_bits_[0] |= 0x00000040u; +} +inline void DescriptorProto::clear_has_options() { + _has_bits_[0] &= ~0x00000040u; +} +inline void 
DescriptorProto::clear_options() { + if (options_ != NULL) options_->::google::protobuf::MessageOptions::Clear(); + clear_has_options(); +} +inline const ::google::protobuf::MessageOptions& DescriptorProto::options() const { + return options_ != NULL ? *options_ : *default_instance_->options_; +} +inline ::google::protobuf::MessageOptions* DescriptorProto::mutable_options() { + set_has_options(); + if (options_ == NULL) options_ = new ::google::protobuf::MessageOptions; + return options_; +} +inline ::google::protobuf::MessageOptions* DescriptorProto::release_options() { + clear_has_options(); + ::google::protobuf::MessageOptions* temp = options_; + options_ = NULL; + return temp; +} + +// ------------------------------------------------------------------- + +// FieldDescriptorProto + +// optional string name = 1; +inline bool FieldDescriptorProto::has_name() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void FieldDescriptorProto::set_has_name() { + _has_bits_[0] |= 0x00000001u; +} +inline void FieldDescriptorProto::clear_has_name() { + _has_bits_[0] &= ~0x00000001u; +} +inline void FieldDescriptorProto::clear_name() { + if (name_ != &::google::protobuf::internal::kEmptyString) { + name_->clear(); + } + clear_has_name(); +} +inline const ::std::string& FieldDescriptorProto::name() const { + return *name_; +} +inline void FieldDescriptorProto::set_name(const ::std::string& value) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(value); +} +inline void FieldDescriptorProto::set_name(const char* value) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(value); +} +inline void FieldDescriptorProto::set_name(const char* value, size_t size) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(reinterpret_cast(value), size); +} +inline ::std::string* FieldDescriptorProto::mutable_name() { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + return name_; +} +inline ::std::string* FieldDescriptorProto::release_name() { + clear_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = name_; + name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// optional int32 number = 3; +inline bool FieldDescriptorProto::has_number() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +inline void FieldDescriptorProto::set_has_number() { + _has_bits_[0] |= 0x00000002u; +} +inline void FieldDescriptorProto::clear_has_number() { + _has_bits_[0] &= ~0x00000002u; +} +inline void FieldDescriptorProto::clear_number() { + number_ = 0; + clear_has_number(); +} +inline ::google::protobuf::int32 FieldDescriptorProto::number() const { + return number_; +} +inline void FieldDescriptorProto::set_number(::google::protobuf::int32 value) { + set_has_number(); + number_ = value; +} + +// optional .google.protobuf.FieldDescriptorProto.Label label = 4; +inline bool FieldDescriptorProto::has_label() const { + return (_has_bits_[0] & 0x00000004u) != 0; +} +inline void FieldDescriptorProto::set_has_label() { + _has_bits_[0] |= 0x00000004u; +} +inline void FieldDescriptorProto::clear_has_label() { + _has_bits_[0] &= ~0x00000004u; +} +inline void FieldDescriptorProto::clear_label() { + label_ = 1; 
+ clear_has_label(); +} +inline ::google::protobuf::FieldDescriptorProto_Label FieldDescriptorProto::label() const { + return static_cast< ::google::protobuf::FieldDescriptorProto_Label >(label_); +} +inline void FieldDescriptorProto::set_label(::google::protobuf::FieldDescriptorProto_Label value) { + GOOGLE_DCHECK(::google::protobuf::FieldDescriptorProto_Label_IsValid(value)); + set_has_label(); + label_ = value; +} + +// optional .google.protobuf.FieldDescriptorProto.Type type = 5; +inline bool FieldDescriptorProto::has_type() const { + return (_has_bits_[0] & 0x00000008u) != 0; +} +inline void FieldDescriptorProto::set_has_type() { + _has_bits_[0] |= 0x00000008u; +} +inline void FieldDescriptorProto::clear_has_type() { + _has_bits_[0] &= ~0x00000008u; +} +inline void FieldDescriptorProto::clear_type() { + type_ = 1; + clear_has_type(); +} +inline ::google::protobuf::FieldDescriptorProto_Type FieldDescriptorProto::type() const { + return static_cast< ::google::protobuf::FieldDescriptorProto_Type >(type_); +} +inline void FieldDescriptorProto::set_type(::google::protobuf::FieldDescriptorProto_Type value) { + GOOGLE_DCHECK(::google::protobuf::FieldDescriptorProto_Type_IsValid(value)); + set_has_type(); + type_ = value; +} + +// optional string type_name = 6; +inline bool FieldDescriptorProto::has_type_name() const { + return (_has_bits_[0] & 0x00000010u) != 0; +} +inline void FieldDescriptorProto::set_has_type_name() { + _has_bits_[0] |= 0x00000010u; +} +inline void FieldDescriptorProto::clear_has_type_name() { + _has_bits_[0] &= ~0x00000010u; +} +inline void FieldDescriptorProto::clear_type_name() { + if (type_name_ != &::google::protobuf::internal::kEmptyString) { + type_name_->clear(); + } + clear_has_type_name(); +} +inline const ::std::string& FieldDescriptorProto::type_name() const { + return *type_name_; +} +inline void FieldDescriptorProto::set_type_name(const ::std::string& value) { + set_has_type_name(); + if (type_name_ == &::google::protobuf::internal::kEmptyString) { + type_name_ = new ::std::string; + } + type_name_->assign(value); +} +inline void FieldDescriptorProto::set_type_name(const char* value) { + set_has_type_name(); + if (type_name_ == &::google::protobuf::internal::kEmptyString) { + type_name_ = new ::std::string; + } + type_name_->assign(value); +} +inline void FieldDescriptorProto::set_type_name(const char* value, size_t size) { + set_has_type_name(); + if (type_name_ == &::google::protobuf::internal::kEmptyString) { + type_name_ = new ::std::string; + } + type_name_->assign(reinterpret_cast(value), size); +} +inline ::std::string* FieldDescriptorProto::mutable_type_name() { + set_has_type_name(); + if (type_name_ == &::google::protobuf::internal::kEmptyString) { + type_name_ = new ::std::string; + } + return type_name_; +} +inline ::std::string* FieldDescriptorProto::release_type_name() { + clear_has_type_name(); + if (type_name_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = type_name_; + type_name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// optional string extendee = 2; +inline bool FieldDescriptorProto::has_extendee() const { + return (_has_bits_[0] & 0x00000020u) != 0; +} +inline void FieldDescriptorProto::set_has_extendee() { + _has_bits_[0] |= 0x00000020u; +} +inline void FieldDescriptorProto::clear_has_extendee() { + _has_bits_[0] &= ~0x00000020u; +} +inline void FieldDescriptorProto::clear_extendee() { + if (extendee_ != 
&::google::protobuf::internal::kEmptyString) { + extendee_->clear(); + } + clear_has_extendee(); +} +inline const ::std::string& FieldDescriptorProto::extendee() const { + return *extendee_; +} +inline void FieldDescriptorProto::set_extendee(const ::std::string& value) { + set_has_extendee(); + if (extendee_ == &::google::protobuf::internal::kEmptyString) { + extendee_ = new ::std::string; + } + extendee_->assign(value); +} +inline void FieldDescriptorProto::set_extendee(const char* value) { + set_has_extendee(); + if (extendee_ == &::google::protobuf::internal::kEmptyString) { + extendee_ = new ::std::string; + } + extendee_->assign(value); +} +inline void FieldDescriptorProto::set_extendee(const char* value, size_t size) { + set_has_extendee(); + if (extendee_ == &::google::protobuf::internal::kEmptyString) { + extendee_ = new ::std::string; + } + extendee_->assign(reinterpret_cast(value), size); +} +inline ::std::string* FieldDescriptorProto::mutable_extendee() { + set_has_extendee(); + if (extendee_ == &::google::protobuf::internal::kEmptyString) { + extendee_ = new ::std::string; + } + return extendee_; +} +inline ::std::string* FieldDescriptorProto::release_extendee() { + clear_has_extendee(); + if (extendee_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = extendee_; + extendee_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// optional string default_value = 7; +inline bool FieldDescriptorProto::has_default_value() const { + return (_has_bits_[0] & 0x00000040u) != 0; +} +inline void FieldDescriptorProto::set_has_default_value() { + _has_bits_[0] |= 0x00000040u; +} +inline void FieldDescriptorProto::clear_has_default_value() { + _has_bits_[0] &= ~0x00000040u; +} +inline void FieldDescriptorProto::clear_default_value() { + if (default_value_ != &::google::protobuf::internal::kEmptyString) { + default_value_->clear(); + } + clear_has_default_value(); +} +inline const ::std::string& FieldDescriptorProto::default_value() const { + return *default_value_; +} +inline void FieldDescriptorProto::set_default_value(const ::std::string& value) { + set_has_default_value(); + if (default_value_ == &::google::protobuf::internal::kEmptyString) { + default_value_ = new ::std::string; + } + default_value_->assign(value); +} +inline void FieldDescriptorProto::set_default_value(const char* value) { + set_has_default_value(); + if (default_value_ == &::google::protobuf::internal::kEmptyString) { + default_value_ = new ::std::string; + } + default_value_->assign(value); +} +inline void FieldDescriptorProto::set_default_value(const char* value, size_t size) { + set_has_default_value(); + if (default_value_ == &::google::protobuf::internal::kEmptyString) { + default_value_ = new ::std::string; + } + default_value_->assign(reinterpret_cast(value), size); +} +inline ::std::string* FieldDescriptorProto::mutable_default_value() { + set_has_default_value(); + if (default_value_ == &::google::protobuf::internal::kEmptyString) { + default_value_ = new ::std::string; + } + return default_value_; +} +inline ::std::string* FieldDescriptorProto::release_default_value() { + clear_has_default_value(); + if (default_value_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = default_value_; + default_value_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// optional .google.protobuf.FieldOptions options = 8; +inline 
bool FieldDescriptorProto::has_options() const { + return (_has_bits_[0] & 0x00000080u) != 0; +} +inline void FieldDescriptorProto::set_has_options() { + _has_bits_[0] |= 0x00000080u; +} +inline void FieldDescriptorProto::clear_has_options() { + _has_bits_[0] &= ~0x00000080u; +} +inline void FieldDescriptorProto::clear_options() { + if (options_ != NULL) options_->::google::protobuf::FieldOptions::Clear(); + clear_has_options(); +} +inline const ::google::protobuf::FieldOptions& FieldDescriptorProto::options() const { + return options_ != NULL ? *options_ : *default_instance_->options_; +} +inline ::google::protobuf::FieldOptions* FieldDescriptorProto::mutable_options() { + set_has_options(); + if (options_ == NULL) options_ = new ::google::protobuf::FieldOptions; + return options_; +} +inline ::google::protobuf::FieldOptions* FieldDescriptorProto::release_options() { + clear_has_options(); + ::google::protobuf::FieldOptions* temp = options_; + options_ = NULL; + return temp; +} + +// ------------------------------------------------------------------- + +// EnumDescriptorProto + +// optional string name = 1; +inline bool EnumDescriptorProto::has_name() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void EnumDescriptorProto::set_has_name() { + _has_bits_[0] |= 0x00000001u; +} +inline void EnumDescriptorProto::clear_has_name() { + _has_bits_[0] &= ~0x00000001u; +} +inline void EnumDescriptorProto::clear_name() { + if (name_ != &::google::protobuf::internal::kEmptyString) { + name_->clear(); + } + clear_has_name(); +} +inline const ::std::string& EnumDescriptorProto::name() const { + return *name_; +} +inline void EnumDescriptorProto::set_name(const ::std::string& value) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(value); +} +inline void EnumDescriptorProto::set_name(const char* value) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(value); +} +inline void EnumDescriptorProto::set_name(const char* value, size_t size) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(reinterpret_cast(value), size); +} +inline ::std::string* EnumDescriptorProto::mutable_name() { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + return name_; +} +inline ::std::string* EnumDescriptorProto::release_name() { + clear_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = name_; + name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// repeated .google.protobuf.EnumValueDescriptorProto value = 2; +inline int EnumDescriptorProto::value_size() const { + return value_.size(); +} +inline void EnumDescriptorProto::clear_value() { + value_.Clear(); +} +inline const ::google::protobuf::EnumValueDescriptorProto& EnumDescriptorProto::value(int index) const { + return value_.Get(index); +} +inline ::google::protobuf::EnumValueDescriptorProto* EnumDescriptorProto::mutable_value(int index) { + return value_.Mutable(index); +} +inline ::google::protobuf::EnumValueDescriptorProto* EnumDescriptorProto::add_value() { + return value_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::EnumValueDescriptorProto >& +EnumDescriptorProto::value() const { + 
return value_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::EnumValueDescriptorProto >* +EnumDescriptorProto::mutable_value() { + return &value_; +} + +// optional .google.protobuf.EnumOptions options = 3; +inline bool EnumDescriptorProto::has_options() const { + return (_has_bits_[0] & 0x00000004u) != 0; +} +inline void EnumDescriptorProto::set_has_options() { + _has_bits_[0] |= 0x00000004u; +} +inline void EnumDescriptorProto::clear_has_options() { + _has_bits_[0] &= ~0x00000004u; +} +inline void EnumDescriptorProto::clear_options() { + if (options_ != NULL) options_->::google::protobuf::EnumOptions::Clear(); + clear_has_options(); +} +inline const ::google::protobuf::EnumOptions& EnumDescriptorProto::options() const { + return options_ != NULL ? *options_ : *default_instance_->options_; +} +inline ::google::protobuf::EnumOptions* EnumDescriptorProto::mutable_options() { + set_has_options(); + if (options_ == NULL) options_ = new ::google::protobuf::EnumOptions; + return options_; +} +inline ::google::protobuf::EnumOptions* EnumDescriptorProto::release_options() { + clear_has_options(); + ::google::protobuf::EnumOptions* temp = options_; + options_ = NULL; + return temp; +} + +// ------------------------------------------------------------------- + +// EnumValueDescriptorProto + +// optional string name = 1; +inline bool EnumValueDescriptorProto::has_name() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void EnumValueDescriptorProto::set_has_name() { + _has_bits_[0] |= 0x00000001u; +} +inline void EnumValueDescriptorProto::clear_has_name() { + _has_bits_[0] &= ~0x00000001u; +} +inline void EnumValueDescriptorProto::clear_name() { + if (name_ != &::google::protobuf::internal::kEmptyString) { + name_->clear(); + } + clear_has_name(); +} +inline const ::std::string& EnumValueDescriptorProto::name() const { + return *name_; +} +inline void EnumValueDescriptorProto::set_name(const ::std::string& value) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(value); +} +inline void EnumValueDescriptorProto::set_name(const char* value) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(value); +} +inline void EnumValueDescriptorProto::set_name(const char* value, size_t size) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(reinterpret_cast(value), size); +} +inline ::std::string* EnumValueDescriptorProto::mutable_name() { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + return name_; +} +inline ::std::string* EnumValueDescriptorProto::release_name() { + clear_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = name_; + name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// optional int32 number = 2; +inline bool EnumValueDescriptorProto::has_number() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +inline void EnumValueDescriptorProto::set_has_number() { + _has_bits_[0] |= 0x00000002u; +} +inline void EnumValueDescriptorProto::clear_has_number() { + _has_bits_[0] &= ~0x00000002u; +} +inline void EnumValueDescriptorProto::clear_number() { + number_ = 0; + clear_has_number(); +} +inline ::google::protobuf::int32 
EnumValueDescriptorProto::number() const { + return number_; +} +inline void EnumValueDescriptorProto::set_number(::google::protobuf::int32 value) { + set_has_number(); + number_ = value; +} + +// optional .google.protobuf.EnumValueOptions options = 3; +inline bool EnumValueDescriptorProto::has_options() const { + return (_has_bits_[0] & 0x00000004u) != 0; +} +inline void EnumValueDescriptorProto::set_has_options() { + _has_bits_[0] |= 0x00000004u; +} +inline void EnumValueDescriptorProto::clear_has_options() { + _has_bits_[0] &= ~0x00000004u; +} +inline void EnumValueDescriptorProto::clear_options() { + if (options_ != NULL) options_->::google::protobuf::EnumValueOptions::Clear(); + clear_has_options(); +} +inline const ::google::protobuf::EnumValueOptions& EnumValueDescriptorProto::options() const { + return options_ != NULL ? *options_ : *default_instance_->options_; +} +inline ::google::protobuf::EnumValueOptions* EnumValueDescriptorProto::mutable_options() { + set_has_options(); + if (options_ == NULL) options_ = new ::google::protobuf::EnumValueOptions; + return options_; +} +inline ::google::protobuf::EnumValueOptions* EnumValueDescriptorProto::release_options() { + clear_has_options(); + ::google::protobuf::EnumValueOptions* temp = options_; + options_ = NULL; + return temp; +} + +// ------------------------------------------------------------------- + +// ServiceDescriptorProto + +// optional string name = 1; +inline bool ServiceDescriptorProto::has_name() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void ServiceDescriptorProto::set_has_name() { + _has_bits_[0] |= 0x00000001u; +} +inline void ServiceDescriptorProto::clear_has_name() { + _has_bits_[0] &= ~0x00000001u; +} +inline void ServiceDescriptorProto::clear_name() { + if (name_ != &::google::protobuf::internal::kEmptyString) { + name_->clear(); + } + clear_has_name(); +} +inline const ::std::string& ServiceDescriptorProto::name() const { + return *name_; +} +inline void ServiceDescriptorProto::set_name(const ::std::string& value) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(value); +} +inline void ServiceDescriptorProto::set_name(const char* value) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(value); +} +inline void ServiceDescriptorProto::set_name(const char* value, size_t size) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(reinterpret_cast(value), size); +} +inline ::std::string* ServiceDescriptorProto::mutable_name() { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + return name_; +} +inline ::std::string* ServiceDescriptorProto::release_name() { + clear_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = name_; + name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// repeated .google.protobuf.MethodDescriptorProto method = 2; +inline int ServiceDescriptorProto::method_size() const { + return method_.size(); +} +inline void ServiceDescriptorProto::clear_method() { + method_.Clear(); +} +inline const ::google::protobuf::MethodDescriptorProto& ServiceDescriptorProto::method(int index) const { + return method_.Get(index); +} +inline 
::google::protobuf::MethodDescriptorProto* ServiceDescriptorProto::mutable_method(int index) { + return method_.Mutable(index); +} +inline ::google::protobuf::MethodDescriptorProto* ServiceDescriptorProto::add_method() { + return method_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::MethodDescriptorProto >& +ServiceDescriptorProto::method() const { + return method_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::MethodDescriptorProto >* +ServiceDescriptorProto::mutable_method() { + return &method_; +} + +// optional .google.protobuf.ServiceOptions options = 3; +inline bool ServiceDescriptorProto::has_options() const { + return (_has_bits_[0] & 0x00000004u) != 0; +} +inline void ServiceDescriptorProto::set_has_options() { + _has_bits_[0] |= 0x00000004u; +} +inline void ServiceDescriptorProto::clear_has_options() { + _has_bits_[0] &= ~0x00000004u; +} +inline void ServiceDescriptorProto::clear_options() { + if (options_ != NULL) options_->::google::protobuf::ServiceOptions::Clear(); + clear_has_options(); +} +inline const ::google::protobuf::ServiceOptions& ServiceDescriptorProto::options() const { + return options_ != NULL ? *options_ : *default_instance_->options_; +} +inline ::google::protobuf::ServiceOptions* ServiceDescriptorProto::mutable_options() { + set_has_options(); + if (options_ == NULL) options_ = new ::google::protobuf::ServiceOptions; + return options_; +} +inline ::google::protobuf::ServiceOptions* ServiceDescriptorProto::release_options() { + clear_has_options(); + ::google::protobuf::ServiceOptions* temp = options_; + options_ = NULL; + return temp; +} + +// ------------------------------------------------------------------- + +// MethodDescriptorProto + +// optional string name = 1; +inline bool MethodDescriptorProto::has_name() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void MethodDescriptorProto::set_has_name() { + _has_bits_[0] |= 0x00000001u; +} +inline void MethodDescriptorProto::clear_has_name() { + _has_bits_[0] &= ~0x00000001u; +} +inline void MethodDescriptorProto::clear_name() { + if (name_ != &::google::protobuf::internal::kEmptyString) { + name_->clear(); + } + clear_has_name(); +} +inline const ::std::string& MethodDescriptorProto::name() const { + return *name_; +} +inline void MethodDescriptorProto::set_name(const ::std::string& value) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(value); +} +inline void MethodDescriptorProto::set_name(const char* value) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(value); +} +inline void MethodDescriptorProto::set_name(const char* value, size_t size) { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + name_->assign(reinterpret_cast(value), size); +} +inline ::std::string* MethodDescriptorProto::mutable_name() { + set_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + name_ = new ::std::string; + } + return name_; +} +inline ::std::string* MethodDescriptorProto::release_name() { + clear_has_name(); + if (name_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = name_; + name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// optional string input_type = 2; +inline bool 
MethodDescriptorProto::has_input_type() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +inline void MethodDescriptorProto::set_has_input_type() { + _has_bits_[0] |= 0x00000002u; +} +inline void MethodDescriptorProto::clear_has_input_type() { + _has_bits_[0] &= ~0x00000002u; +} +inline void MethodDescriptorProto::clear_input_type() { + if (input_type_ != &::google::protobuf::internal::kEmptyString) { + input_type_->clear(); + } + clear_has_input_type(); +} +inline const ::std::string& MethodDescriptorProto::input_type() const { + return *input_type_; +} +inline void MethodDescriptorProto::set_input_type(const ::std::string& value) { + set_has_input_type(); + if (input_type_ == &::google::protobuf::internal::kEmptyString) { + input_type_ = new ::std::string; + } + input_type_->assign(value); +} +inline void MethodDescriptorProto::set_input_type(const char* value) { + set_has_input_type(); + if (input_type_ == &::google::protobuf::internal::kEmptyString) { + input_type_ = new ::std::string; + } + input_type_->assign(value); +} +inline void MethodDescriptorProto::set_input_type(const char* value, size_t size) { + set_has_input_type(); + if (input_type_ == &::google::protobuf::internal::kEmptyString) { + input_type_ = new ::std::string; + } + input_type_->assign(reinterpret_cast(value), size); +} +inline ::std::string* MethodDescriptorProto::mutable_input_type() { + set_has_input_type(); + if (input_type_ == &::google::protobuf::internal::kEmptyString) { + input_type_ = new ::std::string; + } + return input_type_; +} +inline ::std::string* MethodDescriptorProto::release_input_type() { + clear_has_input_type(); + if (input_type_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = input_type_; + input_type_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// optional string output_type = 3; +inline bool MethodDescriptorProto::has_output_type() const { + return (_has_bits_[0] & 0x00000004u) != 0; +} +inline void MethodDescriptorProto::set_has_output_type() { + _has_bits_[0] |= 0x00000004u; +} +inline void MethodDescriptorProto::clear_has_output_type() { + _has_bits_[0] &= ~0x00000004u; +} +inline void MethodDescriptorProto::clear_output_type() { + if (output_type_ != &::google::protobuf::internal::kEmptyString) { + output_type_->clear(); + } + clear_has_output_type(); +} +inline const ::std::string& MethodDescriptorProto::output_type() const { + return *output_type_; +} +inline void MethodDescriptorProto::set_output_type(const ::std::string& value) { + set_has_output_type(); + if (output_type_ == &::google::protobuf::internal::kEmptyString) { + output_type_ = new ::std::string; + } + output_type_->assign(value); +} +inline void MethodDescriptorProto::set_output_type(const char* value) { + set_has_output_type(); + if (output_type_ == &::google::protobuf::internal::kEmptyString) { + output_type_ = new ::std::string; + } + output_type_->assign(value); +} +inline void MethodDescriptorProto::set_output_type(const char* value, size_t size) { + set_has_output_type(); + if (output_type_ == &::google::protobuf::internal::kEmptyString) { + output_type_ = new ::std::string; + } + output_type_->assign(reinterpret_cast(value), size); +} +inline ::std::string* MethodDescriptorProto::mutable_output_type() { + set_has_output_type(); + if (output_type_ == &::google::protobuf::internal::kEmptyString) { + output_type_ = new ::std::string; + } + return output_type_; +} +inline ::std::string* 
MethodDescriptorProto::release_output_type() { + clear_has_output_type(); + if (output_type_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = output_type_; + output_type_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// optional .google.protobuf.MethodOptions options = 4; +inline bool MethodDescriptorProto::has_options() const { + return (_has_bits_[0] & 0x00000008u) != 0; +} +inline void MethodDescriptorProto::set_has_options() { + _has_bits_[0] |= 0x00000008u; +} +inline void MethodDescriptorProto::clear_has_options() { + _has_bits_[0] &= ~0x00000008u; +} +inline void MethodDescriptorProto::clear_options() { + if (options_ != NULL) options_->::google::protobuf::MethodOptions::Clear(); + clear_has_options(); +} +inline const ::google::protobuf::MethodOptions& MethodDescriptorProto::options() const { + return options_ != NULL ? *options_ : *default_instance_->options_; +} +inline ::google::protobuf::MethodOptions* MethodDescriptorProto::mutable_options() { + set_has_options(); + if (options_ == NULL) options_ = new ::google::protobuf::MethodOptions; + return options_; +} +inline ::google::protobuf::MethodOptions* MethodDescriptorProto::release_options() { + clear_has_options(); + ::google::protobuf::MethodOptions* temp = options_; + options_ = NULL; + return temp; +} + +// ------------------------------------------------------------------- + +// FileOptions + +// optional string java_package = 1; +inline bool FileOptions::has_java_package() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void FileOptions::set_has_java_package() { + _has_bits_[0] |= 0x00000001u; +} +inline void FileOptions::clear_has_java_package() { + _has_bits_[0] &= ~0x00000001u; +} +inline void FileOptions::clear_java_package() { + if (java_package_ != &::google::protobuf::internal::kEmptyString) { + java_package_->clear(); + } + clear_has_java_package(); +} +inline const ::std::string& FileOptions::java_package() const { + return *java_package_; +} +inline void FileOptions::set_java_package(const ::std::string& value) { + set_has_java_package(); + if (java_package_ == &::google::protobuf::internal::kEmptyString) { + java_package_ = new ::std::string; + } + java_package_->assign(value); +} +inline void FileOptions::set_java_package(const char* value) { + set_has_java_package(); + if (java_package_ == &::google::protobuf::internal::kEmptyString) { + java_package_ = new ::std::string; + } + java_package_->assign(value); +} +inline void FileOptions::set_java_package(const char* value, size_t size) { + set_has_java_package(); + if (java_package_ == &::google::protobuf::internal::kEmptyString) { + java_package_ = new ::std::string; + } + java_package_->assign(reinterpret_cast(value), size); +} +inline ::std::string* FileOptions::mutable_java_package() { + set_has_java_package(); + if (java_package_ == &::google::protobuf::internal::kEmptyString) { + java_package_ = new ::std::string; + } + return java_package_; +} +inline ::std::string* FileOptions::release_java_package() { + clear_has_java_package(); + if (java_package_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = java_package_; + java_package_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// optional string java_outer_classname = 8; +inline bool FileOptions::has_java_outer_classname() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} 
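// ---------------------------------------------------------------------------
// [Editor's aside -- illustrative sketch only; not part of the vendored
// descriptor.pb.h above and not the protobuf API.] The generated accessors in
// this header repeat two ideas: field presence is tracked as one bit per
// optional field in a packed _has_bits_ word, and string fields initially
// point at a shared empty-string sentinel so a real std::string is only
// heap-allocated on the first write. A minimal standalone version of that
// pattern, with hypothetical names, might look like this:
#include <cstdint>
#include <string>

namespace hasbits_sketch {

// Shared placeholder meaning "no value set yet"; never mutated.
const std::string kEmptySentinel;

class Person {
 public:
  Person() : name_(const_cast<std::string*>(&kEmptySentinel)), has_bits_(0) {}
  ~Person() {
    if (name_ != &kEmptySentinel) delete name_;  // only delete what we allocated
  }
  Person(const Person&) = delete;             // keep the sketch copy-safe
  Person& operator=(const Person&) = delete;

  bool has_name() const { return (has_bits_ & 0x00000001u) != 0; }
  const std::string& name() const { return *name_; }

  void set_name(const std::string& value) {
    has_bits_ |= 0x00000001u;                 // record presence
    if (name_ == &kEmptySentinel) {
      name_ = new std::string;                // lazy allocation on first write
    }
    name_->assign(value);
  }

  void clear_name() {
    if (name_ != &kEmptySentinel) name_->clear();
    has_bits_ &= ~0x00000001u;                // drop the presence bit
  }

 private:
  std::string* name_;   // points at kEmptySentinel until set
  uint32_t has_bits_;   // bit 0 = name
};

}  // namespace hasbits_sketch
// ---------------------------------------------------------------------------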
+inline void FileOptions::set_has_java_outer_classname() { + _has_bits_[0] |= 0x00000002u; +} +inline void FileOptions::clear_has_java_outer_classname() { + _has_bits_[0] &= ~0x00000002u; +} +inline void FileOptions::clear_java_outer_classname() { + if (java_outer_classname_ != &::google::protobuf::internal::kEmptyString) { + java_outer_classname_->clear(); + } + clear_has_java_outer_classname(); +} +inline const ::std::string& FileOptions::java_outer_classname() const { + return *java_outer_classname_; +} +inline void FileOptions::set_java_outer_classname(const ::std::string& value) { + set_has_java_outer_classname(); + if (java_outer_classname_ == &::google::protobuf::internal::kEmptyString) { + java_outer_classname_ = new ::std::string; + } + java_outer_classname_->assign(value); +} +inline void FileOptions::set_java_outer_classname(const char* value) { + set_has_java_outer_classname(); + if (java_outer_classname_ == &::google::protobuf::internal::kEmptyString) { + java_outer_classname_ = new ::std::string; + } + java_outer_classname_->assign(value); +} +inline void FileOptions::set_java_outer_classname(const char* value, size_t size) { + set_has_java_outer_classname(); + if (java_outer_classname_ == &::google::protobuf::internal::kEmptyString) { + java_outer_classname_ = new ::std::string; + } + java_outer_classname_->assign(reinterpret_cast(value), size); +} +inline ::std::string* FileOptions::mutable_java_outer_classname() { + set_has_java_outer_classname(); + if (java_outer_classname_ == &::google::protobuf::internal::kEmptyString) { + java_outer_classname_ = new ::std::string; + } + return java_outer_classname_; +} +inline ::std::string* FileOptions::release_java_outer_classname() { + clear_has_java_outer_classname(); + if (java_outer_classname_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = java_outer_classname_; + java_outer_classname_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// optional bool java_multiple_files = 10 [default = false]; +inline bool FileOptions::has_java_multiple_files() const { + return (_has_bits_[0] & 0x00000004u) != 0; +} +inline void FileOptions::set_has_java_multiple_files() { + _has_bits_[0] |= 0x00000004u; +} +inline void FileOptions::clear_has_java_multiple_files() { + _has_bits_[0] &= ~0x00000004u; +} +inline void FileOptions::clear_java_multiple_files() { + java_multiple_files_ = false; + clear_has_java_multiple_files(); +} +inline bool FileOptions::java_multiple_files() const { + return java_multiple_files_; +} +inline void FileOptions::set_java_multiple_files(bool value) { + set_has_java_multiple_files(); + java_multiple_files_ = value; +} + +// optional bool java_generate_equals_and_hash = 20 [default = false]; +inline bool FileOptions::has_java_generate_equals_and_hash() const { + return (_has_bits_[0] & 0x00000008u) != 0; +} +inline void FileOptions::set_has_java_generate_equals_and_hash() { + _has_bits_[0] |= 0x00000008u; +} +inline void FileOptions::clear_has_java_generate_equals_and_hash() { + _has_bits_[0] &= ~0x00000008u; +} +inline void FileOptions::clear_java_generate_equals_and_hash() { + java_generate_equals_and_hash_ = false; + clear_has_java_generate_equals_and_hash(); +} +inline bool FileOptions::java_generate_equals_and_hash() const { + return java_generate_equals_and_hash_; +} +inline void FileOptions::set_java_generate_equals_and_hash(bool value) { + set_has_java_generate_equals_and_hash(); + java_generate_equals_and_hash_ 
= value; +} + +// optional .google.protobuf.FileOptions.OptimizeMode optimize_for = 9 [default = SPEED]; +inline bool FileOptions::has_optimize_for() const { + return (_has_bits_[0] & 0x00000010u) != 0; +} +inline void FileOptions::set_has_optimize_for() { + _has_bits_[0] |= 0x00000010u; +} +inline void FileOptions::clear_has_optimize_for() { + _has_bits_[0] &= ~0x00000010u; +} +inline void FileOptions::clear_optimize_for() { + optimize_for_ = 1; + clear_has_optimize_for(); +} +inline ::google::protobuf::FileOptions_OptimizeMode FileOptions::optimize_for() const { + return static_cast< ::google::protobuf::FileOptions_OptimizeMode >(optimize_for_); +} +inline void FileOptions::set_optimize_for(::google::protobuf::FileOptions_OptimizeMode value) { + GOOGLE_DCHECK(::google::protobuf::FileOptions_OptimizeMode_IsValid(value)); + set_has_optimize_for(); + optimize_for_ = value; +} + +// optional bool cc_generic_services = 16 [default = false]; +inline bool FileOptions::has_cc_generic_services() const { + return (_has_bits_[0] & 0x00000020u) != 0; +} +inline void FileOptions::set_has_cc_generic_services() { + _has_bits_[0] |= 0x00000020u; +} +inline void FileOptions::clear_has_cc_generic_services() { + _has_bits_[0] &= ~0x00000020u; +} +inline void FileOptions::clear_cc_generic_services() { + cc_generic_services_ = false; + clear_has_cc_generic_services(); +} +inline bool FileOptions::cc_generic_services() const { + return cc_generic_services_; +} +inline void FileOptions::set_cc_generic_services(bool value) { + set_has_cc_generic_services(); + cc_generic_services_ = value; +} + +// optional bool java_generic_services = 17 [default = false]; +inline bool FileOptions::has_java_generic_services() const { + return (_has_bits_[0] & 0x00000040u) != 0; +} +inline void FileOptions::set_has_java_generic_services() { + _has_bits_[0] |= 0x00000040u; +} +inline void FileOptions::clear_has_java_generic_services() { + _has_bits_[0] &= ~0x00000040u; +} +inline void FileOptions::clear_java_generic_services() { + java_generic_services_ = false; + clear_has_java_generic_services(); +} +inline bool FileOptions::java_generic_services() const { + return java_generic_services_; +} +inline void FileOptions::set_java_generic_services(bool value) { + set_has_java_generic_services(); + java_generic_services_ = value; +} + +// optional bool py_generic_services = 18 [default = false]; +inline bool FileOptions::has_py_generic_services() const { + return (_has_bits_[0] & 0x00000080u) != 0; +} +inline void FileOptions::set_has_py_generic_services() { + _has_bits_[0] |= 0x00000080u; +} +inline void FileOptions::clear_has_py_generic_services() { + _has_bits_[0] &= ~0x00000080u; +} +inline void FileOptions::clear_py_generic_services() { + py_generic_services_ = false; + clear_has_py_generic_services(); +} +inline bool FileOptions::py_generic_services() const { + return py_generic_services_; +} +inline void FileOptions::set_py_generic_services(bool value) { + set_has_py_generic_services(); + py_generic_services_ = value; +} + +// repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; +inline int FileOptions::uninterpreted_option_size() const { + return uninterpreted_option_.size(); +} +inline void FileOptions::clear_uninterpreted_option() { + uninterpreted_option_.Clear(); +} +inline const ::google::protobuf::UninterpretedOption& FileOptions::uninterpreted_option(int index) const { + return uninterpreted_option_.Get(index); +} +inline ::google::protobuf::UninterpretedOption* 
FileOptions::mutable_uninterpreted_option(int index) { + return uninterpreted_option_.Mutable(index); +} +inline ::google::protobuf::UninterpretedOption* FileOptions::add_uninterpreted_option() { + return uninterpreted_option_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >& +FileOptions::uninterpreted_option() const { + return uninterpreted_option_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >* +FileOptions::mutable_uninterpreted_option() { + return &uninterpreted_option_; +} + +// ------------------------------------------------------------------- + +// MessageOptions + +// optional bool message_set_wire_format = 1 [default = false]; +inline bool MessageOptions::has_message_set_wire_format() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void MessageOptions::set_has_message_set_wire_format() { + _has_bits_[0] |= 0x00000001u; +} +inline void MessageOptions::clear_has_message_set_wire_format() { + _has_bits_[0] &= ~0x00000001u; +} +inline void MessageOptions::clear_message_set_wire_format() { + message_set_wire_format_ = false; + clear_has_message_set_wire_format(); +} +inline bool MessageOptions::message_set_wire_format() const { + return message_set_wire_format_; +} +inline void MessageOptions::set_message_set_wire_format(bool value) { + set_has_message_set_wire_format(); + message_set_wire_format_ = value; +} + +// optional bool no_standard_descriptor_accessor = 2 [default = false]; +inline bool MessageOptions::has_no_standard_descriptor_accessor() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +inline void MessageOptions::set_has_no_standard_descriptor_accessor() { + _has_bits_[0] |= 0x00000002u; +} +inline void MessageOptions::clear_has_no_standard_descriptor_accessor() { + _has_bits_[0] &= ~0x00000002u; +} +inline void MessageOptions::clear_no_standard_descriptor_accessor() { + no_standard_descriptor_accessor_ = false; + clear_has_no_standard_descriptor_accessor(); +} +inline bool MessageOptions::no_standard_descriptor_accessor() const { + return no_standard_descriptor_accessor_; +} +inline void MessageOptions::set_no_standard_descriptor_accessor(bool value) { + set_has_no_standard_descriptor_accessor(); + no_standard_descriptor_accessor_ = value; +} + +// repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; +inline int MessageOptions::uninterpreted_option_size() const { + return uninterpreted_option_.size(); +} +inline void MessageOptions::clear_uninterpreted_option() { + uninterpreted_option_.Clear(); +} +inline const ::google::protobuf::UninterpretedOption& MessageOptions::uninterpreted_option(int index) const { + return uninterpreted_option_.Get(index); +} +inline ::google::protobuf::UninterpretedOption* MessageOptions::mutable_uninterpreted_option(int index) { + return uninterpreted_option_.Mutable(index); +} +inline ::google::protobuf::UninterpretedOption* MessageOptions::add_uninterpreted_option() { + return uninterpreted_option_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >& +MessageOptions::uninterpreted_option() const { + return uninterpreted_option_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >* +MessageOptions::mutable_uninterpreted_option() { + return &uninterpreted_option_; +} + +// ------------------------------------------------------------------- + +// FieldOptions + +// optional .google.protobuf.FieldOptions.CType 
ctype = 1 [default = STRING]; +inline bool FieldOptions::has_ctype() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void FieldOptions::set_has_ctype() { + _has_bits_[0] |= 0x00000001u; +} +inline void FieldOptions::clear_has_ctype() { + _has_bits_[0] &= ~0x00000001u; +} +inline void FieldOptions::clear_ctype() { + ctype_ = 0; + clear_has_ctype(); +} +inline ::google::protobuf::FieldOptions_CType FieldOptions::ctype() const { + return static_cast< ::google::protobuf::FieldOptions_CType >(ctype_); +} +inline void FieldOptions::set_ctype(::google::protobuf::FieldOptions_CType value) { + GOOGLE_DCHECK(::google::protobuf::FieldOptions_CType_IsValid(value)); + set_has_ctype(); + ctype_ = value; +} + +// optional bool packed = 2; +inline bool FieldOptions::has_packed() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +inline void FieldOptions::set_has_packed() { + _has_bits_[0] |= 0x00000002u; +} +inline void FieldOptions::clear_has_packed() { + _has_bits_[0] &= ~0x00000002u; +} +inline void FieldOptions::clear_packed() { + packed_ = false; + clear_has_packed(); +} +inline bool FieldOptions::packed() const { + return packed_; +} +inline void FieldOptions::set_packed(bool value) { + set_has_packed(); + packed_ = value; +} + +// optional bool deprecated = 3 [default = false]; +inline bool FieldOptions::has_deprecated() const { + return (_has_bits_[0] & 0x00000004u) != 0; +} +inline void FieldOptions::set_has_deprecated() { + _has_bits_[0] |= 0x00000004u; +} +inline void FieldOptions::clear_has_deprecated() { + _has_bits_[0] &= ~0x00000004u; +} +inline void FieldOptions::clear_deprecated() { + deprecated_ = false; + clear_has_deprecated(); +} +inline bool FieldOptions::deprecated() const { + return deprecated_; +} +inline void FieldOptions::set_deprecated(bool value) { + set_has_deprecated(); + deprecated_ = value; +} + +// optional string experimental_map_key = 9; +inline bool FieldOptions::has_experimental_map_key() const { + return (_has_bits_[0] & 0x00000008u) != 0; +} +inline void FieldOptions::set_has_experimental_map_key() { + _has_bits_[0] |= 0x00000008u; +} +inline void FieldOptions::clear_has_experimental_map_key() { + _has_bits_[0] &= ~0x00000008u; +} +inline void FieldOptions::clear_experimental_map_key() { + if (experimental_map_key_ != &::google::protobuf::internal::kEmptyString) { + experimental_map_key_->clear(); + } + clear_has_experimental_map_key(); +} +inline const ::std::string& FieldOptions::experimental_map_key() const { + return *experimental_map_key_; +} +inline void FieldOptions::set_experimental_map_key(const ::std::string& value) { + set_has_experimental_map_key(); + if (experimental_map_key_ == &::google::protobuf::internal::kEmptyString) { + experimental_map_key_ = new ::std::string; + } + experimental_map_key_->assign(value); +} +inline void FieldOptions::set_experimental_map_key(const char* value) { + set_has_experimental_map_key(); + if (experimental_map_key_ == &::google::protobuf::internal::kEmptyString) { + experimental_map_key_ = new ::std::string; + } + experimental_map_key_->assign(value); +} +inline void FieldOptions::set_experimental_map_key(const char* value, size_t size) { + set_has_experimental_map_key(); + if (experimental_map_key_ == &::google::protobuf::internal::kEmptyString) { + experimental_map_key_ = new ::std::string; + } + experimental_map_key_->assign(reinterpret_cast(value), size); +} +inline ::std::string* FieldOptions::mutable_experimental_map_key() { + set_has_experimental_map_key(); + if (experimental_map_key_ == 
&::google::protobuf::internal::kEmptyString) { + experimental_map_key_ = new ::std::string; + } + return experimental_map_key_; +} +inline ::std::string* FieldOptions::release_experimental_map_key() { + clear_has_experimental_map_key(); + if (experimental_map_key_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = experimental_map_key_; + experimental_map_key_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; +inline int FieldOptions::uninterpreted_option_size() const { + return uninterpreted_option_.size(); +} +inline void FieldOptions::clear_uninterpreted_option() { + uninterpreted_option_.Clear(); +} +inline const ::google::protobuf::UninterpretedOption& FieldOptions::uninterpreted_option(int index) const { + return uninterpreted_option_.Get(index); +} +inline ::google::protobuf::UninterpretedOption* FieldOptions::mutable_uninterpreted_option(int index) { + return uninterpreted_option_.Mutable(index); +} +inline ::google::protobuf::UninterpretedOption* FieldOptions::add_uninterpreted_option() { + return uninterpreted_option_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >& +FieldOptions::uninterpreted_option() const { + return uninterpreted_option_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >* +FieldOptions::mutable_uninterpreted_option() { + return &uninterpreted_option_; +} + +// ------------------------------------------------------------------- + +// EnumOptions + +// repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; +inline int EnumOptions::uninterpreted_option_size() const { + return uninterpreted_option_.size(); +} +inline void EnumOptions::clear_uninterpreted_option() { + uninterpreted_option_.Clear(); +} +inline const ::google::protobuf::UninterpretedOption& EnumOptions::uninterpreted_option(int index) const { + return uninterpreted_option_.Get(index); +} +inline ::google::protobuf::UninterpretedOption* EnumOptions::mutable_uninterpreted_option(int index) { + return uninterpreted_option_.Mutable(index); +} +inline ::google::protobuf::UninterpretedOption* EnumOptions::add_uninterpreted_option() { + return uninterpreted_option_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >& +EnumOptions::uninterpreted_option() const { + return uninterpreted_option_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >* +EnumOptions::mutable_uninterpreted_option() { + return &uninterpreted_option_; +} + +// ------------------------------------------------------------------- + +// EnumValueOptions + +// repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; +inline int EnumValueOptions::uninterpreted_option_size() const { + return uninterpreted_option_.size(); +} +inline void EnumValueOptions::clear_uninterpreted_option() { + uninterpreted_option_.Clear(); +} +inline const ::google::protobuf::UninterpretedOption& EnumValueOptions::uninterpreted_option(int index) const { + return uninterpreted_option_.Get(index); +} +inline ::google::protobuf::UninterpretedOption* EnumValueOptions::mutable_uninterpreted_option(int index) { + return uninterpreted_option_.Mutable(index); +} +inline ::google::protobuf::UninterpretedOption* EnumValueOptions::add_uninterpreted_option() { + return 
uninterpreted_option_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >& +EnumValueOptions::uninterpreted_option() const { + return uninterpreted_option_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >* +EnumValueOptions::mutable_uninterpreted_option() { + return &uninterpreted_option_; +} + +// ------------------------------------------------------------------- + +// ServiceOptions + +// repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; +inline int ServiceOptions::uninterpreted_option_size() const { + return uninterpreted_option_.size(); +} +inline void ServiceOptions::clear_uninterpreted_option() { + uninterpreted_option_.Clear(); +} +inline const ::google::protobuf::UninterpretedOption& ServiceOptions::uninterpreted_option(int index) const { + return uninterpreted_option_.Get(index); +} +inline ::google::protobuf::UninterpretedOption* ServiceOptions::mutable_uninterpreted_option(int index) { + return uninterpreted_option_.Mutable(index); +} +inline ::google::protobuf::UninterpretedOption* ServiceOptions::add_uninterpreted_option() { + return uninterpreted_option_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >& +ServiceOptions::uninterpreted_option() const { + return uninterpreted_option_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >* +ServiceOptions::mutable_uninterpreted_option() { + return &uninterpreted_option_; +} + +// ------------------------------------------------------------------- + +// MethodOptions + +// repeated .google.protobuf.UninterpretedOption uninterpreted_option = 999; +inline int MethodOptions::uninterpreted_option_size() const { + return uninterpreted_option_.size(); +} +inline void MethodOptions::clear_uninterpreted_option() { + uninterpreted_option_.Clear(); +} +inline const ::google::protobuf::UninterpretedOption& MethodOptions::uninterpreted_option(int index) const { + return uninterpreted_option_.Get(index); +} +inline ::google::protobuf::UninterpretedOption* MethodOptions::mutable_uninterpreted_option(int index) { + return uninterpreted_option_.Mutable(index); +} +inline ::google::protobuf::UninterpretedOption* MethodOptions::add_uninterpreted_option() { + return uninterpreted_option_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >& +MethodOptions::uninterpreted_option() const { + return uninterpreted_option_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption >* +MethodOptions::mutable_uninterpreted_option() { + return &uninterpreted_option_; +} + +// ------------------------------------------------------------------- + +// UninterpretedOption_NamePart + +// required string name_part = 1; +inline bool UninterpretedOption_NamePart::has_name_part() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void UninterpretedOption_NamePart::set_has_name_part() { + _has_bits_[0] |= 0x00000001u; +} +inline void UninterpretedOption_NamePart::clear_has_name_part() { + _has_bits_[0] &= ~0x00000001u; +} +inline void UninterpretedOption_NamePart::clear_name_part() { + if (name_part_ != &::google::protobuf::internal::kEmptyString) { + name_part_->clear(); + } + clear_has_name_part(); +} +inline const ::std::string& UninterpretedOption_NamePart::name_part() const { + return *name_part_; +} +inline void UninterpretedOption_NamePart::set_name_part(const 
::std::string& value) { + set_has_name_part(); + if (name_part_ == &::google::protobuf::internal::kEmptyString) { + name_part_ = new ::std::string; + } + name_part_->assign(value); +} +inline void UninterpretedOption_NamePart::set_name_part(const char* value) { + set_has_name_part(); + if (name_part_ == &::google::protobuf::internal::kEmptyString) { + name_part_ = new ::std::string; + } + name_part_->assign(value); +} +inline void UninterpretedOption_NamePart::set_name_part(const char* value, size_t size) { + set_has_name_part(); + if (name_part_ == &::google::protobuf::internal::kEmptyString) { + name_part_ = new ::std::string; + } + name_part_->assign(reinterpret_cast(value), size); +} +inline ::std::string* UninterpretedOption_NamePart::mutable_name_part() { + set_has_name_part(); + if (name_part_ == &::google::protobuf::internal::kEmptyString) { + name_part_ = new ::std::string; + } + return name_part_; +} +inline ::std::string* UninterpretedOption_NamePart::release_name_part() { + clear_has_name_part(); + if (name_part_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = name_part_; + name_part_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// required bool is_extension = 2; +inline bool UninterpretedOption_NamePart::has_is_extension() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +inline void UninterpretedOption_NamePart::set_has_is_extension() { + _has_bits_[0] |= 0x00000002u; +} +inline void UninterpretedOption_NamePart::clear_has_is_extension() { + _has_bits_[0] &= ~0x00000002u; +} +inline void UninterpretedOption_NamePart::clear_is_extension() { + is_extension_ = false; + clear_has_is_extension(); +} +inline bool UninterpretedOption_NamePart::is_extension() const { + return is_extension_; +} +inline void UninterpretedOption_NamePart::set_is_extension(bool value) { + set_has_is_extension(); + is_extension_ = value; +} + +// ------------------------------------------------------------------- + +// UninterpretedOption + +// repeated .google.protobuf.UninterpretedOption.NamePart name = 2; +inline int UninterpretedOption::name_size() const { + return name_.size(); +} +inline void UninterpretedOption::clear_name() { + name_.Clear(); +} +inline const ::google::protobuf::UninterpretedOption_NamePart& UninterpretedOption::name(int index) const { + return name_.Get(index); +} +inline ::google::protobuf::UninterpretedOption_NamePart* UninterpretedOption::mutable_name(int index) { + return name_.Mutable(index); +} +inline ::google::protobuf::UninterpretedOption_NamePart* UninterpretedOption::add_name() { + return name_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption_NamePart >& +UninterpretedOption::name() const { + return name_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::UninterpretedOption_NamePart >* +UninterpretedOption::mutable_name() { + return &name_; +} + +// optional string identifier_value = 3; +inline bool UninterpretedOption::has_identifier_value() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +inline void UninterpretedOption::set_has_identifier_value() { + _has_bits_[0] |= 0x00000002u; +} +inline void UninterpretedOption::clear_has_identifier_value() { + _has_bits_[0] &= ~0x00000002u; +} +inline void UninterpretedOption::clear_identifier_value() { + if (identifier_value_ != &::google::protobuf::internal::kEmptyString) { + identifier_value_->clear(); + } + 
clear_has_identifier_value(); +} +inline const ::std::string& UninterpretedOption::identifier_value() const { + return *identifier_value_; +} +inline void UninterpretedOption::set_identifier_value(const ::std::string& value) { + set_has_identifier_value(); + if (identifier_value_ == &::google::protobuf::internal::kEmptyString) { + identifier_value_ = new ::std::string; + } + identifier_value_->assign(value); +} +inline void UninterpretedOption::set_identifier_value(const char* value) { + set_has_identifier_value(); + if (identifier_value_ == &::google::protobuf::internal::kEmptyString) { + identifier_value_ = new ::std::string; + } + identifier_value_->assign(value); +} +inline void UninterpretedOption::set_identifier_value(const char* value, size_t size) { + set_has_identifier_value(); + if (identifier_value_ == &::google::protobuf::internal::kEmptyString) { + identifier_value_ = new ::std::string; + } + identifier_value_->assign(reinterpret_cast(value), size); +} +inline ::std::string* UninterpretedOption::mutable_identifier_value() { + set_has_identifier_value(); + if (identifier_value_ == &::google::protobuf::internal::kEmptyString) { + identifier_value_ = new ::std::string; + } + return identifier_value_; +} +inline ::std::string* UninterpretedOption::release_identifier_value() { + clear_has_identifier_value(); + if (identifier_value_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = identifier_value_; + identifier_value_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// optional uint64 positive_int_value = 4; +inline bool UninterpretedOption::has_positive_int_value() const { + return (_has_bits_[0] & 0x00000004u) != 0; +} +inline void UninterpretedOption::set_has_positive_int_value() { + _has_bits_[0] |= 0x00000004u; +} +inline void UninterpretedOption::clear_has_positive_int_value() { + _has_bits_[0] &= ~0x00000004u; +} +inline void UninterpretedOption::clear_positive_int_value() { + positive_int_value_ = GOOGLE_ULONGLONG(0); + clear_has_positive_int_value(); +} +inline ::google::protobuf::uint64 UninterpretedOption::positive_int_value() const { + return positive_int_value_; +} +inline void UninterpretedOption::set_positive_int_value(::google::protobuf::uint64 value) { + set_has_positive_int_value(); + positive_int_value_ = value; +} + +// optional int64 negative_int_value = 5; +inline bool UninterpretedOption::has_negative_int_value() const { + return (_has_bits_[0] & 0x00000008u) != 0; +} +inline void UninterpretedOption::set_has_negative_int_value() { + _has_bits_[0] |= 0x00000008u; +} +inline void UninterpretedOption::clear_has_negative_int_value() { + _has_bits_[0] &= ~0x00000008u; +} +inline void UninterpretedOption::clear_negative_int_value() { + negative_int_value_ = GOOGLE_LONGLONG(0); + clear_has_negative_int_value(); +} +inline ::google::protobuf::int64 UninterpretedOption::negative_int_value() const { + return negative_int_value_; +} +inline void UninterpretedOption::set_negative_int_value(::google::protobuf::int64 value) { + set_has_negative_int_value(); + negative_int_value_ = value; +} + +// optional double double_value = 6; +inline bool UninterpretedOption::has_double_value() const { + return (_has_bits_[0] & 0x00000010u) != 0; +} +inline void UninterpretedOption::set_has_double_value() { + _has_bits_[0] |= 0x00000010u; +} +inline void UninterpretedOption::clear_has_double_value() { + _has_bits_[0] &= ~0x00000010u; +} +inline void 
UninterpretedOption::clear_double_value() { + double_value_ = 0; + clear_has_double_value(); +} +inline double UninterpretedOption::double_value() const { + return double_value_; +} +inline void UninterpretedOption::set_double_value(double value) { + set_has_double_value(); + double_value_ = value; +} + +// optional bytes string_value = 7; +inline bool UninterpretedOption::has_string_value() const { + return (_has_bits_[0] & 0x00000020u) != 0; +} +inline void UninterpretedOption::set_has_string_value() { + _has_bits_[0] |= 0x00000020u; +} +inline void UninterpretedOption::clear_has_string_value() { + _has_bits_[0] &= ~0x00000020u; +} +inline void UninterpretedOption::clear_string_value() { + if (string_value_ != &::google::protobuf::internal::kEmptyString) { + string_value_->clear(); + } + clear_has_string_value(); +} +inline const ::std::string& UninterpretedOption::string_value() const { + return *string_value_; +} +inline void UninterpretedOption::set_string_value(const ::std::string& value) { + set_has_string_value(); + if (string_value_ == &::google::protobuf::internal::kEmptyString) { + string_value_ = new ::std::string; + } + string_value_->assign(value); +} +inline void UninterpretedOption::set_string_value(const char* value) { + set_has_string_value(); + if (string_value_ == &::google::protobuf::internal::kEmptyString) { + string_value_ = new ::std::string; + } + string_value_->assign(value); +} +inline void UninterpretedOption::set_string_value(const void* value, size_t size) { + set_has_string_value(); + if (string_value_ == &::google::protobuf::internal::kEmptyString) { + string_value_ = new ::std::string; + } + string_value_->assign(reinterpret_cast(value), size); +} +inline ::std::string* UninterpretedOption::mutable_string_value() { + set_has_string_value(); + if (string_value_ == &::google::protobuf::internal::kEmptyString) { + string_value_ = new ::std::string; + } + return string_value_; +} +inline ::std::string* UninterpretedOption::release_string_value() { + clear_has_string_value(); + if (string_value_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = string_value_; + string_value_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// optional string aggregate_value = 8; +inline bool UninterpretedOption::has_aggregate_value() const { + return (_has_bits_[0] & 0x00000040u) != 0; +} +inline void UninterpretedOption::set_has_aggregate_value() { + _has_bits_[0] |= 0x00000040u; +} +inline void UninterpretedOption::clear_has_aggregate_value() { + _has_bits_[0] &= ~0x00000040u; +} +inline void UninterpretedOption::clear_aggregate_value() { + if (aggregate_value_ != &::google::protobuf::internal::kEmptyString) { + aggregate_value_->clear(); + } + clear_has_aggregate_value(); +} +inline const ::std::string& UninterpretedOption::aggregate_value() const { + return *aggregate_value_; +} +inline void UninterpretedOption::set_aggregate_value(const ::std::string& value) { + set_has_aggregate_value(); + if (aggregate_value_ == &::google::protobuf::internal::kEmptyString) { + aggregate_value_ = new ::std::string; + } + aggregate_value_->assign(value); +} +inline void UninterpretedOption::set_aggregate_value(const char* value) { + set_has_aggregate_value(); + if (aggregate_value_ == &::google::protobuf::internal::kEmptyString) { + aggregate_value_ = new ::std::string; + } + aggregate_value_->assign(value); +} +inline void UninterpretedOption::set_aggregate_value(const char* value, size_t 
size) { + set_has_aggregate_value(); + if (aggregate_value_ == &::google::protobuf::internal::kEmptyString) { + aggregate_value_ = new ::std::string; + } + aggregate_value_->assign(reinterpret_cast(value), size); +} +inline ::std::string* UninterpretedOption::mutable_aggregate_value() { + set_has_aggregate_value(); + if (aggregate_value_ == &::google::protobuf::internal::kEmptyString) { + aggregate_value_ = new ::std::string; + } + return aggregate_value_; +} +inline ::std::string* UninterpretedOption::release_aggregate_value() { + clear_has_aggregate_value(); + if (aggregate_value_ == &::google::protobuf::internal::kEmptyString) { + return NULL; + } else { + ::std::string* temp = aggregate_value_; + aggregate_value_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString); + return temp; + } +} + +// ------------------------------------------------------------------- + +// SourceCodeInfo_Location + +// repeated int32 path = 1 [packed = true]; +inline int SourceCodeInfo_Location::path_size() const { + return path_.size(); +} +inline void SourceCodeInfo_Location::clear_path() { + path_.Clear(); +} +inline ::google::protobuf::int32 SourceCodeInfo_Location::path(int index) const { + return path_.Get(index); +} +inline void SourceCodeInfo_Location::set_path(int index, ::google::protobuf::int32 value) { + path_.Set(index, value); +} +inline void SourceCodeInfo_Location::add_path(::google::protobuf::int32 value) { + path_.Add(value); +} +inline const ::google::protobuf::RepeatedField< ::google::protobuf::int32 >& +SourceCodeInfo_Location::path() const { + return path_; +} +inline ::google::protobuf::RepeatedField< ::google::protobuf::int32 >* +SourceCodeInfo_Location::mutable_path() { + return &path_; +} + +// repeated int32 span = 2 [packed = true]; +inline int SourceCodeInfo_Location::span_size() const { + return span_.size(); +} +inline void SourceCodeInfo_Location::clear_span() { + span_.Clear(); +} +inline ::google::protobuf::int32 SourceCodeInfo_Location::span(int index) const { + return span_.Get(index); +} +inline void SourceCodeInfo_Location::set_span(int index, ::google::protobuf::int32 value) { + span_.Set(index, value); +} +inline void SourceCodeInfo_Location::add_span(::google::protobuf::int32 value) { + span_.Add(value); +} +inline const ::google::protobuf::RepeatedField< ::google::protobuf::int32 >& +SourceCodeInfo_Location::span() const { + return span_; +} +inline ::google::protobuf::RepeatedField< ::google::protobuf::int32 >* +SourceCodeInfo_Location::mutable_span() { + return &span_; +} + +// ------------------------------------------------------------------- + +// SourceCodeInfo + +// repeated .google.protobuf.SourceCodeInfo.Location location = 1; +inline int SourceCodeInfo::location_size() const { + return location_.size(); +} +inline void SourceCodeInfo::clear_location() { + location_.Clear(); +} +inline const ::google::protobuf::SourceCodeInfo_Location& SourceCodeInfo::location(int index) const { + return location_.Get(index); +} +inline ::google::protobuf::SourceCodeInfo_Location* SourceCodeInfo::mutable_location(int index) { + return location_.Mutable(index); +} +inline ::google::protobuf::SourceCodeInfo_Location* SourceCodeInfo::add_location() { + return location_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::google::protobuf::SourceCodeInfo_Location >& +SourceCodeInfo::location() const { + return location_; +} +inline ::google::protobuf::RepeatedPtrField< ::google::protobuf::SourceCodeInfo_Location >* 
+SourceCodeInfo::mutable_location() { + return &location_; +} + + +// @@protoc_insertion_point(namespace_scope) + +} // namespace protobuf +} // namespace google + +#ifndef SWIG +namespace google { +namespace protobuf { + +template <> +inline const EnumDescriptor* GetEnumDescriptor< ::google::protobuf::FieldDescriptorProto_Type>() { + return ::google::protobuf::FieldDescriptorProto_Type_descriptor(); +} +template <> +inline const EnumDescriptor* GetEnumDescriptor< ::google::protobuf::FieldDescriptorProto_Label>() { + return ::google::protobuf::FieldDescriptorProto_Label_descriptor(); +} +template <> +inline const EnumDescriptor* GetEnumDescriptor< ::google::protobuf::FileOptions_OptimizeMode>() { + return ::google::protobuf::FileOptions_OptimizeMode_descriptor(); +} +template <> +inline const EnumDescriptor* GetEnumDescriptor< ::google::protobuf::FieldOptions_CType>() { + return ::google::protobuf::FieldOptions_CType_descriptor(); +} + +} // namespace google +} // namespace protobuf +#endif // SWIG + +// @@protoc_insertion_point(global_scope) + +#endif // PROTOBUF_google_2fprotobuf_2fdescriptor_2eproto__INCLUDED diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor.proto b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor.proto new file mode 100644 index 0000000000..4eeb43b59d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor.proto @@ -0,0 +1,538 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. 
+// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). + + + +package google.protobuf; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DescriptorProtos"; + +// descriptor.proto must be optimized for speed because reflection-based +// algorithms don't work during bootstrapping. +option optimize_for = SPEED; + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +message FileDescriptorSet { + repeated FileDescriptorProto file = 1; +} + +// Describes a complete .proto file. +message FileDescriptorProto { + optional string name = 1; // file name, relative to root of source tree + optional string package = 2; // e.g. "foo", "foo.bar", etc. + + // Names of files imported by this file. + repeated string dependency = 3; + + // All top-level definitions in this file. + repeated DescriptorProto message_type = 4; + repeated EnumDescriptorProto enum_type = 5; + repeated ServiceDescriptorProto service = 6; + repeated FieldDescriptorProto extension = 7; + + optional FileOptions options = 8; + + // This field contains optional information about the original source code. + // You may safely remove this entire field whithout harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + optional SourceCodeInfo source_code_info = 9; +} + +// Describes a message type. +message DescriptorProto { + optional string name = 1; + + repeated FieldDescriptorProto field = 2; + repeated FieldDescriptorProto extension = 6; + + repeated DescriptorProto nested_type = 3; + repeated EnumDescriptorProto enum_type = 4; + + message ExtensionRange { + optional int32 start = 1; + optional int32 end = 2; + } + repeated ExtensionRange extension_range = 5; + + optional MessageOptions options = 7; +} + +// Describes a field within a message. +message FieldDescriptorProto { + enum Type { + // 0 is reserved for errors. + // Order is weird for historical reasons. + TYPE_DOUBLE = 1; + TYPE_FLOAT = 2; + TYPE_INT64 = 3; // Not ZigZag encoded. Negative numbers + // take 10 bytes. Use TYPE_SINT64 if negative + // values are likely. + TYPE_UINT64 = 4; + TYPE_INT32 = 5; // Not ZigZag encoded. Negative numbers + // take 10 bytes. Use TYPE_SINT32 if negative + // values are likely. + TYPE_FIXED64 = 6; + TYPE_FIXED32 = 7; + TYPE_BOOL = 8; + TYPE_STRING = 9; + TYPE_GROUP = 10; // Tag-delimited aggregate. + TYPE_MESSAGE = 11; // Length-delimited aggregate. + + // New in version 2. + TYPE_BYTES = 12; + TYPE_UINT32 = 13; + TYPE_ENUM = 14; + TYPE_SFIXED32 = 15; + TYPE_SFIXED64 = 16; + TYPE_SINT32 = 17; // Uses ZigZag encoding. + TYPE_SINT64 = 18; // Uses ZigZag encoding. + }; + + enum Label { + // 0 is reserved for errors + LABEL_OPTIONAL = 1; + LABEL_REQUIRED = 2; + LABEL_REPEATED = 3; + // TODO(sanjay): Should we add LABEL_MAP? + }; + + optional string name = 1; + optional int32 number = 3; + optional Label label = 4; + + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be either TYPE_ENUM or TYPE_MESSAGE. + optional Type type = 5; + + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). 
+ optional string type_name = 6; + + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + optional string extendee = 2; + + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + // TODO(kenton): Base-64 encode? + optional string default_value = 7; + + optional FieldOptions options = 8; +} + +// Describes an enum type. +message EnumDescriptorProto { + optional string name = 1; + + repeated EnumValueDescriptorProto value = 2; + + optional EnumOptions options = 3; +} + +// Describes a value within an enum. +message EnumValueDescriptorProto { + optional string name = 1; + optional int32 number = 2; + + optional EnumValueOptions options = 3; +} + +// Describes a service. +message ServiceDescriptorProto { + optional string name = 1; + repeated MethodDescriptorProto method = 2; + + optional ServiceOptions options = 3; +} + +// Describes a method of a service. +message MethodDescriptorProto { + optional string name = 1; + + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + optional string input_type = 2; + optional string output_type = 3; + + optional MethodOptions options = 4; +} + +// =================================================================== +// Options + +// Each of the definitions above may have "options" attached. These are +// just annotations which may cause code to be generated slightly differently +// or may contain hints for code that manipulates protocol messages. +// +// Clients may define custom options as extensions of the *Options messages. +// These extensions may not yet be known at parsing time, so the parser cannot +// store the values in them. Instead it stores them in a field in the *Options +// message called uninterpreted_option. This field must have the same name +// across all *Options messages. We then use this field to populate the +// extensions when we build a descriptor, at which point all protos have been +// parsed and so all extensions are known. +// +// Extension numbers for custom options may be chosen as follows: +// * For options which will only be used within a single application or +// organization, or for experimental options, use field numbers 50000 +// through 99999. It is up to you to ensure that you do not use the +// same number for multiple options. +// * For options which will be published and used publicly by multiple +// independent entities, e-mail protobuf-global-extension-registry@google.com +// to reserve extension numbers. Simply provide your project name (e.g. +// Object-C plugin) and your porject website (if available) -- there's no need +// to explain how you intend to use them. Usually you only need one extension +// number. You can declare multiple options with only one extension number by +// putting them in a sub-message. See the Custom Options section of the docs +// for examples: +// http://code.google.com/apis/protocolbuffers/docs/proto.html#options +// If this turns out to be popular, a web service will be set up +// to automatically assign option numbers. + + +message FileOptions { + + // Sets the Java package where classes generated from this .proto will be + // placed. 
By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + optional string java_package = 1; + + + // If set, all the classes from the .proto file are wrapped in a single + // outer class with the given name. This applies to both Proto1 + // (equivalent to the old "--one_java_file" option) and Proto2 (where + // a .proto always translates to a single class, but you may want to + // explicitly choose the class name). + optional string java_outer_classname = 8; + + // If set true, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the outer class + // named by java_outer_classname. However, the outer class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + optional bool java_multiple_files = 10 [default=false]; + + // If set true, then the Java code generator will generate equals() and + // hashCode() methods for all messages defined in the .proto file. This is + // purely a speed optimization, as the AbstractMessage base class includes + // reflection-based implementations of these methods. + optional bool java_generate_equals_and_hash = 20 [default=false]; + + // Generated classes can be optimized for speed or code size. + enum OptimizeMode { + SPEED = 1; // Generate complete code for parsing, serialization, + // etc. + CODE_SIZE = 2; // Use ReflectionOps to implement these methods. + LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. + } + optional OptimizeMode optimize_for = 9 [default=SPEED]; + + + + + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of proto2. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. + optional bool cc_generic_services = 16 [default=false]; + optional bool java_generic_services = 17 [default=false]; + optional bool py_generic_services = 18 [default=false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message MessageOptions { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. 
+ // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + optional bool message_set_wire_format = 1 [default=false]; + + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + optional bool no_standard_descriptor_accessor = 2 [default=false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message FieldOptions { + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is not yet implemented in the open source + // release -- sorry, we'll try to include it in a future version! + optional CType ctype = 1 [default = STRING]; + enum CType { + // Default mode. + STRING = 0; + + CORD = 1; + + STRING_PIECE = 2; + } + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. + optional bool packed = 2; + + + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + optional bool deprecated = 3 [default=false]; + + // EXPERIMENTAL. DO NOT USE. + // For "map" fields, the name of the field in the enclosed type that + // is the key for this map. For example, suppose we have: + // message Item { + // required string name = 1; + // required string value = 2; + // } + // message Config { + // repeated Item items = 1 [experimental_map_key="name"]; + // } + // In this situation, the map key for Item will be set to "name". + // TODO: Fully-implement this, then remove the "experimental_" prefix. + optional string experimental_map_key = 9; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumOptions { + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumValueOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message ServiceOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. 
+ extensions 1000 to max; +} + +message MethodOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. +message UninterpretedOption { + // The name of the uninterpreted option. Each string represents a segment in + // a dot-separated name. is_extension is true iff a segment represents an + // extension (denoted with parentheses in options specs in .proto files). + // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + // "foo.(bar.baz).qux". + message NamePart { + required string name_part = 1; + required bool is_extension = 2; + } + repeated NamePart name = 2; + + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. + optional string identifier_value = 3; + optional uint64 positive_int_value = 4; + optional int64 negative_int_value = 5; + optional double double_value = 6; + optional bytes string_value = 7; + optional string aggregate_value = 8; +} + +// =================================================================== +// Optional source code info + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +message SourceCodeInfo { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. 
+ // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendent. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. + repeated Location location = 1; + message Location { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition. For + // example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + repeated int32 path = 1 [packed=true]; + + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + repeated int32 span = 2 [packed=true]; + + // TODO(kenton): Record comments appearing before and after the + // declaration. + } +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor_database.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor_database.cc new file mode 100644 index 0000000000..23e48a67f5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor_database.cc @@ -0,0 +1,541 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include + +#include + +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { + +DescriptorDatabase::~DescriptorDatabase() {} + +// =================================================================== + +template +bool SimpleDescriptorDatabase::DescriptorIndex::AddFile( + const FileDescriptorProto& file, + Value value) { + if (!InsertIfNotPresent(&by_name_, file.name(), value)) { + GOOGLE_LOG(ERROR) << "File already exists in database: " << file.name(); + return false; + } + + // We must be careful here -- calling file.package() if file.has_package() is + // false could access an uninitialized static-storage variable if we are being + // run at startup time. + string path = file.has_package() ? file.package() : string(); + if (!path.empty()) path += '.'; + + for (int i = 0; i < file.message_type_size(); i++) { + if (!AddSymbol(path + file.message_type(i).name(), value)) return false; + if (!AddNestedExtensions(file.message_type(i), value)) return false; + } + for (int i = 0; i < file.enum_type_size(); i++) { + if (!AddSymbol(path + file.enum_type(i).name(), value)) return false; + } + for (int i = 0; i < file.extension_size(); i++) { + if (!AddSymbol(path + file.extension(i).name(), value)) return false; + if (!AddExtension(file.extension(i), value)) return false; + } + for (int i = 0; i < file.service_size(); i++) { + if (!AddSymbol(path + file.service(i).name(), value)) return false; + } + + return true; +} + +template +bool SimpleDescriptorDatabase::DescriptorIndex::AddSymbol( + const string& name, Value value) { + // We need to make sure not to violate our map invariant. + + // If the symbol name is invalid it could break our lookup algorithm (which + // relies on the fact that '.' sorts before all other characters that are + // valid in symbol names). + if (!ValidateSymbolName(name)) { + GOOGLE_LOG(ERROR) << "Invalid symbol name: " << name; + return false; + } + + // Try to look up the symbol to make sure a super-symbol doesn't already + // exist. + typename map::iterator iter = FindLastLessOrEqual(name); + + if (iter == by_symbol_.end()) { + // Apparently the map is currently empty. Just insert and be done with it. + by_symbol_.insert(typename map::value_type(name, value)); + return true; + } + + if (IsSubSymbol(iter->first, name)) { + GOOGLE_LOG(ERROR) << "Symbol name \"" << name << "\" conflicts with the existing " + "symbol \"" << iter->first << "\"."; + return false; + } + + // OK, that worked. Now we have to make sure that no symbol in the map is + // a sub-symbol of the one we are inserting. 
The only symbol which could + // be so is the first symbol that is greater than the new symbol. Since + // |iter| points at the last symbol that is less than or equal, we just have + // to increment it. + ++iter; + + if (iter != by_symbol_.end() && IsSubSymbol(name, iter->first)) { + GOOGLE_LOG(ERROR) << "Symbol name \"" << name << "\" conflicts with the existing " + "symbol \"" << iter->first << "\"."; + return false; + } + + // OK, no conflicts. + + // Insert the new symbol using the iterator as a hint, the new entry will + // appear immediately before the one the iterator is pointing at. + by_symbol_.insert(iter, typename map::value_type(name, value)); + + return true; +} + +template +bool SimpleDescriptorDatabase::DescriptorIndex::AddNestedExtensions( + const DescriptorProto& message_type, + Value value) { + for (int i = 0; i < message_type.nested_type_size(); i++) { + if (!AddNestedExtensions(message_type.nested_type(i), value)) return false; + } + for (int i = 0; i < message_type.extension_size(); i++) { + if (!AddExtension(message_type.extension(i), value)) return false; + } + return true; +} + +template +bool SimpleDescriptorDatabase::DescriptorIndex::AddExtension( + const FieldDescriptorProto& field, + Value value) { + if (!field.extendee().empty() && field.extendee()[0] == '.') { + // The extension is fully-qualified. We can use it as a lookup key in + // the by_symbol_ table. + if (!InsertIfNotPresent(&by_extension_, + make_pair(field.extendee().substr(1), + field.number()), + value)) { + GOOGLE_LOG(ERROR) << "Extension conflicts with extension already in database: " + "extend " << field.extendee() << " { " + << field.name() << " = " << field.number() << " }"; + return false; + } + } else { + // Not fully-qualified. We can't really do anything here, unfortunately. + // We don't consider this an error, though, because the descriptor is + // valid. + } + return true; +} + +template +Value SimpleDescriptorDatabase::DescriptorIndex::FindFile( + const string& filename) { + return FindWithDefault(by_name_, filename, Value()); +} + +template +Value SimpleDescriptorDatabase::DescriptorIndex::FindSymbol( + const string& name) { + typename map::iterator iter = FindLastLessOrEqual(name); + + return (iter != by_symbol_.end() && IsSubSymbol(iter->first, name)) ? + iter->second : Value(); +} + +template +Value SimpleDescriptorDatabase::DescriptorIndex::FindExtension( + const string& containing_type, + int field_number) { + return FindWithDefault(by_extension_, + make_pair(containing_type, field_number), + Value()); +} + +template +bool SimpleDescriptorDatabase::DescriptorIndex::FindAllExtensionNumbers( + const string& containing_type, + vector* output) { + typename map, Value >::const_iterator it = + by_extension_.lower_bound(make_pair(containing_type, 0)); + bool success = false; + + for (; it != by_extension_.end() && it->first.first == containing_type; + ++it) { + output->push_back(it->first.second); + success = true; + } + + return success; +} + +template +typename map::iterator +SimpleDescriptorDatabase::DescriptorIndex::FindLastLessOrEqual( + const string& name) { + // Find the last key in the map which sorts less than or equal to the + // symbol name. Since upper_bound() returns the *first* key that sorts + // *greater* than the input, we want the element immediately before that. 
+  typename map<string, Value>::iterator iter = by_symbol_.upper_bound(name);
+  if (iter != by_symbol_.begin()) --iter;
+  return iter;
+}
+
+template <typename Value>
+bool SimpleDescriptorDatabase::DescriptorIndex<Value>::IsSubSymbol(
+    const string& sub_symbol, const string& super_symbol) {
+  return sub_symbol == super_symbol ||
+         (HasPrefixString(super_symbol, sub_symbol) &&
+          super_symbol[sub_symbol.size()] == '.');
+}
+
+template <typename Value>
+bool SimpleDescriptorDatabase::DescriptorIndex<Value>::ValidateSymbolName(
+    const string& name) {
+  for (int i = 0; i < name.size(); i++) {
+    // I don't trust ctype.h due to locales. :(
+    if (name[i] != '.' && name[i] != '_' &&
+        (name[i] < '0' || name[i] > '9') &&
+        (name[i] < 'A' || name[i] > 'Z') &&
+        (name[i] < 'a' || name[i] > 'z')) {
+      return false;
+    }
+  }
+  return true;
+}
+
+// -------------------------------------------------------------------
+
+SimpleDescriptorDatabase::SimpleDescriptorDatabase() {}
+SimpleDescriptorDatabase::~SimpleDescriptorDatabase() {
+  STLDeleteElements(&files_to_delete_);
+}
+
+bool SimpleDescriptorDatabase::Add(const FileDescriptorProto& file) {
+  FileDescriptorProto* new_file = new FileDescriptorProto;
+  new_file->CopyFrom(file);
+  return AddAndOwn(new_file);
+}
+
+bool SimpleDescriptorDatabase::AddAndOwn(const FileDescriptorProto* file) {
+  files_to_delete_.push_back(file);
+  return index_.AddFile(*file, file);
+}
+
+bool SimpleDescriptorDatabase::FindFileByName(
+    const string& filename,
+    FileDescriptorProto* output) {
+  return MaybeCopy(index_.FindFile(filename), output);
+}
+
+bool SimpleDescriptorDatabase::FindFileContainingSymbol(
+    const string& symbol_name,
+    FileDescriptorProto* output) {
+  return MaybeCopy(index_.FindSymbol(symbol_name), output);
+}
+
+bool SimpleDescriptorDatabase::FindFileContainingExtension(
+    const string& containing_type,
+    int field_number,
+    FileDescriptorProto* output) {
+  return MaybeCopy(index_.FindExtension(containing_type, field_number), output);
+}
+
+bool SimpleDescriptorDatabase::FindAllExtensionNumbers(
+    const string& extendee_type,
+    vector<int>* output) {
+  return index_.FindAllExtensionNumbers(extendee_type, output);
+}
+
+bool SimpleDescriptorDatabase::MaybeCopy(const FileDescriptorProto* file,
+                                         FileDescriptorProto* output) {
+  if (file == NULL) return false;
+  output->CopyFrom(*file);
+  return true;
+}
+
+// -------------------------------------------------------------------
+
+EncodedDescriptorDatabase::EncodedDescriptorDatabase() {}
+EncodedDescriptorDatabase::~EncodedDescriptorDatabase() {
+  for (int i = 0; i < files_to_delete_.size(); i++) {
+    operator delete(files_to_delete_[i]);
+  }
+}
+
+bool EncodedDescriptorDatabase::Add(
+    const void* encoded_file_descriptor, int size) {
+  FileDescriptorProto file;
+  if (file.ParseFromArray(encoded_file_descriptor, size)) {
+    return index_.AddFile(file, make_pair(encoded_file_descriptor, size));
+  } else {
+    GOOGLE_LOG(ERROR) << "Invalid file descriptor data passed to "
+                         "EncodedDescriptorDatabase::Add().";
+    return false;
+  }
+}
+
+bool EncodedDescriptorDatabase::AddCopy(
+    const void* encoded_file_descriptor, int size) {
+  void* copy = operator new(size);
+  memcpy(copy, encoded_file_descriptor, size);
+  files_to_delete_.push_back(copy);
+  return Add(copy, size);
+}
+
+bool EncodedDescriptorDatabase::FindFileByName(
+    const string& filename,
+    FileDescriptorProto* output) {
+  return MaybeParse(index_.FindFile(filename), output);
+}
+
+bool EncodedDescriptorDatabase::FindFileContainingSymbol(
+    const string& symbol_name,
+    FileDescriptorProto* output) {
+  return
MaybeParse(index_.FindSymbol(symbol_name), output); +} + +bool EncodedDescriptorDatabase::FindNameOfFileContainingSymbol( + const string& symbol_name, + string* output) { + pair encoded_file = index_.FindSymbol(symbol_name); + if (encoded_file.first == NULL) return false; + + // Optimization: The name should be the first field in the encoded message. + // Try to just read it directly. + io::CodedInputStream input(reinterpret_cast(encoded_file.first), + encoded_file.second); + + const uint32 kNameTag = internal::WireFormatLite::MakeTag( + FileDescriptorProto::kNameFieldNumber, + internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED); + + if (input.ReadTag() == kNameTag) { + // Success! + return internal::WireFormatLite::ReadString(&input, output); + } else { + // Slow path. Parse whole message. + FileDescriptorProto file_proto; + if (!file_proto.ParseFromArray(encoded_file.first, encoded_file.second)) { + return false; + } + *output = file_proto.name(); + return true; + } +} + +bool EncodedDescriptorDatabase::FindFileContainingExtension( + const string& containing_type, + int field_number, + FileDescriptorProto* output) { + return MaybeParse(index_.FindExtension(containing_type, field_number), + output); +} + +bool EncodedDescriptorDatabase::FindAllExtensionNumbers( + const string& extendee_type, + vector* output) { + return index_.FindAllExtensionNumbers(extendee_type, output); +} + +bool EncodedDescriptorDatabase::MaybeParse( + pair encoded_file, + FileDescriptorProto* output) { + if (encoded_file.first == NULL) return false; + return output->ParseFromArray(encoded_file.first, encoded_file.second); +} + +// =================================================================== + +DescriptorPoolDatabase::DescriptorPoolDatabase(const DescriptorPool& pool) + : pool_(pool) {} +DescriptorPoolDatabase::~DescriptorPoolDatabase() {} + +bool DescriptorPoolDatabase::FindFileByName( + const string& filename, + FileDescriptorProto* output) { + const FileDescriptor* file = pool_.FindFileByName(filename); + if (file == NULL) return false; + output->Clear(); + file->CopyTo(output); + return true; +} + +bool DescriptorPoolDatabase::FindFileContainingSymbol( + const string& symbol_name, + FileDescriptorProto* output) { + const FileDescriptor* file = pool_.FindFileContainingSymbol(symbol_name); + if (file == NULL) return false; + output->Clear(); + file->CopyTo(output); + return true; +} + +bool DescriptorPoolDatabase::FindFileContainingExtension( + const string& containing_type, + int field_number, + FileDescriptorProto* output) { + const Descriptor* extendee = pool_.FindMessageTypeByName(containing_type); + if (extendee == NULL) return false; + + const FieldDescriptor* extension = + pool_.FindExtensionByNumber(extendee, field_number); + if (extension == NULL) return false; + + output->Clear(); + extension->file()->CopyTo(output); + return true; +} + +bool DescriptorPoolDatabase::FindAllExtensionNumbers( + const string& extendee_type, + vector* output) { + const Descriptor* extendee = pool_.FindMessageTypeByName(extendee_type); + if (extendee == NULL) return false; + + vector extensions; + pool_.FindAllExtensions(extendee, &extensions); + + for (int i = 0; i < extensions.size(); ++i) { + output->push_back(extensions[i]->number()); + } + + return true; +} + +// =================================================================== + +MergedDescriptorDatabase::MergedDescriptorDatabase( + DescriptorDatabase* source1, + DescriptorDatabase* source2) { + sources_.push_back(source1); + sources_.push_back(source2); +} 
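Editor's note: the database classes implemented above are easiest to follow from the caller's side. The sketch below is an illustration, not part of the vendored upstream sources; it assumes only the protobuf 2.x API declared in descriptor_database.h later in this diff. Files are registered as FileDescriptorProto messages in a SimpleDescriptorDatabase, and a DescriptorPool constructed on top of it builds full descriptors lazily, the first time a symbol is requested.

#include <google/protobuf/descriptor.h>
#include <google/protobuf/descriptor.pb.h>
#include <google/protobuf/descriptor_database.h>
#include <google/protobuf/text_format.h>

int main() {
  // Describe a tiny .proto file in FileDescriptorProto form.
  google::protobuf::FileDescriptorProto file_proto;
  google::protobuf::TextFormat::ParseFromString(
      "name: \"foo.proto\" "
      "message_type { name: \"Foo\" }",
      &file_proto);

  // Register the file; only its name and symbol names are indexed here.
  google::protobuf::SimpleDescriptorDatabase database;
  database.Add(file_proto);

  // The pool consults the database lazily: "Foo" is built on first request.
  google::protobuf::DescriptorPool pool(&database);
  const google::protobuf::Descriptor* foo = pool.FindMessageTypeByName("Foo");
  return foo == NULL ? 1 : 0;
}

The same pattern scales to large descriptor sets: nothing is parsed or cross-linked until a lookup actually needs it, which is the motivation spelled out in the DescriptorDatabase interface comment below.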
+MergedDescriptorDatabase::MergedDescriptorDatabase( + const vector& sources) + : sources_(sources) {} +MergedDescriptorDatabase::~MergedDescriptorDatabase() {} + +bool MergedDescriptorDatabase::FindFileByName( + const string& filename, + FileDescriptorProto* output) { + for (int i = 0; i < sources_.size(); i++) { + if (sources_[i]->FindFileByName(filename, output)) { + return true; + } + } + return false; +} + +bool MergedDescriptorDatabase::FindFileContainingSymbol( + const string& symbol_name, + FileDescriptorProto* output) { + for (int i = 0; i < sources_.size(); i++) { + if (sources_[i]->FindFileContainingSymbol(symbol_name, output)) { + // The symbol was found in source i. However, if one of the previous + // sources defines a file with the same name (which presumably doesn't + // contain the symbol, since it wasn't found in that source), then we + // must hide it from the caller. + FileDescriptorProto temp; + for (int j = 0; j < i; j++) { + if (sources_[j]->FindFileByName(output->name(), &temp)) { + // Found conflicting file in a previous source. + return false; + } + } + return true; + } + } + return false; +} + +bool MergedDescriptorDatabase::FindFileContainingExtension( + const string& containing_type, + int field_number, + FileDescriptorProto* output) { + for (int i = 0; i < sources_.size(); i++) { + if (sources_[i]->FindFileContainingExtension( + containing_type, field_number, output)) { + // The symbol was found in source i. However, if one of the previous + // sources defines a file with the same name (which presumably doesn't + // contain the symbol, since it wasn't found in that source), then we + // must hide it from the caller. + FileDescriptorProto temp; + for (int j = 0; j < i; j++) { + if (sources_[j]->FindFileByName(output->name(), &temp)) { + // Found conflicting file in a previous source. + return false; + } + } + return true; + } + } + return false; +} + +bool MergedDescriptorDatabase::FindAllExtensionNumbers( + const string& extendee_type, + vector* output) { + set merged_results; + vector results; + bool success = false; + + for (int i = 0; i < sources_.size(); i++) { + if (sources_[i]->FindAllExtensionNumbers(extendee_type, &results)) { + copy(results.begin(), results.end(), + insert_iterator >(merged_results, merged_results.begin())); + success = true; + } + results.clear(); + } + + copy(merged_results.begin(), merged_results.end(), + insert_iterator >(*output, output->end())); + + return success; +} + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor_database.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor_database.h new file mode 100644 index 0000000000..f32b1db935 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor_database.h @@ -0,0 +1,366 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Interface for manipulating databases of descriptors. + +#ifndef GOOGLE_PROTOBUF_DESCRIPTOR_DATABASE_H__ +#define GOOGLE_PROTOBUF_DESCRIPTOR_DATABASE_H__ + +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { + +// Defined in this file. +class DescriptorDatabase; +class SimpleDescriptorDatabase; +class EncodedDescriptorDatabase; +class DescriptorPoolDatabase; +class MergedDescriptorDatabase; + +// Abstract interface for a database of descriptors. +// +// This is useful if you want to create a DescriptorPool which loads +// descriptors on-demand from some sort of large database. If the database +// is large, it may be inefficient to enumerate every .proto file inside it +// calling DescriptorPool::BuildFile() for each one. Instead, a DescriptorPool +// can be created which wraps a DescriptorDatabase and only builds particular +// descriptors when they are needed. +class LIBPROTOBUF_EXPORT DescriptorDatabase { + public: + inline DescriptorDatabase() {} + virtual ~DescriptorDatabase(); + + // Find a file by file name. Fills in in *output and returns true if found. + // Otherwise, returns false, leaving the contents of *output undefined. + virtual bool FindFileByName(const string& filename, + FileDescriptorProto* output) = 0; + + // Find the file that declares the given fully-qualified symbol name. + // If found, fills in *output and returns true, otherwise returns false + // and leaves *output undefined. + virtual bool FindFileContainingSymbol(const string& symbol_name, + FileDescriptorProto* output) = 0; + + // Find the file which defines an extension extending the given message type + // with the given field number. If found, fills in *output and returns true, + // otherwise returns false and leaves *output undefined. containing_type + // must be a fully-qualified type name. + virtual bool FindFileContainingExtension(const string& containing_type, + int field_number, + FileDescriptorProto* output) = 0; + + // Finds the tag numbers used by all known extensions of + // extendee_type, and appends them to output in an undefined + // order. 
This method is best-effort: it's not guaranteed that the + // database will find all extensions, and it's not guaranteed that + // FindFileContainingExtension will return true on all of the found + // numbers. Returns true if the search was successful, otherwise + // returns false and leaves output unchanged. + // + // This method has a default implementation that always returns + // false. + virtual bool FindAllExtensionNumbers(const string& extendee_type, + vector* output) { + return false; + } + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(DescriptorDatabase); +}; + +// A DescriptorDatabase into which you can insert files manually. +// +// FindFileContainingSymbol() is fully-implemented. When you add a file, its +// symbols will be indexed for this purpose. Note that the implementation +// may return false positives, but only if it isn't possible for the symbol +// to be defined in any other file. In particular, if a file defines a symbol +// "Foo", then searching for "Foo.[anything]" will match that file. This way, +// the database does not need to aggressively index all children of a symbol. +// +// FindFileContainingExtension() is mostly-implemented. It works if and only +// if the original FieldDescriptorProto defining the extension has a +// fully-qualified type name in its "extendee" field (i.e. starts with a '.'). +// If the extendee is a relative name, SimpleDescriptorDatabase will not +// attempt to resolve the type, so it will not know what type the extension is +// extending. Therefore, calling FindFileContainingExtension() with the +// extension's containing type will never actually find that extension. Note +// that this is an unlikely problem, as all FileDescriptorProtos created by the +// protocol compiler (as well as ones created by calling +// FileDescriptor::CopyTo()) will always use fully-qualified names for all +// types. You only need to worry if you are constructing FileDescriptorProtos +// yourself, or are calling compiler::Parser directly. +class LIBPROTOBUF_EXPORT SimpleDescriptorDatabase : public DescriptorDatabase { + public: + SimpleDescriptorDatabase(); + ~SimpleDescriptorDatabase(); + + // Adds the FileDescriptorProto to the database, making a copy. The object + // can be deleted after Add() returns. Returns false if the file conflicted + // with a file already in the database, in which case an error will have + // been written to GOOGLE_LOG(ERROR). + bool Add(const FileDescriptorProto& file); + + // Adds the FileDescriptorProto to the database and takes ownership of it. + bool AddAndOwn(const FileDescriptorProto* file); + + // implements DescriptorDatabase ----------------------------------- + bool FindFileByName(const string& filename, + FileDescriptorProto* output); + bool FindFileContainingSymbol(const string& symbol_name, + FileDescriptorProto* output); + bool FindFileContainingExtension(const string& containing_type, + int field_number, + FileDescriptorProto* output); + bool FindAllExtensionNumbers(const string& extendee_type, + vector* output); + + private: + // So that it can use DescriptorIndex. + friend class EncodedDescriptorDatabase; + + // An index mapping file names, symbol names, and extension numbers to + // some sort of values. + template + class DescriptorIndex { + public: + // Helpers to recursively add particular descriptors and all their contents + // to the index. 
+ bool AddFile(const FileDescriptorProto& file, + Value value); + bool AddSymbol(const string& name, Value value); + bool AddNestedExtensions(const DescriptorProto& message_type, + Value value); + bool AddExtension(const FieldDescriptorProto& field, + Value value); + + Value FindFile(const string& filename); + Value FindSymbol(const string& name); + Value FindExtension(const string& containing_type, int field_number); + bool FindAllExtensionNumbers(const string& containing_type, + vector* output); + + private: + map by_name_; + map by_symbol_; + map, Value> by_extension_; + + // Invariant: The by_symbol_ map does not contain any symbols which are + // prefixes of other symbols in the map. For example, "foo.bar" is a + // prefix of "foo.bar.baz" (but is not a prefix of "foo.barbaz"). + // + // This invariant is important because it means that given a symbol name, + // we can find a key in the map which is a prefix of the symbol in O(lg n) + // time, and we know that there is at most one such key. + // + // The prefix lookup algorithm works like so: + // 1) Find the last key in the map which is less than or equal to the + // search key. + // 2) If the found key is a prefix of the search key, then return it. + // Otherwise, there is no match. + // + // I am sure this algorithm has been described elsewhere, but since I + // wasn't able to find it quickly I will instead prove that it works + // myself. The key to the algorithm is that if a match exists, step (1) + // will find it. Proof: + // 1) Define the "search key" to be the key we are looking for, the "found + // key" to be the key found in step (1), and the "match key" to be the + // key which actually matches the serach key (i.e. the key we're trying + // to find). + // 2) The found key must be less than or equal to the search key by + // definition. + // 3) The match key must also be less than or equal to the search key + // (because it is a prefix). + // 4) The match key cannot be greater than the found key, because if it + // were, then step (1) of the algorithm would have returned the match + // key instead (since it finds the *greatest* key which is less than or + // equal to the search key). + // 5) Therefore, the found key must be between the match key and the search + // key, inclusive. + // 6) Since the search key must be a sub-symbol of the match key, if it is + // not equal to the match key, then search_key[match_key.size()] must + // be '.'. + // 7) Since '.' sorts before any other character that is valid in a symbol + // name, then if the found key is not equal to the match key, then + // found_key[match_key.size()] must also be '.', because any other value + // would make it sort after the search key. + // 8) Therefore, if the found key is not equal to the match key, then the + // found key must be a sub-symbol of the match key. However, this would + // contradict our map invariant which says that no symbol in the map is + // a sub-symbol of any other. + // 9) Therefore, the found key must match the match key. + // + // The above proof assumes the match key exists. In the case that the + // match key does not exist, then step (1) will return some other symbol. + // That symbol cannot be a super-symbol of the search key since if it were, + // then it would be a match, and we're assuming the match key doesn't exist. + // Therefore, step 2 will correctly return no match. + + // Find the last entry in the by_symbol_ map whose key is less than or + // equal to the given name. 
+ typename map::iterator FindLastLessOrEqual( + const string& name); + + // True if either the arguments are equal or super_symbol identifies a + // parent symbol of sub_symbol (e.g. "foo.bar" is a parent of + // "foo.bar.baz", but not a parent of "foo.barbaz"). + bool IsSubSymbol(const string& sub_symbol, const string& super_symbol); + + // Returns true if and only if all characters in the name are alphanumerics, + // underscores, or periods. + bool ValidateSymbolName(const string& name); + }; + + + DescriptorIndex index_; + vector files_to_delete_; + + // If file is non-NULL, copy it into *output and return true, otherwise + // return false. + bool MaybeCopy(const FileDescriptorProto* file, + FileDescriptorProto* output); + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(SimpleDescriptorDatabase); +}; + +// Very similar to SimpleDescriptorDatabase, but stores all the descriptors +// as raw bytes and generally tries to use as little memory as possible. +// +// The same caveats regarding FindFileContainingExtension() apply as with +// SimpleDescriptorDatabase. +class LIBPROTOBUF_EXPORT EncodedDescriptorDatabase : public DescriptorDatabase { + public: + EncodedDescriptorDatabase(); + ~EncodedDescriptorDatabase(); + + // Adds the FileDescriptorProto to the database. The descriptor is provided + // in encoded form. The database does not make a copy of the bytes, nor + // does it take ownership; it's up to the caller to make sure the bytes + // remain valid for the life of the database. Returns false and logs an error + // if the bytes are not a valid FileDescriptorProto or if the file conflicted + // with a file already in the database. + bool Add(const void* encoded_file_descriptor, int size); + + // Like Add(), but makes a copy of the data, so that the caller does not + // need to keep it around. + bool AddCopy(const void* encoded_file_descriptor, int size); + + // Like FindFileContainingSymbol but returns only the name of the file. + bool FindNameOfFileContainingSymbol(const string& symbol_name, + string* output); + + // implements DescriptorDatabase ----------------------------------- + bool FindFileByName(const string& filename, + FileDescriptorProto* output); + bool FindFileContainingSymbol(const string& symbol_name, + FileDescriptorProto* output); + bool FindFileContainingExtension(const string& containing_type, + int field_number, + FileDescriptorProto* output); + bool FindAllExtensionNumbers(const string& extendee_type, + vector* output); + + private: + SimpleDescriptorDatabase::DescriptorIndex > index_; + vector files_to_delete_; + + // If encoded_file.first is non-NULL, parse the data into *output and return + // true, otherwise return false. + bool MaybeParse(pair encoded_file, + FileDescriptorProto* output); + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(EncodedDescriptorDatabase); +}; + +// A DescriptorDatabase that fetches files from a given pool. 
+class LIBPROTOBUF_EXPORT DescriptorPoolDatabase : public DescriptorDatabase { + public: + DescriptorPoolDatabase(const DescriptorPool& pool); + ~DescriptorPoolDatabase(); + + // implements DescriptorDatabase ----------------------------------- + bool FindFileByName(const string& filename, + FileDescriptorProto* output); + bool FindFileContainingSymbol(const string& symbol_name, + FileDescriptorProto* output); + bool FindFileContainingExtension(const string& containing_type, + int field_number, + FileDescriptorProto* output); + bool FindAllExtensionNumbers(const string& extendee_type, + vector* output); + + private: + const DescriptorPool& pool_; + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(DescriptorPoolDatabase); +}; + +// A DescriptorDatabase that wraps two or more others. It first searches the +// first database and, if that fails, tries the second, and so on. +class LIBPROTOBUF_EXPORT MergedDescriptorDatabase : public DescriptorDatabase { + public: + // Merge just two databases. The sources remain property of the caller. + MergedDescriptorDatabase(DescriptorDatabase* source1, + DescriptorDatabase* source2); + // Merge more than two databases. The sources remain property of the caller. + // The vector may be deleted after the constructor returns but the + // DescriptorDatabases need to stick around. + MergedDescriptorDatabase(const vector& sources); + ~MergedDescriptorDatabase(); + + // implements DescriptorDatabase ----------------------------------- + bool FindFileByName(const string& filename, + FileDescriptorProto* output); + bool FindFileContainingSymbol(const string& symbol_name, + FileDescriptorProto* output); + bool FindFileContainingExtension(const string& containing_type, + int field_number, + FileDescriptorProto* output); + // Merges the results of calling all databases. Returns true iff any + // of the databases returned true. + bool FindAllExtensionNumbers(const string& extendee_type, + vector* output); + + private: + vector sources_; + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(MergedDescriptorDatabase); +}; + +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_DESCRIPTOR_DATABASE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor_database_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor_database_unittest.cc new file mode 100644 index 0000000000..ac72ddcdbe --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor_database_unittest.cc @@ -0,0 +1,748 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This file makes extensive use of RFC 3092. :) + +#include + +#include +#include +#include +#include +#include + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace { + +static void AddToDatabase(SimpleDescriptorDatabase* database, + const char* file_text) { + FileDescriptorProto file_proto; + EXPECT_TRUE(TextFormat::ParseFromString(file_text, &file_proto)); + database->Add(file_proto); +} + +static void ExpectContainsType(const FileDescriptorProto& proto, + const string& type_name) { + for (int i = 0; i < proto.message_type_size(); i++) { + if (proto.message_type(i).name() == type_name) return; + } + ADD_FAILURE() << "\"" << proto.name() + << "\" did not contain expected type \"" + << type_name << "\"."; +} + +// =================================================================== + +#if GTEST_HAS_PARAM_TEST + +// SimpleDescriptorDatabase, EncodedDescriptorDatabase, and +// DescriptorPoolDatabase call for very similar tests. Instead of writing +// three nearly-identical sets of tests, we use parameterized tests to apply +// the same code to all three. + +// The parameterized test runs against a DescriptarDatabaseTestCase. We have +// implementations for each of the three classes we want to test. +class DescriptorDatabaseTestCase { + public: + virtual ~DescriptorDatabaseTestCase() {} + + virtual DescriptorDatabase* GetDatabase() = 0; + virtual bool AddToDatabase(const FileDescriptorProto& file) = 0; +}; + +// Factory function type. +typedef DescriptorDatabaseTestCase* DescriptorDatabaseTestCaseFactory(); + +// Specialization for SimpleDescriptorDatabase. +class SimpleDescriptorDatabaseTestCase : public DescriptorDatabaseTestCase { + public: + static DescriptorDatabaseTestCase* New() { + return new SimpleDescriptorDatabaseTestCase; + } + + virtual ~SimpleDescriptorDatabaseTestCase() {} + + virtual DescriptorDatabase* GetDatabase() { + return &database_; + } + virtual bool AddToDatabase(const FileDescriptorProto& file) { + return database_.Add(file); + } + + private: + SimpleDescriptorDatabase database_; +}; + +// Specialization for EncodedDescriptorDatabase. 
+class EncodedDescriptorDatabaseTestCase : public DescriptorDatabaseTestCase { + public: + static DescriptorDatabaseTestCase* New() { + return new EncodedDescriptorDatabaseTestCase; + } + + virtual ~EncodedDescriptorDatabaseTestCase() {} + + virtual DescriptorDatabase* GetDatabase() { + return &database_; + } + virtual bool AddToDatabase(const FileDescriptorProto& file) { + string data; + file.SerializeToString(&data); + return database_.AddCopy(data.data(), data.size()); + } + + private: + EncodedDescriptorDatabase database_; +}; + +// Specialization for DescriptorPoolDatabase. +class DescriptorPoolDatabaseTestCase : public DescriptorDatabaseTestCase { + public: + static DescriptorDatabaseTestCase* New() { + return new EncodedDescriptorDatabaseTestCase; + } + + DescriptorPoolDatabaseTestCase() : database_(pool_) {} + virtual ~DescriptorPoolDatabaseTestCase() {} + + virtual DescriptorDatabase* GetDatabase() { + return &database_; + } + virtual bool AddToDatabase(const FileDescriptorProto& file) { + return pool_.BuildFile(file); + } + + private: + DescriptorPool pool_; + DescriptorPoolDatabase database_; +}; + +// ------------------------------------------------------------------- + +class DescriptorDatabaseTest + : public testing::TestWithParam { + protected: + virtual void SetUp() { + test_case_.reset(GetParam()()); + database_ = test_case_->GetDatabase(); + } + + void AddToDatabase(const char* file_descriptor_text) { + FileDescriptorProto file_proto; + EXPECT_TRUE(TextFormat::ParseFromString(file_descriptor_text, &file_proto)); + EXPECT_TRUE(test_case_->AddToDatabase(file_proto)); + } + + void AddToDatabaseWithError(const char* file_descriptor_text) { + FileDescriptorProto file_proto; + EXPECT_TRUE(TextFormat::ParseFromString(file_descriptor_text, &file_proto)); + EXPECT_FALSE(test_case_->AddToDatabase(file_proto)); + } + + scoped_ptr test_case_; + DescriptorDatabase* database_; +}; + +TEST_P(DescriptorDatabaseTest, FindFileByName) { + AddToDatabase( + "name: \"foo.proto\" " + "message_type { name:\"Foo\" }"); + AddToDatabase( + "name: \"bar.proto\" " + "message_type { name:\"Bar\" }"); + + { + FileDescriptorProto file; + EXPECT_TRUE(database_->FindFileByName("foo.proto", &file)); + EXPECT_EQ("foo.proto", file.name()); + ExpectContainsType(file, "Foo"); + } + + { + FileDescriptorProto file; + EXPECT_TRUE(database_->FindFileByName("bar.proto", &file)); + EXPECT_EQ("bar.proto", file.name()); + ExpectContainsType(file, "Bar"); + } + + { + // Fails to find undefined files. + FileDescriptorProto file; + EXPECT_FALSE(database_->FindFileByName("baz.proto", &file)); + } +} + +TEST_P(DescriptorDatabaseTest, FindFileContainingSymbol) { + AddToDatabase( + "name: \"foo.proto\" " + "message_type { " + " name: \"Foo\" " + " field { name:\"qux\" }" + " nested_type { name: \"Grault\" } " + " enum_type { name: \"Garply\" } " + "} " + "enum_type { " + " name: \"Waldo\" " + " value { name:\"FRED\" } " + "} " + "extension { name: \"plugh\" } " + "service { " + " name: \"Xyzzy\" " + " method { name: \"Thud\" } " + "}" + ); + AddToDatabase( + "name: \"bar.proto\" " + "package: \"corge\" " + "message_type { name: \"Bar\" }"); + + { + FileDescriptorProto file; + EXPECT_TRUE(database_->FindFileContainingSymbol("Foo", &file)); + EXPECT_EQ("foo.proto", file.name()); + } + + { + // Can find fields. + FileDescriptorProto file; + EXPECT_TRUE(database_->FindFileContainingSymbol("Foo.qux", &file)); + EXPECT_EQ("foo.proto", file.name()); + } + + { + // Can find nested types. 
+ FileDescriptorProto file; + EXPECT_TRUE(database_->FindFileContainingSymbol("Foo.Grault", &file)); + EXPECT_EQ("foo.proto", file.name()); + } + + { + // Can find nested enums. + FileDescriptorProto file; + EXPECT_TRUE(database_->FindFileContainingSymbol("Foo.Garply", &file)); + EXPECT_EQ("foo.proto", file.name()); + } + + { + // Can find enum types. + FileDescriptorProto file; + EXPECT_TRUE(database_->FindFileContainingSymbol("Waldo", &file)); + EXPECT_EQ("foo.proto", file.name()); + } + + { + // Can find enum values. + FileDescriptorProto file; + EXPECT_TRUE(database_->FindFileContainingSymbol("Waldo.FRED", &file)); + EXPECT_EQ("foo.proto", file.name()); + } + + { + // Can find extensions. + FileDescriptorProto file; + EXPECT_TRUE(database_->FindFileContainingSymbol("plugh", &file)); + EXPECT_EQ("foo.proto", file.name()); + } + + { + // Can find services. + FileDescriptorProto file; + EXPECT_TRUE(database_->FindFileContainingSymbol("Xyzzy", &file)); + EXPECT_EQ("foo.proto", file.name()); + } + + { + // Can find methods. + FileDescriptorProto file; + EXPECT_TRUE(database_->FindFileContainingSymbol("Xyzzy.Thud", &file)); + EXPECT_EQ("foo.proto", file.name()); + } + + { + // Can find things in packages. + FileDescriptorProto file; + EXPECT_TRUE(database_->FindFileContainingSymbol("corge.Bar", &file)); + EXPECT_EQ("bar.proto", file.name()); + } + + { + // Fails to find undefined symbols. + FileDescriptorProto file; + EXPECT_FALSE(database_->FindFileContainingSymbol("Baz", &file)); + } + + { + // Names must be fully-qualified. + FileDescriptorProto file; + EXPECT_FALSE(database_->FindFileContainingSymbol("Bar", &file)); + } +} + +TEST_P(DescriptorDatabaseTest, FindFileContainingExtension) { + AddToDatabase( + "name: \"foo.proto\" " + "message_type { " + " name: \"Foo\" " + " extension_range { start: 1 end: 1000 } " + " extension { name:\"qux\" label:LABEL_OPTIONAL type:TYPE_INT32 number:5 " + " extendee: \".Foo\" }" + "}"); + AddToDatabase( + "name: \"bar.proto\" " + "package: \"corge\" " + "dependency: \"foo.proto\" " + "message_type { " + " name: \"Bar\" " + " extension_range { start: 1 end: 1000 } " + "} " + "extension { name:\"grault\" extendee: \".Foo\" number:32 } " + "extension { name:\"garply\" extendee: \".corge.Bar\" number:70 } " + "extension { name:\"waldo\" extendee: \"Bar\" number:56 } "); + + { + FileDescriptorProto file; + EXPECT_TRUE(database_->FindFileContainingExtension("Foo", 5, &file)); + EXPECT_EQ("foo.proto", file.name()); + } + + { + FileDescriptorProto file; + EXPECT_TRUE(database_->FindFileContainingExtension("Foo", 32, &file)); + EXPECT_EQ("bar.proto", file.name()); + } + + { + // Can find extensions for qualified type names. + FileDescriptorProto file; + EXPECT_TRUE(database_->FindFileContainingExtension("corge.Bar", 70, &file)); + EXPECT_EQ("bar.proto", file.name()); + } + + { + // Can't find extensions whose extendee was not fully-qualified in the + // FileDescriptorProto. + FileDescriptorProto file; + EXPECT_FALSE(database_->FindFileContainingExtension("Bar", 56, &file)); + EXPECT_FALSE( + database_->FindFileContainingExtension("corge.Bar", 56, &file)); + } + + { + // Can't find non-existent extension numbers. + FileDescriptorProto file; + EXPECT_FALSE(database_->FindFileContainingExtension("Foo", 12, &file)); + } + + { + // Can't find extensions for non-existent types. + FileDescriptorProto file; + EXPECT_FALSE( + database_->FindFileContainingExtension("NoSuchType", 5, &file)); + } + + { + // Can't find extensions for unqualified type names. 
+ FileDescriptorProto file; + EXPECT_FALSE(database_->FindFileContainingExtension("Bar", 70, &file)); + } +} + +TEST_P(DescriptorDatabaseTest, FindAllExtensionNumbers) { + AddToDatabase( + "name: \"foo.proto\" " + "message_type { " + " name: \"Foo\" " + " extension_range { start: 1 end: 1000 } " + " extension { name:\"qux\" label:LABEL_OPTIONAL type:TYPE_INT32 number:5 " + " extendee: \".Foo\" }" + "}"); + AddToDatabase( + "name: \"bar.proto\" " + "package: \"corge\" " + "dependency: \"foo.proto\" " + "message_type { " + " name: \"Bar\" " + " extension_range { start: 1 end: 1000 } " + "} " + "extension { name:\"grault\" extendee: \".Foo\" number:32 } " + "extension { name:\"garply\" extendee: \".corge.Bar\" number:70 } " + "extension { name:\"waldo\" extendee: \"Bar\" number:56 } "); + + { + vector numbers; + EXPECT_TRUE(database_->FindAllExtensionNumbers("Foo", &numbers)); + ASSERT_EQ(2, numbers.size()); + sort(numbers.begin(), numbers.end()); + EXPECT_EQ(5, numbers[0]); + EXPECT_EQ(32, numbers[1]); + } + + { + vector numbers; + EXPECT_TRUE(database_->FindAllExtensionNumbers("corge.Bar", &numbers)); + // Note: won't find extension 56 due to the name not being fully qualified. + ASSERT_EQ(1, numbers.size()); + EXPECT_EQ(70, numbers[0]); + } + + { + // Can't find extensions for non-existent types. + vector numbers; + EXPECT_FALSE(database_->FindAllExtensionNumbers("NoSuchType", &numbers)); + } + + { + // Can't find extensions for unqualified types. + vector numbers; + EXPECT_FALSE(database_->FindAllExtensionNumbers("Bar", &numbers)); + } +} + +TEST_P(DescriptorDatabaseTest, ConflictingFileError) { + AddToDatabase( + "name: \"foo.proto\" " + "message_type { " + " name: \"Foo\" " + "}"); + AddToDatabaseWithError( + "name: \"foo.proto\" " + "message_type { " + " name: \"Bar\" " + "}"); +} + +TEST_P(DescriptorDatabaseTest, ConflictingTypeError) { + AddToDatabase( + "name: \"foo.proto\" " + "message_type { " + " name: \"Foo\" " + "}"); + AddToDatabaseWithError( + "name: \"bar.proto\" " + "message_type { " + " name: \"Foo\" " + "}"); +} + +TEST_P(DescriptorDatabaseTest, ConflictingExtensionError) { + AddToDatabase( + "name: \"foo.proto\" " + "extension { name:\"foo\" label:LABEL_OPTIONAL type:TYPE_INT32 number:5 " + " extendee: \".Foo\" }"); + AddToDatabaseWithError( + "name: \"bar.proto\" " + "extension { name:\"bar\" label:LABEL_OPTIONAL type:TYPE_INT32 number:5 " + " extendee: \".Foo\" }"); +} + +INSTANTIATE_TEST_CASE_P(Simple, DescriptorDatabaseTest, + testing::Values(&SimpleDescriptorDatabaseTestCase::New)); +INSTANTIATE_TEST_CASE_P(MemoryConserving, DescriptorDatabaseTest, + testing::Values(&EncodedDescriptorDatabaseTestCase::New)); +INSTANTIATE_TEST_CASE_P(Pool, DescriptorDatabaseTest, + testing::Values(&DescriptorPoolDatabaseTestCase::New)); + +#endif // GTEST_HAS_PARAM_TEST + +TEST(EncodedDescriptorDatabaseExtraTest, FindNameOfFileContainingSymbol) { + // Create two files, one of which is in two parts. + FileDescriptorProto file1, file2a, file2b; + file1.set_name("foo.proto"); + file1.set_package("foo"); + file1.add_message_type()->set_name("Foo"); + file2a.set_name("bar.proto"); + file2b.set_package("bar"); + file2b.add_message_type()->set_name("Bar"); + + // Normal serialization allows our optimization to kick in. + string data1 = file1.SerializeAsString(); + + // Force out-of-order serialization to test slow path. + string data2 = file2b.SerializeAsString() + file2a.SerializeAsString(); + + // Create EncodedDescriptorDatabase containing both files. 
+ EncodedDescriptorDatabase db; + db.Add(data1.data(), data1.size()); + db.Add(data2.data(), data2.size()); + + // Test! + string filename; + EXPECT_TRUE(db.FindNameOfFileContainingSymbol("foo.Foo", &filename)); + EXPECT_EQ("foo.proto", filename); + EXPECT_TRUE(db.FindNameOfFileContainingSymbol("foo.Foo.Blah", &filename)); + EXPECT_EQ("foo.proto", filename); + EXPECT_TRUE(db.FindNameOfFileContainingSymbol("bar.Bar", &filename)); + EXPECT_EQ("bar.proto", filename); + EXPECT_FALSE(db.FindNameOfFileContainingSymbol("foo", &filename)); + EXPECT_FALSE(db.FindNameOfFileContainingSymbol("bar", &filename)); + EXPECT_FALSE(db.FindNameOfFileContainingSymbol("baz.Baz", &filename)); +} + +// =================================================================== + +class MergedDescriptorDatabaseTest : public testing::Test { + protected: + MergedDescriptorDatabaseTest() + : forward_merged_(&database1_, &database2_), + reverse_merged_(&database2_, &database1_) {} + + virtual void SetUp() { + AddToDatabase(&database1_, + "name: \"foo.proto\" " + "message_type { name:\"Foo\" extension_range { start: 1 end: 100 } } " + "extension { name:\"foo_ext\" extendee: \".Foo\" number:3 " + " label:LABEL_OPTIONAL type:TYPE_INT32 } "); + AddToDatabase(&database2_, + "name: \"bar.proto\" " + "message_type { name:\"Bar\" extension_range { start: 1 end: 100 } } " + "extension { name:\"bar_ext\" extendee: \".Bar\" number:5 " + " label:LABEL_OPTIONAL type:TYPE_INT32 } "); + + // baz.proto exists in both pools, with different definitions. + AddToDatabase(&database1_, + "name: \"baz.proto\" " + "message_type { name:\"Baz\" extension_range { start: 1 end: 100 } } " + "message_type { name:\"FromPool1\" } " + "extension { name:\"baz_ext\" extendee: \".Baz\" number:12 " + " label:LABEL_OPTIONAL type:TYPE_INT32 } " + "extension { name:\"database1_only_ext\" extendee: \".Baz\" number:13 " + " label:LABEL_OPTIONAL type:TYPE_INT32 } "); + AddToDatabase(&database2_, + "name: \"baz.proto\" " + "message_type { name:\"Baz\" extension_range { start: 1 end: 100 } } " + "message_type { name:\"FromPool2\" } " + "extension { name:\"baz_ext\" extendee: \".Baz\" number:12 " + " label:LABEL_OPTIONAL type:TYPE_INT32 } "); + } + + SimpleDescriptorDatabase database1_; + SimpleDescriptorDatabase database2_; + + MergedDescriptorDatabase forward_merged_; + MergedDescriptorDatabase reverse_merged_; +}; + +TEST_F(MergedDescriptorDatabaseTest, FindFileByName) { + { + // Can find file that is only in database1_. + FileDescriptorProto file; + EXPECT_TRUE(forward_merged_.FindFileByName("foo.proto", &file)); + EXPECT_EQ("foo.proto", file.name()); + ExpectContainsType(file, "Foo"); + } + + { + // Can find file that is only in database2_. + FileDescriptorProto file; + EXPECT_TRUE(forward_merged_.FindFileByName("bar.proto", &file)); + EXPECT_EQ("bar.proto", file.name()); + ExpectContainsType(file, "Bar"); + } + + { + // In forward_merged_, database1_'s baz.proto takes precedence. + FileDescriptorProto file; + EXPECT_TRUE(forward_merged_.FindFileByName("baz.proto", &file)); + EXPECT_EQ("baz.proto", file.name()); + ExpectContainsType(file, "FromPool1"); + } + + { + // In reverse_merged_, database2_'s baz.proto takes precedence. + FileDescriptorProto file; + EXPECT_TRUE(reverse_merged_.FindFileByName("baz.proto", &file)); + EXPECT_EQ("baz.proto", file.name()); + ExpectContainsType(file, "FromPool2"); + } + + { + // Can't find non-existent file. 
+ FileDescriptorProto file; + EXPECT_FALSE(forward_merged_.FindFileByName("no_such.proto", &file)); + } +} + +TEST_F(MergedDescriptorDatabaseTest, FindFileContainingSymbol) { + { + // Can find file that is only in database1_. + FileDescriptorProto file; + EXPECT_TRUE(forward_merged_.FindFileContainingSymbol("Foo", &file)); + EXPECT_EQ("foo.proto", file.name()); + ExpectContainsType(file, "Foo"); + } + + { + // Can find file that is only in database2_. + FileDescriptorProto file; + EXPECT_TRUE(forward_merged_.FindFileContainingSymbol("Bar", &file)); + EXPECT_EQ("bar.proto", file.name()); + ExpectContainsType(file, "Bar"); + } + + { + // In forward_merged_, database1_'s baz.proto takes precedence. + FileDescriptorProto file; + EXPECT_TRUE(forward_merged_.FindFileContainingSymbol("Baz", &file)); + EXPECT_EQ("baz.proto", file.name()); + ExpectContainsType(file, "FromPool1"); + } + + { + // In reverse_merged_, database2_'s baz.proto takes precedence. + FileDescriptorProto file; + EXPECT_TRUE(reverse_merged_.FindFileContainingSymbol("Baz", &file)); + EXPECT_EQ("baz.proto", file.name()); + ExpectContainsType(file, "FromPool2"); + } + + { + // FromPool1 only shows up in forward_merged_ because it is masked by + // database2_'s baz.proto in reverse_merged_. + FileDescriptorProto file; + EXPECT_TRUE(forward_merged_.FindFileContainingSymbol("FromPool1", &file)); + EXPECT_FALSE(reverse_merged_.FindFileContainingSymbol("FromPool1", &file)); + } + + { + // Can't find non-existent symbol. + FileDescriptorProto file; + EXPECT_FALSE( + forward_merged_.FindFileContainingSymbol("NoSuchType", &file)); + } +} + +TEST_F(MergedDescriptorDatabaseTest, FindFileContainingExtension) { + { + // Can find file that is only in database1_. + FileDescriptorProto file; + EXPECT_TRUE( + forward_merged_.FindFileContainingExtension("Foo", 3, &file)); + EXPECT_EQ("foo.proto", file.name()); + ExpectContainsType(file, "Foo"); + } + + { + // Can find file that is only in database2_. + FileDescriptorProto file; + EXPECT_TRUE( + forward_merged_.FindFileContainingExtension("Bar", 5, &file)); + EXPECT_EQ("bar.proto", file.name()); + ExpectContainsType(file, "Bar"); + } + + { + // In forward_merged_, database1_'s baz.proto takes precedence. + FileDescriptorProto file; + EXPECT_TRUE( + forward_merged_.FindFileContainingExtension("Baz", 12, &file)); + EXPECT_EQ("baz.proto", file.name()); + ExpectContainsType(file, "FromPool1"); + } + + { + // In reverse_merged_, database2_'s baz.proto takes precedence. + FileDescriptorProto file; + EXPECT_TRUE( + reverse_merged_.FindFileContainingExtension("Baz", 12, &file)); + EXPECT_EQ("baz.proto", file.name()); + ExpectContainsType(file, "FromPool2"); + } + + { + // Baz's extension 13 only shows up in forward_merged_ because it is + // masked by database2_'s baz.proto in reverse_merged_. + FileDescriptorProto file; + EXPECT_TRUE(forward_merged_.FindFileContainingExtension("Baz", 13, &file)); + EXPECT_FALSE(reverse_merged_.FindFileContainingExtension("Baz", 13, &file)); + } + + { + // Can't find non-existent extension. 
+ FileDescriptorProto file; + EXPECT_FALSE( + forward_merged_.FindFileContainingExtension("Foo", 6, &file)); + } +} + +TEST_F(MergedDescriptorDatabaseTest, FindAllExtensionNumbers) { + { + // Message only has extension in database1_ + vector numbers; + EXPECT_TRUE(forward_merged_.FindAllExtensionNumbers("Foo", &numbers)); + ASSERT_EQ(1, numbers.size()); + EXPECT_EQ(3, numbers[0]); + } + + { + // Message only has extension in database2_ + vector numbers; + EXPECT_TRUE(forward_merged_.FindAllExtensionNumbers("Bar", &numbers)); + ASSERT_EQ(1, numbers.size()); + EXPECT_EQ(5, numbers[0]); + } + + { + // Merge results from the two databases. + vector numbers; + EXPECT_TRUE(forward_merged_.FindAllExtensionNumbers("Baz", &numbers)); + ASSERT_EQ(2, numbers.size()); + sort(numbers.begin(), numbers.end()); + EXPECT_EQ(12, numbers[0]); + EXPECT_EQ(13, numbers[1]); + } + + { + vector numbers; + EXPECT_TRUE(reverse_merged_.FindAllExtensionNumbers("Baz", &numbers)); + ASSERT_EQ(2, numbers.size()); + sort(numbers.begin(), numbers.end()); + EXPECT_EQ(12, numbers[0]); + EXPECT_EQ(13, numbers[1]); + } + + { + // Can't find extensions for a non-existent message. + vector numbers; + EXPECT_FALSE(reverse_merged_.FindAllExtensionNumbers("Blah", &numbers)); + } +} + +} // anonymous namespace +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor_unittest.cc new file mode 100644 index 0000000000..55aebfd122 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/descriptor_unittest.cc @@ -0,0 +1,4034 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
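The MergedDescriptorDatabase tests above pin down the lookup order: whenever both wrapped databases define the same file, symbol, or extension, the database passed first to the constructor wins, while FindAllExtensionNumbers merges results from all sources. A minimal sketch of that behaviour, using only the public protobuf calls exercised in the tests above (the Demo() wrapper is illustrative, not part of the library or of this patch):

#include <google/protobuf/descriptor.pb.h>
#include <google/protobuf/descriptor_database.h>
#include <google/protobuf/text_format.h>

static void Demo() {
  using namespace google::protobuf;

  SimpleDescriptorDatabase db1, db2;

  // Both databases define a file named "baz.proto", with different contents.
  FileDescriptorProto file1, file2;
  TextFormat::ParseFromString(
      "name: \"baz.proto\" message_type { name: \"FromPool1\" }", &file1);
  TextFormat::ParseFromString(
      "name: \"baz.proto\" message_type { name: \"FromPool2\" }", &file2);
  db1.Add(file1);
  db2.Add(file2);

  // db1 is searched first, so its version of baz.proto masks db2's.
  MergedDescriptorDatabase merged(&db1, &db2);
  FileDescriptorProto result;
  if (merged.FindFileByName("baz.proto", &result)) {
    // result.message_type(0).name() is "FromPool1" here.
  }
}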
+ +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This file makes extensive use of RFC 3092. :) + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +namespace google { +namespace protobuf { + +// Can't use an anonymous namespace here due to brokenness of Tru64 compiler. +namespace descriptor_unittest { + +// Some helpers to make assembling descriptors faster. +DescriptorProto* AddMessage(FileDescriptorProto* file, const string& name) { + DescriptorProto* result = file->add_message_type(); + result->set_name(name); + return result; +} + +DescriptorProto* AddNestedMessage(DescriptorProto* parent, const string& name) { + DescriptorProto* result = parent->add_nested_type(); + result->set_name(name); + return result; +} + +EnumDescriptorProto* AddEnum(FileDescriptorProto* file, const string& name) { + EnumDescriptorProto* result = file->add_enum_type(); + result->set_name(name); + return result; +} + +EnumDescriptorProto* AddNestedEnum(DescriptorProto* parent, + const string& name) { + EnumDescriptorProto* result = parent->add_enum_type(); + result->set_name(name); + return result; +} + +ServiceDescriptorProto* AddService(FileDescriptorProto* file, + const string& name) { + ServiceDescriptorProto* result = file->add_service(); + result->set_name(name); + return result; +} + +FieldDescriptorProto* AddField(DescriptorProto* parent, + const string& name, int number, + FieldDescriptorProto::Label label, + FieldDescriptorProto::Type type) { + FieldDescriptorProto* result = parent->add_field(); + result->set_name(name); + result->set_number(number); + result->set_label(label); + result->set_type(type); + return result; +} + +FieldDescriptorProto* AddExtension(FileDescriptorProto* file, + const string& extendee, + const string& name, int number, + FieldDescriptorProto::Label label, + FieldDescriptorProto::Type type) { + FieldDescriptorProto* result = file->add_extension(); + result->set_name(name); + result->set_number(number); + result->set_label(label); + result->set_type(type); + result->set_extendee(extendee); + return result; +} + +FieldDescriptorProto* AddNestedExtension(DescriptorProto* parent, + const string& extendee, + const string& name, int number, + FieldDescriptorProto::Label label, + FieldDescriptorProto::Type type) { + FieldDescriptorProto* result = parent->add_extension(); + result->set_name(name); + result->set_number(number); + result->set_label(label); + result->set_type(type); + result->set_extendee(extendee); + return result; +} + +DescriptorProto::ExtensionRange* AddExtensionRange(DescriptorProto* parent, + int start, int end) { + DescriptorProto::ExtensionRange* result = parent->add_extension_range(); + result->set_start(start); + result->set_end(end); + return result; +} + +EnumValueDescriptorProto* AddEnumValue(EnumDescriptorProto* enum_proto, + const string& name, int number) { + EnumValueDescriptorProto* result = enum_proto->add_value(); + result->set_name(name); + result->set_number(number); + return result; +} + +MethodDescriptorProto* AddMethod(ServiceDescriptorProto* service, + const string& name, + const string& input_type, + const string& output_type) { + MethodDescriptorProto* result = service->add_method(); + result->set_name(name); + result->set_input_type(input_type); + result->set_output_type(output_type); + return result; +} + +// Empty enums technically aren't allowed. 
We need to insert a dummy value +// into them. +void AddEmptyEnum(FileDescriptorProto* file, const string& name) { + AddEnumValue(AddEnum(file, name), name + "_DUMMY", 1); +} + +// =================================================================== + +// Test simple files. +class FileDescriptorTest : public testing::Test { + protected: + virtual void SetUp() { + // Build descriptors for the following definitions: + // + // // in "foo.proto" + // message FooMessage { extensions 1; } + // enum FooEnum {FOO_ENUM_VALUE = 1;} + // service FooService {} + // extend FooMessage { optional int32 foo_extension = 1; } + // + // // in "bar.proto" + // package bar_package; + // message BarMessage { extensions 1; } + // enum BarEnum {BAR_ENUM_VALUE = 1;} + // service BarService {} + // extend BarMessage { optional int32 bar_extension = 1; } + // + // Also, we have an empty file "baz.proto". This file's purpose is to + // make sure that even though it has the same package as foo.proto, + // searching it for members of foo.proto won't work. + + FileDescriptorProto foo_file; + foo_file.set_name("foo.proto"); + AddExtensionRange(AddMessage(&foo_file, "FooMessage"), 1, 2); + AddEnumValue(AddEnum(&foo_file, "FooEnum"), "FOO_ENUM_VALUE", 1); + AddService(&foo_file, "FooService"); + AddExtension(&foo_file, "FooMessage", "foo_extension", 1, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + + FileDescriptorProto bar_file; + bar_file.set_name("bar.proto"); + bar_file.set_package("bar_package"); + bar_file.add_dependency("foo.proto"); + AddExtensionRange(AddMessage(&bar_file, "BarMessage"), 1, 2); + AddEnumValue(AddEnum(&bar_file, "BarEnum"), "BAR_ENUM_VALUE", 1); + AddService(&bar_file, "BarService"); + AddExtension(&bar_file, "bar_package.BarMessage", "bar_extension", 1, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + + FileDescriptorProto baz_file; + baz_file.set_name("baz.proto"); + + // Build the descriptors and get the pointers. 
+ foo_file_ = pool_.BuildFile(foo_file); + ASSERT_TRUE(foo_file_ != NULL); + + bar_file_ = pool_.BuildFile(bar_file); + ASSERT_TRUE(bar_file_ != NULL); + + baz_file_ = pool_.BuildFile(baz_file); + ASSERT_TRUE(baz_file_ != NULL); + + ASSERT_EQ(1, foo_file_->message_type_count()); + foo_message_ = foo_file_->message_type(0); + ASSERT_EQ(1, foo_file_->enum_type_count()); + foo_enum_ = foo_file_->enum_type(0); + ASSERT_EQ(1, foo_enum_->value_count()); + foo_enum_value_ = foo_enum_->value(0); + ASSERT_EQ(1, foo_file_->service_count()); + foo_service_ = foo_file_->service(0); + ASSERT_EQ(1, foo_file_->extension_count()); + foo_extension_ = foo_file_->extension(0); + + ASSERT_EQ(1, bar_file_->message_type_count()); + bar_message_ = bar_file_->message_type(0); + ASSERT_EQ(1, bar_file_->enum_type_count()); + bar_enum_ = bar_file_->enum_type(0); + ASSERT_EQ(1, bar_enum_->value_count()); + bar_enum_value_ = bar_enum_->value(0); + ASSERT_EQ(1, bar_file_->service_count()); + bar_service_ = bar_file_->service(0); + ASSERT_EQ(1, bar_file_->extension_count()); + bar_extension_ = bar_file_->extension(0); + } + + DescriptorPool pool_; + + const FileDescriptor* foo_file_; + const FileDescriptor* bar_file_; + const FileDescriptor* baz_file_; + + const Descriptor* foo_message_; + const EnumDescriptor* foo_enum_; + const EnumValueDescriptor* foo_enum_value_; + const ServiceDescriptor* foo_service_; + const FieldDescriptor* foo_extension_; + + const Descriptor* bar_message_; + const EnumDescriptor* bar_enum_; + const EnumValueDescriptor* bar_enum_value_; + const ServiceDescriptor* bar_service_; + const FieldDescriptor* bar_extension_; +}; + +TEST_F(FileDescriptorTest, Name) { + EXPECT_EQ("foo.proto", foo_file_->name()); + EXPECT_EQ("bar.proto", bar_file_->name()); + EXPECT_EQ("baz.proto", baz_file_->name()); +} + +TEST_F(FileDescriptorTest, Package) { + EXPECT_EQ("", foo_file_->package()); + EXPECT_EQ("bar_package", bar_file_->package()); +} + +TEST_F(FileDescriptorTest, Dependencies) { + EXPECT_EQ(0, foo_file_->dependency_count()); + EXPECT_EQ(1, bar_file_->dependency_count()); + EXPECT_EQ(foo_file_, bar_file_->dependency(0)); +} + +TEST_F(FileDescriptorTest, FindMessageTypeByName) { + EXPECT_EQ(foo_message_, foo_file_->FindMessageTypeByName("FooMessage")); + EXPECT_EQ(bar_message_, bar_file_->FindMessageTypeByName("BarMessage")); + + EXPECT_TRUE(foo_file_->FindMessageTypeByName("BarMessage") == NULL); + EXPECT_TRUE(bar_file_->FindMessageTypeByName("FooMessage") == NULL); + EXPECT_TRUE(baz_file_->FindMessageTypeByName("FooMessage") == NULL); + + EXPECT_TRUE(foo_file_->FindMessageTypeByName("NoSuchMessage") == NULL); + EXPECT_TRUE(foo_file_->FindMessageTypeByName("FooEnum") == NULL); +} + +TEST_F(FileDescriptorTest, FindEnumTypeByName) { + EXPECT_EQ(foo_enum_, foo_file_->FindEnumTypeByName("FooEnum")); + EXPECT_EQ(bar_enum_, bar_file_->FindEnumTypeByName("BarEnum")); + + EXPECT_TRUE(foo_file_->FindEnumTypeByName("BarEnum") == NULL); + EXPECT_TRUE(bar_file_->FindEnumTypeByName("FooEnum") == NULL); + EXPECT_TRUE(baz_file_->FindEnumTypeByName("FooEnum") == NULL); + + EXPECT_TRUE(foo_file_->FindEnumTypeByName("NoSuchEnum") == NULL); + EXPECT_TRUE(foo_file_->FindEnumTypeByName("FooMessage") == NULL); +} + +TEST_F(FileDescriptorTest, FindEnumValueByName) { + EXPECT_EQ(foo_enum_value_, foo_file_->FindEnumValueByName("FOO_ENUM_VALUE")); + EXPECT_EQ(bar_enum_value_, bar_file_->FindEnumValueByName("BAR_ENUM_VALUE")); + + EXPECT_TRUE(foo_file_->FindEnumValueByName("BAR_ENUM_VALUE") == NULL); + 
EXPECT_TRUE(bar_file_->FindEnumValueByName("FOO_ENUM_VALUE") == NULL); + EXPECT_TRUE(baz_file_->FindEnumValueByName("FOO_ENUM_VALUE") == NULL); + + EXPECT_TRUE(foo_file_->FindEnumValueByName("NO_SUCH_VALUE") == NULL); + EXPECT_TRUE(foo_file_->FindEnumValueByName("FooMessage") == NULL); +} + +TEST_F(FileDescriptorTest, FindServiceByName) { + EXPECT_EQ(foo_service_, foo_file_->FindServiceByName("FooService")); + EXPECT_EQ(bar_service_, bar_file_->FindServiceByName("BarService")); + + EXPECT_TRUE(foo_file_->FindServiceByName("BarService") == NULL); + EXPECT_TRUE(bar_file_->FindServiceByName("FooService") == NULL); + EXPECT_TRUE(baz_file_->FindServiceByName("FooService") == NULL); + + EXPECT_TRUE(foo_file_->FindServiceByName("NoSuchService") == NULL); + EXPECT_TRUE(foo_file_->FindServiceByName("FooMessage") == NULL); +} + +TEST_F(FileDescriptorTest, FindExtensionByName) { + EXPECT_EQ(foo_extension_, foo_file_->FindExtensionByName("foo_extension")); + EXPECT_EQ(bar_extension_, bar_file_->FindExtensionByName("bar_extension")); + + EXPECT_TRUE(foo_file_->FindExtensionByName("bar_extension") == NULL); + EXPECT_TRUE(bar_file_->FindExtensionByName("foo_extension") == NULL); + EXPECT_TRUE(baz_file_->FindExtensionByName("foo_extension") == NULL); + + EXPECT_TRUE(foo_file_->FindExtensionByName("no_such_extension") == NULL); + EXPECT_TRUE(foo_file_->FindExtensionByName("FooMessage") == NULL); +} + +TEST_F(FileDescriptorTest, FindExtensionByNumber) { + EXPECT_EQ(foo_extension_, pool_.FindExtensionByNumber(foo_message_, 1)); + EXPECT_EQ(bar_extension_, pool_.FindExtensionByNumber(bar_message_, 1)); + + EXPECT_TRUE(pool_.FindExtensionByNumber(foo_message_, 2) == NULL); +} + +TEST_F(FileDescriptorTest, BuildAgain) { + // Test that if te call BuildFile again on the same input we get the same + // FileDescriptor back. + FileDescriptorProto file; + foo_file_->CopyTo(&file); + EXPECT_EQ(foo_file_, pool_.BuildFile(file)); + + // But if we change the file then it won't work. + file.set_package("some.other.package"); + EXPECT_TRUE(pool_.BuildFile(file) == NULL); +} + +// =================================================================== + +// Test simple flat messages and fields. +class DescriptorTest : public testing::Test { + protected: + virtual void SetUp() { + // Build descriptors for the following definitions: + // + // // in "foo.proto" + // message TestForeign {} + // enum TestEnum {} + // + // message TestMessage { + // required string foo = 1; + // optional TestEnum bar = 6; + // repeated TestForeign baz = 500000000; + // optional group qux = 15 {} + // } + // + // // in "bar.proto" + // package corge.grault; + // message TestMessage2 { + // required string foo = 1; + // required string bar = 2; + // required string quux = 6; + // } + // + // We cheat and use TestForeign as the type for qux rather than create + // an actual nested type. + // + // Since all primitive types (including string) use the same building + // code, there's no need to test each one individually. + // + // TestMessage2 is primarily here to test FindFieldByName and friends. + // All messages created from the same DescriptorPool share the same lookup + // table, so we need to insure that they don't interfere. 
+ + FileDescriptorProto foo_file; + foo_file.set_name("foo.proto"); + AddMessage(&foo_file, "TestForeign"); + AddEmptyEnum(&foo_file, "TestEnum"); + + DescriptorProto* message = AddMessage(&foo_file, "TestMessage"); + AddField(message, "foo", 1, + FieldDescriptorProto::LABEL_REQUIRED, + FieldDescriptorProto::TYPE_STRING); + AddField(message, "bar", 6, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_ENUM) + ->set_type_name("TestEnum"); + AddField(message, "baz", 500000000, + FieldDescriptorProto::LABEL_REPEATED, + FieldDescriptorProto::TYPE_MESSAGE) + ->set_type_name("TestForeign"); + AddField(message, "qux", 15, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_GROUP) + ->set_type_name("TestForeign"); + + FileDescriptorProto bar_file; + bar_file.set_name("bar.proto"); + bar_file.set_package("corge.grault"); + + DescriptorProto* message2 = AddMessage(&bar_file, "TestMessage2"); + AddField(message2, "foo", 1, + FieldDescriptorProto::LABEL_REQUIRED, + FieldDescriptorProto::TYPE_STRING); + AddField(message2, "bar", 2, + FieldDescriptorProto::LABEL_REQUIRED, + FieldDescriptorProto::TYPE_STRING); + AddField(message2, "quux", 6, + FieldDescriptorProto::LABEL_REQUIRED, + FieldDescriptorProto::TYPE_STRING); + + // Build the descriptors and get the pointers. + foo_file_ = pool_.BuildFile(foo_file); + ASSERT_TRUE(foo_file_ != NULL); + + bar_file_ = pool_.BuildFile(bar_file); + ASSERT_TRUE(bar_file_ != NULL); + + ASSERT_EQ(1, foo_file_->enum_type_count()); + enum_ = foo_file_->enum_type(0); + + ASSERT_EQ(2, foo_file_->message_type_count()); + foreign_ = foo_file_->message_type(0); + message_ = foo_file_->message_type(1); + + ASSERT_EQ(4, message_->field_count()); + foo_ = message_->field(0); + bar_ = message_->field(1); + baz_ = message_->field(2); + qux_ = message_->field(3); + + ASSERT_EQ(1, bar_file_->message_type_count()); + message2_ = bar_file_->message_type(0); + + ASSERT_EQ(3, message2_->field_count()); + foo2_ = message2_->field(0); + bar2_ = message2_->field(1); + quux2_ = message2_->field(2); + } + + DescriptorPool pool_; + + const FileDescriptor* foo_file_; + const FileDescriptor* bar_file_; + + const Descriptor* message_; + const Descriptor* message2_; + const Descriptor* foreign_; + const EnumDescriptor* enum_; + + const FieldDescriptor* foo_; + const FieldDescriptor* bar_; + const FieldDescriptor* baz_; + const FieldDescriptor* qux_; + + const FieldDescriptor* foo2_; + const FieldDescriptor* bar2_; + const FieldDescriptor* quux2_; +}; + +TEST_F(DescriptorTest, Name) { + EXPECT_EQ("TestMessage", message_->name()); + EXPECT_EQ("TestMessage", message_->full_name()); + EXPECT_EQ(foo_file_, message_->file()); + + EXPECT_EQ("TestMessage2", message2_->name()); + EXPECT_EQ("corge.grault.TestMessage2", message2_->full_name()); + EXPECT_EQ(bar_file_, message2_->file()); +} + +TEST_F(DescriptorTest, ContainingType) { + EXPECT_TRUE(message_->containing_type() == NULL); + EXPECT_TRUE(message2_->containing_type() == NULL); +} + +TEST_F(DescriptorTest, FieldsByIndex) { + ASSERT_EQ(4, message_->field_count()); + EXPECT_EQ(foo_, message_->field(0)); + EXPECT_EQ(bar_, message_->field(1)); + EXPECT_EQ(baz_, message_->field(2)); + EXPECT_EQ(qux_, message_->field(3)); +} + +TEST_F(DescriptorTest, FindFieldByName) { + // All messages in the same DescriptorPool share a single lookup table for + // fields. So, in addition to testing that FindFieldByName finds the fields + // of the message, we need to test that it does *not* find the fields of + // *other* messages. 
+ + EXPECT_EQ(foo_, message_->FindFieldByName("foo")); + EXPECT_EQ(bar_, message_->FindFieldByName("bar")); + EXPECT_EQ(baz_, message_->FindFieldByName("baz")); + EXPECT_EQ(qux_, message_->FindFieldByName("qux")); + EXPECT_TRUE(message_->FindFieldByName("no_such_field") == NULL); + EXPECT_TRUE(message_->FindFieldByName("quux") == NULL); + + EXPECT_EQ(foo2_ , message2_->FindFieldByName("foo" )); + EXPECT_EQ(bar2_ , message2_->FindFieldByName("bar" )); + EXPECT_EQ(quux2_, message2_->FindFieldByName("quux")); + EXPECT_TRUE(message2_->FindFieldByName("baz") == NULL); + EXPECT_TRUE(message2_->FindFieldByName("qux") == NULL); +} + +TEST_F(DescriptorTest, FindFieldByNumber) { + EXPECT_EQ(foo_, message_->FindFieldByNumber(1)); + EXPECT_EQ(bar_, message_->FindFieldByNumber(6)); + EXPECT_EQ(baz_, message_->FindFieldByNumber(500000000)); + EXPECT_EQ(qux_, message_->FindFieldByNumber(15)); + EXPECT_TRUE(message_->FindFieldByNumber(837592) == NULL); + EXPECT_TRUE(message_->FindFieldByNumber(2) == NULL); + + EXPECT_EQ(foo2_ , message2_->FindFieldByNumber(1)); + EXPECT_EQ(bar2_ , message2_->FindFieldByNumber(2)); + EXPECT_EQ(quux2_, message2_->FindFieldByNumber(6)); + EXPECT_TRUE(message2_->FindFieldByNumber(15) == NULL); + EXPECT_TRUE(message2_->FindFieldByNumber(500000000) == NULL); +} + +TEST_F(DescriptorTest, FieldName) { + EXPECT_EQ("foo", foo_->name()); + EXPECT_EQ("bar", bar_->name()); + EXPECT_EQ("baz", baz_->name()); + EXPECT_EQ("qux", qux_->name()); +} + +TEST_F(DescriptorTest, FieldFullName) { + EXPECT_EQ("TestMessage.foo", foo_->full_name()); + EXPECT_EQ("TestMessage.bar", bar_->full_name()); + EXPECT_EQ("TestMessage.baz", baz_->full_name()); + EXPECT_EQ("TestMessage.qux", qux_->full_name()); + + EXPECT_EQ("corge.grault.TestMessage2.foo", foo2_->full_name()); + EXPECT_EQ("corge.grault.TestMessage2.bar", bar2_->full_name()); + EXPECT_EQ("corge.grault.TestMessage2.quux", quux2_->full_name()); +} + +TEST_F(DescriptorTest, FieldFile) { + EXPECT_EQ(foo_file_, foo_->file()); + EXPECT_EQ(foo_file_, bar_->file()); + EXPECT_EQ(foo_file_, baz_->file()); + EXPECT_EQ(foo_file_, qux_->file()); + + EXPECT_EQ(bar_file_, foo2_->file()); + EXPECT_EQ(bar_file_, bar2_->file()); + EXPECT_EQ(bar_file_, quux2_->file()); +} + +TEST_F(DescriptorTest, FieldIndex) { + EXPECT_EQ(0, foo_->index()); + EXPECT_EQ(1, bar_->index()); + EXPECT_EQ(2, baz_->index()); + EXPECT_EQ(3, qux_->index()); +} + +TEST_F(DescriptorTest, FieldNumber) { + EXPECT_EQ( 1, foo_->number()); + EXPECT_EQ( 6, bar_->number()); + EXPECT_EQ(500000000, baz_->number()); + EXPECT_EQ( 15, qux_->number()); +} + +TEST_F(DescriptorTest, FieldType) { + EXPECT_EQ(FieldDescriptor::TYPE_STRING , foo_->type()); + EXPECT_EQ(FieldDescriptor::TYPE_ENUM , bar_->type()); + EXPECT_EQ(FieldDescriptor::TYPE_MESSAGE, baz_->type()); + EXPECT_EQ(FieldDescriptor::TYPE_GROUP , qux_->type()); +} + +TEST_F(DescriptorTest, FieldLabel) { + EXPECT_EQ(FieldDescriptor::LABEL_REQUIRED, foo_->label()); + EXPECT_EQ(FieldDescriptor::LABEL_OPTIONAL, bar_->label()); + EXPECT_EQ(FieldDescriptor::LABEL_REPEATED, baz_->label()); + EXPECT_EQ(FieldDescriptor::LABEL_OPTIONAL, qux_->label()); + + EXPECT_TRUE (foo_->is_required()); + EXPECT_FALSE(foo_->is_optional()); + EXPECT_FALSE(foo_->is_repeated()); + + EXPECT_FALSE(bar_->is_required()); + EXPECT_TRUE (bar_->is_optional()); + EXPECT_FALSE(bar_->is_repeated()); + + EXPECT_FALSE(baz_->is_required()); + EXPECT_FALSE(baz_->is_optional()); + EXPECT_TRUE (baz_->is_repeated()); +} + +TEST_F(DescriptorTest, FieldHasDefault) { + 
EXPECT_FALSE(foo_->has_default_value()); + EXPECT_FALSE(bar_->has_default_value()); + EXPECT_FALSE(baz_->has_default_value()); + EXPECT_FALSE(qux_->has_default_value()); +} + +TEST_F(DescriptorTest, FieldContainingType) { + EXPECT_EQ(message_, foo_->containing_type()); + EXPECT_EQ(message_, bar_->containing_type()); + EXPECT_EQ(message_, baz_->containing_type()); + EXPECT_EQ(message_, qux_->containing_type()); + + EXPECT_EQ(message2_, foo2_ ->containing_type()); + EXPECT_EQ(message2_, bar2_ ->containing_type()); + EXPECT_EQ(message2_, quux2_->containing_type()); +} + +TEST_F(DescriptorTest, FieldMessageType) { + EXPECT_TRUE(foo_->message_type() == NULL); + EXPECT_TRUE(bar_->message_type() == NULL); + + EXPECT_EQ(foreign_, baz_->message_type()); + EXPECT_EQ(foreign_, qux_->message_type()); +} + +TEST_F(DescriptorTest, FieldEnumType) { + EXPECT_TRUE(foo_->enum_type() == NULL); + EXPECT_TRUE(baz_->enum_type() == NULL); + EXPECT_TRUE(qux_->enum_type() == NULL); + + EXPECT_EQ(enum_, bar_->enum_type()); +} + +// =================================================================== + +class StylizedFieldNamesTest : public testing::Test { + protected: + void SetUp() { + FileDescriptorProto file; + file.set_name("foo.proto"); + + AddExtensionRange(AddMessage(&file, "ExtendableMessage"), 1, 1000); + + DescriptorProto* message = AddMessage(&file, "TestMessage"); + AddField(message, "foo_foo", 1, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + AddField(message, "FooBar", 2, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + AddField(message, "fooBaz", 3, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + AddField(message, "fooFoo", 4, // Camel-case conflict with foo_foo. + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + AddField(message, "foobar", 5, // Lower-case conflict with FooBar. 
+ FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + + AddNestedExtension(message, "ExtendableMessage", "bar_foo", 1, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + AddNestedExtension(message, "ExtendableMessage", "BarBar", 2, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + AddNestedExtension(message, "ExtendableMessage", "BarBaz", 3, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + AddNestedExtension(message, "ExtendableMessage", "barFoo", 4, // Conflict + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + AddNestedExtension(message, "ExtendableMessage", "barbar", 5, // Conflict + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + + AddExtension(&file, "ExtendableMessage", "baz_foo", 11, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + AddExtension(&file, "ExtendableMessage", "BazBar", 12, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + AddExtension(&file, "ExtendableMessage", "BazBaz", 13, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + AddExtension(&file, "ExtendableMessage", "bazFoo", 14, // Conflict + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + AddExtension(&file, "ExtendableMessage", "bazbar", 15, // Conflict + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + + file_ = pool_.BuildFile(file); + ASSERT_TRUE(file_ != NULL); + ASSERT_EQ(2, file_->message_type_count()); + message_ = file_->message_type(1); + ASSERT_EQ("TestMessage", message_->name()); + ASSERT_EQ(5, message_->field_count()); + ASSERT_EQ(5, message_->extension_count()); + ASSERT_EQ(5, file_->extension_count()); + } + + DescriptorPool pool_; + const FileDescriptor* file_; + const Descriptor* message_; +}; + +TEST_F(StylizedFieldNamesTest, LowercaseName) { + EXPECT_EQ("foo_foo", message_->field(0)->lowercase_name()); + EXPECT_EQ("foobar" , message_->field(1)->lowercase_name()); + EXPECT_EQ("foobaz" , message_->field(2)->lowercase_name()); + EXPECT_EQ("foofoo" , message_->field(3)->lowercase_name()); + EXPECT_EQ("foobar" , message_->field(4)->lowercase_name()); + + EXPECT_EQ("bar_foo", message_->extension(0)->lowercase_name()); + EXPECT_EQ("barbar" , message_->extension(1)->lowercase_name()); + EXPECT_EQ("barbaz" , message_->extension(2)->lowercase_name()); + EXPECT_EQ("barfoo" , message_->extension(3)->lowercase_name()); + EXPECT_EQ("barbar" , message_->extension(4)->lowercase_name()); + + EXPECT_EQ("baz_foo", file_->extension(0)->lowercase_name()); + EXPECT_EQ("bazbar" , file_->extension(1)->lowercase_name()); + EXPECT_EQ("bazbaz" , file_->extension(2)->lowercase_name()); + EXPECT_EQ("bazfoo" , file_->extension(3)->lowercase_name()); + EXPECT_EQ("bazbar" , file_->extension(4)->lowercase_name()); +} + +TEST_F(StylizedFieldNamesTest, CamelcaseName) { + EXPECT_EQ("fooFoo", message_->field(0)->camelcase_name()); + EXPECT_EQ("fooBar", message_->field(1)->camelcase_name()); + EXPECT_EQ("fooBaz", message_->field(2)->camelcase_name()); + EXPECT_EQ("fooFoo", message_->field(3)->camelcase_name()); + EXPECT_EQ("foobar", message_->field(4)->camelcase_name()); + + EXPECT_EQ("barFoo", message_->extension(0)->camelcase_name()); + EXPECT_EQ("barBar", message_->extension(1)->camelcase_name()); + EXPECT_EQ("barBaz", message_->extension(2)->camelcase_name()); + EXPECT_EQ("barFoo", message_->extension(3)->camelcase_name()); + 
EXPECT_EQ("barbar", message_->extension(4)->camelcase_name()); + + EXPECT_EQ("bazFoo", file_->extension(0)->camelcase_name()); + EXPECT_EQ("bazBar", file_->extension(1)->camelcase_name()); + EXPECT_EQ("bazBaz", file_->extension(2)->camelcase_name()); + EXPECT_EQ("bazFoo", file_->extension(3)->camelcase_name()); + EXPECT_EQ("bazbar", file_->extension(4)->camelcase_name()); +} + +TEST_F(StylizedFieldNamesTest, FindByLowercaseName) { + EXPECT_EQ(message_->field(0), + message_->FindFieldByLowercaseName("foo_foo")); + EXPECT_EQ(message_->field(1), + message_->FindFieldByLowercaseName("foobar")); + EXPECT_EQ(message_->field(2), + message_->FindFieldByLowercaseName("foobaz")); + EXPECT_TRUE(message_->FindFieldByLowercaseName("FooBar") == NULL); + EXPECT_TRUE(message_->FindFieldByLowercaseName("fooBaz") == NULL); + EXPECT_TRUE(message_->FindFieldByLowercaseName("bar_foo") == NULL); + EXPECT_TRUE(message_->FindFieldByLowercaseName("nosuchfield") == NULL); + + EXPECT_EQ(message_->extension(0), + message_->FindExtensionByLowercaseName("bar_foo")); + EXPECT_EQ(message_->extension(1), + message_->FindExtensionByLowercaseName("barbar")); + EXPECT_EQ(message_->extension(2), + message_->FindExtensionByLowercaseName("barbaz")); + EXPECT_TRUE(message_->FindExtensionByLowercaseName("BarBar") == NULL); + EXPECT_TRUE(message_->FindExtensionByLowercaseName("barBaz") == NULL); + EXPECT_TRUE(message_->FindExtensionByLowercaseName("foo_foo") == NULL); + EXPECT_TRUE(message_->FindExtensionByLowercaseName("nosuchfield") == NULL); + + EXPECT_EQ(file_->extension(0), + file_->FindExtensionByLowercaseName("baz_foo")); + EXPECT_EQ(file_->extension(1), + file_->FindExtensionByLowercaseName("bazbar")); + EXPECT_EQ(file_->extension(2), + file_->FindExtensionByLowercaseName("bazbaz")); + EXPECT_TRUE(file_->FindExtensionByLowercaseName("BazBar") == NULL); + EXPECT_TRUE(file_->FindExtensionByLowercaseName("bazBaz") == NULL); + EXPECT_TRUE(file_->FindExtensionByLowercaseName("nosuchfield") == NULL); +} + +TEST_F(StylizedFieldNamesTest, FindByCamelcaseName) { + EXPECT_EQ(message_->field(0), + message_->FindFieldByCamelcaseName("fooFoo")); + EXPECT_EQ(message_->field(1), + message_->FindFieldByCamelcaseName("fooBar")); + EXPECT_EQ(message_->field(2), + message_->FindFieldByCamelcaseName("fooBaz")); + EXPECT_TRUE(message_->FindFieldByCamelcaseName("foo_foo") == NULL); + EXPECT_TRUE(message_->FindFieldByCamelcaseName("FooBar") == NULL); + EXPECT_TRUE(message_->FindFieldByCamelcaseName("barFoo") == NULL); + EXPECT_TRUE(message_->FindFieldByCamelcaseName("nosuchfield") == NULL); + + EXPECT_EQ(message_->extension(0), + message_->FindExtensionByCamelcaseName("barFoo")); + EXPECT_EQ(message_->extension(1), + message_->FindExtensionByCamelcaseName("barBar")); + EXPECT_EQ(message_->extension(2), + message_->FindExtensionByCamelcaseName("barBaz")); + EXPECT_TRUE(message_->FindExtensionByCamelcaseName("bar_foo") == NULL); + EXPECT_TRUE(message_->FindExtensionByCamelcaseName("BarBar") == NULL); + EXPECT_TRUE(message_->FindExtensionByCamelcaseName("fooFoo") == NULL); + EXPECT_TRUE(message_->FindExtensionByCamelcaseName("nosuchfield") == NULL); + + EXPECT_EQ(file_->extension(0), + file_->FindExtensionByCamelcaseName("bazFoo")); + EXPECT_EQ(file_->extension(1), + file_->FindExtensionByCamelcaseName("bazBar")); + EXPECT_EQ(file_->extension(2), + file_->FindExtensionByCamelcaseName("bazBaz")); + EXPECT_TRUE(file_->FindExtensionByCamelcaseName("baz_foo") == NULL); + EXPECT_TRUE(file_->FindExtensionByCamelcaseName("BazBar") == NULL); + 
EXPECT_TRUE(file_->FindExtensionByCamelcaseName("nosuchfield") == NULL); +} + +// =================================================================== + +// Test enum descriptors. +class EnumDescriptorTest : public testing::Test { + protected: + virtual void SetUp() { + // Build descriptors for the following definitions: + // + // // in "foo.proto" + // enum TestEnum { + // FOO = 1; + // BAR = 2; + // } + // + // // in "bar.proto" + // package corge.grault; + // enum TestEnum2 { + // FOO = 1; + // BAZ = 3; + // } + // + // TestEnum2 is primarily here to test FindValueByName and friends. + // All enums created from the same DescriptorPool share the same lookup + // table, so we need to insure that they don't interfere. + + // TestEnum + FileDescriptorProto foo_file; + foo_file.set_name("foo.proto"); + + EnumDescriptorProto* enum_proto = AddEnum(&foo_file, "TestEnum"); + AddEnumValue(enum_proto, "FOO", 1); + AddEnumValue(enum_proto, "BAR", 2); + + // TestEnum2 + FileDescriptorProto bar_file; + bar_file.set_name("bar.proto"); + bar_file.set_package("corge.grault"); + + EnumDescriptorProto* enum2_proto = AddEnum(&bar_file, "TestEnum2"); + AddEnumValue(enum2_proto, "FOO", 1); + AddEnumValue(enum2_proto, "BAZ", 3); + + // Build the descriptors and get the pointers. + foo_file_ = pool_.BuildFile(foo_file); + ASSERT_TRUE(foo_file_ != NULL); + + bar_file_ = pool_.BuildFile(bar_file); + ASSERT_TRUE(bar_file_ != NULL); + + ASSERT_EQ(1, foo_file_->enum_type_count()); + enum_ = foo_file_->enum_type(0); + + ASSERT_EQ(2, enum_->value_count()); + foo_ = enum_->value(0); + bar_ = enum_->value(1); + + ASSERT_EQ(1, bar_file_->enum_type_count()); + enum2_ = bar_file_->enum_type(0); + + ASSERT_EQ(2, enum2_->value_count()); + foo2_ = enum2_->value(0); + baz2_ = enum2_->value(1); + } + + DescriptorPool pool_; + + const FileDescriptor* foo_file_; + const FileDescriptor* bar_file_; + + const EnumDescriptor* enum_; + const EnumDescriptor* enum2_; + + const EnumValueDescriptor* foo_; + const EnumValueDescriptor* bar_; + + const EnumValueDescriptor* foo2_; + const EnumValueDescriptor* baz2_; +}; + +TEST_F(EnumDescriptorTest, Name) { + EXPECT_EQ("TestEnum", enum_->name()); + EXPECT_EQ("TestEnum", enum_->full_name()); + EXPECT_EQ(foo_file_, enum_->file()); + + EXPECT_EQ("TestEnum2", enum2_->name()); + EXPECT_EQ("corge.grault.TestEnum2", enum2_->full_name()); + EXPECT_EQ(bar_file_, enum2_->file()); +} + +TEST_F(EnumDescriptorTest, ContainingType) { + EXPECT_TRUE(enum_->containing_type() == NULL); + EXPECT_TRUE(enum2_->containing_type() == NULL); +} + +TEST_F(EnumDescriptorTest, ValuesByIndex) { + ASSERT_EQ(2, enum_->value_count()); + EXPECT_EQ(foo_, enum_->value(0)); + EXPECT_EQ(bar_, enum_->value(1)); +} + +TEST_F(EnumDescriptorTest, FindValueByName) { + EXPECT_EQ(foo_ , enum_ ->FindValueByName("FOO")); + EXPECT_EQ(bar_ , enum_ ->FindValueByName("BAR")); + EXPECT_EQ(foo2_, enum2_->FindValueByName("FOO")); + EXPECT_EQ(baz2_, enum2_->FindValueByName("BAZ")); + + EXPECT_TRUE(enum_ ->FindValueByName("NO_SUCH_VALUE") == NULL); + EXPECT_TRUE(enum_ ->FindValueByName("BAZ" ) == NULL); + EXPECT_TRUE(enum2_->FindValueByName("BAR" ) == NULL); +} + +TEST_F(EnumDescriptorTest, FindValueByNumber) { + EXPECT_EQ(foo_ , enum_ ->FindValueByNumber(1)); + EXPECT_EQ(bar_ , enum_ ->FindValueByNumber(2)); + EXPECT_EQ(foo2_, enum2_->FindValueByNumber(1)); + EXPECT_EQ(baz2_, enum2_->FindValueByNumber(3)); + + EXPECT_TRUE(enum_ ->FindValueByNumber(416) == NULL); + EXPECT_TRUE(enum_ ->FindValueByNumber(3) == NULL); + 
EXPECT_TRUE(enum2_->FindValueByNumber(2) == NULL); +} + +TEST_F(EnumDescriptorTest, ValueName) { + EXPECT_EQ("FOO", foo_->name()); + EXPECT_EQ("BAR", bar_->name()); +} + +TEST_F(EnumDescriptorTest, ValueFullName) { + EXPECT_EQ("FOO", foo_->full_name()); + EXPECT_EQ("BAR", bar_->full_name()); + EXPECT_EQ("corge.grault.FOO", foo2_->full_name()); + EXPECT_EQ("corge.grault.BAZ", baz2_->full_name()); +} + +TEST_F(EnumDescriptorTest, ValueIndex) { + EXPECT_EQ(0, foo_->index()); + EXPECT_EQ(1, bar_->index()); +} + +TEST_F(EnumDescriptorTest, ValueNumber) { + EXPECT_EQ(1, foo_->number()); + EXPECT_EQ(2, bar_->number()); +} + +TEST_F(EnumDescriptorTest, ValueType) { + EXPECT_EQ(enum_ , foo_ ->type()); + EXPECT_EQ(enum_ , bar_ ->type()); + EXPECT_EQ(enum2_, foo2_->type()); + EXPECT_EQ(enum2_, baz2_->type()); +} + +// =================================================================== + +// Test service descriptors. +class ServiceDescriptorTest : public testing::Test { + protected: + virtual void SetUp() { + // Build descriptors for the following messages and service: + // // in "foo.proto" + // message FooRequest {} + // message FooResponse {} + // message BarRequest {} + // message BarResponse {} + // message BazRequest {} + // message BazResponse {} + // + // service TestService { + // rpc Foo(FooRequest) returns (FooResponse); + // rpc Bar(BarRequest) returns (BarResponse); + // } + // + // // in "bar.proto" + // package corge.grault + // service TestService2 { + // rpc Foo(FooRequest) returns (FooResponse); + // rpc Baz(BazRequest) returns (BazResponse); + // } + + FileDescriptorProto foo_file; + foo_file.set_name("foo.proto"); + + AddMessage(&foo_file, "FooRequest"); + AddMessage(&foo_file, "FooResponse"); + AddMessage(&foo_file, "BarRequest"); + AddMessage(&foo_file, "BarResponse"); + AddMessage(&foo_file, "BazRequest"); + AddMessage(&foo_file, "BazResponse"); + + ServiceDescriptorProto* service = AddService(&foo_file, "TestService"); + AddMethod(service, "Foo", "FooRequest", "FooResponse"); + AddMethod(service, "Bar", "BarRequest", "BarResponse"); + + FileDescriptorProto bar_file; + bar_file.set_name("bar.proto"); + bar_file.set_package("corge.grault"); + bar_file.add_dependency("foo.proto"); + + ServiceDescriptorProto* service2 = AddService(&bar_file, "TestService2"); + AddMethod(service2, "Foo", "FooRequest", "FooResponse"); + AddMethod(service2, "Baz", "BazRequest", "BazResponse"); + + // Build the descriptors and get the pointers. 
+ foo_file_ = pool_.BuildFile(foo_file); + ASSERT_TRUE(foo_file_ != NULL); + + bar_file_ = pool_.BuildFile(bar_file); + ASSERT_TRUE(bar_file_ != NULL); + + ASSERT_EQ(6, foo_file_->message_type_count()); + foo_request_ = foo_file_->message_type(0); + foo_response_ = foo_file_->message_type(1); + bar_request_ = foo_file_->message_type(2); + bar_response_ = foo_file_->message_type(3); + baz_request_ = foo_file_->message_type(4); + baz_response_ = foo_file_->message_type(5); + + ASSERT_EQ(1, foo_file_->service_count()); + service_ = foo_file_->service(0); + + ASSERT_EQ(2, service_->method_count()); + foo_ = service_->method(0); + bar_ = service_->method(1); + + ASSERT_EQ(1, bar_file_->service_count()); + service2_ = bar_file_->service(0); + + ASSERT_EQ(2, service2_->method_count()); + foo2_ = service2_->method(0); + baz2_ = service2_->method(1); + } + + DescriptorPool pool_; + + const FileDescriptor* foo_file_; + const FileDescriptor* bar_file_; + + const Descriptor* foo_request_; + const Descriptor* foo_response_; + const Descriptor* bar_request_; + const Descriptor* bar_response_; + const Descriptor* baz_request_; + const Descriptor* baz_response_; + + const ServiceDescriptor* service_; + const ServiceDescriptor* service2_; + + const MethodDescriptor* foo_; + const MethodDescriptor* bar_; + + const MethodDescriptor* foo2_; + const MethodDescriptor* baz2_; +}; + +TEST_F(ServiceDescriptorTest, Name) { + EXPECT_EQ("TestService", service_->name()); + EXPECT_EQ("TestService", service_->full_name()); + EXPECT_EQ(foo_file_, service_->file()); + + EXPECT_EQ("TestService2", service2_->name()); + EXPECT_EQ("corge.grault.TestService2", service2_->full_name()); + EXPECT_EQ(bar_file_, service2_->file()); +} + +TEST_F(ServiceDescriptorTest, MethodsByIndex) { + ASSERT_EQ(2, service_->method_count()); + EXPECT_EQ(foo_, service_->method(0)); + EXPECT_EQ(bar_, service_->method(1)); +} + +TEST_F(ServiceDescriptorTest, FindMethodByName) { + EXPECT_EQ(foo_ , service_ ->FindMethodByName("Foo")); + EXPECT_EQ(bar_ , service_ ->FindMethodByName("Bar")); + EXPECT_EQ(foo2_, service2_->FindMethodByName("Foo")); + EXPECT_EQ(baz2_, service2_->FindMethodByName("Baz")); + + EXPECT_TRUE(service_ ->FindMethodByName("NoSuchMethod") == NULL); + EXPECT_TRUE(service_ ->FindMethodByName("Baz" ) == NULL); + EXPECT_TRUE(service2_->FindMethodByName("Bar" ) == NULL); +} + +TEST_F(ServiceDescriptorTest, MethodName) { + EXPECT_EQ("Foo", foo_->name()); + EXPECT_EQ("Bar", bar_->name()); +} + +TEST_F(ServiceDescriptorTest, MethodFullName) { + EXPECT_EQ("TestService.Foo", foo_->full_name()); + EXPECT_EQ("TestService.Bar", bar_->full_name()); + EXPECT_EQ("corge.grault.TestService2.Foo", foo2_->full_name()); + EXPECT_EQ("corge.grault.TestService2.Baz", baz2_->full_name()); +} + +TEST_F(ServiceDescriptorTest, MethodIndex) { + EXPECT_EQ(0, foo_->index()); + EXPECT_EQ(1, bar_->index()); +} + +TEST_F(ServiceDescriptorTest, MethodParent) { + EXPECT_EQ(service_, foo_->service()); + EXPECT_EQ(service_, bar_->service()); +} + +TEST_F(ServiceDescriptorTest, MethodInputType) { + EXPECT_EQ(foo_request_, foo_->input_type()); + EXPECT_EQ(bar_request_, bar_->input_type()); +} + +TEST_F(ServiceDescriptorTest, MethodOutputType) { + EXPECT_EQ(foo_response_, foo_->output_type()); + EXPECT_EQ(bar_response_, bar_->output_type()); +} + +// =================================================================== + +// Test nested types. 
+class NestedDescriptorTest : public testing::Test { + protected: + virtual void SetUp() { + // Build descriptors for the following definitions: + // + // // in "foo.proto" + // message TestMessage { + // message Foo {} + // message Bar {} + // enum Baz { A = 1; } + // enum Qux { B = 1; } + // } + // + // // in "bar.proto" + // package corge.grault; + // message TestMessage2 { + // message Foo {} + // message Baz {} + // enum Qux { A = 1; } + // enum Quux { C = 1; } + // } + // + // TestMessage2 is primarily here to test FindNestedTypeByName and friends. + // All messages created from the same DescriptorPool share the same lookup + // table, so we need to insure that they don't interfere. + // + // We add enum values to the enums in order to test searching for enum + // values across a message's scope. + + FileDescriptorProto foo_file; + foo_file.set_name("foo.proto"); + + DescriptorProto* message = AddMessage(&foo_file, "TestMessage"); + AddNestedMessage(message, "Foo"); + AddNestedMessage(message, "Bar"); + EnumDescriptorProto* baz = AddNestedEnum(message, "Baz"); + AddEnumValue(baz, "A", 1); + EnumDescriptorProto* qux = AddNestedEnum(message, "Qux"); + AddEnumValue(qux, "B", 1); + + FileDescriptorProto bar_file; + bar_file.set_name("bar.proto"); + bar_file.set_package("corge.grault"); + + DescriptorProto* message2 = AddMessage(&bar_file, "TestMessage2"); + AddNestedMessage(message2, "Foo"); + AddNestedMessage(message2, "Baz"); + EnumDescriptorProto* qux2 = AddNestedEnum(message2, "Qux"); + AddEnumValue(qux2, "A", 1); + EnumDescriptorProto* quux2 = AddNestedEnum(message2, "Quux"); + AddEnumValue(quux2, "C", 1); + + // Build the descriptors and get the pointers. + foo_file_ = pool_.BuildFile(foo_file); + ASSERT_TRUE(foo_file_ != NULL); + + bar_file_ = pool_.BuildFile(bar_file); + ASSERT_TRUE(bar_file_ != NULL); + + ASSERT_EQ(1, foo_file_->message_type_count()); + message_ = foo_file_->message_type(0); + + ASSERT_EQ(2, message_->nested_type_count()); + foo_ = message_->nested_type(0); + bar_ = message_->nested_type(1); + + ASSERT_EQ(2, message_->enum_type_count()); + baz_ = message_->enum_type(0); + qux_ = message_->enum_type(1); + + ASSERT_EQ(1, baz_->value_count()); + a_ = baz_->value(0); + ASSERT_EQ(1, qux_->value_count()); + b_ = qux_->value(0); + + ASSERT_EQ(1, bar_file_->message_type_count()); + message2_ = bar_file_->message_type(0); + + ASSERT_EQ(2, message2_->nested_type_count()); + foo2_ = message2_->nested_type(0); + baz2_ = message2_->nested_type(1); + + ASSERT_EQ(2, message2_->enum_type_count()); + qux2_ = message2_->enum_type(0); + quux2_ = message2_->enum_type(1); + + ASSERT_EQ(1, qux2_->value_count()); + a2_ = qux2_->value(0); + ASSERT_EQ(1, quux2_->value_count()); + c2_ = quux2_->value(0); + } + + DescriptorPool pool_; + + const FileDescriptor* foo_file_; + const FileDescriptor* bar_file_; + + const Descriptor* message_; + const Descriptor* message2_; + + const Descriptor* foo_; + const Descriptor* bar_; + const EnumDescriptor* baz_; + const EnumDescriptor* qux_; + const EnumValueDescriptor* a_; + const EnumValueDescriptor* b_; + + const Descriptor* foo2_; + const Descriptor* baz2_; + const EnumDescriptor* qux2_; + const EnumDescriptor* quux2_; + const EnumValueDescriptor* a2_; + const EnumValueDescriptor* c2_; +}; + +TEST_F(NestedDescriptorTest, MessageName) { + EXPECT_EQ("Foo", foo_ ->name()); + EXPECT_EQ("Bar", bar_ ->name()); + EXPECT_EQ("Foo", foo2_->name()); + EXPECT_EQ("Baz", baz2_->name()); + + EXPECT_EQ("TestMessage.Foo", foo_->full_name()); + 
EXPECT_EQ("TestMessage.Bar", bar_->full_name()); + EXPECT_EQ("corge.grault.TestMessage2.Foo", foo2_->full_name()); + EXPECT_EQ("corge.grault.TestMessage2.Baz", baz2_->full_name()); +} + +TEST_F(NestedDescriptorTest, MessageContainingType) { + EXPECT_EQ(message_ , foo_ ->containing_type()); + EXPECT_EQ(message_ , bar_ ->containing_type()); + EXPECT_EQ(message2_, foo2_->containing_type()); + EXPECT_EQ(message2_, baz2_->containing_type()); +} + +TEST_F(NestedDescriptorTest, NestedMessagesByIndex) { + ASSERT_EQ(2, message_->nested_type_count()); + EXPECT_EQ(foo_, message_->nested_type(0)); + EXPECT_EQ(bar_, message_->nested_type(1)); +} + +TEST_F(NestedDescriptorTest, FindFieldByNameDoesntFindNestedTypes) { + EXPECT_TRUE(message_->FindFieldByName("Foo") == NULL); + EXPECT_TRUE(message_->FindFieldByName("Qux") == NULL); + EXPECT_TRUE(message_->FindExtensionByName("Foo") == NULL); + EXPECT_TRUE(message_->FindExtensionByName("Qux") == NULL); +} + +TEST_F(NestedDescriptorTest, FindNestedTypeByName) { + EXPECT_EQ(foo_ , message_ ->FindNestedTypeByName("Foo")); + EXPECT_EQ(bar_ , message_ ->FindNestedTypeByName("Bar")); + EXPECT_EQ(foo2_, message2_->FindNestedTypeByName("Foo")); + EXPECT_EQ(baz2_, message2_->FindNestedTypeByName("Baz")); + + EXPECT_TRUE(message_ ->FindNestedTypeByName("NoSuchType") == NULL); + EXPECT_TRUE(message_ ->FindNestedTypeByName("Baz" ) == NULL); + EXPECT_TRUE(message2_->FindNestedTypeByName("Bar" ) == NULL); + + EXPECT_TRUE(message_->FindNestedTypeByName("Qux") == NULL); +} + +TEST_F(NestedDescriptorTest, EnumName) { + EXPECT_EQ("Baz" , baz_ ->name()); + EXPECT_EQ("Qux" , qux_ ->name()); + EXPECT_EQ("Qux" , qux2_->name()); + EXPECT_EQ("Quux", quux2_->name()); + + EXPECT_EQ("TestMessage.Baz", baz_->full_name()); + EXPECT_EQ("TestMessage.Qux", qux_->full_name()); + EXPECT_EQ("corge.grault.TestMessage2.Qux" , qux2_ ->full_name()); + EXPECT_EQ("corge.grault.TestMessage2.Quux", quux2_->full_name()); +} + +TEST_F(NestedDescriptorTest, EnumContainingType) { + EXPECT_EQ(message_ , baz_ ->containing_type()); + EXPECT_EQ(message_ , qux_ ->containing_type()); + EXPECT_EQ(message2_, qux2_ ->containing_type()); + EXPECT_EQ(message2_, quux2_->containing_type()); +} + +TEST_F(NestedDescriptorTest, NestedEnumsByIndex) { + ASSERT_EQ(2, message_->nested_type_count()); + EXPECT_EQ(foo_, message_->nested_type(0)); + EXPECT_EQ(bar_, message_->nested_type(1)); +} + +TEST_F(NestedDescriptorTest, FindEnumTypeByName) { + EXPECT_EQ(baz_ , message_ ->FindEnumTypeByName("Baz" )); + EXPECT_EQ(qux_ , message_ ->FindEnumTypeByName("Qux" )); + EXPECT_EQ(qux2_ , message2_->FindEnumTypeByName("Qux" )); + EXPECT_EQ(quux2_, message2_->FindEnumTypeByName("Quux")); + + EXPECT_TRUE(message_ ->FindEnumTypeByName("NoSuchType") == NULL); + EXPECT_TRUE(message_ ->FindEnumTypeByName("Quux" ) == NULL); + EXPECT_TRUE(message2_->FindEnumTypeByName("Baz" ) == NULL); + + EXPECT_TRUE(message_->FindEnumTypeByName("Foo") == NULL); +} + +TEST_F(NestedDescriptorTest, FindEnumValueByName) { + EXPECT_EQ(a_ , message_ ->FindEnumValueByName("A")); + EXPECT_EQ(b_ , message_ ->FindEnumValueByName("B")); + EXPECT_EQ(a2_, message2_->FindEnumValueByName("A")); + EXPECT_EQ(c2_, message2_->FindEnumValueByName("C")); + + EXPECT_TRUE(message_ ->FindEnumValueByName("NO_SUCH_VALUE") == NULL); + EXPECT_TRUE(message_ ->FindEnumValueByName("C" ) == NULL); + EXPECT_TRUE(message2_->FindEnumValueByName("B" ) == NULL); + + EXPECT_TRUE(message_->FindEnumValueByName("Foo") == NULL); +} + +// 
=================================================================== + +// Test extensions. +class ExtensionDescriptorTest : public testing::Test { + protected: + virtual void SetUp() { + // Build descriptors for the following definitions: + // + // enum Baz {} + // message Qux {} + // + // message Foo { + // extensions 10 to 19; + // extensions 30 to 39; + // } + // extends Foo with optional int32 foo_int32 = 10; + // extends Foo with repeated TestEnum foo_enum = 19; + // message Bar { + // extends Foo with optional Qux foo_message = 30; + // // (using Qux as the group type) + // extends Foo with repeated group foo_group = 39; + // } + + FileDescriptorProto foo_file; + foo_file.set_name("foo.proto"); + + AddEmptyEnum(&foo_file, "Baz"); + AddMessage(&foo_file, "Qux"); + + DescriptorProto* foo = AddMessage(&foo_file, "Foo"); + AddExtensionRange(foo, 10, 20); + AddExtensionRange(foo, 30, 40); + + AddExtension(&foo_file, "Foo", "foo_int32", 10, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + AddExtension(&foo_file, "Foo", "foo_enum", 19, + FieldDescriptorProto::LABEL_REPEATED, + FieldDescriptorProto::TYPE_ENUM) + ->set_type_name("Baz"); + + DescriptorProto* bar = AddMessage(&foo_file, "Bar"); + AddNestedExtension(bar, "Foo", "foo_message", 30, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_MESSAGE) + ->set_type_name("Qux"); + AddNestedExtension(bar, "Foo", "foo_group", 39, + FieldDescriptorProto::LABEL_REPEATED, + FieldDescriptorProto::TYPE_GROUP) + ->set_type_name("Qux"); + + // Build the descriptors and get the pointers. + foo_file_ = pool_.BuildFile(foo_file); + ASSERT_TRUE(foo_file_ != NULL); + + ASSERT_EQ(1, foo_file_->enum_type_count()); + baz_ = foo_file_->enum_type(0); + + ASSERT_EQ(3, foo_file_->message_type_count()); + qux_ = foo_file_->message_type(0); + foo_ = foo_file_->message_type(1); + bar_ = foo_file_->message_type(2); + } + + DescriptorPool pool_; + + const FileDescriptor* foo_file_; + + const Descriptor* foo_; + const Descriptor* bar_; + const EnumDescriptor* baz_; + const Descriptor* qux_; +}; + +TEST_F(ExtensionDescriptorTest, ExtensionRanges) { + EXPECT_EQ(0, bar_->extension_range_count()); + ASSERT_EQ(2, foo_->extension_range_count()); + + EXPECT_EQ(10, foo_->extension_range(0)->start); + EXPECT_EQ(30, foo_->extension_range(1)->start); + + EXPECT_EQ(20, foo_->extension_range(0)->end); + EXPECT_EQ(40, foo_->extension_range(1)->end); +}; + +TEST_F(ExtensionDescriptorTest, Extensions) { + EXPECT_EQ(0, foo_->extension_count()); + ASSERT_EQ(2, foo_file_->extension_count()); + ASSERT_EQ(2, bar_->extension_count()); + + EXPECT_TRUE(foo_file_->extension(0)->is_extension()); + EXPECT_TRUE(foo_file_->extension(1)->is_extension()); + EXPECT_TRUE(bar_->extension(0)->is_extension()); + EXPECT_TRUE(bar_->extension(1)->is_extension()); + + EXPECT_EQ("foo_int32" , foo_file_->extension(0)->name()); + EXPECT_EQ("foo_enum" , foo_file_->extension(1)->name()); + EXPECT_EQ("foo_message", bar_->extension(0)->name()); + EXPECT_EQ("foo_group" , bar_->extension(1)->name()); + + EXPECT_EQ(10, foo_file_->extension(0)->number()); + EXPECT_EQ(19, foo_file_->extension(1)->number()); + EXPECT_EQ(30, bar_->extension(0)->number()); + EXPECT_EQ(39, bar_->extension(1)->number()); + + EXPECT_EQ(FieldDescriptor::TYPE_INT32 , foo_file_->extension(0)->type()); + EXPECT_EQ(FieldDescriptor::TYPE_ENUM , foo_file_->extension(1)->type()); + EXPECT_EQ(FieldDescriptor::TYPE_MESSAGE, bar_->extension(0)->type()); + EXPECT_EQ(FieldDescriptor::TYPE_GROUP , 
bar_->extension(1)->type()); + + EXPECT_EQ(baz_, foo_file_->extension(1)->enum_type()); + EXPECT_EQ(qux_, bar_->extension(0)->message_type()); + EXPECT_EQ(qux_, bar_->extension(1)->message_type()); + + EXPECT_EQ(FieldDescriptor::LABEL_OPTIONAL, foo_file_->extension(0)->label()); + EXPECT_EQ(FieldDescriptor::LABEL_REPEATED, foo_file_->extension(1)->label()); + EXPECT_EQ(FieldDescriptor::LABEL_OPTIONAL, bar_->extension(0)->label()); + EXPECT_EQ(FieldDescriptor::LABEL_REPEATED, bar_->extension(1)->label()); + + EXPECT_EQ(foo_, foo_file_->extension(0)->containing_type()); + EXPECT_EQ(foo_, foo_file_->extension(1)->containing_type()); + EXPECT_EQ(foo_, bar_->extension(0)->containing_type()); + EXPECT_EQ(foo_, bar_->extension(1)->containing_type()); + + EXPECT_TRUE(foo_file_->extension(0)->extension_scope() == NULL); + EXPECT_TRUE(foo_file_->extension(1)->extension_scope() == NULL); + EXPECT_EQ(bar_, bar_->extension(0)->extension_scope()); + EXPECT_EQ(bar_, bar_->extension(1)->extension_scope()); +}; + +TEST_F(ExtensionDescriptorTest, IsExtensionNumber) { + EXPECT_FALSE(foo_->IsExtensionNumber( 9)); + EXPECT_TRUE (foo_->IsExtensionNumber(10)); + EXPECT_TRUE (foo_->IsExtensionNumber(19)); + EXPECT_FALSE(foo_->IsExtensionNumber(20)); + EXPECT_FALSE(foo_->IsExtensionNumber(29)); + EXPECT_TRUE (foo_->IsExtensionNumber(30)); + EXPECT_TRUE (foo_->IsExtensionNumber(39)); + EXPECT_FALSE(foo_->IsExtensionNumber(40)); +} + +TEST_F(ExtensionDescriptorTest, FindExtensionByName) { + // Note that FileDescriptor::FindExtensionByName() is tested by + // FileDescriptorTest. + ASSERT_EQ(2, bar_->extension_count()); + + EXPECT_EQ(bar_->extension(0), bar_->FindExtensionByName("foo_message")); + EXPECT_EQ(bar_->extension(1), bar_->FindExtensionByName("foo_group" )); + + EXPECT_TRUE(bar_->FindExtensionByName("no_such_extension") == NULL); + EXPECT_TRUE(foo_->FindExtensionByName("foo_int32") == NULL); + EXPECT_TRUE(foo_->FindExtensionByName("foo_message") == NULL); +} + +TEST_F(ExtensionDescriptorTest, FindAllExtensions) { + vector<const FieldDescriptor*> extensions; + pool_.FindAllExtensions(foo_, &extensions); + ASSERT_EQ(4, extensions.size()); + EXPECT_EQ(10, extensions[0]->number()); + EXPECT_EQ(19, extensions[1]->number()); + EXPECT_EQ(30, extensions[2]->number()); + EXPECT_EQ(39, extensions[3]->number()); +} + +// =================================================================== + +class MiscTest : public testing::Test { + protected: + // Function which makes a field of the given type just to find out what its + // cpp_type is. + FieldDescriptor::CppType GetCppTypeForFieldType(FieldDescriptor::Type type) { + FileDescriptorProto file_proto; + file_proto.set_name("foo.proto"); + AddEmptyEnum(&file_proto, "DummyEnum"); + + DescriptorProto* message = AddMessage(&file_proto, "TestMessage"); + FieldDescriptorProto* field = + AddField(message, "foo", 1, FieldDescriptorProto::LABEL_OPTIONAL, + static_cast<FieldDescriptorProto::Type>(static_cast<int>(type))); + + if (type == FieldDescriptor::TYPE_MESSAGE || + type == FieldDescriptor::TYPE_GROUP) { + field->set_type_name("TestMessage"); + } else if (type == FieldDescriptor::TYPE_ENUM) { + field->set_type_name("DummyEnum"); + } + + // Build the descriptors and get the pointers. 
+ DescriptorPool pool; + const FileDescriptor* file = pool.BuildFile(file_proto); + + if (file != NULL && + file->message_type_count() == 1 && + file->message_type(0)->field_count() == 1) { + return file->message_type(0)->field(0)->cpp_type(); + } else { + return static_cast<FieldDescriptor::CppType>(0); + } + } +}; + +TEST_F(MiscTest, CppTypes) { + // Test that CPP types are assigned correctly. + + typedef FieldDescriptor FD; // avoid ugly line wrapping + + EXPECT_EQ(FD::CPPTYPE_DOUBLE , GetCppTypeForFieldType(FD::TYPE_DOUBLE )); + EXPECT_EQ(FD::CPPTYPE_FLOAT , GetCppTypeForFieldType(FD::TYPE_FLOAT )); + EXPECT_EQ(FD::CPPTYPE_INT64 , GetCppTypeForFieldType(FD::TYPE_INT64 )); + EXPECT_EQ(FD::CPPTYPE_UINT64 , GetCppTypeForFieldType(FD::TYPE_UINT64 )); + EXPECT_EQ(FD::CPPTYPE_INT32 , GetCppTypeForFieldType(FD::TYPE_INT32 )); + EXPECT_EQ(FD::CPPTYPE_UINT64 , GetCppTypeForFieldType(FD::TYPE_FIXED64 )); + EXPECT_EQ(FD::CPPTYPE_UINT32 , GetCppTypeForFieldType(FD::TYPE_FIXED32 )); + EXPECT_EQ(FD::CPPTYPE_BOOL , GetCppTypeForFieldType(FD::TYPE_BOOL )); + EXPECT_EQ(FD::CPPTYPE_STRING , GetCppTypeForFieldType(FD::TYPE_STRING )); + EXPECT_EQ(FD::CPPTYPE_MESSAGE, GetCppTypeForFieldType(FD::TYPE_GROUP )); + EXPECT_EQ(FD::CPPTYPE_MESSAGE, GetCppTypeForFieldType(FD::TYPE_MESSAGE )); + EXPECT_EQ(FD::CPPTYPE_STRING , GetCppTypeForFieldType(FD::TYPE_BYTES )); + EXPECT_EQ(FD::CPPTYPE_UINT32 , GetCppTypeForFieldType(FD::TYPE_UINT32 )); + EXPECT_EQ(FD::CPPTYPE_ENUM , GetCppTypeForFieldType(FD::TYPE_ENUM )); + EXPECT_EQ(FD::CPPTYPE_INT32 , GetCppTypeForFieldType(FD::TYPE_SFIXED32)); + EXPECT_EQ(FD::CPPTYPE_INT64 , GetCppTypeForFieldType(FD::TYPE_SFIXED64)); + EXPECT_EQ(FD::CPPTYPE_INT32 , GetCppTypeForFieldType(FD::TYPE_SINT32 )); + EXPECT_EQ(FD::CPPTYPE_INT64 , GetCppTypeForFieldType(FD::TYPE_SINT64 )); +} + +TEST_F(MiscTest, DefaultValues) { + // Test that setting default values works. + FileDescriptorProto file_proto; + file_proto.set_name("foo.proto"); + + EnumDescriptorProto* enum_type_proto = AddEnum(&file_proto, "DummyEnum"); + AddEnumValue(enum_type_proto, "A", 1); + AddEnumValue(enum_type_proto, "B", 2); + + DescriptorProto* message_proto = AddMessage(&file_proto, "TestMessage"); + + typedef FieldDescriptorProto FD; // avoid ugly line wrapping + const FD::Label label = FD::LABEL_OPTIONAL; + + // Create fields of every CPP type with default values. + AddField(message_proto, "int32" , 1, label, FD::TYPE_INT32 ) + ->set_default_value("-1"); + AddField(message_proto, "int64" , 2, label, FD::TYPE_INT64 ) + ->set_default_value("-1000000000000"); + AddField(message_proto, "uint32", 3, label, FD::TYPE_UINT32) + ->set_default_value("42"); + AddField(message_proto, "uint64", 4, label, FD::TYPE_UINT64) + ->set_default_value("2000000000000"); + AddField(message_proto, "float" , 5, label, FD::TYPE_FLOAT ) + ->set_default_value("4.5"); + AddField(message_proto, "double", 6, label, FD::TYPE_DOUBLE) + ->set_default_value("10e100"); + AddField(message_proto, "bool" , 7, label, FD::TYPE_BOOL ) + ->set_default_value("true"); + AddField(message_proto, "string", 8, label, FD::TYPE_STRING) + ->set_default_value("hello"); + AddField(message_proto, "data" , 9, label, FD::TYPE_BYTES ) + ->set_default_value("\\001\\002\\003"); + + FieldDescriptorProto* enum_field = + AddField(message_proto, "enum", 10, label, FD::TYPE_ENUM); + enum_field->set_type_name("DummyEnum"); + enum_field->set_default_value("B"); + + // Strings are allowed to have empty defaults. (At one point, due to + // a bug, empty defaults for strings were rejected. Oops.) 
+ AddField(message_proto, "empty_string", 11, label, FD::TYPE_STRING) + ->set_default_value(""); + + // Add a second set of fields with implicit default values. + AddField(message_proto, "implicit_int32" , 21, label, FD::TYPE_INT32 ); + AddField(message_proto, "implicit_int64" , 22, label, FD::TYPE_INT64 ); + AddField(message_proto, "implicit_uint32", 23, label, FD::TYPE_UINT32); + AddField(message_proto, "implicit_uint64", 24, label, FD::TYPE_UINT64); + AddField(message_proto, "implicit_float" , 25, label, FD::TYPE_FLOAT ); + AddField(message_proto, "implicit_double", 26, label, FD::TYPE_DOUBLE); + AddField(message_proto, "implicit_bool" , 27, label, FD::TYPE_BOOL ); + AddField(message_proto, "implicit_string", 28, label, FD::TYPE_STRING); + AddField(message_proto, "implicit_data" , 29, label, FD::TYPE_BYTES ); + AddField(message_proto, "implicit_enum" , 30, label, FD::TYPE_ENUM) + ->set_type_name("DummyEnum"); + + // Build it. + DescriptorPool pool; + const FileDescriptor* file = pool.BuildFile(file_proto); + ASSERT_TRUE(file != NULL); + + ASSERT_EQ(1, file->enum_type_count()); + const EnumDescriptor* enum_type = file->enum_type(0); + ASSERT_EQ(2, enum_type->value_count()); + const EnumValueDescriptor* enum_value_a = enum_type->value(0); + const EnumValueDescriptor* enum_value_b = enum_type->value(1); + + ASSERT_EQ(1, file->message_type_count()); + const Descriptor* message = file->message_type(0); + + ASSERT_EQ(21, message->field_count()); + + // Check the default values. + ASSERT_TRUE(message->field(0)->has_default_value()); + ASSERT_TRUE(message->field(1)->has_default_value()); + ASSERT_TRUE(message->field(2)->has_default_value()); + ASSERT_TRUE(message->field(3)->has_default_value()); + ASSERT_TRUE(message->field(4)->has_default_value()); + ASSERT_TRUE(message->field(5)->has_default_value()); + ASSERT_TRUE(message->field(6)->has_default_value()); + ASSERT_TRUE(message->field(7)->has_default_value()); + ASSERT_TRUE(message->field(8)->has_default_value()); + ASSERT_TRUE(message->field(9)->has_default_value()); + ASSERT_TRUE(message->field(10)->has_default_value()); + + EXPECT_EQ(-1 , message->field(0)->default_value_int32 ()); + EXPECT_EQ(-GOOGLE_ULONGLONG(1000000000000), + message->field(1)->default_value_int64 ()); + EXPECT_EQ(42 , message->field(2)->default_value_uint32()); + EXPECT_EQ(GOOGLE_ULONGLONG(2000000000000), + message->field(3)->default_value_uint64()); + EXPECT_EQ(4.5 , message->field(4)->default_value_float ()); + EXPECT_EQ(10e100 , message->field(5)->default_value_double()); + EXPECT_EQ(true , message->field(6)->default_value_bool ()); + EXPECT_EQ("hello" , message->field(7)->default_value_string()); + EXPECT_EQ("\001\002\003" , message->field(8)->default_value_string()); + EXPECT_EQ(enum_value_b , message->field(9)->default_value_enum ()); + EXPECT_EQ("" , message->field(10)->default_value_string()); + + ASSERT_FALSE(message->field(11)->has_default_value()); + ASSERT_FALSE(message->field(12)->has_default_value()); + ASSERT_FALSE(message->field(13)->has_default_value()); + ASSERT_FALSE(message->field(14)->has_default_value()); + ASSERT_FALSE(message->field(15)->has_default_value()); + ASSERT_FALSE(message->field(16)->has_default_value()); + ASSERT_FALSE(message->field(17)->has_default_value()); + ASSERT_FALSE(message->field(18)->has_default_value()); + ASSERT_FALSE(message->field(19)->has_default_value()); + ASSERT_FALSE(message->field(20)->has_default_value()); + + EXPECT_EQ(0 , message->field(11)->default_value_int32 ()); + EXPECT_EQ(0 , 
message->field(12)->default_value_int64 ()); + EXPECT_EQ(0 , message->field(13)->default_value_uint32()); + EXPECT_EQ(0 , message->field(14)->default_value_uint64()); + EXPECT_EQ(0.0f , message->field(15)->default_value_float ()); + EXPECT_EQ(0.0 , message->field(16)->default_value_double()); + EXPECT_EQ(false, message->field(17)->default_value_bool ()); + EXPECT_EQ("" , message->field(18)->default_value_string()); + EXPECT_EQ("" , message->field(19)->default_value_string()); + EXPECT_EQ(enum_value_a, message->field(20)->default_value_enum()); +} + +TEST_F(MiscTest, FieldOptions) { + // Try setting field options. + + FileDescriptorProto file_proto; + file_proto.set_name("foo.proto"); + + DescriptorProto* message_proto = AddMessage(&file_proto, "TestMessage"); + AddField(message_proto, "foo", 1, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + FieldDescriptorProto* bar_proto = + AddField(message_proto, "bar", 2, + FieldDescriptorProto::LABEL_OPTIONAL, + FieldDescriptorProto::TYPE_INT32); + + FieldOptions* options = bar_proto->mutable_options(); + options->set_ctype(FieldOptions::CORD); + + // Build the descriptors and get the pointers. + DescriptorPool pool; + const FileDescriptor* file = pool.BuildFile(file_proto); + ASSERT_TRUE(file != NULL); + + ASSERT_EQ(1, file->message_type_count()); + const Descriptor* message = file->message_type(0); + + ASSERT_EQ(2, message->field_count()); + const FieldDescriptor* foo = message->field(0); + const FieldDescriptor* bar = message->field(1); + + // "foo" had no options set, so it should return the default options. + EXPECT_EQ(&FieldOptions::default_instance(), &foo->options()); + + // "bar" had options set. + EXPECT_NE(&FieldOptions::default_instance(), options); + EXPECT_TRUE(bar->options().has_ctype()); + EXPECT_EQ(FieldOptions::CORD, bar->options().ctype()); +} + +// =================================================================== + +class AllowUnknownDependenciesTest : public testing::Test { + protected: + virtual void SetUp() { + FileDescriptorProto foo_proto, bar_proto; + + pool_.AllowUnknownDependencies(); + + ASSERT_TRUE(TextFormat::ParseFromString( + "name: 'foo.proto'" + "dependency: 'bar.proto'" + "dependency: 'baz.proto'" + "message_type {" + " name: 'Foo'" + " field { name:'bar' number:1 label:LABEL_OPTIONAL type_name:'Bar' }" + " field { name:'baz' number:2 label:LABEL_OPTIONAL type_name:'Baz' }" + " field { name:'qux' number:3 label:LABEL_OPTIONAL" + " type_name: '.corge.Qux'" + " type: TYPE_ENUM" + " options {" + " uninterpreted_option {" + " name {" + " name_part: 'grault'" + " is_extension: true" + " }" + " positive_int_value: 1234" + " }" + " }" + " }" + "}", + &foo_proto)); + ASSERT_TRUE(TextFormat::ParseFromString( + "name: 'bar.proto'" + "message_type { name: 'Bar' }", + &bar_proto)); + + // Collect pointers to stuff. 
+ bar_file_ = pool_.BuildFile(bar_proto); + ASSERT_TRUE(bar_file_ != NULL); + + ASSERT_EQ(1, bar_file_->message_type_count()); + bar_type_ = bar_file_->message_type(0); + + foo_file_ = pool_.BuildFile(foo_proto); + ASSERT_TRUE(foo_file_ != NULL); + + ASSERT_EQ(1, foo_file_->message_type_count()); + foo_type_ = foo_file_->message_type(0); + + ASSERT_EQ(3, foo_type_->field_count()); + bar_field_ = foo_type_->field(0); + baz_field_ = foo_type_->field(1); + qux_field_ = foo_type_->field(2); + } + + const FileDescriptor* bar_file_; + const Descriptor* bar_type_; + const FileDescriptor* foo_file_; + const Descriptor* foo_type_; + const FieldDescriptor* bar_field_; + const FieldDescriptor* baz_field_; + const FieldDescriptor* qux_field_; + + DescriptorPool pool_; +}; + +TEST_F(AllowUnknownDependenciesTest, PlaceholderFile) { + ASSERT_EQ(2, foo_file_->dependency_count()); + EXPECT_EQ(bar_file_, foo_file_->dependency(0)); + + const FileDescriptor* baz_file = foo_file_->dependency(1); + EXPECT_EQ("baz.proto", baz_file->name()); + EXPECT_EQ(0, baz_file->message_type_count()); + + // Placeholder files should not be findable. + EXPECT_EQ(bar_file_, pool_.FindFileByName(bar_file_->name())); + EXPECT_TRUE(pool_.FindFileByName(baz_file->name()) == NULL); +} + +TEST_F(AllowUnknownDependenciesTest, PlaceholderTypes) { + ASSERT_EQ(FieldDescriptor::TYPE_MESSAGE, bar_field_->type()); + EXPECT_EQ(bar_type_, bar_field_->message_type()); + + ASSERT_EQ(FieldDescriptor::TYPE_MESSAGE, baz_field_->type()); + const Descriptor* baz_type = baz_field_->message_type(); + EXPECT_EQ("Baz", baz_type->name()); + EXPECT_EQ("Baz", baz_type->full_name()); + EXPECT_EQ("Baz.placeholder.proto", baz_type->file()->name()); + EXPECT_EQ(0, baz_type->extension_range_count()); + + ASSERT_EQ(FieldDescriptor::TYPE_ENUM, qux_field_->type()); + const EnumDescriptor* qux_type = qux_field_->enum_type(); + EXPECT_EQ("Qux", qux_type->name()); + EXPECT_EQ("corge.Qux", qux_type->full_name()); + EXPECT_EQ("corge.Qux.placeholder.proto", qux_type->file()->name()); + + // Placeholder types should not be findable. + EXPECT_EQ(bar_type_, pool_.FindMessageTypeByName(bar_type_->full_name())); + EXPECT_TRUE(pool_.FindMessageTypeByName(baz_type->full_name()) == NULL); + EXPECT_TRUE(pool_.FindEnumTypeByName(qux_type->full_name()) == NULL); +} + +TEST_F(AllowUnknownDependenciesTest, CopyTo) { + // FieldDescriptor::CopyTo() should write non-fully-qualified type names + // for placeholder types which were not originally fully-qualified. + FieldDescriptorProto proto; + + // Bar is not a placeholder, so it is fully-qualified. + bar_field_->CopyTo(&proto); + EXPECT_EQ(".Bar", proto.type_name()); + EXPECT_EQ(FieldDescriptorProto::TYPE_MESSAGE, proto.type()); + + // Baz is an unqualified placeholder. + proto.Clear(); + baz_field_->CopyTo(&proto); + EXPECT_EQ("Baz", proto.type_name()); + EXPECT_FALSE(proto.has_type()); + + // Qux is a fully-qualified placeholder. + proto.Clear(); + qux_field_->CopyTo(&proto); + EXPECT_EQ(".corge.Qux", proto.type_name()); + EXPECT_EQ(FieldDescriptorProto::TYPE_ENUM, proto.type()); +} + +TEST_F(AllowUnknownDependenciesTest, CustomOptions) { + // Qux should still have the uninterpreted option attached. 
+ ASSERT_EQ(1, qux_field_->options().uninterpreted_option_size()); + const UninterpretedOption& option = + qux_field_->options().uninterpreted_option(0); + ASSERT_EQ(1, option.name_size()); + EXPECT_EQ("grault", option.name(0).name_part()); +} + +TEST_F(AllowUnknownDependenciesTest, UnknownExtendee) { + // Test that we can extend an unknown type. This is slightly tricky because + // it means that the placeholder type must have an extension range. + + FileDescriptorProto extension_proto; + + ASSERT_TRUE(TextFormat::ParseFromString( + "name: 'extension.proto'" + "extension { extendee: 'UnknownType' name:'some_extension' number:123" + " label:LABEL_OPTIONAL type:TYPE_INT32 }", + &extension_proto)); + const FileDescriptor* file = pool_.BuildFile(extension_proto); + + ASSERT_TRUE(file != NULL); + + ASSERT_EQ(1, file->extension_count()); + const Descriptor* extendee = file->extension(0)->containing_type(); + EXPECT_EQ("UnknownType", extendee->name()); + ASSERT_EQ(1, extendee->extension_range_count()); + EXPECT_EQ(1, extendee->extension_range(0)->start); + EXPECT_EQ(FieldDescriptor::kMaxNumber + 1, extendee->extension_range(0)->end); +} + +TEST_F(AllowUnknownDependenciesTest, CustomOption) { + // Test that we can use a custom option without having parsed + // descriptor.proto. + + FileDescriptorProto option_proto; + + ASSERT_TRUE(TextFormat::ParseFromString( + "name: \"unknown_custom_options.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "extension { " + " extendee: \"google.protobuf.FileOptions\" " + " name: \"some_option\" " + " number: 123456 " + " label: LABEL_OPTIONAL " + " type: TYPE_INT32 " + "} " + "options { " + " uninterpreted_option { " + " name { " + " name_part: \"some_option\" " + " is_extension: true " + " } " + " positive_int_value: 1234 " + " } " + " uninterpreted_option { " + " name { " + " name_part: \"unknown_option\" " + " is_extension: true " + " } " + " positive_int_value: 1234 " + " } " + " uninterpreted_option { " + " name { " + " name_part: \"optimize_for\" " + " is_extension: false " + " } " + " identifier_value: \"SPEED\" " + " } " + "}", + &option_proto)); + + const FileDescriptor* file = pool_.BuildFile(option_proto); + ASSERT_TRUE(file != NULL); + + // Verify that no extension options were set, but they were left as + // uninterpreted_options. + vector<const FieldDescriptor*> fields; + file->options().GetReflection()->ListFields(file->options(), &fields); + ASSERT_EQ(2, fields.size()); + EXPECT_TRUE(file->options().has_optimize_for()); + EXPECT_EQ(2, file->options().uninterpreted_option_size()); +} + +// =================================================================== + +TEST(CustomOptions, OptionLocations) { + const Descriptor* message = + protobuf_unittest::TestMessageWithCustomOptions::descriptor(); + const FileDescriptor* file = message->file(); + const FieldDescriptor* field = message->FindFieldByName("field1"); + const EnumDescriptor* enm = message->FindEnumTypeByName("AnEnum"); + // TODO(benjy): Support EnumValue options, once the compiler does. 
+ const ServiceDescriptor* service = + file->FindServiceByName("TestServiceWithCustomOptions"); + const MethodDescriptor* method = service->FindMethodByName("Foo"); + + EXPECT_EQ(GOOGLE_LONGLONG(9876543210), + file->options().GetExtension(protobuf_unittest::file_opt1)); + EXPECT_EQ(-56, + message->options().GetExtension(protobuf_unittest::message_opt1)); + EXPECT_EQ(GOOGLE_LONGLONG(8765432109), + field->options().GetExtension(protobuf_unittest::field_opt1)); + EXPECT_EQ(42, // Check that we get the default for an option we don't set. + field->options().GetExtension(protobuf_unittest::field_opt2)); + EXPECT_EQ(-789, + enm->options().GetExtension(protobuf_unittest::enum_opt1)); + EXPECT_EQ(123, + enm->value(1)->options().GetExtension( + protobuf_unittest::enum_value_opt1)); + EXPECT_EQ(GOOGLE_LONGLONG(-9876543210), + service->options().GetExtension(protobuf_unittest::service_opt1)); + EXPECT_EQ(protobuf_unittest::METHODOPT1_VAL2, + method->options().GetExtension(protobuf_unittest::method_opt1)); + + // See that the regular options went through unscathed. + EXPECT_TRUE(message->options().has_message_set_wire_format()); + EXPECT_EQ(FieldOptions::CORD, field->options().ctype()); +} + +TEST(CustomOptions, OptionTypes) { + const MessageOptions* options = NULL; + + options = + &protobuf_unittest::CustomOptionMinIntegerValues::descriptor()->options(); + EXPECT_EQ(false , options->GetExtension(protobuf_unittest::bool_opt)); + EXPECT_EQ(kint32min, options->GetExtension(protobuf_unittest::int32_opt)); + EXPECT_EQ(kint64min, options->GetExtension(protobuf_unittest::int64_opt)); + EXPECT_EQ(0 , options->GetExtension(protobuf_unittest::uint32_opt)); + EXPECT_EQ(0 , options->GetExtension(protobuf_unittest::uint64_opt)); + EXPECT_EQ(kint32min, options->GetExtension(protobuf_unittest::sint32_opt)); + EXPECT_EQ(kint64min, options->GetExtension(protobuf_unittest::sint64_opt)); + EXPECT_EQ(0 , options->GetExtension(protobuf_unittest::fixed32_opt)); + EXPECT_EQ(0 , options->GetExtension(protobuf_unittest::fixed64_opt)); + EXPECT_EQ(kint32min, options->GetExtension(protobuf_unittest::sfixed32_opt)); + EXPECT_EQ(kint64min, options->GetExtension(protobuf_unittest::sfixed64_opt)); + + options = + &protobuf_unittest::CustomOptionMaxIntegerValues::descriptor()->options(); + EXPECT_EQ(true , options->GetExtension(protobuf_unittest::bool_opt)); + EXPECT_EQ(kint32max , options->GetExtension(protobuf_unittest::int32_opt)); + EXPECT_EQ(kint64max , options->GetExtension(protobuf_unittest::int64_opt)); + EXPECT_EQ(kuint32max, options->GetExtension(protobuf_unittest::uint32_opt)); + EXPECT_EQ(kuint64max, options->GetExtension(protobuf_unittest::uint64_opt)); + EXPECT_EQ(kint32max , options->GetExtension(protobuf_unittest::sint32_opt)); + EXPECT_EQ(kint64max , options->GetExtension(protobuf_unittest::sint64_opt)); + EXPECT_EQ(kuint32max, options->GetExtension(protobuf_unittest::fixed32_opt)); + EXPECT_EQ(kuint64max, options->GetExtension(protobuf_unittest::fixed64_opt)); + EXPECT_EQ(kint32max , options->GetExtension(protobuf_unittest::sfixed32_opt)); + EXPECT_EQ(kint64max , options->GetExtension(protobuf_unittest::sfixed64_opt)); + + options = + &protobuf_unittest::CustomOptionOtherValues::descriptor()->options(); + EXPECT_EQ(-100, options->GetExtension(protobuf_unittest::int32_opt)); + EXPECT_FLOAT_EQ(12.3456789, + options->GetExtension(protobuf_unittest::float_opt)); + EXPECT_DOUBLE_EQ(1.234567890123456789, + options->GetExtension(protobuf_unittest::double_opt)); + EXPECT_EQ("Hello, \"World\"", + 
options->GetExtension(protobuf_unittest::string_opt)); + + EXPECT_EQ(string("Hello\0World", 11), + options->GetExtension(protobuf_unittest::bytes_opt)); + + EXPECT_EQ(protobuf_unittest::DummyMessageContainingEnum::TEST_OPTION_ENUM_TYPE2, + options->GetExtension(protobuf_unittest::enum_opt)); + + options = + &protobuf_unittest::SettingRealsFromPositiveInts::descriptor()->options(); + EXPECT_FLOAT_EQ(12, options->GetExtension(protobuf_unittest::float_opt)); + EXPECT_DOUBLE_EQ(154, options->GetExtension(protobuf_unittest::double_opt)); + + options = + &protobuf_unittest::SettingRealsFromNegativeInts::descriptor()->options(); + EXPECT_FLOAT_EQ(-12, options->GetExtension(protobuf_unittest::float_opt)); + EXPECT_DOUBLE_EQ(-154, options->GetExtension(protobuf_unittest::double_opt)); +} + +TEST(CustomOptions, ComplexExtensionOptions) { + const MessageOptions* options = + &protobuf_unittest::VariousComplexOptions::descriptor()->options(); + EXPECT_EQ(options->GetExtension(protobuf_unittest::complex_opt1).foo(), 42); + EXPECT_EQ(options->GetExtension(protobuf_unittest::complex_opt1). + GetExtension(protobuf_unittest::quux), 324); + EXPECT_EQ(options->GetExtension(protobuf_unittest::complex_opt1). + GetExtension(protobuf_unittest::corge).qux(), 876); + EXPECT_EQ(options->GetExtension(protobuf_unittest::complex_opt2).baz(), 987); + EXPECT_EQ(options->GetExtension(protobuf_unittest::complex_opt2). + GetExtension(protobuf_unittest::grault), 654); + EXPECT_EQ(options->GetExtension(protobuf_unittest::complex_opt2).bar().foo(), + 743); + EXPECT_EQ(options->GetExtension(protobuf_unittest::complex_opt2).bar(). + GetExtension(protobuf_unittest::quux), 1999); + EXPECT_EQ(options->GetExtension(protobuf_unittest::complex_opt2).bar(). + GetExtension(protobuf_unittest::corge).qux(), 2008); + EXPECT_EQ(options->GetExtension(protobuf_unittest::complex_opt2). + GetExtension(protobuf_unittest::garply).foo(), 741); + EXPECT_EQ(options->GetExtension(protobuf_unittest::complex_opt2). + GetExtension(protobuf_unittest::garply). + GetExtension(protobuf_unittest::quux), 1998); + EXPECT_EQ(options->GetExtension(protobuf_unittest::complex_opt2). + GetExtension(protobuf_unittest::garply). + GetExtension(protobuf_unittest::corge).qux(), 2121); + EXPECT_EQ(options->GetExtension( + protobuf_unittest::ComplexOptionType2::ComplexOptionType4::complex_opt4). + waldo(), 1971); + EXPECT_EQ(options->GetExtension(protobuf_unittest::complex_opt2). + fred().waldo(), 321); + EXPECT_EQ(9, options->GetExtension(protobuf_unittest::complex_opt3).qux()); + EXPECT_EQ(22, options->GetExtension(protobuf_unittest::complex_opt3). + complexoptiontype5().plugh()); + EXPECT_EQ(24, options->GetExtension(protobuf_unittest::complexopt6).xyzzy()); +} + +TEST(CustomOptions, OptionsFromOtherFile) { + // Test that to use a custom option, we only need to import the file + // defining the option; we do not also have to import descriptor.proto. 
+ DescriptorPool pool; + + FileDescriptorProto file_proto; + FileDescriptorProto::descriptor()->file()->CopyTo(&file_proto); + ASSERT_TRUE(pool.BuildFile(file_proto) != NULL); + + protobuf_unittest::TestMessageWithCustomOptions::descriptor() + ->file()->CopyTo(&file_proto); + ASSERT_TRUE(pool.BuildFile(file_proto) != NULL); + + ASSERT_TRUE(TextFormat::ParseFromString( + "name: \"custom_options_import.proto\" " + "package: \"protobuf_unittest\" " + "dependency: \"google/protobuf/unittest_custom_options.proto\" " + "options { " + " uninterpreted_option { " + " name { " + " name_part: \"file_opt1\" " + " is_extension: true " + " } " + " positive_int_value: 1234 " + " } " + // Test a non-extension option too. (At one point this failed due to a + // bug.) + " uninterpreted_option { " + " name { " + " name_part: \"java_package\" " + " is_extension: false " + " } " + " string_value: \"foo\" " + " } " + // Test that enum-typed options still work too. (At one point this also + // failed due to a bug.) + " uninterpreted_option { " + " name { " + " name_part: \"optimize_for\" " + " is_extension: false " + " } " + " identifier_value: \"SPEED\" " + " } " + "}" + , + &file_proto)); + + const FileDescriptor* file = pool.BuildFile(file_proto); + ASSERT_TRUE(file != NULL); + EXPECT_EQ(1234, file->options().GetExtension(protobuf_unittest::file_opt1)); + EXPECT_TRUE(file->options().has_java_package()); + EXPECT_EQ("foo", file->options().java_package()); + EXPECT_TRUE(file->options().has_optimize_for()); + EXPECT_EQ(FileOptions::SPEED, file->options().optimize_for()); +} + +TEST(CustomOptions, MessageOptionThreeFieldsSet) { + // This tests a bug which previously existed in custom options parsing. The + // bug occurred when you defined a custom option with message type and then + // set three fields of that option on a single definition (see the example + // below). The bug is a bit hard to explain, so check the change history if + // you want to know more. 
+ DescriptorPool pool; + + FileDescriptorProto file_proto; + FileDescriptorProto::descriptor()->file()->CopyTo(&file_proto); + ASSERT_TRUE(pool.BuildFile(file_proto) != NULL); + + protobuf_unittest::TestMessageWithCustomOptions::descriptor() + ->file()->CopyTo(&file_proto); + ASSERT_TRUE(pool.BuildFile(file_proto) != NULL); + + // The following represents the definition: + // + // import "google/protobuf/unittest_custom_options.proto" + // package protobuf_unittest; + // message Foo { + // option (complex_opt1).foo = 1234; + // option (complex_opt1).foo2 = 1234; + // option (complex_opt1).foo3 = 1234; + // } + ASSERT_TRUE(TextFormat::ParseFromString( + "name: \"custom_options_import.proto\" " + "package: \"protobuf_unittest\" " + "dependency: \"google/protobuf/unittest_custom_options.proto\" " + "message_type { " + " name: \"Foo\" " + " options { " + " uninterpreted_option { " + " name { " + " name_part: \"complex_opt1\" " + " is_extension: true " + " } " + " name { " + " name_part: \"foo\" " + " is_extension: false " + " } " + " positive_int_value: 1234 " + " } " + " uninterpreted_option { " + " name { " + " name_part: \"complex_opt1\" " + " is_extension: true " + " } " + " name { " + " name_part: \"foo2\" " + " is_extension: false " + " } " + " positive_int_value: 1234 " + " } " + " uninterpreted_option { " + " name { " + " name_part: \"complex_opt1\" " + " is_extension: true " + " } " + " name { " + " name_part: \"foo3\" " + " is_extension: false " + " } " + " positive_int_value: 1234 " + " } " + " } " + "}", + &file_proto)); + + const FileDescriptor* file = pool.BuildFile(file_proto); + ASSERT_TRUE(file != NULL); + ASSERT_EQ(1, file->message_type_count()); + + const MessageOptions& options = file->message_type(0)->options(); + EXPECT_EQ(1234, options.GetExtension(protobuf_unittest::complex_opt1).foo()); +} + +// Check that aggregate options were parsed and saved correctly in +// the appropriate descriptors. 
+TEST(CustomOptions, AggregateOptions) { + const Descriptor* msg = protobuf_unittest::AggregateMessage::descriptor(); + const FileDescriptor* file = msg->file(); + const FieldDescriptor* field = msg->FindFieldByName("fieldname"); + const EnumDescriptor* enumd = file->FindEnumTypeByName("AggregateEnum"); + const EnumValueDescriptor* enumv = enumd->FindValueByName("VALUE"); + const ServiceDescriptor* service = file->FindServiceByName( + "AggregateService"); + const MethodDescriptor* method = service->FindMethodByName("Method"); + + // Tests for the different types of data embedded in fileopt + const protobuf_unittest::Aggregate& file_options = + file->options().GetExtension(protobuf_unittest::fileopt); + EXPECT_EQ(100, file_options.i()); + EXPECT_EQ("FileAnnotation", file_options.s()); + EXPECT_EQ("NestedFileAnnotation", file_options.sub().s()); + EXPECT_EQ("FileExtensionAnnotation", + file_options.file().GetExtension(protobuf_unittest::fileopt).s()); + EXPECT_EQ("EmbeddedMessageSetElement", + file_options.mset().GetExtension( + protobuf_unittest::AggregateMessageSetElement + ::message_set_extension).s()); + + // Simple tests for all the other types of annotations + EXPECT_EQ("MessageAnnotation", + msg->options().GetExtension(protobuf_unittest::msgopt).s()); + EXPECT_EQ("FieldAnnotation", + field->options().GetExtension(protobuf_unittest::fieldopt).s()); + EXPECT_EQ("EnumAnnotation", + enumd->options().GetExtension(protobuf_unittest::enumopt).s()); + EXPECT_EQ("EnumValueAnnotation", + enumv->options().GetExtension(protobuf_unittest::enumvalopt).s()); + EXPECT_EQ("ServiceAnnotation", + service->options().GetExtension(protobuf_unittest::serviceopt).s()); + EXPECT_EQ("MethodAnnotation", + method->options().GetExtension(protobuf_unittest::methodopt).s()); +} + +// =================================================================== + +// The tests below trigger every unique call to AddError() in descriptor.cc, +// in the order in which they appear in that file. I'm using TextFormat here +// to specify the input descriptors because building them using code would +// be too bulky. + +class MockErrorCollector : public DescriptorPool::ErrorCollector { + public: + MockErrorCollector() {} + ~MockErrorCollector() {} + + string text_; + + // implements ErrorCollector --------------------------------------- + void AddError(const string& filename, + const string& element_name, const Message* descriptor, + ErrorLocation location, const string& message) { + const char* location_name = NULL; + switch (location) { + case NAME : location_name = "NAME" ; break; + case NUMBER : location_name = "NUMBER" ; break; + case TYPE : location_name = "TYPE" ; break; + case EXTENDEE : location_name = "EXTENDEE" ; break; + case DEFAULT_VALUE: location_name = "DEFAULT_VALUE"; break; + case OPTION_NAME : location_name = "OPTION_NAME" ; break; + case OPTION_VALUE : location_name = "OPTION_VALUE" ; break; + case INPUT_TYPE : location_name = "INPUT_TYPE" ; break; + case OUTPUT_TYPE : location_name = "OUTPUT_TYPE" ; break; + case OTHER : location_name = "OTHER" ; break; + } + + strings::SubstituteAndAppend( + &text_, "$0: $1: $2: $3\n", + filename, element_name, location_name, message); + } +}; + +class ValidationErrorTest : public testing::Test { + protected: + // Parse file_text as a FileDescriptorProto in text format and add it + // to the DescriptorPool. Expect no errors. 
+ void BuildFile(const string& file_text) { + FileDescriptorProto file_proto; + ASSERT_TRUE(TextFormat::ParseFromString(file_text, &file_proto)); + ASSERT_TRUE(pool_.BuildFile(file_proto) != NULL); + } + + // Parse file_text as a FileDescriptorProto in text format and add it + // to the DescriptorPool. Expect errors to be produced which match the + // given error text. + void BuildFileWithErrors(const string& file_text, + const string& expected_errors) { + FileDescriptorProto file_proto; + ASSERT_TRUE(TextFormat::ParseFromString(file_text, &file_proto)); + + MockErrorCollector error_collector; + EXPECT_TRUE( + pool_.BuildFileCollectingErrors(file_proto, &error_collector) == NULL); + EXPECT_EQ(expected_errors, error_collector.text_); + } + + // Builds some already-parsed file in our test pool. + void BuildFileInTestPool(const FileDescriptor* file) { + FileDescriptorProto file_proto; + file->CopyTo(&file_proto); + ASSERT_TRUE(pool_.BuildFile(file_proto) != NULL); + } + + // Build descriptor.proto in our test pool. This allows us to extend it in + // the test pool, so we can test custom options. + void BuildDescriptorMessagesInTestPool() { + BuildFileInTestPool(DescriptorProto::descriptor()->file()); + } + + DescriptorPool pool_; +}; + +TEST_F(ValidationErrorTest, AlreadyDefined) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type { name: \"Foo\" }" + "message_type { name: \"Foo\" }", + + "foo.proto: Foo: NAME: \"Foo\" is already defined.\n"); +} + +TEST_F(ValidationErrorTest, AlreadyDefinedInPackage) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "package: \"foo.bar\" " + "message_type { name: \"Foo\" }" + "message_type { name: \"Foo\" }", + + "foo.proto: foo.bar.Foo: NAME: \"Foo\" is already defined in " + "\"foo.bar\".\n"); +} + +TEST_F(ValidationErrorTest, AlreadyDefinedInOtherFile) { + BuildFile( + "name: \"foo.proto\" " + "message_type { name: \"Foo\" }"); + + BuildFileWithErrors( + "name: \"bar.proto\" " + "message_type { name: \"Foo\" }", + + "bar.proto: Foo: NAME: \"Foo\" is already defined in file " + "\"foo.proto\".\n"); +} + +TEST_F(ValidationErrorTest, PackageAlreadyDefined) { + BuildFile( + "name: \"foo.proto\" " + "message_type { name: \"foo\" }"); + BuildFileWithErrors( + "name: \"bar.proto\" " + "package: \"foo.bar\"", + + "bar.proto: foo: NAME: \"foo\" is already defined (as something other " + "than a package) in file \"foo.proto\".\n"); +} + +TEST_F(ValidationErrorTest, EnumValueAlreadyDefinedInParent) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "enum_type { name: \"Foo\" value { name: \"FOO\" number: 1 } } " + "enum_type { name: \"Bar\" value { name: \"FOO\" number: 1 } } ", + + "foo.proto: FOO: NAME: \"FOO\" is already defined.\n" + "foo.proto: FOO: NAME: Note that enum values use C++ scoping rules, " + "meaning that enum values are siblings of their type, not children of " + "it. Therefore, \"FOO\" must be unique within the global scope, not " + "just within \"Bar\".\n"); +} + +TEST_F(ValidationErrorTest, EnumValueAlreadyDefinedInParentNonGlobal) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "package: \"pkg\" " + "enum_type { name: \"Foo\" value { name: \"FOO\" number: 1 } } " + "enum_type { name: \"Bar\" value { name: \"FOO\" number: 1 } } ", + + "foo.proto: pkg.FOO: NAME: \"FOO\" is already defined in \"pkg\".\n" + "foo.proto: pkg.FOO: NAME: Note that enum values use C++ scoping rules, " + "meaning that enum values are siblings of their type, not children of " + "it. 
Therefore, \"FOO\" must be unique within \"pkg\", not just within " + "\"Bar\".\n"); +} + +TEST_F(ValidationErrorTest, MissingName) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type { }", + + "foo.proto: : NAME: Missing name.\n"); +} + +TEST_F(ValidationErrorTest, InvalidName) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type { name: \"$\" }", + + "foo.proto: $: NAME: \"$\" is not a valid identifier.\n"); +} + +TEST_F(ValidationErrorTest, InvalidPackageName) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "package: \"foo.$\"", + + "foo.proto: foo.$: NAME: \"$\" is not a valid identifier.\n"); +} + +TEST_F(ValidationErrorTest, MissingFileName) { + BuildFileWithErrors( + "", + + ": : OTHER: Missing field: FileDescriptorProto.name.\n"); +} + +TEST_F(ValidationErrorTest, DupeDependency) { + BuildFile("name: \"foo.proto\""); + BuildFileWithErrors( + "name: \"bar.proto\" " + "dependency: \"foo.proto\" " + "dependency: \"foo.proto\" ", + + "bar.proto: bar.proto: OTHER: Import \"foo.proto\" was listed twice.\n"); +} + +TEST_F(ValidationErrorTest, UnknownDependency) { + BuildFileWithErrors( + "name: \"bar.proto\" " + "dependency: \"foo.proto\" ", + + "bar.proto: bar.proto: OTHER: Import \"foo.proto\" has not been loaded.\n"); +} + +TEST_F(ValidationErrorTest, ForeignUnimportedPackageNoCrash) { + // Used to crash: If we depend on a non-existent file and then refer to a + // package defined in a file that we didn't import, and that package is + // nested within a parent package which this file is also in, and we don't + // include that parent package in the name (i.e. we do a relative lookup)... + // Yes, really. + BuildFile( + "name: 'foo.proto' " + "package: 'outer.foo' "); + BuildFileWithErrors( + "name: 'bar.proto' " + "dependency: 'baz.proto' " + "package: 'outer.bar' " + "message_type { " + " name: 'Bar' " + " field { name:'bar' number:1 label:LABEL_OPTIONAL type_name:'foo.Foo' }" + "}", + + "bar.proto: bar.proto: OTHER: Import \"baz.proto\" has not been loaded.\n" + "bar.proto: outer.bar.Bar.bar: TYPE: \"outer.foo\" seems to be defined in " + "\"foo.proto\", which is not imported by \"bar.proto\". To use it here, " + "please add the necessary import.\n"); +} + +TEST_F(ValidationErrorTest, DupeFile) { + BuildFile( + "name: \"foo.proto\" " + "message_type { name: \"Foo\" }"); + // Note: We should *not* get redundant errors about "Foo" already being + // defined. + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type { name: \"Foo\" } " + // Add another type so that the files aren't identical (in which case there + // would be no error). 
+ "enum_type { name: \"Bar\" }", + + "foo.proto: foo.proto: OTHER: A file with this name is already in the " + "pool.\n"); +} + +TEST_F(ValidationErrorTest, FieldInExtensionRange) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " field { name: \"foo\" number: 9 label:LABEL_OPTIONAL type:TYPE_INT32 }" + " field { name: \"bar\" number: 10 label:LABEL_OPTIONAL type:TYPE_INT32 }" + " field { name: \"baz\" number: 19 label:LABEL_OPTIONAL type:TYPE_INT32 }" + " field { name: \"qux\" number: 20 label:LABEL_OPTIONAL type:TYPE_INT32 }" + " extension_range { start: 10 end: 20 }" + "}", + + "foo.proto: Foo.bar: NUMBER: Extension range 10 to 19 includes field " + "\"bar\" (10).\n" + "foo.proto: Foo.baz: NUMBER: Extension range 10 to 19 includes field " + "\"baz\" (19).\n"); +} + +TEST_F(ValidationErrorTest, OverlappingExtensionRanges) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " extension_range { start: 10 end: 20 }" + " extension_range { start: 20 end: 30 }" + " extension_range { start: 19 end: 21 }" + "}", + + "foo.proto: Foo: NUMBER: Extension range 19 to 20 overlaps with " + "already-defined range 10 to 19.\n" + "foo.proto: Foo: NUMBER: Extension range 19 to 20 overlaps with " + "already-defined range 20 to 29.\n"); +} + +TEST_F(ValidationErrorTest, InvalidDefaults) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + + // Invalid number. + " field { name: \"foo\" number: 1 label: LABEL_OPTIONAL type: TYPE_INT32" + " default_value: \"abc\" }" + + // Empty default value. + " field { name: \"bar\" number: 2 label: LABEL_OPTIONAL type: TYPE_INT32" + " default_value: \"\" }" + + // Invalid boolean. + " field { name: \"baz\" number: 3 label: LABEL_OPTIONAL type: TYPE_BOOL" + " default_value: \"abc\" }" + + // Messages can't have defaults. + " field { name: \"qux\" number: 4 label: LABEL_OPTIONAL type: TYPE_MESSAGE" + " default_value: \"abc\" type_name: \"Foo\" }" + + // Same thing, but we don't know that this field has message type until + // we look up the type name. + " field { name: \"quux\" number: 5 label: LABEL_OPTIONAL" + " default_value: \"abc\" type_name: \"Foo\" }" + + // Repeateds can't have defaults. + " field { name: \"corge\" number: 6 label: LABEL_REPEATED type: TYPE_INT32" + " default_value: \"1\" }" + "}", + + "foo.proto: Foo.foo: DEFAULT_VALUE: Couldn't parse default value.\n" + "foo.proto: Foo.bar: DEFAULT_VALUE: Couldn't parse default value.\n" + "foo.proto: Foo.baz: DEFAULT_VALUE: Boolean default must be true or " + "false.\n" + "foo.proto: Foo.qux: DEFAULT_VALUE: Messages can't have default values.\n" + "foo.proto: Foo.corge: DEFAULT_VALUE: Repeated fields can't have default " + "values.\n" + // This ends up being reported later because the error is detected at + // cross-linking time. 
+ "foo.proto: Foo.quux: DEFAULT_VALUE: Messages can't have default " + "values.\n"); +} + +TEST_F(ValidationErrorTest, NegativeFieldNumber) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " field { name: \"foo\" number: -1 label:LABEL_OPTIONAL type:TYPE_INT32 }" + "}", + + "foo.proto: Foo.foo: NUMBER: Field numbers must be positive integers.\n"); +} + +TEST_F(ValidationErrorTest, HugeFieldNumber) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " field { name: \"foo\" number: 0x70000000 " + " label:LABEL_OPTIONAL type:TYPE_INT32 }" + "}", + + "foo.proto: Foo.foo: NUMBER: Field numbers cannot be greater than " + "536870911.\n"); +} + +TEST_F(ValidationErrorTest, ReservedFieldNumber) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " field {name:\"foo\" number: 18999 label:LABEL_OPTIONAL type:TYPE_INT32 }" + " field {name:\"bar\" number: 19000 label:LABEL_OPTIONAL type:TYPE_INT32 }" + " field {name:\"baz\" number: 19999 label:LABEL_OPTIONAL type:TYPE_INT32 }" + " field {name:\"qux\" number: 20000 label:LABEL_OPTIONAL type:TYPE_INT32 }" + "}", + + "foo.proto: Foo.bar: NUMBER: Field numbers 19000 through 19999 are " + "reserved for the protocol buffer library implementation.\n" + "foo.proto: Foo.baz: NUMBER: Field numbers 19000 through 19999 are " + "reserved for the protocol buffer library implementation.\n"); +} + +TEST_F(ValidationErrorTest, ExtensionMissingExtendee) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " extension { name: \"foo\" number: 1 label: LABEL_OPTIONAL" + " type_name: \"Foo\" }" + "}", + + "foo.proto: Foo.foo: EXTENDEE: FieldDescriptorProto.extendee not set for " + "extension field.\n"); +} + +TEST_F(ValidationErrorTest, NonExtensionWithExtendee) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Bar\"" + " extension_range { start: 1 end: 2 }" + "}" + "message_type {" + " name: \"Foo\"" + " field { name: \"foo\" number: 1 label: LABEL_OPTIONAL" + " type_name: \"Foo\" extendee: \"Bar\" }" + "}", + + "foo.proto: Foo.foo: EXTENDEE: FieldDescriptorProto.extendee set for " + "non-extension field.\n"); +} + +TEST_F(ValidationErrorTest, FieldNumberConflict) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " field { name: \"foo\" number: 1 label:LABEL_OPTIONAL type:TYPE_INT32 }" + " field { name: \"bar\" number: 1 label:LABEL_OPTIONAL type:TYPE_INT32 }" + "}", + + "foo.proto: Foo.bar: NUMBER: Field number 1 has already been used in " + "\"Foo\" by field \"foo\".\n"); +} + +TEST_F(ValidationErrorTest, BadMessageSetExtensionType) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"MessageSet\"" + " options { message_set_wire_format: true }" + " extension_range { start: 4 end: 5 }" + "}" + "message_type {" + " name: \"Foo\"" + " extension { name:\"foo\" number:4 label:LABEL_OPTIONAL type:TYPE_INT32" + " extendee: \"MessageSet\" }" + "}", + + "foo.proto: Foo.foo: TYPE: Extensions of MessageSets must be optional " + "messages.\n"); +} + +TEST_F(ValidationErrorTest, BadMessageSetExtensionLabel) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"MessageSet\"" + " options { message_set_wire_format: true }" + " extension_range { start: 4 end: 5 }" + "}" + "message_type {" + " name: \"Foo\"" + " extension { name:\"foo\" number:4 label:LABEL_REPEATED type:TYPE_MESSAGE" + " type_name: \"Foo\" 
extendee: \"MessageSet\" }" + "}", + + "foo.proto: Foo.foo: TYPE: Extensions of MessageSets must be optional " + "messages.\n"); +} + +TEST_F(ValidationErrorTest, FieldInMessageSet) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " options { message_set_wire_format: true }" + " field { name: \"foo\" number: 1 label:LABEL_OPTIONAL type:TYPE_INT32 }" + "}", + + "foo.proto: Foo.foo: NAME: MessageSets cannot have fields, only " + "extensions.\n"); +} + +TEST_F(ValidationErrorTest, NegativeExtensionRangeNumber) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " extension_range { start: -10 end: -1 }" + "}", + + "foo.proto: Foo: NUMBER: Extension numbers must be positive integers.\n"); +} + +TEST_F(ValidationErrorTest, HugeExtensionRangeNumber) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " extension_range { start: 1 end: 0x70000000 }" + "}", + + "foo.proto: Foo: NUMBER: Extension numbers cannot be greater than " + "536870911.\n"); +} + +TEST_F(ValidationErrorTest, ExtensionRangeEndBeforeStart) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " extension_range { start: 10 end: 10 }" + " extension_range { start: 10 end: 5 }" + "}", + + "foo.proto: Foo: NUMBER: Extension range end number must be greater than " + "start number.\n" + "foo.proto: Foo: NUMBER: Extension range end number must be greater than " + "start number.\n"); +} + +TEST_F(ValidationErrorTest, EmptyEnum) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "enum_type { name: \"Foo\" }" + // Also use the empty enum in a message to make sure there are no crashes + // during validation (possible if the code attempts to derive a default + // value for the field). 
+ "message_type {" + " name: \"Bar\"" + " field { name: \"foo\" number: 1 label:LABEL_OPTIONAL type_name:\"Foo\" }" + " field { name: \"bar\" number: 2 label:LABEL_OPTIONAL type_name:\"Foo\" " + " default_value: \"NO_SUCH_VALUE\" }" + "}", + + "foo.proto: Foo: NAME: Enums must contain at least one value.\n" + "foo.proto: Bar.bar: DEFAULT_VALUE: Enum type \"Foo\" has no value named " + "\"NO_SUCH_VALUE\".\n"); +} + +TEST_F(ValidationErrorTest, UndefinedExtendee) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " extension { name:\"foo\" number:1 label:LABEL_OPTIONAL type:TYPE_INT32" + " extendee: \"Bar\" }" + "}", + + "foo.proto: Foo.foo: EXTENDEE: \"Bar\" is not defined.\n"); +} + +TEST_F(ValidationErrorTest, NonMessageExtendee) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "enum_type { name: \"Bar\" value { name:\"DUMMY\" number:0 } }" + "message_type {" + " name: \"Foo\"" + " extension { name:\"foo\" number:1 label:LABEL_OPTIONAL type:TYPE_INT32" + " extendee: \"Bar\" }" + "}", + + "foo.proto: Foo.foo: EXTENDEE: \"Bar\" is not a message type.\n"); +} + +TEST_F(ValidationErrorTest, NotAnExtensionNumber) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Bar\"" + "}" + "message_type {" + " name: \"Foo\"" + " extension { name:\"foo\" number:1 label:LABEL_OPTIONAL type:TYPE_INT32" + " extendee: \"Bar\" }" + "}", + + "foo.proto: Foo.foo: NUMBER: \"Bar\" does not declare 1 as an extension " + "number.\n"); +} + +TEST_F(ValidationErrorTest, UndefinedFieldType) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " field { name:\"foo\" number:1 label:LABEL_OPTIONAL type_name:\"Bar\" }" + "}", + + "foo.proto: Foo.foo: TYPE: \"Bar\" is not defined.\n"); +} + +TEST_F(ValidationErrorTest, FieldTypeDefinedInUndeclaredDependency) { + BuildFile( + "name: \"bar.proto\" " + "message_type { name: \"Bar\" } "); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " field { name:\"foo\" number:1 label:LABEL_OPTIONAL type_name:\"Bar\" }" + "}", + "foo.proto: Foo.foo: TYPE: \"Bar\" seems to be defined in \"bar.proto\", " + "which is not imported by \"foo.proto\". To use it here, please add the " + "necessary import.\n"); +} + +TEST_F(ValidationErrorTest, SearchMostLocalFirst) { + // The following should produce an error that Bar.Baz is not defined: + // message Bar { message Baz {} } + // message Foo { + // message Bar { + // // Placing "message Baz{}" here, or removing Foo.Bar altogether, + // // would fix the error. + // } + // optional Bar.Baz baz = 1; + // } + // An one point the lookup code incorrectly did not produce an error in this + // case, because when looking for Bar.Baz, it would try "Foo.Bar.Baz" first, + // fail, and ten try "Bar.Baz" and succeed, even though "Bar" should actually + // refer to the inner Bar, not the outer one. + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Bar\"" + " nested_type { name: \"Baz\" }" + "}" + "message_type {" + " name: \"Foo\"" + " nested_type { name: \"Bar\" }" + " field { name:\"baz\" number:1 label:LABEL_OPTIONAL" + " type_name:\"Bar.Baz\" }" + "}", + + "foo.proto: Foo.baz: TYPE: \"Bar.Baz\" is not defined.\n"); +} + +TEST_F(ValidationErrorTest, SearchMostLocalFirst2) { + // This test would find the most local "Bar" first, and does, but + // proceeds to find the outer one because the inner one's not an + // aggregate. 
+ BuildFile( + "name: \"foo.proto\" " + "message_type {" + " name: \"Bar\"" + " nested_type { name: \"Baz\" }" + "}" + "message_type {" + " name: \"Foo\"" + " field { name: \"Bar\" number:1 type:TYPE_BYTES } " + " field { name:\"baz\" number:2 label:LABEL_OPTIONAL" + " type_name:\"Bar.Baz\" }" + "}"); +} + +TEST_F(ValidationErrorTest, PackageOriginallyDeclaredInTransitiveDependent) { + // Imagine we have the following: + // + // foo.proto: + // package foo.bar; + // bar.proto: + // package foo.bar; + // import "foo.proto"; + // message Bar {} + // baz.proto: + // package foo; + // import "bar.proto" + // message Baz { optional bar.Bar qux = 1; } + // + // When validating baz.proto, we will look up "bar.Bar". As part of this + // lookup, we first lookup "bar" then try to find "Bar" within it. "bar" + // should resolve to "foo.bar". Note, though, that "foo.bar" was originally + // defined in foo.proto, which is not a direct dependency of baz.proto. The + // implementation of FindSymbol() normally only returns symbols in direct + // dependencies, not indirect ones. This test insures that this does not + // prevent it from finding "foo.bar". + + BuildFile( + "name: \"foo.proto\" " + "package: \"foo.bar\" "); + BuildFile( + "name: \"bar.proto\" " + "package: \"foo.bar\" " + "dependency: \"foo.proto\" " + "message_type { name: \"Bar\" }"); + BuildFile( + "name: \"baz.proto\" " + "package: \"foo\" " + "dependency: \"bar.proto\" " + "message_type { " + " name: \"Baz\" " + " field { name:\"qux\" number:1 label:LABEL_OPTIONAL " + " type_name:\"bar.Bar\" }" + "}"); +} + +TEST_F(ValidationErrorTest, FieldTypeNotAType) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " field { name:\"foo\" number:1 label:LABEL_OPTIONAL " + " type_name:\".Foo.bar\" }" + " field { name:\"bar\" number:2 label:LABEL_OPTIONAL type:TYPE_INT32 }" + "}", + + "foo.proto: Foo.foo: TYPE: \".Foo.bar\" is not a type.\n"); +} + +TEST_F(ValidationErrorTest, RelativeFieldTypeNotAType) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " nested_type {" + " name: \"Bar\"" + " field { name:\"Baz\" number:2 label:LABEL_OPTIONAL type:TYPE_INT32 }" + " }" + " name: \"Foo\"" + " field { name:\"foo\" number:1 label:LABEL_OPTIONAL " + " type_name:\"Bar.Baz\" }" + "}", + "foo.proto: Foo.foo: TYPE: \"Bar.Baz\" is not a type.\n"); +} + +TEST_F(ValidationErrorTest, FieldTypeMayBeItsName) { + BuildFile( + "name: \"foo.proto\" " + "message_type {" + " name: \"Bar\"" + "}" + "message_type {" + " name: \"Foo\"" + " field { name:\"Bar\" number:1 label:LABEL_OPTIONAL type_name:\"Bar\" }" + "}"); +} + +TEST_F(ValidationErrorTest, EnumFieldTypeIsMessage) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type { name: \"Bar\" } " + "message_type {" + " name: \"Foo\"" + " field { name:\"foo\" number:1 label:LABEL_OPTIONAL type:TYPE_ENUM" + " type_name:\"Bar\" }" + "}", + + "foo.proto: Foo.foo: TYPE: \"Bar\" is not an enum type.\n"); +} + +TEST_F(ValidationErrorTest, MessageFieldTypeIsEnum) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "enum_type { name: \"Bar\" value { name:\"DUMMY\" number:0 } } " + "message_type {" + " name: \"Foo\"" + " field { name:\"foo\" number:1 label:LABEL_OPTIONAL type:TYPE_MESSAGE" + " type_name:\"Bar\" }" + "}", + + "foo.proto: Foo.foo: TYPE: \"Bar\" is not a message type.\n"); +} + +TEST_F(ValidationErrorTest, BadEnumDefaultValue) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "enum_type { name: \"Bar\" value { name:\"DUMMY\" number:0 } } " + 
"message_type {" + " name: \"Foo\"" + " field { name:\"foo\" number:1 label:LABEL_OPTIONAL type_name:\"Bar\"" + " default_value:\"NO_SUCH_VALUE\" }" + "}", + + "foo.proto: Foo.foo: DEFAULT_VALUE: Enum type \"Bar\" has no value named " + "\"NO_SUCH_VALUE\".\n"); +} + +TEST_F(ValidationErrorTest, PrimitiveWithTypeName) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " field { name:\"foo\" number:1 label:LABEL_OPTIONAL type:TYPE_INT32" + " type_name:\"Foo\" }" + "}", + + "foo.proto: Foo.foo: TYPE: Field with primitive type has type_name.\n"); +} + +TEST_F(ValidationErrorTest, NonPrimitiveWithoutTypeName) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"Foo\"" + " field { name:\"foo\" number:1 label:LABEL_OPTIONAL type:TYPE_MESSAGE }" + "}", + + "foo.proto: Foo.foo: TYPE: Field with message or enum type missing " + "type_name.\n"); +} + +TEST_F(ValidationErrorTest, InputTypeNotDefined) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type { name: \"Foo\" } " + "service {" + " name: \"TestService\"" + " method { name: \"A\" input_type: \"Bar\" output_type: \"Foo\" }" + "}", + + "foo.proto: TestService.A: INPUT_TYPE: \"Bar\" is not defined.\n"); +} + +TEST_F(ValidationErrorTest, InputTypeNotAMessage) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type { name: \"Foo\" } " + "enum_type { name: \"Bar\" value { name:\"DUMMY\" number:0 } } " + "service {" + " name: \"TestService\"" + " method { name: \"A\" input_type: \"Bar\" output_type: \"Foo\" }" + "}", + + "foo.proto: TestService.A: INPUT_TYPE: \"Bar\" is not a message type.\n"); +} + +TEST_F(ValidationErrorTest, OutputTypeNotDefined) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type { name: \"Foo\" } " + "service {" + " name: \"TestService\"" + " method { name: \"A\" input_type: \"Foo\" output_type: \"Bar\" }" + "}", + + "foo.proto: TestService.A: OUTPUT_TYPE: \"Bar\" is not defined.\n"); +} + +TEST_F(ValidationErrorTest, OutputTypeNotAMessage) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type { name: \"Foo\" } " + "enum_type { name: \"Bar\" value { name:\"DUMMY\" number:0 } } " + "service {" + " name: \"TestService\"" + " method { name: \"A\" input_type: \"Foo\" output_type: \"Bar\" }" + "}", + + "foo.proto: TestService.A: OUTPUT_TYPE: \"Bar\" is not a message type.\n"); +} + +TEST_F(ValidationErrorTest, IllegalPackedField) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {\n" + " name: \"Foo\"" + " field { name:\"packed_string\" number:1 label:LABEL_REPEATED " + " type:TYPE_STRING " + " options { uninterpreted_option {" + " name { name_part: \"packed\" is_extension: false }" + " identifier_value: \"true\" }}}\n" + " field { name:\"packed_message\" number:3 label:LABEL_REPEATED " + " type_name: \"Foo\"" + " options { uninterpreted_option {" + " name { name_part: \"packed\" is_extension: false }" + " identifier_value: \"true\" }}}\n" + " field { name:\"optional_int32\" number: 4 label: LABEL_OPTIONAL " + " type:TYPE_INT32 " + " options { uninterpreted_option {" + " name { name_part: \"packed\" is_extension: false }" + " identifier_value: \"true\" }}}\n" + "}", + + "foo.proto: Foo.packed_string: TYPE: [packed = true] can only be " + "specified for repeated primitive fields.\n" + "foo.proto: Foo.packed_message: TYPE: [packed = true] can only be " + "specified for repeated primitive fields.\n" + "foo.proto: Foo.optional_int32: TYPE: [packed = true] can only be " + "specified for repeated primitive 
fields.\n" + ); +} + +TEST_F(ValidationErrorTest, OptionWrongType) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type { " + " name: \"TestMessage\" " + " field { name:\"foo\" number:1 label:LABEL_OPTIONAL type:TYPE_STRING " + " options { uninterpreted_option { name { name_part: \"ctype\" " + " is_extension: false }" + " positive_int_value: 1 }" + " }" + " }" + "}\n", + + "foo.proto: TestMessage.foo: OPTION_VALUE: Value must be identifier for " + "enum-valued option \"google.protobuf.FieldOptions.ctype\".\n"); +} + +TEST_F(ValidationErrorTest, OptionExtendsAtomicType) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type { " + " name: \"TestMessage\" " + " field { name:\"foo\" number:1 label:LABEL_OPTIONAL type:TYPE_STRING " + " options { uninterpreted_option { name { name_part: \"ctype\" " + " is_extension: false }" + " name { name_part: \"foo\" " + " is_extension: true }" + " positive_int_value: 1 }" + " }" + " }" + "}\n", + + "foo.proto: TestMessage.foo: OPTION_NAME: Option \"ctype\" is an " + "atomic type, not a message.\n"); +} + +TEST_F(ValidationErrorTest, DupOption) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type { " + " name: \"TestMessage\" " + " field { name:\"foo\" number:1 label:LABEL_OPTIONAL type:TYPE_UINT32 " + " options { uninterpreted_option { name { name_part: \"ctype\" " + " is_extension: false }" + " identifier_value: \"CORD\" }" + " uninterpreted_option { name { name_part: \"ctype\" " + " is_extension: false }" + " identifier_value: \"CORD\" }" + " }" + " }" + "}\n", + + "foo.proto: TestMessage.foo: OPTION_NAME: Option \"ctype\" was " + "already set.\n"); +} + +TEST_F(ValidationErrorTest, InvalidOptionName) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type { " + " name: \"TestMessage\" " + " field { name:\"foo\" number:1 label:LABEL_OPTIONAL type:TYPE_BOOL " + " options { uninterpreted_option { " + " name { name_part: \"uninterpreted_option\" " + " is_extension: false }" + " positive_int_value: 1 " + " }" + " }" + " }" + "}\n", + + "foo.proto: TestMessage.foo: OPTION_NAME: Option must not use " + "reserved name \"uninterpreted_option\".\n"); +} + +TEST_F(ValidationErrorTest, RepeatedOption) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "extension { name: \"foo\" number: 7672757 label: LABEL_REPEATED " + " type: TYPE_FLOAT extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " double_value: 1.2 } }", + + "foo.proto: foo.proto: OPTION_NAME: Option field \"(foo)\" is repeated. 
" + "Repeated options are not supported.\n"); +} + +TEST_F(ValidationErrorTest, CustomOptionConflictingFieldNumber) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "extension { name: \"foo1\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_INT32 extendee: \"google.protobuf.FieldOptions\" }" + "extension { name: \"foo2\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_INT32 extendee: \"google.protobuf.FieldOptions\" }", + + "foo.proto: foo2: NUMBER: Extension number 7672757 has already been used " + "in \"google.protobuf.FieldOptions\" by extension \"foo1\".\n"); +} + +TEST_F(ValidationErrorTest, Int32OptionValueOutOfPositiveRange) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_INT32 extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " positive_int_value: 0x80000000 } " + "}", + + "foo.proto: foo.proto: OPTION_VALUE: Value out of range " + "for int32 option \"foo\".\n"); +} + +TEST_F(ValidationErrorTest, Int32OptionValueOutOfNegativeRange) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_INT32 extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " negative_int_value: -0x80000001 } " + "}", + + "foo.proto: foo.proto: OPTION_VALUE: Value out of range " + "for int32 option \"foo\".\n"); +} + +TEST_F(ValidationErrorTest, Int32OptionValueIsNotPositiveInt) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_INT32 extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " string_value: \"5\" } }", + + "foo.proto: foo.proto: OPTION_VALUE: Value must be integer " + "for int32 option \"foo\".\n"); +} + +TEST_F(ValidationErrorTest, Int64OptionValueOutOfRange) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_INT64 extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " positive_int_value: 0x8000000000000000 } " + "}", + + "foo.proto: foo.proto: OPTION_VALUE: Value out of range " + "for int64 option \"foo\".\n"); +} + +TEST_F(ValidationErrorTest, Int64OptionValueIsNotPositiveInt) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_INT64 extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " identifier_value: \"5\" } }", + + "foo.proto: foo.proto: OPTION_VALUE: Value must be integer " + "for int64 
option \"foo\".\n"); +} + +TEST_F(ValidationErrorTest, UInt32OptionValueOutOfRange) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_UINT32 extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " positive_int_value: 0x100000000 } }", + + "foo.proto: foo.proto: OPTION_VALUE: Value out of range " + "for uint32 option \"foo\".\n"); +} + +TEST_F(ValidationErrorTest, UInt32OptionValueIsNotPositiveInt) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_UINT32 extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " double_value: -5.6 } }", + + "foo.proto: foo.proto: OPTION_VALUE: Value must be non-negative integer " + "for uint32 option \"foo\".\n"); +} + +TEST_F(ValidationErrorTest, UInt64OptionValueIsNotPositiveInt) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_UINT64 extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " negative_int_value: -5 } }", + + "foo.proto: foo.proto: OPTION_VALUE: Value must be non-negative integer " + "for uint64 option \"foo\".\n"); +} + +TEST_F(ValidationErrorTest, FloatOptionValueIsNotNumber) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_FLOAT extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " string_value: \"bar\" } }", + + "foo.proto: foo.proto: OPTION_VALUE: Value must be number " + "for float option \"foo\".\n"); +} + +TEST_F(ValidationErrorTest, DoubleOptionValueIsNotNumber) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_DOUBLE extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " string_value: \"bar\" } }", + + "foo.proto: foo.proto: OPTION_VALUE: Value must be number " + "for double option \"foo\".\n"); +} + +TEST_F(ValidationErrorTest, BoolOptionValueIsNotTrueOrFalse) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_BOOL extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " identifier_value: \"bar\" } }", + + "foo.proto: foo.proto: OPTION_VALUE: Value must be \"true\" or \"false\" " + "for boolean option \"foo\".\n"); +} + +TEST_F(ValidationErrorTest, EnumOptionValueIsNotIdentifier) { + 
BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "enum_type { name: \"FooEnum\" value { name: \"BAR\" number: 1 } " + " value { name: \"BAZ\" number: 2 } }" + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_ENUM type_name: \"FooEnum\" " + " extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " string_value: \"QUUX\" } }", + + "foo.proto: foo.proto: OPTION_VALUE: Value must be identifier for " + "enum-valued option \"foo\".\n"); +} + +TEST_F(ValidationErrorTest, EnumOptionValueIsNotEnumValueName) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "enum_type { name: \"FooEnum\" value { name: \"BAR\" number: 1 } " + " value { name: \"BAZ\" number: 2 } }" + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_ENUM type_name: \"FooEnum\" " + " extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " identifier_value: \"QUUX\" } }", + + "foo.proto: foo.proto: OPTION_VALUE: Enum type \"FooEnum\" has no value " + "named \"QUUX\" for option \"foo\".\n"); +} + +TEST_F(ValidationErrorTest, EnumOptionValueIsSiblingEnumValueName) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "enum_type { name: \"FooEnum1\" value { name: \"BAR\" number: 1 } " + " value { name: \"BAZ\" number: 2 } }" + "enum_type { name: \"FooEnum2\" value { name: \"QUX\" number: 1 } " + " value { name: \"QUUX\" number: 2 } }" + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_ENUM type_name: \"FooEnum1\" " + " extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " identifier_value: \"QUUX\" } }", + + "foo.proto: foo.proto: OPTION_VALUE: Enum type \"FooEnum1\" has no value " + "named \"QUUX\" for option \"foo\". This appears to be a value from a " + "sibling type.\n"); +} + +TEST_F(ValidationErrorTest, StringOptionValueIsNotString) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_STRING extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " identifier_value: \"QUUX\" } }", + + "foo.proto: foo.proto: OPTION_VALUE: Value must be quoted string for " + "string option \"foo\".\n"); +} + +// Helper function for tests that check for aggregate value parsing +// errors. The "value" argument is embedded inside the +// "uninterpreted_option" portion of the result. 
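+// For example, EmbedAggregateValue("aggregate_value: \"x:100\"") builds a file
+// that extends google.protobuf.FileOptions with a message-typed option "(foo)"
+// of type Foo and then sets, at file scope, roughly
+//   option (foo) = { x:100 };
+// which must fail because Foo declares no field named "x" (see
+// AggregateValueUnknownFields below).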
+static string EmbedAggregateValue(const char* value) { + return strings::Substitute( + "name: \"foo.proto\" " + "dependency: \"google/protobuf/descriptor.proto\" " + "message_type { name: \"Foo\" } " + "extension { name: \"foo\" number: 7672757 label: LABEL_OPTIONAL " + " type: TYPE_MESSAGE type_name: \"Foo\" " + " extendee: \"google.protobuf.FileOptions\" }" + "options { uninterpreted_option { name { name_part: \"foo\" " + " is_extension: true } " + " $0 } }", + value); +} + +TEST_F(ValidationErrorTest, AggregateValueNotFound) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + EmbedAggregateValue("string_value: \"\""), + "foo.proto: foo.proto: OPTION_VALUE: Option \"foo\" is a message. " + "To set the entire message, use syntax like " + "\"foo = { }\". To set fields within it, use " + "syntax like \"foo.foo = value\".\n"); +} + +TEST_F(ValidationErrorTest, AggregateValueParseError) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + EmbedAggregateValue("aggregate_value: \"1+2\""), + "foo.proto: foo.proto: OPTION_VALUE: Error while parsing option " + "value for \"foo\": Expected identifier.\n"); +} + +TEST_F(ValidationErrorTest, AggregateValueUnknownFields) { + BuildDescriptorMessagesInTestPool(); + + BuildFileWithErrors( + EmbedAggregateValue("aggregate_value: \"x:100\""), + "foo.proto: foo.proto: OPTION_VALUE: Error while parsing option " + "value for \"foo\": Message type \"Foo\" has no field named \"x\".\n"); +} + +TEST_F(ValidationErrorTest, NotLiteImportsLite) { + BuildFile( + "name: \"bar.proto\" " + "options { optimize_for: LITE_RUNTIME } "); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"bar.proto\" ", + + "foo.proto: foo.proto: OTHER: Files that do not use optimize_for = " + "LITE_RUNTIME cannot import files which do use this option. This file " + "is not lite, but it imports \"bar.proto\" which is.\n"); +} + +TEST_F(ValidationErrorTest, LiteExtendsNotLite) { + BuildFile( + "name: \"bar.proto\" " + "message_type: {" + " name: \"Bar\"" + " extension_range { start: 1 end: 1000 }" + "}"); + + BuildFileWithErrors( + "name: \"foo.proto\" " + "dependency: \"bar.proto\" " + "options { optimize_for: LITE_RUNTIME } " + "extension { name: \"ext\" number: 123 label: LABEL_OPTIONAL " + " type: TYPE_INT32 extendee: \"Bar\" }", + + "foo.proto: ext: EXTENDEE: Extensions to non-lite types can only be " + "declared in non-lite files. Note that you cannot extend a non-lite " + "type to contain a lite type, but the reverse is allowed.\n"); +} + +TEST_F(ValidationErrorTest, NoLiteServices) { + BuildFileWithErrors( + "name: \"foo.proto\" " + "options {" + " optimize_for: LITE_RUNTIME" + " cc_generic_services: true" + " java_generic_services: true" + "} " + "service { name: \"Foo\" }", + + "foo.proto: Foo: NAME: Files with optimize_for = LITE_RUNTIME cannot " + "define services unless you set both options cc_generic_services and " + "java_generic_sevices to false.\n"); + + BuildFile( + "name: \"bar.proto\" " + "options {" + " optimize_for: LITE_RUNTIME" + " cc_generic_services: false" + " java_generic_services: false" + "} " + "service { name: \"Bar\" }"); +} + +TEST_F(ValidationErrorTest, RollbackAfterError) { + // Build a file which contains every kind of construct but references an + // undefined type. All these constructs will be added to the symbol table + // before the undefined type error is noticed. The DescriptorPool will then + // have to roll everything back. 
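+  // For example, if the rollback left "TestEnum" behind, the second, corrected
+  // BuildFile() call at the end of this test would fail with a duplicate
+  // definition error instead of succeeding.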
+ BuildFileWithErrors( + "name: \"foo.proto\" " + "message_type {" + " name: \"TestMessage\"" + " field { name:\"foo\" label:LABEL_OPTIONAL type:TYPE_INT32 number:1 }" + "} " + "enum_type {" + " name: \"TestEnum\"" + " value { name:\"BAR\" number:1 }" + "} " + "service {" + " name: \"TestService\"" + " method {" + " name: \"Baz\"" + " input_type: \"NoSuchType\"" // error + " output_type: \"TestMessage\"" + " }" + "}", + + "foo.proto: TestService.Baz: INPUT_TYPE: \"NoSuchType\" is not defined.\n"); + + // Make sure that if we build the same file again with the error fixed, + // it works. If the above rollback was incomplete, then some symbols will + // be left defined, and this second attempt will fail since it tries to + // re-define the same symbols. + BuildFile( + "name: \"foo.proto\" " + "message_type {" + " name: \"TestMessage\"" + " field { name:\"foo\" label:LABEL_OPTIONAL type:TYPE_INT32 number:1 }" + "} " + "enum_type {" + " name: \"TestEnum\"" + " value { name:\"BAR\" number:1 }" + "} " + "service {" + " name: \"TestService\"" + " method { name:\"Baz\"" + " input_type:\"TestMessage\"" + " output_type:\"TestMessage\" }" + "}"); +} + +TEST_F(ValidationErrorTest, ErrorsReportedToLogError) { + // Test that errors are reported to GOOGLE_LOG(ERROR) if no error collector is + // provided. + + FileDescriptorProto file_proto; + ASSERT_TRUE(TextFormat::ParseFromString( + "name: \"foo.proto\" " + "message_type { name: \"Foo\" } " + "message_type { name: \"Foo\" } ", + &file_proto)); + + vector errors; + + { + ScopedMemoryLog log; + EXPECT_TRUE(pool_.BuildFile(file_proto) == NULL); + errors = log.GetMessages(ERROR); + } + + ASSERT_EQ(2, errors.size()); + + EXPECT_EQ("Invalid proto descriptor for file \"foo.proto\":", errors[0]); + EXPECT_EQ(" Foo: \"Foo\" is already defined.", errors[1]); +} + +// =================================================================== +// DescriptorDatabase + +static void AddToDatabase(SimpleDescriptorDatabase* database, + const char* file_text) { + FileDescriptorProto file_proto; + EXPECT_TRUE(TextFormat::ParseFromString(file_text, &file_proto)); + database->Add(file_proto); +} + +class DatabaseBackedPoolTest : public testing::Test { + protected: + DatabaseBackedPoolTest() {} + + SimpleDescriptorDatabase database_; + + virtual void SetUp() { + AddToDatabase(&database_, + "name: \"foo.proto\" " + "message_type { name:\"Foo\" extension_range { start: 1 end: 100 } } " + "enum_type { name:\"TestEnum\" value { name:\"DUMMY\" number:0 } } " + "service { name:\"TestService\" } "); + AddToDatabase(&database_, + "name: \"bar.proto\" " + "dependency: \"foo.proto\" " + "message_type { name:\"Bar\" } " + "extension { name:\"foo_ext\" extendee: \".Foo\" number:5 " + " label:LABEL_OPTIONAL type:TYPE_INT32 } "); + } + + // We can't inject a file containing errors into a DescriptorPool, so we + // need an actual mock DescriptorDatabase to test errors. + class ErrorDescriptorDatabase : public DescriptorDatabase { + public: + ErrorDescriptorDatabase() {} + ~ErrorDescriptorDatabase() {} + + // implements DescriptorDatabase --------------------------------- + bool FindFileByName(const string& filename, + FileDescriptorProto* output) { + // error.proto and error2.proto cyclically import each other. 
+ if (filename == "error.proto") { + output->Clear(); + output->set_name("error.proto"); + output->add_dependency("error2.proto"); + return true; + } else if (filename == "error2.proto") { + output->Clear(); + output->set_name("error2.proto"); + output->add_dependency("error.proto"); + return true; + } else { + return false; + } + } + bool FindFileContainingSymbol(const string& symbol_name, + FileDescriptorProto* output) { + return false; + } + bool FindFileContainingExtension(const string& containing_type, + int field_number, + FileDescriptorProto* output) { + return false; + } + }; + + // A DescriptorDatabase that counts how many times each method has been + // called and forwards to some other DescriptorDatabase. + class CallCountingDatabase : public DescriptorDatabase { + public: + CallCountingDatabase(DescriptorDatabase* wrapped_db) + : wrapped_db_(wrapped_db) { + Clear(); + } + ~CallCountingDatabase() {} + + DescriptorDatabase* wrapped_db_; + + int call_count_; + + void Clear() { + call_count_ = 0; + } + + // implements DescriptorDatabase --------------------------------- + bool FindFileByName(const string& filename, + FileDescriptorProto* output) { + ++call_count_; + return wrapped_db_->FindFileByName(filename, output); + } + bool FindFileContainingSymbol(const string& symbol_name, + FileDescriptorProto* output) { + ++call_count_; + return wrapped_db_->FindFileContainingSymbol(symbol_name, output); + } + bool FindFileContainingExtension(const string& containing_type, + int field_number, + FileDescriptorProto* output) { + ++call_count_; + return wrapped_db_->FindFileContainingExtension( + containing_type, field_number, output); + } + }; + + // A DescriptorDatabase which falsely always returns foo.proto when searching + // for any symbol or extension number. This shouldn't cause the + // DescriptorPool to reload foo.proto if it is already loaded. 
+ class FalsePositiveDatabase : public DescriptorDatabase { + public: + FalsePositiveDatabase(DescriptorDatabase* wrapped_db) + : wrapped_db_(wrapped_db) {} + ~FalsePositiveDatabase() {} + + DescriptorDatabase* wrapped_db_; + + // implements DescriptorDatabase --------------------------------- + bool FindFileByName(const string& filename, + FileDescriptorProto* output) { + return wrapped_db_->FindFileByName(filename, output); + } + bool FindFileContainingSymbol(const string& symbol_name, + FileDescriptorProto* output) { + return FindFileByName("foo.proto", output); + } + bool FindFileContainingExtension(const string& containing_type, + int field_number, + FileDescriptorProto* output) { + return FindFileByName("foo.proto", output); + } + }; +}; + +TEST_F(DatabaseBackedPoolTest, FindFileByName) { + DescriptorPool pool(&database_); + + const FileDescriptor* foo = pool.FindFileByName("foo.proto"); + ASSERT_TRUE(foo != NULL); + EXPECT_EQ("foo.proto", foo->name()); + ASSERT_EQ(1, foo->message_type_count()); + EXPECT_EQ("Foo", foo->message_type(0)->name()); + + EXPECT_EQ(foo, pool.FindFileByName("foo.proto")); + + EXPECT_TRUE(pool.FindFileByName("no_such_file.proto") == NULL); +} + +TEST_F(DatabaseBackedPoolTest, FindDependencyBeforeDependent) { + DescriptorPool pool(&database_); + + const FileDescriptor* foo = pool.FindFileByName("foo.proto"); + ASSERT_TRUE(foo != NULL); + EXPECT_EQ("foo.proto", foo->name()); + ASSERT_EQ(1, foo->message_type_count()); + EXPECT_EQ("Foo", foo->message_type(0)->name()); + + const FileDescriptor* bar = pool.FindFileByName("bar.proto"); + ASSERT_TRUE(bar != NULL); + EXPECT_EQ("bar.proto", bar->name()); + ASSERT_EQ(1, bar->message_type_count()); + EXPECT_EQ("Bar", bar->message_type(0)->name()); + + ASSERT_EQ(1, bar->dependency_count()); + EXPECT_EQ(foo, bar->dependency(0)); +} + +TEST_F(DatabaseBackedPoolTest, FindDependentBeforeDependency) { + DescriptorPool pool(&database_); + + const FileDescriptor* bar = pool.FindFileByName("bar.proto"); + ASSERT_TRUE(bar != NULL); + EXPECT_EQ("bar.proto", bar->name()); + ASSERT_EQ(1, bar->message_type_count()); + ASSERT_EQ("Bar", bar->message_type(0)->name()); + + const FileDescriptor* foo = pool.FindFileByName("foo.proto"); + ASSERT_TRUE(foo != NULL); + EXPECT_EQ("foo.proto", foo->name()); + ASSERT_EQ(1, foo->message_type_count()); + ASSERT_EQ("Foo", foo->message_type(0)->name()); + + ASSERT_EQ(1, bar->dependency_count()); + EXPECT_EQ(foo, bar->dependency(0)); +} + +TEST_F(DatabaseBackedPoolTest, FindFileContainingSymbol) { + DescriptorPool pool(&database_); + + const FileDescriptor* file = pool.FindFileContainingSymbol("Foo"); + ASSERT_TRUE(file != NULL); + EXPECT_EQ("foo.proto", file->name()); + EXPECT_EQ(file, pool.FindFileByName("foo.proto")); + + EXPECT_TRUE(pool.FindFileContainingSymbol("NoSuchSymbol") == NULL); +} + +TEST_F(DatabaseBackedPoolTest, FindMessageTypeByName) { + DescriptorPool pool(&database_); + + const Descriptor* type = pool.FindMessageTypeByName("Foo"); + ASSERT_TRUE(type != NULL); + EXPECT_EQ("Foo", type->name()); + EXPECT_EQ(type->file(), pool.FindFileByName("foo.proto")); + + EXPECT_TRUE(pool.FindMessageTypeByName("NoSuchType") == NULL); +} + +TEST_F(DatabaseBackedPoolTest, FindExtensionByNumber) { + DescriptorPool pool(&database_); + + const Descriptor* foo = pool.FindMessageTypeByName("Foo"); + ASSERT_TRUE(foo != NULL); + + const FieldDescriptor* extension = pool.FindExtensionByNumber(foo, 5); + ASSERT_TRUE(extension != NULL); + EXPECT_EQ("foo_ext", extension->name()); + EXPECT_EQ(extension->file(), 
pool.FindFileByName("bar.proto")); + + EXPECT_TRUE(pool.FindExtensionByNumber(foo, 12) == NULL); +} + +TEST_F(DatabaseBackedPoolTest, FindAllExtensions) { + DescriptorPool pool(&database_); + + const Descriptor* foo = pool.FindMessageTypeByName("Foo"); + + for (int i = 0; i < 2; ++i) { + // Repeat the lookup twice, to check that we get consistent + // results despite the fallback database lookup mutating the pool. + vector extensions; + pool.FindAllExtensions(foo, &extensions); + ASSERT_EQ(1, extensions.size()); + EXPECT_EQ(5, extensions[0]->number()); + } +} + +TEST_F(DatabaseBackedPoolTest, ErrorWithoutErrorCollector) { + ErrorDescriptorDatabase error_database; + DescriptorPool pool(&error_database); + + vector errors; + + { + ScopedMemoryLog log; + EXPECT_TRUE(pool.FindFileByName("error.proto") == NULL); + errors = log.GetMessages(ERROR); + } + + EXPECT_FALSE(errors.empty()); +} + +TEST_F(DatabaseBackedPoolTest, ErrorWithErrorCollector) { + ErrorDescriptorDatabase error_database; + MockErrorCollector error_collector; + DescriptorPool pool(&error_database, &error_collector); + + EXPECT_TRUE(pool.FindFileByName("error.proto") == NULL); + EXPECT_EQ( + "error.proto: error.proto: OTHER: File recursively imports itself: " + "error.proto -> error2.proto -> error.proto\n" + "error2.proto: error2.proto: OTHER: Import \"error.proto\" was not " + "found or had errors.\n" + "error.proto: error.proto: OTHER: Import \"error2.proto\" was not " + "found or had errors.\n", + error_collector.text_); +} + +TEST_F(DatabaseBackedPoolTest, UnittestProto) { + // Try to load all of unittest.proto from a DescriptorDatabase. This should + // thoroughly test all paths through DescriptorBuilder to insure that there + // are no deadlocking problems when pool_->mutex_ is non-NULL. + const FileDescriptor* original_file = + protobuf_unittest::TestAllTypes::descriptor()->file(); + + DescriptorPoolDatabase database(*DescriptorPool::generated_pool()); + DescriptorPool pool(&database); + const FileDescriptor* file_from_database = + pool.FindFileByName(original_file->name()); + + ASSERT_TRUE(file_from_database != NULL); + + FileDescriptorProto original_file_proto; + original_file->CopyTo(&original_file_proto); + + FileDescriptorProto file_from_database_proto; + file_from_database->CopyTo(&file_from_database_proto); + + EXPECT_EQ(original_file_proto.DebugString(), + file_from_database_proto.DebugString()); +} + +TEST_F(DatabaseBackedPoolTest, DoesntRetryDbUnnecessarily) { + // Searching for a child of an existing descriptor should never fall back + // to the DescriptorDatabase even if it isn't found, because we know all + // children are already loaded. 
+ CallCountingDatabase call_counter(&database_); + DescriptorPool pool(&call_counter); + + const FileDescriptor* file = pool.FindFileByName("foo.proto"); + ASSERT_TRUE(file != NULL); + const Descriptor* foo = pool.FindMessageTypeByName("Foo"); + ASSERT_TRUE(foo != NULL); + const EnumDescriptor* test_enum = pool.FindEnumTypeByName("TestEnum"); + ASSERT_TRUE(test_enum != NULL); + const ServiceDescriptor* test_service = pool.FindServiceByName("TestService"); + ASSERT_TRUE(test_service != NULL); + + EXPECT_NE(0, call_counter.call_count_); + call_counter.Clear(); + + EXPECT_TRUE(foo->FindFieldByName("no_such_field") == NULL); + EXPECT_TRUE(foo->FindExtensionByName("no_such_extension") == NULL); + EXPECT_TRUE(foo->FindNestedTypeByName("NoSuchMessageType") == NULL); + EXPECT_TRUE(foo->FindEnumTypeByName("NoSuchEnumType") == NULL); + EXPECT_TRUE(foo->FindEnumValueByName("NO_SUCH_VALUE") == NULL); + EXPECT_TRUE(test_enum->FindValueByName("NO_SUCH_VALUE") == NULL); + EXPECT_TRUE(test_service->FindMethodByName("NoSuchMethod") == NULL); + + EXPECT_TRUE(file->FindMessageTypeByName("NoSuchMessageType") == NULL); + EXPECT_TRUE(file->FindEnumTypeByName("NoSuchEnumType") == NULL); + EXPECT_TRUE(file->FindEnumValueByName("NO_SUCH_VALUE") == NULL); + EXPECT_TRUE(file->FindServiceByName("NO_SUCH_VALUE") == NULL); + EXPECT_TRUE(file->FindExtensionByName("no_such_extension") == NULL); + EXPECT_EQ(0, call_counter.call_count_); +} + +TEST_F(DatabaseBackedPoolTest, DoesntReloadFilesUncesessarily) { + // If FindFileContainingSymbol() or FindFileContainingExtension() return a + // file that is already in the DescriptorPool, it should not attempt to + // reload the file. + FalsePositiveDatabase false_positive_database(&database_); + MockErrorCollector error_collector; + DescriptorPool pool(&false_positive_database, &error_collector); + + // First make sure foo.proto is loaded. + const Descriptor* foo = pool.FindMessageTypeByName("Foo"); + ASSERT_TRUE(foo != NULL); + + // Try inducing false positives. + EXPECT_TRUE(pool.FindMessageTypeByName("NoSuchSymbol") == NULL); + EXPECT_TRUE(pool.FindExtensionByNumber(foo, 22) == NULL); + + // No errors should have been reported. (If foo.proto was incorrectly + // loaded multiple times, errors would have been reported.) + EXPECT_EQ("", error_collector.text_); +} + +TEST_F(DatabaseBackedPoolTest, DoesntReloadKnownBadFiles) { + ErrorDescriptorDatabase error_database; + MockErrorCollector error_collector; + DescriptorPool pool(&error_database, &error_collector); + + EXPECT_TRUE(pool.FindFileByName("error.proto") == NULL); + error_collector.text_.clear(); + EXPECT_TRUE(pool.FindFileByName("error.proto") == NULL); + EXPECT_EQ("", error_collector.text_); +} + +TEST_F(DatabaseBackedPoolTest, DoesntFallbackOnWrongType) { + // If a lookup finds a symbol of the wrong type (e.g. we pass a type name + // to FindFieldByName()), we should fail fast, without checking the fallback + // database. 
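+  // For example, "TestEnum" names an enum in foo.proto, so
+  // pool.FindMessageTypeByName("TestEnum") below should return NULL right away
+  // instead of asking the fallback database for a message with that name.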
+ CallCountingDatabase call_counter(&database_); + DescriptorPool pool(&call_counter); + + const FileDescriptor* file = pool.FindFileByName("foo.proto"); + ASSERT_TRUE(file != NULL); + const Descriptor* foo = pool.FindMessageTypeByName("Foo"); + ASSERT_TRUE(foo != NULL); + const EnumDescriptor* test_enum = pool.FindEnumTypeByName("TestEnum"); + ASSERT_TRUE(test_enum != NULL); + + EXPECT_NE(0, call_counter.call_count_); + call_counter.Clear(); + + EXPECT_TRUE(pool.FindMessageTypeByName("TestEnum") == NULL); + EXPECT_TRUE(pool.FindFieldByName("Foo") == NULL); + EXPECT_TRUE(pool.FindExtensionByName("Foo") == NULL); + EXPECT_TRUE(pool.FindEnumTypeByName("Foo") == NULL); + EXPECT_TRUE(pool.FindEnumValueByName("Foo") == NULL); + EXPECT_TRUE(pool.FindServiceByName("Foo") == NULL); + EXPECT_TRUE(pool.FindMethodByName("Foo") == NULL); + + EXPECT_EQ(0, call_counter.call_count_); +} + +// =================================================================== + + +} // namespace descriptor_unittest +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/dynamic_message.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/dynamic_message.cc new file mode 100644 index 0000000000..c711a2da43 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/dynamic_message.cc @@ -0,0 +1,558 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// DynamicMessage is implemented by constructing a data structure which +// has roughly the same memory layout as a generated message would have. +// Then, we use GeneratedMessageReflection to implement our reflection +// interface. 
+// All the other operations we need to implement (e.g.
+// parsing, copying, etc.) are already implemented in terms of
+// Reflection, so the rest is easy.
+//
+// The up side of this strategy is that it's very efficient.  We don't
+// need to use hash_maps or generic representations of fields.  The
+// down side is that this is a low-level memory management hack which
+// can be tricky to get right.
+//
+// As mentioned in the header, we only expose a DynamicMessageFactory
+// publicly, not the DynamicMessage class itself.  This is because
+// GenericMessageReflection wants to have a pointer to a "default"
+// copy of the class, with all fields initialized to their default
+// values.  We only want to construct one of these per message type,
+// so DynamicMessageFactory stores a cache of default messages for
+// each type it sees (each unique Descriptor pointer).  The code
+// refers to the "default" copy of the class as the "prototype".
+//
+// Note on memory allocation:  This module often calls "operator new()"
+// to allocate untyped memory, rather than calling something like
+// "new uint8[]".  This is because "operator new()" means "Give me some
+// space which I can use as I please." while "new uint8[]" means "Give
+// me an array of 8-bit integers.".  In practice, the latter may return
+// a pointer that is not aligned correctly for general use.  I believe
+// Item 8 of "More Effective C++" discusses this in more detail, though
+// I don't have the book on me right now so I'm not sure.
+
+#include <algorithm>
+#include <google/protobuf/stubs/hash.h>
+
+#include <google/protobuf/stubs/common.h>
+
+#include <google/protobuf/dynamic_message.h>
+#include <google/protobuf/descriptor.h>
+#include <google/protobuf/descriptor.pb.h>
+#include <google/protobuf/generated_message_util.h>
+#include <google/protobuf/generated_message_reflection.h>
+#include <google/protobuf/reflection_ops.h>
+#include <google/protobuf/repeated_field.h>
+#include <google/protobuf/extension_set.h>
+#include <google/protobuf/wire_format.h>
+
+namespace google {
+namespace protobuf {
+
+using internal::WireFormat;
+using internal::ExtensionSet;
+using internal::GeneratedMessageReflection;
+
+
+// ===================================================================
+// Some helper tables and functions...
+
+namespace {
+
+// Compute the byte size of the in-memory representation of the field.
+int FieldSpaceUsed(const FieldDescriptor* field) {
+  typedef FieldDescriptor FD;  // avoid line wrapping
+  if (field->label() == FD::LABEL_REPEATED) {
+    switch (field->cpp_type()) {
+      case FD::CPPTYPE_INT32  : return sizeof(RepeatedField<int32  >);
+      case FD::CPPTYPE_INT64  : return sizeof(RepeatedField<int64  >);
+      case FD::CPPTYPE_UINT32 : return sizeof(RepeatedField<uint32 >);
+      case FD::CPPTYPE_UINT64 : return sizeof(RepeatedField<uint64 >);
+      case FD::CPPTYPE_DOUBLE : return sizeof(RepeatedField<double >);
+      case FD::CPPTYPE_FLOAT  : return sizeof(RepeatedField<float  >);
+      case FD::CPPTYPE_BOOL   : return sizeof(RepeatedField<bool   >);
+      case FD::CPPTYPE_ENUM   : return sizeof(RepeatedField<int    >);
+      case FD::CPPTYPE_MESSAGE: return sizeof(RepeatedPtrField<Message>);
+
+      case FD::CPPTYPE_STRING:
+        switch (field->options().ctype()) {
+          default:  // TODO(kenton):  Support other string reps.
+          case FieldOptions::STRING:
+            return sizeof(RepeatedPtrField<string>);
+        }
+        break;
+    }
+  } else {
+    switch (field->cpp_type()) {
+      case FD::CPPTYPE_INT32  : return sizeof(int32   );
+      case FD::CPPTYPE_INT64  : return sizeof(int64   );
+      case FD::CPPTYPE_UINT32 : return sizeof(uint32  );
+      case FD::CPPTYPE_UINT64 : return sizeof(uint64  );
+      case FD::CPPTYPE_DOUBLE : return sizeof(double  );
+      case FD::CPPTYPE_FLOAT  : return sizeof(float   );
+      case FD::CPPTYPE_BOOL   : return sizeof(bool    );
+      case FD::CPPTYPE_ENUM   : return sizeof(int     );
+      case FD::CPPTYPE_MESSAGE: return sizeof(Message*);
+
+      case FD::CPPTYPE_STRING:
+        switch (field->options().ctype()) {
+          default:  // TODO(kenton):  Support other string reps.
+          case FieldOptions::STRING:
+            return sizeof(string*);
+        }
+        break;
+    }
+  }
+
+  GOOGLE_LOG(DFATAL) << "Can't get here.";
+  return 0;
+}
+
+inline int DivideRoundingUp(int i, int j) {
+  return (i + (j - 1)) / j;
+}
+
+static const int kSafeAlignment = sizeof(uint64);
+
+inline int AlignTo(int offset, int alignment) {
+  return DivideRoundingUp(offset, alignment) * alignment;
+}
+
+// Rounds the given byte offset up to the next offset aligned such that any
+// type may be stored at it.
+inline int AlignOffset(int offset) {
+  return AlignTo(offset, kSafeAlignment);
+}
+
+#define bitsizeof(T) (sizeof(T) * 8)
+
+}  // namespace
+
+// ===================================================================
+
+class DynamicMessage : public Message {
+ public:
+  struct TypeInfo {
+    int size;
+    int has_bits_offset;
+    int unknown_fields_offset;
+    int extensions_offset;
+
+    // Not owned by the TypeInfo.
+    DynamicMessageFactory* factory;  // The factory that created this object.
+    const DescriptorPool* pool;      // The factory's DescriptorPool.
+    const Descriptor* type;          // Type of this DynamicMessage.
+
+    // Warning:  The order in which the following pointers are defined is
+    // important (the prototype must be deleted *before* the offsets).
+    scoped_array<int> offsets;
+    scoped_ptr<const GeneratedMessageReflection> reflection;
+    scoped_ptr<const DynamicMessage> prototype;
+  };
+
+  DynamicMessage(const TypeInfo* type_info);
+  ~DynamicMessage();
+
+  // Called on the prototype after construction to initialize message fields.
+  void CrossLinkPrototypes();
+
+  // implements Message ----------------------------------------------
+
+  Message* New() const;
+
+  int GetCachedSize() const;
+  void SetCachedSize(int size) const;
+
+  Metadata GetMetadata() const;
+
+ private:
+  GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(DynamicMessage);
+
+  inline bool is_prototype() const {
+    return type_info_->prototype == this ||
+           // If type_info_->prototype is NULL, then we must be constructing
+           // the prototype now, which means we must be the prototype.
+           type_info_->prototype == NULL;
+  }
+
+  inline void* OffsetToPointer(int offset) {
+    return reinterpret_cast<uint8*>(this) + offset;
+  }
+  inline const void* OffsetToPointer(int offset) const {
+    return reinterpret_cast<const uint8*>(this) + offset;
+  }
+
+  const TypeInfo* type_info_;
+
+  // TODO(kenton):  Make this an atomic<int> when C++ supports it.
+  mutable int cached_byte_size_;
+};
+
+DynamicMessage::DynamicMessage(const TypeInfo* type_info)
+  : type_info_(type_info),
+    cached_byte_size_(0) {
+  // We need to call constructors for various fields manually and set
+  // default values where appropriate.  We use placement new to call
+  // constructors.  If you haven't heard of placement new, I suggest Googling
+  // it now.  We use placement new even for primitive types that don't have
+  // constructors for consistency.  (In theory, placement new should be used
+  // any time you are trying to convert untyped memory to typed memory, though
+  // in practice that's not strictly necessary for types that don't have a
+  // constructor.)
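+  //
+  // A rough, self-contained illustration of the pattern used throughout this
+  // constructor and the matching destructor (not part of this file's logic):
+  //
+  //   #include <new>                           // placement operator new
+  //   struct Pt { int x, y; };
+  //   void* raw = operator new(sizeof(Pt));    // untyped, suitably aligned storage
+  //   Pt* p = new (raw) Pt();                  // construct the object in place
+  //   p->~Pt();                                // later, destroy it explicitly
+  //   operator delete(raw);                    // then release the raw storage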
+
+  const Descriptor* descriptor = type_info_->type;
+
+  new(OffsetToPointer(type_info_->unknown_fields_offset)) UnknownFieldSet;
+
+  if (type_info_->extensions_offset != -1) {
+    new(OffsetToPointer(type_info_->extensions_offset)) ExtensionSet;
+  }
+
+  for (int i = 0; i < descriptor->field_count(); i++) {
+    const FieldDescriptor* field = descriptor->field(i);
+    void* field_ptr = OffsetToPointer(type_info_->offsets[i]);
+    switch (field->cpp_type()) {
+#define HANDLE_TYPE(CPPTYPE, TYPE)                                           \
+      case FieldDescriptor::CPPTYPE_##CPPTYPE:                               \
+        if (!field->is_repeated()) {                                         \
+          new(field_ptr) TYPE(field->default_value_##TYPE());                \
+        } else {                                                             \
+          new(field_ptr) RepeatedField<TYPE>();                              \
+        }                                                                    \
+        break;
+
+      HANDLE_TYPE(INT32 , int32 );
+      HANDLE_TYPE(INT64 , int64 );
+      HANDLE_TYPE(UINT32, uint32);
+      HANDLE_TYPE(UINT64, uint64);
+      HANDLE_TYPE(DOUBLE, double);
+      HANDLE_TYPE(FLOAT , float );
+      HANDLE_TYPE(BOOL  , bool  );
+#undef HANDLE_TYPE
+
+      case FieldDescriptor::CPPTYPE_ENUM:
+        if (!field->is_repeated()) {
+          new(field_ptr) int(field->default_value_enum()->number());
+        } else {
+          new(field_ptr) RepeatedField<int>();
+        }
+        break;
+
+      case FieldDescriptor::CPPTYPE_STRING:
+        switch (field->options().ctype()) {
+          default:  // TODO(kenton):  Support other string reps.
+          case FieldOptions::STRING:
+            if (!field->is_repeated()) {
+              if (is_prototype()) {
+                new(field_ptr) const string*(&field->default_value_string());
+              } else {
+                string* default_value =
+                  *reinterpret_cast<string* const*>(
+                    type_info_->prototype->OffsetToPointer(
+                      type_info_->offsets[i]));
+                new(field_ptr) string*(default_value);
+              }
+            } else {
+              new(field_ptr) RepeatedPtrField<string>();
+            }
+            break;
+        }
+        break;
+
+      case FieldDescriptor::CPPTYPE_MESSAGE: {
+        if (!field->is_repeated()) {
+          new(field_ptr) Message*(NULL);
+        } else {
+          new(field_ptr) RepeatedPtrField<Message>();
+        }
+        break;
+      }
+    }
+  }
+}
+
+DynamicMessage::~DynamicMessage() {
+  const Descriptor* descriptor = type_info_->type;
+
+  reinterpret_cast<UnknownFieldSet*>(
+    OffsetToPointer(type_info_->unknown_fields_offset))->~UnknownFieldSet();
+
+  if (type_info_->extensions_offset != -1) {
+    reinterpret_cast<ExtensionSet*>(
+      OffsetToPointer(type_info_->extensions_offset))->~ExtensionSet();
+  }
+
+  // We need to manually run the destructors for repeated fields and strings,
+  // just as we ran their constructors in the DynamicMessage constructor.
+  // Additionally, if any singular embedded messages have been allocated, we
+  // need to delete them, UNLESS we are the prototype message of this type,
+  // in which case any embedded messages are other prototypes and shouldn't
+  // be touched.
+  for (int i = 0; i < descriptor->field_count(); i++) {
+    const FieldDescriptor* field = descriptor->field(i);
+    void* field_ptr = OffsetToPointer(type_info_->offsets[i]);
+
+    if (field->is_repeated()) {
+      switch (field->cpp_type()) {
+#define HANDLE_TYPE(UPPERCASE, LOWERCASE)                                     \
+        case FieldDescriptor::CPPTYPE_##UPPERCASE :                           \
+          reinterpret_cast<RepeatedField<LOWERCASE>*>(field_ptr)              \
+            ->~RepeatedField<LOWERCASE>();                                    \
+          break
+
+        HANDLE_TYPE( INT32,  int32);
+        HANDLE_TYPE( INT64,  int64);
+        HANDLE_TYPE(UINT32, uint32);
+        HANDLE_TYPE(UINT64, uint64);
+        HANDLE_TYPE(DOUBLE, double);
+        HANDLE_TYPE( FLOAT,  float);
+        HANDLE_TYPE(  BOOL,   bool);
+        HANDLE_TYPE(  ENUM,    int);
+#undef HANDLE_TYPE
+
+        case FieldDescriptor::CPPTYPE_STRING:
+          switch (field->options().ctype()) {
+            default:  // TODO(kenton):  Support other string reps.
+ case FieldOptions::STRING: + reinterpret_cast*>(field_ptr) + ->~RepeatedPtrField(); + break; + } + break; + + case FieldDescriptor::CPPTYPE_MESSAGE: + reinterpret_cast*>(field_ptr) + ->~RepeatedPtrField(); + break; + } + + } else if (field->cpp_type() == FieldDescriptor::CPPTYPE_STRING) { + switch (field->options().ctype()) { + default: // TODO(kenton): Support other string reps. + case FieldOptions::STRING: { + string* ptr = *reinterpret_cast(field_ptr); + if (ptr != &field->default_value_string()) { + delete ptr; + } + break; + } + } + } else if ((field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) && + !is_prototype()) { + Message* message = *reinterpret_cast(field_ptr); + if (message != NULL) { + delete message; + } + } + } +} + +void DynamicMessage::CrossLinkPrototypes() { + // This should only be called on the prototype message. + GOOGLE_CHECK(is_prototype()); + + DynamicMessageFactory* factory = type_info_->factory; + const Descriptor* descriptor = type_info_->type; + + // Cross-link default messages. + for (int i = 0; i < descriptor->field_count(); i++) { + const FieldDescriptor* field = descriptor->field(i); + void* field_ptr = OffsetToPointer(type_info_->offsets[i]); + + if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE && + !field->is_repeated()) { + // For fields with message types, we need to cross-link with the + // prototype for the field's type. + // For singular fields, the field is just a pointer which should + // point to the prototype. + *reinterpret_cast(field_ptr) = + factory->GetPrototypeNoLock(field->message_type()); + } + } +} + +Message* DynamicMessage::New() const { + void* new_base = reinterpret_cast(operator new(type_info_->size)); + memset(new_base, 0, type_info_->size); + return new(new_base) DynamicMessage(type_info_); +} + +int DynamicMessage::GetCachedSize() const { + return cached_byte_size_; +} + +void DynamicMessage::SetCachedSize(int size) const { + // This is theoretically not thread-compatible, but in practice it works + // because if multiple threads write this simultaneously, they will be + // writing the exact same value. + cached_byte_size_ = size; +} + +Metadata DynamicMessage::GetMetadata() const { + Metadata metadata; + metadata.descriptor = type_info_->type; + metadata.reflection = type_info_->reflection.get(); + return metadata; +} + +// =================================================================== + +struct DynamicMessageFactory::PrototypeMap { + typedef hash_map Map; + Map map_; +}; + +DynamicMessageFactory::DynamicMessageFactory() + : pool_(NULL), delegate_to_generated_factory_(false), + prototypes_(new PrototypeMap) { +} + +DynamicMessageFactory::DynamicMessageFactory(const DescriptorPool* pool) + : pool_(pool), delegate_to_generated_factory_(false), + prototypes_(new PrototypeMap) { +} + +DynamicMessageFactory::~DynamicMessageFactory() { + for (PrototypeMap::Map::iterator iter = prototypes_->map_.begin(); + iter != prototypes_->map_.end(); ++iter) { + delete iter->second; + } +} + +const Message* DynamicMessageFactory::GetPrototype(const Descriptor* type) { + MutexLock lock(&prototypes_mutex_); + return GetPrototypeNoLock(type); +} + +const Message* DynamicMessageFactory::GetPrototypeNoLock( + const Descriptor* type) { + if (delegate_to_generated_factory_ && + type->file()->pool() == DescriptorPool::generated_pool()) { + return MessageFactory::generated_factory()->GetPrototype(type); + } + + const DynamicMessage::TypeInfo** target = &prototypes_->map_[type]; + if (*target != NULL) { + // Already exists. 
+ return (*target)->prototype.get(); + } + + DynamicMessage::TypeInfo* type_info = new DynamicMessage::TypeInfo; + *target = type_info; + + type_info->type = type; + type_info->pool = (pool_ == NULL) ? type->file()->pool() : pool_; + type_info->factory = this; + + // We need to construct all the structures passed to + // GeneratedMessageReflection's constructor. This includes: + // - A block of memory that contains space for all the message's fields. + // - An array of integers indicating the byte offset of each field within + // this block. + // - A big bitfield containing a bit for each field indicating whether + // or not that field is set. + + // Compute size and offsets. + int* offsets = new int[type->field_count()]; + type_info->offsets.reset(offsets); + + // Decide all field offsets by packing in order. + // We place the DynamicMessage object itself at the beginning of the allocated + // space. + int size = sizeof(DynamicMessage); + size = AlignOffset(size); + + // Next the has_bits, which is an array of uint32s. + type_info->has_bits_offset = size; + int has_bits_array_size = + DivideRoundingUp(type->field_count(), bitsizeof(uint32)); + size += has_bits_array_size * sizeof(uint32); + size = AlignOffset(size); + + // The ExtensionSet, if any. + if (type->extension_range_count() > 0) { + type_info->extensions_offset = size; + size += sizeof(ExtensionSet); + size = AlignOffset(size); + } else { + // No extensions. + type_info->extensions_offset = -1; + } + + // All the fields. + for (int i = 0; i < type->field_count(); i++) { + // Make sure field is aligned to avoid bus errors. + int field_size = FieldSpaceUsed(type->field(i)); + size = AlignTo(size, min(kSafeAlignment, field_size)); + offsets[i] = size; + size += field_size; + } + + // Add the UnknownFieldSet to the end. + size = AlignOffset(size); + type_info->unknown_fields_offset = size; + size += sizeof(UnknownFieldSet); + + // Align the final size to make sure no clever allocators think that + // alignment is not necessary. + size = AlignOffset(size); + type_info->size = size; + + // Allocate the prototype. + void* base = operator new(size); + memset(base, 0, size); + DynamicMessage* prototype = new(base) DynamicMessage(type_info); + type_info->prototype.reset(prototype); + + // Construct the reflection object. + type_info->reflection.reset( + new GeneratedMessageReflection( + type_info->type, + type_info->prototype.get(), + type_info->offsets.get(), + type_info->has_bits_offset, + type_info->unknown_fields_offset, + type_info->extensions_offset, + type_info->pool, + this, + type_info->size)); + + // Cross link prototypes. + prototype->CrossLinkPrototypes(); + + return prototype; +} + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/dynamic_message.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/dynamic_message.h new file mode 100644 index 0000000000..81dd2c639e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/dynamic_message.h @@ -0,0 +1,136 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Defines an implementation of Message which can emulate types which are not +// known at compile-time. + +#ifndef GOOGLE_PROTOBUF_DYNAMIC_MESSAGE_H__ +#define GOOGLE_PROTOBUF_DYNAMIC_MESSAGE_H__ + +#include +#include + +namespace google { +namespace protobuf { + +// Defined in other files. +class Descriptor; // descriptor.h +class DescriptorPool; // descriptor.h + +// Constructs implementations of Message which can emulate types which are not +// known at compile-time. +// +// Sometimes you want to be able to manipulate protocol types that you don't +// know about at compile time. It would be nice to be able to construct +// a Message object which implements the message type given by any arbitrary +// Descriptor. DynamicMessage provides this. +// +// As it turns out, a DynamicMessage needs to construct extra +// information about its type in order to operate. Most of this information +// can be shared between all DynamicMessages of the same type. But, caching +// this information in some sort of global map would be a bad idea, since +// the cached information for a particular descriptor could outlive the +// descriptor itself. To avoid this problem, DynamicMessageFactory +// encapsulates this "cache". All DynamicMessages of the same type created +// from the same factory will share the same support data. Any Descriptors +// used with a particular factory must outlive the factory. +class LIBPROTOBUF_EXPORT DynamicMessageFactory : public MessageFactory { + public: + // Construct a DynamicMessageFactory that will search for extensions in + // the DescriptorPool in which the exendee is defined. + DynamicMessageFactory(); + + // Construct a DynamicMessageFactory that will search for extensions in + // the given DescriptorPool. 
+ // + // DEPRECATED: Use CodedInputStream::SetExtensionRegistry() to tell the + // parser to look for extensions in an alternate pool. However, note that + // this is almost never what you want to do. Almost all users should use + // the zero-arg constructor. + DynamicMessageFactory(const DescriptorPool* pool); + + ~DynamicMessageFactory(); + + // Call this to tell the DynamicMessageFactory that if it is given a + // Descriptor d for which: + // d->file()->pool() == DescriptorPool::generated_pool(), + // then it should delegate to MessageFactory::generated_factory() instead + // of constructing a dynamic implementation of the message. In theory there + // is no down side to doing this, so it may become the default in the future. + void SetDelegateToGeneratedFactory(bool enable) { + delegate_to_generated_factory_ = enable; + } + + // implements MessageFactory --------------------------------------- + + // Given a Descriptor, constructs the default (prototype) Message of that + // type. You can then call that message's New() method to construct a + // mutable message of that type. + // + // Calling this method twice with the same Descriptor returns the same + // object. The returned object remains property of the factory and will + // be destroyed when the factory is destroyed. Also, any objects created + // by calling the prototype's New() method share some data with the + // prototype, so these must be destoyed before the DynamicMessageFactory + // is destroyed. + // + // The given descriptor must outlive the returned message, and hence must + // outlive the DynamicMessageFactory. + // + // The method is thread-safe. + const Message* GetPrototype(const Descriptor* type); + + private: + const DescriptorPool* pool_; + bool delegate_to_generated_factory_; + + // This struct just contains a hash_map. We can't #include from + // this header due to hacks needed for hash_map portability in the open source + // release. Namely, stubs/hash.h, which defines hash_map portably, is not a + // public header (for good reason), but dynamic_message.h is, and public + // headers may only #include other public headers. + struct PrototypeMap; + scoped_ptr prototypes_; + mutable Mutex prototypes_mutex_; + + friend class DynamicMessage; + const Message* GetPrototypeNoLock(const Descriptor* type); + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(DynamicMessageFactory); +}; + +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_DYNAMIC_MESSAGE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/dynamic_message_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/dynamic_message_unittest.cc new file mode 100644 index 0000000000..41b89ab52f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/dynamic_message_unittest.cc @@ -0,0 +1,162 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Since the reflection interface for DynamicMessage is implemented by +// GenericMessageReflection, the only thing we really have to test is +// that DynamicMessage correctly sets up the information that +// GenericMessageReflection needs to use. So, we focus on that in this +// test. Other tests, such as generic_message_reflection_unittest and +// reflection_ops_unittest, cover the rest of the functionality used by +// DynamicMessage. + +#include +#include +#include +#include +#include +#include + +#include +#include + +namespace google { +namespace protobuf { + +class DynamicMessageTest : public testing::Test { + protected: + DescriptorPool pool_; + DynamicMessageFactory factory_; + const Descriptor* descriptor_; + const Message* prototype_; + const Descriptor* extensions_descriptor_; + const Message* extensions_prototype_; + const Descriptor* packed_descriptor_; + const Message* packed_prototype_; + + DynamicMessageTest(): factory_(&pool_) {} + + virtual void SetUp() { + // We want to make sure that DynamicMessage works (particularly with + // extensions) even if we use descriptors that are *not* from compiled-in + // types, so we make copies of the descriptors for unittest.proto and + // unittest_import.proto. 
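+    // A minimal sketch of the same pattern outside this fixture -- building
+    // a descriptor at runtime and instantiating it dynamically (the names
+    // "example.proto", "Example" and "id" are made-up placeholders):
+    //
+    //   FileDescriptorProto file;
+    //   file.set_name("example.proto");
+    //   DescriptorProto* msg_type = file.add_message_type();
+    //   msg_type->set_name("Example");
+    //   FieldDescriptorProto* f = msg_type->add_field();
+    //   f->set_name("id");
+    //   f->set_number(1);
+    //   f->set_type(FieldDescriptorProto::TYPE_INT32);
+    //   f->set_label(FieldDescriptorProto::LABEL_OPTIONAL);
+    //
+    //   DescriptorPool pool;
+    //   const FileDescriptor* built = pool.BuildFile(file);
+    //   const Descriptor* example = built->FindMessageTypeByName("Example");
+    //   DynamicMessageFactory factory(&pool);
+    //   scoped_ptr<Message> m(factory.GetPrototype(example)->New());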
+ FileDescriptorProto unittest_file; + FileDescriptorProto unittest_import_file; + + unittest::TestAllTypes::descriptor()->file()->CopyTo(&unittest_file); + unittest_import::ImportMessage::descriptor()->file()->CopyTo( + &unittest_import_file); + + ASSERT_TRUE(pool_.BuildFile(unittest_import_file) != NULL); + ASSERT_TRUE(pool_.BuildFile(unittest_file) != NULL); + + descriptor_ = pool_.FindMessageTypeByName("protobuf_unittest.TestAllTypes"); + ASSERT_TRUE(descriptor_ != NULL); + prototype_ = factory_.GetPrototype(descriptor_); + + extensions_descriptor_ = + pool_.FindMessageTypeByName("protobuf_unittest.TestAllExtensions"); + ASSERT_TRUE(extensions_descriptor_ != NULL); + extensions_prototype_ = factory_.GetPrototype(extensions_descriptor_); + + packed_descriptor_ = + pool_.FindMessageTypeByName("protobuf_unittest.TestPackedTypes"); + ASSERT_TRUE(packed_descriptor_ != NULL); + packed_prototype_ = factory_.GetPrototype(packed_descriptor_); + } +}; + +TEST_F(DynamicMessageTest, Descriptor) { + // Check that the descriptor on the DynamicMessage matches the descriptor + // passed to GetPrototype(). + EXPECT_EQ(prototype_->GetDescriptor(), descriptor_); +} + +TEST_F(DynamicMessageTest, OnePrototype) { + // Check that requesting the same prototype twice produces the same object. + EXPECT_EQ(prototype_, factory_.GetPrototype(descriptor_)); +} + +TEST_F(DynamicMessageTest, Defaults) { + // Check that all default values are set correctly in the initial message. + TestUtil::ReflectionTester reflection_tester(descriptor_); + reflection_tester.ExpectClearViaReflection(*prototype_); +} + +TEST_F(DynamicMessageTest, IndependentOffsets) { + // Check that all fields have independent offsets by setting each + // one to a unique value then checking that they all still have those + // unique values (i.e. they don't stomp each other). + scoped_ptr message(prototype_->New()); + TestUtil::ReflectionTester reflection_tester(descriptor_); + + reflection_tester.SetAllFieldsViaReflection(message.get()); + reflection_tester.ExpectAllFieldsSetViaReflection(*message); +} + +TEST_F(DynamicMessageTest, Extensions) { + // Check that extensions work. + scoped_ptr message(extensions_prototype_->New()); + TestUtil::ReflectionTester reflection_tester(extensions_descriptor_); + + reflection_tester.SetAllFieldsViaReflection(message.get()); + reflection_tester.ExpectAllFieldsSetViaReflection(*message); +} + +TEST_F(DynamicMessageTest, PackedFields) { + // Check that packed fields work properly. + scoped_ptr message(packed_prototype_->New()); + TestUtil::ReflectionTester reflection_tester(packed_descriptor_); + + reflection_tester.SetPackedFieldsViaReflection(message.get()); + reflection_tester.ExpectPackedFieldsSetViaReflection(*message); +} + +TEST_F(DynamicMessageTest, SpaceUsed) { + // Test that SpaceUsed() works properly + + // Since we share the implementation with generated messages, we don't need + // to test very much here. Just make sure it appears to be working. 
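+  // Rough sketch of what is being asserted (illustrative only;
+  // 'some_string_field' stands in for any optional string field): setting
+  // heap-backed fields should grow the reported footprint.
+  //
+  //   scoped_ptr<Message> m(prototype_->New());
+  //   int before = m->SpaceUsed();
+  //   m->GetReflection()->SetString(m.get(), some_string_field,
+  //                                 string(1024, 'x'));
+  //   // m->SpaceUsed() is now expected to be larger than 'before'.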
+ + scoped_ptr message(prototype_->New()); + TestUtil::ReflectionTester reflection_tester(descriptor_); + + int initial_space_used = message->SpaceUsed(); + + reflection_tester.SetAllFieldsViaReflection(message.get()); + EXPECT_LT(initial_space_used, message->SpaceUsed()); +} + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/extension_set.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/extension_set.cc new file mode 100644 index 0000000000..1a6f5dfc25 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/extension_set.cc @@ -0,0 +1,1464 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace internal { + +namespace { + +inline WireFormatLite::FieldType real_type(FieldType type) { + GOOGLE_DCHECK(type > 0 && type <= WireFormatLite::MAX_FIELD_TYPE); + return static_cast(type); +} + +inline WireFormatLite::CppType cpp_type(FieldType type) { + return WireFormatLite::FieldTypeToCppType(real_type(type)); +} + +// Registry stuff. +typedef hash_map, + ExtensionInfo> ExtensionRegistry; +ExtensionRegistry* registry_ = NULL; +GOOGLE_PROTOBUF_DECLARE_ONCE(registry_init_); + +void DeleteRegistry() { + delete registry_; + registry_ = NULL; +} + +void InitRegistry() { + registry_ = new ExtensionRegistry; + internal::OnShutdown(&DeleteRegistry); +} + +// This function is only called at startup, so there is no need for thread- +// safety. 
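+// The registry is keyed by (containing-type default instance, field number),
+// so a lookup sketch looks like this (illustrative, not part of the upstream
+// file; 'SomeMessage' and field number 100 are made-up placeholders):
+//
+//   const ExtensionInfo* info =
+//       FindRegisteredExtension(&SomeMessage::default_instance(), 100);
+//   if (info != NULL) {
+//     // info->type tells the parser how to decode field 100 when it is
+//     // encountered while parsing a SomeMessage.
+//   }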
+void Register(const MessageLite* containing_type, + int number, ExtensionInfo info) { + ::google::protobuf::GoogleOnceInit(®istry_init_, &InitRegistry); + + if (!InsertIfNotPresent(registry_, make_pair(containing_type, number), + info)) { + GOOGLE_LOG(FATAL) << "Multiple extension registrations for type \"" + << containing_type->GetTypeName() + << "\", field number " << number << "."; + } +} + +const ExtensionInfo* FindRegisteredExtension( + const MessageLite* containing_type, int number) { + return (registry_ == NULL) ? NULL : + FindOrNull(*registry_, make_pair(containing_type, number)); +} + +} // namespace + +ExtensionFinder::~ExtensionFinder() {} + +bool GeneratedExtensionFinder::Find(int number, ExtensionInfo* output) { + const ExtensionInfo* extension = + FindRegisteredExtension(containing_type_, number); + if (extension == NULL) { + return false; + } else { + *output = *extension; + return true; + } +} + +void ExtensionSet::RegisterExtension(const MessageLite* containing_type, + int number, FieldType type, + bool is_repeated, bool is_packed) { + GOOGLE_CHECK_NE(type, WireFormatLite::TYPE_ENUM); + GOOGLE_CHECK_NE(type, WireFormatLite::TYPE_MESSAGE); + GOOGLE_CHECK_NE(type, WireFormatLite::TYPE_GROUP); + ExtensionInfo info(type, is_repeated, is_packed); + Register(containing_type, number, info); +} + +static bool CallNoArgValidityFunc(const void* arg, int number) { + // Note: Must use C-style cast here rather than reinterpret_cast because + // the C++ standard at one point did not allow casts between function and + // data pointers and some compilers enforce this for C++-style casts. No + // compiler enforces it for C-style casts since lots of C-style code has + // relied on these kinds of casts for a long time, despite being + // technically undefined. See: + // http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_defects.html#195 + // Also note: Some compilers do not allow function pointers to be "const". + // Which makes sense, I suppose, because it's meaningless. + return ((EnumValidityFunc*)arg)(number); +} + +void ExtensionSet::RegisterEnumExtension(const MessageLite* containing_type, + int number, FieldType type, + bool is_repeated, bool is_packed, + EnumValidityFunc* is_valid) { + GOOGLE_CHECK_EQ(type, WireFormatLite::TYPE_ENUM); + ExtensionInfo info(type, is_repeated, is_packed); + info.enum_validity_check.func = CallNoArgValidityFunc; + // See comment in CallNoArgValidityFunc() about why we use a c-style cast. + info.enum_validity_check.arg = (void*)is_valid; + Register(containing_type, number, info); +} + +void ExtensionSet::RegisterMessageExtension(const MessageLite* containing_type, + int number, FieldType type, + bool is_repeated, bool is_packed, + const MessageLite* prototype) { + GOOGLE_CHECK(type == WireFormatLite::TYPE_MESSAGE || + type == WireFormatLite::TYPE_GROUP); + ExtensionInfo info(type, is_repeated, is_packed); + info.message_prototype = prototype; + Register(containing_type, number, info); +} + + +// =================================================================== +// Constructors and basic methods. + +ExtensionSet::ExtensionSet() {} + +ExtensionSet::~ExtensionSet() { + for (map::iterator iter = extensions_.begin(); + iter != extensions_.end(); ++iter) { + iter->second.Free(); + } +} + +// Defined in extension_set_heavy.cc. 
+// void ExtensionSet::AppendToList(const Descriptor* containing_type, +// const DescriptorPool* pool, +// vector* output) const + +bool ExtensionSet::Has(int number) const { + map::const_iterator iter = extensions_.find(number); + if (iter == extensions_.end()) return false; + GOOGLE_DCHECK(!iter->second.is_repeated); + return !iter->second.is_cleared; +} + +int ExtensionSet::ExtensionSize(int number) const { + map::const_iterator iter = extensions_.find(number); + if (iter == extensions_.end()) return false; + return iter->second.GetSize(); +} + +FieldType ExtensionSet::ExtensionType(int number) const { + map::const_iterator iter = extensions_.find(number); + if (iter == extensions_.end()) { + GOOGLE_LOG(DFATAL) << "Don't lookup extension types if they aren't present (1). "; + return 0; + } + if (iter->second.is_cleared) { + GOOGLE_LOG(DFATAL) << "Don't lookup extension types if they aren't present (2). "; + } + return iter->second.type; +} + +void ExtensionSet::ClearExtension(int number) { + map::iterator iter = extensions_.find(number); + if (iter == extensions_.end()) return; + iter->second.Clear(); +} + +// =================================================================== +// Field accessors + +namespace { + +enum Cardinality { + REPEATED, + OPTIONAL +}; + +} // namespace + +#define GOOGLE_DCHECK_TYPE(EXTENSION, LABEL, CPPTYPE) \ + GOOGLE_DCHECK_EQ((EXTENSION).is_repeated ? REPEATED : OPTIONAL, LABEL); \ + GOOGLE_DCHECK_EQ(cpp_type((EXTENSION).type), WireFormatLite::CPPTYPE_##CPPTYPE) + +// ------------------------------------------------------------------- +// Primitives + +#define PRIMITIVE_ACCESSORS(UPPERCASE, LOWERCASE, CAMELCASE) \ + \ +LOWERCASE ExtensionSet::Get##CAMELCASE(int number, \ + LOWERCASE default_value) const { \ + map::const_iterator iter = extensions_.find(number); \ + if (iter == extensions_.end() || iter->second.is_cleared) { \ + return default_value; \ + } else { \ + GOOGLE_DCHECK_TYPE(iter->second, OPTIONAL, UPPERCASE); \ + return iter->second.LOWERCASE##_value; \ + } \ +} \ + \ +void ExtensionSet::Set##CAMELCASE(int number, FieldType type, \ + LOWERCASE value, \ + const FieldDescriptor* descriptor) { \ + Extension* extension; \ + if (MaybeNewExtension(number, descriptor, &extension)) { \ + extension->type = type; \ + GOOGLE_DCHECK_EQ(cpp_type(extension->type), WireFormatLite::CPPTYPE_##UPPERCASE); \ + extension->is_repeated = false; \ + } else { \ + GOOGLE_DCHECK_TYPE(*extension, OPTIONAL, UPPERCASE); \ + } \ + extension->is_cleared = false; \ + extension->LOWERCASE##_value = value; \ +} \ + \ +LOWERCASE ExtensionSet::GetRepeated##CAMELCASE(int number, int index) const { \ + map::const_iterator iter = extensions_.find(number); \ + GOOGLE_CHECK(iter != extensions_.end()) << "Index out-of-bounds (field is empty)."; \ + GOOGLE_DCHECK_TYPE(iter->second, REPEATED, UPPERCASE); \ + return iter->second.repeated_##LOWERCASE##_value->Get(index); \ +} \ + \ +void ExtensionSet::SetRepeated##CAMELCASE( \ + int number, int index, LOWERCASE value) { \ + map::iterator iter = extensions_.find(number); \ + GOOGLE_CHECK(iter != extensions_.end()) << "Index out-of-bounds (field is empty)."; \ + GOOGLE_DCHECK_TYPE(iter->second, REPEATED, UPPERCASE); \ + iter->second.repeated_##LOWERCASE##_value->Set(index, value); \ +} \ + \ +void ExtensionSet::Add##CAMELCASE(int number, FieldType type, \ + bool packed, LOWERCASE value, \ + const FieldDescriptor* descriptor) { \ + Extension* extension; \ + if (MaybeNewExtension(number, descriptor, &extension)) { \ + extension->type = type; \ + 
GOOGLE_DCHECK_EQ(cpp_type(extension->type), WireFormatLite::CPPTYPE_##UPPERCASE); \ + extension->is_repeated = true; \ + extension->is_packed = packed; \ + extension->repeated_##LOWERCASE##_value = new RepeatedField(); \ + } else { \ + GOOGLE_DCHECK_TYPE(*extension, REPEATED, UPPERCASE); \ + GOOGLE_DCHECK_EQ(extension->is_packed, packed); \ + } \ + extension->repeated_##LOWERCASE##_value->Add(value); \ +} + +PRIMITIVE_ACCESSORS( INT32, int32, Int32) +PRIMITIVE_ACCESSORS( INT64, int64, Int64) +PRIMITIVE_ACCESSORS(UINT32, uint32, UInt32) +PRIMITIVE_ACCESSORS(UINT64, uint64, UInt64) +PRIMITIVE_ACCESSORS( FLOAT, float, Float) +PRIMITIVE_ACCESSORS(DOUBLE, double, Double) +PRIMITIVE_ACCESSORS( BOOL, bool, Bool) + +#undef PRIMITIVE_ACCESSORS + +// ------------------------------------------------------------------- +// Enums + +int ExtensionSet::GetEnum(int number, int default_value) const { + map::const_iterator iter = extensions_.find(number); + if (iter == extensions_.end() || iter->second.is_cleared) { + // Not present. Return the default value. + return default_value; + } else { + GOOGLE_DCHECK_TYPE(iter->second, OPTIONAL, ENUM); + return iter->second.enum_value; + } +} + +void ExtensionSet::SetEnum(int number, FieldType type, int value, + const FieldDescriptor* descriptor) { + Extension* extension; + if (MaybeNewExtension(number, descriptor, &extension)) { + extension->type = type; + GOOGLE_DCHECK_EQ(cpp_type(extension->type), WireFormatLite::CPPTYPE_ENUM); + extension->is_repeated = false; + } else { + GOOGLE_DCHECK_TYPE(*extension, OPTIONAL, ENUM); + } + extension->is_cleared = false; + extension->enum_value = value; +} + +int ExtensionSet::GetRepeatedEnum(int number, int index) const { + map::const_iterator iter = extensions_.find(number); + GOOGLE_CHECK(iter != extensions_.end()) << "Index out-of-bounds (field is empty)."; + GOOGLE_DCHECK_TYPE(iter->second, REPEATED, ENUM); + return iter->second.repeated_enum_value->Get(index); +} + +void ExtensionSet::SetRepeatedEnum(int number, int index, int value) { + map::iterator iter = extensions_.find(number); + GOOGLE_CHECK(iter != extensions_.end()) << "Index out-of-bounds (field is empty)."; + GOOGLE_DCHECK_TYPE(iter->second, REPEATED, ENUM); + iter->second.repeated_enum_value->Set(index, value); +} + +void ExtensionSet::AddEnum(int number, FieldType type, + bool packed, int value, + const FieldDescriptor* descriptor) { + Extension* extension; + if (MaybeNewExtension(number, descriptor, &extension)) { + extension->type = type; + GOOGLE_DCHECK_EQ(cpp_type(extension->type), WireFormatLite::CPPTYPE_ENUM); + extension->is_repeated = true; + extension->is_packed = packed; + extension->repeated_enum_value = new RepeatedField(); + } else { + GOOGLE_DCHECK_TYPE(*extension, REPEATED, ENUM); + GOOGLE_DCHECK_EQ(extension->is_packed, packed); + } + extension->repeated_enum_value->Add(value); +} + +// ------------------------------------------------------------------- +// Strings + +const string& ExtensionSet::GetString(int number, + const string& default_value) const { + map::const_iterator iter = extensions_.find(number); + if (iter == extensions_.end() || iter->second.is_cleared) { + // Not present. Return the default value. 
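+    // The caller supplies the default: generated extension accessors pass
+    // the extension's declared default value as 'default_value', so a
+    // missing extension reads back as that default. Hypothetical sketch
+    // (not from the upstream file):
+    //
+    //   ExtensionSet set;
+    //   string def = "fallback";
+    //   // Nothing stored for field 7 yet, so the default comes back:
+    //   assert(set.GetString(7, def) == "fallback");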
+ return default_value; + } else { + GOOGLE_DCHECK_TYPE(iter->second, OPTIONAL, STRING); + return *iter->second.string_value; + } +} + +string* ExtensionSet::MutableString(int number, FieldType type, + const FieldDescriptor* descriptor) { + Extension* extension; + if (MaybeNewExtension(number, descriptor, &extension)) { + extension->type = type; + GOOGLE_DCHECK_EQ(cpp_type(extension->type), WireFormatLite::CPPTYPE_STRING); + extension->is_repeated = false; + extension->string_value = new string; + } else { + GOOGLE_DCHECK_TYPE(*extension, OPTIONAL, STRING); + } + extension->is_cleared = false; + return extension->string_value; +} + +const string& ExtensionSet::GetRepeatedString(int number, int index) const { + map::const_iterator iter = extensions_.find(number); + GOOGLE_CHECK(iter != extensions_.end()) << "Index out-of-bounds (field is empty)."; + GOOGLE_DCHECK_TYPE(iter->second, REPEATED, STRING); + return iter->second.repeated_string_value->Get(index); +} + +string* ExtensionSet::MutableRepeatedString(int number, int index) { + map::iterator iter = extensions_.find(number); + GOOGLE_CHECK(iter != extensions_.end()) << "Index out-of-bounds (field is empty)."; + GOOGLE_DCHECK_TYPE(iter->second, REPEATED, STRING); + return iter->second.repeated_string_value->Mutable(index); +} + +string* ExtensionSet::AddString(int number, FieldType type, + const FieldDescriptor* descriptor) { + Extension* extension; + if (MaybeNewExtension(number, descriptor, &extension)) { + extension->type = type; + GOOGLE_DCHECK_EQ(cpp_type(extension->type), WireFormatLite::CPPTYPE_STRING); + extension->is_repeated = true; + extension->is_packed = false; + extension->repeated_string_value = new RepeatedPtrField(); + } else { + GOOGLE_DCHECK_TYPE(*extension, REPEATED, STRING); + } + return extension->repeated_string_value->Add(); +} + +// ------------------------------------------------------------------- +// Messages + +const MessageLite& ExtensionSet::GetMessage( + int number, const MessageLite& default_value) const { + map::const_iterator iter = extensions_.find(number); + if (iter == extensions_.end()) { + // Not present. Return the default value. + return default_value; + } else { + GOOGLE_DCHECK_TYPE(iter->second, OPTIONAL, MESSAGE); + return *iter->second.message_value; + } +} + +// Defined in extension_set_heavy.cc. +// const MessageLite& ExtensionSet::GetMessage(int number, +// const Descriptor* message_type, +// MessageFactory* factory) const + +MessageLite* ExtensionSet::MutableMessage(int number, FieldType type, + const MessageLite& prototype, + const FieldDescriptor* descriptor) { + Extension* extension; + if (MaybeNewExtension(number, descriptor, &extension)) { + extension->type = type; + GOOGLE_DCHECK_EQ(cpp_type(extension->type), WireFormatLite::CPPTYPE_MESSAGE); + extension->is_repeated = false; + extension->message_value = prototype.New(); + } else { + GOOGLE_DCHECK_TYPE(*extension, OPTIONAL, MESSAGE); + } + extension->is_cleared = false; + return extension->message_value; +} + +// Defined in extension_set_heavy.cc. 
+// MessageLite* ExtensionSet::MutableMessage(int number, FieldType type, +// const Descriptor* message_type, +// MessageFactory* factory) + +const MessageLite& ExtensionSet::GetRepeatedMessage( + int number, int index) const { + map::const_iterator iter = extensions_.find(number); + GOOGLE_CHECK(iter != extensions_.end()) << "Index out-of-bounds (field is empty)."; + GOOGLE_DCHECK_TYPE(iter->second, REPEATED, MESSAGE); + return iter->second.repeated_message_value->Get(index); +} + +MessageLite* ExtensionSet::MutableRepeatedMessage(int number, int index) { + map::iterator iter = extensions_.find(number); + GOOGLE_CHECK(iter != extensions_.end()) << "Index out-of-bounds (field is empty)."; + GOOGLE_DCHECK_TYPE(iter->second, REPEATED, MESSAGE); + return iter->second.repeated_message_value->Mutable(index); +} + +MessageLite* ExtensionSet::AddMessage(int number, FieldType type, + const MessageLite& prototype, + const FieldDescriptor* descriptor) { + Extension* extension; + if (MaybeNewExtension(number, descriptor, &extension)) { + extension->type = type; + GOOGLE_DCHECK_EQ(cpp_type(extension->type), WireFormatLite::CPPTYPE_MESSAGE); + extension->is_repeated = true; + extension->repeated_message_value = + new RepeatedPtrField(); + } else { + GOOGLE_DCHECK_TYPE(*extension, REPEATED, MESSAGE); + } + + // RepeatedPtrField does not know how to Add() since it cannot + // allocate an abstract object, so we have to be tricky. + MessageLite* result = extension->repeated_message_value + ->AddFromCleared >(); + if (result == NULL) { + result = prototype.New(); + extension->repeated_message_value->AddAllocated(result); + } + return result; +} + +// Defined in extension_set_heavy.cc. +// MessageLite* ExtensionSet::AddMessage(int number, FieldType type, +// const Descriptor* message_type, +// MessageFactory* factory) + +#undef GOOGLE_DCHECK_TYPE + +void ExtensionSet::RemoveLast(int number) { + map::iterator iter = extensions_.find(number); + GOOGLE_CHECK(iter != extensions_.end()) << "Index out-of-bounds (field is empty)."; + + Extension* extension = &iter->second; + GOOGLE_DCHECK(extension->is_repeated); + + switch(cpp_type(extension->type)) { + case WireFormatLite::CPPTYPE_INT32: + extension->repeated_int32_value->RemoveLast(); + break; + case WireFormatLite::CPPTYPE_INT64: + extension->repeated_int64_value->RemoveLast(); + break; + case WireFormatLite::CPPTYPE_UINT32: + extension->repeated_uint32_value->RemoveLast(); + break; + case WireFormatLite::CPPTYPE_UINT64: + extension->repeated_uint64_value->RemoveLast(); + break; + case WireFormatLite::CPPTYPE_FLOAT: + extension->repeated_float_value->RemoveLast(); + break; + case WireFormatLite::CPPTYPE_DOUBLE: + extension->repeated_double_value->RemoveLast(); + break; + case WireFormatLite::CPPTYPE_BOOL: + extension->repeated_bool_value->RemoveLast(); + break; + case WireFormatLite::CPPTYPE_ENUM: + extension->repeated_enum_value->RemoveLast(); + break; + case WireFormatLite::CPPTYPE_STRING: + extension->repeated_string_value->RemoveLast(); + break; + case WireFormatLite::CPPTYPE_MESSAGE: + extension->repeated_message_value->RemoveLast(); + break; + } +} + +void ExtensionSet::SwapElements(int number, int index1, int index2) { + map::iterator iter = extensions_.find(number); + GOOGLE_CHECK(iter != extensions_.end()) << "Index out-of-bounds (field is empty)."; + + Extension* extension = &iter->second; + GOOGLE_DCHECK(extension->is_repeated); + + switch(cpp_type(extension->type)) { + case WireFormatLite::CPPTYPE_INT32: + 
extension->repeated_int32_value->SwapElements(index1, index2); + break; + case WireFormatLite::CPPTYPE_INT64: + extension->repeated_int64_value->SwapElements(index1, index2); + break; + case WireFormatLite::CPPTYPE_UINT32: + extension->repeated_uint32_value->SwapElements(index1, index2); + break; + case WireFormatLite::CPPTYPE_UINT64: + extension->repeated_uint64_value->SwapElements(index1, index2); + break; + case WireFormatLite::CPPTYPE_FLOAT: + extension->repeated_float_value->SwapElements(index1, index2); + break; + case WireFormatLite::CPPTYPE_DOUBLE: + extension->repeated_double_value->SwapElements(index1, index2); + break; + case WireFormatLite::CPPTYPE_BOOL: + extension->repeated_bool_value->SwapElements(index1, index2); + break; + case WireFormatLite::CPPTYPE_ENUM: + extension->repeated_enum_value->SwapElements(index1, index2); + break; + case WireFormatLite::CPPTYPE_STRING: + extension->repeated_string_value->SwapElements(index1, index2); + break; + case WireFormatLite::CPPTYPE_MESSAGE: + extension->repeated_message_value->SwapElements(index1, index2); + break; + } +} + +// =================================================================== + +void ExtensionSet::Clear() { + for (map::iterator iter = extensions_.begin(); + iter != extensions_.end(); ++iter) { + iter->second.Clear(); + } +} + +void ExtensionSet::MergeFrom(const ExtensionSet& other) { + for (map::const_iterator iter = other.extensions_.begin(); + iter != other.extensions_.end(); ++iter) { + const Extension& other_extension = iter->second; + + if (other_extension.is_repeated) { + Extension* extension; + bool is_new = MaybeNewExtension(iter->first, other_extension.descriptor, + &extension); + if (is_new) { + // Extension did not already exist in set. + extension->type = other_extension.type; + extension->is_repeated = true; + } else { + GOOGLE_DCHECK_EQ(extension->type, other_extension.type); + GOOGLE_DCHECK(extension->is_repeated); + } + + switch (cpp_type(other_extension.type)) { +#define HANDLE_TYPE(UPPERCASE, LOWERCASE, REPEATED_TYPE) \ + case WireFormatLite::CPPTYPE_##UPPERCASE: \ + if (is_new) { \ + extension->repeated_##LOWERCASE##_value = \ + new REPEATED_TYPE; \ + } \ + extension->repeated_##LOWERCASE##_value->MergeFrom( \ + *other_extension.repeated_##LOWERCASE##_value); \ + break; + + HANDLE_TYPE( INT32, int32, RepeatedField < int32>); + HANDLE_TYPE( INT64, int64, RepeatedField < int64>); + HANDLE_TYPE( UINT32, uint32, RepeatedField < uint32>); + HANDLE_TYPE( UINT64, uint64, RepeatedField < uint64>); + HANDLE_TYPE( FLOAT, float, RepeatedField < float>); + HANDLE_TYPE( DOUBLE, double, RepeatedField < double>); + HANDLE_TYPE( BOOL, bool, RepeatedField < bool>); + HANDLE_TYPE( ENUM, enum, RepeatedField < int>); + HANDLE_TYPE( STRING, string, RepeatedPtrField< string>); +#undef HANDLE_TYPE + + case WireFormatLite::CPPTYPE_MESSAGE: + if (is_new) { + extension->repeated_message_value = + new RepeatedPtrField(); + } + // We can't call RepeatedPtrField::MergeFrom() because + // it would attempt to allocate new objects. 
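+          // The loop below therefore reuses elements that were previously
+          // Clear()'d (via AddFromCleared) and only falls back to allocating
+          // with other_message.New() when no cleared element is available --
+          // roughly:
+          //
+          //   MessageLite* target = repeated->AddFromCleared<...>();
+          //   if (target == NULL) target = source.New();   // allocate
+          //   target->CheckTypeAndMergeFrom(source);       // then merge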
+ RepeatedPtrField* other_repeated_message = + other_extension.repeated_message_value; + for (int i = 0; i < other_repeated_message->size(); i++) { + const MessageLite& other_message = other_repeated_message->Get(i); + MessageLite* target = extension->repeated_message_value + ->AddFromCleared >(); + if (target == NULL) { + target = other_message.New(); + extension->repeated_message_value->AddAllocated(target); + } + target->CheckTypeAndMergeFrom(other_message); + } + break; + } + } else { + if (!other_extension.is_cleared) { + switch (cpp_type(other_extension.type)) { +#define HANDLE_TYPE(UPPERCASE, LOWERCASE, CAMELCASE) \ + case WireFormatLite::CPPTYPE_##UPPERCASE: \ + Set##CAMELCASE(iter->first, other_extension.type, \ + other_extension.LOWERCASE##_value, \ + other_extension.descriptor); \ + break; + + HANDLE_TYPE( INT32, int32, Int32); + HANDLE_TYPE( INT64, int64, Int64); + HANDLE_TYPE(UINT32, uint32, UInt32); + HANDLE_TYPE(UINT64, uint64, UInt64); + HANDLE_TYPE( FLOAT, float, Float); + HANDLE_TYPE(DOUBLE, double, Double); + HANDLE_TYPE( BOOL, bool, Bool); + HANDLE_TYPE( ENUM, enum, Enum); +#undef HANDLE_TYPE + case WireFormatLite::CPPTYPE_STRING: + SetString(iter->first, other_extension.type, + *other_extension.string_value, + other_extension.descriptor); + break; + case WireFormatLite::CPPTYPE_MESSAGE: + MutableMessage(iter->first, other_extension.type, + *other_extension.message_value, + other_extension.descriptor) + ->CheckTypeAndMergeFrom(*other_extension.message_value); + break; + } + } + } + } +} + +void ExtensionSet::Swap(ExtensionSet* x) { + extensions_.swap(x->extensions_); +} + +bool ExtensionSet::IsInitialized() const { + // Extensions are never required. However, we need to check that all + // embedded messages are initialized. + for (map::const_iterator iter = extensions_.begin(); + iter != extensions_.end(); ++iter) { + const Extension& extension = iter->second; + if (cpp_type(extension.type) == WireFormatLite::CPPTYPE_MESSAGE) { + if (extension.is_repeated) { + for (int i = 0; i < extension.repeated_message_value->size(); i++) { + if (!extension.repeated_message_value->Get(i).IsInitialized()) { + return false; + } + } + } else { + if (!extension.is_cleared) { + if (!extension.message_value->IsInitialized()) return false; + } + } + } + } + + return true; +} + +bool ExtensionSet::ParseField(uint32 tag, io::CodedInputStream* input, + ExtensionFinder* extension_finder, + FieldSkipper* field_skipper) { + int number = WireFormatLite::GetTagFieldNumber(tag); + WireFormatLite::WireType wire_type = WireFormatLite::GetTagWireType(tag); + + ExtensionInfo extension; + bool is_unknown; + if (!extension_finder->Find(number, &extension)) { + is_unknown = true; + } else if (extension.is_packed) { + is_unknown = (wire_type != WireFormatLite::WIRETYPE_LENGTH_DELIMITED); + } else { + WireFormatLite::WireType expected_wire_type = + WireFormatLite::WireTypeForFieldType(real_type(extension.type)); + is_unknown = (wire_type != expected_wire_type); + } + + if (is_unknown) { + field_skipper->SkipField(input, tag); + } else if (extension.is_packed) { + uint32 size; + if (!input->ReadVarint32(&size)) return false; + io::CodedInputStream::Limit limit = input->PushLimit(size); + + switch (extension.type) { +#define HANDLE_TYPE(UPPERCASE, CPP_CAMELCASE, CPP_LOWERCASE) \ + case WireFormatLite::TYPE_##UPPERCASE: \ + while (input->BytesUntilLimit() > 0) { \ + CPP_LOWERCASE value; \ + if (!WireFormatLite::ReadPrimitive< \ + CPP_LOWERCASE, WireFormatLite::TYPE_##UPPERCASE>( \ + input, &value)) return 
false; \ + Add##CPP_CAMELCASE(number, WireFormatLite::TYPE_##UPPERCASE, \ + true, value, extension.descriptor); \ + } \ + break + + HANDLE_TYPE( INT32, Int32, int32); + HANDLE_TYPE( INT64, Int64, int64); + HANDLE_TYPE( UINT32, UInt32, uint32); + HANDLE_TYPE( UINT64, UInt64, uint64); + HANDLE_TYPE( SINT32, Int32, int32); + HANDLE_TYPE( SINT64, Int64, int64); + HANDLE_TYPE( FIXED32, UInt32, uint32); + HANDLE_TYPE( FIXED64, UInt64, uint64); + HANDLE_TYPE(SFIXED32, Int32, int32); + HANDLE_TYPE(SFIXED64, Int64, int64); + HANDLE_TYPE( FLOAT, Float, float); + HANDLE_TYPE( DOUBLE, Double, double); + HANDLE_TYPE( BOOL, Bool, bool); +#undef HANDLE_TYPE + + case WireFormatLite::TYPE_ENUM: + while (input->BytesUntilLimit() > 0) { + int value; + if (!WireFormatLite::ReadPrimitive( + input, &value)) return false; + if (extension.enum_validity_check.func( + extension.enum_validity_check.arg, value)) { + AddEnum(number, WireFormatLite::TYPE_ENUM, true, value, + extension.descriptor); + } + } + break; + + case WireFormatLite::TYPE_STRING: + case WireFormatLite::TYPE_BYTES: + case WireFormatLite::TYPE_GROUP: + case WireFormatLite::TYPE_MESSAGE: + GOOGLE_LOG(FATAL) << "Non-primitive types can't be packed."; + break; + } + + input->PopLimit(limit); + } else { + switch (extension.type) { +#define HANDLE_TYPE(UPPERCASE, CPP_CAMELCASE, CPP_LOWERCASE) \ + case WireFormatLite::TYPE_##UPPERCASE: { \ + CPP_LOWERCASE value; \ + if (!WireFormatLite::ReadPrimitive< \ + CPP_LOWERCASE, WireFormatLite::TYPE_##UPPERCASE>( \ + input, &value)) return false; \ + if (extension.is_repeated) { \ + Add##CPP_CAMELCASE(number, WireFormatLite::TYPE_##UPPERCASE, \ + false, value, extension.descriptor); \ + } else { \ + Set##CPP_CAMELCASE(number, WireFormatLite::TYPE_##UPPERCASE, value, \ + extension.descriptor); \ + } \ + } break + + HANDLE_TYPE( INT32, Int32, int32); + HANDLE_TYPE( INT64, Int64, int64); + HANDLE_TYPE( UINT32, UInt32, uint32); + HANDLE_TYPE( UINT64, UInt64, uint64); + HANDLE_TYPE( SINT32, Int32, int32); + HANDLE_TYPE( SINT64, Int64, int64); + HANDLE_TYPE( FIXED32, UInt32, uint32); + HANDLE_TYPE( FIXED64, UInt64, uint64); + HANDLE_TYPE(SFIXED32, Int32, int32); + HANDLE_TYPE(SFIXED64, Int64, int64); + HANDLE_TYPE( FLOAT, Float, float); + HANDLE_TYPE( DOUBLE, Double, double); + HANDLE_TYPE( BOOL, Bool, bool); +#undef HANDLE_TYPE + + case WireFormatLite::TYPE_ENUM: { + int value; + if (!WireFormatLite::ReadPrimitive( + input, &value)) return false; + + if (!extension.enum_validity_check.func( + extension.enum_validity_check.arg, value)) { + // Invalid value. Treat as unknown. + field_skipper->SkipUnknownEnum(number, value); + } else if (extension.is_repeated) { + AddEnum(number, WireFormatLite::TYPE_ENUM, false, value, + extension.descriptor); + } else { + SetEnum(number, WireFormatLite::TYPE_ENUM, value, + extension.descriptor); + } + break; + } + + case WireFormatLite::TYPE_STRING: { + string* value = extension.is_repeated ? + AddString(number, WireFormatLite::TYPE_STRING, extension.descriptor) : + MutableString(number, WireFormatLite::TYPE_STRING, + extension.descriptor); + if (!WireFormatLite::ReadString(input, value)) return false; + break; + } + + case WireFormatLite::TYPE_BYTES: { + string* value = extension.is_repeated ? 
+ AddString(number, WireFormatLite::TYPE_STRING, extension.descriptor) : + MutableString(number, WireFormatLite::TYPE_STRING, + extension.descriptor); + if (!WireFormatLite::ReadBytes(input, value)) return false; + break; + } + + case WireFormatLite::TYPE_GROUP: { + MessageLite* value = extension.is_repeated ? + AddMessage(number, WireFormatLite::TYPE_GROUP, + *extension.message_prototype, extension.descriptor) : + MutableMessage(number, WireFormatLite::TYPE_GROUP, + *extension.message_prototype, extension.descriptor); + if (!WireFormatLite::ReadGroup(number, input, value)) return false; + break; + } + + case WireFormatLite::TYPE_MESSAGE: { + MessageLite* value = extension.is_repeated ? + AddMessage(number, WireFormatLite::TYPE_MESSAGE, + *extension.message_prototype, extension.descriptor) : + MutableMessage(number, WireFormatLite::TYPE_MESSAGE, + *extension.message_prototype, extension.descriptor); + if (!WireFormatLite::ReadMessage(input, value)) return false; + break; + } + } + } + + return true; +} + +bool ExtensionSet::ParseField(uint32 tag, io::CodedInputStream* input, + const MessageLite* containing_type) { + FieldSkipper skipper; + GeneratedExtensionFinder finder(containing_type); + return ParseField(tag, input, &finder, &skipper); +} + +// Defined in extension_set_heavy.cc. +// bool ExtensionSet::ParseField(uint32 tag, io::CodedInputStream* input, +// const MessageLite* containing_type, +// UnknownFieldSet* unknown_fields) + +bool ExtensionSet::ParseMessageSet(io::CodedInputStream* input, + ExtensionFinder* extension_finder, + FieldSkipper* field_skipper) { + while (true) { + uint32 tag = input->ReadTag(); + switch (tag) { + case 0: + return true; + case WireFormatLite::kMessageSetItemStartTag: + if (!ParseMessageSetItem(input, extension_finder, field_skipper)) { + return false; + } + break; + default: + if (!ParseField(tag, input, extension_finder, field_skipper)) { + return false; + } + break; + } + } +} + +bool ExtensionSet::ParseMessageSet(io::CodedInputStream* input, + const MessageLite* containing_type) { + FieldSkipper skipper; + GeneratedExtensionFinder finder(containing_type); + return ParseMessageSet(input, &finder, &skipper); +} + +// Defined in extension_set_heavy.cc. +// bool ExtensionSet::ParseMessageSet(io::CodedInputStream* input, +// const MessageLite* containing_type, +// UnknownFieldSet* unknown_fields); + +bool ExtensionSet::ParseMessageSetItem(io::CodedInputStream* input, + ExtensionFinder* extension_finder, + FieldSkipper* field_skipper) { + // TODO(kenton): It would be nice to share code between this and + // WireFormatLite::ParseAndMergeMessageSetItem(), but I think the + // differences would be hard to factor out. + + // This method parses a group which should contain two fields: + // required int32 type_id = 2; + // required data message = 3; + + // Once we see a type_id, we'll construct a fake tag for this extension + // which is the tag it would have had under the proto2 extensions wire + // format. + uint32 fake_tag = 0; + + // If we see message data before the type_id, we'll append it to this so + // we can parse it later. This will probably never happen in practice, + // as no MessageSet encoder I know of writes the message before the type ID. + // But, it's technically valid so we should allow it. + // TODO(kenton): Use a Cord instead? Do I care? 
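+  // For reference, the item being parsed has this shape on the wire
+  // (as described above):
+  //
+  //   repeated group Item = 1 {
+  //     required int32 type_id = 2;   // extension field number
+  //     required bytes message = 3;   // serialized extension payload
+  //   }
+  //
+  // kMessageSetTypeIdTag and kMessageSetMessageTag below are the encoded
+  // tags for fields 2 and 3 of that group.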
+ string message_data; + + while (true) { + uint32 tag = input->ReadTag(); + if (tag == 0) return false; + + switch (tag) { + case WireFormatLite::kMessageSetTypeIdTag: { + uint32 type_id; + if (!input->ReadVarint32(&type_id)) return false; + fake_tag = WireFormatLite::MakeTag(type_id, + WireFormatLite::WIRETYPE_LENGTH_DELIMITED); + + if (!message_data.empty()) { + // We saw some message data before the type_id. Have to parse it + // now. + io::CodedInputStream sub_input( + reinterpret_cast(message_data.data()), + message_data.size()); + if (!ParseField(fake_tag, &sub_input, + extension_finder, field_skipper)) { + return false; + } + message_data.clear(); + } + + break; + } + + case WireFormatLite::kMessageSetMessageTag: { + if (fake_tag == 0) { + // We haven't seen a type_id yet. Append this data to message_data. + string temp; + uint32 length; + if (!input->ReadVarint32(&length)) return false; + if (!input->ReadString(&temp, length)) return false; + message_data.append(temp); + } else { + // Already saw type_id, so we can parse this directly. + if (!ParseField(fake_tag, input, + extension_finder, field_skipper)) { + return false; + } + } + + break; + } + + case WireFormatLite::kMessageSetItemEndTag: { + return true; + } + + default: { + if (!field_skipper->SkipField(input, tag)) return false; + } + } + } +} + +void ExtensionSet::SerializeWithCachedSizes( + int start_field_number, int end_field_number, + io::CodedOutputStream* output) const { + map::const_iterator iter; + for (iter = extensions_.lower_bound(start_field_number); + iter != extensions_.end() && iter->first < end_field_number; + ++iter) { + iter->second.SerializeFieldWithCachedSizes(iter->first, output); + } +} + +void ExtensionSet::SerializeMessageSetWithCachedSizes( + io::CodedOutputStream* output) const { + map::const_iterator iter; + for (iter = extensions_.begin(); iter != extensions_.end(); ++iter) { + iter->second.SerializeMessageSetItemWithCachedSizes(iter->first, output); + } +} + +int ExtensionSet::ByteSize() const { + int total_size = 0; + + for (map::const_iterator iter = extensions_.begin(); + iter != extensions_.end(); ++iter) { + total_size += iter->second.ByteSize(iter->first); + } + + return total_size; +} + +int ExtensionSet::MessageSetByteSize() const { + int total_size = 0; + + for (map::const_iterator iter = extensions_.begin(); + iter != extensions_.end(); ++iter) { + total_size += iter->second.MessageSetItemByteSize(iter->first); + } + + return total_size; +} + +// Defined in extension_set_heavy.cc. 
+// int ExtensionSet::SpaceUsedExcludingSelf() const + +bool ExtensionSet::MaybeNewExtension(int number, + const FieldDescriptor* descriptor, + Extension** result) { + pair::iterator, bool> insert_result = + extensions_.insert(make_pair(number, Extension())); + *result = &insert_result.first->second; + (*result)->descriptor = descriptor; + return insert_result.second; +} + +// =================================================================== +// Methods of ExtensionSet::Extension + +void ExtensionSet::Extension::Clear() { + if (is_repeated) { + switch (cpp_type(type)) { +#define HANDLE_TYPE(UPPERCASE, LOWERCASE) \ + case WireFormatLite::CPPTYPE_##UPPERCASE: \ + repeated_##LOWERCASE##_value->Clear(); \ + break + + HANDLE_TYPE( INT32, int32); + HANDLE_TYPE( INT64, int64); + HANDLE_TYPE( UINT32, uint32); + HANDLE_TYPE( UINT64, uint64); + HANDLE_TYPE( FLOAT, float); + HANDLE_TYPE( DOUBLE, double); + HANDLE_TYPE( BOOL, bool); + HANDLE_TYPE( ENUM, enum); + HANDLE_TYPE( STRING, string); + HANDLE_TYPE(MESSAGE, message); +#undef HANDLE_TYPE + } + } else { + if (!is_cleared) { + switch (cpp_type(type)) { + case WireFormatLite::CPPTYPE_STRING: + string_value->clear(); + break; + case WireFormatLite::CPPTYPE_MESSAGE: + message_value->Clear(); + break; + default: + // No need to do anything. Get*() will return the default value + // as long as is_cleared is true and Set*() will overwrite the + // previous value. + break; + } + + is_cleared = true; + } + } +} + +void ExtensionSet::Extension::SerializeFieldWithCachedSizes( + int number, + io::CodedOutputStream* output) const { + if (is_repeated) { + if (is_packed) { + if (cached_size == 0) return; + + WireFormatLite::WriteTag(number, + WireFormatLite::WIRETYPE_LENGTH_DELIMITED, output); + output->WriteVarint32(cached_size); + + switch (real_type(type)) { +#define HANDLE_TYPE(UPPERCASE, CAMELCASE, LOWERCASE) \ + case WireFormatLite::TYPE_##UPPERCASE: \ + for (int i = 0; i < repeated_##LOWERCASE##_value->size(); i++) { \ + WireFormatLite::Write##CAMELCASE##NoTag( \ + repeated_##LOWERCASE##_value->Get(i), output); \ + } \ + break + + HANDLE_TYPE( INT32, Int32, int32); + HANDLE_TYPE( INT64, Int64, int64); + HANDLE_TYPE( UINT32, UInt32, uint32); + HANDLE_TYPE( UINT64, UInt64, uint64); + HANDLE_TYPE( SINT32, SInt32, int32); + HANDLE_TYPE( SINT64, SInt64, int64); + HANDLE_TYPE( FIXED32, Fixed32, uint32); + HANDLE_TYPE( FIXED64, Fixed64, uint64); + HANDLE_TYPE(SFIXED32, SFixed32, int32); + HANDLE_TYPE(SFIXED64, SFixed64, int64); + HANDLE_TYPE( FLOAT, Float, float); + HANDLE_TYPE( DOUBLE, Double, double); + HANDLE_TYPE( BOOL, Bool, bool); + HANDLE_TYPE( ENUM, Enum, enum); +#undef HANDLE_TYPE + + case WireFormatLite::TYPE_STRING: + case WireFormatLite::TYPE_BYTES: + case WireFormatLite::TYPE_GROUP: + case WireFormatLite::TYPE_MESSAGE: + GOOGLE_LOG(FATAL) << "Non-primitive types can't be packed."; + break; + } + } else { + switch (real_type(type)) { +#define HANDLE_TYPE(UPPERCASE, CAMELCASE, LOWERCASE) \ + case WireFormatLite::TYPE_##UPPERCASE: \ + for (int i = 0; i < repeated_##LOWERCASE##_value->size(); i++) { \ + WireFormatLite::Write##CAMELCASE(number, \ + repeated_##LOWERCASE##_value->Get(i), output); \ + } \ + break + + HANDLE_TYPE( INT32, Int32, int32); + HANDLE_TYPE( INT64, Int64, int64); + HANDLE_TYPE( UINT32, UInt32, uint32); + HANDLE_TYPE( UINT64, UInt64, uint64); + HANDLE_TYPE( SINT32, SInt32, int32); + HANDLE_TYPE( SINT64, SInt64, int64); + HANDLE_TYPE( FIXED32, Fixed32, uint32); + HANDLE_TYPE( FIXED64, Fixed64, uint64); + HANDLE_TYPE(SFIXED32, 
SFixed32, int32); + HANDLE_TYPE(SFIXED64, SFixed64, int64); + HANDLE_TYPE( FLOAT, Float, float); + HANDLE_TYPE( DOUBLE, Double, double); + HANDLE_TYPE( BOOL, Bool, bool); + HANDLE_TYPE( STRING, String, string); + HANDLE_TYPE( BYTES, Bytes, string); + HANDLE_TYPE( ENUM, Enum, enum); + HANDLE_TYPE( GROUP, Group, message); + HANDLE_TYPE( MESSAGE, Message, message); +#undef HANDLE_TYPE + } + } + } else if (!is_cleared) { + switch (real_type(type)) { +#define HANDLE_TYPE(UPPERCASE, CAMELCASE, VALUE) \ + case WireFormatLite::TYPE_##UPPERCASE: \ + WireFormatLite::Write##CAMELCASE(number, VALUE, output); \ + break + + HANDLE_TYPE( INT32, Int32, int32_value); + HANDLE_TYPE( INT64, Int64, int64_value); + HANDLE_TYPE( UINT32, UInt32, uint32_value); + HANDLE_TYPE( UINT64, UInt64, uint64_value); + HANDLE_TYPE( SINT32, SInt32, int32_value); + HANDLE_TYPE( SINT64, SInt64, int64_value); + HANDLE_TYPE( FIXED32, Fixed32, uint32_value); + HANDLE_TYPE( FIXED64, Fixed64, uint64_value); + HANDLE_TYPE(SFIXED32, SFixed32, int32_value); + HANDLE_TYPE(SFIXED64, SFixed64, int64_value); + HANDLE_TYPE( FLOAT, Float, float_value); + HANDLE_TYPE( DOUBLE, Double, double_value); + HANDLE_TYPE( BOOL, Bool, bool_value); + HANDLE_TYPE( STRING, String, *string_value); + HANDLE_TYPE( BYTES, Bytes, *string_value); + HANDLE_TYPE( ENUM, Enum, enum_value); + HANDLE_TYPE( GROUP, Group, *message_value); + HANDLE_TYPE( MESSAGE, Message, *message_value); +#undef HANDLE_TYPE + } + } +} + +void ExtensionSet::Extension::SerializeMessageSetItemWithCachedSizes( + int number, + io::CodedOutputStream* output) const { + if (type != WireFormatLite::TYPE_MESSAGE || is_repeated) { + // Not a valid MessageSet extension, but serialize it the normal way. + SerializeFieldWithCachedSizes(number, output); + return; + } + + if (is_cleared) return; + + // Start group. + output->WriteTag(WireFormatLite::kMessageSetItemStartTag); + + // Write type ID. + WireFormatLite::WriteUInt32(WireFormatLite::kMessageSetTypeIdNumber, + number, + output); + // Write message. + WireFormatLite::WriteMessageMaybeToArray( + WireFormatLite::kMessageSetMessageNumber, + *message_value, + output); + + // End group. + output->WriteTag(WireFormatLite::kMessageSetItemEndTag); +} + +int ExtensionSet::Extension::ByteSize(int number) const { + int result = 0; + + if (is_repeated) { + if (is_packed) { + switch (real_type(type)) { +#define HANDLE_TYPE(UPPERCASE, CAMELCASE, LOWERCASE) \ + case WireFormatLite::TYPE_##UPPERCASE: \ + for (int i = 0; i < repeated_##LOWERCASE##_value->size(); i++) { \ + result += WireFormatLite::CAMELCASE##Size( \ + repeated_##LOWERCASE##_value->Get(i)); \ + } \ + break + + HANDLE_TYPE( INT32, Int32, int32); + HANDLE_TYPE( INT64, Int64, int64); + HANDLE_TYPE( UINT32, UInt32, uint32); + HANDLE_TYPE( UINT64, UInt64, uint64); + HANDLE_TYPE( SINT32, SInt32, int32); + HANDLE_TYPE( SINT64, SInt64, int64); + HANDLE_TYPE( ENUM, Enum, enum); +#undef HANDLE_TYPE + + // Stuff with fixed size. 
+#define HANDLE_TYPE(UPPERCASE, CAMELCASE, LOWERCASE) \ + case WireFormatLite::TYPE_##UPPERCASE: \ + result += WireFormatLite::k##CAMELCASE##Size * \ + repeated_##LOWERCASE##_value->size(); \ + break + HANDLE_TYPE( FIXED32, Fixed32, uint32); + HANDLE_TYPE( FIXED64, Fixed64, uint64); + HANDLE_TYPE(SFIXED32, SFixed32, int32); + HANDLE_TYPE(SFIXED64, SFixed64, int64); + HANDLE_TYPE( FLOAT, Float, float); + HANDLE_TYPE( DOUBLE, Double, double); + HANDLE_TYPE( BOOL, Bool, bool); +#undef HANDLE_TYPE + + case WireFormatLite::TYPE_STRING: + case WireFormatLite::TYPE_BYTES: + case WireFormatLite::TYPE_GROUP: + case WireFormatLite::TYPE_MESSAGE: + GOOGLE_LOG(FATAL) << "Non-primitive types can't be packed."; + break; + } + + cached_size = result; + if (result > 0) { + result += io::CodedOutputStream::VarintSize32(result); + result += io::CodedOutputStream::VarintSize32( + WireFormatLite::MakeTag(number, + WireFormatLite::WIRETYPE_LENGTH_DELIMITED)); + } + } else { + int tag_size = WireFormatLite::TagSize(number, real_type(type)); + + switch (real_type(type)) { +#define HANDLE_TYPE(UPPERCASE, CAMELCASE, LOWERCASE) \ + case WireFormatLite::TYPE_##UPPERCASE: \ + result += tag_size * repeated_##LOWERCASE##_value->size(); \ + for (int i = 0; i < repeated_##LOWERCASE##_value->size(); i++) { \ + result += WireFormatLite::CAMELCASE##Size( \ + repeated_##LOWERCASE##_value->Get(i)); \ + } \ + break + + HANDLE_TYPE( INT32, Int32, int32); + HANDLE_TYPE( INT64, Int64, int64); + HANDLE_TYPE( UINT32, UInt32, uint32); + HANDLE_TYPE( UINT64, UInt64, uint64); + HANDLE_TYPE( SINT32, SInt32, int32); + HANDLE_TYPE( SINT64, SInt64, int64); + HANDLE_TYPE( STRING, String, string); + HANDLE_TYPE( BYTES, Bytes, string); + HANDLE_TYPE( ENUM, Enum, enum); + HANDLE_TYPE( GROUP, Group, message); + HANDLE_TYPE( MESSAGE, Message, message); +#undef HANDLE_TYPE + + // Stuff with fixed size. +#define HANDLE_TYPE(UPPERCASE, CAMELCASE, LOWERCASE) \ + case WireFormatLite::TYPE_##UPPERCASE: \ + result += (tag_size + WireFormatLite::k##CAMELCASE##Size) * \ + repeated_##LOWERCASE##_value->size(); \ + break + HANDLE_TYPE( FIXED32, Fixed32, uint32); + HANDLE_TYPE( FIXED64, Fixed64, uint64); + HANDLE_TYPE(SFIXED32, SFixed32, int32); + HANDLE_TYPE(SFIXED64, SFixed64, int64); + HANDLE_TYPE( FLOAT, Float, float); + HANDLE_TYPE( DOUBLE, Double, double); + HANDLE_TYPE( BOOL, Bool, bool); +#undef HANDLE_TYPE + } + } + } else if (!is_cleared) { + result += WireFormatLite::TagSize(number, real_type(type)); + switch (real_type(type)) { +#define HANDLE_TYPE(UPPERCASE, CAMELCASE, LOWERCASE) \ + case WireFormatLite::TYPE_##UPPERCASE: \ + result += WireFormatLite::CAMELCASE##Size(LOWERCASE); \ + break + + HANDLE_TYPE( INT32, Int32, int32_value); + HANDLE_TYPE( INT64, Int64, int64_value); + HANDLE_TYPE( UINT32, UInt32, uint32_value); + HANDLE_TYPE( UINT64, UInt64, uint64_value); + HANDLE_TYPE( SINT32, SInt32, int32_value); + HANDLE_TYPE( SINT64, SInt64, int64_value); + HANDLE_TYPE( STRING, String, *string_value); + HANDLE_TYPE( BYTES, Bytes, *string_value); + HANDLE_TYPE( ENUM, Enum, enum_value); + HANDLE_TYPE( GROUP, Group, *message_value); + HANDLE_TYPE( MESSAGE, Message, *message_value); +#undef HANDLE_TYPE + + // Stuff with fixed size. 
+#define HANDLE_TYPE(UPPERCASE, CAMELCASE) \ + case WireFormatLite::TYPE_##UPPERCASE: \ + result += WireFormatLite::k##CAMELCASE##Size; \ + break + HANDLE_TYPE( FIXED32, Fixed32); + HANDLE_TYPE( FIXED64, Fixed64); + HANDLE_TYPE(SFIXED32, SFixed32); + HANDLE_TYPE(SFIXED64, SFixed64); + HANDLE_TYPE( FLOAT, Float); + HANDLE_TYPE( DOUBLE, Double); + HANDLE_TYPE( BOOL, Bool); +#undef HANDLE_TYPE + } + } + + return result; +} + +int ExtensionSet::Extension::MessageSetItemByteSize(int number) const { + if (type != WireFormatLite::TYPE_MESSAGE || is_repeated) { + // Not a valid MessageSet extension, but compute the byte size for it the + // normal way. + return ByteSize(number); + } + + if (is_cleared) return 0; + + int our_size = WireFormatLite::kMessageSetItemTagsSize; + + // type_id + our_size += io::CodedOutputStream::VarintSize32(number); + + // message + int message_size = message_value->ByteSize(); + + our_size += io::CodedOutputStream::VarintSize32(message_size); + our_size += message_size; + + return our_size; +} + +int ExtensionSet::Extension::GetSize() const { + GOOGLE_DCHECK(is_repeated); + switch (cpp_type(type)) { +#define HANDLE_TYPE(UPPERCASE, LOWERCASE) \ + case WireFormatLite::CPPTYPE_##UPPERCASE: \ + return repeated_##LOWERCASE##_value->size() + + HANDLE_TYPE( INT32, int32); + HANDLE_TYPE( INT64, int64); + HANDLE_TYPE( UINT32, uint32); + HANDLE_TYPE( UINT64, uint64); + HANDLE_TYPE( FLOAT, float); + HANDLE_TYPE( DOUBLE, double); + HANDLE_TYPE( BOOL, bool); + HANDLE_TYPE( ENUM, enum); + HANDLE_TYPE( STRING, string); + HANDLE_TYPE(MESSAGE, message); +#undef HANDLE_TYPE + } + + GOOGLE_LOG(FATAL) << "Can't get here."; + return 0; +} + +void ExtensionSet::Extension::Free() { + if (is_repeated) { + switch (cpp_type(type)) { +#define HANDLE_TYPE(UPPERCASE, LOWERCASE) \ + case WireFormatLite::CPPTYPE_##UPPERCASE: \ + delete repeated_##LOWERCASE##_value; \ + break + + HANDLE_TYPE( INT32, int32); + HANDLE_TYPE( INT64, int64); + HANDLE_TYPE( UINT32, uint32); + HANDLE_TYPE( UINT64, uint64); + HANDLE_TYPE( FLOAT, float); + HANDLE_TYPE( DOUBLE, double); + HANDLE_TYPE( BOOL, bool); + HANDLE_TYPE( ENUM, enum); + HANDLE_TYPE( STRING, string); + HANDLE_TYPE(MESSAGE, message); +#undef HANDLE_TYPE + } + } else { + switch (cpp_type(type)) { + case WireFormatLite::CPPTYPE_STRING: + delete string_value; + break; + case WireFormatLite::CPPTYPE_MESSAGE: + delete message_value; + break; + default: + break; + } + } +} + +// Defined in extension_set_heavy.cc. +// int ExtensionSet::Extension::SpaceUsedExcludingSelf() const + +} // namespace internal +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/extension_set.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/extension_set.h new file mode 100644 index 0000000000..ac1ada029f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/extension_set.h @@ -0,0 +1,904 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This header is logically internal, but is made public because it is used +// from protocol-compiler-generated code, which may reside in other components. + +#ifndef GOOGLE_PROTOBUF_EXTENSION_SET_H__ +#define GOOGLE_PROTOBUF_EXTENSION_SET_H__ + +#include +#include +#include +#include + + +#include + +namespace google { + +namespace protobuf { + class Descriptor; // descriptor.h + class FieldDescriptor; // descriptor.h + class DescriptorPool; // descriptor.h + class MessageLite; // message_lite.h + class Message; // message.h + class MessageFactory; // message.h + class UnknownFieldSet; // unknown_field_set.h + namespace io { + class CodedInputStream; // coded_stream.h + class CodedOutputStream; // coded_stream.h + } + namespace internal { + class FieldSkipper; // wire_format_lite.h + class RepeatedPtrFieldBase; // repeated_field.h + } + template class RepeatedField; // repeated_field.h + template class RepeatedPtrField; // repeated_field.h +} + +namespace protobuf { +namespace internal { + +// Used to store values of type WireFormatLite::FieldType without having to +// #include wire_format_lite.h. Also, ensures that we use only one byte to +// store these values, which is important to keep the layout of +// ExtensionSet::Extension small. +typedef uint8 FieldType; + +// A function which, given an integer value, returns true if the number +// matches one of the defined values for the corresponding enum type. This +// is used with RegisterEnumExtension, below. +typedef bool EnumValidityFunc(int number); + +// Version of the above which takes an argument. This is needed to deal with +// extensions that are not compiled in. +typedef bool EnumValidityFuncWithArg(const void* arg, int number); + +// Information about a registered extension. 
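A rough sketch of how this registration machinery is used (an editorial illustration, not part of the vendored source; MyContainingMessage, the field number, and the raw type code are all hypothetical). Protocol-compiler-generated code is expected to make a call along these lines at static-initialization time, via the RegisterExtension() helpers declared further below; the result is recorded as an ExtensionInfo entry like the struct that follows.

    // Hypothetical generated registration for:
    //   extend MyContainingMessage { optional int32 my_ext = 1234; }
    void RegisterMyExtension() {
      ::google::protobuf::internal::ExtensionSet::RegisterExtension(
          &MyContainingMessage::default_instance(),  // containing type key
          1234,                                      // extension field number
          5,                                         // FieldType byte for TYPE_INT32
          false,                                     // is_repeated
          false);                                    // is_packed
    }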
+struct ExtensionInfo { + inline ExtensionInfo() {} + inline ExtensionInfo(FieldType type, bool is_repeated, bool is_packed) + : type(type), is_repeated(is_repeated), is_packed(is_packed), + descriptor(NULL) {} + + FieldType type; + bool is_repeated; + bool is_packed; + + struct EnumValidityCheck { + EnumValidityFuncWithArg* func; + const void* arg; + }; + + union { + EnumValidityCheck enum_validity_check; + const MessageLite* message_prototype; + }; + + // The descriptor for this extension, if one exists and is known. May be + // NULL. Must not be NULL if the descriptor for the extension does not + // live in the same pool as the descriptor for the containing type. + const FieldDescriptor* descriptor; +}; + +// Abstract interface for an object which looks up extension definitions. Used +// when parsing. +class LIBPROTOBUF_EXPORT ExtensionFinder { + public: + virtual ~ExtensionFinder(); + + // Find the extension with the given containing type and number. + virtual bool Find(int number, ExtensionInfo* output) = 0; +}; + +// Implementation of ExtensionFinder which finds extensions defined in .proto +// files which have been compiled into the binary. +class LIBPROTOBUF_EXPORT GeneratedExtensionFinder : public ExtensionFinder { + public: + GeneratedExtensionFinder(const MessageLite* containing_type) + : containing_type_(containing_type) {} + virtual ~GeneratedExtensionFinder() {} + + // Returns true and fills in *output if found, otherwise returns false. + virtual bool Find(int number, ExtensionInfo* output); + + private: + const MessageLite* containing_type_; +}; + +// Note: extension_set_heavy.cc defines DescriptorPoolExtensionFinder for +// finding extensions from a DescriptorPool. + +// This is an internal helper class intended for use within the protocol buffer +// library and generated classes. Clients should not use it directly. Instead, +// use the generated accessors such as GetExtension() of the class being +// extended. +// +// This class manages extensions for a protocol message object. The +// message's HasExtension(), GetExtension(), MutableExtension(), and +// ClearExtension() methods are just thin wrappers around the embedded +// ExtensionSet. When parsing, if a tag number is encountered which is +// inside one of the message type's extension ranges, the tag is passed +// off to the ExtensionSet for parsing. Etc. +class LIBPROTOBUF_EXPORT ExtensionSet { + public: + ExtensionSet(); + ~ExtensionSet(); + + // These are called at startup by protocol-compiler-generated code to + // register known extensions. The registrations are used by ParseField() + // to look up extensions for parsed field numbers. Note that dynamic parsing + // does not use ParseField(); only protocol-compiler-generated parsing + // methods do. + static void RegisterExtension(const MessageLite* containing_type, + int number, FieldType type, + bool is_repeated, bool is_packed); + static void RegisterEnumExtension(const MessageLite* containing_type, + int number, FieldType type, + bool is_repeated, bool is_packed, + EnumValidityFunc* is_valid); + static void RegisterMessageExtension(const MessageLite* containing_type, + int number, FieldType type, + bool is_repeated, bool is_packed, + const MessageLite* prototype); + + // ================================================================= + + // Add all fields which are currently present to the given vector. This + // is useful to implement Reflection::ListFields(). 
+ void AppendToList(const Descriptor* containing_type, + const DescriptorPool* pool, + vector* output) const; + + // ================================================================= + // Accessors + // + // Generated message classes include type-safe templated wrappers around + // these methods. Generally you should use those rather than call these + // directly, unless you are doing low-level memory management. + // + // When calling any of these accessors, the extension number requested + // MUST exist in the DescriptorPool provided to the constructor. Otheriwse, + // the method will fail an assert. Normally, though, you would not call + // these directly; you would either call the generated accessors of your + // message class (e.g. GetExtension()) or you would call the accessors + // of the reflection interface. In both cases, it is impossible to + // trigger this assert failure: the generated accessors only accept + // linked-in extension types as parameters, while the Reflection interface + // requires you to provide the FieldDescriptor describing the extension. + // + // When calling any of these accessors, a protocol-compiler-generated + // implementation of the extension corresponding to the number MUST + // be linked in, and the FieldDescriptor used to refer to it MUST be + // the one generated by that linked-in code. Otherwise, the method will + // die on an assert failure. The message objects returned by the message + // accessors are guaranteed to be of the correct linked-in type. + // + // These methods pretty much match Reflection except that: + // - They're not virtual. + // - They identify fields by number rather than FieldDescriptors. + // - They identify enum values using integers rather than descriptors. + // - Strings provide Mutable() in addition to Set() accessors. + + bool Has(int number) const; + int ExtensionSize(int number) const; // Size of a repeated extension. + FieldType ExtensionType(int number) const; + void ClearExtension(int number); + + // singular fields ------------------------------------------------- + + int32 GetInt32 (int number, int32 default_value) const; + int64 GetInt64 (int number, int64 default_value) const; + uint32 GetUInt32(int number, uint32 default_value) const; + uint64 GetUInt64(int number, uint64 default_value) const; + float GetFloat (int number, float default_value) const; + double GetDouble(int number, double default_value) const; + bool GetBool (int number, bool default_value) const; + int GetEnum (int number, int default_value) const; + const string & GetString (int number, const string& default_value) const; + const MessageLite& GetMessage(int number, + const MessageLite& default_value) const; + const MessageLite& GetMessage(int number, const Descriptor* message_type, + MessageFactory* factory) const; + + // |descriptor| may be NULL so long as it is known that the descriptor for + // the extension lives in the same pool as the descriptor for the containing + // type. 
+#define desc const FieldDescriptor* descriptor // avoid line wrapping + void SetInt32 (int number, FieldType type, int32 value, desc); + void SetInt64 (int number, FieldType type, int64 value, desc); + void SetUInt32(int number, FieldType type, uint32 value, desc); + void SetUInt64(int number, FieldType type, uint64 value, desc); + void SetFloat (int number, FieldType type, float value, desc); + void SetDouble(int number, FieldType type, double value, desc); + void SetBool (int number, FieldType type, bool value, desc); + void SetEnum (int number, FieldType type, int value, desc); + void SetString(int number, FieldType type, const string& value, desc); + string * MutableString (int number, FieldType type, desc); + MessageLite* MutableMessage(int number, FieldType type, + const MessageLite& prototype, desc); + MessageLite* MutableMessage(const FieldDescriptor* decsriptor, + MessageFactory* factory); +#undef desc + + // repeated fields ------------------------------------------------- + + int32 GetRepeatedInt32 (int number, int index) const; + int64 GetRepeatedInt64 (int number, int index) const; + uint32 GetRepeatedUInt32(int number, int index) const; + uint64 GetRepeatedUInt64(int number, int index) const; + float GetRepeatedFloat (int number, int index) const; + double GetRepeatedDouble(int number, int index) const; + bool GetRepeatedBool (int number, int index) const; + int GetRepeatedEnum (int number, int index) const; + const string & GetRepeatedString (int number, int index) const; + const MessageLite& GetRepeatedMessage(int number, int index) const; + + void SetRepeatedInt32 (int number, int index, int32 value); + void SetRepeatedInt64 (int number, int index, int64 value); + void SetRepeatedUInt32(int number, int index, uint32 value); + void SetRepeatedUInt64(int number, int index, uint64 value); + void SetRepeatedFloat (int number, int index, float value); + void SetRepeatedDouble(int number, int index, double value); + void SetRepeatedBool (int number, int index, bool value); + void SetRepeatedEnum (int number, int index, int value); + void SetRepeatedString(int number, int index, const string& value); + string * MutableRepeatedString (int number, int index); + MessageLite* MutableRepeatedMessage(int number, int index); + +#define desc const FieldDescriptor* descriptor // avoid line wrapping + void AddInt32 (int number, FieldType type, bool packed, int32 value, desc); + void AddInt64 (int number, FieldType type, bool packed, int64 value, desc); + void AddUInt32(int number, FieldType type, bool packed, uint32 value, desc); + void AddUInt64(int number, FieldType type, bool packed, uint64 value, desc); + void AddFloat (int number, FieldType type, bool packed, float value, desc); + void AddDouble(int number, FieldType type, bool packed, double value, desc); + void AddBool (int number, FieldType type, bool packed, bool value, desc); + void AddEnum (int number, FieldType type, bool packed, int value, desc); + void AddString(int number, FieldType type, const string& value, desc); + string * AddString (int number, FieldType type, desc); + MessageLite* AddMessage(int number, FieldType type, + const MessageLite& prototype, desc); + MessageLite* AddMessage(const FieldDescriptor* descriptor, + MessageFactory* factory); +#undef desc + + void RemoveLast(int number); + void SwapElements(int number, int index1, int index2); + + // ----------------------------------------------------------------- + // TODO(kenton): Hardcore memory management accessors + + // 
================================================================= + // convenience methods for implementing methods of Message + // + // These could all be implemented in terms of the other methods of this + // class, but providing them here helps keep the generated code size down. + + void Clear(); + void MergeFrom(const ExtensionSet& other); + void Swap(ExtensionSet* other); + bool IsInitialized() const; + + // Parses a single extension from the input. The input should start out + // positioned immediately after the tag. |containing_type| is the default + // instance for the containing message; it is used only to look up the + // extension by number. See RegisterExtension(), above. Unlike the other + // methods of ExtensionSet, this only works for generated message types -- + // it looks up extensions registered using RegisterExtension(). + bool ParseField(uint32 tag, io::CodedInputStream* input, + ExtensionFinder* extension_finder, + FieldSkipper* field_skipper); + + // Specific versions for lite or full messages (constructs the appropriate + // FieldSkipper automatically). + bool ParseField(uint32 tag, io::CodedInputStream* input, + const MessageLite* containing_type); + bool ParseField(uint32 tag, io::CodedInputStream* input, + const Message* containing_type, + UnknownFieldSet* unknown_fields); + + // Parse an entire message in MessageSet format. Such messages have no + // fields, only extensions. + bool ParseMessageSet(io::CodedInputStream* input, + ExtensionFinder* extension_finder, + FieldSkipper* field_skipper); + + // Specific versions for lite or full messages (constructs the appropriate + // FieldSkipper automatically). + bool ParseMessageSet(io::CodedInputStream* input, + const MessageLite* containing_type); + bool ParseMessageSet(io::CodedInputStream* input, + const Message* containing_type, + UnknownFieldSet* unknown_fields); + + // Write all extension fields with field numbers in the range + // [start_field_number, end_field_number) + // to the output stream, using the cached sizes computed when ByteSize() was + // last called. Note that the range bounds are inclusive-exclusive. + void SerializeWithCachedSizes(int start_field_number, + int end_field_number, + io::CodedOutputStream* output) const; + + // Same as SerializeWithCachedSizes, but without any bounds checking. + // The caller must ensure that target has sufficient capacity for the + // serialized extensions. + // + // Returns a pointer past the last written byte. + uint8* SerializeWithCachedSizesToArray(int start_field_number, + int end_field_number, + uint8* target) const; + + // Like above but serializes in MessageSet format. + void SerializeMessageSetWithCachedSizes(io::CodedOutputStream* output) const; + uint8* SerializeMessageSetWithCachedSizesToArray(uint8* target) const; + + // Returns the total serialized size of all the extensions. + int ByteSize() const; + + // Like ByteSize() but uses MessageSet format. + int MessageSetByteSize() const; + + // Returns (an estimate of) the total number of bytes used for storing the + // extensions in memory, excluding sizeof(*this). If the ExtensionSet is + // for a lite message (and thus possibly contains lite messages), the results + // are undefined (might work, might crash, might corrupt data, might not even + // be linked in). It's up to the protocol compiler to avoid calling this on + // such ExtensionSets (easy enough since lite messages don't implement + // SpaceUsed()). 
+  int SpaceUsedExcludingSelf() const;
+
+ private:
+
+  struct Extension {
+    union {
+      int32                 int32_value;
+      int64                 int64_value;
+      uint32                uint32_value;
+      uint64                uint64_value;
+      float                 float_value;
+      double                double_value;
+      bool                  bool_value;
+      int                   enum_value;
+      string*               string_value;
+      MessageLite*          message_value;
+
+      RepeatedField   <int32      >* repeated_int32_value;
+      RepeatedField   <int64      >* repeated_int64_value;
+      RepeatedField   <uint32     >* repeated_uint32_value;
+      RepeatedField   <uint64     >* repeated_uint64_value;
+      RepeatedField   <float      >* repeated_float_value;
+      RepeatedField   <double     >* repeated_double_value;
+      RepeatedField   <bool       >* repeated_bool_value;
+      RepeatedField   <int        >* repeated_enum_value;
+      RepeatedPtrField<string     >* repeated_string_value;
+      RepeatedPtrField<MessageLite>* repeated_message_value;
+    };
+
+    FieldType type;
+    bool is_repeated;
+
+    // For singular types, indicates if the extension is "cleared".  This
+    // happens when an extension is set and then later cleared by the caller.
+    // We want to keep the Extension object around for reuse, so instead of
+    // removing it from the map, we just set is_cleared = true.  This has no
+    // meaning for repeated types; for those, the size of the RepeatedField
+    // simply becomes zero when cleared.
+    bool is_cleared;
+
+    // For repeated types, this indicates if the [packed=true] option is set.
+    bool is_packed;
+
+    // The descriptor for this extension, if one exists and is known.  May be
+    // NULL.  Must not be NULL if the descriptor for the extension does not
+    // live in the same pool as the descriptor for the containing type.
+    const FieldDescriptor* descriptor;
+
+    // For packed fields, the size of the packed data is recorded here when
+    // ByteSize() is called then used during serialization.
+    // TODO(kenton):  Use atomic<int> when C++ supports it.
+    mutable int cached_size;
+
+    // Some helper methods for operations on a single Extension.
+    void SerializeFieldWithCachedSizes(
+        int number,
+        io::CodedOutputStream* output) const;
+    uint8* SerializeFieldWithCachedSizesToArray(
+        int number,
+        uint8* target) const;
+    void SerializeMessageSetItemWithCachedSizes(
+        int number,
+        io::CodedOutputStream* output) const;
+    uint8* SerializeMessageSetItemWithCachedSizesToArray(
+        int number,
+        uint8* target) const;
+    int ByteSize(int number) const;
+    int MessageSetItemByteSize(int number) const;
+    void Clear();
+    int GetSize() const;
+    void Free();
+    int SpaceUsedExcludingSelf() const;
+  };
+
+
+  // Gets the extension with the given number, creating it if it does not
+  // already exist.  Returns true if the extension did not already exist.
+  bool MaybeNewExtension(int number, const FieldDescriptor* descriptor,
+                         Extension** result);
+
+  // Parse a single MessageSet item -- called just after the item group start
+  // tag has been read.
+  bool ParseMessageSetItem(io::CodedInputStream* input,
+                           ExtensionFinder* extension_finder,
+                           FieldSkipper* field_skipper);
+
+
+  // Hack:  RepeatedPtrFieldBase declares ExtensionSet as a friend.  This
+  // friendship should automatically extend to ExtensionSet::Extension, but
+  // unfortunately some older compilers (e.g. GCC 3.4.4) do not implement this
+  // correctly.  So, we must provide helpers for calling methods of that
+  // class.
+
+  // Defined in extension_set_heavy.cc.
+  static inline int RepeatedMessage_SpaceUsedExcludingSelf(
+      RepeatedPtrFieldBase* field);
+
+  // The Extension struct is small enough to be passed by value, so we use it
+  // directly as the value type in the map rather than use pointers.
We use + // a map rather than hash_map here because we expect most ExtensionSets will + // only contain a small number of extensions whereas hash_map is optimized + // for 100 elements or more. Also, we want AppendToList() to order fields + // by field number. + map extensions_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ExtensionSet); +}; + +// These are just for convenience... +inline void ExtensionSet::SetString(int number, FieldType type, + const string& value, + const FieldDescriptor* descriptor) { + MutableString(number, type, descriptor)->assign(value); +} +inline void ExtensionSet::SetRepeatedString(int number, int index, + const string& value) { + MutableRepeatedString(number, index)->assign(value); +} +inline void ExtensionSet::AddString(int number, FieldType type, + const string& value, + const FieldDescriptor* descriptor) { + AddString(number, type, descriptor)->assign(value); +} + +// =================================================================== +// Glue for generated extension accessors + +// ------------------------------------------------------------------- +// Template magic + +// First we have a set of classes representing "type traits" for different +// field types. A type traits class knows how to implement basic accessors +// for extensions of a particular type given an ExtensionSet. The signature +// for a type traits class looks like this: +// +// class TypeTraits { +// public: +// typedef ? ConstType; +// typedef ? MutableType; +// +// static inline ConstType Get(int number, const ExtensionSet& set); +// static inline void Set(int number, ConstType value, ExtensionSet* set); +// static inline MutableType Mutable(int number, ExtensionSet* set); +// +// // Variants for repeated fields. +// static inline ConstType Get(int number, const ExtensionSet& set, +// int index); +// static inline void Set(int number, int index, +// ConstType value, ExtensionSet* set); +// static inline MutableType Mutable(int number, int index, +// ExtensionSet* set); +// static inline void Add(int number, ConstType value, ExtensionSet* set); +// static inline MutableType Add(int number, ExtensionSet* set); +// }; +// +// Not all of these methods make sense for all field types. For example, the +// "Mutable" methods only make sense for strings and messages, and the +// repeated methods only make sense for repeated types. So, each type +// traits class implements only the set of methods from this signature that it +// actually supports. This will cause a compiler error if the user tries to +// access an extension using a method that doesn't make sense for its type. +// For example, if "foo" is an extension of type "optional int32", then if you +// try to write code like: +// my_message.MutableExtension(foo) +// you will get a compile error because PrimitiveTypeTraits does not +// have a "Mutable()" method. + +// ------------------------------------------------------------------- +// PrimitiveTypeTraits + +// Since the ExtensionSet has different methods for each primitive type, +// we must explicitly define the methods of the type traits class for each +// known type. 
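As a concrete illustration of the pattern just described (an editorial sketch, not part of the vendored source; 1234 and 1235 are made-up field numbers and `set` stands for some message's embedded ExtensionSet), once the specializations below are instantiated a caller can write:

    // Read a singular int32 extension, falling back to 0 if it is unset.
    int32 v = PrimitiveTypeTraits<int32>::Get(1234, set, 0);
    // Overwrite it; 5 is the FieldType byte for TYPE_INT32.
    PrimitiveTypeTraits<int32>::Set(1234, 5, v + 1, &set);
    // Append to a repeated, packed int32 extension under another field number.
    RepeatedPrimitiveTypeTraits<int32>::Add(1235, 5, true /* packed */, 7, &set);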
+template <typename Type>
+class PrimitiveTypeTraits {
+ public:
+  typedef Type ConstType;
+
+  static inline ConstType Get(int number, const ExtensionSet& set,
+                              ConstType default_value);
+  static inline void Set(int number, FieldType field_type,
+                         ConstType value, ExtensionSet* set);
+};
+
+template <typename Type>
+class RepeatedPrimitiveTypeTraits {
+ public:
+  typedef Type ConstType;
+
+  static inline Type Get(int number, const ExtensionSet& set, int index);
+  static inline void Set(int number, int index, Type value, ExtensionSet* set);
+  static inline void Add(int number, FieldType field_type,
+                         bool is_packed, Type value, ExtensionSet* set);
+};
+
+#define PROTOBUF_DEFINE_PRIMITIVE_TYPE(TYPE, METHOD)                        \
+template<> inline TYPE PrimitiveTypeTraits<TYPE>::Get(                      \
+    int number, const ExtensionSet& set, TYPE default_value) {              \
+  return set.Get##METHOD(number, default_value);                            \
+}                                                                           \
+template<> inline void PrimitiveTypeTraits<TYPE>::Set(                      \
+    int number, FieldType field_type, TYPE value, ExtensionSet* set) {      \
+  set->Set##METHOD(number, field_type, value, NULL);                        \
+}                                                                           \
+                                                                            \
+template<> inline TYPE RepeatedPrimitiveTypeTraits<TYPE>::Get(              \
+    int number, const ExtensionSet& set, int index) {                       \
+  return set.GetRepeated##METHOD(number, index);                            \
+}                                                                           \
+template<> inline void RepeatedPrimitiveTypeTraits<TYPE>::Set(              \
+    int number, int index, TYPE value, ExtensionSet* set) {                 \
+  set->SetRepeated##METHOD(number, index, value);                           \
+}                                                                           \
+template<> inline void RepeatedPrimitiveTypeTraits<TYPE>::Add(              \
+    int number, FieldType field_type, bool is_packed,                       \
+    TYPE value, ExtensionSet* set) {                                        \
+  set->Add##METHOD(number, field_type, is_packed, value, NULL);             \
+}
+
+PROTOBUF_DEFINE_PRIMITIVE_TYPE( int32,  Int32)
+PROTOBUF_DEFINE_PRIMITIVE_TYPE( int64,  Int64)
+PROTOBUF_DEFINE_PRIMITIVE_TYPE(uint32, UInt32)
+PROTOBUF_DEFINE_PRIMITIVE_TYPE(uint64, UInt64)
+PROTOBUF_DEFINE_PRIMITIVE_TYPE( float,  Float)
+PROTOBUF_DEFINE_PRIMITIVE_TYPE(double, Double)
+PROTOBUF_DEFINE_PRIMITIVE_TYPE(  bool,   Bool)
+
+#undef PROTOBUF_DEFINE_PRIMITIVE_TYPE
+
+// -------------------------------------------------------------------
+// StringTypeTraits
+
+// Strings support both Set() and Mutable().
+class LIBPROTOBUF_EXPORT StringTypeTraits { + public: + typedef const string& ConstType; + typedef string* MutableType; + + static inline const string& Get(int number, const ExtensionSet& set, + ConstType default_value) { + return set.GetString(number, default_value); + } + static inline void Set(int number, FieldType field_type, + const string& value, ExtensionSet* set) { + set->SetString(number, field_type, value, NULL); + } + static inline string* Mutable(int number, FieldType field_type, + ExtensionSet* set) { + return set->MutableString(number, field_type, NULL); + } +}; + +class LIBPROTOBUF_EXPORT RepeatedStringTypeTraits { + public: + typedef const string& ConstType; + typedef string* MutableType; + + static inline const string& Get(int number, const ExtensionSet& set, + int index) { + return set.GetRepeatedString(number, index); + } + static inline void Set(int number, int index, + const string& value, ExtensionSet* set) { + set->SetRepeatedString(number, index, value); + } + static inline string* Mutable(int number, int index, ExtensionSet* set) { + return set->MutableRepeatedString(number, index); + } + static inline void Add(int number, FieldType field_type, + bool /*is_packed*/, const string& value, + ExtensionSet* set) { + set->AddString(number, field_type, value, NULL); + } + static inline string* Add(int number, FieldType field_type, + ExtensionSet* set) { + return set->AddString(number, field_type, NULL); + } +}; + +// ------------------------------------------------------------------- +// EnumTypeTraits + +// ExtensionSet represents enums using integers internally, so we have to +// static_cast around. +template +class EnumTypeTraits { + public: + typedef Type ConstType; + + static inline ConstType Get(int number, const ExtensionSet& set, + ConstType default_value) { + return static_cast(set.GetEnum(number, default_value)); + } + static inline void Set(int number, FieldType field_type, + ConstType value, ExtensionSet* set) { + GOOGLE_DCHECK(IsValid(value)); + set->SetEnum(number, field_type, value, NULL); + } +}; + +template +class RepeatedEnumTypeTraits { + public: + typedef Type ConstType; + + static inline ConstType Get(int number, const ExtensionSet& set, int index) { + return static_cast(set.GetRepeatedEnum(number, index)); + } + static inline void Set(int number, int index, + ConstType value, ExtensionSet* set) { + GOOGLE_DCHECK(IsValid(value)); + set->SetRepeatedEnum(number, index, value); + } + static inline void Add(int number, FieldType field_type, + bool is_packed, ConstType value, ExtensionSet* set) { + GOOGLE_DCHECK(IsValid(value)); + set->AddEnum(number, field_type, is_packed, value, NULL); + } +}; + +// ------------------------------------------------------------------- +// MessageTypeTraits + +// ExtensionSet guarantees that when manipulating extensions with message +// types, the implementation used will be the compiled-in class representing +// that type. So, we can static_cast down to the exact type we expect. 
+template +class MessageTypeTraits { + public: + typedef const Type& ConstType; + typedef Type* MutableType; + + static inline ConstType Get(int number, const ExtensionSet& set, + ConstType default_value) { + return static_cast( + set.GetMessage(number, default_value)); + } + static inline MutableType Mutable(int number, FieldType field_type, + ExtensionSet* set) { + return static_cast( + set->MutableMessage(number, field_type, Type::default_instance(), NULL)); + } +}; + +template +class RepeatedMessageTypeTraits { + public: + typedef const Type& ConstType; + typedef Type* MutableType; + + static inline ConstType Get(int number, const ExtensionSet& set, int index) { + return static_cast(set.GetRepeatedMessage(number, index)); + } + static inline MutableType Mutable(int number, int index, ExtensionSet* set) { + return static_cast(set->MutableRepeatedMessage(number, index)); + } + static inline MutableType Add(int number, FieldType field_type, + ExtensionSet* set) { + return static_cast( + set->AddMessage(number, field_type, Type::default_instance(), NULL)); + } +}; + +// ------------------------------------------------------------------- +// ExtensionIdentifier + +// This is the type of actual extension objects. E.g. if you have: +// extends Foo with optional int32 bar = 1234; +// then "bar" will be defined in C++ as: +// ExtensionIdentifier, 1, false> bar(1234); +// +// Note that we could, in theory, supply the field number as a template +// parameter, and thus make an instance of ExtensionIdentifier have no +// actual contents. However, if we did that, then using at extension +// identifier would not necessarily cause the compiler to output any sort +// of reference to any simple defined in the extension's .pb.o file. Some +// linkers will actually drop object files that are not explicitly referenced, +// but that would be bad because it would cause this extension to not be +// registered at static initialization, and therefore using it would crash. + +template +class ExtensionIdentifier { + public: + typedef TypeTraitsType TypeTraits; + typedef ExtendeeType Extendee; + + ExtensionIdentifier(int number, typename TypeTraits::ConstType default_value) + : number_(number), default_value_(default_value) {} + inline int number() const { return number_; } + typename TypeTraits::ConstType default_value() const { + return default_value_; + } + + private: + const int number_; + typename TypeTraits::ConstType default_value_; +}; + +// ------------------------------------------------------------------- +// Generated accessors + +// This macro should be expanded in the context of a generated type which +// has extensions. +// +// We use "_proto_TypeTraits" as a type name below because "TypeTraits" +// causes problems if the class has a nested message or enum type with that +// name and "_TypeTraits" is technically reserved for the C++ library since +// it starts with an underscore followed by a capital letter. 
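To show how these pieces are meant to fit together, here is a rough sketch of the glue the protocol compiler is expected to emit for a message with an extension range (an editorial illustration only; MyMessage, bar, and the numbers are hypothetical, and real generated code contains considerably more than this). It relies on the accessor macro defined next.

    // For: message MyMessage { extensions 100 to 199; }
    class MyMessage : public ::google::protobuf::Message {
     public:
      GOOGLE_PROTOBUF_EXTENSION_ACCESSORS(MyMessage)
      // ... regular generated members ...
     private:
      ::google::protobuf::internal::ExtensionSet _extensions_;
    };

    // For: extend MyMessage { optional int32 bar = 150; }
    extern ::google::protobuf::internal::ExtensionIdentifier<
        MyMessage, ::google::protobuf::internal::PrimitiveTypeTraits<int32>,
        5 /* TYPE_INT32 */, false /* not packed */> bar;

    // Call sites then read naturally:
    //   msg.SetExtension(bar, 42);
    //   int32 value = msg.GetExtension(bar);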
+#define GOOGLE_PROTOBUF_EXTENSION_ACCESSORS(CLASSNAME) \ + /* Has, Size, Clear */ \ + template \ + inline bool HasExtension( \ + const ::google::protobuf::internal::ExtensionIdentifier< \ + CLASSNAME, _proto_TypeTraits, field_type, is_packed>& id) const { \ + return _extensions_.Has(id.number()); \ + } \ + \ + template \ + inline void ClearExtension( \ + const ::google::protobuf::internal::ExtensionIdentifier< \ + CLASSNAME, _proto_TypeTraits, field_type, is_packed>& id) { \ + _extensions_.ClearExtension(id.number()); \ + } \ + \ + template \ + inline int ExtensionSize( \ + const ::google::protobuf::internal::ExtensionIdentifier< \ + CLASSNAME, _proto_TypeTraits, field_type, is_packed>& id) const { \ + return _extensions_.ExtensionSize(id.number()); \ + } \ + \ + /* Singular accessors */ \ + template \ + inline typename _proto_TypeTraits::ConstType GetExtension( \ + const ::google::protobuf::internal::ExtensionIdentifier< \ + CLASSNAME, _proto_TypeTraits, field_type, is_packed>& id) const { \ + return _proto_TypeTraits::Get(id.number(), _extensions_, \ + id.default_value()); \ + } \ + \ + template \ + inline typename _proto_TypeTraits::MutableType MutableExtension( \ + const ::google::protobuf::internal::ExtensionIdentifier< \ + CLASSNAME, _proto_TypeTraits, field_type, is_packed>& id) { \ + return _proto_TypeTraits::Mutable(id.number(), field_type, &_extensions_);\ + } \ + \ + template \ + inline void SetExtension( \ + const ::google::protobuf::internal::ExtensionIdentifier< \ + CLASSNAME, _proto_TypeTraits, field_type, is_packed>& id, \ + typename _proto_TypeTraits::ConstType value) { \ + _proto_TypeTraits::Set(id.number(), field_type, value, &_extensions_); \ + } \ + \ + /* Repeated accessors */ \ + template \ + inline typename _proto_TypeTraits::ConstType GetExtension( \ + const ::google::protobuf::internal::ExtensionIdentifier< \ + CLASSNAME, _proto_TypeTraits, field_type, is_packed>& id, \ + int index) const { \ + return _proto_TypeTraits::Get(id.number(), _extensions_, index); \ + } \ + \ + template \ + inline typename _proto_TypeTraits::MutableType MutableExtension( \ + const ::google::protobuf::internal::ExtensionIdentifier< \ + CLASSNAME, _proto_TypeTraits, field_type, is_packed>& id, \ + int index) { \ + return _proto_TypeTraits::Mutable(id.number(), index, &_extensions_); \ + } \ + \ + template \ + inline void SetExtension( \ + const ::google::protobuf::internal::ExtensionIdentifier< \ + CLASSNAME, _proto_TypeTraits, field_type, is_packed>& id, \ + int index, typename _proto_TypeTraits::ConstType value) { \ + _proto_TypeTraits::Set(id.number(), index, value, &_extensions_); \ + } \ + \ + template \ + inline typename _proto_TypeTraits::MutableType AddExtension( \ + const ::google::protobuf::internal::ExtensionIdentifier< \ + CLASSNAME, _proto_TypeTraits, field_type, is_packed>& id) { \ + return _proto_TypeTraits::Add(id.number(), field_type, &_extensions_); \ + } \ + \ + template \ + inline void AddExtension( \ + const ::google::protobuf::internal::ExtensionIdentifier< \ + CLASSNAME, _proto_TypeTraits, field_type, is_packed>& id, \ + typename _proto_TypeTraits::ConstType value) { \ + _proto_TypeTraits::Add(id.number(), field_type, is_packed, \ + value, &_extensions_); \ + } + +} // namespace internal +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_EXTENSION_SET_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/extension_set_heavy.cc 
b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/extension_set_heavy.cc new file mode 100644 index 0000000000..2721f15dcd --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/extension_set_heavy.cc @@ -0,0 +1,457 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Contains methods defined in extension_set.h which cannot be part of the +// lite library because they use descriptors or reflection. + +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace internal { + +// Implementation of ExtensionFinder which finds extensions in a given +// DescriptorPool, using the given MessageFactory to construct sub-objects. +// This class is implemented in extension_set_heavy.cc. +class DescriptorPoolExtensionFinder : public ExtensionFinder { + public: + DescriptorPoolExtensionFinder(const DescriptorPool* pool, + MessageFactory* factory, + const Descriptor* containing_type) + : pool_(pool), factory_(factory), containing_type_(containing_type) {} + virtual ~DescriptorPoolExtensionFinder() {} + + virtual bool Find(int number, ExtensionInfo* output); + + private: + const DescriptorPool* pool_; + MessageFactory* factory_; + const Descriptor* containing_type_; +}; + +void ExtensionSet::AppendToList(const Descriptor* containing_type, + const DescriptorPool* pool, + vector* output) const { + for (map::const_iterator iter = extensions_.begin(); + iter != extensions_.end(); ++iter) { + bool has = false; + if (iter->second.is_repeated) { + has = iter->second.GetSize() > 0; + } else { + has = !iter->second.is_cleared; + } + + if (has) { + // TODO(kenton): Looking up each field by number is somewhat unfortunate. 
+ // Is there a better way? The problem is that descriptors are lazily- + // initialized, so they might not even be constructed until + // AppendToList() is called. + + if (iter->second.descriptor == NULL) { + output->push_back(pool->FindExtensionByNumber( + containing_type, iter->first)); + } else { + output->push_back(iter->second.descriptor); + } + } + } +} + +inline FieldDescriptor::Type real_type(FieldType type) { + GOOGLE_DCHECK(type > 0 && type <= FieldDescriptor::MAX_TYPE); + return static_cast(type); +} + +inline FieldDescriptor::CppType cpp_type(FieldType type) { + return FieldDescriptor::TypeToCppType( + static_cast(type)); +} + +#define GOOGLE_DCHECK_TYPE(EXTENSION, LABEL, CPPTYPE) \ + GOOGLE_DCHECK_EQ((EXTENSION).is_repeated ? FieldDescriptor::LABEL_REPEATED \ + : FieldDescriptor::LABEL_OPTIONAL, \ + FieldDescriptor::LABEL_##LABEL); \ + GOOGLE_DCHECK_EQ(cpp_type((EXTENSION).type), FieldDescriptor::CPPTYPE_##CPPTYPE) + +const MessageLite& ExtensionSet::GetMessage(int number, + const Descriptor* message_type, + MessageFactory* factory) const { + map::const_iterator iter = extensions_.find(number); + if (iter == extensions_.end() || iter->second.is_cleared) { + // Not present. Return the default value. + return *factory->GetPrototype(message_type); + } else { + GOOGLE_DCHECK_TYPE(iter->second, OPTIONAL, MESSAGE); + return *iter->second.message_value; + } +} + +MessageLite* ExtensionSet::MutableMessage(const FieldDescriptor* descriptor, + MessageFactory* factory) { + Extension* extension; + if (MaybeNewExtension(descriptor->number(), descriptor, &extension)) { + extension->type = descriptor->type(); + GOOGLE_DCHECK_EQ(cpp_type(extension->type), FieldDescriptor::CPPTYPE_MESSAGE); + extension->is_repeated = false; + extension->is_packed = false; + const MessageLite* prototype = + factory->GetPrototype(descriptor->message_type()); + GOOGLE_CHECK(prototype != NULL); + extension->message_value = prototype->New(); + } else { + GOOGLE_DCHECK_TYPE(*extension, OPTIONAL, MESSAGE); + } + extension->is_cleared = false; + return extension->message_value; +} + +MessageLite* ExtensionSet::AddMessage(const FieldDescriptor* descriptor, + MessageFactory* factory) { + Extension* extension; + if (MaybeNewExtension(descriptor->number(), descriptor, &extension)) { + extension->type = descriptor->type(); + GOOGLE_DCHECK_EQ(cpp_type(extension->type), FieldDescriptor::CPPTYPE_MESSAGE); + extension->is_repeated = true; + extension->repeated_message_value = + new RepeatedPtrField(); + } else { + GOOGLE_DCHECK_TYPE(*extension, REPEATED, MESSAGE); + } + + // RepeatedPtrField does not know how to Add() since it cannot + // allocate an abstract object, so we have to be tricky. 
+ MessageLite* result = extension->repeated_message_value + ->AddFromCleared >(); + if (result == NULL) { + const MessageLite* prototype; + if (extension->repeated_message_value->size() == 0) { + prototype = factory->GetPrototype(descriptor->message_type()); + GOOGLE_CHECK(prototype != NULL); + } else { + prototype = &extension->repeated_message_value->Get(0); + } + result = prototype->New(); + extension->repeated_message_value->AddAllocated(result); + } + return result; +} + +static bool ValidateEnumUsingDescriptor(const void* arg, int number) { + return reinterpret_cast(arg) + ->FindValueByNumber(number) != NULL; +} + +bool DescriptorPoolExtensionFinder::Find(int number, ExtensionInfo* output) { + const FieldDescriptor* extension = + pool_->FindExtensionByNumber(containing_type_, number); + if (extension == NULL) { + return false; + } else { + output->type = extension->type(); + output->is_repeated = extension->is_repeated(); + output->is_packed = extension->options().packed(); + output->descriptor = extension; + if (extension->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + output->message_prototype = + factory_->GetPrototype(extension->message_type()); + GOOGLE_CHECK(output->message_prototype != NULL) + << "Extension factory's GetPrototype() returned NULL for extension: " + << extension->full_name(); + } else if (extension->cpp_type() == FieldDescriptor::CPPTYPE_ENUM) { + output->enum_validity_check.func = ValidateEnumUsingDescriptor; + output->enum_validity_check.arg = extension->enum_type(); + } + + return true; + } +} + +bool ExtensionSet::ParseField(uint32 tag, io::CodedInputStream* input, + const Message* containing_type, + UnknownFieldSet* unknown_fields) { + UnknownFieldSetFieldSkipper skipper(unknown_fields); + if (input->GetExtensionPool() == NULL) { + GeneratedExtensionFinder finder(containing_type); + return ParseField(tag, input, &finder, &skipper); + } else { + DescriptorPoolExtensionFinder finder(input->GetExtensionPool(), + input->GetExtensionFactory(), + containing_type->GetDescriptor()); + return ParseField(tag, input, &finder, &skipper); + } +} + +bool ExtensionSet::ParseMessageSet(io::CodedInputStream* input, + const Message* containing_type, + UnknownFieldSet* unknown_fields) { + UnknownFieldSetFieldSkipper skipper(unknown_fields); + if (input->GetExtensionPool() == NULL) { + GeneratedExtensionFinder finder(containing_type); + return ParseMessageSet(input, &finder, &skipper); + } else { + DescriptorPoolExtensionFinder finder(input->GetExtensionPool(), + input->GetExtensionFactory(), + containing_type->GetDescriptor()); + return ParseMessageSet(input, &finder, &skipper); + } +} + +int ExtensionSet::SpaceUsedExcludingSelf() const { + int total_size = + extensions_.size() * sizeof(map::value_type); + for (map::const_iterator iter = extensions_.begin(), + end = extensions_.end(); + iter != end; + ++iter) { + total_size += iter->second.SpaceUsedExcludingSelf(); + } + return total_size; +} + +inline int ExtensionSet::RepeatedMessage_SpaceUsedExcludingSelf( + RepeatedPtrFieldBase* field) { + return field->SpaceUsedExcludingSelf >(); +} + +int ExtensionSet::Extension::SpaceUsedExcludingSelf() const { + int total_size = 0; + if (is_repeated) { + switch (cpp_type(type)) { +#define HANDLE_TYPE(UPPERCASE, LOWERCASE) \ + case FieldDescriptor::CPPTYPE_##UPPERCASE: \ + total_size += sizeof(*repeated_##LOWERCASE##_value) + \ + repeated_##LOWERCASE##_value->SpaceUsedExcludingSelf();\ + break + + HANDLE_TYPE( INT32, int32); + HANDLE_TYPE( INT64, int64); + HANDLE_TYPE( UINT32, 
uint32); + HANDLE_TYPE( UINT64, uint64); + HANDLE_TYPE( FLOAT, float); + HANDLE_TYPE( DOUBLE, double); + HANDLE_TYPE( BOOL, bool); + HANDLE_TYPE( ENUM, enum); + HANDLE_TYPE( STRING, string); +#undef HANDLE_TYPE + + case FieldDescriptor::CPPTYPE_MESSAGE: + // repeated_message_value is actually a RepeatedPtrField, + // but MessageLite has no SpaceUsed(), so we must directly call + // RepeatedPtrFieldBase::SpaceUsedExcludingSelf() with a different type + // handler. + total_size += sizeof(*repeated_message_value) + + RepeatedMessage_SpaceUsedExcludingSelf(repeated_message_value); + break; + } + } else { + switch (cpp_type(type)) { + case FieldDescriptor::CPPTYPE_STRING: + total_size += sizeof(*string_value) + + StringSpaceUsedExcludingSelf(*string_value); + break; + case FieldDescriptor::CPPTYPE_MESSAGE: + total_size += down_cast(message_value)->SpaceUsed(); + break; + default: + // No extra storage costs for primitive types. + break; + } + } + return total_size; +} + +// The Serialize*ToArray methods are only needed in the heavy library, as +// the lite library only generates SerializeWithCachedSizes. +uint8* ExtensionSet::SerializeWithCachedSizesToArray( + int start_field_number, int end_field_number, + uint8* target) const { + map::const_iterator iter; + for (iter = extensions_.lower_bound(start_field_number); + iter != extensions_.end() && iter->first < end_field_number; + ++iter) { + target = iter->second.SerializeFieldWithCachedSizesToArray(iter->first, + target); + } + return target; +} + +uint8* ExtensionSet::SerializeMessageSetWithCachedSizesToArray( + uint8* target) const { + map::const_iterator iter; + for (iter = extensions_.begin(); iter != extensions_.end(); ++iter) { + target = iter->second.SerializeMessageSetItemWithCachedSizesToArray( + iter->first, target); + } + return target; +} + +uint8* ExtensionSet::Extension::SerializeFieldWithCachedSizesToArray( + int number, uint8* target) const { + if (is_repeated) { + if (is_packed) { + if (cached_size == 0) return target; + + target = WireFormatLite::WriteTagToArray(number, + WireFormatLite::WIRETYPE_LENGTH_DELIMITED, target); + target = WireFormatLite::WriteInt32NoTagToArray(cached_size, target); + + switch (real_type(type)) { +#define HANDLE_TYPE(UPPERCASE, CAMELCASE, LOWERCASE) \ + case FieldDescriptor::TYPE_##UPPERCASE: \ + for (int i = 0; i < repeated_##LOWERCASE##_value->size(); i++) { \ + target = WireFormatLite::Write##CAMELCASE##NoTagToArray( \ + repeated_##LOWERCASE##_value->Get(i), target); \ + } \ + break + + HANDLE_TYPE( INT32, Int32, int32); + HANDLE_TYPE( INT64, Int64, int64); + HANDLE_TYPE( UINT32, UInt32, uint32); + HANDLE_TYPE( UINT64, UInt64, uint64); + HANDLE_TYPE( SINT32, SInt32, int32); + HANDLE_TYPE( SINT64, SInt64, int64); + HANDLE_TYPE( FIXED32, Fixed32, uint32); + HANDLE_TYPE( FIXED64, Fixed64, uint64); + HANDLE_TYPE(SFIXED32, SFixed32, int32); + HANDLE_TYPE(SFIXED64, SFixed64, int64); + HANDLE_TYPE( FLOAT, Float, float); + HANDLE_TYPE( DOUBLE, Double, double); + HANDLE_TYPE( BOOL, Bool, bool); + HANDLE_TYPE( ENUM, Enum, enum); +#undef HANDLE_TYPE + + case WireFormatLite::TYPE_STRING: + case WireFormatLite::TYPE_BYTES: + case WireFormatLite::TYPE_GROUP: + case WireFormatLite::TYPE_MESSAGE: + GOOGLE_LOG(FATAL) << "Non-primitive types can't be packed."; + break; + } + } else { + switch (real_type(type)) { +#define HANDLE_TYPE(UPPERCASE, CAMELCASE, LOWERCASE) \ + case FieldDescriptor::TYPE_##UPPERCASE: \ + for (int i = 0; i < repeated_##LOWERCASE##_value->size(); i++) { \ + target = 
WireFormatLite::Write##CAMELCASE##ToArray(number, \ + repeated_##LOWERCASE##_value->Get(i), target); \ + } \ + break + + HANDLE_TYPE( INT32, Int32, int32); + HANDLE_TYPE( INT64, Int64, int64); + HANDLE_TYPE( UINT32, UInt32, uint32); + HANDLE_TYPE( UINT64, UInt64, uint64); + HANDLE_TYPE( SINT32, SInt32, int32); + HANDLE_TYPE( SINT64, SInt64, int64); + HANDLE_TYPE( FIXED32, Fixed32, uint32); + HANDLE_TYPE( FIXED64, Fixed64, uint64); + HANDLE_TYPE(SFIXED32, SFixed32, int32); + HANDLE_TYPE(SFIXED64, SFixed64, int64); + HANDLE_TYPE( FLOAT, Float, float); + HANDLE_TYPE( DOUBLE, Double, double); + HANDLE_TYPE( BOOL, Bool, bool); + HANDLE_TYPE( STRING, String, string); + HANDLE_TYPE( BYTES, Bytes, string); + HANDLE_TYPE( ENUM, Enum, enum); + HANDLE_TYPE( GROUP, Group, message); + HANDLE_TYPE( MESSAGE, Message, message); +#undef HANDLE_TYPE + } + } + } else if (!is_cleared) { + switch (real_type(type)) { +#define HANDLE_TYPE(UPPERCASE, CAMELCASE, VALUE) \ + case FieldDescriptor::TYPE_##UPPERCASE: \ + target = WireFormatLite::Write##CAMELCASE##ToArray( \ + number, VALUE, target); \ + break + + HANDLE_TYPE( INT32, Int32, int32_value); + HANDLE_TYPE( INT64, Int64, int64_value); + HANDLE_TYPE( UINT32, UInt32, uint32_value); + HANDLE_TYPE( UINT64, UInt64, uint64_value); + HANDLE_TYPE( SINT32, SInt32, int32_value); + HANDLE_TYPE( SINT64, SInt64, int64_value); + HANDLE_TYPE( FIXED32, Fixed32, uint32_value); + HANDLE_TYPE( FIXED64, Fixed64, uint64_value); + HANDLE_TYPE(SFIXED32, SFixed32, int32_value); + HANDLE_TYPE(SFIXED64, SFixed64, int64_value); + HANDLE_TYPE( FLOAT, Float, float_value); + HANDLE_TYPE( DOUBLE, Double, double_value); + HANDLE_TYPE( BOOL, Bool, bool_value); + HANDLE_TYPE( STRING, String, *string_value); + HANDLE_TYPE( BYTES, Bytes, *string_value); + HANDLE_TYPE( ENUM, Enum, enum_value); + HANDLE_TYPE( GROUP, Group, *message_value); + HANDLE_TYPE( MESSAGE, Message, *message_value); +#undef HANDLE_TYPE + } + } + return target; +} + +uint8* ExtensionSet::Extension::SerializeMessageSetItemWithCachedSizesToArray( + int number, + uint8* target) const { + if (type != WireFormatLite::TYPE_MESSAGE || is_repeated) { + // Not a valid MessageSet extension, but serialize it the normal way. + GOOGLE_LOG(WARNING) << "Invalid message set extension."; + return SerializeFieldWithCachedSizesToArray(number, target); + } + + if (is_cleared) return target; + + // Start group. + target = io::CodedOutputStream::WriteTagToArray( + WireFormatLite::kMessageSetItemStartTag, target); + // Write type ID. + target = WireFormatLite::WriteUInt32ToArray( + WireFormatLite::kMessageSetTypeIdNumber, number, target); + // Write message. + target = WireFormatLite::WriteMessageToArray( + WireFormatLite::kMessageSetMessageNumber, *message_value, target); + // End group. + target = io::CodedOutputStream::WriteTagToArray( + WireFormatLite::kMessageSetItemEndTag, target); + return target; +} + +} // namespace internal +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/extension_set_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/extension_set_unittest.cc new file mode 100644 index 0000000000..000f846c7f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/extension_set_unittest.cc @@ -0,0 +1,642 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace internal { +namespace { + +// This test closely mirrors google/protobuf/compiler/cpp/unittest.cc +// except that it uses extensions rather than regular fields. + +TEST(ExtensionSetTest, Defaults) { + // Check that all default values are set correctly in the initial message. + unittest::TestAllExtensions message; + + TestUtil::ExpectExtensionsClear(message); + + // Messages should return pointers to default instances until first use. + // (This is not checked by ExpectClear() since it is not actually true after + // the fields have been set and then cleared.) + EXPECT_EQ(&unittest::OptionalGroup_extension::default_instance(), + &message.GetExtension(unittest::optionalgroup_extension)); + EXPECT_EQ(&unittest::TestAllTypes::NestedMessage::default_instance(), + &message.GetExtension(unittest::optional_nested_message_extension)); + EXPECT_EQ(&unittest::ForeignMessage::default_instance(), + &message.GetExtension( + unittest::optional_foreign_message_extension)); + EXPECT_EQ(&unittest_import::ImportMessage::default_instance(), + &message.GetExtension(unittest::optional_import_message_extension)); +} + +TEST(ExtensionSetTest, Accessors) { + // Set every field to a unique value then go back and check all those + // values. + unittest::TestAllExtensions message; + + TestUtil::SetAllExtensions(&message); + TestUtil::ExpectAllExtensionsSet(message); + + TestUtil::ModifyRepeatedExtensions(&message); + TestUtil::ExpectRepeatedExtensionsModified(message); +} + +TEST(ExtensionSetTest, Clear) { + // Set every field to a unique value, clear the message, then check that + // it is cleared. 
+ unittest::TestAllExtensions message; + + TestUtil::SetAllExtensions(&message); + message.Clear(); + TestUtil::ExpectExtensionsClear(message); + + // Unlike with the defaults test, we do NOT expect that requesting embedded + // messages will return a pointer to the default instance. Instead, they + // should return the objects that were created when mutable_blah() was + // called. + EXPECT_NE(&unittest::OptionalGroup_extension::default_instance(), + &message.GetExtension(unittest::optionalgroup_extension)); + EXPECT_NE(&unittest::TestAllTypes::NestedMessage::default_instance(), + &message.GetExtension(unittest::optional_nested_message_extension)); + EXPECT_NE(&unittest::ForeignMessage::default_instance(), + &message.GetExtension( + unittest::optional_foreign_message_extension)); + EXPECT_NE(&unittest_import::ImportMessage::default_instance(), + &message.GetExtension(unittest::optional_import_message_extension)); + + // Make sure setting stuff again after clearing works. (This takes slightly + // different code paths since the objects are reused.) + TestUtil::SetAllExtensions(&message); + TestUtil::ExpectAllExtensionsSet(message); +} + +TEST(ExtensionSetTest, ClearOneField) { + // Set every field to a unique value, then clear one value and insure that + // only that one value is cleared. + unittest::TestAllExtensions message; + + TestUtil::SetAllExtensions(&message); + int64 original_value = + message.GetExtension(unittest::optional_int64_extension); + + // Clear the field and make sure it shows up as cleared. + message.ClearExtension(unittest::optional_int64_extension); + EXPECT_FALSE(message.HasExtension(unittest::optional_int64_extension)); + EXPECT_EQ(0, message.GetExtension(unittest::optional_int64_extension)); + + // Other adjacent fields should not be cleared. + EXPECT_TRUE(message.HasExtension(unittest::optional_int32_extension)); + EXPECT_TRUE(message.HasExtension(unittest::optional_uint32_extension)); + + // Make sure if we set it again, then all fields are set. + message.SetExtension(unittest::optional_int64_extension, original_value); + TestUtil::ExpectAllExtensionsSet(message); +} + +TEST(ExtensionSetTest, CopyFrom) { + unittest::TestAllExtensions message1, message2; + string data; + + TestUtil::SetAllExtensions(&message1); + message2.CopyFrom(message1); + TestUtil::ExpectAllExtensionsSet(message2); +} + +TEST(ExtensionSetTest, CopyFromUpcasted) { + unittest::TestAllExtensions message1, message2; + string data; + const Message& upcasted_message = message1; + + TestUtil::SetAllExtensions(&message1); + message2.CopyFrom(upcasted_message); + TestUtil::ExpectAllExtensionsSet(message2); +} + +TEST(ExtensionSetTest, SwapWithEmpty) { + unittest::TestAllExtensions message1, message2; + TestUtil::SetAllExtensions(&message1); + + TestUtil::ExpectAllExtensionsSet(message1); + TestUtil::ExpectExtensionsClear(message2); + message1.Swap(&message2); + TestUtil::ExpectAllExtensionsSet(message2); + TestUtil::ExpectExtensionsClear(message1); +} + +TEST(ExtensionSetTest, SwapWithSelf) { + unittest::TestAllExtensions message; + TestUtil::SetAllExtensions(&message); + + TestUtil::ExpectAllExtensionsSet(message); + message.Swap(&message); + TestUtil::ExpectAllExtensionsSet(message); +} + +TEST(ExtensionSetTest, SerializationToArray) { + // Serialize as TestAllExtensions and parse as TestAllTypes to insure wire + // compatibility of extensions. 
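The accessor tests above rely on extension identifiers generated from unittest.proto. In application code the same pattern looks like the following hedged sketch; the schema, the names Container, answer_ext, and container.pb.h are illustrative placeholders, not part of this patch:

// Illustrative proto2 schema (not part of this patch):
//
//   package demo;
//   message Container { extensions 100 to 199; }
//   extend Container { optional int32 answer_ext = 100; }
//
// The generated C++ then exposes the same extension accessors the tests
// exercise on TestAllExtensions.
#include "container.pb.h"  // hypothetical generated header

void UseExtension(demo::Container* msg) {
  msg->SetExtension(demo::answer_ext, 42);              // write
  bool present = msg->HasExtension(demo::answer_ext);   // presence bit
  int value = msg->GetExtension(demo::answer_ext);      // read (42)
  msg->ClearExtension(demo::answer_ext);                // back to default
  (void)present;
  (void)value;
}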
+ // + // This checks serialization to a flat array by explicitly reserving space in + // the string and calling the generated message's + // SerializeWithCachedSizesToArray. + unittest::TestAllExtensions source; + unittest::TestAllTypes destination; + TestUtil::SetAllExtensions(&source); + int size = source.ByteSize(); + string data; + data.resize(size); + uint8* target = reinterpret_cast(string_as_array(&data)); + uint8* end = source.SerializeWithCachedSizesToArray(target); + EXPECT_EQ(size, end - target); + EXPECT_TRUE(destination.ParseFromString(data)); + TestUtil::ExpectAllFieldsSet(destination); +} + +TEST(ExtensionSetTest, SerializationToStream) { + // Serialize as TestAllExtensions and parse as TestAllTypes to insure wire + // compatibility of extensions. + // + // This checks serialization to an output stream by creating an array output + // stream that can only buffer 1 byte at a time - this prevents the message + // from ever jumping to the fast path, ensuring that serialization happens via + // the CodedOutputStream. + unittest::TestAllExtensions source; + unittest::TestAllTypes destination; + TestUtil::SetAllExtensions(&source); + int size = source.ByteSize(); + string data; + data.resize(size); + { + io::ArrayOutputStream array_stream(string_as_array(&data), size, 1); + io::CodedOutputStream output_stream(&array_stream); + source.SerializeWithCachedSizes(&output_stream); + ASSERT_FALSE(output_stream.HadError()); + } + EXPECT_TRUE(destination.ParseFromString(data)); + TestUtil::ExpectAllFieldsSet(destination); +} + +TEST(ExtensionSetTest, PackedSerializationToArray) { + // Serialize as TestPackedExtensions and parse as TestPackedTypes to insure + // wire compatibility of extensions. + // + // This checks serialization to a flat array by explicitly reserving space in + // the string and calling the generated message's + // SerializeWithCachedSizesToArray. + unittest::TestPackedExtensions source; + unittest::TestPackedTypes destination; + TestUtil::SetPackedExtensions(&source); + int size = source.ByteSize(); + string data; + data.resize(size); + uint8* target = reinterpret_cast(string_as_array(&data)); + uint8* end = source.SerializeWithCachedSizesToArray(target); + EXPECT_EQ(size, end - target); + EXPECT_TRUE(destination.ParseFromString(data)); + TestUtil::ExpectPackedFieldsSet(destination); +} + +TEST(ExtensionSetTest, PackedSerializationToStream) { + // Serialize as TestPackedExtensions and parse as TestPackedTypes to insure + // wire compatibility of extensions. + // + // This checks serialization to an output stream by creating an array output + // stream that can only buffer 1 byte at a time - this prevents the message + // from ever jumping to the fast path, ensuring that serialization happens via + // the CodedOutputStream. + unittest::TestPackedExtensions source; + unittest::TestPackedTypes destination; + TestUtil::SetPackedExtensions(&source); + int size = source.ByteSize(); + string data; + data.resize(size); + { + io::ArrayOutputStream array_stream(string_as_array(&data), size, 1); + io::CodedOutputStream output_stream(&array_stream); + source.SerializeWithCachedSizes(&output_stream); + ASSERT_FALSE(output_stream.HadError()); + } + EXPECT_TRUE(destination.ParseFromString(data)); + TestUtil::ExpectPackedFieldsSet(destination); +} + +TEST(ExtensionSetTest, Parsing) { + // Serialize as TestAllTypes and parse as TestAllExtensions. 
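The two stream-serialization tests above force every write through CodedOutputStream by giving ArrayOutputStream a block size of one byte. The same trick can be shown in isolation, without any generated message; this sketch only assumes the protobuf io headers that ship with the library:

#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>

#include <string>

// Writes two varints through a CodedOutputStream whose backing
// ArrayOutputStream hands out one byte at a time, so every write spans a
// buffer boundary instead of hitting the contiguous fast path.
std::string WriteOneByteAtATime() {
  std::string data(16, '\0');
  int written = 0;
  {
    google::protobuf::io::ArrayOutputStream array_stream(
        &data[0], static_cast<int>(data.size()), /*block_size=*/1);
    google::protobuf::io::CodedOutputStream output(&array_stream);
    output.WriteVarint32(300);  // encodes as 0xac 0x02
    output.WriteVarint32(1);    // encodes as 0x01
    written = static_cast<int>(output.ByteCount());
  }
  data.resize(written);  // keep only the bytes actually produced
  return data;
}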
+ unittest::TestAllTypes source; + unittest::TestAllExtensions destination; + string data; + + TestUtil::SetAllFields(&source); + source.SerializeToString(&data); + EXPECT_TRUE(destination.ParseFromString(data)); + TestUtil::ExpectAllExtensionsSet(destination); +} + +TEST(ExtensionSetTest, PackedParsing) { + // Serialize as TestPackedTypes and parse as TestPackedExtensions. + unittest::TestPackedTypes source; + unittest::TestPackedExtensions destination; + string data; + + TestUtil::SetPackedFields(&source); + source.SerializeToString(&data); + EXPECT_TRUE(destination.ParseFromString(data)); + TestUtil::ExpectPackedExtensionsSet(destination); +} + +TEST(ExtensionSetTest, IsInitialized) { + // Test that IsInitialized() returns false if required fields in nested + // extensions are missing. + unittest::TestAllExtensions message; + + EXPECT_TRUE(message.IsInitialized()); + + message.MutableExtension(unittest::TestRequired::single); + EXPECT_FALSE(message.IsInitialized()); + + message.MutableExtension(unittest::TestRequired::single)->set_a(1); + EXPECT_FALSE(message.IsInitialized()); + message.MutableExtension(unittest::TestRequired::single)->set_b(2); + EXPECT_FALSE(message.IsInitialized()); + message.MutableExtension(unittest::TestRequired::single)->set_c(3); + EXPECT_TRUE(message.IsInitialized()); + + message.AddExtension(unittest::TestRequired::multi); + EXPECT_FALSE(message.IsInitialized()); + + message.MutableExtension(unittest::TestRequired::multi, 0)->set_a(1); + EXPECT_FALSE(message.IsInitialized()); + message.MutableExtension(unittest::TestRequired::multi, 0)->set_b(2); + EXPECT_FALSE(message.IsInitialized()); + message.MutableExtension(unittest::TestRequired::multi, 0)->set_c(3); + EXPECT_TRUE(message.IsInitialized()); +} + +TEST(ExtensionSetTest, MutableString) { + // Test the mutable string accessors. + unittest::TestAllExtensions message; + + message.MutableExtension(unittest::optional_string_extension)->assign("foo"); + EXPECT_TRUE(message.HasExtension(unittest::optional_string_extension)); + EXPECT_EQ("foo", message.GetExtension(unittest::optional_string_extension)); + + message.AddExtension(unittest::repeated_string_extension)->assign("bar"); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_string_extension)); + EXPECT_EQ("bar", + message.GetExtension(unittest::repeated_string_extension, 0)); +} + +TEST(ExtensionSetTest, SpaceUsedExcludingSelf) { + // Scalar primitive extensions should increase the extension set size by a + // minimum of the size of the primitive type. 
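The IsInitialized test above hinges on extensions whose message type (TestRequired) has required fields: the container only reports itself initialized once every nested required field is set. A hedged sketch of that behaviour with an illustrative schema of my own (Holder, Creds, creds_ext and holder.pb.h are placeholders):

// Illustrative proto2 schema (not part of this patch):
//
//   package demo;
//   message Creds { required string user = 1; required string token = 2; }
//   message Holder { extensions 10 to 20; }
//   extend Holder { optional Creds creds_ext = 10; }
//
#include "holder.pb.h"  // hypothetical generated header

bool RequiredFieldsGateInitialization() {
  demo::Holder holder;
  // Touching the extension creates an empty Creds whose required fields
  // are unset, so the container is not initialized yet.
  holder.MutableExtension(demo::creds_ext);
  bool before = holder.IsInitialized();                     // false
  holder.MutableExtension(demo::creds_ext)->set_user("u");
  holder.MutableExtension(demo::creds_ext)->set_token("t");
  bool after = holder.IsInitialized();                      // true
  return !before && after;
}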
+#define TEST_SCALAR_EXTENSIONS_SPACE_USED(type, value) \ + do { \ + unittest::TestAllExtensions message; \ + const int base_size = message.SpaceUsed(); \ + message.SetExtension(unittest::optional_##type##_extension, value); \ + int min_expected_size = base_size + \ + sizeof(message.GetExtension(unittest::optional_##type##_extension)); \ + EXPECT_LE(min_expected_size, message.SpaceUsed()); \ + } while (0) + + TEST_SCALAR_EXTENSIONS_SPACE_USED(int32 , 101); + TEST_SCALAR_EXTENSIONS_SPACE_USED(int64 , 102); + TEST_SCALAR_EXTENSIONS_SPACE_USED(uint32 , 103); + TEST_SCALAR_EXTENSIONS_SPACE_USED(uint64 , 104); + TEST_SCALAR_EXTENSIONS_SPACE_USED(sint32 , 105); + TEST_SCALAR_EXTENSIONS_SPACE_USED(sint64 , 106); + TEST_SCALAR_EXTENSIONS_SPACE_USED(fixed32 , 107); + TEST_SCALAR_EXTENSIONS_SPACE_USED(fixed64 , 108); + TEST_SCALAR_EXTENSIONS_SPACE_USED(sfixed32, 109); + TEST_SCALAR_EXTENSIONS_SPACE_USED(sfixed64, 110); + TEST_SCALAR_EXTENSIONS_SPACE_USED(float , 111); + TEST_SCALAR_EXTENSIONS_SPACE_USED(double , 112); + TEST_SCALAR_EXTENSIONS_SPACE_USED(bool , true); +#undef TEST_SCALAR_EXTENSIONS_SPACE_USED + { + unittest::TestAllExtensions message; + const int base_size = message.SpaceUsed(); + message.SetExtension(unittest::optional_nested_enum_extension, + unittest::TestAllTypes::FOO); + int min_expected_size = base_size + + sizeof(message.GetExtension(unittest::optional_nested_enum_extension)); + EXPECT_LE(min_expected_size, message.SpaceUsed()); + } + { + // Strings may cause extra allocations depending on their length; ensure + // that gets included as well. + unittest::TestAllExtensions message; + const int base_size = message.SpaceUsed(); + const string s("this is a fairly large string that will cause some " + "allocation in order to store it in the extension"); + message.SetExtension(unittest::optional_string_extension, s); + int min_expected_size = base_size + s.length(); + EXPECT_LE(min_expected_size, message.SpaceUsed()); + } + { + // Messages also have additional allocation that need to be counted. + unittest::TestAllExtensions message; + const int base_size = message.SpaceUsed(); + unittest::ForeignMessage foreign; + foreign.set_c(42); + message.MutableExtension(unittest::optional_foreign_message_extension)-> + CopyFrom(foreign); + int min_expected_size = base_size + foreign.SpaceUsed(); + EXPECT_LE(min_expected_size, message.SpaceUsed()); + } + + // Repeated primitive extensions will increase space used by at least a + // RepeatedField, and will cause additional allocations when the array + // gets too big for the initial space. + // This macro: + // - Adds a value to the repeated extension, then clears it, establishing + // the base size. 
+ // - Adds a small number of values, testing that it doesn't increase the + // SpaceUsed() + // - Adds a large number of values (requiring allocation in the repeated + // field), and ensures that that allocation is included in SpaceUsed() +#define TEST_REPEATED_EXTENSIONS_SPACE_USED(type, cpptype, value) \ + do { \ + unittest::TestAllExtensions message; \ + const int base_size = message.SpaceUsed(); \ + int min_expected_size = sizeof(RepeatedField) + base_size; \ + message.AddExtension(unittest::repeated_##type##_extension, value); \ + message.ClearExtension(unittest::repeated_##type##_extension); \ + const int empty_repeated_field_size = message.SpaceUsed(); \ + EXPECT_LE(min_expected_size, empty_repeated_field_size) << #type; \ + message.AddExtension(unittest::repeated_##type##_extension, value); \ + message.AddExtension(unittest::repeated_##type##_extension, value); \ + EXPECT_EQ(empty_repeated_field_size, message.SpaceUsed()) << #type; \ + message.ClearExtension(unittest::repeated_##type##_extension); \ + for (int i = 0; i < 16; ++i) { \ + message.AddExtension(unittest::repeated_##type##_extension, value); \ + } \ + int expected_size = sizeof(cpptype) * 16 + empty_repeated_field_size; \ + EXPECT_EQ(expected_size, message.SpaceUsed()) << #type; \ + } while (0) + + TEST_REPEATED_EXTENSIONS_SPACE_USED(int32 , int32 , 101); + TEST_REPEATED_EXTENSIONS_SPACE_USED(int64 , int64 , 102); + TEST_REPEATED_EXTENSIONS_SPACE_USED(uint32 , uint32, 103); + TEST_REPEATED_EXTENSIONS_SPACE_USED(uint64 , uint64, 104); + TEST_REPEATED_EXTENSIONS_SPACE_USED(sint32 , int32 , 105); + TEST_REPEATED_EXTENSIONS_SPACE_USED(sint64 , int64 , 106); + TEST_REPEATED_EXTENSIONS_SPACE_USED(fixed32 , uint32, 107); + TEST_REPEATED_EXTENSIONS_SPACE_USED(fixed64 , uint64, 108); + TEST_REPEATED_EXTENSIONS_SPACE_USED(sfixed32, int32 , 109); + TEST_REPEATED_EXTENSIONS_SPACE_USED(sfixed64, int64 , 110); + TEST_REPEATED_EXTENSIONS_SPACE_USED(float , float , 111); + TEST_REPEATED_EXTENSIONS_SPACE_USED(double , double, 112); + TEST_REPEATED_EXTENSIONS_SPACE_USED(bool , bool , true); + TEST_REPEATED_EXTENSIONS_SPACE_USED(nested_enum, int, + unittest::TestAllTypes::FOO); +#undef TEST_REPEATED_EXTENSIONS_SPACE_USED + // Repeated strings + { + unittest::TestAllExtensions message; + const int base_size = message.SpaceUsed(); + int min_expected_size = sizeof(RepeatedPtrField) + base_size; + const string value(256, 'x'); + // Once items are allocated, they may stick around even when cleared so + // without the hardcore memory management accessors there isn't a notion of + // the empty repeated field memory usage as there is with primitive types. 
+ for (int i = 0; i < 16; ++i) { + message.AddExtension(unittest::repeated_string_extension, value); + } + min_expected_size += (sizeof(value) + value.size()) * 16; + EXPECT_LE(min_expected_size, message.SpaceUsed()); + } + // Repeated messages + { + unittest::TestAllExtensions message; + const int base_size = message.SpaceUsed(); + int min_expected_size = sizeof(RepeatedPtrField) + + base_size; + unittest::ForeignMessage prototype; + prototype.set_c(2); + for (int i = 0; i < 16; ++i) { + message.AddExtension(unittest::repeated_foreign_message_extension)-> + CopyFrom(prototype); + } + min_expected_size += 16 * prototype.SpaceUsed(); + EXPECT_LE(min_expected_size, message.SpaceUsed()); + } +} + +#ifdef GTEST_HAS_DEATH_TEST + +TEST(ExtensionSetTest, InvalidEnumDeath) { + unittest::TestAllExtensions message; + EXPECT_DEBUG_DEATH( + message.SetExtension(unittest::optional_foreign_enum_extension, + static_cast(53)), + "IsValid"); +} + +#endif // GTEST_HAS_DEATH_TEST + +TEST(ExtensionSetTest, DynamicExtensions) { + // Test adding a dynamic extension to a compiled-in message object. + + FileDescriptorProto dynamic_proto; + dynamic_proto.set_name("dynamic_extensions_test.proto"); + dynamic_proto.add_dependency( + unittest::TestAllExtensions::descriptor()->file()->name()); + dynamic_proto.set_package("dynamic_extensions"); + + // Copy the fields and nested types from TestDynamicExtensions into our new + // proto, converting the fields into extensions. + const Descriptor* template_descriptor = + unittest::TestDynamicExtensions::descriptor(); + DescriptorProto template_descriptor_proto; + template_descriptor->CopyTo(&template_descriptor_proto); + dynamic_proto.mutable_message_type()->MergeFrom( + template_descriptor_proto.nested_type()); + dynamic_proto.mutable_enum_type()->MergeFrom( + template_descriptor_proto.enum_type()); + dynamic_proto.mutable_extension()->MergeFrom( + template_descriptor_proto.field()); + + // For each extension that we added... + for (int i = 0; i < dynamic_proto.extension_size(); i++) { + // Set its extendee to TestAllExtensions. + FieldDescriptorProto* extension = dynamic_proto.mutable_extension(i); + extension->set_extendee( + unittest::TestAllExtensions::descriptor()->full_name()); + + // If the field refers to one of the types nested in TestDynamicExtensions, + // make it refer to the type in our dynamic proto instead. + string prefix = "." + template_descriptor->full_name() + "."; + if (extension->has_type_name()) { + string* type_name = extension->mutable_type_name(); + if (HasPrefixString(*type_name, prefix)) { + type_name->replace(0, prefix.size(), ".dynamic_extensions."); + } + } + } + + // Now build the file, using the generated pool as an underlay. + DescriptorPool dynamic_pool(DescriptorPool::generated_pool()); + const FileDescriptor* file = dynamic_pool.BuildFile(dynamic_proto); + ASSERT_TRUE(file != NULL); + DynamicMessageFactory dynamic_factory(&dynamic_pool); + dynamic_factory.SetDelegateToGeneratedFactory(true); + + // Construct a message that we can parse with the extensions we defined. + // Since the extensions were based off of the fields of TestDynamicExtensions, + // we can use that message to create this test message. 
+ string data; + { + unittest::TestDynamicExtensions message; + message.set_scalar_extension(123); + message.set_enum_extension(unittest::FOREIGN_BAR); + message.set_dynamic_enum_extension( + unittest::TestDynamicExtensions::DYNAMIC_BAZ); + message.mutable_message_extension()->set_c(456); + message.mutable_dynamic_message_extension()->set_dynamic_field(789); + message.add_repeated_extension("foo"); + message.add_repeated_extension("bar"); + message.add_packed_extension(12); + message.add_packed_extension(-34); + message.add_packed_extension(56); + message.add_packed_extension(-78); + + // Also add some unknown fields. + + // An unknown enum value (for a known field). + message.mutable_unknown_fields()->AddVarint( + unittest::TestDynamicExtensions::kDynamicEnumExtensionFieldNumber, + 12345); + // A regular unknown field. + message.mutable_unknown_fields()->AddLengthDelimited(54321, "unknown"); + + message.SerializeToString(&data); + } + + // Now we can parse this using our dynamic extension definitions... + unittest::TestAllExtensions message; + { + io::ArrayInputStream raw_input(data.data(), data.size()); + io::CodedInputStream input(&raw_input); + input.SetExtensionRegistry(&dynamic_pool, &dynamic_factory); + ASSERT_TRUE(message.ParseFromCodedStream(&input)); + ASSERT_TRUE(input.ConsumedEntireMessage()); + } + + // Can we print it? + EXPECT_EQ( + "[dynamic_extensions.scalar_extension]: 123\n" + "[dynamic_extensions.enum_extension]: FOREIGN_BAR\n" + "[dynamic_extensions.dynamic_enum_extension]: DYNAMIC_BAZ\n" + "[dynamic_extensions.message_extension] {\n" + " c: 456\n" + "}\n" + "[dynamic_extensions.dynamic_message_extension] {\n" + " dynamic_field: 789\n" + "}\n" + "[dynamic_extensions.repeated_extension]: \"foo\"\n" + "[dynamic_extensions.repeated_extension]: \"bar\"\n" + "[dynamic_extensions.packed_extension]: 12\n" + "[dynamic_extensions.packed_extension]: -34\n" + "[dynamic_extensions.packed_extension]: 56\n" + "[dynamic_extensions.packed_extension]: -78\n" + "2002: 12345\n" + "54321: \"unknown\"\n", + message.DebugString()); + + // Can we serialize it? + // (Don't use EXPECT_EQ because we don't want to dump raw binary data to the + // terminal on failure.) + EXPECT_TRUE(message.SerializeAsString() == data); + + // What if we parse using the reflection-based parser? + { + unittest::TestAllExtensions message2; + io::ArrayInputStream raw_input(data.data(), data.size()); + io::CodedInputStream input(&raw_input); + input.SetExtensionRegistry(&dynamic_pool, &dynamic_factory); + ASSERT_TRUE(WireFormat::ParseAndMergePartial(&input, &message2)); + ASSERT_TRUE(input.ConsumedEntireMessage()); + EXPECT_EQ(message.DebugString(), message2.DebugString()); + } + + // Are the embedded generated types actually using the generated objects? + { + const FieldDescriptor* message_extension = + file->FindExtensionByName("message_extension"); + ASSERT_TRUE(message_extension != NULL); + const Message& sub_message = + message.GetReflection()->GetMessage(message, message_extension); + const unittest::ForeignMessage* typed_sub_message = + dynamic_cast(&sub_message); + ASSERT_TRUE(typed_sub_message != NULL); + EXPECT_EQ(456, typed_sub_message->c()); + } + + // What does GetMessage() return for the embedded dynamic type if it isn't + // present? 
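The DynamicExtensions test builds descriptors at runtime and layers them over the compiled-in pool. The core of that machinery can be shown on its own: a DescriptorPool constructed with the generated pool as underlay, plus a DynamicMessageFactory delegating to the generated factory, yields Message instances for types that were never compiled. The file, message and field names below are illustrative, and the pool and factory are kept static because they must outlive any message they produce:

#include <google/protobuf/descriptor.h>
#include <google/protobuf/descriptor.pb.h>
#include <google/protobuf/dynamic_message.h>
#include <google/protobuf/message.h>

namespace pb = google::protobuf;

pb::Message* BuildDynamicMessage() {
  // Describe a file containing one message: demo.Thing { optional int32 id = 1; }.
  pb::FileDescriptorProto file_proto;
  file_proto.set_name("demo_dynamic.proto");
  file_proto.set_package("demo");
  pb::DescriptorProto* msg = file_proto.add_message_type();
  msg->set_name("Thing");
  pb::FieldDescriptorProto* field = msg->add_field();
  field->set_name("id");
  field->set_number(1);
  field->set_label(pb::FieldDescriptorProto::LABEL_OPTIONAL);
  field->set_type(pb::FieldDescriptorProto::TYPE_INT32);

  // Build it in a pool that falls back to the compiled-in descriptors.
  static pb::DescriptorPool pool(pb::DescriptorPool::generated_pool());
  static pb::DynamicMessageFactory factory(&pool);
  factory.SetDelegateToGeneratedFactory(true);

  const pb::FileDescriptor* file = pool.BuildFile(file_proto);
  if (file == NULL) return NULL;  // e.g. the file was already registered
  const pb::Descriptor* descriptor = file->FindMessageTypeByName("Thing");

  // The factory owns the prototype; New() hands back a mutable instance.
  pb::Message* instance = factory.GetPrototype(descriptor)->New();
  const pb::Reflection* reflection = instance->GetReflection();
  reflection->SetInt32(instance, descriptor->FindFieldByName("id"), 7);
  return instance;
}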
+ { + const FieldDescriptor* dynamic_message_extension = + file->FindExtensionByName("dynamic_message_extension"); + ASSERT_TRUE(dynamic_message_extension != NULL); + const Message& parent = unittest::TestAllExtensions::default_instance(); + const Message& sub_message = + parent.GetReflection()->GetMessage(parent, dynamic_message_extension, + &dynamic_factory); + const Message* prototype = + dynamic_factory.GetPrototype(dynamic_message_extension->message_type()); + EXPECT_EQ(prototype, &sub_message); + } +} + +} // namespace +} // namespace internal +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/generated_message_reflection.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/generated_message_reflection.cc new file mode 100644 index 0000000000..a0dd75c65b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/generated_message_reflection.cc @@ -0,0 +1,1228 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace internal { + +int StringSpaceUsedExcludingSelf(const string& str) { + const void* start = &str; + const void* end = &str + 1; + + if (start <= str.data() && str.data() <= end) { + // The string's data is stored inside the string object itself. 
+ return 0; + } else { + return str.capacity(); + } +} + +bool ParseNamedEnum(const EnumDescriptor* descriptor, + const string& name, + int* value) { + const EnumValueDescriptor* d = descriptor->FindValueByName(name); + if (d == NULL) return false; + *value = d->number(); + return true; +} + +const string& NameOfEnum(const EnumDescriptor* descriptor, int value) { + const EnumValueDescriptor* d = descriptor->FindValueByNumber(value); + return (d == NULL ? kEmptyString : d->name()); +} + +// =================================================================== +// Helpers for reporting usage errors (e.g. trying to use GetInt32() on +// a string field). + +namespace { + +void ReportReflectionUsageError( + const Descriptor* descriptor, const FieldDescriptor* field, + const char* method, const char* description) { + GOOGLE_LOG(FATAL) + << "Protocol Buffer reflection usage error:\n" + " Method : google::protobuf::Reflection::" << method << "\n" + " Message type: " << descriptor->full_name() << "\n" + " Field : " << field->full_name() << "\n" + " Problem : " << description; +} + +const char* cpptype_names_[FieldDescriptor::MAX_CPPTYPE + 1] = { + "INVALID_CPPTYPE", + "CPPTYPE_INT32", + "CPPTYPE_INT64", + "CPPTYPE_UINT32", + "CPPTYPE_UINT64", + "CPPTYPE_DOUBLE", + "CPPTYPE_FLOAT", + "CPPTYPE_BOOL", + "CPPTYPE_ENUM", + "CPPTYPE_STRING", + "CPPTYPE_MESSAGE" +}; + +static void ReportReflectionUsageTypeError( + const Descriptor* descriptor, const FieldDescriptor* field, + const char* method, + FieldDescriptor::CppType expected_type) { + GOOGLE_LOG(FATAL) + << "Protocol Buffer reflection usage error:\n" + " Method : google::protobuf::Reflection::" << method << "\n" + " Message type: " << descriptor->full_name() << "\n" + " Field : " << field->full_name() << "\n" + " Problem : Field is not the right type for this message:\n" + " Expected : " << cpptype_names_[expected_type] << "\n" + " Field type: " << cpptype_names_[field->cpp_type()]; +} + +static void ReportReflectionUsageEnumTypeError( + const Descriptor* descriptor, const FieldDescriptor* field, + const char* method, const EnumValueDescriptor* value) { + GOOGLE_LOG(FATAL) + << "Protocol Buffer reflection usage error:\n" + " Method : google::protobuf::Reflection::" << method << "\n" + " Message type: " << descriptor->full_name() << "\n" + " Field : " << field->full_name() << "\n" + " Problem : Enum value did not match field type:\n" + " Expected : " << field->enum_type()->full_name() << "\n" + " Actual : " << value->full_name(); +} + +#define USAGE_CHECK(CONDITION, METHOD, ERROR_DESCRIPTION) \ + if (!(CONDITION)) \ + ReportReflectionUsageError(descriptor_, field, #METHOD, ERROR_DESCRIPTION) +#define USAGE_CHECK_EQ(A, B, METHOD, ERROR_DESCRIPTION) \ + USAGE_CHECK((A) == (B), METHOD, ERROR_DESCRIPTION) +#define USAGE_CHECK_NE(A, B, METHOD, ERROR_DESCRIPTION) \ + USAGE_CHECK((A) != (B), METHOD, ERROR_DESCRIPTION) + +#define USAGE_CHECK_TYPE(METHOD, CPPTYPE) \ + if (field->cpp_type() != FieldDescriptor::CPPTYPE_##CPPTYPE) \ + ReportReflectionUsageTypeError(descriptor_, field, #METHOD, \ + FieldDescriptor::CPPTYPE_##CPPTYPE) + +#define USAGE_CHECK_ENUM_VALUE(METHOD) \ + if (value->type() != field->enum_type()) \ + ReportReflectionUsageEnumTypeError(descriptor_, field, #METHOD, value) + +#define USAGE_CHECK_MESSAGE_TYPE(METHOD) \ + USAGE_CHECK_EQ(field->containing_type(), descriptor_, \ + METHOD, "Field does not match message type."); +#define USAGE_CHECK_SINGULAR(METHOD) \ + USAGE_CHECK_NE(field->label(), FieldDescriptor::LABEL_REPEATED, METHOD, \ + "Field 
is repeated; the method requires a singular field.") +#define USAGE_CHECK_REPEATED(METHOD) \ + USAGE_CHECK_EQ(field->label(), FieldDescriptor::LABEL_REPEATED, METHOD, \ + "Field is singular; the method requires a repeated field.") + +#define USAGE_CHECK_ALL(METHOD, LABEL, CPPTYPE) \ + USAGE_CHECK_MESSAGE_TYPE(METHOD); \ + USAGE_CHECK_##LABEL(METHOD); \ + USAGE_CHECK_TYPE(METHOD, CPPTYPE) + +} // namespace + +// =================================================================== + +GeneratedMessageReflection::GeneratedMessageReflection( + const Descriptor* descriptor, + const Message* default_instance, + const int offsets[], + int has_bits_offset, + int unknown_fields_offset, + int extensions_offset, + const DescriptorPool* descriptor_pool, + MessageFactory* factory, + int object_size) + : descriptor_ (descriptor), + default_instance_ (default_instance), + offsets_ (offsets), + has_bits_offset_ (has_bits_offset), + unknown_fields_offset_(unknown_fields_offset), + extensions_offset_(extensions_offset), + object_size_ (object_size), + descriptor_pool_ ((descriptor_pool == NULL) ? + DescriptorPool::generated_pool() : + descriptor_pool), + message_factory_ (factory) { +} + +GeneratedMessageReflection::~GeneratedMessageReflection() {} + +const UnknownFieldSet& GeneratedMessageReflection::GetUnknownFields( + const Message& message) const { + const void* ptr = reinterpret_cast(&message) + + unknown_fields_offset_; + return *reinterpret_cast(ptr); +} +UnknownFieldSet* GeneratedMessageReflection::MutableUnknownFields( + Message* message) const { + void* ptr = reinterpret_cast(message) + unknown_fields_offset_; + return reinterpret_cast(ptr); +} + +int GeneratedMessageReflection::SpaceUsed(const Message& message) const { + // object_size_ already includes the in-memory representation of each field + // in the message, so we only need to account for additional memory used by + // the fields. + int total_size = object_size_; + + total_size += GetUnknownFields(message).SpaceUsedExcludingSelf(); + + if (extensions_offset_ != -1) { + total_size += GetExtensionSet(message).SpaceUsedExcludingSelf(); + } + + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + + if (field->is_repeated()) { + switch (field->cpp_type()) { +#define HANDLE_TYPE(UPPERCASE, LOWERCASE) \ + case FieldDescriptor::CPPTYPE_##UPPERCASE : \ + total_size += GetRaw >(message, field) \ + .SpaceUsedExcludingSelf(); \ + break + + HANDLE_TYPE( INT32, int32); + HANDLE_TYPE( INT64, int64); + HANDLE_TYPE(UINT32, uint32); + HANDLE_TYPE(UINT64, uint64); + HANDLE_TYPE(DOUBLE, double); + HANDLE_TYPE( FLOAT, float); + HANDLE_TYPE( BOOL, bool); + HANDLE_TYPE( ENUM, int); +#undef HANDLE_TYPE + + case FieldDescriptor::CPPTYPE_STRING: + switch (field->options().ctype()) { + default: // TODO(kenton): Support other string reps. + case FieldOptions::STRING: + total_size += GetRaw >(message, field) + .SpaceUsedExcludingSelf(); + break; + } + break; + + case FieldDescriptor::CPPTYPE_MESSAGE: + // We don't know which subclass of RepeatedPtrFieldBase the type is, + // so we use RepeatedPtrFieldBase directly. 
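SpaceUsed() above charges string fields via StringSpaceUsedExcludingSelf(), defined near the top of this file: when the character data lives inside the string object itself (the small-string optimization) it costs nothing extra, otherwise the heap block is approximated by capacity(). The same heuristic in a standalone form:

#include <cstdio>
#include <string>

// Approximate heap bytes owned by a std::string, mirroring the heuristic in
// StringSpaceUsedExcludingSelf(): zero when the data pointer falls inside
// the string object (inline storage), otherwise the allocated capacity.
static size_t ApproxHeapBytes(const std::string& str) {
  const void* start = &str;
  const void* end = &str + 1;
  const void* data = str.data();
  if (start <= data && data <= end) {
    return 0;               // characters stored inline
  }
  return str.capacity();    // characters stored in a separate allocation
}

int main() {
  std::string tiny("hi");
  std::string big(256, 'x');
  std::printf("tiny ~%zu heap bytes, big ~%zu heap bytes\n",
              ApproxHeapBytes(tiny), ApproxHeapBytes(big));
  return 0;
}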
+ total_size += + GetRaw(message, field) + .SpaceUsedExcludingSelf >(); + break; + } + } else { + switch (field->cpp_type()) { + case FieldDescriptor::CPPTYPE_INT32 : + case FieldDescriptor::CPPTYPE_INT64 : + case FieldDescriptor::CPPTYPE_UINT32: + case FieldDescriptor::CPPTYPE_UINT64: + case FieldDescriptor::CPPTYPE_DOUBLE: + case FieldDescriptor::CPPTYPE_FLOAT : + case FieldDescriptor::CPPTYPE_BOOL : + case FieldDescriptor::CPPTYPE_ENUM : + // Field is inline, so we've already counted it. + break; + + case FieldDescriptor::CPPTYPE_STRING: { + switch (field->options().ctype()) { + default: // TODO(kenton): Support other string reps. + case FieldOptions::STRING: { + const string* ptr = GetField(message, field); + + // Initially, the string points to the default value stored in + // the prototype. Only count the string if it has been changed + // from the default value. + const string* default_ptr = DefaultRaw(field); + + if (ptr != default_ptr) { + // string fields are represented by just a pointer, so also + // include sizeof(string) as well. + total_size += sizeof(*ptr) + StringSpaceUsedExcludingSelf(*ptr); + } + break; + } + } + break; + } + + case FieldDescriptor::CPPTYPE_MESSAGE: + if (&message == default_instance_) { + // For singular fields, the prototype just stores a pointer to the + // external type's prototype, so there is no extra memory usage. + } else { + const Message* sub_message = GetRaw(message, field); + if (sub_message != NULL) { + total_size += sub_message->SpaceUsed(); + } + } + break; + } + } + } + + return total_size; +} + +void GeneratedMessageReflection::Swap( + Message* message1, + Message* message2) const { + if (message1 == message2) return; + + // TODO(kenton): Other Reflection methods should probably check this too. + GOOGLE_CHECK_EQ(message1->GetReflection(), this) + << "First argument to Swap() (of type \"" + << message1->GetDescriptor()->full_name() + << "\") is not compatible with this reflection object (which is for type \"" + << descriptor_->full_name() + << "\"). Note that the exact same class is required; not just the same " + "descriptor."; + GOOGLE_CHECK_EQ(message2->GetReflection(), this) + << "Second argument to Swap() (of type \"" + << message1->GetDescriptor()->full_name() + << "\") is not compatible with this reflection object (which is for type \"" + << descriptor_->full_name() + << "\"). 
Note that the exact same class is required; not just the same " + "descriptor."; + + uint32* has_bits1 = MutableHasBits(message1); + uint32* has_bits2 = MutableHasBits(message2); + int has_bits_size = (descriptor_->field_count() + 31) / 32; + + for (int i = 0; i < has_bits_size; i++) { + std::swap(has_bits1[i], has_bits2[i]); + } + + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + if (field->is_repeated()) { + switch (field->cpp_type()) { +#define SWAP_ARRAYS(CPPTYPE, TYPE) \ + case FieldDescriptor::CPPTYPE_##CPPTYPE: \ + MutableRaw >(message1, field)->Swap( \ + MutableRaw >(message2, field)); \ + break; + + SWAP_ARRAYS(INT32 , int32 ); + SWAP_ARRAYS(INT64 , int64 ); + SWAP_ARRAYS(UINT32, uint32); + SWAP_ARRAYS(UINT64, uint64); + SWAP_ARRAYS(FLOAT , float ); + SWAP_ARRAYS(DOUBLE, double); + SWAP_ARRAYS(BOOL , bool ); + SWAP_ARRAYS(ENUM , int ); +#undef SWAP_ARRAYS + + case FieldDescriptor::CPPTYPE_STRING: + case FieldDescriptor::CPPTYPE_MESSAGE: + MutableRaw(message1, field)->Swap( + MutableRaw(message2, field)); + break; + + default: + GOOGLE_LOG(FATAL) << "Unimplemented type: " << field->cpp_type(); + } + } else { + switch (field->cpp_type()) { +#define SWAP_VALUES(CPPTYPE, TYPE) \ + case FieldDescriptor::CPPTYPE_##CPPTYPE: \ + std::swap(*MutableRaw(message1, field), \ + *MutableRaw(message2, field)); \ + break; + + SWAP_VALUES(INT32 , int32 ); + SWAP_VALUES(INT64 , int64 ); + SWAP_VALUES(UINT32, uint32); + SWAP_VALUES(UINT64, uint64); + SWAP_VALUES(FLOAT , float ); + SWAP_VALUES(DOUBLE, double); + SWAP_VALUES(BOOL , bool ); + SWAP_VALUES(ENUM , int ); + SWAP_VALUES(MESSAGE, Message*); +#undef SWAP_VALUES + + case FieldDescriptor::CPPTYPE_STRING: + switch (field->options().ctype()) { + default: // TODO(kenton): Support other string reps. 
+ case FieldOptions::STRING: + std::swap(*MutableRaw(message1, field), + *MutableRaw(message2, field)); + break; + } + break; + + default: + GOOGLE_LOG(FATAL) << "Unimplemented type: " << field->cpp_type(); + } + } + } + + if (extensions_offset_ != -1) { + MutableExtensionSet(message1)->Swap(MutableExtensionSet(message2)); + } + + MutableUnknownFields(message1)->Swap(MutableUnknownFields(message2)); +} + +// ------------------------------------------------------------------- + +bool GeneratedMessageReflection::HasField(const Message& message, + const FieldDescriptor* field) const { + USAGE_CHECK_MESSAGE_TYPE(HasField); + USAGE_CHECK_SINGULAR(HasField); + + if (field->is_extension()) { + return GetExtensionSet(message).Has(field->number()); + } else { + return HasBit(message, field); + } +} + +int GeneratedMessageReflection::FieldSize(const Message& message, + const FieldDescriptor* field) const { + USAGE_CHECK_MESSAGE_TYPE(FieldSize); + USAGE_CHECK_REPEATED(FieldSize); + + if (field->is_extension()) { + return GetExtensionSet(message).ExtensionSize(field->number()); + } else { + switch (field->cpp_type()) { +#define HANDLE_TYPE(UPPERCASE, LOWERCASE) \ + case FieldDescriptor::CPPTYPE_##UPPERCASE : \ + return GetRaw >(message, field).size() + + HANDLE_TYPE( INT32, int32); + HANDLE_TYPE( INT64, int64); + HANDLE_TYPE(UINT32, uint32); + HANDLE_TYPE(UINT64, uint64); + HANDLE_TYPE(DOUBLE, double); + HANDLE_TYPE( FLOAT, float); + HANDLE_TYPE( BOOL, bool); + HANDLE_TYPE( ENUM, int); +#undef HANDLE_TYPE + + case FieldDescriptor::CPPTYPE_STRING: + case FieldDescriptor::CPPTYPE_MESSAGE: + return GetRaw(message, field).size(); + } + + GOOGLE_LOG(FATAL) << "Can't get here."; + return 0; + } +} + +void GeneratedMessageReflection::ClearField( + Message* message, const FieldDescriptor* field) const { + USAGE_CHECK_MESSAGE_TYPE(ClearField); + + if (field->is_extension()) { + MutableExtensionSet(message)->ClearExtension(field->number()); + } else if (!field->is_repeated()) { + if (HasBit(*message, field)) { + ClearBit(message, field); + + // We need to set the field back to its default value. + switch (field->cpp_type()) { +#define CLEAR_TYPE(CPPTYPE, TYPE) \ + case FieldDescriptor::CPPTYPE_##CPPTYPE: \ + *MutableRaw(message, field) = \ + field->default_value_##TYPE(); \ + break; + + CLEAR_TYPE(INT32 , int32 ); + CLEAR_TYPE(INT64 , int64 ); + CLEAR_TYPE(UINT32, uint32); + CLEAR_TYPE(UINT64, uint64); + CLEAR_TYPE(FLOAT , float ); + CLEAR_TYPE(DOUBLE, double); + CLEAR_TYPE(BOOL , bool ); +#undef CLEAR_TYPE + + case FieldDescriptor::CPPTYPE_ENUM: + *MutableRaw(message, field) = + field->default_value_enum()->number(); + break; + + case FieldDescriptor::CPPTYPE_STRING: { + switch (field->options().ctype()) { + default: // TODO(kenton): Support other string reps. 
+ case FieldOptions::STRING: + const string* default_ptr = DefaultRaw(field); + string** value = MutableRaw(message, field); + if (*value != default_ptr) { + if (field->has_default_value()) { + (*value)->assign(field->default_value_string()); + } else { + (*value)->clear(); + } + } + break; + } + break; + } + + case FieldDescriptor::CPPTYPE_MESSAGE: + (*MutableRaw(message, field))->Clear(); + break; + } + } + } else { + switch (field->cpp_type()) { +#define HANDLE_TYPE(UPPERCASE, LOWERCASE) \ + case FieldDescriptor::CPPTYPE_##UPPERCASE : \ + MutableRaw >(message, field)->Clear(); \ + break + + HANDLE_TYPE( INT32, int32); + HANDLE_TYPE( INT64, int64); + HANDLE_TYPE(UINT32, uint32); + HANDLE_TYPE(UINT64, uint64); + HANDLE_TYPE(DOUBLE, double); + HANDLE_TYPE( FLOAT, float); + HANDLE_TYPE( BOOL, bool); + HANDLE_TYPE( ENUM, int); +#undef HANDLE_TYPE + + case FieldDescriptor::CPPTYPE_STRING: { + switch (field->options().ctype()) { + default: // TODO(kenton): Support other string reps. + case FieldOptions::STRING: + MutableRaw >(message, field)->Clear(); + break; + } + break; + } + + case FieldDescriptor::CPPTYPE_MESSAGE: { + // We don't know which subclass of RepeatedPtrFieldBase the type is, + // so we use RepeatedPtrFieldBase directly. + MutableRaw(message, field) + ->Clear >(); + break; + } + } + } +} + +void GeneratedMessageReflection::RemoveLast( + Message* message, + const FieldDescriptor* field) const { + USAGE_CHECK_MESSAGE_TYPE(RemoveLast); + USAGE_CHECK_REPEATED(RemoveLast); + + if (field->is_extension()) { + MutableExtensionSet(message)->RemoveLast(field->number()); + } else { + switch (field->cpp_type()) { +#define HANDLE_TYPE(UPPERCASE, LOWERCASE) \ + case FieldDescriptor::CPPTYPE_##UPPERCASE : \ + MutableRaw >(message, field)->RemoveLast(); \ + break + + HANDLE_TYPE( INT32, int32); + HANDLE_TYPE( INT64, int64); + HANDLE_TYPE(UINT32, uint32); + HANDLE_TYPE(UINT64, uint64); + HANDLE_TYPE(DOUBLE, double); + HANDLE_TYPE( FLOAT, float); + HANDLE_TYPE( BOOL, bool); + HANDLE_TYPE( ENUM, int); +#undef HANDLE_TYPE + + case FieldDescriptor::CPPTYPE_STRING: + switch (field->options().ctype()) { + default: // TODO(kenton): Support other string reps. + case FieldOptions::STRING: + MutableRaw >(message, field)->RemoveLast(); + break; + } + break; + + case FieldDescriptor::CPPTYPE_MESSAGE: + MutableRaw(message, field) + ->RemoveLast >(); + break; + } + } +} + +void GeneratedMessageReflection::SwapElements( + Message* message, + const FieldDescriptor* field, + int index1, + int index2) const { + USAGE_CHECK_MESSAGE_TYPE(Swap); + USAGE_CHECK_REPEATED(Swap); + + if (field->is_extension()) { + MutableExtensionSet(message)->SwapElements(field->number(), index1, index2); + } else { + switch (field->cpp_type()) { +#define HANDLE_TYPE(UPPERCASE, LOWERCASE) \ + case FieldDescriptor::CPPTYPE_##UPPERCASE : \ + MutableRaw >(message, field) \ + ->SwapElements(index1, index2); \ + break + + HANDLE_TYPE( INT32, int32); + HANDLE_TYPE( INT64, int64); + HANDLE_TYPE(UINT32, uint32); + HANDLE_TYPE(UINT64, uint64); + HANDLE_TYPE(DOUBLE, double); + HANDLE_TYPE( FLOAT, float); + HANDLE_TYPE( BOOL, bool); + HANDLE_TYPE( ENUM, int); +#undef HANDLE_TYPE + + case FieldDescriptor::CPPTYPE_STRING: + case FieldDescriptor::CPPTYPE_MESSAGE: + MutableRaw(message, field) + ->SwapElements(index1, index2); + break; + } + } +} + +namespace { +// Comparison functor for sorting FieldDescriptors by field number. 
+struct FieldNumberSorter { + bool operator()(const FieldDescriptor* left, + const FieldDescriptor* right) const { + return left->number() < right->number(); + } +}; +} // namespace + +void GeneratedMessageReflection::ListFields( + const Message& message, + vector* output) const { + output->clear(); + + // Optimization: The default instance never has any fields set. + if (&message == default_instance_) return; + + for (int i = 0; i < descriptor_->field_count(); i++) { + const FieldDescriptor* field = descriptor_->field(i); + if (field->is_repeated()) { + if (FieldSize(message, field) > 0) { + output->push_back(field); + } + } else { + if (HasBit(message, field)) { + output->push_back(field); + } + } + } + + if (extensions_offset_ != -1) { + GetExtensionSet(message).AppendToList(descriptor_, descriptor_pool_, + output); + } + + // ListFields() must sort output by field number. + sort(output->begin(), output->end(), FieldNumberSorter()); +} + +// ------------------------------------------------------------------- + +#undef DEFINE_PRIMITIVE_ACCESSORS +#define DEFINE_PRIMITIVE_ACCESSORS(TYPENAME, TYPE, PASSTYPE, CPPTYPE) \ + PASSTYPE GeneratedMessageReflection::Get##TYPENAME( \ + const Message& message, const FieldDescriptor* field) const { \ + USAGE_CHECK_ALL(Get##TYPENAME, SINGULAR, CPPTYPE); \ + if (field->is_extension()) { \ + return GetExtensionSet(message).Get##TYPENAME( \ + field->number(), field->default_value_##PASSTYPE()); \ + } else { \ + return GetField(message, field); \ + } \ + } \ + \ + void GeneratedMessageReflection::Set##TYPENAME( \ + Message* message, const FieldDescriptor* field, \ + PASSTYPE value) const { \ + USAGE_CHECK_ALL(Set##TYPENAME, SINGULAR, CPPTYPE); \ + if (field->is_extension()) { \ + return MutableExtensionSet(message)->Set##TYPENAME( \ + field->number(), field->type(), value, field); \ + } else { \ + SetField(message, field, value); \ + } \ + } \ + \ + PASSTYPE GeneratedMessageReflection::GetRepeated##TYPENAME( \ + const Message& message, \ + const FieldDescriptor* field, int index) const { \ + USAGE_CHECK_ALL(GetRepeated##TYPENAME, REPEATED, CPPTYPE); \ + if (field->is_extension()) { \ + return GetExtensionSet(message).GetRepeated##TYPENAME( \ + field->number(), index); \ + } else { \ + return GetRepeatedField(message, field, index); \ + } \ + } \ + \ + void GeneratedMessageReflection::SetRepeated##TYPENAME( \ + Message* message, const FieldDescriptor* field, \ + int index, PASSTYPE value) const { \ + USAGE_CHECK_ALL(SetRepeated##TYPENAME, REPEATED, CPPTYPE); \ + if (field->is_extension()) { \ + MutableExtensionSet(message)->SetRepeated##TYPENAME( \ + field->number(), index, value); \ + } else { \ + SetRepeatedField(message, field, index, value); \ + } \ + } \ + \ + void GeneratedMessageReflection::Add##TYPENAME( \ + Message* message, const FieldDescriptor* field, \ + PASSTYPE value) const { \ + USAGE_CHECK_ALL(Add##TYPENAME, REPEATED, CPPTYPE); \ + if (field->is_extension()) { \ + MutableExtensionSet(message)->Add##TYPENAME( \ + field->number(), field->type(), field->options().packed(), value, \ + field); \ + } else { \ + AddField(message, field, value); \ + } \ + } + +DEFINE_PRIMITIVE_ACCESSORS(Int32 , int32 , int32 , INT32 ) +DEFINE_PRIMITIVE_ACCESSORS(Int64 , int64 , int64 , INT64 ) +DEFINE_PRIMITIVE_ACCESSORS(UInt32, uint32, uint32, UINT32) +DEFINE_PRIMITIVE_ACCESSORS(UInt64, uint64, uint64, UINT64) +DEFINE_PRIMITIVE_ACCESSORS(Float , float , float , FLOAT ) +DEFINE_PRIMITIVE_ACCESSORS(Double, double, double, DOUBLE) +DEFINE_PRIMITIVE_ACCESSORS(Bool , 
bool , bool , BOOL ) +#undef DEFINE_PRIMITIVE_ACCESSORS + +// ------------------------------------------------------------------- + +string GeneratedMessageReflection::GetString( + const Message& message, const FieldDescriptor* field) const { + USAGE_CHECK_ALL(GetString, SINGULAR, STRING); + if (field->is_extension()) { + return GetExtensionSet(message).GetString(field->number(), + field->default_value_string()); + } else { + switch (field->options().ctype()) { + default: // TODO(kenton): Support other string reps. + case FieldOptions::STRING: + return *GetField(message, field); + } + + GOOGLE_LOG(FATAL) << "Can't get here."; + return kEmptyString; // Make compiler happy. + } +} + +const string& GeneratedMessageReflection::GetStringReference( + const Message& message, + const FieldDescriptor* field, string* scratch) const { + USAGE_CHECK_ALL(GetStringReference, SINGULAR, STRING); + if (field->is_extension()) { + return GetExtensionSet(message).GetString(field->number(), + field->default_value_string()); + } else { + switch (field->options().ctype()) { + default: // TODO(kenton): Support other string reps. + case FieldOptions::STRING: + return *GetField(message, field); + } + + GOOGLE_LOG(FATAL) << "Can't get here."; + return kEmptyString; // Make compiler happy. + } +} + + +void GeneratedMessageReflection::SetString( + Message* message, const FieldDescriptor* field, + const string& value) const { + USAGE_CHECK_ALL(SetString, SINGULAR, STRING); + if (field->is_extension()) { + return MutableExtensionSet(message)->SetString(field->number(), + field->type(), value, field); + } else { + switch (field->options().ctype()) { + default: // TODO(kenton): Support other string reps. + case FieldOptions::STRING: { + string** ptr = MutableField(message, field); + if (*ptr == DefaultRaw(field)) { + *ptr = new string(value); + } else { + (*ptr)->assign(value); + } + break; + } + } + } +} + + +string GeneratedMessageReflection::GetRepeatedString( + const Message& message, const FieldDescriptor* field, int index) const { + USAGE_CHECK_ALL(GetRepeatedString, REPEATED, STRING); + if (field->is_extension()) { + return GetExtensionSet(message).GetRepeatedString(field->number(), index); + } else { + switch (field->options().ctype()) { + default: // TODO(kenton): Support other string reps. + case FieldOptions::STRING: + return GetRepeatedPtrField(message, field, index); + } + + GOOGLE_LOG(FATAL) << "Can't get here."; + return kEmptyString; // Make compiler happy. + } +} + +const string& GeneratedMessageReflection::GetRepeatedStringReference( + const Message& message, const FieldDescriptor* field, + int index, string* scratch) const { + USAGE_CHECK_ALL(GetRepeatedStringReference, REPEATED, STRING); + if (field->is_extension()) { + return GetExtensionSet(message).GetRepeatedString(field->number(), index); + } else { + switch (field->options().ctype()) { + default: // TODO(kenton): Support other string reps. + case FieldOptions::STRING: + return GetRepeatedPtrField(message, field, index); + } + + GOOGLE_LOG(FATAL) << "Can't get here."; + return kEmptyString; // Make compiler happy. + } +} + + +void GeneratedMessageReflection::SetRepeatedString( + Message* message, const FieldDescriptor* field, + int index, const string& value) const { + USAGE_CHECK_ALL(SetRepeatedString, REPEATED, STRING); + if (field->is_extension()) { + MutableExtensionSet(message)->SetRepeatedString( + field->number(), index, value); + } else { + switch (field->options().ctype()) { + default: // TODO(kenton): Support other string reps. 
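All of the string accessors above funnel through descriptor-driven helpers, so a caller only needs a FieldDescriptor to read or write a field it never saw at compile time. A short sketch of driving a message through the public reflection interface; the message is assumed to be some concrete generated or dynamic type with an optional string field named "name":

#include <google/protobuf/descriptor.h>
#include <google/protobuf/message.h>

#include <string>
#include <vector>

// Sets and reads a string field purely through reflection, then lists which
// fields are currently present (ListFields returns them sorted by number).
void TouchNameField(google::protobuf::Message* msg) {
  const google::protobuf::Descriptor* descriptor = msg->GetDescriptor();
  const google::protobuf::Reflection* reflection = msg->GetReflection();
  const google::protobuf::FieldDescriptor* name_field =
      descriptor->FindFieldByName("name");  // assumed to exist and be a string
  if (name_field == NULL) return;

  reflection->SetString(msg, name_field, "example");
  std::string value = reflection->GetString(*msg, name_field);

  std::vector<const google::protobuf::FieldDescriptor*> present;
  reflection->ListFields(*msg, &present);
  (void)value;
}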
+ case FieldOptions::STRING: + *MutableRepeatedField(message, field, index) = value; + break; + } + } +} + + +void GeneratedMessageReflection::AddString( + Message* message, const FieldDescriptor* field, + const string& value) const { + USAGE_CHECK_ALL(AddString, REPEATED, STRING); + if (field->is_extension()) { + MutableExtensionSet(message)->AddString(field->number(), + field->type(), value, field); + } else { + switch (field->options().ctype()) { + default: // TODO(kenton): Support other string reps. + case FieldOptions::STRING: + *AddField(message, field) = value; + break; + } + } +} + + +// ------------------------------------------------------------------- + +const EnumValueDescriptor* GeneratedMessageReflection::GetEnum( + const Message& message, const FieldDescriptor* field) const { + USAGE_CHECK_ALL(GetEnum, SINGULAR, ENUM); + + int value; + if (field->is_extension()) { + value = GetExtensionSet(message).GetEnum( + field->number(), field->default_value_enum()->number()); + } else { + value = GetField(message, field); + } + const EnumValueDescriptor* result = + field->enum_type()->FindValueByNumber(value); + GOOGLE_CHECK(result != NULL); + return result; +} + +void GeneratedMessageReflection::SetEnum( + Message* message, const FieldDescriptor* field, + const EnumValueDescriptor* value) const { + USAGE_CHECK_ALL(SetEnum, SINGULAR, ENUM); + USAGE_CHECK_ENUM_VALUE(SetEnum); + + if (field->is_extension()) { + MutableExtensionSet(message)->SetEnum(field->number(), field->type(), + value->number(), field); + } else { + SetField(message, field, value->number()); + } +} + +const EnumValueDescriptor* GeneratedMessageReflection::GetRepeatedEnum( + const Message& message, const FieldDescriptor* field, int index) const { + USAGE_CHECK_ALL(GetRepeatedEnum, REPEATED, ENUM); + + int value; + if (field->is_extension()) { + value = GetExtensionSet(message).GetRepeatedEnum(field->number(), index); + } else { + value = GetRepeatedField(message, field, index); + } + const EnumValueDescriptor* result = + field->enum_type()->FindValueByNumber(value); + GOOGLE_CHECK(result != NULL); + return result; +} + +void GeneratedMessageReflection::SetRepeatedEnum( + Message* message, + const FieldDescriptor* field, int index, + const EnumValueDescriptor* value) const { + USAGE_CHECK_ALL(SetRepeatedEnum, REPEATED, ENUM); + USAGE_CHECK_ENUM_VALUE(SetRepeatedEnum); + + if (field->is_extension()) { + MutableExtensionSet(message)->SetRepeatedEnum( + field->number(), index, value->number()); + } else { + SetRepeatedField(message, field, index, value->number()); + } +} + +void GeneratedMessageReflection::AddEnum( + Message* message, const FieldDescriptor* field, + const EnumValueDescriptor* value) const { + USAGE_CHECK_ALL(AddEnum, REPEATED, ENUM); + USAGE_CHECK_ENUM_VALUE(AddEnum); + + if (field->is_extension()) { + MutableExtensionSet(message)->AddEnum(field->number(), field->type(), + field->options().packed(), + value->number(), field); + } else { + AddField(message, field, value->number()); + } +} + +// ------------------------------------------------------------------- + +const Message& GeneratedMessageReflection::GetMessage( + const Message& message, const FieldDescriptor* field, + MessageFactory* factory) const { + USAGE_CHECK_ALL(GetMessage, SINGULAR, MESSAGE); + + if (field->is_extension()) { + return static_cast( + GetExtensionSet(message).GetMessage( + field->number(), field->message_type(), + factory == NULL ? 
message_factory_ : factory)); + } else { + const Message* result = GetRaw(message, field); + if (result == NULL) { + result = DefaultRaw(field); + } + return *result; + } +} + +Message* GeneratedMessageReflection::MutableMessage( + Message* message, const FieldDescriptor* field, + MessageFactory* factory) const { + USAGE_CHECK_ALL(MutableMessage, SINGULAR, MESSAGE); + + if (field->is_extension()) { + return static_cast( + MutableExtensionSet(message)->MutableMessage(field, + factory == NULL ? message_factory_ : factory)); + } else { + Message** result = MutableField(message, field); + if (*result == NULL) { + const Message* default_message = DefaultRaw(field); + *result = default_message->New(); + } + return *result; + } +} + +const Message& GeneratedMessageReflection::GetRepeatedMessage( + const Message& message, const FieldDescriptor* field, int index) const { + USAGE_CHECK_ALL(GetRepeatedMessage, REPEATED, MESSAGE); + + if (field->is_extension()) { + return static_cast( + GetExtensionSet(message).GetRepeatedMessage(field->number(), index)); + } else { + return GetRaw(message, field) + .Get >(index); + } +} + +Message* GeneratedMessageReflection::MutableRepeatedMessage( + Message* message, const FieldDescriptor* field, int index) const { + USAGE_CHECK_ALL(MutableRepeatedMessage, REPEATED, MESSAGE); + + if (field->is_extension()) { + return static_cast( + MutableExtensionSet(message)->MutableRepeatedMessage( + field->number(), index)); + } else { + return MutableRaw(message, field) + ->Mutable >(index); + } +} + +Message* GeneratedMessageReflection::AddMessage( + Message* message, const FieldDescriptor* field, + MessageFactory* factory) const { + USAGE_CHECK_ALL(AddMessage, REPEATED, MESSAGE); + + if (factory == NULL) factory = message_factory_; + + if (field->is_extension()) { + return static_cast( + MutableExtensionSet(message)->AddMessage(field, factory)); + } else { + // We can't use AddField() because RepeatedPtrFieldBase doesn't + // know how to allocate one. + RepeatedPtrFieldBase* repeated = + MutableRaw(message, field); + Message* result = repeated->AddFromCleared >(); + if (result == NULL) { + // We must allocate a new object. + const Message* prototype; + if (repeated->size() == 0) { + prototype = factory->GetPrototype(field->message_type()); + } else { + prototype = &repeated->Get >(0); + } + result = prototype->New(); + repeated->AddAllocated >(result); + } + return result; + } +} + +// ------------------------------------------------------------------- + +const FieldDescriptor* GeneratedMessageReflection::FindKnownExtensionByName( + const string& name) const { + if (extensions_offset_ == -1) return NULL; + + const FieldDescriptor* result = descriptor_pool_->FindExtensionByName(name); + if (result != NULL && result->containing_type() == descriptor_) { + return result; + } + + if (descriptor_->options().message_set_wire_format()) { + // MessageSet extensions may be identified by type name. + const Descriptor* type = descriptor_pool_->FindMessageTypeByName(name); + if (type != NULL) { + // Look for a matching extension in the foreign type's scope. + for (int i = 0; i < type->extension_count(); i++) { + const FieldDescriptor* extension = type->extension(i); + if (extension->containing_type() == descriptor_ && + extension->type() == FieldDescriptor::TYPE_MESSAGE && + extension->is_optional() && + extension->message_type() == type) { + // Found it. 
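GetMessage() and MutableMessage() above embody the lazy-allocation rule for singular message fields: the getter hands back the prototype while nothing is set, and the mutator allocates a fresh instance via New() on first use. Through the public reflection interface that looks like the following sketch, again assuming a generated or dynamic message with a singular message-typed field named "child":

#include <google/protobuf/descriptor.h>
#include <google/protobuf/message.h>

// On first call MutableMessage() allocates the sub-message; GetMessage()
// never allocates and simply returns the default instance until then.
google::protobuf::Message* MutableChild(google::protobuf::Message* msg) {
  const google::protobuf::FieldDescriptor* child_field =
      msg->GetDescriptor()->FindFieldByName("child");  // assumed message-typed
  if (child_field == NULL) return NULL;

  const google::protobuf::Reflection* reflection = msg->GetReflection();
  const google::protobuf::Message& before =
      reflection->GetMessage(*msg, child_field);    // default instance
  google::protobuf::Message* child =
      reflection->MutableMessage(msg, child_field); // allocated now
  (void)before;
  return child;
}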
+ return extension; + } + } + } + } + + return NULL; +} + +const FieldDescriptor* GeneratedMessageReflection::FindKnownExtensionByNumber( + int number) const { + if (extensions_offset_ == -1) return NULL; + return descriptor_pool_->FindExtensionByNumber(descriptor_, number); +} + +// =================================================================== +// Some private helpers. + +// These simple template accessors obtain pointers (or references) to +// the given field. +template +inline const Type& GeneratedMessageReflection::GetRaw( + const Message& message, const FieldDescriptor* field) const { + const void* ptr = reinterpret_cast(&message) + + offsets_[field->index()]; + return *reinterpret_cast(ptr); +} + +template +inline Type* GeneratedMessageReflection::MutableRaw( + Message* message, const FieldDescriptor* field) const { + void* ptr = reinterpret_cast(message) + offsets_[field->index()]; + return reinterpret_cast(ptr); +} + +template +inline const Type& GeneratedMessageReflection::DefaultRaw( + const FieldDescriptor* field) const { + const void* ptr = reinterpret_cast(default_instance_) + + offsets_[field->index()]; + return *reinterpret_cast(ptr); +} + +inline const uint32* GeneratedMessageReflection::GetHasBits( + const Message& message) const { + const void* ptr = reinterpret_cast(&message) + has_bits_offset_; + return reinterpret_cast(ptr); +} +inline uint32* GeneratedMessageReflection::MutableHasBits( + Message* message) const { + void* ptr = reinterpret_cast(message) + has_bits_offset_; + return reinterpret_cast(ptr); +} + +inline const ExtensionSet& GeneratedMessageReflection::GetExtensionSet( + const Message& message) const { + GOOGLE_DCHECK_NE(extensions_offset_, -1); + const void* ptr = reinterpret_cast(&message) + + extensions_offset_; + return *reinterpret_cast(ptr); +} +inline ExtensionSet* GeneratedMessageReflection::MutableExtensionSet( + Message* message) const { + GOOGLE_DCHECK_NE(extensions_offset_, -1); + void* ptr = reinterpret_cast(message) + extensions_offset_; + return reinterpret_cast(ptr); +} + +// Simple accessors for manipulating has_bits_. +inline bool GeneratedMessageReflection::HasBit( + const Message& message, const FieldDescriptor* field) const { + return GetHasBits(message)[field->index() / 32] & + (1 << (field->index() % 32)); +} + +inline void GeneratedMessageReflection::SetBit( + Message* message, const FieldDescriptor* field) const { + MutableHasBits(message)[field->index() / 32] |= (1 << (field->index() % 32)); +} + +inline void GeneratedMessageReflection::ClearBit( + Message* message, const FieldDescriptor* field) const { + MutableHasBits(message)[field->index() / 32] &= ~(1 << (field->index() % 32)); +} + +// Template implementations of basic accessors. Inline because each +// template instance is only called from one location. These are +// used for all types except messages. 
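// ---------------------------------------------------------------------------
// [Editor's aside -- illustration only, not part of the vendored protobuf
// sources or of this patch.] The HasBit/SetBit/ClearBit helpers above pack
// one presence bit per field index into an array of 32-bit words: word
// index/32, bit index%32. A minimal standalone sketch of that arithmetic,
// with hypothetical names:
#include <cassert>
#include <cstdint>

// Hypothetical helpers mirroring the word/mask arithmetic used above.
inline bool example_has_bit(const uint32_t* bits, int field_index) {
  return (bits[field_index / 32] >> (field_index % 32)) & 1u;
}
inline void example_set_bit(uint32_t* bits, int field_index) {
  bits[field_index / 32] |= (1u << (field_index % 32));
}
inline void example_clear_bit(uint32_t* bits, int field_index) {
  bits[field_index / 32] &= ~(1u << (field_index % 32));
}

int main() {
  uint32_t bits[2] = {0, 0};   // room for 64 hypothetical fields
  example_set_bit(bits, 33);   // field 33 -> word 1, bit 1
  assert(example_has_bit(bits, 33));
  example_clear_bit(bits, 33);
  assert(!example_has_bit(bits, 33));
  return 0;
}
// ---------------------------------------------------------------------------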
+template +inline const Type& GeneratedMessageReflection::GetField( + const Message& message, const FieldDescriptor* field) const { + return GetRaw(message, field); +} + +template +inline void GeneratedMessageReflection::SetField( + Message* message, const FieldDescriptor* field, const Type& value) const { + *MutableRaw(message, field) = value; + SetBit(message, field); +} + +template +inline Type* GeneratedMessageReflection::MutableField( + Message* message, const FieldDescriptor* field) const { + SetBit(message, field); + return MutableRaw(message, field); +} + +template +inline const Type& GeneratedMessageReflection::GetRepeatedField( + const Message& message, const FieldDescriptor* field, int index) const { + return GetRaw >(message, field).Get(index); +} + +template +inline const Type& GeneratedMessageReflection::GetRepeatedPtrField( + const Message& message, const FieldDescriptor* field, int index) const { + return GetRaw >(message, field).Get(index); +} + +template +inline void GeneratedMessageReflection::SetRepeatedField( + Message* message, const FieldDescriptor* field, + int index, Type value) const { + MutableRaw >(message, field)->Set(index, value); +} + +template +inline Type* GeneratedMessageReflection::MutableRepeatedField( + Message* message, const FieldDescriptor* field, int index) const { + RepeatedPtrField* repeated = + MutableRaw >(message, field); + return repeated->Mutable(index); +} + +template +inline void GeneratedMessageReflection::AddField( + Message* message, const FieldDescriptor* field, const Type& value) const { + MutableRaw >(message, field)->Add(value); +} + +template +inline Type* GeneratedMessageReflection::AddField( + Message* message, const FieldDescriptor* field) const { + RepeatedPtrField* repeated = + MutableRaw >(message, field); + return repeated->Add(); +} + +} // namespace internal +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/generated_message_reflection.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/generated_message_reflection.h new file mode 100644 index 0000000000..b545fa1a2a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/generated_message_reflection.h @@ -0,0 +1,424 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This header is logically internal, but is made public because it is used +// from protocol-compiler-generated code, which may reside in other components. + +#ifndef GOOGLE_PROTOBUF_GENERATED_MESSAGE_REFLECTION_H__ +#define GOOGLE_PROTOBUF_GENERATED_MESSAGE_REFLECTION_H__ + +#include +#include +#include +#include + + +namespace google { +namespace protobuf { + class DescriptorPool; + // Generated code needs these to have been forward-declared. Easier to do it + // here than to print them inside every .pb.h file. + class FileDescriptor; + class EnumDescriptor; +} + +namespace protobuf { +namespace internal { + +// Defined in this file. +class GeneratedMessageReflection; + +// Defined in other files. +class ExtensionSet; // extension_set.h + +// THIS CLASS IS NOT INTENDED FOR DIRECT USE. It is intended for use +// by generated code. This class is just a big hack that reduces code +// size. +// +// A GeneratedMessageReflection is an implementation of Reflection +// which expects all fields to be backed by simple variables located in +// memory. The locations are given using a base pointer and a set of +// offsets. +// +// It is required that the user represents fields of each type in a standard +// way, so that GeneratedMessageReflection can cast the void* pointer to +// the appropriate type. For primitive fields and string fields, each field +// should be represented using the obvious C++ primitive type. Enums and +// Messages are different: +// - Singular Message fields are stored as a pointer to a Message. These +// should start out NULL, except for in the default instance where they +// should start out pointing to other default instances. +// - Enum fields are stored as an int. This int must always contain +// a valid value, such that EnumDescriptor::FindValueByNumber() would +// not return NULL. +// - Repeated fields are stored as RepeatedFields or RepeatedPtrFields +// of whatever type the individual field would be. Strings and +// Messages use RepeatedPtrFields while everything else uses +// RepeatedFields. +class LIBPROTOBUF_EXPORT GeneratedMessageReflection : public Reflection { + public: + // Constructs a GeneratedMessageReflection. + // Parameters: + // descriptor: The descriptor for the message type being implemented. + // default_instance: The default instance of the message. This is only + // used to obtain pointers to default instances of embedded + // messages, which GetMessage() will return if the particular + // sub-message has not been initialized yet. (Thus, all + // embedded message fields *must* have non-NULL pointers + // in the default instance.) + // offsets: An array of ints giving the byte offsets, relative to + // the start of the message object, of each field. 
These can + // be computed at compile time using the + // GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET() macro, defined + // below. + // has_bits_offset: Offset in the message of an array of uint32s of size + // descriptor->field_count()/32, rounded up. This is a + // bitfield where each bit indicates whether or not the + // corresponding field of the message has been initialized. + // The bit for field index i is obtained by the expression: + // has_bits[i / 32] & (1 << (i % 32)) + // unknown_fields_offset: Offset in the message of the UnknownFieldSet for + // the message. + // extensions_offset: Offset in the message of the ExtensionSet for the + // message, or -1 if the message type has no extension + // ranges. + // pool: DescriptorPool to search for extension definitions. Only + // used by FindKnownExtensionByName() and + // FindKnownExtensionByNumber(). + // factory: MessageFactory to use to construct extension messages. + // object_size: The size of a message object of this type, as measured + // by sizeof(). + GeneratedMessageReflection(const Descriptor* descriptor, + const Message* default_instance, + const int offsets[], + int has_bits_offset, + int unknown_fields_offset, + int extensions_offset, + const DescriptorPool* pool, + MessageFactory* factory, + int object_size); + ~GeneratedMessageReflection(); + + // implements Reflection ------------------------------------------- + + const UnknownFieldSet& GetUnknownFields(const Message& message) const; + UnknownFieldSet* MutableUnknownFields(Message* message) const; + + int SpaceUsed(const Message& message) const; + + bool HasField(const Message& message, const FieldDescriptor* field) const; + int FieldSize(const Message& message, const FieldDescriptor* field) const; + void ClearField(Message* message, const FieldDescriptor* field) const; + void RemoveLast(Message* message, const FieldDescriptor* field) const; + void Swap(Message* message1, Message* message2) const; + void SwapElements(Message* message, const FieldDescriptor* field, + int index1, int index2) const; + void ListFields(const Message& message, + vector* output) const; + + int32 GetInt32 (const Message& message, + const FieldDescriptor* field) const; + int64 GetInt64 (const Message& message, + const FieldDescriptor* field) const; + uint32 GetUInt32(const Message& message, + const FieldDescriptor* field) const; + uint64 GetUInt64(const Message& message, + const FieldDescriptor* field) const; + float GetFloat (const Message& message, + const FieldDescriptor* field) const; + double GetDouble(const Message& message, + const FieldDescriptor* field) const; + bool GetBool (const Message& message, + const FieldDescriptor* field) const; + string GetString(const Message& message, + const FieldDescriptor* field) const; + const string& GetStringReference(const Message& message, + const FieldDescriptor* field, + string* scratch) const; + const EnumValueDescriptor* GetEnum(const Message& message, + const FieldDescriptor* field) const; + const Message& GetMessage(const Message& message, + const FieldDescriptor* field, + MessageFactory* factory = NULL) const; + + void SetInt32 (Message* message, + const FieldDescriptor* field, int32 value) const; + void SetInt64 (Message* message, + const FieldDescriptor* field, int64 value) const; + void SetUInt32(Message* message, + const FieldDescriptor* field, uint32 value) const; + void SetUInt64(Message* message, + const FieldDescriptor* field, uint64 value) const; + void SetFloat (Message* message, + const FieldDescriptor* field, float value) 
const; + void SetDouble(Message* message, + const FieldDescriptor* field, double value) const; + void SetBool (Message* message, + const FieldDescriptor* field, bool value) const; + void SetString(Message* message, + const FieldDescriptor* field, + const string& value) const; + void SetEnum (Message* message, const FieldDescriptor* field, + const EnumValueDescriptor* value) const; + Message* MutableMessage(Message* message, const FieldDescriptor* field, + MessageFactory* factory = NULL) const; + + int32 GetRepeatedInt32 (const Message& message, + const FieldDescriptor* field, int index) const; + int64 GetRepeatedInt64 (const Message& message, + const FieldDescriptor* field, int index) const; + uint32 GetRepeatedUInt32(const Message& message, + const FieldDescriptor* field, int index) const; + uint64 GetRepeatedUInt64(const Message& message, + const FieldDescriptor* field, int index) const; + float GetRepeatedFloat (const Message& message, + const FieldDescriptor* field, int index) const; + double GetRepeatedDouble(const Message& message, + const FieldDescriptor* field, int index) const; + bool GetRepeatedBool (const Message& message, + const FieldDescriptor* field, int index) const; + string GetRepeatedString(const Message& message, + const FieldDescriptor* field, int index) const; + const string& GetRepeatedStringReference(const Message& message, + const FieldDescriptor* field, + int index, string* scratch) const; + const EnumValueDescriptor* GetRepeatedEnum(const Message& message, + const FieldDescriptor* field, + int index) const; + const Message& GetRepeatedMessage(const Message& message, + const FieldDescriptor* field, + int index) const; + + // Set the value of a field. + void SetRepeatedInt32 (Message* message, + const FieldDescriptor* field, int index, int32 value) const; + void SetRepeatedInt64 (Message* message, + const FieldDescriptor* field, int index, int64 value) const; + void SetRepeatedUInt32(Message* message, + const FieldDescriptor* field, int index, uint32 value) const; + void SetRepeatedUInt64(Message* message, + const FieldDescriptor* field, int index, uint64 value) const; + void SetRepeatedFloat (Message* message, + const FieldDescriptor* field, int index, float value) const; + void SetRepeatedDouble(Message* message, + const FieldDescriptor* field, int index, double value) const; + void SetRepeatedBool (Message* message, + const FieldDescriptor* field, int index, bool value) const; + void SetRepeatedString(Message* message, + const FieldDescriptor* field, int index, + const string& value) const; + void SetRepeatedEnum(Message* message, const FieldDescriptor* field, + int index, const EnumValueDescriptor* value) const; + // Get a mutable pointer to a field with a message type. 
+ Message* MutableRepeatedMessage(Message* message, + const FieldDescriptor* field, + int index) const; + + void AddInt32 (Message* message, + const FieldDescriptor* field, int32 value) const; + void AddInt64 (Message* message, + const FieldDescriptor* field, int64 value) const; + void AddUInt32(Message* message, + const FieldDescriptor* field, uint32 value) const; + void AddUInt64(Message* message, + const FieldDescriptor* field, uint64 value) const; + void AddFloat (Message* message, + const FieldDescriptor* field, float value) const; + void AddDouble(Message* message, + const FieldDescriptor* field, double value) const; + void AddBool (Message* message, + const FieldDescriptor* field, bool value) const; + void AddString(Message* message, + const FieldDescriptor* field, const string& value) const; + void AddEnum(Message* message, + const FieldDescriptor* field, + const EnumValueDescriptor* value) const; + Message* AddMessage(Message* message, const FieldDescriptor* field, + MessageFactory* factory = NULL) const; + + const FieldDescriptor* FindKnownExtensionByName(const string& name) const; + const FieldDescriptor* FindKnownExtensionByNumber(int number) const; + + private: + friend class GeneratedMessage; + + const Descriptor* descriptor_; + const Message* default_instance_; + const int* offsets_; + + int has_bits_offset_; + int unknown_fields_offset_; + int extensions_offset_; + int object_size_; + + const DescriptorPool* descriptor_pool_; + MessageFactory* message_factory_; + + template + inline const Type& GetRaw(const Message& message, + const FieldDescriptor* field) const; + template + inline Type* MutableRaw(Message* message, + const FieldDescriptor* field) const; + template + inline const Type& DefaultRaw(const FieldDescriptor* field) const; + inline const Message* GetMessagePrototype(const FieldDescriptor* field) const; + + inline const uint32* GetHasBits(const Message& message) const; + inline uint32* MutableHasBits(Message* message) const; + inline const ExtensionSet& GetExtensionSet(const Message& message) const; + inline ExtensionSet* MutableExtensionSet(Message* message) const; + + inline bool HasBit(const Message& message, + const FieldDescriptor* field) const; + inline void SetBit(Message* message, + const FieldDescriptor* field) const; + inline void ClearBit(Message* message, + const FieldDescriptor* field) const; + + template + inline const Type& GetField(const Message& message, + const FieldDescriptor* field) const; + template + inline void SetField(Message* message, + const FieldDescriptor* field, const Type& value) const; + template + inline Type* MutableField(Message* message, + const FieldDescriptor* field) const; + template + inline const Type& GetRepeatedField(const Message& message, + const FieldDescriptor* field, + int index) const; + template + inline const Type& GetRepeatedPtrField(const Message& message, + const FieldDescriptor* field, + int index) const; + template + inline void SetRepeatedField(Message* message, + const FieldDescriptor* field, int index, + Type value) const; + template + inline Type* MutableRepeatedField(Message* message, + const FieldDescriptor* field, + int index) const; + template + inline void AddField(Message* message, + const FieldDescriptor* field, const Type& value) const; + template + inline Type* AddField(Message* message, + const FieldDescriptor* field) const; + + int GetExtensionNumberOrDie(const Descriptor* type) const; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(GeneratedMessageReflection); +}; + +// Returns the offset of the given 
field within the given aggregate type. +// This is equivalent to the ANSI C offsetof() macro. However, according +// to the C++ standard, offsetof() only works on POD types, and GCC +// enforces this requirement with a warning. In practice, this rule is +// unnecessarily strict; there is probably no compiler or platform on +// which the offsets of the direct fields of a class are non-constant. +// Fields inherited from superclasses *can* have non-constant offsets, +// but that's not what this macro will be used for. +// +// Note that we calculate relative to the pointer value 16 here since if we +// just use zero, GCC complains about dereferencing a NULL pointer. We +// choose 16 rather than some other number just in case the compiler would +// be confused by an unaligned pointer. +#define GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(TYPE, FIELD) \ + static_cast( \ + reinterpret_cast( \ + &reinterpret_cast(16)->FIELD) - \ + reinterpret_cast(16)) + +// There are some places in proto2 where dynamic_cast would be useful as an +// optimization. For example, take Message::MergeFrom(const Message& other). +// For a given generated message FooMessage, we generate these two methods: +// void MergeFrom(const FooMessage& other); +// void MergeFrom(const Message& other); +// The former method can be implemented directly in terms of FooMessage's +// inline accessors, but the latter method must work with the reflection +// interface. However, if the parameter to the latter method is actually of +// type FooMessage, then we'd like to be able to just call the other method +// as an optimization. So, we use dynamic_cast to check this. +// +// That said, dynamic_cast requires RTTI, which many people like to disable +// for performance and code size reasons. When RTTI is not available, we +// still need to produce correct results. So, in this case we have to fall +// back to using reflection, which is what we would have done anyway if the +// objects were not of the exact same class. +// +// dynamic_cast_if_available() implements this logic. If RTTI is +// enabled, it does a dynamic_cast. If RTTI is disabled, it just returns +// NULL. +// +// If you need to compile without RTTI, simply #define GOOGLE_PROTOBUF_NO_RTTI. +// On MSVC, this should be detected automatically. +template +inline To dynamic_cast_if_available(From from) { +#if defined(GOOGLE_PROTOBUF_NO_RTTI) || (defined(_MSC_VER)&&!defined(_CPPRTTI)) + return NULL; +#else + return dynamic_cast(from); +#endif +} + +// Helper for EnumType_Parse functions: try to parse the string 'name' as an +// enum name of the given type, returning true and filling in value on success, +// or returning false and leaving value unchanged on failure. +LIBPROTOBUF_EXPORT bool ParseNamedEnum(const EnumDescriptor* descriptor, + const string& name, + int* value); + +template +bool ParseNamedEnum(const EnumDescriptor* descriptor, + const string& name, + EnumType* value) { + int tmp; + if (!ParseNamedEnum(descriptor, name, &tmp)) return false; + *value = static_cast(tmp); + return true; +} + +// Just a wrapper around printing the name of a value. The main point of this +// function is not to be inlined, so that you can do this without including +// descriptor.h. 
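// ---------------------------------------------------------------------------
// [Editor's aside -- illustration only, not part of the vendored protobuf
// sources or of this patch.] The GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET
// macro defined above computes a member's byte offset the way offsetof()
// does, but through a non-null dummy pointer (16) to avoid GCC's POD-only
// offsetof warning. A hypothetical POD struct shows the two forms agreeing:
#include <cassert>
#include <cstddef>
#include <cstdint>

// Hypothetical layout, loosely following the convention described above.
struct ExampleMessage {
  uint32_t has_bits[1];
  int32_t optional_int32;
  const void* optional_message;  // singular message fields are stored as pointers
};

#define EXAMPLE_FIELD_OFFSET(TYPE, FIELD)                            \
  static_cast<int>(reinterpret_cast<const char*>(                   \
                       &reinterpret_cast<const TYPE*>(16)->FIELD) - \
                   reinterpret_cast<const char*>(16))

int main() {
  assert(EXAMPLE_FIELD_OFFSET(ExampleMessage, optional_int32) ==
         static_cast<int>(offsetof(ExampleMessage, optional_int32)));
  assert(EXAMPLE_FIELD_OFFSET(ExampleMessage, optional_message) ==
         static_cast<int>(offsetof(ExampleMessage, optional_message)));
  return 0;
}
// ---------------------------------------------------------------------------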
+LIBPROTOBUF_EXPORT const string& NameOfEnum(const EnumDescriptor* descriptor, int value); + +} // namespace internal +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_GENERATED_MESSAGE_REFLECTION_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/generated_message_reflection_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/generated_message_reflection_unittest.cc new file mode 100644 index 0000000000..a03bcdb7d0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/generated_message_reflection_unittest.cc @@ -0,0 +1,384 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// To test GeneratedMessageReflection, we actually let the protocol compiler +// generate a full protocol message implementation and then test its +// reflection interface. This is much easier and more maintainable than +// trying to create our own Message class for GeneratedMessageReflection +// to wrap. +// +// The tests here closely mirror some of the tests in +// compiler/cpp/unittest, except using the reflection interface +// rather than generated accessors. + +#include +#include +#include +#include + +#include +#include +#include + +namespace google { +namespace protobuf { + +namespace { + +// Shorthand to get a FieldDescriptor for a field of unittest::TestAllTypes. +const FieldDescriptor* F(const string& name) { + const FieldDescriptor* result = + unittest::TestAllTypes::descriptor()->FindFieldByName(name); + GOOGLE_CHECK(result != NULL); + return result; +} + +TEST(GeneratedMessageReflectionTest, Defaults) { + // Check that all default values are set correctly in the initial message. 
+ unittest::TestAllTypes message; + TestUtil::ReflectionTester reflection_tester( + unittest::TestAllTypes::descriptor()); + + reflection_tester.ExpectClearViaReflection(message); + + const Reflection* reflection = message.GetReflection(); + + // Messages should return pointers to default instances until first use. + // (This is not checked by ExpectClear() since it is not actually true after + // the fields have been set and then cleared.) + EXPECT_EQ(&unittest::TestAllTypes::OptionalGroup::default_instance(), + &reflection->GetMessage(message, F("optionalgroup"))); + EXPECT_EQ(&unittest::TestAllTypes::NestedMessage::default_instance(), + &reflection->GetMessage(message, F("optional_nested_message"))); + EXPECT_EQ(&unittest::ForeignMessage::default_instance(), + &reflection->GetMessage(message, F("optional_foreign_message"))); + EXPECT_EQ(&unittest_import::ImportMessage::default_instance(), + &reflection->GetMessage(message, F("optional_import_message"))); +} + +TEST(GeneratedMessageReflectionTest, Accessors) { + // Set every field to a unique value then go back and check all those + // values. + unittest::TestAllTypes message; + TestUtil::ReflectionTester reflection_tester( + unittest::TestAllTypes::descriptor()); + + reflection_tester.SetAllFieldsViaReflection(&message); + TestUtil::ExpectAllFieldsSet(message); + reflection_tester.ExpectAllFieldsSetViaReflection(message); + + reflection_tester.ModifyRepeatedFieldsViaReflection(&message); + TestUtil::ExpectRepeatedFieldsModified(message); +} + +TEST(GeneratedMessageReflectionTest, GetStringReference) { + // Test that GetStringReference() returns the underlying string when it is + // a normal string field. + unittest::TestAllTypes message; + message.set_optional_string("foo"); + message.add_repeated_string("foo"); + + const Reflection* reflection = message.GetReflection(); + string scratch; + + EXPECT_EQ(&message.optional_string(), + &reflection->GetStringReference(message, F("optional_string"), &scratch)) + << "For simple string fields, GetStringReference() should return a " + "reference to the underlying string."; + EXPECT_EQ(&message.repeated_string(0), + &reflection->GetRepeatedStringReference(message, F("repeated_string"), + 0, &scratch)) + << "For simple string fields, GetRepeatedStringReference() should return " + "a reference to the underlying string."; +} + + +TEST(GeneratedMessageReflectionTest, DefaultsAfterClear) { + // Check that after setting all fields and then clearing, getting an + // embedded message does NOT return the default instance. 
+ unittest::TestAllTypes message; + TestUtil::ReflectionTester reflection_tester( + unittest::TestAllTypes::descriptor()); + + TestUtil::SetAllFields(&message); + message.Clear(); + + const Reflection* reflection = message.GetReflection(); + + EXPECT_NE(&unittest::TestAllTypes::OptionalGroup::default_instance(), + &reflection->GetMessage(message, F("optionalgroup"))); + EXPECT_NE(&unittest::TestAllTypes::NestedMessage::default_instance(), + &reflection->GetMessage(message, F("optional_nested_message"))); + EXPECT_NE(&unittest::ForeignMessage::default_instance(), + &reflection->GetMessage(message, F("optional_foreign_message"))); + EXPECT_NE(&unittest_import::ImportMessage::default_instance(), + &reflection->GetMessage(message, F("optional_import_message"))); +} + + +TEST(GeneratedMessageReflectionTest, Swap) { + unittest::TestAllTypes message1; + unittest::TestAllTypes message2; + + TestUtil::SetAllFields(&message1); + + const Reflection* reflection = message1.GetReflection(); + reflection->Swap(&message1, &message2); + + TestUtil::ExpectClear(message1); + TestUtil::ExpectAllFieldsSet(message2); +} + +TEST(GeneratedMessageReflectionTest, SwapWithBothSet) { + unittest::TestAllTypes message1; + unittest::TestAllTypes message2; + + TestUtil::SetAllFields(&message1); + TestUtil::SetAllFields(&message2); + TestUtil::ModifyRepeatedFields(&message2); + + const Reflection* reflection = message1.GetReflection(); + reflection->Swap(&message1, &message2); + + TestUtil::ExpectRepeatedFieldsModified(message1); + TestUtil::ExpectAllFieldsSet(message2); + + message1.set_optional_int32(532819); + + reflection->Swap(&message1, &message2); + + EXPECT_EQ(532819, message2.optional_int32()); +} + +TEST(GeneratedMessageReflectionTest, SwapExtensions) { + unittest::TestAllExtensions message1; + unittest::TestAllExtensions message2; + + TestUtil::SetAllExtensions(&message1); + + const Reflection* reflection = message1.GetReflection(); + reflection->Swap(&message1, &message2); + + TestUtil::ExpectExtensionsClear(message1); + TestUtil::ExpectAllExtensionsSet(message2); +} + +TEST(GeneratedMessageReflectionTest, SwapUnknown) { + unittest::TestEmptyMessage message1, message2; + + message1.mutable_unknown_fields()->AddVarint(1234, 1); + + EXPECT_EQ(1, message1.unknown_fields().field_count()); + EXPECT_EQ(0, message2.unknown_fields().field_count()); + const Reflection* reflection = message1.GetReflection(); + reflection->Swap(&message1, &message2); + EXPECT_EQ(0, message1.unknown_fields().field_count()); + EXPECT_EQ(1, message2.unknown_fields().field_count()); +} + +TEST(GeneratedMessageReflectionTest, RemoveLast) { + unittest::TestAllTypes message; + TestUtil::ReflectionTester reflection_tester( + unittest::TestAllTypes::descriptor()); + + TestUtil::SetAllFields(&message); + + reflection_tester.RemoveLastRepeatedsViaReflection(&message); + + TestUtil::ExpectLastRepeatedsRemoved(message); +} + +TEST(GeneratedMessageReflectionTest, RemoveLastExtensions) { + unittest::TestAllExtensions message; + TestUtil::ReflectionTester reflection_tester( + unittest::TestAllExtensions::descriptor()); + + TestUtil::SetAllExtensions(&message); + reflection_tester.RemoveLastRepeatedsViaReflection(&message); + + TestUtil::ExpectLastRepeatedExtensionsRemoved(message); +} + +TEST(GeneratedMessageReflectionTest, SwapRepeatedElements) { + unittest::TestAllTypes message; + TestUtil::ReflectionTester reflection_tester( + unittest::TestAllTypes::descriptor()); + + TestUtil::SetAllFields(&message); + + // Swap and test that fields are all 
swapped. + reflection_tester.SwapRepeatedsViaReflection(&message); + TestUtil::ExpectRepeatedsSwapped(message); + + // Swap back and test that fields are all back to original values. + reflection_tester.SwapRepeatedsViaReflection(&message); + TestUtil::ExpectAllFieldsSet(message); +} + +TEST(GeneratedMessageReflectionTest, SwapRepeatedElementsExtension) { + unittest::TestAllExtensions message; + TestUtil::ReflectionTester reflection_tester( + unittest::TestAllExtensions::descriptor()); + + TestUtil::SetAllExtensions(&message); + + // Swap and test that fields are all swapped. + reflection_tester.SwapRepeatedsViaReflection(&message); + TestUtil::ExpectRepeatedExtensionsSwapped(message); + + // Swap back and test that fields are all back to original values. + reflection_tester.SwapRepeatedsViaReflection(&message); + TestUtil::ExpectAllExtensionsSet(message); +} + +TEST(GeneratedMessageReflectionTest, Extensions) { + // Set every extension to a unique value then go back and check all those + // values. + unittest::TestAllExtensions message; + TestUtil::ReflectionTester reflection_tester( + unittest::TestAllExtensions::descriptor()); + + reflection_tester.SetAllFieldsViaReflection(&message); + TestUtil::ExpectAllExtensionsSet(message); + reflection_tester.ExpectAllFieldsSetViaReflection(message); + + reflection_tester.ModifyRepeatedFieldsViaReflection(&message); + TestUtil::ExpectRepeatedExtensionsModified(message); +} + +TEST(GeneratedMessageReflectionTest, FindExtensionTypeByNumber) { + const Reflection* reflection = + unittest::TestAllExtensions::default_instance().GetReflection(); + + const FieldDescriptor* extension1 = + unittest::TestAllExtensions::descriptor()->file()->FindExtensionByName( + "optional_int32_extension"); + const FieldDescriptor* extension2 = + unittest::TestAllExtensions::descriptor()->file()->FindExtensionByName( + "repeated_string_extension"); + + EXPECT_EQ(extension1, + reflection->FindKnownExtensionByNumber(extension1->number())); + EXPECT_EQ(extension2, + reflection->FindKnownExtensionByNumber(extension2->number())); + + // Non-existent extension. + EXPECT_TRUE(reflection->FindKnownExtensionByNumber(62341) == NULL); + + // Extensions of TestAllExtensions should not show up as extensions of + // other types. + EXPECT_TRUE(unittest::TestAllTypes::default_instance().GetReflection()-> + FindKnownExtensionByNumber(extension1->number()) == NULL); +} + +TEST(GeneratedMessageReflectionTest, FindKnownExtensionByName) { + const Reflection* reflection = + unittest::TestAllExtensions::default_instance().GetReflection(); + + const FieldDescriptor* extension1 = + unittest::TestAllExtensions::descriptor()->file()->FindExtensionByName( + "optional_int32_extension"); + const FieldDescriptor* extension2 = + unittest::TestAllExtensions::descriptor()->file()->FindExtensionByName( + "repeated_string_extension"); + + EXPECT_EQ(extension1, + reflection->FindKnownExtensionByName(extension1->full_name())); + EXPECT_EQ(extension2, + reflection->FindKnownExtensionByName(extension2->full_name())); + + // Non-existent extension. + EXPECT_TRUE(reflection->FindKnownExtensionByName("no_such_ext") == NULL); + + // Extensions of TestAllExtensions should not show up as extensions of + // other types. 
+ EXPECT_TRUE(unittest::TestAllTypes::default_instance().GetReflection()-> + FindKnownExtensionByName(extension1->full_name()) == NULL); +} + +#ifdef GTEST_HAS_DEATH_TEST + +TEST(GeneratedMessageReflectionTest, UsageErrors) { + unittest::TestAllTypes message; + const Reflection* reflection = message.GetReflection(); + const Descriptor* descriptor = message.GetDescriptor(); + +#define f(NAME) descriptor->FindFieldByName(NAME) + + // Testing every single failure mode would be too much work. Let's just + // check a few. + EXPECT_DEATH( + reflection->GetInt32( + message, descriptor->FindFieldByName("optional_int64")), + "Protocol Buffer reflection usage error:\n" + " Method : google::protobuf::Reflection::GetInt32\n" + " Message type: protobuf_unittest\\.TestAllTypes\n" + " Field : protobuf_unittest\\.TestAllTypes\\.optional_int64\n" + " Problem : Field is not the right type for this message:\n" + " Expected : CPPTYPE_INT32\n" + " Field type: CPPTYPE_INT64"); + EXPECT_DEATH( + reflection->GetInt32( + message, descriptor->FindFieldByName("repeated_int32")), + "Protocol Buffer reflection usage error:\n" + " Method : google::protobuf::Reflection::GetInt32\n" + " Message type: protobuf_unittest.TestAllTypes\n" + " Field : protobuf_unittest.TestAllTypes.repeated_int32\n" + " Problem : Field is repeated; the method requires a singular field."); + EXPECT_DEATH( + reflection->GetInt32( + message, unittest::ForeignMessage::descriptor()->FindFieldByName("c")), + "Protocol Buffer reflection usage error:\n" + " Method : google::protobuf::Reflection::GetInt32\n" + " Message type: protobuf_unittest.TestAllTypes\n" + " Field : protobuf_unittest.ForeignMessage.c\n" + " Problem : Field does not match message type."); + EXPECT_DEATH( + reflection->HasField( + message, unittest::ForeignMessage::descriptor()->FindFieldByName("c")), + "Protocol Buffer reflection usage error:\n" + " Method : google::protobuf::Reflection::HasField\n" + " Message type: protobuf_unittest.TestAllTypes\n" + " Field : protobuf_unittest.ForeignMessage.c\n" + " Problem : Field does not match message type."); + +#undef f +} + +#endif // GTEST_HAS_DEATH_TEST + + +} // namespace +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/generated_message_util.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/generated_message_util.cc new file mode 100644 index 0000000000..76e547bb8d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/generated_message_util.cc @@ -0,0 +1,55 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include + +#include + +namespace google { +namespace protobuf { +namespace internal { + +double Infinity() { + return std::numeric_limits::infinity(); +} +double NaN() { + return std::numeric_limits::quiet_NaN(); +} + +const ::std::string kEmptyString; + + +} // namespace internal +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/generated_message_util.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/generated_message_util.h new file mode 100644 index 0000000000..77ae10640a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/generated_message_util.h @@ -0,0 +1,82 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
+// +// This file contains miscellaneous helper code used by generated code -- +// including lite types -- but which should not be used directly by users. + +#ifndef GOOGLE_PROTOBUF_GENERATED_MESSAGE_UTIL_H__ +#define GOOGLE_PROTOBUF_GENERATED_MESSAGE_UTIL_H__ + +#include + +#include + + +namespace google { +namespace protobuf { + namespace io { + class CodedInputStream; // coded_stream.h + } +} + +namespace protobuf { +namespace internal { + +// Annotation for the compiler to emit a deprecation message if a field marked +// with option 'deprecated=true' is used in the code, or for other things in +// generated code which are deprecated. +// +// For internal use in the pb.cc files, deprecation warnings are suppressed +// there. +#undef DEPRECATED_PROTOBUF_FIELD +#if !defined(INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION) +# define PROTOBUF_DEPRECATED GOOGLE_ATTRIBUTE_DEPRECATED +#else +# define PROTOBUF_DEPRECATED +#endif + + +// Constants for special floating point values. +LIBPROTOBUF_EXPORT double Infinity(); +LIBPROTOBUF_EXPORT double NaN(); + +// Constant used for empty default strings. +LIBPROTOBUF_EXPORT extern const ::std::string kEmptyString; + + +} // namespace internal +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_GENERATED_MESSAGE_UTIL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/coded_stream.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/coded_stream.cc new file mode 100644 index 0000000000..57d486f958 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/coded_stream.cc @@ -0,0 +1,839 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
+// +// This implementation is heavily optimized to make reads and writes +// of small values (especially varints) as fast as possible. In +// particular, we optimize for the common case that a read or a write +// will not cross the end of the buffer, since we can avoid a lot +// of branching in this case. + +#include +#include +#include +#include +#include +#include + + +namespace google { +namespace protobuf { +namespace io { + +namespace { + +static const int kMaxVarintBytes = 10; +static const int kMaxVarint32Bytes = 5; + + +inline bool NextNonEmpty(ZeroCopyInputStream* input, + const void** data, int* size) { + bool success; + do { + success = input->Next(data, size); + } while (success && *size == 0); + return success; +} + +} // namespace + +// CodedInputStream ================================================== + + +void CodedInputStream::BackUpInputToCurrentPosition() { + int backup_bytes = BufferSize() + buffer_size_after_limit_ + overflow_bytes_; + if (backup_bytes > 0) { + input_->BackUp(backup_bytes); + + // total_bytes_read_ doesn't include overflow_bytes_. + total_bytes_read_ -= BufferSize() + buffer_size_after_limit_; + buffer_end_ = buffer_; + buffer_size_after_limit_ = 0; + overflow_bytes_ = 0; + } +} + +inline void CodedInputStream::RecomputeBufferLimits() { + buffer_end_ += buffer_size_after_limit_; + int closest_limit = min(current_limit_, total_bytes_limit_); + if (closest_limit < total_bytes_read_) { + // The limit position is in the current buffer. We must adjust + // the buffer size accordingly. + buffer_size_after_limit_ = total_bytes_read_ - closest_limit; + buffer_end_ -= buffer_size_after_limit_; + } else { + buffer_size_after_limit_ = 0; + } +} + +CodedInputStream::Limit CodedInputStream::PushLimit(int byte_limit) { + // Current position relative to the beginning of the stream. + int current_position = total_bytes_read_ - + (BufferSize() + buffer_size_after_limit_); + + Limit old_limit = current_limit_; + + // security: byte_limit is possibly evil, so check for negative values + // and overflow. + if (byte_limit >= 0 && + byte_limit <= INT_MAX - current_position) { + current_limit_ = current_position + byte_limit; + } else { + // Negative or overflow. + current_limit_ = INT_MAX; + } + + // We need to enforce all limits, not just the new one, so if the previous + // limit was before the new requested limit, we continue to enforce the + // previous limit. + current_limit_ = min(current_limit_, old_limit); + + RecomputeBufferLimits(); + return old_limit; +} + +void CodedInputStream::PopLimit(Limit limit) { + // The limit passed in is actually the *old* limit, which we returned from + // PushLimit(). + current_limit_ = limit; + RecomputeBufferLimits(); + + // We may no longer be at a legitimate message end. ReadTag() needs to be + // called again to find out. + legitimate_message_end_ = false; +} + +int CodedInputStream::BytesUntilLimit() { + if (current_limit_ == INT_MAX) return -1; + int current_position = total_bytes_read_ - + (BufferSize() + buffer_size_after_limit_); + + return current_limit_ - current_position; +} + +void CodedInputStream::SetTotalBytesLimit( + int total_bytes_limit, int warning_threshold) { + // Make sure the limit isn't already past, since this could confuse other + // code. 
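// ---------------------------------------------------------------------------
// [Editor's aside -- illustration only, not part of the vendored protobuf
// sources or of this patch.] PushLimit() above clamps the requested limit
// against whatever limit is already in force
// (current_limit_ = min(current_limit_, old_limit)), so nested limits can
// only shrink the readable region, and PopLimit() restores the previous one.
// A tiny model of that rule with hypothetical names (the real code's
// negative/overflow checks are omitted here):
#include <algorithm>
#include <cassert>
#include <climits>

struct ExampleLimitStack {  // hypothetical, not a protobuf type
  int current_limit = INT_MAX;

  // Returns the previous limit so the caller can restore it later.
  int Push(int current_position, int byte_limit) {
    int old_limit = current_limit;
    current_limit = std::min(current_position + byte_limit, old_limit);
    return old_limit;
  }
  void Pop(int old_limit) { current_limit = old_limit; }
};

int main() {
  ExampleLimitStack s;
  int outer = s.Push(/*current_position=*/0, /*byte_limit=*/100);   // limit 100
  int inner = s.Push(/*current_position=*/40, /*byte_limit=*/500);  // asks for 540
  assert(s.current_limit == 100);  // ...but stays clamped to the outer limit
  s.Pop(inner);
  assert(s.current_limit == 100);
  s.Pop(outer);
  assert(s.current_limit == INT_MAX);
  return 0;
}
// ---------------------------------------------------------------------------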
+ int current_position = total_bytes_read_ - + (BufferSize() + buffer_size_after_limit_); + total_bytes_limit_ = max(current_position, total_bytes_limit); + total_bytes_warning_threshold_ = warning_threshold; + RecomputeBufferLimits(); +} + +void CodedInputStream::PrintTotalBytesLimitError() { + GOOGLE_LOG(ERROR) << "A protocol message was rejected because it was too " + "big (more than " << total_bytes_limit_ + << " bytes). To increase the limit (or to disable these " + "warnings), see CodedInputStream::SetTotalBytesLimit() " + "in google/protobuf/io/coded_stream.h."; +} + +bool CodedInputStream::Skip(int count) { + if (count < 0) return false; // security: count is often user-supplied + + const int original_buffer_size = BufferSize(); + + if (count <= original_buffer_size) { + // Just skipping within the current buffer. Easy. + Advance(count); + return true; + } + + if (buffer_size_after_limit_ > 0) { + // We hit a limit inside this buffer. Advance to the limit and fail. + Advance(original_buffer_size); + return false; + } + + count -= original_buffer_size; + buffer_ = NULL; + buffer_end_ = buffer_; + + // Make sure this skip doesn't try to skip past the current limit. + int closest_limit = min(current_limit_, total_bytes_limit_); + int bytes_until_limit = closest_limit - total_bytes_read_; + if (bytes_until_limit < count) { + // We hit the limit. Skip up to it then fail. + if (bytes_until_limit > 0) { + total_bytes_read_ = closest_limit; + input_->Skip(bytes_until_limit); + } + return false; + } + + total_bytes_read_ += count; + return input_->Skip(count); +} + +bool CodedInputStream::GetDirectBufferPointer(const void** data, int* size) { + if (BufferSize() == 0 && !Refresh()) return false; + + *data = buffer_; + *size = BufferSize(); + return true; +} + +bool CodedInputStream::ReadRaw(void* buffer, int size) { + int current_buffer_size; + while ((current_buffer_size = BufferSize()) < size) { + // Reading past end of buffer. Copy what we have, then refresh. + memcpy(buffer, buffer_, current_buffer_size); + buffer = reinterpret_cast(buffer) + current_buffer_size; + size -= current_buffer_size; + Advance(current_buffer_size); + if (!Refresh()) return false; + } + + memcpy(buffer, buffer_, size); + Advance(size); + + return true; +} + +bool CodedInputStream::ReadString(string* buffer, int size) { + if (size < 0) return false; // security: size is often user-supplied + return InternalReadStringInline(buffer, size); +} + +bool CodedInputStream::ReadStringFallback(string* buffer, int size) { + if (!buffer->empty()) { + buffer->clear(); + } + + int current_buffer_size; + while ((current_buffer_size = BufferSize()) < size) { + // Some STL implementations "helpfully" crash on buffer->append(NULL, 0). + if (current_buffer_size != 0) { + // Note: string1.append(string2) is O(string2.size()) (as opposed to + // O(string1.size() + string2.size()), which would be bad). + buffer->append(reinterpret_cast(buffer_), + current_buffer_size); + } + size -= current_buffer_size; + Advance(current_buffer_size); + if (!Refresh()) return false; + } + + buffer->append(reinterpret_cast(buffer_), size); + Advance(size); + + return true; +} + + +bool CodedInputStream::ReadLittleEndian32Fallback(uint32* value) { + uint8 bytes[sizeof(*value)]; + + const uint8* ptr; + if (BufferSize() >= sizeof(*value)) { + // Fast path: Enough bytes in the buffer to read directly. + ptr = buffer_; + Advance(sizeof(*value)); + } else { + // Slow path: Had to read past the end of the buffer. 
+ if (!ReadRaw(bytes, sizeof(*value))) return false; + ptr = bytes; + } + ReadLittleEndian32FromArray(ptr, value); + return true; +} + +bool CodedInputStream::ReadLittleEndian64Fallback(uint64* value) { + uint8 bytes[sizeof(*value)]; + + const uint8* ptr; + if (BufferSize() >= sizeof(*value)) { + // Fast path: Enough bytes in the buffer to read directly. + ptr = buffer_; + Advance(sizeof(*value)); + } else { + // Slow path: Had to read past the end of the buffer. + if (!ReadRaw(bytes, sizeof(*value))) return false; + ptr = bytes; + } + ReadLittleEndian64FromArray(ptr, value); + return true; +} + +namespace { + +inline const uint8* ReadVarint32FromArray( + const uint8* buffer, uint32* value) GOOGLE_ATTRIBUTE_ALWAYS_INLINE; +inline const uint8* ReadVarint32FromArray(const uint8* buffer, uint32* value) { + // Fast path: We have enough bytes left in the buffer to guarantee that + // this read won't cross the end, so we can skip the checks. + const uint8* ptr = buffer; + uint32 b; + uint32 result; + + b = *(ptr++); result = (b & 0x7F) ; if (!(b & 0x80)) goto done; + b = *(ptr++); result |= (b & 0x7F) << 7; if (!(b & 0x80)) goto done; + b = *(ptr++); result |= (b & 0x7F) << 14; if (!(b & 0x80)) goto done; + b = *(ptr++); result |= (b & 0x7F) << 21; if (!(b & 0x80)) goto done; + b = *(ptr++); result |= b << 28; if (!(b & 0x80)) goto done; + + // If the input is larger than 32 bits, we still need to read it all + // and discard the high-order bits. + for (int i = 0; i < kMaxVarintBytes - kMaxVarint32Bytes; i++) { + b = *(ptr++); if (!(b & 0x80)) goto done; + } + + // We have overrun the maximum size of a varint (10 bytes). Assume + // the data is corrupt. + return NULL; + + done: + *value = result; + return ptr; +} + +} // namespace + +bool CodedInputStream::ReadVarint32Slow(uint32* value) { + uint64 result; + // Directly invoke ReadVarint64Fallback, since we already tried to optimize + // for one-byte varints. + if (!ReadVarint64Fallback(&result)) return false; + *value = (uint32)result; + return true; +} + +bool CodedInputStream::ReadVarint32Fallback(uint32* value) { + if (BufferSize() >= kMaxVarintBytes || + // Optimization: If the varint ends at exactly the end of the buffer, + // we can detect that and still use the fast path. + (buffer_end_ > buffer_ && !(buffer_end_[-1] & 0x80))) { + const uint8* end = ReadVarint32FromArray(buffer_, value); + if (end == NULL) return false; + buffer_ = end; + return true; + } else { + // Really slow case: we will incur the cost of an extra function call here, + // but moving this out of line reduces the size of this function, which + // improves the common case. In micro benchmarks, this is worth about 10-15% + return ReadVarint32Slow(value); + } +} + +uint32 CodedInputStream::ReadTagSlow() { + if (buffer_ == buffer_end_) { + // Call refresh. + if (!Refresh()) { + // Refresh failed. Make sure that it failed due to EOF, not because + // we hit total_bytes_limit_, which, unlike normal limits, is not a + // valid place to end a message. + int current_position = total_bytes_read_ - buffer_size_after_limit_; + if (current_position >= total_bytes_limit_) { + // Hit total_bytes_limit_. But if we also hit the normal limit, + // we're still OK. + legitimate_message_end_ = current_limit_ == total_bytes_limit_; + } else { + legitimate_message_end_ = true; + } + return 0; + } + } + + // For the slow path, just do a 64-bit read. Try to optimize for one-byte tags + // again, since we have now refreshed the buffer. 
+ uint64 result; + if (!ReadVarint64(&result)) return 0; + return static_cast(result); +} + +uint32 CodedInputStream::ReadTagFallback() { + if (BufferSize() >= kMaxVarintBytes || + // Optimization: If the varint ends at exactly the end of the buffer, + // we can detect that and still use the fast path. + (buffer_end_ > buffer_ && !(buffer_end_[-1] & 0x80))) { + uint32 tag; + const uint8* end = ReadVarint32FromArray(buffer_, &tag); + if (end == NULL) { + return 0; + } + buffer_ = end; + return tag; + } else { + // We are commonly at a limit when attempting to read tags. Try to quickly + // detect this case without making another function call. + if (buffer_ == buffer_end_ && buffer_size_after_limit_ > 0 && + // Make sure that the limit we hit is not total_bytes_limit_, since + // in that case we still need to call Refresh() so that it prints an + // error. + total_bytes_read_ - buffer_size_after_limit_ < total_bytes_limit_) { + // We hit a byte limit. + legitimate_message_end_ = true; + return 0; + } + return ReadTagSlow(); + } +} + +bool CodedInputStream::ReadVarint64Slow(uint64* value) { + // Slow path: This read might cross the end of the buffer, so we + // need to check and refresh the buffer if and when it does. + + uint64 result = 0; + int count = 0; + uint32 b; + + do { + if (count == kMaxVarintBytes) return false; + while (buffer_ == buffer_end_) { + if (!Refresh()) return false; + } + b = *buffer_; + result |= static_cast(b & 0x7F) << (7 * count); + Advance(1); + ++count; + } while (b & 0x80); + + *value = result; + return true; +} + +bool CodedInputStream::ReadVarint64Fallback(uint64* value) { + if (BufferSize() >= kMaxVarintBytes || + // Optimization: If the varint ends at exactly the end of the buffer, + // we can detect that and still use the fast path. + (buffer_end_ > buffer_ && !(buffer_end_[-1] & 0x80))) { + // Fast path: We have enough bytes left in the buffer to guarantee that + // this read won't cross the end, so we can skip the checks. + + const uint8* ptr = buffer_; + uint32 b; + + // Splitting into 32-bit pieces gives better performance on 32-bit + // processors. + uint32 part0 = 0, part1 = 0, part2 = 0; + + b = *(ptr++); part0 = (b & 0x7F) ; if (!(b & 0x80)) goto done; + b = *(ptr++); part0 |= (b & 0x7F) << 7; if (!(b & 0x80)) goto done; + b = *(ptr++); part0 |= (b & 0x7F) << 14; if (!(b & 0x80)) goto done; + b = *(ptr++); part0 |= (b & 0x7F) << 21; if (!(b & 0x80)) goto done; + b = *(ptr++); part1 = (b & 0x7F) ; if (!(b & 0x80)) goto done; + b = *(ptr++); part1 |= (b & 0x7F) << 7; if (!(b & 0x80)) goto done; + b = *(ptr++); part1 |= (b & 0x7F) << 14; if (!(b & 0x80)) goto done; + b = *(ptr++); part1 |= (b & 0x7F) << 21; if (!(b & 0x80)) goto done; + b = *(ptr++); part2 = (b & 0x7F) ; if (!(b & 0x80)) goto done; + b = *(ptr++); part2 |= (b & 0x7F) << 7; if (!(b & 0x80)) goto done; + + // We have overrun the maximum size of a varint (10 bytes). The data + // must be corrupt. + return NULL; + + done: + Advance(ptr - buffer_); + *value = (static_cast(part0) ) | + (static_cast(part1) << 28) | + (static_cast(part2) << 56); + return true; + } else { + return ReadVarint64Slow(value); + } +} + +bool CodedInputStream::Refresh() { + GOOGLE_DCHECK_EQ(0, BufferSize()); + + if (buffer_size_after_limit_ > 0 || overflow_bytes_ > 0 || + total_bytes_read_ == current_limit_) { + // We've hit a limit. Stop. 
+ int current_position = total_bytes_read_ - buffer_size_after_limit_; + + if (current_position >= total_bytes_limit_ && + total_bytes_limit_ != current_limit_) { + // Hit total_bytes_limit_. + PrintTotalBytesLimitError(); + } + + return false; + } + + if (total_bytes_warning_threshold_ >= 0 && + total_bytes_read_ >= total_bytes_warning_threshold_) { + GOOGLE_LOG(WARNING) << "Reading dangerously large protocol message. If the " + "message turns out to be larger than " + << total_bytes_limit_ << " bytes, parsing will be halted " + "for security reasons. To increase the limit (or to " + "disable these warnings), see " + "CodedInputStream::SetTotalBytesLimit() in " + "google/protobuf/io/coded_stream.h."; + + // Don't warn again for this stream. + total_bytes_warning_threshold_ = -1; + } + + const void* void_buffer; + int buffer_size; + if (NextNonEmpty(input_, &void_buffer, &buffer_size)) { + buffer_ = reinterpret_cast(void_buffer); + buffer_end_ = buffer_ + buffer_size; + GOOGLE_CHECK_GE(buffer_size, 0); + + if (total_bytes_read_ <= INT_MAX - buffer_size) { + total_bytes_read_ += buffer_size; + } else { + // Overflow. Reset buffer_end_ to not include the bytes beyond INT_MAX. + // We can't get that far anyway, because total_bytes_limit_ is guaranteed + // to be less than it. We need to keep track of the number of bytes + // we discarded, though, so that we can call input_->BackUp() to back + // up over them on destruction. + + // The following line is equivalent to: + // overflow_bytes_ = total_bytes_read_ + buffer_size - INT_MAX; + // except that it avoids overflows. Signed integer overflow has + // undefined results according to the C standard. + overflow_bytes_ = total_bytes_read_ - (INT_MAX - buffer_size); + buffer_end_ -= overflow_bytes_; + total_bytes_read_ = INT_MAX; + } + + RecomputeBufferLimits(); + return true; + } else { + buffer_ = NULL; + buffer_end_ = NULL; + return false; + } +} + +// CodedOutputStream ================================================= + +CodedOutputStream::CodedOutputStream(ZeroCopyOutputStream* output) + : output_(output), + buffer_(NULL), + buffer_size_(0), + total_bytes_(0), + had_error_(false) { + // Eagerly Refresh() so buffer space is immediately available. + Refresh(); + // The Refresh() may have failed. If the client doesn't write any data, + // though, don't consider this an error. If the client does write data, then + // another Refresh() will be attempted and it will set the error once again. 
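// Sketch (hypothetical values, assumed helper name) of the overflow-safe
// bookkeeping in Refresh() above: writing the excess as
// total_bytes_read_ - (INT_MAX - buffer_size) avoids evaluating
// total_bytes_read_ + buffer_size, which could overflow a signed int.
inline int ExcessBeyondIntMax(int total_bytes_read, int buffer_size) {
  const int kIntMax = 2147483647;   // INT_MAX, spelled out to stay standalone
  return total_bytes_read - (kIntMax - buffer_size);
}
// Example: total_bytes_read = INT_MAX - 10, buffer_size = 25  ->  excess = 15.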
+ had_error_ = false; +} + +CodedOutputStream::~CodedOutputStream() { + if (buffer_size_ > 0) { + output_->BackUp(buffer_size_); + } +} + +bool CodedOutputStream::Skip(int count) { + if (count < 0) return false; + + while (count > buffer_size_) { + count -= buffer_size_; + if (!Refresh()) return false; + } + + Advance(count); + return true; +} + +bool CodedOutputStream::GetDirectBufferPointer(void** data, int* size) { + if (buffer_size_ == 0 && !Refresh()) return false; + + *data = buffer_; + *size = buffer_size_; + return true; +} + +void CodedOutputStream::WriteRaw(const void* data, int size) { + while (buffer_size_ < size) { + memcpy(buffer_, data, buffer_size_); + size -= buffer_size_; + data = reinterpret_cast(data) + buffer_size_; + if (!Refresh()) return; + } + + memcpy(buffer_, data, size); + Advance(size); +} + +uint8* CodedOutputStream::WriteRawToArray( + const void* data, int size, uint8* target) { + memcpy(target, data, size); + return target + size; +} + + +void CodedOutputStream::WriteLittleEndian32(uint32 value) { + uint8 bytes[sizeof(value)]; + + bool use_fast = buffer_size_ >= sizeof(value); + uint8* ptr = use_fast ? buffer_ : bytes; + + WriteLittleEndian32ToArray(value, ptr); + + if (use_fast) { + Advance(sizeof(value)); + } else { + WriteRaw(bytes, sizeof(value)); + } +} + +void CodedOutputStream::WriteLittleEndian64(uint64 value) { + uint8 bytes[sizeof(value)]; + + bool use_fast = buffer_size_ >= sizeof(value); + uint8* ptr = use_fast ? buffer_ : bytes; + + WriteLittleEndian64ToArray(value, ptr); + + if (use_fast) { + Advance(sizeof(value)); + } else { + WriteRaw(bytes, sizeof(value)); + } +} + +inline uint8* CodedOutputStream::WriteVarint32FallbackToArrayInline( + uint32 value, uint8* target) { + target[0] = static_cast(value | 0x80); + if (value >= (1 << 7)) { + target[1] = static_cast((value >> 7) | 0x80); + if (value >= (1 << 14)) { + target[2] = static_cast((value >> 14) | 0x80); + if (value >= (1 << 21)) { + target[3] = static_cast((value >> 21) | 0x80); + if (value >= (1 << 28)) { + target[4] = static_cast(value >> 28); + return target + 5; + } else { + target[3] &= 0x7F; + return target + 4; + } + } else { + target[2] &= 0x7F; + return target + 3; + } + } else { + target[1] &= 0x7F; + return target + 2; + } + } else { + target[0] &= 0x7F; + return target + 1; + } +} + +void CodedOutputStream::WriteVarint32(uint32 value) { + if (buffer_size_ >= kMaxVarint32Bytes) { + // Fast path: We have enough bytes left in the buffer to guarantee that + // this write won't cross the end, so we can skip the checks. + uint8* target = buffer_; + uint8* end = WriteVarint32FallbackToArrayInline(value, target); + int size = end - target; + Advance(size); + } else { + // Slow path: This write might cross the end of the buffer, so we + // compose the bytes first then use WriteRaw(). + uint8 bytes[kMaxVarint32Bytes]; + int size = 0; + while (value > 0x7F) { + bytes[size++] = (static_cast(value) & 0x7F) | 0x80; + value >>= 7; + } + bytes[size++] = static_cast(value) & 0x7F; + WriteRaw(bytes, size); + } +} + +uint8* CodedOutputStream::WriteVarint32FallbackToArray( + uint32 value, uint8* target) { + return WriteVarint32FallbackToArrayInline(value, target); +} + +inline uint8* CodedOutputStream::WriteVarint64ToArrayInline( + uint64 value, uint8* target) { + // Splitting into 32-bit pieces gives better performance on 32-bit + // processors. 
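// Minimal sketch (assumed helper name, not part of the vendored file) of the
// byte order produced by WriteLittleEndian32() above: least-significant byte
// first, independent of host endianness.
inline void PutLittleEndian32(unsigned value, unsigned char out[4]) {
  out[0] = static_cast<unsigned char>(value);
  out[1] = static_cast<unsigned char>(value >> 8);
  out[2] = static_cast<unsigned char>(value >> 16);
  out[3] = static_cast<unsigned char>(value >> 24);
}
// Example: 0x12345678 is written as the bytes 78 56 34 12.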
+  uint32 part0 = static_cast<uint32>(value      );
+  uint32 part1 = static_cast<uint32>(value >> 28);
+  uint32 part2 = static_cast<uint32>(value >> 56);
+
+  int size;
+
+  // Here we can't really optimize for small numbers, since the value is
+  // split into three parts.  Cheking for numbers < 128, for instance,
+  // would require three comparisons, since you'd have to make sure part1
+  // and part2 are zero.  However, if the caller is using 64-bit integers,
+  // it is likely that they expect the numbers to often be very large, so
+  // we probably don't want to optimize for small numbers anyway.  Thus,
+  // we end up with a hardcoded binary search tree...
+  if (part2 == 0) {
+    if (part1 == 0) {
+      if (part0 < (1 << 14)) {
+        if (part0 < (1 << 7)) {
+          size = 1; goto size1;
+        } else {
+          size = 2; goto size2;
+        }
+      } else {
+        if (part0 < (1 << 21)) {
+          size = 3; goto size3;
+        } else {
+          size = 4; goto size4;
+        }
+      }
+    } else {
+      if (part1 < (1 << 14)) {
+        if (part1 < (1 << 7)) {
+          size = 5; goto size5;
+        } else {
+          size = 6; goto size6;
+        }
+      } else {
+        if (part1 < (1 << 21)) {
+          size = 7; goto size7;
+        } else {
+          size = 8; goto size8;
+        }
+      }
+    }
+  } else {
+    if (part2 < (1 << 7)) {
+      size = 9; goto size9;
+    } else {
+      size = 10; goto size10;
+    }
+  }
+
+  GOOGLE_LOG(FATAL) << "Can't get here.";
+
+  size10: target[9] = static_cast<uint8>((part2 >>  7) | 0x80);
+  size9 : target[8] = static_cast<uint8>((part2      ) | 0x80);
+  size8 : target[7] = static_cast<uint8>((part1 >> 21) | 0x80);
+  size7 : target[6] = static_cast<uint8>((part1 >> 14) | 0x80);
+  size6 : target[5] = static_cast<uint8>((part1 >>  7) | 0x80);
+  size5 : target[4] = static_cast<uint8>((part1      ) | 0x80);
+  size4 : target[3] = static_cast<uint8>((part0 >> 21) | 0x80);
+  size3 : target[2] = static_cast<uint8>((part0 >> 14) | 0x80);
+  size2 : target[1] = static_cast<uint8>((part0 >>  7) | 0x80);
+  size1 : target[0] = static_cast<uint8>((part0      ) | 0x80);
+
+  target[size-1] &= 0x7F;
+  return target + size;
+}
+
+void CodedOutputStream::WriteVarint64(uint64 value) {
+  if (buffer_size_ >= kMaxVarintBytes) {
+    // Fast path:  We have enough bytes left in the buffer to guarantee that
+    // this write won't cross the end, so we can skip the checks.
+    uint8* target = buffer_;
+
+    uint8* end = WriteVarint64ToArrayInline(value, target);
+    int size = end - target;
+    Advance(size);
+  } else {
+    // Slow path:  This write might cross the end of the buffer, so we
+    // compose the bytes first then use WriteRaw().
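// Standalone sketch (not part of the vendored file; the helper name is an
// assumption) of the byte-at-a-time encoding used by the slow paths above
// and below: emit 7 bits per byte, setting 0x80 while more bits remain.
inline int EncodeVarint64(unsigned long long value, unsigned char* out) {
  int size = 0;
  while (value > 0x7F) {
    out[size++] = static_cast<unsigned char>(value & 0x7F) | 0x80;  // more to come
    value >>= 7;
  }
  out[size++] = static_cast<unsigned char>(value);  // final byte, bit 0x80 clear
  return size;
}
// Example: EncodeVarint64(300, buf) stores the bytes AC 02 and returns 2.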
+ uint8 bytes[kMaxVarintBytes]; + int size = 0; + while (value > 0x7F) { + bytes[size++] = (static_cast(value) & 0x7F) | 0x80; + value >>= 7; + } + bytes[size++] = static_cast(value) & 0x7F; + WriteRaw(bytes, size); + } +} + +uint8* CodedOutputStream::WriteVarint64ToArray( + uint64 value, uint8* target) { + return WriteVarint64ToArrayInline(value, target); +} + +bool CodedOutputStream::Refresh() { + void* void_buffer; + if (output_->Next(&void_buffer, &buffer_size_)) { + buffer_ = reinterpret_cast(void_buffer); + total_bytes_ += buffer_size_; + return true; + } else { + buffer_ = NULL; + buffer_size_ = 0; + had_error_ = true; + return false; + } +} + +int CodedOutputStream::VarintSize32Fallback(uint32 value) { + if (value < (1 << 7)) { + return 1; + } else if (value < (1 << 14)) { + return 2; + } else if (value < (1 << 21)) { + return 3; + } else if (value < (1 << 28)) { + return 4; + } else { + return 5; + } +} + +int CodedOutputStream::VarintSize64(uint64 value) { + if (value < (1ull << 35)) { + if (value < (1ull << 7)) { + return 1; + } else if (value < (1ull << 14)) { + return 2; + } else if (value < (1ull << 21)) { + return 3; + } else if (value < (1ull << 28)) { + return 4; + } else { + return 5; + } + } else { + if (value < (1ull << 42)) { + return 6; + } else if (value < (1ull << 49)) { + return 7; + } else if (value < (1ull << 56)) { + return 8; + } else if (value < (1ull << 63)) { + return 9; + } else { + return 10; + } + } +} + +} // namespace io +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/coded_stream.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/coded_stream.h new file mode 100644 index 0000000000..1b6b4e18b4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/coded_stream.h @@ -0,0 +1,1102 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This file contains the CodedInputStream and CodedOutputStream classes, +// which wrap a ZeroCopyInputStream or ZeroCopyOutputStream, respectively, +// and allow you to read or write individual pieces of data in various +// formats. In particular, these implement the varint encoding for +// integers, a simple variable-length encoding in which smaller numbers +// take fewer bytes. +// +// Typically these classes will only be used internally by the protocol +// buffer library in order to encode and decode protocol buffers. Clients +// of the library only need to know about this class if they wish to write +// custom message parsing or serialization procedures. +// +// CodedOutputStream example: +// // Write some data to "myfile". First we write a 4-byte "magic number" +// // to identify the file type, then write a length-delimited string. The +// // string is composed of a varint giving the length followed by the raw +// // bytes. +// int fd = open("myfile", O_WRONLY); +// ZeroCopyOutputStream* raw_output = new FileOutputStream(fd); +// CodedOutputStream* coded_output = new CodedOutputStream(raw_output); +// +// int magic_number = 1234; +// char text[] = "Hello world!"; +// coded_output->WriteLittleEndian32(magic_number); +// coded_output->WriteVarint32(strlen(text)); +// coded_output->WriteRaw(text, strlen(text)); +// +// delete coded_output; +// delete raw_output; +// close(fd); +// +// CodedInputStream example: +// // Read a file created by the above code. +// int fd = open("myfile", O_RDONLY); +// ZeroCopyInputStream* raw_input = new FileInputStream(fd); +// CodedInputStream coded_input = new CodedInputStream(raw_input); +// +// coded_input->ReadLittleEndian32(&magic_number); +// if (magic_number != 1234) { +// cerr << "File not in expected format." << endl; +// return; +// } +// +// uint32 size; +// coded_input->ReadVarint32(&size); +// +// char* text = new char[size + 1]; +// coded_input->ReadRaw(buffer, size); +// text[size] = '\0'; +// +// delete coded_input; +// delete raw_input; +// close(fd); +// +// cout << "Text is: " << text << endl; +// delete [] text; +// +// For those who are interested, varint encoding is defined as follows: +// +// The encoding operates on unsigned integers of up to 64 bits in length. +// Each byte of the encoded value has the format: +// * bits 0-6: Seven bits of the number being encoded. +// * bit 7: Zero if this is the last byte in the encoding (in which +// case all remaining bits of the number are zero) or 1 if +// more bytes follow. +// The first byte contains the least-significant 7 bits of the number, the +// second byte (if present) contains the next-least-significant 7 bits, +// and so on. So, the binary number 1011000101011 would be encoded in two +// bytes as "10101011 00101100". +// +// In theory, varint could be used to encode integers of any length. 
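// Self-contained restatement of the write/read example in the comment above,
// using in-memory array streams instead of file descriptors (assumed to be
// available from zero_copy_stream_impl.h); the buffer size and text are
// arbitrary.  Note that `coded_input` in the comment above should be declared
// as a pointer (or a stack object) rather than a plain value assigned from new.
#include <string.h>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>

inline bool CodedStreamRoundTripExample() {
  using google::protobuf::uint32;
  using namespace google::protobuf::io;

  unsigned char storage[64];
  const char text[] = "Hello world!";

  {
    ArrayOutputStream raw_output(storage, sizeof(storage));
    CodedOutputStream coded_output(&raw_output);
    coded_output.WriteLittleEndian32(1234);      // 4-byte "magic number"
    coded_output.WriteVarint32(strlen(text));    // length prefix
    coded_output.WriteRaw(text, strlen(text));   // raw bytes
    if (coded_output.HadError()) return false;
  }  // destructor backs the stream up to just past the last byte written

  ArrayInputStream raw_input(storage, sizeof(storage));
  CodedInputStream coded_input(&raw_input);
  uint32 magic = 0, size = 0;
  char read_back[64] = {0};
  return coded_input.ReadLittleEndian32(&magic) && magic == 1234 &&
         coded_input.ReadVarint32(&size) && size < sizeof(read_back) &&
         coded_input.ReadRaw(read_back, size) && strcmp(read_back, text) == 0;
}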
+// However, for practicality we set a limit at 64 bits. The maximum encoded +// length of a number is thus 10 bytes. + +#ifndef GOOGLE_PROTOBUF_IO_CODED_STREAM_H__ +#define GOOGLE_PROTOBUF_IO_CODED_STREAM_H__ + +#include +#ifdef _MSC_VER + #if defined(_M_IX86) && \ + !defined(PROTOBUF_DISABLE_LITTLE_ENDIAN_OPT_FOR_TEST) + #define PROTOBUF_LITTLE_ENDIAN 1 + #endif + #if _MSC_VER >= 1300 + // If MSVC has "/RTCc" set, it will complain about truncating casts at + // runtime. This file contains some intentional truncating casts. + #pragma runtime_checks("c", off) + #endif +#else + #include // __BYTE_ORDER + #if defined(__BYTE_ORDER) && __BYTE_ORDER == __LITTLE_ENDIAN && \ + !defined(PROTOBUF_DISABLE_LITTLE_ENDIAN_OPT_FOR_TEST) + #define PROTOBUF_LITTLE_ENDIAN 1 + #endif +#endif +#include + + +namespace google { +namespace protobuf { + +class DescriptorPool; +class MessageFactory; + +namespace io { + +// Defined in this file. +class CodedInputStream; +class CodedOutputStream; + +// Defined in other files. +class ZeroCopyInputStream; // zero_copy_stream.h +class ZeroCopyOutputStream; // zero_copy_stream.h + +// Class which reads and decodes binary data which is composed of varint- +// encoded integers and fixed-width pieces. Wraps a ZeroCopyInputStream. +// Most users will not need to deal with CodedInputStream. +// +// Most methods of CodedInputStream that return a bool return false if an +// underlying I/O error occurs or if the data is malformed. Once such a +// failure occurs, the CodedInputStream is broken and is no longer useful. +class LIBPROTOBUF_EXPORT CodedInputStream { + public: + // Create a CodedInputStream that reads from the given ZeroCopyInputStream. + explicit CodedInputStream(ZeroCopyInputStream* input); + + // Create a CodedInputStream that reads from the given flat array. This is + // faster than using an ArrayInputStream. PushLimit(size) is implied by + // this constructor. + explicit CodedInputStream(const uint8* buffer, int size); + + // Destroy the CodedInputStream and position the underlying + // ZeroCopyInputStream at the first unread byte. If an error occurred while + // reading (causing a method to return false), then the exact position of + // the input stream may be anywhere between the last value that was read + // successfully and the stream's byte limit. + ~CodedInputStream(); + + + // Skips a number of bytes. Returns false if an underlying read error + // occurs. + bool Skip(int count); + + // Sets *data to point directly at the unread part of the CodedInputStream's + // underlying buffer, and *size to the size of that buffer, but does not + // advance the stream's current position. This will always either produce + // a non-empty buffer or return false. If the caller consumes any of + // this data, it should then call Skip() to skip over the consumed bytes. + // This may be useful for implementing external fast parsing routines for + // types of data not covered by the CodedInputStream interface. + bool GetDirectBufferPointer(const void** data, int* size); + + // Like GetDirectBufferPointer, but this method is inlined, and does not + // attempt to Refresh() if the buffer is currently empty. + inline void GetDirectBufferPointerInline(const void** data, + int* size) GOOGLE_ATTRIBUTE_ALWAYS_INLINE; + + // Read raw bytes, copying them into the given buffer. + bool ReadRaw(void* buffer, int size); + + // Like ReadRaw, but reads into a string. 
+ // + // Implementation Note: ReadString() grows the string gradually as it + // reads in the data, rather than allocating the entire requested size + // upfront. This prevents denial-of-service attacks in which a client + // could claim that a string is going to be MAX_INT bytes long in order to + // crash the server because it can't allocate this much space at once. + bool ReadString(string* buffer, int size); + // Like the above, with inlined optimizations. This should only be used + // by the protobuf implementation. + inline bool InternalReadStringInline(string* buffer, + int size) GOOGLE_ATTRIBUTE_ALWAYS_INLINE; + + + // Read a 32-bit little-endian integer. + bool ReadLittleEndian32(uint32* value); + // Read a 64-bit little-endian integer. + bool ReadLittleEndian64(uint64* value); + + // These methods read from an externally provided buffer. The caller is + // responsible for ensuring that the buffer has sufficient space. + // Read a 32-bit little-endian integer. + static const uint8* ReadLittleEndian32FromArray(const uint8* buffer, + uint32* value); + // Read a 64-bit little-endian integer. + static const uint8* ReadLittleEndian64FromArray(const uint8* buffer, + uint64* value); + + // Read an unsigned integer with Varint encoding, truncating to 32 bits. + // Reading a 32-bit value is equivalent to reading a 64-bit one and casting + // it to uint32, but may be more efficient. + bool ReadVarint32(uint32* value); + // Read an unsigned integer with Varint encoding. + bool ReadVarint64(uint64* value); + + // Read a tag. This calls ReadVarint32() and returns the result, or returns + // zero (which is not a valid tag) if ReadVarint32() fails. Also, it updates + // the last tag value, which can be checked with LastTagWas(). + // Always inline because this is only called in once place per parse loop + // but it is called for every iteration of said loop, so it should be fast. + // GCC doesn't want to inline this by default. + uint32 ReadTag() GOOGLE_ATTRIBUTE_ALWAYS_INLINE; + + // Usually returns true if calling ReadVarint32() now would produce the given + // value. Will always return false if ReadVarint32() would not return the + // given value. If ExpectTag() returns true, it also advances past + // the varint. For best performance, use a compile-time constant as the + // parameter. + // Always inline because this collapses to a small number of instructions + // when given a constant parameter, but GCC doesn't want to inline by default. + bool ExpectTag(uint32 expected) GOOGLE_ATTRIBUTE_ALWAYS_INLINE; + + // Like above, except this reads from the specified buffer. The caller is + // responsible for ensuring that the buffer is large enough to read a varint + // of the expected size. For best performance, use a compile-time constant as + // the expected tag parameter. + // + // Returns a pointer beyond the expected tag if it was found, or NULL if it + // was not. + static const uint8* ExpectTagFromArray( + const uint8* buffer, + uint32 expected) GOOGLE_ATTRIBUTE_ALWAYS_INLINE; + + // Usually returns true if no more bytes can be read. Always returns false + // if more bytes can be read. If ExpectAtEnd() returns true, a subsequent + // call to LastTagWas() will act as if ReadTag() had been called and returned + // zero, and ConsumedEntireMessage() will return true. + bool ExpectAtEnd(); + + // If the last call to ReadTag() returned the given value, returns true. 
+ // Otherwise, returns false; + // + // This is needed because parsers for some types of embedded messages + // (with field type TYPE_GROUP) don't actually know that they've reached the + // end of a message until they see an ENDGROUP tag, which was actually part + // of the enclosing message. The enclosing message would like to check that + // tag to make sure it had the right number, so it calls LastTagWas() on + // return from the embedded parser to check. + bool LastTagWas(uint32 expected); + + // When parsing message (but NOT a group), this method must be called + // immediately after MergeFromCodedStream() returns (if it returns true) + // to further verify that the message ended in a legitimate way. For + // example, this verifies that parsing did not end on an end-group tag. + // It also checks for some cases where, due to optimizations, + // MergeFromCodedStream() can incorrectly return true. + bool ConsumedEntireMessage(); + + // Limits ---------------------------------------------------------- + // Limits are used when parsing length-delimited embedded messages. + // After the message's length is read, PushLimit() is used to prevent + // the CodedInputStream from reading beyond that length. Once the + // embedded message has been parsed, PopLimit() is called to undo the + // limit. + + // Opaque type used with PushLimit() and PopLimit(). Do not modify + // values of this type yourself. The only reason that this isn't a + // struct with private internals is for efficiency. + typedef int Limit; + + // Places a limit on the number of bytes that the stream may read, + // starting from the current position. Once the stream hits this limit, + // it will act like the end of the input has been reached until PopLimit() + // is called. + // + // As the names imply, the stream conceptually has a stack of limits. The + // shortest limit on the stack is always enforced, even if it is not the + // top limit. + // + // The value returned by PushLimit() is opaque to the caller, and must + // be passed unchanged to the corresponding call to PopLimit(). + Limit PushLimit(int byte_limit); + + // Pops the last limit pushed by PushLimit(). The input must be the value + // returned by that call to PushLimit(). + void PopLimit(Limit limit); + + // Returns the number of bytes left until the nearest limit on the + // stack is hit, or -1 if no limits are in place. + int BytesUntilLimit(); + + // Total Bytes Limit ----------------------------------------------- + // To prevent malicious users from sending excessively large messages + // and causing integer overflows or memory exhaustion, CodedInputStream + // imposes a hard limit on the total number of bytes it will read. + + // Sets the maximum number of bytes that this CodedInputStream will read + // before refusing to continue. To prevent integer overflows in the + // protocol buffers implementation, as well as to prevent servers from + // allocating enormous amounts of memory to hold parsed messages, the + // maximum message length should be limited to the shortest length that + // will not harm usability. The theoretical shortest message that could + // cause integer overflows is 512MB. The default limit is 64MB. Apps + // should set shorter limits if possible. If warning_threshold is not -1, + // a warning will be printed to stderr after warning_threshold bytes are + // read. An error will always be printed to stderr if the limit is + // reached. + // + // This is unrelated to PushLimit()/PopLimit(). 
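// Minimal sketch of the limit discipline described above for a
// length-delimited embedded message (here the payload is simply skipped;
// the helper name is an assumption, not part of the vendored file).
#include <google/protobuf/io/coded_stream.h>

inline bool SkipEmbeddedMessage(google::protobuf::io::CodedInputStream* input) {
  google::protobuf::uint32 length;
  if (!input->ReadVarint32(&length)) return false;             // length prefix
  google::protobuf::io::CodedInputStream::Limit limit = input->PushLimit(length);
  // ... a real parser would ReadTag()/read fields until the limit is hit ...
  bool ok = input->Skip(input->BytesUntilLimit());             // consume payload
  input->PopLimit(limit);
  return ok;
}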
+ // + // Hint: If you are reading this because your program is printing a + // warning about dangerously large protocol messages, you may be + // confused about what to do next. The best option is to change your + // design such that excessively large messages are not necessary. + // For example, try to design file formats to consist of many small + // messages rather than a single large one. If this is infeasible, + // you will need to increase the limit. Chances are, though, that + // your code never constructs a CodedInputStream on which the limit + // can be set. You probably parse messages by calling things like + // Message::ParseFromString(). In this case, you will need to change + // your code to instead construct some sort of ZeroCopyInputStream + // (e.g. an ArrayInputStream), construct a CodedInputStream around + // that, then call Message::ParseFromCodedStream() instead. Then + // you can adjust the limit. Yes, it's more work, but you're doing + // something unusual. + void SetTotalBytesLimit(int total_bytes_limit, int warning_threshold); + + // Recursion Limit ------------------------------------------------- + // To prevent corrupt or malicious messages from causing stack overflows, + // we must keep track of the depth of recursion when parsing embedded + // messages and groups. CodedInputStream keeps track of this because it + // is the only object that is passed down the stack during parsing. + + // Sets the maximum recursion depth. The default is 64. + void SetRecursionLimit(int limit); + + // Increments the current recursion depth. Returns true if the depth is + // under the limit, false if it has gone over. + bool IncrementRecursionDepth(); + + // Decrements the recursion depth. + void DecrementRecursionDepth(); + + // Extension Registry ---------------------------------------------- + // ADVANCED USAGE: 99.9% of people can ignore this section. + // + // By default, when parsing extensions, the parser looks for extension + // definitions in the pool which owns the outer message's Descriptor. + // However, you may call SetExtensionRegistry() to provide an alternative + // pool instead. This makes it possible, for example, to parse a message + // using a generated class, but represent some extensions using + // DynamicMessage. + + // Set the pool used to look up extensions. Most users do not need to call + // this as the correct pool will be chosen automatically. + // + // WARNING: It is very easy to misuse this. Carefully read the requirements + // below. Do not use this unless you are sure you need it. Almost no one + // does. + // + // Let's say you are parsing a message into message object m, and you want + // to take advantage of SetExtensionRegistry(). You must follow these + // requirements: + // + // The given DescriptorPool must contain m->GetDescriptor(). It is not + // sufficient for it to simply contain a descriptor that has the same name + // and content -- it must be the *exact object*. In other words: + // assert(pool->FindMessageTypeByName(m->GetDescriptor()->full_name()) == + // m->GetDescriptor()); + // There are two ways to satisfy this requirement: + // 1) Use m->GetDescriptor()->pool() as the pool. This is generally useless + // because this is the pool that would be used anyway if you didn't call + // SetExtensionRegistry() at all. + // 2) Use a DescriptorPool which has m->GetDescriptor()->pool() as an + // "underlay". Read the documentation for DescriptorPool for more + // information about underlays. 
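// Sketch of the approach suggested in the hint above: construct the
// CodedInputStream explicitly so the total bytes limit can be raised.
// `MyMessage` is a placeholder for some generated message type, and the
// chosen limits are arbitrary examples.
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>

template <typename MyMessage>
bool ParseLargeMessage(const void* data, int size, MyMessage* message) {
  google::protobuf::io::ArrayInputStream raw_input(data, size);
  google::protobuf::io::CodedInputStream coded_input(&raw_input);
  // Allow up to 256MB, warn at 128MB, instead of the 64MB/32MB defaults.
  coded_input.SetTotalBytesLimit(256 << 20, 128 << 20);
  return message->ParseFromCodedStream(&coded_input) &&
         coded_input.ConsumedEntireMessage();
}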
+ // + // You must also provide a MessageFactory. This factory will be used to + // construct Message objects representing extensions. The factory's + // GetPrototype() MUST return non-NULL for any Descriptor which can be found + // through the provided pool. + // + // If the provided factory might return instances of protocol-compiler- + // generated (i.e. compiled-in) types, or if the outer message object m is + // a generated type, then the given factory MUST have this property: If + // GetPrototype() is given a Descriptor which resides in + // DescriptorPool::generated_pool(), the factory MUST return the same + // prototype which MessageFactory::generated_factory() would return. That + // is, given a descriptor for a generated type, the factory must return an + // instance of the generated class (NOT DynamicMessage). However, when + // given a descriptor for a type that is NOT in generated_pool, the factory + // is free to return any implementation. + // + // The reason for this requirement is that generated sub-objects may be + // accessed via the standard (non-reflection) extension accessor methods, + // and these methods will down-cast the object to the generated class type. + // If the object is not actually of that type, the results would be undefined. + // On the other hand, if an extension is not compiled in, then there is no + // way the code could end up accessing it via the standard accessors -- the + // only way to access the extension is via reflection. When using reflection, + // DynamicMessage and generated messages are indistinguishable, so it's fine + // if these objects are represented using DynamicMessage. + // + // Using DynamicMessageFactory on which you have called + // SetDelegateToGeneratedFactory(true) should be sufficient to satisfy the + // above requirement. + // + // If either pool or factory is NULL, both must be NULL. + // + // Note that this feature is ignored when parsing "lite" messages as they do + // not have descriptors. + void SetExtensionRegistry(DescriptorPool* pool, MessageFactory* factory); + + // Get the DescriptorPool set via SetExtensionRegistry(), or NULL if no pool + // has been provided. + const DescriptorPool* GetExtensionPool(); + + // Get the MessageFactory set via SetExtensionRegistry(), or NULL if no + // factory has been provided. + MessageFactory* GetExtensionFactory(); + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(CodedInputStream); + + ZeroCopyInputStream* input_; + const uint8* buffer_; + const uint8* buffer_end_; // pointer to the end of the buffer. + int total_bytes_read_; // total bytes read from input_, including + // the current buffer + + // If total_bytes_read_ surpasses INT_MAX, we record the extra bytes here + // so that we can BackUp() on destruction. + int overflow_bytes_; + + // LastTagWas() stuff. + uint32 last_tag_; // result of last ReadTag(). + + // This is set true by ReadTag{Fallback/Slow}() if it is called when exactly + // at EOF, or by ExpectAtEnd() when it returns true. This happens when we + // reach the end of a message and attempt to read another tag. + bool legitimate_message_end_; + + // See EnableAliasing(). + bool aliasing_enabled_; + + // Limits + Limit current_limit_; // if position = -1, no limit is applied + + // For simplicity, if the current buffer crosses a limit (either a normal + // limit created by PushLimit() or the total bytes limit), buffer_size_ + // only tracks the number of bytes before that limit. This field + // contains the number of bytes after it. 
Note that this implies that if + // buffer_size_ == 0 and buffer_size_after_limit_ > 0, we know we've + // hit a limit. However, if both are zero, it doesn't necessarily mean + // we aren't at a limit -- the buffer may have ended exactly at the limit. + int buffer_size_after_limit_; + + // Maximum number of bytes to read, period. This is unrelated to + // current_limit_. Set using SetTotalBytesLimit(). + int total_bytes_limit_; + int total_bytes_warning_threshold_; + + // Current recursion depth, controlled by IncrementRecursionDepth() and + // DecrementRecursionDepth(). + int recursion_depth_; + // Recursion depth limit, set by SetRecursionLimit(). + int recursion_limit_; + + // See SetExtensionRegistry(). + const DescriptorPool* extension_pool_; + MessageFactory* extension_factory_; + + // Private member functions. + + // Advance the buffer by a given number of bytes. + void Advance(int amount); + + // Back up input_ to the current buffer position. + void BackUpInputToCurrentPosition(); + + // Recomputes the value of buffer_size_after_limit_. Must be called after + // current_limit_ or total_bytes_limit_ changes. + void RecomputeBufferLimits(); + + // Writes an error message saying that we hit total_bytes_limit_. + void PrintTotalBytesLimitError(); + + // Called when the buffer runs out to request more data. Implies an + // Advance(BufferSize()). + bool Refresh(); + + // When parsing varints, we optimize for the common case of small values, and + // then optimize for the case when the varint fits within the current buffer + // piece. The Fallback method is used when we can't use the one-byte + // optimization. The Slow method is yet another fallback when the buffer is + // not large enough. Making the slow path out-of-line speeds up the common + // case by 10-15%. The slow path is fairly uncommon: it only triggers when a + // message crosses multiple buffers. + bool ReadVarint32Fallback(uint32* value); + bool ReadVarint64Fallback(uint64* value); + bool ReadVarint32Slow(uint32* value); + bool ReadVarint64Slow(uint64* value); + bool ReadLittleEndian32Fallback(uint32* value); + bool ReadLittleEndian64Fallback(uint64* value); + // Fallback/slow methods for reading tags. These do not update last_tag_, + // but will set legitimate_message_end_ if we are at the end of the input + // stream. + uint32 ReadTagFallback(); + uint32 ReadTagSlow(); + bool ReadStringFallback(string* buffer, int size); + + // Return the size of the buffer. + int BufferSize() const; + + static const int kDefaultTotalBytesLimit = 64 << 20; // 64MB + + static const int kDefaultTotalBytesWarningThreshold = 32 << 20; // 32MB + static const int kDefaultRecursionLimit = 64; +}; + +// Class which encodes and writes binary data which is composed of varint- +// encoded integers and fixed-width pieces. Wraps a ZeroCopyOutputStream. +// Most users will not need to deal with CodedOutputStream. +// +// Most methods of CodedOutputStream which return a bool return false if an +// underlying I/O error occurs. Once such a failure occurs, the +// CodedOutputStream is broken and is no longer useful. The Write* methods do +// not return the stream status, but will invalidate the stream if an error +// occurs. The client can probe HadError() to determine the status. +// +// Note that every method of CodedOutputStream which writes some data has +// a corresponding static "ToArray" version. These versions write directly +// to the provided buffer, returning a pointer past the last written byte. 
+// They require that the buffer has sufficient capacity for the encoded data. +// This allows an optimization where we check if an output stream has enough +// space for an entire message before we start writing and, if there is, we +// call only the ToArray methods to avoid doing bound checks for each +// individual value. +// i.e., in the example above: +// +// CodedOutputStream coded_output = new CodedOutputStream(raw_output); +// int magic_number = 1234; +// char text[] = "Hello world!"; +// +// int coded_size = sizeof(magic_number) + +// CodedOutputStream::VarintSize32(strlen(text)) + +// strlen(text); +// +// uint8* buffer = +// coded_output->GetDirectBufferForNBytesAndAdvance(coded_size); +// if (buffer != NULL) { +// // The output stream has enough space in the buffer: write directly to +// // the array. +// buffer = CodedOutputStream::WriteLittleEndian32ToArray(magic_number, +// buffer); +// buffer = CodedOutputStream::WriteVarint32ToArray(strlen(text), buffer); +// buffer = CodedOutputStream::WriteRawToArray(text, strlen(text), buffer); +// } else { +// // Make bound-checked writes, which will ask the underlying stream for +// // more space as needed. +// coded_output->WriteLittleEndian32(magic_number); +// coded_output->WriteVarint32(strlen(text)); +// coded_output->WriteRaw(text, strlen(text)); +// } +// +// delete coded_output; +class LIBPROTOBUF_EXPORT CodedOutputStream { + public: + // Create an CodedOutputStream that writes to the given ZeroCopyOutputStream. + explicit CodedOutputStream(ZeroCopyOutputStream* output); + + // Destroy the CodedOutputStream and position the underlying + // ZeroCopyOutputStream immediately after the last byte written. + ~CodedOutputStream(); + + // Skips a number of bytes, leaving the bytes unmodified in the underlying + // buffer. Returns false if an underlying write error occurs. This is + // mainly useful with GetDirectBufferPointer(). + bool Skip(int count); + + // Sets *data to point directly at the unwritten part of the + // CodedOutputStream's underlying buffer, and *size to the size of that + // buffer, but does not advance the stream's current position. This will + // always either produce a non-empty buffer or return false. If the caller + // writes any data to this buffer, it should then call Skip() to skip over + // the consumed bytes. This may be useful for implementing external fast + // serialization routines for types of data not covered by the + // CodedOutputStream interface. + bool GetDirectBufferPointer(void** data, int* size); + + // If there are at least "size" bytes available in the current buffer, + // returns a pointer directly into the buffer and advances over these bytes. + // The caller may then write directly into this buffer (e.g. using the + // *ToArray static methods) rather than go through CodedOutputStream. If + // there are not enough bytes available, returns NULL. The return pointer is + // invalidated as soon as any other non-const method of CodedOutputStream + // is called. + inline uint8* GetDirectBufferForNBytesAndAdvance(int size); + + // Write raw bytes, copying them from the given buffer. + void WriteRaw(const void* buffer, int size); + // Like WriteRaw() but writing directly to the target array. + // This is _not_ inlined, as the compiler often optimizes memcpy into inline + // copy loops. Since this gets called by every field with string or bytes + // type, inlining may lead to a significant amount of code bloat, with only a + // minor performance gain. 
+ static uint8* WriteRawToArray(const void* buffer, int size, uint8* target); + + // Equivalent to WriteRaw(str.data(), str.size()). + void WriteString(const string& str); + // Like WriteString() but writing directly to the target array. + static uint8* WriteStringToArray(const string& str, uint8* target); + + + // Write a 32-bit little-endian integer. + void WriteLittleEndian32(uint32 value); + // Like WriteLittleEndian32() but writing directly to the target array. + static uint8* WriteLittleEndian32ToArray(uint32 value, uint8* target); + // Write a 64-bit little-endian integer. + void WriteLittleEndian64(uint64 value); + // Like WriteLittleEndian64() but writing directly to the target array. + static uint8* WriteLittleEndian64ToArray(uint64 value, uint8* target); + + // Write an unsigned integer with Varint encoding. Writing a 32-bit value + // is equivalent to casting it to uint64 and writing it as a 64-bit value, + // but may be more efficient. + void WriteVarint32(uint32 value); + // Like WriteVarint32() but writing directly to the target array. + static uint8* WriteVarint32ToArray(uint32 value, uint8* target); + // Write an unsigned integer with Varint encoding. + void WriteVarint64(uint64 value); + // Like WriteVarint64() but writing directly to the target array. + static uint8* WriteVarint64ToArray(uint64 value, uint8* target); + + // Equivalent to WriteVarint32() except when the value is negative, + // in which case it must be sign-extended to a full 10 bytes. + void WriteVarint32SignExtended(int32 value); + // Like WriteVarint32SignExtended() but writing directly to the target array. + static uint8* WriteVarint32SignExtendedToArray(int32 value, uint8* target); + + // This is identical to WriteVarint32(), but optimized for writing tags. + // In particular, if the input is a compile-time constant, this method + // compiles down to a couple instructions. + // Always inline because otherwise the aformentioned optimization can't work, + // but GCC by default doesn't want to inline this. + void WriteTag(uint32 value); + // Like WriteTag() but writing directly to the target array. + static uint8* WriteTagToArray( + uint32 value, uint8* target) GOOGLE_ATTRIBUTE_ALWAYS_INLINE; + + // Returns the number of bytes needed to encode the given value as a varint. + static int VarintSize32(uint32 value); + // Returns the number of bytes needed to encode the given value as a varint. + static int VarintSize64(uint64 value); + + // If negative, 10 bytes. Otheriwse, same as VarintSize32(). + static int VarintSize32SignExtended(int32 value); + + // Returns the total number of bytes written since this object was created. + inline int ByteCount() const; + + // Returns true if there was an underlying I/O error since this object was + // created. + bool HadError() const { return had_error_; } + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(CodedOutputStream); + + ZeroCopyOutputStream* output_; + uint8* buffer_; + int buffer_size_; + int total_bytes_; // Sum of sizes of all buffers seen so far. + bool had_error_; // Whether an error occurred during output. + + // Advance the buffer by a given number of bytes. + void Advance(int amount); + + // Called when the buffer runs out to request more data. Implies an + // Advance(buffer_size_). + bool Refresh(); + + static uint8* WriteVarint32FallbackToArray(uint32 value, uint8* target); + + // Always-inlined versions of WriteVarint* functions so that code can be + // reused, while still controlling size. 
For instance, WriteVarint32ToArray() + // should not directly call this: since it is inlined itself, doing so + // would greatly increase the size of generated code. Instead, it should call + // WriteVarint32FallbackToArray. Meanwhile, WriteVarint32() is already + // out-of-line, so it should just invoke this directly to avoid any extra + // function call overhead. + static uint8* WriteVarint32FallbackToArrayInline( + uint32 value, uint8* target) GOOGLE_ATTRIBUTE_ALWAYS_INLINE; + static uint8* WriteVarint64ToArrayInline( + uint64 value, uint8* target) GOOGLE_ATTRIBUTE_ALWAYS_INLINE; + + static int VarintSize32Fallback(uint32 value); +}; + +// inline methods ==================================================== +// The vast majority of varints are only one byte. These inline +// methods optimize for that case. + +inline bool CodedInputStream::ReadVarint32(uint32* value) { + if (GOOGLE_PREDICT_TRUE(buffer_ < buffer_end_) && *buffer_ < 0x80) { + *value = *buffer_; + Advance(1); + return true; + } else { + return ReadVarint32Fallback(value); + } +} + +inline bool CodedInputStream::ReadVarint64(uint64* value) { + if (GOOGLE_PREDICT_TRUE(buffer_ < buffer_end_) && *buffer_ < 0x80) { + *value = *buffer_; + Advance(1); + return true; + } else { + return ReadVarint64Fallback(value); + } +} + +// static +inline const uint8* CodedInputStream::ReadLittleEndian32FromArray( + const uint8* buffer, + uint32* value) { +#if defined(PROTOBUF_LITTLE_ENDIAN) + memcpy(value, buffer, sizeof(*value)); + return buffer + sizeof(*value); +#else + *value = (static_cast(buffer[0]) ) | + (static_cast(buffer[1]) << 8) | + (static_cast(buffer[2]) << 16) | + (static_cast(buffer[3]) << 24); + return buffer + sizeof(*value); +#endif +} +// static +inline const uint8* CodedInputStream::ReadLittleEndian64FromArray( + const uint8* buffer, + uint64* value) { +#if defined(PROTOBUF_LITTLE_ENDIAN) + memcpy(value, buffer, sizeof(*value)); + return buffer + sizeof(*value); +#else + uint32 part0 = (static_cast(buffer[0]) ) | + (static_cast(buffer[1]) << 8) | + (static_cast(buffer[2]) << 16) | + (static_cast(buffer[3]) << 24); + uint32 part1 = (static_cast(buffer[4]) ) | + (static_cast(buffer[5]) << 8) | + (static_cast(buffer[6]) << 16) | + (static_cast(buffer[7]) << 24); + *value = static_cast(part0) | + (static_cast(part1) << 32); + return buffer + sizeof(*value); +#endif +} + +inline bool CodedInputStream::ReadLittleEndian32(uint32* value) { +#if defined(PROTOBUF_LITTLE_ENDIAN) + if (GOOGLE_PREDICT_TRUE(BufferSize() >= static_cast(sizeof(*value)))) { + memcpy(value, buffer_, sizeof(*value)); + Advance(sizeof(*value)); + return true; + } else { + return ReadLittleEndian32Fallback(value); + } +#else + return ReadLittleEndian32Fallback(value); +#endif +} + +inline bool CodedInputStream::ReadLittleEndian64(uint64* value) { +#if defined(PROTOBUF_LITTLE_ENDIAN) + if (GOOGLE_PREDICT_TRUE(BufferSize() >= static_cast(sizeof(*value)))) { + memcpy(value, buffer_, sizeof(*value)); + Advance(sizeof(*value)); + return true; + } else { + return ReadLittleEndian64Fallback(value); + } +#else + return ReadLittleEndian64Fallback(value); +#endif +} + +inline uint32 CodedInputStream::ReadTag() { + if (GOOGLE_PREDICT_TRUE(buffer_ < buffer_end_) && buffer_[0] < 0x80) { + last_tag_ = buffer_[0]; + Advance(1); + return last_tag_; + } else { + last_tag_ = ReadTagFallback(); + return last_tag_; + } +} + +inline bool CodedInputStream::LastTagWas(uint32 expected) { + return last_tag_ == expected; +} + +inline bool CodedInputStream::ConsumedEntireMessage() { + 
return legitimate_message_end_; +} + +inline bool CodedInputStream::ExpectTag(uint32 expected) { + if (expected < (1 << 7)) { + if (GOOGLE_PREDICT_TRUE(buffer_ < buffer_end_) && buffer_[0] == expected) { + Advance(1); + return true; + } else { + return false; + } + } else if (expected < (1 << 14)) { + if (GOOGLE_PREDICT_TRUE(BufferSize() >= 2) && + buffer_[0] == static_cast(expected | 0x80) && + buffer_[1] == static_cast(expected >> 7)) { + Advance(2); + return true; + } else { + return false; + } + } else { + // Don't bother optimizing for larger values. + return false; + } +} + +inline const uint8* CodedInputStream::ExpectTagFromArray( + const uint8* buffer, uint32 expected) { + if (expected < (1 << 7)) { + if (buffer[0] == expected) { + return buffer + 1; + } + } else if (expected < (1 << 14)) { + if (buffer[0] == static_cast(expected | 0x80) && + buffer[1] == static_cast(expected >> 7)) { + return buffer + 2; + } + } + return NULL; +} + +inline void CodedInputStream::GetDirectBufferPointerInline(const void** data, + int* size) { + *data = buffer_; + *size = buffer_end_ - buffer_; +} + +inline bool CodedInputStream::ExpectAtEnd() { + // If we are at a limit we know no more bytes can be read. Otherwise, it's + // hard to say without calling Refresh(), and we'd rather not do that. + + if (buffer_ == buffer_end_ && buffer_size_after_limit_ != 0) { + last_tag_ = 0; // Pretend we called ReadTag()... + legitimate_message_end_ = true; // ... and it hit EOF. + return true; + } else { + return false; + } +} + +inline uint8* CodedOutputStream::GetDirectBufferForNBytesAndAdvance(int size) { + if (buffer_size_ < size) { + return NULL; + } else { + uint8* result = buffer_; + Advance(size); + return result; + } +} + +inline uint8* CodedOutputStream::WriteVarint32ToArray(uint32 value, + uint8* target) { + if (value < 0x80) { + *target = value; + return target + 1; + } else { + return WriteVarint32FallbackToArray(value, target); + } +} + +inline void CodedOutputStream::WriteVarint32SignExtended(int32 value) { + if (value < 0) { + WriteVarint64(static_cast(value)); + } else { + WriteVarint32(static_cast(value)); + } +} + +inline uint8* CodedOutputStream::WriteVarint32SignExtendedToArray( + int32 value, uint8* target) { + if (value < 0) { + return WriteVarint64ToArray(static_cast(value), target); + } else { + return WriteVarint32ToArray(static_cast(value), target); + } +} + +inline uint8* CodedOutputStream::WriteLittleEndian32ToArray(uint32 value, + uint8* target) { +#if defined(PROTOBUF_LITTLE_ENDIAN) + memcpy(target, &value, sizeof(value)); +#else + target[0] = static_cast(value); + target[1] = static_cast(value >> 8); + target[2] = static_cast(value >> 16); + target[3] = static_cast(value >> 24); +#endif + return target + sizeof(value); +} + +inline uint8* CodedOutputStream::WriteLittleEndian64ToArray(uint64 value, + uint8* target) { +#if defined(PROTOBUF_LITTLE_ENDIAN) + memcpy(target, &value, sizeof(value)); +#else + uint32 part0 = static_cast(value); + uint32 part1 = static_cast(value >> 32); + + target[0] = static_cast(part0); + target[1] = static_cast(part0 >> 8); + target[2] = static_cast(part0 >> 16); + target[3] = static_cast(part0 >> 24); + target[4] = static_cast(part1); + target[5] = static_cast(part1 >> 8); + target[6] = static_cast(part1 >> 16); + target[7] = static_cast(part1 >> 24); +#endif + return target + sizeof(value); +} + +inline void CodedOutputStream::WriteTag(uint32 value) { + WriteVarint32(value); +} + +inline uint8* CodedOutputStream::WriteTagToArray( + uint32 value, uint8* 
target) { + if (value < (1 << 7)) { + target[0] = value; + return target + 1; + } else if (value < (1 << 14)) { + target[0] = static_cast(value | 0x80); + target[1] = static_cast(value >> 7); + return target + 2; + } else { + return WriteVarint32FallbackToArray(value, target); + } +} + +inline int CodedOutputStream::VarintSize32(uint32 value) { + if (value < (1 << 7)) { + return 1; + } else { + return VarintSize32Fallback(value); + } +} + +inline int CodedOutputStream::VarintSize32SignExtended(int32 value) { + if (value < 0) { + return 10; // TODO(kenton): Make this a symbolic constant. + } else { + return VarintSize32(static_cast(value)); + } +} + +inline void CodedOutputStream::WriteString(const string& str) { + WriteRaw(str.data(), static_cast(str.size())); +} + +inline uint8* CodedOutputStream::WriteStringToArray( + const string& str, uint8* target) { + return WriteRawToArray(str.data(), static_cast(str.size()), target); +} + +inline int CodedOutputStream::ByteCount() const { + return total_bytes_ - buffer_size_; +} + +inline void CodedInputStream::Advance(int amount) { + buffer_ += amount; +} + +inline void CodedOutputStream::Advance(int amount) { + buffer_ += amount; + buffer_size_ -= amount; +} + +inline void CodedInputStream::SetRecursionLimit(int limit) { + recursion_limit_ = limit; +} + +inline bool CodedInputStream::IncrementRecursionDepth() { + ++recursion_depth_; + return recursion_depth_ <= recursion_limit_; +} + +inline void CodedInputStream::DecrementRecursionDepth() { + if (recursion_depth_ > 0) --recursion_depth_; +} + +inline void CodedInputStream::SetExtensionRegistry(DescriptorPool* pool, + MessageFactory* factory) { + extension_pool_ = pool; + extension_factory_ = factory; +} + +inline const DescriptorPool* CodedInputStream::GetExtensionPool() { + return extension_pool_; +} + +inline MessageFactory* CodedInputStream::GetExtensionFactory() { + return extension_factory_; +} + +inline int CodedInputStream::BufferSize() const { + return buffer_end_ - buffer_; +} + +inline CodedInputStream::CodedInputStream(ZeroCopyInputStream* input) + : input_(input), + buffer_(NULL), + buffer_end_(NULL), + total_bytes_read_(0), + overflow_bytes_(0), + last_tag_(0), + legitimate_message_end_(false), + aliasing_enabled_(false), + current_limit_(kint32max), + buffer_size_after_limit_(0), + total_bytes_limit_(kDefaultTotalBytesLimit), + total_bytes_warning_threshold_(kDefaultTotalBytesWarningThreshold), + recursion_depth_(0), + recursion_limit_(kDefaultRecursionLimit), + extension_pool_(NULL), + extension_factory_(NULL) { + // Eagerly Refresh() so buffer space is immediately available. + Refresh(); +} + +inline CodedInputStream::CodedInputStream(const uint8* buffer, int size) + : input_(NULL), + buffer_(buffer), + buffer_end_(buffer + size), + total_bytes_read_(size), + overflow_bytes_(0), + last_tag_(0), + legitimate_message_end_(false), + aliasing_enabled_(false), + current_limit_(size), + buffer_size_after_limit_(0), + total_bytes_limit_(kDefaultTotalBytesLimit), + total_bytes_warning_threshold_(kDefaultTotalBytesWarningThreshold), + recursion_depth_(0), + recursion_limit_(kDefaultRecursionLimit), + extension_pool_(NULL), + extension_factory_(NULL) { + // Note that setting current_limit_ == size is important to prevent some + // code paths from trying to access input_ and segfaulting. 
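// Illustrative note (names are assumptions, not part of the vendored file)
// on the one- and two-byte tag fast paths in ExpectTag()/WriteTagToArray()
// above: a wire-format tag is (field_number << 3) | wire_type, so small
// field numbers yield tags below 1 << 7 or 1 << 14 and take those branches.
inline unsigned MakeTag(int field_number, unsigned wire_type) {
  return (static_cast<unsigned>(field_number) << 3) | wire_type;
}
// Example: field 1 with wire type 2 (length-delimited) gives MakeTag(1, 2)
// == 0x0A, a single byte, so ExpectTag(0x0A) compares just one buffer byte.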
+} + +inline CodedInputStream::~CodedInputStream() { + if (input_ != NULL) { + BackUpInputToCurrentPosition(); + } +} + +} // namespace io +} // namespace protobuf + + +#if defined(_MSC_VER) && _MSC_VER >= 1300 + #pragma runtime_checks("c", restore) +#endif // _MSC_VER + +} // namespace google +#endif // GOOGLE_PROTOBUF_IO_CODED_STREAM_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/coded_stream_inl.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/coded_stream_inl.h new file mode 100644 index 0000000000..e9799d4772 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/coded_stream_inl.h @@ -0,0 +1,64 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: jasonh@google.com (Jason Hsueh) +// +// Implements methods of coded_stream.h that need to be inlined for performance +// reasons, but should not be defined in a public header. 
+ +#ifndef GOOGLE_PROTOBUF_IO_CODED_STREAM_INL_H__ +#define GOOGLE_PROTOBUF_IO_CODED_STREAM_INL_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace io { + +inline bool CodedInputStream::InternalReadStringInline(string* buffer, + int size) { + if (size < 0) return false; // security: size is often user-supplied + + if (BufferSize() >= size) { + STLStringResizeUninitialized(buffer, size); + memcpy(string_as_array(buffer), buffer_, size); + Advance(size); + return true; + } + + return ReadStringFallback(buffer, size); +} + +} // namespace io +} // namespace protobuf +} // namespace google +#endif // GOOGLE_PROTOBUF_IO_CODED_STREAM_INL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/coded_stream_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/coded_stream_unittest.cc new file mode 100644 index 0000000000..ff268ab9bb --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/coded_stream_unittest.cc @@ -0,0 +1,1131 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This file contains tests and benchmarks. + +#include + +#include + +#include + +#include +#include +#include +#include +#include + + +// This declares an unsigned long long integer literal in a portable way. +// (The original macro is way too big and ruins my formatting.) +#undef ULL +#define ULL(x) GOOGLE_ULONGLONG(x) + +namespace google { +namespace protobuf { +namespace io { +namespace { + +// =================================================================== +// Data-Driven Test Infrastructure + +// TEST_1D and TEST_2D are macros I'd eventually like to see added to +// gTest. 
These macros can be used to declare tests which should be +// run multiple times, once for each item in some input array. TEST_1D +// tests all cases in a single input array. TEST_2D tests all +// combinations of cases from two arrays. The arrays must be statically +// defined such that the GOOGLE_ARRAYSIZE() macro works on them. Example: +// +// int kCases[] = {1, 2, 3, 4} +// TEST_1D(MyFixture, MyTest, kCases) { +// EXPECT_GT(kCases_case, 0); +// } +// +// This test iterates through the numbers 1, 2, 3, and 4 and tests that +// they are all grater than zero. In case of failure, the exact case +// which failed will be printed. The case type must be printable using +// ostream::operator<<. + +// TODO(kenton): gTest now supports "parameterized tests" which would be +// a better way to accomplish this. Rewrite when time permits. + +#define TEST_1D(FIXTURE, NAME, CASES) \ + class FIXTURE##_##NAME##_DD : public FIXTURE { \ + protected: \ + template \ + void DoSingleCase(const CaseType& CASES##_case); \ + }; \ + \ + TEST_F(FIXTURE##_##NAME##_DD, NAME) { \ + for (int i = 0; i < GOOGLE_ARRAYSIZE(CASES); i++) { \ + SCOPED_TRACE(testing::Message() \ + << #CASES " case #" << i << ": " << CASES[i]); \ + DoSingleCase(CASES[i]); \ + } \ + } \ + \ + template \ + void FIXTURE##_##NAME##_DD::DoSingleCase(const CaseType& CASES##_case) + +#define TEST_2D(FIXTURE, NAME, CASES1, CASES2) \ + class FIXTURE##_##NAME##_DD : public FIXTURE { \ + protected: \ + template \ + void DoSingleCase(const CaseType1& CASES1##_case, \ + const CaseType2& CASES2##_case); \ + }; \ + \ + TEST_F(FIXTURE##_##NAME##_DD, NAME) { \ + for (int i = 0; i < GOOGLE_ARRAYSIZE(CASES1); i++) { \ + for (int j = 0; j < GOOGLE_ARRAYSIZE(CASES2); j++) { \ + SCOPED_TRACE(testing::Message() \ + << #CASES1 " case #" << i << ": " << CASES1[i] << ", " \ + << #CASES2 " case #" << j << ": " << CASES2[j]); \ + DoSingleCase(CASES1[i], CASES2[j]); \ + } \ + } \ + } \ + \ + template \ + void FIXTURE##_##NAME##_DD::DoSingleCase(const CaseType1& CASES1##_case, \ + const CaseType2& CASES2##_case) + +// =================================================================== + +class CodedStreamTest : public testing::Test { + protected: + static const int kBufferSize = 1024 * 64; + static uint8 buffer_[kBufferSize]; +}; + +uint8 CodedStreamTest::buffer_[CodedStreamTest::kBufferSize]; + +// We test each operation over a variety of block sizes to insure that +// we test cases where reads or writes cross buffer boundaries, cases +// where they don't, and cases where there is so much buffer left that +// we can use special optimized paths that don't worry about bounds +// checks. +const int kBlockSizes[] = {1, 2, 3, 5, 7, 13, 32, 1024}; + +// ------------------------------------------------------------------- +// Varint tests. + +struct VarintCase { + uint8 bytes[10]; // Encoded bytes. + int size; // Encoded size, in bytes. + uint64 value; // Parsed value. 
+}; + +inline std::ostream& operator<<(std::ostream& os, const VarintCase& c) { + return os << c.value; +} + +VarintCase kVarintCases[] = { + // 32-bit values + {{0x00} , 1, 0}, + {{0x01} , 1, 1}, + {{0x7f} , 1, 127}, + {{0xa2, 0x74}, 2, (0x22 << 0) | (0x74 << 7)}, // 14882 + {{0xbe, 0xf7, 0x92, 0x84, 0x0b}, 5, // 2961488830 + (0x3e << 0) | (0x77 << 7) | (0x12 << 14) | (0x04 << 21) | + (ULL(0x0b) << 28)}, + + // 64-bit + {{0xbe, 0xf7, 0x92, 0x84, 0x1b}, 5, // 7256456126 + (0x3e << 0) | (0x77 << 7) | (0x12 << 14) | (0x04 << 21) | + (ULL(0x1b) << 28)}, + {{0x80, 0xe6, 0xeb, 0x9c, 0xc3, 0xc9, 0xa4, 0x49}, 8, // 41256202580718336 + (0x00 << 0) | (0x66 << 7) | (0x6b << 14) | (0x1c << 21) | + (ULL(0x43) << 28) | (ULL(0x49) << 35) | (ULL(0x24) << 42) | + (ULL(0x49) << 49)}, + // 11964378330978735131 + {{0x9b, 0xa8, 0xf9, 0xc2, 0xbb, 0xd6, 0x80, 0x85, 0xa6, 0x01}, 10, + (0x1b << 0) | (0x28 << 7) | (0x79 << 14) | (0x42 << 21) | + (ULL(0x3b) << 28) | (ULL(0x56) << 35) | (ULL(0x00) << 42) | + (ULL(0x05) << 49) | (ULL(0x26) << 56) | (ULL(0x01) << 63)}, +}; + +TEST_2D(CodedStreamTest, ReadVarint32, kVarintCases, kBlockSizes) { + memcpy(buffer_, kVarintCases_case.bytes, kVarintCases_case.size); + ArrayInputStream input(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedInputStream coded_input(&input); + + uint32 value; + EXPECT_TRUE(coded_input.ReadVarint32(&value)); + EXPECT_EQ(static_cast(kVarintCases_case.value), value); + } + + EXPECT_EQ(kVarintCases_case.size, input.ByteCount()); +} + +TEST_2D(CodedStreamTest, ReadTag, kVarintCases, kBlockSizes) { + memcpy(buffer_, kVarintCases_case.bytes, kVarintCases_case.size); + ArrayInputStream input(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedInputStream coded_input(&input); + + uint32 expected_value = static_cast(kVarintCases_case.value); + EXPECT_EQ(expected_value, coded_input.ReadTag()); + + EXPECT_TRUE(coded_input.LastTagWas(expected_value)); + EXPECT_FALSE(coded_input.LastTagWas(expected_value + 1)); + } + + EXPECT_EQ(kVarintCases_case.size, input.ByteCount()); +} + +// This is the regression test that verifies that there is no issues +// with the empty input buffers handling. +TEST_F(CodedStreamTest, EmptyInputBeforeEos) { + class In : public ZeroCopyInputStream { + public: + In() : count_(0) {} + private: + virtual bool Next(const void** data, int* size) { + *data = NULL; + *size = 0; + return count_++ < 2; + } + virtual void BackUp(int count) { + GOOGLE_LOG(FATAL) << "Tests never call this."; + } + virtual bool Skip(int count) { + GOOGLE_LOG(FATAL) << "Tests never call this."; + return false; + } + virtual int64 ByteCount() const { return 0; } + int count_; + } in; + CodedInputStream input(&in); + input.ReadTag(); + EXPECT_TRUE(input.ConsumedEntireMessage()); +} + +TEST_1D(CodedStreamTest, ExpectTag, kVarintCases) { + // Leave one byte at the beginning of the buffer so we can read it + // to force the first buffer to be loaded. + buffer_[0] = '\0'; + memcpy(buffer_ + 1, kVarintCases_case.bytes, kVarintCases_case.size); + ArrayInputStream input(buffer_, sizeof(buffer_)); + + { + CodedInputStream coded_input(&input); + + // Read one byte to force coded_input.Refill() to be called. Otherwise, + // ExpectTag() will return a false negative. + uint8 dummy; + coded_input.ReadRaw(&dummy, 1); + EXPECT_EQ((uint)'\0', (uint)dummy); + + uint32 expected_value = static_cast(kVarintCases_case.value); + + // ExpectTag() produces false negatives for large values. 
+ if (kVarintCases_case.size <= 2) { + EXPECT_FALSE(coded_input.ExpectTag(expected_value + 1)); + EXPECT_TRUE(coded_input.ExpectTag(expected_value)); + } else { + EXPECT_FALSE(coded_input.ExpectTag(expected_value)); + } + } + + if (kVarintCases_case.size <= 2) { + EXPECT_EQ(kVarintCases_case.size + 1, input.ByteCount()); + } else { + EXPECT_EQ(1, input.ByteCount()); + } +} + +TEST_1D(CodedStreamTest, ExpectTagFromArray, kVarintCases) { + memcpy(buffer_, kVarintCases_case.bytes, kVarintCases_case.size); + + const uint32 expected_value = static_cast(kVarintCases_case.value); + + // If the expectation succeeds, it should return a pointer past the tag. + if (kVarintCases_case.size <= 2) { + EXPECT_TRUE(NULL == + CodedInputStream::ExpectTagFromArray(buffer_, + expected_value + 1)); + EXPECT_TRUE(buffer_ + kVarintCases_case.size == + CodedInputStream::ExpectTagFromArray(buffer_, expected_value)); + } else { + EXPECT_TRUE(NULL == + CodedInputStream::ExpectTagFromArray(buffer_, expected_value)); + } +} + +TEST_2D(CodedStreamTest, ReadVarint64, kVarintCases, kBlockSizes) { + memcpy(buffer_, kVarintCases_case.bytes, kVarintCases_case.size); + ArrayInputStream input(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedInputStream coded_input(&input); + + uint64 value; + EXPECT_TRUE(coded_input.ReadVarint64(&value)); + EXPECT_EQ(kVarintCases_case.value, value); + } + + EXPECT_EQ(kVarintCases_case.size, input.ByteCount()); +} + +TEST_2D(CodedStreamTest, WriteVarint32, kVarintCases, kBlockSizes) { + if (kVarintCases_case.value > ULL(0x00000000FFFFFFFF)) { + // Skip this test for the 64-bit values. + return; + } + + ArrayOutputStream output(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedOutputStream coded_output(&output); + + coded_output.WriteVarint32(static_cast(kVarintCases_case.value)); + EXPECT_FALSE(coded_output.HadError()); + + EXPECT_EQ(kVarintCases_case.size, coded_output.ByteCount()); + } + + EXPECT_EQ(kVarintCases_case.size, output.ByteCount()); + EXPECT_EQ(0, + memcmp(buffer_, kVarintCases_case.bytes, kVarintCases_case.size)); +} + +TEST_2D(CodedStreamTest, WriteVarint64, kVarintCases, kBlockSizes) { + ArrayOutputStream output(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedOutputStream coded_output(&output); + + coded_output.WriteVarint64(kVarintCases_case.value); + EXPECT_FALSE(coded_output.HadError()); + + EXPECT_EQ(kVarintCases_case.size, coded_output.ByteCount()); + } + + EXPECT_EQ(kVarintCases_case.size, output.ByteCount()); + EXPECT_EQ(0, + memcmp(buffer_, kVarintCases_case.bytes, kVarintCases_case.size)); +} + +// This test causes gcc 3.3.5 (and earlier?) 
to give the cryptic error: +// "sorry, unimplemented: `method_call_expr' not supported by dump_expr" +#if !defined(__GNUC__) || __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 3) + +int32 kSignExtendedVarintCases[] = { + 0, 1, -1, 1237894, -37895138 +}; + +TEST_2D(CodedStreamTest, WriteVarint32SignExtended, + kSignExtendedVarintCases, kBlockSizes) { + ArrayOutputStream output(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedOutputStream coded_output(&output); + + coded_output.WriteVarint32SignExtended(kSignExtendedVarintCases_case); + EXPECT_FALSE(coded_output.HadError()); + + if (kSignExtendedVarintCases_case < 0) { + EXPECT_EQ(10, coded_output.ByteCount()); + } else { + EXPECT_LE(coded_output.ByteCount(), 5); + } + } + + if (kSignExtendedVarintCases_case < 0) { + EXPECT_EQ(10, output.ByteCount()); + } else { + EXPECT_LE(output.ByteCount(), 5); + } + + // Read value back in as a varint64 and insure it matches. + ArrayInputStream input(buffer_, sizeof(buffer_)); + + { + CodedInputStream coded_input(&input); + + uint64 value; + EXPECT_TRUE(coded_input.ReadVarint64(&value)); + + EXPECT_EQ(kSignExtendedVarintCases_case, static_cast(value)); + } + + EXPECT_EQ(output.ByteCount(), input.ByteCount()); +} + +#endif + + +// ------------------------------------------------------------------- +// Varint failure test. + +struct VarintErrorCase { + uint8 bytes[12]; + int size; + bool can_parse; +}; + +inline std::ostream& operator<<(std::ostream& os, const VarintErrorCase& c) { + return os << "size " << c.size; +} + +const VarintErrorCase kVarintErrorCases[] = { + // Control case. (Insures that there isn't something else wrong that + // makes parsing always fail.) + {{0x00}, 1, true}, + + // No input data. + {{}, 0, false}, + + // Input ends unexpectedly. + {{0xf0, 0xab}, 2, false}, + + // Input ends unexpectedly after 32 bits. + {{0xf0, 0xab, 0xc9, 0x9a, 0xf8, 0xb2}, 6, false}, + + // Longer than 10 bytes. + {{0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01}, + 11, false}, +}; + +TEST_2D(CodedStreamTest, ReadVarint32Error, kVarintErrorCases, kBlockSizes) { + memcpy(buffer_, kVarintErrorCases_case.bytes, kVarintErrorCases_case.size); + ArrayInputStream input(buffer_, kVarintErrorCases_case.size, + kBlockSizes_case); + CodedInputStream coded_input(&input); + + uint32 value; + EXPECT_EQ(kVarintErrorCases_case.can_parse, coded_input.ReadVarint32(&value)); +} + +TEST_2D(CodedStreamTest, ReadVarint64Error, kVarintErrorCases, kBlockSizes) { + memcpy(buffer_, kVarintErrorCases_case.bytes, kVarintErrorCases_case.size); + ArrayInputStream input(buffer_, kVarintErrorCases_case.size, + kBlockSizes_case); + CodedInputStream coded_input(&input); + + uint64 value; + EXPECT_EQ(kVarintErrorCases_case.can_parse, coded_input.ReadVarint64(&value)); +} + +// ------------------------------------------------------------------- +// VarintSize + +struct VarintSizeCase { + uint64 value; + int size; +}; + +inline std::ostream& operator<<(std::ostream& os, const VarintSizeCase& c) { + return os << c.value; +} + +VarintSizeCase kVarintSizeCases[] = { + {0u, 1}, + {1u, 1}, + {127u, 1}, + {128u, 2}, + {758923u, 3}, + {4000000000u, 5}, + {ULL(41256202580718336), 8}, + {ULL(11964378330978735131), 10}, +}; + +TEST_1D(CodedStreamTest, VarintSize32, kVarintSizeCases) { + if (kVarintSizeCases_case.value > 0xffffffffu) { + // Skip 64-bit values. 
+ return; + } + + EXPECT_EQ(kVarintSizeCases_case.size, + CodedOutputStream::VarintSize32( + static_cast(kVarintSizeCases_case.value))); +} + +TEST_1D(CodedStreamTest, VarintSize64, kVarintSizeCases) { + EXPECT_EQ(kVarintSizeCases_case.size, + CodedOutputStream::VarintSize64(kVarintSizeCases_case.value)); +} + +// ------------------------------------------------------------------- +// Fixed-size int tests + +struct Fixed32Case { + uint8 bytes[sizeof(uint32)]; // Encoded bytes. + uint32 value; // Parsed value. +}; + +struct Fixed64Case { + uint8 bytes[sizeof(uint64)]; // Encoded bytes. + uint64 value; // Parsed value. +}; + +inline std::ostream& operator<<(std::ostream& os, const Fixed32Case& c) { + return os << "0x" << hex << c.value << dec; +} + +inline std::ostream& operator<<(std::ostream& os, const Fixed64Case& c) { + return os << "0x" << hex << c.value << dec; +} + +Fixed32Case kFixed32Cases[] = { + {{0xef, 0xcd, 0xab, 0x90}, 0x90abcdefu}, + {{0x12, 0x34, 0x56, 0x78}, 0x78563412u}, +}; + +Fixed64Case kFixed64Cases[] = { + {{0xef, 0xcd, 0xab, 0x90, 0x12, 0x34, 0x56, 0x78}, ULL(0x7856341290abcdef)}, + {{0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88}, ULL(0x8877665544332211)}, +}; + +TEST_2D(CodedStreamTest, ReadLittleEndian32, kFixed32Cases, kBlockSizes) { + memcpy(buffer_, kFixed32Cases_case.bytes, sizeof(kFixed32Cases_case.bytes)); + ArrayInputStream input(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedInputStream coded_input(&input); + + uint32 value; + EXPECT_TRUE(coded_input.ReadLittleEndian32(&value)); + EXPECT_EQ(kFixed32Cases_case.value, value); + } + + EXPECT_EQ(sizeof(uint32), input.ByteCount()); +} + +TEST_2D(CodedStreamTest, ReadLittleEndian64, kFixed64Cases, kBlockSizes) { + memcpy(buffer_, kFixed64Cases_case.bytes, sizeof(kFixed64Cases_case.bytes)); + ArrayInputStream input(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedInputStream coded_input(&input); + + uint64 value; + EXPECT_TRUE(coded_input.ReadLittleEndian64(&value)); + EXPECT_EQ(kFixed64Cases_case.value, value); + } + + EXPECT_EQ(sizeof(uint64), input.ByteCount()); +} + +TEST_2D(CodedStreamTest, WriteLittleEndian32, kFixed32Cases, kBlockSizes) { + ArrayOutputStream output(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedOutputStream coded_output(&output); + + coded_output.WriteLittleEndian32(kFixed32Cases_case.value); + EXPECT_FALSE(coded_output.HadError()); + + EXPECT_EQ(sizeof(uint32), coded_output.ByteCount()); + } + + EXPECT_EQ(sizeof(uint32), output.ByteCount()); + EXPECT_EQ(0, memcmp(buffer_, kFixed32Cases_case.bytes, sizeof(uint32))); +} + +TEST_2D(CodedStreamTest, WriteLittleEndian64, kFixed64Cases, kBlockSizes) { + ArrayOutputStream output(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedOutputStream coded_output(&output); + + coded_output.WriteLittleEndian64(kFixed64Cases_case.value); + EXPECT_FALSE(coded_output.HadError()); + + EXPECT_EQ(sizeof(uint64), coded_output.ByteCount()); + } + + EXPECT_EQ(sizeof(uint64), output.ByteCount()); + EXPECT_EQ(0, memcmp(buffer_, kFixed64Cases_case.bytes, sizeof(uint64))); +} + +// Tests using the static methods to read fixed-size values from raw arrays. 
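For reference, the byte-order contract exercised by these raw-array helpers and the tests that follow can be written out as a short stand-alone sketch. ReadLE32 is an illustrative name only, not protobuf's actual ReadLittleEndian32FromArray implementation:

#include <cstdint>

// Decode four little-endian bytes and return a pointer just past them,
// mirroring the "return the end of the consumed region" contract tested below.
inline const uint8_t* ReadLE32(const uint8_t* p, uint32_t* value) {
  *value =  static_cast<uint32_t>(p[0])
         | (static_cast<uint32_t>(p[1]) << 8)
         | (static_cast<uint32_t>(p[2]) << 16)
         | (static_cast<uint32_t>(p[3]) << 24);
  return p + 4;
}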
+ +TEST_1D(CodedStreamTest, ReadLittleEndian32FromArray, kFixed32Cases) { + memcpy(buffer_, kFixed32Cases_case.bytes, sizeof(kFixed32Cases_case.bytes)); + + uint32 value; + const uint8* end = CodedInputStream::ReadLittleEndian32FromArray( + buffer_, &value); + EXPECT_EQ(kFixed32Cases_case.value, value); + EXPECT_TRUE(end == buffer_ + sizeof(value)); +} + +TEST_1D(CodedStreamTest, ReadLittleEndian64FromArray, kFixed64Cases) { + memcpy(buffer_, kFixed64Cases_case.bytes, sizeof(kFixed64Cases_case.bytes)); + + uint64 value; + const uint8* end = CodedInputStream::ReadLittleEndian64FromArray( + buffer_, &value); + EXPECT_EQ(kFixed64Cases_case.value, value); + EXPECT_TRUE(end == buffer_ + sizeof(value)); +} + +// ------------------------------------------------------------------- +// Raw reads and writes + +const char kRawBytes[] = "Some bytes which will be written and read raw."; + +TEST_1D(CodedStreamTest, ReadRaw, kBlockSizes) { + memcpy(buffer_, kRawBytes, sizeof(kRawBytes)); + ArrayInputStream input(buffer_, sizeof(buffer_), kBlockSizes_case); + char read_buffer[sizeof(kRawBytes)]; + + { + CodedInputStream coded_input(&input); + + EXPECT_TRUE(coded_input.ReadRaw(read_buffer, sizeof(kRawBytes))); + EXPECT_EQ(0, memcmp(kRawBytes, read_buffer, sizeof(kRawBytes))); + } + + EXPECT_EQ(sizeof(kRawBytes), input.ByteCount()); +} + +TEST_1D(CodedStreamTest, WriteRaw, kBlockSizes) { + ArrayOutputStream output(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedOutputStream coded_output(&output); + + coded_output.WriteRaw(kRawBytes, sizeof(kRawBytes)); + EXPECT_FALSE(coded_output.HadError()); + + EXPECT_EQ(sizeof(kRawBytes), coded_output.ByteCount()); + } + + EXPECT_EQ(sizeof(kRawBytes), output.ByteCount()); + EXPECT_EQ(0, memcmp(buffer_, kRawBytes, sizeof(kRawBytes))); +} + +TEST_1D(CodedStreamTest, ReadString, kBlockSizes) { + memcpy(buffer_, kRawBytes, sizeof(kRawBytes)); + ArrayInputStream input(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedInputStream coded_input(&input); + + string str; + EXPECT_TRUE(coded_input.ReadString(&str, strlen(kRawBytes))); + EXPECT_EQ(kRawBytes, str); + } + + EXPECT_EQ(strlen(kRawBytes), input.ByteCount()); +} + +// Check to make sure ReadString doesn't crash on impossibly large strings. +TEST_1D(CodedStreamTest, ReadStringImpossiblyLarge, kBlockSizes) { + ArrayInputStream input(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedInputStream coded_input(&input); + + string str; + // Try to read a gigabyte. + EXPECT_FALSE(coded_input.ReadString(&str, 1 << 30)); + } +} + +TEST_F(CodedStreamTest, ReadStringImpossiblyLargeFromStringOnStack) { + // Same test as above, except directly use a buffer. This used to cause + // crashes while the above did not. 
+ uint8 buffer[8]; + CodedInputStream coded_input(buffer, 8); + string str; + EXPECT_FALSE(coded_input.ReadString(&str, 1 << 30)); +} + +TEST_F(CodedStreamTest, ReadStringImpossiblyLargeFromStringOnHeap) { + scoped_array buffer(new uint8[8]); + CodedInputStream coded_input(buffer.get(), 8); + string str; + EXPECT_FALSE(coded_input.ReadString(&str, 1 << 30)); +} + + +// ------------------------------------------------------------------- +// Skip + +const char kSkipTestBytes[] = + ""; +const char kSkipOutputTestBytes[] = + "---------------------------------"; + +TEST_1D(CodedStreamTest, SkipInput, kBlockSizes) { + memcpy(buffer_, kSkipTestBytes, sizeof(kSkipTestBytes)); + ArrayInputStream input(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedInputStream coded_input(&input); + + string str; + EXPECT_TRUE(coded_input.ReadString(&str, strlen(""))); + EXPECT_EQ("", str); + EXPECT_TRUE(coded_input.Skip(strlen(""))); + EXPECT_TRUE(coded_input.ReadString(&str, strlen(""))); + EXPECT_EQ("", str); + } + + EXPECT_EQ(strlen(kSkipTestBytes), input.ByteCount()); +} + +// ------------------------------------------------------------------- +// GetDirectBufferPointer + +TEST_F(CodedStreamTest, GetDirectBufferPointerInput) { + ArrayInputStream input(buffer_, sizeof(buffer_), 8); + CodedInputStream coded_input(&input); + + const void* ptr; + int size; + + EXPECT_TRUE(coded_input.GetDirectBufferPointer(&ptr, &size)); + EXPECT_EQ(buffer_, ptr); + EXPECT_EQ(8, size); + + // Peeking again should return the same pointer. + EXPECT_TRUE(coded_input.GetDirectBufferPointer(&ptr, &size)); + EXPECT_EQ(buffer_, ptr); + EXPECT_EQ(8, size); + + // Skip forward in the same buffer then peek again. + EXPECT_TRUE(coded_input.Skip(3)); + EXPECT_TRUE(coded_input.GetDirectBufferPointer(&ptr, &size)); + EXPECT_EQ(buffer_ + 3, ptr); + EXPECT_EQ(5, size); + + // Skip to end of buffer and peek -- should get next buffer. + EXPECT_TRUE(coded_input.Skip(5)); + EXPECT_TRUE(coded_input.GetDirectBufferPointer(&ptr, &size)); + EXPECT_EQ(buffer_ + 8, ptr); + EXPECT_EQ(8, size); +} + +TEST_F(CodedStreamTest, GetDirectBufferPointerInlineInput) { + ArrayInputStream input(buffer_, sizeof(buffer_), 8); + CodedInputStream coded_input(&input); + + const void* ptr; + int size; + + coded_input.GetDirectBufferPointerInline(&ptr, &size); + EXPECT_EQ(buffer_, ptr); + EXPECT_EQ(8, size); + + // Peeking again should return the same pointer. + coded_input.GetDirectBufferPointerInline(&ptr, &size); + EXPECT_EQ(buffer_, ptr); + EXPECT_EQ(8, size); + + // Skip forward in the same buffer then peek again. + EXPECT_TRUE(coded_input.Skip(3)); + coded_input.GetDirectBufferPointerInline(&ptr, &size); + EXPECT_EQ(buffer_ + 3, ptr); + EXPECT_EQ(5, size); + + // Skip to end of buffer and peek -- should return false and provide an empty + // buffer. It does not try to Refresh(). + EXPECT_TRUE(coded_input.Skip(5)); + coded_input.GetDirectBufferPointerInline(&ptr, &size); + EXPECT_EQ(buffer_ + 8, ptr); + EXPECT_EQ(0, size); +} + +TEST_F(CodedStreamTest, GetDirectBufferPointerOutput) { + ArrayOutputStream output(buffer_, sizeof(buffer_), 8); + CodedOutputStream coded_output(&output); + + void* ptr; + int size; + + EXPECT_TRUE(coded_output.GetDirectBufferPointer(&ptr, &size)); + EXPECT_EQ(buffer_, ptr); + EXPECT_EQ(8, size); + + // Peeking again should return the same pointer. + EXPECT_TRUE(coded_output.GetDirectBufferPointer(&ptr, &size)); + EXPECT_EQ(buffer_, ptr); + EXPECT_EQ(8, size); + + // Skip forward in the same buffer then peek again. 
+ EXPECT_TRUE(coded_output.Skip(3)); + EXPECT_TRUE(coded_output.GetDirectBufferPointer(&ptr, &size)); + EXPECT_EQ(buffer_ + 3, ptr); + EXPECT_EQ(5, size); + + // Skip to end of buffer and peek -- should get next buffer. + EXPECT_TRUE(coded_output.Skip(5)); + EXPECT_TRUE(coded_output.GetDirectBufferPointer(&ptr, &size)); + EXPECT_EQ(buffer_ + 8, ptr); + EXPECT_EQ(8, size); + + // Skip over multiple buffers. + EXPECT_TRUE(coded_output.Skip(22)); + EXPECT_TRUE(coded_output.GetDirectBufferPointer(&ptr, &size)); + EXPECT_EQ(buffer_ + 30, ptr); + EXPECT_EQ(2, size); +} + +// ------------------------------------------------------------------- +// Limits + +TEST_1D(CodedStreamTest, BasicLimit, kBlockSizes) { + ArrayInputStream input(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedInputStream coded_input(&input); + + EXPECT_EQ(-1, coded_input.BytesUntilLimit()); + CodedInputStream::Limit limit = coded_input.PushLimit(8); + + // Read until we hit the limit. + uint32 value; + EXPECT_EQ(8, coded_input.BytesUntilLimit()); + EXPECT_TRUE(coded_input.ReadLittleEndian32(&value)); + EXPECT_EQ(4, coded_input.BytesUntilLimit()); + EXPECT_TRUE(coded_input.ReadLittleEndian32(&value)); + EXPECT_EQ(0, coded_input.BytesUntilLimit()); + EXPECT_FALSE(coded_input.ReadLittleEndian32(&value)); + EXPECT_EQ(0, coded_input.BytesUntilLimit()); + + coded_input.PopLimit(limit); + + EXPECT_EQ(-1, coded_input.BytesUntilLimit()); + EXPECT_TRUE(coded_input.ReadLittleEndian32(&value)); + } + + EXPECT_EQ(12, input.ByteCount()); +} + +// Test what happens when we push two limits where the second (top) one is +// shorter. +TEST_1D(CodedStreamTest, SmallLimitOnTopOfBigLimit, kBlockSizes) { + ArrayInputStream input(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedInputStream coded_input(&input); + + EXPECT_EQ(-1, coded_input.BytesUntilLimit()); + CodedInputStream::Limit limit1 = coded_input.PushLimit(8); + EXPECT_EQ(8, coded_input.BytesUntilLimit()); + CodedInputStream::Limit limit2 = coded_input.PushLimit(4); + + uint32 value; + + // Read until we hit limit2, the top and shortest limit. + EXPECT_EQ(4, coded_input.BytesUntilLimit()); + EXPECT_TRUE(coded_input.ReadLittleEndian32(&value)); + EXPECT_EQ(0, coded_input.BytesUntilLimit()); + EXPECT_FALSE(coded_input.ReadLittleEndian32(&value)); + EXPECT_EQ(0, coded_input.BytesUntilLimit()); + + coded_input.PopLimit(limit2); + + // Read until we hit limit1. + EXPECT_EQ(4, coded_input.BytesUntilLimit()); + EXPECT_TRUE(coded_input.ReadLittleEndian32(&value)); + EXPECT_EQ(0, coded_input.BytesUntilLimit()); + EXPECT_FALSE(coded_input.ReadLittleEndian32(&value)); + EXPECT_EQ(0, coded_input.BytesUntilLimit()); + + coded_input.PopLimit(limit1); + + // No more limits. + EXPECT_EQ(-1, coded_input.BytesUntilLimit()); + EXPECT_TRUE(coded_input.ReadLittleEndian32(&value)); + } + + EXPECT_EQ(12, input.ByteCount()); +} + +// Test what happens when we push two limits where the second (top) one is +// longer. In this case, the top limit is shortened to match the previous +// limit. +TEST_1D(CodedStreamTest, BigLimitOnTopOfSmallLimit, kBlockSizes) { + ArrayInputStream input(buffer_, sizeof(buffer_), kBlockSizes_case); + + { + CodedInputStream coded_input(&input); + + EXPECT_EQ(-1, coded_input.BytesUntilLimit()); + CodedInputStream::Limit limit1 = coded_input.PushLimit(4); + EXPECT_EQ(4, coded_input.BytesUntilLimit()); + CodedInputStream::Limit limit2 = coded_input.PushLimit(8); + + uint32 value; + + // Read until we hit limit2. Except, wait! 
limit1 is shorter, so + // we end up hitting that first, despite having 4 bytes to go on + // limit2. + EXPECT_EQ(4, coded_input.BytesUntilLimit()); + EXPECT_TRUE(coded_input.ReadLittleEndian32(&value)); + EXPECT_EQ(0, coded_input.BytesUntilLimit()); + EXPECT_FALSE(coded_input.ReadLittleEndian32(&value)); + EXPECT_EQ(0, coded_input.BytesUntilLimit()); + + coded_input.PopLimit(limit2); + + // OK, popped limit2, now limit1 is on top, which we've already hit. + EXPECT_EQ(0, coded_input.BytesUntilLimit()); + EXPECT_FALSE(coded_input.ReadLittleEndian32(&value)); + EXPECT_EQ(0, coded_input.BytesUntilLimit()); + + coded_input.PopLimit(limit1); + + // No more limits. + EXPECT_EQ(-1, coded_input.BytesUntilLimit()); + EXPECT_TRUE(coded_input.ReadLittleEndian32(&value)); + } + + EXPECT_EQ(8, input.ByteCount()); +} + +TEST_F(CodedStreamTest, ExpectAtEnd) { + // Test ExpectAtEnd(), which is based on limits. + ArrayInputStream input(buffer_, sizeof(buffer_)); + CodedInputStream coded_input(&input); + + EXPECT_FALSE(coded_input.ExpectAtEnd()); + + CodedInputStream::Limit limit = coded_input.PushLimit(4); + + uint32 value; + EXPECT_TRUE(coded_input.ReadLittleEndian32(&value)); + EXPECT_TRUE(coded_input.ExpectAtEnd()); + + coded_input.PopLimit(limit); + EXPECT_FALSE(coded_input.ExpectAtEnd()); +} + +TEST_F(CodedStreamTest, NegativeLimit) { + // Check what happens when we push a negative limit. + ArrayInputStream input(buffer_, sizeof(buffer_)); + CodedInputStream coded_input(&input); + + CodedInputStream::Limit limit = coded_input.PushLimit(-1234); + // BytesUntilLimit() returns -1 to mean "no limit", which actually means + // "the limit is INT_MAX relative to the beginning of the stream". + EXPECT_EQ(-1, coded_input.BytesUntilLimit()); + coded_input.PopLimit(limit); +} + +TEST_F(CodedStreamTest, NegativeLimitAfterReading) { + // Check what happens when we push a negative limit. + ArrayInputStream input(buffer_, sizeof(buffer_)); + CodedInputStream coded_input(&input); + ASSERT_TRUE(coded_input.Skip(128)); + + CodedInputStream::Limit limit = coded_input.PushLimit(-64); + // BytesUntilLimit() returns -1 to mean "no limit", which actually means + // "the limit is INT_MAX relative to the beginning of the stream". + EXPECT_EQ(-1, coded_input.BytesUntilLimit()); + coded_input.PopLimit(limit); +} + +TEST_F(CodedStreamTest, OverflowLimit) { + // Check what happens when we push a limit large enough that its absolute + // position is more than 2GB into the stream. + ArrayInputStream input(buffer_, sizeof(buffer_)); + CodedInputStream coded_input(&input); + ASSERT_TRUE(coded_input.Skip(128)); + + CodedInputStream::Limit limit = coded_input.PushLimit(INT_MAX); + // BytesUntilLimit() returns -1 to mean "no limit", which actually means + // "the limit is INT_MAX relative to the beginning of the stream". 
+ EXPECT_EQ(-1, coded_input.BytesUntilLimit()); + coded_input.PopLimit(limit); +} + +TEST_F(CodedStreamTest, TotalBytesLimit) { + ArrayInputStream input(buffer_, sizeof(buffer_)); + CodedInputStream coded_input(&input); + coded_input.SetTotalBytesLimit(16, -1); + + string str; + EXPECT_TRUE(coded_input.ReadString(&str, 16)); + + vector errors; + + { + ScopedMemoryLog error_log; + EXPECT_FALSE(coded_input.ReadString(&str, 1)); + errors = error_log.GetMessages(ERROR); + } + + ASSERT_EQ(1, errors.size()); + EXPECT_PRED_FORMAT2(testing::IsSubstring, + "A protocol message was rejected because it was too big", errors[0]); + + coded_input.SetTotalBytesLimit(32, -1); + EXPECT_TRUE(coded_input.ReadString(&str, 16)); +} + +TEST_F(CodedStreamTest, TotalBytesLimitNotValidMessageEnd) { + // total_bytes_limit_ is not a valid place for a message to end. + + ArrayInputStream input(buffer_, sizeof(buffer_)); + CodedInputStream coded_input(&input); + + // Set both total_bytes_limit and a regular limit at 16 bytes. + coded_input.SetTotalBytesLimit(16, -1); + CodedInputStream::Limit limit = coded_input.PushLimit(16); + + // Read 16 bytes. + string str; + EXPECT_TRUE(coded_input.ReadString(&str, 16)); + + // Read a tag. Should fail, but report being a valid endpoint since it's + // a regular limit. + EXPECT_EQ(0, coded_input.ReadTag()); + EXPECT_TRUE(coded_input.ConsumedEntireMessage()); + + // Pop the limit. + coded_input.PopLimit(limit); + + // Read a tag. Should fail, and report *not* being a valid endpoint, since + // this time we're hitting the total bytes limit. + EXPECT_EQ(0, coded_input.ReadTag()); + EXPECT_FALSE(coded_input.ConsumedEntireMessage()); +} + + +TEST_F(CodedStreamTest, RecursionLimit) { + ArrayInputStream input(buffer_, sizeof(buffer_)); + CodedInputStream coded_input(&input); + coded_input.SetRecursionLimit(4); + + // This is way too much testing for a counter. 
+ EXPECT_TRUE(coded_input.IncrementRecursionDepth()); // 1 + EXPECT_TRUE(coded_input.IncrementRecursionDepth()); // 2 + EXPECT_TRUE(coded_input.IncrementRecursionDepth()); // 3 + EXPECT_TRUE(coded_input.IncrementRecursionDepth()); // 4 + EXPECT_FALSE(coded_input.IncrementRecursionDepth()); // 5 + EXPECT_FALSE(coded_input.IncrementRecursionDepth()); // 6 + coded_input.DecrementRecursionDepth(); // 5 + EXPECT_FALSE(coded_input.IncrementRecursionDepth()); // 6 + coded_input.DecrementRecursionDepth(); // 5 + coded_input.DecrementRecursionDepth(); // 4 + coded_input.DecrementRecursionDepth(); // 3 + EXPECT_TRUE(coded_input.IncrementRecursionDepth()); // 4 + EXPECT_FALSE(coded_input.IncrementRecursionDepth()); // 5 + coded_input.DecrementRecursionDepth(); // 4 + coded_input.DecrementRecursionDepth(); // 3 + coded_input.DecrementRecursionDepth(); // 2 + coded_input.DecrementRecursionDepth(); // 1 + coded_input.DecrementRecursionDepth(); // 0 + coded_input.DecrementRecursionDepth(); // 0 + coded_input.DecrementRecursionDepth(); // 0 + EXPECT_TRUE(coded_input.IncrementRecursionDepth()); // 1 + EXPECT_TRUE(coded_input.IncrementRecursionDepth()); // 2 + EXPECT_TRUE(coded_input.IncrementRecursionDepth()); // 3 + EXPECT_TRUE(coded_input.IncrementRecursionDepth()); // 4 + EXPECT_FALSE(coded_input.IncrementRecursionDepth()); // 5 + + coded_input.SetRecursionLimit(6); + EXPECT_TRUE(coded_input.IncrementRecursionDepth()); // 6 + EXPECT_FALSE(coded_input.IncrementRecursionDepth()); // 7 +} + +class ReallyBigInputStream : public ZeroCopyInputStream { + public: + ReallyBigInputStream() : backup_amount_(0), buffer_count_(0) {} + ~ReallyBigInputStream() {} + + // implements ZeroCopyInputStream ---------------------------------- + bool Next(const void** data, int* size) { + // We only expect BackUp() to be called at the end. + EXPECT_EQ(0, backup_amount_); + + switch (buffer_count_++) { + case 0: + *data = buffer_; + *size = sizeof(buffer_); + return true; + case 1: + // Return an enormously large buffer that, when combined with the 1k + // returned already, should overflow the total_bytes_read_ counter in + // CodedInputStream. Note that we'll only read the first 1024 bytes + // of this buffer so it's OK that we have it point at buffer_. + *data = buffer_; + *size = INT_MAX; + return true; + default: + return false; + } + } + + void BackUp(int count) { + backup_amount_ = count; + } + + bool Skip(int count) { GOOGLE_LOG(FATAL) << "Not implemented."; return false; } + int64 ByteCount() const { GOOGLE_LOG(FATAL) << "Not implemented."; return 0; } + + int backup_amount_; + + private: + char buffer_[1024]; + int64 buffer_count_; +}; + +TEST_F(CodedStreamTest, InputOver2G) { + // CodedInputStream should gracefully handle input over 2G and call + // input.BackUp() with the correct number of bytes on destruction. 
+ ReallyBigInputStream input; + + vector errors; + + { + ScopedMemoryLog error_log; + CodedInputStream coded_input(&input); + string str; + EXPECT_TRUE(coded_input.ReadString(&str, 512)); + EXPECT_TRUE(coded_input.ReadString(&str, 1024)); + errors = error_log.GetMessages(ERROR); + } + + EXPECT_EQ(INT_MAX - 512, input.backup_amount_); + EXPECT_EQ(0, errors.size()); +} + +// =================================================================== + + +} // namespace +} // namespace io +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/gzip_stream.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/gzip_stream.cc new file mode 100644 index 0000000000..0f1ff872aa --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/gzip_stream.cc @@ -0,0 +1,335 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: brianolson@google.com (Brian Olson) +// +// This file contains the implementation of classes GzipInputStream and +// GzipOutputStream. 
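As a usage sketch for the two classes implemented in this file (not part of the file itself): the hypothetical Roundtrip function below compresses a payload through GzipOutputStream into a std::string and decompresses it again with GzipInputStream. It assumes protobuf's StringOutputStream and ArrayInputStream helpers are available; the exact header path for those differs between protobuf versions, and zlib support (HAVE_ZLIB) must be enabled.

#include <algorithm>
#include <cstring>
#include <string>

#include <google/protobuf/io/gzip_stream.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>

using google::protobuf::io::ArrayInputStream;
using google::protobuf::io::GzipInputStream;
using google::protobuf::io::GzipOutputStream;
using google::protobuf::io::StringOutputStream;

// Compress `payload` into a std::string, then decompress it again.
std::string Roundtrip(const std::string& payload) {
  std::string compressed;
  {
    StringOutputStream raw(&compressed);
    GzipOutputStream gzip(&raw);
    size_t written = 0;
    void* out;
    int size;
    while (written < payload.size() && gzip.Next(&out, &size)) {
      size_t n = std::min<size_t>(size, payload.size() - written);
      std::memcpy(out, payload.data() + written, n);
      written += n;
      gzip.BackUp(size - static_cast<int>(n));  // hand back the unused tail
    }
    gzip.Close();  // flushes the remaining zlib state into `compressed`
  }

  std::string restored;
  ArrayInputStream raw_in(compressed.data(),
                          static_cast<int>(compressed.size()));
  GzipInputStream gunzip(&raw_in);
  const void* in;
  int size;
  while (gunzip.Next(&in, &size)) {
    restored.append(static_cast<const char*>(in), size);
  }
  return restored;  // equals `payload` when everything succeeded
}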
+ +#include "config.h" + +#if HAVE_ZLIB +#include + +#include + +namespace google { +namespace protobuf { +namespace io { + +static const int kDefaultBufferSize = 65536; + +GzipInputStream::GzipInputStream( + ZeroCopyInputStream* sub_stream, Format format, int buffer_size) + : format_(format), sub_stream_(sub_stream), zerror_(Z_OK) { + zcontext_.zalloc = Z_NULL; + zcontext_.zfree = Z_NULL; + zcontext_.opaque = Z_NULL; + zcontext_.total_out = 0; + zcontext_.next_in = NULL; + zcontext_.avail_in = 0; + zcontext_.total_in = 0; + zcontext_.msg = NULL; + if (buffer_size == -1) { + output_buffer_length_ = kDefaultBufferSize; + } else { + output_buffer_length_ = buffer_size; + } + output_buffer_ = operator new(output_buffer_length_); + GOOGLE_CHECK(output_buffer_ != NULL); + zcontext_.next_out = static_cast(output_buffer_); + zcontext_.avail_out = output_buffer_length_; + output_position_ = output_buffer_; +} +GzipInputStream::~GzipInputStream() { + operator delete(output_buffer_); + zerror_ = inflateEnd(&zcontext_); +} + +static inline int internalInflateInit2( + z_stream* zcontext, GzipInputStream::Format format) { + int windowBitsFormat = 0; + switch (format) { + case GzipInputStream::GZIP: windowBitsFormat = 16; break; + case GzipInputStream::AUTO: windowBitsFormat = 32; break; + case GzipInputStream::ZLIB: windowBitsFormat = 0; break; + } + return inflateInit2(zcontext, /* windowBits */15 | windowBitsFormat); +} + +int GzipInputStream::Inflate(int flush) { + if ((zerror_ == Z_OK) && (zcontext_.avail_out == 0)) { + // previous inflate filled output buffer. don't change input params yet. + } else if (zcontext_.avail_in == 0) { + const void* in; + int in_size; + bool first = zcontext_.next_in == NULL; + bool ok = sub_stream_->Next(&in, &in_size); + if (!ok) { + zcontext_.next_out = NULL; + zcontext_.avail_out = 0; + return Z_STREAM_END; + } + zcontext_.next_in = static_cast(const_cast(in)); + zcontext_.avail_in = in_size; + if (first) { + int error = internalInflateInit2(&zcontext_, format_); + if (error != Z_OK) { + return error; + } + } + } + zcontext_.next_out = static_cast(output_buffer_); + zcontext_.avail_out = output_buffer_length_; + output_position_ = output_buffer_; + int error = inflate(&zcontext_, flush); + return error; +} + +void GzipInputStream::DoNextOutput(const void** data, int* size) { + *data = output_position_; + *size = ((uintptr_t)zcontext_.next_out) - ((uintptr_t)output_position_); + output_position_ = zcontext_.next_out; +} + +// implements ZeroCopyInputStream ---------------------------------- +bool GzipInputStream::Next(const void** data, int* size) { + bool ok = (zerror_ == Z_OK) || (zerror_ == Z_STREAM_END) + || (zerror_ == Z_BUF_ERROR); + if ((!ok) || (zcontext_.next_out == NULL)) { + return false; + } + if (zcontext_.next_out != output_position_) { + DoNextOutput(data, size); + return true; + } + if (zerror_ == Z_STREAM_END) { + if (zcontext_.next_out != NULL) { + // sub_stream_ may have concatenated streams to follow + zerror_ = inflateEnd(&zcontext_); + if (zerror_ != Z_OK) { + return false; + } + zerror_ = internalInflateInit2(&zcontext_, format_); + if (zerror_ != Z_OK) { + return false; + } + } else { + *data = NULL; + *size = 0; + return false; + } + } + zerror_ = Inflate(Z_NO_FLUSH); + if ((zerror_ == Z_STREAM_END) && (zcontext_.next_out == NULL)) { + // The underlying stream's Next returned false inside Inflate. 
+ return false; + } + ok = (zerror_ == Z_OK) || (zerror_ == Z_STREAM_END) + || (zerror_ == Z_BUF_ERROR); + if (!ok) { + return false; + } + DoNextOutput(data, size); + return true; +} +void GzipInputStream::BackUp(int count) { + output_position_ = reinterpret_cast( + reinterpret_cast(output_position_) - count); +} +bool GzipInputStream::Skip(int count) { + const void* data; + int size; + bool ok = Next(&data, &size); + while (ok && (size < count)) { + count -= size; + ok = Next(&data, &size); + } + if (size > count) { + BackUp(size - count); + } + return ok; +} +int64 GzipInputStream::ByteCount() const { + return zcontext_.total_out + + (((uintptr_t)zcontext_.next_out) - ((uintptr_t)output_position_)); +} + +// ========================================================================= + +GzipOutputStream::Options::Options() + : format(GZIP), + buffer_size(kDefaultBufferSize), + compression_level(Z_DEFAULT_COMPRESSION), + compression_strategy(Z_DEFAULT_STRATEGY) {} + +GzipOutputStream::GzipOutputStream(ZeroCopyOutputStream* sub_stream) { + Init(sub_stream, Options()); +} + +GzipOutputStream::GzipOutputStream(ZeroCopyOutputStream* sub_stream, + const Options& options) { + Init(sub_stream, options); +} + +GzipOutputStream::GzipOutputStream( + ZeroCopyOutputStream* sub_stream, Format format, int buffer_size) { + Options options; + options.format = format; + if (buffer_size != -1) { + options.buffer_size = buffer_size; + } + Init(sub_stream, options); +} + +void GzipOutputStream::Init(ZeroCopyOutputStream* sub_stream, + const Options& options) { + sub_stream_ = sub_stream; + sub_data_ = NULL; + sub_data_size_ = 0; + + input_buffer_length_ = options.buffer_size; + input_buffer_ = operator new(input_buffer_length_); + GOOGLE_CHECK(input_buffer_ != NULL); + + zcontext_.zalloc = Z_NULL; + zcontext_.zfree = Z_NULL; + zcontext_.opaque = Z_NULL; + zcontext_.next_out = NULL; + zcontext_.avail_out = 0; + zcontext_.total_out = 0; + zcontext_.next_in = NULL; + zcontext_.avail_in = 0; + zcontext_.total_in = 0; + zcontext_.msg = NULL; + // default to GZIP format + int windowBitsFormat = 16; + if (options.format == ZLIB) { + windowBitsFormat = 0; + } + zerror_ = deflateInit2( + &zcontext_, + options.compression_level, + Z_DEFLATED, + /* windowBits */15 | windowBitsFormat, + /* memLevel (default) */8, + options.compression_strategy); +} + +GzipOutputStream::~GzipOutputStream() { + Close(); + if (input_buffer_ != NULL) { + operator delete(input_buffer_); + } +} + +// private +int GzipOutputStream::Deflate(int flush) { + int error = Z_OK; + do { + if ((sub_data_ == NULL) || (zcontext_.avail_out == 0)) { + bool ok = sub_stream_->Next(&sub_data_, &sub_data_size_); + if (!ok) { + sub_data_ = NULL; + sub_data_size_ = 0; + return Z_BUF_ERROR; + } + GOOGLE_CHECK_GT(sub_data_size_, 0); + zcontext_.next_out = static_cast(sub_data_); + zcontext_.avail_out = sub_data_size_; + } + error = deflate(&zcontext_, flush); + } while (error == Z_OK && zcontext_.avail_out == 0); + if ((flush == Z_FULL_FLUSH) || (flush == Z_FINISH)) { + // Notify lower layer of data. + sub_stream_->BackUp(zcontext_.avail_out); + // We don't own the buffer anymore. 
+ sub_data_ = NULL; + sub_data_size_ = 0; + } + return error; +} + +// implements ZeroCopyOutputStream --------------------------------- +bool GzipOutputStream::Next(void** data, int* size) { + if ((zerror_ != Z_OK) && (zerror_ != Z_BUF_ERROR)) { + return false; + } + if (zcontext_.avail_in != 0) { + zerror_ = Deflate(Z_NO_FLUSH); + if (zerror_ != Z_OK) { + return false; + } + } + if (zcontext_.avail_in == 0) { + // all input was consumed. reset the buffer. + zcontext_.next_in = static_cast(input_buffer_); + zcontext_.avail_in = input_buffer_length_; + *data = input_buffer_; + *size = input_buffer_length_; + } else { + // The loop in Deflate should consume all avail_in + GOOGLE_LOG(DFATAL) << "Deflate left bytes unconsumed"; + } + return true; +} +void GzipOutputStream::BackUp(int count) { + GOOGLE_CHECK_GE(zcontext_.avail_in, count); + zcontext_.avail_in -= count; +} +int64 GzipOutputStream::ByteCount() const { + return zcontext_.total_in + zcontext_.avail_in; +} + +bool GzipOutputStream::Flush() { + do { + zerror_ = Deflate(Z_FULL_FLUSH); + } while (zerror_ == Z_OK); + return zerror_ == Z_OK; +} + +bool GzipOutputStream::Close() { + if ((zerror_ != Z_OK) && (zerror_ != Z_BUF_ERROR)) { + return false; + } + do { + zerror_ = Deflate(Z_FINISH); + } while (zerror_ == Z_OK); + zerror_ = deflateEnd(&zcontext_); + bool ok = zerror_ == Z_OK; + zerror_ = Z_STREAM_END; + return ok; +} + +} // namespace io +} // namespace protobuf +} // namespace google + +#endif // HAVE_ZLIB diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/gzip_stream.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/gzip_stream.h new file mode 100644 index 0000000000..65dbc5b557 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/gzip_stream.h @@ -0,0 +1,207 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: brianolson@google.com (Brian Olson) +// +// This file contains the definition for classes GzipInputStream and +// GzipOutputStream. +// +// GzipInputStream decompresses data from an underlying +// ZeroCopyInputStream and provides the decompressed data as a +// ZeroCopyInputStream. +// +// GzipOutputStream is an ZeroCopyOutputStream that compresses data to +// an underlying ZeroCopyOutputStream. + +#ifndef GOOGLE_PROTOBUF_IO_GZIP_STREAM_H__ +#define GOOGLE_PROTOBUF_IO_GZIP_STREAM_H__ + +#include + +#include + +namespace google { +namespace protobuf { +namespace io { + +// A ZeroCopyInputStream that reads compressed data through zlib +class LIBPROTOBUF_EXPORT GzipInputStream : public ZeroCopyInputStream { + public: + // Format key for constructor + enum Format { + // zlib will autodetect gzip header or deflate stream + AUTO = 0, + + // GZIP streams have some extra header data for file attributes. + GZIP = 1, + + // Simpler zlib stream format. + ZLIB = 2, + }; + + // buffer_size and format may be -1 for default of 64kB and GZIP format + explicit GzipInputStream( + ZeroCopyInputStream* sub_stream, + Format format = AUTO, + int buffer_size = -1); + virtual ~GzipInputStream(); + + // Return last error message or NULL if no error. + inline const char* ZlibErrorMessage() const { + return zcontext_.msg; + } + inline int ZlibErrorCode() const { + return zerror_; + } + + // implements ZeroCopyInputStream ---------------------------------- + bool Next(const void** data, int* size); + void BackUp(int count); + bool Skip(int count); + int64 ByteCount() const; + + private: + Format format_; + + ZeroCopyInputStream* sub_stream_; + + z_stream zcontext_; + int zerror_; + + void* output_buffer_; + void* output_position_; + size_t output_buffer_length_; + + int Inflate(int flush); + void DoNextOutput(const void** data, int* size); + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(GzipInputStream); +}; + + +class LIBPROTOBUF_EXPORT GzipOutputStream : public ZeroCopyOutputStream { + public: + // Format key for constructor + enum Format { + // GZIP streams have some extra header data for file attributes. + GZIP = 1, + + // Simpler zlib stream format. + ZLIB = 2, + }; + + struct Options { + // Defaults to GZIP. + Format format; + + // What size buffer to use internally. Defaults to 64kB. + int buffer_size; + + // A number between 0 and 9, where 0 is no compression and 9 is best + // compression. Defaults to Z_DEFAULT_COMPRESSION (see zlib.h). + int compression_level; + + // Defaults to Z_DEFAULT_STRATEGY. Can also be set to Z_FILTERED, + // Z_HUFFMAN_ONLY, or Z_RLE. See the documentation for deflateInit2 in + // zlib.h for definitions of these constants. + int compression_strategy; + + Options(); // Initializes with default values. + }; + + // Create a GzipOutputStream with default options. + explicit GzipOutputStream(ZeroCopyOutputStream* sub_stream); + + // Create a GzipOutputStream with the given options. 
+ GzipOutputStream( + ZeroCopyOutputStream* sub_stream, + const Options& options); + + // DEPRECATED: Use one of the above constructors instead. + GzipOutputStream( + ZeroCopyOutputStream* sub_stream, + Format format, + int buffer_size = -1) GOOGLE_ATTRIBUTE_DEPRECATED; + + virtual ~GzipOutputStream(); + + // Return last error message or NULL if no error. + inline const char* ZlibErrorMessage() const { + return zcontext_.msg; + } + inline int ZlibErrorCode() const { + return zerror_; + } + + // Flushes data written so far to zipped data in the underlying stream. + // It is the caller's responsibility to flush the underlying stream if + // necessary. + // Compression may be less efficient stopping and starting around flushes. + // Returns true if no error. + bool Flush(); + + // Writes out all data and closes the gzip stream. + // It is the caller's responsibility to close the underlying stream if + // necessary. + // Returns true if no error. + bool Close(); + + // implements ZeroCopyOutputStream --------------------------------- + bool Next(void** data, int* size); + void BackUp(int count); + int64 ByteCount() const; + + private: + ZeroCopyOutputStream* sub_stream_; + // Result from calling Next() on sub_stream_ + void* sub_data_; + int sub_data_size_; + + z_stream zcontext_; + int zerror_; + void* input_buffer_; + size_t input_buffer_length_; + + // Shared constructor code. + void Init(ZeroCopyOutputStream* sub_stream, const Options& options); + + // Do some compression. + // Takes zlib flush mode. + // Returns zlib error code. + int Deflate(int flush); + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(GzipOutputStream); +}; + +} // namespace io +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_IO_GZIP_STREAM_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/gzip_stream_unittest.sh b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/gzip_stream_unittest.sh new file mode 100644 index 0000000000..6e8a09437d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/gzip_stream_unittest.sh @@ -0,0 +1,44 @@ +#!/bin/sh -x +# +# Protocol Buffers - Google's data interchange format +# Copyright 2009 Google Inc. All rights reserved. +# http://code.google.com/p/protobuf/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Author: brianolson@google.com (Brian Olson) +# +# Test compatibility between command line gzip/gunzip binaries and +# ZeroCopyStream versions. + +TESTFILE=Makefile + +(./zcgzip < ${TESTFILE} | gunzip | cmp - ${TESTFILE}) && \ +(gzip < ${TESTFILE} | ./zcgunzip | cmp - ${TESTFILE}) + +# Result of "(cmd) && (cmd)" implicitly becomes result of this script +# and thus the test. diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/package_info.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/package_info.h new file mode 100644 index 0000000000..7a7a4e7738 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/package_info.h @@ -0,0 +1,54 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This file exists solely to document the google::protobuf::io namespace. +// It is not compiled into anything, but it may be read by an automated +// documentation generator. + +namespace google { + +namespace protobuf { + +// Auxiliary classes used for I/O. +// +// The Protocol Buffer library uses the classes in this package to deal with +// I/O and encoding/decoding raw bytes. 
Most users will not need to +// deal with this package. However, users who want to adapt the system to +// work with their own I/O abstractions -- e.g., to allow Protocol Buffers +// to be read from a different kind of input stream without the need for a +// temporary buffer -- should take a closer look. +namespace io {} + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/printer.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/printer.cc new file mode 100644 index 0000000000..9ab90deec6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/printer.cc @@ -0,0 +1,199 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace io { + +Printer::Printer(ZeroCopyOutputStream* output, char variable_delimiter) + : variable_delimiter_(variable_delimiter), + output_(output), + buffer_(NULL), + buffer_size_(0), + at_start_of_line_(true), + failed_(false) { +} + +Printer::~Printer() { + // Only BackUp() if we're sure we've successfully called Next() at least once. + if (buffer_size_ > 0) { + output_->BackUp(buffer_size_); + } +} + +void Printer::Print(const map& variables, const char* text) { + int size = strlen(text); + int pos = 0; // The number of bytes we've written so far. + + for (int i = 0; i < size; i++) { + if (text[i] == '\n') { + // Saw newline. If there is more text, we may need to insert an indent + // here. So, write what we have so far, including the '\n'. 
+ WriteRaw(text + pos, i - pos + 1); + pos = i + 1; + + // Setting this true will cause the next WriteRaw() to insert an indent + // first. + at_start_of_line_ = true; + + } else if (text[i] == variable_delimiter_) { + // Saw the start of a variable name. + + // Write what we have so far. + WriteRaw(text + pos, i - pos); + pos = i + 1; + + // Find closing delimiter. + const char* end = strchr(text + pos, variable_delimiter_); + if (end == NULL) { + GOOGLE_LOG(DFATAL) << " Unclosed variable name."; + end = text + pos; + } + int endpos = end - text; + + string varname(text + pos, endpos - pos); + if (varname.empty()) { + // Two delimiters in a row reduce to a literal delimiter character. + WriteRaw(&variable_delimiter_, 1); + } else { + // Replace with the variable's value. + map::const_iterator iter = variables.find(varname); + if (iter == variables.end()) { + GOOGLE_LOG(DFATAL) << " Undefined variable: " << varname; + } else { + WriteRaw(iter->second.data(), iter->second.size()); + } + } + + // Advance past this variable. + i = endpos; + pos = endpos + 1; + } + } + + // Write the rest. + WriteRaw(text + pos, size - pos); +} + +void Printer::Print(const char* text) { + static map empty; + Print(empty, text); +} + +void Printer::Print(const char* text, + const char* variable, const string& value) { + map vars; + vars[variable] = value; + Print(vars, text); +} + +void Printer::Print(const char* text, + const char* variable1, const string& value1, + const char* variable2, const string& value2) { + map vars; + vars[variable1] = value1; + vars[variable2] = value2; + Print(vars, text); +} + +void Printer::Print(const char* text, + const char* variable1, const string& value1, + const char* variable2, const string& value2, + const char* variable3, const string& value3) { + map vars; + vars[variable1] = value1; + vars[variable2] = value2; + vars[variable3] = value3; + Print(vars, text); +} + +void Printer::Indent() { + indent_ += " "; +} + +void Printer::Outdent() { + if (indent_.empty()) { + GOOGLE_LOG(DFATAL) << " Outdent() without matching Indent()."; + return; + } + + indent_.resize(indent_.size() - 2); +} + +void Printer::PrintRaw(const string& data) { + WriteRaw(data.data(), data.size()); +} + +void Printer::PrintRaw(const char* data) { + if (failed_) return; + WriteRaw(data, strlen(data)); +} + +void Printer::WriteRaw(const char* data, int size) { + if (failed_) return; + if (size == 0) return; + + if (at_start_of_line_) { + // Insert an indent. + at_start_of_line_ = false; + WriteRaw(indent_.data(), indent_.size()); + if (failed_) return; + } + + while (size > buffer_size_) { + // Data exceeds space in the buffer. Copy what we can and request a + // new buffer. + memcpy(buffer_, data, buffer_size_); + data += buffer_size_; + size -= buffer_size_; + void* void_buffer; + failed_ = !output_->Next(&void_buffer, &buffer_size_); + if (failed_) return; + buffer_ = reinterpret_cast(void_buffer); + } + + // Buffer is big enough to receive the data; copy it. 
+ memcpy(buffer_, data, size); + buffer_ += size; + buffer_size_ -= size; +} + +} // namespace io +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/printer.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/printer.h new file mode 100644 index 0000000000..5be48543ab --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/printer.h @@ -0,0 +1,136 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Utility class for writing text to a ZeroCopyOutputStream. + +#ifndef GOOGLE_PROTOBUF_IO_PRINTER_H__ +#define GOOGLE_PROTOBUF_IO_PRINTER_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace io { + +class ZeroCopyOutputStream; // zero_copy_stream.h + +// This simple utility class assists in code generation. It basically +// allows the caller to define a set of variables and then output some +// text with variable substitutions. Example usage: +// +// Printer printer(output, '$'); +// map vars; +// vars["name"] = "Bob"; +// printer.Print(vars, "My name is $name$."); +// +// The above writes "My name is Bob." to the output stream. +// +// Printer aggressively enforces correct usage, crashing (with assert failures) +// in the case of undefined variables in debug builds. This helps greatly in +// debugging code which uses it. +class LIBPROTOBUF_EXPORT Printer { + public: + // Create a printer that writes text to the given output stream. Use the + // given character as the delimiter for variables. 
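As a usage sketch of the Printer interface documented just above, assuming the stock ArrayOutputStream from zero_copy_stream_impl.h; EmitClassStub is an illustrative name and not part of this patch:

#include <google/protobuf/io/printer.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <map>
#include <string>

// Hypothetical helper: writes a small class stub into a caller-supplied
// buffer, using '$'-delimited variable substitution and two-space indents.
void EmitClassStub(char* buffer, int size) {
  google::protobuf::io::ArrayOutputStream output(buffer, size);
  google::protobuf::io::Printer printer(&output, '$');

  std::map<std::string, std::string> vars;
  vars["class"] = "Foo";

  printer.Print(vars, "class $class$ {\n public:\n");
  printer.Indent();
  printer.Print("void Bar();\n");   // emitted as "  void Bar();"
  printer.Outdent();
  printer.Print("};\n");
  // printer.failed() reports whether the underlying stream ran out of room.
}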
+ Printer(ZeroCopyOutputStream* output, char variable_delimiter); + ~Printer(); + + // Print some text after applying variable substitutions. If a particular + // variable in the text is not defined, this will crash. Variables to be + // substituted are identified by their names surrounded by delimiter + // characters (as given to the constructor). The variable bindings are + // defined by the given map. + void Print(const map& variables, const char* text); + + // Like the first Print(), except the substitutions are given as parameters. + void Print(const char* text); + // Like the first Print(), except the substitutions are given as parameters. + void Print(const char* text, const char* variable, const string& value); + // Like the first Print(), except the substitutions are given as parameters. + void Print(const char* text, const char* variable1, const string& value1, + const char* variable2, const string& value2); + // Like the first Print(), except the substitutions are given as parameters. + void Print(const char* text, const char* variable1, const string& value1, + const char* variable2, const string& value2, + const char* variable3, const string& value3); + // TODO(kenton): Overloaded versions with more variables? Three seems + // to be enough. + + // Indent text by two spaces. After calling Indent(), two spaces will be + // inserted at the beginning of each line of text. Indent() may be called + // multiple times to produce deeper indents. + void Indent(); + + // Reduces the current indent level by two spaces, or crashes if the indent + // level is zero. + void Outdent(); + + // Write a string to the output buffer. + // This method does not look for newlines to add indentation. + void PrintRaw(const string& data); + + // Write a zero-delimited string to output buffer. + // This method does not look for newlines to add indentation. + void PrintRaw(const char* data); + + // Write some bytes to the output buffer. + // This method does not look for newlines to add indentation. + void WriteRaw(const char* data, int size); + + // True if any write to the underlying stream failed. (We don't just + // crash in this case because this is an I/O failure, not a programming + // error.) + bool failed() const { return failed_; } + + private: + const char variable_delimiter_; + + ZeroCopyOutputStream* const output_; + char* buffer_; + int buffer_size_; + + string indent_; + bool at_start_of_line_; + bool failed_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Printer); +}; + +} // namespace io +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_IO_PRINTER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/printer_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/printer_unittest.cc new file mode 100644 index 0000000000..580a53da22 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/printer_unittest.cc @@ -0,0 +1,261 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include + +#include +#include + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace io { +namespace { + +// Each test repeats over several block sizes in order to test both cases +// where particular writes cross a buffer boundary and cases where they do +// not. + +TEST(Printer, EmptyPrinter) { + char buffer[8192]; + const int block_size = 100; + ArrayOutputStream output(buffer, GOOGLE_ARRAYSIZE(buffer), block_size); + Printer printer(&output, '\0'); + EXPECT_TRUE(!printer.failed()); +} + +TEST(Printer, BasicPrinting) { + char buffer[8192]; + + for (int block_size = 1; block_size < 512; block_size *= 2) { + ArrayOutputStream output(buffer, sizeof(buffer), block_size); + + { + Printer printer(&output, '\0'); + + printer.Print("Hello World!"); + printer.Print(" This is the same line.\n"); + printer.Print("But this is a new one.\nAnd this is another one."); + + EXPECT_FALSE(printer.failed()); + } + + buffer[output.ByteCount()] = '\0'; + + EXPECT_STREQ("Hello World! This is the same line.\n" + "But this is a new one.\n" + "And this is another one.", + buffer); + } +} + +TEST(Printer, WriteRaw) { + char buffer[8192]; + + for (int block_size = 1; block_size < 512; block_size *= 2) { + ArrayOutputStream output(buffer, sizeof(buffer), block_size); + + { + string string_obj = "From an object\n"; + Printer printer(&output, '$'); + printer.WriteRaw("Hello World!", 12); + printer.PrintRaw(" This is the same line.\n"); + printer.PrintRaw("But this is a new one.\nAnd this is another one."); + printer.WriteRaw("\n", 1); + printer.PrintRaw(string_obj); + EXPECT_FALSE(printer.failed()); + } + + buffer[output.ByteCount()] = '\0'; + + EXPECT_STREQ("Hello World! This is the same line.\n" + "But this is a new one.\n" + "And this is another one." 
+ "\n" + "From an object\n", + buffer); + } +} + +TEST(Printer, VariableSubstitution) { + char buffer[8192]; + + for (int block_size = 1; block_size < 512; block_size *= 2) { + ArrayOutputStream output(buffer, sizeof(buffer), block_size); + + { + Printer printer(&output, '$'); + map vars; + + vars["foo"] = "World"; + vars["bar"] = "$foo$"; + vars["abcdefg"] = "1234"; + + printer.Print(vars, "Hello $foo$!\nbar = $bar$\n"); + printer.PrintRaw("RawBit\n"); + printer.Print(vars, "$abcdefg$\nA literal dollar sign: $$"); + + vars["foo"] = "blah"; + printer.Print(vars, "\nNow foo = $foo$."); + + EXPECT_FALSE(printer.failed()); + } + + buffer[output.ByteCount()] = '\0'; + + EXPECT_STREQ("Hello World!\n" + "bar = $foo$\n" + "RawBit\n" + "1234\n" + "A literal dollar sign: $\n" + "Now foo = blah.", + buffer); + } +} + +TEST(Printer, InlineVariableSubstitution) { + char buffer[8192]; + + ArrayOutputStream output(buffer, sizeof(buffer)); + + { + Printer printer(&output, '$'); + printer.Print("Hello $foo$!\n", "foo", "World"); + printer.PrintRaw("RawBit\n"); + printer.Print("$foo$ $bar$\n", "foo", "one", "bar", "two"); + EXPECT_FALSE(printer.failed()); + } + + buffer[output.ByteCount()] = '\0'; + + EXPECT_STREQ("Hello World!\n" + "RawBit\n" + "one two\n", + buffer); +} + +TEST(Printer, Indenting) { + char buffer[8192]; + + for (int block_size = 1; block_size < 512; block_size *= 2) { + ArrayOutputStream output(buffer, sizeof(buffer), block_size); + + { + Printer printer(&output, '$'); + map vars; + + vars["newline"] = "\n"; + + printer.Print("This is not indented.\n"); + printer.Indent(); + printer.Print("This is indented\nAnd so is this\n"); + printer.Outdent(); + printer.Print("But this is not."); + printer.Indent(); + printer.Print(" And this is still the same line.\n" + "But this is indented.\n"); + printer.PrintRaw("RawBit has indent at start\n"); + printer.PrintRaw("but not after a raw newline\n"); + printer.Print(vars, "Note that a newline in a variable will break " + "indenting, as we see$newline$here.\n"); + printer.Indent(); + printer.Print("And this"); + printer.Outdent(); + printer.Outdent(); + printer.Print(" is double-indented\nBack to normal."); + + EXPECT_FALSE(printer.failed()); + } + + buffer[output.ByteCount()] = '\0'; + + EXPECT_STREQ( + "This is not indented.\n" + " This is indented\n" + " And so is this\n" + "But this is not. And this is still the same line.\n" + " But this is indented.\n" + " RawBit has indent at start\n" + "but not after a raw newline\n" + "Note that a newline in a variable will break indenting, as we see\n" + "here.\n" + " And this is double-indented\n" + "Back to normal.", + buffer); + } +} + +// Death tests do not work on Windows as of yet. +#ifdef GTEST_HAS_DEATH_TEST +TEST(Printer, Death) { + char buffer[8192]; + + ArrayOutputStream output(buffer, sizeof(buffer)); + Printer printer(&output, '$'); + + EXPECT_DEBUG_DEATH(printer.Print("$nosuchvar$"), "Undefined variable"); + EXPECT_DEBUG_DEATH(printer.Print("$unclosed"), "Unclosed variable name"); + EXPECT_DEBUG_DEATH(printer.Outdent(), "without matching Indent"); +} +#endif // GTEST_HAS_DEATH_TEST + +TEST(Printer, WriteFailure) { + char buffer[16]; + + ArrayOutputStream output(buffer, sizeof(buffer)); + Printer printer(&output, '$'); + + // Print 16 bytes to fill the buffer exactly (should not fail). + printer.Print("0123456789abcdef"); + EXPECT_FALSE(printer.failed()); + + // Try to print one more byte (should fail). 
+ printer.Print(" "); + EXPECT_TRUE(printer.failed()); + + // Should not crash + printer.Print("blah"); + EXPECT_TRUE(printer.failed()); + + // Buffer should contain the first 16 bytes written. + EXPECT_EQ("0123456789abcdef", string(buffer, sizeof(buffer))); +} + +} // namespace +} // namespace io +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/tokenizer.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/tokenizer.cc new file mode 100644 index 0000000000..513831d55f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/tokenizer.cc @@ -0,0 +1,694 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Here we have a hand-written lexer. At first you might ask yourself, +// "Hand-written text processing? Is Kenton crazy?!" Well, first of all, +// yes I am crazy, but that's beside the point. There are actually reasons +// why I ended up writing this this way. +// +// The traditional approach to lexing is to use lex to generate a lexer for +// you. Unfortunately, lex's output is ridiculously ugly and difficult to +// integrate cleanly with C++ code, especially abstract code or code meant +// as a library. Better parser-generators exist but would add dependencies +// which most users won't already have, which we'd like to avoid. (GNU flex +// has a C++ output option, but it's still ridiculously ugly, non-abstract, +// and not library-friendly.) +// +// The next approach that any good software engineer should look at is to +// use regular expressions. And, indeed, I did. I have code which +// implements this same class using regular expressions. 
It's about 200 +// lines shorter. However: +// - Rather than error messages telling you "This string has an invalid +// escape sequence at line 5, column 45", you get error messages like +// "Parse error on line 5". Giving more precise errors requires adding +// a lot of code that ends up basically as complex as the hand-coded +// version anyway. +// - The regular expression to match a string literal looks like this: +// kString = new RE("(\"([^\"\\\\]|" // non-escaped +// "\\\\[abfnrtv?\"'\\\\0-7]|" // normal escape +// "\\\\x[0-9a-fA-F])*\"|" // hex escape +// "\'([^\'\\\\]|" // Also support single-quotes. +// "\\\\[abfnrtv?\"'\\\\0-7]|" +// "\\\\x[0-9a-fA-F])*\')"); +// Verifying the correctness of this line noise is actually harder than +// verifying the correctness of ConsumeString(), defined below. I'm not +// even confident that the above is correct, after staring at it for some +// time. +// - PCRE is fast, but there's still more overhead involved than the code +// below. +// - Sadly, regular expressions are not part of the C standard library, so +// using them would require depending on some other library. For the +// open source release, this could be really annoying. Nobody likes +// downloading one piece of software just to find that they need to +// download something else to make it work, and in all likelihood +// people downloading Protocol Buffers will already be doing so just +// to make something else work. We could include a copy of PCRE with +// our code, but that obligates us to keep it up-to-date and just seems +// like a big waste just to save 200 lines of code. +// +// On a similar but unrelated note, I'm even scared to use ctype.h. +// Apparently functions like isalpha() are locale-dependent. So, if we used +// that, then if this code is being called from some program that doesn't +// have its locale set to "C", it would behave strangely. We can't just set +// the locale to "C" ourselves since we might break the calling program that +// way, particularly if it is multi-threaded. WTF? Someone please let me +// (Kenton) know if I'm missing something here... +// +// I'd love to hear about other alternatives, though, as this code isn't +// exactly pretty. + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace io { +namespace { + +// As mentioned above, I don't trust ctype.h due to the presence of "locales". +// So, I have written replacement functions here. Someone please smack me if +// this is a bad idea or if there is some way around this. +// +// These "character classes" are designed to be used in template methods. +// For instance, Tokenizer::ConsumeZeroOrMore() will eat +// whitespace. + +// Note: No class is allowed to contain '\0', since this is used to mark end- +// of-input and is handled specially. 
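As a concrete, purely illustrative rendering of the "character class" pattern described above (a type exposing a static InClass(char) predicate, consumed by a template helper), not taken from this file:

// Each "character class" is a type with a single static predicate, so
// consume loops can be written once as templates and instantiated per class.
struct DigitClass {
  static inline bool InClass(char c) { return c >= '0' && c <= '9'; }
};

struct WhitespaceClass {
  static inline bool InClass(char c) {
    return c == ' ' || c == '\t' || c == '\n' || c == '\r';
  }
};

// Counts how many leading characters of p belong to the given class.
template <typename CharacterClass>
int CountLeading(const char* p) {
  int n = 0;
  while (CharacterClass::InClass(p[n])) ++n;  // stops at '\0'
  return n;
}

// CountLeading<DigitClass>("123abc") == 3
// CountLeading<WhitespaceClass>("  x") == 2

The CHARACTER_CLASS macro that follows generates exactly this kind of class for each class the tokenizer needs.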
+ +#define CHARACTER_CLASS(NAME, EXPRESSION) \ + class NAME { \ + public: \ + static inline bool InClass(char c) { \ + return EXPRESSION; \ + } \ + } + +CHARACTER_CLASS(Whitespace, c == ' ' || c == '\n' || c == '\t' || + c == '\r' || c == '\v' || c == '\f'); + +CHARACTER_CLASS(Unprintable, c < ' ' && c > '\0'); + +CHARACTER_CLASS(Digit, '0' <= c && c <= '9'); +CHARACTER_CLASS(OctalDigit, '0' <= c && c <= '7'); +CHARACTER_CLASS(HexDigit, ('0' <= c && c <= '9') || + ('a' <= c && c <= 'f') || + ('A' <= c && c <= 'F')); + +CHARACTER_CLASS(Letter, ('a' <= c && c <= 'z') || + ('A' <= c && c <= 'Z') || + (c == '_')); + +CHARACTER_CLASS(Alphanumeric, ('a' <= c && c <= 'z') || + ('A' <= c && c <= 'Z') || + ('0' <= c && c <= '9') || + (c == '_')); + +CHARACTER_CLASS(Escape, c == 'a' || c == 'b' || c == 'f' || c == 'n' || + c == 'r' || c == 't' || c == 'v' || c == '\\' || + c == '?' || c == '\'' || c == '\"'); + +#undef CHARACTER_CLASS + +// Given a char, interpret it as a numeric digit and return its value. +// This supports any number base up to 36. +inline int DigitValue(char digit) { + if ('0' <= digit && digit <= '9') return digit - '0'; + if ('a' <= digit && digit <= 'z') return digit - 'a' + 10; + if ('A' <= digit && digit <= 'Z') return digit - 'A' + 10; + return -1; +} + +// Inline because it's only used in one place. +inline char TranslateEscape(char c) { + switch (c) { + case 'a': return '\a'; + case 'b': return '\b'; + case 'f': return '\f'; + case 'n': return '\n'; + case 'r': return '\r'; + case 't': return '\t'; + case 'v': return '\v'; + case '\\': return '\\'; + case '?': return '\?'; // Trigraphs = :( + case '\'': return '\''; + case '"': return '\"'; + + // We expect escape sequences to have been validated separately. + default: return '?'; + } +} + +} // anonymous namespace + +ErrorCollector::~ErrorCollector() {} + +// =================================================================== + +Tokenizer::Tokenizer(ZeroCopyInputStream* input, + ErrorCollector* error_collector) + : input_(input), + error_collector_(error_collector), + buffer_(NULL), + buffer_size_(0), + buffer_pos_(0), + read_error_(false), + line_(0), + column_(0), + token_start_(-1), + allow_f_after_float_(false), + comment_style_(CPP_COMMENT_STYLE) { + + current_.line = 0; + current_.column = 0; + current_.end_column = 0; + current_.type = TYPE_START; + + Refresh(); +} + +Tokenizer::~Tokenizer() { + // If we had any buffer left unread, return it to the underlying stream + // so that someone else can read it. + if (buffer_size_ > buffer_pos_) { + input_->BackUp(buffer_size_ - buffer_pos_); + } +} + +// ------------------------------------------------------------------- +// Internal helpers. + +void Tokenizer::NextChar() { + // Update our line and column counters based on the character being + // consumed. + if (current_char_ == '\n') { + ++line_; + column_ = 0; + } else if (current_char_ == '\t') { + column_ += kTabWidth - column_ % kTabWidth; + } else { + ++column_; + } + + // Advance to the next character. + ++buffer_pos_; + if (buffer_pos_ < buffer_size_) { + current_char_ = buffer_[buffer_pos_]; + } else { + Refresh(); + } +} + +void Tokenizer::Refresh() { + if (read_error_) { + current_char_ = '\0'; + return; + } + + // If we're in a token, append the rest of the buffer to it. 
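The Refresh() logic begun just above is written against the ZeroCopyInputStream contract: Next() hands out whole internal buffers (possibly empty ones), and BackUp() returns the unread tail. A hedged sketch of that contract using the library's ArrayInputStream; ReadAll is an illustrative name, not part of this patch:

#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <cstring>
#include <string>

// Drains a stream the same way the tokenizer does: accept whatever buffer
// Next() offers, skip empty buffers, and stop when Next() returns false.
std::string ReadAll(const char* text) {
  google::protobuf::io::ArrayInputStream input(text, strlen(text), /*block_size=*/7);
  std::string out;
  const void* data;
  int size;
  while (input.Next(&data, &size)) {
    if (size == 0) continue;                       // empty buffers are legal
    out.append(static_cast<const char*>(data), size);
  }
  return out;
}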
+ if (token_start_ >= 0 && token_start_ < buffer_size_) { + current_.text.append(buffer_ + token_start_, buffer_size_ - token_start_); + token_start_ = 0; + } + + const void* data = NULL; + buffer_ = NULL; + buffer_pos_ = 0; + do { + if (!input_->Next(&data, &buffer_size_)) { + // end of stream (or read error) + buffer_size_ = 0; + read_error_ = true; + current_char_ = '\0'; + return; + } + } while (buffer_size_ == 0); + + buffer_ = static_cast(data); + + current_char_ = buffer_[0]; +} + +inline void Tokenizer::StartToken() { + token_start_ = buffer_pos_; + current_.type = TYPE_START; // Just for the sake of initializing it. + current_.text.clear(); + current_.line = line_; + current_.column = column_; +} + +inline void Tokenizer::EndToken() { + // Note: The if() is necessary because some STL implementations crash when + // you call string::append(NULL, 0), presumably because they are trying to + // be helpful by detecting the NULL pointer, even though there's nothing + // wrong with reading zero bytes from NULL. + if (buffer_pos_ != token_start_) { + current_.text.append(buffer_ + token_start_, buffer_pos_ - token_start_); + } + token_start_ = -1; + current_.end_column = column_; +} + +// ------------------------------------------------------------------- +// Helper methods that consume characters. + +template +inline bool Tokenizer::LookingAt() { + return CharacterClass::InClass(current_char_); +} + +template +inline bool Tokenizer::TryConsumeOne() { + if (CharacterClass::InClass(current_char_)) { + NextChar(); + return true; + } else { + return false; + } +} + +inline bool Tokenizer::TryConsume(char c) { + if (current_char_ == c) { + NextChar(); + return true; + } else { + return false; + } +} + +template +inline void Tokenizer::ConsumeZeroOrMore() { + while (CharacterClass::InClass(current_char_)) { + NextChar(); + } +} + +template +inline void Tokenizer::ConsumeOneOrMore(const char* error) { + if (!CharacterClass::InClass(current_char_)) { + AddError(error); + } else { + do { + NextChar(); + } while (CharacterClass::InClass(current_char_)); + } +} + +// ------------------------------------------------------------------- +// Methods that read whole patterns matching certain kinds of tokens +// or comments. + +void Tokenizer::ConsumeString(char delimiter) { + while (true) { + switch (current_char_) { + case '\0': + case '\n': { + AddError("String literals cannot cross line boundaries."); + return; + } + + case '\\': { + // An escape sequence. + NextChar(); + if (TryConsumeOne()) { + // Valid escape sequence. + } else if (TryConsumeOne()) { + // Possibly followed by two more octal digits, but these will + // just be consumed by the main loop anyway so we don't need + // to do so explicitly here. + } else if (TryConsume('x') || TryConsume('X')) { + if (!TryConsumeOne()) { + AddError("Expected hex digits for escape sequence."); + } + // Possibly followed by another hex digit, but again we don't care. + } else { + AddError("Invalid escape sequence in string literal."); + } + break; + } + + default: { + if (current_char_ == delimiter) { + NextChar(); + return; + } + NextChar(); + break; + } + } + } +} + +Tokenizer::TokenType Tokenizer::ConsumeNumber(bool started_with_zero, + bool started_with_dot) { + bool is_float = false; + + if (started_with_zero && (TryConsume('x') || TryConsume('X'))) { + // A hex number (started with "0x"). + ConsumeOneOrMore("\"0x\" must be followed by hex digits."); + + } else if (started_with_zero && LookingAt()) { + // An octal number (had a leading zero). 
+ ConsumeZeroOrMore(); + if (LookingAt()) { + AddError("Numbers starting with leading zero must be in octal."); + ConsumeZeroOrMore(); + } + + } else { + // A decimal number. + if (started_with_dot) { + is_float = true; + ConsumeZeroOrMore(); + } else { + ConsumeZeroOrMore(); + + if (TryConsume('.')) { + is_float = true; + ConsumeZeroOrMore(); + } + } + + if (TryConsume('e') || TryConsume('E')) { + is_float = true; + TryConsume('-') || TryConsume('+'); + ConsumeOneOrMore("\"e\" must be followed by exponent."); + } + + if (allow_f_after_float_ && (TryConsume('f') || TryConsume('F'))) { + is_float = true; + } + } + + if (LookingAt()) { + AddError("Need space between number and identifier."); + } else if (current_char_ == '.') { + if (is_float) { + AddError( + "Already saw decimal point or exponent; can't have another one."); + } else { + AddError("Hex and octal numbers must be integers."); + } + } + + return is_float ? TYPE_FLOAT : TYPE_INTEGER; +} + +void Tokenizer::ConsumeLineComment() { + while (current_char_ != '\0' && current_char_ != '\n') { + NextChar(); + } + TryConsume('\n'); +} + +void Tokenizer::ConsumeBlockComment() { + int start_line = line_; + int start_column = column_ - 2; + + while (true) { + while (current_char_ != '\0' && + current_char_ != '*' && + current_char_ != '/') { + NextChar(); + } + + if (TryConsume('*') && TryConsume('/')) { + // End of comment. + break; + } else if (TryConsume('/') && current_char_ == '*') { + // Note: We didn't consume the '*' because if there is a '/' after it + // we want to interpret that as the end of the comment. + AddError( + "\"/*\" inside block comment. Block comments cannot be nested."); + } else if (current_char_ == '\0') { + AddError("End-of-file inside block comment."); + error_collector_->AddError( + start_line, start_column, " Comment started here."); + break; + } + } +} + +// ------------------------------------------------------------------- + +bool Tokenizer::Next() { + previous_ = current_; + + // Did we skip any characters after the last token? + bool skipped_stuff = false; + + while (!read_error_) { + if (TryConsumeOne()) { + ConsumeZeroOrMore(); + + } else if (comment_style_ == CPP_COMMENT_STYLE && TryConsume('/')) { + // Starting a comment? + if (TryConsume('/')) { + ConsumeLineComment(); + } else if (TryConsume('*')) { + ConsumeBlockComment(); + } else { + // Oops, it was just a slash. Return it. + current_.type = TYPE_SYMBOL; + current_.text = "/"; + current_.line = line_; + current_.column = column_ - 1; + return true; + } + + } else if (comment_style_ == SH_COMMENT_STYLE && TryConsume('#')) { + ConsumeLineComment(); + + } else if (LookingAt() || current_char_ == '\0') { + AddError("Invalid control characters encountered in text."); + NextChar(); + // Skip more unprintable characters, too. But, remember that '\0' is + // also what current_char_ is set to after EOF / read error. We have + // to be careful not to go into an infinite loop of trying to consume + // it, so make sure to check read_error_ explicitly before consuming + // '\0'. + while (TryConsumeOne() || + (!read_error_ && TryConsume('\0'))) { + // Ignore. + } + + } else { + // Reading some sort of token. + StartToken(); + + if (TryConsumeOne()) { + ConsumeZeroOrMore(); + current_.type = TYPE_IDENTIFIER; + } else if (TryConsume('0')) { + current_.type = ConsumeNumber(true, false); + } else if (TryConsume('.')) { + // This could be the beginning of a floating-point number, or it could + // just be a '.' symbol. 
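For callers, the net effect of ConsumeNumber's two flags is just the token type that comes back, and the static parse helpers then accept those spellings verbatim. A hedged illustration (ParseExamples is not from this patch):

#include <google/protobuf/io/tokenizer.h>
#include <google/protobuf/stubs/common.h>
#include <cstdio>

// The parse helpers accept exactly the spellings the tokenizer emits:
// decimal, hex, and octal for TYPE_INTEGER; optional exponent (and a
// trailing 'f' when allow_f_after_float is set) for TYPE_FLOAT.
void ParseExamples() {
  using google::protobuf::io::Tokenizer;
  google::protobuf::uint64 value = 0;
  Tokenizer::ParseInteger("0x1f", google::protobuf::kuint64max, &value);  // 31
  Tokenizer::ParseInteger("017", google::protobuf::kuint64max, &value);   // 15
  double d = Tokenizer::ParseFloat("1.5e3");                              // 1500.0
  std::printf("%llu %g\n", static_cast<unsigned long long>(value), d);
}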
+ + if (TryConsumeOne()) { + // It's a floating-point number. + if (previous_.type == TYPE_IDENTIFIER && !skipped_stuff) { + // We don't accept syntax like "blah.123". + error_collector_->AddError(line_, column_ - 2, + "Need space between identifier and decimal point."); + } + current_.type = ConsumeNumber(false, true); + } else { + current_.type = TYPE_SYMBOL; + } + } else if (TryConsumeOne()) { + current_.type = ConsumeNumber(false, false); + } else if (TryConsume('\"')) { + ConsumeString('\"'); + current_.type = TYPE_STRING; + } else if (TryConsume('\'')) { + ConsumeString('\''); + current_.type = TYPE_STRING; + } else { + NextChar(); + current_.type = TYPE_SYMBOL; + } + + EndToken(); + return true; + } + + skipped_stuff = true; + } + + // EOF + current_.type = TYPE_END; + current_.text.clear(); + current_.line = line_; + current_.column = column_; + current_.end_column = column_; + return false; +} + +// ------------------------------------------------------------------- +// Token-parsing helpers. Remember that these don't need to report +// errors since any errors should already have been reported while +// tokenizing. Also, these can assume that whatever text they +// are given is text that the tokenizer actually parsed as a token +// of the given type. + +bool Tokenizer::ParseInteger(const string& text, uint64 max_value, + uint64* output) { + // Sadly, we can't just use strtoul() since it is only 32-bit and strtoull() + // is non-standard. I hate the C standard library. :( + +// return strtoull(text.c_str(), NULL, 0); + + const char* ptr = text.c_str(); + int base = 10; + if (ptr[0] == '0') { + if (ptr[1] == 'x' || ptr[1] == 'X') { + // This is hex. + base = 16; + ptr += 2; + } else { + // This is octal. + base = 8; + } + } + + uint64 result = 0; + for (; *ptr != '\0'; ptr++) { + int digit = DigitValue(*ptr); + GOOGLE_LOG_IF(DFATAL, digit < 0 || digit >= base) + << " Tokenizer::ParseInteger() passed text that could not have been" + " tokenized as an integer: " << CEscape(text); + if (digit > max_value || result > (max_value - digit) / base) { + // Overflow. + return false; + } + result = result * base + digit; + } + + *output = result; + return true; +} + +double Tokenizer::ParseFloat(const string& text) { + const char* start = text.c_str(); + char* end; + double result = NoLocaleStrtod(start, &end); + + // "1e" is not a valid float, but if the tokenizer reads it, it will + // report an error but still return it as a valid token. We need to + // accept anything the tokenizer could possibly return, error or not. + if (*end == 'e' || *end == 'E') { + ++end; + if (*end == '-' || *end == '+') ++end; + } + + // If the Tokenizer had allow_f_after_float_ enabled, the float may be + // suffixed with the letter 'f'. + if (*end == 'f' || *end == 'F') { + ++end; + } + + GOOGLE_LOG_IF(DFATAL, end - start != text.size() || *start == '-') + << " Tokenizer::ParseFloat() passed text that could not have been" + " tokenized as a float: " << CEscape(text); + return result; +} + +void Tokenizer::ParseStringAppend(const string& text, string* output) { + // Reminder: text[0] is always the quote character. (If text is + // empty, it's invalid, so we'll just return.) + if (text.empty()) { + GOOGLE_LOG(DFATAL) + << " Tokenizer::ParseStringAppend() passed text that could not" + " have been tokenized as a string: " << CEscape(text); + return; + } + + output->reserve(output->size() + text.size()); + + // Loop through the string copying characters to "output" and + // interpreting escape sequences. 
Note that any invalid escape + // sequences or other errors were already reported while tokenizing. + // In this case we do not need to produce valid results. + for (const char* ptr = text.c_str() + 1; *ptr != '\0'; ptr++) { + if (*ptr == '\\' && ptr[1] != '\0') { + // An escape sequence. + ++ptr; + + if (OctalDigit::InClass(*ptr)) { + // An octal escape. May one, two, or three digits. + int code = DigitValue(*ptr); + if (OctalDigit::InClass(ptr[1])) { + ++ptr; + code = code * 8 + DigitValue(*ptr); + } + if (OctalDigit::InClass(ptr[1])) { + ++ptr; + code = code * 8 + DigitValue(*ptr); + } + output->push_back(static_cast(code)); + + } else if (*ptr == 'x') { + // A hex escape. May zero, one, or two digits. (The zero case + // will have been caught as an error earlier.) + int code = 0; + if (HexDigit::InClass(ptr[1])) { + ++ptr; + code = DigitValue(*ptr); + } + if (HexDigit::InClass(ptr[1])) { + ++ptr; + code = code * 16 + DigitValue(*ptr); + } + output->push_back(static_cast(code)); + + } else { + // Some other escape code. + output->push_back(TranslateEscape(*ptr)); + } + + } else if (*ptr == text[0]) { + // Ignore quote matching the starting quote. + } else { + output->push_back(*ptr); + } + } + + return; +} + +} // namespace io +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/tokenizer.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/tokenizer.h new file mode 100644 index 0000000000..8f759abbd8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/tokenizer.h @@ -0,0 +1,313 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
+// +// Class for parsing tokenized text from a ZeroCopyInputStream. + +#ifndef GOOGLE_PROTOBUF_IO_TOKENIZER_H__ +#define GOOGLE_PROTOBUF_IO_TOKENIZER_H__ + +#include +#include + +namespace google { +namespace protobuf { +namespace io { + +class ZeroCopyInputStream; // zero_copy_stream.h + +// Defined in this file. +class ErrorCollector; +class Tokenizer; + +// Abstract interface for an object which collects the errors that occur +// during parsing. A typical implementation might simply print the errors +// to stdout. +class LIBPROTOBUF_EXPORT ErrorCollector { + public: + inline ErrorCollector() {} + virtual ~ErrorCollector(); + + // Indicates that there was an error in the input at the given line and + // column numbers. The numbers are zero-based, so you may want to add + // 1 to each before printing them. + virtual void AddError(int line, int column, const string& message) = 0; + + // Indicates that there was a warning in the input at the given line and + // column numbers. The numbers are zero-based, so you may want to add + // 1 to each before printing them. + virtual void AddWarning(int line, int column, const string& message) { } + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ErrorCollector); +}; + +// This class converts a stream of raw text into a stream of tokens for +// the protocol definition parser to parse. The tokens recognized are +// similar to those that make up the C language; see the TokenType enum for +// precise descriptions. Whitespace and comments are skipped. By default, +// C- and C++-style comments are recognized, but other styles can be used by +// calling set_comment_style(). +class LIBPROTOBUF_EXPORT Tokenizer { + public: + // Construct a Tokenizer that reads and tokenizes text from the given + // input stream and writes errors to the given error_collector. + // The caller keeps ownership of input and error_collector. + Tokenizer(ZeroCopyInputStream* input, ErrorCollector* error_collector); + ~Tokenizer(); + + enum TokenType { + TYPE_START, // Next() has not yet been called. + TYPE_END, // End of input reached. "text" is empty. + + TYPE_IDENTIFIER, // A sequence of letters, digits, and underscores, not + // starting with a digit. It is an error for a number + // to be followed by an identifier with no space in + // between. + TYPE_INTEGER, // A sequence of digits representing an integer. Normally + // the digits are decimal, but a prefix of "0x" indicates + // a hex number and a leading zero indicates octal, just + // like with C numeric literals. A leading negative sign + // is NOT included in the token; it's up to the parser to + // interpret the unary minus operator on its own. + TYPE_FLOAT, // A floating point literal, with a fractional part and/or + // an exponent. Always in decimal. Again, never + // negative. + TYPE_STRING, // A quoted sequence of escaped characters. Either single + // or double quotes can be used, but they must match. + // A string literal cannot cross a line break. + TYPE_SYMBOL, // Any other printable character, like '!' or '+'. + // Symbols are always a single character, so "!+$%" is + // four tokens. + }; + + // Structure representing a token read from the token stream. + struct Token { + TokenType type; + string text; // The exact text of the token as it appeared in + // the input. e.g. tokens of TYPE_STRING will still + // be escaped and in quotes. + + // "line" and "column" specify the position of the first character of + // the token within the input stream. They are zero-based. 
+ int line; + int column; + int end_column; + }; + + // Get the current token. This is updated when Next() is called. Before + // the first call to Next(), current() has type TYPE_START and no contents. + const Token& current(); + + // Return the previous token -- i.e. what current() returned before the + // previous call to Next(). + const Token& previous(); + + // Advance to the next token. Returns false if the end of the input is + // reached. + bool Next(); + + // Parse helpers --------------------------------------------------- + + // Parses a TYPE_FLOAT token. This never fails, so long as the text actually + // comes from a TYPE_FLOAT token parsed by Tokenizer. If it doesn't, the + // result is undefined (possibly an assert failure). + static double ParseFloat(const string& text); + + // Parses a TYPE_STRING token. This never fails, so long as the text actually + // comes from a TYPE_STRING token parsed by Tokenizer. If it doesn't, the + // result is undefined (possibly an assert failure). + static void ParseString(const string& text, string* output); + + // Identical to ParseString, but appends to output. + static void ParseStringAppend(const string& text, string* output); + + // Parses a TYPE_INTEGER token. Returns false if the result would be + // greater than max_value. Otherwise, returns true and sets *output to the + // result. If the text is not from a Token of type TYPE_INTEGER originally + // parsed by a Tokenizer, the result is undefined (possibly an assert + // failure). + static bool ParseInteger(const string& text, uint64 max_value, + uint64* output); + + // Options --------------------------------------------------------- + + // Set true to allow floats to be suffixed with the letter 'f'. Tokens + // which would otherwise be integers but which have the 'f' suffix will be + // forced to be interpreted as floats. For all other purposes, the 'f' is + // ignored. + void set_allow_f_after_float(bool value) { allow_f_after_float_ = value; } + + // Valid values for set_comment_style(). + enum CommentStyle { + // Line comments begin with "//", block comments are delimited by "/*" and + // "*/". + CPP_COMMENT_STYLE, + // Line comments begin with "#". No way to write block comments. + SH_COMMENT_STYLE + }; + + // Sets the comment style. + void set_comment_style(CommentStyle style) { comment_style_ = style; } + + // ----------------------------------------------------------------- + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Tokenizer); + + Token current_; // Returned by current(). + Token previous_; // Returned by previous(). + + ZeroCopyInputStream* input_; + ErrorCollector* error_collector_; + + char current_char_; // == buffer_[buffer_pos_], updated by NextChar(). + const char* buffer_; // Current buffer returned from input_. + int buffer_size_; // Size of buffer_. + int buffer_pos_; // Current position within the buffer. + bool read_error_; // Did we previously encounter a read error? + + // Line and column number of current_char_ within the whole input stream. + int line_; + int column_; + + // Position in buffer_ where StartToken() was called. If the token + // started in the previous buffer, this is zero, and current_.text already + // contains the part of the token from the previous buffer. If not + // currently parsing a token, this is -1. + int token_start_; + + // Options. + bool allow_f_after_float_; + CommentStyle comment_style_; + + // Since we count columns we need to interpret tabs somehow. 
We'll take + // the standard 8-character definition for lack of any way to do better. + static const int kTabWidth = 8; + + // ----------------------------------------------------------------- + // Helper methods. + + // Consume this character and advance to the next one. + void NextChar(); + + // Read a new buffer from the input. + void Refresh(); + + // Called when the current character is the first character of a new + // token (not including whitespace or comments). + inline void StartToken(); + // Called when the current character is the first character after the + // end of the last token. After this returns, current_.text will + // contain all text consumed since StartToken() was called. + inline void EndToken(); + + // Convenience method to add an error at the current line and column. + void AddError(const string& message) { + error_collector_->AddError(line_, column_, message); + } + + // ----------------------------------------------------------------- + // The following four methods are used to consume tokens of specific + // types. They are actually used to consume all characters *after* + // the first, since the calling function consumes the first character + // in order to decide what kind of token is being read. + + // Read and consume a string, ending when the given delimiter is + // consumed. + void ConsumeString(char delimiter); + + // Read and consume a number, returning TYPE_FLOAT or TYPE_INTEGER + // depending on what was read. This needs to know if the first + // character was a zero in order to correctly recognize hex and octal + // numbers. + // It also needs to know if the first character was a . to parse floating + // point correctly. + TokenType ConsumeNumber(bool started_with_zero, bool started_with_dot); + + // Consume the rest of a line. + void ConsumeLineComment(); + // Consume until "*/". + void ConsumeBlockComment(); + + // ----------------------------------------------------------------- + // These helper methods make the parsing code more readable. The + // "character classes" referred to are defined at the top of the .cc file. + // Basically it is a C++ class with one method: + // static bool InClass(char c); + // The method returns true if c is a member of this "class", like "Letter" + // or "Digit". + + // Returns true if the current character is of the given character + // class, but does not consume anything. + template<typename CharacterClass> + inline bool LookingAt(); + + // If the current character is in the given class, consume it and return + // true. Otherwise return false. + // e.g. TryConsumeOne<Digit>() + template<typename CharacterClass> + inline bool TryConsumeOne(); + + // Like above, but try to consume the specific character indicated. + inline bool TryConsume(char c); + + // Consume zero or more of the given character class. + template<typename CharacterClass> + inline void ConsumeZeroOrMore(); + + // Consume one or more of the given character class or log the given + // error message. + // e.g.
ConsumeOneOrMore("Expected digits."); + template + inline void ConsumeOneOrMore(const char* error); +}; + +// inline methods ==================================================== +inline const Tokenizer::Token& Tokenizer::current() { + return current_; +} + +inline const Tokenizer::Token& Tokenizer::previous() { + return previous_; +} + +inline void Tokenizer::ParseString(const string& text, string* output) { + output->clear(); + ParseStringAppend(text, output); +} + +} // namespace io +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_IO_TOKENIZER_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/tokenizer_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/tokenizer_unittest.cc new file mode 100644 index 0000000000..106d080ff7 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/tokenizer_unittest.cc @@ -0,0 +1,766 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include + +#include +#include + +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace io { +namespace { + +// =================================================================== +// Data-Driven Test Infrastructure + +// TODO(kenton): This is copied from coded_stream_unittest. This is +// temporary until these fetaures are integrated into gTest itself. + +// TEST_1D and TEST_2D are macros I'd eventually like to see added to +// gTest. These macros can be used to declare tests which should be +// run multiple times, once for each item in some input array. TEST_1D +// tests all cases in a single input array. 
TEST_2D tests all +// combinations of cases from two arrays. The arrays must be statically +// defined such that the GOOGLE_ARRAYSIZE() macro works on them. Example: +// +// int kCases[] = {1, 2, 3, 4} +// TEST_1D(MyFixture, MyTest, kCases) { +// EXPECT_GT(kCases_case, 0); +// } +// +// This test iterates through the numbers 1, 2, 3, and 4 and tests that +// they are all grater than zero. In case of failure, the exact case +// which failed will be printed. The case type must be printable using +// ostream::operator<<. + +#define TEST_1D(FIXTURE, NAME, CASES) \ + class FIXTURE##_##NAME##_DD : public FIXTURE { \ + protected: \ + template \ + void DoSingleCase(const CaseType& CASES##_case); \ + }; \ + \ + TEST_F(FIXTURE##_##NAME##_DD, NAME) { \ + for (int i = 0; i < GOOGLE_ARRAYSIZE(CASES); i++) { \ + SCOPED_TRACE(testing::Message() \ + << #CASES " case #" << i << ": " << CASES[i]); \ + DoSingleCase(CASES[i]); \ + } \ + } \ + \ + template \ + void FIXTURE##_##NAME##_DD::DoSingleCase(const CaseType& CASES##_case) + +#define TEST_2D(FIXTURE, NAME, CASES1, CASES2) \ + class FIXTURE##_##NAME##_DD : public FIXTURE { \ + protected: \ + template \ + void DoSingleCase(const CaseType1& CASES1##_case, \ + const CaseType2& CASES2##_case); \ + }; \ + \ + TEST_F(FIXTURE##_##NAME##_DD, NAME) { \ + for (int i = 0; i < GOOGLE_ARRAYSIZE(CASES1); i++) { \ + for (int j = 0; j < GOOGLE_ARRAYSIZE(CASES2); j++) { \ + SCOPED_TRACE(testing::Message() \ + << #CASES1 " case #" << i << ": " << CASES1[i] << ", " \ + << #CASES2 " case #" << j << ": " << CASES2[j]); \ + DoSingleCase(CASES1[i], CASES2[j]); \ + } \ + } \ + } \ + \ + template \ + void FIXTURE##_##NAME##_DD::DoSingleCase(const CaseType1& CASES1##_case, \ + const CaseType2& CASES2##_case) + +// ------------------------------------------------------------------- + +// An input stream that is basically like an ArrayInputStream but sometimes +// returns empty buffers, just to throw us off. +class TestInputStream : public ZeroCopyInputStream { + public: + TestInputStream(const void* data, int size, int block_size) + : array_stream_(data, size, block_size), counter_(0) {} + ~TestInputStream() {} + + // implements ZeroCopyInputStream ---------------------------------- + bool Next(const void** data, int* size) { + // We'll return empty buffers starting with the first buffer, and every + // 3 and 5 buffers after that. + if (counter_ % 3 == 0 || counter_ % 5 == 0) { + *data = NULL; + *size = 0; + ++counter_; + return true; + } else { + ++counter_; + return array_stream_.Next(data, size); + } + } + + void BackUp(int count) { return array_stream_.BackUp(count); } + bool Skip(int count) { return array_stream_.Skip(count); } + int64 ByteCount() const { return array_stream_.ByteCount(); } + + private: + ArrayInputStream array_stream_; + int counter_; +}; + +// ------------------------------------------------------------------- + +// An error collector which simply concatenates all its errors into a big +// block of text which can be checked. 
+class TestErrorCollector : public ErrorCollector { + public: + TestErrorCollector() {} + ~TestErrorCollector() {} + + string text_; + + // implements ErrorCollector --------------------------------------- + void AddError(int line, int column, const string& message) { + strings::SubstituteAndAppend(&text_, "$0:$1: $2\n", + line, column, message); + } +}; + +// ------------------------------------------------------------------- + +// We test each operation over a variety of block sizes to insure that +// we test cases where reads cross buffer boundaries as well as cases +// where they don't. This is sort of a brute-force approach to this, +// but it's easy to write and easy to understand. +const int kBlockSizes[] = {1, 2, 3, 5, 7, 13, 32, 1024}; + +class TokenizerTest : public testing::Test { + protected: + // For easy testing. + uint64 ParseInteger(const string& text) { + uint64 result; + EXPECT_TRUE(Tokenizer::ParseInteger(text, kuint64max, &result)); + return result; + } +}; + +// =================================================================== + +// These tests causes gcc 3.3.5 (and earlier?) to give the cryptic error: +// "sorry, unimplemented: `method_call_expr' not supported by dump_expr" +#if !defined(__GNUC__) || __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 3) + +// In each test case, the entire input text should parse as a single token +// of the given type. +struct SimpleTokenCase { + string input; + Tokenizer::TokenType type; +}; + +inline ostream& operator<<(ostream& out, + const SimpleTokenCase& test_case) { + return out << CEscape(test_case.input); +} + +SimpleTokenCase kSimpleTokenCases[] = { + // Test identifiers. + { "hello", Tokenizer::TYPE_IDENTIFIER }, + + // Test integers. + { "123", Tokenizer::TYPE_INTEGER }, + { "0xab6", Tokenizer::TYPE_INTEGER }, + { "0XAB6", Tokenizer::TYPE_INTEGER }, + { "0X1234567", Tokenizer::TYPE_INTEGER }, + { "0x89abcdef", Tokenizer::TYPE_INTEGER }, + { "0x89ABCDEF", Tokenizer::TYPE_INTEGER }, + { "01234567", Tokenizer::TYPE_INTEGER }, + + // Test floats. + { "123.45", Tokenizer::TYPE_FLOAT }, + { "1.", Tokenizer::TYPE_FLOAT }, + { "1e3", Tokenizer::TYPE_FLOAT }, + { "1E3", Tokenizer::TYPE_FLOAT }, + { "1e-3", Tokenizer::TYPE_FLOAT }, + { "1e+3", Tokenizer::TYPE_FLOAT }, + { "1.e3", Tokenizer::TYPE_FLOAT }, + { "1.2e3", Tokenizer::TYPE_FLOAT }, + { ".1", Tokenizer::TYPE_FLOAT }, + { ".1e3", Tokenizer::TYPE_FLOAT }, + { ".1e-3", Tokenizer::TYPE_FLOAT }, + { ".1e+3", Tokenizer::TYPE_FLOAT }, + + // Test strings. + { "'hello'", Tokenizer::TYPE_STRING }, + { "\"foo\"", Tokenizer::TYPE_STRING }, + { "'a\"b'", Tokenizer::TYPE_STRING }, + { "\"a'b\"", Tokenizer::TYPE_STRING }, + { "'a\\'b'", Tokenizer::TYPE_STRING }, + { "\"a\\\"b\"", Tokenizer::TYPE_STRING }, + { "'\\xf'", Tokenizer::TYPE_STRING }, + { "'\\0'", Tokenizer::TYPE_STRING }, + + // Test symbols. + { "+", Tokenizer::TYPE_SYMBOL }, + { ".", Tokenizer::TYPE_SYMBOL }, +}; + +TEST_2D(TokenizerTest, SimpleTokens, kSimpleTokenCases, kBlockSizes) { + // Set up the tokenizer. + TestInputStream input(kSimpleTokenCases_case.input.data(), + kSimpleTokenCases_case.input.size(), + kBlockSizes_case); + TestErrorCollector error_collector; + Tokenizer tokenizer(&input, &error_collector); + + // Before Next() is called, the initial token should always be TYPE_START. 
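Outside the test harness, the setup being built here (input stream, error collector, tokenizer, then a Next() loop) reduces to a plain read loop. A hedged sketch; StderrErrorCollector and DumpTokens are illustrative names, not part of this patch:

#include <google/protobuf/io/tokenizer.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <cstdio>
#include <cstring>
#include <string>

// Minimal ErrorCollector: AddError() is the one pure-virtual method; report
// positions 1-based, as tokenizer.h suggests.
class StderrErrorCollector : public google::protobuf::io::ErrorCollector {
 public:
  void AddError(int line, int column, const std::string& message) {
    std::fprintf(stderr, "%d:%d: %s\n", line + 1, column + 1, message.c_str());
  }
};

// Prints every token with its zero-based position; Next() returns false at
// end of input, after which current() has type TYPE_END.
void DumpTokens(const char* text) {
  google::protobuf::io::ArrayInputStream input(text, strlen(text));
  StderrErrorCollector errors;
  google::protobuf::io::Tokenizer tokenizer(&input, &errors);
  while (tokenizer.Next()) {
    std::printf("%d:%d %s\n", tokenizer.current().line,
                tokenizer.current().column, tokenizer.current().text.c_str());
  }
}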
+ EXPECT_EQ(Tokenizer::TYPE_START, tokenizer.current().type); + EXPECT_EQ("", tokenizer.current().text); + EXPECT_EQ(0, tokenizer.current().line); + EXPECT_EQ(0, tokenizer.current().column); + EXPECT_EQ(0, tokenizer.current().end_column); + + // Parse the token. + ASSERT_TRUE(tokenizer.Next()); + + // Check that it has the right type. + EXPECT_EQ(kSimpleTokenCases_case.type, tokenizer.current().type); + // Check that it contains the complete input text. + EXPECT_EQ(kSimpleTokenCases_case.input, tokenizer.current().text); + // Check that it is located at the beginning of the input + EXPECT_EQ(0, tokenizer.current().line); + EXPECT_EQ(0, tokenizer.current().column); + EXPECT_EQ(kSimpleTokenCases_case.input.size(), + tokenizer.current().end_column); + + // There should be no more input. + EXPECT_FALSE(tokenizer.Next()); + + // After Next() returns false, the token should have type TYPE_END. + EXPECT_EQ(Tokenizer::TYPE_END, tokenizer.current().type); + EXPECT_EQ("", tokenizer.current().text); + EXPECT_EQ(0, tokenizer.current().line); + EXPECT_EQ(kSimpleTokenCases_case.input.size(), tokenizer.current().column); + EXPECT_EQ(kSimpleTokenCases_case.input.size(), + tokenizer.current().end_column); + + // There should be no errors. + EXPECT_TRUE(error_collector.text_.empty()); +} + +TEST_1D(TokenizerTest, FloatSuffix, kBlockSizes) { + // Test the "allow_f_after_float" option. + + // Set up the tokenizer. + const char* text = "1f 2.5f 6e3f 7F"; + TestInputStream input(text, strlen(text), kBlockSizes_case); + TestErrorCollector error_collector; + Tokenizer tokenizer(&input, &error_collector); + tokenizer.set_allow_f_after_float(true); + + // Advance through tokens and check that they are parsed as expected. + ASSERT_TRUE(tokenizer.Next()); + EXPECT_EQ(tokenizer.current().text, "1f"); + EXPECT_EQ(tokenizer.current().type, Tokenizer::TYPE_FLOAT); + ASSERT_TRUE(tokenizer.Next()); + EXPECT_EQ(tokenizer.current().text, "2.5f"); + EXPECT_EQ(tokenizer.current().type, Tokenizer::TYPE_FLOAT); + ASSERT_TRUE(tokenizer.Next()); + EXPECT_EQ(tokenizer.current().text, "6e3f"); + EXPECT_EQ(tokenizer.current().type, Tokenizer::TYPE_FLOAT); + ASSERT_TRUE(tokenizer.Next()); + EXPECT_EQ(tokenizer.current().text, "7F"); + EXPECT_EQ(tokenizer.current().type, Tokenizer::TYPE_FLOAT); + + // There should be no more input. + EXPECT_FALSE(tokenizer.Next()); + // There should be no errors. + EXPECT_TRUE(error_collector.text_.empty()); +} + +#endif + +// ------------------------------------------------------------------- + +// In each case, the input is parsed to produce a list of tokens. The +// last token in "output" must have type TYPE_END. +struct MultiTokenCase { + string input; + Tokenizer::Token output[10]; // The compiler wants a constant array + // size for initialization to work. There + // is no reason this can't be increased if + // needed. +}; + +inline ostream& operator<<(ostream& out, + const MultiTokenCase& test_case) { + return out << CEscape(test_case.input); +} + +MultiTokenCase kMultiTokenCases[] = { + // Test empty input. + { "", { + { Tokenizer::TYPE_END , "" , 0, 0 }, + }}, + + // Test all token types at the same time. + { "foo 1 1.2 + 'bar'", { + { Tokenizer::TYPE_IDENTIFIER, "foo" , 0, 0, 3 }, + { Tokenizer::TYPE_INTEGER , "1" , 0, 4, 5 }, + { Tokenizer::TYPE_FLOAT , "1.2" , 0, 6, 9 }, + { Tokenizer::TYPE_SYMBOL , "+" , 0, 10, 11 }, + { Tokenizer::TYPE_STRING , "'bar'", 0, 12, 17 }, + { Tokenizer::TYPE_END , "" , 0, 17, 17 }, + }}, + + // Test that consecutive symbols are parsed as separate tokens. 
+ { "!@+%", { + { Tokenizer::TYPE_SYMBOL , "!" , 0, 0, 1 }, + { Tokenizer::TYPE_SYMBOL , "@" , 0, 1, 2 }, + { Tokenizer::TYPE_SYMBOL , "+" , 0, 2, 3 }, + { Tokenizer::TYPE_SYMBOL , "%" , 0, 3, 4 }, + { Tokenizer::TYPE_END , "" , 0, 4, 4 }, + }}, + + // Test that newlines affect line numbers correctly. + { "foo bar\nrab oof", { + { Tokenizer::TYPE_IDENTIFIER, "foo", 0, 0, 3 }, + { Tokenizer::TYPE_IDENTIFIER, "bar", 0, 4, 7 }, + { Tokenizer::TYPE_IDENTIFIER, "rab", 1, 0, 3 }, + { Tokenizer::TYPE_IDENTIFIER, "oof", 1, 4, 7 }, + { Tokenizer::TYPE_END , "" , 1, 7, 7 }, + }}, + + // Test that tabs affect column numbers correctly. + { "foo\tbar \tbaz", { + { Tokenizer::TYPE_IDENTIFIER, "foo", 0, 0, 3 }, + { Tokenizer::TYPE_IDENTIFIER, "bar", 0, 8, 11 }, + { Tokenizer::TYPE_IDENTIFIER, "baz", 0, 16, 19 }, + { Tokenizer::TYPE_END , "" , 0, 19, 19 }, + }}, + + // Test that tabs in string literals affect column numbers correctly. + { "\"foo\tbar\" baz", { + { Tokenizer::TYPE_STRING , "\"foo\tbar\"", 0, 0, 12 }, + { Tokenizer::TYPE_IDENTIFIER, "baz" , 0, 13, 16 }, + { Tokenizer::TYPE_END , "" , 0, 16, 16 }, + }}, + + // Test that line comments are ignored. + { "foo // This is a comment\n" + "bar // This is another comment", { + { Tokenizer::TYPE_IDENTIFIER, "foo", 0, 0, 3 }, + { Tokenizer::TYPE_IDENTIFIER, "bar", 1, 0, 3 }, + { Tokenizer::TYPE_END , "" , 1, 30, 30 }, + }}, + + // Test that block comments are ignored. + { "foo /* This is a block comment */ bar", { + { Tokenizer::TYPE_IDENTIFIER, "foo", 0, 0, 3 }, + { Tokenizer::TYPE_IDENTIFIER, "bar", 0, 34, 37 }, + { Tokenizer::TYPE_END , "" , 0, 37, 37 }, + }}, + + // Test that sh-style comments are not ignored by default. + { "foo # bar\n" + "baz", { + { Tokenizer::TYPE_IDENTIFIER, "foo", 0, 0, 3 }, + { Tokenizer::TYPE_SYMBOL , "#" , 0, 4, 5 }, + { Tokenizer::TYPE_IDENTIFIER, "bar", 0, 6, 9 }, + { Tokenizer::TYPE_IDENTIFIER, "baz", 1, 0, 3 }, + { Tokenizer::TYPE_END , "" , 1, 3, 3 }, + }}, + + // Bytes with the high-order bit set should not be seen as control characters. + { "\300", { + { Tokenizer::TYPE_SYMBOL, "\300", 0, 0, 1 }, + { Tokenizer::TYPE_END , "" , 0, 1, 1 }, + }}, + + // Test all whitespace chars + { "foo\n\t\r\v\fbar", { + { Tokenizer::TYPE_IDENTIFIER, "foo", 0, 0, 3 }, + { Tokenizer::TYPE_IDENTIFIER, "bar", 1, 11, 14 }, + { Tokenizer::TYPE_END , "" , 1, 14, 14 }, + }}, +}; + +TEST_2D(TokenizerTest, MultipleTokens, kMultiTokenCases, kBlockSizes) { + // Set up the tokenizer. + TestInputStream input(kMultiTokenCases_case.input.data(), + kMultiTokenCases_case.input.size(), + kBlockSizes_case); + TestErrorCollector error_collector; + Tokenizer tokenizer(&input, &error_collector); + + // Before Next() is called, the initial token should always be TYPE_START. + EXPECT_EQ(Tokenizer::TYPE_START, tokenizer.current().type); + EXPECT_EQ("", tokenizer.current().text); + EXPECT_EQ(0, tokenizer.current().line); + EXPECT_EQ(0, tokenizer.current().column); + EXPECT_EQ(0, tokenizer.current().end_column); + + // Loop through all expected tokens. + int i = 0; + Tokenizer::Token token; + do { + token = kMultiTokenCases_case.output[i++]; + + SCOPED_TRACE(testing::Message() << "Token #" << i << ": " << token.text); + + Tokenizer::Token previous = tokenizer.current(); + + // Next() should only return false when it hits the end token. + if (token.type != Tokenizer::TYPE_END) { + ASSERT_TRUE(tokenizer.Next()); + } else { + ASSERT_FALSE(tokenizer.Next()); + } + + // Check that the previous token is set correctly. 
+ EXPECT_EQ(previous.type, tokenizer.previous().type); + EXPECT_EQ(previous.text, tokenizer.previous().text); + EXPECT_EQ(previous.line, tokenizer.previous().line); + EXPECT_EQ(previous.column, tokenizer.previous().column); + EXPECT_EQ(previous.end_column, tokenizer.previous().end_column); + + // Check that the token matches the expected one. + EXPECT_EQ(token.type, tokenizer.current().type); + EXPECT_EQ(token.text, tokenizer.current().text); + EXPECT_EQ(token.line, tokenizer.current().line); + EXPECT_EQ(token.column, tokenizer.current().column); + EXPECT_EQ(token.end_column, tokenizer.current().end_column); + + } while (token.type != Tokenizer::TYPE_END); + + // There should be no errors. + EXPECT_TRUE(error_collector.text_.empty()); +} + +// This test causes gcc 3.3.5 (and earlier?) to give the cryptic error: +// "sorry, unimplemented: `method_call_expr' not supported by dump_expr" +#if !defined(__GNUC__) || __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 3) + +TEST_1D(TokenizerTest, ShCommentStyle, kBlockSizes) { + // Test the "comment_style" option. + + const char* text = "foo # bar\n" + "baz // qux\n" + "corge /* grault */\n" + "garply"; + const char* const kTokens[] = {"foo", // "# bar" is ignored + "baz", "/", "/", "qux", + "corge", "/", "*", "grault", "*", "/", + "garply"}; + + // Set up the tokenizer. + TestInputStream input(text, strlen(text), kBlockSizes_case); + TestErrorCollector error_collector; + Tokenizer tokenizer(&input, &error_collector); + tokenizer.set_comment_style(Tokenizer::SH_COMMENT_STYLE); + + // Advance through tokens and check that they are parsed as expected. + for (int i = 0; i < GOOGLE_ARRAYSIZE(kTokens); i++) { + EXPECT_TRUE(tokenizer.Next()); + EXPECT_EQ(tokenizer.current().text, kTokens[i]); + } + + // There should be no more input. + EXPECT_FALSE(tokenizer.Next()); + // There should be no errors. + EXPECT_TRUE(error_collector.text_.empty()); +} + +#endif + +// ------------------------------------------------------------------- + +// Test parse helpers. It's not really worth setting up a full data-driven +// test here. +TEST_F(TokenizerTest, ParseInteger) { + EXPECT_EQ(0, ParseInteger("0")); + EXPECT_EQ(123, ParseInteger("123")); + EXPECT_EQ(0xabcdef12u, ParseInteger("0xabcdef12")); + EXPECT_EQ(0xabcdef12u, ParseInteger("0xABCDEF12")); + EXPECT_EQ(kuint64max, ParseInteger("0xFFFFFFFFFFFFFFFF")); + EXPECT_EQ(01234567, ParseInteger("01234567")); + EXPECT_EQ(0X123, ParseInteger("0X123")); + + // Test invalid integers that may still be tokenized as integers. + EXPECT_EQ(0, ParseInteger("0x")); + + uint64 i; +#ifdef GTEST_HAS_DEATH_TEST // death tests do not work on Windows yet + // Test invalid integers that will never be tokenized as integers. + EXPECT_DEBUG_DEATH(Tokenizer::ParseInteger("zxy", kuint64max, &i), + "passed text that could not have been tokenized as an integer"); + EXPECT_DEBUG_DEATH(Tokenizer::ParseInteger("1.2", kuint64max, &i), + "passed text that could not have been tokenized as an integer"); + EXPECT_DEBUG_DEATH(Tokenizer::ParseInteger("08", kuint64max, &i), + "passed text that could not have been tokenized as an integer"); + EXPECT_DEBUG_DEATH(Tokenizer::ParseInteger("0xg", kuint64max, &i), + "passed text that could not have been tokenized as an integer"); + EXPECT_DEBUG_DEATH(Tokenizer::ParseInteger("-1", kuint64max, &i), + "passed text that could not have been tokenized as an integer"); +#endif // GTEST_HAS_DEATH_TEST + + // Test overflows. 
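To make the comment-style switch exercised in the ShCommentStyle test above easier to picture, here is a small sketch that tokenizes the same text with both styles. It is illustrative only and not part of this patch; NullErrorCollector and Dump are invented, and CPP_COMMENT_STYLE is assumed to be the default enum value alongside SH_COMMENT_STYLE in the upstream tokenizer.h.

#include <google/protobuf/io/tokenizer.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>
#include <cstring>
#include <iostream>

using google::protobuf::io::ArrayInputStream;
using google::protobuf::io::ErrorCollector;
using google::protobuf::io::Tokenizer;

// Error collector that silently drops errors (none are expected here).
class NullErrorCollector : public ErrorCollector {
 public:
  void AddError(int line, int column, const std::string& message) {}
};

// Tokenizes `text` with the given comment style and prints each token.
void Dump(const char* text, Tokenizer::CommentStyle style) {
  ArrayInputStream input(text, strlen(text));
  NullErrorCollector errors;
  Tokenizer tokenizer(&input, &errors);
  tokenizer.set_comment_style(style);
  while (tokenizer.Next()) std::cout << "[" << tokenizer.current().text << "] ";
  std::cout << "\n";
}

int main() {
  const char* text = "foo # bar\nbaz // qux";
  Dump(text, Tokenizer::CPP_COMMENT_STYLE);  // "#" is a symbol, "// qux" is a comment
  Dump(text, Tokenizer::SH_COMMENT_STYLE);   // "# bar" is a comment, "//" is two symbols
  return 0;
}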
+ EXPECT_TRUE (Tokenizer::ParseInteger("0", 0, &i)); + EXPECT_FALSE(Tokenizer::ParseInteger("1", 0, &i)); + EXPECT_TRUE (Tokenizer::ParseInteger("1", 1, &i)); + EXPECT_TRUE (Tokenizer::ParseInteger("12345", 12345, &i)); + EXPECT_FALSE(Tokenizer::ParseInteger("12346", 12345, &i)); + EXPECT_TRUE (Tokenizer::ParseInteger("0xFFFFFFFFFFFFFFFF" , kuint64max, &i)); + EXPECT_FALSE(Tokenizer::ParseInteger("0x10000000000000000", kuint64max, &i)); +} + +TEST_F(TokenizerTest, ParseFloat) { + EXPECT_DOUBLE_EQ(1 , Tokenizer::ParseFloat("1.")); + EXPECT_DOUBLE_EQ(1e3 , Tokenizer::ParseFloat("1e3")); + EXPECT_DOUBLE_EQ(1e3 , Tokenizer::ParseFloat("1E3")); + EXPECT_DOUBLE_EQ(1.5e3, Tokenizer::ParseFloat("1.5e3")); + EXPECT_DOUBLE_EQ(.1 , Tokenizer::ParseFloat(".1")); + EXPECT_DOUBLE_EQ(.25 , Tokenizer::ParseFloat(".25")); + EXPECT_DOUBLE_EQ(.1e3 , Tokenizer::ParseFloat(".1e3")); + EXPECT_DOUBLE_EQ(.25e3, Tokenizer::ParseFloat(".25e3")); + EXPECT_DOUBLE_EQ(.1e+3, Tokenizer::ParseFloat(".1e+3")); + EXPECT_DOUBLE_EQ(.1e-3, Tokenizer::ParseFloat(".1e-3")); + EXPECT_DOUBLE_EQ(5 , Tokenizer::ParseFloat("5")); + EXPECT_DOUBLE_EQ(6e-12, Tokenizer::ParseFloat("6e-12")); + EXPECT_DOUBLE_EQ(1.2 , Tokenizer::ParseFloat("1.2")); + EXPECT_DOUBLE_EQ(1.e2 , Tokenizer::ParseFloat("1.e2")); + + // Test invalid integers that may still be tokenized as integers. + EXPECT_DOUBLE_EQ(1, Tokenizer::ParseFloat("1e")); + EXPECT_DOUBLE_EQ(1, Tokenizer::ParseFloat("1e-")); + EXPECT_DOUBLE_EQ(1, Tokenizer::ParseFloat("1.e")); + + // Test 'f' suffix. + EXPECT_DOUBLE_EQ(1, Tokenizer::ParseFloat("1f")); + EXPECT_DOUBLE_EQ(1, Tokenizer::ParseFloat("1.0f")); + EXPECT_DOUBLE_EQ(1, Tokenizer::ParseFloat("1F")); + + // These should parse successfully even though they are out of range. + // Overflows become infinity and underflows become zero. + EXPECT_EQ( 0.0, Tokenizer::ParseFloat("1e-9999999999999999999999999999")); + EXPECT_EQ(HUGE_VAL, Tokenizer::ParseFloat("1e+9999999999999999999999999999")); + +#ifdef GTEST_HAS_DEATH_TEST // death tests do not work on Windows yet + // Test invalid integers that will never be tokenized as integers. + EXPECT_DEBUG_DEATH(Tokenizer::ParseFloat("zxy"), + "passed text that could not have been tokenized as a float"); + EXPECT_DEBUG_DEATH(Tokenizer::ParseFloat("1-e0"), + "passed text that could not have been tokenized as a float"); + EXPECT_DEBUG_DEATH(Tokenizer::ParseFloat("-1.0"), + "passed text that could not have been tokenized as a float"); +#endif // GTEST_HAS_DEATH_TEST +} + +TEST_F(TokenizerTest, ParseString) { + string output; + Tokenizer::ParseString("'hello'", &output); + EXPECT_EQ("hello", output); + Tokenizer::ParseString("\"blah\\nblah2\"", &output); + EXPECT_EQ("blah\nblah2", output); + Tokenizer::ParseString("'\\1x\\1\\123\\739\\52\\334n\\3'", &output); + EXPECT_EQ("\1x\1\123\739\52\334n\3", output); + Tokenizer::ParseString("'\\x20\\x4'", &output); + EXPECT_EQ("\x20\x4", output); + + // Test invalid strings that may still be tokenized as strings. + Tokenizer::ParseString("\"\\a\\l\\v\\t", &output); // \l is invalid + EXPECT_EQ("\a?\v\t", output); + Tokenizer::ParseString("'", &output); + EXPECT_EQ("", output); + Tokenizer::ParseString("'\\", &output); + EXPECT_EQ("\\", output); + + // Test invalid strings that will never be tokenized as strings. 
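A quick sketch of the static string helpers the tests above rely on; it is not part of the vendored sources. The inputs are invented, and the sketch assumes ParseString() is handed text that already tokenized as TYPE_STRING, which is what the implementation expects.

#include <google/protobuf/io/tokenizer.h>
#include <iostream>
#include <string>

using google::protobuf::io::Tokenizer;

int main() {
  // ParseString strips the surrounding quotes and expands escape sequences;
  // here \t becomes a tab and \x21 becomes '!'.
  std::string out;
  Tokenizer::ParseString("'hello\\tworld\\x21'", &out);
  std::cout << out << std::endl;

  // ParseStringAppend keeps whatever is already in the output string.
  std::string prefix = "note: ";
  Tokenizer::ParseStringAppend("'ok'", &prefix);
  std::cout << prefix << std::endl;  // prints: note: ok
  return 0;
}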
+#ifdef GTEST_HAS_DEATH_TEST // death tests do not work on Windows yet + EXPECT_DEBUG_DEATH(Tokenizer::ParseString("", &output), + "passed text that could not have been tokenized as a string"); +#endif // GTEST_HAS_DEATH_TEST +} + +TEST_F(TokenizerTest, ParseStringAppend) { + // Check that ParseString and ParseStringAppend differ. + string output("stuff+"); + Tokenizer::ParseStringAppend("'hello'", &output); + EXPECT_EQ("stuff+hello", output); + Tokenizer::ParseString("'hello'", &output); + EXPECT_EQ("hello", output); +} + +// ------------------------------------------------------------------- + +// Each case parses some input text, ignoring the tokens produced, and +// checks that the error output matches what is expected. +struct ErrorCase { + string input; + bool recoverable; // True if the tokenizer should be able to recover and + // parse more tokens after seeing this error. Cases + // for which this is true must end with "foo" as + // the last token, which the test will check for. + const char* errors; +}; + +inline ostream& operator<<(ostream& out, + const ErrorCase& test_case) { + return out << CEscape(test_case.input); +} + +ErrorCase kErrorCases[] = { + // String errors. + { "'\\l' foo", true, + "0:2: Invalid escape sequence in string literal.\n" }, + { "'\\x' foo", true, + "0:3: Expected hex digits for escape sequence.\n" }, + { "'foo", false, + "0:4: String literals cannot cross line boundaries.\n" }, + { "'bar\nfoo", true, + "0:4: String literals cannot cross line boundaries.\n" }, + + // Integer errors. + { "123foo", true, + "0:3: Need space between number and identifier.\n" }, + + // Hex/octal errors. + { "0x foo", true, + "0:2: \"0x\" must be followed by hex digits.\n" }, + { "0541823 foo", true, + "0:4: Numbers starting with leading zero must be in octal.\n" }, + { "0x123z foo", true, + "0:5: Need space between number and identifier.\n" }, + { "0x123.4 foo", true, + "0:5: Hex and octal numbers must be integers.\n" }, + { "0123.4 foo", true, + "0:4: Hex and octal numbers must be integers.\n" }, + + // Float errors. + { "1e foo", true, + "0:2: \"e\" must be followed by exponent.\n" }, + { "1e- foo", true, + "0:3: \"e\" must be followed by exponent.\n" }, + { "1.2.3 foo", true, + "0:3: Already saw decimal point or exponent; can't have another one.\n" }, + { "1e2.3 foo", true, + "0:3: Already saw decimal point or exponent; can't have another one.\n" }, + { "a.1 foo", true, + "0:1: Need space between identifier and decimal point.\n" }, + // allow_f_after_float not enabled, so this should be an error. + { "1.0f foo", true, + "0:3: Need space between number and identifier.\n" }, + + // Block comment errors. + { "/*", false, + "0:2: End-of-file inside block comment.\n" + "0:0: Comment started here.\n"}, + { "/*/*/ foo", true, + "0:3: \"/*\" inside block comment. Block comments cannot be nested.\n"}, + + // Control characters. Multiple consecutive control characters should only + // produce one error. + { "\b foo", true, + "0:0: Invalid control characters encountered in text.\n" }, + { "\b\b foo", true, + "0:0: Invalid control characters encountered in text.\n" }, + + // Check that control characters at end of input don't result in an + // infinite loop. + { "\b", false, + "0:0: Invalid control characters encountered in text.\n" }, + + // Check recovery from '\0'. We have to explicitly specify the length of + // these strings because otherwise the string constructor will just call + // strlen() which will see the first '\0' and think that is the end of the + // string. 
+ { string("\0foo", 4), true, + "0:0: Invalid control characters encountered in text.\n" }, + { string("\0\0foo", 5), true, + "0:0: Invalid control characters encountered in text.\n" }, +}; + +TEST_2D(TokenizerTest, Errors, kErrorCases, kBlockSizes) { + // Set up the tokenizer. + TestInputStream input(kErrorCases_case.input.data(), + kErrorCases_case.input.size(), + kBlockSizes_case); + TestErrorCollector error_collector; + Tokenizer tokenizer(&input, &error_collector); + + // Ignore all input, except remember if the last token was "foo". + bool last_was_foo = false; + while (tokenizer.Next()) { + last_was_foo = tokenizer.current().text == "foo"; + } + + // Check that the errors match what was expected. + EXPECT_EQ(error_collector.text_, kErrorCases_case.errors); + + // If the error was recoverable, make sure we saw "foo" after it. + if (kErrorCases_case.recoverable) { + EXPECT_TRUE(last_was_foo); + } +} + +// ------------------------------------------------------------------- + +TEST_1D(TokenizerTest, BackUpOnDestruction, kBlockSizes) { + string text = "foo bar"; + TestInputStream input(text.data(), text.size(), kBlockSizes_case); + + // Create a tokenizer, read one token, then destroy it. + { + TestErrorCollector error_collector; + Tokenizer tokenizer(&input, &error_collector); + + tokenizer.Next(); + } + + // Only "foo" should have been read. + EXPECT_EQ(strlen("foo"), input.ByteCount()); +} + +} // namespace +} // namespace io +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream.cc new file mode 100644 index 0000000000..dad6ff1446 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream.cc @@ -0,0 +1,48 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include + + +namespace google { +namespace protobuf { +namespace io { + +ZeroCopyInputStream::~ZeroCopyInputStream() {} +ZeroCopyOutputStream::~ZeroCopyOutputStream() {} + + +} // namespace io +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream.h new file mode 100644 index 0000000000..db5326f703 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream.h @@ -0,0 +1,238 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This file contains the ZeroCopyInputStream and ZeroCopyOutputStream +// interfaces, which represent abstract I/O streams to and from which +// protocol buffers can be read and written. For a few simple +// implementations of these interfaces, see zero_copy_stream_impl.h. 
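Before the interface comment continues, here is a minimal sketch of the zero-copy read pattern plus the BackUp() contract it goes on to describe, using the ArrayInputStream implementation from zero_copy_stream_impl_lite.h. It is illustrative only; the sample data and the 4-byte block size are arbitrary.

#include <google/protobuf/io/zero_copy_stream_impl_lite.h>
#include <iostream>

using google::protobuf::io::ArrayInputStream;

int main() {
  const char data[] = "hello world";
  ArrayInputStream input(data, sizeof(data) - 1, 4);  // hand out 4-byte chunks

  const void* buffer;
  int size;

  // First chunk is "hell". Pretend we only consumed "he" and push back "ll".
  if (!input.Next(&buffer, &size)) return 1;
  std::cout.write(reinterpret_cast<const char*>(buffer), 2);
  input.BackUp(size - 2);  // only legal immediately after a successful Next()

  // Subsequent Next() calls re-deliver the backed-up bytes before new data.
  while (input.Next(&buffer, &size)) {
    std::cout.write(reinterpret_cast<const char*>(buffer), size);
  }
  std::cout << "\ntotal bytes: " << input.ByteCount() << "\n";  // 11
  return 0;
}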
+// +// These interfaces are different from classic I/O streams in that they +// try to minimize the amount of data copying that needs to be done. +// To accomplish this, responsibility for allocating buffers is moved to +// the stream object, rather than being the responsibility of the caller. +// So, the stream can return a buffer which actually points directly into +// the final data structure where the bytes are to be stored, and the caller +// can interact directly with that buffer, eliminating an intermediate copy +// operation. +// +// As an example, consider the common case in which you are reading bytes +// from an array that is already in memory (or perhaps an mmap()ed file). +// With classic I/O streams, you would do something like: +// char buffer[BUFFER_SIZE]; +// input->Read(buffer, BUFFER_SIZE); +// DoSomething(buffer, BUFFER_SIZE); +// Then, the stream basically just calls memcpy() to copy the data from +// the array into your buffer. With a ZeroCopyInputStream, you would do +// this instead: +// const void* buffer; +// int size; +// input->Next(&buffer, &size); +// DoSomething(buffer, size); +// Here, no copy is performed. The input stream returns a pointer directly +// into the backing array, and the caller ends up reading directly from it. +// +// If you want to be able to read the old-fashion way, you can create +// a CodedInputStream or CodedOutputStream wrapping these objects and use +// their ReadRaw()/WriteRaw() methods. These will, of course, add a copy +// step, but Coded*Stream will handle buffering so at least it will be +// reasonably efficient. +// +// ZeroCopyInputStream example: +// // Read in a file and print its contents to stdout. +// int fd = open("myfile", O_RDONLY); +// ZeroCopyInputStream* input = new FileInputStream(fd); +// +// const void* buffer; +// int size; +// while (input->Next(&buffer, &size)) { +// cout.write(buffer, size); +// } +// +// delete input; +// close(fd); +// +// ZeroCopyOutputStream example: +// // Copy the contents of "infile" to "outfile", using plain read() for +// // "infile" but a ZeroCopyOutputStream for "outfile". +// int infd = open("infile", O_RDONLY); +// int outfd = open("outfile", O_WRONLY); +// ZeroCopyOutputStream* output = new FileOutputStream(outfd); +// +// void* buffer; +// int size; +// while (output->Next(&buffer, &size)) { +// int bytes = read(infd, buffer, size); +// if (bytes < size) { +// // Reached EOF. +// output->BackUp(size - bytes); +// break; +// } +// } +// +// delete output; +// close(infd); +// close(outfd); + +#ifndef GOOGLE_PROTOBUF_IO_ZERO_COPY_STREAM_H__ +#define GOOGLE_PROTOBUF_IO_ZERO_COPY_STREAM_H__ + +#include +#include + +namespace google { + +namespace protobuf { +namespace io { + +// Defined in this file. +class ZeroCopyInputStream; +class ZeroCopyOutputStream; + +// Abstract interface similar to an input stream but designed to minimize +// copying. +class LIBPROTOBUF_EXPORT ZeroCopyInputStream { + public: + inline ZeroCopyInputStream() {} + virtual ~ZeroCopyInputStream(); + + // Obtains a chunk of data from the stream. + // + // Preconditions: + // * "size" and "data" are not NULL. + // + // Postconditions: + // * If the returned value is false, there is no more data to return or + // an error occurred. All errors are permanent. + // * Otherwise, "size" points to the actual number of bytes read and "data" + // points to a pointer to a buffer containing these bytes. 
+ // * Ownership of this buffer remains with the stream, and the buffer + // remains valid only until some other method of the stream is called + // or the stream is destroyed. + // * It is legal for the returned buffer to have zero size, as long + // as repeatedly calling Next() eventually yields a buffer with non-zero + // size. + virtual bool Next(const void** data, int* size) = 0; + + // Backs up a number of bytes, so that the next call to Next() returns + // data again that was already returned by the last call to Next(). This + // is useful when writing procedures that are only supposed to read up + // to a certain point in the input, then return. If Next() returns a + // buffer that goes beyond what you wanted to read, you can use BackUp() + // to return to the point where you intended to finish. + // + // Preconditions: + // * The last method called must have been Next(). + // * count must be less than or equal to the size of the last buffer + // returned by Next(). + // + // Postconditions: + // * The last "count" bytes of the last buffer returned by Next() will be + // pushed back into the stream. Subsequent calls to Next() will return + // the same data again before producing new data. + virtual void BackUp(int count) = 0; + + // Skips a number of bytes. Returns false if the end of the stream is + // reached or some input error occurred. In the end-of-stream case, the + // stream is advanced to the end of the stream (so ByteCount() will return + // the total size of the stream). + virtual bool Skip(int count) = 0; + + // Returns the total number of bytes read since this object was created. + virtual int64 ByteCount() const = 0; + + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ZeroCopyInputStream); +}; + +// Abstract interface similar to an output stream but designed to minimize +// copying. +class LIBPROTOBUF_EXPORT ZeroCopyOutputStream { + public: + inline ZeroCopyOutputStream() {} + virtual ~ZeroCopyOutputStream(); + + // Obtains a buffer into which data can be written. Any data written + // into this buffer will eventually (maybe instantly, maybe later on) + // be written to the output. + // + // Preconditions: + // * "size" and "data" are not NULL. + // + // Postconditions: + // * If the returned value is false, an error occurred. All errors are + // permanent. + // * Otherwise, "size" points to the actual number of bytes in the buffer + // and "data" points to the buffer. + // * Ownership of this buffer remains with the stream, and the buffer + // remains valid only until some other method of the stream is called + // or the stream is destroyed. + // * Any data which the caller stores in this buffer will eventually be + // written to the output (unless BackUp() is called). + // * It is legal for the returned buffer to have zero size, as long + // as repeatedly calling Next() eventually yields a buffer with non-zero + // size. + virtual bool Next(void** data, int* size) = 0; + + // Backs up a number of bytes, so that the end of the last buffer returned + // by Next() is not actually written. This is needed when you finish + // writing all the data you want to write, but the last buffer was bigger + // than you needed. You don't want to write a bunch of garbage after the + // end of your data, so you use BackUp() to back up. + // + // Preconditions: + // * The last method called must have been Next(). + // * count must be less than or equal to the size of the last buffer + // returned by Next(). 
+ // * The caller must not have written anything to the last "count" bytes + // of that buffer. + // + // Postconditions: + // * The last "count" bytes of the last buffer returned by Next() will be + // ignored. + virtual void BackUp(int count) = 0; + + // Returns the total number of bytes written since this object was created. + virtual int64 ByteCount() const = 0; + + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ZeroCopyOutputStream); +}; + +} // namespace io +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_IO_ZERO_COPY_STREAM_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream_impl.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream_impl.cc new file mode 100644 index 0000000000..1384c746af --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream_impl.cc @@ -0,0 +1,470 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifdef _MSC_VER +#include +#else +#include +#include +#include +#include +#endif +#include +#include +#include + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace io { + +#ifdef _WIN32 +// Win32 lseek is broken: If invoked on a non-seekable file descriptor, its +// return value is undefined. We re-define it to always produce an error. +#define lseek(fd, offset, origin) ((off_t)-1) +#endif + +namespace { + +// EINTR sucks. 
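For the output half of the interface declared above, here is a small sketch that writes through a StringOutputStream and returns the unused tail of the buffer with BackUp(), before the file-descriptor plumbing below. It is illustrative only and not part of this patch; the message and the buffer arithmetic are invented.

#include <google/protobuf/io/zero_copy_stream_impl_lite.h>
#include <algorithm>
#include <cstring>
#include <iostream>
#include <string>

using google::protobuf::io::StringOutputStream;

int main() {
  std::string target;
  StringOutputStream output(&target);

  const char msg[] = "zero copy";
  const int msg_size = static_cast<int>(sizeof(msg) - 1);
  int written = 0;

  void* buffer;
  int size;
  while (written < msg_size && output.Next(&buffer, &size)) {
    int n = std::min(size, msg_size - written);
    memcpy(buffer, msg + written, n);
    written += n;
    // Give back the unused tail so it does not stay in `target`.
    if (n < size) output.BackUp(size - n);
  }

  std::cout << target << "\n";  // prints "zero copy"
  return 0;
}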
+int close_no_eintr(int fd) { + int result; + do { + result = close(fd); + } while (result < 0 && errno == EINTR); + return result; +} + +} // namespace + + +// =================================================================== + +FileInputStream::FileInputStream(int file_descriptor, int block_size) + : copying_input_(file_descriptor), + impl_(©ing_input_, block_size) { +} + +FileInputStream::~FileInputStream() {} + +bool FileInputStream::Close() { + return copying_input_.Close(); +} + +bool FileInputStream::Next(const void** data, int* size) { + return impl_.Next(data, size); +} + +void FileInputStream::BackUp(int count) { + impl_.BackUp(count); +} + +bool FileInputStream::Skip(int count) { + return impl_.Skip(count); +} + +int64 FileInputStream::ByteCount() const { + return impl_.ByteCount(); +} + +FileInputStream::CopyingFileInputStream::CopyingFileInputStream( + int file_descriptor) + : file_(file_descriptor), + close_on_delete_(false), + is_closed_(false), + errno_(0), + previous_seek_failed_(false) { +} + +FileInputStream::CopyingFileInputStream::~CopyingFileInputStream() { + if (close_on_delete_) { + if (!Close()) { + GOOGLE_LOG(ERROR) << "close() failed: " << strerror(errno_); + } + } +} + +bool FileInputStream::CopyingFileInputStream::Close() { + GOOGLE_CHECK(!is_closed_); + + is_closed_ = true; + if (close_no_eintr(file_) != 0) { + // The docs on close() do not specify whether a file descriptor is still + // open after close() fails with EIO. However, the glibc source code + // seems to indicate that it is not. + errno_ = errno; + return false; + } + + return true; +} + +int FileInputStream::CopyingFileInputStream::Read(void* buffer, int size) { + GOOGLE_CHECK(!is_closed_); + + int result; + do { + result = read(file_, buffer, size); + } while (result < 0 && errno == EINTR); + + if (result < 0) { + // Read error (not EOF). + errno_ = errno; + } + + return result; +} + +int FileInputStream::CopyingFileInputStream::Skip(int count) { + GOOGLE_CHECK(!is_closed_); + + if (!previous_seek_failed_ && + lseek(file_, count, SEEK_CUR) != (off_t)-1) { + // Seek succeeded. + return count; + } else { + // Failed to seek. + + // Note to self: Don't seek again. This file descriptor doesn't + // support it. + previous_seek_failed_ = true; + + // Use the default implementation. 
+ return CopyingInputStream::Skip(count); + } +} + +// =================================================================== + +FileOutputStream::FileOutputStream(int file_descriptor, int block_size) + : copying_output_(file_descriptor), + impl_(©ing_output_, block_size) { +} + +FileOutputStream::~FileOutputStream() { + impl_.Flush(); +} + +bool FileOutputStream::Close() { + bool flush_succeeded = impl_.Flush(); + return copying_output_.Close() && flush_succeeded; +} + +bool FileOutputStream::Flush() { + return impl_.Flush(); +} + +bool FileOutputStream::Next(void** data, int* size) { + return impl_.Next(data, size); +} + +void FileOutputStream::BackUp(int count) { + impl_.BackUp(count); +} + +int64 FileOutputStream::ByteCount() const { + return impl_.ByteCount(); +} + +FileOutputStream::CopyingFileOutputStream::CopyingFileOutputStream( + int file_descriptor) + : file_(file_descriptor), + close_on_delete_(false), + is_closed_(false), + errno_(0) { +} + +FileOutputStream::CopyingFileOutputStream::~CopyingFileOutputStream() { + if (close_on_delete_) { + if (!Close()) { + GOOGLE_LOG(ERROR) << "close() failed: " << strerror(errno_); + } + } +} + +bool FileOutputStream::CopyingFileOutputStream::Close() { + GOOGLE_CHECK(!is_closed_); + + is_closed_ = true; + if (close_no_eintr(file_) != 0) { + // The docs on close() do not specify whether a file descriptor is still + // open after close() fails with EIO. However, the glibc source code + // seems to indicate that it is not. + errno_ = errno; + return false; + } + + return true; +} + +bool FileOutputStream::CopyingFileOutputStream::Write( + const void* buffer, int size) { + GOOGLE_CHECK(!is_closed_); + int total_written = 0; + + const uint8* buffer_base = reinterpret_cast(buffer); + + while (total_written < size) { + int bytes; + do { + bytes = write(file_, buffer_base + total_written, size - total_written); + } while (bytes < 0 && errno == EINTR); + + if (bytes <= 0) { + // Write error. + + // FIXME(kenton): According to the man page, if write() returns zero, + // there was no error; write() simply did not write anything. It's + // unclear under what circumstances this might happen, but presumably + // errno won't be set in this case. I am confused as to how such an + // event should be handled. For now I'm treating it as an error, since + // retrying seems like it could lead to an infinite loop. I suspect + // this never actually happens anyway. 
+ + if (bytes < 0) { + errno_ = errno; + } + return false; + } + total_written += bytes; + } + + return true; +} + +// =================================================================== + +IstreamInputStream::IstreamInputStream(istream* input, int block_size) + : copying_input_(input), + impl_(©ing_input_, block_size) { +} + +IstreamInputStream::~IstreamInputStream() {} + +bool IstreamInputStream::Next(const void** data, int* size) { + return impl_.Next(data, size); +} + +void IstreamInputStream::BackUp(int count) { + impl_.BackUp(count); +} + +bool IstreamInputStream::Skip(int count) { + return impl_.Skip(count); +} + +int64 IstreamInputStream::ByteCount() const { + return impl_.ByteCount(); +} + +IstreamInputStream::CopyingIstreamInputStream::CopyingIstreamInputStream( + istream* input) + : input_(input) { +} + +IstreamInputStream::CopyingIstreamInputStream::~CopyingIstreamInputStream() {} + +int IstreamInputStream::CopyingIstreamInputStream::Read( + void* buffer, int size) { + input_->read(reinterpret_cast(buffer), size); + int result = input_->gcount(); + if (result == 0 && input_->fail() && !input_->eof()) { + return -1; + } + return result; +} + +// =================================================================== + +OstreamOutputStream::OstreamOutputStream(ostream* output, int block_size) + : copying_output_(output), + impl_(©ing_output_, block_size) { +} + +OstreamOutputStream::~OstreamOutputStream() { + impl_.Flush(); +} + +bool OstreamOutputStream::Next(void** data, int* size) { + return impl_.Next(data, size); +} + +void OstreamOutputStream::BackUp(int count) { + impl_.BackUp(count); +} + +int64 OstreamOutputStream::ByteCount() const { + return impl_.ByteCount(); +} + +OstreamOutputStream::CopyingOstreamOutputStream::CopyingOstreamOutputStream( + ostream* output) + : output_(output) { +} + +OstreamOutputStream::CopyingOstreamOutputStream::~CopyingOstreamOutputStream() { +} + +bool OstreamOutputStream::CopyingOstreamOutputStream::Write( + const void* buffer, int size) { + output_->write(reinterpret_cast(buffer), size); + return output_->good(); +} + +// =================================================================== + +ConcatenatingInputStream::ConcatenatingInputStream( + ZeroCopyInputStream* const streams[], int count) + : streams_(streams), stream_count_(count), bytes_retired_(0) { +} + +ConcatenatingInputStream::~ConcatenatingInputStream() { +} + +bool ConcatenatingInputStream::Next(const void** data, int* size) { + while (stream_count_ > 0) { + if (streams_[0]->Next(data, size)) return true; + + // That stream is done. Advance to the next one. + bytes_retired_ += streams_[0]->ByteCount(); + ++streams_; + --stream_count_; + } + + // No more streams. + return false; +} + +void ConcatenatingInputStream::BackUp(int count) { + if (stream_count_ > 0) { + streams_[0]->BackUp(count); + } else { + GOOGLE_LOG(DFATAL) << "Can't BackUp() after failed Next()."; + } +} + +bool ConcatenatingInputStream::Skip(int count) { + while (stream_count_ > 0) { + // Assume that ByteCount() can be used to find out how much we actually + // skipped when Skip() fails. + int64 target_byte_count = streams_[0]->ByteCount() + count; + if (streams_[0]->Skip(count)) return true; + + // Hit the end of the stream. Figure out how many more bytes we still have + // to skip. + int64 final_byte_count = streams_[0]->ByteCount(); + GOOGLE_DCHECK_LT(final_byte_count, target_byte_count); + count = target_byte_count - final_byte_count; + + // That stream is done. Advance to the next one. 
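A usage sketch for the ConcatenatingInputStream being implemented here, chaining two ArrayInputStreams. It is illustrative only; the sample strings are invented and the vendored headers are assumed to be on the include path.

#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>
#include <iostream>

using google::protobuf::io::ArrayInputStream;
using google::protobuf::io::ConcatenatingInputStream;
using google::protobuf::io::ZeroCopyInputStream;

int main() {
  const char part1[] = "hello ";
  const char part2[] = "world";
  ArrayInputStream stream1(part1, sizeof(part1) - 1);
  ArrayInputStream stream2(part2, sizeof(part2) - 1);

  // The array and the underlying streams must outlive the concatenation.
  ZeroCopyInputStream* streams[] = {&stream1, &stream2};
  ConcatenatingInputStream input(streams, 2);

  const void* buffer;
  int size;
  while (input.Next(&buffer, &size)) {
    std::cout.write(reinterpret_cast<const char*>(buffer), size);
  }
  std::cout << "\n";  // prints "hello world"
  return 0;
}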
+ bytes_retired_ += final_byte_count; + ++streams_; + --stream_count_; + } + + return false; +} + +int64 ConcatenatingInputStream::ByteCount() const { + if (stream_count_ == 0) { + return bytes_retired_; + } else { + return bytes_retired_ + streams_[0]->ByteCount(); + } +} + + +// =================================================================== + +LimitingInputStream::LimitingInputStream(ZeroCopyInputStream* input, + int64 limit) + : input_(input), limit_(limit) {} + +LimitingInputStream::~LimitingInputStream() { + // If we overshot the limit, back up. + if (limit_ < 0) input_->BackUp(-limit_); +} + +bool LimitingInputStream::Next(const void** data, int* size) { + if (limit_ <= 0) return false; + if (!input_->Next(data, size)) return false; + + limit_ -= *size; + if (limit_ < 0) { + // We overshot the limit. Reduce *size to hide the rest of the buffer. + *size += limit_; + } + return true; +} + +void LimitingInputStream::BackUp(int count) { + if (limit_ < 0) { + input_->BackUp(count - limit_); + limit_ = count; + } else { + input_->BackUp(count); + limit_ += count; + } +} + +bool LimitingInputStream::Skip(int count) { + if (count > limit_) { + if (limit_ < 0) return false; + input_->Skip(limit_); + limit_ = 0; + return false; + } else { + if (!input_->Skip(count)) return false; + limit_ -= count; + return true; + } +} + +int64 LimitingInputStream::ByteCount() const { + if (limit_ < 0) { + return input_->ByteCount() + limit_; + } else { + return input_->ByteCount(); + } +} + + +// =================================================================== + +} // namespace io +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream_impl.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream_impl.h new file mode 100644 index 0000000000..9fedb00576 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream_impl.h @@ -0,0 +1,357 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This file contains common implementations of the interfaces defined in +// zero_copy_stream.h which are only included in the full (non-lite) +// protobuf library. These implementations include Unix file descriptors +// and C++ iostreams. See also: zero_copy_stream_impl_lite.h + +#ifndef GOOGLE_PROTOBUF_IO_ZERO_COPY_STREAM_IMPL_H__ +#define GOOGLE_PROTOBUF_IO_ZERO_COPY_STREAM_IMPL_H__ + +#include +#include +#include +#include +#include + + +namespace google { +namespace protobuf { +namespace io { + + +// =================================================================== + +// A ZeroCopyInputStream which reads from a file descriptor. +// +// FileInputStream is preferred over using an ifstream with IstreamInputStream. +// The latter will introduce an extra layer of buffering, harming performance. +// Also, it's conceivable that FileInputStream could someday be enhanced +// to use zero-copy file descriptors on OSs which support them. +class LIBPROTOBUF_EXPORT FileInputStream : public ZeroCopyInputStream { + public: + // Creates a stream that reads from the given Unix file descriptor. + // If a block_size is given, it specifies the number of bytes that + // should be read and returned with each call to Next(). Otherwise, + // a reasonable default is used. + explicit FileInputStream(int file_descriptor, int block_size = -1); + ~FileInputStream(); + + // Flushes any buffers and closes the underlying file. Returns false if + // an error occurs during the process; use GetErrno() to examine the error. + // Even if an error occurs, the file descriptor is closed when this returns. + bool Close(); + + // By default, the file descriptor is not closed when the stream is + // destroyed. Call SetCloseOnDelete(true) to change that. WARNING: + // This leaves no way for the caller to detect if close() fails. If + // detecting close() errors is important to you, you should arrange + // to close the descriptor yourself. + void SetCloseOnDelete(bool value) { copying_input_.SetCloseOnDelete(value); } + + // If an I/O error has occurred on this file descriptor, this is the + // errno from that error. Otherwise, this is zero. Once an error + // occurs, the stream is broken and all subsequent operations will + // fail. 
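A minimal sketch of reading through the FileInputStream declared here, including SetCloseOnDelete() and the GetErrno() check described above. It is illustrative only; "myfile" is a placeholder path and the snippet assumes a POSIX environment.

#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <fcntl.h>
#include <cstring>
#include <iostream>

using google::protobuf::io::FileInputStream;

int main() {
  int fd = open("myfile", O_RDONLY);  // placeholder path
  if (fd < 0) return 1;

  FileInputStream input(fd);
  input.SetCloseOnDelete(true);  // close(fd) when `input` is destroyed

  const void* buffer;
  int size;
  while (input.Next(&buffer, &size)) {
    std::cout.write(reinterpret_cast<const char*>(buffer), size);
  }

  if (input.GetErrno() != 0) {
    std::cerr << "read failed: " << strerror(input.GetErrno()) << "\n";
    return 1;
  }
  return 0;
}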
+ int GetErrno() { return copying_input_.GetErrno(); } + + // implements ZeroCopyInputStream ---------------------------------- + bool Next(const void** data, int* size); + void BackUp(int count); + bool Skip(int count); + int64 ByteCount() const; + + private: + class LIBPROTOBUF_EXPORT CopyingFileInputStream : public CopyingInputStream { + public: + CopyingFileInputStream(int file_descriptor); + ~CopyingFileInputStream(); + + bool Close(); + void SetCloseOnDelete(bool value) { close_on_delete_ = value; } + int GetErrno() { return errno_; } + + // implements CopyingInputStream --------------------------------- + int Read(void* buffer, int size); + int Skip(int count); + + private: + // The file descriptor. + const int file_; + bool close_on_delete_; + bool is_closed_; + + // The errno of the I/O error, if one has occurred. Otherwise, zero. + int errno_; + + // Did we try to seek once and fail? If so, we assume this file descriptor + // doesn't support seeking and won't try again. + bool previous_seek_failed_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(CopyingFileInputStream); + }; + + CopyingFileInputStream copying_input_; + CopyingInputStreamAdaptor impl_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FileInputStream); +}; + +// =================================================================== + +// A ZeroCopyOutputStream which writes to a file descriptor. +// +// FileOutputStream is preferred over using an ofstream with +// OstreamOutputStream. The latter will introduce an extra layer of buffering, +// harming performance. Also, it's conceivable that FileOutputStream could +// someday be enhanced to use zero-copy file descriptors on OSs which +// support them. +class LIBPROTOBUF_EXPORT FileOutputStream : public ZeroCopyOutputStream { + public: + // Creates a stream that writes to the given Unix file descriptor. + // If a block_size is given, it specifies the size of the buffers + // that should be returned by Next(). Otherwise, a reasonable default + // is used. + explicit FileOutputStream(int file_descriptor, int block_size = -1); + ~FileOutputStream(); + + // Flushes any buffers and closes the underlying file. Returns false if + // an error occurs during the process; use GetErrno() to examine the error. + // Even if an error occurs, the file descriptor is closed when this returns. + bool Close(); + + // Flushes FileOutputStream's buffers but does not close the + // underlying file. No special measures are taken to ensure that + // underlying operating system file object is synchronized to disk. + bool Flush(); + + // By default, the file descriptor is not closed when the stream is + // destroyed. Call SetCloseOnDelete(true) to change that. WARNING: + // This leaves no way for the caller to detect if close() fails. If + // detecting close() errors is important to you, you should arrange + // to close the descriptor yourself. + void SetCloseOnDelete(bool value) { copying_output_.SetCloseOnDelete(value); } + + // If an I/O error has occurred on this file descriptor, this is the + // errno from that error. Otherwise, this is zero. Once an error + // occurs, the stream is broken and all subsequent operations will + // fail. 
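The matching write-side sketch for the FileOutputStream declared above: obtain a buffer with Next(), give back the unused tail with BackUp(), then Close() to flush. It is illustrative only; "outfile" is a placeholder path and error handling is reduced to a GetErrno() check.

#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <fcntl.h>
#include <cstring>
#include <iostream>

using google::protobuf::io::FileOutputStream;

int main() {
  int fd = open("outfile", O_WRONLY | O_CREAT | O_TRUNC, 0644);  // placeholder
  if (fd < 0) return 1;

  FileOutputStream output(fd);

  const char msg[] = "hello from FileOutputStream\n";
  void* buffer;
  int size;
  if (output.Next(&buffer, &size)) {
    int n = static_cast<int>(sizeof(msg) - 1);
    if (n > size) n = size;   // keep the sketch simple
    memcpy(buffer, msg, n);
    output.BackUp(size - n);  // don't emit the unused tail of the buffer
  }

  // Close() flushes the internal buffer and closes fd; check for errors.
  if (!output.Close()) {
    std::cerr << "write failed: " << strerror(output.GetErrno()) << "\n";
    return 1;
  }
  return 0;
}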
+ int GetErrno() { return copying_output_.GetErrno(); } + + // implements ZeroCopyOutputStream --------------------------------- + bool Next(void** data, int* size); + void BackUp(int count); + int64 ByteCount() const; + + private: + class LIBPROTOBUF_EXPORT CopyingFileOutputStream : public CopyingOutputStream { + public: + CopyingFileOutputStream(int file_descriptor); + ~CopyingFileOutputStream(); + + bool Close(); + void SetCloseOnDelete(bool value) { close_on_delete_ = value; } + int GetErrno() { return errno_; } + + // implements CopyingOutputStream -------------------------------- + bool Write(const void* buffer, int size); + + private: + // The file descriptor. + const int file_; + bool close_on_delete_; + bool is_closed_; + + // The errno of the I/O error, if one has occurred. Otherwise, zero. + int errno_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(CopyingFileOutputStream); + }; + + CopyingFileOutputStream copying_output_; + CopyingOutputStreamAdaptor impl_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FileOutputStream); +}; + +// =================================================================== + +// A ZeroCopyInputStream which reads from a C++ istream. +// +// Note that for reading files (or anything represented by a file descriptor), +// FileInputStream is more efficient. +class LIBPROTOBUF_EXPORT IstreamInputStream : public ZeroCopyInputStream { + public: + // Creates a stream that reads from the given C++ istream. + // If a block_size is given, it specifies the number of bytes that + // should be read and returned with each call to Next(). Otherwise, + // a reasonable default is used. + explicit IstreamInputStream(istream* stream, int block_size = -1); + ~IstreamInputStream(); + + // implements ZeroCopyInputStream ---------------------------------- + bool Next(const void** data, int* size); + void BackUp(int count); + bool Skip(int count); + int64 ByteCount() const; + + private: + class LIBPROTOBUF_EXPORT CopyingIstreamInputStream : public CopyingInputStream { + public: + CopyingIstreamInputStream(istream* input); + ~CopyingIstreamInputStream(); + + // implements CopyingInputStream --------------------------------- + int Read(void* buffer, int size); + // (We use the default implementation of Skip().) + + private: + // The stream. + istream* input_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(CopyingIstreamInputStream); + }; + + CopyingIstreamInputStream copying_input_; + CopyingInputStreamAdaptor impl_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(IstreamInputStream); +}; + +// =================================================================== + +// A ZeroCopyOutputStream which writes to a C++ ostream. +// +// Note that for writing files (or anything represented by a file descriptor), +// FileOutputStream is more efficient. +class LIBPROTOBUF_EXPORT OstreamOutputStream : public ZeroCopyOutputStream { + public: + // Creates a stream that writes to the given C++ ostream. + // If a block_size is given, it specifies the size of the buffers + // that should be returned by Next(). Otherwise, a reasonable default + // is used. 
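A sketch of the iostream adaptors declared here, copying from an IstreamInputStream into an OstreamOutputStream backed by string streams. It is illustrative only; as the comments above note, FileInputStream/FileOutputStream are the better choice when a file descriptor is available.

#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <cstring>
#include <iostream>
#include <sstream>

using google::protobuf::io::IstreamInputStream;
using google::protobuf::io::OstreamOutputStream;

int main() {
  std::istringstream source("data passed through iostreams");
  std::ostringstream sink;

  IstreamInputStream input(&source);
  {
    // Scope the output stream so its destructor flushes into `sink`.
    OstreamOutputStream output(&sink);

    const void* in_buf;
    int in_size;
    void* out_buf;
    int out_size;
    while (input.Next(&in_buf, &in_size)) {
      if (!output.Next(&out_buf, &out_size)) break;
      int n = in_size < out_size ? in_size : out_size;
      memcpy(out_buf, in_buf, n);
      output.BackUp(out_size - n);  // unused output space
      input.BackUp(in_size - n);    // uncopied input bytes, if any
    }
  }
  std::cout << sink.str() << "\n";
  return 0;
}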
+ explicit OstreamOutputStream(ostream* stream, int block_size = -1); + ~OstreamOutputStream(); + + // implements ZeroCopyOutputStream --------------------------------- + bool Next(void** data, int* size); + void BackUp(int count); + int64 ByteCount() const; + + private: + class LIBPROTOBUF_EXPORT CopyingOstreamOutputStream : public CopyingOutputStream { + public: + CopyingOstreamOutputStream(ostream* output); + ~CopyingOstreamOutputStream(); + + // implements CopyingOutputStream -------------------------------- + bool Write(const void* buffer, int size); + + private: + // The stream. + ostream* output_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(CopyingOstreamOutputStream); + }; + + CopyingOstreamOutputStream copying_output_; + CopyingOutputStreamAdaptor impl_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(OstreamOutputStream); +}; + +// =================================================================== + +// A ZeroCopyInputStream which reads from several other streams in sequence. +// ConcatenatingInputStream is unable to distinguish between end-of-stream +// and read errors in the underlying streams, so it assumes any errors mean +// end-of-stream. So, if the underlying streams fail for any other reason, +// ConcatenatingInputStream may do odd things. It is suggested that you do +// not use ConcatenatingInputStream on streams that might produce read errors +// other than end-of-stream. +class LIBPROTOBUF_EXPORT ConcatenatingInputStream : public ZeroCopyInputStream { + public: + // All streams passed in as well as the array itself must remain valid + // until the ConcatenatingInputStream is destroyed. + ConcatenatingInputStream(ZeroCopyInputStream* const streams[], int count); + ~ConcatenatingInputStream(); + + // implements ZeroCopyInputStream ---------------------------------- + bool Next(const void** data, int* size); + void BackUp(int count); + bool Skip(int count); + int64 ByteCount() const; + + + private: + // As streams are retired, streams_ is incremented and count_ is + // decremented. + ZeroCopyInputStream* const* streams_; + int stream_count_; + int64 bytes_retired_; // Bytes read from previous streams. + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ConcatenatingInputStream); +}; + +// =================================================================== + +// A ZeroCopyInputStream which wraps some other stream and limits it to +// a particular byte count. +class LIBPROTOBUF_EXPORT LimitingInputStream : public ZeroCopyInputStream { + public: + LimitingInputStream(ZeroCopyInputStream* input, int64 limit); + ~LimitingInputStream(); + + // implements ZeroCopyInputStream ---------------------------------- + bool Next(const void** data, int* size); + void BackUp(int count); + bool Skip(int count); + int64 ByteCount() const; + + + private: + ZeroCopyInputStream* input_; + int64 limit_; // Decreases as we go, becomes negative if we overshoot. 
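Finally, a sketch of LimitingInputStream as declared above: it exposes only the first `limit` bytes of the wrapped stream and, on destruction, backs the underlying stream up to the limit boundary. It is illustrative only; the data and the limit of 4 are invented.

#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>
#include <iostream>

using google::protobuf::io::ArrayInputStream;
using google::protobuf::io::LimitingInputStream;

int main() {
  const char data[] = "0123456789";
  ArrayInputStream raw(data, sizeof(data) - 1);

  // Expose only the first 4 bytes of `raw` to whoever reads `limited`.
  LimitingInputStream limited(&raw, 4);

  const void* buffer;
  int size;
  while (limited.Next(&buffer, &size)) {
    std::cout.write(reinterpret_cast<const char*>(buffer), size);
  }
  std::cout << "\n";  // prints "0123"

  // When `limited` is destroyed it backs `raw` up past the limit, so the
  // underlying stream resumes at byte 4.
  return 0;
}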
+ + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(LimitingInputStream); +}; + +// =================================================================== + +} // namespace io +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_IO_ZERO_COPY_STREAM_IMPL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream_impl_lite.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream_impl_lite.cc new file mode 100644 index 0000000000..e80125109f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream_impl_lite.cc @@ -0,0 +1,393 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace io { + +namespace { + +// Default block size for Copying{In,Out}putStreamAdaptor. +static const int kDefaultBlockSize = 8192; + +} // namespace + +// =================================================================== + +ArrayInputStream::ArrayInputStream(const void* data, int size, + int block_size) + : data_(reinterpret_cast(data)), + size_(size), + block_size_(block_size > 0 ? block_size : size), + position_(0), + last_returned_size_(0) { +} + +ArrayInputStream::~ArrayInputStream() { +} + +bool ArrayInputStream::Next(const void** data, int* size) { + if (position_ < size_) { + last_returned_size_ = min(block_size_, size_ - position_); + *data = data_ + position_; + *size = last_returned_size_; + position_ += last_returned_size_; + return true; + } else { + // We're at the end of the array. + last_returned_size_ = 0; // Don't let caller back up. 
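The Next() implementation above hands out at most block_size bytes per call. A small editorial sketch of driving that contract by hand: the 4-byte block size is only to make the chunking visible, and production code would normally leave it at the default.

#include <string>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>

std::string ReadAll(const char* data, int size) {
  google::protobuf::io::ArrayInputStream input(data, size, /* block_size = */ 4);

  std::string result;
  const void* block;
  int block_size;
  while (input.Next(&block, &block_size)) {
    // Each iteration sees at most 4 bytes of the original buffer.
    result.append(reinterpret_cast<const char*>(block), block_size);
  }
  // input.ByteCount() now equals size, and result holds a copy of the buffer.
  return result;
}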
+ return false; + } +} + +void ArrayInputStream::BackUp(int count) { + GOOGLE_CHECK_GT(last_returned_size_, 0) + << "BackUp() can only be called after a successful Next()."; + GOOGLE_CHECK_LE(count, last_returned_size_); + GOOGLE_CHECK_GE(count, 0); + position_ -= count; + last_returned_size_ = 0; // Don't let caller back up further. +} + +bool ArrayInputStream::Skip(int count) { + GOOGLE_CHECK_GE(count, 0); + last_returned_size_ = 0; // Don't let caller back up. + if (count > size_ - position_) { + position_ = size_; + return false; + } else { + position_ += count; + return true; + } +} + +int64 ArrayInputStream::ByteCount() const { + return position_; +} + + +// =================================================================== + +ArrayOutputStream::ArrayOutputStream(void* data, int size, int block_size) + : data_(reinterpret_cast(data)), + size_(size), + block_size_(block_size > 0 ? block_size : size), + position_(0), + last_returned_size_(0) { +} + +ArrayOutputStream::~ArrayOutputStream() { +} + +bool ArrayOutputStream::Next(void** data, int* size) { + if (position_ < size_) { + last_returned_size_ = min(block_size_, size_ - position_); + *data = data_ + position_; + *size = last_returned_size_; + position_ += last_returned_size_; + return true; + } else { + // We're at the end of the array. + last_returned_size_ = 0; // Don't let caller back up. + return false; + } +} + +void ArrayOutputStream::BackUp(int count) { + GOOGLE_CHECK_GT(last_returned_size_, 0) + << "BackUp() can only be called after a successful Next()."; + GOOGLE_CHECK_LE(count, last_returned_size_); + GOOGLE_CHECK_GE(count, 0); + position_ -= count; + last_returned_size_ = 0; // Don't let caller back up further. +} + +int64 ArrayOutputStream::ByteCount() const { + return position_; +} + +// =================================================================== + +StringOutputStream::StringOutputStream(string* target) + : target_(target) { +} + +StringOutputStream::~StringOutputStream() { +} + +bool StringOutputStream::Next(void** data, int* size) { + int old_size = target_->size(); + + // Grow the string. + if (old_size < target_->capacity()) { + // Resize the string to match its capacity, since we can get away + // without a memory allocation this way. + STLStringResizeUninitialized(target_, target_->capacity()); + } else { + // Size has reached capacity, so double the size. Also make sure + // that the new size is at least kMinimumSize. + STLStringResizeUninitialized( + target_, + max(old_size * 2, + kMinimumSize + 0)); // "+ 0" works around GCC4 weirdness. + } + + *data = string_as_array(target_) + old_size; + *size = target_->size() - old_size; + return true; +} + +void StringOutputStream::BackUp(int count) { + GOOGLE_CHECK_GE(count, 0); + GOOGLE_CHECK_LE(count, target_->size()); + target_->resize(target_->size() - count); +} + +int64 StringOutputStream::ByteCount() const { + return target_->size(); +} + +// =================================================================== + +CopyingInputStream::~CopyingInputStream() {} + +int CopyingInputStream::Skip(int count) { + char junk[4096]; + int skipped = 0; + while (skipped < count) { + int bytes = Read(junk, min(count - skipped, + implicit_cast(sizeof(junk)))); + if (bytes <= 0) { + // EOF or read error. 
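Given the StringOutputStream::Next() growth strategy shown above (resize to capacity, then double), appending raw bytes to a std::string follows the usual Next()/BackUp() discipline. A minimal sketch, not part of the patch:

#include <string>
#include <cstring>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>

void AppendBytes(std::string* target, const void* bytes, int count) {
  google::protobuf::io::StringOutputStream output(target);

  const char* in = reinterpret_cast<const char*>(bytes);
  while (count > 0) {
    void* block;
    int block_size;
    if (!output.Next(&block, &block_size)) return;  // Never fails for strings.
    int n = block_size < count ? block_size : count;
    memcpy(block, in, n);
    in += n;
    count -= n;
    if (n < block_size) {
      output.BackUp(block_size - n);  // Trim the string back to what was written.
    }
  }
}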
+ return skipped; + } + skipped += bytes; + } + return skipped; +} + +CopyingInputStreamAdaptor::CopyingInputStreamAdaptor( + CopyingInputStream* copying_stream, int block_size) + : copying_stream_(copying_stream), + owns_copying_stream_(false), + failed_(false), + position_(0), + buffer_size_(block_size > 0 ? block_size : kDefaultBlockSize), + buffer_used_(0), + backup_bytes_(0) { +} + +CopyingInputStreamAdaptor::~CopyingInputStreamAdaptor() { + if (owns_copying_stream_) { + delete copying_stream_; + } +} + +bool CopyingInputStreamAdaptor::Next(const void** data, int* size) { + if (failed_) { + // Already failed on a previous read. + return false; + } + + AllocateBufferIfNeeded(); + + if (backup_bytes_ > 0) { + // We have data left over from a previous BackUp(), so just return that. + *data = buffer_.get() + buffer_used_ - backup_bytes_; + *size = backup_bytes_; + backup_bytes_ = 0; + return true; + } + + // Read new data into the buffer. + buffer_used_ = copying_stream_->Read(buffer_.get(), buffer_size_); + if (buffer_used_ <= 0) { + // EOF or read error. We don't need the buffer anymore. + if (buffer_used_ < 0) { + // Read error (not EOF). + failed_ = true; + } + FreeBuffer(); + return false; + } + position_ += buffer_used_; + + *size = buffer_used_; + *data = buffer_.get(); + return true; +} + +void CopyingInputStreamAdaptor::BackUp(int count) { + GOOGLE_CHECK(backup_bytes_ == 0 && buffer_.get() != NULL) + << " BackUp() can only be called after Next()."; + GOOGLE_CHECK_LE(count, buffer_used_) + << " Can't back up over more bytes than were returned by the last call" + " to Next()."; + GOOGLE_CHECK_GE(count, 0) + << " Parameter to BackUp() can't be negative."; + + backup_bytes_ = count; +} + +bool CopyingInputStreamAdaptor::Skip(int count) { + GOOGLE_CHECK_GE(count, 0); + + if (failed_) { + // Already failed on a previous read. + return false; + } + + // First skip any bytes left over from a previous BackUp(). + if (backup_bytes_ >= count) { + // We have more data left over than we're trying to skip. Just chop it. + backup_bytes_ -= count; + return true; + } + + count -= backup_bytes_; + backup_bytes_ = 0; + + int skipped = copying_stream_->Skip(count); + position_ += skipped; + return skipped == count; +} + +int64 CopyingInputStreamAdaptor::ByteCount() const { + return position_ - backup_bytes_; +} + +void CopyingInputStreamAdaptor::AllocateBufferIfNeeded() { + if (buffer_.get() == NULL) { + buffer_.reset(new uint8[buffer_size_]); + } +} + +void CopyingInputStreamAdaptor::FreeBuffer() { + GOOGLE_CHECK_EQ(backup_bytes_, 0); + buffer_used_ = 0; + buffer_.reset(); +} + +// =================================================================== + +CopyingOutputStream::~CopyingOutputStream() {} + +CopyingOutputStreamAdaptor::CopyingOutputStreamAdaptor( + CopyingOutputStream* copying_stream, int block_size) + : copying_stream_(copying_stream), + owns_copying_stream_(false), + failed_(false), + position_(0), + buffer_size_(block_size > 0 ? 
block_size : kDefaultBlockSize), + buffer_used_(0) { +} + +CopyingOutputStreamAdaptor::~CopyingOutputStreamAdaptor() { + WriteBuffer(); + if (owns_copying_stream_) { + delete copying_stream_; + } +} + +bool CopyingOutputStreamAdaptor::Flush() { + return WriteBuffer(); +} + +bool CopyingOutputStreamAdaptor::Next(void** data, int* size) { + if (buffer_used_ == buffer_size_) { + if (!WriteBuffer()) return false; + } + + AllocateBufferIfNeeded(); + + *data = buffer_.get() + buffer_used_; + *size = buffer_size_ - buffer_used_; + buffer_used_ = buffer_size_; + return true; +} + +void CopyingOutputStreamAdaptor::BackUp(int count) { + GOOGLE_CHECK_GE(count, 0); + GOOGLE_CHECK_EQ(buffer_used_, buffer_size_) + << " BackUp() can only be called after Next()."; + GOOGLE_CHECK_LE(count, buffer_used_) + << " Can't back up over more bytes than were returned by the last call" + " to Next()."; + + buffer_used_ -= count; +} + +int64 CopyingOutputStreamAdaptor::ByteCount() const { + return position_ + buffer_used_; +} + +bool CopyingOutputStreamAdaptor::WriteBuffer() { + if (failed_) { + // Already failed on a previous write. + return false; + } + + if (buffer_used_ == 0) return true; + + if (copying_stream_->Write(buffer_.get(), buffer_used_)) { + position_ += buffer_used_; + buffer_used_ = 0; + return true; + } else { + failed_ = true; + FreeBuffer(); + return false; + } +} + +void CopyingOutputStreamAdaptor::AllocateBufferIfNeeded() { + if (buffer_ == NULL) { + buffer_.reset(new uint8[buffer_size_]); + } +} + +void CopyingOutputStreamAdaptor::FreeBuffer() { + buffer_used_ = 0; + buffer_.reset(); +} + +// =================================================================== + +} // namespace io +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream_impl_lite.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream_impl_lite.h new file mode 100644 index 0000000000..153f543ee4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream_impl_lite.h @@ -0,0 +1,340 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
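The adaptor implemented above buffers writes and only calls the sink's Write() from Flush() or the destructor. As a sketch (not from the patch), a hypothetical CopyingOutputStream that appends to a std::string can be wired up like this; the name StringAppendingOutputStream is invented, and in real code StringOutputStream already covers this case, so the point is only the wiring.

#include <string>
#include <cstring>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>

class StringAppendingOutputStream
    : public google::protobuf::io::CopyingOutputStream {
 public:
  explicit StringAppendingOutputStream(std::string* target) : target_(target) {}

  // implements CopyingOutputStream: copy "size" bytes out of the adaptor's buffer.
  virtual bool Write(const void* buffer, int size) {
    target_->append(reinterpret_cast<const char*>(buffer), size);
    return true;
  }

 private:
  std::string* target_;
};

void WriteGreeting(std::string* target) {
  StringAppendingOutputStream sink(target);
  google::protobuf::io::CopyingOutputStreamAdaptor output(&sink);

  void* block;
  int size;
  if (output.Next(&block, &size)) {
    memcpy(block, "hi", 2);
    output.BackUp(size - 2);   // Only 2 of the block's bytes were used.
  }
  if (!output.Flush()) {
    // The sink reported a write error (cannot happen for this sink).
  }
}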
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This file contains common implementations of the interfaces defined in +// zero_copy_stream.h which are included in the "lite" protobuf library. +// These implementations cover I/O on raw arrays and strings, as well as +// adaptors which make it easy to implement streams based on traditional +// streams. Of course, many users will probably want to write their own +// implementations of these interfaces specific to the particular I/O +// abstractions they prefer to use, but these should cover the most common +// cases. + +#ifndef GOOGLE_PROTOBUF_IO_ZERO_COPY_STREAM_IMPL_LITE_H__ +#define GOOGLE_PROTOBUF_IO_ZERO_COPY_STREAM_IMPL_LITE_H__ + +#include +#include +#include +#include + + +namespace google { +namespace protobuf { +namespace io { + +// =================================================================== + +// A ZeroCopyInputStream backed by an in-memory array of bytes. +class LIBPROTOBUF_EXPORT ArrayInputStream : public ZeroCopyInputStream { + public: + // Create an InputStream that returns the bytes pointed to by "data". + // "data" remains the property of the caller but must remain valid until + // the stream is destroyed. If a block_size is given, calls to Next() + // will return data blocks no larger than the given size. Otherwise, the + // first call to Next() returns the entire array. block_size is mainly + // useful for testing; in production you would probably never want to set + // it. + ArrayInputStream(const void* data, int size, int block_size = -1); + ~ArrayInputStream(); + + // implements ZeroCopyInputStream ---------------------------------- + bool Next(const void** data, int* size); + void BackUp(int count); + bool Skip(int count); + int64 ByteCount() const; + + + private: + const uint8* const data_; // The byte array. + const int size_; // Total size of the array. + const int block_size_; // How many bytes to return at a time. + + int position_; + int last_returned_size_; // How many bytes we returned last time Next() + // was called (used for error checking only). + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ArrayInputStream); +}; + +// =================================================================== + +// A ZeroCopyOutputStream backed by an in-memory array of bytes. +class LIBPROTOBUF_EXPORT ArrayOutputStream : public ZeroCopyOutputStream { + public: + // Create an OutputStream that writes to the bytes pointed to by "data". + // "data" remains the property of the caller but must remain valid until + // the stream is destroyed. If a block_size is given, calls to Next() + // will return data blocks no larger than the given size. Otherwise, the + // first call to Next() returns the entire array. block_size is mainly + // useful for testing; in production you would probably never want to set + // it. 
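A quick editorial sketch of the ArrayOutputStream pattern described here: fill a fixed, caller-owned buffer, give back the unused part of the block with BackUp(), and read the produced length off ByteCount(). The message text is arbitrary.

#include <cstring>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>

int FillBuffer(char* buffer, int capacity) {
  google::protobuf::io::ArrayOutputStream output(buffer, capacity);

  void* block;
  int size;
  if (!output.Next(&block, &size)) {
    return 0;  // capacity was 0.
  }
  static const char kMessage[] = "fixed-size frame";
  int n = (int)sizeof(kMessage) - 1;
  if (n > size) n = size;          // Never write past the block we were given.
  memcpy(block, kMessage, n);
  output.BackUp(size - n);         // Give back the unused part of the block.

  return (int)output.ByteCount();  // Number of bytes now valid in "buffer".
}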
+ ArrayOutputStream(void* data, int size, int block_size = -1); + ~ArrayOutputStream(); + + // implements ZeroCopyOutputStream --------------------------------- + bool Next(void** data, int* size); + void BackUp(int count); + int64 ByteCount() const; + + private: + uint8* const data_; // The byte array. + const int size_; // Total size of the array. + const int block_size_; // How many bytes to return at a time. + + int position_; + int last_returned_size_; // How many bytes we returned last time Next() + // was called (used for error checking only). + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ArrayOutputStream); +}; + +// =================================================================== + +// A ZeroCopyOutputStream which appends bytes to a string. +class LIBPROTOBUF_EXPORT StringOutputStream : public ZeroCopyOutputStream { + public: + // Create a StringOutputStream which appends bytes to the given string. + // The string remains property of the caller, but it MUST NOT be accessed + // in any way until the stream is destroyed. + // + // Hint: If you call target->reserve(n) before creating the stream, + // the first call to Next() will return at least n bytes of buffer + // space. + explicit StringOutputStream(string* target); + ~StringOutputStream(); + + // implements ZeroCopyOutputStream --------------------------------- + bool Next(void** data, int* size); + void BackUp(int count); + int64 ByteCount() const; + + private: + static const int kMinimumSize = 16; + + string* target_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(StringOutputStream); +}; + +// Note: There is no StringInputStream. Instead, just create an +// ArrayInputStream as follows: +// ArrayInputStream input(str.data(), str.size()); + +// =================================================================== + +// A generic traditional input stream interface. +// +// Lots of traditional input streams (e.g. file descriptors, C stdio +// streams, and C++ iostreams) expose an interface where every read +// involves copying bytes into a buffer. If you want to take such an +// interface and make a ZeroCopyInputStream based on it, simply implement +// CopyingInputStream and then use CopyingInputStreamAdaptor. +// +// CopyingInputStream implementations should avoid buffering if possible. +// CopyingInputStreamAdaptor does its own buffering and will read data +// in large blocks. +class LIBPROTOBUF_EXPORT CopyingInputStream { + public: + virtual ~CopyingInputStream(); + + // Reads up to "size" bytes into the given buffer. Returns the number of + // bytes read. Read() waits until at least one byte is available, or + // returns zero if no bytes will ever become available (EOF), or -1 if a + // permanent read error occurred. + virtual int Read(void* buffer, int size) = 0; + + // Skips the next "count" bytes of input. Returns the number of bytes + // actually skipped. This will always be exactly equal to "count" unless + // EOF was reached or a permanent read error occurred. + // + // The default implementation just repeatedly calls Read() into a scratch + // buffer. + virtual int Skip(int count); +}; + +// A ZeroCopyInputStream which reads from a CopyingInputStream. This is +// useful for implementing ZeroCopyInputStreams that read from traditional +// streams. Note that this class is not really zero-copy. +// +// If you want to read from file descriptors or C++ istreams, this is +// already implemented for you: use FileInputStream or IstreamInputStream +// respectively. 
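To illustrate the adaptor pattern described above, here is a sketch (not from the patch) of a hypothetical CopyingInputStream that reads out of a std::string, wrapped by CopyingInputStreamAdaptor. The class name StringCopyingInputStream is invented, and for an in-memory string ArrayInputStream is the real answer; the point is only how the Read() contract (0 at EOF, -1 on error) plugs into the adaptor.

#include <string>
#include <cstring>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>

class StringCopyingInputStream
    : public google::protobuf::io::CopyingInputStream {
 public:
  explicit StringCopyingInputStream(const std::string& data)
      : data_(data), position_(0) {}

  // implements CopyingInputStream: copy up to "size" bytes, return 0 at EOF.
  virtual int Read(void* buffer, int size) {
    int available = (int)data_.size() - position_;
    int n = size < available ? size : available;
    memcpy(buffer, data_.data() + position_, n);
    position_ += n;
    return n;
  }
  // The default Skip() (read-and-discard) is good enough here.

 private:
  std::string data_;
  int position_;
};

void ReadThroughAdaptor(const std::string& data) {
  // The adaptor does the block buffering. If the copying stream were
  // heap-allocated, SetOwnsCopyingStream(true) would hand it ownership.
  StringCopyingInputStream source(data);
  google::protobuf::io::CopyingInputStreamAdaptor input(&source);

  const void* block;
  int size;
  while (input.Next(&block, &size)) {
    // Process "size" bytes at "block".
  }
}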
+class LIBPROTOBUF_EXPORT CopyingInputStreamAdaptor : public ZeroCopyInputStream { + public: + // Creates a stream that reads from the given CopyingInputStream. + // If a block_size is given, it specifies the number of bytes that + // should be read and returned with each call to Next(). Otherwise, + // a reasonable default is used. The caller retains ownership of + // copying_stream unless SetOwnsCopyingStream(true) is called. + explicit CopyingInputStreamAdaptor(CopyingInputStream* copying_stream, + int block_size = -1); + ~CopyingInputStreamAdaptor(); + + // Call SetOwnsCopyingStream(true) to tell the CopyingInputStreamAdaptor to + // delete the underlying CopyingInputStream when it is destroyed. + void SetOwnsCopyingStream(bool value) { owns_copying_stream_ = value; } + + // implements ZeroCopyInputStream ---------------------------------- + bool Next(const void** data, int* size); + void BackUp(int count); + bool Skip(int count); + int64 ByteCount() const; + + private: + // Insures that buffer_ is not NULL. + void AllocateBufferIfNeeded(); + // Frees the buffer and resets buffer_used_. + void FreeBuffer(); + + // The underlying copying stream. + CopyingInputStream* copying_stream_; + bool owns_copying_stream_; + + // True if we have seen a permenant error from the underlying stream. + bool failed_; + + // The current position of copying_stream_, relative to the point where + // we started reading. + int64 position_; + + // Data is read into this buffer. It may be NULL if no buffer is currently + // in use. Otherwise, it points to an array of size buffer_size_. + scoped_array buffer_; + const int buffer_size_; + + // Number of valid bytes currently in the buffer (i.e. the size last + // returned by Next()). 0 <= buffer_used_ <= buffer_size_. + int buffer_used_; + + // Number of bytes in the buffer which were backed up over by a call to + // BackUp(). These need to be returned again. + // 0 <= backup_bytes_ <= buffer_used_ + int backup_bytes_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(CopyingInputStreamAdaptor); +}; + +// =================================================================== + +// A generic traditional output stream interface. +// +// Lots of traditional output streams (e.g. file descriptors, C stdio +// streams, and C++ iostreams) expose an interface where every write +// involves copying bytes from a buffer. If you want to take such an +// interface and make a ZeroCopyOutputStream based on it, simply implement +// CopyingOutputStream and then use CopyingOutputStreamAdaptor. +// +// CopyingOutputStream implementations should avoid buffering if possible. +// CopyingOutputStreamAdaptor does its own buffering and will write data +// in large blocks. +class LIBPROTOBUF_EXPORT CopyingOutputStream { + public: + virtual ~CopyingOutputStream(); + + // Writes "size" bytes from the given buffer to the output. Returns true + // if successful, false on a write error. + virtual bool Write(const void* buffer, int size) = 0; +}; + +// A ZeroCopyOutputStream which writes to a CopyingOutputStream. This is +// useful for implementing ZeroCopyOutputStreams that write to traditional +// streams. Note that this class is not really zero-copy. +// +// If you want to write to file descriptors or C++ ostreams, this is +// already implemented for you: use FileOutputStream or OstreamOutputStream +// respectively. +class LIBPROTOBUF_EXPORT CopyingOutputStreamAdaptor : public ZeroCopyOutputStream { + public: + // Creates a stream that writes to the given Unix file descriptor. 
+ // If a block_size is given, it specifies the size of the buffers + // that should be returned by Next(). Otherwise, a reasonable default + // is used. + explicit CopyingOutputStreamAdaptor(CopyingOutputStream* copying_stream, + int block_size = -1); + ~CopyingOutputStreamAdaptor(); + + // Writes all pending data to the underlying stream. Returns false if a + // write error occurred on the underlying stream. (The underlying + // stream itself is not necessarily flushed.) + bool Flush(); + + // Call SetOwnsCopyingStream(true) to tell the CopyingOutputStreamAdaptor to + // delete the underlying CopyingOutputStream when it is destroyed. + void SetOwnsCopyingStream(bool value) { owns_copying_stream_ = value; } + + // implements ZeroCopyOutputStream --------------------------------- + bool Next(void** data, int* size); + void BackUp(int count); + int64 ByteCount() const; + + private: + // Write the current buffer, if it is present. + bool WriteBuffer(); + // Insures that buffer_ is not NULL. + void AllocateBufferIfNeeded(); + // Frees the buffer. + void FreeBuffer(); + + // The underlying copying stream. + CopyingOutputStream* copying_stream_; + bool owns_copying_stream_; + + // True if we have seen a permenant error from the underlying stream. + bool failed_; + + // The current position of copying_stream_, relative to the point where + // we started writing. + int64 position_; + + // Data is written from this buffer. It may be NULL if no buffer is + // currently in use. Otherwise, it points to an array of size buffer_size_. + scoped_array buffer_; + const int buffer_size_; + + // Number of valid bytes currently in the buffer (i.e. the size last + // returned by Next()). When BackUp() is called, we just reduce this. + // 0 <= buffer_used_ <= buffer_size_. + int buffer_used_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(CopyingOutputStreamAdaptor); +}; + +// =================================================================== + +} // namespace io +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_IO_ZERO_COPY_STREAM_IMPL_LITE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream_unittest.cc new file mode 100644 index 0000000000..5196d905b4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/io/zero_copy_stream_unittest.cc @@ -0,0 +1,850 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Testing strategy: For each type of I/O (array, string, file, etc.) we +// create an output stream and write some data to it, then create a +// corresponding input stream to read the same data back and expect it to +// match. When the data is written, it is written in several small chunks +// of varying sizes, with a BackUp() after each chunk. It is read back +// similarly, but with chunks separated at different points. The whole +// process is run with a variety of block sizes for both the input and +// the output. +// +// TODO(kenton): Rewrite this test to bring it up to the standards of all +// the other proto2 tests. May want to wait for gTest to implement +// "parametized tests" so that one set of tests can be used on all the +// implementations. + +#include "config.h" + +#ifdef _MSC_VER +#include +#else +#include +#endif +#include +#include +#include +#include +#include +#include + +#include +#include + +#if HAVE_ZLIB +#include +#endif + +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace io { +namespace { + +#ifdef _WIN32 +#define pipe(fds) _pipe(fds, 4096, O_BINARY) +#endif + +#ifndef O_BINARY +#ifdef _O_BINARY +#define O_BINARY _O_BINARY +#else +#define O_BINARY 0 // If this isn't defined, the platform doesn't need it. +#endif +#endif + +class IoTest : public testing::Test { + protected: + // Test helpers. + + // Helper to write an array of data to an output stream. + bool WriteToOutput(ZeroCopyOutputStream* output, const void* data, int size); + // Helper to read a fixed-length array of data from an input stream. + int ReadFromInput(ZeroCopyInputStream* input, void* data, int size); + // Write a string to the output stream. + void WriteString(ZeroCopyOutputStream* output, const string& str); + // Read a number of bytes equal to the size of the given string and checks + // that it matches the string. + void ReadString(ZeroCopyInputStream* input, const string& str); + // Writes some text to the output stream in a particular order. Returns + // the number of bytes written, incase the caller needs that to set up an + // input stream. + int WriteStuff(ZeroCopyOutputStream* output); + // Reads text from an input stream and expects it to match what + // WriteStuff() writes. + void ReadStuff(ZeroCopyInputStream* input); + + // Similar to WriteStuff, but performs more sophisticated testing. + int WriteStuffLarge(ZeroCopyOutputStream* output); + // Reads and tests a stream that should have been written to + // via WriteStuffLarge(). 
+ void ReadStuffLarge(ZeroCopyInputStream* input); + +#if HAVE_ZLIB + string Compress(const string& data, const GzipOutputStream::Options& options); + string Uncompress(const string& data); +#endif + + static const int kBlockSizes[]; + static const int kBlockSizeCount; +}; + +const int IoTest::kBlockSizes[] = {-1, 1, 2, 5, 7, 10, 23, 64}; +const int IoTest::kBlockSizeCount = GOOGLE_ARRAYSIZE(IoTest::kBlockSizes); + +bool IoTest::WriteToOutput(ZeroCopyOutputStream* output, + const void* data, int size) { + const uint8* in = reinterpret_cast(data); + int in_size = size; + + void* out; + int out_size; + + while (true) { + if (!output->Next(&out, &out_size)) { + return false; + } + EXPECT_GT(out_size, 0); + + if (in_size <= out_size) { + memcpy(out, in, in_size); + output->BackUp(out_size - in_size); + return true; + } + + memcpy(out, in, out_size); + in += out_size; + in_size -= out_size; + } +} + +#define MAX_REPEATED_ZEROS 100 + +int IoTest::ReadFromInput(ZeroCopyInputStream* input, void* data, int size) { + uint8* out = reinterpret_cast(data); + int out_size = size; + + const void* in; + int in_size = 0; + + int repeated_zeros = 0; + + while (true) { + if (!input->Next(&in, &in_size)) { + return size - out_size; + } + EXPECT_GT(in_size, -1); + if (in_size == 0) { + repeated_zeros++; + } else { + repeated_zeros = 0; + } + EXPECT_LT(repeated_zeros, MAX_REPEATED_ZEROS); + + if (out_size <= in_size) { + memcpy(out, in, out_size); + if (in_size > out_size) { + input->BackUp(in_size - out_size); + } + return size; // Copied all of it. + } + + memcpy(out, in, in_size); + out += in_size; + out_size -= in_size; + } +} + +void IoTest::WriteString(ZeroCopyOutputStream* output, const string& str) { + EXPECT_TRUE(WriteToOutput(output, str.c_str(), str.size())); +} + +void IoTest::ReadString(ZeroCopyInputStream* input, const string& str) { + scoped_array buffer(new char[str.size() + 1]); + buffer[str.size()] = '\0'; + EXPECT_EQ(ReadFromInput(input, buffer.get(), str.size()), str.size()); + EXPECT_STREQ(str.c_str(), buffer.get()); +} + +int IoTest::WriteStuff(ZeroCopyOutputStream* output) { + WriteString(output, "Hello world!\n"); + WriteString(output, "Some te"); + WriteString(output, "xt. Blah blah."); + WriteString(output, "abcdefg"); + WriteString(output, "01234567890123456789"); + WriteString(output, "foobar"); + + EXPECT_EQ(output->ByteCount(), 68); + + int result = output->ByteCount(); + return result; +} + +// Reads text from an input stream and expects it to match what WriteStuff() +// writes. +void IoTest::ReadStuff(ZeroCopyInputStream* input) { + ReadString(input, "Hello world!\n"); + ReadString(input, "Some text. "); + ReadString(input, "Blah "); + ReadString(input, "blah."); + ReadString(input, "abcdefg"); + EXPECT_TRUE(input->Skip(20)); + ReadString(input, "foo"); + ReadString(input, "bar"); + + EXPECT_EQ(input->ByteCount(), 68); + + uint8 byte; + EXPECT_EQ(ReadFromInput(input, &byte, 1), 0); +} + +int IoTest::WriteStuffLarge(ZeroCopyOutputStream* output) { + WriteString(output, "Hello world!\n"); + WriteString(output, "Some te"); + WriteString(output, "xt. Blah blah."); + WriteString(output, string(100000, 'x')); // A very long string + WriteString(output, string(100000, 'y')); // A very long string + WriteString(output, "01234567890123456789"); + + EXPECT_EQ(output->ByteCount(), 200055); + + int result = output->ByteCount(); + return result; +} + +// Reads text from an input stream and expects it to match what WriteStuff() +// writes. 
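The WriteToOutput()/ReadFromInput() helpers above show the core Next()/BackUp() discipline. The same discipline, applied on both sides at once, gives a generic stream-to-stream copy; a sketch under the interfaces declared in zero_copy_stream.h (not a helper that exists in the patch):

#include <cstring>
#include <google/protobuf/io/zero_copy_stream.h>

bool CopyStream(google::protobuf::io::ZeroCopyInputStream* input,
                google::protobuf::io::ZeroCopyOutputStream* output) {
  const void* in;
  int in_size = 0;
  void* out = NULL;
  int out_size = 0;
  int out_available = 0;   // Unused space left in the current output block.

  while (input->Next(&in, &in_size)) {
    while (in_size > 0) {
      if (out_available == 0) {
        if (!output->Next(&out, &out_size)) return false;
        out_available = out_size;
      }
      int n = in_size < out_available ? in_size : out_available;
      memcpy(static_cast<char*>(out) + (out_size - out_available), in, n);
      in = static_cast<const char*>(in) + n;
      in_size -= n;
      out_available -= n;
    }
  }
  if (out_available > 0) output->BackUp(out_available);  // Return unused tail.
  return true;
}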
+void IoTest::ReadStuffLarge(ZeroCopyInputStream* input) { + ReadString(input, "Hello world!\nSome text. "); + EXPECT_TRUE(input->Skip(5)); + ReadString(input, "blah."); + EXPECT_TRUE(input->Skip(100000 - 10)); + ReadString(input, string(10, 'x') + string(100000 - 20000, 'y')); + EXPECT_TRUE(input->Skip(20000 - 10)); + ReadString(input, "yyyyyyyyyy01234567890123456789"); + + EXPECT_EQ(input->ByteCount(), 200055); + + uint8 byte; + EXPECT_EQ(ReadFromInput(input, &byte, 1), 0); +} + +// =================================================================== + +TEST_F(IoTest, ArrayIo) { + const int kBufferSize = 256; + uint8 buffer[kBufferSize]; + + for (int i = 0; i < kBlockSizeCount; i++) { + for (int j = 0; j < kBlockSizeCount; j++) { + int size; + { + ArrayOutputStream output(buffer, kBufferSize, kBlockSizes[i]); + size = WriteStuff(&output); + } + { + ArrayInputStream input(buffer, size, kBlockSizes[j]); + ReadStuff(&input); + } + } + } +} + +TEST_F(IoTest, TwoSessionWrite) { + // Test that two concatenated write sessions read correctly + + static const char* strA = "0123456789"; + static const char* strB = "WhirledPeas"; + const int kBufferSize = 2*1024; + uint8* buffer = new uint8[kBufferSize]; + char* temp_buffer = new char[40]; + + for (int i = 0; i < kBlockSizeCount; i++) { + for (int j = 0; j < kBlockSizeCount; j++) { + ArrayOutputStream* output = + new ArrayOutputStream(buffer, kBufferSize, kBlockSizes[i]); + CodedOutputStream* coded_output = new CodedOutputStream(output); + coded_output->WriteVarint32(strlen(strA)); + coded_output->WriteRaw(strA, strlen(strA)); + delete coded_output; // flush + int64 pos = output->ByteCount(); + delete output; + output = new ArrayOutputStream( + buffer + pos, kBufferSize - pos, kBlockSizes[i]); + coded_output = new CodedOutputStream(output); + coded_output->WriteVarint32(strlen(strB)); + coded_output->WriteRaw(strB, strlen(strB)); + delete coded_output; // flush + int64 size = pos + output->ByteCount(); + delete output; + + ArrayInputStream* input = + new ArrayInputStream(buffer, size, kBlockSizes[j]); + CodedInputStream* coded_input = new CodedInputStream(input); + uint32 insize; + EXPECT_TRUE(coded_input->ReadVarint32(&insize)); + EXPECT_EQ(strlen(strA), insize); + EXPECT_TRUE(coded_input->ReadRaw(temp_buffer, insize)); + EXPECT_EQ(0, memcmp(temp_buffer, strA, insize)); + + EXPECT_TRUE(coded_input->ReadVarint32(&insize)); + EXPECT_EQ(strlen(strB), insize); + EXPECT_TRUE(coded_input->ReadRaw(temp_buffer, insize)); + EXPECT_EQ(0, memcmp(temp_buffer, strB, insize)); + + delete coded_input; + delete input; + } + } + + delete [] temp_buffer; + delete [] buffer; +} + +#if HAVE_ZLIB +TEST_F(IoTest, GzipIo) { + const int kBufferSize = 2*1024; + uint8* buffer = new uint8[kBufferSize]; + for (int i = 0; i < kBlockSizeCount; i++) { + for (int j = 0; j < kBlockSizeCount; j++) { + for (int z = 0; z < kBlockSizeCount; z++) { + int gzip_buffer_size = kBlockSizes[z]; + int size; + { + ArrayOutputStream output(buffer, kBufferSize, kBlockSizes[i]); + GzipOutputStream::Options options; + options.format = GzipOutputStream::GZIP; + if (gzip_buffer_size != -1) { + options.buffer_size = gzip_buffer_size; + } + GzipOutputStream gzout(&output, options); + WriteStuff(&gzout); + gzout.Close(); + size = output.ByteCount(); + } + { + ArrayInputStream input(buffer, size, kBlockSizes[j]); + GzipInputStream gzin( + &input, GzipInputStream::GZIP, gzip_buffer_size); + ReadStuff(&gzin); + } + } + } + } + delete [] buffer; +} + +TEST_F(IoTest, ZlibIo) { + const int kBufferSize = 
2*1024; + uint8* buffer = new uint8[kBufferSize]; + for (int i = 0; i < kBlockSizeCount; i++) { + for (int j = 0; j < kBlockSizeCount; j++) { + for (int z = 0; z < kBlockSizeCount; z++) { + int gzip_buffer_size = kBlockSizes[z]; + int size; + { + ArrayOutputStream output(buffer, kBufferSize, kBlockSizes[i]); + GzipOutputStream::Options options; + options.format = GzipOutputStream::ZLIB; + if (gzip_buffer_size != -1) { + options.buffer_size = gzip_buffer_size; + } + GzipOutputStream gzout(&output, options); + WriteStuff(&gzout); + gzout.Close(); + size = output.ByteCount(); + } + { + ArrayInputStream input(buffer, size, kBlockSizes[j]); + GzipInputStream gzin( + &input, GzipInputStream::ZLIB, gzip_buffer_size); + ReadStuff(&gzin); + } + } + } + } + delete [] buffer; +} + +TEST_F(IoTest, ZlibIoInputAutodetect) { + const int kBufferSize = 2*1024; + uint8* buffer = new uint8[kBufferSize]; + int size; + { + ArrayOutputStream output(buffer, kBufferSize); + GzipOutputStream::Options options; + options.format = GzipOutputStream::ZLIB; + GzipOutputStream gzout(&output, options); + WriteStuff(&gzout); + gzout.Close(); + size = output.ByteCount(); + } + { + ArrayInputStream input(buffer, size); + GzipInputStream gzin(&input, GzipInputStream::AUTO); + ReadStuff(&gzin); + } + { + ArrayOutputStream output(buffer, kBufferSize); + GzipOutputStream::Options options; + options.format = GzipOutputStream::GZIP; + GzipOutputStream gzout(&output, options); + WriteStuff(&gzout); + gzout.Close(); + size = output.ByteCount(); + } + { + ArrayInputStream input(buffer, size); + GzipInputStream gzin(&input, GzipInputStream::AUTO); + ReadStuff(&gzin); + } + delete [] buffer; +} + +string IoTest::Compress(const string& data, + const GzipOutputStream::Options& options) { + string result; + { + StringOutputStream output(&result); + GzipOutputStream gzout(&output, options); + WriteToOutput(&gzout, data.data(), data.size()); + } + return result; +} + +string IoTest::Uncompress(const string& data) { + string result; + { + ArrayInputStream input(data.data(), data.size()); + GzipInputStream gzin(&input); + const void* buffer; + int size; + while (gzin.Next(&buffer, &size)) { + result.append(reinterpret_cast(buffer), size); + } + } + return result; +} + +TEST_F(IoTest, CompressionOptions) { + // Some ad-hoc testing of compression options. + + string golden; + File::ReadFileToStringOrDie( + TestSourceDir() + "/google/protobuf/testdata/golden_message", + &golden); + + GzipOutputStream::Options options; + string gzip_compressed = Compress(golden, options); + + options.compression_level = 0; + string not_compressed = Compress(golden, options); + + // Try zlib compression for fun. + options = GzipOutputStream::Options(); + options.format = GzipOutputStream::ZLIB; + string zlib_compressed = Compress(golden, options); + + // Uncompressed should be bigger than the original since it should have some + // sort of header. + EXPECT_GT(not_compressed.size(), golden.size()); + + // Higher compression levels should result in smaller sizes. + EXPECT_LT(zlib_compressed.size(), not_compressed.size()); + + // ZLIB format should differ from GZIP format. + EXPECT_TRUE(zlib_compressed != gzip_compressed); + + // Everything should decompress correctly. 
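Outside the test harness, the in-memory gzip round trip used by GzipIo/ZlibIo and the Compress()/Uncompress() helpers reduces to roughly the sketch below. It needs zlib (the tests guard this with HAVE_ZLIB), GzipOutputStream/GzipInputStream come from gzip_stream.h in the same vendored protobuf tree, and the single-block write is a simplification.

#include <string>
#include <cstring>
#include <google/protobuf/io/gzip_stream.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>

std::string GzipCompress(const std::string& data) {
  std::string result;
  {
    google::protobuf::io::StringOutputStream raw(&result);
    google::protobuf::io::GzipOutputStream::Options options;
    options.format = google::protobuf::io::GzipOutputStream::GZIP;
    google::protobuf::io::GzipOutputStream gzip(&raw, options);

    void* block;
    int size;
    if (gzip.Next(&block, &size)) {
      int n = (int)data.size() < size ? (int)data.size() : size;
      memcpy(block, data.data(), n);
      gzip.BackUp(size - n);   // Single-block sketch; loop for large inputs.
    }
    gzip.Close();              // Flushes the gzip trailer into "result".
  }
  return result;
}

std::string GzipUncompress(const std::string& data) {
  google::protobuf::io::ArrayInputStream raw(data.data(), data.size());
  // AUTO sniffs the header, so this also accepts raw-zlib input.
  google::protobuf::io::GzipInputStream gzip(&raw,
      google::protobuf::io::GzipInputStream::AUTO);

  std::string result;
  const void* block;
  int size;
  while (gzip.Next(&block, &size)) {
    result.append(reinterpret_cast<const char*>(block), size);
  }
  return result;
}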
+ EXPECT_TRUE(Uncompress(not_compressed) == golden); + EXPECT_TRUE(Uncompress(gzip_compressed) == golden); + EXPECT_TRUE(Uncompress(zlib_compressed) == golden); +} + +TEST_F(IoTest, TwoSessionWriteGzip) { + // Test that two concatenated gzip streams can be read correctly + + static const char* strA = "0123456789"; + static const char* strB = "QuickBrownFox"; + const int kBufferSize = 2*1024; + uint8* buffer = new uint8[kBufferSize]; + char* temp_buffer = new char[40]; + + for (int i = 0; i < kBlockSizeCount; i++) { + for (int j = 0; j < kBlockSizeCount; j++) { + ArrayOutputStream* output = + new ArrayOutputStream(buffer, kBufferSize, kBlockSizes[i]); + GzipOutputStream* gzout = new GzipOutputStream(output); + CodedOutputStream* coded_output = new CodedOutputStream(gzout); + int32 outlen = strlen(strA) + 1; + coded_output->WriteVarint32(outlen); + coded_output->WriteRaw(strA, outlen); + delete coded_output; // flush + delete gzout; // flush + int64 pos = output->ByteCount(); + delete output; + output = new ArrayOutputStream( + buffer + pos, kBufferSize - pos, kBlockSizes[i]); + gzout = new GzipOutputStream(output); + coded_output = new CodedOutputStream(gzout); + outlen = strlen(strB) + 1; + coded_output->WriteVarint32(outlen); + coded_output->WriteRaw(strB, outlen); + delete coded_output; // flush + delete gzout; // flush + int64 size = pos + output->ByteCount(); + delete output; + + ArrayInputStream* input = + new ArrayInputStream(buffer, size, kBlockSizes[j]); + GzipInputStream* gzin = new GzipInputStream(input); + CodedInputStream* coded_input = new CodedInputStream(gzin); + uint32 insize; + EXPECT_TRUE(coded_input->ReadVarint32(&insize)); + EXPECT_EQ(strlen(strA) + 1, insize); + EXPECT_TRUE(coded_input->ReadRaw(temp_buffer, insize)); + EXPECT_EQ(0, memcmp(temp_buffer, strA, insize)) + << "strA=" << strA << " in=" << temp_buffer; + + EXPECT_TRUE(coded_input->ReadVarint32(&insize)); + EXPECT_EQ(strlen(strB) + 1, insize); + EXPECT_TRUE(coded_input->ReadRaw(temp_buffer, insize)); + EXPECT_EQ(0, memcmp(temp_buffer, strB, insize)) + << " out_block_size=" << kBlockSizes[i] + << " in_block_size=" << kBlockSizes[j] + << " pos=" << pos + << " size=" << size + << " strB=" << strB << " in=" << temp_buffer; + + delete coded_input; + delete gzin; + delete input; + } + } + + delete [] temp_buffer; + delete [] buffer; +} +#endif + +// There is no string input, only string output. Also, it doesn't support +// explicit block sizes. So, we'll only run one test and we'll use +// ArrayInput to read back the results. +TEST_F(IoTest, StringIo) { + string str; + { + StringOutputStream output(&str); + WriteStuff(&output); + } + { + ArrayInputStream input(str.data(), str.size()); + ReadStuff(&input); + } +} + + +// To test files, we create a temporary file, write, read, truncate, repeat. +TEST_F(IoTest, FileIo) { + string filename = TestTempDir() + "/zero_copy_stream_test_file"; + + for (int i = 0; i < kBlockSizeCount; i++) { + for (int j = 0; j < kBlockSizeCount; j++) { + // Make a temporary file. + int file = + open(filename.c_str(), O_RDWR | O_CREAT | O_TRUNC | O_BINARY, 0777); + ASSERT_GE(file, 0); + + { + FileOutputStream output(file, kBlockSizes[i]); + WriteStuff(&output); + EXPECT_EQ(0, output.GetErrno()); + } + + // Rewind. 
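The length-prefixed record pattern exercised by TwoSessionWrite and TwoSessionWriteGzip above boils down to a varint length followed by raw bytes. A sketch of a helper pair (not present in the patch); CodedOutputStream/CodedInputStream come from coded_stream.h in the same vendored protobuf tree.

#include <string>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>

void AppendRecord(std::string* wire, const std::string& payload) {
  google::protobuf::io::StringOutputStream raw(wire);
  google::protobuf::io::CodedOutputStream coded(&raw);
  coded.WriteVarint32(payload.size());             // Length prefix...
  coded.WriteRaw(payload.data(), payload.size());  // ...then the payload bytes.
  // CodedOutputStream's destructor backs unused buffer space out of "raw".
}

bool ReadRecord(google::protobuf::io::CodedInputStream* coded,
                std::string* payload) {
  google::protobuf::uint32 length;
  if (!coded->ReadVarint32(&length)) return false;
  payload->resize(length);
  if (length == 0) return true;
  return coded->ReadRaw(&(*payload)[0], length);
}

ReadRecord() would typically be driven by a CodedInputStream built over an ArrayInputStream covering the accumulated wire string.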
+ ASSERT_NE(lseek(file, 0, SEEK_SET), (off_t)-1); + + { + FileInputStream input(file, kBlockSizes[j]); + ReadStuff(&input); + EXPECT_EQ(0, input.GetErrno()); + } + + close(file); + } + } +} + +#if HAVE_ZLIB +TEST_F(IoTest, GzipFileIo) { + string filename = TestTempDir() + "/zero_copy_stream_test_file"; + + for (int i = 0; i < kBlockSizeCount; i++) { + for (int j = 0; j < kBlockSizeCount; j++) { + // Make a temporary file. + int file = + open(filename.c_str(), O_RDWR | O_CREAT | O_TRUNC | O_BINARY, 0777); + ASSERT_GE(file, 0); + { + FileOutputStream output(file, kBlockSizes[i]); + GzipOutputStream gzout(&output); + WriteStuffLarge(&gzout); + gzout.Close(); + output.Flush(); + EXPECT_EQ(0, output.GetErrno()); + } + + // Rewind. + ASSERT_NE(lseek(file, 0, SEEK_SET), (off_t)-1); + + { + FileInputStream input(file, kBlockSizes[j]); + GzipInputStream gzin(&input); + ReadStuffLarge(&gzin); + EXPECT_EQ(0, input.GetErrno()); + } + + close(file); + } + } +} +#endif + +// MSVC raises various debugging exceptions if we try to use a file +// descriptor of -1, defeating our tests below. This class will disable +// these debug assertions while in scope. +class MsvcDebugDisabler { + public: +#if defined(_MSC_VER) && _MSC_VER >= 1400 + MsvcDebugDisabler() { + old_handler_ = _set_invalid_parameter_handler(MyHandler); + old_mode_ = _CrtSetReportMode(_CRT_ASSERT, 0); + } + ~MsvcDebugDisabler() { + old_handler_ = _set_invalid_parameter_handler(old_handler_); + old_mode_ = _CrtSetReportMode(_CRT_ASSERT, old_mode_); + } + + static void MyHandler(const wchar_t *expr, + const wchar_t *func, + const wchar_t *file, + unsigned int line, + uintptr_t pReserved) { + // do nothing + } + + _invalid_parameter_handler old_handler_; + int old_mode_; +#else + // Dummy constructor and destructor to ensure that GCC doesn't complain + // that debug_disabler is an unused variable. + MsvcDebugDisabler() {} + ~MsvcDebugDisabler() {} +#endif +}; + +// Test that FileInputStreams report errors correctly. +TEST_F(IoTest, FileReadError) { + MsvcDebugDisabler debug_disabler; + + // -1 = invalid file descriptor. + FileInputStream input(-1); + + const void* buffer; + int size; + EXPECT_FALSE(input.Next(&buffer, &size)); + EXPECT_EQ(EBADF, input.GetErrno()); +} + +// Test that FileOutputStreams report errors correctly. +TEST_F(IoTest, FileWriteError) { + MsvcDebugDisabler debug_disabler; + + // -1 = invalid file descriptor. + FileOutputStream input(-1); + + void* buffer; + int size; + + // The first call to Next() succeeds because it doesn't have anything to + // write yet. + EXPECT_TRUE(input.Next(&buffer, &size)); + + // Second call fails. + EXPECT_FALSE(input.Next(&buffer, &size)); + + EXPECT_EQ(EBADF, input.GetErrno()); +} + +// Pipes are not seekable, so File{Input,Output}Stream ends up doing some +// different things to handle them. We'll test by writing to a pipe and +// reading back from it. +TEST_F(IoTest, PipeIo) { + int files[2]; + + for (int i = 0; i < kBlockSizeCount; i++) { + for (int j = 0; j < kBlockSizeCount; j++) { + // Need to create a new pipe each time because ReadStuff() expects + // to see EOF at the end. + ASSERT_EQ(pipe(files), 0); + + { + FileOutputStream output(files[1], kBlockSizes[i]); + WriteStuff(&output); + EXPECT_EQ(0, output.GetErrno()); + } + close(files[1]); // Send EOF. + + { + FileInputStream input(files[0], kBlockSizes[j]); + ReadStuff(&input); + EXPECT_EQ(0, input.GetErrno()); + } + close(files[0]); + } + } +} + +// Test using C++ iostreams. 
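Outside the FileIo/PipeIo tests, application use of the file-descriptor streams follows the same shape: wrap the fd, write via Next()/BackUp(), then check GetErrno() before trusting the result. A POSIX-flavored sketch (not from the patch; the path and greeting are arbitrary):

#include <fcntl.h>
#include <unistd.h>
#include <cstring>
#include <google/protobuf/io/zero_copy_stream_impl.h>

bool WriteGreetingToFile(const char* path) {
  int fd = open(path, O_WRONLY | O_CREAT | O_TRUNC, 0644);
  if (fd < 0) return false;

  bool ok = false;
  {
    google::protobuf::io::FileOutputStream output(fd);
    void* block;
    int size;
    if (output.Next(&block, &size)) {
      memcpy(block, "hello\n", 6);
      output.BackUp(size - 6);
    }
    output.Flush();                 // Push the buffered bytes to the fd.
    ok = (output.GetErrno() == 0);  // 0 means no I/O error was recorded.
  }  // The destructor would also flush; the fd itself is still ours to close.
  return (close(fd) == 0) && ok;
}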
+TEST_F(IoTest, IostreamIo) { + for (int i = 0; i < kBlockSizeCount; i++) { + for (int j = 0; j < kBlockSizeCount; j++) { + { + stringstream stream; + + { + OstreamOutputStream output(&stream, kBlockSizes[i]); + WriteStuff(&output); + EXPECT_FALSE(stream.fail()); + } + + { + IstreamInputStream input(&stream, kBlockSizes[j]); + ReadStuff(&input); + EXPECT_TRUE(stream.eof()); + } + } + + { + stringstream stream; + + { + OstreamOutputStream output(&stream, kBlockSizes[i]); + WriteStuffLarge(&output); + EXPECT_FALSE(stream.fail()); + } + + { + IstreamInputStream input(&stream, kBlockSizes[j]); + ReadStuffLarge(&input); + EXPECT_TRUE(stream.eof()); + } + } + } + } +} + +// To test ConcatenatingInputStream, we create several ArrayInputStreams +// covering a buffer and then concatenate them. +TEST_F(IoTest, ConcatenatingInputStream) { + const int kBufferSize = 256; + uint8 buffer[kBufferSize]; + + // Fill the buffer. + ArrayOutputStream output(buffer, kBufferSize); + WriteStuff(&output); + + // Now split it up into multiple streams of varying sizes. + ASSERT_EQ(68, output.ByteCount()); // Test depends on this. + ArrayInputStream input1(buffer , 12); + ArrayInputStream input2(buffer + 12, 7); + ArrayInputStream input3(buffer + 19, 6); + ArrayInputStream input4(buffer + 25, 15); + ArrayInputStream input5(buffer + 40, 0); + // Note: We want to make sure we have a stream boundary somewhere between + // bytes 42 and 62, which is the range that it Skip()ed by ReadStuff(). This + // tests that a bug that existed in the original code for Skip() is fixed. + ArrayInputStream input6(buffer + 40, 10); + ArrayInputStream input7(buffer + 50, 18); // Total = 68 bytes. + + ZeroCopyInputStream* streams[] = + {&input1, &input2, &input3, &input4, &input5, &input6, &input7}; + + // Create the concatenating stream and read. + ConcatenatingInputStream input(streams, GOOGLE_ARRAYSIZE(streams)); + ReadStuff(&input); +} + +// To test LimitingInputStream, we write our golden text to a buffer, then +// create an ArrayInputStream that contains the whole buffer (not just the +// bytes written), then use a LimitingInputStream to limit it just to the +// bytes written. +TEST_F(IoTest, LimitingInputStream) { + const int kBufferSize = 256; + uint8 buffer[kBufferSize]; + + // Fill the buffer. + ArrayOutputStream output(buffer, kBufferSize); + WriteStuff(&output); + + // Set up input. + ArrayInputStream array_input(buffer, kBufferSize); + LimitingInputStream input(&array_input, output.ByteCount()); + + ReadStuff(&input); +} + +// Check that a zero-size array doesn't confuse the code. +TEST(ZeroSizeArray, Input) { + ArrayInputStream input(NULL, 0); + const void* data; + int size; + EXPECT_FALSE(input.Next(&data, &size)); +} + +TEST(ZeroSizeArray, Output) { + ArrayOutputStream output(NULL, 0); + void* data; + int size; + EXPECT_FALSE(output.Next(&data, &size)); +} + +} // namespace +} // namespace io +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/lite_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/lite_unittest.cc new file mode 100644 index 0000000000..ffeec3c445 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/lite_unittest.cc @@ -0,0 +1,112 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) + +#include +#include + +#include +#include + +using namespace std; + +int main(int argc, char* argv[]) { + string data, packed_data; + + { + protobuf_unittest::TestAllTypesLite message, message2, message3; + google::protobuf::TestUtilLite::ExpectClear(message); + google::protobuf::TestUtilLite::SetAllFields(&message); + message2.CopyFrom(message); + data = message.SerializeAsString(); + message3.ParseFromString(data); + google::protobuf::TestUtilLite::ExpectAllFieldsSet(message); + google::protobuf::TestUtilLite::ExpectAllFieldsSet(message2); + google::protobuf::TestUtilLite::ExpectAllFieldsSet(message3); + google::protobuf::TestUtilLite::ModifyRepeatedFields(&message); + google::protobuf::TestUtilLite::ExpectRepeatedFieldsModified(message); + message.Clear(); + google::protobuf::TestUtilLite::ExpectClear(message); + } + + { + protobuf_unittest::TestAllExtensionsLite message, message2, message3; + google::protobuf::TestUtilLite::ExpectExtensionsClear(message); + google::protobuf::TestUtilLite::SetAllExtensions(&message); + message2.CopyFrom(message); + string extensions_data = message.SerializeAsString(); + GOOGLE_CHECK(extensions_data == data); + message3.ParseFromString(extensions_data); + google::protobuf::TestUtilLite::ExpectAllExtensionsSet(message); + google::protobuf::TestUtilLite::ExpectAllExtensionsSet(message2); + google::protobuf::TestUtilLite::ExpectAllExtensionsSet(message3); + google::protobuf::TestUtilLite::ModifyRepeatedExtensions(&message); + google::protobuf::TestUtilLite::ExpectRepeatedExtensionsModified(message); + message.Clear(); + google::protobuf::TestUtilLite::ExpectExtensionsClear(message); + } + + { + protobuf_unittest::TestPackedTypesLite message, message2, message3; + google::protobuf::TestUtilLite::ExpectPackedClear(message); + google::protobuf::TestUtilLite::SetPackedFields(&message); + message2.CopyFrom(message); + packed_data = message.SerializeAsString(); + 
message3.ParseFromString(packed_data); + google::protobuf::TestUtilLite::ExpectPackedFieldsSet(message); + google::protobuf::TestUtilLite::ExpectPackedFieldsSet(message2); + google::protobuf::TestUtilLite::ExpectPackedFieldsSet(message3); + google::protobuf::TestUtilLite::ModifyPackedFields(&message); + google::protobuf::TestUtilLite::ExpectPackedFieldsModified(message); + message.Clear(); + google::protobuf::TestUtilLite::ExpectPackedClear(message); + } + + { + protobuf_unittest::TestPackedExtensionsLite message, message2, message3; + google::protobuf::TestUtilLite::ExpectPackedExtensionsClear(message); + google::protobuf::TestUtilLite::SetPackedExtensions(&message); + message2.CopyFrom(message); + string packed_extensions_data = message.SerializeAsString(); + GOOGLE_CHECK(packed_extensions_data == packed_data); + message3.ParseFromString(packed_extensions_data); + google::protobuf::TestUtilLite::ExpectPackedExtensionsSet(message); + google::protobuf::TestUtilLite::ExpectPackedExtensionsSet(message2); + google::protobuf::TestUtilLite::ExpectPackedExtensionsSet(message3); + google::protobuf::TestUtilLite::ModifyPackedExtensions(&message); + google::protobuf::TestUtilLite::ExpectPackedExtensionsModified(message); + message.Clear(); + google::protobuf::TestUtilLite::ExpectPackedExtensionsClear(message); + } + + cout << "PASS" << endl; + return 0; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/message.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/message.cc new file mode 100644 index 0000000000..91e6878e84 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/message.cc @@ -0,0 +1,318 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
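Stripped of the test scaffolding, the lite-runtime round trip above is just SerializeAsString()/ParseFromString() on a generated lite class. A sketch against a hypothetical message (imagine a my_message.proto compiled with "option optimize_for = LITE_RUNTIME"; the type MyMessageLite and its header are invented for illustration):

#include <string>
#include "my_message.pb.h"  // hypothetical generated header

bool RoundTrip(const MyMessageLite& original) {
  std::string wire = original.SerializeAsString();

  MyMessageLite copy;
  if (!copy.ParseFromString(wire)) {
    return false;  // Malformed or truncated data.
  }
  // Lite messages carry no descriptors or reflection, so equality is checked
  // here by comparing re-serialized bytes (or field-by-field in real code).
  return copy.SerializeAsString() == wire;
}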
+ +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { + +using internal::WireFormat; +using internal::ReflectionOps; + +Message::~Message() {} + +void Message::MergeFrom(const Message& from) { + const Descriptor* descriptor = GetDescriptor(); + GOOGLE_CHECK_EQ(from.GetDescriptor(), descriptor) + << ": Tried to merge from a message with a different type. " + "to: " << descriptor->full_name() << ", " + "from:" << from.GetDescriptor()->full_name(); + ReflectionOps::Merge(from, this); +} + +void Message::CheckTypeAndMergeFrom(const MessageLite& other) { + MergeFrom(*down_cast(&other)); +} + +void Message::CopyFrom(const Message& from) { + const Descriptor* descriptor = GetDescriptor(); + GOOGLE_CHECK_EQ(from.GetDescriptor(), descriptor) + << ": Tried to copy from a message with a different type." + "to: " << descriptor->full_name() << ", " + "from:" << from.GetDescriptor()->full_name(); + ReflectionOps::Copy(from, this); +} + +string Message::GetTypeName() const { + return GetDescriptor()->full_name(); +} + +void Message::Clear() { + ReflectionOps::Clear(this); +} + +bool Message::IsInitialized() const { + return ReflectionOps::IsInitialized(*this); +} + +void Message::FindInitializationErrors(vector* errors) const { + return ReflectionOps::FindInitializationErrors(*this, "", errors); +} + +string Message::InitializationErrorString() const { + vector errors; + FindInitializationErrors(&errors); + return JoinStrings(errors, ", "); +} + +void Message::CheckInitialized() const { + GOOGLE_CHECK(IsInitialized()) + << "Message of type \"" << GetDescriptor()->full_name() + << "\" is missing required fields: " << InitializationErrorString(); +} + +void Message::DiscardUnknownFields() { + return ReflectionOps::DiscardUnknownFields(this); +} + +bool Message::MergePartialFromCodedStream(io::CodedInputStream* input) { + return WireFormat::ParseAndMergePartial(input, this); +} + +bool Message::ParseFromFileDescriptor(int file_descriptor) { + io::FileInputStream input(file_descriptor); + return ParseFromZeroCopyStream(&input) && input.GetErrno() == 0; +} + +bool Message::ParsePartialFromFileDescriptor(int file_descriptor) { + io::FileInputStream input(file_descriptor); + return ParsePartialFromZeroCopyStream(&input) && input.GetErrno() == 0; +} + +bool Message::ParseFromIstream(istream* input) { + io::IstreamInputStream zero_copy_input(input); + return ParseFromZeroCopyStream(&zero_copy_input) && input->eof(); +} + +bool Message::ParsePartialFromIstream(istream* input) { + io::IstreamInputStream zero_copy_input(input); + return ParsePartialFromZeroCopyStream(&zero_copy_input) && input->eof(); +} + + +void Message::SerializeWithCachedSizes( + io::CodedOutputStream* output) const { + WireFormat::SerializeWithCachedSizes(*this, GetCachedSize(), output); +} + +int Message::ByteSize() const { + int size = WireFormat::ByteSize(*this); + SetCachedSize(size); + return size; +} + +void Message::SetCachedSize(int size) const { + GOOGLE_LOG(FATAL) << "Message class \"" << GetDescriptor()->full_name() + << "\" implements neither SetCachedSize() nor ByteSize(). 
" + "Must implement one or the other."; +} + +int Message::SpaceUsed() const { + return GetReflection()->SpaceUsed(*this); +} + +bool Message::SerializeToFileDescriptor(int file_descriptor) const { + io::FileOutputStream output(file_descriptor); + return SerializeToZeroCopyStream(&output); +} + +bool Message::SerializePartialToFileDescriptor(int file_descriptor) const { + io::FileOutputStream output(file_descriptor); + return SerializePartialToZeroCopyStream(&output); +} + +bool Message::SerializeToOstream(ostream* output) const { + { + io::OstreamOutputStream zero_copy_output(output); + if (!SerializeToZeroCopyStream(&zero_copy_output)) return false; + } + return output->good(); +} + +bool Message::SerializePartialToOstream(ostream* output) const { + io::OstreamOutputStream zero_copy_output(output); + return SerializePartialToZeroCopyStream(&zero_copy_output); +} + + +Reflection::~Reflection() {} + +// =================================================================== +// MessageFactory + +MessageFactory::~MessageFactory() {} + +namespace { + +class GeneratedMessageFactory : public MessageFactory { + public: + GeneratedMessageFactory(); + ~GeneratedMessageFactory(); + + static GeneratedMessageFactory* singleton(); + + typedef void RegistrationFunc(const string&); + void RegisterFile(const char* file, RegistrationFunc* registration_func); + void RegisterType(const Descriptor* descriptor, const Message* prototype); + + // implements MessageFactory --------------------------------------- + const Message* GetPrototype(const Descriptor* type); + + private: + // Only written at static init time, so does not require locking. + hash_map, streq> file_map_; + + // Initialized lazily, so requires locking. + Mutex mutex_; + hash_map type_map_; +}; + +GeneratedMessageFactory* generated_message_factory_ = NULL; +GOOGLE_PROTOBUF_DECLARE_ONCE(generated_message_factory_once_init_); + +void ShutdownGeneratedMessageFactory() { + delete generated_message_factory_; +} + +void InitGeneratedMessageFactory() { + generated_message_factory_ = new GeneratedMessageFactory; + internal::OnShutdown(&ShutdownGeneratedMessageFactory); +} + +GeneratedMessageFactory::GeneratedMessageFactory() {} +GeneratedMessageFactory::~GeneratedMessageFactory() {} + +GeneratedMessageFactory* GeneratedMessageFactory::singleton() { + ::google::protobuf::GoogleOnceInit(&generated_message_factory_once_init_, + &InitGeneratedMessageFactory); + return generated_message_factory_; +} + +void GeneratedMessageFactory::RegisterFile( + const char* file, RegistrationFunc* registration_func) { + if (!InsertIfNotPresent(&file_map_, file, registration_func)) { + GOOGLE_LOG(FATAL) << "File is already registered: " << file; + } +} + +void GeneratedMessageFactory::RegisterType(const Descriptor* descriptor, + const Message* prototype) { + GOOGLE_DCHECK_EQ(descriptor->file()->pool(), DescriptorPool::generated_pool()) + << "Tried to register a non-generated type with the generated " + "type registry."; + + // This should only be called as a result of calling a file registration + // function during GetPrototype(), in which case we already have locked + // the mutex. 
+ mutex_.AssertHeld(); + if (!InsertIfNotPresent(&type_map_, descriptor, prototype)) { + GOOGLE_LOG(DFATAL) << "Type is already registered: " << descriptor->full_name(); + } +} + +const Message* GeneratedMessageFactory::GetPrototype(const Descriptor* type) { + { + ReaderMutexLock lock(&mutex_); + const Message* result = FindPtrOrNull(type_map_, type); + if (result != NULL) return result; + } + + // If the type is not in the generated pool, then we can't possibly handle + // it. + if (type->file()->pool() != DescriptorPool::generated_pool()) return NULL; + + // Apparently the file hasn't been registered yet. Let's do that now. + RegistrationFunc* registration_func = + FindPtrOrNull(file_map_, type->file()->name().c_str()); + if (registration_func == NULL) { + GOOGLE_LOG(DFATAL) << "File appears to be in generated pool but wasn't " + "registered: " << type->file()->name(); + return NULL; + } + + WriterMutexLock lock(&mutex_); + + // Check if another thread preempted us. + const Message* result = FindPtrOrNull(type_map_, type); + if (result == NULL) { + // Nope. OK, register everything. + registration_func(type->file()->name()); + // Should be here now. + result = FindPtrOrNull(type_map_, type); + } + + if (result == NULL) { + GOOGLE_LOG(DFATAL) << "Type appears to be in generated pool but wasn't " + << "registered: " << type->full_name(); + } + + return result; +} + +} // namespace + +MessageFactory* MessageFactory::generated_factory() { + return GeneratedMessageFactory::singleton(); +} + +void MessageFactory::InternalRegisterGeneratedFile( + const char* filename, void (*register_messages)(const string&)) { + GeneratedMessageFactory::singleton()->RegisterFile(filename, + register_messages); +} + +void MessageFactory::InternalRegisterGeneratedMessage( + const Descriptor* descriptor, const Message* prototype) { + GeneratedMessageFactory::singleton()->RegisterType(descriptor, prototype); +} + + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/message.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/message.h new file mode 100644 index 0000000000..4bbc641832 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/message.h @@ -0,0 +1,692 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Defines Message, the abstract interface implemented by non-lite +// protocol message objects. Although it's possible to implement this +// interface manually, most users will use the protocol compiler to +// generate implementations. +// +// Example usage: +// +// Say you have a message defined as: +// +// message Foo { +// optional string text = 1; +// repeated int32 numbers = 2; +// } +// +// Then, if you used the protocol compiler to generate a class from the above +// definition, you could use it like so: +// +// string data; // Will store a serialized version of the message. +// +// { +// // Create a message and serialize it. +// Foo foo; +// foo.set_text("Hello World!"); +// foo.add_numbers(1); +// foo.add_numbers(5); +// foo.add_numbers(42); +// +// foo.SerializeToString(&data); +// } +// +// { +// // Parse the serialized message and check that it contains the +// // correct data. +// Foo foo; +// foo.ParseFromString(data); +// +// assert(foo.text() == "Hello World!"); +// assert(foo.numbers_size() == 3); +// assert(foo.numbers(0) == 1); +// assert(foo.numbers(1) == 5); +// assert(foo.numbers(2) == 42); +// } +// +// { +// // Same as the last block, but do it dynamically via the Message +// // reflection interface. +// Message* foo = new Foo; +// Descriptor* descriptor = foo->GetDescriptor(); +// +// // Get the descriptors for the fields we're interested in and verify +// // their types. +// FieldDescriptor* text_field = descriptor->FindFieldByName("text"); +// assert(text_field != NULL); +// assert(text_field->type() == FieldDescriptor::TYPE_STRING); +// assert(text_field->label() == FieldDescriptor::TYPE_OPTIONAL); +// FieldDescriptor* numbers_field = descriptor->FindFieldByName("numbers"); +// assert(numbers_field != NULL); +// assert(numbers_field->type() == FieldDescriptor::TYPE_INT32); +// assert(numbers_field->label() == FieldDescriptor::TYPE_REPEATED); +// +// // Parse the message. +// foo->ParseFromString(data); +// +// // Use the reflection interface to examine the contents. +// const Reflection* reflection = foo->GetReflection(); +// assert(reflection->GetString(foo, text_field) == "Hello World!"); +// assert(reflection->FieldSize(foo, numbers_field) == 3); +// assert(reflection->GetRepeatedInt32(foo, numbers_field, 0) == 1); +// assert(reflection->GetRepeatedInt32(foo, numbers_field, 1) == 5); +// assert(reflection->GetRepeatedInt32(foo, numbers_field, 2) == 42); +// +// delete foo; +// } + +#ifndef GOOGLE_PROTOBUF_MESSAGE_H__ +#define GOOGLE_PROTOBUF_MESSAGE_H__ + +#include +#include + +#ifdef __DECCXX +// HP C++'s iosfwd doesn't work. +#include +#else +#include +#endif + +#include + +#include + + +namespace google { +namespace protobuf { + +// Defined in this file. +class Message; +class Reflection; +class MessageFactory; + +// Defined in other files. 
+class Descriptor; // descriptor.h +class FieldDescriptor; // descriptor.h +class EnumDescriptor; // descriptor.h +class EnumValueDescriptor; // descriptor.h +namespace io { + class ZeroCopyInputStream; // zero_copy_stream.h + class ZeroCopyOutputStream; // zero_copy_stream.h + class CodedInputStream; // coded_stream.h + class CodedOutputStream; // coded_stream.h +} +class UnknownFieldSet; // unknown_field_set.h + +// A container to hold message metadata. +struct Metadata { + const Descriptor* descriptor; + const Reflection* reflection; +}; + +// Returns the EnumDescriptor for enum type E, which must be a +// proto-declared enum type. Code generated by the protocol compiler +// will include specializations of this template for each enum type declared. +template +const EnumDescriptor* GetEnumDescriptor(); + +// Abstract interface for protocol messages. +// +// See also MessageLite, which contains most every-day operations. Message +// adds descriptors and reflection on top of that. +// +// The methods of this class that are virtual but not pure-virtual have +// default implementations based on reflection. Message classes which are +// optimized for speed will want to override these with faster implementations, +// but classes optimized for code size may be happy with keeping them. See +// the optimize_for option in descriptor.proto. +class LIBPROTOBUF_EXPORT Message : public MessageLite { + public: + inline Message() {} + virtual ~Message(); + + // Basic Operations ------------------------------------------------ + + // Construct a new instance of the same type. Ownership is passed to the + // caller. (This is also defined in MessageLite, but is defined again here + // for return-type covariance.) + virtual Message* New() const = 0; + + // Make this message into a copy of the given message. The given message + // must have the same descriptor, but need not necessarily be the same class. + // By default this is just implemented as "Clear(); MergeFrom(from);". + virtual void CopyFrom(const Message& from); + + // Merge the fields from the given message into this message. Singular + // fields will be overwritten, except for embedded messages which will + // be merged. Repeated fields will be concatenated. The given message + // must be of the same type as this message (i.e. the exact same class). + virtual void MergeFrom(const Message& from); + + // Verifies that IsInitialized() returns true. GOOGLE_CHECK-fails otherwise, with + // a nice error message. + void CheckInitialized() const; + + // Slowly build a list of all required fields that are not set. + // This is much, much slower than IsInitialized() as it is implemented + // purely via reflection. Generally, you should not call this unless you + // have already determined that an error exists by calling IsInitialized(). + void FindInitializationErrors(vector* errors) const; + + // Like FindInitializationErrors, but joins all the strings, delimited by + // commas, and returns them. + string InitializationErrorString() const; + + // Clears all unknown fields from this message and all embedded messages. + // Normally, if unknown tag numbers are encountered when parsing a message, + // the tag and value are stored in the message's UnknownFieldSet and + // then written back out when the message is serialized. This allows servers + // which simply route messages to other servers to pass through messages + // that have new field definitions which they don't yet know about. However, + // this behavior can have security implications. 
To avoid it, call this + // method after parsing. + // + // See Reflection::GetUnknownFields() for more on unknown fields. + virtual void DiscardUnknownFields(); + + // Computes (an estimate of) the total number of bytes currently used for + // storing the message in memory. The default implementation calls the + // Reflection object's SpaceUsed() method. + virtual int SpaceUsed() const; + + // Debugging & Testing---------------------------------------------- + + // Generates a human readable form of this message, useful for debugging + // and other purposes. + string DebugString() const; + // Like DebugString(), but with less whitespace. + string ShortDebugString() const; + // Like DebugString(), but do not escape UTF-8 byte sequences. + string Utf8DebugString() const; + // Convenience function useful in GDB. Prints DebugString() to stdout. + void PrintDebugString() const; + + // Heavy I/O ------------------------------------------------------- + // Additional parsing and serialization methods not implemented by + // MessageLite because they are not supported by the lite library. + + // Parse a protocol buffer from a file descriptor. If successful, the entire + // input will be consumed. + bool ParseFromFileDescriptor(int file_descriptor); + // Like ParseFromFileDescriptor(), but accepts messages that are missing + // required fields. + bool ParsePartialFromFileDescriptor(int file_descriptor); + // Parse a protocol buffer from a C++ istream. If successful, the entire + // input will be consumed. + bool ParseFromIstream(istream* input); + // Like ParseFromIstream(), but accepts messages that are missing + // required fields. + bool ParsePartialFromIstream(istream* input); + + // Serialize the message and write it to the given file descriptor. All + // required fields must be set. + bool SerializeToFileDescriptor(int file_descriptor) const; + // Like SerializeToFileDescriptor(), but allows missing required fields. + bool SerializePartialToFileDescriptor(int file_descriptor) const; + // Serialize the message and write it to the given C++ ostream. All + // required fields must be set. + bool SerializeToOstream(ostream* output) const; + // Like SerializeToOstream(), but allows missing required fields. + bool SerializePartialToOstream(ostream* output) const; + + + // Reflection-based methods ---------------------------------------- + // These methods are pure-virtual in MessageLite, but Message provides + // reflection-based default implementations. + + virtual string GetTypeName() const; + virtual void Clear(); + virtual bool IsInitialized() const; + virtual void CheckTypeAndMergeFrom(const MessageLite& other); + virtual bool MergePartialFromCodedStream(io::CodedInputStream* input); + virtual int ByteSize() const; + virtual void SerializeWithCachedSizes(io::CodedOutputStream* output) const; + + private: + // This is called only by the default implementation of ByteSize(), to + // update the cached size. If you override ByteSize(), you do not need + // to override this. If you do not override ByteSize(), you MUST override + // this; the default implementation will crash. + // + // The method is private because subclasses should never call it; only + // override it. Yes, C++ lets you do that. Crazy, huh? + virtual void SetCachedSize(int size) const; + + public: + + // Introspection --------------------------------------------------- + + // Typedef for backwards-compatibility. + typedef google::protobuf::Reflection Reflection; + + // Get a Descriptor for this message's type. 
This describes what + // fields the message contains, the types of those fields, etc. + const Descriptor* GetDescriptor() const { return GetMetadata().descriptor; } + + // Get the Reflection interface for this Message, which can be used to + // read and modify the fields of the Message dynamically (in other words, + // without knowing the message type at compile time). This object remains + // property of the Message. + // + // This method remains virtual in case a subclass does not implement + // reflection and wants to override the default behavior. + virtual const Reflection* GetReflection() const { + return GetMetadata().reflection; + } + + protected: + // Get a struct containing the metadata for the Message. Most subclasses only + // need to implement this method, rather than the GetDescriptor() and + // GetReflection() wrappers. + virtual Metadata GetMetadata() const = 0; + + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Message); +}; + +// This interface contains methods that can be used to dynamically access +// and modify the fields of a protocol message. Their semantics are +// similar to the accessors the protocol compiler generates. +// +// To get the Reflection for a given Message, call Message::GetReflection(). +// +// This interface is separate from Message only for efficiency reasons; +// the vast majority of implementations of Message will share the same +// implementation of Reflection (GeneratedMessageReflection, +// defined in generated_message.h), and all Messages of a particular class +// should share the same Reflection object (though you should not rely on +// the latter fact). +// +// There are several ways that these methods can be used incorrectly. For +// example, any of the following conditions will lead to undefined +// results (probably assertion failures): +// - The FieldDescriptor is not a field of this message type. +// - The method called is not appropriate for the field's type. For +// each field type in FieldDescriptor::TYPE_*, there is only one +// Get*() method, one Set*() method, and one Add*() method that is +// valid for that type. It should be obvious which (except maybe +// for TYPE_BYTES, which are represented using strings in C++). +// - A Get*() or Set*() method for singular fields is called on a repeated +// field. +// - GetRepeated*(), SetRepeated*(), or Add*() is called on a non-repeated +// field. +// - The Message object passed to any method is not of the right type for +// this Reflection object (i.e. message.GetReflection() != reflection). +// +// You might wonder why there is not any abstract representation for a field +// of arbitrary type. E.g., why isn't there just a "GetField()" method that +// returns "const Field&", where "Field" is some class with accessors like +// "GetInt32Value()". The problem is that someone would have to deal with +// allocating these Field objects. For generated message classes, having to +// allocate space for an additional object to wrap every field would at least +// double the message's memory footprint, probably worse. Allocating the +// objects on-demand, on the other hand, would be expensive and prone to +// memory leaks. So, instead we ended up with this flat interface. +// +// TODO(kenton): Create a utility class which callers can use to read and +// write fields from a Reflection without paying attention to the type. +class LIBPROTOBUF_EXPORT Reflection { + public: + // TODO(kenton): Remove parameter. 
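To make the warnings above concrete, a small sketch of reading and writing a singular string field through the Reflection interface; the field name "text" is hypothetical, and the function works on any Message whose type declares such a field:

#include <string>
#include <google/protobuf/message.h>
#include <google/protobuf/descriptor.h>

void AppendBang(google::protobuf::Message* msg) {
  using google::protobuf::FieldDescriptor;
  using google::protobuf::Reflection;

  const FieldDescriptor* field =
      msg->GetDescriptor()->FindFieldByName("text");        // hypothetical singular string field
  if (field == NULL || field->is_repeated() ||
      field->type() != FieldDescriptor::TYPE_STRING) {
    return;                                                 // wrong kind of field: Get/SetString would be undefined
  }

  const Reflection* reflection = msg->GetReflection();
  std::string value = reflection->GetString(*msg, field);   // returns the default value if the field is unset
  reflection->SetString(msg, field, value + "!");
}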
+ inline Reflection() {} + virtual ~Reflection(); + + // Get the UnknownFieldSet for the message. This contains fields which + // were seen when the Message was parsed but were not recognized according + // to the Message's definition. + virtual const UnknownFieldSet& GetUnknownFields( + const Message& message) const = 0; + // Get a mutable pointer to the UnknownFieldSet for the message. This + // contains fields which were seen when the Message was parsed but were not + // recognized according to the Message's definition. + virtual UnknownFieldSet* MutableUnknownFields(Message* message) const = 0; + + // Estimate the amount of memory used by the message object. + virtual int SpaceUsed(const Message& message) const = 0; + + // Check if the given non-repeated field is set. + virtual bool HasField(const Message& message, + const FieldDescriptor* field) const = 0; + + // Get the number of elements of a repeated field. + virtual int FieldSize(const Message& message, + const FieldDescriptor* field) const = 0; + + // Clear the value of a field, so that HasField() returns false or + // FieldSize() returns zero. + virtual void ClearField(Message* message, + const FieldDescriptor* field) const = 0; + + // Remove the last element of a repeated field. + // We don't provide a way to remove any element other than the last + // because it invites inefficient use, such as O(n^2) filtering loops + // that should have been O(n). If you want to remove an element other + // than the last, the best way to do it is to re-arrange the elements + // (using Swap()) so that the one you want removed is at the end, then + // call RemoveLast(). + virtual void RemoveLast(Message* message, + const FieldDescriptor* field) const = 0; + + // Swap the complete contents of two messages. + virtual void Swap(Message* message1, Message* message2) const = 0; + + // Swap two elements of a repeated field. + virtual void SwapElements(Message* message, + const FieldDescriptor* field, + int index1, + int index2) const = 0; + + // List all fields of the message which are currently set. This includes + // extensions. Singular fields will only be listed if HasField(field) would + // return true and repeated fields will only be listed if FieldSize(field) + // would return non-zero. Fields (both normal fields and extension fields) + // will be listed ordered by field number. + virtual void ListFields(const Message& message, + vector* output) const = 0; + + // Singular field getters ------------------------------------------ + // These get the value of a non-repeated field. They return the default + // value for fields that aren't set. 
+ + virtual int32 GetInt32 (const Message& message, + const FieldDescriptor* field) const = 0; + virtual int64 GetInt64 (const Message& message, + const FieldDescriptor* field) const = 0; + virtual uint32 GetUInt32(const Message& message, + const FieldDescriptor* field) const = 0; + virtual uint64 GetUInt64(const Message& message, + const FieldDescriptor* field) const = 0; + virtual float GetFloat (const Message& message, + const FieldDescriptor* field) const = 0; + virtual double GetDouble(const Message& message, + const FieldDescriptor* field) const = 0; + virtual bool GetBool (const Message& message, + const FieldDescriptor* field) const = 0; + virtual string GetString(const Message& message, + const FieldDescriptor* field) const = 0; + virtual const EnumValueDescriptor* GetEnum( + const Message& message, const FieldDescriptor* field) const = 0; + // See MutableMessage() for the meaning of the "factory" parameter. + virtual const Message& GetMessage(const Message& message, + const FieldDescriptor* field, + MessageFactory* factory = NULL) const = 0; + + // Get a string value without copying, if possible. + // + // GetString() necessarily returns a copy of the string. This can be + // inefficient when the string is already stored in a string object in the + // underlying message. GetStringReference() will return a reference to the + // underlying string in this case. Otherwise, it will copy the string into + // *scratch and return that. + // + // Note: It is perfectly reasonable and useful to write code like: + // str = reflection->GetStringReference(field, &str); + // This line would ensure that only one copy of the string is made + // regardless of the field's underlying representation. When initializing + // a newly-constructed string, though, it's just as fast and more readable + // to use code like: + // string str = reflection->GetString(field); + virtual const string& GetStringReference(const Message& message, + const FieldDescriptor* field, + string* scratch) const = 0; + + + // Singular field mutators ----------------------------------------- + // These mutate the value of a non-repeated field. + + virtual void SetInt32 (Message* message, + const FieldDescriptor* field, int32 value) const = 0; + virtual void SetInt64 (Message* message, + const FieldDescriptor* field, int64 value) const = 0; + virtual void SetUInt32(Message* message, + const FieldDescriptor* field, uint32 value) const = 0; + virtual void SetUInt64(Message* message, + const FieldDescriptor* field, uint64 value) const = 0; + virtual void SetFloat (Message* message, + const FieldDescriptor* field, float value) const = 0; + virtual void SetDouble(Message* message, + const FieldDescriptor* field, double value) const = 0; + virtual void SetBool (Message* message, + const FieldDescriptor* field, bool value) const = 0; + virtual void SetString(Message* message, + const FieldDescriptor* field, + const string& value) const = 0; + virtual void SetEnum (Message* message, + const FieldDescriptor* field, + const EnumValueDescriptor* value) const = 0; + // Get a mutable pointer to a field with a message type. If a MessageFactory + // is provided, it will be used to construct instances of the sub-message; + // otherwise, the default factory is used. If the field is an extension that + // does not live in the same pool as the containing message's descriptor (e.g. + // it lives in an overlay pool), then a MessageFactory must be provided. 
+ // If you have no idea what that meant, then you probably don't need to worry + // about it (don't provide a MessageFactory). WARNING: If the + // FieldDescriptor is for a compiled-in extension, then + // factory->GetPrototype(field->message_type() MUST return an instance of the + // compiled-in class for this type, NOT DynamicMessage. + virtual Message* MutableMessage(Message* message, + const FieldDescriptor* field, + MessageFactory* factory = NULL) const = 0; + + + // Repeated field getters ------------------------------------------ + // These get the value of one element of a repeated field. + + virtual int32 GetRepeatedInt32 (const Message& message, + const FieldDescriptor* field, + int index) const = 0; + virtual int64 GetRepeatedInt64 (const Message& message, + const FieldDescriptor* field, + int index) const = 0; + virtual uint32 GetRepeatedUInt32(const Message& message, + const FieldDescriptor* field, + int index) const = 0; + virtual uint64 GetRepeatedUInt64(const Message& message, + const FieldDescriptor* field, + int index) const = 0; + virtual float GetRepeatedFloat (const Message& message, + const FieldDescriptor* field, + int index) const = 0; + virtual double GetRepeatedDouble(const Message& message, + const FieldDescriptor* field, + int index) const = 0; + virtual bool GetRepeatedBool (const Message& message, + const FieldDescriptor* field, + int index) const = 0; + virtual string GetRepeatedString(const Message& message, + const FieldDescriptor* field, + int index) const = 0; + virtual const EnumValueDescriptor* GetRepeatedEnum( + const Message& message, + const FieldDescriptor* field, int index) const = 0; + virtual const Message& GetRepeatedMessage( + const Message& message, + const FieldDescriptor* field, int index) const = 0; + + // See GetStringReference(), above. + virtual const string& GetRepeatedStringReference( + const Message& message, const FieldDescriptor* field, + int index, string* scratch) const = 0; + + + // Repeated field mutators ----------------------------------------- + // These mutate the value of one element of a repeated field. + + virtual void SetRepeatedInt32 (Message* message, + const FieldDescriptor* field, + int index, int32 value) const = 0; + virtual void SetRepeatedInt64 (Message* message, + const FieldDescriptor* field, + int index, int64 value) const = 0; + virtual void SetRepeatedUInt32(Message* message, + const FieldDescriptor* field, + int index, uint32 value) const = 0; + virtual void SetRepeatedUInt64(Message* message, + const FieldDescriptor* field, + int index, uint64 value) const = 0; + virtual void SetRepeatedFloat (Message* message, + const FieldDescriptor* field, + int index, float value) const = 0; + virtual void SetRepeatedDouble(Message* message, + const FieldDescriptor* field, + int index, double value) const = 0; + virtual void SetRepeatedBool (Message* message, + const FieldDescriptor* field, + int index, bool value) const = 0; + virtual void SetRepeatedString(Message* message, + const FieldDescriptor* field, + int index, const string& value) const = 0; + virtual void SetRepeatedEnum(Message* message, + const FieldDescriptor* field, int index, + const EnumValueDescriptor* value) const = 0; + // Get a mutable pointer to an element of a repeated field with a message + // type. + virtual Message* MutableRepeatedMessage( + Message* message, const FieldDescriptor* field, int index) const = 0; + + + // Repeated field adders ------------------------------------------- + // These add an element to a repeated field. 
+ + virtual void AddInt32 (Message* message, + const FieldDescriptor* field, int32 value) const = 0; + virtual void AddInt64 (Message* message, + const FieldDescriptor* field, int64 value) const = 0; + virtual void AddUInt32(Message* message, + const FieldDescriptor* field, uint32 value) const = 0; + virtual void AddUInt64(Message* message, + const FieldDescriptor* field, uint64 value) const = 0; + virtual void AddFloat (Message* message, + const FieldDescriptor* field, float value) const = 0; + virtual void AddDouble(Message* message, + const FieldDescriptor* field, double value) const = 0; + virtual void AddBool (Message* message, + const FieldDescriptor* field, bool value) const = 0; + virtual void AddString(Message* message, + const FieldDescriptor* field, + const string& value) const = 0; + virtual void AddEnum (Message* message, + const FieldDescriptor* field, + const EnumValueDescriptor* value) const = 0; + // See MutableMessage() for comments on the "factory" parameter. + virtual Message* AddMessage(Message* message, + const FieldDescriptor* field, + MessageFactory* factory = NULL) const = 0; + + + // Extensions ------------------------------------------------------ + + // Try to find an extension of this message type by fully-qualified field + // name. Returns NULL if no extension is known for this name or number. + virtual const FieldDescriptor* FindKnownExtensionByName( + const string& name) const = 0; + + // Try to find an extension of this message type by field number. + // Returns NULL if no extension is known for this name or number. + virtual const FieldDescriptor* FindKnownExtensionByNumber( + int number) const = 0; + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Reflection); +}; + +// Abstract interface for a factory for message objects. +class LIBPROTOBUF_EXPORT MessageFactory { + public: + inline MessageFactory() {} + virtual ~MessageFactory(); + + // Given a Descriptor, gets or constructs the default (prototype) Message + // of that type. You can then call that message's New() method to construct + // a mutable message of that type. + // + // Calling this method twice with the same Descriptor returns the same + // object. The returned object remains property of the factory. Also, any + // objects created by calling the prototype's New() method share some data + // with the prototype, so these must be destoyed before the MessageFactory + // is destroyed. + // + // The given descriptor must outlive the returned message, and hence must + // outlive the MessageFactory. + // + // Some implementations do not support all types. GetPrototype() will + // return NULL if the descriptor passed in is not supported. + // + // This method may or may not be thread-safe depending on the implementation. + // Each implementation should document its own degree thread-safety. + virtual const Message* GetPrototype(const Descriptor* type) = 0; + + // Gets a MessageFactory which supports all generated, compiled-in messages. + // In other words, for any compiled-in type FooMessage, the following is true: + // MessageFactory::generated_factory()->GetPrototype( + // FooMessage::descriptor()) == FooMessage::default_instance() + // This factory supports all types which are found in + // DescriptorPool::generated_pool(). If given a descriptor from any other + // pool, GetPrototype() will return NULL. (You can also check if a + // descriptor is for a generated message by checking if + // descriptor->file()->pool() == DescriptorPool::generated_pool().) 
+ // + // This factory is 100% thread-safe; calling GetPrototype() does not modify + // any shared data. + // + // This factory is a singleton. The caller must not delete the object. + static MessageFactory* generated_factory(); + + // For internal use only: Registers a .proto file at static initialization + // time, to be placed in generated_factory. The first time GetPrototype() + // is called with a descriptor from this file, |register_messages| will be + // called, with the file name as the parameter. It must call + // InternalRegisterGeneratedMessage() (below) to register each message type + // in the file. This strange mechanism is necessary because descriptors are + // built lazily, so we can't register types by their descriptor until we + // know that the descriptor exists. |filename| must be a permanent string. + static void InternalRegisterGeneratedFile( + const char* filename, void (*register_messages)(const string&)); + + // For internal use only: Registers a message type. Called only by the + // functions which are registered with InternalRegisterGeneratedFile(), + // above. + static void InternalRegisterGeneratedMessage(const Descriptor* descriptor, + const Message* prototype); + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(MessageFactory); +}; + +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_MESSAGE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/message_lite.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/message_lite.cc new file mode 100644 index 0000000000..7c8f37dc7f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/message_lite.cc @@ -0,0 +1,334 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
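Before moving on to the lite implementation, a brief sketch of how the MessageFactory interface declared at the end of message.h is typically used to construct a message instance from nothing but its Descriptor; Foo stands in for any compiled-in generated type (hypothetical):

#include <google/protobuf/message.h>
#include <google/protobuf/descriptor.h>

google::protobuf::Message* NewByDescriptor(const google::protobuf::Descriptor* type) {
  // Look up the compiled-in prototype; GetPrototype() returns NULL for
  // descriptors that do not live in DescriptorPool::generated_pool().
  const google::protobuf::Message* prototype =
      google::protobuf::MessageFactory::generated_factory()->GetPrototype(type);
  if (prototype == NULL) return NULL;
  return prototype->New();   // caller owns the returned instance
}

// Usage: google::protobuf::Message* m = NewByDescriptor(Foo::descriptor());
//        ... fill it in via reflection ...
//        delete m;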
+ +// Authors: wink@google.com (Wink Saville), +// kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { + +MessageLite::~MessageLite() {} + +string MessageLite::InitializationErrorString() const { + return "(cannot determine missing fields for lite message)"; +} + +namespace { + +// When serializing, we first compute the byte size, then serialize the message. +// If serialization produces a different number of bytes than expected, we +// call this function, which crashes. The problem could be due to a bug in the +// protobuf implementation but is more likely caused by concurrent modification +// of the message. This function attempts to distinguish between the two and +// provide a useful error message. +void ByteSizeConsistencyError(int byte_size_before_serialization, + int byte_size_after_serialization, + int bytes_produced_by_serialization) { + GOOGLE_CHECK_EQ(byte_size_before_serialization, byte_size_after_serialization) + << "Protocol message was modified concurrently during serialization."; + GOOGLE_CHECK_EQ(bytes_produced_by_serialization, byte_size_before_serialization) + << "Byte size calculation and serialization were inconsistent. This " + "may indicate a bug in protocol buffers or it may be caused by " + "concurrent modification of the message."; + GOOGLE_LOG(FATAL) << "This shouldn't be called if all the sizes are equal."; +} + +string InitializationErrorMessage(const char* action, + const MessageLite& message) { + // Note: We want to avoid depending on strutil in the lite library, otherwise + // we'd use: + // + // return strings::Substitute( + // "Can't $0 message of type \"$1\" because it is missing required " + // "fields: $2", + // action, message.GetTypeName(), + // message.InitializationErrorString()); + + string result; + result += "Can't "; + result += action; + result += " message of type \""; + result += message.GetTypeName(); + result += "\" because it is missing required fields: "; + result += message.InitializationErrorString(); + return result; +} + +// Several of the Parse methods below just do one thing and then call another +// method. In a naive implementation, we might have ParseFromString() call +// ParseFromArray() which would call ParseFromZeroCopyStream() which would call +// ParseFromCodedStream() which would call MergeFromCodedStream() which would +// call MergePartialFromCodedStream(). However, when parsing very small +// messages, every function call introduces significant overhead. To avoid +// this without reproducing code, we use these forced-inline helpers. +// +// Note: GCC only allows GOOGLE_ATTRIBUTE_ALWAYS_INLINE on declarations, not +// definitions. 
+inline bool InlineMergeFromCodedStream(io::CodedInputStream* input, + MessageLite* message) + GOOGLE_ATTRIBUTE_ALWAYS_INLINE; +inline bool InlineParseFromCodedStream(io::CodedInputStream* input, + MessageLite* message) + GOOGLE_ATTRIBUTE_ALWAYS_INLINE; +inline bool InlineParsePartialFromCodedStream(io::CodedInputStream* input, + MessageLite* message) + GOOGLE_ATTRIBUTE_ALWAYS_INLINE; +inline bool InlineParseFromArray(const void* data, int size, + MessageLite* message) + GOOGLE_ATTRIBUTE_ALWAYS_INLINE; +inline bool InlineParsePartialFromArray(const void* data, int size, + MessageLite* message) + GOOGLE_ATTRIBUTE_ALWAYS_INLINE; + +bool InlineMergeFromCodedStream(io::CodedInputStream* input, + MessageLite* message) { + if (!message->MergePartialFromCodedStream(input)) return false; + if (!message->IsInitialized()) { + GOOGLE_LOG(ERROR) << InitializationErrorMessage("parse", *message); + return false; + } + return true; +} + +bool InlineParseFromCodedStream(io::CodedInputStream* input, + MessageLite* message) { + message->Clear(); + return InlineMergeFromCodedStream(input, message); +} + +bool InlineParsePartialFromCodedStream(io::CodedInputStream* input, + MessageLite* message) { + message->Clear(); + return message->MergePartialFromCodedStream(input); +} + +bool InlineParseFromArray(const void* data, int size, MessageLite* message) { + io::CodedInputStream input(reinterpret_cast(data), size); + return InlineParseFromCodedStream(&input, message) && + input.ConsumedEntireMessage(); +} + +bool InlineParsePartialFromArray(const void* data, int size, + MessageLite* message) { + io::CodedInputStream input(reinterpret_cast(data), size); + return InlineParsePartialFromCodedStream(&input, message) && + input.ConsumedEntireMessage(); +} + +} // namespace + +bool MessageLite::MergeFromCodedStream(io::CodedInputStream* input) { + return InlineMergeFromCodedStream(input, this); +} + +bool MessageLite::ParseFromCodedStream(io::CodedInputStream* input) { + return InlineParseFromCodedStream(input, this); +} + +bool MessageLite::ParsePartialFromCodedStream(io::CodedInputStream* input) { + return InlineParsePartialFromCodedStream(input, this); +} + +bool MessageLite::ParseFromZeroCopyStream(io::ZeroCopyInputStream* input) { + io::CodedInputStream decoder(input); + return ParseFromCodedStream(&decoder) && decoder.ConsumedEntireMessage(); +} + +bool MessageLite::ParsePartialFromZeroCopyStream( + io::ZeroCopyInputStream* input) { + io::CodedInputStream decoder(input); + return ParsePartialFromCodedStream(&decoder) && + decoder.ConsumedEntireMessage(); +} + +bool MessageLite::ParseFromBoundedZeroCopyStream( + io::ZeroCopyInputStream* input, int size) { + io::CodedInputStream decoder(input); + decoder.PushLimit(size); + return ParseFromCodedStream(&decoder) && + decoder.ConsumedEntireMessage() && + decoder.BytesUntilLimit() == 0; +} + +bool MessageLite::ParsePartialFromBoundedZeroCopyStream( + io::ZeroCopyInputStream* input, int size) { + io::CodedInputStream decoder(input); + decoder.PushLimit(size); + return ParsePartialFromCodedStream(&decoder) && + decoder.ConsumedEntireMessage() && + decoder.BytesUntilLimit() == 0; +} + +bool MessageLite::ParseFromString(const string& data) { + return InlineParseFromArray(data.data(), data.size(), this); +} + +bool MessageLite::ParsePartialFromString(const string& data) { + return InlineParsePartialFromArray(data.data(), data.size(), this); +} + +bool MessageLite::ParseFromArray(const void* data, int size) { + return InlineParseFromArray(data, size, this); +} + +bool 
MessageLite::ParsePartialFromArray(const void* data, int size) { + return InlineParsePartialFromArray(data, size, this); +} + + +// =================================================================== + +uint8* MessageLite::SerializeWithCachedSizesToArray(uint8* target) const { + // We only optimize this when using optimize_for = SPEED. In other cases + // we just use the CodedOutputStream path. + int size = GetCachedSize(); + io::ArrayOutputStream out(target, size); + io::CodedOutputStream coded_out(&out); + SerializeWithCachedSizes(&coded_out); + GOOGLE_CHECK(!coded_out.HadError()); + return target + size; +} + +bool MessageLite::SerializeToCodedStream(io::CodedOutputStream* output) const { + GOOGLE_DCHECK(IsInitialized()) << InitializationErrorMessage("serialize", *this); + return SerializePartialToCodedStream(output); +} + +bool MessageLite::SerializePartialToCodedStream( + io::CodedOutputStream* output) const { + const int size = ByteSize(); // Force size to be cached. + uint8* buffer = output->GetDirectBufferForNBytesAndAdvance(size); + if (buffer != NULL) { + uint8* end = SerializeWithCachedSizesToArray(buffer); + if (end - buffer != size) { + ByteSizeConsistencyError(size, ByteSize(), end - buffer); + } + return true; + } else { + int original_byte_count = output->ByteCount(); + SerializeWithCachedSizes(output); + if (output->HadError()) { + return false; + } + int final_byte_count = output->ByteCount(); + + if (final_byte_count - original_byte_count != size) { + ByteSizeConsistencyError(size, ByteSize(), + final_byte_count - original_byte_count); + } + + return true; + } +} + +bool MessageLite::SerializeToZeroCopyStream( + io::ZeroCopyOutputStream* output) const { + io::CodedOutputStream encoder(output); + return SerializeToCodedStream(&encoder); +} + +bool MessageLite::SerializePartialToZeroCopyStream( + io::ZeroCopyOutputStream* output) const { + io::CodedOutputStream encoder(output); + return SerializePartialToCodedStream(&encoder); +} + +bool MessageLite::AppendToString(string* output) const { + GOOGLE_DCHECK(IsInitialized()) << InitializationErrorMessage("serialize", *this); + return AppendPartialToString(output); +} + +bool MessageLite::AppendPartialToString(string* output) const { + int old_size = output->size(); + int byte_size = ByteSize(); + STLStringResizeUninitialized(output, old_size + byte_size); + uint8* start = reinterpret_cast(string_as_array(output) + old_size); + uint8* end = SerializeWithCachedSizesToArray(start); + if (end - start != byte_size) { + ByteSizeConsistencyError(byte_size, ByteSize(), end - start); + } + return true; +} + +bool MessageLite::SerializeToString(string* output) const { + output->clear(); + return AppendToString(output); +} + +bool MessageLite::SerializePartialToString(string* output) const { + output->clear(); + return AppendPartialToString(output); +} + +bool MessageLite::SerializeToArray(void* data, int size) const { + GOOGLE_DCHECK(IsInitialized()) << InitializationErrorMessage("serialize", *this); + return SerializePartialToArray(data, size); +} + +bool MessageLite::SerializePartialToArray(void* data, int size) const { + int byte_size = ByteSize(); + if (size < byte_size) return false; + uint8* start = reinterpret_cast(data); + uint8* end = SerializeWithCachedSizesToArray(start); + if (end - start != byte_size) { + ByteSizeConsistencyError(byte_size, ByteSize(), end - start); + } + return true; +} + +string MessageLite::SerializeAsString() const { + // If the compiler implements the (Named) Return Value Optimization, + // the local 
variable 'result' will not actually reside on the stack + // of this function, but will be overlaid with the object that the + // caller supplied for the return value to be constructed in. + string output; + if (!AppendToString(&output)) + output.clear(); + return output; +} + +string MessageLite::SerializePartialAsString() const { + string output; + if (!AppendPartialToString(&output)) + output.clear(); + return output; +} + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/message_lite.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/message_lite.h new file mode 100644 index 0000000000..ebf4ba3c88 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/message_lite.h @@ -0,0 +1,239 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Authors: wink@google.com (Wink Saville), +// kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Defines MessageLite, the abstract interface implemented by all (lite +// and non-lite) protocol message objects. + +#ifndef GOOGLE_PROTOBUF_MESSAGE_LITE_H__ +#define GOOGLE_PROTOBUF_MESSAGE_LITE_H__ + +#include +#include + +namespace google { +namespace protobuf { + +// Interface to light weight protocol messages. +// +// This interface is implemented by all protocol message objects. Non-lite +// messages additionally implement the Message interface, which is a +// subclass of MessageLite. Use MessageLite instead when you only need +// the subset of features which it supports -- namely, nothing that uses +// descriptors or reflection. 
You can instruct the protocol compiler +// to generate classes which implement only MessageLite, not the full +// Message interface, by adding the following line to the .proto file: +// +// option optimize_for = LITE_RUNTIME; +// +// This is particularly useful on resource-constrained systems where +// the full protocol buffers runtime library is too big. +// +// Note that on non-constrained systems (e.g. servers) when you need +// to link in lots of protocol definitions, a better way to reduce +// total code footprint is to use optimize_for = CODE_SIZE. This +// will make the generated code smaller while still supporting all the +// same features (at the expense of speed). optimize_for = LITE_RUNTIME +// is best when you only have a small number of message types linked +// into your binary, in which case the size of the protocol buffers +// runtime itself is the biggest problem. +class LIBPROTOBUF_EXPORT MessageLite { + public: + inline MessageLite() {} + virtual ~MessageLite(); + + // Basic Operations ------------------------------------------------ + + // Get the name of this message type, e.g. "foo.bar.BazProto". + virtual string GetTypeName() const = 0; + + // Construct a new instance of the same type. Ownership is passed to the + // caller. + virtual MessageLite* New() const = 0; + + // Clear all fields of the message and set them to their default values. + // Clear() avoids freeing memory, assuming that any memory allocated + // to hold parts of the message will be needed again to hold the next + // message. If you actually want to free the memory used by a Message, + // you must delete it. + virtual void Clear() = 0; + + // Quickly check if all required fields have values set. + virtual bool IsInitialized() const = 0; + + // This is not implemented for Lite messages -- it just returns "(cannot + // determine missing fields for lite message)". However, it is implemented + // for full messages. See message.h. + virtual string InitializationErrorString() const; + + // If |other| is the exact same class as this, calls MergeFrom(). Otherwise, + // results are undefined (probably crash). + virtual void CheckTypeAndMergeFrom(const MessageLite& other) = 0; + + // Parsing --------------------------------------------------------- + // Methods for parsing in protocol buffer format. Most of these are + // just simple wrappers around MergeFromCodedStream(). + + // Fill the message with a protocol buffer parsed from the given input + // stream. Returns false on a read error or if the input is in the + // wrong format. + bool ParseFromCodedStream(io::CodedInputStream* input); + // Like ParseFromCodedStream(), but accepts messages that are missing + // required fields. + bool ParsePartialFromCodedStream(io::CodedInputStream* input); + // Read a protocol buffer from the given zero-copy input stream. If + // successful, the entire input will be consumed. + bool ParseFromZeroCopyStream(io::ZeroCopyInputStream* input); + // Like ParseFromZeroCopyStream(), but accepts messages that are missing + // required fields. + bool ParsePartialFromZeroCopyStream(io::ZeroCopyInputStream* input); + // Read a protocol buffer from the given zero-copy input stream, expecting + // the message to be exactly "size" bytes long. If successful, exactly + // this many bytes will have been consumed from the input. + bool ParseFromBoundedZeroCopyStream(io::ZeroCopyInputStream* input, int size); + // Like ParseFromBoundedZeroCopyStream(), but accepts messages that are + // missing required fields. 
+ bool ParsePartialFromBoundedZeroCopyStream(io::ZeroCopyInputStream* input, + int size); + // Parse a protocol buffer contained in a string. + bool ParseFromString(const string& data); + // Like ParseFromString(), but accepts messages that are missing + // required fields. + bool ParsePartialFromString(const string& data); + // Parse a protocol buffer contained in an array of bytes. + bool ParseFromArray(const void* data, int size); + // Like ParseFromArray(), but accepts messages that are missing + // required fields. + bool ParsePartialFromArray(const void* data, int size); + + + // Reads a protocol buffer from the stream and merges it into this + // Message. Singular fields read from the input overwrite what is + // already in the Message and repeated fields are appended to those + // already present. + // + // It is the responsibility of the caller to call input->LastTagWas() + // (for groups) or input->ConsumedEntireMessage() (for non-groups) after + // this returns to verify that the message's end was delimited correctly. + // + // ParsefromCodedStream() is implemented as Clear() followed by + // MergeFromCodedStream(). + bool MergeFromCodedStream(io::CodedInputStream* input); + + // Like MergeFromCodedStream(), but succeeds even if required fields are + // missing in the input. + // + // MergeFromCodedStream() is just implemented as MergePartialFromCodedStream() + // followed by IsInitialized(). + virtual bool MergePartialFromCodedStream(io::CodedInputStream* input) = 0; + + // Serialization --------------------------------------------------- + // Methods for serializing in protocol buffer format. Most of these + // are just simple wrappers around ByteSize() and SerializeWithCachedSizes(). + + // Write a protocol buffer of this message to the given output. Returns + // false on a write error. If the message is missing required fields, + // this may GOOGLE_CHECK-fail. + bool SerializeToCodedStream(io::CodedOutputStream* output) const; + // Like SerializeToCodedStream(), but allows missing required fields. + bool SerializePartialToCodedStream(io::CodedOutputStream* output) const; + // Write the message to the given zero-copy output stream. All required + // fields must be set. + bool SerializeToZeroCopyStream(io::ZeroCopyOutputStream* output) const; + // Like SerializeToZeroCopyStream(), but allows missing required fields. + bool SerializePartialToZeroCopyStream(io::ZeroCopyOutputStream* output) const; + // Serialize the message and store it in the given string. All required + // fields must be set. + bool SerializeToString(string* output) const; + // Like SerializeToString(), but allows missing required fields. + bool SerializePartialToString(string* output) const; + // Serialize the message and store it in the given byte array. All required + // fields must be set. + bool SerializeToArray(void* data, int size) const; + // Like SerializeToArray(), but allows missing required fields. + bool SerializePartialToArray(void* data, int size) const; + + // Make a string encoding the message. Is equivalent to calling + // SerializeToString() on a string and using that. Returns the empty + // string if SerializeToString() would have returned an error. + // Note: If you intend to generate many such strings, you may + // reduce heap fragmentation by instead re-using the same string + // object with calls to SerializeToString(). + string SerializeAsString() const; + // Like SerializeAsString(), but allows missing required fields. 
+ string SerializePartialAsString() const; + + // Like SerializeToString(), but appends to the data to the string's existing + // contents. All required fields must be set. + bool AppendToString(string* output) const; + // Like AppendToString(), but allows missing required fields. + bool AppendPartialToString(string* output) const; + + // Computes the serialized size of the message. This recursively calls + // ByteSize() on all embedded messages. If a subclass does not override + // this, it MUST override SetCachedSize(). + virtual int ByteSize() const = 0; + + // Serializes the message without recomputing the size. The message must + // not have changed since the last call to ByteSize(); if it has, the results + // are undefined. + virtual void SerializeWithCachedSizes( + io::CodedOutputStream* output) const = 0; + + // Like SerializeWithCachedSizes, but writes directly to *target, returning + // a pointer to the byte immediately after the last byte written. "target" + // must point at a byte array of at least ByteSize() bytes. + virtual uint8* SerializeWithCachedSizesToArray(uint8* target) const; + + // Returns the result of the last call to ByteSize(). An embedded message's + // size is needed both to serialize it (because embedded messages are + // length-delimited) and to compute the outer message's size. Caching + // the size avoids computing it multiple times. + // + // ByteSize() does not automatically use the cached size when available + // because this would require invalidating it every time the message was + // modified, which would be too hard and expensive. (E.g. if a deeply-nested + // sub-message is changed, all of its parents' cached sizes would need to be + // invalidated, which is too much work for an otherwise inlined setter + // method.) + virtual int GetCachedSize() const = 0; + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(MessageLite); +}; + +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_MESSAGE_LITE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/message_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/message_unittest.cc new file mode 100644 index 0000000000..33b9e77c89 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/message_unittest.cc @@ -0,0 +1,281 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
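The header above spells out the string-based parse and serialize helpers and the LITE_RUNTIME option. A minimal sketch of how calling code exercises that surface, assuming a hypothetical search.proto built with optimize_for = LITE_RUNTIME so that the generated class derives only from MessageLite (the file, package, message, field, and generated header names are illustrative, not part of this tree):

// search.proto (hypothetical):
//   option optimize_for = LITE_RUNTIME;
//   package example;
//   message SearchRequest { optional string query = 1; }

#include <string>
#include "search.pb.h"  // hypothetical generated header

bool RoundTrip(const std::string& wire, std::string* out) {
  example::SearchRequest req;           // implements MessageLite only
  if (!req.ParseFromString(wire))       // false on read error or bad format
    return false;
  req.set_query("protobuf");
  return req.SerializeToString(out);    // all required fields must be set
}

As the comments above note, code that produces many serialized strings may prefer reusing one output string with SerializeToString() rather than calling SerializeAsString() repeatedly, to reduce heap fragmentation.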
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include + +#include +#include +#include +#ifdef _MSC_VER +#include +#else +#include +#endif +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +namespace google { +namespace protobuf { + +#ifndef O_BINARY +#ifdef _O_BINARY +#define O_BINARY _O_BINARY +#else +#define O_BINARY 0 // If this isn't defined, the platform doesn't need it. +#endif +#endif + +TEST(MessageTest, SerializeHelpers) { + // TODO(kenton): Test more helpers? They're all two-liners so it seems + // like a waste of time. + + protobuf_unittest::TestAllTypes message; + TestUtil::SetAllFields(&message); + stringstream stream; + + string str1("foo"); + string str2("bar"); + + EXPECT_TRUE(message.SerializeToString(&str1)); + EXPECT_TRUE(message.AppendToString(&str2)); + EXPECT_TRUE(message.SerializeToOstream(&stream)); + + EXPECT_EQ(str1.size() + 3, str2.size()); + EXPECT_EQ("bar", str2.substr(0, 3)); + // Don't use EXPECT_EQ because we don't want to dump raw binary data to + // stdout. + EXPECT_TRUE(str2.substr(3) == str1); + + // GCC gives some sort of error if we try to just do stream.str() == str1. + string temp = stream.str(); + EXPECT_TRUE(temp == str1); + + EXPECT_TRUE(message.SerializeAsString() == str1); + +} + +TEST(MessageTest, SerializeToBrokenOstream) { + ofstream out; + protobuf_unittest::TestAllTypes message; + message.set_optional_int32(123); + + EXPECT_FALSE(message.SerializeToOstream(&out)); +} + +TEST(MessageTest, ParseFromFileDescriptor) { + string filename = TestSourceDir() + + "/google/protobuf/testdata/golden_message"; + int file = open(filename.c_str(), O_RDONLY | O_BINARY); + + unittest::TestAllTypes message; + EXPECT_TRUE(message.ParseFromFileDescriptor(file)); + TestUtil::ExpectAllFieldsSet(message); + + EXPECT_GE(close(file), 0); +} + +TEST(MessageTest, ParsePackedFromFileDescriptor) { + string filename = + TestSourceDir() + + "/google/protobuf/testdata/golden_packed_fields_message"; + int file = open(filename.c_str(), O_RDONLY | O_BINARY); + + unittest::TestPackedTypes message; + EXPECT_TRUE(message.ParseFromFileDescriptor(file)); + TestUtil::ExpectPackedFieldsSet(message); + + EXPECT_GE(close(file), 0); +} + +TEST(MessageTest, ParseHelpers) { + // TODO(kenton): Test more helpers? They're all two-liners so it seems + // like a waste of time. + string data; + + { + // Set up. + protobuf_unittest::TestAllTypes message; + TestUtil::SetAllFields(&message); + message.SerializeToString(&data); + } + + { + // Test ParseFromString. + protobuf_unittest::TestAllTypes message; + EXPECT_TRUE(message.ParseFromString(data)); + TestUtil::ExpectAllFieldsSet(message); + } + + { + // Test ParseFromIstream. 
+ protobuf_unittest::TestAllTypes message; + stringstream stream(data); + EXPECT_TRUE(message.ParseFromIstream(&stream)); + EXPECT_TRUE(stream.eof()); + TestUtil::ExpectAllFieldsSet(message); + } + + { + // Test ParseFromBoundedZeroCopyStream. + string data_with_junk(data); + data_with_junk.append("some junk on the end"); + io::ArrayInputStream stream(data_with_junk.data(), data_with_junk.size()); + protobuf_unittest::TestAllTypes message; + EXPECT_TRUE(message.ParseFromBoundedZeroCopyStream(&stream, data.size())); + TestUtil::ExpectAllFieldsSet(message); + } + + { + // Test that ParseFromBoundedZeroCopyStream fails (but doesn't crash) if + // EOF is reached before the expected number of bytes. + io::ArrayInputStream stream(data.data(), data.size()); + protobuf_unittest::TestAllTypes message; + EXPECT_FALSE( + message.ParseFromBoundedZeroCopyStream(&stream, data.size() + 1)); + } +} + +TEST(MessageTest, ParseFailsIfNotInitialized) { + unittest::TestRequired message; + vector errors; + + { + ScopedMemoryLog log; + EXPECT_FALSE(message.ParseFromString("")); + errors = log.GetMessages(ERROR); + } + + ASSERT_EQ(1, errors.size()); + EXPECT_EQ("Can't parse message of type \"protobuf_unittest.TestRequired\" " + "because it is missing required fields: a, b, c", + errors[0]); +} + +TEST(MessageTest, BypassInitializationCheckOnParse) { + unittest::TestRequired message; + io::ArrayInputStream raw_input(NULL, 0); + io::CodedInputStream input(&raw_input); + EXPECT_TRUE(message.MergePartialFromCodedStream(&input)); +} + +TEST(MessageTest, InitializationErrorString) { + unittest::TestRequired message; + EXPECT_EQ("a, b, c", message.InitializationErrorString()); +} + +#ifdef GTEST_HAS_DEATH_TEST // death tests do not work on Windows yet. + +TEST(MessageTest, SerializeFailsIfNotInitialized) { + unittest::TestRequired message; + string data; + EXPECT_DEBUG_DEATH(EXPECT_TRUE(message.SerializeToString(&data)), + "Can't serialize message of type \"protobuf_unittest.TestRequired\" because " + "it is missing required fields: a, b, c"); +} + +TEST(MessageTest, CheckInitialized) { + unittest::TestRequired message; + EXPECT_DEATH(message.CheckInitialized(), + "Message of type \"protobuf_unittest.TestRequired\" is missing required " + "fields: a, b, c"); +} + +#endif // GTEST_HAS_DEATH_TEST + +TEST(MessageTest, BypassInitializationCheckOnSerialize) { + unittest::TestRequired message; + io::ArrayOutputStream raw_output(NULL, 0); + io::CodedOutputStream output(&raw_output); + EXPECT_TRUE(message.SerializePartialToCodedStream(&output)); +} + +TEST(MessageTest, FindInitializationErrors) { + unittest::TestRequired message; + vector errors; + message.FindInitializationErrors(&errors); + ASSERT_EQ(3, errors.size()); + EXPECT_EQ("a", errors[0]); + EXPECT_EQ("b", errors[1]); + EXPECT_EQ("c", errors[2]); +} + +TEST(MessageTest, ParseFailsOnInvalidMessageEnd) { + unittest::TestAllTypes message; + + // Control case. + EXPECT_TRUE(message.ParseFromArray("", 0)); + + // The byte is a valid varint, but not a valid tag (zero). + EXPECT_FALSE(message.ParseFromArray("\0", 1)); + + // The byte is a malformed varint. + EXPECT_FALSE(message.ParseFromArray("\200", 1)); + + // The byte is an endgroup tag, but we aren't parsing a group. 
+ EXPECT_FALSE(message.ParseFromArray("\014", 1)); +} + +TEST(MessageFactoryTest, GeneratedFactoryLookup) { + EXPECT_EQ( + MessageFactory::generated_factory()->GetPrototype( + protobuf_unittest::TestAllTypes::descriptor()), + &protobuf_unittest::TestAllTypes::default_instance()); +} + +TEST(MessageFactoryTest, GeneratedFactoryUnknownType) { + // Construct a new descriptor. + DescriptorPool pool; + FileDescriptorProto file; + file.set_name("foo.proto"); + file.add_message_type()->set_name("Foo"); + const Descriptor* descriptor = pool.BuildFile(file)->message_type(0); + + // Trying to construct it should return NULL. + EXPECT_TRUE( + MessageFactory::generated_factory()->GetPrototype(descriptor) == NULL); +} + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/package_info.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/package_info.h new file mode 100644 index 0000000000..60cd3994cb --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/package_info.h @@ -0,0 +1,64 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This file exists solely to document the google::protobuf namespace. +// It is not compiled into anything, but it may be read by an automated +// documentation generator. + +namespace google { + +// Core components of the Protocol Buffers runtime library. +// +// The files in this package represent the core of the Protocol Buffer +// system. All of them are part of the libprotobuf library. 
+// +// A note on thread-safety: +// +// Thread-safety in the Protocol Buffer library follows a simple rule: +// unless explicitly noted otherwise, it is always safe to use an object +// from multiple threads simultaneously as long as the object is declared +// const in all threads (or, it is only used in ways that would be allowed +// if it were declared const). However, if an object is accessed in one +// thread in a way that would not be allowed if it were const, then it is +// not safe to access that object in any other thread simultaneously. +// +// Put simply, read-only access to an object can happen in multiple threads +// simultaneously, but write access can only happen in a single thread at +// a time. +// +// The implementation does contain some "const" methods which actually modify +// the object behind the scenes -- e.g., to cache results -- but in these cases +// mutex locking is used to make the access thread-safe. +namespace protobuf {} +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/reflection_ops.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/reflection_ops.cc new file mode 100644 index 0000000000..897c0d7c8a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/reflection_ops.cc @@ -0,0 +1,262 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
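The thread-safety note above reduces to a simple rule: any number of threads may read a message concurrently, but a write requires exclusive access. A small illustration of that rule, assuming a hypothetical generated type example::Config with a string field name (the header, type, and field names are illustrative, and std::thread is used only to sketch the concurrency):

#include <thread>
#include <vector>
#include "config.pb.h"   // hypothetical generated header

int main() {
  example::Config config;
  config.set_name("prod");        // build the message in a single thread first

  // Read-only (const) access to the fully built message is safe from any
  // number of threads at once, with no locking.
  std::vector<std::thread> readers;
  for (int i = 0; i < 4; ++i)
    readers.push_back(std::thread([&config] { (void)config.name().size(); }));
  for (size_t i = 0; i < readers.size(); ++i) readers[i].join();

  // Any later mutation needs external synchronization against *all*
  // concurrent access, reads included.
  config.set_name("staging");
  return 0;
}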
+ +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace internal { + +void ReflectionOps::Copy(const Message& from, Message* to) { + if (&from == to) return; + Clear(to); + Merge(from, to); +} + +void ReflectionOps::Merge(const Message& from, Message* to) { + GOOGLE_CHECK_NE(&from, to); + + const Descriptor* descriptor = from.GetDescriptor(); + GOOGLE_CHECK_EQ(to->GetDescriptor(), descriptor) + << "Tried to merge messages of different types."; + + const Reflection* from_reflection = from.GetReflection(); + const Reflection* to_reflection = to->GetReflection(); + + vector fields; + from_reflection->ListFields(from, &fields); + for (int i = 0; i < fields.size(); i++) { + const FieldDescriptor* field = fields[i]; + + if (field->is_repeated()) { + int count = from_reflection->FieldSize(from, field); + for (int j = 0; j < count; j++) { + switch (field->cpp_type()) { +#define HANDLE_TYPE(CPPTYPE, METHOD) \ + case FieldDescriptor::CPPTYPE_##CPPTYPE: \ + to_reflection->Add##METHOD(to, field, \ + from_reflection->GetRepeated##METHOD(from, field, j)); \ + break; + + HANDLE_TYPE(INT32 , Int32 ); + HANDLE_TYPE(INT64 , Int64 ); + HANDLE_TYPE(UINT32, UInt32); + HANDLE_TYPE(UINT64, UInt64); + HANDLE_TYPE(FLOAT , Float ); + HANDLE_TYPE(DOUBLE, Double); + HANDLE_TYPE(BOOL , Bool ); + HANDLE_TYPE(STRING, String); + HANDLE_TYPE(ENUM , Enum ); +#undef HANDLE_TYPE + + case FieldDescriptor::CPPTYPE_MESSAGE: + to_reflection->AddMessage(to, field)->MergeFrom( + from_reflection->GetRepeatedMessage(from, field, j)); + break; + } + } + } else { + switch (field->cpp_type()) { +#define HANDLE_TYPE(CPPTYPE, METHOD) \ + case FieldDescriptor::CPPTYPE_##CPPTYPE: \ + to_reflection->Set##METHOD(to, field, \ + from_reflection->Get##METHOD(from, field)); \ + break; + + HANDLE_TYPE(INT32 , Int32 ); + HANDLE_TYPE(INT64 , Int64 ); + HANDLE_TYPE(UINT32, UInt32); + HANDLE_TYPE(UINT64, UInt64); + HANDLE_TYPE(FLOAT , Float ); + HANDLE_TYPE(DOUBLE, Double); + HANDLE_TYPE(BOOL , Bool ); + HANDLE_TYPE(STRING, String); + HANDLE_TYPE(ENUM , Enum ); +#undef HANDLE_TYPE + + case FieldDescriptor::CPPTYPE_MESSAGE: + to_reflection->MutableMessage(to, field)->MergeFrom( + from_reflection->GetMessage(from, field)); + break; + } + } + } + + to_reflection->MutableUnknownFields(to)->MergeFrom( + from_reflection->GetUnknownFields(from)); +} + +void ReflectionOps::Clear(Message* message) { + const Reflection* reflection = message->GetReflection(); + + vector fields; + reflection->ListFields(*message, &fields); + for (int i = 0; i < fields.size(); i++) { + reflection->ClearField(message, fields[i]); + } + + reflection->MutableUnknownFields(message)->Clear(); +} + +bool ReflectionOps::IsInitialized(const Message& message) { + const Descriptor* descriptor = message.GetDescriptor(); + const Reflection* reflection = message.GetReflection(); + + // Check required fields of this message. + for (int i = 0; i < descriptor->field_count(); i++) { + if (descriptor->field(i)->is_required()) { + if (!reflection->HasField(message, descriptor->field(i))) { + return false; + } + } + } + + // Check that sub-messages are initialized. 
+ vector fields; + reflection->ListFields(message, &fields); + for (int i = 0; i < fields.size(); i++) { + const FieldDescriptor* field = fields[i]; + if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + if (field->is_repeated()) { + int size = reflection->FieldSize(message, field); + + for (int i = 0; i < size; i++) { + if (!reflection->GetRepeatedMessage(message, field, i) + .IsInitialized()) { + return false; + } + } + } else { + if (!reflection->GetMessage(message, field).IsInitialized()) { + return false; + } + } + } + } + + return true; +} + +void ReflectionOps::DiscardUnknownFields(Message* message) { + const Reflection* reflection = message->GetReflection(); + + reflection->MutableUnknownFields(message)->Clear(); + + vector fields; + reflection->ListFields(*message, &fields); + for (int i = 0; i < fields.size(); i++) { + const FieldDescriptor* field = fields[i]; + if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + if (field->is_repeated()) { + int size = reflection->FieldSize(*message, field); + for (int i = 0; i < size; i++) { + reflection->MutableRepeatedMessage(message, field, i) + ->DiscardUnknownFields(); + } + } else { + reflection->MutableMessage(message, field)->DiscardUnknownFields(); + } + } + } +} + +static string SubMessagePrefix(const string& prefix, + const FieldDescriptor* field, + int index) { + string result(prefix); + if (field->is_extension()) { + result.append("("); + result.append(field->full_name()); + result.append(")"); + } else { + result.append(field->name()); + } + if (index != -1) { + result.append("["); + result.append(SimpleItoa(index)); + result.append("]"); + } + result.append("."); + return result; +} + +void ReflectionOps::FindInitializationErrors( + const Message& message, + const string& prefix, + vector* errors) { + const Descriptor* descriptor = message.GetDescriptor(); + const Reflection* reflection = message.GetReflection(); + + // Check required fields of this message. + for (int i = 0; i < descriptor->field_count(); i++) { + if (descriptor->field(i)->is_required()) { + if (!reflection->HasField(message, descriptor->field(i))) { + errors->push_back(prefix + descriptor->field(i)->name()); + } + } + } + + // Check sub-messages. + vector fields; + reflection->ListFields(message, &fields); + for (int i = 0; i < fields.size(); i++) { + const FieldDescriptor* field = fields[i]; + if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + + if (field->is_repeated()) { + int size = reflection->FieldSize(message, field); + + for (int i = 0; i < size; i++) { + const Message& sub_message = + reflection->GetRepeatedMessage(message, field, i); + FindInitializationErrors(sub_message, + SubMessagePrefix(prefix, field, i), + errors); + } + } else { + const Message& sub_message = reflection->GetMessage(message, field); + FindInitializationErrors(sub_message, + SubMessagePrefix(prefix, field, -1), + errors); + } + } + } +} + +} // namespace internal +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/reflection_ops.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/reflection_ops.h new file mode 100644 index 0000000000..355a0a5d26 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/reflection_ops.h @@ -0,0 +1,80 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This header is logically internal, but is made public because it is used +// from protocol-compiler-generated code, which may reside in other components. + +#ifndef GOOGLE_PROTOBUF_REFLECTION_OPS_H__ +#define GOOGLE_PROTOBUF_REFLECTION_OPS_H__ + +#include + +namespace google { +namespace protobuf { +namespace internal { + +// Basic operations that can be performed using reflection. +// These can be used as a cheap way to implement the corresponding +// methods of the Message interface, though they are likely to be +// slower than implementations tailored for the specific message type. +// +// This class should stay limited to operations needed to implement +// the Message interface. +// +// This class is really a namespace that contains only static methods. +class LIBPROTOBUF_EXPORT ReflectionOps { + public: + static void Copy(const Message& from, Message* to); + static void Merge(const Message& from, Message* to); + static void Clear(Message* message); + static bool IsInitialized(const Message& message); + static void DiscardUnknownFields(Message* message); + + // Finds all unset required fields in the message and adds their full + // paths (e.g. "foo.bar[5].baz") to *names. "prefix" will be attached to + // the front of each name. + static void FindInitializationErrors(const Message& message, + const string& prefix, + vector* errors); + + private: + // All methods are static. No need to construct. 
+ GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ReflectionOps); +}; + +} // namespace internal +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_REFLECTION_OPS_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/reflection_ops_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/reflection_ops_unittest.cc new file mode 100644 index 0000000000..1cd56f1e64 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/reflection_ops_unittest.cc @@ -0,0 +1,405 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include + +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace internal { +namespace { + +TEST(ReflectionOpsTest, SanityCheck) { + unittest::TestAllTypes message; + + TestUtil::SetAllFields(&message); + TestUtil::ExpectAllFieldsSet(message); +} + +TEST(ReflectionOpsTest, Copy) { + unittest::TestAllTypes message, message2; + + TestUtil::SetAllFields(&message); + + ReflectionOps::Copy(message, &message2); + + TestUtil::ExpectAllFieldsSet(message2); + + // Copying from self should be a no-op. + ReflectionOps::Copy(message2, &message2); + TestUtil::ExpectAllFieldsSet(message2); +} + +TEST(ReflectionOpsTest, CopyExtensions) { + unittest::TestAllExtensions message, message2; + + TestUtil::SetAllExtensions(&message); + + ReflectionOps::Copy(message, &message2); + + TestUtil::ExpectAllExtensionsSet(message2); +} + +TEST(ReflectionOpsTest, Merge) { + // Note: Copy is implemented in terms of Merge() so technically the Copy + // test already tested most of this. 
+ + unittest::TestAllTypes message, message2; + + TestUtil::SetAllFields(&message); + + // This field will test merging into an empty spot. + message2.set_optional_int32(message.optional_int32()); + message.clear_optional_int32(); + + // This tests overwriting. + message2.set_optional_string(message.optional_string()); + message.set_optional_string("something else"); + + // This tests concatenating. + message2.add_repeated_int32(message.repeated_int32(1)); + int32 i = message.repeated_int32(0); + message.clear_repeated_int32(); + message.add_repeated_int32(i); + + ReflectionOps::Merge(message2, &message); + + TestUtil::ExpectAllFieldsSet(message); +} + +TEST(ReflectionOpsTest, MergeExtensions) { + // Note: Copy is implemented in terms of Merge() so technically the Copy + // test already tested most of this. + + unittest::TestAllExtensions message, message2; + + TestUtil::SetAllExtensions(&message); + + // This field will test merging into an empty spot. + message2.SetExtension(unittest::optional_int32_extension, + message.GetExtension(unittest::optional_int32_extension)); + message.ClearExtension(unittest::optional_int32_extension); + + // This tests overwriting. + message2.SetExtension(unittest::optional_string_extension, + message.GetExtension(unittest::optional_string_extension)); + message.SetExtension(unittest::optional_string_extension, "something else"); + + // This tests concatenating. + message2.AddExtension(unittest::repeated_int32_extension, + message.GetExtension(unittest::repeated_int32_extension, 1)); + int32 i = message.GetExtension(unittest::repeated_int32_extension, 0); + message.ClearExtension(unittest::repeated_int32_extension); + message.AddExtension(unittest::repeated_int32_extension, i); + + ReflectionOps::Merge(message2, &message); + + TestUtil::ExpectAllExtensionsSet(message); +} + +TEST(ReflectionOpsTest, MergeUnknown) { + // Test that the messages' UnknownFieldSets are correctly merged. + unittest::TestEmptyMessage message1, message2; + message1.mutable_unknown_fields()->AddVarint(1234, 1); + message2.mutable_unknown_fields()->AddVarint(1234, 2); + + ReflectionOps::Merge(message2, &message1); + + ASSERT_EQ(2, message1.unknown_fields().field_count()); + ASSERT_EQ(UnknownField::TYPE_VARINT, + message1.unknown_fields().field(0).type()); + EXPECT_EQ(1, message1.unknown_fields().field(0).varint()); + ASSERT_EQ(UnknownField::TYPE_VARINT, + message1.unknown_fields().field(1).type()); + EXPECT_EQ(2, message1.unknown_fields().field(1).varint()); +} + +#ifdef GTEST_HAS_DEATH_TEST + +TEST(ReflectionOpsTest, MergeFromSelf) { + // Note: Copy is implemented in terms of Merge() so technically the Copy + // test already tested most of this. + + unittest::TestAllTypes message; + + EXPECT_DEATH( + ReflectionOps::Merge(message, &message), + "&from"); +} + +#endif // GTEST_HAS_DEATH_TEST + +TEST(ReflectionOpsTest, Clear) { + unittest::TestAllTypes message; + + TestUtil::SetAllFields(&message); + + ReflectionOps::Clear(&message); + + TestUtil::ExpectClear(message); + + // Check that getting embedded messages returns the objects created during + // SetAllFields() rather than default instances. 
+ EXPECT_NE(&unittest::TestAllTypes::OptionalGroup::default_instance(), + &message.optionalgroup()); + EXPECT_NE(&unittest::TestAllTypes::NestedMessage::default_instance(), + &message.optional_nested_message()); + EXPECT_NE(&unittest::ForeignMessage::default_instance(), + &message.optional_foreign_message()); + EXPECT_NE(&unittest_import::ImportMessage::default_instance(), + &message.optional_import_message()); +} + +TEST(ReflectionOpsTest, ClearExtensions) { + unittest::TestAllExtensions message; + + TestUtil::SetAllExtensions(&message); + + ReflectionOps::Clear(&message); + + TestUtil::ExpectExtensionsClear(message); + + // Check that getting embedded messages returns the objects created during + // SetAllExtensions() rather than default instances. + EXPECT_NE(&unittest::OptionalGroup_extension::default_instance(), + &message.GetExtension(unittest::optionalgroup_extension)); + EXPECT_NE(&unittest::TestAllTypes::NestedMessage::default_instance(), + &message.GetExtension(unittest::optional_nested_message_extension)); + EXPECT_NE(&unittest::ForeignMessage::default_instance(), + &message.GetExtension( + unittest::optional_foreign_message_extension)); + EXPECT_NE(&unittest_import::ImportMessage::default_instance(), + &message.GetExtension(unittest::optional_import_message_extension)); +} + +TEST(ReflectionOpsTest, ClearUnknown) { + // Test that the message's UnknownFieldSet is correctly cleared. + unittest::TestEmptyMessage message; + message.mutable_unknown_fields()->AddVarint(1234, 1); + + ReflectionOps::Clear(&message); + + EXPECT_EQ(0, message.unknown_fields().field_count()); +} + +TEST(ReflectionOpsTest, DiscardUnknownFields) { + unittest::TestAllTypes message; + TestUtil::SetAllFields(&message); + + // Set some unknown fields in message. + message.mutable_unknown_fields() + ->AddVarint(123456, 654321); + message.mutable_optional_nested_message() + ->mutable_unknown_fields() + ->AddVarint(123456, 654321); + message.mutable_repeated_nested_message(0) + ->mutable_unknown_fields() + ->AddVarint(123456, 654321); + + EXPECT_EQ(1, message.unknown_fields().field_count()); + EXPECT_EQ(1, message.optional_nested_message() + .unknown_fields().field_count()); + EXPECT_EQ(1, message.repeated_nested_message(0) + .unknown_fields().field_count()); + + // Discard them. + ReflectionOps::DiscardUnknownFields(&message); + TestUtil::ExpectAllFieldsSet(message); + + EXPECT_EQ(0, message.unknown_fields().field_count()); + EXPECT_EQ(0, message.optional_nested_message() + .unknown_fields().field_count()); + EXPECT_EQ(0, message.repeated_nested_message(0) + .unknown_fields().field_count()); +} + +TEST(ReflectionOpsTest, DiscardUnknownExtensions) { + unittest::TestAllExtensions message; + TestUtil::SetAllExtensions(&message); + + // Set some unknown fields. + message.mutable_unknown_fields() + ->AddVarint(123456, 654321); + message.MutableExtension(unittest::optional_nested_message_extension) + ->mutable_unknown_fields() + ->AddVarint(123456, 654321); + message.MutableExtension(unittest::repeated_nested_message_extension, 0) + ->mutable_unknown_fields() + ->AddVarint(123456, 654321); + + EXPECT_EQ(1, message.unknown_fields().field_count()); + EXPECT_EQ(1, + message.GetExtension(unittest::optional_nested_message_extension) + .unknown_fields().field_count()); + EXPECT_EQ(1, + message.GetExtension(unittest::repeated_nested_message_extension, 0) + .unknown_fields().field_count()); + + // Discard them. 
+ ReflectionOps::DiscardUnknownFields(&message); + TestUtil::ExpectAllExtensionsSet(message); + + EXPECT_EQ(0, message.unknown_fields().field_count()); + EXPECT_EQ(0, + message.GetExtension(unittest::optional_nested_message_extension) + .unknown_fields().field_count()); + EXPECT_EQ(0, + message.GetExtension(unittest::repeated_nested_message_extension, 0) + .unknown_fields().field_count()); +} + +TEST(ReflectionOpsTest, IsInitialized) { + unittest::TestRequired message; + + EXPECT_FALSE(ReflectionOps::IsInitialized(message)); + message.set_a(1); + EXPECT_FALSE(ReflectionOps::IsInitialized(message)); + message.set_b(2); + EXPECT_FALSE(ReflectionOps::IsInitialized(message)); + message.set_c(3); + EXPECT_TRUE(ReflectionOps::IsInitialized(message)); +} + +TEST(ReflectionOpsTest, ForeignIsInitialized) { + unittest::TestRequiredForeign message; + + // Starts out initialized because the foreign message is itself an optional + // field. + EXPECT_TRUE(ReflectionOps::IsInitialized(message)); + + // Once we create that field, the message is no longer initialized. + message.mutable_optional_message(); + EXPECT_FALSE(ReflectionOps::IsInitialized(message)); + + // Initialize it. Now we're initialized. + message.mutable_optional_message()->set_a(1); + message.mutable_optional_message()->set_b(2); + message.mutable_optional_message()->set_c(3); + EXPECT_TRUE(ReflectionOps::IsInitialized(message)); + + // Add a repeated version of the message. No longer initialized. + unittest::TestRequired* sub_message = message.add_repeated_message(); + EXPECT_FALSE(ReflectionOps::IsInitialized(message)); + + // Initialize that repeated version. + sub_message->set_a(1); + sub_message->set_b(2); + sub_message->set_c(3); + EXPECT_TRUE(ReflectionOps::IsInitialized(message)); +} + +TEST(ReflectionOpsTest, ExtensionIsInitialized) { + unittest::TestAllExtensions message; + + // Starts out initialized because the foreign message is itself an optional + // field. + EXPECT_TRUE(ReflectionOps::IsInitialized(message)); + + // Once we create that field, the message is no longer initialized. + message.MutableExtension(unittest::TestRequired::single); + EXPECT_FALSE(ReflectionOps::IsInitialized(message)); + + // Initialize it. Now we're initialized. + message.MutableExtension(unittest::TestRequired::single)->set_a(1); + message.MutableExtension(unittest::TestRequired::single)->set_b(2); + message.MutableExtension(unittest::TestRequired::single)->set_c(3); + EXPECT_TRUE(ReflectionOps::IsInitialized(message)); + + // Add a repeated version of the message. No longer initialized. + message.AddExtension(unittest::TestRequired::multi); + EXPECT_FALSE(ReflectionOps::IsInitialized(message)); + + // Initialize that repeated version. 
+ message.MutableExtension(unittest::TestRequired::multi, 0)->set_a(1); + message.MutableExtension(unittest::TestRequired::multi, 0)->set_b(2); + message.MutableExtension(unittest::TestRequired::multi, 0)->set_c(3); + EXPECT_TRUE(ReflectionOps::IsInitialized(message)); +} + +static string FindInitializationErrors(const Message& message) { + vector errors; + ReflectionOps::FindInitializationErrors(message, "", &errors); + return JoinStrings(errors, ","); +} + +TEST(ReflectionOpsTest, FindInitializationErrors) { + unittest::TestRequired message; + EXPECT_EQ("a,b,c", FindInitializationErrors(message)); +} + +TEST(ReflectionOpsTest, FindForeignInitializationErrors) { + unittest::TestRequiredForeign message; + message.mutable_optional_message(); + message.add_repeated_message(); + message.add_repeated_message(); + EXPECT_EQ("optional_message.a," + "optional_message.b," + "optional_message.c," + "repeated_message[0].a," + "repeated_message[0].b," + "repeated_message[0].c," + "repeated_message[1].a," + "repeated_message[1].b," + "repeated_message[1].c", + FindInitializationErrors(message)); +} + +TEST(ReflectionOpsTest, FindExtensionInitializationErrors) { + unittest::TestAllExtensions message; + message.MutableExtension(unittest::TestRequired::single); + message.AddExtension(unittest::TestRequired::multi); + message.AddExtension(unittest::TestRequired::multi); + EXPECT_EQ("(protobuf_unittest.TestRequired.single).a," + "(protobuf_unittest.TestRequired.single).b," + "(protobuf_unittest.TestRequired.single).c," + "(protobuf_unittest.TestRequired.multi)[0].a," + "(protobuf_unittest.TestRequired.multi)[0].b," + "(protobuf_unittest.TestRequired.multi)[0].c," + "(protobuf_unittest.TestRequired.multi)[1].a," + "(protobuf_unittest.TestRequired.multi)[1].b," + "(protobuf_unittest.TestRequired.multi)[1].c", + FindInitializationErrors(message)); +} + +} // namespace +} // namespace internal +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/repeated_field.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/repeated_field.cc new file mode 100644 index 0000000000..09377742af --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/repeated_field.cc @@ -0,0 +1,98 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
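The tests above pin down the merge semantics that ReflectionOps::Merge shares with generated MergeFrom(): singular fields from the source overwrite the destination, repeated fields are appended, and unknown fields are combined. A rough sketch of those semantics from calling code, reusing the TestAllTypes message exercised throughout these tests (the generated-header include path is an assumption):

#include <google/protobuf/reflection_ops.h>
#include <google/protobuf/unittest.pb.h>   // assumed path of the test proto

using google::protobuf::internal::ReflectionOps;

void MergeSketch() {
  protobuf_unittest::TestAllTypes src, dst;

  dst.set_optional_int32(1);
  src.set_optional_int32(2);     // singular: the source value wins
  dst.add_repeated_int32(10);
  src.add_repeated_int32(20);    // repeated: appended, not replaced

  ReflectionOps::Merge(src, &dst);   // note: &src must not equal &dst
  // Afterwards: dst.optional_int32() == 2, dst.repeated_int32() == {10, 20}
}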
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include + +#include +#include + +namespace google { +namespace protobuf { + +namespace internal { + +void RepeatedPtrFieldBase::Reserve(int new_size) { + if (total_size_ >= new_size) return; + + void** old_elements = elements_; + total_size_ = max(total_size_ * 2, new_size); + elements_ = new void*[total_size_]; + memcpy(elements_, old_elements, allocated_size_ * sizeof(elements_[0])); + if (old_elements != initial_space_) { + delete [] old_elements; + } +} + +void RepeatedPtrFieldBase::Swap(RepeatedPtrFieldBase* other) { + void** swap_elements = elements_; + int swap_current_size = current_size_; + int swap_allocated_size = allocated_size_; + int swap_total_size = total_size_; + // We may not be using initial_space_ but it's not worth checking. Just + // copy it anyway. + void* swap_initial_space[kInitialSize]; + memcpy(swap_initial_space, initial_space_, sizeof(initial_space_)); + + elements_ = other->elements_; + current_size_ = other->current_size_; + allocated_size_ = other->allocated_size_; + total_size_ = other->total_size_; + memcpy(initial_space_, other->initial_space_, sizeof(initial_space_)); + + other->elements_ = swap_elements; + other->current_size_ = swap_current_size; + other->allocated_size_ = swap_allocated_size; + other->total_size_ = swap_total_size; + memcpy(other->initial_space_, swap_initial_space, sizeof(swap_initial_space)); + + if (elements_ == other->initial_space_) { + elements_ = initial_space_; + } + if (other->elements_ == initial_space_) { + other->elements_ = other->initial_space_; + } +} + +string* StringTypeHandlerBase::New() { + return new string; +} +void StringTypeHandlerBase::Delete(string* value) { + delete value; +} + +} // namespace internal + + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/repeated_field.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/repeated_field.h new file mode 100644 index 0000000000..aed4ce9f25 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/repeated_field.h @@ -0,0 +1,1295 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. 
+// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// RepeatedField and RepeatedPtrField are used by generated protocol message +// classes to manipulate repeated fields. These classes are very similar to +// STL's vector, but include a number of optimizations found to be useful +// specifically in the case of Protocol Buffers. RepeatedPtrField is +// particularly different from STL vector as it manages ownership of the +// pointers that it contains. +// +// Typically, clients should not need to access RepeatedField objects directly, +// but should instead use the accessor functions generated automatically by the +// protocol compiler. + +#ifndef GOOGLE_PROTOBUF_REPEATED_FIELD_H__ +#define GOOGLE_PROTOBUF_REPEATED_FIELD_H__ + +#include +#include +#include +#include + +namespace google { + +namespace protobuf { + +class Message; + +namespace internal { + +// We need this (from generated_message_reflection.cc). +LIBPROTOBUF_EXPORT int StringSpaceUsedExcludingSelf(const string& str); + +} // namespace internal + +// RepeatedField is used to represent repeated fields of a primitive type (in +// other words, everything except strings and nested Messages). Most users will +// not ever use a RepeatedField directly; they will use the get-by-index, +// set-by-index, and add accessors that are generated for all repeated fields. +template +class RepeatedField { + public: + RepeatedField(); + RepeatedField(const RepeatedField& other); + ~RepeatedField(); + + RepeatedField& operator=(const RepeatedField& other); + + int size() const; + + const Element& Get(int index) const; + Element* Mutable(int index); + void Set(int index, const Element& value); + void Add(const Element& value); + Element* Add(); + // Remove the last element in the array. + // We don't provide a way to remove any element other than the last + // because it invites inefficient use, such as O(n^2) filtering loops + // that should have been O(n). If you want to remove an element other + // than the last, the best way to do it is to re-arrange the elements + // so that the one you want removed is at the end, then call RemoveLast(). + void RemoveLast(); + void Clear(); + void MergeFrom(const RepeatedField& other); + void CopyFrom(const RepeatedField& other); + + // Reserve space to expand the field to at least the given size. If the + // array is grown, it will always be at least doubled in size. 
+ void Reserve(int new_size); + + // Resize the RepeatedField to a new, smaller size. This is O(1). + void Truncate(int new_size); + + void AddAlreadyReserved(const Element& value); + Element* AddAlreadyReserved(); + int Capacity() const; + + // Gets the underlying array. This pointer is possibly invalidated by + // any add or remove operation. + Element* mutable_data(); + const Element* data() const; + + // Swap entire contents with "other". + void Swap(RepeatedField* other); + + // Swap two elements. + void SwapElements(int index1, int index2); + + // STL-like iterator support + typedef Element* iterator; + typedef const Element* const_iterator; + typedef Element value_type; + + iterator begin(); + const_iterator begin() const; + iterator end(); + const_iterator end() const; + + // Returns the number of bytes used by the repeated field, excluding + // sizeof(*this) + int SpaceUsedExcludingSelf() const; + + private: + static const int kInitialSize = 4; + + Element* elements_; + int current_size_; + int total_size_; + + Element initial_space_[kInitialSize]; + + // Move the contents of |from| into |to|, possibly clobbering |from| in the + // process. For primitive types this is just a memcpy(), but it could be + // specialized for non-primitive types to, say, swap each element instead. + void MoveArray(Element to[], Element from[], int size); + + // Copy the elements of |from| into |to|. + void CopyArray(Element to[], const Element from[], int size); +}; + +namespace internal { +template class RepeatedPtrIterator; +template class RepeatedPtrOverPtrsIterator; +} // namespace internal + +namespace internal { + +// This is the common base class for RepeatedPtrFields. It deals only in void* +// pointers. Users should not use this interface directly. +// +// The methods of this interface correspond to the methods of RepeatedPtrField, +// but may have a template argument called TypeHandler. Its signature is: +// class TypeHandler { +// public: +// typedef MyType Type; +// static Type* New(); +// static void Delete(Type*); +// static void Clear(Type*); +// static void Merge(const Type& from, Type* to); +// +// // Only needs to be implemented if SpaceUsedExcludingSelf() is called. +// static int SpaceUsed(const Type&); +// }; +class LIBPROTOBUF_EXPORT RepeatedPtrFieldBase { + protected: + // The reflection implementation needs to call protected methods directly, + // reinterpreting pointers as being to Message instead of a specific Message + // subclass. + friend class GeneratedMessageReflection; + + // ExtensionSet stores repeated message extensions as + // RepeatedPtrField, but non-lite ExtensionSets need to + // implement SpaceUsed(), and thus need to call SpaceUsedExcludingSelf() + // reinterpreting MessageLite as Message. ExtensionSet also needs to make + // use of AddFromCleared(), which is not part of the public interface. + friend class ExtensionSet; + + RepeatedPtrFieldBase(); + + // Must be called from destructor. + template + void Destroy(); + + int size() const; + + template + const typename TypeHandler::Type& Get(int index) const; + template + typename TypeHandler::Type* Mutable(int index); + template + typename TypeHandler::Type* Add(); + template + void RemoveLast(); + template + void Clear(); + template + void MergeFrom(const RepeatedPtrFieldBase& other); + template + void CopyFrom(const RepeatedPtrFieldBase& other); + + void Reserve(int new_size); + + int Capacity() const; + + // Used for constructing iterators. 
+ void* const* raw_data() const; + void** raw_mutable_data() const; + + template + typename TypeHandler::Type** mutable_data(); + template + const typename TypeHandler::Type* const* data() const; + + void Swap(RepeatedPtrFieldBase* other); + + void SwapElements(int index1, int index2); + + template + int SpaceUsedExcludingSelf() const; + + + // Advanced memory management -------------------------------------- + + // Like Add(), but if there are no cleared objects to use, returns NULL. + template + typename TypeHandler::Type* AddFromCleared(); + + template + void AddAllocated(typename TypeHandler::Type* value); + template + typename TypeHandler::Type* ReleaseLast(); + + int ClearedCount() const; + template + void AddCleared(typename TypeHandler::Type* value); + template + typename TypeHandler::Type* ReleaseCleared(); + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RepeatedPtrFieldBase); + + static const int kInitialSize = 4; + + void** elements_; + int current_size_; + int allocated_size_; + int total_size_; + + void* initial_space_[kInitialSize]; + + template + static inline typename TypeHandler::Type* cast(void* element) { + return reinterpret_cast(element); + } + template + static inline const typename TypeHandler::Type* cast(const void* element) { + return reinterpret_cast(element); + } +}; + +template +class GenericTypeHandler { + public: + typedef GenericType Type; + static GenericType* New() { return new GenericType; } + static void Delete(GenericType* value) { delete value; } + static void Clear(GenericType* value) { value->Clear(); } + static void Merge(const GenericType& from, GenericType* to) { + to->MergeFrom(from); + } + static int SpaceUsed(const GenericType& value) { return value.SpaceUsed(); } +}; + +template <> +inline void GenericTypeHandler::Merge( + const MessageLite& from, MessageLite* to) { + to->CheckTypeAndMergeFrom(from); +} + +// HACK: If a class is declared as DLL-exported in MSVC, it insists on +// generating copies of all its methods -- even inline ones -- to include +// in the DLL. But SpaceUsed() calls StringSpaceUsedExcludingSelf() which +// isn't in the lite library, therefore the lite library cannot link if +// StringTypeHandler is exported. So, we factor out StringTypeHandlerBase, +// export that, then make StringTypeHandler be a subclass which is NOT +// exported. +// TODO(kenton): There has to be a better way. +class LIBPROTOBUF_EXPORT StringTypeHandlerBase { + public: + typedef string Type; + static string* New(); + static void Delete(string* value); + static void Clear(string* value) { value->clear(); } + static void Merge(const string& from, string* to) { *to = from; } +}; + +class StringTypeHandler : public StringTypeHandlerBase { + public: + static int SpaceUsed(const string& value) { + return sizeof(value) + StringSpaceUsedExcludingSelf(value); + } +}; + + +} // namespace internal + +// RepeatedPtrField is like RepeatedField, but used for repeated strings or +// Messages. +template +class RepeatedPtrField : public internal::RepeatedPtrFieldBase { + public: + RepeatedPtrField(); + RepeatedPtrField(const RepeatedPtrField& other); + ~RepeatedPtrField(); + + RepeatedPtrField& operator=(const RepeatedPtrField& other); + + int size() const; + + const Element& Get(int index) const; + Element* Mutable(int index); + Element* Add(); + void RemoveLast(); // Remove the last element in the array. 
+ void Clear(); + void MergeFrom(const RepeatedPtrField& other); + void CopyFrom(const RepeatedPtrField& other); + + // Reserve space to expand the field to at least the given size. This only + // resizes the pointer array; it doesn't allocate any objects. If the + // array is grown, it will always be at least doubled in size. + void Reserve(int new_size); + + int Capacity() const; + + // Gets the underlying array. This pointer is possibly invalidated by + // any add or remove operation. + Element** mutable_data(); + const Element* const* data() const; + + // Swap entire contents with "other". + void Swap(RepeatedPtrField* other); + + // Swap two elements. + void SwapElements(int index1, int index2); + + // STL-like iterator support + typedef internal::RepeatedPtrIterator iterator; + typedef internal::RepeatedPtrIterator const_iterator; + typedef Element value_type; + + iterator begin(); + const_iterator begin() const; + iterator end(); + const_iterator end() const; + + // Custom STL-like iterator that iterates over and returns the underlying + // pointers to Element rather than Element itself. + typedef internal::RepeatedPtrOverPtrsIterator pointer_iterator; + pointer_iterator pointer_begin(); + pointer_iterator pointer_end(); + + // Returns (an estimate of) the number of bytes used by the repeated field, + // excluding sizeof(*this). + int SpaceUsedExcludingSelf() const; + + // Advanced memory management -------------------------------------- + // When hardcore memory management becomes necessary -- as it often + // does here at Google -- the following methods may be useful. + + // Add an already-allocated object, passing ownership to the + // RepeatedPtrField. + void AddAllocated(Element* value); + // Remove the last element and return it, passing ownership to the + // caller. + // Requires: size() > 0 + Element* ReleaseLast(); + + // When elements are removed by calls to RemoveLast() or Clear(), they + // are not actually freed. Instead, they are cleared and kept so that + // they can be reused later. This can save lots of CPU time when + // repeatedly reusing a protocol message for similar purposes. + // + // Really, extremely hardcore programs may actually want to manipulate + // these objects to better-optimize memory management. These methods + // allow that. + + // Get the number of cleared objects that are currently being kept + // around for reuse. + int ClearedCount() const; + // Add an element to the pool of cleared objects, passing ownership to + // the RepeatedPtrField. The element must be cleared prior to calling + // this method. + void AddCleared(Element* value); + // Remove a single element from the cleared pool and return it, passing + // ownership to the caller. The element is guaranteed to be cleared. + // Requires: ClearedCount() > 0 + Element* ReleaseCleared(); + + protected: + // Note: RepeatedPtrField SHOULD NOT be subclassed by users. We only + // subclass it in one place as a hack for compatibility with proto1. The + // subclass needs to know about TypeHandler in order to call protected + // methods on RepeatedPtrFieldBase. 
+ class TypeHandler; + +}; + +// implementation ==================================================== + +template +inline RepeatedField::RepeatedField() + : elements_(initial_space_), + current_size_(0), + total_size_(kInitialSize) { +} + +template +inline RepeatedField::RepeatedField(const RepeatedField& other) + : elements_(initial_space_), + current_size_(0), + total_size_(kInitialSize) { + CopyFrom(other); +} + +template +RepeatedField::~RepeatedField() { + if (elements_ != initial_space_) { + delete [] elements_; + } +} + +template +inline RepeatedField& +RepeatedField::operator=(const RepeatedField& other) { + CopyFrom(other); + return *this; +} + +template +inline int RepeatedField::size() const { + return current_size_; +} + +template +inline int RepeatedField::Capacity() const { + return total_size_; +} + +template +inline void RepeatedField::AddAlreadyReserved(const Element& value) { + GOOGLE_DCHECK_LT(size(), Capacity()); + elements_[current_size_++] = value; +} + +template +inline Element* RepeatedField::AddAlreadyReserved() { + GOOGLE_DCHECK_LT(size(), Capacity()); + return &elements_[current_size_++]; +} + +template +inline const Element& RepeatedField::Get(int index) const { + GOOGLE_DCHECK_LT(index, size()); + return elements_[index]; +} + +template +inline Element* RepeatedField::Mutable(int index) { + GOOGLE_DCHECK_LT(index, size()); + return elements_ + index; +} + +template +inline void RepeatedField::Set(int index, const Element& value) { + GOOGLE_DCHECK_LT(index, size()); + elements_[index] = value; +} + +template +inline void RepeatedField::Add(const Element& value) { + if (current_size_ == total_size_) Reserve(total_size_ + 1); + elements_[current_size_++] = value; +} + +template +inline Element* RepeatedField::Add() { + if (current_size_ == total_size_) Reserve(total_size_ + 1); + return &elements_[current_size_++]; +} + +template +inline void RepeatedField::RemoveLast() { + GOOGLE_DCHECK_GT(current_size_, 0); + --current_size_; +} + +template +inline void RepeatedField::Clear() { + current_size_ = 0; +} + +template +inline void RepeatedField::MergeFrom(const RepeatedField& other) { + Reserve(current_size_ + other.current_size_); + CopyArray(elements_ + current_size_, other.elements_, other.current_size_); + current_size_ += other.current_size_; +} + +template +inline void RepeatedField::CopyFrom(const RepeatedField& other) { + Clear(); + MergeFrom(other); +} + +template +inline Element* RepeatedField::mutable_data() { + return elements_; +} + +template +inline const Element* RepeatedField::data() const { + return elements_; +} + + +template +void RepeatedField::Swap(RepeatedField* other) { + Element* swap_elements = elements_; + int swap_current_size = current_size_; + int swap_total_size = total_size_; + // We may not be using initial_space_ but it's not worth checking. Just + // copy it anyway. 
+ Element swap_initial_space[kInitialSize]; + MoveArray(swap_initial_space, initial_space_, kInitialSize); + + elements_ = other->elements_; + current_size_ = other->current_size_; + total_size_ = other->total_size_; + MoveArray(initial_space_, other->initial_space_, kInitialSize); + + other->elements_ = swap_elements; + other->current_size_ = swap_current_size; + other->total_size_ = swap_total_size; + MoveArray(other->initial_space_, swap_initial_space, kInitialSize); + + if (elements_ == other->initial_space_) { + elements_ = initial_space_; + } + if (other->elements_ == initial_space_) { + other->elements_ = other->initial_space_; + } +} + +template +void RepeatedField::SwapElements(int index1, int index2) { + std::swap(elements_[index1], elements_[index2]); +} + +template +inline typename RepeatedField::iterator +RepeatedField::begin() { + return elements_; +} +template +inline typename RepeatedField::const_iterator +RepeatedField::begin() const { + return elements_; +} +template +inline typename RepeatedField::iterator +RepeatedField::end() { + return elements_ + current_size_; +} +template +inline typename RepeatedField::const_iterator +RepeatedField::end() const { + return elements_ + current_size_; +} + +template +inline int RepeatedField::SpaceUsedExcludingSelf() const { + return (elements_ != initial_space_) ? total_size_ * sizeof(elements_[0]) : 0; +} + +// Avoid inlining of Reserve(): new, memcpy, and delete[] lead to a significant +// amount of code bloat. +template +void RepeatedField::Reserve(int new_size) { + if (total_size_ >= new_size) return; + + Element* old_elements = elements_; + total_size_ = max(total_size_ * 2, new_size); + elements_ = new Element[total_size_]; + MoveArray(elements_, old_elements, current_size_); + if (old_elements != initial_space_) { + delete [] old_elements; + } +} + +template +inline void RepeatedField::Truncate(int new_size) { + GOOGLE_DCHECK_LE(new_size, current_size_); + current_size_ = new_size; +} + +template +inline void RepeatedField::MoveArray( + Element to[], Element from[], int array_size) { + memcpy(to, from, array_size * sizeof(Element)); +} + +template +inline void RepeatedField::CopyArray( + Element to[], const Element from[], int array_size) { + memcpy(to, from, array_size * sizeof(Element)); +} + + +// ------------------------------------------------------------------- + +namespace internal { + +inline RepeatedPtrFieldBase::RepeatedPtrFieldBase() + : elements_(initial_space_), + current_size_(0), + allocated_size_(0), + total_size_(kInitialSize) { +} + +template +void RepeatedPtrFieldBase::Destroy() { + for (int i = 0; i < allocated_size_; i++) { + TypeHandler::Delete(cast(elements_[i])); + } + if (elements_ != initial_space_) { + delete [] elements_; + } +} + +inline int RepeatedPtrFieldBase::size() const { + return current_size_; +} + + +template +inline const typename TypeHandler::Type& +RepeatedPtrFieldBase::Get(int index) const { + GOOGLE_DCHECK_LT(index, size()); + return *cast(elements_[index]); +} + +template +inline typename TypeHandler::Type* +RepeatedPtrFieldBase::Mutable(int index) { + GOOGLE_DCHECK_LT(index, size()); + return cast(elements_[index]); +} + +template +inline typename TypeHandler::Type* RepeatedPtrFieldBase::Add() { + if (current_size_ < allocated_size_) { + return cast(elements_[current_size_++]); + } + if (allocated_size_ == total_size_) Reserve(total_size_ + 1); + ++allocated_size_; + typename TypeHandler::Type* result = TypeHandler::New(); + elements_[current_size_++] = result; + return result; +} 
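A minimal usage sketch (illustrative only, not part of the vendored upstream file) of the growth policy implemented by RepeatedField::Add() and Reserve() above; it assumes the header added by this diff is on the include path.

    #include <cstdio>
    #include <google/protobuf/repeated_field.h>

    int main() {
      google::protobuf::RepeatedField<int> f;

      // The first kInitialSize (4) elements fit in the inline initial_space_
      // buffer, so no heap allocation is reported.
      for (int i = 0; i < 4; ++i) f.Add(i);
      std::printf("capacity=%d heap bytes=%d\n", f.Capacity(),
                  f.SpaceUsedExcludingSelf());   // capacity=4 heap bytes=0

      // The fifth Add() calls Reserve(total_size_ + 1); Reserve grows to
      // max(total_size_ * 2, new_size), so the capacity doubles to 8.
      f.Add(4);
      std::printf("capacity=%d\n", f.Capacity());  // capacity=8

      // Asking for more than double is honored exactly (see the Reserve*
      // tests later in this diff).
      f.Reserve(100);
      std::printf("capacity=%d\n", f.Capacity());  // capacity=100
      return 0;
    }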
+ +template +inline void RepeatedPtrFieldBase::RemoveLast() { + GOOGLE_DCHECK_GT(current_size_, 0); + TypeHandler::Clear(cast(elements_[--current_size_])); +} + +template +void RepeatedPtrFieldBase::Clear() { + for (int i = 0; i < current_size_; i++) { + TypeHandler::Clear(cast(elements_[i])); + } + current_size_ = 0; +} + +template +inline void RepeatedPtrFieldBase::MergeFrom(const RepeatedPtrFieldBase& other) { + Reserve(current_size_ + other.current_size_); + for (int i = 0; i < other.current_size_; i++) { + TypeHandler::Merge(other.template Get(i), Add()); + } +} + +template +inline void RepeatedPtrFieldBase::CopyFrom(const RepeatedPtrFieldBase& other) { + RepeatedPtrFieldBase::Clear(); + RepeatedPtrFieldBase::MergeFrom(other); +} + +inline int RepeatedPtrFieldBase::Capacity() const { + return total_size_; +} + +inline void* const* RepeatedPtrFieldBase::raw_data() const { + return elements_; +} + +inline void** RepeatedPtrFieldBase::raw_mutable_data() const { + return elements_; +} + +template +inline typename TypeHandler::Type** RepeatedPtrFieldBase::mutable_data() { + // TODO(kenton): Breaks C++ aliasing rules. We should probably remove this + // method entirely. + return reinterpret_cast(elements_); +} + +template +inline const typename TypeHandler::Type* const* +RepeatedPtrFieldBase::data() const { + // TODO(kenton): Breaks C++ aliasing rules. We should probably remove this + // method entirely. + return reinterpret_cast(elements_); +} + +inline void RepeatedPtrFieldBase::SwapElements(int index1, int index2) { + std::swap(elements_[index1], elements_[index2]); +} + +template +inline int RepeatedPtrFieldBase::SpaceUsedExcludingSelf() const { + int allocated_bytes = + (elements_ != initial_space_) ? total_size_ * sizeof(elements_[0]) : 0; + for (int i = 0; i < allocated_size_; ++i) { + allocated_bytes += TypeHandler::SpaceUsed(*cast(elements_[i])); + } + return allocated_bytes; +} + +template +inline typename TypeHandler::Type* RepeatedPtrFieldBase::AddFromCleared() { + if (current_size_ < allocated_size_) { + return cast(elements_[current_size_++]); + } else { + return NULL; + } +} + +template +void RepeatedPtrFieldBase::AddAllocated( + typename TypeHandler::Type* value) { + // Make room for the new pointer. + if (current_size_ == total_size_) { + // The array is completely full with no cleared objects, so grow it. + Reserve(total_size_ + 1); + ++allocated_size_; + } else if (allocated_size_ == total_size_) { + // There is no more space in the pointer array because it contains some + // cleared objects awaiting reuse. We don't want to grow the array in this + // case because otherwise a loop calling AddAllocated() followed by Clear() + // would leak memory. + TypeHandler::Delete(cast(elements_[current_size_])); + } else if (current_size_ < allocated_size_) { + // We have some cleared objects. We don't care about their order, so we + // can just move the first one to the end to make space. + elements_[allocated_size_] = elements_[current_size_]; + ++allocated_size_; + } else { + // There are no cleared objects. + ++allocated_size_; + } + + elements_[current_size_++] = value; +} + +template +inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseLast() { + GOOGLE_DCHECK_GT(current_size_, 0); + typename TypeHandler::Type* result = + cast(elements_[--current_size_]); + --allocated_size_; + if (current_size_ < allocated_size_) { + // There are cleared elements on the end; replace the removed element + // with the last allocated element. 
+ elements_[current_size_] = elements_[allocated_size_]; + } + return result; +} + + +inline int RepeatedPtrFieldBase::ClearedCount() const { + return allocated_size_ - current_size_; +} + +template +inline void RepeatedPtrFieldBase::AddCleared( + typename TypeHandler::Type* value) { + if (allocated_size_ == total_size_) Reserve(total_size_ + 1); + elements_[allocated_size_++] = value; +} + +template +inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseCleared() { + GOOGLE_DCHECK_GT(allocated_size_, current_size_); + return cast(elements_[--allocated_size_]); +} + +} // namespace internal + +// ------------------------------------------------------------------- + +template +class RepeatedPtrField::TypeHandler + : public internal::GenericTypeHandler {}; + +template <> +class RepeatedPtrField::TypeHandler + : public internal::StringTypeHandler {}; + + +template +inline RepeatedPtrField::RepeatedPtrField() {} + +template +inline RepeatedPtrField::RepeatedPtrField( + const RepeatedPtrField& other) { + CopyFrom(other); +} + +template +RepeatedPtrField::~RepeatedPtrField() { + Destroy(); +} + +template +inline RepeatedPtrField& RepeatedPtrField::operator=( + const RepeatedPtrField& other) { + CopyFrom(other); + return *this; +} + +template +inline int RepeatedPtrField::size() const { + return RepeatedPtrFieldBase::size(); +} + +template +inline const Element& RepeatedPtrField::Get(int index) const { + return RepeatedPtrFieldBase::Get(index); +} + +template +inline Element* RepeatedPtrField::Mutable(int index) { + return RepeatedPtrFieldBase::Mutable(index); +} + +template +inline Element* RepeatedPtrField::Add() { + return RepeatedPtrFieldBase::Add(); +} + +template +inline void RepeatedPtrField::RemoveLast() { + RepeatedPtrFieldBase::RemoveLast(); +} + +template +inline void RepeatedPtrField::Clear() { + RepeatedPtrFieldBase::Clear(); +} + +template +inline void RepeatedPtrField::MergeFrom( + const RepeatedPtrField& other) { + RepeatedPtrFieldBase::MergeFrom(other); +} + +template +inline void RepeatedPtrField::CopyFrom( + const RepeatedPtrField& other) { + RepeatedPtrFieldBase::CopyFrom(other); +} + +template +inline Element** RepeatedPtrField::mutable_data() { + return RepeatedPtrFieldBase::mutable_data(); +} + +template +inline const Element* const* RepeatedPtrField::data() const { + return RepeatedPtrFieldBase::data(); +} + +template +void RepeatedPtrField::Swap(RepeatedPtrField* other) { + RepeatedPtrFieldBase::Swap(other); +} + +template +void RepeatedPtrField::SwapElements(int index1, int index2) { + RepeatedPtrFieldBase::SwapElements(index1, index2); +} + +template +inline int RepeatedPtrField::SpaceUsedExcludingSelf() const { + return RepeatedPtrFieldBase::SpaceUsedExcludingSelf(); +} + +template +inline void RepeatedPtrField::AddAllocated(Element* value) { + RepeatedPtrFieldBase::AddAllocated(value); +} + +template +inline Element* RepeatedPtrField::ReleaseLast() { + return RepeatedPtrFieldBase::ReleaseLast(); +} + + +template +inline int RepeatedPtrField::ClearedCount() const { + return RepeatedPtrFieldBase::ClearedCount(); +} + +template +inline void RepeatedPtrField::AddCleared(Element* value) { + return RepeatedPtrFieldBase::AddCleared(value); +} + +template +inline Element* RepeatedPtrField::ReleaseCleared() { + return RepeatedPtrFieldBase::ReleaseCleared(); +} + +template +inline void RepeatedPtrField::Reserve(int new_size) { + return RepeatedPtrFieldBase::Reserve(new_size); +} + +template +inline int RepeatedPtrField::Capacity() const { + return 
RepeatedPtrFieldBase::Capacity(); +} + +// ------------------------------------------------------------------- + +namespace internal { + +// STL-like iterator implementation for RepeatedPtrField. You should not +// refer to this class directly; use RepeatedPtrField::iterator instead. +// +// The iterator for RepeatedPtrField, RepeatedPtrIterator, is +// very similar to iterator_ptr in util/gtl/iterator_adaptors-inl.h, +// but adds random-access operators and is modified to wrap a void** base +// iterator (since RepeatedPtrField stores its array as a void* array and +// casting void** to T** would violate C++ aliasing rules). +// +// This code based on net/proto/proto-array-internal.h by Jeffrey Yasskin +// (jyasskin@google.com). +template +class RepeatedPtrIterator + : public std::iterator< + std::random_access_iterator_tag, Element> { + public: + typedef RepeatedPtrIterator iterator; + typedef std::iterator< + std::random_access_iterator_tag, Element> superclass; + + // Let the compiler know that these are type names, so we don't have to + // write "typename" in front of them everywhere. + typedef typename superclass::reference reference; + typedef typename superclass::pointer pointer; + typedef typename superclass::difference_type difference_type; + + RepeatedPtrIterator() : it_(NULL) {} + explicit RepeatedPtrIterator(void* const* it) : it_(it) {} + + // Allow "upcasting" from RepeatedPtrIterator to + // RepeatedPtrIterator. + template + RepeatedPtrIterator(const RepeatedPtrIterator& other) + : it_(other.it_) { + // Force a compiler error if the other type is not convertible to ours. + if (false) { + implicit_cast(0); + } + } + + // dereferenceable + reference operator*() const { return *reinterpret_cast(*it_); } + pointer operator->() const { return &(operator*()); } + + // {inc,dec}rementable + iterator& operator++() { ++it_; return *this; } + iterator operator++(int) { return iterator(it_++); } + iterator& operator--() { --it_; return *this; } + iterator operator--(int) { return iterator(it_--); } + + // equality_comparable + bool operator==(const iterator& x) const { return it_ == x.it_; } + bool operator!=(const iterator& x) const { return it_ != x.it_; } + + // less_than_comparable + bool operator<(const iterator& x) const { return it_ < x.it_; } + bool operator<=(const iterator& x) const { return it_ <= x.it_; } + bool operator>(const iterator& x) const { return it_ > x.it_; } + bool operator>=(const iterator& x) const { return it_ >= x.it_; } + + // addable, subtractable + iterator& operator+=(difference_type d) { + it_ += d; + return *this; + } + friend iterator operator+(iterator it, difference_type d) { + it += d; + return it; + } + friend iterator operator+(difference_type d, iterator it) { + it += d; + return it; + } + iterator& operator-=(difference_type d) { + it_ -= d; + return *this; + } + friend iterator operator-(iterator it, difference_type d) { + it -= d; + return it; + } + + // indexable + reference operator[](difference_type d) const { return *(*this + d); } + + // random access iterator + difference_type operator-(const iterator& x) const { return it_ - x.it_; } + + private: + template + friend class RepeatedPtrIterator; + + // The internal iterator. + void* const* it_; +}; + +// Provide an iterator that operates on pointers to the underlying objects +// rather than the objects themselves as RepeatedPtrIterator does. +// Consider using this when working with stl algorithms that change +// the array. 
+template +class RepeatedPtrOverPtrsIterator + : public std::iterator { + public: + typedef RepeatedPtrOverPtrsIterator iterator; + typedef std::iterator< + std::random_access_iterator_tag, Element*> superclass; + + // Let the compiler know that these are type names, so we don't have to + // write "typename" in front of them everywhere. + typedef typename superclass::reference reference; + typedef typename superclass::pointer pointer; + typedef typename superclass::difference_type difference_type; + + RepeatedPtrOverPtrsIterator() : it_(NULL) {} + explicit RepeatedPtrOverPtrsIterator(void** it) : it_(it) {} + + // dereferenceable + reference operator*() const { return *reinterpret_cast(it_); } + pointer operator->() const { return &(operator*()); } + + // {inc,dec}rementable + iterator& operator++() { ++it_; return *this; } + iterator operator++(int) { return iterator(it_++); } + iterator& operator--() { --it_; return *this; } + iterator operator--(int) { return iterator(it_--); } + + // equality_comparable + bool operator==(const iterator& x) const { return it_ == x.it_; } + bool operator!=(const iterator& x) const { return it_ != x.it_; } + + // less_than_comparable + bool operator<(const iterator& x) const { return it_ < x.it_; } + bool operator<=(const iterator& x) const { return it_ <= x.it_; } + bool operator>(const iterator& x) const { return it_ > x.it_; } + bool operator>=(const iterator& x) const { return it_ >= x.it_; } + + // addable, subtractable + iterator& operator+=(difference_type d) { + it_ += d; + return *this; + } + friend iterator operator+(iterator it, difference_type d) { + it += d; + return it; + } + friend iterator operator+(difference_type d, iterator it) { + it += d; + return it; + } + iterator& operator-=(difference_type d) { + it_ -= d; + return *this; + } + friend iterator operator-(iterator it, difference_type d) { + it -= d; + return it; + } + + // indexable + reference operator[](difference_type d) const { return *(*this + d); } + + // random access iterator + difference_type operator-(const iterator& x) const { return it_ - x.it_; } + + private: + template + friend class RepeatedPtrIterator; + + // The internal iterator. + void** it_; +}; + + +} // namespace internal + +template +inline typename RepeatedPtrField::iterator +RepeatedPtrField::begin() { + return iterator(raw_data()); +} +template +inline typename RepeatedPtrField::const_iterator +RepeatedPtrField::begin() const { + return iterator(raw_data()); +} +template +inline typename RepeatedPtrField::iterator +RepeatedPtrField::end() { + return iterator(raw_data() + size()); +} +template +inline typename RepeatedPtrField::const_iterator +RepeatedPtrField::end() const { + return iterator(raw_data() + size()); +} + +template +inline typename RepeatedPtrField::pointer_iterator +RepeatedPtrField::pointer_begin() { + return pointer_iterator(raw_mutable_data()); +} +template +inline typename RepeatedPtrField::pointer_iterator +RepeatedPtrField::pointer_end() { + return pointer_iterator(raw_mutable_data() + size()); +} + + +// Iterators and helper functions that follow the spirit of the STL +// std::back_insert_iterator and std::back_inserter but are tailor-made +// for RepeatedField and RepatedPtrField. Typical usage would be: +// +// std::copy(some_sequence.begin(), some_sequence.end(), +// google::protobuf::RepeatedFieldBackInserter(proto.mutable_sequence())); +// +// Ported by johannes from util/gtl/proto-array-iterators-inl.h + +namespace internal { +// A back inserter for RepeatedField objects. 
+template class RepeatedFieldBackInsertIterator + : public std::iterator { + public: + explicit RepeatedFieldBackInsertIterator( + RepeatedField* const mutable_field) + : field_(mutable_field) { + } + RepeatedFieldBackInsertIterator& operator=(const T& value) { + field_->Add(value); + return *this; + } + RepeatedFieldBackInsertIterator& operator*() { + return *this; + } + RepeatedFieldBackInsertIterator& operator++() { + return *this; + } + RepeatedFieldBackInsertIterator& operator++(int ignores_parameter) { + return *this; + } + + private: + RepeatedField* field_; +}; + +// A back inserter for RepeatedPtrField objects. +template class RepeatedPtrFieldBackInsertIterator + : public std::iterator { + public: + RepeatedPtrFieldBackInsertIterator( + RepeatedPtrField* const mutable_field) + : field_(mutable_field) { + } + RepeatedPtrFieldBackInsertIterator& operator=(const T& value) { + *field_->Add() = value; + return *this; + } + RepeatedPtrFieldBackInsertIterator& operator=( + const T* const ptr_to_value) { + *field_->Add() = *ptr_to_value; + return *this; + } + RepeatedPtrFieldBackInsertIterator& operator*() { + return *this; + } + RepeatedPtrFieldBackInsertIterator& operator++() { + return *this; + } + RepeatedPtrFieldBackInsertIterator& operator++(int ignores_parameter) { + return *this; + } + + private: + RepeatedPtrField* field_; +}; + +// A back inserter for RepeatedPtrFields that inserts by transfering ownership +// of a pointer. +template class AllocatedRepeatedPtrFieldBackInsertIterator + : public std::iterator { + public: + explicit AllocatedRepeatedPtrFieldBackInsertIterator( + RepeatedPtrField* const mutable_field) + : field_(mutable_field) { + } + AllocatedRepeatedPtrFieldBackInsertIterator& operator=( + T* const ptr_to_value) { + field_->AddAllocated(ptr_to_value); + return *this; + } + AllocatedRepeatedPtrFieldBackInsertIterator& operator*() { + return *this; + } + AllocatedRepeatedPtrFieldBackInsertIterator& operator++() { + return *this; + } + AllocatedRepeatedPtrFieldBackInsertIterator& operator++( + int ignores_parameter) { + return *this; + } + + private: + RepeatedPtrField* field_; +}; +} // namespace internal + +// Provides a back insert iterator for RepeatedField instances, +// similar to std::back_inserter(). Note the identically named +// function for RepeatedPtrField instances. +template internal::RepeatedFieldBackInsertIterator +RepeatedFieldBackInserter(RepeatedField* const mutable_field) { + return internal::RepeatedFieldBackInsertIterator(mutable_field); +} + +// Provides a back insert iterator for RepeatedPtrField instances, +// similar to std::back_inserter(). Note the identically named +// function for RepeatedField instances. +template internal::RepeatedPtrFieldBackInsertIterator +RepeatedFieldBackInserter(RepeatedPtrField* const mutable_field) { + return internal::RepeatedPtrFieldBackInsertIterator(mutable_field); +} + +// Provides a back insert iterator for RepeatedPtrField instances +// similar to std::back_inserter() which transfers the ownership while +// copying elements. 
+template internal::AllocatedRepeatedPtrFieldBackInsertIterator +AllocatedRepeatedPtrFieldBackInserter( + RepeatedPtrField* const mutable_field) { + return internal::AllocatedRepeatedPtrFieldBackInsertIterator( + mutable_field); +} + +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_REPEATED_FIELD_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/repeated_field_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/repeated_field_unittest.cc new file mode 100644 index 0000000000..e7e1e99b84 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/repeated_field_unittest.cc @@ -0,0 +1,1086 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// TODO(kenton): Improve this unittest to bring it up to the standards of +// other proto2 unittests. + +#include +#include +#include + +#include + +#include +#include +#include +#include +#include +#include + +namespace google { +using protobuf_unittest::TestAllTypes; + +namespace protobuf { +namespace { + +// Test operations on a RepeatedField which is small enough that it does +// not allocate a separate array for storage. 
+TEST(RepeatedField, Small) { + RepeatedField field; + + EXPECT_EQ(field.size(), 0); + + field.Add(5); + + EXPECT_EQ(field.size(), 1); + EXPECT_EQ(field.Get(0), 5); + + field.Add(42); + + EXPECT_EQ(field.size(), 2); + EXPECT_EQ(field.Get(0), 5); + EXPECT_EQ(field.Get(1), 42); + + field.Set(1, 23); + + EXPECT_EQ(field.size(), 2); + EXPECT_EQ(field.Get(0), 5); + EXPECT_EQ(field.Get(1), 23); + EXPECT_EQ(field.SpaceUsedExcludingSelf(), 0); + + field.RemoveLast(); + + EXPECT_EQ(field.size(), 1); + EXPECT_EQ(field.Get(0), 5); + + field.Clear(); + + EXPECT_EQ(field.size(), 0); + EXPECT_EQ(field.SpaceUsedExcludingSelf(), 0); +} + +// Test operations on a RepeatedField which is large enough to allocate a +// separate array. +TEST(RepeatedField, Large) { + RepeatedField field; + + for (int i = 0; i < 16; i++) { + field.Add(i * i); + } + + EXPECT_EQ(field.size(), 16); + + for (int i = 0; i < 16; i++) { + EXPECT_EQ(field.Get(i), i * i); + } + + int expected_usage = 16 * sizeof(int); + EXPECT_GE(field.SpaceUsedExcludingSelf(), expected_usage); +} + +// Test swapping between various types of RepeatedFields. +TEST(RepeatedField, SwapSmallSmall) { + RepeatedField field1; + RepeatedField field2; + + field1.Add(5); + field1.Add(42); + + field1.Swap(&field2); + + EXPECT_EQ(field1.size(), 0); + EXPECT_EQ(field2.size(), 2); + EXPECT_EQ(field2.Get(0), 5); + EXPECT_EQ(field2.Get(1), 42); +} + +TEST(RepeatedField, SwapLargeSmall) { + RepeatedField field1; + RepeatedField field2; + + for (int i = 0; i < 16; i++) { + field1.Add(i * i); + } + field2.Add(5); + field2.Add(42); + field1.Swap(&field2); + + EXPECT_EQ(field1.size(), 2); + EXPECT_EQ(field1.Get(0), 5); + EXPECT_EQ(field1.Get(1), 42); + EXPECT_EQ(field2.size(), 16); + for (int i = 0; i < 16; i++) { + EXPECT_EQ(field2.Get(i), i * i); + } +} + +TEST(RepeatedField, SwapLargeLarge) { + RepeatedField field1; + RepeatedField field2; + + field1.Add(5); + field1.Add(42); + for (int i = 0; i < 16; i++) { + field1.Add(i); + field2.Add(i * i); + } + field2.Swap(&field1); + + EXPECT_EQ(field1.size(), 16); + for (int i = 0; i < 16; i++) { + EXPECT_EQ(field1.Get(i), i * i); + } + EXPECT_EQ(field2.size(), 18); + EXPECT_EQ(field2.Get(0), 5); + EXPECT_EQ(field2.Get(1), 42); + for (int i = 2; i < 18; i++) { + EXPECT_EQ(field2.Get(i), i - 2); + } +} + +// Determines how much space was reserved by the given field by adding elements +// to it until it re-allocates its space. +static int ReservedSpace(RepeatedField* field) { + const int* ptr = field->data(); + do { + field->Add(0); + } while (field->data() == ptr); + + return field->size() - 1; +} + +TEST(RepeatedField, ReserveMoreThanDouble) { + // Reserve more than double the previous space in the field and expect the + // field to reserve exactly the amount specified. + RepeatedField field; + field.Reserve(20); + + EXPECT_EQ(20, ReservedSpace(&field)); +} + +TEST(RepeatedField, ReserveLessThanDouble) { + // Reserve less than double the previous space in the field and expect the + // field to grow by double instead. + RepeatedField field; + field.Reserve(20); + field.Reserve(30); + + EXPECT_EQ(40, ReservedSpace(&field)); +} + +TEST(RepeatedField, ReserveLessThanExisting) { + // Reserve less than the previous space in the field and expect the + // field to not re-allocate at all. 
+ RepeatedField field; + field.Reserve(20); + const int* previous_ptr = field.data(); + field.Reserve(10); + + EXPECT_EQ(previous_ptr, field.data()); + EXPECT_EQ(20, ReservedSpace(&field)); +} + +TEST(RepeatedField, MergeFrom) { + RepeatedField source, destination; + + source.Add(4); + source.Add(5); + + destination.Add(1); + destination.Add(2); + destination.Add(3); + + destination.MergeFrom(source); + + ASSERT_EQ(5, destination.size()); + + EXPECT_EQ(1, destination.Get(0)); + EXPECT_EQ(2, destination.Get(1)); + EXPECT_EQ(3, destination.Get(2)); + EXPECT_EQ(4, destination.Get(3)); + EXPECT_EQ(5, destination.Get(4)); +} + +TEST(RepeatedField, CopyFrom) { + RepeatedField source, destination; + + source.Add(4); + source.Add(5); + + destination.Add(1); + destination.Add(2); + destination.Add(3); + + destination.CopyFrom(source); + + ASSERT_EQ(2, destination.size()); + + EXPECT_EQ(4, destination.Get(0)); + EXPECT_EQ(5, destination.Get(1)); +} + +TEST(RepeatedField, CopyConstruct) { + RepeatedField source; + source.Add(1); + source.Add(2); + + RepeatedField destination(source); + + ASSERT_EQ(2, destination.size()); + EXPECT_EQ(1, destination.Get(0)); + EXPECT_EQ(2, destination.Get(1)); +} + +TEST(RepeatedField, CopyAssign) { + RepeatedField source, destination; + + source.Add(4); + source.Add(5); + + destination.Add(1); + destination.Add(2); + destination.Add(3); + + destination = source; + + ASSERT_EQ(2, destination.size()); + + EXPECT_EQ(4, destination.Get(0)); + EXPECT_EQ(5, destination.Get(1)); +} + +TEST(RepeatedField, MutableDataIsMutable) { + RepeatedField field; + field.Add(1); + EXPECT_EQ(1, field.Get(0)); + // The fact that this line compiles would be enough, but we'll check the + // value anyway. + *field.mutable_data() = 2; + EXPECT_EQ(2, field.Get(0)); +} + +TEST(RepeatedField, Truncate) { + RepeatedField field; + + field.Add(12); + field.Add(34); + field.Add(56); + field.Add(78); + EXPECT_EQ(4, field.size()); + + field.Truncate(3); + EXPECT_EQ(3, field.size()); + + field.Add(90); + EXPECT_EQ(4, field.size()); + EXPECT_EQ(90, field.Get(3)); + + // Truncations that don't change the size are allowed, but growing is not + // allowed. + field.Truncate(field.size()); +#ifdef GTEST_HAS_DEATH_TEST + EXPECT_DEBUG_DEATH(field.Truncate(field.size() + 1), "new_size"); +#endif +} + + +// =================================================================== +// RepeatedPtrField tests. These pretty much just mirror the RepeatedField +// tests above. 
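Before the RepeatedPtrField test cases that follow, a minimal sketch of the API they exercise, including the cleared-object pool and explicit ownership transfer; assumes only the repeated_field.h header added by this diff.

    #include <string>
    #include <google/protobuf/repeated_field.h>

    using google::protobuf::RepeatedPtrField;

    void RepeatedPtrFieldSketch() {
      RepeatedPtrField<std::string> field;

      // Add() default-constructs a heap-allocated element owned by the field
      // and returns a pointer to it.
      field.Add()->assign("foo");
      *field.Add() = "bar";
      field.Mutable(1)->append("!");            // in-place mutation
      const std::string& first = field.Get(0);  // read access
      (void)first;

      // RemoveLast() and Clear() do not free elements; they park them in a
      // "cleared" pool (ClearedCount()) so a later Add() can reuse the
      // allocation instead of allocating a new string.
      field.RemoveLast();                       // ClearedCount() == 1

      // Ownership can also be transferred explicitly in either direction.
      field.AddAllocated(new std::string("donated"));  // field takes ownership
      std::string* taken = field.ReleaseLast();        // caller takes ownership
      delete taken;
    }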
+ +TEST(RepeatedPtrField, Small) { + RepeatedPtrField field; + + EXPECT_EQ(field.size(), 0); + + field.Add()->assign("foo"); + + EXPECT_EQ(field.size(), 1); + EXPECT_EQ(field.Get(0), "foo"); + + field.Add()->assign("bar"); + + EXPECT_EQ(field.size(), 2); + EXPECT_EQ(field.Get(0), "foo"); + EXPECT_EQ(field.Get(1), "bar"); + + field.Mutable(1)->assign("baz"); + + EXPECT_EQ(field.size(), 2); + EXPECT_EQ(field.Get(0), "foo"); + EXPECT_EQ(field.Get(1), "baz"); + + field.RemoveLast(); + + EXPECT_EQ(field.size(), 1); + EXPECT_EQ(field.Get(0), "foo"); + + field.Clear(); + + EXPECT_EQ(field.size(), 0); +} + +TEST(RepeatedPtrField, Large) { + RepeatedPtrField field; + + for (int i = 0; i < 16; i++) { + *field.Add() += 'a' + i; + } + + EXPECT_EQ(field.size(), 16); + + for (int i = 0; i < 16; i++) { + EXPECT_EQ(field.Get(i).size(), 1); + EXPECT_EQ(field.Get(i)[0], 'a' + i); + } + + int min_expected_usage = 16 * sizeof(string); + EXPECT_GE(field.SpaceUsedExcludingSelf(), min_expected_usage); +} + +TEST(RepeatedPtrField, SwapSmallSmall) { + RepeatedPtrField field1; + RepeatedPtrField field2; + + field1.Add()->assign("foo"); + field1.Add()->assign("bar"); + field1.Swap(&field2); + + EXPECT_EQ(field1.size(), 0); + EXPECT_EQ(field2.size(), 2); + EXPECT_EQ(field2.Get(0), "foo"); + EXPECT_EQ(field2.Get(1), "bar"); +} + +TEST(RepeatedPtrField, SwapLargeSmall) { + RepeatedPtrField field1; + RepeatedPtrField field2; + + field2.Add()->assign("foo"); + field2.Add()->assign("bar"); + for (int i = 0; i < 16; i++) { + *field1.Add() += 'a' + i; + } + field1.Swap(&field2); + + EXPECT_EQ(field1.size(), 2); + EXPECT_EQ(field1.Get(0), "foo"); + EXPECT_EQ(field1.Get(1), "bar"); + EXPECT_EQ(field2.size(), 16); + for (int i = 0; i < 16; i++) { + EXPECT_EQ(field2.Get(i).size(), 1); + EXPECT_EQ(field2.Get(i)[0], 'a' + i); + } +} + +TEST(RepeatedPtrField, SwapLargeLarge) { + RepeatedPtrField field1; + RepeatedPtrField field2; + + field1.Add()->assign("foo"); + field1.Add()->assign("bar"); + for (int i = 0; i < 16; i++) { + *field1.Add() += 'A' + i; + *field2.Add() += 'a' + i; + } + field2.Swap(&field1); + + EXPECT_EQ(field1.size(), 16); + for (int i = 0; i < 16; i++) { + EXPECT_EQ(field1.Get(i).size(), 1); + EXPECT_EQ(field1.Get(i)[0], 'a' + i); + } + EXPECT_EQ(field2.size(), 18); + EXPECT_EQ(field2.Get(0), "foo"); + EXPECT_EQ(field2.Get(1), "bar"); + for (int i = 2; i < 18; i++) { + EXPECT_EQ(field2.Get(i).size(), 1); + EXPECT_EQ(field2.Get(i)[0], 'A' + i - 2); + } +} + +static int ReservedSpace(RepeatedPtrField* field) { + const string* const* ptr = field->data(); + do { + field->Add(); + } while (field->data() == ptr); + + return field->size() - 1; +} + +TEST(RepeatedPtrField, ReserveMoreThanDouble) { + RepeatedPtrField field; + field.Reserve(20); + + EXPECT_EQ(20, ReservedSpace(&field)); +} + +TEST(RepeatedPtrField, ReserveLessThanDouble) { + RepeatedPtrField field; + field.Reserve(20); + field.Reserve(30); + + EXPECT_EQ(40, ReservedSpace(&field)); +} + +TEST(RepeatedPtrField, ReserveLessThanExisting) { + RepeatedPtrField field; + field.Reserve(20); + const string* const* previous_ptr = field.data(); + field.Reserve(10); + + EXPECT_EQ(previous_ptr, field.data()); + EXPECT_EQ(20, ReservedSpace(&field)); +} + +TEST(RepeatedPtrField, ReserveDoesntLoseAllocated) { + // Check that a bug is fixed: An earlier implementation of Reserve() + // failed to copy pointers to allocated-but-cleared objects, possibly + // leading to segfaults. 
+ RepeatedPtrField field; + string* first = field.Add(); + field.RemoveLast(); + + field.Reserve(20); + EXPECT_EQ(first, field.Add()); +} + +// Clearing elements is tricky with RepeatedPtrFields since the memory for +// the elements is retained and reused. +TEST(RepeatedPtrField, ClearedElements) { + RepeatedPtrField field; + + string* original = field.Add(); + *original = "foo"; + + EXPECT_EQ(field.ClearedCount(), 0); + + field.RemoveLast(); + EXPECT_TRUE(original->empty()); + EXPECT_EQ(field.ClearedCount(), 1); + + EXPECT_EQ(field.Add(), original); // Should return same string for reuse. + + EXPECT_EQ(field.ReleaseLast(), original); // We take ownership. + EXPECT_EQ(field.ClearedCount(), 0); + + EXPECT_NE(field.Add(), original); // Should NOT return the same string. + EXPECT_EQ(field.ClearedCount(), 0); + + field.AddAllocated(original); // Give ownership back. + EXPECT_EQ(field.ClearedCount(), 0); + EXPECT_EQ(field.Mutable(1), original); + + field.Clear(); + EXPECT_EQ(field.ClearedCount(), 2); + EXPECT_EQ(field.ReleaseCleared(), original); // Take ownership again. + EXPECT_EQ(field.ClearedCount(), 1); + EXPECT_NE(field.Add(), original); + EXPECT_EQ(field.ClearedCount(), 0); + EXPECT_NE(field.Add(), original); + EXPECT_EQ(field.ClearedCount(), 0); + + field.AddCleared(original); // Give ownership back, but as a cleared object. + EXPECT_EQ(field.ClearedCount(), 1); + EXPECT_EQ(field.Add(), original); + EXPECT_EQ(field.ClearedCount(), 0); +} + +// Test all code paths in AddAllocated(). +TEST(RepeatedPtrField, AddAlocated) { + RepeatedPtrField field; + while (field.size() < field.Capacity()) { + field.Add()->assign("filler"); + } + + int index = field.size(); + + // First branch: Field is at capacity with no cleared objects. + string* foo = new string("foo"); + field.AddAllocated(foo); + EXPECT_EQ(index + 1, field.size()); + EXPECT_EQ(0, field.ClearedCount()); + EXPECT_EQ(foo, &field.Get(index)); + + // Last branch: Field is not at capacity and there are no cleared objects. + string* bar = new string("bar"); + field.AddAllocated(bar); + ++index; + EXPECT_EQ(index + 1, field.size()); + EXPECT_EQ(0, field.ClearedCount()); + EXPECT_EQ(bar, &field.Get(index)); + + // Third branch: Field is not at capacity and there are no cleared objects. + field.RemoveLast(); + string* baz = new string("baz"); + field.AddAllocated(baz); + EXPECT_EQ(index + 1, field.size()); + EXPECT_EQ(1, field.ClearedCount()); + EXPECT_EQ(baz, &field.Get(index)); + + // Second branch: Field is at capacity but has some cleared objects. + while (field.size() < field.Capacity()) { + field.Add()->assign("filler2"); + } + field.RemoveLast(); + index = field.size(); + string* qux = new string("qux"); + field.AddAllocated(qux); + EXPECT_EQ(index + 1, field.size()); + // We should have discarded the cleared object. 
+ EXPECT_EQ(0, field.ClearedCount()); + EXPECT_EQ(qux, &field.Get(index)); +} + +TEST(RepeatedPtrField, MergeFrom) { + RepeatedPtrField source, destination; + + source.Add()->assign("4"); + source.Add()->assign("5"); + + destination.Add()->assign("1"); + destination.Add()->assign("2"); + destination.Add()->assign("3"); + + destination.MergeFrom(source); + + ASSERT_EQ(5, destination.size()); + + EXPECT_EQ("1", destination.Get(0)); + EXPECT_EQ("2", destination.Get(1)); + EXPECT_EQ("3", destination.Get(2)); + EXPECT_EQ("4", destination.Get(3)); + EXPECT_EQ("5", destination.Get(4)); +} + +TEST(RepeatedPtrField, CopyFrom) { + RepeatedPtrField source, destination; + + source.Add()->assign("4"); + source.Add()->assign("5"); + + destination.Add()->assign("1"); + destination.Add()->assign("2"); + destination.Add()->assign("3"); + + destination.CopyFrom(source); + + ASSERT_EQ(2, destination.size()); + + EXPECT_EQ("4", destination.Get(0)); + EXPECT_EQ("5", destination.Get(1)); +} + +TEST(RepeatedPtrField, CopyConstruct) { + RepeatedPtrField source; + + source.Add()->assign("1"); + source.Add()->assign("2"); + + RepeatedPtrField destination(source); + + ASSERT_EQ(2, destination.size()); + EXPECT_EQ("1", destination.Get(0)); + EXPECT_EQ("2", destination.Get(1)); +} + +TEST(RepeatedPtrField, CopyAssign) { + RepeatedPtrField source, destination; + + source.Add()->assign("4"); + source.Add()->assign("5"); + + destination.Add()->assign("1"); + destination.Add()->assign("2"); + destination.Add()->assign("3"); + + destination = source; + + ASSERT_EQ(2, destination.size()); + + EXPECT_EQ("4", destination.Get(0)); + EXPECT_EQ("5", destination.Get(1)); +} + +TEST(RepeatedPtrField, MutableDataIsMutable) { + RepeatedPtrField field; + *field.Add() = "1"; + EXPECT_EQ("1", field.Get(0)); + // The fact that this line compiles would be enough, but we'll check the + // value anyway. + string** data = field.mutable_data(); + **data = "2"; + EXPECT_EQ("2", field.Get(0)); +} + +// =================================================================== + +// Iterator tests stolen from net/proto/proto-array_unittest. 
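A short sketch (illustrative, not part of the vendored test file) of the STL-style iteration that the fixtures below exercise; it assumes the repeated_field.h header added earlier in this diff.

    #include <algorithm>
    #include <numeric>
    #include <string>
    #include <google/protobuf/repeated_field.h>

    using google::protobuf::RepeatedField;
    using google::protobuf::RepeatedPtrField;

    int SumAndDecorate(RepeatedField<int>* ints,
                       RepeatedPtrField<std::string>* strs) {
      // RepeatedField<T>::iterator is a plain T*, so random-access algorithms
      // apply directly.
      std::sort(ints->begin(), ints->end());
      int total = std::accumulate(ints->begin(), ints->end(), 0);

      // RepeatedPtrField's iterator dereferences to the element itself (here
      // a std::string&), even though the storage is an array of pointers.
      for (RepeatedPtrField<std::string>::iterator it = strs->begin();
           it != strs->end(); ++it) {
        it->append("!");
      }
      return total;
    }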
+class RepeatedFieldIteratorTest : public testing::Test { + protected: + virtual void SetUp() { + for (int i = 0; i < 3; ++i) { + proto_array_.Add(i); + } + } + + RepeatedField proto_array_; +}; + +TEST_F(RepeatedFieldIteratorTest, Convertible) { + RepeatedField::iterator iter = proto_array_.begin(); + RepeatedField::const_iterator c_iter = iter; + RepeatedField::value_type value = *c_iter; + EXPECT_EQ(0, value); +} + +TEST_F(RepeatedFieldIteratorTest, MutableIteration) { + RepeatedField::iterator iter = proto_array_.begin(); + EXPECT_EQ(0, *iter); + ++iter; + EXPECT_EQ(1, *iter++); + EXPECT_EQ(2, *iter); + ++iter; + EXPECT_TRUE(proto_array_.end() == iter); + + EXPECT_EQ(2, *(proto_array_.end() - 1)); +} + +TEST_F(RepeatedFieldIteratorTest, ConstIteration) { + const RepeatedField& const_proto_array = proto_array_; + RepeatedField::const_iterator iter = const_proto_array.begin(); + EXPECT_EQ(0, *iter); + ++iter; + EXPECT_EQ(1, *iter++); + EXPECT_EQ(2, *iter); + ++iter; + EXPECT_TRUE(proto_array_.end() == iter); + EXPECT_EQ(2, *(proto_array_.end() - 1)); +} + +TEST_F(RepeatedFieldIteratorTest, Mutation) { + RepeatedField::iterator iter = proto_array_.begin(); + *iter = 7; + EXPECT_EQ(7, proto_array_.Get(0)); +} + +// ------------------------------------------------------------------- + +class RepeatedPtrFieldIteratorTest : public testing::Test { + protected: + virtual void SetUp() { + proto_array_.Add()->assign("foo"); + proto_array_.Add()->assign("bar"); + proto_array_.Add()->assign("baz"); + } + + RepeatedPtrField proto_array_; +}; + +TEST_F(RepeatedPtrFieldIteratorTest, Convertible) { + RepeatedPtrField::iterator iter = proto_array_.begin(); + RepeatedPtrField::const_iterator c_iter = iter; + RepeatedPtrField::value_type value = *c_iter; + EXPECT_EQ("foo", value); +} + +TEST_F(RepeatedPtrFieldIteratorTest, MutableIteration) { + RepeatedPtrField::iterator iter = proto_array_.begin(); + EXPECT_EQ("foo", *iter); + ++iter; + EXPECT_EQ("bar", *(iter++)); + EXPECT_EQ("baz", *iter); + ++iter; + EXPECT_TRUE(proto_array_.end() == iter); + EXPECT_EQ("baz", *(--proto_array_.end())); +} + +TEST_F(RepeatedPtrFieldIteratorTest, ConstIteration) { + const RepeatedPtrField& const_proto_array = proto_array_; + RepeatedPtrField::const_iterator iter = const_proto_array.begin(); + EXPECT_EQ("foo", *iter); + ++iter; + EXPECT_EQ("bar", *(iter++)); + EXPECT_EQ("baz", *iter); + ++iter; + EXPECT_TRUE(const_proto_array.end() == iter); + EXPECT_EQ("baz", *(--const_proto_array.end())); +} + +TEST_F(RepeatedPtrFieldIteratorTest, RandomAccess) { + RepeatedPtrField::iterator iter = proto_array_.begin(); + RepeatedPtrField::iterator iter2 = iter; + ++iter2; + ++iter2; + EXPECT_TRUE(iter + 2 == iter2); + EXPECT_TRUE(iter == iter2 - 2); + EXPECT_EQ("baz", iter[2]); + EXPECT_EQ("baz", *(iter + 2)); + EXPECT_EQ(3, proto_array_.end() - proto_array_.begin()); +} + +TEST_F(RepeatedPtrFieldIteratorTest, Comparable) { + RepeatedPtrField::const_iterator iter = proto_array_.begin(); + RepeatedPtrField::const_iterator iter2 = iter + 1; + EXPECT_TRUE(iter == iter); + EXPECT_TRUE(iter != iter2); + EXPECT_TRUE(iter < iter2); + EXPECT_TRUE(iter <= iter2); + EXPECT_TRUE(iter <= iter); + EXPECT_TRUE(iter2 > iter); + EXPECT_TRUE(iter2 >= iter); + EXPECT_TRUE(iter >= iter); +} + +// Uninitialized iterator does not point to any of the RepeatedPtrField. 
+TEST_F(RepeatedPtrFieldIteratorTest, UninitializedIterator) { + RepeatedPtrField::iterator iter; + EXPECT_TRUE(iter != proto_array_.begin()); + EXPECT_TRUE(iter != proto_array_.begin() + 1); + EXPECT_TRUE(iter != proto_array_.begin() + 2); + EXPECT_TRUE(iter != proto_array_.begin() + 3); + EXPECT_TRUE(iter != proto_array_.end()); +} + +TEST_F(RepeatedPtrFieldIteratorTest, STLAlgorithms_lower_bound) { + proto_array_.Clear(); + proto_array_.Add()->assign("a"); + proto_array_.Add()->assign("c"); + proto_array_.Add()->assign("d"); + proto_array_.Add()->assign("n"); + proto_array_.Add()->assign("p"); + proto_array_.Add()->assign("x"); + proto_array_.Add()->assign("y"); + + string v = "f"; + RepeatedPtrField::const_iterator it = + lower_bound(proto_array_.begin(), proto_array_.end(), v); + + EXPECT_EQ(*it, "n"); + EXPECT_TRUE(it == proto_array_.begin() + 3); +} + +TEST_F(RepeatedPtrFieldIteratorTest, Mutation) { + RepeatedPtrField::iterator iter = proto_array_.begin(); + *iter = "qux"; + EXPECT_EQ("qux", proto_array_.Get(0)); +} + +// ------------------------------------------------------------------- + +class RepeatedPtrFieldPtrsIteratorTest : public testing::Test { + protected: + virtual void SetUp() { + proto_array_.Add()->assign("foo"); + proto_array_.Add()->assign("bar"); + proto_array_.Add()->assign("baz"); + } + + RepeatedPtrField proto_array_; +}; + +TEST_F(RepeatedPtrFieldPtrsIteratorTest, ConvertiblePtr) { + RepeatedPtrField::pointer_iterator iter = + proto_array_.pointer_begin(); +} + +TEST_F(RepeatedPtrFieldPtrsIteratorTest, MutablePtrIteration) { + RepeatedPtrField::pointer_iterator iter = + proto_array_.pointer_begin(); + EXPECT_EQ("foo", **iter); + ++iter; + EXPECT_EQ("bar", **(iter++)); + EXPECT_EQ("baz", **iter); + ++iter; + EXPECT_TRUE(proto_array_.pointer_end() == iter); + EXPECT_EQ("baz", **(--proto_array_.pointer_end())); +} + +TEST_F(RepeatedPtrFieldPtrsIteratorTest, RandomPtrAccess) { + RepeatedPtrField::pointer_iterator iter = + proto_array_.pointer_begin(); + RepeatedPtrField::pointer_iterator iter2 = iter; + ++iter2; + ++iter2; + EXPECT_TRUE(iter + 2 == iter2); + EXPECT_TRUE(iter == iter2 - 2); + EXPECT_EQ("baz", *iter[2]); + EXPECT_EQ("baz", **(iter + 2)); + EXPECT_EQ(3, proto_array_.end() - proto_array_.begin()); +} + +TEST_F(RepeatedPtrFieldPtrsIteratorTest, ComparablePtr) { + RepeatedPtrField::pointer_iterator iter = + proto_array_.pointer_begin(); + RepeatedPtrField::pointer_iterator iter2 = iter + 1; + EXPECT_TRUE(iter == iter); + EXPECT_TRUE(iter != iter2); + EXPECT_TRUE(iter < iter2); + EXPECT_TRUE(iter <= iter2); + EXPECT_TRUE(iter <= iter); + EXPECT_TRUE(iter2 > iter); + EXPECT_TRUE(iter2 >= iter); + EXPECT_TRUE(iter >= iter); +} + +// Uninitialized iterator does not point to any of the RepeatedPtrOverPtrs. +// Dereferencing an uninitialized iterator crashes the process. +TEST_F(RepeatedPtrFieldPtrsIteratorTest, UninitializedPtrIterator) { + RepeatedPtrField::pointer_iterator iter; + EXPECT_TRUE(iter != proto_array_.pointer_begin()); + EXPECT_TRUE(iter != proto_array_.pointer_begin() + 1); + EXPECT_TRUE(iter != proto_array_.pointer_begin() + 2); + EXPECT_TRUE(iter != proto_array_.pointer_begin() + 3); + EXPECT_TRUE(iter != proto_array_.pointer_end()); +} + + +// This comparison functor is required by the tests for RepeatedPtrOverPtrs. +// They operate on strings and need to compare strings as strings in +// any stl algorithm, even though the iterator returns a pointer to a string +// - i.e. *iter has type string*. 
+struct StringLessThan { + bool operator()(const string* z, const string& y) { + return *z < y; + } + bool operator()(const string* z, const string* y) { + return *z < *y; + } +}; + +TEST_F(RepeatedPtrFieldPtrsIteratorTest, PtrSTLAlgorithms_lower_bound) { + proto_array_.Clear(); + proto_array_.Add()->assign("a"); + proto_array_.Add()->assign("c"); + proto_array_.Add()->assign("d"); + proto_array_.Add()->assign("n"); + proto_array_.Add()->assign("p"); + proto_array_.Add()->assign("x"); + proto_array_.Add()->assign("y"); + + RepeatedPtrField::pointer_iterator iter = + proto_array_.pointer_begin(); + string v = "f"; + RepeatedPtrField::pointer_iterator it = + lower_bound(proto_array_.pointer_begin(), proto_array_.pointer_end(), + &v, StringLessThan()); + + GOOGLE_CHECK(*it != NULL); + + EXPECT_EQ(**it, "n"); + EXPECT_TRUE(it == proto_array_.pointer_begin() + 3); +} + +TEST_F(RepeatedPtrFieldPtrsIteratorTest, PtrMutation) { + RepeatedPtrField::pointer_iterator iter = + proto_array_.pointer_begin(); + **iter = "qux"; + EXPECT_EQ("qux", proto_array_.Get(0)); + + EXPECT_EQ("bar", proto_array_.Get(1)); + EXPECT_EQ("baz", proto_array_.Get(2)); + ++iter; + delete *iter; + *iter = new string("a"); + ++iter; + delete *iter; + *iter = new string("b"); + EXPECT_EQ("a", proto_array_.Get(1)); + EXPECT_EQ("b", proto_array_.Get(2)); +} + +TEST_F(RepeatedPtrFieldPtrsIteratorTest, Sort) { + proto_array_.Add()->assign("c"); + proto_array_.Add()->assign("d"); + proto_array_.Add()->assign("n"); + proto_array_.Add()->assign("p"); + proto_array_.Add()->assign("a"); + proto_array_.Add()->assign("y"); + proto_array_.Add()->assign("x"); + EXPECT_EQ("foo", proto_array_.Get(0)); + EXPECT_EQ("n", proto_array_.Get(5)); + EXPECT_EQ("x", proto_array_.Get(9)); + sort(proto_array_.pointer_begin(), + proto_array_.pointer_end(), + StringLessThan()); + EXPECT_EQ("a", proto_array_.Get(0)); + EXPECT_EQ("baz", proto_array_.Get(2)); + EXPECT_EQ("y", proto_array_.Get(9)); +} + + +// ----------------------------------------------------------------------------- +// Unit-tests for the insert iterators +// google::protobuf::RepeatedFieldBackInserter, +// google::protobuf::AllocatedRepeatedPtrFieldBackInserter +// Ported from util/gtl/proto-array-iterators_unittest. 
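Before the insertion-iterator fixtures below, a minimal sketch of the two back inserters being tested, in the spirit of the usage comment in the header; assumes only the repeated_field.h header added by this diff.

    #include <algorithm>
    #include <string>
    #include <vector>
    #include <google/protobuf/repeated_field.h>

    using google::protobuf::RepeatedField;
    using google::protobuf::RepeatedPtrField;

    void FillFromVector(const std::vector<int>& src, RepeatedField<int>* dst) {
      // The back inserter adapts dst so that standard algorithms can append
      // to it, mirroring std::back_inserter() for std::vector.
      std::copy(src.begin(), src.end(),
                google::protobuf::RepeatedFieldBackInserter(dst));
    }

    void DonateStrings(std::vector<std::string*>* src,
                       RepeatedPtrField<std::string>* dst) {
      // AllocatedRepeatedPtrFieldBackInserter transfers ownership of each
      // pointer to the field instead of copying the pointed-to value.
      std::copy(src->begin(), src->end(),
                google::protobuf::AllocatedRepeatedPtrFieldBackInserter(dst));
      src->clear();  // dst now owns the strings
    }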
+ +class RepeatedFieldInsertionIteratorsTest : public testing::Test { + protected: + std::list halves; + std::list fibonacci; + std::vector words; + typedef TestAllTypes::NestedMessage Nested; + Nested nesteds[2]; + std::vector nested_ptrs; + TestAllTypes protobuffer; + + virtual void SetUp() { + fibonacci.push_back(1); + fibonacci.push_back(1); + fibonacci.push_back(2); + fibonacci.push_back(3); + fibonacci.push_back(5); + fibonacci.push_back(8); + std::copy(fibonacci.begin(), fibonacci.end(), + RepeatedFieldBackInserter(protobuffer.mutable_repeated_int32())); + + halves.push_back(1.0); + halves.push_back(0.5); + halves.push_back(0.25); + halves.push_back(0.125); + halves.push_back(0.0625); + std::copy(halves.begin(), halves.end(), + RepeatedFieldBackInserter(protobuffer.mutable_repeated_double())); + + words.push_back("Able"); + words.push_back("was"); + words.push_back("I"); + words.push_back("ere"); + words.push_back("I"); + words.push_back("saw"); + words.push_back("Elba"); + std::copy(words.begin(), words.end(), + RepeatedFieldBackInserter(protobuffer.mutable_repeated_string())); + + nesteds[0].set_bb(17); + nesteds[1].set_bb(4711); + std::copy(&nesteds[0], &nesteds[2], + RepeatedFieldBackInserter( + protobuffer.mutable_repeated_nested_message())); + + nested_ptrs.push_back(new Nested); + nested_ptrs.back()->set_bb(170); + nested_ptrs.push_back(new Nested); + nested_ptrs.back()->set_bb(47110); + std::copy(nested_ptrs.begin(), nested_ptrs.end(), + RepeatedFieldBackInserter( + protobuffer.mutable_repeated_nested_message())); + + } + + virtual void TearDown() { + STLDeleteContainerPointers(nested_ptrs.begin(), nested_ptrs.end()); + } +}; + +TEST_F(RepeatedFieldInsertionIteratorsTest, Fibonacci) { + EXPECT_TRUE(std::equal(fibonacci.begin(), + fibonacci.end(), + protobuffer.repeated_int32().begin())); + EXPECT_TRUE(std::equal(protobuffer.repeated_int32().begin(), + protobuffer.repeated_int32().end(), + fibonacci.begin())); +} + +TEST_F(RepeatedFieldInsertionIteratorsTest, Halves) { + EXPECT_TRUE(std::equal(halves.begin(), + halves.end(), + protobuffer.repeated_double().begin())); + EXPECT_TRUE(std::equal(protobuffer.repeated_double().begin(), + protobuffer.repeated_double().end(), + halves.begin())); +} + +TEST_F(RepeatedFieldInsertionIteratorsTest, Words) { + ASSERT_EQ(words.size(), protobuffer.repeated_string_size()); + EXPECT_EQ(words.at(0), protobuffer.repeated_string(0)); + EXPECT_EQ(words.at(1), protobuffer.repeated_string(1)); + EXPECT_EQ(words.at(2), protobuffer.repeated_string(2)); + EXPECT_EQ(words.at(3), protobuffer.repeated_string(3)); + EXPECT_EQ(words.at(4), protobuffer.repeated_string(4)); + EXPECT_EQ(words.at(5), protobuffer.repeated_string(5)); + EXPECT_EQ(words.at(6), protobuffer.repeated_string(6)); +} + +TEST_F(RepeatedFieldInsertionIteratorsTest, Nesteds) { + ASSERT_EQ(protobuffer.repeated_nested_message_size(), 4); + EXPECT_EQ(protobuffer.repeated_nested_message(0).bb(), 17); + EXPECT_EQ(protobuffer.repeated_nested_message(1).bb(), 4711); + EXPECT_EQ(protobuffer.repeated_nested_message(2).bb(), 170); + EXPECT_EQ(protobuffer.repeated_nested_message(3).bb(), 47110); +} + +TEST_F(RepeatedFieldInsertionIteratorsTest, + AllocatedRepeatedPtrFieldWithStringIntData) { + vector data; + TestAllTypes goldenproto; + for (int i = 0; i < 10; ++i) { + Nested* new_data = new Nested; + new_data->set_bb(i); + data.push_back(new_data); + + new_data = goldenproto.add_repeated_nested_message(); + new_data->set_bb(i); + } + TestAllTypes testproto; + copy(data.begin(), data.end(), + 
AllocatedRepeatedPtrFieldBackInserter( + testproto.mutable_repeated_nested_message())); + EXPECT_EQ(testproto.DebugString(), goldenproto.DebugString()); +} + +TEST_F(RepeatedFieldInsertionIteratorsTest, + AllocatedRepeatedPtrFieldWithString) { + vector data; + TestAllTypes goldenproto; + for (int i = 0; i < 10; ++i) { + string* new_data = new string; + *new_data = "name-" + SimpleItoa(i); + data.push_back(new_data); + + new_data = goldenproto.add_repeated_string(); + *new_data = "name-" + SimpleItoa(i); + } + TestAllTypes testproto; + copy(data.begin(), data.end(), + AllocatedRepeatedPtrFieldBackInserter( + testproto.mutable_repeated_string())); + EXPECT_EQ(testproto.DebugString(), goldenproto.DebugString()); +} + +} // namespace + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/service.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/service.cc new file mode 100644 index 0000000000..caf968ca5b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/service.cc @@ -0,0 +1,46 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
+ +#include + +namespace google { +namespace protobuf { + +Service::~Service() {} +RpcChannel::~RpcChannel() {} +RpcController::~RpcController() {} + +} // namespace protobuf + +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/service.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/service.h new file mode 100644 index 0000000000..a6a7d16deb --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/service.h @@ -0,0 +1,291 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// DEPRECATED: This module declares the abstract interfaces underlying proto2 +// RPC services. These are intented to be independent of any particular RPC +// implementation, so that proto2 services can be used on top of a variety +// of implementations. Starting with version 2.3.0, RPC implementations should +// not try to build on these, but should instead provide code generator plugins +// which generate code specific to the particular RPC implementation. This way +// the generated code can be more appropriate for the implementation in use +// and can avoid unnecessary layers of indirection. +// +// +// When you use the protocol compiler to compile a service definition, it +// generates two classes: An abstract interface for the service (with +// methods matching the service definition) and a "stub" implementation. +// A stub is just a type-safe wrapper around an RpcChannel which emulates a +// local implementation of the service. 
+// +// For example, the service definition: +// service MyService { +// rpc Foo(MyRequest) returns(MyResponse); +// } +// will generate abstract interface "MyService" and class "MyService::Stub". +// You could implement a MyService as follows: +// class MyServiceImpl : public MyService { +// public: +// MyServiceImpl() {} +// ~MyServiceImpl() {} +// +// // implements MyService --------------------------------------- +// +// void Foo(google::protobuf::RpcController* controller, +// const MyRequest* request, +// MyResponse* response, +// Closure* done) { +// // ... read request and fill in response ... +// done->Run(); +// } +// }; +// You would then register an instance of MyServiceImpl with your RPC server +// implementation. (How to do that depends on the implementation.) +// +// To call a remote MyServiceImpl, first you need an RpcChannel connected to it. +// How to construct a channel depends, again, on your RPC implementation. +// Here we use a hypothentical "MyRpcChannel" as an example: +// MyRpcChannel channel("rpc:hostname:1234/myservice"); +// MyRpcController controller; +// MyServiceImpl::Stub stub(&channel); +// FooRequest request; +// FooRespnose response; +// +// // ... fill in request ... +// +// stub.Foo(&controller, request, &response, NewCallback(HandleResponse)); +// +// On Thread-Safety: +// +// Different RPC implementations may make different guarantees about what +// threads they may run callbacks on, and what threads the application is +// allowed to use to call the RPC system. Portable software should be ready +// for callbacks to be called on any thread, but should not try to call the +// RPC system from any thread except for the ones on which it received the +// callbacks. Realistically, though, simple software will probably want to +// use a single-threaded RPC system while high-end software will want to +// use multiple threads. RPC implementations should provide multiple +// choices. + +#ifndef GOOGLE_PROTOBUF_SERVICE_H__ +#define GOOGLE_PROTOBUF_SERVICE_H__ + +#include +#include + +namespace google { +namespace protobuf { + +// Defined in this file. +class Service; +class RpcController; +class RpcChannel; + +// Defined in other files. +class Descriptor; // descriptor.h +class ServiceDescriptor; // descriptor.h +class MethodDescriptor; // descriptor.h +class Message; // message.h + +// Abstract base interface for protocol-buffer-based RPC services. Services +// themselves are abstract interfaces (implemented either by servers or as +// stubs), but they subclass this base interface. The methods of this +// interface can be used to call the methods of the Service without knowing +// its exact type at compile time (analogous to Reflection). +class LIBPROTOBUF_EXPORT Service { + public: + inline Service() {} + virtual ~Service(); + + // When constructing a stub, you may pass STUB_OWNS_CHANNEL as the second + // parameter to the constructor to tell it to delete its RpcChannel when + // destroyed. + enum ChannelOwnership { + STUB_OWNS_CHANNEL, + STUB_DOESNT_OWN_CHANNEL + }; + + // Get the ServiceDescriptor describing this service and its methods. + virtual const ServiceDescriptor* GetDescriptor() = 0; + + // Call a method of the service specified by MethodDescriptor. This is + // normally implemented as a simple switch() that calls the standard + // definitions of the service's methods. 
+ // + // Preconditions: + // * method->service() == GetDescriptor() + // * request and response are of the exact same classes as the objects + // returned by GetRequestPrototype(method) and + // GetResponsePrototype(method). + // * After the call has started, the request must not be modified and the + // response must not be accessed at all until "done" is called. + // * "controller" is of the correct type for the RPC implementation being + // used by this Service. For stubs, the "correct type" depends on the + // RpcChannel which the stub is using. Server-side Service + // implementations are expected to accept whatever type of RpcController + // the server-side RPC implementation uses. + // + // Postconditions: + // * "done" will be called when the method is complete. This may be + // before CallMethod() returns or it may be at some point in the future. + // * If the RPC succeeded, "response" contains the response returned by + // the server. + // * If the RPC failed, "response"'s contents are undefined. The + // RpcController can be queried to determine if an error occurred and + // possibly to get more information about the error. + virtual void CallMethod(const MethodDescriptor* method, + RpcController* controller, + const Message* request, + Message* response, + Closure* done) = 0; + + // CallMethod() requires that the request and response passed in are of a + // particular subclass of Message. GetRequestPrototype() and + // GetResponsePrototype() get the default instances of these required types. + // You can then call Message::New() on these instances to construct mutable + // objects which you can then pass to CallMethod(). + // + // Example: + // const MethodDescriptor* method = + // service->GetDescriptor()->FindMethodByName("Foo"); + // Message* request = stub->GetRequestPrototype (method)->New(); + // Message* response = stub->GetResponsePrototype(method)->New(); + // request->ParseFromString(input); + // service->CallMethod(method, *request, response, callback); + virtual const Message& GetRequestPrototype( + const MethodDescriptor* method) const = 0; + virtual const Message& GetResponsePrototype( + const MethodDescriptor* method) const = 0; + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Service); +}; + +// An RpcController mediates a single method call. The primary purpose of +// the controller is to provide a way to manipulate settings specific to the +// RPC implementation and to find out about RPC-level errors. +// +// The methods provided by the RpcController interface are intended to be a +// "least common denominator" set of features which we expect all +// implementations to support. Specific implementations may provide more +// advanced features (e.g. deadline propagation). +class LIBPROTOBUF_EXPORT RpcController { + public: + inline RpcController() {} + virtual ~RpcController(); + + // Client-side methods --------------------------------------------- + // These calls may be made from the client side only. Their results + // are undefined on the server side (may crash). + + // Resets the RpcController to its initial state so that it may be reused in + // a new call. Must not be called while an RPC is in progress. + virtual void Reset() = 0; + + // After a call has finished, returns true if the call failed. The possible + // reasons for failure depend on the RPC implementation. Failed() must not + // be called before a call has finished. If Failed() returns true, the + // contents of the response message are undefined. 
+ virtual bool Failed() const = 0; + + // If Failed() is true, returns a human-readable description of the error. + virtual string ErrorText() const = 0; + + // Advises the RPC system that the caller desires that the RPC call be + // canceled. The RPC system may cancel it immediately, may wait awhile and + // then cancel it, or may not even cancel the call at all. If the call is + // canceled, the "done" callback will still be called and the RpcController + // will indicate that the call failed at that time. + virtual void StartCancel() = 0; + + // Server-side methods --------------------------------------------- + // These calls may be made from the server side only. Their results + // are undefined on the client side (may crash). + + // Causes Failed() to return true on the client side. "reason" will be + // incorporated into the message returned by ErrorText(). If you find + // you need to return machine-readable information about failures, you + // should incorporate it into your response protocol buffer and should + // NOT call SetFailed(). + virtual void SetFailed(const string& reason) = 0; + + // If true, indicates that the client canceled the RPC, so the server may + // as well give up on replying to it. The server should still call the + // final "done" callback. + virtual bool IsCanceled() const = 0; + + // Asks that the given callback be called when the RPC is canceled. The + // callback will always be called exactly once. If the RPC completes without + // being canceled, the callback will be called after completion. If the RPC + // has already been canceled when NotifyOnCancel() is called, the callback + // will be called immediately. + // + // NotifyOnCancel() must be called no more than once per request. + virtual void NotifyOnCancel(Closure* callback) = 0; + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RpcController); +}; + +// Abstract interface for an RPC channel. An RpcChannel represents a +// communication line to a Service which can be used to call that Service's +// methods. The Service may be running on another machine. Normally, you +// should not call an RpcChannel directly, but instead construct a stub Service +// wrapping it. Example: +// RpcChannel* channel = new MyRpcChannel("remotehost.example.com:1234"); +// MyService* service = new MyService::Stub(channel); +// service->MyMethod(request, &response, callback); +class LIBPROTOBUF_EXPORT RpcChannel { + public: + inline RpcChannel() {} + virtual ~RpcChannel(); + + // Call the given method of the remote service. The signature of this + // procedure looks the same as Service::CallMethod(), but the requirements + // are less strict in one important way: the request and response objects + // need not be of any specific class as long as their descriptors are + // method->input_type() and method->output_type(). 
+ virtual void CallMethod(const MethodDescriptor* method, + RpcController* controller, + const Message* request, + Message* response, + Closure* done) = 0; + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RpcChannel); +}; + +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_SERVICE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/common.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/common.cc new file mode 100644 index 0000000000..7b15be44d8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/common.cc @@ -0,0 +1,377 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) + +#include +#include +#include +#include +#include + +#include "config.h" + +#ifdef _WIN32 +#define WIN32_LEAN_AND_MEAN // We only need minimal includes +#include +#define snprintf _snprintf // see comment in strutil.cc +#elif defined(HAVE_PTHREAD) +#include +#else +#error "No suitable threading library available." +#endif + +namespace google { +namespace protobuf { + +namespace internal { + +void VerifyVersion(int headerVersion, + int minLibraryVersion, + const char* filename) { + if (GOOGLE_PROTOBUF_VERSION < minLibraryVersion) { + // Library is too old for headers. + GOOGLE_LOG(FATAL) + << "This program requires version " << VersionString(minLibraryVersion) + << " of the Protocol Buffer runtime library, but the installed version " + "is " << VersionString(GOOGLE_PROTOBUF_VERSION) << ". Please update " + "your library. If you compiled the program yourself, make sure that " + "your headers are from the same version of Protocol Buffers as your " + "link-time library. 
(Version verification failed in \"" + << filename << "\".)"; + } + if (headerVersion < kMinHeaderVersionForLibrary) { + // Headers are too old for library. + GOOGLE_LOG(FATAL) + << "This program was compiled against version " + << VersionString(headerVersion) << " of the Protocol Buffer runtime " + "library, which is not compatible with the installed version (" + << VersionString(GOOGLE_PROTOBUF_VERSION) << "). Contact the program " + "author for an update. If you compiled the program yourself, make " + "sure that your headers are from the same version of Protocol Buffers " + "as your link-time library. (Version verification failed in \"" + << filename << "\".)"; + } +} + +string VersionString(int version) { + int major = version / 1000000; + int minor = (version / 1000) % 1000; + int micro = version % 1000; + + // 128 bytes should always be enough, but we use snprintf() anyway to be + // safe. + char buffer[128]; + snprintf(buffer, sizeof(buffer), "%d.%d.%d", major, minor, micro); + + // Guard against broken MSVC snprintf(). + buffer[sizeof(buffer)-1] = '\0'; + + return buffer; +} + +} // namespace internal + +// =================================================================== +// emulates google3/base/logging.cc + +namespace internal { + +void DefaultLogHandler(LogLevel level, const char* filename, int line, + const string& message) { + static const char* level_names[] = { "INFO", "WARNING", "ERROR", "FATAL" }; + + // We use fprintf() instead of cerr because we want this to work at static + // initialization time. + fprintf(stderr, "libprotobuf %s %s:%d] %s\n", + level_names[level], filename, line, message.c_str()); + fflush(stderr); // Needed on MSVC. +} + +void NullLogHandler(LogLevel level, const char* filename, int line, + const string& message) { + // Nothing. +} + +static LogHandler* log_handler_ = &DefaultLogHandler; +static int log_silencer_count_ = 0; + +static Mutex* log_silencer_count_mutex_ = NULL; +GOOGLE_PROTOBUF_DECLARE_ONCE(log_silencer_count_init_); + +void DeleteLogSilencerCount() { + delete log_silencer_count_mutex_; + log_silencer_count_mutex_ = NULL; +} +void InitLogSilencerCount() { + log_silencer_count_mutex_ = new Mutex; + OnShutdown(&DeleteLogSilencerCount); +} +void InitLogSilencerCountOnce() { + GoogleOnceInit(&log_silencer_count_init_, &InitLogSilencerCount); +} + +LogMessage& LogMessage::operator<<(const string& value) { + message_ += value; + return *this; +} + +LogMessage& LogMessage::operator<<(const char* value) { + message_ += value; + return *this; +} + +// Since this is just for logging, we don't care if the current locale changes +// the results -- in fact, we probably prefer that. So we use snprintf() +// instead of Simple*toa(). +#undef DECLARE_STREAM_OPERATOR +#define DECLARE_STREAM_OPERATOR(TYPE, FORMAT) \ + LogMessage& LogMessage::operator<<(TYPE value) { \ + /* 128 bytes should be big enough for any of the primitive */ \ + /* values which we print with this, but well use snprintf() */ \ + /* anyway to be extra safe. */ \ + char buffer[128]; \ + snprintf(buffer, sizeof(buffer), FORMAT, value); \ + /* Guard against broken MSVC snprintf(). 
*/ \ + buffer[sizeof(buffer)-1] = '\0'; \ + message_ += buffer; \ + return *this; \ + } + +DECLARE_STREAM_OPERATOR(char , "%c" ) +DECLARE_STREAM_OPERATOR(int , "%d" ) +DECLARE_STREAM_OPERATOR(uint , "%u" ) +DECLARE_STREAM_OPERATOR(long , "%ld") +DECLARE_STREAM_OPERATOR(unsigned long, "%lu") +DECLARE_STREAM_OPERATOR(double , "%g" ) +#undef DECLARE_STREAM_OPERATOR + +LogMessage::LogMessage(LogLevel level, const char* filename, int line) + : level_(level), filename_(filename), line_(line) {} +LogMessage::~LogMessage() {} + +void LogMessage::Finish() { + bool suppress = false; + + if (level_ != LOGLEVEL_FATAL) { + InitLogSilencerCountOnce(); + MutexLock lock(log_silencer_count_mutex_); + suppress = internal::log_silencer_count_ > 0; + } + + if (!suppress) { + internal::log_handler_(level_, filename_, line_, message_); + } + + if (level_ == LOGLEVEL_FATAL) { +#ifdef PROTOBUF_USE_EXCEPTIONS + throw FatalException(filename_, line_, message_); +#else + abort(); +#endif + } +} + +void LogFinisher::operator=(LogMessage& other) { + other.Finish(); +} + +} // namespace internal + +LogHandler* SetLogHandler(LogHandler* new_func) { + LogHandler* old = internal::log_handler_; + if (old == &internal::NullLogHandler) { + old = NULL; + } + if (new_func == NULL) { + internal::log_handler_ = &internal::NullLogHandler; + } else { + internal::log_handler_ = new_func; + } + return old; +} + +LogSilencer::LogSilencer() { + internal::InitLogSilencerCountOnce(); + MutexLock lock(internal::log_silencer_count_mutex_); + ++internal::log_silencer_count_; +}; + +LogSilencer::~LogSilencer() { + internal::InitLogSilencerCountOnce(); + MutexLock lock(internal::log_silencer_count_mutex_); + --internal::log_silencer_count_; +}; + +// =================================================================== +// emulates google3/base/callback.cc + +Closure::~Closure() {} + +namespace internal { FunctionClosure0::~FunctionClosure0() {} } + +void DoNothing() {} + +// =================================================================== +// emulates google3/base/mutex.cc + +#ifdef _WIN32 + +struct Mutex::Internal { + CRITICAL_SECTION mutex; +#ifndef NDEBUG + // Used only to implement AssertHeld(). + DWORD thread_id; +#endif +}; + +Mutex::Mutex() + : mInternal(new Internal) { + InitializeCriticalSection(&mInternal->mutex); +} + +Mutex::~Mutex() { + DeleteCriticalSection(&mInternal->mutex); + delete mInternal; +} + +void Mutex::Lock() { + EnterCriticalSection(&mInternal->mutex); +#ifndef NDEBUG + mInternal->thread_id = GetCurrentThreadId(); +#endif +} + +void Mutex::Unlock() { +#ifndef NDEBUG + mInternal->thread_id = 0; +#endif + LeaveCriticalSection(&mInternal->mutex); +} + +void Mutex::AssertHeld() { +#ifndef NDEBUG + GOOGLE_DCHECK_EQ(mInternal->thread_id, GetCurrentThreadId()); +#endif +} + +#elif defined(HAVE_PTHREAD) + +struct Mutex::Internal { + pthread_mutex_t mutex; +}; + +Mutex::Mutex() + : mInternal(new Internal) { + pthread_mutex_init(&mInternal->mutex, NULL); +} + +Mutex::~Mutex() { + pthread_mutex_destroy(&mInternal->mutex); + delete mInternal; +} + +void Mutex::Lock() { + int result = pthread_mutex_lock(&mInternal->mutex); + if (result != 0) { + GOOGLE_LOG(FATAL) << "pthread_mutex_lock: " << strerror(result); + } +} + +void Mutex::Unlock() { + int result = pthread_mutex_unlock(&mInternal->mutex); + if (result != 0) { + GOOGLE_LOG(FATAL) << "pthread_mutex_unlock: " << strerror(result); + } +} + +void Mutex::AssertHeld() { + // pthreads dosn't provide a way to check which thread holds the mutex. 
+ // TODO(kenton): Maybe keep track of locking thread ID like with WIN32? +} + +#endif + +// =================================================================== +// Shutdown support. + +namespace internal { + +typedef void OnShutdownFunc(); +vector* shutdown_functions = NULL; +Mutex* shutdown_functions_mutex = NULL; +GOOGLE_PROTOBUF_DECLARE_ONCE(shutdown_functions_init); + +void InitShutdownFunctions() { + shutdown_functions = new vector; + shutdown_functions_mutex = new Mutex; +} + +inline void InitShutdownFunctionsOnce() { + GoogleOnceInit(&shutdown_functions_init, &InitShutdownFunctions); +} + +void OnShutdown(void (*func)()) { + InitShutdownFunctionsOnce(); + MutexLock lock(shutdown_functions_mutex); + shutdown_functions->push_back(func); +} + +} // namespace internal + +void ShutdownProtobufLibrary() { + internal::InitShutdownFunctionsOnce(); + + // We don't need to lock shutdown_functions_mutex because it's up to the + // caller to make sure that no one is using the library before this is + // called. + + // Make it safe to call this multiple times. + if (internal::shutdown_functions == NULL) return; + + for (int i = 0; i < internal::shutdown_functions->size(); i++) { + internal::shutdown_functions->at(i)(); + } + delete internal::shutdown_functions; + internal::shutdown_functions = NULL; + delete internal::shutdown_functions_mutex; + internal::shutdown_functions_mutex = NULL; +} + +#ifdef PROTOBUF_USE_EXCEPTIONS +FatalException::~FatalException() throw() {} + +const char* FatalException::what() const throw() { + return message_.c_str(); +} +#endif + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/common.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/common.h new file mode 100644 index 0000000000..7f5fd4d2d2 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/common.h @@ -0,0 +1,1211 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) and others +// +// Contains basic types and utilities used by the rest of the library. + +#ifndef GOOGLE_PROTOBUF_COMMON_H__ +#define GOOGLE_PROTOBUF_COMMON_H__ + +#include +#include +#include +#include +#include +#if defined(__osf__) +// Tru64 lacks stdint.h, but has inttypes.h which defines a superset of +// what stdint.h would define. +#include +#elif !defined(_MSC_VER) +#include +#endif + +#if defined(_MSC_VER) && defined(_CPPUNWIND) + #define PROTOBUF_USE_EXCEPTIONS +#elif defined(__EXCEPTIONS) + #define PROTOBUF_USE_EXCEPTIONS +#endif +#ifdef PROTOBUF_USE_EXCEPTIONS +#include +#endif + +#if defined(_WIN32) && defined(GetMessage) +// Allow GetMessage to be used as a valid method name in protobuf classes. +// windows.h defines GetMessage() as a macro. Let's re-define it as an inline +// function. The inline function should be equivalent for C++ users. +inline BOOL GetMessage_Win32( + LPMSG lpMsg, HWND hWnd, + UINT wMsgFilterMin, UINT wMsgFilterMax) { + return GetMessage(lpMsg, hWnd, wMsgFilterMin, wMsgFilterMax); +} +#undef GetMessage +inline BOOL GetMessage( + LPMSG lpMsg, HWND hWnd, + UINT wMsgFilterMin, UINT wMsgFilterMax) { + return GetMessage_Win32(lpMsg, hWnd, wMsgFilterMin, wMsgFilterMax); +} +#endif + + +namespace std {} + +namespace google { +namespace protobuf { + +#undef GOOGLE_DISALLOW_EVIL_CONSTRUCTORS +#define GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(TypeName) \ + TypeName(const TypeName&); \ + void operator=(const TypeName&) + +#if defined(_MSC_VER) && defined(PROTOBUF_USE_DLLS) + #ifdef LIBPROTOBUF_EXPORTS + #define LIBPROTOBUF_EXPORT __declspec(dllexport) + #else + #define LIBPROTOBUF_EXPORT __declspec(dllimport) + #endif + #ifdef LIBPROTOC_EXPORTS + #define LIBPROTOC_EXPORT __declspec(dllexport) + #else + #define LIBPROTOC_EXPORT __declspec(dllimport) + #endif +#else + #define LIBPROTOBUF_EXPORT + #define LIBPROTOC_EXPORT +#endif + +namespace internal { + +// Some of these constants are macros rather than const ints so that they can +// be used in #if directives. + +// The current version, represented as a single integer to make comparison +// easier: major * 10^6 + minor * 10^3 + micro +#define GOOGLE_PROTOBUF_VERSION 2004002 + +// The minimum library version which works with the current version of the +// headers. +#define GOOGLE_PROTOBUF_MIN_LIBRARY_VERSION 2004000 + +// The minimum header version which works with the current version of +// the library. This constant should only be used by protoc's C++ code +// generator. +static const int kMinHeaderVersionForLibrary = 2004000; + +// The minimum protoc version which works with the current version of the +// headers. +#define GOOGLE_PROTOBUF_MIN_PROTOC_VERSION 2004000 + +// The minimum header version which works with the current version of +// protoc. This constant should only be used in VerifyVersion(). +static const int kMinHeaderVersionForProtoc = 2004000; + +// Verifies that the headers and libraries are compatible. 
Use the macro +// below to call this. +void LIBPROTOBUF_EXPORT VerifyVersion(int headerVersion, int minLibraryVersion, + const char* filename); + +// Converts a numeric version number to a string. +std::string LIBPROTOBUF_EXPORT VersionString(int version); + +} // namespace internal + +// Place this macro in your main() function (or somewhere before you attempt +// to use the protobuf library) to verify that the version you link against +// matches the headers you compiled against. If a version mismatch is +// detected, the process will abort. +#define GOOGLE_PROTOBUF_VERIFY_VERSION \ + ::google::protobuf::internal::VerifyVersion( \ + GOOGLE_PROTOBUF_VERSION, GOOGLE_PROTOBUF_MIN_LIBRARY_VERSION, \ + __FILE__) + +// =================================================================== +// from google3/base/port.h + +typedef unsigned int uint; + +#ifdef _MSC_VER +typedef __int8 int8; +typedef __int16 int16; +typedef __int32 int32; +typedef __int64 int64; + +typedef unsigned __int8 uint8; +typedef unsigned __int16 uint16; +typedef unsigned __int32 uint32; +typedef unsigned __int64 uint64; +#else +typedef int8_t int8; +typedef int16_t int16; +typedef int32_t int32; +typedef int64_t int64; + +typedef uint8_t uint8; +typedef uint16_t uint16; +typedef uint32_t uint32; +typedef uint64_t uint64; +#endif + +// long long macros to be used because gcc and vc++ use different suffixes, +// and different size specifiers in format strings +#undef GOOGLE_LONGLONG +#undef GOOGLE_ULONGLONG +#undef GOOGLE_LL_FORMAT + +#ifdef _MSC_VER +#define GOOGLE_LONGLONG(x) x##I64 +#define GOOGLE_ULONGLONG(x) x##UI64 +#define GOOGLE_LL_FORMAT "I64" // As in printf("%I64d", ...) +#else +#define GOOGLE_LONGLONG(x) x##LL +#define GOOGLE_ULONGLONG(x) x##ULL +#define GOOGLE_LL_FORMAT "ll" // As in "%lld". Note that "q" is poor form also. +#endif + +static const int32 kint32max = 0x7FFFFFFF; +static const int32 kint32min = -kint32max - 1; +static const int64 kint64max = GOOGLE_LONGLONG(0x7FFFFFFFFFFFFFFF); +static const int64 kint64min = -kint64max - 1; +static const uint32 kuint32max = 0xFFFFFFFFu; +static const uint64 kuint64max = GOOGLE_ULONGLONG(0xFFFFFFFFFFFFFFFF); + +// ------------------------------------------------------------------- +// Annotations: Some parts of the code have been annotated in ways that might +// be useful to some compilers or tools, but are not supported universally. +// You can #define these annotations yourself if the default implementation +// is not right for you. + +#ifndef GOOGLE_ATTRIBUTE_ALWAYS_INLINE +#if defined(__GNUC__) && (__GNUC__ > 3 ||(__GNUC__ == 3 && __GNUC_MINOR__ >= 1)) +// For functions we want to force inline. +// Introduced in gcc 3.1. +#define GOOGLE_ATTRIBUTE_ALWAYS_INLINE __attribute__ ((always_inline)) +#else +// Other compilers will have to figure it out for themselves. +#define GOOGLE_ATTRIBUTE_ALWAYS_INLINE +#endif +#endif + +#ifndef GOOGLE_ATTRIBUTE_DEPRECATED +#ifdef __GNUC__ +// If the method/variable/type is used anywhere, produce a warning. +#define GOOGLE_ATTRIBUTE_DEPRECATED __attribute__((deprecated)) +#else +#define GOOGLE_ATTRIBUTE_DEPRECATED +#endif +#endif + +#ifndef GOOGLE_PREDICT_TRUE +#ifdef __GNUC__ +// Provided at least since GCC 3.0. +#define GOOGLE_PREDICT_TRUE(x) (__builtin_expect(!!(x), 1)) +#else +#define GOOGLE_PREDICT_TRUE +#endif +#endif + +// Delimits a block of code which may write to memory which is simultaneously +// written by other threads, but which has been determined to be thread-safe +// (e.g. because it is an idempotent write). 
+#ifndef GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN +#define GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN() +#endif +#ifndef GOOGLE_SAFE_CONCURRENT_WRITES_END +#define GOOGLE_SAFE_CONCURRENT_WRITES_END() +#endif + +// =================================================================== +// from google3/base/basictypes.h + +// The GOOGLE_ARRAYSIZE(arr) macro returns the # of elements in an array arr. +// The expression is a compile-time constant, and therefore can be +// used in defining new arrays, for example. +// +// GOOGLE_ARRAYSIZE catches a few type errors. If you see a compiler error +// +// "warning: division by zero in ..." +// +// when using GOOGLE_ARRAYSIZE, you are (wrongfully) giving it a pointer. +// You should only use GOOGLE_ARRAYSIZE on statically allocated arrays. +// +// The following comments are on the implementation details, and can +// be ignored by the users. +// +// ARRAYSIZE(arr) works by inspecting sizeof(arr) (the # of bytes in +// the array) and sizeof(*(arr)) (the # of bytes in one array +// element). If the former is divisible by the latter, perhaps arr is +// indeed an array, in which case the division result is the # of +// elements in the array. Otherwise, arr cannot possibly be an array, +// and we generate a compiler error to prevent the code from +// compiling. +// +// Since the size of bool is implementation-defined, we need to cast +// !(sizeof(a) & sizeof(*(a))) to size_t in order to ensure the final +// result has type size_t. +// +// This macro is not perfect as it wrongfully accepts certain +// pointers, namely where the pointer size is divisible by the pointee +// size. Since all our code has to go through a 32-bit compiler, +// where a pointer is 4 bytes, this means all pointers to a type whose +// size is 3 or greater than 4 will be (righteously) rejected. +// +// Kudos to Jorg Brown for this simple and elegant implementation. + +#undef GOOGLE_ARRAYSIZE +#define GOOGLE_ARRAYSIZE(a) \ + ((sizeof(a) / sizeof(*(a))) / \ + static_cast(!(sizeof(a) % sizeof(*(a))))) + +namespace internal { + +// Use implicit_cast as a safe version of static_cast or const_cast +// for upcasting in the type hierarchy (i.e. casting a pointer to Foo +// to a pointer to SuperclassOfFoo or casting a pointer to Foo to +// a const pointer to Foo). +// When you use implicit_cast, the compiler checks that the cast is safe. +// Such explicit implicit_casts are necessary in surprisingly many +// situations where C++ demands an exact type match instead of an +// argument type convertable to a target type. +// +// The From type can be inferred, so the preferred syntax for using +// implicit_cast is the same as for static_cast etc.: +// +// implicit_cast(expr) +// +// implicit_cast would have been part of the C++ standard library, +// but the proposal was submitted too late. It will probably make +// its way into the language in the future. +template +inline To implicit_cast(From const &f) { + return f; +} + +// When you upcast (that is, cast a pointer from type Foo to type +// SuperclassOfFoo), it's fine to use implicit_cast<>, since upcasts +// always succeed. When you downcast (that is, cast a pointer from +// type Foo to type SubclassOfFoo), static_cast<> isn't safe, because +// how do you know the pointer is really of type SubclassOfFoo? It +// could be a bare Foo, or of type DifferentSubclassOfFoo. Thus, +// when you downcast, you should use this macro. In debug mode, we +// use dynamic_cast<> to double-check the downcast is legal (we die +// if it's not). 
In normal mode, we do the efficient static_cast<> +// instead. Thus, it's important to test in debug mode to make sure +// the cast is legal! +// This is the only place in the code we should use dynamic_cast<>. +// In particular, you SHOULDN'T be using dynamic_cast<> in order to +// do RTTI (eg code like this: +// if (dynamic_cast(foo)) HandleASubclass1Object(foo); +// if (dynamic_cast(foo)) HandleASubclass2Object(foo); +// You should design the code some other way not to need this. + +template // use like this: down_cast(foo); +inline To down_cast(From* f) { // so we only accept pointers + // Ensures that To is a sub-type of From *. This test is here only + // for compile-time type checking, and has no overhead in an + // optimized build at run-time, as it will be optimized away + // completely. + if (false) { + implicit_cast(0); + } + +#if !defined(NDEBUG) && !defined(GOOGLE_PROTOBUF_NO_RTTI) + assert(f == NULL || dynamic_cast(f) != NULL); // RTTI: debug mode only! +#endif + return static_cast(f); +} + +} // namespace internal + +// We made these internal so that they would show up as such in the docs, +// but we don't want to stick "internal::" in front of them everywhere. +using internal::implicit_cast; +using internal::down_cast; + +// The COMPILE_ASSERT macro can be used to verify that a compile time +// expression is true. For example, you could use it to verify the +// size of a static array: +// +// COMPILE_ASSERT(ARRAYSIZE(content_type_names) == CONTENT_NUM_TYPES, +// content_type_names_incorrect_size); +// +// or to make sure a struct is smaller than a certain size: +// +// COMPILE_ASSERT(sizeof(foo) < 128, foo_too_large); +// +// The second argument to the macro is the name of the variable. If +// the expression is false, most compilers will issue a warning/error +// containing the name of the variable. + +namespace internal { + +template +struct CompileAssert { +}; + +} // namespace internal + +#undef GOOGLE_COMPILE_ASSERT +#define GOOGLE_COMPILE_ASSERT(expr, msg) \ + typedef ::google::protobuf::internal::CompileAssert<(bool(expr))> \ + msg[bool(expr) ? 1 : -1] + + +// Implementation details of COMPILE_ASSERT: +// +// - COMPILE_ASSERT works by defining an array type that has -1 +// elements (and thus is invalid) when the expression is false. +// +// - The simpler definition +// +// #define COMPILE_ASSERT(expr, msg) typedef char msg[(expr) ? 1 : -1] +// +// does not work, as gcc supports variable-length arrays whose sizes +// are determined at run-time (this is gcc's extension and not part +// of the C++ standard). As a result, gcc fails to reject the +// following code with the simple definition: +// +// int foo; +// COMPILE_ASSERT(foo, msg); // not supposed to compile as foo is +// // not a compile-time constant. +// +// - By using the type CompileAssert<(bool(expr))>, we ensures that +// expr is a compile-time constant. (Template arguments must be +// determined at compile-time.) +// +// - The outter parentheses in CompileAssert<(bool(expr))> are necessary +// to work around a bug in gcc 3.4.4 and 4.0.1. If we had written +// +// CompileAssert +// +// instead, these compilers will refuse to compile +// +// COMPILE_ASSERT(5 > 0, some_message); +// +// (They seem to think the ">" in "5 > 0" marks the end of the +// template argument list.) +// +// - The array size is (bool(expr) ? 1 : -1), instead of simply +// +// ((expr) ? 1 : -1). +// +// This is to avoid running into a bug in MS VC 7.1, which +// causes ((0.0) ? 1 : -1) to incorrectly evaluate to 1. 
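// A short usage sketch tying GOOGLE_ARRAYSIZE and GOOGLE_COMPILE_ASSERT
// together (the table name below is illustrative only):
//
//   static const char* const kLevelNames[] =
//       { "INFO", "WARNING", "ERROR", "FATAL" };
//   // Refuses to compile if the table ever gets out of sync with the four
//   // log levels declared later in this header.
//   GOOGLE_COMPILE_ASSERT(GOOGLE_ARRAYSIZE(kLevelNames) == 4,
//                         kLevelNames_wrong_size);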
+ +// =================================================================== +// from google3/base/scoped_ptr.h + +namespace internal { + +// This is an implementation designed to match the anticipated future TR2 +// implementation of the scoped_ptr class, and its closely-related brethren, +// scoped_array, scoped_ptr_malloc, and make_scoped_ptr. + +template class scoped_ptr; +template class scoped_array; + +// A scoped_ptr is like a T*, except that the destructor of scoped_ptr +// automatically deletes the pointer it holds (if any). +// That is, scoped_ptr owns the T object that it points to. +// Like a T*, a scoped_ptr may hold either NULL or a pointer to a T object. +// +// The size of a scoped_ptr is small: +// sizeof(scoped_ptr) == sizeof(C*) +template +class scoped_ptr { + public: + + // The element type + typedef C element_type; + + // Constructor. Defaults to intializing with NULL. + // There is no way to create an uninitialized scoped_ptr. + // The input parameter must be allocated with new. + explicit scoped_ptr(C* p = NULL) : ptr_(p) { } + + // Destructor. If there is a C object, delete it. + // We don't need to test ptr_ == NULL because C++ does that for us. + ~scoped_ptr() { + enum { type_must_be_complete = sizeof(C) }; + delete ptr_; + } + + // Reset. Deletes the current owned object, if any. + // Then takes ownership of a new object, if given. + // this->reset(this->get()) works. + void reset(C* p = NULL) { + if (p != ptr_) { + enum { type_must_be_complete = sizeof(C) }; + delete ptr_; + ptr_ = p; + } + } + + // Accessors to get the owned object. + // operator* and operator-> will assert() if there is no current object. + C& operator*() const { + assert(ptr_ != NULL); + return *ptr_; + } + C* operator->() const { + assert(ptr_ != NULL); + return ptr_; + } + C* get() const { return ptr_; } + + // Comparison operators. + // These return whether two scoped_ptr refer to the same object, not just to + // two different but equal objects. + bool operator==(C* p) const { return ptr_ == p; } + bool operator!=(C* p) const { return ptr_ != p; } + + // Swap two scoped pointers. + void swap(scoped_ptr& p2) { + C* tmp = ptr_; + ptr_ = p2.ptr_; + p2.ptr_ = tmp; + } + + // Release a pointer. + // The return value is the current pointer held by this object. + // If this object holds a NULL pointer, the return value is NULL. + // After this operation, this object will hold a NULL pointer, + // and will not own the object any more. + C* release() { + C* retVal = ptr_; + ptr_ = NULL; + return retVal; + } + + private: + C* ptr_; + + // Forbid comparison of scoped_ptr types. If C2 != C, it totally doesn't + // make sense, and if C2 == C, it still doesn't make sense because you should + // never have the same object owned by two different scoped_ptrs. + template bool operator==(scoped_ptr const& p2) const; + template bool operator!=(scoped_ptr const& p2) const; + + // Disallow evil constructors + scoped_ptr(const scoped_ptr&); + void operator=(const scoped_ptr&); +}; + +// scoped_array is like scoped_ptr, except that the caller must allocate +// with new [] and the destructor deletes objects with delete []. +// +// As with scoped_ptr, a scoped_array either points to an object +// or is NULL. A scoped_array owns the object that it points to. +// +// Size: sizeof(scoped_array) == sizeof(C*) +template +class scoped_array { + public: + + // The element type + typedef C element_type; + + // Constructor. Defaults to intializing with NULL. + // There is no way to create an uninitialized scoped_array. 
+ // The input parameter must be allocated with new []. + explicit scoped_array(C* p = NULL) : array_(p) { } + + // Destructor. If there is a C object, delete it. + // We don't need to test ptr_ == NULL because C++ does that for us. + ~scoped_array() { + enum { type_must_be_complete = sizeof(C) }; + delete[] array_; + } + + // Reset. Deletes the current owned object, if any. + // Then takes ownership of a new object, if given. + // this->reset(this->get()) works. + void reset(C* p = NULL) { + if (p != array_) { + enum { type_must_be_complete = sizeof(C) }; + delete[] array_; + array_ = p; + } + } + + // Get one element of the current object. + // Will assert() if there is no current object, or index i is negative. + C& operator[](std::ptrdiff_t i) const { + assert(i >= 0); + assert(array_ != NULL); + return array_[i]; + } + + // Get a pointer to the zeroth element of the current object. + // If there is no current object, return NULL. + C* get() const { + return array_; + } + + // Comparison operators. + // These return whether two scoped_array refer to the same object, not just to + // two different but equal objects. + bool operator==(C* p) const { return array_ == p; } + bool operator!=(C* p) const { return array_ != p; } + + // Swap two scoped arrays. + void swap(scoped_array& p2) { + C* tmp = array_; + array_ = p2.array_; + p2.array_ = tmp; + } + + // Release an array. + // The return value is the current pointer held by this object. + // If this object holds a NULL pointer, the return value is NULL. + // After this operation, this object will hold a NULL pointer, + // and will not own the object any more. + C* release() { + C* retVal = array_; + array_ = NULL; + return retVal; + } + + private: + C* array_; + + // Forbid comparison of different scoped_array types. + template bool operator==(scoped_array const& p2) const; + template bool operator!=(scoped_array const& p2) const; + + // Disallow evil constructors + scoped_array(const scoped_array&); + void operator=(const scoped_array&); +}; + +} // namespace internal + +// We made these internal so that they would show up as such in the docs, +// but we don't want to stick "internal::" in front of them everywhere. +using internal::scoped_ptr; +using internal::scoped_array; + +// =================================================================== +// emulates google3/base/logging.h + +enum LogLevel { + LOGLEVEL_INFO, // Informational. This is never actually used by + // libprotobuf. + LOGLEVEL_WARNING, // Warns about issues that, although not technically a + // problem now, could cause problems in the future. For + // example, a // warning will be printed when parsing a + // message that is near the message size limit. + LOGLEVEL_ERROR, // An error occurred which should never happen during + // normal use. + LOGLEVEL_FATAL, // An error occurred from which the library cannot + // recover. This usually indicates a programming error + // in the code which calls the library, especially when + // compiled in debug mode. 
+ +#ifdef NDEBUG + LOGLEVEL_DFATAL = LOGLEVEL_ERROR +#else + LOGLEVEL_DFATAL = LOGLEVEL_FATAL +#endif +}; + +namespace internal { + +class LogFinisher; + +class LIBPROTOBUF_EXPORT LogMessage { + public: + LogMessage(LogLevel level, const char* filename, int line); + ~LogMessage(); + + LogMessage& operator<<(const std::string& value); + LogMessage& operator<<(const char* value); + LogMessage& operator<<(char value); + LogMessage& operator<<(int value); + LogMessage& operator<<(uint value); + LogMessage& operator<<(long value); + LogMessage& operator<<(unsigned long value); + LogMessage& operator<<(double value); + + private: + friend class LogFinisher; + void Finish(); + + LogLevel level_; + const char* filename_; + int line_; + std::string message_; +}; + +// Used to make the entire "LOG(BLAH) << etc." expression have a void return +// type and print a newline after each message. +class LIBPROTOBUF_EXPORT LogFinisher { + public: + void operator=(LogMessage& other); +}; + +} // namespace internal + +// Undef everything in case we're being mixed with some other Google library +// which already defined them itself. Presumably all Google libraries will +// support the same syntax for these so it should not be a big deal if they +// end up using our definitions instead. +#undef GOOGLE_LOG +#undef GOOGLE_LOG_IF + +#undef GOOGLE_CHECK +#undef GOOGLE_CHECK_EQ +#undef GOOGLE_CHECK_NE +#undef GOOGLE_CHECK_LT +#undef GOOGLE_CHECK_LE +#undef GOOGLE_CHECK_GT +#undef GOOGLE_CHECK_GE + +#undef GOOGLE_DLOG +#undef GOOGLE_DCHECK +#undef GOOGLE_DCHECK_EQ +#undef GOOGLE_DCHECK_NE +#undef GOOGLE_DCHECK_LT +#undef GOOGLE_DCHECK_LE +#undef GOOGLE_DCHECK_GT +#undef GOOGLE_DCHECK_GE + +#define GOOGLE_LOG(LEVEL) \ + ::google::protobuf::internal::LogFinisher() = \ + ::google::protobuf::internal::LogMessage( \ + ::google::protobuf::LOGLEVEL_##LEVEL, __FILE__, __LINE__) +#define GOOGLE_LOG_IF(LEVEL, CONDITION) \ + !(CONDITION) ? (void)0 : GOOGLE_LOG(LEVEL) + +#define GOOGLE_CHECK(EXPRESSION) \ + GOOGLE_LOG_IF(FATAL, !(EXPRESSION)) << "CHECK failed: " #EXPRESSION ": " +#define GOOGLE_CHECK_EQ(A, B) GOOGLE_CHECK((A) == (B)) +#define GOOGLE_CHECK_NE(A, B) GOOGLE_CHECK((A) != (B)) +#define GOOGLE_CHECK_LT(A, B) GOOGLE_CHECK((A) < (B)) +#define GOOGLE_CHECK_LE(A, B) GOOGLE_CHECK((A) <= (B)) +#define GOOGLE_CHECK_GT(A, B) GOOGLE_CHECK((A) > (B)) +#define GOOGLE_CHECK_GE(A, B) GOOGLE_CHECK((A) >= (B)) + +#ifdef NDEBUG + +#define GOOGLE_DLOG GOOGLE_LOG_IF(INFO, false) + +#define GOOGLE_DCHECK(EXPRESSION) while(false) GOOGLE_CHECK(EXPRESSION) +#define GOOGLE_DCHECK_EQ(A, B) GOOGLE_DCHECK((A) == (B)) +#define GOOGLE_DCHECK_NE(A, B) GOOGLE_DCHECK((A) != (B)) +#define GOOGLE_DCHECK_LT(A, B) GOOGLE_DCHECK((A) < (B)) +#define GOOGLE_DCHECK_LE(A, B) GOOGLE_DCHECK((A) <= (B)) +#define GOOGLE_DCHECK_GT(A, B) GOOGLE_DCHECK((A) > (B)) +#define GOOGLE_DCHECK_GE(A, B) GOOGLE_DCHECK((A) >= (B)) + +#else // NDEBUG + +#define GOOGLE_DLOG GOOGLE_LOG + +#define GOOGLE_DCHECK GOOGLE_CHECK +#define GOOGLE_DCHECK_EQ GOOGLE_CHECK_EQ +#define GOOGLE_DCHECK_NE GOOGLE_CHECK_NE +#define GOOGLE_DCHECK_LT GOOGLE_CHECK_LT +#define GOOGLE_DCHECK_LE GOOGLE_CHECK_LE +#define GOOGLE_DCHECK_GT GOOGLE_CHECK_GT +#define GOOGLE_DCHECK_GE GOOGLE_CHECK_GE + +#endif // !NDEBUG + +typedef void LogHandler(LogLevel level, const char* filename, int line, + const std::string& message); + +// The protobuf library sometimes writes warning and error messages to +// stderr. 
These messages are primarily useful for developers, but may +// also help end users figure out a problem. If you would prefer that +// these messages be sent somewhere other than stderr, call SetLogHandler() +// to set your own handler. This returns the old handler. Set the handler +// to NULL to ignore log messages (but see also LogSilencer, below). +// +// Obviously, SetLogHandler is not thread-safe. You should only call it +// at initialization time, and probably not from library code. If you +// simply want to suppress log messages temporarily (e.g. because you +// have some code that tends to trigger them frequently and you know +// the warnings are not important to you), use the LogSilencer class +// below. +LIBPROTOBUF_EXPORT LogHandler* SetLogHandler(LogHandler* new_func); + +// Create a LogSilencer if you want to temporarily suppress all log +// messages. As long as any LogSilencer objects exist, non-fatal +// log messages will be discarded (the current LogHandler will *not* +// be called). Constructing a LogSilencer is thread-safe. You may +// accidentally suppress log messages occurring in another thread, but +// since messages are generally for debugging purposes only, this isn't +// a big deal. If you want to intercept log messages, use SetLogHandler(). +class LIBPROTOBUF_EXPORT LogSilencer { + public: + LogSilencer(); + ~LogSilencer(); +}; + +// =================================================================== +// emulates google3/base/callback.h + +// Abstract interface for a callback. When calling an RPC, you must provide +// a Closure to call when the procedure completes. See the Service interface +// in service.h. +// +// To automatically construct a Closure which calls a particular function or +// method with a particular set of parameters, use the NewCallback() function. +// Example: +// void FooDone(const FooResponse* response) { +// ... +// } +// +// void CallFoo() { +// ... +// // When done, call FooDone() and pass it a pointer to the response. +// Closure* callback = NewCallback(&FooDone, response); +// // Make the call. +// service->Foo(controller, request, response, callback); +// } +// +// Example that calls a method: +// class Handler { +// public: +// ... +// +// void FooDone(const FooResponse* response) { +// ... +// } +// +// void CallFoo() { +// ... +// // When done, call FooDone() and pass it a pointer to the response. +// Closure* callback = NewCallback(this, &Handler::FooDone, response); +// // Make the call. +// service->Foo(controller, request, response, callback); +// } +// }; +// +// Currently NewCallback() supports binding zero, one, or two arguments. +// +// Callbacks created with NewCallback() automatically delete themselves when +// executed. They should be used when a callback is to be called exactly +// once (usually the case with RPC callbacks). If a callback may be called +// a different number of times (including zero), create it with +// NewPermanentCallback() instead. You are then responsible for deleting the +// callback (using the "delete" keyword as normal). +// +// Note that NewCallback() is a bit touchy regarding argument types. Generally, +// the values you provide for the parameter bindings must exactly match the +// types accepted by the callback function. 
For example:
+//   void Foo(string s);
+//   NewCallback(&Foo, "foo");          // WON'T WORK: const char* != string
+//   NewCallback(&Foo, string("foo"));  // WORKS
+// Also note that the arguments cannot be references:
+//   void Foo(const string& s);
+//   string my_str;
+//   NewCallback(&Foo, my_str);  // WON'T WORK: Can't use references.
+// However, correctly-typed pointers will work just fine.
+class LIBPROTOBUF_EXPORT Closure {
+ public:
+  Closure() {}
+  virtual ~Closure();
+
+  virtual void Run() = 0;
+
+ private:
+  GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Closure);
+};
+
+namespace internal {
+
+class LIBPROTOBUF_EXPORT FunctionClosure0 : public Closure {
+ public:
+  typedef void (*FunctionType)();
+
+  FunctionClosure0(FunctionType function, bool self_deleting)
+    : function_(function), self_deleting_(self_deleting) {}
+  ~FunctionClosure0();
+
+  void Run() {
+    bool needs_delete = self_deleting_;  // read in case callback deletes
+    function_();
+    if (needs_delete) delete this;
+  }
+
+ private:
+  FunctionType function_;
+  bool self_deleting_;
+};
+
+template <typename Class>
+class MethodClosure0 : public Closure {
+ public:
+  typedef void (Class::*MethodType)();
+
+  MethodClosure0(Class* object, MethodType method, bool self_deleting)
+    : object_(object), method_(method), self_deleting_(self_deleting) {}
+  ~MethodClosure0() {}
+
+  void Run() {
+    bool needs_delete = self_deleting_;  // read in case callback deletes
+    (object_->*method_)();
+    if (needs_delete) delete this;
+  }
+
+ private:
+  Class* object_;
+  MethodType method_;
+  bool self_deleting_;
+};
+
+template <typename Arg1>
+class FunctionClosure1 : public Closure {
+ public:
+  typedef void (*FunctionType)(Arg1 arg1);
+
+  FunctionClosure1(FunctionType function, bool self_deleting,
+                   Arg1 arg1)
+    : function_(function), self_deleting_(self_deleting),
+      arg1_(arg1) {}
+  ~FunctionClosure1() {}
+
+  void Run() {
+    bool needs_delete = self_deleting_;  // read in case callback deletes
+    function_(arg1_);
+    if (needs_delete) delete this;
+  }
+
+ private:
+  FunctionType function_;
+  bool self_deleting_;
+  Arg1 arg1_;
+};
+
+template <typename Class, typename Arg1>
+class MethodClosure1 : public Closure {
+ public:
+  typedef void (Class::*MethodType)(Arg1 arg1);
+
+  MethodClosure1(Class* object, MethodType method, bool self_deleting,
+                 Arg1 arg1)
+    : object_(object), method_(method), self_deleting_(self_deleting),
+      arg1_(arg1) {}
+  ~MethodClosure1() {}
+
+  void Run() {
+    bool needs_delete = self_deleting_;  // read in case callback deletes
+    (object_->*method_)(arg1_);
+    if (needs_delete) delete this;
+  }
+
+ private:
+  Class* object_;
+  MethodType method_;
+  bool self_deleting_;
+  Arg1 arg1_;
+};
+
+template <typename Arg1, typename Arg2>
+class FunctionClosure2 : public Closure {
+ public:
+  typedef void (*FunctionType)(Arg1 arg1, Arg2 arg2);
+
+  FunctionClosure2(FunctionType function, bool self_deleting,
+                   Arg1 arg1, Arg2 arg2)
+    : function_(function), self_deleting_(self_deleting),
+      arg1_(arg1), arg2_(arg2) {}
+  ~FunctionClosure2() {}
+
+  void Run() {
+    bool needs_delete = self_deleting_;  // read in case callback deletes
+    function_(arg1_, arg2_);
+    if (needs_delete) delete this;
+  }
+
+ private:
+  FunctionType function_;
+  bool self_deleting_;
+  Arg1 arg1_;
+  Arg2 arg2_;
+};
+
+template <typename Class, typename Arg1, typename Arg2>
+class MethodClosure2 : public Closure {
+ public:
+  typedef void (Class::*MethodType)(Arg1 arg1, Arg2 arg2);
+
+  MethodClosure2(Class* object, MethodType method, bool self_deleting,
+                 Arg1 arg1, Arg2 arg2)
+    : object_(object), method_(method), self_deleting_(self_deleting),
+      arg1_(arg1), arg2_(arg2) {}
+  ~MethodClosure2() {}
+
+  void Run() {
+    bool needs_delete = self_deleting_;  // read in case callback deletes
+    (object_->*method_)(arg1_, arg2_);
+    if (needs_delete) delete this;
+  }
+
+ private:
+  Class* object_;
+  MethodType method_;
+  bool self_deleting_;
+  Arg1 arg1_;
+  Arg2 arg2_;
+};
+
+}  // namespace internal
+
+// See Closure.
+inline Closure* NewCallback(void (*function)()) {
+  return new internal::FunctionClosure0(function, true);
+}
+
+// See Closure.
+inline Closure* NewPermanentCallback(void (*function)()) {
+  return new internal::FunctionClosure0(function, false);
+}
+
+// See Closure.
+template <typename Class>
+inline Closure* NewCallback(Class* object, void (Class::*method)()) {
+  return new internal::MethodClosure0<Class>(object, method, true);
+}
+
+// See Closure.
+template <typename Class>
+inline Closure* NewPermanentCallback(Class* object, void (Class::*method)()) {
+  return new internal::MethodClosure0<Class>(object, method, false);
+}
+
+// See Closure.
+template <typename Arg1>
+inline Closure* NewCallback(void (*function)(Arg1),
+                            Arg1 arg1) {
+  return new internal::FunctionClosure1<Arg1>(function, true, arg1);
+}
+
+// See Closure.
+template <typename Arg1>
+inline Closure* NewPermanentCallback(void (*function)(Arg1),
+                                     Arg1 arg1) {
+  return new internal::FunctionClosure1<Arg1>(function, false, arg1);
+}
+
+// See Closure.
+template <typename Class, typename Arg1>
+inline Closure* NewCallback(Class* object, void (Class::*method)(Arg1),
+                            Arg1 arg1) {
+  return new internal::MethodClosure1<Class, Arg1>(object, method, true, arg1);
+}
+
+// See Closure.
+template <typename Class, typename Arg1>
+inline Closure* NewPermanentCallback(Class* object, void (Class::*method)(Arg1),
+                                     Arg1 arg1) {
+  return new internal::MethodClosure1<Class, Arg1>(object, method, false, arg1);
+}
+
+// See Closure.
+template <typename Arg1, typename Arg2>
+inline Closure* NewCallback(void (*function)(Arg1, Arg2),
+                            Arg1 arg1, Arg2 arg2) {
+  return new internal::FunctionClosure2<Arg1, Arg2>(
+    function, true, arg1, arg2);
+}
+
+// See Closure.
+template <typename Arg1, typename Arg2>
+inline Closure* NewPermanentCallback(void (*function)(Arg1, Arg2),
+                                     Arg1 arg1, Arg2 arg2) {
+  return new internal::FunctionClosure2<Arg1, Arg2>(
+    function, false, arg1, arg2);
+}
+
+// See Closure.
+template <typename Class, typename Arg1, typename Arg2>
+inline Closure* NewCallback(Class* object, void (Class::*method)(Arg1, Arg2),
+                            Arg1 arg1, Arg2 arg2) {
+  return new internal::MethodClosure2<Class, Arg1, Arg2>(
+    object, method, true, arg1, arg2);
+}
+
+// See Closure.
+template <typename Class, typename Arg1, typename Arg2>
+inline Closure* NewPermanentCallback(
+    Class* object, void (Class::*method)(Arg1, Arg2),
+    Arg1 arg1, Arg2 arg2) {
+  return new internal::MethodClosure2<Class, Arg1, Arg2>(
+    object, method, false, arg1, arg2);
+}
+
+// A function which does nothing. Useful for creating no-op callbacks, e.g.:
+//   Closure* nothing = NewCallback(&DoNothing);
+void LIBPROTOBUF_EXPORT DoNothing();
+
+// ===================================================================
+// emulates google3/base/mutex.h
+
+namespace internal {
+
+// A Mutex is a non-reentrant (aka non-recursive) mutex. At most one thread T
+// may hold a mutex at a given time. If T attempts to Lock() the same Mutex
+// while holding it, T will deadlock.
+class LIBPROTOBUF_EXPORT Mutex {
+ public:
+  // Create a Mutex that is not held by anybody.
+  Mutex();
+
+  // Destructor
+  ~Mutex();
+
+  // Block if necessary until this Mutex is free, then acquire it exclusively.
+  void Lock();
+
+  // Release this Mutex. Caller must hold it exclusively.
+  void Unlock();
+
+  // Crash if this Mutex is not held exclusively by this thread.
+  // May fail to crash when it should; will never crash when it should not.
+ void AssertHeld(); + + private: + struct Internal; + Internal* mInternal; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Mutex); +}; + +// MutexLock(mu) acquires mu when constructed and releases it when destroyed. +class LIBPROTOBUF_EXPORT MutexLock { + public: + explicit MutexLock(Mutex *mu) : mu_(mu) { this->mu_->Lock(); } + ~MutexLock() { this->mu_->Unlock(); } + private: + Mutex *const mu_; + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(MutexLock); +}; + +// TODO(kenton): Implement these? Hard to implement portably. +typedef MutexLock ReaderMutexLock; +typedef MutexLock WriterMutexLock; + +// MutexLockMaybe is like MutexLock, but is a no-op when mu is NULL. +class LIBPROTOBUF_EXPORT MutexLockMaybe { + public: + explicit MutexLockMaybe(Mutex *mu) : + mu_(mu) { if (this->mu_ != NULL) { this->mu_->Lock(); } } + ~MutexLockMaybe() { if (this->mu_ != NULL) { this->mu_->Unlock(); } } + private: + Mutex *const mu_; + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(MutexLockMaybe); +}; + +} // namespace internal + +// We made these internal so that they would show up as such in the docs, +// but we don't want to stick "internal::" in front of them everywhere. +using internal::Mutex; +using internal::MutexLock; +using internal::ReaderMutexLock; +using internal::WriterMutexLock; +using internal::MutexLockMaybe; + +// =================================================================== +// from google3/base/type_traits.h + +namespace internal { + +// Specified by TR1 [4.7.4] Pointer modifications. +template struct remove_pointer { typedef T type; }; +template struct remove_pointer { typedef T type; }; +template struct remove_pointer { typedef T type; }; +template struct remove_pointer { typedef T type; }; +template struct remove_pointer { + typedef T type; }; + +// =================================================================== + +// Checks if the buffer contains structurally-valid UTF-8. Implemented in +// structurally_valid.cc. +LIBPROTOBUF_EXPORT bool IsStructurallyValidUTF8(const char* buf, int len); + +} // namespace internal + +// =================================================================== +// Shutdown support. + +// Shut down the entire protocol buffers library, deleting all static-duration +// objects allocated by the library or by generated .pb.cc files. +// +// There are two reasons you might want to call this: +// * You use a draconian definition of "memory leak" in which you expect +// every single malloc() to have a corresponding free(), even for objects +// which live until program exit. +// * You are writing a dynamically-loaded library which needs to clean up +// after itself when the library is unloaded. +// +// It is safe to call this multiple times. However, it is not safe to use +// any other part of the protocol buffers library after +// ShutdownProtobufLibrary() has been called. +LIBPROTOBUF_EXPORT void ShutdownProtobufLibrary(); + +namespace internal { + +// Register a function to be called when ShutdownProtocolBuffers() is called. 
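+// (That is, when ShutdownProtobufLibrary(), declared above, runs; the
+// registered functions free the library's static-duration objects.)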
+LIBPROTOBUF_EXPORT void OnShutdown(void (*func)()); + +} // namespace internal + +#ifdef PROTOBUF_USE_EXCEPTIONS +class FatalException : public std::exception { + public: + FatalException(const char* filename, int line, const std::string& message) + : filename_(filename), line_(line), message_(message) {} + virtual ~FatalException() throw(); + + virtual const char* what() const throw(); + + const char* filename() const { return filename_; } + int line() const { return line_; } + const std::string& message() const { return message_; } + + private: + const char* filename_; + const int line_; + const std::string message_; +}; +#endif + +// This is at the end of the file instead of the beginning to work around a bug +// in some versions of MSVC. +using namespace std; // Don't do this at home, kids. + +} // namespace protobuf +} // namespace google + +#endif // GOOGLE_PROTOBUF_COMMON_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/common_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/common_unittest.cc new file mode 100644 index 0000000000..4109a52cb9 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/common_unittest.cc @@ -0,0 +1,357 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) + +#include +#include +#include +#include + +#include +#include + +#include "config.h" + +namespace google { +namespace protobuf { +namespace { + +// TODO(kenton): More tests. + +#ifdef PACKAGE_VERSION // only defined when using automake, not MSVC + +TEST(VersionTest, VersionMatchesConfig) { + // Verify that the version string specified in config.h matches the one + // in common.h. The config.h version is a string which may have a suffix + // like "beta" or "rc1", so we remove that. 
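+  // (For example, a value like "2.3.0-pre" would be trimmed to "2.3.0" here.)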
+  string version = PACKAGE_VERSION;
+  int pos = 0;
+  while (pos < version.size() &&
+         (ascii_isdigit(version[pos]) || version[pos] == '.')) {
+    ++pos;
+  }
+  version.erase(pos);
+
+  EXPECT_EQ(version, internal::VersionString(GOOGLE_PROTOBUF_VERSION));
+}
+
+#endif  // PACKAGE_VERSION
+
+TEST(CommonTest, IntMinMaxConstants) {
+  // kint32min was declared incorrectly in the first release of protobufs.
+  // Ugh.
+  EXPECT_LT(kint32min, kint32max);
+  EXPECT_EQ(static_cast<uint32>(kint32min), static_cast<uint32>(kint32max) + 1);
+  EXPECT_LT(kint64min, kint64max);
+  EXPECT_EQ(static_cast<uint64>(kint64min), static_cast<uint64>(kint64max) + 1);
+  EXPECT_EQ(0, kuint32max + 1);
+  EXPECT_EQ(0, kuint64max + 1);
+}
+
+vector<string> captured_messages_;
+
+void CaptureLog(LogLevel level, const char* filename, int line,
+                const string& message) {
+  captured_messages_.push_back(
+    strings::Substitute("$0 $1:$2: $3",
+                        implicit_cast<int>(level), filename, line, message));
+}
+
+TEST(LoggingTest, DefaultLogging) {
+  CaptureTestStderr();
+  int line = __LINE__;
+  GOOGLE_LOG(INFO   ) << "A message.";
+  GOOGLE_LOG(WARNING) << "A warning.";
+  GOOGLE_LOG(ERROR  ) << "An error.";
+
+  string text = GetCapturedTestStderr();
+  EXPECT_EQ(
+    "libprotobuf INFO "__FILE__":" + SimpleItoa(line + 1) + "] A message.\n"
+    "libprotobuf WARNING "__FILE__":" + SimpleItoa(line + 2) + "] A warning.\n"
+    "libprotobuf ERROR "__FILE__":" + SimpleItoa(line + 3) + "] An error.\n",
+    text);
+}
+
+TEST(LoggingTest, NullLogging) {
+  LogHandler* old_handler = SetLogHandler(NULL);
+
+  CaptureTestStderr();
+  GOOGLE_LOG(INFO   ) << "A message.";
+  GOOGLE_LOG(WARNING) << "A warning.";
+  GOOGLE_LOG(ERROR  ) << "An error.";
+
+  EXPECT_TRUE(SetLogHandler(old_handler) == NULL);
+
+  string text = GetCapturedTestStderr();
+  EXPECT_EQ("", text);
+}
+
+TEST(LoggingTest, CaptureLogging) {
+  captured_messages_.clear();
+
+  LogHandler* old_handler = SetLogHandler(&CaptureLog);
+
+  int start_line = __LINE__;
+  GOOGLE_LOG(ERROR) << "An error.";
+  GOOGLE_LOG(WARNING) << "A warning.";
+
+  EXPECT_TRUE(SetLogHandler(old_handler) == &CaptureLog);
+
+  ASSERT_EQ(2, captured_messages_.size());
+  EXPECT_EQ(
+    "2 "__FILE__":" + SimpleItoa(start_line + 1) + ": An error.",
+    captured_messages_[0]);
+  EXPECT_EQ(
+    "1 "__FILE__":" + SimpleItoa(start_line + 2) + ": A warning.",
+    captured_messages_[1]);
+}
+
+TEST(LoggingTest, SilenceLogging) {
+  captured_messages_.clear();
+
+  LogHandler* old_handler = SetLogHandler(&CaptureLog);
+
+  int line1 = __LINE__; GOOGLE_LOG(INFO) << "Visible1";
+  LogSilencer* silencer1 = new LogSilencer;
+  GOOGLE_LOG(INFO) << "Not visible.";
+  LogSilencer* silencer2 = new LogSilencer;
+  GOOGLE_LOG(INFO) << "Not visible.";
+  delete silencer1;
+  GOOGLE_LOG(INFO) << "Not visible.";
+  delete silencer2;
+  int line2 = __LINE__; GOOGLE_LOG(INFO) << "Visible2";
+
+  EXPECT_TRUE(SetLogHandler(old_handler) == &CaptureLog);
+
+  ASSERT_EQ(2, captured_messages_.size());
+  EXPECT_EQ(
+    "0 "__FILE__":" + SimpleItoa(line1) + ": Visible1",
+    captured_messages_[0]);
+  EXPECT_EQ(
+    "0 "__FILE__":" + SimpleItoa(line2) + ": Visible2",
+    captured_messages_[1]);
+}
+
+class ClosureTest : public testing::Test {
+ public:
+  void SetA123Method()   { a_ = 123; }
+  static void SetA123Function() { current_instance_->a_ = 123; }
+
+  void SetAMethod(int a)    { a_ = a; }
+  void SetCMethod(string c) { c_ = c; }
+
+  static void SetAFunction(int a)    { current_instance_->a_ = a; }
+  static void SetCFunction(string c) { current_instance_->c_ = c; }
+
+  void SetABMethod(int a, const char* b) { a_ = a; b_ = b; }
+  static void
SetABFunction(int a, const char* b) { + current_instance_->a_ = a; + current_instance_->b_ = b; + } + + virtual void SetUp() { + current_instance_ = this; + a_ = 0; + b_ = NULL; + c_.clear(); + permanent_closure_ = NULL; + } + + void DeleteClosureInCallback() { + delete permanent_closure_; + } + + int a_; + const char* b_; + string c_; + Closure* permanent_closure_; + + static ClosureTest* current_instance_; +}; + +ClosureTest* ClosureTest::current_instance_ = NULL; + +TEST_F(ClosureTest, TestClosureFunction0) { + Closure* closure = NewCallback(&SetA123Function); + EXPECT_NE(123, a_); + closure->Run(); + EXPECT_EQ(123, a_); +} + +TEST_F(ClosureTest, TestClosureMethod0) { + Closure* closure = NewCallback(current_instance_, + &ClosureTest::SetA123Method); + EXPECT_NE(123, a_); + closure->Run(); + EXPECT_EQ(123, a_); +} + +TEST_F(ClosureTest, TestClosureFunction1) { + Closure* closure = NewCallback(&SetAFunction, 456); + EXPECT_NE(456, a_); + closure->Run(); + EXPECT_EQ(456, a_); +} + +TEST_F(ClosureTest, TestClosureMethod1) { + Closure* closure = NewCallback(current_instance_, + &ClosureTest::SetAMethod, 456); + EXPECT_NE(456, a_); + closure->Run(); + EXPECT_EQ(456, a_); +} + +TEST_F(ClosureTest, TestClosureFunction1String) { + Closure* closure = NewCallback(&SetCFunction, string("test")); + EXPECT_NE("test", c_); + closure->Run(); + EXPECT_EQ("test", c_); +} + +TEST_F(ClosureTest, TestClosureMethod1String) { + Closure* closure = NewCallback(current_instance_, + &ClosureTest::SetCMethod, string("test")); + EXPECT_NE("test", c_); + closure->Run(); + EXPECT_EQ("test", c_); +} + +TEST_F(ClosureTest, TestClosureFunction2) { + const char* cstr = "hello"; + Closure* closure = NewCallback(&SetABFunction, 789, cstr); + EXPECT_NE(789, a_); + EXPECT_NE(cstr, b_); + closure->Run(); + EXPECT_EQ(789, a_); + EXPECT_EQ(cstr, b_); +} + +TEST_F(ClosureTest, TestClosureMethod2) { + const char* cstr = "hello"; + Closure* closure = NewCallback(current_instance_, + &ClosureTest::SetABMethod, 789, cstr); + EXPECT_NE(789, a_); + EXPECT_NE(cstr, b_); + closure->Run(); + EXPECT_EQ(789, a_); + EXPECT_EQ(cstr, b_); +} + +// Repeat all of the above with NewPermanentCallback() + +TEST_F(ClosureTest, TestPermanentClosureFunction0) { + Closure* closure = NewPermanentCallback(&SetA123Function); + EXPECT_NE(123, a_); + closure->Run(); + EXPECT_EQ(123, a_); + a_ = 0; + closure->Run(); + EXPECT_EQ(123, a_); + delete closure; +} + +TEST_F(ClosureTest, TestPermanentClosureMethod0) { + Closure* closure = NewPermanentCallback(current_instance_, + &ClosureTest::SetA123Method); + EXPECT_NE(123, a_); + closure->Run(); + EXPECT_EQ(123, a_); + a_ = 0; + closure->Run(); + EXPECT_EQ(123, a_); + delete closure; +} + +TEST_F(ClosureTest, TestPermanentClosureFunction1) { + Closure* closure = NewPermanentCallback(&SetAFunction, 456); + EXPECT_NE(456, a_); + closure->Run(); + EXPECT_EQ(456, a_); + a_ = 0; + closure->Run(); + EXPECT_EQ(456, a_); + delete closure; +} + +TEST_F(ClosureTest, TestPermanentClosureMethod1) { + Closure* closure = NewPermanentCallback(current_instance_, + &ClosureTest::SetAMethod, 456); + EXPECT_NE(456, a_); + closure->Run(); + EXPECT_EQ(456, a_); + a_ = 0; + closure->Run(); + EXPECT_EQ(456, a_); + delete closure; +} + +TEST_F(ClosureTest, TestPermanentClosureFunction2) { + const char* cstr = "hello"; + Closure* closure = NewPermanentCallback(&SetABFunction, 789, cstr); + EXPECT_NE(789, a_); + EXPECT_NE(cstr, b_); + closure->Run(); + EXPECT_EQ(789, a_); + EXPECT_EQ(cstr, b_); + a_ = 0; + b_ = NULL; + closure->Run(); 
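+  // The permanent closure is still alive after the first Run(), so running
+  // it again must set a_ and b_ back to the expected values.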
+ EXPECT_EQ(789, a_); + EXPECT_EQ(cstr, b_); + delete closure; +} + +TEST_F(ClosureTest, TestPermanentClosureMethod2) { + const char* cstr = "hello"; + Closure* closure = NewPermanentCallback(current_instance_, + &ClosureTest::SetABMethod, 789, cstr); + EXPECT_NE(789, a_); + EXPECT_NE(cstr, b_); + closure->Run(); + EXPECT_EQ(789, a_); + EXPECT_EQ(cstr, b_); + a_ = 0; + b_ = NULL; + closure->Run(); + EXPECT_EQ(789, a_); + EXPECT_EQ(cstr, b_); + delete closure; +} + +TEST_F(ClosureTest, TestPermanentClosureDeleteInCallback) { + permanent_closure_ = NewPermanentCallback((ClosureTest*) this, + &ClosureTest::DeleteClosureInCallback); + permanent_closure_->Run(); +} + +} // anonymous namespace +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/hash.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/hash.h new file mode 100644 index 0000000000..822d605013 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/hash.h @@ -0,0 +1,220 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// +// Deals with the fact that hash_map is not defined everywhere. + +#ifndef GOOGLE_PROTOBUF_STUBS_HASH_H__ +#define GOOGLE_PROTOBUF_STUBS_HASH_H__ + +#include +#include +#include "config.h" + +#if defined(HAVE_HASH_MAP) && defined(HAVE_HASH_SET) +#include HASH_MAP_H +#include HASH_SET_H +#else +#define MISSING_HASH +#include +#include +#endif + +namespace google { +namespace protobuf { + +#ifdef MISSING_HASH + +// This system doesn't have hash_map or hash_set. Emulate them using map and +// set. + +// Make hash be the same as less. 
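+// (std::map and std::set only need a strict weak ordering, so an operator()
+// that compares with "<" is all this emulation requires.)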
Note that everywhere where custom +// hash functions are defined in the protobuf code, they are also defined such +// that they can be used as "less" functions, which is required by MSVC anyway. +template +struct hash { + // Dummy, just to make derivative hash functions compile. + int operator()(const Key& key) { + GOOGLE_LOG(FATAL) << "Should never be called."; + return 0; + } + + inline bool operator()(const Key& a, const Key& b) const { + return a < b; + } +}; + +// Make sure char* is compared by value. +template <> +struct hash { + // Dummy, just to make derivative hash functions compile. + int operator()(const char* key) { + GOOGLE_LOG(FATAL) << "Should never be called."; + return 0; + } + + inline bool operator()(const char* a, const char* b) const { + return strcmp(a, b) < 0; + } +}; + +template , + typename EqualKey = int > +class hash_map : public std::map { +}; + +template , + typename EqualKey = int > +class hash_set : public std::set { +}; + +#elif defined(_MSC_VER) && !defined(_STLPORT_VERSION) + +template +struct hash : public HASH_NAMESPACE::hash_compare { +}; + +// MSVC's hash_compare hashes based on the string contents but +// compares based on the string pointer. WTF? +class CstringLess { + public: + inline bool operator()(const char* a, const char* b) const { + return strcmp(a, b) < 0; + } +}; + +template <> +struct hash + : public HASH_NAMESPACE::hash_compare { +}; + +template , + typename EqualKey = int > +class hash_map : public HASH_NAMESPACE::hash_map< + Key, Data, HashFcn> { +}; + +template , + typename EqualKey = int > +class hash_set : public HASH_NAMESPACE::hash_set< + Key, HashFcn> { +}; + +#else + +template +struct hash : public HASH_NAMESPACE::hash { +}; + +template +struct hash { + inline size_t operator()(const Key* key) const { + return reinterpret_cast(key); + } +}; + +// Unlike the old SGI version, the TR1 "hash" does not special-case char*. So, +// we go ahead and provide our own implementation. +template <> +struct hash { + inline size_t operator()(const char* str) const { + size_t result = 0; + for (; *str != '\0'; str++) { + result = 5 * result + *str; + } + return result; + } +}; + +template , + typename EqualKey = std::equal_to > +class hash_map : public HASH_NAMESPACE::HASH_MAP_CLASS< + Key, Data, HashFcn, EqualKey> { +}; + +template , + typename EqualKey = std::equal_to > +class hash_set : public HASH_NAMESPACE::HASH_SET_CLASS< + Key, HashFcn, EqualKey> { +}; + +#endif + +template <> +struct hash { + inline size_t operator()(const string& key) const { + return hash()(key.c_str()); + } + + static const size_t bucket_size = 4; + static const size_t min_buckets = 8; + inline size_t operator()(const string& a, const string& b) const { + return a < b; + } +}; + +template +struct hash > { + inline size_t operator()(const pair& key) const { + size_t first_hash = hash()(key.first); + size_t second_hash = hash()(key.second); + + // FIXME(kenton): What is the best way to compute this hash? I have + // no idea! This seems a bit better than an XOR. + return first_hash * ((1 << 16) - 1) + second_hash; + } + + static const size_t bucket_size = 4; + static const size_t min_buckets = 8; + inline size_t operator()(const pair& a, + const pair& b) const { + return a < b; + } +}; + +// Used by GCC/SGI STL only. (Why isn't this provided by the standard +// library? 
:( ) +struct streq { + inline bool operator()(const char* a, const char* b) const { + return strcmp(a, b) == 0; + } +}; + +} // namespace protobuf +} // namespace google + +#endif // GOOGLE_PROTOBUF_STUBS_HASH_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/map-util.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/map-util.h new file mode 100644 index 0000000000..f5c9d6b6d9 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/map-util.h @@ -0,0 +1,119 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// from google3/util/gtl/map-util.h +// Author: Anton Carver + +#ifndef GOOGLE_PROTOBUF_STUBS_MAP_UTIL_H__ +#define GOOGLE_PROTOBUF_STUBS_MAP_UTIL_H__ + +#include + +namespace google { +namespace protobuf { + +// Perform a lookup in a map or hash_map. +// If the key is present in the map then the value associated with that +// key is returned, otherwise the value passed as a default is returned. +template +const typename Collection::value_type::second_type& +FindWithDefault(const Collection& collection, + const typename Collection::value_type::first_type& key, + const typename Collection::value_type::second_type& value) { + typename Collection::const_iterator it = collection.find(key); + if (it == collection.end()) { + return value; + } + return it->second; +} + +// Perform a lookup in a map or hash_map. +// If the key is present a const pointer to the associated value is returned, +// otherwise a NULL pointer is returned. 
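+// The returned pointer points into the collection, so it is invalidated if
+// the corresponding entry is erased.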
+template +const typename Collection::value_type::second_type* +FindOrNull(const Collection& collection, + const typename Collection::value_type::first_type& key) { + typename Collection::const_iterator it = collection.find(key); + if (it == collection.end()) { + return 0; + } + return &it->second; +} + +// Perform a lookup in a map or hash_map whose values are pointers. +// If the key is present a const pointer to the associated value is returned, +// otherwise a NULL pointer is returned. +// This function does not distinguish between a missing key and a key mapped +// to a NULL value. +template +const typename Collection::value_type::second_type +FindPtrOrNull(const Collection& collection, + const typename Collection::value_type::first_type& key) { + typename Collection::const_iterator it = collection.find(key); + if (it == collection.end()) { + return 0; + } + return it->second; +} + +// Change the value associated with a particular key in a map or hash_map. +// If the key is not present in the map the key and value are inserted, +// otherwise the value is updated to be a copy of the value provided. +// True indicates that an insert took place, false indicates an update. +template +bool InsertOrUpdate(Collection * const collection, + const Key& key, const Value& value) { + pair ret = + collection->insert(typename Collection::value_type(key, value)); + if (!ret.second) { + // update + ret.first->second = value; + return false; + } + return true; +} + +// Insert a new key and value into a map or hash_map. +// If the key is not present in the map the key and value are +// inserted, otherwise nothing happens. True indicates that an insert +// took place, false indicates the key was already present. +template +bool InsertIfNotPresent(Collection * const collection, + const Key& key, const Value& value) { + pair ret = + collection->insert(typename Collection::value_type(key, value)); + return ret.second; +} + +} // namespace protobuf +} // namespace google + +#endif // GOOGLE_PROTOBUF_STUBS_MAP_UTIL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/once.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/once.cc new file mode 100644 index 0000000000..5b7af9ce99 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/once.cc @@ -0,0 +1,88 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// +// emulates google3/base/once.h +// +// This header is intended to be included only by internal .cc files and +// generated .pb.cc files. Users should not use this directly. + +#ifdef _WIN32 +#include +#endif + +#include + +namespace google { +namespace protobuf { + +#ifdef _WIN32 + +struct ProtobufOnceInternal { + ProtobufOnceInternal() { + InitializeCriticalSection(&critical_section); + } + ~ProtobufOnceInternal() { + DeleteCriticalSection(&critical_section); + } + CRITICAL_SECTION critical_section; +}; + +ProtobufOnceType::~ProtobufOnceType() +{ + delete internal_; + internal_ = NULL; +} + +ProtobufOnceType::ProtobufOnceType() { + // internal_ may be non-NULL if Init() was already called. + if (internal_ == NULL) internal_ = new ProtobufOnceInternal; +} + +void ProtobufOnceType::Init(void (*init_func)()) { + // internal_ may be NULL if we're still in dynamic initialization and the + // constructor has not been called yet. As mentioned in once.h, we assume + // that the program is still single-threaded at this time, and therefore it + // should be safe to initialize internal_ like so. + if (internal_ == NULL) internal_ = new ProtobufOnceInternal; + + EnterCriticalSection(&internal_->critical_section); + if (!initialized_) { + init_func(); + initialized_ = true; + } + LeaveCriticalSection(&internal_->critical_section); +} + +#endif + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/once.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/once.h new file mode 100644 index 0000000000..0dee407662 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/once.h @@ -0,0 +1,123 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// +// emulates google3/base/once.h +// +// This header is intended to be included only by internal .cc files and +// generated .pb.cc files. Users should not use this directly. +// +// This is basically a portable version of pthread_once(). +// +// This header declares three things: +// * A type called ProtobufOnceType. +// * A macro GOOGLE_PROTOBUF_DECLARE_ONCE() which declares a variable of type +// ProtobufOnceType. This is the only legal way to declare such a variable. +// The macro may only be used at the global scope (you cannot create local +// or class member variables of this type). +// * A function GogoleOnceInit(ProtobufOnceType* once, void (*init_func)()). +// This function, when invoked multiple times given the same ProtobufOnceType +// object, will invoke init_func on the first call only, and will make sure +// none of the calls return before that first call to init_func has finished. +// +// This implements a way to perform lazy initialization. It's more efficient +// than using mutexes as no lock is needed if initialization has already +// happened. +// +// Example usage: +// void Init(); +// GOOGLE_PROTOBUF_DECLARE_ONCE(once_init); +// +// // Calls Init() exactly once. +// void InitOnce() { +// GoogleOnceInit(&once_init, &Init); +// } +// +// Note that if GoogleOnceInit() is called before main() has begun, it must +// only be called by the thread that will eventually call main() -- that is, +// the thread that performs dynamic initialization. In general this is a safe +// assumption since people don't usually construct threads before main() starts, +// but it is technically not guaranteed. Unfortunately, Win32 provides no way +// whatsoever to statically-initialize its synchronization primitives, so our +// only choice is to assume that dynamic initialization is single-threaded. + +#ifndef GOOGLE_PROTOBUF_STUBS_ONCE_H__ +#define GOOGLE_PROTOBUF_STUBS_ONCE_H__ + +#include + +#ifndef _WIN32 +#include +#endif + +namespace google { +namespace protobuf { + +#ifdef _WIN32 + +struct ProtobufOnceInternal; + +struct LIBPROTOBUF_EXPORT ProtobufOnceType { + ProtobufOnceType(); + ~ProtobufOnceType(); + void Init(void (*init_func)()); + + volatile bool initialized_; + ProtobufOnceInternal* internal_; +}; + +#define GOOGLE_PROTOBUF_DECLARE_ONCE(NAME) \ + ::google::protobuf::ProtobufOnceType NAME + +inline void GoogleOnceInit(ProtobufOnceType* once, void (*init_func)()) { + // Note: Double-checked locking is safe on x86. 
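+  // A stale read of initialized_ here only sends the thread into Init(),
+  // which re-checks the flag under a critical section, so the worst case is
+  // taking the slow path unnecessarily.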
+ if (!once->initialized_) { + once->Init(init_func); + } +} + +#else + +typedef pthread_once_t ProtobufOnceType; + +#define GOOGLE_PROTOBUF_DECLARE_ONCE(NAME) \ + pthread_once_t NAME = PTHREAD_ONCE_INIT + +inline void GoogleOnceInit(ProtobufOnceType* once, void (*init_func)()) { + pthread_once(once, init_func); +} + +#endif + +} // namespace protobuf +} // namespace google + +#endif // GOOGLE_PROTOBUF_STUBS_ONCE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/once_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/once_unittest.cc new file mode 100644 index 0000000000..b8f86a0fc8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/once_unittest.cc @@ -0,0 +1,253 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) + +#ifdef _WIN32 +#include +#else +#include +#include +#endif + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace { + +class OnceInitTest : public testing::Test { + protected: + void SetUp() { + state_ = INIT_NOT_STARTED; + current_test_ = this; + } + + // Since ProtobufOnceType is only allowed to be allocated in static storage, + // each test must use a different pair of ProtobufOnceType objects which it + // must declare itself. 
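+  // (Each test below declares its own objects with GOOGLE_PROTOBUF_DECLARE_ONCE
+  // and passes them in via SetOnces().)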
+ void SetOnces(ProtobufOnceType* once, ProtobufOnceType* recursive_once) { + once_ = once; + recursive_once_ = recursive_once; + } + + void InitOnce() { + GoogleOnceInit(once_, &InitStatic); + } + void InitRecursiveOnce() { + GoogleOnceInit(recursive_once_, &InitRecursiveStatic); + } + + void BlockInit() { init_blocker_.Lock(); } + void UnblockInit() { init_blocker_.Unlock(); } + + class TestThread { + public: + TestThread(Closure* callback) + : done_(false), joined_(false), callback_(callback) { +#ifdef _WIN32 + thread_ = CreateThread(NULL, 0, &Start, this, 0, NULL); +#else + pthread_create(&thread_, NULL, &Start, this); +#endif + } + ~TestThread() { + if (!joined_) Join(); + } + + bool IsDone() { + MutexLock lock(&done_mutex_); + return done_; + } + void Join() { + joined_ = true; +#ifdef _WIN32 + WaitForSingleObject(thread_, INFINITE); + CloseHandle(thread_); +#else + pthread_join(thread_, NULL); +#endif + } + + private: +#ifdef _WIN32 + HANDLE thread_; +#else + pthread_t thread_; +#endif + + Mutex done_mutex_; + bool done_; + bool joined_; + Closure* callback_; + +#ifdef _WIN32 + static DWORD WINAPI Start(LPVOID arg) { +#else + static void* Start(void* arg) { +#endif + reinterpret_cast(arg)->Run(); + return 0; + } + + void Run() { + callback_->Run(); + MutexLock lock(&done_mutex_); + done_ = true; + } + }; + + TestThread* RunInitOnceInNewThread() { + return new TestThread(NewCallback(this, &OnceInitTest::InitOnce)); + } + TestThread* RunInitRecursiveOnceInNewThread() { + return new TestThread(NewCallback(this, &OnceInitTest::InitRecursiveOnce)); + } + + enum State { + INIT_NOT_STARTED, + INIT_STARTED, + INIT_DONE + }; + State CurrentState() { + MutexLock lock(&mutex_); + return state_; + } + + void WaitABit() { +#ifdef _WIN32 + Sleep(1000); +#else + sleep(1); +#endif + } + + private: + Mutex mutex_; + Mutex init_blocker_; + State state_; + ProtobufOnceType* once_; + ProtobufOnceType* recursive_once_; + + void Init() { + MutexLock lock(&mutex_); + EXPECT_EQ(INIT_NOT_STARTED, state_); + state_ = INIT_STARTED; + mutex_.Unlock(); + init_blocker_.Lock(); + init_blocker_.Unlock(); + mutex_.Lock(); + state_ = INIT_DONE; + } + + static OnceInitTest* current_test_; + static void InitStatic() { current_test_->Init(); } + static void InitRecursiveStatic() { current_test_->InitOnce(); } +}; + +OnceInitTest* OnceInitTest::current_test_ = NULL; + +GOOGLE_PROTOBUF_DECLARE_ONCE(simple_once); + +TEST_F(OnceInitTest, Simple) { + SetOnces(&simple_once, NULL); + + EXPECT_EQ(INIT_NOT_STARTED, CurrentState()); + InitOnce(); + EXPECT_EQ(INIT_DONE, CurrentState()); + + // Calling again has no effect. 
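+  // (If init ran a second time, the EXPECT_EQ(INIT_NOT_STARTED, state_) check
+  // inside Init() would fail.)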
+ InitOnce(); + EXPECT_EQ(INIT_DONE, CurrentState()); +} + +GOOGLE_PROTOBUF_DECLARE_ONCE(recursive_once1); +GOOGLE_PROTOBUF_DECLARE_ONCE(recursive_once2); + +TEST_F(OnceInitTest, Recursive) { + SetOnces(&recursive_once1, &recursive_once2); + + EXPECT_EQ(INIT_NOT_STARTED, CurrentState()); + InitRecursiveOnce(); + EXPECT_EQ(INIT_DONE, CurrentState()); +} + +GOOGLE_PROTOBUF_DECLARE_ONCE(multiple_threads_once); + +TEST_F(OnceInitTest, MultipleThreads) { + SetOnces(&multiple_threads_once, NULL); + + scoped_ptr threads[4]; + EXPECT_EQ(INIT_NOT_STARTED, CurrentState()); + for (int i = 0; i < 4; i++) { + threads[i].reset(RunInitOnceInNewThread()); + } + for (int i = 0; i < 4; i++) { + threads[i]->Join(); + } + EXPECT_EQ(INIT_DONE, CurrentState()); +} + +GOOGLE_PROTOBUF_DECLARE_ONCE(multiple_threads_blocked_once1); +GOOGLE_PROTOBUF_DECLARE_ONCE(multiple_threads_blocked_once2); + +TEST_F(OnceInitTest, MultipleThreadsBlocked) { + SetOnces(&multiple_threads_blocked_once1, &multiple_threads_blocked_once2); + + scoped_ptr threads[8]; + EXPECT_EQ(INIT_NOT_STARTED, CurrentState()); + + BlockInit(); + for (int i = 0; i < 4; i++) { + threads[i].reset(RunInitOnceInNewThread()); + } + for (int i = 4; i < 8; i++) { + threads[i].reset(RunInitRecursiveOnceInNewThread()); + } + + WaitABit(); + + // We should now have one thread blocked inside Init(), four blocked waiting + // for Init() to complete, and three blocked waiting for InitRecursive() to + // complete. + EXPECT_EQ(INIT_STARTED, CurrentState()); + UnblockInit(); + + for (int i = 0; i < 8; i++) { + threads[i]->Join(); + } + EXPECT_EQ(INIT_DONE, CurrentState()); +} + +} // anonymous namespace +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/stl_util-inl.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/stl_util-inl.h new file mode 100644 index 0000000000..a2e671bb74 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/stl_util-inl.h @@ -0,0 +1,121 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// from google3/util/gtl/stl_util-inl.h + +#ifndef GOOGLE_PROTOBUF_STUBS_STL_UTIL_INL_H__ +#define GOOGLE_PROTOBUF_STUBS_STL_UTIL_INL_H__ + +#include + +namespace google { +namespace protobuf { + +// STLDeleteContainerPointers() +// For a range within a container of pointers, calls delete +// (non-array version) on these pointers. +// NOTE: for these three functions, we could just implement a DeleteObject +// functor and then call for_each() on the range and functor, but this +// requires us to pull in all of algorithm.h, which seems expensive. +// For hash_[multi]set, it is important that this deletes behind the iterator +// because the hash_set may call the hash function on the iterator when it is +// advanced, which could result in the hash function trying to deference a +// stale pointer. +template +void STLDeleteContainerPointers(ForwardIterator begin, + ForwardIterator end) { + while (begin != end) { + ForwardIterator temp = begin; + ++begin; + delete *temp; + } +} + +// Inside Google, this function implements a horrible, disgusting hack in which +// we reach into the string's private implementation and resize it without +// initializing the new bytes. In some cases doing this can significantly +// improve performance. However, since it's totally non-portable it has no +// place in open source code. Feel free to fill this function in with your +// own disgusting hack if you want the perf boost. +inline void STLStringResizeUninitialized(string* s, size_t new_size) { + s->resize(new_size); +} + +// Return a mutable char* pointing to a string's internal buffer, +// which may not be null-terminated. Writing through this pointer will +// modify the string. +// +// string_as_array(&str)[i] is valid for 0 <= i < str.size() until the +// next call to a string method that invalidates iterators. +// +// As of 2006-04, there is no standard-blessed way of getting a +// mutable reference to a string's internal buffer. However, issue 530 +// (http://www.open-std.org/JTC1/SC22/WG21/docs/lwg-active.html#530) +// proposes this as the method. According to Matt Austern, this should +// already work on all current implementations. +inline char* string_as_array(string* str) { + // DO NOT USE const_cast(str->data())! See the unittest for why. + return str->empty() ? NULL : &*str->begin(); +} + +// STLDeleteElements() deletes all the elements in an STL container and clears +// the container. This function is suitable for use with a vector, set, +// hash_set, or any other STL container which defines sensible begin(), end(), +// and clear() methods. +// +// If container is NULL, this function is a no-op. +// +// As an alternative to calling STLDeleteElements() directly, consider +// ElementDeleter (defined below), which ensures that your container's elements +// are deleted when the ElementDeleter goes out of scope. 
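+// (ElementDeleter itself is not part of this stripped-down copy of the header;
+// only the deletion helpers below are included.)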
+template +void STLDeleteElements(T *container) { + if (!container) return; + STLDeleteContainerPointers(container->begin(), container->end()); + container->clear(); +} + +// Given an STL container consisting of (key, value) pairs, STLDeleteValues +// deletes all the "value" components and clears the container. Does nothing +// in the case it's given a NULL pointer. + +template +void STLDeleteValues(T *v) { + if (!v) return; + for (typename T::iterator i = v->begin(); i != v->end(); ++i) { + delete i->second; + } + v->clear(); +} + +} // namespace protobuf +} // namespace google + +#endif // GOOGLE_PROTOBUF_STUBS_STL_UTIL_INL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/structurally_valid.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/structurally_valid.cc new file mode 100644 index 0000000000..0f6afe6dc8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/structurally_valid.cc @@ -0,0 +1,536 @@ +// Copyright 2005-2008 Google Inc. All Rights Reserved. +// Author: jrm@google.com (Jim Meehan) + +#include + +namespace google { +namespace protobuf { +namespace internal { + +// These four-byte entries compactly encode how many bytes 0..255 to delete +// in making a string replacement, how many bytes to add 0..255, and the offset +// 0..64k-1 of the replacement string in remap_string. +struct RemapEntry { + uint8 delete_bytes; + uint8 add_bytes; + uint16 bytes_offset; +}; + +// Exit type codes for state tables. All but the first get stuffed into +// signed one-byte entries. The first is only generated by executable code. +// To distinguish from next-state entries, these must be contiguous and +// all <= kExitNone +typedef enum { + kExitDstSpaceFull = 239, + kExitIllegalStructure, // 240 + kExitOK, // 241 + kExitReject, // ... + kExitReplace1, + kExitReplace2, + kExitReplace3, + kExitReplace21, + kExitReplace31, + kExitReplace32, + kExitReplaceOffset1, + kExitReplaceOffset2, + kExitReplace1S0, + kExitSpecial, + kExitDoAgain, + kExitRejectAlt, + kExitNone // 255 +} ExitReason; + + +// This struct represents one entire state table. The three initialized byte +// areas are state_table, remap_base, and remap_string. state0 and state0_size +// give the byte offset and length within state_table of the initial state -- +// table lookups are expected to start and end in this state, but for +// truncated UTF-8 strings, may end in a different state. These allow a quick +// test for that condition. entry_shift is 8 for tables subscripted by a full +// byte value and 6 for space-optimized tables subscripted by only six +// significant bits in UTF-8 continuation bytes. 
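+// (The utf8acceptnonsurrogates table defined below uses entry_shift = 8, i.e.
+// it is indexed by full byte values.)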
+typedef struct { + const uint32 state0; + const uint32 state0_size; + const uint32 total_size; + const int max_expand; + const int entry_shift; + const int bytes_per_entry; + const uint32 losub; + const uint32 hiadd; + const uint8* state_table; + const RemapEntry* remap_base; + const uint8* remap_string; + const uint8* fast_state; +} UTF8StateMachineObj; + +typedef UTF8StateMachineObj UTF8ScanObj; + +#define X__ (kExitIllegalStructure) +#define RJ_ (kExitReject) +#define S1_ (kExitReplace1) +#define S2_ (kExitReplace2) +#define S3_ (kExitReplace3) +#define S21 (kExitReplace21) +#define S31 (kExitReplace31) +#define S32 (kExitReplace32) +#define T1_ (kExitReplaceOffset1) +#define T2_ (kExitReplaceOffset2) +#define S11 (kExitReplace1S0) +#define SP_ (kExitSpecial) +#define D__ (kExitDoAgain) +#define RJA (kExitRejectAlt) + +// Entire table has 9 state blocks of 256 entries each +static const unsigned int utf8acceptnonsurrogates_STATE0 = 0; // state[0] +static const unsigned int utf8acceptnonsurrogates_STATE0_SIZE = 256; // =[1] +static const unsigned int utf8acceptnonsurrogates_TOTAL_SIZE = 2304; +static const unsigned int utf8acceptnonsurrogates_MAX_EXPAND_X4 = 0; +static const unsigned int utf8acceptnonsurrogates_SHIFT = 8; +static const unsigned int utf8acceptnonsurrogates_BYTES = 1; +static const unsigned int utf8acceptnonsurrogates_LOSUB = 0x20202020; +static const unsigned int utf8acceptnonsurrogates_HIADD = 0x00000000; + +static const uint8 utf8acceptnonsurrogates[] = { +// state[0] 0x000000 Byte 1 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +X__, X__, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 7, 3, 3, + 4, 5, 5, 5, 6, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +// state[1] 0x000080 Byte 2 of 2 +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, 
X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +// state[2] 0x000000 Byte 2 of 3 +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +// state[3] 0x001000 Byte 2 of 3 +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +// state[4] 0x000000 Byte 2 of 4 +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 
+ +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +// state[5] 0x040000 Byte 2 of 4 +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +// state[6] 0x100000 Byte 2 of 4 +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +// state[7] 0x00d000 Byte 2 of 3 +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + + 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, + 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +// state[8] 0x00d800 Byte 3 of 3 +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, + +RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, +RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, +RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, +RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, RJ_, + +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, X__, +}; + +// Remap base[0] = (del, add, string_offset) +static const RemapEntry utf8acceptnonsurrogates_remap_base[] = { +{0, 0, 0} }; + +// Remap string[0] +static const unsigned char utf8acceptnonsurrogates_remap_string[] = { +0 }; + +static const unsigned char utf8acceptnonsurrogates_fast[256] = { +0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + +0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + +1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, +1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, +1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, +1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + +1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, +1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, +1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, +1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, +}; + +static const UTF8ScanObj utf8acceptnonsurrogates_obj = { + utf8acceptnonsurrogates_STATE0, + utf8acceptnonsurrogates_STATE0_SIZE, + utf8acceptnonsurrogates_TOTAL_SIZE, + utf8acceptnonsurrogates_MAX_EXPAND_X4, + utf8acceptnonsurrogates_SHIFT, + utf8acceptnonsurrogates_BYTES, + utf8acceptnonsurrogates_LOSUB, + utf8acceptnonsurrogates_HIADD, + utf8acceptnonsurrogates, + utf8acceptnonsurrogates_remap_base, + utf8acceptnonsurrogates_remap_string, + utf8acceptnonsurrogates_fast +}; + + +#undef X__ +#undef RJ_ +#undef S1_ +#undef S2_ +#undef S3_ +#undef S21 +#undef S31 +#undef S32 +#undef T1_ +#undef T2_ +#undef 
S11 +#undef SP_ +#undef D__ +#undef RJA + +// Return true if current Tbl pointer is within state0 range +// Note that unsigned compare checks both ends of range simultaneously +static inline bool InStateZero(const UTF8ScanObj* st, const uint8* Tbl) { + const uint8* Tbl0 = &st->state_table[st->state0]; + return (static_cast(Tbl - Tbl0) < st->state0_size); +} + +// Scan a UTF-8 string based on state table. +// Always scan complete UTF-8 characters +// Set number of bytes scanned. Return reason for exiting +int UTF8GenericScan(const UTF8ScanObj* st, + const char * str, + int str_length, + int* bytes_consumed) { + *bytes_consumed = 0; + if (str_length == 0) return kExitOK; + + int eshift = st->entry_shift; + const uint8* isrc = reinterpret_cast(str); + const uint8* src = isrc; + const uint8* srclimit = isrc + str_length; + const uint8* srclimit8 = srclimit - 7; + const uint8* Tbl_0 = &st->state_table[st->state0]; + + DoAgain: + // Do state-table scan + int e = 0; + uint8 c; + const uint8* Tbl2 = &st->fast_state[0]; + const uint32 losub = st->losub; + const uint32 hiadd = st->hiadd; + // Check initial few bytes one at a time until 8-byte aligned + //---------------------------- + while ((((uintptr_t)src & 0x07) != 0) && + (src < srclimit) && + Tbl2[src[0]] == 0) { + src++; + } + if (((uintptr_t)src & 0x07) == 0) { + // Do fast for groups of 8 identity bytes. + // This covers a lot of 7-bit ASCII ~8x faster then the 1-byte loop, + // including slowing slightly on cr/lf/ht + //---------------------------- + while (src < srclimit8) { + uint32 s0123 = (reinterpret_cast(src))[0]; + uint32 s4567 = (reinterpret_cast(src))[1]; + src += 8; + // This is a fast range check for all bytes in [lowsub..0x80-hiadd) + uint32 temp = (s0123 - losub) | (s0123 + hiadd) | + (s4567 - losub) | (s4567 + hiadd); + if ((temp & 0x80808080) != 0) { + // We typically end up here on cr/lf/ht; src was incremented + int e0123 = (Tbl2[src[-8]] | Tbl2[src[-7]]) | + (Tbl2[src[-6]] | Tbl2[src[-5]]); + if (e0123 != 0) { + src -= 8; + break; + } // Exit on Non-interchange + e0123 = (Tbl2[src[-4]] | Tbl2[src[-3]]) | + (Tbl2[src[-2]] | Tbl2[src[-1]]); + if (e0123 != 0) { + src -= 4; + break; + } // Exit on Non-interchange + // Else OK, go around again + } + } + } + //---------------------------- + + // Byte-at-a-time scan + //---------------------------- + const uint8* Tbl = Tbl_0; + while (src < srclimit) { + c = *src; + e = Tbl[c]; + src++; + if (e >= kExitIllegalStructure) {break;} + Tbl = &Tbl_0[e << eshift]; + } + //---------------------------- + + + // Exit posibilities: + // Some exit code, !state0, back up over last char + // Some exit code, state0, back up one byte exactly + // source consumed, !state0, back up over partial char + // source consumed, state0, exit OK + // For illegal byte in state0, avoid backup up over PREVIOUS char + // For truncated last char, back up to beginning of it + + if (e >= kExitIllegalStructure) { + // Back up over exactly one byte of rejected/illegal UTF-8 character + src--; + // Back up more if needed + if (!InStateZero(st, Tbl)) { + do { + src--; + } while ((src > isrc) && ((src[0] & 0xc0) == 0x80)); + } + } else if (!InStateZero(st, Tbl)) { + // Back up over truncated UTF-8 character + e = kExitIllegalStructure; + do { + src--; + } while ((src > isrc) && ((src[0] & 0xc0) == 0x80)); + } else { + // Normal termination, source fully consumed + e = kExitOK; + } + + if (e == kExitDoAgain) { + // Loop back up to the fast scan + goto DoAgain; + } + + *bytes_consumed = src - isrc; + return e; +} + 
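Editorial aside, not part of the vendored file: a minimal standalone sketch of the branchless "fast range check" used in the 8-byte loop above. The real scanner does two 32-bit loads per 8 bytes and takes losub/hiadd from the scan-table object; here both are assumed to be 0x01010101 ("every byte in [0x01, 0x7F)"), and a failed check simply means "fall back to the byte-at-a-time loop", so the filter only has to be conservative.

#include <cstdint>
#include <cstdio>
#include <cstring>

// Returns true when all 4 bytes of the word are plain ASCII in [0x01, 0x7F).
static bool AllBytesPlainAscii(const unsigned char* p) {
  uint32_t w;
  std::memcpy(&w, p, 4);                        // one 4-byte word at a time
  const uint32_t losub = 0x01010101u;           // borrow flags 0x00 bytes
  const uint32_t hiadd = 0x01010101u;           // carry flags bytes >= 0x7F
  uint32_t flags = (w - losub) | (w + hiadd);
  return (flags & 0x80808080u) == 0;            // no lane raised its high bit
}

int main() {
  const unsigned char ok[4]  = {'a', 'b', 'c', 'd'};
  const unsigned char bad[4] = {'a', 0xC3, 0xA9, 'd'};  // UTF-8 lead/continuation bytes
  std::printf("%d %d\n", AllBytesPlainAscii(ok), AllBytesPlainAscii(bad));  // 1 0
  return 0;
}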
+int UTF8GenericScanFastAscii(const UTF8ScanObj* st, + const char * str, + int str_length, + int* bytes_consumed) { + *bytes_consumed = 0; + if (str_length == 0) return kExitOK; + + const uint8* isrc = reinterpret_cast(str); + const uint8* src = isrc; + const uint8* srclimit = isrc + str_length; + const uint8* srclimit8 = srclimit - 7; + int n; + int rest_consumed; + int exit_reason; + do { + // Check initial few bytes one at a time until 8-byte aligned + while ((((uintptr_t)src & 0x07) != 0) && + (src < srclimit) && (src[0] < 0x80)) { + src++; + } + if (((uintptr_t)src & 0x07) == 0) { + while ((src < srclimit8) && + (((reinterpret_cast(src)[0] | + reinterpret_cast(src)[1]) & 0x80808080) == 0)) { + src += 8; + } + } + while ((src < srclimit) && (src[0] < 0x80)) { + src++; + } + // Run state table on the rest + n = src - isrc; + exit_reason = UTF8GenericScan(st, str + n, str_length - n, &rest_consumed); + src += rest_consumed; + } while ( exit_reason == kExitDoAgain ); + + *bytes_consumed = src - isrc; + return exit_reason; +} + +// Hack: On some compilers the static tables are initialized at startup. +// We can't use them until they are initialized. However, some Protocol +// Buffer parsing happens at static init time and may try to validate +// UTF-8 strings. Since UTF-8 validation is only used for debugging +// anyway, we simply always return success if initialization hasn't +// occurred yet. +namespace { + +bool module_initialized_ = false; + +struct InitDetector { + InitDetector() { + module_initialized_ = true; + } +}; +InitDetector init_detector; + +} // namespace + +bool IsStructurallyValidUTF8(const char* buf, int len) { + if (!module_initialized_) return true; + + int bytes_consumed = 0; + UTF8GenericScanFastAscii(&utf8acceptnonsurrogates_obj, + buf, len, &bytes_consumed); + return (bytes_consumed == len); +} + +} // namespace internal +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/structurally_valid_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/structurally_valid_unittest.cc new file mode 100644 index 0000000000..90888885ad --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/structurally_valid_unittest.cc @@ -0,0 +1,40 @@ +// Copyright 2008 Google Inc. All Rights Reserved. +// Author: xpeng@google.com (Peter Peng) + +#include +#include + +namespace google { +namespace protobuf { +namespace internal { +namespace { + +TEST(StructurallyValidTest, ValidUTF8String) { + // On GCC, this string can be written as: + // "abcd 1234 - \u2014\u2013\u2212" + // MSVC seems to interpret \u differently. 
+ string valid_str("abcd 1234 - \342\200\224\342\200\223\342\210\222 - xyz789"); + EXPECT_TRUE(IsStructurallyValidUTF8(valid_str.data(), + valid_str.size())); + // Additional check for pointer alignment + for (int i = 1; i < 8; ++i) { + EXPECT_TRUE(IsStructurallyValidUTF8(valid_str.data() + i, + valid_str.size() - i)); + } +} + +TEST(StructurallyValidTest, InvalidUTF8String) { + const string invalid_str("abcd\xA0\xB0\xA0\xB0\xA0\xB0 - xyz789"); + EXPECT_FALSE(IsStructurallyValidUTF8(invalid_str.data(), + invalid_str.size())); + // Additional check for pointer alignment + for (int i = 1; i < 8; ++i) { + EXPECT_FALSE(IsStructurallyValidUTF8(invalid_str.data() + i, + invalid_str.size() - i)); + } +} + +} // namespace +} // namespace internal +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/strutil.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/strutil.cc new file mode 100644 index 0000000000..ee07ce7590 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/strutil.cc @@ -0,0 +1,1173 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// from google3/strings/strutil.cc + +#include +#include +#include // FLT_DIG and DBL_DIG +#include +#include +#include +#include + +#ifdef _WIN32 +// MSVC has only _snprintf, not snprintf. +// +// MinGW has both snprintf and _snprintf, but they appear to be different +// functions. The former is buggy. When invoked like so: +// char buffer[32]; +// snprintf(buffer, 32, "%.*g\n", FLT_DIG, 1.23e10f); +// it prints "1.23000e+10". This is plainly wrong: %g should never print +// trailing zeros after the decimal point. For some reason this bug only +// occurs with some input values, not all. In any case, _snprintf does the +// right thing, so we use it. 
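Editorial aside, not part of the vendored file: the formatting call the comment above refers to, as a tiny standalone example. With a conforming snprintf, %.*g must drop trailing zeros after the decimal point; the MinGW bug described above would print "1.23000e+10" instead.

#include <cfloat>
#include <cstdio>

int main() {
  char buffer[32];
  std::snprintf(buffer, sizeof(buffer), "%.*g", FLT_DIG, 1.23e10f);
  std::printf("%s\n", buffer);   // expected "1.23e+10", not "1.23000e+10"
  return 0;
}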
+#define snprintf _snprintf +#endif + +namespace google { +namespace protobuf { + +inline bool IsNaN(double value) { + // NaN is never equal to anything, even itself. + return value != value; +} + +// These are defined as macros on some platforms. #undef them so that we can +// redefine them. +#undef isxdigit +#undef isprint + +// The definitions of these in ctype.h change based on locale. Since our +// string manipulation is all in relation to the protocol buffer and C++ +// languages, we always want to use the C locale. So, we re-define these +// exactly as we want them. +inline bool isxdigit(char c) { + return ('0' <= c && c <= '9') || + ('a' <= c && c <= 'f') || + ('A' <= c && c <= 'F'); +} + +inline bool isprint(char c) { + return c >= 0x20 && c <= 0x7E; +} + +// ---------------------------------------------------------------------- +// StripString +// Replaces any occurrence of the character 'remove' (or the characters +// in 'remove') with the character 'replacewith'. +// ---------------------------------------------------------------------- +void StripString(string* s, const char* remove, char replacewith) { + const char * str_start = s->c_str(); + const char * str = str_start; + for (str = strpbrk(str, remove); + str != NULL; + str = strpbrk(str + 1, remove)) { + (*s)[str - str_start] = replacewith; + } +} + +// ---------------------------------------------------------------------- +// StringReplace() +// Replace the "old" pattern with the "new" pattern in a string, +// and append the result to "res". If replace_all is false, +// it only replaces the first instance of "old." +// ---------------------------------------------------------------------- + +void StringReplace(const string& s, const string& oldsub, + const string& newsub, bool replace_all, + string* res) { + if (oldsub.empty()) { + res->append(s); // if empty, append the given string. + return; + } + + string::size_type start_pos = 0; + string::size_type pos; + do { + pos = s.find(oldsub, start_pos); + if (pos == string::npos) { + break; + } + res->append(s, start_pos, pos - start_pos); + res->append(newsub); + start_pos = pos + oldsub.size(); // start searching again after the "old" + } while (replace_all); + res->append(s, start_pos, s.length() - start_pos); +} + +// ---------------------------------------------------------------------- +// StringReplace() +// Give me a string and two patterns "old" and "new", and I replace +// the first instance of "old" in the string with "new", if it +// exists. If "global" is true; call this repeatedly until it +// fails. RETURN a new string, regardless of whether the replacement +// happened or not. +// ---------------------------------------------------------------------- + +string StringReplace(const string& s, const string& oldsub, + const string& newsub, bool replace_all) { + string ret; + StringReplace(s, oldsub, newsub, replace_all, &ret); + return ret; +} + +// ---------------------------------------------------------------------- +// SplitStringUsing() +// Split a string using a character delimiter. Append the components +// to 'result'. +// +// Note: For multi-character delimiters, this routine will split on *ANY* of +// the characters in the string, not the entire string as a single delimiter. +// ---------------------------------------------------------------------- +template +static inline +void SplitStringToIteratorUsing(const string& full, + const char* delim, + ITR& result) { + // Optimize the common case where delim is a single character. 
+ if (delim[0] != '\0' && delim[1] == '\0') { + char c = delim[0]; + const char* p = full.data(); + const char* end = p + full.size(); + while (p != end) { + if (*p == c) { + ++p; + } else { + const char* start = p; + while (++p != end && *p != c); + *result++ = string(start, p - start); + } + } + return; + } + + string::size_type begin_index, end_index; + begin_index = full.find_first_not_of(delim); + while (begin_index != string::npos) { + end_index = full.find_first_of(delim, begin_index); + if (end_index == string::npos) { + *result++ = full.substr(begin_index); + return; + } + *result++ = full.substr(begin_index, (end_index - begin_index)); + begin_index = full.find_first_not_of(delim, end_index); + } +} + +void SplitStringUsing(const string& full, + const char* delim, + vector* result) { + back_insert_iterator< vector > it(*result); + SplitStringToIteratorUsing(full, delim, it); +} + +// ---------------------------------------------------------------------- +// JoinStrings() +// This merges a vector of string components with delim inserted +// as separaters between components. +// +// ---------------------------------------------------------------------- +template +static void JoinStringsIterator(const ITERATOR& start, + const ITERATOR& end, + const char* delim, + string* result) { + GOOGLE_CHECK(result != NULL); + result->clear(); + int delim_length = strlen(delim); + + // Precompute resulting length so we can reserve() memory in one shot. + int length = 0; + for (ITERATOR iter = start; iter != end; ++iter) { + if (iter != start) { + length += delim_length; + } + length += iter->size(); + } + result->reserve(length); + + // Now combine everything. + for (ITERATOR iter = start; iter != end; ++iter) { + if (iter != start) { + result->append(delim, delim_length); + } + result->append(iter->data(), iter->size()); + } +} + +void JoinStrings(const vector& components, + const char* delim, + string * result) { + JoinStringsIterator(components.begin(), components.end(), delim, result); +} + +// ---------------------------------------------------------------------- +// UnescapeCEscapeSequences() +// This does all the unescaping that C does: \ooo, \r, \n, etc +// Returns length of resulting string. +// The implementation of \x parses any positive number of hex digits, +// but it is an error if the value requires more than 8 bits, and the +// result is truncated to 8 bits. +// +// The second call stores its errors in a supplied string vector. +// If the string vector pointer is NULL, it reports the errors with LOG(). +// ---------------------------------------------------------------------- + +#define IS_OCTAL_DIGIT(c) (((c) >= '0') && ((c) <= '7')) + +inline int hex_digit_to_int(char c) { + /* Assume ASCII. */ + assert('0' == 0x30 && 'A' == 0x41 && 'a' == 0x61); + assert(isxdigit(c)); + int x = static_cast(c); + if (x > '9') { + x += 9; + } + return x & 0xf; +} + +// Protocol buffers doesn't ever care about errors, but I don't want to remove +// the code. 
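Editorial aside, not part of the vendored file: a hedged usage sketch of the split/join helpers defined above. The include path is an assumption (in this tree the header actually lives deep under third_party/breakpad); consecutive delimiters are skipped, per the header's documentation.

#include <google/protobuf/stubs/strutil.h>   // assumed include path
#include <cstdio>

int main() {
  std::vector<std::string> parts;
  google::protobuf::SplitStringUsing("a,b,,c", ",", &parts);
  // Consecutive delimiters are skipped, so parts == {"a", "b", "c"}.
  std::string joined = google::protobuf::JoinStrings(parts, "-");
  std::printf("%s\n", joined.c_str());   // "a-b-c"
  return 0;
}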
+#define LOG_STRING(LEVEL, VECTOR) GOOGLE_LOG_IF(LEVEL, false) + +int UnescapeCEscapeSequences(const char* source, char* dest) { + return UnescapeCEscapeSequences(source, dest, NULL); +} + +int UnescapeCEscapeSequences(const char* source, char* dest, + vector *errors) { + GOOGLE_DCHECK(errors == NULL) << "Error reporting not implemented."; + + char* d = dest; + const char* p = source; + + // Small optimization for case where source = dest and there's no escaping + while ( p == d && *p != '\0' && *p != '\\' ) + p++, d++; + + while (*p != '\0') { + if (*p != '\\') { + *d++ = *p++; + } else { + switch ( *++p ) { // skip past the '\\' + case '\0': + LOG_STRING(ERROR, errors) << "String cannot end with \\"; + *d = '\0'; + return d - dest; // we're done with p + case 'a': *d++ = '\a'; break; + case 'b': *d++ = '\b'; break; + case 'f': *d++ = '\f'; break; + case 'n': *d++ = '\n'; break; + case 'r': *d++ = '\r'; break; + case 't': *d++ = '\t'; break; + case 'v': *d++ = '\v'; break; + case '\\': *d++ = '\\'; break; + case '?': *d++ = '\?'; break; // \? Who knew? + case '\'': *d++ = '\''; break; + case '"': *d++ = '\"'; break; + case '0': case '1': case '2': case '3': // octal digit: 1 to 3 digits + case '4': case '5': case '6': case '7': { + char ch = *p - '0'; + if ( IS_OCTAL_DIGIT(p[1]) ) + ch = ch * 8 + *++p - '0'; + if ( IS_OCTAL_DIGIT(p[1]) ) // safe (and easy) to do this twice + ch = ch * 8 + *++p - '0'; // now points at last digit + *d++ = ch; + break; + } + case 'x': case 'X': { + if (!isxdigit(p[1])) { + if (p[1] == '\0') { + LOG_STRING(ERROR, errors) << "String cannot end with \\x"; + } else { + LOG_STRING(ERROR, errors) << + "\\x cannot be followed by non-hex digit: \\" << *p << p[1]; + } + break; + } + unsigned int ch = 0; + const char *hex_start = p; + while (isxdigit(p[1])) // arbitrarily many hex digits + ch = (ch << 4) + hex_digit_to_int(*++p); + if (ch > 0xFF) + LOG_STRING(ERROR, errors) << "Value of " << + "\\" << string(hex_start, p+1-hex_start) << " exceeds 8 bits"; + *d++ = ch; + break; + } +#if 0 // TODO(kenton): Support \u and \U? Requires runetochar(). + case 'u': { + // \uhhhh => convert 4 hex digits to UTF-8 + char32 rune = 0; + const char *hex_start = p; + for (int i = 0; i < 4; ++i) { + if (isxdigit(p[1])) { // Look one char ahead. + rune = (rune << 4) + hex_digit_to_int(*++p); // Advance p. + } else { + LOG_STRING(ERROR, errors) + << "\\u must be followed by 4 hex digits: \\" + << string(hex_start, p+1-hex_start); + break; + } + } + d += runetochar(d, &rune); + break; + } + case 'U': { + // \Uhhhhhhhh => convert 8 hex digits to UTF-8 + char32 rune = 0; + const char *hex_start = p; + for (int i = 0; i < 8; ++i) { + if (isxdigit(p[1])) { // Look one char ahead. + // Don't change rune until we're sure this + // is within the Unicode limit, but do advance p. 
+ char32 newrune = (rune << 4) + hex_digit_to_int(*++p); + if (newrune > 0x10FFFF) { + LOG_STRING(ERROR, errors) + << "Value of \\" + << string(hex_start, p + 1 - hex_start) + << " exceeds Unicode limit (0x10FFFF)"; + break; + } else { + rune = newrune; + } + } else { + LOG_STRING(ERROR, errors) + << "\\U must be followed by 8 hex digits: \\" + << string(hex_start, p+1-hex_start); + break; + } + } + d += runetochar(d, &rune); + break; + } +#endif + default: + LOG_STRING(ERROR, errors) << "Unknown escape sequence: \\" << *p; + } + p++; // read past letter we escaped + } + } + *d = '\0'; + return d - dest; +} + +// ---------------------------------------------------------------------- +// UnescapeCEscapeString() +// This does the same thing as UnescapeCEscapeSequences, but creates +// a new string. The caller does not need to worry about allocating +// a dest buffer. This should be used for non performance critical +// tasks such as printing debug messages. It is safe for src and dest +// to be the same. +// +// The second call stores its errors in a supplied string vector. +// If the string vector pointer is NULL, it reports the errors with LOG(). +// +// In the first and second calls, the length of dest is returned. In the +// the third call, the new string is returned. +// ---------------------------------------------------------------------- +int UnescapeCEscapeString(const string& src, string* dest) { + return UnescapeCEscapeString(src, dest, NULL); +} + +int UnescapeCEscapeString(const string& src, string* dest, + vector *errors) { + scoped_array unescaped(new char[src.size() + 1]); + int len = UnescapeCEscapeSequences(src.c_str(), unescaped.get(), errors); + GOOGLE_CHECK(dest); + dest->assign(unescaped.get(), len); + return len; +} + +string UnescapeCEscapeString(const string& src) { + scoped_array unescaped(new char[src.size() + 1]); + int len = UnescapeCEscapeSequences(src.c_str(), unescaped.get(), NULL); + return string(unescaped.get(), len); +} + +// ---------------------------------------------------------------------- +// CEscapeString() +// CHexEscapeString() +// Copies 'src' to 'dest', escaping dangerous characters using +// C-style escape sequences. This is very useful for preparing query +// flags. 'src' and 'dest' should not overlap. The 'Hex' version uses +// hexadecimal rather than octal sequences. +// Returns the number of bytes written to 'dest' (not including the \0) +// or -1 if there was insufficient space. +// +// Currently only \n, \r, \t, ", ', \ and !isprint() chars are escaped. 
+// ---------------------------------------------------------------------- +int CEscapeInternal(const char* src, int src_len, char* dest, + int dest_len, bool use_hex, bool utf8_safe) { + const char* src_end = src + src_len; + int used = 0; + bool last_hex_escape = false; // true if last output char was \xNN + + for (; src < src_end; src++) { + if (dest_len - used < 2) // Need space for two letter escape + return -1; + + bool is_hex_escape = false; + switch (*src) { + case '\n': dest[used++] = '\\'; dest[used++] = 'n'; break; + case '\r': dest[used++] = '\\'; dest[used++] = 'r'; break; + case '\t': dest[used++] = '\\'; dest[used++] = 't'; break; + case '\"': dest[used++] = '\\'; dest[used++] = '\"'; break; + case '\'': dest[used++] = '\\'; dest[used++] = '\''; break; + case '\\': dest[used++] = '\\'; dest[used++] = '\\'; break; + default: + // Note that if we emit \xNN and the src character after that is a hex + // digit then that digit must be escaped too to prevent it being + // interpreted as part of the character code by C. + if ((!utf8_safe || static_cast(*src) < 0x80) && + (!isprint(*src) || + (last_hex_escape && isxdigit(*src)))) { + if (dest_len - used < 4) // need space for 4 letter escape + return -1; + sprintf(dest + used, (use_hex ? "\\x%02x" : "\\%03o"), + static_cast(*src)); + is_hex_escape = use_hex; + used += 4; + } else { + dest[used++] = *src; break; + } + } + last_hex_escape = is_hex_escape; + } + + if (dest_len - used < 1) // make sure that there is room for \0 + return -1; + + dest[used] = '\0'; // doesn't count towards return value though + return used; +} + +int CEscapeString(const char* src, int src_len, char* dest, int dest_len) { + return CEscapeInternal(src, src_len, dest, dest_len, false, false); +} + +// ---------------------------------------------------------------------- +// CEscape() +// CHexEscape() +// Copies 'src' to result, escaping dangerous characters using +// C-style escape sequences. This is very useful for preparing query +// flags. 'src' and 'dest' should not overlap. The 'Hex' version +// hexadecimal rather than octal sequences. +// +// Currently only \n, \r, \t, ", ', \ and !isprint() chars are escaped. 
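Editorial aside, not part of the vendored file: a hedged usage sketch of the escaping rules described above, calling the CEscapeString() defined earlier in this file (include path assumed). Printable "dangerous" characters get two-character escapes; other non-printable bytes come out as three-digit octal escapes.

#include <google/protobuf/stubs/strutil.h>   // assumed include path
#include <cstdio>

int main() {
  const char raw[] = "ab\ncd\x01\"";
  char escaped[64];
  int n = google::protobuf::CEscapeString(raw, sizeof(raw) - 1,
                                          escaped, sizeof(escaped));
  // Expected output (12 escaped bytes): ab\ncd\001\"
  std::printf("%d %s\n", n, escaped);
  return 0;
}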
+// ---------------------------------------------------------------------- +string CEscape(const string& src) { + const int dest_length = src.size() * 4 + 1; // Maximum possible expansion + scoped_array dest(new char[dest_length]); + const int len = CEscapeInternal(src.data(), src.size(), + dest.get(), dest_length, false, false); + GOOGLE_DCHECK_GE(len, 0); + return string(dest.get(), len); +} + +namespace strings { + +string Utf8SafeCEscape(const string& src) { + const int dest_length = src.size() * 4 + 1; // Maximum possible expansion + scoped_array dest(new char[dest_length]); + const int len = CEscapeInternal(src.data(), src.size(), + dest.get(), dest_length, false, true); + GOOGLE_DCHECK_GE(len, 0); + return string(dest.get(), len); +} + +string CHexEscape(const string& src) { + const int dest_length = src.size() * 4 + 1; // Maximum possible expansion + scoped_array dest(new char[dest_length]); + const int len = CEscapeInternal(src.data(), src.size(), + dest.get(), dest_length, true, false); + GOOGLE_DCHECK_GE(len, 0); + return string(dest.get(), len); +} + +} // namespace strings + +// ---------------------------------------------------------------------- +// strto32_adaptor() +// strtou32_adaptor() +// Implementation of strto[u]l replacements that have identical +// overflow and underflow characteristics for both ILP-32 and LP-64 +// platforms, including errno preservation in error-free calls. +// ---------------------------------------------------------------------- + +int32 strto32_adaptor(const char *nptr, char **endptr, int base) { + const int saved_errno = errno; + errno = 0; + const long result = strtol(nptr, endptr, base); + if (errno == ERANGE && result == LONG_MIN) { + return kint32min; + } else if (errno == ERANGE && result == LONG_MAX) { + return kint32max; + } else if (errno == 0 && result < kint32min) { + errno = ERANGE; + return kint32min; + } else if (errno == 0 && result > kint32max) { + errno = ERANGE; + return kint32max; + } + if (errno == 0) + errno = saved_errno; + return static_cast(result); +} + +uint32 strtou32_adaptor(const char *nptr, char **endptr, int base) { + const int saved_errno = errno; + errno = 0; + const unsigned long result = strtoul(nptr, endptr, base); + if (errno == ERANGE && result == ULONG_MAX) { + return kuint32max; + } else if (errno == 0 && result > kuint32max) { + errno = ERANGE; + return kuint32max; + } + if (errno == 0) + errno = saved_errno; + return static_cast(result); +} + +// ---------------------------------------------------------------------- +// FastIntToBuffer() +// FastInt64ToBuffer() +// FastHexToBuffer() +// FastHex64ToBuffer() +// FastHex32ToBuffer() +// ---------------------------------------------------------------------- + +// Offset into buffer where FastInt64ToBuffer places the end of string +// null character. Also used by FastInt64ToBufferLeft. +static const int kFastInt64ToBufferOffset = 21; + +char *FastInt64ToBuffer(int64 i, char* buffer) { + // We could collapse the positive and negative sections, but that + // would be slightly slower for positive numbers... + // 22 bytes is enough to store -2**64, -18446744073709551616. + char* p = buffer + kFastInt64ToBufferOffset; + *p-- = '\0'; + if (i >= 0) { + do { + *p-- = '0' + i % 10; + i /= 10; + } while (i > 0); + return p + 1; + } else { + // On different platforms, % and / have different behaviors for + // negative numbers, so we need to jump through hoops to make sure + // we don't divide negative numbers. 
+ if (i > -10) { + i = -i; + *p-- = '0' + i; + *p = '-'; + return p; + } else { + // Make sure we aren't at MIN_INT, in which case we can't say i = -i + i = i + 10; + i = -i; + *p-- = '0' + i % 10; + // Undo what we did a moment ago + i = i / 10 + 1; + do { + *p-- = '0' + i % 10; + i /= 10; + } while (i > 0); + *p = '-'; + return p; + } + } +} + +// Offset into buffer where FastInt32ToBuffer places the end of string +// null character. Also used by FastInt32ToBufferLeft +static const int kFastInt32ToBufferOffset = 11; + +// Yes, this is a duplicate of FastInt64ToBuffer. But, we need this for the +// compiler to generate 32 bit arithmetic instructions. It's much faster, at +// least with 32 bit binaries. +char *FastInt32ToBuffer(int32 i, char* buffer) { + // We could collapse the positive and negative sections, but that + // would be slightly slower for positive numbers... + // 12 bytes is enough to store -2**32, -4294967296. + char* p = buffer + kFastInt32ToBufferOffset; + *p-- = '\0'; + if (i >= 0) { + do { + *p-- = '0' + i % 10; + i /= 10; + } while (i > 0); + return p + 1; + } else { + // On different platforms, % and / have different behaviors for + // negative numbers, so we need to jump through hoops to make sure + // we don't divide negative numbers. + if (i > -10) { + i = -i; + *p-- = '0' + i; + *p = '-'; + return p; + } else { + // Make sure we aren't at MIN_INT, in which case we can't say i = -i + i = i + 10; + i = -i; + *p-- = '0' + i % 10; + // Undo what we did a moment ago + i = i / 10 + 1; + do { + *p-- = '0' + i % 10; + i /= 10; + } while (i > 0); + *p = '-'; + return p; + } + } +} + +char *FastHexToBuffer(int i, char* buffer) { + GOOGLE_CHECK(i >= 0) << "FastHexToBuffer() wants non-negative integers, not " << i; + + static const char *hexdigits = "0123456789abcdef"; + char *p = buffer + 21; + *p-- = '\0'; + do { + *p-- = hexdigits[i & 15]; // mod by 16 + i >>= 4; // divide by 16 + } while (i > 0); + return p + 1; +} + +char *InternalFastHexToBuffer(uint64 value, char* buffer, int num_byte) { + static const char *hexdigits = "0123456789abcdef"; + buffer[num_byte] = '\0'; + for (int i = num_byte - 1; i >= 0; i--) { +#ifdef _M_X64 + // MSVC x64 platform has a bug optimizing the uint32(value) in the #else + // block. Given that the uint32 cast was to improve performance on 32-bit + // platforms, we use 64-bit '&' directly. + buffer[i] = hexdigits[value & 0xf]; +#else + buffer[i] = hexdigits[uint32(value) & 0xf]; +#endif + value >>= 4; + } + return buffer; +} + +char *FastHex64ToBuffer(uint64 value, char* buffer) { + return InternalFastHexToBuffer(value, buffer, 16); +} + +char *FastHex32ToBuffer(uint32 value, char* buffer) { + return InternalFastHexToBuffer(value, buffer, 8); +} + +static inline char* PlaceNum(char* p, int num, char prev_sep) { + *p-- = '0' + num % 10; + *p-- = '0' + num / 10; + *p-- = prev_sep; + return p; +} + +// ---------------------------------------------------------------------- +// FastInt32ToBufferLeft() +// FastUInt32ToBufferLeft() +// FastInt64ToBufferLeft() +// FastUInt64ToBufferLeft() +// +// Like the Fast*ToBuffer() functions above, these are intended for speed. +// Unlike the Fast*ToBuffer() functions, however, these functions write +// their output to the beginning of the buffer (hence the name, as the +// output is left-aligned). The caller is responsible for ensuring that +// the buffer has enough space to hold the output. +// +// Returns a pointer to the end of the string (i.e. the null character +// terminating the string). 
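Editorial aside, not part of the vendored file: a minimal usage sketch of the left-aligned formatters documented above. It assumes kFastToBufferSize and FastInt32ToBufferLeft() are declared in strutil.h (as they are used elsewhere in this file) and that the include path below resolves.

#include <google/protobuf/stubs/strutil.h>   // assumed include path
#include <cstdio>

int main() {
  char buf[google::protobuf::kFastToBufferSize];
  char* end = google::protobuf::FastInt32ToBufferLeft(-1234, buf);
  // buf now holds "-1234"; end points at the terminating '\0'.
  std::printf("%s (%d chars)\n", buf, static_cast<int>(end - buf));  // -1234 (5 chars)
  return 0;
}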
+// ---------------------------------------------------------------------- + +static const char two_ASCII_digits[100][2] = { + {'0','0'}, {'0','1'}, {'0','2'}, {'0','3'}, {'0','4'}, + {'0','5'}, {'0','6'}, {'0','7'}, {'0','8'}, {'0','9'}, + {'1','0'}, {'1','1'}, {'1','2'}, {'1','3'}, {'1','4'}, + {'1','5'}, {'1','6'}, {'1','7'}, {'1','8'}, {'1','9'}, + {'2','0'}, {'2','1'}, {'2','2'}, {'2','3'}, {'2','4'}, + {'2','5'}, {'2','6'}, {'2','7'}, {'2','8'}, {'2','9'}, + {'3','0'}, {'3','1'}, {'3','2'}, {'3','3'}, {'3','4'}, + {'3','5'}, {'3','6'}, {'3','7'}, {'3','8'}, {'3','9'}, + {'4','0'}, {'4','1'}, {'4','2'}, {'4','3'}, {'4','4'}, + {'4','5'}, {'4','6'}, {'4','7'}, {'4','8'}, {'4','9'}, + {'5','0'}, {'5','1'}, {'5','2'}, {'5','3'}, {'5','4'}, + {'5','5'}, {'5','6'}, {'5','7'}, {'5','8'}, {'5','9'}, + {'6','0'}, {'6','1'}, {'6','2'}, {'6','3'}, {'6','4'}, + {'6','5'}, {'6','6'}, {'6','7'}, {'6','8'}, {'6','9'}, + {'7','0'}, {'7','1'}, {'7','2'}, {'7','3'}, {'7','4'}, + {'7','5'}, {'7','6'}, {'7','7'}, {'7','8'}, {'7','9'}, + {'8','0'}, {'8','1'}, {'8','2'}, {'8','3'}, {'8','4'}, + {'8','5'}, {'8','6'}, {'8','7'}, {'8','8'}, {'8','9'}, + {'9','0'}, {'9','1'}, {'9','2'}, {'9','3'}, {'9','4'}, + {'9','5'}, {'9','6'}, {'9','7'}, {'9','8'}, {'9','9'} +}; + +char* FastUInt32ToBufferLeft(uint32 u, char* buffer) { + int digits; + const char *ASCII_digits = NULL; + // The idea of this implementation is to trim the number of divides to as few + // as possible by using multiplication and subtraction rather than mod (%), + // and by outputting two digits at a time rather than one. + // The huge-number case is first, in the hopes that the compiler will output + // that case in one branch-free block of code, and only output conditional + // branches into it from below. 
+ if (u >= 1000000000) { // >= 1,000,000,000 + digits = u / 100000000; // 100,000,000 + ASCII_digits = two_ASCII_digits[digits]; + buffer[0] = ASCII_digits[0]; + buffer[1] = ASCII_digits[1]; + buffer += 2; +sublt100_000_000: + u -= digits * 100000000; // 100,000,000 +lt100_000_000: + digits = u / 1000000; // 1,000,000 + ASCII_digits = two_ASCII_digits[digits]; + buffer[0] = ASCII_digits[0]; + buffer[1] = ASCII_digits[1]; + buffer += 2; +sublt1_000_000: + u -= digits * 1000000; // 1,000,000 +lt1_000_000: + digits = u / 10000; // 10,000 + ASCII_digits = two_ASCII_digits[digits]; + buffer[0] = ASCII_digits[0]; + buffer[1] = ASCII_digits[1]; + buffer += 2; +sublt10_000: + u -= digits * 10000; // 10,000 +lt10_000: + digits = u / 100; + ASCII_digits = two_ASCII_digits[digits]; + buffer[0] = ASCII_digits[0]; + buffer[1] = ASCII_digits[1]; + buffer += 2; +sublt100: + u -= digits * 100; +lt100: + digits = u; + ASCII_digits = two_ASCII_digits[digits]; + buffer[0] = ASCII_digits[0]; + buffer[1] = ASCII_digits[1]; + buffer += 2; +done: + *buffer = 0; + return buffer; + } + + if (u < 100) { + digits = u; + if (u >= 10) goto lt100; + *buffer++ = '0' + digits; + goto done; + } + if (u < 10000) { // 10,000 + if (u >= 1000) goto lt10_000; + digits = u / 100; + *buffer++ = '0' + digits; + goto sublt100; + } + if (u < 1000000) { // 1,000,000 + if (u >= 100000) goto lt1_000_000; + digits = u / 10000; // 10,000 + *buffer++ = '0' + digits; + goto sublt10_000; + } + if (u < 100000000) { // 100,000,000 + if (u >= 10000000) goto lt100_000_000; + digits = u / 1000000; // 1,000,000 + *buffer++ = '0' + digits; + goto sublt1_000_000; + } + // we already know that u < 1,000,000,000 + digits = u / 100000000; // 100,000,000 + *buffer++ = '0' + digits; + goto sublt100_000_000; +} + +char* FastInt32ToBufferLeft(int32 i, char* buffer) { + uint32 u = i; + if (i < 0) { + *buffer++ = '-'; + u = -i; + } + return FastUInt32ToBufferLeft(u, buffer); +} + +char* FastUInt64ToBufferLeft(uint64 u64, char* buffer) { + int digits; + const char *ASCII_digits = NULL; + + uint32 u = static_cast(u64); + if (u == u64) return FastUInt32ToBufferLeft(u, buffer); + + uint64 top_11_digits = u64 / 1000000000; + buffer = FastUInt64ToBufferLeft(top_11_digits, buffer); + u = u64 - (top_11_digits * 1000000000); + + digits = u / 10000000; // 10,000,000 + GOOGLE_DCHECK_LT(digits, 100); + ASCII_digits = two_ASCII_digits[digits]; + buffer[0] = ASCII_digits[0]; + buffer[1] = ASCII_digits[1]; + buffer += 2; + u -= digits * 10000000; // 10,000,000 + digits = u / 100000; // 100,000 + ASCII_digits = two_ASCII_digits[digits]; + buffer[0] = ASCII_digits[0]; + buffer[1] = ASCII_digits[1]; + buffer += 2; + u -= digits * 100000; // 100,000 + digits = u / 1000; // 1,000 + ASCII_digits = two_ASCII_digits[digits]; + buffer[0] = ASCII_digits[0]; + buffer[1] = ASCII_digits[1]; + buffer += 2; + u -= digits * 1000; // 1,000 + digits = u / 10; + ASCII_digits = two_ASCII_digits[digits]; + buffer[0] = ASCII_digits[0]; + buffer[1] = ASCII_digits[1]; + buffer += 2; + u -= digits * 10; + digits = u; + *buffer++ = '0' + digits; + *buffer = 0; + return buffer; +} + +char* FastInt64ToBufferLeft(int64 i, char* buffer) { + uint64 u = i; + if (i < 0) { + *buffer++ = '-'; + u = -i; + } + return FastUInt64ToBufferLeft(u, buffer); +} + +// ---------------------------------------------------------------------- +// SimpleItoa() +// Description: converts an integer to a string. 
+// +// Return value: string +// ---------------------------------------------------------------------- + +string SimpleItoa(int i) { + char buffer[kFastToBufferSize]; + return (sizeof(i) == 4) ? + FastInt32ToBuffer(i, buffer) : + FastInt64ToBuffer(i, buffer); +} + +string SimpleItoa(unsigned int i) { + char buffer[kFastToBufferSize]; + return string(buffer, (sizeof(i) == 4) ? + FastUInt32ToBufferLeft(i, buffer) : + FastUInt64ToBufferLeft(i, buffer)); +} + +string SimpleItoa(long i) { + char buffer[kFastToBufferSize]; + return (sizeof(i) == 4) ? + FastInt32ToBuffer(i, buffer) : + FastInt64ToBuffer(i, buffer); +} + +string SimpleItoa(unsigned long i) { + char buffer[kFastToBufferSize]; + return string(buffer, (sizeof(i) == 4) ? + FastUInt32ToBufferLeft(i, buffer) : + FastUInt64ToBufferLeft(i, buffer)); +} + +string SimpleItoa(long long i) { + char buffer[kFastToBufferSize]; + return (sizeof(i) == 4) ? + FastInt32ToBuffer(i, buffer) : + FastInt64ToBuffer(i, buffer); +} + +string SimpleItoa(unsigned long long i) { + char buffer[kFastToBufferSize]; + return string(buffer, (sizeof(i) == 4) ? + FastUInt32ToBufferLeft(i, buffer) : + FastUInt64ToBufferLeft(i, buffer)); +} + +// ---------------------------------------------------------------------- +// SimpleDtoa() +// SimpleFtoa() +// DoubleToBuffer() +// FloatToBuffer() +// We want to print the value without losing precision, but we also do +// not want to print more digits than necessary. This turns out to be +// trickier than it sounds. Numbers like 0.2 cannot be represented +// exactly in binary. If we print 0.2 with a very large precision, +// e.g. "%.50g", we get "0.2000000000000000111022302462515654042363167". +// On the other hand, if we set the precision too low, we lose +// significant digits when printing numbers that actually need them. +// It turns out there is no precision value that does the right thing +// for all numbers. +// +// Our strategy is to first try printing with a precision that is never +// over-precise, then parse the result with strtod() to see if it +// matches. If not, we print again with a precision that will always +// give a precise result, but may use more digits than necessary. +// +// An arguably better strategy would be to use the algorithm described +// in "How to Print Floating-Point Numbers Accurately" by Steele & +// White, e.g. as implemented by David M. Gay's dtoa(). It turns out, +// however, that the following implementation is about as fast as +// DMG's code. Furthermore, DMG's code locks mutexes, which means it +// will not scale well on multi-core machines. DMG's code is slightly +// more accurate (in that it will never use more digits than +// necessary), but this is probably irrelevant for most users. +// +// Rob Pike and Ken Thompson also have an implementation of dtoa() in +// third_party/fmt/fltfmt.cc. Their implementation is similar to this +// one in that it makes guesses and then uses strtod() to check them. +// Their implementation is faster because they use their own code to +// generate the digits in the first place rather than use snprintf(), +// thus avoiding format string parsing overhead. However, this makes +// it considerably more complicated than the following implementation, +// and it is embedded in a larger library. If speed turns out to be +// an issue, we could re-implement this in terms of their +// implementation. 
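Editorial aside, not part of the vendored file: a standalone sketch of the strategy the comment above describes. Print with DBL_DIG digits first (never over-precise), parse the result back with strtod(), and only fall back to DBL_DIG+2 digits when the short form loses information. Infinities and NaN, which the real DoubleToBuffer() special-cases, are ignored here.

#include <cfloat>
#include <cstdio>
#include <cstdlib>

static void ShortestRoundTrip(double value, char* buf, int size) {
  std::snprintf(buf, size, "%.*g", DBL_DIG, value);        // short form first
  if (std::strtod(buf, nullptr) != value) {                // did we lose bits?
    std::snprintf(buf, size, "%.*g", DBL_DIG + 2, value);  // exact, possibly longer
  }
}

int main() {
  char buf[32];
  ShortestRoundTrip(0.2, buf, sizeof(buf));
  std::printf("%s\n", buf);   // "0.2", not 0.2000000000000000111...
  ShortestRoundTrip(0.1 + 0.2, buf, sizeof(buf));
  std::printf("%s\n", buf);   // needs the long form: 0.30000000000000004
  return 0;
}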
+// ---------------------------------------------------------------------- + +string SimpleDtoa(double value) { + char buffer[kDoubleToBufferSize]; + return DoubleToBuffer(value, buffer); +} + +string SimpleFtoa(float value) { + char buffer[kFloatToBufferSize]; + return FloatToBuffer(value, buffer); +} + +static inline bool IsValidFloatChar(char c) { + return ('0' <= c && c <= '9') || + c == 'e' || c == 'E' || + c == '+' || c == '-'; +} + +void DelocalizeRadix(char* buffer) { + // Fast check: if the buffer has a normal decimal point, assume no + // translation is needed. + if (strchr(buffer, '.') != NULL) return; + + // Find the first unknown character. + while (IsValidFloatChar(*buffer)) ++buffer; + + if (*buffer == '\0') { + // No radix character found. + return; + } + + // We are now pointing at the locale-specific radix character. Replace it + // with '.'. + *buffer = '.'; + ++buffer; + + if (!IsValidFloatChar(*buffer) && *buffer != '\0') { + // It appears the radix was a multi-byte character. We need to remove the + // extra bytes. + char* target = buffer; + do { ++buffer; } while (!IsValidFloatChar(*buffer) && *buffer != '\0'); + memmove(target, buffer, strlen(buffer) + 1); + } +} + +char* DoubleToBuffer(double value, char* buffer) { + // DBL_DIG is 15 for IEEE-754 doubles, which are used on almost all + // platforms these days. Just in case some system exists where DBL_DIG + // is significantly larger -- and risks overflowing our buffer -- we have + // this assert. + GOOGLE_COMPILE_ASSERT(DBL_DIG < 20, DBL_DIG_is_too_big); + + if (value == numeric_limits::infinity()) { + strcpy(buffer, "inf"); + return buffer; + } else if (value == -numeric_limits::infinity()) { + strcpy(buffer, "-inf"); + return buffer; + } else if (IsNaN(value)) { + strcpy(buffer, "nan"); + return buffer; + } + + int snprintf_result = + snprintf(buffer, kDoubleToBufferSize, "%.*g", DBL_DIG, value); + + // The snprintf should never overflow because the buffer is significantly + // larger than the precision we asked for. + GOOGLE_DCHECK(snprintf_result > 0 && snprintf_result < kDoubleToBufferSize); + + // We need to make parsed_value volatile in order to force the compiler to + // write it out to the stack. Otherwise, it may keep the value in a + // register, and if it does that, it may keep it as a long double instead + // of a double. This long double may have extra bits that make it compare + // unequal to "value" even though it would be exactly equal if it were + // truncated to a double. + volatile double parsed_value = strtod(buffer, NULL); + if (parsed_value != value) { + int snprintf_result = + snprintf(buffer, kDoubleToBufferSize, "%.*g", DBL_DIG+2, value); + + // Should never overflow; see above. + GOOGLE_DCHECK(snprintf_result > 0 && snprintf_result < kDoubleToBufferSize); + } + + DelocalizeRadix(buffer); + return buffer; +} + +bool safe_strtof(const char* str, float* value) { + char* endptr; + errno = 0; // errno only gets set on errors +#if defined(_WIN32) || defined (__hpux) // has no strtof() + *value = strtod(str, &endptr); +#else + *value = strtof(str, &endptr); +#endif + return *str != 0 && *endptr == 0 && errno == 0; +} + +char* FloatToBuffer(float value, char* buffer) { + // FLT_DIG is 6 for IEEE-754 floats, which are used on almost all + // platforms these days. Just in case some system exists where FLT_DIG + // is significantly larger -- and risks overflowing our buffer -- we have + // this assert. 
+ GOOGLE_COMPILE_ASSERT(FLT_DIG < 10, FLT_DIG_is_too_big); + + if (value == numeric_limits::infinity()) { + strcpy(buffer, "inf"); + return buffer; + } else if (value == -numeric_limits::infinity()) { + strcpy(buffer, "-inf"); + return buffer; + } else if (IsNaN(value)) { + strcpy(buffer, "nan"); + return buffer; + } + + int snprintf_result = + snprintf(buffer, kFloatToBufferSize, "%.*g", FLT_DIG, value); + + // The snprintf should never overflow because the buffer is significantly + // larger than the precision we asked for. + GOOGLE_DCHECK(snprintf_result > 0 && snprintf_result < kFloatToBufferSize); + + float parsed_value; + if (!safe_strtof(buffer, &parsed_value) || parsed_value != value) { + int snprintf_result = + snprintf(buffer, kFloatToBufferSize, "%.*g", FLT_DIG+2, value); + + // Should never overflow; see above. + GOOGLE_DCHECK(snprintf_result > 0 && snprintf_result < kFloatToBufferSize); + } + + DelocalizeRadix(buffer); + return buffer; +} + +// ---------------------------------------------------------------------- +// NoLocaleStrtod() +// This code will make you cry. +// ---------------------------------------------------------------------- + +// Returns a string identical to *input except that the character pointed to +// by radix_pos (which should be '.') is replaced with the locale-specific +// radix character. +string LocalizeRadix(const char* input, const char* radix_pos) { + // Determine the locale-specific radix character by calling sprintf() to + // print the number 1.5, then stripping off the digits. As far as I can + // tell, this is the only portable, thread-safe way to get the C library + // to divuldge the locale's radix character. No, localeconv() is NOT + // thread-safe. + char temp[16]; + int size = sprintf(temp, "%.1f", 1.5); + GOOGLE_CHECK_EQ(temp[0], '1'); + GOOGLE_CHECK_EQ(temp[size-1], '5'); + GOOGLE_CHECK_LE(size, 6); + + // Now replace the '.' in the input with it. + string result; + result.reserve(strlen(input) + size - 3); + result.append(input, radix_pos); + result.append(temp + 1, size - 2); + result.append(radix_pos + 1); + return result; +} + +double NoLocaleStrtod(const char* text, char** original_endptr) { + // We cannot simply set the locale to "C" temporarily with setlocale() + // as this is not thread-safe. Instead, we try to parse in the current + // locale first. If parsing stops at a '.' character, then this is a + // pretty good hint that we're actually in some other locale in which + // '.' is not the radix character. + + char* temp_endptr; + double result = strtod(text, &temp_endptr); + if (original_endptr != NULL) *original_endptr = temp_endptr; + if (*temp_endptr != '.') return result; + + // Parsing halted on a '.'. Perhaps we're in a different locale? Let's + // try to replace the '.' with a locale-specific radix character and + // try again. + string localized = LocalizeRadix(text, temp_endptr); + const char* localized_cstr = localized.c_str(); + char* localized_endptr; + result = strtod(localized_cstr, &localized_endptr); + if ((localized_endptr - localized_cstr) > + (temp_endptr - text)) { + // This attempt got further, so replacing the decimal must have helped. + // Update original_endptr to point at the right location. + if (original_endptr != NULL) { + // size_diff is non-zero if the localized radix has multiple bytes. + int size_diff = localized.size() - strlen(text); + // const_cast is necessary to match the strtod() interface. 
+ *original_endptr = const_cast( + text + (localized_endptr - localized_cstr - size_diff)); + } + } + + return result; +} + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/strutil.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/strutil.h new file mode 100644 index 0000000000..4a79c2240c --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/strutil.h @@ -0,0 +1,457 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// from google3/strings/strutil.h + +#ifndef GOOGLE_PROTOBUF_STUBS_STRUTIL_H__ +#define GOOGLE_PROTOBUF_STUBS_STRUTIL_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { + +#ifdef _MSC_VER +#define strtoll _strtoi64 +#define strtoull _strtoui64 +#elif defined(__DECCXX) && defined(__osf__) +// HP C++ on Tru64 does not have strtoll, but strtol is already 64-bit. +#define strtoll strtol +#define strtoull strtoul +#endif + +// ---------------------------------------------------------------------- +// ascii_isalnum() +// Check if an ASCII character is alphanumeric. We can't use ctype's +// isalnum() because it is affected by locale. This function is applied +// to identifiers in the protocol buffer language, not to natural-language +// strings, so locale should not be taken into account. +// ascii_isdigit() +// Like above, but only accepts digits. 
+// ---------------------------------------------------------------------- + +inline bool ascii_isalnum(char c) { + return ('a' <= c && c <= 'z') || + ('A' <= c && c <= 'Z') || + ('0' <= c && c <= '9'); +} + +inline bool ascii_isdigit(char c) { + return ('0' <= c && c <= '9'); +} + +// ---------------------------------------------------------------------- +// HasPrefixString() +// Check if a string begins with a given prefix. +// StripPrefixString() +// Given a string and a putative prefix, returns the string minus the +// prefix string if the prefix matches, otherwise the original +// string. +// ---------------------------------------------------------------------- +inline bool HasPrefixString(const string& str, + const string& prefix) { + return str.size() >= prefix.size() && + str.compare(0, prefix.size(), prefix) == 0; +} + +inline string StripPrefixString(const string& str, const string& prefix) { + if (HasPrefixString(str, prefix)) { + return str.substr(prefix.size()); + } else { + return str; + } +} + +// ---------------------------------------------------------------------- +// HasSuffixString() +// Return true if str ends in suffix. +// StripSuffixString() +// Given a string and a putative suffix, returns the string minus the +// suffix string if the suffix matches, otherwise the original +// string. +// ---------------------------------------------------------------------- +inline bool HasSuffixString(const string& str, + const string& suffix) { + return str.size() >= suffix.size() && + str.compare(str.size() - suffix.size(), suffix.size(), suffix) == 0; +} + +inline string StripSuffixString(const string& str, const string& suffix) { + if (HasSuffixString(str, suffix)) { + return str.substr(0, str.size() - suffix.size()); + } else { + return str; + } +} + +// ---------------------------------------------------------------------- +// StripString +// Replaces any occurrence of the character 'remove' (or the characters +// in 'remove') with the character 'replacewith'. +// Good for keeping html characters or protocol characters (\t) out +// of places where they might cause a problem. +// ---------------------------------------------------------------------- +LIBPROTOBUF_EXPORT void StripString(string* s, const char* remove, + char replacewith); + +// ---------------------------------------------------------------------- +// LowerString() +// UpperString() +// Convert the characters in "s" to lowercase or uppercase. ASCII-only: +// these functions intentionally ignore locale because they are applied to +// identifiers used in the Protocol Buffer language, not to natural-language +// strings. +// ---------------------------------------------------------------------- + +inline void LowerString(string * s) { + string::iterator end = s->end(); + for (string::iterator i = s->begin(); i != end; ++i) { + // tolower() changes based on locale. We don't want this! + if ('A' <= *i && *i <= 'Z') *i += 'a' - 'A'; + } +} + +inline void UpperString(string * s) { + string::iterator end = s->end(); + for (string::iterator i = s->begin(); i != end; ++i) { + // toupper() changes based on locale. We don't want this! + if ('a' <= *i && *i <= 'z') *i += 'A' - 'a'; + } +} + +// ---------------------------------------------------------------------- +// StringReplace() +// Give me a string and two patterns "old" and "new", and I replace +// the first instance of "old" in the string with "new", if it +// exists. RETURN a new string, regardless of whether the replacement +// happened or not. 
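Editorial aside, not part of the vendored file: a short usage sketch matching the behaviour described above for the StringReplace() declared just below (include path assumed).

#include <google/protobuf/stubs/strutil.h>   // assumed include path
#include <cstdio>

int main() {
  using google::protobuf::StringReplace;
  std::string once = StringReplace("one fish two fish", "fish", "cat", false);
  std::string all  = StringReplace("one fish two fish", "fish", "cat", true);
  std::printf("%s\n%s\n", once.c_str(), all.c_str());
  // "one cat two fish"
  // "one cat two cat"
  return 0;
}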
+// ---------------------------------------------------------------------- + +LIBPROTOBUF_EXPORT string StringReplace(const string& s, const string& oldsub, + const string& newsub, bool replace_all); + +// ---------------------------------------------------------------------- +// SplitStringUsing() +// Split a string using a character delimiter. Append the components +// to 'result'. If there are consecutive delimiters, this function skips +// over all of them. +// ---------------------------------------------------------------------- +LIBPROTOBUF_EXPORT void SplitStringUsing(const string& full, const char* delim, + vector* res); + +// ---------------------------------------------------------------------- +// JoinStrings() +// These methods concatenate a vector of strings into a C++ string, using +// the C-string "delim" as a separator between components. There are two +// flavors of the function, one flavor returns the concatenated string, +// another takes a pointer to the target string. In the latter case the +// target string is cleared and overwritten. +// ---------------------------------------------------------------------- +LIBPROTOBUF_EXPORT void JoinStrings(const vector& components, + const char* delim, string* result); + +inline string JoinStrings(const vector& components, + const char* delim) { + string result; + JoinStrings(components, delim, &result); + return result; +} + +// ---------------------------------------------------------------------- +// UnescapeCEscapeSequences() +// Copies "source" to "dest", rewriting C-style escape sequences +// -- '\n', '\r', '\\', '\ooo', etc -- to their ASCII +// equivalents. "dest" must be sufficiently large to hold all +// the characters in the rewritten string (i.e. at least as large +// as strlen(source) + 1 should be safe, since the replacements +// are always shorter than the original escaped sequences). It's +// safe for source and dest to be the same. RETURNS the length +// of dest. +// +// It allows hex sequences \xhh, or generally \xhhhhh with an +// arbitrary number of hex digits, but all of them together must +// specify a value of a single byte (e.g. \x0045 is equivalent +// to \x45, and \x1234 is erroneous). +// +// It also allows escape sequences of the form \uhhhh (exactly four +// hex digits, upper or lower case) or \Uhhhhhhhh (exactly eight +// hex digits, upper or lower case) to specify a Unicode code +// point. The dest array will contain the UTF8-encoded version of +// that code-point (e.g., if source contains \u2019, then dest will +// contain the three bytes 0xE2, 0x80, and 0x99). +// +// Errors: In the first form of the call, errors are reported with +// LOG(ERROR). The same is true for the second form of the call if +// the pointer to the string vector is NULL; otherwise, error +// messages are stored in the vector. In either case, the effect on +// the dest array is not defined, but rest of the source will be +// processed. +// ---------------------------------------------------------------------- + +LIBPROTOBUF_EXPORT int UnescapeCEscapeSequences(const char* source, char* dest); +LIBPROTOBUF_EXPORT int UnescapeCEscapeSequences(const char* source, char* dest, + vector *errors); + +// ---------------------------------------------------------------------- +// UnescapeCEscapeString() +// This does the same thing as UnescapeCEscapeSequences, but creates +// a new string. The caller does not need to worry about allocating +// a dest buffer. 
This should be used for non performance critical +// tasks such as printing debug messages. It is safe for src and dest +// to be the same. +// +// The second call stores its errors in a supplied string vector. +// If the string vector pointer is NULL, it reports the errors with LOG(). +// +// In the first and second calls, the length of dest is returned. In the +// the third call, the new string is returned. +// ---------------------------------------------------------------------- + +LIBPROTOBUF_EXPORT int UnescapeCEscapeString(const string& src, string* dest); +LIBPROTOBUF_EXPORT int UnescapeCEscapeString(const string& src, string* dest, + vector *errors); +LIBPROTOBUF_EXPORT string UnescapeCEscapeString(const string& src); + +// ---------------------------------------------------------------------- +// CEscapeString() +// Copies 'src' to 'dest', escaping dangerous characters using +// C-style escape sequences. This is very useful for preparing query +// flags. 'src' and 'dest' should not overlap. +// Returns the number of bytes written to 'dest' (not including the \0) +// or -1 if there was insufficient space. +// +// Currently only \n, \r, \t, ", ', \ and !isprint() chars are escaped. +// ---------------------------------------------------------------------- +LIBPROTOBUF_EXPORT int CEscapeString(const char* src, int src_len, + char* dest, int dest_len); + +// ---------------------------------------------------------------------- +// CEscape() +// More convenient form of CEscapeString: returns result as a "string". +// This version is slower than CEscapeString() because it does more +// allocation. However, it is much more convenient to use in +// non-speed-critical code like logging messages etc. +// ---------------------------------------------------------------------- +LIBPROTOBUF_EXPORT string CEscape(const string& src); + +namespace strings { +// Like CEscape() but does not escape bytes with the upper bit set. +LIBPROTOBUF_EXPORT string Utf8SafeCEscape(const string& src); + +// Like CEscape() but uses hex (\x) escapes instead of octals. +LIBPROTOBUF_EXPORT string CHexEscape(const string& src); +} // namespace strings + +// ---------------------------------------------------------------------- +// strto32() +// strtou32() +// strto64() +// strtou64() +// Architecture-neutral plug compatible replacements for strtol() and +// strtoul(). Long's have different lengths on ILP-32 and LP-64 +// platforms, so using these is safer, from the point of view of +// overflow behavior, than using the standard libc functions. +// ---------------------------------------------------------------------- +LIBPROTOBUF_EXPORT int32 strto32_adaptor(const char *nptr, char **endptr, + int base); +LIBPROTOBUF_EXPORT uint32 strtou32_adaptor(const char *nptr, char **endptr, + int base); + +inline int32 strto32(const char *nptr, char **endptr, int base) { + if (sizeof(int32) == sizeof(long)) + return strtol(nptr, endptr, base); + else + return strto32_adaptor(nptr, endptr, base); +} + +inline uint32 strtou32(const char *nptr, char **endptr, int base) { + if (sizeof(uint32) == sizeof(unsigned long)) + return strtoul(nptr, endptr, base); + else + return strtou32_adaptor(nptr, endptr, base); +} + +// For now, long long is 64-bit on all the platforms we care about, so these +// functions can simply pass the call to strto[u]ll. 
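// Illustrative usage sketch (not part of the upstream protobuf sources):
// SplitStringUsing() drops empty pieces between consecutive delimiters,
// JoinStrings() is the matching concatenation, and CEscape() /
// UnescapeCEscapeString() round-trip a string through a printable form.
// The vector element type is assumed to be std::string, per the comments
// above.
#include <cassert>
#include <string>
#include <vector>
#include <google/protobuf/stubs/strutil.h>

void SplitJoinEscapeSketch() {
  std::vector<std::string> parts;
  google::protobuf::SplitStringUsing("foo..bar.baz", ".", &parts);
  assert(parts.size() == 3);  // the empty piece between ".." is skipped
  assert(google::protobuf::JoinStrings(parts, ".") == "foo.bar.baz");

  const std::string raw = "tab:\t quote:\" backslash:\\";
  std::string printable = google::protobuf::CEscape(raw);
  assert(google::protobuf::UnescapeCEscapeString(printable) == raw);
}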
+inline int64 strto64(const char *nptr, char **endptr, int base) { + GOOGLE_COMPILE_ASSERT(sizeof(int64) == sizeof(long long), + sizeof_int64_is_not_sizeof_long_long); + return strtoll(nptr, endptr, base); +} + +inline uint64 strtou64(const char *nptr, char **endptr, int base) { + GOOGLE_COMPILE_ASSERT(sizeof(uint64) == sizeof(unsigned long long), + sizeof_uint64_is_not_sizeof_long_long); + return strtoull(nptr, endptr, base); +} + +// ---------------------------------------------------------------------- +// FastIntToBuffer() +// FastHexToBuffer() +// FastHex64ToBuffer() +// FastHex32ToBuffer() +// FastTimeToBuffer() +// These are intended for speed. FastIntToBuffer() assumes the +// integer is non-negative. FastHexToBuffer() puts output in +// hex rather than decimal. FastTimeToBuffer() puts the output +// into RFC822 format. +// +// FastHex64ToBuffer() puts a 64-bit unsigned value in hex-format, +// padded to exactly 16 bytes (plus one byte for '\0') +// +// FastHex32ToBuffer() puts a 32-bit unsigned value in hex-format, +// padded to exactly 8 bytes (plus one byte for '\0') +// +// All functions take the output buffer as an arg. +// They all return a pointer to the beginning of the output, +// which may not be the beginning of the input buffer. +// ---------------------------------------------------------------------- + +// Suggested buffer size for FastToBuffer functions. Also works with +// DoubleToBuffer() and FloatToBuffer(). +static const int kFastToBufferSize = 32; + +LIBPROTOBUF_EXPORT char* FastInt32ToBuffer(int32 i, char* buffer); +LIBPROTOBUF_EXPORT char* FastInt64ToBuffer(int64 i, char* buffer); +char* FastUInt32ToBuffer(uint32 i, char* buffer); // inline below +char* FastUInt64ToBuffer(uint64 i, char* buffer); // inline below +LIBPROTOBUF_EXPORT char* FastHexToBuffer(int i, char* buffer); +LIBPROTOBUF_EXPORT char* FastHex64ToBuffer(uint64 i, char* buffer); +LIBPROTOBUF_EXPORT char* FastHex32ToBuffer(uint32 i, char* buffer); + +// at least 22 bytes long +inline char* FastIntToBuffer(int i, char* buffer) { + return (sizeof(i) == 4 ? + FastInt32ToBuffer(i, buffer) : FastInt64ToBuffer(i, buffer)); +} +inline char* FastUIntToBuffer(unsigned int i, char* buffer) { + return (sizeof(i) == 4 ? + FastUInt32ToBuffer(i, buffer) : FastUInt64ToBuffer(i, buffer)); +} +inline char* FastLongToBuffer(long i, char* buffer) { + return (sizeof(i) == 4 ? + FastInt32ToBuffer(i, buffer) : FastInt64ToBuffer(i, buffer)); +} +inline char* FastULongToBuffer(unsigned long i, char* buffer) { + return (sizeof(i) == 4 ? + FastUInt32ToBuffer(i, buffer) : FastUInt64ToBuffer(i, buffer)); +} + +// ---------------------------------------------------------------------- +// FastInt32ToBufferLeft() +// FastUInt32ToBufferLeft() +// FastInt64ToBufferLeft() +// FastUInt64ToBufferLeft() +// +// Like the Fast*ToBuffer() functions above, these are intended for speed. +// Unlike the Fast*ToBuffer() functions, however, these functions write +// their output to the beginning of the buffer (hence the name, as the +// output is left-aligned). The caller is responsible for ensuring that +// the buffer has enough space to hold the output. +// +// Returns a pointer to the end of the string (i.e. the null character +// terminating the string). 
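// Illustrative usage sketch (not part of the upstream protobuf sources):
// strto64() parses with fixed 64-bit overflow behavior regardless of the
// platform's "long" width, and FastInt32ToBuffer() formats into a caller
// buffer of kFastToBufferSize bytes, returning a pointer *into* it.
#include <cassert>
#include <cstring>
#include <google/protobuf/stubs/strutil.h>

void ParseAndFormatSketch() {
  char* end = NULL;
  assert(google::protobuf::strto64("9999999999", &end, 10) == 9999999999LL);

  char scratch[google::protobuf::kFastToBufferSize];
  const char* text = google::protobuf::FastInt32ToBuffer(42, scratch);
  // Use the returned pointer; it may not be the start of "scratch".
  assert(std::strcmp(text, "42") == 0);
}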
+// ---------------------------------------------------------------------- + +LIBPROTOBUF_EXPORT char* FastInt32ToBufferLeft(int32 i, char* buffer); +LIBPROTOBUF_EXPORT char* FastUInt32ToBufferLeft(uint32 i, char* buffer); +LIBPROTOBUF_EXPORT char* FastInt64ToBufferLeft(int64 i, char* buffer); +LIBPROTOBUF_EXPORT char* FastUInt64ToBufferLeft(uint64 i, char* buffer); + +// Just define these in terms of the above. +inline char* FastUInt32ToBuffer(uint32 i, char* buffer) { + FastUInt32ToBufferLeft(i, buffer); + return buffer; +} +inline char* FastUInt64ToBuffer(uint64 i, char* buffer) { + FastUInt64ToBufferLeft(i, buffer); + return buffer; +} + +// ---------------------------------------------------------------------- +// SimpleItoa() +// Description: converts an integer to a string. +// +// Return value: string +// ---------------------------------------------------------------------- +LIBPROTOBUF_EXPORT string SimpleItoa(int i); +LIBPROTOBUF_EXPORT string SimpleItoa(unsigned int i); +LIBPROTOBUF_EXPORT string SimpleItoa(long i); +LIBPROTOBUF_EXPORT string SimpleItoa(unsigned long i); +LIBPROTOBUF_EXPORT string SimpleItoa(long long i); +LIBPROTOBUF_EXPORT string SimpleItoa(unsigned long long i); + +// ---------------------------------------------------------------------- +// SimpleDtoa() +// SimpleFtoa() +// DoubleToBuffer() +// FloatToBuffer() +// Description: converts a double or float to a string which, if +// passed to NoLocaleStrtod(), will produce the exact same original double +// (except in case of NaN; all NaNs are considered the same value). +// We try to keep the string short but it's not guaranteed to be as +// short as possible. +// +// DoubleToBuffer() and FloatToBuffer() write the text to the given +// buffer and return it. The buffer must be at least +// kDoubleToBufferSize bytes for doubles and kFloatToBufferSize +// bytes for floats. kFastToBufferSize is also guaranteed to be large +// enough to hold either. +// +// Return value: string +// ---------------------------------------------------------------------- +LIBPROTOBUF_EXPORT string SimpleDtoa(double value); +LIBPROTOBUF_EXPORT string SimpleFtoa(float value); + +LIBPROTOBUF_EXPORT char* DoubleToBuffer(double i, char* buffer); +LIBPROTOBUF_EXPORT char* FloatToBuffer(float i, char* buffer); + +// In practice, doubles should never need more than 24 bytes and floats +// should never need more than 14 (including null terminators), but we +// overestimate to be safe. +static const int kDoubleToBufferSize = 32; +static const int kFloatToBufferSize = 24; + +// ---------------------------------------------------------------------- +// NoLocaleStrtod() +// Exactly like strtod(), except it always behaves as if in the "C" +// locale (i.e. decimal points must be '.'s). 
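// Illustrative usage sketch (not part of the upstream protobuf sources):
// SimpleItoa()/SimpleDtoa() return strings, and SimpleDtoa() is documented
// to produce text that NoLocaleStrtod() converts back to the exact same
// double, which is what the assertions below rely on.
#include <cassert>
#include <cstddef>
#include <string>
#include <google/protobuf/stubs/strutil.h>

void NumberToStringSketch() {
  assert(google::protobuf::SimpleItoa(12345) == "12345");

  const double value = 0.1;  // not exactly representable in binary
  std::string text = google::protobuf::SimpleDtoa(value);
  // The decimal text is short, but the round trip is still exact.
  assert(google::protobuf::NoLocaleStrtod(text.c_str(), NULL) == value);
}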
+// ---------------------------------------------------------------------- + +LIBPROTOBUF_EXPORT double NoLocaleStrtod(const char* text, char** endptr); + +} // namespace protobuf +} // namespace google + +#endif // GOOGLE_PROTOBUF_STUBS_STRUTIL_H__ + + diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/strutil_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/strutil_unittest.cc new file mode 100644 index 0000000000..b9c9253b04 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/strutil_unittest.cc @@ -0,0 +1,83 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) + +#include + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace { + +// TODO(kenton): Copy strutil tests from google3? + +TEST(StringUtilityTest, ImmuneToLocales) { + // Remember the old locale. + char* old_locale_cstr = setlocale(LC_NUMERIC, NULL); + ASSERT_TRUE(old_locale_cstr != NULL); + string old_locale = old_locale_cstr; + + // Set the locale to "C". + ASSERT_TRUE(setlocale(LC_NUMERIC, "C") != NULL); + + EXPECT_EQ(1.5, NoLocaleStrtod("1.5", NULL)); + EXPECT_EQ("1.5", SimpleDtoa(1.5)); + EXPECT_EQ("1.5", SimpleFtoa(1.5)); + + // Verify that the endptr is set correctly even if not all text was parsed. + const char* text = "1.5f"; + char* endptr; + EXPECT_EQ(1.5, NoLocaleStrtod(text, &endptr)); + EXPECT_EQ(3, endptr - text); + + if (setlocale(LC_NUMERIC, "es_ES") == NULL && + setlocale(LC_NUMERIC, "es_ES.utf8") == NULL) { + // Some systems may not have the desired locale available. + GOOGLE_LOG(WARNING) + << "Couldn't set locale to es_ES. 
Skipping this test."; + } else { + EXPECT_EQ(1.5, NoLocaleStrtod("1.5", NULL)); + EXPECT_EQ("1.5", SimpleDtoa(1.5)); + EXPECT_EQ("1.5", SimpleFtoa(1.5)); + EXPECT_EQ(1.5, NoLocaleStrtod(text, &endptr)); + EXPECT_EQ(3, endptr - text); + } + + // Return to original locale. + setlocale(LC_NUMERIC, old_locale.c_str()); +} + +} // anonymous namespace +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/substitute.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/substitute.cc new file mode 100644 index 0000000000..b542aaa41e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/substitute.cc @@ -0,0 +1,134 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) + +#include +#include +#include + +namespace google { +namespace protobuf { +namespace strings { + +using internal::SubstituteArg; + +// Returns the number of args in arg_array which were passed explicitly +// to Substitute(). 
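// Illustrative sketch (not part of the upstream protobuf sources), expanding
// on the locale test above: under a comma-decimal locale such as es_ES,
// plain strtod() stops parsing at the '.', while NoLocaleStrtod() still
// reads the full value. The locale may not be installed, so the demo is
// conditional, exactly like the unit test.
#include <clocale>
#include <cstdio>
#include <cstdlib>
#include <google/protobuf/stubs/strutil.h>

void LocaleContrastSketch() {
  if (std::setlocale(LC_NUMERIC, "es_ES") != NULL ||
      std::setlocale(LC_NUMERIC, "es_ES.utf8") != NULL) {
    std::printf("strtod: %g\n", std::strtod("2.5", NULL));       // prints 2
    std::printf("NoLocaleStrtod: %g\n",
                google::protobuf::NoLocaleStrtod("2.5", NULL));  // prints 2.5
    std::setlocale(LC_NUMERIC, "C");  // restore a predictable locale
  }
}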
+static int CountSubstituteArgs(const SubstituteArg* const* args_array) { + int count = 0; + while (args_array[count] != NULL && args_array[count]->size() != -1) { + ++count; + } + return count; +} + +string Substitute( + const char* format, + const SubstituteArg& arg0, const SubstituteArg& arg1, + const SubstituteArg& arg2, const SubstituteArg& arg3, + const SubstituteArg& arg4, const SubstituteArg& arg5, + const SubstituteArg& arg6, const SubstituteArg& arg7, + const SubstituteArg& arg8, const SubstituteArg& arg9) { + string result; + SubstituteAndAppend(&result, format, arg0, arg1, arg2, arg3, arg4, + arg5, arg6, arg7, arg8, arg9); + return result; +} + +void SubstituteAndAppend( + string* output, const char* format, + const SubstituteArg& arg0, const SubstituteArg& arg1, + const SubstituteArg& arg2, const SubstituteArg& arg3, + const SubstituteArg& arg4, const SubstituteArg& arg5, + const SubstituteArg& arg6, const SubstituteArg& arg7, + const SubstituteArg& arg8, const SubstituteArg& arg9) { + const SubstituteArg* const args_array[] = { + &arg0, &arg1, &arg2, &arg3, &arg4, &arg5, &arg6, &arg7, &arg8, &arg9, NULL + }; + + // Determine total size needed. + int size = 0; + for (int i = 0; format[i] != '\0'; i++) { + if (format[i] == '$') { + if (ascii_isdigit(format[i+1])) { + int index = format[i+1] - '0'; + if (args_array[index]->size() == -1) { + GOOGLE_LOG(DFATAL) + << "strings::Substitute format string invalid: asked for \"$" + << index << "\", but only " << CountSubstituteArgs(args_array) + << " args were given. Full format string was: \"" + << CEscape(format) << "\"."; + return; + } + size += args_array[index]->size(); + ++i; // Skip next char. + } else if (format[i+1] == '$') { + ++size; + ++i; // Skip next char. + } else { + GOOGLE_LOG(DFATAL) + << "Invalid strings::Substitute() format string: \"" + << CEscape(format) << "\"."; + return; + } + } else { + ++size; + } + } + + if (size == 0) return; + + // Build the string. + int original_size = output->size(); + STLStringResizeUninitialized(output, original_size + size); + char* target = string_as_array(output) + original_size; + for (int i = 0; format[i] != '\0'; i++) { + if (format[i] == '$') { + if (ascii_isdigit(format[i+1])) { + const SubstituteArg* src = args_array[format[i+1] - '0']; + memcpy(target, src->data(), src->size()); + target += src->size(); + ++i; // Skip next char. + } else if (format[i+1] == '$') { + *target++ = '$'; + ++i; // Skip next char. + } + } else { + *target++ = format[i]; + } + } + + GOOGLE_DCHECK_EQ(target - output->data(), output->size()); +} + +} // namespace strings +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/substitute.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/substitute.h new file mode 100644 index 0000000000..2581793b57 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/stubs/substitute.h @@ -0,0 +1,170 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// from google3/strings/substitute.h + +#include +#include +#include + +#ifndef GOOGLE_PROTOBUF_STUBS_SUBSTITUTE_H_ +#define GOOGLE_PROTOBUF_STUBS_SUBSTITUTE_H_ + +namespace google { +namespace protobuf { +namespace strings { + +// ---------------------------------------------------------------------- +// strings::Substitute() +// strings::SubstituteAndAppend() +// Kind of like StringPrintf, but different. +// +// Example: +// string GetMessage(string first_name, string last_name, int age) { +// return strings::Substitute("My name is $0 $1 and I am $2 years old.", +// first_name, last_name, age); +// } +// +// Differences from StringPrintf: +// * The format string does not identify the types of arguments. +// Instead, the magic of C++ deals with this for us. See below +// for a list of accepted types. +// * Substitutions in the format string are identified by a '$' +// followed by a digit. So, you can use arguments out-of-order and +// use the same argument multiple times. +// * It's much faster than StringPrintf. +// +// Supported types: +// * Strings (const char*, const string&) +// * Note that this means you do not have to add .c_str() to all of +// your strings. In fact, you shouldn't; it will be slower. +// * int32, int64, uint32, uint64: Formatted using SimpleItoa(). +// * float, double: Formatted using SimpleFtoa() and SimpleDtoa(). +// * bool: Printed as "true" or "false". +// +// SubstituteAndAppend() is like Substitute() but appends the result to +// *output. Example: +// +// string str; +// strings::SubstituteAndAppend(&str, +// "My name is $0 $1 and I am $2 years old.", +// first_name, last_name, age); +// +// Substitute() is significantly faster than StringPrintf(). For very +// large strings, it may be orders of magnitude faster. +// ---------------------------------------------------------------------- + +namespace internal { // Implementation details. + +class SubstituteArg { + public: + inline SubstituteArg(const char* value) + : text_(value), size_(strlen(text_)) {} + inline SubstituteArg(const string& value) + : text_(value.data()), size_(value.size()) {} + + // Indicates that no argument was given. 
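// Illustrative usage sketch (not part of the upstream protobuf sources) for
// strings::Substitute(): positional "$N" arguments can be reordered and
// reused, "$$" emits a literal '$', and SubstituteAndAppend() accumulates
// into an existing string instead of returning a new one.
#include <cassert>
#include <string>
#include <google/protobuf/stubs/substitute.h>

void SubstituteSketch() {
  using google::protobuf::strings::Substitute;
  using google::protobuf::strings::SubstituteAndAppend;

  std::string msg = Substitute("$1 costs $$$0 (yes, $$$0).", 5, "Lunch");
  assert(msg == "Lunch costs $5 (yes, $5).");

  std::string log;
  SubstituteAndAppend(&log, "user=$0 ", "alice");
  SubstituteAndAppend(&log, "age=$0", 42);
  assert(log == "user=alice age=42");
}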
+ inline explicit SubstituteArg() + : text_(NULL), size_(-1) {} + + // Primitives + // We don't overload for signed and unsigned char because if people are + // explicitly declaring their chars as signed or unsigned then they are + // probably actually using them as 8-bit integers and would probably + // prefer an integer representation. But, we don't really know. So, we + // make the caller decide what to do. + inline SubstituteArg(char value) + : text_(scratch_), size_(1) { scratch_[0] = value; } + inline SubstituteArg(short value) + : text_(FastInt32ToBuffer(value, scratch_)), size_(strlen(text_)) {} + inline SubstituteArg(unsigned short value) + : text_(FastUInt32ToBuffer(value, scratch_)), size_(strlen(text_)) {} + inline SubstituteArg(int value) + : text_(FastInt32ToBuffer(value, scratch_)), size_(strlen(text_)) {} + inline SubstituteArg(unsigned int value) + : text_(FastUInt32ToBuffer(value, scratch_)), size_(strlen(text_)) {} + inline SubstituteArg(long value) + : text_(FastLongToBuffer(value, scratch_)), size_(strlen(text_)) {} + inline SubstituteArg(unsigned long value) + : text_(FastULongToBuffer(value, scratch_)), size_(strlen(text_)) {} + inline SubstituteArg(long long value) + : text_(FastInt64ToBuffer(value, scratch_)), size_(strlen(text_)) {} + inline SubstituteArg(unsigned long long value) + : text_(FastUInt64ToBuffer(value, scratch_)), size_(strlen(text_)) {} + inline SubstituteArg(float value) + : text_(FloatToBuffer(value, scratch_)), size_(strlen(text_)) {} + inline SubstituteArg(double value) + : text_(DoubleToBuffer(value, scratch_)), size_(strlen(text_)) {} + inline SubstituteArg(bool value) + : text_(value ? "true" : "false"), size_(strlen(text_)) {} + + inline const char* data() const { return text_; } + inline int size() const { return size_; } + + private: + const char* text_; + int size_; + char scratch_[kFastToBufferSize]; +}; + +} // namespace internal + +LIBPROTOBUF_EXPORT string Substitute( + const char* format, + const internal::SubstituteArg& arg0 = internal::SubstituteArg(), + const internal::SubstituteArg& arg1 = internal::SubstituteArg(), + const internal::SubstituteArg& arg2 = internal::SubstituteArg(), + const internal::SubstituteArg& arg3 = internal::SubstituteArg(), + const internal::SubstituteArg& arg4 = internal::SubstituteArg(), + const internal::SubstituteArg& arg5 = internal::SubstituteArg(), + const internal::SubstituteArg& arg6 = internal::SubstituteArg(), + const internal::SubstituteArg& arg7 = internal::SubstituteArg(), + const internal::SubstituteArg& arg8 = internal::SubstituteArg(), + const internal::SubstituteArg& arg9 = internal::SubstituteArg()); + +LIBPROTOBUF_EXPORT void SubstituteAndAppend( + string* output, const char* format, + const internal::SubstituteArg& arg0 = internal::SubstituteArg(), + const internal::SubstituteArg& arg1 = internal::SubstituteArg(), + const internal::SubstituteArg& arg2 = internal::SubstituteArg(), + const internal::SubstituteArg& arg3 = internal::SubstituteArg(), + const internal::SubstituteArg& arg4 = internal::SubstituteArg(), + const internal::SubstituteArg& arg5 = internal::SubstituteArg(), + const internal::SubstituteArg& arg6 = internal::SubstituteArg(), + const internal::SubstituteArg& arg7 = internal::SubstituteArg(), + const internal::SubstituteArg& arg8 = internal::SubstituteArg(), + const internal::SubstituteArg& arg9 = internal::SubstituteArg()); + +} // namespace strings +} // namespace protobuf +} // namespace google + +#endif // GOOGLE_PROTOBUF_STUBS_SUBSTITUTE_H_ diff --git 
a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/test_util.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/test_util.cc new file mode 100644 index 0000000000..af8b3909b5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/test_util.cc @@ -0,0 +1,2854 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifdef _WIN32 +// Verify that #including windows.h does not break anything (e.g. because +// windows.h #defines GetMessage() as a macro). 
+#include +#endif + +#include +#include +#include + +#include +#include +#include + +namespace google { +namespace protobuf { + +void TestUtil::SetAllFields(unittest::TestAllTypes* message) { + message->set_optional_int32 (101); + message->set_optional_int64 (102); + message->set_optional_uint32 (103); + message->set_optional_uint64 (104); + message->set_optional_sint32 (105); + message->set_optional_sint64 (106); + message->set_optional_fixed32 (107); + message->set_optional_fixed64 (108); + message->set_optional_sfixed32(109); + message->set_optional_sfixed64(110); + message->set_optional_float (111); + message->set_optional_double (112); + message->set_optional_bool (true); + message->set_optional_string ("115"); + message->set_optional_bytes ("116"); + + message->mutable_optionalgroup ()->set_a(117); + message->mutable_optional_nested_message ()->set_bb(118); + message->mutable_optional_foreign_message()->set_c(119); + message->mutable_optional_import_message ()->set_d(120); + + message->set_optional_nested_enum (unittest::TestAllTypes::BAZ); + message->set_optional_foreign_enum(unittest::FOREIGN_BAZ ); + message->set_optional_import_enum (unittest_import::IMPORT_BAZ); + + // StringPiece and Cord fields are only accessible via reflection in the + // open source release; see comments in compiler/cpp/string_field.cc. +#ifndef PROTOBUF_TEST_NO_DESCRIPTORS + message->GetReflection()->SetString( + message, + message->GetDescriptor()->FindFieldByName("optional_string_piece"), + "124"); + message->GetReflection()->SetString( + message, + message->GetDescriptor()->FindFieldByName("optional_cord"), + "125"); +#endif // !PROTOBUF_TEST_NO_DESCRIPTORS + + // ----------------------------------------------------------------- + + message->add_repeated_int32 (201); + message->add_repeated_int64 (202); + message->add_repeated_uint32 (203); + message->add_repeated_uint64 (204); + message->add_repeated_sint32 (205); + message->add_repeated_sint64 (206); + message->add_repeated_fixed32 (207); + message->add_repeated_fixed64 (208); + message->add_repeated_sfixed32(209); + message->add_repeated_sfixed64(210); + message->add_repeated_float (211); + message->add_repeated_double (212); + message->add_repeated_bool (true); + message->add_repeated_string ("215"); + message->add_repeated_bytes ("216"); + + message->add_repeatedgroup ()->set_a(217); + message->add_repeated_nested_message ()->set_bb(218); + message->add_repeated_foreign_message()->set_c(219); + message->add_repeated_import_message ()->set_d(220); + + message->add_repeated_nested_enum (unittest::TestAllTypes::BAR); + message->add_repeated_foreign_enum(unittest::FOREIGN_BAR ); + message->add_repeated_import_enum (unittest_import::IMPORT_BAR); + +#ifndef PROTOBUF_TEST_NO_DESCRIPTORS + message->GetReflection()->AddString( + message, + message->GetDescriptor()->FindFieldByName("repeated_string_piece"), + "224"); + message->GetReflection()->AddString( + message, + message->GetDescriptor()->FindFieldByName("repeated_cord"), + "225"); +#endif // !PROTOBUF_TEST_NO_DESCRIPTORS + + // Add a second one of each field. 
+ message->add_repeated_int32 (301); + message->add_repeated_int64 (302); + message->add_repeated_uint32 (303); + message->add_repeated_uint64 (304); + message->add_repeated_sint32 (305); + message->add_repeated_sint64 (306); + message->add_repeated_fixed32 (307); + message->add_repeated_fixed64 (308); + message->add_repeated_sfixed32(309); + message->add_repeated_sfixed64(310); + message->add_repeated_float (311); + message->add_repeated_double (312); + message->add_repeated_bool (false); + message->add_repeated_string ("315"); + message->add_repeated_bytes ("316"); + + message->add_repeatedgroup ()->set_a(317); + message->add_repeated_nested_message ()->set_bb(318); + message->add_repeated_foreign_message()->set_c(319); + message->add_repeated_import_message ()->set_d(320); + + message->add_repeated_nested_enum (unittest::TestAllTypes::BAZ); + message->add_repeated_foreign_enum(unittest::FOREIGN_BAZ ); + message->add_repeated_import_enum (unittest_import::IMPORT_BAZ); + +#ifndef PROTOBUF_TEST_NO_DESCRIPTORS + message->GetReflection()->AddString( + message, + message->GetDescriptor()->FindFieldByName("repeated_string_piece"), + "324"); + message->GetReflection()->AddString( + message, + message->GetDescriptor()->FindFieldByName("repeated_cord"), + "325"); +#endif // !PROTOBUF_TEST_NO_DESCRIPTORS + + // ----------------------------------------------------------------- + + message->set_default_int32 (401); + message->set_default_int64 (402); + message->set_default_uint32 (403); + message->set_default_uint64 (404); + message->set_default_sint32 (405); + message->set_default_sint64 (406); + message->set_default_fixed32 (407); + message->set_default_fixed64 (408); + message->set_default_sfixed32(409); + message->set_default_sfixed64(410); + message->set_default_float (411); + message->set_default_double (412); + message->set_default_bool (false); + message->set_default_string ("415"); + message->set_default_bytes ("416"); + + message->set_default_nested_enum (unittest::TestAllTypes::FOO); + message->set_default_foreign_enum(unittest::FOREIGN_FOO ); + message->set_default_import_enum (unittest_import::IMPORT_FOO); + +#ifndef PROTOBUF_TEST_NO_DESCRIPTORS + message->GetReflection()->SetString( + message, + message->GetDescriptor()->FindFieldByName("default_string_piece"), + "424"); + message->GetReflection()->SetString( + message, + message->GetDescriptor()->FindFieldByName("default_cord"), + "425"); +#endif // !PROTOBUF_TEST_NO_DESCRIPTORS +} + +// ------------------------------------------------------------------- + +void TestUtil::ModifyRepeatedFields(unittest::TestAllTypes* message) { + message->set_repeated_int32 (1, 501); + message->set_repeated_int64 (1, 502); + message->set_repeated_uint32 (1, 503); + message->set_repeated_uint64 (1, 504); + message->set_repeated_sint32 (1, 505); + message->set_repeated_sint64 (1, 506); + message->set_repeated_fixed32 (1, 507); + message->set_repeated_fixed64 (1, 508); + message->set_repeated_sfixed32(1, 509); + message->set_repeated_sfixed64(1, 510); + message->set_repeated_float (1, 511); + message->set_repeated_double (1, 512); + message->set_repeated_bool (1, true); + message->set_repeated_string (1, "515"); + message->set_repeated_bytes (1, "516"); + + message->mutable_repeatedgroup (1)->set_a(517); + message->mutable_repeated_nested_message (1)->set_bb(518); + message->mutable_repeated_foreign_message(1)->set_c(519); + message->mutable_repeated_import_message (1)->set_d(520); + + message->set_repeated_nested_enum (1, unittest::TestAllTypes::FOO); + 
message->set_repeated_foreign_enum(1, unittest::FOREIGN_FOO ); + message->set_repeated_import_enum (1, unittest_import::IMPORT_FOO); + +#ifndef PROTOBUF_TEST_NO_DESCRIPTORS + message->GetReflection()->SetRepeatedString( + message, + message->GetDescriptor()->FindFieldByName("repeated_string_piece"), + 1, "524"); + message->GetReflection()->SetRepeatedString( + message, + message->GetDescriptor()->FindFieldByName("repeated_cord"), + 1, "525"); +#endif // !PROTOBUF_TEST_NO_DESCRIPTORS +} + +// ------------------------------------------------------------------- + +void TestUtil::ExpectAllFieldsSet(const unittest::TestAllTypes& message) { + EXPECT_TRUE(message.has_optional_int32 ()); + EXPECT_TRUE(message.has_optional_int64 ()); + EXPECT_TRUE(message.has_optional_uint32 ()); + EXPECT_TRUE(message.has_optional_uint64 ()); + EXPECT_TRUE(message.has_optional_sint32 ()); + EXPECT_TRUE(message.has_optional_sint64 ()); + EXPECT_TRUE(message.has_optional_fixed32 ()); + EXPECT_TRUE(message.has_optional_fixed64 ()); + EXPECT_TRUE(message.has_optional_sfixed32()); + EXPECT_TRUE(message.has_optional_sfixed64()); + EXPECT_TRUE(message.has_optional_float ()); + EXPECT_TRUE(message.has_optional_double ()); + EXPECT_TRUE(message.has_optional_bool ()); + EXPECT_TRUE(message.has_optional_string ()); + EXPECT_TRUE(message.has_optional_bytes ()); + + EXPECT_TRUE(message.has_optionalgroup ()); + EXPECT_TRUE(message.has_optional_nested_message ()); + EXPECT_TRUE(message.has_optional_foreign_message()); + EXPECT_TRUE(message.has_optional_import_message ()); + + EXPECT_TRUE(message.optionalgroup ().has_a()); + EXPECT_TRUE(message.optional_nested_message ().has_bb()); + EXPECT_TRUE(message.optional_foreign_message().has_c()); + EXPECT_TRUE(message.optional_import_message ().has_d()); + + EXPECT_TRUE(message.has_optional_nested_enum ()); + EXPECT_TRUE(message.has_optional_foreign_enum()); + EXPECT_TRUE(message.has_optional_import_enum ()); + +#ifndef PROTOBUF_TEST_NO_DESCRIPTORS + EXPECT_TRUE(message.has_optional_string_piece()); + EXPECT_TRUE(message.has_optional_cord()); +#endif + + EXPECT_EQ(101 , message.optional_int32 ()); + EXPECT_EQ(102 , message.optional_int64 ()); + EXPECT_EQ(103 , message.optional_uint32 ()); + EXPECT_EQ(104 , message.optional_uint64 ()); + EXPECT_EQ(105 , message.optional_sint32 ()); + EXPECT_EQ(106 , message.optional_sint64 ()); + EXPECT_EQ(107 , message.optional_fixed32 ()); + EXPECT_EQ(108 , message.optional_fixed64 ()); + EXPECT_EQ(109 , message.optional_sfixed32()); + EXPECT_EQ(110 , message.optional_sfixed64()); + EXPECT_EQ(111 , message.optional_float ()); + EXPECT_EQ(112 , message.optional_double ()); + EXPECT_EQ(true , message.optional_bool ()); + EXPECT_EQ("115", message.optional_string ()); + EXPECT_EQ("116", message.optional_bytes ()); + + EXPECT_EQ(117, message.optionalgroup ().a()); + EXPECT_EQ(118, message.optional_nested_message ().bb()); + EXPECT_EQ(119, message.optional_foreign_message().c()); + EXPECT_EQ(120, message.optional_import_message ().d()); + + EXPECT_EQ(unittest::TestAllTypes::BAZ, message.optional_nested_enum ()); + EXPECT_EQ(unittest::FOREIGN_BAZ , message.optional_foreign_enum()); + EXPECT_EQ(unittest_import::IMPORT_BAZ, message.optional_import_enum ()); + + + // ----------------------------------------------------------------- + + ASSERT_EQ(2, message.repeated_int32_size ()); + ASSERT_EQ(2, message.repeated_int64_size ()); + ASSERT_EQ(2, message.repeated_uint32_size ()); + ASSERT_EQ(2, message.repeated_uint64_size ()); + ASSERT_EQ(2, 
message.repeated_sint32_size ()); + ASSERT_EQ(2, message.repeated_sint64_size ()); + ASSERT_EQ(2, message.repeated_fixed32_size ()); + ASSERT_EQ(2, message.repeated_fixed64_size ()); + ASSERT_EQ(2, message.repeated_sfixed32_size()); + ASSERT_EQ(2, message.repeated_sfixed64_size()); + ASSERT_EQ(2, message.repeated_float_size ()); + ASSERT_EQ(2, message.repeated_double_size ()); + ASSERT_EQ(2, message.repeated_bool_size ()); + ASSERT_EQ(2, message.repeated_string_size ()); + ASSERT_EQ(2, message.repeated_bytes_size ()); + + ASSERT_EQ(2, message.repeatedgroup_size ()); + ASSERT_EQ(2, message.repeated_nested_message_size ()); + ASSERT_EQ(2, message.repeated_foreign_message_size()); + ASSERT_EQ(2, message.repeated_import_message_size ()); + ASSERT_EQ(2, message.repeated_nested_enum_size ()); + ASSERT_EQ(2, message.repeated_foreign_enum_size ()); + ASSERT_EQ(2, message.repeated_import_enum_size ()); + +#ifndef PROTOBUF_TEST_NO_DESCRIPTORS + ASSERT_EQ(2, message.repeated_string_piece_size()); + ASSERT_EQ(2, message.repeated_cord_size()); +#endif + + EXPECT_EQ(201 , message.repeated_int32 (0)); + EXPECT_EQ(202 , message.repeated_int64 (0)); + EXPECT_EQ(203 , message.repeated_uint32 (0)); + EXPECT_EQ(204 , message.repeated_uint64 (0)); + EXPECT_EQ(205 , message.repeated_sint32 (0)); + EXPECT_EQ(206 , message.repeated_sint64 (0)); + EXPECT_EQ(207 , message.repeated_fixed32 (0)); + EXPECT_EQ(208 , message.repeated_fixed64 (0)); + EXPECT_EQ(209 , message.repeated_sfixed32(0)); + EXPECT_EQ(210 , message.repeated_sfixed64(0)); + EXPECT_EQ(211 , message.repeated_float (0)); + EXPECT_EQ(212 , message.repeated_double (0)); + EXPECT_EQ(true , message.repeated_bool (0)); + EXPECT_EQ("215", message.repeated_string (0)); + EXPECT_EQ("216", message.repeated_bytes (0)); + + EXPECT_EQ(217, message.repeatedgroup (0).a()); + EXPECT_EQ(218, message.repeated_nested_message (0).bb()); + EXPECT_EQ(219, message.repeated_foreign_message(0).c()); + EXPECT_EQ(220, message.repeated_import_message (0).d()); + + + EXPECT_EQ(unittest::TestAllTypes::BAR, message.repeated_nested_enum (0)); + EXPECT_EQ(unittest::FOREIGN_BAR , message.repeated_foreign_enum(0)); + EXPECT_EQ(unittest_import::IMPORT_BAR, message.repeated_import_enum (0)); + + EXPECT_EQ(301 , message.repeated_int32 (1)); + EXPECT_EQ(302 , message.repeated_int64 (1)); + EXPECT_EQ(303 , message.repeated_uint32 (1)); + EXPECT_EQ(304 , message.repeated_uint64 (1)); + EXPECT_EQ(305 , message.repeated_sint32 (1)); + EXPECT_EQ(306 , message.repeated_sint64 (1)); + EXPECT_EQ(307 , message.repeated_fixed32 (1)); + EXPECT_EQ(308 , message.repeated_fixed64 (1)); + EXPECT_EQ(309 , message.repeated_sfixed32(1)); + EXPECT_EQ(310 , message.repeated_sfixed64(1)); + EXPECT_EQ(311 , message.repeated_float (1)); + EXPECT_EQ(312 , message.repeated_double (1)); + EXPECT_EQ(false, message.repeated_bool (1)); + EXPECT_EQ("315", message.repeated_string (1)); + EXPECT_EQ("316", message.repeated_bytes (1)); + + EXPECT_EQ(317, message.repeatedgroup (1).a()); + EXPECT_EQ(318, message.repeated_nested_message (1).bb()); + EXPECT_EQ(319, message.repeated_foreign_message(1).c()); + EXPECT_EQ(320, message.repeated_import_message (1).d()); + + EXPECT_EQ(unittest::TestAllTypes::BAZ, message.repeated_nested_enum (1)); + EXPECT_EQ(unittest::FOREIGN_BAZ , message.repeated_foreign_enum(1)); + EXPECT_EQ(unittest_import::IMPORT_BAZ, message.repeated_import_enum (1)); + + + // ----------------------------------------------------------------- + + EXPECT_TRUE(message.has_default_int32 ()); + 
EXPECT_TRUE(message.has_default_int64 ()); + EXPECT_TRUE(message.has_default_uint32 ()); + EXPECT_TRUE(message.has_default_uint64 ()); + EXPECT_TRUE(message.has_default_sint32 ()); + EXPECT_TRUE(message.has_default_sint64 ()); + EXPECT_TRUE(message.has_default_fixed32 ()); + EXPECT_TRUE(message.has_default_fixed64 ()); + EXPECT_TRUE(message.has_default_sfixed32()); + EXPECT_TRUE(message.has_default_sfixed64()); + EXPECT_TRUE(message.has_default_float ()); + EXPECT_TRUE(message.has_default_double ()); + EXPECT_TRUE(message.has_default_bool ()); + EXPECT_TRUE(message.has_default_string ()); + EXPECT_TRUE(message.has_default_bytes ()); + + EXPECT_TRUE(message.has_default_nested_enum ()); + EXPECT_TRUE(message.has_default_foreign_enum()); + EXPECT_TRUE(message.has_default_import_enum ()); + + + EXPECT_EQ(401 , message.default_int32 ()); + EXPECT_EQ(402 , message.default_int64 ()); + EXPECT_EQ(403 , message.default_uint32 ()); + EXPECT_EQ(404 , message.default_uint64 ()); + EXPECT_EQ(405 , message.default_sint32 ()); + EXPECT_EQ(406 , message.default_sint64 ()); + EXPECT_EQ(407 , message.default_fixed32 ()); + EXPECT_EQ(408 , message.default_fixed64 ()); + EXPECT_EQ(409 , message.default_sfixed32()); + EXPECT_EQ(410 , message.default_sfixed64()); + EXPECT_EQ(411 , message.default_float ()); + EXPECT_EQ(412 , message.default_double ()); + EXPECT_EQ(false, message.default_bool ()); + EXPECT_EQ("415", message.default_string ()); + EXPECT_EQ("416", message.default_bytes ()); + + EXPECT_EQ(unittest::TestAllTypes::FOO, message.default_nested_enum ()); + EXPECT_EQ(unittest::FOREIGN_FOO , message.default_foreign_enum()); + EXPECT_EQ(unittest_import::IMPORT_FOO, message.default_import_enum ()); + +} + +// ------------------------------------------------------------------- + +void TestUtil::ExpectClear(const unittest::TestAllTypes& message) { + // has_blah() should initially be false for all optional fields. + EXPECT_FALSE(message.has_optional_int32 ()); + EXPECT_FALSE(message.has_optional_int64 ()); + EXPECT_FALSE(message.has_optional_uint32 ()); + EXPECT_FALSE(message.has_optional_uint64 ()); + EXPECT_FALSE(message.has_optional_sint32 ()); + EXPECT_FALSE(message.has_optional_sint64 ()); + EXPECT_FALSE(message.has_optional_fixed32 ()); + EXPECT_FALSE(message.has_optional_fixed64 ()); + EXPECT_FALSE(message.has_optional_sfixed32()); + EXPECT_FALSE(message.has_optional_sfixed64()); + EXPECT_FALSE(message.has_optional_float ()); + EXPECT_FALSE(message.has_optional_double ()); + EXPECT_FALSE(message.has_optional_bool ()); + EXPECT_FALSE(message.has_optional_string ()); + EXPECT_FALSE(message.has_optional_bytes ()); + + EXPECT_FALSE(message.has_optionalgroup ()); + EXPECT_FALSE(message.has_optional_nested_message ()); + EXPECT_FALSE(message.has_optional_foreign_message()); + EXPECT_FALSE(message.has_optional_import_message ()); + + EXPECT_FALSE(message.has_optional_nested_enum ()); + EXPECT_FALSE(message.has_optional_foreign_enum()); + EXPECT_FALSE(message.has_optional_import_enum ()); + + EXPECT_FALSE(message.has_optional_string_piece()); + EXPECT_FALSE(message.has_optional_cord()); + + // Optional fields without defaults are set to zero or something like it. 
+ EXPECT_EQ(0 , message.optional_int32 ()); + EXPECT_EQ(0 , message.optional_int64 ()); + EXPECT_EQ(0 , message.optional_uint32 ()); + EXPECT_EQ(0 , message.optional_uint64 ()); + EXPECT_EQ(0 , message.optional_sint32 ()); + EXPECT_EQ(0 , message.optional_sint64 ()); + EXPECT_EQ(0 , message.optional_fixed32 ()); + EXPECT_EQ(0 , message.optional_fixed64 ()); + EXPECT_EQ(0 , message.optional_sfixed32()); + EXPECT_EQ(0 , message.optional_sfixed64()); + EXPECT_EQ(0 , message.optional_float ()); + EXPECT_EQ(0 , message.optional_double ()); + EXPECT_EQ(false, message.optional_bool ()); + EXPECT_EQ("" , message.optional_string ()); + EXPECT_EQ("" , message.optional_bytes ()); + + // Embedded messages should also be clear. + EXPECT_FALSE(message.optionalgroup ().has_a()); + EXPECT_FALSE(message.optional_nested_message ().has_bb()); + EXPECT_FALSE(message.optional_foreign_message().has_c()); + EXPECT_FALSE(message.optional_import_message ().has_d()); + + EXPECT_EQ(0, message.optionalgroup ().a()); + EXPECT_EQ(0, message.optional_nested_message ().bb()); + EXPECT_EQ(0, message.optional_foreign_message().c()); + EXPECT_EQ(0, message.optional_import_message ().d()); + + // Enums without defaults are set to the first value in the enum. + EXPECT_EQ(unittest::TestAllTypes::FOO, message.optional_nested_enum ()); + EXPECT_EQ(unittest::FOREIGN_FOO , message.optional_foreign_enum()); + EXPECT_EQ(unittest_import::IMPORT_FOO, message.optional_import_enum ()); + + + // Repeated fields are empty. + EXPECT_EQ(0, message.repeated_int32_size ()); + EXPECT_EQ(0, message.repeated_int64_size ()); + EXPECT_EQ(0, message.repeated_uint32_size ()); + EXPECT_EQ(0, message.repeated_uint64_size ()); + EXPECT_EQ(0, message.repeated_sint32_size ()); + EXPECT_EQ(0, message.repeated_sint64_size ()); + EXPECT_EQ(0, message.repeated_fixed32_size ()); + EXPECT_EQ(0, message.repeated_fixed64_size ()); + EXPECT_EQ(0, message.repeated_sfixed32_size()); + EXPECT_EQ(0, message.repeated_sfixed64_size()); + EXPECT_EQ(0, message.repeated_float_size ()); + EXPECT_EQ(0, message.repeated_double_size ()); + EXPECT_EQ(0, message.repeated_bool_size ()); + EXPECT_EQ(0, message.repeated_string_size ()); + EXPECT_EQ(0, message.repeated_bytes_size ()); + + EXPECT_EQ(0, message.repeatedgroup_size ()); + EXPECT_EQ(0, message.repeated_nested_message_size ()); + EXPECT_EQ(0, message.repeated_foreign_message_size()); + EXPECT_EQ(0, message.repeated_import_message_size ()); + EXPECT_EQ(0, message.repeated_nested_enum_size ()); + EXPECT_EQ(0, message.repeated_foreign_enum_size ()); + EXPECT_EQ(0, message.repeated_import_enum_size ()); + + EXPECT_EQ(0, message.repeated_string_piece_size()); + EXPECT_EQ(0, message.repeated_cord_size()); + + // has_blah() should also be false for all default fields. 
+ EXPECT_FALSE(message.has_default_int32 ()); + EXPECT_FALSE(message.has_default_int64 ()); + EXPECT_FALSE(message.has_default_uint32 ()); + EXPECT_FALSE(message.has_default_uint64 ()); + EXPECT_FALSE(message.has_default_sint32 ()); + EXPECT_FALSE(message.has_default_sint64 ()); + EXPECT_FALSE(message.has_default_fixed32 ()); + EXPECT_FALSE(message.has_default_fixed64 ()); + EXPECT_FALSE(message.has_default_sfixed32()); + EXPECT_FALSE(message.has_default_sfixed64()); + EXPECT_FALSE(message.has_default_float ()); + EXPECT_FALSE(message.has_default_double ()); + EXPECT_FALSE(message.has_default_bool ()); + EXPECT_FALSE(message.has_default_string ()); + EXPECT_FALSE(message.has_default_bytes ()); + + EXPECT_FALSE(message.has_default_nested_enum ()); + EXPECT_FALSE(message.has_default_foreign_enum()); + EXPECT_FALSE(message.has_default_import_enum ()); + + + // Fields with defaults have their default values (duh). + EXPECT_EQ( 41 , message.default_int32 ()); + EXPECT_EQ( 42 , message.default_int64 ()); + EXPECT_EQ( 43 , message.default_uint32 ()); + EXPECT_EQ( 44 , message.default_uint64 ()); + EXPECT_EQ(-45 , message.default_sint32 ()); + EXPECT_EQ( 46 , message.default_sint64 ()); + EXPECT_EQ( 47 , message.default_fixed32 ()); + EXPECT_EQ( 48 , message.default_fixed64 ()); + EXPECT_EQ( 49 , message.default_sfixed32()); + EXPECT_EQ(-50 , message.default_sfixed64()); + EXPECT_EQ( 51.5 , message.default_float ()); + EXPECT_EQ( 52e3 , message.default_double ()); + EXPECT_EQ(true , message.default_bool ()); + EXPECT_EQ("hello", message.default_string ()); + EXPECT_EQ("world", message.default_bytes ()); + + EXPECT_EQ(unittest::TestAllTypes::BAR, message.default_nested_enum ()); + EXPECT_EQ(unittest::FOREIGN_BAR , message.default_foreign_enum()); + EXPECT_EQ(unittest_import::IMPORT_BAR, message.default_import_enum ()); + +} + +// ------------------------------------------------------------------- + +void TestUtil::ExpectRepeatedFieldsModified( + const unittest::TestAllTypes& message) { + // ModifyRepeatedFields only sets the second repeated element of each + // field. In addition to verifying this, we also verify that the first + // element and size were *not* modified. 
+  ASSERT_EQ(2, message.repeated_int32_size   ());
+  ASSERT_EQ(2, message.repeated_int64_size   ());
+  ASSERT_EQ(2, message.repeated_uint32_size  ());
+  ASSERT_EQ(2, message.repeated_uint64_size  ());
+  ASSERT_EQ(2, message.repeated_sint32_size  ());
+  ASSERT_EQ(2, message.repeated_sint64_size  ());
+  ASSERT_EQ(2, message.repeated_fixed32_size ());
+  ASSERT_EQ(2, message.repeated_fixed64_size ());
+  ASSERT_EQ(2, message.repeated_sfixed32_size());
+  ASSERT_EQ(2, message.repeated_sfixed64_size());
+  ASSERT_EQ(2, message.repeated_float_size   ());
+  ASSERT_EQ(2, message.repeated_double_size  ());
+  ASSERT_EQ(2, message.repeated_bool_size    ());
+  ASSERT_EQ(2, message.repeated_string_size  ());
+  ASSERT_EQ(2, message.repeated_bytes_size   ());
+
+  ASSERT_EQ(2, message.repeatedgroup_size           ());
+  ASSERT_EQ(2, message.repeated_nested_message_size ());
+  ASSERT_EQ(2, message.repeated_foreign_message_size());
+  ASSERT_EQ(2, message.repeated_import_message_size ());
+  ASSERT_EQ(2, message.repeated_nested_enum_size    ());
+  ASSERT_EQ(2, message.repeated_foreign_enum_size   ());
+  ASSERT_EQ(2, message.repeated_import_enum_size    ());
+
+#ifndef PROTOBUF_TEST_NO_DESCRIPTORS
+  ASSERT_EQ(2, message.repeated_string_piece_size());
+  ASSERT_EQ(2, message.repeated_cord_size());
+#endif
+
+  EXPECT_EQ(201  , message.repeated_int32   (0));
+  EXPECT_EQ(202  , message.repeated_int64   (0));
+  EXPECT_EQ(203  , message.repeated_uint32  (0));
+  EXPECT_EQ(204  , message.repeated_uint64  (0));
+  EXPECT_EQ(205  , message.repeated_sint32  (0));
+  EXPECT_EQ(206  , message.repeated_sint64  (0));
+  EXPECT_EQ(207  , message.repeated_fixed32 (0));
+  EXPECT_EQ(208  , message.repeated_fixed64 (0));
+  EXPECT_EQ(209  , message.repeated_sfixed32(0));
+  EXPECT_EQ(210  , message.repeated_sfixed64(0));
+  EXPECT_EQ(211  , message.repeated_float   (0));
+  EXPECT_EQ(212  , message.repeated_double  (0));
+  EXPECT_EQ(true , message.repeated_bool    (0));
+  EXPECT_EQ("215", message.repeated_string  (0));
+  EXPECT_EQ("216", message.repeated_bytes   (0));
+
+  EXPECT_EQ(217, message.repeatedgroup           (0).a());
+  EXPECT_EQ(218, message.repeated_nested_message (0).bb());
+  EXPECT_EQ(219, message.repeated_foreign_message(0).c());
+  EXPECT_EQ(220, message.repeated_import_message (0).d());
+
+  EXPECT_EQ(unittest::TestAllTypes::BAR, message.repeated_nested_enum (0));
+  EXPECT_EQ(unittest::FOREIGN_BAR      , message.repeated_foreign_enum(0));
+  EXPECT_EQ(unittest_import::IMPORT_BAR, message.repeated_import_enum (0));
+
+
+  // Actually verify the second (modified) elements now.
+  EXPECT_EQ(501  , message.repeated_int32   (1));
+  EXPECT_EQ(502  , message.repeated_int64   (1));
+  EXPECT_EQ(503  , message.repeated_uint32  (1));
+  EXPECT_EQ(504  , message.repeated_uint64  (1));
+  EXPECT_EQ(505  , message.repeated_sint32  (1));
+  EXPECT_EQ(506  , message.repeated_sint64  (1));
+  EXPECT_EQ(507  , message.repeated_fixed32 (1));
+  EXPECT_EQ(508  , message.repeated_fixed64 (1));
+  EXPECT_EQ(509  , message.repeated_sfixed32(1));
+  EXPECT_EQ(510  , message.repeated_sfixed64(1));
+  EXPECT_EQ(511  , message.repeated_float   (1));
+  EXPECT_EQ(512  , message.repeated_double  (1));
+  EXPECT_EQ(true , message.repeated_bool    (1));
+  EXPECT_EQ("515", message.repeated_string  (1));
+  EXPECT_EQ("516", message.repeated_bytes   (1));
+
+  EXPECT_EQ(517, message.repeatedgroup           (1).a());
+  EXPECT_EQ(518, message.repeated_nested_message (1).bb());
+  EXPECT_EQ(519, message.repeated_foreign_message(1).c());
+  EXPECT_EQ(520, message.repeated_import_message (1).d());
+
+  EXPECT_EQ(unittest::TestAllTypes::FOO, message.repeated_nested_enum (1));
+  EXPECT_EQ(unittest::FOREIGN_FOO      , message.repeated_foreign_enum(1));
+  EXPECT_EQ(unittest_import::IMPORT_FOO, message.repeated_import_enum (1));
+
+}
+
+// -------------------------------------------------------------------
+
+void TestUtil::SetPackedFields(unittest::TestPackedTypes* message) {
+  message->add_packed_int32   (601);
+  message->add_packed_int64   (602);
+  message->add_packed_uint32  (603);
+  message->add_packed_uint64  (604);
+  message->add_packed_sint32  (605);
+  message->add_packed_sint64  (606);
+  message->add_packed_fixed32 (607);
+  message->add_packed_fixed64 (608);
+  message->add_packed_sfixed32(609);
+  message->add_packed_sfixed64(610);
+  message->add_packed_float   (611);
+  message->add_packed_double  (612);
+  message->add_packed_bool    (true);
+  message->add_packed_enum    (unittest::FOREIGN_BAR);
+  // add a second one of each field
+  message->add_packed_int32   (701);
+  message->add_packed_int64   (702);
+  message->add_packed_uint32  (703);
+  message->add_packed_uint64  (704);
+  message->add_packed_sint32  (705);
+  message->add_packed_sint64  (706);
+  message->add_packed_fixed32 (707);
+  message->add_packed_fixed64 (708);
+  message->add_packed_sfixed32(709);
+  message->add_packed_sfixed64(710);
+  message->add_packed_float   (711);
+  message->add_packed_double  (712);
+  message->add_packed_bool    (false);
+  message->add_packed_enum    (unittest::FOREIGN_BAZ);
+}
+
+void TestUtil::SetUnpackedFields(unittest::TestUnpackedTypes* message) {
+  // The values applied here must match those of SetPackedFields.
+
+  message->add_unpacked_int32   (601);
+  message->add_unpacked_int64   (602);
+  message->add_unpacked_uint32  (603);
+  message->add_unpacked_uint64  (604);
+  message->add_unpacked_sint32  (605);
+  message->add_unpacked_sint64  (606);
+  message->add_unpacked_fixed32 (607);
+  message->add_unpacked_fixed64 (608);
+  message->add_unpacked_sfixed32(609);
+  message->add_unpacked_sfixed64(610);
+  message->add_unpacked_float   (611);
+  message->add_unpacked_double  (612);
+  message->add_unpacked_bool    (true);
+  message->add_unpacked_enum    (unittest::FOREIGN_BAR);
+  // add a second one of each field
+  message->add_unpacked_int32   (701);
+  message->add_unpacked_int64   (702);
+  message->add_unpacked_uint32  (703);
+  message->add_unpacked_uint64  (704);
+  message->add_unpacked_sint32  (705);
+  message->add_unpacked_sint64  (706);
+  message->add_unpacked_fixed32 (707);
+  message->add_unpacked_fixed64 (708);
+  message->add_unpacked_sfixed32(709);
+  message->add_unpacked_sfixed64(710);
+  message->add_unpacked_float   (711);
+  message->add_unpacked_double  (712);
+  message->add_unpacked_bool    (false);
+  message->add_unpacked_enum    (unittest::FOREIGN_BAZ);
+}
+
+// -------------------------------------------------------------------
+
+void TestUtil::ModifyPackedFields(unittest::TestPackedTypes* message) {
+  message->set_packed_int32   (1, 801);
+  message->set_packed_int64   (1, 802);
+  message->set_packed_uint32  (1, 803);
+  message->set_packed_uint64  (1, 804);
+  message->set_packed_sint32  (1, 805);
+  message->set_packed_sint64  (1, 806);
+  message->set_packed_fixed32 (1, 807);
+  message->set_packed_fixed64 (1, 808);
+  message->set_packed_sfixed32(1, 809);
+  message->set_packed_sfixed64(1, 810);
+  message->set_packed_float   (1, 811);
+  message->set_packed_double  (1, 812);
+  message->set_packed_bool    (1, true);
+  message->set_packed_enum    (1, unittest::FOREIGN_FOO);
+}
+
+// -------------------------------------------------------------------
+
+void TestUtil::ExpectPackedFieldsSet(const unittest::TestPackedTypes& message) {
+  ASSERT_EQ(2, message.packed_int32_size   ());
+  ASSERT_EQ(2, message.packed_int64_size   ());
+  ASSERT_EQ(2, message.packed_uint32_size  ());
+  ASSERT_EQ(2, message.packed_uint64_size  ());
+  ASSERT_EQ(2, message.packed_sint32_size  ());
+  ASSERT_EQ(2, message.packed_sint64_size  ());
+  ASSERT_EQ(2, message.packed_fixed32_size ());
+  ASSERT_EQ(2, message.packed_fixed64_size ());
+  ASSERT_EQ(2, message.packed_sfixed32_size());
+  ASSERT_EQ(2, message.packed_sfixed64_size());
+  ASSERT_EQ(2, message.packed_float_size   ());
+  ASSERT_EQ(2, message.packed_double_size  ());
+  ASSERT_EQ(2, message.packed_bool_size    ());
+  ASSERT_EQ(2, message.packed_enum_size    ());
+
+  EXPECT_EQ(601  , message.packed_int32   (0));
+  EXPECT_EQ(602  , message.packed_int64   (0));
+  EXPECT_EQ(603  , message.packed_uint32  (0));
+  EXPECT_EQ(604  , message.packed_uint64  (0));
+  EXPECT_EQ(605  , message.packed_sint32  (0));
+  EXPECT_EQ(606  , message.packed_sint64  (0));
+  EXPECT_EQ(607  , message.packed_fixed32 (0));
+  EXPECT_EQ(608  , message.packed_fixed64 (0));
+  EXPECT_EQ(609  , message.packed_sfixed32(0));
+  EXPECT_EQ(610  , message.packed_sfixed64(0));
+  EXPECT_EQ(611  , message.packed_float   (0));
+  EXPECT_EQ(612  , message.packed_double  (0));
+  EXPECT_EQ(true , message.packed_bool    (0));
+  EXPECT_EQ(unittest::FOREIGN_BAR, message.packed_enum(0));
+
+  EXPECT_EQ(701  , message.packed_int32   (1));
+  EXPECT_EQ(702  , message.packed_int64   (1));
+  EXPECT_EQ(703  , message.packed_uint32  (1));
+  EXPECT_EQ(704  , message.packed_uint64  (1));
+  EXPECT_EQ(705  , message.packed_sint32  (1));
+  EXPECT_EQ(706  , message.packed_sint64  (1));
+  EXPECT_EQ(707  , message.packed_fixed32 (1));
+  EXPECT_EQ(708  , message.packed_fixed64 (1));
+  EXPECT_EQ(709  , message.packed_sfixed32(1));
+  EXPECT_EQ(710  , message.packed_sfixed64(1));
+  EXPECT_EQ(711  , message.packed_float   (1));
+  EXPECT_EQ(712  , message.packed_double  (1));
+  EXPECT_EQ(false, message.packed_bool    (1));
+  EXPECT_EQ(unittest::FOREIGN_BAZ, message.packed_enum(1));
+}
+
+void TestUtil::ExpectUnpackedFieldsSet(
+    const unittest::TestUnpackedTypes& message) {
+  // The values expected here must match those of ExpectPackedFieldsSet.
+
+  ASSERT_EQ(2, message.unpacked_int32_size   ());
+  ASSERT_EQ(2, message.unpacked_int64_size   ());
+  ASSERT_EQ(2, message.unpacked_uint32_size  ());
+  ASSERT_EQ(2, message.unpacked_uint64_size  ());
+  ASSERT_EQ(2, message.unpacked_sint32_size  ());
+  ASSERT_EQ(2, message.unpacked_sint64_size  ());
+  ASSERT_EQ(2, message.unpacked_fixed32_size ());
+  ASSERT_EQ(2, message.unpacked_fixed64_size ());
+  ASSERT_EQ(2, message.unpacked_sfixed32_size());
+  ASSERT_EQ(2, message.unpacked_sfixed64_size());
+  ASSERT_EQ(2, message.unpacked_float_size   ());
+  ASSERT_EQ(2, message.unpacked_double_size  ());
+  ASSERT_EQ(2, message.unpacked_bool_size    ());
+  ASSERT_EQ(2, message.unpacked_enum_size    ());
+
+  EXPECT_EQ(601  , message.unpacked_int32   (0));
+  EXPECT_EQ(602  , message.unpacked_int64   (0));
+  EXPECT_EQ(603  , message.unpacked_uint32  (0));
+  EXPECT_EQ(604  , message.unpacked_uint64  (0));
+  EXPECT_EQ(605  , message.unpacked_sint32  (0));
+  EXPECT_EQ(606  , message.unpacked_sint64  (0));
+  EXPECT_EQ(607  , message.unpacked_fixed32 (0));
+  EXPECT_EQ(608  , message.unpacked_fixed64 (0));
+  EXPECT_EQ(609  , message.unpacked_sfixed32(0));
+  EXPECT_EQ(610  , message.unpacked_sfixed64(0));
+  EXPECT_EQ(611  , message.unpacked_float   (0));
+  EXPECT_EQ(612  , message.unpacked_double  (0));
+  EXPECT_EQ(true , message.unpacked_bool    (0));
+  EXPECT_EQ(unittest::FOREIGN_BAR, message.unpacked_enum(0));
+
+  EXPECT_EQ(701  , message.unpacked_int32   (1));
+  EXPECT_EQ(702  , message.unpacked_int64   (1));
+  EXPECT_EQ(703  , message.unpacked_uint32  (1));
+  EXPECT_EQ(704  , message.unpacked_uint64  (1));
+  EXPECT_EQ(705  , message.unpacked_sint32  (1));
+  EXPECT_EQ(706  , message.unpacked_sint64  (1));
+  EXPECT_EQ(707  , message.unpacked_fixed32 (1));
+  EXPECT_EQ(708  , message.unpacked_fixed64 (1));
+  EXPECT_EQ(709  , message.unpacked_sfixed32(1));
+  EXPECT_EQ(710  , message.unpacked_sfixed64(1));
+  EXPECT_EQ(711  , message.unpacked_float   (1));
+  EXPECT_EQ(712  , message.unpacked_double  (1));
+  EXPECT_EQ(false, message.unpacked_bool    (1));
+  EXPECT_EQ(unittest::FOREIGN_BAZ, message.unpacked_enum(1));
+}
+
+// -------------------------------------------------------------------
+
+void TestUtil::ExpectPackedClear(
+    const unittest::TestPackedTypes& message) {
+  // Packed repeated fields are empty.
+  EXPECT_EQ(0, message.packed_int32_size   ());
+  EXPECT_EQ(0, message.packed_int64_size   ());
+  EXPECT_EQ(0, message.packed_uint32_size  ());
+  EXPECT_EQ(0, message.packed_uint64_size  ());
+  EXPECT_EQ(0, message.packed_sint32_size  ());
+  EXPECT_EQ(0, message.packed_sint64_size  ());
+  EXPECT_EQ(0, message.packed_fixed32_size ());
+  EXPECT_EQ(0, message.packed_fixed64_size ());
+  EXPECT_EQ(0, message.packed_sfixed32_size());
+  EXPECT_EQ(0, message.packed_sfixed64_size());
+  EXPECT_EQ(0, message.packed_float_size   ());
+  EXPECT_EQ(0, message.packed_double_size  ());
+  EXPECT_EQ(0, message.packed_bool_size    ());
+  EXPECT_EQ(0, message.packed_enum_size    ());
+}
+
+// -------------------------------------------------------------------
+
+void TestUtil::ExpectPackedFieldsModified(
+    const unittest::TestPackedTypes& message) {
+  // Do the same for packed repeated fields.
+  ASSERT_EQ(2, message.packed_int32_size   ());
+  ASSERT_EQ(2, message.packed_int64_size   ());
+  ASSERT_EQ(2, message.packed_uint32_size  ());
+  ASSERT_EQ(2, message.packed_uint64_size  ());
+  ASSERT_EQ(2, message.packed_sint32_size  ());
+  ASSERT_EQ(2, message.packed_sint64_size  ());
+  ASSERT_EQ(2, message.packed_fixed32_size ());
+  ASSERT_EQ(2, message.packed_fixed64_size ());
+  ASSERT_EQ(2, message.packed_sfixed32_size());
+  ASSERT_EQ(2, message.packed_sfixed64_size());
+  ASSERT_EQ(2, message.packed_float_size   ());
+  ASSERT_EQ(2, message.packed_double_size  ());
+  ASSERT_EQ(2, message.packed_bool_size    ());
+  ASSERT_EQ(2, message.packed_enum_size    ());
+
+  EXPECT_EQ(601  , message.packed_int32   (0));
+  EXPECT_EQ(602  , message.packed_int64   (0));
+  EXPECT_EQ(603  , message.packed_uint32  (0));
+  EXPECT_EQ(604  , message.packed_uint64  (0));
+  EXPECT_EQ(605  , message.packed_sint32  (0));
+  EXPECT_EQ(606  , message.packed_sint64  (0));
+  EXPECT_EQ(607  , message.packed_fixed32 (0));
+  EXPECT_EQ(608  , message.packed_fixed64 (0));
+  EXPECT_EQ(609  , message.packed_sfixed32(0));
+  EXPECT_EQ(610  , message.packed_sfixed64(0));
+  EXPECT_EQ(611  , message.packed_float   (0));
+  EXPECT_EQ(612  , message.packed_double  (0));
+  EXPECT_EQ(true , message.packed_bool    (0));
+  EXPECT_EQ(unittest::FOREIGN_BAR, message.packed_enum(0));
+  // Actually verify the second (modified) elements now.
+  EXPECT_EQ(801  , message.packed_int32   (1));
+  EXPECT_EQ(802  , message.packed_int64   (1));
+  EXPECT_EQ(803  , message.packed_uint32  (1));
+  EXPECT_EQ(804  , message.packed_uint64  (1));
+  EXPECT_EQ(805  , message.packed_sint32  (1));
+  EXPECT_EQ(806  , message.packed_sint64  (1));
+  EXPECT_EQ(807  , message.packed_fixed32 (1));
+  EXPECT_EQ(808  , message.packed_fixed64 (1));
+  EXPECT_EQ(809  , message.packed_sfixed32(1));
+  EXPECT_EQ(810  , message.packed_sfixed64(1));
+  EXPECT_EQ(811  , message.packed_float   (1));
+  EXPECT_EQ(812  , message.packed_double  (1));
+  EXPECT_EQ(true , message.packed_bool    (1));
+  EXPECT_EQ(unittest::FOREIGN_FOO, message.packed_enum(1));
+}
+
+// ===================================================================
+// Extensions
+//
+// All this code is exactly equivalent to the above code except that it's
+// manipulating extension fields instead of normal ones.
+//
+// I gave up on the 80-char limit here.  Sorry.
+ +void TestUtil::SetAllExtensions(unittest::TestAllExtensions* message) { + message->SetExtension(unittest::optional_int32_extension , 101); + message->SetExtension(unittest::optional_int64_extension , 102); + message->SetExtension(unittest::optional_uint32_extension , 103); + message->SetExtension(unittest::optional_uint64_extension , 104); + message->SetExtension(unittest::optional_sint32_extension , 105); + message->SetExtension(unittest::optional_sint64_extension , 106); + message->SetExtension(unittest::optional_fixed32_extension , 107); + message->SetExtension(unittest::optional_fixed64_extension , 108); + message->SetExtension(unittest::optional_sfixed32_extension, 109); + message->SetExtension(unittest::optional_sfixed64_extension, 110); + message->SetExtension(unittest::optional_float_extension , 111); + message->SetExtension(unittest::optional_double_extension , 112); + message->SetExtension(unittest::optional_bool_extension , true); + message->SetExtension(unittest::optional_string_extension , "115"); + message->SetExtension(unittest::optional_bytes_extension , "116"); + + message->MutableExtension(unittest::optionalgroup_extension )->set_a(117); + message->MutableExtension(unittest::optional_nested_message_extension )->set_bb(118); + message->MutableExtension(unittest::optional_foreign_message_extension)->set_c(119); + message->MutableExtension(unittest::optional_import_message_extension )->set_d(120); + + message->SetExtension(unittest::optional_nested_enum_extension , unittest::TestAllTypes::BAZ); + message->SetExtension(unittest::optional_foreign_enum_extension, unittest::FOREIGN_BAZ ); + message->SetExtension(unittest::optional_import_enum_extension , unittest_import::IMPORT_BAZ); + + message->SetExtension(unittest::optional_string_piece_extension, "124"); + message->SetExtension(unittest::optional_cord_extension, "125"); + + // ----------------------------------------------------------------- + + message->AddExtension(unittest::repeated_int32_extension , 201); + message->AddExtension(unittest::repeated_int64_extension , 202); + message->AddExtension(unittest::repeated_uint32_extension , 203); + message->AddExtension(unittest::repeated_uint64_extension , 204); + message->AddExtension(unittest::repeated_sint32_extension , 205); + message->AddExtension(unittest::repeated_sint64_extension , 206); + message->AddExtension(unittest::repeated_fixed32_extension , 207); + message->AddExtension(unittest::repeated_fixed64_extension , 208); + message->AddExtension(unittest::repeated_sfixed32_extension, 209); + message->AddExtension(unittest::repeated_sfixed64_extension, 210); + message->AddExtension(unittest::repeated_float_extension , 211); + message->AddExtension(unittest::repeated_double_extension , 212); + message->AddExtension(unittest::repeated_bool_extension , true); + message->AddExtension(unittest::repeated_string_extension , "215"); + message->AddExtension(unittest::repeated_bytes_extension , "216"); + + message->AddExtension(unittest::repeatedgroup_extension )->set_a(217); + message->AddExtension(unittest::repeated_nested_message_extension )->set_bb(218); + message->AddExtension(unittest::repeated_foreign_message_extension)->set_c(219); + message->AddExtension(unittest::repeated_import_message_extension )->set_d(220); + + message->AddExtension(unittest::repeated_nested_enum_extension , unittest::TestAllTypes::BAR); + message->AddExtension(unittest::repeated_foreign_enum_extension, unittest::FOREIGN_BAR ); + message->AddExtension(unittest::repeated_import_enum_extension , 
unittest_import::IMPORT_BAR); + + message->AddExtension(unittest::repeated_string_piece_extension, "224"); + message->AddExtension(unittest::repeated_cord_extension, "225"); + + // Add a second one of each field. + message->AddExtension(unittest::repeated_int32_extension , 301); + message->AddExtension(unittest::repeated_int64_extension , 302); + message->AddExtension(unittest::repeated_uint32_extension , 303); + message->AddExtension(unittest::repeated_uint64_extension , 304); + message->AddExtension(unittest::repeated_sint32_extension , 305); + message->AddExtension(unittest::repeated_sint64_extension , 306); + message->AddExtension(unittest::repeated_fixed32_extension , 307); + message->AddExtension(unittest::repeated_fixed64_extension , 308); + message->AddExtension(unittest::repeated_sfixed32_extension, 309); + message->AddExtension(unittest::repeated_sfixed64_extension, 310); + message->AddExtension(unittest::repeated_float_extension , 311); + message->AddExtension(unittest::repeated_double_extension , 312); + message->AddExtension(unittest::repeated_bool_extension , false); + message->AddExtension(unittest::repeated_string_extension , "315"); + message->AddExtension(unittest::repeated_bytes_extension , "316"); + + message->AddExtension(unittest::repeatedgroup_extension )->set_a(317); + message->AddExtension(unittest::repeated_nested_message_extension )->set_bb(318); + message->AddExtension(unittest::repeated_foreign_message_extension)->set_c(319); + message->AddExtension(unittest::repeated_import_message_extension )->set_d(320); + + message->AddExtension(unittest::repeated_nested_enum_extension , unittest::TestAllTypes::BAZ); + message->AddExtension(unittest::repeated_foreign_enum_extension, unittest::FOREIGN_BAZ ); + message->AddExtension(unittest::repeated_import_enum_extension , unittest_import::IMPORT_BAZ); + + message->AddExtension(unittest::repeated_string_piece_extension, "324"); + message->AddExtension(unittest::repeated_cord_extension, "325"); + + // ----------------------------------------------------------------- + + message->SetExtension(unittest::default_int32_extension , 401); + message->SetExtension(unittest::default_int64_extension , 402); + message->SetExtension(unittest::default_uint32_extension , 403); + message->SetExtension(unittest::default_uint64_extension , 404); + message->SetExtension(unittest::default_sint32_extension , 405); + message->SetExtension(unittest::default_sint64_extension , 406); + message->SetExtension(unittest::default_fixed32_extension , 407); + message->SetExtension(unittest::default_fixed64_extension , 408); + message->SetExtension(unittest::default_sfixed32_extension, 409); + message->SetExtension(unittest::default_sfixed64_extension, 410); + message->SetExtension(unittest::default_float_extension , 411); + message->SetExtension(unittest::default_double_extension , 412); + message->SetExtension(unittest::default_bool_extension , false); + message->SetExtension(unittest::default_string_extension , "415"); + message->SetExtension(unittest::default_bytes_extension , "416"); + + message->SetExtension(unittest::default_nested_enum_extension , unittest::TestAllTypes::FOO); + message->SetExtension(unittest::default_foreign_enum_extension, unittest::FOREIGN_FOO ); + message->SetExtension(unittest::default_import_enum_extension , unittest_import::IMPORT_FOO); + + message->SetExtension(unittest::default_string_piece_extension, "424"); + message->SetExtension(unittest::default_cord_extension, "425"); +} + +// 
------------------------------------------------------------------- + +void TestUtil::SetAllFieldsAndExtensions( + unittest::TestFieldOrderings* message) { + GOOGLE_CHECK(message); + message->set_my_int(1); + message->set_my_string("foo"); + message->set_my_float(1.0); + message->SetExtension(unittest::my_extension_int, 23); + message->SetExtension(unittest::my_extension_string, "bar"); +} + +// ------------------------------------------------------------------- + +void TestUtil::ModifyRepeatedExtensions(unittest::TestAllExtensions* message) { + message->SetExtension(unittest::repeated_int32_extension , 1, 501); + message->SetExtension(unittest::repeated_int64_extension , 1, 502); + message->SetExtension(unittest::repeated_uint32_extension , 1, 503); + message->SetExtension(unittest::repeated_uint64_extension , 1, 504); + message->SetExtension(unittest::repeated_sint32_extension , 1, 505); + message->SetExtension(unittest::repeated_sint64_extension , 1, 506); + message->SetExtension(unittest::repeated_fixed32_extension , 1, 507); + message->SetExtension(unittest::repeated_fixed64_extension , 1, 508); + message->SetExtension(unittest::repeated_sfixed32_extension, 1, 509); + message->SetExtension(unittest::repeated_sfixed64_extension, 1, 510); + message->SetExtension(unittest::repeated_float_extension , 1, 511); + message->SetExtension(unittest::repeated_double_extension , 1, 512); + message->SetExtension(unittest::repeated_bool_extension , 1, true); + message->SetExtension(unittest::repeated_string_extension , 1, "515"); + message->SetExtension(unittest::repeated_bytes_extension , 1, "516"); + + message->MutableExtension(unittest::repeatedgroup_extension , 1)->set_a(517); + message->MutableExtension(unittest::repeated_nested_message_extension , 1)->set_bb(518); + message->MutableExtension(unittest::repeated_foreign_message_extension, 1)->set_c(519); + message->MutableExtension(unittest::repeated_import_message_extension , 1)->set_d(520); + + message->SetExtension(unittest::repeated_nested_enum_extension , 1, unittest::TestAllTypes::FOO); + message->SetExtension(unittest::repeated_foreign_enum_extension, 1, unittest::FOREIGN_FOO ); + message->SetExtension(unittest::repeated_import_enum_extension , 1, unittest_import::IMPORT_FOO); + + message->SetExtension(unittest::repeated_string_piece_extension, 1, "524"); + message->SetExtension(unittest::repeated_cord_extension, 1, "525"); +} + +// ------------------------------------------------------------------- + +void TestUtil::ExpectAllExtensionsSet( + const unittest::TestAllExtensions& message) { + EXPECT_TRUE(message.HasExtension(unittest::optional_int32_extension )); + EXPECT_TRUE(message.HasExtension(unittest::optional_int64_extension )); + EXPECT_TRUE(message.HasExtension(unittest::optional_uint32_extension )); + EXPECT_TRUE(message.HasExtension(unittest::optional_uint64_extension )); + EXPECT_TRUE(message.HasExtension(unittest::optional_sint32_extension )); + EXPECT_TRUE(message.HasExtension(unittest::optional_sint64_extension )); + EXPECT_TRUE(message.HasExtension(unittest::optional_fixed32_extension )); + EXPECT_TRUE(message.HasExtension(unittest::optional_fixed64_extension )); + EXPECT_TRUE(message.HasExtension(unittest::optional_sfixed32_extension)); + EXPECT_TRUE(message.HasExtension(unittest::optional_sfixed64_extension)); + EXPECT_TRUE(message.HasExtension(unittest::optional_float_extension )); + EXPECT_TRUE(message.HasExtension(unittest::optional_double_extension )); + EXPECT_TRUE(message.HasExtension(unittest::optional_bool_extension 
)); + EXPECT_TRUE(message.HasExtension(unittest::optional_string_extension )); + EXPECT_TRUE(message.HasExtension(unittest::optional_bytes_extension )); + + EXPECT_TRUE(message.HasExtension(unittest::optionalgroup_extension )); + EXPECT_TRUE(message.HasExtension(unittest::optional_nested_message_extension )); + EXPECT_TRUE(message.HasExtension(unittest::optional_foreign_message_extension)); + EXPECT_TRUE(message.HasExtension(unittest::optional_import_message_extension )); + + EXPECT_TRUE(message.GetExtension(unittest::optionalgroup_extension ).has_a()); + EXPECT_TRUE(message.GetExtension(unittest::optional_nested_message_extension ).has_bb()); + EXPECT_TRUE(message.GetExtension(unittest::optional_foreign_message_extension).has_c()); + EXPECT_TRUE(message.GetExtension(unittest::optional_import_message_extension ).has_d()); + + EXPECT_TRUE(message.HasExtension(unittest::optional_nested_enum_extension )); + EXPECT_TRUE(message.HasExtension(unittest::optional_foreign_enum_extension)); + EXPECT_TRUE(message.HasExtension(unittest::optional_import_enum_extension )); + + EXPECT_TRUE(message.HasExtension(unittest::optional_string_piece_extension)); + EXPECT_TRUE(message.HasExtension(unittest::optional_cord_extension)); + + EXPECT_EQ(101 , message.GetExtension(unittest::optional_int32_extension )); + EXPECT_EQ(102 , message.GetExtension(unittest::optional_int64_extension )); + EXPECT_EQ(103 , message.GetExtension(unittest::optional_uint32_extension )); + EXPECT_EQ(104 , message.GetExtension(unittest::optional_uint64_extension )); + EXPECT_EQ(105 , message.GetExtension(unittest::optional_sint32_extension )); + EXPECT_EQ(106 , message.GetExtension(unittest::optional_sint64_extension )); + EXPECT_EQ(107 , message.GetExtension(unittest::optional_fixed32_extension )); + EXPECT_EQ(108 , message.GetExtension(unittest::optional_fixed64_extension )); + EXPECT_EQ(109 , message.GetExtension(unittest::optional_sfixed32_extension)); + EXPECT_EQ(110 , message.GetExtension(unittest::optional_sfixed64_extension)); + EXPECT_EQ(111 , message.GetExtension(unittest::optional_float_extension )); + EXPECT_EQ(112 , message.GetExtension(unittest::optional_double_extension )); + EXPECT_EQ(true , message.GetExtension(unittest::optional_bool_extension )); + EXPECT_EQ("115", message.GetExtension(unittest::optional_string_extension )); + EXPECT_EQ("116", message.GetExtension(unittest::optional_bytes_extension )); + + EXPECT_EQ(117, message.GetExtension(unittest::optionalgroup_extension ).a()); + EXPECT_EQ(118, message.GetExtension(unittest::optional_nested_message_extension ).bb()); + EXPECT_EQ(119, message.GetExtension(unittest::optional_foreign_message_extension).c()); + EXPECT_EQ(120, message.GetExtension(unittest::optional_import_message_extension ).d()); + + EXPECT_EQ(unittest::TestAllTypes::BAZ, message.GetExtension(unittest::optional_nested_enum_extension )); + EXPECT_EQ(unittest::FOREIGN_BAZ , message.GetExtension(unittest::optional_foreign_enum_extension)); + EXPECT_EQ(unittest_import::IMPORT_BAZ, message.GetExtension(unittest::optional_import_enum_extension )); + + EXPECT_EQ("124", message.GetExtension(unittest::optional_string_piece_extension)); + EXPECT_EQ("125", message.GetExtension(unittest::optional_cord_extension)); + + // ----------------------------------------------------------------- + + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_int32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_int64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_uint32_extension 
)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_uint64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sint32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sint64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_fixed32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_fixed64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sfixed32_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sfixed64_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_float_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_double_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_bool_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_string_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_bytes_extension )); + + ASSERT_EQ(2, message.ExtensionSize(unittest::repeatedgroup_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_nested_message_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_foreign_message_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_import_message_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_nested_enum_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_foreign_enum_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_import_enum_extension )); + + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_string_piece_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_cord_extension)); + + EXPECT_EQ(201 , message.GetExtension(unittest::repeated_int32_extension , 0)); + EXPECT_EQ(202 , message.GetExtension(unittest::repeated_int64_extension , 0)); + EXPECT_EQ(203 , message.GetExtension(unittest::repeated_uint32_extension , 0)); + EXPECT_EQ(204 , message.GetExtension(unittest::repeated_uint64_extension , 0)); + EXPECT_EQ(205 , message.GetExtension(unittest::repeated_sint32_extension , 0)); + EXPECT_EQ(206 , message.GetExtension(unittest::repeated_sint64_extension , 0)); + EXPECT_EQ(207 , message.GetExtension(unittest::repeated_fixed32_extension , 0)); + EXPECT_EQ(208 , message.GetExtension(unittest::repeated_fixed64_extension , 0)); + EXPECT_EQ(209 , message.GetExtension(unittest::repeated_sfixed32_extension, 0)); + EXPECT_EQ(210 , message.GetExtension(unittest::repeated_sfixed64_extension, 0)); + EXPECT_EQ(211 , message.GetExtension(unittest::repeated_float_extension , 0)); + EXPECT_EQ(212 , message.GetExtension(unittest::repeated_double_extension , 0)); + EXPECT_EQ(true , message.GetExtension(unittest::repeated_bool_extension , 0)); + EXPECT_EQ("215", message.GetExtension(unittest::repeated_string_extension , 0)); + EXPECT_EQ("216", message.GetExtension(unittest::repeated_bytes_extension , 0)); + + EXPECT_EQ(217, message.GetExtension(unittest::repeatedgroup_extension , 0).a()); + EXPECT_EQ(218, message.GetExtension(unittest::repeated_nested_message_extension , 0).bb()); + EXPECT_EQ(219, message.GetExtension(unittest::repeated_foreign_message_extension, 0).c()); + EXPECT_EQ(220, message.GetExtension(unittest::repeated_import_message_extension , 0).d()); + + EXPECT_EQ(unittest::TestAllTypes::BAR, message.GetExtension(unittest::repeated_nested_enum_extension , 0)); + EXPECT_EQ(unittest::FOREIGN_BAR , message.GetExtension(unittest::repeated_foreign_enum_extension, 0)); + EXPECT_EQ(unittest_import::IMPORT_BAR, 
message.GetExtension(unittest::repeated_import_enum_extension , 0)); + + EXPECT_EQ("224", message.GetExtension(unittest::repeated_string_piece_extension, 0)); + EXPECT_EQ("225", message.GetExtension(unittest::repeated_cord_extension, 0)); + + EXPECT_EQ(301 , message.GetExtension(unittest::repeated_int32_extension , 1)); + EXPECT_EQ(302 , message.GetExtension(unittest::repeated_int64_extension , 1)); + EXPECT_EQ(303 , message.GetExtension(unittest::repeated_uint32_extension , 1)); + EXPECT_EQ(304 , message.GetExtension(unittest::repeated_uint64_extension , 1)); + EXPECT_EQ(305 , message.GetExtension(unittest::repeated_sint32_extension , 1)); + EXPECT_EQ(306 , message.GetExtension(unittest::repeated_sint64_extension , 1)); + EXPECT_EQ(307 , message.GetExtension(unittest::repeated_fixed32_extension , 1)); + EXPECT_EQ(308 , message.GetExtension(unittest::repeated_fixed64_extension , 1)); + EXPECT_EQ(309 , message.GetExtension(unittest::repeated_sfixed32_extension, 1)); + EXPECT_EQ(310 , message.GetExtension(unittest::repeated_sfixed64_extension, 1)); + EXPECT_EQ(311 , message.GetExtension(unittest::repeated_float_extension , 1)); + EXPECT_EQ(312 , message.GetExtension(unittest::repeated_double_extension , 1)); + EXPECT_EQ(false, message.GetExtension(unittest::repeated_bool_extension , 1)); + EXPECT_EQ("315", message.GetExtension(unittest::repeated_string_extension , 1)); + EXPECT_EQ("316", message.GetExtension(unittest::repeated_bytes_extension , 1)); + + EXPECT_EQ(317, message.GetExtension(unittest::repeatedgroup_extension , 1).a()); + EXPECT_EQ(318, message.GetExtension(unittest::repeated_nested_message_extension , 1).bb()); + EXPECT_EQ(319, message.GetExtension(unittest::repeated_foreign_message_extension, 1).c()); + EXPECT_EQ(320, message.GetExtension(unittest::repeated_import_message_extension , 1).d()); + + EXPECT_EQ(unittest::TestAllTypes::BAZ, message.GetExtension(unittest::repeated_nested_enum_extension , 1)); + EXPECT_EQ(unittest::FOREIGN_BAZ , message.GetExtension(unittest::repeated_foreign_enum_extension, 1)); + EXPECT_EQ(unittest_import::IMPORT_BAZ, message.GetExtension(unittest::repeated_import_enum_extension , 1)); + + EXPECT_EQ("324", message.GetExtension(unittest::repeated_string_piece_extension, 1)); + EXPECT_EQ("325", message.GetExtension(unittest::repeated_cord_extension, 1)); + + // ----------------------------------------------------------------- + + EXPECT_TRUE(message.HasExtension(unittest::default_int32_extension )); + EXPECT_TRUE(message.HasExtension(unittest::default_int64_extension )); + EXPECT_TRUE(message.HasExtension(unittest::default_uint32_extension )); + EXPECT_TRUE(message.HasExtension(unittest::default_uint64_extension )); + EXPECT_TRUE(message.HasExtension(unittest::default_sint32_extension )); + EXPECT_TRUE(message.HasExtension(unittest::default_sint64_extension )); + EXPECT_TRUE(message.HasExtension(unittest::default_fixed32_extension )); + EXPECT_TRUE(message.HasExtension(unittest::default_fixed64_extension )); + EXPECT_TRUE(message.HasExtension(unittest::default_sfixed32_extension)); + EXPECT_TRUE(message.HasExtension(unittest::default_sfixed64_extension)); + EXPECT_TRUE(message.HasExtension(unittest::default_float_extension )); + EXPECT_TRUE(message.HasExtension(unittest::default_double_extension )); + EXPECT_TRUE(message.HasExtension(unittest::default_bool_extension )); + EXPECT_TRUE(message.HasExtension(unittest::default_string_extension )); + EXPECT_TRUE(message.HasExtension(unittest::default_bytes_extension )); + + 
EXPECT_TRUE(message.HasExtension(unittest::default_nested_enum_extension )); + EXPECT_TRUE(message.HasExtension(unittest::default_foreign_enum_extension)); + EXPECT_TRUE(message.HasExtension(unittest::default_import_enum_extension )); + + EXPECT_TRUE(message.HasExtension(unittest::default_string_piece_extension)); + EXPECT_TRUE(message.HasExtension(unittest::default_cord_extension)); + + EXPECT_EQ(401 , message.GetExtension(unittest::default_int32_extension )); + EXPECT_EQ(402 , message.GetExtension(unittest::default_int64_extension )); + EXPECT_EQ(403 , message.GetExtension(unittest::default_uint32_extension )); + EXPECT_EQ(404 , message.GetExtension(unittest::default_uint64_extension )); + EXPECT_EQ(405 , message.GetExtension(unittest::default_sint32_extension )); + EXPECT_EQ(406 , message.GetExtension(unittest::default_sint64_extension )); + EXPECT_EQ(407 , message.GetExtension(unittest::default_fixed32_extension )); + EXPECT_EQ(408 , message.GetExtension(unittest::default_fixed64_extension )); + EXPECT_EQ(409 , message.GetExtension(unittest::default_sfixed32_extension)); + EXPECT_EQ(410 , message.GetExtension(unittest::default_sfixed64_extension)); + EXPECT_EQ(411 , message.GetExtension(unittest::default_float_extension )); + EXPECT_EQ(412 , message.GetExtension(unittest::default_double_extension )); + EXPECT_EQ(false, message.GetExtension(unittest::default_bool_extension )); + EXPECT_EQ("415", message.GetExtension(unittest::default_string_extension )); + EXPECT_EQ("416", message.GetExtension(unittest::default_bytes_extension )); + + EXPECT_EQ(unittest::TestAllTypes::FOO, message.GetExtension(unittest::default_nested_enum_extension )); + EXPECT_EQ(unittest::FOREIGN_FOO , message.GetExtension(unittest::default_foreign_enum_extension)); + EXPECT_EQ(unittest_import::IMPORT_FOO, message.GetExtension(unittest::default_import_enum_extension )); + + EXPECT_EQ("424", message.GetExtension(unittest::default_string_piece_extension)); + EXPECT_EQ("425", message.GetExtension(unittest::default_cord_extension)); +} + +// ------------------------------------------------------------------- + +void TestUtil::ExpectExtensionsClear( + const unittest::TestAllExtensions& message) { + string serialized; + ASSERT_TRUE(message.SerializeToString(&serialized)); + EXPECT_EQ("", serialized); + EXPECT_EQ(0, message.ByteSize()); + + // has_blah() should initially be false for all optional fields. 
+  EXPECT_FALSE(message.HasExtension(unittest::optional_int32_extension   ));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_int64_extension   ));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_uint32_extension  ));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_uint64_extension  ));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_sint32_extension  ));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_sint64_extension  ));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_fixed32_extension ));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_fixed64_extension ));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_sfixed32_extension));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_sfixed64_extension));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_float_extension   ));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_double_extension  ));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_bool_extension    ));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_string_extension  ));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_bytes_extension   ));
+
+  EXPECT_FALSE(message.HasExtension(unittest::optionalgroup_extension           ));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_nested_message_extension ));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_foreign_message_extension));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_import_message_extension ));
+
+  EXPECT_FALSE(message.HasExtension(unittest::optional_nested_enum_extension ));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_foreign_enum_extension));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_import_enum_extension ));
+
+  EXPECT_FALSE(message.HasExtension(unittest::optional_string_piece_extension));
+  EXPECT_FALSE(message.HasExtension(unittest::optional_cord_extension));
+
+  // Optional fields without defaults are set to zero or something like it.
+  EXPECT_EQ(0    , message.GetExtension(unittest::optional_int32_extension   ));
+  EXPECT_EQ(0    , message.GetExtension(unittest::optional_int64_extension   ));
+  EXPECT_EQ(0    , message.GetExtension(unittest::optional_uint32_extension  ));
+  EXPECT_EQ(0    , message.GetExtension(unittest::optional_uint64_extension  ));
+  EXPECT_EQ(0    , message.GetExtension(unittest::optional_sint32_extension  ));
+  EXPECT_EQ(0    , message.GetExtension(unittest::optional_sint64_extension  ));
+  EXPECT_EQ(0    , message.GetExtension(unittest::optional_fixed32_extension ));
+  EXPECT_EQ(0    , message.GetExtension(unittest::optional_fixed64_extension ));
+  EXPECT_EQ(0    , message.GetExtension(unittest::optional_sfixed32_extension));
+  EXPECT_EQ(0    , message.GetExtension(unittest::optional_sfixed64_extension));
+  EXPECT_EQ(0    , message.GetExtension(unittest::optional_float_extension   ));
+  EXPECT_EQ(0    , message.GetExtension(unittest::optional_double_extension  ));
+  EXPECT_EQ(false, message.GetExtension(unittest::optional_bool_extension    ));
+  EXPECT_EQ(""   , message.GetExtension(unittest::optional_string_extension  ));
+  EXPECT_EQ(""   , message.GetExtension(unittest::optional_bytes_extension   ));
+
+  // Embedded messages should also be clear.
+  EXPECT_FALSE(message.GetExtension(unittest::optionalgroup_extension           ).has_a());
+  EXPECT_FALSE(message.GetExtension(unittest::optional_nested_message_extension ).has_bb());
+  EXPECT_FALSE(message.GetExtension(unittest::optional_foreign_message_extension).has_c());
+  EXPECT_FALSE(message.GetExtension(unittest::optional_import_message_extension ).has_d());
+
+  EXPECT_EQ(0, message.GetExtension(unittest::optionalgroup_extension           ).a());
+  EXPECT_EQ(0, message.GetExtension(unittest::optional_nested_message_extension ).bb());
+  EXPECT_EQ(0, message.GetExtension(unittest::optional_foreign_message_extension).c());
+  EXPECT_EQ(0, message.GetExtension(unittest::optional_import_message_extension ).d());
+
+  // Enums without defaults are set to the first value in the enum.
+  EXPECT_EQ(unittest::TestAllTypes::FOO, message.GetExtension(unittest::optional_nested_enum_extension ));
+  EXPECT_EQ(unittest::FOREIGN_FOO      , message.GetExtension(unittest::optional_foreign_enum_extension));
+  EXPECT_EQ(unittest_import::IMPORT_FOO, message.GetExtension(unittest::optional_import_enum_extension ));
+
+  EXPECT_EQ("", message.GetExtension(unittest::optional_string_piece_extension));
+  EXPECT_EQ("", message.GetExtension(unittest::optional_cord_extension));
+
+  // Repeated fields are empty.
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_int32_extension   ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_int64_extension   ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_uint32_extension  ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_uint64_extension  ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_sint32_extension  ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_sint64_extension  ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_fixed32_extension ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_fixed64_extension ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_sfixed32_extension));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_sfixed64_extension));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_float_extension   ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_double_extension  ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_bool_extension    ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_string_extension  ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_bytes_extension   ));
+
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeatedgroup_extension           ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_nested_message_extension ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_foreign_message_extension));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_import_message_extension ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_nested_enum_extension    ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_foreign_enum_extension   ));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_import_enum_extension    ));
+
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_string_piece_extension));
+  EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_cord_extension));
+
+  // has_blah() should also be false for all default fields.
+ EXPECT_FALSE(message.HasExtension(unittest::default_int32_extension )); + EXPECT_FALSE(message.HasExtension(unittest::default_int64_extension )); + EXPECT_FALSE(message.HasExtension(unittest::default_uint32_extension )); + EXPECT_FALSE(message.HasExtension(unittest::default_uint64_extension )); + EXPECT_FALSE(message.HasExtension(unittest::default_sint32_extension )); + EXPECT_FALSE(message.HasExtension(unittest::default_sint64_extension )); + EXPECT_FALSE(message.HasExtension(unittest::default_fixed32_extension )); + EXPECT_FALSE(message.HasExtension(unittest::default_fixed64_extension )); + EXPECT_FALSE(message.HasExtension(unittest::default_sfixed32_extension)); + EXPECT_FALSE(message.HasExtension(unittest::default_sfixed64_extension)); + EXPECT_FALSE(message.HasExtension(unittest::default_float_extension )); + EXPECT_FALSE(message.HasExtension(unittest::default_double_extension )); + EXPECT_FALSE(message.HasExtension(unittest::default_bool_extension )); + EXPECT_FALSE(message.HasExtension(unittest::default_string_extension )); + EXPECT_FALSE(message.HasExtension(unittest::default_bytes_extension )); + + EXPECT_FALSE(message.HasExtension(unittest::default_nested_enum_extension )); + EXPECT_FALSE(message.HasExtension(unittest::default_foreign_enum_extension)); + EXPECT_FALSE(message.HasExtension(unittest::default_import_enum_extension )); + + EXPECT_FALSE(message.HasExtension(unittest::default_string_piece_extension)); + EXPECT_FALSE(message.HasExtension(unittest::default_cord_extension)); + + // Fields with defaults have their default values (duh). + EXPECT_EQ( 41 , message.GetExtension(unittest::default_int32_extension )); + EXPECT_EQ( 42 , message.GetExtension(unittest::default_int64_extension )); + EXPECT_EQ( 43 , message.GetExtension(unittest::default_uint32_extension )); + EXPECT_EQ( 44 , message.GetExtension(unittest::default_uint64_extension )); + EXPECT_EQ(-45 , message.GetExtension(unittest::default_sint32_extension )); + EXPECT_EQ( 46 , message.GetExtension(unittest::default_sint64_extension )); + EXPECT_EQ( 47 , message.GetExtension(unittest::default_fixed32_extension )); + EXPECT_EQ( 48 , message.GetExtension(unittest::default_fixed64_extension )); + EXPECT_EQ( 49 , message.GetExtension(unittest::default_sfixed32_extension)); + EXPECT_EQ(-50 , message.GetExtension(unittest::default_sfixed64_extension)); + EXPECT_EQ( 51.5 , message.GetExtension(unittest::default_float_extension )); + EXPECT_EQ( 52e3 , message.GetExtension(unittest::default_double_extension )); + EXPECT_EQ(true , message.GetExtension(unittest::default_bool_extension )); + EXPECT_EQ("hello", message.GetExtension(unittest::default_string_extension )); + EXPECT_EQ("world", message.GetExtension(unittest::default_bytes_extension )); + + EXPECT_EQ(unittest::TestAllTypes::BAR, message.GetExtension(unittest::default_nested_enum_extension )); + EXPECT_EQ(unittest::FOREIGN_BAR , message.GetExtension(unittest::default_foreign_enum_extension)); + EXPECT_EQ(unittest_import::IMPORT_BAR, message.GetExtension(unittest::default_import_enum_extension )); + + EXPECT_EQ("abc", message.GetExtension(unittest::default_string_piece_extension)); + EXPECT_EQ("123", message.GetExtension(unittest::default_cord_extension)); +} + +// ------------------------------------------------------------------- + +void TestUtil::ExpectRepeatedExtensionsModified( + const unittest::TestAllExtensions& message) { + // ModifyRepeatedFields only sets the second repeated element of each + // field. 
In addition to verifying this, we also verify that the first + // element and size were *not* modified. + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_int32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_int64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_uint32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_uint64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sint32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sint64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_fixed32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_fixed64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sfixed32_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sfixed64_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_float_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_double_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_bool_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_string_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_bytes_extension )); + + ASSERT_EQ(2, message.ExtensionSize(unittest::repeatedgroup_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_nested_message_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_foreign_message_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_import_message_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_nested_enum_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_foreign_enum_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_import_enum_extension )); + + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_string_piece_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_cord_extension)); + + EXPECT_EQ(201 , message.GetExtension(unittest::repeated_int32_extension , 0)); + EXPECT_EQ(202 , message.GetExtension(unittest::repeated_int64_extension , 0)); + EXPECT_EQ(203 , message.GetExtension(unittest::repeated_uint32_extension , 0)); + EXPECT_EQ(204 , message.GetExtension(unittest::repeated_uint64_extension , 0)); + EXPECT_EQ(205 , message.GetExtension(unittest::repeated_sint32_extension , 0)); + EXPECT_EQ(206 , message.GetExtension(unittest::repeated_sint64_extension , 0)); + EXPECT_EQ(207 , message.GetExtension(unittest::repeated_fixed32_extension , 0)); + EXPECT_EQ(208 , message.GetExtension(unittest::repeated_fixed64_extension , 0)); + EXPECT_EQ(209 , message.GetExtension(unittest::repeated_sfixed32_extension, 0)); + EXPECT_EQ(210 , message.GetExtension(unittest::repeated_sfixed64_extension, 0)); + EXPECT_EQ(211 , message.GetExtension(unittest::repeated_float_extension , 0)); + EXPECT_EQ(212 , message.GetExtension(unittest::repeated_double_extension , 0)); + EXPECT_EQ(true , message.GetExtension(unittest::repeated_bool_extension , 0)); + EXPECT_EQ("215", message.GetExtension(unittest::repeated_string_extension , 0)); + EXPECT_EQ("216", message.GetExtension(unittest::repeated_bytes_extension , 0)); + + EXPECT_EQ(217, message.GetExtension(unittest::repeatedgroup_extension , 0).a()); + EXPECT_EQ(218, message.GetExtension(unittest::repeated_nested_message_extension , 0).bb()); + EXPECT_EQ(219, message.GetExtension(unittest::repeated_foreign_message_extension, 0).c()); + EXPECT_EQ(220, 
message.GetExtension(unittest::repeated_import_message_extension , 0).d()); + + EXPECT_EQ(unittest::TestAllTypes::BAR, message.GetExtension(unittest::repeated_nested_enum_extension , 0)); + EXPECT_EQ(unittest::FOREIGN_BAR , message.GetExtension(unittest::repeated_foreign_enum_extension, 0)); + EXPECT_EQ(unittest_import::IMPORT_BAR, message.GetExtension(unittest::repeated_import_enum_extension , 0)); + + EXPECT_EQ("224", message.GetExtension(unittest::repeated_string_piece_extension, 0)); + EXPECT_EQ("225", message.GetExtension(unittest::repeated_cord_extension, 0)); + + // Actually verify the second (modified) elements now. + EXPECT_EQ(501 , message.GetExtension(unittest::repeated_int32_extension , 1)); + EXPECT_EQ(502 , message.GetExtension(unittest::repeated_int64_extension , 1)); + EXPECT_EQ(503 , message.GetExtension(unittest::repeated_uint32_extension , 1)); + EXPECT_EQ(504 , message.GetExtension(unittest::repeated_uint64_extension , 1)); + EXPECT_EQ(505 , message.GetExtension(unittest::repeated_sint32_extension , 1)); + EXPECT_EQ(506 , message.GetExtension(unittest::repeated_sint64_extension , 1)); + EXPECT_EQ(507 , message.GetExtension(unittest::repeated_fixed32_extension , 1)); + EXPECT_EQ(508 , message.GetExtension(unittest::repeated_fixed64_extension , 1)); + EXPECT_EQ(509 , message.GetExtension(unittest::repeated_sfixed32_extension, 1)); + EXPECT_EQ(510 , message.GetExtension(unittest::repeated_sfixed64_extension, 1)); + EXPECT_EQ(511 , message.GetExtension(unittest::repeated_float_extension , 1)); + EXPECT_EQ(512 , message.GetExtension(unittest::repeated_double_extension , 1)); + EXPECT_EQ(true , message.GetExtension(unittest::repeated_bool_extension , 1)); + EXPECT_EQ("515", message.GetExtension(unittest::repeated_string_extension , 1)); + EXPECT_EQ("516", message.GetExtension(unittest::repeated_bytes_extension , 1)); + + EXPECT_EQ(517, message.GetExtension(unittest::repeatedgroup_extension , 1).a()); + EXPECT_EQ(518, message.GetExtension(unittest::repeated_nested_message_extension , 1).bb()); + EXPECT_EQ(519, message.GetExtension(unittest::repeated_foreign_message_extension, 1).c()); + EXPECT_EQ(520, message.GetExtension(unittest::repeated_import_message_extension , 1).d()); + + EXPECT_EQ(unittest::TestAllTypes::FOO, message.GetExtension(unittest::repeated_nested_enum_extension , 1)); + EXPECT_EQ(unittest::FOREIGN_FOO , message.GetExtension(unittest::repeated_foreign_enum_extension, 1)); + EXPECT_EQ(unittest_import::IMPORT_FOO, message.GetExtension(unittest::repeated_import_enum_extension , 1)); + + EXPECT_EQ("524", message.GetExtension(unittest::repeated_string_piece_extension, 1)); + EXPECT_EQ("525", message.GetExtension(unittest::repeated_cord_extension, 1)); +} + +// ------------------------------------------------------------------- + +void TestUtil::SetPackedExtensions(unittest::TestPackedExtensions* message) { + message->AddExtension(unittest::packed_int32_extension , 601); + message->AddExtension(unittest::packed_int64_extension , 602); + message->AddExtension(unittest::packed_uint32_extension , 603); + message->AddExtension(unittest::packed_uint64_extension , 604); + message->AddExtension(unittest::packed_sint32_extension , 605); + message->AddExtension(unittest::packed_sint64_extension , 606); + message->AddExtension(unittest::packed_fixed32_extension , 607); + message->AddExtension(unittest::packed_fixed64_extension , 608); + message->AddExtension(unittest::packed_sfixed32_extension, 609); + message->AddExtension(unittest::packed_sfixed64_extension, 610); + 
message->AddExtension(unittest::packed_float_extension , 611); + message->AddExtension(unittest::packed_double_extension , 612); + message->AddExtension(unittest::packed_bool_extension , true); + message->AddExtension(unittest::packed_enum_extension, unittest::FOREIGN_BAR); + // add a second one of each field + message->AddExtension(unittest::packed_int32_extension , 701); + message->AddExtension(unittest::packed_int64_extension , 702); + message->AddExtension(unittest::packed_uint32_extension , 703); + message->AddExtension(unittest::packed_uint64_extension , 704); + message->AddExtension(unittest::packed_sint32_extension , 705); + message->AddExtension(unittest::packed_sint64_extension , 706); + message->AddExtension(unittest::packed_fixed32_extension , 707); + message->AddExtension(unittest::packed_fixed64_extension , 708); + message->AddExtension(unittest::packed_sfixed32_extension, 709); + message->AddExtension(unittest::packed_sfixed64_extension, 710); + message->AddExtension(unittest::packed_float_extension , 711); + message->AddExtension(unittest::packed_double_extension , 712); + message->AddExtension(unittest::packed_bool_extension , false); + message->AddExtension(unittest::packed_enum_extension, unittest::FOREIGN_BAZ); +} + +// ------------------------------------------------------------------- + +void TestUtil::ModifyPackedExtensions(unittest::TestPackedExtensions* message) { + message->SetExtension(unittest::packed_int32_extension , 1, 801); + message->SetExtension(unittest::packed_int64_extension , 1, 802); + message->SetExtension(unittest::packed_uint32_extension , 1, 803); + message->SetExtension(unittest::packed_uint64_extension , 1, 804); + message->SetExtension(unittest::packed_sint32_extension , 1, 805); + message->SetExtension(unittest::packed_sint64_extension , 1, 806); + message->SetExtension(unittest::packed_fixed32_extension , 1, 807); + message->SetExtension(unittest::packed_fixed64_extension , 1, 808); + message->SetExtension(unittest::packed_sfixed32_extension, 1, 809); + message->SetExtension(unittest::packed_sfixed64_extension, 1, 810); + message->SetExtension(unittest::packed_float_extension , 1, 811); + message->SetExtension(unittest::packed_double_extension , 1, 812); + message->SetExtension(unittest::packed_bool_extension , 1, true); + message->SetExtension(unittest::packed_enum_extension , 1, + unittest::FOREIGN_FOO); +} + +// ------------------------------------------------------------------- + +void TestUtil::ExpectPackedExtensionsSet( + const unittest::TestPackedExtensions& message) { + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_int32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_int64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_uint32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_uint64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sint32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sint64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_fixed32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_fixed64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sfixed32_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sfixed64_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_float_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_double_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_bool_extension )); + 
ASSERT_EQ(2, message.ExtensionSize(unittest::packed_enum_extension )); + + EXPECT_EQ(601 , message.GetExtension(unittest::packed_int32_extension , 0)); + EXPECT_EQ(602 , message.GetExtension(unittest::packed_int64_extension , 0)); + EXPECT_EQ(603 , message.GetExtension(unittest::packed_uint32_extension , 0)); + EXPECT_EQ(604 , message.GetExtension(unittest::packed_uint64_extension , 0)); + EXPECT_EQ(605 , message.GetExtension(unittest::packed_sint32_extension , 0)); + EXPECT_EQ(606 , message.GetExtension(unittest::packed_sint64_extension , 0)); + EXPECT_EQ(607 , message.GetExtension(unittest::packed_fixed32_extension , 0)); + EXPECT_EQ(608 , message.GetExtension(unittest::packed_fixed64_extension , 0)); + EXPECT_EQ(609 , message.GetExtension(unittest::packed_sfixed32_extension, 0)); + EXPECT_EQ(610 , message.GetExtension(unittest::packed_sfixed64_extension, 0)); + EXPECT_EQ(611 , message.GetExtension(unittest::packed_float_extension , 0)); + EXPECT_EQ(612 , message.GetExtension(unittest::packed_double_extension , 0)); + EXPECT_EQ(true , message.GetExtension(unittest::packed_bool_extension , 0)); + EXPECT_EQ(unittest::FOREIGN_BAR, + message.GetExtension(unittest::packed_enum_extension, 0)); + EXPECT_EQ(701 , message.GetExtension(unittest::packed_int32_extension , 1)); + EXPECT_EQ(702 , message.GetExtension(unittest::packed_int64_extension , 1)); + EXPECT_EQ(703 , message.GetExtension(unittest::packed_uint32_extension , 1)); + EXPECT_EQ(704 , message.GetExtension(unittest::packed_uint64_extension , 1)); + EXPECT_EQ(705 , message.GetExtension(unittest::packed_sint32_extension , 1)); + EXPECT_EQ(706 , message.GetExtension(unittest::packed_sint64_extension , 1)); + EXPECT_EQ(707 , message.GetExtension(unittest::packed_fixed32_extension , 1)); + EXPECT_EQ(708 , message.GetExtension(unittest::packed_fixed64_extension , 1)); + EXPECT_EQ(709 , message.GetExtension(unittest::packed_sfixed32_extension, 1)); + EXPECT_EQ(710 , message.GetExtension(unittest::packed_sfixed64_extension, 1)); + EXPECT_EQ(711 , message.GetExtension(unittest::packed_float_extension , 1)); + EXPECT_EQ(712 , message.GetExtension(unittest::packed_double_extension , 1)); + EXPECT_EQ(false, message.GetExtension(unittest::packed_bool_extension , 1)); + EXPECT_EQ(unittest::FOREIGN_BAZ, + message.GetExtension(unittest::packed_enum_extension, 1)); +} + +// ------------------------------------------------------------------- + +void TestUtil::ExpectPackedExtensionsClear( + const unittest::TestPackedExtensions& message) { + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_int32_extension )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_int64_extension )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_uint32_extension )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_uint64_extension )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_sint32_extension )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_sint64_extension )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_fixed32_extension )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_fixed64_extension )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_sfixed32_extension)); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_sfixed64_extension)); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_float_extension )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_double_extension )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_bool_extension )); + EXPECT_EQ(0, 
message.ExtensionSize(unittest::packed_enum_extension )); +} + +// ------------------------------------------------------------------- + +void TestUtil::ExpectPackedExtensionsModified( + const unittest::TestPackedExtensions& message) { + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_int32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_int64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_uint32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_uint64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sint32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sint64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_fixed32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_fixed64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sfixed32_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sfixed64_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_float_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_double_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_bool_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_enum_extension )); + EXPECT_EQ(601 , message.GetExtension(unittest::packed_int32_extension , 0)); + EXPECT_EQ(602 , message.GetExtension(unittest::packed_int64_extension , 0)); + EXPECT_EQ(603 , message.GetExtension(unittest::packed_uint32_extension , 0)); + EXPECT_EQ(604 , message.GetExtension(unittest::packed_uint64_extension , 0)); + EXPECT_EQ(605 , message.GetExtension(unittest::packed_sint32_extension , 0)); + EXPECT_EQ(606 , message.GetExtension(unittest::packed_sint64_extension , 0)); + EXPECT_EQ(607 , message.GetExtension(unittest::packed_fixed32_extension , 0)); + EXPECT_EQ(608 , message.GetExtension(unittest::packed_fixed64_extension , 0)); + EXPECT_EQ(609 , message.GetExtension(unittest::packed_sfixed32_extension, 0)); + EXPECT_EQ(610 , message.GetExtension(unittest::packed_sfixed64_extension, 0)); + EXPECT_EQ(611 , message.GetExtension(unittest::packed_float_extension , 0)); + EXPECT_EQ(612 , message.GetExtension(unittest::packed_double_extension , 0)); + EXPECT_EQ(true , message.GetExtension(unittest::packed_bool_extension , 0)); + EXPECT_EQ(unittest::FOREIGN_BAR, + message.GetExtension(unittest::packed_enum_extension, 0)); + + // Actually verify the second (modified) elements now. 
+ EXPECT_EQ(801 , message.GetExtension(unittest::packed_int32_extension , 1)); + EXPECT_EQ(802 , message.GetExtension(unittest::packed_int64_extension , 1)); + EXPECT_EQ(803 , message.GetExtension(unittest::packed_uint32_extension , 1)); + EXPECT_EQ(804 , message.GetExtension(unittest::packed_uint64_extension , 1)); + EXPECT_EQ(805 , message.GetExtension(unittest::packed_sint32_extension , 1)); + EXPECT_EQ(806 , message.GetExtension(unittest::packed_sint64_extension , 1)); + EXPECT_EQ(807 , message.GetExtension(unittest::packed_fixed32_extension , 1)); + EXPECT_EQ(808 , message.GetExtension(unittest::packed_fixed64_extension , 1)); + EXPECT_EQ(809 , message.GetExtension(unittest::packed_sfixed32_extension, 1)); + EXPECT_EQ(810 , message.GetExtension(unittest::packed_sfixed64_extension, 1)); + EXPECT_EQ(811 , message.GetExtension(unittest::packed_float_extension , 1)); + EXPECT_EQ(812 , message.GetExtension(unittest::packed_double_extension , 1)); + EXPECT_EQ(true , message.GetExtension(unittest::packed_bool_extension , 1)); + EXPECT_EQ(unittest::FOREIGN_FOO, + message.GetExtension(unittest::packed_enum_extension, 1)); +} + +// ------------------------------------------------------------------- + +void TestUtil::ExpectAllFieldsAndExtensionsInOrder(const string& serialized) { + // We set each field individually, serialize separately, and concatenate all + // the strings in canonical order to determine the expected serialization. + string expected; + unittest::TestFieldOrderings message; + message.set_my_int(1); // Field 1. + message.AppendToString(&expected); + message.Clear(); + message.SetExtension(unittest::my_extension_int, 23); // Field 5. + message.AppendToString(&expected); + message.Clear(); + message.set_my_string("foo"); // Field 11. + message.AppendToString(&expected); + message.Clear(); + message.SetExtension(unittest::my_extension_string, "bar"); // Field 50. + message.AppendToString(&expected); + message.Clear(); + message.set_my_float(1.0); // Field 101. + message.AppendToString(&expected); + message.Clear(); + + // We don't EXPECT_EQ() since we don't want to print raw bytes to stdout. 
+ EXPECT_TRUE(serialized == expected); +} + +void TestUtil::ExpectLastRepeatedsRemoved( + const unittest::TestAllTypes& message) { + ASSERT_EQ(1, message.repeated_int32_size ()); + ASSERT_EQ(1, message.repeated_int64_size ()); + ASSERT_EQ(1, message.repeated_uint32_size ()); + ASSERT_EQ(1, message.repeated_uint64_size ()); + ASSERT_EQ(1, message.repeated_sint32_size ()); + ASSERT_EQ(1, message.repeated_sint64_size ()); + ASSERT_EQ(1, message.repeated_fixed32_size ()); + ASSERT_EQ(1, message.repeated_fixed64_size ()); + ASSERT_EQ(1, message.repeated_sfixed32_size()); + ASSERT_EQ(1, message.repeated_sfixed64_size()); + ASSERT_EQ(1, message.repeated_float_size ()); + ASSERT_EQ(1, message.repeated_double_size ()); + ASSERT_EQ(1, message.repeated_bool_size ()); + ASSERT_EQ(1, message.repeated_string_size ()); + ASSERT_EQ(1, message.repeated_bytes_size ()); + + ASSERT_EQ(1, message.repeatedgroup_size ()); + ASSERT_EQ(1, message.repeated_nested_message_size ()); + ASSERT_EQ(1, message.repeated_foreign_message_size()); + ASSERT_EQ(1, message.repeated_import_message_size ()); + ASSERT_EQ(1, message.repeated_nested_enum_size ()); + ASSERT_EQ(1, message.repeated_foreign_enum_size ()); + ASSERT_EQ(1, message.repeated_import_enum_size ()); + +#ifndef PROTOBUF_TEST_NO_DESCRIPTORS + ASSERT_EQ(1, message.repeated_string_piece_size()); + ASSERT_EQ(1, message.repeated_cord_size()); +#endif + + // Test that the remaining element is the correct one. + EXPECT_EQ(201 , message.repeated_int32 (0)); + EXPECT_EQ(202 , message.repeated_int64 (0)); + EXPECT_EQ(203 , message.repeated_uint32 (0)); + EXPECT_EQ(204 , message.repeated_uint64 (0)); + EXPECT_EQ(205 , message.repeated_sint32 (0)); + EXPECT_EQ(206 , message.repeated_sint64 (0)); + EXPECT_EQ(207 , message.repeated_fixed32 (0)); + EXPECT_EQ(208 , message.repeated_fixed64 (0)); + EXPECT_EQ(209 , message.repeated_sfixed32(0)); + EXPECT_EQ(210 , message.repeated_sfixed64(0)); + EXPECT_EQ(211 , message.repeated_float (0)); + EXPECT_EQ(212 , message.repeated_double (0)); + EXPECT_EQ(true , message.repeated_bool (0)); + EXPECT_EQ("215", message.repeated_string (0)); + EXPECT_EQ("216", message.repeated_bytes (0)); + + EXPECT_EQ(217, message.repeatedgroup (0).a()); + EXPECT_EQ(218, message.repeated_nested_message (0).bb()); + EXPECT_EQ(219, message.repeated_foreign_message(0).c()); + EXPECT_EQ(220, message.repeated_import_message (0).d()); + + EXPECT_EQ(unittest::TestAllTypes::BAR, message.repeated_nested_enum (0)); + EXPECT_EQ(unittest::FOREIGN_BAR , message.repeated_foreign_enum(0)); + EXPECT_EQ(unittest_import::IMPORT_BAR, message.repeated_import_enum (0)); +} + +void TestUtil::ExpectLastRepeatedExtensionsRemoved( + const unittest::TestAllExtensions& message) { + + // Test that one element was removed. 
+ ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_int32_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_int64_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_uint32_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_uint64_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_sint32_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_sint64_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_fixed32_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_fixed64_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_sfixed32_extension)); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_sfixed64_extension)); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_float_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_double_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_bool_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_string_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_bytes_extension )); + + ASSERT_EQ(1, message.ExtensionSize(unittest::repeatedgroup_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_nested_message_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_foreign_message_extension)); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_import_message_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_nested_enum_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_foreign_enum_extension )); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_import_enum_extension )); + + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_string_piece_extension)); + ASSERT_EQ(1, message.ExtensionSize(unittest::repeated_cord_extension)); + + // Test that the remaining element is the correct one. 
+ EXPECT_EQ(201 , message.GetExtension(unittest::repeated_int32_extension , 0)); + EXPECT_EQ(202 , message.GetExtension(unittest::repeated_int64_extension , 0)); + EXPECT_EQ(203 , message.GetExtension(unittest::repeated_uint32_extension , 0)); + EXPECT_EQ(204 , message.GetExtension(unittest::repeated_uint64_extension , 0)); + EXPECT_EQ(205 , message.GetExtension(unittest::repeated_sint32_extension , 0)); + EXPECT_EQ(206 , message.GetExtension(unittest::repeated_sint64_extension , 0)); + EXPECT_EQ(207 , message.GetExtension(unittest::repeated_fixed32_extension , 0)); + EXPECT_EQ(208 , message.GetExtension(unittest::repeated_fixed64_extension , 0)); + EXPECT_EQ(209 , message.GetExtension(unittest::repeated_sfixed32_extension, 0)); + EXPECT_EQ(210 , message.GetExtension(unittest::repeated_sfixed64_extension, 0)); + EXPECT_EQ(211 , message.GetExtension(unittest::repeated_float_extension , 0)); + EXPECT_EQ(212 , message.GetExtension(unittest::repeated_double_extension , 0)); + EXPECT_EQ(true , message.GetExtension(unittest::repeated_bool_extension , 0)); + EXPECT_EQ("215", message.GetExtension(unittest::repeated_string_extension , 0)); + EXPECT_EQ("216", message.GetExtension(unittest::repeated_bytes_extension , 0)); + + EXPECT_EQ(217, message.GetExtension(unittest::repeatedgroup_extension , 0).a()); + EXPECT_EQ(218, message.GetExtension(unittest::repeated_nested_message_extension , 0).bb()); + EXPECT_EQ(219, message.GetExtension(unittest::repeated_foreign_message_extension, 0).c()); + EXPECT_EQ(220, message.GetExtension(unittest::repeated_import_message_extension , 0).d()); + + EXPECT_EQ(unittest::TestAllTypes::BAR, message.GetExtension(unittest::repeated_nested_enum_extension , 0)); + EXPECT_EQ(unittest::FOREIGN_BAR , message.GetExtension(unittest::repeated_foreign_enum_extension, 0)); + EXPECT_EQ(unittest_import::IMPORT_BAR, message.GetExtension(unittest::repeated_import_enum_extension , 0)); + + EXPECT_EQ("224", message.GetExtension(unittest::repeated_string_piece_extension, 0)); + EXPECT_EQ("225", message.GetExtension(unittest::repeated_cord_extension, 0)); +} + +void TestUtil::ExpectRepeatedsSwapped( + const unittest::TestAllTypes& message) { + ASSERT_EQ(2, message.repeated_int32_size ()); + ASSERT_EQ(2, message.repeated_int64_size ()); + ASSERT_EQ(2, message.repeated_uint32_size ()); + ASSERT_EQ(2, message.repeated_uint64_size ()); + ASSERT_EQ(2, message.repeated_sint32_size ()); + ASSERT_EQ(2, message.repeated_sint64_size ()); + ASSERT_EQ(2, message.repeated_fixed32_size ()); + ASSERT_EQ(2, message.repeated_fixed64_size ()); + ASSERT_EQ(2, message.repeated_sfixed32_size()); + ASSERT_EQ(2, message.repeated_sfixed64_size()); + ASSERT_EQ(2, message.repeated_float_size ()); + ASSERT_EQ(2, message.repeated_double_size ()); + ASSERT_EQ(2, message.repeated_bool_size ()); + ASSERT_EQ(2, message.repeated_string_size ()); + ASSERT_EQ(2, message.repeated_bytes_size ()); + + ASSERT_EQ(2, message.repeatedgroup_size ()); + ASSERT_EQ(2, message.repeated_nested_message_size ()); + ASSERT_EQ(2, message.repeated_foreign_message_size()); + ASSERT_EQ(2, message.repeated_import_message_size ()); + ASSERT_EQ(2, message.repeated_nested_enum_size ()); + ASSERT_EQ(2, message.repeated_foreign_enum_size ()); + ASSERT_EQ(2, message.repeated_import_enum_size ()); + +#ifndef PROTOBUF_TEST_NO_DESCRIPTORS + ASSERT_EQ(2, message.repeated_string_piece_size()); + ASSERT_EQ(2, message.repeated_cord_size()); +#endif + + // Test that the first element and second element are flipped. 
+ EXPECT_EQ(201 , message.repeated_int32 (1)); + EXPECT_EQ(202 , message.repeated_int64 (1)); + EXPECT_EQ(203 , message.repeated_uint32 (1)); + EXPECT_EQ(204 , message.repeated_uint64 (1)); + EXPECT_EQ(205 , message.repeated_sint32 (1)); + EXPECT_EQ(206 , message.repeated_sint64 (1)); + EXPECT_EQ(207 , message.repeated_fixed32 (1)); + EXPECT_EQ(208 , message.repeated_fixed64 (1)); + EXPECT_EQ(209 , message.repeated_sfixed32(1)); + EXPECT_EQ(210 , message.repeated_sfixed64(1)); + EXPECT_EQ(211 , message.repeated_float (1)); + EXPECT_EQ(212 , message.repeated_double (1)); + EXPECT_EQ(true , message.repeated_bool (1)); + EXPECT_EQ("215", message.repeated_string (1)); + EXPECT_EQ("216", message.repeated_bytes (1)); + + EXPECT_EQ(217, message.repeatedgroup (1).a()); + EXPECT_EQ(218, message.repeated_nested_message (1).bb()); + EXPECT_EQ(219, message.repeated_foreign_message(1).c()); + EXPECT_EQ(220, message.repeated_import_message (1).d()); + + EXPECT_EQ(unittest::TestAllTypes::BAR, message.repeated_nested_enum (1)); + EXPECT_EQ(unittest::FOREIGN_BAR , message.repeated_foreign_enum(1)); + EXPECT_EQ(unittest_import::IMPORT_BAR, message.repeated_import_enum (1)); + + EXPECT_EQ(301 , message.repeated_int32 (0)); + EXPECT_EQ(302 , message.repeated_int64 (0)); + EXPECT_EQ(303 , message.repeated_uint32 (0)); + EXPECT_EQ(304 , message.repeated_uint64 (0)); + EXPECT_EQ(305 , message.repeated_sint32 (0)); + EXPECT_EQ(306 , message.repeated_sint64 (0)); + EXPECT_EQ(307 , message.repeated_fixed32 (0)); + EXPECT_EQ(308 , message.repeated_fixed64 (0)); + EXPECT_EQ(309 , message.repeated_sfixed32(0)); + EXPECT_EQ(310 , message.repeated_sfixed64(0)); + EXPECT_EQ(311 , message.repeated_float (0)); + EXPECT_EQ(312 , message.repeated_double (0)); + EXPECT_EQ(false, message.repeated_bool (0)); + EXPECT_EQ("315", message.repeated_string (0)); + EXPECT_EQ("316", message.repeated_bytes (0)); + + EXPECT_EQ(317, message.repeatedgroup (0).a()); + EXPECT_EQ(318, message.repeated_nested_message (0).bb()); + EXPECT_EQ(319, message.repeated_foreign_message(0).c()); + EXPECT_EQ(320, message.repeated_import_message (0).d()); + + EXPECT_EQ(unittest::TestAllTypes::BAZ, message.repeated_nested_enum (0)); + EXPECT_EQ(unittest::FOREIGN_BAZ , message.repeated_foreign_enum(0)); + EXPECT_EQ(unittest_import::IMPORT_BAZ, message.repeated_import_enum (0)); +} + +void TestUtil::ExpectRepeatedExtensionsSwapped( + const unittest::TestAllExtensions& message) { + + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_int32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_int64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_uint32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_uint64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sint32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sint64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_fixed32_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_fixed64_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sfixed32_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sfixed64_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_float_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_double_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_bool_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_string_extension )); + ASSERT_EQ(2, 
message.ExtensionSize(unittest::repeated_bytes_extension )); + + ASSERT_EQ(2, message.ExtensionSize(unittest::repeatedgroup_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_nested_message_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_foreign_message_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_import_message_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_nested_enum_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_foreign_enum_extension )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_import_enum_extension )); + + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_string_piece_extension)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_cord_extension)); + + EXPECT_EQ(201 , message.GetExtension(unittest::repeated_int32_extension , 1)); + EXPECT_EQ(202 , message.GetExtension(unittest::repeated_int64_extension , 1)); + EXPECT_EQ(203 , message.GetExtension(unittest::repeated_uint32_extension , 1)); + EXPECT_EQ(204 , message.GetExtension(unittest::repeated_uint64_extension , 1)); + EXPECT_EQ(205 , message.GetExtension(unittest::repeated_sint32_extension , 1)); + EXPECT_EQ(206 , message.GetExtension(unittest::repeated_sint64_extension , 1)); + EXPECT_EQ(207 , message.GetExtension(unittest::repeated_fixed32_extension , 1)); + EXPECT_EQ(208 , message.GetExtension(unittest::repeated_fixed64_extension , 1)); + EXPECT_EQ(209 , message.GetExtension(unittest::repeated_sfixed32_extension, 1)); + EXPECT_EQ(210 , message.GetExtension(unittest::repeated_sfixed64_extension, 1)); + EXPECT_EQ(211 , message.GetExtension(unittest::repeated_float_extension , 1)); + EXPECT_EQ(212 , message.GetExtension(unittest::repeated_double_extension , 1)); + EXPECT_EQ(true , message.GetExtension(unittest::repeated_bool_extension , 1)); + EXPECT_EQ("215", message.GetExtension(unittest::repeated_string_extension , 1)); + EXPECT_EQ("216", message.GetExtension(unittest::repeated_bytes_extension , 1)); + + EXPECT_EQ(217, message.GetExtension(unittest::repeatedgroup_extension , 1).a()); + EXPECT_EQ(218, message.GetExtension(unittest::repeated_nested_message_extension , 1).bb()); + EXPECT_EQ(219, message.GetExtension(unittest::repeated_foreign_message_extension, 1).c()); + EXPECT_EQ(220, message.GetExtension(unittest::repeated_import_message_extension , 1).d()); + + EXPECT_EQ(unittest::TestAllTypes::BAR, message.GetExtension(unittest::repeated_nested_enum_extension , 1)); + EXPECT_EQ(unittest::FOREIGN_BAR , message.GetExtension(unittest::repeated_foreign_enum_extension, 1)); + EXPECT_EQ(unittest_import::IMPORT_BAR, message.GetExtension(unittest::repeated_import_enum_extension , 1)); + + EXPECT_EQ("224", message.GetExtension(unittest::repeated_string_piece_extension, 1)); + EXPECT_EQ("225", message.GetExtension(unittest::repeated_cord_extension, 1)); + + EXPECT_EQ(301 , message.GetExtension(unittest::repeated_int32_extension , 0)); + EXPECT_EQ(302 , message.GetExtension(unittest::repeated_int64_extension , 0)); + EXPECT_EQ(303 , message.GetExtension(unittest::repeated_uint32_extension , 0)); + EXPECT_EQ(304 , message.GetExtension(unittest::repeated_uint64_extension , 0)); + EXPECT_EQ(305 , message.GetExtension(unittest::repeated_sint32_extension , 0)); + EXPECT_EQ(306 , message.GetExtension(unittest::repeated_sint64_extension , 0)); + EXPECT_EQ(307 , message.GetExtension(unittest::repeated_fixed32_extension , 0)); + EXPECT_EQ(308 , message.GetExtension(unittest::repeated_fixed64_extension , 0)); + 
EXPECT_EQ(309 , message.GetExtension(unittest::repeated_sfixed32_extension, 0)); + EXPECT_EQ(310 , message.GetExtension(unittest::repeated_sfixed64_extension, 0)); + EXPECT_EQ(311 , message.GetExtension(unittest::repeated_float_extension , 0)); + EXPECT_EQ(312 , message.GetExtension(unittest::repeated_double_extension , 0)); + EXPECT_EQ(false, message.GetExtension(unittest::repeated_bool_extension , 0)); + EXPECT_EQ("315", message.GetExtension(unittest::repeated_string_extension , 0)); + EXPECT_EQ("316", message.GetExtension(unittest::repeated_bytes_extension , 0)); + + EXPECT_EQ(317, message.GetExtension(unittest::repeatedgroup_extension , 0).a()); + EXPECT_EQ(318, message.GetExtension(unittest::repeated_nested_message_extension , 0).bb()); + EXPECT_EQ(319, message.GetExtension(unittest::repeated_foreign_message_extension, 0).c()); + EXPECT_EQ(320, message.GetExtension(unittest::repeated_import_message_extension , 0).d()); + + EXPECT_EQ(unittest::TestAllTypes::BAZ, message.GetExtension(unittest::repeated_nested_enum_extension , 0)); + EXPECT_EQ(unittest::FOREIGN_BAZ , message.GetExtension(unittest::repeated_foreign_enum_extension, 0)); + EXPECT_EQ(unittest_import::IMPORT_BAZ, message.GetExtension(unittest::repeated_import_enum_extension , 0)); + + EXPECT_EQ("324", message.GetExtension(unittest::repeated_string_piece_extension, 0)); + EXPECT_EQ("325", message.GetExtension(unittest::repeated_cord_extension, 0)); +} + +// =================================================================== + +TestUtil::ReflectionTester::ReflectionTester( + const Descriptor* base_descriptor) + : base_descriptor_(base_descriptor) { + + const DescriptorPool* pool = base_descriptor->file()->pool(); + + nested_b_ = + pool->FindFieldByName("protobuf_unittest.TestAllTypes.NestedMessage.bb"); + foreign_c_ = + pool->FindFieldByName("protobuf_unittest.ForeignMessage.c"); + import_d_ = + pool->FindFieldByName("protobuf_unittest_import.ImportMessage.d"); + nested_foo_ = + pool->FindEnumValueByName("protobuf_unittest.TestAllTypes.FOO"); + nested_bar_ = + pool->FindEnumValueByName("protobuf_unittest.TestAllTypes.BAR"); + nested_baz_ = + pool->FindEnumValueByName("protobuf_unittest.TestAllTypes.BAZ"); + foreign_foo_ = + pool->FindEnumValueByName("protobuf_unittest.FOREIGN_FOO"); + foreign_bar_ = + pool->FindEnumValueByName("protobuf_unittest.FOREIGN_BAR"); + foreign_baz_ = + pool->FindEnumValueByName("protobuf_unittest.FOREIGN_BAZ"); + import_foo_ = + pool->FindEnumValueByName("protobuf_unittest_import.IMPORT_FOO"); + import_bar_ = + pool->FindEnumValueByName("protobuf_unittest_import.IMPORT_BAR"); + import_baz_ = + pool->FindEnumValueByName("protobuf_unittest_import.IMPORT_BAZ"); + + if (base_descriptor_->name() == "TestAllExtensions") { + group_a_ = + pool->FindFieldByName("protobuf_unittest.OptionalGroup_extension.a"); + repeated_group_a_ = + pool->FindFieldByName("protobuf_unittest.RepeatedGroup_extension.a"); + } else { + group_a_ = + pool->FindFieldByName("protobuf_unittest.TestAllTypes.OptionalGroup.a"); + repeated_group_a_ = + pool->FindFieldByName("protobuf_unittest.TestAllTypes.RepeatedGroup.a"); + } + + EXPECT_TRUE(group_a_ != NULL); + EXPECT_TRUE(repeated_group_a_ != NULL); + EXPECT_TRUE(nested_b_ != NULL); + EXPECT_TRUE(foreign_c_ != NULL); + EXPECT_TRUE(import_d_ != NULL); + EXPECT_TRUE(nested_foo_ != NULL); + EXPECT_TRUE(nested_bar_ != NULL); + EXPECT_TRUE(nested_baz_ != NULL); + EXPECT_TRUE(foreign_foo_ != NULL); + EXPECT_TRUE(foreign_bar_ != NULL); + EXPECT_TRUE(foreign_baz_ != NULL); + 
EXPECT_TRUE(import_foo_ != NULL); + EXPECT_TRUE(import_bar_ != NULL); + EXPECT_TRUE(import_baz_ != NULL); +} + +// Shorthand to get a FieldDescriptor for a field of unittest::TestAllTypes. +const FieldDescriptor* TestUtil::ReflectionTester::F(const string& name) { + const FieldDescriptor* result = NULL; + if (base_descriptor_->name() == "TestAllExtensions" || + base_descriptor_->name() == "TestPackedExtensions") { + result = base_descriptor_->file()->FindExtensionByName(name + "_extension"); + } else { + result = base_descriptor_->FindFieldByName(name); + } + GOOGLE_CHECK(result != NULL); + return result; +} + +// ------------------------------------------------------------------- + +void TestUtil::ReflectionTester::SetAllFieldsViaReflection(Message* message) { + const Reflection* reflection = message->GetReflection(); + Message* sub_message; + + reflection->SetInt32 (message, F("optional_int32" ), 101); + reflection->SetInt64 (message, F("optional_int64" ), 102); + reflection->SetUInt32(message, F("optional_uint32" ), 103); + reflection->SetUInt64(message, F("optional_uint64" ), 104); + reflection->SetInt32 (message, F("optional_sint32" ), 105); + reflection->SetInt64 (message, F("optional_sint64" ), 106); + reflection->SetUInt32(message, F("optional_fixed32" ), 107); + reflection->SetUInt64(message, F("optional_fixed64" ), 108); + reflection->SetInt32 (message, F("optional_sfixed32"), 109); + reflection->SetInt64 (message, F("optional_sfixed64"), 110); + reflection->SetFloat (message, F("optional_float" ), 111); + reflection->SetDouble(message, F("optional_double" ), 112); + reflection->SetBool (message, F("optional_bool" ), true); + reflection->SetString(message, F("optional_string" ), "115"); + reflection->SetString(message, F("optional_bytes" ), "116"); + + sub_message = reflection->MutableMessage(message, F("optionalgroup")); + sub_message->GetReflection()->SetInt32(sub_message, group_a_, 117); + sub_message = reflection->MutableMessage(message, F("optional_nested_message")); + sub_message->GetReflection()->SetInt32(sub_message, nested_b_, 118); + sub_message = reflection->MutableMessage(message, F("optional_foreign_message")); + sub_message->GetReflection()->SetInt32(sub_message, foreign_c_, 119); + sub_message = reflection->MutableMessage(message, F("optional_import_message")); + sub_message->GetReflection()->SetInt32(sub_message, import_d_, 120); + + reflection->SetEnum(message, F("optional_nested_enum" ), nested_baz_); + reflection->SetEnum(message, F("optional_foreign_enum"), foreign_baz_); + reflection->SetEnum(message, F("optional_import_enum" ), import_baz_); + + reflection->SetString(message, F("optional_string_piece"), "124"); + reflection->SetString(message, F("optional_cord"), "125"); + + // ----------------------------------------------------------------- + + reflection->AddInt32 (message, F("repeated_int32" ), 201); + reflection->AddInt64 (message, F("repeated_int64" ), 202); + reflection->AddUInt32(message, F("repeated_uint32" ), 203); + reflection->AddUInt64(message, F("repeated_uint64" ), 204); + reflection->AddInt32 (message, F("repeated_sint32" ), 205); + reflection->AddInt64 (message, F("repeated_sint64" ), 206); + reflection->AddUInt32(message, F("repeated_fixed32" ), 207); + reflection->AddUInt64(message, F("repeated_fixed64" ), 208); + reflection->AddInt32 (message, F("repeated_sfixed32"), 209); + reflection->AddInt64 (message, F("repeated_sfixed64"), 210); + reflection->AddFloat (message, F("repeated_float" ), 211); + reflection->AddDouble(message, 
F("repeated_double" ), 212); + reflection->AddBool (message, F("repeated_bool" ), true); + reflection->AddString(message, F("repeated_string" ), "215"); + reflection->AddString(message, F("repeated_bytes" ), "216"); + + sub_message = reflection->AddMessage(message, F("repeatedgroup")); + sub_message->GetReflection()->SetInt32(sub_message, repeated_group_a_, 217); + sub_message = reflection->AddMessage(message, F("repeated_nested_message")); + sub_message->GetReflection()->SetInt32(sub_message, nested_b_, 218); + sub_message = reflection->AddMessage(message, F("repeated_foreign_message")); + sub_message->GetReflection()->SetInt32(sub_message, foreign_c_, 219); + sub_message = reflection->AddMessage(message, F("repeated_import_message")); + sub_message->GetReflection()->SetInt32(sub_message, import_d_, 220); + + reflection->AddEnum(message, F("repeated_nested_enum" ), nested_bar_); + reflection->AddEnum(message, F("repeated_foreign_enum"), foreign_bar_); + reflection->AddEnum(message, F("repeated_import_enum" ), import_bar_); + + reflection->AddString(message, F("repeated_string_piece"), "224"); + reflection->AddString(message, F("repeated_cord"), "225"); + + // Add a second one of each field. + reflection->AddInt32 (message, F("repeated_int32" ), 301); + reflection->AddInt64 (message, F("repeated_int64" ), 302); + reflection->AddUInt32(message, F("repeated_uint32" ), 303); + reflection->AddUInt64(message, F("repeated_uint64" ), 304); + reflection->AddInt32 (message, F("repeated_sint32" ), 305); + reflection->AddInt64 (message, F("repeated_sint64" ), 306); + reflection->AddUInt32(message, F("repeated_fixed32" ), 307); + reflection->AddUInt64(message, F("repeated_fixed64" ), 308); + reflection->AddInt32 (message, F("repeated_sfixed32"), 309); + reflection->AddInt64 (message, F("repeated_sfixed64"), 310); + reflection->AddFloat (message, F("repeated_float" ), 311); + reflection->AddDouble(message, F("repeated_double" ), 312); + reflection->AddBool (message, F("repeated_bool" ), false); + reflection->AddString(message, F("repeated_string" ), "315"); + reflection->AddString(message, F("repeated_bytes" ), "316"); + + sub_message = reflection->AddMessage(message, F("repeatedgroup")); + sub_message->GetReflection()->SetInt32(sub_message, repeated_group_a_, 317); + sub_message = reflection->AddMessage(message, F("repeated_nested_message")); + sub_message->GetReflection()->SetInt32(sub_message, nested_b_, 318); + sub_message = reflection->AddMessage(message, F("repeated_foreign_message")); + sub_message->GetReflection()->SetInt32(sub_message, foreign_c_, 319); + sub_message = reflection->AddMessage(message, F("repeated_import_message")); + sub_message->GetReflection()->SetInt32(sub_message, import_d_, 320); + + reflection->AddEnum(message, F("repeated_nested_enum" ), nested_baz_); + reflection->AddEnum(message, F("repeated_foreign_enum"), foreign_baz_); + reflection->AddEnum(message, F("repeated_import_enum" ), import_baz_); + + reflection->AddString(message, F("repeated_string_piece"), "324"); + reflection->AddString(message, F("repeated_cord"), "325"); + + // ----------------------------------------------------------------- + + reflection->SetInt32 (message, F("default_int32" ), 401); + reflection->SetInt64 (message, F("default_int64" ), 402); + reflection->SetUInt32(message, F("default_uint32" ), 403); + reflection->SetUInt64(message, F("default_uint64" ), 404); + reflection->SetInt32 (message, F("default_sint32" ), 405); + reflection->SetInt64 (message, F("default_sint64" ), 406); + 
reflection->SetUInt32(message, F("default_fixed32" ), 407); + reflection->SetUInt64(message, F("default_fixed64" ), 408); + reflection->SetInt32 (message, F("default_sfixed32"), 409); + reflection->SetInt64 (message, F("default_sfixed64"), 410); + reflection->SetFloat (message, F("default_float" ), 411); + reflection->SetDouble(message, F("default_double" ), 412); + reflection->SetBool (message, F("default_bool" ), false); + reflection->SetString(message, F("default_string" ), "415"); + reflection->SetString(message, F("default_bytes" ), "416"); + + reflection->SetEnum(message, F("default_nested_enum" ), nested_foo_); + reflection->SetEnum(message, F("default_foreign_enum"), foreign_foo_); + reflection->SetEnum(message, F("default_import_enum" ), import_foo_); + + reflection->SetString(message, F("default_string_piece"), "424"); + reflection->SetString(message, F("default_cord"), "425"); +} + +void TestUtil::ReflectionTester::SetPackedFieldsViaReflection( + Message* message) { + const Reflection* reflection = message->GetReflection(); + reflection->AddInt32 (message, F("packed_int32" ), 601); + reflection->AddInt64 (message, F("packed_int64" ), 602); + reflection->AddUInt32(message, F("packed_uint32" ), 603); + reflection->AddUInt64(message, F("packed_uint64" ), 604); + reflection->AddInt32 (message, F("packed_sint32" ), 605); + reflection->AddInt64 (message, F("packed_sint64" ), 606); + reflection->AddUInt32(message, F("packed_fixed32" ), 607); + reflection->AddUInt64(message, F("packed_fixed64" ), 608); + reflection->AddInt32 (message, F("packed_sfixed32"), 609); + reflection->AddInt64 (message, F("packed_sfixed64"), 610); + reflection->AddFloat (message, F("packed_float" ), 611); + reflection->AddDouble(message, F("packed_double" ), 612); + reflection->AddBool (message, F("packed_bool" ), true); + reflection->AddEnum (message, F("packed_enum" ), foreign_bar_); + + reflection->AddInt32 (message, F("packed_int32" ), 701); + reflection->AddInt64 (message, F("packed_int64" ), 702); + reflection->AddUInt32(message, F("packed_uint32" ), 703); + reflection->AddUInt64(message, F("packed_uint64" ), 704); + reflection->AddInt32 (message, F("packed_sint32" ), 705); + reflection->AddInt64 (message, F("packed_sint64" ), 706); + reflection->AddUInt32(message, F("packed_fixed32" ), 707); + reflection->AddUInt64(message, F("packed_fixed64" ), 708); + reflection->AddInt32 (message, F("packed_sfixed32"), 709); + reflection->AddInt64 (message, F("packed_sfixed64"), 710); + reflection->AddFloat (message, F("packed_float" ), 711); + reflection->AddDouble(message, F("packed_double" ), 712); + reflection->AddBool (message, F("packed_bool" ), false); + reflection->AddEnum (message, F("packed_enum" ), foreign_baz_); +} + +// ------------------------------------------------------------------- + +void TestUtil::ReflectionTester::ExpectAllFieldsSetViaReflection( + const Message& message) { + // We have to split this into three function otherwise it creates a stack + // frame so large that it triggers a warning. 
+ ExpectAllFieldsSetViaReflection1(message); + ExpectAllFieldsSetViaReflection2(message); + ExpectAllFieldsSetViaReflection3(message); +} + +void TestUtil::ReflectionTester::ExpectAllFieldsSetViaReflection1( + const Message& message) { + const Reflection* reflection = message.GetReflection(); + string scratch; + const Message* sub_message; + + EXPECT_TRUE(reflection->HasField(message, F("optional_int32" ))); + EXPECT_TRUE(reflection->HasField(message, F("optional_int64" ))); + EXPECT_TRUE(reflection->HasField(message, F("optional_uint32" ))); + EXPECT_TRUE(reflection->HasField(message, F("optional_uint64" ))); + EXPECT_TRUE(reflection->HasField(message, F("optional_sint32" ))); + EXPECT_TRUE(reflection->HasField(message, F("optional_sint64" ))); + EXPECT_TRUE(reflection->HasField(message, F("optional_fixed32" ))); + EXPECT_TRUE(reflection->HasField(message, F("optional_fixed64" ))); + EXPECT_TRUE(reflection->HasField(message, F("optional_sfixed32"))); + EXPECT_TRUE(reflection->HasField(message, F("optional_sfixed64"))); + EXPECT_TRUE(reflection->HasField(message, F("optional_float" ))); + EXPECT_TRUE(reflection->HasField(message, F("optional_double" ))); + EXPECT_TRUE(reflection->HasField(message, F("optional_bool" ))); + EXPECT_TRUE(reflection->HasField(message, F("optional_string" ))); + EXPECT_TRUE(reflection->HasField(message, F("optional_bytes" ))); + + EXPECT_TRUE(reflection->HasField(message, F("optionalgroup" ))); + EXPECT_TRUE(reflection->HasField(message, F("optional_nested_message" ))); + EXPECT_TRUE(reflection->HasField(message, F("optional_foreign_message"))); + EXPECT_TRUE(reflection->HasField(message, F("optional_import_message" ))); + + sub_message = &reflection->GetMessage(message, F("optionalgroup")); + EXPECT_TRUE(sub_message->GetReflection()->HasField(*sub_message, group_a_)); + sub_message = &reflection->GetMessage(message, F("optional_nested_message")); + EXPECT_TRUE(sub_message->GetReflection()->HasField(*sub_message, nested_b_)); + sub_message = &reflection->GetMessage(message, F("optional_foreign_message")); + EXPECT_TRUE(sub_message->GetReflection()->HasField(*sub_message, foreign_c_)); + sub_message = &reflection->GetMessage(message, F("optional_import_message")); + EXPECT_TRUE(sub_message->GetReflection()->HasField(*sub_message, import_d_)); + + EXPECT_TRUE(reflection->HasField(message, F("optional_nested_enum" ))); + EXPECT_TRUE(reflection->HasField(message, F("optional_foreign_enum"))); + EXPECT_TRUE(reflection->HasField(message, F("optional_import_enum" ))); + + EXPECT_TRUE(reflection->HasField(message, F("optional_string_piece"))); + EXPECT_TRUE(reflection->HasField(message, F("optional_cord"))); + + EXPECT_EQ(101 , reflection->GetInt32 (message, F("optional_int32" ))); + EXPECT_EQ(102 , reflection->GetInt64 (message, F("optional_int64" ))); + EXPECT_EQ(103 , reflection->GetUInt32(message, F("optional_uint32" ))); + EXPECT_EQ(104 , reflection->GetUInt64(message, F("optional_uint64" ))); + EXPECT_EQ(105 , reflection->GetInt32 (message, F("optional_sint32" ))); + EXPECT_EQ(106 , reflection->GetInt64 (message, F("optional_sint64" ))); + EXPECT_EQ(107 , reflection->GetUInt32(message, F("optional_fixed32" ))); + EXPECT_EQ(108 , reflection->GetUInt64(message, F("optional_fixed64" ))); + EXPECT_EQ(109 , reflection->GetInt32 (message, F("optional_sfixed32"))); + EXPECT_EQ(110 , reflection->GetInt64 (message, F("optional_sfixed64"))); + EXPECT_EQ(111 , reflection->GetFloat (message, F("optional_float" ))); + EXPECT_EQ(112 , reflection->GetDouble(message, 
F("optional_double" ))); + EXPECT_EQ(true , reflection->GetBool (message, F("optional_bool" ))); + EXPECT_EQ("115", reflection->GetString(message, F("optional_string" ))); + EXPECT_EQ("116", reflection->GetString(message, F("optional_bytes" ))); + + EXPECT_EQ("115", reflection->GetStringReference(message, F("optional_string"), &scratch)); + EXPECT_EQ("116", reflection->GetStringReference(message, F("optional_bytes" ), &scratch)); + + sub_message = &reflection->GetMessage(message, F("optionalgroup")); + EXPECT_EQ(117, sub_message->GetReflection()->GetInt32(*sub_message, group_a_)); + sub_message = &reflection->GetMessage(message, F("optional_nested_message")); + EXPECT_EQ(118, sub_message->GetReflection()->GetInt32(*sub_message, nested_b_)); + sub_message = &reflection->GetMessage(message, F("optional_foreign_message")); + EXPECT_EQ(119, sub_message->GetReflection()->GetInt32(*sub_message, foreign_c_)); + sub_message = &reflection->GetMessage(message, F("optional_import_message")); + EXPECT_EQ(120, sub_message->GetReflection()->GetInt32(*sub_message, import_d_)); + + EXPECT_EQ( nested_baz_, reflection->GetEnum(message, F("optional_nested_enum" ))); + EXPECT_EQ(foreign_baz_, reflection->GetEnum(message, F("optional_foreign_enum"))); + EXPECT_EQ( import_baz_, reflection->GetEnum(message, F("optional_import_enum" ))); + + EXPECT_EQ("124", reflection->GetString(message, F("optional_string_piece"))); + EXPECT_EQ("124", reflection->GetStringReference(message, F("optional_string_piece"), &scratch)); + + EXPECT_EQ("125", reflection->GetString(message, F("optional_cord"))); + EXPECT_EQ("125", reflection->GetStringReference(message, F("optional_cord"), &scratch)); +} + +void TestUtil::ReflectionTester::ExpectAllFieldsSetViaReflection2( + const Message& message) { + const Reflection* reflection = message.GetReflection(); + string scratch; + const Message* sub_message; + + // ----------------------------------------------------------------- + + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_int32" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_int64" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_uint32" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_uint64" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_sint32" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_sint64" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_fixed32" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_fixed64" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_sfixed32"))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_sfixed64"))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_float" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_double" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_bool" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_string" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_bytes" ))); + + ASSERT_EQ(2, reflection->FieldSize(message, F("repeatedgroup" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_nested_message" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_foreign_message"))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_import_message" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_nested_enum" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_foreign_enum" ))); + ASSERT_EQ(2, reflection->FieldSize(message, 
F("repeated_import_enum" ))); + + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_string_piece"))); + ASSERT_EQ(2, reflection->FieldSize(message, F("repeated_cord"))); + + EXPECT_EQ(201 , reflection->GetRepeatedInt32 (message, F("repeated_int32" ), 0)); + EXPECT_EQ(202 , reflection->GetRepeatedInt64 (message, F("repeated_int64" ), 0)); + EXPECT_EQ(203 , reflection->GetRepeatedUInt32(message, F("repeated_uint32" ), 0)); + EXPECT_EQ(204 , reflection->GetRepeatedUInt64(message, F("repeated_uint64" ), 0)); + EXPECT_EQ(205 , reflection->GetRepeatedInt32 (message, F("repeated_sint32" ), 0)); + EXPECT_EQ(206 , reflection->GetRepeatedInt64 (message, F("repeated_sint64" ), 0)); + EXPECT_EQ(207 , reflection->GetRepeatedUInt32(message, F("repeated_fixed32" ), 0)); + EXPECT_EQ(208 , reflection->GetRepeatedUInt64(message, F("repeated_fixed64" ), 0)); + EXPECT_EQ(209 , reflection->GetRepeatedInt32 (message, F("repeated_sfixed32"), 0)); + EXPECT_EQ(210 , reflection->GetRepeatedInt64 (message, F("repeated_sfixed64"), 0)); + EXPECT_EQ(211 , reflection->GetRepeatedFloat (message, F("repeated_float" ), 0)); + EXPECT_EQ(212 , reflection->GetRepeatedDouble(message, F("repeated_double" ), 0)); + EXPECT_EQ(true , reflection->GetRepeatedBool (message, F("repeated_bool" ), 0)); + EXPECT_EQ("215", reflection->GetRepeatedString(message, F("repeated_string" ), 0)); + EXPECT_EQ("216", reflection->GetRepeatedString(message, F("repeated_bytes" ), 0)); + + EXPECT_EQ("215", reflection->GetRepeatedStringReference(message, F("repeated_string"), 0, &scratch)); + EXPECT_EQ("216", reflection->GetRepeatedStringReference(message, F("repeated_bytes"), 0, &scratch)); + + sub_message = &reflection->GetRepeatedMessage(message, F("repeatedgroup"), 0); + EXPECT_EQ(217, sub_message->GetReflection()->GetInt32(*sub_message, repeated_group_a_)); + sub_message = &reflection->GetRepeatedMessage(message, F("repeated_nested_message"), 0); + EXPECT_EQ(218, sub_message->GetReflection()->GetInt32(*sub_message, nested_b_)); + sub_message = &reflection->GetRepeatedMessage(message, F("repeated_foreign_message"), 0); + EXPECT_EQ(219, sub_message->GetReflection()->GetInt32(*sub_message, foreign_c_)); + sub_message = &reflection->GetRepeatedMessage(message, F("repeated_import_message"), 0); + EXPECT_EQ(220, sub_message->GetReflection()->GetInt32(*sub_message, import_d_)); + + EXPECT_EQ( nested_bar_, reflection->GetRepeatedEnum(message, F("repeated_nested_enum" ),0)); + EXPECT_EQ(foreign_bar_, reflection->GetRepeatedEnum(message, F("repeated_foreign_enum"),0)); + EXPECT_EQ( import_bar_, reflection->GetRepeatedEnum(message, F("repeated_import_enum" ),0)); + + EXPECT_EQ("224", reflection->GetRepeatedString(message, F("repeated_string_piece"), 0)); + EXPECT_EQ("224", reflection->GetRepeatedStringReference( + message, F("repeated_string_piece"), 0, &scratch)); + + EXPECT_EQ("225", reflection->GetRepeatedString(message, F("repeated_cord"), 0)); + EXPECT_EQ("225", reflection->GetRepeatedStringReference( + message, F("repeated_cord"), 0, &scratch)); + + EXPECT_EQ(301 , reflection->GetRepeatedInt32 (message, F("repeated_int32" ), 1)); + EXPECT_EQ(302 , reflection->GetRepeatedInt64 (message, F("repeated_int64" ), 1)); + EXPECT_EQ(303 , reflection->GetRepeatedUInt32(message, F("repeated_uint32" ), 1)); + EXPECT_EQ(304 , reflection->GetRepeatedUInt64(message, F("repeated_uint64" ), 1)); + EXPECT_EQ(305 , reflection->GetRepeatedInt32 (message, F("repeated_sint32" ), 1)); + EXPECT_EQ(306 , reflection->GetRepeatedInt64 (message, F("repeated_sint64" ), 1)); + 
EXPECT_EQ(307 , reflection->GetRepeatedUInt32(message, F("repeated_fixed32" ), 1)); + EXPECT_EQ(308 , reflection->GetRepeatedUInt64(message, F("repeated_fixed64" ), 1)); + EXPECT_EQ(309 , reflection->GetRepeatedInt32 (message, F("repeated_sfixed32"), 1)); + EXPECT_EQ(310 , reflection->GetRepeatedInt64 (message, F("repeated_sfixed64"), 1)); + EXPECT_EQ(311 , reflection->GetRepeatedFloat (message, F("repeated_float" ), 1)); + EXPECT_EQ(312 , reflection->GetRepeatedDouble(message, F("repeated_double" ), 1)); + EXPECT_EQ(false, reflection->GetRepeatedBool (message, F("repeated_bool" ), 1)); + EXPECT_EQ("315", reflection->GetRepeatedString(message, F("repeated_string" ), 1)); + EXPECT_EQ("316", reflection->GetRepeatedString(message, F("repeated_bytes" ), 1)); + + EXPECT_EQ("315", reflection->GetRepeatedStringReference(message, F("repeated_string"), + 1, &scratch)); + EXPECT_EQ("316", reflection->GetRepeatedStringReference(message, F("repeated_bytes"), + 1, &scratch)); + + sub_message = &reflection->GetRepeatedMessage(message, F("repeatedgroup"), 1); + EXPECT_EQ(317, sub_message->GetReflection()->GetInt32(*sub_message, repeated_group_a_)); + sub_message = &reflection->GetRepeatedMessage(message, F("repeated_nested_message"), 1); + EXPECT_EQ(318, sub_message->GetReflection()->GetInt32(*sub_message, nested_b_)); + sub_message = &reflection->GetRepeatedMessage(message, F("repeated_foreign_message"), 1); + EXPECT_EQ(319, sub_message->GetReflection()->GetInt32(*sub_message, foreign_c_)); + sub_message = &reflection->GetRepeatedMessage(message, F("repeated_import_message"), 1); + EXPECT_EQ(320, sub_message->GetReflection()->GetInt32(*sub_message, import_d_)); + + EXPECT_EQ( nested_baz_, reflection->GetRepeatedEnum(message, F("repeated_nested_enum" ),1)); + EXPECT_EQ(foreign_baz_, reflection->GetRepeatedEnum(message, F("repeated_foreign_enum"),1)); + EXPECT_EQ( import_baz_, reflection->GetRepeatedEnum(message, F("repeated_import_enum" ),1)); + + EXPECT_EQ("324", reflection->GetRepeatedString(message, F("repeated_string_piece"), 1)); + EXPECT_EQ("324", reflection->GetRepeatedStringReference( + message, F("repeated_string_piece"), 1, &scratch)); + + EXPECT_EQ("325", reflection->GetRepeatedString(message, F("repeated_cord"), 1)); + EXPECT_EQ("325", reflection->GetRepeatedStringReference( + message, F("repeated_cord"), 1, &scratch)); +} + +void TestUtil::ReflectionTester::ExpectAllFieldsSetViaReflection3( + const Message& message) { + const Reflection* reflection = message.GetReflection(); + string scratch; + + // ----------------------------------------------------------------- + + EXPECT_TRUE(reflection->HasField(message, F("default_int32" ))); + EXPECT_TRUE(reflection->HasField(message, F("default_int64" ))); + EXPECT_TRUE(reflection->HasField(message, F("default_uint32" ))); + EXPECT_TRUE(reflection->HasField(message, F("default_uint64" ))); + EXPECT_TRUE(reflection->HasField(message, F("default_sint32" ))); + EXPECT_TRUE(reflection->HasField(message, F("default_sint64" ))); + EXPECT_TRUE(reflection->HasField(message, F("default_fixed32" ))); + EXPECT_TRUE(reflection->HasField(message, F("default_fixed64" ))); + EXPECT_TRUE(reflection->HasField(message, F("default_sfixed32"))); + EXPECT_TRUE(reflection->HasField(message, F("default_sfixed64"))); + EXPECT_TRUE(reflection->HasField(message, F("default_float" ))); + EXPECT_TRUE(reflection->HasField(message, F("default_double" ))); + EXPECT_TRUE(reflection->HasField(message, F("default_bool" ))); + EXPECT_TRUE(reflection->HasField(message, 
F("default_string" ))); + EXPECT_TRUE(reflection->HasField(message, F("default_bytes" ))); + + EXPECT_TRUE(reflection->HasField(message, F("default_nested_enum" ))); + EXPECT_TRUE(reflection->HasField(message, F("default_foreign_enum"))); + EXPECT_TRUE(reflection->HasField(message, F("default_import_enum" ))); + + EXPECT_TRUE(reflection->HasField(message, F("default_string_piece"))); + EXPECT_TRUE(reflection->HasField(message, F("default_cord"))); + + EXPECT_EQ(401 , reflection->GetInt32 (message, F("default_int32" ))); + EXPECT_EQ(402 , reflection->GetInt64 (message, F("default_int64" ))); + EXPECT_EQ(403 , reflection->GetUInt32(message, F("default_uint32" ))); + EXPECT_EQ(404 , reflection->GetUInt64(message, F("default_uint64" ))); + EXPECT_EQ(405 , reflection->GetInt32 (message, F("default_sint32" ))); + EXPECT_EQ(406 , reflection->GetInt64 (message, F("default_sint64" ))); + EXPECT_EQ(407 , reflection->GetUInt32(message, F("default_fixed32" ))); + EXPECT_EQ(408 , reflection->GetUInt64(message, F("default_fixed64" ))); + EXPECT_EQ(409 , reflection->GetInt32 (message, F("default_sfixed32"))); + EXPECT_EQ(410 , reflection->GetInt64 (message, F("default_sfixed64"))); + EXPECT_EQ(411 , reflection->GetFloat (message, F("default_float" ))); + EXPECT_EQ(412 , reflection->GetDouble(message, F("default_double" ))); + EXPECT_EQ(false, reflection->GetBool (message, F("default_bool" ))); + EXPECT_EQ("415", reflection->GetString(message, F("default_string" ))); + EXPECT_EQ("416", reflection->GetString(message, F("default_bytes" ))); + + EXPECT_EQ("415", reflection->GetStringReference(message, F("default_string"), &scratch)); + EXPECT_EQ("416", reflection->GetStringReference(message, F("default_bytes" ), &scratch)); + + EXPECT_EQ( nested_foo_, reflection->GetEnum(message, F("default_nested_enum" ))); + EXPECT_EQ(foreign_foo_, reflection->GetEnum(message, F("default_foreign_enum"))); + EXPECT_EQ( import_foo_, reflection->GetEnum(message, F("default_import_enum" ))); + + EXPECT_EQ("424", reflection->GetString(message, F("default_string_piece"))); + EXPECT_EQ("424", reflection->GetStringReference(message, F("default_string_piece"), + &scratch)); + + EXPECT_EQ("425", reflection->GetString(message, F("default_cord"))); + EXPECT_EQ("425", reflection->GetStringReference(message, F("default_cord"), &scratch)); +} + +void TestUtil::ReflectionTester::ExpectPackedFieldsSetViaReflection( + const Message& message) { + const Reflection* reflection = message.GetReflection(); + + ASSERT_EQ(2, reflection->FieldSize(message, F("packed_int32" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("packed_int64" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("packed_uint32" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("packed_uint64" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("packed_sint32" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("packed_sint64" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("packed_fixed32" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("packed_fixed64" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("packed_sfixed32"))); + ASSERT_EQ(2, reflection->FieldSize(message, F("packed_sfixed64"))); + ASSERT_EQ(2, reflection->FieldSize(message, F("packed_float" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("packed_double" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("packed_bool" ))); + ASSERT_EQ(2, reflection->FieldSize(message, F("packed_enum" ))); + + EXPECT_EQ(601 , reflection->GetRepeatedInt32 (message, F("packed_int32" ), 0)); + 
EXPECT_EQ(602 , reflection->GetRepeatedInt64 (message, F("packed_int64" ), 0)); + EXPECT_EQ(603 , reflection->GetRepeatedUInt32(message, F("packed_uint32" ), 0)); + EXPECT_EQ(604 , reflection->GetRepeatedUInt64(message, F("packed_uint64" ), 0)); + EXPECT_EQ(605 , reflection->GetRepeatedInt32 (message, F("packed_sint32" ), 0)); + EXPECT_EQ(606 , reflection->GetRepeatedInt64 (message, F("packed_sint64" ), 0)); + EXPECT_EQ(607 , reflection->GetRepeatedUInt32(message, F("packed_fixed32" ), 0)); + EXPECT_EQ(608 , reflection->GetRepeatedUInt64(message, F("packed_fixed64" ), 0)); + EXPECT_EQ(609 , reflection->GetRepeatedInt32 (message, F("packed_sfixed32"), 0)); + EXPECT_EQ(610 , reflection->GetRepeatedInt64 (message, F("packed_sfixed64"), 0)); + EXPECT_EQ(611 , reflection->GetRepeatedFloat (message, F("packed_float" ), 0)); + EXPECT_EQ(612 , reflection->GetRepeatedDouble(message, F("packed_double" ), 0)); + EXPECT_EQ(true , reflection->GetRepeatedBool (message, F("packed_bool" ), 0)); + EXPECT_EQ(foreign_bar_, + reflection->GetRepeatedEnum(message, F("packed_enum"), 0)); + + EXPECT_EQ(701 , reflection->GetRepeatedInt32 (message, F("packed_int32" ), 1)); + EXPECT_EQ(702 , reflection->GetRepeatedInt64 (message, F("packed_int64" ), 1)); + EXPECT_EQ(703 , reflection->GetRepeatedUInt32(message, F("packed_uint32" ), 1)); + EXPECT_EQ(704 , reflection->GetRepeatedUInt64(message, F("packed_uint64" ), 1)); + EXPECT_EQ(705 , reflection->GetRepeatedInt32 (message, F("packed_sint32" ), 1)); + EXPECT_EQ(706 , reflection->GetRepeatedInt64 (message, F("packed_sint64" ), 1)); + EXPECT_EQ(707 , reflection->GetRepeatedUInt32(message, F("packed_fixed32" ), 1)); + EXPECT_EQ(708 , reflection->GetRepeatedUInt64(message, F("packed_fixed64" ), 1)); + EXPECT_EQ(709 , reflection->GetRepeatedInt32 (message, F("packed_sfixed32"), 1)); + EXPECT_EQ(710 , reflection->GetRepeatedInt64 (message, F("packed_sfixed64"), 1)); + EXPECT_EQ(711 , reflection->GetRepeatedFloat (message, F("packed_float" ), 1)); + EXPECT_EQ(712 , reflection->GetRepeatedDouble(message, F("packed_double" ), 1)); + EXPECT_EQ(false, reflection->GetRepeatedBool (message, F("packed_bool" ), 1)); + EXPECT_EQ(foreign_baz_, + reflection->GetRepeatedEnum(message, F("packed_enum"), 1)); +} + +// ------------------------------------------------------------------- + +void TestUtil::ReflectionTester::ExpectClearViaReflection( + const Message& message) { + const Reflection* reflection = message.GetReflection(); + string scratch; + const Message* sub_message; + + // has_blah() should initially be false for all optional fields. 
+ EXPECT_FALSE(reflection->HasField(message, F("optional_int32" ))); + EXPECT_FALSE(reflection->HasField(message, F("optional_int64" ))); + EXPECT_FALSE(reflection->HasField(message, F("optional_uint32" ))); + EXPECT_FALSE(reflection->HasField(message, F("optional_uint64" ))); + EXPECT_FALSE(reflection->HasField(message, F("optional_sint32" ))); + EXPECT_FALSE(reflection->HasField(message, F("optional_sint64" ))); + EXPECT_FALSE(reflection->HasField(message, F("optional_fixed32" ))); + EXPECT_FALSE(reflection->HasField(message, F("optional_fixed64" ))); + EXPECT_FALSE(reflection->HasField(message, F("optional_sfixed32"))); + EXPECT_FALSE(reflection->HasField(message, F("optional_sfixed64"))); + EXPECT_FALSE(reflection->HasField(message, F("optional_float" ))); + EXPECT_FALSE(reflection->HasField(message, F("optional_double" ))); + EXPECT_FALSE(reflection->HasField(message, F("optional_bool" ))); + EXPECT_FALSE(reflection->HasField(message, F("optional_string" ))); + EXPECT_FALSE(reflection->HasField(message, F("optional_bytes" ))); + + EXPECT_FALSE(reflection->HasField(message, F("optionalgroup" ))); + EXPECT_FALSE(reflection->HasField(message, F("optional_nested_message" ))); + EXPECT_FALSE(reflection->HasField(message, F("optional_foreign_message"))); + EXPECT_FALSE(reflection->HasField(message, F("optional_import_message" ))); + + EXPECT_FALSE(reflection->HasField(message, F("optional_nested_enum" ))); + EXPECT_FALSE(reflection->HasField(message, F("optional_foreign_enum"))); + EXPECT_FALSE(reflection->HasField(message, F("optional_import_enum" ))); + + EXPECT_FALSE(reflection->HasField(message, F("optional_string_piece"))); + EXPECT_FALSE(reflection->HasField(message, F("optional_cord"))); + + // Optional fields without defaults are set to zero or something like it. + EXPECT_EQ(0 , reflection->GetInt32 (message, F("optional_int32" ))); + EXPECT_EQ(0 , reflection->GetInt64 (message, F("optional_int64" ))); + EXPECT_EQ(0 , reflection->GetUInt32(message, F("optional_uint32" ))); + EXPECT_EQ(0 , reflection->GetUInt64(message, F("optional_uint64" ))); + EXPECT_EQ(0 , reflection->GetInt32 (message, F("optional_sint32" ))); + EXPECT_EQ(0 , reflection->GetInt64 (message, F("optional_sint64" ))); + EXPECT_EQ(0 , reflection->GetUInt32(message, F("optional_fixed32" ))); + EXPECT_EQ(0 , reflection->GetUInt64(message, F("optional_fixed64" ))); + EXPECT_EQ(0 , reflection->GetInt32 (message, F("optional_sfixed32"))); + EXPECT_EQ(0 , reflection->GetInt64 (message, F("optional_sfixed64"))); + EXPECT_EQ(0 , reflection->GetFloat (message, F("optional_float" ))); + EXPECT_EQ(0 , reflection->GetDouble(message, F("optional_double" ))); + EXPECT_EQ(false, reflection->GetBool (message, F("optional_bool" ))); + EXPECT_EQ("" , reflection->GetString(message, F("optional_string" ))); + EXPECT_EQ("" , reflection->GetString(message, F("optional_bytes" ))); + + EXPECT_EQ("", reflection->GetStringReference(message, F("optional_string"), &scratch)); + EXPECT_EQ("", reflection->GetStringReference(message, F("optional_bytes" ), &scratch)); + + // Embedded messages should also be clear. 
+ sub_message = &reflection->GetMessage(message, F("optionalgroup")); + EXPECT_FALSE(sub_message->GetReflection()->HasField(*sub_message, group_a_)); + EXPECT_EQ(0, sub_message->GetReflection()->GetInt32(*sub_message, group_a_)); + sub_message = &reflection->GetMessage(message, F("optional_nested_message")); + EXPECT_FALSE(sub_message->GetReflection()->HasField(*sub_message, nested_b_)); + EXPECT_EQ(0, sub_message->GetReflection()->GetInt32(*sub_message, nested_b_)); + sub_message = &reflection->GetMessage(message, F("optional_foreign_message")); + EXPECT_FALSE(sub_message->GetReflection()->HasField(*sub_message, foreign_c_)); + EXPECT_EQ(0, sub_message->GetReflection()->GetInt32(*sub_message, foreign_c_)); + sub_message = &reflection->GetMessage(message, F("optional_import_message")); + EXPECT_FALSE(sub_message->GetReflection()->HasField(*sub_message, import_d_)); + EXPECT_EQ(0, sub_message->GetReflection()->GetInt32(*sub_message, import_d_)); + + // Enums without defaults are set to the first value in the enum. + EXPECT_EQ( nested_foo_, reflection->GetEnum(message, F("optional_nested_enum" ))); + EXPECT_EQ(foreign_foo_, reflection->GetEnum(message, F("optional_foreign_enum"))); + EXPECT_EQ( import_foo_, reflection->GetEnum(message, F("optional_import_enum" ))); + + EXPECT_EQ("", reflection->GetString(message, F("optional_string_piece"))); + EXPECT_EQ("", reflection->GetStringReference(message, F("optional_string_piece"), &scratch)); + + EXPECT_EQ("", reflection->GetString(message, F("optional_cord"))); + EXPECT_EQ("", reflection->GetStringReference(message, F("optional_cord"), &scratch)); + + // Repeated fields are empty. + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_int32" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_int64" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_uint32" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_uint64" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_sint32" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_sint64" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_fixed32" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_fixed64" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_sfixed32"))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_sfixed64"))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_float" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_double" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_bool" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_string" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_bytes" ))); + + EXPECT_EQ(0, reflection->FieldSize(message, F("repeatedgroup" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_nested_message" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_foreign_message"))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_import_message" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_nested_enum" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_foreign_enum" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_import_enum" ))); + + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_string_piece"))); + EXPECT_EQ(0, reflection->FieldSize(message, F("repeated_cord"))); + + // has_blah() should also be false for all default fields. 
+ EXPECT_FALSE(reflection->HasField(message, F("default_int32" ))); + EXPECT_FALSE(reflection->HasField(message, F("default_int64" ))); + EXPECT_FALSE(reflection->HasField(message, F("default_uint32" ))); + EXPECT_FALSE(reflection->HasField(message, F("default_uint64" ))); + EXPECT_FALSE(reflection->HasField(message, F("default_sint32" ))); + EXPECT_FALSE(reflection->HasField(message, F("default_sint64" ))); + EXPECT_FALSE(reflection->HasField(message, F("default_fixed32" ))); + EXPECT_FALSE(reflection->HasField(message, F("default_fixed64" ))); + EXPECT_FALSE(reflection->HasField(message, F("default_sfixed32"))); + EXPECT_FALSE(reflection->HasField(message, F("default_sfixed64"))); + EXPECT_FALSE(reflection->HasField(message, F("default_float" ))); + EXPECT_FALSE(reflection->HasField(message, F("default_double" ))); + EXPECT_FALSE(reflection->HasField(message, F("default_bool" ))); + EXPECT_FALSE(reflection->HasField(message, F("default_string" ))); + EXPECT_FALSE(reflection->HasField(message, F("default_bytes" ))); + + EXPECT_FALSE(reflection->HasField(message, F("default_nested_enum" ))); + EXPECT_FALSE(reflection->HasField(message, F("default_foreign_enum"))); + EXPECT_FALSE(reflection->HasField(message, F("default_import_enum" ))); + + EXPECT_FALSE(reflection->HasField(message, F("default_string_piece"))); + EXPECT_FALSE(reflection->HasField(message, F("default_cord"))); + + // Fields with defaults have their default values (duh). + EXPECT_EQ( 41 , reflection->GetInt32 (message, F("default_int32" ))); + EXPECT_EQ( 42 , reflection->GetInt64 (message, F("default_int64" ))); + EXPECT_EQ( 43 , reflection->GetUInt32(message, F("default_uint32" ))); + EXPECT_EQ( 44 , reflection->GetUInt64(message, F("default_uint64" ))); + EXPECT_EQ(-45 , reflection->GetInt32 (message, F("default_sint32" ))); + EXPECT_EQ( 46 , reflection->GetInt64 (message, F("default_sint64" ))); + EXPECT_EQ( 47 , reflection->GetUInt32(message, F("default_fixed32" ))); + EXPECT_EQ( 48 , reflection->GetUInt64(message, F("default_fixed64" ))); + EXPECT_EQ( 49 , reflection->GetInt32 (message, F("default_sfixed32"))); + EXPECT_EQ(-50 , reflection->GetInt64 (message, F("default_sfixed64"))); + EXPECT_EQ( 51.5 , reflection->GetFloat (message, F("default_float" ))); + EXPECT_EQ( 52e3 , reflection->GetDouble(message, F("default_double" ))); + EXPECT_EQ(true , reflection->GetBool (message, F("default_bool" ))); + EXPECT_EQ("hello", reflection->GetString(message, F("default_string" ))); + EXPECT_EQ("world", reflection->GetString(message, F("default_bytes" ))); + + EXPECT_EQ("hello", reflection->GetStringReference(message, F("default_string"), &scratch)); + EXPECT_EQ("world", reflection->GetStringReference(message, F("default_bytes" ), &scratch)); + + EXPECT_EQ( nested_bar_, reflection->GetEnum(message, F("default_nested_enum" ))); + EXPECT_EQ(foreign_bar_, reflection->GetEnum(message, F("default_foreign_enum"))); + EXPECT_EQ( import_bar_, reflection->GetEnum(message, F("default_import_enum" ))); + + EXPECT_EQ("abc", reflection->GetString(message, F("default_string_piece"))); + EXPECT_EQ("abc", reflection->GetStringReference(message, F("default_string_piece"), &scratch)); + + EXPECT_EQ("123", reflection->GetString(message, F("default_cord"))); + EXPECT_EQ("123", reflection->GetStringReference(message, F("default_cord"), &scratch)); +} + +void TestUtil::ReflectionTester::ExpectPackedClearViaReflection( + const Message& message) { + const Reflection* reflection = message.GetReflection(); + + EXPECT_EQ(0, reflection->FieldSize(message, 
F("packed_int32" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("packed_int64" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("packed_uint32" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("packed_uint64" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("packed_sint32" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("packed_sint64" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("packed_fixed32" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("packed_fixed64" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("packed_sfixed32"))); + EXPECT_EQ(0, reflection->FieldSize(message, F("packed_sfixed64"))); + EXPECT_EQ(0, reflection->FieldSize(message, F("packed_float" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("packed_double" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("packed_bool" ))); + EXPECT_EQ(0, reflection->FieldSize(message, F("packed_enum" ))); +} + +// ------------------------------------------------------------------- + +void TestUtil::ReflectionTester::ModifyRepeatedFieldsViaReflection( + Message* message) { + const Reflection* reflection = message->GetReflection(); + Message* sub_message; + + reflection->SetRepeatedInt32 (message, F("repeated_int32" ), 1, 501); + reflection->SetRepeatedInt64 (message, F("repeated_int64" ), 1, 502); + reflection->SetRepeatedUInt32(message, F("repeated_uint32" ), 1, 503); + reflection->SetRepeatedUInt64(message, F("repeated_uint64" ), 1, 504); + reflection->SetRepeatedInt32 (message, F("repeated_sint32" ), 1, 505); + reflection->SetRepeatedInt64 (message, F("repeated_sint64" ), 1, 506); + reflection->SetRepeatedUInt32(message, F("repeated_fixed32" ), 1, 507); + reflection->SetRepeatedUInt64(message, F("repeated_fixed64" ), 1, 508); + reflection->SetRepeatedInt32 (message, F("repeated_sfixed32"), 1, 509); + reflection->SetRepeatedInt64 (message, F("repeated_sfixed64"), 1, 510); + reflection->SetRepeatedFloat (message, F("repeated_float" ), 1, 511); + reflection->SetRepeatedDouble(message, F("repeated_double" ), 1, 512); + reflection->SetRepeatedBool (message, F("repeated_bool" ), 1, true); + reflection->SetRepeatedString(message, F("repeated_string" ), 1, "515"); + reflection->SetRepeatedString(message, F("repeated_bytes" ), 1, "516"); + + sub_message = reflection->MutableRepeatedMessage(message, F("repeatedgroup"), 1); + sub_message->GetReflection()->SetInt32(sub_message, repeated_group_a_, 517); + sub_message = reflection->MutableRepeatedMessage(message, F("repeated_nested_message"), 1); + sub_message->GetReflection()->SetInt32(sub_message, nested_b_, 518); + sub_message = reflection->MutableRepeatedMessage(message, F("repeated_foreign_message"), 1); + sub_message->GetReflection()->SetInt32(sub_message, foreign_c_, 519); + sub_message = reflection->MutableRepeatedMessage(message, F("repeated_import_message"), 1); + sub_message->GetReflection()->SetInt32(sub_message, import_d_, 520); + + reflection->SetRepeatedEnum(message, F("repeated_nested_enum" ), 1, nested_foo_); + reflection->SetRepeatedEnum(message, F("repeated_foreign_enum"), 1, foreign_foo_); + reflection->SetRepeatedEnum(message, F("repeated_import_enum" ), 1, import_foo_); + + reflection->SetRepeatedString(message, F("repeated_string_piece"), 1, "524"); + reflection->SetRepeatedString(message, F("repeated_cord"), 1, "525"); +} + +void TestUtil::ReflectionTester::ModifyPackedFieldsViaReflection( + Message* message) { + const Reflection* reflection = message->GetReflection(); + reflection->SetRepeatedInt32 (message, F("packed_int32" ), 
1, 801); + reflection->SetRepeatedInt64 (message, F("packed_int64" ), 1, 802); + reflection->SetRepeatedUInt32(message, F("packed_uint32" ), 1, 803); + reflection->SetRepeatedUInt64(message, F("packed_uint64" ), 1, 804); + reflection->SetRepeatedInt32 (message, F("packed_sint32" ), 1, 805); + reflection->SetRepeatedInt64 (message, F("packed_sint64" ), 1, 806); + reflection->SetRepeatedUInt32(message, F("packed_fixed32" ), 1, 807); + reflection->SetRepeatedUInt64(message, F("packed_fixed64" ), 1, 808); + reflection->SetRepeatedInt32 (message, F("packed_sfixed32"), 1, 809); + reflection->SetRepeatedInt64 (message, F("packed_sfixed64"), 1, 810); + reflection->SetRepeatedFloat (message, F("packed_float" ), 1, 811); + reflection->SetRepeatedDouble(message, F("packed_double" ), 1, 812); + reflection->SetRepeatedBool (message, F("packed_bool" ), 1, true); + reflection->SetRepeatedEnum (message, F("packed_enum" ), 1, foreign_foo_); +} + +void TestUtil::ReflectionTester::RemoveLastRepeatedsViaReflection(Message* message) { + const Reflection* reflection = message->GetReflection(); + + vector<const FieldDescriptor*> output; + reflection->ListFields(*message, &output); + for (int i=0; i<output.size(); ++i) { + const FieldDescriptor* field = output[i]; + if (!field->is_repeated()) continue; + + reflection->RemoveLast(message, field); + } +} + +void TestUtil::ReflectionTester::SwapRepeatedsViaReflection(Message* message) { + const Reflection* reflection = message->GetReflection(); + + vector<const FieldDescriptor*> output; + reflection->ListFields(*message, &output); + for (int i=0; i<output.size(); ++i) { + const FieldDescriptor* field = output[i]; + if (!field->is_repeated()) continue; + + reflection->SwapElements(message, field, 0, 1); + } +} + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/test_util.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/test_util.h new file mode 100644 index 0000000000..25165f3ada --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/test_util.h @@ -0,0 +1,174 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#ifndef GOOGLE_PROTOBUF_TEST_UTIL_H__ +#define GOOGLE_PROTOBUF_TEST_UTIL_H__ + +#include <stack> +#include <string> +#include <google/protobuf/message.h> +#include <google/protobuf/unittest.pb.h> + +namespace google { +namespace protobuf { + +namespace unittest = protobuf_unittest; +namespace unittest_import = protobuf_unittest_import; + +class TestUtil { + public: + // Set every field in the message to a unique value. + static void SetAllFields(unittest::TestAllTypes* message); + static void SetAllExtensions(unittest::TestAllExtensions* message); + static void SetAllFieldsAndExtensions(unittest::TestFieldOrderings* message); + static void SetPackedFields(unittest::TestPackedTypes* message); + static void SetPackedExtensions(unittest::TestPackedExtensions* message); + static void SetUnpackedFields(unittest::TestUnpackedTypes* message); + + // Use the repeated versions of the set_*() accessors to modify all the + // repeated fields of the messsage (which should already have been + // initialized with Set*Fields()). Set*Fields() itself only tests + // the add_*() accessors. + static void ModifyRepeatedFields(unittest::TestAllTypes* message); + static void ModifyRepeatedExtensions(unittest::TestAllExtensions* message); + static void ModifyPackedFields(unittest::TestPackedTypes* message); + static void ModifyPackedExtensions(unittest::TestPackedExtensions* message); + + // Check that all fields have the values that they should have after + // Set*Fields() is called. + static void ExpectAllFieldsSet(const unittest::TestAllTypes& message); + static void ExpectAllExtensionsSet( + const unittest::TestAllExtensions& message); + static void ExpectPackedFieldsSet(const unittest::TestPackedTypes& message); + static void ExpectPackedExtensionsSet( + const unittest::TestPackedExtensions& message); + static void ExpectUnpackedFieldsSet( + const unittest::TestUnpackedTypes& message); + + // Expect that the message is modified as would be expected from + // Modify*Fields(). + static void ExpectRepeatedFieldsModified( + const unittest::TestAllTypes& message); + static void ExpectRepeatedExtensionsModified( + const unittest::TestAllExtensions& message); + static void ExpectPackedFieldsModified( + const unittest::TestPackedTypes& message); + static void ExpectPackedExtensionsModified( + const unittest::TestPackedExtensions& message); + + // Check that all fields have their default values. + static void ExpectClear(const unittest::TestAllTypes& message); + static void ExpectExtensionsClear(const unittest::TestAllExtensions& message); + static void ExpectPackedClear(const unittest::TestPackedTypes& message); + static void ExpectPackedExtensionsClear( + const unittest::TestPackedExtensions& message); + + // Check that the passed-in serialization is the canonical serialization we + // expect for a TestFieldOrderings message filled in by + // SetAllFieldsAndExtensions().
+ static void ExpectAllFieldsAndExtensionsInOrder(const string& serialized); + + // Check that all repeated fields have had their last elements removed. + static void ExpectLastRepeatedsRemoved( + const unittest::TestAllTypes& message); + static void ExpectLastRepeatedExtensionsRemoved( + const unittest::TestAllExtensions& message); + + // Check that all repeated fields have had their first and last elements + // swapped. + static void ExpectRepeatedsSwapped(const unittest::TestAllTypes& message); + static void ExpectRepeatedExtensionsSwapped( + const unittest::TestAllExtensions& message); + + // Like above, but use the reflection interface. + class ReflectionTester { + public: + // base_descriptor must be a descriptor for TestAllTypes or + // TestAllExtensions. In the former case, ReflectionTester fetches from + // it the FieldDescriptors needed to use the reflection interface. In + // the latter case, ReflectionTester searches for extension fields in + // its file. + explicit ReflectionTester(const Descriptor* base_descriptor); + + void SetAllFieldsViaReflection(Message* message); + void ModifyRepeatedFieldsViaReflection(Message* message); + void ExpectAllFieldsSetViaReflection(const Message& message); + void ExpectClearViaReflection(const Message& message); + + void SetPackedFieldsViaReflection(Message* message); + void ModifyPackedFieldsViaReflection(Message* message); + void ExpectPackedFieldsSetViaReflection(const Message& message); + void ExpectPackedClearViaReflection(const Message& message); + + void RemoveLastRepeatedsViaReflection(Message* message); + void SwapRepeatedsViaReflection(Message* message); + + private: + const FieldDescriptor* F(const string& name); + + const Descriptor* base_descriptor_; + + const FieldDescriptor* group_a_; + const FieldDescriptor* repeated_group_a_; + const FieldDescriptor* nested_b_; + const FieldDescriptor* foreign_c_; + const FieldDescriptor* import_d_; + + const EnumValueDescriptor* nested_foo_; + const EnumValueDescriptor* nested_bar_; + const EnumValueDescriptor* nested_baz_; + const EnumValueDescriptor* foreign_foo_; + const EnumValueDescriptor* foreign_bar_; + const EnumValueDescriptor* foreign_baz_; + const EnumValueDescriptor* import_foo_; + const EnumValueDescriptor* import_bar_; + const EnumValueDescriptor* import_baz_; + + // We have to split this into three function otherwise it creates a stack + // frame so large that it triggers a warning. + void ExpectAllFieldsSetViaReflection1(const Message& message); + void ExpectAllFieldsSetViaReflection2(const Message& message); + void ExpectAllFieldsSetViaReflection3(const Message& message); + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ReflectionTester); + }; + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(TestUtil); +}; + +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_TEST_UTIL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/test_util_lite.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/test_util_lite.cc new file mode 100644 index 0000000000..d7140e0cad --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/test_util_lite.cc @@ -0,0 +1,1502 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include <google/protobuf/test_util_lite.h> +#include <google/protobuf/stubs/common.h> + + +#define EXPECT_TRUE GOOGLE_CHECK +#define ASSERT_TRUE GOOGLE_CHECK +#define EXPECT_FALSE(COND) GOOGLE_CHECK(!(COND)) +#define EXPECT_EQ GOOGLE_CHECK_EQ +#define ASSERT_EQ GOOGLE_CHECK_EQ + +namespace google { +namespace protobuf { + +void TestUtilLite::SetAllFields(unittest::TestAllTypesLite* message) { + message->set_optional_int32 (101); + message->set_optional_int64 (102); + message->set_optional_uint32 (103); + message->set_optional_uint64 (104); + message->set_optional_sint32 (105); + message->set_optional_sint64 (106); + message->set_optional_fixed32 (107); + message->set_optional_fixed64 (108); + message->set_optional_sfixed32(109); + message->set_optional_sfixed64(110); + message->set_optional_float (111); + message->set_optional_double (112); + message->set_optional_bool (true); + message->set_optional_string ("115"); + message->set_optional_bytes ("116"); + + message->mutable_optionalgroup ()->set_a(117); + message->mutable_optional_nested_message ()->set_bb(118); + message->mutable_optional_foreign_message()->set_c(119); + message->mutable_optional_import_message ()->set_d(120); + + message->set_optional_nested_enum (unittest::TestAllTypesLite::BAZ ); + message->set_optional_foreign_enum(unittest::FOREIGN_LITE_BAZ ); + message->set_optional_import_enum (unittest_import::IMPORT_LITE_BAZ); + + + // ----------------------------------------------------------------- + + message->add_repeated_int32 (201); + message->add_repeated_int64 (202); + message->add_repeated_uint32 (203); + message->add_repeated_uint64 (204); + message->add_repeated_sint32 (205); + message->add_repeated_sint64 (206); + message->add_repeated_fixed32 (207); + message->add_repeated_fixed64 (208); + message->add_repeated_sfixed32(209); + message->add_repeated_sfixed64(210); + message->add_repeated_float (211); +
message->add_repeated_double (212); + message->add_repeated_bool (true); + message->add_repeated_string ("215"); + message->add_repeated_bytes ("216"); + + message->add_repeatedgroup ()->set_a(217); + message->add_repeated_nested_message ()->set_bb(218); + message->add_repeated_foreign_message()->set_c(219); + message->add_repeated_import_message ()->set_d(220); + + message->add_repeated_nested_enum (unittest::TestAllTypesLite::BAR ); + message->add_repeated_foreign_enum(unittest::FOREIGN_LITE_BAR ); + message->add_repeated_import_enum (unittest_import::IMPORT_LITE_BAR); + + + // Add a second one of each field. + message->add_repeated_int32 (301); + message->add_repeated_int64 (302); + message->add_repeated_uint32 (303); + message->add_repeated_uint64 (304); + message->add_repeated_sint32 (305); + message->add_repeated_sint64 (306); + message->add_repeated_fixed32 (307); + message->add_repeated_fixed64 (308); + message->add_repeated_sfixed32(309); + message->add_repeated_sfixed64(310); + message->add_repeated_float (311); + message->add_repeated_double (312); + message->add_repeated_bool (false); + message->add_repeated_string ("315"); + message->add_repeated_bytes ("316"); + + message->add_repeatedgroup ()->set_a(317); + message->add_repeated_nested_message ()->set_bb(318); + message->add_repeated_foreign_message()->set_c(319); + message->add_repeated_import_message ()->set_d(320); + + message->add_repeated_nested_enum (unittest::TestAllTypesLite::BAZ ); + message->add_repeated_foreign_enum(unittest::FOREIGN_LITE_BAZ ); + message->add_repeated_import_enum (unittest_import::IMPORT_LITE_BAZ); + + + // ----------------------------------------------------------------- + + message->set_default_int32 (401); + message->set_default_int64 (402); + message->set_default_uint32 (403); + message->set_default_uint64 (404); + message->set_default_sint32 (405); + message->set_default_sint64 (406); + message->set_default_fixed32 (407); + message->set_default_fixed64 (408); + message->set_default_sfixed32(409); + message->set_default_sfixed64(410); + message->set_default_float (411); + message->set_default_double (412); + message->set_default_bool (false); + message->set_default_string ("415"); + message->set_default_bytes ("416"); + + message->set_default_nested_enum (unittest::TestAllTypesLite::FOO ); + message->set_default_foreign_enum(unittest::FOREIGN_LITE_FOO ); + message->set_default_import_enum (unittest_import::IMPORT_LITE_FOO); + +} + +// ------------------------------------------------------------------- + +void TestUtilLite::ModifyRepeatedFields(unittest::TestAllTypesLite* message) { + message->set_repeated_int32 (1, 501); + message->set_repeated_int64 (1, 502); + message->set_repeated_uint32 (1, 503); + message->set_repeated_uint64 (1, 504); + message->set_repeated_sint32 (1, 505); + message->set_repeated_sint64 (1, 506); + message->set_repeated_fixed32 (1, 507); + message->set_repeated_fixed64 (1, 508); + message->set_repeated_sfixed32(1, 509); + message->set_repeated_sfixed64(1, 510); + message->set_repeated_float (1, 511); + message->set_repeated_double (1, 512); + message->set_repeated_bool (1, true); + message->set_repeated_string (1, "515"); + message->set_repeated_bytes (1, "516"); + + message->mutable_repeatedgroup (1)->set_a(517); + message->mutable_repeated_nested_message (1)->set_bb(518); + message->mutable_repeated_foreign_message(1)->set_c(519); + message->mutable_repeated_import_message (1)->set_d(520); + + message->set_repeated_nested_enum (1, unittest::TestAllTypesLite::FOO ); 
+ message->set_repeated_foreign_enum(1, unittest::FOREIGN_LITE_FOO ); + message->set_repeated_import_enum (1, unittest_import::IMPORT_LITE_FOO); + +} + +// ------------------------------------------------------------------- + +void TestUtilLite::ExpectAllFieldsSet( + const unittest::TestAllTypesLite& message) { + EXPECT_TRUE(message.has_optional_int32 ()); + EXPECT_TRUE(message.has_optional_int64 ()); + EXPECT_TRUE(message.has_optional_uint32 ()); + EXPECT_TRUE(message.has_optional_uint64 ()); + EXPECT_TRUE(message.has_optional_sint32 ()); + EXPECT_TRUE(message.has_optional_sint64 ()); + EXPECT_TRUE(message.has_optional_fixed32 ()); + EXPECT_TRUE(message.has_optional_fixed64 ()); + EXPECT_TRUE(message.has_optional_sfixed32()); + EXPECT_TRUE(message.has_optional_sfixed64()); + EXPECT_TRUE(message.has_optional_float ()); + EXPECT_TRUE(message.has_optional_double ()); + EXPECT_TRUE(message.has_optional_bool ()); + EXPECT_TRUE(message.has_optional_string ()); + EXPECT_TRUE(message.has_optional_bytes ()); + + EXPECT_TRUE(message.has_optionalgroup ()); + EXPECT_TRUE(message.has_optional_nested_message ()); + EXPECT_TRUE(message.has_optional_foreign_message()); + EXPECT_TRUE(message.has_optional_import_message ()); + + EXPECT_TRUE(message.optionalgroup ().has_a()); + EXPECT_TRUE(message.optional_nested_message ().has_bb()); + EXPECT_TRUE(message.optional_foreign_message().has_c()); + EXPECT_TRUE(message.optional_import_message ().has_d()); + + EXPECT_TRUE(message.has_optional_nested_enum ()); + EXPECT_TRUE(message.has_optional_foreign_enum()); + EXPECT_TRUE(message.has_optional_import_enum ()); + + + EXPECT_EQ(101 , message.optional_int32 ()); + EXPECT_EQ(102 , message.optional_int64 ()); + EXPECT_EQ(103 , message.optional_uint32 ()); + EXPECT_EQ(104 , message.optional_uint64 ()); + EXPECT_EQ(105 , message.optional_sint32 ()); + EXPECT_EQ(106 , message.optional_sint64 ()); + EXPECT_EQ(107 , message.optional_fixed32 ()); + EXPECT_EQ(108 , message.optional_fixed64 ()); + EXPECT_EQ(109 , message.optional_sfixed32()); + EXPECT_EQ(110 , message.optional_sfixed64()); + EXPECT_EQ(111 , message.optional_float ()); + EXPECT_EQ(112 , message.optional_double ()); + EXPECT_EQ(true , message.optional_bool ()); + EXPECT_EQ("115", message.optional_string ()); + EXPECT_EQ("116", message.optional_bytes ()); + + EXPECT_EQ(117, message.optionalgroup ().a()); + EXPECT_EQ(118, message.optional_nested_message ().bb()); + EXPECT_EQ(119, message.optional_foreign_message().c()); + EXPECT_EQ(120, message.optional_import_message ().d()); + + EXPECT_EQ(unittest::TestAllTypesLite::BAZ , message.optional_nested_enum ()); + EXPECT_EQ(unittest::FOREIGN_LITE_BAZ , message.optional_foreign_enum()); + EXPECT_EQ(unittest_import::IMPORT_LITE_BAZ, message.optional_import_enum ()); + + + // ----------------------------------------------------------------- + + ASSERT_EQ(2, message.repeated_int32_size ()); + ASSERT_EQ(2, message.repeated_int64_size ()); + ASSERT_EQ(2, message.repeated_uint32_size ()); + ASSERT_EQ(2, message.repeated_uint64_size ()); + ASSERT_EQ(2, message.repeated_sint32_size ()); + ASSERT_EQ(2, message.repeated_sint64_size ()); + ASSERT_EQ(2, message.repeated_fixed32_size ()); + ASSERT_EQ(2, message.repeated_fixed64_size ()); + ASSERT_EQ(2, message.repeated_sfixed32_size()); + ASSERT_EQ(2, message.repeated_sfixed64_size()); + ASSERT_EQ(2, message.repeated_float_size ()); + ASSERT_EQ(2, message.repeated_double_size ()); + ASSERT_EQ(2, message.repeated_bool_size ()); + ASSERT_EQ(2, message.repeated_string_size ()); + 
ASSERT_EQ(2, message.repeated_bytes_size ()); + + ASSERT_EQ(2, message.repeatedgroup_size ()); + ASSERT_EQ(2, message.repeated_nested_message_size ()); + ASSERT_EQ(2, message.repeated_foreign_message_size()); + ASSERT_EQ(2, message.repeated_import_message_size ()); + ASSERT_EQ(2, message.repeated_nested_enum_size ()); + ASSERT_EQ(2, message.repeated_foreign_enum_size ()); + ASSERT_EQ(2, message.repeated_import_enum_size ()); + + + EXPECT_EQ(201 , message.repeated_int32 (0)); + EXPECT_EQ(202 , message.repeated_int64 (0)); + EXPECT_EQ(203 , message.repeated_uint32 (0)); + EXPECT_EQ(204 , message.repeated_uint64 (0)); + EXPECT_EQ(205 , message.repeated_sint32 (0)); + EXPECT_EQ(206 , message.repeated_sint64 (0)); + EXPECT_EQ(207 , message.repeated_fixed32 (0)); + EXPECT_EQ(208 , message.repeated_fixed64 (0)); + EXPECT_EQ(209 , message.repeated_sfixed32(0)); + EXPECT_EQ(210 , message.repeated_sfixed64(0)); + EXPECT_EQ(211 , message.repeated_float (0)); + EXPECT_EQ(212 , message.repeated_double (0)); + EXPECT_EQ(true , message.repeated_bool (0)); + EXPECT_EQ("215", message.repeated_string (0)); + EXPECT_EQ("216", message.repeated_bytes (0)); + + EXPECT_EQ(217, message.repeatedgroup (0).a()); + EXPECT_EQ(218, message.repeated_nested_message (0).bb()); + EXPECT_EQ(219, message.repeated_foreign_message(0).c()); + EXPECT_EQ(220, message.repeated_import_message (0).d()); + + + EXPECT_EQ(unittest::TestAllTypesLite::BAR , message.repeated_nested_enum (0)); + EXPECT_EQ(unittest::FOREIGN_LITE_BAR , message.repeated_foreign_enum(0)); + EXPECT_EQ(unittest_import::IMPORT_LITE_BAR, message.repeated_import_enum (0)); + + EXPECT_EQ(301 , message.repeated_int32 (1)); + EXPECT_EQ(302 , message.repeated_int64 (1)); + EXPECT_EQ(303 , message.repeated_uint32 (1)); + EXPECT_EQ(304 , message.repeated_uint64 (1)); + EXPECT_EQ(305 , message.repeated_sint32 (1)); + EXPECT_EQ(306 , message.repeated_sint64 (1)); + EXPECT_EQ(307 , message.repeated_fixed32 (1)); + EXPECT_EQ(308 , message.repeated_fixed64 (1)); + EXPECT_EQ(309 , message.repeated_sfixed32(1)); + EXPECT_EQ(310 , message.repeated_sfixed64(1)); + EXPECT_EQ(311 , message.repeated_float (1)); + EXPECT_EQ(312 , message.repeated_double (1)); + EXPECT_EQ(false, message.repeated_bool (1)); + EXPECT_EQ("315", message.repeated_string (1)); + EXPECT_EQ("316", message.repeated_bytes (1)); + + EXPECT_EQ(317, message.repeatedgroup (1).a()); + EXPECT_EQ(318, message.repeated_nested_message (1).bb()); + EXPECT_EQ(319, message.repeated_foreign_message(1).c()); + EXPECT_EQ(320, message.repeated_import_message (1).d()); + + EXPECT_EQ(unittest::TestAllTypesLite::BAZ , message.repeated_nested_enum (1)); + EXPECT_EQ(unittest::FOREIGN_LITE_BAZ , message.repeated_foreign_enum(1)); + EXPECT_EQ(unittest_import::IMPORT_LITE_BAZ, message.repeated_import_enum (1)); + + + // ----------------------------------------------------------------- + + EXPECT_TRUE(message.has_default_int32 ()); + EXPECT_TRUE(message.has_default_int64 ()); + EXPECT_TRUE(message.has_default_uint32 ()); + EXPECT_TRUE(message.has_default_uint64 ()); + EXPECT_TRUE(message.has_default_sint32 ()); + EXPECT_TRUE(message.has_default_sint64 ()); + EXPECT_TRUE(message.has_default_fixed32 ()); + EXPECT_TRUE(message.has_default_fixed64 ()); + EXPECT_TRUE(message.has_default_sfixed32()); + EXPECT_TRUE(message.has_default_sfixed64()); + EXPECT_TRUE(message.has_default_float ()); + EXPECT_TRUE(message.has_default_double ()); + EXPECT_TRUE(message.has_default_bool ()); + EXPECT_TRUE(message.has_default_string ()); + 
EXPECT_TRUE(message.has_default_bytes ()); + + EXPECT_TRUE(message.has_default_nested_enum ()); + EXPECT_TRUE(message.has_default_foreign_enum()); + EXPECT_TRUE(message.has_default_import_enum ()); + + + EXPECT_EQ(401 , message.default_int32 ()); + EXPECT_EQ(402 , message.default_int64 ()); + EXPECT_EQ(403 , message.default_uint32 ()); + EXPECT_EQ(404 , message.default_uint64 ()); + EXPECT_EQ(405 , message.default_sint32 ()); + EXPECT_EQ(406 , message.default_sint64 ()); + EXPECT_EQ(407 , message.default_fixed32 ()); + EXPECT_EQ(408 , message.default_fixed64 ()); + EXPECT_EQ(409 , message.default_sfixed32()); + EXPECT_EQ(410 , message.default_sfixed64()); + EXPECT_EQ(411 , message.default_float ()); + EXPECT_EQ(412 , message.default_double ()); + EXPECT_EQ(false, message.default_bool ()); + EXPECT_EQ("415", message.default_string ()); + EXPECT_EQ("416", message.default_bytes ()); + + EXPECT_EQ(unittest::TestAllTypesLite::FOO , message.default_nested_enum ()); + EXPECT_EQ(unittest::FOREIGN_LITE_FOO , message.default_foreign_enum()); + EXPECT_EQ(unittest_import::IMPORT_LITE_FOO, message.default_import_enum ()); + +} + +// ------------------------------------------------------------------- + +void TestUtilLite::ExpectClear(const unittest::TestAllTypesLite& message) { + // has_blah() should initially be false for all optional fields. + EXPECT_FALSE(message.has_optional_int32 ()); + EXPECT_FALSE(message.has_optional_int64 ()); + EXPECT_FALSE(message.has_optional_uint32 ()); + EXPECT_FALSE(message.has_optional_uint64 ()); + EXPECT_FALSE(message.has_optional_sint32 ()); + EXPECT_FALSE(message.has_optional_sint64 ()); + EXPECT_FALSE(message.has_optional_fixed32 ()); + EXPECT_FALSE(message.has_optional_fixed64 ()); + EXPECT_FALSE(message.has_optional_sfixed32()); + EXPECT_FALSE(message.has_optional_sfixed64()); + EXPECT_FALSE(message.has_optional_float ()); + EXPECT_FALSE(message.has_optional_double ()); + EXPECT_FALSE(message.has_optional_bool ()); + EXPECT_FALSE(message.has_optional_string ()); + EXPECT_FALSE(message.has_optional_bytes ()); + + EXPECT_FALSE(message.has_optionalgroup ()); + EXPECT_FALSE(message.has_optional_nested_message ()); + EXPECT_FALSE(message.has_optional_foreign_message()); + EXPECT_FALSE(message.has_optional_import_message ()); + + EXPECT_FALSE(message.has_optional_nested_enum ()); + EXPECT_FALSE(message.has_optional_foreign_enum()); + EXPECT_FALSE(message.has_optional_import_enum ()); + + + // Optional fields without defaults are set to zero or something like it. + EXPECT_EQ(0 , message.optional_int32 ()); + EXPECT_EQ(0 , message.optional_int64 ()); + EXPECT_EQ(0 , message.optional_uint32 ()); + EXPECT_EQ(0 , message.optional_uint64 ()); + EXPECT_EQ(0 , message.optional_sint32 ()); + EXPECT_EQ(0 , message.optional_sint64 ()); + EXPECT_EQ(0 , message.optional_fixed32 ()); + EXPECT_EQ(0 , message.optional_fixed64 ()); + EXPECT_EQ(0 , message.optional_sfixed32()); + EXPECT_EQ(0 , message.optional_sfixed64()); + EXPECT_EQ(0 , message.optional_float ()); + EXPECT_EQ(0 , message.optional_double ()); + EXPECT_EQ(false, message.optional_bool ()); + EXPECT_EQ("" , message.optional_string ()); + EXPECT_EQ("" , message.optional_bytes ()); + + // Embedded messages should also be clear. 
+ EXPECT_FALSE(message.optionalgroup ().has_a()); + EXPECT_FALSE(message.optional_nested_message ().has_bb()); + EXPECT_FALSE(message.optional_foreign_message().has_c()); + EXPECT_FALSE(message.optional_import_message ().has_d()); + + EXPECT_EQ(0, message.optionalgroup ().a()); + EXPECT_EQ(0, message.optional_nested_message ().bb()); + EXPECT_EQ(0, message.optional_foreign_message().c()); + EXPECT_EQ(0, message.optional_import_message ().d()); + + // Enums without defaults are set to the first value in the enum. + EXPECT_EQ(unittest::TestAllTypesLite::FOO , message.optional_nested_enum ()); + EXPECT_EQ(unittest::FOREIGN_LITE_FOO , message.optional_foreign_enum()); + EXPECT_EQ(unittest_import::IMPORT_LITE_FOO, message.optional_import_enum ()); + + + // Repeated fields are empty. + EXPECT_EQ(0, message.repeated_int32_size ()); + EXPECT_EQ(0, message.repeated_int64_size ()); + EXPECT_EQ(0, message.repeated_uint32_size ()); + EXPECT_EQ(0, message.repeated_uint64_size ()); + EXPECT_EQ(0, message.repeated_sint32_size ()); + EXPECT_EQ(0, message.repeated_sint64_size ()); + EXPECT_EQ(0, message.repeated_fixed32_size ()); + EXPECT_EQ(0, message.repeated_fixed64_size ()); + EXPECT_EQ(0, message.repeated_sfixed32_size()); + EXPECT_EQ(0, message.repeated_sfixed64_size()); + EXPECT_EQ(0, message.repeated_float_size ()); + EXPECT_EQ(0, message.repeated_double_size ()); + EXPECT_EQ(0, message.repeated_bool_size ()); + EXPECT_EQ(0, message.repeated_string_size ()); + EXPECT_EQ(0, message.repeated_bytes_size ()); + + EXPECT_EQ(0, message.repeatedgroup_size ()); + EXPECT_EQ(0, message.repeated_nested_message_size ()); + EXPECT_EQ(0, message.repeated_foreign_message_size()); + EXPECT_EQ(0, message.repeated_import_message_size ()); + EXPECT_EQ(0, message.repeated_nested_enum_size ()); + EXPECT_EQ(0, message.repeated_foreign_enum_size ()); + EXPECT_EQ(0, message.repeated_import_enum_size ()); + + + // has_blah() should also be false for all default fields. + EXPECT_FALSE(message.has_default_int32 ()); + EXPECT_FALSE(message.has_default_int64 ()); + EXPECT_FALSE(message.has_default_uint32 ()); + EXPECT_FALSE(message.has_default_uint64 ()); + EXPECT_FALSE(message.has_default_sint32 ()); + EXPECT_FALSE(message.has_default_sint64 ()); + EXPECT_FALSE(message.has_default_fixed32 ()); + EXPECT_FALSE(message.has_default_fixed64 ()); + EXPECT_FALSE(message.has_default_sfixed32()); + EXPECT_FALSE(message.has_default_sfixed64()); + EXPECT_FALSE(message.has_default_float ()); + EXPECT_FALSE(message.has_default_double ()); + EXPECT_FALSE(message.has_default_bool ()); + EXPECT_FALSE(message.has_default_string ()); + EXPECT_FALSE(message.has_default_bytes ()); + + EXPECT_FALSE(message.has_default_nested_enum ()); + EXPECT_FALSE(message.has_default_foreign_enum()); + EXPECT_FALSE(message.has_default_import_enum ()); + + + // Fields with defaults have their default values (duh). 
+ EXPECT_EQ( 41 , message.default_int32 ()); + EXPECT_EQ( 42 , message.default_int64 ()); + EXPECT_EQ( 43 , message.default_uint32 ()); + EXPECT_EQ( 44 , message.default_uint64 ()); + EXPECT_EQ(-45 , message.default_sint32 ()); + EXPECT_EQ( 46 , message.default_sint64 ()); + EXPECT_EQ( 47 , message.default_fixed32 ()); + EXPECT_EQ( 48 , message.default_fixed64 ()); + EXPECT_EQ( 49 , message.default_sfixed32()); + EXPECT_EQ(-50 , message.default_sfixed64()); + EXPECT_EQ( 51.5 , message.default_float ()); + EXPECT_EQ( 52e3 , message.default_double ()); + EXPECT_EQ(true , message.default_bool ()); + EXPECT_EQ("hello", message.default_string ()); + EXPECT_EQ("world", message.default_bytes ()); + + EXPECT_EQ(unittest::TestAllTypesLite::BAR , message.default_nested_enum ()); + EXPECT_EQ(unittest::FOREIGN_LITE_BAR , message.default_foreign_enum()); + EXPECT_EQ(unittest_import::IMPORT_LITE_BAR, message.default_import_enum ()); + +} + +// ------------------------------------------------------------------- + +void TestUtilLite::ExpectRepeatedFieldsModified( + const unittest::TestAllTypesLite& message) { + // ModifyRepeatedFields only sets the second repeated element of each + // field. In addition to verifying this, we also verify that the first + // element and size were *not* modified. + ASSERT_EQ(2, message.repeated_int32_size ()); + ASSERT_EQ(2, message.repeated_int64_size ()); + ASSERT_EQ(2, message.repeated_uint32_size ()); + ASSERT_EQ(2, message.repeated_uint64_size ()); + ASSERT_EQ(2, message.repeated_sint32_size ()); + ASSERT_EQ(2, message.repeated_sint64_size ()); + ASSERT_EQ(2, message.repeated_fixed32_size ()); + ASSERT_EQ(2, message.repeated_fixed64_size ()); + ASSERT_EQ(2, message.repeated_sfixed32_size()); + ASSERT_EQ(2, message.repeated_sfixed64_size()); + ASSERT_EQ(2, message.repeated_float_size ()); + ASSERT_EQ(2, message.repeated_double_size ()); + ASSERT_EQ(2, message.repeated_bool_size ()); + ASSERT_EQ(2, message.repeated_string_size ()); + ASSERT_EQ(2, message.repeated_bytes_size ()); + + ASSERT_EQ(2, message.repeatedgroup_size ()); + ASSERT_EQ(2, message.repeated_nested_message_size ()); + ASSERT_EQ(2, message.repeated_foreign_message_size()); + ASSERT_EQ(2, message.repeated_import_message_size ()); + ASSERT_EQ(2, message.repeated_nested_enum_size ()); + ASSERT_EQ(2, message.repeated_foreign_enum_size ()); + ASSERT_EQ(2, message.repeated_import_enum_size ()); + + + EXPECT_EQ(201 , message.repeated_int32 (0)); + EXPECT_EQ(202 , message.repeated_int64 (0)); + EXPECT_EQ(203 , message.repeated_uint32 (0)); + EXPECT_EQ(204 , message.repeated_uint64 (0)); + EXPECT_EQ(205 , message.repeated_sint32 (0)); + EXPECT_EQ(206 , message.repeated_sint64 (0)); + EXPECT_EQ(207 , message.repeated_fixed32 (0)); + EXPECT_EQ(208 , message.repeated_fixed64 (0)); + EXPECT_EQ(209 , message.repeated_sfixed32(0)); + EXPECT_EQ(210 , message.repeated_sfixed64(0)); + EXPECT_EQ(211 , message.repeated_float (0)); + EXPECT_EQ(212 , message.repeated_double (0)); + EXPECT_EQ(true , message.repeated_bool (0)); + EXPECT_EQ("215", message.repeated_string (0)); + EXPECT_EQ("216", message.repeated_bytes (0)); + + EXPECT_EQ(217, message.repeatedgroup (0).a()); + EXPECT_EQ(218, message.repeated_nested_message (0).bb()); + EXPECT_EQ(219, message.repeated_foreign_message(0).c()); + EXPECT_EQ(220, message.repeated_import_message (0).d()); + + EXPECT_EQ(unittest::TestAllTypesLite::BAR , message.repeated_nested_enum (0)); + EXPECT_EQ(unittest::FOREIGN_LITE_BAR , message.repeated_foreign_enum(0)); + 
EXPECT_EQ(unittest_import::IMPORT_LITE_BAR, message.repeated_import_enum (0)); + + + // Actually verify the second (modified) elements now. + EXPECT_EQ(501 , message.repeated_int32 (1)); + EXPECT_EQ(502 , message.repeated_int64 (1)); + EXPECT_EQ(503 , message.repeated_uint32 (1)); + EXPECT_EQ(504 , message.repeated_uint64 (1)); + EXPECT_EQ(505 , message.repeated_sint32 (1)); + EXPECT_EQ(506 , message.repeated_sint64 (1)); + EXPECT_EQ(507 , message.repeated_fixed32 (1)); + EXPECT_EQ(508 , message.repeated_fixed64 (1)); + EXPECT_EQ(509 , message.repeated_sfixed32(1)); + EXPECT_EQ(510 , message.repeated_sfixed64(1)); + EXPECT_EQ(511 , message.repeated_float (1)); + EXPECT_EQ(512 , message.repeated_double (1)); + EXPECT_EQ(true , message.repeated_bool (1)); + EXPECT_EQ("515", message.repeated_string (1)); + EXPECT_EQ("516", message.repeated_bytes (1)); + + EXPECT_EQ(517, message.repeatedgroup (1).a()); + EXPECT_EQ(518, message.repeated_nested_message (1).bb()); + EXPECT_EQ(519, message.repeated_foreign_message(1).c()); + EXPECT_EQ(520, message.repeated_import_message (1).d()); + + EXPECT_EQ(unittest::TestAllTypesLite::FOO , message.repeated_nested_enum (1)); + EXPECT_EQ(unittest::FOREIGN_LITE_FOO , message.repeated_foreign_enum(1)); + EXPECT_EQ(unittest_import::IMPORT_LITE_FOO, message.repeated_import_enum (1)); + +} + +// ------------------------------------------------------------------- + +void TestUtilLite::SetPackedFields(unittest::TestPackedTypesLite* message) { + message->add_packed_int32 (601); + message->add_packed_int64 (602); + message->add_packed_uint32 (603); + message->add_packed_uint64 (604); + message->add_packed_sint32 (605); + message->add_packed_sint64 (606); + message->add_packed_fixed32 (607); + message->add_packed_fixed64 (608); + message->add_packed_sfixed32(609); + message->add_packed_sfixed64(610); + message->add_packed_float (611); + message->add_packed_double (612); + message->add_packed_bool (true); + message->add_packed_enum (unittest::FOREIGN_LITE_BAR); + // add a second one of each field + message->add_packed_int32 (701); + message->add_packed_int64 (702); + message->add_packed_uint32 (703); + message->add_packed_uint64 (704); + message->add_packed_sint32 (705); + message->add_packed_sint64 (706); + message->add_packed_fixed32 (707); + message->add_packed_fixed64 (708); + message->add_packed_sfixed32(709); + message->add_packed_sfixed64(710); + message->add_packed_float (711); + message->add_packed_double (712); + message->add_packed_bool (false); + message->add_packed_enum (unittest::FOREIGN_LITE_BAZ); +} + +// ------------------------------------------------------------------- + +void TestUtilLite::ModifyPackedFields(unittest::TestPackedTypesLite* message) { + message->set_packed_int32 (1, 801); + message->set_packed_int64 (1, 802); + message->set_packed_uint32 (1, 803); + message->set_packed_uint64 (1, 804); + message->set_packed_sint32 (1, 805); + message->set_packed_sint64 (1, 806); + message->set_packed_fixed32 (1, 807); + message->set_packed_fixed64 (1, 808); + message->set_packed_sfixed32(1, 809); + message->set_packed_sfixed64(1, 810); + message->set_packed_float (1, 811); + message->set_packed_double (1, 812); + message->set_packed_bool (1, true); + message->set_packed_enum (1, unittest::FOREIGN_LITE_FOO); +} + +// ------------------------------------------------------------------- + +void TestUtilLite::ExpectPackedFieldsSet( + const unittest::TestPackedTypesLite& message) { + ASSERT_EQ(2, message.packed_int32_size ()); + ASSERT_EQ(2, 
message.packed_int64_size ()); + ASSERT_EQ(2, message.packed_uint32_size ()); + ASSERT_EQ(2, message.packed_uint64_size ()); + ASSERT_EQ(2, message.packed_sint32_size ()); + ASSERT_EQ(2, message.packed_sint64_size ()); + ASSERT_EQ(2, message.packed_fixed32_size ()); + ASSERT_EQ(2, message.packed_fixed64_size ()); + ASSERT_EQ(2, message.packed_sfixed32_size()); + ASSERT_EQ(2, message.packed_sfixed64_size()); + ASSERT_EQ(2, message.packed_float_size ()); + ASSERT_EQ(2, message.packed_double_size ()); + ASSERT_EQ(2, message.packed_bool_size ()); + ASSERT_EQ(2, message.packed_enum_size ()); + + EXPECT_EQ(601 , message.packed_int32 (0)); + EXPECT_EQ(602 , message.packed_int64 (0)); + EXPECT_EQ(603 , message.packed_uint32 (0)); + EXPECT_EQ(604 , message.packed_uint64 (0)); + EXPECT_EQ(605 , message.packed_sint32 (0)); + EXPECT_EQ(606 , message.packed_sint64 (0)); + EXPECT_EQ(607 , message.packed_fixed32 (0)); + EXPECT_EQ(608 , message.packed_fixed64 (0)); + EXPECT_EQ(609 , message.packed_sfixed32(0)); + EXPECT_EQ(610 , message.packed_sfixed64(0)); + EXPECT_EQ(611 , message.packed_float (0)); + EXPECT_EQ(612 , message.packed_double (0)); + EXPECT_EQ(true , message.packed_bool (0)); + EXPECT_EQ(unittest::FOREIGN_LITE_BAR, message.packed_enum(0)); + + EXPECT_EQ(701 , message.packed_int32 (1)); + EXPECT_EQ(702 , message.packed_int64 (1)); + EXPECT_EQ(703 , message.packed_uint32 (1)); + EXPECT_EQ(704 , message.packed_uint64 (1)); + EXPECT_EQ(705 , message.packed_sint32 (1)); + EXPECT_EQ(706 , message.packed_sint64 (1)); + EXPECT_EQ(707 , message.packed_fixed32 (1)); + EXPECT_EQ(708 , message.packed_fixed64 (1)); + EXPECT_EQ(709 , message.packed_sfixed32(1)); + EXPECT_EQ(710 , message.packed_sfixed64(1)); + EXPECT_EQ(711 , message.packed_float (1)); + EXPECT_EQ(712 , message.packed_double (1)); + EXPECT_EQ(false, message.packed_bool (1)); + EXPECT_EQ(unittest::FOREIGN_LITE_BAZ, message.packed_enum(1)); +} + +// ------------------------------------------------------------------- + +void TestUtilLite::ExpectPackedClear( + const unittest::TestPackedTypesLite& message) { + // Packed repeated fields are empty. + EXPECT_EQ(0, message.packed_int32_size ()); + EXPECT_EQ(0, message.packed_int64_size ()); + EXPECT_EQ(0, message.packed_uint32_size ()); + EXPECT_EQ(0, message.packed_uint64_size ()); + EXPECT_EQ(0, message.packed_sint32_size ()); + EXPECT_EQ(0, message.packed_sint64_size ()); + EXPECT_EQ(0, message.packed_fixed32_size ()); + EXPECT_EQ(0, message.packed_fixed64_size ()); + EXPECT_EQ(0, message.packed_sfixed32_size()); + EXPECT_EQ(0, message.packed_sfixed64_size()); + EXPECT_EQ(0, message.packed_float_size ()); + EXPECT_EQ(0, message.packed_double_size ()); + EXPECT_EQ(0, message.packed_bool_size ()); + EXPECT_EQ(0, message.packed_enum_size ()); +} + +// ------------------------------------------------------------------- + +void TestUtilLite::ExpectPackedFieldsModified( + const unittest::TestPackedTypesLite& message) { + // Do the same for packed repeated fields. 
+ ASSERT_EQ(2, message.packed_int32_size ()); + ASSERT_EQ(2, message.packed_int64_size ()); + ASSERT_EQ(2, message.packed_uint32_size ()); + ASSERT_EQ(2, message.packed_uint64_size ()); + ASSERT_EQ(2, message.packed_sint32_size ()); + ASSERT_EQ(2, message.packed_sint64_size ()); + ASSERT_EQ(2, message.packed_fixed32_size ()); + ASSERT_EQ(2, message.packed_fixed64_size ()); + ASSERT_EQ(2, message.packed_sfixed32_size()); + ASSERT_EQ(2, message.packed_sfixed64_size()); + ASSERT_EQ(2, message.packed_float_size ()); + ASSERT_EQ(2, message.packed_double_size ()); + ASSERT_EQ(2, message.packed_bool_size ()); + ASSERT_EQ(2, message.packed_enum_size ()); + + EXPECT_EQ(601 , message.packed_int32 (0)); + EXPECT_EQ(602 , message.packed_int64 (0)); + EXPECT_EQ(603 , message.packed_uint32 (0)); + EXPECT_EQ(604 , message.packed_uint64 (0)); + EXPECT_EQ(605 , message.packed_sint32 (0)); + EXPECT_EQ(606 , message.packed_sint64 (0)); + EXPECT_EQ(607 , message.packed_fixed32 (0)); + EXPECT_EQ(608 , message.packed_fixed64 (0)); + EXPECT_EQ(609 , message.packed_sfixed32(0)); + EXPECT_EQ(610 , message.packed_sfixed64(0)); + EXPECT_EQ(611 , message.packed_float (0)); + EXPECT_EQ(612 , message.packed_double (0)); + EXPECT_EQ(true , message.packed_bool (0)); + EXPECT_EQ(unittest::FOREIGN_LITE_BAR, message.packed_enum(0)); + // Actually verify the second (modified) elements now. + EXPECT_EQ(801 , message.packed_int32 (1)); + EXPECT_EQ(802 , message.packed_int64 (1)); + EXPECT_EQ(803 , message.packed_uint32 (1)); + EXPECT_EQ(804 , message.packed_uint64 (1)); + EXPECT_EQ(805 , message.packed_sint32 (1)); + EXPECT_EQ(806 , message.packed_sint64 (1)); + EXPECT_EQ(807 , message.packed_fixed32 (1)); + EXPECT_EQ(808 , message.packed_fixed64 (1)); + EXPECT_EQ(809 , message.packed_sfixed32(1)); + EXPECT_EQ(810 , message.packed_sfixed64(1)); + EXPECT_EQ(811 , message.packed_float (1)); + EXPECT_EQ(812 , message.packed_double (1)); + EXPECT_EQ(true , message.packed_bool (1)); + EXPECT_EQ(unittest::FOREIGN_LITE_FOO, message.packed_enum(1)); +} + +// =================================================================== +// Extensions +// +// All this code is exactly equivalent to the above code except that it's +// manipulating extension fields instead of normal ones. +// +// I gave up on the 80-char limit here. Sorry. 
+ +void TestUtilLite::SetAllExtensions(unittest::TestAllExtensionsLite* message) { + message->SetExtension(unittest::optional_int32_extension_lite , 101); + message->SetExtension(unittest::optional_int64_extension_lite , 102); + message->SetExtension(unittest::optional_uint32_extension_lite , 103); + message->SetExtension(unittest::optional_uint64_extension_lite , 104); + message->SetExtension(unittest::optional_sint32_extension_lite , 105); + message->SetExtension(unittest::optional_sint64_extension_lite , 106); + message->SetExtension(unittest::optional_fixed32_extension_lite , 107); + message->SetExtension(unittest::optional_fixed64_extension_lite , 108); + message->SetExtension(unittest::optional_sfixed32_extension_lite, 109); + message->SetExtension(unittest::optional_sfixed64_extension_lite, 110); + message->SetExtension(unittest::optional_float_extension_lite , 111); + message->SetExtension(unittest::optional_double_extension_lite , 112); + message->SetExtension(unittest::optional_bool_extension_lite , true); + message->SetExtension(unittest::optional_string_extension_lite , "115"); + message->SetExtension(unittest::optional_bytes_extension_lite , "116"); + + message->MutableExtension(unittest::optionalgroup_extension_lite )->set_a(117); + message->MutableExtension(unittest::optional_nested_message_extension_lite )->set_bb(118); + message->MutableExtension(unittest::optional_foreign_message_extension_lite)->set_c(119); + message->MutableExtension(unittest::optional_import_message_extension_lite )->set_d(120); + + message->SetExtension(unittest::optional_nested_enum_extension_lite , unittest::TestAllTypesLite::BAZ ); + message->SetExtension(unittest::optional_foreign_enum_extension_lite, unittest::FOREIGN_LITE_BAZ ); + message->SetExtension(unittest::optional_import_enum_extension_lite , unittest_import::IMPORT_LITE_BAZ); + + + // ----------------------------------------------------------------- + + message->AddExtension(unittest::repeated_int32_extension_lite , 201); + message->AddExtension(unittest::repeated_int64_extension_lite , 202); + message->AddExtension(unittest::repeated_uint32_extension_lite , 203); + message->AddExtension(unittest::repeated_uint64_extension_lite , 204); + message->AddExtension(unittest::repeated_sint32_extension_lite , 205); + message->AddExtension(unittest::repeated_sint64_extension_lite , 206); + message->AddExtension(unittest::repeated_fixed32_extension_lite , 207); + message->AddExtension(unittest::repeated_fixed64_extension_lite , 208); + message->AddExtension(unittest::repeated_sfixed32_extension_lite, 209); + message->AddExtension(unittest::repeated_sfixed64_extension_lite, 210); + message->AddExtension(unittest::repeated_float_extension_lite , 211); + message->AddExtension(unittest::repeated_double_extension_lite , 212); + message->AddExtension(unittest::repeated_bool_extension_lite , true); + message->AddExtension(unittest::repeated_string_extension_lite , "215"); + message->AddExtension(unittest::repeated_bytes_extension_lite , "216"); + + message->AddExtension(unittest::repeatedgroup_extension_lite )->set_a(217); + message->AddExtension(unittest::repeated_nested_message_extension_lite )->set_bb(218); + message->AddExtension(unittest::repeated_foreign_message_extension_lite)->set_c(219); + message->AddExtension(unittest::repeated_import_message_extension_lite )->set_d(220); + + message->AddExtension(unittest::repeated_nested_enum_extension_lite , unittest::TestAllTypesLite::BAR ); + 
message->AddExtension(unittest::repeated_foreign_enum_extension_lite, unittest::FOREIGN_LITE_BAR ); + message->AddExtension(unittest::repeated_import_enum_extension_lite , unittest_import::IMPORT_LITE_BAR); + + + // Add a second one of each field. + message->AddExtension(unittest::repeated_int32_extension_lite , 301); + message->AddExtension(unittest::repeated_int64_extension_lite , 302); + message->AddExtension(unittest::repeated_uint32_extension_lite , 303); + message->AddExtension(unittest::repeated_uint64_extension_lite , 304); + message->AddExtension(unittest::repeated_sint32_extension_lite , 305); + message->AddExtension(unittest::repeated_sint64_extension_lite , 306); + message->AddExtension(unittest::repeated_fixed32_extension_lite , 307); + message->AddExtension(unittest::repeated_fixed64_extension_lite , 308); + message->AddExtension(unittest::repeated_sfixed32_extension_lite, 309); + message->AddExtension(unittest::repeated_sfixed64_extension_lite, 310); + message->AddExtension(unittest::repeated_float_extension_lite , 311); + message->AddExtension(unittest::repeated_double_extension_lite , 312); + message->AddExtension(unittest::repeated_bool_extension_lite , false); + message->AddExtension(unittest::repeated_string_extension_lite , "315"); + message->AddExtension(unittest::repeated_bytes_extension_lite , "316"); + + message->AddExtension(unittest::repeatedgroup_extension_lite )->set_a(317); + message->AddExtension(unittest::repeated_nested_message_extension_lite )->set_bb(318); + message->AddExtension(unittest::repeated_foreign_message_extension_lite)->set_c(319); + message->AddExtension(unittest::repeated_import_message_extension_lite )->set_d(320); + + message->AddExtension(unittest::repeated_nested_enum_extension_lite , unittest::TestAllTypesLite::BAZ ); + message->AddExtension(unittest::repeated_foreign_enum_extension_lite, unittest::FOREIGN_LITE_BAZ ); + message->AddExtension(unittest::repeated_import_enum_extension_lite , unittest_import::IMPORT_LITE_BAZ); + + + // ----------------------------------------------------------------- + + message->SetExtension(unittest::default_int32_extension_lite , 401); + message->SetExtension(unittest::default_int64_extension_lite , 402); + message->SetExtension(unittest::default_uint32_extension_lite , 403); + message->SetExtension(unittest::default_uint64_extension_lite , 404); + message->SetExtension(unittest::default_sint32_extension_lite , 405); + message->SetExtension(unittest::default_sint64_extension_lite , 406); + message->SetExtension(unittest::default_fixed32_extension_lite , 407); + message->SetExtension(unittest::default_fixed64_extension_lite , 408); + message->SetExtension(unittest::default_sfixed32_extension_lite, 409); + message->SetExtension(unittest::default_sfixed64_extension_lite, 410); + message->SetExtension(unittest::default_float_extension_lite , 411); + message->SetExtension(unittest::default_double_extension_lite , 412); + message->SetExtension(unittest::default_bool_extension_lite , false); + message->SetExtension(unittest::default_string_extension_lite , "415"); + message->SetExtension(unittest::default_bytes_extension_lite , "416"); + + message->SetExtension(unittest::default_nested_enum_extension_lite , unittest::TestAllTypesLite::FOO ); + message->SetExtension(unittest::default_foreign_enum_extension_lite, unittest::FOREIGN_LITE_FOO ); + message->SetExtension(unittest::default_import_enum_extension_lite , unittest_import::IMPORT_LITE_FOO); + +} + +// 
------------------------------------------------------------------- + +void TestUtilLite::ModifyRepeatedExtensions( + unittest::TestAllExtensionsLite* message) { + message->SetExtension(unittest::repeated_int32_extension_lite , 1, 501); + message->SetExtension(unittest::repeated_int64_extension_lite , 1, 502); + message->SetExtension(unittest::repeated_uint32_extension_lite , 1, 503); + message->SetExtension(unittest::repeated_uint64_extension_lite , 1, 504); + message->SetExtension(unittest::repeated_sint32_extension_lite , 1, 505); + message->SetExtension(unittest::repeated_sint64_extension_lite , 1, 506); + message->SetExtension(unittest::repeated_fixed32_extension_lite , 1, 507); + message->SetExtension(unittest::repeated_fixed64_extension_lite , 1, 508); + message->SetExtension(unittest::repeated_sfixed32_extension_lite, 1, 509); + message->SetExtension(unittest::repeated_sfixed64_extension_lite, 1, 510); + message->SetExtension(unittest::repeated_float_extension_lite , 1, 511); + message->SetExtension(unittest::repeated_double_extension_lite , 1, 512); + message->SetExtension(unittest::repeated_bool_extension_lite , 1, true); + message->SetExtension(unittest::repeated_string_extension_lite , 1, "515"); + message->SetExtension(unittest::repeated_bytes_extension_lite , 1, "516"); + + message->MutableExtension(unittest::repeatedgroup_extension_lite , 1)->set_a(517); + message->MutableExtension(unittest::repeated_nested_message_extension_lite , 1)->set_bb(518); + message->MutableExtension(unittest::repeated_foreign_message_extension_lite, 1)->set_c(519); + message->MutableExtension(unittest::repeated_import_message_extension_lite , 1)->set_d(520); + + message->SetExtension(unittest::repeated_nested_enum_extension_lite , 1, unittest::TestAllTypesLite::FOO ); + message->SetExtension(unittest::repeated_foreign_enum_extension_lite, 1, unittest::FOREIGN_LITE_FOO ); + message->SetExtension(unittest::repeated_import_enum_extension_lite , 1, unittest_import::IMPORT_LITE_FOO); + +} + +// ------------------------------------------------------------------- + +void TestUtilLite::ExpectAllExtensionsSet( + const unittest::TestAllExtensionsLite& message) { + EXPECT_TRUE(message.HasExtension(unittest::optional_int32_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::optional_int64_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::optional_uint32_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::optional_uint64_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::optional_sint32_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::optional_sint64_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::optional_fixed32_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::optional_fixed64_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::optional_sfixed32_extension_lite)); + EXPECT_TRUE(message.HasExtension(unittest::optional_sfixed64_extension_lite)); + EXPECT_TRUE(message.HasExtension(unittest::optional_float_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::optional_double_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::optional_bool_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::optional_string_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::optional_bytes_extension_lite )); + + EXPECT_TRUE(message.HasExtension(unittest::optionalgroup_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::optional_nested_message_extension_lite )); + 
EXPECT_TRUE(message.HasExtension(unittest::optional_foreign_message_extension_lite)); + EXPECT_TRUE(message.HasExtension(unittest::optional_import_message_extension_lite )); + + EXPECT_TRUE(message.GetExtension(unittest::optionalgroup_extension_lite ).has_a()); + EXPECT_TRUE(message.GetExtension(unittest::optional_nested_message_extension_lite ).has_bb()); + EXPECT_TRUE(message.GetExtension(unittest::optional_foreign_message_extension_lite).has_c()); + EXPECT_TRUE(message.GetExtension(unittest::optional_import_message_extension_lite ).has_d()); + + EXPECT_TRUE(message.HasExtension(unittest::optional_nested_enum_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::optional_foreign_enum_extension_lite)); + EXPECT_TRUE(message.HasExtension(unittest::optional_import_enum_extension_lite )); + + + EXPECT_EQ(101 , message.GetExtension(unittest::optional_int32_extension_lite )); + EXPECT_EQ(102 , message.GetExtension(unittest::optional_int64_extension_lite )); + EXPECT_EQ(103 , message.GetExtension(unittest::optional_uint32_extension_lite )); + EXPECT_EQ(104 , message.GetExtension(unittest::optional_uint64_extension_lite )); + EXPECT_EQ(105 , message.GetExtension(unittest::optional_sint32_extension_lite )); + EXPECT_EQ(106 , message.GetExtension(unittest::optional_sint64_extension_lite )); + EXPECT_EQ(107 , message.GetExtension(unittest::optional_fixed32_extension_lite )); + EXPECT_EQ(108 , message.GetExtension(unittest::optional_fixed64_extension_lite )); + EXPECT_EQ(109 , message.GetExtension(unittest::optional_sfixed32_extension_lite)); + EXPECT_EQ(110 , message.GetExtension(unittest::optional_sfixed64_extension_lite)); + EXPECT_EQ(111 , message.GetExtension(unittest::optional_float_extension_lite )); + EXPECT_EQ(112 , message.GetExtension(unittest::optional_double_extension_lite )); + EXPECT_EQ(true , message.GetExtension(unittest::optional_bool_extension_lite )); + EXPECT_EQ("115", message.GetExtension(unittest::optional_string_extension_lite )); + EXPECT_EQ("116", message.GetExtension(unittest::optional_bytes_extension_lite )); + + EXPECT_EQ(117, message.GetExtension(unittest::optionalgroup_extension_lite ).a()); + EXPECT_EQ(118, message.GetExtension(unittest::optional_nested_message_extension_lite ).bb()); + EXPECT_EQ(119, message.GetExtension(unittest::optional_foreign_message_extension_lite).c()); + EXPECT_EQ(120, message.GetExtension(unittest::optional_import_message_extension_lite ).d()); + + EXPECT_EQ(unittest::TestAllTypesLite::BAZ , message.GetExtension(unittest::optional_nested_enum_extension_lite )); + EXPECT_EQ(unittest::FOREIGN_LITE_BAZ , message.GetExtension(unittest::optional_foreign_enum_extension_lite)); + EXPECT_EQ(unittest_import::IMPORT_LITE_BAZ, message.GetExtension(unittest::optional_import_enum_extension_lite )); + + + // ----------------------------------------------------------------- + + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_int32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_int64_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_uint32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_uint64_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sint32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sint64_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_fixed32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_fixed64_extension_lite )); + ASSERT_EQ(2, 
message.ExtensionSize(unittest::repeated_sfixed32_extension_lite)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sfixed64_extension_lite)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_float_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_double_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_bool_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_string_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_bytes_extension_lite )); + + ASSERT_EQ(2, message.ExtensionSize(unittest::repeatedgroup_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_nested_message_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_foreign_message_extension_lite)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_import_message_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_nested_enum_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_foreign_enum_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_import_enum_extension_lite )); + + + EXPECT_EQ(201 , message.GetExtension(unittest::repeated_int32_extension_lite , 0)); + EXPECT_EQ(202 , message.GetExtension(unittest::repeated_int64_extension_lite , 0)); + EXPECT_EQ(203 , message.GetExtension(unittest::repeated_uint32_extension_lite , 0)); + EXPECT_EQ(204 , message.GetExtension(unittest::repeated_uint64_extension_lite , 0)); + EXPECT_EQ(205 , message.GetExtension(unittest::repeated_sint32_extension_lite , 0)); + EXPECT_EQ(206 , message.GetExtension(unittest::repeated_sint64_extension_lite , 0)); + EXPECT_EQ(207 , message.GetExtension(unittest::repeated_fixed32_extension_lite , 0)); + EXPECT_EQ(208 , message.GetExtension(unittest::repeated_fixed64_extension_lite , 0)); + EXPECT_EQ(209 , message.GetExtension(unittest::repeated_sfixed32_extension_lite, 0)); + EXPECT_EQ(210 , message.GetExtension(unittest::repeated_sfixed64_extension_lite, 0)); + EXPECT_EQ(211 , message.GetExtension(unittest::repeated_float_extension_lite , 0)); + EXPECT_EQ(212 , message.GetExtension(unittest::repeated_double_extension_lite , 0)); + EXPECT_EQ(true , message.GetExtension(unittest::repeated_bool_extension_lite , 0)); + EXPECT_EQ("215", message.GetExtension(unittest::repeated_string_extension_lite , 0)); + EXPECT_EQ("216", message.GetExtension(unittest::repeated_bytes_extension_lite , 0)); + + EXPECT_EQ(217, message.GetExtension(unittest::repeatedgroup_extension_lite , 0).a()); + EXPECT_EQ(218, message.GetExtension(unittest::repeated_nested_message_extension_lite , 0).bb()); + EXPECT_EQ(219, message.GetExtension(unittest::repeated_foreign_message_extension_lite, 0).c()); + EXPECT_EQ(220, message.GetExtension(unittest::repeated_import_message_extension_lite , 0).d()); + + EXPECT_EQ(unittest::TestAllTypesLite::BAR , message.GetExtension(unittest::repeated_nested_enum_extension_lite , 0)); + EXPECT_EQ(unittest::FOREIGN_LITE_BAR , message.GetExtension(unittest::repeated_foreign_enum_extension_lite, 0)); + EXPECT_EQ(unittest_import::IMPORT_LITE_BAR, message.GetExtension(unittest::repeated_import_enum_extension_lite , 0)); + + + EXPECT_EQ(301 , message.GetExtension(unittest::repeated_int32_extension_lite , 1)); + EXPECT_EQ(302 , message.GetExtension(unittest::repeated_int64_extension_lite , 1)); + EXPECT_EQ(303 , message.GetExtension(unittest::repeated_uint32_extension_lite , 1)); + EXPECT_EQ(304 , 
message.GetExtension(unittest::repeated_uint64_extension_lite , 1)); + EXPECT_EQ(305 , message.GetExtension(unittest::repeated_sint32_extension_lite , 1)); + EXPECT_EQ(306 , message.GetExtension(unittest::repeated_sint64_extension_lite , 1)); + EXPECT_EQ(307 , message.GetExtension(unittest::repeated_fixed32_extension_lite , 1)); + EXPECT_EQ(308 , message.GetExtension(unittest::repeated_fixed64_extension_lite , 1)); + EXPECT_EQ(309 , message.GetExtension(unittest::repeated_sfixed32_extension_lite, 1)); + EXPECT_EQ(310 , message.GetExtension(unittest::repeated_sfixed64_extension_lite, 1)); + EXPECT_EQ(311 , message.GetExtension(unittest::repeated_float_extension_lite , 1)); + EXPECT_EQ(312 , message.GetExtension(unittest::repeated_double_extension_lite , 1)); + EXPECT_EQ(false, message.GetExtension(unittest::repeated_bool_extension_lite , 1)); + EXPECT_EQ("315", message.GetExtension(unittest::repeated_string_extension_lite , 1)); + EXPECT_EQ("316", message.GetExtension(unittest::repeated_bytes_extension_lite , 1)); + + EXPECT_EQ(317, message.GetExtension(unittest::repeatedgroup_extension_lite , 1).a()); + EXPECT_EQ(318, message.GetExtension(unittest::repeated_nested_message_extension_lite , 1).bb()); + EXPECT_EQ(319, message.GetExtension(unittest::repeated_foreign_message_extension_lite, 1).c()); + EXPECT_EQ(320, message.GetExtension(unittest::repeated_import_message_extension_lite , 1).d()); + + EXPECT_EQ(unittest::TestAllTypesLite::BAZ , message.GetExtension(unittest::repeated_nested_enum_extension_lite , 1)); + EXPECT_EQ(unittest::FOREIGN_LITE_BAZ , message.GetExtension(unittest::repeated_foreign_enum_extension_lite, 1)); + EXPECT_EQ(unittest_import::IMPORT_LITE_BAZ, message.GetExtension(unittest::repeated_import_enum_extension_lite , 1)); + + + // ----------------------------------------------------------------- + + EXPECT_TRUE(message.HasExtension(unittest::default_int32_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::default_int64_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::default_uint32_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::default_uint64_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::default_sint32_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::default_sint64_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::default_fixed32_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::default_fixed64_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::default_sfixed32_extension_lite)); + EXPECT_TRUE(message.HasExtension(unittest::default_sfixed64_extension_lite)); + EXPECT_TRUE(message.HasExtension(unittest::default_float_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::default_double_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::default_bool_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::default_string_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::default_bytes_extension_lite )); + + EXPECT_TRUE(message.HasExtension(unittest::default_nested_enum_extension_lite )); + EXPECT_TRUE(message.HasExtension(unittest::default_foreign_enum_extension_lite)); + EXPECT_TRUE(message.HasExtension(unittest::default_import_enum_extension_lite )); + + + EXPECT_EQ(401 , message.GetExtension(unittest::default_int32_extension_lite )); + EXPECT_EQ(402 , message.GetExtension(unittest::default_int64_extension_lite )); + EXPECT_EQ(403 , message.GetExtension(unittest::default_uint32_extension_lite )); + EXPECT_EQ(404 , 
message.GetExtension(unittest::default_uint64_extension_lite )); + EXPECT_EQ(405 , message.GetExtension(unittest::default_sint32_extension_lite )); + EXPECT_EQ(406 , message.GetExtension(unittest::default_sint64_extension_lite )); + EXPECT_EQ(407 , message.GetExtension(unittest::default_fixed32_extension_lite )); + EXPECT_EQ(408 , message.GetExtension(unittest::default_fixed64_extension_lite )); + EXPECT_EQ(409 , message.GetExtension(unittest::default_sfixed32_extension_lite)); + EXPECT_EQ(410 , message.GetExtension(unittest::default_sfixed64_extension_lite)); + EXPECT_EQ(411 , message.GetExtension(unittest::default_float_extension_lite )); + EXPECT_EQ(412 , message.GetExtension(unittest::default_double_extension_lite )); + EXPECT_EQ(false, message.GetExtension(unittest::default_bool_extension_lite )); + EXPECT_EQ("415", message.GetExtension(unittest::default_string_extension_lite )); + EXPECT_EQ("416", message.GetExtension(unittest::default_bytes_extension_lite )); + + EXPECT_EQ(unittest::TestAllTypesLite::FOO , message.GetExtension(unittest::default_nested_enum_extension_lite )); + EXPECT_EQ(unittest::FOREIGN_LITE_FOO , message.GetExtension(unittest::default_foreign_enum_extension_lite)); + EXPECT_EQ(unittest_import::IMPORT_LITE_FOO, message.GetExtension(unittest::default_import_enum_extension_lite )); + +} + +// ------------------------------------------------------------------- + +void TestUtilLite::ExpectExtensionsClear( + const unittest::TestAllExtensionsLite& message) { + string serialized; + ASSERT_TRUE(message.SerializeToString(&serialized)); + EXPECT_EQ("", serialized); + EXPECT_EQ(0, message.ByteSize()); + + // has_blah() should initially be false for all optional fields. + EXPECT_FALSE(message.HasExtension(unittest::optional_int32_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::optional_int64_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::optional_uint32_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::optional_uint64_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::optional_sint32_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::optional_sint64_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::optional_fixed32_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::optional_fixed64_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::optional_sfixed32_extension_lite)); + EXPECT_FALSE(message.HasExtension(unittest::optional_sfixed64_extension_lite)); + EXPECT_FALSE(message.HasExtension(unittest::optional_float_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::optional_double_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::optional_bool_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::optional_string_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::optional_bytes_extension_lite )); + + EXPECT_FALSE(message.HasExtension(unittest::optionalgroup_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::optional_nested_message_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::optional_foreign_message_extension_lite)); + EXPECT_FALSE(message.HasExtension(unittest::optional_import_message_extension_lite )); + + EXPECT_FALSE(message.HasExtension(unittest::optional_nested_enum_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::optional_foreign_enum_extension_lite)); + EXPECT_FALSE(message.HasExtension(unittest::optional_import_enum_extension_lite )); + + + // Optional 
fields without defaults are set to zero or something like it. + EXPECT_EQ(0 , message.GetExtension(unittest::optional_int32_extension_lite )); + EXPECT_EQ(0 , message.GetExtension(unittest::optional_int64_extension_lite )); + EXPECT_EQ(0 , message.GetExtension(unittest::optional_uint32_extension_lite )); + EXPECT_EQ(0 , message.GetExtension(unittest::optional_uint64_extension_lite )); + EXPECT_EQ(0 , message.GetExtension(unittest::optional_sint32_extension_lite )); + EXPECT_EQ(0 , message.GetExtension(unittest::optional_sint64_extension_lite )); + EXPECT_EQ(0 , message.GetExtension(unittest::optional_fixed32_extension_lite )); + EXPECT_EQ(0 , message.GetExtension(unittest::optional_fixed64_extension_lite )); + EXPECT_EQ(0 , message.GetExtension(unittest::optional_sfixed32_extension_lite)); + EXPECT_EQ(0 , message.GetExtension(unittest::optional_sfixed64_extension_lite)); + EXPECT_EQ(0 , message.GetExtension(unittest::optional_float_extension_lite )); + EXPECT_EQ(0 , message.GetExtension(unittest::optional_double_extension_lite )); + EXPECT_EQ(false, message.GetExtension(unittest::optional_bool_extension_lite )); + EXPECT_EQ("" , message.GetExtension(unittest::optional_string_extension_lite )); + EXPECT_EQ("" , message.GetExtension(unittest::optional_bytes_extension_lite )); + + // Embedded messages should also be clear. + EXPECT_FALSE(message.GetExtension(unittest::optionalgroup_extension_lite ).has_a()); + EXPECT_FALSE(message.GetExtension(unittest::optional_nested_message_extension_lite ).has_bb()); + EXPECT_FALSE(message.GetExtension(unittest::optional_foreign_message_extension_lite).has_c()); + EXPECT_FALSE(message.GetExtension(unittest::optional_import_message_extension_lite ).has_d()); + + EXPECT_EQ(0, message.GetExtension(unittest::optionalgroup_extension_lite ).a()); + EXPECT_EQ(0, message.GetExtension(unittest::optional_nested_message_extension_lite ).bb()); + EXPECT_EQ(0, message.GetExtension(unittest::optional_foreign_message_extension_lite).c()); + EXPECT_EQ(0, message.GetExtension(unittest::optional_import_message_extension_lite ).d()); + + // Enums without defaults are set to the first value in the enum. + EXPECT_EQ(unittest::TestAllTypesLite::FOO , message.GetExtension(unittest::optional_nested_enum_extension_lite )); + EXPECT_EQ(unittest::FOREIGN_LITE_FOO , message.GetExtension(unittest::optional_foreign_enum_extension_lite)); + EXPECT_EQ(unittest_import::IMPORT_LITE_FOO, message.GetExtension(unittest::optional_import_enum_extension_lite )); + + + // Repeated fields are empty. 
+ EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_int32_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_int64_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_uint32_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_uint64_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_sint32_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_sint64_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_fixed32_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_fixed64_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_sfixed32_extension_lite)); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_sfixed64_extension_lite)); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_float_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_double_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_bool_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_string_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_bytes_extension_lite )); + + EXPECT_EQ(0, message.ExtensionSize(unittest::repeatedgroup_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_nested_message_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_foreign_message_extension_lite)); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_import_message_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_nested_enum_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_foreign_enum_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::repeated_import_enum_extension_lite )); + + + // has_blah() should also be false for all default fields. + EXPECT_FALSE(message.HasExtension(unittest::default_int32_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::default_int64_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::default_uint32_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::default_uint64_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::default_sint32_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::default_sint64_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::default_fixed32_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::default_fixed64_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::default_sfixed32_extension_lite)); + EXPECT_FALSE(message.HasExtension(unittest::default_sfixed64_extension_lite)); + EXPECT_FALSE(message.HasExtension(unittest::default_float_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::default_double_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::default_bool_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::default_string_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::default_bytes_extension_lite )); + + EXPECT_FALSE(message.HasExtension(unittest::default_nested_enum_extension_lite )); + EXPECT_FALSE(message.HasExtension(unittest::default_foreign_enum_extension_lite)); + EXPECT_FALSE(message.HasExtension(unittest::default_import_enum_extension_lite )); + + + // Fields with defaults have their default values (duh). 
+ EXPECT_EQ( 41 , message.GetExtension(unittest::default_int32_extension_lite )); + EXPECT_EQ( 42 , message.GetExtension(unittest::default_int64_extension_lite )); + EXPECT_EQ( 43 , message.GetExtension(unittest::default_uint32_extension_lite )); + EXPECT_EQ( 44 , message.GetExtension(unittest::default_uint64_extension_lite )); + EXPECT_EQ(-45 , message.GetExtension(unittest::default_sint32_extension_lite )); + EXPECT_EQ( 46 , message.GetExtension(unittest::default_sint64_extension_lite )); + EXPECT_EQ( 47 , message.GetExtension(unittest::default_fixed32_extension_lite )); + EXPECT_EQ( 48 , message.GetExtension(unittest::default_fixed64_extension_lite )); + EXPECT_EQ( 49 , message.GetExtension(unittest::default_sfixed32_extension_lite)); + EXPECT_EQ(-50 , message.GetExtension(unittest::default_sfixed64_extension_lite)); + EXPECT_EQ( 51.5 , message.GetExtension(unittest::default_float_extension_lite )); + EXPECT_EQ( 52e3 , message.GetExtension(unittest::default_double_extension_lite )); + EXPECT_EQ(true , message.GetExtension(unittest::default_bool_extension_lite )); + EXPECT_EQ("hello", message.GetExtension(unittest::default_string_extension_lite )); + EXPECT_EQ("world", message.GetExtension(unittest::default_bytes_extension_lite )); + + EXPECT_EQ(unittest::TestAllTypesLite::BAR , message.GetExtension(unittest::default_nested_enum_extension_lite )); + EXPECT_EQ(unittest::FOREIGN_LITE_BAR , message.GetExtension(unittest::default_foreign_enum_extension_lite)); + EXPECT_EQ(unittest_import::IMPORT_LITE_BAR, message.GetExtension(unittest::default_import_enum_extension_lite )); + +} + +// ------------------------------------------------------------------- + +void TestUtilLite::ExpectRepeatedExtensionsModified( + const unittest::TestAllExtensionsLite& message) { + // ModifyRepeatedFields only sets the second repeated element of each + // field. In addition to verifying this, we also verify that the first + // element and size were *not* modified. 
+ ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_int32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_int64_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_uint32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_uint64_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sint32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sint64_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_fixed32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_fixed64_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sfixed32_extension_lite)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_sfixed64_extension_lite)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_float_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_double_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_bool_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_string_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_bytes_extension_lite )); + + ASSERT_EQ(2, message.ExtensionSize(unittest::repeatedgroup_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_nested_message_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_foreign_message_extension_lite)); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_import_message_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_nested_enum_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_foreign_enum_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::repeated_import_enum_extension_lite )); + + + EXPECT_EQ(201 , message.GetExtension(unittest::repeated_int32_extension_lite , 0)); + EXPECT_EQ(202 , message.GetExtension(unittest::repeated_int64_extension_lite , 0)); + EXPECT_EQ(203 , message.GetExtension(unittest::repeated_uint32_extension_lite , 0)); + EXPECT_EQ(204 , message.GetExtension(unittest::repeated_uint64_extension_lite , 0)); + EXPECT_EQ(205 , message.GetExtension(unittest::repeated_sint32_extension_lite , 0)); + EXPECT_EQ(206 , message.GetExtension(unittest::repeated_sint64_extension_lite , 0)); + EXPECT_EQ(207 , message.GetExtension(unittest::repeated_fixed32_extension_lite , 0)); + EXPECT_EQ(208 , message.GetExtension(unittest::repeated_fixed64_extension_lite , 0)); + EXPECT_EQ(209 , message.GetExtension(unittest::repeated_sfixed32_extension_lite, 0)); + EXPECT_EQ(210 , message.GetExtension(unittest::repeated_sfixed64_extension_lite, 0)); + EXPECT_EQ(211 , message.GetExtension(unittest::repeated_float_extension_lite , 0)); + EXPECT_EQ(212 , message.GetExtension(unittest::repeated_double_extension_lite , 0)); + EXPECT_EQ(true , message.GetExtension(unittest::repeated_bool_extension_lite , 0)); + EXPECT_EQ("215", message.GetExtension(unittest::repeated_string_extension_lite , 0)); + EXPECT_EQ("216", message.GetExtension(unittest::repeated_bytes_extension_lite , 0)); + + EXPECT_EQ(217, message.GetExtension(unittest::repeatedgroup_extension_lite , 0).a()); + EXPECT_EQ(218, message.GetExtension(unittest::repeated_nested_message_extension_lite , 0).bb()); + EXPECT_EQ(219, message.GetExtension(unittest::repeated_foreign_message_extension_lite, 0).c()); + EXPECT_EQ(220, message.GetExtension(unittest::repeated_import_message_extension_lite , 0).d()); + + 
EXPECT_EQ(unittest::TestAllTypesLite::BAR , message.GetExtension(unittest::repeated_nested_enum_extension_lite , 0)); + EXPECT_EQ(unittest::FOREIGN_LITE_BAR , message.GetExtension(unittest::repeated_foreign_enum_extension_lite, 0)); + EXPECT_EQ(unittest_import::IMPORT_LITE_BAR, message.GetExtension(unittest::repeated_import_enum_extension_lite , 0)); + + + // Actually verify the second (modified) elements now. + EXPECT_EQ(501 , message.GetExtension(unittest::repeated_int32_extension_lite , 1)); + EXPECT_EQ(502 , message.GetExtension(unittest::repeated_int64_extension_lite , 1)); + EXPECT_EQ(503 , message.GetExtension(unittest::repeated_uint32_extension_lite , 1)); + EXPECT_EQ(504 , message.GetExtension(unittest::repeated_uint64_extension_lite , 1)); + EXPECT_EQ(505 , message.GetExtension(unittest::repeated_sint32_extension_lite , 1)); + EXPECT_EQ(506 , message.GetExtension(unittest::repeated_sint64_extension_lite , 1)); + EXPECT_EQ(507 , message.GetExtension(unittest::repeated_fixed32_extension_lite , 1)); + EXPECT_EQ(508 , message.GetExtension(unittest::repeated_fixed64_extension_lite , 1)); + EXPECT_EQ(509 , message.GetExtension(unittest::repeated_sfixed32_extension_lite, 1)); + EXPECT_EQ(510 , message.GetExtension(unittest::repeated_sfixed64_extension_lite, 1)); + EXPECT_EQ(511 , message.GetExtension(unittest::repeated_float_extension_lite , 1)); + EXPECT_EQ(512 , message.GetExtension(unittest::repeated_double_extension_lite , 1)); + EXPECT_EQ(true , message.GetExtension(unittest::repeated_bool_extension_lite , 1)); + EXPECT_EQ("515", message.GetExtension(unittest::repeated_string_extension_lite , 1)); + EXPECT_EQ("516", message.GetExtension(unittest::repeated_bytes_extension_lite , 1)); + + EXPECT_EQ(517, message.GetExtension(unittest::repeatedgroup_extension_lite , 1).a()); + EXPECT_EQ(518, message.GetExtension(unittest::repeated_nested_message_extension_lite , 1).bb()); + EXPECT_EQ(519, message.GetExtension(unittest::repeated_foreign_message_extension_lite, 1).c()); + EXPECT_EQ(520, message.GetExtension(unittest::repeated_import_message_extension_lite , 1).d()); + + EXPECT_EQ(unittest::TestAllTypesLite::FOO , message.GetExtension(unittest::repeated_nested_enum_extension_lite , 1)); + EXPECT_EQ(unittest::FOREIGN_LITE_FOO , message.GetExtension(unittest::repeated_foreign_enum_extension_lite, 1)); + EXPECT_EQ(unittest_import::IMPORT_LITE_FOO, message.GetExtension(unittest::repeated_import_enum_extension_lite , 1)); + +} + +// ------------------------------------------------------------------- + +void TestUtilLite::SetPackedExtensions( + unittest::TestPackedExtensionsLite* message) { + message->AddExtension(unittest::packed_int32_extension_lite , 601); + message->AddExtension(unittest::packed_int64_extension_lite , 602); + message->AddExtension(unittest::packed_uint32_extension_lite , 603); + message->AddExtension(unittest::packed_uint64_extension_lite , 604); + message->AddExtension(unittest::packed_sint32_extension_lite , 605); + message->AddExtension(unittest::packed_sint64_extension_lite , 606); + message->AddExtension(unittest::packed_fixed32_extension_lite , 607); + message->AddExtension(unittest::packed_fixed64_extension_lite , 608); + message->AddExtension(unittest::packed_sfixed32_extension_lite, 609); + message->AddExtension(unittest::packed_sfixed64_extension_lite, 610); + message->AddExtension(unittest::packed_float_extension_lite , 611); + message->AddExtension(unittest::packed_double_extension_lite , 612); + message->AddExtension(unittest::packed_bool_extension_lite , 
true); + message->AddExtension(unittest::packed_enum_extension_lite, unittest::FOREIGN_LITE_BAR); + // add a second one of each field + message->AddExtension(unittest::packed_int32_extension_lite , 701); + message->AddExtension(unittest::packed_int64_extension_lite , 702); + message->AddExtension(unittest::packed_uint32_extension_lite , 703); + message->AddExtension(unittest::packed_uint64_extension_lite , 704); + message->AddExtension(unittest::packed_sint32_extension_lite , 705); + message->AddExtension(unittest::packed_sint64_extension_lite , 706); + message->AddExtension(unittest::packed_fixed32_extension_lite , 707); + message->AddExtension(unittest::packed_fixed64_extension_lite , 708); + message->AddExtension(unittest::packed_sfixed32_extension_lite, 709); + message->AddExtension(unittest::packed_sfixed64_extension_lite, 710); + message->AddExtension(unittest::packed_float_extension_lite , 711); + message->AddExtension(unittest::packed_double_extension_lite , 712); + message->AddExtension(unittest::packed_bool_extension_lite , false); + message->AddExtension(unittest::packed_enum_extension_lite, unittest::FOREIGN_LITE_BAZ); +} + +// ------------------------------------------------------------------- + +void TestUtilLite::ModifyPackedExtensions( + unittest::TestPackedExtensionsLite* message) { + message->SetExtension(unittest::packed_int32_extension_lite , 1, 801); + message->SetExtension(unittest::packed_int64_extension_lite , 1, 802); + message->SetExtension(unittest::packed_uint32_extension_lite , 1, 803); + message->SetExtension(unittest::packed_uint64_extension_lite , 1, 804); + message->SetExtension(unittest::packed_sint32_extension_lite , 1, 805); + message->SetExtension(unittest::packed_sint64_extension_lite , 1, 806); + message->SetExtension(unittest::packed_fixed32_extension_lite , 1, 807); + message->SetExtension(unittest::packed_fixed64_extension_lite , 1, 808); + message->SetExtension(unittest::packed_sfixed32_extension_lite, 1, 809); + message->SetExtension(unittest::packed_sfixed64_extension_lite, 1, 810); + message->SetExtension(unittest::packed_float_extension_lite , 1, 811); + message->SetExtension(unittest::packed_double_extension_lite , 1, 812); + message->SetExtension(unittest::packed_bool_extension_lite , 1, true); + message->SetExtension(unittest::packed_enum_extension_lite , 1, + unittest::FOREIGN_LITE_FOO); +} + +// ------------------------------------------------------------------- + +void TestUtilLite::ExpectPackedExtensionsSet( + const unittest::TestPackedExtensionsLite& message) { + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_int32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_int64_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_uint32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_uint64_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sint32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sint64_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_fixed32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_fixed64_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sfixed32_extension_lite)); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sfixed64_extension_lite)); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_float_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_double_extension_lite )); + ASSERT_EQ(2, 
message.ExtensionSize(unittest::packed_bool_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_enum_extension_lite )); + + EXPECT_EQ(601 , message.GetExtension(unittest::packed_int32_extension_lite , 0)); + EXPECT_EQ(602 , message.GetExtension(unittest::packed_int64_extension_lite , 0)); + EXPECT_EQ(603 , message.GetExtension(unittest::packed_uint32_extension_lite , 0)); + EXPECT_EQ(604 , message.GetExtension(unittest::packed_uint64_extension_lite , 0)); + EXPECT_EQ(605 , message.GetExtension(unittest::packed_sint32_extension_lite , 0)); + EXPECT_EQ(606 , message.GetExtension(unittest::packed_sint64_extension_lite , 0)); + EXPECT_EQ(607 , message.GetExtension(unittest::packed_fixed32_extension_lite , 0)); + EXPECT_EQ(608 , message.GetExtension(unittest::packed_fixed64_extension_lite , 0)); + EXPECT_EQ(609 , message.GetExtension(unittest::packed_sfixed32_extension_lite, 0)); + EXPECT_EQ(610 , message.GetExtension(unittest::packed_sfixed64_extension_lite, 0)); + EXPECT_EQ(611 , message.GetExtension(unittest::packed_float_extension_lite , 0)); + EXPECT_EQ(612 , message.GetExtension(unittest::packed_double_extension_lite , 0)); + EXPECT_EQ(true , message.GetExtension(unittest::packed_bool_extension_lite , 0)); + EXPECT_EQ(unittest::FOREIGN_LITE_BAR, + message.GetExtension(unittest::packed_enum_extension_lite, 0)); + EXPECT_EQ(701 , message.GetExtension(unittest::packed_int32_extension_lite , 1)); + EXPECT_EQ(702 , message.GetExtension(unittest::packed_int64_extension_lite , 1)); + EXPECT_EQ(703 , message.GetExtension(unittest::packed_uint32_extension_lite , 1)); + EXPECT_EQ(704 , message.GetExtension(unittest::packed_uint64_extension_lite , 1)); + EXPECT_EQ(705 , message.GetExtension(unittest::packed_sint32_extension_lite , 1)); + EXPECT_EQ(706 , message.GetExtension(unittest::packed_sint64_extension_lite , 1)); + EXPECT_EQ(707 , message.GetExtension(unittest::packed_fixed32_extension_lite , 1)); + EXPECT_EQ(708 , message.GetExtension(unittest::packed_fixed64_extension_lite , 1)); + EXPECT_EQ(709 , message.GetExtension(unittest::packed_sfixed32_extension_lite, 1)); + EXPECT_EQ(710 , message.GetExtension(unittest::packed_sfixed64_extension_lite, 1)); + EXPECT_EQ(711 , message.GetExtension(unittest::packed_float_extension_lite , 1)); + EXPECT_EQ(712 , message.GetExtension(unittest::packed_double_extension_lite , 1)); + EXPECT_EQ(false, message.GetExtension(unittest::packed_bool_extension_lite , 1)); + EXPECT_EQ(unittest::FOREIGN_LITE_BAZ, + message.GetExtension(unittest::packed_enum_extension_lite, 1)); +} + +// ------------------------------------------------------------------- + +void TestUtilLite::ExpectPackedExtensionsClear( + const unittest::TestPackedExtensionsLite& message) { + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_int32_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_int64_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_uint32_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_uint64_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_sint32_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_sint64_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_fixed32_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_fixed64_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_sfixed32_extension_lite)); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_sfixed64_extension_lite)); + 
EXPECT_EQ(0, message.ExtensionSize(unittest::packed_float_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_double_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_bool_extension_lite )); + EXPECT_EQ(0, message.ExtensionSize(unittest::packed_enum_extension_lite )); +} + +// ------------------------------------------------------------------- + +void TestUtilLite::ExpectPackedExtensionsModified( + const unittest::TestPackedExtensionsLite& message) { + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_int32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_int64_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_uint32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_uint64_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sint32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sint64_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_fixed32_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_fixed64_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sfixed32_extension_lite)); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_sfixed64_extension_lite)); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_float_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_double_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_bool_extension_lite )); + ASSERT_EQ(2, message.ExtensionSize(unittest::packed_enum_extension_lite )); + EXPECT_EQ(601 , message.GetExtension(unittest::packed_int32_extension_lite , 0)); + EXPECT_EQ(602 , message.GetExtension(unittest::packed_int64_extension_lite , 0)); + EXPECT_EQ(603 , message.GetExtension(unittest::packed_uint32_extension_lite , 0)); + EXPECT_EQ(604 , message.GetExtension(unittest::packed_uint64_extension_lite , 0)); + EXPECT_EQ(605 , message.GetExtension(unittest::packed_sint32_extension_lite , 0)); + EXPECT_EQ(606 , message.GetExtension(unittest::packed_sint64_extension_lite , 0)); + EXPECT_EQ(607 , message.GetExtension(unittest::packed_fixed32_extension_lite , 0)); + EXPECT_EQ(608 , message.GetExtension(unittest::packed_fixed64_extension_lite , 0)); + EXPECT_EQ(609 , message.GetExtension(unittest::packed_sfixed32_extension_lite, 0)); + EXPECT_EQ(610 , message.GetExtension(unittest::packed_sfixed64_extension_lite, 0)); + EXPECT_EQ(611 , message.GetExtension(unittest::packed_float_extension_lite , 0)); + EXPECT_EQ(612 , message.GetExtension(unittest::packed_double_extension_lite , 0)); + EXPECT_EQ(true , message.GetExtension(unittest::packed_bool_extension_lite , 0)); + EXPECT_EQ(unittest::FOREIGN_LITE_BAR, + message.GetExtension(unittest::packed_enum_extension_lite, 0)); + + // Actually verify the second (modified) elements now. 
+ EXPECT_EQ(801 , message.GetExtension(unittest::packed_int32_extension_lite , 1)); + EXPECT_EQ(802 , message.GetExtension(unittest::packed_int64_extension_lite , 1)); + EXPECT_EQ(803 , message.GetExtension(unittest::packed_uint32_extension_lite , 1)); + EXPECT_EQ(804 , message.GetExtension(unittest::packed_uint64_extension_lite , 1)); + EXPECT_EQ(805 , message.GetExtension(unittest::packed_sint32_extension_lite , 1)); + EXPECT_EQ(806 , message.GetExtension(unittest::packed_sint64_extension_lite , 1)); + EXPECT_EQ(807 , message.GetExtension(unittest::packed_fixed32_extension_lite , 1)); + EXPECT_EQ(808 , message.GetExtension(unittest::packed_fixed64_extension_lite , 1)); + EXPECT_EQ(809 , message.GetExtension(unittest::packed_sfixed32_extension_lite, 1)); + EXPECT_EQ(810 , message.GetExtension(unittest::packed_sfixed64_extension_lite, 1)); + EXPECT_EQ(811 , message.GetExtension(unittest::packed_float_extension_lite , 1)); + EXPECT_EQ(812 , message.GetExtension(unittest::packed_double_extension_lite , 1)); + EXPECT_EQ(true , message.GetExtension(unittest::packed_bool_extension_lite , 1)); + EXPECT_EQ(unittest::FOREIGN_LITE_FOO, + message.GetExtension(unittest::packed_enum_extension_lite, 1)); +} + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/test_util_lite.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/test_util_lite.h new file mode 100644 index 0000000000..ca35aaa47e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/test_util_lite.h @@ -0,0 +1,101 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
+
+#ifndef GOOGLE_PROTOBUF_TEST_UTIL_LITE_H__
+#define GOOGLE_PROTOBUF_TEST_UTIL_LITE_H__
+
+#include <google/protobuf/unittest_lite.pb.h>
+
+namespace google {
+namespace protobuf {
+
+namespace unittest = protobuf_unittest;
+namespace unittest_import = protobuf_unittest_import;
+
+class TestUtilLite {
+ public:
+  // Set every field in the message to a unique value.
+  static void SetAllFields(unittest::TestAllTypesLite* message);
+  static void SetAllExtensions(unittest::TestAllExtensionsLite* message);
+  static void SetPackedFields(unittest::TestPackedTypesLite* message);
+  static void SetPackedExtensions(unittest::TestPackedExtensionsLite* message);
+
+  // Use the repeated versions of the set_*() accessors to modify all the
+  // repeated fields of the message (which should already have been
+  // initialized with Set*Fields()).  Set*Fields() itself only tests
+  // the add_*() accessors.
+  static void ModifyRepeatedFields(unittest::TestAllTypesLite* message);
+  static void ModifyRepeatedExtensions(
+      unittest::TestAllExtensionsLite* message);
+  static void ModifyPackedFields(unittest::TestPackedTypesLite* message);
+  static void ModifyPackedExtensions(
+      unittest::TestPackedExtensionsLite* message);
+
+  // Check that all fields have the values that they should have after
+  // Set*Fields() is called.
+  static void ExpectAllFieldsSet(const unittest::TestAllTypesLite& message);
+  static void ExpectAllExtensionsSet(
+      const unittest::TestAllExtensionsLite& message);
+  static void ExpectPackedFieldsSet(
+      const unittest::TestPackedTypesLite& message);
+  static void ExpectPackedExtensionsSet(
+      const unittest::TestPackedExtensionsLite& message);
+
+  // Expect that the message is modified as would be expected from
+  // Modify*Fields().
+  static void ExpectRepeatedFieldsModified(
+      const unittest::TestAllTypesLite& message);
+  static void ExpectRepeatedExtensionsModified(
+      const unittest::TestAllExtensionsLite& message);
+  static void ExpectPackedFieldsModified(
+      const unittest::TestPackedTypesLite& message);
+  static void ExpectPackedExtensionsModified(
+      const unittest::TestPackedExtensionsLite& message);
+
+  // Check that all fields have their default values.
+ static void ExpectClear(const unittest::TestAllTypesLite& message); + static void ExpectExtensionsClear( + const unittest::TestAllExtensionsLite& message); + static void ExpectPackedClear(const unittest::TestPackedTypesLite& message); + static void ExpectPackedExtensionsClear( + const unittest::TestPackedExtensionsLite& message); + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(TestUtilLite); +}; + +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_TEST_UTIL_LITE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testdata/golden_message b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testdata/golden_message new file mode 100644 index 0000000000..94898e494a Binary files /dev/null and b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testdata/golden_message differ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testdata/golden_packed_fields_message b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testdata/golden_packed_fields_message new file mode 100644 index 0000000000..ee28d38830 Binary files /dev/null and b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testdata/golden_packed_fields_message differ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testdata/text_format_unittest_data.txt b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testdata/text_format_unittest_data.txt new file mode 100644 index 0000000000..feea8f7bbc --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testdata/text_format_unittest_data.txt @@ -0,0 +1,116 @@ +optional_int32: 101 +optional_int64: 102 +optional_uint32: 103 +optional_uint64: 104 +optional_sint32: 105 +optional_sint64: 106 +optional_fixed32: 107 +optional_fixed64: 108 +optional_sfixed32: 109 +optional_sfixed64: 110 +optional_float: 111 +optional_double: 112 +optional_bool: true +optional_string: "115" +optional_bytes: "116" +OptionalGroup { + a: 117 +} +optional_nested_message { + bb: 118 +} +optional_foreign_message { + c: 119 +} +optional_import_message { + d: 120 +} +optional_nested_enum: BAZ +optional_foreign_enum: FOREIGN_BAZ +optional_import_enum: IMPORT_BAZ +optional_string_piece: "124" +optional_cord: "125" +repeated_int32: 201 +repeated_int32: 301 +repeated_int64: 202 +repeated_int64: 302 +repeated_uint32: 203 +repeated_uint32: 303 +repeated_uint64: 204 +repeated_uint64: 304 +repeated_sint32: 205 +repeated_sint32: 305 +repeated_sint64: 206 +repeated_sint64: 306 +repeated_fixed32: 207 +repeated_fixed32: 307 +repeated_fixed64: 208 +repeated_fixed64: 308 +repeated_sfixed32: 209 +repeated_sfixed32: 309 +repeated_sfixed64: 210 +repeated_sfixed64: 310 +repeated_float: 211 +repeated_float: 311 +repeated_double: 212 +repeated_double: 312 +repeated_bool: true +repeated_bool: false +repeated_string: "215" +repeated_string: "315" +repeated_bytes: "216" +repeated_bytes: "316" +RepeatedGroup { + a: 217 +} +RepeatedGroup { + a: 317 +} +repeated_nested_message { + bb: 218 +} +repeated_nested_message { + bb: 318 +} +repeated_foreign_message { + c: 219 +} +repeated_foreign_message { + c: 319 +} +repeated_import_message { + d: 220 +} +repeated_import_message { + d: 320 +} 
+repeated_nested_enum: BAR +repeated_nested_enum: BAZ +repeated_foreign_enum: FOREIGN_BAR +repeated_foreign_enum: FOREIGN_BAZ +repeated_import_enum: IMPORT_BAR +repeated_import_enum: IMPORT_BAZ +repeated_string_piece: "224" +repeated_string_piece: "324" +repeated_cord: "225" +repeated_cord: "325" +default_int32: 401 +default_int64: 402 +default_uint32: 403 +default_uint64: 404 +default_sint32: 405 +default_sint64: 406 +default_fixed32: 407 +default_fixed64: 408 +default_sfixed32: 409 +default_sfixed64: 410 +default_float: 411 +default_double: 412 +default_bool: false +default_string: "415" +default_bytes: "416" +default_nested_enum: FOO +default_foreign_enum: FOREIGN_FOO +default_import_enum: IMPORT_FOO +default_string_piece: "424" +default_cord: "425" diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testdata/text_format_unittest_extensions_data.txt b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testdata/text_format_unittest_extensions_data.txt new file mode 100644 index 0000000000..057beae89d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testdata/text_format_unittest_extensions_data.txt @@ -0,0 +1,116 @@ +[protobuf_unittest.optional_int32_extension]: 101 +[protobuf_unittest.optional_int64_extension]: 102 +[protobuf_unittest.optional_uint32_extension]: 103 +[protobuf_unittest.optional_uint64_extension]: 104 +[protobuf_unittest.optional_sint32_extension]: 105 +[protobuf_unittest.optional_sint64_extension]: 106 +[protobuf_unittest.optional_fixed32_extension]: 107 +[protobuf_unittest.optional_fixed64_extension]: 108 +[protobuf_unittest.optional_sfixed32_extension]: 109 +[protobuf_unittest.optional_sfixed64_extension]: 110 +[protobuf_unittest.optional_float_extension]: 111 +[protobuf_unittest.optional_double_extension]: 112 +[protobuf_unittest.optional_bool_extension]: true +[protobuf_unittest.optional_string_extension]: "115" +[protobuf_unittest.optional_bytes_extension]: "116" +[protobuf_unittest.optionalgroup_extension] { + a: 117 +} +[protobuf_unittest.optional_nested_message_extension] { + bb: 118 +} +[protobuf_unittest.optional_foreign_message_extension] { + c: 119 +} +[protobuf_unittest.optional_import_message_extension] { + d: 120 +} +[protobuf_unittest.optional_nested_enum_extension]: BAZ +[protobuf_unittest.optional_foreign_enum_extension]: FOREIGN_BAZ +[protobuf_unittest.optional_import_enum_extension]: IMPORT_BAZ +[protobuf_unittest.optional_string_piece_extension]: "124" +[protobuf_unittest.optional_cord_extension]: "125" +[protobuf_unittest.repeated_int32_extension]: 201 +[protobuf_unittest.repeated_int32_extension]: 301 +[protobuf_unittest.repeated_int64_extension]: 202 +[protobuf_unittest.repeated_int64_extension]: 302 +[protobuf_unittest.repeated_uint32_extension]: 203 +[protobuf_unittest.repeated_uint32_extension]: 303 +[protobuf_unittest.repeated_uint64_extension]: 204 +[protobuf_unittest.repeated_uint64_extension]: 304 +[protobuf_unittest.repeated_sint32_extension]: 205 +[protobuf_unittest.repeated_sint32_extension]: 305 +[protobuf_unittest.repeated_sint64_extension]: 206 +[protobuf_unittest.repeated_sint64_extension]: 306 +[protobuf_unittest.repeated_fixed32_extension]: 207 +[protobuf_unittest.repeated_fixed32_extension]: 307 +[protobuf_unittest.repeated_fixed64_extension]: 208 +[protobuf_unittest.repeated_fixed64_extension]: 308 +[protobuf_unittest.repeated_sfixed32_extension]: 209 
+[protobuf_unittest.repeated_sfixed32_extension]: 309 +[protobuf_unittest.repeated_sfixed64_extension]: 210 +[protobuf_unittest.repeated_sfixed64_extension]: 310 +[protobuf_unittest.repeated_float_extension]: 211 +[protobuf_unittest.repeated_float_extension]: 311 +[protobuf_unittest.repeated_double_extension]: 212 +[protobuf_unittest.repeated_double_extension]: 312 +[protobuf_unittest.repeated_bool_extension]: true +[protobuf_unittest.repeated_bool_extension]: false +[protobuf_unittest.repeated_string_extension]: "215" +[protobuf_unittest.repeated_string_extension]: "315" +[protobuf_unittest.repeated_bytes_extension]: "216" +[protobuf_unittest.repeated_bytes_extension]: "316" +[protobuf_unittest.repeatedgroup_extension] { + a: 217 +} +[protobuf_unittest.repeatedgroup_extension] { + a: 317 +} +[protobuf_unittest.repeated_nested_message_extension] { + bb: 218 +} +[protobuf_unittest.repeated_nested_message_extension] { + bb: 318 +} +[protobuf_unittest.repeated_foreign_message_extension] { + c: 219 +} +[protobuf_unittest.repeated_foreign_message_extension] { + c: 319 +} +[protobuf_unittest.repeated_import_message_extension] { + d: 220 +} +[protobuf_unittest.repeated_import_message_extension] { + d: 320 +} +[protobuf_unittest.repeated_nested_enum_extension]: BAR +[protobuf_unittest.repeated_nested_enum_extension]: BAZ +[protobuf_unittest.repeated_foreign_enum_extension]: FOREIGN_BAR +[protobuf_unittest.repeated_foreign_enum_extension]: FOREIGN_BAZ +[protobuf_unittest.repeated_import_enum_extension]: IMPORT_BAR +[protobuf_unittest.repeated_import_enum_extension]: IMPORT_BAZ +[protobuf_unittest.repeated_string_piece_extension]: "224" +[protobuf_unittest.repeated_string_piece_extension]: "324" +[protobuf_unittest.repeated_cord_extension]: "225" +[protobuf_unittest.repeated_cord_extension]: "325" +[protobuf_unittest.default_int32_extension]: 401 +[protobuf_unittest.default_int64_extension]: 402 +[protobuf_unittest.default_uint32_extension]: 403 +[protobuf_unittest.default_uint64_extension]: 404 +[protobuf_unittest.default_sint32_extension]: 405 +[protobuf_unittest.default_sint64_extension]: 406 +[protobuf_unittest.default_fixed32_extension]: 407 +[protobuf_unittest.default_fixed64_extension]: 408 +[protobuf_unittest.default_sfixed32_extension]: 409 +[protobuf_unittest.default_sfixed64_extension]: 410 +[protobuf_unittest.default_float_extension]: 411 +[protobuf_unittest.default_double_extension]: 412 +[protobuf_unittest.default_bool_extension]: false +[protobuf_unittest.default_string_extension]: "415" +[protobuf_unittest.default_bytes_extension]: "416" +[protobuf_unittest.default_nested_enum_extension]: FOO +[protobuf_unittest.default_foreign_enum_extension]: FOREIGN_FOO +[protobuf_unittest.default_import_enum_extension]: IMPORT_FOO +[protobuf_unittest.default_string_piece_extension]: "424" +[protobuf_unittest.default_cord_extension]: "425" diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/file.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/file.cc new file mode 100644 index 0000000000..e224781db5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/file.cc @@ -0,0 +1,176 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// emulates google3/file/base/file.cc + +#include +#include +#include +#include +#ifdef _MSC_VER +#define WIN32_LEAN_AND_MEAN // yeah, right +#include // Find*File(). :( +#include +#include +#else +#include +#include +#endif +#include + +namespace google { +namespace protobuf { + +#ifdef _WIN32 +#define mkdir(name, mode) mkdir(name) +// Windows doesn't have symbolic links. +#define lstat stat +#ifndef F_OK +#define F_OK 00 // not defined by MSVC for whatever reason +#endif +#endif + +bool File::Exists(const string& name) { + return access(name.c_str(), F_OK) == 0; +} + +bool File::ReadFileToString(const string& name, string* output) { + char buffer[1024]; + FILE* file = fopen(name.c_str(), "rb"); + if (file == NULL) return false; + + while (true) { + size_t n = fread(buffer, 1, sizeof(buffer), file); + if (n <= 0) break; + output->append(buffer, n); + } + + int error = ferror(file); + if (fclose(file) != 0) return false; + return error == 0; +} + +void File::ReadFileToStringOrDie(const string& name, string* output) { + GOOGLE_CHECK(ReadFileToString(name, output)) << "Could not read: " << name; +} + +void File::WriteStringToFileOrDie(const string& contents, const string& name) { + FILE* file = fopen(name.c_str(), "wb"); + GOOGLE_CHECK(file != NULL) + << "fopen(" << name << ", \"wb\"): " << strerror(errno); + GOOGLE_CHECK_EQ(fwrite(contents.data(), 1, contents.size(), file), + contents.size()) + << "fwrite(" << name << "): " << strerror(errno); + GOOGLE_CHECK(fclose(file) == 0) + << "fclose(" << name << "): " << strerror(errno); +} + +bool File::CreateDir(const string& name, int mode) { + return mkdir(name.c_str(), mode) == 0; +} + +bool File::RecursivelyCreateDir(const string& path, int mode) { + if (CreateDir(path, mode)) return true; + + if (Exists(path)) return false; + + // Try creating the parent. + string::size_type slashpos = path.find_last_of('/'); + if (slashpos == string::npos) { + // No parent given. 
+ return false; + } + + return RecursivelyCreateDir(path.substr(0, slashpos), mode) && + CreateDir(path, mode); +} + +void File::DeleteRecursively(const string& name, + void* dummy1, void* dummy2) { + // We don't care too much about error checking here since this is only used + // in tests to delete temporary directories that are under /tmp anyway. + +#ifdef _MSC_VER + // This interface is so weird. + WIN32_FIND_DATA find_data; + HANDLE find_handle = FindFirstFile((name + "/*").c_str(), &find_data); + if (find_handle == INVALID_HANDLE_VALUE) { + // Just delete it, whatever it is. + DeleteFile(name.c_str()); + RemoveDirectory(name.c_str()); + return; + } + + do { + string entry_name = find_data.cFileName; + if (entry_name != "." && entry_name != "..") { + string path = name + "/" + entry_name; + if (find_data.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) { + DeleteRecursively(path, NULL, NULL); + RemoveDirectory(path.c_str()); + } else { + DeleteFile(path.c_str()); + } + } + } while(FindNextFile(find_handle, &find_data)); + FindClose(find_handle); + + RemoveDirectory(name.c_str()); +#else + // Use opendir()! Yay! + // lstat = Don't follow symbolic links. + struct stat stats; + if (lstat(name.c_str(), &stats) != 0) return; + + if (S_ISDIR(stats.st_mode)) { + DIR* dir = opendir(name.c_str()); + if (dir != NULL) { + while (true) { + struct dirent* entry = readdir(dir); + if (entry == NULL) break; + string entry_name = entry->d_name; + if (entry_name != "." && entry_name != "..") { + DeleteRecursively(name + "/" + entry_name, NULL, NULL); + } + } + } + + closedir(dir); + rmdir(name.c_str()); + + } else if (S_ISREG(stats.st_mode)) { + remove(name.c_str()); + } +#endif +} + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/file.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/file.h new file mode 100644 index 0000000000..a6b1c7641f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/file.h @@ -0,0 +1,83 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// emulates google3/file/base/file.h + +#ifndef GOOGLE_PROTOBUF_TESTING_FILE_H__ +#define GOOGLE_PROTOBUF_TESTING_FILE_H__ + +#include + +namespace google { +namespace protobuf { + +const int DEFAULT_FILE_MODE = 0777; + +// Protocol buffer code only uses a couple static methods of File, and only +// in tests. +class File { + public: + // Check if the file exists. + static bool Exists(const string& name); + + // Read an entire file to a string. Return true if successful, false + // otherwise. + static bool ReadFileToString(const string& name, string* output); + + // Same as above, but crash on failure. + static void ReadFileToStringOrDie(const string& name, string* output); + + // Create a file and write a string to it. + static void WriteStringToFileOrDie(const string& contents, + const string& name); + + // Create a directory. + static bool CreateDir(const string& name, int mode); + + // Create a directory and all parent directories if necessary. + static bool RecursivelyCreateDir(const string& path, int mode); + + // If "name" is a file, we delete it. If it is a directory, we + // call DeleteRecursively() for each file or directory (other than + // dot and double-dot) within it, and then delete the directory itself. + // The "dummy" parameters have a meaning in the original version of this + // method but they are not used anywhere in protocol buffers. + static void DeleteRecursively(const string& name, + void* dummy1, void* dummy2); + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(File); +}; + +} // namespace protobuf +} // namespace google + +#endif // GOOGLE_PROTOBUF_TESTING_FILE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/googletest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/googletest.cc new file mode 100644 index 0000000000..cd094d0cde --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/googletest.cc @@ -0,0 +1,254 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// emulates google3/testing/base/public/googletest.cc + +#include +#include +#include +#include +#include +#include +#include +#ifdef _MSC_VER +#include +#include +#else +#include +#endif +#include +#include +#include +#include + +namespace google { +namespace protobuf { + +#ifdef _WIN32 +#define mkdir(name, mode) mkdir(name) +#endif + +#ifndef O_BINARY +#ifdef _O_BINARY +#define O_BINARY _O_BINARY +#else +#define O_BINARY 0 // If this isn't defined, the platform doesn't need it. +#endif +#endif + +string TestSourceDir() { +#ifdef _MSC_VER + // Look for the "src" directory. + string prefix = "."; + + while (!File::Exists(prefix + "/src/google/protobuf")) { + if (!File::Exists(prefix)) { + GOOGLE_LOG(FATAL) + << "Could not find protobuf source code. Please run tests from " + "somewhere within the protobuf source package."; + } + prefix += "/.."; + } + return prefix + "/src"; +#else + // automake sets the "srcdir" environment variable. + char* result = getenv("srcdir"); + if (result == NULL) { + // Otherwise, the test must be run from the source directory. + return "."; + } else { + return result; + } +#endif +} + +namespace { + +string GetTemporaryDirectoryName() { + // tmpnam() is generally not considered safe but we're only using it for + // testing. We cannot use tmpfile() or mkstemp() since we're creating a + // directory. + char b[L_tmpnam + 1]; // HPUX multithread return 0 if s is 0 + string result = tmpnam(b); +#ifdef _WIN32 + // On Win32, tmpnam() returns a file prefixed with '\', but which is supposed + // to be used in the current working directory. WTF? + if (HasPrefixString(result, "\\")) { + result.erase(0, 1); + } +#endif // _WIN32 + return result; +} + +// Creates a temporary directory on demand and deletes it when the process +// quits. +class TempDirDeleter { + public: + TempDirDeleter() {} + ~TempDirDeleter() { + if (!name_.empty()) { + File::DeleteRecursively(name_, NULL, NULL); + } + } + + string GetTempDir() { + if (name_.empty()) { + name_ = GetTemporaryDirectoryName(); + GOOGLE_CHECK(mkdir(name_.c_str(), 0777) == 0) << strerror(errno); + + // Stick a file in the directory that tells people what this is, in case + // we abort and don't get a chance to delete it. + File::WriteStringToFileOrDie("", name_ + "/TEMP_DIR_FOR_PROTOBUF_TESTS"); + } + return name_; + } + + private: + string name_; +}; + +TempDirDeleter temp_dir_deleter_; + +} // namespace + +string TestTempDir() { + return temp_dir_deleter_.GetTempDir(); +} + +// TODO(kenton): Share duplicated code below. Too busy/lazy for now. 
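The helpers above pick the temporary directory with tmpnam(), which the comment itself flags as unsafe, and rely on TempDirDeleter to clean it up at process exit. For comparison, a minimal sketch of the same create-on-demand, delete-at-exit idea built on POSIX mkdtemp(); the class name and the /tmp prefix are illustrative and not part of this patch:

#include <stdlib.h>
#include <unistd.h>
#include <string>

// Creates a uniquely named directory up front and removes it on destruction.
class ScopedTempDir {
 public:
  ScopedTempDir() {
    char tmpl[] = "/tmp/protobuf_test_XXXXXX";
    char* dir = mkdtemp(tmpl);  // picks the name and creates the directory atomically
    if (dir != NULL) path_ = dir;
  }
  ~ScopedTempDir() {
    // rmdir() only handles the empty-directory case; a fuller version would
    // reuse File::DeleteRecursively() from file.cc above.
    if (!path_.empty()) rmdir(path_.c_str());
  }
  const std::string& path() const { return path_; }

 private:
  std::string path_;
};

Because mkdtemp() chooses the name and creates the directory in one call, there is no window between picking a name and creating it, which is the weakness of the tmpnam()-based approach used here for test-only code.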
+ +static string stdout_capture_filename_; +static string stderr_capture_filename_; +static int original_stdout_ = -1; +static int original_stderr_ = -1; + +void CaptureTestStdout() { + GOOGLE_CHECK_EQ(original_stdout_, -1) << "Already capturing."; + + stdout_capture_filename_ = TestTempDir() + "/captured_stdout"; + + int fd = open(stdout_capture_filename_.c_str(), + O_WRONLY | O_CREAT | O_EXCL | O_BINARY, 0777); + GOOGLE_CHECK(fd >= 0) << "open: " << strerror(errno); + + original_stdout_ = dup(1); + close(1); + dup2(fd, 1); + close(fd); +} + +void CaptureTestStderr() { + GOOGLE_CHECK_EQ(original_stderr_, -1) << "Already capturing."; + + stderr_capture_filename_ = TestTempDir() + "/captured_stderr"; + + int fd = open(stderr_capture_filename_.c_str(), + O_WRONLY | O_CREAT | O_EXCL | O_BINARY, 0777); + GOOGLE_CHECK(fd >= 0) << "open: " << strerror(errno); + + original_stderr_ = dup(2); + close(2); + dup2(fd, 2); + close(fd); +} + +string GetCapturedTestStdout() { + GOOGLE_CHECK_NE(original_stdout_, -1) << "Not capturing."; + + close(1); + dup2(original_stdout_, 1); + original_stdout_ = -1; + + string result; + File::ReadFileToStringOrDie(stdout_capture_filename_, &result); + + remove(stdout_capture_filename_.c_str()); + + return result; +} + +string GetCapturedTestStderr() { + GOOGLE_CHECK_NE(original_stderr_, -1) << "Not capturing."; + + close(2); + dup2(original_stderr_, 2); + original_stderr_ = -1; + + string result; + File::ReadFileToStringOrDie(stderr_capture_filename_, &result); + + remove(stderr_capture_filename_.c_str()); + + return result; +} + +ScopedMemoryLog* ScopedMemoryLog::active_log_ = NULL; + +ScopedMemoryLog::ScopedMemoryLog() { + GOOGLE_CHECK(active_log_ == NULL); + active_log_ = this; + old_handler_ = SetLogHandler(&HandleLog); +} + +ScopedMemoryLog::~ScopedMemoryLog() { + SetLogHandler(old_handler_); + active_log_ = NULL; +} + +const vector& ScopedMemoryLog::GetMessages(LogLevel dummy) const { + GOOGLE_CHECK_EQ(dummy, ERROR); + return messages_; +} + +void ScopedMemoryLog::HandleLog(LogLevel level, const char* filename, + int line, const string& message) { + GOOGLE_CHECK(active_log_ != NULL); + if (level == ERROR) { + active_log_->messages_.push_back(message); + } +} + +namespace { + +// Force shutdown at process exit so that we can test for memory leaks. To +// actually check for leaks, I suggest using the heap checker included with +// google-perftools. Set it to "draconian" mode to ensure that every last +// call to malloc() has a corresponding free(). +struct ForceShutdown { + ~ForceShutdown() { + ShutdownProtobufLibrary(); + } +} force_shutdown; + +} // namespace + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/googletest.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/googletest.h new file mode 100644 index 0000000000..71444c960d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/googletest.h @@ -0,0 +1,98 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// emulates google3/testing/base/public/googletest.h + +#ifndef GOOGLE_PROTOBUF_GOOGLETEST_H__ +#define GOOGLE_PROTOBUF_GOOGLETEST_H__ + +#include +#include + +namespace google { +namespace protobuf { + +// When running unittests, get the directory containing the source code. +string TestSourceDir(); + +// When running unittests, get a directory where temporary files may be +// placed. +string TestTempDir(); + +// Capture all text written to stdout or stderr. +void CaptureTestStdout(); +void CaptureTestStderr(); + +// Stop capturing stdout or stderr and return the text captured. +string GetCapturedTestStdout(); +string GetCapturedTestStderr(); + +// For use with ScopedMemoryLog::GetMessages(). Inside Google the LogLevel +// constants don't have the LOGLEVEL_ prefix, so the code that used +// ScopedMemoryLog refers to LOGLEVEL_ERROR as just ERROR. +#undef ERROR // defend against promiscuous windows.h +static const LogLevel ERROR = LOGLEVEL_ERROR; + +// Receives copies of all LOG(ERROR) messages while in scope. Sample usage: +// { +// ScopedMemoryLog log; // constructor registers object as a log sink +// SomeRoutineThatMayLogMessages(); +// const vector& warnings = log.GetMessages(ERROR); +// } // destructor unregisters object as a log sink +// This is a dummy implementation which covers only what is used by protocol +// buffer unit tests. +class ScopedMemoryLog { + public: + ScopedMemoryLog(); + virtual ~ScopedMemoryLog(); + + // Fetches all messages logged. The internal version of this class + // would only fetch messages at the given security level, but the protobuf + // open source version ignores the argument since we always pass ERROR + // anyway. 
+ const vector<string>& GetMessages(LogLevel dummy) const;
+
+ private:
+ vector<string> messages_;
+ LogHandler* old_handler_;
+
+ static void HandleLog(LogLevel level, const char* filename, int line,
+ const string& message);
+
+ static ScopedMemoryLog* active_log_;
+
+ GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ScopedMemoryLog);
+};
+
+} // namespace protobuf
+} // namespace google
+
+#endif // GOOGLE_PROTOBUF_GOOGLETEST_H__
diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/zcgunzip.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/zcgunzip.cc
new file mode 100644
index 0000000000..a6197854bd
--- /dev/null
+++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/zcgunzip.cc
@@ -0,0 +1,73 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2009 Google Inc. All rights reserved.
+// http://code.google.com/p/protobuf/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Author: brianolson@google.com (Brian Olson)
+// Based on original Protocol Buffers design by
+// Sanjay Ghemawat, Jeff Dean, and others.
+//
+// Test program to verify that GzipInputStream is compatible with command line
+// gunzip or java.util.zip.GzipInputStream
+//
+// Reads gzip stream on standard input and writes decompressed data to standard
+// output.
+ +#include "config.h" + +#include +#include +#include +#include + +#include +#include + +using google::protobuf::io::FileInputStream; +using google::protobuf::io::GzipInputStream; + +int main(int argc, const char** argv) { + FileInputStream fin(STDIN_FILENO); + GzipInputStream in(&fin); + + while (true) { + const void* inptr; + int inlen; + bool ok; + ok = in.Next(&inptr, &inlen); + if (!ok) { + break; + } + if (inlen > 0) { + int err = write(STDOUT_FILENO, inptr, inlen); + assert(err == inlen); + } + } + + return 0; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/zcgzip.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/zcgzip.cc new file mode 100644 index 0000000000..9133275c8a --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/testing/zcgzip.cc @@ -0,0 +1,79 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2009 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: brianolson@google.com (Brian Olson) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Test program to verify that GzipOutputStream is compatible with command line +// gzip or java.util.zip.GzipOutputStream +// +// Reads data on standard input and writes compressed gzip stream to standard +// output. 
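zcgunzip.cc and zcgzip.cc each exercise one direction of the gzip streams against an external tool. For reference, a self-contained in-process round trip through GzipOutputStream and GzipInputStream, using the same ZeroCopyStream Next()/BackUp() contract that both programs rely on; the helper name is illustrative and this sketch is not part of the patch:

#include <string.h>
#include <algorithm>
#include <string>

#include <google/protobuf/io/gzip_stream.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>

using google::protobuf::io::ArrayInputStream;
using google::protobuf::io::GzipInputStream;
using google::protobuf::io::GzipOutputStream;
using google::protobuf::io::StringOutputStream;

// Compresses `input` with GzipOutputStream, decompresses it again with
// GzipInputStream, and reports whether the bytes survived the round trip.
bool GzipRoundTrips(const std::string& input) {
  std::string compressed;
  {
    StringOutputStream raw_out(&compressed);
    GzipOutputStream gzip_out(&raw_out);
    size_t written = 0;
    while (written < input.size()) {
      void* buffer;
      int size;
      if (!gzip_out.Next(&buffer, &size)) return false;  // borrow an output buffer
      size_t n = std::min<size_t>(size, input.size() - written);
      memcpy(buffer, input.data() + written, n);
      written += n;
      if (n < static_cast<size_t>(size)) gzip_out.BackUp(size - n);  // return the unused tail
    }
    gzip_out.Close();  // flushes the gzip trailer into `compressed`
  }

  std::string output;
  ArrayInputStream raw_in(compressed.data(), static_cast<int>(compressed.size()));
  GzipInputStream gzip_in(&raw_in);
  const void* chunk;
  int len;
  while (gzip_in.Next(&chunk, &len)) {
    if (len > 0) output.append(static_cast<const char*>(chunk), len);
  }
  return output == input;
}

Close() has to run before the compressed buffer is read back so that the gzip trailer is flushed; zcgzip.cc below gets the same effect from the stream destructors at the end of main().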
+ +#include "config.h" + +#include +#include +#include + +#include +#include + +using google::protobuf::io::FileOutputStream; +using google::protobuf::io::GzipOutputStream; + +int main(int argc, const char** argv) { + FileOutputStream fout(STDOUT_FILENO); + GzipOutputStream out(&fout); + int readlen; + + while (true) { + void* outptr; + int outlen; + bool ok; + do { + ok = out.Next(&outptr, &outlen); + if (!ok) { + break; + } + } while (outlen <= 0); + readlen = read(STDIN_FILENO, outptr, outlen); + if (readlen <= 0) { + out.BackUp(outlen); + break; + } + if (readlen < outlen) { + out.BackUp(outlen - readlen); + } + } + + return 0; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/text_format.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/text_format.cc new file mode 100644 index 0000000000..1494ebd78d --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/text_format.cc @@ -0,0 +1,1285 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: jschorr@google.com (Joseph Schorr) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include +#include + +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { + +string Message::DebugString() const { + string debug_string; + + TextFormat::PrintToString(*this, &debug_string); + + return debug_string; +} + +string Message::ShortDebugString() const { + string debug_string; + + TextFormat::Printer printer; + printer.SetSingleLineMode(true); + + printer.PrintToString(*this, &debug_string); + // Single line mode currently might have an extra space at the end. 
+ if (debug_string.size() > 0 && + debug_string[debug_string.size() - 1] == ' ') { + debug_string.resize(debug_string.size() - 1); + } + + return debug_string; +} + +string Message::Utf8DebugString() const { + string debug_string; + + TextFormat::Printer printer; + printer.SetUseUtf8StringEscaping(true); + + printer.PrintToString(*this, &debug_string); + + return debug_string; +} + +void Message::PrintDebugString() const { + printf("%s", DebugString().c_str()); +} + + +// =========================================================================== +// Internal class for parsing an ASCII representation of a Protocol Message. +// This class makes use of the Protocol Message compiler's tokenizer found +// in //google/protobuf/io/tokenizer.h. Note that class's Parse +// method is *not* thread-safe and should only be used in a single thread at +// a time. + +// Makes code slightly more readable. The meaning of "DO(foo)" is +// "Execute foo and fail if it fails.", where failure is indicated by +// returning false. Borrowed from parser.cc (Thanks Kenton!). +#define DO(STATEMENT) if (STATEMENT) {} else return false + +class TextFormat::Parser::ParserImpl { + public: + + // Determines if repeated values for a non-repeated field are + // permitted, e.g., the string "foo: 1 foo: 2" for a + // required/optional field named "foo". + enum SingularOverwritePolicy { + ALLOW_SINGULAR_OVERWRITES = 0, // the last value is retained + FORBID_SINGULAR_OVERWRITES = 1, // an error is issued + }; + + ParserImpl(const Descriptor* root_message_type, + io::ZeroCopyInputStream* input_stream, + io::ErrorCollector* error_collector, + TextFormat::Finder* finder, + SingularOverwritePolicy singular_overwrite_policy) + : error_collector_(error_collector), + finder_(finder), + tokenizer_error_collector_(this), + tokenizer_(input_stream, &tokenizer_error_collector_), + root_message_type_(root_message_type), + singular_overwrite_policy_(singular_overwrite_policy), + had_errors_(false) { + // For backwards-compatibility with proto1, we need to allow the 'f' suffix + // for floats. + tokenizer_.set_allow_f_after_float(true); + + // '#' starts a comment. + tokenizer_.set_comment_style(io::Tokenizer::SH_COMMENT_STYLE); + + // Consume the starting token. + tokenizer_.Next(); + } + ~ParserImpl() { } + + // Parses the ASCII representation specified in input and saves the + // information into the output pointer (a Message). Returns + // false if an error occurs (an error will also be logged to + // GOOGLE_LOG(ERROR)). + bool Parse(Message* output) { + // Consume fields until we cannot do so anymore. 
+ while(true) { + if (LookingAtType(io::Tokenizer::TYPE_END)) { + return !had_errors_; + } + + DO(ConsumeField(output)); + } + } + + bool ParseField(const FieldDescriptor* field, Message* output) { + bool suc; + if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + suc = ConsumeFieldMessage(output, output->GetReflection(), field); + } else { + suc = ConsumeFieldValue(output, output->GetReflection(), field); + } + return suc && LookingAtType(io::Tokenizer::TYPE_END); + } + + void ReportError(int line, int col, const string& message) { + had_errors_ = true; + if (error_collector_ == NULL) { + if (line >= 0) { + GOOGLE_LOG(ERROR) << "Error parsing text-format " + << root_message_type_->full_name() + << ": " << (line + 1) << ":" + << (col + 1) << ": " << message; + } else { + GOOGLE_LOG(ERROR) << "Error parsing text-format " + << root_message_type_->full_name() + << ": " << message; + } + } else { + error_collector_->AddError(line, col, message); + } + } + + void ReportWarning(int line, int col, const string& message) { + if (error_collector_ == NULL) { + if (line >= 0) { + GOOGLE_LOG(WARNING) << "Warning parsing text-format " + << root_message_type_->full_name() + << ": " << (line + 1) << ":" + << (col + 1) << ": " << message; + } else { + GOOGLE_LOG(WARNING) << "Warning parsing text-format " + << root_message_type_->full_name() + << ": " << message; + } + } else { + error_collector_->AddWarning(line, col, message); + } + } + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ParserImpl); + + // Reports an error with the given message with information indicating + // the position (as derived from the current token). + void ReportError(const string& message) { + ReportError(tokenizer_.current().line, tokenizer_.current().column, + message); + } + + // Reports a warning with the given message with information indicating + // the position (as derived from the current token). + void ReportWarning(const string& message) { + ReportWarning(tokenizer_.current().line, tokenizer_.current().column, + message); + } + + // Consumes the specified message with the given starting delimeter. + // This method checks to see that the end delimeter at the conclusion of + // the consumption matches the starting delimeter passed in here. + bool ConsumeMessage(Message* message, const string delimeter) { + while (!LookingAt(">") && !LookingAt("}")) { + DO(ConsumeField(message)); + } + + // Confirm that we have a valid ending delimeter. + DO(Consume(delimeter)); + + return true; + } + + // Consumes the current field (as returned by the tokenizer) on the + // passed in message. + bool ConsumeField(Message* message) { + const Reflection* reflection = message->GetReflection(); + const Descriptor* descriptor = message->GetDescriptor(); + + string field_name; + + const FieldDescriptor* field = NULL; + + if (TryConsume("[")) { + // Extension. + DO(ConsumeIdentifier(&field_name)); + while (TryConsume(".")) { + string part; + DO(ConsumeIdentifier(&part)); + field_name += "."; + field_name += part; + } + DO(Consume("]")); + + field = (finder_ != NULL + ? 
finder_->FindExtension(message, field_name) + : reflection->FindKnownExtensionByName(field_name)); + + if (field == NULL) { + ReportError("Extension \"" + field_name + "\" is not defined or " + "is not an extension of \"" + + descriptor->full_name() + "\"."); + return false; + } + } else { + DO(ConsumeIdentifier(&field_name)); + + field = descriptor->FindFieldByName(field_name); + // Group names are expected to be capitalized as they appear in the + // .proto file, which actually matches their type names, not their field + // names. + if (field == NULL) { + string lower_field_name = field_name; + LowerString(&lower_field_name); + field = descriptor->FindFieldByName(lower_field_name); + // If the case-insensitive match worked but the field is NOT a group, + if (field != NULL && field->type() != FieldDescriptor::TYPE_GROUP) { + field = NULL; + } + } + // Again, special-case group names as described above. + if (field != NULL && field->type() == FieldDescriptor::TYPE_GROUP + && field->message_type()->name() != field_name) { + field = NULL; + } + + if (field == NULL) { + ReportError("Message type \"" + descriptor->full_name() + + "\" has no field named \"" + field_name + "\"."); + return false; + } + } + + // Fail if the field is not repeated and it has already been specified. + if ((singular_overwrite_policy_ == FORBID_SINGULAR_OVERWRITES) && + !field->is_repeated() && reflection->HasField(*message, field)) { + ReportError("Non-repeated field \"" + field_name + + "\" is specified multiple times."); + return false; + } + + // Perform special handling for embedded message types. + if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + // ':' is optional here. + TryConsume(":"); + DO(ConsumeFieldMessage(message, reflection, field)); + } else { + DO(Consume(":")); + if (field->is_repeated() && TryConsume("[")) { + // Short repeated format, e.g. "foo: [1, 2, 3]" + while (true) { + DO(ConsumeFieldValue(message, reflection, field)); + if (TryConsume("]")) { + break; + } + DO(Consume(",")); + } + } else { + DO(ConsumeFieldValue(message, reflection, field)); + } + } + + // For historical reasons, fields may optionally be separated by commas or + // semicolons. + TryConsume(";") || TryConsume(","); + + if (field->options().deprecated()) { + ReportWarning("text format contains deprecated field \"" + + field_name + "\""); + } + + return true; + } + + bool ConsumeFieldMessage(Message* message, + const Reflection* reflection, + const FieldDescriptor* field) { + string delimeter; + if (TryConsume("<")) { + delimeter = ">"; + } else { + DO(Consume("{")); + delimeter = "}"; + } + + if (field->is_repeated()) { + DO(ConsumeMessage(reflection->AddMessage(message, field), delimeter)); + } else { + DO(ConsumeMessage(reflection->MutableMessage(message, field), + delimeter)); + } + return true; + } + + bool ConsumeFieldValue(Message* message, + const Reflection* reflection, + const FieldDescriptor* field) { + +// Define an easy to use macro for setting fields. This macro checks +// to see if the field is repeated (in which case we need to use the Add +// methods or not (in which case we need to use the Set methods). 
+#define SET_FIELD(CPPTYPE, VALUE) \ + if (field->is_repeated()) { \ + reflection->Add##CPPTYPE(message, field, VALUE); \ + } else { \ + reflection->Set##CPPTYPE(message, field, VALUE); \ + } \ + + switch(field->cpp_type()) { + case FieldDescriptor::CPPTYPE_INT32: { + int64 value; + DO(ConsumeSignedInteger(&value, kint32max)); + SET_FIELD(Int32, static_cast(value)); + break; + } + + case FieldDescriptor::CPPTYPE_UINT32: { + uint64 value; + DO(ConsumeUnsignedInteger(&value, kuint32max)); + SET_FIELD(UInt32, static_cast(value)); + break; + } + + case FieldDescriptor::CPPTYPE_INT64: { + int64 value; + DO(ConsumeSignedInteger(&value, kint64max)); + SET_FIELD(Int64, value); + break; + } + + case FieldDescriptor::CPPTYPE_UINT64: { + uint64 value; + DO(ConsumeUnsignedInteger(&value, kuint64max)); + SET_FIELD(UInt64, value); + break; + } + + case FieldDescriptor::CPPTYPE_FLOAT: { + double value; + DO(ConsumeDouble(&value)); + SET_FIELD(Float, static_cast(value)); + break; + } + + case FieldDescriptor::CPPTYPE_DOUBLE: { + double value; + DO(ConsumeDouble(&value)); + SET_FIELD(Double, value); + break; + } + + case FieldDescriptor::CPPTYPE_STRING: { + string value; + DO(ConsumeString(&value)); + SET_FIELD(String, value); + break; + } + + case FieldDescriptor::CPPTYPE_BOOL: { + if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { + uint64 value; + DO(ConsumeUnsignedInteger(&value, 1)); + SET_FIELD(Bool, value); + } else { + string value; + DO(ConsumeIdentifier(&value)); + if (value == "true" || value == "t") { + SET_FIELD(Bool, true); + } else if (value == "false" || value == "f") { + SET_FIELD(Bool, false); + } else { + ReportError("Invalid value for boolean field \"" + field->name() + + "\". Value: \"" + value + "\"."); + return false; + } + } + break; + } + + case FieldDescriptor::CPPTYPE_ENUM: { + string value; + const EnumDescriptor* enum_type = field->enum_type(); + const EnumValueDescriptor* enum_value = NULL; + + if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) { + DO(ConsumeIdentifier(&value)); + // Find the enumeration value. + enum_value = enum_type->FindValueByName(value); + + } else if (LookingAt("-") || + LookingAtType(io::Tokenizer::TYPE_INTEGER)) { + int64 int_value; + DO(ConsumeSignedInteger(&int_value, kint32max)); + value = SimpleItoa(int_value); // for error reporting + enum_value = enum_type->FindValueByNumber(int_value); + } else { + ReportError("Expected integer or identifier."); + return false; + } + + if (enum_value == NULL) { + ReportError("Unknown enumeration value of \"" + value + "\" for " + "field \"" + field->name() + "\"."); + return false; + } + + SET_FIELD(Enum, enum_value); + break; + } + + case FieldDescriptor::CPPTYPE_MESSAGE: { + // We should never get here. Put here instead of a default + // so that if new types are added, we get a nice compiler warning. + GOOGLE_LOG(FATAL) << "Reached an unintended state: CPPTYPE_MESSAGE"; + break; + } + } +#undef SET_FIELD + return true; + } + + // Returns true if the current token's text is equal to that specified. + bool LookingAt(const string& text) { + return tokenizer_.current().text == text; + } + + // Returns true if the current token's type is equal to that specified. + bool LookingAtType(io::Tokenizer::TokenType token_type) { + return tokenizer_.current().type == token_type; + } + + // Consumes an identifier and saves its value in the identifier parameter. + // Returns false if the token is not of type IDENTFIER. 
+ bool ConsumeIdentifier(string* identifier) { + if (!LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) { + ReportError("Expected identifier."); + return false; + } + + *identifier = tokenizer_.current().text; + + tokenizer_.Next(); + return true; + } + + // Consumes a string and saves its value in the text parameter. + // Returns false if the token is not of type STRING. + bool ConsumeString(string* text) { + if (!LookingAtType(io::Tokenizer::TYPE_STRING)) { + ReportError("Expected string."); + return false; + } + + text->clear(); + while (LookingAtType(io::Tokenizer::TYPE_STRING)) { + io::Tokenizer::ParseStringAppend(tokenizer_.current().text, text); + + tokenizer_.Next(); + } + + return true; + } + + // Consumes a uint64 and saves its value in the value parameter. + // Returns false if the token is not of type INTEGER. + bool ConsumeUnsignedInteger(uint64* value, uint64 max_value) { + if (!LookingAtType(io::Tokenizer::TYPE_INTEGER)) { + ReportError("Expected integer."); + return false; + } + + if (!io::Tokenizer::ParseInteger(tokenizer_.current().text, + max_value, value)) { + ReportError("Integer out of range."); + return false; + } + + tokenizer_.Next(); + return true; + } + + // Consumes an int64 and saves its value in the value parameter. + // Note that since the tokenizer does not support negative numbers, + // we actually may consume an additional token (for the minus sign) in this + // method. Returns false if the token is not an integer + // (signed or otherwise). + bool ConsumeSignedInteger(int64* value, uint64 max_value) { + bool negative = false; + + if (TryConsume("-")) { + negative = true; + // Two's complement always allows one more negative integer than + // positive. + ++max_value; + } + + uint64 unsigned_value; + + DO(ConsumeUnsignedInteger(&unsigned_value, max_value)); + + *value = static_cast(unsigned_value); + + if (negative) { + *value = -*value; + } + + return true; + } + + // Consumes a double and saves its value in the value parameter. + // Note that since the tokenizer does not support negative numbers, + // we actually may consume an additional token (for the minus sign) in this + // method. Returns false if the token is not a double + // (signed or otherwise). + bool ConsumeDouble(double* value) { + bool negative = false; + + if (TryConsume("-")) { + negative = true; + } + + // A double can actually be an integer, according to the tokenizer. + // Therefore, we must check both cases here. + if (LookingAtType(io::Tokenizer::TYPE_INTEGER)) { + // We have found an integer value for the double. + uint64 integer_value; + DO(ConsumeUnsignedInteger(&integer_value, kuint64max)); + + *value = static_cast(integer_value); + } else if (LookingAtType(io::Tokenizer::TYPE_FLOAT)) { + // We have found a float value for the double. + *value = io::Tokenizer::ParseFloat(tokenizer_.current().text); + + // Mark the current token as consumed. + tokenizer_.Next(); + } else if (LookingAtType(io::Tokenizer::TYPE_IDENTIFIER)) { + string text = tokenizer_.current().text; + LowerString(&text); + if (text == "inf" || text == "infinity") { + *value = std::numeric_limits::infinity(); + tokenizer_.Next(); + } else if (text == "nan") { + *value = std::numeric_limits::quiet_NaN(); + tokenizer_.Next(); + } else { + ReportError("Expected double."); + return false; + } + } else { + ReportError("Expected double."); + return false; + } + + if (negative) { + *value = -*value; + } + + return true; + } + + // Consumes a token and confirms that it matches that specified in the + // value parameter. 
Returns false if the token found does not match that + // which was specified. + bool Consume(const string& value) { + const string& current_value = tokenizer_.current().text; + + if (current_value != value) { + ReportError("Expected \"" + value + "\", found \"" + current_value + + "\"."); + return false; + } + + tokenizer_.Next(); + + return true; + } + + // Attempts to consume the supplied value. Returns false if a the + // token found does not match the value specified. + bool TryConsume(const string& value) { + if (tokenizer_.current().text == value) { + tokenizer_.Next(); + return true; + } else { + return false; + } + } + + // An internal instance of the Tokenizer's error collector, used to + // collect any base-level parse errors and feed them to the ParserImpl. + class ParserErrorCollector : public io::ErrorCollector { + public: + explicit ParserErrorCollector(TextFormat::Parser::ParserImpl* parser) : + parser_(parser) { } + + virtual ~ParserErrorCollector() { }; + + virtual void AddError(int line, int column, const string& message) { + parser_->ReportError(line, column, message); + } + + virtual void AddWarning(int line, int column, const string& message) { + parser_->ReportWarning(line, column, message); + } + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ParserErrorCollector); + TextFormat::Parser::ParserImpl* parser_; + }; + + io::ErrorCollector* error_collector_; + TextFormat::Finder* finder_; + ParserErrorCollector tokenizer_error_collector_; + io::Tokenizer tokenizer_; + const Descriptor* root_message_type_; + SingularOverwritePolicy singular_overwrite_policy_; + bool had_errors_; +}; + +#undef DO + +// =========================================================================== +// Internal class for writing text to the io::ZeroCopyOutputStream. Adapted +// from the Printer found in //google/protobuf/io/printer.h +class TextFormat::Printer::TextGenerator { + public: + explicit TextGenerator(io::ZeroCopyOutputStream* output, + int initial_indent_level) + : output_(output), + buffer_(NULL), + buffer_size_(0), + at_start_of_line_(true), + failed_(false), + indent_(""), + initial_indent_level_(initial_indent_level) { + indent_.resize(initial_indent_level_ * 2, ' '); + } + + ~TextGenerator() { + // Only BackUp() if we're sure we've successfully called Next() at least + // once. + if (buffer_size_ > 0) { + output_->BackUp(buffer_size_); + } + } + + // Indent text by two spaces. After calling Indent(), two spaces will be + // inserted at the beginning of each line of text. Indent() may be called + // multiple times to produce deeper indents. + void Indent() { + indent_ += " "; + } + + // Reduces the current indent level by two spaces, or crashes if the indent + // level is zero. + void Outdent() { + if (indent_.empty() || + indent_.size() < initial_indent_level_ * 2) { + GOOGLE_LOG(DFATAL) << " Outdent() without matching Indent()."; + return; + } + + indent_.resize(indent_.size() - 2); + } + + // Print text to the output stream. + void Print(const string& str) { + Print(str.data(), str.size()); + } + + // Print text to the output stream. + void Print(const char* text) { + Print(text, strlen(text)); + } + + // Print text to the output stream. + void Print(const char* text, int size) { + int pos = 0; // The number of bytes we've written so far. + + for (int i = 0; i < size; i++) { + if (text[i] == '\n') { + // Saw newline. If there is more text, we may need to insert an indent + // here. So, write what we have so far, including the '\n'. 
+ Write(text + pos, i - pos + 1); + pos = i + 1; + + // Setting this true will cause the next Write() to insert an indent + // first. + at_start_of_line_ = true; + } + } + + // Write the rest. + Write(text + pos, size - pos); + } + + // True if any write to the underlying stream failed. (We don't just + // crash in this case because this is an I/O failure, not a programming + // error.) + bool failed() const { return failed_; } + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(TextGenerator); + + void Write(const char* data, int size) { + if (failed_) return; + if (size == 0) return; + + if (at_start_of_line_) { + // Insert an indent. + at_start_of_line_ = false; + Write(indent_.data(), indent_.size()); + if (failed_) return; + } + + while (size > buffer_size_) { + // Data exceeds space in the buffer. Copy what we can and request a + // new buffer. + memcpy(buffer_, data, buffer_size_); + data += buffer_size_; + size -= buffer_size_; + void* void_buffer; + failed_ = !output_->Next(&void_buffer, &buffer_size_); + if (failed_) return; + buffer_ = reinterpret_cast(void_buffer); + } + + // Buffer is big enough to receive the data; copy it. + memcpy(buffer_, data, size); + buffer_ += size; + buffer_size_ -= size; + } + + io::ZeroCopyOutputStream* const output_; + char* buffer_; + int buffer_size_; + bool at_start_of_line_; + bool failed_; + + string indent_; + int initial_indent_level_; +}; + +// =========================================================================== + +TextFormat::Finder::~Finder() { +} + +TextFormat::Parser::Parser() + : error_collector_(NULL), + finder_(NULL), + allow_partial_(false) { +} + +TextFormat::Parser::~Parser() {} + +bool TextFormat::Parser::Parse(io::ZeroCopyInputStream* input, + Message* output) { + output->Clear(); + ParserImpl parser(output->GetDescriptor(), input, error_collector_, + finder_, ParserImpl::FORBID_SINGULAR_OVERWRITES); + return MergeUsingImpl(input, output, &parser); +} + +bool TextFormat::Parser::ParseFromString(const string& input, + Message* output) { + io::ArrayInputStream input_stream(input.data(), input.size()); + return Parse(&input_stream, output); +} + +bool TextFormat::Parser::Merge(io::ZeroCopyInputStream* input, + Message* output) { + ParserImpl parser(output->GetDescriptor(), input, error_collector_, + finder_, ParserImpl::ALLOW_SINGULAR_OVERWRITES); + return MergeUsingImpl(input, output, &parser); +} + +bool TextFormat::Parser::MergeFromString(const string& input, + Message* output) { + io::ArrayInputStream input_stream(input.data(), input.size()); + return Merge(&input_stream, output); +} + +bool TextFormat::Parser::MergeUsingImpl(io::ZeroCopyInputStream* input, + Message* output, + ParserImpl* parser_impl) { + if (!parser_impl->Parse(output)) return false; + if (!allow_partial_ && !output->IsInitialized()) { + vector missing_fields; + output->FindInitializationErrors(&missing_fields); + parser_impl->ReportError(-1, 0, "Message missing required fields: " + + JoinStrings(missing_fields, ", ")); + return false; + } + return true; +} + +bool TextFormat::Parser::ParseFieldValueFromString( + const string& input, + const FieldDescriptor* field, + Message* output) { + io::ArrayInputStream input_stream(input.data(), input.size()); + ParserImpl parser(output->GetDescriptor(), &input_stream, error_collector_, + finder_, ParserImpl::ALLOW_SINGULAR_OVERWRITES); + return parser.ParseField(field, output); +} + +/* static */ bool TextFormat::Parse(io::ZeroCopyInputStream* input, + Message* output) { + return Parser().Parse(input, output); +} + 
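The static wrappers here and below are the usual entry points to the text format. A minimal usage sketch against the TestAllTypes message that the test data files earlier in this patch exercise; the generated header path and the exact layout of the printed output are assumptions, not something this file guarantees:

#include <string>

#include <google/protobuf/text_format.h>
#include <google/protobuf/unittest.pb.h>  // assumed path of the generated unittest header

// Round-trips a couple of TestAllTypes fields through the text format.
bool TextFormatRoundTrip() {
  protobuf_unittest::TestAllTypes message;
  // ParseFromString() accepts the same representation that
  // PrintToString()/DebugString() emit.
  if (!google::protobuf::TextFormat::ParseFromString(
          "optional_int32: 101 optional_string: \"hello\"", &message)) {
    return false;
  }
  std::string text;
  google::protobuf::TextFormat::PrintToString(message, &text);
  // `text` now holds one "name: value" pair per line, e.g.
  //   optional_int32: 101
  //   optional_string: "hello"
  return message.optional_int32() == 101 && text.find("hello") != std::string::npos;
}

Note the policy split visible in the code above: Parse() forbids setting a non-repeated field twice (FORBID_SINGULAR_OVERWRITES), while Merge() lets the last value win (ALLOW_SINGULAR_OVERWRITES).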
+/* static */ bool TextFormat::Merge(io::ZeroCopyInputStream* input, + Message* output) { + return Parser().Merge(input, output); +} + +/* static */ bool TextFormat::ParseFromString(const string& input, + Message* output) { + return Parser().ParseFromString(input, output); +} + +/* static */ bool TextFormat::MergeFromString(const string& input, + Message* output) { + return Parser().MergeFromString(input, output); +} + +// =========================================================================== + +TextFormat::Printer::Printer() + : initial_indent_level_(0), + single_line_mode_(false), + use_short_repeated_primitives_(false), + utf8_string_escaping_(false) {} + +TextFormat::Printer::~Printer() {} + +bool TextFormat::Printer::PrintToString(const Message& message, + string* output) const { + GOOGLE_DCHECK(output) << "output specified is NULL"; + + output->clear(); + io::StringOutputStream output_stream(output); + + bool result = Print(message, &output_stream); + + return result; +} + +bool TextFormat::Printer::PrintUnknownFieldsToString( + const UnknownFieldSet& unknown_fields, + string* output) const { + GOOGLE_DCHECK(output) << "output specified is NULL"; + + output->clear(); + io::StringOutputStream output_stream(output); + return PrintUnknownFields(unknown_fields, &output_stream); +} + +bool TextFormat::Printer::Print(const Message& message, + io::ZeroCopyOutputStream* output) const { + TextGenerator generator(output, initial_indent_level_); + + Print(message, generator); + + // Output false if the generator failed internally. + return !generator.failed(); +} + +bool TextFormat::Printer::PrintUnknownFields( + const UnknownFieldSet& unknown_fields, + io::ZeroCopyOutputStream* output) const { + TextGenerator generator(output, initial_indent_level_); + + PrintUnknownFields(unknown_fields, generator); + + // Output false if the generator failed internally. 
+ return !generator.failed(); +} + +void TextFormat::Printer::Print(const Message& message, + TextGenerator& generator) const { + const Reflection* reflection = message.GetReflection(); + vector fields; + reflection->ListFields(message, &fields); + for (int i = 0; i < fields.size(); i++) { + PrintField(message, reflection, fields[i], generator); + } + PrintUnknownFields(reflection->GetUnknownFields(message), generator); +} + +void TextFormat::Printer::PrintFieldValueToString( + const Message& message, + const FieldDescriptor* field, + int index, + string* output) const { + + GOOGLE_DCHECK(output) << "output specified is NULL"; + + output->clear(); + io::StringOutputStream output_stream(output); + TextGenerator generator(&output_stream, initial_indent_level_); + + PrintFieldValue(message, message.GetReflection(), field, index, generator); +} + +void TextFormat::Printer::PrintField(const Message& message, + const Reflection* reflection, + const FieldDescriptor* field, + TextGenerator& generator) const { + if (use_short_repeated_primitives_ && + field->is_repeated() && + field->cpp_type() != FieldDescriptor::CPPTYPE_STRING && + field->cpp_type() != FieldDescriptor::CPPTYPE_MESSAGE) { + PrintShortRepeatedField(message, reflection, field, generator); + return; + } + + int count = 0; + + if (field->is_repeated()) { + count = reflection->FieldSize(message, field); + } else if (reflection->HasField(message, field)) { + count = 1; + } + + for (int j = 0; j < count; ++j) { + PrintFieldName(message, reflection, field, generator); + + if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + if (single_line_mode_) { + generator.Print(" { "); + } else { + generator.Print(" {\n"); + generator.Indent(); + } + } else { + generator.Print(": "); + } + + // Write the field value. + int field_index = j; + if (!field->is_repeated()) { + field_index = -1; + } + + PrintFieldValue(message, reflection, field, field_index, generator); + + if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) { + if (single_line_mode_) { + generator.Print("} "); + } else { + generator.Outdent(); + generator.Print("}\n"); + } + } else { + if (single_line_mode_) { + generator.Print(" "); + } else { + generator.Print("\n"); + } + } + } +} + +void TextFormat::Printer::PrintShortRepeatedField( + const Message& message, + const Reflection* reflection, + const FieldDescriptor* field, + TextGenerator& generator) const { + // Print primitive repeated field in short form. + PrintFieldName(message, reflection, field, generator); + + int size = reflection->FieldSize(message, field); + generator.Print(": ["); + for (int i = 0; i < size; i++) { + if (i > 0) generator.Print(", "); + PrintFieldValue(message, reflection, field, i, generator); + } + if (single_line_mode_) { + generator.Print("] "); + } else { + generator.Print("]\n"); + } +} + +void TextFormat::Printer::PrintFieldName(const Message& message, + const Reflection* reflection, + const FieldDescriptor* field, + TextGenerator& generator) const { + if (field->is_extension()) { + generator.Print("["); + // We special-case MessageSet elements for compatibility with proto1. 
+ if (field->containing_type()->options().message_set_wire_format() + && field->type() == FieldDescriptor::TYPE_MESSAGE + && field->is_optional() + && field->extension_scope() == field->message_type()) { + generator.Print(field->message_type()->full_name()); + } else { + generator.Print(field->full_name()); + } + generator.Print("]"); + } else { + if (field->type() == FieldDescriptor::TYPE_GROUP) { + // Groups must be serialized with their original capitalization. + generator.Print(field->message_type()->name()); + } else { + generator.Print(field->name()); + } + } +} + +void TextFormat::Printer::PrintFieldValue( + const Message& message, + const Reflection* reflection, + const FieldDescriptor* field, + int index, + TextGenerator& generator) const { + GOOGLE_DCHECK(field->is_repeated() || (index == -1)) + << "Index must be -1 for non-repeated fields"; + + switch (field->cpp_type()) { +#define OUTPUT_FIELD(CPPTYPE, METHOD, TO_STRING) \ + case FieldDescriptor::CPPTYPE_##CPPTYPE: \ + generator.Print(TO_STRING(field->is_repeated() ? \ + reflection->GetRepeated##METHOD(message, field, index) : \ + reflection->Get##METHOD(message, field))); \ + break; \ + + OUTPUT_FIELD( INT32, Int32, SimpleItoa); + OUTPUT_FIELD( INT64, Int64, SimpleItoa); + OUTPUT_FIELD(UINT32, UInt32, SimpleItoa); + OUTPUT_FIELD(UINT64, UInt64, SimpleItoa); + OUTPUT_FIELD( FLOAT, Float, SimpleFtoa); + OUTPUT_FIELD(DOUBLE, Double, SimpleDtoa); +#undef OUTPUT_FIELD + + case FieldDescriptor::CPPTYPE_STRING: { + string scratch; + const string& value = field->is_repeated() ? + reflection->GetRepeatedStringReference( + message, field, index, &scratch) : + reflection->GetStringReference(message, field, &scratch); + + generator.Print("\""); + if (utf8_string_escaping_) { + generator.Print(strings::Utf8SafeCEscape(value)); + } else { + generator.Print(CEscape(value)); + } + generator.Print("\""); + + break; + } + + case FieldDescriptor::CPPTYPE_BOOL: + if (field->is_repeated()) { + generator.Print(reflection->GetRepeatedBool(message, field, index) + ? "true" : "false"); + } else { + generator.Print(reflection->GetBool(message, field) + ? "true" : "false"); + } + break; + + case FieldDescriptor::CPPTYPE_ENUM: + generator.Print(field->is_repeated() ? + reflection->GetRepeatedEnum(message, field, index)->name() : + reflection->GetEnum(message, field)->name()); + break; + + case FieldDescriptor::CPPTYPE_MESSAGE: + Print(field->is_repeated() ? 
+ reflection->GetRepeatedMessage(message, field, index) : + reflection->GetMessage(message, field), + generator); + break; + } +} + +/* static */ bool TextFormat::Print(const Message& message, + io::ZeroCopyOutputStream* output) { + return Printer().Print(message, output); +} + +/* static */ bool TextFormat::PrintUnknownFields( + const UnknownFieldSet& unknown_fields, + io::ZeroCopyOutputStream* output) { + return Printer().PrintUnknownFields(unknown_fields, output); +} + +/* static */ bool TextFormat::PrintToString( + const Message& message, string* output) { + return Printer().PrintToString(message, output); +} + +/* static */ bool TextFormat::PrintUnknownFieldsToString( + const UnknownFieldSet& unknown_fields, string* output) { + return Printer().PrintUnknownFieldsToString(unknown_fields, output); +} + +/* static */ void TextFormat::PrintFieldValueToString( + const Message& message, + const FieldDescriptor* field, + int index, + string* output) { + return Printer().PrintFieldValueToString(message, field, index, output); +} + +/* static */ bool TextFormat::ParseFieldValueFromString( + const string& input, + const FieldDescriptor* field, + Message* message) { + return Parser().ParseFieldValueFromString(input, field, message); +} + +// Prints an integer as hex with a fixed number of digits dependent on the +// integer type. +template +static string PaddedHex(IntType value) { + string result; + result.reserve(sizeof(value) * 2); + for (int i = sizeof(value) * 2 - 1; i >= 0; i--) { + result.push_back(int_to_hex_digit(value >> (i*4) & 0x0F)); + } + return result; +} + +void TextFormat::Printer::PrintUnknownFields( + const UnknownFieldSet& unknown_fields, TextGenerator& generator) const { + for (int i = 0; i < unknown_fields.field_count(); i++) { + const UnknownField& field = unknown_fields.field(i); + string field_number = SimpleItoa(field.number()); + + switch (field.type()) { + case UnknownField::TYPE_VARINT: + generator.Print(field_number); + generator.Print(": "); + generator.Print(SimpleItoa(field.varint())); + if (single_line_mode_) { + generator.Print(" "); + } else { + generator.Print("\n"); + } + break; + case UnknownField::TYPE_FIXED32: { + generator.Print(field_number); + generator.Print(": 0x"); + char buffer[kFastToBufferSize]; + generator.Print(FastHex32ToBuffer(field.fixed32(), buffer)); + if (single_line_mode_) { + generator.Print(" "); + } else { + generator.Print("\n"); + } + break; + } + case UnknownField::TYPE_FIXED64: { + generator.Print(field_number); + generator.Print(": 0x"); + char buffer[kFastToBufferSize]; + generator.Print(FastHex64ToBuffer(field.fixed64(), buffer)); + if (single_line_mode_) { + generator.Print(" "); + } else { + generator.Print("\n"); + } + break; + } + case UnknownField::TYPE_LENGTH_DELIMITED: { + generator.Print(field_number); + const string& value = field.length_delimited(); + UnknownFieldSet embedded_unknown_fields; + if (!value.empty() && embedded_unknown_fields.ParseFromString(value)) { + // This field is parseable as a Message. + // So it is probably an embedded message. + if (single_line_mode_) { + generator.Print(" { "); + } else { + generator.Print(" {\n"); + generator.Indent(); + } + PrintUnknownFields(embedded_unknown_fields, generator); + if (single_line_mode_) { + generator.Print("} "); + } else { + generator.Outdent(); + generator.Print("}\n"); + } + } else { + // This field is not parseable as a Message. + // So it is probably just a plain string. 
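// For example (mirroring the PrintUnknownFields test later in this patch), a
// length-delimited unknown field whose payload parses as a message prints as
//
//   5 {
//     10: 5
//   }
//
// while a payload that does not parse, such as the single byte "4", falls
// through to the string branch below and prints as
//
//   5: "4"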
+ generator.Print(": \""); + generator.Print(CEscape(value)); + generator.Print("\""); + if (single_line_mode_) { + generator.Print(" "); + } else { + generator.Print("\n"); + } + } + break; + } + case UnknownField::TYPE_GROUP: + generator.Print(field_number); + if (single_line_mode_) { + generator.Print(" { "); + } else { + generator.Print(" {\n"); + generator.Indent(); + } + PrintUnknownFields(field.group(), generator); + if (single_line_mode_) { + generator.Print("} "); + } else { + generator.Outdent(); + generator.Print("}\n"); + } + break; + } + } +} + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/text_format.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/text_format.h new file mode 100644 index 0000000000..03c0491025 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/text_format.h @@ -0,0 +1,285 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: jschorr@google.com (Joseph Schorr) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Utilities for printing and parsing protocol messages in a human-readable, +// text-based format. + +#ifndef GOOGLE_PROTOBUF_TEXT_FORMAT_H__ +#define GOOGLE_PROTOBUF_TEXT_FORMAT_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { + +namespace io { + class ErrorCollector; // tokenizer.h +} + +// This class implements protocol buffer text format. Printing and parsing +// protocol messages in text format is useful for debugging and human editing +// of messages. +// +// This class is really a namespace that contains only static methods. 
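// A minimal usage sketch of the static helpers declared below (MyMessage is a
// purely illustrative placeholder for any generated proto2 message type):
//
//   MyMessage message;
//   string text;
//   TextFormat::PrintToString(message, &text);    // writes the text format
//   MyMessage parsed;
//   TextFormat::ParseFromString(text, &parsed);   // reads the same format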
+class LIBPROTOBUF_EXPORT TextFormat { + public: + // Outputs a textual representation of the given message to the given + // output stream. + static bool Print(const Message& message, io::ZeroCopyOutputStream* output); + + // Print the fields in an UnknownFieldSet. They are printed by tag number + // only. Embedded messages are heuristically identified by attempting to + // parse them. + static bool PrintUnknownFields(const UnknownFieldSet& unknown_fields, + io::ZeroCopyOutputStream* output); + + // Like Print(), but outputs directly to a string. + static bool PrintToString(const Message& message, string* output); + + // Like PrintUnknownFields(), but outputs directly to a string. + static bool PrintUnknownFieldsToString(const UnknownFieldSet& unknown_fields, + string* output); + + // Outputs a textual representation of the value of the field supplied on + // the message supplied. For non-repeated fields, an index of -1 must + // be supplied. Note that this method will print the default value for a + // field if it is not set. + static void PrintFieldValueToString(const Message& message, + const FieldDescriptor* field, + int index, + string* output); + + // Class for those users which require more fine-grained control over how + // a protobuffer message is printed out. + class LIBPROTOBUF_EXPORT Printer { + public: + Printer(); + ~Printer(); + + // Like TextFormat::Print + bool Print(const Message& message, io::ZeroCopyOutputStream* output) const; + // Like TextFormat::PrintUnknownFields + bool PrintUnknownFields(const UnknownFieldSet& unknown_fields, + io::ZeroCopyOutputStream* output) const; + // Like TextFormat::PrintToString + bool PrintToString(const Message& message, string* output) const; + // Like TextFormat::PrintUnknownFieldsToString + bool PrintUnknownFieldsToString(const UnknownFieldSet& unknown_fields, + string* output) const; + // Like TextFormat::PrintFieldValueToString + void PrintFieldValueToString(const Message& message, + const FieldDescriptor* field, + int index, + string* output) const; + + // Adjust the initial indent level of all output. Each indent level is + // equal to two spaces. + void SetInitialIndentLevel(int indent_level) { + initial_indent_level_ = indent_level; + } + + // If printing in single line mode, then the entire message will be output + // on a single line with no line breaks. + void SetSingleLineMode(bool single_line_mode) { + single_line_mode_ = single_line_mode; + } + + // Set true to print repeated primitives in a format like: + // field_name: [1, 2, 3, 4] + // instead of printing each value on its own line. Short format applies + // only to primitive values -- i.e. everything except strings and + // sub-messages/groups. + void SetUseShortRepeatedPrimitives(bool use_short_repeated_primitives) { + use_short_repeated_primitives_ = use_short_repeated_primitives; + } + + // Set true to output UTF-8 instead of ASCII. The only difference + // is that bytes >= 0x80 in string fields will not be escaped, + // because they are assumed to be part of UTF-8 multi-byte + // sequences. + void SetUseUtf8StringEscaping(bool as_utf8) { + utf8_string_escaping_ = as_utf8; + } + + private: + // Forward declaration of an internal class used to print the text + // output to the OutputStream (see text_format.cc for implementation). + class TextGenerator; + + // Internal Print method, used for writing to the OutputStream via + // the TextGenerator class. + void Print(const Message& message, + TextGenerator& generator) const; + + // Print a single field. 
+ void PrintField(const Message& message, + const Reflection* reflection, + const FieldDescriptor* field, + TextGenerator& generator) const; + + // Print a repeated primitive field in short form. + void PrintShortRepeatedField(const Message& message, + const Reflection* reflection, + const FieldDescriptor* field, + TextGenerator& generator) const; + + // Print the name of a field -- i.e. everything that comes before the + // ':' for a single name/value pair. + void PrintFieldName(const Message& message, + const Reflection* reflection, + const FieldDescriptor* field, + TextGenerator& generator) const; + + // Outputs a textual representation of the value of the field supplied on + // the message supplied or the default value if not set. + void PrintFieldValue(const Message& message, + const Reflection* reflection, + const FieldDescriptor* field, + int index, + TextGenerator& generator) const; + + // Print the fields in an UnknownFieldSet. They are printed by tag number + // only. Embedded messages are heuristically identified by attempting to + // parse them. + void PrintUnknownFields(const UnknownFieldSet& unknown_fields, + TextGenerator& generator) const; + + int initial_indent_level_; + + bool single_line_mode_; + + bool use_short_repeated_primitives_; + + bool utf8_string_escaping_; + }; + + // Parses a text-format protocol message from the given input stream to + // the given message object. This function parses the format written + // by Print(). + static bool Parse(io::ZeroCopyInputStream* input, Message* output); + // Like Parse(), but reads directly from a string. + static bool ParseFromString(const string& input, Message* output); + + // Like Parse(), but the data is merged into the given message, as if + // using Message::MergeFrom(). + static bool Merge(io::ZeroCopyInputStream* input, Message* output); + // Like Merge(), but reads directly from a string. + static bool MergeFromString(const string& input, Message* output); + + // Parse the given text as a single field value and store it into the + // given field of the given message. If the field is a repeated field, + // the new value will be added to the end + static bool ParseFieldValueFromString(const string& input, + const FieldDescriptor* field, + Message* message); + + // Interface that TextFormat::Parser can use to find extensions. + // This class may be extended in the future to find more information + // like fields, etc. + class LIBPROTOBUF_EXPORT Finder { + public: + virtual ~Finder(); + + // Try to find an extension of *message by fully-qualified field + // name. Returns NULL if no extension is known for this name or number. + virtual const FieldDescriptor* FindExtension( + Message* message, + const string& name) const = 0; + }; + + // For more control over parsing, use this class. + class LIBPROTOBUF_EXPORT Parser { + public: + Parser(); + ~Parser(); + + // Like TextFormat::Parse(). + bool Parse(io::ZeroCopyInputStream* input, Message* output); + // Like TextFormat::ParseFromString(). + bool ParseFromString(const string& input, Message* output); + // Like TextFormat::Merge(). + bool Merge(io::ZeroCopyInputStream* input, Message* output); + // Like TextFormat::MergeFromString(). + bool MergeFromString(const string& input, Message* output); + + // Set where to report parse errors. If NULL (the default), errors will + // be printed to stderr. + void RecordErrorsTo(io::ErrorCollector* error_collector) { + error_collector_ = error_collector; + } + + // Set how parser finds extensions. 
If NULL (the default), the + // parser will use the standard Reflection object associated with + // the message being parsed. + void SetFinder(Finder* finder) { + finder_ = finder; + } + + // Normally parsing fails if, after parsing, output->IsInitialized() + // returns false. Call AllowPartialMessage(true) to skip this check. + void AllowPartialMessage(bool allow) { + allow_partial_ = allow; + } + + // Like TextFormat::ParseFieldValueFromString + bool ParseFieldValueFromString(const string& input, + const FieldDescriptor* field, + Message* output); + + private: + // Forward declaration of an internal class used to parse text + // representations (see text_format.cc for implementation). + class ParserImpl; + + // Like TextFormat::Merge(). The provided implementation is used + // to do the parsing. + bool MergeUsingImpl(io::ZeroCopyInputStream* input, + Message* output, + ParserImpl* parser_impl); + + io::ErrorCollector* error_collector_; + Finder* finder_; + bool allow_partial_; + }; + + private: + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(TextFormat); +}; + +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_TEXT_FORMAT_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/text_format_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/text_format_unittest.cc new file mode 100644 index 0000000000..00ea8c3c2f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/text_format_unittest.cc @@ -0,0 +1,1141 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: jschorr@google.com (Joseph Schorr) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
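The Parser declared above is what the unit tests below drive. A minimal sketch of the non-static API, assuming only the headers shown (the collector class and the function name are illustrative, not part of this patch):

#include <google/protobuf/io/tokenizer.h>     // io::ErrorCollector
#include <google/protobuf/message.h>
#include <google/protobuf/text_format.h>
#include <string>
#include <vector>

// Collects parse errors instead of letting them go to stderr.
class CollectingErrorCollector : public google::protobuf::io::ErrorCollector {
 public:
  void AddError(int line, int column, const std::string& message) {
    errors_.push_back(message);
  }
  void AddWarning(int line, int column, const std::string& message) {
    warnings_.push_back(message);
  }
  std::vector<std::string> errors_;
  std::vector<std::string> warnings_;
};

bool ParseLeniently(const std::string& text, google::protobuf::Message* output) {
  google::protobuf::TextFormat::Parser parser;
  CollectingErrorCollector collector;
  parser.RecordErrorsTo(&collector);   // report errors to the collector
  parser.AllowPartialMessage(true);    // skip the IsInitialized() check
  return parser.ParseFromString(text, output);
}

The MockErrorCollector used by the tests below follows the same pattern, additionally formatting the line and column information into a single string for comparison.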
+ +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { + +// Can't use an anonymous namespace here due to brokenness of Tru64 compiler. +namespace text_format_unittest { + +inline bool IsNaN(double value) { + // NaN is never equal to anything, even itself. + return value != value; +} + +// A basic string with different escapable characters for testing. +const string kEscapeTestString = + "\"A string with ' characters \n and \r newlines and \t tabs and \001 " + "slashes \\ and multiple spaces"; + +// A representation of the above string with all the characters escaped. +const string kEscapeTestStringEscaped = + "\"\\\"A string with \\' characters \\n and \\r newlines " + "and \\t tabs and \\001 slashes \\\\ and multiple spaces\""; + +class TextFormatTest : public testing::Test { + public: + static void SetUpTestCase() { + File::ReadFileToStringOrDie( + TestSourceDir() + + "/google/protobuf/testdata/text_format_unittest_data.txt", + &static_proto_debug_string_); + } + + TextFormatTest() : proto_debug_string_(static_proto_debug_string_) {} + + protected: + // Debug string read from text_format_unittest_data.txt. + const string proto_debug_string_; + unittest::TestAllTypes proto_; + + private: + static string static_proto_debug_string_; +}; +string TextFormatTest::static_proto_debug_string_; + +class TextFormatExtensionsTest : public testing::Test { + public: + static void SetUpTestCase() { + File::ReadFileToStringOrDie( + TestSourceDir() + + "/google/protobuf/testdata/" + "text_format_unittest_extensions_data.txt", + &static_proto_debug_string_); + } + + TextFormatExtensionsTest() + : proto_debug_string_(static_proto_debug_string_) {} + + protected: + // Debug string read from text_format_unittest_data.txt. 
+ const string proto_debug_string_; + unittest::TestAllExtensions proto_; + + private: + static string static_proto_debug_string_; +}; +string TextFormatExtensionsTest::static_proto_debug_string_; + + +TEST_F(TextFormatTest, Basic) { + TestUtil::SetAllFields(&proto_); + EXPECT_EQ(proto_debug_string_, proto_.DebugString()); +} + +TEST_F(TextFormatExtensionsTest, Extensions) { + TestUtil::SetAllExtensions(&proto_); + EXPECT_EQ(proto_debug_string_, proto_.DebugString()); +} + +TEST_F(TextFormatTest, ShortDebugString) { + proto_.set_optional_int32(1); + proto_.set_optional_string("hello"); + proto_.mutable_optional_nested_message()->set_bb(2); + proto_.mutable_optional_foreign_message(); + + EXPECT_EQ("optional_int32: 1 optional_string: \"hello\" " + "optional_nested_message { bb: 2 } " + "optional_foreign_message { }", + proto_.ShortDebugString()); +} + +TEST_F(TextFormatTest, ShortPrimitiveRepeateds) { + proto_.set_optional_int32(123); + proto_.add_repeated_int32(456); + proto_.add_repeated_int32(789); + proto_.add_repeated_string("foo"); + proto_.add_repeated_string("bar"); + proto_.add_repeated_nested_message()->set_bb(2); + proto_.add_repeated_nested_message()->set_bb(3); + proto_.add_repeated_nested_enum(unittest::TestAllTypes::FOO); + proto_.add_repeated_nested_enum(unittest::TestAllTypes::BAR); + + TextFormat::Printer printer; + printer.SetUseShortRepeatedPrimitives(true); + string text; + printer.PrintToString(proto_, &text); + + EXPECT_EQ("optional_int32: 123\n" + "repeated_int32: [456, 789]\n" + "repeated_string: \"foo\"\n" + "repeated_string: \"bar\"\n" + "repeated_nested_message {\n bb: 2\n}\n" + "repeated_nested_message {\n bb: 3\n}\n" + "repeated_nested_enum: [FOO, BAR]\n", + text); + + // Try in single-line mode. + printer.SetSingleLineMode(true); + printer.PrintToString(proto_, &text); + + EXPECT_EQ("optional_int32: 123 " + "repeated_int32: [456, 789] " + "repeated_string: \"foo\" " + "repeated_string: \"bar\" " + "repeated_nested_message { bb: 2 } " + "repeated_nested_message { bb: 3 } " + "repeated_nested_enum: [FOO, BAR] ", + text); +} + + +TEST_F(TextFormatTest, StringEscape) { + // Set the string value to test. + proto_.set_optional_string(kEscapeTestString); + + // Get the DebugString from the proto. + string debug_string = proto_.DebugString(); + string utf8_debug_string = proto_.Utf8DebugString(); + + // Hardcode a correct value to test against. + string correct_string = "optional_string: " + + kEscapeTestStringEscaped + + "\n"; + + // Compare. + EXPECT_EQ(correct_string, debug_string); + // UTF-8 string is the same as non-UTF-8 because + // the protocol buffer contains no UTF-8 text. + EXPECT_EQ(correct_string, utf8_debug_string); + + string expected_short_debug_string = "optional_string: " + + kEscapeTestStringEscaped; + EXPECT_EQ(expected_short_debug_string, proto_.ShortDebugString()); +} + +TEST_F(TextFormatTest, Utf8DebugString) { + // Set the string value to test. + proto_.set_optional_string("\350\260\267\346\255\214"); + + // Get the DebugString from the proto. + string debug_string = proto_.DebugString(); + string utf8_debug_string = proto_.Utf8DebugString(); + + // Hardcode a correct value to test against. + string correct_utf8_string = "optional_string: " + "\"\350\260\267\346\255\214\"" + "\n"; + string correct_string = "optional_string: " + "\"\\350\\260\\267\\346\\255\\214\"" + "\n"; + + // Compare. 
+ EXPECT_EQ(correct_utf8_string, utf8_debug_string); + EXPECT_EQ(correct_string, debug_string); +} + +TEST_F(TextFormatTest, PrintUnknownFields) { + // Test printing of unknown fields in a message. + + unittest::TestEmptyMessage message; + UnknownFieldSet* unknown_fields = message.mutable_unknown_fields(); + + unknown_fields->AddVarint(5, 1); + unknown_fields->AddFixed32(5, 2); + unknown_fields->AddFixed64(5, 3); + unknown_fields->AddLengthDelimited(5, "4"); + unknown_fields->AddGroup(5)->AddVarint(10, 5); + + unknown_fields->AddVarint(8, 1); + unknown_fields->AddVarint(8, 2); + unknown_fields->AddVarint(8, 3); + + EXPECT_EQ( + "5: 1\n" + "5: 0x00000002\n" + "5: 0x0000000000000003\n" + "5: \"4\"\n" + "5 {\n" + " 10: 5\n" + "}\n" + "8: 1\n" + "8: 2\n" + "8: 3\n", + message.DebugString()); +} + +TEST_F(TextFormatTest, PrintUnknownMessage) { + // Test heuristic printing of messages in an UnknownFieldSet. + + protobuf_unittest::TestAllTypes message; + + // Cases which should not be interpreted as sub-messages. + + // 'a' is a valid FIXED64 tag, so for the string to be parseable as a message + // it should be followed by 8 bytes. Since this string only has two + // subsequent bytes, it should be treated as a string. + message.add_repeated_string("abc"); + + // 'd' happens to be a valid ENDGROUP tag. So, + // UnknownFieldSet::MergeFromCodedStream() will successfully parse "def", but + // the ConsumedEntireMessage() check should fail. + message.add_repeated_string("def"); + + // A zero-length string should never be interpreted as a message even though + // it is technically valid as one. + message.add_repeated_string(""); + + // Case which should be interpreted as a sub-message. + + // An actual nested message with content should always be interpreted as a + // nested message. + message.add_repeated_nested_message()->set_bb(123); + + string data; + message.SerializeToString(&data); + + string text; + UnknownFieldSet unknown_fields; + EXPECT_TRUE(unknown_fields.ParseFromString(data)); + EXPECT_TRUE(TextFormat::PrintUnknownFieldsToString(unknown_fields, &text)); + EXPECT_EQ( + "44: \"abc\"\n" + "44: \"def\"\n" + "44: \"\"\n" + "48 {\n" + " 1: 123\n" + "}\n", + text); +} + +TEST_F(TextFormatTest, PrintMessageWithIndent) { + // Test adding an initial indent to printing. + + protobuf_unittest::TestAllTypes message; + + message.add_repeated_string("abc"); + message.add_repeated_string("def"); + message.add_repeated_nested_message()->set_bb(123); + + string text; + TextFormat::Printer printer; + printer.SetInitialIndentLevel(1); + EXPECT_TRUE(printer.PrintToString(message, &text)); + EXPECT_EQ( + " repeated_string: \"abc\"\n" + " repeated_string: \"def\"\n" + " repeated_nested_message {\n" + " bb: 123\n" + " }\n", + text); +} + +TEST_F(TextFormatTest, PrintMessageSingleLine) { + // Test printing a message on a single line. 
+ + protobuf_unittest::TestAllTypes message; + + message.add_repeated_string("abc"); + message.add_repeated_string("def"); + message.add_repeated_nested_message()->set_bb(123); + + string text; + TextFormat::Printer printer; + printer.SetInitialIndentLevel(1); + printer.SetSingleLineMode(true); + EXPECT_TRUE(printer.PrintToString(message, &text)); + EXPECT_EQ( + " repeated_string: \"abc\" repeated_string: \"def\" " + "repeated_nested_message { bb: 123 } ", + text); +} + +TEST_F(TextFormatTest, ParseBasic) { + io::ArrayInputStream input_stream(proto_debug_string_.data(), + proto_debug_string_.size()); + TextFormat::Parse(&input_stream, &proto_); + TestUtil::ExpectAllFieldsSet(proto_); +} + +TEST_F(TextFormatExtensionsTest, ParseExtensions) { + io::ArrayInputStream input_stream(proto_debug_string_.data(), + proto_debug_string_.size()); + TextFormat::Parse(&input_stream, &proto_); + TestUtil::ExpectAllExtensionsSet(proto_); +} + +TEST_F(TextFormatTest, ParseEnumFieldFromNumber) { + // Create a parse string with a numerical value for an enum field. + string parse_string = strings::Substitute("optional_nested_enum: $0", + unittest::TestAllTypes::BAZ); + EXPECT_TRUE(TextFormat::ParseFromString(parse_string, &proto_)); + EXPECT_TRUE(proto_.has_optional_nested_enum()); + EXPECT_EQ(unittest::TestAllTypes::BAZ, proto_.optional_nested_enum()); +} + +TEST_F(TextFormatTest, ParseEnumFieldFromNegativeNumber) { + ASSERT_LT(unittest::SPARSE_E, 0); + string parse_string = strings::Substitute("sparse_enum: $0", + unittest::SPARSE_E); + unittest::SparseEnumMessage proto; + EXPECT_TRUE(TextFormat::ParseFromString(parse_string, &proto)); + EXPECT_TRUE(proto.has_sparse_enum()); + EXPECT_EQ(unittest::SPARSE_E, proto.sparse_enum()); +} + +TEST_F(TextFormatTest, ParseStringEscape) { + // Create a parse string with escpaed characters in it. + string parse_string = "optional_string: " + + kEscapeTestStringEscaped + + "\n"; + + io::ArrayInputStream input_stream(parse_string.data(), + parse_string.size()); + TextFormat::Parse(&input_stream, &proto_); + + // Compare. + EXPECT_EQ(kEscapeTestString, proto_.optional_string()); +} + +TEST_F(TextFormatTest, ParseConcatenatedString) { + // Create a parse string with multiple parts on one line. + string parse_string = "optional_string: \"foo\" \"bar\"\n"; + + io::ArrayInputStream input_stream1(parse_string.data(), + parse_string.size()); + TextFormat::Parse(&input_stream1, &proto_); + + // Compare. + EXPECT_EQ("foobar", proto_.optional_string()); + + // Create a parse string with multiple parts on seperate lines. + parse_string = "optional_string: \"foo\"\n" + "\"bar\"\n"; + + io::ArrayInputStream input_stream2(parse_string.data(), + parse_string.size()); + TextFormat::Parse(&input_stream2, &proto_); + + // Compare. + EXPECT_EQ("foobar", proto_.optional_string()); +} + +TEST_F(TextFormatTest, ParseFloatWithSuffix) { + // Test that we can parse a floating-point value with 'f' appended to the + // end. This is needed for backwards-compatibility with proto1. + + // Have it parse a float with the 'f' suffix. + string parse_string = "optional_float: 1.0f\n"; + + io::ArrayInputStream input_stream(parse_string.data(), + parse_string.size()); + + TextFormat::Parse(&input_stream, &proto_); + + // Compare. + EXPECT_EQ(1.0, proto_.optional_float()); +} + +TEST_F(TextFormatTest, ParseShortRepeatedForm) { + string parse_string = + // Mixed short-form and long-form are simply concatenated. 
+ "repeated_int32: 1\n" + "repeated_int32: [456, 789]\n" + "repeated_nested_enum: [ FOO ,BAR, # comment\n" + " 3]\n" + // Note that while the printer won't print repeated strings in short-form, + // the parser will accept them. + "repeated_string: [ \"foo\", 'bar' ]\n"; + + ASSERT_TRUE(TextFormat::ParseFromString(parse_string, &proto_)); + + ASSERT_EQ(3, proto_.repeated_int32_size()); + EXPECT_EQ(1, proto_.repeated_int32(0)); + EXPECT_EQ(456, proto_.repeated_int32(1)); + EXPECT_EQ(789, proto_.repeated_int32(2)); + + ASSERT_EQ(3, proto_.repeated_nested_enum_size()); + EXPECT_EQ(unittest::TestAllTypes::FOO, proto_.repeated_nested_enum(0)); + EXPECT_EQ(unittest::TestAllTypes::BAR, proto_.repeated_nested_enum(1)); + EXPECT_EQ(unittest::TestAllTypes::BAZ, proto_.repeated_nested_enum(2)); + + ASSERT_EQ(2, proto_.repeated_string_size()); + EXPECT_EQ("foo", proto_.repeated_string(0)); + EXPECT_EQ("bar", proto_.repeated_string(1)); +} + +TEST_F(TextFormatTest, Comments) { + // Test that comments are ignored. + + string parse_string = "optional_int32: 1 # a comment\n" + "optional_int64: 2 # another comment"; + + io::ArrayInputStream input_stream(parse_string.data(), + parse_string.size()); + + TextFormat::Parse(&input_stream, &proto_); + + // Compare. + EXPECT_EQ(1, proto_.optional_int32()); + EXPECT_EQ(2, proto_.optional_int64()); +} + +TEST_F(TextFormatTest, OptionalColon) { + // Test that we can place a ':' after the field name of a nested message, + // even though we don't have to. + + string parse_string = "optional_nested_message: { bb: 1}\n"; + + io::ArrayInputStream input_stream(parse_string.data(), + parse_string.size()); + + TextFormat::Parse(&input_stream, &proto_); + + // Compare. + EXPECT_TRUE(proto_.has_optional_nested_message()); + EXPECT_EQ(1, proto_.optional_nested_message().bb()); +} + +// Some platforms (e.g. Windows) insist on padding the exponent to three +// digits when one or two would be just fine. +static string RemoveRedundantZeros(string text) { + text = StringReplace(text, "e+0", "e+", true); + text = StringReplace(text, "e-0", "e-", true); + return text; +} + +TEST_F(TextFormatTest, PrintExotic) { + unittest::TestAllTypes message; + + // Note: In C, a negative integer literal is actually the unary negation + // operator being applied to a positive integer literal, and + // 9223372036854775808 is outside the range of int64. However, it is not + // outside the range of uint64. Confusingly, this means that everything + // works if we make the literal unsigned, even though we are negating it. + message.add_repeated_int64(-GOOGLE_ULONGLONG(9223372036854775808)); + message.add_repeated_uint64(GOOGLE_ULONGLONG(18446744073709551615)); + message.add_repeated_double(123.456); + message.add_repeated_double(1.23e21); + message.add_repeated_double(1.23e-18); + message.add_repeated_double(std::numeric_limits::infinity()); + message.add_repeated_double(-std::numeric_limits::infinity()); + message.add_repeated_double(std::numeric_limits::quiet_NaN()); + message.add_repeated_string(string("\000\001\a\b\f\n\r\t\v\\\'\"", 12)); + + // Fun story: We used to use 1.23e22 instead of 1.23e21 above, but this + // seemed to trigger an odd case on MinGW/GCC 3.4.5 where GCC's parsing of + // the value differed from strtod()'s parsing. That is to say, the + // following assertion fails on MinGW: + // assert(1.23e22 == strtod("1.23e22", NULL)); + // As a result, SimpleDtoa() would print the value as + // "1.2300000000000001e+22" to make sure strtod() produce the exact same + // result. 
Our goal is to test runtime parsing, not compile-time parsing, + // so this wasn't our problem. It was found that using 1.23e21 did not + // have this problem, so we switched to that instead. + + EXPECT_EQ( + "repeated_int64: -9223372036854775808\n" + "repeated_uint64: 18446744073709551615\n" + "repeated_double: 123.456\n" + "repeated_double: 1.23e+21\n" + "repeated_double: 1.23e-18\n" + "repeated_double: inf\n" + "repeated_double: -inf\n" + "repeated_double: nan\n" + "repeated_string: \"\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\'\\\"\"\n", + RemoveRedundantZeros(message.DebugString())); +} + +TEST_F(TextFormatTest, PrintFloatPrecision) { + unittest::TestAllTypes message; + + message.add_repeated_float(1.2); + message.add_repeated_float(1.23); + message.add_repeated_float(1.234); + message.add_repeated_float(1.2345); + message.add_repeated_float(1.23456); + message.add_repeated_float(1.2e10); + message.add_repeated_float(1.23e10); + message.add_repeated_float(1.234e10); + message.add_repeated_float(1.2345e10); + message.add_repeated_float(1.23456e10); + message.add_repeated_double(1.2); + message.add_repeated_double(1.23); + message.add_repeated_double(1.234); + message.add_repeated_double(1.2345); + message.add_repeated_double(1.23456); + message.add_repeated_double(1.234567); + message.add_repeated_double(1.2345678); + message.add_repeated_double(1.23456789); + message.add_repeated_double(1.234567898); + message.add_repeated_double(1.2345678987); + message.add_repeated_double(1.23456789876); + message.add_repeated_double(1.234567898765); + message.add_repeated_double(1.2345678987654); + message.add_repeated_double(1.23456789876543); + message.add_repeated_double(1.2e100); + message.add_repeated_double(1.23e100); + message.add_repeated_double(1.234e100); + message.add_repeated_double(1.2345e100); + message.add_repeated_double(1.23456e100); + message.add_repeated_double(1.234567e100); + message.add_repeated_double(1.2345678e100); + message.add_repeated_double(1.23456789e100); + message.add_repeated_double(1.234567898e100); + message.add_repeated_double(1.2345678987e100); + message.add_repeated_double(1.23456789876e100); + message.add_repeated_double(1.234567898765e100); + message.add_repeated_double(1.2345678987654e100); + message.add_repeated_double(1.23456789876543e100); + + EXPECT_EQ( + "repeated_float: 1.2\n" + "repeated_float: 1.23\n" + "repeated_float: 1.234\n" + "repeated_float: 1.2345\n" + "repeated_float: 1.23456\n" + "repeated_float: 1.2e+10\n" + "repeated_float: 1.23e+10\n" + "repeated_float: 1.234e+10\n" + "repeated_float: 1.2345e+10\n" + "repeated_float: 1.23456e+10\n" + "repeated_double: 1.2\n" + "repeated_double: 1.23\n" + "repeated_double: 1.234\n" + "repeated_double: 1.2345\n" + "repeated_double: 1.23456\n" + "repeated_double: 1.234567\n" + "repeated_double: 1.2345678\n" + "repeated_double: 1.23456789\n" + "repeated_double: 1.234567898\n" + "repeated_double: 1.2345678987\n" + "repeated_double: 1.23456789876\n" + "repeated_double: 1.234567898765\n" + "repeated_double: 1.2345678987654\n" + "repeated_double: 1.23456789876543\n" + "repeated_double: 1.2e+100\n" + "repeated_double: 1.23e+100\n" + "repeated_double: 1.234e+100\n" + "repeated_double: 1.2345e+100\n" + "repeated_double: 1.23456e+100\n" + "repeated_double: 1.234567e+100\n" + "repeated_double: 1.2345678e+100\n" + "repeated_double: 1.23456789e+100\n" + "repeated_double: 1.234567898e+100\n" + "repeated_double: 1.2345678987e+100\n" + "repeated_double: 1.23456789876e+100\n" + "repeated_double: 1.234567898765e+100\n" + 
"repeated_double: 1.2345678987654e+100\n" + "repeated_double: 1.23456789876543e+100\n", + RemoveRedundantZeros(message.DebugString())); +} + + +TEST_F(TextFormatTest, AllowPartial) { + unittest::TestRequired message; + TextFormat::Parser parser; + parser.AllowPartialMessage(true); + EXPECT_TRUE(parser.ParseFromString("a: 1", &message)); + EXPECT_EQ(1, message.a()); + EXPECT_FALSE(message.has_b()); + EXPECT_FALSE(message.has_c()); +} + +TEST_F(TextFormatTest, ParseExotic) { + unittest::TestAllTypes message; + ASSERT_TRUE(TextFormat::ParseFromString( + "repeated_int32: -1\n" + "repeated_int32: -2147483648\n" + "repeated_int64: -1\n" + "repeated_int64: -9223372036854775808\n" + "repeated_uint32: 4294967295\n" + "repeated_uint32: 2147483648\n" + "repeated_uint64: 18446744073709551615\n" + "repeated_uint64: 9223372036854775808\n" + "repeated_double: 123.0\n" + "repeated_double: 123.5\n" + "repeated_double: 0.125\n" + "repeated_double: 1.23E17\n" + "repeated_double: 1.235E+22\n" + "repeated_double: 1.235e-18\n" + "repeated_double: 123.456789\n" + "repeated_double: inf\n" + "repeated_double: Infinity\n" + "repeated_double: -inf\n" + "repeated_double: -Infinity\n" + "repeated_double: nan\n" + "repeated_double: NaN\n" + "repeated_string: \"\\000\\001\\a\\b\\f\\n\\r\\t\\v\\\\\\'\\\"\"\n", + &message)); + + ASSERT_EQ(2, message.repeated_int32_size()); + EXPECT_EQ(-1, message.repeated_int32(0)); + // Note: In C, a negative integer literal is actually the unary negation + // operator being applied to a positive integer literal, and 2147483648 is + // outside the range of int32. However, it is not outside the range of + // uint32. Confusingly, this means that everything works if we make the + // literal unsigned, even though we are negating it. + EXPECT_EQ(-2147483648u, message.repeated_int32(1)); + + ASSERT_EQ(2, message.repeated_int64_size()); + EXPECT_EQ(-1, message.repeated_int64(0)); + // Note: In C, a negative integer literal is actually the unary negation + // operator being applied to a positive integer literal, and + // 9223372036854775808 is outside the range of int64. However, it is not + // outside the range of uint64. Confusingly, this means that everything + // works if we make the literal unsigned, even though we are negating it. 
+  EXPECT_EQ(-GOOGLE_ULONGLONG(9223372036854775808), message.repeated_int64(1));
+
+  ASSERT_EQ(2, message.repeated_uint32_size());
+  EXPECT_EQ(4294967295u, message.repeated_uint32(0));
+  EXPECT_EQ(2147483648u, message.repeated_uint32(1));
+
+  ASSERT_EQ(2, message.repeated_uint64_size());
+  EXPECT_EQ(GOOGLE_ULONGLONG(18446744073709551615), message.repeated_uint64(0));
+  EXPECT_EQ(GOOGLE_ULONGLONG(9223372036854775808), message.repeated_uint64(1));
+
+  ASSERT_EQ(13, message.repeated_double_size());
+  EXPECT_EQ(123.0 , message.repeated_double(0));
+  EXPECT_EQ(123.5 , message.repeated_double(1));
+  EXPECT_EQ(0.125 , message.repeated_double(2));
+  EXPECT_EQ(1.23E17 , message.repeated_double(3));
+  EXPECT_EQ(1.235E22 , message.repeated_double(4));
+  EXPECT_EQ(1.235E-18 , message.repeated_double(5));
+  EXPECT_EQ(123.456789, message.repeated_double(6));
+  EXPECT_EQ(message.repeated_double(7), numeric_limits<double>::infinity());
+  EXPECT_EQ(message.repeated_double(8), numeric_limits<double>::infinity());
+  EXPECT_EQ(message.repeated_double(9), -numeric_limits<double>::infinity());
+  EXPECT_EQ(message.repeated_double(10), -numeric_limits<double>::infinity());
+  EXPECT_TRUE(IsNaN(message.repeated_double(11)));
+  EXPECT_TRUE(IsNaN(message.repeated_double(12)));
+
+  // Note: Since these string literals have \0's in them, we must explicitly
+  // pass their sizes to string's constructor.
+  ASSERT_EQ(1, message.repeated_string_size());
+  EXPECT_EQ(string("\000\001\a\b\f\n\r\t\v\\\'\"", 12),
+            message.repeated_string(0));
+}
+
+class TextFormatParserTest : public testing::Test {
+ protected:
+  void ExpectFailure(const string& input, const string& message, int line,
+                     int col) {
+    scoped_ptr<unittest::TestAllTypes> proto(new unittest::TestAllTypes);
+    ExpectFailure(input, message, line, col, proto.get());
+  }
+
+  void ExpectFailure(const string& input, const string& message, int line,
+                     int col, Message* proto) {
+    ExpectMessage(input, message, line, col, proto, false);
+  }
+
+  void ExpectMessage(const string& input, const string& message, int line,
+                     int col, Message* proto, bool expected_result) {
+    TextFormat::Parser parser;
+    MockErrorCollector error_collector;
+    parser.RecordErrorsTo(&error_collector);
+    EXPECT_EQ(parser.ParseFromString(input, proto), expected_result);
+    EXPECT_EQ(SimpleItoa(line) + ":" + SimpleItoa(col) + ": " + message + "\n",
+              error_collector.text_);
+  }
+
+  // An error collector which simply concatenates all its errors into a big
+  // block of text which can be checked.
+ class MockErrorCollector : public io::ErrorCollector { + public: + MockErrorCollector() {} + ~MockErrorCollector() {} + + string text_; + + // implements ErrorCollector ------------------------------------- + void AddError(int line, int column, const string& message) { + strings::SubstituteAndAppend(&text_, "$0:$1: $2\n", + line + 1, column + 1, message); + } + + void AddWarning(int line, int column, const string& message) { + AddError(line, column, "WARNING:" + message); + } + }; +}; + +TEST_F(TextFormatParserTest, ParseFieldValueFromString) { + scoped_ptr message(new unittest::TestAllTypes); + const Descriptor* d = message->GetDescriptor(); + +#define EXPECT_FIELD(name, value, valuestring) \ + EXPECT_TRUE(TextFormat::ParseFieldValueFromString( \ + valuestring, d->FindFieldByName("optional_" #name), message.get())); \ + EXPECT_EQ(value, message->optional_##name()); \ + EXPECT_TRUE(message->has_optional_##name()); + +#define EXPECT_FLOAT_FIELD(name, value, valuestring) \ + EXPECT_TRUE(TextFormat::ParseFieldValueFromString( \ + valuestring, d->FindFieldByName("optional_" #name), message.get())); \ + EXPECT_FLOAT_EQ(value, message->optional_##name()); \ + EXPECT_TRUE(message->has_optional_##name()); + +#define EXPECT_DOUBLE_FIELD(name, value, valuestring) \ + EXPECT_TRUE(TextFormat::ParseFieldValueFromString( \ + valuestring, d->FindFieldByName("optional_" #name), message.get())); \ + EXPECT_DOUBLE_EQ(value, message->optional_##name()); \ + EXPECT_TRUE(message->has_optional_##name()); + +#define EXPECT_INVALID(name, valuestring) \ + EXPECT_FALSE(TextFormat::ParseFieldValueFromString( \ + valuestring, d->FindFieldByName("optional_" #name), message.get())); + + // int32 + EXPECT_FIELD(int32, 1, "1"); + EXPECT_FIELD(int32, -1, "-1"); + EXPECT_FIELD(int32, 0x1234, "0x1234"); + EXPECT_INVALID(int32, "a"); + EXPECT_INVALID(int32, "999999999999999999999999999999999999"); + EXPECT_INVALID(int32, "1,2"); + + // int64 + EXPECT_FIELD(int64, 1, "1"); + EXPECT_FIELD(int64, -1, "-1"); + EXPECT_FIELD(int64, 0x1234567812345678LL, "0x1234567812345678"); + EXPECT_INVALID(int64, "a"); + EXPECT_INVALID(int64, "999999999999999999999999999999999999"); + EXPECT_INVALID(int64, "1,2"); + + // uint64 + EXPECT_FIELD(uint64, 1, "1"); + EXPECT_FIELD(uint64, 0xf234567812345678ULL, "0xf234567812345678"); + EXPECT_INVALID(uint64, "-1"); + EXPECT_INVALID(uint64, "a"); + EXPECT_INVALID(uint64, "999999999999999999999999999999999999"); + EXPECT_INVALID(uint64, "1,2"); + + // fixed32 + EXPECT_FIELD(fixed32, 1, "1"); + EXPECT_FIELD(fixed32, 0x12345678, "0x12345678"); + EXPECT_INVALID(fixed32, "-1"); + EXPECT_INVALID(fixed32, "a"); + EXPECT_INVALID(fixed32, "999999999999999999999999999999999999"); + EXPECT_INVALID(fixed32, "1,2"); + + // fixed64 + EXPECT_FIELD(fixed64, 1, "1"); + EXPECT_FIELD(fixed64, 0x1234567812345678ULL, "0x1234567812345678"); + EXPECT_INVALID(fixed64, "-1"); + EXPECT_INVALID(fixed64, "a"); + EXPECT_INVALID(fixed64, "999999999999999999999999999999999999"); + EXPECT_INVALID(fixed64, "1,2"); + + // bool + EXPECT_FIELD(bool, true, "true"); + EXPECT_FIELD(bool, false, "false"); + EXPECT_FIELD(bool, true, "1"); + EXPECT_FIELD(bool, true, "t"); + EXPECT_FIELD(bool, false, "0"); + EXPECT_FIELD(bool, false, "f"); + EXPECT_INVALID(bool, "2"); + EXPECT_INVALID(bool, "-0"); + EXPECT_INVALID(bool, "on"); + EXPECT_INVALID(bool, "a"); + EXPECT_INVALID(bool, "True"); + + // float + EXPECT_FIELD(float, 1, "1"); + EXPECT_FLOAT_FIELD(float, 1.5, "1.5"); + EXPECT_FLOAT_FIELD(float, 1.5e3, "1.5e3"); + EXPECT_FLOAT_FIELD(float, 
-4.55, "-4.55"); + EXPECT_INVALID(float, "a"); + EXPECT_INVALID(float, "1,2"); + + // double + EXPECT_FIELD(double, 1, "1"); + EXPECT_FIELD(double, -1, "-1"); + EXPECT_DOUBLE_FIELD(double, 2.3, "2.3"); + EXPECT_DOUBLE_FIELD(double, 3e5, "3e5"); + EXPECT_INVALID(double, "a"); + EXPECT_INVALID(double, "1,2"); + + // string + EXPECT_FIELD(string, "hello", "\"hello\""); + EXPECT_FIELD(string, "-1.87", "'-1.87'"); + EXPECT_INVALID(string, "hello"); // without quote for value + + // enum + EXPECT_FIELD(nested_enum, unittest::TestAllTypes::BAR, "BAR"); + EXPECT_FIELD(nested_enum, unittest::TestAllTypes::BAZ, + SimpleItoa(unittest::TestAllTypes::BAZ)); + EXPECT_INVALID(nested_enum, "FOOBAR"); + + // message + EXPECT_TRUE(TextFormat::ParseFieldValueFromString( + "", d->FindFieldByName("optional_nested_message"), message.get())); + EXPECT_EQ(12, message->optional_nested_message().bb()); \ + EXPECT_TRUE(message->has_optional_nested_message()); + EXPECT_INVALID(nested_message, "any"); + +#undef EXPECT_FIELD +#undef EXPECT_FLOAT_FIELD +#undef EXPECT_DOUBLE_FIELD +#undef EXPECT_INVALID +} + + +TEST_F(TextFormatParserTest, InvalidToken) { + ExpectFailure("optional_bool: true\n-5\n", "Expected identifier.", + 2, 1); + + ExpectFailure("optional_bool: true!\n", "Expected identifier.", 1, 20); + ExpectFailure("\"some string\"", "Expected identifier.", 1, 1); +} + +TEST_F(TextFormatParserTest, InvalidFieldName) { + ExpectFailure( + "invalid_field: somevalue\n", + "Message type \"protobuf_unittest.TestAllTypes\" has no field named " + "\"invalid_field\".", + 1, 14); +} + +TEST_F(TextFormatParserTest, InvalidCapitalization) { + // We require that group names be exactly as they appear in the .proto. + ExpectFailure( + "optionalgroup {\na: 15\n}\n", + "Message type \"protobuf_unittest.TestAllTypes\" has no field named " + "\"optionalgroup\".", + 1, 15); + ExpectFailure( + "OPTIONALgroup {\na: 15\n}\n", + "Message type \"protobuf_unittest.TestAllTypes\" has no field named " + "\"OPTIONALgroup\".", + 1, 15); + ExpectFailure( + "Optional_Double: 10.0\n", + "Message type \"protobuf_unittest.TestAllTypes\" has no field named " + "\"Optional_Double\".", + 1, 16); +} + +TEST_F(TextFormatParserTest, InvalidFieldValues) { + // Invalid values for a double/float field. + ExpectFailure("optional_double: \"hello\"\n", "Expected double.", 1, 18); + ExpectFailure("optional_double: true\n", "Expected double.", 1, 18); + ExpectFailure("optional_double: !\n", "Expected double.", 1, 18); + ExpectFailure("optional_double {\n \n}\n", "Expected \":\", found \"{\".", + 1, 17); + + // Invalid values for a signed integer field. + ExpectFailure("optional_int32: \"hello\"\n", "Expected integer.", 1, 17); + ExpectFailure("optional_int32: true\n", "Expected integer.", 1, 17); + ExpectFailure("optional_int32: 4.5\n", "Expected integer.", 1, 17); + ExpectFailure("optional_int32: !\n", "Expected integer.", 1, 17); + ExpectFailure("optional_int32 {\n \n}\n", "Expected \":\", found \"{\".", + 1, 16); + ExpectFailure("optional_int32: 0x80000000\n", + "Integer out of range.", 1, 17); + ExpectFailure("optional_int32: -0x80000001\n", + "Integer out of range.", 1, 18); + ExpectFailure("optional_int64: 0x8000000000000000\n", + "Integer out of range.", 1, 17); + ExpectFailure("optional_int64: -0x8000000000000001\n", + "Integer out of range.", 1, 18); + + // Invalid values for an unsigned integer field. 
+ ExpectFailure("optional_uint64: \"hello\"\n", "Expected integer.", 1, 18); + ExpectFailure("optional_uint64: true\n", "Expected integer.", 1, 18); + ExpectFailure("optional_uint64: 4.5\n", "Expected integer.", 1, 18); + ExpectFailure("optional_uint64: -5\n", "Expected integer.", 1, 18); + ExpectFailure("optional_uint64: !\n", "Expected integer.", 1, 18); + ExpectFailure("optional_uint64 {\n \n}\n", "Expected \":\", found \"{\".", + 1, 17); + ExpectFailure("optional_uint32: 0x100000000\n", + "Integer out of range.", 1, 18); + ExpectFailure("optional_uint64: 0x10000000000000000\n", + "Integer out of range.", 1, 18); + + // Invalid values for a boolean field. + ExpectFailure("optional_bool: \"hello\"\n", "Expected identifier.", 1, 16); + ExpectFailure("optional_bool: 5\n", "Integer out of range.", 1, 16); + ExpectFailure("optional_bool: -7.5\n", "Expected identifier.", 1, 16); + ExpectFailure("optional_bool: !\n", "Expected identifier.", 1, 16); + + ExpectFailure( + "optional_bool: meh\n", + "Invalid value for boolean field \"optional_bool\". Value: \"meh\".", + 2, 1); + + ExpectFailure("optional_bool {\n \n}\n", "Expected \":\", found \"{\".", + 1, 15); + + // Invalid values for a string field. + ExpectFailure("optional_string: true\n", "Expected string.", 1, 18); + ExpectFailure("optional_string: 5\n", "Expected string.", 1, 18); + ExpectFailure("optional_string: -7.5\n", "Expected string.", 1, 18); + ExpectFailure("optional_string: !\n", "Expected string.", 1, 18); + ExpectFailure("optional_string {\n \n}\n", "Expected \":\", found \"{\".", + 1, 17); + + // Invalid values for an enumeration field. + ExpectFailure("optional_nested_enum: \"hello\"\n", + "Expected integer or identifier.", 1, 23); + + // Valid token, but enum value is not defined. + ExpectFailure("optional_nested_enum: 5\n", + "Unknown enumeration value of \"5\" for field " + "\"optional_nested_enum\".", 2, 1); + // We consume the negative sign, so the error position starts one character + // later. + ExpectFailure("optional_nested_enum: -7.5\n", "Expected integer.", 1, 24); + ExpectFailure("optional_nested_enum: !\n", + "Expected integer or identifier.", 1, 23); + + ExpectFailure( + "optional_nested_enum: grah\n", + "Unknown enumeration value of \"grah\" for field " + "\"optional_nested_enum\".", 2, 1); + + ExpectFailure( + "optional_nested_enum {\n \n}\n", + "Expected \":\", found \"{\".", 1, 22); +} + +TEST_F(TextFormatParserTest, MessageDelimeters) { + // Non-matching delimeters. + ExpectFailure("OptionalGroup <\n \n}\n", "Expected \">\", found \"}\".", + 3, 1); + + // Invalid delimeters. + ExpectFailure("OptionalGroup [\n \n]\n", "Expected \"{\", found \"[\".", + 1, 15); + + // Unending message. + ExpectFailure("optional_nested_message {\n \nbb: 118\n", + "Expected identifier.", + 4, 1); +} + +TEST_F(TextFormatParserTest, UnknownExtension) { + // Non-matching delimeters. 
+ ExpectFailure("[blahblah]: 123", + "Extension \"blahblah\" is not defined or is not an " + "extension of \"protobuf_unittest.TestAllTypes\".", + 1, 11); +} + +TEST_F(TextFormatParserTest, MissingRequired) { + unittest::TestRequired message; + ExpectFailure("a: 1", + "Message missing required fields: b, c", + 0, 1, &message); +} + +TEST_F(TextFormatParserTest, ParseDuplicateRequired) { + unittest::TestRequired message; + ExpectFailure("a: 1 b: 2 c: 3 a: 1", + "Non-repeated field \"a\" is specified multiple times.", + 1, 17, &message); +} + +TEST_F(TextFormatParserTest, ParseDuplicateOptional) { + unittest::ForeignMessage message; + ExpectFailure("c: 1 c: 2", + "Non-repeated field \"c\" is specified multiple times.", + 1, 7, &message); +} + +TEST_F(TextFormatParserTest, MergeDuplicateRequired) { + unittest::TestRequired message; + TextFormat::Parser parser; + EXPECT_TRUE(parser.MergeFromString("a: 1 b: 2 c: 3 a: 4", &message)); + EXPECT_EQ(4, message.a()); +} + +TEST_F(TextFormatParserTest, MergeDuplicateOptional) { + unittest::ForeignMessage message; + TextFormat::Parser parser; + EXPECT_TRUE(parser.MergeFromString("c: 1 c: 2", &message)); + EXPECT_EQ(2, message.c()); +} + +TEST_F(TextFormatParserTest, ExplicitDelimiters) { + unittest::TestRequired message; + EXPECT_TRUE(TextFormat::ParseFromString("a:1,b:2;c:3", &message)); + EXPECT_EQ(1, message.a()); + EXPECT_EQ(2, message.b()); + EXPECT_EQ(3, message.c()); +} + +TEST_F(TextFormatParserTest, PrintErrorsToStderr) { + vector errors; + + { + ScopedMemoryLog log; + unittest::TestAllTypes proto; + EXPECT_FALSE(TextFormat::ParseFromString("no_such_field: 1", &proto)); + errors = log.GetMessages(ERROR); + } + + ASSERT_EQ(1, errors.size()); + EXPECT_EQ("Error parsing text-format protobuf_unittest.TestAllTypes: " + "1:14: Message type \"protobuf_unittest.TestAllTypes\" has no field " + "named \"no_such_field\".", + errors[0]); +} + +TEST_F(TextFormatParserTest, FailsOnTokenizationError) { + vector errors; + + { + ScopedMemoryLog log; + unittest::TestAllTypes proto; + EXPECT_FALSE(TextFormat::ParseFromString("\020", &proto)); + errors = log.GetMessages(ERROR); + } + + ASSERT_EQ(1, errors.size()); + EXPECT_EQ("Error parsing text-format protobuf_unittest.TestAllTypes: " + "1:1: Invalid control characters encountered in text.", + errors[0]); +} + +TEST_F(TextFormatParserTest, ParseDeprecatedField) { + unittest::TestDeprecatedFields message; + ExpectMessage("deprecated_int32: 42", + "WARNING:text format contains deprecated field " + "\"deprecated_int32\"", 1, 21, &message, true); +} + +class TextFormatMessageSetTest : public testing::Test { + protected: + static const char proto_debug_string_[]; +}; +const char TextFormatMessageSetTest::proto_debug_string_[] = +"message_set {\n" +" [protobuf_unittest.TestMessageSetExtension1] {\n" +" i: 23\n" +" }\n" +" [protobuf_unittest.TestMessageSetExtension2] {\n" +" str: \"foo\"\n" +" }\n" +"}\n"; + + +TEST_F(TextFormatMessageSetTest, Serialize) { + protobuf_unittest::TestMessageSetContainer proto; + protobuf_unittest::TestMessageSetExtension1* item_a = + proto.mutable_message_set()->MutableExtension( + protobuf_unittest::TestMessageSetExtension1::message_set_extension); + item_a->set_i(23); + protobuf_unittest::TestMessageSetExtension2* item_b = + proto.mutable_message_set()->MutableExtension( + protobuf_unittest::TestMessageSetExtension2::message_set_extension); + item_b->set_str("foo"); + EXPECT_EQ(proto_debug_string_, proto.DebugString()); +} + +TEST_F(TextFormatMessageSetTest, Deserialize) { + 
protobuf_unittest::TestMessageSetContainer proto; + ASSERT_TRUE(TextFormat::ParseFromString(proto_debug_string_, &proto)); + EXPECT_EQ(23, proto.message_set().GetExtension( + protobuf_unittest::TestMessageSetExtension1::message_set_extension).i()); + EXPECT_EQ("foo", proto.message_set().GetExtension( + protobuf_unittest::TestMessageSetExtension2::message_set_extension).str()); + + // Ensure that these are the only entries present. + vector descriptors; + proto.message_set().GetReflection()->ListFields( + proto.message_set(), &descriptors); + EXPECT_EQ(2, descriptors.size()); +} + +} // namespace text_format_unittest +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest.proto b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest.proto new file mode 100644 index 0000000000..97ec67475e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest.proto @@ -0,0 +1,630 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// A proto file we will use for unit testing. + + +// Some generic_services option(s) added automatically. +// See: http://go/proto2-generic-services-default +option cc_generic_services = true; // auto-added +option java_generic_services = true; // auto-added +option py_generic_services = true; // auto-added + +import "google/protobuf/unittest_import.proto"; + +// We don't put this in a package within proto2 because we need to make sure +// that the generated code doesn't depend on being in the proto2 namespace. +// In test_util.h we do "using namespace unittest = protobuf_unittest". 
+package protobuf_unittest; + +// Protos optimized for SPEED use a strict superset of the generated code +// of equivalent ones optimized for CODE_SIZE, so we should optimize all our +// tests for speed unless explicitly testing code size optimization. +option optimize_for = SPEED; + +option java_outer_classname = "UnittestProto"; + +// This proto includes every type of field in both singular and repeated +// forms. +message TestAllTypes { + message NestedMessage { + // The field name "b" fails to compile in proto1 because it conflicts with + // a local variable named "b" in one of the generated methods. Doh. + // This file needs to compile in proto1 to test backwards-compatibility. + optional int32 bb = 1; + } + + enum NestedEnum { + FOO = 1; + BAR = 2; + BAZ = 3; + } + + // Singular + optional int32 optional_int32 = 1; + optional int64 optional_int64 = 2; + optional uint32 optional_uint32 = 3; + optional uint64 optional_uint64 = 4; + optional sint32 optional_sint32 = 5; + optional sint64 optional_sint64 = 6; + optional fixed32 optional_fixed32 = 7; + optional fixed64 optional_fixed64 = 8; + optional sfixed32 optional_sfixed32 = 9; + optional sfixed64 optional_sfixed64 = 10; + optional float optional_float = 11; + optional double optional_double = 12; + optional bool optional_bool = 13; + optional string optional_string = 14; + optional bytes optional_bytes = 15; + + optional group OptionalGroup = 16 { + optional int32 a = 17; + } + + optional NestedMessage optional_nested_message = 18; + optional ForeignMessage optional_foreign_message = 19; + optional protobuf_unittest_import.ImportMessage optional_import_message = 20; + + optional NestedEnum optional_nested_enum = 21; + optional ForeignEnum optional_foreign_enum = 22; + optional protobuf_unittest_import.ImportEnum optional_import_enum = 23; + + optional string optional_string_piece = 24 [ctype=STRING_PIECE]; + optional string optional_cord = 25 [ctype=CORD]; + + // Repeated + repeated int32 repeated_int32 = 31; + repeated int64 repeated_int64 = 32; + repeated uint32 repeated_uint32 = 33; + repeated uint64 repeated_uint64 = 34; + repeated sint32 repeated_sint32 = 35; + repeated sint64 repeated_sint64 = 36; + repeated fixed32 repeated_fixed32 = 37; + repeated fixed64 repeated_fixed64 = 38; + repeated sfixed32 repeated_sfixed32 = 39; + repeated sfixed64 repeated_sfixed64 = 40; + repeated float repeated_float = 41; + repeated double repeated_double = 42; + repeated bool repeated_bool = 43; + repeated string repeated_string = 44; + repeated bytes repeated_bytes = 45; + + repeated group RepeatedGroup = 46 { + optional int32 a = 47; + } + + repeated NestedMessage repeated_nested_message = 48; + repeated ForeignMessage repeated_foreign_message = 49; + repeated protobuf_unittest_import.ImportMessage repeated_import_message = 50; + + repeated NestedEnum repeated_nested_enum = 51; + repeated ForeignEnum repeated_foreign_enum = 52; + repeated protobuf_unittest_import.ImportEnum repeated_import_enum = 53; + + repeated string repeated_string_piece = 54 [ctype=STRING_PIECE]; + repeated string repeated_cord = 55 [ctype=CORD]; + + // Singular with defaults + optional int32 default_int32 = 61 [default = 41 ]; + optional int64 default_int64 = 62 [default = 42 ]; + optional uint32 default_uint32 = 63 [default = 43 ]; + optional uint64 default_uint64 = 64 [default = 44 ]; + optional sint32 default_sint32 = 65 [default = -45 ]; + optional sint64 default_sint64 = 66 [default = 46 ]; + optional fixed32 default_fixed32 = 67 [default = 47 ]; + optional fixed64 
default_fixed64 = 68 [default = 48 ]; + optional sfixed32 default_sfixed32 = 69 [default = 49 ]; + optional sfixed64 default_sfixed64 = 70 [default = -50 ]; + optional float default_float = 71 [default = 51.5 ]; + optional double default_double = 72 [default = 52e3 ]; + optional bool default_bool = 73 [default = true ]; + optional string default_string = 74 [default = "hello"]; + optional bytes default_bytes = 75 [default = "world"]; + + optional NestedEnum default_nested_enum = 81 [default = BAR ]; + optional ForeignEnum default_foreign_enum = 82 [default = FOREIGN_BAR]; + optional protobuf_unittest_import.ImportEnum + default_import_enum = 83 [default = IMPORT_BAR]; + + optional string default_string_piece = 84 [ctype=STRING_PIECE,default="abc"]; + optional string default_cord = 85 [ctype=CORD,default="123"]; +} + +message TestDeprecatedFields { + optional int32 deprecated_int32 = 1 [deprecated=true]; +} + +// Define these after TestAllTypes to make sure the compiler can handle +// that. +message ForeignMessage { + optional int32 c = 1; +} + +enum ForeignEnum { + FOREIGN_FOO = 4; + FOREIGN_BAR = 5; + FOREIGN_BAZ = 6; +} + +message TestAllExtensions { + extensions 1 to max; +} + +extend TestAllExtensions { + // Singular + optional int32 optional_int32_extension = 1; + optional int64 optional_int64_extension = 2; + optional uint32 optional_uint32_extension = 3; + optional uint64 optional_uint64_extension = 4; + optional sint32 optional_sint32_extension = 5; + optional sint64 optional_sint64_extension = 6; + optional fixed32 optional_fixed32_extension = 7; + optional fixed64 optional_fixed64_extension = 8; + optional sfixed32 optional_sfixed32_extension = 9; + optional sfixed64 optional_sfixed64_extension = 10; + optional float optional_float_extension = 11; + optional double optional_double_extension = 12; + optional bool optional_bool_extension = 13; + optional string optional_string_extension = 14; + optional bytes optional_bytes_extension = 15; + + optional group OptionalGroup_extension = 16 { + optional int32 a = 17; + } + + optional TestAllTypes.NestedMessage optional_nested_message_extension = 18; + optional ForeignMessage optional_foreign_message_extension = 19; + optional protobuf_unittest_import.ImportMessage + optional_import_message_extension = 20; + + optional TestAllTypes.NestedEnum optional_nested_enum_extension = 21; + optional ForeignEnum optional_foreign_enum_extension = 22; + optional protobuf_unittest_import.ImportEnum + optional_import_enum_extension = 23; + + optional string optional_string_piece_extension = 24 [ctype=STRING_PIECE]; + optional string optional_cord_extension = 25 [ctype=CORD]; + + // Repeated + repeated int32 repeated_int32_extension = 31; + repeated int64 repeated_int64_extension = 32; + repeated uint32 repeated_uint32_extension = 33; + repeated uint64 repeated_uint64_extension = 34; + repeated sint32 repeated_sint32_extension = 35; + repeated sint64 repeated_sint64_extension = 36; + repeated fixed32 repeated_fixed32_extension = 37; + repeated fixed64 repeated_fixed64_extension = 38; + repeated sfixed32 repeated_sfixed32_extension = 39; + repeated sfixed64 repeated_sfixed64_extension = 40; + repeated float repeated_float_extension = 41; + repeated double repeated_double_extension = 42; + repeated bool repeated_bool_extension = 43; + repeated string repeated_string_extension = 44; + repeated bytes repeated_bytes_extension = 45; + + repeated group RepeatedGroup_extension = 46 { + optional int32 a = 47; + } + + repeated TestAllTypes.NestedMessage 
repeated_nested_message_extension = 48; + repeated ForeignMessage repeated_foreign_message_extension = 49; + repeated protobuf_unittest_import.ImportMessage + repeated_import_message_extension = 50; + + repeated TestAllTypes.NestedEnum repeated_nested_enum_extension = 51; + repeated ForeignEnum repeated_foreign_enum_extension = 52; + repeated protobuf_unittest_import.ImportEnum + repeated_import_enum_extension = 53; + + repeated string repeated_string_piece_extension = 54 [ctype=STRING_PIECE]; + repeated string repeated_cord_extension = 55 [ctype=CORD]; + + // Singular with defaults + optional int32 default_int32_extension = 61 [default = 41 ]; + optional int64 default_int64_extension = 62 [default = 42 ]; + optional uint32 default_uint32_extension = 63 [default = 43 ]; + optional uint64 default_uint64_extension = 64 [default = 44 ]; + optional sint32 default_sint32_extension = 65 [default = -45 ]; + optional sint64 default_sint64_extension = 66 [default = 46 ]; + optional fixed32 default_fixed32_extension = 67 [default = 47 ]; + optional fixed64 default_fixed64_extension = 68 [default = 48 ]; + optional sfixed32 default_sfixed32_extension = 69 [default = 49 ]; + optional sfixed64 default_sfixed64_extension = 70 [default = -50 ]; + optional float default_float_extension = 71 [default = 51.5 ]; + optional double default_double_extension = 72 [default = 52e3 ]; + optional bool default_bool_extension = 73 [default = true ]; + optional string default_string_extension = 74 [default = "hello"]; + optional bytes default_bytes_extension = 75 [default = "world"]; + + optional TestAllTypes.NestedEnum + default_nested_enum_extension = 81 [default = BAR]; + optional ForeignEnum + default_foreign_enum_extension = 82 [default = FOREIGN_BAR]; + optional protobuf_unittest_import.ImportEnum + default_import_enum_extension = 83 [default = IMPORT_BAR]; + + optional string default_string_piece_extension = 84 [ctype=STRING_PIECE, + default="abc"]; + optional string default_cord_extension = 85 [ctype=CORD, default="123"]; +} + +message TestNestedExtension { + extend TestAllExtensions { + // Check for bug where string extensions declared in tested scope did not + // compile. + optional string test = 1002 [default="test"]; + } +} + +// We have separate messages for testing required fields because it's +// annoying to have to fill in required fields in TestProto in order to +// do anything with it. Note that we don't need to test every type of +// required filed because the code output is basically identical to +// optional fields for all types. +message TestRequired { + required int32 a = 1; + optional int32 dummy2 = 2; + required int32 b = 3; + + extend TestAllExtensions { + optional TestRequired single = 1000; + repeated TestRequired multi = 1001; + } + + // Pad the field count to 32 so that we can test that IsInitialized() + // properly checks multiple elements of has_bits_. 
+ optional int32 dummy4 = 4; + optional int32 dummy5 = 5; + optional int32 dummy6 = 6; + optional int32 dummy7 = 7; + optional int32 dummy8 = 8; + optional int32 dummy9 = 9; + optional int32 dummy10 = 10; + optional int32 dummy11 = 11; + optional int32 dummy12 = 12; + optional int32 dummy13 = 13; + optional int32 dummy14 = 14; + optional int32 dummy15 = 15; + optional int32 dummy16 = 16; + optional int32 dummy17 = 17; + optional int32 dummy18 = 18; + optional int32 dummy19 = 19; + optional int32 dummy20 = 20; + optional int32 dummy21 = 21; + optional int32 dummy22 = 22; + optional int32 dummy23 = 23; + optional int32 dummy24 = 24; + optional int32 dummy25 = 25; + optional int32 dummy26 = 26; + optional int32 dummy27 = 27; + optional int32 dummy28 = 28; + optional int32 dummy29 = 29; + optional int32 dummy30 = 30; + optional int32 dummy31 = 31; + optional int32 dummy32 = 32; + + required int32 c = 33; +} + +message TestRequiredForeign { + optional TestRequired optional_message = 1; + repeated TestRequired repeated_message = 2; + optional int32 dummy = 3; +} + +// Test that we can use NestedMessage from outside TestAllTypes. +message TestForeignNested { + optional TestAllTypes.NestedMessage foreign_nested = 1; +} + +// TestEmptyMessage is used to test unknown field support. +message TestEmptyMessage { +} + +// Like above, but declare all field numbers as potential extensions. No +// actual extensions should ever be defined for this type. +message TestEmptyMessageWithExtensions { + extensions 1 to max; +} + +message TestMultipleExtensionRanges { + extensions 42; + extensions 4143 to 4243; + extensions 65536 to max; +} + +// Test that really large tag numbers don't break anything. +message TestReallyLargeTagNumber { + // The largest possible tag number is 2^28 - 1, since the wire format uses + // three bits to communicate wire type. + optional int32 a = 1; + optional int32 bb = 268435455; +} + +message TestRecursiveMessage { + optional TestRecursiveMessage a = 1; + optional int32 i = 2; +} + +// Test that mutual recursion works. +message TestMutualRecursionA { + optional TestMutualRecursionB bb = 1; +} + +message TestMutualRecursionB { + optional TestMutualRecursionA a = 1; + optional int32 optional_int32 = 2; +} + +// Test that groups have disjoint field numbers from their siblings and +// parents. This is NOT possible in proto1; only proto2. When attempting +// to compile with proto1, this will emit an error; so we only include it +// in protobuf_unittest_proto. +message TestDupFieldNumber { // NO_PROTO1 + optional int32 a = 1; // NO_PROTO1 + optional group Foo = 2 { optional int32 a = 1; } // NO_PROTO1 + optional group Bar = 3 { optional int32 a = 1; } // NO_PROTO1 +} // NO_PROTO1 + + +// Needed for a Python test. +message TestNestedMessageHasBits { + message NestedMessage { + repeated int32 nestedmessage_repeated_int32 = 1; + repeated ForeignMessage nestedmessage_repeated_foreignmessage = 2; + } + optional NestedMessage optional_nested_message = 1; +} + + +// Test an enum that has multiple values with the same number. +enum TestEnumWithDupValue { + FOO1 = 1; + BAR1 = 2; + BAZ = 3; + FOO2 = 1; + BAR2 = 2; +} + +// Test an enum with large, unordered values. +enum TestSparseEnum { + SPARSE_A = 123; + SPARSE_B = 62374; + SPARSE_C = 12589234; + SPARSE_D = -15; + SPARSE_E = -53452; + SPARSE_F = 0; + SPARSE_G = 2; +} + +// Test message with CamelCase field names. This violates Protocol Buffer +// standard style. 
+message TestCamelCaseFieldNames { + optional int32 PrimitiveField = 1; + optional string StringField = 2; + optional ForeignEnum EnumField = 3; + optional ForeignMessage MessageField = 4; + optional string StringPieceField = 5 [ctype=STRING_PIECE]; + optional string CordField = 6 [ctype=CORD]; + + repeated int32 RepeatedPrimitiveField = 7; + repeated string RepeatedStringField = 8; + repeated ForeignEnum RepeatedEnumField = 9; + repeated ForeignMessage RepeatedMessageField = 10; + repeated string RepeatedStringPieceField = 11 [ctype=STRING_PIECE]; + repeated string RepeatedCordField = 12 [ctype=CORD]; +} + + +// We list fields out of order, to ensure that we're using field number and not +// field index to determine serialization order. +message TestFieldOrderings { + optional string my_string = 11; + extensions 2 to 10; + optional int64 my_int = 1; + extensions 12 to 100; + optional float my_float = 101; +} + + +extend TestFieldOrderings { + optional string my_extension_string = 50; + optional int32 my_extension_int = 5; +} + + +message TestExtremeDefaultValues { + optional bytes escaped_bytes = 1 [default = "\0\001\a\b\f\n\r\t\v\\\'\"\xfe"]; + optional uint32 large_uint32 = 2 [default = 0xFFFFFFFF]; + optional uint64 large_uint64 = 3 [default = 0xFFFFFFFFFFFFFFFF]; + optional int32 small_int32 = 4 [default = -0x7FFFFFFF]; + optional int64 small_int64 = 5 [default = -0x7FFFFFFFFFFFFFFF]; + + // The default value here is UTF-8 for "\u1234". (We could also just type + // the UTF-8 text directly into this text file rather than escape it, but + // lots of people use editors that would be confused by this.) + optional string utf8_string = 6 [default = "\341\210\264"]; + + // Tests for single-precision floating-point values. + optional float zero_float = 7 [default = 0]; + optional float one_float = 8 [default = 1]; + optional float small_float = 9 [default = 1.5]; + optional float negative_one_float = 10 [default = -1]; + optional float negative_float = 11 [default = -1.5]; + // Using exponents + optional float large_float = 12 [default = 2E8]; + optional float small_negative_float = 13 [default = -8e-28]; + + // Text for nonfinite floating-point values. + optional double inf_double = 14 [default = inf]; + optional double neg_inf_double = 15 [default = -inf]; + optional double nan_double = 16 [default = nan]; + optional float inf_float = 17 [default = inf]; + optional float neg_inf_float = 18 [default = -inf]; + optional float nan_float = 19 [default = nan]; + + // Tests for C++ trigraphs. + // Trigraphs should be escaped in C++ generated files, but they should not be + // escaped for other languages. + // Note that in .proto file, "\?" is a valid way to escape ? in string + // literals. + optional string cpp_trigraph = 20 [default = "? \? ?? \?? \??? 
??/ ?\?-"]; +} + +message SparseEnumMessage { + optional TestSparseEnum sparse_enum = 1; +} + +// Test String and Bytes: string is for valid UTF-8 strings +message OneString { + optional string data = 1; +} + +message OneBytes { + optional bytes data = 1; +} + +// Test messages for packed fields + +message TestPackedTypes { + repeated int32 packed_int32 = 90 [packed = true]; + repeated int64 packed_int64 = 91 [packed = true]; + repeated uint32 packed_uint32 = 92 [packed = true]; + repeated uint64 packed_uint64 = 93 [packed = true]; + repeated sint32 packed_sint32 = 94 [packed = true]; + repeated sint64 packed_sint64 = 95 [packed = true]; + repeated fixed32 packed_fixed32 = 96 [packed = true]; + repeated fixed64 packed_fixed64 = 97 [packed = true]; + repeated sfixed32 packed_sfixed32 = 98 [packed = true]; + repeated sfixed64 packed_sfixed64 = 99 [packed = true]; + repeated float packed_float = 100 [packed = true]; + repeated double packed_double = 101 [packed = true]; + repeated bool packed_bool = 102 [packed = true]; + repeated ForeignEnum packed_enum = 103 [packed = true]; +} + +// A message with the same fields as TestPackedTypes, but without packing. Used +// to test packed <-> unpacked wire compatibility. +message TestUnpackedTypes { + repeated int32 unpacked_int32 = 90 [packed = false]; + repeated int64 unpacked_int64 = 91 [packed = false]; + repeated uint32 unpacked_uint32 = 92 [packed = false]; + repeated uint64 unpacked_uint64 = 93 [packed = false]; + repeated sint32 unpacked_sint32 = 94 [packed = false]; + repeated sint64 unpacked_sint64 = 95 [packed = false]; + repeated fixed32 unpacked_fixed32 = 96 [packed = false]; + repeated fixed64 unpacked_fixed64 = 97 [packed = false]; + repeated sfixed32 unpacked_sfixed32 = 98 [packed = false]; + repeated sfixed64 unpacked_sfixed64 = 99 [packed = false]; + repeated float unpacked_float = 100 [packed = false]; + repeated double unpacked_double = 101 [packed = false]; + repeated bool unpacked_bool = 102 [packed = false]; + repeated ForeignEnum unpacked_enum = 103 [packed = false]; +} + +message TestPackedExtensions { + extensions 1 to max; +} + +extend TestPackedExtensions { + repeated int32 packed_int32_extension = 90 [packed = true]; + repeated int64 packed_int64_extension = 91 [packed = true]; + repeated uint32 packed_uint32_extension = 92 [packed = true]; + repeated uint64 packed_uint64_extension = 93 [packed = true]; + repeated sint32 packed_sint32_extension = 94 [packed = true]; + repeated sint64 packed_sint64_extension = 95 [packed = true]; + repeated fixed32 packed_fixed32_extension = 96 [packed = true]; + repeated fixed64 packed_fixed64_extension = 97 [packed = true]; + repeated sfixed32 packed_sfixed32_extension = 98 [packed = true]; + repeated sfixed64 packed_sfixed64_extension = 99 [packed = true]; + repeated float packed_float_extension = 100 [packed = true]; + repeated double packed_double_extension = 101 [packed = true]; + repeated bool packed_bool_extension = 102 [packed = true]; + repeated ForeignEnum packed_enum_extension = 103 [packed = true]; +} + +// Used by ExtensionSetTest/DynamicExtensions. The test actually builds +// a set of extensions to TestAllExtensions dynamically, based on the fields +// of this message type. 
+message TestDynamicExtensions { + enum DynamicEnumType { + DYNAMIC_FOO = 2200; + DYNAMIC_BAR = 2201; + DYNAMIC_BAZ = 2202; + } + message DynamicMessageType { + optional int32 dynamic_field = 2100; + } + + optional fixed32 scalar_extension = 2000; + optional ForeignEnum enum_extension = 2001; + optional DynamicEnumType dynamic_enum_extension = 2002; + + optional ForeignMessage message_extension = 2003; + optional DynamicMessageType dynamic_message_extension = 2004; + + repeated string repeated_extension = 2005; + repeated sint32 packed_extension = 2006 [packed = true]; +} + +message TestRepeatedScalarDifferentTagSizes { + // Parsing repeated fixed size values used to fail. This message needs to be + // used in order to get a tag of the right size; all of the repeated fields + // in TestAllTypes didn't trigger the check. + repeated fixed32 repeated_fixed32 = 12; + // Check for a varint type, just for good measure. + repeated int32 repeated_int32 = 13; + + // These have two-byte tags. + repeated fixed64 repeated_fixed64 = 2046; + repeated int64 repeated_int64 = 2047; + + // Three byte tags. + repeated float repeated_float = 262142; + repeated uint64 repeated_uint64 = 262143; +} + + +// Test that RPC services work. +message FooRequest {} +message FooResponse {} + +service TestService { + rpc Foo(FooRequest) returns (FooResponse); + rpc Bar(BarRequest) returns (BarResponse); +} + + +message BarRequest {} +message BarResponse {} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_custom_options.proto b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_custom_options.proto new file mode 100644 index 0000000000..a610785f1b --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_custom_options.proto @@ -0,0 +1,366 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: benjy@google.com (Benjy Weinberger) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// A proto file used to test the "custom options" feature of proto2. + + +// Some generic_services option(s) added automatically. +// See: http://go/proto2-generic-services-default +option cc_generic_services = true; // auto-added +option java_generic_services = true; // auto-added +option py_generic_services = true; + +// A custom file option (defined below). +option (file_opt1) = 9876543210; + +import "google/protobuf/descriptor.proto"; + +// We don't put this in a package within proto2 because we need to make sure +// that the generated code doesn't depend on being in the proto2 namespace. +package protobuf_unittest; + + +// Some simple test custom options of various types. + +extend google.protobuf.FileOptions { + optional uint64 file_opt1 = 7736974; +} + +extend google.protobuf.MessageOptions { + optional int32 message_opt1 = 7739036; +} + +extend google.protobuf.FieldOptions { + optional fixed64 field_opt1 = 7740936; + // This is useful for testing that we correctly register default values for + // extension options. + optional int32 field_opt2 = 7753913 [default=42]; +} + +extend google.protobuf.EnumOptions { + optional sfixed32 enum_opt1 = 7753576; +} + +extend google.protobuf.EnumValueOptions { + optional int32 enum_value_opt1 = 1560678; +} + +extend google.protobuf.ServiceOptions { + optional sint64 service_opt1 = 7887650; +} + +enum MethodOpt1 { + METHODOPT1_VAL1 = 1; + METHODOPT1_VAL2 = 2; +} + +extend google.protobuf.MethodOptions { + optional MethodOpt1 method_opt1 = 7890860; +} + +// A test message with custom options at all possible locations (and also some +// regular options, to make sure they interact nicely). +message TestMessageWithCustomOptions { + option message_set_wire_format = false; + + option (message_opt1) = -56; + + optional string field1 = 1 [ctype=CORD, + (field_opt1)=8765432109]; + + enum AnEnum { + option (enum_opt1) = -789; + + ANENUM_VAL1 = 1; + ANENUM_VAL2 = 2 [(enum_value_opt1) = 123]; + } +} + + +// A test RPC service with custom options at all possible locations (and also +// some regular options, to make sure they interact nicely). +message CustomOptionFooRequest { +} + +message CustomOptionFooResponse { +} + +service TestServiceWithCustomOptions { + option (service_opt1) = -9876543210; + + rpc Foo(CustomOptionFooRequest) returns (CustomOptionFooResponse) { + option (method_opt1) = METHODOPT1_VAL2; + } +} + + + +// Options of every possible field type, so we can test them all exhaustively. 
+ +message DummyMessageContainingEnum { + enum TestEnumType { + TEST_OPTION_ENUM_TYPE1 = 22; + TEST_OPTION_ENUM_TYPE2 = -23; + } +} + +message DummyMessageInvalidAsOptionType { +} + +extend google.protobuf.MessageOptions { + optional bool bool_opt = 7706090; + optional int32 int32_opt = 7705709; + optional int64 int64_opt = 7705542; + optional uint32 uint32_opt = 7704880; + optional uint64 uint64_opt = 7702367; + optional sint32 sint32_opt = 7701568; + optional sint64 sint64_opt = 7700863; + optional fixed32 fixed32_opt = 7700307; + optional fixed64 fixed64_opt = 7700194; + optional sfixed32 sfixed32_opt = 7698645; + optional sfixed64 sfixed64_opt = 7685475; + optional float float_opt = 7675390; + optional double double_opt = 7673293; + optional string string_opt = 7673285; + optional bytes bytes_opt = 7673238; + optional DummyMessageContainingEnum.TestEnumType enum_opt = 7673233; + optional DummyMessageInvalidAsOptionType message_type_opt = 7665967; +} + +message CustomOptionMinIntegerValues { + option (bool_opt) = false; + option (int32_opt) = -0x80000000; + option (int64_opt) = -0x8000000000000000; + option (uint32_opt) = 0; + option (uint64_opt) = 0; + option (sint32_opt) = -0x80000000; + option (sint64_opt) = -0x8000000000000000; + option (fixed32_opt) = 0; + option (fixed64_opt) = 0; + option (sfixed32_opt) = -0x80000000; + option (sfixed64_opt) = -0x8000000000000000; +} + +message CustomOptionMaxIntegerValues { + option (bool_opt) = true; + option (int32_opt) = 0x7FFFFFFF; + option (int64_opt) = 0x7FFFFFFFFFFFFFFF; + option (uint32_opt) = 0xFFFFFFFF; + option (uint64_opt) = 0xFFFFFFFFFFFFFFFF; + option (sint32_opt) = 0x7FFFFFFF; + option (sint64_opt) = 0x7FFFFFFFFFFFFFFF; + option (fixed32_opt) = 0xFFFFFFFF; + option (fixed64_opt) = 0xFFFFFFFFFFFFFFFF; + option (sfixed32_opt) = 0x7FFFFFFF; + option (sfixed64_opt) = 0x7FFFFFFFFFFFFFFF; +} + +message CustomOptionOtherValues { + option (int32_opt) = -100; // To test sign-extension. + option (float_opt) = 12.3456789; + option (double_opt) = 1.234567890123456789; + option (string_opt) = "Hello, \"World\""; + option (bytes_opt) = "Hello\0World"; + option (enum_opt) = TEST_OPTION_ENUM_TYPE2; +} + +message SettingRealsFromPositiveInts { + option (float_opt) = 12; + option (double_opt) = 154; +} + +message SettingRealsFromNegativeInts { + option (float_opt) = -12; + option (double_opt) = -154; +} + +// Options of complex message types, themselves combined and extended in +// various ways. 
+ +message ComplexOptionType1 { + optional int32 foo = 1; + optional int32 foo2 = 2; + optional int32 foo3 = 3; + + extensions 100 to max; +} + +message ComplexOptionType2 { + optional ComplexOptionType1 bar = 1; + optional int32 baz = 2; + + message ComplexOptionType4 { + optional int32 waldo = 1; + + extend google.protobuf.MessageOptions { + optional ComplexOptionType4 complex_opt4 = 7633546; + } + } + + optional ComplexOptionType4 fred = 3; + + extensions 100 to max; +} + +message ComplexOptionType3 { + optional int32 qux = 1; + + optional group ComplexOptionType5 = 2 { + optional int32 plugh = 3; + } +} + +extend ComplexOptionType1 { + optional int32 quux = 7663707; + optional ComplexOptionType3 corge = 7663442; +} + +extend ComplexOptionType2 { + optional int32 grault = 7650927; + optional ComplexOptionType1 garply = 7649992; +} + +extend google.protobuf.MessageOptions { + optional protobuf_unittest.ComplexOptionType1 complex_opt1 = 7646756; + optional ComplexOptionType2 complex_opt2 = 7636949; + optional ComplexOptionType3 complex_opt3 = 7636463; + optional group ComplexOpt6 = 7595468 { + optional int32 xyzzy = 7593951; + } +} + +// Note that we try various different ways of naming the same extension. +message VariousComplexOptions { + option (.protobuf_unittest.complex_opt1).foo = 42; + option (protobuf_unittest.complex_opt1).(.protobuf_unittest.quux) = 324; + option (.protobuf_unittest.complex_opt1).(protobuf_unittest.corge).qux = 876; + option (complex_opt2).baz = 987; + option (complex_opt2).(grault) = 654; + option (complex_opt2).bar.foo = 743; + option (complex_opt2).bar.(quux) = 1999; + option (complex_opt2).bar.(protobuf_unittest.corge).qux = 2008; + option (complex_opt2).(garply).foo = 741; + option (complex_opt2).(garply).(.protobuf_unittest.quux) = 1998; + option (complex_opt2).(protobuf_unittest.garply).(corge).qux = 2121; + option (ComplexOptionType2.ComplexOptionType4.complex_opt4).waldo = 1971; + option (complex_opt2).fred.waldo = 321; + option (protobuf_unittest.complex_opt3).qux = 9; + option (complex_opt3).complexoptiontype5.plugh = 22; + option (complexopt6).xyzzy = 24; +} + +// ------------------------------------------------------ +// Definitions for testing aggregate option parsing. +// See descriptor_unittest.cc. + +message AggregateMessageSet { + option message_set_wire_format = true; + extensions 4 to max; +} + +message AggregateMessageSetElement { + extend AggregateMessageSet { + optional AggregateMessageSetElement message_set_extension = 15447542; + } + optional string s = 1; +} + +// A helper type used to test aggregate option parsing +message Aggregate { + optional int32 i = 1; + optional string s = 2; + + // A nested object + optional Aggregate sub = 3; + + // To test the parsing of extensions inside aggregate values + optional google.protobuf.FileOptions file = 4; + extend google.protobuf.FileOptions { + optional Aggregate nested = 15476903; + } + + // An embedded message set + optional AggregateMessageSet mset = 5; +} + +// Allow Aggregate to be used as an option at all possible locations +// in the .proto grammer. 
+extend google.protobuf.FileOptions { optional Aggregate fileopt = 15478479; } +extend google.protobuf.MessageOptions { optional Aggregate msgopt = 15480088; } +extend google.protobuf.FieldOptions { optional Aggregate fieldopt = 15481374; } +extend google.protobuf.EnumOptions { optional Aggregate enumopt = 15483218; } +extend google.protobuf.EnumValueOptions { optional Aggregate enumvalopt = 15486921; } +extend google.protobuf.ServiceOptions { optional Aggregate serviceopt = 15497145; } +extend google.protobuf.MethodOptions { optional Aggregate methodopt = 15512713; } + +// Try using AggregateOption at different points in the proto grammar +option (fileopt) = { + s: 'FileAnnotation' + // Also test the handling of comments + /* of both types */ i: 100 + + sub { s: 'NestedFileAnnotation' } + + // Include a google.protobuf.FileOptions and recursively extend it with + // another fileopt. + file { + [protobuf_unittest.fileopt] { + s:'FileExtensionAnnotation' + } + } + + // A message set inside an option value + mset { + [protobuf_unittest.AggregateMessageSetElement.message_set_extension] { + s: 'EmbeddedMessageSetElement' + } + } +}; + +message AggregateMessage { + option (msgopt) = { i:101 s:'MessageAnnotation' }; + optional int32 fieldname = 1 [(fieldopt) = { s:'FieldAnnotation' }]; +} + +service AggregateService { + option (serviceopt) = { s:'ServiceAnnotation' }; + rpc Method (AggregateMessage) returns (AggregateMessage) { + option (methodopt) = { s:'MethodAnnotation' }; + } +} + +enum AggregateEnum { + option (enumopt) = { s:'EnumAnnotation' }; + VALUE = 1 [(enumvalopt) = { s:'EnumValueAnnotation' }]; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_embed_optimize_for.proto b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_embed_optimize_for.proto new file mode 100644 index 0000000000..fa1762594e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_embed_optimize_for.proto @@ -0,0 +1,50 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// A proto file which imports a proto file that uses optimize_for = CODE_SIZE. + +import "google/protobuf/unittest_optimize_for.proto"; + +package protobuf_unittest; + +// We optimize for speed here, but we are importing a proto that is optimized +// for code size. +option optimize_for = SPEED; + +message TestEmbedOptimizedForSize { + // Test that embedding a message which has optimize_for = CODE_SIZE into + // one optimized for speed works. + optional TestOptimizedForSize optional_message = 1; + repeated TestOptimizedForSize repeated_message = 2; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_empty.proto b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_empty.proto new file mode 100644 index 0000000000..ab12d1fb9f --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_empty.proto @@ -0,0 +1,37 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This file intentionally left blank. 
(At one point this wouldn't compile +// correctly.) + diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_enormous_descriptor.proto b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_enormous_descriptor.proto new file mode 100644 index 0000000000..bc0b7c16a0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_enormous_descriptor.proto @@ -0,0 +1,1046 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// A proto file that has an extremely large descriptor. Used to test that +// descriptors over 64k don't break the string literal length limit in Java. + + +package google.protobuf; +option java_package = "com.google.protobuf"; + +// Avoid generating insanely long methods. 
+option optimize_for = CODE_SIZE; + +message TestEnormousDescriptor { + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_1 = 1 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_2 = 2 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_3 = 3 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_4 = 4 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_5 = 5 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_6 = 6 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_7 = 7 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_8 = 8 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_9 = 9 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_10 = 10 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_11 = 11 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_12 = 12 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_13 = 13 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_14 = 14 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_15 = 15 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_16 = 16 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_17 = 17 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_18 = 18 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_19 = 19 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_20 = 20 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_21 = 21 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_22 = 22 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_23 = 23 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_24 = 24 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_25 = 25 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_26 = 26 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_27 = 27 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_28 = 28 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_29 = 29 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_30 = 30 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_31 = 31 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_32 = 32 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_33 = 33 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_34 = 34 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_35 = 35 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_36 = 36 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_37 = 37 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_38 = 38 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_39 = 39 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_40 = 40 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_41 = 41 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_42 = 42 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_43 = 43 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_44 = 44 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_45 = 45 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_46 = 46 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_47 = 47 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_48 = 48 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_49 = 49 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_50 = 50 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_51 = 51 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_52 = 52 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_53 = 53 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_54 = 54 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_55 = 55 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_56 = 56 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_57 = 57 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_58 = 58 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_59 = 59 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_60 = 60 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_61 = 61 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_62 = 62 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_63 = 63 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_64 = 64 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_65 = 65 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_66 = 66 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_67 = 67 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_68 = 68 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_69 = 69 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_70 = 70 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_71 = 71 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_72 = 72 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_73 = 73 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_74 = 74 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_75 = 75 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_76 = 76 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_77 = 77 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_78 = 78 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_79 = 79 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_80 = 80 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_81 = 81 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_82 = 82 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_83 = 83 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_84 = 84 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_85 = 85 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_86 = 86 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_87 = 87 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_88 = 88 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_89 = 89 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_90 = 90 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_91 = 91 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_92 = 92 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_93 = 93 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_94 = 94 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_95 = 95 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_96 = 96 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_97 = 97 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_98 = 98 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_99 = 99 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_100 = 100 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_101 = 101 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_102 = 102 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_103 = 103 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_104 = 104 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_105 = 105 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_106 = 106 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_107 = 107 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_108 = 108 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_109 = 109 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_110 = 110 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_111 = 111 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_112 = 112 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_113 = 113 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_114 = 114 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_115 = 115 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_116 = 116 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_117 = 117 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_118 = 118 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_119 = 119 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_120 = 120 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_121 = 121 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_122 = 122 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_123 = 123 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_124 = 124 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_125 = 125 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_126 = 126 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_127 = 127 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_128 = 128 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_129 = 129 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_130 = 130 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_131 = 131 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_132 = 132 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_133 = 133 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_134 = 134 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_135 = 135 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_136 = 136 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_137 = 137 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_138 = 138 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_139 = 139 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_140 = 140 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_141 = 141 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_142 = 142 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_143 = 143 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_144 = 144 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_145 = 145 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_146 = 146 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_147 = 147 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_148 = 148 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_149 = 149 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_150 = 150 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_151 = 151 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_152 = 152 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_153 = 153 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_154 = 154 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_155 = 155 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_156 = 156 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_157 = 157 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_158 = 158 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_159 = 159 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_160 = 160 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_161 = 161 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_162 = 162 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_163 = 163 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_164 = 164 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_165 = 165 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_166 = 166 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_167 = 167 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_168 = 168 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_169 = 169 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_170 = 170 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_171 = 171 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_172 = 172 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_173 = 173 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_174 = 174 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_175 = 175 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_176 = 176 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_177 = 177 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_178 = 178 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_179 = 179 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_180 = 180 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_181 = 181 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_182 = 182 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_183 = 183 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_184 = 184 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_185 = 185 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_186 = 186 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_187 = 187 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_188 = 188 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_189 = 189 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_190 = 190 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_191 = 191 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_192 = 192 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_193 = 193 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_194 = 194 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_195 = 195 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_196 = 196 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_197 = 197 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_198 = 198 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_199 = 199 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_200 = 200 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_201 = 201 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_202 = 202 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_203 = 203 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_204 = 204 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_205 = 205 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_206 = 206 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_207 = 207 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_208 = 208 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_209 = 209 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_210 = 210 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_211 = 211 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_212 = 212 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_213 = 213 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_214 = 214 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_215 = 215 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_216 = 216 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_217 = 217 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_218 = 218 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_219 = 219 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_220 = 220 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_221 = 221 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_222 = 222 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_223 = 223 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_224 = 224 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_225 = 225 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_226 = 226 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_227 = 227 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_228 = 228 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_229 = 229 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_230 = 230 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_231 = 231 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_232 = 232 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_233 = 233 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_234 = 234 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_235 = 235 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_236 = 236 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_237 = 237 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_238 = 238 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_239 = 239 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_240 = 240 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_241 = 241 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_242 = 242 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_243 = 243 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_244 = 244 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_245 = 245 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_246 = 246 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_247 = 247 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_248 = 248 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_249 = 249 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_250 = 250 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_251 = 251 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_252 = 252 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_253 = 253 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_254 = 254 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_255 = 255 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_256 = 256 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_257 = 257 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_258 = 258 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_259 = 259 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_260 = 260 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_261 = 261 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_262 = 262 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_263 = 263 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_264 = 264 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_265 = 265 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_266 = 266 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_267 = 267 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_268 = 268 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_269 = 269 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_270 = 270 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_271 = 271 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_272 = 272 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_273 = 273 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_274 = 274 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_275 = 275 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_276 = 276 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_277 = 277 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_278 = 278 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_279 = 279 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_280 = 280 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_281 = 281 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"];
+ optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_282 = 282 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"];
[fields 283 through 573 of this generated test message repeat the identical pattern, one added line per field: "+ optional string long_field_name_is_l(...)ong_N = N" with the same long default value]
+ optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_574 = 574 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"];
+ optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_575 = 575 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_576 = 576 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_577 = 577 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_578 = 578 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_579 = 579 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_580 = 580 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_581 = 581 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_582 = 582 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_583 = 583 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_584 = 584 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_585 = 585 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_586 = 586 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_587 = 587 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_588 = 588 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_589 = 589 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_590 = 590 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_591 = 591 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_592 = 592 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_593 = 593 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_594 = 594 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_595 = 595 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_596 = 596 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_597 = 597 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_598 = 598 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_599 = 599 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_600 = 600 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_601 = 601 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_602 = 602 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_603 = 603 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_604 = 604 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_605 = 605 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_606 = 606 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_607 = 607 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_608 = 608 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_609 = 609 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_610 = 610 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_611 = 611 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_612 = 612 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_613 = 613 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_614 = 614 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_615 = 615 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_616 = 616 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_617 = 617 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_618 = 618 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_619 = 619 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_620 = 620 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_621 = 621 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_622 = 622 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_623 = 623 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_624 = 624 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_625 = 625 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_626 = 626 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_627 = 627 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_628 = 628 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_629 = 629 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_630 = 630 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_631 = 631 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_632 = 632 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_633 = 633 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_634 = 634 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_635 = 635 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_636 = 636 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_637 = 637 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_638 = 638 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_639 = 639 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_640 = 640 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_641 = 641 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_642 = 642 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_643 = 643 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_644 = 644 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_645 = 645 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_646 = 646 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_647 = 647 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_648 = 648 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_649 = 649 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_650 = 650 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_651 = 651 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_652 = 652 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_653 = 653 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_654 = 654 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_655 = 655 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_656 = 656 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_657 = 657 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_658 = 658 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_659 = 659 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_660 = 660 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_661 = 661 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_662 = 662 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_663 = 663 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_664 = 664 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_665 = 665 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_666 = 666 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_667 = 667 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_668 = 668 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_669 = 669 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_670 = 670 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_671 = 671 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_672 = 672 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_673 = 673 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_674 = 674 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_675 = 675 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_676 = 676 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_677 = 677 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_678 = 678 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_679 = 679 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_680 = 680 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_681 = 681 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_682 = 682 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_683 = 683 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_684 = 684 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_685 = 685 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_686 = 686 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_687 = 687 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_688 = 688 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_689 = 689 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_690 = 690 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_691 = 691 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_692 = 692 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_693 = 693 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_694 = 694 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_695 = 695 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_696 = 696 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_697 = 697 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_698 = 698 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_699 = 699 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_700 = 700 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_701 = 701 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_702 = 702 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_703 = 703 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_704 = 704 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_705 = 705 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_706 = 706 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_707 = 707 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_708 = 708 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_709 = 709 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_710 = 710 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_711 = 711 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_712 = 712 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_713 = 713 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_714 = 714 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_715 = 715 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_716 = 716 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_717 = 717 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_718 = 718 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_719 = 719 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_720 = 720 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_721 = 721 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_722 = 722 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_723 = 723 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_724 = 724 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_725 = 725 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_726 = 726 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_727 = 727 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_728 = 728 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_729 = 729 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_730 = 730 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_731 = 731 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_732 = 732 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_733 = 733 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_734 = 734 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_735 = 735 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_736 = 736 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_737 = 737 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_738 = 738 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_739 = 739 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_740 = 740 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_741 = 741 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_742 = 742 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_743 = 743 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_744 = 744 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_745 = 745 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_746 = 746 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_747 = 747 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_748 = 748 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_749 = 749 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_750 = 750 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_751 = 751 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_752 = 752 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_753 = 753 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_754 = 754 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_755 = 755 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_756 = 756 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_757 = 757 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_758 = 758 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_759 = 759 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_760 = 760 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_761 = 761 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_762 = 762 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_763 = 763 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_764 = 764 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_765 = 765 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_766 = 766 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_767 = 767 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_768 = 768 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_769 = 769 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_770 = 770 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_771 = 771 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_772 = 772 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_773 = 773 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_774 = 774 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_775 = 775 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_776 = 776 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_777 = 777 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_778 = 778 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_779 = 779 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_780 = 780 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_781 = 781 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_782 = 782 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_783 = 783 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_784 = 784 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_785 = 785 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_786 = 786 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_787 = 787 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_788 = 788 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_789 = 789 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_790 = 790 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_791 = 791 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_792 = 792 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_793 = 793 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_794 = 794 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_795 = 795 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_796 = 796 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_797 = 797 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_798 = 798 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_799 = 799 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_800 = 800 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_801 = 801 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_802 = 802 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_803 = 803 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_804 = 804 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_805 = 805 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_806 = 806 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_807 = 807 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_808 = 808 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_809 = 809 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_810 = 810 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_811 = 811 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_812 = 812 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_813 = 813 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_814 = 814 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_815 = 815 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_816 = 816 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_817 = 817 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_818 = 818 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_819 = 819 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_820 = 820 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_821 = 821 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_822 = 822 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_823 = 823 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_824 = 824 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_825 = 825 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_826 = 826 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_827 = 827 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_828 = 828 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_829 = 829 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_830 = 830 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_831 = 831 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_832 = 832 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_833 = 833 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_834 = 834 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_835 = 835 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_836 = 836 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_837 = 837 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_838 = 838 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_839 = 839 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_840 = 840 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_841 = 841 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_842 = 842 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_843 = 843 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_844 = 844 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_845 = 845 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_846 = 846 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_847 = 847 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_848 = 848 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_849 = 849 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_850 = 850 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_851 = 851 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_852 = 852 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_853 = 853 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_854 = 854 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_855 = 855 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_856 = 856 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_857 = 857 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_858 = 858 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_859 = 859 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_860 = 860 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_861 = 861 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_862 = 862 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_863 = 863 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_864 = 864 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_865 = 865 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_866 = 866 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_867 = 867 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_868 = 868 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_869 = 869 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_870 = 870 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_871 = 871 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_872 = 872 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_873 = 873 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_874 = 874 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_875 = 875 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_876 = 876 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_877 = 877 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_878 = 878 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_879 = 879 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_880 = 880 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_881 = 881 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_882 = 882 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_883 = 883 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_884 = 884 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_885 = 885 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_886 = 886 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_887 = 887 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_888 = 888 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_889 = 889 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_890 = 890 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_891 = 891 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_892 = 892 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_893 = 893 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_894 = 894 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_895 = 895 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_896 = 896 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_897 = 897 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_898 = 898 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_899 = 899 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_900 = 900 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_901 = 901 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_902 = 902 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_903 = 903 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_904 = 904 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_905 = 905 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_906 = 906 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_907 = 907 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_908 = 908 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_909 = 909 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_910 = 910 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_911 = 911 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_912 = 912 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_913 = 913 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_914 = 914 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_915 = 915 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_916 = 916 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_917 = 917 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_918 = 918 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_919 = 919 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_920 = 920 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_921 = 921 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_922 = 922 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_923 = 923 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_924 = 924 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_925 = 925 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_926 = 926 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_927 = 927 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_928 = 928 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_929 = 929 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_930 = 930 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_931 = 931 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_932 = 932 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_933 = 933 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_934 = 934 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_935 = 935 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_936 = 936 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_937 = 937 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_938 = 938 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_939 = 939 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_940 = 940 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_941 = 941 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_942 = 942 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_943 = 943 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_944 = 944 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_945 = 945 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_946 = 946 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_947 = 947 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_948 = 948 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_949 = 949 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_950 = 950 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_951 = 951 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_952 = 952 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_953 = 953 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_954 = 954 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_955 = 955 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_956 = 956 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_957 = 957 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_958 = 958 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_959 = 959 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_960 = 960 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_961 = 961 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_962 = 962 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_963 = 963 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_964 = 964 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_965 = 965 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_966 = 966 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_967 = 967 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_968 = 968 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_969 = 969 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_970 = 970 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_971 = 971 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_972 = 972 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_973 = 973 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_974 = 974 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_975 = 975 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_976 = 976 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_977 = 977 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_978 = 978 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_979 = 979 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_980 = 980 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_981 = 981 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_982 = 982 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_983 = 983 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_984 = 984 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_985 = 985 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_986 = 986 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_987 = 987 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_988 = 988 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_989 = 989 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_990 = 990 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_991 = 991 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_992 = 992 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_993 = 993 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_994 = 994 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string 
long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_995 = 995 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_996 = 996 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_997 = 997 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_998 = 998 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_999 = 999 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; + optional string long_field_name_is_looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong_1000 = 1000 [default="long default value is also loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong"]; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_import.proto b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_import.proto new file mode 100644 index 0000000000..cd533ecd33 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_import.proto @@ -0,0 +1,61 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// A proto file which is imported by unittest.proto to test importing. + + +// We don't put this in a package within proto2 because we need to make sure +// that the generated code doesn't depend on being in the proto2 namespace. +// In test_util.h we do +// "using namespace unittest_import = protobuf_unittest_import". +package protobuf_unittest_import; + +option optimize_for = SPEED; + +// Excercise the java_package option. +option java_package = "com.google.protobuf.test"; + +// Do not set a java_outer_classname here to verify that Proto2 works without +// one. + +message ImportMessage { + optional int32 d = 1; +} + +enum ImportEnum { + IMPORT_FOO = 7; + IMPORT_BAR = 8; + IMPORT_BAZ = 9; +} + diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_import_lite.proto b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_import_lite.proto new file mode 100644 index 0000000000..ebaab5c05e --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_import_lite.proto @@ -0,0 +1,49 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +// Author: kenton@google.com (Kenton Varda) +// +// This is like unittest_import.proto but with optimize_for = LITE_RUNTIME. + +package protobuf_unittest_import; + +option optimize_for = LITE_RUNTIME; + +option java_package = "com.google.protobuf"; + +message ImportMessageLite { + optional int32 d = 1; +} + +enum ImportEnumLite { + IMPORT_LITE_FOO = 7; + IMPORT_LITE_BAR = 8; + IMPORT_LITE_BAZ = 9; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_lite.proto b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_lite.proto new file mode 100644 index 0000000000..cca6b497a6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_lite.proto @@ -0,0 +1,312 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// +// This is like unittest.proto but with optimize_for = LITE_RUNTIME. + +package protobuf_unittest; + +import "google/protobuf/unittest_import_lite.proto"; + +option optimize_for = LITE_RUNTIME; + +option java_package = "com.google.protobuf"; + +// Same as TestAllTypes but with the lite runtime. 
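Before the message definitions that follow, a brief note on what `optimize_for = LITE_RUNTIME` (declared above) changes for callers: the generated classes derive from MessageLite rather than Message, so they still serialize and parse but carry no descriptors or reflection. A minimal sketch, not part of the vendored file, assuming the generated header google/protobuf/unittest_lite.pb.h:

    #include <string>
    #include "google/protobuf/unittest_lite.pb.h"     // assumed generated header path

    int main() {
      protobuf_unittest::TestAllTypesLite msg;
      msg.set_optional_int32(42);

      std::string wire;
      msg.SerializeToString(&wire);                   // MessageLite keeps serialize/parse

      protobuf_unittest::TestAllTypesLite copy;
      copy.ParseFromString(wire);
      // copy.DebugString() would not compile: lite messages drop descriptors
      // and reflection to shrink the generated code.
      return copy.optional_int32() == 42 ? 0 : 1;
    }
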
+message TestAllTypesLite { + message NestedMessage { + optional int32 bb = 1; + } + + enum NestedEnum { + FOO = 1; + BAR = 2; + BAZ = 3; + } + + // Singular + optional int32 optional_int32 = 1; + optional int64 optional_int64 = 2; + optional uint32 optional_uint32 = 3; + optional uint64 optional_uint64 = 4; + optional sint32 optional_sint32 = 5; + optional sint64 optional_sint64 = 6; + optional fixed32 optional_fixed32 = 7; + optional fixed64 optional_fixed64 = 8; + optional sfixed32 optional_sfixed32 = 9; + optional sfixed64 optional_sfixed64 = 10; + optional float optional_float = 11; + optional double optional_double = 12; + optional bool optional_bool = 13; + optional string optional_string = 14; + optional bytes optional_bytes = 15; + + optional group OptionalGroup = 16 { + optional int32 a = 17; + } + + optional NestedMessage optional_nested_message = 18; + optional ForeignMessageLite optional_foreign_message = 19; + optional protobuf_unittest_import.ImportMessageLite + optional_import_message = 20; + + optional NestedEnum optional_nested_enum = 21; + optional ForeignEnumLite optional_foreign_enum = 22; + optional protobuf_unittest_import.ImportEnumLite optional_import_enum = 23; + + optional string optional_string_piece = 24 [ctype=STRING_PIECE]; + optional string optional_cord = 25 [ctype=CORD]; + + // Repeated + repeated int32 repeated_int32 = 31; + repeated int64 repeated_int64 = 32; + repeated uint32 repeated_uint32 = 33; + repeated uint64 repeated_uint64 = 34; + repeated sint32 repeated_sint32 = 35; + repeated sint64 repeated_sint64 = 36; + repeated fixed32 repeated_fixed32 = 37; + repeated fixed64 repeated_fixed64 = 38; + repeated sfixed32 repeated_sfixed32 = 39; + repeated sfixed64 repeated_sfixed64 = 40; + repeated float repeated_float = 41; + repeated double repeated_double = 42; + repeated bool repeated_bool = 43; + repeated string repeated_string = 44; + repeated bytes repeated_bytes = 45; + + repeated group RepeatedGroup = 46 { + optional int32 a = 47; + } + + repeated NestedMessage repeated_nested_message = 48; + repeated ForeignMessageLite repeated_foreign_message = 49; + repeated protobuf_unittest_import.ImportMessageLite + repeated_import_message = 50; + + repeated NestedEnum repeated_nested_enum = 51; + repeated ForeignEnumLite repeated_foreign_enum = 52; + repeated protobuf_unittest_import.ImportEnumLite repeated_import_enum = 53; + + repeated string repeated_string_piece = 54 [ctype=STRING_PIECE]; + repeated string repeated_cord = 55 [ctype=CORD]; + + // Singular with defaults + optional int32 default_int32 = 61 [default = 41 ]; + optional int64 default_int64 = 62 [default = 42 ]; + optional uint32 default_uint32 = 63 [default = 43 ]; + optional uint64 default_uint64 = 64 [default = 44 ]; + optional sint32 default_sint32 = 65 [default = -45 ]; + optional sint64 default_sint64 = 66 [default = 46 ]; + optional fixed32 default_fixed32 = 67 [default = 47 ]; + optional fixed64 default_fixed64 = 68 [default = 48 ]; + optional sfixed32 default_sfixed32 = 69 [default = 49 ]; + optional sfixed64 default_sfixed64 = 70 [default = -50 ]; + optional float default_float = 71 [default = 51.5 ]; + optional double default_double = 72 [default = 52e3 ]; + optional bool default_bool = 73 [default = true ]; + optional string default_string = 74 [default = "hello"]; + optional bytes default_bytes = 75 [default = "world"]; + + optional NestedEnum default_nested_enum = 81 [default = BAR]; + optional ForeignEnumLite default_foreign_enum = 82 + [default = FOREIGN_LITE_BAR]; + optional 
protobuf_unittest_import.ImportEnumLite + default_import_enum = 83 [default = IMPORT_LITE_BAR]; + + optional string default_string_piece = 84 [ctype=STRING_PIECE,default="abc"]; + optional string default_cord = 85 [ctype=CORD,default="123"]; +} + +message ForeignMessageLite { + optional int32 c = 1; +} + +enum ForeignEnumLite { + FOREIGN_LITE_FOO = 4; + FOREIGN_LITE_BAR = 5; + FOREIGN_LITE_BAZ = 6; +} + +message TestPackedTypesLite { + repeated int32 packed_int32 = 90 [packed = true]; + repeated int64 packed_int64 = 91 [packed = true]; + repeated uint32 packed_uint32 = 92 [packed = true]; + repeated uint64 packed_uint64 = 93 [packed = true]; + repeated sint32 packed_sint32 = 94 [packed = true]; + repeated sint64 packed_sint64 = 95 [packed = true]; + repeated fixed32 packed_fixed32 = 96 [packed = true]; + repeated fixed64 packed_fixed64 = 97 [packed = true]; + repeated sfixed32 packed_sfixed32 = 98 [packed = true]; + repeated sfixed64 packed_sfixed64 = 99 [packed = true]; + repeated float packed_float = 100 [packed = true]; + repeated double packed_double = 101 [packed = true]; + repeated bool packed_bool = 102 [packed = true]; + repeated ForeignEnumLite packed_enum = 103 [packed = true]; +} + +message TestAllExtensionsLite { + extensions 1 to max; +} + +extend TestAllExtensionsLite { + // Singular + optional int32 optional_int32_extension_lite = 1; + optional int64 optional_int64_extension_lite = 2; + optional uint32 optional_uint32_extension_lite = 3; + optional uint64 optional_uint64_extension_lite = 4; + optional sint32 optional_sint32_extension_lite = 5; + optional sint64 optional_sint64_extension_lite = 6; + optional fixed32 optional_fixed32_extension_lite = 7; + optional fixed64 optional_fixed64_extension_lite = 8; + optional sfixed32 optional_sfixed32_extension_lite = 9; + optional sfixed64 optional_sfixed64_extension_lite = 10; + optional float optional_float_extension_lite = 11; + optional double optional_double_extension_lite = 12; + optional bool optional_bool_extension_lite = 13; + optional string optional_string_extension_lite = 14; + optional bytes optional_bytes_extension_lite = 15; + + optional group OptionalGroup_extension_lite = 16 { + optional int32 a = 17; + } + + optional TestAllTypesLite.NestedMessage optional_nested_message_extension_lite + = 18; + optional ForeignMessageLite optional_foreign_message_extension_lite = 19; + optional protobuf_unittest_import.ImportMessageLite + optional_import_message_extension_lite = 20; + + optional TestAllTypesLite.NestedEnum optional_nested_enum_extension_lite = 21; + optional ForeignEnumLite optional_foreign_enum_extension_lite = 22; + optional protobuf_unittest_import.ImportEnumLite + optional_import_enum_extension_lite = 23; + + optional string optional_string_piece_extension_lite = 24 + [ctype=STRING_PIECE]; + optional string optional_cord_extension_lite = 25 [ctype=CORD]; + + // Repeated + repeated int32 repeated_int32_extension_lite = 31; + repeated int64 repeated_int64_extension_lite = 32; + repeated uint32 repeated_uint32_extension_lite = 33; + repeated uint64 repeated_uint64_extension_lite = 34; + repeated sint32 repeated_sint32_extension_lite = 35; + repeated sint64 repeated_sint64_extension_lite = 36; + repeated fixed32 repeated_fixed32_extension_lite = 37; + repeated fixed64 repeated_fixed64_extension_lite = 38; + repeated sfixed32 repeated_sfixed32_extension_lite = 39; + repeated sfixed64 repeated_sfixed64_extension_lite = 40; + repeated float repeated_float_extension_lite = 41; + repeated double 
repeated_double_extension_lite = 42; + repeated bool repeated_bool_extension_lite = 43; + repeated string repeated_string_extension_lite = 44; + repeated bytes repeated_bytes_extension_lite = 45; + + repeated group RepeatedGroup_extension_lite = 46 { + optional int32 a = 47; + } + + repeated TestAllTypesLite.NestedMessage repeated_nested_message_extension_lite + = 48; + repeated ForeignMessageLite repeated_foreign_message_extension_lite = 49; + repeated protobuf_unittest_import.ImportMessageLite + repeated_import_message_extension_lite = 50; + + repeated TestAllTypesLite.NestedEnum repeated_nested_enum_extension_lite = 51; + repeated ForeignEnumLite repeated_foreign_enum_extension_lite = 52; + repeated protobuf_unittest_import.ImportEnumLite + repeated_import_enum_extension_lite = 53; + + repeated string repeated_string_piece_extension_lite = 54 + [ctype=STRING_PIECE]; + repeated string repeated_cord_extension_lite = 55 [ctype=CORD]; + + // Singular with defaults + optional int32 default_int32_extension_lite = 61 [default = 41 ]; + optional int64 default_int64_extension_lite = 62 [default = 42 ]; + optional uint32 default_uint32_extension_lite = 63 [default = 43 ]; + optional uint64 default_uint64_extension_lite = 64 [default = 44 ]; + optional sint32 default_sint32_extension_lite = 65 [default = -45 ]; + optional sint64 default_sint64_extension_lite = 66 [default = 46 ]; + optional fixed32 default_fixed32_extension_lite = 67 [default = 47 ]; + optional fixed64 default_fixed64_extension_lite = 68 [default = 48 ]; + optional sfixed32 default_sfixed32_extension_lite = 69 [default = 49 ]; + optional sfixed64 default_sfixed64_extension_lite = 70 [default = -50 ]; + optional float default_float_extension_lite = 71 [default = 51.5 ]; + optional double default_double_extension_lite = 72 [default = 52e3 ]; + optional bool default_bool_extension_lite = 73 [default = true ]; + optional string default_string_extension_lite = 74 [default = "hello"]; + optional bytes default_bytes_extension_lite = 75 [default = "world"]; + + optional TestAllTypesLite.NestedEnum + default_nested_enum_extension_lite = 81 [default = BAR]; + optional ForeignEnumLite + default_foreign_enum_extension_lite = 82 [default = FOREIGN_LITE_BAR]; + optional protobuf_unittest_import.ImportEnumLite + default_import_enum_extension_lite = 83 [default = IMPORT_LITE_BAR]; + + optional string default_string_piece_extension_lite = 84 [ctype=STRING_PIECE, + default="abc"]; + optional string default_cord_extension_lite = 85 [ctype=CORD, default="123"]; +} + +message TestPackedExtensionsLite { + extensions 1 to max; +} + +extend TestPackedExtensionsLite { + repeated int32 packed_int32_extension_lite = 90 [packed = true]; + repeated int64 packed_int64_extension_lite = 91 [packed = true]; + repeated uint32 packed_uint32_extension_lite = 92 [packed = true]; + repeated uint64 packed_uint64_extension_lite = 93 [packed = true]; + repeated sint32 packed_sint32_extension_lite = 94 [packed = true]; + repeated sint64 packed_sint64_extension_lite = 95 [packed = true]; + repeated fixed32 packed_fixed32_extension_lite = 96 [packed = true]; + repeated fixed64 packed_fixed64_extension_lite = 97 [packed = true]; + repeated sfixed32 packed_sfixed32_extension_lite = 98 [packed = true]; + repeated sfixed64 packed_sfixed64_extension_lite = 99 [packed = true]; + repeated float packed_float_extension_lite = 100 [packed = true]; + repeated double packed_double_extension_lite = 101 [packed = true]; + repeated bool packed_bool_extension_lite = 102 [packed = true]; + 
repeated ForeignEnumLite packed_enum_extension_lite = 103 [packed = true]; +} + +message TestNestedExtensionLite { + extend TestAllExtensionsLite { + optional int32 nested_extension = 12345; + } +} + +// Test that deprecated fields work. We only verify that they compile (at one +// point this failed). +message TestDeprecatedLite { + optional int32 deprecated_field = 1 [deprecated = true]; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_lite_imports_nonlite.proto b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_lite_imports_nonlite.proto new file mode 100644 index 0000000000..d52cb8cc36 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_lite_imports_nonlite.proto @@ -0,0 +1,43 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// +// Tests that a "lite" message can import a regular message. + +package protobuf_unittest; + +import "google/protobuf/unittest.proto"; + +option optimize_for = LITE_RUNTIME; + +message TestLiteImportsNonlite { + optional TestAllTypes message = 1; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_mset.proto b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_mset.proto new file mode 100644 index 0000000000..3497f09fa6 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_mset.proto @@ -0,0 +1,72 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This file contains messages for testing message_set_wire_format. + +package protobuf_unittest; + +option optimize_for = SPEED; + +// A message with message_set_wire_format. +message TestMessageSet { + option message_set_wire_format = true; + extensions 4 to max; +} + +message TestMessageSetContainer { + optional TestMessageSet message_set = 1; +} + +message TestMessageSetExtension1 { + extend TestMessageSet { + optional TestMessageSetExtension1 message_set_extension = 1545008; + } + optional int32 i = 15; +} + +message TestMessageSetExtension2 { + extend TestMessageSet { + optional TestMessageSetExtension2 message_set_extension = 1547769; + } + optional string str = 25; +} + +// MessageSet wire format is equivalent to this. +message RawMessageSet { + repeated group Item = 1 { + required int32 type_id = 2; + required bytes message = 3; + } +} + diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_no_generic_services.proto b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_no_generic_services.proto new file mode 100644 index 0000000000..cffb4122c5 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_no_generic_services.proto @@ -0,0 +1,52 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) + +package google.protobuf.no_generic_services_test; + +// *_generic_services are false by default. + +message TestMessage { + optional int32 a = 1; + extensions 1000 to max; +} + +enum TestEnum { + FOO = 1; +} + +extend TestMessage { + optional int32 test_extension = 1000; +} + +service TestService { + rpc Foo(TestMessage) returns(TestMessage); +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_optimize_for.proto b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_optimize_for.proto new file mode 100644 index 0000000000..feecbef8d4 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unittest_optimize_for.proto @@ -0,0 +1,61 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// A proto file which uses optimize_for = CODE_SIZE. + +import "google/protobuf/unittest.proto"; + +package protobuf_unittest; + +option optimize_for = CODE_SIZE; + +message TestOptimizedForSize { + optional int32 i = 1; + optional ForeignMessage msg = 19; + + extensions 1000 to max; + + extend TestOptimizedForSize { + optional int32 test_extension = 1234; + optional TestRequiredOptimizedForSize test_extension2 = 1235; + } +} + +message TestRequiredOptimizedForSize { + required int32 x = 1; +} + +message TestOptionalOptimizedForSize { + optional TestRequiredOptimizedForSize o = 1; +} diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unknown_field_set.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unknown_field_set.cc new file mode 100644 index 0000000000..e1f8b838f8 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unknown_field_set.cc @@ -0,0 +1,204 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
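The implementation below is easier to follow with the class's intended usage in mind: an UnknownFieldSet is an ordered list of (field number, wire type, value) entries that a message keeps for fields it did not recognize while parsing. A short usage sketch, not part of the vendored file, built only on the API declared in unknown_field_set.h later in this diff:

    #include "google/protobuf/unknown_field_set.h"

    int main() {
      google::protobuf::UnknownFieldSet fields;
      fields.AddVarint(1, 150);                       // field 1, varint wire type
      *fields.AddLengthDelimited(2) = "hello";        // field 2, length-delimited payload

      // Entries are kept in insertion order; the typed accessors match type().
      int varints = 0;
      for (int i = 0; i < fields.field_count(); i++) {
        if (fields.field(i).type() == google::protobuf::UnknownField::TYPE_VARINT) {
          varints++;
        }
      }
      return (fields.field_count() == 2 && varints == 1) ? 0 : 1;
    }
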
+ +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { + +UnknownFieldSet::UnknownFieldSet() + : fields_(NULL) {} + +UnknownFieldSet::~UnknownFieldSet() { + Clear(); + delete fields_; +} + +void UnknownFieldSet::ClearFallback() { + GOOGLE_DCHECK(fields_ != NULL); + for (int i = 0; i < fields_->size(); i++) { + (*fields_)[i].Delete(); + } + fields_->clear(); +} + +void UnknownFieldSet::MergeFrom(const UnknownFieldSet& other) { + for (int i = 0; i < other.field_count(); i++) { + AddField(other.field(i)); + } +} + +int UnknownFieldSet::SpaceUsedExcludingSelf() const { + if (fields_ == NULL) return 0; + + int total_size = sizeof(*fields_) + sizeof(UnknownField) * fields_->size(); + for (int i = 0; i < fields_->size(); i++) { + const UnknownField& field = (*fields_)[i]; + switch (field.type()) { + case UnknownField::TYPE_LENGTH_DELIMITED: + total_size += sizeof(*field.length_delimited_) + + internal::StringSpaceUsedExcludingSelf(*field.length_delimited_); + break; + case UnknownField::TYPE_GROUP: + total_size += field.group_->SpaceUsed(); + break; + default: + break; + } + } + return total_size; +} + +int UnknownFieldSet::SpaceUsed() const { + return sizeof(*this) + SpaceUsedExcludingSelf(); +} + +void UnknownFieldSet::AddVarint(int number, uint64 value) { + if (fields_ == NULL) fields_ = new vector; + UnknownField field; + field.number_ = number; + field.type_ = UnknownField::TYPE_VARINT; + field.varint_ = value; + fields_->push_back(field); +} + +void UnknownFieldSet::AddFixed32(int number, uint32 value) { + if (fields_ == NULL) fields_ = new vector; + UnknownField field; + field.number_ = number; + field.type_ = UnknownField::TYPE_FIXED32; + field.fixed32_ = value; + fields_->push_back(field); +} + +void UnknownFieldSet::AddFixed64(int number, uint64 value) { + if (fields_ == NULL) fields_ = new vector; + UnknownField field; + field.number_ = number; + field.type_ = UnknownField::TYPE_FIXED64; + field.fixed64_ = value; + fields_->push_back(field); +} + +string* UnknownFieldSet::AddLengthDelimited(int number) { + if (fields_ == NULL) fields_ = new vector; + UnknownField field; + field.number_ = number; + field.type_ = UnknownField::TYPE_LENGTH_DELIMITED; + field.length_delimited_ = new string; + fields_->push_back(field); + return field.length_delimited_; +} + +UnknownFieldSet* UnknownFieldSet::AddGroup(int number) { + if (fields_ == NULL) fields_ = new vector; + UnknownField field; + field.number_ = number; + field.type_ = UnknownField::TYPE_GROUP; + field.group_ = new UnknownFieldSet; + fields_->push_back(field); + return field.group_; +} + +void UnknownFieldSet::AddField(const UnknownField& field) { + if (fields_ == NULL) fields_ = new vector; + fields_->push_back(field); + fields_->back().DeepCopy(); +} + +bool UnknownFieldSet::MergeFromCodedStream(io::CodedInputStream* input) { + + UnknownFieldSet other; + if (internal::WireFormat::SkipMessage(input, &other) && + input->ConsumedEntireMessage()) { + MergeFrom(other); + return true; + } else { + return false; + } +} + +bool UnknownFieldSet::ParseFromCodedStream(io::CodedInputStream* input) { + Clear(); + return MergeFromCodedStream(input); +} + +bool UnknownFieldSet::ParseFromZeroCopyStream(io::ZeroCopyInputStream* input) { + io::CodedInputStream coded_input(input); + return ParseFromCodedStream(&coded_input) && + coded_input.ConsumedEntireMessage(); +} + +bool UnknownFieldSet::ParseFromArray(const void* data, int size) { + io::ArrayInputStream input(data, size); + return 
ParseFromZeroCopyStream(&input); +} + +void UnknownField::Delete() { + switch (type()) { + case UnknownField::TYPE_LENGTH_DELIMITED: + delete length_delimited_; + break; + case UnknownField::TYPE_GROUP: + delete group_; + break; + default: + break; + } +} + +void UnknownField::DeepCopy() { + switch (type()) { + case UnknownField::TYPE_LENGTH_DELIMITED: + length_delimited_ = new string(*length_delimited_); + break; + case UnknownField::TYPE_GROUP: { + UnknownFieldSet* group = new UnknownFieldSet; + group->MergeFrom(*group_); + group_ = group; + break; + } + default: + break; + } +} + +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unknown_field_set.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unknown_field_set.h new file mode 100644 index 0000000000..84c2e2b609 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unknown_field_set.h @@ -0,0 +1,268 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// Contains classes used to keep track of unrecognized fields seen while +// parsing a protocol message. + +#ifndef GOOGLE_PROTOBUF_UNKNOWN_FIELD_SET_H__ +#define GOOGLE_PROTOBUF_UNKNOWN_FIELD_SET_H__ + +#include +#include +#include + +namespace google { +namespace protobuf { + +class Message; // message.h +class UnknownField; // below + +// An UnknownFieldSet contains fields that were encountered while parsing a +// message but were not defined by its type. Keeping track of these can be +// useful, especially in that they may be written if the message is serialized +// again without being cleared in between. 
This means that software which +// simply receives messages and forwards them to other servers does not need +// to be updated every time a new field is added to the message definition. +// +// To get the UnknownFieldSet attached to any message, call +// Reflection::GetUnknownFields(). +// +// This class is necessarily tied to the protocol buffer wire format, unlike +// the Reflection interface which is independent of any serialization scheme. +class LIBPROTOBUF_EXPORT UnknownFieldSet { + public: + UnknownFieldSet(); + ~UnknownFieldSet(); + + // Remove all fields. + inline void Clear(); + + // Is this set empty? + inline bool empty() const; + + // Merge the contents of some other UnknownFieldSet with this one. + void MergeFrom(const UnknownFieldSet& other); + + // Swaps the contents of some other UnknownFieldSet with this one. + inline void Swap(UnknownFieldSet* x); + + // Computes (an estimate of) the total number of bytes currently used for + // storing the unknown fields in memory. Does NOT include + // sizeof(*this) in the calculation. + int SpaceUsedExcludingSelf() const; + + // Version of SpaceUsed() including sizeof(*this). + int SpaceUsed() const; + + // Returns the number of fields present in the UnknownFieldSet. + inline int field_count() const; + // Get a field in the set, where 0 <= index < field_count(). The fields + // appear in the order in which they were added. + inline const UnknownField& field(int index) const; + // Get a mutable pointer to a field in the set, where + // 0 <= index < field_count(). The fields appear in the order in which + // they were added. + inline UnknownField* mutable_field(int index); + + // Adding fields --------------------------------------------------- + + void AddVarint(int number, uint64 value); + void AddFixed32(int number, uint32 value); + void AddFixed64(int number, uint64 value); + void AddLengthDelimited(int number, const string& value); + string* AddLengthDelimited(int number); + UnknownFieldSet* AddGroup(int number); + + // Adds an unknown field from another set. + void AddField(const UnknownField& field); + + // Parsing helpers ------------------------------------------------- + // These work exactly like the similarly-named methods of Message. + + bool MergeFromCodedStream(io::CodedInputStream* input); + bool ParseFromCodedStream(io::CodedInputStream* input); + bool ParseFromZeroCopyStream(io::ZeroCopyInputStream* input); + bool ParseFromArray(const void* data, int size); + inline bool ParseFromString(const string& data) { + return ParseFromArray(data.data(), data.size()); + } + + private: + void ClearFallback(); + + vector* fields_; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(UnknownFieldSet); +}; + +// Represents one field in an UnknownFieldSet. +class LIBPROTOBUF_EXPORT UnknownField { + public: + enum Type { + TYPE_VARINT, + TYPE_FIXED32, + TYPE_FIXED64, + TYPE_LENGTH_DELIMITED, + TYPE_GROUP + }; + + // The field's tag number, as seen on the wire. + inline int number() const; + + // The field type. + inline Type type() const; + + // Accessors ------------------------------------------------------- + // Each method works only for UnknownFields of the corresponding type. 
+ + inline uint64 varint() const; + inline uint32 fixed32() const; + inline uint64 fixed64() const; + inline const string& length_delimited() const; + inline const UnknownFieldSet& group() const; + + inline void set_varint(uint64 value); + inline void set_fixed32(uint32 value); + inline void set_fixed64(uint64 value); + inline void set_length_delimited(const string& value); + inline string* mutable_length_delimited(); + inline UnknownFieldSet* mutable_group(); + + private: + friend class UnknownFieldSet; + + // If this UnknownField contains a pointer, delete it. + void Delete(); + + // Make a deep copy of any pointers in this UnknownField. + void DeepCopy(); + + unsigned int number_ : 29; + unsigned int type_ : 3; + union { + uint64 varint_; + uint32 fixed32_; + uint64 fixed64_; + string* length_delimited_; + UnknownFieldSet* group_; + }; +}; + +// =================================================================== +// inline implementations + +inline void UnknownFieldSet::Clear() { + if (fields_ != NULL) { + ClearFallback(); + } +} + +inline bool UnknownFieldSet::empty() const { + return fields_ == NULL || fields_->empty(); +} + +inline void UnknownFieldSet::Swap(UnknownFieldSet* x) { + std::swap(fields_, x->fields_); +} + +inline int UnknownFieldSet::field_count() const { + return (fields_ == NULL) ? 0 : fields_->size(); +} +inline const UnknownField& UnknownFieldSet::field(int index) const { + return (*fields_)[index]; +} +inline UnknownField* UnknownFieldSet::mutable_field(int index) { + return &(*fields_)[index]; +} + +inline void UnknownFieldSet::AddLengthDelimited( + int number, const string& value) { + AddLengthDelimited(number)->assign(value); +} + +inline int UnknownField::number() const { return number_; } +inline UnknownField::Type UnknownField::type() const { + return static_cast(type_); +} + +inline uint64 UnknownField::varint () const { + GOOGLE_DCHECK_EQ(type_, TYPE_VARINT); + return varint_; +} +inline uint32 UnknownField::fixed32() const { + GOOGLE_DCHECK_EQ(type_, TYPE_FIXED32); + return fixed32_; +} +inline uint64 UnknownField::fixed64() const { + GOOGLE_DCHECK_EQ(type_, TYPE_FIXED64); + return fixed64_; +} +inline const string& UnknownField::length_delimited() const { + GOOGLE_DCHECK_EQ(type_, TYPE_LENGTH_DELIMITED); + return *length_delimited_; +} +inline const UnknownFieldSet& UnknownField::group() const { + GOOGLE_DCHECK_EQ(type_, TYPE_GROUP); + return *group_; +} + +inline void UnknownField::set_varint(uint64 value) { + GOOGLE_DCHECK_EQ(type_, TYPE_VARINT); + varint_ = value; +} +inline void UnknownField::set_fixed32(uint32 value) { + GOOGLE_DCHECK_EQ(type_, TYPE_FIXED32); + fixed32_ = value; +} +inline void UnknownField::set_fixed64(uint64 value) { + GOOGLE_DCHECK_EQ(type_, TYPE_FIXED64); + fixed64_ = value; +} +inline void UnknownField::set_length_delimited(const string& value) { + GOOGLE_DCHECK_EQ(type_, TYPE_LENGTH_DELIMITED); + length_delimited_->assign(value); +} +inline string* UnknownField::mutable_length_delimited() { + GOOGLE_DCHECK_EQ(type_, TYPE_LENGTH_DELIMITED); + return length_delimited_; +} +inline UnknownFieldSet* UnknownField::mutable_group() { + GOOGLE_DCHECK_EQ(type_, TYPE_GROUP); + return group_; +} + +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_UNKNOWN_FIELD_SET_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unknown_field_set_unittest.cc 
b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unknown_field_set_unittest.cc new file mode 100644 index 0000000000..1235c9ee64 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/unknown_field_set_unittest.cc @@ -0,0 +1,512 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This test is testing a lot more than just the UnknownFieldSet class. It +// tests handling of unknown fields throughout the system. + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +namespace google { +namespace protobuf { + +using internal::WireFormat; + +namespace { + +class UnknownFieldSetTest : public testing::Test { + protected: + virtual void SetUp() { + descriptor_ = unittest::TestAllTypes::descriptor(); + TestUtil::SetAllFields(&all_fields_); + all_fields_.SerializeToString(&all_fields_data_); + ASSERT_TRUE(empty_message_.ParseFromString(all_fields_data_)); + unknown_fields_ = empty_message_.mutable_unknown_fields(); + } + + const UnknownField* GetField(const string& name) { + const FieldDescriptor* field = descriptor_->FindFieldByName(name); + if (field == NULL) return NULL; + for (int i = 0; i < unknown_fields_->field_count(); i++) { + if (unknown_fields_->field(i).number() == field->number()) { + return &unknown_fields_->field(i); + } + } + return NULL; + } + + // Constructs a protocol buffer which contains fields with all the same + // numbers as all_fields_data_ except that each field is some other wire + // type. 
+ string GetBizarroData() { + unittest::TestEmptyMessage bizarro_message; + UnknownFieldSet* bizarro_unknown_fields = + bizarro_message.mutable_unknown_fields(); + for (int i = 0; i < unknown_fields_->field_count(); i++) { + const UnknownField& unknown_field = unknown_fields_->field(i); + if (unknown_field.type() == UnknownField::TYPE_VARINT) { + bizarro_unknown_fields->AddFixed32(unknown_field.number(), 1); + } else { + bizarro_unknown_fields->AddVarint(unknown_field.number(), 1); + } + } + + string data; + EXPECT_TRUE(bizarro_message.SerializeToString(&data)); + return data; + } + + const Descriptor* descriptor_; + unittest::TestAllTypes all_fields_; + string all_fields_data_; + + // An empty message that has been parsed from all_fields_data_. So, it has + // unknown fields of every type. + unittest::TestEmptyMessage empty_message_; + UnknownFieldSet* unknown_fields_; +}; + +TEST_F(UnknownFieldSetTest, AllFieldsPresent) { + // All fields of TestAllTypes should be present, in numeric order (because + // that's the order we parsed them in). Fields that are not valid field + // numbers of TestAllTypes should NOT be present. + + int pos = 0; + + for (int i = 0; i < 1000; i++) { + const FieldDescriptor* field = descriptor_->FindFieldByNumber(i); + if (field != NULL) { + ASSERT_LT(pos, unknown_fields_->field_count()); + EXPECT_EQ(i, unknown_fields_->field(pos++).number()); + if (field->is_repeated()) { + // Should have a second instance. + ASSERT_LT(pos, unknown_fields_->field_count()); + EXPECT_EQ(i, unknown_fields_->field(pos++).number()); + } + } + } + EXPECT_EQ(unknown_fields_->field_count(), pos); +} + +TEST_F(UnknownFieldSetTest, Varint) { + const UnknownField* field = GetField("optional_int32"); + ASSERT_TRUE(field != NULL); + + ASSERT_EQ(UnknownField::TYPE_VARINT, field->type()); + EXPECT_EQ(all_fields_.optional_int32(), field->varint()); +} + +TEST_F(UnknownFieldSetTest, Fixed32) { + const UnknownField* field = GetField("optional_fixed32"); + ASSERT_TRUE(field != NULL); + + ASSERT_EQ(UnknownField::TYPE_FIXED32, field->type()); + EXPECT_EQ(all_fields_.optional_fixed32(), field->fixed32()); +} + +TEST_F(UnknownFieldSetTest, Fixed64) { + const UnknownField* field = GetField("optional_fixed64"); + ASSERT_TRUE(field != NULL); + + ASSERT_EQ(UnknownField::TYPE_FIXED64, field->type()); + EXPECT_EQ(all_fields_.optional_fixed64(), field->fixed64()); +} + +TEST_F(UnknownFieldSetTest, LengthDelimited) { + const UnknownField* field = GetField("optional_string"); + ASSERT_TRUE(field != NULL); + + ASSERT_EQ(UnknownField::TYPE_LENGTH_DELIMITED, field->type()); + EXPECT_EQ(all_fields_.optional_string(), field->length_delimited()); +} + +TEST_F(UnknownFieldSetTest, Group) { + const UnknownField* field = GetField("optionalgroup"); + ASSERT_TRUE(field != NULL); + + ASSERT_EQ(UnknownField::TYPE_GROUP, field->type()); + ASSERT_EQ(1, field->group().field_count()); + + const UnknownField& nested_field = field->group().field(0); + const FieldDescriptor* nested_field_descriptor = + unittest::TestAllTypes::OptionalGroup::descriptor()->FindFieldByName("a"); + ASSERT_TRUE(nested_field_descriptor != NULL); + + EXPECT_EQ(nested_field_descriptor->number(), nested_field.number()); + ASSERT_EQ(UnknownField::TYPE_VARINT, nested_field.type()); + EXPECT_EQ(all_fields_.optionalgroup().a(), nested_field.varint()); +} + +TEST_F(UnknownFieldSetTest, SerializeFastAndSlowAreEquivalent) { + int size = WireFormat::ComputeUnknownFieldsSize( + empty_message_.unknown_fields()); + string slow_buffer; + string fast_buffer; + 
slow_buffer.resize(size); + fast_buffer.resize(size); + + uint8* target = reinterpret_cast(string_as_array(&fast_buffer)); + uint8* result = WireFormat::SerializeUnknownFieldsToArray( + empty_message_.unknown_fields(), target); + EXPECT_EQ(size, result - target); + + { + io::ArrayOutputStream raw_stream(string_as_array(&slow_buffer), size, 1); + io::CodedOutputStream output_stream(&raw_stream); + WireFormat::SerializeUnknownFields(empty_message_.unknown_fields(), + &output_stream); + ASSERT_FALSE(output_stream.HadError()); + } + EXPECT_TRUE(fast_buffer == slow_buffer); +} + +TEST_F(UnknownFieldSetTest, Serialize) { + // Check that serializing the UnknownFieldSet produces the original data + // again. + + string data; + empty_message_.SerializeToString(&data); + + // Don't use EXPECT_EQ because we don't want to dump raw binary data to + // stdout. + EXPECT_TRUE(data == all_fields_data_); +} + +TEST_F(UnknownFieldSetTest, ParseViaReflection) { + // Make sure fields are properly parsed to the UnknownFieldSet when parsing + // via reflection. + + unittest::TestEmptyMessage message; + io::ArrayInputStream raw_input(all_fields_data_.data(), + all_fields_data_.size()); + io::CodedInputStream input(&raw_input); + ASSERT_TRUE(WireFormat::ParseAndMergePartial(&input, &message)); + + EXPECT_EQ(message.DebugString(), empty_message_.DebugString()); +} + +TEST_F(UnknownFieldSetTest, SerializeViaReflection) { + // Make sure fields are properly written from the UnknownFieldSet when + // serializing via reflection. + + string data; + + { + io::StringOutputStream raw_output(&data); + io::CodedOutputStream output(&raw_output); + int size = WireFormat::ByteSize(empty_message_); + WireFormat::SerializeWithCachedSizes(empty_message_, size, &output); + ASSERT_FALSE(output.HadError()); + } + + // Don't use EXPECT_EQ because we don't want to dump raw binary data to + // stdout. + EXPECT_TRUE(data == all_fields_data_); +} + +TEST_F(UnknownFieldSetTest, CopyFrom) { + unittest::TestEmptyMessage message; + + message.CopyFrom(empty_message_); + + EXPECT_EQ(empty_message_.DebugString(), message.DebugString()); +} + +TEST_F(UnknownFieldSetTest, Swap) { + unittest::TestEmptyMessage other_message; + ASSERT_TRUE(other_message.ParseFromString(GetBizarroData())); + + EXPECT_GT(empty_message_.unknown_fields().field_count(), 0); + EXPECT_GT(other_message.unknown_fields().field_count(), 0); + const string debug_string = empty_message_.DebugString(); + const string other_debug_string = other_message.DebugString(); + EXPECT_NE(debug_string, other_debug_string); + + empty_message_.Swap(&other_message); + EXPECT_EQ(debug_string, other_message.DebugString()); + EXPECT_EQ(other_debug_string, empty_message_.DebugString()); +} + +TEST_F(UnknownFieldSetTest, SwapWithSelf) { + const string debug_string = empty_message_.DebugString(); + EXPECT_GT(empty_message_.unknown_fields().field_count(), 0); + + empty_message_.Swap(&empty_message_); + EXPECT_GT(empty_message_.unknown_fields().field_count(), 0); + EXPECT_EQ(debug_string, empty_message_.DebugString()); +} + +TEST_F(UnknownFieldSetTest, MergeFrom) { + unittest::TestEmptyMessage source, destination; + + destination.mutable_unknown_fields()->AddVarint(1, 1); + destination.mutable_unknown_fields()->AddVarint(3, 2); + source.mutable_unknown_fields()->AddVarint(2, 3); + source.mutable_unknown_fields()->AddVarint(3, 4); + + destination.MergeFrom(source); + + EXPECT_EQ( + // Note: The ordering of fields here depends on the ordering of adds + // and merging, above. 
+ "1: 1\n" + "3: 2\n" + "2: 3\n" + "3: 4\n", + destination.DebugString()); +} + +TEST_F(UnknownFieldSetTest, Clear) { + // Clear the set. + empty_message_.Clear(); + EXPECT_EQ(0, unknown_fields_->field_count()); +} + +TEST_F(UnknownFieldSetTest, ParseKnownAndUnknown) { + // Test mixing known and unknown fields when parsing. + + unittest::TestEmptyMessage source; + source.mutable_unknown_fields()->AddVarint(123456, 654321); + string data; + ASSERT_TRUE(source.SerializeToString(&data)); + + unittest::TestAllTypes destination; + ASSERT_TRUE(destination.ParseFromString(all_fields_data_ + data)); + + TestUtil::ExpectAllFieldsSet(destination); + ASSERT_EQ(1, destination.unknown_fields().field_count()); + ASSERT_EQ(UnknownField::TYPE_VARINT, + destination.unknown_fields().field(0).type()); + EXPECT_EQ(654321, destination.unknown_fields().field(0).varint()); +} + +TEST_F(UnknownFieldSetTest, WrongTypeTreatedAsUnknown) { + // Test that fields of the wrong wire type are treated like unknown fields + // when parsing. + + unittest::TestAllTypes all_types_message; + unittest::TestEmptyMessage empty_message; + string bizarro_data = GetBizarroData(); + ASSERT_TRUE(all_types_message.ParseFromString(bizarro_data)); + ASSERT_TRUE(empty_message.ParseFromString(bizarro_data)); + + // All fields should have been interpreted as unknown, so the debug strings + // should be the same. + EXPECT_EQ(empty_message.DebugString(), all_types_message.DebugString()); +} + +TEST_F(UnknownFieldSetTest, WrongTypeTreatedAsUnknownViaReflection) { + // Same as WrongTypeTreatedAsUnknown but via the reflection interface. + + unittest::TestAllTypes all_types_message; + unittest::TestEmptyMessage empty_message; + string bizarro_data = GetBizarroData(); + io::ArrayInputStream raw_input(bizarro_data.data(), bizarro_data.size()); + io::CodedInputStream input(&raw_input); + ASSERT_TRUE(WireFormat::ParseAndMergePartial(&input, &all_types_message)); + ASSERT_TRUE(empty_message.ParseFromString(bizarro_data)); + + EXPECT_EQ(empty_message.DebugString(), all_types_message.DebugString()); +} + +TEST_F(UnknownFieldSetTest, UnknownExtensions) { + // Make sure fields are properly parsed to the UnknownFieldSet even when + // they are declared as extension numbers. + + unittest::TestEmptyMessageWithExtensions message; + ASSERT_TRUE(message.ParseFromString(all_fields_data_)); + + EXPECT_EQ(message.DebugString(), empty_message_.DebugString()); +} + +TEST_F(UnknownFieldSetTest, UnknownExtensionsReflection) { + // Same as UnknownExtensions except parsing via reflection. + + unittest::TestEmptyMessageWithExtensions message; + io::ArrayInputStream raw_input(all_fields_data_.data(), + all_fields_data_.size()); + io::CodedInputStream input(&raw_input); + ASSERT_TRUE(WireFormat::ParseAndMergePartial(&input, &message)); + + EXPECT_EQ(message.DebugString(), empty_message_.DebugString()); +} + +TEST_F(UnknownFieldSetTest, WrongExtensionTypeTreatedAsUnknown) { + // Test that fields of the wrong wire type are treated like unknown fields + // when parsing extensions. + + unittest::TestAllExtensions all_extensions_message; + unittest::TestEmptyMessage empty_message; + string bizarro_data = GetBizarroData(); + ASSERT_TRUE(all_extensions_message.ParseFromString(bizarro_data)); + ASSERT_TRUE(empty_message.ParseFromString(bizarro_data)); + + // All fields should have been interpreted as unknown, so the debug strings + // should be the same. 
+ EXPECT_EQ(empty_message.DebugString(), all_extensions_message.DebugString()); +} + +TEST_F(UnknownFieldSetTest, UnknownEnumValue) { + using unittest::TestAllTypes; + using unittest::TestAllExtensions; + using unittest::TestEmptyMessage; + + const FieldDescriptor* singular_field = + TestAllTypes::descriptor()->FindFieldByName("optional_nested_enum"); + const FieldDescriptor* repeated_field = + TestAllTypes::descriptor()->FindFieldByName("repeated_nested_enum"); + ASSERT_TRUE(singular_field != NULL); + ASSERT_TRUE(repeated_field != NULL); + + string data; + + { + TestEmptyMessage empty_message; + UnknownFieldSet* unknown_fields = empty_message.mutable_unknown_fields(); + unknown_fields->AddVarint(singular_field->number(), TestAllTypes::BAR); + unknown_fields->AddVarint(singular_field->number(), 5); // not valid + unknown_fields->AddVarint(repeated_field->number(), TestAllTypes::FOO); + unknown_fields->AddVarint(repeated_field->number(), 4); // not valid + unknown_fields->AddVarint(repeated_field->number(), TestAllTypes::BAZ); + unknown_fields->AddVarint(repeated_field->number(), 6); // not valid + empty_message.SerializeToString(&data); + } + + { + TestAllTypes message; + ASSERT_TRUE(message.ParseFromString(data)); + EXPECT_EQ(TestAllTypes::BAR, message.optional_nested_enum()); + ASSERT_EQ(2, message.repeated_nested_enum_size()); + EXPECT_EQ(TestAllTypes::FOO, message.repeated_nested_enum(0)); + EXPECT_EQ(TestAllTypes::BAZ, message.repeated_nested_enum(1)); + + const UnknownFieldSet& unknown_fields = message.unknown_fields(); + ASSERT_EQ(3, unknown_fields.field_count()); + + EXPECT_EQ(singular_field->number(), unknown_fields.field(0).number()); + ASSERT_EQ(UnknownField::TYPE_VARINT, unknown_fields.field(0).type()); + EXPECT_EQ(5, unknown_fields.field(0).varint()); + + EXPECT_EQ(repeated_field->number(), unknown_fields.field(1).number()); + ASSERT_EQ(UnknownField::TYPE_VARINT, unknown_fields.field(1).type()); + EXPECT_EQ(4, unknown_fields.field(1).varint()); + + EXPECT_EQ(repeated_field->number(), unknown_fields.field(2).number()); + ASSERT_EQ(UnknownField::TYPE_VARINT, unknown_fields.field(2).type()); + EXPECT_EQ(6, unknown_fields.field(2).varint()); + } + + { + using unittest::optional_nested_enum_extension; + using unittest::repeated_nested_enum_extension; + + TestAllExtensions message; + ASSERT_TRUE(message.ParseFromString(data)); + EXPECT_EQ(TestAllTypes::BAR, + message.GetExtension(optional_nested_enum_extension)); + ASSERT_EQ(2, message.ExtensionSize(repeated_nested_enum_extension)); + EXPECT_EQ(TestAllTypes::FOO, + message.GetExtension(repeated_nested_enum_extension, 0)); + EXPECT_EQ(TestAllTypes::BAZ, + message.GetExtension(repeated_nested_enum_extension, 1)); + + const UnknownFieldSet& unknown_fields = message.unknown_fields(); + ASSERT_EQ(3, unknown_fields.field_count()); + + EXPECT_EQ(singular_field->number(), unknown_fields.field(0).number()); + ASSERT_EQ(UnknownField::TYPE_VARINT, unknown_fields.field(0).type()); + EXPECT_EQ(5, unknown_fields.field(0).varint()); + + EXPECT_EQ(repeated_field->number(), unknown_fields.field(1).number()); + ASSERT_EQ(UnknownField::TYPE_VARINT, unknown_fields.field(1).type()); + EXPECT_EQ(4, unknown_fields.field(1).varint()); + + EXPECT_EQ(repeated_field->number(), unknown_fields.field(2).number()); + ASSERT_EQ(UnknownField::TYPE_VARINT, unknown_fields.field(2).type()); + EXPECT_EQ(6, unknown_fields.field(2).varint()); + } +} + +TEST_F(UnknownFieldSetTest, SpaceUsed) { + unittest::TestEmptyMessage empty_message; + + // Make sure an unknown field 
set has zero space used until a field is + // actually added. + int base_size = empty_message.SpaceUsed(); + UnknownFieldSet* unknown_fields = empty_message.mutable_unknown_fields(); + EXPECT_EQ(base_size, empty_message.SpaceUsed()); + + // Make sure each thing we add to the set increases the SpaceUsed(). + unknown_fields->AddVarint(1, 0); + EXPECT_LT(base_size, empty_message.SpaceUsed()); + base_size = empty_message.SpaceUsed(); + + string* str = unknown_fields->AddLengthDelimited(1); + EXPECT_LT(base_size, empty_message.SpaceUsed()); + base_size = empty_message.SpaceUsed(); + + str->assign(sizeof(string) + 1, 'x'); + EXPECT_LT(base_size, empty_message.SpaceUsed()); + base_size = empty_message.SpaceUsed(); + + UnknownFieldSet* group = unknown_fields->AddGroup(1); + EXPECT_LT(base_size, empty_message.SpaceUsed()); + base_size = empty_message.SpaceUsed(); + + group->AddVarint(1, 0); + EXPECT_LT(base_size, empty_message.SpaceUsed()); +} + +TEST_F(UnknownFieldSetTest, Empty) { + UnknownFieldSet unknown_fields; + EXPECT_TRUE(unknown_fields.empty()); + unknown_fields.AddVarint(6, 123); + EXPECT_FALSE(unknown_fields.empty()); + unknown_fields.Clear(); + EXPECT_TRUE(unknown_fields.empty()); +} + +} // namespace +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format.cc new file mode 100644 index 0000000000..831a579450 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format.cc @@ -0,0 +1,1069 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
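Illustrative sketch (not part of the vendored patch): the hunk above closes out the UnknownFieldSet unit tests before wire_format.cc begins. As a minimal picture of the API those tests exercise, the C++ sketch below counts the unrecognized varint fields a parsed proto2 message carried; MyMessage is a hypothetical generated message type used only for this example.

    #include <google/protobuf/unknown_field_set.h>

    // MyMessage stands in for any generated proto2 message type (hypothetical).
    // Unknown fields keep their field number and raw value after parsing, so
    // they survive reserialization (compare the ParseKnownAndUnknown test).
    int CountUnknownVarints(const MyMessage& msg) {
      const google::protobuf::UnknownFieldSet& ufs = msg.unknown_fields();
      int n = 0;
      for (int i = 0; i < ufs.field_count(); i++) {
        if (ufs.field(i).type() == google::protobuf::UnknownField::TYPE_VARINT) {
          ++n;  // ufs.field(i).number() / ufs.field(i).varint() give number and value
        }
      }
      return n;
    }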
+ +#include +#include +#include + +#include + +#include +#include +#include +#include +#include +#include +#include +#include + + +namespace google { +namespace protobuf { +namespace internal { + +using internal::WireFormatLite; + +namespace { + +// This function turns out to be convenient when using some macros later. +inline int GetEnumNumber(const EnumValueDescriptor* descriptor) { + return descriptor->number(); +} + +} // anonymous namespace + +// =================================================================== + +bool UnknownFieldSetFieldSkipper::SkipField( + io::CodedInputStream* input, uint32 tag) { + return WireFormat::SkipField(input, tag, unknown_fields_); +} + +bool UnknownFieldSetFieldSkipper::SkipMessage(io::CodedInputStream* input) { + return WireFormat::SkipMessage(input, unknown_fields_); +} + +void UnknownFieldSetFieldSkipper::SkipUnknownEnum( + int field_number, int value) { + unknown_fields_->AddVarint(field_number, value); +} + +bool WireFormat::SkipField(io::CodedInputStream* input, uint32 tag, + UnknownFieldSet* unknown_fields) { + int number = WireFormatLite::GetTagFieldNumber(tag); + + switch (WireFormatLite::GetTagWireType(tag)) { + case WireFormatLite::WIRETYPE_VARINT: { + uint64 value; + if (!input->ReadVarint64(&value)) return false; + if (unknown_fields != NULL) unknown_fields->AddVarint(number, value); + return true; + } + case WireFormatLite::WIRETYPE_FIXED64: { + uint64 value; + if (!input->ReadLittleEndian64(&value)) return false; + if (unknown_fields != NULL) unknown_fields->AddFixed64(number, value); + return true; + } + case WireFormatLite::WIRETYPE_LENGTH_DELIMITED: { + uint32 length; + if (!input->ReadVarint32(&length)) return false; + if (unknown_fields == NULL) { + if (!input->Skip(length)) return false; + } else { + if (!input->ReadString(unknown_fields->AddLengthDelimited(number), + length)) { + return false; + } + } + return true; + } + case WireFormatLite::WIRETYPE_START_GROUP: { + if (!input->IncrementRecursionDepth()) return false; + if (!SkipMessage(input, (unknown_fields == NULL) ? + NULL : unknown_fields->AddGroup(number))) { + return false; + } + input->DecrementRecursionDepth(); + // Check that the ending tag matched the starting tag. + if (!input->LastTagWas(WireFormatLite::MakeTag( + WireFormatLite::GetTagFieldNumber(tag), + WireFormatLite::WIRETYPE_END_GROUP))) { + return false; + } + return true; + } + case WireFormatLite::WIRETYPE_END_GROUP: { + return false; + } + case WireFormatLite::WIRETYPE_FIXED32: { + uint32 value; + if (!input->ReadLittleEndian32(&value)) return false; + if (unknown_fields != NULL) unknown_fields->AddFixed32(number, value); + return true; + } + default: { + return false; + } + } +} + +bool WireFormat::SkipMessage(io::CodedInputStream* input, + UnknownFieldSet* unknown_fields) { + while(true) { + uint32 tag = input->ReadTag(); + if (tag == 0) { + // End of input. This is a valid place to end, so return true. + return true; + } + + WireFormatLite::WireType wire_type = WireFormatLite::GetTagWireType(tag); + + if (wire_type == WireFormatLite::WIRETYPE_END_GROUP) { + // Must be the end of the message. 
+ return true; + } + + if (!SkipField(input, tag, unknown_fields)) return false; + } +} + +void WireFormat::SerializeUnknownFields(const UnknownFieldSet& unknown_fields, + io::CodedOutputStream* output) { + for (int i = 0; i < unknown_fields.field_count(); i++) { + const UnknownField& field = unknown_fields.field(i); + switch (field.type()) { + case UnknownField::TYPE_VARINT: + output->WriteVarint32(WireFormatLite::MakeTag(field.number(), + WireFormatLite::WIRETYPE_VARINT)); + output->WriteVarint64(field.varint()); + break; + case UnknownField::TYPE_FIXED32: + output->WriteVarint32(WireFormatLite::MakeTag(field.number(), + WireFormatLite::WIRETYPE_FIXED32)); + output->WriteLittleEndian32(field.fixed32()); + break; + case UnknownField::TYPE_FIXED64: + output->WriteVarint32(WireFormatLite::MakeTag(field.number(), + WireFormatLite::WIRETYPE_FIXED64)); + output->WriteLittleEndian64(field.fixed64()); + break; + case UnknownField::TYPE_LENGTH_DELIMITED: + output->WriteVarint32(WireFormatLite::MakeTag(field.number(), + WireFormatLite::WIRETYPE_LENGTH_DELIMITED)); + output->WriteVarint32(field.length_delimited().size()); + output->WriteString(field.length_delimited()); + break; + case UnknownField::TYPE_GROUP: + output->WriteVarint32(WireFormatLite::MakeTag(field.number(), + WireFormatLite::WIRETYPE_START_GROUP)); + SerializeUnknownFields(field.group(), output); + output->WriteVarint32(WireFormatLite::MakeTag(field.number(), + WireFormatLite::WIRETYPE_END_GROUP)); + break; + } + } +} + +uint8* WireFormat::SerializeUnknownFieldsToArray( + const UnknownFieldSet& unknown_fields, + uint8* target) { + for (int i = 0; i < unknown_fields.field_count(); i++) { + const UnknownField& field = unknown_fields.field(i); + + switch (field.type()) { + case UnknownField::TYPE_VARINT: + target = WireFormatLite::WriteInt64ToArray( + field.number(), field.varint(), target); + break; + case UnknownField::TYPE_FIXED32: + target = WireFormatLite::WriteFixed32ToArray( + field.number(), field.fixed32(), target); + break; + case UnknownField::TYPE_FIXED64: + target = WireFormatLite::WriteFixed64ToArray( + field.number(), field.fixed64(), target); + break; + case UnknownField::TYPE_LENGTH_DELIMITED: + target = WireFormatLite::WriteBytesToArray( + field.number(), field.length_delimited(), target); + break; + case UnknownField::TYPE_GROUP: + target = WireFormatLite::WriteTagToArray( + field.number(), WireFormatLite::WIRETYPE_START_GROUP, target); + target = SerializeUnknownFieldsToArray(field.group(), target); + target = WireFormatLite::WriteTagToArray( + field.number(), WireFormatLite::WIRETYPE_END_GROUP, target); + break; + } + } + return target; +} + +void WireFormat::SerializeUnknownMessageSetItems( + const UnknownFieldSet& unknown_fields, + io::CodedOutputStream* output) { + for (int i = 0; i < unknown_fields.field_count(); i++) { + const UnknownField& field = unknown_fields.field(i); + // The only unknown fields that are allowed to exist in a MessageSet are + // messages, which are length-delimited. + if (field.type() == UnknownField::TYPE_LENGTH_DELIMITED) { + const string& data = field.length_delimited(); + + // Start group. + output->WriteVarint32(WireFormatLite::kMessageSetItemStartTag); + + // Write type ID. + output->WriteVarint32(WireFormatLite::kMessageSetTypeIdTag); + output->WriteVarint32(field.number()); + + // Write message. + output->WriteVarint32(WireFormatLite::kMessageSetMessageTag); + output->WriteVarint32(data.size()); + output->WriteString(data); + + // End group. 
+ output->WriteVarint32(WireFormatLite::kMessageSetItemEndTag); + } + } +} + +uint8* WireFormat::SerializeUnknownMessageSetItemsToArray( + const UnknownFieldSet& unknown_fields, + uint8* target) { + for (int i = 0; i < unknown_fields.field_count(); i++) { + const UnknownField& field = unknown_fields.field(i); + + // The only unknown fields that are allowed to exist in a MessageSet are + // messages, which are length-delimited. + if (field.type() == UnknownField::TYPE_LENGTH_DELIMITED) { + const string& data = field.length_delimited(); + + // Start group. + target = io::CodedOutputStream::WriteTagToArray( + WireFormatLite::kMessageSetItemStartTag, target); + + // Write type ID. + target = io::CodedOutputStream::WriteTagToArray( + WireFormatLite::kMessageSetTypeIdTag, target); + target = io::CodedOutputStream::WriteVarint32ToArray( + field.number(), target); + + // Write message. + target = io::CodedOutputStream::WriteTagToArray( + WireFormatLite::kMessageSetMessageTag, target); + target = io::CodedOutputStream::WriteVarint32ToArray(data.size(), target); + target = io::CodedOutputStream::WriteStringToArray(data, target); + + // End group. + target = io::CodedOutputStream::WriteTagToArray( + WireFormatLite::kMessageSetItemEndTag, target); + } + } + + return target; +} + +int WireFormat::ComputeUnknownFieldsSize( + const UnknownFieldSet& unknown_fields) { + int size = 0; + for (int i = 0; i < unknown_fields.field_count(); i++) { + const UnknownField& field = unknown_fields.field(i); + + switch (field.type()) { + case UnknownField::TYPE_VARINT: + size += io::CodedOutputStream::VarintSize32( + WireFormatLite::MakeTag(field.number(), + WireFormatLite::WIRETYPE_VARINT)); + size += io::CodedOutputStream::VarintSize64(field.varint()); + break; + case UnknownField::TYPE_FIXED32: + size += io::CodedOutputStream::VarintSize32( + WireFormatLite::MakeTag(field.number(), + WireFormatLite::WIRETYPE_FIXED32)); + size += sizeof(int32); + break; + case UnknownField::TYPE_FIXED64: + size += io::CodedOutputStream::VarintSize32( + WireFormatLite::MakeTag(field.number(), + WireFormatLite::WIRETYPE_FIXED64)); + size += sizeof(int64); + break; + case UnknownField::TYPE_LENGTH_DELIMITED: + size += io::CodedOutputStream::VarintSize32( + WireFormatLite::MakeTag(field.number(), + WireFormatLite::WIRETYPE_LENGTH_DELIMITED)); + size += io::CodedOutputStream::VarintSize32( + field.length_delimited().size()); + size += field.length_delimited().size(); + break; + case UnknownField::TYPE_GROUP: + size += io::CodedOutputStream::VarintSize32( + WireFormatLite::MakeTag(field.number(), + WireFormatLite::WIRETYPE_START_GROUP)); + size += ComputeUnknownFieldsSize(field.group()); + size += io::CodedOutputStream::VarintSize32( + WireFormatLite::MakeTag(field.number(), + WireFormatLite::WIRETYPE_END_GROUP)); + break; + } + } + + return size; +} + +int WireFormat::ComputeUnknownMessageSetItemsSize( + const UnknownFieldSet& unknown_fields) { + int size = 0; + for (int i = 0; i < unknown_fields.field_count(); i++) { + const UnknownField& field = unknown_fields.field(i); + + // The only unknown fields that are allowed to exist in a MessageSet are + // messages, which are length-delimited. 
+ if (field.type() == UnknownField::TYPE_LENGTH_DELIMITED) { + size += WireFormatLite::kMessageSetItemTagsSize; + size += io::CodedOutputStream::VarintSize32(field.number()); + size += io::CodedOutputStream::VarintSize32( + field.length_delimited().size()); + size += field.length_delimited().size(); + } + } + + return size; +} + +// =================================================================== + +bool WireFormat::ParseAndMergePartial(io::CodedInputStream* input, + Message* message) { + const Descriptor* descriptor = message->GetDescriptor(); + const Reflection* message_reflection = message->GetReflection(); + + while(true) { + uint32 tag = input->ReadTag(); + if (tag == 0) { + // End of input. This is a valid place to end, so return true. + return true; + } + + if (WireFormatLite::GetTagWireType(tag) == + WireFormatLite::WIRETYPE_END_GROUP) { + // Must be the end of the message. + return true; + } + + const FieldDescriptor* field = NULL; + + if (descriptor != NULL) { + int field_number = WireFormatLite::GetTagFieldNumber(tag); + field = descriptor->FindFieldByNumber(field_number); + + // If that failed, check if the field is an extension. + if (field == NULL && descriptor->IsExtensionNumber(field_number)) { + if (input->GetExtensionPool() == NULL) { + field = message_reflection->FindKnownExtensionByNumber(field_number); + } else { + field = input->GetExtensionPool() + ->FindExtensionByNumber(descriptor, field_number); + } + } + + // If that failed, but we're a MessageSet, and this is the tag for a + // MessageSet item, then parse that. + if (field == NULL && + descriptor->options().message_set_wire_format() && + tag == WireFormatLite::kMessageSetItemStartTag) { + if (!ParseAndMergeMessageSetItem(input, message)) { + return false; + } + continue; // Skip ParseAndMergeField(); already taken care of. + } + } + + if (!ParseAndMergeField(tag, field, message, input)) { + return false; + } + } +} + +bool WireFormat::ParseAndMergeField( + uint32 tag, + const FieldDescriptor* field, // May be NULL for unknown + Message* message, + io::CodedInputStream* input) { + const Reflection* message_reflection = message->GetReflection(); + + enum { UNKNOWN, NORMAL_FORMAT, PACKED_FORMAT } value_format; + + if (field == NULL) { + value_format = UNKNOWN; + } else if (WireFormatLite::GetTagWireType(tag) == + WireTypeForFieldType(field->type())) { + value_format = NORMAL_FORMAT; + } else if (field->is_packable() && + WireFormatLite::GetTagWireType(tag) == + WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + value_format = PACKED_FORMAT; + } else { + // We don't recognize this field. Either the field number is unknown + // or the wire type doesn't match. Put it in our unknown field set. 
+ value_format = UNKNOWN; + } + + if (value_format == UNKNOWN) { + return SkipField(input, tag, + message_reflection->MutableUnknownFields(message)); + } else if (value_format == PACKED_FORMAT) { + uint32 length; + if (!input->ReadVarint32(&length)) return false; + io::CodedInputStream::Limit limit = input->PushLimit(length); + + switch (field->type()) { +#define HANDLE_PACKED_TYPE(TYPE, CPPTYPE, CPPTYPE_METHOD) \ + case FieldDescriptor::TYPE_##TYPE: { \ + while (input->BytesUntilLimit() > 0) { \ + CPPTYPE value; \ + if (!WireFormatLite::ReadPrimitive< \ + CPPTYPE, WireFormatLite::TYPE_##TYPE>(input, &value)) \ + return false; \ + message_reflection->Add##CPPTYPE_METHOD(message, field, value); \ + } \ + break; \ + } + + HANDLE_PACKED_TYPE( INT32, int32, Int32) + HANDLE_PACKED_TYPE( INT64, int64, Int64) + HANDLE_PACKED_TYPE(SINT32, int32, Int32) + HANDLE_PACKED_TYPE(SINT64, int64, Int64) + HANDLE_PACKED_TYPE(UINT32, uint32, UInt32) + HANDLE_PACKED_TYPE(UINT64, uint64, UInt64) + + HANDLE_PACKED_TYPE( FIXED32, uint32, UInt32) + HANDLE_PACKED_TYPE( FIXED64, uint64, UInt64) + HANDLE_PACKED_TYPE(SFIXED32, int32, Int32) + HANDLE_PACKED_TYPE(SFIXED64, int64, Int64) + + HANDLE_PACKED_TYPE(FLOAT , float , Float ) + HANDLE_PACKED_TYPE(DOUBLE, double, Double) + + HANDLE_PACKED_TYPE(BOOL, bool, Bool) +#undef HANDLE_PACKED_TYPE + + case FieldDescriptor::TYPE_ENUM: { + while (input->BytesUntilLimit() > 0) { + int value; + if (!WireFormatLite::ReadPrimitive( + input, &value)) return false; + const EnumValueDescriptor* enum_value = + field->enum_type()->FindValueByNumber(value); + if (enum_value != NULL) { + message_reflection->AddEnum(message, field, enum_value); + } + } + + break; + } + + case FieldDescriptor::TYPE_STRING: + case FieldDescriptor::TYPE_GROUP: + case FieldDescriptor::TYPE_MESSAGE: + case FieldDescriptor::TYPE_BYTES: + // Can't have packed fields of these types: these should be caught by + // the protocol compiler. + return false; + break; + } + + input->PopLimit(limit); + } else { + // Non-packed value (value_format == NORMAL_FORMAT) + switch (field->type()) { +#define HANDLE_TYPE(TYPE, CPPTYPE, CPPTYPE_METHOD) \ + case FieldDescriptor::TYPE_##TYPE: { \ + CPPTYPE value; \ + if (!WireFormatLite::ReadPrimitive< \ + CPPTYPE, WireFormatLite::TYPE_##TYPE>(input, &value)) \ + return false; \ + if (field->is_repeated()) { \ + message_reflection->Add##CPPTYPE_METHOD(message, field, value); \ + } else { \ + message_reflection->Set##CPPTYPE_METHOD(message, field, value); \ + } \ + break; \ + } + + HANDLE_TYPE( INT32, int32, Int32) + HANDLE_TYPE( INT64, int64, Int64) + HANDLE_TYPE(SINT32, int32, Int32) + HANDLE_TYPE(SINT64, int64, Int64) + HANDLE_TYPE(UINT32, uint32, UInt32) + HANDLE_TYPE(UINT64, uint64, UInt64) + + HANDLE_TYPE( FIXED32, uint32, UInt32) + HANDLE_TYPE( FIXED64, uint64, UInt64) + HANDLE_TYPE(SFIXED32, int32, Int32) + HANDLE_TYPE(SFIXED64, int64, Int64) + + HANDLE_TYPE(FLOAT , float , Float ) + HANDLE_TYPE(DOUBLE, double, Double) + + HANDLE_TYPE(BOOL, bool, Bool) +#undef HANDLE_TYPE + + case FieldDescriptor::TYPE_ENUM: { + int value; + if (!WireFormatLite::ReadPrimitive( + input, &value)) return false; + const EnumValueDescriptor* enum_value = + field->enum_type()->FindValueByNumber(value); + if (enum_value != NULL) { + if (field->is_repeated()) { + message_reflection->AddEnum(message, field, enum_value); + } else { + message_reflection->SetEnum(message, field, enum_value); + } + } else { + // The enum value is not one of the known values. Add it to the + // UnknownFieldSet. 
+ int64 sign_extended_value = static_cast(value); + message_reflection->MutableUnknownFields(message) + ->AddVarint(WireFormatLite::GetTagFieldNumber(tag), + sign_extended_value); + } + break; + } + + // Handle strings separately so that we can optimize the ctype=CORD case. + case FieldDescriptor::TYPE_STRING: { + string value; + if (!WireFormatLite::ReadString(input, &value)) return false; + VerifyUTF8String(value.data(), value.length(), PARSE); + if (field->is_repeated()) { + message_reflection->AddString(message, field, value); + } else { + message_reflection->SetString(message, field, value); + } + break; + } + + case FieldDescriptor::TYPE_BYTES: { + string value; + if (!WireFormatLite::ReadBytes(input, &value)) return false; + if (field->is_repeated()) { + message_reflection->AddString(message, field, value); + } else { + message_reflection->SetString(message, field, value); + } + break; + } + + case FieldDescriptor::TYPE_GROUP: { + Message* sub_message; + if (field->is_repeated()) { + sub_message = message_reflection->AddMessage( + message, field, input->GetExtensionFactory()); + } else { + sub_message = message_reflection->MutableMessage( + message, field, input->GetExtensionFactory()); + } + + if (!WireFormatLite::ReadGroup(WireFormatLite::GetTagFieldNumber(tag), + input, sub_message)) + return false; + break; + } + + case FieldDescriptor::TYPE_MESSAGE: { + Message* sub_message; + if (field->is_repeated()) { + sub_message = message_reflection->AddMessage( + message, field, input->GetExtensionFactory()); + } else { + sub_message = message_reflection->MutableMessage( + message, field, input->GetExtensionFactory()); + } + + if (!WireFormatLite::ReadMessage(input, sub_message)) return false; + break; + } + } + } + + return true; +} + +bool WireFormat::ParseAndMergeMessageSetItem( + io::CodedInputStream* input, + Message* message) { + const Reflection* message_reflection = message->GetReflection(); + + // This method parses a group which should contain two fields: + // required int32 type_id = 2; + // required data message = 3; + + // Once we see a type_id, we'll construct a fake tag for this extension + // which is the tag it would have had under the proto2 extensions wire + // format. + uint32 fake_tag = 0; + + // Once we see a type_id, we'll look up the FieldDescriptor for the + // extension. + const FieldDescriptor* field = NULL; + + // If we see message data before the type_id, we'll append it to this so + // we can parse it later. This will probably never happen in practice, + // as no MessageSet encoder I know of writes the message before the type ID. + // But, it's technically valid so we should allow it. + // TODO(kenton): Use a Cord instead? Do I care? + string message_data; + + while (true) { + uint32 tag = input->ReadTag(); + if (tag == 0) return false; + + switch (tag) { + case WireFormatLite::kMessageSetTypeIdTag: { + uint32 type_id; + if (!input->ReadVarint32(&type_id)) return false; + fake_tag = WireFormatLite::MakeTag( + type_id, WireFormatLite::WIRETYPE_LENGTH_DELIMITED); + field = message_reflection->FindKnownExtensionByNumber(type_id); + + if (!message_data.empty()) { + // We saw some message data before the type_id. Have to parse it + // now. 
+ io::ArrayInputStream raw_input(message_data.data(), + message_data.size()); + io::CodedInputStream sub_input(&raw_input); + if (!ParseAndMergeField(fake_tag, field, message, + &sub_input)) { + return false; + } + message_data.clear(); + } + + break; + } + + case WireFormatLite::kMessageSetMessageTag: { + if (fake_tag == 0) { + // We haven't seen a type_id yet. Append this data to message_data. + string temp; + uint32 length; + if (!input->ReadVarint32(&length)) return false; + if (!input->ReadString(&temp, length)) return false; + message_data.append(temp); + } else { + // Already saw type_id, so we can parse this directly. + if (!ParseAndMergeField(fake_tag, field, message, input)) { + return false; + } + } + + break; + } + + case WireFormatLite::kMessageSetItemEndTag: { + return true; + } + + default: { + if (!SkipField(input, tag, NULL)) return false; + } + } + } +} + +// =================================================================== + +void WireFormat::SerializeWithCachedSizes( + const Message& message, + int size, io::CodedOutputStream* output) { + const Descriptor* descriptor = message.GetDescriptor(); + const Reflection* message_reflection = message.GetReflection(); + int expected_endpoint = output->ByteCount() + size; + + vector fields; + message_reflection->ListFields(message, &fields); + for (int i = 0; i < fields.size(); i++) { + SerializeFieldWithCachedSizes(fields[i], message, output); + } + + if (descriptor->options().message_set_wire_format()) { + SerializeUnknownMessageSetItems( + message_reflection->GetUnknownFields(message), output); + } else { + SerializeUnknownFields( + message_reflection->GetUnknownFields(message), output); + } + + GOOGLE_CHECK_EQ(output->ByteCount(), expected_endpoint) + << ": Protocol message serialized to a size different from what was " + "originally expected. Perhaps it was modified by another thread " + "during serialization?"; +} + +void WireFormat::SerializeFieldWithCachedSizes( + const FieldDescriptor* field, + const Message& message, + io::CodedOutputStream* output) { + const Reflection* message_reflection = message.GetReflection(); + + if (field->is_extension() && + field->containing_type()->options().message_set_wire_format() && + field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE && + !field->is_repeated()) { + SerializeMessageSetItemWithCachedSizes(field, message, output); + return; + } + + int count = 0; + + if (field->is_repeated()) { + count = message_reflection->FieldSize(message, field); + } else if (message_reflection->HasField(message, field)) { + count = 1; + } + + const bool is_packed = field->options().packed(); + if (is_packed && count > 0) { + WireFormatLite::WriteTag(field->number(), + WireFormatLite::WIRETYPE_LENGTH_DELIMITED, output); + const int data_size = FieldDataOnlyByteSize(field, message); + output->WriteVarint32(data_size); + } + + for (int j = 0; j < count; j++) { + switch (field->type()) { +#define HANDLE_PRIMITIVE_TYPE(TYPE, CPPTYPE, TYPE_METHOD, CPPTYPE_METHOD) \ + case FieldDescriptor::TYPE_##TYPE: { \ + const CPPTYPE value = field->is_repeated() ? 
\ + message_reflection->GetRepeated##CPPTYPE_METHOD( \ + message, field, j) : \ + message_reflection->Get##CPPTYPE_METHOD( \ + message, field); \ + if (is_packed) { \ + WireFormatLite::Write##TYPE_METHOD##NoTag(value, output); \ + } else { \ + WireFormatLite::Write##TYPE_METHOD(field->number(), value, output); \ + } \ + break; \ + } + + HANDLE_PRIMITIVE_TYPE( INT32, int32, Int32, Int32) + HANDLE_PRIMITIVE_TYPE( INT64, int64, Int64, Int64) + HANDLE_PRIMITIVE_TYPE(SINT32, int32, SInt32, Int32) + HANDLE_PRIMITIVE_TYPE(SINT64, int64, SInt64, Int64) + HANDLE_PRIMITIVE_TYPE(UINT32, uint32, UInt32, UInt32) + HANDLE_PRIMITIVE_TYPE(UINT64, uint64, UInt64, UInt64) + + HANDLE_PRIMITIVE_TYPE( FIXED32, uint32, Fixed32, UInt32) + HANDLE_PRIMITIVE_TYPE( FIXED64, uint64, Fixed64, UInt64) + HANDLE_PRIMITIVE_TYPE(SFIXED32, int32, SFixed32, Int32) + HANDLE_PRIMITIVE_TYPE(SFIXED64, int64, SFixed64, Int64) + + HANDLE_PRIMITIVE_TYPE(FLOAT , float , Float , Float ) + HANDLE_PRIMITIVE_TYPE(DOUBLE, double, Double, Double) + + HANDLE_PRIMITIVE_TYPE(BOOL, bool, Bool, Bool) +#undef HANDLE_PRIMITIVE_TYPE + +#define HANDLE_TYPE(TYPE, TYPE_METHOD, CPPTYPE_METHOD) \ + case FieldDescriptor::TYPE_##TYPE: \ + WireFormatLite::Write##TYPE_METHOD( \ + field->number(), \ + field->is_repeated() ? \ + message_reflection->GetRepeated##CPPTYPE_METHOD( \ + message, field, j) : \ + message_reflection->Get##CPPTYPE_METHOD(message, field), \ + output); \ + break; + + HANDLE_TYPE(GROUP , Group , Message) + HANDLE_TYPE(MESSAGE, Message, Message) +#undef HANDLE_TYPE + + case FieldDescriptor::TYPE_ENUM: { + const EnumValueDescriptor* value = field->is_repeated() ? + message_reflection->GetRepeatedEnum(message, field, j) : + message_reflection->GetEnum(message, field); + if (is_packed) { + WireFormatLite::WriteEnumNoTag(value->number(), output); + } else { + WireFormatLite::WriteEnum(field->number(), value->number(), output); + } + break; + } + + // Handle strings separately so that we can get string references + // instead of copying. + case FieldDescriptor::TYPE_STRING: { + string scratch; + const string& value = field->is_repeated() ? + message_reflection->GetRepeatedStringReference( + message, field, j, &scratch) : + message_reflection->GetStringReference(message, field, &scratch); + VerifyUTF8String(value.data(), value.length(), SERIALIZE); + WireFormatLite::WriteString(field->number(), value, output); + break; + } + + case FieldDescriptor::TYPE_BYTES: { + string scratch; + const string& value = field->is_repeated() ? + message_reflection->GetRepeatedStringReference( + message, field, j, &scratch) : + message_reflection->GetStringReference(message, field, &scratch); + WireFormatLite::WriteBytes(field->number(), value, output); + break; + } + } + } +} + +void WireFormat::SerializeMessageSetItemWithCachedSizes( + const FieldDescriptor* field, + const Message& message, + io::CodedOutputStream* output) { + const Reflection* message_reflection = message.GetReflection(); + + // Start group. + output->WriteVarint32(WireFormatLite::kMessageSetItemStartTag); + + // Write type ID. + output->WriteVarint32(WireFormatLite::kMessageSetTypeIdTag); + output->WriteVarint32(field->number()); + + // Write message. + output->WriteVarint32(WireFormatLite::kMessageSetMessageTag); + + const Message& sub_message = message_reflection->GetMessage(message, field); + output->WriteVarint32(sub_message.GetCachedSize()); + sub_message.SerializeWithCachedSizes(output); + + // End group. 
+ output->WriteVarint32(WireFormatLite::kMessageSetItemEndTag); +} + +// =================================================================== + +int WireFormat::ByteSize(const Message& message) { + const Descriptor* descriptor = message.GetDescriptor(); + const Reflection* message_reflection = message.GetReflection(); + + int our_size = 0; + + vector fields; + message_reflection->ListFields(message, &fields); + for (int i = 0; i < fields.size(); i++) { + our_size += FieldByteSize(fields[i], message); + } + + if (descriptor->options().message_set_wire_format()) { + our_size += ComputeUnknownMessageSetItemsSize( + message_reflection->GetUnknownFields(message)); + } else { + our_size += ComputeUnknownFieldsSize( + message_reflection->GetUnknownFields(message)); + } + + return our_size; +} + +int WireFormat::FieldByteSize( + const FieldDescriptor* field, + const Message& message) { + const Reflection* message_reflection = message.GetReflection(); + + if (field->is_extension() && + field->containing_type()->options().message_set_wire_format() && + field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE && + !field->is_repeated()) { + return MessageSetItemByteSize(field, message); + } + + int count = 0; + if (field->is_repeated()) { + count = message_reflection->FieldSize(message, field); + } else if (message_reflection->HasField(message, field)) { + count = 1; + } + + const int data_size = FieldDataOnlyByteSize(field, message); + int our_size = data_size; + if (field->options().packed()) { + if (data_size > 0) { + // Packed fields get serialized like a string, not their native type. + // Technically this doesn't really matter; the size only changes if it's + // a GROUP + our_size += TagSize(field->number(), FieldDescriptor::TYPE_STRING); + our_size += io::CodedOutputStream::VarintSize32(data_size); + } + } else { + our_size += count * TagSize(field->number(), field->type()); + } + return our_size; +} + +int WireFormat::FieldDataOnlyByteSize( + const FieldDescriptor* field, + const Message& message) { + const Reflection* message_reflection = message.GetReflection(); + + int count = 0; + if (field->is_repeated()) { + count = message_reflection->FieldSize(message, field); + } else if (message_reflection->HasField(message, field)) { + count = 1; + } + + int data_size = 0; + switch (field->type()) { +#define HANDLE_TYPE(TYPE, TYPE_METHOD, CPPTYPE_METHOD) \ + case FieldDescriptor::TYPE_##TYPE: \ + if (field->is_repeated()) { \ + for (int j = 0; j < count; j++) { \ + data_size += WireFormatLite::TYPE_METHOD##Size( \ + message_reflection->GetRepeated##CPPTYPE_METHOD( \ + message, field, j)); \ + } \ + } else { \ + data_size += WireFormatLite::TYPE_METHOD##Size( \ + message_reflection->Get##CPPTYPE_METHOD(message, field)); \ + } \ + break; + +#define HANDLE_FIXED_TYPE(TYPE, TYPE_METHOD) \ + case FieldDescriptor::TYPE_##TYPE: \ + data_size += count * WireFormatLite::k##TYPE_METHOD##Size; \ + break; + + HANDLE_TYPE( INT32, Int32, Int32) + HANDLE_TYPE( INT64, Int64, Int64) + HANDLE_TYPE(SINT32, SInt32, Int32) + HANDLE_TYPE(SINT64, SInt64, Int64) + HANDLE_TYPE(UINT32, UInt32, UInt32) + HANDLE_TYPE(UINT64, UInt64, UInt64) + + HANDLE_FIXED_TYPE( FIXED32, Fixed32) + HANDLE_FIXED_TYPE( FIXED64, Fixed64) + HANDLE_FIXED_TYPE(SFIXED32, SFixed32) + HANDLE_FIXED_TYPE(SFIXED64, SFixed64) + + HANDLE_FIXED_TYPE(FLOAT , Float ) + HANDLE_FIXED_TYPE(DOUBLE, Double) + + HANDLE_FIXED_TYPE(BOOL, Bool) + + HANDLE_TYPE(GROUP , Group , Message) + HANDLE_TYPE(MESSAGE, Message, Message) +#undef HANDLE_TYPE +#undef HANDLE_FIXED_TYPE 
+ + case FieldDescriptor::TYPE_ENUM: { + if (field->is_repeated()) { + for (int j = 0; j < count; j++) { + data_size += WireFormatLite::EnumSize( + message_reflection->GetRepeatedEnum(message, field, j)->number()); + } + } else { + data_size += WireFormatLite::EnumSize( + message_reflection->GetEnum(message, field)->number()); + } + break; + } + + // Handle strings separately so that we can get string references + // instead of copying. + case FieldDescriptor::TYPE_STRING: + case FieldDescriptor::TYPE_BYTES: { + for (int j = 0; j < count; j++) { + string scratch; + const string& value = field->is_repeated() ? + message_reflection->GetRepeatedStringReference( + message, field, j, &scratch) : + message_reflection->GetStringReference(message, field, &scratch); + data_size += WireFormatLite::StringSize(value); + } + break; + } + } + return data_size; +} + +int WireFormat::MessageSetItemByteSize( + const FieldDescriptor* field, + const Message& message) { + const Reflection* message_reflection = message.GetReflection(); + + int our_size = WireFormatLite::kMessageSetItemTagsSize; + + // type_id + our_size += io::CodedOutputStream::VarintSize32(field->number()); + + // message + const Message& sub_message = message_reflection->GetMessage(message, field); + int message_size = sub_message.ByteSize(); + + our_size += io::CodedOutputStream::VarintSize32(message_size); + our_size += message_size; + + return our_size; +} + +void WireFormat::VerifyUTF8StringFallback(const char* data, + int size, + Operation op) { + if (!IsStructurallyValidUTF8(data, size)) { + const char* operation_str = NULL; + switch (op) { + case PARSE: + operation_str = "parsing"; + break; + case SERIALIZE: + operation_str = "serializing"; + break; + // no default case: have the compiler warn if a case is not covered. + } + GOOGLE_LOG(ERROR) << "Encountered string containing invalid UTF-8 data while " + << operation_str + << " protocol buffer. Strings must contain only UTF-8; " + "use the 'bytes' type for raw bytes."; + } +} + + +} // namespace internal +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format.h new file mode 100644 index 0000000000..c753925085 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format.h @@ -0,0 +1,304 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// atenasio@google.com (Chris Atenasio) (ZigZag transform) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This header is logically internal, but is made public because it is used +// from protocol-compiler-generated code, which may reside in other components. + +#ifndef GOOGLE_PROTOBUF_WIRE_FORMAT_H__ +#define GOOGLE_PROTOBUF_WIRE_FORMAT_H__ + +#include +#include +#include +#include +#include + +// Do UTF-8 validation on string type in Debug build only +#ifndef NDEBUG +#define GOOGLE_PROTOBUF_UTF8_VALIDATION_ENABLED +#endif + +namespace google { +namespace protobuf { + namespace io { + class CodedInputStream; // coded_stream.h + class CodedOutputStream; // coded_stream.h + } + class UnknownFieldSet; // unknown_field_set.h +} + +namespace protobuf { +namespace internal { + +// This class is for internal use by the protocol buffer library and by +// protocol-complier-generated message classes. It must not be called +// directly by clients. +// +// This class contains code for implementing the binary protocol buffer +// wire format via reflection. The WireFormatLite class implements the +// non-reflection based routines. +// +// This class is really a namespace that contains only static methods +class LIBPROTOBUF_EXPORT WireFormat { + public: + + // Given a field return its WireType + static inline WireFormatLite::WireType WireTypeForField( + const FieldDescriptor* field); + + // Given a FieldSescriptor::Type return its WireType + static inline WireFormatLite::WireType WireTypeForFieldType( + FieldDescriptor::Type type); + + // Compute the byte size of a tag. For groups, this includes both the start + // and end tags. + static inline int TagSize(int field_number, FieldDescriptor::Type type); + + // These procedures can be used to implement the methods of Message which + // handle parsing and serialization of the protocol buffer wire format + // using only the Reflection interface. When you ask the protocol + // compiler to optimize for code size rather than speed, it will implement + // those methods in terms of these procedures. Of course, these are much + // slower than the specialized implementations which the protocol compiler + // generates when told to optimize for speed. + + // Read a message in protocol buffer wire format. + // + // This procedure reads either to the end of the input stream or through + // a WIRETYPE_END_GROUP tag ending the message, whichever comes first. + // It returns false if the input is invalid. + // + // Required fields are NOT checked by this method. You must call + // IsInitialized() on the resulting message yourself. 
+ static bool ParseAndMergePartial(io::CodedInputStream* input, + Message* message); + + // Serialize a message in protocol buffer wire format. + // + // Any embedded messages within the message must have their correct sizes + // cached. However, the top-level message need not; its size is passed as + // a parameter to this procedure. + // + // These return false iff the underlying stream returns a write error. + static void SerializeWithCachedSizes( + const Message& message, + int size, io::CodedOutputStream* output); + + // Implements Message::ByteSize() via reflection. WARNING: The result + // of this method is *not* cached anywhere. However, all embedded messages + // will have their ByteSize() methods called, so their sizes will be cached. + // Therefore, calling this method is sufficient to allow you to call + // WireFormat::SerializeWithCachedSizes() on the same object. + static int ByteSize(const Message& message); + + // ----------------------------------------------------------------- + // Helpers for dealing with unknown fields + + // Skips a field value of the given WireType. The input should start + // positioned immediately after the tag. If unknown_fields is non-NULL, + // the contents of the field will be added to it. + static bool SkipField(io::CodedInputStream* input, uint32 tag, + UnknownFieldSet* unknown_fields); + + // Reads and ignores a message from the input. If unknown_fields is non-NULL, + // the contents will be added to it. + static bool SkipMessage(io::CodedInputStream* input, + UnknownFieldSet* unknown_fields); + + // Write the contents of an UnknownFieldSet to the output. + static void SerializeUnknownFields(const UnknownFieldSet& unknown_fields, + io::CodedOutputStream* output); + // Same as above, except writing directly to the provided buffer. + // Requires that the buffer have sufficient capacity for + // ComputeUnknownFieldsSize(unknown_fields). + // + // Returns a pointer past the last written byte. + static uint8* SerializeUnknownFieldsToArray( + const UnknownFieldSet& unknown_fields, + uint8* target); + + // Same thing except for messages that have the message_set_wire_format + // option. + static void SerializeUnknownMessageSetItems( + const UnknownFieldSet& unknown_fields, + io::CodedOutputStream* output); + // Same as above, except writing directly to the provided buffer. + // Requires that the buffer have sufficient capacity for + // ComputeUnknownMessageSetItemsSize(unknown_fields). + // + // Returns a pointer past the last written byte. + static uint8* SerializeUnknownMessageSetItemsToArray( + const UnknownFieldSet& unknown_fields, + uint8* target); + + // Compute the size of the UnknownFieldSet on the wire. + static int ComputeUnknownFieldsSize(const UnknownFieldSet& unknown_fields); + + // Same thing except for messages that have the message_set_wire_format + // option. + static int ComputeUnknownMessageSetItemsSize( + const UnknownFieldSet& unknown_fields); + + + // Helper functions for encoding and decoding tags. (Inlined below and in + // _inl.h) + // + // This is different from MakeTag(field->number(), field->type()) in the case + // of packed repeated fields. + static uint32 MakeTag(const FieldDescriptor* field); + + // Parse a single field. The input should start out positioned immidately + // after the tag. + static bool ParseAndMergeField( + uint32 tag, + const FieldDescriptor* field, // May be NULL for unknown + Message* message, + io::CodedInputStream* input); + + // Serialize a single field. 
+ static void SerializeFieldWithCachedSizes( + const FieldDescriptor* field, // Cannot be NULL + const Message& message, + io::CodedOutputStream* output); + + // Compute size of a single field. If the field is a message type, this + // will call ByteSize() for the embedded message, insuring that it caches + // its size. + static int FieldByteSize( + const FieldDescriptor* field, // Cannot be NULL + const Message& message); + + // Parse/serialize a MessageSet::Item group. Used with messages that use + // opion message_set_wire_format = true. + static bool ParseAndMergeMessageSetItem( + io::CodedInputStream* input, + Message* message); + static void SerializeMessageSetItemWithCachedSizes( + const FieldDescriptor* field, + const Message& message, + io::CodedOutputStream* output); + static int MessageSetItemByteSize( + const FieldDescriptor* field, + const Message& message); + + // Computes the byte size of a field, excluding tags. For packed fields, it + // only includes the size of the raw data, and not the size of the total + // length, but for other length-delimited types, the size of the length is + // included. + static int FieldDataOnlyByteSize( + const FieldDescriptor* field, // Cannot be NULL + const Message& message); + + enum Operation { + PARSE, + SERIALIZE, + }; + + // Verifies that a string field is valid UTF8, logging an error if not. + static void VerifyUTF8String(const char* data, int size, Operation op); + + private: + // Verifies that a string field is valid UTF8, logging an error if not. + static void VerifyUTF8StringFallback( + const char* data, + int size, + Operation op); + + + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(WireFormat); +}; + +// Subclass of FieldSkipper which saves skipped fields to an UnknownFieldSet. +class LIBPROTOBUF_EXPORT UnknownFieldSetFieldSkipper : public FieldSkipper { + public: + UnknownFieldSetFieldSkipper(UnknownFieldSet* unknown_fields) + : unknown_fields_(unknown_fields) {} + virtual ~UnknownFieldSetFieldSkipper() {} + + // implements FieldSkipper ----------------------------------------- + virtual bool SkipField(io::CodedInputStream* input, uint32 tag); + virtual bool SkipMessage(io::CodedInputStream* input); + virtual void SkipUnknownEnum(int field_number, int value); + + private: + UnknownFieldSet* unknown_fields_; +}; + +// inline methods ==================================================== + +inline WireFormatLite::WireType WireFormat::WireTypeForField( + const FieldDescriptor* field) { + if (field->options().packed()) { + return WireFormatLite::WIRETYPE_LENGTH_DELIMITED; + } else { + return WireTypeForFieldType(field->type()); + } +} + +inline WireFormatLite::WireType WireFormat::WireTypeForFieldType( + FieldDescriptor::Type type) { + // Some compilers don't like enum -> enum casts, so we implicit_cast to + // int first. + return WireFormatLite::WireTypeForFieldType( + static_cast( + implicit_cast(type))); +} + +inline uint32 WireFormat::MakeTag(const FieldDescriptor* field) { + return WireFormatLite::MakeTag(field->number(), WireTypeForField(field)); +} + +inline int WireFormat::TagSize(int field_number, FieldDescriptor::Type type) { + // Some compilers don't like enum -> enum casts, so we implicit_cast to + // int first. 
+ return WireFormatLite::TagSize(field_number, + static_cast( + implicit_cast(type))); +} + +inline void WireFormat::VerifyUTF8String(const char* data, int size, + WireFormat::Operation op) { +#ifdef GOOGLE_PROTOBUF_UTF8_VALIDATION_ENABLED + WireFormat::VerifyUTF8StringFallback(data, size, op); +#endif +} + + +} // namespace internal +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_WIRE_FORMAT_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format_lite.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format_lite.cc new file mode 100644 index 0000000000..d347d11697 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format_lite.cc @@ -0,0 +1,359 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include + +#include +#include +#include +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace internal { + +#ifndef _MSC_VER // MSVC doesn't like definitions of inline constants, GCC + // requires them. 
+const int WireFormatLite::kMessageSetItemStartTag; +const int WireFormatLite::kMessageSetItemEndTag; +const int WireFormatLite::kMessageSetTypeIdTag; +const int WireFormatLite::kMessageSetMessageTag; + +#endif + +const int WireFormatLite::kMessageSetItemTagsSize = + io::CodedOutputStream::VarintSize32(kMessageSetItemStartTag) + + io::CodedOutputStream::VarintSize32(kMessageSetItemEndTag) + + io::CodedOutputStream::VarintSize32(kMessageSetTypeIdTag) + + io::CodedOutputStream::VarintSize32(kMessageSetMessageTag); + +const WireFormatLite::CppType +WireFormatLite::kFieldTypeToCppTypeMap[MAX_FIELD_TYPE + 1] = { + static_cast(0), // 0 is reserved for errors + + CPPTYPE_DOUBLE, // TYPE_DOUBLE + CPPTYPE_FLOAT, // TYPE_FLOAT + CPPTYPE_INT64, // TYPE_INT64 + CPPTYPE_UINT64, // TYPE_UINT64 + CPPTYPE_INT32, // TYPE_INT32 + CPPTYPE_UINT64, // TYPE_FIXED64 + CPPTYPE_UINT32, // TYPE_FIXED32 + CPPTYPE_BOOL, // TYPE_BOOL + CPPTYPE_STRING, // TYPE_STRING + CPPTYPE_MESSAGE, // TYPE_GROUP + CPPTYPE_MESSAGE, // TYPE_MESSAGE + CPPTYPE_STRING, // TYPE_BYTES + CPPTYPE_UINT32, // TYPE_UINT32 + CPPTYPE_ENUM, // TYPE_ENUM + CPPTYPE_INT32, // TYPE_SFIXED32 + CPPTYPE_INT64, // TYPE_SFIXED64 + CPPTYPE_INT32, // TYPE_SINT32 + CPPTYPE_INT64, // TYPE_SINT64 +}; + +const WireFormatLite::WireType +WireFormatLite::kWireTypeForFieldType[MAX_FIELD_TYPE + 1] = { + static_cast(-1), // invalid + WireFormatLite::WIRETYPE_FIXED64, // TYPE_DOUBLE + WireFormatLite::WIRETYPE_FIXED32, // TYPE_FLOAT + WireFormatLite::WIRETYPE_VARINT, // TYPE_INT64 + WireFormatLite::WIRETYPE_VARINT, // TYPE_UINT64 + WireFormatLite::WIRETYPE_VARINT, // TYPE_INT32 + WireFormatLite::WIRETYPE_FIXED64, // TYPE_FIXED64 + WireFormatLite::WIRETYPE_FIXED32, // TYPE_FIXED32 + WireFormatLite::WIRETYPE_VARINT, // TYPE_BOOL + WireFormatLite::WIRETYPE_LENGTH_DELIMITED, // TYPE_STRING + WireFormatLite::WIRETYPE_START_GROUP, // TYPE_GROUP + WireFormatLite::WIRETYPE_LENGTH_DELIMITED, // TYPE_MESSAGE + WireFormatLite::WIRETYPE_LENGTH_DELIMITED, // TYPE_BYTES + WireFormatLite::WIRETYPE_VARINT, // TYPE_UINT32 + WireFormatLite::WIRETYPE_VARINT, // TYPE_ENUM + WireFormatLite::WIRETYPE_FIXED32, // TYPE_SFIXED32 + WireFormatLite::WIRETYPE_FIXED64, // TYPE_SFIXED64 + WireFormatLite::WIRETYPE_VARINT, // TYPE_SINT32 + WireFormatLite::WIRETYPE_VARINT, // TYPE_SINT64 +}; + +bool WireFormatLite::SkipField( + io::CodedInputStream* input, uint32 tag) { + switch (WireFormatLite::GetTagWireType(tag)) { + case WireFormatLite::WIRETYPE_VARINT: { + uint64 value; + if (!input->ReadVarint64(&value)) return false; + return true; + } + case WireFormatLite::WIRETYPE_FIXED64: { + uint64 value; + if (!input->ReadLittleEndian64(&value)) return false; + return true; + } + case WireFormatLite::WIRETYPE_LENGTH_DELIMITED: { + uint32 length; + if (!input->ReadVarint32(&length)) return false; + if (!input->Skip(length)) return false; + return true; + } + case WireFormatLite::WIRETYPE_START_GROUP: { + if (!input->IncrementRecursionDepth()) return false; + if (!SkipMessage(input)) return false; + input->DecrementRecursionDepth(); + // Check that the ending tag matched the starting tag. 
+ if (!input->LastTagWas(WireFormatLite::MakeTag( + WireFormatLite::GetTagFieldNumber(tag), + WireFormatLite::WIRETYPE_END_GROUP))) { + return false; + } + return true; + } + case WireFormatLite::WIRETYPE_END_GROUP: { + return false; + } + case WireFormatLite::WIRETYPE_FIXED32: { + uint32 value; + if (!input->ReadLittleEndian32(&value)) return false; + return true; + } + default: { + return false; + } + } +} + +bool WireFormatLite::SkipMessage(io::CodedInputStream* input) { + while(true) { + uint32 tag = input->ReadTag(); + if (tag == 0) { + // End of input. This is a valid place to end, so return true. + return true; + } + + WireFormatLite::WireType wire_type = WireFormatLite::GetTagWireType(tag); + + if (wire_type == WireFormatLite::WIRETYPE_END_GROUP) { + // Must be the end of the message. + return true; + } + + if (!SkipField(input, tag)) return false; + } +} + +bool FieldSkipper::SkipField( + io::CodedInputStream* input, uint32 tag) { + return WireFormatLite::SkipField(input, tag); +} + +bool FieldSkipper::SkipMessage(io::CodedInputStream* input) { + return WireFormatLite::SkipMessage(input); +} + +void FieldSkipper::SkipUnknownEnum( + int field_number, int value) { + // Nothing. +} + +bool WireFormatLite::ReadPackedEnumNoInline(io::CodedInputStream* input, + bool (*is_valid)(int), + RepeatedField* values) { + uint32 length; + if (!input->ReadVarint32(&length)) return false; + io::CodedInputStream::Limit limit = input->PushLimit(length); + while (input->BytesUntilLimit() > 0) { + int value; + if (!google::protobuf::internal::WireFormatLite::ReadPrimitive< + int, WireFormatLite::TYPE_ENUM>(input, &value)) { + return false; + } + if (is_valid(value)) { + values->Add(value); + } + } + input->PopLimit(limit); + return true; +} + +void WireFormatLite::WriteInt32(int field_number, int32 value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_VARINT, output); + WriteInt32NoTag(value, output); +} +void WireFormatLite::WriteInt64(int field_number, int64 value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_VARINT, output); + WriteInt64NoTag(value, output); +} +void WireFormatLite::WriteUInt32(int field_number, uint32 value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_VARINT, output); + WriteUInt32NoTag(value, output); +} +void WireFormatLite::WriteUInt64(int field_number, uint64 value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_VARINT, output); + WriteUInt64NoTag(value, output); +} +void WireFormatLite::WriteSInt32(int field_number, int32 value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_VARINT, output); + WriteSInt32NoTag(value, output); +} +void WireFormatLite::WriteSInt64(int field_number, int64 value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_VARINT, output); + WriteSInt64NoTag(value, output); +} +void WireFormatLite::WriteFixed32(int field_number, uint32 value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_FIXED32, output); + WriteFixed32NoTag(value, output); +} +void WireFormatLite::WriteFixed64(int field_number, uint64 value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_FIXED64, output); + WriteFixed64NoTag(value, output); +} +void WireFormatLite::WriteSFixed32(int field_number, int32 value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_FIXED32, output); + WriteSFixed32NoTag(value, output); +} +void WireFormatLite::WriteSFixed64(int field_number, int64 value, + 
io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_FIXED64, output); + WriteSFixed64NoTag(value, output); +} +void WireFormatLite::WriteFloat(int field_number, float value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_FIXED32, output); + WriteFloatNoTag(value, output); +} +void WireFormatLite::WriteDouble(int field_number, double value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_FIXED64, output); + WriteDoubleNoTag(value, output); +} +void WireFormatLite::WriteBool(int field_number, bool value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_VARINT, output); + WriteBoolNoTag(value, output); +} +void WireFormatLite::WriteEnum(int field_number, int value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_VARINT, output); + WriteEnumNoTag(value, output); +} + +void WireFormatLite::WriteString(int field_number, const string& value, + io::CodedOutputStream* output) { + // String is for UTF-8 text only + WriteTag(field_number, WIRETYPE_LENGTH_DELIMITED, output); + output->WriteVarint32(value.size()); + output->WriteString(value); +} +void WireFormatLite::WriteBytes(int field_number, const string& value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_LENGTH_DELIMITED, output); + output->WriteVarint32(value.size()); + output->WriteString(value); +} + + +void WireFormatLite::WriteGroup(int field_number, + const MessageLite& value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_START_GROUP, output); + value.SerializeWithCachedSizes(output); + WriteTag(field_number, WIRETYPE_END_GROUP, output); +} + +void WireFormatLite::WriteMessage(int field_number, + const MessageLite& value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_LENGTH_DELIMITED, output); + const int size = value.GetCachedSize(); + output->WriteVarint32(size); + value.SerializeWithCachedSizes(output); +} + +void WireFormatLite::WriteGroupMaybeToArray(int field_number, + const MessageLite& value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_START_GROUP, output); + const int size = value.GetCachedSize(); + uint8* target = output->GetDirectBufferForNBytesAndAdvance(size); + if (target != NULL) { + uint8* end = value.SerializeWithCachedSizesToArray(target); + GOOGLE_DCHECK_EQ(end - target, size); + } else { + value.SerializeWithCachedSizes(output); + } + WriteTag(field_number, WIRETYPE_END_GROUP, output); +} + +void WireFormatLite::WriteMessageMaybeToArray(int field_number, + const MessageLite& value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_LENGTH_DELIMITED, output); + const int size = value.GetCachedSize(); + output->WriteVarint32(size); + uint8* target = output->GetDirectBufferForNBytesAndAdvance(size); + if (target != NULL) { + uint8* end = value.SerializeWithCachedSizesToArray(target); + GOOGLE_DCHECK_EQ(end - target, size); + } else { + value.SerializeWithCachedSizes(output); + } +} + +bool WireFormatLite::ReadString(io::CodedInputStream* input, + string* value) { + // String is for UTF-8 text only + uint32 length; + if (!input->ReadVarint32(&length)) return false; + if (!input->InternalReadStringInline(value, length)) return false; + return true; +} +bool WireFormatLite::ReadBytes(io::CodedInputStream* input, + string* value) { + uint32 length; + if (!input->ReadVarint32(&length)) return false; + return input->InternalReadStringInline(value, length); +} + +} // namespace internal +} // namespace protobuf +} // 
namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format_lite.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format_lite.h new file mode 100644 index 0000000000..e3d5b2d8d0 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format_lite.h @@ -0,0 +1,620 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// atenasio@google.com (Chris Atenasio) (ZigZag transform) +// wink@google.com (Wink Saville) (refactored from wire_format.h) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// This header is logically internal, but is made public because it is used +// from protocol-compiler-generated code, which may reside in other components. + +#ifndef GOOGLE_PROTOBUF_WIRE_FORMAT_LITE_H__ +#define GOOGLE_PROTOBUF_WIRE_FORMAT_LITE_H__ + +#include +#include + +namespace google { + +namespace protobuf { + template class RepeatedField; // repeated_field.h + namespace io { + class CodedInputStream; // coded_stream.h + class CodedOutputStream; // coded_stream.h + } +} + +namespace protobuf { +namespace internal { + +class StringPieceField; + +// This class is for internal use by the protocol buffer library and by +// protocol-complier-generated message classes. It must not be called +// directly by clients. +// +// This class contains helpers for implementing the binary protocol buffer +// wire format without the need for reflection. Use WireFormat when using +// reflection. +// +// This class is really a namespace that contains only static methods. +class LIBPROTOBUF_EXPORT WireFormatLite { + public: + + // ----------------------------------------------------------------- + // Helper constants and functions related to the format. 
These are + // mostly meant for internal and generated code to use. + + // The wire format is composed of a sequence of tag/value pairs, each + // of which contains the value of one field (or one element of a repeated + // field). Each tag is encoded as a varint. The lower bits of the tag + // identify its wire type, which specifies the format of the data to follow. + // The rest of the bits contain the field number. Each type of field (as + // declared by FieldDescriptor::Type, in descriptor.h) maps to one of + // these wire types. Immediately following each tag is the field's value, + // encoded in the format specified by the wire type. Because the tag + // identifies the encoding of this data, it is possible to skip + // unrecognized fields for forwards compatibility. + + enum WireType { + WIRETYPE_VARINT = 0, + WIRETYPE_FIXED64 = 1, + WIRETYPE_LENGTH_DELIMITED = 2, + WIRETYPE_START_GROUP = 3, + WIRETYPE_END_GROUP = 4, + WIRETYPE_FIXED32 = 5, + }; + + // Lite alternative to FieldDescriptor::Type. Must be kept in sync. + enum FieldType { + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + TYPE_GROUP = 10, + TYPE_MESSAGE = 11, + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + TYPE_SINT32 = 17, + TYPE_SINT64 = 18, + MAX_FIELD_TYPE = 18, + }; + + // Lite alternative to FieldDescriptor::CppType. Must be kept in sync. + enum CppType { + CPPTYPE_INT32 = 1, + CPPTYPE_INT64 = 2, + CPPTYPE_UINT32 = 3, + CPPTYPE_UINT64 = 4, + CPPTYPE_DOUBLE = 5, + CPPTYPE_FLOAT = 6, + CPPTYPE_BOOL = 7, + CPPTYPE_ENUM = 8, + CPPTYPE_STRING = 9, + CPPTYPE_MESSAGE = 10, + MAX_CPPTYPE = 10, + }; + + // Helper method to get the CppType for a particular Type. + static CppType FieldTypeToCppType(FieldType type); + + // Given a FieldSescriptor::Type return its WireType + static inline WireFormatLite::WireType WireTypeForFieldType( + WireFormatLite::FieldType type) { + return kWireTypeForFieldType[type]; + } + + // Number of bits in a tag which identify the wire type. + static const int kTagTypeBits = 3; + // Mask for those bits. + static const uint32 kTagTypeMask = (1 << kTagTypeBits) - 1; + + // Helper functions for encoding and decoding tags. (Inlined below and in + // _inl.h) + // + // This is different from MakeTag(field->number(), field->type()) in the case + // of packed repeated fields. + static uint32 MakeTag(int field_number, WireType type); + static WireType GetTagWireType(uint32 tag); + static int GetTagFieldNumber(uint32 tag); + + // Compute the byte size of a tag. For groups, this includes both the start + // and end tags. + static inline int TagSize(int field_number, WireFormatLite::FieldType type); + + // Skips a field value with the given tag. The input should start + // positioned immediately after the tag. Skipped values are simply discarded, + // not recorded anywhere. See WireFormat::SkipField() for a version that + // records to an UnknownFieldSet. + static bool SkipField(io::CodedInputStream* input, uint32 tag); + + // Reads and ignores a message from the input. Skipped values are simply + // discarded, not recorded anywhere. See WireFormat::SkipMessage() for a + // version that records to an UnknownFieldSet. 
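The tag layout described above boils down to a 3-bit shift and mask. Before the skip helpers that follow, a minimal standalone sketch of that arithmetic (plain C++, independent of the WireFormatLite class; the constants mirror the ones declared above):

#include <cstdint>
#include <cstdio>

// Low 3 bits of a tag hold the wire type, the remaining bits the field number.
constexpr int kTagTypeBits = 3;
constexpr uint32_t kTagTypeMask = (1u << kTagTypeBits) - 1;

constexpr uint32_t MakeTag(int field_number, uint32_t wire_type) {
  return (static_cast<uint32_t>(field_number) << kTagTypeBits) | wire_type;
}

int main() {
  const uint32_t kLengthDelimited = 2;
  uint32_t tag = MakeTag(15, kLengthDelimited);  // field 15, wire type 2
  std::printf("tag          = %u (0x%x)\n", static_cast<unsigned>(tag),
              static_cast<unsigned>(tag));                        // 122 (0x7a)
  std::printf("field number = %u\n", static_cast<unsigned>(tag >> kTagTypeBits));  // 15
  std::printf("wire type    = %u\n", static_cast<unsigned>(tag & kTagTypeMask));   // 2
  return 0;
}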
+ static bool SkipMessage(io::CodedInputStream* input); + +// This macro does the same thing as WireFormatLite::MakeTag(), but the +// result is usable as a compile-time constant, which makes it usable +// as a switch case or a template input. WireFormatLite::MakeTag() is more +// type-safe, though, so prefer it if possible. +#define GOOGLE_PROTOBUF_WIRE_FORMAT_MAKE_TAG(FIELD_NUMBER, TYPE) \ + static_cast( \ + ((FIELD_NUMBER) << ::google::protobuf::internal::WireFormatLite::kTagTypeBits) \ + | (TYPE)) + + // These are the tags for the old MessageSet format, which was defined as: + // message MessageSet { + // repeated group Item = 1 { + // required int32 type_id = 2; + // required string message = 3; + // } + // } + static const int kMessageSetItemNumber = 1; + static const int kMessageSetTypeIdNumber = 2; + static const int kMessageSetMessageNumber = 3; + static const int kMessageSetItemStartTag = + GOOGLE_PROTOBUF_WIRE_FORMAT_MAKE_TAG(kMessageSetItemNumber, + WireFormatLite::WIRETYPE_START_GROUP); + static const int kMessageSetItemEndTag = + GOOGLE_PROTOBUF_WIRE_FORMAT_MAKE_TAG(kMessageSetItemNumber, + WireFormatLite::WIRETYPE_END_GROUP); + static const int kMessageSetTypeIdTag = + GOOGLE_PROTOBUF_WIRE_FORMAT_MAKE_TAG(kMessageSetTypeIdNumber, + WireFormatLite::WIRETYPE_VARINT); + static const int kMessageSetMessageTag = + GOOGLE_PROTOBUF_WIRE_FORMAT_MAKE_TAG(kMessageSetMessageNumber, + WireFormatLite::WIRETYPE_LENGTH_DELIMITED); + + // Byte size of all tags of a MessageSet::Item combined. + static const int kMessageSetItemTagsSize; + + // Helper functions for converting between floats/doubles and IEEE-754 + // uint32s/uint64s so that they can be written. (Assumes your platform + // uses IEEE-754 floats.) + static uint32 EncodeFloat(float value); + static float DecodeFloat(uint32 value); + static uint64 EncodeDouble(double value); + static double DecodeDouble(uint64 value); + + // Helper functions for mapping signed integers to unsigned integers in + // such a way that numbers with small magnitudes will encode to smaller + // varints. If you simply static_cast a negative number to an unsigned + // number and varint-encode it, it will always take 10 bytes, defeating + // the purpose of varint. So, for the "sint32" and "sint64" field types, + // we ZigZag-encode the values. + static uint32 ZigZagEncode32(int32 n); + static int32 ZigZagDecode32(uint32 n); + static uint64 ZigZagEncode64(int64 n); + static int64 ZigZagDecode64(uint64 n); + + // ================================================================= + // Methods for reading/writing individual field. The implementations + // of these methods are defined in wire_format_lite_inl.h; you must #include + // that file to use these. + +// Avoid ugly line wrapping +#define input io::CodedInputStream* input +#define output io::CodedOutputStream* output +#define field_number int field_number +#define INL GOOGLE_ATTRIBUTE_ALWAYS_INLINE + + // Read fields, not including tags. The assumption is that you already + // read the tag to determine what field to read. + + // For primitive fields, we just use a templatized routine parameterized by + // the represented type and the FieldType. These are specialized with the + // appropriate definition for each declared type. + template + static inline bool ReadPrimitive(input, CType* value) INL; + + // Reads repeated primitive values, with optimizations for repeats. + // tag_size and tag should both be compile-time constants provided by the + // protocol compiler. 
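The ZigZag comment above hinges on a property of varints: each byte carries 7 payload bits, and a negative int32 is sign-extended to 64 bits before encoding, so it always costs 10 bytes. A small self-contained sketch, assuming only those two rules, that counts the bytes for -1 both ways:

#include <cstdint>
#include <cstdio>

// Varint size: 7 payload bits per byte, high bit set on every byte but the last.
int VarintSize64(uint64_t v) {
  int n = 1;
  while (v >= 0x80) { v >>= 7; ++n; }
  return n;
}

uint32_t ZigZag32(int32_t n) {
  return (static_cast<uint32_t>(n) << 1) ^ static_cast<uint32_t>(n >> 31);
}

int main() {
  // int32 fields sign-extend to 64 bits, so -1 needs the full 10 bytes.
  std::printf("int32  -1 encodes in %d bytes\n",
              VarintSize64(static_cast<uint64_t>(static_cast<int64_t>(-1))));
  // sint32 fields ZigZag first: -1 maps to 1, which fits in one byte.
  std::printf("sint32 -1 encodes in %d bytes\n", VarintSize64(ZigZag32(-1)));
  return 0;
}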
+ template + static inline bool ReadRepeatedPrimitive(int tag_size, + uint32 tag, + input, + RepeatedField* value) INL; + + // Identical to ReadRepeatedPrimitive, except will not inline the + // implementation. + template + static bool ReadRepeatedPrimitiveNoInline(int tag_size, + uint32 tag, + input, + RepeatedField* value); + + // Reads a primitive value directly from the provided buffer. It returns a + // pointer past the segment of data that was read. + // + // This is only implemented for the types with fixed wire size, e.g. + // float, double, and the (s)fixed* types. + template + static inline const uint8* ReadPrimitiveFromArray(const uint8* buffer, + CType* value) INL; + + // Reads a primitive packed field. + // + // This is only implemented for packable types. + template + static inline bool ReadPackedPrimitive(input, + RepeatedField* value) INL; + + // Identical to ReadPackedPrimitive, except will not inline the + // implementation. + template + static bool ReadPackedPrimitiveNoInline(input, RepeatedField* value); + + // Read a packed enum field. Values for which is_valid() returns false are + // dropped. + static bool ReadPackedEnumNoInline(input, + bool (*is_valid)(int), + RepeatedField* value); + + static bool ReadString(input, string* value); + static bool ReadBytes (input, string* value); + + static inline bool ReadGroup (field_number, input, MessageLite* value); + static inline bool ReadMessage(input, MessageLite* value); + + // Like above, but de-virtualize the call to MergePartialFromCodedStream(). + // The pointer must point at an instance of MessageType, *not* a subclass (or + // the subclass must not override MergePartialFromCodedStream()). + template + static inline bool ReadGroupNoVirtual(field_number, input, + MessageType* value); + template + static inline bool ReadMessageNoVirtual(input, MessageType* value); + + // Write a tag. The Write*() functions typically include the tag, so + // normally there's no need to call this unless using the Write*NoTag() + // variants. + static inline void WriteTag(field_number, WireType type, output) INL; + + // Write fields, without tags. + static inline void WriteInt32NoTag (int32 value, output) INL; + static inline void WriteInt64NoTag (int64 value, output) INL; + static inline void WriteUInt32NoTag (uint32 value, output) INL; + static inline void WriteUInt64NoTag (uint64 value, output) INL; + static inline void WriteSInt32NoTag (int32 value, output) INL; + static inline void WriteSInt64NoTag (int64 value, output) INL; + static inline void WriteFixed32NoTag (uint32 value, output) INL; + static inline void WriteFixed64NoTag (uint64 value, output) INL; + static inline void WriteSFixed32NoTag(int32 value, output) INL; + static inline void WriteSFixed64NoTag(int64 value, output) INL; + static inline void WriteFloatNoTag (float value, output) INL; + static inline void WriteDoubleNoTag (double value, output) INL; + static inline void WriteBoolNoTag (bool value, output) INL; + static inline void WriteEnumNoTag (int value, output) INL; + + // Write fields, including tags. 
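ReadPackedPrimitive above consumes one length-delimited record: a single tag, a byte count, then the raw values back to back with no per-element tags. A hand-rolled sketch of that layout for three small varints (field number 4 and the values are just an example):

#include <cstdint>
#include <cstdio>
#include <initializer_list>
#include <vector>

// Appends v as a varint: 7 payload bits per byte, high bit means "more follows".
void AppendVarint(std::vector<uint8_t>* out, uint64_t v) {
  while (v >= 0x80) { out->push_back(static_cast<uint8_t>(v) | 0x80); v >>= 7; }
  out->push_back(static_cast<uint8_t>(v));
}

int main() {
  // A packed "repeated int32" field number 4 holding [3, 270, 86942].
  std::vector<uint8_t> payload;
  for (uint64_t v : {3u, 270u, 86942u}) AppendVarint(&payload, v);

  std::vector<uint8_t> field;
  AppendVarint(&field, (4u << 3) | 2u);   // one tag: field 4, length-delimited
  AppendVarint(&field, payload.size());   // byte length of the packed block
  field.insert(field.end(), payload.begin(), payload.end());

  for (uint8_t b : field) std::printf("%02x ", static_cast<unsigned>(b));
  std::printf("\n");  // prints: 22 06 03 8e 02 9e a7 05
  return 0;
}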
+ static void WriteInt32 (field_number, int32 value, output); + static void WriteInt64 (field_number, int64 value, output); + static void WriteUInt32 (field_number, uint32 value, output); + static void WriteUInt64 (field_number, uint64 value, output); + static void WriteSInt32 (field_number, int32 value, output); + static void WriteSInt64 (field_number, int64 value, output); + static void WriteFixed32 (field_number, uint32 value, output); + static void WriteFixed64 (field_number, uint64 value, output); + static void WriteSFixed32(field_number, int32 value, output); + static void WriteSFixed64(field_number, int64 value, output); + static void WriteFloat (field_number, float value, output); + static void WriteDouble (field_number, double value, output); + static void WriteBool (field_number, bool value, output); + static void WriteEnum (field_number, int value, output); + + static void WriteString(field_number, const string& value, output); + static void WriteBytes (field_number, const string& value, output); + + static void WriteGroup( + field_number, const MessageLite& value, output); + static void WriteMessage( + field_number, const MessageLite& value, output); + // Like above, but these will check if the output stream has enough + // space to write directly to a flat array. + static void WriteGroupMaybeToArray( + field_number, const MessageLite& value, output); + static void WriteMessageMaybeToArray( + field_number, const MessageLite& value, output); + + // Like above, but de-virtualize the call to SerializeWithCachedSizes(). The + // pointer must point at an instance of MessageType, *not* a subclass (or + // the subclass must not override SerializeWithCachedSizes()). + template + static inline void WriteGroupNoVirtual( + field_number, const MessageType& value, output); + template + static inline void WriteMessageNoVirtual( + field_number, const MessageType& value, output); + +#undef output +#define output uint8* target + + // Like above, but use only *ToArray methods of CodedOutputStream. + static inline uint8* WriteTagToArray(field_number, WireType type, output) INL; + + // Write fields, without tags. + static inline uint8* WriteInt32NoTagToArray (int32 value, output) INL; + static inline uint8* WriteInt64NoTagToArray (int64 value, output) INL; + static inline uint8* WriteUInt32NoTagToArray (uint32 value, output) INL; + static inline uint8* WriteUInt64NoTagToArray (uint64 value, output) INL; + static inline uint8* WriteSInt32NoTagToArray (int32 value, output) INL; + static inline uint8* WriteSInt64NoTagToArray (int64 value, output) INL; + static inline uint8* WriteFixed32NoTagToArray (uint32 value, output) INL; + static inline uint8* WriteFixed64NoTagToArray (uint64 value, output) INL; + static inline uint8* WriteSFixed32NoTagToArray(int32 value, output) INL; + static inline uint8* WriteSFixed64NoTagToArray(int64 value, output) INL; + static inline uint8* WriteFloatNoTagToArray (float value, output) INL; + static inline uint8* WriteDoubleNoTagToArray (double value, output) INL; + static inline uint8* WriteBoolNoTagToArray (bool value, output) INL; + static inline uint8* WriteEnumNoTagToArray (int value, output) INL; + + // Write fields, including tags. 
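All of the *ToArray helpers share one calling convention: write into a caller-supplied buffer and return a pointer one past the last byte written, so calls chain without any stream object. A rough standalone sketch of that pattern (WriteVarintToArray here is a stand-in, not the real CodedOutputStream method):

#include <cstdint>
#include <cstdio>

// Writes v as a varint at 'target' and returns the address one past the last
// byte written, mirroring the pointer-advancing style of the ToArray helpers.
uint8_t* WriteVarintToArray(uint64_t v, uint8_t* target) {
  while (v >= 0x80) { *target++ = static_cast<uint8_t>(v) | 0x80; v >>= 7; }
  *target++ = static_cast<uint8_t>(v);
  return target;
}

int main() {
  uint8_t buffer[16];
  uint8_t* p = buffer;
  p = WriteVarintToArray((1u << 3) | 0u, p);  // tag: field 1, wire type varint
  p = WriteVarintToArray(150u, p);            // value 150
  std::printf("%d bytes:", static_cast<int>(p - buffer));
  for (const uint8_t* q = buffer; q != p; ++q)
    std::printf(" %02x", static_cast<unsigned>(*q));
  std::printf("\n");  // 3 bytes: 08 96 01
  return 0;
}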
+ static inline uint8* WriteInt32ToArray( + field_number, int32 value, output) INL; + static inline uint8* WriteInt64ToArray( + field_number, int64 value, output) INL; + static inline uint8* WriteUInt32ToArray( + field_number, uint32 value, output) INL; + static inline uint8* WriteUInt64ToArray( + field_number, uint64 value, output) INL; + static inline uint8* WriteSInt32ToArray( + field_number, int32 value, output) INL; + static inline uint8* WriteSInt64ToArray( + field_number, int64 value, output) INL; + static inline uint8* WriteFixed32ToArray( + field_number, uint32 value, output) INL; + static inline uint8* WriteFixed64ToArray( + field_number, uint64 value, output) INL; + static inline uint8* WriteSFixed32ToArray( + field_number, int32 value, output) INL; + static inline uint8* WriteSFixed64ToArray( + field_number, int64 value, output) INL; + static inline uint8* WriteFloatToArray( + field_number, float value, output) INL; + static inline uint8* WriteDoubleToArray( + field_number, double value, output) INL; + static inline uint8* WriteBoolToArray( + field_number, bool value, output) INL; + static inline uint8* WriteEnumToArray( + field_number, int value, output) INL; + + static inline uint8* WriteStringToArray( + field_number, const string& value, output) INL; + static inline uint8* WriteBytesToArray( + field_number, const string& value, output) INL; + + static inline uint8* WriteGroupToArray( + field_number, const MessageLite& value, output) INL; + static inline uint8* WriteMessageToArray( + field_number, const MessageLite& value, output) INL; + + // Like above, but de-virtualize the call to SerializeWithCachedSizes(). The + // pointer must point at an instance of MessageType, *not* a subclass (or + // the subclass must not override SerializeWithCachedSizes()). + template + static inline uint8* WriteGroupNoVirtualToArray( + field_number, const MessageType& value, output) INL; + template + static inline uint8* WriteMessageNoVirtualToArray( + field_number, const MessageType& value, output) INL; + +#undef output +#undef input +#undef INL + +#undef field_number + + // Compute the byte size of a field. The XxSize() functions do NOT include + // the tag, so you must also call TagSize(). (This is because, for repeated + // fields, you should only call TagSize() once and multiply it by the element + // count, but you may have to call XxSize() for each individual element.) + static inline int Int32Size ( int32 value); + static inline int Int64Size ( int64 value); + static inline int UInt32Size (uint32 value); + static inline int UInt64Size (uint64 value); + static inline int SInt32Size ( int32 value); + static inline int SInt64Size ( int64 value); + static inline int EnumSize ( int value); + + // These types always have the same size. + static const int kFixed32Size = 4; + static const int kFixed64Size = 8; + static const int kSFixed32Size = 4; + static const int kSFixed64Size = 8; + static const int kFloatSize = 4; + static const int kDoubleSize = 8; + static const int kBoolSize = 1; + + static inline int StringSize(const string& value); + static inline int BytesSize (const string& value); + + static inline int GroupSize (const MessageLite& value); + static inline int MessageSize(const MessageLite& value); + + // Like above, but de-virtualize the call to ByteSize(). The + // pointer must point at an instance of MessageType, *not* a subclass (or + // the subclass must not override ByteSize()). 
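As the size comments above stress, the XxSize() helpers exclude the tag, so callers add TagSize() themselves (once per element for unpacked repeated fields). A hand-worked sketch of that bookkeeping for a single string field, using plain arithmetic rather than the real helpers (field number 12 and the sample value are made up):

#include <cstdint>
#include <cstdio>
#include <string>

int VarintSize32(uint32_t v) {
  int n = 1;
  while (v >= 0x80) { v >>= 7; ++n; }
  return n;
}

int main() {
  // optional string name = 12;  with the value "hello wire format"
  const int field_number = 12;
  const std::string value = "hello wire format";

  int tag_size = VarintSize32(static_cast<uint32_t>(field_number) << 3);  // 1 byte
  int payload_size = VarintSize32(static_cast<uint32_t>(value.size()))    // length prefix
                     + static_cast<int>(value.size());                    // the bytes themselves

  // Total on the wire: tag + length varint + UTF-8 bytes = 1 + 1 + 17 = 19.
  std::printf("tag=%d payload=%d total=%d\n",
              tag_size, payload_size, tag_size + payload_size);
  return 0;
}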
+ template + static inline int GroupSizeNoVirtual (const MessageType& value); + template + static inline int MessageSizeNoVirtual(const MessageType& value); + + private: + // A helper method for the repeated primitive reader. This method has + // optimizations for primitive types that have fixed size on the wire, and + // can be read using potentially faster paths. + template + static inline bool ReadRepeatedFixedSizePrimitive( + int tag_size, + uint32 tag, + google::protobuf::io::CodedInputStream* input, + RepeatedField* value) GOOGLE_ATTRIBUTE_ALWAYS_INLINE; + + static const CppType kFieldTypeToCppTypeMap[]; + static const WireFormatLite::WireType kWireTypeForFieldType[]; + + GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(WireFormatLite); +}; + +// A class which deals with unknown values. The default implementation just +// discards them. WireFormat defines a subclass which writes to an +// UnknownFieldSet. This class is used by ExtensionSet::ParseField(), since +// ExtensionSet is part of the lite library but UnknownFieldSet is not. +class LIBPROTOBUF_EXPORT FieldSkipper { + public: + FieldSkipper() {} + virtual ~FieldSkipper() {} + + // Skip a field whose tag has already been consumed. + virtual bool SkipField(io::CodedInputStream* input, uint32 tag); + + // Skip an entire message or group, up to an end-group tag (which is consumed) + // or end-of-stream. + virtual bool SkipMessage(io::CodedInputStream* input); + + // Deal with an already-parsed unrecognized enum value. The default + // implementation does nothing, but the UnknownFieldSet-based implementation + // saves it as an unknown varint. + virtual void SkipUnknownEnum(int field_number, int value); +}; + +// inline methods ==================================================== + +inline WireFormatLite::CppType +WireFormatLite::FieldTypeToCppType(FieldType type) { + return kFieldTypeToCppTypeMap[type]; +} + +inline uint32 WireFormatLite::MakeTag(int field_number, WireType type) { + return GOOGLE_PROTOBUF_WIRE_FORMAT_MAKE_TAG(field_number, type); +} + +inline WireFormatLite::WireType WireFormatLite::GetTagWireType(uint32 tag) { + return static_cast(tag & kTagTypeMask); +} + +inline int WireFormatLite::GetTagFieldNumber(uint32 tag) { + return static_cast(tag >> kTagTypeBits); +} + +inline int WireFormatLite::TagSize(int field_number, + WireFormatLite::FieldType type) { + int result = io::CodedOutputStream::VarintSize32( + field_number << kTagTypeBits); + if (type == TYPE_GROUP) { + // Groups have both a start and an end tag. + return result * 2; + } else { + return result; + } +} + +inline uint32 WireFormatLite::EncodeFloat(float value) { + union {float f; uint32 i;}; + f = value; + return i; +} + +inline float WireFormatLite::DecodeFloat(uint32 value) { + union {float f; uint32 i;}; + i = value; + return f; +} + +inline uint64 WireFormatLite::EncodeDouble(double value) { + union {double f; uint64 i;}; + f = value; + return i; +} + +inline double WireFormatLite::DecodeDouble(uint64 value) { + union {double f; uint64 i;}; + i = value; + return f; +} + +// ZigZag Transform: Encodes signed integers so that they can be +// effectively used with varint encoding. +// +// varint operates on unsigned integers, encoding smaller numbers into +// fewer bytes. If you try to use it on a signed integer, it will treat +// this number as a very large unsigned integer, which means that even +// small signed numbers like -1 will take the maximum number of bytes +// (10) to encode. 
ZigZagEncode() maps signed integers to unsigned +// in such a way that those with a small absolute value will have smaller +// encoded values, making them appropriate for encoding using varint. +// +// int32 -> uint32 +// ------------------------- +// 0 -> 0 +// -1 -> 1 +// 1 -> 2 +// -2 -> 3 +// ... -> ... +// 2147483647 -> 4294967294 +// -2147483648 -> 4294967295 +// +// >> encode >> +// << decode << + +inline uint32 WireFormatLite::ZigZagEncode32(int32 n) { + // Note: the right-shift must be arithmetic + return (n << 1) ^ (n >> 31); +} + +inline int32 WireFormatLite::ZigZagDecode32(uint32 n) { + return (n >> 1) ^ -static_cast(n & 1); +} + +inline uint64 WireFormatLite::ZigZagEncode64(int64 n) { + // Note: the right-shift must be arithmetic + return (n << 1) ^ (n >> 63); +} + +inline int64 WireFormatLite::ZigZagDecode64(uint64 n) { + return (n >> 1) ^ -static_cast(n & 1); +} + +} // namespace internal +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_WIRE_FORMAT_LITE_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format_lite_inl.h b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format_lite_inl.h new file mode 100644 index 0000000000..103b0bd0ca --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format_lite_inl.h @@ -0,0 +1,774 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// wink@google.com (Wink Saville) (refactored from wire_format.h) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. 
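Before the inline definitions that follow, a quick standalone round-trip check of the ZigZag transform specified at the end of wire_format_lite.h above; it reproduces the 0 -> 0, -1 -> 1, 1 -> 2 table and asserts that decode inverts encode:

#include <cassert>
#include <cstdint>
#include <cstdio>

uint32_t ZigZagEncode32(int32_t n) {
  // Cast before shifting so the left shift of a negative value is well defined.
  return (static_cast<uint32_t>(n) << 1) ^ static_cast<uint32_t>(n >> 31);
}
int32_t ZigZagDecode32(uint32_t n) {
  return static_cast<int32_t>(n >> 1) ^ -static_cast<int32_t>(n & 1);
}

int main() {
  // Matches the table above: 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ...
  const int32_t samples[] = {0, -1, 1, -2, 2, 2147483647, -2147483647 - 1};
  for (int32_t v : samples) {
    uint32_t e = ZigZagEncode32(v);
    assert(ZigZagDecode32(e) == v);  // round-trips exactly
    std::printf("%11d -> %10u\n", static_cast<int>(v), static_cast<unsigned>(e));
  }
  return 0;
}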
+ +#ifndef GOOGLE_PROTOBUF_WIRE_FORMAT_LITE_INL_H__ +#define GOOGLE_PROTOBUF_WIRE_FORMAT_LITE_INL_H__ + +#include +#include +#include +#include +#include +#include +#include + + +namespace google { +namespace protobuf { +namespace internal { + +// Implementation details of ReadPrimitive. + +template <> +inline bool WireFormatLite::ReadPrimitive( + io::CodedInputStream* input, + int32* value) { + uint32 temp; + if (!input->ReadVarint32(&temp)) return false; + *value = static_cast(temp); + return true; +} +template <> +inline bool WireFormatLite::ReadPrimitive( + io::CodedInputStream* input, + int64* value) { + uint64 temp; + if (!input->ReadVarint64(&temp)) return false; + *value = static_cast(temp); + return true; +} +template <> +inline bool WireFormatLite::ReadPrimitive( + io::CodedInputStream* input, + uint32* value) { + return input->ReadVarint32(value); +} +template <> +inline bool WireFormatLite::ReadPrimitive( + io::CodedInputStream* input, + uint64* value) { + return input->ReadVarint64(value); +} +template <> +inline bool WireFormatLite::ReadPrimitive( + io::CodedInputStream* input, + int32* value) { + uint32 temp; + if (!input->ReadVarint32(&temp)) return false; + *value = ZigZagDecode32(temp); + return true; +} +template <> +inline bool WireFormatLite::ReadPrimitive( + io::CodedInputStream* input, + int64* value) { + uint64 temp; + if (!input->ReadVarint64(&temp)) return false; + *value = ZigZagDecode64(temp); + return true; +} +template <> +inline bool WireFormatLite::ReadPrimitive( + io::CodedInputStream* input, + uint32* value) { + return input->ReadLittleEndian32(value); +} +template <> +inline bool WireFormatLite::ReadPrimitive( + io::CodedInputStream* input, + uint64* value) { + return input->ReadLittleEndian64(value); +} +template <> +inline bool WireFormatLite::ReadPrimitive( + io::CodedInputStream* input, + int32* value) { + uint32 temp; + if (!input->ReadLittleEndian32(&temp)) return false; + *value = static_cast(temp); + return true; +} +template <> +inline bool WireFormatLite::ReadPrimitive( + io::CodedInputStream* input, + int64* value) { + uint64 temp; + if (!input->ReadLittleEndian64(&temp)) return false; + *value = static_cast(temp); + return true; +} +template <> +inline bool WireFormatLite::ReadPrimitive( + io::CodedInputStream* input, + float* value) { + uint32 temp; + if (!input->ReadLittleEndian32(&temp)) return false; + *value = DecodeFloat(temp); + return true; +} +template <> +inline bool WireFormatLite::ReadPrimitive( + io::CodedInputStream* input, + double* value) { + uint64 temp; + if (!input->ReadLittleEndian64(&temp)) return false; + *value = DecodeDouble(temp); + return true; +} +template <> +inline bool WireFormatLite::ReadPrimitive( + io::CodedInputStream* input, + bool* value) { + uint32 temp; + if (!input->ReadVarint32(&temp)) return false; + *value = temp != 0; + return true; +} +template <> +inline bool WireFormatLite::ReadPrimitive( + io::CodedInputStream* input, + int* value) { + uint32 temp; + if (!input->ReadVarint32(&temp)) return false; + *value = static_cast(temp); + return true; +} + +template <> +inline const uint8* WireFormatLite::ReadPrimitiveFromArray< + uint32, WireFormatLite::TYPE_FIXED32>( + const uint8* buffer, + uint32* value) { + return io::CodedInputStream::ReadLittleEndian32FromArray(buffer, value); +} +template <> +inline const uint8* WireFormatLite::ReadPrimitiveFromArray< + uint64, WireFormatLite::TYPE_FIXED64>( + const uint8* buffer, + uint64* value) { + return io::CodedInputStream::ReadLittleEndian64FromArray(buffer, 
value); +} +template <> +inline const uint8* WireFormatLite::ReadPrimitiveFromArray< + int32, WireFormatLite::TYPE_SFIXED32>( + const uint8* buffer, + int32* value) { + uint32 temp; + buffer = io::CodedInputStream::ReadLittleEndian32FromArray(buffer, &temp); + *value = static_cast(temp); + return buffer; +} +template <> +inline const uint8* WireFormatLite::ReadPrimitiveFromArray< + int64, WireFormatLite::TYPE_SFIXED64>( + const uint8* buffer, + int64* value) { + uint64 temp; + buffer = io::CodedInputStream::ReadLittleEndian64FromArray(buffer, &temp); + *value = static_cast(temp); + return buffer; +} +template <> +inline const uint8* WireFormatLite::ReadPrimitiveFromArray< + float, WireFormatLite::TYPE_FLOAT>( + const uint8* buffer, + float* value) { + uint32 temp; + buffer = io::CodedInputStream::ReadLittleEndian32FromArray(buffer, &temp); + *value = DecodeFloat(temp); + return buffer; +} +template <> +inline const uint8* WireFormatLite::ReadPrimitiveFromArray< + double, WireFormatLite::TYPE_DOUBLE>( + const uint8* buffer, + double* value) { + uint64 temp; + buffer = io::CodedInputStream::ReadLittleEndian64FromArray(buffer, &temp); + *value = DecodeDouble(temp); + return buffer; +} + +template +inline bool WireFormatLite::ReadRepeatedPrimitive(int, // tag_size, unused. + uint32 tag, + io::CodedInputStream* input, + RepeatedField* values) { + CType value; + if (!ReadPrimitive(input, &value)) return false; + values->Add(value); + int elements_already_reserved = values->Capacity() - values->size(); + while (elements_already_reserved > 0 && input->ExpectTag(tag)) { + if (!ReadPrimitive(input, &value)) return false; + values->AddAlreadyReserved(value); + elements_already_reserved--; + } + return true; +} + +template +inline bool WireFormatLite::ReadRepeatedFixedSizePrimitive( + int tag_size, + uint32 tag, + io::CodedInputStream* input, + RepeatedField* values) { + GOOGLE_DCHECK_EQ(UInt32Size(tag), tag_size); + CType value; + if (!ReadPrimitive(input, &value)) + return false; + values->Add(value); + + // For fixed size values, repeated values can be read more quickly by + // reading directly from a raw array. + // + // We can get a tight loop by only reading as many elements as can be + // added to the RepeatedField without having to do any resizing. Additionally, + // we only try to read as many elements as are available from the current + // buffer space. Doing so avoids having to perform boundary checks when + // reading the value: the maximum number of elements that can be read is + // known outside of the loop. + const void* void_pointer; + int size; + input->GetDirectBufferPointerInline(&void_pointer, &size); + if (size > 0) { + const uint8* buffer = reinterpret_cast(void_pointer); + // The number of bytes each type occupies on the wire. + const int per_value_size = tag_size + sizeof(value); + + int elements_available = min(values->Capacity() - values->size(), + size / per_value_size); + int num_read = 0; + while (num_read < elements_available && + (buffer = io::CodedInputStream::ExpectTagFromArray( + buffer, tag)) != NULL) { + buffer = ReadPrimitiveFromArray(buffer, &value); + values->AddAlreadyReserved(value); + ++num_read; + } + const int read_bytes = num_read * per_value_size; + if (read_bytes > 0) { + input->Skip(read_bytes); + } + } + return true; +} + +// Specializations of ReadRepeatedPrimitive for the fixed size types, which use +// the optimized code path. 
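The fast path above reads only as many fixed-size elements as fit in both the RepeatedField's spare capacity and the bytes left in the current buffer, so the inner loop needs no per-element bounds check. The arithmetic is just a min over two counts (the numbers below are illustrative, not taken from the library):

#include <algorithm>
#include <cstdio>

int main() {
  // Say: repeated float (4-byte values) with a 1-byte tag, so 5 bytes per
  // element on the wire when the field is repeated but not packed.
  const int tag_size = 1;
  const int per_value_size = tag_size + 4;

  const int spare_capacity = 8;   // Capacity() - size() of the RepeatedField
  const int buffer_bytes = 23;    // bytes left in the current input buffer

  int elements_available = std::min(spare_capacity, buffer_bytes / per_value_size);
  std::printf("can read %d elements with no per-element bounds check\n",
              elements_available);  // min(8, 23 / 5) = 4
  return 0;
}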
+#define READ_REPEATED_FIXED_SIZE_PRIMITIVE(CPPTYPE, DECLARED_TYPE) \ +template <> \ +inline bool WireFormatLite::ReadRepeatedPrimitive< \ + CPPTYPE, WireFormatLite::DECLARED_TYPE>( \ + int tag_size, \ + uint32 tag, \ + io::CodedInputStream* input, \ + RepeatedField* values) { \ + return ReadRepeatedFixedSizePrimitive< \ + CPPTYPE, WireFormatLite::DECLARED_TYPE>( \ + tag_size, tag, input, values); \ +} + +READ_REPEATED_FIXED_SIZE_PRIMITIVE(uint32, TYPE_FIXED32); +READ_REPEATED_FIXED_SIZE_PRIMITIVE(uint64, TYPE_FIXED64); +READ_REPEATED_FIXED_SIZE_PRIMITIVE(int32, TYPE_SFIXED32); +READ_REPEATED_FIXED_SIZE_PRIMITIVE(int64, TYPE_SFIXED64); +READ_REPEATED_FIXED_SIZE_PRIMITIVE(float, TYPE_FLOAT); +READ_REPEATED_FIXED_SIZE_PRIMITIVE(double, TYPE_DOUBLE); + +#undef READ_REPEATED_FIXED_SIZE_PRIMITIVE + +template +bool WireFormatLite::ReadRepeatedPrimitiveNoInline( + int tag_size, + uint32 tag, + io::CodedInputStream* input, + RepeatedField* value) { + return ReadRepeatedPrimitive( + tag_size, tag, input, value); +} + +template +inline bool WireFormatLite::ReadPackedPrimitive(io::CodedInputStream* input, + RepeatedField* values) { + uint32 length; + if (!input->ReadVarint32(&length)) return false; + io::CodedInputStream::Limit limit = input->PushLimit(length); + while (input->BytesUntilLimit() > 0) { + CType value; + if (!ReadPrimitive(input, &value)) return false; + values->Add(value); + } + input->PopLimit(limit); + return true; +} + +template +bool WireFormatLite::ReadPackedPrimitiveNoInline(io::CodedInputStream* input, + RepeatedField* values) { + return ReadPackedPrimitive(input, values); +} + + +inline bool WireFormatLite::ReadGroup(int field_number, + io::CodedInputStream* input, + MessageLite* value) { + if (!input->IncrementRecursionDepth()) return false; + if (!value->MergePartialFromCodedStream(input)) return false; + input->DecrementRecursionDepth(); + // Make sure the last thing read was an end tag for this group. + if (!input->LastTagWas(MakeTag(field_number, WIRETYPE_END_GROUP))) { + return false; + } + return true; +} +inline bool WireFormatLite::ReadMessage(io::CodedInputStream* input, + MessageLite* value) { + uint32 length; + if (!input->ReadVarint32(&length)) return false; + if (!input->IncrementRecursionDepth()) return false; + io::CodedInputStream::Limit limit = input->PushLimit(length); + if (!value->MergePartialFromCodedStream(input)) return false; + // Make sure that parsing stopped when the limit was hit, not at an endgroup + // tag. + if (!input->ConsumedEntireMessage()) return false; + input->PopLimit(limit); + input->DecrementRecursionDepth(); + return true; +} + +// We name the template parameter something long and extremely unlikely to occur +// elsewhere because a *qualified* member access expression designed to avoid +// virtual dispatch, C++03 [basic.lookup.classref] 3.4.5/4 requires that the +// name of the qualifying class to be looked up both in the context of the full +// expression (finding the template parameter) and in the context of the object +// whose member we are accessing. This could potentially find a nested type +// within that object. The standard goes on to require these names to refer to +// the same entity, which this collision would violate. The lack of a safe way +// to avoid this collision appears to be a defect in the standard, but until it +// is corrected, we choose the name to avoid accidental collisions. 
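The naming comment above comes down to a general C++ rule: a qualified call such as value.SomeType::Method() is bound statically, which is exactly what the NoVirtual helpers below rely on. A minimal standalone illustration of that rule, using throwaway types rather than anything from protobuf:

#include <cstdio>

struct Base {
  virtual ~Base() {}
  virtual const char* Name() const { return "Base"; }
};

struct Derived : Base {
  const char* Name() const override { return "Derived"; }
};

template <typename MessageType>  // stands in for MessageType_WorkAroundCppLookupDefect
const char* NameNoVirtual(const MessageType& value) {
  // Qualified call: bound at compile time to MessageType::Name, no vtable lookup.
  return value.MessageType::Name();
}

int main() {
  Derived d;
  const Base& as_base = d;
  std::printf("virtual   : %s\n", as_base.Name());    // Derived, via the vtable
  std::printf("qualified : %s\n", NameNoVirtual(d));  // Derived, resolved statically
  return 0;
}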
+template +inline bool WireFormatLite::ReadGroupNoVirtual( + int field_number, io::CodedInputStream* input, + MessageType_WorkAroundCppLookupDefect* value) { + if (!input->IncrementRecursionDepth()) return false; + if (!value-> + MessageType_WorkAroundCppLookupDefect::MergePartialFromCodedStream(input)) + return false; + input->DecrementRecursionDepth(); + // Make sure the last thing read was an end tag for this group. + if (!input->LastTagWas(MakeTag(field_number, WIRETYPE_END_GROUP))) { + return false; + } + return true; +} +template +inline bool WireFormatLite::ReadMessageNoVirtual( + io::CodedInputStream* input, MessageType_WorkAroundCppLookupDefect* value) { + uint32 length; + if (!input->ReadVarint32(&length)) return false; + if (!input->IncrementRecursionDepth()) return false; + io::CodedInputStream::Limit limit = input->PushLimit(length); + if (!value-> + MessageType_WorkAroundCppLookupDefect::MergePartialFromCodedStream(input)) + return false; + // Make sure that parsing stopped when the limit was hit, not at an endgroup + // tag. + if (!input->ConsumedEntireMessage()) return false; + input->PopLimit(limit); + input->DecrementRecursionDepth(); + return true; +} + +// =================================================================== + +inline void WireFormatLite::WriteTag(int field_number, WireType type, + io::CodedOutputStream* output) { + output->WriteTag(MakeTag(field_number, type)); +} + +inline void WireFormatLite::WriteInt32NoTag(int32 value, + io::CodedOutputStream* output) { + output->WriteVarint32SignExtended(value); +} +inline void WireFormatLite::WriteInt64NoTag(int64 value, + io::CodedOutputStream* output) { + output->WriteVarint64(static_cast(value)); +} +inline void WireFormatLite::WriteUInt32NoTag(uint32 value, + io::CodedOutputStream* output) { + output->WriteVarint32(value); +} +inline void WireFormatLite::WriteUInt64NoTag(uint64 value, + io::CodedOutputStream* output) { + output->WriteVarint64(value); +} +inline void WireFormatLite::WriteSInt32NoTag(int32 value, + io::CodedOutputStream* output) { + output->WriteVarint32(ZigZagEncode32(value)); +} +inline void WireFormatLite::WriteSInt64NoTag(int64 value, + io::CodedOutputStream* output) { + output->WriteVarint64(ZigZagEncode64(value)); +} +inline void WireFormatLite::WriteFixed32NoTag(uint32 value, + io::CodedOutputStream* output) { + output->WriteLittleEndian32(value); +} +inline void WireFormatLite::WriteFixed64NoTag(uint64 value, + io::CodedOutputStream* output) { + output->WriteLittleEndian64(value); +} +inline void WireFormatLite::WriteSFixed32NoTag(int32 value, + io::CodedOutputStream* output) { + output->WriteLittleEndian32(static_cast(value)); +} +inline void WireFormatLite::WriteSFixed64NoTag(int64 value, + io::CodedOutputStream* output) { + output->WriteLittleEndian64(static_cast(value)); +} +inline void WireFormatLite::WriteFloatNoTag(float value, + io::CodedOutputStream* output) { + output->WriteLittleEndian32(EncodeFloat(value)); +} +inline void WireFormatLite::WriteDoubleNoTag(double value, + io::CodedOutputStream* output) { + output->WriteLittleEndian64(EncodeDouble(value)); +} +inline void WireFormatLite::WriteBoolNoTag(bool value, + io::CodedOutputStream* output) { + output->WriteVarint32(value ? 1 : 0); +} +inline void WireFormatLite::WriteEnumNoTag(int value, + io::CodedOutputStream* output) { + output->WriteVarint32SignExtended(value); +} + +// See comment on ReadGroupNoVirtual to understand the need for this template +// parameter name. 
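WriteFloatNoTag above is EncodeFloat plus a 4-byte little-endian write. A standalone sketch of the same conversion, spelled with memcpy instead of the union used in the header (the sample value 1.5f is arbitrary):

#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  // EncodeFloat reinterprets the IEEE-754 bits; memcpy is the portable spelling.
  float value = 1.5f;
  uint32_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  std::printf("bits = 0x%08x\n", static_cast<unsigned>(bits));  // 0x3fc00000 for 1.5f

  // WriteLittleEndian32 then emits the 4 bytes low-order first.
  uint8_t wire[4];
  for (int i = 0; i < 4; ++i) wire[i] = static_cast<uint8_t>(bits >> (8 * i));
  std::printf("on the wire: %02x %02x %02x %02x\n",
              static_cast<unsigned>(wire[0]), static_cast<unsigned>(wire[1]),
              static_cast<unsigned>(wire[2]), static_cast<unsigned>(wire[3]));
  // 00 00 c0 3f
  return 0;
}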
+template +inline void WireFormatLite::WriteGroupNoVirtual( + int field_number, const MessageType_WorkAroundCppLookupDefect& value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_START_GROUP, output); + value.MessageType_WorkAroundCppLookupDefect::SerializeWithCachedSizes(output); + WriteTag(field_number, WIRETYPE_END_GROUP, output); +} +template +inline void WireFormatLite::WriteMessageNoVirtual( + int field_number, const MessageType_WorkAroundCppLookupDefect& value, + io::CodedOutputStream* output) { + WriteTag(field_number, WIRETYPE_LENGTH_DELIMITED, output); + output->WriteVarint32( + value.MessageType_WorkAroundCppLookupDefect::GetCachedSize()); + value.MessageType_WorkAroundCppLookupDefect::SerializeWithCachedSizes(output); +} + +// =================================================================== + +inline uint8* WireFormatLite::WriteTagToArray(int field_number, + WireType type, + uint8* target) { + return io::CodedOutputStream::WriteTagToArray(MakeTag(field_number, type), + target); +} + +inline uint8* WireFormatLite::WriteInt32NoTagToArray(int32 value, + uint8* target) { + return io::CodedOutputStream::WriteVarint32SignExtendedToArray(value, target); +} +inline uint8* WireFormatLite::WriteInt64NoTagToArray(int64 value, + uint8* target) { + return io::CodedOutputStream::WriteVarint64ToArray( + static_cast(value), target); +} +inline uint8* WireFormatLite::WriteUInt32NoTagToArray(uint32 value, + uint8* target) { + return io::CodedOutputStream::WriteVarint32ToArray(value, target); +} +inline uint8* WireFormatLite::WriteUInt64NoTagToArray(uint64 value, + uint8* target) { + return io::CodedOutputStream::WriteVarint64ToArray(value, target); +} +inline uint8* WireFormatLite::WriteSInt32NoTagToArray(int32 value, + uint8* target) { + return io::CodedOutputStream::WriteVarint32ToArray(ZigZagEncode32(value), + target); +} +inline uint8* WireFormatLite::WriteSInt64NoTagToArray(int64 value, + uint8* target) { + return io::CodedOutputStream::WriteVarint64ToArray(ZigZagEncode64(value), + target); +} +inline uint8* WireFormatLite::WriteFixed32NoTagToArray(uint32 value, + uint8* target) { + return io::CodedOutputStream::WriteLittleEndian32ToArray(value, target); +} +inline uint8* WireFormatLite::WriteFixed64NoTagToArray(uint64 value, + uint8* target) { + return io::CodedOutputStream::WriteLittleEndian64ToArray(value, target); +} +inline uint8* WireFormatLite::WriteSFixed32NoTagToArray(int32 value, + uint8* target) { + return io::CodedOutputStream::WriteLittleEndian32ToArray( + static_cast(value), target); +} +inline uint8* WireFormatLite::WriteSFixed64NoTagToArray(int64 value, + uint8* target) { + return io::CodedOutputStream::WriteLittleEndian64ToArray( + static_cast(value), target); +} +inline uint8* WireFormatLite::WriteFloatNoTagToArray(float value, + uint8* target) { + return io::CodedOutputStream::WriteLittleEndian32ToArray(EncodeFloat(value), + target); +} +inline uint8* WireFormatLite::WriteDoubleNoTagToArray(double value, + uint8* target) { + return io::CodedOutputStream::WriteLittleEndian64ToArray(EncodeDouble(value), + target); +} +inline uint8* WireFormatLite::WriteBoolNoTagToArray(bool value, + uint8* target) { + return io::CodedOutputStream::WriteVarint32ToArray(value ? 
1 : 0, target); +} +inline uint8* WireFormatLite::WriteEnumNoTagToArray(int value, + uint8* target) { + return io::CodedOutputStream::WriteVarint32SignExtendedToArray(value, target); +} + +inline uint8* WireFormatLite::WriteInt32ToArray(int field_number, + int32 value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_VARINT, target); + return WriteInt32NoTagToArray(value, target); +} +inline uint8* WireFormatLite::WriteInt64ToArray(int field_number, + int64 value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_VARINT, target); + return WriteInt64NoTagToArray(value, target); +} +inline uint8* WireFormatLite::WriteUInt32ToArray(int field_number, + uint32 value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_VARINT, target); + return WriteUInt32NoTagToArray(value, target); +} +inline uint8* WireFormatLite::WriteUInt64ToArray(int field_number, + uint64 value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_VARINT, target); + return WriteUInt64NoTagToArray(value, target); +} +inline uint8* WireFormatLite::WriteSInt32ToArray(int field_number, + int32 value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_VARINT, target); + return WriteSInt32NoTagToArray(value, target); +} +inline uint8* WireFormatLite::WriteSInt64ToArray(int field_number, + int64 value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_VARINT, target); + return WriteSInt64NoTagToArray(value, target); +} +inline uint8* WireFormatLite::WriteFixed32ToArray(int field_number, + uint32 value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_FIXED32, target); + return WriteFixed32NoTagToArray(value, target); +} +inline uint8* WireFormatLite::WriteFixed64ToArray(int field_number, + uint64 value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_FIXED64, target); + return WriteFixed64NoTagToArray(value, target); +} +inline uint8* WireFormatLite::WriteSFixed32ToArray(int field_number, + int32 value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_FIXED32, target); + return WriteSFixed32NoTagToArray(value, target); +} +inline uint8* WireFormatLite::WriteSFixed64ToArray(int field_number, + int64 value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_FIXED64, target); + return WriteSFixed64NoTagToArray(value, target); +} +inline uint8* WireFormatLite::WriteFloatToArray(int field_number, + float value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_FIXED32, target); + return WriteFloatNoTagToArray(value, target); +} +inline uint8* WireFormatLite::WriteDoubleToArray(int field_number, + double value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_FIXED64, target); + return WriteDoubleNoTagToArray(value, target); +} +inline uint8* WireFormatLite::WriteBoolToArray(int field_number, + bool value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_VARINT, target); + return WriteBoolNoTagToArray(value, target); +} +inline uint8* WireFormatLite::WriteEnumToArray(int field_number, + int value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_VARINT, target); + return WriteEnumNoTagToArray(value, target); +} + +inline uint8* WireFormatLite::WriteStringToArray(int field_number, + const string& value, + uint8* target) { + // String is for UTF-8 text only + // WARNING: In wire_format.cc, both strings and bytes are handled by + // WriteString() to avoid code duplication. 
If the implementations become + // different, you will need to update that usage. + target = WriteTagToArray(field_number, WIRETYPE_LENGTH_DELIMITED, target); + target = io::CodedOutputStream::WriteVarint32ToArray(value.size(), target); + return io::CodedOutputStream::WriteStringToArray(value, target); +} +inline uint8* WireFormatLite::WriteBytesToArray(int field_number, + const string& value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_LENGTH_DELIMITED, target); + target = io::CodedOutputStream::WriteVarint32ToArray(value.size(), target); + return io::CodedOutputStream::WriteStringToArray(value, target); +} + + +inline uint8* WireFormatLite::WriteGroupToArray(int field_number, + const MessageLite& value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_START_GROUP, target); + target = value.SerializeWithCachedSizesToArray(target); + return WriteTagToArray(field_number, WIRETYPE_END_GROUP, target); +} +inline uint8* WireFormatLite::WriteMessageToArray(int field_number, + const MessageLite& value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_LENGTH_DELIMITED, target); + target = io::CodedOutputStream::WriteVarint32ToArray( + value.GetCachedSize(), target); + return value.SerializeWithCachedSizesToArray(target); +} + +// See comment on ReadGroupNoVirtual to understand the need for this template +// parameter name. +template +inline uint8* WireFormatLite::WriteGroupNoVirtualToArray( + int field_number, const MessageType_WorkAroundCppLookupDefect& value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_START_GROUP, target); + target = value.MessageType_WorkAroundCppLookupDefect + ::SerializeWithCachedSizesToArray(target); + return WriteTagToArray(field_number, WIRETYPE_END_GROUP, target); +} +template +inline uint8* WireFormatLite::WriteMessageNoVirtualToArray( + int field_number, const MessageType_WorkAroundCppLookupDefect& value, + uint8* target) { + target = WriteTagToArray(field_number, WIRETYPE_LENGTH_DELIMITED, target); + target = io::CodedOutputStream::WriteVarint32ToArray( + value.MessageType_WorkAroundCppLookupDefect::GetCachedSize(), target); + return value.MessageType_WorkAroundCppLookupDefect + ::SerializeWithCachedSizesToArray(target); +} + +// =================================================================== + +inline int WireFormatLite::Int32Size(int32 value) { + return io::CodedOutputStream::VarintSize32SignExtended(value); +} +inline int WireFormatLite::Int64Size(int64 value) { + return io::CodedOutputStream::VarintSize64(static_cast(value)); +} +inline int WireFormatLite::UInt32Size(uint32 value) { + return io::CodedOutputStream::VarintSize32(value); +} +inline int WireFormatLite::UInt64Size(uint64 value) { + return io::CodedOutputStream::VarintSize64(value); +} +inline int WireFormatLite::SInt32Size(int32 value) { + return io::CodedOutputStream::VarintSize32(ZigZagEncode32(value)); +} +inline int WireFormatLite::SInt64Size(int64 value) { + return io::CodedOutputStream::VarintSize64(ZigZagEncode64(value)); +} +inline int WireFormatLite::EnumSize(int value) { + return io::CodedOutputStream::VarintSize32SignExtended(value); +} + +inline int WireFormatLite::StringSize(const string& value) { + return io::CodedOutputStream::VarintSize32(value.size()) + + value.size(); +} +inline int WireFormatLite::BytesSize(const string& value) { + return io::CodedOutputStream::VarintSize32(value.size()) + + value.size(); +} + + +inline int WireFormatLite::GroupSize(const MessageLite& value) { + return 
value.ByteSize(); +} +inline int WireFormatLite::MessageSize(const MessageLite& value) { + int size = value.ByteSize(); + return io::CodedOutputStream::VarintSize32(size) + size; +} + +// See comment on ReadGroupNoVirtual to understand the need for this template +// parameter name. +template +inline int WireFormatLite::GroupSizeNoVirtual( + const MessageType_WorkAroundCppLookupDefect& value) { + return value.MessageType_WorkAroundCppLookupDefect::ByteSize(); +} +template +inline int WireFormatLite::MessageSizeNoVirtual( + const MessageType_WorkAroundCppLookupDefect& value) { + int size = value.MessageType_WorkAroundCppLookupDefect::ByteSize(); + return io::CodedOutputStream::VarintSize32(size) + size; +} + +} // namespace internal +} // namespace protobuf + +} // namespace google +#endif // GOOGLE_PROTOBUF_WIRE_FORMAT_LITE_INL_H__ diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format_unittest.cc b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format_unittest.cc new file mode 100644 index 0000000000..867970c495 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/google/protobuf/wire_format_unittest.cc @@ -0,0 +1,905 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +namespace google { +namespace protobuf { +namespace internal { +namespace { + +TEST(WireFormatTest, EnumsInSync) { + // Verify that WireFormatLite::FieldType and WireFormatLite::CppType match + // FieldDescriptor::Type and FieldDescriptor::CppType. 
+ + EXPECT_EQ(implicit_cast(FieldDescriptor::MAX_TYPE), + implicit_cast(WireFormatLite::MAX_FIELD_TYPE)); + EXPECT_EQ(implicit_cast(FieldDescriptor::MAX_CPPTYPE), + implicit_cast(WireFormatLite::MAX_CPPTYPE)); + + for (int i = 1; i <= WireFormatLite::MAX_FIELD_TYPE; i++) { + EXPECT_EQ( + implicit_cast(FieldDescriptor::TypeToCppType( + static_cast(i))), + implicit_cast(WireFormatLite::FieldTypeToCppType( + static_cast(i)))); + } +} + +TEST(WireFormatTest, MaxFieldNumber) { + // Make sure the max field number constant is accurate. + EXPECT_EQ((1 << (32 - WireFormatLite::kTagTypeBits)) - 1, + FieldDescriptor::kMaxNumber); +} + +TEST(WireFormatTest, Parse) { + unittest::TestAllTypes source, dest; + string data; + + // Serialize using the generated code. + TestUtil::SetAllFields(&source); + source.SerializeToString(&data); + + // Parse using WireFormat. + io::ArrayInputStream raw_input(data.data(), data.size()); + io::CodedInputStream input(&raw_input); + WireFormat::ParseAndMergePartial(&input, &dest); + + // Check. + TestUtil::ExpectAllFieldsSet(dest); +} + +TEST(WireFormatTest, ParseExtensions) { + unittest::TestAllExtensions source, dest; + string data; + + // Serialize using the generated code. + TestUtil::SetAllExtensions(&source); + source.SerializeToString(&data); + + // Parse using WireFormat. + io::ArrayInputStream raw_input(data.data(), data.size()); + io::CodedInputStream input(&raw_input); + WireFormat::ParseAndMergePartial(&input, &dest); + + // Check. + TestUtil::ExpectAllExtensionsSet(dest); +} + +TEST(WireFormatTest, ParsePacked) { + unittest::TestPackedTypes source, dest; + string data; + + // Serialize using the generated code. + TestUtil::SetPackedFields(&source); + source.SerializeToString(&data); + + // Parse using WireFormat. + io::ArrayInputStream raw_input(data.data(), data.size()); + io::CodedInputStream input(&raw_input); + WireFormat::ParseAndMergePartial(&input, &dest); + + // Check. + TestUtil::ExpectPackedFieldsSet(dest); +} + +TEST(WireFormatTest, ParsePackedFromUnpacked) { + // Serialize using the generated code. + unittest::TestUnpackedTypes source; + TestUtil::SetUnpackedFields(&source); + string data = source.SerializeAsString(); + + // Parse using WireFormat. + unittest::TestPackedTypes dest; + io::ArrayInputStream raw_input(data.data(), data.size()); + io::CodedInputStream input(&raw_input); + WireFormat::ParseAndMergePartial(&input, &dest); + + // Check. + TestUtil::ExpectPackedFieldsSet(dest); +} + +TEST(WireFormatTest, ParseUnpackedFromPacked) { + // Serialize using the generated code. + unittest::TestPackedTypes source; + TestUtil::SetPackedFields(&source); + string data = source.SerializeAsString(); + + // Parse using WireFormat. + unittest::TestUnpackedTypes dest; + io::ArrayInputStream raw_input(data.data(), data.size()); + io::CodedInputStream input(&raw_input); + WireFormat::ParseAndMergePartial(&input, &dest); + + // Check. + TestUtil::ExpectUnpackedFieldsSet(dest); +} + +TEST(WireFormatTest, ParsePackedExtensions) { + unittest::TestPackedExtensions source, dest; + string data; + + // Serialize using the generated code. + TestUtil::SetPackedExtensions(&source); + source.SerializeToString(&data); + + // Parse using WireFormat. + io::ArrayInputStream raw_input(data.data(), data.size()); + io::CodedInputStream input(&raw_input); + WireFormat::ParseAndMergePartial(&input, &dest); + + // Check. 
+ TestUtil::ExpectPackedExtensionsSet(dest); +} + +TEST(WireFormatTest, ByteSize) { + unittest::TestAllTypes message; + TestUtil::SetAllFields(&message); + + EXPECT_EQ(message.ByteSize(), WireFormat::ByteSize(message)); + message.Clear(); + EXPECT_EQ(0, message.ByteSize()); + EXPECT_EQ(0, WireFormat::ByteSize(message)); +} + +TEST(WireFormatTest, ByteSizeExtensions) { + unittest::TestAllExtensions message; + TestUtil::SetAllExtensions(&message); + + EXPECT_EQ(message.ByteSize(), + WireFormat::ByteSize(message)); + message.Clear(); + EXPECT_EQ(0, message.ByteSize()); + EXPECT_EQ(0, WireFormat::ByteSize(message)); +} + +TEST(WireFormatTest, ByteSizePacked) { + unittest::TestPackedTypes message; + TestUtil::SetPackedFields(&message); + + EXPECT_EQ(message.ByteSize(), WireFormat::ByteSize(message)); + message.Clear(); + EXPECT_EQ(0, message.ByteSize()); + EXPECT_EQ(0, WireFormat::ByteSize(message)); +} + +TEST(WireFormatTest, ByteSizePackedExtensions) { + unittest::TestPackedExtensions message; + TestUtil::SetPackedExtensions(&message); + + EXPECT_EQ(message.ByteSize(), + WireFormat::ByteSize(message)); + message.Clear(); + EXPECT_EQ(0, message.ByteSize()); + EXPECT_EQ(0, WireFormat::ByteSize(message)); +} + +TEST(WireFormatTest, Serialize) { + unittest::TestAllTypes message; + string generated_data; + string dynamic_data; + + TestUtil::SetAllFields(&message); + int size = message.ByteSize(); + + // Serialize using the generated code. + { + io::StringOutputStream raw_output(&generated_data); + io::CodedOutputStream output(&raw_output); + message.SerializeWithCachedSizes(&output); + ASSERT_FALSE(output.HadError()); + } + + // Serialize using WireFormat. + { + io::StringOutputStream raw_output(&dynamic_data); + io::CodedOutputStream output(&raw_output); + WireFormat::SerializeWithCachedSizes(message, size, &output); + ASSERT_FALSE(output.HadError()); + } + + // Should be the same. + // Don't use EXPECT_EQ here because we're comparing raw binary data and + // we really don't want it dumped to stdout on failure. + EXPECT_TRUE(dynamic_data == generated_data); +} + +TEST(WireFormatTest, SerializeExtensions) { + unittest::TestAllExtensions message; + string generated_data; + string dynamic_data; + + TestUtil::SetAllExtensions(&message); + int size = message.ByteSize(); + + // Serialize using the generated code. + { + io::StringOutputStream raw_output(&generated_data); + io::CodedOutputStream output(&raw_output); + message.SerializeWithCachedSizes(&output); + ASSERT_FALSE(output.HadError()); + } + + // Serialize using WireFormat. + { + io::StringOutputStream raw_output(&dynamic_data); + io::CodedOutputStream output(&raw_output); + WireFormat::SerializeWithCachedSizes(message, size, &output); + ASSERT_FALSE(output.HadError()); + } + + // Should be the same. + // Don't use EXPECT_EQ here because we're comparing raw binary data and + // we really don't want it dumped to stdout on failure. + EXPECT_TRUE(dynamic_data == generated_data); +} + +TEST(WireFormatTest, SerializeFieldsAndExtensions) { + unittest::TestFieldOrderings message; + string generated_data; + string dynamic_data; + + TestUtil::SetAllFieldsAndExtensions(&message); + int size = message.ByteSize(); + + // Serialize using the generated code. + { + io::StringOutputStream raw_output(&generated_data); + io::CodedOutputStream output(&raw_output); + message.SerializeWithCachedSizes(&output); + ASSERT_FALSE(output.HadError()); + } + + // Serialize using WireFormat. 
+ { + io::StringOutputStream raw_output(&dynamic_data); + io::CodedOutputStream output(&raw_output); + WireFormat::SerializeWithCachedSizes(message, size, &output); + ASSERT_FALSE(output.HadError()); + } + + // Should be the same. + // Don't use EXPECT_EQ here because we're comparing raw binary data and + // we really don't want it dumped to stdout on failure. + EXPECT_TRUE(dynamic_data == generated_data); + + // Should output in canonical order. + TestUtil::ExpectAllFieldsAndExtensionsInOrder(dynamic_data); + TestUtil::ExpectAllFieldsAndExtensionsInOrder(generated_data); +} + +TEST(WireFormatTest, ParseMultipleExtensionRanges) { + // Make sure we can parse a message that contains multiple extensions ranges. + unittest::TestFieldOrderings source; + string data; + + TestUtil::SetAllFieldsAndExtensions(&source); + source.SerializeToString(&data); + + { + unittest::TestFieldOrderings dest; + EXPECT_TRUE(dest.ParseFromString(data)); + EXPECT_EQ(source.DebugString(), dest.DebugString()); + } + + // Also test using reflection-based parsing. + { + unittest::TestFieldOrderings dest; + io::ArrayInputStream raw_input(data.data(), data.size()); + io::CodedInputStream coded_input(&raw_input); + EXPECT_TRUE(WireFormat::ParseAndMergePartial(&coded_input, &dest)); + EXPECT_EQ(source.DebugString(), dest.DebugString()); + } +} + +const int kUnknownTypeId = 1550055; + +TEST(WireFormatTest, SerializeMessageSet) { + // Set up a TestMessageSet with two known messages and an unknown one. + unittest::TestMessageSet message_set; + message_set.MutableExtension( + unittest::TestMessageSetExtension1::message_set_extension)->set_i(123); + message_set.MutableExtension( + unittest::TestMessageSetExtension2::message_set_extension)->set_str("foo"); + message_set.mutable_unknown_fields()->AddLengthDelimited( + kUnknownTypeId, "bar"); + + string data; + ASSERT_TRUE(message_set.SerializeToString(&data)); + + // Parse back using RawMessageSet and check the contents. + unittest::RawMessageSet raw; + ASSERT_TRUE(raw.ParseFromString(data)); + + EXPECT_EQ(0, raw.unknown_fields().field_count()); + + ASSERT_EQ(3, raw.item_size()); + EXPECT_EQ( + unittest::TestMessageSetExtension1::descriptor()->extension(0)->number(), + raw.item(0).type_id()); + EXPECT_EQ( + unittest::TestMessageSetExtension2::descriptor()->extension(0)->number(), + raw.item(1).type_id()); + EXPECT_EQ(kUnknownTypeId, raw.item(2).type_id()); + + unittest::TestMessageSetExtension1 message1; + EXPECT_TRUE(message1.ParseFromString(raw.item(0).message())); + EXPECT_EQ(123, message1.i()); + + unittest::TestMessageSetExtension2 message2; + EXPECT_TRUE(message2.ParseFromString(raw.item(1).message())); + EXPECT_EQ("foo", message2.str()); + + EXPECT_EQ("bar", raw.item(2).message()); +} + +TEST(WireFormatTest, SerializeMessageSetVariousWaysAreEqual) { + // Serialize a MessageSet to a stream and to a flat array using generated + // code, and also using WireFormat, and check that the results are equal. + // Set up a TestMessageSet with two known messages and an unknown one, as + // above. 
+ + unittest::TestMessageSet message_set; + message_set.MutableExtension( + unittest::TestMessageSetExtension1::message_set_extension)->set_i(123); + message_set.MutableExtension( + unittest::TestMessageSetExtension2::message_set_extension)->set_str("foo"); + message_set.mutable_unknown_fields()->AddLengthDelimited( + kUnknownTypeId, "bar"); + + int size = message_set.ByteSize(); + EXPECT_EQ(size, message_set.GetCachedSize()); + ASSERT_EQ(size, WireFormat::ByteSize(message_set)); + + string flat_data; + string stream_data; + string dynamic_data; + flat_data.resize(size); + stream_data.resize(size); + + // Serialize to flat array + { + uint8* target = reinterpret_cast(string_as_array(&flat_data)); + uint8* end = message_set.SerializeWithCachedSizesToArray(target); + EXPECT_EQ(size, end - target); + } + + // Serialize to buffer + { + io::ArrayOutputStream array_stream(string_as_array(&stream_data), size, 1); + io::CodedOutputStream output_stream(&array_stream); + message_set.SerializeWithCachedSizes(&output_stream); + ASSERT_FALSE(output_stream.HadError()); + } + + // Serialize to buffer with WireFormat. + { + io::StringOutputStream string_stream(&dynamic_data); + io::CodedOutputStream output_stream(&string_stream); + WireFormat::SerializeWithCachedSizes(message_set, size, &output_stream); + ASSERT_FALSE(output_stream.HadError()); + } + + EXPECT_TRUE(flat_data == stream_data); + EXPECT_TRUE(flat_data == dynamic_data); +} + +TEST(WireFormatTest, ParseMessageSet) { + // Set up a RawMessageSet with two known messages and an unknown one. + unittest::RawMessageSet raw; + + { + unittest::RawMessageSet::Item* item = raw.add_item(); + item->set_type_id( + unittest::TestMessageSetExtension1::descriptor()->extension(0)->number()); + unittest::TestMessageSetExtension1 message; + message.set_i(123); + message.SerializeToString(item->mutable_message()); + } + + { + unittest::RawMessageSet::Item* item = raw.add_item(); + item->set_type_id( + unittest::TestMessageSetExtension2::descriptor()->extension(0)->number()); + unittest::TestMessageSetExtension2 message; + message.set_str("foo"); + message.SerializeToString(item->mutable_message()); + } + + { + unittest::RawMessageSet::Item* item = raw.add_item(); + item->set_type_id(kUnknownTypeId); + item->set_message("bar"); + } + + string data; + ASSERT_TRUE(raw.SerializeToString(&data)); + + // Parse as a TestMessageSet and check the contents. + unittest::TestMessageSet message_set; + ASSERT_TRUE(message_set.ParseFromString(data)); + + EXPECT_EQ(123, message_set.GetExtension( + unittest::TestMessageSetExtension1::message_set_extension).i()); + EXPECT_EQ("foo", message_set.GetExtension( + unittest::TestMessageSetExtension2::message_set_extension).str()); + + ASSERT_EQ(1, message_set.unknown_fields().field_count()); + ASSERT_EQ(UnknownField::TYPE_LENGTH_DELIMITED, + message_set.unknown_fields().field(0).type()); + EXPECT_EQ("bar", message_set.unknown_fields().field(0).length_delimited()); + + // Also parse using WireFormat. 
+ unittest::TestMessageSet dynamic_message_set; + io::CodedInputStream input(reinterpret_cast(data.data()), + data.size()); + ASSERT_TRUE(WireFormat::ParseAndMergePartial(&input, &dynamic_message_set)); + EXPECT_EQ(message_set.DebugString(), dynamic_message_set.DebugString()); +} + +TEST(WireFormatTest, RecursionLimit) { + unittest::TestRecursiveMessage message; + message.mutable_a()->mutable_a()->mutable_a()->mutable_a()->set_i(1); + string data; + message.SerializeToString(&data); + + { + io::ArrayInputStream raw_input(data.data(), data.size()); + io::CodedInputStream input(&raw_input); + input.SetRecursionLimit(4); + unittest::TestRecursiveMessage message2; + EXPECT_TRUE(message2.ParseFromCodedStream(&input)); + } + + { + io::ArrayInputStream raw_input(data.data(), data.size()); + io::CodedInputStream input(&raw_input); + input.SetRecursionLimit(3); + unittest::TestRecursiveMessage message2; + EXPECT_FALSE(message2.ParseFromCodedStream(&input)); + } +} + +TEST(WireFormatTest, UnknownFieldRecursionLimit) { + unittest::TestEmptyMessage message; + message.mutable_unknown_fields() + ->AddGroup(1234) + ->AddGroup(1234) + ->AddGroup(1234) + ->AddGroup(1234) + ->AddVarint(1234, 123); + string data; + message.SerializeToString(&data); + + { + io::ArrayInputStream raw_input(data.data(), data.size()); + io::CodedInputStream input(&raw_input); + input.SetRecursionLimit(4); + unittest::TestEmptyMessage message2; + EXPECT_TRUE(message2.ParseFromCodedStream(&input)); + } + + { + io::ArrayInputStream raw_input(data.data(), data.size()); + io::CodedInputStream input(&raw_input); + input.SetRecursionLimit(3); + unittest::TestEmptyMessage message2; + EXPECT_FALSE(message2.ParseFromCodedStream(&input)); + } +} + +TEST(WireFormatTest, ZigZag) { +// avoid line-wrapping +#define LL(x) GOOGLE_LONGLONG(x) +#define ULL(x) GOOGLE_ULONGLONG(x) +#define ZigZagEncode32(x) WireFormatLite::ZigZagEncode32(x) +#define ZigZagDecode32(x) WireFormatLite::ZigZagDecode32(x) +#define ZigZagEncode64(x) WireFormatLite::ZigZagEncode64(x) +#define ZigZagDecode64(x) WireFormatLite::ZigZagDecode64(x) + + EXPECT_EQ(0u, ZigZagEncode32( 0)); + EXPECT_EQ(1u, ZigZagEncode32(-1)); + EXPECT_EQ(2u, ZigZagEncode32( 1)); + EXPECT_EQ(3u, ZigZagEncode32(-2)); + EXPECT_EQ(0x7FFFFFFEu, ZigZagEncode32(0x3FFFFFFF)); + EXPECT_EQ(0x7FFFFFFFu, ZigZagEncode32(0xC0000000)); + EXPECT_EQ(0xFFFFFFFEu, ZigZagEncode32(0x7FFFFFFF)); + EXPECT_EQ(0xFFFFFFFFu, ZigZagEncode32(0x80000000)); + + EXPECT_EQ( 0, ZigZagDecode32(0u)); + EXPECT_EQ(-1, ZigZagDecode32(1u)); + EXPECT_EQ( 1, ZigZagDecode32(2u)); + EXPECT_EQ(-2, ZigZagDecode32(3u)); + EXPECT_EQ(0x3FFFFFFF, ZigZagDecode32(0x7FFFFFFEu)); + EXPECT_EQ(0xC0000000, ZigZagDecode32(0x7FFFFFFFu)); + EXPECT_EQ(0x7FFFFFFF, ZigZagDecode32(0xFFFFFFFEu)); + EXPECT_EQ(0x80000000, ZigZagDecode32(0xFFFFFFFFu)); + + EXPECT_EQ(0u, ZigZagEncode64( 0)); + EXPECT_EQ(1u, ZigZagEncode64(-1)); + EXPECT_EQ(2u, ZigZagEncode64( 1)); + EXPECT_EQ(3u, ZigZagEncode64(-2)); + EXPECT_EQ(ULL(0x000000007FFFFFFE), ZigZagEncode64(LL(0x000000003FFFFFFF))); + EXPECT_EQ(ULL(0x000000007FFFFFFF), ZigZagEncode64(LL(0xFFFFFFFFC0000000))); + EXPECT_EQ(ULL(0x00000000FFFFFFFE), ZigZagEncode64(LL(0x000000007FFFFFFF))); + EXPECT_EQ(ULL(0x00000000FFFFFFFF), ZigZagEncode64(LL(0xFFFFFFFF80000000))); + EXPECT_EQ(ULL(0xFFFFFFFFFFFFFFFE), ZigZagEncode64(LL(0x7FFFFFFFFFFFFFFF))); + EXPECT_EQ(ULL(0xFFFFFFFFFFFFFFFF), ZigZagEncode64(LL(0x8000000000000000))); + + EXPECT_EQ( 0, ZigZagDecode64(0u)); + EXPECT_EQ(-1, ZigZagDecode64(1u)); + EXPECT_EQ( 1, 
ZigZagDecode64(2u)); + EXPECT_EQ(-2, ZigZagDecode64(3u)); + EXPECT_EQ(LL(0x000000003FFFFFFF), ZigZagDecode64(ULL(0x000000007FFFFFFE))); + EXPECT_EQ(LL(0xFFFFFFFFC0000000), ZigZagDecode64(ULL(0x000000007FFFFFFF))); + EXPECT_EQ(LL(0x000000007FFFFFFF), ZigZagDecode64(ULL(0x00000000FFFFFFFE))); + EXPECT_EQ(LL(0xFFFFFFFF80000000), ZigZagDecode64(ULL(0x00000000FFFFFFFF))); + EXPECT_EQ(LL(0x7FFFFFFFFFFFFFFF), ZigZagDecode64(ULL(0xFFFFFFFFFFFFFFFE))); + EXPECT_EQ(LL(0x8000000000000000), ZigZagDecode64(ULL(0xFFFFFFFFFFFFFFFF))); + + // Some easier-to-verify round-trip tests. The inputs (other than 0, 1, -1) + // were chosen semi-randomly via keyboard bashing. + EXPECT_EQ( 0, ZigZagDecode32(ZigZagEncode32( 0))); + EXPECT_EQ( 1, ZigZagDecode32(ZigZagEncode32( 1))); + EXPECT_EQ( -1, ZigZagDecode32(ZigZagEncode32( -1))); + EXPECT_EQ(14927, ZigZagDecode32(ZigZagEncode32(14927))); + EXPECT_EQ(-3612, ZigZagDecode32(ZigZagEncode32(-3612))); + + EXPECT_EQ( 0, ZigZagDecode64(ZigZagEncode64( 0))); + EXPECT_EQ( 1, ZigZagDecode64(ZigZagEncode64( 1))); + EXPECT_EQ( -1, ZigZagDecode64(ZigZagEncode64( -1))); + EXPECT_EQ(14927, ZigZagDecode64(ZigZagEncode64(14927))); + EXPECT_EQ(-3612, ZigZagDecode64(ZigZagEncode64(-3612))); + + EXPECT_EQ(LL(856912304801416), ZigZagDecode64(ZigZagEncode64( + LL(856912304801416)))); + EXPECT_EQ(LL(-75123905439571256), ZigZagDecode64(ZigZagEncode64( + LL(-75123905439571256)))); +} + +TEST(WireFormatTest, RepeatedScalarsDifferentTagSizes) { + // At one point checks would trigger when parsing repeated fixed scalar + // fields. + protobuf_unittest::TestRepeatedScalarDifferentTagSizes msg1, msg2; + for (int i = 0; i < 100; ++i) { + msg1.add_repeated_fixed32(i); + msg1.add_repeated_int32(i); + msg1.add_repeated_fixed64(i); + msg1.add_repeated_int64(i); + msg1.add_repeated_float(i); + msg1.add_repeated_uint64(i); + } + + // Make sure that we have a variety of tag sizes. + const google::protobuf::Descriptor* desc = msg1.GetDescriptor(); + const google::protobuf::FieldDescriptor* field; + field = desc->FindFieldByName("repeated_fixed32"); + ASSERT_TRUE(field != NULL); + ASSERT_EQ(1, WireFormat::TagSize(field->number(), field->type())); + field = desc->FindFieldByName("repeated_int32"); + ASSERT_TRUE(field != NULL); + ASSERT_EQ(1, WireFormat::TagSize(field->number(), field->type())); + field = desc->FindFieldByName("repeated_fixed64"); + ASSERT_TRUE(field != NULL); + ASSERT_EQ(2, WireFormat::TagSize(field->number(), field->type())); + field = desc->FindFieldByName("repeated_int64"); + ASSERT_TRUE(field != NULL); + ASSERT_EQ(2, WireFormat::TagSize(field->number(), field->type())); + field = desc->FindFieldByName("repeated_float"); + ASSERT_TRUE(field != NULL); + ASSERT_EQ(3, WireFormat::TagSize(field->number(), field->type())); + field = desc->FindFieldByName("repeated_uint64"); + ASSERT_TRUE(field != NULL); + ASSERT_EQ(3, WireFormat::TagSize(field->number(), field->type())); + + EXPECT_TRUE(msg2.ParseFromString(msg1.SerializeAsString())); + EXPECT_EQ(msg1.DebugString(), msg2.DebugString()); +} + +class WireFormatInvalidInputTest : public testing::Test { + protected: + // Make a serialized TestAllTypes in which the field optional_nested_message + // contains exactly the given bytes, which may be invalid. 
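The ZigZag expectations above fall out of the standard zigzag mapping, which interleaves signed values so that small magnitudes (positive or negative) encode to small unsigned varints. A minimal sketch of that mapping follows; the free functions are invented for this note, while the tests themselves go through WireFormatLite::ZigZagEncode32/ZigZagDecode32.

```cpp
#include <cassert>
#include <cstdint>

static uint32_t ZigZag32(int32_t n) {
  // (n << 1) ^ (n >> 31): shift left one bit and fold the sign into bit 0,
  // so -1 -> 1, 1 -> 2, -2 -> 3, ... (arithmetic right shift assumed, as on
  // mainstream compilers).
  return (static_cast<uint32_t>(n) << 1) ^ static_cast<uint32_t>(n >> 31);
}

static int32_t UnZigZag32(uint32_t n) {
  // Undo the interleaving: drop the sign bit from bit 0 and restore it.
  return static_cast<int32_t>(n >> 1) ^ -static_cast<int32_t>(n & 1);
}

int main() {
  assert(ZigZag32(0) == 0u);
  assert(ZigZag32(-1) == 1u);
  assert(ZigZag32(1) == 2u);
  assert(ZigZag32(-2) == 3u);
  assert(UnZigZag32(ZigZag32(14927)) == 14927);
  assert(UnZigZag32(ZigZag32(-3612)) == -3612);
  return 0;
}
```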
+ string MakeInvalidEmbeddedMessage(const char* bytes, int size) { + const FieldDescriptor* field = + unittest::TestAllTypes::descriptor()->FindFieldByName( + "optional_nested_message"); + GOOGLE_CHECK(field != NULL); + + string result; + + { + io::StringOutputStream raw_output(&result); + io::CodedOutputStream output(&raw_output); + + WireFormatLite::WriteBytes(field->number(), string(bytes, size), &output); + } + + return result; + } + + // Make a serialized TestAllTypes in which the field optionalgroup + // contains exactly the given bytes -- which may be invalid -- and + // possibly no end tag. + string MakeInvalidGroup(const char* bytes, int size, bool include_end_tag) { + const FieldDescriptor* field = + unittest::TestAllTypes::descriptor()->FindFieldByName( + "optionalgroup"); + GOOGLE_CHECK(field != NULL); + + string result; + + { + io::StringOutputStream raw_output(&result); + io::CodedOutputStream output(&raw_output); + + output.WriteVarint32(WireFormat::MakeTag(field)); + output.WriteString(string(bytes, size)); + if (include_end_tag) { + output.WriteVarint32(WireFormatLite::MakeTag( + field->number(), WireFormatLite::WIRETYPE_END_GROUP)); + } + } + + return result; + } +}; + +TEST_F(WireFormatInvalidInputTest, InvalidSubMessage) { + unittest::TestAllTypes message; + + // Control case. + EXPECT_TRUE(message.ParseFromString(MakeInvalidEmbeddedMessage("", 0))); + + // The byte is a valid varint, but not a valid tag (zero). + EXPECT_FALSE(message.ParseFromString(MakeInvalidEmbeddedMessage("\0", 1))); + + // The byte is a malformed varint. + EXPECT_FALSE(message.ParseFromString(MakeInvalidEmbeddedMessage("\200", 1))); + + // The byte is an endgroup tag, but we aren't parsing a group. + EXPECT_FALSE(message.ParseFromString(MakeInvalidEmbeddedMessage("\014", 1))); + + // The byte is a valid varint but not a valid tag (bad wire type). + EXPECT_FALSE(message.ParseFromString(MakeInvalidEmbeddedMessage("\017", 1))); +} + +TEST_F(WireFormatInvalidInputTest, InvalidGroup) { + unittest::TestAllTypes message; + + // Control case. + EXPECT_TRUE(message.ParseFromString(MakeInvalidGroup("", 0, true))); + + // Missing end tag. Groups cannot end at EOF. + EXPECT_FALSE(message.ParseFromString(MakeInvalidGroup("", 0, false))); + + // The byte is a valid varint, but not a valid tag (zero). + EXPECT_FALSE(message.ParseFromString(MakeInvalidGroup("\0", 1, false))); + + // The byte is a malformed varint. + EXPECT_FALSE(message.ParseFromString(MakeInvalidGroup("\200", 1, false))); + + // The byte is an endgroup tag, but not the right one for this group. + EXPECT_FALSE(message.ParseFromString(MakeInvalidGroup("\014", 1, false))); + + // The byte is a valid varint but not a valid tag (bad wire type). + EXPECT_FALSE(message.ParseFromString(MakeInvalidGroup("\017", 1, true))); +} + +TEST_F(WireFormatInvalidInputTest, InvalidUnknownGroup) { + // Use TestEmptyMessage so that the group made by MakeInvalidGroup will not + // be a known tag number. + unittest::TestEmptyMessage message; + + // Control case. + EXPECT_TRUE(message.ParseFromString(MakeInvalidGroup("", 0, true))); + + // Missing end tag. Groups cannot end at EOF. + EXPECT_FALSE(message.ParseFromString(MakeInvalidGroup("", 0, false))); + + // The byte is a valid varint, but not a valid tag (zero). + EXPECT_FALSE(message.ParseFromString(MakeInvalidGroup("\0", 1, false))); + + // The byte is a malformed varint. 
+ EXPECT_FALSE(message.ParseFromString(MakeInvalidGroup("\200", 1, false))); + + // The byte is an endgroup tag, but not the right one for this group. + EXPECT_FALSE(message.ParseFromString(MakeInvalidGroup("\014", 1, false))); + + // The byte is a valid varint but not a valid tag (bad wire type). + EXPECT_FALSE(message.ParseFromString(MakeInvalidGroup("\017", 1, true))); +} + +TEST_F(WireFormatInvalidInputTest, InvalidStringInUnknownGroup) { + // Test a bug fix: SkipMessage should fail if the message contains a string + // whose length would extend beyond the message end. + + unittest::TestAllTypes message; + message.set_optional_string("foo foo foo foo"); + string data; + message.SerializeToString(&data); + + // Chop some bytes off the end. + data.resize(data.size() - 4); + + // Try to skip it. Note that the bug was only present when parsing to an + // UnknownFieldSet. + io::ArrayInputStream raw_input(data.data(), data.size()); + io::CodedInputStream coded_input(&raw_input); + UnknownFieldSet unknown_fields; + EXPECT_FALSE(WireFormat::SkipMessage(&coded_input, &unknown_fields)); +} + +// Test differences between string and bytes. +// Value of a string type must be valid UTF-8 string. When UTF-8 +// validation is enabled (GOOGLE_PROTOBUF_UTF8_VALIDATION_ENABLED): +// WriteInvalidUTF8String: see error message. +// ReadInvalidUTF8String: see error message. +// WriteValidUTF8String: fine. +// ReadValidUTF8String: fine. +// WriteAnyBytes: fine. +// ReadAnyBytes: fine. +const char * kInvalidUTF8String = "Invalid UTF-8: \xA0\xB0\xC0\xD0"; +// This used to be "Valid UTF-8: \x01\x02\u8C37\u6B4C", but MSVC seems to +// interpret \u differently from GCC. +const char * kValidUTF8String = "Valid UTF-8: \x01\x02\350\260\267\346\255\214"; + +template +bool WriteMessage(const char *value, T *message, string *wire_buffer) { + message->set_data(value); + wire_buffer->clear(); + message->AppendToString(wire_buffer); + return (wire_buffer->size() > 0); +} + +template +bool ReadMessage(const string &wire_buffer, T *message) { + return message->ParseFromArray(wire_buffer.data(), wire_buffer.size()); +} + +TEST(Utf8ValidationTest, WriteInvalidUTF8String) { + string wire_buffer; + protobuf_unittest::OneString input; + vector errors; + { + ScopedMemoryLog log; + WriteMessage(kInvalidUTF8String, &input, &wire_buffer); + errors = log.GetMessages(ERROR); + } +#ifdef GOOGLE_PROTOBUF_UTF8_VALIDATION_ENABLED + ASSERT_EQ(1, errors.size()); + EXPECT_EQ("Encountered string containing invalid UTF-8 data while " + "serializing protocol buffer. Strings must contain only UTF-8; " + "use the 'bytes' type for raw bytes.", + errors[0]); + +#else + ASSERT_EQ(0, errors.size()); +#endif // GOOGLE_PROTOBUF_UTF8_VALIDATION_ENABLED +} + +TEST(Utf8ValidationTest, ReadInvalidUTF8String) { + string wire_buffer; + protobuf_unittest::OneString input; + WriteMessage(kInvalidUTF8String, &input, &wire_buffer); + protobuf_unittest::OneString output; + vector errors; + { + ScopedMemoryLog log; + ReadMessage(wire_buffer, &output); + errors = log.GetMessages(ERROR); + } +#ifdef GOOGLE_PROTOBUF_UTF8_VALIDATION_ENABLED + ASSERT_EQ(1, errors.size()); + EXPECT_EQ("Encountered string containing invalid UTF-8 data while " + "parsing protocol buffer. 
Strings must contain only UTF-8; " + "use the 'bytes' type for raw bytes.", + errors[0]); + +#else + ASSERT_EQ(0, errors.size()); +#endif // GOOGLE_PROTOBUF_UTF8_VALIDATION_ENABLED +} + +TEST(Utf8ValidationTest, WriteValidUTF8String) { + string wire_buffer; + protobuf_unittest::OneString input; + vector errors; + { + ScopedMemoryLog log; + WriteMessage(kValidUTF8String, &input, &wire_buffer); + errors = log.GetMessages(ERROR); + } + ASSERT_EQ(0, errors.size()); +} + +TEST(Utf8ValidationTest, ReadValidUTF8String) { + string wire_buffer; + protobuf_unittest::OneString input; + WriteMessage(kValidUTF8String, &input, &wire_buffer); + protobuf_unittest::OneString output; + vector errors; + { + ScopedMemoryLog log; + ReadMessage(wire_buffer, &output); + errors = log.GetMessages(ERROR); + } + ASSERT_EQ(0, errors.size()); + EXPECT_EQ(input.data(), output.data()); +} + +// Bytes: anything can pass as bytes, use invalid UTF-8 string to test +TEST(Utf8ValidationTest, WriteArbitraryBytes) { + string wire_buffer; + protobuf_unittest::OneBytes input; + vector errors; + { + ScopedMemoryLog log; + WriteMessage(kInvalidUTF8String, &input, &wire_buffer); + errors = log.GetMessages(ERROR); + } + ASSERT_EQ(0, errors.size()); +} + +TEST(Utf8ValidationTest, ReadArbitraryBytes) { + string wire_buffer; + protobuf_unittest::OneBytes input; + WriteMessage(kInvalidUTF8String, &input, &wire_buffer); + protobuf_unittest::OneBytes output; + vector errors; + { + ScopedMemoryLog log; + ReadMessage(wire_buffer, &output); + errors = log.GetMessages(ERROR); + } + ASSERT_EQ(0, errors.size()); + EXPECT_EQ(input.data(), output.data()); +} + +} // namespace +} // namespace internal +} // namespace protobuf +} // namespace google diff --git a/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/solaris/libstdc++.la b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/solaris/libstdc++.la new file mode 100644 index 0000000000..3edf425419 --- /dev/null +++ b/TMessagesProj/jni/third_party/breakpad/src/third_party/protobuf/protobuf/src/solaris/libstdc++.la @@ -0,0 +1,51 @@ +# libstdc++.la - a libtool library file +# Generated by ltmain.sh - GNU libtool 1.4a-GCC3.0 (1.641.2.256 2001/05/28 20:09:07 with GCC-local changes) +# +# Please DO NOT delete this file! +# It is necessary for linking the library. + +# --- +# NOTE: This file lives in /usr/sfw/lib on Solaris 10. Unfortunately, +# due to an apparent bug in the Solaris 10 6/06 release, +# /usr/sfw/lib/libstdc++.la is empty. Below is the correct content, +# according to +# http://forum.java.sun.com/thread.jspa?threadID=5073150 +# By passing LDFLAGS='-Lsrc/solaris' to configure, make will pick up +# this copy of the file rather than the empty copy in /usr/sfw/lib. +# +# Also see +# http://www.technicalarticles.org/index.php/Compiling_MySQL_5.0_on_Solaris_10 +# +# Note: this is for 32-bit systems. If you have a 64-bit system, +# uncomment the appropriate dependency_libs line below. +# ---- + +# The name that we can dlopen(3). +dlname='libstdc++.so.6' + +# Names of this library. +library_names='libstdc++.so.6.0.3 libstdc++.so.6 libstdc++.so' + +# The name of the static archive. +old_library='libstdc++.a' + +# Libraries that this one depends upon. +# 32-bit version: +dependency_libs='-lc -lm -L/usr/sfw/lib -lgcc_s' +# 64-bit version: +#dependency_libs='-L/lib/64 -lc -lm -L/usr/sfw/lib/64 -lgcc_s' + +# Version information for libstdc++. +current=6 +age=0 +revision=3 + +# Is this an already installed library? 
+installed=yes + +# Files to dlopen/dlpreopen +dlopen='' +dlpreopen='' + +# Directory that this library needs to be installed in: +libdir='/usr/sfw/lib' diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv.h index aeffd5ef7a..a06e1233ab 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv.h @@ -26,6 +26,7 @@ #include "libyuv/scale.h" #include "libyuv/scale_argb.h" #include "libyuv/scale_row.h" +#include "libyuv/scale_uv.h" #include "libyuv/version.h" #include "libyuv/video_common.h" diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/compare_row.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/compare_row.h index e95b9d93eb..d8e82d721b 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/compare_row.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/compare_row.h @@ -55,20 +55,20 @@ extern "C" { // The following are available for Visual C and clangcl 32 bit: #if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER) && \ + !defined(__clang__) && \ (defined(VISUALC_HAS_AVX2) || defined(CLANG_HAS_AVX2)) #define HAS_HASHDJB2_AVX2 #define HAS_SUMSQUAREERROR_AVX2 #endif -// The following are available for GCC and clangcl 64 bit: -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) +// The following are available for GCC and clangcl: +#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__)) #define HAS_HAMMINGDISTANCE_SSSE3 #endif -// The following are available for GCC and clangcl 64 bit: +// The following are available for GCC and clangcl: #if !defined(LIBYUV_DISABLE_X86) && defined(CLANG_HAS_AVX2) && \ - (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) + (defined(__x86_64__) || defined(__i386__)) #define HAS_HAMMINGDISTANCE_AVX2 #endif @@ -84,11 +84,6 @@ extern "C" { #define HAS_SUMSQUAREERROR_MSA #endif -#if !defined(LIBYUV_DISABLE_MMI) && defined(_MIPS_ARCH_LOONGSON3A) -#define HAS_HAMMINGDISTANCE_MMI -#define HAS_SUMSQUAREERROR_MMI -#endif - uint32_t HammingDistance_C(const uint8_t* src_a, const uint8_t* src_b, int count); @@ -107,9 +102,6 @@ uint32_t HammingDistance_NEON(const uint8_t* src_a, uint32_t HammingDistance_MSA(const uint8_t* src_a, const uint8_t* src_b, int count); -uint32_t HammingDistance_MMI(const uint8_t* src_a, - const uint8_t* src_b, - int count); uint32_t SumSquareError_C(const uint8_t* src_a, const uint8_t* src_b, int count); @@ -125,9 +117,6 @@ uint32_t SumSquareError_NEON(const uint8_t* src_a, uint32_t SumSquareError_MSA(const uint8_t* src_a, const uint8_t* src_b, int count); -uint32_t SumSquareError_MMI(const uint8_t* src_a, - const uint8_t* src_b, - int count); uint32_t HashDjb2_C(const uint8_t* src, int count, uint32_t seed); uint32_t HashDjb2_SSE41(const uint8_t* src, int count, uint32_t seed); diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert.h index 026b153cef..2f1ce4214b 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert.h @@ -89,6 +89,95 @@ int I422ToI420(const uint8_t* src_y, int width, int height); +// Convert I422 to I444. 
+LIBYUV_API +int I422ToI444(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +// Convert I422 to I210. +LIBYUV_API +int I422ToI210(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height); + +// Convert MM21 to NV12. +LIBYUV_API +int MM21ToNV12(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +// Convert MM21 to I420. +LIBYUV_API +int MM21ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +// Convert MM21 to YUY2 +LIBYUV_API +int MM21ToYUY2(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_yuy2, + int dst_stride_yuy2, + int width, + int height); + +// Convert MT2T to P010 +// Note that src_y and src_uv point to packed 10-bit values, so the Y plane will +// be 10 / 8 times the dimensions of the image. Also for this reason, +// src_stride_y and src_stride_uv are given in bytes. +LIBYUV_API +int MT2TToP010(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height); + // Convert I422 to NV21. LIBYUV_API int I422ToNV21(const uint8_t* src_y, @@ -122,6 +211,23 @@ int I420Copy(const uint8_t* src_y, int width, int height); +// Convert I420 to I444. 
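The MT2TToP010 note above is the one place in this header where strides are byte counts rather than sample counts, because the source samples are packed 10-bit values (every 8 samples occupy 10 bytes). A hedged sketch of that sizing arithmetic follows; the struct and helper are invented for illustration, it assumes a width that is a multiple of 8, and it ignores any tile-alignment padding a real MT2T producer may add.

```cpp
#include <cstddef>
#include <cstdint>

// Illustrative only: byte layout of the packed 10-bit MT2T source planes
// fed to MT2TToP010 ("10 / 8 times the dimensions of the image").
struct Mt2tSrcLayout {
  int stride_y_bytes;          // packed 10-bit luma row
  int stride_uv_bytes;         // packed 10-bit interleaved U/V row
  std::size_t y_plane_bytes;
  std::size_t uv_plane_bytes;  // 4:2:0 chroma: half as many rows as luma
};

static Mt2tSrcLayout Mt2tSrcLayoutFor(int width, int height) {
  Mt2tSrcLayout l;
  l.stride_y_bytes = width * 10 / 8;
  l.stride_uv_bytes = width * 10 / 8;
  l.y_plane_bytes = static_cast<std::size_t>(l.stride_y_bytes) * height;
  l.uv_plane_bytes = static_cast<std::size_t>(l.stride_uv_bytes) * (height / 2);
  return l;
}
```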
+LIBYUV_API +int I420ToI444(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + // Copy I010 to I010 #define I010ToI010 I010Copy #define H010ToH010 I010Copy @@ -159,6 +265,263 @@ int I010ToI420(const uint16_t* src_y, int width, int height); +#define H210ToH420 I210ToI420 +LIBYUV_API +int I210ToI420(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +#define H210ToH422 I210ToI422 +LIBYUV_API +int I210ToI422(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +#define H410ToH420 I410ToI420 +LIBYUV_API +int I410ToI420(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +#define H410ToH444 I410ToI444 +LIBYUV_API +int I410ToI444(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +#define H012ToH420 I012ToI420 +LIBYUV_API +int I012ToI420(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +#define H212ToH422 I212ToI422 +LIBYUV_API +int I212ToI422(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +#define H412ToH444 I412ToI444 +LIBYUV_API +int I412ToI444(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +#define I412ToI012 I410ToI010 +#define H410ToH010 I410ToI010 +#define H412ToH012 I410ToI010 +LIBYUV_API +int I410ToI010(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height); + +#define I212ToI012 I210ToI010 +#define H210ToH010 I210ToI010 +#define H212ToH012 I210ToI010 +LIBYUV_API +int I210ToI010(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + 
uint16_t* dst_v, + int dst_stride_v, + int width, + int height); + +// Convert I010 to I410 +LIBYUV_API +int I010ToI410(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height); + +// Convert I012 to I412 +#define I012ToI412 I010ToI410 + +// Convert I210 to I410 +LIBYUV_API +int I210ToI410(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height); + +// Convert I212 to I412 +#define I212ToI412 I210ToI410 + +// Convert I010 to P010 +LIBYUV_API +int I010ToP010(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +// Convert I210 to P210 +LIBYUV_API +int I210ToP210(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +// Convert I012 to P012 +LIBYUV_API +int I012ToP012(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +// Convert I212 to P212 +LIBYUV_API +int I212ToP212(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height); + // Convert I400 (grey) to I420. LIBYUV_API int I400ToI420(const uint8_t* src_y, @@ -215,6 +578,100 @@ int NV21ToI420(const uint8_t* src_y, int width, int height); +// Convert NV12 to NV24. +LIBYUV_API +int NV12ToNV24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +// Convert NV16 to NV24. +LIBYUV_API +int NV16ToNV24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +// Convert P010 to I010. +LIBYUV_API +int P010ToI010(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height); + +// Convert P012 to I012. +LIBYUV_API +int P012ToI012(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height); + +// Convert P010 to P410. 
+LIBYUV_API +int P010ToP410(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +// Convert P012 to P412. +#define P012ToP412 P010ToP410 + +// Convert P016 to P416. +#define P016ToP416 P010ToP410 + +// Convert P210 to P410. +LIBYUV_API +int P210ToP410(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +// Convert P212 to P412. +#define P212ToP412 P210ToP410 + +// Convert P216 to P416. +#define P216ToP416 P210ToP410 + // Convert YUY2 to I420. LIBYUV_API int YUY2ToI420(const uint8_t* src_yuy2, @@ -372,6 +829,19 @@ int RAWToI420(const uint8_t* src_raw, int width, int height); +// RGB big endian (rgb in memory) to J420. +LIBYUV_API +int RAWToJ420(const uint8_t* src_raw, + int src_stride_raw, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + // RGB16 (RGBP fourcc) little endian to I420. LIBYUV_API int RGB565ToI420(const uint8_t* src_rgb565, diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_argb.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_argb.h index 715a3dad97..8e4562efc9 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_argb.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_argb.h @@ -14,6 +14,7 @@ #include "libyuv/basic_types.h" #include "libyuv/rotate.h" // For enum RotationMode. +#include "libyuv/scale.h" // For enum FilterMode. #ifdef __cplusplus namespace libyuv { @@ -21,16 +22,20 @@ extern "C" { #endif // Conversion matrix for YUV to RGB -LIBYUV_API extern const struct YuvConstants kYuvI601Constants; // BT.601 -LIBYUV_API extern const struct YuvConstants kYuvJPEGConstants; // JPeg -LIBYUV_API extern const struct YuvConstants kYuvH709Constants; // BT.709 -LIBYUV_API extern const struct YuvConstants kYuv2020Constants; // BT.2020 +LIBYUV_API extern const struct YuvConstants kYuvI601Constants; // BT.601 +LIBYUV_API extern const struct YuvConstants kYuvJPEGConstants; // BT.601 full +LIBYUV_API extern const struct YuvConstants kYuvH709Constants; // BT.709 +LIBYUV_API extern const struct YuvConstants kYuvF709Constants; // BT.709 full +LIBYUV_API extern const struct YuvConstants kYuv2020Constants; // BT.2020 +LIBYUV_API extern const struct YuvConstants kYuvV2020Constants; // BT.2020 full // Conversion matrix for YVU to BGR -LIBYUV_API extern const struct YuvConstants kYvuI601Constants; // BT.601 -LIBYUV_API extern const struct YuvConstants kYvuJPEGConstants; // JPeg -LIBYUV_API extern const struct YuvConstants kYvuH709Constants; // BT.709 -LIBYUV_API extern const struct YuvConstants kYvu2020Constants; // BT.2020 +LIBYUV_API extern const struct YuvConstants kYvuI601Constants; // BT.601 +LIBYUV_API extern const struct YuvConstants kYvuJPEGConstants; // BT.601 full +LIBYUV_API extern const struct YuvConstants kYvuH709Constants; // BT.709 +LIBYUV_API extern const struct YuvConstants kYvuF709Constants; // BT.709 full +LIBYUV_API extern const struct YuvConstants kYvu2020Constants; // BT.2020 +LIBYUV_API extern const struct YuvConstants kYvuV2020Constants; // BT.2020 full // Macros for end swapped destination Matrix conversions. // Swap UV and pass mirrored kYvuJPEGConstants matrix. 
@@ -38,7 +43,10 @@ LIBYUV_API extern const struct YuvConstants kYvu2020Constants; // BT.2020 #define kYuvI601ConstantsVU kYvuI601Constants #define kYuvJPEGConstantsVU kYvuJPEGConstants #define kYuvH709ConstantsVU kYvuH709Constants +#define kYuvF709ConstantsVU kYvuF709Constants #define kYuv2020ConstantsVU kYvu2020Constants +#define kYuvV2020ConstantsVU kYvuV2020Constants + #define NV12ToABGRMatrix(a, b, c, d, e, f, g, h, i) \ NV21ToARGBMatrix(a, b, c, d, e, f, g##VU, h, i) #define NV21ToABGRMatrix(a, b, c, d, e, f, g, h, i) \ @@ -47,8 +55,30 @@ LIBYUV_API extern const struct YuvConstants kYvu2020Constants; // BT.2020 NV21ToRGB24Matrix(a, b, c, d, e, f, g##VU, h, i) #define NV21ToRAWMatrix(a, b, c, d, e, f, g, h, i) \ NV12ToRGB24Matrix(a, b, c, d, e, f, g##VU, h, i) +#define I010ToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k) \ + I010ToARGBMatrix(a, b, e, f, c, d, g, h, i##VU, j, k) +#define I210ToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k) \ + I210ToARGBMatrix(a, b, e, f, c, d, g, h, i##VU, j, k) +#define I410ToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k) \ + I410ToARGBMatrix(a, b, e, f, c, d, g, h, i##VU, j, k) +#define I010ToAB30Matrix(a, b, c, d, e, f, g, h, i, j, k) \ + I010ToAR30Matrix(a, b, e, f, c, d, g, h, i##VU, j, k) +#define I210ToAB30Matrix(a, b, c, d, e, f, g, h, i, j, k) \ + I210ToAR30Matrix(a, b, e, f, c, d, g, h, i##VU, j, k) +#define I410ToAB30Matrix(a, b, c, d, e, f, g, h, i, j, k) \ + I410ToAR30Matrix(a, b, e, f, c, d, g, h, i##VU, j, k) #define I420AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k, l, m, n) \ I420AlphaToARGBMatrix(a, b, e, f, c, d, g, h, i, j, k##VU, l, m, n) +#define I422AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k, l, m, n) \ + I422AlphaToARGBMatrix(a, b, e, f, c, d, g, h, i, j, k##VU, l, m, n) +#define I444AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k, l, m, n) \ + I444AlphaToARGBMatrix(a, b, e, f, c, d, g, h, i, j, k##VU, l, m, n) +#define I010AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k, l, m, n) \ + I010AlphaToARGBMatrix(a, b, e, f, c, d, g, h, i, j, k##VU, l, m, n) +#define I210AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k, l, m, n) \ + I210AlphaToARGBMatrix(a, b, e, f, c, d, g, h, i, j, k##VU, l, m, n) +#define I410AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, k, l, m, n) \ + I410AlphaToARGBMatrix(a, b, e, f, c, d, g, h, i, j, k##VU, l, m, n) // Alias. #define ARGBToARGB ARGBCopy @@ -374,6 +404,32 @@ int U444ToABGR(const uint8_t* src_y, int width, int height); +// Convert I444 to RGB24. +LIBYUV_API +int I444ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height); + +// Convert I444 to RAW. +LIBYUV_API +int I444ToRAW(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_raw, + int dst_stride_raw, + int width, + int height); + // Convert I010 to ARGB. LIBYUV_API int I010ToARGB(const uint16_t* src_y, @@ -562,6 +618,70 @@ int I420AlphaToABGR(const uint8_t* src_y, int height, int attenuate); +// Convert I422 with Alpha to preattenuated ARGB. +LIBYUV_API +int I422AlphaToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height, + int attenuate); + +// Convert I422 with Alpha to preattenuated ABGR. 
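The YuvConstants tables and VU-swapped macros added above let the Matrix-suffixed converters pick colorimetry explicitly (BT.601/709/2020, limited or full range) instead of relying on a default. A hedged usage sketch, assuming an 8-bit I420 frame with tightly packed strides; the function and buffer names are inventions of this note.

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

#include "libyuv/convert_argb.h"

// Convert a limited-range BT.709 I420 frame to ARGB by passing
// kYuvH709Constants explicitly (plain I420ToARGB assumes BT.601).
bool H420FrameToARGB(const uint8_t* y, const uint8_t* u, const uint8_t* v,
                     int width, int height, std::vector<uint8_t>* argb) {
  argb->resize(static_cast<std::size_t>(width) * height * 4);
  return libyuv::I420ToARGBMatrix(y, width,
                                  u, width / 2,
                                  v, width / 2,
                                  argb->data(), width * 4,
                                  &libyuv::kYuvH709Constants,
                                  width, height) == 0;  // 0 means success
}
```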
+LIBYUV_API +int I422AlphaToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height, + int attenuate); + +// Convert I444 with Alpha to preattenuated ARGB. +LIBYUV_API +int I444AlphaToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height, + int attenuate); + +// Convert I444 with Alpha to preattenuated ABGR. +LIBYUV_API +int I444AlphaToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height, + int attenuate); + // Convert I400 (grey) to ARGB. Reverse of ARGBToI400. LIBYUV_API int I400ToARGB(const uint8_t* src_y, @@ -713,29 +833,29 @@ int I010ToAR30(const uint16_t* src_y, int width, int height); -// Convert I010 to AB30. +// Convert H010 to AR30. LIBYUV_API -int I010ToAB30(const uint16_t* src_y, +int H010ToAR30(const uint16_t* src_y, int src_stride_y, const uint16_t* src_u, int src_stride_u, const uint16_t* src_v, int src_stride_v, - uint8_t* dst_ab30, - int dst_stride_ab30, + uint8_t* dst_ar30, + int dst_stride_ar30, int width, int height); -// Convert H010 to AR30. +// Convert I010 to AB30. LIBYUV_API -int H010ToAR30(const uint16_t* src_y, +int I010ToAB30(const uint16_t* src_y, int src_stride_y, const uint16_t* src_u, int src_stride_u, const uint16_t* src_v, int src_stride_v, - uint8_t* dst_ar30, - int dst_stride_ar30, + uint8_t* dst_ab30, + int dst_stride_ab30, int width, int height); @@ -972,6 +1092,42 @@ int AR30ToAB30(const uint8_t* src_ar30, int width, int height); +// Convert AR64 to ARGB. +LIBYUV_API +int AR64ToARGB(const uint16_t* src_ar64, + int src_stride_ar64, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +// Convert AB64 to ABGR. +#define AB64ToABGR AR64ToARGB + +// Convert AB64 to ARGB. +LIBYUV_API +int AB64ToARGB(const uint16_t* src_ab64, + int src_stride_ab64, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +// Convert AR64 to ABGR. +#define AR64ToABGR AB64ToARGB + +// Convert AR64 To AB64. +LIBYUV_API +int AR64ToAB64(const uint16_t* src_ar64, + int src_stride_ar64, + uint16_t* dst_ab64, + int dst_stride_ab64, + int width, + int height); + +// Convert AB64 To AR64. +#define AB64ToAR64 AR64ToAB64 + // src_width/height provided by capture // dst_width/height for clipping determine final size. LIBYUV_API @@ -1182,6 +1338,32 @@ int J420ToRAW(const uint8_t* src_y, int width, int height); +// Convert I422 to RGB24. +LIBYUV_API +int I422ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height); + +// Convert I422 to RAW. 
+LIBYUV_API +int I422ToRAW(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_raw, + int dst_stride_raw, + int width, + int height); + LIBYUV_API int I420ToRGB565(const uint8_t* src_y, int src_stride_y, @@ -1284,6 +1466,19 @@ int I420ToAR30(const uint8_t* src_y, int width, int height); +// Convert I420 to AB30. +LIBYUV_API +int I420ToAB30(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_ab30, + int dst_stride_ab30, + int width, + int height); + // Convert H420 to AR30. LIBYUV_API int H420ToAR30(const uint8_t* src_y, @@ -1297,6 +1492,19 @@ int H420ToAR30(const uint8_t* src_y, int width, int height); +// Convert H420 to AB30. +LIBYUV_API +int H420ToAB30(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_ab30, + int dst_stride_ab30, + int width, + int height); + // Convert I420 to ARGB with matrix. LIBYUV_API int I420ToARGBMatrix(const uint8_t* src_y, @@ -1339,7 +1547,21 @@ int I444ToARGBMatrix(const uint8_t* src_y, int width, int height); -// multiply 10 bit yuv into high bits to allow any number of bits. +// Convert I444 to RGB24 with matrix. +LIBYUV_API +int I444ToRGB24Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert 10 bit 420 YUV to ARGB with matrix. LIBYUV_API int I010ToAR30Matrix(const uint16_t* src_y, int src_stride_y, @@ -1353,7 +1575,7 @@ int I010ToAR30Matrix(const uint16_t* src_y, int width, int height); -// multiply 10 bit yuv into high bits to allow any number of bits. +// Convert 10 bit 420 YUV to ARGB with matrix. LIBYUV_API int I210ToAR30Matrix(const uint16_t* src_y, int src_stride_y, @@ -1367,6 +1589,20 @@ int I210ToAR30Matrix(const uint16_t* src_y, int width, int height); +// Convert 10 bit 444 YUV to ARGB with matrix. +LIBYUV_API +int I410ToAR30Matrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height); + // Convert 10 bit YUV to ARGB with matrix. LIBYUV_API int I010ToARGBMatrix(const uint16_t* src_y, @@ -1381,6 +1617,34 @@ int I010ToARGBMatrix(const uint16_t* src_y, int width, int height); +// multiply 12 bit yuv into high bits to allow any number of bits. +LIBYUV_API +int I012ToAR30Matrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert 12 bit YUV to ARGB with matrix. +LIBYUV_API +int I012ToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height); + // Convert 10 bit 422 YUV to ARGB with matrix. 
LIBYUV_API int I210ToARGBMatrix(const uint16_t* src_y, @@ -1395,6 +1659,87 @@ int I210ToARGBMatrix(const uint16_t* src_y, int width, int height); +// Convert 10 bit 444 YUV to ARGB with matrix. +LIBYUV_API +int I410ToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert P010 to ARGB with matrix. +LIBYUV_API +int P010ToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert P210 to ARGB with matrix. +LIBYUV_API +int P210ToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert P010 to AR30 with matrix. +LIBYUV_API +int P010ToAR30Matrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// Convert P210 to AR30 with matrix. +LIBYUV_API +int P210ToAR30Matrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height); + +// P012 and P010 use most significant bits so the conversion is the same. +// Convert P012 to ARGB with matrix. +#define P012ToARGBMatrix P010ToARGBMatrix +// Convert P012 to AR30 with matrix. +#define P012ToAR30Matrix P010ToAR30Matrix +// Convert P212 to ARGB with matrix. +#define P212ToARGBMatrix P210ToARGBMatrix +// Convert P212 to AR30 with matrix. +#define P212ToAR30Matrix P210ToAR30Matrix + +// Convert P016 to ARGB with matrix. +#define P016ToARGBMatrix P010ToARGBMatrix +// Convert P016 to AR30 with matrix. +#define P016ToAR30Matrix P010ToAR30Matrix +// Convert P216 to ARGB with matrix. +#define P216ToARGBMatrix P210ToARGBMatrix +// Convert P216 to AR30 with matrix. +#define P216ToAR30Matrix P210ToAR30Matrix + // Convert I420 with Alpha to preattenuated ARGB with matrix. LIBYUV_API int I420AlphaToARGBMatrix(const uint8_t* src_y, @@ -1412,6 +1757,91 @@ int I420AlphaToARGBMatrix(const uint8_t* src_y, int height, int attenuate); +// Convert I422 with Alpha to preattenuated ARGB with matrix. +LIBYUV_API +int I422AlphaToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate); + +// Convert I444 with Alpha to preattenuated ARGB with matrix. +LIBYUV_API +int I444AlphaToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate); + +// Convert I010 with Alpha to preattenuated ARGB with matrix. 
+LIBYUV_API +int I010AlphaToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + const uint16_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate); + +// Convert I210 with Alpha to preattenuated ARGB with matrix. +LIBYUV_API +int I210AlphaToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + const uint16_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate); + +// Convert I410 with Alpha to preattenuated ARGB with matrix. +LIBYUV_API +int I410AlphaToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + const uint16_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate); + // Convert NV12 to ARGB with matrix. LIBYUV_API int NV12ToARGBMatrix(const uint8_t* src_y, @@ -1501,7 +1931,7 @@ int I422ToRGBAMatrix(const uint8_t* src_y, int width, int height); -// Convert I422 to RGBA with matrix. +// Convert I420 to RGBA with matrix. LIBYUV_API int I420ToRGBAMatrix(const uint8_t* src_y, int src_stride_y, @@ -1529,6 +1959,20 @@ int I420ToRGB24Matrix(const uint8_t* src_y, int width, int height); +// Convert I422 to RGB24 with matrix. +LIBYUV_API +int I422ToRGB24Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height); + // Convert I420 to RGB565 with specified color matrix. LIBYUV_API int I420ToRGB565Matrix(const uint8_t* src_y, @@ -1543,6 +1987,20 @@ int I420ToRGB565Matrix(const uint8_t* src_y, int width, int height); +// Convert I422 to RGB565 with specified color matrix. +LIBYUV_API +int I422ToRGB565Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + const struct YuvConstants* yuvconstants, + int width, + int height); + // Convert I420 to AR30 with matrix. LIBYUV_API int I420ToAR30Matrix(const uint8_t* src_y, @@ -1567,6 +2025,250 @@ int I400ToARGBMatrix(const uint8_t* src_y, int width, int height); +// Convert I420 to ARGB with matrix and UV filter mode. +LIBYUV_API +int I420ToARGBMatrixFilter(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter); + +// Convert I422 to ARGB with matrix and UV filter mode. +LIBYUV_API +int I422ToARGBMatrixFilter(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter); + +// Convert I422 to RGB24 with matrix and UV filter mode. 
+LIBYUV_API +int I422ToRGB24MatrixFilter(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter); + +// Convert I420 to RGB24 with matrix and UV filter mode. +LIBYUV_API +int I420ToRGB24MatrixFilter(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter); + +// Convert I010 to AR30 with matrix and UV filter mode. +LIBYUV_API +int I010ToAR30MatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter); + +// Convert I210 to AR30 with matrix and UV filter mode. +LIBYUV_API +int I210ToAR30MatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter); + +// Convert I010 to ARGB with matrix and UV filter mode. +LIBYUV_API +int I010ToARGBMatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter); + +// Convert I210 to ARGB with matrix and UV filter mode. +LIBYUV_API +int I210ToARGBMatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter); + +// Convert I420 with Alpha to attenuated ARGB with matrix and UV filter mode. +LIBYUV_API +int I420AlphaToARGBMatrixFilter(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate, + enum FilterMode filter); + +// Convert I422 with Alpha to attenuated ARGB with matrix and UV filter mode. +LIBYUV_API +int I422AlphaToARGBMatrixFilter(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate, + enum FilterMode filter); + +// Convert I010 with Alpha to attenuated ARGB with matrix and UV filter mode. 
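Editor's note: the ...MatrixFilter variants above add an enum FilterMode argument that controls how chroma is upsampled before conversion. A hedged sketch, assuming the FilterMode values from libyuv/scale.h (kFilterBilinear here) and illustrative buffers:

#include <stdint.h>
#include "libyuv/convert_argb.h"
#include "libyuv/scale.h"   // assumed location of enum FilterMode

// Hedged sketch: I420 to ARGB with bilinear chroma filtering instead of
// nearest sampling. Matrix choice and buffer layout are illustrative.
static int ConvertI420Bilinear(const uint8_t* y, const uint8_t* u,
                               const uint8_t* v, int width, int height,
                               uint8_t* argb) {
  int half_width = (width + 1) / 2;
  return I420ToARGBMatrixFilter(y, width,
                                u, half_width,
                                v, half_width,
                                argb, width * 4,
                                &kYuvI601Constants,  // assumed BT.601 matrix
                                width, height,
                                kFilterBilinear);
}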
+LIBYUV_API +int I010AlphaToARGBMatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + const uint16_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate, + enum FilterMode filter); + +// Convert I210 with Alpha to attenuated ARGB with matrix and UV filter mode. +LIBYUV_API +int I210AlphaToARGBMatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + const uint16_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate, + enum FilterMode filter); + +// Convert P010 to ARGB with matrix and UV filter mode. +LIBYUV_API +int P010ToARGBMatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter); + +// Convert P210 to ARGB with matrix and UV filter mode. +LIBYUV_API +int P210ToARGBMatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter); + +// Convert P010 to AR30 with matrix and UV filter mode. +LIBYUV_API +int P010ToAR30MatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter); + +// Convert P210 to AR30 with matrix and UV filter mode. +LIBYUV_API +int P210ToAR30MatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter); + // Convert camera sample to ARGB with cropping, rotation and vertical flip. // "sample_size" is needed to parse MJPG. // "dst_stride_argb" number of bytes in a row of the dst_argb plane. diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from.h index 5140ed4f3e..32f42a6330 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from.h @@ -39,6 +39,24 @@ int I420ToI010(const uint8_t* src_y, int width, int height); +// Convert 8 bit YUV to 12 bit. 
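Editor's note: the same filter parameter is available on the biplanar 10-bit paths above; converting P010 to AR30 keeps the 10-bit precision end to end. A hedged sketch under the same assumptions as the earlier examples (matrix choice and buffers are illustrative):

#include <stdint.h>
#include "libyuv/convert_argb.h"
#include "libyuv/scale.h"   // assumed location of enum FilterMode

// Hedged sketch: P010 to AR30 (2:10:10:10) with bilinear chroma filtering.
// Strides for the uint16_t planes are given in elements; an even width is
// assumed for the interleaved UV plane.
static int ConvertP010ToAR30(const uint16_t* y, const uint16_t* uv,
                             int width, int height, uint8_t* ar30) {
  return P010ToAR30MatrixFilter(y, width,
                                uv, width,
                                ar30, width * 4,    // 4 bytes per AR30 pixel
                                &kYuv2020Constants, // assumed matrix choice
                                width, height,
                                kFilterBilinear);
}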
+#define H420ToH012 I420ToI012 +LIBYUV_API +int I420ToI012(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height); + LIBYUV_API int I420ToI422(const uint8_t* src_y, int src_stride_y, diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from_argb.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from_argb.h index d992363ceb..ff2a581ac4 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from_argb.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/convert_from_argb.h @@ -153,6 +153,30 @@ int ARGBToI444(const uint8_t* src_argb, int width, int height); +// Convert ARGB to AR64. +LIBYUV_API +int ARGBToAR64(const uint8_t* src_argb, + int src_stride_argb, + uint16_t* dst_ar64, + int dst_stride_ar64, + int width, + int height); + +// Convert ABGR to AB64. +#define ABGRToAB64 ARGBToAR64 + +// Convert ARGB to AB64. +LIBYUV_API +int ARGBToAB64(const uint8_t* src_argb, + int src_stride_argb, + uint16_t* dst_ab64, + int dst_stride_ab64, + int width, + int height); + +// Convert ABGR to AR64. +#define ABGRToAR64 ARGBToAB64 + // Convert ARGB To I422. LIBYUV_API int ARGBToI422(const uint8_t* src_argb, @@ -185,10 +209,10 @@ int ARGBToJ420(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_yj, int dst_stride_yj, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, + uint8_t* dst_uj, + int dst_stride_uj, + uint8_t* dst_vj, + int dst_stride_vj, int width, int height); @@ -198,10 +222,10 @@ int ARGBToJ422(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_yj, int dst_stride_yj, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, + uint8_t* dst_uj, + int dst_stride_uj, + uint8_t* dst_vj, + int dst_stride_vj, int width, int height); @@ -214,6 +238,41 @@ int ARGBToJ400(const uint8_t* src_argb, int width, int height); +// Convert ABGR to J420. (JPeg full range I420). +LIBYUV_API +int ABGRToJ420(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_yj, + int dst_stride_yj, + uint8_t* dst_uj, + int dst_stride_uj, + uint8_t* dst_vj, + int dst_stride_vj, + int width, + int height); + +// Convert ABGR to J422. +LIBYUV_API +int ABGRToJ422(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_yj, + int dst_stride_yj, + uint8_t* dst_uj, + int dst_stride_uj, + uint8_t* dst_vj, + int dst_stride_vj, + int width, + int height); + +// Convert ABGR to J400. (JPeg full range). +LIBYUV_API +int ABGRToJ400(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_yj, + int dst_stride_yj, + int width, + int height); + // Convert RGBA to J400. (JPeg full range). 
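Editor's note: ARGBToAR64 above widens each 8-bit channel to 16 bits; because that operation does not depend on channel order, the patch can alias ABGRToAB64 to the very same routine (and ABGRToAR64 to ARGBToAB64, which swaps R and B). A small hedged sketch with illustrative buffers:

#include <stdint.h>
#include "libyuv/convert_from_argb.h"

// Hedged sketch: widen an 8-bit ARGB image to 16 bits per channel (AR64).
// The destination stride is given in uint16_t elements, matching the pointer
// type in the declaration above.
static int WidenToAR64(const uint8_t* argb, int width, int height,
                       uint16_t* ar64) {
  return ARGBToAR64(argb, width * 4,   // source stride in bytes
                    ar64, width * 4,   // 4 uint16_t per pixel
                    width, height);
}

// The alias added by the patch is a straight #define, so an ABGR caller is
// identical in shape:
//   ABGRToAB64(abgr, width * 4, ab64, width * 4, width, height);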
LIBYUV_API int RGBAToJ400(const uint8_t* src_rgba, @@ -303,6 +362,17 @@ int ARGBToUYVY(const uint8_t* src_argb, int width, int height); +// RAW to JNV21 full range NV21 +LIBYUV_API +int RAWToJNV21(const uint8_t* src_raw, + int src_stride_raw, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_vu, + int dst_stride_vu, + int width, + int height); + #ifdef __cplusplus } // extern "C" } // namespace libyuv diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/cpu_id.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/cpu_id.h index 3e27cc107d..fb90c6c737 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/cpu_id.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/cpu_id.h @@ -40,15 +40,20 @@ static const int kCpuHasF16C = 0x2000; static const int kCpuHasGFNI = 0x4000; static const int kCpuHasAVX512BW = 0x8000; static const int kCpuHasAVX512VL = 0x10000; -static const int kCpuHasAVX512VBMI = 0x20000; -static const int kCpuHasAVX512VBMI2 = 0x40000; -static const int kCpuHasAVX512VBITALG = 0x80000; -static const int kCpuHasAVX512VPOPCNTDQ = 0x100000; +static const int kCpuHasAVX512VNNI = 0x20000; +static const int kCpuHasAVX512VBMI = 0x40000; +static const int kCpuHasAVX512VBMI2 = 0x80000; +static const int kCpuHasAVX512VBITALG = 0x100000; +static const int kCpuHasAVX512VPOPCNTDQ = 0x200000; // These flags are only valid on MIPS processors. -static const int kCpuHasMIPS = 0x200000; -static const int kCpuHasMSA = 0x400000; -static const int kCpuHasMMI = 0x800000; +static const int kCpuHasMIPS = 0x400000; +static const int kCpuHasMSA = 0x800000; + +// These flags are only valid on LOONGARCH processors. +static const int kCpuHasLOONGARCH = 0x2000000; +static const int kCpuHasLSX = 0x4000000; +static const int kCpuHasLASX = 0x8000000; // Optional init function. TestCpuFlag does an auto-init. // Returns cpu_info flags. diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/loongson_intrinsics.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/loongson_intrinsics.h new file mode 100644 index 0000000000..1d613defb1 --- /dev/null +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/loongson_intrinsics.h @@ -0,0 +1,1949 @@ +/* + * Copyright 2022 The LibYuv Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef INCLUDE_LIBYUV_LOONGSON_INTRINSICS_H +#define INCLUDE_LIBYUV_LOONGSON_INTRINSICS_H + +/* + * Copyright (c) 2022 Loongson Technology Corporation Limited + * All rights reserved. + * Contributed by Shiyou Yin + * Xiwei Gu + * Lu Wang + * + * This file is a header file for loongarch builtin extension. + * + */ + +#ifndef LOONGSON_INTRINSICS_H +#define LOONGSON_INTRINSICS_H + +/** + * MAJOR version: Macro usage changes. + * MINOR version: Add new functions, or bug fixes. + * MICRO version: Comment changes or implementation changes. 
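Editor's note: the cpu_id.h hunk above renumbers the feature bits to make room for the LoongArch entries, so callers should always test features through the cpu_id API rather than hard-coding bit values. A hedged sketch using TestCpuFlag, which already exists in cpu_id.h (the printout is illustrative):

#include <stdio.h>
#include "libyuv/cpu_id.h"

// Hedged sketch: query the new LoongArch feature bits at runtime.
// TestCpuFlag() auto-initializes the cached cpu_info on first use.
int main(void) {
  int has_lsx = TestCpuFlag(kCpuHasLSX);
  int has_lasx = TestCpuFlag(kCpuHasLASX);
  printf("LSX:  %s\n", has_lsx ? "yes" : "no");
  printf("LASX: %s\n", has_lasx ? "yes" : "no");
  return 0;
}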
+ */ +#define LSOM_VERSION_MAJOR 1 +#define LSOM_VERSION_MINOR 1 +#define LSOM_VERSION_MICRO 0 + +#define DUP2_ARG1(_INS, _IN0, _IN1, _OUT0, _OUT1) \ + { \ + _OUT0 = _INS(_IN0); \ + _OUT1 = _INS(_IN1); \ + } + +#define DUP2_ARG2(_INS, _IN0, _IN1, _IN2, _IN3, _OUT0, _OUT1) \ + { \ + _OUT0 = _INS(_IN0, _IN1); \ + _OUT1 = _INS(_IN2, _IN3); \ + } + +#define DUP2_ARG3(_INS, _IN0, _IN1, _IN2, _IN3, _IN4, _IN5, _OUT0, _OUT1) \ + { \ + _OUT0 = _INS(_IN0, _IN1, _IN2); \ + _OUT1 = _INS(_IN3, _IN4, _IN5); \ + } + +#define DUP4_ARG1(_INS, _IN0, _IN1, _IN2, _IN3, _OUT0, _OUT1, _OUT2, _OUT3) \ + { \ + DUP2_ARG1(_INS, _IN0, _IN1, _OUT0, _OUT1); \ + DUP2_ARG1(_INS, _IN2, _IN3, _OUT2, _OUT3); \ + } + +#define DUP4_ARG2(_INS, _IN0, _IN1, _IN2, _IN3, _IN4, _IN5, _IN6, _IN7, _OUT0, \ + _OUT1, _OUT2, _OUT3) \ + { \ + DUP2_ARG2(_INS, _IN0, _IN1, _IN2, _IN3, _OUT0, _OUT1); \ + DUP2_ARG2(_INS, _IN4, _IN5, _IN6, _IN7, _OUT2, _OUT3); \ + } + +#define DUP4_ARG3(_INS, _IN0, _IN1, _IN2, _IN3, _IN4, _IN5, _IN6, _IN7, _IN8, \ + _IN9, _IN10, _IN11, _OUT0, _OUT1, _OUT2, _OUT3) \ + { \ + DUP2_ARG3(_INS, _IN0, _IN1, _IN2, _IN3, _IN4, _IN5, _OUT0, _OUT1); \ + DUP2_ARG3(_INS, _IN6, _IN7, _IN8, _IN9, _IN10, _IN11, _OUT2, _OUT3); \ + } + +#ifdef __loongarch_sx +#include +/* + * ============================================================================= + * Description : Dot product & addition of byte vector elements + * Arguments : Inputs - in_c, in_h, in_l + * Outputs - out + * Return Type - halfword + * Details : Signed byte elements from in_h are multiplied by + * signed byte elements from in_l, and then added adjacent to + * each other to get results with the twice size of input. + * Then the results plus to signed half-word elements from in_c. + * Example : out = __lsx_vdp2add_h_b(in_c, in_h, in_l) + * in_c : 1,2,3,4, 1,2,3,4 + * in_h : 1,2,3,4, 5,6,7,8, 1,2,3,4, 5,6,7,8 + * in_l : 8,7,6,5, 4,3,2,1, 8,7,6,5, 4,3,2,1 + * out : 23,40,41,26, 23,40,41,26 + * ============================================================================= + */ +static inline __m128i __lsx_vdp2add_h_b(__m128i in_c, + __m128i in_h, + __m128i in_l) { + __m128i out; + + out = __lsx_vmaddwev_h_b(in_c, in_h, in_l); + out = __lsx_vmaddwod_h_b(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product & addition of byte vector elements + * Arguments : Inputs - in_c, in_h, in_l + * Outputs - out + * Return Type - halfword + * Details : Unsigned byte elements from in_h are multiplied by + * unsigned byte elements from in_l, and then added adjacent to + * each other to get results with the twice size of input. + * The results plus to signed half-word elements from in_c. 
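Editor's note: the DUP*_ARG* helpers above simply fan one intrinsic out over several operand pairs, which keeps the row kernels in this header compact. A minimal hedged sketch, compiled only under __loongarch_sx; the function name, buffers and the __lsx_vld/__lsx_vst loads and stores are assumptions outside this header:

#if defined(__loongarch_sx)
#include <lsxintrin.h>  // assumed LSX intrinsics header

// Hedged sketch: load four 16-byte vectors, add them pairwise through the
// DUP2_ARG2 helper defined above, and store the two sums.
static void AddPairsRow(const uint8_t* a, const uint8_t* b, uint8_t* out) {
  __m128i a0 = __lsx_vld((void*)a, 0);
  __m128i a1 = __lsx_vld((void*)a, 16);
  __m128i b0 = __lsx_vld((void*)b, 0);
  __m128i b1 = __lsx_vld((void*)b, 16);
  __m128i s0, s1;
  DUP2_ARG2(__lsx_vadd_b, a0, b0, a1, b1, s0, s1);  // s0 = a0+b0, s1 = a1+b1
  __lsx_vst(s0, out, 0);
  __lsx_vst(s1, out, 16);
}
#endif  // __loongarch_sx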
+ * Example : out = __lsx_vdp2add_h_bu(in_c, in_h, in_l) + * in_c : 1,2,3,4, 1,2,3,4 + * in_h : 1,2,3,4, 5,6,7,8, 1,2,3,4, 5,6,7,8 + * in_l : 8,7,6,5, 4,3,2,1, 8,7,6,5, 4,3,2,1 + * out : 23,40,41,26, 23,40,41,26 + * ============================================================================= + */ +static inline __m128i __lsx_vdp2add_h_bu(__m128i in_c, + __m128i in_h, + __m128i in_l) { + __m128i out; + + out = __lsx_vmaddwev_h_bu(in_c, in_h, in_l); + out = __lsx_vmaddwod_h_bu(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product & addition of byte vector elements + * Arguments : Inputs - in_c, in_h, in_l + * Outputs - out + * Return Type - halfword + * Details : Unsigned byte elements from in_h are multiplied by + * signed byte elements from in_l, and then added adjacent to + * each other to get results with the twice size of input. + * The results plus to signed half-word elements from in_c. + * Example : out = __lsx_vdp2add_h_bu_b(in_c, in_h, in_l) + * in_c : 1,1,1,1, 1,1,1,1 + * in_h : 1,2,3,4, 5,6,7,8, 1,2,3,4, 5,6,7,8 + * in_l : -1,-2,-3,-4, -5,-6,-7,-8, 1,2,3,4, 5,6,7,8 + * out : -4,-24,-60,-112, 6,26,62,114 + * ============================================================================= + */ +static inline __m128i __lsx_vdp2add_h_bu_b(__m128i in_c, + __m128i in_h, + __m128i in_l) { + __m128i out; + + out = __lsx_vmaddwev_h_bu_b(in_c, in_h, in_l); + out = __lsx_vmaddwod_h_bu_b(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product & addition of half-word vector elements + * Arguments : Inputs - in_c, in_h, in_l + * Outputs - out + * Return Type - __m128i + * Details : Signed half-word elements from in_h are multiplied by + * signed half-word elements from in_l, and then added adjacent to + * each other to get results with the twice size of input. + * Then the results plus to signed word elements from in_c. + * Example : out = __lsx_vdp2add_h_b(in_c, in_h, in_l) + * in_c : 1,2,3,4 + * in_h : 1,2,3,4, 5,6,7,8 + * in_l : 8,7,6,5, 4,3,2,1 + * out : 23,40,41,26 + * ============================================================================= + */ +static inline __m128i __lsx_vdp2add_w_h(__m128i in_c, + __m128i in_h, + __m128i in_l) { + __m128i out; + + out = __lsx_vmaddwev_w_h(in_c, in_h, in_l); + out = __lsx_vmaddwod_w_h(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product of byte vector elements + * Arguments : Inputs - in_h, in_l + * Outputs - out + * Return Type - halfword + * Details : Signed byte elements from in_h are multiplied by + * signed byte elements from in_l, and then added adjacent to + * each other to get results with the twice size of input. 
+ * Example : out = __lsx_vdp2_h_b(in_h, in_l) + * in_h : 1,2,3,4, 5,6,7,8, 1,2,3,4, 5,6,7,8 + * in_l : 8,7,6,5, 4,3,2,1, 8,7,6,5, 4,3,2,1 + * out : 22,38,38,22, 22,38,38,22 + * ============================================================================= + */ +static inline __m128i __lsx_vdp2_h_b(__m128i in_h, __m128i in_l) { + __m128i out; + + out = __lsx_vmulwev_h_b(in_h, in_l); + out = __lsx_vmaddwod_h_b(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product of byte vector elements + * Arguments : Inputs - in_h, in_l + * Outputs - out + * Return Type - halfword + * Details : Unsigned byte elements from in_h are multiplied by + * unsigned byte elements from in_l, and then added adjacent to + * each other to get results with the twice size of input. + * Example : out = __lsx_vdp2_h_bu(in_h, in_l) + * in_h : 1,2,3,4, 5,6,7,8, 1,2,3,4, 5,6,7,8 + * in_l : 8,7,6,5, 4,3,2,1, 8,7,6,5, 4,3,2,1 + * out : 22,38,38,22, 22,38,38,22 + * ============================================================================= + */ +static inline __m128i __lsx_vdp2_h_bu(__m128i in_h, __m128i in_l) { + __m128i out; + + out = __lsx_vmulwev_h_bu(in_h, in_l); + out = __lsx_vmaddwod_h_bu(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product of byte vector elements + * Arguments : Inputs - in_h, in_l + * Outputs - out + * Return Type - halfword + * Details : Unsigned byte elements from in_h are multiplied by + * signed byte elements from in_l, and then added adjacent to + * each other to get results with the twice size of input. + * Example : out = __lsx_vdp2_h_bu_b(in_h, in_l) + * in_h : 1,2,3,4, 5,6,7,8, 1,2,3,4, 5,6,7,8 + * in_l : 8,7,6,5, 4,3,2,1, 8,7,6,5, 4,3,2,-1 + * out : 22,38,38,22, 22,38,38,6 + * ============================================================================= + */ +static inline __m128i __lsx_vdp2_h_bu_b(__m128i in_h, __m128i in_l) { + __m128i out; + + out = __lsx_vmulwev_h_bu_b(in_h, in_l); + out = __lsx_vmaddwod_h_bu_b(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product of byte vector elements + * Arguments : Inputs - in_h, in_l + * Outputs - out + * Return Type - halfword + * Details : Signed byte elements from in_h are multiplied by + * signed byte elements from in_l, and then added adjacent to + * each other to get results with the twice size of input. + * Example : out = __lsx_vdp2_w_h(in_h, in_l) + * in_h : 1,2,3,4, 5,6,7,8 + * in_l : 8,7,6,5, 4,3,2,1 + * out : 22,38,38,22 + * ============================================================================= + */ +static inline __m128i __lsx_vdp2_w_h(__m128i in_h, __m128i in_l) { + __m128i out; + + out = __lsx_vmulwev_w_h(in_h, in_l); + out = __lsx_vmaddwod_w_h(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Clip all halfword elements of input vector between min & max + * out = ((_in) < (min)) ? (min) : (((_in) > (max)) ? 
(max) : + * (_in)) + * Arguments : Inputs - _in (input vector) + * - min (min threshold) + * - max (max threshold) + * Outputs - out (output vector with clipped elements) + * Return Type - signed halfword + * Example : out = __lsx_vclip_h(_in) + * _in : -8,2,280,249, -8,255,280,249 + * min : 1,1,1,1, 1,1,1,1 + * max : 9,9,9,9, 9,9,9,9 + * out : 1,2,9,9, 1,9,9,9 + * ============================================================================= + */ +static inline __m128i __lsx_vclip_h(__m128i _in, __m128i min, __m128i max) { + __m128i out; + + out = __lsx_vmax_h(min, _in); + out = __lsx_vmin_h(max, out); + return out; +} + +/* + * ============================================================================= + * Description : Set each element of vector between 0 and 255 + * Arguments : Inputs - _in + * Outputs - out + * Return Type - halfword + * Details : Signed byte elements from _in are clamped between 0 and 255. + * Example : out = __lsx_vclip255_h(_in) + * _in : -8,255,280,249, -8,255,280,249 + * out : 0,255,255,249, 0,255,255,249 + * ============================================================================= + */ +static inline __m128i __lsx_vclip255_h(__m128i _in) { + __m128i out; + + out = __lsx_vmaxi_h(_in, 0); + out = __lsx_vsat_hu(out, 7); + return out; +} + +/* + * ============================================================================= + * Description : Set each element of vector between 0 and 255 + * Arguments : Inputs - _in + * Outputs - out + * Return Type - word + * Details : Signed byte elements from _in are clamped between 0 and 255. + * Example : out = __lsx_vclip255_w(_in) + * _in : -8,255,280,249 + * out : 0,255,255,249 + * ============================================================================= + */ +static inline __m128i __lsx_vclip255_w(__m128i _in) { + __m128i out; + + out = __lsx_vmaxi_w(_in, 0); + out = __lsx_vsat_wu(out, 7); + return out; +} + +/* + * ============================================================================= + * Description : Swap two variables + * Arguments : Inputs - _in0, _in1 + * Outputs - _in0, _in1 (in-place) + * Details : Swapping of two input variables using xor + * Example : LSX_SWAP(_in0, _in1) + * _in0 : 1,2,3,4 + * _in1 : 5,6,7,8 + * _in0(out) : 5,6,7,8 + * _in1(out) : 1,2,3,4 + * ============================================================================= + */ +#define LSX_SWAP(_in0, _in1) \ + { \ + _in0 = __lsx_vxor_v(_in0, _in1); \ + _in1 = __lsx_vxor_v(_in0, _in1); \ + _in0 = __lsx_vxor_v(_in0, _in1); \ + } + +/* + * ============================================================================= + * Description : Transpose 4x4 block with word elements in vectors + * Arguments : Inputs - in0, in1, in2, in3 + * Outputs - out0, out1, out2, out3 + * Details : + * Example : + * 1, 2, 3, 4 1, 5, 9,13 + * 5, 6, 7, 8 to 2, 6,10,14 + * 9,10,11,12 =====> 3, 7,11,15 + * 13,14,15,16 4, 8,12,16 + * ============================================================================= + */ +#define LSX_TRANSPOSE4x4_W(_in0, _in1, _in2, _in3, _out0, _out1, _out2, _out3) \ + { \ + __m128i _t0, _t1, _t2, _t3; \ + \ + _t0 = __lsx_vilvl_w(_in1, _in0); \ + _t1 = __lsx_vilvh_w(_in1, _in0); \ + _t2 = __lsx_vilvl_w(_in3, _in2); \ + _t3 = __lsx_vilvh_w(_in3, _in2); \ + _out0 = __lsx_vilvl_d(_t2, _t0); \ + _out1 = __lsx_vilvh_d(_t2, _t0); \ + _out2 = __lsx_vilvl_d(_t3, _t1); \ + _out3 = __lsx_vilvh_d(_t3, _t1); \ + } + +/* + * ============================================================================= + * Description : Transpose 8x8 block with byte 
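Editor's note: __lsx_vclip255_h above is the usual last step before packing a filtered 16-bit intermediate back into bytes. A hedged sketch of that step (LoongArch-only; __lsx_vld, __lsx_vst and __lsx_vpickev_b are assumed from the LSX intrinsics set, the clip helper comes from this header):

#if defined(__loongarch_sx)
#include <lsxintrin.h>  // assumed LSX intrinsics header

// Hedged sketch: clamp sixteen 16-bit intermediates to 0..255 and pack them
// into one vector of bytes.
static void ClampAndPack16(const int16_t* src, uint8_t* dst) {
  __m128i lo = __lsx_vld((void*)src, 0);    // elements 0..7
  __m128i hi = __lsx_vld((void*)src, 16);   // elements 8..15
  lo = __lsx_vclip255_h(lo);
  hi = __lsx_vclip255_h(hi);
  __m128i packed = __lsx_vpickev_b(hi, lo); // keep the low byte of each lane
  __lsx_vst(packed, dst, 0);
}
#endif  // __loongarch_sx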
elements in vectors + * Arguments : Inputs - _in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7 + * Outputs - _out0, _out1, _out2, _out3, _out4, _out5, _out6, + * _out7 + * Details : The rows of the matrix become columns, and the columns + * become rows. + * Example : LSX_TRANSPOSE8x8_B + * _in0 : 00,01,02,03,04,05,06,07, 00,00,00,00,00,00,00,00 + * _in1 : 10,11,12,13,14,15,16,17, 00,00,00,00,00,00,00,00 + * _in2 : 20,21,22,23,24,25,26,27, 00,00,00,00,00,00,00,00 + * _in3 : 30,31,32,33,34,35,36,37, 00,00,00,00,00,00,00,00 + * _in4 : 40,41,42,43,44,45,46,47, 00,00,00,00,00,00,00,00 + * _in5 : 50,51,52,53,54,55,56,57, 00,00,00,00,00,00,00,00 + * _in6 : 60,61,62,63,64,65,66,67, 00,00,00,00,00,00,00,00 + * _in7 : 70,71,72,73,74,75,76,77, 00,00,00,00,00,00,00,00 + * + * _ out0 : 00,10,20,30,40,50,60,70, 00,00,00,00,00,00,00,00 + * _ out1 : 01,11,21,31,41,51,61,71, 00,00,00,00,00,00,00,00 + * _ out2 : 02,12,22,32,42,52,62,72, 00,00,00,00,00,00,00,00 + * _ out3 : 03,13,23,33,43,53,63,73, 00,00,00,00,00,00,00,00 + * _ out4 : 04,14,24,34,44,54,64,74, 00,00,00,00,00,00,00,00 + * _ out5 : 05,15,25,35,45,55,65,75, 00,00,00,00,00,00,00,00 + * _ out6 : 06,16,26,36,46,56,66,76, 00,00,00,00,00,00,00,00 + * _ out7 : 07,17,27,37,47,57,67,77, 00,00,00,00,00,00,00,00 + * ============================================================================= + */ +#define LSX_TRANSPOSE8x8_B(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _out0, _out1, _out2, _out3, _out4, _out5, _out6, \ + _out7) \ + { \ + __m128i zero = {0}; \ + __m128i shuf8 = {0x0F0E0D0C0B0A0908, 0x1716151413121110}; \ + __m128i _t0, _t1, _t2, _t3, _t4, _t5, _t6, _t7; \ + \ + _t0 = __lsx_vilvl_b(_in2, _in0); \ + _t1 = __lsx_vilvl_b(_in3, _in1); \ + _t2 = __lsx_vilvl_b(_in6, _in4); \ + _t3 = __lsx_vilvl_b(_in7, _in5); \ + _t4 = __lsx_vilvl_b(_t1, _t0); \ + _t5 = __lsx_vilvh_b(_t1, _t0); \ + _t6 = __lsx_vilvl_b(_t3, _t2); \ + _t7 = __lsx_vilvh_b(_t3, _t2); \ + _out0 = __lsx_vilvl_w(_t6, _t4); \ + _out2 = __lsx_vilvh_w(_t6, _t4); \ + _out4 = __lsx_vilvl_w(_t7, _t5); \ + _out6 = __lsx_vilvh_w(_t7, _t5); \ + _out1 = __lsx_vshuf_b(zero, _out0, shuf8); \ + _out3 = __lsx_vshuf_b(zero, _out2, shuf8); \ + _out5 = __lsx_vshuf_b(zero, _out4, shuf8); \ + _out7 = __lsx_vshuf_b(zero, _out6, shuf8); \ + } + +/* + * ============================================================================= + * Description : Transpose 8x8 block with half-word elements in vectors + * Arguments : Inputs - in0, in1, in2, in3, in4, in5, in6, in7 + * Outputs - out0, out1, out2, out3, out4, out5, out6, out7 + * Details : + * Example : + * 00,01,02,03,04,05,06,07 00,10,20,30,40,50,60,70 + * 10,11,12,13,14,15,16,17 01,11,21,31,41,51,61,71 + * 20,21,22,23,24,25,26,27 02,12,22,32,42,52,62,72 + * 30,31,32,33,34,35,36,37 to 03,13,23,33,43,53,63,73 + * 40,41,42,43,44,45,46,47 ======> 04,14,24,34,44,54,64,74 + * 50,51,52,53,54,55,56,57 05,15,25,35,45,55,65,75 + * 60,61,62,63,64,65,66,67 06,16,26,36,46,56,66,76 + * 70,71,72,73,74,75,76,77 07,17,27,37,47,57,67,77 + * ============================================================================= + */ +#define LSX_TRANSPOSE8x8_H(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _out0, _out1, _out2, _out3, _out4, _out5, _out6, \ + _out7) \ + { \ + __m128i _s0, _s1, _t0, _t1, _t2, _t3, _t4, _t5, _t6, _t7; \ + \ + _s0 = __lsx_vilvl_h(_in6, _in4); \ + _s1 = __lsx_vilvl_h(_in7, _in5); \ + _t0 = __lsx_vilvl_h(_s1, _s0); \ + _t1 = __lsx_vilvh_h(_s1, _s0); \ + _s0 = __lsx_vilvh_h(_in6, _in4); \ + _s1 = __lsx_vilvh_h(_in7, _in5); \ + _t2 = 
__lsx_vilvl_h(_s1, _s0); \ + _t3 = __lsx_vilvh_h(_s1, _s0); \ + _s0 = __lsx_vilvl_h(_in2, _in0); \ + _s1 = __lsx_vilvl_h(_in3, _in1); \ + _t4 = __lsx_vilvl_h(_s1, _s0); \ + _t5 = __lsx_vilvh_h(_s1, _s0); \ + _s0 = __lsx_vilvh_h(_in2, _in0); \ + _s1 = __lsx_vilvh_h(_in3, _in1); \ + _t6 = __lsx_vilvl_h(_s1, _s0); \ + _t7 = __lsx_vilvh_h(_s1, _s0); \ + \ + _out0 = __lsx_vpickev_d(_t0, _t4); \ + _out2 = __lsx_vpickev_d(_t1, _t5); \ + _out4 = __lsx_vpickev_d(_t2, _t6); \ + _out6 = __lsx_vpickev_d(_t3, _t7); \ + _out1 = __lsx_vpickod_d(_t0, _t4); \ + _out3 = __lsx_vpickod_d(_t1, _t5); \ + _out5 = __lsx_vpickod_d(_t2, _t6); \ + _out7 = __lsx_vpickod_d(_t3, _t7); \ + } + +/* + * ============================================================================= + * Description : Transpose input 8x4 byte block into 4x8 + * Arguments : Inputs - _in0, _in1, _in2, _in3 (input 8x4 byte block) + * Outputs - _out0, _out1, _out2, _out3 (output 4x8 byte block) + * Return Type - as per RTYPE + * Details : The rows of the matrix become columns, and the columns become + * rows. + * Example : LSX_TRANSPOSE8x4_B + * _in0 : 00,01,02,03,00,00,00,00, 00,00,00,00,00,00,00,00 + * _in1 : 10,11,12,13,00,00,00,00, 00,00,00,00,00,00,00,00 + * _in2 : 20,21,22,23,00,00,00,00, 00,00,00,00,00,00,00,00 + * _in3 : 30,31,32,33,00,00,00,00, 00,00,00,00,00,00,00,00 + * _in4 : 40,41,42,43,00,00,00,00, 00,00,00,00,00,00,00,00 + * _in5 : 50,51,52,53,00,00,00,00, 00,00,00,00,00,00,00,00 + * _in6 : 60,61,62,63,00,00,00,00, 00,00,00,00,00,00,00,00 + * _in7 : 70,71,72,73,00,00,00,00, 00,00,00,00,00,00,00,00 + * + * _out0 : 00,10,20,30,40,50,60,70, 00,00,00,00,00,00,00,00 + * _out1 : 01,11,21,31,41,51,61,71, 00,00,00,00,00,00,00,00 + * _out2 : 02,12,22,32,42,52,62,72, 00,00,00,00,00,00,00,00 + * _out3 : 03,13,23,33,43,53,63,73, 00,00,00,00,00,00,00,00 + * ============================================================================= + */ +#define LSX_TRANSPOSE8x4_B(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _out0, _out1, _out2, _out3) \ + { \ + __m128i _tmp0_m, _tmp1_m, _tmp2_m, _tmp3_m; \ + \ + _tmp0_m = __lsx_vpackev_w(_in4, _in0); \ + _tmp1_m = __lsx_vpackev_w(_in5, _in1); \ + _tmp2_m = __lsx_vilvl_b(_tmp1_m, _tmp0_m); \ + _tmp0_m = __lsx_vpackev_w(_in6, _in2); \ + _tmp1_m = __lsx_vpackev_w(_in7, _in3); \ + \ + _tmp3_m = __lsx_vilvl_b(_tmp1_m, _tmp0_m); \ + _tmp0_m = __lsx_vilvl_h(_tmp3_m, _tmp2_m); \ + _tmp1_m = __lsx_vilvh_h(_tmp3_m, _tmp2_m); \ + \ + _out0 = __lsx_vilvl_w(_tmp1_m, _tmp0_m); \ + _out2 = __lsx_vilvh_w(_tmp1_m, _tmp0_m); \ + _out1 = __lsx_vilvh_d(_out2, _out0); \ + _out3 = __lsx_vilvh_d(_out0, _out2); \ + } + +/* + * ============================================================================= + * Description : Transpose 16x8 block with byte elements in vectors + * Arguments : Inputs - in0, in1, in2, in3, in4, in5, in6, in7, in8 + * in9, in10, in11, in12, in13, in14, in15 + * Outputs - out0, out1, out2, out3, out4, out5, out6, out7 + * Details : + * Example : + * 000,001,002,003,004,005,006,007 + * 008,009,010,011,012,013,014,015 + * 016,017,018,019,020,021,022,023 + * 024,025,026,027,028,029,030,031 + * 032,033,034,035,036,037,038,039 + * 040,041,042,043,044,045,046,047 000,008,...,112,120 + * 048,049,050,051,052,053,054,055 001,009,...,113,121 + * 056,057,058,059,060,061,062,063 to 002,010,...,114,122 + * 064,068,066,067,068,069,070,071 =====> 003,011,...,115,123 + * 072,073,074,075,076,077,078,079 004,012,...,116,124 + * 080,081,082,083,084,085,086,087 005,013,...,117,125 + * 088,089,090,091,092,093,094,095 
006,014,...,118,126 + * 096,097,098,099,100,101,102,103 007,015,...,119,127 + * 104,105,106,107,108,109,110,111 + * 112,113,114,115,116,117,118,119 + * 120,121,122,123,124,125,126,127 + * ============================================================================= + */ +#define LSX_TRANSPOSE16x8_B(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _in8, _in9, _in10, _in11, _in12, _in13, _in14, \ + _in15, _out0, _out1, _out2, _out3, _out4, _out5, \ + _out6, _out7) \ + { \ + __m128i _tmp0, _tmp1, _tmp2, _tmp3, _tmp4, _tmp5, _tmp6, _tmp7; \ + __m128i _t0, _t1, _t2, _t3, _t4, _t5, _t6, _t7; \ + DUP4_ARG2(__lsx_vilvl_b, _in2, _in0, _in3, _in1, _in6, _in4, _in7, _in5, \ + _tmp0, _tmp1, _tmp2, _tmp3); \ + DUP4_ARG2(__lsx_vilvl_b, _in10, _in8, _in11, _in9, _in14, _in12, _in15, \ + _in13, _tmp4, _tmp5, _tmp6, _tmp7); \ + DUP2_ARG2(__lsx_vilvl_b, _tmp1, _tmp0, _tmp3, _tmp2, _t0, _t2); \ + DUP2_ARG2(__lsx_vilvh_b, _tmp1, _tmp0, _tmp3, _tmp2, _t1, _t3); \ + DUP2_ARG2(__lsx_vilvl_b, _tmp5, _tmp4, _tmp7, _tmp6, _t4, _t6); \ + DUP2_ARG2(__lsx_vilvh_b, _tmp5, _tmp4, _tmp7, _tmp6, _t5, _t7); \ + DUP2_ARG2(__lsx_vilvl_w, _t2, _t0, _t3, _t1, _tmp0, _tmp4); \ + DUP2_ARG2(__lsx_vilvh_w, _t2, _t0, _t3, _t1, _tmp2, _tmp6); \ + DUP2_ARG2(__lsx_vilvl_w, _t6, _t4, _t7, _t5, _tmp1, _tmp5); \ + DUP2_ARG2(__lsx_vilvh_w, _t6, _t4, _t7, _t5, _tmp3, _tmp7); \ + DUP2_ARG2(__lsx_vilvl_d, _tmp1, _tmp0, _tmp3, _tmp2, _out0, _out2); \ + DUP2_ARG2(__lsx_vilvh_d, _tmp1, _tmp0, _tmp3, _tmp2, _out1, _out3); \ + DUP2_ARG2(__lsx_vilvl_d, _tmp5, _tmp4, _tmp7, _tmp6, _out4, _out6); \ + DUP2_ARG2(__lsx_vilvh_d, _tmp5, _tmp4, _tmp7, _tmp6, _out5, _out7); \ + } + +/* + * ============================================================================= + * Description : Butterfly of 4 input vectors + * Arguments : Inputs - in0, in1, in2, in3 + * Outputs - out0, out1, out2, out3 + * Details : Butterfly operation + * Example : + * out0 = in0 + in3; + * out1 = in1 + in2; + * out2 = in1 - in2; + * out3 = in0 - in3; + * ============================================================================= + */ +#define LSX_BUTTERFLY_4_B(_in0, _in1, _in2, _in3, _out0, _out1, _out2, _out3) \ + { \ + _out0 = __lsx_vadd_b(_in0, _in3); \ + _out1 = __lsx_vadd_b(_in1, _in2); \ + _out2 = __lsx_vsub_b(_in1, _in2); \ + _out3 = __lsx_vsub_b(_in0, _in3); \ + } +#define LSX_BUTTERFLY_4_H(_in0, _in1, _in2, _in3, _out0, _out1, _out2, _out3) \ + { \ + _out0 = __lsx_vadd_h(_in0, _in3); \ + _out1 = __lsx_vadd_h(_in1, _in2); \ + _out2 = __lsx_vsub_h(_in1, _in2); \ + _out3 = __lsx_vsub_h(_in0, _in3); \ + } +#define LSX_BUTTERFLY_4_W(_in0, _in1, _in2, _in3, _out0, _out1, _out2, _out3) \ + { \ + _out0 = __lsx_vadd_w(_in0, _in3); \ + _out1 = __lsx_vadd_w(_in1, _in2); \ + _out2 = __lsx_vsub_w(_in1, _in2); \ + _out3 = __lsx_vsub_w(_in0, _in3); \ + } +#define LSX_BUTTERFLY_4_D(_in0, _in1, _in2, _in3, _out0, _out1, _out2, _out3) \ + { \ + _out0 = __lsx_vadd_d(_in0, _in3); \ + _out1 = __lsx_vadd_d(_in1, _in2); \ + _out2 = __lsx_vsub_d(_in1, _in2); \ + _out3 = __lsx_vsub_d(_in0, _in3); \ + } + +/* + * ============================================================================= + * Description : Butterfly of 8 input vectors + * Arguments : Inputs - _in0, _in1, _in2, _in3, ~ + * Outputs - _out0, _out1, _out2, _out3, ~ + * Details : Butterfly operation + * Example : + * _out0 = _in0 + _in7; + * _out1 = _in1 + _in6; + * _out2 = _in2 + _in5; + * _out3 = _in3 + _in4; + * _out4 = _in3 - _in4; + * _out5 = _in2 - _in5; + * _out6 = _in1 - _in6; + * _out7 = _in0 - _in7; + * 
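Editor's note: the butterfly macros above are the add/sub stage used by DCT-style transforms. A hedged sketch of a single stage on halfword vectors (LoongArch-only; the loads and stores are assumed intrinsics, the macro comes from this header):

#if defined(__loongarch_sx)
#include <lsxintrin.h>  // assumed LSX intrinsics header

// Hedged sketch: one butterfly stage over four vectors of halfwords,
// i.e. out0 = in0 + in3, out1 = in1 + in2, out2 = in1 - in2, out3 = in0 - in3.
static void ButterflyStage(int16_t* data /* 32 halfwords */) {
  __m128i in0 = __lsx_vld(data, 0);
  __m128i in1 = __lsx_vld(data, 16);
  __m128i in2 = __lsx_vld(data, 32);
  __m128i in3 = __lsx_vld(data, 48);
  __m128i out0, out1, out2, out3;
  LSX_BUTTERFLY_4_H(in0, in1, in2, in3, out0, out1, out2, out3);
  __lsx_vst(out0, data, 0);
  __lsx_vst(out1, data, 16);
  __lsx_vst(out2, data, 32);
  __lsx_vst(out3, data, 48);
}
#endif  // __loongarch_sx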
============================================================================= + */ +#define LSX_BUTTERFLY_8_B(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _out0, _out1, _out2, _out3, _out4, _out5, _out6, \ + _out7) \ + { \ + _out0 = __lsx_vadd_b(_in0, _in7); \ + _out1 = __lsx_vadd_b(_in1, _in6); \ + _out2 = __lsx_vadd_b(_in2, _in5); \ + _out3 = __lsx_vadd_b(_in3, _in4); \ + _out4 = __lsx_vsub_b(_in3, _in4); \ + _out5 = __lsx_vsub_b(_in2, _in5); \ + _out6 = __lsx_vsub_b(_in1, _in6); \ + _out7 = __lsx_vsub_b(_in0, _in7); \ + } + +#define LSX_BUTTERFLY_8_H(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _out0, _out1, _out2, _out3, _out4, _out5, _out6, \ + _out7) \ + { \ + _out0 = __lsx_vadd_h(_in0, _in7); \ + _out1 = __lsx_vadd_h(_in1, _in6); \ + _out2 = __lsx_vadd_h(_in2, _in5); \ + _out3 = __lsx_vadd_h(_in3, _in4); \ + _out4 = __lsx_vsub_h(_in3, _in4); \ + _out5 = __lsx_vsub_h(_in2, _in5); \ + _out6 = __lsx_vsub_h(_in1, _in6); \ + _out7 = __lsx_vsub_h(_in0, _in7); \ + } + +#define LSX_BUTTERFLY_8_W(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _out0, _out1, _out2, _out3, _out4, _out5, _out6, \ + _out7) \ + { \ + _out0 = __lsx_vadd_w(_in0, _in7); \ + _out1 = __lsx_vadd_w(_in1, _in6); \ + _out2 = __lsx_vadd_w(_in2, _in5); \ + _out3 = __lsx_vadd_w(_in3, _in4); \ + _out4 = __lsx_vsub_w(_in3, _in4); \ + _out5 = __lsx_vsub_w(_in2, _in5); \ + _out6 = __lsx_vsub_w(_in1, _in6); \ + _out7 = __lsx_vsub_w(_in0, _in7); \ + } + +#define LSX_BUTTERFLY_8_D(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _out0, _out1, _out2, _out3, _out4, _out5, _out6, \ + _out7) \ + { \ + _out0 = __lsx_vadd_d(_in0, _in7); \ + _out1 = __lsx_vadd_d(_in1, _in6); \ + _out2 = __lsx_vadd_d(_in2, _in5); \ + _out3 = __lsx_vadd_d(_in3, _in4); \ + _out4 = __lsx_vsub_d(_in3, _in4); \ + _out5 = __lsx_vsub_d(_in2, _in5); \ + _out6 = __lsx_vsub_d(_in1, _in6); \ + _out7 = __lsx_vsub_d(_in0, _in7); \ + } + +#endif // LSX + +#ifdef __loongarch_asx +#include +/* + * ============================================================================= + * Description : Dot product of byte vector elements + * Arguments : Inputs - in_h, in_l + * Output - out + * Return Type - signed halfword + * Details : Unsigned byte elements from in_h are multiplied with + * unsigned byte elements from in_l producing a result + * twice the size of input i.e. signed halfword. + * Then this multiplied results of adjacent odd-even elements + * are added to the out vector + * Example : See out = __lasx_xvdp2_w_h(in_h, in_l) + * ============================================================================= + */ +static inline __m256i __lasx_xvdp2_h_bu(__m256i in_h, __m256i in_l) { + __m256i out; + + out = __lasx_xvmulwev_h_bu(in_h, in_l); + out = __lasx_xvmaddwod_h_bu(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product of byte vector elements + * Arguments : Inputs - in_h, in_l + * Output - out + * Return Type - signed halfword + * Details : Signed byte elements from in_h are multiplied with + * signed byte elements from in_l producing a result + * twice the size of input i.e. signed halfword. 
+ * Then this multiplication results of adjacent odd-even elements + * are added to the out vector + * Example : See out = __lasx_xvdp2_w_h(in_h, in_l) + * ============================================================================= + */ +static inline __m256i __lasx_xvdp2_h_b(__m256i in_h, __m256i in_l) { + __m256i out; + + out = __lasx_xvmulwev_h_b(in_h, in_l); + out = __lasx_xvmaddwod_h_b(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product of halfword vector elements + * Arguments : Inputs - in_h, in_l + * Output - out + * Return Type - signed word + * Details : Signed halfword elements from in_h are multiplied with + * signed halfword elements from in_l producing a result + * twice the size of input i.e. signed word. + * Then this multiplied results of adjacent odd-even elements + * are added to the out vector. + * Example : out = __lasx_xvdp2_w_h(in_h, in_l) + * in_h : 1,2,3,4, 5,6,7,8, 1,2,3,4, 5,6,7,8 + * in_l : 8,7,6,5, 4,3,2,1, 8,7,6,5, 4,3,2,1 + * out : 22,38,38,22, 22,38,38,22 + * ============================================================================= + */ +static inline __m256i __lasx_xvdp2_w_h(__m256i in_h, __m256i in_l) { + __m256i out; + + out = __lasx_xvmulwev_w_h(in_h, in_l); + out = __lasx_xvmaddwod_w_h(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product of word vector elements + * Arguments : Inputs - in_h, in_l + * Output - out + * Return Type - signed double + * Details : Signed word elements from in_h are multiplied with + * signed word elements from in_l producing a result + * twice the size of input i.e. signed double-word. + * Then this multiplied results of adjacent odd-even elements + * are added to the out vector. + * Example : See out = __lasx_xvdp2_w_h(in_h, in_l) + * ============================================================================= + */ +static inline __m256i __lasx_xvdp2_d_w(__m256i in_h, __m256i in_l) { + __m256i out; + + out = __lasx_xvmulwev_d_w(in_h, in_l); + out = __lasx_xvmaddwod_d_w(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product of halfword vector elements + * Arguments : Inputs - in_h, in_l + * Output - out + * Return Type - signed word + * Details : Unsigned halfword elements from in_h are multiplied with + * signed halfword elements from in_l producing a result + * twice the size of input i.e. unsigned word. + * Multiplication result of adjacent odd-even elements + * are added to the out vector + * Example : See out = __lasx_xvdp2_w_h(in_h, in_l) + * ============================================================================= + */ +static inline __m256i __lasx_xvdp2_w_hu_h(__m256i in_h, __m256i in_l) { + __m256i out; + + out = __lasx_xvmulwev_w_hu_h(in_h, in_l); + out = __lasx_xvmaddwod_w_hu_h(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product & addition of byte vector elements + * Arguments : Inputs - in_h, in_l + * Output - out + * Return Type - halfword + * Details : Signed byte elements from in_h are multiplied with + * signed byte elements from in_l producing a result + * twice the size of input i.e. signed halfword. + * Then this multiplied results of adjacent odd-even elements + * are added to the in_c vector. 
+ * Example : See out = __lasx_xvdp2add_w_h(in_c, in_h, in_l) + * ============================================================================= + */ +static inline __m256i __lasx_xvdp2add_h_b(__m256i in_c, + __m256i in_h, + __m256i in_l) { + __m256i out; + + out = __lasx_xvmaddwev_h_b(in_c, in_h, in_l); + out = __lasx_xvmaddwod_h_b(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product & addition of byte vector elements + * Arguments : Inputs - in_h, in_l + * Output - out + * Return Type - halfword + * Details : Unsigned byte elements from in_h are multiplied with + * unsigned byte elements from in_l producing a result + * twice the size of input i.e. signed halfword. + * Then this multiplied results of adjacent odd-even elements + * are added to the in_c vector. + * Example : See out = __lasx_xvdp2add_w_h(in_c, in_h, in_l) + * ============================================================================= + */ +static inline __m256i __lasx_xvdp2add_h_bu(__m256i in_c, + __m256i in_h, + __m256i in_l) { + __m256i out; + + out = __lasx_xvmaddwev_h_bu(in_c, in_h, in_l); + out = __lasx_xvmaddwod_h_bu(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product & addition of byte vector elements + * Arguments : Inputs - in_h, in_l + * Output - out + * Return Type - halfword + * Details : Unsigned byte elements from in_h are multiplied with + * signed byte elements from in_l producing a result + * twice the size of input i.e. signed halfword. + * Then this multiplied results of adjacent odd-even elements + * are added to the in_c vector. + * Example : See out = __lasx_xvdp2add_w_h(in_c, in_h, in_l) + * ============================================================================= + */ +static inline __m256i __lasx_xvdp2add_h_bu_b(__m256i in_c, + __m256i in_h, + __m256i in_l) { + __m256i out; + + out = __lasx_xvmaddwev_h_bu_b(in_c, in_h, in_l); + out = __lasx_xvmaddwod_h_bu_b(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product of halfword vector elements + * Arguments : Inputs - in_c, in_h, in_l + * Output - out + * Return Type - per RTYPE + * Details : Signed halfword elements from in_h are multiplied with + * signed halfword elements from in_l producing a result + * twice the size of input i.e. signed word. + * Multiplication result of adjacent odd-even elements + * are added to the in_c vector. + * Example : out = __lasx_xvdp2add_w_h(in_c, in_h, in_l) + * in_c : 1,2,3,4, 1,2,3,4 + * in_h : 1,2,3,4, 5,6,7,8, 1,2,3,4, 5,6,7,8, + * in_l : 8,7,6,5, 4,3,2,1, 8,7,6,5, 4,3,2,1, + * out : 23,40,41,26, 23,40,41,26 + * ============================================================================= + */ +static inline __m256i __lasx_xvdp2add_w_h(__m256i in_c, + __m256i in_h, + __m256i in_l) { + __m256i out; + + out = __lasx_xvmaddwev_w_h(in_c, in_h, in_l); + out = __lasx_xvmaddwod_w_h(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product of halfword vector elements + * Arguments : Inputs - in_c, in_h, in_l + * Output - out + * Return Type - signed word + * Details : Unsigned halfword elements from in_h are multiplied with + * unsigned halfword elements from in_l producing a result + * twice the size of input i.e. signed word. 
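Editor's note: the LASX helpers above are the 256-bit counterparts of the LSX ones; a typical use is folding halfword products into a word accumulator across a row. A hedged sketch (LoongArch-only; the function shape and __lasx_xvld are assumptions outside this header):

#if defined(__loongarch_asx)
#include <lasxintrin.h>  // assumed LASX intrinsics header

// Hedged sketch: multiply-accumulate coefficient and sample vectors into a
// running 32-bit accumulator using the dot-product helper defined above.
static __m256i AccumulateRow(const int16_t* coeff, const int16_t* samples,
                             int vectors, __m256i acc) {
  for (int i = 0; i < vectors; ++i) {
    __m256i c = __lasx_xvld((void*)(coeff + i * 16), 0);
    __m256i s = __lasx_xvld((void*)(samples + i * 16), 0);
    acc = __lasx_xvdp2add_w_h(acc, c, s);  // acc += adjacent products of c*s
  }
  return acc;
}
#endif  // __loongarch_asx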
+ * Multiplication result of adjacent odd-even elements + * are added to the in_c vector. + * Example : See out = __lasx_xvdp2add_w_h(in_c, in_h, in_l) + * ============================================================================= + */ +static inline __m256i __lasx_xvdp2add_w_hu(__m256i in_c, + __m256i in_h, + __m256i in_l) { + __m256i out; + + out = __lasx_xvmaddwev_w_hu(in_c, in_h, in_l); + out = __lasx_xvmaddwod_w_hu(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Dot product of halfword vector elements + * Arguments : Inputs - in_c, in_h, in_l + * Output - out + * Return Type - signed word + * Details : Unsigned halfword elements from in_h are multiplied with + * signed halfword elements from in_l producing a result + * twice the size of input i.e. signed word. + * Multiplication result of adjacent odd-even elements + * are added to the in_c vector + * Example : See out = __lasx_xvdp2add_w_h(in_c, in_h, in_l) + * ============================================================================= + */ +static inline __m256i __lasx_xvdp2add_w_hu_h(__m256i in_c, + __m256i in_h, + __m256i in_l) { + __m256i out; + + out = __lasx_xvmaddwev_w_hu_h(in_c, in_h, in_l); + out = __lasx_xvmaddwod_w_hu_h(out, in_h, in_l); + return out; +} + +/* + * ============================================================================= + * Description : Vector Unsigned Dot Product and Subtract + * Arguments : Inputs - in_c, in_h, in_l + * Output - out + * Return Type - signed halfword + * Details : Unsigned byte elements from in_h are multiplied with + * unsigned byte elements from in_l producing a result + * twice the size of input i.e. signed halfword. + * Multiplication result of adjacent odd-even elements + * are added together and subtracted from double width elements + * in_c vector. + * Example : See out = __lasx_xvdp2sub_w_h(in_c, in_h, in_l) + * ============================================================================= + */ +static inline __m256i __lasx_xvdp2sub_h_bu(__m256i in_c, + __m256i in_h, + __m256i in_l) { + __m256i out; + + out = __lasx_xvmulwev_h_bu(in_h, in_l); + out = __lasx_xvmaddwod_h_bu(out, in_h, in_l); + out = __lasx_xvsub_h(in_c, out); + return out; +} + +/* + * ============================================================================= + * Description : Vector Signed Dot Product and Subtract + * Arguments : Inputs - in_c, in_h, in_l + * Output - out + * Return Type - signed word + * Details : Signed halfword elements from in_h are multiplied with + * Signed halfword elements from in_l producing a result + * twice the size of input i.e. signed word. + * Multiplication result of adjacent odd-even elements + * are added together and subtracted from double width elements + * in_c vector. 
+ * Example : out = __lasx_xvdp2sub_w_h(in_c, in_h, in_l) + * in_c : 0,0,0,0, 0,0,0,0 + * in_h : 3,1,3,0, 0,0,0,1, 0,0,1,1, 0,0,0,1 + * in_l : 2,1,1,0, 1,0,0,0, 0,0,1,0, 1,0,0,1 + * out : -7,-3,0,0, 0,-1,0,-1 + * ============================================================================= + */ +static inline __m256i __lasx_xvdp2sub_w_h(__m256i in_c, + __m256i in_h, + __m256i in_l) { + __m256i out; + + out = __lasx_xvmulwev_w_h(in_h, in_l); + out = __lasx_xvmaddwod_w_h(out, in_h, in_l); + out = __lasx_xvsub_w(in_c, out); + return out; +} + +/* + * ============================================================================= + * Description : Dot product of halfword vector elements + * Arguments : Inputs - in_h, in_l + * Output - out + * Return Type - signed word + * Details : Signed halfword elements from in_h are multiplied with + * signed halfword elements from in_l producing a result + * four times the size of input i.e. signed doubleword. + * Then this multiplication results of four adjacent elements + * are added together and stored to the out vector. + * Example : out = __lasx_xvdp4_d_h(in_h, in_l) + * in_h : 3,1,3,0, 0,0,0,1, 0,0,1,-1, 0,0,0,1 + * in_l : -2,1,1,0, 1,0,0,0, 0,0,1, 0, 1,0,0,1 + * out : -2,0,1,1 + * ============================================================================= + */ +static inline __m256i __lasx_xvdp4_d_h(__m256i in_h, __m256i in_l) { + __m256i out; + + out = __lasx_xvmulwev_w_h(in_h, in_l); + out = __lasx_xvmaddwod_w_h(out, in_h, in_l); + out = __lasx_xvhaddw_d_w(out, out); + return out; +} + +/* + * ============================================================================= + * Description : The high half of the vector elements are expanded and + * added after being doubled. + * Arguments : Inputs - in_h, in_l + * Output - out + * Details : The in_h vector and the in_l vector are added after the + * higher half of the two-fold sign extension (signed byte + * to signed halfword) and stored to the out vector. + * Example : See out = __lasx_xvaddwh_w_h(in_h, in_l) + * ============================================================================= + */ +static inline __m256i __lasx_xvaddwh_h_b(__m256i in_h, __m256i in_l) { + __m256i out; + + out = __lasx_xvilvh_b(in_h, in_l); + out = __lasx_xvhaddw_h_b(out, out); + return out; +} + +/* + * ============================================================================= + * Description : The high half of the vector elements are expanded and + * added after being doubled. + * Arguments : Inputs - in_h, in_l + * Output - out + * Details : The in_h vector and the in_l vector are added after the + * higher half of the two-fold sign extension (signed halfword + * to signed word) and stored to the out vector. + * Example : out = __lasx_xvaddwh_w_h(in_h, in_l) + * in_h : 3, 0,3,0, 0,0,0,-1, 0,0,1,-1, 0,0,0,1 + * in_l : 2,-1,1,2, 1,0,0, 0, 1,0,1, 0, 1,0,0,1 + * out : 1,0,0,-1, 1,0,0, 2 + * ============================================================================= + */ +static inline __m256i __lasx_xvaddwh_w_h(__m256i in_h, __m256i in_l) { + __m256i out; + + out = __lasx_xvilvh_h(in_h, in_l); + out = __lasx_xvhaddw_w_h(out, out); + return out; +} + +/* + * ============================================================================= + * Description : The low half of the vector elements are expanded and + * added after being doubled. 
+ * Arguments : Inputs - in_h, in_l + * Output - out + * Details : The in_h vector and the in_l vector are added after the + * lower half of the two-fold sign extension (signed byte + * to signed halfword) and stored to the out vector. + * Example : See out = __lasx_xvaddwl_w_h(in_h, in_l) + * ============================================================================= + */ +static inline __m256i __lasx_xvaddwl_h_b(__m256i in_h, __m256i in_l) { + __m256i out; + + out = __lasx_xvilvl_b(in_h, in_l); + out = __lasx_xvhaddw_h_b(out, out); + return out; +} + +/* + * ============================================================================= + * Description : The low half of the vector elements are expanded and + * added after being doubled. + * Arguments : Inputs - in_h, in_l + * Output - out + * Details : The in_h vector and the in_l vector are added after the + * lower half of the two-fold sign extension (signed halfword + * to signed word) and stored to the out vector. + * Example : out = __lasx_xvaddwl_w_h(in_h, in_l) + * in_h : 3, 0,3,0, 0,0,0,-1, 0,0,1,-1, 0,0,0,1 + * in_l : 2,-1,1,2, 1,0,0, 0, 1,0,1, 0, 1,0,0,1 + * out : 5,-1,4,2, 1,0,2,-1 + * ============================================================================= + */ +static inline __m256i __lasx_xvaddwl_w_h(__m256i in_h, __m256i in_l) { + __m256i out; + + out = __lasx_xvilvl_h(in_h, in_l); + out = __lasx_xvhaddw_w_h(out, out); + return out; +} + +/* + * ============================================================================= + * Description : The low half of the vector elements are expanded and + * added after being doubled. + * Arguments : Inputs - in_h, in_l + * Output - out + * Details : The out vector and the out vector are added after the + * lower half of the two-fold zero extension (unsigned byte + * to unsigned halfword) and stored to the out vector. + * Example : See out = __lasx_xvaddwl_w_h(in_h, in_l) + * ============================================================================= + */ +static inline __m256i __lasx_xvaddwl_h_bu(__m256i in_h, __m256i in_l) { + __m256i out; + + out = __lasx_xvilvl_b(in_h, in_l); + out = __lasx_xvhaddw_hu_bu(out, out); + return out; +} + +/* + * ============================================================================= + * Description : The low half of the vector elements are expanded and + * added after being doubled. + * Arguments : Inputs - in_h, in_l + * Output - out + * Details : The in_l vector after double zero extension (unsigned byte to + * signed halfword),added to the in_h vector. + * Example : See out = __lasx_xvaddw_w_w_h(in_h, in_l) + * ============================================================================= + */ +static inline __m256i __lasx_xvaddw_h_h_bu(__m256i in_h, __m256i in_l) { + __m256i out; + + out = __lasx_xvsllwil_hu_bu(in_l, 0); + out = __lasx_xvadd_h(in_h, out); + return out; +} + +/* + * ============================================================================= + * Description : The low half of the vector elements are expanded and + * added after being doubled. + * Arguments : Inputs - in_h, in_l + * Output - out + * Details : The in_l vector after double sign extension (signed halfword to + * signed word), added to the in_h vector. 
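Editor's note: the xvaddwl_* and xvaddwh_* helpers above split a widening add into the low and high portions of the inputs, so widening a whole register takes one call to each. A hedged sketch (LoongArch-only; __lasx_xvld and the function shape are assumptions):

#if defined(__loongarch_asx)
#include <lasxintrin.h>  // assumed LASX intrinsics header

// Hedged sketch: add two vectors of halfwords while widening to words, using
// the low-half and high-half helpers defined above.
static void WideningAdd(const int16_t* a, const int16_t* b,
                        __m256i* lo_sum, __m256i* hi_sum) {
  __m256i va = __lasx_xvld((void*)a, 0);
  __m256i vb = __lasx_xvld((void*)b, 0);
  *lo_sum = __lasx_xvaddwl_w_h(va, vb);  // sums from the low halfword halves
  *hi_sum = __lasx_xvaddwh_w_h(va, vb);  // sums from the high halfword halves
}
#endif  // __loongarch_asx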
+ * Example : out = __lasx_xvaddw_w_w_h(in_h, in_l) + * in_h : 0, 1,0,0, -1,0,0,1, + * in_l : 2,-1,1,2, 1,0,0,0, 0,0,1,0, 1,0,0,1, + * out : 2, 0,1,2, -1,0,1,1, + * ============================================================================= + */ +static inline __m256i __lasx_xvaddw_w_w_h(__m256i in_h, __m256i in_l) { + __m256i out; + + out = __lasx_xvsllwil_w_h(in_l, 0); + out = __lasx_xvadd_w(in_h, out); + return out; +} + +/* + * ============================================================================= + * Description : Multiplication and addition calculation after expansion + * of the lower half of the vector. + * Arguments : Inputs - in_c, in_h, in_l + * Output - out + * Details : The in_h vector and the in_l vector are multiplied after + * the lower half of the two-fold sign extension (signed halfword + * to signed word), and the result is added to the vector in_c, + * then stored to the out vector. + * Example : out = __lasx_xvmaddwl_w_h(in_c, in_h, in_l) + * in_c : 1,2,3,4, 5,6,7,8 + * in_h : 1,2,3,4, 1,2,3,4, 5,6,7,8, 5,6,7,8 + * in_l : 200, 300, 400, 500, 2000, 3000, 4000, 5000, + * -200,-300,-400,-500, -2000,-3000,-4000,-5000 + * out : 201, 602,1203,2004, -995, -1794,-2793,-3992 + * ============================================================================= + */ +static inline __m256i __lasx_xvmaddwl_w_h(__m256i in_c, + __m256i in_h, + __m256i in_l) { + __m256i tmp0, tmp1, out; + + tmp0 = __lasx_xvsllwil_w_h(in_h, 0); + tmp1 = __lasx_xvsllwil_w_h(in_l, 0); + tmp0 = __lasx_xvmul_w(tmp0, tmp1); + out = __lasx_xvadd_w(tmp0, in_c); + return out; +} + +/* + * ============================================================================= + * Description : Multiplication and addition calculation after expansion + * of the higher half of the vector. + * Arguments : Inputs - in_c, in_h, in_l + * Output - out + * Details : The in_h vector and the in_l vector are multiplied after + * the higher half of the two-fold sign extension (signed + * halfword to signed word), and the result is added to + * the vector in_c, then stored to the out vector. + * Example : See out = __lasx_xvmaddwl_w_h(in_c, in_h, in_l) + * ============================================================================= + */ +static inline __m256i __lasx_xvmaddwh_w_h(__m256i in_c, + __m256i in_h, + __m256i in_l) { + __m256i tmp0, tmp1, out; + + tmp0 = __lasx_xvilvh_h(in_h, in_h); + tmp1 = __lasx_xvilvh_h(in_l, in_l); + tmp0 = __lasx_xvmulwev_w_h(tmp0, tmp1); + out = __lasx_xvadd_w(tmp0, in_c); + return out; +} + +/* + * ============================================================================= + * Description : Multiplication calculation after expansion of the lower + * half of the vector. + * Arguments : Inputs - in_h, in_l + * Output - out + * Details : The in_h vector and the in_l vector are multiplied after + * the lower half of the two-fold sign extension (signed + * halfword to signed word), then stored to the out vector. 
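Editor's note: xvmaddwl_w_h and xvmaddwh_w_h above cover the two halves of a widening multiply-add, so a full register of halfword products can be folded into two word accumulators. A hedged sketch (LoongArch-only; __lasx_xvld and the function shape are assumptions):

#if defined(__loongarch_asx)
#include <lasxintrin.h>  // assumed LASX intrinsics header

// Hedged sketch: accumulate halfword-by-halfword products into two 32-bit
// accumulators using the low-half and high-half helpers defined above.
static void MultiplyAccumulate(const int16_t* a, const int16_t* b,
                               __m256i* acc_lo, __m256i* acc_hi) {
  __m256i va = __lasx_xvld((void*)a, 0);
  __m256i vb = __lasx_xvld((void*)b, 0);
  *acc_lo = __lasx_xvmaddwl_w_h(*acc_lo, va, vb);  // low-half products
  *acc_hi = __lasx_xvmaddwh_w_h(*acc_hi, va, vb);  // high-half products
}
#endif  // __loongarch_asx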
+ * Example : out = __lasx_xvmulwl_w_h(in_h, in_l) + * in_h : 3,-1,3,0, 0,0,0,-1, 0,0,1,-1, 0,0,0,1 + * in_l : 2,-1,1,2, 1,0,0, 0, 0,0,1, 0, 1,0,0,1 + * out : 6,1,3,0, 0,0,1,0 + * ============================================================================= + */ +static inline __m256i __lasx_xvmulwl_w_h(__m256i in_h, __m256i in_l) { + __m256i tmp0, tmp1, out; + + tmp0 = __lasx_xvsllwil_w_h(in_h, 0); + tmp1 = __lasx_xvsllwil_w_h(in_l, 0); + out = __lasx_xvmul_w(tmp0, tmp1); + return out; +} + +/* + * ============================================================================= + * Description : Multiplication calculation after expansion of the lower + * half of the vector. + * Arguments : Inputs - in_h, in_l + * Output - out + * Details : The in_h vector and the in_l vector are multiplied after + * the lower half of the two-fold sign extension (signed + * halfword to signed word), then stored to the out vector. + * Example : out = __lasx_xvmulwh_w_h(in_h, in_l) + * in_h : 3,-1,3,0, 0,0,0,-1, 0,0,1,-1, 0,0,0,1 + * in_l : 2,-1,1,2, 1,0,0, 0, 0,0,1, 0, 1,0,0,1 + * out : 0,0,0,0, 0,0,0,1 + * ============================================================================= + */ +static inline __m256i __lasx_xvmulwh_w_h(__m256i in_h, __m256i in_l) { + __m256i tmp0, tmp1, out; + + tmp0 = __lasx_xvilvh_h(in_h, in_h); + tmp1 = __lasx_xvilvh_h(in_l, in_l); + out = __lasx_xvmulwev_w_h(tmp0, tmp1); + return out; +} + +/* + * ============================================================================= + * Description : The low half of the vector elements are added to the high half + * after being doubled, then saturated. + * Arguments : Inputs - in_h, in_l + * Output - out + * Details : The in_h vector adds the in_l vector after the lower half of + * the two-fold zero extension (unsigned byte to unsigned + * halfword) and then saturated. The results are stored to the out + * vector. + * Example : out = __lasx_xvsaddw_hu_hu_bu(in_h, in_l) + * in_h : 2,65532,1,2, 1,0,0,0, 0,0,1,0, 1,0,0,1 + * in_l : 3,6,3,0, 0,0,0,1, 0,0,1,1, 0,0,0,1, 3,18,3,0, 0,0,0,1, 0,0,1,1, + * 0,0,0,1 + * out : 5,65535,4,2, 1,0,0,1, 3,18,4,0, 1,0,0,2, + * ============================================================================= + */ +static inline __m256i __lasx_xvsaddw_hu_hu_bu(__m256i in_h, __m256i in_l) { + __m256i tmp1, out; + __m256i zero = {0}; + + tmp1 = __lasx_xvilvl_b(zero, in_l); + out = __lasx_xvsadd_hu(in_h, tmp1); + return out; +} + +/* + * ============================================================================= + * Description : Clip all halfword elements of input vector between min & max + * out = ((in) < (min)) ? (min) : (((in) > (max)) ? 
(max) : (in)) + * Arguments : Inputs - in (input vector) + * - min (min threshold) + * - max (max threshold) + * Outputs - in (output vector with clipped elements) + * Return Type - signed halfword + * Example : out = __lasx_xvclip_h(in, min, max) + * in : -8,2,280,249, -8,255,280,249, 4,4,4,4, 5,5,5,5 + * min : 1,1,1,1, 1,1,1,1, 1,1,1,1, 1,1,1,1 + * max : 9,9,9,9, 9,9,9,9, 9,9,9,9, 9,9,9,9 + * out : 1,2,9,9, 1,9,9,9, 4,4,4,4, 5,5,5,5 + * ============================================================================= + */ +static inline __m256i __lasx_xvclip_h(__m256i in, __m256i min, __m256i max) { + __m256i out; + + out = __lasx_xvmax_h(min, in); + out = __lasx_xvmin_h(max, out); + return out; +} + +/* + * ============================================================================= + * Description : Clip all signed halfword elements of input vector + * between 0 & 255 + * Arguments : Inputs - in (input vector) + * Outputs - out (output vector with clipped elements) + * Return Type - signed halfword + * Example : See out = __lasx_xvclip255_w(in) + * ============================================================================= + */ +static inline __m256i __lasx_xvclip255_h(__m256i in) { + __m256i out; + + out = __lasx_xvmaxi_h(in, 0); + out = __lasx_xvsat_hu(out, 7); + return out; +} + +/* + * ============================================================================= + * Description : Clip all signed word elements of input vector + * between 0 & 255 + * Arguments : Inputs - in (input vector) + * Output - out (output vector with clipped elements) + * Return Type - signed word + * Example : out = __lasx_xvclip255_w(in) + * in : -8,255,280,249, -8,255,280,249 + * out : 0,255,255,249, 0,255,255,249 + * ============================================================================= + */ +static inline __m256i __lasx_xvclip255_w(__m256i in) { + __m256i out; + + out = __lasx_xvmaxi_w(in, 0); + out = __lasx_xvsat_wu(out, 7); + return out; +} + +/* + * ============================================================================= + * Description : Indexed halfword element values are replicated to all + * elements in output vector. If 'idx < 8' use xvsplati_l_*, + * if 'idx >= 8' use xvsplati_h_*. + * Arguments : Inputs - in, idx + * Output - out + * Details : Idx element value from in vector is replicated to all + * elements in out vector. + * Valid index range for halfword operation is 0-7 + * Example : out = __lasx_xvsplati_l_h(in, idx) + * in : 20,10,11,12, 13,14,15,16, 0,0,2,0, 0,0,0,0 + * idx : 0x02 + * out : 11,11,11,11, 11,11,11,11, 11,11,11,11, 11,11,11,11 + * ============================================================================= + */ +static inline __m256i __lasx_xvsplati_l_h(__m256i in, int idx) { + __m256i out; + + out = __lasx_xvpermi_q(in, in, 0x02); + out = __lasx_xvreplve_h(out, idx); + return out; +} + +/* + * ============================================================================= + * Description : Indexed halfword element values are replicated to all + * elements in output vector. If 'idx < 8' use xvsplati_l_*, + * if 'idx >= 8' use xvsplati_h_*. + * Arguments : Inputs - in, idx + * Output - out + * Details : Idx element value from in vector is replicated to all + * elements in out vector. 
+ * Valid index range for halfword operation is 0-7 + * Example : out = __lasx_xvsplati_h_h(in, idx) + * in : 20,10,11,12, 13,14,15,16, 0,2,0,0, 0,0,0,0 + * idx : 0x09 + * out : 2,2,2,2, 2,2,2,2, 2,2,2,2, 2,2,2,2 + * ============================================================================= + */ +static inline __m256i __lasx_xvsplati_h_h(__m256i in, int idx) { + __m256i out; + + out = __lasx_xvpermi_q(in, in, 0x13); + out = __lasx_xvreplve_h(out, idx); + return out; +} + +/* + * ============================================================================= + * Description : Transpose 4x4 block with double-word elements in vectors + * Arguments : Inputs - _in0, _in1, _in2, _in3 + * Outputs - _out0, _out1, _out2, _out3 + * Example : LASX_TRANSPOSE4x4_D + * _in0 : 1,2,3,4 + * _in1 : 1,2,3,4 + * _in2 : 1,2,3,4 + * _in3 : 1,2,3,4 + * + * _out0 : 1,1,1,1 + * _out1 : 2,2,2,2 + * _out2 : 3,3,3,3 + * _out3 : 4,4,4,4 + * ============================================================================= + */ +#define LASX_TRANSPOSE4x4_D(_in0, _in1, _in2, _in3, _out0, _out1, _out2, \ + _out3) \ + { \ + __m256i _tmp0, _tmp1, _tmp2, _tmp3; \ + _tmp0 = __lasx_xvilvl_d(_in1, _in0); \ + _tmp1 = __lasx_xvilvh_d(_in1, _in0); \ + _tmp2 = __lasx_xvilvl_d(_in3, _in2); \ + _tmp3 = __lasx_xvilvh_d(_in3, _in2); \ + _out0 = __lasx_xvpermi_q(_tmp2, _tmp0, 0x20); \ + _out2 = __lasx_xvpermi_q(_tmp2, _tmp0, 0x31); \ + _out1 = __lasx_xvpermi_q(_tmp3, _tmp1, 0x20); \ + _out3 = __lasx_xvpermi_q(_tmp3, _tmp1, 0x31); \ + } + +/* + * ============================================================================= + * Description : Transpose 8x8 block with word elements in vectors + * Arguments : Inputs - _in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7 + * Outputs - _out0, _out1, _out2, _out3, _out4, _out5, _out6, + * _out7 + * Example : LASX_TRANSPOSE8x8_W + * _in0 : 1,2,3,4,5,6,7,8 + * _in1 : 2,2,3,4,5,6,7,8 + * _in2 : 3,2,3,4,5,6,7,8 + * _in3 : 4,2,3,4,5,6,7,8 + * _in4 : 5,2,3,4,5,6,7,8 + * _in5 : 6,2,3,4,5,6,7,8 + * _in6 : 7,2,3,4,5,6,7,8 + * _in7 : 8,2,3,4,5,6,7,8 + * + * _out0 : 1,2,3,4,5,6,7,8 + * _out1 : 2,2,2,2,2,2,2,2 + * _out2 : 3,3,3,3,3,3,3,3 + * _out3 : 4,4,4,4,4,4,4,4 + * _out4 : 5,5,5,5,5,5,5,5 + * _out5 : 6,6,6,6,6,6,6,6 + * _out6 : 7,7,7,7,7,7,7,7 + * _out7 : 8,8,8,8,8,8,8,8 + * ============================================================================= + */ +#define LASX_TRANSPOSE8x8_W(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _out0, _out1, _out2, _out3, _out4, _out5, _out6, \ + _out7) \ + { \ + __m256i _s0_m, _s1_m; \ + __m256i _tmp0_m, _tmp1_m, _tmp2_m, _tmp3_m; \ + __m256i _tmp4_m, _tmp5_m, _tmp6_m, _tmp7_m; \ + \ + _s0_m = __lasx_xvilvl_w(_in2, _in0); \ + _s1_m = __lasx_xvilvl_w(_in3, _in1); \ + _tmp0_m = __lasx_xvilvl_w(_s1_m, _s0_m); \ + _tmp1_m = __lasx_xvilvh_w(_s1_m, _s0_m); \ + _s0_m = __lasx_xvilvh_w(_in2, _in0); \ + _s1_m = __lasx_xvilvh_w(_in3, _in1); \ + _tmp2_m = __lasx_xvilvl_w(_s1_m, _s0_m); \ + _tmp3_m = __lasx_xvilvh_w(_s1_m, _s0_m); \ + _s0_m = __lasx_xvilvl_w(_in6, _in4); \ + _s1_m = __lasx_xvilvl_w(_in7, _in5); \ + _tmp4_m = __lasx_xvilvl_w(_s1_m, _s0_m); \ + _tmp5_m = __lasx_xvilvh_w(_s1_m, _s0_m); \ + _s0_m = __lasx_xvilvh_w(_in6, _in4); \ + _s1_m = __lasx_xvilvh_w(_in7, _in5); \ + _tmp6_m = __lasx_xvilvl_w(_s1_m, _s0_m); \ + _tmp7_m = __lasx_xvilvh_w(_s1_m, _s0_m); \ + _out0 = __lasx_xvpermi_q(_tmp4_m, _tmp0_m, 0x20); \ + _out1 = __lasx_xvpermi_q(_tmp5_m, _tmp1_m, 0x20); \ + _out2 = __lasx_xvpermi_q(_tmp6_m, _tmp2_m, 0x20); \ + _out3 = __lasx_xvpermi_q(_tmp7_m, _tmp3_m, 0x20); 
\ + _out4 = __lasx_xvpermi_q(_tmp4_m, _tmp0_m, 0x31); \ + _out5 = __lasx_xvpermi_q(_tmp5_m, _tmp1_m, 0x31); \ + _out6 = __lasx_xvpermi_q(_tmp6_m, _tmp2_m, 0x31); \ + _out7 = __lasx_xvpermi_q(_tmp7_m, _tmp3_m, 0x31); \ + } + +/* + * ============================================================================= + * Description : Transpose input 16x8 byte block + * Arguments : Inputs - _in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, + * _in8, _in9, _in10, _in11, _in12, _in13, _in14, _in15 + * (input 16x8 byte block) + * Outputs - _out0, _out1, _out2, _out3, _out4, _out5, _out6, + * _out7 (output 8x16 byte block) + * Details : The rows of the matrix become columns, and the columns become + * rows. + * Example : See LASX_TRANSPOSE16x8_H + * ============================================================================= + */ +#define LASX_TRANSPOSE16x8_B(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _in8, _in9, _in10, _in11, _in12, _in13, _in14, \ + _in15, _out0, _out1, _out2, _out3, _out4, _out5, \ + _out6, _out7) \ + { \ + __m256i _tmp0_m, _tmp1_m, _tmp2_m, _tmp3_m; \ + __m256i _tmp4_m, _tmp5_m, _tmp6_m, _tmp7_m; \ + \ + _tmp0_m = __lasx_xvilvl_b(_in2, _in0); \ + _tmp1_m = __lasx_xvilvl_b(_in3, _in1); \ + _tmp2_m = __lasx_xvilvl_b(_in6, _in4); \ + _tmp3_m = __lasx_xvilvl_b(_in7, _in5); \ + _tmp4_m = __lasx_xvilvl_b(_in10, _in8); \ + _tmp5_m = __lasx_xvilvl_b(_in11, _in9); \ + _tmp6_m = __lasx_xvilvl_b(_in14, _in12); \ + _tmp7_m = __lasx_xvilvl_b(_in15, _in13); \ + _out0 = __lasx_xvilvl_b(_tmp1_m, _tmp0_m); \ + _out1 = __lasx_xvilvh_b(_tmp1_m, _tmp0_m); \ + _out2 = __lasx_xvilvl_b(_tmp3_m, _tmp2_m); \ + _out3 = __lasx_xvilvh_b(_tmp3_m, _tmp2_m); \ + _out4 = __lasx_xvilvl_b(_tmp5_m, _tmp4_m); \ + _out5 = __lasx_xvilvh_b(_tmp5_m, _tmp4_m); \ + _out6 = __lasx_xvilvl_b(_tmp7_m, _tmp6_m); \ + _out7 = __lasx_xvilvh_b(_tmp7_m, _tmp6_m); \ + _tmp0_m = __lasx_xvilvl_w(_out2, _out0); \ + _tmp2_m = __lasx_xvilvh_w(_out2, _out0); \ + _tmp4_m = __lasx_xvilvl_w(_out3, _out1); \ + _tmp6_m = __lasx_xvilvh_w(_out3, _out1); \ + _tmp1_m = __lasx_xvilvl_w(_out6, _out4); \ + _tmp3_m = __lasx_xvilvh_w(_out6, _out4); \ + _tmp5_m = __lasx_xvilvl_w(_out7, _out5); \ + _tmp7_m = __lasx_xvilvh_w(_out7, _out5); \ + _out0 = __lasx_xvilvl_d(_tmp1_m, _tmp0_m); \ + _out1 = __lasx_xvilvh_d(_tmp1_m, _tmp0_m); \ + _out2 = __lasx_xvilvl_d(_tmp3_m, _tmp2_m); \ + _out3 = __lasx_xvilvh_d(_tmp3_m, _tmp2_m); \ + _out4 = __lasx_xvilvl_d(_tmp5_m, _tmp4_m); \ + _out5 = __lasx_xvilvh_d(_tmp5_m, _tmp4_m); \ + _out6 = __lasx_xvilvl_d(_tmp7_m, _tmp6_m); \ + _out7 = __lasx_xvilvh_d(_tmp7_m, _tmp6_m); \ + } + +/* + * ============================================================================= + * Description : Transpose input 16x8 byte block + * Arguments : Inputs - _in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, + * _in8, _in9, _in10, _in11, _in12, _in13, _in14, _in15 + * (input 16x8 byte block) + * Outputs - _out0, _out1, _out2, _out3, _out4, _out5, _out6, + * _out7 (output 8x16 byte block) + * Details : The rows of the matrix become columns, and the columns become + * rows. 
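The interleave-based block transposes above are easiest to debug against a trivial scalar golden model. The sketch below is illustrative only (not part of the diff); it states the 16x8 byte-block case described for LASX_TRANSPOSE16x8_B, where rows become columns.

#include <stdint.h>

/* Scalar golden model: a 16x8 byte block becomes an 8x16 byte block,
 * matching the documented behaviour of the SIMD macro. */
static void ref_transpose16x8_b(const uint8_t in[16][8], uint8_t out[8][16]) {
  for (int r = 0; r < 16; ++r) {
    for (int c = 0; c < 8; ++c) {
      out[c][r] = in[r][c]; /* row r, column c -> row c, column r */
    }
  }
}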
+ * Example : LASX_TRANSPOSE16x8_H + * _in0 : 1,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * _in1 : 2,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * _in2 : 3,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * _in3 : 4,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * _in4 : 5,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * _in5 : 6,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * _in6 : 7,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * _in7 : 8,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * _in8 : 9,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * _in9 : 1,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * _in10 : 0,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * _in11 : 2,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * _in12 : 3,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * _in13 : 7,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * _in14 : 5,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * _in15 : 6,2,3,4,5,6,7,8,0,0,0,0,0,0,0,0 + * + * _out0 : 1,2,3,4,5,6,7,8,9,1,0,2,3,7,5,6 + * _out1 : 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2 + * _out2 : 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3 + * _out3 : 4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4 + * _out4 : 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5 + * _out5 : 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6 + * _out6 : 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7 + * _out7 : 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8 + * ============================================================================= + */ +#define LASX_TRANSPOSE16x8_H(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _in8, _in9, _in10, _in11, _in12, _in13, _in14, \ + _in15, _out0, _out1, _out2, _out3, _out4, _out5, \ + _out6, _out7) \ + { \ + __m256i _tmp0_m, _tmp1_m, _tmp2_m, _tmp3_m; \ + __m256i _tmp4_m, _tmp5_m, _tmp6_m, _tmp7_m; \ + __m256i _t0, _t1, _t2, _t3, _t4, _t5, _t6, _t7; \ + \ + _tmp0_m = __lasx_xvilvl_h(_in2, _in0); \ + _tmp1_m = __lasx_xvilvl_h(_in3, _in1); \ + _tmp2_m = __lasx_xvilvl_h(_in6, _in4); \ + _tmp3_m = __lasx_xvilvl_h(_in7, _in5); \ + _tmp4_m = __lasx_xvilvl_h(_in10, _in8); \ + _tmp5_m = __lasx_xvilvl_h(_in11, _in9); \ + _tmp6_m = __lasx_xvilvl_h(_in14, _in12); \ + _tmp7_m = __lasx_xvilvl_h(_in15, _in13); \ + _t0 = __lasx_xvilvl_h(_tmp1_m, _tmp0_m); \ + _t1 = __lasx_xvilvh_h(_tmp1_m, _tmp0_m); \ + _t2 = __lasx_xvilvl_h(_tmp3_m, _tmp2_m); \ + _t3 = __lasx_xvilvh_h(_tmp3_m, _tmp2_m); \ + _t4 = __lasx_xvilvl_h(_tmp5_m, _tmp4_m); \ + _t5 = __lasx_xvilvh_h(_tmp5_m, _tmp4_m); \ + _t6 = __lasx_xvilvl_h(_tmp7_m, _tmp6_m); \ + _t7 = __lasx_xvilvh_h(_tmp7_m, _tmp6_m); \ + _tmp0_m = __lasx_xvilvl_d(_t2, _t0); \ + _tmp2_m = __lasx_xvilvh_d(_t2, _t0); \ + _tmp4_m = __lasx_xvilvl_d(_t3, _t1); \ + _tmp6_m = __lasx_xvilvh_d(_t3, _t1); \ + _tmp1_m = __lasx_xvilvl_d(_t6, _t4); \ + _tmp3_m = __lasx_xvilvh_d(_t6, _t4); \ + _tmp5_m = __lasx_xvilvl_d(_t7, _t5); \ + _tmp7_m = __lasx_xvilvh_d(_t7, _t5); \ + _out0 = __lasx_xvpermi_q(_tmp1_m, _tmp0_m, 0x20); \ + _out1 = __lasx_xvpermi_q(_tmp3_m, _tmp2_m, 0x20); \ + _out2 = __lasx_xvpermi_q(_tmp5_m, _tmp4_m, 0x20); \ + _out3 = __lasx_xvpermi_q(_tmp7_m, _tmp6_m, 0x20); \ + \ + _tmp0_m = __lasx_xvilvh_h(_in2, _in0); \ + _tmp1_m = __lasx_xvilvh_h(_in3, _in1); \ + _tmp2_m = __lasx_xvilvh_h(_in6, _in4); \ + _tmp3_m = __lasx_xvilvh_h(_in7, _in5); \ + _tmp4_m = __lasx_xvilvh_h(_in10, _in8); \ + _tmp5_m = __lasx_xvilvh_h(_in11, _in9); \ + _tmp6_m = __lasx_xvilvh_h(_in14, _in12); \ + _tmp7_m = __lasx_xvilvh_h(_in15, _in13); \ + _t0 = __lasx_xvilvl_h(_tmp1_m, _tmp0_m); \ + _t1 = __lasx_xvilvh_h(_tmp1_m, _tmp0_m); \ + _t2 = __lasx_xvilvl_h(_tmp3_m, _tmp2_m); \ + _t3 = __lasx_xvilvh_h(_tmp3_m, _tmp2_m); \ + _t4 = __lasx_xvilvl_h(_tmp5_m, _tmp4_m); \ + _t5 = __lasx_xvilvh_h(_tmp5_m, _tmp4_m); \ + _t6 = __lasx_xvilvl_h(_tmp7_m, _tmp6_m); \ + _t7 = __lasx_xvilvh_h(_tmp7_m, _tmp6_m); \ + _tmp0_m = __lasx_xvilvl_d(_t2, _t0); \ + _tmp2_m = 
__lasx_xvilvh_d(_t2, _t0); \ + _tmp4_m = __lasx_xvilvl_d(_t3, _t1); \ + _tmp6_m = __lasx_xvilvh_d(_t3, _t1); \ + _tmp1_m = __lasx_xvilvl_d(_t6, _t4); \ + _tmp3_m = __lasx_xvilvh_d(_t6, _t4); \ + _tmp5_m = __lasx_xvilvl_d(_t7, _t5); \ + _tmp7_m = __lasx_xvilvh_d(_t7, _t5); \ + _out4 = __lasx_xvpermi_q(_tmp1_m, _tmp0_m, 0x20); \ + _out5 = __lasx_xvpermi_q(_tmp3_m, _tmp2_m, 0x20); \ + _out6 = __lasx_xvpermi_q(_tmp5_m, _tmp4_m, 0x20); \ + _out7 = __lasx_xvpermi_q(_tmp7_m, _tmp6_m, 0x20); \ + } + +/* + * ============================================================================= + * Description : Transpose 4x4 block with halfword elements in vectors + * Arguments : Inputs - _in0, _in1, _in2, _in3 + * Outputs - _out0, _out1, _out2, _out3 + * Return Type - signed halfword + * Details : The rows of the matrix become columns, and the columns become + * rows. + * Example : See LASX_TRANSPOSE8x8_H + * ============================================================================= + */ +#define LASX_TRANSPOSE4x4_H(_in0, _in1, _in2, _in3, _out0, _out1, _out2, \ + _out3) \ + { \ + __m256i _s0_m, _s1_m; \ + \ + _s0_m = __lasx_xvilvl_h(_in1, _in0); \ + _s1_m = __lasx_xvilvl_h(_in3, _in2); \ + _out0 = __lasx_xvilvl_w(_s1_m, _s0_m); \ + _out2 = __lasx_xvilvh_w(_s1_m, _s0_m); \ + _out1 = __lasx_xvilvh_d(_out0, _out0); \ + _out3 = __lasx_xvilvh_d(_out2, _out2); \ + } + +/* + * ============================================================================= + * Description : Transpose input 8x8 byte block + * Arguments : Inputs - _in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7 + * (input 8x8 byte block) + * Outputs - _out0, _out1, _out2, _out3, _out4, _out5, _out6, + * _out7 (output 8x8 byte block) + * Example : See LASX_TRANSPOSE8x8_H + * ============================================================================= + */ +#define LASX_TRANSPOSE8x8_B(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _out0, _out1, _out2, _out3, _out4, _out5, _out6, \ + _out7) \ + { \ + __m256i _tmp0_m, _tmp1_m, _tmp2_m, _tmp3_m; \ + __m256i _tmp4_m, _tmp5_m, _tmp6_m, _tmp7_m; \ + _tmp0_m = __lasx_xvilvl_b(_in2, _in0); \ + _tmp1_m = __lasx_xvilvl_b(_in3, _in1); \ + _tmp2_m = __lasx_xvilvl_b(_in6, _in4); \ + _tmp3_m = __lasx_xvilvl_b(_in7, _in5); \ + _tmp4_m = __lasx_xvilvl_b(_tmp1_m, _tmp0_m); \ + _tmp5_m = __lasx_xvilvh_b(_tmp1_m, _tmp0_m); \ + _tmp6_m = __lasx_xvilvl_b(_tmp3_m, _tmp2_m); \ + _tmp7_m = __lasx_xvilvh_b(_tmp3_m, _tmp2_m); \ + _out0 = __lasx_xvilvl_w(_tmp6_m, _tmp4_m); \ + _out2 = __lasx_xvilvh_w(_tmp6_m, _tmp4_m); \ + _out4 = __lasx_xvilvl_w(_tmp7_m, _tmp5_m); \ + _out6 = __lasx_xvilvh_w(_tmp7_m, _tmp5_m); \ + _out1 = __lasx_xvbsrl_v(_out0, 8); \ + _out3 = __lasx_xvbsrl_v(_out2, 8); \ + _out5 = __lasx_xvbsrl_v(_out4, 8); \ + _out7 = __lasx_xvbsrl_v(_out6, 8); \ + } + +/* + * ============================================================================= + * Description : Transpose 8x8 block with halfword elements in vectors. + * Arguments : Inputs - _in0, _in1, ~ + * Outputs - _out0, _out1, ~ + * Details : The rows of the matrix become columns, and the columns become + * rows. 
+ * Example : LASX_TRANSPOSE8x8_H + * _in0 : 1,2,3,4, 5,6,7,8, 1,2,3,4, 5,6,7,8 + * _in1 : 8,2,3,4, 5,6,7,8, 8,2,3,4, 5,6,7,8 + * _in2 : 8,2,3,4, 5,6,7,8, 8,2,3,4, 5,6,7,8 + * _in3 : 1,2,3,4, 5,6,7,8, 1,2,3,4, 5,6,7,8 + * _in4 : 9,2,3,4, 5,6,7,8, 9,2,3,4, 5,6,7,8 + * _in5 : 1,2,3,4, 5,6,7,8, 1,2,3,4, 5,6,7,8 + * _in6 : 1,2,3,4, 5,6,7,8, 1,2,3,4, 5,6,7,8 + * _in7 : 9,2,3,4, 5,6,7,8, 9,2,3,4, 5,6,7,8 + * + * _out0 : 1,8,8,1, 9,1,1,9, 1,8,8,1, 9,1,1,9 + * _out1 : 2,2,2,2, 2,2,2,2, 2,2,2,2, 2,2,2,2 + * _out2 : 3,3,3,3, 3,3,3,3, 3,3,3,3, 3,3,3,3 + * _out3 : 4,4,4,4, 4,4,4,4, 4,4,4,4, 4,4,4,4 + * _out4 : 5,5,5,5, 5,5,5,5, 5,5,5,5, 5,5,5,5 + * _out5 : 6,6,6,6, 6,6,6,6, 6,6,6,6, 6,6,6,6 + * _out6 : 7,7,7,7, 7,7,7,7, 7,7,7,7, 7,7,7,7 + * _out7 : 8,8,8,8, 8,8,8,8, 8,8,8,8, 8,8,8,8 + * ============================================================================= + */ +#define LASX_TRANSPOSE8x8_H(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _out0, _out1, _out2, _out3, _out4, _out5, _out6, \ + _out7) \ + { \ + __m256i _s0_m, _s1_m; \ + __m256i _tmp0_m, _tmp1_m, _tmp2_m, _tmp3_m; \ + __m256i _tmp4_m, _tmp5_m, _tmp6_m, _tmp7_m; \ + \ + _s0_m = __lasx_xvilvl_h(_in6, _in4); \ + _s1_m = __lasx_xvilvl_h(_in7, _in5); \ + _tmp0_m = __lasx_xvilvl_h(_s1_m, _s0_m); \ + _tmp1_m = __lasx_xvilvh_h(_s1_m, _s0_m); \ + _s0_m = __lasx_xvilvh_h(_in6, _in4); \ + _s1_m = __lasx_xvilvh_h(_in7, _in5); \ + _tmp2_m = __lasx_xvilvl_h(_s1_m, _s0_m); \ + _tmp3_m = __lasx_xvilvh_h(_s1_m, _s0_m); \ + \ + _s0_m = __lasx_xvilvl_h(_in2, _in0); \ + _s1_m = __lasx_xvilvl_h(_in3, _in1); \ + _tmp4_m = __lasx_xvilvl_h(_s1_m, _s0_m); \ + _tmp5_m = __lasx_xvilvh_h(_s1_m, _s0_m); \ + _s0_m = __lasx_xvilvh_h(_in2, _in0); \ + _s1_m = __lasx_xvilvh_h(_in3, _in1); \ + _tmp6_m = __lasx_xvilvl_h(_s1_m, _s0_m); \ + _tmp7_m = __lasx_xvilvh_h(_s1_m, _s0_m); \ + \ + _out0 = __lasx_xvpickev_d(_tmp0_m, _tmp4_m); \ + _out2 = __lasx_xvpickev_d(_tmp1_m, _tmp5_m); \ + _out4 = __lasx_xvpickev_d(_tmp2_m, _tmp6_m); \ + _out6 = __lasx_xvpickev_d(_tmp3_m, _tmp7_m); \ + _out1 = __lasx_xvpickod_d(_tmp0_m, _tmp4_m); \ + _out3 = __lasx_xvpickod_d(_tmp1_m, _tmp5_m); \ + _out5 = __lasx_xvpickod_d(_tmp2_m, _tmp6_m); \ + _out7 = __lasx_xvpickod_d(_tmp3_m, _tmp7_m); \ + } + +/* + * ============================================================================= + * Description : Butterfly of 4 input vectors + * Arguments : Inputs - _in0, _in1, _in2, _in3 + * Outputs - _out0, _out1, _out2, _out3 + * Details : Butterfly operation + * Example : LASX_BUTTERFLY_4 + * _out0 = _in0 + _in3; + * _out1 = _in1 + _in2; + * _out2 = _in1 - _in2; + * _out3 = _in0 - _in3; + * ============================================================================= + */ +#define LASX_BUTTERFLY_4_B(_in0, _in1, _in2, _in3, _out0, _out1, _out2, _out3) \ + { \ + _out0 = __lasx_xvadd_b(_in0, _in3); \ + _out1 = __lasx_xvadd_b(_in1, _in2); \ + _out2 = __lasx_xvsub_b(_in1, _in2); \ + _out3 = __lasx_xvsub_b(_in0, _in3); \ + } +#define LASX_BUTTERFLY_4_H(_in0, _in1, _in2, _in3, _out0, _out1, _out2, _out3) \ + { \ + _out0 = __lasx_xvadd_h(_in0, _in3); \ + _out1 = __lasx_xvadd_h(_in1, _in2); \ + _out2 = __lasx_xvsub_h(_in1, _in2); \ + _out3 = __lasx_xvsub_h(_in0, _in3); \ + } +#define LASX_BUTTERFLY_4_W(_in0, _in1, _in2, _in3, _out0, _out1, _out2, _out3) \ + { \ + _out0 = __lasx_xvadd_w(_in0, _in3); \ + _out1 = __lasx_xvadd_w(_in1, _in2); \ + _out2 = __lasx_xvsub_w(_in1, _in2); \ + _out3 = __lasx_xvsub_w(_in0, _in3); \ + } +#define LASX_BUTTERFLY_4_D(_in0, _in1, _in2, _in3, _out0, _out1, _out2, _out3) \ + { 
\ + _out0 = __lasx_xvadd_d(_in0, _in3); \ + _out1 = __lasx_xvadd_d(_in1, _in2); \ + _out2 = __lasx_xvsub_d(_in1, _in2); \ + _out3 = __lasx_xvsub_d(_in0, _in3); \ + } + +/* + * ============================================================================= + * Description : Butterfly of 8 input vectors + * Arguments : Inputs - _in0, _in1, _in2, _in3, ~ + * Outputs - _out0, _out1, _out2, _out3, ~ + * Details : Butterfly operation + * Example : LASX_BUTTERFLY_8 + * _out0 = _in0 + _in7; + * _out1 = _in1 + _in6; + * _out2 = _in2 + _in5; + * _out3 = _in3 + _in4; + * _out4 = _in3 - _in4; + * _out5 = _in2 - _in5; + * _out6 = _in1 - _in6; + * _out7 = _in0 - _in7; + * ============================================================================= + */ +#define LASX_BUTTERFLY_8_B(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _out0, _out1, _out2, _out3, _out4, _out5, _out6, \ + _out7) \ + { \ + _out0 = __lasx_xvadd_b(_in0, _in7); \ + _out1 = __lasx_xvadd_b(_in1, _in6); \ + _out2 = __lasx_xvadd_b(_in2, _in5); \ + _out3 = __lasx_xvadd_b(_in3, _in4); \ + _out4 = __lasx_xvsub_b(_in3, _in4); \ + _out5 = __lasx_xvsub_b(_in2, _in5); \ + _out6 = __lasx_xvsub_b(_in1, _in6); \ + _out7 = __lasx_xvsub_b(_in0, _in7); \ + } + +#define LASX_BUTTERFLY_8_H(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _out0, _out1, _out2, _out3, _out4, _out5, _out6, \ + _out7) \ + { \ + _out0 = __lasx_xvadd_h(_in0, _in7); \ + _out1 = __lasx_xvadd_h(_in1, _in6); \ + _out2 = __lasx_xvadd_h(_in2, _in5); \ + _out3 = __lasx_xvadd_h(_in3, _in4); \ + _out4 = __lasx_xvsub_h(_in3, _in4); \ + _out5 = __lasx_xvsub_h(_in2, _in5); \ + _out6 = __lasx_xvsub_h(_in1, _in6); \ + _out7 = __lasx_xvsub_h(_in0, _in7); \ + } + +#define LASX_BUTTERFLY_8_W(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _out0, _out1, _out2, _out3, _out4, _out5, _out6, \ + _out7) \ + { \ + _out0 = __lasx_xvadd_w(_in0, _in7); \ + _out1 = __lasx_xvadd_w(_in1, _in6); \ + _out2 = __lasx_xvadd_w(_in2, _in5); \ + _out3 = __lasx_xvadd_w(_in3, _in4); \ + _out4 = __lasx_xvsub_w(_in3, _in4); \ + _out5 = __lasx_xvsub_w(_in2, _in5); \ + _out6 = __lasx_xvsub_w(_in1, _in6); \ + _out7 = __lasx_xvsub_w(_in0, _in7); \ + } + +#define LASX_BUTTERFLY_8_D(_in0, _in1, _in2, _in3, _in4, _in5, _in6, _in7, \ + _out0, _out1, _out2, _out3, _out4, _out5, _out6, \ + _out7) \ + { \ + _out0 = __lasx_xvadd_d(_in0, _in7); \ + _out1 = __lasx_xvadd_d(_in1, _in6); \ + _out2 = __lasx_xvadd_d(_in2, _in5); \ + _out3 = __lasx_xvadd_d(_in3, _in4); \ + _out4 = __lasx_xvsub_d(_in3, _in4); \ + _out5 = __lasx_xvsub_d(_in2, _in5); \ + _out6 = __lasx_xvsub_d(_in1, _in6); \ + _out7 = __lasx_xvsub_d(_in0, _in7); \ + } + +#endif // LASX + +/* + * ============================================================================= + * Description : Print out elements in vector. + * Arguments : Inputs - RTYPE, _element_num, _in0, _enter + * Outputs - + * Details : Print out '_element_num' elements in 'RTYPE' vector '_in0', if + * '_enter' is TRUE, prefix "\nVP:" will be added first. 
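As a reading aid, the add/sub pattern that every LASX_BUTTERFLY_4_* variant applies element-wise is spelled out below for plain integers. This is an illustrative sketch, not code from the diff; the mirrored add/subtract stage is the building block commonly used by DCT/IDCT-style transforms.

/* 4-point butterfly: sums of the outer/inner pairs first, then the
 * corresponding differences in reverse order, as documented above. */
static void ref_butterfly4(const int in[4], int out[4]) {
  out[0] = in[0] + in[3];
  out[1] = in[1] + in[2];
  out[2] = in[1] - in[2];
  out[3] = in[0] - in[3];
}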
+ * Example : VECT_PRINT(v4i32,4,in0,1); // in0: 1,2,3,4 + * VP:1,2,3,4, + * ============================================================================= + */ +#define VECT_PRINT(RTYPE, element_num, in0, enter) \ + { \ + RTYPE _tmp0 = (RTYPE)in0; \ + int _i = 0; \ + if (enter) \ + printf("\nVP:"); \ + for (_i = 0; _i < element_num; _i++) \ + printf("%d,", _tmp0[_i]); \ + } + +#endif /* LOONGSON_INTRINSICS_H */ +#endif /* INCLUDE_LIBYUV_LOONGSON_INTRINSICS_H */ diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/macros_msa.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/macros_msa.h index 4e232b66bf..b9a44fcced 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/macros_msa.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/macros_msa.h @@ -81,25 +81,35 @@ }) #endif // !(__mips == 64) #else // !(__mips_isa_rev >= 6) -#define LW(psrc) \ - ({ \ - const uint8_t* psrc_lw_m = (const uint8_t*)(psrc); \ - uint32_t val_m; \ - asm volatile("ulw %[val_m], %[psrc_lw_m] \n" \ - : [val_m] "=r"(val_m) \ - : [psrc_lw_m] "m"(*psrc_lw_m)); \ - val_m; \ +#define LW(psrc) \ + ({ \ + uint8_t* psrc_lw_m = (uint8_t*)(psrc); \ + uint32_t val_lw_m; \ + \ + __asm__ volatile( \ + "lwr %[val_lw_m], 0(%[psrc_lw_m]) \n\t" \ + "lwl %[val_lw_m], 3(%[psrc_lw_m]) \n\t" \ + \ + : [val_lw_m] "=&r"(val_lw_m) \ + : [psrc_lw_m] "r"(psrc_lw_m)); \ + \ + val_lw_m; \ }) #if (__mips == 64) -#define LD(psrc) \ - ({ \ - const uint8_t* psrc_ld_m = (const uint8_t*)(psrc); \ - uint64_t val_m = 0; \ - asm volatile("uld %[val_m], %[psrc_ld_m] \n" \ - : [val_m] "=r"(val_m) \ - : [psrc_ld_m] "m"(*psrc_ld_m)); \ - val_m; \ +#define LD(psrc) \ + ({ \ + uint8_t* psrc_ld_m = (uint8_t*)(psrc); \ + uint64_t val_ld_m = 0; \ + \ + __asm__ volatile( \ + "ldr %[val_ld_m], 0(%[psrc_ld_m]) \n\t" \ + "ldl %[val_ld_m], 7(%[psrc_ld_m]) \n\t" \ + \ + : [val_ld_m] "=&r"(val_ld_m) \ + : [psrc_ld_m] "r"(psrc_ld_m)); \ + \ + val_ld_m; \ }) #else // !(__mips == 64) #define LD(psrc) \ diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/planar_functions.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/planar_functions.h index 9e0038f474..154f2f2134 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/planar_functions.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/planar_functions.h @@ -83,6 +83,50 @@ void SetPlane(uint8_t* dst_y, int height, uint32_t value); +// Convert a plane of tiles of 16 x H to linear. +LIBYUV_API +int DetilePlane(const uint8_t* src_y, + int src_stride_y, + uint8_t* dst_y, + int dst_stride_y, + int width, + int height, + int tile_height); + +// Convert a plane of 16 bit tiles of 16 x H to linear. +LIBYUV_API +int DetilePlane_16(const uint16_t* src_y, + int src_stride_y, + uint16_t* dst_y, + int dst_stride_y, + int width, + int height, + int tile_height); + +// Convert a UV plane of tiles of 16 x H into linear U and V planes. +LIBYUV_API +void DetileSplitUVPlane(const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + int tile_height); + +// Convert a Y and UV plane of tiles into interlaced YUY2. +LIBYUV_API +void DetileToYUY2(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_yuy2, + int dst_stride_yuy2, + int width, + int height, + int tile_height); + // Split interleaved UV plane into separate U and V planes. 
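The new detile entry points above take the usual libyuv plane arguments plus a tile_height. A minimal usage sketch follows; it is illustrative only, and the frame size, buffer names, tightly packed strides and the tile height of 16 are assumptions, not values mandated by this patch.

#include <stdint.h>
#include "libyuv/planar_functions.h"

/* Linearize a 16-row-tiled Y plane of a hypothetical 1280x720 frame. */
static int detile_y_example(const uint8_t* tiled_y, uint8_t* linear_y) {
  const int width = 1280;
  const int height = 720;      /* multiple of the tile height */
  const int tile_height = 16;
  /* Strides assumed equal to width here; verify against the actual
   * tiled layout being consumed. Returns 0 on success. */
  return DetilePlane(tiled_y, width, linear_y, width, width, height,
                     tile_height);
}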
LIBYUV_API void SplitUVPlane(const uint8_t* src_uv, @@ -105,6 +149,50 @@ void MergeUVPlane(const uint8_t* src_u, int width, int height); +// Split interleaved msb UV plane into separate lsb U and V planes. +LIBYUV_API +void SplitUVPlane_16(const uint16_t* src_uv, + int src_stride_uv, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height, + int depth); + +// Merge separate lsb U and V planes into one interleaved msb UV plane. +LIBYUV_API +void MergeUVPlane_16(const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height, + int depth); + +// Convert lsb plane to msb plane +LIBYUV_API +void ConvertToMSBPlane_16(const uint16_t* src_y, + int src_stride_y, + uint16_t* dst_y, + int dst_stride_y, + int width, + int height, + int depth); + +// Convert msb plane to lsb plane +LIBYUV_API +void ConvertToLSBPlane_16(const uint16_t* src_y, + int src_stride_y, + uint16_t* dst_y, + int dst_stride_y, + int width, + int height, + int depth); + // Scale U and V to half width and height and merge into interleaved UV plane. // width and height are source size, allowing odd sizes. // Use for converting I444 or I422 to NV12. @@ -153,6 +241,92 @@ void MergeRGBPlane(const uint8_t* src_r, int width, int height); +// Split interleaved ARGB plane into separate R, G, B and A planes. +// dst_a can be NULL to discard alpha plane. +LIBYUV_API +void SplitARGBPlane(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_r, + int dst_stride_r, + uint8_t* dst_g, + int dst_stride_g, + uint8_t* dst_b, + int dst_stride_b, + uint8_t* dst_a, + int dst_stride_a, + int width, + int height); + +// Merge separate R, G, B and A planes into one interleaved ARGB plane. +// src_a can be NULL to fill opaque value to alpha. +LIBYUV_API +void MergeARGBPlane(const uint8_t* src_r, + int src_stride_r, + const uint8_t* src_g, + int src_stride_g, + const uint8_t* src_b, + int src_stride_b, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +// Merge separate 'depth' bit R, G and B planes stored in lsb +// into one interleaved XR30 plane. +// depth should in range [10, 16] +LIBYUV_API +void MergeXR30Plane(const uint16_t* src_r, + int src_stride_r, + const uint16_t* src_g, + int src_stride_g, + const uint16_t* src_b, + int src_stride_b, + uint8_t* dst_ar30, + int dst_stride_ar30, + int width, + int height, + int depth); + +// Merge separate 'depth' bit R, G, B and A planes stored in lsb +// into one interleaved AR64 plane. +// src_a can be NULL to fill opaque value to alpha. +// depth should in range [1, 16] +LIBYUV_API +void MergeAR64Plane(const uint16_t* src_r, + int src_stride_r, + const uint16_t* src_g, + int src_stride_g, + const uint16_t* src_b, + int src_stride_b, + const uint16_t* src_a, + int src_stride_a, + uint16_t* dst_ar64, + int dst_stride_ar64, + int width, + int height, + int depth); + +// Merge separate 'depth' bit R, G, B and A planes stored in lsb +// into one interleaved ARGB plane. +// src_a can be NULL to fill opaque value to alpha. +// depth should in range [8, 16] +LIBYUV_API +void MergeARGB16To8Plane(const uint16_t* src_r, + int src_stride_r, + const uint16_t* src_g, + int src_stride_g, + const uint16_t* src_b, + int src_stride_b, + const uint16_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height, + int depth); + // Copy I400. Supports inverting. 
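SplitARGBPlane and MergeARGBPlane introduced above are inverses of each other (dst_a/src_a may be NULL to drop or synthesize alpha). The round-trip sketch below is illustrative only; the buffer sizes and names are arbitrary, and the calls simply follow the signatures declared above.

#include <stdint.h>
#include "libyuv/planar_functions.h"

enum { kW = 64, kH = 32 };

/* Split a packed ARGB image into R/G/B/A planes and merge it back. */
static void argb_plane_roundtrip(const uint8_t* argb /* stride kW*4, kH rows */) {
  static uint8_t r[kW * kH], g[kW * kH], b[kW * kH], a[kW * kH];
  static uint8_t back[kW * 4 * kH];
  SplitARGBPlane(argb, kW * 4, r, kW, g, kW, b, kW, a, kW, kW, kH);
  MergeARGBPlane(r, kW, g, kW, b, kW, a, kW, back, kW * 4, kW, kH);
  /* back should now match argb byte-for-byte, since both calls are
   * lossless byte shuffles when alpha is carried through. */
}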
LIBYUV_API int I400ToI400(const uint8_t* src_y, @@ -200,6 +374,68 @@ int I444Copy(const uint8_t* src_y, int width, int height); +// Copy I210 to I210. +#define I210ToI210 I210Copy +LIBYUV_API +int I210Copy(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height); + +// Copy I410 to I410. +#define I410ToI410 I410Copy +LIBYUV_API +int I410Copy(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height); + +// Copy NV12. Supports inverting. +LIBYUV_API +int NV12Copy(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +// Copy NV21. Supports inverting. +LIBYUV_API +int NV21Copy(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_vu, + int src_stride_vu, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_vu, + int dst_stride_vu, + int width, + int height); + // Convert YUY2 to I422. LIBYUV_API int YUY2ToI422(const uint8_t* src_yuy2, @@ -267,6 +503,14 @@ int YUY2ToY(const uint8_t* src_yuy2, int width, int height); +LIBYUV_API +int UYVYToY(const uint8_t* src_uyvy, + int src_stride_uyvy, + uint8_t* dst_y, + int dst_stride_y, + int width, + int height); + // Convert I420 to I400. (calls CopyPlane ignoring u/v). LIBYUV_API int I420ToI400(const uint8_t* src_y, @@ -789,6 +1033,21 @@ int InterpolatePlane(const uint8_t* src0, int height, int interpolation); +// Interpolate between two images using specified amount of interpolation +// (0 to 255) and store to destination. +// 'interpolation' is specified as 8 bit fraction where 0 means 100% src0 +// and 255 means 1% src0 and 99% src1. +LIBYUV_API +int InterpolatePlane_16(const uint16_t* src0, + int src_stride0, // measured in 16 bit pixels + const uint16_t* src1, + int src_stride1, + uint16_t* dst, + int dst_stride, + int width, + int height, + int interpolation); + // Interpolate between two ARGB images using specified amount of interpolation // Internally calls InterpolatePlane with width * 4 (bpp). LIBYUV_API @@ -845,7 +1104,7 @@ void ARGBAffineRow_SSE2(const uint8_t* src_argb, int width); // Shuffle ARGB channel order. e.g. BGRA to ARGB. -// shuffler is 16 bytes and must be aligned. +// shuffler is 16 bytes. LIBYUV_API int ARGBShuffle(const uint8_t* src_bgra, int src_stride_bgra, @@ -855,6 +1114,17 @@ int ARGBShuffle(const uint8_t* src_bgra, int width, int height); +// Shuffle AR64 channel order. e.g. AR64 to AB64. +// shuffler is 16 bytes. +LIBYUV_API +int AR64Shuffle(const uint16_t* src_ar64, + int src_stride_ar64, + uint16_t* dst_ar64, + int dst_stride_ar64, + const uint8_t* shuffler, + int width, + int height); + // Sobel ARGB effect with planar output. 
LIBYUV_API int ARGBSobelToPlane(const uint8_t* src_argb, diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/rotate.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/rotate.h index 308882242c..37460c4ac5 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/rotate.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/rotate.h @@ -49,6 +49,24 @@ int I420Rotate(const uint8_t* src_y, int height, enum RotationMode mode); +// Rotate I422 frame. +LIBYUV_API +int I422Rotate(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + enum RotationMode mode); + // Rotate I444 frame. LIBYUV_API int I444Rotate(const uint8_t* src_y, @@ -67,6 +85,60 @@ int I444Rotate(const uint8_t* src_y, int height, enum RotationMode mode); +// Rotate I010 frame. +LIBYUV_API +int I010Rotate(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height, + enum RotationMode mode); + +// Rotate I210 frame. +LIBYUV_API +int I210Rotate(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height, + enum RotationMode mode); + +// Rotate I410 frame. +LIBYUV_API +int I410Rotate(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height, + enum RotationMode mode); + // Rotate NV12 input and store in I420. LIBYUV_API int NV12ToI420Rotate(const uint8_t* src_y, @@ -83,6 +155,26 @@ int NV12ToI420Rotate(const uint8_t* src_y, int height, enum RotationMode mode); +// Convert Android420 to I420 with rotation. +// "rotation" can be 0, 90, 180 or 270. +LIBYUV_API +int Android420ToI420Rotate(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + int src_pixel_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + enum RotationMode rotation); + // Rotate a plane by 0, 90, 180, or 270. LIBYUV_API int RotatePlane(const uint8_t* src, @@ -118,39 +210,61 @@ void RotatePlane270(const uint8_t* src, int width, int height); +// Rotate a plane by 0, 90, 180, or 270. +LIBYUV_API +int RotatePlane_16(const uint16_t* src, + int src_stride, + uint16_t* dst, + int dst_stride, + int width, + int height, + enum RotationMode mode); + // Rotations for when U and V are interleaved. -// These functions take one input pointer and +// These functions take one UV input pointer and // split the data into two buffers while -// rotating them. Deprecated. +// rotating them. +// width and height expected to be half size for NV12. 
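The new rotation entry points follow the I420Rotate convention: the caller passes the source width/height, and for kRotate90/kRotate270 the destination planes take the swapped geometry. The sketch below is illustrative only and uses the unambiguous 180-degree case (geometry unchanged); frame size and buffer names are assumptions, and stride handling for 90/270 should be checked against I420Rotate usage.

#include <stdint.h>
#include "libyuv/rotate.h"

enum { kSrcW = 320, kSrcH = 240 };

/* Rotate an I422 frame by 180 degrees; plane sizes are unchanged. */
static int rotate_i422_180(const uint8_t* y, const uint8_t* u, const uint8_t* v,
                           uint8_t* dy, uint8_t* du, uint8_t* dv) {
  return I422Rotate(y, kSrcW, u, kSrcW / 2, v, kSrcW / 2,
                    dy, kSrcW, du, kSrcW / 2, dv, kSrcW / 2,
                    kSrcW, kSrcH, kRotate180);
}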
LIBYUV_API -void RotateUV90(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width, - int height); +int SplitRotateUV(const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + enum RotationMode mode); + +LIBYUV_API +void SplitRotateUV90(const uint8_t* src, + int src_stride, + uint8_t* dst_a, + int dst_stride_a, + uint8_t* dst_b, + int dst_stride_b, + int width, + int height); LIBYUV_API -void RotateUV180(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width, - int height); +void SplitRotateUV180(const uint8_t* src, + int src_stride, + uint8_t* dst_a, + int dst_stride_a, + uint8_t* dst_b, + int dst_stride_b, + int width, + int height); LIBYUV_API -void RotateUV270(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width, - int height); +void SplitRotateUV270(const uint8_t* src, + int src_stride, + uint8_t* dst_a, + int dst_stride_a, + uint8_t* dst_b, + int dst_stride_b, + int width, + int height); // The 90 and 270 functions are based on transposes. // Doing a transpose with reversing the read/write @@ -165,14 +279,14 @@ void TransposePlane(const uint8_t* src, int height); LIBYUV_API -void TransposeUV(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width, - int height); +void SplitTransposeUV(const uint8_t* src, + int src_stride, + uint8_t* dst_a, + int dst_stride_a, + uint8_t* dst_b, + int dst_stride_b, + int width, + int height); #ifdef __cplusplus } // extern "C" diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/rotate_row.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/rotate_row.h index 022293eef2..b773f886ef 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/rotate_row.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/rotate_row.h @@ -32,8 +32,9 @@ extern "C" { #define LIBYUV_DISABLE_X86 #endif #endif -// The following are available for Visual C and clangcl 32 bit: -#if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER) +// The following are available for Visual C 32 bit: +#if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER) && \ + !defined(__clang__) #define HAS_TRANSPOSEWX8_SSSE3 #define HAS_TRANSPOSEUVWX8_SSE2 #endif @@ -60,9 +61,9 @@ extern "C" { #define HAS_TRANSPOSEUVWX16_MSA #endif -#if !defined(LIBYUV_DISABLE_MMI) && defined(_MIPS_ARCH_LOONGSON3A) -#define HAS_TRANSPOSEWX8_MMI -#define HAS_TRANSPOSEUVWX8_MMI +#if !defined(LIBYUV_DISABLE_LSX) && defined(__loongarch_sx) +#define HAS_TRANSPOSEWX16_LSX +#define HAS_TRANSPOSEUVWX16_LSX #endif void TransposeWxH_C(const uint8_t* src, @@ -92,11 +93,6 @@ void TransposeWx8_SSSE3(const uint8_t* src, uint8_t* dst, int dst_stride, int width); -void TransposeWx8_MMI(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width); void TransposeWx8_Fast_SSSE3(const uint8_t* src, int src_stride, uint8_t* dst, @@ -107,6 +103,11 @@ void TransposeWx16_MSA(const uint8_t* src, uint8_t* dst, int dst_stride, int width); +void TransposeWx16_LSX(const uint8_t* src, + int src_stride, + uint8_t* dst, + int dst_stride, + int width); void TransposeWx8_Any_NEON(const uint8_t* src, int src_stride, @@ -118,11 +119,6 @@ void TransposeWx8_Any_SSSE3(const uint8_t* src, uint8_t* dst, int 
dst_stride, int width); -void TransposeWx8_Any_MMI(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride, - int width); void TransposeWx8_Fast_Any_SSSE3(const uint8_t* src, int src_stride, uint8_t* dst, @@ -133,6 +129,11 @@ void TransposeWx16_Any_MSA(const uint8_t* src, uint8_t* dst, int dst_stride, int width); +void TransposeWx16_Any_LSX(const uint8_t* src, + int src_stride, + uint8_t* dst, + int dst_stride, + int width); void TransposeUVWxH_C(const uint8_t* src, int src_stride, @@ -171,13 +172,6 @@ void TransposeUVWx8_NEON(const uint8_t* src, uint8_t* dst_b, int dst_stride_b, int width); -void TransposeUVWx8_MMI(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width); void TransposeUVWx16_MSA(const uint8_t* src, int src_stride, uint8_t* dst_a, @@ -185,6 +179,13 @@ void TransposeUVWx16_MSA(const uint8_t* src, uint8_t* dst_b, int dst_stride_b, int width); +void TransposeUVWx16_LSX(const uint8_t* src, + int src_stride, + uint8_t* dst_a, + int dst_stride_a, + uint8_t* dst_b, + int dst_stride_b, + int width); void TransposeUVWx8_Any_SSE2(const uint8_t* src, int src_stride, @@ -200,13 +201,6 @@ void TransposeUVWx8_Any_NEON(const uint8_t* src, uint8_t* dst_b, int dst_stride_b, int width); -void TransposeUVWx8_Any_MMI(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width); void TransposeUVWx16_Any_MSA(const uint8_t* src, int src_stride, uint8_t* dst_a, @@ -214,7 +208,30 @@ void TransposeUVWx16_Any_MSA(const uint8_t* src, uint8_t* dst_b, int dst_stride_b, int width); +void TransposeUVWx16_Any_LSX(const uint8_t* src, + int src_stride, + uint8_t* dst_a, + int dst_stride_a, + uint8_t* dst_b, + int dst_stride_b, + int width); +void TransposeWxH_16_C(const uint16_t* src, + int src_stride, + uint16_t* dst, + int dst_stride, + int width, + int height); +void TransposeWx8_16_C(const uint16_t* src, + int src_stride, + uint16_t* dst, + int dst_stride, + int width); +void TransposeWx1_16_C(const uint16_t* src, + int src_stride, + uint16_t* dst, + int dst_stride, + int width); #ifdef __cplusplus } // extern "C" } // namespace libyuv diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/row.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/row.h index a27788c1f6..8d998727fb 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/row.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/row.h @@ -11,7 +11,8 @@ #ifndef INCLUDE_LIBYUV_ROW_H_ #define INCLUDE_LIBYUV_ROW_H_ -#include // For malloc. 
+#include // For NULL +#include // For malloc #include "libyuv/basic_types.h" @@ -74,7 +75,6 @@ extern "C" { #if !defined(LIBYUV_DISABLE_X86) && \ (defined(_M_IX86) || defined(__x86_64__) || defined(__i386__)) // Conversions: -#define HAS_ABGRTOUVROW_SSSE3 #define HAS_ABGRTOYROW_SSSE3 #define HAS_ARGB1555TOARGBROW_SSE2 #define HAS_ARGB4444TOARGBROW_SSE2 @@ -87,12 +87,8 @@ extern "C" { #define HAS_ARGBTORGB24ROW_SSSE3 #define HAS_ARGBTORGB565DITHERROW_SSE2 #define HAS_ARGBTORGB565ROW_SSE2 -#define HAS_ARGBTOUV444ROW_SSSE3 -#define HAS_ARGBTOUVJROW_SSSE3 -#define HAS_ARGBTOUVROW_SSSE3 #define HAS_ARGBTOYJROW_SSSE3 #define HAS_ARGBTOYROW_SSSE3 -#define HAS_BGRATOUVROW_SSSE3 #define HAS_BGRATOYROW_SSSE3 #define HAS_COPYROW_ERMS #define HAS_COPYROW_SSE2 @@ -107,6 +103,8 @@ extern "C" { #define HAS_I422TOUYVYROW_SSE2 #define HAS_I422TOYUY2ROW_SSE2 #define HAS_I444TOARGBROW_SSSE3 +#define HAS_I444TORGB24ROW_SSSE3 +#define HAS_INTERPOLATEROW_SSSE3 #define HAS_J400TOARGBROW_SSE2 #define HAS_J422TOARGBROW_SSSE3 #define HAS_MERGEUVROW_SSE2 @@ -119,13 +117,12 @@ extern "C" { #define HAS_NV21TORGB24ROW_SSSE3 #define HAS_RAWTOARGBROW_SSSE3 #define HAS_RAWTORGB24ROW_SSSE3 +#define HAS_RAWTOYJROW_SSSE3 #define HAS_RAWTOYROW_SSSE3 #define HAS_RGB24TOARGBROW_SSSE3 -#define HAS_RGB24TOYROW_SSSE3 #define HAS_RGB24TOYJROW_SSSE3 -#define HAS_RAWTOYJROW_SSSE3 +#define HAS_RGB24TOYROW_SSSE3 #define HAS_RGB565TOARGBROW_SSE2 -#define HAS_RGBATOUVROW_SSSE3 #define HAS_RGBATOYROW_SSSE3 #define HAS_SETROW_ERMS #define HAS_SETROW_X86 @@ -138,11 +135,18 @@ extern "C" { #define HAS_YUY2TOUV422ROW_SSE2 #define HAS_YUY2TOUVROW_SSE2 #define HAS_YUY2TOYROW_SSE2 +#if !defined(LIBYUV_BIT_EXACT) +#define HAS_ABGRTOUVROW_SSSE3 +#define HAS_ARGBTOUV444ROW_SSSE3 +#define HAS_ARGBTOUVJROW_SSSE3 +#define HAS_ARGBTOUVROW_SSSE3 +#define HAS_BGRATOUVROW_SSSE3 +#define HAS_RGBATOUVROW_SSSE3 +#endif // Effects: #define HAS_ARGBADDROW_SSE2 #define HAS_ARGBAFFINEROW_SSE2 -#define HAS_ARGBATTENUATEROW_SSSE3 #define HAS_ARGBBLENDROW_SSSE3 #define HAS_ARGBCOLORMATRIXROW_SSSE3 #define HAS_ARGBCOLORTABLEROW_X86 @@ -161,13 +165,15 @@ extern "C" { #define HAS_BLENDPLANEROW_SSSE3 #define HAS_COMPUTECUMULATIVESUMROW_SSE2 #define HAS_CUMULATIVESUMTOAVERAGEROW_SSE2 -#define HAS_INTERPOLATEROW_SSSE3 #define HAS_RGBCOLORTABLEROW_X86 #define HAS_SOBELROW_SSE2 #define HAS_SOBELTOPLANEROW_SSE2 #define HAS_SOBELXROW_SSE2 #define HAS_SOBELXYROW_SSE2 #define HAS_SOBELYROW_SSE2 +#if !defined(LIBYUV_BIT_EXACT) +#define HAS_ARGBATTENUATEROW_SSSE3 +#endif // The following functions fail on gcc/clang 32 bit with fpic and framepointer. // caveat: clangcl uses row_win.cc which works. 
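The HAS_* macros above only advertise that an optimized row function was compiled in; libyuv still gates its use behind a runtime CPU check. The sketch below condenses that dispatch pattern. The row-function and flag names come from row.h/cpu_id.h, but the wrapper itself is illustrative and omits the width-alignment handling (the _Any_ variants) that real call sites also perform.

#include <stdint.h>
#include "libyuv/cpu_id.h"
#include "libyuv/row.h"

typedef void (*ARGBToYRowFn)(const uint8_t* src_argb, uint8_t* dst_y,
                             int width);

/* Pick a row function: compile-time HAS_ gate plus runtime TestCpuFlag. */
static ARGBToYRowFn ChooseARGBToYRow(void) {
  ARGBToYRowFn row = ARGBToYRow_C;
#if defined(HAS_ARGBTOYROW_SSSE3)
  if (TestCpuFlag(kCpuHasSSSE3)) {
    row = ARGBToYRow_SSSE3;
  }
#endif
#if defined(HAS_ARGBTOYROW_AVX2)
  if (TestCpuFlag(kCpuHasAVX2)) {
    row = ARGBToYRow_AVX2;
  }
#endif
  return row;
}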
@@ -176,6 +182,7 @@ extern "C" { // TODO(fbarchard): fix build error on android_full_debug=1 // https://code.google.com/p/libyuv/issues/detail?id=517 #define HAS_I422ALPHATOARGBROW_SSSE3 +#define HAS_I444ALPHATOARGBROW_SSSE3 #endif #endif @@ -191,16 +198,11 @@ extern "C" { #define HAS_ARGBPOLYNOMIALROW_AVX2 #define HAS_ARGBSHUFFLEROW_AVX2 #define HAS_ARGBTORGB565DITHERROW_AVX2 -#define HAS_ARGBTOUVJROW_AVX2 -#define HAS_ARGBTOUVROW_AVX2 #define HAS_ARGBTOYJROW_AVX2 #define HAS_ARGBTOYROW_AVX2 -#define HAS_RGB24TOYJROW_AVX2 -#define HAS_RAWTOYJROW_AVX2 #define HAS_COPYROW_AVX #define HAS_H422TOARGBROW_AVX2 #define HAS_HALFFLOATROW_AVX2 -// #define HAS_HALFFLOATROW_F16C // Enable to test halffloat cast #define HAS_I422TOARGB1555ROW_AVX2 #define HAS_I422TOARGB4444ROW_AVX2 #define HAS_I422TOARGBROW_AVX2 @@ -208,6 +210,7 @@ extern "C" { #define HAS_I422TORGB565ROW_AVX2 #define HAS_I422TORGBAROW_AVX2 #define HAS_I444TOARGBROW_AVX2 +#define HAS_I444TORGB24ROW_AVX2 #define HAS_INTERPOLATEROW_AVX2 #define HAS_J422TOARGBROW_AVX2 #define HAS_MERGEUVROW_AVX2 @@ -217,6 +220,8 @@ extern "C" { #define HAS_NV12TORGB565ROW_AVX2 #define HAS_NV21TOARGBROW_AVX2 #define HAS_NV21TORGB24ROW_AVX2 +#define HAS_RAWTOYJROW_AVX2 +#define HAS_RGB24TOYJROW_AVX2 #define HAS_SPLITUVROW_AVX2 #define HAS_UYVYTOARGBROW_AVX2 #define HAS_UYVYTOUV422ROW_AVX2 @@ -226,27 +231,35 @@ extern "C" { #define HAS_YUY2TOUV422ROW_AVX2 #define HAS_YUY2TOUVROW_AVX2 #define HAS_YUY2TOYROW_AVX2 +// #define HAS_HALFFLOATROW_F16C // Enable to test half float cast +#if !defined(LIBYUV_BIT_EXACT) +#define HAS_ARGBTOUVJROW_AVX2 +#define HAS_ARGBTOUVROW_AVX2 +#endif // Effects: #define HAS_ARGBADDROW_AVX2 -#define HAS_ARGBATTENUATEROW_AVX2 #define HAS_ARGBMULTIPLYROW_AVX2 #define HAS_ARGBSUBTRACTROW_AVX2 #define HAS_ARGBUNATTENUATEROW_AVX2 #define HAS_BLENDPLANEROW_AVX2 +#if !defined(LIBYUV_BIT_EXACT) +#define HAS_ARGBATTENUATEROW_AVX2 +#endif #if defined(__x86_64__) || !defined(__pic__) || defined(__clang__) || \ defined(_MSC_VER) // TODO(fbarchard): fix build error on android_full_debug=1 // https://code.google.com/p/libyuv/issues/detail?id=517 #define HAS_I422ALPHATOARGBROW_AVX2 +#define HAS_I444ALPHATOARGBROW_AVX2 #endif #endif -// The following are available for AVX2 Visual C and clangcl 32 bit: +// The following are available for AVX2 Visual C 32 bit: // TODO(fbarchard): Port to gcc. #if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER) && \ - (defined(VISUALC_HAS_AVX2) || defined(CLANG_HAS_AVX2)) + !defined(__clang__) && defined(VISUALC_HAS_AVX2) #define HAS_ARGB1555TOARGBROW_AVX2 #define HAS_ARGB4444TOARGBROW_AVX2 #define HAS_ARGBTOARGB1555ROW_AVX2 @@ -259,75 +272,157 @@ extern "C" { // The following are also available on x64 Visual C. 
#if !defined(LIBYUV_DISABLE_X86) && defined(_MSC_VER) && defined(_M_X64) && \ (!defined(__clang__) || defined(__SSSE3__)) +#define HAS_I444ALPHATOARGBROW_SSSE3 +#define HAS_I444TOARGBROW_SSSE3 #define HAS_I422ALPHATOARGBROW_SSSE3 #define HAS_I422TOARGBROW_SSSE3 #endif // The following are available for gcc/clang x86 platforms: // TODO(fbarchard): Port to Visual C -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) +#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__)) +#define HAS_AB64TOARGBROW_SSSE3 #define HAS_ABGRTOAR30ROW_SSSE3 +#define HAS_ABGRTOYJROW_SSSE3 +#define HAS_AR64TOARGBROW_SSSE3 +#define HAS_ARGBTOAB64ROW_SSSE3 #define HAS_ARGBTOAR30ROW_SSSE3 +#define HAS_ARGBTOAR64ROW_SSSE3 #define HAS_CONVERT16TO8ROW_SSSE3 #define HAS_CONVERT8TO16ROW_SSE2 +#define HAS_DETILEROW_SSE2 +#define HAS_DETILEROW_16_SSE2 +#define HAS_DETILEROW_16_AVX +#define HAS_DETILESPLITUVROW_SSSE3 +#define HAS_DETILETOYUY2_SSE2 #define HAS_HALFMERGEUVROW_SSSE3 #define HAS_I210TOAR30ROW_SSSE3 #define HAS_I210TOARGBROW_SSSE3 +#define HAS_I212TOAR30ROW_SSSE3 +#define HAS_I212TOARGBROW_SSSE3 #define HAS_I400TOARGBROW_SSE2 +#define HAS_I410TOAR30ROW_SSSE3 +#define HAS_I410TOARGBROW_SSSE3 #define HAS_I422TOAR30ROW_SSSE3 +#define HAS_MERGEARGBROW_SSE2 #define HAS_MERGERGBROW_SSSE3 -#define HAS_MIRRORUVROW_AVX2 +#define HAS_MERGEXRGBROW_SSE2 #define HAS_MIRRORUVROW_SSSE3 +#define HAS_NV21TOYUV24ROW_SSSE3 +#define HAS_P210TOAR30ROW_SSSE3 +#define HAS_P210TOARGBROW_SSSE3 +#define HAS_P410TOAR30ROW_SSSE3 +#define HAS_P410TOARGBROW_SSSE3 #define HAS_RAWTORGBAROW_SSSE3 #define HAS_RGB24MIRRORROW_SSSE3 #define HAS_RGBATOYJROW_SSSE3 +#define HAS_SPLITARGBROW_SSE2 +#define HAS_SPLITARGBROW_SSSE3 #define HAS_SPLITRGBROW_SSSE3 +#define HAS_SPLITXRGBROW_SSE2 +#define HAS_SPLITXRGBROW_SSSE3 #define HAS_SWAPUVROW_SSSE3 +#define HAS_YUY2TONVUVROW_SSE2 +#if !defined(LIBYUV_BIT_EXACT) +#define HAS_ABGRTOUVJROW_SSSE3 +#endif + +#if defined(__x86_64__) || !defined(__pic__) +// TODO(fbarchard): fix build error on android_full_debug=1 +// https://code.google.com/p/libyuv/issues/detail?id=517 +#define HAS_I210ALPHATOARGBROW_SSSE3 +#define HAS_I410ALPHATOARGBROW_SSSE3 +#endif #endif // The following are available for AVX2 gcc/clang x86 platforms: // TODO(fbarchard): Port to Visual C -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) && \ +#if !defined(LIBYUV_DISABLE_X86) && \ + (defined(__x86_64__) || defined(__i386__)) && \ (defined(CLANG_HAS_AVX2) || defined(GCC_HAS_AVX2)) +#define HAS_AB64TOARGBROW_AVX2 #define HAS_ABGRTOAR30ROW_AVX2 -#define HAS_ABGRTOUVROW_AVX2 +#define HAS_ABGRTOYJROW_AVX2 #define HAS_ABGRTOYROW_AVX2 +#define HAS_AR64TOARGBROW_AVX2 +#define HAS_ARGBTOAB64ROW_AVX2 #define HAS_ARGBTOAR30ROW_AVX2 +#define HAS_ARGBTOAR64ROW_AVX2 #define HAS_ARGBTORAWROW_AVX2 #define HAS_ARGBTORGB24ROW_AVX2 #define HAS_CONVERT16TO8ROW_AVX2 #define HAS_CONVERT8TO16ROW_AVX2 +#define HAS_DIVIDEROW_16_AVX2 #define HAS_HALFMERGEUVROW_AVX2 #define HAS_I210TOAR30ROW_AVX2 #define HAS_I210TOARGBROW_AVX2 +#define HAS_I212TOAR30ROW_AVX2 +#define HAS_I212TOARGBROW_AVX2 #define HAS_I400TOARGBROW_AVX2 +#define HAS_I410TOAR30ROW_AVX2 +#define HAS_I410TOARGBROW_AVX2 #define HAS_I422TOAR30ROW_AVX2 #define HAS_I422TOUYVYROW_AVX2 #define HAS_I422TOYUY2ROW_AVX2 +#define HAS_INTERPOLATEROW_16TO8_AVX2 +#define HAS_MERGEAR64ROW_AVX2 +#define HAS_MERGEARGB16TO8ROW_AVX2 +#define HAS_MERGEARGBROW_AVX2 #define 
HAS_MERGEUVROW_16_AVX2 +#define HAS_MERGEXR30ROW_AVX2 +#define HAS_MERGEXR64ROW_AVX2 +#define HAS_MERGEXRGB16TO8ROW_AVX2 +#define HAS_MERGEXRGBROW_AVX2 +#define HAS_MIRRORUVROW_AVX2 #define HAS_MULTIPLYROW_16_AVX2 +#define HAS_NV21TOYUV24ROW_AVX2 +#define HAS_P210TOAR30ROW_AVX2 +#define HAS_P210TOARGBROW_AVX2 +#define HAS_P410TOAR30ROW_AVX2 +#define HAS_P410TOARGBROW_AVX2 #define HAS_RGBATOYJROW_AVX2 +#define HAS_SPLITARGBROW_AVX2 +#define HAS_SPLITUVROW_16_AVX2 +#define HAS_SPLITXRGBROW_AVX2 #define HAS_SWAPUVROW_AVX2 -// TODO(fbarchard): Fix AVX2 version of YUV24 -// #define HAS_NV21TOYUV24ROW_AVX2 +#define HAS_YUY2TONVUVROW_AVX2 +#if !defined(LIBYUV_BIT_EXACT) +#define HAS_ABGRTOUVJROW_AVX2 +#define HAS_ABGRTOUVROW_AVX2 +#endif + +#if defined(__x86_64__) || !defined(__pic__) +// TODO(fbarchard): fix build error on android_full_debug=1 +// https://code.google.com/p/libyuv/issues/detail?id=517 +#define HAS_I210ALPHATOARGBROW_AVX2 +#define HAS_I410ALPHATOARGBROW_AVX2 +#endif #endif // The following are available for AVX512 clang x86 platforms: // TODO(fbarchard): Port to GCC and Visual C // TODO(fbarchard): re-enable HAS_ARGBTORGB24ROW_AVX512VBMI. Issue libyuv:789 -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) && \ - (defined(CLANG_HAS_AVX512)) +#if !defined(LIBYUV_DISABLE_X86) && \ + (defined(__x86_64__) || defined(__i386__)) && (defined(CLANG_HAS_AVX512)) #define HAS_ARGBTORGB24ROW_AVX512VBMI #endif +// The following are available for AVX512 clang x64 platforms: +// TODO(fbarchard): Port to x86 +#if !defined(LIBYUV_DISABLE_X86) && defined(__x86_64__) && \ + (defined(CLANG_HAS_AVX512)) +#define HAS_I422TOARGBROW_AVX512BW +#endif + // The following are available on Neon platforms: #if !defined(LIBYUV_DISABLE_NEON) && \ (defined(__aarch64__) || defined(__ARM_NEON__) || defined(LIBYUV_NEON)) +#define HAS_AB64TOARGBROW_NEON +#define HAS_ABGRTOUVJROW_NEON #define HAS_ABGRTOUVROW_NEON +#define HAS_ABGRTOYJROW_NEON #define HAS_ABGRTOYROW_NEON +#define HAS_AR64TOARGBROW_NEON #define HAS_ARGB1555TOARGBROW_NEON #define HAS_ARGB1555TOUVROW_NEON #define HAS_ARGB1555TOYROW_NEON @@ -336,6 +431,8 @@ extern "C" { #define HAS_ARGB4444TOYROW_NEON #define HAS_ARGBEXTRACTALPHAROW_NEON #define HAS_ARGBSETROW_NEON +#define HAS_ARGBTOAB64ROW_NEON +#define HAS_ARGBTOAR64ROW_NEON #define HAS_ARGBTOARGB1555ROW_NEON #define HAS_ARGBTOARGB4444ROW_NEON #define HAS_ARGBTORAWROW_NEON @@ -353,7 +450,14 @@ extern "C" { #define HAS_BGRATOUVROW_NEON #define HAS_BGRATOYROW_NEON #define HAS_BYTETOFLOATROW_NEON +#define HAS_CONVERT16TO8ROW_NEON #define HAS_COPYROW_NEON +#define HAS_DETILEROW_16_NEON +#define HAS_DETILEROW_NEON +#define HAS_DETILESPLITUVROW_NEON +#define HAS_DETILETOYUY2_NEON +#define HAS_UNPACKMT2T_NEON +#define HAS_DIVIDEROW_16_NEON #define HAS_HALFFLOATROW_NEON #define HAS_HALFMERGEUVROW_NEON #define HAS_I400TOARGBROW_NEON @@ -366,12 +470,25 @@ extern "C" { #define HAS_I422TORGBAROW_NEON #define HAS_I422TOUYVYROW_NEON #define HAS_I422TOYUY2ROW_NEON +#define HAS_I444ALPHATOARGBROW_NEON #define HAS_I444TOARGBROW_NEON +#define HAS_I444TORGB24ROW_NEON +#define HAS_INTERPOLATEROW_16_NEON +#define HAS_INTERPOLATEROW_NEON #define HAS_J400TOARGBROW_NEON +#define HAS_MERGEAR64ROW_NEON +#define HAS_MERGEARGB16TO8ROW_NEON +#define HAS_MERGEARGBROW_NEON +#define HAS_MERGEUVROW_16_NEON #define HAS_MERGEUVROW_NEON +#define HAS_MERGEXR30ROW_NEON +#define HAS_MERGEXR64ROW_NEON +#define HAS_MERGEXRGB16TO8ROW_NEON +#define HAS_MERGEXRGBROW_NEON #define 
HAS_MIRRORROW_NEON -#define HAS_MIRRORUVROW_NEON #define HAS_MIRRORSPLITUVROW_NEON +#define HAS_MIRRORUVROW_NEON +#define HAS_MULTIPLYROW_16_NEON #define HAS_NV12TOARGBROW_NEON #define HAS_NV12TORGB24ROW_NEON #define HAS_NV12TORGB565ROW_NEON @@ -381,10 +498,12 @@ extern "C" { #define HAS_RAWTOARGBROW_NEON #define HAS_RAWTORGB24ROW_NEON #define HAS_RAWTORGBAROW_NEON +#define HAS_RAWTOUVJROW_NEON #define HAS_RAWTOUVROW_NEON #define HAS_RAWTOYJROW_NEON #define HAS_RAWTOYROW_NEON #define HAS_RGB24TOARGBROW_NEON +#define HAS_RGB24TOUVJROW_NEON #define HAS_RGB24TOUVROW_NEON #define HAS_RGB24TOYJROW_NEON #define HAS_RGB24TOYROW_NEON @@ -395,14 +514,18 @@ extern "C" { #define HAS_RGBATOYJROW_NEON #define HAS_RGBATOYROW_NEON #define HAS_SETROW_NEON +#define HAS_SPLITARGBROW_NEON #define HAS_SPLITRGBROW_NEON +#define HAS_SPLITUVROW_16_NEON #define HAS_SPLITUVROW_NEON +#define HAS_SPLITXRGBROW_NEON #define HAS_SWAPUVROW_NEON #define HAS_UYVYTOARGBROW_NEON #define HAS_UYVYTOUV422ROW_NEON #define HAS_UYVYTOUVROW_NEON #define HAS_UYVYTOYROW_NEON #define HAS_YUY2TOARGBROW_NEON +#define HAS_YUY2TONVUVROW_NEON #define HAS_YUY2TOUV422ROW_NEON #define HAS_YUY2TOUVROW_NEON #define HAS_YUY2TOYROW_NEON @@ -414,14 +537,13 @@ extern "C" { #define HAS_ARGBCOLORMATRIXROW_NEON #define HAS_ARGBGRAYROW_NEON #define HAS_ARGBMIRRORROW_NEON -#define HAS_RGB24MIRRORROW_NEON #define HAS_ARGBMULTIPLYROW_NEON #define HAS_ARGBQUANTIZEROW_NEON #define HAS_ARGBSEPIAROW_NEON #define HAS_ARGBSHADEROW_NEON #define HAS_ARGBSHUFFLEROW_NEON #define HAS_ARGBSUBTRACTROW_NEON -#define HAS_INTERPOLATEROW_NEON +#define HAS_RGB24MIRRORROW_NEON #define HAS_SOBELROW_NEON #define HAS_SOBELTOPLANEROW_NEON #define HAS_SOBELXROW_NEON @@ -431,12 +553,13 @@ extern "C" { // The following are available on AArch64 platforms: #if !defined(LIBYUV_DISABLE_NEON) && defined(__aarch64__) -#define HAS_SCALESUMSAMPLES_NEON -#define HAS_GAUSSROW_F32_NEON #define HAS_GAUSSCOL_F32_NEON - +#define HAS_GAUSSROW_F32_NEON +#define HAS_INTERPOLATEROW_16TO8_NEON +#define HAS_SCALESUMSAMPLES_NEON #endif #if !defined(LIBYUV_DISABLE_MSA) && defined(__mips_msa) +#define HAS_ABGRTOUVJROW_MSA #define HAS_ABGRTOUVROW_MSA #define HAS_ABGRTOYROW_MSA #define HAS_ARGB1555TOARGBROW_MSA @@ -473,20 +596,21 @@ extern "C" { #define HAS_HALFFLOATROW_MSA #define HAS_I400TOARGBROW_MSA #define HAS_I422ALPHATOARGBROW_MSA +#define HAS_I422TOARGB1555ROW_MSA +#define HAS_I422TOARGB4444ROW_MSA #define HAS_I422TOARGBROW_MSA #define HAS_I422TORGB24ROW_MSA +#define HAS_I422TORGB565ROW_MSA #define HAS_I422TORGBAROW_MSA #define HAS_I422TOUYVYROW_MSA #define HAS_I422TOYUY2ROW_MSA #define HAS_I444TOARGBROW_MSA -#define HAS_I422TOARGB1555ROW_MSA -#define HAS_I422TORGB565ROW_MSA #define HAS_INTERPOLATEROW_MSA #define HAS_J400TOARGBROW_MSA #define HAS_MERGEUVROW_MSA #define HAS_MIRRORROW_MSA -#define HAS_MIRRORUVROW_MSA #define HAS_MIRRORSPLITUVROW_MSA +#define HAS_MIRRORUVROW_MSA #define HAS_NV12TOARGBROW_MSA #define HAS_NV12TORGB565ROW_MSA #define HAS_NV21TOARGBROW_MSA @@ -518,96 +642,118 @@ extern "C" { #define HAS_YUY2TOYROW_MSA #endif -#if !defined(LIBYUV_DISABLE_MMI) && defined(_MIPS_ARCH_LOONGSON3A) -#define HAS_ABGRTOUVROW_MMI -#define HAS_ABGRTOYROW_MMI -#define HAS_ARGB1555TOARGBROW_MMI -#define HAS_ARGB1555TOUVROW_MMI -#define HAS_ARGB1555TOYROW_MMI -#define HAS_ARGB4444TOARGBROW_MMI -#define HAS_ARGB4444TOUVROW_MMI -#define HAS_ARGB4444TOYROW_MMI -#define HAS_ARGBADDROW_MMI -#define HAS_ARGBATTENUATEROW_MMI -#define HAS_ARGBBLENDROW_MMI -#define HAS_ARGBCOLORMATRIXROW_MMI -#define 
HAS_ARGBCOPYALPHAROW_MMI -#define HAS_ARGBCOPYYTOALPHAROW_MMI -#define HAS_ARGBEXTRACTALPHAROW_MMI -#define HAS_ARGBGRAYROW_MMI -#define HAS_ARGBMIRRORROW_MMI -#define HAS_ARGBMULTIPLYROW_MMI -#define HAS_ARGBSEPIAROW_MMI -#define HAS_ARGBSETROW_MMI -#define HAS_ARGBSHADEROW_MMI -#define HAS_ARGBSHUFFLEROW_MMI -#define HAS_ARGBSUBTRACTROW_MMI -#define HAS_ARGBTOARGB1555ROW_MMI -#define HAS_ARGBTOARGB4444ROW_MMI -#define HAS_ARGBTORAWROW_MMI -#define HAS_ARGBTORGB24ROW_MMI -#define HAS_ARGBTORGB565DITHERROW_MMI -#define HAS_ARGBTORGB565ROW_MMI -#define HAS_ARGBTOUV444ROW_MMI -#define HAS_ARGBTOUVJROW_MMI -#define HAS_ARGBTOUVROW_MMI -#define HAS_ARGBTOYJROW_MMI -#define HAS_ARGBTOYROW_MMI -#define HAS_BGRATOUVROW_MMI -#define HAS_BGRATOYROW_MMI -#define HAS_BLENDPLANEROW_MMI -#define HAS_COMPUTECUMULATIVESUMROW_MMI -#define HAS_CUMULATIVESUMTOAVERAGEROW_MMI -#define HAS_HALFFLOATROW_MMI -#define HAS_I400TOARGBROW_MMI -#define HAS_I422TOUYVYROW_MMI -#define HAS_I422TOYUY2ROW_MMI -#define HAS_I422TOARGBROW_MMI -#define HAS_I444TOARGBROW_MMI -#define HAS_INTERPOLATEROW_MMI -#define HAS_J400TOARGBROW_MMI -#define HAS_MERGERGBROW_MMI -#define HAS_MERGEUVROW_MMI -#define HAS_MIRRORROW_MMI -#define HAS_MIRRORSPLITUVROW_MMI -#define HAS_RAWTOARGBROW_MMI -#define HAS_RAWTORGB24ROW_MMI -#define HAS_RAWTOUVROW_MMI -#define HAS_RAWTOYROW_MMI -#define HAS_RGB24TOARGBROW_MMI -#define HAS_RGB24TOUVROW_MMI -#define HAS_RGB24TOYROW_MMI -#define HAS_RGB565TOARGBROW_MMI -#define HAS_RGB565TOUVROW_MMI -#define HAS_RGB565TOYROW_MMI -#define HAS_RGBATOUVROW_MMI -#define HAS_RGBATOYROW_MMI -#define HAS_SOBELROW_MMI -#define HAS_SOBELTOPLANEROW_MMI -#define HAS_SOBELXROW_MMI -#define HAS_SOBELXYROW_MMI -#define HAS_SOBELYROW_MMI -#define HAS_SPLITRGBROW_MMI -#define HAS_SPLITUVROW_MMI -#define HAS_UYVYTOUVROW_MMI -#define HAS_UYVYTOYROW_MMI -#define HAS_YUY2TOUV422ROW_MMI -#define HAS_YUY2TOUVROW_MMI -#define HAS_YUY2TOYROW_MMI -#define HAS_I210TOARGBROW_MMI -#define HAS_I422TOARGB4444ROW_MMI -#define HAS_I422TOARGB1555ROW_MMI -#define HAS_I422TORGB565ROW_MMI -#define HAS_NV21TORGB24ROW_MMI -#define HAS_NV12TORGB24ROW_MMI -#define HAS_I422ALPHATOARGBROW_MMI -#define HAS_I422TORGB24ROW_MMI -#define HAS_NV12TOARGBROW_MMI -#define HAS_NV21TOARGBROW_MMI -#define HAS_NV12TORGB565ROW_MMI -#define HAS_YUY2TOARGBROW_MMI -#define HAS_UYVYTOARGBROW_MMI -#define HAS_I422TORGBAROW_MMI +#if !defined(LIBYUV_DISABLE_LSX) && defined(__loongarch_sx) +#define HAS_ABGRTOUVROW_LSX +#define HAS_ABGRTOYROW_LSX +#define HAS_ARGB1555TOARGBROW_LSX +#define HAS_ARGB1555TOUVROW_LSX +#define HAS_ARGB1555TOYROW_LSX +#define HAS_ARGB4444TOARGBROW_LSX +#define HAS_ARGBBLENDROW_LSX +#define HAS_ARGBCOLORMATRIXROW_LSX +#define HAS_ARGBEXTRACTALPHAROW_LSX +#define HAS_ARGBQUANTIZEROW_LSX +#define HAS_ARGBSETROW_LSX +#define HAS_ARGBTOUVJROW_LSX +#define HAS_ARGBTOYJROW_LSX +#define HAS_BGRATOUVROW_LSX +#define HAS_BGRATOYROW_LSX +#define HAS_I400TOARGBROW_LSX +#define HAS_I444TOARGBROW_LSX +#define HAS_INTERPOLATEROW_LSX +#define HAS_J400TOARGBROW_LSX +#define HAS_MERGEUVROW_LSX +#define HAS_MIRRORSPLITUVROW_LSX +#define HAS_NV12TOARGBROW_LSX +#define HAS_NV12TORGB565ROW_LSX +#define HAS_NV21TOARGBROW_LSX +#define HAS_RAWTOARGBROW_LSX +#define HAS_RAWTORGB24ROW_LSX +#define HAS_RAWTOUVROW_LSX +#define HAS_RAWTOYROW_LSX +#define HAS_RGB24TOARGBROW_LSX +#define HAS_RGB24TOUVROW_LSX +#define HAS_RGB24TOYROW_LSX +#define HAS_RGB565TOARGBROW_LSX +#define HAS_RGB565TOUVROW_LSX +#define HAS_RGB565TOYROW_LSX +#define HAS_RGBATOUVROW_LSX +#define 
HAS_RGBATOYROW_LSX +#define HAS_SETROW_LSX +#define HAS_SOBELROW_LSX +#define HAS_SOBELTOPLANEROW_LSX +#define HAS_SOBELXYROW_LSX +#define HAS_SPLITUVROW_LSX +#define HAS_UYVYTOARGBROW_LSX +#define HAS_YUY2TOARGBROW_LSX +#define HAS_ARGBTOYROW_LSX +#define HAS_ABGRTOYJROW_LSX +#define HAS_RGBATOYJROW_LSX +#define HAS_RGB24TOYJROW_LSX +#define HAS_RAWTOYJROW_LSX +#endif + +#if !defined(LIBYUV_DISABLE_LASX) && defined(__loongarch_asx) +#define HAS_ARGB1555TOARGBROW_LASX +#define HAS_ARGB1555TOUVROW_LASX +#define HAS_ARGB1555TOYROW_LASX +#define HAS_ARGB4444TOARGBROW_LASX +#define HAS_ARGBADDROW_LASX +#define HAS_ARGBATTENUATEROW_LASX +#define HAS_ARGBGRAYROW_LASX +#define HAS_ARGBMIRRORROW_LASX +#define HAS_ARGBMULTIPLYROW_LASX +#define HAS_ARGBSEPIAROW_LASX +#define HAS_ARGBSHADEROW_LASX +#define HAS_ARGBSHUFFLEROW_LASX +#define HAS_ARGBSUBTRACTROW_LASX +#define HAS_ARGBTOARGB1555ROW_LASX +#define HAS_ARGBTOARGB4444ROW_LASX +#define HAS_ARGBTORAWROW_LASX +#define HAS_ARGBTORGB24ROW_LASX +#define HAS_ARGBTORGB565DITHERROW_LASX +#define HAS_ARGBTORGB565ROW_LASX +#define HAS_ARGBTOUV444ROW_LASX +#define HAS_ARGBTOUVJROW_LASX +#define HAS_ARGBTOUVROW_LASX +#define HAS_ARGBTOYJROW_LASX +#define HAS_ARGBTOYROW_LASX +#define HAS_ABGRTOYJROW_LASX +#define HAS_ABGRTOYROW_LASX +#define HAS_I422ALPHATOARGBROW_LASX +#define HAS_I422TOARGB1555ROW_LASX +#define HAS_I422TOARGB4444ROW_LASX +#define HAS_I422TOARGBROW_LASX +#define HAS_I422TORGB24ROW_LASX +#define HAS_I422TORGB565ROW_LASX +#define HAS_I422TORGBAROW_LASX +#define HAS_I422TOUYVYROW_LASX +#define HAS_I422TOYUY2ROW_LASX +#define HAS_MIRRORROW_LASX +#define HAS_MIRRORUVROW_LASX +#define HAS_NV12TOARGBROW_LASX +#define HAS_NV12TORGB565ROW_LASX +#define HAS_NV21TOARGBROW_LASX +#define HAS_RAWTOARGBROW_LASX +#define HAS_RAWTOUVROW_LASX +#define HAS_RAWTOYROW_LASX +#define HAS_RGB24TOARGBROW_LASX +#define HAS_RGB24TOUVROW_LASX +#define HAS_RGB24TOYROW_LASX +#define HAS_RGB565TOARGBROW_LASX +#define HAS_RGB565TOUVROW_LASX +#define HAS_RGB565TOYROW_LASX +#define HAS_UYVYTOUV422ROW_LASX +#define HAS_UYVYTOUVROW_LASX +#define HAS_UYVYTOYROW_LASX +#define HAS_YUY2TOUV422ROW_LASX +#define HAS_YUY2TOUVROW_LASX +#define HAS_YUY2TOYROW_LASX +#define HAS_RGBATOYROW_LASX +#define HAS_RGBATOYJROW_LASX +#define HAS_BGRATOYROW_LASX +#define HAS_RGB24TOYJROW_LASX +#define HAS_RAWTOYJROW_LASX #endif #if defined(_MSC_VER) && !defined(__CLR_VER) && !defined(__clang__) @@ -616,6 +762,7 @@ extern "C" { #else #define SIMD_ALIGNED(var) __declspec(align(16)) var #endif +#define LIBYUV_NOINLINE __declspec(noinline) typedef __declspec(align(16)) int16_t vec16[8]; typedef __declspec(align(16)) int32_t vec32[4]; typedef __declspec(align(16)) float vecf32[4]; @@ -636,6 +783,7 @@ typedef __declspec(align(32)) uint8_t ulvec8[32]; #else #define SIMD_ALIGNED(var) var __attribute__((aligned(16))) #endif +#define LIBYUV_NOINLINE __attribute__((noinline)) typedef int16_t __attribute__((vector_size(16))) vec16; typedef int32_t __attribute__((vector_size(16))) vec32; typedef float __attribute__((vector_size(16))) vecf32; @@ -651,6 +799,7 @@ typedef uint32_t __attribute__((vector_size(32))) ulvec32; typedef uint8_t __attribute__((vector_size(32))) ulvec8; #else #define SIMD_ALIGNED(var) var +#define LIBYUV_NOINLINE typedef int16_t vec16[8]; typedef int32_t vec32[4]; typedef float vecf32[4]; @@ -666,33 +815,18 @@ typedef uint32_t ulvec32[8]; typedef uint8_t ulvec8[32]; #endif -#if defined(__aarch64__) -// This struct is for Arm64 color conversion. 
-struct YuvConstants {
-  uvec16 kUVToRB;
-  uvec16 kUVToRB2;
-  uvec16 kUVToG;
-  uvec16 kUVToG2;
-  vec16 kUVBiasBGR;
-  vec32 kYToRgb;
-};
-#elif defined(__arm__)
-// This struct is for ArmV7 color conversion.
+#if defined(__aarch64__) || defined(__arm__)
+// This struct is for ARM color conversion.
struct YuvConstants {
-  uvec8 kUVToRB;
-  uvec8 kUVToG;
-  vec16 kUVBiasBGR;
-  vec32 kYToRgb;
+  uvec8 kUVCoeff;
+  vec16 kRGBCoeffBias;
};
#else
// This struct is for Intel color conversion.
struct YuvConstants {
-  int8_t kUVToB[32];
-  int8_t kUVToG[32];
-  int8_t kUVToR[32];
-  int16_t kUVBiasB[16];
-  int16_t kUVBiasG[16];
-  int16_t kUVBiasR[16];
+  uint8_t kUVToB[32];
+  uint8_t kUVToG[32];
+  uint8_t kUVToR[32];
  int16_t kYToRgb[16];
  int16_t kYBiasToRgb[16];
};
@@ -701,23 +835,28 @@ struct YuvConstants {
#define KUVTOB 0
#define KUVTOG 32
#define KUVTOR 64
-#define KUVBIASB 96
-#define KUVBIASG 128
-#define KUVBIASR 160
-#define KYTORGB 192
-#define KYBIASTORGB 224
+#define KYTORGB 96
+#define KYBIASTORGB 128
#endif

#define IS_ALIGNED(p, a) (!((uintptr_t)(p) & ((a)-1)))

-#define align_buffer_64(var, size)                                           \
-  uint8_t* var##_mem = (uint8_t*)(malloc((size) + 63));         /* NOLINT */ \
-  uint8_t* var = (uint8_t*)(((intptr_t)(var##_mem) + 63) & ~63) /* NOLINT */
+#define align_buffer_64(var, size)                           \
+  void* var##_mem = malloc((size) + 63);        /* NOLINT */ \
+  uint8_t* var = (uint8_t*)(((intptr_t)var##_mem + 63) & ~63) /* NOLINT */

#define free_aligned_buffer_64(var) \
  free(var##_mem);                  \
-  var = 0
+  var = NULL
+
+#define align_buffer_64_16(var, size)                        \
+  void* var##_mem = malloc((size)*2 + 63);      /* NOLINT */ \
+  uint16_t* var = (uint16_t*)(((intptr_t)var##_mem + 63) & ~63) /* NOLINT */
+
+#define free_aligned_buffer_64_16(var) \
+  free(var##_mem);                     \
+  var = NULL

#if defined(__APPLE__) || defined(__x86_64__) || defined(__llvm__)
#define OMITFP
@@ -789,12 +928,25 @@ void I444ToARGBRow_NEON(const uint8_t* src_y,
                        uint8_t* dst_argb,
                        const struct YuvConstants* yuvconstants,
                        int width);
+void I444ToRGB24Row_NEON(const uint8_t* src_y,
+                         const uint8_t* src_u,
+                         const uint8_t* src_v,
+                         uint8_t* dst_rgb24,
+                         const struct YuvConstants* yuvconstants,
+                         int width);
void I422ToARGBRow_NEON(const uint8_t* src_y,
                        const uint8_t* src_u,
                        const uint8_t* src_v,
                        uint8_t* dst_argb,
                        const struct YuvConstants* yuvconstants,
                        int width);
+void I444AlphaToARGBRow_NEON(const uint8_t* src_y,
+                             const uint8_t* src_u,
+                             const uint8_t* src_v,
+                             const uint8_t* src_a,
+                             uint8_t* dst_argb,
+                             const struct YuvConstants* yuvconstants,
+                             int width);
void I422AlphaToARGBRow_NEON(const uint8_t* src_y,
                             const uint8_t* src_u,
                             const uint8_t* src_v,
@@ -802,12 +954,6 @@ void I422AlphaToARGBRow_NEON(const uint8_t* src_y,
                             uint8_t* dst_argb,
                             const struct YuvConstants* yuvconstants,
                             int width);
-void I422ToARGBRow_NEON(const uint8_t* src_y,
-                        const uint8_t* src_u,
-                        const uint8_t* src_v,
-                        uint8_t* dst_argb,
-                        const struct YuvConstants* yuvconstants,
-                        int width);
void I422ToRGBARow_NEON(const uint8_t* src_y,
                        const uint8_t* src_u,
                        const uint8_t* src_v,
@@ -881,7 +1027,7 @@ void I444ToARGBRow_MSA(const uint8_t* src_y,
                       uint8_t* dst_argb,
                       const struct YuvConstants* yuvconstants,
                       int width);
-void I444ToARGBRow_MMI(const uint8_t* src_y,
+void I444ToARGBRow_LSX(const uint8_t* src_y,
                       const uint8_t* src_u,
                       const uint8_t* src_v,
                       uint8_t* dst_argb,
@@ -894,18 +1040,24 @@ void I422ToARGBRow_MSA(const uint8_t* src_y,
                       uint8_t* dst_argb,
                       const struct YuvConstants* yuvconstants,
                       int width);
+void I422ToARGBRow_LASX(const uint8_t* src_y,
+                        const uint8_t* src_u,
+                        const uint8_t* src_v,
+ uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); void I422ToRGBARow_MSA(const uint8_t* src_y, const uint8_t* src_u, const uint8_t* src_v, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width); -void I422ToARGBRow_MMI(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); +void I422ToRGBARow_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); void I422AlphaToARGBRow_MSA(const uint8_t* src_y, const uint8_t* src_u, const uint8_t* src_v, @@ -913,30 +1065,61 @@ void I422AlphaToARGBRow_MSA(const uint8_t* src_y, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width); +void I422AlphaToARGBRow_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + const uint8_t* src_a, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); void I422ToRGB24Row_MSA(const uint8_t* src_y, const uint8_t* src_u, const uint8_t* src_v, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width); +void I422ToRGB24Row_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); void I422ToRGB565Row_MSA(const uint8_t* src_y, const uint8_t* src_u, const uint8_t* src_v, uint8_t* dst_rgb565, const struct YuvConstants* yuvconstants, int width); +void I422ToRGB565Row_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_rgb565, + const struct YuvConstants* yuvconstants, + int width); void I422ToARGB4444Row_MSA(const uint8_t* src_y, const uint8_t* src_u, const uint8_t* src_v, uint8_t* dst_argb4444, const struct YuvConstants* yuvconstants, int width); +void I422ToARGB4444Row_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_argb4444, + const struct YuvConstants* yuvconstants, + int width); void I422ToARGB1555Row_MSA(const uint8_t* src_y, const uint8_t* src_u, const uint8_t* src_v, uint8_t* dst_argb1555, const struct YuvConstants* yuvconstants, int width); +void I422ToARGB1555Row_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_argb1555, + const struct YuvConstants* yuvconstants, + int width); void NV12ToARGBRow_MSA(const uint8_t* src_y, const uint8_t* src_uv, uint8_t* dst_argb, @@ -961,14 +1144,57 @@ void UYVYToARGBRow_MSA(const uint8_t* src_uyvy, const struct YuvConstants* yuvconstants, int width); +void NV12ToARGBRow_LSX(const uint8_t* src_y, + const uint8_t* src_uv, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void NV12ToARGBRow_LASX(const uint8_t* src_y, + const uint8_t* src_uv, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void NV12ToRGB565Row_LSX(const uint8_t* src_y, + const uint8_t* src_uv, + uint8_t* dst_rgb565, + const struct YuvConstants* yuvconstants, + int width); +void NV12ToRGB565Row_LASX(const uint8_t* src_y, + const uint8_t* src_uv, + uint8_t* dst_rgb565, + const struct YuvConstants* yuvconstants, + int width); +void NV21ToARGBRow_LSX(const uint8_t* src_y, + const uint8_t* src_vu, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void NV21ToARGBRow_LASX(const uint8_t* src_y, + const uint8_t* src_vu, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void YUY2ToARGBRow_LSX(const 
uint8_t* src_yuy2, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void UYVYToARGBRow_LSX(const uint8_t* src_uyvy, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); + void ARGBToYRow_AVX2(const uint8_t* src_argb, uint8_t* dst_y, int width); void ARGBToYRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ABGRToYRow_AVX2(const uint8_t* src_abgr, uint8_t* dst_y, int width); void ABGRToYRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ARGBToYRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_y, int width); +void ARGBToYJRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_y, int width); void ARGBToYJRow_AVX2(const uint8_t* src_argb, uint8_t* dst_y, int width); void ARGBToYJRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBToYJRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_y, int width); +void ABGRToYRow_SSSE3(const uint8_t* src_abgr, uint8_t* dst_y, int width); +void ABGRToYJRow_SSSE3(const uint8_t* src_abgr, uint8_t* dst_y, int width); +void ABGRToYJRow_AVX2(const uint8_t* src_abgr, uint8_t* dst_y, int width); +void ABGRToYJRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RGBAToYJRow_AVX2(const uint8_t* src_rgba, uint8_t* dst_y, int width); void RGBAToYJRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RGBAToYJRow_SSSE3(const uint8_t* src_rgba, uint8_t* dst_y, int width); @@ -976,18 +1202,25 @@ void BGRAToYRow_SSSE3(const uint8_t* src_bgra, uint8_t* dst_y, int width); void ABGRToYRow_SSSE3(const uint8_t* src_abgr, uint8_t* dst_y, int width); void RGBAToYRow_SSSE3(const uint8_t* src_rgba, uint8_t* dst_y, int width); void RGB24ToYRow_SSSE3(const uint8_t* src_rgb24, uint8_t* dst_y, int width); -void RGB24ToYJRow_SSSE3(const uint8_t* src_rgb24, uint8_t* dst_y, int width); +void RGB24ToYJRow_SSSE3(const uint8_t* src_rgb24, uint8_t* dst_yj, int width); void RAWToYRow_SSSE3(const uint8_t* src_raw, uint8_t* dst_y, int width); -void RAWToYJRow_SSSE3(const uint8_t* src_raw, uint8_t* dst_y, int width); -void RGB24ToYJRow_AVX2(const uint8_t* src_rgb24, uint8_t* dst_y, int width); -void RAWToYJRow_AVX2(const uint8_t* src_raw, uint8_t* dst_y, int width); +void RAWToYJRow_SSSE3(const uint8_t* src_raw, uint8_t* dst_yj, int width); +void RGB24ToYJRow_AVX2(const uint8_t* src_rgb24, uint8_t* dst_yj, int width); +void RAWToYJRow_AVX2(const uint8_t* src_raw, uint8_t* dst_yj, int width); void ARGBToYRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width); -void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width); -void RGBAToYJRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width); +void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_yj, int width); +void ABGRToYJRow_NEON(const uint8_t* src_abgr, uint8_t* dst_yj, int width); +void RGBAToYJRow_NEON(const uint8_t* src_rgba, uint8_t* dst_yj, int width); void ARGBToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width); void ARGBToYJRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void ARGBToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void ARGBToYJRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width); +void ARGBToYRow_LSX(const uint8_t* src_argb0, uint8_t* dst_y, int width); +void ARGBToYRow_LASX(const uint8_t* src_argb0, uint8_t* dst_y, int width); +void ARGBToYJRow_LSX(const uint8_t* src_argb0, uint8_t* dst_y, int width); +void ABGRToYJRow_LSX(const uint8_t* src_abgr, uint8_t* dst_yj, int width); +void 
RGBAToYJRow_LSX(const uint8_t* src_rgba, uint8_t* dst_yj, int width); +void ARGBToYJRow_LASX(const uint8_t* src_argb0, uint8_t* dst_y, int width); +void ABGRToYJRow_LASX(const uint8_t* src_abgr, uint8_t* dst_yj, int width); +void RGBAToYJRow_LASX(const uint8_t* src_rgba, uint8_t* dst_yj, int width); void ARGBToUV444Row_NEON(const uint8_t* src_argb, uint8_t* dst_u, uint8_t* dst_v, @@ -1006,20 +1239,25 @@ void ARGBToUVRow_MSA(const uint8_t* src_argb, uint8_t* dst_u, uint8_t* dst_v, int width); -void ARGBToUV444Row_MMI(const uint8_t* src_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVRow_MMI(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); +void ARGBToUVRow_LASX(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void ARGBToUV444Row_LASX(const uint8_t* src_argb, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void ARGBToUVJRow_NEON(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, uint8_t* dst_v, int width); +void ABGRToUVJRow_NEON(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_uj, + uint8_t* dst_vj, + int width); void BGRAToUVRow_NEON(const uint8_t* src_bgra, int src_stride_bgra, uint8_t* dst_u, @@ -1045,6 +1283,16 @@ void RAWToUVRow_NEON(const uint8_t* src_raw, uint8_t* dst_u, uint8_t* dst_v, int width); +void RGB24ToUVJRow_NEON(const uint8_t* src_rgb24, + int src_stride_rgb24, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void RAWToUVJRow_NEON(const uint8_t* src_raw, + int src_stride_raw, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void RGB565ToUVRow_NEON(const uint8_t* src_rgb565, int src_stride_rgb565, uint8_t* dst_u, @@ -1065,6 +1313,11 @@ void ARGBToUVJRow_MSA(const uint8_t* src_rgb, uint8_t* dst_u, uint8_t* dst_v, int width); +void ABGRToUVJRow_MSA(const uint8_t* src_rgb, + int src_stride_rgb, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void BGRAToUVRow_MSA(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, @@ -1100,51 +1353,71 @@ void ARGB1555ToUVRow_MSA(const uint8_t* src_argb1555, uint8_t* dst_u, uint8_t* dst_v, int width); -void ARGBToUVJRow_MMI(const uint8_t* src_rgb, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void BGRAToUVRow_MMI(const uint8_t* src_rgb, - int src_stride_rgb, +void BGRAToUVRow_LSX(const uint8_t* src_bgra, + int src_stride_bgra, uint8_t* dst_u, uint8_t* dst_v, int width); -void ABGRToUVRow_MMI(const uint8_t* src_rgb, - int src_stride_rgb, +void ABGRToUVRow_LSX(const uint8_t* src_abgr, + int src_stride_abgr, uint8_t* dst_u, uint8_t* dst_v, int width); -void RGBAToUVRow_MMI(const uint8_t* src_rgb, - int src_stride_rgb, +void RGBAToUVRow_LSX(const uint8_t* src_rgba, + int src_stride_rgba, uint8_t* dst_u, uint8_t* dst_v, int width); -void RGB24ToUVRow_MMI(const uint8_t* src_rgb, - int src_stride_rgb, +void ARGBToUVJRow_LSX(const uint8_t* src_argb, + int src_stride_argb, uint8_t* dst_u, uint8_t* dst_v, int width); -void RAWToUVRow_MMI(const uint8_t* src_rgb, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB565ToUVRow_MMI(const uint8_t* src_rgb565, - int src_stride_rgb565, +void ARGBToUVJRow_LASX(const uint8_t* src_argb, + int src_stride_argb, uint8_t* dst_u, uint8_t* dst_v, int width); -void ARGB1555ToUVRow_MMI(const uint8_t* src_argb1555, +void ARGB1555ToUVRow_LSX(const uint8_t* src_argb1555, int src_stride_argb1555, uint8_t* dst_u, uint8_t* dst_v, int width); -void ARGB4444ToUVRow_MMI(const uint8_t* src_argb4444, - int 
src_stride_argb4444, - uint8_t* dst_u, - uint8_t* dst_v, - int width); +void ARGB1555ToUVRow_LASX(const uint8_t* src_argb1555, + int src_stride_argb1555, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void RGB565ToUVRow_LSX(const uint8_t* src_rgb565, + int src_stride_rgb565, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void RGB565ToUVRow_LASX(const uint8_t* src_rgb565, + int src_stride_rgb565, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void RGB24ToUVRow_LSX(const uint8_t* src_rgb24, + int src_stride_rgb24, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void RGB24ToUVRow_LASX(const uint8_t* src_rgb24, + int src_stride_rgb24, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void RAWToUVRow_LSX(const uint8_t* src_raw, + int src_stride_raw, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void RAWToUVRow_LASX(const uint8_t* src_raw, + int src_stride_raw, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void BGRAToYRow_NEON(const uint8_t* src_bgra, uint8_t* dst_y, int width); void ABGRToYRow_NEON(const uint8_t* src_abgr, uint8_t* dst_y, int width); void RGBAToYRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width); @@ -1166,30 +1439,45 @@ void RGB24ToYRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width); void RAWToYRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width); void RGB565ToYRow_MSA(const uint8_t* src_rgb565, uint8_t* dst_y, int width); void ARGB1555ToYRow_MSA(const uint8_t* src_argb1555, uint8_t* dst_y, int width); -void BGRAToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width); -void ABGRToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width); -void RGBAToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width); -void RGB24ToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width); -void RAWToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width); -void RGB565ToYRow_MMI(const uint8_t* src_rgb565, uint8_t* dst_y, int width); -void ARGB1555ToYRow_MMI(const uint8_t* src_argb1555, uint8_t* dst_y, int width); -void ARGB4444ToYRow_MMI(const uint8_t* src_argb4444, uint8_t* dst_y, int width); - -void ARGBToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width); -void ARGBToYJRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width); -void RGBAToYJRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width); -void BGRAToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width); -void ABGRToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width); -void RGBAToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width); -void RGB24ToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width); -void RGB24ToYJRow_C(const uint8_t* src_argb, uint8_t* dst_yj, int width); -void RAWToYRow_C(const uint8_t* src_argb, uint8_t* dst_y, int width); -void RAWToYJRow_C(const uint8_t* src_argb, uint8_t* dst_yj, int width); + +void BGRAToYRow_LSX(const uint8_t* src_bgra, uint8_t* dst_y, int width); +void ABGRToYRow_LSX(const uint8_t* src_abgr, uint8_t* dst_y, int width); +void RGBAToYRow_LSX(const uint8_t* src_rgba, uint8_t* dst_y, int width); +void ARGB1555ToYRow_LSX(const uint8_t* src_argb1555, uint8_t* dst_y, int width); +void RGB24ToYJRow_LSX(const uint8_t* src_rgb24, uint8_t* dst_yj, int width); +void ABGRToYRow_LASX(const uint8_t* src_abgr, uint8_t* dst_y, int width); +void ARGB1555ToYRow_LASX(const uint8_t* src_argb1555, + uint8_t* dst_y, + int width); +void RGB565ToYRow_LSX(const uint8_t* src_rgb565, uint8_t* dst_y, int width); +void RGB565ToYRow_LASX(const uint8_t* src_rgb565, uint8_t* dst_y, int width); +void 
RGB24ToYRow_LSX(const uint8_t* src_rgb24, uint8_t* dst_y, int width); +void RGB24ToYRow_LASX(const uint8_t* src_rgb24, uint8_t* dst_y, int width); +void RAWToYRow_LSX(const uint8_t* src_raw, uint8_t* dst_y, int width); +void RAWToYRow_LASX(const uint8_t* src_raw, uint8_t* dst_y, int width); +void RGBAToYRow_LASX(const uint8_t* src_rgba, uint8_t* dst_y, int width); +void BGRAToYRow_LASX(const uint8_t* src_bgra, uint8_t* dst_y, int width); +void RGB24ToYJRow_LASX(const uint8_t* src_rgb24, uint8_t* dst_yj, int width); +void RAWToYJRow_LSX(const uint8_t* src_raw, uint8_t* dst_yj, int width); +void RAWToYJRow_LASX(const uint8_t* src_raw, uint8_t* dst_yj, int width); + +void ARGBToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width); +void ARGBToYJRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width); +void ABGRToYJRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width); +void RGBAToYJRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width); +void BGRAToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width); +void ABGRToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width); +void RGBAToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width); +void RGB24ToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width); +void RGB24ToYJRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width); +void RAWToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width); +void RAWToYJRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width); void RGB565ToYRow_C(const uint8_t* src_rgb565, uint8_t* dst_y, int width); void ARGB1555ToYRow_C(const uint8_t* src_argb1555, uint8_t* dst_y, int width); void ARGB4444ToYRow_C(const uint8_t* src_argb4444, uint8_t* dst_y, int width); void ARGBToYRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ARGBToYJRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void ABGRToYJRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RGBAToYJRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void BGRAToYRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ABGRToYRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); @@ -1204,6 +1492,7 @@ void RGB24ToYJRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RAWToYJRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ARGBToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ARGBToYJRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void ABGRToYJRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RGBAToYJRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void BGRAToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ABGRToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); @@ -1230,21 +1519,39 @@ void RGB565ToYRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ARGB1555ToYRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void BGRAToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ABGRToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGBAToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBToYJRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGB24ToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RAWToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* 
dst_ptr, int width); -void RGB565ToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGB1555ToYRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGB4444ToYRow_Any_MMI(const uint8_t* src_ptr, + +void BGRAToYRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void ABGRToYRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGBAToYRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void ARGBToYRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void ARGBToYJRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGB24ToYRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGB565ToYRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void ABGRToYJRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RAWToYRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGBAToYJRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGB24ToYJRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RAWToYJRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void ARGB1555ToYRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGB565ToYRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGB24ToYRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void ARGBToYJRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void ARGBToYRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void ABGRToYRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void ABGRToYJRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RAWToYRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGBAToYRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGBAToYJRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void BGRAToYRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGB24ToYJRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RAWToYJRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void ARGB1555ToYRow_Any_LASX(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int width); + void ARGBToUVRow_AVX2(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, @@ -1260,6 +1567,11 @@ void ARGBToUVJRow_AVX2(const uint8_t* src_argb, uint8_t* dst_u, uint8_t* dst_v, int width); +void ABGRToUVJRow_AVX2(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void ARGBToUVRow_SSSE3(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, @@ -1270,6 +1582,11 @@ void ARGBToUVJRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_u, uint8_t* dst_v, int width); +void ABGRToUVJRow_SSSE3(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void BGRAToUVRow_SSSE3(const uint8_t* src_bgra, int src_stride_bgra, uint8_t* dst_u, @@ -1286,42 +1603,52 @@ void RGBAToUVRow_SSSE3(const uint8_t* src_rgba, uint8_t* dst_v, int width); void ARGBToUVRow_Any_AVX2(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); void ABGRToUVRow_Any_AVX2(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); void ARGBToUVJRow_Any_AVX2(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, + uint8_t* dst_u, + 
uint8_t* dst_v, + int width); +void ABGRToUVJRow_Any_AVX2(const uint8_t* src_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); void ARGBToUVRow_Any_SSSE3(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); void ARGBToUVJRow_Any_SSSE3(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void ABGRToUVJRow_Any_SSSE3(const uint8_t* src_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); void BGRAToUVRow_Any_SSSE3(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); void ABGRToUVRow_Any_SSSE3(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); void RGBAToUVRow_Any_SSSE3(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); @@ -1330,7 +1657,7 @@ void ARGBToUV444Row_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_v, int width); void ARGBToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); @@ -1343,57 +1670,72 @@ void ARGBToUVRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); -void ARGBToUV444Row_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGBToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); +void ARGBToUVRow_Any_LASX(const uint8_t* src_ptr, + int src_stride_ptr, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void ARGBToUV444Row_Any_LASX(const uint8_t* src_ptr, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void ARGBToUVJRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void ABGRToUVJRow_Any_NEON(const uint8_t* src_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); void BGRAToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); void ABGRToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); void RGBAToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); void RGB24ToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); void RAWToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); +void RGB24ToUVJRow_Any_NEON(const uint8_t* src_ptr, + int src_stride, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void RAWToUVJRow_Any_NEON(const uint8_t* src_ptr, + int src_stride, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void RGB565ToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); void ARGB1555ToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); void ARGB4444ToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); @@ -1437,51 +1779,71 @@ void ARGB1555ToUVRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); -void ARGBToUVJRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, 
- int width); -void BGRAToUVRow_Any_MMI(const uint8_t* src_ptr, +void ABGRToUVRow_Any_LSX(const uint8_t* src_ptr, int src_stride_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); -void ABGRToUVRow_Any_MMI(const uint8_t* src_ptr, +void BGRAToUVRow_Any_LSX(const uint8_t* src_ptr, int src_stride_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); -void RGBAToUVRow_Any_MMI(const uint8_t* src_ptr, +void RGBAToUVRow_Any_LSX(const uint8_t* src_ptr, int src_stride_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); -void RGB24ToUVRow_Any_MMI(const uint8_t* src_ptr, +void ARGBToUVJRow_Any_LSX(const uint8_t* src_ptr, int src_stride_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); -void RAWToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void RGB565ToUVRow_Any_MMI(const uint8_t* src_ptr, +void ARGBToUVJRow_Any_LASX(const uint8_t* src_ptr, int src_stride_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); -void ARGB1555ToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); -void ARGB4444ToUVRow_Any_MMI(const uint8_t* src_ptr, +void ARGB1555ToUVRow_Any_LSX(const uint8_t* src_ptr, int src_stride_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); +void ARGB1555ToUVRow_Any_LASX(const uint8_t* src_ptr, + int src_stride_ptr, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void RGB565ToUVRow_Any_LSX(const uint8_t* src_ptr, + int src_stride_ptr, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void RGB565ToUVRow_Any_LASX(const uint8_t* src_ptr, + int src_stride_ptr, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void RGB24ToUVRow_Any_LSX(const uint8_t* src_ptr, + int src_stride_ptr, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void RGB24ToUVRow_Any_LASX(const uint8_t* src_ptr, + int src_stride_ptr, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void RAWToUVRow_Any_LSX(const uint8_t* src_ptr, + int src_stride_ptr, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void RAWToUVRow_Any_LASX(const uint8_t* src_ptr, + int src_stride_ptr, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void ARGBToUVRow_C(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, @@ -1492,16 +1854,16 @@ void ARGBToUVJRow_C(const uint8_t* src_rgb, uint8_t* dst_u, uint8_t* dst_v, int width); +void ABGRToUVJRow_C(const uint8_t* src_rgb, + int src_stride_rgb, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void ARGBToUVRow_C(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, uint8_t* dst_v, int width); -void ARGBToUVJRow_C(const uint8_t* src_rgb, - int src_stride_rgb, - uint8_t* dst_u, - uint8_t* dst_v, - int width); void BGRAToUVRow_C(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, @@ -1517,6 +1879,11 @@ void RGBAToUVRow_C(const uint8_t* src_rgb, uint8_t* dst_u, uint8_t* dst_v, int width); +void RGBAToUVJRow_C(const uint8_t* src_rgb, + int src_stride_rgb, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void RGB24ToUVRow_C(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, @@ -1527,6 +1894,16 @@ void RAWToUVRow_C(const uint8_t* src_rgb, uint8_t* dst_u, uint8_t* dst_v, int width); +void RGB24ToUVJRow_C(const uint8_t* src_rgb, + int src_stride_rgb, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void RAWToUVJRow_C(const uint8_t* src_rgb, + int src_stride_rgb, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void RGB565ToUVRow_C(const uint8_t* src_rgb565, int src_stride_rgb565, uint8_t* dst_u, @@ -1561,23 +1938,25 @@ void MirrorRow_AVX2(const uint8_t* src, uint8_t* dst, int 
width); void MirrorRow_SSSE3(const uint8_t* src, uint8_t* dst, int width); void MirrorRow_NEON(const uint8_t* src, uint8_t* dst, int width); void MirrorRow_MSA(const uint8_t* src, uint8_t* dst, int width); -void MirrorRow_MMI(const uint8_t* src, uint8_t* dst, int width); +void MirrorRow_LASX(const uint8_t* src, uint8_t* dst, int width); void MirrorRow_C(const uint8_t* src, uint8_t* dst, int width); void MirrorRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void MirrorRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void MirrorRow_Any_SSE2(const uint8_t* src, uint8_t* dst, int width); void MirrorRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void MirrorRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void MirrorRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void MirrorRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void MirrorUVRow_AVX2(const uint8_t* src_uv, uint8_t* dst_uv, int width); void MirrorUVRow_SSSE3(const uint8_t* src_uv, uint8_t* dst_uv, int width); void MirrorUVRow_NEON(const uint8_t* src_uv, uint8_t* dst_uv, int width); void MirrorUVRow_MSA(const uint8_t* src_uv, uint8_t* dst_uv, int width); +void MirrorUVRow_LASX(const uint8_t* src_uv, uint8_t* dst_uv, int width); void MirrorUVRow_C(const uint8_t* src_uv, uint8_t* dst_uv, int width); void MirrorUVRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void MirrorUVRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void MirrorUVRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void MirrorUVRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void MirrorUVRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void MirrorSplitUVRow_SSSE3(const uint8_t* src, uint8_t* dst_u, @@ -1591,7 +1970,7 @@ void MirrorSplitUVRow_MSA(const uint8_t* src_uv, uint8_t* dst_u, uint8_t* dst_v, int width); -void MirrorSplitUVRow_MMI(const uint8_t* src_uv, +void MirrorSplitUVRow_LSX(const uint8_t* src_uv, uint8_t* dst_u, uint8_t* dst_v, int width); @@ -1600,11 +1979,13 @@ void MirrorSplitUVRow_C(const uint8_t* src_uv, uint8_t* dst_v, int width); +void MirrorRow_16_C(const uint16_t* src, uint16_t* dst, int width); + void ARGBMirrorRow_AVX2(const uint8_t* src, uint8_t* dst, int width); void ARGBMirrorRow_SSE2(const uint8_t* src, uint8_t* dst, int width); -void ARGBMirrorRow_NEON(const uint8_t* src, uint8_t* dst, int width); +void ARGBMirrorRow_NEON(const uint8_t* src_argb, uint8_t* dst_argb, int width); void ARGBMirrorRow_MSA(const uint8_t* src, uint8_t* dst, int width); -void ARGBMirrorRow_MMI(const uint8_t* src, uint8_t* dst, int width); +void ARGBMirrorRow_LASX(const uint8_t* src, uint8_t* dst, int width); void ARGBMirrorRow_C(const uint8_t* src, uint8_t* dst, int width); void ARGBMirrorRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, @@ -1616,11 +1997,17 @@ void ARGBMirrorRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ARGBMirrorRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBMirrorRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void ARGBMirrorRow_Any_LASX(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int width); -void RGB24MirrorRow_SSSE3(const uint8_t* src, uint8_t* dst, int width); -void RGB24MirrorRow_NEON(const uint8_t* src, uint8_t* dst, int width); -void RGB24MirrorRow_C(const uint8_t* src, uint8_t* dst, int width); +void RGB24MirrorRow_SSSE3(const uint8_t* src_rgb24, + uint8_t* 
dst_rgb24, + int width); +void RGB24MirrorRow_NEON(const uint8_t* src_rgb24, + uint8_t* dst_rgb24, + int width); +void RGB24MirrorRow_C(const uint8_t* src_rgb24, uint8_t* dst_rgb24, int width); void RGB24MirrorRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); @@ -1648,7 +2035,7 @@ void SplitUVRow_MSA(const uint8_t* src_uv, uint8_t* dst_u, uint8_t* dst_v, int width); -void SplitUVRow_MMI(const uint8_t* src_uv, +void SplitUVRow_LSX(const uint8_t* src_uv, uint8_t* dst_u, uint8_t* dst_v, int width); @@ -1668,11 +2055,123 @@ void SplitUVRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); -void SplitUVRow_Any_MMI(const uint8_t* src_ptr, +void SplitUVRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); - +void DetileRow_C(const uint8_t* src, + ptrdiff_t src_tile_stride, + uint8_t* dst, + int width); +void DetileRow_NEON(const uint8_t* src, + ptrdiff_t src_tile_stride, + uint8_t* dst, + int width); +void DetileRow_Any_NEON(const uint8_t* src, + ptrdiff_t src_tile_stride, + uint8_t* dst, + int width); +void DetileRow_SSE2(const uint8_t* src, + ptrdiff_t src_tile_stride, + uint8_t* dst, + int width); +void DetileRow_Any_SSE2(const uint8_t* src, + ptrdiff_t src_tile_stride, + uint8_t* dst, + int width); +void DetileRow_AVX(const uint8_t* src, + ptrdiff_t src_tile_stride, + uint8_t* dst, + int width); +void DetileRow_Any_AVX(const uint8_t* src, + ptrdiff_t src_tile_stride, + uint8_t* dst, + int width); +void DetileRow_16_C(const uint16_t* src, + ptrdiff_t src_tile_stride, + uint16_t* dst, + int width); +void DetileRow_16_NEON(const uint16_t* src, + ptrdiff_t src_tile_stride, + uint16_t* dst, + int width); +void DetileRow_16_Any_NEON(const uint16_t* src, + ptrdiff_t src_tile_stride, + uint16_t* dst, + int width); +void DetileRow_16_SSE2(const uint16_t* src, + ptrdiff_t src_tile_stride, + uint16_t* dst, + int width); +void DetileRow_16_Any_SSE2(const uint16_t* src, + ptrdiff_t src_tile_stride, + uint16_t* dst, + int width); +void DetileRow_16_AVX(const uint16_t* src, + ptrdiff_t src_tile_stride, + uint16_t* dst, + int width); +void DetileRow_16_Any_AVX(const uint16_t* src, + ptrdiff_t src_tile_stride, + uint16_t* dst, + int width); +void DetileSplitUVRow_C(const uint8_t* src_uv, + ptrdiff_t src_tile_stride, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void DetileSplitUVRow_SSSE3(const uint8_t* src_uv, + ptrdiff_t src_tile_stride, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void DetileSplitUVRow_Any_SSSE3(const uint8_t* src_uv, + ptrdiff_t src_tile_stride, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void DetileSplitUVRow_NEON(const uint8_t* src_uv, + ptrdiff_t src_tile_stride, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void DetileSplitUVRow_Any_NEON(const uint8_t* src_uv, + ptrdiff_t src_tile_stride, + uint8_t* dst_u, + uint8_t* dst_v, + int width); +void DetileToYUY2_C(const uint8_t* src_y, + ptrdiff_t src_y_tile_stride, + const uint8_t* src_uv, + ptrdiff_t src_uv_tile_stride, + uint8_t* dst_yuy2, + int width); +void DetileToYUY2_SSE2(const uint8_t* src_y, + ptrdiff_t src_y_tile_stride, + const uint8_t* src_uv, + ptrdiff_t src_uv_tile_stride, + uint8_t* dst_yuy2, + int width); +void DetileToYUY2_Any_SSE2(const uint8_t* src_y, + ptrdiff_t src_y_tile_stride, + const uint8_t* src_uv, + ptrdiff_t src_uv_tile_stride, + uint8_t* dst_yuy2, + int width); +void DetileToYUY2_NEON(const uint8_t* src_y, + ptrdiff_t src_y_tile_stride, + const uint8_t* src_uv, + ptrdiff_t src_uv_tile_stride, + uint8_t* dst_yuy2, + int 
width); +void DetileToYUY2_Any_NEON(const uint8_t* src_y, + ptrdiff_t src_y_tile_stride, + const uint8_t* src_uv, + ptrdiff_t src_uv_tile_stride, + uint8_t* dst_yuy2, + int width); +void UnpackMT2T_C(const uint8_t* src, uint16_t* dst, size_t size); +void UnpackMT2T_NEON(const uint8_t* src, uint16_t* dst, size_t size); void MergeUVRow_C(const uint8_t* src_u, const uint8_t* src_v, uint8_t* dst_uv, @@ -1693,7 +2192,7 @@ void MergeUVRow_MSA(const uint8_t* src_u, const uint8_t* src_v, uint8_t* dst_uv, int width); -void MergeUVRow_MMI(const uint8_t* src_u, +void MergeUVRow_LSX(const uint8_t* src_u, const uint8_t* src_v, uint8_t* dst_uv, int width); @@ -1713,7 +2212,7 @@ void MergeUVRow_Any_MSA(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); -void MergeUVRow_Any_MMI(const uint8_t* y_buf, +void MergeUVRow_Any_LSX(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); @@ -1761,11 +2260,6 @@ void SplitRGBRow_NEON(const uint8_t* src_rgb, uint8_t* dst_g, uint8_t* dst_b, int width); -void SplitRGBRow_MMI(const uint8_t* src_rgb, - uint8_t* dst_r, - uint8_t* dst_g, - uint8_t* dst_b, - int width); void SplitRGBRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_r, uint8_t* dst_g, @@ -1776,11 +2270,6 @@ void SplitRGBRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_g, uint8_t* dst_b, int width); -void SplitRGBRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_r, - uint8_t* dst_g, - uint8_t* dst_b, - int width); void MergeRGBRow_C(const uint8_t* src_r, const uint8_t* src_g, @@ -1797,11 +2286,6 @@ void MergeRGBRow_NEON(const uint8_t* src_r, const uint8_t* src_b, uint8_t* dst_rgb, int width); -void MergeRGBRow_MMI(const uint8_t* src_r, - const uint8_t* src_g, - const uint8_t* src_b, - uint8_t* dst_rgb, - int width); void MergeRGBRow_Any_SSSE3(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, @@ -1812,31 +2296,449 @@ void MergeRGBRow_Any_NEON(const uint8_t* src_r, const uint8_t* src_b, uint8_t* dst_rgb, int width); -void MergeRGBRow_Any_MMI(const uint8_t* src_r, - const uint8_t* src_g, - const uint8_t* src_b, - uint8_t* dst_rgb, +void MergeARGBRow_C(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + const uint8_t* src_a, + uint8_t* dst_argb, + int width); +void MergeARGBRow_SSE2(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + const uint8_t* src_a, + uint8_t* dst_argb, + int width); +void MergeARGBRow_AVX2(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + const uint8_t* src_a, + uint8_t* dst_argb, + int width); +void MergeARGBRow_NEON(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + const uint8_t* src_a, + uint8_t* dst_argb, + int width); +void MergeARGBRow_Any_SSE2(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + const uint8_t* a_buf, + uint8_t* dst_ptr, + int width); +void MergeARGBRow_Any_AVX2(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + const uint8_t* a_buf, + uint8_t* dst_ptr, + int width); +void MergeARGBRow_Any_NEON(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + const uint8_t* a_buf, + uint8_t* dst_ptr, + int width); +void SplitARGBRow_C(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + uint8_t* dst_a, + int width); +void SplitARGBRow_SSE2(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + uint8_t* dst_a, + int width); +void SplitARGBRow_SSSE3(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, 
+ uint8_t* dst_a, + int width); +void SplitARGBRow_AVX2(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + uint8_t* dst_a, + int width); +void SplitARGBRow_NEON(const uint8_t* src_rgba, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + uint8_t* dst_a, + int width); +void SplitARGBRow_Any_SSE2(const uint8_t* src_ptr, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + uint8_t* dst_a, + int width); +void SplitARGBRow_Any_SSSE3(const uint8_t* src_ptr, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + uint8_t* dst_a, + int width); +void SplitARGBRow_Any_AVX2(const uint8_t* src_ptr, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + uint8_t* dst_a, + int width); +void SplitARGBRow_Any_NEON(const uint8_t* src_ptr, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + uint8_t* dst_a, + int width); +void MergeXRGBRow_C(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + uint8_t* dst_argb, + int width); +void MergeXRGBRow_SSE2(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + uint8_t* dst_argb, + int width); +void MergeXRGBRow_AVX2(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + uint8_t* dst_argb, + int width); +void MergeXRGBRow_NEON(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + uint8_t* dst_argb, + int width); +void MergeXRGBRow_Any_SSE2(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_ptr, + int width); +void MergeXRGBRow_Any_AVX2(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_ptr, + int width); +void MergeXRGBRow_Any_NEON(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_ptr, + int width); +void SplitXRGBRow_C(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width); +void SplitXRGBRow_SSE2(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width); +void SplitXRGBRow_SSSE3(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width); +void SplitXRGBRow_AVX2(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width); +void SplitXRGBRow_NEON(const uint8_t* src_rgba, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width); +void SplitXRGBRow_Any_SSE2(const uint8_t* src_ptr, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width); +void SplitXRGBRow_Any_SSSE3(const uint8_t* src_ptr, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width); +void SplitXRGBRow_Any_AVX2(const uint8_t* src_ptr, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width); +void SplitXRGBRow_Any_NEON(const uint8_t* src_ptr, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width); + +void MergeXR30Row_C(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_ar30, + int depth, + int width); +void MergeAR64Row_C(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + const uint16_t* src_a, + uint16_t* dst_ar64, + int depth, + int width); +void MergeARGB16To8Row_C(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + const uint16_t* src_a, + uint8_t* dst_argb, + int depth, + int width); +void MergeXR64Row_C(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint16_t* dst_ar64, + int depth, + int width); +void MergeXRGB16To8Row_C(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, 
+ uint8_t* dst_argb, + int depth, int width); +void MergeXR30Row_AVX2(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_ar30, + int depth, + int width); +void MergeAR64Row_AVX2(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + const uint16_t* src_a, + uint16_t* dst_ar64, + int depth, + int width); +void MergeARGB16To8Row_AVX2(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + const uint16_t* src_a, + uint8_t* dst_argb, + int depth, + int width); +void MergeXR64Row_AVX2(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint16_t* dst_ar64, + int depth, + int width); +void MergeXRGB16To8Row_AVX2(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_argb, + int depth, + int width); +void MergeXR30Row_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_ar30, + int depth, + int width); +void MergeXR30Row_10_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_ar30, + int /* depth */, + int width); +void MergeAR64Row_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + const uint16_t* src_a, + uint16_t* dst_ar64, + int depth, + int width); +void MergeARGB16To8Row_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + const uint16_t* src_a, + uint8_t* dst_argb, + int depth, + int width); +void MergeXR64Row_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint16_t* dst_ar64, + int depth, + int width); +void MergeXRGB16To8Row_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_argb, + int depth, + int width); +void MergeXR30Row_Any_AVX2(const uint16_t* r_buf, + const uint16_t* g_buf, + const uint16_t* b_buf, + uint8_t* dst_ptr, + int depth, + int width); +void MergeAR64Row_Any_AVX2(const uint16_t* r_buf, + const uint16_t* g_buf, + const uint16_t* b_buf, + const uint16_t* a_buf, + uint16_t* dst_ptr, + int depth, + int width); +void MergeXR64Row_Any_AVX2(const uint16_t* r_buf, + const uint16_t* g_buf, + const uint16_t* b_buf, + uint16_t* dst_ptr, + int depth, + int width); +void MergeARGB16To8Row_Any_AVX2(const uint16_t* r_buf, + const uint16_t* g_buf, + const uint16_t* b_buf, + const uint16_t* a_buf, + uint8_t* dst_ptr, + int depth, + int width); +void MergeXRGB16To8Row_Any_AVX2(const uint16_t* r_buf, + const uint16_t* g_buf, + const uint16_t* b_buf, + uint8_t* dst_ptr, + int depth, + int width); +void MergeXR30Row_Any_NEON(const uint16_t* r_buf, + const uint16_t* g_buf, + const uint16_t* b_buf, + uint8_t* dst_ptr, + int depth, + int width); +void MergeXR30Row_10_Any_NEON(const uint16_t* r_buf, + const uint16_t* g_buf, + const uint16_t* b_buf, + uint8_t* dst_ptr, + int depth, + int width); +void MergeAR64Row_Any_NEON(const uint16_t* r_buf, + const uint16_t* g_buf, + const uint16_t* b_buf, + const uint16_t* a_buf, + uint16_t* dst_ptr, + int depth, + int width); +void MergeARGB16To8Row_Any_NEON(const uint16_t* r_buf, + const uint16_t* g_buf, + const uint16_t* b_buf, + const uint16_t* a_buf, + uint8_t* dst_ptr, + int depth, + int width); +void MergeXR64Row_Any_NEON(const uint16_t* r_buf, + const uint16_t* g_buf, + const uint16_t* b_buf, + uint16_t* dst_ptr, + int depth, + int width); +void MergeXRGB16To8Row_Any_NEON(const uint16_t* r_buf, + const uint16_t* g_buf, + const uint16_t* b_buf, + uint8_t* dst_ptr, + int depth, + int width); void 
MergeUVRow_16_C(const uint16_t* src_u, const uint16_t* src_v, uint16_t* dst_uv, - int scale, /* 64 for 10 bit */ + int depth, int width); void MergeUVRow_16_AVX2(const uint16_t* src_u, const uint16_t* src_v, uint16_t* dst_uv, - int scale, + int depth, + int width); +void MergeUVRow_16_Any_AVX2(const uint16_t* src_u, + const uint16_t* src_v, + uint16_t* dst_uv, + int depth, + int width); +void MergeUVRow_16_NEON(const uint16_t* src_u, + const uint16_t* src_v, + uint16_t* dst_uv, + int depth, int width); +void MergeUVRow_16_Any_NEON(const uint16_t* src_u, + const uint16_t* src_v, + uint16_t* dst_uv, + int depth, + int width); + +void SplitUVRow_16_C(const uint16_t* src_uv, + uint16_t* dst_u, + uint16_t* dst_v, + int depth, + int width); +void SplitUVRow_16_AVX2(const uint16_t* src_uv, + uint16_t* dst_u, + uint16_t* dst_v, + int depth, + int width); +void SplitUVRow_16_Any_AVX2(const uint16_t* src_uv, + uint16_t* dst_u, + uint16_t* dst_v, + int depth, + int width); +void SplitUVRow_16_NEON(const uint16_t* src_uv, + uint16_t* dst_u, + uint16_t* dst_v, + int depth, + int width); +void SplitUVRow_16_Any_NEON(const uint16_t* src_uv, + uint16_t* dst_u, + uint16_t* dst_v, + int depth, + int width); -void MultiplyRow_16_AVX2(const uint16_t* src_y, - uint16_t* dst_y, - int scale, - int width); void MultiplyRow_16_C(const uint16_t* src_y, uint16_t* dst_y, int scale, int width); +void MultiplyRow_16_AVX2(const uint16_t* src_y, + uint16_t* dst_y, + int scale, + int width); +void MultiplyRow_16_Any_AVX2(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int scale, + int width); +void MultiplyRow_16_NEON(const uint16_t* src_y, + uint16_t* dst_y, + int scale, + int width); +void MultiplyRow_16_Any_NEON(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int scale, + int width); + +void DivideRow_16_C(const uint16_t* src_y, + uint16_t* dst_y, + int scale, + int width); +void DivideRow_16_AVX2(const uint16_t* src_y, + uint16_t* dst_y, + int scale, + int width); +void DivideRow_16_Any_AVX2(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int scale, + int width); +void DivideRow_16_NEON(const uint16_t* src_y, + uint16_t* dst_y, + int scale, + int width); +void DivideRow_16_Any_NEON(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int scale, + int width); void Convert8To16Row_C(const uint8_t* src_y, uint16_t* dst_y, @@ -1879,6 +2781,14 @@ void Convert16To8Row_Any_AVX2(const uint16_t* src_ptr, uint8_t* dst_ptr, int scale, int width); +void Convert16To8Row_NEON(const uint16_t* src_y, + uint8_t* dst_y, + int scale, + int width); +void Convert16To8Row_Any_NEON(const uint16_t* src_ptr, + uint8_t* dst_ptr, + int scale, + int width); void CopyRow_SSE2(const uint8_t* src, uint8_t* dst, int width); void CopyRow_AVX(const uint8_t* src, uint8_t* dst, int width); @@ -1895,16 +2805,12 @@ void CopyRow_16_C(const uint16_t* src, uint16_t* dst, int count); void ARGBCopyAlphaRow_C(const uint8_t* src, uint8_t* dst, int width); void ARGBCopyAlphaRow_SSE2(const uint8_t* src, uint8_t* dst, int width); void ARGBCopyAlphaRow_AVX2(const uint8_t* src, uint8_t* dst, int width); -void ARGBCopyAlphaRow_MMI(const uint8_t* src, uint8_t* dst, int width); void ARGBCopyAlphaRow_Any_SSE2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ARGBCopyAlphaRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBCopyAlphaRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); void ARGBExtractAlphaRow_C(const uint8_t* src_argb, uint8_t* dst_a, int width); void ARGBExtractAlphaRow_SSE2(const uint8_t* src_argb, @@ -1919,7 
+2825,7 @@ void ARGBExtractAlphaRow_NEON(const uint8_t* src_argb, void ARGBExtractAlphaRow_MSA(const uint8_t* src_argb, uint8_t* dst_a, int width); -void ARGBExtractAlphaRow_MMI(const uint8_t* src_argb, +void ARGBExtractAlphaRow_LSX(const uint8_t* src_argb, uint8_t* dst_a, int width); void ARGBExtractAlphaRow_Any_SSE2(const uint8_t* src_ptr, @@ -1934,31 +2840,29 @@ void ARGBExtractAlphaRow_Any_NEON(const uint8_t* src_ptr, void ARGBExtractAlphaRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBExtractAlphaRow_Any_MMI(const uint8_t* src_ptr, +void ARGBExtractAlphaRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ARGBCopyYToAlphaRow_C(const uint8_t* src, uint8_t* dst, int width); void ARGBCopyYToAlphaRow_SSE2(const uint8_t* src, uint8_t* dst, int width); void ARGBCopyYToAlphaRow_AVX2(const uint8_t* src, uint8_t* dst, int width); -void ARGBCopyYToAlphaRow_MMI(const uint8_t* src, uint8_t* dst, int width); void ARGBCopyYToAlphaRow_Any_SSE2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ARGBCopyYToAlphaRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBCopyYToAlphaRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); void SetRow_C(uint8_t* dst, uint8_t v8, int width); void SetRow_MSA(uint8_t* dst, uint8_t v8, int width); void SetRow_X86(uint8_t* dst, uint8_t v8, int width); void SetRow_ERMS(uint8_t* dst, uint8_t v8, int width); void SetRow_NEON(uint8_t* dst, uint8_t v8, int width); +void SetRow_LSX(uint8_t* dst, uint8_t v8, int width); void SetRow_Any_X86(uint8_t* dst_ptr, uint8_t v32, int width); void SetRow_Any_NEON(uint8_t* dst_ptr, uint8_t v32, int width); +void SetRow_Any_LSX(uint8_t* dst_ptr, uint8_t v32, int width); void ARGBSetRow_C(uint8_t* dst_argb, uint32_t v32, int width); void ARGBSetRow_X86(uint8_t* dst_argb, uint32_t v32, int width); @@ -1966,8 +2870,8 @@ void ARGBSetRow_NEON(uint8_t* dst, uint32_t v32, int width); void ARGBSetRow_Any_NEON(uint8_t* dst_ptr, uint32_t v32, int width); void ARGBSetRow_MSA(uint8_t* dst_argb, uint32_t v32, int width); void ARGBSetRow_Any_MSA(uint8_t* dst_ptr, uint32_t v32, int width); -void ARGBSetRow_MMI(uint8_t* dst_argb, uint32_t v32, int width); -void ARGBSetRow_Any_MMI(uint8_t* dst_ptr, uint32_t v32, int width); +void ARGBSetRow_LSX(uint8_t* dst_argb, uint32_t v32, int width); +void ARGBSetRow_Any_LSX(uint8_t* dst_ptr, uint32_t v32, int width); // ARGBShufflers for BGRAToARGB etc. 
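Editorial note (not part of the patch): the ARGBShuffleRow_* family declared just below reorders the four bytes of every pixel through a small lookup table, which is how BGRA/ABGR/RGBA conversions share one kernel; the _SSSE3/_AVX2/_NEON/_MSA and new _LASX variants are accelerated drop-ins for the same contract, and the _Any_ wrappers lift the width restriction. A minimal scalar sketch of that contract, with an assumed illustrative name:

#include <stdint.h>

/* shuffler holds four source-byte indices per destination pixel,
 * e.g. {2, 1, 0, 3} swaps the first and third byte of every pixel. */
static void ShuffleARGBRowScalar(const uint8_t* src_argb,
                                 uint8_t* dst_argb,
                                 const uint8_t* shuffler,
                                 int width) {
  for (int x = 0; x < width; ++x) {
    dst_argb[0] = src_argb[shuffler[0]];
    dst_argb[1] = src_argb[shuffler[1]];
    dst_argb[2] = src_argb[shuffler[2]];
    dst_argb[3] = src_argb[shuffler[3]];
    src_argb += 4;
    dst_argb += 4;
  }
}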
void ARGBShuffleRow_C(const uint8_t* src_argb, @@ -1990,10 +2894,10 @@ void ARGBShuffleRow_MSA(const uint8_t* src_argb, uint8_t* dst_argb, const uint8_t* shuffler, int width); -void ARGBShuffleRow_MMI(const uint8_t* src_argb, - uint8_t* dst_argb, - const uint8_t* shuffler, - int width); +void ARGBShuffleRow_LASX(const uint8_t* src_argb, + uint8_t* dst_argb, + const uint8_t* shuffler, + int width); void ARGBShuffleRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, const uint8_t* param, @@ -2010,10 +2914,10 @@ void ARGBShuffleRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, const uint8_t* param, int width); -void ARGBShuffleRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const uint8_t* param, - int width); +void ARGBShuffleRow_Any_LASX(const uint8_t* src_ptr, + uint8_t* dst_ptr, + const uint8_t* param, + int width); void RGB24ToARGBRow_SSSE3(const uint8_t* src_rgb24, uint8_t* dst_argb, @@ -2038,41 +2942,54 @@ void RGB24ToARGBRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_argb, int width); void RGB24ToARGBRow_MSA(const uint8_t* src_rgb24, uint8_t* dst_argb, int width); -void RGB24ToARGBRow_MMI(const uint8_t* src_rgb24, uint8_t* dst_argb, int width); +void RGB24ToARGBRow_LSX(const uint8_t* src_rgb24, uint8_t* dst_argb, int width); +void RGB24ToARGBRow_LASX(const uint8_t* src_rgb24, + uint8_t* dst_argb, + int width); void RAWToARGBRow_NEON(const uint8_t* src_raw, uint8_t* dst_argb, int width); void RAWToRGBARow_NEON(const uint8_t* src_raw, uint8_t* dst_rgba, int width); void RAWToARGBRow_MSA(const uint8_t* src_raw, uint8_t* dst_argb, int width); -void RAWToARGBRow_MMI(const uint8_t* src_raw, uint8_t* dst_argb, int width); +void RAWToARGBRow_LSX(const uint8_t* src_raw, uint8_t* dst_argb, int width); +void RAWToARGBRow_LASX(const uint8_t* src_raw, uint8_t* dst_argb, int width); void RAWToRGB24Row_NEON(const uint8_t* src_raw, uint8_t* dst_rgb24, int width); void RAWToRGB24Row_MSA(const uint8_t* src_raw, uint8_t* dst_rgb24, int width); -void RAWToRGB24Row_MMI(const uint8_t* src_raw, uint8_t* dst_rgb24, int width); +void RAWToRGB24Row_LSX(const uint8_t* src_raw, uint8_t* dst_rgb24, int width); void RGB565ToARGBRow_NEON(const uint8_t* src_rgb565, uint8_t* dst_argb, int width); void RGB565ToARGBRow_MSA(const uint8_t* src_rgb565, uint8_t* dst_argb, int width); -void RGB565ToARGBRow_MMI(const uint8_t* src_rgb565, +void RGB565ToARGBRow_LSX(const uint8_t* src_rgb565, uint8_t* dst_argb, int width); +void RGB565ToARGBRow_LASX(const uint8_t* src_rgb565, + uint8_t* dst_argb, + int width); void ARGB1555ToARGBRow_NEON(const uint8_t* src_argb1555, uint8_t* dst_argb, int width); void ARGB1555ToARGBRow_MSA(const uint8_t* src_argb1555, uint8_t* dst_argb, int width); -void ARGB1555ToARGBRow_MMI(const uint8_t* src_argb1555, +void ARGB1555ToARGBRow_LSX(const uint8_t* src_argb1555, uint8_t* dst_argb, int width); +void ARGB1555ToARGBRow_LASX(const uint8_t* src_argb1555, + uint8_t* dst_argb, + int width); void ARGB4444ToARGBRow_NEON(const uint8_t* src_argb4444, uint8_t* dst_argb, int width); void ARGB4444ToARGBRow_MSA(const uint8_t* src_argb4444, uint8_t* dst_argb, int width); -void ARGB4444ToARGBRow_MMI(const uint8_t* src_argb4444, +void ARGB4444ToARGBRow_LSX(const uint8_t* src_argb4444, uint8_t* dst_argb, int width); +void ARGB4444ToARGBRow_LASX(const uint8_t* src_argb4444, + uint8_t* dst_argb, + int width); void RGB24ToARGBRow_C(const uint8_t* src_rgb24, uint8_t* dst_argb, int width); void RAWToARGBRow_C(const uint8_t* src_raw, uint8_t* dst_argb, int width); void RAWToRGBARow_C(const uint8_t* 
src_raw, uint8_t* dst_rgba, int width); @@ -2127,46 +3044,59 @@ void RGB24ToARGBRow_Any_NEON(const uint8_t* src_ptr, void RGB24ToARGBRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGB24ToARGBRow_Any_MMI(const uint8_t* src_ptr, +void RGB24ToARGBRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGB24ToARGBRow_Any_LASX(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int width); void RAWToARGBRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RAWToRGBARow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RAWToARGBRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RAWToARGBRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RAWToARGBRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RAWToARGBRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RAWToRGB24Row_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RAWToRGB24Row_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RAWToRGB24Row_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RAWToRGB24Row_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RGB565ToARGBRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void RGB565ToARGBRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void RGB565ToARGBRow_Any_MMI(const uint8_t* src_ptr, +void RGB565ToARGBRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void RGB565ToARGBRow_Any_LASX(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int width); void ARGB1555ToARGBRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ARGB1555ToARGBRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGB1555ToARGBRow_Any_MMI(const uint8_t* src_ptr, +void ARGB4444ToARGBRow_Any_NEON(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int width); +void ARGB1555ToARGBRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGB4444ToARGBRow_Any_NEON(const uint8_t* src_ptr, +void ARGB1555ToARGBRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void ARGB4444ToARGBRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGB4444ToARGBRow_Any_MMI(const uint8_t* src_ptr, +void ARGB4444ToARGBRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void ARGB4444ToARGBRow_Any_LASX(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int width); void ARGBToRGB24Row_SSSE3(const uint8_t* src, uint8_t* dst, int width); void ARGBToRAWRow_SSSE3(const uint8_t* src, uint8_t* dst, int width); @@ -2234,20 +3164,20 @@ void ARGBToRGB565DitherRow_MSA(const uint8_t* src_argb, uint8_t* dst_rgb, const uint32_t dither4, int width); +void ARGBToRGB565DitherRow_LASX(const uint8_t* src_argb, + uint8_t* dst_rgb, + const uint32_t dither4, + int width); -void ARGBToRGB24Row_MMI(const uint8_t* src_argb, uint8_t* dst_rgb, int width); -void ARGBToRAWRow_MMI(const uint8_t* src_argb, uint8_t* dst_rgb, int width); -void ARGBToRGB565Row_MMI(const uint8_t* src_argb, uint8_t* dst_rgb, int width); -void ARGBToARGB1555Row_MMI(const uint8_t* src_argb, - uint8_t* dst_rgb, - int width); -void ARGBToARGB4444Row_MMI(const uint8_t* src_argb, - uint8_t* dst_rgb, - int width); -void ARGBToRGB565DitherRow_MMI(const uint8_t* src_argb, - uint8_t* dst_rgb, - const uint32_t dither4, - int width); +void ARGBToRGB24Row_LASX(const uint8_t* src_argb, uint8_t* dst_rgb, int width); +void ARGBToRAWRow_LASX(const uint8_t* src_argb, 
uint8_t* dst_rgb, int width); +void ARGBToRGB565Row_LASX(const uint8_t* src_argb, uint8_t* dst_rgb, int width); +void ARGBToARGB1555Row_LASX(const uint8_t* src_argb, + uint8_t* dst_rgb, + int width); +void ARGBToARGB4444Row_LASX(const uint8_t* src_argb, + uint8_t* dst_rgb, + int width); void ARGBToRGBARow_C(const uint8_t* src_argb, uint8_t* dst_rgb, int width); void ARGBToRGB24Row_C(const uint8_t* src_argb, uint8_t* dst_rgb, int width); @@ -2258,11 +3188,76 @@ void ARGBToARGB4444Row_C(const uint8_t* src_argb, uint8_t* dst_rgb, int width); void ABGRToAR30Row_C(const uint8_t* src_abgr, uint8_t* dst_ar30, int width); void ARGBToAR30Row_C(const uint8_t* src_argb, uint8_t* dst_ar30, int width); +void ARGBToAR64Row_C(const uint8_t* src_argb, uint16_t* dst_ar64, int width); +void ARGBToAB64Row_C(const uint8_t* src_argb, uint16_t* dst_ab64, int width); +void AR64ToARGBRow_C(const uint16_t* src_ar64, uint8_t* dst_argb, int width); +void AB64ToARGBRow_C(const uint16_t* src_ab64, uint8_t* dst_argb, int width); +void AR64ShuffleRow_C(const uint8_t* src_ar64, + uint8_t* dst_ar64, + const uint8_t* shuffler, + int width); +void ARGBToAR64Row_SSSE3(const uint8_t* src_argb, + uint16_t* dst_ar64, + int width); +void ARGBToAB64Row_SSSE3(const uint8_t* src_argb, + uint16_t* dst_ab64, + int width); +void AR64ToARGBRow_SSSE3(const uint16_t* src_ar64, + uint8_t* dst_argb, + int width); +void AB64ToARGBRow_SSSE3(const uint16_t* src_ab64, + uint8_t* dst_argb, + int width); +void ARGBToAR64Row_AVX2(const uint8_t* src_argb, uint16_t* dst_ar64, int width); +void ARGBToAB64Row_AVX2(const uint8_t* src_argb, uint16_t* dst_ab64, int width); +void AR64ToARGBRow_AVX2(const uint16_t* src_ar64, uint8_t* dst_argb, int width); +void AB64ToARGBRow_AVX2(const uint16_t* src_ab64, uint8_t* dst_argb, int width); +void ARGBToAR64Row_NEON(const uint8_t* src_argb, uint16_t* dst_ar64, int width); +void ARGBToAB64Row_NEON(const uint8_t* src_argb, uint16_t* dst_ab64, int width); +void AR64ToARGBRow_NEON(const uint16_t* src_ar64, uint8_t* dst_argb, int width); +void AB64ToARGBRow_NEON(const uint16_t* src_ab64, uint8_t* dst_argb, int width); +void ARGBToAR64Row_Any_SSSE3(const uint8_t* src_ptr, + uint16_t* dst_ptr, + int width); +void ARGBToAB64Row_Any_SSSE3(const uint8_t* src_ptr, + uint16_t* dst_ptr, + int width); +void AR64ToARGBRow_Any_SSSE3(const uint16_t* src_ptr, + uint8_t* dst_ptr, + int width); +void AB64ToARGBRow_Any_SSSE3(const uint16_t* src_ptr, + uint8_t* dst_ptr, + int width); +void ARGBToAR64Row_Any_AVX2(const uint8_t* src_ptr, + uint16_t* dst_ptr, + int width); +void ARGBToAB64Row_Any_AVX2(const uint8_t* src_ptr, + uint16_t* dst_ptr, + int width); +void AR64ToARGBRow_Any_AVX2(const uint16_t* src_ptr, + uint8_t* dst_ptr, + int width); +void AB64ToARGBRow_Any_AVX2(const uint16_t* src_ptr, + uint8_t* dst_ptr, + int width); +void ARGBToAR64Row_Any_NEON(const uint8_t* src_ptr, + uint16_t* dst_ptr, + int width); +void ARGBToAB64Row_Any_NEON(const uint8_t* src_ptr, + uint16_t* dst_ptr, + int width); +void AR64ToARGBRow_Any_NEON(const uint16_t* src_ptr, + uint8_t* dst_ptr, + int width); +void AB64ToARGBRow_Any_NEON(const uint16_t* src_ptr, + uint8_t* dst_ptr, + int width); + void J400ToARGBRow_SSE2(const uint8_t* src_y, uint8_t* dst_argb, int width); void J400ToARGBRow_AVX2(const uint8_t* src_y, uint8_t* dst_argb, int width); void J400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width); void J400ToARGBRow_MSA(const uint8_t* src_y, uint8_t* dst_argb, int width); -void J400ToARGBRow_MMI(const uint8_t* src_y, uint8_t* 
dst_argb, int width); +void J400ToARGBRow_LSX(const uint8_t* src_y, uint8_t* dst_argb, int width); void J400ToARGBRow_C(const uint8_t* src_y, uint8_t* dst_argb, int width); void J400ToARGBRow_Any_SSE2(const uint8_t* src_ptr, uint8_t* dst_ptr, @@ -2274,7 +3269,7 @@ void J400ToARGBRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void J400ToARGBRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void J400ToARGBRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void J400ToARGBRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void I444ToARGBRow_C(const uint8_t* src_y, const uint8_t* src_u, @@ -2282,6 +3277,12 @@ void I444ToARGBRow_C(const uint8_t* src_y, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width); +void I444ToRGB24Row_C(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width); void I422ToARGBRow_C(const uint8_t* src_y, const uint8_t* src_u, const uint8_t* src_v, @@ -2306,6 +3307,51 @@ void I210ToARGBRow_C(const uint16_t* src_y, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width); +void I212ToAR30Row_C(const uint16_t* src_y, + const uint16_t* src_u, + const uint16_t* src_v, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width); +void I212ToARGBRow_C(const uint16_t* src_y, + const uint16_t* src_u, + const uint16_t* src_v, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width); +void I410ToAR30Row_C(const uint16_t* src_y, + const uint16_t* src_u, + const uint16_t* src_v, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width); +void I410ToARGBRow_C(const uint16_t* src_y, + const uint16_t* src_u, + const uint16_t* src_v, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width); +void I210AlphaToARGBRow_C(const uint16_t* src_y, + const uint16_t* src_u, + const uint16_t* src_v, + const uint16_t* src_a, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width); +void I410AlphaToARGBRow_C(const uint16_t* src_y, + const uint16_t* src_u, + const uint16_t* src_v, + const uint16_t* src_a, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width); +void I444AlphaToARGBRow_C(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + const uint8_t* src_a, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width); void I422AlphaToARGBRow_C(const uint8_t* src_y, const uint8_t* src_u, const uint8_t* src_v, @@ -2350,6 +3396,27 @@ void UYVYToARGBRow_C(const uint8_t* src_uyvy, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width); +void P210ToARGBRow_C(const uint16_t* src_y, + const uint16_t* src_uv, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void P410ToARGBRow_C(const uint16_t* src_y, + const uint16_t* src_uv, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void P210ToAR30Row_C(const uint16_t* src_y, + const uint16_t* src_uv, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width); +void P410ToAR30Row_C(const uint16_t* src_y, + const uint16_t* src_uv, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width); + void I422ToRGBARow_C(const uint8_t* src_y, const uint8_t* src_u, const uint8_t* src_v, @@ -2386,6 +3453,12 @@ void I422ToARGBRow_AVX2(const uint8_t* y_buf, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width); +void 
I422ToARGBRow_AVX512BW(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); void I422ToRGBARow_AVX2(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, @@ -2404,18 +3477,18 @@ void I444ToARGBRow_AVX2(const uint8_t* y_buf, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width); -void I444ToARGBRow_SSSE3(const uint8_t* y_buf, +void I444ToRGB24Row_SSSE3(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_rgb24, + const struct YuvConstants* yuvconstants, + int width); +void I444ToRGB24Row_AVX2(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, - uint8_t* dst_argb, + uint8_t* dst_rgb24, const struct YuvConstants* yuvconstants, int width); -void I444ToARGBRow_AVX2(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); void I422ToARGBRow_SSSE3(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, @@ -2441,6 +3514,44 @@ void I210ToARGBRow_SSSE3(const uint16_t* y_buf, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width); +void I212ToAR30Row_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width); +void I212ToARGBRow_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void I410ToAR30Row_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width); +void I410ToARGBRow_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void I210AlphaToARGBRow_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + const uint16_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void I410AlphaToARGBRow_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + const uint16_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); void I422ToAR30Row_AVX2(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, @@ -2459,6 +3570,58 @@ void I210ToAR30Row_AVX2(const uint16_t* y_buf, uint8_t* dst_ar30, const struct YuvConstants* yuvconstants, int width); +void I212ToARGBRow_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void I212ToAR30Row_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width); +void I410ToAR30Row_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width); +void I410ToARGBRow_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void I210AlphaToARGBRow_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + const uint16_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void I410AlphaToARGBRow_AVX2(const uint16_t* y_buf, + const 
uint16_t* u_buf, + const uint16_t* v_buf, + const uint16_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void I444AlphaToARGBRow_SSSE3(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + const uint8_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void I444AlphaToARGBRow_AVX2(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + const uint8_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); void I422AlphaToARGBRow_SSSE3(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, @@ -2508,6 +3671,10 @@ void NV21ToRGB24Row_AVX2(const uint8_t* src_y, uint8_t* dst_rgb24, const struct YuvConstants* yuvconstants, int width); +void NV21ToYUV24Row_SSSE3(const uint8_t* src_y, + const uint8_t* src_vu, + uint8_t* dst_yuv24, + int width); void NV21ToYUV24Row_AVX2(const uint8_t* src_y, const uint8_t* src_vu, uint8_t* dst_yuv24, @@ -2543,6 +3710,48 @@ void UYVYToARGBRow_AVX2(const uint8_t* uyvy_buf, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width); + +void P210ToARGBRow_SSSE3(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void P410ToARGBRow_SSSE3(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void P210ToAR30Row_SSSE3(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width); +void P410ToAR30Row_SSSE3(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width); +void P210ToARGBRow_AVX2(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void P410ToARGBRow_AVX2(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void P210ToAR30Row_AVX2(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width); +void P410ToAR30Row_AVX2(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width); + void I422ToRGBARow_SSSE3(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, @@ -2603,6 +3812,12 @@ void I422ToARGBRow_Any_AVX2(const uint8_t* y_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); +void I422ToARGBRow_Any_AVX512BW(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); void I422ToRGBARow_Any_AVX2(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, @@ -2615,12 +3830,24 @@ void I444ToARGBRow_Any_SSSE3(const uint8_t* y_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); +void I444ToRGB24Row_Any_SSSE3(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); void I444ToARGBRow_Any_AVX2(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); +void I444ToRGB24Row_Any_AVX2(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); void 
I422ToARGBRow_Any_SSSE3(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, @@ -2633,18 +3860,56 @@ void I422ToAR30Row_Any_SSSE3(const uint8_t* y_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); -void I210ToAR30Row_Any_SSSE3(const uint16_t* y_buf, +void I210ToAR30Row_Any_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void I210ToARGBRow_Any_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void I212ToAR30Row_Any_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void I212ToARGBRow_Any_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void I410ToAR30Row_Any_SSSE3(const uint16_t* y_buf, const uint16_t* u_buf, const uint16_t* v_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); -void I210ToARGBRow_Any_SSSE3(const uint16_t* y_buf, +void I410ToARGBRow_Any_SSSE3(const uint16_t* y_buf, const uint16_t* u_buf, const uint16_t* v_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); +void I210AlphaToARGBRow_Any_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + const uint16_t* a_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void I410AlphaToARGBRow_Any_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + const uint16_t* a_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); void I422ToAR30Row_Any_AVX2(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, @@ -2663,6 +3928,58 @@ void I210ToAR30Row_Any_AVX2(const uint16_t* y_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); +void I212ToARGBRow_Any_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void I212ToAR30Row_Any_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void I410ToAR30Row_Any_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void I410ToARGBRow_Any_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void I210AlphaToARGBRow_Any_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + const uint16_t* a_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void I410AlphaToARGBRow_Any_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + const uint16_t* a_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void I444AlphaToARGBRow_Any_SSSE3(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + const uint8_t* a_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void I444AlphaToARGBRow_Any_AVX2(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + const uint8_t* a_buf, + uint8_t* dst_ptr, + 
const struct YuvConstants* yuvconstants, + int width); void I422AlphaToARGBRow_Any_SSSE3(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, @@ -2717,9 +4034,13 @@ void NV21ToRGB24Row_Any_AVX2(const uint8_t* y_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); -void NV21ToYUV24Row_Any_AVX2(const uint8_t* src_y, - const uint8_t* src_vu, - uint8_t* dst_yuv24, +void NV21ToYUV24Row_Any_SSSE3(const uint8_t* y_buf, + const uint8_t* uv_buf, + uint8_t* dst_ptr, + int width); +void NV21ToYUV24Row_Any_AVX2(const uint8_t* y_buf, + const uint8_t* uv_buf, + uint8_t* dst_ptr, int width); void NV12ToRGB565Row_Any_SSSE3(const uint8_t* y_buf, const uint8_t* uv_buf, @@ -2747,6 +4068,46 @@ void UYVYToARGBRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); +void P210ToARGBRow_Any_SSSE3(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void P410ToARGBRow_Any_SSSE3(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void P210ToAR30Row_Any_SSSE3(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void P410ToAR30Row_Any_SSSE3(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void P210ToARGBRow_Any_AVX2(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void P410ToARGBRow_Any_AVX2(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void P210ToAR30Row_Any_AVX2(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void P410ToAR30Row_Any_AVX2(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); void I422ToRGBARow_Any_SSSE3(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, @@ -2822,37 +4183,37 @@ void I400ToARGBRow_MSA(const uint8_t* src_y, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width); -void I400ToARGBRow_MMI(const uint8_t* src_y, +void I400ToARGBRow_LSX(const uint8_t* src_y, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width); void I400ToARGBRow_Any_SSE2(const uint8_t* src_ptr, uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, + const struct YuvConstants* param, int width); void I400ToARGBRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, + const struct YuvConstants* param, int width); void I400ToARGBRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, + const struct YuvConstants* param, int width); void I400ToARGBRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); -void I400ToARGBRow_Any_MMI(const uint8_t* src_ptr, +void I400ToARGBRow_Any_LSX(const uint8_t* src_ptr, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); // ARGB preattenuated alpha blend. 
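Editorial note (not part of the patch): the ARGBBlendRow_* kernels declared below operate on premultiplied ("preattenuated") sources, so the foreground color needs no per-pixel multiply by alpha. A simplified scalar sketch under that assumption; the real kernels typically use a cheaper shift-based approximation rather than an exact divide:

#include <stdint.h>

/* Assumed illustrative name. src_argb0's color bytes are already scaled
 * by its alpha, so "over" blending reduces per channel to
 *   dst = src0 + src1 * (255 - src0_alpha) / 255, with the result opaque. */
static void BlendPremultipliedRow(const uint8_t* src_argb0,
                                  const uint8_t* src_argb1,
                                  uint8_t* dst_argb,
                                  int width) {
  for (int x = 0; x < width; ++x) {
    const uint32_t a = src_argb0[3];
    for (int c = 0; c < 3; ++c) {
      dst_argb[c] = (uint8_t)(src_argb0[c] + src_argb1[c] * (255u - a) / 255u);
    }
    dst_argb[3] = 255;  /* blended output is treated as fully opaque */
    src_argb0 += 4;
    src_argb1 += 4;
    dst_argb += 4;
  }
}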
-void ARGBBlendRow_SSSE3(const uint8_t* src_argb0, +void ARGBBlendRow_SSSE3(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width); -void ARGBBlendRow_NEON(const uint8_t* src_argb0, +void ARGBBlendRow_NEON(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width); @@ -2860,11 +4221,11 @@ void ARGBBlendRow_MSA(const uint8_t* src_argb0, const uint8_t* src_argb1, uint8_t* dst_argb, int width); -void ARGBBlendRow_MMI(const uint8_t* src_argb0, +void ARGBBlendRow_LSX(const uint8_t* src_argb0, const uint8_t* src_argb1, uint8_t* dst_argb, int width); -void ARGBBlendRow_C(const uint8_t* src_argb0, +void ARGBBlendRow_C(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width); @@ -2890,16 +4251,6 @@ void BlendPlaneRow_Any_AVX2(const uint8_t* y_buf, const uint8_t* v_buf, uint8_t* dst_ptr, int width); -void BlendPlaneRow_MMI(const uint8_t* src0, - const uint8_t* src1, - const uint8_t* alpha, - uint8_t* dst, - int width); -void BlendPlaneRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); void BlendPlaneRow_C(const uint8_t* src0, const uint8_t* src1, const uint8_t* alpha, @@ -2908,11 +4259,11 @@ void BlendPlaneRow_C(const uint8_t* src0, // ARGB multiply images. Same API as Blend, but these require // pointer and width alignment for SSE2. -void ARGBMultiplyRow_C(const uint8_t* src_argb0, +void ARGBMultiplyRow_C(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width); -void ARGBMultiplyRow_SSE2(const uint8_t* src_argb0, +void ARGBMultiplyRow_SSE2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width); @@ -2920,7 +4271,7 @@ void ARGBMultiplyRow_Any_SSE2(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); -void ARGBMultiplyRow_AVX2(const uint8_t* src_argb0, +void ARGBMultiplyRow_AVX2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width); @@ -2928,7 +4279,7 @@ void ARGBMultiplyRow_Any_AVX2(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); -void ARGBMultiplyRow_NEON(const uint8_t* src_argb0, +void ARGBMultiplyRow_NEON(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width); @@ -2944,21 +4295,21 @@ void ARGBMultiplyRow_Any_MSA(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); -void ARGBMultiplyRow_MMI(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBMultiplyRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); +void ARGBMultiplyRow_LASX(const uint8_t* src_argb0, + const uint8_t* src_argb1, + uint8_t* dst_argb, + int width); +void ARGBMultiplyRow_Any_LASX(const uint8_t* y_buf, + const uint8_t* uv_buf, + uint8_t* dst_ptr, + int width); // ARGB add images. 
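Editorial note (not part of the patch): nearly every kernel in this header, including the ARGBAddRow_* group below, comes with an _Any_ twin whose job is to remove the SIMD width restriction. A generic sketch of that wrapper pattern, with hypothetical names and an assumed per-pass step; the real wrappers are macro-generated:

#include <stdint.h>
#include <string.h>

/* Hypothetical signature matching the two-source ARGB rows. */
typedef void (*ArgbBinaryRowFunc)(const uint8_t* src0, const uint8_t* src1,
                                  uint8_t* dst, int width);

/* Run the SIMD kernel on the largest prefix whose width is a multiple of
 * its step, then pad the leftover pixels into stack buffers so the same
 * kernel can finish the row without reading or writing past the end. */
static void AnyBinaryRow(ArgbBinaryRowFunc simd_row, int step_pixels,
                         const uint8_t* src0, const uint8_t* src1,
                         uint8_t* dst, int width) {
  /* step_pixels is assumed to be a power of two and at most 64. */
  const int n = width & ~(step_pixels - 1);
  if (n > 0) {
    simd_row(src0, src1, dst, n);
  }
  const int r = width - n;
  if (r > 0) {
    uint8_t t0[64 * 4] = {0}, t1[64 * 4] = {0}, td[64 * 4];
    memcpy(t0, src0 + n * 4, (size_t)r * 4);
    memcpy(t1, src1 + n * 4, (size_t)r * 4);
    simd_row(t0, t1, td, step_pixels);  /* safe: temp buffers are padded */
    memcpy(dst + n * 4, td, (size_t)r * 4);
  }
}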
-void ARGBAddRow_C(const uint8_t* src_argb0, +void ARGBAddRow_C(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width); -void ARGBAddRow_SSE2(const uint8_t* src_argb0, +void ARGBAddRow_SSE2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width); @@ -2966,7 +4317,7 @@ void ARGBAddRow_Any_SSE2(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); -void ARGBAddRow_AVX2(const uint8_t* src_argb0, +void ARGBAddRow_AVX2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width); @@ -2974,7 +4325,7 @@ void ARGBAddRow_Any_AVX2(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); -void ARGBAddRow_NEON(const uint8_t* src_argb0, +void ARGBAddRow_NEON(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width); @@ -2990,22 +4341,22 @@ void ARGBAddRow_Any_MSA(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); -void ARGBAddRow_MMI(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBAddRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); +void ARGBAddRow_LASX(const uint8_t* src_argb0, + const uint8_t* src_argb1, + uint8_t* dst_argb, + int width); +void ARGBAddRow_Any_LASX(const uint8_t* y_buf, + const uint8_t* uv_buf, + uint8_t* dst_ptr, + int width); // ARGB subtract images. Same API as Blend, but these require // pointer and width alignment for SSE2. -void ARGBSubtractRow_C(const uint8_t* src_argb0, +void ARGBSubtractRow_C(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width); -void ARGBSubtractRow_SSE2(const uint8_t* src_argb0, +void ARGBSubtractRow_SSE2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width); @@ -3013,7 +4364,7 @@ void ARGBSubtractRow_Any_SSE2(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); -void ARGBSubtractRow_AVX2(const uint8_t* src_argb0, +void ARGBSubtractRow_AVX2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width); @@ -3021,7 +4372,7 @@ void ARGBSubtractRow_Any_AVX2(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); -void ARGBSubtractRow_NEON(const uint8_t* src_argb0, +void ARGBSubtractRow_NEON(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width); @@ -3037,14 +4388,14 @@ void ARGBSubtractRow_Any_MSA(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); -void ARGBSubtractRow_MMI(const uint8_t* src_argb0, - const uint8_t* src_argb1, - uint8_t* dst_argb, - int width); -void ARGBSubtractRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - int width); +void ARGBSubtractRow_LASX(const uint8_t* src_argb0, + const uint8_t* src_argb1, + uint8_t* dst_argb, + int width); +void ARGBSubtractRow_Any_LASX(const uint8_t* y_buf, + const uint8_t* uv_buf, + uint8_t* dst_ptr, + int width); void ARGBToRGB24Row_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, @@ -3133,24 +4484,24 @@ void ARGBToRGB565DitherRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, const uint32_t param, int width); +void ARGBToRGB565DitherRow_Any_LASX(const uint8_t* src_ptr, + uint8_t* dst_ptr, + const uint32_t param, + int width); -void ARGBToRGB24Row_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToRAWRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBToRGB565Row_Any_MMI(const uint8_t* src_ptr, +void 
ARGBToRGB24Row_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBToARGB1555Row_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToARGB4444Row_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); -void ARGBToRGB565DitherRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const uint32_t param, - int width); +void ARGBToRAWRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void ARGBToRGB565Row_Any_LASX(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int width); +void ARGBToARGB1555Row_Any_LASX(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int width); +void ARGBToARGB4444Row_Any_LASX(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int width); void I444ToARGBRow_Any_NEON(const uint8_t* y_buf, const uint8_t* u_buf, @@ -3158,12 +4509,25 @@ void I444ToARGBRow_Any_NEON(const uint8_t* y_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); +void I444ToRGB24Row_Any_NEON(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); void I422ToARGBRow_Any_NEON(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); +void I444AlphaToARGBRow_Any_NEON(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + const uint8_t* a_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); void I422AlphaToARGBRow_Any_NEON(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, @@ -3221,9 +4585,9 @@ void NV21ToRGB24Row_Any_NEON(const uint8_t* y_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); -void NV21ToYUV24Row_Any_NEON(const uint8_t* src_y, - const uint8_t* src_vu, - uint8_t* dst_yuv24, +void NV21ToYUV24Row_Any_NEON(const uint8_t* y_buf, + const uint8_t* uv_buf, + uint8_t* dst_ptr, int width); void NV12ToRGB565Row_Any_NEON(const uint8_t* y_buf, const uint8_t* uv_buf, @@ -3238,13 +4602,53 @@ void UYVYToARGBRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); +void P210ToARGBRow_NEON(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void P410ToARGBRow_NEON(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void P210ToAR30Row_NEON(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width); +void P410ToAR30Row_NEON(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width); +void P210ToARGBRow_Any_NEON(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void P410ToARGBRow_Any_NEON(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width); +void P210ToAR30Row_Any_NEON(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width); +void P410ToAR30Row_Any_NEON(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width); void I444ToARGBRow_Any_MSA(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int 
width); -void I444ToARGBRow_Any_MMI(const uint8_t* y_buf, +void I444ToARGBRow_Any_LSX(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* dst_ptr, @@ -3256,18 +4660,24 @@ void I422ToARGBRow_Any_MSA(const uint8_t* y_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); -void I422ToARGBRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); +void I422ToARGBRow_Any_LASX(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); void I422ToRGBARow_Any_MSA(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); +void I422ToRGBARow_Any_LASX(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); void I422AlphaToARGBRow_Any_MSA(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, @@ -3275,30 +4685,61 @@ void I422AlphaToARGBRow_Any_MSA(const uint8_t* y_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); +void I422AlphaToARGBRow_Any_LASX(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + const uint8_t* a_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); void I422ToRGB24Row_Any_MSA(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); +void I422ToRGB24Row_Any_LASX(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); void I422ToRGB565Row_Any_MSA(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); +void I422ToRGB565Row_Any_LASX(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); void I422ToARGB4444Row_Any_MSA(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); +void I422ToARGB4444Row_Any_LASX(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); void I422ToARGB1555Row_Any_MSA(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, int width); +void I422ToARGB1555Row_Any_LASX(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); void NV12ToARGBRow_Any_MSA(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, @@ -3323,12 +4764,55 @@ void UYVYToARGBRow_Any_MSA(const uint8_t* src_ptr, const struct YuvConstants* yuvconstants, int width); +void NV12ToARGBRow_Any_LSX(const uint8_t* y_buf, + const uint8_t* uv_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void NV12ToARGBRow_Any_LASX(const uint8_t* y_buf, + const uint8_t* uv_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void NV12ToRGB565Row_Any_LSX(const uint8_t* y_buf, + const uint8_t* uv_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void NV12ToRGB565Row_Any_LASX(const 
uint8_t* y_buf, + const uint8_t* uv_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void NV21ToARGBRow_Any_LSX(const uint8_t* y_buf, + const uint8_t* uv_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void NV21ToARGBRow_Any_LASX(const uint8_t* y_buf, + const uint8_t* uv_buf, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void YUY2ToARGBRow_Any_LSX(const uint8_t* src_ptr, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); +void UYVYToARGBRow_Any_LSX(const uint8_t* src_ptr, + uint8_t* dst_ptr, + const struct YuvConstants* yuvconstants, + int width); + void YUY2ToYRow_AVX2(const uint8_t* src_yuy2, uint8_t* dst_y, int width); void YUY2ToUVRow_AVX2(const uint8_t* src_yuy2, int stride_yuy2, uint8_t* dst_u, uint8_t* dst_v, int width); +void YUY2ToNVUVRow_AVX2(const uint8_t* src_yuy2, + int stride_yuy2, + uint8_t* dst_uv, + int width); void YUY2ToUV422Row_AVX2(const uint8_t* src_yuy2, uint8_t* dst_u, uint8_t* dst_v, @@ -3339,6 +4823,10 @@ void YUY2ToUVRow_SSE2(const uint8_t* src_yuy2, uint8_t* dst_u, uint8_t* dst_v, int width); +void YUY2ToNVUVRow_SSE2(const uint8_t* src_yuy2, + int stride_yuy2, + uint8_t* dst_uv, + int width); void YUY2ToUV422Row_SSE2(const uint8_t* src_yuy2, uint8_t* dst_u, uint8_t* dst_v, @@ -3349,90 +4837,110 @@ void YUY2ToUVRow_NEON(const uint8_t* src_yuy2, uint8_t* dst_u, uint8_t* dst_v, int width); +void YUY2ToNVUVRow_NEON(const uint8_t* src_yuy2, + int stride_yuy2, + uint8_t* dst_uv, + int width); void YUY2ToUV422Row_NEON(const uint8_t* src_yuy2, uint8_t* dst_u, uint8_t* dst_v, int width); void YUY2ToYRow_MSA(const uint8_t* src_yuy2, uint8_t* dst_y, int width); -void YUY2ToYRow_MMI(const uint8_t* src_yuy2, uint8_t* dst_y, int width); +void YUY2ToYRow_LASX(const uint8_t* src_yuy2, uint8_t* dst_y, int width); void YUY2ToUVRow_MSA(const uint8_t* src_yuy2, int src_stride_yuy2, uint8_t* dst_u, uint8_t* dst_v, int width); -void YUY2ToUVRow_MMI(const uint8_t* src_yuy2, - int src_stride_yuy2, - uint8_t* dst_u, - uint8_t* dst_v, - int width); +void YUY2ToUVRow_LASX(const uint8_t* src_yuy2, + int src_stride_yuy2, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void YUY2ToUV422Row_MSA(const uint8_t* src_yuy2, uint8_t* dst_u, uint8_t* dst_v, int width); -void YUY2ToUV422Row_MMI(const uint8_t* src_yuy2, - uint8_t* dst_u, - uint8_t* dst_v, - int width); +void YUY2ToUV422Row_LASX(const uint8_t* src_yuy2, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void YUY2ToYRow_C(const uint8_t* src_yuy2, uint8_t* dst_y, int width); void YUY2ToUVRow_C(const uint8_t* src_yuy2, int src_stride_yuy2, uint8_t* dst_u, uint8_t* dst_v, int width); +void YUY2ToNVUVRow_C(const uint8_t* src_yuy2, + int src_stride_yuy2, + uint8_t* dst_uv, + int width); void YUY2ToUV422Row_C(const uint8_t* src_yuy2, uint8_t* dst_u, uint8_t* dst_v, int width); void YUY2ToYRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void YUY2ToUVRow_Any_AVX2(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); +void YUY2ToNVUVRow_Any_AVX2(const uint8_t* src_yuy2, + int stride_yuy2, + uint8_t* dst_uv, + int width); void YUY2ToUV422Row_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); void YUY2ToYRow_Any_SSE2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void YUY2ToUVRow_Any_SSE2(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); +void 
YUY2ToNVUVRow_Any_SSE2(const uint8_t* src_yuy2, + int stride_yuy2, + uint8_t* dst_uv, + int width); void YUY2ToUV422Row_Any_SSE2(const uint8_t* src_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); void YUY2ToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void YUY2ToUVRow_Any_NEON(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); +void YUY2ToNVUVRow_Any_NEON(const uint8_t* src_yuy2, + int stride_yuy2, + uint8_t* dst_uv, + int width); void YUY2ToUV422Row_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); void YUY2ToYRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void YUY2ToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void YUY2ToYRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void YUY2ToUVRow_Any_MSA(const uint8_t* src_ptr, int src_stride_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); -void YUY2ToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); +void YUY2ToUVRow_Any_LASX(const uint8_t* src_ptr, + int src_stride_ptr, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void YUY2ToUV422Row_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); -void YUY2ToUV422Row_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); +void YUY2ToUV422Row_Any_LASX(const uint8_t* src_ptr, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void UYVYToYRow_AVX2(const uint8_t* src_uyvy, uint8_t* dst_y, int width); void UYVYToUVRow_AVX2(const uint8_t* src_uyvy, int stride_uyvy, @@ -3474,25 +4982,25 @@ void UYVYToUV422Row_NEON(const uint8_t* src_uyvy, uint8_t* dst_v, int width); void UYVYToYRow_MSA(const uint8_t* src_uyvy, uint8_t* dst_y, int width); -void UYVYToYRow_MMI(const uint8_t* src_uyvy, uint8_t* dst_y, int width); +void UYVYToYRow_LASX(const uint8_t* src_uyvy, uint8_t* dst_y, int width); void UYVYToUVRow_MSA(const uint8_t* src_uyvy, int src_stride_uyvy, uint8_t* dst_u, uint8_t* dst_v, int width); -void UYVYToUVRow_MMI(const uint8_t* src_uyvy, - int src_stride_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int width); +void UYVYToUVRow_LASX(const uint8_t* src_uyvy, + int src_stride_uyvy, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void UYVYToUV422Row_MSA(const uint8_t* src_uyvy, uint8_t* dst_u, uint8_t* dst_v, int width); -void UYVYToUV422Row_MMI(const uint8_t* src_uyvy, - uint8_t* dst_u, - uint8_t* dst_v, - int width); +void UYVYToUV422Row_LASX(const uint8_t* src_uyvy, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void UYVYToYRow_C(const uint8_t* src_uyvy, uint8_t* dst_y, int width); void UYVYToUVRow_C(const uint8_t* src_uyvy, @@ -3506,7 +5014,7 @@ void UYVYToUV422Row_C(const uint8_t* src_uyvy, int width); void UYVYToYRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void UYVYToUVRow_Any_AVX2(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); @@ -3516,7 +5024,7 @@ void UYVYToUV422Row_Any_AVX2(const uint8_t* src_ptr, int width); void UYVYToYRow_Any_SSE2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void UYVYToUVRow_Any_SSE2(const uint8_t* src_ptr, - int src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); @@ -3526,7 +5034,7 @@ void UYVYToUV422Row_Any_SSE2(const uint8_t* src_ptr, int width); void UYVYToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void UYVYToUVRow_Any_NEON(const uint8_t* src_ptr, - int 
src_stride_ptr, + int src_stride, uint8_t* dst_u, uint8_t* dst_v, int width); @@ -3535,25 +5043,25 @@ void UYVYToUV422Row_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_v, int width); void UYVYToYRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void UYVYToYRow_Any_MMI(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void UYVYToYRow_Any_LASX(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void UYVYToUVRow_Any_MSA(const uint8_t* src_ptr, int src_stride_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); -void UYVYToUVRow_Any_MMI(const uint8_t* src_ptr, - int src_stride_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); +void UYVYToUVRow_Any_LASX(const uint8_t* src_ptr, + int src_stride_ptr, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void UYVYToUV422Row_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_u, uint8_t* dst_v, int width); -void UYVYToUV422Row_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_u, - uint8_t* dst_v, - int width); +void UYVYToUV422Row_Any_LASX(const uint8_t* src_ptr, + uint8_t* dst_u, + uint8_t* dst_v, + int width); void SwapUVRow_C(const uint8_t* src_uv, uint8_t* dst_vu, int width); void SwapUVRow_NEON(const uint8_t* src_uv, uint8_t* dst_vu, int width); void SwapUVRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); @@ -3563,29 +5071,29 @@ void SwapUVRow_AVX2(const uint8_t* src_uv, uint8_t* dst_vu, int width); void SwapUVRow_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); void AYUVToYRow_C(const uint8_t* src_ayuv, uint8_t* dst_y, int width); void AYUVToUVRow_C(const uint8_t* src_ayuv, - int stride_ayuv, + int src_stride_ayuv, uint8_t* dst_uv, int width); void AYUVToVURow_C(const uint8_t* src_ayuv, - int stride_ayuv, + int src_stride_ayuv, uint8_t* dst_vu, int width); void AYUVToYRow_NEON(const uint8_t* src_ayuv, uint8_t* dst_y, int width); void AYUVToUVRow_NEON(const uint8_t* src_ayuv, - int stride_ayuv, + int src_stride_ayuv, uint8_t* dst_uv, int width); void AYUVToVURow_NEON(const uint8_t* src_ayuv, - int stride_ayuv, + int src_stride_ayuv, uint8_t* dst_vu, int width); -void AYUVToYRow_Any_NEON(const uint8_t* src_ayuv, uint8_t* dst_y, int width); -void AYUVToUVRow_Any_NEON(const uint8_t* src_ayuv, - int stride_ayuv, - uint8_t* dst_uv, +void AYUVToYRow_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); +void AYUVToUVRow_Any_NEON(const uint8_t* src_ptr, + int src_stride, + uint8_t* dst_vu, int width); -void AYUVToVURow_Any_NEON(const uint8_t* src_ayuv, - int stride_ayuv, +void AYUVToVURow_Any_NEON(const uint8_t* src_ptr, + int src_stride, uint8_t* dst_vu, int width); @@ -3664,41 +5172,41 @@ void I422ToYUY2Row_MSA(const uint8_t* src_y, const uint8_t* src_v, uint8_t* dst_yuy2, int width); -void I422ToYUY2Row_MMI(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_yuy2, - int width); +void I422ToYUY2Row_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_yuy2, + int width); void I422ToUYVYRow_MSA(const uint8_t* src_y, const uint8_t* src_u, const uint8_t* src_v, uint8_t* dst_uyvy, int width); -void I422ToUYVYRow_MMI(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_uyvy, - int width); +void I422ToUYVYRow_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_uyvy, + int width); void I422ToYUY2Row_Any_MSA(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* dst_ptr, int width); -void I422ToYUY2Row_Any_MMI(const uint8_t* y_buf, - const uint8_t* 
u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); +void I422ToYUY2Row_Any_LASX(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_ptr, + int width); void I422ToUYVYRow_Any_MSA(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* dst_ptr, int width); -void I422ToUYVYRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - int width); +void I422ToUYVYRow_Any_LASX(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_ptr, + int width); // Effects related row functions. void ARGBAttenuateRow_C(const uint8_t* src_argb, uint8_t* dst_argb, int width); @@ -3714,9 +5222,9 @@ void ARGBAttenuateRow_NEON(const uint8_t* src_argb, void ARGBAttenuateRow_MSA(const uint8_t* src_argb, uint8_t* dst_argb, int width); -void ARGBAttenuateRow_MMI(const uint8_t* src_argb, - uint8_t* dst_argb, - int width); +void ARGBAttenuateRow_LASX(const uint8_t* src_argb, + uint8_t* dst_argb, + int width); void ARGBAttenuateRow_Any_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); @@ -3729,9 +5237,9 @@ void ARGBAttenuateRow_Any_NEON(const uint8_t* src_ptr, void ARGBAttenuateRow_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int width); -void ARGBAttenuateRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - int width); +void ARGBAttenuateRow_Any_LASX(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int width); // Inverse table for unattenuate, shared by C and SSE2. extern const uint32_t fixed_invtbl8[256]; @@ -3755,13 +5263,13 @@ void ARGBGrayRow_C(const uint8_t* src_argb, uint8_t* dst_argb, int width); void ARGBGrayRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_argb, int width); void ARGBGrayRow_NEON(const uint8_t* src_argb, uint8_t* dst_argb, int width); void ARGBGrayRow_MSA(const uint8_t* src_argb, uint8_t* dst_argb, int width); -void ARGBGrayRow_MMI(const uint8_t* src_argb, uint8_t* dst_argb, int width); +void ARGBGrayRow_LASX(const uint8_t* src_argb, uint8_t* dst_argb, int width); void ARGBSepiaRow_C(uint8_t* dst_argb, int width); void ARGBSepiaRow_SSSE3(uint8_t* dst_argb, int width); void ARGBSepiaRow_NEON(uint8_t* dst_argb, int width); void ARGBSepiaRow_MSA(uint8_t* dst_argb, int width); -void ARGBSepiaRow_MMI(uint8_t* dst_argb, int width); +void ARGBSepiaRow_LASX(uint8_t* dst_argb, int width); void ARGBColorMatrixRow_C(const uint8_t* src_argb, uint8_t* dst_argb, @@ -3779,7 +5287,7 @@ void ARGBColorMatrixRow_MSA(const uint8_t* src_argb, uint8_t* dst_argb, const int8_t* matrix_argb, int width); -void ARGBColorMatrixRow_MMI(const uint8_t* src_argb, +void ARGBColorMatrixRow_LSX(const uint8_t* src_argb, uint8_t* dst_argb, const int8_t* matrix_argb, int width); @@ -3818,6 +5326,11 @@ void ARGBQuantizeRow_MSA(uint8_t* dst_argb, int interval_size, int interval_offset, int width); +void ARGBQuantizeRow_LSX(uint8_t* dst_argb, + int scale, + int interval_size, + int interval_offset, + int width); void ARGBShadeRow_C(const uint8_t* src_argb, uint8_t* dst_argb, @@ -3835,10 +5348,10 @@ void ARGBShadeRow_MSA(const uint8_t* src_argb, uint8_t* dst_argb, int width, uint32_t value); -void ARGBShadeRow_MMI(const uint8_t* src_argb, - uint8_t* dst_argb, - int width, - uint32_t value); +void ARGBShadeRow_LASX(const uint8_t* src_argb, + uint8_t* dst_argb, + int width, + uint32_t value); // Used for blur. 
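The ARGBAttenuateRow_* prototypes above premultiply each pixel's color channels by its alpha. A rough illustrative reference for one row, using libyuv's B, G, R, A byte order; the exact rounding differs between libyuv's C and SIMD paths, so treat this as a sketch only:

```cpp
#include <stdint.h>

// Illustrative premultiply for one ARGB row (byte order B, G, R, A), matching
// the role of the ARGBAttenuateRow_* prototypes: each color channel is scaled
// by alpha / 255.  Round-to-nearest is used here; libyuv's own paths may
// round slightly differently.
static void ARGBAttenuateRow_Ref(const uint8_t* src_argb, uint8_t* dst_argb, int width) {
  for (int x = 0; x < width; ++x) {
    const uint32_t a = src_argb[4 * x + 3];
    for (int c = 0; c < 3; ++c) {  // B, G, R
      dst_argb[4 * x + c] = (uint8_t)((src_argb[4 * x + c] * a + 127) / 255);
    }
    dst_argb[4 * x + 3] = (uint8_t)a;  // alpha passes through unchanged
  }
}
```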
void CumulativeSumToAverageRow_SSE2(const int32_t* topleft, @@ -3852,11 +5365,6 @@ void ComputeCumulativeSumRow_SSE2(const uint8_t* row, const int32_t* previous_cumsum, int width); -void ComputeCumulativeSumRow_MMI(const uint8_t* row, - int32_t* cumsum, - const int32_t* previous_cumsum, - int width); - void CumulativeSumToAverageRow_C(const int32_t* tl, const int32_t* bl, int w, @@ -3907,7 +5415,7 @@ void InterpolateRow_MSA(uint8_t* dst_ptr, ptrdiff_t src_stride, int width, int source_y_fraction); -void InterpolateRow_MMI(uint8_t* dst_ptr, +void InterpolateRow_LSX(uint8_t* dst_ptr, const uint8_t* src_ptr, ptrdiff_t src_stride, int width, @@ -3932,7 +5440,7 @@ void InterpolateRow_Any_MSA(uint8_t* dst_ptr, ptrdiff_t src_stride_ptr, int width, int source_y_fraction); -void InterpolateRow_Any_MMI(uint8_t* dst_ptr, +void InterpolateRow_Any_LSX(uint8_t* dst_ptr, const uint8_t* src_ptr, ptrdiff_t src_stride_ptr, int width, @@ -3943,6 +5451,47 @@ void InterpolateRow_16_C(uint16_t* dst_ptr, ptrdiff_t src_stride, int width, int source_y_fraction); +void InterpolateRow_16_NEON(uint16_t* dst_ptr, + const uint16_t* src_ptr, + ptrdiff_t src_stride, + int width, + int source_y_fraction); +void InterpolateRow_16_Any_NEON(uint16_t* dst_ptr, + const uint16_t* src_ptr, + ptrdiff_t src_stride, + int width, + int source_y_fraction); + +void InterpolateRow_16To8_C(uint8_t* dst_ptr, + const uint16_t* src_ptr, + ptrdiff_t src_stride, + int scale, + int width, + int source_y_fraction); +void InterpolateRow_16To8_NEON(uint8_t* dst_ptr, + const uint16_t* src_ptr, + ptrdiff_t src_stride, + int scale, + int width, + int source_y_fraction); +void InterpolateRow_16To8_Any_NEON(uint8_t* dst_ptr, + const uint16_t* src_ptr, + ptrdiff_t src_stride, + int scale, + int width, + int source_y_fraction); +void InterpolateRow_16To8_AVX2(uint8_t* dst_ptr, + const uint16_t* src_ptr, + ptrdiff_t src_stride, + int scale, + int width, + int source_y_fraction); +void InterpolateRow_16To8_Any_AVX2(uint8_t* dst_ptr, + const uint16_t* src_ptr, + ptrdiff_t src_stride, + int scale, + int width, + int source_y_fraction); // Sobel images. 
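The InterpolateRow_16To8_* entries added just above fold the usual two-row vertical blend together with a 16-bit to 8-bit down-conversion. A plain-C sketch of that shape follows; the 0..256 fraction and the (v * scale) >> 16 convention are assumptions inferred from the surrounding API, not copied from libyuv's implementation:

```cpp
#include <stddef.h>
#include <stdint.h>

// Reference shape for InterpolateRow_16To8: blend two 16-bit rows with a
// 0..256 vertical fraction, then scale to 8 bits via (v * scale) >> 16, so a
// 10-bit source would pass scale = 16384.  Fraction range, rounding and the
// clamp are assumptions, not libyuv's code.
static void InterpolateRow_16To8_Ref(uint8_t* dst_ptr,
                                     const uint16_t* src_ptr,
                                     ptrdiff_t src_stride,
                                     int scale,
                                     int width,
                                     int source_y_fraction) {
  const uint16_t* src1 = src_ptr + src_stride;  // the next source row
  for (int x = 0; x < width; ++x) {
    const uint32_t blended = (src_ptr[x] * (uint32_t)(256 - source_y_fraction) +
                              src1[x] * (uint32_t)source_y_fraction + 128) >> 8;
    const uint32_t scaled = (blended * (uint32_t)scale) >> 16;
    dst_ptr[x] = (uint8_t)(scaled > 255 ? 255 : scaled);
  }
}
```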
void SobelXRow_C(const uint8_t* src_y0, @@ -3965,11 +5514,6 @@ void SobelXRow_MSA(const uint8_t* src_y0, const uint8_t* src_y2, uint8_t* dst_sobelx, int width); -void SobelXRow_MMI(const uint8_t* src_y0, - const uint8_t* src_y1, - const uint8_t* src_y2, - uint8_t* dst_sobelx, - int width); void SobelYRow_C(const uint8_t* src_y0, const uint8_t* src_y1, uint8_t* dst_sobely, @@ -3986,10 +5530,6 @@ void SobelYRow_MSA(const uint8_t* src_y0, const uint8_t* src_y1, uint8_t* dst_sobely, int width); -void SobelYRow_MMI(const uint8_t* src_y0, - const uint8_t* src_y1, - uint8_t* dst_sobely, - int width); void SobelRow_C(const uint8_t* src_sobelx, const uint8_t* src_sobely, uint8_t* dst_argb, @@ -4006,7 +5546,7 @@ void SobelRow_MSA(const uint8_t* src_sobelx, const uint8_t* src_sobely, uint8_t* dst_argb, int width); -void SobelRow_MMI(const uint8_t* src_sobelx, +void SobelRow_LSX(const uint8_t* src_sobelx, const uint8_t* src_sobely, uint8_t* dst_argb, int width); @@ -4026,7 +5566,7 @@ void SobelToPlaneRow_MSA(const uint8_t* src_sobelx, const uint8_t* src_sobely, uint8_t* dst_y, int width); -void SobelToPlaneRow_MMI(const uint8_t* src_sobelx, +void SobelToPlaneRow_LSX(const uint8_t* src_sobelx, const uint8_t* src_sobely, uint8_t* dst_y, int width); @@ -4046,7 +5586,7 @@ void SobelXYRow_MSA(const uint8_t* src_sobelx, const uint8_t* src_sobely, uint8_t* dst_argb, int width); -void SobelXYRow_MMI(const uint8_t* src_sobelx, +void SobelXYRow_LSX(const uint8_t* src_sobelx, const uint8_t* src_sobely, uint8_t* dst_argb, int width); @@ -4062,7 +5602,7 @@ void SobelRow_Any_MSA(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); -void SobelRow_Any_MMI(const uint8_t* y_buf, +void SobelRow_Any_LSX(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); @@ -4078,7 +5618,7 @@ void SobelToPlaneRow_Any_MSA(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); -void SobelToPlaneRow_Any_MMI(const uint8_t* y_buf, +void SobelToPlaneRow_Any_LSX(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); @@ -4094,7 +5634,7 @@ void SobelXYRow_Any_MSA(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); -void SobelXYRow_Any_MMI(const uint8_t* y_buf, +void SobelXYRow_Any_LSX(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, int width); @@ -4170,6 +5710,14 @@ void HalfFloatRow_Any_MSA(const uint16_t* src_ptr, uint16_t* dst_ptr, float param, int width); +void HalfFloatRow_LSX(const uint16_t* src, + uint16_t* dst, + float scale, + int width); +void HalfFloatRow_Any_LSX(const uint16_t* src_ptr, + uint16_t* dst_ptr, + float param, + int width); void ByteToFloatRow_C(const uint8_t* src, float* dst, float scale, int width); void ByteToFloatRow_NEON(const uint8_t* src, float* dst, @@ -4204,159 +5752,6 @@ float ScaleSumSamples_NEON(const float* src, void ScaleSamples_C(const float* src, float* dst, float scale, int width); void ScaleSamples_NEON(const float* src, float* dst, float scale, int width); -void I210ToARGBRow_MMI(const uint16_t* src_y, - const uint16_t* src_u, - const uint16_t* src_v, - uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGBARow_MMI(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I422AlphaToARGBRow_MMI(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - const uint8_t* src_a, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - 
int width); -void I422ToRGB24Row_MMI(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB565Row_MMI(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_rgb565, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB4444Row_MMI(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb4444, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB1555Row_MMI(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_argb1555, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToARGBRow_MMI(const uint8_t* src_y, - const uint8_t* src_uv, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB565Row_MMI(const uint8_t* src_y, - const uint8_t* src_uv, - uint8_t* dst_rgb565, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToARGBRow_MMI(const uint8_t* src_y, - const uint8_t* src_vu, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB24Row_MMI(const uint8_t* src_y, - const uint8_t* src_uv, - uint8_t* dst_rgb24, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToRGB24Row_MMI(const uint8_t* src_y, - const uint8_t* src_vu, - uint8_t* dst_rgb24, - const struct YuvConstants* yuvconstants, - int width); -void YUY2ToARGBRow_MMI(const uint8_t* src_yuy2, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void UYVYToARGBRow_MMI(const uint8_t* src_uyvy, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width); -void I210ToARGBRow_Any_MMI(const uint16_t* y_buf, - const uint16_t* u_buf, - const uint16_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGBARow_Any_MMI(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422AlphaToARGBRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - const uint8_t* a_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB24Row_Any_MMI(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToRGB565Row_Any_MMI(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB4444Row_Any_MMI(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void I422ToARGB1555Row_Any_MMI(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToARGBRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB565Row_Any_MMI(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV21ToARGBRow_Any_MMI(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void NV12ToRGB24Row_Any_MMI(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - 
const struct YuvConstants* yuvconstants, - int width); -void NV21ToRGB24Row_Any_MMI(const uint8_t* y_buf, - const uint8_t* uv_buf, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void YUY2ToARGBRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); -void UYVYToARGBRow_Any_MMI(const uint8_t* src_ptr, - uint8_t* dst_ptr, - const struct YuvConstants* yuvconstants, - int width); - void GaussRow_F32_NEON(const float* src, float* dst, int width); void GaussRow_F32_C(const float* src, float* dst, int width); @@ -4376,6 +5771,17 @@ void GaussCol_F32_C(const float* src0, float* dst, int width); +void GaussRow_C(const uint32_t* src, uint16_t* dst, int width); +void GaussCol_C(const uint16_t* src0, + const uint16_t* src1, + const uint16_t* src2, + const uint16_t* src3, + const uint16_t* src4, + uint32_t* dst, + int width); + +void ClampFloatToZero_SSE2(const float* src_x, float* dst_y, int width); + #ifdef __cplusplus } // extern "C" } // namespace libyuv diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale.h index add5a9eb62..443f89c2f9 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale.h @@ -49,6 +49,18 @@ void ScalePlane_16(const uint16_t* src, int dst_height, enum FilterMode filtering); +// Sample is expected to be in the low 12 bits. +LIBYUV_API +void ScalePlane_12(const uint16_t* src, + int src_stride, + int src_width, + int src_height, + uint16_t* dst, + int dst_stride, + int dst_width, + int dst_height, + enum FilterMode filtering); + // Scales a YUV 4:2:0 image from the src width and height to the // dst width and height. // If filtering is kFilterNone, a simple nearest-neighbor algorithm is @@ -97,6 +109,25 @@ int I420Scale_16(const uint16_t* src_y, int dst_height, enum FilterMode filtering); +LIBYUV_API +int I420Scale_12(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + enum FilterMode filtering); + // Scales a YUV 4:4:4 image from the src width and height to the // dst width and height. // If filtering is kFilterNone, a simple nearest-neighbor algorithm is @@ -145,6 +176,91 @@ int I444Scale_16(const uint16_t* src_y, int dst_height, enum FilterMode filtering); +LIBYUV_API +int I444Scale_12(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + enum FilterMode filtering); + +// Scales a YUV 4:2:2 image from the src width and height to the +// dst width and height. +// If filtering is kFilterNone, a simple nearest-neighbor algorithm is +// used. This produces basic (blocky) quality at the fastest speed. +// If filtering is kFilterBilinear, interpolation is used to produce a better +// quality image, at the expense of speed. +// If filtering is kFilterBox, averaging is used to produce ever better +// quality image, at further expense of speed. +// Returns 0 if successful. 
+LIBYUV_API +int I422Scale(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + enum FilterMode filtering); + +LIBYUV_API +int I422Scale_16(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + enum FilterMode filtering); + +LIBYUV_API +int I422Scale_12(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + enum FilterMode filtering); + // Scales an NV12 image from the src width and height to the // dst width and height. // If filtering is kFilterNone, a simple nearest-neighbor algorithm is diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_rgb.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_rgb.h new file mode 100644 index 0000000000..d17c39fd6e --- /dev/null +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_rgb.h @@ -0,0 +1,42 @@ +/* + * Copyright 2022 The LibYuv Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef INCLUDE_LIBYUV_SCALE_RGB_H_ +#define INCLUDE_LIBYUV_SCALE_RGB_H_ + +#include "libyuv/basic_types.h" +#include "libyuv/scale.h" // For FilterMode + +#ifdef __cplusplus +namespace libyuv { +extern "C" { +#endif + +// RGB can be RAW, RGB24 or YUV24 +// RGB scales 24 bit images by converting a row at a time to ARGB +// and using ARGB row functions to scale, then convert to RGB. +// TODO(fbarchard): Allow input/output formats to be specified. 
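A hypothetical call site for the I422Scale entry point declared above; buffer names and dimensions are illustrative, and the same calling pattern applies to the _16 and _12 variants:

```cpp
#include <stdint.h>
#include <vector>

#include "libyuv/scale.h"

// Halve a 640x480 I422 frame to 320x240 with bilinear filtering.  The source
// pointers and sizes here are placeholders for real frame data.
void HalveI422Frame(const uint8_t* src_y, const uint8_t* src_u, const uint8_t* src_v) {
  const int src_w = 640, src_h = 480;
  const int dst_w = 320, dst_h = 240;
  std::vector<uint8_t> dst_y(dst_w * dst_h);
  std::vector<uint8_t> dst_u((dst_w / 2) * dst_h);  // 4:2:2 chroma: half width, full height
  std::vector<uint8_t> dst_v((dst_w / 2) * dst_h);
  libyuv::I422Scale(src_y, src_w, src_u, src_w / 2, src_v, src_w / 2,
                    src_w, src_h,
                    dst_y.data(), dst_w, dst_u.data(), dst_w / 2,
                    dst_v.data(), dst_w / 2, dst_w, dst_h,
                    libyuv::kFilterBilinear);
}
```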
+LIBYUV_API +int RGBScale(const uint8_t* src_rgb, + int src_stride_rgb, + int src_width, + int src_height, + uint8_t* dst_rgb, + int dst_stride_rgb, + int dst_width, + int dst_height, + enum FilterMode filtering); + +#ifdef __cplusplus +} // extern "C" +} // namespace libyuv +#endif + +#endif // INCLUDE_LIBYUV_SCALE_UV_H_ diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_row.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_row.h index a386d49989..7996ea05d6 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_row.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_row.h @@ -74,18 +74,39 @@ extern "C" { // The following are available for gcc/clang x86 platforms: // TODO(fbarchard): Port to Visual C -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) +#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__)) #define HAS_SCALEUVROWDOWN2BOX_SSSE3 +#define HAS_SCALEROWUP2_LINEAR_SSE2 +#define HAS_SCALEROWUP2_LINEAR_SSSE3 +#define HAS_SCALEROWUP2_BILINEAR_SSE2 +#define HAS_SCALEROWUP2_BILINEAR_SSSE3 +#define HAS_SCALEROWUP2_LINEAR_12_SSSE3 +#define HAS_SCALEROWUP2_BILINEAR_12_SSSE3 +#define HAS_SCALEROWUP2_LINEAR_16_SSE2 +#define HAS_SCALEROWUP2_BILINEAR_16_SSE2 +#define HAS_SCALEUVROWUP2_LINEAR_SSSE3 +#define HAS_SCALEUVROWUP2_BILINEAR_SSSE3 +#define HAS_SCALEUVROWUP2_LINEAR_16_SSE41 +#define HAS_SCALEUVROWUP2_BILINEAR_16_SSE41 #endif // The following are available for gcc/clang x86 platforms, but // require clang 3.4 or gcc 4.7. // TODO(fbarchard): Port to Visual C -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(__x86_64__) || defined(__i386__)) && !defined(_MSC_VER) && \ +#if !defined(LIBYUV_DISABLE_X86) && \ + (defined(__x86_64__) || defined(__i386__)) && \ (defined(CLANG_HAS_AVX2) || defined(GCC_HAS_AVX2)) #define HAS_SCALEUVROWDOWN2BOX_AVX2 +#define HAS_SCALEROWUP2_LINEAR_AVX2 +#define HAS_SCALEROWUP2_BILINEAR_AVX2 +#define HAS_SCALEROWUP2_LINEAR_12_AVX2 +#define HAS_SCALEROWUP2_BILINEAR_12_AVX2 +#define HAS_SCALEROWUP2_LINEAR_16_AVX2 +#define HAS_SCALEROWUP2_BILINEAR_16_AVX2 +#define HAS_SCALEUVROWUP2_LINEAR_AVX2 +#define HAS_SCALEUVROWUP2_BILINEAR_AVX2 +#define HAS_SCALEUVROWUP2_LINEAR_16_AVX2 +#define HAS_SCALEUVROWUP2_BILINEAR_16_AVX2 #endif // The following are available on all x86 platforms, but @@ -114,6 +135,16 @@ extern "C" { #define HAS_SCALEROWDOWN4_NEON #define HAS_SCALEUVROWDOWN2BOX_NEON #define HAS_SCALEUVROWDOWNEVEN_NEON +#define HAS_SCALEROWUP2_LINEAR_NEON +#define HAS_SCALEROWUP2_BILINEAR_NEON +#define HAS_SCALEROWUP2_LINEAR_12_NEON +#define HAS_SCALEROWUP2_BILINEAR_12_NEON +#define HAS_SCALEROWUP2_LINEAR_16_NEON +#define HAS_SCALEROWUP2_BILINEAR_16_NEON +#define HAS_SCALEUVROWUP2_LINEAR_NEON +#define HAS_SCALEUVROWUP2_BILINEAR_NEON +#define HAS_SCALEUVROWUP2_LINEAR_16_NEON +#define HAS_SCALEUVROWUP2_BILINEAR_16_NEON #endif #if !defined(LIBYUV_DISABLE_MSA) && defined(__mips_msa) @@ -129,22 +160,17 @@ extern "C" { #define HAS_SCALEROWDOWN4_MSA #endif -#if !defined(LIBYUV_DISABLE_MMI) && defined(_MIPS_ARCH_LOONGSON3A) -#define HAS_FIXEDDIV1_MIPS -#define HAS_FIXEDDIV_MIPS -#define HAS_SCALEADDROW_16_MMI -#define HAS_SCALEADDROW_MMI -#define HAS_SCALEARGBCOLS_MMI -#define HAS_SCALEARGBCOLSUP2_MMI -#define HAS_SCALEARGBROWDOWN2_MMI -#define HAS_SCALEARGBROWDOWNEVEN_MMI -#define HAS_SCALECOLS_16_MMI -#define HAS_SCALECOLS_MMI -#define HAS_SCALEROWDOWN2_16_MMI -#define HAS_SCALEROWDOWN2_MMI -#define HAS_SCALEROWDOWN4_16_MMI -#define 
HAS_SCALEROWDOWN4_MMI -#define HAS_SCALEROWDOWN34_MMI +#if !defined(LIBYUV_DISABLE_LSX) && defined(__loongarch_sx) +#define HAS_SCALEARGBROWDOWN2_LSX +#define HAS_SCALEARGBROWDOWNEVEN_LSX +#define HAS_SCALEROWDOWN2_LSX +#define HAS_SCALEROWDOWN4_LSX +#define HAS_SCALEROWDOWN38_LSX +#define HAS_SCALEFILTERCOLS_LSX +#define HAS_SCALEADDROW_LSX +#define HAS_SCALEARGBCOLS_LSX +#define HAS_SCALEARGBFILTERCOLS_LSX +#define HAS_SCALEROWDOWN34_LSX #endif // Scale ARGB vertically with bilinear interpolation. @@ -174,6 +200,31 @@ void ScalePlaneVertical_16(int src_height, int wpp, enum FilterMode filtering); +void ScalePlaneVertical_16To8(int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint16_t* src_argb, + uint8_t* dst_argb, + int x, + int y, + int dy, + int wpp, + int scale, + enum FilterMode filtering); + +void ScalePlaneDown2_16To8(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint16_t* src_ptr, + uint8_t* dst_ptr, + int scale, + enum FilterMode filtering); + // Simplify the filtering based on scale factors. enum FilterMode ScaleFilterReduce(int src_width, int src_height, @@ -219,6 +270,16 @@ void ScaleRowDown2_16_C(const uint16_t* src_ptr, ptrdiff_t src_stride, uint16_t* dst, int dst_width); +void ScaleRowDown2_16To8_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width, + int scale); +void ScaleRowDown2_16To8_Odd_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width, + int scale); void ScaleRowDown2Linear_C(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst, @@ -227,6 +288,16 @@ void ScaleRowDown2Linear_16_C(const uint16_t* src_ptr, ptrdiff_t src_stride, uint16_t* dst, int dst_width); +void ScaleRowDown2Linear_16To8_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width, + int scale); +void ScaleRowDown2Linear_16To8_Odd_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width, + int scale); void ScaleRowDown2Box_C(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst, @@ -239,6 +310,16 @@ void ScaleRowDown2Box_16_C(const uint16_t* src_ptr, ptrdiff_t src_stride, uint16_t* dst, int dst_width); +void ScaleRowDown2Box_16To8_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width, + int scale); +void ScaleRowDown2Box_16To8_Odd_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width, + int scale); void ScaleRowDown4_C(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst, @@ -279,6 +360,40 @@ void ScaleRowDown34_1_Box_16_C(const uint16_t* src_ptr, ptrdiff_t src_stride, uint16_t* d, int dst_width); + +void ScaleRowUp2_Linear_C(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_C(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_16_C(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_16_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_Any_C(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_Any_C(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_16_Any_C(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void 
ScaleRowUp2_Bilinear_16_Any_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); + void ScaleCols_C(uint8_t* dst_ptr, const uint8_t* src_ptr, int dst_width, @@ -416,6 +531,40 @@ void ScaleUVRowDownEvenBox_C(const uint8_t* src_uv, int src_stepx, uint8_t* dst_uv, int dst_width); + +void ScaleUVRowUp2_Linear_C(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowUp2_Bilinear_C(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleUVRowUp2_Linear_Any_C(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowUp2_Bilinear_Any_C(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleUVRowUp2_Linear_16_C(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleUVRowUp2_Bilinear_16_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleUVRowUp2_Linear_16_Any_C(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleUVRowUp2_Bilinear_16_Any_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); + void ScaleUVCols_C(uint8_t* dst_uv, const uint8_t* src_uv, int dst_width, @@ -508,6 +657,120 @@ void ScaleRowDown38_2_Box_SSSE3(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, int dst_width); + +void ScaleRowUp2_Linear_SSE2(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_SSE2(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_12_SSSE3(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_12_SSSE3(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_16_SSE2(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_16_SSE2(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_SSSE3(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_AVX2(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_AVX2(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_12_AVX2(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_12_AVX2(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_16_AVX2(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_16_AVX2(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_Any_SSE2(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_Any_SSE2(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_12_Any_SSSE3(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void 
ScaleRowUp2_Bilinear_12_Any_SSSE3(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_16_Any_SSE2(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_16_Any_SSE2(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_Any_SSSE3(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_Any_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_Any_AVX2(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_Any_AVX2(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_12_Any_AVX2(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_12_Any_AVX2(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_16_Any_AVX2(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_16_Any_AVX2(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); + void ScaleRowDown2_Any_SSSE3(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, @@ -658,16 +921,6 @@ void ScaleARGBCols_Any_MSA(uint8_t* dst_ptr, int dst_width, int x, int dx); -void ScaleARGBCols_MMI(uint8_t* dst_argb, - const uint8_t* src_argb, - int dst_width, - int x, - int dx); -void ScaleARGBCols_Any_MMI(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x, - int dx); // ARGB Row functions void ScaleARGBRowDown2_SSE2(const uint8_t* src_argb, @@ -706,15 +959,15 @@ void ScaleARGBRowDown2Box_MSA(const uint8_t* src_argb, ptrdiff_t src_stride, uint8_t* dst_argb, int dst_width); -void ScaleARGBRowDown2_MMI(const uint8_t* src_argb, +void ScaleARGBRowDown2_LSX(const uint8_t* src_argb, ptrdiff_t src_stride, uint8_t* dst_argb, int dst_width); -void ScaleARGBRowDown2Linear_MMI(const uint8_t* src_argb, +void ScaleARGBRowDown2Linear_LSX(const uint8_t* src_argb, ptrdiff_t src_stride, uint8_t* dst_argb, int dst_width); -void ScaleARGBRowDown2Box_MMI(const uint8_t* src_argb, +void ScaleARGBRowDown2Box_LSX(const uint8_t* src_argb, ptrdiff_t src_stride, uint8_t* dst_argb, int dst_width); @@ -754,15 +1007,15 @@ void ScaleARGBRowDown2Box_Any_MSA(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, int dst_width); -void ScaleARGBRowDown2_Any_MMI(const uint8_t* src_ptr, +void ScaleARGBRowDown2_Any_LSX(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, int dst_width); -void ScaleARGBRowDown2Linear_Any_MMI(const uint8_t* src_ptr, +void ScaleARGBRowDown2Linear_Any_LSX(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, int dst_width); -void ScaleARGBRowDown2Box_Any_MMI(const uint8_t* src_ptr, +void ScaleARGBRowDown2Box_Any_LSX(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, int dst_width); @@ -796,12 +1049,12 @@ void ScaleARGBRowDownEvenBox_MSA(const uint8_t* src_argb, int src_stepx, uint8_t* dst_argb, int dst_width); -void ScaleARGBRowDownEven_MMI(const uint8_t* src_argb, +void ScaleARGBRowDownEven_LSX(const uint8_t* src_argb, ptrdiff_t src_stride, int32_t src_stepx, uint8_t* dst_argb, int dst_width); -void ScaleARGBRowDownEvenBox_MMI(const uint8_t* src_argb, +void 
ScaleARGBRowDownEvenBox_LSX(const uint8_t* src_argb, ptrdiff_t src_stride, int src_stepx, uint8_t* dst_argb, @@ -836,12 +1089,12 @@ void ScaleARGBRowDownEvenBox_Any_MSA(const uint8_t* src_ptr, int src_stepx, uint8_t* dst_ptr, int dst_width); -void ScaleARGBRowDownEven_Any_MMI(const uint8_t* src_ptr, +void ScaleARGBRowDownEven_Any_LSX(const uint8_t* src_ptr, ptrdiff_t src_stride, int32_t src_stepx, uint8_t* dst_ptr, int dst_width); -void ScaleARGBRowDownEvenBox_Any_MMI(const uint8_t* src_ptr, +void ScaleARGBRowDownEvenBox_Any_LSX(const uint8_t* src_ptr, ptrdiff_t src_stride, int src_stepx, uint8_t* dst_ptr, @@ -888,18 +1141,6 @@ void ScaleUVRowDown2Box_MSA(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_uv, int dst_width); -void ScaleUVRowDown2_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_uv, - int dst_width); -void ScaleUVRowDown2Linear_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_uv, - int dst_width); -void ScaleUVRowDown2Box_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_uv, - int dst_width); void ScaleUVRowDown2_Any_SSSE3(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, @@ -940,18 +1181,6 @@ void ScaleUVRowDown2Box_Any_MSA(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, int dst_width); -void ScaleUVRowDown2_Any_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleUVRowDown2Linear_Any_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); -void ScaleUVRowDown2Box_Any_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); void ScaleUVRowDownEven_SSSE3(const uint8_t* src_ptr, ptrdiff_t src_stride, int src_stepx, @@ -982,16 +1211,6 @@ void ScaleUVRowDownEvenBox_MSA(const uint8_t* src_ptr, int src_stepx, uint8_t* dst_uv, int dst_width); -void ScaleUVRowDownEven_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - int32_t src_stepx, - uint8_t* dst_uv, - int dst_width); -void ScaleUVRowDownEvenBox_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - int src_stepx, - uint8_t* dst_uv, - int dst_width); void ScaleUVRowDownEven_Any_SSSE3(const uint8_t* src_ptr, ptrdiff_t src_stride, int src_stepx, @@ -1022,16 +1241,103 @@ void ScaleUVRowDownEvenBox_Any_MSA(const uint8_t* src_ptr, int src_stepx, uint8_t* dst_ptr, int dst_width); -void ScaleUVRowDownEven_Any_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - int32_t src_stepx, + +void ScaleUVRowUp2_Linear_SSSE3(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width); -void ScaleUVRowDownEvenBox_Any_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - int src_stepx, +void ScaleUVRowUp2_Bilinear_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleUVRowUp2_Linear_Any_SSSE3(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowUp2_Bilinear_Any_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleUVRowUp2_Linear_AVX2(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowUp2_Bilinear_AVX2(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleUVRowUp2_Linear_Any_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width); +void ScaleUVRowUp2_Bilinear_Any_AVX2(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); 
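The ScaleRowUp2_Linear_* / ScaleUVRowUp2_Linear_* workers declared in this header double a row for 2x up-sampling. A sketch of what such a "linear" up-sampler typically computes; the 3:1 taps, the rounding, and the assumption that the caller pads one extra sample on the right are illustrative, not the library's exact kernel:

```cpp
#include <stdint.h>

// Sketch of a 2x "linear" up-sampling row: each output sample is a 3:1 blend
// of the two nearest source samples.  Taps, rounding and the right-edge
// padding assumption are for illustration only.
static void ScaleRowUp2_Linear_Ref(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width) {
  const int src_width = dst_width / 2;
  for (int x = 0; x < src_width; ++x) {
    const int a = src_ptr[x];
    const int b = src_ptr[x + 1];  // assumes one padded sample past the row end
    dst_ptr[2 * x + 0] = (uint8_t)((a * 3 + b + 2) >> 2);
    dst_ptr[2 * x + 1] = (uint8_t)((a + b * 3 + 2) >> 2);
  }
}
```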
+void ScaleUVRowUp2_Linear_NEON(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowUp2_Bilinear_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleUVRowUp2_Linear_Any_NEON(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleUVRowUp2_Bilinear_Any_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleUVRowUp2_Linear_16_SSE41(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleUVRowUp2_Bilinear_16_SSE41(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleUVRowUp2_Linear_16_Any_SSE41(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleUVRowUp2_Bilinear_16_Any_SSE41(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleUVRowUp2_Linear_16_AVX2(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleUVRowUp2_Bilinear_16_AVX2(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleUVRowUp2_Linear_16_Any_AVX2(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleUVRowUp2_Bilinear_16_Any_AVX2(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleUVRowUp2_Linear_16_NEON(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleUVRowUp2_Bilinear_16_NEON(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleUVRowUp2_Linear_16_Any_NEON(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleUVRowUp2_Bilinear_16_Any_NEON(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); // ScaleRowDown2Box also used by planar functions // NEON downscalers with interpolation. 
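For contrast with the up-samplers, the ScaleRowDown2Box_* workers referenced in the comment above average 2x2 blocks of the source. A minimal reference of that behavior, where the +2 rounding term is an assumption:

```cpp
#include <stddef.h>
#include <stdint.h>

// Reference 2x2 box down-scale for one output row: each destination sample is
// the rounded average of a 2x2 block spanning two adjacent source rows, as in
// the ScaleRowDown2Box_* prototypes.  The +2 rounding is an assumption.
static void ScaleRowDown2Box_Ref(const uint8_t* src_ptr,
                                 ptrdiff_t src_stride,
                                 uint8_t* dst,
                                 int dst_width) {
  const uint8_t* src_next = src_ptr + src_stride;  // second source row
  for (int x = 0; x < dst_width; ++x) {
    dst[x] = (uint8_t)((src_ptr[2 * x] + src_ptr[2 * x + 1] +
                        src_next[2 * x] + src_next[2 * x + 1] + 2) >> 2);
  }
}
```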
@@ -1143,6 +1449,55 @@ void ScaleRowDown38_2_Box_Any_NEON(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width); +void ScaleRowUp2_Linear_NEON(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_12_NEON(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_12_NEON(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_16_NEON(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_16_NEON(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_Any_NEON(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_Any_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_12_Any_NEON(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_12_Any_NEON(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); +void ScaleRowUp2_Linear_16_Any_NEON(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width); +void ScaleRowUp2_Bilinear_16_Any_NEON(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width); + void ScaleAddRow_NEON(const uint8_t* src_ptr, uint16_t* dst_ptr, int src_width); void ScaleAddRow_Any_NEON(const uint8_t* src_ptr, uint16_t* dst_ptr, @@ -1202,10 +1557,6 @@ void ScaleRowDown34_MSA(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst, int dst_width); -void ScaleRowDown34_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); void ScaleRowDown34_0_Box_MSA(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* d, @@ -1259,10 +1610,6 @@ void ScaleRowDown34_Any_MSA(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, int dst_width); -void ScaleRowDown34_Any_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst_ptr, - int dst_width); void ScaleRowDown34_0_Box_Any_MSA(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, @@ -1272,93 +1619,129 @@ void ScaleRowDown34_1_Box_Any_MSA(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width); -void ScaleRowDown2_MMI(const uint8_t* src_ptr, +void ScaleRowDown2_LSX(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst, int dst_width); -void ScaleRowDown2_16_MMI(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleRowDown2Linear_MMI(const uint8_t* src_ptr, +void ScaleRowDown2Linear_LSX(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst, int dst_width); -void ScaleRowDown2Linear_16_MMI(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleRowDown2Box_MMI(const uint8_t* src_ptr, +void ScaleRowDown2Box_LSX(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst, int dst_width); -void ScaleRowDown2Box_16_MMI(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleRowDown2Box_Odd_MMI(const uint8_t* src_ptr, - ptrdiff_t src_stride, - uint8_t* dst, - int dst_width); -void ScaleRowDown4_MMI(const uint8_t* src_ptr, +void ScaleRowDown4_LSX(const uint8_t* src_ptr, ptrdiff_t src_stride, 
uint8_t* dst, int dst_width); -void ScaleRowDown4_16_MMI(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleRowDown4Box_MMI(const uint8_t* src_ptr, +void ScaleRowDown4Box_LSX(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst, int dst_width); -void ScaleRowDown4Box_16_MMI(const uint16_t* src_ptr, - ptrdiff_t src_stride, - uint16_t* dst, - int dst_width); -void ScaleAddRow_MMI(const uint8_t* src_ptr, uint16_t* dst_ptr, int src_width); -void ScaleAddRow_16_MMI(const uint16_t* src_ptr, - uint32_t* dst_ptr, - int src_width); -void ScaleColsUp2_MMI(uint8_t* dst_ptr, - const uint8_t* src_ptr, - int dst_width, - int x, - int dx); -void ScaleColsUp2_16_MMI(uint16_t* dst_ptr, - const uint16_t* src_ptr, +void ScaleRowDown38_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width); +void ScaleRowDown38_2_Box_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleRowDown38_3_Box_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleAddRow_LSX(const uint8_t* src_ptr, uint16_t* dst_ptr, int src_width); +void ScaleFilterCols_LSX(uint8_t* dst_ptr, + const uint8_t* src_ptr, int dst_width, int x, int dx); -void ScaleARGBColsUp2_MMI(uint8_t* dst_argb, - const uint8_t* src_argb, - int dst_width, - int x, - int dx); - -void ScaleRowDown2_Any_MMI(const uint8_t* src_ptr, +void ScaleARGBFilterCols_LSX(uint8_t* dst_argb, + const uint8_t* src_argb, + int dst_width, + int x, + int dx); +void ScaleARGBCols_LSX(uint8_t* dst_argb, + const uint8_t* src_argb, + int dst_width, + int x, + int dx); +void ScaleRowDown34_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width); +void ScaleRowDown34_0_Box_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* d, + int dst_width); +void ScaleRowDown34_1_Box_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* d, + int dst_width); +void ScaleRowDown2_Any_LSX(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, int dst_width); -void ScaleRowDown2Linear_Any_MMI(const uint8_t* src_ptr, +void ScaleRowDown2Linear_Any_LSX(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, int dst_width); -void ScaleRowDown2Box_Any_MMI(const uint8_t* src_ptr, +void ScaleRowDown2Box_Any_LSX(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, int dst_width); -void ScaleRowDown4_Any_MMI(const uint8_t* src_ptr, +void ScaleRowDown4_Any_LSX(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, int dst_width); -void ScaleRowDown4Box_Any_MMI(const uint8_t* src_ptr, +void ScaleRowDown4Box_Any_LSX(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, int dst_width); -void ScaleAddRow_Any_MMI(const uint8_t* src_ptr, +void ScaleRowDown38_Any_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleRowDown38_2_Box_Any_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleRowDown38_3_Box_Any_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleAddRow_Any_LSX(const uint8_t* src_ptr, uint16_t* dst_ptr, int src_width); +void ScaleFilterCols_Any_LSX(uint8_t* dst_ptr, + const uint8_t* src_ptr, + int dst_width, + int x, + int dx); +void ScaleARGBCols_Any_LSX(uint8_t* dst_ptr, + const uint8_t* src_ptr, + int dst_width, + int x, + int dx); +void ScaleARGBFilterCols_Any_LSX(uint8_t* dst_ptr, + const uint8_t* src_ptr, + 
int dst_width, + int x, + int dx); +void ScaleRowDown34_Any_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleRowDown34_0_Box_Any_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); +void ScaleRowDown34_1_Box_Any_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width); + #ifdef __cplusplus } // extern "C" } // namespace libyuv diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_uv.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_uv.h index 1b6327aaed..8e74e3195b 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_uv.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/scale_uv.h @@ -30,6 +30,19 @@ int UVScale(const uint8_t* src_uv, int dst_height, enum FilterMode filtering); +// Scale a 16 bit UV image. +// This function is currently incomplete, it can't handle all cases. +LIBYUV_API +int UVScale_16(const uint16_t* src_uv, + int src_stride_uv, + int src_width, + int src_height, + uint16_t* dst_uv, + int dst_stride_uv, + int dst_width, + int dst_height, + enum FilterMode filtering); + #ifdef __cplusplus } // extern "C" } // namespace libyuv diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/version.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/version.h index 1d085960e3..adc49c4e46 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/version.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/version.h @@ -11,6 +11,6 @@ #ifndef INCLUDE_LIBYUV_VERSION_H_ #define INCLUDE_LIBYUV_VERSION_H_ -#define LIBYUV_VERSION 1767 +#define LIBYUV_VERSION 1857 #endif // INCLUDE_LIBYUV_VERSION_H_ diff --git a/TMessagesProj/jni/third_party/libyuv/include/libyuv/video_common.h b/TMessagesProj/jni/third_party/libyuv/include/libyuv/video_common.h index b9823d71d0..32b8a5210b 100644 --- a/TMessagesProj/jni/third_party/libyuv/include/libyuv/video_common.h +++ b/TMessagesProj/jni/third_party/libyuv/include/libyuv/video_common.h @@ -60,17 +60,19 @@ enum FourCC { FOURCC_YUY2 = FOURCC('Y', 'U', 'Y', '2'), FOURCC_UYVY = FOURCC('U', 'Y', 'V', 'Y'), FOURCC_I010 = FOURCC('I', '0', '1', '0'), // bt.601 10 bit 420 - FOURCC_I210 = FOURCC('I', '0', '1', '0'), // bt.601 10 bit 422 + FOURCC_I210 = FOURCC('I', '2', '1', '0'), // bt.601 10 bit 422 // 1 Secondary YUV format: row biplanar. deprecated. FOURCC_M420 = FOURCC('M', '4', '2', '0'), - // 11 Primary RGB formats: 4 32 bpp, 2 24 bpp, 3 16 bpp, 1 10 bpc + // 13 Primary RGB formats: 4 32 bpp, 2 24 bpp, 3 16 bpp, 1 10 bpc 2 64 bpp FOURCC_ARGB = FOURCC('A', 'R', 'G', 'B'), FOURCC_BGRA = FOURCC('B', 'G', 'R', 'A'), FOURCC_ABGR = FOURCC('A', 'B', 'G', 'R'), FOURCC_AR30 = FOURCC('A', 'R', '3', '0'), // 10 bit per channel. 2101010. FOURCC_AB30 = FOURCC('A', 'B', '3', '0'), // ABGR version of 10 bit + FOURCC_AR64 = FOURCC('A', 'R', '6', '4'), // 16 bit per channel. 
+ FOURCC_AB64 = FOURCC('A', 'B', '6', '4'), // ABGR version of 16 bit FOURCC_24BG = FOURCC('2', '4', 'B', 'G'), FOURCC_RAW = FOURCC('r', 'a', 'w', ' '), FOURCC_RGBA = FOURCC('R', 'G', 'B', 'A'), @@ -94,16 +96,23 @@ enum FourCC { FOURCC('J', '4', '4', '4'), // jpeg (bt.601 full), unofficial fourcc FOURCC_J400 = FOURCC('J', '4', '0', '0'), // jpeg (bt.601 full), unofficial fourcc + FOURCC_F420 = FOURCC('F', '4', '2', '0'), // bt.709 full, unofficial fourcc + FOURCC_F422 = FOURCC('F', '4', '2', '2'), // bt.709 full, unofficial fourcc + FOURCC_F444 = FOURCC('F', '4', '4', '4'), // bt.709 full, unofficial fourcc FOURCC_H420 = FOURCC('H', '4', '2', '0'), // bt.709, unofficial fourcc FOURCC_H422 = FOURCC('H', '4', '2', '2'), // bt.709, unofficial fourcc FOURCC_H444 = FOURCC('H', '4', '4', '4'), // bt.709, unofficial fourcc FOURCC_U420 = FOURCC('U', '4', '2', '0'), // bt.2020, unofficial fourcc FOURCC_U422 = FOURCC('U', '4', '2', '2'), // bt.2020, unofficial fourcc FOURCC_U444 = FOURCC('U', '4', '4', '4'), // bt.2020, unofficial fourcc + FOURCC_F010 = FOURCC('F', '0', '1', '0'), // bt.709 full range 10 bit 420 FOURCC_H010 = FOURCC('H', '0', '1', '0'), // bt.709 10 bit 420 FOURCC_U010 = FOURCC('U', '0', '1', '0'), // bt.2020 10 bit 420 - FOURCC_H210 = FOURCC('H', '0', '1', '0'), // bt.709 10 bit 422 - FOURCC_U210 = FOURCC('U', '0', '1', '0'), // bt.2020 10 bit 422 + FOURCC_F210 = FOURCC('F', '2', '1', '0'), // bt.709 full range 10 bit 422 + FOURCC_H210 = FOURCC('H', '2', '1', '0'), // bt.709 10 bit 422 + FOURCC_U210 = FOURCC('U', '2', '1', '0'), // bt.2020 10 bit 422 + FOURCC_P010 = FOURCC('P', '0', '1', '0'), + FOURCC_P210 = FOURCC('P', '2', '1', '0'), // 14 Auxiliary aliases. CanonicalFourCC() maps these to canonical fourcc. FOURCC_IYUV = FOURCC('I', 'Y', 'U', 'V'), // Alias for I420. @@ -156,6 +165,8 @@ enum FourCCBpp { FOURCC_BPP_RGBA = 32, FOURCC_BPP_AR30 = 32, FOURCC_BPP_AB30 = 32, + FOURCC_BPP_AR64 = 64, + FOURCC_BPP_AB64 = 64, FOURCC_BPP_24BG = 24, FOURCC_BPP_RAW = 24, FOURCC_BPP_RGBP = 16, @@ -173,7 +184,12 @@ enum FourCCBpp { FOURCC_BPP_J400 = 8, FOURCC_BPP_H420 = 12, FOURCC_BPP_H422 = 16, - FOURCC_BPP_H010 = 24, + FOURCC_BPP_I010 = 15, + FOURCC_BPP_I210 = 20, + FOURCC_BPP_H010 = 15, + FOURCC_BPP_H210 = 20, + FOURCC_BPP_P010 = 15, + FOURCC_BPP_P210 = 20, FOURCC_BPP_MJPG = 0, // 0 means unknown. 
FOURCC_BPP_H264 = 0, FOURCC_BPP_IYUV = 12, diff --git a/TMessagesProj/jni/third_party/libyuv/source/compare.cc b/TMessagesProj/jni/third_party/libyuv/source/compare.cc index e93aba1b53..50a736bdda 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/compare.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/compare.cc @@ -45,7 +45,7 @@ uint32_t HashDjb2(const uint8_t* src, uint64_t count, uint32_t seed) { } #endif - while (count >= (uint64_t)(kBlockSize)) { + while (count >= (uint64_t)kBlockSize) { seed = HashDjb2_SSE(src, kBlockSize, seed); src += kBlockSize; count -= kBlockSize; @@ -149,11 +149,6 @@ uint64_t ComputeHammingDistance(const uint8_t* src_a, HammingDistance = HammingDistance_AVX2; } #endif -#if defined(HAS_HAMMINGDISTANCE_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - HammingDistance = HammingDistance_MMI; - } -#endif #if defined(HAS_HAMMINGDISTANCE_MSA) if (TestCpuFlag(kCpuHasMSA)) { HammingDistance = HammingDistance_MSA; @@ -211,11 +206,6 @@ uint64_t ComputeSumSquareError(const uint8_t* src_a, SumSquareError = SumSquareError_AVX2; } #endif -#if defined(HAS_SUMSQUAREERROR_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - SumSquareError = SumSquareError_MMI; - } -#endif #if defined(HAS_SUMSQUAREERROR_MSA) if (TestCpuFlag(kCpuHasMSA)) { SumSquareError = SumSquareError_MSA; @@ -369,10 +359,10 @@ static double Ssim8x8_C(const uint8_t* src_a, (sum_a_sq + sum_b_sq + c1) * (count * sum_sq_a - sum_a_sq + count * sum_sq_b - sum_b_sq + c2); - if (ssim_d == 0.0) { + if (ssim_d == 0) { return DBL_MAX; } - return ssim_n * 1.0 / ssim_d; + return (double)ssim_n / (double)ssim_d; } } diff --git a/TMessagesProj/jni/third_party/libyuv/source/compare_common.cc b/TMessagesProj/jni/third_party/libyuv/source/compare_common.cc index d4b170ad98..d1cab8d2b4 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/compare_common.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/compare_common.cc @@ -17,36 +17,6 @@ namespace libyuv { extern "C" { #endif -#if ORIGINAL_OPT -uint32_t HammingDistance_C1(const uint8_t* src_a, - const uint8_t* src_b, - int count) { - uint32_t diff = 0u; - - int i; - for (i = 0; i < count; ++i) { - int x = src_a[i] ^ src_b[i]; - if (x & 1) - ++diff; - if (x & 2) - ++diff; - if (x & 4) - ++diff; - if (x & 8) - ++diff; - if (x & 16) - ++diff; - if (x & 32) - ++diff; - if (x & 64) - ++diff; - if (x & 128) - ++diff; - } - return diff; -} -#endif - // Hakmem method for hamming distance. uint32_t HammingDistance_C(const uint8_t* src_a, const uint8_t* src_b, diff --git a/TMessagesProj/jni/third_party/libyuv/source/compare_gcc.cc b/TMessagesProj/jni/third_party/libyuv/source/compare_gcc.cc index 6700f9697e..33cbe25ded 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/compare_gcc.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/compare_gcc.cc @@ -19,8 +19,7 @@ extern "C" { #endif // This module is for GCC x86 and x64. 
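compare_common.cc now keeps only the Hakmem-based HammingDistance_C and drops the bit-by-bit HammingDistance_C1 fallback. The same quantity can be cross-checked with a GCC/Clang builtin; this standalone sketch is for reference only and is not libyuv code:

```cpp
#include <stdint.h>

// Cross-check for the Hamming distance row functions: the number of differing
// bits between two byte buffers is the popcount of their XOR.  Uses the
// GCC/Clang builtin; standalone sketch, not libyuv code.
static uint32_t HammingDistance_Ref(const uint8_t* src_a, const uint8_t* src_b, int count) {
  uint32_t diff = 0u;
  for (int i = 0; i < count; ++i) {
    diff += (uint32_t)__builtin_popcount((unsigned)(src_a[i] ^ src_b[i]));
  }
  return diff;
}
```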
-#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) +#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__)) #if defined(__x86_64__) uint32_t HammingDistance_SSE42(const uint8_t* src_a, @@ -68,7 +67,7 @@ uint32_t HammingDistance_SSE42(const uint8_t* src_a, : : "memory", "cc", "rcx", "rdx", "rsi", "rdi", "r8", "r9", "r10"); - return static_cast(diff); + return (uint32_t)(diff); } #else uint32_t HammingDistance_SSE42(const uint8_t* src_a, diff --git a/TMessagesProj/jni/third_party/libyuv/source/compare_win.cc b/TMessagesProj/jni/third_party/libyuv/source/compare_win.cc index d57d3d9d1c..9bb27f1dd1 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/compare_win.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/compare_win.cc @@ -22,8 +22,9 @@ namespace libyuv { extern "C" { #endif -// This module is for 32 bit Visual C x86 and clangcl -#if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER) +// This module is for 32 bit Visual C x86 +#if !defined(LIBYUV_DISABLE_X86) && defined(_MSC_VER) && \ + !defined(__clang__) && defined(_M_IX86) uint32_t HammingDistance_SSE42(const uint8_t* src_a, const uint8_t* src_b, @@ -77,8 +78,7 @@ __declspec(naked) uint32_t } } -// Visual C 2012 required for AVX2. -#if _MSC_VER >= 1700 +#ifdef HAS_SUMSQUAREERROR_AVX2 // C4752: found Intel(R) Advanced Vector Extensions; consider using /arch:AVX. #pragma warning(disable : 4752) __declspec(naked) uint32_t @@ -118,7 +118,7 @@ __declspec(naked) uint32_t ret } } -#endif // _MSC_VER >= 1700 +#endif // HAS_SUMSQUAREERROR_AVX2 uvec32 kHash16x33 = {0x92d9e201, 0, 0, 0}; // 33 ^ 16 uvec32 kHashMul0 = { @@ -196,7 +196,7 @@ __declspec(naked) uint32_t } // Visual C 2012 required for AVX2. -#if _MSC_VER >= 1700 +#ifdef HAS_HASHDJB2_AVX2 __declspec(naked) uint32_t HashDjb2_AVX2(const uint8_t* src, int count, uint32_t seed) { __asm { @@ -231,7 +231,7 @@ __declspec(naked) uint32_t ret } } -#endif // _MSC_VER >= 1700 +#endif // HAS_HASHDJB2_AVX2 #endif // !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) diff --git a/TMessagesProj/jni/third_party/libyuv/source/convert.cc b/TMessagesProj/jni/third_party/libyuv/source/convert.cc index 98258b9bc9..15c70a6598 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/convert.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/convert.cc @@ -15,13 +15,19 @@ #include "libyuv/planar_functions.h" #include "libyuv/rotate.h" #include "libyuv/row.h" -#include "libyuv/scale.h" // For ScalePlane() +#include "libyuv/scale.h" // For ScalePlane() +#include "libyuv/scale_row.h" // For FixedDiv +#include "libyuv/scale_uv.h" // For UVScale() #ifdef __cplusplus namespace libyuv { extern "C" { #endif +// Subsample amount uses a shift. +// v is value +// a is amount to add to round up +// s is shift to subsample down #define SUBSAMPLE(v, a, s) (v < 0) ? (-((-v + a) >> s)) : ((v + a) >> s) static __inline int Abs(int v) { return v >= 0 ? 
v : -v; @@ -48,7 +54,7 @@ static int I4xxToI420(const uint8_t* src_y, const int dst_y_height = Abs(src_y_height); const int dst_uv_width = SUBSAMPLE(dst_y_width, 1, 1); const int dst_uv_height = SUBSAMPLE(dst_y_height, 1, 1); - if (src_uv_width == 0 || src_uv_height == 0) { + if (src_uv_width <= 0 || src_uv_height == 0) { return -1; } if (dst_y) { @@ -82,7 +88,8 @@ int I420Copy(const uint8_t* src_y, int height) { int halfwidth = (width + 1) >> 1; int halfheight = (height + 1) >> 1; - if (!src_u || !src_v || !dst_u || !dst_v || width <= 0 || height == 0) { + if ((!src_y && dst_y) || !src_u || !src_v || !dst_u || !dst_v || width <= 0 || + height == 0) { return -1; } // Negative height means invert the image. @@ -124,7 +131,8 @@ int I010Copy(const uint16_t* src_y, int height) { int halfwidth = (width + 1) >> 1; int halfheight = (height + 1) >> 1; - if (!src_u || !src_v || !dst_u || !dst_v || width <= 0 || height == 0) { + if ((!src_y && dst_y) || !src_u || !src_v || !dst_u || !dst_v || width <= 0 || + height == 0) { return -1; } // Negative height means invert the image. @@ -148,6 +156,53 @@ int I010Copy(const uint16_t* src_y, return 0; } +static int Planar16bitTo8bit(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + int subsample_x, + int subsample_y, + int depth) { + int uv_width = SUBSAMPLE(width, subsample_x, subsample_x); + int uv_height = SUBSAMPLE(height, subsample_y, subsample_y); + int scale = 1 << (24 - depth); + if ((!src_y && dst_y) || !src_u || !src_v || !dst_u || !dst_v || width <= 0 || + height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + uv_height = -uv_height; + src_y = src_y + (height - 1) * src_stride_y; + src_u = src_u + (uv_height - 1) * src_stride_u; + src_v = src_v + (uv_height - 1) * src_stride_v; + src_stride_y = -src_stride_y; + src_stride_u = -src_stride_u; + src_stride_v = -src_stride_v; + } + + // Convert Y plane. + Convert16To8Plane(src_y, src_stride_y, dst_y, dst_stride_y, scale, width, + height); + // Convert UV planes. + Convert16To8Plane(src_u, src_stride_u, dst_u, dst_stride_u, scale, uv_width, + uv_height); + Convert16To8Plane(src_v, src_stride_v, dst_v, dst_stride_v, scale, uv_width, + uv_height); + return 0; +} + // Convert 10 bit YUV to 8 bit. LIBYUV_API int I010ToI420(const uint16_t* src_y, @@ -164,34 +219,390 @@ int I010ToI420(const uint16_t* src_y, int dst_stride_v, int width, int height) { - int halfwidth = (width + 1) >> 1; - int halfheight = (height + 1) >> 1; - if (!src_u || !src_v || !dst_u || !dst_v || width <= 0 || height == 0) { + return Planar16bitTo8bit(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height, 1, + 1, 10); +} + +LIBYUV_API +int I210ToI420(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + const int depth = 10; + const int scale = 1 << (24 - depth); + + if (width <= 0 || height == 0) { return -1; } // Negative height means invert the image. 
if (height < 0) { height = -height; - halfheight = (height + 1) >> 1; src_y = src_y + (height - 1) * src_stride_y; - src_u = src_u + (halfheight - 1) * src_stride_u; - src_v = src_v + (halfheight - 1) * src_stride_v; + src_u = src_u + (height - 1) * src_stride_u; + src_v = src_v + (height - 1) * src_stride_v; src_stride_y = -src_stride_y; src_stride_u = -src_stride_u; src_stride_v = -src_stride_v; } - // Convert Y plane. - Convert16To8Plane(src_y, src_stride_y, dst_y, dst_stride_y, 16384, width, - height); - // Convert UV planes. - Convert16To8Plane(src_u, src_stride_u, dst_u, dst_stride_u, 16384, halfwidth, - halfheight); - Convert16To8Plane(src_v, src_stride_v, dst_v, dst_stride_v, 16384, halfwidth, - halfheight); + { + const int uv_width = SUBSAMPLE(width, 1, 1); + const int uv_height = SUBSAMPLE(height, 1, 1); + const int dy = FixedDiv(height, uv_height); + + Convert16To8Plane(src_y, src_stride_y, dst_y, dst_stride_y, scale, width, + height); + ScalePlaneVertical_16To8(height, uv_width, uv_height, src_stride_u, + dst_stride_u, src_u, dst_u, 0, 32768, dy, + /*bpp=*/1, scale, kFilterBilinear); + ScalePlaneVertical_16To8(height, uv_width, uv_height, src_stride_v, + dst_stride_v, src_v, dst_v, 0, 32768, dy, + /*bpp=*/1, scale, kFilterBilinear); + } return 0; } +LIBYUV_API +int I210ToI422(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return Planar16bitTo8bit(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height, 1, + 0, 10); +} + +LIBYUV_API +int I410ToI420(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + const int depth = 10; + const int scale = 1 << (24 - depth); + + if (width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + src_y = src_y + (height - 1) * src_stride_y; + src_u = src_u + (height - 1) * src_stride_u; + src_v = src_v + (height - 1) * src_stride_v; + src_stride_y = -src_stride_y; + src_stride_u = -src_stride_u; + src_stride_v = -src_stride_v; + } + + { + const int uv_width = SUBSAMPLE(width, 1, 1); + const int uv_height = SUBSAMPLE(height, 1, 1); + + Convert16To8Plane(src_y, src_stride_y, dst_y, dst_stride_y, scale, width, + height); + ScalePlaneDown2_16To8(width, height, uv_width, uv_height, src_stride_u, + dst_stride_u, src_u, dst_u, scale, kFilterBilinear); + ScalePlaneDown2_16To8(width, height, uv_width, uv_height, src_stride_v, + dst_stride_v, src_v, dst_v, scale, kFilterBilinear); + } + return 0; +} + +LIBYUV_API +int I410ToI444(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return Planar16bitTo8bit(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height, 0, + 0, 10); +} + +LIBYUV_API +int I012ToI420(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return Planar16bitTo8bit(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height, 1, + 1, 12); +} + +LIBYUV_API +int I212ToI422(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return Planar16bitTo8bit(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height, 1, + 0, 12); +} + +LIBYUV_API +int I412ToI444(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return Planar16bitTo8bit(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height, 0, + 0, 12); +} + +// Any Ix10 To I010 format with mirroring. 
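A note on the scale operand used by these new 16-bit-to-8-bit paths: the I010/I012/I210/I212/I410/I412 wrappers above all route through Planar16bitTo8bit, whose `scale = 1 << (24 - depth)` reproduces the 16384 the old I010ToI420 body hard-coded, because Convert16To8Plane maps each sample as roughly `(value * scale) >> 16` with clamping. The sketch below is an editor's illustration, not part of the patch; `model_convert_16_to_8` is a hypothetical stand-in for the library kernel, shown only so the full-scale cases are easy to check. The Ix10ToI010 helper that follows handles the no-depth-change rescaling path instead.

#include <stdint.h>

/* Hypothetical scalar model of Convert16To8Plane's per-sample math. */
static uint8_t model_convert_16_to_8(uint16_t v, int depth) {
  int scale = 1 << (24 - depth);            /* 16384 for 10 bit, 4096 for 12 bit */
  uint32_t out = ((uint32_t)v * (uint32_t)scale) >> 16;
  return (uint8_t)(out > 255 ? 255 : out);  /* clamp, as clamp255() would */
}

/* model_convert_16_to_8(1023, 10) == 255, model_convert_16_to_8(4095, 12) == 255,
   model_convert_16_to_8(512, 10) == 128. */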
+static int Ix10ToI010(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height, + int subsample_x, + int subsample_y) { + const int dst_y_width = Abs(width); + const int dst_y_height = Abs(height); + const int src_uv_width = SUBSAMPLE(width, subsample_x, subsample_x); + const int src_uv_height = SUBSAMPLE(height, subsample_y, subsample_y); + const int dst_uv_width = SUBSAMPLE(dst_y_width, 1, 1); + const int dst_uv_height = SUBSAMPLE(dst_y_height, 1, 1); + if (width <= 0 || height == 0) { + return -1; + } + if (dst_y) { + ScalePlane_12(src_y, src_stride_y, width, height, dst_y, dst_stride_y, + dst_y_width, dst_y_height, kFilterBilinear); + } + ScalePlane_12(src_u, src_stride_u, src_uv_width, src_uv_height, dst_u, + dst_stride_u, dst_uv_width, dst_uv_height, kFilterBilinear); + ScalePlane_12(src_v, src_stride_v, src_uv_width, src_uv_height, dst_v, + dst_stride_v, dst_uv_width, dst_uv_height, kFilterBilinear); + return 0; +} + +LIBYUV_API +int I410ToI010(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height) { + return Ix10ToI010(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, dst_stride_u, + dst_v, dst_stride_v, width, height, 0, 0); +} + +LIBYUV_API +int I210ToI010(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height) { + return Ix10ToI010(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, dst_stride_u, + dst_v, dst_stride_v, width, height, 1, 0); +} + +// Any I[420]1[02] to P[420]1[02] format with mirroring. 
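Editor's sketch for the Pxxx targets introduced here, under the usual assumption that P010/P012 keep each sample in the top bits of a 16-bit word with U and V interleaved: the IxxxToPxxx helper below therefore only needs a left shift by (16 - depth) for the Y plane (ConvertToMSBPlane_16) plus a shift-and-interleave for chroma (MergeUVPlane_16). A scalar model of one merged chroma row; `model_merge_uv_msb` is illustrative, not a library function.

#include <stdint.h>

/* Illustrative model: shift LSB-justified samples to the MSBs and interleave. */
static void model_merge_uv_msb(const uint16_t* src_u, const uint16_t* src_v,
                               uint16_t* dst_uv, int width, int depth) {
  const int shift = 16 - depth;             /* 6 for 10 bit, 4 for 12 bit */
  for (int x = 0; x < width; ++x) {
    dst_uv[2 * x + 0] = (uint16_t)(src_u[x] << shift);
    dst_uv[2 * x + 1] = (uint16_t)(src_v[x] << shift);
  }
}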
+static int IxxxToPxxx(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height, + int subsample_x, + int subsample_y, + int depth) { + const int uv_width = SUBSAMPLE(width, subsample_x, subsample_x); + const int uv_height = SUBSAMPLE(height, subsample_y, subsample_y); + if (width <= 0 || height == 0) { + return -1; + } + + ConvertToMSBPlane_16(src_y, src_stride_y, dst_y, dst_stride_y, width, height, + depth); + MergeUVPlane_16(src_u, src_stride_u, src_v, src_stride_v, dst_uv, + dst_stride_uv, uv_width, uv_height, depth); + return 0; +} + +LIBYUV_API +int I010ToP010(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return IxxxToPxxx(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_uv, dst_stride_uv, + width, height, 1, 1, 10); +} + +LIBYUV_API +int I210ToP210(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return IxxxToPxxx(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_uv, dst_stride_uv, + width, height, 1, 0, 10); +} + +LIBYUV_API +int I012ToP012(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return IxxxToPxxx(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_uv, dst_stride_uv, + width, height, 1, 1, 12); +} + +LIBYUV_API +int I212ToP212(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return IxxxToPxxx(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_uv, dst_stride_uv, + width, height, 1, 0, 12); +} + // 422 chroma is 1/2 width, 1x height // 420 chroma is 1/2 width, 1/2 height LIBYUV_API @@ -215,6 +626,48 @@ int I422ToI420(const uint8_t* src_y, dst_v, dst_stride_v, width, height, src_uv_width, height); } +LIBYUV_API +int I422ToI210(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height) { + int halfwidth = (width + 1) >> 1; + if ((!src_y && dst_y) || !src_u || !src_v || !dst_u || !dst_v || width <= 0 || + height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_y = src_y + (height - 1) * src_stride_y; + src_u = src_u + (height - 1) * src_stride_u; + src_v = src_v + (height - 1) * src_stride_v; + src_stride_y = -src_stride_y; + src_stride_u = -src_stride_u; + src_stride_v = -src_stride_v; + } + + // Convert Y plane. 
+ Convert8To16Plane(src_y, src_stride_y, dst_y, dst_stride_y, 1024, width, + height); + // Convert UV planes. + Convert8To16Plane(src_u, src_stride_u, dst_u, dst_stride_u, 1024, halfwidth, + height); + Convert8To16Plane(src_v, src_stride_v, dst_v, dst_stride_v, 1024, halfwidth, + height); + return 0; +} + // TODO(fbarchard): Implement row conversion. LIBYUV_API int I422ToNV21(const uint8_t* src_y, @@ -256,6 +709,165 @@ int I422ToNV21(const uint8_t* src_y, return 0; } +LIBYUV_API +int MM21ToNV12(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + if (!src_uv || !dst_uv || width <= 0) { + return -1; + } + + int sign = height < 0 ? -1 : 1; + + if (dst_y) { + DetilePlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height, 32); + } + DetilePlane(src_uv, src_stride_uv, dst_uv, dst_stride_uv, (width + 1) & ~1, + (height + sign) / 2, 16); + + return 0; +} + +LIBYUV_API +int MM21ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + int sign = height < 0 ? -1 : 1; + + if (!src_uv || !dst_u || !dst_v || width <= 0) { + return -1; + } + + if (dst_y) { + DetilePlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height, 32); + } + DetileSplitUVPlane(src_uv, src_stride_uv, dst_u, dst_stride_u, dst_v, + dst_stride_v, (width + 1) & ~1, (height + sign) / 2, 16); + + return 0; +} + +LIBYUV_API +int MM21ToYUY2(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_yuy2, + int dst_stride_yuy2, + int width, + int height) { + if (!src_y || !src_uv || !dst_yuy2 || width <= 0) { + return -1; + } + + DetileToYUY2(src_y, src_stride_y, src_uv, src_stride_uv, dst_yuy2, + dst_stride_yuy2, width, height, 32); + + return 0; +} + +// Convert MT2T into P010. See tinyurl.com/mtk-10bit-video-format for format +// documentation. +// TODO(greenjustin): Add an MT2T to I420 conversion. +LIBYUV_API +int MT2TToP010(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + if (width <= 0 || !height || !src_uv || !dst_uv) { + return -1; + } + + { + int u_width = (width + 1) / 2; + int uv_width = 2 * u_width; + int y = 0; + int uv_height = uv_height = (height + 1) / 2; + const int tile_width = 16; + const int y_tile_height = 32; + const int uv_tile_height = 16; + int padded_width = (width + tile_width - 1) & ~(tile_width - 1); + int y_tile_row_size = padded_width * y_tile_height * 10 / 8; + int uv_tile_row_size = padded_width * uv_tile_height * 10 / 8; + size_t row_buf_size = padded_width * y_tile_height * sizeof(uint16_t); + void (*UnpackMT2T)(const uint8_t* src, uint16_t* dst, size_t size) = + UnpackMT2T_C; + align_buffer_64(row_buf, row_buf_size); + +#if defined(HAS_UNPACKMT2T_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + UnpackMT2T = UnpackMT2T_NEON; + } +#endif + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + uv_height = (height + 1) / 2; + if (dst_y) { + dst_y = dst_y + (height - 1) * dst_stride_y; + dst_stride_y = -dst_stride_y; + } + dst_uv = dst_uv + (uv_height - 1) * dst_stride_uv; + dst_stride_uv = -dst_stride_uv; + } + + // Unpack and detile Y in rows of tiles + if (src_y && dst_y) { + for (y = 0; y < (height & ~(y_tile_height - 1)); y += y_tile_height) { + UnpackMT2T(src_y, (uint16_t*)row_buf, y_tile_row_size); + DetilePlane_16((uint16_t*)row_buf, padded_width, dst_y, dst_stride_y, + width, y_tile_height, y_tile_height); + src_y += src_stride_y * y_tile_height; + dst_y += dst_stride_y * y_tile_height; + } + if (height & (y_tile_height - 1)) { + UnpackMT2T(src_y, (uint16_t*)row_buf, y_tile_row_size); + DetilePlane_16((uint16_t*)row_buf, padded_width, dst_y, dst_stride_y, + width, height & (y_tile_height - 1), y_tile_height); + } + } + + // Unpack and detile UV plane + for (y = 0; y < (uv_height & ~(uv_tile_height - 1)); y += uv_tile_height) { + UnpackMT2T(src_uv, (uint16_t*)row_buf, uv_tile_row_size); + DetilePlane_16((uint16_t*)row_buf, padded_width, dst_uv, dst_stride_uv, + uv_width, uv_tile_height, uv_tile_height); + src_uv += src_stride_uv * uv_tile_height; + dst_uv += dst_stride_uv * uv_tile_height; + } + if (uv_height & (uv_tile_height - 1)) { + UnpackMT2T(src_uv, (uint16_t*)row_buf, uv_tile_row_size); + DetilePlane_16((uint16_t*)row_buf, padded_width, dst_uv, dst_stride_uv, + uv_width, uv_height & (uv_tile_height - 1), + uv_tile_height); + } + free_aligned_buffer_64(row_buf); + } + return 0; +} + #ifdef I422TONV21_ROW_VERSION // Unittest fails for this version. // 422 chroma is 1/2 width, 1x height @@ -277,7 +889,7 @@ int I422ToNV21(const uint8_t* src_y, int y; void (*MergeUVRow)(const uint8_t* src_u, const uint8_t* src_v, uint8_t* dst_uv, int width) = MergeUVRow_C; - void (*InterpolateRow)(uint8_t * dst_ptr, const uint8_t* src_ptr, + void (*InterpolateRow)(uint8_t* dst_ptr, const uint8_t* src_ptr, ptrdiff_t src_stride, int dst_width, int source_y_fraction) = InterpolateRow_C; int halfwidth = (width + 1) >> 1; @@ -320,14 +932,6 @@ int I422ToNV21(const uint8_t* src_y, } } #endif -#if defined(HAS_MERGEUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - MergeUVRow = MergeUVRow_Any_MMI; - if (IS_ALIGNED(halfwidth, 8)) { - MergeUVRow = MergeUVRow_MMI; - } - } -#endif #if defined(HAS_MERGEUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { MergeUVRow = MergeUVRow_Any_MSA; @@ -336,6 +940,14 @@ int I422ToNV21(const uint8_t* src_y, } } #endif +#if defined(HAS_MERGEUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + MergeUVRow = MergeUVRow_Any_LSX; + if (IS_ALIGNED(halfwidth, 16)) { + MergeUVRow = MergeUVRow_LSX; + } + } +#endif #if defined(HAS_INTERPOLATEROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { InterpolateRow = InterpolateRow_Any_SSSE3; @@ -360,14 +972,6 @@ int I422ToNV21(const uint8_t* src_y, } } #endif -#if defined(HAS_INTERPOLATEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - InterpolateRow = InterpolateRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - InterpolateRow = InterpolateRow_MMI; - } - } -#endif #if defined(HAS_INTERPOLATEROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { InterpolateRow = InterpolateRow_Any_MSA; @@ -376,6 +980,14 @@ int I422ToNV21(const uint8_t* src_y, } } #endif +#if defined(HAS_INTERPOLATEROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + InterpolateRow = InterpolateRow_Any_LSX; + if (IS_ALIGNED(width, 32)) { + InterpolateRow = InterpolateRow_LSX; + } + } +#endif if (dst_y) { CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, halfwidth, height); @@ -439,8 
+1051,7 @@ int I444ToNV12(const uint8_t* src_y, int dst_stride_uv, int width, int height) { - if (!src_y || !src_u || !src_v || !dst_y || !dst_uv || width <= 0 || - height == 0) { + if (!src_y || !src_u || !src_v || !dst_uv || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. @@ -613,6 +1224,168 @@ int NV21ToI420(const uint8_t* src_y, width, height); } +LIBYUV_API +int NV12ToNV24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + if (width <= 0 || height == 0) { + return -1; + } + + if (dst_y) { + ScalePlane(src_y, src_stride_y, width, height, dst_y, dst_stride_y, + Abs(width), Abs(height), kFilterBilinear); + } + UVScale(src_uv, src_stride_uv, SUBSAMPLE(width, 1, 1), + SUBSAMPLE(height, 1, 1), dst_uv, dst_stride_uv, Abs(width), + Abs(height), kFilterBilinear); + return 0; +} + +LIBYUV_API +int NV16ToNV24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + if (width <= 0 || height == 0) { + return -1; + } + + if (dst_y) { + ScalePlane(src_y, src_stride_y, width, height, dst_y, dst_stride_y, + Abs(width), Abs(height), kFilterBilinear); + } + UVScale(src_uv, src_stride_uv, SUBSAMPLE(width, 1, 1), height, dst_uv, + dst_stride_uv, Abs(width), Abs(height), kFilterBilinear); + return 0; +} + +// Any P[420]1[02] to I[420]1[02] format with mirroring. +static int PxxxToIxxx(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height, + int subsample_x, + int subsample_y, + int depth) { + const int uv_width = SUBSAMPLE(width, subsample_x, subsample_x); + const int uv_height = SUBSAMPLE(height, subsample_y, subsample_y); + if (width <= 0 || height == 0) { + return -1; + } + ConvertToLSBPlane_16(src_y, src_stride_y, dst_y, dst_stride_y, width, height, + depth); + SplitUVPlane_16(src_uv, src_stride_uv, dst_u, dst_stride_u, dst_v, + dst_stride_v, uv_width, uv_height, depth); + return 0; +} + +LIBYUV_API +int P010ToI010(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height) { + return PxxxToIxxx(src_y, src_stride_y, src_uv, src_stride_uv, dst_y, + dst_stride_y, dst_u, dst_stride_u, dst_v, dst_stride_v, + width, height, 1, 1, 10); +} + +LIBYUV_API +int P012ToI012(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height) { + return PxxxToIxxx(src_y, src_stride_y, src_uv, src_stride_uv, dst_y, + dst_stride_y, dst_u, dst_stride_u, dst_v, dst_stride_v, + width, height, 1, 1, 12); +} + +LIBYUV_API +int P010ToP410(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + if (width <= 0 || height == 0) { + return -1; + } + + if (dst_y) { + ScalePlane_16(src_y, src_stride_y, width, height, dst_y, dst_stride_y, + Abs(width), 
Abs(height), kFilterBilinear); + } + UVScale_16(src_uv, src_stride_uv, SUBSAMPLE(width, 1, 1), + SUBSAMPLE(height, 1, 1), dst_uv, dst_stride_uv, Abs(width), + Abs(height), kFilterBilinear); + return 0; +} + +LIBYUV_API +int P210ToP410(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + if (width <= 0 || height == 0) { + return -1; + } + + if (dst_y) { + ScalePlane_16(src_y, src_stride_y, width, height, dst_y, dst_stride_y, + Abs(width), Abs(height), kFilterBilinear); + } + UVScale_16(src_uv, src_stride_uv, SUBSAMPLE(width, 1, 1), height, dst_uv, + dst_stride_uv, Abs(width), Abs(height), kFilterBilinear); + return 0; +} + // Convert YUY2 to I420. LIBYUV_API int YUY2ToI420(const uint8_t* src_yuy2, @@ -667,18 +1440,6 @@ int YUY2ToI420(const uint8_t* src_yuy2, } } #endif -#if defined(HAS_YUY2TOYROW_MMI) && defined(HAS_YUY2TOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - YUY2ToYRow = YUY2ToYRow_Any_MMI; - YUY2ToUVRow = YUY2ToUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - YUY2ToYRow = YUY2ToYRow_MMI; - if (IS_ALIGNED(width, 16)) { - YUY2ToUVRow = YUY2ToUVRow_MMI; - } - } - } -#endif #if defined(HAS_YUY2TOYROW_MSA) && defined(HAS_YUY2TOUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { YUY2ToYRow = YUY2ToYRow_Any_MSA; @@ -689,6 +1450,16 @@ int YUY2ToI420(const uint8_t* src_yuy2, } } #endif +#if defined(HAS_YUY2TOYROW_LASX) && defined(HAS_YUY2TOUVROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + YUY2ToYRow = YUY2ToYRow_Any_LASX; + YUY2ToUVRow = YUY2ToUVRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + YUY2ToYRow = YUY2ToYRow_LASX; + YUY2ToUVRow = YUY2ToUVRow_LASX; + } + } +#endif for (y = 0; y < height - 1; y += 2) { YUY2ToUVRow(src_yuy2, src_stride_yuy2, dst_u, dst_v, width); @@ -760,16 +1531,6 @@ int UYVYToI420(const uint8_t* src_uyvy, } } #endif -#if defined(HAS_UYVYTOYROW_MMI) && defined(HAS_UYVYTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - UYVYToYRow = UYVYToYRow_Any_MMI; - UYVYToUVRow = UYVYToUVRow_Any_MMI; - if (IS_ALIGNED(width, 16)) { - UYVYToYRow = UYVYToYRow_MMI; - UYVYToUVRow = UYVYToUVRow_MMI; - } - } -#endif #if defined(HAS_UYVYTOYROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { UYVYToYRow = UYVYToYRow_Any_MSA; @@ -780,6 +1541,16 @@ int UYVYToI420(const uint8_t* src_uyvy, } } #endif +#if defined(HAS_UYVYTOYROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + UYVYToYRow = UYVYToYRow_Any_LASX; + UYVYToUVRow = UYVYToUVRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + UYVYToYRow = UYVYToYRow_LASX; + UYVYToUVRow = UYVYToUVRow_LASX; + } + } +#endif for (y = 0; y < height - 1; y += 2) { UYVYToUVRow(src_uyvy, src_stride_uyvy, dst_u, dst_v, width); @@ -962,30 +1733,10 @@ int ARGBToI420(const uint8_t* src_argb, src_argb = src_argb + (height - 1) * src_stride_argb; src_stride_argb = -src_stride_argb; } -#if defined(HAS_ARGBTOYROW_SSSE3) && defined(HAS_ARGBTOUVROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBToUVRow = ARGBToUVRow_Any_SSSE3; - ARGBToYRow = ARGBToYRow_Any_SSSE3; - if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_SSSE3; - ARGBToYRow = ARGBToYRow_SSSE3; - } - } -#endif -#if defined(HAS_ARGBTOYROW_AVX2) && defined(HAS_ARGBTOUVROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToUVRow = ARGBToUVRow_Any_AVX2; - ARGBToYRow = ARGBToYRow_Any_AVX2; - if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_AVX2; - ARGBToYRow = ARGBToYRow_AVX2; - } - } -#endif #if defined(HAS_ARGBTOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ARGBToYRow = ARGBToYRow_Any_NEON; - if 
(IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ARGBToYRow = ARGBToYRow_NEON; } } @@ -998,15 +1749,35 @@ int ARGBToI420(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYROW_MMI) && defined(HAS_ARGBTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToYRow = ARGBToYRow_Any_MMI; - ARGBToUVRow = ARGBToUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBToYRow = ARGBToYRow_MMI; +#if defined(HAS_ARGBTOYROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToYRow = ARGBToYRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_SSSE3; } + } +#endif +#if defined(HAS_ARGBTOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVRow = ARGBToUVRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_MMI; + ARGBToUVRow = ARGBToUVRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOYROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToYRow = ARGBToYRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToYRow = ARGBToYRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBTOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVRow = ARGBToUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_AVX2; } } #endif @@ -1022,6 +1793,24 @@ int ARGBToI420(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOYROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGBToYRow = ARGBToYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_LSX; + } + } +#endif +#if defined(HAS_ARGBTOYROW_LASX) && defined(HAS_ARGBTOUVROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToYRow = ARGBToYRow_Any_LASX; + ARGBToUVRow = ARGBToUVRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGBToYRow = ARGBToYRow_LASX; + ARGBToUVRow = ARGBToUVRow_LASX; + } + } +#endif for (y = 0; y < height - 1; y += 2) { ARGBToUVRow(src_argb, src_stride_argb, dst_u, dst_v, width); @@ -1066,20 +1855,10 @@ int BGRAToI420(const uint8_t* src_bgra, src_bgra = src_bgra + (height - 1) * src_stride_bgra; src_stride_bgra = -src_stride_bgra; } -#if defined(HAS_BGRATOYROW_SSSE3) && defined(HAS_BGRATOUVROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - BGRAToUVRow = BGRAToUVRow_Any_SSSE3; - BGRAToYRow = BGRAToYRow_Any_SSSE3; - if (IS_ALIGNED(width, 16)) { - BGRAToUVRow = BGRAToUVRow_SSSE3; - BGRAToYRow = BGRAToYRow_SSSE3; - } - } -#endif #if defined(HAS_BGRATOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { BGRAToYRow = BGRAToYRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { BGRAToYRow = BGRAToYRow_NEON; } } @@ -1092,15 +1871,35 @@ int BGRAToI420(const uint8_t* src_bgra, } } #endif -#if defined(HAS_BGRATOYROW_MMI) && defined(HAS_BGRATOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - BGRAToYRow = BGRAToYRow_Any_MMI; - BGRAToUVRow = BGRAToUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - BGRAToYRow = BGRAToYRow_MMI; +#if defined(HAS_BGRATOYROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + BGRAToYRow = BGRAToYRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + BGRAToYRow = BGRAToYRow_SSSE3; } + } +#endif +#if defined(HAS_BGRATOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + BGRAToUVRow = BGRAToUVRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - BGRAToUVRow = BGRAToUVRow_MMI; + BGRAToUVRow = BGRAToUVRow_SSSE3; + } + } +#endif +#if defined(HAS_BGRATOYROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + BGRAToYRow = BGRAToYRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + BGRAToYRow = BGRAToYRow_AVX2; + } + } +#endif +#if defined(HAS_BGRATOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + BGRAToUVRow = BGRAToUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + BGRAToUVRow = BGRAToUVRow_AVX2; } } #endif @@ -1110,10 +1909,30 
@@ int BGRAToI420(const uint8_t* src_bgra, BGRAToUVRow = BGRAToUVRow_Any_MSA; if (IS_ALIGNED(width, 16)) { BGRAToYRow = BGRAToYRow_MSA; + } + if (IS_ALIGNED(width, 32)) { BGRAToUVRow = BGRAToUVRow_MSA; } } #endif +#if defined(HAS_BGRATOYROW_LSX) && defined(HAS_BGRATOUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + BGRAToYRow = BGRAToYRow_Any_LSX; + BGRAToUVRow = BGRAToUVRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + BGRAToYRow = BGRAToYRow_LSX; + BGRAToUVRow = BGRAToUVRow_LSX; + } + } +#endif +#if defined(HAS_BGRATOYROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + BGRAToYRow = BGRAToYRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + BGRAToYRow = BGRAToYRow_LASX; + } + } +#endif for (y = 0; y < height - 1; y += 2) { BGRAToUVRow(src_bgra, src_stride_bgra, dst_u, dst_v, width); @@ -1158,30 +1977,42 @@ int ABGRToI420(const uint8_t* src_abgr, src_abgr = src_abgr + (height - 1) * src_stride_abgr; src_stride_abgr = -src_stride_abgr; } -#if defined(HAS_ABGRTOYROW_SSSE3) && defined(HAS_ABGRTOUVROW_SSSE3) +#if defined(HAS_ABGRTOYROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ABGRToUVRow = ABGRToUVRow_Any_SSSE3; ABGRToYRow = ABGRToYRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ABGRToUVRow = ABGRToUVRow_SSSE3; ABGRToYRow = ABGRToYRow_SSSE3; } } #endif -#if defined(HAS_ABGRTOYROW_AVX2) && defined(HAS_ABGRTOUVROW_AVX2) +#if defined(HAS_ABGRTOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ABGRToUVRow = ABGRToUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ABGRToUVRow = ABGRToUVRow_SSSE3; + } + } +#endif +#if defined(HAS_ABGRTOYROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - ABGRToUVRow = ABGRToUVRow_Any_AVX2; ABGRToYRow = ABGRToYRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - ABGRToUVRow = ABGRToUVRow_AVX2; ABGRToYRow = ABGRToYRow_AVX2; } } #endif +#if defined(HAS_ABGRTOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ABGRToUVRow = ABGRToUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ABGRToUVRow = ABGRToUVRow_AVX2; + } + } +#endif #if defined(HAS_ABGRTOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ABGRToYRow = ABGRToYRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ABGRToYRow = ABGRToYRow_NEON; } } @@ -1194,18 +2025,6 @@ int ABGRToI420(const uint8_t* src_abgr, } } #endif -#if defined(HAS_ABGRTOYROW_MMI) && defined(HAS_ABGRTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ABGRToYRow = ABGRToYRow_Any_MMI; - ABGRToUVRow = ABGRToUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ABGRToYRow = ABGRToYRow_MMI; - } - if (IS_ALIGNED(width, 16)) { - ABGRToUVRow = ABGRToUVRow_MMI; - } - } -#endif #if defined(HAS_ABGRTOYROW_MSA) && defined(HAS_ABGRTOUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ABGRToYRow = ABGRToYRow_Any_MSA; @@ -1216,6 +2035,24 @@ int ABGRToI420(const uint8_t* src_abgr, } } #endif +#if defined(HAS_ABGRTOYROW_LSX) && defined(HAS_ABGRTOUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ABGRToYRow = ABGRToYRow_Any_LSX; + ABGRToUVRow = ABGRToUVRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ABGRToYRow = ABGRToYRow_LSX; + ABGRToUVRow = ABGRToUVRow_LSX; + } + } +#endif +#if defined(HAS_ABGRTOYROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ABGRToYRow = ABGRToYRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ABGRToYRow = ABGRToYRow_LASX; + } + } +#endif for (y = 0; y < height - 1; y += 2) { ABGRToUVRow(src_abgr, src_stride_abgr, dst_u, dst_v, width); @@ -1260,20 +2097,26 @@ int RGBAToI420(const uint8_t* src_rgba, src_rgba = src_rgba + (height - 1) * src_stride_rgba; src_stride_rgba = -src_stride_rgba; } -#if defined(HAS_RGBATOYROW_SSSE3) && defined(HAS_RGBATOUVROW_SSSE3) +#if defined(HAS_RGBATOYROW_SSSE3) if 
(TestCpuFlag(kCpuHasSSSE3)) { - RGBAToUVRow = RGBAToUVRow_Any_SSSE3; RGBAToYRow = RGBAToYRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - RGBAToUVRow = RGBAToUVRow_SSSE3; RGBAToYRow = RGBAToYRow_SSSE3; } } #endif +#if defined(HAS_RGBATOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + RGBAToUVRow = RGBAToUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + RGBAToUVRow = RGBAToUVRow_SSSE3; + } + } +#endif #if defined(HAS_RGBATOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { RGBAToYRow = RGBAToYRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { RGBAToYRow = RGBAToYRow_NEON; } } @@ -1286,18 +2129,6 @@ int RGBAToI420(const uint8_t* src_rgba, } } #endif -#if defined(HAS_RGBATOYROW_MMI) && defined(HAS_RGBATOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - RGBAToYRow = RGBAToYRow_Any_MMI; - RGBAToUVRow = RGBAToUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - RGBAToYRow = RGBAToYRow_MMI; - } - if (IS_ALIGNED(width, 16)) { - RGBAToUVRow = RGBAToUVRow_MMI; - } - } -#endif #if defined(HAS_RGBATOYROW_MSA) && defined(HAS_RGBATOUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { RGBAToYRow = RGBAToYRow_Any_MSA; @@ -1308,6 +2139,24 @@ int RGBAToI420(const uint8_t* src_rgba, } } #endif +#if defined(HAS_RGBATOYROW_LSX) && defined(HAS_RGBATOUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + RGBAToYRow = RGBAToYRow_Any_LSX; + RGBAToUVRow = RGBAToUVRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + RGBAToYRow = RGBAToYRow_LSX; + RGBAToUVRow = RGBAToUVRow_LSX; + } + } +#endif +#if defined(HAS_RGBATOYROW_LASX) + if (TestCpuFlag(kCpuHasNEON)) { + RGBAToYRow = RGBAToYRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + RGBAToYRow = RGBAToYRow_LASX; + } + } +#endif for (y = 0; y < height - 1; y += 2) { RGBAToUVRow(src_rgba, src_stride_rgba, dst_u, dst_v, width); @@ -1325,6 +2174,12 @@ int RGBAToI420(const uint8_t* src_rgba, return 0; } +// Enabled if 1 pass is available +#if (defined(HAS_RGB24TOYROW_NEON) || defined(HAS_RGB24TOYROW_MSA) || \ + defined(HAS_RGB24TOYROW_LSX)) +#define HAS_RGB24TOYROW +#endif + // Convert RGB24 to I420. LIBYUV_API int RGB24ToI420(const uint8_t* src_rgb24, @@ -1338,8 +2193,7 @@ int RGB24ToI420(const uint8_t* src_rgb24, int width, int height) { int y; -#if (defined(HAS_RGB24TOYROW_NEON) || defined(HAS_RGB24TOYROW_MSA) || \ - defined(HAS_RGB24TOYROW_MMI)) +#if defined(HAS_RGB24TOYROW) void (*RGB24ToUVRow)(const uint8_t* src_rgb24, int src_stride_rgb24, uint8_t* dst_u, uint8_t* dst_v, int width) = RGB24ToUVRow_C; @@ -1364,29 +2218,16 @@ int RGB24ToI420(const uint8_t* src_rgb24, src_stride_rgb24 = -src_stride_rgb24; } +#if defined(HAS_RGB24TOYROW) + // Neon version does direct RGB24 to YUV. -#if defined(HAS_RGB24TOYROW_NEON) +#if defined(HAS_RGB24TOYROW_NEON) && defined(HAS_RGB24TOUVROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { RGB24ToUVRow = RGB24ToUVRow_Any_NEON; RGB24ToYRow = RGB24ToYRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { RGB24ToYRow = RGB24ToYRow_NEON; - if (IS_ALIGNED(width, 16)) { - RGB24ToUVRow = RGB24ToUVRow_NEON; - } - } - } -// MMI and MSA version does direct RGB24 to YUV. 
-#elif (defined(HAS_RGB24TOYROW_MMI) || defined(HAS_RGB24TOYROW_MSA)) -#if defined(HAS_RGB24TOYROW_MMI) && defined(HAS_RGB24TOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - RGB24ToUVRow = RGB24ToUVRow_Any_MMI; - RGB24ToYRow = RGB24ToYRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - RGB24ToYRow = RGB24ToYRow_MMI; - if (IS_ALIGNED(width, 16)) { - RGB24ToUVRow = RGB24ToUVRow_MMI; - } + RGB24ToUVRow = RGB24ToUVRow_NEON; } } #endif @@ -1400,8 +2241,30 @@ int RGB24ToI420(const uint8_t* src_rgb24, } } #endif +#if defined(HAS_RGB24TOYROW_LSX) && defined(HAS_RGB24TOUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + RGB24ToUVRow = RGB24ToUVRow_Any_LSX; + RGB24ToYRow = RGB24ToYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + RGB24ToYRow = RGB24ToYRow_LSX; + RGB24ToUVRow = RGB24ToUVRow_LSX; + } + } +#endif +#if defined(HAS_RGB24TOYROW_LASX) && defined(HAS_RGB24TOUVROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + RGB24ToUVRow = RGB24ToUVRow_Any_LASX; + RGB24ToYRow = RGB24ToYRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + RGB24ToYRow = RGB24ToYRow_LASX; + RGB24ToUVRow = RGB24ToUVRow_LASX; + } + } +#endif + // Other platforms do intermediate conversion from RGB24 to ARGB. -#else +#else // HAS_RGB24TOYROW + #if defined(HAS_RGB24TOARGBROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { RGB24ToARGBRow = RGB24ToARGBRow_Any_SSSE3; @@ -1410,48 +2273,58 @@ int RGB24ToI420(const uint8_t* src_rgb24, } } #endif -#if defined(HAS_ARGBTOYROW_SSSE3) && defined(HAS_ARGBTOUVROW_SSSE3) +#if defined(HAS_ARGBTOYROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBToUVRow = ARGBToUVRow_Any_SSSE3; ARGBToYRow = ARGBToYRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_SSSE3; ARGBToYRow = ARGBToYRow_SSSE3; } } #endif -#if defined(HAS_ARGBTOYROW_AVX2) && defined(HAS_ARGBTOUVROW_AVX2) +#if defined(HAS_ARGBTOYROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToUVRow = ARGBToUVRow_Any_AVX2; ARGBToYRow = ARGBToYRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_AVX2; ARGBToYRow = ARGBToYRow_AVX2; } } #endif +#if defined(HAS_ARGBTOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVRow = ARGBToUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToUVRow = ARGBToUVRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVRow = ARGBToUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_AVX2; + } + } #endif +#endif // HAS_RGB24TOYROW { -#if !(defined(HAS_RGB24TOYROW_NEON) || defined(HAS_RGB24TOYROW_MSA) || \ - defined(HAS_RGB24TOYROW_MMI)) +#if !defined(HAS_RGB24TOYROW) // Allocate 2 rows of ARGB. 
- const int kRowSize = (width * 4 + 31) & ~31; - align_buffer_64(row, kRowSize * 2); + const int row_size = (width * 4 + 31) & ~31; + align_buffer_64(row, row_size * 2); #endif for (y = 0; y < height - 1; y += 2) { -#if (defined(HAS_RGB24TOYROW_NEON) || defined(HAS_RGB24TOYROW_MSA) || \ - defined(HAS_RGB24TOYROW_MMI)) +#if defined(HAS_RGB24TOYROW) RGB24ToUVRow(src_rgb24, src_stride_rgb24, dst_u, dst_v, width); RGB24ToYRow(src_rgb24, dst_y, width); RGB24ToYRow(src_rgb24 + src_stride_rgb24, dst_y + dst_stride_y, width); #else RGB24ToARGBRow(src_rgb24, row, width); - RGB24ToARGBRow(src_rgb24 + src_stride_rgb24, row + kRowSize, width); - ARGBToUVRow(row, kRowSize, dst_u, dst_v, width); + RGB24ToARGBRow(src_rgb24 + src_stride_rgb24, row + row_size, width); + ARGBToUVRow(row, row_size, dst_u, dst_v, width); ARGBToYRow(row, dst_y, width); - ARGBToYRow(row + kRowSize, dst_y + dst_stride_y, width); + ARGBToYRow(row + row_size, dst_y + dst_stride_y, width); #endif src_rgb24 += src_stride_rgb24 * 2; dst_y += dst_stride_y * 2; @@ -1459,8 +2332,7 @@ int RGB24ToI420(const uint8_t* src_rgb24, dst_v += dst_stride_v; } if (height & 1) { -#if (defined(HAS_RGB24TOYROW_NEON) || defined(HAS_RGB24TOYROW_MSA) || \ - defined(HAS_RGB24TOYROW_MMI)) +#if defined(HAS_RGB24TOYROW) RGB24ToUVRow(src_rgb24, 0, dst_u, dst_v, width); RGB24ToYRow(src_rgb24, dst_y, width); #else @@ -1469,15 +2341,19 @@ int RGB24ToI420(const uint8_t* src_rgb24, ARGBToYRow(row, dst_y, width); #endif } -#if !(defined(HAS_RGB24TOYROW_NEON) || defined(HAS_RGB24TOYROW_MSA) || \ - defined(HAS_RGB24TOYROW_MMI)) +#if !defined(HAS_RGB24TOYROW) free_aligned_buffer_64(row); #endif } return 0; } +#undef HAS_RGB24TOYROW + +// Enabled if 1 pass is available +#if defined(HAS_RGB24TOYJROW_NEON) || defined(HAS_RGB24TOYJROW_MSA) +#define HAS_RGB24TOYJROW +#endif -// TODO(fbarchard): Use Matrix version to implement I420 and J420. // Convert RGB24 to J420. LIBYUV_API int RGB24ToJ420(const uint8_t* src_rgb24, @@ -1491,8 +2367,7 @@ int RGB24ToJ420(const uint8_t* src_rgb24, int width, int height) { int y; -#if (defined(HAS_RGB24TOYJROW_NEON) && defined(HAS_RGB24TOUVJROW_NEON)) || \ - defined(HAS_RGB24TOYJROW_MSA) || defined(HAS_RGB24TOYJROW_MMI) +#if defined(HAS_RGB24TOYJROW) void (*RGB24ToUVJRow)(const uint8_t* src_rgb24, int src_stride_rgb24, uint8_t* dst_u, uint8_t* dst_v, int width) = RGB24ToUVJRow_C; @@ -1517,29 +2392,16 @@ int RGB24ToJ420(const uint8_t* src_rgb24, src_stride_rgb24 = -src_stride_rgb24; } +#if defined(HAS_RGB24TOYJROW) + // Neon version does direct RGB24 to YUV. #if defined(HAS_RGB24TOYJROW_NEON) && defined(HAS_RGB24TOUVJROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { RGB24ToUVJRow = RGB24ToUVJRow_Any_NEON; RGB24ToYJRow = RGB24ToYJRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { RGB24ToYJRow = RGB24ToYJRow_NEON; - if (IS_ALIGNED(width, 16)) { - RGB24ToUVJRow = RGB24ToUVJRow_NEON; - } - } - } -// MMI and MSA version does direct RGB24 to YUV. 
-#elif (defined(HAS_RGB24TOYJROW_MMI) || defined(HAS_RGB24TOYJROW_MSA)) -#if defined(HAS_RGB24TOYJROW_MMI) && defined(HAS_RGB24TOUVJROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - RGB24ToUVJRow = RGB24ToUVJRow_Any_MMI; - RGB24ToYJRow = RGB24ToYJRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - RGB24ToYJRow = RGB24ToYJRow_MMI; - if (IS_ALIGNED(width, 16)) { - RGB24ToUVJRow = RGB24ToUVJRow_MMI; - } + RGB24ToUVJRow = RGB24ToUVJRow_NEON; } } #endif @@ -1553,7 +2415,26 @@ int RGB24ToJ420(const uint8_t* src_rgb24, } } #endif -#else +#if defined(HAS_RGB24TOYJROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + RGB24ToYJRow = RGB24ToYJRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + RGB24ToYJRow = RGB24ToYJRow_LSX; + } + } +#endif +#if defined(HAS_RGB24TOYJROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + RGB24ToYJRow = RGB24ToYJRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + RGB24ToYJRow = RGB24ToYJRow_LASX; + } + } +#endif + +// Other platforms do intermediate conversion from RGB24 to ARGB. +#else // HAS_RGB24TOYJROW + #if defined(HAS_RGB24TOARGBROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { RGB24ToARGBRow = RGB24ToARGBRow_Any_SSSE3; @@ -1562,48 +2443,58 @@ int RGB24ToJ420(const uint8_t* src_rgb24, } } #endif -#if defined(HAS_ARGBTOYJROW_SSSE3) && defined(HAS_ARGBTOUVJROW_SSSE3) +#if defined(HAS_ARGBTOYJROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBToUVJRow = ARGBToUVJRow_Any_SSSE3; ARGBToYJRow = ARGBToYJRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ARGBToUVJRow = ARGBToUVJRow_SSSE3; ARGBToYJRow = ARGBToYJRow_SSSE3; } } #endif -#if defined(HAS_ARGBTOYJROW_AVX2) && defined(HAS_ARGBTOUVJROW_AVX2) +#if defined(HAS_ARGBTOYJROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToUVJRow = ARGBToUVJRow_Any_AVX2; ARGBToYJRow = ARGBToYJRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - ARGBToUVJRow = ARGBToUVJRow_AVX2; ARGBToYJRow = ARGBToYJRow_AVX2; } } #endif +#if defined(HAS_ARGBTOUVJROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVJRow = ARGBToUVJRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToUVJRow = ARGBToUVJRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOUVJROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVJRow = ARGBToUVJRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVJRow = ARGBToUVJRow_AVX2; + } + } #endif +#endif // HAS_RGB24TOYJROW { -#if !((defined(HAS_RGB24TOYJROW_NEON) && defined(HAS_RGB24TOUVJROW_NEON)) || \ - defined(HAS_RGB24TOYJROW_MSA) || defined(HAS_RGB24TOYJROW_MMI)) +#if !defined(HAS_RGB24TOYJROW) // Allocate 2 rows of ARGB. 
- const int kRowSize = (width * 4 + 31) & ~31; - align_buffer_64(row, kRowSize * 2); + const int row_size = (width * 4 + 31) & ~31; + align_buffer_64(row, row_size * 2); #endif for (y = 0; y < height - 1; y += 2) { -#if ((defined(HAS_RGB24TOYJROW_NEON) && defined(HAS_RGB24TOUVJROW_NEON)) || \ - defined(HAS_RGB24TOYJROW_MSA) || defined(HAS_RGB24TOYJROW_MMI)) +#if defined(HAS_RGB24TOYJROW) RGB24ToUVJRow(src_rgb24, src_stride_rgb24, dst_u, dst_v, width); RGB24ToYJRow(src_rgb24, dst_y, width); RGB24ToYJRow(src_rgb24 + src_stride_rgb24, dst_y + dst_stride_y, width); #else RGB24ToARGBRow(src_rgb24, row, width); - RGB24ToARGBRow(src_rgb24 + src_stride_rgb24, row + kRowSize, width); - ARGBToUVJRow(row, kRowSize, dst_u, dst_v, width); + RGB24ToARGBRow(src_rgb24 + src_stride_rgb24, row + row_size, width); + ARGBToUVJRow(row, row_size, dst_u, dst_v, width); ARGBToYJRow(row, dst_y, width); - ARGBToYJRow(row + kRowSize, dst_y + dst_stride_y, width); + ARGBToYJRow(row + row_size, dst_y + dst_stride_y, width); #endif src_rgb24 += src_stride_rgb24 * 2; dst_y += dst_stride_y * 2; @@ -1611,8 +2502,7 @@ int RGB24ToJ420(const uint8_t* src_rgb24, dst_v += dst_stride_v; } if (height & 1) { -#if ((defined(HAS_RGB24TOYJROW_NEON) && defined(HAS_RGB24TOUVJROW_NEON)) || \ - defined(HAS_RGB24TOYJROW_MSA) || defined(HAS_RGB24TOYJROW_MMI)) +#if defined(HAS_RGB24TOYJROW) RGB24ToUVJRow(src_rgb24, 0, dst_u, dst_v, width); RGB24ToYJRow(src_rgb24, dst_y, width); #else @@ -1621,17 +2511,196 @@ int RGB24ToJ420(const uint8_t* src_rgb24, ARGBToYJRow(row, dst_y, width); #endif } -#if !((defined(HAS_RGB24TOYJROW_NEON) && defined(HAS_RGB24TOUVJROW_NEON)) || \ - defined(HAS_RGB24TOYJROW_MSA) || defined(HAS_RGB24TOYJROW_MMI)) +#if !defined(HAS_RGB24TOYJROW) + free_aligned_buffer_64(row); +#endif + } + return 0; +} +#undef HAS_RGB24TOYJROW + +// Enabled if 1 pass is available +#if (defined(HAS_RAWTOYROW_NEON) || defined(HAS_RAWTOYROW_MSA) || \ + defined(HAS_RAWTOYROW_LSX)) +#define HAS_RAWTOYROW +#endif + +// Convert RAW to I420. +LIBYUV_API +int RAWToI420(const uint8_t* src_raw, + int src_stride_raw, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + int y; +#if defined(HAS_RAWTOYROW) + void (*RAWToUVRow)(const uint8_t* src_raw, int src_stride_raw, uint8_t* dst_u, + uint8_t* dst_v, int width) = RAWToUVRow_C; + void (*RAWToYRow)(const uint8_t* src_raw, uint8_t* dst_y, int width) = + RAWToYRow_C; +#else + void (*RAWToARGBRow)(const uint8_t* src_rgb, uint8_t* dst_argb, int width) = + RAWToARGBRow_C; + void (*ARGBToUVRow)(const uint8_t* src_argb0, int src_stride_argb, + uint8_t* dst_u, uint8_t* dst_v, int width) = + ARGBToUVRow_C; + void (*ARGBToYRow)(const uint8_t* src_argb, uint8_t* dst_y, int width) = + ARGBToYRow_C; +#endif + if (!src_raw || !dst_y || !dst_u || !dst_v || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_raw = src_raw + (height - 1) * src_stride_raw; + src_stride_raw = -src_stride_raw; + } + +#if defined(HAS_RAWTOYROW) + +// Neon version does direct RAW to YUV. 
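The block below follows the dispatch idiom used throughout this file: start from the portable `_C` row-function pointer, upgrade to the `_Any_` SIMD variant once TestCpuFlag reports the feature (that variant accepts any width), and finally to the exact-width kernel when IS_ALIGNED passes. Abstractly, with a hypothetical `RowFunc` kernel family and `HAS_ROWFUNC_NEON` macro (editor's sketch, not library code):

void (*RowFunc)(const uint8_t* src, uint8_t* dst, int width) = RowFunc_C;
#if defined(HAS_ROWFUNC_NEON)                /* hypothetical macro and kernels */
  if (TestCpuFlag(kCpuHasNEON)) {
    RowFunc = RowFunc_Any_NEON;              /* any width, handles the tail */
    if (IS_ALIGNED(width, 16)) {
      RowFunc = RowFunc_NEON;                /* full 16-pixel groups only */
    }
  }
#endif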
+#if defined(HAS_RAWTOYROW_NEON) && defined(HAS_RAWTOUVROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + RAWToUVRow = RAWToUVRow_Any_NEON; + RAWToYRow = RAWToYRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + RAWToYRow = RAWToYRow_NEON; + RAWToUVRow = RAWToUVRow_NEON; + } + } +#endif +#if defined(HAS_RAWTOYROW_MSA) && defined(HAS_RAWTOUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RAWToUVRow = RAWToUVRow_Any_MSA; + RAWToYRow = RAWToYRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + RAWToYRow = RAWToYRow_MSA; + RAWToUVRow = RAWToUVRow_MSA; + } + } +#endif +#if defined(HAS_RAWTOYROW_LSX) && defined(HAS_RAWTOUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + RAWToUVRow = RAWToUVRow_Any_LSX; + RAWToYRow = RAWToYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + RAWToYRow = RAWToYRow_LSX; + RAWToUVRow = RAWToUVRow_LSX; + } + } +#endif +#if defined(HAS_RAWTOYROW_LASX) && defined(HAS_RAWTOUVROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + RAWToUVRow = RAWToUVRow_Any_LASX; + RAWToYRow = RAWToYRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + RAWToYRow = RAWToYRow_LASX; + RAWToUVRow = RAWToUVRow_LASX; + } + } +#endif + +// Other platforms do intermediate conversion from RAW to ARGB. +#else // HAS_RAWTOYROW + +#if defined(HAS_RAWTOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + RAWToARGBRow = RAWToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + RAWToARGBRow = RAWToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOYROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToYRow = ARGBToYRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOYROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToYRow = ARGBToYRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToYRow = ARGBToYRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBTOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVRow = ARGBToUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToUVRow = ARGBToUVRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVRow = ARGBToUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_AVX2; + } + } +#endif +#endif // HAS_RAWTOYROW + + { +#if !defined(HAS_RAWTOYROW) + // Allocate 2 rows of ARGB. + const int row_size = (width * 4 + 31) & ~31; + align_buffer_64(row, row_size * 2); +#endif + + for (y = 0; y < height - 1; y += 2) { +#if defined(HAS_RAWTOYROW) + RAWToUVRow(src_raw, src_stride_raw, dst_u, dst_v, width); + RAWToYRow(src_raw, dst_y, width); + RAWToYRow(src_raw + src_stride_raw, dst_y + dst_stride_y, width); +#else + RAWToARGBRow(src_raw, row, width); + RAWToARGBRow(src_raw + src_stride_raw, row + row_size, width); + ARGBToUVRow(row, row_size, dst_u, dst_v, width); + ARGBToYRow(row, dst_y, width); + ARGBToYRow(row + row_size, dst_y + dst_stride_y, width); +#endif + src_raw += src_stride_raw * 2; + dst_y += dst_stride_y * 2; + dst_u += dst_stride_u; + dst_v += dst_stride_v; + } + if (height & 1) { +#if defined(HAS_RAWTOYROW) + RAWToUVRow(src_raw, 0, dst_u, dst_v, width); + RAWToYRow(src_raw, dst_y, width); +#else + RAWToARGBRow(src_raw, row, width); + ARGBToUVRow(row, 0, dst_u, dst_v, width); + ARGBToYRow(row, dst_y, width); +#endif + } +#if !defined(HAS_RAWTOYROW) free_aligned_buffer_64(row); #endif } return 0; } +#undef HAS_RAWTOYROW + +// Enabled if 1 pass is available +#if defined(HAS_RAWTOYJROW_NEON) || defined(HAS_RAWTOYJROW_MSA) +#define HAS_RAWTOYJROW +#endif -// Convert RAW to I420. +// Convert RAW to J420. 
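For context on why RAWToJ420 below duplicates RAWToI420 almost line for line: the J-suffixed kernels (RAWToYJRow, RAWToUVJRow, ARGBToYJRow, ARGBToUVJRow) are the full-range "JPEG" BT.601 variants, while the plain kernels target limited/studio range, so the two converters differ only in which kernel family they dispatch. A rough model of the luma difference (editor's illustration; the 8-bit coefficients are the commonly quoted approximations, not necessarily the library's exact constants):

#include <stdint.h>

/* Limited range: Y spans 16..235. */
static uint8_t model_y_limited(uint8_t r, uint8_t g, uint8_t b) {
  return (uint8_t)(((66 * r + 129 * g + 25 * b + 128) >> 8) + 16);
}
/* Full ("JPEG") range: Y spans 0..255. */
static uint8_t model_y_full(uint8_t r, uint8_t g, uint8_t b) {
  return (uint8_t)((77 * r + 150 * g + 29 * b + 128) >> 8);
}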
LIBYUV_API -int RAWToI420(const uint8_t* src_raw, +int RAWToJ420(const uint8_t* src_raw, int src_stride_raw, uint8_t* dst_y, int dst_stride_y, @@ -1642,20 +2711,20 @@ int RAWToI420(const uint8_t* src_raw, int width, int height) { int y; -#if (defined(HAS_RAWTOYROW_NEON) && defined(HAS_RAWTOUVROW_NEON)) || \ - defined(HAS_RAWTOYROW_MSA) || defined(HAS_RAWTOYROW_MMI) - void (*RAWToUVRow)(const uint8_t* src_raw, int src_stride_raw, uint8_t* dst_u, - uint8_t* dst_v, int width) = RAWToUVRow_C; - void (*RAWToYRow)(const uint8_t* src_raw, uint8_t* dst_y, int width) = - RAWToYRow_C; +#if defined(HAS_RAWTOYJROW) + void (*RAWToUVJRow)(const uint8_t* src_raw, int src_stride_raw, + uint8_t* dst_u, uint8_t* dst_v, int width) = + RAWToUVJRow_C; + void (*RAWToYJRow)(const uint8_t* src_raw, uint8_t* dst_y, int width) = + RAWToYJRow_C; #else void (*RAWToARGBRow)(const uint8_t* src_rgb, uint8_t* dst_argb, int width) = RAWToARGBRow_C; - void (*ARGBToUVRow)(const uint8_t* src_argb0, int src_stride_argb, - uint8_t* dst_u, uint8_t* dst_v, int width) = - ARGBToUVRow_C; - void (*ARGBToYRow)(const uint8_t* src_argb, uint8_t* dst_y, int width) = - ARGBToYRow_C; + void (*ARGBToUVJRow)(const uint8_t* src_argb0, int src_stride_argb, + uint8_t* dst_u, uint8_t* dst_v, int width) = + ARGBToUVJRow_C; + void (*ARGBToYJRow)(const uint8_t* src_argb, uint8_t* dst_y, int width) = + ARGBToYJRow_C; #endif if (!src_raw || !dst_y || !dst_u || !dst_v || width <= 0 || height == 0) { return -1; @@ -1667,44 +2736,49 @@ int RAWToI420(const uint8_t* src_raw, src_stride_raw = -src_stride_raw; } +#if defined(HAS_RAWTOYJROW) + // Neon version does direct RAW to YUV. -#if defined(HAS_RAWTOYROW_NEON) && defined(HAS_RAWTOUVROW_NEON) +#if defined(HAS_RAWTOYJROW_NEON) && defined(HAS_RAWTOUVJROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - RAWToUVRow = RAWToUVRow_Any_NEON; - RAWToYRow = RAWToYRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { - RAWToYRow = RAWToYRow_NEON; - if (IS_ALIGNED(width, 16)) { - RAWToUVRow = RAWToUVRow_NEON; - } + RAWToUVJRow = RAWToUVJRow_Any_NEON; + RAWToYJRow = RAWToYJRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + RAWToYJRow = RAWToYJRow_NEON; + RAWToUVJRow = RAWToUVJRow_NEON; } } -// MMI and MSA version does direct RAW to YUV. -#elif (defined(HAS_RAWTOYROW_MMI) || defined(HAS_RAWTOYROW_MSA)) -#if defined(HAS_RAWTOYROW_MMI) && defined(HAS_RAWTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - RAWToUVRow = RAWToUVRow_Any_MMI; - RAWToYRow = RAWToYRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - RAWToYRow = RAWToYRow_MMI; - if (IS_ALIGNED(width, 16)) { - RAWToUVRow = RAWToUVRow_MMI; - } +#endif +#if defined(HAS_RAWTOYJROW_MSA) && defined(HAS_RAWTOUVJROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RAWToUVJRow = RAWToUVJRow_Any_MSA; + RAWToYJRow = RAWToYJRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + RAWToYJRow = RAWToYJRow_MSA; + RAWToUVJRow = RAWToUVJRow_MSA; } } #endif -#if defined(HAS_RAWTOYROW_MSA) && defined(HAS_RAWTOUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - RAWToUVRow = RAWToUVRow_Any_MSA; - RAWToYRow = RAWToYRow_Any_MSA; +#if defined(HAS_RAWTOYJROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + RAWToYJRow = RAWToYJRow_Any_LSX; if (IS_ALIGNED(width, 16)) { - RAWToYRow = RAWToYRow_MSA; - RAWToUVRow = RAWToUVRow_MSA; + RAWToYJRow = RAWToYJRow_LSX; + } + } +#endif +#if defined(HAS_RAWTOYJROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + RAWToYJRow = RAWToYJRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + RAWToYJRow = RAWToYJRow_LASX; } } #endif + // Other platforms do intermediate conversion from RAW to ARGB. 
-#else +#else // HAS_RAWTOYJROW + #if defined(HAS_RAWTOARGBROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { RAWToARGBRow = RAWToARGBRow_Any_SSSE3; @@ -1713,48 +2787,58 @@ int RAWToI420(const uint8_t* src_raw, } } #endif -#if defined(HAS_ARGBTOYROW_SSSE3) && defined(HAS_ARGBTOUVROW_SSSE3) +#if defined(HAS_ARGBTOYJROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBToUVRow = ARGBToUVRow_Any_SSSE3; - ARGBToYRow = ARGBToYRow_Any_SSSE3; + ARGBToYJRow = ARGBToYJRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_SSSE3; - ARGBToYRow = ARGBToYRow_SSSE3; + ARGBToYJRow = ARGBToYJRow_SSSE3; } } #endif -#if defined(HAS_ARGBTOYROW_AVX2) && defined(HAS_ARGBTOUVROW_AVX2) +#if defined(HAS_ARGBTOYJROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToUVRow = ARGBToUVRow_Any_AVX2; - ARGBToYRow = ARGBToYRow_Any_AVX2; + ARGBToYJRow = ARGBToYJRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_AVX2; - ARGBToYRow = ARGBToYRow_AVX2; + ARGBToYJRow = ARGBToYJRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBTOUVJROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVJRow = ARGBToUVJRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToUVJRow = ARGBToUVJRow_SSSE3; } } #endif +#if defined(HAS_ARGBTOUVJROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVJRow = ARGBToUVJRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVJRow = ARGBToUVJRow_AVX2; + } + } #endif +#endif // HAS_RAWTOYJROW { -#if !(defined(HAS_RAWTOYROW_NEON) || defined(HAS_RAWTOYROW_MSA) || \ - defined(HAS_RAWTOYROW_MMI)) +#if !defined(HAS_RAWTOYJROW) // Allocate 2 rows of ARGB. - const int kRowSize = (width * 4 + 31) & ~31; - align_buffer_64(row, kRowSize * 2); + const int row_size = (width * 4 + 31) & ~31; + align_buffer_64(row, row_size * 2); #endif for (y = 0; y < height - 1; y += 2) { -#if (defined(HAS_RAWTOYROW_NEON) || defined(HAS_RAWTOYROW_MSA) || \ - defined(HAS_RAWTOYROW_MMI)) - RAWToUVRow(src_raw, src_stride_raw, dst_u, dst_v, width); - RAWToYRow(src_raw, dst_y, width); - RAWToYRow(src_raw + src_stride_raw, dst_y + dst_stride_y, width); +#if defined(HAS_RAWTOYJROW) + RAWToUVJRow(src_raw, src_stride_raw, dst_u, dst_v, width); + RAWToYJRow(src_raw, dst_y, width); + RAWToYJRow(src_raw + src_stride_raw, dst_y + dst_stride_y, width); #else RAWToARGBRow(src_raw, row, width); - RAWToARGBRow(src_raw + src_stride_raw, row + kRowSize, width); - ARGBToUVRow(row, kRowSize, dst_u, dst_v, width); - ARGBToYRow(row, dst_y, width); - ARGBToYRow(row + kRowSize, dst_y + dst_stride_y, width); + RAWToARGBRow(src_raw + src_stride_raw, row + row_size, width); + ARGBToUVJRow(row, row_size, dst_u, dst_v, width); + ARGBToYJRow(row, dst_y, width); + ARGBToYJRow(row + row_size, dst_y + dst_stride_y, width); #endif src_raw += src_stride_raw * 2; dst_y += dst_stride_y * 2; @@ -1762,23 +2846,22 @@ int RAWToI420(const uint8_t* src_raw, dst_v += dst_stride_v; } if (height & 1) { -#if (defined(HAS_RAWTOYROW_NEON) || defined(HAS_RAWTOYROW_MSA) || \ - defined(HAS_RAWTOYROW_MMI)) - RAWToUVRow(src_raw, 0, dst_u, dst_v, width); - RAWToYRow(src_raw, dst_y, width); +#if defined(HAS_RAWTOYJROW) + RAWToUVJRow(src_raw, 0, dst_u, dst_v, width); + RAWToYJRow(src_raw, dst_y, width); #else RAWToARGBRow(src_raw, row, width); - ARGBToUVRow(row, 0, dst_u, dst_v, width); - ARGBToYRow(row, dst_y, width); + ARGBToUVJRow(row, 0, dst_u, dst_v, width); + ARGBToYJRow(row, dst_y, width); #endif } -#if !(defined(HAS_RAWTOYROW_NEON) || defined(HAS_RAWTOYROW_MSA) || \ - defined(HAS_RAWTOYROW_MMI)) +#if !defined(HAS_RAWTOYJROW) free_aligned_buffer_64(row); #endif } return 
0; } +#undef HAS_RAWTOYJROW // Convert RGB565 to I420. LIBYUV_API @@ -1794,7 +2877,7 @@ int RGB565ToI420(const uint8_t* src_rgb565, int height) { int y; #if (defined(HAS_RGB565TOYROW_NEON) || defined(HAS_RGB565TOYROW_MSA) || \ - defined(HAS_RGB565TOYROW_MMI)) + defined(HAS_RGB565TOYROW_LSX) || defined(HAS_RGB565TOYROW_LASX)) void (*RGB565ToUVRow)(const uint8_t* src_rgb565, int src_stride_rgb565, uint8_t* dst_u, uint8_t* dst_v, int width) = RGB565ToUVRow_C; @@ -1831,20 +2914,9 @@ int RGB565ToI420(const uint8_t* src_rgb565, } } } -// MMI and MSA version does direct RGB565 to YUV. -#elif (defined(HAS_RGB565TOYROW_MMI) || defined(HAS_RGB565TOYROW_MSA)) -#if defined(HAS_RGB565TOYROW_MMI) && defined(HAS_RGB565TOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - RGB565ToUVRow = RGB565ToUVRow_Any_MMI; - RGB565ToYRow = RGB565ToYRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - RGB565ToYRow = RGB565ToYRow_MMI; - if (IS_ALIGNED(width, 16)) { - RGB565ToUVRow = RGB565ToUVRow_MMI; - } - } - } -#endif +// MSA version does direct RGB565 to YUV. +#elif (defined(HAS_RGB565TOYROW_MSA) || defined(HAS_RGB565TOYROW_LSX) || \ + defined(HAS_RGB565TOYROW_LASX)) #if defined(HAS_RGB565TOYROW_MSA) && defined(HAS_RGB565TOUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { RGB565ToUVRow = RGB565ToUVRow_Any_MSA; @@ -1855,6 +2927,26 @@ int RGB565ToI420(const uint8_t* src_rgb565, } } #endif +#if defined(HAS_RGB565TOYROW_LSX) && defined(HAS_RGB565TOUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + RGB565ToUVRow = RGB565ToUVRow_Any_LSX; + RGB565ToYRow = RGB565ToYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + RGB565ToYRow = RGB565ToYRow_LSX; + RGB565ToUVRow = RGB565ToUVRow_LSX; + } + } +#endif +#if defined(HAS_RGB565TOYROW_LASX) && defined(HAS_RGB565TOUVROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + RGB565ToUVRow = RGB565ToUVRow_Any_LASX; + RGB565ToYRow = RGB565ToYRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + RGB565ToYRow = RGB565ToYRow_LASX; + RGB565ToUVRow = RGB565ToUVRow_LASX; + } + } +#endif // Other platforms do intermediate conversion from RGB565 to ARGB. #else #if defined(HAS_RGB565TOARGBROW_SSE2) @@ -1873,46 +2965,58 @@ int RGB565ToI420(const uint8_t* src_rgb565, } } #endif -#if defined(HAS_ARGBTOYROW_SSSE3) && defined(HAS_ARGBTOUVROW_SSSE3) +#if defined(HAS_ARGBTOYROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBToUVRow = ARGBToUVRow_Any_SSSE3; ARGBToYRow = ARGBToYRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_SSSE3; ARGBToYRow = ARGBToYRow_SSSE3; } } #endif -#if defined(HAS_ARGBTOYROW_AVX2) && defined(HAS_ARGBTOUVROW_AVX2) +#if defined(HAS_ARGBTOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVRow = ARGBToUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToUVRow = ARGBToUVRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOYROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToUVRow = ARGBToUVRow_Any_AVX2; ARGBToYRow = ARGBToYRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_AVX2; ARGBToYRow = ARGBToYRow_AVX2; } } #endif +#if defined(HAS_ARGBTOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVRow = ARGBToUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_AVX2; + } + } +#endif #endif { #if !(defined(HAS_RGB565TOYROW_NEON) || defined(HAS_RGB565TOYROW_MSA) || \ - defined(HAS_RGB565TOYROW_MMI)) + defined(HAS_RGB565TOYROW_LSX) || defined(HAS_RGB565TOYROW_LASX)) // Allocate 2 rows of ARGB. 
- const int kRowSize = (width * 4 + 31) & ~31; - align_buffer_64(row, kRowSize * 2); + const int row_size = (width * 4 + 31) & ~31; + align_buffer_64(row, row_size * 2); #endif for (y = 0; y < height - 1; y += 2) { #if (defined(HAS_RGB565TOYROW_NEON) || defined(HAS_RGB565TOYROW_MSA) || \ - defined(HAS_RGB565TOYROW_MMI)) + defined(HAS_RGB565TOYROW_LSX) || defined(HAS_RGB565TOYROW_LASX)) RGB565ToUVRow(src_rgb565, src_stride_rgb565, dst_u, dst_v, width); RGB565ToYRow(src_rgb565, dst_y, width); RGB565ToYRow(src_rgb565 + src_stride_rgb565, dst_y + dst_stride_y, width); #else RGB565ToARGBRow(src_rgb565, row, width); - RGB565ToARGBRow(src_rgb565 + src_stride_rgb565, row + kRowSize, width); - ARGBToUVRow(row, kRowSize, dst_u, dst_v, width); + RGB565ToARGBRow(src_rgb565 + src_stride_rgb565, row + row_size, width); + ARGBToUVRow(row, row_size, dst_u, dst_v, width); ARGBToYRow(row, dst_y, width); - ARGBToYRow(row + kRowSize, dst_y + dst_stride_y, width); + ARGBToYRow(row + row_size, dst_y + dst_stride_y, width); #endif src_rgb565 += src_stride_rgb565 * 2; dst_y += dst_stride_y * 2; @@ -1921,7 +3025,7 @@ int RGB565ToI420(const uint8_t* src_rgb565, } if (height & 1) { #if (defined(HAS_RGB565TOYROW_NEON) || defined(HAS_RGB565TOYROW_MSA) || \ - defined(HAS_RGB565TOYROW_MMI)) + defined(HAS_RGB565TOYROW_LSX) || defined(HAS_RGB565TOYROW_LASX)) RGB565ToUVRow(src_rgb565, 0, dst_u, dst_v, width); RGB565ToYRow(src_rgb565, dst_y, width); #else @@ -1931,7 +3035,7 @@ int RGB565ToI420(const uint8_t* src_rgb565, #endif } #if !(defined(HAS_RGB565TOYROW_NEON) || defined(HAS_RGB565TOYROW_MSA) || \ - defined(HAS_RGB565TOYROW_MMI)) + defined(HAS_RGB565TOYROW_LSX) || defined(HAS_RGB565TOYROW_LASX)) free_aligned_buffer_64(row); #endif } @@ -1952,7 +3056,7 @@ int ARGB1555ToI420(const uint8_t* src_argb1555, int height) { int y; #if (defined(HAS_ARGB1555TOYROW_NEON) || defined(HAS_ARGB1555TOYROW_MSA) || \ - defined(HAS_ARGB1555TOYROW_MMI)) + defined(HAS_ARGB1555TOYROW_LSX) || defined(HAS_ARGB1555TOYROW_LASX)) void (*ARGB1555ToUVRow)(const uint8_t* src_argb1555, int src_stride_argb1555, uint8_t* dst_u, uint8_t* dst_v, int width) = ARGB1555ToUVRow_C; @@ -1990,20 +3094,9 @@ int ARGB1555ToI420(const uint8_t* src_argb1555, } } } -// MMI and MSA version does direct ARGB1555 to YUV. -#elif (defined(HAS_ARGB1555TOYROW_MMI) || defined(HAS_ARGB1555TOYROW_MSA)) -#if defined(HAS_ARGB1555TOYROW_MMI) && defined(HAS_ARGB1555TOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGB1555ToUVRow = ARGB1555ToUVRow_Any_MMI; - ARGB1555ToYRow = ARGB1555ToYRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGB1555ToYRow = ARGB1555ToYRow_MMI; - if (IS_ALIGNED(width, 16)) { - ARGB1555ToUVRow = ARGB1555ToUVRow_MMI; - } - } - } -#endif +// MSA version does direct ARGB1555 to YUV. 
+#elif (defined(HAS_ARGB1555TOYROW_MSA) || defined(HAS_ARGB1555TOYROW_LSX) || \ + defined(HAS_ARGB1555TOYROW_LASX)) #if defined(HAS_ARGB1555TOYROW_MSA) && defined(HAS_ARGB1555TOUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGB1555ToUVRow = ARGB1555ToUVRow_Any_MSA; @@ -2014,6 +3107,26 @@ int ARGB1555ToI420(const uint8_t* src_argb1555, } } #endif +#if defined(HAS_ARGB1555TOYROW_LSX) && defined(HAS_ARGB1555TOUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGB1555ToUVRow = ARGB1555ToUVRow_Any_LSX; + ARGB1555ToYRow = ARGB1555ToYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGB1555ToYRow = ARGB1555ToYRow_LSX; + ARGB1555ToUVRow = ARGB1555ToUVRow_LSX; + } + } +#endif +#if defined(HAS_ARGB1555TOYROW_LASX) && defined(HAS_ARGB1555TOUVROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGB1555ToUVRow = ARGB1555ToUVRow_Any_LASX; + ARGB1555ToYRow = ARGB1555ToYRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGB1555ToYRow = ARGB1555ToYRow_LASX; + ARGB1555ToUVRow = ARGB1555ToUVRow_LASX; + } + } +#endif // Other platforms do intermediate conversion from ARGB1555 to ARGB. #else #if defined(HAS_ARGB1555TOARGBROW_SSE2) @@ -2032,49 +3145,61 @@ int ARGB1555ToI420(const uint8_t* src_argb1555, } } #endif -#if defined(HAS_ARGBTOYROW_SSSE3) && defined(HAS_ARGBTOUVROW_SSSE3) +#if defined(HAS_ARGBTOYROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBToUVRow = ARGBToUVRow_Any_SSSE3; ARGBToYRow = ARGBToYRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_SSSE3; ARGBToYRow = ARGBToYRow_SSSE3; } } #endif -#if defined(HAS_ARGBTOYROW_AVX2) && defined(HAS_ARGBTOUVROW_AVX2) +#if defined(HAS_ARGBTOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVRow = ARGBToUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToUVRow = ARGBToUVRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOYROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToUVRow = ARGBToUVRow_Any_AVX2; ARGBToYRow = ARGBToYRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_AVX2; ARGBToYRow = ARGBToYRow_AVX2; } } #endif +#if defined(HAS_ARGBTOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVRow = ARGBToUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_AVX2; + } + } +#endif #endif { #if !(defined(HAS_ARGB1555TOYROW_NEON) || defined(HAS_ARGB1555TOYROW_MSA) || \ - defined(HAS_ARGB1555TOYROW_MMI)) + defined(HAS_ARGB1555TOYROW_LSX) || defined(HAS_ARGB1555TOYROW_LASX)) // Allocate 2 rows of ARGB. 
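Just above, the combined SSSE3/AVX2 guards are split so ARGBToYRow and ARGBToUVRow are chosen independently. The selection pattern is the same throughout this file: start from the portable C row, switch to the `_Any_` SIMD variant when the CPU flag is present, and upgrade to the fully aligned variant when the width is a multiple of the kernel's step. A condensed sketch of that pattern with a hypothetical kernel set (the names below are placeholders, not libyuv symbols):

#include <stdint.h>

typedef void (*RowFn)(const uint8_t* src, uint8_t* dst, int width);

// Stand-ins for a C row, its any-width SIMD wrapper and its aligned SIMD body.
static RowFn SelectRowFn(RowFn c_row, RowFn any_simd_row,
                         RowFn aligned_simd_row,
                         int cpu_has_feature, int width, int step) {
  RowFn fn = c_row;                  // portable fallback
  if (cpu_has_feature) {
    fn = any_simd_row;               // handles any width via a scalar tail
    if (width % step == 0) {
      fn = aligned_simd_row;         // whole row fits the SIMD step
    }
  }
  return fn;
}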
- const int kRowSize = (width * 4 + 31) & ~31; - align_buffer_64(row, kRowSize * 2); + const int row_size = (width * 4 + 31) & ~31; + align_buffer_64(row, row_size * 2); #endif for (y = 0; y < height - 1; y += 2) { #if (defined(HAS_ARGB1555TOYROW_NEON) || defined(HAS_ARGB1555TOYROW_MSA) || \ - defined(HAS_ARGB1555TOYROW_MMI)) + defined(HAS_ARGB1555TOYROW_LSX) || defined(HAS_ARGB1555TOYROW_LASX)) ARGB1555ToUVRow(src_argb1555, src_stride_argb1555, dst_u, dst_v, width); ARGB1555ToYRow(src_argb1555, dst_y, width); ARGB1555ToYRow(src_argb1555 + src_stride_argb1555, dst_y + dst_stride_y, width); #else ARGB1555ToARGBRow(src_argb1555, row, width); - ARGB1555ToARGBRow(src_argb1555 + src_stride_argb1555, row + kRowSize, + ARGB1555ToARGBRow(src_argb1555 + src_stride_argb1555, row + row_size, width); - ARGBToUVRow(row, kRowSize, dst_u, dst_v, width); + ARGBToUVRow(row, row_size, dst_u, dst_v, width); ARGBToYRow(row, dst_y, width); - ARGBToYRow(row + kRowSize, dst_y + dst_stride_y, width); + ARGBToYRow(row + row_size, dst_y + dst_stride_y, width); #endif src_argb1555 += src_stride_argb1555 * 2; dst_y += dst_stride_y * 2; @@ -2083,7 +3208,7 @@ int ARGB1555ToI420(const uint8_t* src_argb1555, } if (height & 1) { #if (defined(HAS_ARGB1555TOYROW_NEON) || defined(HAS_ARGB1555TOYROW_MSA) || \ - defined(HAS_ARGB1555TOYROW_MMI)) + defined(HAS_ARGB1555TOYROW_LSX) || defined(HAS_ARGB1555TOYROW_LASX)) ARGB1555ToUVRow(src_argb1555, 0, dst_u, dst_v, width); ARGB1555ToYRow(src_argb1555, dst_y, width); #else @@ -2093,7 +3218,7 @@ int ARGB1555ToI420(const uint8_t* src_argb1555, #endif } #if !(defined(HAS_ARGB1555TOYROW_NEON) || defined(HAS_ARGB1555TOYROW_MSA) || \ - defined(HAS_ARGB1555TOYROW_MMI)) + defined(HAS_ARGB1555TOYROW_LSX) || defined(HAS_ARGB1555TOYROW_LASX)) free_aligned_buffer_64(row); #endif } @@ -2113,7 +3238,7 @@ int ARGB4444ToI420(const uint8_t* src_argb4444, int width, int height) { int y; -#if (defined(HAS_ARGB4444TOYROW_NEON) || defined(HAS_ARGB4444TOYROW_MMI)) +#if defined(HAS_ARGB4444TOYROW_NEON) void (*ARGB4444ToUVRow)(const uint8_t* src_argb4444, int src_stride_argb4444, uint8_t* dst_u, uint8_t* dst_v, int width) = ARGB4444ToUVRow_C; @@ -2151,17 +3276,6 @@ int ARGB4444ToI420(const uint8_t* src_argb4444, } } } -#elif defined(HAS_ARGB4444TOYROW_MMI) && defined(HAS_ARGB4444TOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGB4444ToUVRow = ARGB4444ToUVRow_Any_MMI; - ARGB4444ToYRow = ARGB4444ToYRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGB4444ToYRow = ARGB4444ToYRow_MMI; - if (IS_ALIGNED(width, 16)) { - ARGB4444ToUVRow = ARGB4444ToUVRow_MMI; - } - } - } // Other platforms do intermediate conversion from ARGB4444 to ARGB. 
#else #if defined(HAS_ARGB4444TOARGBROW_SSE2) @@ -2188,35 +3302,51 @@ int ARGB4444ToI420(const uint8_t* src_argb4444, } } #endif -#if defined(HAS_ARGBTOYROW_SSSE3) && defined(HAS_ARGBTOUVROW_SSSE3) +#if defined(HAS_ARGB4444TOARGBROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGB4444ToARGBRow = ARGB4444ToARGBRow_LSX; + } + } +#endif +#if defined(HAS_ARGB4444TOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGB4444ToARGBRow = ARGB4444ToARGBRow_LASX; + } + } +#endif +#if defined(HAS_ARGBTOYROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBToUVRow = ARGBToUVRow_Any_SSSE3; ARGBToYRow = ARGBToYRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_SSSE3; ARGBToYRow = ARGBToYRow_SSSE3; } } #endif -#if defined(HAS_ARGBTOYROW_AVX2) && defined(HAS_ARGBTOUVROW_AVX2) +#if defined(HAS_ARGBTOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVRow = ARGBToUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToUVRow = ARGBToUVRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOYROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToUVRow = ARGBToUVRow_Any_AVX2; ARGBToYRow = ARGBToYRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_AVX2; ARGBToYRow = ARGBToYRow_AVX2; } } #endif -#if defined(HAS_ARGBTOYROW_MMI) && defined(HAS_ARGBTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToUVRow = ARGBToUVRow_Any_MMI; - ARGBToYRow = ARGBToYRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBToYRow = ARGBToYRow_MMI; - if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_MMI; - } +#if defined(HAS_ARGBTOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVRow = ARGBToUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_AVX2; } } #endif @@ -2232,28 +3362,46 @@ int ARGB4444ToI420(const uint8_t* src_argb4444, } } #endif +#if defined(HAS_ARGBTOYROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGBToYRow = ARGBToYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_LSX; + } + } +#endif +#if defined(HAS_ARGBTOYROW_LASX) && defined(HAS_ARGBTOUVROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToYRow = ARGBToYRow_Any_LASX; + ARGBToUVRow = ARGBToUVRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGBToYRow = ARGBToYRow_LASX; + ARGBToUVRow = ARGBToUVRow_LASX; + } + } +#endif #endif { -#if !(defined(HAS_ARGB4444TOYROW_NEON) || defined(HAS_ARGB4444TOYROW_MMI)) +#if !(defined(HAS_ARGB4444TOYROW_NEON)) // Allocate 2 rows of ARGB. 
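The new LSX/LASX paths above widen ARGB4444 to ARGB before the Y/UV extraction, as the other non-NEON platforms already do. Widening a 4-bit channel to 8 bits is normally done by replicating the nibble, so 0x0 maps to 0x00 and 0xF to 0xFF. A one-channel sketch of that expansion (assumed to match the scalar reference behaviour, not copied from it):

#include <stdint.h>

// Replicate a 4-bit value into both nibbles of a byte: x -> (x << 4) | x.
static uint8_t Expand4To8(uint8_t nibble) {
  return (uint8_t)(((nibble & 0x0f) << 4) | (nibble & 0x0f));
}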
- const int kRowSize = (width * 4 + 31) & ~31; - align_buffer_64(row, kRowSize * 2); + const int row_size = (width * 4 + 31) & ~31; + align_buffer_64(row, row_size * 2); #endif for (y = 0; y < height - 1; y += 2) { -#if (defined(HAS_ARGB4444TOYROW_NEON) || defined(HAS_ARGB4444TOYROW_MMI)) +#if defined(HAS_ARGB4444TOYROW_NEON) ARGB4444ToUVRow(src_argb4444, src_stride_argb4444, dst_u, dst_v, width); ARGB4444ToYRow(src_argb4444, dst_y, width); ARGB4444ToYRow(src_argb4444 + src_stride_argb4444, dst_y + dst_stride_y, width); #else ARGB4444ToARGBRow(src_argb4444, row, width); - ARGB4444ToARGBRow(src_argb4444 + src_stride_argb4444, row + kRowSize, + ARGB4444ToARGBRow(src_argb4444 + src_stride_argb4444, row + row_size, width); - ARGBToUVRow(row, kRowSize, dst_u, dst_v, width); + ARGBToUVRow(row, row_size, dst_u, dst_v, width); ARGBToYRow(row, dst_y, width); - ARGBToYRow(row + kRowSize, dst_y + dst_stride_y, width); + ARGBToYRow(row + row_size, dst_y + dst_stride_y, width); #endif src_argb4444 += src_stride_argb4444 * 2; dst_y += dst_stride_y * 2; @@ -2261,7 +3409,7 @@ int ARGB4444ToI420(const uint8_t* src_argb4444, dst_v += dst_stride_v; } if (height & 1) { -#if (defined(HAS_ARGB4444TOYROW_NEON) || defined(HAS_ARGB4444TOYROW_MMI)) +#if defined(HAS_ARGB4444TOYROW_NEON) ARGB4444ToUVRow(src_argb4444, 0, dst_u, dst_v, width); ARGB4444ToYRow(src_argb4444, dst_y, width); #else @@ -2270,7 +3418,7 @@ int ARGB4444ToI420(const uint8_t* src_argb4444, ARGBToYRow(row, dst_y, width); #endif } -#if !(defined(HAS_ARGB4444TOYROW_NEON) || defined(HAS_ARGB4444TOYROW_MMI)) +#if !(defined(HAS_ARGB4444TOYROW_NEON)) free_aligned_buffer_64(row); #endif } @@ -2321,19 +3469,11 @@ int RGB24ToJ400(const uint8_t* src_rgb24, #if defined(HAS_RGB24TOYJROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { RGB24ToYJRow = RGB24ToYJRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { RGB24ToYJRow = RGB24ToYJRow_NEON; } } #endif -#if defined(HAS_RGB24TOYJROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - RGB24ToYJRow = RGB24ToYJRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - RGB24ToYJRow = RGB24ToYJRow_MMI; - } - } -#endif #if defined(HAS_RGB24TOYJROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { RGB24ToYJRow = RGB24ToYJRow_Any_MSA; @@ -2342,6 +3482,22 @@ int RGB24ToJ400(const uint8_t* src_rgb24, } } #endif +#if defined(HAS_RGB24TOYJROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + RGB24ToYJRow = RGB24ToYJRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + RGB24ToYJRow = RGB24ToYJRow_LSX; + } + } +#endif +#if defined(HAS_RGB24TOYJROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + RGB24ToYJRow = RGB24ToYJRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + RGB24ToYJRow = RGB24ToYJRow_LASX; + } + } +#endif for (y = 0; y < height; ++y) { RGB24ToYJRow(src_rgb24, dst_yj, width); @@ -2365,6 +3521,7 @@ int RAWToJ400(const uint8_t* src_raw, if (!src_raw || !dst_yj || width <= 0 || height == 0) { return -1; } + if (height < 0) { height = -height; src_raw = src_raw + (height - 1) * src_stride_raw; @@ -2376,6 +3533,7 @@ int RAWToJ400(const uint8_t* src_raw, height = 1; src_stride_raw = dst_stride_yj = 0; } + #if defined(HAS_RAWTOYJROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { RAWToYJRow = RAWToYJRow_Any_SSSE3; @@ -2395,19 +3553,11 @@ int RAWToJ400(const uint8_t* src_raw, #if defined(HAS_RAWTOYJROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { RAWToYJRow = RAWToYJRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { RAWToYJRow = RAWToYJRow_NEON; } } #endif -#if defined(HAS_RAWTOYJROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - RAWToYJRow = RAWToYJRow_Any_MMI; - if 
(IS_ALIGNED(width, 8)) { - RAWToYJRow = RAWToYJRow_MMI; - } - } -#endif #if defined(HAS_RAWTOYJROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { RAWToYJRow = RAWToYJRow_Any_MSA; @@ -2416,6 +3566,22 @@ int RAWToJ400(const uint8_t* src_raw, } } #endif +#if defined(HAS_RAWTOYJROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + RAWToYJRow = RAWToYJRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + RAWToYJRow = RAWToYJRow_LSX; + } + } +#endif +#if defined(HAS_RAWTOYJROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + RAWToYJRow = RAWToYJRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + RAWToYJRow = RAWToYJRow_LASX; + } + } +#endif for (y = 0; y < height; ++y) { RAWToYJRow(src_raw, dst_yj, width); @@ -2425,18 +3591,6 @@ int RAWToJ400(const uint8_t* src_raw, return 0; } -static void SplitPixels(const uint8_t* src_u, - int src_pixel_stride_uv, - uint8_t* dst_u, - int width) { - int i; - for (i = 0; i < width; ++i) { - *dst_u = *src_u; - ++dst_u; - src_u += src_pixel_stride_uv; - } -} - // Convert Android420 to I420. LIBYUV_API int Android420ToI420(const uint8_t* src_y, @@ -2454,58 +3608,10 @@ int Android420ToI420(const uint8_t* src_y, int dst_stride_v, int width, int height) { - int y; - const ptrdiff_t vu_off = src_v - src_u; - int halfwidth = (width + 1) >> 1; - int halfheight = (height + 1) >> 1; - if (!src_u || !src_v || !dst_u || !dst_v || width <= 0 || height == 0) { - return -1; - } - // Negative height means invert the image. - if (height < 0) { - height = -height; - halfheight = (height + 1) >> 1; - src_y = src_y + (height - 1) * src_stride_y; - src_u = src_u + (halfheight - 1) * src_stride_u; - src_v = src_v + (halfheight - 1) * src_stride_v; - src_stride_y = -src_stride_y; - src_stride_u = -src_stride_u; - src_stride_v = -src_stride_v; - } - - if (dst_y) { - CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height); - } - - // Copy UV planes as is - I420 - if (src_pixel_stride_uv == 1) { - CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, halfwidth, halfheight); - CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, halfwidth, halfheight); - return 0; - // Split UV planes - NV21 - } - if (src_pixel_stride_uv == 2 && vu_off == -1 && - src_stride_u == src_stride_v) { - SplitUVPlane(src_v, src_stride_v, dst_v, dst_stride_v, dst_u, dst_stride_u, - halfwidth, halfheight); - return 0; - // Split UV planes - NV12 - } - if (src_pixel_stride_uv == 2 && vu_off == 1 && src_stride_u == src_stride_v) { - SplitUVPlane(src_u, src_stride_u, dst_u, dst_stride_u, dst_v, dst_stride_v, - halfwidth, halfheight); - return 0; - } - - for (y = 0; y < halfheight; ++y) { - SplitPixels(src_u, src_pixel_stride_uv, dst_u, halfwidth); - SplitPixels(src_v, src_pixel_stride_uv, dst_v, halfwidth); - src_u += src_stride_u; - src_v += src_stride_v; - dst_u += dst_stride_u; - dst_v += dst_stride_v; - } - return 0; + return Android420ToI420Rotate(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, src_pixel_stride_uv, dst_y, + dst_stride_y, dst_u, dst_stride_u, dst_v, + dst_stride_v, width, height, kRotate0); } #ifdef __cplusplus diff --git a/TMessagesProj/jni/third_party/libyuv/source/convert_argb.cc b/TMessagesProj/jni/third_party/libyuv/source/convert_argb.cc index 5e7225faf2..64425c5967 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/convert_argb.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/convert_argb.cc @@ -10,6 +10,9 @@ #include "libyuv/convert_argb.h" +#include + +#include "libyuv/convert_from_argb.h" #include "libyuv/cpu_id.h" #ifdef HAVE_JPEG #include "libyuv/mjpeg_decoder.h" @@ -17,6 +20,7 @@ #include 
"libyuv/planar_functions.h" // For CopyPlane and ARGBShuffle. #include "libyuv/rotate_argb.h" #include "libyuv/row.h" +#include "libyuv/scale_row.h" // For ScaleRowUp2_Linear and ScaleRowUp2_Bilinear #include "libyuv/video_common.h" #ifdef __cplusplus @@ -65,6 +69,7 @@ int I420ToARGBMatrix(const uint8_t* src_y, const uint8_t* v_buf, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width) = I422ToARGBRow_C; + assert(yuvconstants); if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) { return -1; } @@ -90,6 +95,15 @@ int I420ToARGBMatrix(const uint8_t* src_y, } } #endif +#if defined(HAS_I422TOARGBROW_AVX512BW) + if (TestCpuFlag(kCpuHasAVX512BW | kCpuHasAVX512VL) == + (kCpuHasAVX512BW | kCpuHasAVX512VL)) { + I422ToARGBRow = I422ToARGBRow_Any_AVX512BW; + if (IS_ALIGNED(width, 32)) { + I422ToARGBRow = I422ToARGBRow_AVX512BW; + } + } +#endif #if defined(HAS_I422TOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { I422ToARGBRow = I422ToARGBRow_Any_NEON; @@ -98,14 +112,6 @@ int I420ToARGBMatrix(const uint8_t* src_y, } } #endif -#if defined(HAS_I422TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToARGBRow = I422ToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422ToARGBRow = I422ToARGBRow_MMI; - } - } -#endif #if defined(HAS_I422TOARGBROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { I422ToARGBRow = I422ToARGBRow_Any_MSA; @@ -114,6 +120,14 @@ int I420ToARGBMatrix(const uint8_t* src_y, } } #endif +#if defined(HAS_I422TOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToARGBRow = I422ToARGBRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I422ToARGBRow = I422ToARGBRow_LASX; + } + } +#endif for (y = 0; y < height; ++y) { I422ToARGBRow(src_y, src_u, src_v, dst_argb, yuvconstants, width); @@ -289,6 +303,7 @@ int I422ToARGBMatrix(const uint8_t* src_y, const uint8_t* v_buf, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width) = I422ToARGBRow_C; + assert(yuvconstants); if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) { return -1; } @@ -321,6 +336,15 @@ int I422ToARGBMatrix(const uint8_t* src_y, } } #endif +#if defined(HAS_I422TOARGBROW_AVX512BW) + if (TestCpuFlag(kCpuHasAVX512BW | kCpuHasAVX512VL) == + (kCpuHasAVX512BW | kCpuHasAVX512VL)) { + I422ToARGBRow = I422ToARGBRow_Any_AVX512BW; + if (IS_ALIGNED(width, 32)) { + I422ToARGBRow = I422ToARGBRow_AVX512BW; + } + } +#endif #if defined(HAS_I422TOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { I422ToARGBRow = I422ToARGBRow_Any_NEON; @@ -329,14 +353,6 @@ int I422ToARGBMatrix(const uint8_t* src_y, } } #endif -#if defined(HAS_I422TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToARGBRow = I422ToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422ToARGBRow = I422ToARGBRow_MMI; - } - } -#endif #if defined(HAS_I422TOARGBROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { I422ToARGBRow = I422ToARGBRow_Any_MSA; @@ -345,6 +361,14 @@ int I422ToARGBMatrix(const uint8_t* src_y, } } #endif +#if defined(HAS_I422TOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToARGBRow = I422ToARGBRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I422ToARGBRow = I422ToARGBRow_LASX; + } + } +#endif for (y = 0; y < height; ++y) { I422ToARGBRow(src_y, src_u, src_v, dst_argb, yuvconstants, width); @@ -518,6 +542,7 @@ int I444ToARGBMatrix(const uint8_t* src_y, const uint8_t* v_buf, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width) = I444ToARGBRow_C; + assert(yuvconstants); if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) { return -1; } @@ -558,14 +583,6 @@ int 
I444ToARGBMatrix(const uint8_t* src_y, } } #endif -#if defined(HAS_I444TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I444ToARGBRow = I444ToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I444ToARGBRow = I444ToARGBRow_MMI; - } - } -#endif #if defined(HAS_I444TOARGBROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { I444ToARGBRow = I444ToARGBRow_Any_MSA; @@ -574,6 +591,14 @@ int I444ToARGBMatrix(const uint8_t* src_y, } } #endif +#if defined(HAS_I444TOARGBROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + I444ToARGBRow = I444ToARGBRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + I444ToARGBRow = I444ToARGBRow_LSX; + } + } +#endif for (y = 0; y < height; ++y) { I444ToARGBRow(src_y, src_u, src_v, dst_argb, yuvconstants, width); @@ -729,6 +754,128 @@ int U444ToABGR(const uint8_t* src_y, width, height); } +// Convert I444 to RGB24 with matrix. +LIBYUV_API +int I444ToRGB24Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I444ToRGB24Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I444ToRGB24Row_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_rgb24 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_rgb24 = dst_rgb24 + (height - 1) * dst_stride_rgb24; + dst_stride_rgb24 = -dst_stride_rgb24; + } + // Coalesce rows. + if (src_stride_y == width && src_stride_u == width && src_stride_v == width && + dst_stride_rgb24 == width * 3) { + width *= height; + height = 1; + src_stride_y = src_stride_u = src_stride_v = dst_stride_rgb24 = 0; + } +#if defined(HAS_I444TORGB24ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I444ToRGB24Row = I444ToRGB24Row_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + I444ToRGB24Row = I444ToRGB24Row_SSSE3; + } + } +#endif +#if defined(HAS_I444TORGB24ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I444ToRGB24Row = I444ToRGB24Row_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + I444ToRGB24Row = I444ToRGB24Row_AVX2; + } + } +#endif +#if defined(HAS_I444TORGB24ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I444ToRGB24Row = I444ToRGB24Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I444ToRGB24Row = I444ToRGB24Row_NEON; + } + } +#endif +#if defined(HAS_I444TORGB24ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I444ToRGB24Row = I444ToRGB24Row_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I444ToRGB24Row = I444ToRGB24Row_MSA; + } + } +#endif +#if defined(HAS_I444TORGB24ROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + I444ToRGB24Row = I444ToRGB24Row_Any_LSX; + if (IS_ALIGNED(width, 16)) { + I444ToRGB24Row = I444ToRGB24Row_LSX; + } + } +#endif + + for (y = 0; y < height; ++y) { + I444ToRGB24Row(src_y, src_u, src_v, dst_rgb24, yuvconstants, width); + dst_rgb24 += dst_stride_rgb24; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + return 0; +} + +// Convert I444 to RGB24. +LIBYUV_API +int I444ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return I444ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb24, dst_stride_rgb24, + &kYuvI601Constants, width, height); +} + +// Convert I444 to RAW. 
+LIBYUV_API +int I444ToRAW(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_raw, + int dst_stride_raw, + int width, + int height) { + return I444ToRGB24Matrix(src_y, src_stride_y, src_v, + src_stride_v, // Swap U and V + src_u, src_stride_u, dst_raw, dst_stride_raw, + &kYvuI601Constants, // Use Yvu matrix + width, height); +} + // Convert 10 bit YUV to ARGB with matrix. // TODO(fbarchard): Consider passing scale multiplier to I210ToARGB to // multiply 10 bit yuv into high bits to allow any number of bits. @@ -749,6 +896,7 @@ int I010ToAR30Matrix(const uint16_t* src_y, const uint16_t* v_buf, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width) = I210ToAR30Row_C; + assert(yuvconstants); if (!src_y || !src_u || !src_v || !dst_ar30 || width <= 0 || height == 0) { return -1; } @@ -888,6 +1036,64 @@ int U010ToAB30(const uint16_t* src_y, &kYuv2020Constants, width, height); } +// Convert 12 bit YUV to ARGB with matrix. +// TODO(fbarchard): Consider passing scale multiplier to I212ToARGB to +// multiply 12 bit yuv into high bits to allow any number of bits. +LIBYUV_API +int I012ToAR30Matrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I212ToAR30Row)(const uint16_t* y_buf, const uint16_t* u_buf, + const uint16_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I212ToAR30Row_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_ar30 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30; + dst_stride_ar30 = -dst_stride_ar30; + } +#if defined(HAS_I212TOAR30ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I212ToAR30Row = I212ToAR30Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I212ToAR30Row = I212ToAR30Row_SSSE3; + } + } +#endif +#if defined(HAS_I212TOAR30ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I212ToAR30Row = I212ToAR30Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I212ToAR30Row = I212ToAR30Row_AVX2; + } + } +#endif + for (y = 0; y < height; ++y) { + I212ToAR30Row(src_y, src_u, src_v, dst_ar30, yuvconstants, width); + dst_ar30 += dst_stride_ar30; + src_y += src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } + } + return 0; +} + // Convert 10 bit YUV to ARGB with matrix. // TODO(fbarchard): Consider passing scale multiplier to I210ToARGB to // multiply 10 bit yuv into high bits to allow any number of bits. 
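I012ToAR30Matrix above converts 4:2:0 content, so its row loop advances src_u and src_v only after odd rows and each chroma row is reused for two consecutive luma rows. A standalone sketch of that traversal (buffer names are placeholders):

#include <stdint.h>

// Walk a 4:2:0 frame: chroma pointers move at half the vertical rate of luma.
static void Walk420Rows(const uint16_t* y, int y_stride,
                        const uint16_t* u, int u_stride,
                        const uint16_t* v, int v_stride,
                        int height) {
  for (int row = 0; row < height; ++row) {
    /* convert one row from y, u, v here */
    y += y_stride;
    if (row & 1) {  // one chroma row per two luma rows
      u += u_stride;
      v += v_stride;
    }
  }
}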
@@ -908,6 +1114,7 @@ int I210ToAR30Matrix(const uint16_t* src_y, const uint16_t* v_buf, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width) = I210ToAR30Row_C; + assert(yuvconstants); if (!src_y || !src_u || !src_v || !dst_ar30 || width <= 0 || height == 0) { return -1; } @@ -1045,6 +1252,59 @@ int U210ToAB30(const uint16_t* src_y, &kYuv2020Constants, width, height); } +LIBYUV_API +int I410ToAR30Matrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I410ToAR30Row)(const uint16_t* y_buf, const uint16_t* u_buf, + const uint16_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I410ToAR30Row_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_ar30 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30; + dst_stride_ar30 = -dst_stride_ar30; + } +#if defined(HAS_I410TOAR30ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I410ToAR30Row = I410ToAR30Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I410ToAR30Row = I410ToAR30Row_SSSE3; + } + } +#endif +#if defined(HAS_I410TOAR30ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I410ToAR30Row = I410ToAR30Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I410ToAR30Row = I410ToAR30Row_AVX2; + } + } +#endif + for (y = 0; y < height; ++y) { + I410ToAR30Row(src_y, src_u, src_v, dst_ar30, yuvconstants, width); + dst_ar30 += dst_stride_ar30; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + return 0; +} + // Convert 10 bit YUV to ARGB with matrix. LIBYUV_API int I010ToARGBMatrix(const uint16_t* src_y, @@ -1063,6 +1323,7 @@ int I010ToARGBMatrix(const uint16_t* src_y, const uint16_t* v_buf, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width) = I210ToARGBRow_C; + assert(yuvconstants); if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) { return -1; } @@ -1087,14 +1348,6 @@ int I010ToARGBMatrix(const uint16_t* src_y, I210ToARGBRow = I210ToARGBRow_AVX2; } } -#endif -#if defined(HAS_I210TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I210ToARGBRow = I210ToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I210ToARGBRow = I210ToARGBRow_MMI; - } - } #endif for (y = 0; y < height; ++y) { I210ToARGBRow(src_y, src_u, src_v, dst_argb, yuvconstants, width); @@ -1216,9 +1469,9 @@ int U010ToABGR(const uint16_t* src_y, width, height); } -// Convert 10 bit 422 YUV to ARGB with matrix. +// Convert 12 bit YUV to ARGB with matrix. 
LIBYUV_API -int I210ToARGBMatrix(const uint16_t* src_y, +int I012ToARGBMatrix(const uint16_t* src_y, int src_stride_y, const uint16_t* src_u, int src_stride_u, @@ -1230,10 +1483,11 @@ int I210ToARGBMatrix(const uint16_t* src_y, int width, int height) { int y; - void (*I210ToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf, + void (*I212ToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf, const uint16_t* v_buf, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width) = - I210ToARGBRow_C; + I212ToARGBRow_C; + assert(yuvconstants); if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) { return -1; } @@ -1243,60 +1497,108 @@ int I210ToARGBMatrix(const uint16_t* src_y, dst_argb = dst_argb + (height - 1) * dst_stride_argb; dst_stride_argb = -dst_stride_argb; } -#if defined(HAS_I210TOARGBROW_SSSE3) +#if defined(HAS_I212TOARGBROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - I210ToARGBRow = I210ToARGBRow_Any_SSSE3; + I212ToARGBRow = I212ToARGBRow_Any_SSSE3; if (IS_ALIGNED(width, 8)) { - I210ToARGBRow = I210ToARGBRow_SSSE3; + I212ToARGBRow = I212ToARGBRow_SSSE3; } } #endif -#if defined(HAS_I210TOARGBROW_AVX2) +#if defined(HAS_I212TOARGBROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - I210ToARGBRow = I210ToARGBRow_Any_AVX2; + I212ToARGBRow = I212ToARGBRow_Any_AVX2; if (IS_ALIGNED(width, 16)) { - I210ToARGBRow = I210ToARGBRow_AVX2; - } - } -#endif -#if defined(HAS_I210TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I210ToARGBRow = I210ToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I210ToARGBRow = I210ToARGBRow_MMI; + I212ToARGBRow = I212ToARGBRow_AVX2; } } #endif for (y = 0; y < height; ++y) { - I210ToARGBRow(src_y, src_u, src_v, dst_argb, yuvconstants, width); + I212ToARGBRow(src_y, src_u, src_v, dst_argb, yuvconstants, width); dst_argb += dst_stride_argb; src_y += src_stride_y; - src_u += src_stride_u; - src_v += src_stride_v; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } } return 0; } -// Convert I210 to ARGB. -LIBYUV_API -int I210ToARGB(const uint16_t* src_y, - int src_stride_y, - const uint16_t* src_u, - int src_stride_u, - const uint16_t* src_v, - int src_stride_v, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { - return I210ToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_argb, dst_stride_argb, - &kYuvI601Constants, width, height); -} - -// Convert I210 to ABGR. +// Convert 10 bit 422 YUV to ARGB with matrix. LIBYUV_API -int I210ToABGR(const uint16_t* src_y, +int I210ToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I210ToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf, + const uint16_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I210ToARGBRow_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; + } +#if defined(HAS_I210TOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I210ToARGBRow = I210ToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I210ToARGBRow = I210ToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_I210TOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I210ToARGBRow = I210ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I210ToARGBRow = I210ToARGBRow_AVX2; + } + } +#endif + for (y = 0; y < height; ++y) { + I210ToARGBRow(src_y, src_u, src_v, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + return 0; +} + +// Convert I210 to ARGB. +LIBYUV_API +int I210ToARGB(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return I210ToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, + &kYuvI601Constants, width, height); +} + +// Convert I210 to ABGR. +LIBYUV_API +int I210ToABGR(const uint16_t* src_y, int src_stride_y, const uint16_t* src_u, int src_stride_u, @@ -1385,30 +1687,24 @@ int U210ToABGR(const uint16_t* src_y, width, height); } -// Convert I420 with Alpha to preattenuated ARGB with matrix. LIBYUV_API -int I420AlphaToARGBMatrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - const uint8_t* src_a, - int src_stride_a, - uint8_t* dst_argb, - int dst_stride_argb, - const struct YuvConstants* yuvconstants, - int width, - int height, - int attenuate) { +int I410ToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; - void (*I422AlphaToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, const uint8_t* a_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width) = I422AlphaToARGBRow_C; - void (*ARGBAttenuateRow)(const uint8_t* src_argb, uint8_t* dst_argb, - int width) = ARGBAttenuateRow_C; + void (*I410ToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf, + const uint16_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I410ToARGBRow_C; + assert(yuvconstants); if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) { return -1; } @@ -1418,161 +1714,99 @@ int I420AlphaToARGBMatrix(const uint8_t* src_y, dst_argb = dst_argb + (height - 1) * dst_stride_argb; dst_stride_argb = -dst_stride_argb; } -#if defined(HAS_I422ALPHATOARGBROW_SSSE3) +#if defined(HAS_I410TOARGBROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - I422AlphaToARGBRow = I422AlphaToARGBRow_Any_SSSE3; + I410ToARGBRow = I410ToARGBRow_Any_SSSE3; if (IS_ALIGNED(width, 8)) { - I422AlphaToARGBRow = I422AlphaToARGBRow_SSSE3; + I410ToARGBRow = I410ToARGBRow_SSSE3; } } #endif -#if defined(HAS_I422ALPHATOARGBROW_AVX2) +#if defined(HAS_I410TOARGBROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - I422AlphaToARGBRow = I422AlphaToARGBRow_Any_AVX2; + I410ToARGBRow = I410ToARGBRow_Any_AVX2; if (IS_ALIGNED(width, 16)) { - I422AlphaToARGBRow = I422AlphaToARGBRow_AVX2; + I410ToARGBRow = I410ToARGBRow_AVX2; } } #endif -#if 
defined(HAS_I422ALPHATOARGBROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - I422AlphaToARGBRow = I422AlphaToARGBRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { - I422AlphaToARGBRow = I422AlphaToARGBRow_NEON; - } + for (y = 0; y < height; ++y) { + I410ToARGBRow(src_y, src_u, src_v, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; } -#endif -#if defined(HAS_I422ALPHATOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422AlphaToARGBRow = I422AlphaToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422AlphaToARGBRow = I422AlphaToARGBRow_MMI; - } + return 0; +} + +LIBYUV_API +int P010ToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*P210ToARGBRow)( + const uint16_t* y_buf, const uint16_t* uv_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = P210ToARGBRow_C; + assert(yuvconstants); + if (!src_y || !src_uv || !dst_argb || width <= 0 || height == 0) { + return -1; } -#endif -#if defined(HAS_I422ALPHATOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422AlphaToARGBRow = I422AlphaToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422AlphaToARGBRow = I422AlphaToARGBRow_MSA; - } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; } -#endif -#if defined(HAS_ARGBATTENUATEROW_SSSE3) +#if defined(HAS_P210TOARGBROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBAttenuateRow = ARGBAttenuateRow_Any_SSSE3; - if (IS_ALIGNED(width, 4)) { - ARGBAttenuateRow = ARGBAttenuateRow_SSSE3; - } - } -#endif -#if defined(HAS_ARGBATTENUATEROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - ARGBAttenuateRow = ARGBAttenuateRow_Any_AVX2; - if (IS_ALIGNED(width, 8)) { - ARGBAttenuateRow = ARGBAttenuateRow_AVX2; - } - } -#endif -#if defined(HAS_ARGBATTENUATEROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - ARGBAttenuateRow = ARGBAttenuateRow_Any_NEON; + P210ToARGBRow = P210ToARGBRow_Any_SSSE3; if (IS_ALIGNED(width, 8)) { - ARGBAttenuateRow = ARGBAttenuateRow_NEON; - } - } -#endif -#if defined(HAS_ARGBATTENUATEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBAttenuateRow = ARGBAttenuateRow_Any_MMI; - if (IS_ALIGNED(width, 2)) { - ARGBAttenuateRow = ARGBAttenuateRow_MMI; + P210ToARGBRow = P210ToARGBRow_SSSE3; } } #endif -#if defined(HAS_ARGBATTENUATEROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - ARGBAttenuateRow = ARGBAttenuateRow_MSA; +#if defined(HAS_P210TOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + P210ToARGBRow = P210ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + P210ToARGBRow = P210ToARGBRow_AVX2; } } #endif - for (y = 0; y < height; ++y) { - I422AlphaToARGBRow(src_y, src_u, src_v, src_a, dst_argb, yuvconstants, - width); - if (attenuate) { - ARGBAttenuateRow(dst_argb, dst_argb, width); - } + P210ToARGBRow(src_y, src_uv, dst_argb, yuvconstants, width); dst_argb += dst_stride_argb; - src_a += src_stride_a; src_y += src_stride_y; if (y & 1) { - src_u += src_stride_u; - src_v += src_stride_v; + src_uv += src_stride_uv; } } return 0; } -// Convert I420 with Alpha to ARGB. 
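P010ToARGBMatrix above reads the biplanar 10-bit layout: one 16-bit Y plane plus one interleaved 16-bit UV plane at half resolution in both directions, which is why src_uv advances only after odd rows. A small sketch of the plane sizes for that layout (helper names are illustrative):

#include <stddef.h>
#include <stdint.h>

// Byte size of the P010 Y plane: one 16-bit container per pixel.
static size_t P010YPlaneBytes(int width, int height) {
  return (size_t)width * (size_t)height * sizeof(uint16_t);
}

// Byte size of the P010 UV plane: interleaved U,V pairs at half width and
// half height, still one 16-bit container per sample.
static size_t P010UVPlaneBytes(int width, int height) {
  size_t half_w = (size_t)((width + 1) / 2);
  size_t half_h = (size_t)((height + 1) / 2);
  return half_w * half_h * 2 * sizeof(uint16_t);
}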
-LIBYUV_API -int I420AlphaToARGB(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - const uint8_t* src_a, - int src_stride_a, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height, - int attenuate) { - return I420AlphaToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, src_a, src_stride_a, dst_argb, - dst_stride_argb, &kYuvI601Constants, width, - height, attenuate); -} - -// Convert I420 with Alpha to ABGR. -LIBYUV_API -int I420AlphaToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - const uint8_t* src_a, - int src_stride_a, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height, - int attenuate) { - return I420AlphaToARGBMatrix( - src_y, src_stride_y, src_v, src_stride_v, // Swap U and V - src_u, src_stride_u, src_a, src_stride_a, dst_abgr, dst_stride_abgr, - &kYvuI601Constants, // Use Yvu matrix - width, height, attenuate); -} - -// Convert I400 to ARGB with matrix. LIBYUV_API -int I400ToARGBMatrix(const uint8_t* src_y, +int P210ToARGBMatrix(const uint16_t* src_y, int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, uint8_t* dst_argb, int dst_stride_argb, const struct YuvConstants* yuvconstants, int width, int height) { int y; - void (*I400ToARGBRow)(const uint8_t* y_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = - I400ToARGBRow_C; - if (!src_y || !dst_argb || width <= 0 || height == 0) { + void (*P210ToARGBRow)( + const uint16_t* y_buf, const uint16_t* uv_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = P210ToARGBRow_C; + assert(yuvconstants); + if (!src_y || !src_uv || !dst_argb || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. @@ -1581,741 +1815,850 @@ int I400ToARGBMatrix(const uint8_t* src_y, dst_argb = dst_argb + (height - 1) * dst_stride_argb; dst_stride_argb = -dst_stride_argb; } - // Coalesce rows. 
- if (src_stride_y == width && dst_stride_argb == width * 4) { - width *= height; - height = 1; - src_stride_y = dst_stride_argb = 0; - } -#if defined(HAS_I400TOARGBROW_SSE2) - if (TestCpuFlag(kCpuHasSSE2)) { - I400ToARGBRow = I400ToARGBRow_Any_SSE2; +#if defined(HAS_P210TOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + P210ToARGBRow = P210ToARGBRow_Any_SSSE3; if (IS_ALIGNED(width, 8)) { - I400ToARGBRow = I400ToARGBRow_SSE2; + P210ToARGBRow = P210ToARGBRow_SSSE3; } } #endif -#if defined(HAS_I400TOARGBROW_AVX2) +#if defined(HAS_P210TOARGBROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - I400ToARGBRow = I400ToARGBRow_Any_AVX2; + P210ToARGBRow = P210ToARGBRow_Any_AVX2; if (IS_ALIGNED(width, 16)) { - I400ToARGBRow = I400ToARGBRow_AVX2; + P210ToARGBRow = P210ToARGBRow_AVX2; } } #endif -#if defined(HAS_I400TOARGBROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - I400ToARGBRow = I400ToARGBRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { - I400ToARGBRow = I400ToARGBRow_NEON; - } - } -#endif -#if defined(HAS_I400TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I400ToARGBRow = I400ToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - I400ToARGBRow = I400ToARGBRow_MMI; - } - } -#endif -#if defined(HAS_I400TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I400ToARGBRow = I400ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - I400ToARGBRow = I400ToARGBRow_MSA; - } - } -#endif - for (y = 0; y < height; ++y) { - I400ToARGBRow(src_y, dst_argb, yuvconstants, width); + P210ToARGBRow(src_y, src_uv, dst_argb, yuvconstants, width); dst_argb += dst_stride_argb; src_y += src_stride_y; + src_uv += src_stride_uv; } return 0; } -// Convert I400 to ARGB. -LIBYUV_API -int I400ToARGB(const uint8_t* src_y, - int src_stride_y, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { - return I400ToARGBMatrix(src_y, src_stride_y, dst_argb, dst_stride_argb, - &kYuvI601Constants, width, height); -} - -// Convert J400 to ARGB. LIBYUV_API -int J400ToARGB(const uint8_t* src_y, - int src_stride_y, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { +int P010ToAR30Matrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; - void (*J400ToARGBRow)(const uint8_t* src_y, uint8_t* dst_argb, int width) = - J400ToARGBRow_C; - if (!src_y || !dst_argb || width <= 0 || height == 0) { + void (*P210ToAR30Row)( + const uint16_t* y_buf, const uint16_t* uv_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = P210ToAR30Row_C; + assert(yuvconstants); + if (!src_y || !src_uv || !dst_ar30 || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - src_y = src_y + (height - 1) * src_stride_y; - src_stride_y = -src_stride_y; - } - // Coalesce rows. 
- if (src_stride_y == width && dst_stride_argb == width * 4) { - width *= height; - height = 1; - src_stride_y = dst_stride_argb = 0; + dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30; + dst_stride_ar30 = -dst_stride_ar30; } -#if defined(HAS_J400TOARGBROW_SSE2) - if (TestCpuFlag(kCpuHasSSE2)) { - J400ToARGBRow = J400ToARGBRow_Any_SSE2; +#if defined(HAS_P210TOAR30ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + P210ToAR30Row = P210ToAR30Row_Any_SSSE3; if (IS_ALIGNED(width, 8)) { - J400ToARGBRow = J400ToARGBRow_SSE2; + P210ToAR30Row = P210ToAR30Row_SSSE3; } } #endif -#if defined(HAS_J400TOARGBROW_AVX2) +#if defined(HAS_P210TOAR30ROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - J400ToARGBRow = J400ToARGBRow_Any_AVX2; + P210ToAR30Row = P210ToAR30Row_Any_AVX2; if (IS_ALIGNED(width, 16)) { - J400ToARGBRow = J400ToARGBRow_AVX2; + P210ToAR30Row = P210ToAR30Row_AVX2; } } #endif -#if defined(HAS_J400TOARGBROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - J400ToARGBRow = J400ToARGBRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { - J400ToARGBRow = J400ToARGBRow_NEON; + for (y = 0; y < height; ++y) { + P210ToAR30Row(src_y, src_uv, dst_ar30, yuvconstants, width); + dst_ar30 += dst_stride_ar30; + src_y += src_stride_y; + if (y & 1) { + src_uv += src_stride_uv; } } -#endif -#if defined(HAS_J400TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - J400ToARGBRow = J400ToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - J400ToARGBRow = J400ToARGBRow_MMI; + return 0; +} + +LIBYUV_API +int P210ToAR30Matrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*P210ToAR30Row)( + const uint16_t* y_buf, const uint16_t* uv_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = P210ToAR30Row_C; + assert(yuvconstants); + if (!src_y || !src_uv || !dst_ar30 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30; + dst_stride_ar30 = -dst_stride_ar30; + } +#if defined(HAS_P210TOAR30ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + P210ToAR30Row = P210ToAR30Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + P210ToAR30Row = P210ToAR30Row_SSSE3; } } #endif -#if defined(HAS_J400TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - J400ToARGBRow = J400ToARGBRow_Any_MSA; +#if defined(HAS_P210TOAR30ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + P210ToAR30Row = P210ToAR30Row_Any_AVX2; if (IS_ALIGNED(width, 16)) { - J400ToARGBRow = J400ToARGBRow_MSA; + P210ToAR30Row = P210ToAR30Row_AVX2; } } #endif for (y = 0; y < height; ++y) { - J400ToARGBRow(src_y, dst_argb, width); + P210ToAR30Row(src_y, src_uv, dst_ar30, yuvconstants, width); + dst_ar30 += dst_stride_ar30; src_y += src_stride_y; - dst_argb += dst_stride_argb; + src_uv += src_stride_uv; } return 0; } -// Shuffle table for converting BGRA to ARGB. -static const uvec8 kShuffleMaskBGRAToARGB = { - 3u, 2u, 1u, 0u, 7u, 6u, 5u, 4u, 11u, 10u, 9u, 8u, 15u, 14u, 13u, 12u}; - -// Shuffle table for converting ABGR to ARGB. -static const uvec8 kShuffleMaskABGRToARGB = { - 2u, 1u, 0u, 3u, 6u, 5u, 4u, 7u, 10u, 9u, 8u, 11u, 14u, 13u, 12u, 15u}; - -// Shuffle table for converting RGBA to ARGB. -static const uvec8 kShuffleMaskRGBAToARGB = { - 1u, 2u, 3u, 0u, 5u, 6u, 7u, 4u, 9u, 10u, 11u, 8u, 13u, 14u, 15u, 12u}; - -// Convert BGRA to ARGB. 
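The BGRA/ABGR/RGBA wrappers removed below were thin calls to ARGBShuffle with the 16-byte masks declared just above; within every 4-byte pixel, each mask entry names the source byte that lands in that destination byte, so {3,2,1,0} simply reverses the channel order. A scalar per-pixel equivalent (the real kernels shuffle 16 bytes at a time; the mapping direction is inferred from the masks rather than taken from the row kernels):

#include <stdint.h>

// Reorder one 4-byte pixel: dst[i] takes src[mask[i]], e.g. mask {3,2,1,0}
// reverses the byte order the way kShuffleMaskBGRAToARGB does per pixel.
static void ShufflePixel(const uint8_t src[4], const uint8_t mask[4],
                         uint8_t dst[4]) {
  for (int i = 0; i < 4; ++i) {
    dst[i] = src[mask[i]];
  }
}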
-LIBYUV_API -int BGRAToARGB(const uint8_t* src_bgra, - int src_stride_bgra, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { - return ARGBShuffle(src_bgra, src_stride_bgra, dst_argb, dst_stride_argb, - (const uint8_t*)(&kShuffleMaskBGRAToARGB), width, height); -} - -// Convert ARGB to BGRA (same as BGRAToARGB). -LIBYUV_API -int ARGBToBGRA(const uint8_t* src_bgra, - int src_stride_bgra, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { - return ARGBShuffle(src_bgra, src_stride_bgra, dst_argb, dst_stride_argb, - (const uint8_t*)(&kShuffleMaskBGRAToARGB), width, height); -} - -// Convert ABGR to ARGB. -LIBYUV_API -int ABGRToARGB(const uint8_t* src_abgr, - int src_stride_abgr, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { - return ARGBShuffle(src_abgr, src_stride_abgr, dst_argb, dst_stride_argb, - (const uint8_t*)(&kShuffleMaskABGRToARGB), width, height); -} - -// Convert ARGB to ABGR to (same as ABGRToARGB). -LIBYUV_API -int ARGBToABGR(const uint8_t* src_abgr, - int src_stride_abgr, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { - return ARGBShuffle(src_abgr, src_stride_abgr, dst_argb, dst_stride_argb, - (const uint8_t*)(&kShuffleMaskABGRToARGB), width, height); -} - -// Convert RGBA to ARGB. -LIBYUV_API -int RGBAToARGB(const uint8_t* src_rgba, - int src_stride_rgba, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { - return ARGBShuffle(src_rgba, src_stride_rgba, dst_argb, dst_stride_argb, - (const uint8_t*)(&kShuffleMaskRGBAToARGB), width, height); -} - -// Convert RGB24 to ARGB. +// Convert I420 with Alpha to preattenuated ARGB with matrix. LIBYUV_API -int RGB24ToARGB(const uint8_t* src_rgb24, - int src_stride_rgb24, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { +int I420AlphaToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate) { int y; - void (*RGB24ToARGBRow)(const uint8_t* src_rgb, uint8_t* dst_argb, int width) = - RGB24ToARGBRow_C; - if (!src_rgb24 || !dst_argb || width <= 0 || height == 0) { + void (*I422AlphaToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, const uint8_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) = I422AlphaToARGBRow_C; + void (*ARGBAttenuateRow)(const uint8_t* src_argb, uint8_t* dst_argb, + int width) = ARGBAttenuateRow_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !src_a || !dst_argb || width <= 0 || + height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - src_rgb24 = src_rgb24 + (height - 1) * src_stride_rgb24; - src_stride_rgb24 = -src_stride_rgb24; - } - // Coalesce rows. 
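The "Coalesce rows" check that follows (and the ones kept in the new matrix converters such as I444ToRGB24Matrix) folds the whole image into a single very wide row whenever every row is stored back to back, so the row kernel runs once instead of once per line. A standalone sketch of that transform for a single source/destination pair:

// If rows are contiguous in both buffers, convert the frame as one long row.
static void CoalesceRows(int* width, int* height,
                         int* src_stride, int* dst_stride,
                         int src_bpp, int dst_bpp) {
  if (*src_stride == *width * src_bpp && *dst_stride == *width * dst_bpp) {
    *width *= *height;
    *height = 1;
    *src_stride = 0;
    *dst_stride = 0;
  }
}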
- if (src_stride_rgb24 == width * 3 && dst_stride_argb == width * 4) { - width *= height; - height = 1; - src_stride_rgb24 = dst_stride_argb = 0; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; } -#if defined(HAS_RGB24TOARGBROW_SSSE3) +#if defined(HAS_I422ALPHATOARGBROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - RGB24ToARGBRow = RGB24ToARGBRow_Any_SSSE3; + I422AlphaToARGBRow = I422AlphaToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422AlphaToARGBRow = I422AlphaToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_I422ALPHATOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422AlphaToARGBRow = I422AlphaToARGBRow_Any_AVX2; if (IS_ALIGNED(width, 16)) { - RGB24ToARGBRow = RGB24ToARGBRow_SSSE3; + I422AlphaToARGBRow = I422AlphaToARGBRow_AVX2; } } #endif -#if defined(HAS_RGB24TOARGBROW_NEON) +#if defined(HAS_I422ALPHATOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - RGB24ToARGBRow = RGB24ToARGBRow_Any_NEON; + I422AlphaToARGBRow = I422AlphaToARGBRow_Any_NEON; if (IS_ALIGNED(width, 8)) { - RGB24ToARGBRow = RGB24ToARGBRow_NEON; + I422AlphaToARGBRow = I422AlphaToARGBRow_NEON; } } #endif -#if defined(HAS_RGB24TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - RGB24ToARGBRow = RGB24ToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - RGB24ToARGBRow = RGB24ToARGBRow_MMI; +#if defined(HAS_I422ALPHATOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422AlphaToARGBRow = I422AlphaToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422AlphaToARGBRow = I422AlphaToARGBRow_MSA; } } #endif -#if defined(HAS_RGB24TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - RGB24ToARGBRow = RGB24ToARGBRow_Any_MSA; +#if defined(HAS_I422ALPHATOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422AlphaToARGBRow = I422AlphaToARGBRow_Any_LASX; if (IS_ALIGNED(width, 16)) { - RGB24ToARGBRow = RGB24ToARGBRow_MSA; + I422AlphaToARGBRow = I422AlphaToARGBRow_LASX; } } #endif - - for (y = 0; y < height; ++y) { - RGB24ToARGBRow(src_rgb24, dst_argb, width); - src_rgb24 += src_stride_rgb24; - dst_argb += dst_stride_argb; - } - return 0; -} - -// Convert RAW to ARGB. -LIBYUV_API -int RAWToARGB(const uint8_t* src_raw, - int src_stride_raw, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { - int y; - void (*RAWToARGBRow)(const uint8_t* src_rgb, uint8_t* dst_argb, int width) = - RAWToARGBRow_C; - if (!src_raw || !dst_argb || width <= 0 || height == 0) { - return -1; - } - // Negative height means invert the image. - if (height < 0) { - height = -height; - src_raw = src_raw + (height - 1) * src_stride_raw; - src_stride_raw = -src_stride_raw; - } - // Coalesce rows. 
- if (src_stride_raw == width * 3 && dst_stride_argb == width * 4) { - width *= height; - height = 1; - src_stride_raw = dst_stride_argb = 0; - } -#if defined(HAS_RAWTOARGBROW_SSSE3) +#if defined(HAS_ARGBATTENUATEROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - RAWToARGBRow = RAWToARGBRow_Any_SSSE3; - if (IS_ALIGNED(width, 16)) { - RAWToARGBRow = RAWToARGBRow_SSSE3; + ARGBAttenuateRow = ARGBAttenuateRow_Any_SSSE3; + if (IS_ALIGNED(width, 4)) { + ARGBAttenuateRow = ARGBAttenuateRow_SSSE3; } } #endif -#if defined(HAS_RAWTOARGBROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - RAWToARGBRow = RAWToARGBRow_Any_NEON; +#if defined(HAS_ARGBATTENUATEROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_AVX2; if (IS_ALIGNED(width, 8)) { - RAWToARGBRow = RAWToARGBRow_NEON; + ARGBAttenuateRow = ARGBAttenuateRow_AVX2; } } #endif -#if defined(HAS_RAWTOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - RAWToARGBRow = RAWToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - RAWToARGBRow = RAWToARGBRow_MMI; +#if defined(HAS_ARGBATTENUATEROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_NEON; } } #endif -#if defined(HAS_RAWTOARGBROW_MSA) +#if defined(HAS_ARGBATTENUATEROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { - RAWToARGBRow = RAWToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - RAWToARGBRow = RAWToARGBRow_MSA; + ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_MSA; } } #endif for (y = 0; y < height; ++y) { - RAWToARGBRow(src_raw, dst_argb, width); - src_raw += src_stride_raw; + I422AlphaToARGBRow(src_y, src_u, src_v, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + ARGBAttenuateRow(dst_argb, dst_argb, width); + } dst_argb += dst_stride_argb; + src_a += src_stride_a; + src_y += src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } } return 0; } -// Convert RAW to RGBA. +// Convert I422 with Alpha to preattenuated ARGB with matrix. LIBYUV_API -int RAWToRGBA(const uint8_t* src_raw, - int src_stride_raw, - uint8_t* dst_rgba, - int dst_stride_rgba, - int width, - int height) { +int I422AlphaToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate) { int y; - void (*RAWToRGBARow)(const uint8_t* src_rgb, uint8_t* dst_rgba, int width) = - RAWToRGBARow_C; - if (!src_raw || !dst_rgba || width <= 0 || height == 0) { + void (*I422AlphaToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, const uint8_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) = I422AlphaToARGBRow_C; + void (*ARGBAttenuateRow)(const uint8_t* src_argb, uint8_t* dst_argb, + int width) = ARGBAttenuateRow_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !src_a || !dst_argb || width <= 0 || + height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - src_raw = src_raw + (height - 1) * src_stride_raw; - src_stride_raw = -src_stride_raw; - } - // Coalesce rows. 
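In the relocated I420AlphaToARGBMatrix above, a nonzero attenuate flag runs ARGBAttenuateRow over each converted row, producing premultiplied (attenuated) ARGB by scaling the color channels by alpha. A reference formulation of that per-channel scaling (the SIMD row kernels use their own fixed-point rounding, so this is only the conceptual form):

#include <stdint.h>

// Premultiply one channel by alpha in [0, 255], rounding to nearest.
// Rounding in the actual row kernels may differ by +/-1.
static uint8_t AttenuateChannel(uint8_t c, uint8_t a) {
  return (uint8_t)(((uint32_t)c * a + 127u) / 255u);
}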
- if (src_stride_raw == width * 3 && dst_stride_rgba == width * 4) { - width *= height; - height = 1; - src_stride_raw = dst_stride_rgba = 0; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; } -#if defined(HAS_RAWTORGBAROW_SSSE3) +#if defined(HAS_I422ALPHATOARGBROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - RAWToRGBARow = RAWToRGBARow_Any_SSSE3; + I422AlphaToARGBRow = I422AlphaToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422AlphaToARGBRow = I422AlphaToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_I422ALPHATOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422AlphaToARGBRow = I422AlphaToARGBRow_Any_AVX2; if (IS_ALIGNED(width, 16)) { - RAWToRGBARow = RAWToRGBARow_SSSE3; + I422AlphaToARGBRow = I422AlphaToARGBRow_AVX2; } } #endif -#if defined(HAS_RAWTORGBAROW_NEON) +#if defined(HAS_I422ALPHATOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - RAWToRGBARow = RAWToRGBARow_Any_NEON; + I422AlphaToARGBRow = I422AlphaToARGBRow_Any_NEON; if (IS_ALIGNED(width, 8)) { - RAWToRGBARow = RAWToRGBARow_NEON; + I422AlphaToARGBRow = I422AlphaToARGBRow_NEON; } } #endif - - for (y = 0; y < height; ++y) { - RAWToRGBARow(src_raw, dst_rgba, width); - src_raw += src_stride_raw; - dst_rgba += dst_stride_rgba; - } - return 0; -} - -// Convert RGB565 to ARGB. -LIBYUV_API -int RGB565ToARGB(const uint8_t* src_rgb565, - int src_stride_rgb565, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { - int y; - void (*RGB565ToARGBRow)(const uint8_t* src_rgb565, uint8_t* dst_argb, - int width) = RGB565ToARGBRow_C; - if (!src_rgb565 || !dst_argb || width <= 0 || height == 0) { - return -1; - } - // Negative height means invert the image. - if (height < 0) { - height = -height; - src_rgb565 = src_rgb565 + (height - 1) * src_stride_rgb565; - src_stride_rgb565 = -src_stride_rgb565; - } - // Coalesce rows. 
- if (src_stride_rgb565 == width * 2 && dst_stride_argb == width * 4) { - width *= height; - height = 1; - src_stride_rgb565 = dst_stride_argb = 0; - } -#if defined(HAS_RGB565TOARGBROW_SSE2) - if (TestCpuFlag(kCpuHasSSE2)) { - RGB565ToARGBRow = RGB565ToARGBRow_Any_SSE2; +#if defined(HAS_I422ALPHATOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422AlphaToARGBRow = I422AlphaToARGBRow_Any_MSA; if (IS_ALIGNED(width, 8)) { - RGB565ToARGBRow = RGB565ToARGBRow_SSE2; + I422AlphaToARGBRow = I422AlphaToARGBRow_MSA; } } #endif -#if defined(HAS_RGB565TOARGBROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - RGB565ToARGBRow = RGB565ToARGBRow_Any_AVX2; +#if defined(HAS_I422ALPHATOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422AlphaToARGBRow = I422AlphaToARGBRow_Any_LASX; if (IS_ALIGNED(width, 16)) { - RGB565ToARGBRow = RGB565ToARGBRow_AVX2; + I422AlphaToARGBRow = I422AlphaToARGBRow_LASX; } } #endif -#if defined(HAS_RGB565TOARGBROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - RGB565ToARGBRow = RGB565ToARGBRow_Any_NEON; +#if defined(HAS_ARGBATTENUATEROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_SSSE3; + if (IS_ALIGNED(width, 4)) { + ARGBAttenuateRow = ARGBAttenuateRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_AVX2; if (IS_ALIGNED(width, 8)) { - RGB565ToARGBRow = RGB565ToARGBRow_NEON; + ARGBAttenuateRow = ARGBAttenuateRow_AVX2; } } #endif -#if defined(HAS_RGB565TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - RGB565ToARGBRow = RGB565ToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - RGB565ToARGBRow = RGB565ToARGBRow_MMI; +#if defined(HAS_ARGBATTENUATEROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_NEON; } } #endif -#if defined(HAS_RGB565TOARGBROW_MSA) +#if defined(HAS_ARGBATTENUATEROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { - RGB565ToARGBRow = RGB565ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - RGB565ToARGBRow = RGB565ToARGBRow_MSA; + ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_MSA; } } #endif for (y = 0; y < height; ++y) { - RGB565ToARGBRow(src_rgb565, dst_argb, width); - src_rgb565 += src_stride_rgb565; + I422AlphaToARGBRow(src_y, src_u, src_v, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + ARGBAttenuateRow(dst_argb, dst_argb, width); + } dst_argb += dst_stride_argb; + src_a += src_stride_a; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; } return 0; } -// Convert ARGB1555 to ARGB. +// Convert I444 with Alpha to preattenuated ARGB with matrix. 
LIBYUV_API -int ARGB1555ToARGB(const uint8_t* src_argb1555, - int src_stride_argb1555, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { +int I444AlphaToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate) { int y; - void (*ARGB1555ToARGBRow)(const uint8_t* src_argb1555, uint8_t* dst_argb, - int width) = ARGB1555ToARGBRow_C; - if (!src_argb1555 || !dst_argb || width <= 0 || height == 0) { + void (*I444AlphaToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, const uint8_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) = I444AlphaToARGBRow_C; + void (*ARGBAttenuateRow)(const uint8_t* src_argb, uint8_t* dst_argb, + int width) = ARGBAttenuateRow_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !src_a || !dst_argb || width <= 0 || + height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - src_argb1555 = src_argb1555 + (height - 1) * src_stride_argb1555; - src_stride_argb1555 = -src_stride_argb1555; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; } - // Coalesce rows. - if (src_stride_argb1555 == width * 2 && dst_stride_argb == width * 4) { - width *= height; - height = 1; - src_stride_argb1555 = dst_stride_argb = 0; - } -#if defined(HAS_ARGB1555TOARGBROW_SSE2) - if (TestCpuFlag(kCpuHasSSE2)) { - ARGB1555ToARGBRow = ARGB1555ToARGBRow_Any_SSE2; +#if defined(HAS_I444ALPHATOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_Any_SSSE3; if (IS_ALIGNED(width, 8)) { - ARGB1555ToARGBRow = ARGB1555ToARGBRow_SSE2; + I444AlphaToARGBRow = I444AlphaToARGBRow_SSSE3; } } #endif -#if defined(HAS_ARGB1555TOARGBROW_AVX2) +#if defined(HAS_I444ALPHATOARGBROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - ARGB1555ToARGBRow = ARGB1555ToARGBRow_Any_AVX2; + I444AlphaToARGBRow = I444AlphaToARGBRow_Any_AVX2; if (IS_ALIGNED(width, 16)) { - ARGB1555ToARGBRow = ARGB1555ToARGBRow_AVX2; + I444AlphaToARGBRow = I444AlphaToARGBRow_AVX2; } } #endif -#if defined(HAS_ARGB1555TOARGBROW_NEON) +#if defined(HAS_I444ALPHATOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - ARGB1555ToARGBRow = ARGB1555ToARGBRow_Any_NEON; + I444AlphaToARGBRow = I444AlphaToARGBRow_Any_NEON; if (IS_ALIGNED(width, 8)) { - ARGB1555ToARGBRow = ARGB1555ToARGBRow_NEON; + I444AlphaToARGBRow = I444AlphaToARGBRow_NEON; } } #endif -#if defined(HAS_ARGB1555TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGB1555ToARGBRow = ARGB1555ToARGBRow_Any_MMI; +#if defined(HAS_I444ALPHATOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_MSA; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_SSSE3; if (IS_ALIGNED(width, 4)) { - ARGB1555ToARGBRow = ARGB1555ToARGBRow_MMI; + ARGBAttenuateRow = ARGBAttenuateRow_SSSE3; } } #endif -#if defined(HAS_ARGB1555TOARGBROW_MSA) +#if defined(HAS_ARGBATTENUATEROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_AVX2; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_AVX2; + } + } +#endif +#if 
defined(HAS_ARGBATTENUATEROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_NEON; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { - ARGB1555ToARGBRow = ARGB1555ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGB1555ToARGBRow = ARGB1555ToARGBRow_MSA; + ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_MSA; } } #endif for (y = 0; y < height; ++y) { - ARGB1555ToARGBRow(src_argb1555, dst_argb, width); - src_argb1555 += src_stride_argb1555; + I444AlphaToARGBRow(src_y, src_u, src_v, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + ARGBAttenuateRow(dst_argb, dst_argb, width); + } dst_argb += dst_stride_argb; + src_a += src_stride_a; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; } return 0; } -// Convert ARGB4444 to ARGB. +// Convert I420 with Alpha to ARGB. LIBYUV_API -int ARGB4444ToARGB(const uint8_t* src_argb4444, - int src_stride_argb4444, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { +int I420AlphaToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height, + int attenuate) { + return I420AlphaToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, src_a, src_stride_a, dst_argb, + dst_stride_argb, &kYuvI601Constants, width, + height, attenuate); +} + +// Convert I420 with Alpha to ABGR. +LIBYUV_API +int I420AlphaToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height, + int attenuate) { + return I420AlphaToARGBMatrix( + src_y, src_stride_y, src_v, src_stride_v, // Swap U and V + src_u, src_stride_u, src_a, src_stride_a, dst_abgr, dst_stride_abgr, + &kYvuI601Constants, // Use Yvu matrix + width, height, attenuate); +} + +// Convert I422 with Alpha to ARGB. +LIBYUV_API +int I422AlphaToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height, + int attenuate) { + return I422AlphaToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, src_a, src_stride_a, dst_argb, + dst_stride_argb, &kYuvI601Constants, width, + height, attenuate); +} + +// Convert I422 with Alpha to ABGR. +LIBYUV_API +int I422AlphaToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height, + int attenuate) { + return I422AlphaToARGBMatrix( + src_y, src_stride_y, src_v, src_stride_v, // Swap U and V + src_u, src_stride_u, src_a, src_stride_a, dst_abgr, dst_stride_abgr, + &kYvuI601Constants, // Use Yvu matrix + width, height, attenuate); +} + +// Convert I444 with Alpha to ARGB. 
+LIBYUV_API +int I444AlphaToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height, + int attenuate) { + return I444AlphaToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, src_a, src_stride_a, dst_argb, + dst_stride_argb, &kYuvI601Constants, width, + height, attenuate); +} + +// Convert I444 with Alpha to ABGR. +LIBYUV_API +int I444AlphaToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height, + int attenuate) { + return I444AlphaToARGBMatrix( + src_y, src_stride_y, src_v, src_stride_v, // Swap U and V + src_u, src_stride_u, src_a, src_stride_a, dst_abgr, dst_stride_abgr, + &kYvuI601Constants, // Use Yvu matrix + width, height, attenuate); +} + +// Convert I010 with Alpha to preattenuated ARGB with matrix. +LIBYUV_API +int I010AlphaToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + const uint16_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate) { int y; - void (*ARGB4444ToARGBRow)(const uint8_t* src_argb4444, uint8_t* dst_argb, - int width) = ARGB4444ToARGBRow_C; - if (!src_argb4444 || !dst_argb || width <= 0 || height == 0) { + void (*I210AlphaToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf, + const uint16_t* v_buf, const uint16_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) = I210AlphaToARGBRow_C; + void (*ARGBAttenuateRow)(const uint8_t* src_argb, uint8_t* dst_argb, + int width) = ARGBAttenuateRow_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !src_a || !dst_argb || width <= 0 || + height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - src_argb4444 = src_argb4444 + (height - 1) * src_stride_argb4444; - src_stride_argb4444 = -src_stride_argb4444; - } - // Coalesce rows. 
- if (src_stride_argb4444 == width * 2 && dst_stride_argb == width * 4) { - width *= height; - height = 1; - src_stride_argb4444 = dst_stride_argb = 0; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; } -#if defined(HAS_ARGB4444TOARGBROW_SSE2) - if (TestCpuFlag(kCpuHasSSE2)) { - ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_SSE2; +#if defined(HAS_I210ALPHATOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I210AlphaToARGBRow = I210AlphaToARGBRow_Any_SSSE3; if (IS_ALIGNED(width, 8)) { - ARGB4444ToARGBRow = ARGB4444ToARGBRow_SSE2; + I210AlphaToARGBRow = I210AlphaToARGBRow_SSSE3; } } #endif -#if defined(HAS_ARGB4444TOARGBROW_AVX2) +#if defined(HAS_I210ALPHATOARGBROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_AVX2; + I210AlphaToARGBRow = I210AlphaToARGBRow_Any_AVX2; if (IS_ALIGNED(width, 16)) { - ARGB4444ToARGBRow = ARGB4444ToARGBRow_AVX2; + I210AlphaToARGBRow = I210AlphaToARGBRow_AVX2; } } #endif -#if defined(HAS_ARGB4444TOARGBROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_NEON; +#if defined(HAS_ARGBATTENUATEROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_SSSE3; + if (IS_ALIGNED(width, 4)) { + ARGBAttenuateRow = ARGBAttenuateRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_AVX2; if (IS_ALIGNED(width, 8)) { - ARGB4444ToARGBRow = ARGB4444ToARGBRow_NEON; + ARGBAttenuateRow = ARGBAttenuateRow_AVX2; } } #endif -#if defined(HAS_ARGB4444TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - ARGB4444ToARGBRow = ARGB4444ToARGBRow_MMI; +#if defined(HAS_ARGBATTENUATEROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_NEON; } } #endif -#if defined(HAS_ARGB4444TOARGBROW_MSA) +#if defined(HAS_ARGBATTENUATEROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { - ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 16)) { - ARGB4444ToARGBRow = ARGB4444ToARGBRow_MSA; + ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_MSA; } } #endif for (y = 0; y < height; ++y) { - ARGB4444ToARGBRow(src_argb4444, dst_argb, width); - src_argb4444 += src_stride_argb4444; + I210AlphaToARGBRow(src_y, src_u, src_v, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + ARGBAttenuateRow(dst_argb, dst_argb, width); + } dst_argb += dst_stride_argb; - } + src_a += src_stride_a; + src_y += src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } + } return 0; } -// Convert AR30 to ARGB. +// Convert I210 with Alpha to preattenuated ARGB with matrix. 
LIBYUV_API -int AR30ToARGB(const uint8_t* src_ar30, - int src_stride_ar30, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { +int I210AlphaToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + const uint16_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate) { int y; - if (!src_ar30 || !dst_argb || width <= 0 || height == 0) { + void (*I210AlphaToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf, + const uint16_t* v_buf, const uint16_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) = I210AlphaToARGBRow_C; + void (*ARGBAttenuateRow)(const uint8_t* src_argb, uint8_t* dst_argb, + int width) = ARGBAttenuateRow_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !src_a || !dst_argb || width <= 0 || + height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - src_ar30 = src_ar30 + (height - 1) * src_stride_ar30; - src_stride_ar30 = -src_stride_ar30; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; } - // Coalesce rows. - if (src_stride_ar30 == width * 4 && dst_stride_argb == width * 4) { - width *= height; - height = 1; - src_stride_ar30 = dst_stride_argb = 0; +#if defined(HAS_I210ALPHATOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I210AlphaToARGBRow = I210AlphaToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I210AlphaToARGBRow = I210AlphaToARGBRow_SSSE3; + } } - for (y = 0; y < height; ++y) { - AR30ToARGBRow_C(src_ar30, dst_argb, width); - src_ar30 += src_stride_ar30; - dst_argb += dst_stride_argb; +#endif +#if defined(HAS_I210ALPHATOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I210AlphaToARGBRow = I210AlphaToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I210AlphaToARGBRow = I210AlphaToARGBRow_AVX2; + } } - return 0; -} - -// Convert AR30 to ABGR. -LIBYUV_API -int AR30ToABGR(const uint8_t* src_ar30, - int src_stride_ar30, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height) { - int y; - if (!src_ar30 || !dst_abgr || width <= 0 || height == 0) { - return -1; +#endif +#if defined(HAS_ARGBATTENUATEROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_SSSE3; + if (IS_ALIGNED(width, 4)) { + ARGBAttenuateRow = ARGBAttenuateRow_SSSE3; + } } - // Negative height means invert the image. - if (height < 0) { - height = -height; - src_ar30 = src_ar30 + (height - 1) * src_stride_ar30; - src_stride_ar30 = -src_stride_ar30; +#endif +#if defined(HAS_ARGBATTENUATEROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_AVX2; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_AVX2; + } } - // Coalesce rows. 
- if (src_stride_ar30 == width * 4 && dst_stride_abgr == width * 4) { - width *= height; - height = 1; - src_stride_ar30 = dst_stride_abgr = 0; +#endif +#if defined(HAS_ARGBATTENUATEROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_NEON; + } } - for (y = 0; y < height; ++y) { - AR30ToABGRRow_C(src_ar30, dst_abgr, width); - src_ar30 += src_stride_ar30; - dst_abgr += dst_stride_abgr; +#endif +#if defined(HAS_ARGBATTENUATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_MSA; + } } - return 0; -} +#endif -// Convert AR30 to AB30. -LIBYUV_API -int AR30ToAB30(const uint8_t* src_ar30, - int src_stride_ar30, - uint8_t* dst_ab30, - int dst_stride_ab30, - int width, - int height) { - int y; - if (!src_ar30 || !dst_ab30 || width <= 0 || height == 0) { - return -1; - } - // Negative height means invert the image. - if (height < 0) { - height = -height; - src_ar30 = src_ar30 + (height - 1) * src_stride_ar30; - src_stride_ar30 = -src_stride_ar30; - } - // Coalesce rows. - if (src_stride_ar30 == width * 4 && dst_stride_ab30 == width * 4) { - width *= height; - height = 1; - src_stride_ar30 = dst_stride_ab30 = 0; - } for (y = 0; y < height; ++y) { - AR30ToAB30Row_C(src_ar30, dst_ab30, width); - src_ar30 += src_stride_ar30; - dst_ab30 += dst_stride_ab30; + I210AlphaToARGBRow(src_y, src_u, src_v, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + ARGBAttenuateRow(dst_argb, dst_argb, width); + } + dst_argb += dst_stride_argb; + src_a += src_stride_a; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; } return 0; } -// Convert NV12 to ARGB with matrix. +// Convert I410 with Alpha to preattenuated ARGB with matrix. LIBYUV_API -int NV12ToARGBMatrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_argb, - int dst_stride_argb, - const struct YuvConstants* yuvconstants, - int width, - int height) { +int I410AlphaToARGBMatrix(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + const uint16_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate) { int y; - void (*NV12ToARGBRow)( - const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = NV12ToARGBRow_C; - if (!src_y || !src_uv || !dst_argb || width <= 0 || height == 0) { + void (*I410AlphaToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf, + const uint16_t* v_buf, const uint16_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) = I410AlphaToARGBRow_C; + void (*ARGBAttenuateRow)(const uint8_t* src_argb, uint8_t* dst_argb, + int width) = ARGBAttenuateRow_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !src_a || !dst_argb || width <= 0 || + height == 0) { return -1; } // Negative height means invert the image. 
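Note on the alpha-aware converters introduced in the hunks above (I422/I444/I010/I210/I410 AlphaToARGBMatrix): they all follow the same per-row pattern — convert one row of Y/U/V/A to ARGB with the selected row function, then, if `attenuate` is nonzero, run ARGBAttenuateRow in place so the output is premultiplied by alpha. The 420-subsampled variants advance the chroma pointers only on odd rows via `if (y & 1)`, while the 422/444 variants advance U and V every row. A minimal scalar sketch of what the attenuate step means per pixel follows; AttenuateRowSketch is an illustrative helper, not libyuv's ARGBAttenuateRow_C, whose exact rounding may differ.

    // Illustrative only: premultiplies each colour channel by alpha.
    // Not the shipped ARGBAttenuateRow_C; libyuv's rounding may differ.
    #include <stdint.h>

    static void AttenuateRowSketch(const uint8_t* src_argb, uint8_t* dst_argb,
                                   int width) {
      for (int x = 0; x < width; ++x) {
        uint32_t b = src_argb[0], g = src_argb[1], r = src_argb[2], a = src_argb[3];
        dst_argb[0] = (uint8_t)(b * a / 255);  // premultiplied blue
        dst_argb[1] = (uint8_t)(g * a / 255);  // premultiplied green
        dst_argb[2] = (uint8_t)(r * a / 255);  // premultiplied red
        dst_argb[3] = (uint8_t)a;              // alpha channel is preserved
        src_argb += 4;
        dst_argb += 4;
      }
    }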
@@ -2324,74 +2667,85 @@ int NV12ToARGBMatrix(const uint8_t* src_y, dst_argb = dst_argb + (height - 1) * dst_stride_argb; dst_stride_argb = -dst_stride_argb; } -#if defined(HAS_NV12TOARGBROW_SSSE3) +#if defined(HAS_I410ALPHATOARGBROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - NV12ToARGBRow = NV12ToARGBRow_Any_SSSE3; + I410AlphaToARGBRow = I410AlphaToARGBRow_Any_SSSE3; if (IS_ALIGNED(width, 8)) { - NV12ToARGBRow = NV12ToARGBRow_SSSE3; + I410AlphaToARGBRow = I410AlphaToARGBRow_SSSE3; } } #endif -#if defined(HAS_NV12TOARGBROW_AVX2) +#if defined(HAS_I410ALPHATOARGBROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - NV12ToARGBRow = NV12ToARGBRow_Any_AVX2; + I410AlphaToARGBRow = I410AlphaToARGBRow_Any_AVX2; if (IS_ALIGNED(width, 16)) { - NV12ToARGBRow = NV12ToARGBRow_AVX2; + I410AlphaToARGBRow = I410AlphaToARGBRow_AVX2; } } #endif -#if defined(HAS_NV12TOARGBROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - NV12ToARGBRow = NV12ToARGBRow_Any_NEON; +#if defined(HAS_ARGBATTENUATEROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_SSSE3; + if (IS_ALIGNED(width, 4)) { + ARGBAttenuateRow = ARGBAttenuateRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_AVX2; if (IS_ALIGNED(width, 8)) { - NV12ToARGBRow = NV12ToARGBRow_NEON; + ARGBAttenuateRow = ARGBAttenuateRow_AVX2; } } #endif -#if defined(HAS_NV12TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - NV12ToARGBRow = NV12ToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - NV12ToARGBRow = NV12ToARGBRow_MMI; +#if defined(HAS_ARGBATTENUATEROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_NEON; } } #endif -#if defined(HAS_NV12TOARGBROW_MSA) +#if defined(HAS_ARGBATTENUATEROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { - NV12ToARGBRow = NV12ToARGBRow_Any_MSA; + ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; if (IS_ALIGNED(width, 8)) { - NV12ToARGBRow = NV12ToARGBRow_MSA; + ARGBAttenuateRow = ARGBAttenuateRow_MSA; } } #endif for (y = 0; y < height; ++y) { - NV12ToARGBRow(src_y, src_uv, dst_argb, yuvconstants, width); + I410AlphaToARGBRow(src_y, src_u, src_v, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + ARGBAttenuateRow(dst_argb, dst_argb, width); + } dst_argb += dst_stride_argb; + src_a += src_stride_a; src_y += src_stride_y; - if (y & 1) { - src_uv += src_stride_uv; - } + src_u += src_stride_u; + src_v += src_stride_v; } return 0; } -// Convert NV21 to ARGB with matrix. +// Convert I400 to ARGB with matrix. LIBYUV_API -int NV21ToARGBMatrix(const uint8_t* src_y, +int I400ToARGBMatrix(const uint8_t* src_y, int src_stride_y, - const uint8_t* src_vu, - int src_stride_vu, uint8_t* dst_argb, int dst_stride_argb, const struct YuvConstants* yuvconstants, int width, int height) { int y; - void (*NV21ToARGBRow)( - const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = NV21ToARGBRow_C; - if (!src_y || !src_vu || !dst_argb || width <= 0 || height == 0) { + void (*I400ToARGBRow)(const uint8_t* y_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I400ToARGBRow_C; + assert(yuvconstants); + if (!src_y || !dst_argb || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. 
@@ -2400,1682 +2754,1917 @@ int NV21ToARGBMatrix(const uint8_t* src_y, dst_argb = dst_argb + (height - 1) * dst_stride_argb; dst_stride_argb = -dst_stride_argb; } -#if defined(HAS_NV21TOARGBROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - NV21ToARGBRow = NV21ToARGBRow_Any_SSSE3; + // Coalesce rows. + if (src_stride_y == width && dst_stride_argb == width * 4) { + width *= height; + height = 1; + src_stride_y = dst_stride_argb = 0; + } +#if defined(HAS_I400TOARGBROW_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + I400ToARGBRow = I400ToARGBRow_Any_SSE2; if (IS_ALIGNED(width, 8)) { - NV21ToARGBRow = NV21ToARGBRow_SSSE3; + I400ToARGBRow = I400ToARGBRow_SSE2; } } #endif -#if defined(HAS_NV21TOARGBROW_AVX2) +#if defined(HAS_I400TOARGBROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - NV21ToARGBRow = NV21ToARGBRow_Any_AVX2; + I400ToARGBRow = I400ToARGBRow_Any_AVX2; if (IS_ALIGNED(width, 16)) { - NV21ToARGBRow = NV21ToARGBRow_AVX2; + I400ToARGBRow = I400ToARGBRow_AVX2; } } #endif -#if defined(HAS_NV21TOARGBROW_NEON) +#if defined(HAS_I400TOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - NV21ToARGBRow = NV21ToARGBRow_Any_NEON; + I400ToARGBRow = I400ToARGBRow_Any_NEON; if (IS_ALIGNED(width, 8)) { - NV21ToARGBRow = NV21ToARGBRow_NEON; + I400ToARGBRow = I400ToARGBRow_NEON; } } #endif -#if defined(HAS_NV21TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - NV21ToARGBRow = NV21ToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - NV21ToARGBRow = NV21ToARGBRow_MMI; +#if defined(HAS_I400TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I400ToARGBRow = I400ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + I400ToARGBRow = I400ToARGBRow_MSA; } } #endif -#if defined(HAS_NV21TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - NV21ToARGBRow = NV21ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - NV21ToARGBRow = NV21ToARGBRow_MSA; +#if defined(HAS_I400TOARGBROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + I400ToARGBRow = I400ToARGBRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + I400ToARGBRow = I400ToARGBRow_LSX; } } #endif for (y = 0; y < height; ++y) { - NV21ToARGBRow(src_y, src_vu, dst_argb, yuvconstants, width); + I400ToARGBRow(src_y, dst_argb, yuvconstants, width); dst_argb += dst_stride_argb; src_y += src_stride_y; - if (y & 1) { - src_vu += src_stride_vu; - } } return 0; } -// Convert NV12 to ARGB. +// Convert I400 to ARGB. LIBYUV_API -int NV12ToARGB(const uint8_t* src_y, +int I400ToARGB(const uint8_t* src_y, int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, uint8_t* dst_argb, int dst_stride_argb, int width, int height) { - return NV12ToARGBMatrix(src_y, src_stride_y, src_uv, src_stride_uv, dst_argb, - dst_stride_argb, &kYuvI601Constants, width, height); + return I400ToARGBMatrix(src_y, src_stride_y, dst_argb, dst_stride_argb, + &kYuvI601Constants, width, height); } -// Convert NV21 to ARGB. +// Convert J400 to ARGB. LIBYUV_API -int NV21ToARGB(const uint8_t* src_y, +int J400ToARGB(const uint8_t* src_y, int src_stride_y, - const uint8_t* src_vu, - int src_stride_vu, uint8_t* dst_argb, int dst_stride_argb, int width, int height) { - return NV21ToARGBMatrix(src_y, src_stride_y, src_vu, src_stride_vu, dst_argb, - dst_stride_argb, &kYuvI601Constants, width, height); + int y; + void (*J400ToARGBRow)(const uint8_t* src_y, uint8_t* dst_argb, int width) = + J400ToARGBRow_C; + if (!src_y || !dst_argb || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + src_y = src_y + (height - 1) * src_stride_y; + src_stride_y = -src_stride_y; + } + // Coalesce rows. + if (src_stride_y == width && dst_stride_argb == width * 4) { + width *= height; + height = 1; + src_stride_y = dst_stride_argb = 0; + } +#if defined(HAS_J400TOARGBROW_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + J400ToARGBRow = J400ToARGBRow_Any_SSE2; + if (IS_ALIGNED(width, 8)) { + J400ToARGBRow = J400ToARGBRow_SSE2; + } + } +#endif +#if defined(HAS_J400TOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + J400ToARGBRow = J400ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + J400ToARGBRow = J400ToARGBRow_AVX2; + } + } +#endif +#if defined(HAS_J400TOARGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + J400ToARGBRow = J400ToARGBRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + J400ToARGBRow = J400ToARGBRow_NEON; + } + } +#endif +#if defined(HAS_J400TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + J400ToARGBRow = J400ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + J400ToARGBRow = J400ToARGBRow_MSA; + } + } +#endif +#if defined(HAS_J400TOARGBROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + J400ToARGBRow = J400ToARGBRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + J400ToARGBRow = J400ToARGBRow_LSX; + } + } +#endif + for (y = 0; y < height; ++y) { + J400ToARGBRow(src_y, dst_argb, width); + src_y += src_stride_y; + dst_argb += dst_stride_argb; + } + return 0; } -// Convert NV12 to ABGR. -// To output ABGR instead of ARGB swap the UV and use a mirrored yuv matrix. -// To swap the UV use NV12 instead of NV21.LIBYUV_API +// Shuffle table for converting BGRA to ARGB. +static const uvec8 kShuffleMaskBGRAToARGB = { + 3u, 2u, 1u, 0u, 7u, 6u, 5u, 4u, 11u, 10u, 9u, 8u, 15u, 14u, 13u, 12u}; + +// Shuffle table for converting ABGR to ARGB. +static const uvec8 kShuffleMaskABGRToARGB = { + 2u, 1u, 0u, 3u, 6u, 5u, 4u, 7u, 10u, 9u, 8u, 11u, 14u, 13u, 12u, 15u}; + +// Shuffle table for converting RGBA to ARGB. +static const uvec8 kShuffleMaskRGBAToARGB = { + 1u, 2u, 3u, 0u, 5u, 6u, 7u, 4u, 9u, 10u, 11u, 8u, 13u, 14u, 15u, 12u}; + +// Shuffle table for converting AR64 to AB64. +static const uvec8 kShuffleMaskAR64ToAB64 = { + 4u, 5u, 2u, 3u, 0u, 1u, 6u, 7u, 12u, 13u, 10u, 11u, 8u, 9u, 14u, 15u}; + +// Convert BGRA to ARGB. LIBYUV_API -int NV12ToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_abgr, - int dst_stride_abgr, +int BGRAToARGB(const uint8_t* src_bgra, + int src_stride_bgra, + uint8_t* dst_argb, + int dst_stride_argb, int width, int height) { - return NV21ToARGBMatrix(src_y, src_stride_y, src_uv, src_stride_uv, dst_abgr, - dst_stride_abgr, &kYvuI601Constants, width, height); + return ARGBShuffle(src_bgra, src_stride_bgra, dst_argb, dst_stride_argb, + (const uint8_t*)&kShuffleMaskBGRAToARGB, width, height); } -// Convert NV21 to ABGR. +// Convert ARGB to BGRA (same as BGRAToARGB). LIBYUV_API -int NV21ToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_vu, - int src_stride_vu, - uint8_t* dst_abgr, - int dst_stride_abgr, +int ARGBToBGRA(const uint8_t* src_bgra, + int src_stride_bgra, + uint8_t* dst_argb, + int dst_stride_argb, int width, int height) { - return NV12ToARGBMatrix(src_y, src_stride_y, src_vu, src_stride_vu, dst_abgr, - dst_stride_abgr, &kYvuI601Constants, width, height); + return ARGBShuffle(src_bgra, src_stride_bgra, dst_argb, dst_stride_argb, + (const uint8_t*)&kShuffleMaskBGRAToARGB, width, height); } -// TODO(fbarchard): Consider SSSE3 2 step conversion. 
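The BGRA/ABGR/RGBA-to-ARGB and AR64-to-AB64 conversions added just above are not hand-written loops; each is a single ARGBShuffle/AR64Shuffle call driven by a 16-entry kShuffleMask* table. Every table entry names the source byte to copy into that destination position within a 16-byte group, so one table serves both the scalar path and the SIMD byte-shuffle instructions. A scalar sketch of that semantics is below; ShuffleRowSketch is a hypothetical helper that assumes width is a multiple of 4 and is not libyuv's ARGBShuffleRow.

    // Scalar illustration of the 16-byte shuffle tables above.
    // Hypothetical helper; libyuv's ARGBShuffleRow implementations differ,
    // but produce the same reordering for these masks.
    #include <stdint.h>

    static void ShuffleRowSketch(const uint8_t* src, uint8_t* dst,
                                 const uint8_t shuffler[16], int width) {
      // Processes 4 ARGB pixels (16 bytes) per step; width % 4 == 0 assumed.
      for (int x = 0; x < width; x += 4) {
        for (int i = 0; i < 16; ++i) {
          dst[i] = src[shuffler[i]];  // dst byte i copies src byte shuffler[i]
        }
        src += 16;
        dst += 16;
      }
    }

With kShuffleMaskBGRAToARGB = {3, 2, 1, 0, ...} this reverses each 4-byte pixel, and because that reversal is its own inverse, BGRAToARGB and ARGBToBGRA can share the same table.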
-// Convert NV12 to RGB24 with matrix. +// Convert ABGR to ARGB. LIBYUV_API -int NV12ToRGB24Matrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - const struct YuvConstants* yuvconstants, - int width, - int height) { +int ABGRToARGB(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return ARGBShuffle(src_abgr, src_stride_abgr, dst_argb, dst_stride_argb, + (const uint8_t*)&kShuffleMaskABGRToARGB, width, height); +} + +// Convert ARGB to ABGR to (same as ABGRToARGB). +LIBYUV_API +int ARGBToABGR(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return ARGBShuffle(src_abgr, src_stride_abgr, dst_argb, dst_stride_argb, + (const uint8_t*)&kShuffleMaskABGRToARGB, width, height); +} + +// Convert RGBA to ARGB. +LIBYUV_API +int RGBAToARGB(const uint8_t* src_rgba, + int src_stride_rgba, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return ARGBShuffle(src_rgba, src_stride_rgba, dst_argb, dst_stride_argb, + (const uint8_t*)&kShuffleMaskRGBAToARGB, width, height); +} + +// Convert AR64 To AB64. +LIBYUV_API +int AR64ToAB64(const uint16_t* src_ar64, + int src_stride_ar64, + uint16_t* dst_ab64, + int dst_stride_ab64, + int width, + int height) { + return AR64Shuffle(src_ar64, src_stride_ar64, dst_ab64, dst_stride_ab64, + (const uint8_t*)&kShuffleMaskAR64ToAB64, width, height); +} + +// Convert RGB24 to ARGB. +LIBYUV_API +int RGB24ToARGB(const uint8_t* src_rgb24, + int src_stride_rgb24, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { int y; - void (*NV12ToRGB24Row)( - const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = NV12ToRGB24Row_C; - if (!src_y || !src_uv || !dst_rgb24 || width <= 0 || height == 0) { + void (*RGB24ToARGBRow)(const uint8_t* src_rgb, uint8_t* dst_argb, int width) = + RGB24ToARGBRow_C; + if (!src_rgb24 || !dst_argb || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - dst_rgb24 = dst_rgb24 + (height - 1) * dst_stride_rgb24; - dst_stride_rgb24 = -dst_stride_rgb24; + src_rgb24 = src_rgb24 + (height - 1) * src_stride_rgb24; + src_stride_rgb24 = -src_stride_rgb24; } -#if defined(HAS_NV12TORGB24ROW_NEON) + // Coalesce rows. 
+ if (src_stride_rgb24 == width * 3 && dst_stride_argb == width * 4) { + width *= height; + height = 1; + src_stride_rgb24 = dst_stride_argb = 0; + } +#if defined(HAS_RGB24TOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + RGB24ToARGBRow = RGB24ToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + RGB24ToARGBRow = RGB24ToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_RGB24TOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - NV12ToRGB24Row = NV12ToRGB24Row_Any_NEON; + RGB24ToARGBRow = RGB24ToARGBRow_Any_NEON; if (IS_ALIGNED(width, 8)) { - NV12ToRGB24Row = NV12ToRGB24Row_NEON; + RGB24ToARGBRow = RGB24ToARGBRow_NEON; } } #endif -#if defined(HAS_NV12TORGB24ROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - NV12ToRGB24Row = NV12ToRGB24Row_Any_SSSE3; +#if defined(HAS_RGB24TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RGB24ToARGBRow = RGB24ToARGBRow_Any_MSA; if (IS_ALIGNED(width, 16)) { - NV12ToRGB24Row = NV12ToRGB24Row_SSSE3; + RGB24ToARGBRow = RGB24ToARGBRow_MSA; } } #endif -#if defined(HAS_NV12TORGB24ROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - NV12ToRGB24Row = NV12ToRGB24Row_Any_AVX2; - if (IS_ALIGNED(width, 32)) { - NV12ToRGB24Row = NV12ToRGB24Row_AVX2; +#if defined(HAS_RGB24TOARGBROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + RGB24ToARGBRow = RGB24ToARGBRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + RGB24ToARGBRow = RGB24ToARGBRow_LSX; } } #endif -#if defined(HAS_NV12TORGB24ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - NV12ToRGB24Row = NV12ToRGB24Row_Any_MMI; - if (IS_ALIGNED(width, 8)) { - NV12ToRGB24Row = NV12ToRGB24Row_MMI; +#if defined(HAS_RGB24TOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + RGB24ToARGBRow = RGB24ToARGBRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + RGB24ToARGBRow = RGB24ToARGBRow_LASX; } } #endif for (y = 0; y < height; ++y) { - NV12ToRGB24Row(src_y, src_uv, dst_rgb24, yuvconstants, width); - dst_rgb24 += dst_stride_rgb24; - src_y += src_stride_y; - if (y & 1) { - src_uv += src_stride_uv; - } + RGB24ToARGBRow(src_rgb24, dst_argb, width); + src_rgb24 += src_stride_rgb24; + dst_argb += dst_stride_argb; } return 0; } -// Convert NV21 to RGB24 with matrix. +// Convert RAW to ARGB. LIBYUV_API -int NV21ToRGB24Matrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_vu, - int src_stride_vu, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - const struct YuvConstants* yuvconstants, - int width, - int height) { +int RAWToARGB(const uint8_t* src_raw, + int src_stride_raw, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { int y; - void (*NV21ToRGB24Row)( - const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = NV21ToRGB24Row_C; - if (!src_y || !src_vu || !dst_rgb24 || width <= 0 || height == 0) { + void (*RAWToARGBRow)(const uint8_t* src_rgb, uint8_t* dst_argb, int width) = + RAWToARGBRow_C; + if (!src_raw || !dst_argb || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - dst_rgb24 = dst_rgb24 + (height - 1) * dst_stride_rgb24; - dst_stride_rgb24 = -dst_stride_rgb24; + src_raw = src_raw + (height - 1) * src_stride_raw; + src_stride_raw = -src_stride_raw; } -#if defined(HAS_NV21TORGB24ROW_NEON) + // Coalesce rows. 
+ if (src_stride_raw == width * 3 && dst_stride_argb == width * 4) { + width *= height; + height = 1; + src_stride_raw = dst_stride_argb = 0; + } +#if defined(HAS_RAWTOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + RAWToARGBRow = RAWToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + RAWToARGBRow = RAWToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_RAWTOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - NV21ToRGB24Row = NV21ToRGB24Row_Any_NEON; + RAWToARGBRow = RAWToARGBRow_Any_NEON; if (IS_ALIGNED(width, 8)) { - NV21ToRGB24Row = NV21ToRGB24Row_NEON; + RAWToARGBRow = RAWToARGBRow_NEON; } } #endif -#if defined(HAS_NV21TORGB24ROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - NV21ToRGB24Row = NV21ToRGB24Row_Any_SSSE3; +#if defined(HAS_RAWTOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RAWToARGBRow = RAWToARGBRow_Any_MSA; if (IS_ALIGNED(width, 16)) { - NV21ToRGB24Row = NV21ToRGB24Row_SSSE3; + RAWToARGBRow = RAWToARGBRow_MSA; } } #endif -#if defined(HAS_NV21TORGB24ROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - NV21ToRGB24Row = NV21ToRGB24Row_Any_AVX2; - if (IS_ALIGNED(width, 32)) { - NV21ToRGB24Row = NV21ToRGB24Row_AVX2; +#if defined(HAS_RAWTOARGBROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + RAWToARGBRow = RAWToARGBRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + RAWToARGBRow = RAWToARGBRow_LSX; } } #endif -#if defined(HAS_NV21TORGB24ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - NV21ToRGB24Row = NV21ToRGB24Row_Any_MMI; - if (IS_ALIGNED(width, 8)) { - NV21ToRGB24Row = NV21ToRGB24Row_MMI; +#if defined(HAS_RAWTOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + RAWToARGBRow = RAWToARGBRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + RAWToARGBRow = RAWToARGBRow_LASX; } } #endif for (y = 0; y < height; ++y) { - NV21ToRGB24Row(src_y, src_vu, dst_rgb24, yuvconstants, width); - dst_rgb24 += dst_stride_rgb24; - src_y += src_stride_y; - if (y & 1) { - src_vu += src_stride_vu; - } + RAWToARGBRow(src_raw, dst_argb, width); + src_raw += src_stride_raw; + dst_argb += dst_stride_argb; } return 0; } -// Convert NV12 to RGB24. -LIBYUV_API -int NV12ToRGB24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height) { - return NV12ToRGB24Matrix(src_y, src_stride_y, src_uv, src_stride_uv, - dst_rgb24, dst_stride_rgb24, &kYuvI601Constants, - width, height); -} - -// Convert NV21 to RGB24. -LIBYUV_API -int NV21ToRGB24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_vu, - int src_stride_vu, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height) { - return NV21ToRGB24Matrix(src_y, src_stride_y, src_vu, src_stride_vu, - dst_rgb24, dst_stride_rgb24, &kYuvI601Constants, - width, height); -} - -// Convert NV12 to RAW. -LIBYUV_API -int NV12ToRAW(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_raw, - int dst_stride_raw, - int width, - int height) { - return NV21ToRGB24Matrix(src_y, src_stride_y, src_uv, src_stride_uv, dst_raw, - dst_stride_raw, &kYvuI601Constants, width, height); -} - -// Convert NV21 to RAW. +// Convert RAW to RGBA. 
LIBYUV_API -int NV21ToRAW(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_vu, - int src_stride_vu, - uint8_t* dst_raw, - int dst_stride_raw, +int RAWToRGBA(const uint8_t* src_raw, + int src_stride_raw, + uint8_t* dst_rgba, + int dst_stride_rgba, int width, int height) { - return NV12ToRGB24Matrix(src_y, src_stride_y, src_vu, src_stride_vu, dst_raw, - dst_stride_raw, &kYvuI601Constants, width, height); -} - -// Convert NV21 to YUV24 -int NV21ToYUV24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_vu, - int src_stride_vu, - uint8_t* dst_yuv24, - int dst_stride_yuv24, - int width, - int height) { int y; - void (*NV21ToYUV24Row)(const uint8_t* src_y, const uint8_t* src_vu, - uint8_t* dst_yuv24, int width) = NV21ToYUV24Row_C; - if (!src_y || !src_vu || !dst_yuv24 || width <= 0 || height == 0) { + void (*RAWToRGBARow)(const uint8_t* src_rgb, uint8_t* dst_rgba, int width) = + RAWToRGBARow_C; + if (!src_raw || !dst_rgba || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - dst_yuv24 = dst_yuv24 + (height - 1) * dst_stride_yuv24; - dst_stride_yuv24 = -dst_stride_yuv24; + src_raw = src_raw + (height - 1) * src_stride_raw; + src_stride_raw = -src_stride_raw; } -#if defined(HAS_NV21TOYUV24ROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - NV21ToYUV24Row = NV21ToYUV24Row_Any_NEON; + // Coalesce rows. + if (src_stride_raw == width * 3 && dst_stride_rgba == width * 4) { + width *= height; + height = 1; + src_stride_raw = dst_stride_rgba = 0; + } +#if defined(HAS_RAWTORGBAROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + RAWToRGBARow = RAWToRGBARow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - NV21ToYUV24Row = NV21ToYUV24Row_NEON; + RAWToRGBARow = RAWToRGBARow_SSSE3; } } #endif -#if defined(HAS_NV21TOYUV24ROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - NV21ToYUV24Row = NV21ToYUV24Row_Any_AVX2; - if (IS_ALIGNED(width, 32)) { - NV21ToYUV24Row = NV21ToYUV24Row_AVX2; +#if defined(HAS_RAWTORGBAROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + RAWToRGBARow = RAWToRGBARow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + RAWToRGBARow = RAWToRGBARow_NEON; } } #endif + for (y = 0; y < height; ++y) { - NV21ToYUV24Row(src_y, src_vu, dst_yuv24, width); - dst_yuv24 += dst_stride_yuv24; - src_y += src_stride_y; - if (y & 1) { - src_vu += src_stride_vu; - } + RAWToRGBARow(src_raw, dst_rgba, width); + src_raw += src_stride_raw; + dst_rgba += dst_stride_rgba; } return 0; } -// Convert YUY2 to ARGB. +// Convert RGB565 to ARGB. LIBYUV_API -int YUY2ToARGB(const uint8_t* src_yuy2, - int src_stride_yuy2, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { +int RGB565ToARGB(const uint8_t* src_rgb565, + int src_stride_rgb565, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { int y; - void (*YUY2ToARGBRow)(const uint8_t* src_yuy2, uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, int width) = - YUY2ToARGBRow_C; - if (!src_yuy2 || !dst_argb || width <= 0 || height == 0) { + void (*RGB565ToARGBRow)(const uint8_t* src_rgb565, uint8_t* dst_argb, + int width) = RGB565ToARGBRow_C; + if (!src_rgb565 || !dst_argb || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - src_yuy2 = src_yuy2 + (height - 1) * src_stride_yuy2; - src_stride_yuy2 = -src_stride_yuy2; + src_rgb565 = src_rgb565 + (height - 1) * src_stride_rgb565; + src_stride_rgb565 = -src_stride_rgb565; } // Coalesce rows. 
- if (src_stride_yuy2 == width * 2 && dst_stride_argb == width * 4) { + if (src_stride_rgb565 == width * 2 && dst_stride_argb == width * 4) { width *= height; height = 1; - src_stride_yuy2 = dst_stride_argb = 0; + src_stride_rgb565 = dst_stride_argb = 0; } -#if defined(HAS_YUY2TOARGBROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - YUY2ToARGBRow = YUY2ToARGBRow_Any_SSSE3; - if (IS_ALIGNED(width, 16)) { - YUY2ToARGBRow = YUY2ToARGBRow_SSSE3; +#if defined(HAS_RGB565TOARGBROW_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + RGB565ToARGBRow = RGB565ToARGBRow_Any_SSE2; + if (IS_ALIGNED(width, 8)) { + RGB565ToARGBRow = RGB565ToARGBRow_SSE2; } } #endif -#if defined(HAS_YUY2TOARGBROW_AVX2) +#if defined(HAS_RGB565TOARGBROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - YUY2ToARGBRow = YUY2ToARGBRow_Any_AVX2; - if (IS_ALIGNED(width, 32)) { - YUY2ToARGBRow = YUY2ToARGBRow_AVX2; + RGB565ToARGBRow = RGB565ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + RGB565ToARGBRow = RGB565ToARGBRow_AVX2; } } #endif -#if defined(HAS_YUY2TOARGBROW_NEON) +#if defined(HAS_RGB565TOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - YUY2ToARGBRow = YUY2ToARGBRow_Any_NEON; + RGB565ToARGBRow = RGB565ToARGBRow_Any_NEON; if (IS_ALIGNED(width, 8)) { - YUY2ToARGBRow = YUY2ToARGBRow_NEON; + RGB565ToARGBRow = RGB565ToARGBRow_NEON; } } #endif -#if defined(HAS_YUY2TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - YUY2ToARGBRow = YUY2ToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - YUY2ToARGBRow = YUY2ToARGBRow_MMI; +#if defined(HAS_RGB565TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RGB565ToARGBRow = RGB565ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + RGB565ToARGBRow = RGB565ToARGBRow_MSA; } } #endif -#if defined(HAS_YUY2TOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - YUY2ToARGBRow = YUY2ToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - YUY2ToARGBRow = YUY2ToARGBRow_MSA; +#if defined(HAS_RGB565TOARGBROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + RGB565ToARGBRow = RGB565ToARGBRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + RGB565ToARGBRow = RGB565ToARGBRow_LSX; } } #endif - for (y = 0; y < height; ++y) { - YUY2ToARGBRow(src_yuy2, dst_argb, &kYuvI601Constants, width); - src_yuy2 += src_stride_yuy2; - dst_argb += dst_stride_argb; +#if defined(HAS_RGB565TOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + RGB565ToARGBRow = RGB565ToARGBRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + RGB565ToARGBRow = RGB565ToARGBRow_LASX; + } + } +#endif + + for (y = 0; y < height; ++y) { + RGB565ToARGBRow(src_rgb565, dst_argb, width); + src_rgb565 += src_stride_rgb565; + dst_argb += dst_stride_argb; } return 0; } -// Convert UYVY to ARGB. +// Convert ARGB1555 to ARGB. LIBYUV_API -int UYVYToARGB(const uint8_t* src_uyvy, - int src_stride_uyvy, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { +int ARGB1555ToARGB(const uint8_t* src_argb1555, + int src_stride_argb1555, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { int y; - void (*UYVYToARGBRow)(const uint8_t* src_uyvy, uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, int width) = - UYVYToARGBRow_C; - if (!src_uyvy || !dst_argb || width <= 0 || height == 0) { + void (*ARGB1555ToARGBRow)(const uint8_t* src_argb1555, uint8_t* dst_argb, + int width) = ARGB1555ToARGBRow_C; + if (!src_argb1555 || !dst_argb || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. 
if (height < 0) { height = -height; - src_uyvy = src_uyvy + (height - 1) * src_stride_uyvy; - src_stride_uyvy = -src_stride_uyvy; + src_argb1555 = src_argb1555 + (height - 1) * src_stride_argb1555; + src_stride_argb1555 = -src_stride_argb1555; } // Coalesce rows. - if (src_stride_uyvy == width * 2 && dst_stride_argb == width * 4) { + if (src_stride_argb1555 == width * 2 && dst_stride_argb == width * 4) { width *= height; height = 1; - src_stride_uyvy = dst_stride_argb = 0; + src_stride_argb1555 = dst_stride_argb = 0; } -#if defined(HAS_UYVYTOARGBROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - UYVYToARGBRow = UYVYToARGBRow_Any_SSSE3; - if (IS_ALIGNED(width, 16)) { - UYVYToARGBRow = UYVYToARGBRow_SSSE3; +#if defined(HAS_ARGB1555TOARGBROW_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + ARGB1555ToARGBRow = ARGB1555ToARGBRow_Any_SSE2; + if (IS_ALIGNED(width, 8)) { + ARGB1555ToARGBRow = ARGB1555ToARGBRow_SSE2; } } #endif -#if defined(HAS_UYVYTOARGBROW_AVX2) +#if defined(HAS_ARGB1555TOARGBROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - UYVYToARGBRow = UYVYToARGBRow_Any_AVX2; - if (IS_ALIGNED(width, 32)) { - UYVYToARGBRow = UYVYToARGBRow_AVX2; + ARGB1555ToARGBRow = ARGB1555ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + ARGB1555ToARGBRow = ARGB1555ToARGBRow_AVX2; } } #endif -#if defined(HAS_UYVYTOARGBROW_NEON) +#if defined(HAS_ARGB1555TOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - UYVYToARGBRow = UYVYToARGBRow_Any_NEON; + ARGB1555ToARGBRow = ARGB1555ToARGBRow_Any_NEON; if (IS_ALIGNED(width, 8)) { - UYVYToARGBRow = UYVYToARGBRow_NEON; + ARGB1555ToARGBRow = ARGB1555ToARGBRow_NEON; } } #endif -#if defined(HAS_UYVYTOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - UYVYToARGBRow = UYVYToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - UYVYToARGBRow = UYVYToARGBRow_MMI; +#if defined(HAS_ARGB1555TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGB1555ToARGBRow = ARGB1555ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGB1555ToARGBRow = ARGB1555ToARGBRow_MSA; } } #endif -#if defined(HAS_UYVYTOARGBROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - UYVYToARGBRow = UYVYToARGBRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - UYVYToARGBRow = UYVYToARGBRow_MSA; +#if defined(HAS_ARGB1555TOARGBROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGB1555ToARGBRow = ARGB1555ToARGBRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGB1555ToARGBRow = ARGB1555ToARGBRow_LSX; } } #endif +#if defined(HAS_ARGB1555TOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGB1555ToARGBRow = ARGB1555ToARGBRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGB1555ToARGBRow = ARGB1555ToARGBRow_LASX; + } + } +#endif + for (y = 0; y < height; ++y) { - UYVYToARGBRow(src_uyvy, dst_argb, &kYuvI601Constants, width); - src_uyvy += src_stride_uyvy; + ARGB1555ToARGBRow(src_argb1555, dst_argb, width); + src_argb1555 += src_stride_argb1555; dst_argb += dst_stride_argb; } return 0; } -static void WeavePixels(const uint8_t* src_u, - const uint8_t* src_v, - int src_pixel_stride_uv, - uint8_t* dst_uv, - int width) { - int i; - for (i = 0; i < width; ++i) { - dst_uv[0] = *src_u; - dst_uv[1] = *src_v; - dst_uv += 2; - src_u += src_pixel_stride_uv; - src_v += src_pixel_stride_uv; - } -} -// Convert Android420 to ARGB with matrix. +// Convert ARGB4444 to ARGB. 
LIBYUV_API -int Android420ToARGBMatrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - int src_pixel_stride_uv, - uint8_t* dst_argb, - int dst_stride_argb, - const struct YuvConstants* yuvconstants, - int width, - int height) { +int ARGB4444ToARGB(const uint8_t* src_argb4444, + int src_stride_argb4444, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { int y; - uint8_t* dst_uv; - const ptrdiff_t vu_off = src_v - src_u; - int halfwidth = (width + 1) >> 1; - int halfheight = (height + 1) >> 1; - if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) { + void (*ARGB4444ToARGBRow)(const uint8_t* src_argb4444, uint8_t* dst_argb, + int width) = ARGB4444ToARGBRow_C; + if (!src_argb4444 || !dst_argb || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - halfheight = (height + 1) >> 1; - dst_argb = dst_argb + (height - 1) * dst_stride_argb; - dst_stride_argb = -dst_stride_argb; - } - - // I420 - if (src_pixel_stride_uv == 1) { - return I420ToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_argb, dst_stride_argb, - yuvconstants, width, height); - // NV21 - } - if (src_pixel_stride_uv == 2 && vu_off == -1 && - src_stride_u == src_stride_v) { - return NV21ToARGBMatrix(src_y, src_stride_y, src_v, src_stride_v, dst_argb, - dst_stride_argb, yuvconstants, width, height); - // NV12 - } - if (src_pixel_stride_uv == 2 && vu_off == 1 && src_stride_u == src_stride_v) { - return NV12ToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, dst_argb, - dst_stride_argb, yuvconstants, width, height); - } - - // General case fallback creates NV12 - align_buffer_64(plane_uv, halfwidth * 2 * halfheight); - dst_uv = plane_uv; - for (y = 0; y < halfheight; ++y) { - WeavePixels(src_u, src_v, src_pixel_stride_uv, dst_uv, halfwidth); - src_u += src_stride_u; - src_v += src_stride_v; - dst_uv += halfwidth * 2; - } - NV12ToARGBMatrix(src_y, src_stride_y, plane_uv, halfwidth * 2, dst_argb, - dst_stride_argb, yuvconstants, width, height); - free_aligned_buffer_64(plane_uv); - return 0; -} - -// Convert Android420 to ARGB. -LIBYUV_API -int Android420ToARGB(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - int src_pixel_stride_uv, - uint8_t* dst_argb, - int dst_stride_argb, - int width, - int height) { - return Android420ToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, src_pixel_stride_uv, dst_argb, - dst_stride_argb, &kYuvI601Constants, width, - height); -} - -// Convert Android420 to ABGR. -LIBYUV_API -int Android420ToABGR(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - int src_pixel_stride_uv, - uint8_t* dst_abgr, - int dst_stride_abgr, - int width, - int height) { - return Android420ToARGBMatrix(src_y, src_stride_y, src_v, src_stride_v, src_u, - src_stride_u, src_pixel_stride_uv, dst_abgr, - dst_stride_abgr, &kYvuI601Constants, width, - height); -} - -// Convert I422 to RGBA with matrix. 
-LIBYUV_API -int I422ToRGBAMatrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgba, - int dst_stride_rgba, - const struct YuvConstants* yuvconstants, - int width, - int height) { - int y; - void (*I422ToRGBARow)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = - I422ToRGBARow_C; - if (!src_y || !src_u || !src_v || !dst_rgba || width <= 0 || height == 0) { - return -1; + src_argb4444 = src_argb4444 + (height - 1) * src_stride_argb4444; + src_stride_argb4444 = -src_stride_argb4444; } - // Negative height means invert the image. - if (height < 0) { - height = -height; - dst_rgba = dst_rgba + (height - 1) * dst_stride_rgba; - dst_stride_rgba = -dst_stride_rgba; + // Coalesce rows. + if (src_stride_argb4444 == width * 2 && dst_stride_argb == width * 4) { + width *= height; + height = 1; + src_stride_argb4444 = dst_stride_argb = 0; } -#if defined(HAS_I422TORGBAROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToRGBARow = I422ToRGBARow_Any_SSSE3; +#if defined(HAS_ARGB4444TOARGBROW_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_SSE2; if (IS_ALIGNED(width, 8)) { - I422ToRGBARow = I422ToRGBARow_SSSE3; + ARGB4444ToARGBRow = ARGB4444ToARGBRow_SSE2; } } #endif -#if defined(HAS_I422TORGBAROW_AVX2) +#if defined(HAS_ARGB4444TOARGBROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - I422ToRGBARow = I422ToRGBARow_Any_AVX2; + ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_AVX2; if (IS_ALIGNED(width, 16)) { - I422ToRGBARow = I422ToRGBARow_AVX2; + ARGB4444ToARGBRow = ARGB4444ToARGBRow_AVX2; } } #endif -#if defined(HAS_I422TORGBAROW_NEON) +#if defined(HAS_ARGB4444TOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - I422ToRGBARow = I422ToRGBARow_Any_NEON; + ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_NEON; if (IS_ALIGNED(width, 8)) { - I422ToRGBARow = I422ToRGBARow_NEON; - } - } -#endif -#if defined(HAS_I422TORGBAROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToRGBARow = I422ToRGBARow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422ToRGBARow = I422ToRGBARow_MMI; + ARGB4444ToARGBRow = ARGB4444ToARGBRow_NEON; } } #endif -#if defined(HAS_I422TORGBAROW_MSA) +#if defined(HAS_ARGB4444TOARGBROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { - I422ToRGBARow = I422ToRGBARow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422ToRGBARow = I422ToRGBARow_MSA; + ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ARGB4444ToARGBRow = ARGB4444ToARGBRow_MSA; + } + } +#endif +#if defined(HAS_ARGB4444TOARGBROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGB4444ToARGBRow = ARGB4444ToARGBRow_LSX; + } + } +#endif +#if defined(HAS_ARGB4444TOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGB4444ToARGBRow = ARGB4444ToARGBRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGB4444ToARGBRow = ARGB4444ToARGBRow_LASX; } } #endif for (y = 0; y < height; ++y) { - I422ToRGBARow(src_y, src_u, src_v, dst_rgba, yuvconstants, width); - dst_rgba += dst_stride_rgba; - src_y += src_stride_y; - src_u += src_stride_u; - src_v += src_stride_v; + ARGB4444ToARGBRow(src_argb4444, dst_argb, width); + src_argb4444 += src_stride_argb4444; + dst_argb += dst_stride_argb; } return 0; } -// Convert I422 to RGBA. +// Convert AR30 to ARGB. 
LIBYUV_API -int I422ToRGBA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgba, - int dst_stride_rgba, +int AR30ToARGB(const uint8_t* src_ar30, + int src_stride_ar30, + uint8_t* dst_argb, + int dst_stride_argb, int width, int height) { - return I422ToRGBAMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgba, dst_stride_rgba, - &kYuvI601Constants, width, height); + int y; + if (!src_ar30 || !dst_argb || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_ar30 = src_ar30 + (height - 1) * src_stride_ar30; + src_stride_ar30 = -src_stride_ar30; + } + // Coalesce rows. + if (src_stride_ar30 == width * 4 && dst_stride_argb == width * 4) { + width *= height; + height = 1; + src_stride_ar30 = dst_stride_argb = 0; + } + for (y = 0; y < height; ++y) { + AR30ToARGBRow_C(src_ar30, dst_argb, width); + src_ar30 += src_stride_ar30; + dst_argb += dst_stride_argb; + } + return 0; } -// Convert I422 to BGRA. +// Convert AR30 to ABGR. LIBYUV_API -int I422ToBGRA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_bgra, - int dst_stride_bgra, +int AR30ToABGR(const uint8_t* src_ar30, + int src_stride_ar30, + uint8_t* dst_abgr, + int dst_stride_abgr, int width, int height) { - return I422ToRGBAMatrix(src_y, src_stride_y, src_v, - src_stride_v, // Swap U and V - src_u, src_stride_u, dst_bgra, dst_stride_bgra, - &kYvuI601Constants, // Use Yvu matrix - width, height); + int y; + if (!src_ar30 || !dst_abgr || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_ar30 = src_ar30 + (height - 1) * src_stride_ar30; + src_stride_ar30 = -src_stride_ar30; + } + // Coalesce rows. + if (src_stride_ar30 == width * 4 && dst_stride_abgr == width * 4) { + width *= height; + height = 1; + src_stride_ar30 = dst_stride_abgr = 0; + } + for (y = 0; y < height; ++y) { + AR30ToABGRRow_C(src_ar30, dst_abgr, width); + src_ar30 += src_stride_ar30; + dst_abgr += dst_stride_abgr; + } + return 0; } -// Convert NV12 to RGB565 with matrix. +// Convert AR30 to AB30. LIBYUV_API -int NV12ToRGB565Matrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - const struct YuvConstants* yuvconstants, - int width, - int height) { +int AR30ToAB30(const uint8_t* src_ar30, + int src_stride_ar30, + uint8_t* dst_ab30, + int dst_stride_ab30, + int width, + int height) { int y; - void (*NV12ToRGB565Row)( - const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = NV12ToRGB565Row_C; - if (!src_y || !src_uv || !dst_rgb565 || width <= 0 || height == 0) { + if (!src_ar30 || !dst_ab30 || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; - dst_stride_rgb565 = -dst_stride_rgb565; + src_ar30 = src_ar30 + (height - 1) * src_stride_ar30; + src_stride_ar30 = -src_stride_ar30; } -#if defined(HAS_NV12TORGB565ROW_SSSE3) + // Coalesce rows. 
+ if (src_stride_ar30 == width * 4 && dst_stride_ab30 == width * 4) { + width *= height; + height = 1; + src_stride_ar30 = dst_stride_ab30 = 0; + } + for (y = 0; y < height; ++y) { + AR30ToAB30Row_C(src_ar30, dst_ab30, width); + src_ar30 += src_stride_ar30; + dst_ab30 += dst_stride_ab30; + } + return 0; +} + +// Convert AR64 to ARGB. +LIBYUV_API +int AR64ToARGB(const uint16_t* src_ar64, + int src_stride_ar64, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + int y; + void (*AR64ToARGBRow)(const uint16_t* src_ar64, uint8_t* dst_argb, + int width) = AR64ToARGBRow_C; + if (!src_ar64 || !dst_argb || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_ar64 = src_ar64 + (height - 1) * src_stride_ar64; + src_stride_ar64 = -src_stride_ar64; + } + // Coalesce rows. + if (src_stride_ar64 == width * 4 && dst_stride_argb == width * 4) { + width *= height; + height = 1; + src_stride_ar64 = dst_stride_argb = 0; + } +#if defined(HAS_AR64TOARGBROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - NV12ToRGB565Row = NV12ToRGB565Row_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - NV12ToRGB565Row = NV12ToRGB565Row_SSSE3; + AR64ToARGBRow = AR64ToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 4)) { + AR64ToARGBRow = AR64ToARGBRow_SSSE3; } } #endif -#if defined(HAS_NV12TORGB565ROW_AVX2) +#if defined(HAS_AR64TOARGBROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - NV12ToRGB565Row = NV12ToRGB565Row_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - NV12ToRGB565Row = NV12ToRGB565Row_AVX2; - } - } -#endif -#if defined(HAS_NV12TORGB565ROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - NV12ToRGB565Row = NV12ToRGB565Row_Any_NEON; + AR64ToARGBRow = AR64ToARGBRow_Any_AVX2; if (IS_ALIGNED(width, 8)) { - NV12ToRGB565Row = NV12ToRGB565Row_NEON; - } - } -#endif -#if defined(HAS_NV12TORGB565ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - NV12ToRGB565Row = NV12ToRGB565Row_Any_MMI; - if (IS_ALIGNED(width, 4)) { - NV12ToRGB565Row = NV12ToRGB565Row_MMI; + AR64ToARGBRow = AR64ToARGBRow_AVX2; } } #endif -#if defined(HAS_NV12TORGB565ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - NV12ToRGB565Row = NV12ToRGB565Row_Any_MSA; +#if defined(HAS_AR64TOARGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + AR64ToARGBRow = AR64ToARGBRow_Any_NEON; if (IS_ALIGNED(width, 8)) { - NV12ToRGB565Row = NV12ToRGB565Row_MSA; + AR64ToARGBRow = AR64ToARGBRow_NEON; } } #endif for (y = 0; y < height; ++y) { - NV12ToRGB565Row(src_y, src_uv, dst_rgb565, yuvconstants, width); - dst_rgb565 += dst_stride_rgb565; - src_y += src_stride_y; - if (y & 1) { - src_uv += src_stride_uv; - } + AR64ToARGBRow(src_ar64, dst_argb, width); + src_ar64 += src_stride_ar64; + dst_argb += dst_stride_argb; } return 0; } -// Convert NV12 to RGB565. -LIBYUV_API -int NV12ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height) { - return NV12ToRGB565Matrix(src_y, src_stride_y, src_uv, src_stride_uv, - dst_rgb565, dst_stride_rgb565, &kYuvI601Constants, - width, height); -} - -// Convert I422 to RGBA with matrix. +// Convert AB64 to ARGB. 
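Note on the converters above: the new AR30ToARGB, AR30ToABGR, AR30ToAB30 and AR64ToARGB paths all follow two libyuv conventions. A negative height flips the image vertically by starting from the last source row with a negated stride, and the "Coalesce rows" block collapses a fully contiguous image into one long row so the row routine runs exactly once. A minimal standalone sketch of the coalescing idea, using an illustrative memcpy-based plane copy rather than any real libyuv kernel:

/* Illustrative sketch only -- not part of the libyuv patch.             */
/* "Coalesce rows": when both buffers are contiguous (stride == width in */
/* elements), the whole image can be handed to the row routine at once.  */
#include <stdint.h>
#include <string.h>

static void CopyPlane_Sketch(const uint8_t* src, int src_stride,
                             uint8_t* dst, int dst_stride,
                             int width, int height) {
  int y;
  if (src_stride == width && dst_stride == width) {
    width *= height;                  /* treat the image as a single row */
    height = 1;
    src_stride = dst_stride = 0;
  }
  for (y = 0; y < height; ++y) {
    memcpy(dst, src, (size_t)width);  /* stand-in for a per-row kernel */
    src += src_stride;
    dst += dst_stride;
  }
}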
LIBYUV_API -int I420ToRGBAMatrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgba, - int dst_stride_rgba, - const struct YuvConstants* yuvconstants, - int width, - int height) { +int AB64ToARGB(const uint16_t* src_ab64, + int src_stride_ab64, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { int y; - void (*I422ToRGBARow)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = - I422ToRGBARow_C; - if (!src_y || !src_u || !src_v || !dst_rgba || width <= 0 || height == 0) { + void (*AB64ToARGBRow)(const uint16_t* src_ar64, uint8_t* dst_argb, + int width) = AB64ToARGBRow_C; + if (!src_ab64 || !dst_argb || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - dst_rgba = dst_rgba + (height - 1) * dst_stride_rgba; - dst_stride_rgba = -dst_stride_rgba; + src_ab64 = src_ab64 + (height - 1) * src_stride_ab64; + src_stride_ab64 = -src_stride_ab64; } -#if defined(HAS_I422TORGBAROW_SSSE3) + // Coalesce rows. + if (src_stride_ab64 == width * 4 && dst_stride_argb == width * 4) { + width *= height; + height = 1; + src_stride_ab64 = dst_stride_argb = 0; + } +#if defined(HAS_AB64TOARGBROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToRGBARow = I422ToRGBARow_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - I422ToRGBARow = I422ToRGBARow_SSSE3; + AB64ToARGBRow = AB64ToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 4)) { + AB64ToARGBRow = AB64ToARGBRow_SSSE3; } } #endif -#if defined(HAS_I422TORGBAROW_AVX2) +#if defined(HAS_AB64TOARGBROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - I422ToRGBARow = I422ToRGBARow_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - I422ToRGBARow = I422ToRGBARow_AVX2; + AB64ToARGBRow = AB64ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 8)) { + AB64ToARGBRow = AB64ToARGBRow_AVX2; } } #endif -#if defined(HAS_I422TORGBAROW_NEON) +#if defined(HAS_AB64TOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - I422ToRGBARow = I422ToRGBARow_Any_NEON; + AB64ToARGBRow = AB64ToARGBRow_Any_NEON; if (IS_ALIGNED(width, 8)) { - I422ToRGBARow = I422ToRGBARow_NEON; + AB64ToARGBRow = AB64ToARGBRow_NEON; } } #endif -#if defined(HAS_I422TORGBAROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToRGBARow = I422ToRGBARow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422ToRGBARow = I422ToRGBARow_MMI; - } + + for (y = 0; y < height; ++y) { + AB64ToARGBRow(src_ab64, dst_argb, width); + src_ab64 += src_stride_ab64; + dst_argb += dst_stride_argb; } -#endif -#if defined(HAS_I422TORGBAROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToRGBARow = I422ToRGBARow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422ToRGBARow = I422ToRGBARow_MSA; + return 0; +} + +// Convert NV12 to ARGB with matrix. +LIBYUV_API +int NV12ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*NV12ToARGBRow)( + const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = NV12ToARGBRow_C; + assert(yuvconstants); + if (!src_y || !src_uv || !dst_argb || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; + } +#if defined(HAS_NV12TOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + NV12ToARGBRow = NV12ToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + NV12ToARGBRow = NV12ToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_NV12TOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + NV12ToARGBRow = NV12ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + NV12ToARGBRow = NV12ToARGBRow_AVX2; + } + } +#endif +#if defined(HAS_NV12TOARGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + NV12ToARGBRow = NV12ToARGBRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + NV12ToARGBRow = NV12ToARGBRow_NEON; + } + } +#endif +#if defined(HAS_NV12TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + NV12ToARGBRow = NV12ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + NV12ToARGBRow = NV12ToARGBRow_MSA; + } + } +#endif +#if defined(HAS_NV12TOARGBROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + NV12ToARGBRow = NV12ToARGBRow_Any_LSX; + if (IS_ALIGNED(width, 8)) { + NV12ToARGBRow = NV12ToARGBRow_LSX; + } + } +#endif +#if defined(HAS_NV12TOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + NV12ToARGBRow = NV12ToARGBRow_Any_LASX; + if (IS_ALIGNED(width, 16)) { + NV12ToARGBRow = NV12ToARGBRow_LASX; } } #endif for (y = 0; y < height; ++y) { - I422ToRGBARow(src_y, src_u, src_v, dst_rgba, yuvconstants, width); - dst_rgba += dst_stride_rgba; + NV12ToARGBRow(src_y, src_uv, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; src_y += src_stride_y; if (y & 1) { - src_u += src_stride_u; - src_v += src_stride_v; + src_uv += src_stride_uv; } } return 0; } -// Convert I420 to RGBA. -LIBYUV_API -int I420ToRGBA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgba, - int dst_stride_rgba, - int width, - int height) { - return I420ToRGBAMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgba, dst_stride_rgba, - &kYuvI601Constants, width, height); -} - -// Convert I420 to BGRA. -LIBYUV_API -int I420ToBGRA(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_bgra, - int dst_stride_bgra, - int width, - int height) { - return I420ToRGBAMatrix(src_y, src_stride_y, src_v, - src_stride_v, // Swap U and V - src_u, src_stride_u, dst_bgra, dst_stride_bgra, - &kYvuI601Constants, // Use Yvu matrix - width, height); -} - -// Convert I420 to RGB24 with matrix. +// Convert NV21 to ARGB with matrix. 
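Note on the dispatch ladder above: NV12ToARGBMatrix shows the pattern this libyuv update extends with LoongArch LSX/LASX kernels. Each converter starts from the portable C row function, upgrades to the width-agnostic _Any_ SIMD wrapper when the corresponding CPU flag is detected, and picks the exact-width SIMD kernel only when the width meets its alignment requirement. A generic sketch of that selection logic; the names below are illustrative, not libyuv symbols:

/* Illustrative sketch only -- not part of the libyuv patch.              */
/* Generic form of the per-converter kernel selection used above:         */
/* portable C fallback -> "_Any_" SIMD wrapper -> exact-width SIMD kernel. */
#include <stdint.h>

typedef void (*RowFn)(const uint8_t* src, uint8_t* dst, int width);

static RowFn PickRowKernel(int width, int cpu_flag_present,
                           RowFn c_row, RowFn any_simd_row,
                           RowFn aligned_simd_row, int simd_align) {
  RowFn row = c_row;            /* always-correct scalar fallback        */
  if (cpu_flag_present) {
    row = any_simd_row;         /* SIMD body plus scalar tail, any width */
    if (width % simd_align == 0) {
      row = aligned_simd_row;   /* fastest path, no tail handling needed */
    }
  }
  return row;
}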
LIBYUV_API -int I420ToRGB24Matrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - const struct YuvConstants* yuvconstants, - int width, - int height) { +int NV21ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_vu, + int src_stride_vu, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; - void (*I422ToRGB24Row)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = - I422ToRGB24Row_C; - if (!src_y || !src_u || !src_v || !dst_rgb24 || width <= 0 || height == 0) { + void (*NV21ToARGBRow)( + const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = NV21ToARGBRow_C; + assert(yuvconstants); + if (!src_y || !src_vu || !dst_argb || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - dst_rgb24 = dst_rgb24 + (height - 1) * dst_stride_rgb24; - dst_stride_rgb24 = -dst_stride_rgb24; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; } -#if defined(HAS_I422TORGB24ROW_SSSE3) +#if defined(HAS_NV21TOARGBROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToRGB24Row = I422ToRGB24Row_Any_SSSE3; - if (IS_ALIGNED(width, 16)) { - I422ToRGB24Row = I422ToRGB24Row_SSSE3; + NV21ToARGBRow = NV21ToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + NV21ToARGBRow = NV21ToARGBRow_SSSE3; } } #endif -#if defined(HAS_I422TORGB24ROW_AVX2) +#if defined(HAS_NV21TOARGBROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - I422ToRGB24Row = I422ToRGB24Row_Any_AVX2; - if (IS_ALIGNED(width, 32)) { - I422ToRGB24Row = I422ToRGB24Row_AVX2; + NV21ToARGBRow = NV21ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + NV21ToARGBRow = NV21ToARGBRow_AVX2; } } #endif -#if defined(HAS_I422TORGB24ROW_NEON) +#if defined(HAS_NV21TOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - I422ToRGB24Row = I422ToRGB24Row_Any_NEON; + NV21ToARGBRow = NV21ToARGBRow_Any_NEON; if (IS_ALIGNED(width, 8)) { - I422ToRGB24Row = I422ToRGB24Row_NEON; + NV21ToARGBRow = NV21ToARGBRow_NEON; } } #endif -#if defined(HAS_I422TORGB24ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToRGB24Row = I422ToRGB24Row_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422ToRGB24Row = I422ToRGB24Row_MMI; +#if defined(HAS_NV21TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + NV21ToARGBRow = NV21ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + NV21ToARGBRow = NV21ToARGBRow_MSA; } } #endif -#if defined(HAS_I422TORGB24ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToRGB24Row = I422ToRGB24Row_Any_MSA; +#if defined(HAS_NV21TOARGBROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + NV21ToARGBRow = NV21ToARGBRow_Any_LSX; + if (IS_ALIGNED(width, 8)) { + NV21ToARGBRow = NV21ToARGBRow_LSX; + } + } +#endif +#if defined(HAS_NV21TOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + NV21ToARGBRow = NV21ToARGBRow_Any_LASX; if (IS_ALIGNED(width, 16)) { - I422ToRGB24Row = I422ToRGB24Row_MSA; + NV21ToARGBRow = NV21ToARGBRow_LASX; } } #endif for (y = 0; y < height; ++y) { - I422ToRGB24Row(src_y, src_u, src_v, dst_rgb24, yuvconstants, width); - dst_rgb24 += dst_stride_rgb24; + NV21ToARGBRow(src_y, src_vu, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; src_y += src_stride_y; if (y & 1) { - src_u += src_stride_u; - src_v += src_stride_v; + 
src_vu += src_stride_vu; } } return 0; } -// Convert I420 to RGB24. +// Convert NV12 to ARGB. LIBYUV_API -int I420ToRGB24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height) { - return I420ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgb24, dst_stride_rgb24, - &kYuvI601Constants, width, height); +int NV12ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return NV12ToARGBMatrix(src_y, src_stride_y, src_uv, src_stride_uv, dst_argb, + dst_stride_argb, &kYuvI601Constants, width, height); } -// Convert I420 to RAW. +// Convert NV21 to ARGB. LIBYUV_API -int I420ToRAW(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_raw, - int dst_stride_raw, - int width, - int height) { - return I420ToRGB24Matrix(src_y, src_stride_y, src_v, - src_stride_v, // Swap U and V - src_u, src_stride_u, dst_raw, dst_stride_raw, - &kYvuI601Constants, // Use Yvu matrix - width, height); +int NV21ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_vu, + int src_stride_vu, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return NV21ToARGBMatrix(src_y, src_stride_y, src_vu, src_stride_vu, dst_argb, + dst_stride_argb, &kYuvI601Constants, width, height); } -// Convert J420 to RGB24. +// Convert NV12 to ABGR. +// To output ABGR instead of ARGB swap the UV and use a mirrored yuv matrix. +// To swap the UV use NV12 instead of NV21.LIBYUV_API LIBYUV_API -int J420ToRGB24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height) { - return I420ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgb24, dst_stride_rgb24, - &kYuvJPEGConstants, width, height); -} - -// Convert J420 to RAW. -LIBYUV_API -int J420ToRAW(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_raw, - int dst_stride_raw, - int width, - int height) { - return I420ToRGB24Matrix(src_y, src_stride_y, src_v, - src_stride_v, // Swap U and V - src_u, src_stride_u, dst_raw, dst_stride_raw, - &kYvuJPEGConstants, // Use Yvu matrix - width, height); -} - -// Convert H420 to RGB24. -LIBYUV_API -int H420ToRGB24(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb24, - int dst_stride_rgb24, - int width, - int height) { - return I420ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgb24, dst_stride_rgb24, - &kYuvH709Constants, width, height); +int NV12ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return NV21ToARGBMatrix(src_y, src_stride_y, src_uv, src_stride_uv, dst_abgr, + dst_stride_abgr, &kYvuI601Constants, width, height); } -// Convert H420 to RAW. +// Convert NV21 to ABGR. 
LIBYUV_API -int H420ToRAW(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_raw, - int dst_stride_raw, - int width, - int height) { - return I420ToRGB24Matrix(src_y, src_stride_y, src_v, - src_stride_v, // Swap U and V - src_u, src_stride_u, dst_raw, dst_stride_raw, - &kYvuH709Constants, // Use Yvu matrix - width, height); +int NV21ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_vu, + int src_stride_vu, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return NV12ToARGBMatrix(src_y, src_stride_y, src_vu, src_stride_vu, dst_abgr, + dst_stride_abgr, &kYvuI601Constants, width, height); } -// Convert I420 to ARGB1555. +// TODO(fbarchard): Consider SSSE3 2 step conversion. +// Convert NV12 to RGB24 with matrix. LIBYUV_API -int I420ToARGB1555(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb1555, - int dst_stride_argb1555, - int width, - int height) { +int NV12ToRGB24Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; - void (*I422ToARGB1555Row)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width) = I422ToARGB1555Row_C; - if (!src_y || !src_u || !src_v || !dst_argb1555 || width <= 0 || - height == 0) { + void (*NV12ToRGB24Row)( + const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = NV12ToRGB24Row_C; + assert(yuvconstants); + if (!src_y || !src_uv || !dst_rgb24 || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. 
if (height < 0) { height = -height; - dst_argb1555 = dst_argb1555 + (height - 1) * dst_stride_argb1555; - dst_stride_argb1555 = -dst_stride_argb1555; - } -#if defined(HAS_I422TOARGB1555ROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToARGB1555Row = I422ToARGB1555Row_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - I422ToARGB1555Row = I422ToARGB1555Row_SSSE3; - } - } -#endif -#if defined(HAS_I422TOARGB1555ROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - I422ToARGB1555Row = I422ToARGB1555Row_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - I422ToARGB1555Row = I422ToARGB1555Row_AVX2; - } + dst_rgb24 = dst_rgb24 + (height - 1) * dst_stride_rgb24; + dst_stride_rgb24 = -dst_stride_rgb24; } -#endif -#if defined(HAS_I422TOARGB1555ROW_NEON) +#if defined(HAS_NV12TORGB24ROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - I422ToARGB1555Row = I422ToARGB1555Row_Any_NEON; + NV12ToRGB24Row = NV12ToRGB24Row_Any_NEON; if (IS_ALIGNED(width, 8)) { - I422ToARGB1555Row = I422ToARGB1555Row_NEON; + NV12ToRGB24Row = NV12ToRGB24Row_NEON; } } #endif -#if defined(HAS_I422TOARGB1555ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToARGB1555Row = I422ToARGB1555Row_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422ToARGB1555Row = I422ToARGB1555Row_MMI; +#if defined(HAS_NV12TORGB24ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + NV12ToRGB24Row = NV12ToRGB24Row_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + NV12ToRGB24Row = NV12ToRGB24Row_SSSE3; } } #endif -#if defined(HAS_I422TOARGB1555ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToARGB1555Row = I422ToARGB1555Row_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422ToARGB1555Row = I422ToARGB1555Row_MSA; +#if defined(HAS_NV12TORGB24ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + NV12ToRGB24Row = NV12ToRGB24Row_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + NV12ToRGB24Row = NV12ToRGB24Row_AVX2; } } #endif for (y = 0; y < height; ++y) { - I422ToARGB1555Row(src_y, src_u, src_v, dst_argb1555, &kYuvI601Constants, - width); - dst_argb1555 += dst_stride_argb1555; + NV12ToRGB24Row(src_y, src_uv, dst_rgb24, yuvconstants, width); + dst_rgb24 += dst_stride_rgb24; src_y += src_stride_y; if (y & 1) { - src_u += src_stride_u; - src_v += src_stride_v; + src_uv += src_stride_uv; } } return 0; } -// Convert I420 to ARGB4444. +// Convert NV21 to RGB24 with matrix. LIBYUV_API -int I420ToARGB4444(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_argb4444, - int dst_stride_argb4444, - int width, - int height) { +int NV21ToRGB24Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_vu, + int src_stride_vu, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height) { int y; - void (*I422ToARGB4444Row)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width) = I422ToARGB4444Row_C; - if (!src_y || !src_u || !src_v || !dst_argb4444 || width <= 0 || - height == 0) { + void (*NV21ToRGB24Row)( + const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = NV21ToRGB24Row_C; + assert(yuvconstants); + if (!src_y || !src_vu || !dst_rgb24 || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. 
if (height < 0) { height = -height; - dst_argb4444 = dst_argb4444 + (height - 1) * dst_stride_argb4444; - dst_stride_argb4444 = -dst_stride_argb4444; - } -#if defined(HAS_I422TOARGB4444ROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToARGB4444Row = I422ToARGB4444Row_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - I422ToARGB4444Row = I422ToARGB4444Row_SSSE3; - } - } -#endif -#if defined(HAS_I422TOARGB4444ROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - I422ToARGB4444Row = I422ToARGB4444Row_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - I422ToARGB4444Row = I422ToARGB4444Row_AVX2; - } + dst_rgb24 = dst_rgb24 + (height - 1) * dst_stride_rgb24; + dst_stride_rgb24 = -dst_stride_rgb24; } -#endif -#if defined(HAS_I422TOARGB4444ROW_NEON) +#if defined(HAS_NV21TORGB24ROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - I422ToARGB4444Row = I422ToARGB4444Row_Any_NEON; + NV21ToRGB24Row = NV21ToRGB24Row_Any_NEON; if (IS_ALIGNED(width, 8)) { - I422ToARGB4444Row = I422ToARGB4444Row_NEON; + NV21ToRGB24Row = NV21ToRGB24Row_NEON; } } #endif -#if defined(HAS_I422TOARGB4444ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToARGB4444Row = I422ToARGB4444Row_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422ToARGB4444Row = I422ToARGB4444Row_MMI; +#if defined(HAS_NV21TORGB24ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + NV21ToRGB24Row = NV21ToRGB24Row_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + NV21ToRGB24Row = NV21ToRGB24Row_SSSE3; } } #endif -#if defined(HAS_I422TOARGB4444ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToARGB4444Row = I422ToARGB4444Row_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422ToARGB4444Row = I422ToARGB4444Row_MSA; +#if defined(HAS_NV21TORGB24ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + NV21ToRGB24Row = NV21ToRGB24Row_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + NV21ToRGB24Row = NV21ToRGB24Row_AVX2; } } #endif for (y = 0; y < height; ++y) { - I422ToARGB4444Row(src_y, src_u, src_v, dst_argb4444, &kYuvI601Constants, - width); - dst_argb4444 += dst_stride_argb4444; + NV21ToRGB24Row(src_y, src_vu, dst_rgb24, yuvconstants, width); + dst_rgb24 += dst_stride_rgb24; src_y += src_stride_y; if (y & 1) { - src_u += src_stride_u; - src_v += src_stride_v; + src_vu += src_stride_vu; } } return 0; } -// Convert I420 to RGB565 with specified color matrix. +// Convert NV12 to RGB24. LIBYUV_API -int I420ToRGB565Matrix(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - const struct YuvConstants* yuvconstants, - int width, - int height) { - int y; - void (*I422ToRGB565Row)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = - I422ToRGB565Row_C; - if (!src_y || !src_u || !src_v || !dst_rgb565 || width <= 0 || height == 0) { - return -1; - } +int NV12ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return NV12ToRGB24Matrix(src_y, src_stride_y, src_uv, src_stride_uv, + dst_rgb24, dst_stride_rgb24, &kYuvI601Constants, + width, height); +} + +// Convert NV21 to RGB24. 
+LIBYUV_API +int NV21ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_vu, + int src_stride_vu, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return NV21ToRGB24Matrix(src_y, src_stride_y, src_vu, src_stride_vu, + dst_rgb24, dst_stride_rgb24, &kYuvI601Constants, + width, height); +} + +// Convert NV12 to RAW. +LIBYUV_API +int NV12ToRAW(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_raw, + int dst_stride_raw, + int width, + int height) { + return NV21ToRGB24Matrix(src_y, src_stride_y, src_uv, src_stride_uv, dst_raw, + dst_stride_raw, &kYvuI601Constants, width, height); +} + +// Convert NV21 to RAW. +LIBYUV_API +int NV21ToRAW(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_vu, + int src_stride_vu, + uint8_t* dst_raw, + int dst_stride_raw, + int width, + int height) { + return NV12ToRGB24Matrix(src_y, src_stride_y, src_vu, src_stride_vu, dst_raw, + dst_stride_raw, &kYvuI601Constants, width, height); +} + +// Convert NV21 to YUV24 +int NV21ToYUV24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_vu, + int src_stride_vu, + uint8_t* dst_yuv24, + int dst_stride_yuv24, + int width, + int height) { + int y; + void (*NV21ToYUV24Row)(const uint8_t* src_y, const uint8_t* src_vu, + uint8_t* dst_yuv24, int width) = NV21ToYUV24Row_C; + if (!src_y || !src_vu || !dst_yuv24 || width <= 0 || height == 0) { + return -1; + } // Negative height means invert the image. if (height < 0) { height = -height; - dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; - dst_stride_rgb565 = -dst_stride_rgb565; - } -#if defined(HAS_I422TORGB565ROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToRGB565Row = I422ToRGB565Row_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - I422ToRGB565Row = I422ToRGB565Row_SSSE3; - } - } -#endif -#if defined(HAS_I422TORGB565ROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - I422ToRGB565Row = I422ToRGB565Row_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - I422ToRGB565Row = I422ToRGB565Row_AVX2; - } + dst_yuv24 = dst_yuv24 + (height - 1) * dst_stride_yuv24; + dst_stride_yuv24 = -dst_stride_yuv24; } -#endif -#if defined(HAS_I422TORGB565ROW_NEON) +#if defined(HAS_NV21TOYUV24ROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - I422ToRGB565Row = I422ToRGB565Row_Any_NEON; - if (IS_ALIGNED(width, 8)) { - I422ToRGB565Row = I422ToRGB565Row_NEON; + NV21ToYUV24Row = NV21ToYUV24Row_Any_NEON; + if (IS_ALIGNED(width, 16)) { + NV21ToYUV24Row = NV21ToYUV24Row_NEON; } } #endif -#if defined(HAS_I422TORGB565ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToRGB565Row = I422ToRGB565Row_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422ToRGB565Row = I422ToRGB565Row_MMI; +#if defined(HAS_NV21TOYUV24ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + NV21ToYUV24Row = NV21ToYUV24Row_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + NV21ToYUV24Row = NV21ToYUV24Row_SSSE3; } } #endif -#if defined(HAS_I422TORGB565ROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - I422ToRGB565Row = I422ToRGB565Row_Any_MSA; - if (IS_ALIGNED(width, 8)) { - I422ToRGB565Row = I422ToRGB565Row_MSA; +#if defined(HAS_NV21TOYUV24ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + NV21ToYUV24Row = NV21ToYUV24Row_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + NV21ToYUV24Row = NV21ToYUV24Row_AVX2; } } #endif - for (y = 0; y < height; ++y) { - I422ToRGB565Row(src_y, src_u, src_v, dst_rgb565, yuvconstants, width); - dst_rgb565 += dst_stride_rgb565; + NV21ToYUV24Row(src_y, src_vu, dst_yuv24, width); + dst_yuv24 += dst_stride_yuv24; src_y += src_stride_y; if (y & 1) { - 
src_u += src_stride_u; - src_v += src_stride_v; + src_vu += src_stride_vu; } } return 0; } -// Convert I420 to RGB565. -LIBYUV_API -int I420ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height) { - return I420ToRGB565Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgb565, dst_stride_rgb565, - &kYuvI601Constants, width, height); -} - -// Convert J420 to RGB565. -LIBYUV_API -int J420ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height) { - return I420ToRGB565Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgb565, dst_stride_rgb565, - &kYuvJPEGConstants, width, height); -} - -// Convert H420 to RGB565. -LIBYUV_API -int H420ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height) { - return I420ToRGB565Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_rgb565, dst_stride_rgb565, - &kYuvH709Constants, width, height); -} - -// Convert I422 to RGB565. +// Convert YUY2 to ARGB. LIBYUV_API -int I422ToRGB565(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - int width, - int height) { +int YUY2ToARGB(const uint8_t* src_yuy2, + int src_stride_yuy2, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { int y; - void (*I422ToRGB565Row)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, int width) = - I422ToRGB565Row_C; - if (!src_y || !src_u || !src_v || !dst_rgb565 || width <= 0 || height == 0) { + void (*YUY2ToARGBRow)(const uint8_t* src_yuy2, uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, int width) = + YUY2ToARGBRow_C; + if (!src_yuy2 || !dst_argb || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; - dst_stride_rgb565 = -dst_stride_rgb565; + src_yuy2 = src_yuy2 + (height - 1) * src_stride_yuy2; + src_stride_yuy2 = -src_stride_yuy2; } -#if defined(HAS_I422TORGB565ROW_SSSE3) + // Coalesce rows. 
+ if (src_stride_yuy2 == width * 2 && dst_stride_argb == width * 4) { + width *= height; + height = 1; + src_stride_yuy2 = dst_stride_argb = 0; + } +#if defined(HAS_YUY2TOARGBROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToRGB565Row = I422ToRGB565Row_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - I422ToRGB565Row = I422ToRGB565Row_SSSE3; + YUY2ToARGBRow = YUY2ToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + YUY2ToARGBRow = YUY2ToARGBRow_SSSE3; } } #endif -#if defined(HAS_I422TORGB565ROW_AVX2) +#if defined(HAS_YUY2TOARGBROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - I422ToRGB565Row = I422ToRGB565Row_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - I422ToRGB565Row = I422ToRGB565Row_AVX2; + YUY2ToARGBRow = YUY2ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + YUY2ToARGBRow = YUY2ToARGBRow_AVX2; } } #endif -#if defined(HAS_I422TORGB565ROW_NEON) +#if defined(HAS_YUY2TOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - I422ToRGB565Row = I422ToRGB565Row_Any_NEON; + YUY2ToARGBRow = YUY2ToARGBRow_Any_NEON; if (IS_ALIGNED(width, 8)) { - I422ToRGB565Row = I422ToRGB565Row_NEON; + YUY2ToARGBRow = YUY2ToARGBRow_NEON; } } #endif -#if defined(HAS_I422TORGB565ROW_MSA) +#if defined(HAS_YUY2TOARGBROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { - I422ToRGB565Row = I422ToRGB565Row_Any_MSA; + YUY2ToARGBRow = YUY2ToARGBRow_Any_MSA; if (IS_ALIGNED(width, 8)) { - I422ToRGB565Row = I422ToRGB565Row_MSA; + YUY2ToARGBRow = YUY2ToARGBRow_MSA; + } + } +#endif +#if defined(HAS_YUY2TOARGBROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + YUY2ToARGBRow = YUY2ToARGBRow_Any_LSX; + if (IS_ALIGNED(width, 8)) { + YUY2ToARGBRow = YUY2ToARGBRow_LSX; } } #endif - for (y = 0; y < height; ++y) { - I422ToRGB565Row(src_y, src_u, src_v, dst_rgb565, &kYuvI601Constants, width); - dst_rgb565 += dst_stride_rgb565; - src_y += src_stride_y; - src_u += src_stride_u; - src_v += src_stride_v; + YUY2ToARGBRow(src_yuy2, dst_argb, &kYuvI601Constants, width); + src_yuy2 += src_stride_yuy2; + dst_argb += dst_stride_argb; } return 0; } -// Ordered 8x8 dither for 888 to 565. Values from 0 to 7. -static const uint8_t kDither565_4x4[16] = { - 0, 4, 1, 5, 6, 2, 7, 3, 1, 5, 0, 4, 7, 3, 6, 2, -}; - -// Convert I420 to RGB565 with dithering. +// Convert UYVY to ARGB. LIBYUV_API -int I420ToRGB565Dither(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_u, - int src_stride_u, - const uint8_t* src_v, - int src_stride_v, - uint8_t* dst_rgb565, - int dst_stride_rgb565, - const uint8_t* dither4x4, - int width, - int height) { +int UYVYToARGB(const uint8_t* src_uyvy, + int src_stride_uyvy, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { int y; - void (*I422ToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, - const uint8_t* v_buf, uint8_t* rgb_buf, + void (*UYVYToARGBRow)(const uint8_t* src_uyvy, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width) = - I422ToARGBRow_C; - void (*ARGBToRGB565DitherRow)(const uint8_t* src_argb, uint8_t* dst_rgb, - const uint32_t dither4, int width) = - ARGBToRGB565DitherRow_C; - if (!src_y || !src_u || !src_v || !dst_rgb565 || width <= 0 || height == 0) { + UYVYToARGBRow_C; + if (!src_uyvy || !dst_argb || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; - dst_stride_rgb565 = -dst_stride_rgb565; + src_uyvy = src_uyvy + (height - 1) * src_stride_uyvy; + src_stride_uyvy = -src_stride_uyvy; } - if (!dither4x4) { - dither4x4 = kDither565_4x4; + // Coalesce rows. 
+ if (src_stride_uyvy == width * 2 && dst_stride_argb == width * 4) { + width *= height; + height = 1; + src_stride_uyvy = dst_stride_argb = 0; } -#if defined(HAS_I422TOARGBROW_SSSE3) +#if defined(HAS_UYVYTOARGBROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToARGBRow = I422ToARGBRow_Any_SSSE3; - if (IS_ALIGNED(width, 8)) { - I422ToARGBRow = I422ToARGBRow_SSSE3; + UYVYToARGBRow = UYVYToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + UYVYToARGBRow = UYVYToARGBRow_SSSE3; } } #endif -#if defined(HAS_I422TOARGBROW_AVX2) +#if defined(HAS_UYVYTOARGBROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - I422ToARGBRow = I422ToARGBRow_Any_AVX2; - if (IS_ALIGNED(width, 16)) { - I422ToARGBRow = I422ToARGBRow_AVX2; + UYVYToARGBRow = UYVYToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + UYVYToARGBRow = UYVYToARGBRow_AVX2; } } #endif -#if defined(HAS_I422TOARGBROW_NEON) +#if defined(HAS_UYVYTOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - I422ToARGBRow = I422ToARGBRow_Any_NEON; + UYVYToARGBRow = UYVYToARGBRow_Any_NEON; if (IS_ALIGNED(width, 8)) { - I422ToARGBRow = I422ToARGBRow_NEON; - } - } -#endif -#if defined(HAS_I422TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToARGBRow = I422ToARGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - I422ToARGBRow = I422ToARGBRow_MMI; + UYVYToARGBRow = UYVYToARGBRow_NEON; } } #endif -#if defined(HAS_I422TOARGBROW_MSA) +#if defined(HAS_UYVYTOARGBROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { - I422ToARGBRow = I422ToARGBRow_Any_MSA; + UYVYToARGBRow = UYVYToARGBRow_Any_MSA; if (IS_ALIGNED(width, 8)) { - I422ToARGBRow = I422ToARGBRow_MSA; - } - } -#endif -#if defined(HAS_ARGBTORGB565DITHERROW_SSE2) - if (TestCpuFlag(kCpuHasSSE2)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_SSE2; - if (IS_ALIGNED(width, 4)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_SSE2; + UYVYToARGBRow = UYVYToARGBRow_MSA; } } #endif -#if defined(HAS_ARGBTORGB565DITHERROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_AVX2; +#if defined(HAS_UYVYTOARGBROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + UYVYToARGBRow = UYVYToARGBRow_Any_LSX; if (IS_ALIGNED(width, 8)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_AVX2; + UYVYToARGBRow = UYVYToARGBRow_LSX; } } #endif -#if defined(HAS_ARGBTORGB565DITHERROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_NEON; - } + for (y = 0; y < height; ++y) { + UYVYToARGBRow(src_uyvy, dst_argb, &kYuvI601Constants, width); + src_uyvy += src_stride_uyvy; + dst_argb += dst_stride_argb; } -#endif -#if defined(HAS_ARGBTORGB565DITHERROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_MMI; - } + return 0; +} +static void WeavePixels(const uint8_t* src_u, + const uint8_t* src_v, + int src_pixel_stride_uv, + uint8_t* dst_uv, + int width) { + int i; + for (i = 0; i < width; ++i) { + dst_uv[0] = *src_u; + dst_uv[1] = *src_v; + dst_uv += 2; + src_u += src_pixel_stride_uv; + src_v += src_pixel_stride_uv; } -#endif -#if defined(HAS_ARGBTORGB565DITHERROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_MSA; - if (IS_ALIGNED(width, 8)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_MSA; - } +} + +// Convert Android420 to ARGB with matrix. 
+LIBYUV_API +int Android420ToARGBMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + int src_pixel_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + uint8_t* dst_uv; + const ptrdiff_t vu_off = src_v - src_u; + int halfwidth = (width + 1) >> 1; + int halfheight = (height + 1) >> 1; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) { + return -1; } -#endif - { - // Allocate a row of argb. - align_buffer_64(row_argb, width * 4); - for (y = 0; y < height; ++y) { - I422ToARGBRow(src_y, src_u, src_v, row_argb, &kYuvI601Constants, width); - ARGBToRGB565DitherRow(row_argb, dst_rgb565, - *(const uint32_t*)(dither4x4 + ((y & 3) << 2)), - width); - dst_rgb565 += dst_stride_rgb565; - src_y += src_stride_y; - if (y & 1) { - src_u += src_stride_u; - src_v += src_stride_v; - } - } - free_aligned_buffer_64(row_argb); + // Negative height means invert the image. + if (height < 0) { + height = -height; + halfheight = (height + 1) >> 1; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; + } + + // I420 + if (src_pixel_stride_uv == 1) { + return I420ToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, + yuvconstants, width, height); + // NV21 + } + if (src_pixel_stride_uv == 2 && vu_off == -1 && + src_stride_u == src_stride_v) { + return NV21ToARGBMatrix(src_y, src_stride_y, src_v, src_stride_v, dst_argb, + dst_stride_argb, yuvconstants, width, height); + // NV12 + } + if (src_pixel_stride_uv == 2 && vu_off == 1 && src_stride_u == src_stride_v) { + return NV12ToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, dst_argb, + dst_stride_argb, yuvconstants, width, height); + } + + // General case fallback creates NV12 + align_buffer_64(plane_uv, halfwidth * 2 * halfheight); + dst_uv = plane_uv; + for (y = 0; y < halfheight; ++y) { + WeavePixels(src_u, src_v, src_pixel_stride_uv, dst_uv, halfwidth); + src_u += src_stride_u; + src_v += src_stride_v; + dst_uv += halfwidth * 2; } + NV12ToARGBMatrix(src_y, src_stride_y, plane_uv, halfwidth * 2, dst_argb, + dst_stride_argb, yuvconstants, width, height); + free_aligned_buffer_64(plane_uv); return 0; } -// Convert I420 to AR30 with matrix. +// Convert Android420 to ARGB. LIBYUV_API -int I420ToAR30Matrix(const uint8_t* src_y, +int Android420ToARGB(const uint8_t* src_y, int src_stride_y, const uint8_t* src_u, int src_stride_u, const uint8_t* src_v, int src_stride_v, - uint8_t* dst_ar30, - int dst_stride_ar30, + int src_pixel_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return Android420ToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, src_pixel_stride_uv, dst_argb, + dst_stride_argb, &kYuvI601Constants, width, + height); +} + +// Convert Android420 to ABGR. +LIBYUV_API +int Android420ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + int src_pixel_stride_uv, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return Android420ToARGBMatrix(src_y, src_stride_y, src_v, src_stride_v, src_u, + src_stride_u, src_pixel_stride_uv, dst_abgr, + dst_stride_abgr, &kYvuI601Constants, width, + height); +} + +// Convert I422 to RGBA with matrix. 
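Note on Android420ToARGBMatrix above (re-added here with an assert on the constants pointer): it dispatches on the chroma pixel stride -- 1 selects the planar I420 path, 2 with adjacent V/U or U/V pointers selects NV21 or NV12, and any other layout is woven into a temporary NV12 plane via WeavePixels before conversion. A hypothetical caller sketch for the Android420ToARGB wrapper follows; the plane pointers, strides and pixel stride (e.g. pulled from a YUV_420_888 frame) are assumed inputs, not anything defined by this patch:

/* Hypothetical usage sketch -- not part of the libyuv patch.           */
/* Feeds the three planes of a YUV_420_888 frame to Android420ToARGB,   */
/* which picks the I420 / NV12 / NV21 fast path from uv_pixel_stride.   */
#include <stdint.h>
#include "libyuv/convert_argb.h"  /* assumed header declaring Android420ToARGB */

int FrameToARGB_Sketch(const uint8_t* y_plane, int y_stride,
                       const uint8_t* u_plane, int u_stride,
                       const uint8_t* v_plane, int v_stride,
                       int uv_pixel_stride,
                       uint8_t* dst_argb, int dst_stride,
                       int width, int height) {
  return Android420ToARGB(y_plane, y_stride,
                          u_plane, u_stride,
                          v_plane, v_stride,
                          uv_pixel_stride,
                          dst_argb, dst_stride,
                          width, height);
}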
+LIBYUV_API +int I422ToRGBAMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, const struct YuvConstants* yuvconstants, int width, int height) { int y; - void (*I422ToAR30Row)(const uint8_t* y_buf, const uint8_t* u_buf, + void (*I422ToRGBARow)(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* rgb_buf, const struct YuvConstants* yuvconstants, int width) = - I422ToAR30Row_C; - - if (!src_y || !src_u || !src_v || !dst_ar30 || width <= 0 || height == 0) { + I422ToRGBARow_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_rgba || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30; - dst_stride_ar30 = -dst_stride_ar30; + dst_rgba = dst_rgba + (height - 1) * dst_stride_rgba; + dst_stride_rgba = -dst_stride_rgba; } - -#if defined(HAS_I422TOAR30ROW_SSSE3) +#if defined(HAS_I422TORGBAROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - I422ToAR30Row = I422ToAR30Row_Any_SSSE3; + I422ToRGBARow = I422ToRGBARow_Any_SSSE3; if (IS_ALIGNED(width, 8)) { - I422ToAR30Row = I422ToAR30Row_SSSE3; + I422ToRGBARow = I422ToRGBARow_SSSE3; } } #endif -#if defined(HAS_I422TOAR30ROW_AVX2) +#if defined(HAS_I422TORGBAROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - I422ToAR30Row = I422ToAR30Row_Any_AVX2; + I422ToRGBARow = I422ToRGBARow_Any_AVX2; if (IS_ALIGNED(width, 16)) { - I422ToAR30Row = I422ToAR30Row_AVX2; + I422ToRGBARow = I422ToRGBARow_AVX2; + } + } +#endif +#if defined(HAS_I422TORGBAROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToRGBARow = I422ToRGBARow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToRGBARow = I422ToRGBARow_NEON; + } + } +#endif +#if defined(HAS_I422TORGBAROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToRGBARow = I422ToRGBARow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422ToRGBARow = I422ToRGBARow_MSA; + } + } +#endif +#if defined(HAS_I422TORGBAROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToRGBARow = I422ToRGBARow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I422ToRGBARow = I422ToRGBARow_LASX; } } #endif for (y = 0; y < height; ++y) { - I422ToAR30Row(src_y, src_u, src_v, dst_ar30, yuvconstants, width); - dst_ar30 += dst_stride_ar30; + I422ToRGBARow(src_y, src_u, src_v, dst_rgba, yuvconstants, width); + dst_rgba += dst_stride_rgba; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + return 0; +} + +// Convert I422 to RGBA. +LIBYUV_API +int I422ToRGBA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + int width, + int height) { + return I422ToRGBAMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgba, dst_stride_rgba, + &kYuvI601Constants, width, height); +} + +// Convert I422 to BGRA. +LIBYUV_API +int I422ToBGRA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_bgra, + int dst_stride_bgra, + int width, + int height) { + return I422ToRGBAMatrix(src_y, src_stride_y, src_v, + src_stride_v, // Swap U and V + src_u, src_stride_u, dst_bgra, dst_stride_bgra, + &kYvuI601Constants, // Use Yvu matrix + width, height); +} + +// Convert NV12 to RGB565 with matrix. 
+LIBYUV_API +int NV12ToRGB565Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*NV12ToRGB565Row)( + const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = NV12ToRGB565Row_C; + assert(yuvconstants); + if (!src_y || !src_uv || !dst_rgb565 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; + dst_stride_rgb565 = -dst_stride_rgb565; + } +#if defined(HAS_NV12TORGB565ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + NV12ToRGB565Row = NV12ToRGB565Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + NV12ToRGB565Row = NV12ToRGB565Row_SSSE3; + } + } +#endif +#if defined(HAS_NV12TORGB565ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + NV12ToRGB565Row = NV12ToRGB565Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + NV12ToRGB565Row = NV12ToRGB565Row_AVX2; + } + } +#endif +#if defined(HAS_NV12TORGB565ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + NV12ToRGB565Row = NV12ToRGB565Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + NV12ToRGB565Row = NV12ToRGB565Row_NEON; + } + } +#endif +#if defined(HAS_NV12TORGB565ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + NV12ToRGB565Row = NV12ToRGB565Row_Any_MSA; + if (IS_ALIGNED(width, 8)) { + NV12ToRGB565Row = NV12ToRGB565Row_MSA; + } + } +#endif +#if defined(HAS_NV12TORGB565ROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + NV12ToRGB565Row = NV12ToRGB565Row_Any_LSX; + if (IS_ALIGNED(width, 8)) { + NV12ToRGB565Row = NV12ToRGB565Row_LSX; + } + } +#endif +#if defined(HAS_NV12TORGB565ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + NV12ToRGB565Row = NV12ToRGB565Row_Any_LASX; + if (IS_ALIGNED(width, 16)) { + NV12ToRGB565Row = NV12ToRGB565Row_LASX; + } + } +#endif + + for (y = 0; y < height; ++y) { + NV12ToRGB565Row(src_y, src_uv, dst_rgb565, yuvconstants, width); + dst_rgb565 += dst_stride_rgb565; + src_y += src_stride_y; + if (y & 1) { + src_uv += src_stride_uv; + } + } + return 0; +} + +// Convert NV12 to RGB565. +LIBYUV_API +int NV12ToRGB565(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + int width, + int height) { + return NV12ToRGB565Matrix(src_y, src_stride_y, src_uv, src_stride_uv, + dst_rgb565, dst_stride_rgb565, &kYuvI601Constants, + width, height); +} + +// Convert I422 to RGBA with matrix. +LIBYUV_API +int I420ToRGBAMatrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I422ToRGBARow)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I422ToRGBARow_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_rgba || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_rgba = dst_rgba + (height - 1) * dst_stride_rgba; + dst_stride_rgba = -dst_stride_rgba; + } +#if defined(HAS_I422TORGBAROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToRGBARow = I422ToRGBARow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422ToRGBARow = I422ToRGBARow_SSSE3; + } + } +#endif +#if defined(HAS_I422TORGBAROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToRGBARow = I422ToRGBARow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I422ToRGBARow = I422ToRGBARow_AVX2; + } + } +#endif +#if defined(HAS_I422TORGBAROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToRGBARow = I422ToRGBARow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToRGBARow = I422ToRGBARow_NEON; + } + } +#endif +#if defined(HAS_I422TORGBAROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToRGBARow = I422ToRGBARow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422ToRGBARow = I422ToRGBARow_MSA; + } + } +#endif +#if defined(HAS_I422TORGBAROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToRGBARow = I422ToRGBARow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I422ToRGBARow = I422ToRGBARow_LASX; + } + } +#endif + + for (y = 0; y < height; ++y) { + I422ToRGBARow(src_y, src_u, src_v, dst_rgba, yuvconstants, width); + dst_rgba += dst_stride_rgba; src_y += src_stride_y; if (y & 1) { src_u += src_stride_u; @@ -4085,38 +4674,3312 @@ int I420ToAR30Matrix(const uint8_t* src_y, return 0; } -// Convert I420 to AR30. +// Convert I420 to RGBA. LIBYUV_API -int I420ToAR30(const uint8_t* src_y, +int I420ToRGBA(const uint8_t* src_y, int src_stride_y, const uint8_t* src_u, int src_stride_u, const uint8_t* src_v, int src_stride_v, - uint8_t* dst_ar30, - int dst_stride_ar30, + uint8_t* dst_rgba, + int dst_stride_rgba, int width, int height) { - return I420ToAR30Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_ar30, dst_stride_ar30, + return I420ToRGBAMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgba, dst_stride_rgba, &kYuvI601Constants, width, height); } -// Convert H420 to AR30. +// Convert I420 to BGRA. LIBYUV_API -int H420ToAR30(const uint8_t* src_y, +int I420ToBGRA(const uint8_t* src_y, int src_stride_y, const uint8_t* src_u, int src_stride_u, const uint8_t* src_v, int src_stride_v, - uint8_t* dst_ar30, - int dst_stride_ar30, + uint8_t* dst_bgra, + int dst_stride_bgra, int width, int height) { - return I420ToAR30Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, - src_stride_v, dst_ar30, dst_stride_ar30, - &kYvuH709Constants, width, height); + return I420ToRGBAMatrix(src_y, src_stride_y, src_v, + src_stride_v, // Swap U and V + src_u, src_stride_u, dst_bgra, dst_stride_bgra, + &kYvuI601Constants, // Use Yvu matrix + width, height); +} + +// Convert I420 to RGB24 with matrix. +LIBYUV_API +int I420ToRGB24Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I422ToRGB24Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I422ToRGB24Row_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_rgb24 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_rgb24 = dst_rgb24 + (height - 1) * dst_stride_rgb24; + dst_stride_rgb24 = -dst_stride_rgb24; + } +#if defined(HAS_I422TORGB24ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToRGB24Row = I422ToRGB24Row_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + I422ToRGB24Row = I422ToRGB24Row_SSSE3; + } + } +#endif +#if defined(HAS_I422TORGB24ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToRGB24Row = I422ToRGB24Row_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + I422ToRGB24Row = I422ToRGB24Row_AVX2; + } + } +#endif +#if defined(HAS_I422TORGB24ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToRGB24Row = I422ToRGB24Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToRGB24Row = I422ToRGB24Row_NEON; + } + } +#endif +#if defined(HAS_I422TORGB24ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToRGB24Row = I422ToRGB24Row_Any_MSA; + if (IS_ALIGNED(width, 16)) { + I422ToRGB24Row = I422ToRGB24Row_MSA; + } + } +#endif +#if defined(HAS_I422TORGB24ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToRGB24Row = I422ToRGB24Row_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I422ToRGB24Row = I422ToRGB24Row_LASX; + } + } +#endif + + for (y = 0; y < height; ++y) { + I422ToRGB24Row(src_y, src_u, src_v, dst_rgb24, yuvconstants, width); + dst_rgb24 += dst_stride_rgb24; + src_y += src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } + } + return 0; +} + +// Convert I420 to RGB24. +LIBYUV_API +int I420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return I420ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb24, dst_stride_rgb24, + &kYuvI601Constants, width, height); +} + +// Convert I420 to RAW. +LIBYUV_API +int I420ToRAW(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_raw, + int dst_stride_raw, + int width, + int height) { + return I420ToRGB24Matrix(src_y, src_stride_y, src_v, + src_stride_v, // Swap U and V + src_u, src_stride_u, dst_raw, dst_stride_raw, + &kYvuI601Constants, // Use Yvu matrix + width, height); +} + +// Convert J420 to RGB24. +LIBYUV_API +int J420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return I420ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb24, dst_stride_rgb24, + &kYuvJPEGConstants, width, height); +} + +// Convert J420 to RAW. +LIBYUV_API +int J420ToRAW(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_raw, + int dst_stride_raw, + int width, + int height) { + return I420ToRGB24Matrix(src_y, src_stride_y, src_v, + src_stride_v, // Swap U and V + src_u, src_stride_u, dst_raw, dst_stride_raw, + &kYvuJPEGConstants, // Use Yvu matrix + width, height); +} + +// Convert H420 to RGB24. 
+LIBYUV_API +int H420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return I420ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb24, dst_stride_rgb24, + &kYuvH709Constants, width, height); +} + +// Convert H420 to RAW. +LIBYUV_API +int H420ToRAW(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_raw, + int dst_stride_raw, + int width, + int height) { + return I420ToRGB24Matrix(src_y, src_stride_y, src_v, + src_stride_v, // Swap U and V + src_u, src_stride_u, dst_raw, dst_stride_raw, + &kYvuH709Constants, // Use Yvu matrix + width, height); +} + +// Convert I422 to RGB24 with matrix. +LIBYUV_API +int I422ToRGB24Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I422ToRGB24Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I422ToRGB24Row_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_rgb24 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_rgb24 = dst_rgb24 + (height - 1) * dst_stride_rgb24; + dst_stride_rgb24 = -dst_stride_rgb24; + } +#if defined(HAS_I422TORGB24ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToRGB24Row = I422ToRGB24Row_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + I422ToRGB24Row = I422ToRGB24Row_SSSE3; + } + } +#endif +#if defined(HAS_I422TORGB24ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToRGB24Row = I422ToRGB24Row_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + I422ToRGB24Row = I422ToRGB24Row_AVX2; + } + } +#endif +#if defined(HAS_I422TORGB24ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToRGB24Row = I422ToRGB24Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToRGB24Row = I422ToRGB24Row_NEON; + } + } +#endif +#if defined(HAS_I422TORGB24ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToRGB24Row = I422ToRGB24Row_Any_MSA; + if (IS_ALIGNED(width, 16)) { + I422ToRGB24Row = I422ToRGB24Row_MSA; + } + } +#endif +#if defined(HAS_I422TORGB24ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToRGB24Row = I422ToRGB24Row_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I422ToRGB24Row = I422ToRGB24Row_LASX; + } + } +#endif + + for (y = 0; y < height; ++y) { + I422ToRGB24Row(src_y, src_u, src_v, dst_rgb24, yuvconstants, width); + dst_rgb24 += dst_stride_rgb24; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + return 0; +} + +// Convert I422 to RGB24. +LIBYUV_API +int I422ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return I422ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb24, dst_stride_rgb24, + &kYuvI601Constants, width, height); +} + +// Convert I422 to RAW. 
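/*
 * The only difference between the I420 and I422 loops above is the chroma
 * row stepping: 4:2:0 sources share one U/V row between two Y rows, so the
 * I420 paths advance src_u/src_v every other iteration, while 4:2:2 sources
 * carry a U/V row per Y row and advance them unconditionally:
 *
 *   if (y & 1) { src_u += src_stride_u; src_v += src_stride_v; }  // I420
 *   src_u += src_stride_u; src_v += src_stride_v;                 // I422
 */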
+LIBYUV_API +int I422ToRAW(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_raw, + int dst_stride_raw, + int width, + int height) { + return I422ToRGB24Matrix(src_y, src_stride_y, src_v, + src_stride_v, // Swap U and V + src_u, src_stride_u, dst_raw, dst_stride_raw, + &kYvuI601Constants, // Use Yvu matrix + width, height); +} + +// Convert I420 to ARGB1555. +LIBYUV_API +int I420ToARGB1555(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb1555, + int dst_stride_argb1555, + int width, + int height) { + int y; + void (*I422ToARGB1555Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width) = I422ToARGB1555Row_C; + if (!src_y || !src_u || !src_v || !dst_argb1555 || width <= 0 || + height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_argb1555 = dst_argb1555 + (height - 1) * dst_stride_argb1555; + dst_stride_argb1555 = -dst_stride_argb1555; + } +#if defined(HAS_I422TOARGB1555ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToARGB1555Row = I422ToARGB1555Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422ToARGB1555Row = I422ToARGB1555Row_SSSE3; + } + } +#endif +#if defined(HAS_I422TOARGB1555ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToARGB1555Row = I422ToARGB1555Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I422ToARGB1555Row = I422ToARGB1555Row_AVX2; + } + } +#endif +#if defined(HAS_I422TOARGB1555ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToARGB1555Row = I422ToARGB1555Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToARGB1555Row = I422ToARGB1555Row_NEON; + } + } +#endif +#if defined(HAS_I422TOARGB1555ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToARGB1555Row = I422ToARGB1555Row_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422ToARGB1555Row = I422ToARGB1555Row_MSA; + } + } +#endif +#if defined(HAS_I422TOARGB1555ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToARGB1555Row = I422ToARGB1555Row_Any_LASX; + if (IS_ALIGNED(width, 8)) { + I422ToARGB1555Row = I422ToARGB1555Row_LASX; + } + } +#endif + + for (y = 0; y < height; ++y) { + I422ToARGB1555Row(src_y, src_u, src_v, dst_argb1555, &kYuvI601Constants, + width); + dst_argb1555 += dst_stride_argb1555; + src_y += src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } + } + return 0; +} + +// Convert I420 to ARGB4444. +LIBYUV_API +int I420ToARGB4444(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb4444, + int dst_stride_argb4444, + int width, + int height) { + int y; + void (*I422ToARGB4444Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width) = I422ToARGB4444Row_C; + if (!src_y || !src_u || !src_v || !dst_argb4444 || width <= 0 || + height == 0) { + return -1; + } + // Negative height means invert the image. 
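  /*
   * ARGB1555 and ARGB4444 are 16-bit packed outputs: 1 bit of alpha plus
   * 5 bits per colour channel, and 4 bits per channel, respectively, so both
   * use two bytes per pixel.  The packing itself happens in the row
   * functions; roughly, for ARGB1555:
   *
   *   uint16_t px = (a >> 7) << 15 | (r >> 3) << 10 | (g >> 3) << 5 | (b >> 3);
   *
   * The wrapper here only handles CPU dispatch and chroma stepping, much
   * like the 32-bit paths above.
   */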
+ if (height < 0) { + height = -height; + dst_argb4444 = dst_argb4444 + (height - 1) * dst_stride_argb4444; + dst_stride_argb4444 = -dst_stride_argb4444; + } +#if defined(HAS_I422TOARGB4444ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToARGB4444Row = I422ToARGB4444Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422ToARGB4444Row = I422ToARGB4444Row_SSSE3; + } + } +#endif +#if defined(HAS_I422TOARGB4444ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToARGB4444Row = I422ToARGB4444Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I422ToARGB4444Row = I422ToARGB4444Row_AVX2; + } + } +#endif +#if defined(HAS_I422TOARGB4444ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToARGB4444Row = I422ToARGB4444Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToARGB4444Row = I422ToARGB4444Row_NEON; + } + } +#endif +#if defined(HAS_I422TOARGB4444ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToARGB4444Row = I422ToARGB4444Row_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422ToARGB4444Row = I422ToARGB4444Row_MSA; + } + } +#endif +#if defined(HAS_I422TOARGB4444ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToARGB4444Row = I422ToARGB4444Row_Any_LASX; + if (IS_ALIGNED(width, 8)) { + I422ToARGB4444Row = I422ToARGB4444Row_LASX; + } + } +#endif + + for (y = 0; y < height; ++y) { + I422ToARGB4444Row(src_y, src_u, src_v, dst_argb4444, &kYuvI601Constants, + width); + dst_argb4444 += dst_stride_argb4444; + src_y += src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } + } + return 0; +} + +// Convert I420 to RGB565 with specified color matrix. +LIBYUV_API +int I420ToRGB565Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I422ToRGB565Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I422ToRGB565Row_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_rgb565 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
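  /*
   * RGB565 is the 16-bit 5:6:5 packing (green keeps the extra bit).  The
   * Matrix variant takes the colour-space constants explicitly; the
   * I420/J420/H420 wrappers below simply forward the BT.601, JPEG and BT.709
   * tables respectively, the same pattern as the RGB24 family above.
   */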
+ if (height < 0) { + height = -height; + dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; + dst_stride_rgb565 = -dst_stride_rgb565; + } +#if defined(HAS_I422TORGB565ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToRGB565Row = I422ToRGB565Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422ToRGB565Row = I422ToRGB565Row_SSSE3; + } + } +#endif +#if defined(HAS_I422TORGB565ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToRGB565Row = I422ToRGB565Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I422ToRGB565Row = I422ToRGB565Row_AVX2; + } + } +#endif +#if defined(HAS_I422TORGB565ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToRGB565Row = I422ToRGB565Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToRGB565Row = I422ToRGB565Row_NEON; + } + } +#endif +#if defined(HAS_I422TORGB565ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToRGB565Row = I422ToRGB565Row_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422ToRGB565Row = I422ToRGB565Row_MSA; + } + } +#endif +#if defined(HAS_I422TORGB565ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToRGB565Row = I422ToRGB565Row_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I422ToRGB565Row = I422ToRGB565Row_LASX; + } + } +#endif + + for (y = 0; y < height; ++y) { + I422ToRGB565Row(src_y, src_u, src_v, dst_rgb565, yuvconstants, width); + dst_rgb565 += dst_stride_rgb565; + src_y += src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } + } + return 0; +} + +// Convert I420 to RGB565. +LIBYUV_API +int I420ToRGB565(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + int width, + int height) { + return I420ToRGB565Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb565, dst_stride_rgb565, + &kYuvI601Constants, width, height); +} + +// Convert J420 to RGB565. +LIBYUV_API +int J420ToRGB565(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + int width, + int height) { + return I420ToRGB565Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb565, dst_stride_rgb565, + &kYuvJPEGConstants, width, height); +} + +// Convert H420 to RGB565. +LIBYUV_API +int H420ToRGB565(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + int width, + int height) { + return I420ToRGB565Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb565, dst_stride_rgb565, + &kYuvH709Constants, width, height); +} + +// Convert I422 to RGB565 with specified color matrix. +LIBYUV_API +int I422ToRGB565Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I422ToRGB565Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I422ToRGB565Row_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_rgb565 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; + dst_stride_rgb565 = -dst_stride_rgb565; + } +#if defined(HAS_I422TORGB565ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToRGB565Row = I422ToRGB565Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422ToRGB565Row = I422ToRGB565Row_SSSE3; + } + } +#endif +#if defined(HAS_I422TORGB565ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToRGB565Row = I422ToRGB565Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I422ToRGB565Row = I422ToRGB565Row_AVX2; + } + } +#endif +#if defined(HAS_I422TORGB565ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToRGB565Row = I422ToRGB565Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToRGB565Row = I422ToRGB565Row_NEON; + } + } +#endif +#if defined(HAS_I422TORGB565ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToRGB565Row = I422ToRGB565Row_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422ToRGB565Row = I422ToRGB565Row_MSA; + } + } +#endif +#if defined(HAS_I422TORGB565ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToRGB565Row = I422ToRGB565Row_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I422ToRGB565Row = I422ToRGB565Row_LASX; + } + } +#endif + + for (y = 0; y < height; ++y) { + I422ToRGB565Row(src_y, src_u, src_v, dst_rgb565, yuvconstants, width); + dst_rgb565 += dst_stride_rgb565; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + return 0; +} + +// Convert I422 to RGB565. +LIBYUV_API +int I422ToRGB565(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + int width, + int height) { + return I422ToRGB565Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb565, dst_stride_rgb565, + &kYuvI601Constants, width, height); +} + +// Ordered 8x8 dither for 888 to 565. Values from 0 to 7. +static const uint8_t kDither565_4x4[16] = { + 0, 4, 1, 5, 6, 2, 7, 3, 1, 5, 0, 4, 7, 3, 6, 2, +}; + +// Convert I420 to RGB565 with dithering. +LIBYUV_API +int I420ToRGB565Dither(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb565, + int dst_stride_rgb565, + const uint8_t* dither4x4, + int width, + int height) { + int y; + void (*I422ToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I422ToARGBRow_C; + void (*ARGBToRGB565DitherRow)(const uint8_t* src_argb, uint8_t* dst_rgb, + const uint32_t dither4, int width) = + ARGBToRGB565DitherRow_C; + if (!src_y || !src_u || !src_v || !dst_rgb565 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
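  /*
   * Unlike the plain RGB565 paths, the dithered version has no fused row
   * function: each row is first converted to ARGB into a temporary row
   * buffer and then packed to 5:6:5 by ARGBToRGB565DitherRow, which adds an
   * ordered-dither bias before truncating.  kDither565_4x4 is a 4x4 table of
   * values 0..7; the four bytes for the current row are selected with
   *
   *   *(const uint32_t*)(dither4x4 + ((y & 3) << 2))
   *
   * so the pattern repeats every four rows and every four pixels.
   */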
+ if (height < 0) { + height = -height; + dst_rgb565 = dst_rgb565 + (height - 1) * dst_stride_rgb565; + dst_stride_rgb565 = -dst_stride_rgb565; + } + if (!dither4x4) { + dither4x4 = kDither565_4x4; + } +#if defined(HAS_I422TOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToARGBRow = I422ToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422ToARGBRow = I422ToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_I422TOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToARGBRow = I422ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I422ToARGBRow = I422ToARGBRow_AVX2; + } + } +#endif +#if defined(HAS_I422TOARGBROW_AVX512BW) + if (TestCpuFlag(kCpuHasAVX512BW | kCpuHasAVX512VL) == + (kCpuHasAVX512BW | kCpuHasAVX512VL)) { + I422ToARGBRow = I422ToARGBRow_Any_AVX512BW; + if (IS_ALIGNED(width, 32)) { + I422ToARGBRow = I422ToARGBRow_AVX512BW; + } + } +#endif +#if defined(HAS_I422TOARGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I422ToARGBRow = I422ToARGBRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I422ToARGBRow = I422ToARGBRow_NEON; + } + } +#endif +#if defined(HAS_I422TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I422ToARGBRow = I422ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I422ToARGBRow = I422ToARGBRow_MSA; + } + } +#endif +#if defined(HAS_I422TOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToARGBRow = I422ToARGBRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I422ToARGBRow = I422ToARGBRow_LASX; + } + } +#endif +#if defined(HAS_ARGBTORGB565DITHERROW_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_SSE2; + if (IS_ALIGNED(width, 4)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_SSE2; + } + } +#endif +#if defined(HAS_ARGBTORGB565DITHERROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_AVX2; + if (IS_ALIGNED(width, 8)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBTORGB565DITHERROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_NEON; + } + } +#endif +#if defined(HAS_ARGBTORGB565DITHERROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_MSA; + } + } +#endif +#if defined(HAS_ARGBTORGB565DITHERROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_LASX; + if (IS_ALIGNED(width, 16)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_LASX; + } + } +#endif + { + // Allocate a row of argb. + align_buffer_64(row_argb, width * 4); + for (y = 0; y < height; ++y) { + I422ToARGBRow(src_y, src_u, src_v, row_argb, &kYuvI601Constants, width); + ARGBToRGB565DitherRow(row_argb, dst_rgb565, + *(const uint32_t*)(dither4x4 + ((y & 3) << 2)), + width); + dst_rgb565 += dst_stride_rgb565; + src_y += src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } + } + free_aligned_buffer_64(row_argb); + } + return 0; +} + +// Convert I420 to AR30 with matrix. 
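/*
 * AR30 is the 32-bit 2:10:10:10 format (10 bits per colour channel plus a
 * 2-bit alpha); AB30 is its R/B-swapped counterpart.  As with RGB24 vs RAW
 * above, the AB30 entry points further below reuse I420ToAR30Matrix by
 * swapping the U and V planes and passing the mirrored kYvu* constants
 * instead of adding a second row path.
 */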
+LIBYUV_API +int I420ToAR30Matrix(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I422ToAR30Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I422ToAR30Row_C; + + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_ar30 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30; + dst_stride_ar30 = -dst_stride_ar30; + } + +#if defined(HAS_I422TOAR30ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I422ToAR30Row = I422ToAR30Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I422ToAR30Row = I422ToAR30Row_SSSE3; + } + } +#endif +#if defined(HAS_I422TOAR30ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I422ToAR30Row = I422ToAR30Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I422ToAR30Row = I422ToAR30Row_AVX2; + } + } +#endif + + for (y = 0; y < height; ++y) { + I422ToAR30Row(src_y, src_u, src_v, dst_ar30, yuvconstants, width); + dst_ar30 += dst_stride_ar30; + src_y += src_stride_y; + if (y & 1) { + src_u += src_stride_u; + src_v += src_stride_v; + } + } + return 0; +} + +// Convert I420 to AR30. +LIBYUV_API +int I420ToAR30(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + int width, + int height) { + return I420ToAR30Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_ar30, dst_stride_ar30, + &kYuvI601Constants, width, height); +} + +// Convert H420 to AR30. +LIBYUV_API +int H420ToAR30(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + int width, + int height) { + return I420ToAR30Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_ar30, dst_stride_ar30, + &kYvuH709Constants, width, height); +} + +// Convert I420 to AB30. +LIBYUV_API +int I420ToAB30(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_ab30, + int dst_stride_ab30, + int width, + int height) { + return I420ToAR30Matrix(src_y, src_stride_y, src_v, src_stride_v, src_u, + src_stride_u, dst_ab30, dst_stride_ab30, + &kYvuI601Constants, width, height); +} + +// Convert H420 to AB30. 
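/*
 * The static *MatrixBilinear / *MatrixLinear helpers that follow H420ToAB30
 * below are the filtered conversion paths: instead of replicating each
 * chroma sample the way the I422To*Row functions above do, they first
 * upsample the U and V planes to full resolution with the ScaleRowUp2 rows
 * and then feed the 4:4:4 row functions.  Each helper keeps two (linear) or
 * four (bilinear) temporary rows whose width is rounded up to a multiple of
 * 32:
 *
 *   const int row_size = (width + 31) & ~31;
 *   align_buffer_64(row, row_size * 4);   // u1, u2, v1, v2
 *
 * The bilinear variants produce two output rows per source chroma row (the
 * 2x vertical upsample), which is why their main loop steps y by two and
 * treats the first and last rows separately with the horizontal-only linear
 * filter.
 */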
+LIBYUV_API +int H420ToAB30(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_ab30, + int dst_stride_ab30, + int width, + int height) { + return I420ToAR30Matrix(src_y, src_stride_y, src_v, src_stride_v, src_u, + src_stride_u, dst_ab30, dst_stride_ab30, + &kYvuH709Constants, width, height); +} + +static int I420ToARGBMatrixBilinear(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I444ToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I444ToARGBRow_C; + void (*Scale2RowUp_Bilinear)(const uint8_t* src_ptr, ptrdiff_t src_stride, + uint8_t* dst_ptr, ptrdiff_t dst_stride, + int dst_width) = ScaleRowUp2_Bilinear_Any_C; + void (*ScaleRowUp2_Linear)(const uint8_t* src_ptr, uint8_t* dst_ptr, + int dst_width) = ScaleRowUp2_Linear_Any_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; + } +#if defined(HAS_I444TOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I444ToARGBRow = I444ToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I444ToARGBRow = I444ToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_I444TOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I444ToARGBRow = I444ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I444ToARGBRow = I444ToARGBRow_AVX2; + } + } +#endif +#if defined(HAS_I444TOARGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I444ToARGBRow = I444ToARGBRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I444ToARGBRow = I444ToARGBRow_NEON; + } + } +#endif +#if defined(HAS_I444TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I444ToARGBRow = I444ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I444ToARGBRow = I444ToARGBRow_MSA; + } + } +#endif +#if defined(HAS_I444TOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I444ToARGBRow = I444ToARGBRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I444ToARGBRow = I444ToARGBRow_LASX; + } + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + Scale2RowUp_Bilinear = ScaleRowUp2_Bilinear_Any_SSE2; + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_SSE2; + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + Scale2RowUp_Bilinear = ScaleRowUp2_Bilinear_Any_SSSE3; + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_SSSE3; + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + Scale2RowUp_Bilinear = ScaleRowUp2_Bilinear_Any_AVX2; + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_AVX2; + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + Scale2RowUp_Bilinear = ScaleRowUp2_Bilinear_Any_NEON; + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_NEON; + } +#endif + + // alloc 4 lines temp + const int row_size = (width + 31) & ~31; + align_buffer_64(row, row_size * 4); + uint8_t* temp_u_1 = row; + uint8_t* temp_u_2 = row + row_size; + uint8_t* temp_v_1 = row + row_size * 2; + uint8_t* temp_v_2 = row + row_size * 3; + + ScaleRowUp2_Linear(src_u, temp_u_1, width); + 
ScaleRowUp2_Linear(src_v, temp_v_1, width); + I444ToARGBRow(src_y, temp_u_1, temp_v_1, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; + src_y += src_stride_y; + + for (y = 0; y < height - 2; y += 2) { + Scale2RowUp_Bilinear(src_u, src_stride_u, temp_u_1, row_size, width); + Scale2RowUp_Bilinear(src_v, src_stride_v, temp_v_1, row_size, width); + I444ToARGBRow(src_y, temp_u_1, temp_v_1, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; + src_y += src_stride_y; + I444ToARGBRow(src_y, temp_u_2, temp_v_2, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + + if (!(height & 1)) { + ScaleRowUp2_Linear(src_u, temp_u_1, width); + ScaleRowUp2_Linear(src_v, temp_v_1, width); + I444ToARGBRow(src_y, temp_u_1, temp_v_1, dst_argb, yuvconstants, width); + } + + free_aligned_buffer_64(row); + return 0; +} + +static int I422ToARGBMatrixLinear(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I444ToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I444ToARGBRow_C; + void (*ScaleRowUp2_Linear)(const uint8_t* src_ptr, uint8_t* dst_ptr, + int dst_width) = ScaleRowUp2_Linear_Any_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; + } +#if defined(HAS_I444TOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I444ToARGBRow = I444ToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I444ToARGBRow = I444ToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_I444TOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I444ToARGBRow = I444ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I444ToARGBRow = I444ToARGBRow_AVX2; + } + } +#endif +#if defined(HAS_I444TOARGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I444ToARGBRow = I444ToARGBRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I444ToARGBRow = I444ToARGBRow_NEON; + } + } +#endif +#if defined(HAS_I444TOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I444ToARGBRow = I444ToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I444ToARGBRow = I444ToARGBRow_MSA; + } + } +#endif +#if defined(HAS_I444TOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I444ToARGBRow = I444ToARGBRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I444ToARGBRow = I444ToARGBRow_LASX; + } + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_SSE2; + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_SSSE3; + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_AVX2; + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_NEON; + } +#endif + + // alloc 2 lines temp + const int row_size = (width + 31) & ~31; + align_buffer_64(row, row_size * 2); + uint8_t* temp_u = row; + uint8_t* temp_v = row + row_size; + + for (y = 0; y < 
height; ++y) { + ScaleRowUp2_Linear(src_u, temp_u, width); + ScaleRowUp2_Linear(src_v, temp_v, width); + I444ToARGBRow(src_y, temp_u, temp_v, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + + free_aligned_buffer_64(row); + return 0; +} + +static int I420ToRGB24MatrixBilinear(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I444ToRGB24Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I444ToRGB24Row_C; + void (*Scale2RowUp_Bilinear)(const uint8_t* src_ptr, ptrdiff_t src_stride, + uint8_t* dst_ptr, ptrdiff_t dst_stride, + int dst_width) = ScaleRowUp2_Bilinear_Any_C; + void (*ScaleRowUp2_Linear)(const uint8_t* src_ptr, uint8_t* dst_ptr, + int dst_width) = ScaleRowUp2_Linear_Any_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_rgb24 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_rgb24 = dst_rgb24 + (height - 1) * dst_stride_rgb24; + dst_stride_rgb24 = -dst_stride_rgb24; + } +#if defined(HAS_I444TORGB24ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I444ToRGB24Row = I444ToRGB24Row_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + I444ToRGB24Row = I444ToRGB24Row_SSSE3; + } + } +#endif +#if defined(HAS_I444TORGB24ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I444ToRGB24Row = I444ToRGB24Row_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + I444ToRGB24Row = I444ToRGB24Row_AVX2; + } + } +#endif +#if defined(HAS_I444TORGB24ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I444ToRGB24Row = I444ToRGB24Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I444ToRGB24Row = I444ToRGB24Row_NEON; + } + } +#endif +#if defined(HAS_I444TORGB24ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I444ToRGB24Row = I444ToRGB24Row_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I444ToRGB24Row = I444ToRGB24Row_MSA; + } + } +#endif +#if defined(HAS_I444TORGB24ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I444ToRGB24Row = I444ToRGB24Row_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I444ToRGB24Row = I444ToRGB24Row_LASX; + } + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + Scale2RowUp_Bilinear = ScaleRowUp2_Bilinear_Any_SSE2; + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_SSE2; + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + Scale2RowUp_Bilinear = ScaleRowUp2_Bilinear_Any_SSSE3; + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_SSSE3; + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + Scale2RowUp_Bilinear = ScaleRowUp2_Bilinear_Any_AVX2; + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_AVX2; + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + Scale2RowUp_Bilinear = ScaleRowUp2_Bilinear_Any_NEON; + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_NEON; + } +#endif + + // alloc 4 lines temp + const int row_size = (width + 31) & ~31; + align_buffer_64(row, row_size * 4); + uint8_t* temp_u_1 = row; + uint8_t* temp_u_2 = row + row_size; + uint8_t* temp_v_1 = row + row_size * 2; + uint8_t* temp_v_2 = row + row_size * 3; + + ScaleRowUp2_Linear(src_u, temp_u_1, width); + ScaleRowUp2_Linear(src_v, 
temp_v_1, width); + I444ToRGB24Row(src_y, temp_u_1, temp_v_1, dst_rgb24, yuvconstants, width); + dst_rgb24 += dst_stride_rgb24; + src_y += src_stride_y; + + for (y = 0; y < height - 2; y += 2) { + Scale2RowUp_Bilinear(src_u, src_stride_u, temp_u_1, row_size, width); + Scale2RowUp_Bilinear(src_v, src_stride_v, temp_v_1, row_size, width); + I444ToRGB24Row(src_y, temp_u_1, temp_v_1, dst_rgb24, yuvconstants, width); + dst_rgb24 += dst_stride_rgb24; + src_y += src_stride_y; + I444ToRGB24Row(src_y, temp_u_2, temp_v_2, dst_rgb24, yuvconstants, width); + dst_rgb24 += dst_stride_rgb24; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + + if (!(height & 1)) { + ScaleRowUp2_Linear(src_u, temp_u_1, width); + ScaleRowUp2_Linear(src_v, temp_v_1, width); + I444ToRGB24Row(src_y, temp_u_1, temp_v_1, dst_rgb24, yuvconstants, width); + } + + free_aligned_buffer_64(row); + return 0; +} + +static int I010ToAR30MatrixBilinear(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I410ToAR30Row)(const uint16_t* y_buf, const uint16_t* u_buf, + const uint16_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I410ToAR30Row_C; + void (*Scale2RowUp_Bilinear_12)( + const uint16_t* src_ptr, ptrdiff_t src_stride, uint16_t* dst_ptr, + ptrdiff_t dst_stride, int dst_width) = ScaleRowUp2_Bilinear_16_Any_C; + void (*ScaleRowUp2_Linear_12)(const uint16_t* src_ptr, uint16_t* dst_ptr, + int dst_width) = ScaleRowUp2_Linear_16_Any_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_ar30 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
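  /*
   * I010 and I210 carry 10-bit samples in uint16_t planes, so these helpers
   * allocate 16-bit temporary rows (note the sizeof(uint16_t) in the buffer
   * size below) and use the ScaleRowUp2_*_12 variants for the chroma
   * upsample before handing the full-resolution rows to the I410 row
   * functions.  The row and loop structure otherwise matches the 8-bit
   * bilinear helper above.
   */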
+ if (height < 0) { + height = -height; + dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30; + dst_stride_ar30 = -dst_stride_ar30; + } +#if defined(HAS_I410TOAR30ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I410ToAR30Row = I410ToAR30Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I410ToAR30Row = I410ToAR30Row_SSSE3; + } + } +#endif +#if defined(HAS_I410TOAR30ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I410ToAR30Row = I410ToAR30Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I410ToAR30Row = I410ToAR30Row_AVX2; + } + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_12_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + Scale2RowUp_Bilinear_12 = ScaleRowUp2_Bilinear_12_Any_SSSE3; + ScaleRowUp2_Linear_12 = ScaleRowUp2_Linear_12_Any_SSSE3; + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_12_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + Scale2RowUp_Bilinear_12 = ScaleRowUp2_Bilinear_12_Any_AVX2; + ScaleRowUp2_Linear_12 = ScaleRowUp2_Linear_12_Any_AVX2; + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_12_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + Scale2RowUp_Bilinear_12 = ScaleRowUp2_Bilinear_12_Any_NEON; + ScaleRowUp2_Linear_12 = ScaleRowUp2_Linear_12_Any_NEON; + } +#endif + + // alloc 4 lines temp + const int row_size = (width + 31) & ~31; + align_buffer_64(row, row_size * 4 * sizeof(uint16_t)); + uint16_t* temp_u_1 = (uint16_t*)(row); + uint16_t* temp_u_2 = (uint16_t*)(row) + row_size; + uint16_t* temp_v_1 = (uint16_t*)(row) + row_size * 2; + uint16_t* temp_v_2 = (uint16_t*)(row) + row_size * 3; + + ScaleRowUp2_Linear_12(src_u, temp_u_1, width); + ScaleRowUp2_Linear_12(src_v, temp_v_1, width); + I410ToAR30Row(src_y, temp_u_1, temp_v_1, dst_ar30, yuvconstants, width); + dst_ar30 += dst_stride_ar30; + src_y += src_stride_y; + + for (y = 0; y < height - 2; y += 2) { + Scale2RowUp_Bilinear_12(src_u, src_stride_u, temp_u_1, row_size, width); + Scale2RowUp_Bilinear_12(src_v, src_stride_v, temp_v_1, row_size, width); + I410ToAR30Row(src_y, temp_u_1, temp_v_1, dst_ar30, yuvconstants, width); + dst_ar30 += dst_stride_ar30; + src_y += src_stride_y; + I410ToAR30Row(src_y, temp_u_2, temp_v_2, dst_ar30, yuvconstants, width); + dst_ar30 += dst_stride_ar30; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + + if (!(height & 1)) { + ScaleRowUp2_Linear_12(src_u, temp_u_1, width); + ScaleRowUp2_Linear_12(src_v, temp_v_1, width); + I410ToAR30Row(src_y, temp_u_1, temp_v_1, dst_ar30, yuvconstants, width); + } + + free_aligned_buffer_64(row); + + return 0; +} + +static int I210ToAR30MatrixLinear(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I410ToAR30Row)(const uint16_t* y_buf, const uint16_t* u_buf, + const uint16_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I410ToAR30Row_C; + void (*ScaleRowUp2_Linear_12)(const uint16_t* src_ptr, uint16_t* dst_ptr, + int dst_width) = ScaleRowUp2_Linear_16_Any_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_ar30 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30; + dst_stride_ar30 = -dst_stride_ar30; + } +#if defined(HAS_I410TOAR30ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I410ToAR30Row = I410ToAR30Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I410ToAR30Row = I410ToAR30Row_SSSE3; + } + } +#endif +#if defined(HAS_I410TOAR30ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I410ToAR30Row = I410ToAR30Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I410ToAR30Row = I410ToAR30Row_AVX2; + } + } +#endif + +#if defined(HAS_SCALEROWUP2_LINEAR_12_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ScaleRowUp2_Linear_12 = ScaleRowUp2_Linear_12_Any_SSSE3; + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_12_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ScaleRowUp2_Linear_12 = ScaleRowUp2_Linear_12_Any_AVX2; + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_12_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ScaleRowUp2_Linear_12 = ScaleRowUp2_Linear_12_Any_NEON; + } +#endif + + // alloc 2 lines temp + const int row_size = (width + 31) & ~31; + align_buffer_64(row, row_size * 2 * sizeof(uint16_t)); + uint16_t* temp_u = (uint16_t*)(row); + uint16_t* temp_v = (uint16_t*)(row) + row_size; + + for (y = 0; y < height; ++y) { + ScaleRowUp2_Linear_12(src_u, temp_u, width); + ScaleRowUp2_Linear_12(src_v, temp_v, width); + I410ToAR30Row(src_y, temp_u, temp_v, dst_ar30, yuvconstants, width); + dst_ar30 += dst_stride_ar30; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + free_aligned_buffer_64(row); + return 0; +} + +static int I010ToARGBMatrixBilinear(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I410ToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf, + const uint16_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I410ToARGBRow_C; + void (*Scale2RowUp_Bilinear_12)( + const uint16_t* src_ptr, ptrdiff_t src_stride, uint16_t* dst_ptr, + ptrdiff_t dst_stride, int dst_width) = ScaleRowUp2_Bilinear_16_Any_C; + void (*ScaleRowUp2_Linear_12)(const uint16_t* src_ptr, uint16_t* dst_ptr, + int dst_width) = ScaleRowUp2_Linear_16_Any_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; + } +#if defined(HAS_I410TOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I410ToARGBRow = I410ToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I410ToARGBRow = I410ToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_I410TOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I410ToARGBRow = I410ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I410ToARGBRow = I410ToARGBRow_AVX2; + } + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_12_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + Scale2RowUp_Bilinear_12 = ScaleRowUp2_Bilinear_12_Any_SSSE3; + ScaleRowUp2_Linear_12 = ScaleRowUp2_Linear_12_Any_SSSE3; + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_12_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + Scale2RowUp_Bilinear_12 = ScaleRowUp2_Bilinear_12_Any_AVX2; + ScaleRowUp2_Linear_12 = ScaleRowUp2_Linear_12_Any_AVX2; + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_12_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + Scale2RowUp_Bilinear_12 = ScaleRowUp2_Bilinear_12_Any_NEON; + ScaleRowUp2_Linear_12 = ScaleRowUp2_Linear_12_Any_NEON; + } +#endif + + // alloc 4 lines temp + const int row_size = (width + 31) & ~31; + align_buffer_64(row, row_size * 4 * sizeof(uint16_t)); + uint16_t* temp_u_1 = (uint16_t*)(row); + uint16_t* temp_u_2 = (uint16_t*)(row) + row_size; + uint16_t* temp_v_1 = (uint16_t*)(row) + row_size * 2; + uint16_t* temp_v_2 = (uint16_t*)(row) + row_size * 3; + + ScaleRowUp2_Linear_12(src_u, temp_u_1, width); + ScaleRowUp2_Linear_12(src_v, temp_v_1, width); + I410ToARGBRow(src_y, temp_u_1, temp_v_1, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; + src_y += src_stride_y; + + for (y = 0; y < height - 2; y += 2) { + Scale2RowUp_Bilinear_12(src_u, src_stride_u, temp_u_1, row_size, width); + Scale2RowUp_Bilinear_12(src_v, src_stride_v, temp_v_1, row_size, width); + I410ToARGBRow(src_y, temp_u_1, temp_v_1, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; + src_y += src_stride_y; + I410ToARGBRow(src_y, temp_u_2, temp_v_2, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + + if (!(height & 1)) { + ScaleRowUp2_Linear_12(src_u, temp_u_1, width); + ScaleRowUp2_Linear_12(src_v, temp_v_1, width); + I410ToARGBRow(src_y, temp_u_1, temp_v_1, dst_argb, yuvconstants, width); + } + + free_aligned_buffer_64(row); + return 0; +} + +static int I210ToARGBMatrixLinear(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I410ToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf, + const uint16_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I410ToARGBRow_C; + void (*ScaleRowUp2_Linear_12)(const uint16_t* src_ptr, uint16_t* dst_ptr, + int dst_width) = ScaleRowUp2_Linear_16_Any_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_argb || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; + } +#if defined(HAS_I410TOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I410ToARGBRow = I410ToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I410ToARGBRow = I410ToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_I410TOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I410ToARGBRow = I410ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I410ToARGBRow = I410ToARGBRow_AVX2; + } + } +#endif + +#if defined(HAS_SCALEROWUP2_LINEAR_12_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ScaleRowUp2_Linear_12 = ScaleRowUp2_Linear_12_Any_SSSE3; + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_12_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ScaleRowUp2_Linear_12 = ScaleRowUp2_Linear_12_Any_AVX2; + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_12_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ScaleRowUp2_Linear_12 = ScaleRowUp2_Linear_12_Any_NEON; + } +#endif + + // alloc 2 lines temp + const int row_size = (width + 31) & ~31; + align_buffer_64(row, row_size * 2 * sizeof(uint16_t)); + uint16_t* temp_u = (uint16_t*)(row); + uint16_t* temp_v = (uint16_t*)(row) + row_size; + + for (y = 0; y < height; ++y) { + ScaleRowUp2_Linear_12(src_u, temp_u, width); + ScaleRowUp2_Linear_12(src_v, temp_v, width); + I410ToARGBRow(src_y, temp_u, temp_v, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + + free_aligned_buffer_64(row); + return 0; +} + +static int I420AlphaToARGBMatrixBilinear( + const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate) { + int y; + void (*I444AlphaToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, const uint8_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) = I444AlphaToARGBRow_C; + void (*ARGBAttenuateRow)(const uint8_t* src_argb, uint8_t* dst_argb, + int width) = ARGBAttenuateRow_C; + void (*Scale2RowUp_Bilinear)(const uint8_t* src_ptr, ptrdiff_t src_stride, + uint8_t* dst_ptr, ptrdiff_t dst_stride, + int dst_width) = ScaleRowUp2_Bilinear_Any_C; + void (*ScaleRowUp2_Linear)(const uint8_t* src_ptr, uint8_t* dst_ptr, + int dst_width) = ScaleRowUp2_Linear_Any_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !src_a || !dst_argb || width <= 0 || + height == 0) { + return -1; + } + // Negative height means invert the image. 
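  /*
   * The *Alpha* helpers add a per-pixel alpha plane (src_a) and an attenuate
   * flag: when it is non-zero, ARGBAttenuateRow premultiplies each converted
   * row by its alpha in place immediately after the row conversion, i.e.
   *
   *   I444AlphaToARGBRow(src_y, temp_u, temp_v, src_a, dst_argb, yuvconstants, width);
   *   if (attenuate) ARGBAttenuateRow(dst_argb, dst_argb, width);
   *
   * Apart from that, the structure mirrors the non-alpha bilinear and linear
   * paths above.
   */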
+ if (height < 0) { + height = -height; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; + } +#if defined(HAS_I444ALPHATOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_I444ALPHATOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_AVX2; + } + } +#endif +#if defined(HAS_I444ALPHATOARGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_NEON; + } + } +#endif +#if defined(HAS_I444ALPHATOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_MSA; + } + } +#endif +#if defined(HAS_I444ALPHATOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_Any_LASX; + if (IS_ALIGNED(width, 16)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_LASX; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_SSSE3; + if (IS_ALIGNED(width, 4)) { + ARGBAttenuateRow = ARGBAttenuateRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_AVX2; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_NEON; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_MSA; + } + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + Scale2RowUp_Bilinear = ScaleRowUp2_Bilinear_Any_SSE2; + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_SSE2; + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + Scale2RowUp_Bilinear = ScaleRowUp2_Bilinear_Any_SSSE3; + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_SSSE3; + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + Scale2RowUp_Bilinear = ScaleRowUp2_Bilinear_Any_AVX2; + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_AVX2; + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + Scale2RowUp_Bilinear = ScaleRowUp2_Bilinear_Any_NEON; + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_NEON; + } +#endif + + // alloc 4 lines temp + const int row_size = (width + 31) & ~31; + align_buffer_64(row, row_size * 4); + uint8_t* temp_u_1 = row; + uint8_t* temp_u_2 = row + row_size; + uint8_t* temp_v_1 = row + row_size * 2; + uint8_t* temp_v_2 = row + row_size * 3; + + ScaleRowUp2_Linear(src_u, temp_u_1, width); + ScaleRowUp2_Linear(src_v, temp_v_1, width); + I444AlphaToARGBRow(src_y, temp_u_1, temp_v_1, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + ARGBAttenuateRow(dst_argb, dst_argb, width); + } + dst_argb += dst_stride_argb; + src_y += src_stride_y; + src_a += src_stride_a; + + for (y = 0; y < height - 2; y += 2) { + 
Scale2RowUp_Bilinear(src_u, src_stride_u, temp_u_1, row_size, width); + Scale2RowUp_Bilinear(src_v, src_stride_v, temp_v_1, row_size, width); + I444AlphaToARGBRow(src_y, temp_u_1, temp_v_1, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + ARGBAttenuateRow(dst_argb, dst_argb, width); + } + dst_argb += dst_stride_argb; + src_y += src_stride_y; + src_a += src_stride_a; + I444AlphaToARGBRow(src_y, temp_u_2, temp_v_2, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + ARGBAttenuateRow(dst_argb, dst_argb, width); + } + dst_argb += dst_stride_argb; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + src_a += src_stride_a; + } + + if (!(height & 1)) { + ScaleRowUp2_Linear(src_u, temp_u_1, width); + ScaleRowUp2_Linear(src_v, temp_v_1, width); + I444AlphaToARGBRow(src_y, temp_u_1, temp_v_1, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + ARGBAttenuateRow(dst_argb, dst_argb, width); + } + } + + free_aligned_buffer_64(row); + return 0; +} + +static int I422AlphaToARGBMatrixLinear(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate) { + int y; + void (*I444AlphaToARGBRow)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, const uint8_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) = I444AlphaToARGBRow_C; + void (*ARGBAttenuateRow)(const uint8_t* src_argb, uint8_t* dst_argb, + int width) = ARGBAttenuateRow_C; + void (*ScaleRowUp2_Linear)(const uint8_t* src_ptr, uint8_t* dst_ptr, + int dst_width) = ScaleRowUp2_Linear_Any_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !src_a || !dst_argb || width <= 0 || + height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; + } +#if defined(HAS_I444ALPHATOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_I444ALPHATOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_AVX2; + } + } +#endif +#if defined(HAS_I444ALPHATOARGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_NEON; + } + } +#endif +#if defined(HAS_I444ALPHATOARGBROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_MSA; + } + } +#endif +#if defined(HAS_I444ALPHATOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_Any_LASX; + if (IS_ALIGNED(width, 16)) { + I444AlphaToARGBRow = I444AlphaToARGBRow_LASX; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_SSSE3; + if (IS_ALIGNED(width, 4)) { + ARGBAttenuateRow = ARGBAttenuateRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_AVX2; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_NEON; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_MSA; + } + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_SSE2; + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_SSSE3; + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_AVX2; + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_NEON; + } +#endif + + // alloc 2 lines temp + const int row_size = (width + 31) & ~31; + align_buffer_64(row, row_size * 2); + uint8_t* temp_u = row; + uint8_t* temp_v = row + row_size; + + for (y = 0; y < height; ++y) { + ScaleRowUp2_Linear(src_u, temp_u, width); + ScaleRowUp2_Linear(src_v, temp_v, width); + I444AlphaToARGBRow(src_y, temp_u, temp_v, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + ARGBAttenuateRow(dst_argb, dst_argb, width); + } + dst_argb += dst_stride_argb; + src_a += src_stride_a; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + + free_aligned_buffer_64(row); + return 0; +} + +static int I010AlphaToARGBMatrixBilinear( + const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + const uint16_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int 
dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate) { + int y; + void (*I410AlphaToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf, + const uint16_t* v_buf, const uint16_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) = I410AlphaToARGBRow_C; + void (*ARGBAttenuateRow)(const uint8_t* src_argb, uint8_t* dst_argb, + int width) = ARGBAttenuateRow_C; + void (*Scale2RowUp_Bilinear_12)( + const uint16_t* src_ptr, ptrdiff_t src_stride, uint16_t* dst_ptr, + ptrdiff_t dst_stride, int dst_width) = ScaleRowUp2_Bilinear_16_Any_C; + void (*ScaleRowUp2_Linear_12)(const uint16_t* src_ptr, uint16_t* dst_ptr, + int dst_width) = ScaleRowUp2_Linear_16_Any_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !src_a || !dst_argb || width <= 0 || + height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; + } +#if defined(HAS_I410ALPHATOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I410AlphaToARGBRow = I410AlphaToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I410AlphaToARGBRow = I410AlphaToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_I410ALPHATOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I410AlphaToARGBRow = I410AlphaToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I410AlphaToARGBRow = I410AlphaToARGBRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_SSSE3; + if (IS_ALIGNED(width, 4)) { + ARGBAttenuateRow = ARGBAttenuateRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_AVX2; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_NEON; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_MSA; + } + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_12_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + Scale2RowUp_Bilinear_12 = ScaleRowUp2_Bilinear_12_Any_SSSE3; + ScaleRowUp2_Linear_12 = ScaleRowUp2_Linear_12_Any_SSSE3; + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_12_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + Scale2RowUp_Bilinear_12 = ScaleRowUp2_Bilinear_12_Any_AVX2; + ScaleRowUp2_Linear_12 = ScaleRowUp2_Linear_12_Any_AVX2; + } +#endif + +#if defined(HAS_SCALEROWUP2_BILINEAR_12_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + Scale2RowUp_Bilinear_12 = ScaleRowUp2_Bilinear_12_Any_NEON; + ScaleRowUp2_Linear_12 = ScaleRowUp2_Linear_12_Any_NEON; + } +#endif + + // alloc 4 lines temp + const int row_size = (width + 31) & ~31; + align_buffer_64(row, row_size * 4 * sizeof(uint16_t)); + uint16_t* temp_u_1 = (uint16_t*)(row); + uint16_t* temp_u_2 = (uint16_t*)(row) + row_size; + uint16_t* temp_v_1 = (uint16_t*)(row) + row_size * 2; + uint16_t* temp_v_2 = (uint16_t*)(row) + row_size * 3; + + ScaleRowUp2_Linear_12(src_u, temp_u_1, width); + ScaleRowUp2_Linear_12(src_v, temp_v_1, width); + I410AlphaToARGBRow(src_y, temp_u_1, temp_v_1, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + 
ARGBAttenuateRow(dst_argb, dst_argb, width); + } + dst_argb += dst_stride_argb; + src_y += src_stride_y; + src_a += src_stride_a; + + for (y = 0; y < height - 2; y += 2) { + Scale2RowUp_Bilinear_12(src_u, src_stride_u, temp_u_1, row_size, width); + Scale2RowUp_Bilinear_12(src_v, src_stride_v, temp_v_1, row_size, width); + I410AlphaToARGBRow(src_y, temp_u_1, temp_v_1, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + ARGBAttenuateRow(dst_argb, dst_argb, width); + } + dst_argb += dst_stride_argb; + src_y += src_stride_y; + src_a += src_stride_a; + I410AlphaToARGBRow(src_y, temp_u_2, temp_v_2, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + ARGBAttenuateRow(dst_argb, dst_argb, width); + } + dst_argb += dst_stride_argb; + src_y += src_stride_y; + src_a += src_stride_a; + src_u += src_stride_u; + src_v += src_stride_v; + } + + if (!(height & 1)) { + ScaleRowUp2_Linear_12(src_u, temp_u_1, width); + ScaleRowUp2_Linear_12(src_v, temp_v_1, width); + I410AlphaToARGBRow(src_y, temp_u_1, temp_v_1, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + ARGBAttenuateRow(dst_argb, dst_argb, width); + } + } + + free_aligned_buffer_64(row); + return 0; +} + +static int I210AlphaToARGBMatrixLinear(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + const uint16_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate) { + int y; + void (*I410AlphaToARGBRow)(const uint16_t* y_buf, const uint16_t* u_buf, + const uint16_t* v_buf, const uint16_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) = I410AlphaToARGBRow_C; + void (*ARGBAttenuateRow)(const uint8_t* src_argb, uint8_t* dst_argb, + int width) = ARGBAttenuateRow_C; + void (*ScaleRowUp2_Linear)(const uint16_t* src_ptr, uint16_t* dst_ptr, + int dst_width) = ScaleRowUp2_Linear_16_Any_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !src_a || !dst_argb || width <= 0 || + height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; + } +#if defined(HAS_I410ALPHATOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I410AlphaToARGBRow = I410AlphaToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + I410AlphaToARGBRow = I410AlphaToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_I410ALPHATOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I410AlphaToARGBRow = I410AlphaToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + I410AlphaToARGBRow = I410AlphaToARGBRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_SSSE3; + if (IS_ALIGNED(width, 4)) { + ARGBAttenuateRow = ARGBAttenuateRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_AVX2; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_NEON; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_NEON; + } + } +#endif +#if defined(HAS_ARGBATTENUATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; + if (IS_ALIGNED(width, 8)) { + ARGBAttenuateRow = ARGBAttenuateRow_MSA; + } + } +#endif + +#if defined(HAS_SCALEROWUP2_LINEAR_12_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ScaleRowUp2_Linear = ScaleRowUp2_Linear_12_Any_SSSE3; + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_12_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ScaleRowUp2_Linear = ScaleRowUp2_Linear_12_Any_AVX2; + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_12_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ScaleRowUp2_Linear = ScaleRowUp2_Linear_12_Any_NEON; + } +#endif + + // alloc 2 lines temp + const int row_size = (width + 31) & ~31; + align_buffer_64(row, row_size * 2 * sizeof(uint16_t)); + uint16_t* temp_u = (uint16_t*)(row); + uint16_t* temp_v = (uint16_t*)(row) + row_size; + + for (y = 0; y < height; ++y) { + ScaleRowUp2_Linear(src_u, temp_u, width); + ScaleRowUp2_Linear(src_v, temp_v, width); + I410AlphaToARGBRow(src_y, temp_u, temp_v, src_a, dst_argb, yuvconstants, + width); + if (attenuate) { + ARGBAttenuateRow(dst_argb, dst_argb, width); + } + dst_argb += dst_stride_argb; + src_a += src_stride_a; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + free_aligned_buffer_64(row); + return 0; +} + +static int P010ToARGBMatrixBilinear(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*P410ToARGBRow)( + const uint16_t* y_buf, const uint16_t* uv_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = P410ToARGBRow_C; + void (*Scale2RowUp_Bilinear_16)( + const uint16_t* src_ptr, ptrdiff_t src_stride, uint16_t* dst_ptr, + ptrdiff_t dst_stride, int dst_width) = ScaleUVRowUp2_Bilinear_16_Any_C; + assert(yuvconstants); + if (!src_y || !src_uv || !dst_argb || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; + } +#if defined(HAS_P410TOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + P410ToARGBRow = P410ToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + P410ToARGBRow = P410ToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_P410TOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + P410ToARGBRow = P410ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + P410ToARGBRow = P410ToARGBRow_AVX2; + } + } +#endif + +#ifdef HAS_SCALEUVROWUP2_BILINEAR_16_SSE41 + if (TestCpuFlag(kCpuHasSSE41)) { + Scale2RowUp_Bilinear_16 = ScaleUVRowUp2_Bilinear_16_Any_SSE41; + } +#endif + +#ifdef HAS_SCALEUVROWUP2_BILINEAR_16_AVX2 + if (TestCpuFlag(kCpuHasAVX2)) { + Scale2RowUp_Bilinear_16 = ScaleUVRowUp2_Bilinear_16_Any_AVX2; + } +#endif + +#ifdef HAS_SCALEUVROWUP2_BILINEAR_16_NEON + if (TestCpuFlag(kCpuHasNEON)) { + Scale2RowUp_Bilinear_16 = ScaleUVRowUp2_Bilinear_16_Any_NEON; + } +#endif + + // alloc 2 lines temp + const int row_size = (2 * width + 31) & ~31; + align_buffer_64(row, row_size * 2 * sizeof(uint16_t)); + uint16_t* temp_uv_1 = (uint16_t*)(row); + uint16_t* temp_uv_2 = (uint16_t*)(row) + row_size; + + Scale2RowUp_Bilinear_16(src_uv, 0, temp_uv_1, row_size, width); + P410ToARGBRow(src_y, temp_uv_1, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; + src_y += src_stride_y; + + for (y = 0; y < height - 2; y += 2) { + Scale2RowUp_Bilinear_16(src_uv, src_stride_uv, temp_uv_1, row_size, width); + P410ToARGBRow(src_y, temp_uv_1, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; + src_y += src_stride_y; + P410ToARGBRow(src_y, temp_uv_2, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; + src_y += src_stride_y; + src_uv += src_stride_uv; + } + + if (!(height & 1)) { + Scale2RowUp_Bilinear_16(src_uv, 0, temp_uv_1, row_size, width); + P410ToARGBRow(src_y, temp_uv_1, dst_argb, yuvconstants, width); + } + + free_aligned_buffer_64(row); + return 0; +} + +static int P210ToARGBMatrixLinear(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*P410ToARGBRow)( + const uint16_t* y_buf, const uint16_t* uv_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = P410ToARGBRow_C; + void (*ScaleRowUp2_Linear)(const uint16_t* src_uv, uint16_t* dst_uv, + int dst_width) = ScaleUVRowUp2_Linear_16_Any_C; + assert(yuvconstants); + if (!src_y || !src_uv || !dst_argb || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; + } +#if defined(HAS_P410TOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + P410ToARGBRow = P410ToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + P410ToARGBRow = P410ToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_P410TOARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + P410ToARGBRow = P410ToARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + P410ToARGBRow = P410ToARGBRow_AVX2; + } + } +#endif + +#ifdef HAS_SCALEUVROWUP2_LINEAR_16_SSE41 + if (TestCpuFlag(kCpuHasSSE41)) { + ScaleRowUp2_Linear = ScaleUVRowUp2_Linear_16_Any_SSE41; + } +#endif + +#ifdef HAS_SCALEUVROWUP2_LINEAR_16_AVX2 + if (TestCpuFlag(kCpuHasAVX2)) { + ScaleRowUp2_Linear = ScaleUVRowUp2_Linear_16_Any_AVX2; + } +#endif + +#ifdef HAS_SCALEUVROWUP2_LINEAR_16_NEON + if (TestCpuFlag(kCpuHasNEON)) { + ScaleRowUp2_Linear = ScaleUVRowUp2_Linear_16_Any_NEON; + } +#endif + + const int row_size = (2 * width + 31) & ~31; + align_buffer_64(row, row_size * sizeof(uint16_t)); + uint16_t* temp_uv = (uint16_t*)(row); + + for (y = 0; y < height; ++y) { + ScaleRowUp2_Linear(src_uv, temp_uv, width); + P410ToARGBRow(src_y, temp_uv, dst_argb, yuvconstants, width); + dst_argb += dst_stride_argb; + src_y += src_stride_y; + src_uv += src_stride_uv; + } + + free_aligned_buffer_64(row); + return 0; +} + +static int P010ToAR30MatrixBilinear(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*P410ToAR30Row)( + const uint16_t* y_buf, const uint16_t* uv_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = P410ToAR30Row_C; + void (*Scale2RowUp_Bilinear_16)( + const uint16_t* src_ptr, ptrdiff_t src_stride, uint16_t* dst_ptr, + ptrdiff_t dst_stride, int dst_width) = ScaleUVRowUp2_Bilinear_16_Any_C; + assert(yuvconstants); + if (!src_y || !src_uv || !dst_ar30 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30; + dst_stride_ar30 = -dst_stride_ar30; + } +#if defined(HAS_P410TOAR30ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + P410ToAR30Row = P410ToAR30Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + P410ToAR30Row = P410ToAR30Row_SSSE3; + } + } +#endif +#if defined(HAS_P410TOAR30ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + P410ToAR30Row = P410ToAR30Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + P410ToAR30Row = P410ToAR30Row_AVX2; + } + } +#endif + +#ifdef HAS_SCALEUVROWUP2_BILINEAR_16_SSE41 + if (TestCpuFlag(kCpuHasSSE41)) { + Scale2RowUp_Bilinear_16 = ScaleUVRowUp2_Bilinear_16_Any_SSE41; + } +#endif + +#ifdef HAS_SCALEUVROWUP2_BILINEAR_16_AVX2 + if (TestCpuFlag(kCpuHasAVX2)) { + Scale2RowUp_Bilinear_16 = ScaleUVRowUp2_Bilinear_16_Any_AVX2; + } +#endif + +#ifdef HAS_SCALEUVROWUP2_BILINEAR_16_NEON + if (TestCpuFlag(kCpuHasNEON)) { + Scale2RowUp_Bilinear_16 = ScaleUVRowUp2_Bilinear_16_Any_NEON; + } +#endif + + // alloc 2 lines temp + const int row_size = (2 * width + 31) & ~31; + align_buffer_64(row, row_size * 2 * sizeof(uint16_t)); + uint16_t* temp_uv_1 = (uint16_t*)(row); + uint16_t* temp_uv_2 = (uint16_t*)(row) + row_size; + + Scale2RowUp_Bilinear_16(src_uv, 0, temp_uv_1, row_size, width); + P410ToAR30Row(src_y, temp_uv_1, dst_ar30, yuvconstants, width); + dst_ar30 += dst_stride_ar30; + src_y += src_stride_y; + + for (y = 0; y < height - 2; y += 2) { + Scale2RowUp_Bilinear_16(src_uv, src_stride_uv, temp_uv_1, row_size, width); + P410ToAR30Row(src_y, temp_uv_1, dst_ar30, yuvconstants, width); + dst_ar30 += dst_stride_ar30; + src_y += src_stride_y; + P410ToAR30Row(src_y, temp_uv_2, dst_ar30, yuvconstants, width); + dst_ar30 += dst_stride_ar30; + src_y += src_stride_y; + src_uv += src_stride_uv; + } + + if (!(height & 1)) { + Scale2RowUp_Bilinear_16(src_uv, 0, temp_uv_1, row_size, width); + P410ToAR30Row(src_y, temp_uv_1, dst_ar30, yuvconstants, width); + } + + free_aligned_buffer_64(row); + return 0; +} + +static int P210ToAR30MatrixLinear(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*P410ToAR30Row)( + const uint16_t* y_buf, const uint16_t* uv_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = P410ToAR30Row_C; + void (*ScaleRowUp2_Linear)(const uint16_t* src_uv, uint16_t* dst_uv, + int dst_width) = ScaleUVRowUp2_Linear_16_Any_C; + assert(yuvconstants); + if (!src_y || !src_uv || !dst_ar30 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30; + dst_stride_ar30 = -dst_stride_ar30; + } +#if defined(HAS_P410TOAR30ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + P410ToAR30Row = P410ToAR30Row_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + P410ToAR30Row = P410ToAR30Row_SSSE3; + } + } +#endif +#if defined(HAS_P410TOAR30ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + P410ToAR30Row = P410ToAR30Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + P410ToAR30Row = P410ToAR30Row_AVX2; + } + } +#endif + +#ifdef HAS_SCALEUVROWUP2_LINEAR_16_SSE41 + if (TestCpuFlag(kCpuHasSSE41)) { + ScaleRowUp2_Linear = ScaleUVRowUp2_Linear_16_Any_SSE41; + } +#endif + +#ifdef HAS_SCALEUVROWUP2_LINEAR_16_AVX2 + if (TestCpuFlag(kCpuHasAVX2)) { + ScaleRowUp2_Linear = ScaleUVRowUp2_Linear_16_Any_AVX2; + } +#endif + +#ifdef HAS_SCALEUVROWUP2_LINEAR_16_NEON + if (TestCpuFlag(kCpuHasNEON)) { + ScaleRowUp2_Linear = ScaleUVRowUp2_Linear_16_Any_NEON; + } +#endif + + const int row_size = (2 * width + 31) & ~31; + align_buffer_64(row, row_size * sizeof(uint16_t)); + uint16_t* temp_uv = (uint16_t*)(row); + + for (y = 0; y < height; ++y) { + ScaleRowUp2_Linear(src_uv, temp_uv, width); + P410ToAR30Row(src_y, temp_uv, dst_ar30, yuvconstants, width); + dst_ar30 += dst_stride_ar30; + src_y += src_stride_y; + src_uv += src_stride_uv; + } + + free_aligned_buffer_64(row); + return 0; +} + +static int I422ToRGB24MatrixLinear(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height) { + int y; + void (*I444ToRGB24Row)(const uint8_t* y_buf, const uint8_t* u_buf, + const uint8_t* v_buf, uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, int width) = + I444ToRGB24Row_C; + void (*ScaleRowUp2_Linear)(const uint8_t* src_ptr, uint8_t* dst_ptr, + int dst_width) = ScaleRowUp2_Linear_Any_C; + assert(yuvconstants); + if (!src_y || !src_u || !src_v || !dst_rgb24 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + dst_rgb24 = dst_rgb24 + (height - 1) * dst_stride_rgb24; + dst_stride_rgb24 = -dst_stride_rgb24; + } +#if defined(HAS_I444TORGB24ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + I444ToRGB24Row = I444ToRGB24Row_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + I444ToRGB24Row = I444ToRGB24Row_SSSE3; + } + } +#endif +#if defined(HAS_I444TORGB24ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + I444ToRGB24Row = I444ToRGB24Row_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + I444ToRGB24Row = I444ToRGB24Row_AVX2; + } + } +#endif +#if defined(HAS_I444TORGB24ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + I444ToRGB24Row = I444ToRGB24Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + I444ToRGB24Row = I444ToRGB24Row_NEON; + } + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_SSE2; + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_SSSE3; + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_AVX2; + } +#endif +#if defined(HAS_SCALEROWUP2_LINEAR_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ScaleRowUp2_Linear = ScaleRowUp2_Linear_Any_NEON; + } +#endif + + // alloc 2 lines temp + const int row_size = (width + 31) & ~31; + align_buffer_64(row, row_size * 2); + uint8_t* temp_u = row; + uint8_t* temp_v = row + row_size; + + for (y = 0; y < height; ++y) { + ScaleRowUp2_Linear(src_u, temp_u, width); + ScaleRowUp2_Linear(src_v, temp_v, width); + I444ToRGB24Row(src_y, temp_u, temp_v, dst_rgb24, yuvconstants, width); + dst_rgb24 += dst_stride_rgb24; + src_y += src_stride_y; + src_u += src_stride_u; + src_v += src_stride_v; + } + + free_aligned_buffer_64(row); + return 0; +} + +LIBYUV_API +int I422ToRGB24MatrixFilter(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return I422ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb24, dst_stride_rgb24, + yuvconstants, width, height); + case kFilterBilinear: + case kFilterBox: + case kFilterLinear: + return I422ToRGB24MatrixLinear( + src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, + dst_rgb24, dst_stride_rgb24, yuvconstants, width, height); + } + + return -1; +} + +LIBYUV_API +int I420ToARGBMatrixFilter(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return I420ToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, + yuvconstants, width, height); + case kFilterBilinear: + case kFilterBox: + return I420ToARGBMatrixBilinear( + src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, + dst_argb, dst_stride_argb, yuvconstants, width, height); + case kFilterLinear: + // Actually we can do this, but probably there's no usage. 
+ return -1; + } + + return -1; +} + +LIBYUV_API +int I422ToARGBMatrixFilter(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return I422ToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, + yuvconstants, width, height); + case kFilterBilinear: + case kFilterBox: + case kFilterLinear: + return I422ToARGBMatrixLinear( + src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, + dst_argb, dst_stride_argb, yuvconstants, width, height); + } + + return -1; +} + +LIBYUV_API +int I420ToRGB24MatrixFilter(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return I420ToRGB24Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb24, dst_stride_rgb24, + yuvconstants, width, height); + case kFilterLinear: // TODO(fb): Implement Linear using Bilinear stride 0 + case kFilterBilinear: + case kFilterBox: + return I420ToRGB24MatrixBilinear( + src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, + dst_rgb24, dst_stride_rgb24, yuvconstants, width, height); + } + + return -1; +} + +LIBYUV_API +int I010ToAR30MatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return I010ToAR30Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_ar30, dst_stride_ar30, + yuvconstants, width, height); + case kFilterLinear: // TODO(fb): Implement Linear using Bilinear stride 0 + case kFilterBilinear: + case kFilterBox: + return I010ToAR30MatrixBilinear( + src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, + dst_ar30, dst_stride_ar30, yuvconstants, width, height); + } + + return -1; +} + +LIBYUV_API +int I210ToAR30MatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return I210ToAR30Matrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_ar30, dst_stride_ar30, + yuvconstants, width, height); + case kFilterBilinear: + case kFilterBox: + case kFilterLinear: + return I210ToAR30MatrixLinear( + src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, + dst_ar30, dst_stride_ar30, yuvconstants, width, height); + } + + return -1; +} + +LIBYUV_API +int I010ToARGBMatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return I010ToARGBMatrix(src_y, 
src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, + yuvconstants, width, height); + case kFilterLinear: // TODO(fb): Implement Linear using Bilinear stride 0 + case kFilterBilinear: + case kFilterBox: + return I010ToARGBMatrixBilinear( + src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, + dst_argb, dst_stride_argb, yuvconstants, width, height); + } + + return -1; +} + +LIBYUV_API +int I210ToARGBMatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return I210ToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, + yuvconstants, width, height); + case kFilterBilinear: + case kFilterBox: + case kFilterLinear: + return I210ToARGBMatrixLinear( + src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, + dst_argb, dst_stride_argb, yuvconstants, width, height); + } + + return -1; +} + +LIBYUV_API +int I420AlphaToARGBMatrixFilter(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return I420AlphaToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, + src_v, src_stride_v, src_a, src_stride_a, + dst_argb, dst_stride_argb, yuvconstants, + width, height, attenuate); + case kFilterLinear: // TODO(fb): Implement Linear using Bilinear stride 0 + case kFilterBilinear: + case kFilterBox: + return I420AlphaToARGBMatrixBilinear( + src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, src_a, + src_stride_a, dst_argb, dst_stride_argb, yuvconstants, width, height, + attenuate); + } + + return -1; +} + +LIBYUV_API +int I422AlphaToARGBMatrixFilter(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return I422AlphaToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, + src_v, src_stride_v, src_a, src_stride_a, + dst_argb, dst_stride_argb, yuvconstants, + width, height, attenuate); + case kFilterBilinear: + case kFilterBox: + case kFilterLinear: + return I422AlphaToARGBMatrixLinear( + src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, src_a, + src_stride_a, dst_argb, dst_stride_argb, yuvconstants, width, height, + attenuate); + } + + return -1; +} + +LIBYUV_API +int I010AlphaToARGBMatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + const uint16_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return I010AlphaToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, + src_v, src_stride_v, src_a, src_stride_a, + 
dst_argb, dst_stride_argb, yuvconstants, + width, height, attenuate); + case kFilterLinear: // TODO(fb): Implement Linear using Bilinear stride 0 + case kFilterBilinear: + case kFilterBox: + return I010AlphaToARGBMatrixBilinear( + src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, src_a, + src_stride_a, dst_argb, dst_stride_argb, yuvconstants, width, height, + attenuate); + } + + return -1; +} + +LIBYUV_API +int I210AlphaToARGBMatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + const uint16_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + int attenuate, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return I210AlphaToARGBMatrix(src_y, src_stride_y, src_u, src_stride_u, + src_v, src_stride_v, src_a, src_stride_a, + dst_argb, dst_stride_argb, yuvconstants, + width, height, attenuate); + case kFilterBilinear: + case kFilterBox: + case kFilterLinear: + return I210AlphaToARGBMatrixLinear( + src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, src_a, + src_stride_a, dst_argb, dst_stride_argb, yuvconstants, width, height, + attenuate); + } + + return -1; +} + +// TODO(fb): Verify this function works correctly. P010 is like NV12 but 10 bit +// UV is biplanar. +LIBYUV_API +int P010ToARGBMatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return P010ToARGBMatrix(src_y, src_stride_y, src_uv, src_stride_uv, + dst_argb, dst_stride_argb, yuvconstants, width, + height); + case kFilterLinear: // TODO(fb): Implement Linear using Bilinear stride 0 + case kFilterBilinear: + case kFilterBox: + return P010ToARGBMatrixBilinear(src_y, src_stride_y, src_uv, + src_stride_uv, dst_argb, dst_stride_argb, + yuvconstants, width, height); + } + + return -1; +} + +LIBYUV_API +int P210ToARGBMatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return P210ToARGBMatrix(src_y, src_stride_y, src_uv, src_stride_uv, + dst_argb, dst_stride_argb, yuvconstants, width, + height); + case kFilterBilinear: + case kFilterBox: + case kFilterLinear: + return P210ToARGBMatrixLinear(src_y, src_stride_y, src_uv, src_stride_uv, + dst_argb, dst_stride_argb, yuvconstants, + width, height); + } + + return -1; +} + +LIBYUV_API +int P010ToAR30MatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return P010ToAR30Matrix(src_y, src_stride_y, src_uv, src_stride_uv, + dst_ar30, dst_stride_ar30, yuvconstants, width, + height); + case kFilterLinear: // TODO(fb): Implement Linear using Bilinear stride 0 + case kFilterBilinear: + case kFilterBox: + return P010ToAR30MatrixBilinear(src_y, src_stride_y, src_uv, + src_stride_uv, dst_ar30, dst_stride_ar30, + yuvconstants, width, height); + } + + return -1; +} + +LIBYUV_API +int 
P210ToAR30MatrixFilter(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_uv, + int src_stride_uv, + uint8_t* dst_ar30, + int dst_stride_ar30, + const struct YuvConstants* yuvconstants, + int width, + int height, + enum FilterMode filter) { + switch (filter) { + case kFilterNone: + return P210ToAR30Matrix(src_y, src_stride_y, src_uv, src_stride_uv, + dst_ar30, dst_stride_ar30, yuvconstants, width, + height); + case kFilterBilinear: + case kFilterBox: + case kFilterLinear: + return P210ToAR30MatrixLinear(src_y, src_stride_y, src_uv, src_stride_uv, + dst_ar30, dst_stride_ar30, yuvconstants, + width, height); + } + + return -1; } #ifdef __cplusplus diff --git a/TMessagesProj/jni/third_party/libyuv/source/convert_from.cc b/TMessagesProj/jni/third_party/libyuv/source/convert_from.cc index f2cfc1d8f5..8bd07e4ce2 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/convert_from.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/convert_from.cc @@ -30,6 +30,8 @@ static __inline int Abs(int v) { } // I420 To any I4xx YUV format with mirroring. +// TODO(fbarchard): Consider kFilterNone for Y, or CopyPlane + static int I420ToI4xx(const uint8_t* src_y, int src_stride_y, const uint8_t* src_u, @@ -83,7 +85,8 @@ int I420ToI010(const uint8_t* src_y, int height) { int halfwidth = (width + 1) >> 1; int halfheight = (height + 1) >> 1; - if (!src_u || !src_v || !dst_u || !dst_v || width <= 0 || height == 0) { + if ((!src_y && dst_y) || !src_u || !src_v || !dst_u || !dst_v || width <= 0 || + height == 0) { return -1; } // Negative height means invert the image. @@ -109,6 +112,51 @@ int I420ToI010(const uint8_t* src_y, return 0; } +// Convert 8 bit YUV to 12 bit. +LIBYUV_API +int I420ToI012(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height) { + int halfwidth = (width + 1) >> 1; + int halfheight = (height + 1) >> 1; + if ((!src_y && dst_y) || !src_u || !src_v || !dst_u || !dst_v || width <= 0 || + height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + halfheight = (height + 1) >> 1; + src_y = src_y + (height - 1) * src_stride_y; + src_u = src_u + (halfheight - 1) * src_stride_u; + src_v = src_v + (halfheight - 1) * src_stride_v; + src_stride_y = -src_stride_y; + src_stride_u = -src_stride_u; + src_stride_v = -src_stride_v; + } + + // Convert Y plane. + Convert8To16Plane(src_y, src_stride_y, dst_y, dst_stride_y, 4096, width, + height); + // Convert UV planes. 
+ Convert8To16Plane(src_u, src_stride_u, dst_u, dst_stride_u, 4096, halfwidth, + halfheight); + Convert8To16Plane(src_v, src_stride_v, dst_v, dst_stride_v, 4096, halfwidth, + halfheight); + return 0; +} + // 420 chroma is 1/2 width, 1/2 height // 422 chroma is 1/2 width, 1x height LIBYUV_API @@ -159,6 +207,102 @@ int I420ToI444(const uint8_t* src_y, dst_uv_height); } +// 420 chroma to 444 chroma, 10/12 bit version +LIBYUV_API +int I010ToI410(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height) { + if (width == 0 || height == 0) { + return -1; + } + + if (dst_y) { + ScalePlane_12(src_y, src_stride_y, width, height, dst_y, dst_stride_y, + Abs(width), Abs(height), kFilterBilinear); + } + ScalePlane_12(src_u, src_stride_u, SUBSAMPLE(width, 1, 1), + SUBSAMPLE(height, 1, 1), dst_u, dst_stride_u, Abs(width), + Abs(height), kFilterBilinear); + ScalePlane_12(src_v, src_stride_v, SUBSAMPLE(width, 1, 1), + SUBSAMPLE(height, 1, 1), dst_v, dst_stride_v, Abs(width), + Abs(height), kFilterBilinear); + return 0; +} + +// 422 chroma to 444 chroma, 10/12 bit version +LIBYUV_API +int I210ToI410(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height) { + if (width == 0 || height == 0) { + return -1; + } + + if (dst_y) { + ScalePlane_12(src_y, src_stride_y, width, height, dst_y, dst_stride_y, + Abs(width), Abs(height), kFilterBilinear); + } + ScalePlane_12(src_u, src_stride_u, SUBSAMPLE(width, 1, 1), height, dst_u, + dst_stride_u, Abs(width), Abs(height), kFilterBilinear); + ScalePlane_12(src_v, src_stride_v, SUBSAMPLE(width, 1, 1), height, dst_v, + dst_stride_v, Abs(width), Abs(height), kFilterBilinear); + return 0; +} + +// 422 chroma is 1/2 width, 1x height +// 444 chroma is 1x width, 1x height +LIBYUV_API +int I422ToI444(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + if (width == 0 || height == 0) { + return -1; + } + + if (dst_y) { + ScalePlane(src_y, src_stride_y, width, height, dst_y, dst_stride_y, + Abs(width), Abs(height), kFilterBilinear); + } + ScalePlane(src_u, src_stride_u, SUBSAMPLE(width, 1, 1), height, dst_u, + dst_stride_u, Abs(width), Abs(height), kFilterBilinear); + ScalePlane(src_v, src_stride_v, SUBSAMPLE(width, 1, 1), height, dst_v, + dst_stride_v, Abs(width), Abs(height), kFilterBilinear); + return 0; +} + // Copy to I400. 
Source can be I420,422,444,400,NV12,NV21 LIBYUV_API int I400Copy(const uint8_t* src_y, @@ -294,14 +438,6 @@ int I420ToYUY2(const uint8_t* src_y, } } #endif -#if defined(HAS_I422TOYUY2ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToYUY2Row = I422ToYUY2Row_Any_MMI; - if (IS_ALIGNED(width, 8)) { - I422ToYUY2Row = I422ToYUY2Row_MMI; - } - } -#endif #if defined(HAS_I422TOYUY2ROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { I422ToYUY2Row = I422ToYUY2Row_Any_MSA; @@ -310,6 +446,14 @@ int I420ToYUY2(const uint8_t* src_y, } } #endif +#if defined(HAS_I422TOYUY2ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToYUY2Row = I422ToYUY2Row_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I422ToYUY2Row = I422ToYUY2Row_LASX; + } + } +#endif for (y = 0; y < height - 1; y += 2) { I422ToYUY2Row(src_y, src_u, src_v, dst_yuy2, width); @@ -381,14 +525,6 @@ int I422ToUYVY(const uint8_t* src_y, } } #endif -#if defined(HAS_I422TOUYVYROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToUYVYRow = I422ToUYVYRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - I422ToUYVYRow = I422ToUYVYRow_MMI; - } - } -#endif #if defined(HAS_I422TOUYVYROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { I422ToUYVYRow = I422ToUYVYRow_Any_MSA; @@ -397,6 +533,14 @@ int I422ToUYVY(const uint8_t* src_y, } } #endif +#if defined(HAS_I422TOUYVYROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToUYVYRow = I422ToUYVYRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I422ToUYVYRow = I422ToUYVYRow_LASX; + } + } +#endif for (y = 0; y < height; ++y) { I422ToUYVYRow(src_y, src_u, src_v, dst_uyvy, width); @@ -456,14 +600,6 @@ int I420ToUYVY(const uint8_t* src_y, } } #endif -#if defined(HAS_I422TOUYVYROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToUYVYRow = I422ToUYVYRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - I422ToUYVYRow = I422ToUYVYRow_MMI; - } - } -#endif #if defined(HAS_I422TOUYVYROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { I422ToUYVYRow = I422ToUYVYRow_Any_MSA; @@ -472,6 +608,14 @@ int I420ToUYVY(const uint8_t* src_y, } } #endif +#if defined(HAS_I422TOUYVYROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToUYVYRow = I422ToUYVYRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I422ToUYVYRow = I422ToUYVYRow_LASX; + } + } +#endif for (y = 0; y < height - 1; y += 2) { I422ToUYVYRow(src_y, src_u, src_v, dst_uyvy, width); @@ -503,8 +647,7 @@ int I420ToNV12(const uint8_t* src_y, int height) { int halfwidth = (width + 1) / 2; int halfheight = (height + 1) / 2; - if (!src_y || !src_u || !src_v || !dst_y || !dst_uv || width <= 0 || - height == 0) { + if (!src_y || !src_u || !src_v || !dst_uv || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. @@ -630,7 +773,8 @@ int ConvertFromI420(const uint8_t* y, height); break; case FOURCC_NV12: { - uint8_t* dst_uv = dst_sample + width * height; + int dst_y_stride = dst_sample_stride ? dst_sample_stride : width; + uint8_t* dst_uv = dst_sample + dst_y_stride * height; r = I420ToNV12(y, y_stride, u, u_stride, v, v_stride, dst_sample, dst_sample_stride ? dst_sample_stride : width, dst_uv, dst_sample_stride ? dst_sample_stride : width, width, @@ -638,7 +782,8 @@ int ConvertFromI420(const uint8_t* y, break; } case FOURCC_NV21: { - uint8_t* dst_vu = dst_sample + width * height; + int dst_y_stride = dst_sample_stride ? dst_sample_stride : width; + uint8_t* dst_vu = dst_sample + dst_y_stride * height; r = I420ToNV21(y, y_stride, u, u_stride, v, v_stride, dst_sample, dst_sample_stride ? dst_sample_stride : width, dst_vu, dst_sample_stride ? 
dst_sample_stride : width, width, diff --git a/TMessagesProj/jni/third_party/libyuv/source/convert_from_argb.cc b/TMessagesProj/jni/third_party/libyuv/source/convert_from_argb.cc index 4ba4bb5e0f..d548aec287 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/convert_from_argb.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/convert_from_argb.cc @@ -68,14 +68,6 @@ int ARGBToI444(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOUV444ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToUV444Row = ARGBToUV444Row_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBToUV444Row = ARGBToUV444Row_MMI; - } - } -#endif #if defined(HAS_ARGBTOUV444ROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToUV444Row = ARGBToUV444Row_Any_MSA; @@ -84,6 +76,14 @@ int ARGBToI444(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOUV444ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToUV444Row = ARGBToUV444Row_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGBToUV444Row = ARGBToUV444Row_LASX; + } + } +#endif #if defined(HAS_ARGBTOYROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { ARGBToYRow = ARGBToYRow_Any_SSSE3; @@ -103,19 +103,11 @@ int ARGBToI444(const uint8_t* src_argb, #if defined(HAS_ARGBTOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ARGBToYRow = ARGBToYRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ARGBToYRow = ARGBToYRow_NEON; } } #endif -#if defined(HAS_ARGBTOYROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToYRow = ARGBToYRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBToYRow = ARGBToYRow_MMI; - } - } -#endif #if defined(HAS_ARGBTOYROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToYRow = ARGBToYRow_Any_MSA; @@ -124,6 +116,22 @@ int ARGBToI444(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOYROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGBToYRow = ARGBToYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_LSX; + } + } +#endif +#if defined(HAS_ARGBTOYROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToYRow = ARGBToYRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGBToYRow = ARGBToYRow_LASX; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToUV444Row(src_argb, dst_u, dst_v, width); @@ -170,30 +178,42 @@ int ARGBToI422(const uint8_t* src_argb, height = 1; src_stride_argb = dst_stride_y = dst_stride_u = dst_stride_v = 0; } -#if defined(HAS_ARGBTOYROW_SSSE3) && defined(HAS_ARGBTOUVROW_SSSE3) +#if defined(HAS_ARGBTOYROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBToUVRow = ARGBToUVRow_Any_SSSE3; ARGBToYRow = ARGBToYRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_SSSE3; ARGBToYRow = ARGBToYRow_SSSE3; } } #endif -#if defined(HAS_ARGBTOYROW_AVX2) && defined(HAS_ARGBTOUVROW_AVX2) +#if defined(HAS_ARGBTOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVRow = ARGBToUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToUVRow = ARGBToUVRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOYROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToUVRow = ARGBToUVRow_Any_AVX2; ARGBToYRow = ARGBToYRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_AVX2; ARGBToYRow = ARGBToYRow_AVX2; } } #endif +#if defined(HAS_ARGBTOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVRow = ARGBToUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_AVX2; + } + } +#endif #if defined(HAS_ARGBTOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ARGBToYRow = ARGBToYRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ARGBToYRow = ARGBToYRow_NEON; } } @@ -206,20 +226,6 @@ int ARGBToI422(const uint8_t* 
src_argb, } } #endif - -#if defined(HAS_ARGBTOYROW_MMI) && defined(HAS_ARGBTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToYRow = ARGBToYRow_Any_MMI; - ARGBToUVRow = ARGBToUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBToYRow = ARGBToYRow_MMI; - } - if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_MMI; - } - } -#endif - #if defined(HAS_ARGBTOYROW_MSA) && defined(HAS_ARGBTOUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToYRow = ARGBToYRow_Any_MSA; @@ -232,6 +238,24 @@ int ARGBToI422(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOYROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGBToYRow = ARGBToYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_LSX; + } + } +#endif +#if defined(HAS_ARGBTOYROW_LASX) && defined(HAS_ARGBTOUVROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToYRow = ARGBToYRow_Any_LASX; + ARGBToUVRow = ARGBToUVRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGBToYRow = ARGBToYRow_LASX; + ARGBToUVRow = ARGBToUVRow_LASX; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToUVRow(src_argb, 0, dst_u, dst_v, width); @@ -271,30 +295,10 @@ int ARGBToNV12(const uint8_t* src_argb, src_argb = src_argb + (height - 1) * src_stride_argb; src_stride_argb = -src_stride_argb; } -#if defined(HAS_ARGBTOYROW_SSSE3) && defined(HAS_ARGBTOUVROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBToUVRow = ARGBToUVRow_Any_SSSE3; - ARGBToYRow = ARGBToYRow_Any_SSSE3; - if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_SSSE3; - ARGBToYRow = ARGBToYRow_SSSE3; - } - } -#endif -#if defined(HAS_ARGBTOYROW_AVX2) && defined(HAS_ARGBTOUVROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToUVRow = ARGBToUVRow_Any_AVX2; - ARGBToYRow = ARGBToYRow_Any_AVX2; - if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_AVX2; - ARGBToYRow = ARGBToYRow_AVX2; - } - } -#endif #if defined(HAS_ARGBTOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ARGBToYRow = ARGBToYRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ARGBToYRow = ARGBToYRow_NEON; } } @@ -307,15 +311,35 @@ int ARGBToNV12(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYROW_MMI) && defined(HAS_ARGBTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToYRow = ARGBToYRow_Any_MMI; - ARGBToUVRow = ARGBToUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBToYRow = ARGBToYRow_MMI; +#if defined(HAS_ARGBTOYROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToYRow = ARGBToYRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_SSSE3; } + } +#endif +#if defined(HAS_ARGBTOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVRow = ARGBToUVRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_MMI; + ARGBToUVRow = ARGBToUVRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOYROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToYRow = ARGBToYRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToYRow = ARGBToYRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBTOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVRow = ARGBToUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_AVX2; } } #endif @@ -331,6 +355,24 @@ int ARGBToNV12(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOYROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGBToYRow = ARGBToYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_LSX; + } + } +#endif +#if defined(HAS_ARGBTOYROW_LASX) && defined(HAS_ARGBTOUVROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToYRow = ARGBToYRow_Any_LASX; + ARGBToUVRow = ARGBToUVRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + 
ARGBToYRow = ARGBToYRow_LASX; + ARGBToUVRow = ARGBToUVRow_LASX; + } + } +#endif #if defined(HAS_MERGEUVROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { MergeUVRow_ = MergeUVRow_Any_SSE2; @@ -355,14 +397,6 @@ int ARGBToNV12(const uint8_t* src_argb, } } #endif -#if defined(HAS_MERGEUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - MergeUVRow_ = MergeUVRow_Any_MMI; - if (IS_ALIGNED(halfwidth, 8)) { - MergeUVRow_ = MergeUVRow_MMI; - } - } -#endif #if defined(HAS_MERGEUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { MergeUVRow_ = MergeUVRow_Any_MSA; @@ -370,6 +404,14 @@ int ARGBToNV12(const uint8_t* src_argb, MergeUVRow_ = MergeUVRow_MSA; } } +#endif +#if defined(HAS_MERGEUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + MergeUVRow_ = MergeUVRow_Any_LSX; + if (IS_ALIGNED(halfwidth, 16)) { + MergeUVRow_ = MergeUVRow_LSX; + } + } #endif { // Allocate a rows of uv. @@ -423,30 +465,42 @@ int ARGBToNV21(const uint8_t* src_argb, src_argb = src_argb + (height - 1) * src_stride_argb; src_stride_argb = -src_stride_argb; } -#if defined(HAS_ARGBTOYROW_SSSE3) && defined(HAS_ARGBTOUVROW_SSSE3) +#if defined(HAS_ARGBTOYROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBToUVRow = ARGBToUVRow_Any_SSSE3; ARGBToYRow = ARGBToYRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_SSSE3; ARGBToYRow = ARGBToYRow_SSSE3; } } #endif -#if defined(HAS_ARGBTOYROW_AVX2) && defined(HAS_ARGBTOUVROW_AVX2) +#if defined(HAS_ARGBTOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVRow = ARGBToUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToUVRow = ARGBToUVRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOYROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToUVRow = ARGBToUVRow_Any_AVX2; ARGBToYRow = ARGBToYRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_AVX2; ARGBToYRow = ARGBToYRow_AVX2; } } #endif +#if defined(HAS_ARGBTOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVRow = ARGBToUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_AVX2; + } + } +#endif #if defined(HAS_ARGBTOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ARGBToYRow = ARGBToYRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ARGBToYRow = ARGBToYRow_NEON; } } @@ -459,18 +513,6 @@ int ARGBToNV21(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYROW_MMI) && defined(HAS_ARGBTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToYRow = ARGBToYRow_Any_MMI; - ARGBToUVRow = ARGBToUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBToYRow = ARGBToYRow_MMI; - } - if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_MMI; - } - } -#endif #if defined(HAS_ARGBTOYROW_MSA) && defined(HAS_ARGBTOUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToYRow = ARGBToYRow_Any_MSA; @@ -483,6 +525,24 @@ int ARGBToNV21(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOYROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGBToYRow = ARGBToYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_LSX; + } + } +#endif +#if defined(HAS_ARGBTOYROW_LASX) && defined(HAS_ARGBTOUVROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToYRow = ARGBToYRow_Any_LASX; + ARGBToUVRow = ARGBToUVRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGBToYRow = ARGBToYRow_LASX; + ARGBToUVRow = ARGBToUVRow_LASX; + } + } +#endif #if defined(HAS_MERGEUVROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { MergeUVRow_ = MergeUVRow_Any_SSE2; @@ -507,14 +567,6 @@ int ARGBToNV21(const uint8_t* src_argb, } } #endif -#if defined(HAS_MERGEUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - MergeUVRow_ = MergeUVRow_Any_MMI; - if 
(IS_ALIGNED(halfwidth, 8)) { - MergeUVRow_ = MergeUVRow_MMI; - } - } -#endif #if defined(HAS_MERGEUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { MergeUVRow_ = MergeUVRow_Any_MSA; @@ -522,6 +574,14 @@ int ARGBToNV21(const uint8_t* src_argb, MergeUVRow_ = MergeUVRow_MSA; } } +#endif +#if defined(HAS_MERGEUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + MergeUVRow_ = MergeUVRow_Any_LSX; + if (IS_ALIGNED(halfwidth, 16)) { + MergeUVRow_ = MergeUVRow_LSX; + } + } #endif { // Allocate a rows of uv. @@ -574,30 +634,42 @@ int ABGRToNV12(const uint8_t* src_abgr, src_abgr = src_abgr + (height - 1) * src_stride_abgr; src_stride_abgr = -src_stride_abgr; } -#if defined(HAS_ABGRTOYROW_SSSE3) && defined(HAS_ABGRTOUVROW_SSSE3) +#if defined(HAS_ABGRTOYROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ABGRToUVRow = ABGRToUVRow_Any_SSSE3; ABGRToYRow = ABGRToYRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ABGRToUVRow = ABGRToUVRow_SSSE3; ABGRToYRow = ABGRToYRow_SSSE3; } } #endif -#if defined(HAS_ABGRTOYROW_AVX2) && defined(HAS_ABGRTOUVROW_AVX2) +#if defined(HAS_ABGRTOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ABGRToUVRow = ABGRToUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ABGRToUVRow = ABGRToUVRow_SSSE3; + } + } +#endif +#if defined(HAS_ABGRTOYROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - ABGRToUVRow = ABGRToUVRow_Any_AVX2; ABGRToYRow = ABGRToYRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - ABGRToUVRow = ABGRToUVRow_AVX2; ABGRToYRow = ABGRToYRow_AVX2; } } #endif +#if defined(HAS_ABGRTOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ABGRToUVRow = ABGRToUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ABGRToUVRow = ABGRToUVRow_AVX2; + } + } +#endif #if defined(HAS_ABGRTOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ABGRToYRow = ABGRToYRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ABGRToYRow = ABGRToYRow_NEON; } } @@ -610,18 +682,6 @@ int ABGRToNV12(const uint8_t* src_abgr, } } #endif -#if defined(HAS_ABGRTOYROW_MMI) && defined(HAS_ABGRTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ABGRToYRow = ABGRToYRow_Any_MMI; - ABGRToUVRow = ABGRToUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ABGRToYRow = ABGRToYRow_MMI; - } - if (IS_ALIGNED(width, 16)) { - ABGRToUVRow = ABGRToUVRow_MMI; - } - } -#endif #if defined(HAS_ABGRTOYROW_MSA) && defined(HAS_ABGRTOUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ABGRToYRow = ABGRToYRow_Any_MSA; @@ -634,6 +694,22 @@ int ABGRToNV12(const uint8_t* src_abgr, } } #endif +#if defined(HAS_ABGRTOYROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ABGRToYRow = ABGRToYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ABGRToYRow = ABGRToYRow_LSX; + } + } +#endif +#if defined(HAS_ABGRTOYROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ABGRToYRow = ABGRToYRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ABGRToYRow = ABGRToYRow_LASX; + } + } +#endif #if defined(HAS_MERGEUVROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { MergeUVRow_ = MergeUVRow_Any_SSE2; @@ -658,14 +734,6 @@ int ABGRToNV12(const uint8_t* src_abgr, } } #endif -#if defined(HAS_MERGEUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - MergeUVRow_ = MergeUVRow_Any_MMI; - if (IS_ALIGNED(halfwidth, 8)) { - MergeUVRow_ = MergeUVRow_MMI; - } - } -#endif #if defined(HAS_MERGEUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { MergeUVRow_ = MergeUVRow_Any_MSA; @@ -673,6 +741,14 @@ int ABGRToNV12(const uint8_t* src_abgr, MergeUVRow_ = MergeUVRow_MSA; } } +#endif +#if defined(HAS_MERGEUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + MergeUVRow_ = MergeUVRow_Any_LSX; + if (IS_ALIGNED(halfwidth, 16)) { + MergeUVRow_ = MergeUVRow_LSX; + } + } #endif { // Allocate a rows 
of uv. @@ -726,30 +802,42 @@ int ABGRToNV21(const uint8_t* src_abgr, src_abgr = src_abgr + (height - 1) * src_stride_abgr; src_stride_abgr = -src_stride_abgr; } -#if defined(HAS_ABGRTOYROW_SSSE3) && defined(HAS_ABGRTOUVROW_SSSE3) +#if defined(HAS_ABGRTOYROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ABGRToUVRow = ABGRToUVRow_Any_SSSE3; ABGRToYRow = ABGRToYRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ABGRToUVRow = ABGRToUVRow_SSSE3; ABGRToYRow = ABGRToYRow_SSSE3; } } #endif -#if defined(HAS_ABGRTOYROW_AVX2) && defined(HAS_ABGRTOUVROW_AVX2) +#if defined(HAS_ABGRTOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ABGRToUVRow = ABGRToUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ABGRToUVRow = ABGRToUVRow_SSSE3; + } + } +#endif +#if defined(HAS_ABGRTOYROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - ABGRToUVRow = ABGRToUVRow_Any_AVX2; ABGRToYRow = ABGRToYRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - ABGRToUVRow = ABGRToUVRow_AVX2; ABGRToYRow = ABGRToYRow_AVX2; } } #endif +#if defined(HAS_ABGRTOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ABGRToUVRow = ABGRToUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ABGRToUVRow = ABGRToUVRow_AVX2; + } + } +#endif #if defined(HAS_ABGRTOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ABGRToYRow = ABGRToYRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ABGRToYRow = ABGRToYRow_NEON; } } @@ -762,18 +850,6 @@ int ABGRToNV21(const uint8_t* src_abgr, } } #endif -#if defined(HAS_ABGRTOYROW_MMI) && defined(HAS_ABGRTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ABGRToYRow = ABGRToYRow_Any_MMI; - ABGRToUVRow = ABGRToUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ABGRToYRow = ABGRToYRow_MMI; - } - if (IS_ALIGNED(width, 16)) { - ABGRToUVRow = ABGRToUVRow_MMI; - } - } -#endif #if defined(HAS_ABGRTOYROW_MSA) && defined(HAS_ABGRTOUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ABGRToYRow = ABGRToYRow_Any_MSA; @@ -786,6 +862,22 @@ int ABGRToNV21(const uint8_t* src_abgr, } } #endif +#if defined(HAS_ABGRTOYROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ABGRToYRow = ABGRToYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ABGRToYRow = ABGRToYRow_LSX; + } + } +#endif +#if defined(HAS_ABGRTOYROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ABGRToYRow = ABGRToYRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ABGRToYRow = ABGRToYRow_LASX; + } + } +#endif #if defined(HAS_MERGEUVROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { MergeUVRow_ = MergeUVRow_Any_SSE2; @@ -810,14 +902,6 @@ int ABGRToNV21(const uint8_t* src_abgr, } } #endif -#if defined(HAS_MERGEUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - MergeUVRow_ = MergeUVRow_Any_MMI; - if (IS_ALIGNED(halfwidth, 8)) { - MergeUVRow_ = MergeUVRow_MMI; - } - } -#endif #if defined(HAS_MERGEUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { MergeUVRow_ = MergeUVRow_Any_MSA; @@ -825,6 +909,14 @@ int ABGRToNV21(const uint8_t* src_abgr, MergeUVRow_ = MergeUVRow_MSA; } } +#endif +#if defined(HAS_MERGEUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + MergeUVRow_ = MergeUVRow_Any_LSX; + if (IS_ALIGNED(halfwidth, 16)) { + MergeUVRow_ = MergeUVRow_LSX; + } + } #endif { // Allocate a rows of uv. 
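// ---------------------------------------------------------------------------
// Illustrative, self-contained sketch (not libyuv code, not part of the
// patch) of the run-time dispatch pattern the hunks above follow: start from
// the portable C row function, upgrade to an "Any"-width SIMD variant when
// the CPU flag is present, and to the fully vectorized variant only when the
// width is a multiple of the vector width. TestCpuFlag / IS_ALIGNED and the
// Any_/plain naming are libyuv's; the flags and row functions below are
// hypothetical stand-ins so the sketch compiles on its own.
// ---------------------------------------------------------------------------
#include <stdint.h>

namespace dispatch_sketch {

enum { kCpuHasNEON = 1 << 0 };  // stand-in flag, not libyuv's real value

static int TestCpuFlag(int flag) {
  return 0 & flag;  // pretend no SIMD is available in this sketch
}
static bool IsAligned(int value, int alignment) {
  return (value & (alignment - 1)) == 0;
}

// Hypothetical row kernels: C fallback, any-width SIMD (scalar tail inside),
// and the aligned-only SIMD path. Bodies are trivial copies for illustration.
static void RowC(const uint8_t* src, uint8_t* dst, int width) {
  for (int i = 0; i < width; ++i) dst[i] = src[i];
}
static void RowAnyNEON(const uint8_t* src, uint8_t* dst, int width) {
  RowC(src, dst, width);
}
static void RowNEON(const uint8_t* src, uint8_t* dst, int width) {
  RowC(src, dst, width);
}

void Convert(const uint8_t* src, uint8_t* dst, int width) {
  // Default to the portable implementation, as the functions above do with
  // e.g. ARGBToYRow = ARGBToYRow_C.
  void (*Row)(const uint8_t*, uint8_t*, int) = RowC;
  if (TestCpuFlag(kCpuHasNEON)) {
    Row = RowAnyNEON;         // handles any width
    if (IsAligned(width, 16)) {
      Row = RowNEON;          // full-vector path, width % 16 == 0 only
    }
  }
  Row(src, dst, width);
}

}  // namespace dispatch_sketch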
@@ -883,30 +975,42 @@ int ARGBToYUY2(const uint8_t* src_argb, height = 1; src_stride_argb = dst_stride_yuy2 = 0; } -#if defined(HAS_ARGBTOYROW_SSSE3) && defined(HAS_ARGBTOUVROW_SSSE3) +#if defined(HAS_ARGBTOYROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBToUVRow = ARGBToUVRow_Any_SSSE3; ARGBToYRow = ARGBToYRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_SSSE3; ARGBToYRow = ARGBToYRow_SSSE3; } } #endif -#if defined(HAS_ARGBTOYROW_AVX2) && defined(HAS_ARGBTOUVROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToUVRow = ARGBToUVRow_Any_AVX2; - ARGBToYRow = ARGBToYRow_Any_AVX2; - if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_AVX2; - ARGBToYRow = ARGBToYRow_AVX2; - } +#if defined(HAS_ARGBTOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVRow = ARGBToUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToUVRow = ARGBToUVRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOYROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToYRow = ARGBToYRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToYRow = ARGBToYRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBTOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVRow = ARGBToUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_AVX2; + } } #endif #if defined(HAS_ARGBTOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ARGBToYRow = ARGBToYRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ARGBToYRow = ARGBToYRow_NEON; } } @@ -919,18 +1023,6 @@ int ARGBToYUY2(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYROW_MMI) && defined(HAS_ARGBTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToYRow = ARGBToYRow_Any_MMI; - ARGBToUVRow = ARGBToUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBToYRow = ARGBToYRow_MMI; - } - if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_MMI; - } - } -#endif #if defined(HAS_ARGBTOYROW_MSA) && defined(HAS_ARGBTOUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToYRow = ARGBToYRow_Any_MSA; @@ -943,6 +1035,24 @@ int ARGBToYUY2(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOYROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGBToYRow = ARGBToYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_LSX; + } + } +#endif +#if defined(HAS_ARGBTOYROW_LASX) && defined(HAS_ARGBTOUVROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToYRow = ARGBToYRow_Any_LASX; + ARGBToUVRow = ARGBToUVRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGBToYRow = ARGBToYRow_LASX; + ARGBToUVRow = ARGBToUVRow_LASX; + } + } +#endif #if defined(HAS_I422TOYUY2ROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { I422ToYUY2Row = I422ToYUY2Row_Any_SSE2; @@ -967,14 +1077,6 @@ int ARGBToYUY2(const uint8_t* src_argb, } } #endif -#if defined(HAS_I422TOYUY2ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToYUY2Row = I422ToYUY2Row_Any_MMI; - if (IS_ALIGNED(width, 8)) { - I422ToYUY2Row = I422ToYUY2Row_MMI; - } - } -#endif #if defined(HAS_I422TOYUY2ROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { I422ToYUY2Row = I422ToYUY2Row_Any_MSA; @@ -983,6 +1085,14 @@ int ARGBToYUY2(const uint8_t* src_argb, } } #endif +#if defined(HAS_I422TOYUY2ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToYUY2Row = I422ToYUY2Row_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I422ToYUY2Row = I422ToYUY2Row_LASX; + } + } +#endif { // Allocate a rows of yuv. 
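// ---------------------------------------------------------------------------
// Illustrative sketch (not libyuv code, not part of the patch) of the byte
// order that the I422ToYUY2Row kernels selected in the hunk above are
// expected to produce: 4:2:2 planes packed as Y0 U0 Y1 V0 per pixel pair.
// PackRowToYUY2 is a local name for this sketch, not a libyuv symbol.
// ---------------------------------------------------------------------------
#include <stdint.h>

static void PackRowToYUY2(const uint8_t* src_y, const uint8_t* src_u,
                          const uint8_t* src_v, uint8_t* dst_yuy2, int width) {
  // width is in pixels; each pair of pixels shares one U and one V sample.
  for (int x = 0; x < width - 1; x += 2) {
    dst_yuy2[0] = src_y[0];  // Y0
    dst_yuy2[1] = src_u[0];  // U (shared by the pair)
    dst_yuy2[2] = src_y[1];  // Y1
    dst_yuy2[3] = src_v[0];  // V (shared by the pair)
    src_y += 2;
    ++src_u;
    ++src_v;
    dst_yuy2 += 4;
  }
  if (width & 1) {           // odd width: repeat the last luma sample
    dst_yuy2[0] = src_y[0];
    dst_yuy2[1] = src_u[0];
    dst_yuy2[2] = src_y[0];
    dst_yuy2[3] = src_v[0];
  }
}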
@@ -1036,30 +1146,42 @@ int ARGBToUYVY(const uint8_t* src_argb, height = 1; src_stride_argb = dst_stride_uyvy = 0; } -#if defined(HAS_ARGBTOYROW_SSSE3) && defined(HAS_ARGBTOUVROW_SSSE3) +#if defined(HAS_ARGBTOYROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBToUVRow = ARGBToUVRow_Any_SSSE3; ARGBToYRow = ARGBToYRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_SSSE3; ARGBToYRow = ARGBToYRow_SSSE3; } } #endif -#if defined(HAS_ARGBTOYROW_AVX2) && defined(HAS_ARGBTOUVROW_AVX2) +#if defined(HAS_ARGBTOUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVRow = ARGBToUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToUVRow = ARGBToUVRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOYROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToUVRow = ARGBToUVRow_Any_AVX2; ARGBToYRow = ARGBToYRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - ARGBToUVRow = ARGBToUVRow_AVX2; ARGBToYRow = ARGBToYRow_AVX2; } } #endif +#if defined(HAS_ARGBTOUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVRow = ARGBToUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVRow = ARGBToUVRow_AVX2; + } + } +#endif #if defined(HAS_ARGBTOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ARGBToYRow = ARGBToYRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ARGBToYRow = ARGBToYRow_NEON; } } @@ -1072,18 +1194,6 @@ int ARGBToUYVY(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYROW_MMI) && defined(HAS_ARGBTOUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToYRow = ARGBToYRow_Any_MMI; - ARGBToUVRow = ARGBToUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBToYRow = ARGBToYRow_MMI; - } - if (IS_ALIGNED(width, 16)) { - ARGBToUVRow = ARGBToUVRow_MMI; - } - } -#endif #if defined(HAS_ARGBTOYROW_MSA) && defined(HAS_ARGBTOUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToYRow = ARGBToYRow_Any_MSA; @@ -1096,6 +1206,24 @@ int ARGBToUYVY(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOYROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGBToYRow = ARGBToYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_LSX; + } + } +#endif +#if defined(HAS_ARGBTOYROW_LASX) && defined(HAS_ARGBTOUVROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToYRow = ARGBToYRow_Any_LASX; + ARGBToUVRow = ARGBToUVRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGBToYRow = ARGBToYRow_LASX; + ARGBToUVRow = ARGBToUVRow_LASX; + } + } +#endif #if defined(HAS_I422TOUYVYROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { I422ToUYVYRow = I422ToUYVYRow_Any_SSE2; @@ -1120,14 +1248,6 @@ int ARGBToUYVY(const uint8_t* src_argb, } } #endif -#if defined(HAS_I422TOUYVYROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToUYVYRow = I422ToUYVYRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - I422ToUYVYRow = I422ToUYVYRow_MMI; - } - } -#endif #if defined(HAS_I422TOUYVYROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { I422ToUYVYRow = I422ToUYVYRow_Any_MSA; @@ -1136,6 +1256,14 @@ int ARGBToUYVY(const uint8_t* src_argb, } } #endif +#if defined(HAS_I422TOUYVYROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToUYVYRow = I422ToUYVYRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + I422ToUYVYRow = I422ToUYVYRow_LASX; + } + } +#endif { // Allocate a rows of yuv. 
@@ -1200,19 +1328,11 @@ int ARGBToI400(const uint8_t* src_argb, #if defined(HAS_ARGBTOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ARGBToYRow = ARGBToYRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ARGBToYRow = ARGBToYRow_NEON; } } #endif -#if defined(HAS_ARGBTOYROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToYRow = ARGBToYRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBToYRow = ARGBToYRow_MMI; - } - } -#endif #if defined(HAS_ARGBTOYROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToYRow = ARGBToYRow_Any_MSA; @@ -1221,6 +1341,22 @@ int ARGBToI400(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOYROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGBToYRow = ARGBToYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGBToYRow = ARGBToYRow_LSX; + } + } +#endif +#if defined(HAS_ARGBTOYROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToYRow = ARGBToYRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGBToYRow = ARGBToYRow_LASX; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToYRow(src_argb, dst_y, width); @@ -1298,19 +1434,11 @@ int ARGBToRGB24(const uint8_t* src_argb, #if defined(HAS_ARGBTORGB24ROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ARGBToRGB24Row = ARGBToRGB24Row_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ARGBToRGB24Row = ARGBToRGB24Row_NEON; } } #endif -#if defined(HAS_ARGBTORGB24ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToRGB24Row = ARGBToRGB24Row_Any_MMI; - if (IS_ALIGNED(width, 4)) { - ARGBToRGB24Row = ARGBToRGB24Row_MMI; - } - } -#endif #if defined(HAS_ARGBTORGB24ROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToRGB24Row = ARGBToRGB24Row_Any_MSA; @@ -1319,6 +1447,14 @@ int ARGBToRGB24(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTORGB24ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToRGB24Row = ARGBToRGB24Row_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGBToRGB24Row = ARGBToRGB24Row_LASX; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToRGB24Row(src_argb, dst_rgb24, width); @@ -1377,14 +1513,6 @@ int ARGBToRAW(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTORAWROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToRAWRow = ARGBToRAWRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - ARGBToRAWRow = ARGBToRAWRow_MMI; - } - } -#endif #if defined(HAS_ARGBTORAWROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToRAWRow = ARGBToRAWRow_Any_MSA; @@ -1393,6 +1521,14 @@ int ARGBToRAW(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTORAWROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToRAWRow = ARGBToRAWRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGBToRAWRow = ARGBToRAWRow_LASX; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToRAWRow(src_argb, dst_raw, width); @@ -1455,14 +1591,6 @@ int ARGBToRGB565Dither(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTORGB565DITHERROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_MMI; - } - } -#endif #if defined(HAS_ARGBTORGB565DITHERROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_MSA; @@ -1471,6 +1599,14 @@ int ARGBToRGB565Dither(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTORGB565DITHERROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_Any_LASX; + if (IS_ALIGNED(width, 16)) { + ARGBToRGB565DitherRow = ARGBToRGB565DitherRow_LASX; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToRGB565DitherRow(src_argb, dst_rgb565, @@ -1532,14 +1668,6 @@ int 
ARGBToRGB565(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTORGB565ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToRGB565Row = ARGBToRGB565Row_Any_MMI; - if (IS_ALIGNED(width, 4)) { - ARGBToRGB565Row = ARGBToRGB565Row_MMI; - } - } -#endif #if defined(HAS_ARGBTORGB565ROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToRGB565Row = ARGBToRGB565Row_Any_MSA; @@ -1548,6 +1676,14 @@ int ARGBToRGB565(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTORGB565ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToRGB565Row = ARGBToRGB565Row_Any_LASX; + if (IS_ALIGNED(width, 16)) { + ARGBToRGB565Row = ARGBToRGB565Row_LASX; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToRGB565Row(src_argb, dst_rgb565, width); @@ -1606,14 +1742,6 @@ int ARGBToARGB1555(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOARGB1555ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToARGB1555Row = ARGBToARGB1555Row_Any_MMI; - if (IS_ALIGNED(width, 4)) { - ARGBToARGB1555Row = ARGBToARGB1555Row_MMI; - } - } -#endif #if defined(HAS_ARGBTOARGB1555ROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToARGB1555Row = ARGBToARGB1555Row_Any_MSA; @@ -1622,6 +1750,14 @@ int ARGBToARGB1555(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOARGB1555ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToARGB1555Row = ARGBToARGB1555Row_Any_LASX; + if (IS_ALIGNED(width, 16)) { + ARGBToARGB1555Row = ARGBToARGB1555Row_LASX; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToARGB1555Row(src_argb, dst_argb1555, width); @@ -1680,14 +1816,6 @@ int ARGBToARGB4444(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOARGB4444ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToARGB4444Row = ARGBToARGB4444Row_Any_MMI; - if (IS_ALIGNED(width, 4)) { - ARGBToARGB4444Row = ARGBToARGB4444Row_MMI; - } - } -#endif #if defined(HAS_ARGBTOARGB4444ROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToARGB4444Row = ARGBToARGB4444Row_Any_MSA; @@ -1696,6 +1824,14 @@ int ARGBToARGB4444(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOARGB4444ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToARGB4444Row = ARGBToARGB4444Row_Any_LASX; + if (IS_ALIGNED(width, 16)) { + ARGBToARGB4444Row = ARGBToARGB4444Row_LASX; + } + } +#endif for (y = 0; y < height; ++y) { ARGBToARGB4444Row(src_argb, dst_argb4444, width); @@ -1809,19 +1945,19 @@ int ARGBToJ420(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_yj, int dst_stride_yj, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, + uint8_t* dst_uj, + int dst_stride_uj, + uint8_t* dst_vj, + int dst_stride_vj, int width, int height) { int y; void (*ARGBToUVJRow)(const uint8_t* src_argb0, int src_stride_argb, - uint8_t* dst_u, uint8_t* dst_v, int width) = + uint8_t* dst_uj, uint8_t* dst_vj, int width) = ARGBToUVJRow_C; void (*ARGBToYJRow)(const uint8_t* src_argb, uint8_t* dst_yj, int width) = ARGBToYJRow_C; - if (!src_argb || !dst_yj || !dst_u || !dst_v || width <= 0 || height == 0) { + if (!src_argb || !dst_yj || !dst_uj || !dst_vj || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. 
@@ -1830,28 +1966,10 @@ int ARGBToJ420(const uint8_t* src_argb, src_argb = src_argb + (height - 1) * src_stride_argb; src_stride_argb = -src_stride_argb; } -#if defined(HAS_ARGBTOYJROW_SSSE3) && defined(HAS_ARGBTOUVJROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBToUVJRow = ARGBToUVJRow_Any_SSSE3; - ARGBToYJRow = ARGBToYJRow_Any_SSSE3; - if (IS_ALIGNED(width, 16)) { - ARGBToUVJRow = ARGBToUVJRow_SSSE3; - ARGBToYJRow = ARGBToYJRow_SSSE3; - } - } -#endif -#if defined(HAS_ARGBTOYJROW_AVX2) - if (TestCpuFlag(kCpuHasAVX2)) { - ARGBToYJRow = ARGBToYJRow_Any_AVX2; - if (IS_ALIGNED(width, 32)) { - ARGBToYJRow = ARGBToYJRow_AVX2; - } - } -#endif #if defined(HAS_ARGBTOYJROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ARGBToYJRow = ARGBToYJRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ARGBToYJRow = ARGBToYJRow_NEON; } } @@ -1864,15 +1982,35 @@ int ARGBToJ420(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYJROW_MMI) && defined(HAS_ARGBTOUVJROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToYJRow = ARGBToYJRow_Any_MMI; - ARGBToUVJRow = ARGBToUVJRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBToYJRow = ARGBToYJRow_MMI; +#if defined(HAS_ARGBTOYJROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToYJRow = ARGBToYJRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToYJRow = ARGBToYJRow_SSSE3; } + } +#endif +#if defined(HAS_ARGBTOUVJROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVJRow = ARGBToUVJRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ARGBToUVJRow = ARGBToUVJRow_MMI; + ARGBToUVJRow = ARGBToUVJRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOYJROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToYJRow = ARGBToYJRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToYJRow = ARGBToYJRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBTOUVJROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVJRow = ARGBToUVJRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVJRow = ARGBToUVJRow_AVX2; } } #endif @@ -1888,18 +2026,38 @@ int ARGBToJ420(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOYJROW_LSX) && defined(HAS_ARGBTOUVJROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGBToYJRow = ARGBToYJRow_Any_LSX; + ARGBToUVJRow = ARGBToUVJRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGBToYJRow = ARGBToYJRow_LSX; + ARGBToUVJRow = ARGBToUVJRow_LSX; + } + } +#endif +#if defined(HAS_ARGBTOYJROW_LASX) && defined(HAS_ARGBTOUVJROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToYJRow = ARGBToYJRow_Any_LASX; + ARGBToUVJRow = ARGBToUVJRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGBToYJRow = ARGBToYJRow_LASX; + ARGBToUVJRow = ARGBToUVJRow_LASX; + } + } +#endif for (y = 0; y < height - 1; y += 2) { - ARGBToUVJRow(src_argb, src_stride_argb, dst_u, dst_v, width); + ARGBToUVJRow(src_argb, src_stride_argb, dst_uj, dst_vj, width); ARGBToYJRow(src_argb, dst_yj, width); ARGBToYJRow(src_argb + src_stride_argb, dst_yj + dst_stride_yj, width); src_argb += src_stride_argb * 2; dst_yj += dst_stride_yj * 2; - dst_u += dst_stride_u; - dst_v += dst_stride_v; + dst_uj += dst_stride_uj; + dst_vj += dst_stride_vj; } if (height & 1) { - ARGBToUVJRow(src_argb, 0, dst_u, dst_v, width); + ARGBToUVJRow(src_argb, 0, dst_uj, dst_vj, width); ARGBToYJRow(src_argb, dst_yj, width); } return 0; @@ -1911,19 +2069,19 @@ int ARGBToJ422(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_yj, int dst_stride_yj, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, + uint8_t* dst_uj, + int dst_stride_uj, + uint8_t* dst_vj, + int dst_stride_vj, int width, int height) { int y; void 
(*ARGBToUVJRow)(const uint8_t* src_argb0, int src_stride_argb, - uint8_t* dst_u, uint8_t* dst_v, int width) = + uint8_t* dst_uj, uint8_t* dst_vj, int width) = ARGBToUVJRow_C; void (*ARGBToYJRow)(const uint8_t* src_argb, uint8_t* dst_yj, int width) = ARGBToYJRow_C; - if (!src_argb || !dst_yj || !dst_u || !dst_v || width <= 0 || height == 0) { + if (!src_argb || !dst_yj || !dst_uj || !dst_vj || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. @@ -1934,21 +2092,27 @@ int ARGBToJ422(const uint8_t* src_argb, } // Coalesce rows. if (src_stride_argb == width * 4 && dst_stride_yj == width && - dst_stride_u * 2 == width && dst_stride_v * 2 == width) { + dst_stride_uj * 2 == width && dst_stride_vj * 2 == width) { width *= height; height = 1; - src_stride_argb = dst_stride_yj = dst_stride_u = dst_stride_v = 0; + src_stride_argb = dst_stride_yj = dst_stride_uj = dst_stride_vj = 0; } -#if defined(HAS_ARGBTOYJROW_SSSE3) && defined(HAS_ARGBTOUVJROW_SSSE3) +#if defined(HAS_ARGBTOYJROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - ARGBToUVJRow = ARGBToUVJRow_Any_SSSE3; ARGBToYJRow = ARGBToYJRow_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - ARGBToUVJRow = ARGBToUVJRow_SSSE3; ARGBToYJRow = ARGBToYJRow_SSSE3; } } #endif +#if defined(HAS_ARGBTOUVJROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVJRow = ARGBToUVJRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToUVJRow = ARGBToUVJRow_SSSE3; + } + } +#endif #if defined(HAS_ARGBTOYJROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { ARGBToYJRow = ARGBToYJRow_Any_AVX2; @@ -1957,10 +2121,18 @@ int ARGBToJ422(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOUVJROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVJRow = ARGBToUVJRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVJRow = ARGBToUVJRow_AVX2; + } + } +#endif #if defined(HAS_ARGBTOYJROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ARGBToYJRow = ARGBToYJRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ARGBToYJRow = ARGBToYJRow_NEON; } } @@ -1973,18 +2145,6 @@ int ARGBToJ422(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBTOYJROW_MMI) && defined(HAS_ARGBTOUVJROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToYJRow = ARGBToYJRow_Any_MMI; - ARGBToUVJRow = ARGBToUVJRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBToYJRow = ARGBToYJRow_MMI; - } - if (IS_ALIGNED(width, 16)) { - ARGBToUVJRow = ARGBToUVJRow_MMI; - } - } -#endif #if defined(HAS_ARGBTOYJROW_MSA) && defined(HAS_ARGBTOUVJROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToYJRow = ARGBToYJRow_Any_MSA; @@ -1997,14 +2157,34 @@ int ARGBToJ422(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOYJROW_LSX) && defined(HAS_ARGBTOUVJROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGBToYJRow = ARGBToYJRow_Any_LSX; + ARGBToUVJRow = ARGBToUVJRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGBToYJRow = ARGBToYJRow_LSX; + ARGBToUVJRow = ARGBToUVJRow_LSX; + } + } +#endif +#if defined(HAS_ARGBTOYJROW_LASX) && defined(HAS_ARGBTOUVJROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToYJRow = ARGBToYJRow_Any_LASX; + ARGBToUVJRow = ARGBToUVJRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGBToYJRow = ARGBToYJRow_LASX; + ARGBToUVJRow = ARGBToUVJRow_LASX; + } + } +#endif for (y = 0; y < height; ++y) { - ARGBToUVJRow(src_argb, 0, dst_u, dst_v, width); + ARGBToUVJRow(src_argb, 0, dst_uj, dst_vj, width); ARGBToYJRow(src_argb, dst_yj, width); src_argb += src_stride_argb; dst_yj += dst_stride_yj; - dst_u += dst_stride_u; - dst_v += dst_stride_v; + dst_uj += dst_stride_uj; + dst_vj += dst_stride_vj; } return 0; } @@ 
-2053,19 +2233,11 @@ int ARGBToJ400(const uint8_t* src_argb, #if defined(HAS_ARGBTOYJROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ARGBToYJRow = ARGBToYJRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ARGBToYJRow = ARGBToYJRow_NEON; } } #endif -#if defined(HAS_ARGBTOYJROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToYJRow = ARGBToYJRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBToYJRow = ARGBToYJRow_MMI; - } - } -#endif #if defined(HAS_ARGBTOYJROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToYJRow = ARGBToYJRow_Any_MSA; @@ -2127,19 +2299,11 @@ int RGBAToJ400(const uint8_t* src_rgba, #if defined(HAS_RGBATOYJROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { RGBAToYJRow = RGBAToYJRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { RGBAToYJRow = RGBAToYJRow_NEON; } } #endif -#if defined(HAS_RGBATOYJROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - RGBAToYJRow = RGBAToYJRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - RGBAToYJRow = RGBAToYJRow_MMI; - } - } -#endif #if defined(HAS_RGBATOYJROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { RGBAToYJRow = RGBAToYJRow_Any_MSA; @@ -2148,6 +2312,22 @@ int RGBAToJ400(const uint8_t* src_rgba, } } #endif +#if defined(HAS_RGBATOYJROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + RGBAToYJRow = RGBAToYJRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + RGBAToYJRow = RGBAToYJRow_LSX; + } + } +#endif +#if defined(HAS_RGBATOYJROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + RGBAToYJRow = RGBAToYJRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + RGBAToYJRow = RGBAToYJRow_LASX; + } + } +#endif for (y = 0; y < height; ++y) { RGBAToYJRow(src_rgba, dst_yj, width); @@ -2157,6 +2337,663 @@ int RGBAToJ400(const uint8_t* src_rgba, return 0; } +// Convert ABGR to J420. (JPeg full range I420). +LIBYUV_API +int ABGRToJ420(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_yj, + int dst_stride_yj, + uint8_t* dst_uj, + int dst_stride_uj, + uint8_t* dst_vj, + int dst_stride_vj, + int width, + int height) { + int y; + void (*ABGRToUVJRow)(const uint8_t* src_abgr0, int src_stride_abgr, + uint8_t* dst_uj, uint8_t* dst_vj, int width) = + ABGRToUVJRow_C; + void (*ABGRToYJRow)(const uint8_t* src_abgr, uint8_t* dst_yj, int width) = + ABGRToYJRow_C; + if (!src_abgr || !dst_yj || !dst_uj || !dst_vj || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + src_abgr = src_abgr + (height - 1) * src_stride_abgr; + src_stride_abgr = -src_stride_abgr; + } +#if defined(HAS_ABGRTOYJROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ABGRToYJRow = ABGRToYJRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ABGRToYJRow = ABGRToYJRow_SSSE3; + } + } +#endif +#if defined(HAS_ABGRTOUVJROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ABGRToUVJRow = ABGRToUVJRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ABGRToUVJRow = ABGRToUVJRow_SSSE3; + } + } +#endif +#if defined(HAS_ABGRTOYJROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ABGRToYJRow = ABGRToYJRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ABGRToYJRow = ABGRToYJRow_AVX2; + } + } +#endif +#if defined(HAS_ABGRTOUVJROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ABGRToUVJRow = ABGRToUVJRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ABGRToUVJRow = ABGRToUVJRow_AVX2; + } + } +#endif +#if defined(HAS_ABGRTOYJROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ABGRToYJRow = ABGRToYJRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + ABGRToYJRow = ABGRToYJRow_NEON; + } + } +#endif +#if defined(HAS_ABGRTOUVJROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ABGRToUVJRow = ABGRToUVJRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + ABGRToUVJRow = ABGRToUVJRow_NEON; + } + } +#endif +#if defined(HAS_ABGRTOYJROW_MSA) && defined(HAS_ABGRTOUVJROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ABGRToYJRow = ABGRToYJRow_Any_MSA; + ABGRToUVJRow = ABGRToUVJRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ABGRToYJRow = ABGRToYJRow_MSA; + ABGRToUVJRow = ABGRToUVJRow_MSA; + } + } +#endif +#if defined(HAS_ABGRTOYJROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ABGRToYJRow = ABGRToYJRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ABGRToYJRow = ABGRToYJRow_LSX; + } + } +#endif +#if defined(HAS_ABGRTOYJROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ABGRToYJRow = ABGRToYJRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ABGRToYJRow = ABGRToYJRow_LASX; + } + } +#endif + + for (y = 0; y < height - 1; y += 2) { + ABGRToUVJRow(src_abgr, src_stride_abgr, dst_uj, dst_vj, width); + ABGRToYJRow(src_abgr, dst_yj, width); + ABGRToYJRow(src_abgr + src_stride_abgr, dst_yj + dst_stride_yj, width); + src_abgr += src_stride_abgr * 2; + dst_yj += dst_stride_yj * 2; + dst_uj += dst_stride_uj; + dst_vj += dst_stride_vj; + } + if (height & 1) { + ABGRToUVJRow(src_abgr, 0, dst_uj, dst_vj, width); + ABGRToYJRow(src_abgr, dst_yj, width); + } + return 0; +} + +// Convert ABGR to J422. (JPeg full range I422). +LIBYUV_API +int ABGRToJ422(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_yj, + int dst_stride_yj, + uint8_t* dst_uj, + int dst_stride_uj, + uint8_t* dst_vj, + int dst_stride_vj, + int width, + int height) { + int y; + void (*ABGRToUVJRow)(const uint8_t* src_abgr0, int src_stride_abgr, + uint8_t* dst_uj, uint8_t* dst_vj, int width) = + ABGRToUVJRow_C; + void (*ABGRToYJRow)(const uint8_t* src_abgr, uint8_t* dst_yj, int width) = + ABGRToYJRow_C; + if (!src_abgr || !dst_yj || !dst_uj || !dst_vj || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_abgr = src_abgr + (height - 1) * src_stride_abgr; + src_stride_abgr = -src_stride_abgr; + } + // Coalesce rows. 
+ if (src_stride_abgr == width * 4 && dst_stride_yj == width && + dst_stride_uj * 2 == width && dst_stride_vj * 2 == width) { + width *= height; + height = 1; + src_stride_abgr = dst_stride_yj = dst_stride_uj = dst_stride_vj = 0; + } +#if defined(HAS_ABGRTOYJROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ABGRToYJRow = ABGRToYJRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ABGRToYJRow = ABGRToYJRow_SSSE3; + } + } +#endif +#if defined(HAS_ABGRTOUVJROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ABGRToUVJRow = ABGRToUVJRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ABGRToUVJRow = ABGRToUVJRow_SSSE3; + } + } +#endif +#if defined(HAS_ABGRTOYJROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ABGRToYJRow = ABGRToYJRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ABGRToYJRow = ABGRToYJRow_AVX2; + } + } +#endif +#if defined(HAS_ABGRTOUVJROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ABGRToUVJRow = ABGRToUVJRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ABGRToUVJRow = ABGRToUVJRow_AVX2; + } + } +#endif +#if defined(HAS_ABGRTOYJROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ABGRToYJRow = ABGRToYJRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + ABGRToYJRow = ABGRToYJRow_NEON; + } + } +#endif +#if defined(HAS_ABGRTOUVJROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ABGRToUVJRow = ABGRToUVJRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + ABGRToUVJRow = ABGRToUVJRow_NEON; + } + } +#endif +#if defined(HAS_ABGRTOYJROW_MSA) && defined(HAS_ABGRTOUVJROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ABGRToYJRow = ABGRToYJRow_Any_MSA; + ABGRToUVJRow = ABGRToUVJRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ABGRToYJRow = ABGRToYJRow_MSA; + } + if (IS_ALIGNED(width, 32)) { + ABGRToUVJRow = ABGRToUVJRow_MSA; + } + } +#endif +#if defined(HAS_ABGRTOYJROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ABGRToYJRow = ABGRToYJRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ABGRToYJRow = ABGRToYJRow_LSX; + } + } +#endif +#if defined(HAS_ABGRTOYJROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ABGRToYJRow = ABGRToYJRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ABGRToYJRow = ABGRToYJRow_LASX; + } + } +#endif + + for (y = 0; y < height; ++y) { + ABGRToUVJRow(src_abgr, 0, dst_uj, dst_vj, width); + ABGRToYJRow(src_abgr, dst_yj, width); + src_abgr += src_stride_abgr; + dst_yj += dst_stride_yj; + dst_uj += dst_stride_uj; + dst_vj += dst_stride_vj; + } + return 0; +} + +// Convert ABGR to J400. +LIBYUV_API +int ABGRToJ400(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_yj, + int dst_stride_yj, + int width, + int height) { + int y; + void (*ABGRToYJRow)(const uint8_t* src_abgr, uint8_t* dst_yj, int width) = + ABGRToYJRow_C; + if (!src_abgr || !dst_yj || width <= 0 || height == 0) { + return -1; + } + if (height < 0) { + height = -height; + src_abgr = src_abgr + (height - 1) * src_stride_abgr; + src_stride_abgr = -src_stride_abgr; + } + // Coalesce rows. 
+ if (src_stride_abgr == width * 4 && dst_stride_yj == width) { + width *= height; + height = 1; + src_stride_abgr = dst_stride_yj = 0; + } +#if defined(HAS_ABGRTOYJROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ABGRToYJRow = ABGRToYJRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ABGRToYJRow = ABGRToYJRow_SSSE3; + } + } +#endif +#if defined(HAS_ABGRTOYJROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ABGRToYJRow = ABGRToYJRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ABGRToYJRow = ABGRToYJRow_AVX2; + } + } +#endif +#if defined(HAS_ABGRTOYJROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ABGRToYJRow = ABGRToYJRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + ABGRToYJRow = ABGRToYJRow_NEON; + } + } +#endif +#if defined(HAS_ABGRTOYJROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + ABGRToYJRow = ABGRToYJRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + ABGRToYJRow = ABGRToYJRow_MSA; + } + } +#endif +#if defined(HAS_ABGRTOYJROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ABGRToYJRow = ABGRToYJRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ABGRToYJRow = ABGRToYJRow_LSX; + } + } +#endif +#if defined(HAS_ABGRTOYJROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ABGRToYJRow = ABGRToYJRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ABGRToYJRow = ABGRToYJRow_LASX; + } + } +#endif + + for (y = 0; y < height; ++y) { + ABGRToYJRow(src_abgr, dst_yj, width); + src_abgr += src_stride_abgr; + dst_yj += dst_stride_yj; + } + return 0; +} + +// Convert ARGB to AR64. +LIBYUV_API +int ARGBToAR64(const uint8_t* src_argb, + int src_stride_argb, + uint16_t* dst_ar64, + int dst_stride_ar64, + int width, + int height) { + int y; + void (*ARGBToAR64Row)(const uint8_t* src_argb, uint16_t* dst_ar64, + int width) = ARGBToAR64Row_C; + if (!src_argb || !dst_ar64 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_argb = src_argb + (height - 1) * src_stride_argb; + src_stride_argb = -src_stride_argb; + } + // Coalesce rows. + if (src_stride_argb == width * 4 && dst_stride_ar64 == width * 4) { + width *= height; + height = 1; + src_stride_argb = dst_stride_ar64 = 0; + } +#if defined(HAS_ARGBTOAR64ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToAR64Row = ARGBToAR64Row_Any_SSSE3; + if (IS_ALIGNED(width, 4)) { + ARGBToAR64Row = ARGBToAR64Row_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOAR64ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToAR64Row = ARGBToAR64Row_Any_AVX2; + if (IS_ALIGNED(width, 8)) { + ARGBToAR64Row = ARGBToAR64Row_AVX2; + } + } +#endif +#if defined(HAS_ARGBTOAR64ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ARGBToAR64Row = ARGBToAR64Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + ARGBToAR64Row = ARGBToAR64Row_NEON; + } + } +#endif + + for (y = 0; y < height; ++y) { + ARGBToAR64Row(src_argb, dst_ar64, width); + src_argb += src_stride_argb; + dst_ar64 += dst_stride_ar64; + } + return 0; +} + +// Convert ARGB to AB64. +LIBYUV_API +int ARGBToAB64(const uint8_t* src_argb, + int src_stride_argb, + uint16_t* dst_ab64, + int dst_stride_ab64, + int width, + int height) { + int y; + void (*ARGBToAB64Row)(const uint8_t* src_argb, uint16_t* dst_ar64, + int width) = ARGBToAB64Row_C; + if (!src_argb || !dst_ab64 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_argb = src_argb + (height - 1) * src_stride_argb; + src_stride_argb = -src_stride_argb; + } + // Coalesce rows. 
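A minimal usage sketch for the ABGRToJ420 entry point added above (ABGRToJ422, ABGRToJ400 and ARGBToAR64/AB64 follow the same calling convention); the tight strides and the libyuv.h umbrella include are assumptions for illustration, not part of the patch:

#include <stdint.h>
#include "libyuv.h"  // assumed umbrella header pulling in the new declarations

// Convert a tightly packed ABGR frame to full-range (JPEG) I420 planes.
static int AbgrFrameToJ420(const uint8_t* src_abgr, int width, int height,
                           uint8_t* dst_y, uint8_t* dst_u, uint8_t* dst_v) {
  int half_w = (width + 1) / 2;
  // Strides assume no row padding: 4 bytes per ABGR pixel, width bytes per
  // luma row, half_w bytes per chroma row.
  return ABGRToJ420(src_abgr, width * 4,
                    dst_y, width,
                    dst_u, half_w,
                    dst_v, half_w,
                    width, height);  // 0 on success, -1 on bad arguments
}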
+ if (src_stride_argb == width * 4 && dst_stride_ab64 == width * 4) { + width *= height; + height = 1; + src_stride_argb = dst_stride_ab64 = 0; + } +#if defined(HAS_ARGBTOAB64ROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToAB64Row = ARGBToAB64Row_Any_SSSE3; + if (IS_ALIGNED(width, 4)) { + ARGBToAB64Row = ARGBToAB64Row_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOAB64ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToAB64Row = ARGBToAB64Row_Any_AVX2; + if (IS_ALIGNED(width, 8)) { + ARGBToAB64Row = ARGBToAB64Row_AVX2; + } + } +#endif +#if defined(HAS_ARGBTOAB64ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + ARGBToAB64Row = ARGBToAB64Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + ARGBToAB64Row = ARGBToAB64Row_NEON; + } + } +#endif + + for (y = 0; y < height; ++y) { + ARGBToAB64Row(src_argb, dst_ab64, width); + src_argb += src_stride_argb; + dst_ab64 += dst_stride_ab64; + } + return 0; +} + +// Enabled if 1 pass is available +#if defined(HAS_RAWTOYJROW_NEON) || defined(HAS_RAWTOYJROW_MSA) +#define HAS_RAWTOYJROW +#endif + +// RAW to JNV21 full range NV21 +LIBYUV_API +int RAWToJNV21(const uint8_t* src_raw, + int src_stride_raw, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_vu, + int dst_stride_vu, + int width, + int height) { + int y; + int halfwidth = (width + 1) >> 1; +#if defined(HAS_RAWTOYJROW) + void (*RAWToUVJRow)(const uint8_t* src_raw, int src_stride_raw, + uint8_t* dst_uj, uint8_t* dst_vj, int width) = + RAWToUVJRow_C; + void (*RAWToYJRow)(const uint8_t* src_raw, uint8_t* dst_y, int width) = + RAWToYJRow_C; +#else + void (*RAWToARGBRow)(const uint8_t* src_rgb, uint8_t* dst_argb, int width) = + RAWToARGBRow_C; + void (*ARGBToUVJRow)(const uint8_t* src_argb0, int src_stride_argb, + uint8_t* dst_uj, uint8_t* dst_vj, int width) = + ARGBToUVJRow_C; + void (*ARGBToYJRow)(const uint8_t* src_argb, uint8_t* dst_y, int width) = + ARGBToYJRow_C; +#endif + void (*MergeUVRow_)(const uint8_t* src_uj, const uint8_t* src_vj, + uint8_t* dst_vu, int width) = MergeUVRow_C; + if (!src_raw || !dst_y || !dst_vu || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_raw = src_raw + (height - 1) * src_stride_raw; + src_stride_raw = -src_stride_raw; + } + +#if defined(HAS_RAWTOYJROW) + +// Neon version does direct RAW to YUV. +#if defined(HAS_RAWTOYJROW_NEON) && defined(HAS_RAWTOUVJROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + RAWToUVJRow = RAWToUVJRow_Any_NEON; + RAWToYJRow = RAWToYJRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + RAWToYJRow = RAWToYJRow_NEON; + RAWToUVJRow = RAWToUVJRow_NEON; + } + } +#endif +#if defined(HAS_RAWTOYJROW_MSA) && defined(HAS_RAWTOUVJROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + RAWToUVJRow = RAWToUVJRow_Any_MSA; + RAWToYJRow = RAWToYJRow_Any_MSA; + if (IS_ALIGNED(width, 16)) { + RAWToYJRow = RAWToYJRow_MSA; + RAWToUVJRow = RAWToUVJRow_MSA; + } + } +#endif +#if defined(HAS_RAWTOYJROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + RAWToYJRow = RAWToYJRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + RAWToYJRow = RAWToYJRow_LSX; + } + } +#endif +#if defined(HAS_RAWTOYJROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + RAWToYJRow = RAWToYJRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + RAWToYJRow = RAWToYJRow_LASX; + } + } +#endif + +// Other platforms do intermediate conversion from RAW to ARGB. 
+#else // HAS_RAWTOYJROW + +#if defined(HAS_RAWTOARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + RAWToARGBRow = RAWToARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + RAWToARGBRow = RAWToARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOYJROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToYJRow = ARGBToYJRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToYJRow = ARGBToYJRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOYJROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToYJRow = ARGBToYJRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToYJRow = ARGBToYJRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBTOUVJROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + ARGBToUVJRow = ARGBToUVJRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + ARGBToUVJRow = ARGBToUVJRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBTOUVJROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + ARGBToUVJRow = ARGBToUVJRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + ARGBToUVJRow = ARGBToUVJRow_AVX2; + } + } +#endif +#endif // HAS_RAWTOYJROW +#if defined(HAS_MERGEUVROW_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + MergeUVRow_ = MergeUVRow_Any_SSE2; + if (IS_ALIGNED(halfwidth, 16)) { + MergeUVRow_ = MergeUVRow_SSE2; + } + } +#endif +#if defined(HAS_MERGEUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + MergeUVRow_ = MergeUVRow_Any_AVX2; + if (IS_ALIGNED(halfwidth, 32)) { + MergeUVRow_ = MergeUVRow_AVX2; + } + } +#endif +#if defined(HAS_MERGEUVROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + MergeUVRow_ = MergeUVRow_Any_NEON; + if (IS_ALIGNED(halfwidth, 16)) { + MergeUVRow_ = MergeUVRow_NEON; + } + } +#endif +#if defined(HAS_MERGEUVROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + MergeUVRow_ = MergeUVRow_Any_MSA; + if (IS_ALIGNED(halfwidth, 16)) { + MergeUVRow_ = MergeUVRow_MSA; + } + } +#endif +#if defined(HAS_MERGEUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + MergeUVRow_ = MergeUVRow_Any_LSX; + if (IS_ALIGNED(halfwidth, 16)) { + MergeUVRow_ = MergeUVRow_LSX; + } + } +#endif + { + // Allocate a row of uv. + align_buffer_64(row_uj, ((halfwidth + 31) & ~31) * 2); + uint8_t* row_vj = row_uj + ((halfwidth + 31) & ~31); +#if !defined(HAS_RAWTOYJROW) + // Allocate 2 rows of ARGB. 
+ const int row_size = (width * 4 + 31) & ~31; + align_buffer_64(row, row_size * 2); +#endif + + for (y = 0; y < height - 1; y += 2) { +#if defined(HAS_RAWTOYJROW) + RAWToUVJRow(src_raw, src_stride_raw, row_uj, row_vj, width); + MergeUVRow_(row_vj, row_uj, dst_vu, halfwidth); + RAWToYJRow(src_raw, dst_y, width); + RAWToYJRow(src_raw + src_stride_raw, dst_y + dst_stride_y, width); +#else + RAWToARGBRow(src_raw, row, width); + RAWToARGBRow(src_raw + src_stride_raw, row + row_size, width); + ARGBToUVJRow(row, row_size, row_uj, row_vj, width); + MergeUVRow_(row_vj, row_uj, dst_vu, halfwidth); + ARGBToYJRow(row, dst_y, width); + ARGBToYJRow(row + row_size, dst_y + dst_stride_y, width); +#endif + src_raw += src_stride_raw * 2; + dst_y += dst_stride_y * 2; + dst_vu += dst_stride_vu; + } + if (height & 1) { +#if defined(HAS_RAWTOYJROW) + RAWToUVJRow(src_raw, 0, row_uj, row_vj, width); + MergeUVRow_(row_vj, row_uj, dst_vu, halfwidth); + RAWToYJRow(src_raw, dst_y, width); +#else + RAWToARGBRow(src_raw, row, width); + ARGBToUVJRow(row, 0, row_uj, row_vj, width); + MergeUVRow_(row_vj, row_uj, dst_vu, halfwidth); + ARGBToYJRow(row, dst_y, width); +#endif + } +#if !defined(HAS_RAWTOYJROW) + free_aligned_buffer_64(row); +#endif + free_aligned_buffer_64(row_uj); + } + return 0; +} +#undef HAS_RAWTOYJROW + #ifdef __cplusplus } // extern "C" } // namespace libyuv diff --git a/TMessagesProj/jni/third_party/libyuv/source/convert_to_i420.cc b/TMessagesProj/jni/third_party/libyuv/source/convert_to_i420.cc index ac6eeab24e..5869ecd7b9 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/convert_to_i420.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/convert_to_i420.cc @@ -89,18 +89,26 @@ int ConvertToI420(const uint8_t* sample, switch (format) { // Single plane formats - case FOURCC_YUY2: + case FOURCC_YUY2: { // TODO(fbarchard): Find better odd crop fix. + uint8_t* u = (crop_x & 1) ? dst_v : dst_u; + uint8_t* v = (crop_x & 1) ? dst_u : dst_v; + int stride_u = (crop_x & 1) ? dst_stride_v : dst_stride_u; + int stride_v = (crop_x & 1) ? dst_stride_u : dst_stride_v; src = sample + (aligned_src_width * crop_y + crop_x) * 2; - r = YUY2ToI420(src, aligned_src_width * 2, dst_y, dst_stride_y, dst_u, - dst_stride_u, dst_v, dst_stride_v, crop_width, - inv_crop_height); + r = YUY2ToI420(src, aligned_src_width * 2, dst_y, dst_stride_y, u, + stride_u, v, stride_v, crop_width, inv_crop_height); break; - case FOURCC_UYVY: + } + case FOURCC_UYVY: { + uint8_t* u = (crop_x & 1) ? dst_v : dst_u; + uint8_t* v = (crop_x & 1) ? dst_u : dst_v; + int stride_u = (crop_x & 1) ? dst_stride_v : dst_stride_u; + int stride_v = (crop_x & 1) ? 
dst_stride_u : dst_stride_v; src = sample + (aligned_src_width * crop_y + crop_x) * 2; - r = UYVYToI420(src, aligned_src_width * 2, dst_y, dst_stride_y, dst_u, - dst_stride_u, dst_v, dst_stride_v, crop_width, - inv_crop_height); + r = UYVYToI420(src, aligned_src_width * 2, dst_y, dst_stride_y, u, + stride_u, v, stride_v, crop_width, inv_crop_height); break; + } case FOURCC_RGBP: src = sample + (src_width * crop_y + crop_x) * 2; r = RGB565ToI420(src, src_width * 2, dst_y, dst_stride_y, dst_u, diff --git a/TMessagesProj/jni/third_party/libyuv/source/cpu_id.cc b/TMessagesProj/jni/third_party/libyuv/source/cpu_id.cc index fe89452b77..13e3da7bb1 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/cpu_id.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/cpu_id.cc @@ -20,7 +20,7 @@ #endif // For ArmCpuCaps() but unittested on all platforms -#include +#include // For fopen() #include #ifdef __cplusplus @@ -108,14 +108,14 @@ void CpuId(int eax, int ecx, int* cpu_info) { // } // For VS2013 and earlier 32 bit, the _xgetbv(0) optimizer produces bad code. // https://code.google.com/p/libyuv/issues/detail?id=529 -#if defined(_M_IX86) && (_MSC_VER < 1900) +#if defined(_M_IX86) && defined(_MSC_VER) && (_MSC_VER < 1900) #pragma optimize("g", off) #endif #if (defined(_M_IX86) || defined(_M_X64) || defined(__i386__) || \ defined(__x86_64__)) && \ !defined(__pnacl__) && !defined(__CLR_VER) && !defined(__native_client__) // X86 CPUs have xgetbv to detect OS saves high parts of ymm registers. -int GetXCR0() { +static int GetXCR0() { int xcr0 = 0; #if defined(_MSC_FULL_VER) && (_MSC_FULL_VER >= 160040219) xcr0 = (int)_xgetbv(0); // VS2010 SP1 required. NOLINT @@ -129,11 +129,11 @@ int GetXCR0() { #define GetXCR0() 0 #endif // defined(_M_IX86) || defined(_M_X64) .. // Return optimization to previous setting. -#if defined(_M_IX86) && (_MSC_VER < 1900) +#if defined(_M_IX86) && defined(_MSC_VER) && (_MSC_VER < 1900) #pragma optimize("g", on) #endif -// based on libvpx arm_cpudetect.c +// Based on libvpx arm_cpudetect.c // For Arm, but public to allow testing on any CPU LIBYUV_API SAFEBUFFERS int ArmCpuCaps(const char* cpuinfo_name) { char cpuinfo_line[512]; @@ -174,18 +174,12 @@ LIBYUV_API SAFEBUFFERS int MipsCpuCaps(const char* cpuinfo_name) { } while (fgets(cpuinfo_line, sizeof(cpuinfo_line) - 1, f)) { if (memcmp(cpuinfo_line, "cpu model", 9) == 0) { - // Workaround early kernel without mmi in ASEs line. - if (strstr(cpuinfo_line, "Loongson-3")) { - flag |= kCpuHasMMI; - } else if (strstr(cpuinfo_line, "Loongson-2K")) { - flag |= kCpuHasMMI | kCpuHasMSA; + // Workaround early kernel without MSA in ASEs line. + if (strstr(cpuinfo_line, "Loongson-2K")) { + flag |= kCpuHasMSA; } } if (memcmp(cpuinfo_line, "ASEs implemented", 16) == 0) { - if (strstr(cpuinfo_line, "loongson-mmi") && - strstr(cpuinfo_line, "loongson-ext")) { - flag |= kCpuHasMMI; - } if (strstr(cpuinfo_line, "msa")) { flag |= kCpuHasMSA; } @@ -197,6 +191,27 @@ LIBYUV_API SAFEBUFFERS int MipsCpuCaps(const char* cpuinfo_name) { return flag; } +// TODO(fbarchard): Consider read_loongarch_ir(). 
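On the FOURCC_YUY2 / FOURCC_UYVY cases rewritten above: YUY2 stores bytes as Y0 U0 Y1 V0 | Y2 U2 Y3 V2 ..., and UYVY as U0 Y0 V0 Y1 ..., so cropping at an odd x shifts the chroma phase by one sample and the first chroma value of each pair becomes a V rather than a U (and vice versa). The (crop_x & 1) test compensates by swapping the destination U/V pointers and strides instead of re-sampling.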
+#define LOONGARCH_CFG2 0x2 +#define LOONGARCH_CFG2_LSX (1 << 6) +#define LOONGARCH_CFG2_LASX (1 << 7) + +#if defined(__loongarch__) +LIBYUV_API SAFEBUFFERS int LoongarchCpuCaps(void) { + int flag = 0x0; + uint32_t cfg2 = 0; + + __asm__ volatile("cpucfg %0, %1 \n\t" : "+&r"(cfg2) : "r"(LOONGARCH_CFG2)); + + if (cfg2 & LOONGARCH_CFG2_LSX) + flag |= kCpuHasLSX; + + if (cfg2 & LOONGARCH_CFG2_LASX) + flag |= kCpuHasLASX; + return flag; +} +#endif + static SAFEBUFFERS int GetCpuFlags(void) { int cpu_info = 0; #if !defined(__pnacl__) && !defined(__CLR_VER) && \ @@ -229,6 +244,7 @@ static SAFEBUFFERS int GetCpuFlags(void) { cpu_info |= (cpu_info7[1] & 0x80000000) ? kCpuHasAVX512VL : 0; cpu_info |= (cpu_info7[2] & 0x00000002) ? kCpuHasAVX512VBMI : 0; cpu_info |= (cpu_info7[2] & 0x00000040) ? kCpuHasAVX512VBMI2 : 0; + cpu_info |= (cpu_info7[2] & 0x00000800) ? kCpuHasAVX512VNNI : 0; cpu_info |= (cpu_info7[2] & 0x00001000) ? kCpuHasAVX512VBITALG : 0; cpu_info |= (cpu_info7[2] & 0x00004000) ? kCpuHasAVX512VPOPCNTDQ : 0; cpu_info |= (cpu_info7[2] & 0x00000100) ? kCpuHasGFNI : 0; @@ -239,6 +255,10 @@ static SAFEBUFFERS int GetCpuFlags(void) { cpu_info = MipsCpuCaps("/proc/cpuinfo"); cpu_info |= kCpuHasMIPS; #endif +#if defined(__loongarch__) && defined(__linux__) + cpu_info = LoongarchCpuCaps(); + cpu_info |= kCpuHasLOONGARCH; +#endif #if defined(__arm__) || defined(__aarch64__) // gcc -mfpu=neon defines __ARM_NEON__ // __ARM_NEON__ generates code that requires Neon. NaCL also requires Neon. diff --git a/TMessagesProj/jni/third_party/libyuv/source/mjpeg_decoder.cc b/TMessagesProj/jni/third_party/libyuv/source/mjpeg_decoder.cc index adba832f53..0141da8a1d 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/mjpeg_decoder.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/mjpeg_decoder.cc @@ -109,7 +109,7 @@ LIBYUV_BOOL MJpegDecoder::LoadFrame(const uint8_t* src, size_t src_len) { } buf_.data = src; - buf_.len = static_cast(src_len); + buf_.len = (int)src_len; buf_vec_.pos = 0; decompress_struct_->client_data = &buf_vec_; #ifdef HAVE_SETJMP @@ -417,10 +417,6 @@ void init_source(j_decompress_ptr cinfo) { boolean fill_input_buffer(j_decompress_ptr cinfo) { BufferVector* buf_vec = reinterpret_cast(cinfo->client_data); if (buf_vec->pos >= buf_vec->len) { - // Don't assert-fail when fuzzing. -#ifndef FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION - assert(0 && "No more data"); -#endif // ERROR: No more data return FALSE; } @@ -432,7 +428,7 @@ boolean fill_input_buffer(j_decompress_ptr cinfo) { void skip_input_data(j_decompress_ptr cinfo, long num_bytes) { // NOLINT jpeg_source_mgr* src = cinfo->src; - size_t bytes = static_cast(num_bytes); + size_t bytes = (size_t)num_bytes; if (bytes > src->bytes_in_buffer) { src->next_input_byte = nullptr; src->bytes_in_buffer = 0; diff --git a/TMessagesProj/jni/third_party/libyuv/source/planar_functions.cc b/TMessagesProj/jni/third_party/libyuv/source/planar_functions.cc index d5cd7e6808..e08a44f6f6 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/planar_functions.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/planar_functions.cc @@ -10,6 +10,7 @@ #include "libyuv/planar_functions.h" +#include #include // for memset() #include "libyuv/cpu_id.h" @@ -34,6 +35,9 @@ void CopyPlane(const uint8_t* src_y, int height) { int y; void (*CopyRow)(const uint8_t* src, uint8_t* dst, int width) = CopyRow_C; + if (width <= 0 || height == 0) { + return; + } // Negative height means invert the image. 
if (height < 0) { height = -height; @@ -80,8 +84,6 @@ void CopyPlane(const uint8_t* src_y, } } -// TODO(fbarchard): Consider support for negative height. -// TODO(fbarchard): Consider stride measured in bytes. LIBYUV_API void CopyPlane_16(const uint16_t* src_y, int src_stride_y, @@ -89,36 +91,8 @@ void CopyPlane_16(const uint16_t* src_y, int dst_stride_y, int width, int height) { - int y; - void (*CopyRow)(const uint16_t* src, uint16_t* dst, int width) = CopyRow_16_C; - // Coalesce rows. - if (src_stride_y == width && dst_stride_y == width) { - width *= height; - height = 1; - src_stride_y = dst_stride_y = 0; - } -#if defined(HAS_COPYROW_16_SSE2) - if (TestCpuFlag(kCpuHasSSE2) && IS_ALIGNED(width, 32)) { - CopyRow = CopyRow_16_SSE2; - } -#endif -#if defined(HAS_COPYROW_16_ERMS) - if (TestCpuFlag(kCpuHasERMS)) { - CopyRow = CopyRow_16_ERMS; - } -#endif -#if defined(HAS_COPYROW_16_NEON) - if (TestCpuFlag(kCpuHasNEON) && IS_ALIGNED(width, 32)) { - CopyRow = CopyRow_16_NEON; - } -#endif - - // Copy plane - for (y = 0; y < height; ++y) { - CopyRow(src_y, dst_y, width); - src_y += src_stride_y; - dst_y += dst_stride_y; - } + CopyPlane((const uint8_t*)src_y, src_stride_y * 2, (uint8_t*)dst_y, + dst_stride_y * 2, width * 2, height); } // Convert a plane of 16 bit data to 8 bit @@ -134,6 +108,9 @@ void Convert16To8Plane(const uint16_t* src_y, void (*Convert16To8Row)(const uint16_t* src_y, uint8_t* dst_y, int scale, int width) = Convert16To8Row_C; + if (width <= 0 || height == 0) { + return; + } // Negative height means invert the image. if (height < 0) { height = -height; @@ -146,6 +123,14 @@ void Convert16To8Plane(const uint16_t* src_y, height = 1; src_stride_y = dst_stride_y = 0; } +#if defined(HAS_CONVERT16TO8ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + Convert16To8Row = Convert16To8Row_Any_NEON; + if (IS_ALIGNED(width, 16)) { + Convert16To8Row = Convert16To8Row_NEON; + } + } +#endif #if defined(HAS_CONVERT16TO8ROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { Convert16To8Row = Convert16To8Row_Any_SSSE3; @@ -184,6 +169,9 @@ void Convert8To16Plane(const uint8_t* src_y, void (*Convert8To16Row)(const uint8_t* src_y, uint16_t* dst_y, int scale, int width) = Convert8To16Row_C; + if (width <= 0 || height == 0) { + return; + } // Negative height means invert the image. if (height < 0) { height = -height; @@ -238,9 +226,12 @@ int I422Copy(const uint8_t* src_y, int width, int height) { int halfwidth = (width + 1) >> 1; - if (!src_u || !src_v || !dst_u || !dst_v || width <= 0 || height == 0) { + + if ((!src_y && dst_y) || !src_u || !src_v || !dst_u || !dst_v || width <= 0 || + height == 0) { return -1; } + // Negative height means invert the image. if (height < 0) { height = -height; @@ -276,7 +267,8 @@ int I444Copy(const uint8_t* src_y, int dst_stride_v, int width, int height) { - if (!src_u || !src_v || !dst_u || !dst_v || width <= 0 || height == 0) { + if ((!src_y && dst_y) || !src_u || !src_v || !dst_u || !dst_v || width <= 0 || + height == 0) { return -1; } // Negative height means invert the image. @@ -298,6 +290,88 @@ int I444Copy(const uint8_t* src_y, return 0; } +// Copy I210. 
+LIBYUV_API +int I210Copy(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height) { + int halfwidth = (width + 1) >> 1; + + if ((!src_y && dst_y) || !src_u || !src_v || !dst_u || !dst_v || width <= 0 || + height == 0) { + return -1; + } + + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_y = src_y + (height - 1) * src_stride_y; + src_u = src_u + (height - 1) * src_stride_u; + src_v = src_v + (height - 1) * src_stride_v; + src_stride_y = -src_stride_y; + src_stride_u = -src_stride_u; + src_stride_v = -src_stride_v; + } + + if (dst_y) { + CopyPlane_16(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + } + // Copy UV planes. + CopyPlane_16(src_u, src_stride_u, dst_u, dst_stride_u, halfwidth, height); + CopyPlane_16(src_v, src_stride_v, dst_v, dst_stride_v, halfwidth, height); + return 0; +} + +// Copy I410. +LIBYUV_API +int I410Copy(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height) { + if ((!src_y && dst_y) || !src_u || !src_v || !dst_u || !dst_v || width <= 0 || + height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_y = src_y + (height - 1) * src_stride_y; + src_u = src_u + (height - 1) * src_stride_u; + src_v = src_v + (height - 1) * src_stride_v; + src_stride_y = -src_stride_y; + src_stride_u = -src_stride_u; + src_stride_v = -src_stride_v; + } + + if (dst_y) { + CopyPlane_16(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + } + CopyPlane_16(src_u, src_stride_u, dst_u, dst_stride_u, width, height); + CopyPlane_16(src_v, src_stride_v, dst_v, dst_stride_v, width, height); + return 0; +} + // Copy I400. LIBYUV_API int I400ToI400(const uint8_t* src_y, @@ -349,6 +423,56 @@ int I420ToI400(const uint8_t* src_y, return 0; } +// Copy NV12. Supports inverting. +LIBYUV_API +int NV12Copy(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + int halfwidth = (width + 1) >> 1; + int halfheight = (height + 1) >> 1; + + if (!src_y || !dst_y || !src_uv || !dst_uv || width <= 0 || height == 0) { + return -1; + } + + // Negative height means invert the image. + if (height < 0) { + height = -height; + halfheight = (height + 1) >> 1; + src_y = src_y + (height - 1) * src_stride_y; + src_uv = src_uv + (halfheight - 1) * src_stride_uv; + src_stride_y = -src_stride_y; + src_stride_uv = -src_stride_uv; + } + CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + CopyPlane(src_uv, src_stride_uv, dst_uv, dst_stride_uv, halfwidth * 2, + halfheight); + return 0; +} + +// Copy NV21. Supports inverting. +LIBYUV_API +int NV21Copy(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_vu, + int src_stride_vu, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_vu, + int dst_stride_vu, + int width, + int height) { + return NV12Copy(src_y, src_stride_y, src_vu, src_stride_vu, dst_y, + dst_stride_y, dst_vu, dst_stride_vu, width, height); +} + // Support function for NV12 etc UV channels. 
// Width and height are plane sizes (typically half pixel width). LIBYUV_API @@ -363,6 +487,9 @@ void SplitUVPlane(const uint8_t* src_uv, int y; void (*SplitUVRow)(const uint8_t* src_uv, uint8_t* dst_u, uint8_t* dst_v, int width) = SplitUVRow_C; + if (width <= 0 || height == 0) { + return; + } // Negative height means invert the image. if (height < 0) { height = -height; @@ -402,14 +529,6 @@ void SplitUVPlane(const uint8_t* src_uv, } } #endif -#if defined(HAS_SPLITUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - SplitUVRow = SplitUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - SplitUVRow = SplitUVRow_MMI; - } - } -#endif #if defined(HAS_SPLITUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { SplitUVRow = SplitUVRow_Any_MSA; @@ -418,6 +537,14 @@ void SplitUVPlane(const uint8_t* src_uv, } } #endif +#if defined(HAS_SPLITUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + SplitUVRow = SplitUVRow_Any_LSX; + if (IS_ALIGNED(width, 32)) { + SplitUVRow = SplitUVRow_LSX; + } + } +#endif for (y = 0; y < height; ++y) { // Copy a row of UV. @@ -440,6 +567,9 @@ void MergeUVPlane(const uint8_t* src_u, int y; void (*MergeUVRow)(const uint8_t* src_u, const uint8_t* src_v, uint8_t* dst_uv, int width) = MergeUVRow_C; + if (width <= 0 || height == 0) { + return; + } // Negative height means invert the image. if (height < 0) { height = -height; @@ -477,14 +607,6 @@ void MergeUVPlane(const uint8_t* src_u, } } #endif -#if defined(HAS_MERGEUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - MergeUVRow = MergeUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - MergeUVRow = MergeUVRow_MMI; - } - } -#endif #if defined(HAS_MERGEUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { MergeUVRow = MergeUVRow_Any_MSA; @@ -493,6 +615,14 @@ void MergeUVPlane(const uint8_t* src_u, } } #endif +#if defined(HAS_MERGEUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + MergeUVRow = MergeUVRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + MergeUVRow = MergeUVRow_LSX; + } + } +#endif for (y = 0; y < height; ++y) { // Merge a row of U and V into a row of UV. @@ -503,102 +633,587 @@ void MergeUVPlane(const uint8_t* src_u, } } -// Swap U and V channels in interleaved UV plane. +// Support function for P010 etc UV channels. +// Width and height are plane sizes (typically half pixel width). LIBYUV_API -void SwapUVPlane(const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_vu, - int dst_stride_vu, - int width, - int height) { +void SplitUVPlane_16(const uint16_t* src_uv, + int src_stride_uv, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height, + int depth) { int y; - void (*SwapUVRow)(const uint8_t* src_uv, uint8_t* dst_vu, int width) = - SwapUVRow_C; + void (*SplitUVRow_16)(const uint16_t* src_uv, uint16_t* dst_u, + uint16_t* dst_v, int depth, int width) = + SplitUVRow_16_C; + if (width <= 0 || height == 0) { + return; + } // Negative height means invert the image. if (height < 0) { height = -height; - src_uv = src_uv + (height - 1) * src_stride_uv; - src_stride_uv = -src_stride_uv; + dst_u = dst_u + (height - 1) * dst_stride_u; + dst_v = dst_v + (height - 1) * dst_stride_v; + dst_stride_u = -dst_stride_u; + dst_stride_v = -dst_stride_v; } // Coalesce rows. 
- if (src_stride_uv == width * 2 && dst_stride_vu == width * 2) { + if (src_stride_uv == width * 2 && dst_stride_u == width && + dst_stride_v == width) { width *= height; height = 1; - src_stride_uv = dst_stride_vu = 0; - } - -#if defined(HAS_SWAPUVROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - SwapUVRow = SwapUVRow_Any_SSSE3; - if (IS_ALIGNED(width, 16)) { - SwapUVRow = SwapUVRow_SSSE3; - } + src_stride_uv = dst_stride_u = dst_stride_v = 0; } -#endif -#if defined(HAS_SWAPUVROW_AVX2) +#if defined(HAS_SPLITUVROW_16_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - SwapUVRow = SwapUVRow_Any_AVX2; - if (IS_ALIGNED(width, 32)) { - SwapUVRow = SwapUVRow_AVX2; + SplitUVRow_16 = SplitUVRow_16_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + SplitUVRow_16 = SplitUVRow_16_AVX2; } } #endif -#if defined(HAS_SWAPUVROW_NEON) +#if defined(HAS_SPLITUVROW_16_NEON) if (TestCpuFlag(kCpuHasNEON)) { - SwapUVRow = SwapUVRow_Any_NEON; - if (IS_ALIGNED(width, 16)) { - SwapUVRow = SwapUVRow_NEON; + SplitUVRow_16 = SplitUVRow_16_Any_NEON; + if (IS_ALIGNED(width, 8)) { + SplitUVRow_16 = SplitUVRow_16_NEON; } } #endif for (y = 0; y < height; ++y) { - SwapUVRow(src_uv, dst_vu, width); + // Copy a row of UV. + SplitUVRow_16(src_uv, dst_u, dst_v, depth, width); + dst_u += dst_stride_u; + dst_v += dst_stride_v; src_uv += src_stride_uv; - dst_vu += dst_stride_vu; } } -// Convert NV21 to NV12. LIBYUV_API -int NV21ToNV12(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_vu, - int src_stride_vu, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_uv, - int dst_stride_uv, - int width, - int height) { - int halfwidth = (width + 1) >> 1; - int halfheight = (height + 1) >> 1; - if (!src_vu || !dst_uv || width <= 0 || height == 0) { - return -1; - } - if (dst_y) { - CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height); +void MergeUVPlane_16(const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height, + int depth) { + int y; + void (*MergeUVRow_16)(const uint16_t* src_u, const uint16_t* src_v, + uint16_t* dst_uv, int depth, int width) = + MergeUVRow_16_C; + assert(depth >= 8); + assert(depth <= 16); + if (width <= 0 || height == 0) { + return; } - // Negative height means invert the image. if (height < 0) { height = -height; - halfheight = (height + 1) >> 1; - src_vu = src_vu + (halfheight - 1) * src_stride_vu; - src_stride_vu = -src_stride_vu; + dst_uv = dst_uv + (height - 1) * dst_stride_uv; + dst_stride_uv = -dst_stride_uv; + } + // Coalesce rows. + if (src_stride_u == width && src_stride_v == width && + dst_stride_uv == width * 2) { + width *= height; + height = 1; + src_stride_u = src_stride_v = dst_stride_uv = 0; + } +#if defined(HAS_MERGEUVROW_16_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + MergeUVRow_16 = MergeUVRow_16_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + MergeUVRow_16 = MergeUVRow_16_AVX2; + } + } +#endif +#if defined(HAS_MERGEUVROW_16_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + MergeUVRow_16 = MergeUVRow_16_Any_NEON; + if (IS_ALIGNED(width, 8)) { + MergeUVRow_16 = MergeUVRow_16_NEON; + } } +#endif - SwapUVPlane(src_vu, src_stride_vu, dst_uv, dst_stride_uv, halfwidth, - halfheight); - return 0; + for (y = 0; y < height; ++y) { + // Merge a row of U and V into a row of UV. + MergeUVRow_16(src_u, src_v, dst_uv, depth, width); + src_u += src_stride_u; + src_v += src_stride_v; + dst_uv += dst_stride_uv; + } } -// Support function for NV12 etc RGB channels. 
-// Width and height are plane sizes (typically half pixel width). +// Convert plane from lsb to msb LIBYUV_API -void SplitRGBPlane(const uint8_t* src_rgb, - int src_stride_rgb, - uint8_t* dst_r, +void ConvertToMSBPlane_16(const uint16_t* src_y, + int src_stride_y, + uint16_t* dst_y, + int dst_stride_y, + int width, + int height, + int depth) { + int y; + int scale = 1 << (16 - depth); + void (*MultiplyRow_16)(const uint16_t* src_y, uint16_t* dst_y, int scale, + int width) = MultiplyRow_16_C; + if (width <= 0 || height == 0) { + return; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_y = dst_y + (height - 1) * dst_stride_y; + dst_stride_y = -dst_stride_y; + } + // Coalesce rows. + if (src_stride_y == width && dst_stride_y == width) { + width *= height; + height = 1; + src_stride_y = dst_stride_y = 0; + } + +#if defined(HAS_MULTIPLYROW_16_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + MultiplyRow_16 = MultiplyRow_16_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + MultiplyRow_16 = MultiplyRow_16_AVX2; + } + } +#endif +#if defined(HAS_MULTIPLYROW_16_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + MultiplyRow_16 = MultiplyRow_16_Any_NEON; + if (IS_ALIGNED(width, 16)) { + MultiplyRow_16 = MultiplyRow_16_NEON; + } + } +#endif + + for (y = 0; y < height; ++y) { + MultiplyRow_16(src_y, dst_y, scale, width); + src_y += src_stride_y; + dst_y += dst_stride_y; + } +} + +// Convert plane from msb to lsb +LIBYUV_API +void ConvertToLSBPlane_16(const uint16_t* src_y, + int src_stride_y, + uint16_t* dst_y, + int dst_stride_y, + int width, + int height, + int depth) { + int y; + int scale = 1 << depth; + void (*DivideRow)(const uint16_t* src_y, uint16_t* dst_y, int scale, + int width) = DivideRow_16_C; + if (width <= 0 || height == 0) { + return; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_y = dst_y + (height - 1) * dst_stride_y; + dst_stride_y = -dst_stride_y; + } + // Coalesce rows. + if (src_stride_y == width && dst_stride_y == width) { + width *= height; + height = 1; + src_stride_y = dst_stride_y = 0; + } + +#if defined(HAS_DIVIDEROW_16_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + DivideRow = DivideRow_16_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + DivideRow = DivideRow_16_AVX2; + } + } +#endif +#if defined(HAS_DIVIDEROW_16_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + DivideRow = DivideRow_16_Any_NEON; + if (IS_ALIGNED(width, 16)) { + DivideRow = DivideRow_16_NEON; + } + } +#endif + + for (y = 0; y < height; ++y) { + DivideRow(src_y, dst_y, scale, width); + src_y += src_stride_y; + dst_y += dst_stride_y; + } +} + +// Swap U and V channels in interleaved UV plane. +LIBYUV_API +void SwapUVPlane(const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_vu, + int dst_stride_vu, + int width, + int height) { + int y; + void (*SwapUVRow)(const uint8_t* src_uv, uint8_t* dst_vu, int width) = + SwapUVRow_C; + if (width <= 0 || height == 0) { + return; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_uv = src_uv + (height - 1) * src_stride_uv; + src_stride_uv = -src_stride_uv; + } + // Coalesce rows. 
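A usage sketch for the 16-bit plane helpers introduced above (SplitUVPlane_16 and ConvertToMSBPlane_16). Assumptions for illustration only: strides are counted in uint16_t elements as in the loops above, the planes are tightly packed, chroma_width/chroma_height are the sizes of the interleaved UV plane, and the samples are 10-bit:

#include <stdint.h>
#include "libyuv.h"  // assumed umbrella header

// Deinterleave a 10-bit UV plane and move LSB-aligned luma into the MSBs.
static void SplitAndAlign10Bit(const uint16_t* src_y, const uint16_t* src_uv,
                               uint16_t* dst_y, uint16_t* dst_u,
                               uint16_t* dst_v, int width, int height,
                               int chroma_width, int chroma_height) {
  // Interleaved UV: chroma_width U/V pairs per row = chroma_width * 2 elements.
  SplitUVPlane_16(src_uv, chroma_width * 2, dst_u, chroma_width,
                  dst_v, chroma_width, chroma_width, chroma_height,
                  /*depth=*/10);
  // ConvertToMSBPlane_16 scales by 1 << (16 - depth), i.e. 64 for 10-bit data.
  ConvertToMSBPlane_16(src_y, width, dst_y, width, width, height,
                       /*depth=*/10);
}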
+ if (src_stride_uv == width * 2 && dst_stride_vu == width * 2) { + width *= height; + height = 1; + src_stride_uv = dst_stride_vu = 0; + } + +#if defined(HAS_SWAPUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + SwapUVRow = SwapUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + SwapUVRow = SwapUVRow_SSSE3; + } + } +#endif +#if defined(HAS_SWAPUVROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + SwapUVRow = SwapUVRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + SwapUVRow = SwapUVRow_AVX2; + } + } +#endif +#if defined(HAS_SWAPUVROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + SwapUVRow = SwapUVRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + SwapUVRow = SwapUVRow_NEON; + } + } +#endif + + for (y = 0; y < height; ++y) { + SwapUVRow(src_uv, dst_vu, width); + src_uv += src_stride_uv; + dst_vu += dst_stride_vu; + } +} + +// Convert NV21 to NV12. +LIBYUV_API +int NV21ToNV12(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_vu, + int src_stride_vu, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + int halfwidth = (width + 1) >> 1; + int halfheight = (height + 1) >> 1; + + if (!src_vu || !dst_uv || width <= 0 || height == 0) { + return -1; + } + + if (dst_y) { + CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + } + + // Negative height means invert the image. + if (height < 0) { + height = -height; + halfheight = (height + 1) >> 1; + src_vu = src_vu + (halfheight - 1) * src_stride_vu; + src_stride_vu = -src_stride_vu; + } + + SwapUVPlane(src_vu, src_stride_vu, dst_uv, dst_stride_uv, halfwidth, + halfheight); + return 0; +} + +// Test if tile_height is a power of 2 (16 or 32) +#define IS_POWEROFTWO(x) (!((x) & ((x)-1))) + +// Detile a plane of data +// tile width is 16 and assumed. +// tile_height is 16 or 32 for MM21. +// src_stride_y is bytes per row of source ignoring tiling. e.g. 640 +// TODO: More detile row functions. +LIBYUV_API +int DetilePlane(const uint8_t* src_y, + int src_stride_y, + uint8_t* dst_y, + int dst_stride_y, + int width, + int height, + int tile_height) { + const ptrdiff_t src_tile_stride = 16 * tile_height; + int y; + void (*DetileRow)(const uint8_t* src, ptrdiff_t src_tile_stride, uint8_t* dst, + int width) = DetileRow_C; + if (!src_y || !dst_y || width <= 0 || height == 0 || + !IS_POWEROFTWO(tile_height)) { + return -1; + } + + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_y = dst_y + (height - 1) * dst_stride_y; + dst_stride_y = -dst_stride_y; + } + +#if defined(HAS_DETILEROW_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + DetileRow = DetileRow_Any_SSE2; + if (IS_ALIGNED(width, 16)) { + DetileRow = DetileRow_SSE2; + } + } +#endif +#if defined(HAS_DETILEROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + DetileRow = DetileRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + DetileRow = DetileRow_NEON; + } + } +#endif + + // Detile plane + for (y = 0; y < height; ++y) { + DetileRow(src_y, src_tile_stride, dst_y, width); + dst_y += dst_stride_y; + src_y += 16; + // Advance to next row of tiles. + if ((y & (tile_height - 1)) == (tile_height - 1)) { + src_y = src_y - src_tile_stride + src_stride_y * tile_height; + } + } + return 0; +} + +// Convert a plane of 16 bit tiles of 16 x H to linear. +// tile width is 16 and assumed. +// tile_height is 16 or 32 for MT2T. 
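DetilePlane above is new in this sync. As a hedged sketch (illustrative only, not part of the patch), linearizing the luma plane of an MM21-style buffer could look like the following; the 640x480 size, the tile_height of 32, and the header location are assumptions, and src_stride_y is bytes per row ignoring tiling, as the comment above states.

#include <stdint.h>
#include "libyuv/planar_functions.h"  // assumed header for DetilePlane

// Convert a 16x32-tiled MM21 luma plane into a linear plane.
// DetilePlane returns 0 on success and -1 on bad input.
bool DetileLuma640x480(const uint8_t* tiled_y, uint8_t* linear_y) {
  const int width = 640;       // assumed frame width
  const int height = 480;      // assumed frame height (a multiple of tile_height)
  const int tile_height = 32;  // MM21 luma tiling per the comment above
  return libyuv::DetilePlane(tiled_y, /*src_stride_y=*/width,
                             linear_y, /*dst_stride_y=*/width,
                             width, height, tile_height) == 0;
}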
+LIBYUV_API +int DetilePlane_16(const uint16_t* src_y, + int src_stride_y, + uint16_t* dst_y, + int dst_stride_y, + int width, + int height, + int tile_height) { + const ptrdiff_t src_tile_stride = 16 * tile_height; + int y; + void (*DetileRow_16)(const uint16_t* src, ptrdiff_t src_tile_stride, + uint16_t* dst, int width) = DetileRow_16_C; + if (!src_y || !dst_y || width <= 0 || height == 0 || + !IS_POWEROFTWO(tile_height)) { + return -1; + } + + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_y = dst_y + (height - 1) * dst_stride_y; + dst_stride_y = -dst_stride_y; + } + +#if defined(HAS_DETILEROW_16_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + DetileRow_16 = DetileRow_16_Any_SSE2; + if (IS_ALIGNED(width, 16)) { + DetileRow_16 = DetileRow_16_SSE2; + } + } +#endif +#if defined(HAS_DETILEROW_16_AVX) + if (TestCpuFlag(kCpuHasAVX)) { + DetileRow_16 = DetileRow_16_Any_AVX; + if (IS_ALIGNED(width, 16)) { + DetileRow_16 = DetileRow_16_AVX; + } + } +#endif +#if defined(HAS_DETILEROW_16_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + DetileRow_16 = DetileRow_16_Any_NEON; + if (IS_ALIGNED(width, 16)) { + DetileRow_16 = DetileRow_16_NEON; + } + } +#endif + + // Detile plane + for (y = 0; y < height; ++y) { + DetileRow_16(src_y, src_tile_stride, dst_y, width); + dst_y += dst_stride_y; + src_y += 16; + // Advance to next row of tiles. + if ((y & (tile_height - 1)) == (tile_height - 1)) { + src_y = src_y - src_tile_stride + src_stride_y * tile_height; + } + } + return 0; +} + +LIBYUV_API +void DetileSplitUVPlane(const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + int tile_height) { + const ptrdiff_t src_tile_stride = 16 * tile_height; + int y; + void (*DetileSplitUVRow)(const uint8_t* src, ptrdiff_t src_tile_stride, + uint8_t* dst_u, uint8_t* dst_v, int width) = + DetileSplitUVRow_C; + assert(src_stride_uv >= 0); + assert(tile_height > 0); + assert(src_stride_uv > 0); + + if (width <= 0 || height == 0) { + return; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_u = dst_u + (height - 1) * dst_stride_u; + dst_stride_u = -dst_stride_u; + dst_v = dst_v + (height - 1) * dst_stride_v; + dst_stride_v = -dst_stride_v; + } + +#if defined(HAS_DETILESPLITUVROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + DetileSplitUVRow = DetileSplitUVRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + DetileSplitUVRow = DetileSplitUVRow_SSSE3; + } + } +#endif +#if defined(HAS_DETILESPLITUVROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + DetileSplitUVRow = DetileSplitUVRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + DetileSplitUVRow = DetileSplitUVRow_NEON; + } + } +#endif + + // Detile plane + for (y = 0; y < height; ++y) { + DetileSplitUVRow(src_uv, src_tile_stride, dst_u, dst_v, width); + dst_u += dst_stride_u; + dst_v += dst_stride_v; + src_uv += 16; + // Advance to next row of tiles. 
+ if ((y & (tile_height - 1)) == (tile_height - 1)) { + src_uv = src_uv - src_tile_stride + src_stride_uv * tile_height; + } + } +} + +LIBYUV_API +void DetileToYUY2(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_yuy2, + int dst_stride_yuy2, + int width, + int height, + int tile_height) { + const ptrdiff_t src_y_tile_stride = 16 * tile_height; + const ptrdiff_t src_uv_tile_stride = src_y_tile_stride / 2; + int y; + void (*DetileToYUY2)(const uint8_t* src_y, ptrdiff_t src_y_tile_stride, + const uint8_t* src_uv, ptrdiff_t src_uv_tile_stride, + uint8_t* dst_yuy2, int width) = DetileToYUY2_C; + assert(src_stride_y >= 0); + assert(src_stride_y > 0); + assert(src_stride_uv >= 0); + assert(src_stride_uv > 0); + assert(tile_height > 0); + + if (width <= 0 || height == 0 || tile_height <= 0) { + return; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_yuy2 = dst_yuy2 + (height - 1) * dst_stride_yuy2; + dst_stride_yuy2 = -dst_stride_yuy2; + } + +#if defined(HAS_DETILETOYUY2_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + DetileToYUY2 = DetileToYUY2_Any_NEON; + if (IS_ALIGNED(width, 16)) { + DetileToYUY2 = DetileToYUY2_NEON; + } + } +#endif + +#if defined(HAS_DETILETOYUY2_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + DetileToYUY2 = DetileToYUY2_Any_SSE2; + if (IS_ALIGNED(width, 16)) { + DetileToYUY2 = DetileToYUY2_SSE2; + } + } +#endif + + // Detile plane + for (y = 0; y < height; ++y) { + DetileToYUY2(src_y, src_y_tile_stride, src_uv, src_uv_tile_stride, dst_yuy2, + width); + dst_yuy2 += dst_stride_yuy2; + src_y += 16; + + if (y & 0x1) + src_uv += 16; + + // Advance to next row of tiles. + if ((y & (tile_height - 1)) == (tile_height - 1)) { + src_y = src_y - src_y_tile_stride + src_stride_y * tile_height; + src_uv = src_uv - src_uv_tile_stride + src_stride_uv * (tile_height / 2); + } + } +} + +// Support function for NV12 etc RGB channels. +// Width and height are plane sizes (typically half pixel width). +LIBYUV_API +void SplitRGBPlane(const uint8_t* src_rgb, + int src_stride_rgb, + uint8_t* dst_r, int dst_stride_r, uint8_t* dst_g, int dst_stride_g, @@ -607,121 +1222,770 @@ void SplitRGBPlane(const uint8_t* src_rgb, int width, int height) { int y; - void (*SplitRGBRow)(const uint8_t* src_rgb, uint8_t* dst_r, uint8_t* dst_g, - uint8_t* dst_b, int width) = SplitRGBRow_C; + void (*SplitRGBRow)(const uint8_t* src_rgb, uint8_t* dst_r, uint8_t* dst_g, + uint8_t* dst_b, int width) = SplitRGBRow_C; + if (width <= 0 || height == 0) { + return; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_r = dst_r + (height - 1) * dst_stride_r; + dst_g = dst_g + (height - 1) * dst_stride_g; + dst_b = dst_b + (height - 1) * dst_stride_b; + dst_stride_r = -dst_stride_r; + dst_stride_g = -dst_stride_g; + dst_stride_b = -dst_stride_b; + } + // Coalesce rows. 
+ if (src_stride_rgb == width * 3 && dst_stride_r == width && + dst_stride_g == width && dst_stride_b == width) { + width *= height; + height = 1; + src_stride_rgb = dst_stride_r = dst_stride_g = dst_stride_b = 0; + } +#if defined(HAS_SPLITRGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + SplitRGBRow = SplitRGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + SplitRGBRow = SplitRGBRow_SSSE3; + } + } +#endif +#if defined(HAS_SPLITRGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + SplitRGBRow = SplitRGBRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + SplitRGBRow = SplitRGBRow_NEON; + } + } +#endif + + for (y = 0; y < height; ++y) { + // Copy a row of RGB. + SplitRGBRow(src_rgb, dst_r, dst_g, dst_b, width); + dst_r += dst_stride_r; + dst_g += dst_stride_g; + dst_b += dst_stride_b; + src_rgb += src_stride_rgb; + } +} + +LIBYUV_API +void MergeRGBPlane(const uint8_t* src_r, + int src_stride_r, + const uint8_t* src_g, + int src_stride_g, + const uint8_t* src_b, + int src_stride_b, + uint8_t* dst_rgb, + int dst_stride_rgb, + int width, + int height) { + int y; + void (*MergeRGBRow)(const uint8_t* src_r, const uint8_t* src_g, + const uint8_t* src_b, uint8_t* dst_rgb, int width) = + MergeRGBRow_C; + if (width <= 0 || height == 0) { + return; + } + // Coalesce rows. + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_rgb = dst_rgb + (height - 1) * dst_stride_rgb; + dst_stride_rgb = -dst_stride_rgb; + } + // Coalesce rows. + if (src_stride_r == width && src_stride_g == width && src_stride_b == width && + dst_stride_rgb == width * 3) { + width *= height; + height = 1; + src_stride_r = src_stride_g = src_stride_b = dst_stride_rgb = 0; + } +#if defined(HAS_MERGERGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + MergeRGBRow = MergeRGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + MergeRGBRow = MergeRGBRow_SSSE3; + } + } +#endif +#if defined(HAS_MERGERGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + MergeRGBRow = MergeRGBRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + MergeRGBRow = MergeRGBRow_NEON; + } + } +#endif + + for (y = 0; y < height; ++y) { + // Merge a row of U and V into a row of RGB. 
+ MergeRGBRow(src_r, src_g, src_b, dst_rgb, width); + src_r += src_stride_r; + src_g += src_stride_g; + src_b += src_stride_b; + dst_rgb += dst_stride_rgb; + } +} + +LIBYUV_NOINLINE +static void SplitARGBPlaneAlpha(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_r, + int dst_stride_r, + uint8_t* dst_g, + int dst_stride_g, + uint8_t* dst_b, + int dst_stride_b, + uint8_t* dst_a, + int dst_stride_a, + int width, + int height) { + int y; + void (*SplitARGBRow)(const uint8_t* src_rgb, uint8_t* dst_r, uint8_t* dst_g, + uint8_t* dst_b, uint8_t* dst_a, int width) = + SplitARGBRow_C; + + assert(height > 0); + + if (src_stride_argb == width * 4 && dst_stride_r == width && + dst_stride_g == width && dst_stride_b == width && dst_stride_a == width) { + width *= height; + height = 1; + src_stride_argb = dst_stride_r = dst_stride_g = dst_stride_b = + dst_stride_a = 0; + } + +#if defined(HAS_SPLITARGBROW_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + SplitARGBRow = SplitARGBRow_Any_SSE2; + if (IS_ALIGNED(width, 8)) { + SplitARGBRow = SplitARGBRow_SSE2; + } + } +#endif +#if defined(HAS_SPLITARGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + SplitARGBRow = SplitARGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + SplitARGBRow = SplitARGBRow_SSSE3; + } + } +#endif +#if defined(HAS_SPLITARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + SplitARGBRow = SplitARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + SplitARGBRow = SplitARGBRow_AVX2; + } + } +#endif +#if defined(HAS_SPLITARGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + SplitARGBRow = SplitARGBRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + SplitARGBRow = SplitARGBRow_NEON; + } + } +#endif + + for (y = 0; y < height; ++y) { + SplitARGBRow(src_argb, dst_r, dst_g, dst_b, dst_a, width); + dst_r += dst_stride_r; + dst_g += dst_stride_g; + dst_b += dst_stride_b; + dst_a += dst_stride_a; + src_argb += src_stride_argb; + } +} + +LIBYUV_NOINLINE +static void SplitARGBPlaneOpaque(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_r, + int dst_stride_r, + uint8_t* dst_g, + int dst_stride_g, + uint8_t* dst_b, + int dst_stride_b, + int width, + int height) { + int y; + void (*SplitXRGBRow)(const uint8_t* src_rgb, uint8_t* dst_r, uint8_t* dst_g, + uint8_t* dst_b, int width) = SplitXRGBRow_C; + assert(height > 0); + + if (src_stride_argb == width * 4 && dst_stride_r == width && + dst_stride_g == width && dst_stride_b == width) { + width *= height; + height = 1; + src_stride_argb = dst_stride_r = dst_stride_g = dst_stride_b = 0; + } + +#if defined(HAS_SPLITXRGBROW_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + SplitXRGBRow = SplitXRGBRow_Any_SSE2; + if (IS_ALIGNED(width, 8)) { + SplitXRGBRow = SplitXRGBRow_SSE2; + } + } +#endif +#if defined(HAS_SPLITXRGBROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + SplitXRGBRow = SplitXRGBRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + SplitXRGBRow = SplitXRGBRow_SSSE3; + } + } +#endif +#if defined(HAS_SPLITXRGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + SplitXRGBRow = SplitXRGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + SplitXRGBRow = SplitXRGBRow_AVX2; + } + } +#endif +#if defined(HAS_SPLITXRGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + SplitXRGBRow = SplitXRGBRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + SplitXRGBRow = SplitXRGBRow_NEON; + } + } +#endif + + for (y = 0; y < height; ++y) { + SplitXRGBRow(src_argb, dst_r, dst_g, dst_b, width); + dst_r += dst_stride_r; + dst_g += dst_stride_g; + dst_b += dst_stride_b; + src_argb += src_stride_argb; + } +} + +LIBYUV_API +void SplitARGBPlane(const uint8_t* 
src_argb, + int src_stride_argb, + uint8_t* dst_r, + int dst_stride_r, + uint8_t* dst_g, + int dst_stride_g, + uint8_t* dst_b, + int dst_stride_b, + uint8_t* dst_a, + int dst_stride_a, + int width, + int height) { + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_r = dst_r + (height - 1) * dst_stride_r; + dst_g = dst_g + (height - 1) * dst_stride_g; + dst_b = dst_b + (height - 1) * dst_stride_b; + dst_a = dst_a + (height - 1) * dst_stride_a; + dst_stride_r = -dst_stride_r; + dst_stride_g = -dst_stride_g; + dst_stride_b = -dst_stride_b; + dst_stride_a = -dst_stride_a; + } + + if (dst_a == NULL) { + SplitARGBPlaneOpaque(src_argb, src_stride_argb, dst_r, dst_stride_r, dst_g, + dst_stride_g, dst_b, dst_stride_b, width, height); + } else { + SplitARGBPlaneAlpha(src_argb, src_stride_argb, dst_r, dst_stride_r, dst_g, + dst_stride_g, dst_b, dst_stride_b, dst_a, dst_stride_a, + width, height); + } +} + +LIBYUV_NOINLINE +static void MergeARGBPlaneAlpha(const uint8_t* src_r, + int src_stride_r, + const uint8_t* src_g, + int src_stride_g, + const uint8_t* src_b, + int src_stride_b, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + int y; + void (*MergeARGBRow)(const uint8_t* src_r, const uint8_t* src_g, + const uint8_t* src_b, const uint8_t* src_a, + uint8_t* dst_argb, int width) = MergeARGBRow_C; + + assert(height > 0); + + if (src_stride_r == width && src_stride_g == width && src_stride_b == width && + src_stride_a == width && dst_stride_argb == width * 4) { + width *= height; + height = 1; + src_stride_r = src_stride_g = src_stride_b = src_stride_a = + dst_stride_argb = 0; + } +#if defined(HAS_MERGEARGBROW_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + MergeARGBRow = MergeARGBRow_Any_SSE2; + if (IS_ALIGNED(width, 8)) { + MergeARGBRow = MergeARGBRow_SSE2; + } + } +#endif +#if defined(HAS_MERGEARGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + MergeARGBRow = MergeARGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + MergeARGBRow = MergeARGBRow_AVX2; + } + } +#endif +#if defined(HAS_MERGEARGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + MergeARGBRow = MergeARGBRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + MergeARGBRow = MergeARGBRow_NEON; + } + } +#endif + + for (y = 0; y < height; ++y) { + MergeARGBRow(src_r, src_g, src_b, src_a, dst_argb, width); + src_r += src_stride_r; + src_g += src_stride_g; + src_b += src_stride_b; + src_a += src_stride_a; + dst_argb += dst_stride_argb; + } +} + +LIBYUV_NOINLINE +static void MergeARGBPlaneOpaque(const uint8_t* src_r, + int src_stride_r, + const uint8_t* src_g, + int src_stride_g, + const uint8_t* src_b, + int src_stride_b, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + int y; + void (*MergeXRGBRow)(const uint8_t* src_r, const uint8_t* src_g, + const uint8_t* src_b, uint8_t* dst_argb, int width) = + MergeXRGBRow_C; + + assert(height > 0); + + if (src_stride_r == width && src_stride_g == width && src_stride_b == width && + dst_stride_argb == width * 4) { + width *= height; + height = 1; + src_stride_r = src_stride_g = src_stride_b = dst_stride_argb = 0; + } +#if defined(HAS_MERGEXRGBROW_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + MergeXRGBRow = MergeXRGBRow_Any_SSE2; + if (IS_ALIGNED(width, 8)) { + MergeXRGBRow = MergeXRGBRow_SSE2; + } + } +#endif +#if defined(HAS_MERGEXRGBROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + MergeXRGBRow = MergeXRGBRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + MergeXRGBRow = MergeXRGBRow_AVX2; + } + } +#endif 
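SplitARGBPlane, completed above, dispatches to the alpha or opaque helper depending on whether dst_a is supplied. A small illustrative sketch of the opaque path follows (not part of the patch); the header location and all buffer names are assumptions.

#include <stdint.h>
#include <vector>
#include "libyuv/planar_functions.h"  // assumed header for SplitARGBPlane

// Split interleaved ARGB into separate R, G and B planes, ignoring alpha.
void SplitFrameOpaque(const uint8_t* argb, int width, int height) {
  const size_t n = static_cast<size_t>(width) * height;
  std::vector<uint8_t> r(n), g(n), b(n);
  // Passing a null alpha plane routes through SplitARGBPlaneOpaque above.
  libyuv::SplitARGBPlane(argb, width * 4,
                         r.data(), width, g.data(), width, b.data(), width,
                         /*dst_a=*/nullptr, /*dst_stride_a=*/0, width, height);
}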
+#if defined(HAS_MERGEXRGBROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + MergeXRGBRow = MergeXRGBRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + MergeXRGBRow = MergeXRGBRow_NEON; + } + } +#endif + + for (y = 0; y < height; ++y) { + MergeXRGBRow(src_r, src_g, src_b, dst_argb, width); + src_r += src_stride_r; + src_g += src_stride_g; + src_b += src_stride_b; + dst_argb += dst_stride_argb; + } +} + +LIBYUV_API +void MergeARGBPlane(const uint8_t* src_r, + int src_stride_r, + const uint8_t* src_g, + int src_stride_g, + const uint8_t* src_b, + int src_stride_b, + const uint8_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; + } + + if (src_a == NULL) { + MergeARGBPlaneOpaque(src_r, src_stride_r, src_g, src_stride_g, src_b, + src_stride_b, dst_argb, dst_stride_argb, width, + height); + } else { + MergeARGBPlaneAlpha(src_r, src_stride_r, src_g, src_stride_g, src_b, + src_stride_b, src_a, src_stride_a, dst_argb, + dst_stride_argb, width, height); + } +} + +// TODO(yuan): Support 2 bit alpha channel. +LIBYUV_API +void MergeXR30Plane(const uint16_t* src_r, + int src_stride_r, + const uint16_t* src_g, + int src_stride_g, + const uint16_t* src_b, + int src_stride_b, + uint8_t* dst_ar30, + int dst_stride_ar30, + int width, + int height, + int depth) { + int y; + void (*MergeXR30Row)(const uint16_t* src_r, const uint16_t* src_g, + const uint16_t* src_b, uint8_t* dst_ar30, int depth, + int width) = MergeXR30Row_C; + // Negative height means invert the image. if (height < 0) { height = -height; - dst_r = dst_r + (height - 1) * dst_stride_r; - dst_g = dst_g + (height - 1) * dst_stride_g; - dst_b = dst_b + (height - 1) * dst_stride_b; - dst_stride_r = -dst_stride_r; - dst_stride_g = -dst_stride_g; - dst_stride_b = -dst_stride_b; + dst_ar30 = dst_ar30 + (height - 1) * dst_stride_ar30; + dst_stride_ar30 = -dst_stride_ar30; } // Coalesce rows. 
- if (src_stride_rgb == width * 3 && dst_stride_r == width && - dst_stride_g == width && dst_stride_b == width) { + if (src_stride_r == width && src_stride_g == width && src_stride_b == width && + dst_stride_ar30 == width * 4) { width *= height; height = 1; - src_stride_rgb = dst_stride_r = dst_stride_g = dst_stride_b = 0; + src_stride_r = src_stride_g = src_stride_b = dst_stride_ar30 = 0; } -#if defined(HAS_SPLITRGBROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - SplitRGBRow = SplitRGBRow_Any_SSSE3; +#if defined(HAS_MERGEXR30ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + MergeXR30Row = MergeXR30Row_Any_AVX2; if (IS_ALIGNED(width, 16)) { - SplitRGBRow = SplitRGBRow_SSSE3; + MergeXR30Row = MergeXR30Row_AVX2; } } #endif -#if defined(HAS_SPLITRGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - SplitRGBRow = SplitRGBRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - SplitRGBRow = SplitRGBRow_MMI; +#if defined(HAS_MERGEXR30ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + if (depth == 10) { + MergeXR30Row = MergeXR30Row_10_Any_NEON; + if (IS_ALIGNED(width, 8)) { + MergeXR30Row = MergeXR30Row_10_NEON; + } + } else { + MergeXR30Row = MergeXR30Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + MergeXR30Row = MergeXR30Row_NEON; + } } } #endif -#if defined(HAS_SPLITRGBROW_NEON) - if (TestCpuFlag(kCpuHasNEON)) { - SplitRGBRow = SplitRGBRow_Any_NEON; + + for (y = 0; y < height; ++y) { + MergeXR30Row(src_r, src_g, src_b, dst_ar30, depth, width); + src_r += src_stride_r; + src_g += src_stride_g; + src_b += src_stride_b; + dst_ar30 += dst_stride_ar30; + } +} + +LIBYUV_NOINLINE +static void MergeAR64PlaneAlpha(const uint16_t* src_r, + int src_stride_r, + const uint16_t* src_g, + int src_stride_g, + const uint16_t* src_b, + int src_stride_b, + const uint16_t* src_a, + int src_stride_a, + uint16_t* dst_ar64, + int dst_stride_ar64, + int width, + int height, + int depth) { + int y; + void (*MergeAR64Row)(const uint16_t* src_r, const uint16_t* src_g, + const uint16_t* src_b, const uint16_t* src_a, + uint16_t* dst_argb, int depth, int width) = + MergeAR64Row_C; + + if (src_stride_r == width && src_stride_g == width && src_stride_b == width && + src_stride_a == width && dst_stride_ar64 == width * 4) { + width *= height; + height = 1; + src_stride_r = src_stride_g = src_stride_b = src_stride_a = + dst_stride_ar64 = 0; + } +#if defined(HAS_MERGEAR64ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + MergeAR64Row = MergeAR64Row_Any_AVX2; if (IS_ALIGNED(width, 16)) { - SplitRGBRow = SplitRGBRow_NEON; + MergeAR64Row = MergeAR64Row_AVX2; + } + } +#endif +#if defined(HAS_MERGEAR64ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + MergeAR64Row = MergeAR64Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + MergeAR64Row = MergeAR64Row_NEON; } } #endif for (y = 0; y < height; ++y) { - // Copy a row of RGB. 
- SplitRGBRow(src_rgb, dst_r, dst_g, dst_b, width); - dst_r += dst_stride_r; - dst_g += dst_stride_g; - dst_b += dst_stride_b; - src_rgb += src_stride_rgb; + MergeAR64Row(src_r, src_g, src_b, src_a, dst_ar64, depth, width); + src_r += src_stride_r; + src_g += src_stride_g; + src_b += src_stride_b; + src_a += src_stride_a; + dst_ar64 += dst_stride_ar64; } } -LIBYUV_API -void MergeRGBPlane(const uint8_t* src_r, - int src_stride_r, - const uint8_t* src_g, - int src_stride_g, - const uint8_t* src_b, - int src_stride_b, - uint8_t* dst_rgb, - int dst_stride_rgb, - int width, - int height) { +LIBYUV_NOINLINE +static void MergeAR64PlaneOpaque(const uint16_t* src_r, + int src_stride_r, + const uint16_t* src_g, + int src_stride_g, + const uint16_t* src_b, + int src_stride_b, + uint16_t* dst_ar64, + int dst_stride_ar64, + int width, + int height, + int depth) { int y; - void (*MergeRGBRow)(const uint8_t* src_r, const uint8_t* src_g, - const uint8_t* src_b, uint8_t* dst_rgb, int width) = - MergeRGBRow_C; + void (*MergeXR64Row)(const uint16_t* src_r, const uint16_t* src_g, + const uint16_t* src_b, uint16_t* dst_argb, int depth, + int width) = MergeXR64Row_C; + // Coalesce rows. + if (src_stride_r == width && src_stride_g == width && src_stride_b == width && + dst_stride_ar64 == width * 4) { + width *= height; + height = 1; + src_stride_r = src_stride_g = src_stride_b = dst_stride_ar64 = 0; + } +#if defined(HAS_MERGEXR64ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + MergeXR64Row = MergeXR64Row_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + MergeXR64Row = MergeXR64Row_AVX2; + } + } +#endif +#if defined(HAS_MERGEXR64ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + MergeXR64Row = MergeXR64Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + MergeXR64Row = MergeXR64Row_NEON; + } + } +#endif + + for (y = 0; y < height; ++y) { + MergeXR64Row(src_r, src_g, src_b, dst_ar64, depth, width); + src_r += src_stride_r; + src_g += src_stride_g; + src_b += src_stride_b; + dst_ar64 += dst_stride_ar64; + } +} + +LIBYUV_API +void MergeAR64Plane(const uint16_t* src_r, + int src_stride_r, + const uint16_t* src_g, + int src_stride_g, + const uint16_t* src_b, + int src_stride_b, + const uint16_t* src_a, + int src_stride_a, + uint16_t* dst_ar64, + int dst_stride_ar64, + int width, + int height, + int depth) { // Negative height means invert the image. if (height < 0) { height = -height; - dst_rgb = dst_rgb + (height - 1) * dst_stride_rgb; - dst_stride_rgb = -dst_stride_rgb; + dst_ar64 = dst_ar64 + (height - 1) * dst_stride_ar64; + dst_stride_ar64 = -dst_stride_ar64; } - // Coalesce rows. 
+ + if (src_a == NULL) { + MergeAR64PlaneOpaque(src_r, src_stride_r, src_g, src_stride_g, src_b, + src_stride_b, dst_ar64, dst_stride_ar64, width, height, + depth); + } else { + MergeAR64PlaneAlpha(src_r, src_stride_r, src_g, src_stride_g, src_b, + src_stride_b, src_a, src_stride_a, dst_ar64, + dst_stride_ar64, width, height, depth); + } +} + +LIBYUV_NOINLINE +static void MergeARGB16To8PlaneAlpha(const uint16_t* src_r, + int src_stride_r, + const uint16_t* src_g, + int src_stride_g, + const uint16_t* src_b, + int src_stride_b, + const uint16_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height, + int depth) { + int y; + void (*MergeARGB16To8Row)(const uint16_t* src_r, const uint16_t* src_g, + const uint16_t* src_b, const uint16_t* src_a, + uint8_t* dst_argb, int depth, int width) = + MergeARGB16To8Row_C; + if (src_stride_r == width && src_stride_g == width && src_stride_b == width && - dst_stride_rgb == width * 3) { + src_stride_a == width && dst_stride_argb == width * 4) { width *= height; height = 1; - src_stride_r = src_stride_g = src_stride_b = dst_stride_rgb = 0; + src_stride_r = src_stride_g = src_stride_b = src_stride_a = + dst_stride_argb = 0; } -#if defined(HAS_MERGERGBROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - MergeRGBRow = MergeRGBRow_Any_SSSE3; +#if defined(HAS_MERGEARGB16TO8ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + MergeARGB16To8Row = MergeARGB16To8Row_Any_AVX2; if (IS_ALIGNED(width, 16)) { - MergeRGBRow = MergeRGBRow_SSSE3; + MergeARGB16To8Row = MergeARGB16To8Row_AVX2; } } #endif -#if defined(HAS_MERGERGBROW_NEON) +#if defined(HAS_MERGEARGB16TO8ROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - MergeRGBRow = MergeRGBRow_Any_NEON; + MergeARGB16To8Row = MergeARGB16To8Row_Any_NEON; + if (IS_ALIGNED(width, 8)) { + MergeARGB16To8Row = MergeARGB16To8Row_NEON; + } + } +#endif + + for (y = 0; y < height; ++y) { + MergeARGB16To8Row(src_r, src_g, src_b, src_a, dst_argb, depth, width); + src_r += src_stride_r; + src_g += src_stride_g; + src_b += src_stride_b; + src_a += src_stride_a; + dst_argb += dst_stride_argb; + } +} + +LIBYUV_NOINLINE +static void MergeARGB16To8PlaneOpaque(const uint16_t* src_r, + int src_stride_r, + const uint16_t* src_g, + int src_stride_g, + const uint16_t* src_b, + int src_stride_b, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height, + int depth) { + int y; + void (*MergeXRGB16To8Row)(const uint16_t* src_r, const uint16_t* src_g, + const uint16_t* src_b, uint8_t* dst_argb, int depth, + int width) = MergeXRGB16To8Row_C; + + // Coalesce rows. + if (src_stride_r == width && src_stride_g == width && src_stride_b == width && + dst_stride_argb == width * 4) { + width *= height; + height = 1; + src_stride_r = src_stride_g = src_stride_b = dst_stride_argb = 0; + } +#if defined(HAS_MERGEXRGB16TO8ROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + MergeXRGB16To8Row = MergeXRGB16To8Row_Any_AVX2; if (IS_ALIGNED(width, 16)) { - MergeRGBRow = MergeRGBRow_NEON; + MergeXRGB16To8Row = MergeXRGB16To8Row_AVX2; } } #endif -#if defined(HAS_MERGERGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - MergeRGBRow = MergeRGBRow_Any_MMI; +#if defined(HAS_MERGEXRGB16TO8ROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + MergeXRGB16To8Row = MergeXRGB16To8Row_Any_NEON; if (IS_ALIGNED(width, 8)) { - MergeRGBRow = MergeRGBRow_MMI; + MergeXRGB16To8Row = MergeXRGB16To8Row_NEON; } } #endif for (y = 0; y < height; ++y) { - // Merge a row of U and V into a row of RGB. 
- MergeRGBRow(src_r, src_g, src_b, dst_rgb, width); + MergeXRGB16To8Row(src_r, src_g, src_b, dst_argb, depth, width); src_r += src_stride_r; src_g += src_stride_g; src_b += src_stride_b; - dst_rgb += dst_stride_rgb; + dst_argb += dst_stride_argb; + } +} + +LIBYUV_API +void MergeARGB16To8Plane(const uint16_t* src_r, + int src_stride_r, + const uint16_t* src_g, + int src_stride_g, + const uint16_t* src_b, + int src_stride_b, + const uint16_t* src_a, + int src_stride_a, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height, + int depth) { + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst_argb = dst_argb + (height - 1) * dst_stride_argb; + dst_stride_argb = -dst_stride_argb; + } + + if (src_a == NULL) { + MergeARGB16To8PlaneOpaque(src_r, src_stride_r, src_g, src_stride_g, src_b, + src_stride_b, dst_argb, dst_stride_argb, width, + height, depth); + } else { + MergeARGB16To8PlaneAlpha(src_r, src_stride_r, src_g, src_stride_g, src_b, + src_stride_b, src_a, src_stride_a, dst_argb, + dst_stride_argb, width, height, depth); } } @@ -789,16 +2053,6 @@ int YUY2ToI422(const uint8_t* src_yuy2, } } #endif -#if defined(HAS_YUY2TOYROW_MMI) && defined(HAS_YUY2TOUV422ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - YUY2ToYRow = YUY2ToYRow_Any_MMI; - YUY2ToUV422Row = YUY2ToUV422Row_Any_MMI; - if (IS_ALIGNED(width, 8)) { - YUY2ToYRow = YUY2ToYRow_MMI; - YUY2ToUV422Row = YUY2ToUV422Row_MMI; - } - } -#endif #if defined(HAS_YUY2TOYROW_MSA) && defined(HAS_YUY2TOUV422ROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { YUY2ToYRow = YUY2ToYRow_Any_MSA; @@ -809,6 +2063,16 @@ int YUY2ToI422(const uint8_t* src_yuy2, } } #endif +#if defined(HAS_YUY2TOYROW_LASX) && defined(HAS_YUY2TOUV422ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + YUY2ToYRow = YUY2ToYRow_Any_LASX; + YUY2ToUV422Row = YUY2ToUV422Row_Any_LASX; + if (IS_ALIGNED(width, 32)) { + YUY2ToYRow = YUY2ToYRow_LASX; + YUY2ToUV422Row = YUY2ToUV422Row_LASX; + } + } +#endif for (y = 0; y < height; ++y) { YUY2ToUV422Row(src_yuy2, dst_u, dst_v, width); @@ -885,108 +2149,167 @@ int UYVYToI422(const uint8_t* src_uyvy, } } #endif -#if defined(HAS_UYVYTOYROW_MMI) && defined(HAS_UYVYTOUV422ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - UYVYToYRow = UYVYToYRow_Any_MMI; - UYVYToUV422Row = UYVYToUV422Row_Any_MMI; +#if defined(HAS_UYVYTOYROW_MSA) && defined(HAS_UYVYTOUV422ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + UYVYToYRow = UYVYToYRow_Any_MSA; + UYVYToUV422Row = UYVYToUV422Row_Any_MSA; + if (IS_ALIGNED(width, 32)) { + UYVYToYRow = UYVYToYRow_MSA; + UYVYToUV422Row = UYVYToUV422Row_MSA; + } + } +#endif +#if defined(HAS_UYVYTOYROW_LASX) && defined(HAS_UYVYTOUV422ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + UYVYToYRow = UYVYToYRow_Any_LASX; + UYVYToUV422Row = UYVYToUV422Row_Any_LASX; + if (IS_ALIGNED(width, 32)) { + UYVYToYRow = UYVYToYRow_LASX; + UYVYToUV422Row = UYVYToUV422Row_LASX; + } + } +#endif + + for (y = 0; y < height; ++y) { + UYVYToUV422Row(src_uyvy, dst_u, dst_v, width); + UYVYToYRow(src_uyvy, dst_y, width); + src_uyvy += src_stride_uyvy; + dst_y += dst_stride_y; + dst_u += dst_stride_u; + dst_v += dst_stride_v; + } + return 0; +} + +// Convert YUY2 to Y. +LIBYUV_API +int YUY2ToY(const uint8_t* src_yuy2, + int src_stride_yuy2, + uint8_t* dst_y, + int dst_stride_y, + int width, + int height) { + int y; + void (*YUY2ToYRow)(const uint8_t* src_yuy2, uint8_t* dst_y, int width) = + YUY2ToYRow_C; + if (!src_yuy2 || !dst_y || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + src_yuy2 = src_yuy2 + (height - 1) * src_stride_yuy2; + src_stride_yuy2 = -src_stride_yuy2; + } + // Coalesce rows. + if (src_stride_yuy2 == width * 2 && dst_stride_y == width) { + width *= height; + height = 1; + src_stride_yuy2 = dst_stride_y = 0; + } +#if defined(HAS_YUY2TOYROW_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + YUY2ToYRow = YUY2ToYRow_Any_SSE2; + if (IS_ALIGNED(width, 16)) { + YUY2ToYRow = YUY2ToYRow_SSE2; + } + } +#endif +#if defined(HAS_YUY2TOYROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + YUY2ToYRow = YUY2ToYRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + YUY2ToYRow = YUY2ToYRow_AVX2; + } + } +#endif +#if defined(HAS_YUY2TOYROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + YUY2ToYRow = YUY2ToYRow_Any_NEON; if (IS_ALIGNED(width, 16)) { - UYVYToYRow = UYVYToYRow_MMI; - UYVYToUV422Row = UYVYToUV422Row_MMI; + YUY2ToYRow = YUY2ToYRow_NEON; } } #endif -#if defined(HAS_UYVYTOYROW_MSA) && defined(HAS_UYVYTOUV422ROW_MSA) +#if defined(HAS_YUY2TOYROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { - UYVYToYRow = UYVYToYRow_Any_MSA; - UYVYToUV422Row = UYVYToUV422Row_Any_MSA; + YUY2ToYRow = YUY2ToYRow_Any_MSA; if (IS_ALIGNED(width, 32)) { - UYVYToYRow = UYVYToYRow_MSA; - UYVYToUV422Row = UYVYToUV422Row_MSA; + YUY2ToYRow = YUY2ToYRow_MSA; } } #endif for (y = 0; y < height; ++y) { - UYVYToUV422Row(src_uyvy, dst_u, dst_v, width); - UYVYToYRow(src_uyvy, dst_y, width); - src_uyvy += src_stride_uyvy; + YUY2ToYRow(src_yuy2, dst_y, width); + src_yuy2 += src_stride_yuy2; dst_y += dst_stride_y; - dst_u += dst_stride_u; - dst_v += dst_stride_v; } return 0; } -// Convert YUY2 to Y. +// Convert UYVY to Y. LIBYUV_API -int YUY2ToY(const uint8_t* src_yuy2, - int src_stride_yuy2, +int UYVYToY(const uint8_t* src_uyvy, + int src_stride_uyvy, uint8_t* dst_y, int dst_stride_y, int width, int height) { int y; - void (*YUY2ToYRow)(const uint8_t* src_yuy2, uint8_t* dst_y, int width) = - YUY2ToYRow_C; - if (!src_yuy2 || !dst_y || width <= 0 || height == 0) { + void (*UYVYToYRow)(const uint8_t* src_uyvy, uint8_t* dst_y, int width) = + UYVYToYRow_C; + if (!src_uyvy || !dst_y || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - src_yuy2 = src_yuy2 + (height - 1) * src_stride_yuy2; - src_stride_yuy2 = -src_stride_yuy2; + src_uyvy = src_uyvy + (height - 1) * src_stride_uyvy; + src_stride_uyvy = -src_stride_uyvy; } // Coalesce rows. 
- if (src_stride_yuy2 == width * 2 && dst_stride_y == width) { + if (src_stride_uyvy == width * 2 && dst_stride_y == width) { width *= height; height = 1; - src_stride_yuy2 = dst_stride_y = 0; + src_stride_uyvy = dst_stride_y = 0; } -#if defined(HAS_YUY2TOYROW_SSE2) +#if defined(HAS_UYVYTOYROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { - YUY2ToYRow = YUY2ToYRow_Any_SSE2; + UYVYToYRow = UYVYToYRow_Any_SSE2; if (IS_ALIGNED(width, 16)) { - YUY2ToYRow = YUY2ToYRow_SSE2; + UYVYToYRow = UYVYToYRow_SSE2; } } #endif -#if defined(HAS_YUY2TOYROW_AVX2) +#if defined(HAS_UYVYTOYROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - YUY2ToYRow = YUY2ToYRow_Any_AVX2; + UYVYToYRow = UYVYToYRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - YUY2ToYRow = YUY2ToYRow_AVX2; + UYVYToYRow = UYVYToYRow_AVX2; } } #endif -#if defined(HAS_YUY2TOYROW_NEON) +#if defined(HAS_UYVYTOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - YUY2ToYRow = YUY2ToYRow_Any_NEON; + UYVYToYRow = UYVYToYRow_Any_NEON; if (IS_ALIGNED(width, 16)) { - YUY2ToYRow = YUY2ToYRow_NEON; - } - } -#endif -#if defined(HAS_YUY2TOYROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - YUY2ToYRow = YUY2ToYRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - YUY2ToYRow = YUY2ToYRow_MMI; + UYVYToYRow = UYVYToYRow_NEON; } } #endif -#if defined(HAS_YUY2TOYROW_MSA) +#if defined(HAS_UYVYTOYROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { - YUY2ToYRow = YUY2ToYRow_Any_MSA; + UYVYToYRow = UYVYToYRow_Any_MSA; if (IS_ALIGNED(width, 32)) { - YUY2ToYRow = YUY2ToYRow_MSA; + UYVYToYRow = UYVYToYRow_MSA; } } #endif for (y = 0; y < height; ++y) { - YUY2ToYRow(src_yuy2, dst_y, width); - src_yuy2 += src_stride_yuy2; + UYVYToYRow(src_uyvy, dst_y, width); + src_uyvy += src_stride_uyvy; dst_y += dst_stride_y; } return 0; @@ -1033,14 +2356,6 @@ void MirrorPlane(const uint8_t* src_y, } } #endif -#if defined(HAS_MIRRORROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - MirrorRow = MirrorRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - MirrorRow = MirrorRow_MMI; - } - } -#endif #if defined(HAS_MIRRORROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { MirrorRow = MirrorRow_Any_MSA; @@ -1049,6 +2364,14 @@ void MirrorPlane(const uint8_t* src_y, } } #endif +#if defined(HAS_MIRRORROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + MirrorRow = MirrorRow_Any_LASX; + if (IS_ALIGNED(width, 64)) { + MirrorRow = MirrorRow_LASX; + } + } +#endif // Mirror plane for (y = 0; y < height; ++y) { @@ -1107,6 +2430,14 @@ void MirrorUVPlane(const uint8_t* src_uv, } } #endif +#if defined(HAS_MIRRORUVROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + MirrorUVRow = MirrorUVRow_Any_LASX; + if (IS_ALIGNED(width, 16)) { + MirrorUVRow = MirrorUVRow_LASX; + } + } +#endif // MirrorUV plane for (y = 0; y < height; ++y) { @@ -1156,10 +2487,12 @@ int I420Mirror(const uint8_t* src_y, int height) { int halfwidth = (width + 1) >> 1; int halfheight = (height + 1) >> 1; + if (!src_y || !src_u || !src_v || !dst_u || !dst_v || width <= 0 || height == 0) { return -1; } + // Negative height means invert the image. if (height < 0) { height = -height; @@ -1194,9 +2527,11 @@ int NV12Mirror(const uint8_t* src_y, int height) { int halfwidth = (width + 1) >> 1; int halfheight = (height + 1) >> 1; + if (!src_y || !src_uv || !dst_uv || width <= 0 || height == 0) { return -1; } + // Negative height means invert the image. 
if (height < 0) { height = -height; @@ -1259,14 +2594,6 @@ int ARGBMirror(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBMIRRORROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBMirrorRow = ARGBMirrorRow_Any_MMI; - if (IS_ALIGNED(width, 2)) { - ARGBMirrorRow = ARGBMirrorRow_MMI; - } - } -#endif #if defined(HAS_ARGBMIRRORROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBMirrorRow = ARGBMirrorRow_Any_MSA; @@ -1275,6 +2602,14 @@ int ARGBMirror(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBMIRRORROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBMirrorRow = ARGBMirrorRow_Any_LASX; + if (IS_ALIGNED(width, 16)) { + ARGBMirrorRow = ARGBMirrorRow_LASX; + } + } +#endif // Mirror plane for (y = 0; y < height; ++y) { @@ -1349,15 +2684,15 @@ ARGBBlendRow GetARGBBlend() { ARGBBlendRow = ARGBBlendRow_NEON; } #endif -#if defined(HAS_ARGBBLENDROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBBlendRow = ARGBBlendRow_MMI; - } -#endif #if defined(HAS_ARGBBLENDROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBBlendRow = ARGBBlendRow_MSA; } +#endif +#if defined(HAS_ARGBBLENDROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGBBlendRow = ARGBBlendRow_LSX; + } #endif return ARGBBlendRow; } @@ -1451,14 +2786,6 @@ int BlendPlane(const uint8_t* src_y0, } } #endif -#if defined(HAS_BLENDPLANEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - BlendPlaneRow = BlendPlaneRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - BlendPlaneRow = BlendPlaneRow_MMI; - } - } -#endif for (y = 0; y < height; ++y) { BlendPlaneRow(src_y0, src_y1, alpha, dst_y, width); @@ -1503,6 +2830,7 @@ int I420Blend(const uint8_t* src_y0, BlendPlaneRow_C; void (*ScaleRowDown2)(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, int dst_width) = ScaleRowDown2Box_C; + if (!src_y0 || !src_u0 || !src_v0 || !src_y1 || !src_u1 || !src_v1 || !alpha || !dst_y || !dst_u || !dst_v || width <= 0 || height == 0) { return -1; @@ -1534,14 +2862,6 @@ int I420Blend(const uint8_t* src_y0, BlendPlaneRow = BlendPlaneRow_AVX2; } } -#endif -#if defined(HAS_BLENDPLANEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - BlendPlaneRow = BlendPlaneRow_Any_MMI; - if (IS_ALIGNED(halfwidth, 8)) { - BlendPlaneRow = BlendPlaneRow_MMI; - } - } #endif if (!IS_ALIGNED(width, 2)) { ScaleRowDown2 = ScaleRowDown2Box_Odd_C; @@ -1579,17 +2899,6 @@ int I420Blend(const uint8_t* src_y0, } } #endif -#if defined(HAS_SCALEROWDOWN2_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ScaleRowDown2 = ScaleRowDown2Box_Odd_MMI; - if (IS_ALIGNED(width, 2)) { - ScaleRowDown2 = ScaleRowDown2Box_Any_MMI; - if (IS_ALIGNED(halfwidth, 8)) { - ScaleRowDown2 = ScaleRowDown2Box_MMI; - } - } - } -#endif // Row buffer for intermediate alpha pixels. 
align_buffer_64(halfalpha, halfwidth); @@ -1667,14 +2976,6 @@ int ARGBMultiply(const uint8_t* src_argb0, } } #endif -#if defined(HAS_ARGBMULTIPLYROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBMultiplyRow = ARGBMultiplyRow_Any_MMI; - if (IS_ALIGNED(width, 2)) { - ARGBMultiplyRow = ARGBMultiplyRow_MMI; - } - } -#endif #if defined(HAS_ARGBMULTIPLYROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBMultiplyRow = ARGBMultiplyRow_Any_MSA; @@ -1683,6 +2984,14 @@ int ARGBMultiply(const uint8_t* src_argb0, } } #endif +#if defined(HAS_ARGBMULTIPLYROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBMultiplyRow = ARGBMultiplyRow_Any_LASX; + if (IS_ALIGNED(width, 8)) { + ARGBMultiplyRow = ARGBMultiplyRow_LASX; + } + } +#endif // Multiply plane for (y = 0; y < height; ++y) { @@ -1723,12 +3032,12 @@ int ARGBAdd(const uint8_t* src_argb0, height = 1; src_stride_argb0 = src_stride_argb1 = dst_stride_argb = 0; } -#if defined(HAS_ARGBADDROW_SSE2) && (defined(_MSC_VER) && !defined(__clang__)) +#if defined(HAS_ARGBADDROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { ARGBAddRow = ARGBAddRow_SSE2; } #endif -#if defined(HAS_ARGBADDROW_SSE2) && !(defined(_MSC_VER) && !defined(__clang__)) +#if defined(HAS_ARGBADDROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { ARGBAddRow = ARGBAddRow_Any_SSE2; if (IS_ALIGNED(width, 4)) { @@ -1752,14 +3061,6 @@ int ARGBAdd(const uint8_t* src_argb0, } } #endif -#if defined(HAS_ARGBADDROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBAddRow = ARGBAddRow_Any_MMI; - if (IS_ALIGNED(width, 2)) { - ARGBAddRow = ARGBAddRow_MMI; - } - } -#endif #if defined(HAS_ARGBADDROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBAddRow = ARGBAddRow_Any_MSA; @@ -1768,6 +3069,14 @@ int ARGBAdd(const uint8_t* src_argb0, } } #endif +#if defined(HAS_ARGBADDROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBAddRow = ARGBAddRow_Any_LASX; + if (IS_ALIGNED(width, 8)) { + ARGBAddRow = ARGBAddRow_LASX; + } + } +#endif // Add plane for (y = 0; y < height; ++y) { @@ -1832,14 +3141,6 @@ int ARGBSubtract(const uint8_t* src_argb0, } } #endif -#if defined(HAS_ARGBSUBTRACTROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBSubtractRow = ARGBSubtractRow_Any_MMI; - if (IS_ALIGNED(width, 2)) { - ARGBSubtractRow = ARGBSubtractRow_MMI; - } - } -#endif #if defined(HAS_ARGBSUBTRACTROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBSubtractRow = ARGBSubtractRow_Any_MSA; @@ -1848,6 +3149,14 @@ int ARGBSubtract(const uint8_t* src_argb0, } } #endif +#if defined(HAS_ARGBSUBTRACTROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBSubtractRow = ARGBSubtractRow_Any_LASX; + if (IS_ALIGNED(width, 8)) { + ARGBSubtractRow = ARGBSubtractRow_LASX; + } + } +#endif // Subtract plane for (y = 0; y < height; ++y) { @@ -1901,14 +3210,6 @@ int RAWToRGB24(const uint8_t* src_raw, } } #endif -#if defined(HAS_RAWTORGB24ROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - RAWToRGB24Row = RAWToRGB24Row_Any_MMI; - if (IS_ALIGNED(width, 4)) { - RAWToRGB24Row = RAWToRGB24Row_MMI; - } - } -#endif #if defined(HAS_RAWTORGB24ROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { RAWToRGB24Row = RAWToRGB24Row_Any_MSA; @@ -1917,6 +3218,14 @@ int RAWToRGB24(const uint8_t* src_raw, } } #endif +#if defined(HAS_RAWTORGB24ROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + RAWToRGB24Row = RAWToRGB24Row_Any_LSX; + if (IS_ALIGNED(width, 16)) { + RAWToRGB24Row = RAWToRGB24Row_LSX; + } + } +#endif for (y = 0; y < height; ++y) { RAWToRGB24Row(src_raw, dst_rgb24, width); @@ -1926,6 +3235,7 @@ int RAWToRGB24(const uint8_t* src_raw, return 0; } +// TODO(fbarchard): Consider uint8_t value LIBYUV_API void SetPlane(uint8_t* dst_y, int dst_stride_y, @@ 
-1933,7 +3243,11 @@ void SetPlane(uint8_t* dst_y, int height, uint32_t value) { int y; - void (*SetRow)(uint8_t * dst, uint8_t value, int width) = SetRow_C; + void (*SetRow)(uint8_t* dst, uint8_t value, int width) = SetRow_C; + + if (width <= 0 || height == 0) { + return; + } if (height < 0) { height = -height; dst_y = dst_y + (height - 1) * dst_stride_y; @@ -1971,10 +3285,18 @@ void SetPlane(uint8_t* dst_y, SetRow = SetRow_MSA; } #endif +#if defined(HAS_SETROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + SetRow = SetRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + SetRow = SetRow_LSX; + } + } +#endif // Set plane for (y = 0; y < height; ++y) { - SetRow(dst_y, value, width); + SetRow(dst_y, (uint8_t)value, width); dst_y += dst_stride_y; } } @@ -1999,6 +3321,7 @@ int I420Rect(uint8_t* dst_y, uint8_t* start_y = dst_y + y * dst_stride_y + x; uint8_t* start_u = dst_u + (y / 2) * dst_stride_u + (x / 2); uint8_t* start_v = dst_v + (y / 2) * dst_stride_v + (x / 2); + if (!dst_y || !dst_u || !dst_v || width <= 0 || height == 0 || x < 0 || y < 0 || value_y < 0 || value_y > 255 || value_u < 0 || value_u > 255 || value_v < 0 || value_v > 255) { @@ -2021,7 +3344,7 @@ int ARGBRect(uint8_t* dst_argb, int height, uint32_t value) { int y; - void (*ARGBSetRow)(uint8_t * dst_argb, uint32_t value, int width) = + void (*ARGBSetRow)(uint8_t* dst_argb, uint32_t value, int width) = ARGBSetRow_C; if (!dst_argb || width <= 0 || height == 0 || dst_x < 0 || dst_y < 0) { return -1; @@ -2052,14 +3375,6 @@ int ARGBRect(uint8_t* dst_argb, ARGBSetRow = ARGBSetRow_X86; } #endif -#if defined(HAS_ARGBSETROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBSetRow = ARGBSetRow_Any_MMI; - if (IS_ALIGNED(width, 4)) { - ARGBSetRow = ARGBSetRow_MMI; - } - } -#endif #if defined(HAS_ARGBSETROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBSetRow = ARGBSetRow_Any_MSA; @@ -2068,6 +3383,14 @@ int ARGBRect(uint8_t* dst_argb, } } #endif +#if defined(HAS_ARGBSETROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGBSetRow = ARGBSetRow_Any_LSX; + if (IS_ALIGNED(width, 4)) { + ARGBSetRow = ARGBSetRow_LSX; + } + } +#endif // Set plane for (y = 0; y < height; ++y) { @@ -2138,14 +3461,6 @@ int ARGBAttenuate(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBATTENUATEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBAttenuateRow = ARGBAttenuateRow_Any_MMI; - if (IS_ALIGNED(width, 2)) { - ARGBAttenuateRow = ARGBAttenuateRow_MMI; - } - } -#endif #if defined(HAS_ARGBATTENUATEROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBAttenuateRow = ARGBAttenuateRow_Any_MSA; @@ -2154,6 +3469,14 @@ int ARGBAttenuate(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBATTENUATEROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBAttenuateRow = ARGBAttenuateRow_Any_LASX; + if (IS_ALIGNED(width, 16)) { + ARGBAttenuateRow = ARGBAttenuateRow_LASX; + } + } +#endif for (y = 0; y < height; ++y) { ARGBAttenuateRow(src_argb, dst_argb, width); @@ -2249,16 +3572,16 @@ int ARGBGrayTo(const uint8_t* src_argb, ARGBGrayRow = ARGBGrayRow_NEON; } #endif -#if defined(HAS_ARGBGRAYROW_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(width, 2)) { - ARGBGrayRow = ARGBGrayRow_MMI; - } -#endif #if defined(HAS_ARGBGRAYROW_MSA) if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 8)) { ARGBGrayRow = ARGBGrayRow_MSA; } #endif +#if defined(HAS_ARGBGRAYROW_LASX) + if (TestCpuFlag(kCpuHasLASX) && IS_ALIGNED(width, 16)) { + ARGBGrayRow = ARGBGrayRow_LASX; + } +#endif for (y = 0; y < height; ++y) { ARGBGrayRow(src_argb, dst_argb, width); @@ -2299,16 +3622,16 @@ int ARGBGray(uint8_t* dst_argb, ARGBGrayRow = 
ARGBGrayRow_NEON; } #endif -#if defined(HAS_ARGBGRAYROW_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(width, 2)) { - ARGBGrayRow = ARGBGrayRow_MMI; - } -#endif #if defined(HAS_ARGBGRAYROW_MSA) if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 8)) { ARGBGrayRow = ARGBGrayRow_MSA; } #endif +#if defined(HAS_ARGBGRAYROW_LASX) + if (TestCpuFlag(kCpuHasLASX) && IS_ALIGNED(width, 16)) { + ARGBGrayRow = ARGBGrayRow_LASX; + } +#endif for (y = 0; y < height; ++y) { ARGBGrayRow(dst, dst, width); @@ -2326,7 +3649,7 @@ int ARGBSepia(uint8_t* dst_argb, int width, int height) { int y; - void (*ARGBSepiaRow)(uint8_t * dst_argb, int width) = ARGBSepiaRow_C; + void (*ARGBSepiaRow)(uint8_t* dst_argb, int width) = ARGBSepiaRow_C; uint8_t* dst = dst_argb + dst_y * dst_stride_argb + dst_x * 4; if (!dst_argb || width <= 0 || height <= 0 || dst_x < 0 || dst_y < 0) { return -1; @@ -2347,16 +3670,16 @@ int ARGBSepia(uint8_t* dst_argb, ARGBSepiaRow = ARGBSepiaRow_NEON; } #endif -#if defined(HAS_ARGBSEPIAROW_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(width, 2)) { - ARGBSepiaRow = ARGBSepiaRow_MMI; - } -#endif #if defined(HAS_ARGBSEPIAROW_MSA) if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 8)) { ARGBSepiaRow = ARGBSepiaRow_MSA; } #endif +#if defined(HAS_ARGBSEPIAROW_LASX) + if (TestCpuFlag(kCpuHasLASX) && IS_ALIGNED(width, 16)) { + ARGBSepiaRow = ARGBSepiaRow_LASX; + } +#endif for (y = 0; y < height; ++y) { ARGBSepiaRow(dst, width); @@ -2403,15 +3726,15 @@ int ARGBColorMatrix(const uint8_t* src_argb, ARGBColorMatrixRow = ARGBColorMatrixRow_NEON; } #endif -#if defined(HAS_ARGBCOLORMATRIXROW_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(width, 2)) { - ARGBColorMatrixRow = ARGBColorMatrixRow_MMI; - } -#endif #if defined(HAS_ARGBCOLORMATRIXROW_MSA) if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 8)) { ARGBColorMatrixRow = ARGBColorMatrixRow_MSA; } +#endif +#if defined(HAS_ARGBCOLORMATRIXROW_LSX) + if (TestCpuFlag(kCpuHasLSX) && IS_ALIGNED(width, 8)) { + ARGBColorMatrixRow = ARGBColorMatrixRow_LSX; + } #endif for (y = 0; y < height; ++y) { ARGBColorMatrixRow(src_argb, dst_argb, matrix_argb, width); @@ -2469,7 +3792,7 @@ int ARGBColorTable(uint8_t* dst_argb, int width, int height) { int y; - void (*ARGBColorTableRow)(uint8_t * dst_argb, const uint8_t* table_argb, + void (*ARGBColorTableRow)(uint8_t* dst_argb, const uint8_t* table_argb, int width) = ARGBColorTableRow_C; uint8_t* dst = dst_argb + dst_y * dst_stride_argb + dst_x * 4; if (!dst_argb || !table_argb || width <= 0 || height <= 0 || dst_x < 0 || @@ -2505,7 +3828,7 @@ int RGBColorTable(uint8_t* dst_argb, int width, int height) { int y; - void (*RGBColorTableRow)(uint8_t * dst_argb, const uint8_t* table_argb, + void (*RGBColorTableRow)(uint8_t* dst_argb, const uint8_t* table_argb, int width) = RGBColorTableRow_C; uint8_t* dst = dst_argb + dst_y * dst_stride_argb + dst_x * 4; if (!dst_argb || !table_argb || width <= 0 || height <= 0 || dst_x < 0 || @@ -2550,7 +3873,7 @@ int ARGBQuantize(uint8_t* dst_argb, int width, int height) { int y; - void (*ARGBQuantizeRow)(uint8_t * dst_argb, int scale, int interval_size, + void (*ARGBQuantizeRow)(uint8_t* dst_argb, int scale, int interval_size, int interval_offset, int width) = ARGBQuantizeRow_C; uint8_t* dst = dst_argb + dst_y * dst_stride_argb + dst_x * 4; if (!dst_argb || width <= 0 || height <= 0 || dst_x < 0 || dst_y < 0 || @@ -2577,6 +3900,11 @@ int ARGBQuantize(uint8_t* dst_argb, if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 8)) { ARGBQuantizeRow = ARGBQuantizeRow_MSA; } +#endif +#if 
defined(HAS_ARGBQUANTIZEROW_LSX) + if (TestCpuFlag(kCpuHasLSX) && IS_ALIGNED(width, 8)) { + ARGBQuantizeRow = ARGBQuantizeRow_LSX; + } #endif for (y = 0; y < height; ++y) { ARGBQuantizeRow(dst, scale, interval_size, interval_offset, width); @@ -2607,11 +3935,6 @@ int ARGBComputeCumulativeSum(const uint8_t* src_argb, ComputeCumulativeSumRow = ComputeCumulativeSumRow_SSE2; } #endif -#if defined(HAS_CUMULATIVESUMTOAVERAGEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ComputeCumulativeSumRow = ComputeCumulativeSumRow_MMI; - } -#endif memset(dst_cumsum, 0, width * sizeof(dst_cumsum[0]) * 4); // 4 int per pixel. for (y = 0; y < height; ++y) { @@ -2662,7 +3985,7 @@ int ARGBBlur(const uint8_t* src_argb, if (radius > (width / 2 - 1)) { radius = width / 2 - 1; } - if (radius <= 0) { + if (radius <= 0 || height <= 1) { return -1; } #if defined(HAS_CUMULATIVESUMTOAVERAGEROW_SSE2) @@ -2670,11 +3993,6 @@ int ARGBBlur(const uint8_t* src_argb, ComputeCumulativeSumRow = ComputeCumulativeSumRow_SSE2; CumulativeSumToAverageRow = CumulativeSumToAverageRow_SSE2; } -#endif -#if defined(HAS_CUMULATIVESUMTOAVERAGEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ComputeCumulativeSumRow = ComputeCumulativeSumRow_MMI; - } #endif // Compute enough CumulativeSum for first row to be blurred. After this // one row of CumulativeSum is updated at a time. @@ -2777,40 +4095,120 @@ int ARGBShade(const uint8_t* src_argb, ARGBShadeRow = ARGBShadeRow_NEON; } #endif -#if defined(HAS_ARGBSHADEROW_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(width, 2)) { - ARGBShadeRow = ARGBShadeRow_MMI; +#if defined(HAS_ARGBSHADEROW_MSA) + if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 4)) { + ARGBShadeRow = ARGBShadeRow_MSA; + } +#endif +#if defined(HAS_ARGBSHADEROW_LASX) + if (TestCpuFlag(kCpuHasLASX) && IS_ALIGNED(width, 8)) { + ARGBShadeRow = ARGBShadeRow_LASX; + } +#endif + + for (y = 0; y < height; ++y) { + ARGBShadeRow(src_argb, dst_argb, width, value); + src_argb += src_stride_argb; + dst_argb += dst_stride_argb; + } + return 0; +} + +// Interpolate 2 planes by specified amount (0 to 255). +LIBYUV_API +int InterpolatePlane(const uint8_t* src0, + int src_stride0, + const uint8_t* src1, + int src_stride1, + uint8_t* dst, + int dst_stride, + int width, + int height, + int interpolation) { + int y; + void (*InterpolateRow)(uint8_t* dst_ptr, const uint8_t* src_ptr, + ptrdiff_t src_stride, int dst_width, + int source_y_fraction) = InterpolateRow_C; + if (!src0 || !src1 || !dst || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + dst = dst + (height - 1) * dst_stride; + dst_stride = -dst_stride; + } + // Coalesce rows. 
+ if (src_stride0 == width && src_stride1 == width && dst_stride == width) { + width *= height; + height = 1; + src_stride0 = src_stride1 = dst_stride = 0; + } +#if defined(HAS_INTERPOLATEROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + InterpolateRow = InterpolateRow_Any_SSSE3; + if (IS_ALIGNED(width, 16)) { + InterpolateRow = InterpolateRow_SSSE3; + } + } +#endif +#if defined(HAS_INTERPOLATEROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + InterpolateRow = InterpolateRow_Any_AVX2; + if (IS_ALIGNED(width, 32)) { + InterpolateRow = InterpolateRow_AVX2; + } + } +#endif +#if defined(HAS_INTERPOLATEROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + InterpolateRow = InterpolateRow_Any_NEON; + if (IS_ALIGNED(width, 16)) { + InterpolateRow = InterpolateRow_NEON; + } + } +#endif +#if defined(HAS_INTERPOLATEROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + InterpolateRow = InterpolateRow_Any_MSA; + if (IS_ALIGNED(width, 32)) { + InterpolateRow = InterpolateRow_MSA; + } } #endif -#if defined(HAS_ARGBSHADEROW_MSA) - if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 4)) { - ARGBShadeRow = ARGBShadeRow_MSA; +#if defined(HAS_INTERPOLATEROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + InterpolateRow = InterpolateRow_Any_LSX; + if (IS_ALIGNED(width, 32)) { + InterpolateRow = InterpolateRow_LSX; + } } #endif for (y = 0; y < height; ++y) { - ARGBShadeRow(src_argb, dst_argb, width, value); - src_argb += src_stride_argb; - dst_argb += dst_stride_argb; + InterpolateRow(dst, src0, src1 - src0, width, interpolation); + src0 += src_stride0; + src1 += src_stride1; + dst += dst_stride; } return 0; } // Interpolate 2 planes by specified amount (0 to 255). LIBYUV_API -int InterpolatePlane(const uint8_t* src0, - int src_stride0, - const uint8_t* src1, - int src_stride1, - uint8_t* dst, - int dst_stride, - int width, - int height, - int interpolation) { +int InterpolatePlane_16(const uint16_t* src0, + int src_stride0, + const uint16_t* src1, + int src_stride1, + uint16_t* dst, + int dst_stride, + int width, + int height, + int interpolation) { int y; - void (*InterpolateRow)(uint8_t * dst_ptr, const uint8_t* src_ptr, - ptrdiff_t src_stride, int dst_width, - int source_y_fraction) = InterpolateRow_C; + void (*InterpolateRow_16)(uint16_t* dst_ptr, const uint16_t* src_ptr, + ptrdiff_t src_stride, int dst_width, + int source_y_fraction) = InterpolateRow_16_C; if (!src0 || !src1 || !dst || width <= 0 || height == 0) { return -1; } @@ -2826,49 +4224,49 @@ int InterpolatePlane(const uint8_t* src0, height = 1; src_stride0 = src_stride1 = dst_stride = 0; } -#if defined(HAS_INTERPOLATEROW_SSSE3) +#if defined(HAS_INTERPOLATEROW_16_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - InterpolateRow = InterpolateRow_Any_SSSE3; + InterpolateRow_16 = InterpolateRow_16_Any_SSSE3; if (IS_ALIGNED(width, 16)) { - InterpolateRow = InterpolateRow_SSSE3; + InterpolateRow_16 = InterpolateRow_16_SSSE3; } } #endif -#if defined(HAS_INTERPOLATEROW_AVX2) +#if defined(HAS_INTERPOLATEROW_16_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - InterpolateRow = InterpolateRow_Any_AVX2; + InterpolateRow_16 = InterpolateRow_16_Any_AVX2; if (IS_ALIGNED(width, 32)) { - InterpolateRow = InterpolateRow_AVX2; + InterpolateRow_16 = InterpolateRow_16_AVX2; } } #endif -#if defined(HAS_INTERPOLATEROW_NEON) +#if defined(HAS_INTERPOLATEROW_16_NEON) if (TestCpuFlag(kCpuHasNEON)) { - InterpolateRow = InterpolateRow_Any_NEON; - if (IS_ALIGNED(width, 16)) { - InterpolateRow = InterpolateRow_NEON; + InterpolateRow_16 = InterpolateRow_16_Any_NEON; + if (IS_ALIGNED(width, 8)) { + InterpolateRow_16 = 
InterpolateRow_16_NEON; } } #endif -#if defined(HAS_INTERPOLATEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - InterpolateRow = InterpolateRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - InterpolateRow = InterpolateRow_MMI; +#if defined(HAS_INTERPOLATEROW_16_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + InterpolateRow_16 = InterpolateRow_16_Any_MSA; + if (IS_ALIGNED(width, 32)) { + InterpolateRow_16 = InterpolateRow_16_MSA; } } #endif -#if defined(HAS_INTERPOLATEROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - InterpolateRow = InterpolateRow_Any_MSA; +#if defined(HAS_INTERPOLATEROW_16_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + InterpolateRow_16 = InterpolateRow_16_Any_LSX; if (IS_ALIGNED(width, 32)) { - InterpolateRow = InterpolateRow_MSA; + InterpolateRow_16 = InterpolateRow_16_LSX; } } #endif for (y = 0; y < height; ++y) { - InterpolateRow(dst, src0, src1 - src0, width, interpolation); + InterpolateRow_16(dst, src0, src1 - src0, width, interpolation); src0 += src_stride0; src1 += src_stride1; dst += dst_stride; @@ -2917,10 +4315,12 @@ int I420Interpolate(const uint8_t* src0_y, int interpolation) { int halfwidth = (width + 1) >> 1; int halfheight = (height + 1) >> 1; + if (!src0_y || !src0_u || !src0_v || !src1_y || !src1_u || !src1_v || !dst_y || !dst_u || !dst_v || width <= 0 || height == 0) { return -1; } + InterpolatePlane(src0_y, src0_stride_y, src1_y, src1_stride_y, dst_y, dst_stride_y, width, height, interpolation); InterpolatePlane(src0_u, src0_stride_u, src1_u, src1_stride_u, dst_u, @@ -2981,14 +4381,6 @@ int ARGBShuffle(const uint8_t* src_bgra, } } #endif -#if defined(HAS_ARGBSHUFFLEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBShuffleRow = ARGBShuffleRow_Any_MMI; - if (IS_ALIGNED(width, 2)) { - ARGBShuffleRow = ARGBShuffleRow_MMI; - } - } -#endif #if defined(HAS_ARGBSHUFFLEROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBShuffleRow = ARGBShuffleRow_Any_MSA; @@ -2997,6 +4389,14 @@ int ARGBShuffle(const uint8_t* src_bgra, } } #endif +#if defined(HAS_ARGBSHUFFLEROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBShuffleRow = ARGBShuffleRow_Any_LASX; + if (IS_ALIGNED(width, 16)) { + ARGBShuffleRow = ARGBShuffleRow_LASX; + } + } +#endif for (y = 0; y < height; ++y) { ARGBShuffleRow(src_bgra, dst_argb, shuffler, width); @@ -3006,6 +4406,68 @@ int ARGBShuffle(const uint8_t* src_bgra, return 0; } +// Shuffle AR64 channel order. e.g. AR64 to AB64. +LIBYUV_API +int AR64Shuffle(const uint16_t* src_ar64, + int src_stride_ar64, + uint16_t* dst_ar64, + int dst_stride_ar64, + const uint8_t* shuffler, + int width, + int height) { + int y; + void (*AR64ShuffleRow)(const uint8_t* src_ar64, uint8_t* dst_ar64, + const uint8_t* shuffler, int width) = AR64ShuffleRow_C; + if (!src_ar64 || !dst_ar64 || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_ar64 = src_ar64 + (height - 1) * src_stride_ar64; + src_stride_ar64 = -src_stride_ar64; + } + // Coalesce rows. + if (src_stride_ar64 == width * 4 && dst_stride_ar64 == width * 4) { + width *= height; + height = 1; + src_stride_ar64 = dst_stride_ar64 = 0; + } + // Assembly versions can be reused if it's implemented with shuffle. 
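The dispatch that follows reuses the byte-oriented ARGB shuffle rows for 16-bit AR64 data by doubling the element count. A minimal standalone sketch of why that works (the 8-byte grouping and shuffler values here are assumptions of this example, not libyuv's tables): swapping 16-bit channels is just a permutation of byte pairs, so a byte shuffle never needs to know the channel depth.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical 8-byte shuffler: swap the first and third 16-bit channels of
 * each 4x16-bit pixel by moving byte pairs; the other channels stay put. */
static const uint8_t kSwapCh0Ch2[8] = {4, 5, 2, 3, 0, 1, 6, 7};

static void ShuffleBytesPerPixel(const uint8_t* src, uint8_t* dst,
                                 const uint8_t* shuffler, int pixels) {
  int i, b;
  for (i = 0; i < pixels; ++i) {
    for (b = 0; b < 8; ++b) {
      dst[b] = src[shuffler[b]];  /* pure byte permutation */
    }
    src += 8;
    dst += 8;
  }
}

int main(void) {
  const uint16_t src_pixel[4] = {0x1111, 0x2222, 0x3333, 0x4444};
  uint16_t dst_pixel[4];
  ShuffleBytesPerPixel((const uint8_t*)src_pixel, (uint8_t*)dst_pixel,
                       kSwapCh0Ch2, 1);
  /* Channels 0 and 2 swapped: 3333 2222 1111 4444 */
  printf("%04x %04x %04x %04x\n", dst_pixel[0], dst_pixel[1], dst_pixel[2],
         dst_pixel[3]);
  return 0;
}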
+#if defined(HAS_ARGBSHUFFLEROW_SSSE3) + if (TestCpuFlag(kCpuHasSSSE3)) { + AR64ShuffleRow = ARGBShuffleRow_Any_SSSE3; + if (IS_ALIGNED(width, 8)) { + AR64ShuffleRow = ARGBShuffleRow_SSSE3; + } + } +#endif +#if defined(HAS_ARGBSHUFFLEROW_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + AR64ShuffleRow = ARGBShuffleRow_Any_AVX2; + if (IS_ALIGNED(width, 16)) { + AR64ShuffleRow = ARGBShuffleRow_AVX2; + } + } +#endif +#if defined(HAS_ARGBSHUFFLEROW_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + AR64ShuffleRow = ARGBShuffleRow_Any_NEON; + if (IS_ALIGNED(width, 4)) { + AR64ShuffleRow = ARGBShuffleRow_NEON; + } + } +#endif + + for (y = 0; y < height; ++y) { + AR64ShuffleRow((uint8_t*)(src_ar64), (uint8_t*)(dst_ar64), shuffler, + width * 2); + src_ar64 += src_stride_ar64; + dst_ar64 += dst_stride_ar64; + } + return 0; +} + // Gauss blur a float plane using Gaussian 5x5 filter with // coefficients of 1, 4, 6, 4, 1. // Each destination pixel is a blur of the 5x5 @@ -3129,19 +4591,11 @@ static int ARGBSobelize(const uint8_t* src_argb, #if defined(HAS_ARGBTOYJROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { ARGBToYJRow = ARGBToYJRow_Any_NEON; - if (IS_ALIGNED(width, 8)) { + if (IS_ALIGNED(width, 16)) { ARGBToYJRow = ARGBToYJRow_NEON; } } #endif -#if defined(HAS_ARGBTOYJROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBToYJRow = ARGBToYJRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBToYJRow = ARGBToYJRow_MMI; - } - } -#endif #if defined(HAS_ARGBTOYJROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBToYJRow = ARGBToYJRow_Any_MSA; @@ -3150,6 +4604,22 @@ static int ARGBSobelize(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBTOYJROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGBToYJRow = ARGBToYJRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + ARGBToYJRow = ARGBToYJRow_LSX; + } + } +#endif +#if defined(HAS_ARGBTOYJROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBToYJRow = ARGBToYJRow_Any_LASX; + if (IS_ALIGNED(width, 32)) { + ARGBToYJRow = ARGBToYJRow_LASX; + } + } +#endif #if defined(HAS_SOBELYROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { @@ -3161,11 +4631,6 @@ static int ARGBSobelize(const uint8_t* src_argb, SobelYRow = SobelYRow_NEON; } #endif -#if defined(HAS_SOBELYROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - SobelYRow = SobelYRow_MMI; - } -#endif #if defined(HAS_SOBELYROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { SobelYRow = SobelYRow_MSA; @@ -3181,11 +4646,6 @@ static int ARGBSobelize(const uint8_t* src_argb, SobelXRow = SobelXRow_NEON; } #endif -#if defined(HAS_SOBELXROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - SobelXRow = SobelXRow_MMI; - } -#endif #if defined(HAS_SOBELXROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { SobelXRow = SobelXRow_MSA; @@ -3193,16 +4653,16 @@ static int ARGBSobelize(const uint8_t* src_argb, #endif { // 3 rows with edges before/after. - const int kRowSize = (width + kEdge + 31) & ~31; - align_buffer_64(rows, kRowSize * 2 + (kEdge + kRowSize * 3 + kEdge)); + const int row_size = (width + kEdge + 31) & ~31; + align_buffer_64(rows, row_size * 2 + (kEdge + row_size * 3 + kEdge)); uint8_t* row_sobelx = rows; - uint8_t* row_sobely = rows + kRowSize; - uint8_t* row_y = rows + kRowSize * 2; + uint8_t* row_sobely = rows + row_size; + uint8_t* row_y = rows + row_size * 2; // Convert first row. uint8_t* row_y0 = row_y + kEdge; - uint8_t* row_y1 = row_y0 + kRowSize; - uint8_t* row_y2 = row_y1 + kRowSize; + uint8_t* row_y1 = row_y0 + row_size; + uint8_t* row_y2 = row_y1 + row_size; ARGBToYJRow(src_argb, row_y0, width); row_y0[-1] = row_y0[0]; memset(row_y0 + width, row_y0[width - 1], 16); // Extrude 16 for valgrind. 
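For context on the rows being staged above: ARGBSobelize feeds three consecutive luma rows (with the edges replicated, hence the row_y0[-1] write and the trailing memset) into the SobelX/SobelY row kernels. A plain scalar sketch of the classic 3x3 Sobel response those kernels correspond to, illustrative only and not the exact libyuv row code:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

static uint8_t SobelPixel(const uint8_t* r0, const uint8_t* r1,
                          const uint8_t* r2, int x) {
  /* Horizontal kernel: [-1 0 1; -2 0 2; -1 0 1] */
  int gx = -r0[x - 1] + r0[x + 1] - 2 * r1[x - 1] + 2 * r1[x + 1] -
           r2[x - 1] + r2[x + 1];
  /* Vertical kernel: [-1 -2 -1; 0 0 0; 1 2 1] */
  int gy = -r0[x - 1] - 2 * r0[x] - r0[x + 1] + r2[x - 1] + 2 * r2[x] +
           r2[x + 1];
  int mag = abs(gx) + abs(gy);  /* cheap |gx| + |gy| magnitude */
  return (uint8_t)(mag > 255 ? 255 : mag);
}

int main(void) {
  /* A vertical edge: dark columns then bright columns. */
  const uint8_t row[6] = {0, 0, 0, 255, 255, 255};
  printf("%d\n", SobelPixel(row, row, row, 3));  /* strong response: 255 */
  return 0;
}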
@@ -3265,14 +4725,6 @@ int ARGBSobel(const uint8_t* src_argb, } } #endif -#if defined(HAS_SOBELROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - SobelRow = SobelRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - SobelRow = SobelRow_MMI; - } - } -#endif #if defined(HAS_SOBELROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { SobelRow = SobelRow_Any_MSA; @@ -3280,6 +4732,14 @@ int ARGBSobel(const uint8_t* src_argb, SobelRow = SobelRow_MSA; } } +#endif +#if defined(HAS_SOBELROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + SobelRow = SobelRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + SobelRow = SobelRow_LSX; + } + } #endif return ARGBSobelize(src_argb, src_stride_argb, dst_argb, dst_stride_argb, width, height, SobelRow); @@ -3311,14 +4771,6 @@ int ARGBSobelToPlane(const uint8_t* src_argb, } } #endif -#if defined(HAS_SOBELTOPLANEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - SobelToPlaneRow = SobelToPlaneRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - SobelToPlaneRow = SobelToPlaneRow_MMI; - } - } -#endif #if defined(HAS_SOBELTOPLANEROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { SobelToPlaneRow = SobelToPlaneRow_Any_MSA; @@ -3326,6 +4778,14 @@ int ARGBSobelToPlane(const uint8_t* src_argb, SobelToPlaneRow = SobelToPlaneRow_MSA; } } +#endif +#if defined(HAS_SOBELTOPLANEROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + SobelToPlaneRow = SobelToPlaneRow_Any_LSX; + if (IS_ALIGNED(width, 32)) { + SobelToPlaneRow = SobelToPlaneRow_LSX; + } + } #endif return ARGBSobelize(src_argb, src_stride_argb, dst_y, dst_stride_y, width, height, SobelToPlaneRow); @@ -3358,14 +4818,6 @@ int ARGBSobelXY(const uint8_t* src_argb, } } #endif -#if defined(HAS_SOBELXYROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - SobelXYRow = SobelXYRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - SobelXYRow = SobelXYRow_MMI; - } - } -#endif #if defined(HAS_SOBELXYROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { SobelXYRow = SobelXYRow_Any_MSA; @@ -3373,6 +4825,14 @@ int ARGBSobelXY(const uint8_t* src_argb, SobelXYRow = SobelXYRow_MSA; } } +#endif +#if defined(HAS_SOBELXYROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + SobelXYRow = SobelXYRow_Any_LSX; + if (IS_ALIGNED(width, 16)) { + SobelXYRow = SobelXYRow_LSX; + } + } #endif return ARGBSobelize(src_argb, src_stride_argb, dst_argb, dst_stride_argb, width, height, SobelXYRow); @@ -3497,6 +4957,14 @@ int HalfFloatPlane(const uint16_t* src_y, } } #endif +#if defined(HAS_HALFFLOATROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + HalfFloatRow = HalfFloatRow_Any_LSX; + if (IS_ALIGNED(width, 32)) { + HalfFloatRow = HalfFloatRow_LSX; + } + } +#endif for (y = 0; y < height; ++y) { HalfFloatRow(src_y, dst_y, scale, width); @@ -3611,14 +5079,6 @@ int ARGBCopyAlpha(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBCOPYALPHAROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBCopyAlphaRow = ARGBCopyAlphaRow_Any_MMI; - if (IS_ALIGNED(width, 2)) { - ARGBCopyAlphaRow = ARGBCopyAlphaRow_MMI; - } - } -#endif for (y = 0; y < height; ++y) { ARGBCopyAlphaRow(src_argb, dst_argb, width); @@ -3671,18 +5131,18 @@ int ARGBExtractAlpha(const uint8_t* src_argb, : ARGBExtractAlphaRow_Any_NEON; } #endif -#if defined(HAS_ARGBEXTRACTALPHAROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBExtractAlphaRow = IS_ALIGNED(width, 8) ? ARGBExtractAlphaRow_MMI - : ARGBExtractAlphaRow_Any_MMI; - } -#endif #if defined(HAS_ARGBEXTRACTALPHAROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBExtractAlphaRow = IS_ALIGNED(width, 16) ? 
ARGBExtractAlphaRow_MSA : ARGBExtractAlphaRow_Any_MSA; } #endif +#if defined(HAS_ARGBEXTRACTALPHAROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ARGBExtractAlphaRow = IS_ALIGNED(width, 16) ? ARGBExtractAlphaRow_LSX + : ARGBExtractAlphaRow_Any_LSX; + } +#endif for (int y = 0; y < height; ++y) { ARGBExtractAlphaRow(src_argb, dst_a, width); @@ -3734,14 +5194,6 @@ int ARGBCopyYToAlpha(const uint8_t* src_y, } } #endif -#if defined(HAS_ARGBCOPYYTOALPHAROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBCopyYToAlphaRow = ARGBCopyYToAlphaRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - ARGBCopyYToAlphaRow = ARGBCopyYToAlphaRow_MMI; - } - } -#endif for (y = 0; y < height; ++y) { ARGBCopyYToAlphaRow(src_y, dst_argb, width); @@ -3751,9 +5203,6 @@ int ARGBCopyYToAlpha(const uint8_t* src_y, return 0; } -// TODO(fbarchard): Consider if width is even Y channel can be split -// directly. A SplitUVRow_Odd function could copy the remaining chroma. - LIBYUV_API int YUY2ToNV12(const uint8_t* src_yuy2, int src_stride_yuy2, @@ -3764,124 +5213,97 @@ int YUY2ToNV12(const uint8_t* src_yuy2, int width, int height) { int y; - int halfwidth = (width + 1) >> 1; - void (*SplitUVRow)(const uint8_t* src_uv, uint8_t* dst_u, uint8_t* dst_v, - int width) = SplitUVRow_C; - void (*InterpolateRow)(uint8_t * dst_ptr, const uint8_t* src_ptr, - ptrdiff_t src_stride, int dst_width, - int source_y_fraction) = InterpolateRow_C; + void (*YUY2ToYRow)(const uint8_t* src_yuy2, uint8_t* dst_y, int width) = + YUY2ToYRow_C; + void (*YUY2ToNVUVRow)(const uint8_t* src_yuy2, int stride_yuy2, + uint8_t* dst_uv, int width) = YUY2ToNVUVRow_C; if (!src_yuy2 || !dst_y || !dst_uv || width <= 0 || height == 0) { return -1; } + // Negative height means invert the image. if (height < 0) { height = -height; src_yuy2 = src_yuy2 + (height - 1) * src_stride_yuy2; src_stride_yuy2 = -src_stride_yuy2; } -#if defined(HAS_SPLITUVROW_SSE2) +#if defined(HAS_YUY2TOYROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { - SplitUVRow = SplitUVRow_Any_SSE2; + YUY2ToYRow = YUY2ToYRow_Any_SSE2; if (IS_ALIGNED(width, 16)) { - SplitUVRow = SplitUVRow_SSE2; + YUY2ToYRow = YUY2ToYRow_SSE2; } } #endif -#if defined(HAS_SPLITUVROW_AVX2) +#if defined(HAS_YUY2TOYROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - SplitUVRow = SplitUVRow_Any_AVX2; + YUY2ToYRow = YUY2ToYRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - SplitUVRow = SplitUVRow_AVX2; + YUY2ToYRow = YUY2ToYRow_AVX2; } } #endif -#if defined(HAS_SPLITUVROW_NEON) +#if defined(HAS_YUY2TOYROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - SplitUVRow = SplitUVRow_Any_NEON; + YUY2ToYRow = YUY2ToYRow_Any_NEON; if (IS_ALIGNED(width, 16)) { - SplitUVRow = SplitUVRow_NEON; + YUY2ToYRow = YUY2ToYRow_NEON; } } #endif -#if defined(HAS_SPLITUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - SplitUVRow = SplitUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - SplitUVRow = SplitUVRow_MMI; +#if defined(HAS_YUY2TOYROW_MSA) && defined(HAS_YUY2TOUV422ROW_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + YUY2ToYRow = YUY2ToYRow_Any_MSA; + if (IS_ALIGNED(width, 32)) { + YUY2ToYRow = YUY2ToYRow_MSA; } } #endif -#if defined(HAS_SPLITUVROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - SplitUVRow = SplitUVRow_Any_MSA; +#if defined(HAS_YUY2TOYROW_LASX) && defined(HAS_YUY2TOUV422ROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + YUY2ToYRow = YUY2ToYRow_Any_LASX; if (IS_ALIGNED(width, 32)) { - SplitUVRow = SplitUVRow_MSA; + YUY2ToYRow = YUY2ToYRow_LASX; } } #endif -#if defined(HAS_INTERPOLATEROW_SSSE3) - if (TestCpuFlag(kCpuHasSSSE3)) { - InterpolateRow = InterpolateRow_Any_SSSE3; + +#if 
defined(HAS_YUY2TONVUVROW_SSE2) + if (TestCpuFlag(kCpuHasSSE2)) { + YUY2ToNVUVRow = YUY2ToNVUVRow_Any_SSE2; if (IS_ALIGNED(width, 16)) { - InterpolateRow = InterpolateRow_SSSE3; + YUY2ToNVUVRow = YUY2ToNVUVRow_SSE2; } } #endif -#if defined(HAS_INTERPOLATEROW_AVX2) +#if defined(HAS_YUY2TONVUVROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - InterpolateRow = InterpolateRow_Any_AVX2; + YUY2ToNVUVRow = YUY2ToNVUVRow_Any_AVX2; if (IS_ALIGNED(width, 32)) { - InterpolateRow = InterpolateRow_AVX2; + YUY2ToNVUVRow = YUY2ToNVUVRow_AVX2; } } #endif -#if defined(HAS_INTERPOLATEROW_NEON) +#if defined(HAS_YUY2TONVUVROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { - InterpolateRow = InterpolateRow_Any_NEON; + YUY2ToNVUVRow = YUY2ToNVUVRow_Any_NEON; if (IS_ALIGNED(width, 16)) { - InterpolateRow = InterpolateRow_NEON; - } - } -#endif -#if defined(HAS_INTERPOLATEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - InterpolateRow = InterpolateRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - InterpolateRow = InterpolateRow_MMI; - } - } -#endif -#if defined(HAS_INTERPOLATEROW_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - InterpolateRow = InterpolateRow_Any_MSA; - if (IS_ALIGNED(width, 32)) { - InterpolateRow = InterpolateRow_MSA; + YUY2ToNVUVRow = YUY2ToNVUVRow_NEON; } } #endif - { - int awidth = halfwidth * 2; - // row of y and 2 rows of uv - align_buffer_64(rows, awidth * 3); - - for (y = 0; y < height - 1; y += 2) { - // Split Y from UV. - SplitUVRow(src_yuy2, rows, rows + awidth, awidth); - memcpy(dst_y, rows, width); - SplitUVRow(src_yuy2 + src_stride_yuy2, rows, rows + awidth * 2, awidth); - memcpy(dst_y + dst_stride_y, rows, width); - InterpolateRow(dst_uv, rows + awidth, awidth, awidth, 128); - src_yuy2 += src_stride_yuy2 * 2; - dst_y += dst_stride_y * 2; - dst_uv += dst_stride_uv; - } - if (height & 1) { - // Split Y from UV. - SplitUVRow(src_yuy2, rows, dst_uv, awidth); - memcpy(dst_y, rows, width); - } - free_aligned_buffer_64(rows); + for (y = 0; y < height - 1; y += 2) { + YUY2ToYRow(src_yuy2, dst_y, width); + YUY2ToYRow(src_yuy2 + src_stride_yuy2, dst_y + dst_stride_y, width); + YUY2ToNVUVRow(src_yuy2, src_stride_yuy2, dst_uv, width); + src_yuy2 += src_stride_yuy2 * 2; + dst_y += dst_stride_y * 2; + dst_uv += dst_stride_uv; + } + if (height & 1) { + YUY2ToYRow(src_yuy2, dst_y, width); + YUY2ToNVUVRow(src_yuy2, 0, dst_uv, width); } return 0; } @@ -3899,12 +5321,14 @@ int UYVYToNV12(const uint8_t* src_uyvy, int halfwidth = (width + 1) >> 1; void (*SplitUVRow)(const uint8_t* src_uv, uint8_t* dst_u, uint8_t* dst_v, int width) = SplitUVRow_C; - void (*InterpolateRow)(uint8_t * dst_ptr, const uint8_t* src_ptr, + void (*InterpolateRow)(uint8_t* dst_ptr, const uint8_t* src_ptr, ptrdiff_t src_stride, int dst_width, int source_y_fraction) = InterpolateRow_C; + if (!src_uyvy || !dst_y || !dst_uv || width <= 0 || height == 0) { return -1; } + // Negative height means invert the image. 
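The rewritten YUY2ToNV12 above now derives the NV12 UV plane directly from pairs of packed 4:2:2 rows instead of splitting and interpolating through a scratch buffer. A plain C sketch of that per-row-pair step (illustrative, not the libyuv row function; width assumed even):

#include <stdint.h>

static void Yuy2PairToNv12UvRow(const uint8_t* row0, const uint8_t* row1,
                                uint8_t* dst_uv, int width) {
  int x;
  for (x = 0; x < width; x += 2) {
    /* YUY2 packs two pixels as Y0,U,Y1,V. */
    int u0 = row0[x * 2 + 1], v0 = row0[x * 2 + 3];
    int u1 = row1[x * 2 + 1], v1 = row1[x * 2 + 3];
    *dst_uv++ = (uint8_t)((u0 + u1 + 1) >> 1);  /* U, averaged vertically */
    *dst_uv++ = (uint8_t)((v0 + v1 + 1) >> 1);  /* V, averaged vertically */
  }
}

int main(void) {
  const uint8_t top[8] = {16, 100, 17, 200, 18, 104, 19, 204};  /* 4 px YUY2 */
  const uint8_t bot[8] = {20, 110, 21, 210, 22, 114, 23, 214};
  uint8_t uv[4];
  Yuy2PairToNv12UvRow(top, bot, uv, 4);
  /* uv = {105, 205, 109, 209}: interleaved U,V at half vertical resolution */
  return 0;
}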
if (height < 0) { height = -height; @@ -3935,14 +5359,6 @@ int UYVYToNV12(const uint8_t* src_uyvy, } } #endif -#if defined(HAS_SPLITUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - SplitUVRow = SplitUVRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - SplitUVRow = SplitUVRow_MMI; - } - } -#endif #if defined(HAS_SPLITUVROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { SplitUVRow = SplitUVRow_Any_MSA; @@ -3951,6 +5367,14 @@ int UYVYToNV12(const uint8_t* src_uyvy, } } #endif +#if defined(HAS_SPLITUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + SplitUVRow = SplitUVRow_Any_LSX; + if (IS_ALIGNED(width, 32)) { + SplitUVRow = SplitUVRow_LSX; + } + } +#endif #if defined(HAS_INTERPOLATEROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { InterpolateRow = InterpolateRow_Any_SSSE3; @@ -3975,14 +5399,6 @@ int UYVYToNV12(const uint8_t* src_uyvy, } } #endif -#if defined(HAS_INTERPOLATEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - InterpolateRow = InterpolateRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - InterpolateRow = InterpolateRow_MMI; - } - } -#endif #if defined(HAS_INTERPOLATEROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { InterpolateRow = InterpolateRow_Any_MSA; @@ -3991,6 +5407,14 @@ int UYVYToNV12(const uint8_t* src_uyvy, } } #endif +#if defined(HAS_INTERPOLATEROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + InterpolateRow = InterpolateRow_Any_LSX; + if (IS_ALIGNED(width, 32)) { + InterpolateRow = InterpolateRow_LSX; + } + } +#endif { int awidth = halfwidth * 2; diff --git a/TMessagesProj/jni/third_party/libyuv/source/rotate.cc b/TMessagesProj/jni/third_party/libyuv/source/rotate.cc index 32904e4731..b1b4458e66 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/rotate.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/rotate.cc @@ -29,7 +29,7 @@ void TransposePlane(const uint8_t* src, int width, int height) { int i = height; -#if defined(HAS_TRANSPOSEWX16_MSA) +#if defined(HAS_TRANSPOSEWX16_MSA) || defined(HAS_TRANSPOSEWX16_LSX) void (*TransposeWx16)(const uint8_t* src, int src_stride, uint8_t* dst, int dst_stride, int width) = TransposeWx16_C; #else @@ -37,17 +37,12 @@ void TransposePlane(const uint8_t* src, int dst_stride, int width) = TransposeWx8_C; #endif -#if defined(HAS_TRANSPOSEWX16_MSA) - if (TestCpuFlag(kCpuHasMSA)) { - TransposeWx16 = TransposeWx16_Any_MSA; - if (IS_ALIGNED(width, 16)) { - TransposeWx16 = TransposeWx16_MSA; - } - } -#else #if defined(HAS_TRANSPOSEWX8_NEON) if (TestCpuFlag(kCpuHasNEON)) { - TransposeWx8 = TransposeWx8_NEON; + TransposeWx8 = TransposeWx8_Any_NEON; + if (IS_ALIGNED(width, 8)) { + TransposeWx8 = TransposeWx8_NEON; + } } #endif #if defined(HAS_TRANSPOSEWX8_SSSE3) @@ -58,11 +53,6 @@ void TransposePlane(const uint8_t* src, } } #endif -#if defined(HAS_TRANSPOSEWX8_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - TransposeWx8 = TransposeWx8_MMI; - } -#endif #if defined(HAS_TRANSPOSEWX8_FAST_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { TransposeWx8 = TransposeWx8_Fast_Any_SSSE3; @@ -71,9 +61,24 @@ void TransposePlane(const uint8_t* src, } } #endif -#endif /* defined(HAS_TRANSPOSEWX16_MSA) */ - #if defined(HAS_TRANSPOSEWX16_MSA) + if (TestCpuFlag(kCpuHasMSA)) { + TransposeWx16 = TransposeWx16_Any_MSA; + if (IS_ALIGNED(width, 16)) { + TransposeWx16 = TransposeWx16_MSA; + } + } +#endif +#if defined(HAS_TRANSPOSEWX16_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + TransposeWx16 = TransposeWx16_Any_LSX; + if (IS_ALIGNED(width, 16)) { + TransposeWx16 = TransposeWx16_LSX; + } + } +#endif + +#if defined(HAS_TRANSPOSEWX16_MSA) || defined(HAS_TRANSPOSEWX16_LSX) // Work across the source in 16x16 tiles while (i >= 16) { 
TransposeWx16(src, src_stride, dst, dst_stride, width); @@ -133,7 +138,7 @@ void RotatePlane180(const uint8_t* src, int dst_stride, int width, int height) { - // Swap first and last row and mirror the content. Uses a temporary row. + // Swap top and bottom row and mirror the content. Uses a temporary row. align_buffer_64(row, width); const uint8_t* src_bot = src + src_stride * (height - 1); uint8_t* dst_bot = dst + dst_stride * (height - 1); @@ -165,14 +170,6 @@ void RotatePlane180(const uint8_t* src, } } #endif -#if defined(HAS_MIRRORROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - MirrorRow = MirrorRow_Any_MMI; - if (IS_ALIGNED(width, 8)) { - MirrorRow = MirrorRow_MMI; - } - } -#endif #if defined(HAS_MIRRORROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { MirrorRow = MirrorRow_Any_MSA; @@ -181,6 +178,14 @@ void RotatePlane180(const uint8_t* src, } } #endif +#if defined(HAS_MIRRORROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + MirrorRow = MirrorRow_Any_LASX; + if (IS_ALIGNED(width, 64)) { + MirrorRow = MirrorRow_LASX; + } + } +#endif #if defined(HAS_COPYROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { CopyRow = IS_ALIGNED(width, 32) ? CopyRow_SSE2 : CopyRow_Any_SSE2; @@ -201,17 +206,12 @@ void RotatePlane180(const uint8_t* src, CopyRow = IS_ALIGNED(width, 32) ? CopyRow_NEON : CopyRow_Any_NEON; } #endif -#if defined(HAS_COPYROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - CopyRow = IS_ALIGNED(width, 8) ? CopyRow_MMI : CopyRow_Any_MMI; - } -#endif // Odd height will harmlessly mirror the middle row twice. for (y = 0; y < half_height; ++y) { - CopyRow(src, row, width); // Copy first row into buffer - MirrorRow(src_bot, dst, width); // Mirror last row into first row - MirrorRow(row, dst_bot, width); // Mirror buffer into last row + CopyRow(src, row, width); // Copy top row into buffer + MirrorRow(src_bot, dst, width); // Mirror bottom row into top row + MirrorRow(row, dst_bot, width); // Mirror buffer into bottom row src += src_stride; dst += dst_stride; src_bot -= src_stride; @@ -221,19 +221,23 @@ void RotatePlane180(const uint8_t* src, } LIBYUV_API -void TransposeUV(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width, - int height) { +void SplitTransposeUV(const uint8_t* src, + int src_stride, + uint8_t* dst_a, + int dst_stride_a, + uint8_t* dst_b, + int dst_stride_b, + int width, + int height) { int i = height; #if defined(HAS_TRANSPOSEUVWX16_MSA) void (*TransposeUVWx16)(const uint8_t* src, int src_stride, uint8_t* dst_a, int dst_stride_a, uint8_t* dst_b, int dst_stride_b, int width) = TransposeUVWx16_C; +#elif defined(HAS_TRANSPOSEUVWX16_LSX) + void (*TransposeUVWx16)(const uint8_t* src, int src_stride, uint8_t* dst_a, + int dst_stride_a, uint8_t* dst_b, int dst_stride_b, + int width) = TransposeUVWx16_C; #else void (*TransposeUVWx8)(const uint8_t* src, int src_stride, uint8_t* dst_a, int dst_stride_a, uint8_t* dst_b, int dst_stride_b, @@ -247,6 +251,13 @@ void TransposeUV(const uint8_t* src, TransposeUVWx16 = TransposeUVWx16_MSA; } } +#elif defined(HAS_TRANSPOSEUVWX16_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + TransposeUVWx16 = TransposeUVWx16_Any_LSX; + if (IS_ALIGNED(width, 8)) { + TransposeUVWx16 = TransposeUVWx16_LSX; + } + } #else #if defined(HAS_TRANSPOSEUVWX8_NEON) if (TestCpuFlag(kCpuHasNEON)) { @@ -261,14 +272,6 @@ void TransposeUV(const uint8_t* src, } } #endif -#if defined(HAS_TRANSPOSEUVWX8_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - TransposeUVWx8 = TransposeUVWx8_Any_MMI; - if (IS_ALIGNED(width, 4)) { - TransposeUVWx8 = 
TransposeUVWx8_MMI; - } - } -#endif #endif /* defined(HAS_TRANSPOSEUVWX16_MSA) */ #if defined(HAS_TRANSPOSEUVWX16_MSA) @@ -281,6 +284,16 @@ void TransposeUV(const uint8_t* src, dst_b += 16; // Move over 8 columns. i -= 16; } +#elif defined(HAS_TRANSPOSEUVWX16_LSX) + // Work through the source in 8x8 tiles. + while (i >= 16) { + TransposeUVWx16(src, src_stride, dst_a, dst_stride_a, dst_b, dst_stride_b, + width); + src += 16 * src_stride; // Go down 16 rows. + dst_a += 16; // Move over 8 columns. + dst_b += 16; // Move over 8 columns. + i -= 16; + } #else // Work through the source in 8x8 tiles. while (i >= 8) { @@ -300,49 +313,49 @@ void TransposeUV(const uint8_t* src, } LIBYUV_API -void RotateUV90(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width, - int height) { +void SplitRotateUV90(const uint8_t* src, + int src_stride, + uint8_t* dst_a, + int dst_stride_a, + uint8_t* dst_b, + int dst_stride_b, + int width, + int height) { src += src_stride * (height - 1); src_stride = -src_stride; - TransposeUV(src, src_stride, dst_a, dst_stride_a, dst_b, dst_stride_b, width, - height); + SplitTransposeUV(src, src_stride, dst_a, dst_stride_a, dst_b, dst_stride_b, + width, height); } LIBYUV_API -void RotateUV270(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width, - int height) { +void SplitRotateUV270(const uint8_t* src, + int src_stride, + uint8_t* dst_a, + int dst_stride_a, + uint8_t* dst_b, + int dst_stride_b, + int width, + int height) { dst_a += dst_stride_a * (width - 1); dst_b += dst_stride_b * (width - 1); dst_stride_a = -dst_stride_a; dst_stride_b = -dst_stride_b; - TransposeUV(src, src_stride, dst_a, dst_stride_a, dst_b, dst_stride_b, width, - height); + SplitTransposeUV(src, src_stride, dst_a, dst_stride_a, dst_b, dst_stride_b, + width, height); } // Rotate 180 is a horizontal and vertical flip. LIBYUV_API -void RotateUV180(const uint8_t* src, - int src_stride, - uint8_t* dst_a, - int dst_stride_a, - uint8_t* dst_b, - int dst_stride_b, - int width, - int height) { +void SplitRotateUV180(const uint8_t* src, + int src_stride, + uint8_t* dst_a, + int dst_stride_a, + uint8_t* dst_b, + int dst_stride_b, + int width, + int height) { int i; void (*MirrorSplitUVRow)(const uint8_t* src, uint8_t* dst_u, uint8_t* dst_v, int width) = MirrorSplitUVRow_C; @@ -356,16 +369,16 @@ void RotateUV180(const uint8_t* src, MirrorSplitUVRow = MirrorSplitUVRow_SSSE3; } #endif -#if defined(HAS_MIRRORSPLITUVROW_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(width, 8)) { - MirrorSplitUVRow = MirrorSplitUVRow_MMI; - } -#endif #if defined(HAS_MIRRORSPLITUVROW_MSA) if (TestCpuFlag(kCpuHasMSA) && IS_ALIGNED(width, 32)) { MirrorSplitUVRow = MirrorSplitUVRow_MSA; } #endif +#if defined(HAS_MIRRORSPLITUVROW_LSX) + if (TestCpuFlag(kCpuHasLSX) && IS_ALIGNED(width, 32)) { + MirrorSplitUVRow = MirrorSplitUVRow_LSX; + } +#endif dst_a += dst_stride_a * (height - 1); dst_b += dst_stride_b * (height - 1); @@ -378,6 +391,52 @@ void RotateUV180(const uint8_t* src, } } +// Rotate UV and split into planar. +// width and height expected to be half size for NV12 +LIBYUV_API +int SplitRotateUV(const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + enum RotationMode mode) { + if (!src_uv || width <= 0 || height == 0 || !dst_u || !dst_v) { + return -1; + } + + // Negative height means invert the image. 
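A usage sketch for the new SplitRotateUV entry point (the frame size and buffers are assumptions of this example, and it presumes the matching declaration in libyuv/rotate.h): rotating the interleaved UV plane of an NV12 frame straight into planar I420 chroma.

#include <stdint.h>
#include <stdlib.h>
#include "libyuv/rotate.h"

/* frame is 640x360 NV12; src_uv points at its 640x180 interleaved UV plane */
int RotateNv12ChromaBy90(const uint8_t* src_uv, uint8_t** out_u,
                         uint8_t** out_v) {
  const int half_w = 320, half_h = 180;  /* chroma plane dimensions */
  uint8_t* dst_u = (uint8_t*)malloc((size_t)half_w * half_h);
  uint8_t* dst_v = (uint8_t*)malloc((size_t)half_w * half_h);
  /* After kRotate90 the U/V planes are half_h wide and half_w tall. */
  int ret = SplitRotateUV(src_uv, half_w * 2, /* interleaved UV stride */
                          dst_u, half_h, dst_v, half_h,
                          half_w, half_h, kRotate90);
  *out_u = dst_u;
  *out_v = dst_v;
  return ret;
}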
+ if (height < 0) { + height = -height; + src_uv = src_uv + (height - 1) * src_stride_uv; + src_stride_uv = -src_stride_uv; + } + + switch (mode) { + case kRotate0: + SplitUVPlane(src_uv, src_stride_uv, dst_u, dst_stride_u, dst_v, + dst_stride_v, width, height); + return 0; + case kRotate90: + SplitRotateUV90(src_uv, src_stride_uv, dst_u, dst_stride_u, dst_v, + dst_stride_v, width, height); + return 0; + case kRotate270: + SplitRotateUV270(src_uv, src_stride_uv, dst_u, dst_stride_u, dst_v, + dst_stride_v, width, height); + return 0; + case kRotate180: + SplitRotateUV180(src_uv, src_stride_uv, dst_u, dst_stride_u, dst_v, + dst_stride_v, width, height); + return 0; + default: + break; + } + return -1; +} + LIBYUV_API int RotatePlane(const uint8_t* src, int src_stride, @@ -417,6 +476,120 @@ int RotatePlane(const uint8_t* src, return -1; } +LIBYUV_API +void TransposePlane_16(const uint16_t* src, + int src_stride, + uint16_t* dst, + int dst_stride, + int width, + int height) { + int i = height; + // Work across the source in 8x8 tiles + while (i >= 8) { + TransposeWx8_16_C(src, src_stride, dst, dst_stride, width); + src += 8 * src_stride; // Go down 8 rows. + dst += 8; // Move over 8 columns. + i -= 8; + } + + if (i > 0) { + TransposeWxH_16_C(src, src_stride, dst, dst_stride, width, i); + } +} + +static void RotatePlane90_16(const uint16_t* src, + int src_stride, + uint16_t* dst, + int dst_stride, + int width, + int height) { + // Rotate by 90 is a transpose with the source read + // from bottom to top. So set the source pointer to the end + // of the buffer and flip the sign of the source stride. + src += src_stride * (height - 1); + src_stride = -src_stride; + TransposePlane_16(src, src_stride, dst, dst_stride, width, height); +} + +static void RotatePlane270_16(const uint16_t* src, + int src_stride, + uint16_t* dst, + int dst_stride, + int width, + int height) { + // Rotate by 270 is a transpose with the destination written + // from bottom to top. So set the destination pointer to the end + // of the buffer and flip the sign of the destination stride. + dst += dst_stride * (width - 1); + dst_stride = -dst_stride; + TransposePlane_16(src, src_stride, dst, dst_stride, width, height); +} + +static void RotatePlane180_16(const uint16_t* src, + int src_stride, + uint16_t* dst, + int dst_stride, + int width, + int height) { + // Swap top and bottom row and mirror the content. Uses a temporary row. + align_buffer_64_16(row, width); + const uint16_t* src_bot = src + src_stride * (height - 1); + uint16_t* dst_bot = dst + dst_stride * (height - 1); + int half_height = (height + 1) >> 1; + int y; + + // Odd height will harmlessly mirror the middle row twice. + for (y = 0; y < half_height; ++y) { + CopyRow_16_C(src, row, width); // Copy top row into buffer + MirrorRow_16_C(src_bot, dst, width); // Mirror bottom row into top row + MirrorRow_16_C(row, dst_bot, width); // Mirror buffer into bottom row + src += src_stride; + dst += dst_stride; + src_bot -= src_stride; + dst_bot -= dst_stride; + } + free_aligned_buffer_64_16(row); +} + +LIBYUV_API +int RotatePlane_16(const uint16_t* src, + int src_stride, + uint16_t* dst, + int dst_stride, + int width, + int height, + enum RotationMode mode) { + if (!src || width <= 0 || height == 0 || !dst) { + return -1; + } + + // Negative height means invert the image. 
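For readers new to these rotation helpers: rotating by 90 is implemented above as a transpose with the source walked bottom-up, and 270 as a transpose with the destination walked bottom-up. A tiny scalar reference (an illustration, not the library code) showing the clockwise mapping those two steps compose to:

#include <assert.h>
#include <stdint.h>

/* Clockwise 90-degree rotation: dst is `height` wide and `width` tall. */
static void Rotate90_16_Ref(const uint16_t* src, int src_stride, uint16_t* dst,
                            int dst_stride, int width, int height) {
  int x, y;
  for (y = 0; y < height; ++y) {
    for (x = 0; x < width; ++x) {
      dst[x * dst_stride + (height - 1 - y)] = src[y * src_stride + x];
    }
  }
}

int main(void) {
  const uint16_t src[2 * 3] = {1, 2, 3,   /* 3 wide, 2 tall */
                               4, 5, 6};
  uint16_t dst[3 * 2];
  Rotate90_16_Ref(src, 3, dst, 2, 3, 2);
  /* Expected, 2 wide and 3 tall:  4 1 / 5 2 / 6 3 */
  assert(dst[0] == 4 && dst[1] == 1);
  assert(dst[2] == 5 && dst[3] == 2);
  assert(dst[4] == 6 && dst[5] == 3);
  return 0;
}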
+ if (height < 0) { + height = -height; + src = src + (height - 1) * src_stride; + src_stride = -src_stride; + } + + switch (mode) { + case kRotate0: + // copy frame + CopyPlane_16(src, src_stride, dst, dst_stride, width, height); + return 0; + case kRotate90: + RotatePlane90_16(src, src_stride, dst, dst_stride, width, height); + return 0; + case kRotate270: + RotatePlane270_16(src, src_stride, dst, dst_stride, width, height); + return 0; + case kRotate180: + RotatePlane180_16(src, src_stride, dst, dst_stride, width, height); + return 0; + default: + break; + } + return -1; +} + LIBYUV_API int I420Rotate(const uint8_t* src_y, int src_stride_y, @@ -435,8 +608,8 @@ int I420Rotate(const uint8_t* src_y, enum RotationMode mode) { int halfwidth = (width + 1) >> 1; int halfheight = (height + 1) >> 1; - if (!src_y || !src_u || !src_v || width <= 0 || height == 0 || !dst_y || - !dst_u || !dst_v) { + if ((!src_y && dst_y) || !src_u || !src_v || width <= 0 || height == 0 || + !dst_y || !dst_u || !dst_v) { return -1; } @@ -485,6 +658,93 @@ int I420Rotate(const uint8_t* src_y, return -1; } +// I422 has half width x full height UV planes, so rotate by 90 and 270 +// require scaling to maintain 422 subsampling. +LIBYUV_API +int I422Rotate(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + enum RotationMode mode) { + int halfwidth = (width + 1) >> 1; + int halfheight = (height + 1) >> 1; + if (!src_y || !src_u || !src_v || width <= 0 || height == 0 || !dst_y || + !dst_u || !dst_v) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_y = src_y + (height - 1) * src_stride_y; + src_u = src_u + (height - 1) * src_stride_u; + src_v = src_v + (height - 1) * src_stride_v; + src_stride_y = -src_stride_y; + src_stride_u = -src_stride_u; + src_stride_v = -src_stride_v; + } + + switch (mode) { + case kRotate0: + // Copy frame + CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, halfwidth, height); + CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, halfwidth, height); + return 0; + + // Note on temporary Y plane for UV. + // Rotation of UV first fits within the Y destination plane rows. + // Y plane is width x height + // Y plane rotated is height x width + // UV plane is (width / 2) x height + // UV plane rotated is height x (width / 2) + // UV plane rotated+scaled is (height / 2) x width. + // UV plane rotated is a temporary that fits within the Y plane rotated. 
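A worked size check for the note above, using an assumed 640x480 I422 frame: the rotated half-width chroma plane fits inside the Y destination, and the follow-up ScalePlane restores 4:2:2 geometry for the rotated frame.

#include <assert.h>

int main(void) {
  const int width = 640, height = 480;       /* assumed I422 frame size */
  const int halfwidth = (width + 1) >> 1;    /* 320: source U/V width */
  const int halfheight = (height + 1) >> 1;  /* 240: dest U/V width after 90 */
  /* RotatePlane90(src_u, ..., dst_y, ..., halfwidth, height) gives a
   * height x halfwidth (480x320) intermediate held in the Y destination. */
  const int rot_w = height, rot_h = halfwidth;
  /* ScalePlane(..., rot_w, rot_h, dst_u, ..., halfheight, width) then
   * resamples it to 240x640: half the rotated width, twice its height. */
  const int dst_u_w = halfheight, dst_u_h = width;
  assert(rot_w * rot_h <= width * height);  /* scratch fits inside dst_y */
  assert(dst_u_w * 2 == rot_w && dst_u_h == rot_h * 2);
  (void)rot_w; (void)rot_h; (void)dst_u_w; (void)dst_u_h;
  return 0;
}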
+ + case kRotate90: + RotatePlane90(src_u, src_stride_u, dst_y, dst_stride_y, halfwidth, + height); + ScalePlane(dst_y, dst_stride_y, height, halfwidth, dst_u, dst_stride_u, + halfheight, width, kFilterBilinear); + RotatePlane90(src_v, src_stride_v, dst_y, dst_stride_y, halfwidth, + height); + ScalePlane(dst_y, dst_stride_y, height, halfwidth, dst_v, dst_stride_v, + halfheight, width, kFilterLinear); + RotatePlane90(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + return 0; + case kRotate270: + RotatePlane270(src_u, src_stride_u, dst_y, dst_stride_y, halfwidth, + height); + ScalePlane(dst_y, dst_stride_y, height, halfwidth, dst_u, dst_stride_u, + halfheight, width, kFilterBilinear); + RotatePlane270(src_v, src_stride_v, dst_y, dst_stride_y, halfwidth, + height); + ScalePlane(dst_y, dst_stride_y, height, halfwidth, dst_v, dst_stride_v, + halfheight, width, kFilterLinear); + RotatePlane270(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + return 0; + case kRotate180: + RotatePlane180(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + RotatePlane180(src_u, src_stride_u, dst_u, dst_stride_u, halfwidth, + height); + RotatePlane180(src_v, src_stride_v, dst_v, dst_stride_v, halfwidth, + height); + return 0; + default: + break; + } + return -1; +} + LIBYUV_API int I444Rotate(const uint8_t* src_y, int src_stride_y, @@ -500,7 +760,7 @@ int I444Rotate(const uint8_t* src_y, int dst_stride_v, int width, int height, - enum libyuv::RotationMode mode) { + enum RotationMode mode) { if (!src_y || !src_u || !src_v || width <= 0 || height == 0 || !dst_y || !dst_u || !dst_v) { return -1; @@ -518,23 +778,23 @@ int I444Rotate(const uint8_t* src_y, } switch (mode) { - case libyuv::kRotate0: + case kRotate0: // copy frame CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height); CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height); CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height); return 0; - case libyuv::kRotate90: + case kRotate90: RotatePlane90(src_y, src_stride_y, dst_y, dst_stride_y, width, height); RotatePlane90(src_u, src_stride_u, dst_u, dst_stride_u, width, height); RotatePlane90(src_v, src_stride_v, dst_v, dst_stride_v, width, height); return 0; - case libyuv::kRotate270: + case kRotate270: RotatePlane270(src_y, src_stride_y, dst_y, dst_stride_y, width, height); RotatePlane270(src_u, src_stride_u, dst_u, dst_stride_u, width, height); RotatePlane270(src_v, src_stride_v, dst_v, dst_stride_v, width, height); return 0; - case libyuv::kRotate180: + case kRotate180: RotatePlane180(src_y, src_stride_y, dst_y, dst_stride_y, width, height); RotatePlane180(src_u, src_stride_u, dst_u, dst_stride_u, width, height); RotatePlane180(src_v, src_stride_v, dst_v, dst_stride_v, width, height); @@ -584,18 +844,332 @@ int NV12ToI420Rotate(const uint8_t* src_y, width, height); case kRotate90: RotatePlane90(src_y, src_stride_y, dst_y, dst_stride_y, width, height); - RotateUV90(src_uv, src_stride_uv, dst_u, dst_stride_u, dst_v, - dst_stride_v, halfwidth, halfheight); + SplitRotateUV90(src_uv, src_stride_uv, dst_u, dst_stride_u, dst_v, + dst_stride_v, halfwidth, halfheight); return 0; case kRotate270: RotatePlane270(src_y, src_stride_y, dst_y, dst_stride_y, width, height); - RotateUV270(src_uv, src_stride_uv, dst_u, dst_stride_u, dst_v, - dst_stride_v, halfwidth, halfheight); + SplitRotateUV270(src_uv, src_stride_uv, dst_u, dst_stride_u, dst_v, + dst_stride_v, halfwidth, halfheight); return 0; case kRotate180: RotatePlane180(src_y, src_stride_y, dst_y, 
dst_stride_y, width, height); - RotateUV180(src_uv, src_stride_uv, dst_u, dst_stride_u, dst_v, - dst_stride_v, halfwidth, halfheight); + SplitRotateUV180(src_uv, src_stride_uv, dst_u, dst_stride_u, dst_v, + dst_stride_v, halfwidth, halfheight); + return 0; + default: + break; + } + return -1; +} + +static void SplitPixels(const uint8_t* src_u, + int src_pixel_stride_uv, + uint8_t* dst_u, + int width) { + int i; + for (i = 0; i < width; ++i) { + *dst_u = *src_u; + ++dst_u; + src_u += src_pixel_stride_uv; + } +} + +// Convert Android420 to I420 with Rotate +LIBYUV_API +int Android420ToI420Rotate(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + int src_pixel_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + enum RotationMode rotation) { + int y; + const ptrdiff_t vu_off = src_v - src_u; + int halfwidth = (width + 1) >> 1; + int halfheight = (height + 1) >> 1; + if ((!src_y && dst_y) || !src_u || !src_v || !dst_u || !dst_v || width <= 0 || + height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + halfheight = (height + 1) >> 1; + src_y = src_y + (height - 1) * src_stride_y; + src_u = src_u + (halfheight - 1) * src_stride_u; + src_v = src_v + (halfheight - 1) * src_stride_v; + src_stride_y = -src_stride_y; + src_stride_u = -src_stride_u; + src_stride_v = -src_stride_v; + } + + if (dst_y) { + RotatePlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height, + rotation); + } + + // Copy UV planes - I420 + if (src_pixel_stride_uv == 1) { + RotatePlane(src_u, src_stride_u, dst_u, dst_stride_u, halfwidth, halfheight, + rotation); + RotatePlane(src_v, src_stride_v, dst_v, dst_stride_v, halfwidth, halfheight, + rotation); + return 0; + } + // Split UV planes - NV21 + if (src_pixel_stride_uv == 2 && vu_off == -1 && + src_stride_u == src_stride_v) { + SplitRotateUV(src_v, src_stride_v, dst_v, dst_stride_v, dst_u, dst_stride_u, + halfwidth, halfheight, rotation); + return 0; + } + // Split UV planes - NV12 + if (src_pixel_stride_uv == 2 && vu_off == 1 && src_stride_u == src_stride_v) { + SplitRotateUV(src_u, src_stride_u, dst_u, dst_stride_u, dst_v, dst_stride_v, + halfwidth, halfheight, rotation); + return 0; + } + + if (rotation == 0) { + for (y = 0; y < halfheight; ++y) { + SplitPixels(src_u, src_pixel_stride_uv, dst_u, halfwidth); + SplitPixels(src_v, src_pixel_stride_uv, dst_v, halfwidth); + src_u += src_stride_u; + src_v += src_stride_v; + dst_u += dst_stride_u; + dst_v += dst_stride_v; + } + return 0; + } + // unsupported type and/or rotation. + return -1; +} + +LIBYUV_API +int I010Rotate(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height, + enum RotationMode mode) { + int halfwidth = (width + 1) >> 1; + int halfheight = (height + 1) >> 1; + if (!src_y || !src_u || !src_v || width <= 0 || height == 0 || !dst_y || + !dst_u || !dst_v || dst_stride_y < 0) { + return -1; + } + // Negative height means invert the image. 
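The Android420ToI420Rotate path above sorts a YUV_420_888 buffer into three cases from its chroma pixel stride and the U/V pointer offset. A small sketch of that classification in isolation (function and type names here are hypothetical, not libyuv API):

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef enum { kPlanarI420, kPackedNV21, kPackedNV12, kGeneric } ChromaLayout;

/* The real code above additionally requires equal U and V row strides for
 * the two packed cases. */
static ChromaLayout ClassifyChroma(const uint8_t* u, const uint8_t* v,
                                   int pixel_stride) {
  const ptrdiff_t vu_off = v - u;
  if (pixel_stride == 1) return kPlanarI420;                 /* separate planes */
  if (pixel_stride == 2 && vu_off == -1) return kPackedNV21; /* V,U pairs */
  if (pixel_stride == 2 && vu_off == 1) return kPackedNV12;  /* U,V pairs */
  return kGeneric;  /* handled by the SplitPixels fallback, rotation 0 only */
}

int main(void) {
  uint8_t vu[4] = {0};  /* pretend interleaved chroma buffer */
  printf("%d\n", ClassifyChroma(vu + 1, vu, 2));  /* 1 == kPackedNV21 */
  return 0;
}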
+ if (height < 0) { + height = -height; + src_y = src_y + (height - 1) * src_stride_y; + src_u = src_u + (height - 1) * src_stride_u; + src_v = src_v + (height - 1) * src_stride_v; + src_stride_y = -src_stride_y; + src_stride_u = -src_stride_u; + src_stride_v = -src_stride_v; + } + + switch (mode) { + case kRotate0: + // copy frame + return I010Copy(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, dst_stride_u, + dst_v, dst_stride_v, width, height); + case kRotate90: + RotatePlane90_16(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + RotatePlane90_16(src_u, src_stride_u, dst_u, dst_stride_u, halfwidth, + halfheight); + RotatePlane90_16(src_v, src_stride_v, dst_v, dst_stride_v, halfwidth, + halfheight); + return 0; + case kRotate270: + RotatePlane270_16(src_y, src_stride_y, dst_y, dst_stride_y, width, + height); + RotatePlane270_16(src_u, src_stride_u, dst_u, dst_stride_u, halfwidth, + halfheight); + RotatePlane270_16(src_v, src_stride_v, dst_v, dst_stride_v, halfwidth, + halfheight); + return 0; + case kRotate180: + RotatePlane180_16(src_y, src_stride_y, dst_y, dst_stride_y, width, + height); + RotatePlane180_16(src_u, src_stride_u, dst_u, dst_stride_u, halfwidth, + halfheight); + RotatePlane180_16(src_v, src_stride_v, dst_v, dst_stride_v, halfwidth, + halfheight); + return 0; + default: + break; + } + return -1; +} + +// I210 has half width x full height UV planes, so rotate by 90 and 270 +// require scaling to maintain 422 subsampling. +LIBYUV_API +int I210Rotate(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height, + enum RotationMode mode) { + int halfwidth = (width + 1) >> 1; + int halfheight = (height + 1) >> 1; + if (!src_y || !src_u || !src_v || width <= 0 || height == 0 || !dst_y || + !dst_u || !dst_v) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_y = src_y + (height - 1) * src_stride_y; + src_u = src_u + (height - 1) * src_stride_u; + src_v = src_v + (height - 1) * src_stride_v; + src_stride_y = -src_stride_y; + src_stride_u = -src_stride_u; + src_stride_v = -src_stride_v; + } + + switch (mode) { + case kRotate0: + // Copy frame + CopyPlane_16(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + CopyPlane_16(src_u, src_stride_u, dst_u, dst_stride_u, halfwidth, height); + CopyPlane_16(src_v, src_stride_v, dst_v, dst_stride_v, halfwidth, height); + return 0; + + // Note on temporary Y plane for UV. + // Rotation of UV first fits within the Y destination plane rows. + // Y plane is width x height + // Y plane rotated is height x width + // UV plane is (width / 2) x height + // UV plane rotated is height x (width / 2) + // UV plane rotated+scaled is (height / 2) x width. + // UV plane rotated is a temporary that fits within the Y plane rotated. 
+ + case kRotate90: + RotatePlane90_16(src_u, src_stride_u, dst_y, dst_stride_y, halfwidth, + height); + ScalePlane_16(dst_y, dst_stride_y, height, halfwidth, dst_u, dst_stride_u, + halfheight, width, kFilterBilinear); + RotatePlane90_16(src_v, src_stride_v, dst_y, dst_stride_y, halfwidth, + height); + ScalePlane_16(dst_y, dst_stride_y, height, halfwidth, dst_v, dst_stride_v, + halfheight, width, kFilterLinear); + RotatePlane90_16(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + return 0; + case kRotate270: + RotatePlane270_16(src_u, src_stride_u, dst_y, dst_stride_y, halfwidth, + height); + ScalePlane_16(dst_y, dst_stride_y, height, halfwidth, dst_u, dst_stride_u, + halfheight, width, kFilterBilinear); + RotatePlane270_16(src_v, src_stride_v, dst_y, dst_stride_y, halfwidth, + height); + ScalePlane_16(dst_y, dst_stride_y, height, halfwidth, dst_v, dst_stride_v, + halfheight, width, kFilterLinear); + RotatePlane270_16(src_y, src_stride_y, dst_y, dst_stride_y, width, + height); + return 0; + case kRotate180: + RotatePlane180_16(src_y, src_stride_y, dst_y, dst_stride_y, width, + height); + RotatePlane180_16(src_u, src_stride_u, dst_u, dst_stride_u, halfwidth, + height); + RotatePlane180_16(src_v, src_stride_v, dst_v, dst_stride_v, halfwidth, + height); + return 0; + default: + break; + } + return -1; +} + +LIBYUV_API +int I410Rotate(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height, + enum RotationMode mode) { + if (!src_y || !src_u || !src_v || width <= 0 || height == 0 || !dst_y || + !dst_u || !dst_v || dst_stride_y < 0) { + return -1; + } + // Negative height means invert the image. 
+ if (height < 0) { + height = -height; + src_y = src_y + (height - 1) * src_stride_y; + src_u = src_u + (height - 1) * src_stride_u; + src_v = src_v + (height - 1) * src_stride_v; + src_stride_y = -src_stride_y; + src_stride_u = -src_stride_u; + src_stride_v = -src_stride_v; + } + + switch (mode) { + case kRotate0: + // copy frame + CopyPlane_16(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + CopyPlane_16(src_u, src_stride_u, dst_u, dst_stride_u, width, height); + CopyPlane_16(src_v, src_stride_v, dst_v, dst_stride_v, width, height); + return 0; + case kRotate90: + RotatePlane90_16(src_y, src_stride_y, dst_y, dst_stride_y, width, height); + RotatePlane90_16(src_u, src_stride_u, dst_u, dst_stride_u, width, height); + RotatePlane90_16(src_v, src_stride_v, dst_v, dst_stride_v, width, height); + return 0; + case kRotate270: + RotatePlane270_16(src_y, src_stride_y, dst_y, dst_stride_y, width, + height); + RotatePlane270_16(src_u, src_stride_u, dst_u, dst_stride_u, width, + height); + RotatePlane270_16(src_v, src_stride_v, dst_v, dst_stride_v, width, + height); + return 0; + case kRotate180: + RotatePlane180_16(src_y, src_stride_y, dst_y, dst_stride_y, width, + height); + RotatePlane180_16(src_u, src_stride_u, dst_u, dst_stride_u, width, + height); + RotatePlane180_16(src_v, src_stride_v, dst_v, dst_stride_v, width, + height); return 0; default: break; diff --git a/TMessagesProj/jni/third_party/libyuv/source/rotate_any.cc b/TMessagesProj/jni/third_party/libyuv/source/rotate_any.cc index b3baf084d0..88ca78765a 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/rotate_any.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/rotate_any.cc @@ -35,15 +35,15 @@ TANY(TransposeWx8_Any_NEON, TransposeWx8_NEON, 7) #ifdef HAS_TRANSPOSEWX8_SSSE3 TANY(TransposeWx8_Any_SSSE3, TransposeWx8_SSSE3, 7) #endif -#ifdef HAS_TRANSPOSEWX8_MMI -TANY(TransposeWx8_Any_MMI, TransposeWx8_MMI, 7) -#endif #ifdef HAS_TRANSPOSEWX8_FAST_SSSE3 TANY(TransposeWx8_Fast_Any_SSSE3, TransposeWx8_Fast_SSSE3, 15) #endif #ifdef HAS_TRANSPOSEWX16_MSA TANY(TransposeWx16_Any_MSA, TransposeWx16_MSA, 15) #endif +#ifdef HAS_TRANSPOSEWX16_LSX +TANY(TransposeWx16_Any_LSX, TransposeWx16_LSX, 15) +#endif #undef TANY #define TUVANY(NAMEANY, TPOS_SIMD, MASK) \ @@ -65,12 +65,12 @@ TUVANY(TransposeUVWx8_Any_NEON, TransposeUVWx8_NEON, 7) #ifdef HAS_TRANSPOSEUVWX8_SSE2 TUVANY(TransposeUVWx8_Any_SSE2, TransposeUVWx8_SSE2, 7) #endif -#ifdef HAS_TRANSPOSEUVWX8_MMI -TUVANY(TransposeUVWx8_Any_MMI, TransposeUVWx8_MMI, 7) -#endif #ifdef HAS_TRANSPOSEUVWX16_MSA TUVANY(TransposeUVWx16_Any_MSA, TransposeUVWx16_MSA, 7) #endif +#ifdef HAS_TRANSPOSEUVWX16_LSX +TUVANY(TransposeUVWx16_Any_LSX, TransposeUVWx16_LSX, 7) +#endif #undef TUVANY #ifdef __cplusplus diff --git a/TMessagesProj/jni/third_party/libyuv/source/rotate_argb.cc b/TMessagesProj/jni/third_party/libyuv/source/rotate_argb.cc index ae65388601..28226210e1 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/rotate_argb.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/rotate_argb.cc @@ -8,11 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "libyuv/rotate.h" +#include "libyuv/rotate_argb.h" #include "libyuv/convert.h" #include "libyuv/cpu_id.h" #include "libyuv/planar_functions.h" +#include "libyuv/rotate.h" #include "libyuv/row.h" #include "libyuv/scale_row.h" /* for ScaleARGBRowDownEven_ */ @@ -52,14 +53,6 @@ static int ARGBTranspose(const uint8_t* src_argb, } } #endif -#if defined(HAS_SCALEARGBROWDOWNEVEN_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ScaleARGBRowDownEven = ScaleARGBRowDownEven_Any_MMI; - if (IS_ALIGNED(height, 4)) { // Width of dest. - ScaleARGBRowDownEven = ScaleARGBRowDownEven_MMI; - } - } -#endif #if defined(HAS_SCALEARGBROWDOWNEVEN_MSA) if (TestCpuFlag(kCpuHasMSA)) { ScaleARGBRowDownEven = ScaleARGBRowDownEven_Any_MSA; @@ -68,6 +61,14 @@ static int ARGBTranspose(const uint8_t* src_argb, } } #endif +#if defined(HAS_SCALEARGBROWDOWNEVEN_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ScaleARGBRowDownEven = ScaleARGBRowDownEven_Any_LSX; + if (IS_ALIGNED(height, 4)) { // Width of dest. + ScaleARGBRowDownEven = ScaleARGBRowDownEven_LSX; + } + } +#endif for (i = 0; i < width; ++i) { // column of source to row of dest. ScaleARGBRowDownEven(src_argb, 0, src_pixel_step, dst_argb, height); @@ -147,14 +148,6 @@ static int ARGBRotate180(const uint8_t* src_argb, } } #endif -#if defined(HAS_ARGBMIRRORROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ARGBMirrorRow = ARGBMirrorRow_Any_MMI; - if (IS_ALIGNED(width, 2)) { - ARGBMirrorRow = ARGBMirrorRow_MMI; - } - } -#endif #if defined(HAS_ARGBMIRRORROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ARGBMirrorRow = ARGBMirrorRow_Any_MSA; @@ -163,6 +156,14 @@ static int ARGBRotate180(const uint8_t* src_argb, } } #endif +#if defined(HAS_ARGBMIRRORROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + ARGBMirrorRow = ARGBMirrorRow_Any_LASX; + if (IS_ALIGNED(width, 16)) { + ARGBMirrorRow = ARGBMirrorRow_LASX; + } + } +#endif #if defined(HAS_COPYROW_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { CopyRow = IS_ALIGNED(width * 4, 32) ? 
CopyRow_SSE2 : CopyRow_Any_SSE2; diff --git a/TMessagesProj/jni/third_party/libyuv/source/rotate_common.cc b/TMessagesProj/jni/third_party/libyuv/source/rotate_common.cc index ff212adebc..2617c01b27 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/rotate_common.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/rotate_common.cc @@ -94,8 +94,74 @@ void TransposeUVWxH_C(const uint8_t* src, for (i = 0; i < width * 2; i += 2) { int j; for (j = 0; j < height; ++j) { - dst_a[j + ((i >> 1) * dst_stride_a)] = src[i + (j * src_stride)]; - dst_b[j + ((i >> 1) * dst_stride_b)] = src[i + (j * src_stride) + 1]; + dst_a[((i >> 1) * dst_stride_a) + j] = src[i + (j * src_stride)]; + dst_b[((i >> 1) * dst_stride_b) + j] = src[i + (j * src_stride) + 1]; + } + } +} + +void TransposeWx8_16_C(const uint16_t* src, + int src_stride, + uint16_t* dst, + int dst_stride, + int width) { + int i; + for (i = 0; i < width; ++i) { + dst[0] = src[0 * src_stride]; + dst[1] = src[1 * src_stride]; + dst[2] = src[2 * src_stride]; + dst[3] = src[3 * src_stride]; + dst[4] = src[4 * src_stride]; + dst[5] = src[5 * src_stride]; + dst[6] = src[6 * src_stride]; + dst[7] = src[7 * src_stride]; + ++src; + dst += dst_stride; + } +} + +void TransposeUVWx8_16_C(const uint16_t* src, + int src_stride, + uint16_t* dst_a, + int dst_stride_a, + uint16_t* dst_b, + int dst_stride_b, + int width) { + int i; + for (i = 0; i < width; ++i) { + dst_a[0] = src[0 * src_stride + 0]; + dst_b[0] = src[0 * src_stride + 1]; + dst_a[1] = src[1 * src_stride + 0]; + dst_b[1] = src[1 * src_stride + 1]; + dst_a[2] = src[2 * src_stride + 0]; + dst_b[2] = src[2 * src_stride + 1]; + dst_a[3] = src[3 * src_stride + 0]; + dst_b[3] = src[3 * src_stride + 1]; + dst_a[4] = src[4 * src_stride + 0]; + dst_b[4] = src[4 * src_stride + 1]; + dst_a[5] = src[5 * src_stride + 0]; + dst_b[5] = src[5 * src_stride + 1]; + dst_a[6] = src[6 * src_stride + 0]; + dst_b[6] = src[6 * src_stride + 1]; + dst_a[7] = src[7 * src_stride + 0]; + dst_b[7] = src[7 * src_stride + 1]; + src += 2; + dst_a += dst_stride_a; + dst_b += dst_stride_b; + } +} + +void TransposeWxH_16_C(const uint16_t* src, + int src_stride, + uint16_t* dst, + int dst_stride, + int width, + int height) { + int i; + for (i = 0; i < width; ++i) { + int j; + for (j = 0; j < height; ++j) { + dst[i * dst_stride + j] = src[j * src_stride + i]; } } } diff --git a/TMessagesProj/jni/third_party/libyuv/source/rotate_gcc.cc b/TMessagesProj/jni/third_party/libyuv/source/rotate_gcc.cc index fd359d4ae6..1a3f8cbbda 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/rotate_gcc.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/rotate_gcc.cc @@ -17,8 +17,7 @@ extern "C" { #endif // This module is for GCC x86 and x64. -#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) +#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__)) // Transpose 8x8. 32 or 64 bit, but not NaCL for 64 bit. #if defined(HAS_TRANSPOSEWX8_SSSE3) diff --git a/TMessagesProj/jni/third_party/libyuv/source/rotate_lsx.cc b/TMessagesProj/jni/third_party/libyuv/source/rotate_lsx.cc new file mode 100644 index 0000000000..94a2b91cd8 --- /dev/null +++ b/TMessagesProj/jni/third_party/libyuv/source/rotate_lsx.cc @@ -0,0 +1,243 @@ +/* + * Copyright 2022 The LibYuv Project Authors. All rights reserved. 
+ * + * Copyright (c) 2022 Loongson Technology Corporation Limited + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "libyuv/rotate_row.h" + +#if !defined(LIBYUV_DISABLE_LSX) && defined(__loongarch_sx) +#include "libyuv/loongson_intrinsics.h" + +#ifdef __cplusplus +namespace libyuv { +extern "C" { +#endif + +#define ILVLH_B(in0, in1, in2, in3, out0, out1, out2, out3) \ + { \ + DUP2_ARG2(__lsx_vilvl_b, in1, in0, in3, in2, out0, out2); \ + DUP2_ARG2(__lsx_vilvh_b, in1, in0, in3, in2, out1, out3); \ + } + +#define ILVLH_H(in0, in1, in2, in3, out0, out1, out2, out3) \ + { \ + DUP2_ARG2(__lsx_vilvl_h, in1, in0, in3, in2, out0, out2); \ + DUP2_ARG2(__lsx_vilvh_h, in1, in0, in3, in2, out1, out3); \ + } + +#define ILVLH_W(in0, in1, in2, in3, out0, out1, out2, out3) \ + { \ + DUP2_ARG2(__lsx_vilvl_w, in1, in0, in3, in2, out0, out2); \ + DUP2_ARG2(__lsx_vilvh_w, in1, in0, in3, in2, out1, out3); \ + } + +#define ILVLH_D(in0, in1, in2, in3, out0, out1, out2, out3) \ + { \ + DUP2_ARG2(__lsx_vilvl_d, in1, in0, in3, in2, out0, out2); \ + DUP2_ARG2(__lsx_vilvh_d, in1, in0, in3, in2, out1, out3); \ + } + +#define LSX_ST_4(_dst0, _dst1, _dst2, _dst3, _dst, _stride, _stride2, \ + _stride3, _stride4) \ + { \ + __lsx_vst(_dst0, _dst, 0); \ + __lsx_vstx(_dst1, _dst, _stride); \ + __lsx_vstx(_dst2, _dst, _stride2); \ + __lsx_vstx(_dst3, _dst, _stride3); \ + _dst += _stride4; \ + } + +#define LSX_ST_2(_dst0, _dst1, _dst, _stride, _stride2) \ + { \ + __lsx_vst(_dst0, _dst, 0); \ + __lsx_vstx(_dst1, _dst, _stride); \ + _dst += _stride2; \ + } + +void TransposeWx16_C(const uint8_t* src, + int src_stride, + uint8_t* dst, + int dst_stride, + int width) { + TransposeWx8_C(src, src_stride, dst, dst_stride, width); + TransposeWx8_C((src + 8 * src_stride), src_stride, (dst + 8), dst_stride, + width); +} + +void TransposeUVWx16_C(const uint8_t* src, + int src_stride, + uint8_t* dst_a, + int dst_stride_a, + uint8_t* dst_b, + int dst_stride_b, + int width) { + TransposeUVWx8_C(src, src_stride, dst_a, dst_stride_a, dst_b, dst_stride_b, + width); + TransposeUVWx8_C((src + 8 * src_stride), src_stride, (dst_a + 8), + dst_stride_a, (dst_b + 8), dst_stride_b, width); +} + +void TransposeWx16_LSX(const uint8_t* src, + int src_stride, + uint8_t* dst, + int dst_stride, + int width) { + int x; + int len = width / 16; + uint8_t* s; + int src_stride2 = src_stride << 1; + int src_stride3 = src_stride + src_stride2; + int src_stride4 = src_stride2 << 1; + int dst_stride2 = dst_stride << 1; + int dst_stride3 = dst_stride + dst_stride2; + int dst_stride4 = dst_stride2 << 1; + __m128i src0, src1, src2, src3, dst0, dst1, dst2, dst3; + __m128i tmp0, tmp1, tmp2, tmp3; + __m128i reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7; + __m128i res0, res1, res2, res3, res4, res5, res6, res7, res8, res9; + + for (x = 0; x < len; x++) { + s = (uint8_t*)src; + src0 = __lsx_vld(s, 0); + src1 = __lsx_vldx(s, src_stride); + src2 = __lsx_vldx(s, src_stride2); + src3 = __lsx_vldx(s, src_stride3); + s += src_stride4; + ILVLH_B(src0, src1, src2, src3, tmp0, tmp1, tmp2, tmp3); + ILVLH_H(tmp0, tmp2, tmp1, tmp3, reg0, reg1, reg2, reg3); + src0 = __lsx_vld(s, 0); + src1 = __lsx_vldx(s, src_stride); + src2 = __lsx_vldx(s, src_stride2); + src3 = __lsx_vldx(s, src_stride3); + 
s += src_stride4; + ILVLH_B(src0, src1, src2, src3, tmp0, tmp1, tmp2, tmp3); + ILVLH_H(tmp0, tmp2, tmp1, tmp3, reg4, reg5, reg6, reg7); + ILVLH_W(reg0, reg4, reg1, reg5, res0, res1, res2, res3); + ILVLH_W(reg2, reg6, reg3, reg7, res4, res5, res6, res7); + src0 = __lsx_vld(s, 0); + src1 = __lsx_vldx(s, src_stride); + src2 = __lsx_vldx(s, src_stride2); + src3 = __lsx_vldx(s, src_stride3); + s += src_stride4; + ILVLH_B(src0, src1, src2, src3, tmp0, tmp1, tmp2, tmp3); + ILVLH_H(tmp0, tmp2, tmp1, tmp3, reg0, reg1, reg2, reg3); + src0 = __lsx_vld(s, 0); + src1 = __lsx_vldx(s, src_stride); + src2 = __lsx_vldx(s, src_stride2); + src3 = __lsx_vldx(s, src_stride3); + s += src_stride4; + ILVLH_B(src0, src1, src2, src3, tmp0, tmp1, tmp2, tmp3); + ILVLH_H(tmp0, tmp2, tmp1, tmp3, reg4, reg5, reg6, reg7); + res8 = __lsx_vilvl_w(reg4, reg0); + res9 = __lsx_vilvh_w(reg4, reg0); + ILVLH_D(res0, res8, res1, res9, dst0, dst1, dst2, dst3); + LSX_ST_4(dst0, dst1, dst2, dst3, dst, dst_stride, dst_stride2, dst_stride3, + dst_stride4); + res8 = __lsx_vilvl_w(reg5, reg1); + res9 = __lsx_vilvh_w(reg5, reg1); + ILVLH_D(res2, res8, res3, res9, dst0, dst1, dst2, dst3); + LSX_ST_4(dst0, dst1, dst2, dst3, dst, dst_stride, dst_stride2, dst_stride3, + dst_stride4); + res8 = __lsx_vilvl_w(reg6, reg2); + res9 = __lsx_vilvh_w(reg6, reg2); + ILVLH_D(res4, res8, res5, res9, dst0, dst1, dst2, dst3); + LSX_ST_4(dst0, dst1, dst2, dst3, dst, dst_stride, dst_stride2, dst_stride3, + dst_stride4); + res8 = __lsx_vilvl_w(reg7, reg3); + res9 = __lsx_vilvh_w(reg7, reg3); + ILVLH_D(res6, res8, res7, res9, dst0, dst1, dst2, dst3); + LSX_ST_4(dst0, dst1, dst2, dst3, dst, dst_stride, dst_stride2, dst_stride3, + dst_stride4); + src += 16; + } +} + +void TransposeUVWx16_LSX(const uint8_t* src, + int src_stride, + uint8_t* dst_a, + int dst_stride_a, + uint8_t* dst_b, + int dst_stride_b, + int width) { + int x; + int len = width / 8; + uint8_t* s; + int src_stride2 = src_stride << 1; + int src_stride3 = src_stride + src_stride2; + int src_stride4 = src_stride2 << 1; + int dst_stride_a2 = dst_stride_a << 1; + int dst_stride_b2 = dst_stride_b << 1; + __m128i src0, src1, src2, src3, dst0, dst1, dst2, dst3; + __m128i tmp0, tmp1, tmp2, tmp3; + __m128i reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7; + __m128i res0, res1, res2, res3, res4, res5, res6, res7, res8, res9; + + for (x = 0; x < len; x++) { + s = (uint8_t*)src; + src0 = __lsx_vld(s, 0); + src1 = __lsx_vldx(s, src_stride); + src2 = __lsx_vldx(s, src_stride2); + src3 = __lsx_vldx(s, src_stride3); + s += src_stride4; + ILVLH_B(src0, src1, src2, src3, tmp0, tmp1, tmp2, tmp3); + ILVLH_H(tmp0, tmp2, tmp1, tmp3, reg0, reg1, reg2, reg3); + src0 = __lsx_vld(s, 0); + src1 = __lsx_vldx(s, src_stride); + src2 = __lsx_vldx(s, src_stride2); + src3 = __lsx_vldx(s, src_stride3); + s += src_stride4; + ILVLH_B(src0, src1, src2, src3, tmp0, tmp1, tmp2, tmp3); + ILVLH_H(tmp0, tmp2, tmp1, tmp3, reg4, reg5, reg6, reg7); + ILVLH_W(reg0, reg4, reg1, reg5, res0, res1, res2, res3); + ILVLH_W(reg2, reg6, reg3, reg7, res4, res5, res6, res7); + src0 = __lsx_vld(s, 0); + src1 = __lsx_vldx(s, src_stride); + src2 = __lsx_vldx(s, src_stride2); + src3 = __lsx_vldx(s, src_stride3); + s += src_stride4; + ILVLH_B(src0, src1, src2, src3, tmp0, tmp1, tmp2, tmp3); + ILVLH_H(tmp0, tmp2, tmp1, tmp3, reg0, reg1, reg2, reg3); + src0 = __lsx_vld(s, 0); + src1 = __lsx_vldx(s, src_stride); + src2 = __lsx_vldx(s, src_stride2); + src3 = __lsx_vldx(s, src_stride3); + s += src_stride4; + ILVLH_B(src0, src1, src2, src3, tmp0, tmp1, tmp2, 
tmp3); + ILVLH_H(tmp0, tmp2, tmp1, tmp3, reg4, reg5, reg6, reg7); + res8 = __lsx_vilvl_w(reg4, reg0); + res9 = __lsx_vilvh_w(reg4, reg0); + ILVLH_D(res0, res8, res1, res9, dst0, dst1, dst2, dst3); + LSX_ST_2(dst0, dst2, dst_a, dst_stride_a, dst_stride_a2); + LSX_ST_2(dst1, dst3, dst_b, dst_stride_b, dst_stride_b2); + res8 = __lsx_vilvl_w(reg5, reg1); + res9 = __lsx_vilvh_w(reg5, reg1); + ILVLH_D(res2, res8, res3, res9, dst0, dst1, dst2, dst3); + LSX_ST_2(dst0, dst2, dst_a, dst_stride_a, dst_stride_a2); + LSX_ST_2(dst1, dst3, dst_b, dst_stride_b, dst_stride_b2); + res8 = __lsx_vilvl_w(reg6, reg2); + res9 = __lsx_vilvh_w(reg6, reg2); + ILVLH_D(res4, res8, res5, res9, dst0, dst1, dst2, dst3); + LSX_ST_2(dst0, dst2, dst_a, dst_stride_a, dst_stride_a2); + LSX_ST_2(dst1, dst3, dst_b, dst_stride_b, dst_stride_b2); + res8 = __lsx_vilvl_w(reg7, reg3); + res9 = __lsx_vilvh_w(reg7, reg3); + ILVLH_D(res6, res8, res7, res9, dst0, dst1, dst2, dst3); + LSX_ST_2(dst0, dst2, dst_a, dst_stride_a, dst_stride_a2); + LSX_ST_2(dst1, dst3, dst_b, dst_stride_b, dst_stride_b2); + src += 16; + } +} + +#ifdef __cplusplus +} // extern "C" +} // namespace libyuv +#endif + +#endif // !defined(LIBYUV_DISABLE_LSX) && defined(__loongarch_sx) diff --git a/TMessagesProj/jni/third_party/libyuv/source/rotate_neon64.cc b/TMessagesProj/jni/third_party/libyuv/source/rotate_neon64.cc index 43c1581731..ea1cf82c29 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/rotate_neon64.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/rotate_neon64.cc @@ -201,13 +201,13 @@ void TransposeWx8_NEON(const uint8_t* src, "4: \n" - : "=&r"(src_temp), // %0 - "+r"(src), // %1 - "+r"(dst), // %2 - "+r"(width) // %3 - : "r"(&kVTbl4x4Transpose), // %4 - "r"(static_cast(src_stride)), // %5 - "r"(static_cast(dst_stride)) // %6 + : "=&r"(src_temp), // %0 + "+r"(src), // %1 + "+r"(dst), // %2 + "+r"(width) // %3 + : "r"(&kVTbl4x4Transpose), // %4 + "r"((ptrdiff_t)src_stride), // %5 + "r"((ptrdiff_t)dst_stride) // %6 : "memory", "cc", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23"); } @@ -423,15 +423,15 @@ void TransposeUVWx8_NEON(const uint8_t* src, "4: \n" - : "=&r"(src_temp), // %0 - "+r"(src), // %1 - "+r"(dst_a), // %2 - "+r"(dst_b), // %3 - "+r"(width) // %4 - : "r"(static_cast(src_stride)), // %5 - "r"(static_cast(dst_stride_a)), // %6 - "r"(static_cast(dst_stride_b)), // %7 - "r"(&kVTbl4x4TransposeDi) // %8 + : "=&r"(src_temp), // %0 + "+r"(src), // %1 + "+r"(dst_a), // %2 + "+r"(dst_b), // %3 + "+r"(width) // %4 + : "r"((ptrdiff_t)src_stride), // %5 + "r"((ptrdiff_t)dst_stride_a), // %6 + "r"((ptrdiff_t)dst_stride_b), // %7 + "r"(&kVTbl4x4TransposeDi) // %8 : "memory", "cc", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", "v30", "v31"); } diff --git a/TMessagesProj/jni/third_party/libyuv/source/rotate_win.cc b/TMessagesProj/jni/third_party/libyuv/source/rotate_win.cc index e887dd525c..a78873f843 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/rotate_win.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/rotate_win.cc @@ -16,8 +16,9 @@ namespace libyuv { extern "C" { #endif -// This module is for 32 bit Visual C x86 and clangcl -#if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER) +// This module is for 32 bit Visual C x86 +#if !defined(LIBYUV_DISABLE_X86) && defined(_MSC_VER) && \ + !defined(__clang__) && defined(_M_IX86) __declspec(naked) void TransposeWx8_SSSE3(const uint8_t* src, int 
src_stride, diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_any.cc b/TMessagesProj/jni/third_party/libyuv/source/row_any.cc index 7216373bcd..012f0fb297 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_any.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/row_any.cc @@ -30,6 +30,39 @@ extern "C" { // Subsampled source needs to be increase by 1 of not even. #define SS(width, shift) (((width) + (1 << (shift)) - 1) >> (shift)) +// Any 4 planes to 1 +#define ANY41(NAMEANY, ANY_SIMD, UVSHIFT, DUVSHIFT, BPP, MASK) \ + void NAMEANY(const uint8_t* y_buf, const uint8_t* u_buf, \ + const uint8_t* v_buf, const uint8_t* a_buf, uint8_t* dst_ptr, \ + int width) { \ + SIMD_ALIGNED(uint8_t temp[64 * 5]); \ + memset(temp, 0, 64 * 4); /* for msan */ \ + int r = width & MASK; \ + int n = width & ~MASK; \ + if (n > 0) { \ + ANY_SIMD(y_buf, u_buf, v_buf, a_buf, dst_ptr, n); \ + } \ + memcpy(temp, y_buf + n, r); \ + memcpy(temp + 64, u_buf + (n >> UVSHIFT), SS(r, UVSHIFT)); \ + memcpy(temp + 128, v_buf + (n >> UVSHIFT), SS(r, UVSHIFT)); \ + memcpy(temp + 192, a_buf + n, r); \ + ANY_SIMD(temp, temp + 64, temp + 128, temp + 192, temp + 256, MASK + 1); \ + memcpy(dst_ptr + (n >> DUVSHIFT) * BPP, temp + 256, \ + SS(r, DUVSHIFT) * BPP); \ + } + +#ifdef HAS_MERGEARGBROW_SSE2 +ANY41(MergeARGBRow_Any_SSE2, MergeARGBRow_SSE2, 0, 0, 4, 7) +#endif +#ifdef HAS_MERGEARGBROW_AVX2 +ANY41(MergeARGBRow_Any_AVX2, MergeARGBRow_AVX2, 0, 0, 4, 15) +#endif +#ifdef HAS_MERGEARGBROW_NEON +ANY41(MergeARGBRow_Any_NEON, MergeARGBRow_NEON, 0, 0, 4, 15) +#endif + +// Note that odd width replication includes 444 due to implementation +// on arm that subsamples 444 to 422 internally. // Any 4 planes to 1 with yuvconstants #define ANY41C(NAMEANY, ANY_SIMD, UVSHIFT, DUVSHIFT, BPP, MASK) \ void NAMEANY(const uint8_t* y_buf, const uint8_t* u_buf, \ @@ -46,29 +79,163 @@ extern "C" { memcpy(temp + 64, u_buf + (n >> UVSHIFT), SS(r, UVSHIFT)); \ memcpy(temp + 128, v_buf + (n >> UVSHIFT), SS(r, UVSHIFT)); \ memcpy(temp + 192, a_buf + n, r); \ + if (width & 1) { \ + temp[64 + SS(r, UVSHIFT)] = temp[64 + SS(r, UVSHIFT) - 1]; \ + temp[128 + SS(r, UVSHIFT)] = temp[128 + SS(r, UVSHIFT) - 1]; \ + } \ ANY_SIMD(temp, temp + 64, temp + 128, temp + 192, temp + 256, \ yuvconstants, MASK + 1); \ memcpy(dst_ptr + (n >> DUVSHIFT) * BPP, temp + 256, \ SS(r, DUVSHIFT) * BPP); \ } +#ifdef HAS_I444ALPHATOARGBROW_SSSE3 +ANY41C(I444AlphaToARGBRow_Any_SSSE3, I444AlphaToARGBRow_SSSE3, 0, 0, 4, 7) +#endif +#ifdef HAS_I444ALPHATOARGBROW_AVX2 +ANY41C(I444AlphaToARGBRow_Any_AVX2, I444AlphaToARGBRow_AVX2, 0, 0, 4, 15) +#endif #ifdef HAS_I422ALPHATOARGBROW_SSSE3 ANY41C(I422AlphaToARGBRow_Any_SSSE3, I422AlphaToARGBRow_SSSE3, 1, 0, 4, 7) #endif #ifdef HAS_I422ALPHATOARGBROW_AVX2 ANY41C(I422AlphaToARGBRow_Any_AVX2, I422AlphaToARGBRow_AVX2, 1, 0, 4, 15) #endif +#ifdef HAS_I444ALPHATOARGBROW_NEON +ANY41C(I444AlphaToARGBRow_Any_NEON, I444AlphaToARGBRow_NEON, 0, 0, 4, 7) +#endif #ifdef HAS_I422ALPHATOARGBROW_NEON ANY41C(I422AlphaToARGBRow_Any_NEON, I422AlphaToARGBRow_NEON, 1, 0, 4, 7) #endif +#ifdef HAS_I444ALPHATOARGBROW_MSA +ANY41C(I444AlphaToARGBRow_Any_MSA, I444AlphaToARGBRow_MSA, 0, 0, 4, 7) +#endif #ifdef HAS_I422ALPHATOARGBROW_MSA ANY41C(I422AlphaToARGBRow_Any_MSA, I422AlphaToARGBRow_MSA, 1, 0, 4, 7) #endif -#ifdef HAS_I422ALPHATOARGBROW_MMI -ANY41C(I422AlphaToARGBRow_Any_MMI, I422AlphaToARGBRow_MMI, 1, 0, 4, 7) +#ifdef HAS_I422ALPHATOARGBROW_LASX +ANY41C(I422AlphaToARGBRow_Any_LASX, I422AlphaToARGBRow_LASX, 1, 0, 4, 15) #endif #undef ANY41C +// Any 
4 planes to 1 plane of 8 bit with yuvconstants +#define ANY41CT(NAMEANY, ANY_SIMD, UVSHIFT, DUVSHIFT, T, SBPP, BPP, MASK) \ + void NAMEANY(const T* y_buf, const T* u_buf, const T* v_buf, const T* a_buf, \ + uint8_t* dst_ptr, const struct YuvConstants* yuvconstants, \ + int width) { \ + SIMD_ALIGNED(T temp[16 * 4]); \ + SIMD_ALIGNED(uint8_t out[64]); \ + memset(temp, 0, 16 * 4 * SBPP); /* for YUY2 and msan */ \ + int r = width & MASK; \ + int n = width & ~MASK; \ + if (n > 0) { \ + ANY_SIMD(y_buf, u_buf, v_buf, a_buf, dst_ptr, yuvconstants, n); \ + } \ + memcpy(temp, y_buf + n, r * SBPP); \ + memcpy(temp + 16, u_buf + (n >> UVSHIFT), SS(r, UVSHIFT) * SBPP); \ + memcpy(temp + 32, v_buf + (n >> UVSHIFT), SS(r, UVSHIFT) * SBPP); \ + memcpy(temp + 48, a_buf + n, r * SBPP); \ + ANY_SIMD(temp, temp + 16, temp + 32, temp + 48, out, yuvconstants, \ + MASK + 1); \ + memcpy(dst_ptr + (n >> DUVSHIFT) * BPP, out, SS(r, DUVSHIFT) * BPP); \ + } + +#ifdef HAS_I210ALPHATOARGBROW_SSSE3 +ANY41CT(I210AlphaToARGBRow_Any_SSSE3, + I210AlphaToARGBRow_SSSE3, + 1, + 0, + uint16_t, + 2, + 4, + 7) +#endif + +#ifdef HAS_I210ALPHATOARGBROW_AVX2 +ANY41CT(I210AlphaToARGBRow_Any_AVX2, + I210AlphaToARGBRow_AVX2, + 1, + 0, + uint16_t, + 2, + 4, + 15) +#endif + +#ifdef HAS_I410ALPHATOARGBROW_SSSE3 +ANY41CT(I410AlphaToARGBRow_Any_SSSE3, + I410AlphaToARGBRow_SSSE3, + 0, + 0, + uint16_t, + 2, + 4, + 7) +#endif + +#ifdef HAS_I410ALPHATOARGBROW_AVX2 +ANY41CT(I410AlphaToARGBRow_Any_AVX2, + I410AlphaToARGBRow_AVX2, + 0, + 0, + uint16_t, + 2, + 4, + 15) +#endif + +#undef ANY41CT + +// Any 4 planes to 1 plane with parameter +#define ANY41PT(NAMEANY, ANY_SIMD, STYPE, SBPP, DTYPE, BPP, MASK) \ + void NAMEANY(const STYPE* r_buf, const STYPE* g_buf, const STYPE* b_buf, \ + const STYPE* a_buf, DTYPE* dst_ptr, int depth, int width) { \ + SIMD_ALIGNED(STYPE temp[16 * 4]); \ + SIMD_ALIGNED(DTYPE out[64]); \ + memset(temp, 0, 16 * 4 * SBPP); /* for YUY2 and msan */ \ + int r = width & MASK; \ + int n = width & ~MASK; \ + if (n > 0) { \ + ANY_SIMD(r_buf, g_buf, b_buf, a_buf, dst_ptr, depth, n); \ + } \ + memcpy(temp, r_buf + n, r * SBPP); \ + memcpy(temp + 16, g_buf + n, r * SBPP); \ + memcpy(temp + 32, b_buf + n, r * SBPP); \ + memcpy(temp + 48, a_buf + n, r * SBPP); \ + ANY_SIMD(temp, temp + 16, temp + 32, temp + 48, out, depth, MASK + 1); \ + memcpy((uint8_t*)dst_ptr + n * BPP, out, r * BPP); \ + } + +#ifdef HAS_MERGEAR64ROW_AVX2 +ANY41PT(MergeAR64Row_Any_AVX2, MergeAR64Row_AVX2, uint16_t, 2, uint16_t, 8, 15) +#endif + +#ifdef HAS_MERGEAR64ROW_NEON +ANY41PT(MergeAR64Row_Any_NEON, MergeAR64Row_NEON, uint16_t, 2, uint16_t, 8, 7) +#endif + +#ifdef HAS_MERGEARGB16TO8ROW_AVX2 +ANY41PT(MergeARGB16To8Row_Any_AVX2, + MergeARGB16To8Row_AVX2, + uint16_t, + 2, + uint8_t, + 4, + 15) +#endif + +#ifdef HAS_MERGEARGB16TO8ROW_NEON +ANY41PT(MergeARGB16To8Row_Any_NEON, + MergeARGB16To8Row_NEON, + uint16_t, + 2, + uint8_t, + 4, + 7) +#endif + +#undef ANY41PT + // Any 3 planes to 1. 
#define ANY31(NAMEANY, ANY_SIMD, UVSHIFT, DUVSHIFT, BPP, MASK) \ void NAMEANY(const uint8_t* y_buf, const uint8_t* u_buf, \ @@ -95,8 +262,14 @@ ANY31(MergeRGBRow_Any_SSSE3, MergeRGBRow_SSSE3, 0, 0, 3, 15) #ifdef HAS_MERGERGBROW_NEON ANY31(MergeRGBRow_Any_NEON, MergeRGBRow_NEON, 0, 0, 3, 15) #endif -#ifdef HAS_MERGERGBROW_MMI -ANY31(MergeRGBRow_Any_MMI, MergeRGBRow_MMI, 0, 0, 3, 7) +#ifdef HAS_MERGEXRGBROW_SSE2 +ANY31(MergeXRGBRow_Any_SSE2, MergeXRGBRow_SSE2, 0, 0, 4, 7) +#endif +#ifdef HAS_MERGEXRGBROW_AVX2 +ANY31(MergeXRGBRow_Any_AVX2, MergeXRGBRow_AVX2, 0, 0, 4, 15) +#endif +#ifdef HAS_MERGEXRGBROW_NEON +ANY31(MergeXRGBRow_Any_NEON, MergeXRGBRow_NEON, 0, 0, 4, 15) #endif #ifdef HAS_I422TOYUY2ROW_SSE2 ANY31(I422ToYUY2Row_Any_SSE2, I422ToYUY2Row_SSE2, 1, 1, 4, 15) @@ -112,8 +285,8 @@ ANY31(I422ToYUY2Row_Any_NEON, I422ToYUY2Row_NEON, 1, 1, 4, 15) #ifdef HAS_I422TOYUY2ROW_MSA ANY31(I422ToYUY2Row_Any_MSA, I422ToYUY2Row_MSA, 1, 1, 4, 31) #endif -#ifdef HAS_I422TOYUY2ROW_MMI -ANY31(I422ToYUY2Row_Any_MMI, I422ToYUY2Row_MMI, 1, 1, 4, 7) +#ifdef HAS_I422TOYUY2ROW_LASX +ANY31(I422ToYUY2Row_Any_LASX, I422ToYUY2Row_LASX, 1, 1, 4, 31) #endif #ifdef HAS_I422TOUYVYROW_NEON ANY31(I422ToUYVYRow_Any_NEON, I422ToUYVYRow_NEON, 1, 1, 4, 15) @@ -121,8 +294,8 @@ ANY31(I422ToUYVYRow_Any_NEON, I422ToUYVYRow_NEON, 1, 1, 4, 15) #ifdef HAS_I422TOUYVYROW_MSA ANY31(I422ToUYVYRow_Any_MSA, I422ToUYVYRow_MSA, 1, 1, 4, 31) #endif -#ifdef HAS_I422TOUYVYROW_MMI -ANY31(I422ToUYVYRow_Any_MMI, I422ToUYVYRow_MMI, 1, 1, 4, 7) +#ifdef HAS_I422TOUYVYROW_LASX +ANY31(I422ToUYVYRow_Any_LASX, I422ToUYVYRow_LASX, 1, 1, 4, 31) #endif #ifdef HAS_BLENDPLANEROW_AVX2 ANY31(BlendPlaneRow_Any_AVX2, BlendPlaneRow_AVX2, 0, 0, 1, 31) @@ -130,9 +303,6 @@ ANY31(BlendPlaneRow_Any_AVX2, BlendPlaneRow_AVX2, 0, 0, 1, 31) #ifdef HAS_BLENDPLANEROW_SSSE3 ANY31(BlendPlaneRow_Any_SSSE3, BlendPlaneRow_SSSE3, 0, 0, 1, 7) #endif -#ifdef HAS_BLENDPLANEROW_MMI -ANY31(BlendPlaneRow_Any_MMI, BlendPlaneRow_MMI, 0, 0, 1, 7) -#endif #undef ANY31 // Note that odd width replication includes 444 due to implementation @@ -165,6 +335,21 @@ ANY31(BlendPlaneRow_Any_MMI, BlendPlaneRow_MMI, 0, 0, 1, 7) #ifdef HAS_I422TOARGBROW_SSSE3 ANY31C(I422ToARGBRow_Any_SSSE3, I422ToARGBRow_SSSE3, 1, 0, 4, 7) #endif +#ifdef HAS_I422TORGBAROW_SSSE3 +ANY31C(I422ToRGBARow_Any_SSSE3, I422ToRGBARow_SSSE3, 1, 0, 4, 7) +#endif +#ifdef HAS_I422TOARGB4444ROW_SSSE3 +ANY31C(I422ToARGB4444Row_Any_SSSE3, I422ToARGB4444Row_SSSE3, 1, 0, 2, 7) +#endif +#ifdef HAS_I422TOARGB1555ROW_SSSE3 +ANY31C(I422ToARGB1555Row_Any_SSSE3, I422ToARGB1555Row_SSSE3, 1, 0, 2, 7) +#endif +#ifdef HAS_I422TORGB565ROW_SSSE3 +ANY31C(I422ToRGB565Row_Any_SSSE3, I422ToRGB565Row_SSSE3, 1, 0, 2, 7) +#endif +#ifdef HAS_I422TORGB24ROW_SSSE3 +ANY31C(I422ToRGB24Row_Any_SSSE3, I422ToRGB24Row_SSSE3, 1, 0, 3, 15) +#endif #ifdef HAS_I422TOAR30ROW_SSSE3 ANY31C(I422ToAR30Row_Any_SSSE3, I422ToAR30Row_SSSE3, 1, 0, 4, 7) #endif @@ -173,24 +358,28 @@ ANY31C(I422ToAR30Row_Any_AVX2, I422ToAR30Row_AVX2, 1, 0, 4, 15) #endif #ifdef HAS_I444TOARGBROW_SSSE3 ANY31C(I444ToARGBRow_Any_SSSE3, I444ToARGBRow_SSSE3, 0, 0, 4, 7) -ANY31C(I422ToRGBARow_Any_SSSE3, I422ToRGBARow_SSSE3, 1, 0, 4, 7) -ANY31C(I422ToARGB4444Row_Any_SSSE3, I422ToARGB4444Row_SSSE3, 1, 0, 2, 7) -ANY31C(I422ToARGB1555Row_Any_SSSE3, I422ToARGB1555Row_SSSE3, 1, 0, 2, 7) -ANY31C(I422ToRGB565Row_Any_SSSE3, I422ToRGB565Row_SSSE3, 1, 0, 2, 7) -ANY31C(I422ToRGB24Row_Any_SSSE3, I422ToRGB24Row_SSSE3, 1, 0, 3, 15) -#endif // HAS_I444TOARGBROW_SSSE3 +#endif +#ifdef HAS_I444TORGB24ROW_SSSE3 
+ANY31C(I444ToRGB24Row_Any_SSSE3, I444ToRGB24Row_SSSE3, 0, 0, 3, 15) +#endif #ifdef HAS_I422TORGB24ROW_AVX2 ANY31C(I422ToRGB24Row_Any_AVX2, I422ToRGB24Row_AVX2, 1, 0, 3, 31) #endif #ifdef HAS_I422TOARGBROW_AVX2 ANY31C(I422ToARGBRow_Any_AVX2, I422ToARGBRow_AVX2, 1, 0, 4, 15) #endif +#ifdef HAS_I422TOARGBROW_AVX512BW +ANY31C(I422ToARGBRow_Any_AVX512BW, I422ToARGBRow_AVX512BW, 1, 0, 4, 31) +#endif #ifdef HAS_I422TORGBAROW_AVX2 ANY31C(I422ToRGBARow_Any_AVX2, I422ToRGBARow_AVX2, 1, 0, 4, 15) #endif #ifdef HAS_I444TOARGBROW_AVX2 ANY31C(I444ToARGBRow_Any_AVX2, I444ToARGBRow_AVX2, 0, 0, 4, 15) #endif +#ifdef HAS_I444TORGB24ROW_AVX2 +ANY31C(I444ToRGB24Row_Any_AVX2, I444ToRGB24Row_AVX2, 0, 0, 3, 31) +#endif #ifdef HAS_I422TOARGB4444ROW_AVX2 ANY31C(I422ToARGB4444Row_Any_AVX2, I422ToARGB4444Row_AVX2, 1, 0, 2, 15) #endif @@ -200,6 +389,9 @@ ANY31C(I422ToARGB1555Row_Any_AVX2, I422ToARGB1555Row_AVX2, 1, 0, 2, 15) #ifdef HAS_I422TORGB565ROW_AVX2 ANY31C(I422ToRGB565Row_Any_AVX2, I422ToRGB565Row_AVX2, 1, 0, 2, 15) #endif +#ifdef HAS_I444TORGB24ROW_NEON +ANY31C(I444ToRGB24Row_Any_NEON, I444ToRGB24Row_NEON, 0, 0, 3, 7) +#endif #ifdef HAS_I422TOARGBROW_NEON ANY31C(I444ToARGBRow_Any_NEON, I444ToARGBRow_NEON, 0, 0, 4, 7) ANY31C(I422ToARGBRow_Any_NEON, I422ToARGBRow_NEON, 1, 0, 4, 7) @@ -218,14 +410,16 @@ ANY31C(I422ToARGB4444Row_Any_MSA, I422ToARGB4444Row_MSA, 1, 0, 2, 7) ANY31C(I422ToARGB1555Row_Any_MSA, I422ToARGB1555Row_MSA, 1, 0, 2, 7) ANY31C(I422ToRGB565Row_Any_MSA, I422ToRGB565Row_MSA, 1, 0, 2, 7) #endif -#ifdef HAS_I422TOARGBROW_MMI -ANY31C(I444ToARGBRow_Any_MMI, I444ToARGBRow_MMI, 0, 0, 4, 7) -ANY31C(I422ToARGBRow_Any_MMI, I422ToARGBRow_MMI, 1, 0, 4, 7) -ANY31C(I422ToRGB24Row_Any_MMI, I422ToRGB24Row_MMI, 1, 0, 3, 15) -ANY31C(I422ToARGB4444Row_Any_MMI, I422ToARGB4444Row_MMI, 1, 0, 2, 7) -ANY31C(I422ToARGB1555Row_Any_MMI, I422ToARGB1555Row_MMI, 1, 0, 2, 7) -ANY31C(I422ToRGB565Row_Any_MMI, I422ToRGB565Row_MMI, 1, 0, 2, 7) -ANY31C(I422ToRGBARow_Any_MMI, I422ToRGBARow_MMI, 1, 0, 4, 7) +#ifdef HAS_I422TOARGBROW_LASX +ANY31C(I422ToARGBRow_Any_LASX, I422ToARGBRow_LASX, 1, 0, 4, 31) +ANY31C(I422ToRGBARow_Any_LASX, I422ToRGBARow_LASX, 1, 0, 4, 31) +ANY31C(I422ToRGB24Row_Any_LASX, I422ToRGB24Row_LASX, 1, 0, 3, 31) +ANY31C(I422ToRGB565Row_Any_LASX, I422ToRGB565Row_LASX, 1, 0, 2, 31) +ANY31C(I422ToARGB4444Row_Any_LASX, I422ToARGB4444Row_LASX, 1, 0, 2, 31) +ANY31C(I422ToARGB1555Row_Any_LASX, I422ToARGB1555Row_LASX, 1, 0, 2, 31) +#endif +#ifdef HAS_I444TOARGBROW_LSX +ANY31C(I444ToARGBRow_Any_LSX, I444ToARGBRow_LSX, 0, 0, 4, 15) #endif #undef ANY31C @@ -262,27 +456,112 @@ ANY31CT(I210ToARGBRow_Any_AVX2, I210ToARGBRow_AVX2, 1, 0, uint16_t, 2, 4, 15) #ifdef HAS_I210TOAR30ROW_AVX2 ANY31CT(I210ToAR30Row_Any_AVX2, I210ToAR30Row_AVX2, 1, 0, uint16_t, 2, 4, 15) #endif -#ifdef HAS_I210TOARGBROW_MMI -ANY31CT(I210ToARGBRow_Any_MMI, I210ToARGBRow_MMI, 1, 0, uint16_t, 2, 4, 7) +#ifdef HAS_I410TOAR30ROW_SSSE3 +ANY31CT(I410ToAR30Row_Any_SSSE3, I410ToAR30Row_SSSE3, 0, 0, uint16_t, 2, 4, 7) +#endif +#ifdef HAS_I410TOARGBROW_SSSE3 +ANY31CT(I410ToARGBRow_Any_SSSE3, I410ToARGBRow_SSSE3, 0, 0, uint16_t, 2, 4, 7) +#endif +#ifdef HAS_I410TOARGBROW_AVX2 +ANY31CT(I410ToARGBRow_Any_AVX2, I410ToARGBRow_AVX2, 0, 0, uint16_t, 2, 4, 15) +#endif +#ifdef HAS_I410TOAR30ROW_AVX2 +ANY31CT(I410ToAR30Row_Any_AVX2, I410ToAR30Row_AVX2, 0, 0, uint16_t, 2, 4, 15) +#endif +#ifdef HAS_I212TOAR30ROW_SSSE3 +ANY31CT(I212ToAR30Row_Any_SSSE3, I212ToAR30Row_SSSE3, 1, 0, uint16_t, 2, 4, 7) +#endif +#ifdef HAS_I212TOARGBROW_SSSE3 +ANY31CT(I212ToARGBRow_Any_SSSE3, 
I212ToARGBRow_SSSE3, 1, 0, uint16_t, 2, 4, 7) +#endif +#ifdef HAS_I212TOARGBROW_AVX2 +ANY31CT(I212ToARGBRow_Any_AVX2, I212ToARGBRow_AVX2, 1, 0, uint16_t, 2, 4, 15) +#endif +#ifdef HAS_I212TOAR30ROW_AVX2 +ANY31CT(I212ToAR30Row_Any_AVX2, I212ToAR30Row_AVX2, 1, 0, uint16_t, 2, 4, 15) #endif #undef ANY31CT +// Any 3 planes to 1 plane with parameter +#define ANY31PT(NAMEANY, ANY_SIMD, STYPE, SBPP, DTYPE, BPP, MASK) \ + void NAMEANY(const STYPE* r_buf, const STYPE* g_buf, const STYPE* b_buf, \ + DTYPE* dst_ptr, int depth, int width) { \ + SIMD_ALIGNED(STYPE temp[16 * 3]); \ + SIMD_ALIGNED(DTYPE out[64]); \ + memset(temp, 0, 16 * 3 * SBPP); /* for YUY2 and msan */ \ + int r = width & MASK; \ + int n = width & ~MASK; \ + if (n > 0) { \ + ANY_SIMD(r_buf, g_buf, b_buf, dst_ptr, depth, n); \ + } \ + memcpy(temp, r_buf + n, r * SBPP); \ + memcpy(temp + 16, g_buf + n, r * SBPP); \ + memcpy(temp + 32, b_buf + n, r * SBPP); \ + ANY_SIMD(temp, temp + 16, temp + 32, out, depth, MASK + 1); \ + memcpy((uint8_t*)dst_ptr + n * BPP, out, r * BPP); \ + } + +#ifdef HAS_MERGEXR30ROW_AVX2 +ANY31PT(MergeXR30Row_Any_AVX2, MergeXR30Row_AVX2, uint16_t, 2, uint8_t, 4, 15) +#endif + +#ifdef HAS_MERGEXR30ROW_NEON +ANY31PT(MergeXR30Row_Any_NEON, MergeXR30Row_NEON, uint16_t, 2, uint8_t, 4, 3) +ANY31PT(MergeXR30Row_10_Any_NEON, + MergeXR30Row_10_NEON, + uint16_t, + 2, + uint8_t, + 4, + 3) +#endif + +#ifdef HAS_MERGEXR64ROW_AVX2 +ANY31PT(MergeXR64Row_Any_AVX2, MergeXR64Row_AVX2, uint16_t, 2, uint16_t, 8, 15) +#endif + +#ifdef HAS_MERGEXR64ROW_NEON +ANY31PT(MergeXR64Row_Any_NEON, MergeXR64Row_NEON, uint16_t, 2, uint16_t, 8, 7) +#endif + +#ifdef HAS_MERGEXRGB16TO8ROW_AVX2 +ANY31PT(MergeXRGB16To8Row_Any_AVX2, + MergeXRGB16To8Row_AVX2, + uint16_t, + 2, + uint8_t, + 4, + 15) +#endif + +#ifdef HAS_MERGEXRGB16TO8ROW_NEON +ANY31PT(MergeXRGB16To8Row_Any_NEON, + MergeXRGB16To8Row_NEON, + uint16_t, + 2, + uint8_t, + 4, + 7) +#endif + +#undef ANY31PT + // Any 2 planes to 1. #define ANY21(NAMEANY, ANY_SIMD, UVSHIFT, SBPP, SBPP2, BPP, MASK) \ void NAMEANY(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, \ int width) { \ - SIMD_ALIGNED(uint8_t temp[64 * 3]); \ - memset(temp, 0, 64 * 2); /* for msan */ \ + SIMD_ALIGNED(uint8_t temp[128 * 3]); \ + memset(temp, 0, 128 * 2); /* for msan */ \ int r = width & MASK; \ int n = width & ~MASK; \ if (n > 0) { \ ANY_SIMD(y_buf, uv_buf, dst_ptr, n); \ } \ memcpy(temp, y_buf + n * SBPP, r * SBPP); \ - memcpy(temp + 64, uv_buf + (n >> UVSHIFT) * SBPP2, \ + memcpy(temp + 128, uv_buf + (n >> UVSHIFT) * SBPP2, \ SS(r, UVSHIFT) * SBPP2); \ - ANY_SIMD(temp, temp + 64, temp + 128, MASK + 1); \ - memcpy(dst_ptr + n * BPP, temp + 128, r * BPP); \ + ANY_SIMD(temp, temp + 128, temp + 256, MASK + 1); \ + memcpy(dst_ptr + n * BPP, temp + 256, r * BPP); \ } // Merge functions. 
@@ -298,12 +577,15 @@ ANY21(MergeUVRow_Any_NEON, MergeUVRow_NEON, 0, 1, 1, 2, 15) #ifdef HAS_MERGEUVROW_MSA ANY21(MergeUVRow_Any_MSA, MergeUVRow_MSA, 0, 1, 1, 2, 15) #endif -#ifdef HAS_MERGEUVROW_MMI -ANY21(MergeUVRow_Any_MMI, MergeUVRow_MMI, 0, 1, 1, 2, 7) +#ifdef HAS_MERGEUVROW_LSX +ANY21(MergeUVRow_Any_LSX, MergeUVRow_LSX, 0, 1, 1, 2, 15) #endif #ifdef HAS_NV21TOYUV24ROW_NEON ANY21(NV21ToYUV24Row_Any_NEON, NV21ToYUV24Row_NEON, 1, 1, 2, 3, 15) #endif +#ifdef HAS_NV21TOYUV24ROW_SSSE3 +ANY21(NV21ToYUV24Row_Any_SSSE3, NV21ToYUV24Row_SSSE3, 1, 1, 2, 3, 15) +#endif #ifdef HAS_NV21TOYUV24ROW_AVX2 ANY21(NV21ToYUV24Row_Any_AVX2, NV21ToYUV24Row_AVX2, 1, 1, 2, 3, 31) #endif @@ -338,20 +620,20 @@ ANY21(ARGBSubtractRow_Any_NEON, ARGBSubtractRow_NEON, 0, 4, 4, 4, 7) #ifdef HAS_ARGBMULTIPLYROW_MSA ANY21(ARGBMultiplyRow_Any_MSA, ARGBMultiplyRow_MSA, 0, 4, 4, 4, 3) #endif -#ifdef HAS_ARGBMULTIPLYROW_MMI -ANY21(ARGBMultiplyRow_Any_MMI, ARGBMultiplyRow_MMI, 0, 4, 4, 4, 1) +#ifdef HAS_ARGBMULTIPLYROW_LASX +ANY21(ARGBMultiplyRow_Any_LASX, ARGBMultiplyRow_LASX, 0, 4, 4, 4, 7) #endif #ifdef HAS_ARGBADDROW_MSA ANY21(ARGBAddRow_Any_MSA, ARGBAddRow_MSA, 0, 4, 4, 4, 7) #endif -#ifdef HAS_ARGBADDROW_MMI -ANY21(ARGBAddRow_Any_MMI, ARGBAddRow_MMI, 0, 4, 4, 4, 1) +#ifdef HAS_ARGBADDROW_LASX +ANY21(ARGBAddRow_Any_LASX, ARGBAddRow_LASX, 0, 4, 4, 4, 7) #endif #ifdef HAS_ARGBSUBTRACTROW_MSA ANY21(ARGBSubtractRow_Any_MSA, ARGBSubtractRow_MSA, 0, 4, 4, 4, 7) #endif -#ifdef HAS_ARGBSUBTRACTROW_MMI -ANY21(ARGBSubtractRow_Any_MMI, ARGBSubtractRow_MMI, 0, 4, 4, 4, 1) +#ifdef HAS_ARGBSUBTRACTROW_LASX +ANY21(ARGBSubtractRow_Any_LASX, ARGBSubtractRow_LASX, 0, 4, 4, 4, 7) #endif #ifdef HAS_SOBELROW_SSE2 ANY21(SobelRow_Any_SSE2, SobelRow_SSE2, 0, 1, 1, 4, 15) @@ -362,8 +644,8 @@ ANY21(SobelRow_Any_NEON, SobelRow_NEON, 0, 1, 1, 4, 7) #ifdef HAS_SOBELROW_MSA ANY21(SobelRow_Any_MSA, SobelRow_MSA, 0, 1, 1, 4, 15) #endif -#ifdef HAS_SOBELROW_MMI -ANY21(SobelRow_Any_MMI, SobelRow_MMI, 0, 1, 1, 4, 7) +#ifdef HAS_SOBELROW_LSX +ANY21(SobelRow_Any_LSX, SobelRow_LSX, 0, 1, 1, 4, 15) #endif #ifdef HAS_SOBELTOPLANEROW_SSE2 ANY21(SobelToPlaneRow_Any_SSE2, SobelToPlaneRow_SSE2, 0, 1, 1, 1, 15) @@ -374,8 +656,8 @@ ANY21(SobelToPlaneRow_Any_NEON, SobelToPlaneRow_NEON, 0, 1, 1, 1, 15) #ifdef HAS_SOBELTOPLANEROW_MSA ANY21(SobelToPlaneRow_Any_MSA, SobelToPlaneRow_MSA, 0, 1, 1, 1, 31) #endif -#ifdef HAS_SOBELTOPLANEROW_MMI -ANY21(SobelToPlaneRow_Any_MMI, SobelToPlaneRow_MMI, 0, 1, 1, 1, 7) +#ifdef HAS_SOBELTOPLANEROW_LSX +ANY21(SobelToPlaneRow_Any_LSX, SobelToPlaneRow_LSX, 0, 1, 1, 1, 31) #endif #ifdef HAS_SOBELXYROW_SSE2 ANY21(SobelXYRow_Any_SSE2, SobelXYRow_SSE2, 0, 1, 1, 4, 15) @@ -386,11 +668,40 @@ ANY21(SobelXYRow_Any_NEON, SobelXYRow_NEON, 0, 1, 1, 4, 7) #ifdef HAS_SOBELXYROW_MSA ANY21(SobelXYRow_Any_MSA, SobelXYRow_MSA, 0, 1, 1, 4, 15) #endif -#ifdef HAS_SOBELXYROW_MMI -ANY21(SobelXYRow_Any_MMI, SobelXYRow_MMI, 0, 1, 1, 4, 7) +#ifdef HAS_SOBELXYROW_LSX +ANY21(SobelXYRow_Any_LSX, SobelXYRow_LSX, 0, 1, 1, 4, 15) #endif #undef ANY21 +// Any 2 planes to 1 with stride +// width is measured in source pixels. 
4 bytes contains 2 pixels +#define ANY21S(NAMEANY, ANY_SIMD, SBPP, BPP, MASK) \ + void NAMEANY(const uint8_t* src_yuy2, int stride_yuy2, uint8_t* dst_uv, \ + int width) { \ + SIMD_ALIGNED(uint8_t temp[32 * 3]); \ + memset(temp, 0, 32 * 2); /* for msan */ \ + int awidth = (width + 1) / 2; \ + int r = awidth & MASK; \ + int n = awidth & ~MASK; \ + if (n > 0) { \ + ANY_SIMD(src_yuy2, stride_yuy2, dst_uv, n * 2); \ + } \ + memcpy(temp, src_yuy2 + n * SBPP, r * SBPP); \ + memcpy(temp + 32, src_yuy2 + stride_yuy2 + n * SBPP, r * SBPP); \ + ANY_SIMD(temp, 32, temp + 64, MASK + 1); \ + memcpy(dst_uv + n * BPP, temp + 64, r * BPP); \ + } + +#ifdef HAS_YUY2TONVUVROW_NEON +ANY21S(YUY2ToNVUVRow_Any_NEON, YUY2ToNVUVRow_NEON, 4, 2, 7) +#endif +#ifdef HAS_YUY2TONVUVROW_SSE2 +ANY21S(YUY2ToNVUVRow_Any_SSE2, YUY2ToNVUVRow_SSE2, 4, 2, 7) +#endif +#ifdef HAS_YUY2TONVUVROW_AVX2 +ANY21S(YUY2ToNVUVRow_Any_AVX2, YUY2ToNVUVRow_AVX2, 4, 2, 15) +#endif + // Any 2 planes to 1 with yuvconstants #define ANY21C(NAMEANY, ANY_SIMD, UVSHIFT, SBPP, SBPP2, BPP, MASK) \ void NAMEANY(const uint8_t* y_buf, const uint8_t* uv_buf, uint8_t* dst_ptr, \ @@ -422,8 +733,11 @@ ANY21C(NV12ToARGBRow_Any_NEON, NV12ToARGBRow_NEON, 1, 1, 2, 4, 7) #ifdef HAS_NV12TOARGBROW_MSA ANY21C(NV12ToARGBRow_Any_MSA, NV12ToARGBRow_MSA, 1, 1, 2, 4, 7) #endif -#ifdef HAS_NV12TOARGBROW_MMI -ANY21C(NV12ToARGBRow_Any_MMI, NV12ToARGBRow_MMI, 1, 1, 2, 4, 7) +#ifdef HAS_NV12TOARGBROW_LSX +ANY21C(NV12ToARGBRow_Any_LSX, NV12ToARGBRow_LSX, 1, 1, 2, 4, 7) +#endif +#ifdef HAS_NV12TOARGBROW_LASX +ANY21C(NV12ToARGBRow_Any_LASX, NV12ToARGBRow_LASX, 1, 1, 2, 4, 15) #endif #ifdef HAS_NV21TOARGBROW_SSSE3 ANY21C(NV21ToARGBRow_Any_SSSE3, NV21ToARGBRow_SSSE3, 1, 1, 2, 4, 7) @@ -437,8 +751,11 @@ ANY21C(NV21ToARGBRow_Any_NEON, NV21ToARGBRow_NEON, 1, 1, 2, 4, 7) #ifdef HAS_NV21TOARGBROW_MSA ANY21C(NV21ToARGBRow_Any_MSA, NV21ToARGBRow_MSA, 1, 1, 2, 4, 7) #endif -#ifdef HAS_NV21TOARGBROW_MMI -ANY21C(NV21ToARGBRow_Any_MMI, NV21ToARGBRow_MMI, 1, 1, 2, 4, 7) +#ifdef HAS_NV21TOARGBROW_LSX +ANY21C(NV21ToARGBRow_Any_LSX, NV21ToARGBRow_LSX, 1, 1, 2, 4, 7) +#endif +#ifdef HAS_NV21TOARGBROW_LASX +ANY21C(NV21ToARGBRow_Any_LASX, NV21ToARGBRow_LASX, 1, 1, 2, 4, 15) #endif #ifdef HAS_NV12TORGB24ROW_NEON ANY21C(NV12ToRGB24Row_Any_NEON, NV12ToRGB24Row_NEON, 1, 1, 2, 3, 7) @@ -449,9 +766,6 @@ ANY21C(NV21ToRGB24Row_Any_NEON, NV21ToRGB24Row_NEON, 1, 1, 2, 3, 7) #ifdef HAS_NV12TORGB24ROW_SSSE3 ANY21C(NV12ToRGB24Row_Any_SSSE3, NV12ToRGB24Row_SSSE3, 1, 1, 2, 3, 15) #endif -#ifdef HAS_NV12TORGB24ROW_MMI -ANY21C(NV12ToRGB24Row_Any_MMI, NV12ToRGB24Row_MMI, 1, 1, 2, 3, 7) -#endif #ifdef HAS_NV21TORGB24ROW_SSSE3 ANY21C(NV21ToRGB24Row_Any_SSSE3, NV21ToRGB24Row_SSSE3, 1, 1, 2, 3, 15) #endif @@ -461,9 +775,6 @@ ANY21C(NV12ToRGB24Row_Any_AVX2, NV12ToRGB24Row_AVX2, 1, 1, 2, 3, 31) #ifdef HAS_NV21TORGB24ROW_AVX2 ANY21C(NV21ToRGB24Row_Any_AVX2, NV21ToRGB24Row_AVX2, 1, 1, 2, 3, 31) #endif -#ifdef HAS_NV21TORGB24ROW_MMI -ANY21C(NV21ToRGB24Row_Any_MMI, NV21ToRGB24Row_MMI, 1, 1, 2, 3, 7) -#endif #ifdef HAS_NV12TORGB565ROW_SSSE3 ANY21C(NV12ToRGB565Row_Any_SSSE3, NV12ToRGB565Row_SSSE3, 1, 1, 2, 2, 7) #endif @@ -476,11 +787,85 @@ ANY21C(NV12ToRGB565Row_Any_NEON, NV12ToRGB565Row_NEON, 1, 1, 2, 2, 7) #ifdef HAS_NV12TORGB565ROW_MSA ANY21C(NV12ToRGB565Row_Any_MSA, NV12ToRGB565Row_MSA, 1, 1, 2, 2, 7) #endif -#ifdef HAS_NV12TORGB565ROW_MMI -ANY21C(NV12ToRGB565Row_Any_MMI, NV12ToRGB565Row_MMI, 1, 1, 2, 2, 7) +#ifdef HAS_NV12TORGB565ROW_LSX +ANY21C(NV12ToRGB565Row_Any_LSX, NV12ToRGB565Row_LSX, 1, 1, 2, 2, 7) +#endif +#ifdef 
HAS_NV12TORGB565ROW_LASX +ANY21C(NV12ToRGB565Row_Any_LASX, NV12ToRGB565Row_LASX, 1, 1, 2, 2, 15) #endif #undef ANY21C +// Any 2 planes of 16 bit to 1 with yuvconstants +#define ANY21CT(NAMEANY, ANY_SIMD, UVSHIFT, DUVSHIFT, T, SBPP, BPP, MASK) \ + void NAMEANY(const T* y_buf, const T* uv_buf, uint8_t* dst_ptr, \ + const struct YuvConstants* yuvconstants, int width) { \ + SIMD_ALIGNED(T temp[16 * 3]); \ + SIMD_ALIGNED(uint8_t out[64]); \ + memset(temp, 0, 16 * 3 * SBPP); /* for YUY2 and msan */ \ + int r = width & MASK; \ + int n = width & ~MASK; \ + if (n > 0) { \ + ANY_SIMD(y_buf, uv_buf, dst_ptr, yuvconstants, n); \ + } \ + memcpy(temp, y_buf + n, r * SBPP); \ + memcpy(temp + 16, uv_buf + 2 * (n >> UVSHIFT), SS(r, UVSHIFT) * SBPP * 2); \ + ANY_SIMD(temp, temp + 16, out, yuvconstants, MASK + 1); \ + memcpy(dst_ptr + (n >> DUVSHIFT) * BPP, out, SS(r, DUVSHIFT) * BPP); \ + } + +#ifdef HAS_P210TOAR30ROW_SSSE3 +ANY21CT(P210ToAR30Row_Any_SSSE3, P210ToAR30Row_SSSE3, 1, 0, uint16_t, 2, 4, 7) +#endif +#ifdef HAS_P210TOARGBROW_SSSE3 +ANY21CT(P210ToARGBRow_Any_SSSE3, P210ToARGBRow_SSSE3, 1, 0, uint16_t, 2, 4, 7) +#endif +#ifdef HAS_P210TOARGBROW_AVX2 +ANY21CT(P210ToARGBRow_Any_AVX2, P210ToARGBRow_AVX2, 1, 0, uint16_t, 2, 4, 15) +#endif +#ifdef HAS_P210TOAR30ROW_AVX2 +ANY21CT(P210ToAR30Row_Any_AVX2, P210ToAR30Row_AVX2, 1, 0, uint16_t, 2, 4, 15) +#endif +#ifdef HAS_P410TOAR30ROW_SSSE3 +ANY21CT(P410ToAR30Row_Any_SSSE3, P410ToAR30Row_SSSE3, 0, 0, uint16_t, 2, 4, 7) +#endif +#ifdef HAS_P410TOARGBROW_SSSE3 +ANY21CT(P410ToARGBRow_Any_SSSE3, P410ToARGBRow_SSSE3, 0, 0, uint16_t, 2, 4, 7) +#endif +#ifdef HAS_P410TOARGBROW_AVX2 +ANY21CT(P410ToARGBRow_Any_AVX2, P410ToARGBRow_AVX2, 0, 0, uint16_t, 2, 4, 15) +#endif +#ifdef HAS_P410TOAR30ROW_AVX2 +ANY21CT(P410ToAR30Row_Any_AVX2, P410ToAR30Row_AVX2, 0, 0, uint16_t, 2, 4, 15) +#endif + +#undef ANY21CT + +// Any 2 16 bit planes with parameter to 1 +#define ANY21PT(NAMEANY, ANY_SIMD, T, BPP, MASK) \ + void NAMEANY(const T* src_u, const T* src_v, T* dst_uv, int depth, \ + int width) { \ + SIMD_ALIGNED(T temp[16 * 4]); \ + memset(temp, 0, 16 * 4 * BPP); /* for msan */ \ + int r = width & MASK; \ + int n = width & ~MASK; \ + if (n > 0) { \ + ANY_SIMD(src_u, src_v, dst_uv, depth, n); \ + } \ + memcpy(temp, src_u + n, r * BPP); \ + memcpy(temp + 16, src_v + n, r * BPP); \ + ANY_SIMD(temp, temp + 16, temp + 32, depth, MASK + 1); \ + memcpy(dst_uv + n * 2, temp + 32, r * BPP * 2); \ + } + +#ifdef HAS_MERGEUVROW_16_AVX2 +ANY21PT(MergeUVRow_16_Any_AVX2, MergeUVRow_16_AVX2, uint16_t, 2, 15) +#endif +#ifdef HAS_MERGEUVROW_16_NEON +ANY21PT(MergeUVRow_16_Any_NEON, MergeUVRow_16_NEON, uint16_t, 2, 7) +#endif + +#undef ANY21CT + // Any 1 to 1. 
#define ANY11(NAMEANY, ANY_SIMD, UVSHIFT, SBPP, BPP, MASK) \ void NAMEANY(const uint8_t* src_ptr, uint8_t* dst_ptr, int width) { \ @@ -569,7 +954,7 @@ ANY11(ARGB1555ToARGBRow_Any_AVX2, ARGB1555ToARGBRow_AVX2, 0, 2, 4, 15) ANY11(ARGB4444ToARGBRow_Any_AVX2, ARGB4444ToARGBRow_AVX2, 0, 2, 4, 15) #endif #if defined(HAS_ARGBTORGB24ROW_NEON) -ANY11(ARGBToRGB24Row_Any_NEON, ARGBToRGB24Row_NEON, 0, 4, 3, 7) +ANY11(ARGBToRGB24Row_Any_NEON, ARGBToRGB24Row_NEON, 0, 4, 3, 15) ANY11(ARGBToRAWRow_Any_NEON, ARGBToRAWRow_NEON, 0, 4, 3, 7) ANY11(ARGBToRGB565Row_Any_NEON, ARGBToRGB565Row_NEON, 0, 4, 2, 7) ANY11(ARGBToARGB1555Row_Any_NEON, ARGBToARGB1555Row_NEON, 0, 4, 2, 7) @@ -584,13 +969,15 @@ ANY11(ARGBToARGB1555Row_Any_MSA, ARGBToARGB1555Row_MSA, 0, 4, 2, 7) ANY11(ARGBToARGB4444Row_Any_MSA, ARGBToARGB4444Row_MSA, 0, 4, 2, 7) ANY11(J400ToARGBRow_Any_MSA, J400ToARGBRow_MSA, 0, 1, 4, 15) #endif -#if defined(HAS_ARGBTORGB24ROW_MMI) -ANY11(ARGBToRGB24Row_Any_MMI, ARGBToRGB24Row_MMI, 0, 4, 3, 3) -ANY11(ARGBToRAWRow_Any_MMI, ARGBToRAWRow_MMI, 0, 4, 3, 3) -ANY11(ARGBToRGB565Row_Any_MMI, ARGBToRGB565Row_MMI, 0, 4, 2, 3) -ANY11(ARGBToARGB1555Row_Any_MMI, ARGBToARGB1555Row_MMI, 0, 4, 2, 3) -ANY11(ARGBToARGB4444Row_Any_MMI, ARGBToARGB4444Row_MMI, 0, 4, 2, 3) -ANY11(J400ToARGBRow_Any_MMI, J400ToARGBRow_MMI, 0, 1, 4, 3) +#if defined(HAS_ARGBTORGB24ROW_LASX) +ANY11(ARGBToRGB24Row_Any_LASX, ARGBToRGB24Row_LASX, 0, 4, 3, 31) +ANY11(ARGBToRAWRow_Any_LASX, ARGBToRAWRow_LASX, 0, 4, 3, 31) +ANY11(ARGBToRGB565Row_Any_LASX, ARGBToRGB565Row_LASX, 0, 4, 2, 15) +ANY11(ARGBToARGB1555Row_Any_LASX, ARGBToARGB1555Row_LASX, 0, 4, 2, 15) +ANY11(ARGBToARGB4444Row_Any_LASX, ARGBToARGB4444Row_LASX, 0, 4, 2, 15) +#endif +#if defined(HAS_J400TOARGBROW_LSX) +ANY11(J400ToARGBRow_Any_LSX, J400ToARGBRow_LSX, 0, 1, 4, 15) #endif #if defined(HAS_RAWTORGB24ROW_NEON) ANY11(RAWToRGB24Row_Any_NEON, RAWToRGB24Row_NEON, 0, 3, 3, 7) @@ -598,8 +985,8 @@ ANY11(RAWToRGB24Row_Any_NEON, RAWToRGB24Row_NEON, 0, 3, 3, 7) #if defined(HAS_RAWTORGB24ROW_MSA) ANY11(RAWToRGB24Row_Any_MSA, RAWToRGB24Row_MSA, 0, 3, 3, 15) #endif -#if defined(HAS_RAWTORGB24ROW_MMI) -ANY11(RAWToRGB24Row_Any_MMI, RAWToRGB24Row_MMI, 0, 3, 3, 3) +#if defined(HAS_RAWTORGB24ROW_LSX) +ANY11(RAWToRGB24Row_Any_LSX, RAWToRGB24Row_LSX, 0, 3, 3, 15) #endif #ifdef HAS_ARGBTOYROW_AVX2 ANY11(ARGBToYRow_Any_AVX2, ARGBToYRow_AVX2, 0, 4, 1, 31) @@ -610,6 +997,9 @@ ANY11(ABGRToYRow_Any_AVX2, ABGRToYRow_AVX2, 0, 4, 1, 31) #ifdef HAS_ARGBTOYJROW_AVX2 ANY11(ARGBToYJRow_Any_AVX2, ARGBToYJRow_AVX2, 0, 4, 1, 31) #endif +#ifdef HAS_ABGRTOYJROW_AVX2 +ANY11(ABGRToYJRow_Any_AVX2, ABGRToYJRow_AVX2, 0, 4, 1, 31) +#endif #ifdef HAS_RGBATOYJROW_AVX2 ANY11(RGBAToYJRow_Any_AVX2, RGBAToYJRow_AVX2, 0, 4, 1, 31) #endif @@ -626,65 +1016,100 @@ ANY11(ARGBToYRow_Any_SSSE3, ARGBToYRow_SSSE3, 0, 4, 1, 15) ANY11(BGRAToYRow_Any_SSSE3, BGRAToYRow_SSSE3, 0, 4, 1, 15) ANY11(ABGRToYRow_Any_SSSE3, ABGRToYRow_SSSE3, 0, 4, 1, 15) ANY11(RGBAToYRow_Any_SSSE3, RGBAToYRow_SSSE3, 0, 4, 1, 15) +#endif +#ifdef HAS_YUY2TOYROW_SSE2 ANY11(YUY2ToYRow_Any_SSE2, YUY2ToYRow_SSE2, 1, 4, 1, 15) ANY11(UYVYToYRow_Any_SSE2, UYVYToYRow_SSE2, 1, 4, 1, 15) #endif #ifdef HAS_ARGBTOYJROW_SSSE3 ANY11(ARGBToYJRow_Any_SSSE3, ARGBToYJRow_SSSE3, 0, 4, 1, 15) #endif +#ifdef HAS_ABGRTOYJROW_SSSE3 +ANY11(ABGRToYJRow_Any_SSSE3, ABGRToYJRow_SSSE3, 0, 4, 1, 15) +#endif #ifdef HAS_RGBATOYJROW_SSSE3 ANY11(RGBAToYJRow_Any_SSSE3, RGBAToYJRow_SSSE3, 0, 4, 1, 15) #endif #ifdef HAS_ARGBTOYROW_NEON -ANY11(ARGBToYRow_Any_NEON, ARGBToYRow_NEON, 0, 4, 1, 7) 
+ANY11(ARGBToYRow_Any_NEON, ARGBToYRow_NEON, 0, 4, 1, 15) #endif #ifdef HAS_ARGBTOYROW_MSA ANY11(ARGBToYRow_Any_MSA, ARGBToYRow_MSA, 0, 4, 1, 15) #endif -#ifdef HAS_ARGBTOYROW_MMI -ANY11(ARGBToYRow_Any_MMI, ARGBToYRow_MMI, 0, 4, 1, 7) +#ifdef HAS_ARGBTOYROW_LSX +ANY11(ARGBToYRow_Any_LSX, ARGBToYRow_LSX, 0, 4, 1, 15) +#endif +#ifdef HAS_ARGBTOYROW_LASX +ANY11(ARGBToYRow_Any_LASX, ARGBToYRow_LASX, 0, 4, 1, 31) #endif #ifdef HAS_ARGBTOYJROW_NEON -ANY11(ARGBToYJRow_Any_NEON, ARGBToYJRow_NEON, 0, 4, 1, 7) +ANY11(ARGBToYJRow_Any_NEON, ARGBToYJRow_NEON, 0, 4, 1, 15) +#endif +#ifdef HAS_ABGRTOYJROW_NEON +ANY11(ABGRToYJRow_Any_NEON, ABGRToYJRow_NEON, 0, 4, 1, 15) #endif #ifdef HAS_RGBATOYJROW_NEON -ANY11(RGBAToYJRow_Any_NEON, RGBAToYJRow_NEON, 0, 4, 1, 7) +ANY11(RGBAToYJRow_Any_NEON, RGBAToYJRow_NEON, 0, 4, 1, 15) #endif #ifdef HAS_ARGBTOYJROW_MSA ANY11(ARGBToYJRow_Any_MSA, ARGBToYJRow_MSA, 0, 4, 1, 15) #endif -#ifdef HAS_ARGBTOYJROW_MMI -ANY11(ARGBToYJRow_Any_MMI, ARGBToYJRow_MMI, 0, 4, 1, 7) +#ifdef HAS_ARGBTOYJROW_LSX +ANY11(ARGBToYJRow_Any_LSX, ARGBToYJRow_LSX, 0, 4, 1, 15) +#endif +#ifdef HAS_RGBATOYJROW_LSX +ANY11(RGBAToYJRow_Any_LSX, RGBAToYJRow_LSX, 0, 4, 1, 15) +#endif +#ifdef HAS_ABGRTOYJROW_LSX +ANY11(ABGRToYJRow_Any_LSX, ABGRToYJRow_LSX, 0, 4, 1, 15) +#endif +#ifdef HAS_RGBATOYJROW_LASX +ANY11(RGBAToYJRow_Any_LASX, RGBAToYJRow_LASX, 0, 4, 1, 31) +#endif +#ifdef HAS_ARGBTOYJROW_LASX +ANY11(ARGBToYJRow_Any_LASX, ARGBToYJRow_LASX, 0, 4, 1, 31) +#endif +#ifdef HAS_ABGRTOYJROW_LASX +ANY11(ABGRToYJRow_Any_LASX, ABGRToYJRow_LASX, 0, 4, 1, 31) #endif #ifdef HAS_BGRATOYROW_NEON -ANY11(BGRAToYRow_Any_NEON, BGRAToYRow_NEON, 0, 4, 1, 7) +ANY11(BGRAToYRow_Any_NEON, BGRAToYRow_NEON, 0, 4, 1, 15) #endif #ifdef HAS_BGRATOYROW_MSA ANY11(BGRAToYRow_Any_MSA, BGRAToYRow_MSA, 0, 4, 1, 15) #endif -#ifdef HAS_BGRATOYROW_MMI -ANY11(BGRAToYRow_Any_MMI, BGRAToYRow_MMI, 0, 4, 1, 7) +#ifdef HAS_BGRATOYROW_LSX +ANY11(BGRAToYRow_Any_LSX, BGRAToYRow_LSX, 0, 4, 1, 15) +#endif +#ifdef HAS_BGRATOYROW_LASX +ANY11(BGRAToYRow_Any_LASX, BGRAToYRow_LASX, 0, 4, 1, 31) #endif #ifdef HAS_ABGRTOYROW_NEON -ANY11(ABGRToYRow_Any_NEON, ABGRToYRow_NEON, 0, 4, 1, 7) +ANY11(ABGRToYRow_Any_NEON, ABGRToYRow_NEON, 0, 4, 1, 15) #endif #ifdef HAS_ABGRTOYROW_MSA ANY11(ABGRToYRow_Any_MSA, ABGRToYRow_MSA, 0, 4, 1, 7) #endif -#ifdef HAS_ABGRTOYROW_MMI -ANY11(ABGRToYRow_Any_MMI, ABGRToYRow_MMI, 0, 4, 1, 7) +#ifdef HAS_ABGRTOYROW_LSX +ANY11(ABGRToYRow_Any_LSX, ABGRToYRow_LSX, 0, 4, 1, 15) +#endif +#ifdef HAS_ABGRTOYROW_LASX +ANY11(ABGRToYRow_Any_LASX, ABGRToYRow_LASX, 0, 4, 1, 31) #endif #ifdef HAS_RGBATOYROW_NEON -ANY11(RGBAToYRow_Any_NEON, RGBAToYRow_NEON, 0, 4, 1, 7) +ANY11(RGBAToYRow_Any_NEON, RGBAToYRow_NEON, 0, 4, 1, 15) #endif #ifdef HAS_RGBATOYROW_MSA ANY11(RGBAToYRow_Any_MSA, RGBAToYRow_MSA, 0, 4, 1, 15) #endif -#ifdef HAS_RGBATOYROW_MMI -ANY11(RGBAToYRow_Any_MMI, RGBAToYRow_MMI, 0, 4, 1, 7) +#ifdef HAS_RGBATOYROW_LSX +ANY11(RGBAToYRow_Any_LSX, RGBAToYRow_LSX, 0, 4, 1, 15) +#endif +#ifdef HAS_RGBATOYROW_LASX +ANY11(RGBAToYRow_Any_LASX, RGBAToYRow_LASX, 0, 4, 1, 31) #endif #ifdef HAS_RGB24TOYROW_NEON -ANY11(RGB24ToYRow_Any_NEON, RGB24ToYRow_NEON, 0, 3, 1, 7) +ANY11(RGB24ToYRow_Any_NEON, RGB24ToYRow_NEON, 0, 3, 1, 15) #endif #ifdef HAS_RGB24TOYJROW_AVX2 ANY11(RGB24ToYJRow_Any_AVX2, RGB24ToYJRow_AVX2, 0, 3, 1, 31) @@ -693,16 +1118,25 @@ ANY11(RGB24ToYJRow_Any_AVX2, RGB24ToYJRow_AVX2, 0, 3, 1, 31) ANY11(RGB24ToYJRow_Any_SSSE3, RGB24ToYJRow_SSSE3, 0, 3, 1, 15) #endif #ifdef HAS_RGB24TOYJROW_NEON -ANY11(RGB24ToYJRow_Any_NEON, 
RGB24ToYJRow_NEON, 0, 3, 1, 7) +ANY11(RGB24ToYJRow_Any_NEON, RGB24ToYJRow_NEON, 0, 3, 1, 15) #endif #ifdef HAS_RGB24TOYROW_MSA ANY11(RGB24ToYRow_Any_MSA, RGB24ToYRow_MSA, 0, 3, 1, 15) #endif -#ifdef HAS_RGB24TOYROW_MMI -ANY11(RGB24ToYRow_Any_MMI, RGB24ToYRow_MMI, 0, 3, 1, 7) +#ifdef HAS_RGB24TOYROW_LSX +ANY11(RGB24ToYRow_Any_LSX, RGB24ToYRow_LSX, 0, 3, 1, 15) +#endif +#ifdef HAS_RGB24TOYJROW_LSX +ANY11(RGB24ToYJRow_Any_LSX, RGB24ToYJRow_LSX, 0, 3, 1, 15) +#endif +#ifdef HAS_RGB24TOYJROW_LASX +ANY11(RGB24ToYJRow_Any_LASX, RGB24ToYJRow_LASX, 0, 3, 1, 31) +#endif +#ifdef HAS_RGB24TOYROW_LASX +ANY11(RGB24ToYRow_Any_LASX, RGB24ToYRow_LASX, 0, 3, 1, 31) #endif #ifdef HAS_RAWTOYROW_NEON -ANY11(RAWToYRow_Any_NEON, RAWToYRow_NEON, 0, 3, 1, 7) +ANY11(RAWToYRow_Any_NEON, RAWToYRow_NEON, 0, 3, 1, 15) #endif #ifdef HAS_RAWTOYJROW_AVX2 ANY11(RAWToYJRow_Any_AVX2, RAWToYJRow_AVX2, 0, 3, 1, 31) @@ -711,13 +1145,22 @@ ANY11(RAWToYJRow_Any_AVX2, RAWToYJRow_AVX2, 0, 3, 1, 31) ANY11(RAWToYJRow_Any_SSSE3, RAWToYJRow_SSSE3, 0, 3, 1, 15) #endif #ifdef HAS_RAWTOYJROW_NEON -ANY11(RAWToYJRow_Any_NEON, RAWToYJRow_NEON, 0, 3, 1, 7) +ANY11(RAWToYJRow_Any_NEON, RAWToYJRow_NEON, 0, 3, 1, 15) #endif #ifdef HAS_RAWTOYROW_MSA ANY11(RAWToYRow_Any_MSA, RAWToYRow_MSA, 0, 3, 1, 15) #endif -#ifdef HAS_RAWTOYROW_MMI -ANY11(RAWToYRow_Any_MMI, RAWToYRow_MMI, 0, 3, 1, 7) +#ifdef HAS_RAWTOYROW_LSX +ANY11(RAWToYRow_Any_LSX, RAWToYRow_LSX, 0, 3, 1, 15) +#endif +#ifdef HAS_RAWTOYROW_LASX +ANY11(RAWToYRow_Any_LASX, RAWToYRow_LASX, 0, 3, 1, 31) +#endif +#ifdef HAS_RAWTOYJROW_LSX +ANY11(RAWToYJRow_Any_LSX, RAWToYJRow_LSX, 0, 3, 1, 15) +#endif +#ifdef HAS_RAWTOYJROW_LASX +ANY11(RAWToYJRow_Any_LASX, RAWToYJRow_LASX, 0, 3, 1, 31) #endif #ifdef HAS_RGB565TOYROW_NEON ANY11(RGB565ToYRow_Any_NEON, RGB565ToYRow_NEON, 0, 2, 1, 7) @@ -725,8 +1168,11 @@ ANY11(RGB565ToYRow_Any_NEON, RGB565ToYRow_NEON, 0, 2, 1, 7) #ifdef HAS_RGB565TOYROW_MSA ANY11(RGB565ToYRow_Any_MSA, RGB565ToYRow_MSA, 0, 2, 1, 15) #endif -#ifdef HAS_RGB565TOYROW_MMI -ANY11(RGB565ToYRow_Any_MMI, RGB565ToYRow_MMI, 0, 2, 1, 7) +#ifdef HAS_RGB565TOYROW_LSX +ANY11(RGB565ToYRow_Any_LSX, RGB565ToYRow_LSX, 0, 2, 1, 15) +#endif +#ifdef HAS_RGB565TOYROW_LASX +ANY11(RGB565ToYRow_Any_LASX, RGB565ToYRow_LASX, 0, 2, 1, 31) #endif #ifdef HAS_ARGB1555TOYROW_NEON ANY11(ARGB1555ToYRow_Any_NEON, ARGB1555ToYRow_NEON, 0, 2, 1, 7) @@ -734,15 +1180,15 @@ ANY11(ARGB1555ToYRow_Any_NEON, ARGB1555ToYRow_NEON, 0, 2, 1, 7) #ifdef HAS_ARGB1555TOYROW_MSA ANY11(ARGB1555ToYRow_Any_MSA, ARGB1555ToYRow_MSA, 0, 2, 1, 15) #endif -#ifdef HAS_ARGB1555TOYROW_MMI -ANY11(ARGB1555ToYRow_Any_MMI, ARGB1555ToYRow_MMI, 0, 2, 1, 7) +#ifdef HAS_ARGB1555TOYROW_LSX +ANY11(ARGB1555ToYRow_Any_LSX, ARGB1555ToYRow_LSX, 0, 2, 1, 15) +#endif +#ifdef HAS_ARGB1555TOYROW_LASX +ANY11(ARGB1555ToYRow_Any_LASX, ARGB1555ToYRow_LASX, 0, 2, 1, 31) #endif #ifdef HAS_ARGB4444TOYROW_NEON ANY11(ARGB4444ToYRow_Any_NEON, ARGB4444ToYRow_NEON, 0, 2, 1, 7) #endif -#ifdef HAS_ARGB4444TOYROW_MMI -ANY11(ARGB4444ToYRow_Any_MMI, ARGB4444ToYRow_MMI, 0, 2, 1, 7) -#endif #ifdef HAS_YUY2TOYROW_NEON ANY11(YUY2ToYRow_Any_NEON, YUY2ToYRow_NEON, 1, 4, 1, 15) #endif @@ -752,14 +1198,14 @@ ANY11(UYVYToYRow_Any_NEON, UYVYToYRow_NEON, 1, 4, 1, 15) #ifdef HAS_YUY2TOYROW_MSA ANY11(YUY2ToYRow_Any_MSA, YUY2ToYRow_MSA, 1, 4, 1, 31) #endif -#ifdef HAS_YUY2TOYROW_MMI -ANY11(YUY2ToYRow_Any_MMI, YUY2ToYRow_MMI, 1, 4, 1, 7) +#ifdef HAS_YUY2TOYROW_LASX +ANY11(YUY2ToYRow_Any_LASX, YUY2ToYRow_LASX, 1, 4, 1, 31) #endif #ifdef HAS_UYVYTOYROW_MSA ANY11(UYVYToYRow_Any_MSA, UYVYToYRow_MSA, 1, 
4, 1, 31) #endif -#ifdef HAS_UYVYTOYROW_MMI -ANY11(UYVYToYRow_Any_MMI, UYVYToYRow_MMI, 1, 4, 1, 15) +#ifdef HAS_UYVYTOYROW_LASX +ANY11(UYVYToYRow_Any_LASX, UYVYToYRow_LASX, 1, 4, 1, 31) #endif #ifdef HAS_AYUVTOYROW_NEON ANY11(AYUVToYRow_Any_NEON, AYUVToYRow_NEON, 0, 4, 1, 15) @@ -779,8 +1225,11 @@ ANY11(RGB24ToARGBRow_Any_NEON, RGB24ToARGBRow_NEON, 0, 3, 4, 7) #ifdef HAS_RGB24TOARGBROW_MSA ANY11(RGB24ToARGBRow_Any_MSA, RGB24ToARGBRow_MSA, 0, 3, 4, 15) #endif -#ifdef HAS_RGB24TOARGBROW_MMI -ANY11(RGB24ToARGBRow_Any_MMI, RGB24ToARGBRow_MMI, 0, 3, 4, 3) +#ifdef HAS_RGB24TOARGBROW_LSX +ANY11(RGB24ToARGBRow_Any_LSX, RGB24ToARGBRow_LSX, 0, 3, 4, 15) +#endif +#ifdef HAS_RGB24TOARGBROW_LASX +ANY11(RGB24ToARGBRow_Any_LASX, RGB24ToARGBRow_LASX, 0, 3, 4, 31) #endif #ifdef HAS_RAWTOARGBROW_NEON ANY11(RAWToARGBRow_Any_NEON, RAWToARGBRow_NEON, 0, 3, 4, 7) @@ -791,8 +1240,11 @@ ANY11(RAWToRGBARow_Any_NEON, RAWToRGBARow_NEON, 0, 3, 4, 7) #ifdef HAS_RAWTOARGBROW_MSA ANY11(RAWToARGBRow_Any_MSA, RAWToARGBRow_MSA, 0, 3, 4, 15) #endif -#ifdef HAS_RAWTOARGBROW_MMI -ANY11(RAWToARGBRow_Any_MMI, RAWToARGBRow_MMI, 0, 3, 4, 3) +#ifdef HAS_RAWTOARGBROW_LSX +ANY11(RAWToARGBRow_Any_LSX, RAWToARGBRow_LSX, 0, 3, 4, 15) +#endif +#ifdef HAS_RAWTOARGBROW_LASX +ANY11(RAWToARGBRow_Any_LASX, RAWToARGBRow_LASX, 0, 3, 4, 31) #endif #ifdef HAS_RGB565TOARGBROW_NEON ANY11(RGB565ToARGBRow_Any_NEON, RGB565ToARGBRow_NEON, 0, 2, 4, 7) @@ -800,8 +1252,11 @@ ANY11(RGB565ToARGBRow_Any_NEON, RGB565ToARGBRow_NEON, 0, 2, 4, 7) #ifdef HAS_RGB565TOARGBROW_MSA ANY11(RGB565ToARGBRow_Any_MSA, RGB565ToARGBRow_MSA, 0, 2, 4, 15) #endif -#ifdef HAS_RGB565TOARGBROW_MMI -ANY11(RGB565ToARGBRow_Any_MMI, RGB565ToARGBRow_MMI, 0, 2, 4, 3) +#ifdef HAS_RGB565TOARGBROW_LSX +ANY11(RGB565ToARGBRow_Any_LSX, RGB565ToARGBRow_LSX, 0, 2, 4, 15) +#endif +#ifdef HAS_RGB565TOARGBROW_LASX +ANY11(RGB565ToARGBRow_Any_LASX, RGB565ToARGBRow_LASX, 0, 2, 4, 31) #endif #ifdef HAS_ARGB1555TOARGBROW_NEON ANY11(ARGB1555ToARGBRow_Any_NEON, ARGB1555ToARGBRow_NEON, 0, 2, 4, 7) @@ -809,8 +1264,11 @@ ANY11(ARGB1555ToARGBRow_Any_NEON, ARGB1555ToARGBRow_NEON, 0, 2, 4, 7) #ifdef HAS_ARGB1555TOARGBROW_MSA ANY11(ARGB1555ToARGBRow_Any_MSA, ARGB1555ToARGBRow_MSA, 0, 2, 4, 15) #endif -#ifdef HAS_ARGB1555TOARGBROW_MMI -ANY11(ARGB1555ToARGBRow_Any_MMI, ARGB1555ToARGBRow_MMI, 0, 2, 4, 3) +#ifdef HAS_ARGB1555TOARGBROW_LSX +ANY11(ARGB1555ToARGBRow_Any_LSX, ARGB1555ToARGBRow_LSX, 0, 2, 4, 15) +#endif +#ifdef HAS_ARGB1555TOARGBROW_LASX +ANY11(ARGB1555ToARGBRow_Any_LASX, ARGB1555ToARGBRow_LASX, 0, 2, 4, 31) #endif #ifdef HAS_ARGB4444TOARGBROW_NEON ANY11(ARGB4444ToARGBRow_Any_NEON, ARGB4444ToARGBRow_NEON, 0, 2, 4, 7) @@ -818,8 +1276,11 @@ ANY11(ARGB4444ToARGBRow_Any_NEON, ARGB4444ToARGBRow_NEON, 0, 2, 4, 7) #ifdef HAS_ARGB4444TOARGBROW_MSA ANY11(ARGB4444ToARGBRow_Any_MSA, ARGB4444ToARGBRow_MSA, 0, 2, 4, 15) #endif -#ifdef HAS_ARGB4444TOARGBROW_MMI -ANY11(ARGB4444ToARGBRow_Any_MMI, ARGB4444ToARGBRow_MMI, 0, 2, 4, 3) +#ifdef HAS_ARGB4444TOARGBROW_LSX +ANY11(ARGB4444ToARGBRow_Any_LSX, ARGB4444ToARGBRow_LSX, 0, 2, 4, 15) +#endif +#ifdef HAS_ARGB4444TOARGBROW_LASX +ANY11(ARGB4444ToARGBRow_Any_LASX, ARGB4444ToARGBRow_LASX, 0, 2, 4, 31) #endif #ifdef HAS_ARGBATTENUATEROW_SSSE3 ANY11(ARGBAttenuateRow_Any_SSSE3, ARGBAttenuateRow_SSSE3, 0, 4, 4, 3) @@ -839,8 +1300,8 @@ ANY11(ARGBAttenuateRow_Any_NEON, ARGBAttenuateRow_NEON, 0, 4, 4, 7) #ifdef HAS_ARGBATTENUATEROW_MSA ANY11(ARGBAttenuateRow_Any_MSA, ARGBAttenuateRow_MSA, 0, 4, 4, 7) #endif -#ifdef HAS_ARGBATTENUATEROW_MMI -ANY11(ARGBAttenuateRow_Any_MMI, 
ARGBAttenuateRow_MMI, 0, 4, 4, 1) +#ifdef HAS_ARGBATTENUATEROW_LASX +ANY11(ARGBAttenuateRow_Any_LASX, ARGBAttenuateRow_LASX, 0, 4, 4, 15) #endif #ifdef HAS_ARGBEXTRACTALPHAROW_SSE2 ANY11(ARGBExtractAlphaRow_Any_SSE2, ARGBExtractAlphaRow_SSE2, 0, 4, 1, 7) @@ -854,8 +1315,8 @@ ANY11(ARGBExtractAlphaRow_Any_NEON, ARGBExtractAlphaRow_NEON, 0, 4, 1, 15) #ifdef HAS_ARGBEXTRACTALPHAROW_MSA ANY11(ARGBExtractAlphaRow_Any_MSA, ARGBExtractAlphaRow_MSA, 0, 4, 1, 15) #endif -#ifdef HAS_ARGBEXTRACTALPHAROW_MMI -ANY11(ARGBExtractAlphaRow_Any_MMI, ARGBExtractAlphaRow_MMI, 0, 4, 1, 7) +#ifdef HAS_ARGBEXTRACTALPHAROW_LSX +ANY11(ARGBExtractAlphaRow_Any_LSX, ARGBExtractAlphaRow_LSX, 0, 4, 1, 15) #endif #undef ANY11 @@ -881,18 +1342,12 @@ ANY11B(ARGBCopyAlphaRow_Any_AVX2, ARGBCopyAlphaRow_AVX2, 0, 4, 4, 15) #ifdef HAS_ARGBCOPYALPHAROW_SSE2 ANY11B(ARGBCopyAlphaRow_Any_SSE2, ARGBCopyAlphaRow_SSE2, 0, 4, 4, 7) #endif -#ifdef HAS_ARGBCOPYALPHAROW_MMI -ANY11B(ARGBCopyAlphaRow_Any_MMI, ARGBCopyAlphaRow_MMI, 0, 4, 4, 1) -#endif #ifdef HAS_ARGBCOPYYTOALPHAROW_AVX2 ANY11B(ARGBCopyYToAlphaRow_Any_AVX2, ARGBCopyYToAlphaRow_AVX2, 0, 1, 4, 15) #endif #ifdef HAS_ARGBCOPYYTOALPHAROW_SSE2 ANY11B(ARGBCopyYToAlphaRow_Any_SSE2, ARGBCopyYToAlphaRow_SSE2, 0, 1, 4, 7) #endif -#ifdef HAS_ARGBCOPYYTOALPHAROW_MMI -ANY11B(ARGBCopyYToAlphaRow_Any_MMI, ARGBCopyYToAlphaRow_MMI, 0, 1, 4, 7) -#endif #undef ANY11B // Any 1 to 1 with parameter. @@ -942,13 +1397,13 @@ ANY11P(I400ToARGBRow_Any_MSA, 4, 15) #endif -#if defined(HAS_I400TOARGBROW_MMI) -ANY11P(I400ToARGBRow_Any_MMI, - I400ToARGBRow_MMI, +#if defined(HAS_I400TOARGBROW_LSX) +ANY11P(I400ToARGBRow_Any_LSX, + I400ToARGBRow_LSX, const struct YuvConstants*, 1, 4, - 7) + 15) #endif #if defined(HAS_ARGBTORGB565DITHERROW_SSE2) @@ -983,13 +1438,13 @@ ANY11P(ARGBToRGB565DitherRow_Any_MSA, 2, 7) #endif -#if defined(HAS_ARGBTORGB565DITHERROW_MMI) -ANY11P(ARGBToRGB565DitherRow_Any_MMI, - ARGBToRGB565DitherRow_MMI, +#if defined(HAS_ARGBTORGB565DITHERROW_LASX) +ANY11P(ARGBToRGB565DitherRow_Any_LASX, + ARGBToRGB565DitherRow_LASX, const uint32_t, 4, 2, - 3) + 15) #endif #ifdef HAS_ARGBSHUFFLEROW_SSSE3 ANY11P(ARGBShuffleRow_Any_SSSE3, ARGBShuffleRow_SSSE3, const uint8_t*, 4, 4, 7) @@ -1003,12 +1458,78 @@ ANY11P(ARGBShuffleRow_Any_NEON, ARGBShuffleRow_NEON, const uint8_t*, 4, 4, 3) #ifdef HAS_ARGBSHUFFLEROW_MSA ANY11P(ARGBShuffleRow_Any_MSA, ARGBShuffleRow_MSA, const uint8_t*, 4, 4, 7) #endif -#ifdef HAS_ARGBSHUFFLEROW_MMI -ANY11P(ARGBShuffleRow_Any_MMI, ARGBShuffleRow_MMI, const uint8_t*, 4, 4, 1) +#ifdef HAS_ARGBSHUFFLEROW_LASX +ANY11P(ARGBShuffleRow_Any_LASX, ARGBShuffleRow_LASX, const uint8_t*, 4, 4, 15) #endif #undef ANY11P #undef ANY11P +// Any 1 to 1 with type +#define ANY11T(NAMEANY, ANY_SIMD, SBPP, BPP, STYPE, DTYPE, MASK) \ + void NAMEANY(const STYPE* src_ptr, DTYPE* dst_ptr, int width) { \ + SIMD_ALIGNED(uint8_t temp[(MASK + 1) * SBPP]); \ + SIMD_ALIGNED(uint8_t out[(MASK + 1) * BPP]); \ + memset(temp, 0, (MASK + 1) * SBPP); /* for msan */ \ + int r = width & MASK; \ + int n = width & ~MASK; \ + if (n > 0) { \ + ANY_SIMD(src_ptr, dst_ptr, n); \ + } \ + memcpy(temp, (uint8_t*)(src_ptr) + n * SBPP, r * SBPP); \ + ANY_SIMD((STYPE*)temp, (DTYPE*)out, MASK + 1); \ + memcpy((uint8_t*)(dst_ptr) + n * BPP, out, r * BPP); \ + } + +#ifdef HAS_ARGBTOAR64ROW_SSSE3 +ANY11T(ARGBToAR64Row_Any_SSSE3, ARGBToAR64Row_SSSE3, 4, 8, uint8_t, uint16_t, 3) +#endif + +#ifdef HAS_ARGBTOAB64ROW_SSSE3 +ANY11T(ARGBToAB64Row_Any_SSSE3, ARGBToAB64Row_SSSE3, 4, 8, uint8_t, uint16_t, 3) +#endif + +#ifdef 
HAS_AR64TOARGBROW_SSSE3 +ANY11T(AR64ToARGBRow_Any_SSSE3, AR64ToARGBRow_SSSE3, 8, 4, uint16_t, uint8_t, 3) +#endif + +#ifdef HAS_ARGBTOAR64ROW_SSSE3 +ANY11T(AB64ToARGBRow_Any_SSSE3, AB64ToARGBRow_SSSE3, 8, 4, uint16_t, uint8_t, 3) +#endif + +#ifdef HAS_ARGBTOAR64ROW_AVX2 +ANY11T(ARGBToAR64Row_Any_AVX2, ARGBToAR64Row_AVX2, 4, 8, uint8_t, uint16_t, 7) +#endif + +#ifdef HAS_ARGBTOAB64ROW_AVX2 +ANY11T(ARGBToAB64Row_Any_AVX2, ARGBToAB64Row_AVX2, 4, 8, uint8_t, uint16_t, 7) +#endif + +#ifdef HAS_AR64TOARGBROW_AVX2 +ANY11T(AR64ToARGBRow_Any_AVX2, AR64ToARGBRow_AVX2, 8, 4, uint16_t, uint8_t, 7) +#endif + +#ifdef HAS_ARGBTOAR64ROW_AVX2 +ANY11T(AB64ToARGBRow_Any_AVX2, AB64ToARGBRow_AVX2, 8, 4, uint16_t, uint8_t, 7) +#endif + +#ifdef HAS_ARGBTOAR64ROW_NEON +ANY11T(ARGBToAR64Row_Any_NEON, ARGBToAR64Row_NEON, 4, 8, uint8_t, uint16_t, 7) +#endif + +#ifdef HAS_ARGBTOAB64ROW_NEON +ANY11T(ARGBToAB64Row_Any_NEON, ARGBToAB64Row_NEON, 4, 8, uint8_t, uint16_t, 7) +#endif + +#ifdef HAS_AR64TOARGBROW_NEON +ANY11T(AR64ToARGBRow_Any_NEON, AR64ToARGBRow_NEON, 8, 4, uint16_t, uint8_t, 7) +#endif + +#ifdef HAS_ARGBTOAR64ROW_NEON +ANY11T(AB64ToARGBRow_Any_NEON, AB64ToARGBRow_NEON, 8, 4, uint16_t, uint8_t, 7) +#endif + +#undef ANY11T + // Any 1 to 1 with parameter and shorts. BPP measures in shorts. #define ANY11C(NAMEANY, ANY_SIMD, SBPP, BPP, STYPE, DTYPE, MASK) \ void NAMEANY(const STYPE* src_ptr, DTYPE* dst_ptr, int scale, int width) { \ @@ -1043,6 +1564,15 @@ ANY11C(Convert16To8Row_Any_AVX2, uint8_t, 31) #endif +#ifdef HAS_CONVERT16TO8ROW_NEON +ANY11C(Convert16To8Row_Any_NEON, + Convert16To8Row_NEON, + 2, + 1, + uint16_t, + uint8_t, + 15) +#endif #ifdef HAS_CONVERT8TO16ROW_SSE2 ANY11C(Convert8To16Row_Any_SSE2, Convert8To16Row_SSE2, @@ -1061,6 +1591,30 @@ ANY11C(Convert8To16Row_Any_AVX2, uint16_t, 31) #endif +#ifdef HAS_MULTIPLYROW_16_AVX2 +ANY11C(MultiplyRow_16_Any_AVX2, + MultiplyRow_16_AVX2, + 2, + 2, + uint16_t, + uint16_t, + 31) +#endif +#ifdef HAS_MULTIPLYROW_16_NEON +ANY11C(MultiplyRow_16_Any_NEON, + MultiplyRow_16_NEON, + 2, + 2, + uint16_t, + uint16_t, + 15) +#endif +#ifdef HAS_DIVIDEROW_16_AVX2 +ANY11C(DivideRow_16_Any_AVX2, DivideRow_16_AVX2, 2, 2, uint16_t, uint16_t, 31) +#endif +#ifdef HAS_DIVIDEROW_16_NEON +ANY11C(DivideRow_16_Any_NEON, DivideRow_16_NEON, 2, 2, uint16_t, uint16_t, 15) +#endif #undef ANY11C // Any 1 to 1 with parameter and shorts to byte. BPP measures in shorts. @@ -1111,6 +1665,9 @@ ANY11P16(HalfFloatRow_Any_MSA, HalfFloatRow_MSA, uint16_t, uint16_t, 2, 2, 31) #ifdef HAS_BYTETOFLOATROW_NEON ANY11P16(ByteToFloatRow_Any_NEON, ByteToFloatRow_NEON, uint8_t, float, 1, 3, 7) #endif +#ifdef HAS_HALFFLOATROW_LSX +ANY11P16(HalfFloatRow_Any_LSX, HalfFloatRow_LSX, uint16_t, uint16_t, 2, 2, 31) +#endif #undef ANY11P16 // Any 1 to 1 with yuvconstants @@ -1144,45 +1701,107 @@ ANY11C(UYVYToARGBRow_Any_NEON, UYVYToARGBRow_NEON, 1, 4, 4, 7) ANY11C(YUY2ToARGBRow_Any_MSA, YUY2ToARGBRow_MSA, 1, 4, 4, 7) ANY11C(UYVYToARGBRow_Any_MSA, UYVYToARGBRow_MSA, 1, 4, 4, 7) #endif -#if defined(HAS_YUY2TOARGBROW_MMI) -ANY11C(YUY2ToARGBRow_Any_MMI, YUY2ToARGBRow_MMI, 1, 4, 4, 7) -ANY11C(UYVYToARGBRow_Any_MMI, UYVYToARGBRow_MMI, 1, 4, 4, 7) +#if defined(HAS_YUY2TOARGBROW_LSX) +ANY11C(YUY2ToARGBRow_Any_LSX, YUY2ToARGBRow_LSX, 1, 4, 4, 7) +ANY11C(UYVYToARGBRow_Any_LSX, UYVYToARGBRow_LSX, 1, 4, 4, 7) #endif #undef ANY11C // Any 1 to 1 interpolate. Takes 2 rows of source via stride. 
-#define ANY11T(NAMEANY, ANY_SIMD, SBPP, BPP, MASK) \ - void NAMEANY(uint8_t* dst_ptr, const uint8_t* src_ptr, \ - ptrdiff_t src_stride_ptr, int width, int source_y_fraction) { \ - SIMD_ALIGNED(uint8_t temp[64 * 3]); \ - memset(temp, 0, 64 * 2); /* for msan */ \ - int r = width & MASK; \ - int n = width & ~MASK; \ - if (n > 0) { \ - ANY_SIMD(dst_ptr, src_ptr, src_stride_ptr, n, source_y_fraction); \ - } \ - memcpy(temp, src_ptr + n * SBPP, r * SBPP); \ - memcpy(temp + 64, src_ptr + src_stride_ptr + n * SBPP, r * SBPP); \ - ANY_SIMD(temp + 128, temp, 64, MASK + 1, source_y_fraction); \ - memcpy(dst_ptr + n * BPP, temp + 128, r * BPP); \ +#define ANY11I(NAMEANY, ANY_SIMD, TD, TS, SBPP, BPP, MASK) \ + void NAMEANY(TD* dst_ptr, const TS* src_ptr, ptrdiff_t src_stride, \ + int width, int source_y_fraction) { \ + SIMD_ALIGNED(TS temps[64 * 2]); \ + SIMD_ALIGNED(TD tempd[64]); \ + memset(temps, 0, sizeof(temps)); /* for msan */ \ + int r = width & MASK; \ + int n = width & ~MASK; \ + if (n > 0) { \ + ANY_SIMD(dst_ptr, src_ptr, src_stride, n, source_y_fraction); \ + } \ + memcpy(temps, src_ptr + n * SBPP, r * SBPP * sizeof(TS)); \ + if (source_y_fraction) { \ + memcpy(temps + 64, src_ptr + src_stride + n * SBPP, \ + r * SBPP * sizeof(TS)); \ + } \ + ANY_SIMD(tempd, temps, 64, MASK + 1, source_y_fraction); \ + memcpy(dst_ptr + n * BPP, tempd, r * BPP * sizeof(TD)); \ } #ifdef HAS_INTERPOLATEROW_AVX2 -ANY11T(InterpolateRow_Any_AVX2, InterpolateRow_AVX2, 1, 1, 31) +ANY11I(InterpolateRow_Any_AVX2, InterpolateRow_AVX2, uint8_t, uint8_t, 1, 1, 31) #endif #ifdef HAS_INTERPOLATEROW_SSSE3 -ANY11T(InterpolateRow_Any_SSSE3, InterpolateRow_SSSE3, 1, 1, 15) +ANY11I(InterpolateRow_Any_SSSE3, + InterpolateRow_SSSE3, + uint8_t, + uint8_t, + 1, + 1, + 15) #endif #ifdef HAS_INTERPOLATEROW_NEON -ANY11T(InterpolateRow_Any_NEON, InterpolateRow_NEON, 1, 1, 15) +ANY11I(InterpolateRow_Any_NEON, InterpolateRow_NEON, uint8_t, uint8_t, 1, 1, 15) #endif #ifdef HAS_INTERPOLATEROW_MSA -ANY11T(InterpolateRow_Any_MSA, InterpolateRow_MSA, 1, 1, 31) +ANY11I(InterpolateRow_Any_MSA, InterpolateRow_MSA, uint8_t, uint8_t, 1, 1, 31) #endif -#ifdef HAS_INTERPOLATEROW_MMI -ANY11T(InterpolateRow_Any_MMI, InterpolateRow_MMI, 1, 1, 7) +#ifdef HAS_INTERPOLATEROW_LSX +ANY11I(InterpolateRow_Any_LSX, InterpolateRow_LSX, uint8_t, uint8_t, 1, 1, 31) #endif -#undef ANY11T + +#ifdef HAS_INTERPOLATEROW_16_NEON +ANY11I(InterpolateRow_16_Any_NEON, + InterpolateRow_16_NEON, + uint16_t, + uint16_t, + 1, + 1, + 7) +#endif +#undef ANY11I + +// Any 1 to 1 interpolate with scale param +#define ANY11IS(NAMEANY, ANY_SIMD, TD, TS, SBPP, BPP, MASK) \ + void NAMEANY(TD* dst_ptr, const TS* src_ptr, ptrdiff_t src_stride, \ + int scale, int width, int source_y_fraction) { \ + SIMD_ALIGNED(TS temps[64 * 2]); \ + SIMD_ALIGNED(TD tempd[64]); \ + memset(temps, 0, sizeof(temps)); /* for msan */ \ + int r = width & MASK; \ + int n = width & ~MASK; \ + if (n > 0) { \ + ANY_SIMD(dst_ptr, src_ptr, src_stride, scale, n, source_y_fraction); \ + } \ + memcpy(temps, src_ptr + n * SBPP, r * SBPP * sizeof(TS)); \ + if (source_y_fraction) { \ + memcpy(temps + 64, src_ptr + src_stride + n * SBPP, \ + r * SBPP * sizeof(TS)); \ + } \ + ANY_SIMD(tempd, temps, 64, scale, MASK + 1, source_y_fraction); \ + memcpy(dst_ptr + n * BPP, tempd, r * BPP * sizeof(TD)); \ + } + +#ifdef HAS_INTERPOLATEROW_16TO8_NEON +ANY11IS(InterpolateRow_16To8_Any_NEON, + InterpolateRow_16To8_NEON, + uint8_t, + uint16_t, + 1, + 1, + 7) +#endif +#ifdef HAS_INTERPOLATEROW_16TO8_AVX2 
+ANY11IS(InterpolateRow_16To8_Any_AVX2, + InterpolateRow_16To8_AVX2, + uint8_t, + uint16_t, + 1, + 1, + 31) +#endif + +#undef ANY11IS // Any 1 to 1 mirror. #define ANY11M(NAMEANY, ANY_SIMD, BPP, MASK) \ @@ -1211,8 +1830,8 @@ ANY11M(MirrorRow_Any_NEON, MirrorRow_NEON, 1, 31) #ifdef HAS_MIRRORROW_MSA ANY11M(MirrorRow_Any_MSA, MirrorRow_MSA, 1, 63) #endif -#ifdef HAS_MIRRORROW_MMI -ANY11M(MirrorRow_Any_MMI, MirrorRow_MMI, 1, 7) +#ifdef HAS_MIRRORROW_LASX +ANY11M(MirrorRow_Any_LASX, MirrorRow_LASX, 1, 63) #endif #ifdef HAS_MIRRORUVROW_AVX2 ANY11M(MirrorUVRow_Any_AVX2, MirrorUVRow_AVX2, 2, 15) @@ -1226,6 +1845,9 @@ ANY11M(MirrorUVRow_Any_NEON, MirrorUVRow_NEON, 2, 31) #ifdef HAS_MIRRORUVROW_MSA ANY11M(MirrorUVRow_Any_MSA, MirrorUVRow_MSA, 2, 7) #endif +#ifdef HAS_MIRRORUVROW_LASX +ANY11M(MirrorUVRow_Any_LASX, MirrorUVRow_LASX, 2, 15) +#endif #ifdef HAS_ARGBMIRRORROW_AVX2 ANY11M(ARGBMirrorRow_Any_AVX2, ARGBMirrorRow_AVX2, 4, 7) #endif @@ -1238,8 +1860,8 @@ ANY11M(ARGBMirrorRow_Any_NEON, ARGBMirrorRow_NEON, 4, 7) #ifdef HAS_ARGBMIRRORROW_MSA ANY11M(ARGBMirrorRow_Any_MSA, ARGBMirrorRow_MSA, 4, 15) #endif -#ifdef HAS_ARGBMIRRORROW_MMI -ANY11M(ARGBMirrorRow_Any_MMI, ARGBMirrorRow_MMI, 4, 1) +#ifdef HAS_ARGBMIRRORROW_LASX +ANY11M(ARGBMirrorRow_Any_LASX, ARGBMirrorRow_LASX, 4, 15) #endif #ifdef HAS_RGB24MIRRORROW_SSSE3 ANY11M(RGB24MirrorRow_Any_SSSE3, RGB24MirrorRow_SSSE3, 3, 15) @@ -1269,14 +1891,17 @@ ANY1(SetRow_Any_X86, SetRow_X86, uint8_t, 1, 3) #ifdef HAS_SETROW_NEON ANY1(SetRow_Any_NEON, SetRow_NEON, uint8_t, 1, 15) #endif +#ifdef HAS_SETROW_LSX +ANY1(SetRow_Any_LSX, SetRow_LSX, uint8_t, 1, 15) +#endif #ifdef HAS_ARGBSETROW_NEON ANY1(ARGBSetRow_Any_NEON, ARGBSetRow_NEON, uint32_t, 4, 3) #endif #ifdef HAS_ARGBSETROW_MSA ANY1(ARGBSetRow_Any_MSA, ARGBSetRow_MSA, uint32_t, 4, 3) #endif -#ifdef HAS_ARGBSETROW_MMI -ANY1(ARGBSetRow_Any_MMI, ARGBSetRow_MMI, uint32_t, 4, 3) +#ifdef HAS_ARGBSETROW_LSX +ANY1(ARGBSetRow_Any_LSX, ARGBSetRow_LSX, uint32_t, 4, 3) #endif #undef ANY1 @@ -1309,8 +1934,8 @@ ANY12(SplitUVRow_Any_NEON, SplitUVRow_NEON, 0, 2, 0, 15) #ifdef HAS_SPLITUVROW_MSA ANY12(SplitUVRow_Any_MSA, SplitUVRow_MSA, 0, 2, 0, 31) #endif -#ifdef HAS_SPLITUVROW_MMI -ANY12(SplitUVRow_Any_MMI, SplitUVRow_MMI, 0, 2, 0, 7) +#ifdef HAS_SPLITUVROW_LSX +ANY12(SplitUVRow_Any_LSX, SplitUVRow_LSX, 0, 2, 0, 31) #endif #ifdef HAS_ARGBTOUV444ROW_SSSE3 ANY12(ARGBToUV444Row_Any_SSSE3, ARGBToUV444Row_SSSE3, 0, 4, 0, 15) @@ -1333,13 +1958,39 @@ ANY12(ARGBToUV444Row_Any_MSA, ARGBToUV444Row_MSA, 0, 4, 0, 15) ANY12(YUY2ToUV422Row_Any_MSA, YUY2ToUV422Row_MSA, 1, 4, 1, 31) ANY12(UYVYToUV422Row_Any_MSA, UYVYToUV422Row_MSA, 1, 4, 1, 31) #endif -#ifdef HAS_YUY2TOUV422ROW_MMI -ANY12(ARGBToUV444Row_Any_MMI, ARGBToUV444Row_MMI, 0, 4, 0, 7) -ANY12(UYVYToUV422Row_Any_MMI, UYVYToUV422Row_MMI, 1, 4, 1, 15) -ANY12(YUY2ToUV422Row_Any_MMI, YUY2ToUV422Row_MMI, 1, 4, 1, 15) +#ifdef HAS_YUY2TOUV422ROW_LASX +ANY12(ARGBToUV444Row_Any_LASX, ARGBToUV444Row_LASX, 0, 4, 0, 31) +ANY12(YUY2ToUV422Row_Any_LASX, YUY2ToUV422Row_LASX, 1, 4, 1, 31) +ANY12(UYVYToUV422Row_Any_LASX, UYVYToUV422Row_LASX, 1, 4, 1, 31) #endif #undef ANY12 +// Any 2 16 bit planes with parameter to 1 +#define ANY12PT(NAMEANY, ANY_SIMD, T, BPP, MASK) \ + void NAMEANY(const T* src_uv, T* dst_u, T* dst_v, int depth, int width) { \ + SIMD_ALIGNED(T temp[16 * 4]); \ + memset(temp, 0, 16 * 4 * BPP); /* for msan */ \ + int r = width & MASK; \ + int n = width & ~MASK; \ + if (n > 0) { \ + ANY_SIMD(src_uv, dst_u, dst_v, depth, n); \ + } \ + memcpy(temp, src_uv + n * 2, r * BPP * 2); \ 
+ ANY_SIMD(temp, temp + 32, temp + 48, depth, MASK + 1); \ + memcpy(dst_u + n, temp + 32, r * BPP); \ + memcpy(dst_v + n, temp + 48, r * BPP); \ + } + +#ifdef HAS_SPLITUVROW_16_AVX2 +ANY12PT(SplitUVRow_16_Any_AVX2, SplitUVRow_16_AVX2, uint16_t, 2, 15) +#endif + +#ifdef HAS_SPLITUVROW_16_NEON +ANY12PT(SplitUVRow_16_Any_NEON, SplitUVRow_16_NEON, uint16_t, 2, 7) +#endif + +#undef ANY21CT + // Any 1 to 3. Outputs RGB planes. #define ANY13(NAMEANY, ANY_SIMD, BPP, MASK) \ void NAMEANY(const uint8_t* src_ptr, uint8_t* dst_r, uint8_t* dst_g, \ @@ -1364,24 +2015,66 @@ ANY13(SplitRGBRow_Any_SSSE3, SplitRGBRow_SSSE3, 3, 15) #ifdef HAS_SPLITRGBROW_NEON ANY13(SplitRGBRow_Any_NEON, SplitRGBRow_NEON, 3, 15) #endif -#ifdef HAS_SPLITRGBROW_MMI -ANY13(SplitRGBRow_Any_MMI, SplitRGBRow_MMI, 3, 3) +#ifdef HAS_SPLITXRGBROW_SSE2 +ANY13(SplitXRGBRow_Any_SSE2, SplitXRGBRow_SSE2, 4, 7) +#endif +#ifdef HAS_SPLITXRGBROW_SSSE3 +ANY13(SplitXRGBRow_Any_SSSE3, SplitXRGBRow_SSSE3, 4, 7) +#endif +#ifdef HAS_SPLITXRGBROW_AVX2 +ANY13(SplitXRGBRow_Any_AVX2, SplitXRGBRow_AVX2, 4, 15) +#endif +#ifdef HAS_SPLITXRGBROW_NEON +ANY13(SplitXRGBRow_Any_NEON, SplitXRGBRow_NEON, 4, 15) +#endif + +// Any 1 to 4. Outputs ARGB planes. +#define ANY14(NAMEANY, ANY_SIMD, BPP, MASK) \ + void NAMEANY(const uint8_t* src_ptr, uint8_t* dst_r, uint8_t* dst_g, \ + uint8_t* dst_b, uint8_t* dst_a, int width) { \ + SIMD_ALIGNED(uint8_t temp[16 * 8]); \ + memset(temp, 0, 16 * 4); /* for msan */ \ + int r = width & MASK; \ + int n = width & ~MASK; \ + if (n > 0) { \ + ANY_SIMD(src_ptr, dst_r, dst_g, dst_b, dst_a, n); \ + } \ + memcpy(temp, src_ptr + n * BPP, r * BPP); \ + ANY_SIMD(temp, temp + 16 * 4, temp + 16 * 5, temp + 16 * 6, temp + 16 * 7, \ + MASK + 1); \ + memcpy(dst_r + n, temp + 16 * 4, r); \ + memcpy(dst_g + n, temp + 16 * 5, r); \ + memcpy(dst_b + n, temp + 16 * 6, r); \ + memcpy(dst_a + n, temp + 16 * 7, r); \ + } + +#ifdef HAS_SPLITARGBROW_SSE2 +ANY14(SplitARGBRow_Any_SSE2, SplitARGBRow_SSE2, 4, 7) +#endif +#ifdef HAS_SPLITARGBROW_SSSE3 +ANY14(SplitARGBRow_Any_SSSE3, SplitARGBRow_SSSE3, 4, 7) +#endif +#ifdef HAS_SPLITARGBROW_AVX2 +ANY14(SplitARGBRow_Any_AVX2, SplitARGBRow_AVX2, 4, 15) +#endif +#ifdef HAS_SPLITARGBROW_NEON +ANY14(SplitARGBRow_Any_NEON, SplitARGBRow_NEON, 4, 15) #endif // Any 1 to 2 with source stride (2 rows of source). Outputs UV planes. // 128 byte row allows for 32 avx ARGB pixels. 
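// ---------------------------------------------------------------------------
// Illustrative sketch (annotation only): a plain C reference for the kind of
// kernel the ANY14 wrappers above dispatch to, splitting interleaved ARGB into
// four planes. The name SplitARGBRow_Ref is hypothetical; the B,G,R,A byte
// order follows the little-endian ARGB layout used elsewhere in this file
// (for example, ARGBToRGB565Row_C reads B from offset 0).
#include <stdint.h>

void SplitARGBRow_Ref(const uint8_t* src_argb, uint8_t* dst_r, uint8_t* dst_g,
                      uint8_t* dst_b, uint8_t* dst_a, int width) {
  int x;
  for (x = 0; x < width; ++x) {
    dst_b[x] = src_argb[0];
    dst_g[x] = src_argb[1];
    dst_r[x] = src_argb[2];
    dst_a[x] = src_argb[3];
    src_argb += 4;
  }
}
// The ANY14 wrapper adds nothing to this logic except remainder handling:
// widths that are not a multiple of the SIMD block size are finished through
// the padded temp buffers shown in the macro above.
// ---------------------------------------------------------------------------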
#define ANY12S(NAMEANY, ANY_SIMD, UVSHIFT, BPP, MASK) \ - void NAMEANY(const uint8_t* src_ptr, int src_stride_ptr, uint8_t* dst_u, \ + void NAMEANY(const uint8_t* src_ptr, int src_stride, uint8_t* dst_u, \ uint8_t* dst_v, int width) { \ SIMD_ALIGNED(uint8_t temp[128 * 4]); \ memset(temp, 0, 128 * 2); /* for msan */ \ int r = width & MASK; \ int n = width & ~MASK; \ if (n > 0) { \ - ANY_SIMD(src_ptr, src_stride_ptr, dst_u, dst_v, n); \ + ANY_SIMD(src_ptr, src_stride, dst_u, dst_v, n); \ } \ memcpy(temp, src_ptr + (n >> UVSHIFT) * BPP, SS(r, UVSHIFT) * BPP); \ - memcpy(temp + 128, src_ptr + src_stride_ptr + (n >> UVSHIFT) * BPP, \ + memcpy(temp + 128, src_ptr + src_stride + (n >> UVSHIFT) * BPP, \ SS(r, UVSHIFT) * BPP); \ if ((width & 1) && UVSHIFT == 0) { /* repeat last pixel for subsample */ \ memcpy(temp + SS(r, UVSHIFT) * BPP, temp + SS(r, UVSHIFT) * BPP - BPP, \ @@ -1403,9 +2096,17 @@ ANY12S(ABGRToUVRow_Any_AVX2, ABGRToUVRow_AVX2, 0, 4, 31) #ifdef HAS_ARGBTOUVJROW_AVX2 ANY12S(ARGBToUVJRow_Any_AVX2, ARGBToUVJRow_AVX2, 0, 4, 31) #endif +#ifdef HAS_ABGRTOUVJROW_AVX2 +ANY12S(ABGRToUVJRow_Any_AVX2, ABGRToUVJRow_AVX2, 0, 4, 31) +#endif +#ifdef HAS_ARGBTOUVJROW_SSSE3 +ANY12S(ARGBToUVJRow_Any_SSSE3, ARGBToUVJRow_SSSE3, 0, 4, 15) +#endif +#ifdef HAS_ABGRTOUVJROW_SSSE3 +ANY12S(ABGRToUVJRow_Any_SSSE3, ABGRToUVJRow_SSSE3, 0, 4, 15) +#endif #ifdef HAS_ARGBTOUVROW_SSSE3 ANY12S(ARGBToUVRow_Any_SSSE3, ARGBToUVRow_SSSE3, 0, 4, 15) -ANY12S(ARGBToUVJRow_Any_SSSE3, ARGBToUVJRow_SSSE3, 0, 4, 15) ANY12S(BGRAToUVRow_Any_SSSE3, BGRAToUVRow_SSSE3, 0, 4, 15) ANY12S(ABGRToUVRow_Any_SSSE3, ABGRToUVRow_SSSE3, 0, 4, 15) ANY12S(RGBAToUVRow_Any_SSSE3, RGBAToUVRow_SSSE3, 0, 4, 15) @@ -1424,17 +2125,23 @@ ANY12S(ARGBToUVRow_Any_NEON, ARGBToUVRow_NEON, 0, 4, 15) #ifdef HAS_ARGBTOUVROW_MSA ANY12S(ARGBToUVRow_Any_MSA, ARGBToUVRow_MSA, 0, 4, 31) #endif -#ifdef HAS_ARGBTOUVROW_MMI -ANY12S(ARGBToUVRow_Any_MMI, ARGBToUVRow_MMI, 0, 4, 15) +#ifdef HAS_ARGBTOUVROW_LASX +ANY12S(ARGBToUVRow_Any_LASX, ARGBToUVRow_LASX, 0, 4, 31) #endif #ifdef HAS_ARGBTOUVJROW_NEON ANY12S(ARGBToUVJRow_Any_NEON, ARGBToUVJRow_NEON, 0, 4, 15) #endif +#ifdef HAS_ABGRTOUVJROW_NEON +ANY12S(ABGRToUVJRow_Any_NEON, ABGRToUVJRow_NEON, 0, 4, 15) +#endif #ifdef HAS_ARGBTOUVJROW_MSA ANY12S(ARGBToUVJRow_Any_MSA, ARGBToUVJRow_MSA, 0, 4, 31) #endif -#ifdef HAS_ARGBTOUVJROW_MMI -ANY12S(ARGBToUVJRow_Any_MMI, ARGBToUVJRow_MMI, 0, 4, 15) +#ifdef HAS_ARGBTOUVJROW_LSX +ANY12S(ARGBToUVJRow_Any_LSX, ARGBToUVJRow_LSX, 0, 4, 15) +#endif +#ifdef HAS_ARGBTOUVJROW_LASX +ANY12S(ARGBToUVJRow_Any_LASX, ARGBToUVJRow_LASX, 0, 4, 31) #endif #ifdef HAS_BGRATOUVROW_NEON ANY12S(BGRAToUVRow_Any_NEON, BGRAToUVRow_NEON, 0, 4, 15) @@ -1442,8 +2149,8 @@ ANY12S(BGRAToUVRow_Any_NEON, BGRAToUVRow_NEON, 0, 4, 15) #ifdef HAS_BGRATOUVROW_MSA ANY12S(BGRAToUVRow_Any_MSA, BGRAToUVRow_MSA, 0, 4, 15) #endif -#ifdef HAS_BGRATOUVROW_MMI -ANY12S(BGRAToUVRow_Any_MMI, BGRAToUVRow_MMI, 0, 4, 15) +#ifdef HAS_BGRATOUVROW_LSX +ANY12S(BGRAToUVRow_Any_LSX, BGRAToUVRow_LSX, 0, 4, 15) #endif #ifdef HAS_ABGRTOUVROW_NEON ANY12S(ABGRToUVRow_Any_NEON, ABGRToUVRow_NEON, 0, 4, 15) @@ -1451,8 +2158,8 @@ ANY12S(ABGRToUVRow_Any_NEON, ABGRToUVRow_NEON, 0, 4, 15) #ifdef HAS_ABGRTOUVROW_MSA ANY12S(ABGRToUVRow_Any_MSA, ABGRToUVRow_MSA, 0, 4, 15) #endif -#ifdef HAS_ABGRTOUVROW_MMI -ANY12S(ABGRToUVRow_Any_MMI, ABGRToUVRow_MMI, 0, 4, 15) +#ifdef HAS_ABGRTOUVROW_LSX +ANY12S(ABGRToUVRow_Any_LSX, ABGRToUVRow_LSX, 0, 4, 15) #endif #ifdef HAS_RGBATOUVROW_NEON ANY12S(RGBAToUVRow_Any_NEON, RGBAToUVRow_NEON, 0, 4, 15) @@ -1460,26 +2167,38 
@@ ANY12S(RGBAToUVRow_Any_NEON, RGBAToUVRow_NEON, 0, 4, 15) #ifdef HAS_RGBATOUVROW_MSA ANY12S(RGBAToUVRow_Any_MSA, RGBAToUVRow_MSA, 0, 4, 15) #endif -#ifdef HAS_RGBATOUVROW_MMI -ANY12S(RGBAToUVRow_Any_MMI, RGBAToUVRow_MMI, 0, 4, 15) +#ifdef HAS_RGBATOUVROW_LSX +ANY12S(RGBAToUVRow_Any_LSX, RGBAToUVRow_LSX, 0, 4, 15) #endif #ifdef HAS_RGB24TOUVROW_NEON ANY12S(RGB24ToUVRow_Any_NEON, RGB24ToUVRow_NEON, 0, 3, 15) #endif +#ifdef HAS_RGB24TOUVJROW_NEON +ANY12S(RGB24ToUVJRow_Any_NEON, RGB24ToUVJRow_NEON, 0, 3, 15) +#endif #ifdef HAS_RGB24TOUVROW_MSA ANY12S(RGB24ToUVRow_Any_MSA, RGB24ToUVRow_MSA, 0, 3, 15) #endif -#ifdef HAS_RGB24TOUVROW_MMI -ANY12S(RGB24ToUVRow_Any_MMI, RGB24ToUVRow_MMI, 0, 3, 15) +#ifdef HAS_RGB24TOUVROW_LSX +ANY12S(RGB24ToUVRow_Any_LSX, RGB24ToUVRow_LSX, 0, 3, 15) +#endif +#ifdef HAS_RGB24TOUVROW_LASX +ANY12S(RGB24ToUVRow_Any_LASX, RGB24ToUVRow_LASX, 0, 3, 31) #endif #ifdef HAS_RAWTOUVROW_NEON ANY12S(RAWToUVRow_Any_NEON, RAWToUVRow_NEON, 0, 3, 15) #endif +#ifdef HAS_RAWTOUVJROW_NEON +ANY12S(RAWToUVJRow_Any_NEON, RAWToUVJRow_NEON, 0, 3, 15) +#endif #ifdef HAS_RAWTOUVROW_MSA ANY12S(RAWToUVRow_Any_MSA, RAWToUVRow_MSA, 0, 3, 15) #endif -#ifdef HAS_RAWTOUVROW_MMI -ANY12S(RAWToUVRow_Any_MMI, RAWToUVRow_MMI, 0, 3, 15) +#ifdef HAS_RAWTOUVROW_LSX +ANY12S(RAWToUVRow_Any_LSX, RAWToUVRow_LSX, 0, 3, 15) +#endif +#ifdef HAS_RAWTOUVROW_LASX +ANY12S(RAWToUVRow_Any_LASX, RAWToUVRow_LASX, 0, 3, 31) #endif #ifdef HAS_RGB565TOUVROW_NEON ANY12S(RGB565ToUVRow_Any_NEON, RGB565ToUVRow_NEON, 0, 2, 15) @@ -1487,8 +2206,11 @@ ANY12S(RGB565ToUVRow_Any_NEON, RGB565ToUVRow_NEON, 0, 2, 15) #ifdef HAS_RGB565TOUVROW_MSA ANY12S(RGB565ToUVRow_Any_MSA, RGB565ToUVRow_MSA, 0, 2, 15) #endif -#ifdef HAS_RGB565TOUVROW_MMI -ANY12S(RGB565ToUVRow_Any_MMI, RGB565ToUVRow_MMI, 0, 2, 15) +#ifdef HAS_RGB565TOUVROW_LSX +ANY12S(RGB565ToUVRow_Any_LSX, RGB565ToUVRow_LSX, 0, 2, 15) +#endif +#ifdef HAS_RGB565TOUVROW_LASX +ANY12S(RGB565ToUVRow_Any_LASX, RGB565ToUVRow_LASX, 0, 2, 31) #endif #ifdef HAS_ARGB1555TOUVROW_NEON ANY12S(ARGB1555ToUVRow_Any_NEON, ARGB1555ToUVRow_NEON, 0, 2, 15) @@ -1496,15 +2218,15 @@ ANY12S(ARGB1555ToUVRow_Any_NEON, ARGB1555ToUVRow_NEON, 0, 2, 15) #ifdef HAS_ARGB1555TOUVROW_MSA ANY12S(ARGB1555ToUVRow_Any_MSA, ARGB1555ToUVRow_MSA, 0, 2, 15) #endif -#ifdef HAS_ARGB1555TOUVROW_MMI -ANY12S(ARGB1555ToUVRow_Any_MMI, ARGB1555ToUVRow_MMI, 0, 2, 15) +#ifdef HAS_ARGB1555TOUVROW_LSX +ANY12S(ARGB1555ToUVRow_Any_LSX, ARGB1555ToUVRow_LSX, 0, 2, 15) +#endif +#ifdef HAS_ARGB1555TOUVROW_LASX +ANY12S(ARGB1555ToUVRow_Any_LASX, ARGB1555ToUVRow_LASX, 0, 2, 31) #endif #ifdef HAS_ARGB4444TOUVROW_NEON ANY12S(ARGB4444ToUVRow_Any_NEON, ARGB4444ToUVRow_NEON, 0, 2, 15) #endif -#ifdef HAS_ARGB4444TOUVROW_MMI -ANY12S(ARGB4444ToUVRow_Any_MMI, ARGB4444ToUVRow_MMI, 0, 2, 15) -#endif #ifdef HAS_YUY2TOUVROW_NEON ANY12S(YUY2ToUVRow_Any_NEON, YUY2ToUVRow_NEON, 1, 4, 15) #endif @@ -1514,31 +2236,31 @@ ANY12S(UYVYToUVRow_Any_NEON, UYVYToUVRow_NEON, 1, 4, 15) #ifdef HAS_YUY2TOUVROW_MSA ANY12S(YUY2ToUVRow_Any_MSA, YUY2ToUVRow_MSA, 1, 4, 31) #endif -#ifdef HAS_YUY2TOUVROW_MMI -ANY12S(YUY2ToUVRow_Any_MMI, YUY2ToUVRow_MMI, 1, 4, 15) +#ifdef HAS_YUY2TOUVROW_LASX +ANY12S(YUY2ToUVRow_Any_LASX, YUY2ToUVRow_LASX, 1, 4, 31) #endif #ifdef HAS_UYVYTOUVROW_MSA ANY12S(UYVYToUVRow_Any_MSA, UYVYToUVRow_MSA, 1, 4, 31) #endif -#ifdef HAS_UYVYTOUVROW_MMI -ANY12S(UYVYToUVRow_Any_MMI, UYVYToUVRow_MMI, 1, 4, 15) +#ifdef HAS_UYVYTOUVROW_LASX +ANY12S(UYVYToUVRow_Any_LASX, UYVYToUVRow_LASX, 1, 4, 31) #endif #undef ANY12S // Any 1 to 1 with source stride (2 rows of source). 
Outputs UV plane. // 128 byte row allows for 32 avx ARGB pixels. #define ANY11S(NAMEANY, ANY_SIMD, UVSHIFT, BPP, MASK) \ - void NAMEANY(const uint8_t* src_ptr, int src_stride_ptr, uint8_t* dst_vu, \ + void NAMEANY(const uint8_t* src_ptr, int src_stride, uint8_t* dst_vu, \ int width) { \ SIMD_ALIGNED(uint8_t temp[128 * 3]); \ memset(temp, 0, 128 * 2); /* for msan */ \ int r = width & MASK; \ int n = width & ~MASK; \ if (n > 0) { \ - ANY_SIMD(src_ptr, src_stride_ptr, dst_vu, n); \ + ANY_SIMD(src_ptr, src_stride, dst_vu, n); \ } \ memcpy(temp, src_ptr + (n >> UVSHIFT) * BPP, SS(r, UVSHIFT) * BPP); \ - memcpy(temp + 128, src_ptr + src_stride_ptr + (n >> UVSHIFT) * BPP, \ + memcpy(temp + 128, src_ptr + src_stride + (n >> UVSHIFT) * BPP, \ SS(r, UVSHIFT) * BPP); \ if ((width & 1) && UVSHIFT == 0) { /* repeat last pixel for subsample */ \ memcpy(temp + SS(r, UVSHIFT) * BPP, temp + SS(r, UVSHIFT) * BPP - BPP, \ @@ -1556,6 +2278,86 @@ ANY11S(AYUVToVURow_Any_NEON, AYUVToVURow_NEON, 0, 4, 15) #endif #undef ANY11S +#define ANYDETILE(NAMEANY, ANY_SIMD, T, BPP, MASK) \ + void NAMEANY(const T* src, ptrdiff_t src_tile_stride, T* dst, int width) { \ + SIMD_ALIGNED(T temp[16 * 2]); \ + memset(temp, 0, 16 * BPP); /* for msan */ \ + int r = width & MASK; \ + int n = width & ~MASK; \ + if (n > 0) { \ + ANY_SIMD(src, src_tile_stride, dst, n); \ + } \ + memcpy(temp, src + (n / 16) * src_tile_stride, r * BPP); \ + ANY_SIMD(temp, src_tile_stride, temp + 16, MASK + 1); \ + memcpy(dst + n, temp + 16, r * BPP); \ + } + +#ifdef HAS_DETILEROW_NEON +ANYDETILE(DetileRow_Any_NEON, DetileRow_NEON, uint8_t, 1, 15) +#endif +#ifdef HAS_DETILEROW_SSE2 +ANYDETILE(DetileRow_Any_SSE2, DetileRow_SSE2, uint8_t, 1, 15) +#endif +#ifdef HAS_DETILEROW_16_NEON +ANYDETILE(DetileRow_16_Any_NEON, DetileRow_16_NEON, uint16_t, 2, 15) +#endif +#ifdef HAS_DETILEROW_16_SSE2 +ANYDETILE(DetileRow_16_Any_SSE2, DetileRow_16_SSE2, uint16_t, 2, 15) +#endif +#ifdef HAS_DETILEROW_16_AVX +ANYDETILE(DetileRow_16_Any_AVX, DetileRow_16_AVX, uint16_t, 2, 15) +#endif + +#define ANYDETILESPLITUV(NAMEANY, ANY_SIMD, MASK) \ + void NAMEANY(const uint8_t* src_uv, ptrdiff_t src_tile_stride, \ + uint8_t* dst_u, uint8_t* dst_v, int width) { \ + SIMD_ALIGNED(uint8_t temp[16 * 2]); \ + memset(temp, 0, 16 * 2); /* for msan */ \ + int r = width & MASK; \ + int n = width & ~MASK; \ + if (n > 0) { \ + ANY_SIMD(src_uv, src_tile_stride, dst_u, dst_v, n); \ + } \ + memcpy(temp, src_uv + (n / 16) * src_tile_stride, r); \ + ANY_SIMD(temp, src_tile_stride, temp + 16, temp + 24, r); \ + memcpy(dst_u + n / 2, temp + 16, (r + 1) / 2); \ + memcpy(dst_v + n / 2, temp + 24, (r + 1) / 2); \ + } + +#ifdef HAS_DETILESPLITUVROW_NEON +ANYDETILESPLITUV(DetileSplitUVRow_Any_NEON, DetileSplitUVRow_NEON, 15) +#endif +#ifdef HAS_DETILESPLITUVROW_SSSE3 +ANYDETILESPLITUV(DetileSplitUVRow_Any_SSSE3, DetileSplitUVRow_SSSE3, 15) +#endif + +#define ANYDETILEMERGE(NAMEANY, ANY_SIMD, MASK) \ + void NAMEANY(const uint8_t* src_y, ptrdiff_t src_y_tile_stride, \ + const uint8_t* src_uv, ptrdiff_t src_uv_tile_stride, \ + uint8_t* dst_yuy2, int width) { \ + SIMD_ALIGNED(uint8_t temp[16 * 4]); \ + memset(temp, 0, 16 * 4); /* for msan */ \ + int r = width & MASK; \ + int n = width & ~MASK; \ + if (n > 0) { \ + ANY_SIMD(src_y, src_y_tile_stride, src_uv, src_uv_tile_stride, dst_yuy2, \ + n); \ + } \ + memcpy(temp, src_y + (n / 16) * src_y_tile_stride, r); \ + memcpy(temp + 16, src_uv + (n / 16) * src_uv_tile_stride, r); \ + ANY_SIMD(temp, src_y_tile_stride, temp + 16, src_uv_tile_stride, \ + temp + 32, r); \ + 
memcpy(dst_yuy2 + 2 * n, temp + 32, 2 * r); \ + } + +#ifdef HAS_DETILETOYUY2_NEON +ANYDETILEMERGE(DetileToYUY2_Any_NEON, DetileToYUY2_NEON, 15) +#endif + +#ifdef HAS_DETILETOYUY2_SSE2 +ANYDETILEMERGE(DetileToYUY2_Any_SSE2, DetileToYUY2_SSE2, 15) +#endif + #ifdef __cplusplus } // extern "C" } // namespace libyuv diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_common.cc b/TMessagesProj/jni/third_party/libyuv/source/row_common.cc index 79aed5c787..84afd35ba4 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_common.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/row_common.cc @@ -10,7 +10,7 @@ #include "libyuv/row.h" -#include +#include #include // For memcpy and memset. #include "libyuv/basic_types.h" @@ -21,17 +21,37 @@ namespace libyuv { extern "C" { #endif -// The following ifdef from row_win makes the C code match the row_win code, -// which is 7 bit fixed point. -#if !defined(LIBYUV_DISABLE_X86) && defined(_MSC_VER) && \ - (defined(_M_IX86) || (defined(_M_X64) && !defined(__clang__))) +#ifdef __cplusplus +#define STATIC_CAST(type, expr) static_cast(expr) +#else +#define STATIC_CAST(type, expr) (type)(expr) +#endif + +// This macro controls YUV to RGB using unsigned math to extend range of +// YUV to RGB coefficients to 0 to 4 instead of 0 to 2 for more accuracy on B: +// LIBYUV_UNLIMITED_DATA + +// Macros to enable unlimited data for each colorspace +// LIBYUV_UNLIMITED_BT601 +// LIBYUV_UNLIMITED_BT709 +// LIBYUV_UNLIMITED_BT2020 + +// The following macro from row_win makes the C code match the row_win code, +// which is 7 bit fixed point for ARGBToI420: +#if !defined(LIBYUV_BIT_EXACT) && !defined(LIBYUV_DISABLE_X86) && \ + defined(_MSC_VER) && !defined(__clang__) && \ + (defined(_M_IX86) || defined(_M_X64)) #define LIBYUV_RGB7 1 #endif -#if defined(__x86_64__) || defined(_M_X64) || defined(__i386__) || \ - defined(_M_IX86) +#if !defined(LIBYUV_BIT_EXACT) && (defined(__x86_64__) || defined(_M_X64) || \ + defined(__i386__) || defined(_M_IX86)) #define LIBYUV_ARGBTOUV_PAVGB 1 #define LIBYUV_RGBTOU_TRUNCATE 1 +#define LIBYUV_ATTENUATE_DUP 1 +#endif +#if defined(LIBYUV_BIT_EXACT) +#define LIBYUV_UNATTENUATE_DUP 1 #endif // llvm x86 is poor at ternary operator, so use branchless min/max. @@ -50,6 +70,11 @@ static __inline int32_t clamp1023(int32_t v) { return (-(v >= 1023) | v) & 1023; } +// clamp to max +static __inline int32_t ClampMax(int32_t v, int32_t max) { + return (-(v >= max) | v) & max; +} + static __inline uint32_t Abs(int32_t v) { int m = -(v < 0); return (v + m) ^ m; @@ -67,6 +92,10 @@ static __inline int32_t clamp1023(int32_t v) { return (v > 1023) ? 1023 : v; } +static __inline int32_t ClampMax(int32_t v, int32_t max) { + return (v > max) ? max : v; +} + static __inline uint32_t Abs(int32_t v) { return (v < 0) ? 
-v : v; } @@ -159,12 +188,13 @@ void RGB565ToARGBRow_C(const uint8_t* src_rgb565, int width) { int x; for (x = 0; x < width; ++x) { - uint8_t b = src_rgb565[0] & 0x1f; - uint8_t g = (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3); - uint8_t r = src_rgb565[1] >> 3; - dst_argb[0] = (b << 3) | (b >> 2); - dst_argb[1] = (g << 2) | (g >> 4); - dst_argb[2] = (r << 3) | (r >> 2); + uint8_t b = STATIC_CAST(uint8_t, src_rgb565[0] & 0x1f); + uint8_t g = STATIC_CAST( + uint8_t, (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3)); + uint8_t r = STATIC_CAST(uint8_t, src_rgb565[1] >> 3); + dst_argb[0] = STATIC_CAST(uint8_t, (b << 3) | (b >> 2)); + dst_argb[1] = STATIC_CAST(uint8_t, (g << 2) | (g >> 4)); + dst_argb[2] = STATIC_CAST(uint8_t, (r << 3) | (r >> 2)); dst_argb[3] = 255u; dst_argb += 4; src_rgb565 += 2; @@ -176,13 +206,14 @@ void ARGB1555ToARGBRow_C(const uint8_t* src_argb1555, int width) { int x; for (x = 0; x < width; ++x) { - uint8_t b = src_argb1555[0] & 0x1f; - uint8_t g = (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3); - uint8_t r = (src_argb1555[1] & 0x7c) >> 2; - uint8_t a = src_argb1555[1] >> 7; - dst_argb[0] = (b << 3) | (b >> 2); - dst_argb[1] = (g << 3) | (g >> 2); - dst_argb[2] = (r << 3) | (r >> 2); + uint8_t b = STATIC_CAST(uint8_t, src_argb1555[0] & 0x1f); + uint8_t g = STATIC_CAST( + uint8_t, (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3)); + uint8_t r = STATIC_CAST(uint8_t, (src_argb1555[1] & 0x7c) >> 2); + uint8_t a = STATIC_CAST(uint8_t, src_argb1555[1] >> 7); + dst_argb[0] = STATIC_CAST(uint8_t, (b << 3) | (b >> 2)); + dst_argb[1] = STATIC_CAST(uint8_t, (g << 3) | (g >> 2)); + dst_argb[2] = STATIC_CAST(uint8_t, (r << 3) | (r >> 2)); dst_argb[3] = -a; dst_argb += 4; src_argb1555 += 2; @@ -194,14 +225,14 @@ void ARGB4444ToARGBRow_C(const uint8_t* src_argb4444, int width) { int x; for (x = 0; x < width; ++x) { - uint8_t b = src_argb4444[0] & 0x0f; - uint8_t g = src_argb4444[0] >> 4; - uint8_t r = src_argb4444[1] & 0x0f; - uint8_t a = src_argb4444[1] >> 4; - dst_argb[0] = (b << 4) | b; - dst_argb[1] = (g << 4) | g; - dst_argb[2] = (r << 4) | r; - dst_argb[3] = (a << 4) | a; + uint8_t b = STATIC_CAST(uint8_t, src_argb4444[0] & 0x0f); + uint8_t g = STATIC_CAST(uint8_t, src_argb4444[0] >> 4); + uint8_t r = STATIC_CAST(uint8_t, src_argb4444[1] & 0x0f); + uint8_t a = STATIC_CAST(uint8_t, src_argb4444[1] >> 4); + dst_argb[0] = STATIC_CAST(uint8_t, (b << 4) | b); + dst_argb[1] = STATIC_CAST(uint8_t, (g << 4) | g); + dst_argb[2] = STATIC_CAST(uint8_t, (r << 4) | r); + dst_argb[3] = STATIC_CAST(uint8_t, (a << 4) | a); dst_argb += 4; src_argb4444 += 2; } @@ -297,7 +328,7 @@ void ARGBToRGB565Row_C(const uint8_t* src_argb, uint8_t* dst_rgb, int width) { uint8_t b0 = src_argb[0] >> 3; uint8_t g0 = src_argb[1] >> 2; uint8_t r0 = src_argb[2] >> 3; - *(uint16_t*)(dst_rgb) = b0 | (g0 << 5) | (r0 << 11); + *(uint16_t*)(dst_rgb) = STATIC_CAST(uint16_t, b0 | (g0 << 5) | (r0 << 11)); } } @@ -317,23 +348,25 @@ void ARGBToRGB565DitherRow_C(const uint8_t* src_argb, for (x = 0; x < width - 1; x += 2) { int dither0 = ((const unsigned char*)(&dither4))[x & 3]; int dither1 = ((const unsigned char*)(&dither4))[(x + 1) & 3]; - uint8_t b0 = clamp255(src_argb[0] + dither0) >> 3; - uint8_t g0 = clamp255(src_argb[1] + dither0) >> 2; - uint8_t r0 = clamp255(src_argb[2] + dither0) >> 3; - uint8_t b1 = clamp255(src_argb[4] + dither1) >> 3; - uint8_t g1 = clamp255(src_argb[5] + dither1) >> 2; - uint8_t r1 = clamp255(src_argb[6] + dither1) >> 3; - WRITEWORD(dst_rgb, b0 | (g0 << 5) | (r0 << 
11) | (b1 << 16) | (g1 << 21) | - (r1 << 27)); + uint8_t b0 = STATIC_CAST(uint8_t, clamp255(src_argb[0] + dither0) >> 3); + uint8_t g0 = STATIC_CAST(uint8_t, clamp255(src_argb[1] + dither0) >> 2); + uint8_t r0 = STATIC_CAST(uint8_t, clamp255(src_argb[2] + dither0) >> 3); + uint8_t b1 = STATIC_CAST(uint8_t, clamp255(src_argb[4] + dither1) >> 3); + uint8_t g1 = STATIC_CAST(uint8_t, clamp255(src_argb[5] + dither1) >> 2); + uint8_t r1 = STATIC_CAST(uint8_t, clamp255(src_argb[6] + dither1) >> 3); + *(uint16_t*)(dst_rgb + 0) = + STATIC_CAST(uint16_t, b0 | (g0 << 5) | (r0 << 11)); + *(uint16_t*)(dst_rgb + 2) = + STATIC_CAST(uint16_t, b1 | (g1 << 5) | (r1 << 11)); dst_rgb += 4; src_argb += 8; } if (width & 1) { int dither0 = ((const unsigned char*)(&dither4))[(width - 1) & 3]; - uint8_t b0 = clamp255(src_argb[0] + dither0) >> 3; - uint8_t g0 = clamp255(src_argb[1] + dither0) >> 2; - uint8_t r0 = clamp255(src_argb[2] + dither0) >> 3; - *(uint16_t*)(dst_rgb) = b0 | (g0 << 5) | (r0 << 11); + uint8_t b0 = STATIC_CAST(uint8_t, clamp255(src_argb[0] + dither0) >> 3); + uint8_t g0 = STATIC_CAST(uint8_t, clamp255(src_argb[1] + dither0) >> 2); + uint8_t r0 = STATIC_CAST(uint8_t, clamp255(src_argb[2] + dither0) >> 3); + *(uint16_t*)(dst_rgb) = STATIC_CAST(uint16_t, b0 | (g0 << 5) | (r0 << 11)); } } @@ -348,8 +381,10 @@ void ARGBToARGB1555Row_C(const uint8_t* src_argb, uint8_t* dst_rgb, int width) { uint8_t g1 = src_argb[5] >> 3; uint8_t r1 = src_argb[6] >> 3; uint8_t a1 = src_argb[7] >> 7; - *(uint32_t*)(dst_rgb) = b0 | (g0 << 5) | (r0 << 10) | (a0 << 15) | - (b1 << 16) | (g1 << 21) | (r1 << 26) | (a1 << 31); + *(uint16_t*)(dst_rgb + 0) = + STATIC_CAST(uint16_t, b0 | (g0 << 5) | (r0 << 10) | (a0 << 15)); + *(uint16_t*)(dst_rgb + 2) = + STATIC_CAST(uint16_t, b1 | (g1 << 5) | (r1 << 10) | (a1 << 15)); dst_rgb += 4; src_argb += 8; } @@ -358,7 +393,8 @@ void ARGBToARGB1555Row_C(const uint8_t* src_argb, uint8_t* dst_rgb, int width) { uint8_t g0 = src_argb[1] >> 3; uint8_t r0 = src_argb[2] >> 3; uint8_t a0 = src_argb[3] >> 7; - *(uint16_t*)(dst_rgb) = b0 | (g0 << 5) | (r0 << 10) | (a0 << 15); + *(uint16_t*)(dst_rgb) = + STATIC_CAST(uint16_t, b0 | (g0 << 5) | (r0 << 10) | (a0 << 15)); } } @@ -373,8 +409,10 @@ void ARGBToARGB4444Row_C(const uint8_t* src_argb, uint8_t* dst_rgb, int width) { uint8_t g1 = src_argb[5] >> 4; uint8_t r1 = src_argb[6] >> 4; uint8_t a1 = src_argb[7] >> 4; - *(uint32_t*)(dst_rgb) = b0 | (g0 << 4) | (r0 << 8) | (a0 << 12) | - (b1 << 16) | (g1 << 20) | (r1 << 24) | (a1 << 28); + *(uint16_t*)(dst_rgb + 0) = + STATIC_CAST(uint16_t, b0 | (g0 << 4) | (r0 << 8) | (a0 << 12)); + *(uint16_t*)(dst_rgb + 2) = + STATIC_CAST(uint16_t, b1 | (g1 << 4) | (r1 << 8) | (a1 << 12)); dst_rgb += 4; src_argb += 8; } @@ -383,7 +421,8 @@ void ARGBToARGB4444Row_C(const uint8_t* src_argb, uint8_t* dst_rgb, int width) { uint8_t g0 = src_argb[1] >> 4; uint8_t r0 = src_argb[2] >> 4; uint8_t a0 = src_argb[3] >> 4; - *(uint16_t*)(dst_rgb) = b0 | (g0 << 4) | (r0 << 8) | (a0 << 12); + *(uint16_t*)(dst_rgb) = + STATIC_CAST(uint16_t, b0 | (g0 << 4) | (r0 << 8) | (a0 << 12)); } } @@ -394,7 +433,8 @@ void ABGRToAR30Row_C(const uint8_t* src_abgr, uint8_t* dst_ar30, int width) { uint32_t g0 = (src_abgr[1] >> 6) | ((uint32_t)(src_abgr[1]) << 2); uint32_t r0 = (src_abgr[2] >> 6) | ((uint32_t)(src_abgr[2]) << 2); uint32_t a0 = (src_abgr[3] >> 6); - *(uint32_t*)(dst_ar30) = r0 | (g0 << 10) | (b0 << 20) | (a0 << 30); + *(uint32_t*)(dst_ar30) = + STATIC_CAST(uint32_t, r0 | (g0 << 10) | (b0 << 20) | (a0 << 30)); dst_ar30 += 4; src_abgr 
+= 4; } @@ -407,16 +447,109 @@ void ARGBToAR30Row_C(const uint8_t* src_argb, uint8_t* dst_ar30, int width) { uint32_t g0 = (src_argb[1] >> 6) | ((uint32_t)(src_argb[1]) << 2); uint32_t r0 = (src_argb[2] >> 6) | ((uint32_t)(src_argb[2]) << 2); uint32_t a0 = (src_argb[3] >> 6); - *(uint32_t*)(dst_ar30) = b0 | (g0 << 10) | (r0 << 20) | (a0 << 30); + *(uint32_t*)(dst_ar30) = + STATIC_CAST(uint32_t, b0 | (g0 << 10) | (r0 << 20) | (a0 << 30)); dst_ar30 += 4; src_argb += 4; } } +void ARGBToAR64Row_C(const uint8_t* src_argb, uint16_t* dst_ar64, int width) { + int x; + for (x = 0; x < width; ++x) { + uint16_t b = src_argb[0] * 0x0101; + uint16_t g = src_argb[1] * 0x0101; + uint16_t r = src_argb[2] * 0x0101; + uint16_t a = src_argb[3] * 0x0101; + dst_ar64[0] = b; + dst_ar64[1] = g; + dst_ar64[2] = r; + dst_ar64[3] = a; + dst_ar64 += 4; + src_argb += 4; + } +} + +void ARGBToAB64Row_C(const uint8_t* src_argb, uint16_t* dst_ab64, int width) { + int x; + for (x = 0; x < width; ++x) { + uint16_t b = src_argb[0] * 0x0101; + uint16_t g = src_argb[1] * 0x0101; + uint16_t r = src_argb[2] * 0x0101; + uint16_t a = src_argb[3] * 0x0101; + dst_ab64[0] = r; + dst_ab64[1] = g; + dst_ab64[2] = b; + dst_ab64[3] = a; + dst_ab64 += 4; + src_argb += 4; + } +} + +void AR64ToARGBRow_C(const uint16_t* src_ar64, uint8_t* dst_argb, int width) { + int x; + for (x = 0; x < width; ++x) { + uint8_t b = src_ar64[0] >> 8; + uint8_t g = src_ar64[1] >> 8; + uint8_t r = src_ar64[2] >> 8; + uint8_t a = src_ar64[3] >> 8; + dst_argb[0] = b; + dst_argb[1] = g; + dst_argb[2] = r; + dst_argb[3] = a; + dst_argb += 4; + src_ar64 += 4; + } +} + +void AB64ToARGBRow_C(const uint16_t* src_ab64, uint8_t* dst_argb, int width) { + int x; + for (x = 0; x < width; ++x) { + uint8_t r = src_ab64[0] >> 8; + uint8_t g = src_ab64[1] >> 8; + uint8_t b = src_ab64[2] >> 8; + uint8_t a = src_ab64[3] >> 8; + dst_argb[0] = b; + dst_argb[1] = g; + dst_argb[2] = r; + dst_argb[3] = a; + dst_argb += 4; + src_ab64 += 4; + } +} + +// TODO(fbarchard): Make shuffle compatible with SIMD versions +void AR64ShuffleRow_C(const uint8_t* src_ar64, + uint8_t* dst_ar64, + const uint8_t* shuffler, + int width) { + const uint16_t* src_ar64_16 = (const uint16_t*)src_ar64; + uint16_t* dst_ar64_16 = (uint16_t*)dst_ar64; + int index0 = shuffler[0] / 2; + int index1 = shuffler[2] / 2; + int index2 = shuffler[4] / 2; + int index3 = shuffler[6] / 2; + // Shuffle a row of AR64. + int x; + for (x = 0; x < width / 2; ++x) { + // To support in-place conversion. + uint16_t b = src_ar64_16[index0]; + uint16_t g = src_ar64_16[index1]; + uint16_t r = src_ar64_16[index2]; + uint16_t a = src_ar64_16[index3]; + dst_ar64_16[0] = b; + dst_ar64_16[1] = g; + dst_ar64_16[2] = r; + dst_ar64_16[3] = a; + src_ar64_16 += 4; + dst_ar64_16 += 4; + } +} + #ifdef LIBYUV_RGB7 // Old 7 bit math for compatibility on unsupported platforms. 
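// ---------------------------------------------------------------------------
// Illustrative sketch (annotation only) of the scaling used by the 8<->16 bit
// conversions just above: ARGBToAR64Row_C widens with v * 0x0101, which
// replicates the byte into both halves (0xFF -> 0xFFFF), and AR64ToARGBRow_C
// narrows by keeping the high byte. Widen8To16 and Narrow16To8 are
// hypothetical helper names used only for this demonstration.
#include <assert.h>
#include <stdint.h>

static uint16_t Widen8To16(uint8_t v) {
  return (uint16_t)(v * 0x0101); /* 8-bit value spread over 16 bits */
}

static uint8_t Narrow16To8(uint16_t v) {
  return (uint8_t)(v >> 8); /* keep the high byte */
}

int main(void) {
  int v;
  /* the round trip is lossless for every 8-bit value */
  for (v = 0; v < 256; ++v) {
    assert(Narrow16To8(Widen8To16((uint8_t)v)) == (uint8_t)v);
  }
  assert(Widen8To16(255) == 65535);
  return 0;
}
// ---------------------------------------------------------------------------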
-static __inline int RGBToY(uint8_t r, uint8_t g, uint8_t b) { - return ((33 * r + 65 * g + 13 * b) >> 7) + 16; +static __inline uint8_t RGBToY(uint8_t r, uint8_t g, uint8_t b) { + return STATIC_CAST(uint8_t, ((33 * r + 65 * g + 13 * b) >> 7) + 16); } #else // 8 bit @@ -425,117 +558,120 @@ static __inline int RGBToY(uint8_t r, uint8_t g, uint8_t b) { // return (66 * ((int)r - 128) + 129 * ((int)g - 128) + 25 * ((int)b - 128) + // 0x7e80) >> 8; -static __inline int RGBToY(uint8_t r, uint8_t g, uint8_t b) { - return (66 * r + 129 * g + 25 * b + 0x1080) >> 8; +static __inline uint8_t RGBToY(uint8_t r, uint8_t g, uint8_t b) { + return STATIC_CAST(uint8_t, (66 * r + 129 * g + 25 * b + 0x1080) >> 8); } #endif #define AVGB(a, b) (((a) + (b) + 1) >> 1) +// LIBYUV_RGBTOU_TRUNCATE mimics x86 code that does not round. #ifdef LIBYUV_RGBTOU_TRUNCATE -static __inline int RGBToU(uint8_t r, uint8_t g, uint8_t b) { - return (112 * b - 74 * g - 38 * r + 0x8000) >> 8; +static __inline uint8_t RGBToU(uint8_t r, uint8_t g, uint8_t b) { + return STATIC_CAST(uint8_t, (112 * b - 74 * g - 38 * r + 0x8000) >> 8); } -static __inline int RGBToV(uint8_t r, uint8_t g, uint8_t b) { - return (112 * r - 94 * g - 18 * b + 0x8000) >> 8; +static __inline uint8_t RGBToV(uint8_t r, uint8_t g, uint8_t b) { + return STATIC_CAST(uint8_t, (112 * r - 94 * g - 18 * b + 0x8000) >> 8); } #else -// TODO(fbarchard): Add rounding to SIMD and use this -static __inline int RGBToU(uint8_t r, uint8_t g, uint8_t b) { - return (112 * b - 74 * g - 38 * r + 0x8080) >> 8; +// TODO(fbarchard): Add rounding to x86 SIMD and use this +static __inline uint8_t RGBToU(uint8_t r, uint8_t g, uint8_t b) { + return STATIC_CAST(uint8_t, (112 * b - 74 * g - 38 * r + 0x8080) >> 8); } -static __inline int RGBToV(uint8_t r, uint8_t g, uint8_t b) { - return (112 * r - 94 * g - 18 * b + 0x8080) >> 8; +static __inline uint8_t RGBToV(uint8_t r, uint8_t g, uint8_t b) { + return STATIC_CAST(uint8_t, (112 * r - 94 * g - 18 * b + 0x8080) >> 8); } #endif +// LIBYUV_ARGBTOUV_PAVGB mimics x86 code that subsamples with 2 pavgb. 
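// ---------------------------------------------------------------------------
// Illustrative sketch (annotation only) of the two subsampling strategies the
// comment above refers to, applied to one 2x2 block: the x86 path averages
// pixels twice with pavgb-style AVGB() and then applies the full coefficients,
// while the ARM path keeps a rounded sum/2 (twice one pixel value) and applies
// the halved RGB2xTo* coefficients. Both use the rounded U formula shown above
// ((112*b - 74*g - 38*r + 0x8080) >> 8). Names and sample values are made up.
#include <stdint.h>
#include <stdio.h>

#define AVGB(a, b) (((a) + (b) + 1) >> 1)

static uint8_t RGBToU_Ref(uint8_t r, uint8_t g, uint8_t b) {
  return (uint8_t)((112 * b - 74 * g - 38 * r + 0x8080) >> 8);
}

static uint8_t RGB2xToU_Ref(uint16_t r, uint16_t g, uint16_t b) {
  /* inputs are 2x a pixel value, so the coefficients are halved */
  return (uint8_t)((56 * b - 37 * g - 19 * r + 0x8080) >> 8);
}

int main(void) {
  /* one 2x2 block: indices 0,1 are the top row, 2,3 the bottom row */
  uint8_t b[4] = {10, 12, 200, 202};
  uint8_t g[4] = {20, 22, 100, 102};
  uint8_t r[4] = {30, 32, 50, 52};
  /* x86 path: two rounds of byte averaging, then full coefficients */
  uint8_t ab = AVGB(AVGB(b[0], b[2]), AVGB(b[1], b[3]));
  uint8_t ag = AVGB(AVGB(g[0], g[2]), AVGB(g[1], g[3]));
  uint8_t ar = AVGB(AVGB(r[0], r[2]), AVGB(r[1], r[3]));
  /* ARM path: rounded sum of the four samples divided by 2 */
  uint16_t sb = (uint16_t)((b[0] + b[1] + b[2] + b[3] + 1) >> 1);
  uint16_t sg = (uint16_t)((g[0] + g[1] + g[2] + g[3] + 1) >> 1);
  uint16_t sr = (uint16_t)((r[0] + r[1] + r[2] + r[3] + 1) >> 1);
  printf("pavgb-style U = %u, sum/2-style U = %u\n",
         (unsigned)RGBToU_Ref(ar, ag, ab), (unsigned)RGB2xToU_Ref(sr, sg, sb));
  return 0;
}
// ---------------------------------------------------------------------------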
#if !defined(LIBYUV_ARGBTOUV_PAVGB) static __inline int RGB2xToU(uint16_t r, uint16_t g, uint16_t b) { - return ((112 / 2) * b - (74 / 2) * g - (38 / 2) * r + 0x8080) >> 8; + return STATIC_CAST( + uint8_t, ((112 / 2) * b - (74 / 2) * g - (38 / 2) * r + 0x8080) >> 8); } static __inline int RGB2xToV(uint16_t r, uint16_t g, uint16_t b) { - return ((112 / 2) * r - (94 / 2) * g - (18 / 2) * b + 0x8080) >> 8; + return STATIC_CAST( + uint8_t, ((112 / 2) * r - (94 / 2) * g - (18 / 2) * b + 0x8080) >> 8); } #endif // ARGBToY_C and ARGBToUV_C // Intel version mimic SSE/AVX which does 2 pavgb #if LIBYUV_ARGBTOUV_PAVGB - -#define MAKEROWY(NAME, R, G, B, BPP) \ - void NAME##ToYRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width) { \ - int x; \ - for (x = 0; x < width; ++x) { \ - dst_y[0] = RGBToY(src_argb0[R], src_argb0[G], src_argb0[B]); \ - src_argb0 += BPP; \ - dst_y += 1; \ - } \ - } \ - void NAME##ToUVRow_C(const uint8_t* src_rgb0, int src_stride_rgb, \ - uint8_t* dst_u, uint8_t* dst_v, int width) { \ - const uint8_t* src_rgb1 = src_rgb0 + src_stride_rgb; \ - int x; \ - for (x = 0; x < width - 1; x += 2) { \ - uint8_t ab = AVGB(AVGB(src_rgb0[B], src_rgb1[B]), \ - AVGB(src_rgb0[B + BPP], src_rgb1[B + BPP])); \ - uint8_t ag = AVGB(AVGB(src_rgb0[G], src_rgb1[G]), \ - AVGB(src_rgb0[G + BPP], src_rgb1[G + BPP])); \ - uint8_t ar = AVGB(AVGB(src_rgb0[R], src_rgb1[R]), \ - AVGB(src_rgb0[R + BPP], src_rgb1[R + BPP])); \ - dst_u[0] = RGBToU(ar, ag, ab); \ - dst_v[0] = RGBToV(ar, ag, ab); \ - src_rgb0 += BPP * 2; \ - src_rgb1 += BPP * 2; \ - dst_u += 1; \ - dst_v += 1; \ - } \ - if (width & 1) { \ - uint8_t ab = AVGB(src_rgb0[B], src_rgb1[B]); \ - uint8_t ag = AVGB(src_rgb0[G], src_rgb1[G]); \ - uint8_t ar = AVGB(src_rgb0[R], src_rgb1[R]); \ - dst_u[0] = RGBToU(ar, ag, ab); \ - dst_v[0] = RGBToV(ar, ag, ab); \ - } \ +#define MAKEROWY(NAME, R, G, B, BPP) \ + void NAME##ToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width) { \ + int x; \ + for (x = 0; x < width; ++x) { \ + dst_y[0] = RGBToY(src_rgb[R], src_rgb[G], src_rgb[B]); \ + src_rgb += BPP; \ + dst_y += 1; \ + } \ + } \ + void NAME##ToUVRow_C(const uint8_t* src_rgb, int src_stride_rgb, \ + uint8_t* dst_u, uint8_t* dst_v, int width) { \ + const uint8_t* src_rgb1 = src_rgb + src_stride_rgb; \ + int x; \ + for (x = 0; x < width - 1; x += 2) { \ + uint8_t ab = AVGB(AVGB(src_rgb[B], src_rgb1[B]), \ + AVGB(src_rgb[B + BPP], src_rgb1[B + BPP])); \ + uint8_t ag = AVGB(AVGB(src_rgb[G], src_rgb1[G]), \ + AVGB(src_rgb[G + BPP], src_rgb1[G + BPP])); \ + uint8_t ar = AVGB(AVGB(src_rgb[R], src_rgb1[R]), \ + AVGB(src_rgb[R + BPP], src_rgb1[R + BPP])); \ + dst_u[0] = RGBToU(ar, ag, ab); \ + dst_v[0] = RGBToV(ar, ag, ab); \ + src_rgb += BPP * 2; \ + src_rgb1 += BPP * 2; \ + dst_u += 1; \ + dst_v += 1; \ + } \ + if (width & 1) { \ + uint8_t ab = AVGB(src_rgb[B], src_rgb1[B]); \ + uint8_t ag = AVGB(src_rgb[G], src_rgb1[G]); \ + uint8_t ar = AVGB(src_rgb[R], src_rgb1[R]); \ + dst_u[0] = RGBToU(ar, ag, ab); \ + dst_v[0] = RGBToV(ar, ag, ab); \ + } \ } #else // ARM version does sum / 2 then multiply by 2x smaller coefficients -#define MAKEROWY(NAME, R, G, B, BPP) \ - void NAME##ToYRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width) { \ - int x; \ - for (x = 0; x < width; ++x) { \ - dst_y[0] = RGBToY(src_argb0[R], src_argb0[G], src_argb0[B]); \ - src_argb0 += BPP; \ - dst_y += 1; \ - } \ - } \ - void NAME##ToUVRow_C(const uint8_t* src_rgb0, int src_stride_rgb, \ - uint8_t* dst_u, uint8_t* dst_v, int width) { \ - const uint8_t* src_rgb1 = src_rgb0 + 
src_stride_rgb; \ - int x; \ - for (x = 0; x < width - 1; x += 2) { \ - uint16_t ab = (src_rgb0[B] + src_rgb0[B + BPP] + src_rgb1[B] + \ - src_rgb1[B + BPP] + 1) >> \ - 1; \ - uint16_t ag = (src_rgb0[G] + src_rgb0[G + BPP] + src_rgb1[G] + \ - src_rgb1[G + BPP] + 1) >> \ - 1; \ - uint16_t ar = (src_rgb0[R] + src_rgb0[R + BPP] + src_rgb1[R] + \ - src_rgb1[R + BPP] + 1) >> \ - 1; \ - dst_u[0] = RGB2xToU(ar, ag, ab); \ - dst_v[0] = RGB2xToV(ar, ag, ab); \ - src_rgb0 += BPP * 2; \ - src_rgb1 += BPP * 2; \ - dst_u += 1; \ - dst_v += 1; \ - } \ - if (width & 1) { \ - uint16_t ab = src_rgb0[B] + src_rgb1[B]; \ - uint16_t ag = src_rgb0[G] + src_rgb1[G]; \ - uint16_t ar = src_rgb0[R] + src_rgb1[R]; \ - dst_u[0] = RGB2xToU(ar, ag, ab); \ - dst_v[0] = RGB2xToV(ar, ag, ab); \ - } \ +#define MAKEROWY(NAME, R, G, B, BPP) \ + void NAME##ToYRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width) { \ + int x; \ + for (x = 0; x < width; ++x) { \ + dst_y[0] = RGBToY(src_rgb[R], src_rgb[G], src_rgb[B]); \ + src_rgb += BPP; \ + dst_y += 1; \ + } \ + } \ + void NAME##ToUVRow_C(const uint8_t* src_rgb, int src_stride_rgb, \ + uint8_t* dst_u, uint8_t* dst_v, int width) { \ + const uint8_t* src_rgb1 = src_rgb + src_stride_rgb; \ + int x; \ + for (x = 0; x < width - 1; x += 2) { \ + uint16_t ab = (src_rgb[B] + src_rgb[B + BPP] + src_rgb1[B] + \ + src_rgb1[B + BPP] + 1) >> \ + 1; \ + uint16_t ag = (src_rgb[G] + src_rgb[G + BPP] + src_rgb1[G] + \ + src_rgb1[G + BPP] + 1) >> \ + 1; \ + uint16_t ar = (src_rgb[R] + src_rgb[R + BPP] + src_rgb1[R] + \ + src_rgb1[R + BPP] + 1) >> \ + 1; \ + dst_u[0] = RGB2xToU(ar, ag, ab); \ + dst_v[0] = RGB2xToV(ar, ag, ab); \ + src_rgb += BPP * 2; \ + src_rgb1 += BPP * 2; \ + dst_u += 1; \ + dst_v += 1; \ + } \ + if (width & 1) { \ + uint16_t ab = src_rgb[B] + src_rgb1[B]; \ + uint16_t ag = src_rgb[G] + src_rgb1[G]; \ + uint16_t ar = src_rgb[R] + src_rgb1[R]; \ + dst_u[0] = RGB2xToU(ar, ag, ab); \ + dst_v[0] = RGB2xToV(ar, ag, ab); \ + } \ } #endif @@ -574,28 +710,28 @@ MAKEROWY(RAW, 0, 1, 2, 3) #ifdef LIBYUV_RGB7 // Old 7 bit math for compatibility on unsupported platforms. 
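// ---------------------------------------------------------------------------
// Illustrative sketch (annotation only): the difference between the limited
// range RGBToY used above and the full-range RGBToYJ defined next, using the
// 8-bit formulas from this file. ToY_Limited and ToY_Full are hypothetical
// names for this demonstration.
#include <stdint.h>
#include <stdio.h>

static uint8_t ToY_Limited(uint8_t r, uint8_t g, uint8_t b) {
  /* BT.601 limited range: black maps to 16, white to 235 */
  return (uint8_t)((66 * r + 129 * g + 25 * b + 0x1080) >> 8);
}

static uint8_t ToY_Full(uint8_t r, uint8_t g, uint8_t b) {
  /* "J" (full range) variant: black maps to 0, white to 255 */
  return (uint8_t)((77 * r + 150 * g + 29 * b + 128) >> 8);
}

int main(void) {
  printf("black: limited=%u full=%u\n", (unsigned)ToY_Limited(0, 0, 0),
         (unsigned)ToY_Full(0, 0, 0)); /* 16 vs 0 */
  printf("white: limited=%u full=%u\n", (unsigned)ToY_Limited(255, 255, 255),
         (unsigned)ToY_Full(255, 255, 255)); /* 235 vs 255 */
  return 0;
}
// ---------------------------------------------------------------------------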
-static __inline int RGBToYJ(uint8_t r, uint8_t g, uint8_t b) { +static __inline uint8_t RGBToYJ(uint8_t r, uint8_t g, uint8_t b) { return (38 * r + 75 * g + 15 * b + 64) >> 7; } #else // 8 bit -static __inline int RGBToYJ(uint8_t r, uint8_t g, uint8_t b) { +static __inline uint8_t RGBToYJ(uint8_t r, uint8_t g, uint8_t b) { return (77 * r + 150 * g + 29 * b + 128) >> 8; } #endif #if defined(LIBYUV_ARGBTOUV_PAVGB) -static __inline int RGBToUJ(uint8_t r, uint8_t g, uint8_t b) { +static __inline uint8_t RGBToUJ(uint8_t r, uint8_t g, uint8_t b) { return (127 * b - 84 * g - 43 * r + 0x8080) >> 8; } -static __inline int RGBToVJ(uint8_t r, uint8_t g, uint8_t b) { +static __inline uint8_t RGBToVJ(uint8_t r, uint8_t g, uint8_t b) { return (127 * r - 107 * g - 20 * b + 0x8080) >> 8; } #else -static __inline int RGB2xToUJ(uint16_t r, uint16_t g, uint16_t b) { +static __inline uint8_t RGB2xToUJ(uint16_t r, uint16_t g, uint16_t b) { return ((127 / 2) * b - (84 / 2) * g - (43 / 2) * r + 0x8080) >> 8; } -static __inline int RGB2xToVJ(uint16_t r, uint16_t g, uint16_t b) { +static __inline uint8_t RGB2xToVJ(uint16_t r, uint16_t g, uint16_t b) { return ((127 / 2) * r - (107 / 2) * g - (20 / 2) * b + 0x8080) >> 8; } #endif @@ -603,85 +739,86 @@ static __inline int RGB2xToVJ(uint16_t r, uint16_t g, uint16_t b) { // ARGBToYJ_C and ARGBToUVJ_C // Intel version mimic SSE/AVX which does 2 pavgb #if LIBYUV_ARGBTOUV_PAVGB -#define MAKEROWYJ(NAME, R, G, B, BPP) \ - void NAME##ToYJRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width) { \ - int x; \ - for (x = 0; x < width; ++x) { \ - dst_y[0] = RGBToYJ(src_argb0[R], src_argb0[G], src_argb0[B]); \ - src_argb0 += BPP; \ - dst_y += 1; \ - } \ - } \ - void NAME##ToUVJRow_C(const uint8_t* src_rgb0, int src_stride_rgb, \ - uint8_t* dst_u, uint8_t* dst_v, int width) { \ - const uint8_t* src_rgb1 = src_rgb0 + src_stride_rgb; \ - int x; \ - for (x = 0; x < width - 1; x += 2) { \ - uint8_t ab = AVGB(AVGB(src_rgb0[B], src_rgb1[B]), \ - AVGB(src_rgb0[B + BPP], src_rgb1[B + BPP])); \ - uint8_t ag = AVGB(AVGB(src_rgb0[G], src_rgb1[G]), \ - AVGB(src_rgb0[G + BPP], src_rgb1[G + BPP])); \ - uint8_t ar = AVGB(AVGB(src_rgb0[R], src_rgb1[R]), \ - AVGB(src_rgb0[R + BPP], src_rgb1[R + BPP])); \ - dst_u[0] = RGBToUJ(ar, ag, ab); \ - dst_v[0] = RGBToVJ(ar, ag, ab); \ - src_rgb0 += BPP * 2; \ - src_rgb1 += BPP * 2; \ - dst_u += 1; \ - dst_v += 1; \ - } \ - if (width & 1) { \ - uint8_t ab = AVGB(src_rgb0[B], src_rgb1[B]); \ - uint8_t ag = AVGB(src_rgb0[G], src_rgb1[G]); \ - uint8_t ar = AVGB(src_rgb0[R], src_rgb1[R]); \ - dst_u[0] = RGBToUJ(ar, ag, ab); \ - dst_v[0] = RGBToVJ(ar, ag, ab); \ - } \ +#define MAKEROWYJ(NAME, R, G, B, BPP) \ + void NAME##ToYJRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width) { \ + int x; \ + for (x = 0; x < width; ++x) { \ + dst_y[0] = RGBToYJ(src_rgb[R], src_rgb[G], src_rgb[B]); \ + src_rgb += BPP; \ + dst_y += 1; \ + } \ + } \ + void NAME##ToUVJRow_C(const uint8_t* src_rgb, int src_stride_rgb, \ + uint8_t* dst_u, uint8_t* dst_v, int width) { \ + const uint8_t* src_rgb1 = src_rgb + src_stride_rgb; \ + int x; \ + for (x = 0; x < width - 1; x += 2) { \ + uint8_t ab = AVGB(AVGB(src_rgb[B], src_rgb1[B]), \ + AVGB(src_rgb[B + BPP], src_rgb1[B + BPP])); \ + uint8_t ag = AVGB(AVGB(src_rgb[G], src_rgb1[G]), \ + AVGB(src_rgb[G + BPP], src_rgb1[G + BPP])); \ + uint8_t ar = AVGB(AVGB(src_rgb[R], src_rgb1[R]), \ + AVGB(src_rgb[R + BPP], src_rgb1[R + BPP])); \ + dst_u[0] = RGBToUJ(ar, ag, ab); \ + dst_v[0] = RGBToVJ(ar, ag, ab); \ + src_rgb += BPP * 2; \ + src_rgb1 
+= BPP * 2; \ + dst_u += 1; \ + dst_v += 1; \ + } \ + if (width & 1) { \ + uint8_t ab = AVGB(src_rgb[B], src_rgb1[B]); \ + uint8_t ag = AVGB(src_rgb[G], src_rgb1[G]); \ + uint8_t ar = AVGB(src_rgb[R], src_rgb1[R]); \ + dst_u[0] = RGBToUJ(ar, ag, ab); \ + dst_v[0] = RGBToVJ(ar, ag, ab); \ + } \ } #else // ARM version does sum / 2 then multiply by 2x smaller coefficients -#define MAKEROWYJ(NAME, R, G, B, BPP) \ - void NAME##ToYJRow_C(const uint8_t* src_argb0, uint8_t* dst_y, int width) { \ - int x; \ - for (x = 0; x < width; ++x) { \ - dst_y[0] = RGBToYJ(src_argb0[R], src_argb0[G], src_argb0[B]); \ - src_argb0 += BPP; \ - dst_y += 1; \ - } \ - } \ - void NAME##ToUVJRow_C(const uint8_t* src_rgb0, int src_stride_rgb, \ - uint8_t* dst_u, uint8_t* dst_v, int width) { \ - const uint8_t* src_rgb1 = src_rgb0 + src_stride_rgb; \ - int x; \ - for (x = 0; x < width - 1; x += 2) { \ - uint16_t ab = (src_rgb0[B] + src_rgb0[B + BPP] + src_rgb1[B] + \ - src_rgb1[B + BPP] + 1) >> \ - 1; \ - uint16_t ag = (src_rgb0[G] + src_rgb0[G + BPP] + src_rgb1[G] + \ - src_rgb1[G + BPP] + 1) >> \ - 1; \ - uint16_t ar = (src_rgb0[R] + src_rgb0[R + BPP] + src_rgb1[R] + \ - src_rgb1[R + BPP] + 1) >> \ - 1; \ - dst_u[0] = RGB2xToUJ(ar, ag, ab); \ - dst_v[0] = RGB2xToVJ(ar, ag, ab); \ - src_rgb0 += BPP * 2; \ - src_rgb1 += BPP * 2; \ - dst_u += 1; \ - dst_v += 1; \ - } \ - if (width & 1) { \ - uint16_t ab = (src_rgb0[B] + src_rgb1[B]); \ - uint16_t ag = (src_rgb0[G] + src_rgb1[G]); \ - uint16_t ar = (src_rgb0[R] + src_rgb1[R]); \ - dst_u[0] = RGB2xToUJ(ar, ag, ab); \ - dst_v[0] = RGB2xToVJ(ar, ag, ab); \ - } \ +#define MAKEROWYJ(NAME, R, G, B, BPP) \ + void NAME##ToYJRow_C(const uint8_t* src_rgb, uint8_t* dst_y, int width) { \ + int x; \ + for (x = 0; x < width; ++x) { \ + dst_y[0] = RGBToYJ(src_rgb[R], src_rgb[G], src_rgb[B]); \ + src_rgb += BPP; \ + dst_y += 1; \ + } \ + } \ + void NAME##ToUVJRow_C(const uint8_t* src_rgb, int src_stride_rgb, \ + uint8_t* dst_u, uint8_t* dst_v, int width) { \ + const uint8_t* src_rgb1 = src_rgb + src_stride_rgb; \ + int x; \ + for (x = 0; x < width - 1; x += 2) { \ + uint16_t ab = (src_rgb[B] + src_rgb[B + BPP] + src_rgb1[B] + \ + src_rgb1[B + BPP] + 1) >> \ + 1; \ + uint16_t ag = (src_rgb[G] + src_rgb[G + BPP] + src_rgb1[G] + \ + src_rgb1[G + BPP] + 1) >> \ + 1; \ + uint16_t ar = (src_rgb[R] + src_rgb[R + BPP] + src_rgb1[R] + \ + src_rgb1[R + BPP] + 1) >> \ + 1; \ + dst_u[0] = RGB2xToUJ(ar, ag, ab); \ + dst_v[0] = RGB2xToVJ(ar, ag, ab); \ + src_rgb += BPP * 2; \ + src_rgb1 += BPP * 2; \ + dst_u += 1; \ + dst_v += 1; \ + } \ + if (width & 1) { \ + uint16_t ab = (src_rgb[B] + src_rgb1[B]); \ + uint16_t ag = (src_rgb[G] + src_rgb1[G]); \ + uint16_t ar = (src_rgb[R] + src_rgb1[R]); \ + dst_u[0] = RGB2xToUJ(ar, ag, ab); \ + dst_v[0] = RGB2xToVJ(ar, ag, ab); \ + } \ } #endif MAKEROWYJ(ARGB, 2, 1, 0, 4) +MAKEROWYJ(ABGR, 0, 1, 2, 4) MAKEROWYJ(RGBA, 3, 2, 1, 4) MAKEROWYJ(RGB24, 2, 1, 0, 3) MAKEROWYJ(RAW, 0, 1, 2, 3) @@ -691,11 +828,12 @@ void RGB565ToYRow_C(const uint8_t* src_rgb565, uint8_t* dst_y, int width) { int x; for (x = 0; x < width; ++x) { uint8_t b = src_rgb565[0] & 0x1f; - uint8_t g = (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3); + uint8_t g = STATIC_CAST( + uint8_t, (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3)); uint8_t r = src_rgb565[1] >> 3; - b = (b << 3) | (b >> 2); - g = (g << 2) | (g >> 4); - r = (r << 3) | (r >> 2); + b = STATIC_CAST(uint8_t, (b << 3) | (b >> 2)); + g = STATIC_CAST(uint8_t, (g << 2) | (g >> 4)); + r = STATIC_CAST(uint8_t, (r << 3) | (r >> 2)); 
dst_y[0] = RGBToY(r, g, b); src_rgb565 += 2; dst_y += 1; @@ -706,11 +844,12 @@ void ARGB1555ToYRow_C(const uint8_t* src_argb1555, uint8_t* dst_y, int width) { int x; for (x = 0; x < width; ++x) { uint8_t b = src_argb1555[0] & 0x1f; - uint8_t g = (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3); + uint8_t g = STATIC_CAST( + uint8_t, (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3)); uint8_t r = (src_argb1555[1] & 0x7c) >> 2; - b = (b << 3) | (b >> 2); - g = (g << 3) | (g >> 2); - r = (r << 3) | (r >> 2); + b = STATIC_CAST(uint8_t, (b << 3) | (b >> 2)); + g = STATIC_CAST(uint8_t, (g << 3) | (g >> 2)); + r = STATIC_CAST(uint8_t, (r << 3) | (r >> 2)); dst_y[0] = RGBToY(r, g, b); src_argb1555 += 2; dst_y += 1; @@ -723,9 +862,9 @@ void ARGB4444ToYRow_C(const uint8_t* src_argb4444, uint8_t* dst_y, int width) { uint8_t b = src_argb4444[0] & 0x0f; uint8_t g = src_argb4444[0] >> 4; uint8_t r = src_argb4444[1] & 0x0f; - b = (b << 4) | b; - g = (g << 4) | g; - r = (r << 4) | r; + b = STATIC_CAST(uint8_t, (b << 4) | b); + g = STATIC_CAST(uint8_t, (g << 4) | g); + r = STATIC_CAST(uint8_t, (r << 4) | r); dst_y[0] = RGBToY(r, g, b); src_argb4444 += 2; dst_y += 1; @@ -740,31 +879,35 @@ void RGB565ToUVRow_C(const uint8_t* src_rgb565, const uint8_t* next_rgb565 = src_rgb565 + src_stride_rgb565; int x; for (x = 0; x < width - 1; x += 2) { - uint8_t b0 = src_rgb565[0] & 0x1f; - uint8_t g0 = (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3); - uint8_t r0 = src_rgb565[1] >> 3; - uint8_t b1 = src_rgb565[2] & 0x1f; - uint8_t g1 = (src_rgb565[2] >> 5) | ((src_rgb565[3] & 0x07) << 3); - uint8_t r1 = src_rgb565[3] >> 3; - uint8_t b2 = next_rgb565[0] & 0x1f; - uint8_t g2 = (next_rgb565[0] >> 5) | ((next_rgb565[1] & 0x07) << 3); - uint8_t r2 = next_rgb565[1] >> 3; - uint8_t b3 = next_rgb565[2] & 0x1f; - uint8_t g3 = (next_rgb565[2] >> 5) | ((next_rgb565[3] & 0x07) << 3); - uint8_t r3 = next_rgb565[3] >> 3; - - b0 = (b0 << 3) | (b0 >> 2); - g0 = (g0 << 2) | (g0 >> 4); - r0 = (r0 << 3) | (r0 >> 2); - b1 = (b1 << 3) | (b1 >> 2); - g1 = (g1 << 2) | (g1 >> 4); - r1 = (r1 << 3) | (r1 >> 2); - b2 = (b2 << 3) | (b2 >> 2); - g2 = (g2 << 2) | (g2 >> 4); - r2 = (r2 << 3) | (r2 >> 2); - b3 = (b3 << 3) | (b3 >> 2); - g3 = (g3 << 2) | (g3 >> 4); - r3 = (r3 << 3) | (r3 >> 2); + uint8_t b0 = STATIC_CAST(uint8_t, src_rgb565[0] & 0x1f); + uint8_t g0 = STATIC_CAST( + uint8_t, (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3)); + uint8_t r0 = STATIC_CAST(uint8_t, src_rgb565[1] >> 3); + uint8_t b1 = STATIC_CAST(uint8_t, src_rgb565[2] & 0x1f); + uint8_t g1 = STATIC_CAST( + uint8_t, (src_rgb565[2] >> 5) | ((src_rgb565[3] & 0x07) << 3)); + uint8_t r1 = STATIC_CAST(uint8_t, src_rgb565[3] >> 3); + uint8_t b2 = STATIC_CAST(uint8_t, next_rgb565[0] & 0x1f); + uint8_t g2 = STATIC_CAST( + uint8_t, (next_rgb565[0] >> 5) | ((next_rgb565[1] & 0x07) << 3)); + uint8_t r2 = STATIC_CAST(uint8_t, next_rgb565[1] >> 3); + uint8_t b3 = STATIC_CAST(uint8_t, next_rgb565[2] & 0x1f); + uint8_t g3 = STATIC_CAST( + uint8_t, (next_rgb565[2] >> 5) | ((next_rgb565[3] & 0x07) << 3)); + uint8_t r3 = STATIC_CAST(uint8_t, next_rgb565[3] >> 3); + + b0 = STATIC_CAST(uint8_t, (b0 << 3) | (b0 >> 2)); + g0 = STATIC_CAST(uint8_t, (g0 << 2) | (g0 >> 4)); + r0 = STATIC_CAST(uint8_t, (r0 << 3) | (r0 >> 2)); + b1 = STATIC_CAST(uint8_t, (b1 << 3) | (b1 >> 2)); + g1 = STATIC_CAST(uint8_t, (g1 << 2) | (g1 >> 4)); + r1 = STATIC_CAST(uint8_t, (r1 << 3) | (r1 >> 2)); + b2 = STATIC_CAST(uint8_t, (b2 << 3) | (b2 >> 2)); + g2 = STATIC_CAST(uint8_t, (g2 << 2) | (g2 >> 
4)); + r2 = STATIC_CAST(uint8_t, (r2 << 3) | (r2 >> 2)); + b3 = STATIC_CAST(uint8_t, (b3 << 3) | (b3 >> 2)); + g3 = STATIC_CAST(uint8_t, (g3 << 2) | (g3 >> 4)); + r3 = STATIC_CAST(uint8_t, (r3 << 3) | (r3 >> 2)); #if LIBYUV_ARGBTOUV_PAVGB uint8_t ab = AVGB(AVGB(b0, b2), AVGB(b1, b3)); @@ -786,19 +929,20 @@ void RGB565ToUVRow_C(const uint8_t* src_rgb565, dst_v += 1; } if (width & 1) { - uint8_t b0 = src_rgb565[0] & 0x1f; - uint8_t g0 = (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3); - uint8_t r0 = src_rgb565[1] >> 3; - uint8_t b2 = next_rgb565[0] & 0x1f; - uint8_t g2 = (next_rgb565[0] >> 5) | ((next_rgb565[1] & 0x07) << 3); - uint8_t r2 = next_rgb565[1] >> 3; - - b0 = (b0 << 3) | (b0 >> 2); - g0 = (g0 << 2) | (g0 >> 4); - r0 = (r0 << 3) | (r0 >> 2); - b2 = (b2 << 3) | (b2 >> 2); - g2 = (g2 << 2) | (g2 >> 4); - r2 = (r2 << 3) | (r2 >> 2); + uint8_t b0 = STATIC_CAST(uint8_t, src_rgb565[0] & 0x1f); + uint8_t g0 = STATIC_CAST( + uint8_t, (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3)); + uint8_t r0 = STATIC_CAST(uint8_t, src_rgb565[1] >> 3); + uint8_t b2 = STATIC_CAST(uint8_t, next_rgb565[0] & 0x1f); + uint8_t g2 = STATIC_CAST( + uint8_t, (next_rgb565[0] >> 5) | ((next_rgb565[1] & 0x07) << 3)); + uint8_t r2 = STATIC_CAST(uint8_t, next_rgb565[1] >> 3); + b0 = STATIC_CAST(uint8_t, (b0 << 3) | (b0 >> 2)); + g0 = STATIC_CAST(uint8_t, (g0 << 2) | (g0 >> 4)); + r0 = STATIC_CAST(uint8_t, (r0 << 3) | (r0 >> 2)); + b2 = STATIC_CAST(uint8_t, (b2 << 3) | (b2 >> 2)); + g2 = STATIC_CAST(uint8_t, (g2 << 2) | (g2 >> 4)); + r2 = STATIC_CAST(uint8_t, (r2 << 3) | (r2 >> 2)); #if LIBYUV_ARGBTOUV_PAVGB uint8_t ab = AVGB(b0, b2); @@ -824,31 +968,35 @@ void ARGB1555ToUVRow_C(const uint8_t* src_argb1555, const uint8_t* next_argb1555 = src_argb1555 + src_stride_argb1555; int x; for (x = 0; x < width - 1; x += 2) { - uint8_t b0 = src_argb1555[0] & 0x1f; - uint8_t g0 = (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3); - uint8_t r0 = (src_argb1555[1] & 0x7c) >> 2; - uint8_t b1 = src_argb1555[2] & 0x1f; - uint8_t g1 = (src_argb1555[2] >> 5) | ((src_argb1555[3] & 0x03) << 3); - uint8_t r1 = (src_argb1555[3] & 0x7c) >> 2; - uint8_t b2 = next_argb1555[0] & 0x1f; - uint8_t g2 = (next_argb1555[0] >> 5) | ((next_argb1555[1] & 0x03) << 3); - uint8_t r2 = (next_argb1555[1] & 0x7c) >> 2; - uint8_t b3 = next_argb1555[2] & 0x1f; - uint8_t g3 = (next_argb1555[2] >> 5) | ((next_argb1555[3] & 0x03) << 3); - uint8_t r3 = (next_argb1555[3] & 0x7c) >> 2; - - b0 = (b0 << 3) | (b0 >> 2); - g0 = (g0 << 3) | (g0 >> 2); - r0 = (r0 << 3) | (r0 >> 2); - b1 = (b1 << 3) | (b1 >> 2); - g1 = (g1 << 3) | (g1 >> 2); - r1 = (r1 << 3) | (r1 >> 2); - b2 = (b2 << 3) | (b2 >> 2); - g2 = (g2 << 3) | (g2 >> 2); - r2 = (r2 << 3) | (r2 >> 2); - b3 = (b3 << 3) | (b3 >> 2); - g3 = (g3 << 3) | (g3 >> 2); - r3 = (r3 << 3) | (r3 >> 2); + uint8_t b0 = STATIC_CAST(uint8_t, src_argb1555[0] & 0x1f); + uint8_t g0 = STATIC_CAST( + uint8_t, (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3)); + uint8_t r0 = STATIC_CAST(uint8_t, (src_argb1555[1] & 0x7c) >> 2); + uint8_t b1 = STATIC_CAST(uint8_t, src_argb1555[2] & 0x1f); + uint8_t g1 = STATIC_CAST( + uint8_t, (src_argb1555[2] >> 5) | ((src_argb1555[3] & 0x03) << 3)); + uint8_t r1 = STATIC_CAST(uint8_t, (src_argb1555[3] & 0x7c) >> 2); + uint8_t b2 = STATIC_CAST(uint8_t, next_argb1555[0] & 0x1f); + uint8_t g2 = STATIC_CAST( + uint8_t, (next_argb1555[0] >> 5) | ((next_argb1555[1] & 0x03) << 3)); + uint8_t r2 = STATIC_CAST(uint8_t, (next_argb1555[1] & 0x7c) >> 2); + uint8_t b3 = STATIC_CAST(uint8_t, 
next_argb1555[2] & 0x1f); + uint8_t g3 = STATIC_CAST( + uint8_t, (next_argb1555[2] >> 5) | ((next_argb1555[3] & 0x03) << 3)); + uint8_t r3 = STATIC_CAST(uint8_t, (next_argb1555[3] & 0x7c) >> 2); + + b0 = STATIC_CAST(uint8_t, (b0 << 3) | (b0 >> 2)); + g0 = STATIC_CAST(uint8_t, (g0 << 3) | (g0 >> 2)); + r0 = STATIC_CAST(uint8_t, (r0 << 3) | (r0 >> 2)); + b1 = STATIC_CAST(uint8_t, (b1 << 3) | (b1 >> 2)); + g1 = STATIC_CAST(uint8_t, (g1 << 3) | (g1 >> 2)); + r1 = STATIC_CAST(uint8_t, (r1 << 3) | (r1 >> 2)); + b2 = STATIC_CAST(uint8_t, (b2 << 3) | (b2 >> 2)); + g2 = STATIC_CAST(uint8_t, (g2 << 3) | (g2 >> 2)); + r2 = STATIC_CAST(uint8_t, (r2 << 3) | (r2 >> 2)); + b3 = STATIC_CAST(uint8_t, (b3 << 3) | (b3 >> 2)); + g3 = STATIC_CAST(uint8_t, (g3 << 3) | (g3 >> 2)); + r3 = STATIC_CAST(uint8_t, (r3 << 3) | (r3 >> 2)); #if LIBYUV_ARGBTOUV_PAVGB uint8_t ab = AVGB(AVGB(b0, b2), AVGB(b1, b3)); @@ -870,19 +1018,21 @@ void ARGB1555ToUVRow_C(const uint8_t* src_argb1555, dst_v += 1; } if (width & 1) { - uint8_t b0 = src_argb1555[0] & 0x1f; - uint8_t g0 = (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3); - uint8_t r0 = (src_argb1555[1] & 0x7c) >> 2; - uint8_t b2 = next_argb1555[0] & 0x1f; - uint8_t g2 = (next_argb1555[0] >> 5) | ((next_argb1555[1] & 0x03) << 3); - uint8_t r2 = next_argb1555[1] >> 3; - - b0 = (b0 << 3) | (b0 >> 2); - g0 = (g0 << 3) | (g0 >> 2); - r0 = (r0 << 3) | (r0 >> 2); - b2 = (b2 << 3) | (b2 >> 2); - g2 = (g2 << 3) | (g2 >> 2); - r2 = (r2 << 3) | (r2 >> 2); + uint8_t b0 = STATIC_CAST(uint8_t, src_argb1555[0] & 0x1f); + uint8_t g0 = STATIC_CAST( + uint8_t, (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3)); + uint8_t r0 = STATIC_CAST(uint8_t, (src_argb1555[1] & 0x7c) >> 2); + uint8_t b2 = STATIC_CAST(uint8_t, next_argb1555[0] & 0x1f); + uint8_t g2 = STATIC_CAST( + uint8_t, (next_argb1555[0] >> 5) | ((next_argb1555[1] & 0x03) << 3)); + uint8_t r2 = STATIC_CAST(uint8_t, (next_argb1555[1] & 0x7c) >> 2); + + b0 = STATIC_CAST(uint8_t, (b0 << 3) | (b0 >> 2)); + g0 = STATIC_CAST(uint8_t, (g0 << 3) | (g0 >> 2)); + r0 = STATIC_CAST(uint8_t, (r0 << 3) | (r0 >> 2)); + b2 = STATIC_CAST(uint8_t, (b2 << 3) | (b2 >> 2)); + g2 = STATIC_CAST(uint8_t, (g2 << 3) | (g2 >> 2)); + r2 = STATIC_CAST(uint8_t, (r2 << 3) | (r2 >> 2)); #if LIBYUV_ARGBTOUV_PAVGB uint8_t ab = AVGB(b0, b2); @@ -921,18 +1071,18 @@ void ARGB4444ToUVRow_C(const uint8_t* src_argb4444, uint8_t g3 = next_argb4444[2] >> 4; uint8_t r3 = next_argb4444[3] & 0x0f; - b0 = (b0 << 4) | b0; - g0 = (g0 << 4) | g0; - r0 = (r0 << 4) | r0; - b1 = (b1 << 4) | b1; - g1 = (g1 << 4) | g1; - r1 = (r1 << 4) | r1; - b2 = (b2 << 4) | b2; - g2 = (g2 << 4) | g2; - r2 = (r2 << 4) | r2; - b3 = (b3 << 4) | b3; - g3 = (g3 << 4) | g3; - r3 = (r3 << 4) | r3; + b0 = STATIC_CAST(uint8_t, (b0 << 4) | b0); + g0 = STATIC_CAST(uint8_t, (g0 << 4) | g0); + r0 = STATIC_CAST(uint8_t, (r0 << 4) | r0); + b1 = STATIC_CAST(uint8_t, (b1 << 4) | b1); + g1 = STATIC_CAST(uint8_t, (g1 << 4) | g1); + r1 = STATIC_CAST(uint8_t, (r1 << 4) | r1); + b2 = STATIC_CAST(uint8_t, (b2 << 4) | b2); + g2 = STATIC_CAST(uint8_t, (g2 << 4) | g2); + r2 = STATIC_CAST(uint8_t, (r2 << 4) | r2); + b3 = STATIC_CAST(uint8_t, (b3 << 4) | b3); + g3 = STATIC_CAST(uint8_t, (g3 << 4) | g3); + r3 = STATIC_CAST(uint8_t, (r3 << 4) | r3); #if LIBYUV_ARGBTOUV_PAVGB uint8_t ab = AVGB(AVGB(b0, b2), AVGB(b1, b3)); @@ -961,12 +1111,12 @@ void ARGB4444ToUVRow_C(const uint8_t* src_argb4444, uint8_t g2 = next_argb4444[0] >> 4; uint8_t r2 = next_argb4444[1] & 0x0f; - b0 = (b0 << 4) | b0; - g0 = (g0 << 4) | g0; - 
r0 = (r0 << 4) | r0; - b2 = (b2 << 4) | b2; - g2 = (g2 << 4) | g2; - r2 = (r2 << 4) | r2; + b0 = STATIC_CAST(uint8_t, (b0 << 4) | b0); + g0 = STATIC_CAST(uint8_t, (g0 << 4) | g0); + r0 = STATIC_CAST(uint8_t, (r0 << 4) | r0); + b2 = STATIC_CAST(uint8_t, (b2 << 4) | b2); + g2 = STATIC_CAST(uint8_t, (g2 << 4) | g2); + r2 = STATIC_CAST(uint8_t, (r2 << 4) | r2); #if LIBYUV_ARGBTOUV_PAVGB uint8_t ab = AVGB(b0, b2); @@ -1023,9 +1173,9 @@ void ARGBSepiaRow_C(uint8_t* dst_argb, int width) { int sg = (b * 22 + g * 88 + r * 45) >> 7; int sr = (b * 24 + g * 98 + r * 50) >> 7; // b does not over flow. a is preserved from original. - dst_argb[0] = sb; - dst_argb[1] = clamp255(sg); - dst_argb[2] = clamp255(sr); + dst_argb[0] = STATIC_CAST(uint8_t, sb); + dst_argb[1] = STATIC_CAST(uint8_t, clamp255(sg)); + dst_argb[2] = STATIC_CAST(uint8_t, clamp255(sr)); dst_argb += 4; } } @@ -1054,10 +1204,10 @@ void ARGBColorMatrixRow_C(const uint8_t* src_argb, int sa = (b * matrix_argb[12] + g * matrix_argb[13] + r * matrix_argb[14] + a * matrix_argb[15]) >> 6; - dst_argb[0] = Clamp(sb); - dst_argb[1] = Clamp(sg); - dst_argb[2] = Clamp(sr); - dst_argb[3] = Clamp(sa); + dst_argb[0] = STATIC_CAST(uint8_t, Clamp(sb)); + dst_argb[1] = STATIC_CAST(uint8_t, Clamp(sg)); + dst_argb[2] = STATIC_CAST(uint8_t, Clamp(sr)); + dst_argb[3] = STATIC_CAST(uint8_t, Clamp(sa)); src_argb += 4; dst_argb += 4; } @@ -1107,9 +1257,12 @@ void ARGBQuantizeRow_C(uint8_t* dst_argb, int b = dst_argb[0]; int g = dst_argb[1]; int r = dst_argb[2]; - dst_argb[0] = (b * scale >> 16) * interval_size + interval_offset; - dst_argb[1] = (g * scale >> 16) * interval_size + interval_offset; - dst_argb[2] = (r * scale >> 16) * interval_size + interval_offset; + dst_argb[0] = STATIC_CAST( + uint8_t, (b * scale >> 16) * interval_size + interval_offset); + dst_argb[1] = STATIC_CAST( + uint8_t, (g * scale >> 16) * interval_size + interval_offset); + dst_argb[2] = STATIC_CAST( + uint8_t, (r * scale >> 16) * interval_size + interval_offset); dst_argb += 4; } } @@ -1146,25 +1299,25 @@ void ARGBShadeRow_C(const uint8_t* src_argb, #define REPEAT8(v) (v) | ((v) << 8) #define SHADE(f, v) v* f >> 16 -void ARGBMultiplyRow_C(const uint8_t* src_argb0, +void ARGBMultiplyRow_C(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { int i; for (i = 0; i < width; ++i) { - const uint32_t b = REPEAT8(src_argb0[0]); - const uint32_t g = REPEAT8(src_argb0[1]); - const uint32_t r = REPEAT8(src_argb0[2]); - const uint32_t a = REPEAT8(src_argb0[3]); + const uint32_t b = REPEAT8(src_argb[0]); + const uint32_t g = REPEAT8(src_argb[1]); + const uint32_t r = REPEAT8(src_argb[2]); + const uint32_t a = REPEAT8(src_argb[3]); const uint32_t b_scale = src_argb1[0]; const uint32_t g_scale = src_argb1[1]; const uint32_t r_scale = src_argb1[2]; const uint32_t a_scale = src_argb1[3]; - dst_argb[0] = SHADE(b, b_scale); - dst_argb[1] = SHADE(g, g_scale); - dst_argb[2] = SHADE(r, r_scale); - dst_argb[3] = SHADE(a, a_scale); - src_argb0 += 4; + dst_argb[0] = STATIC_CAST(uint8_t, SHADE(b, b_scale)); + dst_argb[1] = STATIC_CAST(uint8_t, SHADE(g, g_scale)); + dst_argb[2] = STATIC_CAST(uint8_t, SHADE(r, r_scale)); + dst_argb[3] = STATIC_CAST(uint8_t, SHADE(a, a_scale)); + src_argb += 4; src_argb1 += 4; dst_argb += 4; } @@ -1174,25 +1327,25 @@ void ARGBMultiplyRow_C(const uint8_t* src_argb0, #define SHADE(f, v) clamp255(v + f) -void ARGBAddRow_C(const uint8_t* src_argb0, +void ARGBAddRow_C(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { int i; 
for (i = 0; i < width; ++i) { - const int b = src_argb0[0]; - const int g = src_argb0[1]; - const int r = src_argb0[2]; - const int a = src_argb0[3]; + const int b = src_argb[0]; + const int g = src_argb[1]; + const int r = src_argb[2]; + const int a = src_argb[3]; const int b_add = src_argb1[0]; const int g_add = src_argb1[1]; const int r_add = src_argb1[2]; const int a_add = src_argb1[3]; - dst_argb[0] = SHADE(b, b_add); - dst_argb[1] = SHADE(g, g_add); - dst_argb[2] = SHADE(r, r_add); - dst_argb[3] = SHADE(a, a_add); - src_argb0 += 4; + dst_argb[0] = STATIC_CAST(uint8_t, SHADE(b, b_add)); + dst_argb[1] = STATIC_CAST(uint8_t, SHADE(g, g_add)); + dst_argb[2] = STATIC_CAST(uint8_t, SHADE(r, r_add)); + dst_argb[3] = STATIC_CAST(uint8_t, SHADE(a, a_add)); + src_argb += 4; src_argb1 += 4; dst_argb += 4; } @@ -1201,25 +1354,25 @@ void ARGBAddRow_C(const uint8_t* src_argb0, #define SHADE(f, v) clamp0(f - v) -void ARGBSubtractRow_C(const uint8_t* src_argb0, +void ARGBSubtractRow_C(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { int i; for (i = 0; i < width; ++i) { - const int b = src_argb0[0]; - const int g = src_argb0[1]; - const int r = src_argb0[2]; - const int a = src_argb0[3]; + const int b = src_argb[0]; + const int g = src_argb[1]; + const int r = src_argb[2]; + const int a = src_argb[3]; const int b_sub = src_argb1[0]; const int g_sub = src_argb1[1]; const int r_sub = src_argb1[2]; const int a_sub = src_argb1[3]; - dst_argb[0] = SHADE(b, b_sub); - dst_argb[1] = SHADE(g, g_sub); - dst_argb[2] = SHADE(r, r_sub); - dst_argb[3] = SHADE(a, a_sub); - src_argb0 += 4; + dst_argb[0] = STATIC_CAST(uint8_t, SHADE(b, b_sub)); + dst_argb[1] = STATIC_CAST(uint8_t, SHADE(g, g_sub)); + dst_argb[2] = STATIC_CAST(uint8_t, SHADE(r, r_sub)); + dst_argb[3] = STATIC_CAST(uint8_t, SHADE(a, a_sub)); + src_argb += 4; src_argb1 += 4; dst_argb += 4; } @@ -1327,351 +1480,241 @@ void J400ToARGBRow_C(const uint8_t* src_y, uint8_t* dst_argb, int width) { } } -// TODO(fbarchard): Unify these structures to be platform independent. -// TODO(fbarchard): Generate SIMD structures from float matrix. +// Macros to create SIMD specific yuv to rgb conversion constants. -// BT.601 YUV to RGB reference -// R = (Y - 16) * 1.164 - V * -1.596 -// G = (Y - 16) * 1.164 - U * 0.391 - V * 0.813 -// B = (Y - 16) * 1.164 - U * -2.018 +// clang-format off -// Y contribution to R,G,B. Scale and bias. -#define YG 18997 /* round(1.164 * 64 * 256 * 256 / 257) */ -#define YGB -1160 /* 1.164 * 64 * -16 + 64 / 2 */ +#if defined(__aarch64__) || defined(__arm__) +// Bias values include subtract 128 from U and V, bias from Y and rounding. +// For B and R bias is negative. For G bias is positive. 
+#define YUVCONSTANTSBODY(YG, YB, UB, UG, VG, VR) \ + {{UB, VR, UG, VG, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, \ + {YG, (UB * 128 - YB), (UG * 128 + VG * 128 + YB), (VR * 128 - YB), YB, 0, \ + 0, 0}} +#else +#define YUVCONSTANTSBODY(YG, YB, UB, UG, VG, VR) \ + {{UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, \ + UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0}, \ + {UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, \ + UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG}, \ + {0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, \ + 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR}, \ + {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, \ + {YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB, YB}} +#endif + +// clang-format on + +#define MAKEYUVCONSTANTS(name, YG, YB, UB, UG, VG, VR) \ + const struct YuvConstants SIMD_ALIGNED(kYuv##name##Constants) = \ + YUVCONSTANTSBODY(YG, YB, UB, UG, VG, VR); \ + const struct YuvConstants SIMD_ALIGNED(kYvu##name##Constants) = \ + YUVCONSTANTSBODY(YG, YB, VR, VG, UG, UB); + +// TODO(fbarchard): Generate SIMD structures from float matrix. + +// BT.601 limited range YUV to RGB reference +// R = (Y - 16) * 1.164 + V * 1.596 +// G = (Y - 16) * 1.164 - U * 0.391 - V * 0.813 +// B = (Y - 16) * 1.164 + U * 2.018 +// KR = 0.299; KB = 0.114 // U and V contributions to R,G,B. -#define UB -128 /* max(-128, round(-2.018 * 64)) */ -#define UG 25 /* round(0.391 * 64) */ -#define VG 52 /* round(0.813 * 64) */ -#define VR -102 /* round(-1.596 * 64) */ - -// Bias values to subtract 16 from Y and 128 from U and V. -#define BB (UB * 128 + YGB) -#define BG (UG * 128 + VG * 128 + YGB) -#define BR (VR * 128 + YGB) - -#if defined(__aarch64__) // 64 bit arm -const struct YuvConstants SIMD_ALIGNED(kYuvI601Constants) = { - {-UB, -VR, -UB, -VR, -UB, -VR, -UB, -VR}, - {-UB, -VR, -UB, -VR, -UB, -VR, -UB, -VR}, - {UG, VG, UG, VG, UG, VG, UG, VG}, - {UG, VG, UG, VG, UG, VG, UG, VG}, - {BB, BG, BR, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; -const struct YuvConstants SIMD_ALIGNED(kYvuI601Constants) = { - {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, - {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, - {VG, UG, VG, UG, VG, UG, VG, UG}, - {VG, UG, VG, UG, VG, UG, VG, UG}, - {BR, BG, BB, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; -#elif defined(__arm__) // 32 bit arm -const struct YuvConstants SIMD_ALIGNED(kYuvI601Constants) = { - {-UB, -UB, -UB, -UB, -VR, -VR, -VR, -VR, 0, 0, 0, 0, 0, 0, 0, 0}, - {UG, UG, UG, UG, VG, VG, VG, VG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BB, BG, BR, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; -const struct YuvConstants SIMD_ALIGNED(kYvuI601Constants) = { - {-VR, -VR, -VR, -VR, -UB, -UB, -UB, -UB, 0, 0, 0, 0, 0, 0, 0, 0}, - {VG, VG, VG, VG, UG, UG, UG, UG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BR, BG, BB, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; +#if defined(LIBYUV_UNLIMITED_DATA) || defined(LIBYUV_UNLIMITED_BT601) +#define UB 129 /* round(2.018 * 64) */ #else -const struct YuvConstants SIMD_ALIGNED(kYuvI601Constants) = { - {UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, - UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0}, - {UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, - UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG}, - {0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, - 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR}, - {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, - {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, 
BG, BG, BG}, - {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, - {YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, - YGB}}; -const struct YuvConstants SIMD_ALIGNED(kYvuI601Constants) = { - {VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, - VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0}, - {VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, - VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG}, - {0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, - 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB}, - {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, - {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, - {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, - {YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, - YGB}}; +#define UB 128 /* max(128, round(2.018 * 64)) */ #endif +#define UG 25 /* round(0.391 * 64) */ +#define VG 52 /* round(0.813 * 64) */ +#define VR 102 /* round(1.596 * 64) */ -#undef BB -#undef BG -#undef BR -#undef YGB +// Y contribution to R,G,B. Scale and bias. +#define YG 18997 /* round(1.164 * 64 * 256 * 256 / 257) */ +#define YB -1160 /* 1.164 * 64 * -16 + 64 / 2 */ + +MAKEYUVCONSTANTS(I601, YG, YB, UB, UG, VG, VR) + +#undef YG +#undef YB #undef UB #undef UG #undef VG #undef VR -#undef YG -// JPEG YUV to RGB reference -// * R = Y - V * -1.40200 -// * G = Y - U * 0.34414 - V * 0.71414 -// * B = Y - U * -1.77200 +// BT.601 full range YUV to RGB reference (aka JPEG) +// * R = Y + V * 1.40200 +// * G = Y - U * 0.34414 - V * 0.71414 +// * B = Y + U * 1.77200 +// KR = 0.299; KB = 0.114 + +// U and V contributions to R,G,B. +#define UB 113 /* round(1.77200 * 64) */ +#define UG 22 /* round(0.34414 * 64) */ +#define VG 46 /* round(0.71414 * 64) */ +#define VR 90 /* round(1.40200 * 64) */ // Y contribution to R,G,B. Scale and bias. #define YG 16320 /* round(1.000 * 64 * 256 * 256 / 257) */ -#define YGB 32 /* 64 / 2 */ +#define YB 32 /* 64 / 2 */ -// U and V contributions to R,G,B. -#define UB -113 /* round(-1.77200 * 64) */ -#define UG 22 /* round(0.34414 * 64) */ -#define VG 46 /* round(0.71414 * 64) */ -#define VR -90 /* round(-1.40200 * 64) */ - -// Bias values to round, and subtract 128 from U and V. 
-#define BB (UB * 128 + YGB) -#define BG (UG * 128 + VG * 128 + YGB) -#define BR (VR * 128 + YGB) - -#if defined(__aarch64__) -const struct YuvConstants SIMD_ALIGNED(kYuvJPEGConstants) = { - {-UB, -VR, -UB, -VR, -UB, -VR, -UB, -VR}, - {-UB, -VR, -UB, -VR, -UB, -VR, -UB, -VR}, - {UG, VG, UG, VG, UG, VG, UG, VG}, - {UG, VG, UG, VG, UG, VG, UG, VG}, - {BB, BG, BR, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; -const struct YuvConstants SIMD_ALIGNED(kYvuJPEGConstants) = { - {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, - {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, - {VG, UG, VG, UG, VG, UG, VG, UG}, - {VG, UG, VG, UG, VG, UG, VG, UG}, - {BR, BG, BB, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; -#elif defined(__arm__) -const struct YuvConstants SIMD_ALIGNED(kYuvJPEGConstants) = { - {-UB, -UB, -UB, -UB, -VR, -VR, -VR, -VR, 0, 0, 0, 0, 0, 0, 0, 0}, - {UG, UG, UG, UG, VG, VG, VG, VG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BB, BG, BR, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; -const struct YuvConstants SIMD_ALIGNED(kYvuJPEGConstants) = { - {-VR, -VR, -VR, -VR, -UB, -UB, -UB, -UB, 0, 0, 0, 0, 0, 0, 0, 0}, - {VG, VG, VG, VG, UG, UG, UG, UG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BR, BG, BB, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; -#else -const struct YuvConstants SIMD_ALIGNED(kYuvJPEGConstants) = { - {UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, - UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0}, - {UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, - UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG}, - {0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, - 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR}, - {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, - {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, - {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, - {YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, - YGB}}; -const struct YuvConstants SIMD_ALIGNED(kYvuJPEGConstants) = { - {VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, - VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0}, - {VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, - VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG}, - {0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, - 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB}, - {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, - {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, - {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, - {YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, - YGB}}; -#endif +MAKEYUVCONSTANTS(JPEG, YG, YB, UB, UG, VG, VR) -#undef BB -#undef BG -#undef BR -#undef YGB +#undef YG +#undef YB #undef UB #undef UG #undef VG #undef VR -#undef YG -// BT.709 YUV to RGB reference -// R = (Y - 16) * 1.164 - V * -1.793 -// G = (Y - 16) * 1.164 - U * 0.213 - V * 0.533 -// B = (Y - 16) * 1.164 - U * -2.112 -// See also http://www.equasys.de/colorconversion.html +// BT.709 limited range YUV to RGB reference +// R = (Y - 16) * 1.164 + V * 1.793 +// G = (Y - 16) * 1.164 - U * 0.213 - V * 0.533 +// B = (Y - 16) * 1.164 + U * 2.112 +// KR = 0.2126, KB = 0.0722 -// Y contribution to R,G,B. Scale and bias. 
-#define YG 18997 /* round(1.164 * 64 * 256 * 256 / 257) */ -#define YGB -1160 /* 1.164 * 64 * -16 + 64 / 2 */ - -// TODO(fbarchard): Find way to express 2.112 instead of 2.0. // U and V contributions to R,G,B. -#define UB -128 /* max(-128, round(-2.112 * 64)) */ -#define UG 14 /* round(0.213 * 64) */ -#define VG 34 /* round(0.533 * 64) */ -#define VR -115 /* round(-1.793 * 64) */ - -// Bias values to round, and subtract 128 from U and V. -#define BB (UB * 128 + YGB) -#define BG (UG * 128 + VG * 128 + YGB) -#define BR (VR * 128 + YGB) - -#if defined(__aarch64__) -const struct YuvConstants SIMD_ALIGNED(kYuvH709Constants) = { - {-UB, -VR, -UB, -VR, -UB, -VR, -UB, -VR}, - {-UB, -VR, -UB, -VR, -UB, -VR, -UB, -VR}, - {UG, VG, UG, VG, UG, VG, UG, VG}, - {UG, VG, UG, VG, UG, VG, UG, VG}, - {BB, BG, BR, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; -const struct YuvConstants SIMD_ALIGNED(kYvuH709Constants) = { - {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, - {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, - {VG, UG, VG, UG, VG, UG, VG, UG}, - {VG, UG, VG, UG, VG, UG, VG, UG}, - {BR, BG, BB, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; -#elif defined(__arm__) -const struct YuvConstants SIMD_ALIGNED(kYuvH709Constants) = { - {-UB, -UB, -UB, -UB, -VR, -VR, -VR, -VR, 0, 0, 0, 0, 0, 0, 0, 0}, - {UG, UG, UG, UG, VG, VG, VG, VG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BB, BG, BR, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; -const struct YuvConstants SIMD_ALIGNED(kYvuH709Constants) = { - {-VR, -VR, -VR, -VR, -UB, -UB, -UB, -UB, 0, 0, 0, 0, 0, 0, 0, 0}, - {VG, VG, VG, VG, UG, UG, UG, UG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BR, BG, BB, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; +#if defined(LIBYUV_UNLIMITED_DATA) || defined(LIBYUV_UNLIMITED_BT709) +#define UB 135 /* round(2.112 * 64) */ #else -const struct YuvConstants SIMD_ALIGNED(kYuvH709Constants) = { - {UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, - UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0}, - {UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, - UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG}, - {0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, - 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR}, - {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, - {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, - {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, - {YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, - YGB}}; -const struct YuvConstants SIMD_ALIGNED(kYvuH709Constants) = { - {VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, - VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0}, - {VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, - VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG}, - {0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, - 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB}, - {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, - {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, - {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, - {YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, - YGB}}; +#define UB 128 /* max(128, round(2.112 * 64)) */ #endif +#define UG 14 /* round(0.213 * 64) */ +#define VG 34 /* round(0.533 * 64) */ +#define VR 115 /* round(1.793 * 64) */ -#undef BB 
-#undef BG -#undef BR -#undef YGB +// Y contribution to R,G,B. Scale and bias. +#define YG 18997 /* round(1.164 * 64 * 256 * 256 / 257) */ +#define YB -1160 /* 1.164 * 64 * -16 + 64 / 2 */ + +MAKEYUVCONSTANTS(H709, YG, YB, UB, UG, VG, VR) + +#undef YG +#undef YB #undef UB #undef UG #undef VG #undef VR -#undef YG -// BT.2020 YUV to RGB reference -// R = (Y - 16) * 1.164384 - V * -1.67867 -// G = (Y - 16) * 1.164384 - U * 0.187326 - V * 0.65042 -// B = (Y - 16) * 1.164384 - U * -2.14177 +// BT.709 full range YUV to RGB reference +// R = Y + V * 1.5748 +// G = Y - U * 0.18732 - V * 0.46812 +// B = Y + U * 1.8556 +// KR = 0.2126, KB = 0.0722 -// Y contribution to R,G,B. Scale and bias. -#define YG 19003 /* round(1.164384 * 64 * 256 * 256 / 257) */ -#define YGB -1160 /* 1.164384 * 64 * -16 + 64 / 2 */ +// U and V contributions to R,G,B. +#define UB 119 /* round(1.8556 * 64) */ +#define UG 12 /* round(0.18732 * 64) */ +#define VG 30 /* round(0.46812 * 64) */ +#define VR 101 /* round(1.5748 * 64) */ + +// Y contribution to R,G,B. Scale and bias. (same as jpeg) +#define YG 16320 /* round(1 * 64 * 256 * 256 / 257) */ +#define YB 32 /* 64 / 2 */ + +MAKEYUVCONSTANTS(F709, YG, YB, UB, UG, VG, VR) + +#undef YG +#undef YB +#undef UB +#undef UG +#undef VG +#undef VR + +// BT.2020 limited range YUV to RGB reference +// R = (Y - 16) * 1.164384 + V * 1.67867 +// G = (Y - 16) * 1.164384 - U * 0.187326 - V * 0.65042 +// B = (Y - 16) * 1.164384 + U * 2.14177 +// KR = 0.2627; KB = 0.0593 -// TODO(fbarchard): Improve accuracy; the B channel is off by 7%. // U and V contributions to R,G,B. -#define UB -128 /* max(-128, round(-2.142 * 64)) */ -#define UG 12 /* round(0.187326 * 64) */ -#define VG 42 /* round(0.65042 * 64) */ -#define VR -107 /* round(-1.67867 * 64) */ - -// Bias values to round, and subtract 128 from U and V. 
-#define BB (UB * 128 + YGB) -#define BG (UG * 128 + VG * 128 + YGB) -#define BR (VR * 128 + YGB) - -#if defined(__aarch64__) -const struct YuvConstants SIMD_ALIGNED(kYuv2020Constants) = { - {-UB, -VR, -UB, -VR, -UB, -VR, -UB, -VR}, - {-UB, -VR, -UB, -VR, -UB, -VR, -UB, -VR}, - {UG, VG, UG, VG, UG, VG, UG, VG}, - {UG, VG, UG, VG, UG, VG, UG, VG}, - {BB, BG, BR, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; -const struct YuvConstants SIMD_ALIGNED(kYvu2020Constants) = { - {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, - {-VR, -UB, -VR, -UB, -VR, -UB, -VR, -UB}, - {VG, UG, VG, UG, VG, UG, VG, UG}, - {VG, UG, VG, UG, VG, UG, VG, UG}, - {BR, BG, BB, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; -#elif defined(__arm__) -const struct YuvConstants SIMD_ALIGNED(kYuv2020Constants) = { - {-UB, -UB, -UB, -UB, -VR, -VR, -VR, -VR, 0, 0, 0, 0, 0, 0, 0, 0}, - {UG, UG, UG, UG, VG, VG, VG, VG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BB, BG, BR, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; -const struct YuvConstants SIMD_ALIGNED(kYvu2020Constants) = { - {-VR, -VR, -VR, -VR, -UB, -UB, -UB, -UB, 0, 0, 0, 0, 0, 0, 0, 0}, - {VG, VG, VG, VG, UG, UG, UG, UG, 0, 0, 0, 0, 0, 0, 0, 0}, - {BR, BG, BB, YGB, 0, 0, 0, 0}, - {0x0101 * YG, YG, 0, 0}}; +#if defined(LIBYUV_UNLIMITED_DATA) || defined(LIBYUV_UNLIMITED_BT2020) +#define UB 137 /* round(2.142 * 64) */ #else -const struct YuvConstants SIMD_ALIGNED(kYuv2020Constants) = { - {UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, - UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0}, - {UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, - UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG}, - {0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, - 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR}, - {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, - {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, - {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, - {YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, - YGB}}; -const struct YuvConstants SIMD_ALIGNED(kYvu2020Constants) = { - {VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, - VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0, VR, 0}, - {VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, - VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG, VG, UG}, - {0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, - 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB, 0, UB}, - {BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR, BR}, - {BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG, BG}, - {BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB, BB}, - {YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG, YG}, - {YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, YGB, - YGB}}; +#define UB 128 /* max(128, round(2.142 * 64)) */ #endif +#define UG 12 /* round(0.187326 * 64) */ +#define VG 42 /* round(0.65042 * 64) */ +#define VR 107 /* round(1.67867 * 64) */ -#undef BB -#undef BG -#undef BR -#undef YGB +// Y contribution to R,G,B. Scale and bias. 
+#define YG 19003 /* round(1.164384 * 64 * 256 * 256 / 257) */ +#define YB -1160 /* 1.164384 * 64 * -16 + 64 / 2 */ + +MAKEYUVCONSTANTS(2020, YG, YB, UB, UG, VG, VR) + +#undef YG +#undef YB #undef UB #undef UG #undef VG #undef VR + +// BT.2020 full range YUV to RGB reference +// R = Y + V * 1.474600 +// G = Y - U * 0.164553 - V * 0.571353 +// B = Y + U * 1.881400 +// KR = 0.2627; KB = 0.0593 + +#define UB 120 /* round(1.881400 * 64) */ +#define UG 11 /* round(0.164553 * 64) */ +#define VG 37 /* round(0.571353 * 64) */ +#define VR 94 /* round(1.474600 * 64) */ + +// Y contribution to R,G,B. Scale and bias. (same as jpeg) +#define YG 16320 /* round(1 * 64 * 256 * 256 / 257) */ +#define YB 32 /* 64 / 2 */ + +MAKEYUVCONSTANTS(V2020, YG, YB, UB, UG, VG, VR) + #undef YG +#undef YB +#undef UB +#undef UG +#undef VG +#undef VR + +#undef BB +#undef BG +#undef BR + +#undef MAKEYUVCONSTANTS + +#if defined(__aarch64__) || defined(__arm__) +#define LOAD_YUV_CONSTANTS \ + int ub = yuvconstants->kUVCoeff[0]; \ + int vr = yuvconstants->kUVCoeff[1]; \ + int ug = yuvconstants->kUVCoeff[2]; \ + int vg = yuvconstants->kUVCoeff[3]; \ + int yg = yuvconstants->kRGBCoeffBias[0]; \ + int bb = yuvconstants->kRGBCoeffBias[1]; \ + int bg = yuvconstants->kRGBCoeffBias[2]; \ + int br = yuvconstants->kRGBCoeffBias[3] + +#define CALC_RGB16 \ + int32_t y1 = (uint32_t)(y32 * yg) >> 16; \ + int b16 = y1 + (u * ub) - bb; \ + int g16 = y1 + bg - (u * ug + v * vg); \ + int r16 = y1 + (v * vr) - br +#else +#define LOAD_YUV_CONSTANTS \ + int ub = yuvconstants->kUVToB[0]; \ + int ug = yuvconstants->kUVToG[0]; \ + int vg = yuvconstants->kUVToG[1]; \ + int vr = yuvconstants->kUVToR[1]; \ + int yg = yuvconstants->kYToRgb[0]; \ + int yb = yuvconstants->kYBiasToRgb[0] + +#define CALC_RGB16 \ + int32_t y1 = ((uint32_t)(y32 * yg) >> 16) + yb; \ + int8_t ui = (int8_t)u; \ + int8_t vi = (int8_t)v; \ + ui -= 0x80; \ + vi -= 0x80; \ + int b16 = y1 + (ui * ub); \ + int g16 = y1 - (ui * ug + vi * vg); \ + int r16 = y1 + (vi * vr) +#endif // C reference code that mimics the YUV assembly. // Reads 8 bit YUV and leaves result as 16 bit. 
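The CALC_RGB16 macro above is the heart of the C reference path: the coefficients are stored as round(coefficient * 64), so b16/g16/r16 come out in 10.6 fixed point and every caller clamps and shifts right by 6. A minimal standalone sketch of the same arithmetic for one BT.601 limited-range pixel, using the non-ARM layout (the helper name and the inline clamping are ours, not part of libyuv; the constants are copied from the I601 block above):

#include <stdint.h>

/* Illustrative only: one pixel of the CALC_RGB16 math with BT.601 limited
   range constants. libyuv itself reads these values out of the YuvConstants
   tables via LOAD_YUV_CONSTANTS instead of hard-coding them. */
static void Yuv601PixelSketch(uint8_t y, uint8_t u, uint8_t v,
                              uint8_t* b, uint8_t* g, uint8_t* r) {
  const int ub = 128, ug = 25, vg = 52, vr = 102; /* round(coef * 64) */
  const int yg = 18997, yb = -1160;               /* Y scale and bias */
  uint32_t y32 = (uint32_t)y * 0x0101;            /* replicate 8 bits to 16 */
  int y1 = (int)((y32 * yg) >> 16) + yb;
  int ui = (int)u - 128;
  int vi = (int)v - 128;
  int b16 = y1 + ui * ub;                         /* 10.6 fixed point */
  int g16 = y1 - (ui * ug + vi * vg);
  int r16 = y1 + vi * vr;
  *b = (uint8_t)(b16 < 0 ? 0 : b16 > 255 * 64 ? 255 : b16 >> 6);
  *g = (uint8_t)(g16 < 0 ? 0 : g16 > 255 * 64 ? 255 : g16 >> 6);
  *r = (uint8_t)(r16 < 0 ? 0 : r16 > 255 * 64 ? 255 : r16 >> 6);
}

Feeding y = 235, u = v = 128 (limited-range white) lands on 255,255,255 after the shift, which is a quick way to sanity-check the constants.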
@@ -1682,39 +1725,12 @@ static __inline void YuvPixel(uint8_t y, uint8_t* g, uint8_t* r, const struct YuvConstants* yuvconstants) { -#if defined(__aarch64__) - int ub = -yuvconstants->kUVToRB[0]; - int ug = yuvconstants->kUVToG[0]; - int vg = yuvconstants->kUVToG[1]; - int vr = -yuvconstants->kUVToRB[1]; - int bb = yuvconstants->kUVBiasBGR[0]; - int bg = yuvconstants->kUVBiasBGR[1]; - int br = yuvconstants->kUVBiasBGR[2]; - int yg = yuvconstants->kYToRgb[1]; -#elif defined(__arm__) - int ub = -yuvconstants->kUVToRB[0]; - int ug = yuvconstants->kUVToG[0]; - int vg = yuvconstants->kUVToG[4]; - int vr = -yuvconstants->kUVToRB[4]; - int bb = yuvconstants->kUVBiasBGR[0]; - int bg = yuvconstants->kUVBiasBGR[1]; - int br = yuvconstants->kUVBiasBGR[2]; - int yg = yuvconstants->kYToRgb[1]; -#else - int ub = yuvconstants->kUVToB[0]; - int ug = yuvconstants->kUVToG[0]; - int vg = yuvconstants->kUVToG[1]; - int vr = yuvconstants->kUVToR[1]; - int bb = yuvconstants->kUVBiasB[0]; - int bg = yuvconstants->kUVBiasG[0]; - int br = yuvconstants->kUVBiasR[0]; - int yg = yuvconstants->kYToRgb[0]; -#endif - - uint32_t y1 = (uint32_t)(y * 0x0101 * yg) >> 16; - *b = Clamp((int32_t)(-(u * ub) + y1 + bb) >> 6); - *g = Clamp((int32_t)(-(u * ug + v * vg) + y1 + bg) >> 6); - *r = Clamp((int32_t)(-(v * vr) + y1 + br) >> 6); + LOAD_YUV_CONSTANTS; + uint32_t y32 = y * 0x0101; + CALC_RGB16; + *b = STATIC_CAST(uint8_t, Clamp((int32_t)(b16) >> 6)); + *g = STATIC_CAST(uint8_t, Clamp((int32_t)(g16) >> 6)); + *r = STATIC_CAST(uint8_t, Clamp((int32_t)(r16) >> 6)); } // Reads 8 bit YUV and leaves result as 16 bit. @@ -1725,85 +1741,50 @@ static __inline void YuvPixel8_16(uint8_t y, int* g, int* r, const struct YuvConstants* yuvconstants) { -#if defined(__aarch64__) - int ub = -yuvconstants->kUVToRB[0]; - int ug = yuvconstants->kUVToG[0]; - int vg = yuvconstants->kUVToG[1]; - int vr = -yuvconstants->kUVToRB[1]; - int bb = yuvconstants->kUVBiasBGR[0]; - int bg = yuvconstants->kUVBiasBGR[1]; - int br = yuvconstants->kUVBiasBGR[2]; - int yg = yuvconstants->kYToRgb[1]; -#elif defined(__arm__) - int ub = -yuvconstants->kUVToRB[0]; - int ug = yuvconstants->kUVToG[0]; - int vg = yuvconstants->kUVToG[4]; - int vr = -yuvconstants->kUVToRB[4]; - int bb = yuvconstants->kUVBiasBGR[0]; - int bg = yuvconstants->kUVBiasBGR[1]; - int br = yuvconstants->kUVBiasBGR[2]; - int yg = yuvconstants->kYToRgb[1]; -#else - int ub = yuvconstants->kUVToB[0]; - int ug = yuvconstants->kUVToG[0]; - int vg = yuvconstants->kUVToG[1]; - int vr = yuvconstants->kUVToR[1]; - int bb = yuvconstants->kUVBiasB[0]; - int bg = yuvconstants->kUVBiasG[0]; - int br = yuvconstants->kUVBiasR[0]; - int yg = yuvconstants->kYToRgb[0]; -#endif - - uint32_t y1 = (uint32_t)(y * 0x0101 * yg) >> 16; - *b = (int)(-(u * ub) + y1 + bb); - *g = (int)(-(u * ug + v * vg) + y1 + bg); - *r = (int)(-(v * vr) + y1 + br); + LOAD_YUV_CONSTANTS; + uint32_t y32 = y * 0x0101; + CALC_RGB16; + *b = b16; + *g = g16; + *r = r16; } // C reference code that mimics the YUV 16 bit assembly. // Reads 10 bit YUV and leaves result as 16 bit. 
-static __inline void YuvPixel16(int16_t y, - int16_t u, - int16_t v, - int* b, - int* g, - int* r, - const struct YuvConstants* yuvconstants) { -#if defined(__aarch64__) - int ub = -yuvconstants->kUVToRB[0]; - int ug = yuvconstants->kUVToG[0]; - int vg = yuvconstants->kUVToG[1]; - int vr = -yuvconstants->kUVToRB[1]; - int bb = yuvconstants->kUVBiasBGR[0]; - int bg = yuvconstants->kUVBiasBGR[1]; - int br = yuvconstants->kUVBiasBGR[2]; - int yg = yuvconstants->kYToRgb[1]; -#elif defined(__arm__) - int ub = -yuvconstants->kUVToRB[0]; - int ug = yuvconstants->kUVToG[0]; - int vg = yuvconstants->kUVToG[4]; - int vr = -yuvconstants->kUVToRB[4]; - int bb = yuvconstants->kUVBiasBGR[0]; - int bg = yuvconstants->kUVBiasBGR[1]; - int br = yuvconstants->kUVBiasBGR[2]; - int yg = yuvconstants->kYToRgb[1]; -#else - int ub = yuvconstants->kUVToB[0]; - int ug = yuvconstants->kUVToG[0]; - int vg = yuvconstants->kUVToG[1]; - int vr = yuvconstants->kUVToR[1]; - int bb = yuvconstants->kUVBiasB[0]; - int bg = yuvconstants->kUVBiasG[0]; - int br = yuvconstants->kUVBiasR[0]; - int yg = yuvconstants->kYToRgb[0]; -#endif +static __inline void YuvPixel10_16(uint16_t y, + uint16_t u, + uint16_t v, + int* b, + int* g, + int* r, + const struct YuvConstants* yuvconstants) { + LOAD_YUV_CONSTANTS; + uint32_t y32 = (y << 6) | (y >> 4); + u = STATIC_CAST(uint8_t, clamp255(u >> 2)); + v = STATIC_CAST(uint8_t, clamp255(v >> 2)); + CALC_RGB16; + *b = b16; + *g = g16; + *r = r16; +} - uint32_t y1 = (uint32_t)((y << 6) * yg) >> 16; - u = clamp255(u >> 2); - v = clamp255(v >> 2); - *b = (int)(-(u * ub) + y1 + bb); - *g = (int)(-(u * ug + v * vg) + y1 + bg); - *r = (int)(-(v * vr) + y1 + br); +// C reference code that mimics the YUV 16 bit assembly. +// Reads 12 bit YUV and leaves result as 16 bit. +static __inline void YuvPixel12_16(int16_t y, + int16_t u, + int16_t v, + int* b, + int* g, + int* r, + const struct YuvConstants* yuvconstants) { + LOAD_YUV_CONSTANTS; + uint32_t y32 = (y << 4) | (y >> 8); + u = STATIC_CAST(uint8_t, clamp255(u >> 4)); + v = STATIC_CAST(uint8_t, clamp255(v >> 4)); + CALC_RGB16; + *b = b16; + *g = g16; + *r = r16; } // C reference code that mimics the YUV 10 bit assembly. @@ -1818,36 +1799,88 @@ static __inline void YuvPixel10(uint16_t y, int b16; int g16; int r16; - YuvPixel16(y, u, v, &b16, &g16, &r16, yuvconstants); - *b = Clamp(b16 >> 6); - *g = Clamp(g16 >> 6); - *r = Clamp(r16 >> 6); + YuvPixel10_16(y, u, v, &b16, &g16, &r16, yuvconstants); + *b = STATIC_CAST(uint8_t, Clamp(b16 >> 6)); + *g = STATIC_CAST(uint8_t, Clamp(g16 >> 6)); + *r = STATIC_CAST(uint8_t, Clamp(r16 >> 6)); +} + +// C reference code that mimics the YUV 12 bit assembly. +// Reads 12 bit YUV and clamps down to 8 bit RGB. +static __inline void YuvPixel12(uint16_t y, + uint16_t u, + uint16_t v, + uint8_t* b, + uint8_t* g, + uint8_t* r, + const struct YuvConstants* yuvconstants) { + int b16; + int g16; + int r16; + YuvPixel12_16(y, u, v, &b16, &g16, &r16, yuvconstants); + *b = STATIC_CAST(uint8_t, Clamp(b16 >> 6)); + *g = STATIC_CAST(uint8_t, Clamp(g16 >> 6)); + *r = STATIC_CAST(uint8_t, Clamp(r16 >> 6)); +} + +// C reference code that mimics the YUV 16 bit assembly. +// Reads 16 bit YUV and leaves result as 8 bit. 
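Before the 16-bit variants that follow, note the pattern shared by YuvPixel10_16 and YuvPixel12_16 above: Y is widened by bit replication, (y << 6) | (y >> 4) for 10 bit and (y << 4) | (y >> 8) for 12 bit, so every depth feeds CALC_RGB16 with the same full-scale 16-bit range that y * 0x0101 produces for 8-bit input, while U and V are simply shifted back down to 8 bit. A tiny self-contained check of that reading (ours, not libyuv code):

#include <assert.h>
#include <stdint.h>

/* The maximum code at each depth widens to the maximum 16-bit code. */
int main(void) {
  uint32_t y8 = 255, y10 = 1023, y12 = 4095;
  assert(y8 * 0x0101 == 65535);                 /* 8-bit path */
  assert(((y10 << 6) | (y10 >> 4)) == 65535);   /* YuvPixel10_16 */
  assert(((y12 << 4) | (y12 >> 8)) == 65535);   /* YuvPixel12_16 */
  return 0;
}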
+static __inline void YuvPixel16_8(uint16_t y, + uint16_t u, + uint16_t v, + uint8_t* b, + uint8_t* g, + uint8_t* r, + const struct YuvConstants* yuvconstants) { + LOAD_YUV_CONSTANTS; + uint32_t y32 = y; + u = STATIC_CAST(uint16_t, clamp255(u >> 8)); + v = STATIC_CAST(uint16_t, clamp255(v >> 8)); + CALC_RGB16; + *b = STATIC_CAST(uint8_t, Clamp((int32_t)(b16) >> 6)); + *g = STATIC_CAST(uint8_t, Clamp((int32_t)(g16) >> 6)); + *r = STATIC_CAST(uint8_t, Clamp((int32_t)(r16) >> 6)); +} + +// C reference code that mimics the YUV 16 bit assembly. +// Reads 16 bit YUV and leaves result as 16 bit. +static __inline void YuvPixel16_16(uint16_t y, + uint16_t u, + uint16_t v, + int* b, + int* g, + int* r, + const struct YuvConstants* yuvconstants) { + LOAD_YUV_CONSTANTS; + uint32_t y32 = y; + u = STATIC_CAST(uint16_t, clamp255(u >> 8)); + v = STATIC_CAST(uint16_t, clamp255(v >> 8)); + CALC_RGB16; + *b = b16; + *g = g16; + *r = r16; } // C reference code that mimics the YUV assembly. -// Reads 8 bit YUV and leaves result as 16 bit. +// Reads 8 bit YUV and leaves result as 8 bit. static __inline void YPixel(uint8_t y, uint8_t* b, uint8_t* g, uint8_t* r, const struct YuvConstants* yuvconstants) { #if defined(__aarch64__) || defined(__arm__) - int ygb = yuvconstants->kUVBiasBGR[3]; - int yg = yuvconstants->kYToRgb[1]; + int yg = yuvconstants->kRGBCoeffBias[0]; + int ygb = yuvconstants->kRGBCoeffBias[4]; #else int ygb = yuvconstants->kYBiasToRgb[0]; int yg = yuvconstants->kYToRgb[0]; #endif uint32_t y1 = (uint32_t)(y * 0x0101 * yg) >> 16; - *b = Clamp(((int32_t)(y1) + ygb) >> 6); - *g = Clamp(((int32_t)(y1) + ygb) >> 6); - *r = Clamp(((int32_t)(y1) + ygb) >> 6); + *b = STATIC_CAST(uint8_t, Clamp(((int32_t)(y1) + ygb) >> 6)); + *g = STATIC_CAST(uint8_t, Clamp(((int32_t)(y1) + ygb) >> 6)); + *r = STATIC_CAST(uint8_t, Clamp(((int32_t)(y1) + ygb) >> 6)); } -#if !defined(LIBYUV_DISABLE_NEON) && \ - (defined(__ARM_NEON__) || defined(__aarch64__) || defined(LIBYUV_NEON)) -// C mimic assembly. -// TODO(fbarchard): Remove subsampling from Neon. void I444ToARGBRow_C(const uint8_t* src_y, const uint8_t* src_u, const uint8_t* src_v, @@ -1855,45 +1888,33 @@ void I444ToARGBRow_C(const uint8_t* src_y, const struct YuvConstants* yuvconstants, int width) { int x; - for (x = 0; x < width - 1; x += 2) { - uint8_t u = (src_u[0] + src_u[1] + 1) >> 1; - uint8_t v = (src_v[0] + src_v[1] + 1) >> 1; - YuvPixel(src_y[0], u, v, rgb_buf + 0, rgb_buf + 1, rgb_buf + 2, - yuvconstants); - rgb_buf[3] = 255; - YuvPixel(src_y[1], u, v, rgb_buf + 4, rgb_buf + 5, rgb_buf + 6, - yuvconstants); - rgb_buf[7] = 255; - src_y += 2; - src_u += 2; - src_v += 2; - rgb_buf += 8; // Advance 2 pixels. - } - if (width & 1) { + for (x = 0; x < width; ++x) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2, yuvconstants); rgb_buf[3] = 255; + src_y += 1; + src_u += 1; + src_v += 1; + rgb_buf += 4; // Advance 1 pixel. } } -#else -void I444ToARGBRow_C(const uint8_t* src_y, - const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* rgb_buf, - const struct YuvConstants* yuvconstants, - int width) { + +void I444ToRGB24Row_C(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width) { int x; for (x = 0; x < width; ++x) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2, yuvconstants); - rgb_buf[3] = 255; src_y += 1; src_u += 1; src_v += 1; - rgb_buf += 4; // Advance 1 pixel. + rgb_buf += 3; // Advance 1 pixel. 
} } -#endif // Also used for 420 void I422ToARGBRow_C(const uint8_t* src_y, @@ -1949,9 +1970,102 @@ void I210ToARGBRow_C(const uint16_t* src_y, } } +void I410ToARGBRow_C(const uint16_t* src_y, + const uint16_t* src_u, + const uint16_t* src_v, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width) { + int x; + for (x = 0; x < width; ++x) { + YuvPixel10(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, + rgb_buf + 2, yuvconstants); + rgb_buf[3] = 255; + src_y += 1; + src_u += 1; + src_v += 1; + rgb_buf += 4; // Advance 1 pixels. + } +} + +void I210AlphaToARGBRow_C(const uint16_t* src_y, + const uint16_t* src_u, + const uint16_t* src_v, + const uint16_t* src_a, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width) { + int x; + for (x = 0; x < width - 1; x += 2) { + YuvPixel10(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, + rgb_buf + 2, yuvconstants); + rgb_buf[3] = STATIC_CAST(uint8_t, clamp255(src_a[0] >> 2)); + YuvPixel10(src_y[1], src_u[0], src_v[0], rgb_buf + 4, rgb_buf + 5, + rgb_buf + 6, yuvconstants); + rgb_buf[7] = STATIC_CAST(uint8_t, clamp255(src_a[1] >> 2)); + src_y += 2; + src_u += 1; + src_v += 1; + src_a += 2; + rgb_buf += 8; // Advance 2 pixels. + } + if (width & 1) { + YuvPixel10(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, + rgb_buf + 2, yuvconstants); + rgb_buf[3] = STATIC_CAST(uint8_t, clamp255(src_a[0] >> 2)); + } +} + +void I410AlphaToARGBRow_C(const uint16_t* src_y, + const uint16_t* src_u, + const uint16_t* src_v, + const uint16_t* src_a, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width) { + int x; + for (x = 0; x < width; ++x) { + YuvPixel10(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, + rgb_buf + 2, yuvconstants); + rgb_buf[3] = STATIC_CAST(uint8_t, clamp255(src_a[0] >> 2)); + src_y += 1; + src_u += 1; + src_v += 1; + src_a += 1; + rgb_buf += 4; // Advance 1 pixels. + } +} + +// 12 bit YUV to ARGB +void I212ToARGBRow_C(const uint16_t* src_y, + const uint16_t* src_u, + const uint16_t* src_v, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width) { + int x; + for (x = 0; x < width - 1; x += 2) { + YuvPixel12(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, + rgb_buf + 2, yuvconstants); + rgb_buf[3] = 255; + YuvPixel12(src_y[1], src_u[0], src_v[0], rgb_buf + 4, rgb_buf + 5, + rgb_buf + 6, yuvconstants); + rgb_buf[7] = 255; + src_y += 2; + src_u += 1; + src_v += 1; + rgb_buf += 8; // Advance 2 pixels. + } + if (width & 1) { + YuvPixel12(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, + rgb_buf + 2, yuvconstants); + rgb_buf[3] = 255; + } +} + static void StoreAR30(uint8_t* rgb_buf, int b, int g, int r) { uint32_t ar30; - b = b >> 4; // convert 10.6 to 10 bit. + b = b >> 4; // convert 8 bit 10.6 to 10 bit. g = g >> 4; r = r >> 4; b = Clamp10(b); @@ -1972,19 +2086,149 @@ void I210ToAR30Row_C(const uint16_t* src_y, int b; int g; int r; - for (x = 0; x < width - 1; x += 2) { - YuvPixel16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants); - StoreAR30(rgb_buf, b, g, r); - YuvPixel16(src_y[1], src_u[0], src_v[0], &b, &g, &r, yuvconstants); - StoreAR30(rgb_buf + 4, b, g, r); - src_y += 2; - src_u += 1; - src_v += 1; - rgb_buf += 8; // Advance 2 pixels. 
- } - if (width & 1) { - YuvPixel16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants); - StoreAR30(rgb_buf, b, g, r); + for (x = 0; x < width - 1; x += 2) { + YuvPixel10_16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants); + StoreAR30(rgb_buf, b, g, r); + YuvPixel10_16(src_y[1], src_u[0], src_v[0], &b, &g, &r, yuvconstants); + StoreAR30(rgb_buf + 4, b, g, r); + src_y += 2; + src_u += 1; + src_v += 1; + rgb_buf += 8; // Advance 2 pixels. + } + if (width & 1) { + YuvPixel10_16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants); + StoreAR30(rgb_buf, b, g, r); + } +} + +// 12 bit YUV to 10 bit AR30 +void I212ToAR30Row_C(const uint16_t* src_y, + const uint16_t* src_u, + const uint16_t* src_v, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int b; + int g; + int r; + for (x = 0; x < width - 1; x += 2) { + YuvPixel12_16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants); + StoreAR30(rgb_buf, b, g, r); + YuvPixel12_16(src_y[1], src_u[0], src_v[0], &b, &g, &r, yuvconstants); + StoreAR30(rgb_buf + 4, b, g, r); + src_y += 2; + src_u += 1; + src_v += 1; + rgb_buf += 8; // Advance 2 pixels. + } + if (width & 1) { + YuvPixel12_16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants); + StoreAR30(rgb_buf, b, g, r); + } +} + +void I410ToAR30Row_C(const uint16_t* src_y, + const uint16_t* src_u, + const uint16_t* src_v, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int b; + int g; + int r; + for (x = 0; x < width; ++x) { + YuvPixel10_16(src_y[0], src_u[0], src_v[0], &b, &g, &r, yuvconstants); + StoreAR30(rgb_buf, b, g, r); + src_y += 1; + src_u += 1; + src_v += 1; + rgb_buf += 4; // Advance 1 pixel. + } +} + +// P210 has 10 bits in msb of 16 bit NV12 style layout. +void P210ToARGBRow_C(const uint16_t* src_y, + const uint16_t* src_uv, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + int x; + for (x = 0; x < width - 1; x += 2) { + YuvPixel16_8(src_y[0], src_uv[0], src_uv[1], dst_argb + 0, dst_argb + 1, + dst_argb + 2, yuvconstants); + dst_argb[3] = 255; + YuvPixel16_8(src_y[1], src_uv[0], src_uv[1], dst_argb + 4, dst_argb + 5, + dst_argb + 6, yuvconstants); + dst_argb[7] = 255; + src_y += 2; + src_uv += 2; + dst_argb += 8; // Advance 2 pixels. + } + if (width & 1) { + YuvPixel16_8(src_y[0], src_uv[0], src_uv[1], dst_argb + 0, dst_argb + 1, + dst_argb + 2, yuvconstants); + dst_argb[3] = 255; + } +} + +void P410ToARGBRow_C(const uint16_t* src_y, + const uint16_t* src_uv, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + int x; + for (x = 0; x < width; ++x) { + YuvPixel16_8(src_y[0], src_uv[0], src_uv[1], dst_argb + 0, dst_argb + 1, + dst_argb + 2, yuvconstants); + dst_argb[3] = 255; + src_y += 1; + src_uv += 2; + dst_argb += 4; // Advance 1 pixels. + } +} + +void P210ToAR30Row_C(const uint16_t* src_y, + const uint16_t* src_uv, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int b; + int g; + int r; + for (x = 0; x < width - 1; x += 2) { + YuvPixel16_16(src_y[0], src_uv[0], src_uv[1], &b, &g, &r, yuvconstants); + StoreAR30(dst_ar30, b, g, r); + YuvPixel16_16(src_y[1], src_uv[0], src_uv[1], &b, &g, &r, yuvconstants); + StoreAR30(dst_ar30 + 4, b, g, r); + src_y += 2; + src_uv += 2; + dst_ar30 += 8; // Advance 2 pixels. 
+ } + if (width & 1) { + YuvPixel16_16(src_y[0], src_uv[0], src_uv[1], &b, &g, &r, yuvconstants); + StoreAR30(dst_ar30, b, g, r); + } +} + +void P410ToAR30Row_C(const uint16_t* src_y, + const uint16_t* src_uv, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int b; + int g; + int r; + for (x = 0; x < width; ++x) { + YuvPixel16_16(src_y[0], src_uv[0], src_uv[1], &b, &g, &r, yuvconstants); + StoreAR30(dst_ar30, b, g, r); + src_y += 1; + src_uv += 2; + dst_ar30 += 4; // Advance 1 pixel. } } @@ -2016,6 +2260,26 @@ void I422ToAR30Row_C(const uint8_t* src_y, } } +void I444AlphaToARGBRow_C(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + const uint8_t* src_a, + uint8_t* rgb_buf, + const struct YuvConstants* yuvconstants, + int width) { + int x; + for (x = 0; x < width; ++x) { + YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, + rgb_buf + 2, yuvconstants); + rgb_buf[3] = src_a[0]; + src_y += 1; + src_u += 1; + src_v += 1; + src_a += 1; + rgb_buf += 4; // Advance 1 pixel. + } +} + void I422AlphaToARGBRow_C(const uint8_t* src_y, const uint8_t* src_u, const uint8_t* src_v, @@ -2089,8 +2353,10 @@ void I422ToARGB4444Row_C(const uint8_t* src_y, b1 = b1 >> 4; g1 = g1 >> 4; r1 = r1 >> 4; - *(uint32_t*)(dst_argb4444) = b0 | (g0 << 4) | (r0 << 8) | (b1 << 16) | - (g1 << 20) | (r1 << 24) | 0xf000f000; + *(uint16_t*)(dst_argb4444 + 0) = + STATIC_CAST(uint16_t, b0 | (g0 << 4) | (r0 << 8) | 0xf000); + *(uint16_t*)(dst_argb4444 + 2) = + STATIC_CAST(uint16_t, b1 | (g1 << 4) | (r1 << 8) | 0xf000); src_y += 2; src_u += 1; src_v += 1; @@ -2101,7 +2367,8 @@ void I422ToARGB4444Row_C(const uint8_t* src_y, b0 = b0 >> 4; g0 = g0 >> 4; r0 = r0 >> 4; - *(uint16_t*)(dst_argb4444) = b0 | (g0 << 4) | (r0 << 8) | 0xf000; + *(uint16_t*)(dst_argb4444) = + STATIC_CAST(uint16_t, b0 | (g0 << 4) | (r0 << 8) | 0xf000); } } @@ -2127,8 +2394,10 @@ void I422ToARGB1555Row_C(const uint8_t* src_y, b1 = b1 >> 3; g1 = g1 >> 3; r1 = r1 >> 3; - *(uint32_t*)(dst_argb1555) = b0 | (g0 << 5) | (r0 << 10) | (b1 << 16) | - (g1 << 21) | (r1 << 26) | 0x80008000; + *(uint16_t*)(dst_argb1555 + 0) = + STATIC_CAST(uint16_t, b0 | (g0 << 5) | (r0 << 10) | 0x8000); + *(uint16_t*)(dst_argb1555 + 2) = + STATIC_CAST(uint16_t, b1 | (g1 << 5) | (r1 << 10) | 0x8000); src_y += 2; src_u += 1; src_v += 1; @@ -2139,7 +2408,8 @@ void I422ToARGB1555Row_C(const uint8_t* src_y, b0 = b0 >> 3; g0 = g0 >> 3; r0 = r0 >> 3; - *(uint16_t*)(dst_argb1555) = b0 | (g0 << 5) | (r0 << 10) | 0x8000; + *(uint16_t*)(dst_argb1555) = + STATIC_CAST(uint16_t, b0 | (g0 << 5) | (r0 << 10) | 0x8000); } } @@ -2165,8 +2435,10 @@ void I422ToRGB565Row_C(const uint8_t* src_y, b1 = b1 >> 3; g1 = g1 >> 2; r1 = r1 >> 3; - *(uint32_t*)(dst_rgb565) = - b0 | (g0 << 5) | (r0 << 11) | (b1 << 16) | (g1 << 21) | (r1 << 27); + *(uint16_t*)(dst_rgb565 + 0) = + STATIC_CAST(uint16_t, b0 | (g0 << 5) | (r0 << 11)); + *(uint16_t*)(dst_rgb565 + 2) = + STATIC_CAST(uint16_t, b1 | (g1 << 5) | (r1 << 11)); src_y += 2; src_u += 1; src_v += 1; @@ -2177,7 +2449,8 @@ void I422ToRGB565Row_C(const uint8_t* src_y, b0 = b0 >> 3; g0 = g0 >> 2; r0 = r0 >> 3; - *(uint16_t*)(dst_rgb565) = b0 | (g0 << 5) | (r0 << 11); + *(uint16_t*)(dst_rgb565 + 0) = + STATIC_CAST(uint16_t, b0 | (g0 << 5) | (r0 << 11)); } } @@ -2292,8 +2565,12 @@ void NV12ToRGB565Row_C(const uint8_t* src_y, b1 = b1 >> 3; g1 = g1 >> 2; r1 = r1 >> 3; - *(uint32_t*)(dst_rgb565) = - b0 | (g0 << 5) | (r0 << 11) | (b1 << 16) | (g1 << 21) | (r1 << 27); + *(uint16_t*)(dst_rgb565 + 0) = 
STATIC_CAST(uint16_t, b0) | + STATIC_CAST(uint16_t, g0 << 5) | + STATIC_CAST(uint16_t, r0 << 11); + *(uint16_t*)(dst_rgb565 + 2) = STATIC_CAST(uint16_t, b1) | + STATIC_CAST(uint16_t, g1 << 5) | + STATIC_CAST(uint16_t, r1 << 11); src_y += 2; src_uv += 2; dst_rgb565 += 4; // Advance 2 pixels. @@ -2303,7 +2580,9 @@ void NV12ToRGB565Row_C(const uint8_t* src_y, b0 = b0 >> 3; g0 = g0 >> 2; r0 = r0 >> 3; - *(uint16_t*)(dst_rgb565) = b0 | (g0 << 5) | (r0 << 11); + *(uint16_t*)(dst_rgb565) = STATIC_CAST(uint16_t, b0) | + STATIC_CAST(uint16_t, g0 << 5) | + STATIC_CAST(uint16_t, r0 << 11); } } @@ -2409,6 +2688,19 @@ void MirrorRow_C(const uint8_t* src, uint8_t* dst, int width) { } } +void MirrorRow_16_C(const uint16_t* src, uint16_t* dst, int width) { + int x; + src += width - 1; + for (x = 0; x < width - 1; x += 2) { + dst[x] = src[0]; + dst[x + 1] = src[-1]; + src -= 2; + } + if (width & 1) { + dst[width - 1] = src[0]; + } +} + void MirrorUVRow_C(const uint8_t* src_uv, uint8_t* dst_uv, int width) { int x; src_uv += (width - 1) << 1; @@ -2505,6 +2797,101 @@ void MergeUVRow_C(const uint8_t* src_u, } } +void DetileRow_C(const uint8_t* src, + ptrdiff_t src_tile_stride, + uint8_t* dst, + int width) { + int x; + for (x = 0; x < width - 15; x += 16) { + memcpy(dst, src, 16); + dst += 16; + src += src_tile_stride; + } + if (width & 15) { + memcpy(dst, src, width & 15); + } +} + +void DetileRow_16_C(const uint16_t* src, + ptrdiff_t src_tile_stride, + uint16_t* dst, + int width) { + int x; + for (x = 0; x < width - 15; x += 16) { + memcpy(dst, src, 16 * sizeof(uint16_t)); + dst += 16; + src += src_tile_stride; + } + if (width & 15) { + memcpy(dst, src, (width & 15) * sizeof(uint16_t)); + } +} + +void DetileSplitUVRow_C(const uint8_t* src_uv, + ptrdiff_t src_tile_stride, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + for (x = 0; x < width - 15; x += 16) { + SplitUVRow_C(src_uv, dst_u, dst_v, 8); + dst_u += 8; + dst_v += 8; + src_uv += src_tile_stride; + } + if (width & 15) { + SplitUVRow_C(src_uv, dst_u, dst_v, ((width & 15) + 1) / 2); + } +} + +void DetileToYUY2_C(const uint8_t* src_y, + ptrdiff_t src_y_tile_stride, + const uint8_t* src_uv, + ptrdiff_t src_uv_tile_stride, + uint8_t* dst_yuy2, + int width) { + for (int x = 0; x < width - 15; x += 16) { + for (int i = 0; i < 8; i++) { + dst_yuy2[0] = src_y[0]; + dst_yuy2[1] = src_uv[0]; + dst_yuy2[2] = src_y[1]; + dst_yuy2[3] = src_uv[1]; + dst_yuy2 += 4; + src_y += 2; + src_uv += 2; + } + src_y += src_y_tile_stride - 16; + src_uv += src_uv_tile_stride - 16; + } +} + +// Unpack MT2T into tiled P010 64 pixels at a time. MT2T's bitstream is encoded +// in 80 byte blocks representing 64 pixels each. The first 16 bytes of the +// block contain all of the lower 2 bits of each pixel packed together, and the +// next 64 bytes represent all the upper 8 bits of the pixel. 
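The 80-byte block layout described in the comment above can be checked in a few lines before reading the row function that follows: each group of one low-bits byte plus four high bytes expands to four 16-bit outputs whose top 10 bits are the original sample and whose low bits replicate its high bits, the same full-scale widening used by the 10-bit YUV helpers earlier in this file. A standalone sketch for a single sample (the value is arbitrary; this is not libyuv code):

#include <assert.h>
#include <stdint.h>

int main(void) {
  uint16_t sample10 = 0x2AB;                  /* any 10-bit value works */
  uint8_t upper = (uint8_t)(sample10 >> 2);   /* stored in the 64-byte tail */
  uint8_t lower2 = sample10 & 0x3;            /* packed in the 16-byte head */
  uint16_t unpacked =
      (uint16_t)((lower2 << 6) | (upper << 8) | (upper >> 2));
  /* Same widening as (sample << 6) | (sample >> 4). */
  assert(unpacked == (uint16_t)((sample10 << 6) | (sample10 >> 4)));
  return 0;
}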
+void UnpackMT2T_C(const uint8_t* src, uint16_t* dst, size_t size) { + for (size_t i = 0; i < size; i += 80) { + const uint8_t* src_lower_bits = src; + const uint8_t* src_upper_bits = src + 16; + + for (int j = 0; j < 16; j++) { + uint8_t lower_bits = src_lower_bits[j]; + *dst++ = (lower_bits & 0x03) << 6 | (uint16_t)src_upper_bits[j * 4] << 8 | + (uint16_t)src_upper_bits[j * 4] >> 2; + *dst++ = (lower_bits & 0x0C) << 4 | + (uint16_t)src_upper_bits[j * 4 + 1] << 8 | + (uint16_t)src_upper_bits[j * 4 + 1] >> 2; + *dst++ = (lower_bits & 0x30) << 2 | + (uint16_t)src_upper_bits[j * 4 + 2] << 8 | + (uint16_t)src_upper_bits[j * 4 + 2] >> 2; + *dst++ = (lower_bits & 0xC0) | (uint16_t)src_upper_bits[j * 4 + 3] << 8 | + (uint16_t)src_upper_bits[j * 4 + 3] >> 2; + } + + src += 80; + } +} + void SplitRGBRow_C(const uint8_t* src_rgb, uint8_t* dst_r, uint8_t* dst_g, @@ -2533,27 +2920,197 @@ void MergeRGBRow_C(const uint8_t* src_r, } } -// Use scale to convert lsb formats to msb, depending how many bits there are: -// 128 = 9 bits -// 64 = 10 bits -// 16 = 12 bits -// 1 = 16 bits +void SplitARGBRow_C(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + uint8_t* dst_a, + int width) { + int x; + for (x = 0; x < width; ++x) { + dst_b[x] = src_argb[0]; + dst_g[x] = src_argb[1]; + dst_r[x] = src_argb[2]; + dst_a[x] = src_argb[3]; + src_argb += 4; + } +} + +void MergeARGBRow_C(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + const uint8_t* src_a, + uint8_t* dst_argb, + int width) { + int x; + for (x = 0; x < width; ++x) { + dst_argb[0] = src_b[x]; + dst_argb[1] = src_g[x]; + dst_argb[2] = src_r[x]; + dst_argb[3] = src_a[x]; + dst_argb += 4; + } +} + +void MergeXR30Row_C(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_ar30, + int depth, + int width) { + assert(depth >= 10); + assert(depth <= 16); + int x; + int shift = depth - 10; + uint32_t* dst_ar30_32 = (uint32_t*)dst_ar30; + for (x = 0; x < width; ++x) { + uint32_t r = clamp1023(src_r[x] >> shift); + uint32_t g = clamp1023(src_g[x] >> shift); + uint32_t b = clamp1023(src_b[x] >> shift); + dst_ar30_32[x] = b | (g << 10) | (r << 20) | 0xc0000000; + } +} + +void MergeAR64Row_C(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + const uint16_t* src_a, + uint16_t* dst_ar64, + int depth, + int width) { + assert(depth >= 1); + assert(depth <= 16); + int x; + int shift = 16 - depth; + int max = (1 << depth) - 1; + for (x = 0; x < width; ++x) { + dst_ar64[0] = STATIC_CAST(uint16_t, ClampMax(src_b[x], max) << shift); + dst_ar64[1] = STATIC_CAST(uint16_t, ClampMax(src_g[x], max) << shift); + dst_ar64[2] = STATIC_CAST(uint16_t, ClampMax(src_r[x], max) << shift); + dst_ar64[3] = STATIC_CAST(uint16_t, ClampMax(src_a[x], max) << shift); + dst_ar64 += 4; + } +} + +void MergeARGB16To8Row_C(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + const uint16_t* src_a, + uint8_t* dst_argb, + int depth, + int width) { + assert(depth >= 8); + assert(depth <= 16); + int x; + int shift = depth - 8; + for (x = 0; x < width; ++x) { + dst_argb[0] = STATIC_CAST(uint8_t, clamp255(src_b[x] >> shift)); + dst_argb[1] = STATIC_CAST(uint8_t, clamp255(src_g[x] >> shift)); + dst_argb[2] = STATIC_CAST(uint8_t, clamp255(src_r[x] >> shift)); + dst_argb[3] = STATIC_CAST(uint8_t, clamp255(src_a[x] >> shift)); + dst_argb += 4; + } +} + +void MergeXR64Row_C(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint16_t* dst_ar64, + int depth, + 
int width) { + assert(depth >= 1); + assert(depth <= 16); + int x; + int shift = 16 - depth; + int max = (1 << depth) - 1; + for (x = 0; x < width; ++x) { + dst_ar64[0] = STATIC_CAST(uint16_t, ClampMax(src_b[x], max) << shift); + dst_ar64[1] = STATIC_CAST(uint16_t, ClampMax(src_g[x], max) << shift); + dst_ar64[2] = STATIC_CAST(uint16_t, ClampMax(src_r[x], max) << shift); + dst_ar64[3] = 0xffff; + dst_ar64 += 4; + } +} + +void MergeXRGB16To8Row_C(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_argb, + int depth, + int width) { + assert(depth >= 8); + assert(depth <= 16); + int x; + int shift = depth - 8; + for (x = 0; x < width; ++x) { + dst_argb[0] = STATIC_CAST(uint8_t, clamp255(src_b[x] >> shift)); + dst_argb[1] = STATIC_CAST(uint8_t, clamp255(src_g[x] >> shift)); + dst_argb[2] = STATIC_CAST(uint8_t, clamp255(src_r[x] >> shift)); + dst_argb[3] = 0xff; + dst_argb += 4; + } +} + +void SplitXRGBRow_C(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width) { + int x; + for (x = 0; x < width; ++x) { + dst_b[x] = src_argb[0]; + dst_g[x] = src_argb[1]; + dst_r[x] = src_argb[2]; + src_argb += 4; + } +} + +void MergeXRGBRow_C(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + uint8_t* dst_argb, + int width) { + int x; + for (x = 0; x < width; ++x) { + dst_argb[0] = src_b[x]; + dst_argb[1] = src_g[x]; + dst_argb[2] = src_r[x]; + dst_argb[3] = 255; + dst_argb += 4; + } +} + +// Convert lsb formats to msb, depending on sample depth. void MergeUVRow_16_C(const uint16_t* src_u, const uint16_t* src_v, uint16_t* dst_uv, - int scale, + int depth, int width) { + int shift = 16 - depth; + assert(depth >= 8); + assert(depth <= 16); int x; - for (x = 0; x < width - 1; x += 2) { - dst_uv[0] = src_u[x] * scale; - dst_uv[1] = src_v[x] * scale; - dst_uv[2] = src_u[x + 1] * scale; - dst_uv[3] = src_v[x + 1] * scale; - dst_uv += 4; + for (x = 0; x < width; ++x) { + dst_uv[0] = STATIC_CAST(uint16_t, src_u[x] << shift); + dst_uv[1] = STATIC_CAST(uint16_t, src_v[x] << shift); + dst_uv += 2; } - if (width & 1) { - dst_uv[0] = src_u[width - 1] * scale; - dst_uv[1] = src_v[width - 1] * scale; +} + +// Convert msb formats to lsb, depending on sample depth. 
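MergeUVRow_16_C above now takes a bit depth instead of a raw scale factor: the shift is simply 16 - depth, which moves lsb-aligned samples up to the msb (the alignment P010-style buffers use), and SplitUVRow_16_C below applies the same shift in the other direction. A minimal round-trip sketch under that assumption (illustrative only, not libyuv code):

#include <assert.h>
#include <stdint.h>

int main(void) {
  const int depth = 10;
  const int shift = 16 - depth;               /* 6 for 10-bit content */
  uint16_t u10 = 0x2AB;                       /* arbitrary 10-bit sample */
  uint16_t msb = (uint16_t)(u10 << shift);    /* what the merge stores */
  assert((uint16_t)(msb >> shift) == u10);    /* what the split recovers */
  return 0;
}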
+void SplitUVRow_16_C(const uint16_t* src_uv, + uint16_t* dst_u, + uint16_t* dst_v, + int depth, + int width) { + int shift = 16 - depth; + int x; + assert(depth >= 8); + assert(depth <= 16); + for (x = 0; x < width; ++x) { + dst_u[x] = src_uv[0] >> shift; + dst_v[x] = src_uv[1] >> shift; + src_uv += 2; } } @@ -2563,7 +3120,17 @@ void MultiplyRow_16_C(const uint16_t* src_y, int width) { int x; for (x = 0; x < width; ++x) { - dst_y[x] = src_y[x] * scale; + dst_y[x] = STATIC_CAST(uint16_t, src_y[x] * scale); + } +} + +void DivideRow_16_C(const uint16_t* src_y, + uint16_t* dst_y, + int scale, + int width) { + int x; + for (x = 0; x < width; ++x) { + dst_y[x] = (src_y[x] * scale) >> 16; } } @@ -2572,13 +3139,19 @@ void MultiplyRow_16_C(const uint16_t* src_y, // 16384 = 10 bits // 4096 = 12 bits // 256 = 16 bits +// TODO(fbarchard): change scale to bits +#define C16TO8(v, scale) clamp255(((v) * (scale)) >> 16) + void Convert16To8Row_C(const uint16_t* src_y, uint8_t* dst_y, int scale, int width) { int x; + assert(scale >= 256); + assert(scale <= 32768); + for (x = 0; x < width; ++x) { - dst_y[x] = clamp255((src_y[x] * scale) >> 16); + dst_y[x] = STATIC_CAST(uint8_t, C16TO8(src_y[x], scale)); } } @@ -2631,6 +3204,21 @@ void YUY2ToUVRow_C(const uint8_t* src_yuy2, } } +// Filter 2 rows of YUY2 UV's (422) into UV (NV12). +void YUY2ToNVUVRow_C(const uint8_t* src_yuy2, + int src_stride_yuy2, + uint8_t* dst_uv, + int width) { + // Output a row of UV values, filtering 2 rows of YUY2. + int x; + for (x = 0; x < width; x += 2) { + dst_uv[0] = (src_yuy2[1] + src_yuy2[src_stride_yuy2 + 1] + 1) >> 1; + dst_uv[1] = (src_yuy2[3] + src_yuy2[src_stride_yuy2 + 3] + 1) >> 1; + src_yuy2 += 4; + dst_uv += 2; + } +} + // Copy row of YUY2 UV's (422) into U and V (422). void YUY2ToUV422Row_C(const uint8_t* src_yuy2, uint8_t* dst_u, @@ -2710,54 +3298,54 @@ void UYVYToYRow_C(const uint8_t* src_uyvy, uint8_t* dst_y, int width) { #define BLEND(f, b, a) clamp255((((256 - a) * b) >> 8) + f) -// Blend src_argb0 over src_argb1 and store to dst_argb. -// dst_argb may be src_argb0 or src_argb1. +// Blend src_argb over src_argb1 and store to dst_argb. +// dst_argb may be src_argb or src_argb1. // This code mimics the SSSE3 version for better testability. 
-void ARGBBlendRow_C(const uint8_t* src_argb0, +void ARGBBlendRow_C(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { int x; for (x = 0; x < width - 1; x += 2) { - uint32_t fb = src_argb0[0]; - uint32_t fg = src_argb0[1]; - uint32_t fr = src_argb0[2]; - uint32_t a = src_argb0[3]; + uint32_t fb = src_argb[0]; + uint32_t fg = src_argb[1]; + uint32_t fr = src_argb[2]; + uint32_t a = src_argb[3]; uint32_t bb = src_argb1[0]; uint32_t bg = src_argb1[1]; uint32_t br = src_argb1[2]; - dst_argb[0] = BLEND(fb, bb, a); - dst_argb[1] = BLEND(fg, bg, a); - dst_argb[2] = BLEND(fr, br, a); + dst_argb[0] = STATIC_CAST(uint8_t, BLEND(fb, bb, a)); + dst_argb[1] = STATIC_CAST(uint8_t, BLEND(fg, bg, a)); + dst_argb[2] = STATIC_CAST(uint8_t, BLEND(fr, br, a)); dst_argb[3] = 255u; - fb = src_argb0[4 + 0]; - fg = src_argb0[4 + 1]; - fr = src_argb0[4 + 2]; - a = src_argb0[4 + 3]; + fb = src_argb[4 + 0]; + fg = src_argb[4 + 1]; + fr = src_argb[4 + 2]; + a = src_argb[4 + 3]; bb = src_argb1[4 + 0]; bg = src_argb1[4 + 1]; br = src_argb1[4 + 2]; - dst_argb[4 + 0] = BLEND(fb, bb, a); - dst_argb[4 + 1] = BLEND(fg, bg, a); - dst_argb[4 + 2] = BLEND(fr, br, a); + dst_argb[4 + 0] = STATIC_CAST(uint8_t, BLEND(fb, bb, a)); + dst_argb[4 + 1] = STATIC_CAST(uint8_t, BLEND(fg, bg, a)); + dst_argb[4 + 2] = STATIC_CAST(uint8_t, BLEND(fr, br, a)); dst_argb[4 + 3] = 255u; - src_argb0 += 8; + src_argb += 8; src_argb1 += 8; dst_argb += 8; } if (width & 1) { - uint32_t fb = src_argb0[0]; - uint32_t fg = src_argb0[1]; - uint32_t fr = src_argb0[2]; - uint32_t a = src_argb0[3]; + uint32_t fb = src_argb[0]; + uint32_t fg = src_argb[1]; + uint32_t fr = src_argb[2]; + uint32_t a = src_argb[3]; uint32_t bb = src_argb1[0]; uint32_t bg = src_argb1[1]; uint32_t br = src_argb1[2]; - dst_argb[0] = BLEND(fb, bb, a); - dst_argb[1] = BLEND(fg, bg, a); - dst_argb[2] = BLEND(fr, br, a); + dst_argb[0] = STATIC_CAST(uint8_t, BLEND(fb, bb, a)); + dst_argb[1] = STATIC_CAST(uint8_t, BLEND(fg, bg, a)); + dst_argb[2] = STATIC_CAST(uint8_t, BLEND(fr, br, a)); dst_argb[3] = 255u; } } @@ -2784,11 +3372,11 @@ void BlendPlaneRow_C(const uint8_t* src0, } #undef UBLEND -#if defined(__aarch64__) || defined(__arm__) -#define ATTENUATE(f, a) (f * a + 128) >> 8 -#else +#if LIBYUV_ATTENUATE_DUP // This code mimics the SSSE3 version for better testability. #define ATTENUATE(f, a) (a | (a << 8)) * (f | (f << 8)) >> 24 +#else +#define ATTENUATE(f, a) (f * a + 128) >> 8 #endif // Multiply source RGB by alpha and store to destination. 
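The two ATTENUATE variants above are not bit-identical: the LIBYUV_ATTENUATE_DUP form duplicates both operands into 8.8 fixed point to mimic the SSSE3 code, while the other form divides by 256 with +128 rounding, and the two roundings can disagree in the last bit (most visibly at full alpha and full colour). A small comparison sketch, with helper names of our own:

#include <stdint.h>
#include <stdio.h>

static uint32_t AttenuateDup(uint32_t f, uint32_t a) {
  return ((a | (a << 8)) * (f | (f << 8))) >> 24;  /* SSSE3-style rounding */
}
static uint32_t AttenuateRound(uint32_t f, uint32_t a) {
  return (f * a + 128) >> 8;                       /* plain +128 rounding */
}

int main(void) {
  /* Full colour at full alpha: 255 vs 254. */
  printf("%u %u\n", AttenuateDup(255, 255), AttenuateRound(255, 255));
  /* Mid grey at full alpha: both give 128. */
  printf("%u %u\n", AttenuateDup(128, 255), AttenuateRound(128, 255));
  return 0;
}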
@@ -2802,7 +3390,7 @@ void ARGBAttenuateRow_C(const uint8_t* src_argb, uint8_t* dst_argb, int width) { dst_argb[0] = ATTENUATE(b, a); dst_argb[1] = ATTENUATE(g, a); dst_argb[2] = ATTENUATE(r, a); - dst_argb[3] = a; + dst_argb[3] = STATIC_CAST(uint8_t, a); b = src_argb[4]; g = src_argb[5]; r = src_argb[6]; @@ -2810,7 +3398,7 @@ void ARGBAttenuateRow_C(const uint8_t* src_argb, uint8_t* dst_argb, int width) { dst_argb[4] = ATTENUATE(b, a); dst_argb[5] = ATTENUATE(g, a); dst_argb[6] = ATTENUATE(r, a); - dst_argb[7] = a; + dst_argb[7] = STATIC_CAST(uint8_t, a); src_argb += 8; dst_argb += 8; } @@ -2823,7 +3411,7 @@ void ARGBAttenuateRow_C(const uint8_t* src_argb, uint8_t* dst_argb, int width) { dst_argb[0] = ATTENUATE(b, a); dst_argb[1] = ATTENUATE(g, a); dst_argb[2] = ATTENUATE(r, a); - dst_argb[3] = a; + dst_argb[3] = STATIC_CAST(uint8_t, a); } } #undef ATTENUATE @@ -2875,6 +3463,14 @@ const uint32_t fixed_invtbl8[256] = { T(0xfc), T(0xfd), T(0xfe), 0x01000100}; #undef T +#if LIBYUV_UNATTENUATE_DUP +// This code mimics the Intel SIMD version for better testability. +#define UNATTENUATE(f, ia) clamp255(((f | (f << 8)) * ia) >> 16) +#else +#define UNATTENUATE(f, ia) clamp255((f * ia) >> 8) +#endif + +// mimics the Intel SIMD code for exactness. void ARGBUnattenuateRow_C(const uint8_t* src_argb, uint8_t* dst_argb, int width) { @@ -2885,14 +3481,12 @@ void ARGBUnattenuateRow_C(const uint8_t* src_argb, uint32_t r = src_argb[2]; const uint32_t a = src_argb[3]; const uint32_t ia = fixed_invtbl8[a] & 0xffff; // 8.8 fixed point - b = (b * ia) >> 8; - g = (g * ia) >> 8; - r = (r * ia) >> 8; + // Clamping should not be necessary but is free in assembly. - dst_argb[0] = clamp255(b); - dst_argb[1] = clamp255(g); - dst_argb[2] = clamp255(r); - dst_argb[3] = a; + dst_argb[0] = STATIC_CAST(uint8_t, UNATTENUATE(b, ia)); + dst_argb[1] = STATIC_CAST(uint8_t, UNATTENUATE(g, ia)); + dst_argb[2] = STATIC_CAST(uint8_t, UNATTENUATE(r, ia)); + dst_argb[3] = STATIC_CAST(uint8_t, a); src_argb += 4; dst_argb += 4; } @@ -2922,13 +3516,24 @@ void CumulativeSumToAverageRow_C(const int32_t* tl, int area, uint8_t* dst, int count) { - float ooa = 1.0f / area; + float ooa; int i; + assert(area != 0); + + ooa = 1.0f / STATIC_CAST(float, area); for (i = 0; i < count; ++i) { - dst[0] = (uint8_t)((bl[w + 0] + tl[0] - bl[0] - tl[w + 0]) * ooa); - dst[1] = (uint8_t)((bl[w + 1] + tl[1] - bl[1] - tl[w + 1]) * ooa); - dst[2] = (uint8_t)((bl[w + 2] + tl[2] - bl[2] - tl[w + 2]) * ooa); - dst[3] = (uint8_t)((bl[w + 3] + tl[3] - bl[3] - tl[w + 3]) * ooa); + dst[0] = + (uint8_t)(STATIC_CAST(float, bl[w + 0] + tl[0] - bl[0] - tl[w + 0]) * + ooa); + dst[1] = + (uint8_t)(STATIC_CAST(float, bl[w + 1] + tl[1] - bl[1] - tl[w + 1]) * + ooa); + dst[2] = + (uint8_t)(STATIC_CAST(float, bl[w + 2] + tl[2] - bl[2] - tl[w + 2]) * + ooa); + dst[3] = + (uint8_t)(STATIC_CAST(float, bl[w + 3] + tl[3] - bl[3] - tl[w + 3]) * + ooa); dst += 4; tl += 4; bl += 4; @@ -2979,6 +3584,19 @@ static void HalfRow_16_C(const uint16_t* src_uv, } } +static void HalfRow_16To8_C(const uint16_t* src_uv, + ptrdiff_t src_uv_stride, + uint8_t* dst_uv, + int scale, + int width) { + int x; + for (x = 0; x < width; ++x) { + dst_uv[x] = STATIC_CAST( + uint8_t, + C16TO8((src_uv[x] + src_uv[src_uv_stride + x] + 1) >> 1, scale)); + } +} + // C version 2x2 -> 2x1. 
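HalfRow_16To8_C above reuses the C16TO8 macro, so its scale argument carries the same meaning as in Convert16To8Row_C: judging from the table in that comment (32768 = 9 bits, 16384 = 10 bits, 4096 = 12 bits, 256 = 16 bits), scale is 65536 >> (bits - 8), which makes a full-scale lsb-aligned sample land exactly on 255 after the >> 16. A short self-check of that reading (ours, not libyuv code); the 2x2 -> 2x1 interpolators continue below:

#include <assert.h>

static int C16To8Sketch(int v, int scale) {
  int r = (v * scale) >> 16;        /* same shape as the C16TO8 macro */
  return r > 255 ? 255 : r;         /* clamp255; negative inputs not expected */
}

int main(void) {
  assert((65536 >> (10 - 8)) == 16384);      /* 10-bit scale from the table */
  assert((65536 >> (12 - 8)) == 4096);       /* 12-bit scale from the table */
  assert(C16To8Sketch(1023, 16384) == 255);  /* max 10-bit code -> 255 */
  assert(C16To8Sketch(512, 16384) == 128);   /* mid 10-bit code -> 128 */
  return 0;
}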
void InterpolateRow_C(uint8_t* dst_ptr, const uint8_t* src_ptr, @@ -2989,6 +3607,9 @@ void InterpolateRow_C(uint8_t* dst_ptr, int y0_fraction = 256 - y1_fraction; const uint8_t* src_ptr1 = src_ptr + src_stride; int x; + assert(source_y_fraction >= 0); + assert(source_y_fraction < 256); + if (y1_fraction == 0) { memcpy(dst_ptr, src_ptr, width); return; @@ -2997,21 +3618,17 @@ void InterpolateRow_C(uint8_t* dst_ptr, HalfRow_C(src_ptr, src_stride, dst_ptr, width); return; } - for (x = 0; x < width - 1; x += 2) { - dst_ptr[0] = - (src_ptr[0] * y0_fraction + src_ptr1[0] * y1_fraction + 128) >> 8; - dst_ptr[1] = - (src_ptr[1] * y0_fraction + src_ptr1[1] * y1_fraction + 128) >> 8; - src_ptr += 2; - src_ptr1 += 2; - dst_ptr += 2; - } - if (width & 1) { - dst_ptr[0] = - (src_ptr[0] * y0_fraction + src_ptr1[0] * y1_fraction + 128) >> 8; + for (x = 0; x < width; ++x) { + dst_ptr[0] = STATIC_CAST( + uint8_t, + (src_ptr[0] * y0_fraction + src_ptr1[0] * y1_fraction + 128) >> 8); + ++src_ptr; + ++src_ptr1; + ++dst_ptr; } } +// C version 2x2 -> 2x1. void InterpolateRow_16_C(uint16_t* dst_ptr, const uint16_t* src_ptr, ptrdiff_t src_stride, @@ -3021,23 +3638,65 @@ void InterpolateRow_16_C(uint16_t* dst_ptr, int y0_fraction = 256 - y1_fraction; const uint16_t* src_ptr1 = src_ptr + src_stride; int x; - if (source_y_fraction == 0) { + assert(source_y_fraction >= 0); + assert(source_y_fraction < 256); + + if (y1_fraction == 0) { memcpy(dst_ptr, src_ptr, width * 2); return; } - if (source_y_fraction == 128) { + if (y1_fraction == 128) { HalfRow_16_C(src_ptr, src_stride, dst_ptr, width); return; } - for (x = 0; x < width - 1; x += 2) { - dst_ptr[0] = (src_ptr[0] * y0_fraction + src_ptr1[0] * y1_fraction) >> 8; - dst_ptr[1] = (src_ptr[1] * y0_fraction + src_ptr1[1] * y1_fraction) >> 8; - src_ptr += 2; - src_ptr1 += 2; - dst_ptr += 2; + for (x = 0; x < width; ++x) { + dst_ptr[0] = STATIC_CAST( + uint16_t, + (src_ptr[0] * y0_fraction + src_ptr1[0] * y1_fraction + 128) >> 8); + ++src_ptr; + ++src_ptr1; + ++dst_ptr; } - if (width & 1) { - dst_ptr[0] = (src_ptr[0] * y0_fraction + src_ptr1[0] * y1_fraction) >> 8; +} + +// C version 2x2 16 bit-> 2x1 8 bit. 
+// Use scale to convert lsb formats to msb, depending how many bits there are: +// 32768 = 9 bits +// 16384 = 10 bits +// 4096 = 12 bits +// 256 = 16 bits +// TODO(fbarchard): change scale to bits + +void InterpolateRow_16To8_C(uint8_t* dst_ptr, + const uint16_t* src_ptr, + ptrdiff_t src_stride, + int scale, + int width, + int source_y_fraction) { + int y1_fraction = source_y_fraction; + int y0_fraction = 256 - y1_fraction; + const uint16_t* src_ptr1 = src_ptr + src_stride; + int x; + assert(source_y_fraction >= 0); + assert(source_y_fraction < 256); + + if (source_y_fraction == 0) { + Convert16To8Row_C(src_ptr, dst_ptr, scale, width); + return; + } + if (source_y_fraction == 128) { + HalfRow_16To8_C(src_ptr, src_stride, dst_ptr, scale, width); + return; + } + for (x = 0; x < width; ++x) { + dst_ptr[0] = STATIC_CAST( + uint8_t, + C16TO8( + (src_ptr[0] * y0_fraction + src_ptr1[0] * y1_fraction + 128) >> 8, + scale)); + src_ptr += 1; + src_ptr1 += 1; + dst_ptr += 1; } } @@ -3146,10 +3805,10 @@ void ARGBPolynomialRow_C(const uint8_t* src_argb, dr += poly[14] * r3; da += poly[15] * a3; - dst_argb[0] = Clamp((int32_t)(db)); - dst_argb[1] = Clamp((int32_t)(dg)); - dst_argb[2] = Clamp((int32_t)(dr)); - dst_argb[3] = Clamp((int32_t)(da)); + dst_argb[0] = STATIC_CAST(uint8_t, Clamp((int32_t)(db))); + dst_argb[1] = STATIC_CAST(uint8_t, Clamp((int32_t)(dg))); + dst_argb[2] = STATIC_CAST(uint8_t, Clamp((int32_t)(dr))); + dst_argb[3] = STATIC_CAST(uint8_t, Clamp((int32_t)(da))); src_argb += 4; dst_argb += 4; } @@ -3276,7 +3935,7 @@ void ARGBCopyYToAlphaRow_C(const uint8_t* src, uint8_t* dst, int width) { // Maximum temporary width for wrappers to process at a time, in pixels. #define MAXTWIDTH 2048 -#if !(defined(_MSC_VER) && defined(_M_IX86)) && \ +#if !(defined(_MSC_VER) && !defined(__clang__) && defined(_M_IX86)) && \ defined(HAS_I422TORGB565ROW_SSSE3) // row_win.cc has asm version, but GCC uses 2 step wrapper. void I422ToRGB565Row_SSSE3(const uint8_t* src_y, @@ -3554,6 +4213,32 @@ void I422ToRGB24Row_AVX2(const uint8_t* src_y, } #endif +#if defined(HAS_I444TORGB24ROW_AVX2) +void I444ToRGB24Row_AVX2(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_rgb24, + const struct YuvConstants* yuvconstants, + int width) { + // Row buffer for intermediate ARGB pixels. + SIMD_ALIGNED(uint8_t row[MAXTWIDTH * 4]); + while (width > 0) { + int twidth = width > MAXTWIDTH ? MAXTWIDTH : width; + I444ToARGBRow_AVX2(src_y, src_u, src_v, row, yuvconstants, twidth); +#if defined(HAS_ARGBTORGB24ROW_AVX2) + ARGBToRGB24Row_AVX2(row, dst_rgb24, twidth); +#else + ARGBToRGB24Row_SSSE3(row, dst_rgb24, twidth); +#endif + src_y += twidth; + src_u += twidth; + src_v += twidth; + dst_rgb24 += twidth * 3; + width -= twidth; + } +} +#endif + #if defined(HAS_NV12TORGB565ROW_AVX2) void NV12ToRGB565Row_AVX2(const uint8_t* src_y, const uint8_t* src_uv, @@ -3642,6 +4327,26 @@ void RAWToYJRow_SSSE3(const uint8_t* src_raw, uint8_t* dst_yj, int width) { } #endif // HAS_RAWTOYJROW_SSSE3 +#ifdef HAS_INTERPOLATEROW_16TO8_AVX2 +void InterpolateRow_16To8_AVX2(uint8_t* dst_ptr, + const uint16_t* src_ptr, + ptrdiff_t src_stride, + int scale, + int width, + int source_y_fraction) { + // Row buffer for intermediate 16 bit pixels. + SIMD_ALIGNED(uint16_t row[MAXTWIDTH]); + while (width > 0) { + int twidth = width > MAXTWIDTH ? 
MAXTWIDTH : width; + InterpolateRow_16_C(row, src_ptr, src_stride, twidth, source_y_fraction); + Convert16To8Row_AVX2(row, dst_ptr, scale, twidth); + src_ptr += twidth; + dst_ptr += twidth; + width -= twidth; + } +} +#endif // HAS_INTERPOLATEROW_16TO8_AVX2 + float ScaleSumSamples_C(const float* src, float* dst, float scale, int width) { float fsum = 0.f; int i; @@ -3675,8 +4380,9 @@ void ScaleSamples_C(const float* src, float* dst, float scale, int width) { void GaussRow_C(const uint32_t* src, uint16_t* dst, int width) { int i; for (i = 0; i < width; ++i) { - *dst++ = - (src[0] + src[1] * 4 + src[2] * 6 + src[3] * 4 + src[4] + 128) >> 8; + *dst++ = STATIC_CAST( + uint16_t, + (src[0] + src[1] * 4 + src[2] * 6 + src[3] * 4 + src[4] + 128) >> 8); ++src; } } @@ -3743,13 +4449,14 @@ void NV21ToYUV24Row_C(const uint8_t* src_y, } // Filter 2 rows of AYUV UV's (444) into UV (420). +// AYUV is VUYA in memory. UV for NV12 is UV order in memory. void AYUVToUVRow_C(const uint8_t* src_ayuv, int src_stride_ayuv, uint8_t* dst_uv, int width) { // Output a row of UV values, filtering 2x2 rows of AYUV. int x; - for (x = 0; x < width; x += 2) { + for (x = 0; x < width - 1; x += 2) { dst_uv[0] = (src_ayuv[1] + src_ayuv[5] + src_ayuv[src_stride_ayuv + 1] + src_ayuv[src_stride_ayuv + 5] + 2) >> 2; @@ -3760,12 +4467,8 @@ void AYUVToUVRow_C(const uint8_t* src_ayuv, dst_uv += 2; } if (width & 1) { - dst_uv[0] = (src_ayuv[0] + src_ayuv[0] + src_ayuv[src_stride_ayuv + 0] + - src_ayuv[src_stride_ayuv + 0] + 2) >> - 2; - dst_uv[1] = (src_ayuv[1] + src_ayuv[1] + src_ayuv[src_stride_ayuv + 1] + - src_ayuv[src_stride_ayuv + 1] + 2) >> - 2; + dst_uv[0] = (src_ayuv[1] + src_ayuv[src_stride_ayuv + 1] + 1) >> 1; + dst_uv[1] = (src_ayuv[0] + src_ayuv[src_stride_ayuv + 0] + 1) >> 1; } } @@ -3776,7 +4479,7 @@ void AYUVToVURow_C(const uint8_t* src_ayuv, int width) { // Output a row of VU values, filtering 2x2 rows of AYUV. int x; - for (x = 0; x < width; x += 2) { + for (x = 0; x < width - 1; x += 2) { dst_vu[0] = (src_ayuv[0] + src_ayuv[4] + src_ayuv[src_stride_ayuv + 0] + src_ayuv[src_stride_ayuv + 4] + 2) >> 2; @@ -3787,12 +4490,8 @@ void AYUVToVURow_C(const uint8_t* src_ayuv, dst_vu += 2; } if (width & 1) { - dst_vu[0] = (src_ayuv[0] + src_ayuv[0] + src_ayuv[src_stride_ayuv + 0] + - src_ayuv[src_stride_ayuv + 0] + 2) >> - 2; - dst_vu[1] = (src_ayuv[1] + src_ayuv[1] + src_ayuv[src_stride_ayuv + 1] + - src_ayuv[src_stride_ayuv + 1] + 2) >> - 2; + dst_vu[0] = (src_ayuv[0] + src_ayuv[src_stride_ayuv + 0] + 1) >> 1; + dst_vu[1] = (src_ayuv[1] + src_ayuv[src_stride_ayuv + 1] + 1) >> 1; } } @@ -3843,7 +4542,9 @@ void HalfMergeUVRow_C(const uint8_t* src_u, } } +#undef STATIC_CAST + #ifdef __cplusplus } // extern "C" } // namespace libyuv -#endif +#endif \ No newline at end of file diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_gcc.cc b/TMessagesProj/jni/third_party/libyuv/source/row_gcc.cc index a107c30e76..f36d0cf01b 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_gcc.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/row_gcc.cc @@ -9,15 +9,13 @@ */ #include "libyuv/row.h" - #ifdef __cplusplus namespace libyuv { extern "C" { #endif // This module is for GCC x86 and x64. 
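Looking back at the InterpolateRow_16To8_C / HalfRow_16To8_C additions in row_common.cc above: the scale values in their comment are powers of two chosen so that (value * scale) >> 16 drops the extra precision (for example 16384 = 2^14 maps a 10-bit sample to 8 bits, 256 maps a 16-bit sample to 8 bits). The C16TO8 helper itself is not shown in these hunks, so the sketch below is only an assumption about its behaviour; Scale16To8 and the sample values are hypothetical.

#include <stdint.h>
#include <stdio.h>

/* Assumed behaviour of C16TO8(v, scale): narrow a 9..16 bit sample to 8 bits. */
static uint8_t Scale16To8(uint16_t v, int scale) {
  uint32_t r = ((uint32_t)v * (uint32_t)scale) >> 16;
  return (uint8_t)(r > 255 ? 255 : r); /* clamp, like clamp255() */
}

int main(void) {
  printf("%u\n", Scale16To8(1023, 16384)); /* 10-bit full scale -> 255 */
  printf("%u\n", Scale16To8(512, 16384));  /* 10-bit mid grey   -> 128 */
  printf("%u\n", Scale16To8(65535, 256));  /* 16-bit full scale -> 255 */
  return 0;
}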
-#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) +#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__)) #if defined(HAS_ARGBTOYROW_SSSE3) || defined(HAS_ARGBGRAYROW_SSSE3) @@ -29,6 +27,9 @@ static const uvec8 kARGBToY = {25u, 129u, 66u, 0u, 25u, 129u, 66u, 0u, static const uvec8 kARGBToYJ = {29u, 150u, 77u, 0u, 29u, 150u, 77u, 0u, 29u, 150u, 77u, 0u, 29u, 150u, 77u, 0u}; +static const uvec8 kABGRToYJ = {77u, 150u, 29u, 0u, 77u, 150u, 29u, 0u, + 77u, 150u, 29u, 0u, 77u, 150u, 29u, 0u}; + static const uvec8 kRGBAToYJ = {0u, 29u, 150u, 77u, 0u, 29u, 150u, 77u, 0u, 29u, 150u, 77u, 0u, 29u, 150u, 77u}; #endif // defined(HAS_ARGBTOYROW_SSSE3) || defined(HAS_ARGBGRAYROW_SSSE3) @@ -41,12 +42,18 @@ static const vec8 kARGBToU = {112, -74, -38, 0, 112, -74, -38, 0, static const vec8 kARGBToUJ = {127, -84, -43, 0, 127, -84, -43, 0, 127, -84, -43, 0, 127, -84, -43, 0}; +static const vec8 kABGRToUJ = {-43, -84, 127, 0, -43, -84, 127, 0, + -43, -84, 127, 0, -43, -84, 127, 0}; + static const vec8 kARGBToV = {-18, -94, 112, 0, -18, -94, 112, 0, -18, -94, 112, 0, -18, -94, 112, 0}; static const vec8 kARGBToVJ = {-20, -107, 127, 0, -20, -107, 127, 0, -20, -107, 127, 0, -20, -107, 127, 0}; +static const vec8 kABGRToVJ = {127, -107, -20, 0, 127, -107, -20, 0, + 127, -107, -20, 0, 127, -107, -20, 0}; + // Constants for BGRA static const uvec8 kBGRAToY = {0u, 66u, 129u, 25u, 0u, 66u, 129u, 25u, 0u, 66u, 129u, 25u, 0u, 66u, 129u, 25u}; @@ -1078,6 +1085,226 @@ void ABGRToAR30Row_AVX2(const uint8_t* src, uint8_t* dst, int width) { } #endif +static const uvec8 kShuffleARGBToABGR = {2, 1, 0, 3, 6, 5, 4, 7, + 10, 9, 8, 11, 14, 13, 12, 15}; + +static const uvec8 kShuffleARGBToAB64Lo = {2, 2, 1, 1, 0, 0, 3, 3, + 6, 6, 5, 5, 4, 4, 7, 7}; +static const uvec8 kShuffleARGBToAB64Hi = {10, 10, 9, 9, 8, 8, 11, 11, + 14, 14, 13, 13, 12, 12, 15, 15}; + +void ARGBToAR64Row_SSSE3(const uint8_t* src_argb, + uint16_t* dst_ar64, + int width) { + asm volatile( + + LABELALIGN + "1: \n" + "movdqu (%0),%%xmm0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "punpcklbw %%xmm0,%%xmm0 \n" + "punpckhbw %%xmm1,%%xmm1 \n" + "movdqu %%xmm0,(%1) \n" + "movdqu %%xmm1,0x10(%1) \n" + "lea 0x10(%0),%0 \n" + "lea 0x20(%1),%1 \n" + "sub $0x4,%2 \n" + "jg 1b \n" + : "+r"(src_argb), // %0 + "+r"(dst_ar64), // %1 + "+r"(width) // %2 + : + : "memory", "cc", "xmm0", "xmm1"); +} + +void ARGBToAB64Row_SSSE3(const uint8_t* src_argb, + uint16_t* dst_ab64, + int width) { + asm volatile( + + "movdqa %3,%%xmm2 \n" + "movdqa %4,%%xmm3 \n" LABELALIGN + "1: \n" + "movdqu (%0),%%xmm0 \n" + "movdqa %%xmm0,%%xmm1 \n" + "pshufb %%xmm2,%%xmm0 \n" + "pshufb %%xmm3,%%xmm1 \n" + "movdqu %%xmm0,(%1) \n" + "movdqu %%xmm1,0x10(%1) \n" + "lea 0x10(%0),%0 \n" + "lea 0x20(%1),%1 \n" + "sub $0x4,%2 \n" + "jg 1b \n" + : "+r"(src_argb), // %0 + "+r"(dst_ab64), // %1 + "+r"(width) // %2 + : "m"(kShuffleARGBToAB64Lo), // %3 + "m"(kShuffleARGBToAB64Hi) // %4 + : "memory", "cc", "xmm0", "xmm1", "xmm2"); +} + +void AR64ToARGBRow_SSSE3(const uint16_t* src_ar64, + uint8_t* dst_argb, + int width) { + asm volatile( + + LABELALIGN + "1: \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "psrlw $8,%%xmm0 \n" + "psrlw $8,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x20(%0),%0 \n" + "lea 0x10(%1),%1 \n" + "sub $0x4,%2 \n" + "jg 1b \n" + : "+r"(src_ar64), // %0 + "+r"(dst_argb), // %1 + "+r"(width) // %2 + : + : "memory", "cc", "xmm0", "xmm1"); +} + +void AB64ToARGBRow_SSSE3(const 
uint16_t* src_ab64, + uint8_t* dst_argb, + int width) { + asm volatile( + + "movdqa %3,%%xmm2 \n" LABELALIGN + "1: \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "psrlw $8,%%xmm0 \n" + "psrlw $8,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "pshufb %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x20(%0),%0 \n" + "lea 0x10(%1),%1 \n" + "sub $0x4,%2 \n" + "jg 1b \n" + : "+r"(src_ab64), // %0 + "+r"(dst_argb), // %1 + "+r"(width) // %2 + : "m"(kShuffleARGBToABGR) // %3 + : "memory", "cc", "xmm0", "xmm1", "xmm2"); +} + +#ifdef HAS_ARGBTOAR64ROW_AVX2 +void ARGBToAR64Row_AVX2(const uint8_t* src_argb, + uint16_t* dst_ar64, + int width) { + asm volatile( + + LABELALIGN + "1: \n" + "vmovdqu (%0),%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpunpckhbw %%ymm0,%%ymm0,%%ymm1 \n" + "vpunpcklbw %%ymm0,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "vmovdqu %%ymm1,0x20(%1) \n" + "lea 0x20(%0),%0 \n" + "lea 0x40(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_argb), // %0 + "+r"(dst_ar64), // %1 + "+r"(width) // %2 + : + : "memory", "cc", "xmm0", "xmm1"); +} +#endif + +#ifdef HAS_ARGBTOAB64ROW_AVX2 +void ARGBToAB64Row_AVX2(const uint8_t* src_argb, + uint16_t* dst_ab64, + int width) { + asm volatile( + + "vbroadcastf128 %3,%%ymm2 \n" + "vbroadcastf128 %4,%%ymm3 \n" LABELALIGN + "1: \n" + "vmovdqu (%0),%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpshufb %%ymm3,%%ymm0,%%ymm1 \n" + "vpshufb %%ymm2,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "vmovdqu %%ymm1,0x20(%1) \n" + "lea 0x20(%0),%0 \n" + "lea 0x40(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_argb), // %0 + "+r"(dst_ab64), // %1 + "+r"(width) // %2 + : "m"(kShuffleARGBToAB64Lo), // %3 + "m"(kShuffleARGBToAB64Hi) // %3 + : "memory", "cc", "xmm0", "xmm1", "xmm2"); +} +#endif + +#ifdef HAS_AR64TOARGBROW_AVX2 +void AR64ToARGBRow_AVX2(const uint16_t* src_ar64, + uint8_t* dst_argb, + int width) { + asm volatile( + + LABELALIGN + "1: \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "vpsrlw $8,%%ymm0,%%ymm0 \n" + "vpsrlw $8,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "lea 0x40(%0),%0 \n" + "lea 0x20(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_ar64), // %0 + "+r"(dst_argb), // %1 + "+r"(width) // %2 + : + : "memory", "cc", "xmm0", "xmm1"); +} +#endif + +#ifdef HAS_AB64TOARGBROW_AVX2 +void AB64ToARGBRow_AVX2(const uint16_t* src_ab64, + uint8_t* dst_argb, + int width) { + asm volatile( + + "vbroadcastf128 %3,%%ymm2 \n" LABELALIGN + "1: \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "vpsrlw $8,%%ymm0,%%ymm0 \n" + "vpsrlw $8,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpshufb %%ymm2,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "lea 0x40(%0),%0 \n" + "lea 0x20(%1),%1 \n" + "sub $0x8,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_ab64), // %0 + "+r"(dst_argb), // %1 + "+r"(width) // %2 + : "m"(kShuffleARGBToABGR) // %3 + : "memory", "cc", "xmm0", "xmm1", "xmm2"); +} +#endif + // clang-format off // TODO(mraptis): Consider passing R, G, B multipliers as parameter. @@ -1184,6 +1411,24 @@ void ARGBToYJRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_y, int width) { } #endif // HAS_ARGBTOYJROW_SSSE3 +#ifdef HAS_ABGRTOYJROW_SSSE3 +// Convert 16 ABGR pixels (64 bytes) to 16 YJ values. +// Same as ABGRToYRow but different coefficients, no add 16. 
+void ABGRToYJRow_SSSE3(const uint8_t* src_abgr, uint8_t* dst_y, int width) { + asm volatile( + "movdqa %3,%%xmm4 \n" + "movdqa %4,%%xmm5 \n" + + LABELALIGN RGBTOY(xmm5) + : "+r"(src_abgr), // %0 + "+r"(dst_y), // %1 + "+r"(width) // %2 + : "m"(kABGRToYJ), // %3 + "m"(kSub128) // %4 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6"); +} +#endif // HAS_ABGRTOYJROW_SSSE3 + #ifdef HAS_RGBATOYJROW_SSSE3 // Convert 16 ARGB pixels (64 bytes) to 16 YJ values. // Same as ARGBToYRow but different coefficients, no add 16. @@ -1202,9 +1447,13 @@ void RGBAToYJRow_SSSE3(const uint8_t* src_rgba, uint8_t* dst_y, int width) { } #endif // HAS_RGBATOYJROW_SSSE3 -#ifdef HAS_ARGBTOYROW_AVX2 +#if defined(HAS_ARGBTOYROW_AVX2) || defined(HAS_ABGRTOYROW_AVX2) || \ + defined(HAS_ARGBEXTRACTALPHAROW_AVX2) // vpermd for vphaddw + vpackuswb vpermd. static const lvec32 kPermdARGBToY_AVX = {0, 4, 1, 5, 2, 6, 3, 7}; +#endif + +#ifdef HAS_ARGBTOYROW_AVX2 // Convert 32 ARGB pixels (128 bytes) to 32 Y values. void ARGBToYRow_AVX2(const uint8_t* src_argb, uint8_t* dst_y, int width) { @@ -1212,9 +1461,8 @@ void ARGBToYRow_AVX2(const uint8_t* src_argb, uint8_t* dst_y, int width) { "vbroadcastf128 %3,%%ymm4 \n" "vbroadcastf128 %4,%%ymm5 \n" "vbroadcastf128 %5,%%ymm7 \n" - "vmovdqu %6,%%ymm6 \n" - - LABELALIGN RGBTOY_AVX2(ymm7) + "vmovdqu %6,%%ymm6 \n" LABELALIGN RGBTOY_AVX2( + ymm7) "vzeroupper \n" : "+r"(src_argb), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1234,9 +1482,8 @@ void ABGRToYRow_AVX2(const uint8_t* src_abgr, uint8_t* dst_y, int width) { "vbroadcastf128 %3,%%ymm4 \n" "vbroadcastf128 %4,%%ymm5 \n" "vbroadcastf128 %5,%%ymm7 \n" - "vmovdqu %6,%%ymm6 \n" - - LABELALIGN RGBTOY_AVX2(ymm7) + "vmovdqu %6,%%ymm6 \n" LABELALIGN RGBTOY_AVX2( + ymm7) "vzeroupper \n" : "+r"(src_abgr), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1255,9 +1502,8 @@ void ARGBToYJRow_AVX2(const uint8_t* src_argb, uint8_t* dst_y, int width) { asm volatile( "vbroadcastf128 %3,%%ymm4 \n" "vbroadcastf128 %4,%%ymm5 \n" - "vmovdqu %5,%%ymm6 \n" - - LABELALIGN RGBTOY_AVX2(ymm5) + "vmovdqu %5,%%ymm6 \n" LABELALIGN RGBTOY_AVX2( + ymm5) "vzeroupper \n" : "+r"(src_argb), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1269,16 +1515,33 @@ void ARGBToYJRow_AVX2(const uint8_t* src_argb, uint8_t* dst_y, int width) { } #endif // HAS_ARGBTOYJROW_AVX2 +#ifdef HAS_ABGRTOYJROW_AVX2 +// Convert 32 ABGR pixels (128 bytes) to 32 Y values. +void ABGRToYJRow_AVX2(const uint8_t* src_abgr, uint8_t* dst_y, int width) { + asm volatile( + "vbroadcastf128 %3,%%ymm4 \n" + "vbroadcastf128 %4,%%ymm5 \n" + "vmovdqu %5,%%ymm6 \n" LABELALIGN RGBTOY_AVX2( + ymm5) "vzeroupper \n" + : "+r"(src_abgr), // %0 + "+r"(dst_y), // %1 + "+r"(width) // %2 + : "m"(kABGRToYJ), // %3 + "m"(kSub128), // %4 + "m"(kPermdARGBToY_AVX) // %5 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", + "xmm7"); +} +#endif // HAS_ABGRTOYJROW_AVX2 + #ifdef HAS_RGBATOYJROW_AVX2 // Convert 32 ARGB pixels (128 bytes) to 32 Y values. 
void RGBAToYJRow_AVX2(const uint8_t* src_rgba, uint8_t* dst_y, int width) { asm volatile( "vbroadcastf128 %3,%%ymm4 \n" "vbroadcastf128 %4,%%ymm5 \n" - "vmovdqu %5,%%ymm6 \n" - - LABELALIGN RGBTOY_AVX2( - ymm5) "vzeroupper \n" + "vmovdqu %5,%%ymm6 \n" LABELALIGN RGBTOY_AVX2( + ymm5) "vzeroupper \n" : "+r"(src_rgba), // %0 "+r"(dst_y), // %1 "+r"(width) // %2 @@ -1290,7 +1553,7 @@ void RGBAToYJRow_AVX2(const uint8_t* src_rgba, uint8_t* dst_y, int width) { #endif // HAS_RGBATOYJROW_AVX2 #ifdef HAS_ARGBTOUVROW_SSSE3 -void ARGBToUVRow_SSSE3(const uint8_t* src_argb0, +void ARGBToUVRow_SSSE3(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, uint8_t* dst_v, @@ -1342,7 +1605,7 @@ void ARGBToUVRow_SSSE3(const uint8_t* src_argb0, "lea 0x8(%1),%1 \n" "sub $0x10,%3 \n" "jg 1b \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 "+rm"(width) // %3 @@ -1354,12 +1617,16 @@ void ARGBToUVRow_SSSE3(const uint8_t* src_argb0, } #endif // HAS_ARGBTOUVROW_SSSE3 -#ifdef HAS_ARGBTOUVROW_AVX2 +#if defined(HAS_ARGBTOUVROW_AVX2) || defined(HAS_ABGRTOUVROW_AVX2) || \ + defined(HAS_ARGBTOUVJROW_AVX2) || defined(HAS_ABGRTOUVJROW_AVX2) // vpshufb for vphaddw + vpackuswb packed to shorts. static const lvec8 kShufARGBToUV_AVX = { 0, 1, 8, 9, 2, 3, 10, 11, 4, 5, 12, 13, 6, 7, 14, 15, 0, 1, 8, 9, 2, 3, 10, 11, 4, 5, 12, 13, 6, 7, 14, 15}; -void ARGBToUVRow_AVX2(const uint8_t* src_argb0, +#endif + +#if defined(HAS_ARGBTOUVROW_AVX2) +void ARGBToUVRow_AVX2(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, uint8_t* dst_v, @@ -1407,7 +1674,7 @@ void ARGBToUVRow_AVX2(const uint8_t* src_argb0, "sub $0x20,%3 \n" "jg 1b \n" "vzeroupper \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 "+rm"(width) // %3 @@ -1422,7 +1689,7 @@ void ARGBToUVRow_AVX2(const uint8_t* src_argb0, #endif // HAS_ARGBTOUVROW_AVX2 #ifdef HAS_ABGRTOUVROW_AVX2 -void ABGRToUVRow_AVX2(const uint8_t* src_abgr0, +void ABGRToUVRow_AVX2(const uint8_t* src_abgr, int src_stride_abgr, uint8_t* dst_u, uint8_t* dst_v, @@ -1470,7 +1737,7 @@ void ABGRToUVRow_AVX2(const uint8_t* src_abgr0, "sub $0x20,%3 \n" "jg 1b \n" "vzeroupper \n" - : "+r"(src_abgr0), // %0 + : "+r"(src_abgr), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 "+rm"(width) // %3 @@ -1485,7 +1752,7 @@ void ABGRToUVRow_AVX2(const uint8_t* src_abgr0, #endif // HAS_ABGRTOUVROW_AVX2 #ifdef HAS_ARGBTOUVJROW_AVX2 -void ARGBToUVJRow_AVX2(const uint8_t* src_argb0, +void ARGBToUVJRow_AVX2(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, uint8_t* dst_v, @@ -1534,7 +1801,7 @@ void ARGBToUVJRow_AVX2(const uint8_t* src_argb0, "sub $0x20,%3 \n" "jg 1b \n" "vzeroupper \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 "+rm"(width) // %3 @@ -1548,46 +1815,111 @@ void ARGBToUVJRow_AVX2(const uint8_t* src_argb0, } #endif // HAS_ARGBTOUVJROW_AVX2 -#ifdef HAS_ARGBTOUVJROW_SSSE3 -void ARGBToUVJRow_SSSE3(const uint8_t* src_argb0, - int src_stride_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width) { +// TODO(fbarchard): Pass kABGRToVJ / kABGRToUJ as matrix +#ifdef HAS_ABGRTOUVJROW_AVX2 +void ABGRToUVJRow_AVX2(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { asm volatile( - "movdqa %5,%%xmm3 \n" - "movdqa %6,%%xmm4 \n" - "movdqa %7,%%xmm5 \n" + "vbroadcastf128 %5,%%ymm5 \n" + "vbroadcastf128 %6,%%ymm6 \n" + "vbroadcastf128 %7,%%ymm7 \n" "sub %1,%2 \n" LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 
0x00(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x10(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "movdqu 0x20(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm2 \n" - "movdqu 0x30(%0),%%xmm6 \n" - "movdqu 0x30(%0,%4,1),%%xmm7 \n" - "pavgb %%xmm7,%%xmm6 \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "vmovdqu 0x40(%0),%%ymm2 \n" + "vmovdqu 0x60(%0),%%ymm3 \n" + "vpavgb 0x00(%0,%4,1),%%ymm0,%%ymm0 \n" + "vpavgb 0x20(%0,%4,1),%%ymm1,%%ymm1 \n" + "vpavgb 0x40(%0,%4,1),%%ymm2,%%ymm2 \n" + "vpavgb 0x60(%0,%4,1),%%ymm3,%%ymm3 \n" + "lea 0x80(%0),%0 \n" + "vshufps $0x88,%%ymm1,%%ymm0,%%ymm4 \n" + "vshufps $0xdd,%%ymm1,%%ymm0,%%ymm0 \n" + "vpavgb %%ymm4,%%ymm0,%%ymm0 \n" + "vshufps $0x88,%%ymm3,%%ymm2,%%ymm4 \n" + "vshufps $0xdd,%%ymm3,%%ymm2,%%ymm2 \n" + "vpavgb %%ymm4,%%ymm2,%%ymm2 \n" - "lea 0x40(%0),%0 \n" - "movdqa %%xmm0,%%xmm7 \n" - "shufps $0x88,%%xmm1,%%xmm0 \n" - "shufps $0xdd,%%xmm1,%%xmm7 \n" - "pavgb %%xmm7,%%xmm0 \n" - "movdqa %%xmm2,%%xmm7 \n" - "shufps $0x88,%%xmm6,%%xmm2 \n" - "shufps $0xdd,%%xmm6,%%xmm7 \n" - "pavgb %%xmm7,%%xmm2 \n" - "movdqa %%xmm0,%%xmm1 \n" - "movdqa %%xmm2,%%xmm6 \n" - "pmaddubsw %%xmm4,%%xmm0 \n" - "pmaddubsw %%xmm4,%%xmm2 \n" + "vpmaddubsw %%ymm7,%%ymm0,%%ymm1 \n" + "vpmaddubsw %%ymm7,%%ymm2,%%ymm3 \n" + "vpmaddubsw %%ymm6,%%ymm0,%%ymm0 \n" + "vpmaddubsw %%ymm6,%%ymm2,%%ymm2 \n" + "vphaddw %%ymm3,%%ymm1,%%ymm1 \n" + "vphaddw %%ymm2,%%ymm0,%%ymm0 \n" + "vpaddw %%ymm5,%%ymm0,%%ymm0 \n" + "vpaddw %%ymm5,%%ymm1,%%ymm1 \n" + "vpsraw $0x8,%%ymm1,%%ymm1 \n" + "vpsraw $0x8,%%ymm0,%%ymm0 \n" + "vpacksswb %%ymm0,%%ymm1,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpshufb %8,%%ymm0,%%ymm0 \n" + + "vextractf128 $0x0,%%ymm0,(%1) \n" + "vextractf128 $0x1,%%ymm0,0x0(%1,%2,1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x20,%3 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_abgr), // %0 + "+r"(dst_u), // %1 + "+r"(dst_v), // %2 + "+rm"(width) // %3 + : "r"((intptr_t)(src_stride_abgr)), // %4 + "m"(kSub128), // %5 + "m"(kABGRToVJ), // %6 + "m"(kABGRToUJ), // %7 + "m"(kShufARGBToUV_AVX) // %8 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", + "xmm7"); +} +#endif // HAS_ABGRTOUVJROW_AVX2 + +#ifdef HAS_ARGBTOUVJROW_SSSE3 +void ARGBToUVJRow_SSSE3(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + asm volatile( + "movdqa %5,%%xmm3 \n" + "movdqa %6,%%xmm4 \n" + "movdqa %7,%%xmm5 \n" + "sub %1,%2 \n" + + LABELALIGN + "1: \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x00(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x10(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "movdqu 0x20(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm2 \n" + "movdqu 0x30(%0),%%xmm6 \n" + "movdqu 0x30(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm6 \n" + + "lea 0x40(%0),%0 \n" + "movdqa %%xmm0,%%xmm7 \n" + "shufps $0x88,%%xmm1,%%xmm0 \n" + "shufps $0xdd,%%xmm1,%%xmm7 \n" + "pavgb %%xmm7,%%xmm0 \n" + "movdqa %%xmm2,%%xmm7 \n" + "shufps $0x88,%%xmm6,%%xmm2 \n" + "shufps $0xdd,%%xmm6,%%xmm7 \n" + "pavgb %%xmm7,%%xmm2 \n" + "movdqa %%xmm0,%%xmm1 \n" + "movdqa %%xmm2,%%xmm6 \n" + "pmaddubsw %%xmm4,%%xmm0 \n" + "pmaddubsw %%xmm4,%%xmm2 \n" "pmaddubsw %%xmm3,%%xmm1 \n" "pmaddubsw %%xmm3,%%xmm6 \n" "phaddw %%xmm2,%%xmm0 \n" @@ -1602,7 +1934,7 @@ void ARGBToUVJRow_SSSE3(const uint8_t* src_argb0, "lea 0x8(%1),%1 \n" "sub $0x10,%3 \n" "jg 1b \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 
"+r"(dst_u), // %1 "+r"(dst_v), // %2 "+rm"(width) // %3 @@ -1614,6 +1946,72 @@ void ARGBToUVJRow_SSSE3(const uint8_t* src_argb0, } #endif // HAS_ARGBTOUVJROW_SSSE3 +#ifdef HAS_ABGRTOUVJROW_SSSE3 +void ABGRToUVJRow_SSSE3(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + asm volatile( + "movdqa %5,%%xmm3 \n" + "movdqa %6,%%xmm4 \n" + "movdqa %7,%%xmm5 \n" + "sub %1,%2 \n" + + LABELALIGN + "1: \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x00(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x10(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "movdqu 0x20(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm2 \n" + "movdqu 0x30(%0),%%xmm6 \n" + "movdqu 0x30(%0,%4,1),%%xmm7 \n" + "pavgb %%xmm7,%%xmm6 \n" + + "lea 0x40(%0),%0 \n" + "movdqa %%xmm0,%%xmm7 \n" + "shufps $0x88,%%xmm1,%%xmm0 \n" + "shufps $0xdd,%%xmm1,%%xmm7 \n" + "pavgb %%xmm7,%%xmm0 \n" + "movdqa %%xmm2,%%xmm7 \n" + "shufps $0x88,%%xmm6,%%xmm2 \n" + "shufps $0xdd,%%xmm6,%%xmm7 \n" + "pavgb %%xmm7,%%xmm2 \n" + "movdqa %%xmm0,%%xmm1 \n" + "movdqa %%xmm2,%%xmm6 \n" + "pmaddubsw %%xmm4,%%xmm0 \n" + "pmaddubsw %%xmm4,%%xmm2 \n" + "pmaddubsw %%xmm3,%%xmm1 \n" + "pmaddubsw %%xmm3,%%xmm6 \n" + "phaddw %%xmm2,%%xmm0 \n" + "phaddw %%xmm6,%%xmm1 \n" + "paddw %%xmm5,%%xmm0 \n" + "paddw %%xmm5,%%xmm1 \n" + "psraw $0x8,%%xmm0 \n" + "psraw $0x8,%%xmm1 \n" + "packsswb %%xmm1,%%xmm0 \n" + "movlps %%xmm0,(%1) \n" + "movhps %%xmm0,0x00(%1,%2,1) \n" + "lea 0x8(%1),%1 \n" + "sub $0x10,%3 \n" + "jg 1b \n" + : "+r"(src_abgr), // %0 + "+r"(dst_u), // %1 + "+r"(dst_v), // %2 + "+rm"(width) // %3 + : "r"((intptr_t)(src_stride_abgr)), // %4 + "m"(kABGRToVJ), // %5 + "m"(kABGRToUJ), // %6 + "m"(kSub128) // %7 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm6", "xmm7"); +} +#endif // HAS_ABGRTOUVJROW_SSSE3 + #ifdef HAS_ARGBTOUV444ROW_SSSE3 void ARGBToUV444Row_SSSE3(const uint8_t* src_argb, uint8_t* dst_u, @@ -1689,7 +2087,7 @@ void BGRAToYRow_SSSE3(const uint8_t* src_bgra, uint8_t* dst_y, int width) { "xmm7"); } -void BGRAToUVRow_SSSE3(const uint8_t* src_bgra0, +void BGRAToUVRow_SSSE3(const uint8_t* src_bgra, int src_stride_bgra, uint8_t* dst_u, uint8_t* dst_v, @@ -1741,7 +2139,7 @@ void BGRAToUVRow_SSSE3(const uint8_t* src_bgra0, "lea 0x8(%1),%1 \n" "sub $0x10,%3 \n" "jg 1b \n" - : "+r"(src_bgra0), // %0 + : "+r"(src_bgra), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 "+rm"(width) // %3 @@ -1786,7 +2184,7 @@ void RGBAToYRow_SSSE3(const uint8_t* src_rgba, uint8_t* dst_y, int width) { "xmm7"); } -void ABGRToUVRow_SSSE3(const uint8_t* src_abgr0, +void ABGRToUVRow_SSSE3(const uint8_t* src_abgr, int src_stride_abgr, uint8_t* dst_u, uint8_t* dst_v, @@ -1838,7 +2236,7 @@ void ABGRToUVRow_SSSE3(const uint8_t* src_abgr0, "lea 0x8(%1),%1 \n" "sub $0x10,%3 \n" "jg 1b \n" - : "+r"(src_abgr0), // %0 + : "+r"(src_abgr), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 "+rm"(width) // %3 @@ -1849,7 +2247,7 @@ void ABGRToUVRow_SSSE3(const uint8_t* src_abgr0, : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm6", "xmm7"); } -void RGBAToUVRow_SSSE3(const uint8_t* src_rgba0, +void RGBAToUVRow_SSSE3(const uint8_t* src_rgba, int src_stride_rgba, uint8_t* dst_u, uint8_t* dst_v, @@ -1901,7 +2299,7 @@ void RGBAToUVRow_SSSE3(const uint8_t* src_rgba0, "lea 0x8(%1),%1 \n" "sub $0x10,%3 \n" "jg 1b \n" - : "+r"(src_rgba0), // %0 + : "+r"(src_rgba), // %0 "+r"(dst_u), // %1 "+r"(dst_v), // %2 "+rm"(width) // %3 @@ -1916,48 +2314,135 @@ void RGBAToUVRow_SSSE3(const uint8_t* src_rgba0, // Read 8 UV 
from 444 #define READYUV444 \ - "movq (%[u_buf]),%%xmm0 \n" \ + "movq (%[u_buf]),%%xmm3 \n" \ "movq 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ "lea 0x8(%[u_buf]),%[u_buf] \n" \ - "punpcklbw %%xmm1,%%xmm0 \n" \ + "punpcklbw %%xmm1,%%xmm3 \n" \ "movq (%[y_buf]),%%xmm4 \n" \ "punpcklbw %%xmm4,%%xmm4 \n" \ "lea 0x8(%[y_buf]),%[y_buf] \n" // Read 4 UV from 422, upsample to 8 UV #define READYUV422 \ - "movd (%[u_buf]),%%xmm0 \n" \ + "movd (%[u_buf]),%%xmm3 \n" \ "movd 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ "lea 0x4(%[u_buf]),%[u_buf] \n" \ - "punpcklbw %%xmm1,%%xmm0 \n" \ - "punpcklwd %%xmm0,%%xmm0 \n" \ + "punpcklbw %%xmm1,%%xmm3 \n" \ + "punpcklwd %%xmm3,%%xmm3 \n" \ "movq (%[y_buf]),%%xmm4 \n" \ "punpcklbw %%xmm4,%%xmm4 \n" \ "lea 0x8(%[y_buf]),%[y_buf] \n" // Read 4 UV from 422 10 bit, upsample to 8 UV -// TODO(fbarchard): Consider shufb to replace pack/unpack -// TODO(fbarchard): Consider pmulhuw to replace psraw -// TODO(fbarchard): Consider pmullw to replace psllw and allow different bits. #define READYUV210 \ - "movq (%[u_buf]),%%xmm0 \n" \ + "movq (%[u_buf]),%%xmm3 \n" \ "movq 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ "lea 0x8(%[u_buf]),%[u_buf] \n" \ - "punpcklwd %%xmm1,%%xmm0 \n" \ - "psraw $0x2,%%xmm0 \n" \ - "packuswb %%xmm0,%%xmm0 \n" \ - "punpcklwd %%xmm0,%%xmm0 \n" \ + "punpcklwd %%xmm1,%%xmm3 \n" \ + "psraw $2,%%xmm3 \n" \ + "packuswb %%xmm3,%%xmm3 \n" \ + "punpcklwd %%xmm3,%%xmm3 \n" \ + "movdqu (%[y_buf]),%%xmm4 \n" \ + "movdqa %%xmm4,%%xmm2 \n" \ + "psllw $6,%%xmm4 \n" \ + "psrlw $4,%%xmm2 \n" \ + "paddw %%xmm2,%%xmm4 \n" \ + "lea 0x10(%[y_buf]),%[y_buf] \n" + +#define READYUVA210 \ + "movq (%[u_buf]),%%xmm3 \n" \ + "movq 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ + "lea 0x8(%[u_buf]),%[u_buf] \n" \ + "punpcklwd %%xmm1,%%xmm3 \n" \ + "psraw $2,%%xmm3 \n" \ + "packuswb %%xmm3,%%xmm3 \n" \ + "punpcklwd %%xmm3,%%xmm3 \n" \ + "movdqu (%[y_buf]),%%xmm4 \n" \ + "movdqa %%xmm4,%%xmm2 \n" \ + "psllw $6,%%xmm4 \n" \ + "psrlw $4,%%xmm2 \n" \ + "paddw %%xmm2,%%xmm4 \n" \ + "lea 0x10(%[y_buf]),%[y_buf] \n" \ + "movdqu (%[a_buf]),%%xmm5 \n" \ + "psraw $2,%%xmm5 \n" \ + "packuswb %%xmm5,%%xmm5 \n" \ + "lea 0x10(%[a_buf]),%[a_buf] \n" + +// Read 8 UV from 444 10 bit +#define READYUV410 \ + "movdqu (%[u_buf]),%%xmm3 \n" \ + "movdqu 0x00(%[u_buf],%[v_buf],1),%%xmm2 \n" \ + "lea 0x10(%[u_buf]),%[u_buf] \n" \ + "psraw $2,%%xmm3 \n" \ + "psraw $2,%%xmm2 \n" \ + "movdqa %%xmm3,%%xmm1 \n" \ + "punpcklwd %%xmm2,%%xmm3 \n" \ + "punpckhwd %%xmm2,%%xmm1 \n" \ + "packuswb %%xmm1,%%xmm3 \n" \ + "movdqu (%[y_buf]),%%xmm4 \n" \ + "movdqa %%xmm4,%%xmm2 \n" \ + "psllw $6,%%xmm4 \n" \ + "psrlw $4,%%xmm2 \n" \ + "paddw %%xmm2,%%xmm4 \n" \ + "lea 0x10(%[y_buf]),%[y_buf] \n" + +// Read 8 UV from 444 10 bit. With 8 Alpha. 
+#define READYUVA410 \ + "movdqu (%[u_buf]),%%xmm3 \n" \ + "movdqu 0x00(%[u_buf],%[v_buf],1),%%xmm2 \n" \ + "lea 0x10(%[u_buf]),%[u_buf] \n" \ + "psraw $2,%%xmm3 \n" \ + "psraw $2,%%xmm2 \n" \ + "movdqa %%xmm3,%%xmm1 \n" \ + "punpcklwd %%xmm2,%%xmm3 \n" \ + "punpckhwd %%xmm2,%%xmm1 \n" \ + "packuswb %%xmm1,%%xmm3 \n" \ + "movdqu (%[y_buf]),%%xmm4 \n" \ + "movdqa %%xmm4,%%xmm2 \n" \ + "psllw $6,%%xmm4 \n" \ + "psrlw $4,%%xmm2 \n" \ + "paddw %%xmm2,%%xmm4 \n" \ + "lea 0x10(%[y_buf]),%[y_buf] \n" \ + "movdqu (%[a_buf]),%%xmm5 \n" \ + "psraw $2,%%xmm5 \n" \ + "packuswb %%xmm5,%%xmm5 \n" \ + "lea 0x10(%[a_buf]),%[a_buf] \n" + +// Read 4 UV from 422 12 bit, upsample to 8 UV +#define READYUV212 \ + "movq (%[u_buf]),%%xmm3 \n" \ + "movq 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ + "lea 0x8(%[u_buf]),%[u_buf] \n" \ + "punpcklwd %%xmm1,%%xmm3 \n" \ + "psraw $0x4,%%xmm3 \n" \ + "packuswb %%xmm3,%%xmm3 \n" \ + "punpcklwd %%xmm3,%%xmm3 \n" \ "movdqu (%[y_buf]),%%xmm4 \n" \ - "psllw $0x6,%%xmm4 \n" \ + "movdqa %%xmm4,%%xmm2 \n" \ + "psllw $4,%%xmm4 \n" \ + "psrlw $8,%%xmm2 \n" \ + "paddw %%xmm2,%%xmm4 \n" \ "lea 0x10(%[y_buf]),%[y_buf] \n" // Read 4 UV from 422, upsample to 8 UV. With 8 Alpha. #define READYUVA422 \ - "movd (%[u_buf]),%%xmm0 \n" \ + "movd (%[u_buf]),%%xmm3 \n" \ "movd 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ "lea 0x4(%[u_buf]),%[u_buf] \n" \ - "punpcklbw %%xmm1,%%xmm0 \n" \ - "punpcklwd %%xmm0,%%xmm0 \n" \ + "punpcklbw %%xmm1,%%xmm3 \n" \ + "punpcklwd %%xmm3,%%xmm3 \n" \ + "movq (%[y_buf]),%%xmm4 \n" \ + "punpcklbw %%xmm4,%%xmm4 \n" \ + "lea 0x8(%[y_buf]),%[y_buf] \n" \ + "movq (%[a_buf]),%%xmm5 \n" \ + "lea 0x8(%[a_buf]),%[a_buf] \n" + +// Read 8 UV from 444. With 8 Alpha. +#define READYUVA444 \ + "movq (%[u_buf]),%%xmm3 \n" \ + "movq 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ + "lea 0x8(%[u_buf]),%[u_buf] \n" \ + "punpcklbw %%xmm1,%%xmm3 \n" \ "movq (%[y_buf]),%%xmm4 \n" \ "punpcklbw %%xmm4,%%xmm4 \n" \ "lea 0x8(%[y_buf]),%[y_buf] \n" \ @@ -1966,18 +2451,18 @@ void RGBAToUVRow_SSSE3(const uint8_t* src_rgba0, // Read 4 UV from NV12, upsample to 8 UV #define READNV12 \ - "movq (%[uv_buf]),%%xmm0 \n" \ + "movq (%[uv_buf]),%%xmm3 \n" \ "lea 0x8(%[uv_buf]),%[uv_buf] \n" \ - "punpcklwd %%xmm0,%%xmm0 \n" \ + "punpcklwd %%xmm3,%%xmm3 \n" \ "movq (%[y_buf]),%%xmm4 \n" \ "punpcklbw %%xmm4,%%xmm4 \n" \ "lea 0x8(%[y_buf]),%[y_buf] \n" // Read 4 VU from NV21, upsample to 8 UV #define READNV21 \ - "movq (%[vu_buf]),%%xmm0 \n" \ + "movq (%[vu_buf]),%%xmm3 \n" \ "lea 0x8(%[vu_buf]),%[vu_buf] \n" \ - "pshufb %[kShuffleNV21], %%xmm0 \n" \ + "pshufb %[kShuffleNV21], %%xmm3 \n" \ "movq (%[y_buf]),%%xmm4 \n" \ "punpcklbw %%xmm4,%%xmm4 \n" \ "lea 0x8(%[y_buf]),%[y_buf] \n" @@ -1986,68 +2471,92 @@ void RGBAToUVRow_SSSE3(const uint8_t* src_rgba0, #define READYUY2 \ "movdqu (%[yuy2_buf]),%%xmm4 \n" \ "pshufb %[kShuffleYUY2Y], %%xmm4 \n" \ - "movdqu (%[yuy2_buf]),%%xmm0 \n" \ - "pshufb %[kShuffleYUY2UV], %%xmm0 \n" \ + "movdqu (%[yuy2_buf]),%%xmm3 \n" \ + "pshufb %[kShuffleYUY2UV], %%xmm3 \n" \ "lea 0x10(%[yuy2_buf]),%[yuy2_buf] \n" // Read 4 UYVY with 8 Y and update 4 UV to 8 UV. 
#define READUYVY \ "movdqu (%[uyvy_buf]),%%xmm4 \n" \ "pshufb %[kShuffleUYVYY], %%xmm4 \n" \ - "movdqu (%[uyvy_buf]),%%xmm0 \n" \ - "pshufb %[kShuffleUYVYUV], %%xmm0 \n" \ + "movdqu (%[uyvy_buf]),%%xmm3 \n" \ + "pshufb %[kShuffleUYVYUV], %%xmm3 \n" \ "lea 0x10(%[uyvy_buf]),%[uyvy_buf] \n" +// Read 4 UV from P210, upsample to 8 UV +#define READP210 \ + "movdqu (%[uv_buf]),%%xmm3 \n" \ + "lea 0x10(%[uv_buf]),%[uv_buf] \n" \ + "psrlw $0x8,%%xmm3 \n" \ + "packuswb %%xmm3,%%xmm3 \n" \ + "punpcklwd %%xmm3,%%xmm3 \n" \ + "movdqu (%[y_buf]),%%xmm4 \n" \ + "lea 0x10(%[y_buf]),%[y_buf] \n" + +// Read 8 UV from P410 +#define READP410 \ + "movdqu (%[uv_buf]),%%xmm3 \n" \ + "movdqu 0x10(%[uv_buf]),%%xmm1 \n" \ + "lea 0x20(%[uv_buf]),%[uv_buf] \n" \ + "psrlw $0x8,%%xmm3 \n" \ + "psrlw $0x8,%%xmm1 \n" \ + "packuswb %%xmm1,%%xmm3 \n" \ + "movdqu (%[y_buf]),%%xmm4 \n" \ + "lea 0x10(%[y_buf]),%[y_buf] \n" + #if defined(__x86_64__) #define YUVTORGB_SETUP(yuvconstants) \ + "pcmpeqb %%xmm13,%%xmm13 \n" \ "movdqa (%[yuvconstants]),%%xmm8 \n" \ + "pxor %%xmm12,%%xmm12 \n" \ "movdqa 32(%[yuvconstants]),%%xmm9 \n" \ + "psllw $7,%%xmm13 \n" \ "movdqa 64(%[yuvconstants]),%%xmm10 \n" \ + "pshufb %%xmm12,%%xmm13 \n" \ "movdqa 96(%[yuvconstants]),%%xmm11 \n" \ - "movdqa 128(%[yuvconstants]),%%xmm12 \n" \ - "movdqa 160(%[yuvconstants]),%%xmm13 \n" \ - "movdqa 192(%[yuvconstants]),%%xmm14 \n" + "movdqa 128(%[yuvconstants]),%%xmm12 \n" + // Convert 8 pixels: 8 UV and 8 Y #define YUVTORGB16(yuvconstants) \ - "movdqa %%xmm0,%%xmm1 \n" \ - "movdqa %%xmm0,%%xmm2 \n" \ - "movdqa %%xmm0,%%xmm3 \n" \ - "movdqa %%xmm11,%%xmm0 \n" \ - "pmaddubsw %%xmm8,%%xmm1 \n" \ - "psubw %%xmm1,%%xmm0 \n" \ - "movdqa %%xmm12,%%xmm1 \n" \ - "pmaddubsw %%xmm9,%%xmm2 \n" \ - "psubw %%xmm2,%%xmm1 \n" \ - "movdqa %%xmm13,%%xmm2 \n" \ - "pmaddubsw %%xmm10,%%xmm3 \n" \ - "psubw %%xmm3,%%xmm2 \n" \ - "pmulhuw %%xmm14,%%xmm4 \n" \ + "psubb %%xmm13,%%xmm3 \n" \ + "pmulhuw %%xmm11,%%xmm4 \n" \ + "movdqa %%xmm8,%%xmm0 \n" \ + "movdqa %%xmm9,%%xmm1 \n" \ + "movdqa %%xmm10,%%xmm2 \n" \ + "paddw %%xmm12,%%xmm4 \n" \ + "pmaddubsw %%xmm3,%%xmm0 \n" \ + "pmaddubsw %%xmm3,%%xmm1 \n" \ + "pmaddubsw %%xmm3,%%xmm2 \n" \ "paddsw %%xmm4,%%xmm0 \n" \ - "paddsw %%xmm4,%%xmm1 \n" \ - "paddsw %%xmm4,%%xmm2 \n" -#define YUVTORGB_REGS \ - "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm14", + "paddsw %%xmm4,%%xmm2 \n" \ + "psubsw %%xmm1,%%xmm4 \n" \ + "movdqa %%xmm4,%%xmm1 \n" + +#define YUVTORGB_REGS "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", #else #define YUVTORGB_SETUP(yuvconstants) // Convert 8 pixels: 8 UV and 8 Y #define YUVTORGB16(yuvconstants) \ - "movdqa %%xmm0,%%xmm1 \n" \ - "movdqa %%xmm0,%%xmm2 \n" \ - "movdqa %%xmm0,%%xmm3 \n" \ - "movdqa 96(%[yuvconstants]),%%xmm0 \n" \ - "pmaddubsw (%[yuvconstants]),%%xmm1 \n" \ - "psubw %%xmm1,%%xmm0 \n" \ - "movdqa 128(%[yuvconstants]),%%xmm1 \n" \ - "pmaddubsw 32(%[yuvconstants]),%%xmm2 \n" \ - "psubw %%xmm2,%%xmm1 \n" \ - "movdqa 160(%[yuvconstants]),%%xmm2 \n" \ - "pmaddubsw 64(%[yuvconstants]),%%xmm3 \n" \ - "psubw %%xmm3,%%xmm2 \n" \ - "pmulhuw 192(%[yuvconstants]),%%xmm4 \n" \ + "pcmpeqb %%xmm0,%%xmm0 \n" \ + "pxor %%xmm1,%%xmm1 \n" \ + "psllw $7,%%xmm0 \n" \ + "pshufb %%xmm1,%%xmm0 \n" \ + "psubb %%xmm0,%%xmm3 \n" \ + "pmulhuw 96(%[yuvconstants]),%%xmm4 \n" \ + "movdqa (%[yuvconstants]),%%xmm0 \n" \ + "movdqa 32(%[yuvconstants]),%%xmm1 \n" \ + "movdqa 64(%[yuvconstants]),%%xmm2 \n" \ + "pmaddubsw %%xmm3,%%xmm0 \n" \ + "pmaddubsw %%xmm3,%%xmm1 \n" \ + "pmaddubsw %%xmm3,%%xmm2 \n" \ + "movdqa 
128(%[yuvconstants]),%%xmm3 \n" \ + "paddw %%xmm3,%%xmm4 \n" \ "paddsw %%xmm4,%%xmm0 \n" \ - "paddsw %%xmm4,%%xmm1 \n" \ - "paddsw %%xmm4,%%xmm2 \n" + "paddsw %%xmm4,%%xmm2 \n" \ + "psubsw %%xmm1,%%xmm4 \n" \ + "movdqa %%xmm4,%%xmm1 \n" + #define YUVTORGB_REGS #endif @@ -2083,6 +2592,20 @@ void RGBAToUVRow_SSSE3(const uint8_t* src_rgba0, "movdqu %%xmm0,0x10(%[dst_rgba]) \n" \ "lea 0x20(%[dst_rgba]),%[dst_rgba] \n" +// Store 8 RGB24 values. +#define STORERGB24 \ + "punpcklbw %%xmm1,%%xmm0 \n" \ + "punpcklbw %%xmm2,%%xmm2 \n" \ + "movdqa %%xmm0,%%xmm1 \n" \ + "punpcklwd %%xmm2,%%xmm0 \n" \ + "punpckhwd %%xmm2,%%xmm1 \n" \ + "pshufb %%xmm5,%%xmm0 \n" \ + "pshufb %%xmm6,%%xmm1 \n" \ + "palignr $0xc,%%xmm0,%%xmm1 \n" \ + "movq %%xmm0,(%[dst_rgb24]) \n" \ + "movdqu %%xmm1,0x8(%[dst_rgb24]) \n" \ + "lea 0x18(%[dst_rgb24]),%[dst_rgb24] \n" + // Store 8 AR30 values. #define STOREAR30 \ "psraw $0x4,%%xmm0 \n" \ @@ -2138,6 +2661,44 @@ void OMITFP I444ToARGBRow_SSSE3(const uint8_t* y_buf, ); } +#ifdef HAS_I444ALPHATOARGBROW_SSSE3 +void OMITFP I444AlphaToARGBRow_SSSE3(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + const uint8_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + // clang-format off + asm volatile ( + YUVTORGB_SETUP(yuvconstants) + "sub %[u_buf],%[v_buf] \n" + + LABELALIGN + "1: \n" + READYUVA444 + YUVTORGB(yuvconstants) + STOREARGB + "subl $0x8,%[width] \n" + "jg 1b \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [u_buf]"+r"(u_buf), // %[u_buf] + [v_buf]"+r"(v_buf), // %[v_buf] + [a_buf]"+r"(a_buf), // %[a_buf] + [dst_argb]"+r"(dst_argb), // %[dst_argb] +#if defined(__i386__) + [width]"+m"(width) // %[width] +#else + [width]"+rm"(width) // %[width] +#endif + : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5" + ); + // clang-format on +} +#endif // HAS_I444ALPHATOARGBROW_SSSE3 + void OMITFP I422ToRGB24Row_SSSE3(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, @@ -2154,17 +2715,43 @@ void OMITFP I422ToRGB24Row_SSSE3(const uint8_t* y_buf, "1: \n" READYUV422 YUVTORGB(yuvconstants) - "punpcklbw %%xmm1,%%xmm0 \n" - "punpcklbw %%xmm2,%%xmm2 \n" - "movdqa %%xmm0,%%xmm1 \n" - "punpcklwd %%xmm2,%%xmm0 \n" - "punpckhwd %%xmm2,%%xmm1 \n" - "pshufb %%xmm5,%%xmm0 \n" - "pshufb %%xmm6,%%xmm1 \n" - "palignr $0xc,%%xmm0,%%xmm1 \n" - "movq %%xmm0,(%[dst_rgb24]) \n" - "movdqu %%xmm1,0x8(%[dst_rgb24]) \n" - "lea 0x18(%[dst_rgb24]),%[dst_rgb24] \n" + STORERGB24 + "subl $0x8,%[width] \n" + "jg 1b \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [u_buf]"+r"(u_buf), // %[u_buf] + [v_buf]"+r"(v_buf), // %[v_buf] + [dst_rgb24]"+r"(dst_rgb24), // %[dst_rgb24] +#if defined(__i386__) + [width]"+m"(width) // %[width] +#else + [width]"+rm"(width) // %[width] +#endif + : [yuvconstants]"r"(yuvconstants), // %[yuvconstants] + [kShuffleMaskARGBToRGB24_0]"m"(kShuffleMaskARGBToRGB24_0), + [kShuffleMaskARGBToRGB24]"m"(kShuffleMaskARGBToRGB24) + : "memory", "cc", YUVTORGB_REGS + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6" + ); +} + +void OMITFP I444ToRGB24Row_SSSE3(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_rgb24, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile ( + YUVTORGB_SETUP(yuvconstants) + "movdqa %[kShuffleMaskARGBToRGB24_0],%%xmm5 \n" + "movdqa %[kShuffleMaskARGBToRGB24],%%xmm6 \n" + "sub %[u_buf],%[v_buf] \n" + + LABELALIGN + "1: \n" + READYUV444 + YUVTORGB(yuvconstants) + STORERGB24 "subl 
$0x8,%[width] \n" "jg 1b \n" : [y_buf]"+r"(y_buf), // %[y_buf] @@ -2225,8 +2812,8 @@ void OMITFP I422ToAR30Row_SSSE3(const uint8_t* y_buf, "pcmpeqb %%xmm5,%%xmm5 \n" // AR30 constants "psrlw $14,%%xmm5 \n" "psllw $4,%%xmm5 \n" // 2 alpha bits - "pxor %%xmm6,%%xmm6 \n" - "pcmpeqb %%xmm7,%%xmm7 \n" // 0 for min + "pxor %%xmm6,%%xmm6 \n" // 0 for min + "pcmpeqb %%xmm7,%%xmm7 \n" "psrlw $6,%%xmm7 \n" // 1023 for max LABELALIGN @@ -2277,51 +2864,251 @@ void OMITFP I210ToARGBRow_SSSE3(const uint16_t* y_buf, ); } -// 10 bit YUV to AR30 -void OMITFP I210ToAR30Row_SSSE3(const uint16_t* y_buf, +// 12 bit YUV to ARGB +void OMITFP I212ToARGBRow_SSSE3(const uint16_t* y_buf, const uint16_t* u_buf, const uint16_t* v_buf, - uint8_t* dst_ar30, + uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width) { asm volatile ( YUVTORGB_SETUP(yuvconstants) "sub %[u_buf],%[v_buf] \n" "pcmpeqb %%xmm5,%%xmm5 \n" - "psrlw $14,%%xmm5 \n" - "psllw $4,%%xmm5 \n" // 2 alpha bits - "pxor %%xmm6,%%xmm6 \n" - "pcmpeqb %%xmm7,%%xmm7 \n" // 0 for min - "psrlw $6,%%xmm7 \n" // 1023 for max LABELALIGN "1: \n" - READYUV210 - YUVTORGB16(yuvconstants) - STOREAR30 + READYUV212 + YUVTORGB(yuvconstants) + STOREARGB "sub $0x8,%[width] \n" "jg 1b \n" : [y_buf]"+r"(y_buf), // %[y_buf] [u_buf]"+r"(u_buf), // %[u_buf] [v_buf]"+r"(v_buf), // %[v_buf] - [dst_ar30]"+r"(dst_ar30), // %[dst_ar30] + [dst_argb]"+r"(dst_argb), // %[dst_argb] [width]"+rm"(width) // %[width] : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] : "memory", "cc", YUVTORGB_REGS - "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7" + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5" ); } -#ifdef HAS_I422ALPHATOARGBROW_SSSE3 -void OMITFP I422AlphaToARGBRow_SSSE3(const uint8_t* y_buf, - const uint8_t* u_buf, - const uint8_t* v_buf, - const uint8_t* a_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width) { - // clang-format off - asm volatile ( +// 10 bit YUV to AR30 +void OMITFP I210ToAR30Row_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile ( + YUVTORGB_SETUP(yuvconstants) + "sub %[u_buf],%[v_buf] \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "psrlw $14,%%xmm5 \n" + "psllw $4,%%xmm5 \n" // 2 alpha bits + "pxor %%xmm6,%%xmm6 \n" // 0 for min + "pcmpeqb %%xmm7,%%xmm7 \n" + "psrlw $6,%%xmm7 \n" // 1023 for max + + LABELALIGN + "1: \n" + READYUV210 + YUVTORGB16(yuvconstants) + STOREAR30 + "sub $0x8,%[width] \n" + "jg 1b \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [u_buf]"+r"(u_buf), // %[u_buf] + [v_buf]"+r"(v_buf), // %[v_buf] + [dst_ar30]"+r"(dst_ar30), // %[dst_ar30] + [width]"+rm"(width) // %[width] + : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7" + ); +} + +// 12 bit YUV to AR30 +void OMITFP I212ToAR30Row_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile ( + YUVTORGB_SETUP(yuvconstants) + "sub %[u_buf],%[v_buf] \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "psrlw $14,%%xmm5 \n" + "psllw $4,%%xmm5 \n" // 2 alpha bits + "pxor %%xmm6,%%xmm6 \n" // 0 for min + "pcmpeqb %%xmm7,%%xmm7 \n" + "psrlw $6,%%xmm7 \n" // 1023 for max + + LABELALIGN + "1: \n" + READYUV212 + YUVTORGB16(yuvconstants) + STOREAR30 + "sub $0x8,%[width] \n" + "jg 1b \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [u_buf]"+r"(u_buf), // %[u_buf] 
+ [v_buf]"+r"(v_buf), // %[v_buf] + [dst_ar30]"+r"(dst_ar30), // %[dst_ar30] + [width]"+rm"(width) // %[width] + : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7" + ); +} + +// 10 bit YUV to ARGB +void OMITFP I410ToARGBRow_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile ( + YUVTORGB_SETUP(yuvconstants) + "sub %[u_buf],%[v_buf] \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + + LABELALIGN + "1: \n" + READYUV410 + YUVTORGB(yuvconstants) + STOREARGB + "sub $0x8,%[width] \n" + "jg 1b \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [u_buf]"+r"(u_buf), // %[u_buf] + [v_buf]"+r"(v_buf), // %[v_buf] + [dst_argb]"+r"(dst_argb), // %[dst_argb] + [width]"+rm"(width) // %[width] + : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5" + ); +} + +#ifdef HAS_I210ALPHATOARGBROW_SSSE3 +// 10 bit YUVA to ARGB +void OMITFP I210AlphaToARGBRow_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + const uint16_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile( + YUVTORGB_SETUP( + yuvconstants) "sub %[u_buf],%[v_buf] \n" + + LABELALIGN "1: \n" READYUVA210 + YUVTORGB(yuvconstants) STOREARGB + "subl $0x8,%[width] \n" + "jg 1b \n" + : [y_buf] "+r"(y_buf), // %[y_buf] + [u_buf] "+r"(u_buf), // %[u_buf] + [v_buf] "+r"(v_buf), // %[v_buf] + [a_buf] "+r"(a_buf), + [dst_argb] "+r"(dst_argb), // %[dst_argb] +#if defined(__i386__) + [width] "+m"(width) // %[width] +#else + [width] "+rm"(width) // %[width] +#endif + : [yuvconstants] "r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", + "xmm5"); +} +#endif + +#ifdef HAS_I410ALPHATOARGBROW_SSSE3 +// 10 bit YUVA to ARGB +void OMITFP I410AlphaToARGBRow_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + const uint16_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + // clang-format off + asm volatile( + YUVTORGB_SETUP(yuvconstants) + "sub %[u_buf],%[v_buf] \n" + + LABELALIGN + "1: \n" + READYUVA410 + YUVTORGB(yuvconstants) + STOREARGB + "subl $0x8,%[width] \n" + "jg 1b \n" + : [y_buf] "+r"(y_buf), // %[y_buf] + [u_buf] "+r"(u_buf), // %[u_buf] + [v_buf] "+r"(v_buf), // %[v_buf] + [a_buf] "+r"(a_buf), + [dst_argb] "+r"(dst_argb), // %[dst_argb] +#if defined(__i386__) + [width] "+m"(width) // %[width] +#else + [width] "+rm"(width) // %[width] +#endif + : [yuvconstants] "r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", + "xmm5"); + // clang-format on +} +#endif + +// 10 bit YUV to AR30 +void OMITFP I410ToAR30Row_SSSE3(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile ( + YUVTORGB_SETUP(yuvconstants) + "sub %[u_buf],%[v_buf] \n" + "pcmpeqb %%xmm5,%%xmm5 \n" + "psrlw $14,%%xmm5 \n" + "psllw $4,%%xmm5 \n" // 2 alpha bits + "pxor %%xmm6,%%xmm6 \n" // 0 for min + "pcmpeqb %%xmm7,%%xmm7 \n" + "psrlw $6,%%xmm7 \n" // 1023 for max + + LABELALIGN + "1: \n" + READYUV410 + YUVTORGB16(yuvconstants) + STOREAR30 + "sub $0x8,%[width] \n" + "jg 1b \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [u_buf]"+r"(u_buf), // %[u_buf] + 
[v_buf]"+r"(v_buf), // %[v_buf] + [dst_ar30]"+r"(dst_ar30), // %[dst_ar30] + [width]"+rm"(width) // %[width] + : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7" + ); +} + +#ifdef HAS_I422ALPHATOARGBROW_SSSE3 +void OMITFP I422AlphaToARGBRow_SSSE3(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + const uint8_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + // clang-format off + asm volatile ( YUVTORGB_SETUP(yuvconstants) "sub %[u_buf],%[v_buf] \n" @@ -2463,6 +3250,112 @@ void OMITFP UYVYToARGBRow_SSSE3(const uint8_t* uyvy_buf, // clang-format on } +void OMITFP P210ToARGBRow_SSSE3(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile( + YUVTORGB_SETUP( + yuvconstants) "pcmpeqb %%xmm5,%%xmm5 \n" + + LABELALIGN "1: \n" READP210 + YUVTORGB(yuvconstants) STOREARGB + "sub $0x8,%[width] \n" + "jg 1b \n" + : [y_buf] "+r"(y_buf), // %[y_buf] + [uv_buf] "+r"(uv_buf), // %[u_buf] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+rm"(width) // %[width] + : [yuvconstants] "r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", + "xmm5"); +} + +void OMITFP P410ToARGBRow_SSSE3(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile( + YUVTORGB_SETUP( + yuvconstants) "pcmpeqb %%xmm5,%%xmm5 \n" + + LABELALIGN "1: \n" READP410 + YUVTORGB(yuvconstants) STOREARGB + "sub $0x8,%[width] \n" + "jg 1b \n" + : [y_buf] "+r"(y_buf), // %[y_buf] + [uv_buf] "+r"(uv_buf), // %[u_buf] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+rm"(width) // %[width] + : [yuvconstants] "r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", + "xmm5"); +} + +void OMITFP P210ToAR30Row_SSSE3(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile ( + YUVTORGB_SETUP(yuvconstants) + "pcmpeqb %%xmm5,%%xmm5 \n" + "psrlw $14,%%xmm5 \n" + "psllw $4,%%xmm5 \n" // 2 alpha bits + "pxor %%xmm6,%%xmm6 \n" // 0 for min + "pcmpeqb %%xmm7,%%xmm7 \n" + "psrlw $6,%%xmm7 \n" // 1023 for max + + LABELALIGN + "1: \n" + READP210 + YUVTORGB16(yuvconstants) + STOREAR30 + "sub $0x8,%[width] \n" + "jg 1b \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [uv_buf]"+r"(uv_buf), // %[uv_buf] + [dst_ar30]"+r"(dst_ar30), // %[dst_ar30] + [width]"+rm"(width) // %[width] + : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7" + ); +} + +void OMITFP P410ToAR30Row_SSSE3(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile ( + YUVTORGB_SETUP(yuvconstants) + "pcmpeqb %%xmm5,%%xmm5 \n" + "psrlw $14,%%xmm5 \n" + "psllw $4,%%xmm5 \n" // 2 alpha bits + "pxor %%xmm6,%%xmm6 \n" // 0 for min + "pcmpeqb %%xmm7,%%xmm7 \n" + "psrlw $6,%%xmm7 \n" // 1023 for max + + LABELALIGN + "1: \n" + READP410 + YUVTORGB16(yuvconstants) + STOREAR30 + "sub $0x8,%[width] \n" + "jg 1b \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [uv_buf]"+r"(uv_buf), // %[uv_buf] + [dst_ar30]"+r"(dst_ar30), // %[dst_ar30] + [width]"+rm"(width) // %[width] + : [yuvconstants]"r"(yuvconstants) // 
%[yuvconstants] + : "memory", "cc", YUVTORGB_REGS + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7" + ); +} + void OMITFP I422ToRGBARow_SSSE3(const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, @@ -2496,12 +3389,12 @@ void OMITFP I422ToRGBARow_SSSE3(const uint8_t* y_buf, // Read 16 UV from 444 #define READYUV444_AVX2 \ - "vmovdqu (%[u_buf]),%%xmm0 \n" \ + "vmovdqu (%[u_buf]),%%xmm3 \n" \ "vmovdqu 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ "lea 0x10(%[u_buf]),%[u_buf] \n" \ - "vpermq $0xd8,%%ymm0,%%ymm0 \n" \ + "vpermq $0xd8,%%ymm3,%%ymm3 \n" \ "vpermq $0xd8,%%ymm1,%%ymm1 \n" \ - "vpunpcklbw %%ymm1,%%ymm0,%%ymm0 \n" \ + "vpunpcklbw %%ymm1,%%ymm3,%%ymm3 \n" \ "vmovdqu (%[y_buf]),%%xmm4 \n" \ "vpermq $0xd8,%%ymm4,%%ymm4 \n" \ "vpunpcklbw %%ymm4,%%ymm4,%%ymm4 \n" \ @@ -2509,42 +3402,149 @@ void OMITFP I422ToRGBARow_SSSE3(const uint8_t* y_buf, // Read 8 UV from 422, upsample to 16 UV. #define READYUV422_AVX2 \ - "vmovq (%[u_buf]),%%xmm0 \n" \ + "vmovq (%[u_buf]),%%xmm3 \n" \ "vmovq 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ "lea 0x8(%[u_buf]),%[u_buf] \n" \ - "vpunpcklbw %%ymm1,%%ymm0,%%ymm0 \n" \ - "vpermq $0xd8,%%ymm0,%%ymm0 \n" \ - "vpunpcklwd %%ymm0,%%ymm0,%%ymm0 \n" \ + "vpunpcklbw %%ymm1,%%ymm3,%%ymm3 \n" \ + "vpermq $0xd8,%%ymm3,%%ymm3 \n" \ + "vpunpcklwd %%ymm3,%%ymm3,%%ymm3 \n" \ "vmovdqu (%[y_buf]),%%xmm4 \n" \ "vpermq $0xd8,%%ymm4,%%ymm4 \n" \ "vpunpcklbw %%ymm4,%%ymm4,%%ymm4 \n" \ "lea 0x10(%[y_buf]),%[y_buf] \n" -// Read 8 UV from 210 10 bit, upsample to 16 UV +#define READYUV422_AVX512BW \ + "vmovdqu (%[u_buf]),%%xmm3 \n" \ + "vmovdqu 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ + "vpermq %%zmm3,%%zmm16,%%zmm3 \n" \ + "vpermq %%zmm1,%%zmm16,%%zmm1 \n" \ + "lea 0x10(%[u_buf]),%[u_buf] \n" \ + "vpunpcklbw %%zmm1,%%zmm3,%%zmm3 \n" \ + "vpermq $0xd8,%%zmm3,%%zmm3 \n" \ + "vpunpcklwd %%zmm3,%%zmm3,%%zmm3 \n" \ + "vmovdqu8 (%[y_buf]),%%ymm4 \n" \ + "vpermq %%zmm4,%%zmm17,%%zmm4 \n" \ + "vpermq $0xd8,%%zmm4,%%zmm4 \n" \ + "vpunpcklbw %%zmm4,%%zmm4,%%zmm4 \n" \ + "lea 0x20(%[y_buf]),%[y_buf] \n" + +// Read 8 UV from 210, upsample to 16 UV // TODO(fbarchard): Consider vshufb to replace pack/unpack // TODO(fbarchard): Consider vunpcklpd to combine the 2 registers into 1. #define READYUV210_AVX2 \ - "vmovdqu (%[u_buf]),%%xmm0 \n" \ + "vmovdqu (%[u_buf]),%%xmm3 \n" \ "vmovdqu 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ "lea 0x10(%[u_buf]),%[u_buf] \n" \ - "vpermq $0xd8,%%ymm0,%%ymm0 \n" \ + "vpermq $0xd8,%%ymm3,%%ymm3 \n" \ "vpermq $0xd8,%%ymm1,%%ymm1 \n" \ - "vpunpcklwd %%ymm1,%%ymm0,%%ymm0 \n" \ - "vpsraw $0x2,%%ymm0,%%ymm0 \n" \ - "vpackuswb %%ymm0,%%ymm0,%%ymm0 \n" \ - "vpunpcklwd %%ymm0,%%ymm0,%%ymm0 \n" \ + "vpunpcklwd %%ymm1,%%ymm3,%%ymm3 \n" \ + "vpsraw $2,%%ymm3,%%ymm3 \n" \ + "vpackuswb %%ymm3,%%ymm3,%%ymm3 \n" \ + "vpunpcklwd %%ymm3,%%ymm3,%%ymm3 \n" \ "vmovdqu (%[y_buf]),%%ymm4 \n" \ - "vpsllw $0x6,%%ymm4,%%ymm4 \n" \ + "vpsllw $6,%%ymm4,%%ymm2 \n" \ + "vpsrlw $4,%%ymm4,%%ymm4 \n" \ + "vpaddw %%ymm2,%%ymm4,%%ymm4 \n" \ "lea 0x20(%[y_buf]),%[y_buf] \n" -// Read 8 UV from 422, upsample to 16 UV. With 16 Alpha. -#define READYUVA422_AVX2 \ - "vmovq (%[u_buf]),%%xmm0 \n" \ - "vmovq 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ - "lea 0x8(%[u_buf]),%[u_buf] \n" \ - "vpunpcklbw %%ymm1,%%ymm0,%%ymm0 \n" \ - "vpermq $0xd8,%%ymm0,%%ymm0 \n" \ - "vpunpcklwd %%ymm0,%%ymm0,%%ymm0 \n" \ +// Read 8 UV from 210, upsample to 16 UV. With 16 Alpha. 
+#define READYUVA210_AVX2 \ + "vmovdqu (%[u_buf]),%%xmm3 \n" \ + "vmovdqu 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ + "lea 0x10(%[u_buf]),%[u_buf] \n" \ + "vpermq $0xd8,%%ymm3,%%ymm3 \n" \ + "vpermq $0xd8,%%ymm1,%%ymm1 \n" \ + "vpunpcklwd %%ymm1,%%ymm3,%%ymm3 \n" \ + "vpsraw $2,%%ymm3,%%ymm3 \n" \ + "vpackuswb %%ymm3,%%ymm3,%%ymm3 \n" \ + "vpunpcklwd %%ymm3,%%ymm3,%%ymm3 \n" \ + "vmovdqu (%[y_buf]),%%ymm4 \n" \ + "vpsllw $6,%%ymm4,%%ymm2 \n" \ + "vpsrlw $4,%%ymm4,%%ymm4 \n" \ + "vpaddw %%ymm2,%%ymm4,%%ymm4 \n" \ + "lea 0x20(%[y_buf]),%[y_buf] \n" \ + "vmovdqu (%[a_buf]),%%ymm5 \n" \ + "vpsraw $2,%%ymm5,%%ymm5 \n" \ + "vpackuswb %%ymm5,%%ymm5,%%ymm5 \n" \ + "lea 0x20(%[a_buf]),%[a_buf] \n" + +// Read 16 UV from 410 +#define READYUV410_AVX2 \ + "vmovdqu (%[u_buf]),%%ymm3 \n" \ + "vmovdqu 0x00(%[u_buf],%[v_buf],1),%%ymm2 \n" \ + "lea 0x20(%[u_buf]),%[u_buf] \n" \ + "vpsraw $2,%%ymm3,%%ymm3 \n" \ + "vpsraw $2,%%ymm2,%%ymm2 \n" \ + "vpunpckhwd %%ymm2,%%ymm3,%%ymm1 \n" \ + "vpunpcklwd %%ymm2,%%ymm3,%%ymm3 \n" \ + "vpackuswb %%ymm1,%%ymm3,%%ymm3 \n" \ + "vmovdqu (%[y_buf]),%%ymm4 \n" \ + "vpsllw $6,%%ymm4,%%ymm2 \n" \ + "vpsrlw $4,%%ymm4,%%ymm4 \n" \ + "vpaddw %%ymm2,%%ymm4,%%ymm4 \n" \ + "lea 0x20(%[y_buf]),%[y_buf] \n" + +// Read 8 UV from 212 12 bit, upsample to 16 UV +#define READYUV212_AVX2 \ + "vmovdqu (%[u_buf]),%%xmm3 \n" \ + "vmovdqu 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ + "lea 0x10(%[u_buf]),%[u_buf] \n" \ + "vpermq $0xd8,%%ymm3,%%ymm3 \n" \ + "vpermq $0xd8,%%ymm1,%%ymm1 \n" \ + "vpunpcklwd %%ymm1,%%ymm3,%%ymm3 \n" \ + "vpsraw $0x4,%%ymm3,%%ymm3 \n" \ + "vpackuswb %%ymm3,%%ymm3,%%ymm3 \n" \ + "vpunpcklwd %%ymm3,%%ymm3,%%ymm3 \n" \ + "vmovdqu (%[y_buf]),%%ymm4 \n" \ + "vpsllw $4,%%ymm4,%%ymm2 \n" \ + "vpsrlw $8,%%ymm4,%%ymm4 \n" \ + "vpaddw %%ymm2,%%ymm4,%%ymm4 \n" \ + "lea 0x20(%[y_buf]),%[y_buf] \n" + +// Read 16 UV from 410. With 16 Alpha. +#define READYUVA410_AVX2 \ + "vmovdqu (%[u_buf]),%%ymm3 \n" \ + "vmovdqu 0x00(%[u_buf],%[v_buf],1),%%ymm2 \n" \ + "lea 0x20(%[u_buf]),%[u_buf] \n" \ + "vpsraw $2,%%ymm3,%%ymm3 \n" \ + "vpsraw $2,%%ymm2,%%ymm2 \n" \ + "vpunpckhwd %%ymm2,%%ymm3,%%ymm1 \n" \ + "vpunpcklwd %%ymm2,%%ymm3,%%ymm3 \n" \ + "vpackuswb %%ymm1,%%ymm3,%%ymm3 \n" \ + "vmovdqu (%[y_buf]),%%ymm4 \n" \ + "vpsllw $6,%%ymm4,%%ymm2 \n" \ + "vpsrlw $4,%%ymm4,%%ymm4 \n" \ + "vpaddw %%ymm2,%%ymm4,%%ymm4 \n" \ + "lea 0x20(%[y_buf]),%[y_buf] \n" \ + "vmovdqu (%[a_buf]),%%ymm5 \n" \ + "vpsraw $2,%%ymm5,%%ymm5 \n" \ + "vpackuswb %%ymm5,%%ymm5,%%ymm5 \n" \ + "lea 0x20(%[a_buf]),%[a_buf] \n" + +// Read 16 UV from 444. With 16 Alpha. +#define READYUVA444_AVX2 \ + "vmovdqu (%[u_buf]),%%xmm3 \n" \ + "vmovdqu 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ + "lea 0x10(%[u_buf]),%[u_buf] \n" \ + "vpermq $0xd8,%%ymm3,%%ymm3 \n" \ + "vpermq $0xd8,%%ymm1,%%ymm1 \n" \ + "vpunpcklbw %%ymm1,%%ymm3,%%ymm3 \n" \ + "vmovdqu (%[y_buf]),%%xmm4 \n" \ + "vpermq $0xd8,%%ymm4,%%ymm4 \n" \ + "vpunpcklbw %%ymm4,%%ymm4,%%ymm4 \n" \ + "lea 0x10(%[y_buf]),%[y_buf] \n" \ + "vmovdqu (%[a_buf]),%%xmm5 \n" \ + "vpermq $0xd8,%%ymm5,%%ymm5 \n" \ + "lea 0x10(%[a_buf]),%[a_buf] \n" + +// Read 8 UV from 422, upsample to 16 UV. With 16 Alpha. 
+#define READYUVA422_AVX2 \ + "vmovq (%[u_buf]),%%xmm3 \n" \ + "vmovq 0x00(%[u_buf],%[v_buf],1),%%xmm1 \n" \ + "lea 0x8(%[u_buf]),%[u_buf] \n" \ + "vpunpcklbw %%ymm1,%%ymm3,%%ymm3 \n" \ + "vpermq $0xd8,%%ymm3,%%ymm3 \n" \ + "vpunpcklwd %%ymm3,%%ymm3,%%ymm3 \n" \ "vmovdqu (%[y_buf]),%%xmm4 \n" \ "vpermq $0xd8,%%ymm4,%%ymm4 \n" \ "vpunpcklbw %%ymm4,%%ymm4,%%ymm4 \n" \ @@ -2555,10 +3555,10 @@ void OMITFP I422ToRGBARow_SSSE3(const uint8_t* y_buf, // Read 8 UV from NV12, upsample to 16 UV. #define READNV12_AVX2 \ - "vmovdqu (%[uv_buf]),%%xmm0 \n" \ + "vmovdqu (%[uv_buf]),%%xmm3 \n" \ "lea 0x10(%[uv_buf]),%[uv_buf] \n" \ - "vpermq $0xd8,%%ymm0,%%ymm0 \n" \ - "vpunpcklwd %%ymm0,%%ymm0,%%ymm0 \n" \ + "vpermq $0xd8,%%ymm3,%%ymm3 \n" \ + "vpunpcklwd %%ymm3,%%ymm3,%%ymm3 \n" \ "vmovdqu (%[y_buf]),%%xmm4 \n" \ "vpermq $0xd8,%%ymm4,%%ymm4 \n" \ "vpunpcklbw %%ymm4,%%ymm4,%%ymm4 \n" \ @@ -2566,73 +3566,130 @@ void OMITFP I422ToRGBARow_SSSE3(const uint8_t* y_buf, // Read 8 VU from NV21, upsample to 16 UV. #define READNV21_AVX2 \ - "vmovdqu (%[vu_buf]),%%xmm0 \n" \ + "vmovdqu (%[vu_buf]),%%xmm3 \n" \ "lea 0x10(%[vu_buf]),%[vu_buf] \n" \ - "vpermq $0xd8,%%ymm0,%%ymm0 \n" \ - "vpshufb %[kShuffleNV21], %%ymm0, %%ymm0 \n" \ + "vpermq $0xd8,%%ymm3,%%ymm3 \n" \ + "vpshufb %[kShuffleNV21], %%ymm3, %%ymm3 \n" \ "vmovdqu (%[y_buf]),%%xmm4 \n" \ "vpermq $0xd8,%%ymm4,%%ymm4 \n" \ "vpunpcklbw %%ymm4,%%ymm4,%%ymm4 \n" \ "lea 0x10(%[y_buf]),%[y_buf] \n" +// Read 4 UV from P210, upsample to 8 UV +#define READP210_AVX2 \ + "vmovdqu (%[uv_buf]),%%ymm3 \n" \ + "lea 0x20(%[uv_buf]),%[uv_buf] \n" \ + "vpsrlw $0x8,%%ymm3,%%ymm3 \n" \ + "vpackuswb %%ymm3,%%ymm3,%%ymm3 \n" \ + "vpunpcklwd %%ymm3,%%ymm3,%%ymm3 \n" \ + "vmovdqu (%[y_buf]),%%ymm4 \n" \ + "lea 0x20(%[y_buf]),%[y_buf] \n" + +// Read 8 UV from P410 +#define READP410_AVX2 \ + "vmovdqu (%[uv_buf]),%%ymm3 \n" \ + "vmovdqu 0x20(%[uv_buf]),%%ymm1 \n" \ + "lea 0x40(%[uv_buf]),%[uv_buf] \n" \ + "vpsrlw $0x8,%%ymm3,%%ymm3 \n" \ + "vpsrlw $0x8,%%ymm1,%%ymm1 \n" \ + "vpackuswb %%ymm1,%%ymm3,%%ymm3 \n" \ + "vpermq $0xd8,%%ymm3,%%ymm3 \n" \ + "vmovdqu (%[y_buf]),%%ymm4 \n" \ + "lea 0x20(%[y_buf]),%[y_buf] \n" + // Read 8 YUY2 with 16 Y and upsample 8 UV to 16 UV. #define READYUY2_AVX2 \ "vmovdqu (%[yuy2_buf]),%%ymm4 \n" \ "vpshufb %[kShuffleYUY2Y], %%ymm4, %%ymm4 \n" \ - "vmovdqu (%[yuy2_buf]),%%ymm0 \n" \ - "vpshufb %[kShuffleYUY2UV], %%ymm0, %%ymm0 \n" \ + "vmovdqu (%[yuy2_buf]),%%ymm3 \n" \ + "vpshufb %[kShuffleYUY2UV], %%ymm3, %%ymm3 \n" \ "lea 0x20(%[yuy2_buf]),%[yuy2_buf] \n" // Read 8 UYVY with 16 Y and upsample 8 UV to 16 UV. 
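+// UYVY stores bytes as U0 Y0 V0 Y1, so per 4 input bytes the shuffles below
+// conceptually produce:
+//   y[2*i] = src[4*i + 1];  y[2*i + 1] = src[4*i + 3];
+//   u[i]   = src[4*i + 0];  v[i]       = src[4*i + 2];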
#define READUYVY_AVX2 \ "vmovdqu (%[uyvy_buf]),%%ymm4 \n" \ "vpshufb %[kShuffleUYVYY], %%ymm4, %%ymm4 \n" \ - "vmovdqu (%[uyvy_buf]),%%ymm0 \n" \ - "vpshufb %[kShuffleUYVYUV], %%ymm0, %%ymm0 \n" \ + "vmovdqu (%[uyvy_buf]),%%ymm3 \n" \ + "vpshufb %[kShuffleUYVYUV], %%ymm3, %%ymm3 \n" \ "lea 0x20(%[uyvy_buf]),%[uyvy_buf] \n" +// TODO(fbarchard): Remove broadcastb #if defined(__x86_64__) -#define YUVTORGB_SETUP_AVX2(yuvconstants) \ - "vmovdqa (%[yuvconstants]),%%ymm8 \n" \ - "vmovdqa 32(%[yuvconstants]),%%ymm9 \n" \ - "vmovdqa 64(%[yuvconstants]),%%ymm10 \n" \ - "vmovdqa 96(%[yuvconstants]),%%ymm11 \n" \ - "vmovdqa 128(%[yuvconstants]),%%ymm12 \n" \ - "vmovdqa 160(%[yuvconstants]),%%ymm13 \n" \ - "vmovdqa 192(%[yuvconstants]),%%ymm14 \n" +#define YUVTORGB_SETUP_AVX2(yuvconstants) \ + "vpcmpeqb %%xmm13,%%xmm13,%%xmm13 \n" \ + "vmovdqa (%[yuvconstants]),%%ymm8 \n" \ + "vpsllw $7,%%xmm13,%%xmm13 \n" \ + "vmovdqa 32(%[yuvconstants]),%%ymm9 \n" \ + "vpbroadcastb %%xmm13,%%ymm13 \n" \ + "vmovdqa 64(%[yuvconstants]),%%ymm10 \n" \ + "vmovdqa 96(%[yuvconstants]),%%ymm11 \n" \ + "vmovdqa 128(%[yuvconstants]),%%ymm12 \n" + +#define YUVTORGB_SETUP_AVX512BW(yuvconstants) \ + "vpcmpeqb %%xmm13,%%xmm13,%%xmm13 \n" \ + "movdqa (%[yuvconstants]),%%xmm8 \n" \ + "vpbroadcastq %%xmm8, %%zmm8 \n" \ + "vpsllw $7,%%xmm13,%%xmm13 \n" \ + "vpbroadcastb %%xmm13,%%zmm13 \n" \ + "movq 32(%[yuvconstants]),%%xmm9 \n" \ + "vpbroadcastq %%xmm9,%%zmm9 \n" \ + "movq 64(%[yuvconstants]),%%xmm10 \n" \ + "vpbroadcastq %%xmm10,%%zmm10 \n" \ + "movq 96(%[yuvconstants]),%%xmm11 \n" \ + "vpbroadcastq %%xmm11,%%zmm11 \n" \ + "movq 128(%[yuvconstants]),%%xmm12 \n" \ + "vpbroadcastq %%xmm12,%%zmm12 \n" \ + "vmovdqu8 (%[quadsplitperm]),%%zmm16 \n" \ + "vmovdqu8 (%[dquadsplitperm]),%%zmm17 \n" \ + "vmovdqu8 (%[unperm]),%%zmm18 \n" #define YUVTORGB16_AVX2(yuvconstants) \ - "vpmaddubsw %%ymm10,%%ymm0,%%ymm2 \n" \ - "vpmaddubsw %%ymm9,%%ymm0,%%ymm1 \n" \ - "vpmaddubsw %%ymm8,%%ymm0,%%ymm0 \n" \ - "vpsubw %%ymm2,%%ymm13,%%ymm2 \n" \ - "vpsubw %%ymm1,%%ymm12,%%ymm1 \n" \ - "vpsubw %%ymm0,%%ymm11,%%ymm0 \n" \ - "vpmulhuw %%ymm14,%%ymm4,%%ymm4 \n" \ + "vpsubb %%ymm13,%%ymm3,%%ymm3 \n" \ + "vpmulhuw %%ymm11,%%ymm4,%%ymm4 \n" \ + "vpmaddubsw %%ymm3,%%ymm8,%%ymm0 \n" \ + "vpmaddubsw %%ymm3,%%ymm9,%%ymm1 \n" \ + "vpmaddubsw %%ymm3,%%ymm10,%%ymm2 \n" \ + "vpaddw %%ymm4,%%ymm12,%%ymm4 \n" \ "vpaddsw %%ymm4,%%ymm0,%%ymm0 \n" \ - "vpaddsw %%ymm4,%%ymm1,%%ymm1 \n" \ + "vpsubsw %%ymm1,%%ymm4,%%ymm1 \n" \ "vpaddsw %%ymm4,%%ymm2,%%ymm2 \n" -#define YUVTORGB_REGS_AVX2 \ - "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm14", +#define YUVTORGB16_AVX512BW(yuvconstants) \ + "vpsubb %%zmm13,%%zmm3,%%zmm3 \n" \ + "vpmulhuw %%zmm11,%%zmm4,%%zmm4 \n" \ + "vpmaddubsw %%zmm3,%%zmm8,%%zmm0 \n" \ + "vpmaddubsw %%zmm3,%%zmm9,%%zmm1 \n" \ + "vpmaddubsw %%zmm3,%%zmm10,%%zmm2 \n" \ + "vpaddw %%zmm4,%%zmm12,%%zmm4 \n" \ + "vpaddsw %%zmm4,%%zmm0,%%zmm0 \n" \ + "vpsubsw %%zmm1,%%zmm4,%%zmm1 \n" \ + "vpaddsw %%zmm4,%%zmm2,%%zmm2 \n" + +#define YUVTORGB_REGS_AVX2 "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", +#define YUVTORGB_REGS_AVX512BW \ + "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm16", "xmm17", "xmm18", #else // Convert 16 pixels: 16 UV and 16 Y. 
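+// Only xmm0-xmm7 exist on 32-bit x86, so there is no setup step here: the
+// coefficients are re-read from yuvconstants inside every loop iteration
+// instead of being held in xmm8-xmm13 as in the 64-bit path above.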
#define YUVTORGB_SETUP_AVX2(yuvconstants) #define YUVTORGB16_AVX2(yuvconstants) \ - "vpmaddubsw 64(%[yuvconstants]),%%ymm0,%%ymm2 \n" \ - "vpmaddubsw 32(%[yuvconstants]),%%ymm0,%%ymm1 \n" \ - "vpmaddubsw (%[yuvconstants]),%%ymm0,%%ymm0 \n" \ - "vmovdqu 160(%[yuvconstants]),%%ymm3 \n" \ - "vpsubw %%ymm2,%%ymm3,%%ymm2 \n" \ - "vmovdqu 128(%[yuvconstants]),%%ymm3 \n" \ - "vpsubw %%ymm1,%%ymm3,%%ymm1 \n" \ - "vmovdqu 96(%[yuvconstants]),%%ymm3 \n" \ - "vpsubw %%ymm0,%%ymm3,%%ymm0 \n" \ - "vpmulhuw 192(%[yuvconstants]),%%ymm4,%%ymm4 \n" \ + "vpcmpeqb %%xmm0,%%xmm0,%%xmm0 \n" \ + "vpsllw $7,%%xmm0,%%xmm0 \n" \ + "vpbroadcastb %%xmm0,%%ymm0 \n" \ + "vpsubb %%ymm0,%%ymm3,%%ymm3 \n" \ + "vpmulhuw 96(%[yuvconstants]),%%ymm4,%%ymm4 \n" \ + "vmovdqa (%[yuvconstants]),%%ymm0 \n" \ + "vmovdqa 32(%[yuvconstants]),%%ymm1 \n" \ + "vmovdqa 64(%[yuvconstants]),%%ymm2 \n" \ + "vpmaddubsw %%ymm3,%%ymm0,%%ymm0 \n" \ + "vpmaddubsw %%ymm3,%%ymm1,%%ymm1 \n" \ + "vpmaddubsw %%ymm3,%%ymm2,%%ymm2 \n" \ + "vmovdqa 128(%[yuvconstants]),%%ymm3 \n" \ + "vpaddw %%ymm4,%%ymm3,%%ymm4 \n" \ "vpaddsw %%ymm4,%%ymm0,%%ymm0 \n" \ - "vpaddsw %%ymm4,%%ymm1,%%ymm1 \n" \ + "vpsubsw %%ymm1,%%ymm4,%%ymm1 \n" \ "vpaddsw %%ymm4,%%ymm2,%%ymm2 \n" + #define YUVTORGB_REGS_AVX2 #endif @@ -2645,6 +3702,15 @@ void OMITFP I422ToRGBARow_SSSE3(const uint8_t* y_buf, "vpackuswb %%ymm1,%%ymm1,%%ymm1 \n" \ "vpackuswb %%ymm2,%%ymm2,%%ymm2 \n" +#define YUVTORGB_AVX512BW(yuvconstants) \ + YUVTORGB16_AVX512BW(yuvconstants) \ + "vpsraw $0x6,%%zmm0,%%zmm0 \n" \ + "vpsraw $0x6,%%zmm1,%%zmm1 \n" \ + "vpsraw $0x6,%%zmm2,%%zmm2 \n" \ + "vpackuswb %%zmm0,%%zmm0,%%zmm0 \n" \ + "vpackuswb %%zmm1,%%zmm1,%%zmm1 \n" \ + "vpackuswb %%zmm2,%%zmm2,%%zmm2 \n" + // Store 16 ARGB values. #define STOREARGB_AVX2 \ "vpunpcklbw %%ymm1,%%ymm0,%%ymm0 \n" \ @@ -2655,7 +3721,19 @@ void OMITFP I422ToRGBARow_SSSE3(const uint8_t* y_buf, "vpunpckhwd %%ymm2,%%ymm0,%%ymm0 \n" \ "vmovdqu %%ymm1,(%[dst_argb]) \n" \ "vmovdqu %%ymm0,0x20(%[dst_argb]) \n" \ - "lea 0x40(%[dst_argb]), %[dst_argb] \n" + "lea 0x40(%[dst_argb]), %[dst_argb] \n" + +// Store 32 ARGB values. +#define STOREARGB_AVX512BW \ + "vpunpcklbw %%zmm1,%%zmm0,%%zmm0 \n" \ + "vpermq %%zmm0,%%zmm18,%%zmm0 \n" \ + "vpunpcklbw %%zmm5,%%zmm2,%%zmm2 \n" \ + "vpermq %%zmm2,%%zmm18,%%zmm2 \n" \ + "vpunpcklwd %%zmm2,%%zmm0,%%zmm1 \n" \ + "vpunpckhwd %%zmm2,%%zmm0,%%zmm0 \n" \ + "vmovdqu8 %%zmm1,(%[dst_argb]) \n" \ + "vmovdqu8 %%zmm0,0x40(%[dst_argb]) \n" \ + "lea 0x80(%[dst_argb]), %[dst_argb] \n" // Store 16 AR30 values. #define STOREAR30_AVX2 \ @@ -2753,6 +3831,50 @@ void OMITFP I422ToARGBRow_AVX2(const uint8_t* y_buf, } #endif // HAS_I422TOARGBROW_AVX2 +#if defined(HAS_I422TOARGBROW_AVX512BW) +static const uint64_t kSplitQuadWords[8] = {0, 2, 2, 2, 1, 2, 2, 2}; +static const uint64_t kSplitDoubleQuadWords[8] = {0, 1, 4, 4, 2, 3, 4, 4}; +static const uint64_t kUnpermuteAVX512[8] = {0, 4, 1, 5, 2, 6, 3, 7}; + +// 32 pixels +// 16 UV values upsampled to 32 UV, mixed with 32 Y producing 32 ARGB (128 +// bytes). 
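+// Per pixel, the YUVTORGB step boils down to roughly (coefficient names are
+// only illustrative):
+//   y16 = y * 0x0101;                          // expand Y to 16 bits
+//   yb  = ((y16 * YG) >> 16) + YBIAS;          // vpmulhuw + vpaddw
+//   b   = clamp8((yb + UB*(u-128) + VB*(v-128)) >> 6);
+//   g   = clamp8((yb - UG*(u-128) - VG*(v-128)) >> 6);
+//   r   = clamp8((yb + UR*(u-128) + VR*(v-128)) >> 6);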
+void OMITFP I422ToARGBRow_AVX512BW(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile ( + YUVTORGB_SETUP_AVX512BW(yuvconstants) + "sub %[u_buf],%[v_buf] \n" + "vpcmpeqb %%xmm5,%%xmm5,%%xmm5 \n" + "vpbroadcastq %%xmm5,%%zmm5 \n" + + LABELALIGN + "1: \n" + READYUV422_AVX512BW + YUVTORGB_AVX512BW(yuvconstants) + STOREARGB_AVX512BW + "sub $0x20,%[width] \n" + "jg 1b \n" + + "vzeroupper \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [u_buf]"+r"(u_buf), // %[u_buf] + [v_buf]"+r"(v_buf), // %[v_buf] + [dst_argb]"+r"(dst_argb), // %[dst_argb] + [width]"+rm"(width) // %[width] + : [yuvconstants]"r"(yuvconstants), // %[yuvconstants] + [quadsplitperm]"r"(kSplitQuadWords), // %[quadsplitperm] + [dquadsplitperm]"r"(kSplitDoubleQuadWords), // %[dquadsplitperm] + [unperm]"r"(kUnpermuteAVX512) // %[unperm] + : "memory", "cc", YUVTORGB_REGS_AVX512BW + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5" + ); +} +#endif // HAS_I422TOARGBROW_AVX512BW + #if defined(HAS_I422TOAR30ROW_AVX2) // 16 pixels // 8 UV values upsampled to 16 UV, mixed with 16 Y producing 16 AR30 (64 bytes). @@ -2828,6 +3950,41 @@ void OMITFP I210ToARGBRow_AVX2(const uint16_t* y_buf, } #endif // HAS_I210TOARGBROW_AVX2 +#if defined(HAS_I212TOARGBROW_AVX2) +// 16 pixels +// 8 UV values upsampled to 16 UV, mixed with 16 Y producing 16 ARGB (64 bytes). +void OMITFP I212ToARGBRow_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile ( + YUVTORGB_SETUP_AVX2(yuvconstants) + "sub %[u_buf],%[v_buf] \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + + LABELALIGN + "1: \n" + READYUV212_AVX2 + YUVTORGB_AVX2(yuvconstants) + STOREARGB_AVX2 + "sub $0x10,%[width] \n" + "jg 1b \n" + + "vzeroupper \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [u_buf]"+r"(u_buf), // %[u_buf] + [v_buf]"+r"(v_buf), // %[v_buf] + [dst_argb]"+r"(dst_argb), // %[dst_argb] + [width]"+rm"(width) // %[width] + : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS_AVX2 + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5" + ); +} +#endif // HAS_I212TOARGBROW_AVX2 + #if defined(HAS_I210TOAR30ROW_AVX2) // 16 pixels // 8 UV values upsampled to 16 UV, mixed with 16 Y producing 16 AR30 (64 bytes). @@ -2863,11 +4020,239 @@ void OMITFP I210ToAR30Row_AVX2(const uint16_t* y_buf, [width]"+rm"(width) // %[width] : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] : "memory", "cc", YUVTORGB_REGS_AVX2 - "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5" + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7" ); } #endif // HAS_I210TOAR30ROW_AVX2 +#if defined(HAS_I212TOAR30ROW_AVX2) +// 16 pixels +// 8 UV values upsampled to 16 UV, mixed with 16 Y producing 16 AR30 (64 bytes). 
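+// I212 keeps 12-bit samples LSB-aligned in 16-bit words; READYUV212_AVX2
+// narrows them roughly as:
+//   u8  = clamp8(u12 >> 4);
+//   y16 = (y12 << 4) + (y12 >> 8);   // replicate Y across 16 bits
+// Each output AR30 word is then (3 << 30) | (R10 << 20) | (G10 << 10) | B10.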
+void OMITFP I212ToAR30Row_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile ( + YUVTORGB_SETUP_AVX2(yuvconstants) + "sub %[u_buf],%[v_buf] \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" // AR30 constants + "vpsrlw $14,%%ymm5,%%ymm5 \n" + "vpsllw $4,%%ymm5,%%ymm5 \n" // 2 alpha bits + "vpxor %%ymm6,%%ymm6,%%ymm6 \n" // 0 for min + "vpcmpeqb %%ymm7,%%ymm7,%%ymm7 \n" // 1023 for max + "vpsrlw $6,%%ymm7,%%ymm7 \n" + + LABELALIGN + "1: \n" + READYUV212_AVX2 + YUVTORGB16_AVX2(yuvconstants) + STOREAR30_AVX2 + "sub $0x10,%[width] \n" + "jg 1b \n" + + "vzeroupper \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [u_buf]"+r"(u_buf), // %[u_buf] + [v_buf]"+r"(v_buf), // %[v_buf] + [dst_ar30]"+r"(dst_ar30), // %[dst_ar30] + [width]"+rm"(width) // %[width] + : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS_AVX2 + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7" + ); +} +#endif // HAS_I212TOAR30ROW_AVX2 + +#if defined(HAS_I410TOARGBROW_AVX2) +// 16 pixels +// 16 UV values with 16 Y producing 16 ARGB (64 bytes). +void OMITFP I410ToARGBRow_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile ( + YUVTORGB_SETUP_AVX2(yuvconstants) + "sub %[u_buf],%[v_buf] \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + + LABELALIGN + "1: \n" + READYUV410_AVX2 + YUVTORGB_AVX2(yuvconstants) + STOREARGB_AVX2 + "sub $0x10,%[width] \n" + "jg 1b \n" + "vzeroupper \n" + + : [y_buf]"+r"(y_buf), // %[y_buf] + [u_buf]"+r"(u_buf), // %[u_buf] + [v_buf]"+r"(v_buf), // %[v_buf] + [dst_argb]"+r"(dst_argb), // %[dst_argb] + [width]"+rm"(width) // %[width] + : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS_AVX2 + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5" + ); +} +#endif // HAS_I410TOARGBROW_AVX2 + +#if defined(HAS_I210ALPHATOARGBROW_AVX2) +// 16 pixels +// 8 UV, 16 Y and 16 A producing 16 ARGB (64 bytes). +void OMITFP I210AlphaToARGBRow_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + const uint16_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile( + YUVTORGB_SETUP_AVX2( + yuvconstants) "sub %[u_buf],%[v_buf] \n" + + LABELALIGN "1: \n" READYUVA210_AVX2 + YUVTORGB_AVX2(yuvconstants) STOREARGB_AVX2 + "subl $0x10,%[width] \n" + "jg 1b \n" + "vzeroupper \n" + + : [y_buf] "+r"(y_buf), // %[y_buf] + [u_buf] "+r"(u_buf), // %[u_buf] + [v_buf] "+r"(v_buf), // %[v_buf] + [a_buf] "+r"(a_buf), // %[a_buf] + [dst_argb] "+r"(dst_argb), // %[dst_argb] +#if defined(__i386__) + [width] "+m"(width) // %[width] +#else + [width] "+rm"(width) // %[width] +#endif + : [yuvconstants] "r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS_AVX2 "xmm0", "xmm1", "xmm2", "xmm3", + "xmm4", "xmm5"); +} +#endif // HAS_I210TOARGBROW_AVX2 + +#if defined(HAS_I410ALPHATOARGBROW_AVX2) +// 16 pixels +// 16 UV, 16 Y and 16 A producing 16 ARGB (64 bytes). 
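+// The 10-bit alpha plane is narrowed the same way as the chroma planes,
+// i.e. roughly a8 = clamp8(a10 >> 2) per pixel.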
+void OMITFP I410AlphaToARGBRow_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + const uint16_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile( + YUVTORGB_SETUP_AVX2( + yuvconstants) "sub %[u_buf],%[v_buf] \n" + + LABELALIGN "1: \n" READYUVA410_AVX2 + YUVTORGB_AVX2(yuvconstants) STOREARGB_AVX2 + "subl $0x10,%[width] \n" + "jg 1b \n" + "vzeroupper \n" + + : [y_buf] "+r"(y_buf), // %[y_buf] + [u_buf] "+r"(u_buf), // %[u_buf] + [v_buf] "+r"(v_buf), // %[v_buf] + [a_buf] "+r"(a_buf), // %[a_buf] + [dst_argb] "+r"(dst_argb), // %[dst_argb] +#if defined(__i386__) + [width] "+m"(width) // %[width] +#else + [width] "+rm"(width) // %[width] +#endif + : [yuvconstants] "r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS_AVX2 "xmm0", "xmm1", "xmm2", "xmm3", + "xmm4", "xmm5"); +} +#endif // HAS_I410TOARGBROW_AVX2 + +#if defined(HAS_I410TOAR30ROW_AVX2) +// 16 pixels +// 16 UV values with 16 Y producing 16 AR30 (64 bytes). +void OMITFP I410ToAR30Row_AVX2(const uint16_t* y_buf, + const uint16_t* u_buf, + const uint16_t* v_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile ( + YUVTORGB_SETUP_AVX2(yuvconstants) + "sub %[u_buf],%[v_buf] \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" // AR30 constants + "vpsrlw $14,%%ymm5,%%ymm5 \n" + "vpsllw $4,%%ymm5,%%ymm5 \n" // 2 alpha bits + "vpxor %%ymm6,%%ymm6,%%ymm6 \n" // 0 for min + "vpcmpeqb %%ymm7,%%ymm7,%%ymm7 \n" // 1023 for max + "vpsrlw $6,%%ymm7,%%ymm7 \n" + + LABELALIGN + "1: \n" + READYUV410_AVX2 + YUVTORGB16_AVX2(yuvconstants) + STOREAR30_AVX2 + "sub $0x10,%[width] \n" + "jg 1b \n" + + "vzeroupper \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [u_buf]"+r"(u_buf), // %[u_buf] + [v_buf]"+r"(v_buf), // %[v_buf] + [dst_ar30]"+r"(dst_ar30), // %[dst_ar30] + [width]"+rm"(width) // %[width] + : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS_AVX2 + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7" + ); +} +#endif // HAS_I410TOAR30ROW_AVX2 + +#if defined(HAS_I444ALPHATOARGBROW_AVX2) +// 16 pixels +// 16 UV values with 16 Y and 16 A producing 16 ARGB. +void OMITFP I444AlphaToARGBRow_AVX2(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + const uint8_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + // clang-format off + asm volatile ( + YUVTORGB_SETUP_AVX2(yuvconstants) + "sub %[u_buf],%[v_buf] \n" + + LABELALIGN + "1: \n" + READYUVA444_AVX2 + YUVTORGB_AVX2(yuvconstants) + STOREARGB_AVX2 + "subl $0x10,%[width] \n" + "jg 1b \n" + "vzeroupper \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [u_buf]"+r"(u_buf), // %[u_buf] + [v_buf]"+r"(v_buf), // %[v_buf] + [a_buf]"+r"(a_buf), // %[a_buf] + [dst_argb]"+r"(dst_argb), // %[dst_argb] +#if defined(__i386__) + [width]"+m"(width) // %[width] +#else + [width]"+rm"(width) // %[width] +#endif + : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS_AVX2 + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5" + ); + // clang-format on +} +#endif // HAS_I444ALPHATOARGBROW_AVX2 + #if defined(HAS_I422ALPHATOARGBROW_AVX2) // 16 pixels // 8 UV values upsampled to 16 UV, mixed with 16 Y and 16 A producing 16 ARGB. 
@@ -3086,30 +4471,170 @@ void OMITFP UYVYToARGBRow_AVX2(const uint8_t* uyvy_buf, } #endif // HAS_UYVYTOARGBROW_AVX2 -#ifdef HAS_I400TOARGBROW_SSE2 -void I400ToARGBRow_SSE2(const uint8_t* y_buf, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width) { - asm volatile( - "movdqa 192(%3),%%xmm2 \n" // yg = 18997 = 1.164 - "movdqa 224(%3),%%xmm3 \n" // ygb = 1160 = 1.164 * 16 - "pcmpeqb %%xmm4,%%xmm4 \n" // 0xff000000 - "pslld $0x18,%%xmm4 \n" +#if defined(HAS_P210TOARGBROW_AVX2) +// 16 pixels. +// 8 UV values upsampled to 16 UV, mixed with 16 Y producing 16 ARGB (64 bytes). +void OMITFP P210ToARGBRow_AVX2(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + // clang-format off + asm volatile ( + YUVTORGB_SETUP_AVX2(yuvconstants) + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" - LABELALIGN + LABELALIGN "1: \n" - // Step 1: Scale Y contribution to 8 G values. G = (y - 16) * 1.164 - "movq (%0),%%xmm0 \n" - "lea 0x8(%0),%0 \n" - "punpcklbw %%xmm0,%%xmm0 \n" - "pmulhuw %%xmm2,%%xmm0 \n" - "paddsw %%xmm3,%%xmm0 \n" - "psraw $6, %%xmm0 \n" - "packuswb %%xmm0,%%xmm0 \n" - - // Step 2: Weave into ARGB - "punpcklbw %%xmm0,%%xmm0 \n" + READP210_AVX2 + YUVTORGB_AVX2(yuvconstants) + STOREARGB_AVX2 + "sub $0x10,%[width] \n" + "jg 1b \n" + "vzeroupper \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [uv_buf]"+r"(uv_buf), // %[uv_buf] + [dst_argb]"+r"(dst_argb), // %[dst_argb] + [width]"+rm"(width) // %[width] + : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS_AVX2 + "xmm0", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5" + ); + // clang-format on +} +#endif // HAS_P210TOARGBROW_AVX2 + +#if defined(HAS_P410TOARGBROW_AVX2) +// 16 pixels. +// 8 UV values upsampled to 16 UV, mixed with 16 Y producing 16 ARGB (64 bytes). +void OMITFP P410ToARGBRow_AVX2(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + // clang-format off + asm volatile ( + YUVTORGB_SETUP_AVX2(yuvconstants) + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" + + LABELALIGN + "1: \n" + READP410_AVX2 + YUVTORGB_AVX2(yuvconstants) + STOREARGB_AVX2 + "sub $0x10,%[width] \n" + "jg 1b \n" + "vzeroupper \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [uv_buf]"+r"(uv_buf), // %[uv_buf] + [dst_argb]"+r"(dst_argb), // %[dst_argb] + [width]"+rm"(width) // %[width] + : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS_AVX2 + "xmm0", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5" + ); + // clang-format on +} +#endif // HAS_P410TOARGBROW_AVX2 + +#if defined(HAS_P210TOAR30ROW_AVX2) +// 16 pixels +// 16 UV values with 16 Y producing 16 AR30 (64 bytes). 
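+// P210 keeps its 10-bit samples MSB-aligned, so READP210_AVX2 simply takes
+// the high byte of each UV word (uv >> 8) and feeds the 16-bit Y values to
+// vpmulhuw unchanged.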
+void OMITFP P210ToAR30Row_AVX2(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile ( + YUVTORGB_SETUP_AVX2(yuvconstants) + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" // AR30 constants + "vpsrlw $14,%%ymm5,%%ymm5 \n" + "vpsllw $4,%%ymm5,%%ymm5 \n" // 2 alpha bits + "vpxor %%ymm6,%%ymm6,%%ymm6 \n" // 0 for min + "vpcmpeqb %%ymm7,%%ymm7,%%ymm7 \n" // 1023 for max + "vpsrlw $6,%%ymm7,%%ymm7 \n" + + LABELALIGN + "1: \n" + READP210_AVX2 + YUVTORGB16_AVX2(yuvconstants) + STOREAR30_AVX2 + "sub $0x10,%[width] \n" + "jg 1b \n" + + "vzeroupper \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [uv_buf]"+r"(uv_buf), // %[uv_buf] + [dst_ar30]"+r"(dst_ar30), // %[dst_ar30] + [width]"+rm"(width) // %[width] + : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS_AVX2 + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7" + ); +} +#endif // HAS_P210TOAR30ROW_AVX2 + +#if defined(HAS_P410TOAR30ROW_AVX2) +// 16 pixels +// 16 UV values with 16 Y producing 16 AR30 (64 bytes). +void OMITFP P410ToAR30Row_AVX2(const uint16_t* y_buf, + const uint16_t* uv_buf, + uint8_t* dst_ar30, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile ( + YUVTORGB_SETUP_AVX2(yuvconstants) + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" // AR30 constants + "vpsrlw $14,%%ymm5,%%ymm5 \n" + "vpsllw $4,%%ymm5,%%ymm5 \n" // 2 alpha bits + "vpxor %%ymm6,%%ymm6,%%ymm6 \n" // 0 for min + "vpcmpeqb %%ymm7,%%ymm7,%%ymm7 \n" // 1023 for max + "vpsrlw $6,%%ymm7,%%ymm7 \n" + + LABELALIGN + "1: \n" + READP410_AVX2 + YUVTORGB16_AVX2(yuvconstants) + STOREAR30_AVX2 + "sub $0x10,%[width] \n" + "jg 1b \n" + + "vzeroupper \n" + : [y_buf]"+r"(y_buf), // %[y_buf] + [uv_buf]"+r"(uv_buf), // %[uv_buf] + [dst_ar30]"+r"(dst_ar30), // %[dst_ar30] + [width]"+rm"(width) // %[width] + : [yuvconstants]"r"(yuvconstants) // %[yuvconstants] + : "memory", "cc", YUVTORGB_REGS_AVX2 + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7" + ); +} +#endif // HAS_P410TOAR30ROW_AVX2 + +#ifdef HAS_I400TOARGBROW_SSE2 +void I400ToARGBRow_SSE2(const uint8_t* y_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile( + "movdqa 96(%3),%%xmm2 \n" // yg = 18997 = 1.164 + "movdqa 128(%3),%%xmm3 \n" // ygb = 1160 = 1.164 * 16 + "pcmpeqb %%xmm4,%%xmm4 \n" // 0xff000000 + "pslld $0x18,%%xmm4 \n" + + LABELALIGN + "1: \n" + // Step 1: Scale Y contribution to 8 G values. 
G = (y - 16) * 1.164 + "movq (%0),%%xmm0 \n" + "lea 0x8(%0),%0 \n" + "punpcklbw %%xmm0,%%xmm0 \n" + "pmulhuw %%xmm2,%%xmm0 \n" + "paddsw %%xmm3,%%xmm0 \n" + "psraw $6, %%xmm0 \n" + "packuswb %%xmm0,%%xmm0 \n" + + // Step 2: Weave into ARGB + "punpcklbw %%xmm0,%%xmm0 \n" "movdqa %%xmm0,%%xmm1 \n" "punpcklwd %%xmm0,%%xmm0 \n" "punpckhwd %%xmm1,%%xmm1 \n" @@ -3137,8 +4662,8 @@ void I400ToARGBRow_AVX2(const uint8_t* y_buf, const struct YuvConstants* yuvconstants, int width) { asm volatile( - "vmovdqa 192(%3),%%ymm2 \n" // yg = 18997 = 1.164 - "vmovdqa 224(%3),%%ymm3 \n" // ygb = -1160 = 1.164*16 + "vmovdqa 96(%3),%%ymm2 \n" // yg = 18997 = 1.164 + "vmovdqa 128(%3),%%ymm3 \n" // ygb = -1160 = 1.164*16 "vpcmpeqb %%ymm4,%%ymm4,%%ymm4 \n" // 0xff000000 "vpslld $0x18,%%ymm4,%%ymm4 \n" @@ -3482,6 +5007,141 @@ void SplitUVRow_SSE2(const uint8_t* src_uv, } #endif // HAS_SPLITUVROW_SSE2 +#ifdef HAS_DETILEROW_SSE2 +void DetileRow_SSE2(const uint8_t* src, + ptrdiff_t src_tile_stride, + uint8_t* dst, + int width) { + asm volatile( + "1: \n" + "movdqu (%0),%%xmm0 \n" + "sub $0x10,%2 \n" + "lea (%0,%3),%0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "jg 1b \n" + : "+r"(src), // %0 + "+r"(dst), // %1 + "+r"(width) // %2 + : "r"(src_tile_stride) // %3 + : "cc", "memory", "xmm0"); +} +#endif // HAS_DETILEROW_SSE2 + +#ifdef HAS_DETILEROW_16_SSE2 +void DetileRow_16_SSE2(const uint16_t* src, + ptrdiff_t src_tile_stride, + uint16_t* dst, + int width) { + asm volatile( + "1: \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "lea (%0,%3,2),%0 \n" + "movdqu %%xmm0,(%1) \n" + "movdqu %%xmm1,0x10(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" + : "+r"(src), // %0 + "+r"(dst), // %1 + "+r"(width) // %2 + : "r"(src_tile_stride) // %3 + : "cc", "memory", "xmm0", "xmm1"); +} +#endif // HAS_DETILEROW_SSE2 + +#ifdef HAS_DETILEROW_16_AVX +void DetileRow_16_AVX(const uint16_t* src, + ptrdiff_t src_tile_stride, + uint16_t* dst, + int width) { + asm volatile( + "1: \n" + "vmovdqu (%0),%%ymm0 \n" + "lea (%0,%3,2),%0 \n" + "vmovdqu %%ymm0,(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src), // %0 + "+r"(dst), // %1 + "+r"(width) // %2 + : "r"(src_tile_stride) // %3 + : "cc", "memory", "xmm0"); +} +#endif // HAS_DETILEROW_AVX + +#ifdef HAS_DETILETOYUY2_SSE2 +// Read 16 Y, 8 UV, and write 8 YUYV. +void DetileToYUY2_SSE2(const uint8_t* src_y, + ptrdiff_t src_y_tile_stride, + const uint8_t* src_uv, + ptrdiff_t src_uv_tile_stride, + uint8_t* dst_yuy2, + int width) { + asm volatile( + "1: \n" + "movdqu (%0),%%xmm0 \n" // Load 16 Y + "sub $0x10,%3 \n" + "lea (%0,%4),%0 \n" + "movdqu (%1),%%xmm1 \n" // Load 8 UV + "lea (%1,%5),%1 \n" + "movdqu %%xmm0,%%xmm2 \n" + "punpcklbw %%xmm1,%%xmm0 \n" + "punpckhbw %%xmm1,%%xmm2 \n" + "movdqu %%xmm0,(%2) \n" + "movdqu %%xmm2,0x10(%2) \n" + "lea 0x20(%2),%2 \n" + "jg 1b \n" + : "+r"(src_y), // %0 + "+r"(src_uv), // %1 + "+r"(dst_yuy2), // %2 + "+r"(width) // %3 + : "r"(src_y_tile_stride), // %4 + "r"(src_uv_tile_stride) // %5 + : "cc", "memory", "xmm0", "xmm1", "xmm2" // Clobber list + ); +} +#endif + +#ifdef HAS_DETILESPLITUVROW_SSSE3 +// TODO(greenjustin): Look into generating these constants instead of loading +// them since this can cause branch mispredicts for fPIC code on 32-bit +// machines. +static const uvec8 kDeinterlaceUV = {0, 2, 4, 6, 8, 10, 12, 14, + 1, 3, 5, 7, 9, 11, 13, 15}; + +// TODO(greenjustin): Research alternatives to pshufb, since pshufb can be very +// slow on older SSE2 processors. 
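+// Per 16-byte tile row this is a plain de-interleave,
+//   dst_u[i] = src_uv[2*i];  dst_v[i] = src_uv[2*i + 1];   // i = 0..7
+// with src_uv advanced by src_tile_stride rather than by 16 bytes.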
+void DetileSplitUVRow_SSSE3(const uint8_t* src_uv, + ptrdiff_t src_tile_stride, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + asm volatile( + "movdqu %4,%%xmm1 \n" + "1: \n" + "movdqu (%0),%%xmm0 \n" + "lea (%0, %5),%0 \n" + "pshufb %%xmm1,%%xmm0 \n" + "movq %%xmm0,(%1) \n" + "lea 0x8(%1),%1 \n" + "movhps %%xmm0,(%2) \n" + "lea 0x8(%2),%2 \n" + "sub $0x10,%3 \n" + "jg 1b \n" + : "+r"(src_uv), // %0 + "+r"(dst_u), // %1 + "+r"(dst_v), // %2 + "+r"(width) // %3 + : "m"(kDeinterlaceUV), // %4 + "r"(src_tile_stride) // %5 + : "cc", "memory", "xmm0", "xmm1"); +} +#endif // HAS_DETILESPLITUVROW_SSSE3 + #ifdef HAS_MERGEUVROW_AVX2 void MergeUVRow_AVX2(const uint8_t* src_u, const uint8_t* src_v, @@ -3546,22 +5206,16 @@ void MergeUVRow_SSE2(const uint8_t* src_u, } #endif // HAS_MERGEUVROW_SSE2 -// Use scale to convert lsb formats to msb, depending how many bits there are: -// 128 = 9 bits -// 64 = 10 bits -// 16 = 12 bits -// 1 = 16 bits #ifdef HAS_MERGEUVROW_16_AVX2 void MergeUVRow_16_AVX2(const uint16_t* src_u, const uint16_t* src_v, uint16_t* dst_uv, - int scale, + int depth, int width) { + depth = 16 - depth; // clang-format off asm volatile ( "vmovd %4,%%xmm3 \n" - "vpunpcklwd %%xmm3,%%xmm3,%%xmm3 \n" - "vbroadcastss %%xmm3,%%ymm3 \n" "sub %0,%1 \n" // 16 pixels per loop. @@ -3571,8 +5225,8 @@ void MergeUVRow_16_AVX2(const uint16_t* src_u, "vmovdqu (%0,%1,1),%%ymm1 \n" "add $0x20,%0 \n" - "vpmullw %%ymm3,%%ymm0,%%ymm0 \n" - "vpmullw %%ymm3,%%ymm1,%%ymm1 \n" + "vpsllw %%xmm3,%%ymm0,%%ymm0 \n" + "vpsllw %%xmm3,%%ymm1,%%ymm1 \n" "vpunpcklwd %%ymm1,%%ymm0,%%ymm2 \n" // mutates "vpunpckhwd %%ymm1,%%ymm0,%%ymm0 \n" "vextractf128 $0x0,%%ymm2,(%2) \n" @@ -3587,12 +5241,59 @@ void MergeUVRow_16_AVX2(const uint16_t* src_u, "+r"(src_v), // %1 "+r"(dst_uv), // %2 "+r"(width) // %3 - : "r"(scale) // %4 + : "r"(depth) // %4 : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3"); // clang-format on } #endif // HAS_MERGEUVROW_AVX2 +#ifdef HAS_SPLITUVROW_16_AVX2 +const uvec8 kSplitUVShuffle16 = {0, 1, 4, 5, 8, 9, 12, 13, + 2, 3, 6, 7, 10, 11, 14, 15}; +void SplitUVRow_16_AVX2(const uint16_t* src_uv, + uint16_t* dst_u, + uint16_t* dst_v, + int depth, + int width) { + depth = 16 - depth; + // clang-format off + asm volatile ( + "vmovd %4,%%xmm3 \n" + "vbroadcastf128 %5,%%ymm4 \n" + "sub %1,%2 \n" + + // 16 pixels per loop. + LABELALIGN + "1: \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "add $0x40,%0 \n" + + "vpsrlw %%xmm3,%%ymm0,%%ymm0 \n" + "vpsrlw %%xmm3,%%ymm1,%%ymm1 \n" + "vpshufb %%ymm4,%%ymm0,%%ymm0 \n" + "vpshufb %%ymm4,%%ymm1,%%ymm1 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm1,%%ymm1 \n" + "vextractf128 $0x0,%%ymm0,(%1) \n" + "vextractf128 $0x0,%%ymm1,0x10(%1) \n" + "vextractf128 $0x1,%%ymm0,(%1,%2) \n" + "vextractf128 $0x1,%%ymm1,0x10(%1,%2) \n" + "add $0x20,%1 \n" + "sub $0x10,%3 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_uv), // %0 + "+r"(dst_u), // %1 + "+r"(dst_v), // %2 + "+r"(width) // %3 + : "r"(depth), // %4 + "m"(kSplitUVShuffle16) // %5 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4"); + // clang-format on +} +#endif // HAS_SPLITUVROW_16_AVX2 + // Use scale to convert lsb formats to msb, depending how many bits there are: // 128 = 9 bits // 64 = 10 bits @@ -3610,7 +5311,7 @@ void MultiplyRow_16_AVX2(const uint16_t* src_y, "vbroadcastss %%xmm3,%%ymm3 \n" "sub %0,%1 \n" - // 16 pixels per loop. + // 32 pixels per loop. 
LABELALIGN "1: \n" "vmovdqu (%0),%%ymm0 \n" @@ -3632,6 +5333,46 @@ void MultiplyRow_16_AVX2(const uint16_t* src_y, } #endif // HAS_MULTIPLYROW_16_AVX2 +// Use scale to convert msb formats to lsb, depending how many bits there are: +// 512 = 9 bits +// 1024 = 10 bits +// 4096 = 12 bits +// 65536 = 16 bits +#ifdef HAS_DIVIDEROW_16_AVX2 +void DivideRow_16_AVX2(const uint16_t* src_y, + uint16_t* dst_y, + int scale, + int width) { + // clang-format off + asm volatile ( + "vmovd %3,%%xmm3 \n" + "vpunpcklwd %%xmm3,%%xmm3,%%xmm3 \n" + "vbroadcastss %%xmm3,%%ymm3 \n" + "sub %0,%1 \n" + + // 32 pixels per loop. + LABELALIGN + "1: \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "vpmulhuw %%ymm3,%%ymm0,%%ymm0 \n" + "vpmulhuw %%ymm3,%%ymm1,%%ymm1 \n" + "vmovdqu %%ymm0,(%0,%1) \n" + "vmovdqu %%ymm1,0x20(%0,%1) \n" + "add $0x40,%0 \n" + "sub $0x20,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_y), // %0 + "+r"(dst_y), // %1 + "+r"(width), // %2 + "+r"(scale) // %3 + : + : "memory", "cc", "xmm0", "xmm1", "xmm3"); + // clang-format on +} +#endif // HAS_MULTIPLYROW_16_AVX2 + // Use scale to convert lsb formats to msb, depending how many bits there are: // 32768 = 9 bits // 16384 = 10 bits @@ -3778,195 +5519,931 @@ void Convert8To16Row_AVX2(const uint8_t* src_y, #endif // HAS_CONVERT8TO16ROW_AVX2 #ifdef HAS_SPLITRGBROW_SSSE3 - // Shuffle table for converting RGB to Planar. -static const uvec8 kShuffleMaskRGBToR0 = {0u, 3u, 6u, 9u, 12u, 15u, - 128u, 128u, 128u, 128u, 128u, 128u, - 128u, 128u, 128u, 128u}; -static const uvec8 kShuffleMaskRGBToR1 = {128u, 128u, 128u, 128u, 128u, 128u, - 2u, 5u, 8u, 11u, 14u, 128u, - 128u, 128u, 128u, 128u}; -static const uvec8 kShuffleMaskRGBToR2 = {128u, 128u, 128u, 128u, 128u, 128u, - 128u, 128u, 128u, 128u, 128u, 1u, - 4u, 7u, 10u, 13u}; - -static const uvec8 kShuffleMaskRGBToG0 = {1u, 4u, 7u, 10u, 13u, 128u, - 128u, 128u, 128u, 128u, 128u, 128u, - 128u, 128u, 128u, 128u}; -static const uvec8 kShuffleMaskRGBToG1 = {128u, 128u, 128u, 128u, 128u, 0u, - 3u, 6u, 9u, 12u, 15u, 128u, - 128u, 128u, 128u, 128u}; -static const uvec8 kShuffleMaskRGBToG2 = {128u, 128u, 128u, 128u, 128u, 128u, - 128u, 128u, 128u, 128u, 128u, 2u, - 5u, 8u, 11u, 14u}; - -static const uvec8 kShuffleMaskRGBToB0 = {2u, 5u, 8u, 11u, 14u, 128u, - 128u, 128u, 128u, 128u, 128u, 128u, - 128u, 128u, 128u, 128u}; -static const uvec8 kShuffleMaskRGBToB1 = {128u, 128u, 128u, 128u, 128u, 1u, - 4u, 7u, 10u, 13u, 128u, 128u, - 128u, 128u, 128u, 128u}; -static const uvec8 kShuffleMaskRGBToB2 = {128u, 128u, 128u, 128u, 128u, 128u, - 128u, 128u, 128u, 128u, 0u, 3u, - 6u, 9u, 12u, 15u}; +static const uvec8 kSplitRGBShuffle[9] = { + {0u, 3u, 6u, 9u, 12u, 15u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, + 128u, 128u}, + {128u, 128u, 128u, 128u, 128u, 128u, 2u, 5u, 8u, 11u, 14u, 128u, 128u, 128u, + 128u, 128u}, + {128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 1u, 4u, + 7u, 10u, 13u}, + {1u, 4u, 7u, 10u, 13u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, + 128u, 128u}, + {128u, 128u, 128u, 128u, 128u, 0u, 3u, 6u, 9u, 12u, 15u, 128u, 128u, 128u, + 128u, 128u}, + {128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 2u, 5u, + 8u, 11u, 14u}, + {2u, 5u, 8u, 11u, 14u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, + 128u, 128u}, + {128u, 128u, 128u, 128u, 128u, 1u, 4u, 7u, 10u, 13u, 128u, 128u, 128u, 128u, + 128u, 128u}, + {128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, 0u, 3u, 6u, 9u, + 12u, 15u}}; + +void SplitRGBRow_SSSE3(const uint8_t* src_rgb, 
+ uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width) { + asm volatile( + + LABELALIGN + "1: \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "pshufb 0(%5), %%xmm0 \n" + "pshufb 16(%5), %%xmm1 \n" + "pshufb 32(%5), %%xmm2 \n" + "por %%xmm1,%%xmm0 \n" + "por %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "pshufb 48(%5),%%xmm0 \n" + "pshufb 64(%5),%%xmm1 \n" + "pshufb 80(%5), %%xmm2 \n" + "por %%xmm1,%%xmm0 \n" + "por %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%2) \n" + "lea 0x10(%2),%2 \n" + + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x20(%0),%%xmm2 \n" + "pshufb 96(%5), %%xmm0 \n" + "pshufb 112(%5), %%xmm1 \n" + "pshufb 128(%5), %%xmm2 \n" + "por %%xmm1,%%xmm0 \n" + "por %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%3) \n" + "lea 0x10(%3),%3 \n" + "lea 0x30(%0),%0 \n" + "sub $0x10,%4 \n" + "jg 1b \n" + : "+r"(src_rgb), // %0 + "+r"(dst_r), // %1 + "+r"(dst_g), // %2 + "+r"(dst_b), // %3 + "+r"(width) // %4 + : "r"(&kSplitRGBShuffle[0]) // %5 + : "memory", "cc", "xmm0", "xmm1", "xmm2"); +} +#endif // HAS_SPLITRGBROW_SSSE3 + +#ifdef HAS_MERGERGBROW_SSSE3 +// Shuffle table for converting Planar to RGB. +static const uvec8 kMergeRGBShuffle[9] = { + {0u, 128u, 128u, 1u, 128u, 128u, 2u, 128u, 128u, 3u, 128u, 128u, 4u, 128u, + 128u, 5u}, + {128u, 0u, 128u, 128u, 1u, 128u, 128u, 2u, 128u, 128u, 3u, 128u, 128u, 4u, + 128u, 128u}, + {128u, 128u, 0u, 128u, 128u, 1u, 128u, 128u, 2u, 128u, 128u, 3u, 128u, 128u, + 4u, 128u}, + {128u, 128u, 6u, 128u, 128u, 7u, 128u, 128u, 8u, 128u, 128u, 9u, 128u, 128u, + 10u, 128u}, + {5u, 128u, 128u, 6u, 128u, 128u, 7u, 128u, 128u, 8u, 128u, 128u, 9u, 128u, + 128u, 10u}, + {128u, 5u, 128u, 128u, 6u, 128u, 128u, 7u, 128u, 128u, 8u, 128u, 128u, 9u, + 128u, 128u}, + {128u, 11u, 128u, 128u, 12u, 128u, 128u, 13u, 128u, 128u, 14u, 128u, 128u, + 15u, 128u, 128u}, + {128u, 128u, 11u, 128u, 128u, 12u, 128u, 128u, 13u, 128u, 128u, 14u, 128u, + 128u, 15u, 128u}, + {10u, 128u, 128u, 11u, 128u, 128u, 12u, 128u, 128u, 13u, 128u, 128u, 14u, + 128u, 128u, 15u}}; + +void MergeRGBRow_SSSE3(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + uint8_t* dst_rgb, + int width) { + asm volatile( + + LABELALIGN + "1: \n" + "movdqu (%0),%%xmm0 \n" + "movdqu (%1),%%xmm1 \n" + "movdqu (%2),%%xmm2 \n" + "pshufb (%5), %%xmm0 \n" + "pshufb 16(%5), %%xmm1 \n" + "pshufb 32(%5), %%xmm2 \n" + "por %%xmm1,%%xmm0 \n" + "por %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%3) \n" + + "movdqu (%0),%%xmm0 \n" + "movdqu (%1),%%xmm1 \n" + "movdqu (%2),%%xmm2 \n" + "pshufb 48(%5), %%xmm0 \n" + "pshufb 64(%5), %%xmm1 \n" + "pshufb 80(%5), %%xmm2 \n" + "por %%xmm1,%%xmm0 \n" + "por %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,16(%3) \n" + + "movdqu (%0),%%xmm0 \n" + "movdqu (%1),%%xmm1 \n" + "movdqu (%2),%%xmm2 \n" + "pshufb 96(%5), %%xmm0 \n" + "pshufb 112(%5), %%xmm1 \n" + "pshufb 128(%5), %%xmm2 \n" + "por %%xmm1,%%xmm0 \n" + "por %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,32(%3) \n" + + "lea 0x10(%0),%0 \n" + "lea 0x10(%1),%1 \n" + "lea 0x10(%2),%2 \n" + "lea 0x30(%3),%3 \n" + "sub $0x10,%4 \n" + "jg 1b \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_rgb), // %3 + "+r"(width) // %4 + : "r"(&kMergeRGBShuffle[0]) // %5 + : "memory", "cc", "xmm0", "xmm1", "xmm2"); +} +#endif // HAS_MERGERGBROW_SSSE3 + +#ifdef HAS_MERGEARGBROW_SSE2 +void MergeARGBRow_SSE2(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* 
src_b, + const uint8_t* src_a, + uint8_t* dst_argb, + int width) { + asm volatile( + + "sub %0,%1 \n" + "sub %0,%2 \n" + "sub %0,%3 \n" + + LABELALIGN + "1: \n" + + "movq (%0,%2),%%xmm0 \n" // B + "movq (%0),%%xmm1 \n" // R + "movq (%0,%1),%%xmm2 \n" // G + "punpcklbw %%xmm1,%%xmm0 \n" // BR + "movq (%0,%3),%%xmm1 \n" // A + "punpcklbw %%xmm1,%%xmm2 \n" // GA + "movdqa %%xmm0,%%xmm1 \n" // BR + "punpckhbw %%xmm2,%%xmm1 \n" // BGRA (hi) + "punpcklbw %%xmm2,%%xmm0 \n" // BGRA (lo) + "movdqu %%xmm0,(%4) \n" + "movdqu %%xmm1,16(%4) \n" + + "lea 8(%0),%0 \n" + "lea 32(%4),%4 \n" + "sub $0x8,%5 \n" + "jg 1b \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(src_a), // %3 + "+r"(dst_argb), // %4 + "+r"(width) // %5 + : + : "memory", "cc", "xmm0", "xmm1", "xmm2"); +} +#endif + +#ifdef HAS_MERGEXRGBROW_SSE2 +void MergeXRGBRow_SSE2(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + uint8_t* dst_argb, + int width) { + asm volatile( + + LABELALIGN + "1: \n" + + "movq (%2),%%xmm0 \n" // B + "movq (%0),%%xmm1 \n" // R + "movq (%1),%%xmm2 \n" // G + "punpcklbw %%xmm1,%%xmm0 \n" // BR + "pcmpeqd %%xmm1,%%xmm1 \n" // A(255) + "punpcklbw %%xmm1,%%xmm2 \n" // GA + "movdqa %%xmm0,%%xmm1 \n" // BR + "punpckhbw %%xmm2,%%xmm1 \n" // BGRA (hi) + "punpcklbw %%xmm2,%%xmm0 \n" // BGRA (lo) + "movdqu %%xmm0,(%3) \n" + "movdqu %%xmm1,16(%3) \n" + + "lea 8(%0),%0 \n" + "lea 8(%1),%1 \n" + "lea 8(%2),%2 \n" + "lea 32(%3),%3 \n" + "sub $0x8,%4 \n" + "jg 1b \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_argb), // %3 + "+r"(width) // %4 + : + : "memory", "cc", "xmm0", "xmm1", "xmm2"); +} +#endif // HAS_MERGEARGBROW_SSE2 + +#ifdef HAS_MERGEARGBROW_AVX2 +void MergeARGBRow_AVX2(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + const uint8_t* src_a, + uint8_t* dst_argb, + int width) { + asm volatile( + + "sub %0,%1 \n" + "sub %0,%2 \n" + "sub %0,%3 \n" + + LABELALIGN + "1: \n" + + "vmovdqu (%0,%2),%%xmm0 \n" // B + "vmovdqu (%0,%1),%%xmm1 \n" // R + "vinserti128 $1,(%0),%%ymm0,%%ymm0 \n" // G + "vinserti128 $1,(%0,%3),%%ymm1,%%ymm1 \n" // A + "vpunpckhbw %%ymm1,%%ymm0,%%ymm2 \n" + "vpunpcklbw %%ymm1,%%ymm0,%%ymm0 \n" + "vperm2i128 $0x31,%%ymm2,%%ymm0,%%ymm1 \n" + "vperm2i128 $0x20,%%ymm2,%%ymm0,%%ymm0 \n" + "vpunpckhwd %%ymm1,%%ymm0,%%ymm2 \n" + "vpunpcklwd %%ymm1,%%ymm0,%%ymm0 \n" + "vperm2i128 $0x31,%%ymm2,%%ymm0,%%ymm1 \n" + "vperm2i128 $0x20,%%ymm2,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%4) \n" // First 8 + "vmovdqu %%ymm1,32(%4) \n" // Next 8 + + "lea 16(%0),%0 \n" + "lea 64(%4),%4 \n" + "sub $0x10,%5 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(src_a), // %3 + "+r"(dst_argb), // %4 + "+r"(width) // %5 + : + : "memory", "cc", "xmm0", "xmm1", "xmm2"); +} +#endif + +#ifdef HAS_MERGEXRGBROW_AVX2 +void MergeXRGBRow_AVX2(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + uint8_t* dst_argb, + int width) { + asm volatile( + + LABELALIGN + "1: \n" + + "vmovdqu (%2),%%xmm0 \n" // B + "vpcmpeqd %%ymm1,%%ymm1,%%ymm1 \n" // A(255) + "vinserti128 $0,(%1),%%ymm1,%%ymm1 \n" // R + "vinserti128 $1,(%0),%%ymm0,%%ymm0 \n" // G + "vpunpckhbw %%ymm1,%%ymm0,%%ymm2 \n" + "vpunpcklbw %%ymm1,%%ymm0,%%ymm0 \n" + "vperm2i128 $0x31,%%ymm2,%%ymm0,%%ymm1 \n" + "vperm2i128 $0x20,%%ymm2,%%ymm0,%%ymm0 \n" + "vpunpckhwd %%ymm1,%%ymm0,%%ymm2 \n" + "vpunpcklwd %%ymm1,%%ymm0,%%ymm0 \n" + "vperm2i128 $0x31,%%ymm2,%%ymm0,%%ymm1 \n" + "vperm2i128 $0x20,%%ymm2,%%ymm0,%%ymm0 \n" + 
"vmovdqu %%ymm0,(%3) \n" // First 8 + "vmovdqu %%ymm1,32(%3) \n" // Next 8 + + "lea 16(%0),%0 \n" + "lea 16(%1),%1 \n" + "lea 16(%2),%2 \n" + "lea 64(%3),%3 \n" + "sub $0x10,%4 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_argb), // %3 + "+rm"(width) // %4 + : + : "memory", "cc", "xmm0", "xmm1", "xmm2"); +} +#endif // HAS_MERGEARGBROW_AVX2 + +#ifdef HAS_SPLITARGBROW_SSE2 +void SplitARGBRow_SSE2(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + uint8_t* dst_a, + int width) { + asm volatile( + + "sub %1,%2 \n" + "sub %1,%3 \n" + "sub %1,%4 \n" + + LABELALIGN + "1: \n" + + "movdqu (%0),%%xmm0 \n" // 00-0F + "movdqu 16(%0),%%xmm1 \n" // 10-1F + "movdqa %%xmm0,%%xmm2 \n" + "punpcklqdq %%xmm1,%%xmm0 \n" // 00-07 10-17 + "punpckhqdq %%xmm1,%%xmm2 \n" // 08-0F 18-1F + "movdqa %%xmm0,%%xmm1 \n" + "punpcklbw %%xmm2,%%xmm0 \n" // 08192A3B4C5D6E7F (lo) + "punpckhbw %%xmm2,%%xmm1 \n" // 08192A3B4C5D6E7F (hi) + "movdqa %%xmm0,%%xmm2 \n" + "punpcklqdq %%xmm1,%%xmm0 \n" // 08192A3B08192A3B + "punpckhqdq %%xmm1,%%xmm2 \n" // 4C5D6E7F4C5D6E7F + "movdqa %%xmm0,%%xmm1 \n" + "punpcklbw %%xmm2,%%xmm0 \n" // 048C159D26AE37BF (lo) + "punpckhbw %%xmm2,%%xmm1 \n" // 048C159D26AE37BF (hi) + "movdqa %%xmm0,%%xmm2 \n" + "punpckldq %%xmm1,%%xmm0 \n" // 048C048C159D159D (BG) + "punpckhdq %%xmm1,%%xmm2 \n" // 26AE26AE37BF37BF (RA) + "movlps %%xmm0,(%1,%3) \n" // B + "movhps %%xmm0,(%1,%2) \n" // G + "movlps %%xmm2,(%1) \n" // R + "movhps %%xmm2,(%1,%4) \n" // A + + "lea 32(%0),%0 \n" + "lea 8(%1),%1 \n" + "sub $0x8,%5 \n" + "jg 1b \n" + : "+r"(src_argb), // %0 + "+r"(dst_r), // %1 + "+r"(dst_g), // %2 + "+r"(dst_b), // %3 + "+r"(dst_a), // %4 + "+rm"(width) // %5 + : + : "memory", "cc", "xmm0", "xmm1", "xmm2"); +} +#endif + +#ifdef HAS_SPLITXRGBROW_SSE2 +void SplitXRGBRow_SSE2(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width) { + asm volatile( + + LABELALIGN + "1: \n" + + "movdqu (%0),%%xmm0 \n" // 00-0F + "movdqu 16(%0),%%xmm1 \n" // 10-1F + "movdqa %%xmm0,%%xmm2 \n" + "punpcklqdq %%xmm1,%%xmm0 \n" // 00-07 10-17 + "punpckhqdq %%xmm1,%%xmm2 \n" // 08-0F 18-1F + "movdqa %%xmm0,%%xmm1 \n" + "punpcklbw %%xmm2,%%xmm0 \n" // 08192A3B4C5D6E7F (lo) + "punpckhbw %%xmm2,%%xmm1 \n" // 08192A3B4C5D6E7F (hi) + "movdqa %%xmm0,%%xmm2 \n" + "punpcklqdq %%xmm1,%%xmm0 \n" // 08192A3B08192A3B + "punpckhqdq %%xmm1,%%xmm2 \n" // 4C5D6E7F4C5D6E7F + "movdqa %%xmm0,%%xmm1 \n" + "punpcklbw %%xmm2,%%xmm0 \n" // 048C159D26AE37BF (lo) + "punpckhbw %%xmm2,%%xmm1 \n" // 048C159D26AE37BF (hi) + "movdqa %%xmm0,%%xmm2 \n" + "punpckldq %%xmm1,%%xmm0 \n" // 048C048C159D159D (BG) + "punpckhdq %%xmm1,%%xmm2 \n" // 26AE26AE37BF37BF (RA) + "movlps %%xmm0,(%3) \n" // B + "movhps %%xmm0,(%2) \n" // G + "movlps %%xmm2,(%1) \n" // R + + "lea 32(%0),%0 \n" + "lea 8(%1),%1 \n" + "lea 8(%2),%2 \n" + "lea 8(%3),%3 \n" + "sub $0x8,%4 \n" + "jg 1b \n" + : "+r"(src_argb), // %0 + "+r"(dst_r), // %1 + "+r"(dst_g), // %2 + "+r"(dst_b), // %3 + "+rm"(width) // %4 + : + : "memory", "cc", "xmm0", "xmm1", "xmm2"); +} +#endif + +static const uvec8 kShuffleMaskARGBSplit = {0, 4, 8, 12, 1, 5, 9, 13, + 2, 6, 10, 14, 3, 7, 11, 15}; +#ifdef HAS_SPLITARGBROW_SSSE3 +void SplitARGBRow_SSSE3(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + uint8_t* dst_a, + int width) { + asm volatile( + + "movdqa %6,%%xmm3 \n" + "sub %1,%2 \n" + "sub %1,%3 \n" + "sub %1,%4 \n" + + LABELALIGN + "1: \n" + + "movdqu (%0),%%xmm0 \n" // 
00-0F + "movdqu 16(%0),%%xmm1 \n" // 10-1F + "pshufb %%xmm3,%%xmm0 \n" // 048C159D26AE37BF (lo) + "pshufb %%xmm3,%%xmm1 \n" // 048C159D26AE37BF (hi) + "movdqa %%xmm0,%%xmm2 \n" + "punpckldq %%xmm1,%%xmm0 \n" // 048C048C159D159D (BG) + "punpckhdq %%xmm1,%%xmm2 \n" // 26AE26AE37BF37BF (RA) + "movlps %%xmm0,(%1,%3) \n" // B + "movhps %%xmm0,(%1,%2) \n" // G + "movlps %%xmm2,(%1) \n" // R + "movhps %%xmm2,(%1,%4) \n" // A + + "lea 32(%0),%0 \n" + "lea 8(%1),%1 \n" + "subl $0x8,%5 \n" + "jg 1b \n" + : "+r"(src_argb), // %0 + "+r"(dst_r), // %1 + "+r"(dst_g), // %2 + "+r"(dst_b), // %3 + "+r"(dst_a), // %4 +#if defined(__i386__) + "+m"(width) // %5 +#else + "+rm"(width) // %5 +#endif + : "m"(kShuffleMaskARGBSplit) // %6 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3"); +} +#endif + +#ifdef HAS_SPLITXRGBROW_SSSE3 +void SplitXRGBRow_SSSE3(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width) { + asm volatile( + + "movdqa %5,%%xmm3 \n" + + LABELALIGN + "1: \n" + + "movdqu (%0),%%xmm0 \n" // 00-0F + "movdqu 16(%0),%%xmm1 \n" // 10-1F + "pshufb %%xmm3,%%xmm0 \n" // 048C159D26AE37BF (lo) + "pshufb %%xmm3,%%xmm1 \n" // 048C159D26AE37BF (hi) + "movdqa %%xmm0,%%xmm2 \n" + "punpckldq %%xmm1,%%xmm0 \n" // 048C048C159D159D (BG) + "punpckhdq %%xmm1,%%xmm2 \n" // 26AE26AE37BF37BF (RA) + "movlps %%xmm0,(%3) \n" // B + "movhps %%xmm0,(%2) \n" // G + "movlps %%xmm2,(%1) \n" // R + + "lea 32(%0),%0 \n" + "lea 8(%1),%1 \n" + "lea 8(%2),%2 \n" + "lea 8(%3),%3 \n" + "sub $0x8,%4 \n" + "jg 1b \n" + : "+r"(src_argb), // %0 + "+r"(dst_r), // %1 + "+r"(dst_g), // %2 + "+r"(dst_b), // %3 + "+r"(width) // %4 + : "m"(kShuffleMaskARGBSplit) // %5 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3"); +} +#endif + +#ifdef HAS_SPLITARGBROW_AVX2 +static const ulvec32 kShuffleMaskARGBPermute = {0, 4, 1, 5, 2, 6, 3, 7}; +void SplitARGBRow_AVX2(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + uint8_t* dst_a, + int width) { + asm volatile( + + "sub %1,%2 \n" + "sub %1,%3 \n" + "sub %1,%4 \n" + "vmovdqa %7,%%ymm3 \n" + "vbroadcastf128 %6,%%ymm4 \n" + + LABELALIGN + "1: \n" + + "vmovdqu (%0),%%xmm0 \n" // 00-0F + "vmovdqu 16(%0),%%xmm1 \n" // 10-1F + "vinserti128 $1,32(%0),%%ymm0,%%ymm0 \n" // 00-0F 20-2F + "vinserti128 $1,48(%0),%%ymm1,%%ymm1 \n" // 10-1F 30-3F + "vpshufb %%ymm4,%%ymm0,%%ymm0 \n" + "vpshufb %%ymm4,%%ymm1,%%ymm1 \n" + "vpermd %%ymm0,%%ymm3,%%ymm0 \n" + "vpermd %%ymm1,%%ymm3,%%ymm1 \n" + "vpunpckhdq %%ymm1,%%ymm0,%%ymm2 \n" // GA + "vpunpckldq %%ymm1,%%ymm0,%%ymm0 \n" // BR + "vmovdqu %%xmm0,(%1,%3) \n" // B + "vextracti128 $1,%%ymm0,(%1) \n" // R + "vmovdqu %%xmm2,(%1,%2) \n" // G + "vextracti128 $1,%%ymm2,(%1,%4) \n" // A + "lea 64(%0),%0 \n" + "lea 16(%1),%1 \n" + "subl $0x10,%5 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_argb), // %0 + "+r"(dst_r), // %1 + "+r"(dst_g), // %2 + "+r"(dst_b), // %3 + "+r"(dst_a), // %4 +#if defined(__i386__) + "+m"(width) // %5 +#else + "+rm"(width) // %5 +#endif + : "m"(kShuffleMaskARGBSplit), // %6 + "m"(kShuffleMaskARGBPermute) // %7 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4"); +} +#endif -void SplitRGBRow_SSSE3(const uint8_t* src_rgb, +#ifdef HAS_SPLITXRGBROW_AVX2 +void SplitXRGBRow_AVX2(const uint8_t* src_argb, uint8_t* dst_r, uint8_t* dst_g, uint8_t* dst_b, int width) { asm volatile( + "vmovdqa %6,%%ymm3 \n" + "vbroadcastf128 %5,%%ymm4 \n" + LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "pshufb %5, %%xmm0 \n" - "pshufb %6, 
%%xmm1 \n" - "pshufb %7, %%xmm2 \n" - "por %%xmm1,%%xmm0 \n" - "por %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,(%1) \n" - "lea 0x10(%1),%1 \n" - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "pshufb %8, %%xmm0 \n" - "pshufb %9, %%xmm1 \n" - "pshufb %10, %%xmm2 \n" - "por %%xmm1,%%xmm0 \n" - "por %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,(%2) \n" - "lea 0x10(%2),%2 \n" + "vmovdqu (%0),%%xmm0 \n" // 00-0F + "vmovdqu 16(%0),%%xmm1 \n" // 10-1F + "vinserti128 $1,32(%0),%%ymm0,%%ymm0 \n" // 00-0F 20-2F + "vinserti128 $1,48(%0),%%ymm1,%%ymm1 \n" // 10-1F 30-3F + "vpshufb %%ymm4,%%ymm0,%%ymm0 \n" + "vpshufb %%ymm4,%%ymm1,%%ymm1 \n" + "vpermd %%ymm0,%%ymm3,%%ymm0 \n" + "vpermd %%ymm1,%%ymm3,%%ymm1 \n" + "vpunpckhdq %%ymm1,%%ymm0,%%ymm2 \n" // GA + "vpunpckldq %%ymm1,%%ymm0,%%ymm0 \n" // BR + "vmovdqu %%xmm0,(%3) \n" // B + "vextracti128 $1,%%ymm0,(%1) \n" // R + "vmovdqu %%xmm2,(%2) \n" // G + + "lea 64(%0),%0 \n" + "lea 16(%1),%1 \n" + "lea 16(%2),%2 \n" + "lea 16(%3),%3 \n" + "sub $0x10,%4 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_argb), // %0 + "+r"(dst_r), // %1 + "+r"(dst_g), // %2 + "+r"(dst_b), // %3 + "+r"(width) // %4 + : "m"(kShuffleMaskARGBSplit), // %5 + "m"(kShuffleMaskARGBPermute) // %6 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4"); +} +#endif - "movdqu (%0),%%xmm0 \n" - "movdqu 0x10(%0),%%xmm1 \n" - "movdqu 0x20(%0),%%xmm2 \n" - "pshufb %11, %%xmm0 \n" - "pshufb %12, %%xmm1 \n" - "pshufb %13, %%xmm2 \n" - "por %%xmm1,%%xmm0 \n" - "por %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,(%3) \n" - "lea 0x10(%3),%3 \n" - "lea 0x30(%0),%0 \n" +#ifdef HAS_MERGEXR30ROW_AVX2 +void MergeXR30Row_AVX2(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_ar30, + int depth, + int width) { + int shift = depth - 10; + asm volatile( + + "sub %0,%1 \n" + "sub %0,%2 \n" + "vpcmpeqb %%ymm5,%%ymm5,%%ymm5 \n" // AR30 constants + "vpsrlw $14,%%ymm5,%%ymm5 \n" + "vpsllw $4,%%ymm5,%%ymm5 \n" // 2 alpha bits + "vpcmpeqb %%ymm6,%%ymm6,%%ymm6 \n" + "vpsrlw $6,%%ymm6,%%ymm6 \n" + "vmovd %5,%%xmm4 \n" + + LABELALIGN + "1: \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu (%0,%1),%%ymm1 \n" + "vmovdqu (%0,%2),%%ymm2 \n" + "vpsrlw %%xmm4,%%ymm0,%%ymm0 \n" + "vpsrlw %%xmm4,%%ymm1,%%ymm1 \n" + "vpsrlw %%xmm4,%%ymm2,%%ymm2 \n" + "vpminuw %%ymm0,%%ymm6,%%ymm0 \n" + "vpminuw %%ymm1,%%ymm6,%%ymm1 \n" + "vpminuw %%ymm2,%%ymm6,%%ymm2 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm1,%%ymm1 \n" + "vpermq $0xd8,%%ymm2,%%ymm2 \n" + "vpsllw $0x4,%%ymm0,%%ymm0 \n" // Shift R to target bit + "vpunpckhwd %%ymm0,%%ymm2,%%ymm3 \n" // RB + "vpunpcklwd %%ymm0,%%ymm2,%%ymm0 \n" + "vpunpckhwd %%ymm5,%%ymm1,%%ymm2 \n" // AG + "vpunpcklwd %%ymm5,%%ymm1,%%ymm1 \n" + "vpslld $0xa,%%ymm1,%%ymm1 \n" // Shift AG to target bit + "vpslld $0xa,%%ymm2,%%ymm2 \n" + "vpor %%ymm1,%%ymm0,%%ymm0 \n" // Combine + "vpor %%ymm2,%%ymm3,%%ymm3 \n" + "vmovdqu %%ymm0,(%3) \n" + "vmovdqu %%ymm3,0x20(%3) \n" + "lea 0x20(%0),%0 \n" + "lea 0x40(%3),%3 \n" "sub $0x10,%4 \n" "jg 1b \n" - : "+r"(src_rgb), // %0 - "+r"(dst_r), // %1 - "+r"(dst_g), // %2 - "+r"(dst_b), // %3 - "+r"(width) // %4 - : "m"(kShuffleMaskRGBToR0), // %5 - "m"(kShuffleMaskRGBToR1), // %6 - "m"(kShuffleMaskRGBToR2), // %7 - "m"(kShuffleMaskRGBToG0), // %8 - "m"(kShuffleMaskRGBToG1), // %9 - "m"(kShuffleMaskRGBToG2), // %10 - "m"(kShuffleMaskRGBToB0), // %11 - "m"(kShuffleMaskRGBToB1), // %12 - "m"(kShuffleMaskRGBToB2) // %13 - : "memory", "cc", "xmm0", "xmm1", "xmm2"); + "vzeroupper \n" + : "+r"(src_r), // %0 + "+r"(src_g), 
// %1 + "+r"(src_b), // %2 + "+r"(dst_ar30), // %3 + "+r"(width) // %4 +#if defined(__i386__) + : "m"(shift) // %5 +#else + : "rm"(shift) // %5 +#endif + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"); } -#endif // HAS_SPLITRGBROW_SSSE3 +#endif -#ifdef HAS_MERGERGBROW_SSSE3 +#ifdef HAS_MERGEAR64ROW_AVX2 +static const lvec32 MergeAR64Permute = {0, 4, 2, 6, 1, 5, 3, 7}; +void MergeAR64Row_AVX2(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + const uint16_t* src_a, + uint16_t* dst_ar64, + int depth, + int width) { + int shift = 16 - depth; + int mask = (1 << depth) - 1; + mask = (mask << 16) + mask; + asm volatile( -// Shuffle table for converting RGB to Planar. -static const uvec8 kShuffleMaskRToRGB0 = {0u, 128u, 128u, 1u, 128u, 128u, - 2u, 128u, 128u, 3u, 128u, 128u, - 4u, 128u, 128u, 5u}; -static const uvec8 kShuffleMaskGToRGB0 = {128u, 0u, 128u, 128u, 1u, 128u, - 128u, 2u, 128u, 128u, 3u, 128u, - 128u, 4u, 128u, 128u}; -static const uvec8 kShuffleMaskBToRGB0 = {128u, 128u, 0u, 128u, 128u, 1u, - 128u, 128u, 2u, 128u, 128u, 3u, - 128u, 128u, 4u, 128u}; - -static const uvec8 kShuffleMaskGToRGB1 = {5u, 128u, 128u, 6u, 128u, 128u, - 7u, 128u, 128u, 8u, 128u, 128u, - 9u, 128u, 128u, 10u}; -static const uvec8 kShuffleMaskBToRGB1 = {128u, 5u, 128u, 128u, 6u, 128u, - 128u, 7u, 128u, 128u, 8u, 128u, - 128u, 9u, 128u, 128u}; -static const uvec8 kShuffleMaskRToRGB1 = {128u, 128u, 6u, 128u, 128u, 7u, - 128u, 128u, 8u, 128u, 128u, 9u, - 128u, 128u, 10u, 128u}; - -static const uvec8 kShuffleMaskBToRGB2 = {10u, 128u, 128u, 11u, 128u, 128u, - 12u, 128u, 128u, 13u, 128u, 128u, - 14u, 128u, 128u, 15u}; -static const uvec8 kShuffleMaskRToRGB2 = {128u, 11u, 128u, 128u, 12u, 128u, - 128u, 13u, 128u, 128u, 14u, 128u, - 128u, 15u, 128u, 128u}; -static const uvec8 kShuffleMaskGToRGB2 = {128u, 128u, 11u, 128u, 128u, 12u, - 128u, 128u, 13u, 128u, 128u, 14u, - 128u, 128u, 15u, 128u}; + "sub %0,%1 \n" + "sub %0,%2 \n" + "sub %0,%3 \n" + "vmovdqa %8,%%ymm5 \n" + "vmovd %6,%%xmm6 \n" + "vbroadcastss %7,%%ymm7 \n" -void MergeRGBRow_SSSE3(const uint8_t* src_r, - const uint8_t* src_g, - const uint8_t* src_b, - uint8_t* dst_rgb, + LABELALIGN + "1: \n" + "vmovdqu (%0),%%ymm0 \n" // R + "vmovdqu (%0,%1),%%ymm1 \n" // G + "vmovdqu (%0,%2),%%ymm2 \n" // B + "vmovdqu (%0,%3),%%ymm3 \n" // A + "vpminuw %%ymm0,%%ymm7,%%ymm0 \n" + "vpminuw %%ymm1,%%ymm7,%%ymm1 \n" + "vpminuw %%ymm2,%%ymm7,%%ymm2 \n" + "vpminuw %%ymm3,%%ymm7,%%ymm3 \n" + "vpsllw %%xmm6,%%ymm0,%%ymm0 \n" + "vpsllw %%xmm6,%%ymm1,%%ymm1 \n" + "vpsllw %%xmm6,%%ymm2,%%ymm2 \n" + "vpsllw %%xmm6,%%ymm3,%%ymm3 \n" + "vpermd %%ymm0,%%ymm5,%%ymm0 \n" + "vpermd %%ymm1,%%ymm5,%%ymm1 \n" + "vpermd %%ymm2,%%ymm5,%%ymm2 \n" + "vpermd %%ymm3,%%ymm5,%%ymm3 \n" + "vpunpcklwd %%ymm1,%%ymm2,%%ymm4 \n" // BG(low) + "vpunpckhwd %%ymm1,%%ymm2,%%ymm1 \n" // BG(hi) + "vpunpcklwd %%ymm3,%%ymm0,%%ymm2 \n" // RA(low) + "vpunpckhwd %%ymm3,%%ymm0,%%ymm0 \n" // RA(hi) + "vpunpckldq %%ymm2,%%ymm4,%%ymm3 \n" // BGRA(1) + "vpunpckhdq %%ymm2,%%ymm4,%%ymm4 \n" // BGRA(3) + "vpunpckldq %%ymm0,%%ymm1,%%ymm2 \n" // BGRA(2) + "vpunpckhdq %%ymm0,%%ymm1,%%ymm1 \n" // BGRA(4) + "vmovdqu %%ymm3,(%4) \n" + "vmovdqu %%ymm2,0x20(%4) \n" + "vmovdqu %%ymm4,0x40(%4) \n" + "vmovdqu %%ymm1,0x60(%4) \n" + "lea 0x20(%0),%0 \n" + "lea 0x80(%4),%4 \n" + "subl $0x10,%5 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(src_a), // %3 + "+r"(dst_ar64), // %4 +#if defined(__i386__) + "+m"(width) // %5 +#else + "+rm"(width) 
// %5 +#endif + : "m"(shift), // %6 + "m"(mask), // %7 + "m"(MergeAR64Permute) // %8 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", + "xmm7"); +} +#endif + +#ifdef HAS_MERGEXR64ROW_AVX2 +void MergeXR64Row_AVX2(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint16_t* dst_ar64, + int depth, int width) { + int shift = 16 - depth; + int mask = (1 << depth) - 1; + mask = (mask << 16) + mask; asm volatile( + "sub %0,%1 \n" + "sub %0,%2 \n" + "vmovdqa %7,%%ymm5 \n" + "vmovd %5,%%xmm6 \n" + "vbroadcastss %6,%%ymm7 \n" + LABELALIGN "1: \n" - "movdqu (%0),%%xmm0 \n" - "movdqu (%1),%%xmm1 \n" - "movdqu (%2),%%xmm2 \n" - "pshufb %5, %%xmm0 \n" - "pshufb %6, %%xmm1 \n" - "pshufb %7, %%xmm2 \n" - "por %%xmm1,%%xmm0 \n" - "por %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,(%3) \n" + "vmovdqu (%0),%%ymm0 \n" // R + "vmovdqu (%0,%1),%%ymm1 \n" // G + "vmovdqu (%0,%2),%%ymm2 \n" // B + "vpminuw %%ymm0,%%ymm7,%%ymm0 \n" + "vpminuw %%ymm1,%%ymm7,%%ymm1 \n" + "vpminuw %%ymm2,%%ymm7,%%ymm2 \n" + "vpsllw %%xmm6,%%ymm0,%%ymm0 \n" + "vpsllw %%xmm6,%%ymm1,%%ymm1 \n" + "vpsllw %%xmm6,%%ymm2,%%ymm2 \n" + "vpermd %%ymm0,%%ymm5,%%ymm0 \n" + "vpermd %%ymm1,%%ymm5,%%ymm1 \n" + "vpermd %%ymm2,%%ymm5,%%ymm2 \n" + "vpcmpeqb %%ymm3,%%ymm3,%%ymm3 \n" // A (0xffff) + "vpunpcklwd %%ymm1,%%ymm2,%%ymm4 \n" // BG(low) + "vpunpckhwd %%ymm1,%%ymm2,%%ymm1 \n" // BG(hi) + "vpunpcklwd %%ymm3,%%ymm0,%%ymm2 \n" // RA(low) + "vpunpckhwd %%ymm3,%%ymm0,%%ymm0 \n" // RA(hi) + "vpunpckldq %%ymm2,%%ymm4,%%ymm3 \n" // BGRA(1) + "vpunpckhdq %%ymm2,%%ymm4,%%ymm4 \n" // BGRA(3) + "vpunpckldq %%ymm0,%%ymm1,%%ymm2 \n" // BGRA(2) + "vpunpckhdq %%ymm0,%%ymm1,%%ymm1 \n" // BGRA(4) + "vmovdqu %%ymm3,(%3) \n" + "vmovdqu %%ymm2,0x20(%3) \n" + "vmovdqu %%ymm4,0x40(%3) \n" + "vmovdqu %%ymm1,0x60(%3) \n" + "lea 0x20(%0),%0 \n" + "lea 0x80(%3),%3 \n" + "subl $0x10,%4 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_ar64), // %3 + "+r"(width) // %4 + : "m"(shift), // %5 + "m"(mask), // %6 + "m"(MergeAR64Permute) // %7 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", + "xmm7"); +} +#endif - "movdqu (%0),%%xmm0 \n" - "movdqu (%1),%%xmm1 \n" - "movdqu (%2),%%xmm2 \n" - "pshufb %8, %%xmm0 \n" - "pshufb %9, %%xmm1 \n" - "pshufb %10, %%xmm2 \n" - "por %%xmm1,%%xmm0 \n" - "por %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,16(%3) \n" +#ifdef HAS_MERGEARGB16TO8ROW_AVX2 +static const uvec8 MergeARGB16To8Shuffle = {0, 8, 1, 9, 2, 10, 3, 11, + 4, 12, 5, 13, 6, 14, 7, 15}; +void MergeARGB16To8Row_AVX2(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + const uint16_t* src_a, + uint8_t* dst_argb, + int depth, + int width) { + int shift = depth - 8; + asm volatile( - "movdqu (%0),%%xmm0 \n" - "movdqu (%1),%%xmm1 \n" - "movdqu (%2),%%xmm2 \n" - "pshufb %11, %%xmm0 \n" - "pshufb %12, %%xmm1 \n" - "pshufb %13, %%xmm2 \n" - "por %%xmm1,%%xmm0 \n" - "por %%xmm2,%%xmm0 \n" - "movdqu %%xmm0,32(%3) \n" + "sub %0,%1 \n" + "sub %0,%2 \n" + "sub %0,%3 \n" + "vbroadcastf128 %7,%%ymm5 \n" + "vmovd %6,%%xmm6 \n" - "lea 0x10(%0),%0 \n" - "lea 0x10(%1),%1 \n" - "lea 0x10(%2),%2 \n" - "lea 0x30(%3),%3 \n" - "sub $0x10,%4 \n" + LABELALIGN + "1: \n" + "vmovdqu (%0),%%ymm0 \n" // R + "vmovdqu (%0,%1),%%ymm1 \n" // G + "vmovdqu (%0,%2),%%ymm2 \n" // B + "vmovdqu (%0,%3),%%ymm3 \n" // A + "vpsrlw %%xmm6,%%ymm0,%%ymm0 \n" + "vpsrlw %%xmm6,%%ymm1,%%ymm1 \n" + "vpsrlw %%xmm6,%%ymm2,%%ymm2 \n" + "vpsrlw %%xmm6,%%ymm3,%%ymm3 \n" + "vpackuswb 
%%ymm1,%%ymm2,%%ymm1 \n" // BG (planar) + "vpackuswb %%ymm3,%%ymm0,%%ymm0 \n" // RA (planar) + "vpshufb %%ymm5,%%ymm1,%%ymm1 \n" // BG (interleave) + "vpshufb %%ymm5,%%ymm0,%%ymm0 \n" // RA (interleave) + "vpermq $0xd8,%%ymm1,%%ymm1 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpunpcklwd %%ymm0,%%ymm1,%%ymm2 \n" // BGRA (low) + "vpunpckhwd %%ymm0,%%ymm1,%%ymm0 \n" // BGRA (hi) + "vmovdqu %%ymm2,(%4) \n" + "vmovdqu %%ymm0,0x20(%4) \n" + "lea 0x20(%0),%0 \n" + "lea 0x40(%4),%4 \n" + "subl $0x10,%5 \n" "jg 1b \n" - : "+r"(src_r), // %0 - "+r"(src_g), // %1 - "+r"(src_b), // %2 - "+r"(dst_rgb), // %3 - "+r"(width) // %4 - : "m"(kShuffleMaskRToRGB0), // %5 - "m"(kShuffleMaskGToRGB0), // %6 - "m"(kShuffleMaskBToRGB0), // %7 - "m"(kShuffleMaskRToRGB1), // %8 - "m"(kShuffleMaskGToRGB1), // %9 - "m"(kShuffleMaskBToRGB1), // %10 - "m"(kShuffleMaskRToRGB2), // %11 - "m"(kShuffleMaskGToRGB2), // %12 - "m"(kShuffleMaskBToRGB2) // %13 - : "memory", "cc", "xmm0", "xmm1", "xmm2"); + "vzeroupper \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(src_a), // %3 + "+r"(dst_argb), // %4 +#if defined(__i386__) + "+m"(width) // %5 +#else + "+rm"(width) // %5 +#endif + : "m"(shift), // %6 + "m"(MergeARGB16To8Shuffle) // %7 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6"); } -#endif // HAS_MERGERGBROW_SSSE3 +#endif + +#ifdef HAS_MERGEXRGB16TO8ROW_AVX2 +void MergeXRGB16To8Row_AVX2(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_argb, + int depth, + int width) { + int shift = depth - 8; + asm volatile( + + "sub %0,%1 \n" + "sub %0,%2 \n" + "vbroadcastf128 %6,%%ymm5 \n" + "vmovd %5,%%xmm6 \n" + "vpcmpeqb %%ymm3,%%ymm3,%%ymm3 \n" + "vpsrlw $8,%%ymm3,%%ymm3 \n" // A (0xff) + + LABELALIGN + "1: \n" + "vmovdqu (%0),%%ymm0 \n" // R + "vmovdqu (%0,%1),%%ymm1 \n" // G + "vmovdqu (%0,%2),%%ymm2 \n" // B + "vpsrlw %%xmm6,%%ymm0,%%ymm0 \n" + "vpsrlw %%xmm6,%%ymm1,%%ymm1 \n" + "vpsrlw %%xmm6,%%ymm2,%%ymm2 \n" + "vpackuswb %%ymm1,%%ymm2,%%ymm1 \n" // BG (planar) + "vpackuswb %%ymm3,%%ymm0,%%ymm0 \n" // RA (planar) + "vpshufb %%ymm5,%%ymm1,%%ymm1 \n" // BG (interleave) + "vpshufb %%ymm5,%%ymm0,%%ymm0 \n" // RA (interleave) + "vpermq $0xd8,%%ymm1,%%ymm1 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vpunpcklwd %%ymm0,%%ymm1,%%ymm2 \n" // BGRA (low) + "vpunpckhwd %%ymm0,%%ymm1,%%ymm0 \n" // BGRA (hi) + "vmovdqu %%ymm2,(%3) \n" + "vmovdqu %%ymm0,0x20(%3) \n" + "lea 0x20(%0),%0 \n" + "lea 0x40(%3),%3 \n" + "subl $0x10,%4 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_argb), // %3 + "+r"(width) // %4 + : "m"(shift), // %5 + "m"(MergeARGB16To8Shuffle) // %6 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6"); +} +#endif #ifdef HAS_COPYROW_SSE2 void CopyRow_SSE2(const uint8_t* src, uint8_t* dst, int width) { @@ -4022,6 +6499,7 @@ void CopyRow_AVX(const uint8_t* src, uint8_t* dst, int width) { "lea 0x40(%1),%1 \n" "sub $0x40,%2 \n" "jg 1b \n" + "vzeroupper \n" : "+r"(src), // %0 "+r"(dst), // %1 "+r"(width) // %2 @@ -4305,6 +6783,33 @@ void YUY2ToYRow_SSE2(const uint8_t* src_yuy2, uint8_t* dst_y, int width) { : "memory", "cc", "xmm0", "xmm1", "xmm5"); } +void YUY2ToNVUVRow_SSE2(const uint8_t* src_yuy2, + int stride_yuy2, + uint8_t* dst_uv, + int width) { + asm volatile(LABELALIGN + "1: \n" + "movdqu (%0),%%xmm0 \n" + "movdqu 0x10(%0),%%xmm1 \n" + "movdqu 0x00(%0,%3,1),%%xmm2 \n" + "movdqu 0x10(%0,%3,1),%%xmm3 \n" + "lea 0x20(%0),%0 \n" + "pavgb %%xmm2,%%xmm0 \n" + "pavgb 
%%xmm3,%%xmm1 \n" + "psrlw $0x8,%%xmm0 \n" + "psrlw $0x8,%%xmm1 \n" + "packuswb %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x10(%1),%1 \n" + "sub $0x10,%2 \n" + "jg 1b \n" + : "+r"(src_yuy2), // %0 + "+r"(dst_uv), // %1 + "+r"(width) // %2 + : "r"((intptr_t)(stride_yuy2)) // %3 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3"); +} + void YUY2ToUVRow_SSE2(const uint8_t* src_yuy2, int stride_yuy2, uint8_t* dst_u, @@ -4505,6 +7010,35 @@ void YUY2ToYRow_AVX2(const uint8_t* src_yuy2, uint8_t* dst_y, int width) { : "memory", "cc", "xmm0", "xmm1", "xmm5"); } +void YUY2ToNVUVRow_AVX2(const uint8_t* src_yuy2, + int stride_yuy2, + uint8_t* dst_uv, + int width) { + asm volatile( + + LABELALIGN + "1: \n" + "vmovdqu (%0),%%ymm0 \n" + "vmovdqu 0x20(%0),%%ymm1 \n" + "vpavgb 0x00(%0,%3,1),%%ymm0,%%ymm0 \n" + "vpavgb 0x20(%0,%3,1),%%ymm1,%%ymm1 \n" + "lea 0x40(%0),%0 \n" + "vpsrlw $0x8,%%ymm0,%%ymm0 \n" + "vpsrlw $0x8,%%ymm1,%%ymm1 \n" + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vpermq $0xd8,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_yuy2), // %0 + "+r"(dst_uv), // %1 + "+r"(width) // %2 + : "r"((intptr_t)(stride_yuy2)) // %3 + : "memory", "cc", "xmm0", "xmm1"); +} + void YUY2ToUVRow_AVX2(const uint8_t* src_yuy2, int stride_yuy2, uint8_t* dst_u, @@ -4693,7 +7227,7 @@ static const uvec8 kShuffleAlpha = {3u, 0x80, 3u, 0x80, 7u, 0x80, 7u, 0x80, 11u, 0x80, 11u, 0x80, 15u, 0x80, 15u, 0x80}; // Blend 8 pixels at a time -void ARGBBlendRow_SSSE3(const uint8_t* src_argb0, +void ARGBBlendRow_SSSE3(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -4764,7 +7298,7 @@ void ARGBBlendRow_SSSE3(const uint8_t* src_argb0, "sub $0x1,%3 \n" "jge 91b \n" "99: \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 "+r"(width) // %3 @@ -5366,7 +7900,7 @@ void ARGBShadeRow_SSE2(const uint8_t* src_argb, #ifdef HAS_ARGBMULTIPLYROW_SSE2 // Multiply 2 rows of ARGB pixels together, 4 pixels at a time. -void ARGBMultiplyRow_SSE2(const uint8_t* src_argb0, +void ARGBMultiplyRow_SSE2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -5394,7 +7928,7 @@ void ARGBMultiplyRow_SSE2(const uint8_t* src_argb0, "lea 0x10(%2),%2 \n" "sub $0x4,%3 \n" "jg 1b \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 "+r"(width) // %3 @@ -5405,7 +7939,7 @@ void ARGBMultiplyRow_SSE2(const uint8_t* src_argb0, #ifdef HAS_ARGBMULTIPLYROW_AVX2 // Multiply 2 rows of ARGB pixels together, 8 pixels at a time. -void ARGBMultiplyRow_AVX2(const uint8_t* src_argb0, +void ARGBMultiplyRow_AVX2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -5432,23 +7966,18 @@ void ARGBMultiplyRow_AVX2(const uint8_t* src_argb0, "sub $0x8,%3 \n" "jg 1b \n" "vzeroupper \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 "+r"(width) // %3 : - : "memory", "cc" -#if defined(__AVX2__) - , - "xmm0", "xmm1", "xmm2", "xmm3", "xmm5" -#endif - ); + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm5"); } #endif // HAS_ARGBMULTIPLYROW_AVX2 #ifdef HAS_ARGBADDROW_SSE2 // Add 2 rows of ARGB pixels together, 4 pixels at a time. 
-void ARGBAddRow_SSE2(const uint8_t* src_argb0, +void ARGBAddRow_SSE2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -5465,7 +7994,7 @@ void ARGBAddRow_SSE2(const uint8_t* src_argb0, "lea 0x10(%2),%2 \n" "sub $0x4,%3 \n" "jg 1b \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 "+r"(width) // %3 @@ -5476,7 +8005,7 @@ void ARGBAddRow_SSE2(const uint8_t* src_argb0, #ifdef HAS_ARGBADDROW_AVX2 // Add 2 rows of ARGB pixels together, 4 pixels at a time. -void ARGBAddRow_AVX2(const uint8_t* src_argb0, +void ARGBAddRow_AVX2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -5493,7 +8022,7 @@ void ARGBAddRow_AVX2(const uint8_t* src_argb0, "sub $0x8,%3 \n" "jg 1b \n" "vzeroupper \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 "+r"(width) // %3 @@ -5504,7 +8033,7 @@ void ARGBAddRow_AVX2(const uint8_t* src_argb0, #ifdef HAS_ARGBSUBTRACTROW_SSE2 // Subtract 2 rows of ARGB pixels, 4 pixels at a time. -void ARGBSubtractRow_SSE2(const uint8_t* src_argb0, +void ARGBSubtractRow_SSE2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -5521,7 +8050,7 @@ void ARGBSubtractRow_SSE2(const uint8_t* src_argb0, "lea 0x10(%2),%2 \n" "sub $0x4,%3 \n" "jg 1b \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 "+r"(width) // %3 @@ -5532,7 +8061,7 @@ void ARGBSubtractRow_SSE2(const uint8_t* src_argb0, #ifdef HAS_ARGBSUBTRACTROW_AVX2 // Subtract 2 rows of ARGB pixels, 8 pixels at a time. -void ARGBSubtractRow_AVX2(const uint8_t* src_argb0, +void ARGBSubtractRow_AVX2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -5549,7 +8078,7 @@ void ARGBSubtractRow_AVX2(const uint8_t* src_argb0, "sub $0x8,%3 \n" "jg 1b \n" "vzeroupper \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 "+r"(width) // %3 @@ -6103,7 +8632,7 @@ void ARGBAffineRow_SSE2(const uint8_t* src_argb, void InterpolateRow_SSSE3(uint8_t* dst_ptr, const uint8_t* src_ptr, ptrdiff_t src_stride, - int dst_width, + int width, int source_y_fraction) { asm volatile( "sub %1,%0 \n" @@ -6172,7 +8701,7 @@ void InterpolateRow_SSSE3(uint8_t* dst_ptr, "99: \n" : "+r"(dst_ptr), // %0 "+r"(src_ptr), // %1 - "+rm"(dst_width), // %2 + "+rm"(width), // %2 "+r"(source_y_fraction) // %3 : "r"((intptr_t)(src_stride)) // %4 : "memory", "cc", "eax", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"); @@ -6184,12 +8713,12 @@ void InterpolateRow_SSSE3(uint8_t* dst_ptr, void InterpolateRow_AVX2(uint8_t* dst_ptr, const uint8_t* src_ptr, ptrdiff_t src_stride, - int dst_width, + int width, int source_y_fraction) { asm volatile( + "sub %1,%0 \n" "cmp $0x0,%3 \n" "je 100f \n" - "sub %1,%0 \n" "cmp $0x80,%3 \n" "je 50f \n" @@ -6240,15 +8769,17 @@ void InterpolateRow_AVX2(uint8_t* dst_ptr, // Blend 100 / 0 - Copy row unchanged. 
LABELALIGN "100: \n" - "rep movsb \n" - "jmp 999f \n" + "vmovdqu (%1),%%ymm0 \n" + "vmovdqu %%ymm0,0x00(%1,%0,1) \n" + "lea 0x20(%1),%1 \n" + "sub $0x20,%2 \n" + "jg 100b \n" "99: \n" "vzeroupper \n" - "999: \n" - : "+D"(dst_ptr), // %0 - "+S"(src_ptr), // %1 - "+cm"(dst_width), // %2 + : "+r"(dst_ptr), // %0 + "+r"(src_ptr), // %1 + "+r"(width), // %2 "+r"(source_y_fraction) // %3 : "r"((intptr_t)(src_stride)) // %4 : "memory", "cc", "eax", "xmm0", "xmm1", "xmm2", "xmm4", "xmm5"); @@ -6638,7 +9169,7 @@ void HalfFloatRow_AVX2(const uint16_t* src, #if defined(__x86_64__) : "x"(scale) // %3 #else - : "m"(scale) // %3 + : "m"(scale) // %3 #endif : "memory", "cc", "xmm2", "xmm3", "xmm4", "xmm5"); } @@ -6676,7 +9207,7 @@ void HalfFloatRow_F16C(const uint16_t* src, #if defined(__x86_64__) : "x"(scale) // %3 #else - : "m"(scale) // %3 + : "m"(scale) // %3 #endif : "memory", "cc", "xmm2", "xmm3", "xmm4"); } @@ -6873,127 +9404,134 @@ void ARGBLumaColorTableRow_SSSE3(const uint8_t* src_argb, } #endif // HAS_ARGBLUMACOLORTABLEROW_SSSE3 -#ifdef HAS_NV21TOYUV24ROW_AVX2 - -// begin NV21ToYUV24Row_C avx2 constants -static const ulvec8 kBLEND0 = {0x80, 0x00, 0x80, 0x80, 0x00, 0x80, 0x80, 0x00, - 0x80, 0x80, 0x00, 0x80, 0x80, 0x00, 0x80, 0x80, - 0x00, 0x80, 0x00, 0x00, 0x80, 0x00, 0x00, 0x80, - 0x00, 0x00, 0x80, 0x00, 0x00, 0x80, 0x00, 0x00}; - -static const ulvec8 kBLEND1 = {0x00, 0x00, 0x80, 0x00, 0x00, 0x80, 0x00, 0x00, - 0x80, 0x00, 0x00, 0x80, 0x00, 0x00, 0x80, 0x00, - 0x80, 0x00, 0x00, 0x80, 0x00, 0x00, 0x80, 0x00, - 0x00, 0x80, 0x00, 0x00, 0x80, 0x00, 0x00, 0x80}; - -static const ulvec8 kBLEND2 = {0x80, 0x00, 0x00, 0x80, 0x00, 0x00, 0x80, 0x00, - 0x00, 0x80, 0x00, 0x00, 0x80, 0x00, 0x00, 0x80, - 0x00, 0x00, 0x80, 0x00, 0x00, 0x80, 0x00, 0x00, - 0x80, 0x00, 0x00, 0x80, 0x00, 0x00, 0x80, 0x00}; - -static const ulvec8 kSHUF0 = {0x00, 0x0b, 0x80, 0x01, 0x0c, 0x80, 0x02, 0x0d, - 0x80, 0x03, 0x0e, 0x80, 0x04, 0x0f, 0x80, 0x05, - 0x00, 0x0b, 0x80, 0x01, 0x0c, 0x80, 0x02, 0x0d, - 0x80, 0x03, 0x0e, 0x80, 0x04, 0x0f, 0x80, 0x05}; - -static const ulvec8 kSHUF1 = {0x80, 0x00, 0x0b, 0x80, 0x01, 0x0c, 0x80, 0x02, - 0x0d, 0x80, 0x03, 0x0e, 0x80, 0x04, 0x0f, 0x80, - 0x80, 0x00, 0x0b, 0x80, 0x01, 0x0c, 0x80, 0x02, - 0x0d, 0x80, 0x03, 0x0e, 0x80, 0x04, 0x0f, 0x80}; - -static const ulvec8 kSHUF2 = {0x0a, 0x80, 0x00, 0x0b, 0x80, 0x01, 0x0c, 0x80, - 0x02, 0x0d, 0x80, 0x03, 0x0e, 0x80, 0x04, 0x0f, - 0x0a, 0x80, 0x00, 0x0b, 0x80, 0x01, 0x0c, 0x80, - 0x02, 0x0d, 0x80, 0x03, 0x0e, 0x80, 0x04, 0x0f}; - -static const ulvec8 kSHUF3 = {0x80, 0x80, 0x06, 0x80, 0x80, 0x07, 0x80, 0x80, - 0x08, 0x80, 0x80, 0x09, 0x80, 0x80, 0x0a, 0x80, - 0x80, 0x80, 0x06, 0x80, 0x80, 0x07, 0x80, 0x80, - 0x08, 0x80, 0x80, 0x09, 0x80, 0x80, 0x0a, 0x80}; - -static const ulvec8 kSHUF4 = {0x05, 0x80, 0x80, 0x06, 0x80, 0x80, 0x07, 0x80, - 0x80, 0x08, 0x80, 0x80, 0x09, 0x80, 0x80, 0x0a, - 0x05, 0x80, 0x80, 0x06, 0x80, 0x80, 0x07, 0x80, - 0x80, 0x08, 0x80, 0x80, 0x09, 0x80, 0x80, 0x0a}; - -static const ulvec8 kSHUF5 = {0x80, 0x05, 0x80, 0x80, 0x06, 0x80, 0x80, 0x07, - 0x80, 0x80, 0x08, 0x80, 0x80, 0x09, 0x80, 0x80, - 0x80, 0x05, 0x80, 0x80, 0x06, 0x80, 0x80, 0x07, - 0x80, 0x80, 0x08, 0x80, 0x80, 0x09, 0x80, 0x80}; - -// NV21ToYUV24Row_AVX2 +static const uvec8 kYUV24Shuffle[3] = { + {8, 9, 0, 8, 9, 1, 10, 11, 2, 10, 11, 3, 12, 13, 4, 12}, + {9, 1, 10, 11, 2, 10, 11, 3, 12, 13, 4, 12, 13, 5, 14, 15}, + {2, 10, 11, 3, 12, 13, 4, 12, 13, 5, 14, 15, 6, 14, 15, 7}}; + +// Convert biplanar NV21 to packed YUV24 +// NV21 has VU in memory for chroma. 
+// YUV24 is VUY in memory +void NV21ToYUV24Row_SSSE3(const uint8_t* src_y, + const uint8_t* src_vu, + uint8_t* dst_yuv24, + int width) { + asm volatile( + "sub %0,%1 \n" + "movdqa (%4),%%xmm4 \n" // 3 shuffler constants + "movdqa 16(%4),%%xmm5 \n" + "movdqa 32(%4),%%xmm6 \n" + "1: \n" + "movdqu (%0),%%xmm2 \n" // load 16 Y values + "movdqu (%0,%1),%%xmm3 \n" // load 8 VU values + "lea 16(%0),%0 \n" + "movdqa %%xmm2,%%xmm0 \n" + "movdqa %%xmm2,%%xmm1 \n" + "shufps $0x44,%%xmm3,%%xmm0 \n" // Y 0..7, UV 0..3 + "shufps $0x99,%%xmm3,%%xmm1 \n" // Y 4..11, UV 2..5 + "shufps $0xee,%%xmm3,%%xmm2 \n" // Y 8..15, UV 4..7 + "pshufb %%xmm4, %%xmm0 \n" // weave into YUV24 + "pshufb %%xmm5, %%xmm1 \n" + "pshufb %%xmm6, %%xmm2 \n" + "movdqu %%xmm0,(%2) \n" + "movdqu %%xmm1,16(%2) \n" + "movdqu %%xmm2,32(%2) \n" + "lea 48(%2),%2 \n" + "sub $16,%3 \n" // 16 pixels per loop + "jg 1b \n" + : "+r"(src_y), // %0 + "+r"(src_vu), // %1 + "+r"(dst_yuv24), // %2 + "+r"(width) // %3 + : "r"(&kYUV24Shuffle[0]) // %4 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6"); +} + +// Convert biplanar NV21 to packed YUV24 +// NV21 has VU in memory for chroma. +// YUV24 is VUY in memory void NV21ToYUV24Row_AVX2(const uint8_t* src_y, const uint8_t* src_vu, uint8_t* dst_yuv24, int width) { - uint8_t* src_y_ptr; - uint64_t src_offset = 0; - uint64_t width64; - - width64 = width; - src_y_ptr = (uint8_t*)src_y; - - asm volatile( - "vmovdqu %5, %%ymm0 \n" // init blend value - "vmovdqu %6, %%ymm1 \n" // init blend value - "vmovdqu %7, %%ymm2 \n" // init blend value - // "sub $0x20, %3 \n" //sub 32 from - // width for final loop - - LABELALIGN - "1: \n" // label 1 - "vmovdqu (%0,%4), %%ymm3 \n" // src_y - "vmovdqu 1(%1,%4), %%ymm4 \n" // src_uv+1 - "vmovdqu (%1), %%ymm5 \n" // src_uv - "vpshufb %8, %%ymm3, %%ymm13 \n" // y, kSHUF0 for shuf - "vpshufb %9, %%ymm4, %%ymm14 \n" // uv+1, kSHUF1 for - // shuf - "vpshufb %10, %%ymm5, %%ymm15 \n" // uv, kSHUF2 for - // shuf - "vpshufb %11, %%ymm3, %%ymm3 \n" // y kSHUF3 for shuf - "vpshufb %12, %%ymm4, %%ymm4 \n" // uv+1 kSHUF4 for - // shuf - "vpblendvb %%ymm0, %%ymm14, %%ymm13, %%ymm12 \n" // blend 0 - "vpblendvb %%ymm0, %%ymm13, %%ymm14, %%ymm14 \n" // blend 0 - "vpblendvb %%ymm2, %%ymm15, %%ymm12, %%ymm12 \n" // blend 2 - "vpblendvb %%ymm1, %%ymm15, %%ymm14, %%ymm13 \n" // blend 1 - "vpshufb %13, %%ymm5, %%ymm15 \n" // shuffle const - "vpor %%ymm4, %%ymm3, %%ymm5 \n" // get results - "vmovdqu %%ymm12, 0x20(%2) \n" // store dst_yuv+20h - "vpor %%ymm15, %%ymm5, %%ymm3 \n" // get results - "add $0x20, %4 \n" // add to src buffer - // ptr - "vinserti128 $0x1, %%xmm3, %%ymm13, %%ymm4 \n" // insert - "vperm2i128 $0x31, %%ymm13, %%ymm3, %%ymm5 \n" // insert - "vmovdqu %%ymm4, (%2) \n" // store dst_yuv - "vmovdqu %%ymm5, 0x40(%2) \n" // store dst_yuv+40h - "add $0x60,%2 \n" // add to dst buffer - // ptr - // "cmp %3, %4 \n" //(width64 - - // 32 bytes) and src_offset - "sub $0x20,%3 \n" // 32 pixels per loop - "jg 1b \n" - "vzeroupper \n" // sse-avx2 - // transistions - - : "+r"(src_y), //%0 - "+r"(src_vu), //%1 - "+r"(dst_yuv24), //%2 - "+r"(width64), //%3 - "+r"(src_offset) //%4 - : "m"(kBLEND0), //%5 - "m"(kBLEND1), //%6 - "m"(kBLEND2), //%7 - "m"(kSHUF0), //%8 - "m"(kSHUF1), //%9 - "m"(kSHUF2), //%10 - "m"(kSHUF3), //%11 - "m"(kSHUF4), //%12 - "m"(kSHUF5) //%13 - : "memory", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm12", - "xmm13", "xmm14", "xmm15"); -} -#endif // HAS_NV21TOYUV24ROW_AVX2 + asm volatile( + "sub %0,%1 \n" + "vbroadcastf128 (%4),%%ymm4 \n" // 
3 shuffler constants + "vbroadcastf128 16(%4),%%ymm5 \n" + "vbroadcastf128 32(%4),%%ymm6 \n" + + "1: \n" + "vmovdqu (%0),%%ymm2 \n" // load 32 Y values + "vmovdqu (%0,%1),%%ymm3 \n" // load 16 VU values + "lea 32(%0),%0 \n" + "vshufps $0x44,%%ymm3,%%ymm2,%%ymm0 \n" // Y 0..7, UV 0..3 + "vshufps $0x99,%%ymm3,%%ymm2,%%ymm1 \n" // Y 4..11, UV 2..5 + "vshufps $0xee,%%ymm3,%%ymm2,%%ymm2 \n" // Y 8..15, UV 4..7 + "vpshufb %%ymm4,%%ymm0,%%ymm0 \n" // weave into YUV24 + "vpshufb %%ymm5,%%ymm1,%%ymm1 \n" + "vpshufb %%ymm6,%%ymm2,%%ymm2 \n" + "vperm2i128 $0x20,%%ymm1,%%ymm0,%%ymm3 \n" + "vperm2i128 $0x30,%%ymm0,%%ymm2,%%ymm0 \n" + "vperm2i128 $0x31,%%ymm2,%%ymm1,%%ymm1 \n" + "vmovdqu %%ymm3,(%2) \n" + "vmovdqu %%ymm0,32(%2) \n" + "vmovdqu %%ymm1,64(%2) \n" + "lea 96(%2),%2 \n" + "sub $32,%3 \n" // 32 pixels per loop + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_y), // %0 + "+r"(src_vu), // %1 + "+r"(dst_yuv24), // %2 + "+r"(width) // %3 + : "r"(&kYUV24Shuffle[0]) // %4 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6"); +} + +#ifdef HAS_NV21ToYUV24ROW_AVX512 +// The following VMBI VEX256 code tests okay with the intelsde emulator. +static const lvec8 kYUV24Perm[3] = { + {32, 33, 0, 32, 33, 1, 34, 35, 2, 34, 35, 3, 36, 37, 4, 36, + 37, 5, 38, 39, 6, 38, 39, 7, 40, 41, 8, 40, 41, 9, 42, 43}, + {10, 42, 43, 11, 44, 45, 12, 44, 45, 13, 46, 47, 14, 46, 47, 15, + 48, 49, 16, 48, 49, 17, 50, 51, 18, 50, 51, 19, 52, 53, 20, 52}, + {53, 21, 54, 55, 22, 54, 55, 23, 56, 57, 24, 56, 57, 25, 58, 59, + 26, 58, 59, 27, 60, 61, 28, 60, 61, 29, 62, 63, 30, 62, 63, 31}}; + +void NV21ToYUV24Row_AVX512(const uint8_t* src_y, + const uint8_t* src_vu, + uint8_t* dst_yuv24, + int width) { + asm volatile( + "sub %0,%1 \n" + "vmovdqa (%4),%%ymm4 \n" // 3 shuffler constants + "vmovdqa 32(%4),%%ymm5 \n" + "vmovdqa 64(%4),%%ymm6 \n" LABELALIGN + "1: \n" + "vmovdqu (%0),%%ymm2 \n" // load 32 Y values + "vmovdqu (%0,%1),%%ymm3 \n" // load 16 VU values + "lea 32(%0),%0 \n" + "vmovdqa %%ymm2, %%ymm0 \n" + "vmovdqa %%ymm2, %%ymm1 \n" + "vpermt2b %%ymm3,%%ymm4,%%ymm0 \n" + "vpermt2b %%ymm3,%%ymm5,%%ymm1 \n" + "vpermt2b %%ymm3,%%ymm6,%%ymm2 \n" + "vmovdqu %%ymm0,(%2) \n" + "vmovdqu %%ymm1,32(%2) \n" + "vmovdqu %%ymm2,64(%2) \n" + "lea 96(%2),%2 \n" + "sub $32,%3 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_y), // %0 + "+r"(src_vu), // %1 + "+r"(dst_yuv24), // %2 + "+r"(width) // %3 + : "r"(&kYUV24Perm[0]) // %4 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6"); +} + +#endif // HAS_NV21ToYUV24ROW_AVX512 #ifdef HAS_SWAPUVROW_SSSE3 diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_lasx.cc b/TMessagesProj/jni/third_party/libyuv/source/row_lasx.cc new file mode 100644 index 0000000000..f824906d3f --- /dev/null +++ b/TMessagesProj/jni/third_party/libyuv/source/row_lasx.cc @@ -0,0 +1,2302 @@ +/* + * Copyright 2022 The LibYuv Project Authors. All rights reserved. + * + * Copyright (c) 2022 Loongson Technology Corporation Limited + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "libyuv/row.h" + +#if !defined(LIBYUV_DISABLE_LASX) && defined(__loongarch_asx) +#include "libyuv/loongson_intrinsics.h" + +#ifdef __cplusplus +namespace libyuv { +extern "C" { +#endif + +#define ALPHA_VAL (-1) + +// Fill YUV -> RGB conversion constants into vectors +#define YUVTORGB_SETUP(yuvconst, ub, vr, ug, vg, yg, yb) \ + { \ + ub = __lasx_xvreplgr2vr_h(yuvconst->kUVToB[0]); \ + vr = __lasx_xvreplgr2vr_h(yuvconst->kUVToR[1]); \ + ug = __lasx_xvreplgr2vr_h(yuvconst->kUVToG[0]); \ + vg = __lasx_xvreplgr2vr_h(yuvconst->kUVToG[1]); \ + yg = __lasx_xvreplgr2vr_h(yuvconst->kYToRgb[0]); \ + yb = __lasx_xvreplgr2vr_w(yuvconst->kYBiasToRgb[0]); \ + } + +// Load 32 YUV422 pixel data +#define READYUV422_D(psrc_y, psrc_u, psrc_v, out_y, uv_l, uv_h) \ + { \ + __m256i temp0, temp1; \ + \ + DUP2_ARG2(__lasx_xvld, psrc_y, 0, psrc_u, 0, out_y, temp0); \ + temp1 = __lasx_xvld(psrc_v, 0); \ + temp0 = __lasx_xvsub_b(temp0, const_0x80); \ + temp1 = __lasx_xvsub_b(temp1, const_0x80); \ + temp0 = __lasx_vext2xv_h_b(temp0); \ + temp1 = __lasx_vext2xv_h_b(temp1); \ + uv_l = __lasx_xvilvl_h(temp0, temp1); \ + uv_h = __lasx_xvilvh_h(temp0, temp1); \ + } + +// Load 16 YUV422 pixel data +#define READYUV422(psrc_y, psrc_u, psrc_v, out_y, uv) \ + { \ + __m256i temp0, temp1; \ + \ + out_y = __lasx_xvld(psrc_y, 0); \ + temp0 = __lasx_xvldrepl_d(psrc_u, 0); \ + temp1 = __lasx_xvldrepl_d(psrc_v, 0); \ + uv = __lasx_xvilvl_b(temp0, temp1); \ + uv = __lasx_xvsub_b(uv, const_0x80); \ + uv = __lasx_vext2xv_h_b(uv); \ + } + +// Convert 16 pixels of YUV420 to RGB. +#define YUVTORGB_D(in_y, in_uvl, in_uvh, ubvr, ugvg, yg, yb, b_l, b_h, g_l, \ + g_h, r_l, r_h) \ + { \ + __m256i u_l, u_h, v_l, v_h; \ + __m256i yl_ev, yl_od, yh_ev, yh_od; \ + __m256i temp0, temp1, temp2, temp3; \ + \ + temp0 = __lasx_xvilvl_b(in_y, in_y); \ + temp1 = __lasx_xvilvh_b(in_y, in_y); \ + yl_ev = __lasx_xvmulwev_w_hu_h(temp0, yg); \ + yl_od = __lasx_xvmulwod_w_hu_h(temp0, yg); \ + yh_ev = __lasx_xvmulwev_w_hu_h(temp1, yg); \ + yh_od = __lasx_xvmulwod_w_hu_h(temp1, yg); \ + DUP4_ARG2(__lasx_xvsrai_w, yl_ev, 16, yl_od, 16, yh_ev, 16, yh_od, 16, \ + yl_ev, yl_od, yh_ev, yh_od); \ + yl_ev = __lasx_xvadd_w(yl_ev, yb); \ + yl_od = __lasx_xvadd_w(yl_od, yb); \ + yh_ev = __lasx_xvadd_w(yh_ev, yb); \ + yh_od = __lasx_xvadd_w(yh_od, yb); \ + v_l = __lasx_xvmulwev_w_h(in_uvl, ubvr); \ + u_l = __lasx_xvmulwod_w_h(in_uvl, ubvr); \ + v_h = __lasx_xvmulwev_w_h(in_uvh, ubvr); \ + u_h = __lasx_xvmulwod_w_h(in_uvh, ubvr); \ + temp0 = __lasx_xvadd_w(yl_ev, u_l); \ + temp1 = __lasx_xvadd_w(yl_od, u_l); \ + temp2 = __lasx_xvadd_w(yh_ev, u_h); \ + temp3 = __lasx_xvadd_w(yh_od, u_h); \ + DUP4_ARG2(__lasx_xvsrai_w, temp0, 6, temp1, 6, temp2, 6, temp3, 6, temp0, \ + temp1, temp2, temp3); \ + DUP4_ARG1(__lasx_xvclip255_w, temp0, temp1, temp2, temp3, temp0, temp1, \ + temp2, temp3); \ + b_l = __lasx_xvpackev_h(temp1, temp0); \ + b_h = __lasx_xvpackev_h(temp3, temp2); \ + temp0 = __lasx_xvadd_w(yl_ev, v_l); \ + temp1 = __lasx_xvadd_w(yl_od, v_l); \ + temp2 = __lasx_xvadd_w(yh_ev, v_h); \ + temp3 = __lasx_xvadd_w(yh_od, v_h); \ + DUP4_ARG2(__lasx_xvsrai_w, temp0, 6, temp1, 6, temp2, 6, temp3, 6, temp0, \ + temp1, temp2, temp3); \ + DUP4_ARG1(__lasx_xvclip255_w, temp0, temp1, temp2, temp3, temp0, temp1, \ + temp2, temp3); \ + r_l = __lasx_xvpackev_h(temp1, temp0); \ + r_h = __lasx_xvpackev_h(temp3, temp2); \ + DUP2_ARG2(__lasx_xvdp2_w_h, in_uvl, ugvg, in_uvh, ugvg, u_l, u_h); \ + temp0 = __lasx_xvsub_w(yl_ev, u_l); \ + temp1 = __lasx_xvsub_w(yl_od, u_l); \ + temp2 
= __lasx_xvsub_w(yh_ev, u_h); \ + temp3 = __lasx_xvsub_w(yh_od, u_h); \ + DUP4_ARG2(__lasx_xvsrai_w, temp0, 6, temp1, 6, temp2, 6, temp3, 6, temp0, \ + temp1, temp2, temp3); \ + DUP4_ARG1(__lasx_xvclip255_w, temp0, temp1, temp2, temp3, temp0, temp1, \ + temp2, temp3); \ + g_l = __lasx_xvpackev_h(temp1, temp0); \ + g_h = __lasx_xvpackev_h(temp3, temp2); \ + } + +// Convert 8 pixels of YUV420 to RGB. +#define YUVTORGB(in_y, in_uv, ubvr, ugvg, yg, yb, out_b, out_g, out_r) \ + { \ + __m256i u_l, v_l, yl_ev, yl_od; \ + __m256i temp0, temp1; \ + \ + in_y = __lasx_xvpermi_d(in_y, 0xD8); \ + temp0 = __lasx_xvilvl_b(in_y, in_y); \ + yl_ev = __lasx_xvmulwev_w_hu_h(temp0, yg); \ + yl_od = __lasx_xvmulwod_w_hu_h(temp0, yg); \ + DUP2_ARG2(__lasx_xvsrai_w, yl_ev, 16, yl_od, 16, yl_ev, yl_od); \ + yl_ev = __lasx_xvadd_w(yl_ev, yb); \ + yl_od = __lasx_xvadd_w(yl_od, yb); \ + v_l = __lasx_xvmulwev_w_h(in_uv, ubvr); \ + u_l = __lasx_xvmulwod_w_h(in_uv, ubvr); \ + temp0 = __lasx_xvadd_w(yl_ev, u_l); \ + temp1 = __lasx_xvadd_w(yl_od, u_l); \ + DUP2_ARG2(__lasx_xvsrai_w, temp0, 6, temp1, 6, temp0, temp1); \ + DUP2_ARG1(__lasx_xvclip255_w, temp0, temp1, temp0, temp1); \ + out_b = __lasx_xvpackev_h(temp1, temp0); \ + temp0 = __lasx_xvadd_w(yl_ev, v_l); \ + temp1 = __lasx_xvadd_w(yl_od, v_l); \ + DUP2_ARG2(__lasx_xvsrai_w, temp0, 6, temp1, 6, temp0, temp1); \ + DUP2_ARG1(__lasx_xvclip255_w, temp0, temp1, temp0, temp1); \ + out_r = __lasx_xvpackev_h(temp1, temp0); \ + u_l = __lasx_xvdp2_w_h(in_uv, ugvg); \ + temp0 = __lasx_xvsub_w(yl_ev, u_l); \ + temp1 = __lasx_xvsub_w(yl_od, u_l); \ + DUP2_ARG2(__lasx_xvsrai_w, temp0, 6, temp1, 6, temp0, temp1); \ + DUP2_ARG1(__lasx_xvclip255_w, temp0, temp1, temp0, temp1); \ + out_g = __lasx_xvpackev_h(temp1, temp0); \ + } + +// Pack and Store 16 ARGB values. +#define STOREARGB_D(a_l, a_h, r_l, r_h, g_l, g_h, b_l, b_h, pdst_argb) \ + { \ + __m256i temp0, temp1, temp2, temp3; \ + \ + temp0 = __lasx_xvpackev_b(g_l, b_l); \ + temp1 = __lasx_xvpackev_b(a_l, r_l); \ + temp2 = __lasx_xvpackev_b(g_h, b_h); \ + temp3 = __lasx_xvpackev_b(a_h, r_h); \ + r_l = __lasx_xvilvl_h(temp1, temp0); \ + r_h = __lasx_xvilvh_h(temp1, temp0); \ + g_l = __lasx_xvilvl_h(temp3, temp2); \ + g_h = __lasx_xvilvh_h(temp3, temp2); \ + temp0 = __lasx_xvpermi_q(r_h, r_l, 0x20); \ + temp1 = __lasx_xvpermi_q(g_h, g_l, 0x20); \ + temp2 = __lasx_xvpermi_q(r_h, r_l, 0x31); \ + temp3 = __lasx_xvpermi_q(g_h, g_l, 0x31); \ + __lasx_xvst(temp0, pdst_argb, 0); \ + __lasx_xvst(temp1, pdst_argb, 32); \ + __lasx_xvst(temp2, pdst_argb, 64); \ + __lasx_xvst(temp3, pdst_argb, 96); \ + pdst_argb += 128; \ + } + +// Pack and Store 8 ARGB values. 
+#define STOREARGB(in_a, in_r, in_g, in_b, pdst_argb) \ + { \ + __m256i temp0, temp1, temp2, temp3; \ + \ + temp0 = __lasx_xvpackev_b(in_g, in_b); \ + temp1 = __lasx_xvpackev_b(in_a, in_r); \ + temp2 = __lasx_xvilvl_h(temp1, temp0); \ + temp3 = __lasx_xvilvh_h(temp1, temp0); \ + temp0 = __lasx_xvpermi_q(temp3, temp2, 0x20); \ + temp1 = __lasx_xvpermi_q(temp3, temp2, 0x31); \ + __lasx_xvst(temp0, pdst_argb, 0); \ + __lasx_xvst(temp1, pdst_argb, 32); \ + pdst_argb += 64; \ + } + +#define RGBTOUV(_tmpb, _tmpg, _tmpr, _nexb, _nexg, _nexr, _reg0, _reg1) \ + { \ + __m256i _tmp0, _tmp1, _tmp2, _tmp3; \ + _tmp0 = __lasx_xvaddwev_h_bu(_tmpb, _nexb); \ + _tmp1 = __lasx_xvaddwod_h_bu(_tmpb, _nexb); \ + _tmp2 = __lasx_xvaddwev_h_bu(_tmpg, _nexg); \ + _tmp3 = __lasx_xvaddwod_h_bu(_tmpg, _nexg); \ + _reg0 = __lasx_xvaddwev_h_bu(_tmpr, _nexr); \ + _reg1 = __lasx_xvaddwod_h_bu(_tmpr, _nexr); \ + _tmpb = __lasx_xvavgr_hu(_tmp0, _tmp1); \ + _tmpg = __lasx_xvavgr_hu(_tmp2, _tmp3); \ + _tmpr = __lasx_xvavgr_hu(_reg0, _reg1); \ + _reg0 = __lasx_xvmadd_h(const_8080, const_112, _tmpb); \ + _reg1 = __lasx_xvmadd_h(const_8080, const_112, _tmpr); \ + _reg0 = __lasx_xvmsub_h(_reg0, const_74, _tmpg); \ + _reg1 = __lasx_xvmsub_h(_reg1, const_94, _tmpg); \ + _reg0 = __lasx_xvmsub_h(_reg0, const_38, _tmpr); \ + _reg1 = __lasx_xvmsub_h(_reg1, const_18, _tmpb); \ + } + +void MirrorRow_LASX(const uint8_t* src, uint8_t* dst, int width) { + int x; + int len = width / 64; + __m256i src0, src1; + __m256i shuffler = {0x08090A0B0C0D0E0F, 0x0001020304050607, + 0x08090A0B0C0D0E0F, 0x0001020304050607}; + src += width - 64; + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src, 0, src, 32, src0, src1); + DUP2_ARG3(__lasx_xvshuf_b, src0, src0, shuffler, src1, src1, shuffler, src0, + src1); + src0 = __lasx_xvpermi_q(src0, src0, 0x01); + src1 = __lasx_xvpermi_q(src1, src1, 0x01); + __lasx_xvst(src1, dst, 0); + __lasx_xvst(src0, dst, 32); + dst += 64; + src -= 64; + } +} + +void MirrorUVRow_LASX(const uint8_t* src_uv, uint8_t* dst_uv, int width) { + int x; + int len = width / 16; + __m256i src, dst; + __m256i shuffler = {0x0004000500060007, 0x0000000100020003, + 0x0004000500060007, 0x0000000100020003}; + + src_uv += (width - 16) << 1; + for (x = 0; x < len; x++) { + src = __lasx_xvld(src_uv, 0); + dst = __lasx_xvshuf_h(shuffler, src, src); + dst = __lasx_xvpermi_q(dst, dst, 0x01); + __lasx_xvst(dst, dst_uv, 0); + src_uv -= 32; + dst_uv += 32; + } +} + +void ARGBMirrorRow_LASX(const uint8_t* src, uint8_t* dst, int width) { + int x; + int len = width / 16; + __m256i src0, src1; + __m256i dst0, dst1; + __m256i shuffler = {0x0B0A09080F0E0D0C, 0x0302010007060504, + 0x0B0A09080F0E0D0C, 0x0302010007060504}; + src += (width * 4) - 64; + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src, 0, src, 32, src0, src1); + DUP2_ARG3(__lasx_xvshuf_b, src0, src0, shuffler, src1, src1, shuffler, src0, + src1); + dst1 = __lasx_xvpermi_q(src0, src0, 0x01); + dst0 = __lasx_xvpermi_q(src1, src1, 0x01); + __lasx_xvst(dst0, dst, 0); + __lasx_xvst(dst1, dst, 32); + dst += 64; + src -= 64; + } +} + +void I422ToYUY2Row_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_yuy2, + int width) { + int x; + int len = width / 32; + __m256i src_u0, src_v0, src_y0, vec_uv0; + __m256i vec_yuy2_0, vec_yuy2_1; + __m256i dst_yuy2_0, dst_yuy2_1; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_u, 0, src_v, 0, src_u0, src_v0); + src_y0 = __lasx_xvld(src_y, 0); + src_u0 = __lasx_xvpermi_d(src_u0, 0xD8); + src_v0 = 
__lasx_xvpermi_d(src_v0, 0xD8); + vec_uv0 = __lasx_xvilvl_b(src_v0, src_u0); + vec_yuy2_0 = __lasx_xvilvl_b(vec_uv0, src_y0); + vec_yuy2_1 = __lasx_xvilvh_b(vec_uv0, src_y0); + dst_yuy2_0 = __lasx_xvpermi_q(vec_yuy2_1, vec_yuy2_0, 0x20); + dst_yuy2_1 = __lasx_xvpermi_q(vec_yuy2_1, vec_yuy2_0, 0x31); + __lasx_xvst(dst_yuy2_0, dst_yuy2, 0); + __lasx_xvst(dst_yuy2_1, dst_yuy2, 32); + src_u += 16; + src_v += 16; + src_y += 32; + dst_yuy2 += 64; + } +} + +void I422ToUYVYRow_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_uyvy, + int width) { + int x; + int len = width / 32; + __m256i src_u0, src_v0, src_y0, vec_uv0; + __m256i vec_uyvy0, vec_uyvy1; + __m256i dst_uyvy0, dst_uyvy1; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_u, 0, src_v, 0, src_u0, src_v0); + src_y0 = __lasx_xvld(src_y, 0); + src_u0 = __lasx_xvpermi_d(src_u0, 0xD8); + src_v0 = __lasx_xvpermi_d(src_v0, 0xD8); + vec_uv0 = __lasx_xvilvl_b(src_v0, src_u0); + vec_uyvy0 = __lasx_xvilvl_b(src_y0, vec_uv0); + vec_uyvy1 = __lasx_xvilvh_b(src_y0, vec_uv0); + dst_uyvy0 = __lasx_xvpermi_q(vec_uyvy1, vec_uyvy0, 0x20); + dst_uyvy1 = __lasx_xvpermi_q(vec_uyvy1, vec_uyvy0, 0x31); + __lasx_xvst(dst_uyvy0, dst_uyvy, 0); + __lasx_xvst(dst_uyvy1, dst_uyvy, 32); + src_u += 16; + src_v += 16; + src_y += 32; + dst_uyvy += 64; + } +} + +void I422ToARGBRow_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 32; + __m256i vec_yb, vec_yg, vec_ub, vec_ug, vec_vr, vec_vg; + __m256i vec_ubvr, vec_ugvg; + __m256i alpha = __lasx_xvldi(0xFF); + __m256i const_0x80 = __lasx_xvldi(0x80); + + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); + vec_ubvr = __lasx_xvilvl_h(vec_ub, vec_vr); + vec_ugvg = __lasx_xvilvl_h(vec_ug, vec_vg); + + for (x = 0; x < len; x++) { + __m256i y, uv_l, uv_h, b_l, b_h, g_l, g_h, r_l, r_h; + + READYUV422_D(src_y, src_u, src_v, y, uv_l, uv_h); + YUVTORGB_D(y, uv_l, uv_h, vec_ubvr, vec_ugvg, vec_yg, vec_yb, b_l, b_h, g_l, + g_h, r_l, r_h); + STOREARGB_D(alpha, alpha, r_l, r_h, g_l, g_h, b_l, b_h, dst_argb); + src_y += 32; + src_u += 16; + src_v += 16; + } +} + +void I422ToRGBARow_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 32; + __m256i vec_yb, vec_yg, vec_ub, vec_vr, vec_ug, vec_vg; + __m256i vec_ubvr, vec_ugvg; + __m256i alpha = __lasx_xvldi(0xFF); + __m256i const_0x80 = __lasx_xvldi(0x80); + + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); + vec_ubvr = __lasx_xvilvl_h(vec_ub, vec_vr); + vec_ugvg = __lasx_xvilvl_h(vec_ug, vec_vg); + + for (x = 0; x < len; x++) { + __m256i y, uv_l, uv_h, b_l, b_h, g_l, g_h, r_l, r_h; + + READYUV422_D(src_y, src_u, src_v, y, uv_l, uv_h); + YUVTORGB_D(y, uv_l, uv_h, vec_ubvr, vec_ugvg, vec_yg, vec_yb, b_l, b_h, g_l, + g_h, r_l, r_h); + STOREARGB_D(r_l, r_h, g_l, g_h, b_l, b_h, alpha, alpha, dst_argb); + src_y += 32; + src_u += 16; + src_v += 16; + } +} + +void I422AlphaToARGBRow_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + const uint8_t* src_a, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 32; + int res = width & 31; + __m256i vec_yb, vec_yg, vec_ub, vec_vr, vec_ug, vec_vg; + __m256i vec_ubvr, vec_ugvg; + __m256i zero = __lasx_xvldi(0); 
+ __m256i const_0x80 = __lasx_xvldi(0x80); + + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); + vec_ubvr = __lasx_xvilvl_h(vec_ub, vec_vr); + vec_ugvg = __lasx_xvilvl_h(vec_ug, vec_vg); + + for (x = 0; x < len; x++) { + __m256i y, uv_l, uv_h, b_l, b_h, g_l, g_h, r_l, r_h, a_l, a_h; + + y = __lasx_xvld(src_a, 0); + a_l = __lasx_xvilvl_b(zero, y); + a_h = __lasx_xvilvh_b(zero, y); + READYUV422_D(src_y, src_u, src_v, y, uv_l, uv_h); + YUVTORGB_D(y, uv_l, uv_h, vec_ubvr, vec_ugvg, vec_yg, vec_yb, b_l, b_h, g_l, + g_h, r_l, r_h); + STOREARGB_D(a_l, a_h, r_l, r_h, g_l, g_h, b_l, b_h, dst_argb); + src_y += 32; + src_u += 16; + src_v += 16; + src_a += 32; + } + if (res) { + __m256i y, uv, r, g, b, a; + a = __lasx_xvld(src_a, 0); + a = __lasx_vext2xv_hu_bu(a); + READYUV422(src_y, src_u, src_v, y, uv); + YUVTORGB(y, uv, vec_ubvr, vec_ugvg, vec_yg, vec_yb, b, g, r); + STOREARGB(a, r, g, b, dst_argb); + } +} + +void I422ToRGB24Row_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int32_t width) { + int x; + int len = width / 32; + __m256i vec_yb, vec_yg, vec_ub, vec_vr, vec_ug, vec_vg; + __m256i vec_ubvr, vec_ugvg; + __m256i const_0x80 = __lasx_xvldi(0x80); + __m256i shuffler0 = {0x0504120302100100, 0x0A18090816070614, + 0x0504120302100100, 0x0A18090816070614}; + __m256i shuffler1 = {0x1E0F0E1C0D0C1A0B, 0x1E0F0E1C0D0C1A0B, + 0x1E0F0E1C0D0C1A0B, 0x1E0F0E1C0D0C1A0B}; + + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); + vec_ubvr = __lasx_xvilvl_h(vec_ub, vec_vr); + vec_ugvg = __lasx_xvilvl_h(vec_ug, vec_vg); + + for (x = 0; x < len; x++) { + __m256i y, uv_l, uv_h, b_l, b_h, g_l, g_h, r_l, r_h; + __m256i temp0, temp1, temp2, temp3; + + READYUV422_D(src_y, src_u, src_v, y, uv_l, uv_h); + YUVTORGB_D(y, uv_l, uv_h, vec_ubvr, vec_ugvg, vec_yg, vec_yb, b_l, b_h, g_l, + g_h, r_l, r_h); + temp0 = __lasx_xvpackev_b(g_l, b_l); + temp1 = __lasx_xvpackev_b(g_h, b_h); + DUP4_ARG3(__lasx_xvshuf_b, r_l, temp0, shuffler1, r_h, temp1, shuffler1, + r_l, temp0, shuffler0, r_h, temp1, shuffler0, temp2, temp3, temp0, + temp1); + + b_l = __lasx_xvilvl_d(temp1, temp2); + b_h = __lasx_xvilvh_d(temp3, temp1); + temp1 = __lasx_xvpermi_q(b_l, temp0, 0x20); + temp2 = __lasx_xvpermi_q(temp0, b_h, 0x30); + temp3 = __lasx_xvpermi_q(b_h, b_l, 0x31); + __lasx_xvst(temp1, dst_argb, 0); + __lasx_xvst(temp2, dst_argb, 32); + __lasx_xvst(temp3, dst_argb, 64); + dst_argb += 96; + src_y += 32; + src_u += 16; + src_v += 16; + } +} + +// TODO(fbarchard): Consider AND instead of shift to isolate 5 upper bits of R. 
+void I422ToRGB565Row_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_rgb565, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 32; + __m256i vec_yb, vec_yg, vec_ub, vec_vr, vec_ug, vec_vg; + __m256i vec_ubvr, vec_ugvg; + __m256i const_0x80 = __lasx_xvldi(0x80); + + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); + vec_ubvr = __lasx_xvilvl_h(vec_ub, vec_vr); + vec_ugvg = __lasx_xvilvl_h(vec_ug, vec_vg); + + for (x = 0; x < len; x++) { + __m256i y, uv_l, uv_h, b_l, b_h, g_l, g_h, r_l, r_h; + __m256i dst_l, dst_h; + + READYUV422_D(src_y, src_u, src_v, y, uv_l, uv_h); + YUVTORGB_D(y, uv_l, uv_h, vec_ubvr, vec_ugvg, vec_yg, vec_yb, b_l, b_h, g_l, + g_h, r_l, r_h); + b_l = __lasx_xvsrli_h(b_l, 3); + b_h = __lasx_xvsrli_h(b_h, 3); + g_l = __lasx_xvsrli_h(g_l, 2); + g_h = __lasx_xvsrli_h(g_h, 2); + r_l = __lasx_xvsrli_h(r_l, 3); + r_h = __lasx_xvsrli_h(r_h, 3); + r_l = __lasx_xvslli_h(r_l, 11); + r_h = __lasx_xvslli_h(r_h, 11); + g_l = __lasx_xvslli_h(g_l, 5); + g_h = __lasx_xvslli_h(g_h, 5); + r_l = __lasx_xvor_v(r_l, g_l); + r_l = __lasx_xvor_v(r_l, b_l); + r_h = __lasx_xvor_v(r_h, g_h); + r_h = __lasx_xvor_v(r_h, b_h); + dst_l = __lasx_xvpermi_q(r_h, r_l, 0x20); + dst_h = __lasx_xvpermi_q(r_h, r_l, 0x31); + __lasx_xvst(dst_l, dst_rgb565, 0); + __lasx_xvst(dst_h, dst_rgb565, 32); + dst_rgb565 += 64; + src_y += 32; + src_u += 16; + src_v += 16; + } +} + +// TODO(fbarchard): Consider AND instead of shift to isolate 4 upper bits of G. +void I422ToARGB4444Row_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_argb4444, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 32; + __m256i vec_yb, vec_yg, vec_ub, vec_vr, vec_ug, vec_vg; + __m256i vec_ubvr, vec_ugvg; + __m256i const_0x80 = __lasx_xvldi(0x80); + __m256i alpha = {0xF000F000F000F000, 0xF000F000F000F000, 0xF000F000F000F000, + 0xF000F000F000F000}; + __m256i mask = {0x00F000F000F000F0, 0x00F000F000F000F0, 0x00F000F000F000F0, + 0x00F000F000F000F0}; + + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); + vec_ubvr = __lasx_xvilvl_h(vec_ub, vec_vr); + vec_ugvg = __lasx_xvilvl_h(vec_ug, vec_vg); + + for (x = 0; x < len; x++) { + __m256i y, uv_l, uv_h, b_l, b_h, g_l, g_h, r_l, r_h; + __m256i dst_l, dst_h; + + READYUV422_D(src_y, src_u, src_v, y, uv_l, uv_h); + YUVTORGB_D(y, uv_l, uv_h, vec_ubvr, vec_ugvg, vec_yg, vec_yb, b_l, b_h, g_l, + g_h, r_l, r_h); + b_l = __lasx_xvsrli_h(b_l, 4); + b_h = __lasx_xvsrli_h(b_h, 4); + r_l = __lasx_xvsrli_h(r_l, 4); + r_h = __lasx_xvsrli_h(r_h, 4); + g_l = __lasx_xvand_v(g_l, mask); + g_h = __lasx_xvand_v(g_h, mask); + r_l = __lasx_xvslli_h(r_l, 8); + r_h = __lasx_xvslli_h(r_h, 8); + r_l = __lasx_xvor_v(r_l, alpha); + r_h = __lasx_xvor_v(r_h, alpha); + r_l = __lasx_xvor_v(r_l, g_l); + r_h = __lasx_xvor_v(r_h, g_h); + r_l = __lasx_xvor_v(r_l, b_l); + r_h = __lasx_xvor_v(r_h, b_h); + dst_l = __lasx_xvpermi_q(r_h, r_l, 0x20); + dst_h = __lasx_xvpermi_q(r_h, r_l, 0x31); + __lasx_xvst(dst_l, dst_argb4444, 0); + __lasx_xvst(dst_h, dst_argb4444, 32); + dst_argb4444 += 64; + src_y += 32; + src_u += 16; + src_v += 16; + } +} + +void I422ToARGB1555Row_LASX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_argb1555, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 32; + __m256i vec_yb, vec_yg, vec_ub, vec_vr, vec_ug, vec_vg; + __m256i vec_ubvr, 
vec_ugvg; + __m256i const_0x80 = __lasx_xvldi(0x80); + __m256i alpha = {0x8000800080008000, 0x8000800080008000, 0x8000800080008000, + 0x8000800080008000}; + + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); + vec_ubvr = __lasx_xvilvl_h(vec_ub, vec_vr); + vec_ugvg = __lasx_xvilvl_h(vec_ug, vec_vg); + + for (x = 0; x < len; x++) { + __m256i y, uv_l, uv_h, b_l, b_h, g_l, g_h, r_l, r_h; + __m256i dst_l, dst_h; + + READYUV422_D(src_y, src_u, src_v, y, uv_l, uv_h); + YUVTORGB_D(y, uv_l, uv_h, vec_ubvr, vec_ugvg, vec_yg, vec_yb, b_l, b_h, g_l, + g_h, r_l, r_h); + b_l = __lasx_xvsrli_h(b_l, 3); + b_h = __lasx_xvsrli_h(b_h, 3); + g_l = __lasx_xvsrli_h(g_l, 3); + g_h = __lasx_xvsrli_h(g_h, 3); + g_l = __lasx_xvslli_h(g_l, 5); + g_h = __lasx_xvslli_h(g_h, 5); + r_l = __lasx_xvsrli_h(r_l, 3); + r_h = __lasx_xvsrli_h(r_h, 3); + r_l = __lasx_xvslli_h(r_l, 10); + r_h = __lasx_xvslli_h(r_h, 10); + r_l = __lasx_xvor_v(r_l, alpha); + r_h = __lasx_xvor_v(r_h, alpha); + r_l = __lasx_xvor_v(r_l, g_l); + r_h = __lasx_xvor_v(r_h, g_h); + r_l = __lasx_xvor_v(r_l, b_l); + r_h = __lasx_xvor_v(r_h, b_h); + dst_l = __lasx_xvpermi_q(r_h, r_l, 0x20); + dst_h = __lasx_xvpermi_q(r_h, r_l, 0x31); + __lasx_xvst(dst_l, dst_argb1555, 0); + __lasx_xvst(dst_h, dst_argb1555, 32); + dst_argb1555 += 64; + src_y += 32; + src_u += 16; + src_v += 16; + } +} + +void YUY2ToYRow_LASX(const uint8_t* src_yuy2, uint8_t* dst_y, int width) { + int x; + int len = width / 32; + __m256i src0, src1, dst0; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_yuy2, 0, src_yuy2, 32, src0, src1); + dst0 = __lasx_xvpickev_b(src1, src0); + dst0 = __lasx_xvpermi_d(dst0, 0xD8); + __lasx_xvst(dst0, dst_y, 0); + src_yuy2 += 64; + dst_y += 32; + } +} + +void YUY2ToUVRow_LASX(const uint8_t* src_yuy2, + int src_stride_yuy2, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + const uint8_t* src_yuy2_next = src_yuy2 + src_stride_yuy2; + int x; + int len = width / 32; + __m256i src0, src1, src2, src3; + __m256i tmp0, dst0, dst1; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lasx_xvld, src_yuy2, 0, src_yuy2, 32, src_yuy2_next, 0, + src_yuy2_next, 32, src0, src1, src2, src3); + src0 = __lasx_xvpickod_b(src1, src0); + src1 = __lasx_xvpickod_b(src3, src2); + tmp0 = __lasx_xvavgr_bu(src1, src0); + tmp0 = __lasx_xvpermi_d(tmp0, 0xD8); + dst0 = __lasx_xvpickev_b(tmp0, tmp0); + dst1 = __lasx_xvpickod_b(tmp0, tmp0); + __lasx_xvstelm_d(dst0, dst_u, 0, 0); + __lasx_xvstelm_d(dst0, dst_u, 8, 2); + __lasx_xvstelm_d(dst1, dst_v, 0, 0); + __lasx_xvstelm_d(dst1, dst_v, 8, 2); + src_yuy2 += 64; + src_yuy2_next += 64; + dst_u += 16; + dst_v += 16; + } +} + +void YUY2ToUV422Row_LASX(const uint8_t* src_yuy2, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + int len = width / 32; + __m256i src0, src1, tmp0, dst0, dst1; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_yuy2, 0, src_yuy2, 32, src0, src1); + tmp0 = __lasx_xvpickod_b(src1, src0); + tmp0 = __lasx_xvpermi_d(tmp0, 0xD8); + dst0 = __lasx_xvpickev_b(tmp0, tmp0); + dst1 = __lasx_xvpickod_b(tmp0, tmp0); + __lasx_xvstelm_d(dst0, dst_u, 0, 0); + __lasx_xvstelm_d(dst0, dst_u, 8, 2); + __lasx_xvstelm_d(dst1, dst_v, 0, 0); + __lasx_xvstelm_d(dst1, dst_v, 8, 2); + src_yuy2 += 64; + dst_u += 16; + dst_v += 16; + } +} + +void UYVYToYRow_LASX(const uint8_t* src_uyvy, uint8_t* dst_y, int width) { + int x; + int len = width / 32; + __m256i src0, src1, dst0; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_uyvy, 0, src_uyvy, 32, src0, src1); + dst0 = 
__lasx_xvpickod_b(src1, src0); + dst0 = __lasx_xvpermi_d(dst0, 0xD8); + __lasx_xvst(dst0, dst_y, 0); + src_uyvy += 64; + dst_y += 32; + } +} + +void UYVYToUVRow_LASX(const uint8_t* src_uyvy, + int src_stride_uyvy, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + const uint8_t* src_uyvy_next = src_uyvy + src_stride_uyvy; + int x; + int len = width / 32; + __m256i src0, src1, src2, src3, tmp0, dst0, dst1; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lasx_xvld, src_uyvy, 0, src_uyvy, 32, src_uyvy_next, 0, + src_uyvy_next, 32, src0, src1, src2, src3); + src0 = __lasx_xvpickev_b(src1, src0); + src1 = __lasx_xvpickev_b(src3, src2); + tmp0 = __lasx_xvavgr_bu(src1, src0); + tmp0 = __lasx_xvpermi_d(tmp0, 0xD8); + dst0 = __lasx_xvpickev_b(tmp0, tmp0); + dst1 = __lasx_xvpickod_b(tmp0, tmp0); + __lasx_xvstelm_d(dst0, dst_u, 0, 0); + __lasx_xvstelm_d(dst0, dst_u, 8, 2); + __lasx_xvstelm_d(dst1, dst_v, 0, 0); + __lasx_xvstelm_d(dst1, dst_v, 8, 2); + src_uyvy += 64; + src_uyvy_next += 64; + dst_u += 16; + dst_v += 16; + } +} + +void UYVYToUV422Row_LASX(const uint8_t* src_uyvy, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + int len = width / 32; + __m256i src0, src1, tmp0, dst0, dst1; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_uyvy, 0, src_uyvy, 32, src0, src1); + tmp0 = __lasx_xvpickev_b(src1, src0); + tmp0 = __lasx_xvpermi_d(tmp0, 0xD8); + dst0 = __lasx_xvpickev_b(tmp0, tmp0); + dst1 = __lasx_xvpickod_b(tmp0, tmp0); + __lasx_xvstelm_d(dst0, dst_u, 0, 0); + __lasx_xvstelm_d(dst0, dst_u, 8, 2); + __lasx_xvstelm_d(dst1, dst_v, 0, 0); + __lasx_xvstelm_d(dst1, dst_v, 8, 2); + src_uyvy += 64; + dst_u += 16; + dst_v += 16; + } +} + +void ARGBToUVRow_LASX(const uint8_t* src_argb0, + int src_stride_argb, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + int len = width / 32; + const uint8_t* src_argb1 = src_argb0 + src_stride_argb; + + __m256i src0, src1, src2, src3, src4, src5, src6, src7; + __m256i vec0, vec1, vec2, vec3; + __m256i tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, dst0, dst1; + __m256i const_0x70 = {0x0038003800380038, 0x0038003800380038, + 0x0038003800380038, 0x0038003800380038}; + __m256i const_0x4A = {0x0025002500250025, 0x0025002500250025, + 0x0025002500250025, 0x0025002500250025}; + __m256i const_0x26 = {0x0013001300130013, 0x0013001300130013, + 0x0013001300130013, 0x0013001300130013}; + __m256i const_0x5E = {0x002f002f002f002f, 0x002f002f002f002f, + 0x002f002f002f002f, 0x002f002f002f002f}; + __m256i const_0x12 = {0x0009000900090009, 0x0009000900090009, + 0x0009000900090009, 0x0009000900090009}; + __m256i control = {0x0000000400000000, 0x0000000500000001, 0x0000000600000002, + 0x0000000700000003}; + __m256i const_0x8080 = {0x8080808080808080, 0x8080808080808080, + 0x8080808080808080, 0x8080808080808080}; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lasx_xvld, src_argb0, 0, src_argb0, 32, src_argb0, 64, + src_argb0, 96, src0, src1, src2, src3); + DUP4_ARG2(__lasx_xvld, src_argb1, 0, src_argb1, 32, src_argb1, 64, + src_argb1, 96, src4, src5, src6, src7); + vec0 = __lasx_xvaddwev_h_bu(src0, src4); + vec1 = __lasx_xvaddwev_h_bu(src1, src5); + vec2 = __lasx_xvaddwev_h_bu(src2, src6); + vec3 = __lasx_xvaddwev_h_bu(src3, src7); + tmp0 = __lasx_xvpickev_h(vec1, vec0); + tmp1 = __lasx_xvpickev_h(vec3, vec2); + tmp2 = __lasx_xvpickod_h(vec1, vec0); + tmp3 = __lasx_xvpickod_h(vec3, vec2); + vec0 = __lasx_xvaddwod_h_bu(src0, src4); + vec1 = __lasx_xvaddwod_h_bu(src1, src5); + vec2 = __lasx_xvaddwod_h_bu(src2, src6); + vec3 = __lasx_xvaddwod_h_bu(src3, src7); + 
tmp4 = __lasx_xvpickev_h(vec1, vec0); + tmp5 = __lasx_xvpickev_h(vec3, vec2); + vec0 = __lasx_xvpickev_h(tmp1, tmp0); + vec1 = __lasx_xvpickod_h(tmp1, tmp0); + src0 = __lasx_xvavgr_h(vec0, vec1); + vec0 = __lasx_xvpickev_h(tmp3, tmp2); + vec1 = __lasx_xvpickod_h(tmp3, tmp2); + src1 = __lasx_xvavgr_h(vec0, vec1); + vec0 = __lasx_xvpickev_h(tmp5, tmp4); + vec1 = __lasx_xvpickod_h(tmp5, tmp4); + src2 = __lasx_xvavgr_h(vec0, vec1); + dst0 = __lasx_xvmadd_h(const_0x8080, src0, const_0x70); + dst0 = __lasx_xvmsub_h(dst0, src2, const_0x4A); + dst0 = __lasx_xvmsub_h(dst0, src1, const_0x26); + dst1 = __lasx_xvmadd_h(const_0x8080, src1, const_0x70); + dst1 = __lasx_xvmsub_h(dst1, src2, const_0x5E); + dst1 = __lasx_xvmsub_h(dst1, src0, const_0x12); + dst0 = __lasx_xvperm_w(dst0, control); + dst1 = __lasx_xvperm_w(dst1, control); + dst0 = __lasx_xvssrani_b_h(dst0, dst0, 8); + dst1 = __lasx_xvssrani_b_h(dst1, dst1, 8); + __lasx_xvstelm_d(dst0, dst_u, 0, 0); + __lasx_xvstelm_d(dst0, dst_u, 8, 2); + __lasx_xvstelm_d(dst1, dst_v, 0, 0); + __lasx_xvstelm_d(dst1, dst_v, 8, 2); + src_argb0 += 128; + src_argb1 += 128; + dst_u += 16; + dst_v += 16; + } +} + +void ARGBToRGB24Row_LASX(const uint8_t* src_argb, uint8_t* dst_rgb, int width) { + int x; + int len = (width / 32) - 1; + __m256i src0, src1, src2, src3; + __m256i tmp0, tmp1, tmp2, tmp3; + __m256i shuf = {0x0908060504020100, 0x000000000E0D0C0A, 0x0908060504020100, + 0x000000000E0D0C0A}; + __m256i control = {0x0000000100000000, 0x0000000400000002, 0x0000000600000005, + 0x0000000700000003}; + for (x = 0; x < len; x++) { + DUP4_ARG2(__lasx_xvld, src_argb, 0, src_argb, 32, src_argb, 64, src_argb, + 96, src0, src1, src2, src3); + tmp0 = __lasx_xvshuf_b(src0, src0, shuf); + tmp1 = __lasx_xvshuf_b(src1, src1, shuf); + tmp2 = __lasx_xvshuf_b(src2, src2, shuf); + tmp3 = __lasx_xvshuf_b(src3, src3, shuf); + tmp0 = __lasx_xvperm_w(tmp0, control); + tmp1 = __lasx_xvperm_w(tmp1, control); + tmp2 = __lasx_xvperm_w(tmp2, control); + tmp3 = __lasx_xvperm_w(tmp3, control); + __lasx_xvst(tmp0, dst_rgb, 0); + __lasx_xvst(tmp1, dst_rgb, 24); + __lasx_xvst(tmp2, dst_rgb, 48); + __lasx_xvst(tmp3, dst_rgb, 72); + dst_rgb += 96; + src_argb += 128; + } + DUP4_ARG2(__lasx_xvld, src_argb, 0, src_argb, 32, src_argb, 64, src_argb, 96, + src0, src1, src2, src3); + tmp0 = __lasx_xvshuf_b(src0, src0, shuf); + tmp1 = __lasx_xvshuf_b(src1, src1, shuf); + tmp2 = __lasx_xvshuf_b(src2, src2, shuf); + tmp3 = __lasx_xvshuf_b(src3, src3, shuf); + tmp0 = __lasx_xvperm_w(tmp0, control); + tmp1 = __lasx_xvperm_w(tmp1, control); + tmp2 = __lasx_xvperm_w(tmp2, control); + tmp3 = __lasx_xvperm_w(tmp3, control); + __lasx_xvst(tmp0, dst_rgb, 0); + __lasx_xvst(tmp1, dst_rgb, 24); + __lasx_xvst(tmp2, dst_rgb, 48); + dst_rgb += 72; + __lasx_xvstelm_d(tmp3, dst_rgb, 0, 0); + __lasx_xvstelm_d(tmp3, dst_rgb, 8, 1); + __lasx_xvstelm_d(tmp3, dst_rgb, 16, 2); +} + +void ARGBToRAWRow_LASX(const uint8_t* src_argb, uint8_t* dst_rgb, int width) { + int x; + int len = (width / 32) - 1; + __m256i src0, src1, src2, src3; + __m256i tmp0, tmp1, tmp2, tmp3; + __m256i shuf = {0x090A040506000102, 0x000000000C0D0E08, 0x090A040506000102, + 0x000000000C0D0E08}; + __m256i control = {0x0000000100000000, 0x0000000400000002, 0x0000000600000005, + 0x0000000700000003}; + for (x = 0; x < len; x++) { + DUP4_ARG2(__lasx_xvld, src_argb, 0, src_argb, 32, src_argb, 64, src_argb, + 96, src0, src1, src2, src3); + tmp0 = __lasx_xvshuf_b(src0, src0, shuf); + tmp1 = __lasx_xvshuf_b(src1, src1, shuf); + tmp2 = __lasx_xvshuf_b(src2, src2, 
shuf); + tmp3 = __lasx_xvshuf_b(src3, src3, shuf); + tmp0 = __lasx_xvperm_w(tmp0, control); + tmp1 = __lasx_xvperm_w(tmp1, control); + tmp2 = __lasx_xvperm_w(tmp2, control); + tmp3 = __lasx_xvperm_w(tmp3, control); + __lasx_xvst(tmp0, dst_rgb, 0); + __lasx_xvst(tmp1, dst_rgb, 24); + __lasx_xvst(tmp2, dst_rgb, 48); + __lasx_xvst(tmp3, dst_rgb, 72); + dst_rgb += 96; + src_argb += 128; + } + DUP4_ARG2(__lasx_xvld, src_argb, 0, src_argb, 32, src_argb, 64, src_argb, 96, + src0, src1, src2, src3); + tmp0 = __lasx_xvshuf_b(src0, src0, shuf); + tmp1 = __lasx_xvshuf_b(src1, src1, shuf); + tmp2 = __lasx_xvshuf_b(src2, src2, shuf); + tmp3 = __lasx_xvshuf_b(src3, src3, shuf); + tmp0 = __lasx_xvperm_w(tmp0, control); + tmp1 = __lasx_xvperm_w(tmp1, control); + tmp2 = __lasx_xvperm_w(tmp2, control); + tmp3 = __lasx_xvperm_w(tmp3, control); + __lasx_xvst(tmp0, dst_rgb, 0); + __lasx_xvst(tmp1, dst_rgb, 24); + __lasx_xvst(tmp2, dst_rgb, 48); + dst_rgb += 72; + __lasx_xvstelm_d(tmp3, dst_rgb, 0, 0); + __lasx_xvstelm_d(tmp3, dst_rgb, 8, 1); + __lasx_xvstelm_d(tmp3, dst_rgb, 16, 2); +} + +void ARGBToRGB565Row_LASX(const uint8_t* src_argb, + uint8_t* dst_rgb, + int width) { + int x; + int len = width / 16; + __m256i zero = __lasx_xvldi(0); + __m256i src0, src1, tmp0, tmp1, dst0; + __m256i shift = {0x0300030003000300, 0x0300030003000300, 0x0300030003000300, + 0x0300030003000300}; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_argb, 0, src_argb, 32, src0, src1); + tmp0 = __lasx_xvpickev_b(src1, src0); + tmp1 = __lasx_xvpickod_b(src1, src0); + tmp0 = __lasx_xvsrli_b(tmp0, 3); + tmp1 = __lasx_xvpackev_b(zero, tmp1); + tmp1 = __lasx_xvsrli_h(tmp1, 2); + tmp0 = __lasx_xvsll_b(tmp0, shift); + tmp1 = __lasx_xvslli_h(tmp1, 5); + dst0 = __lasx_xvor_v(tmp0, tmp1); + dst0 = __lasx_xvpermi_d(dst0, 0xD8); + __lasx_xvst(dst0, dst_rgb, 0); + dst_rgb += 32; + src_argb += 64; + } +} + +void ARGBToARGB1555Row_LASX(const uint8_t* src_argb, + uint8_t* dst_rgb, + int width) { + int x; + int len = width / 16; + __m256i zero = __lasx_xvldi(0); + __m256i src0, src1, tmp0, tmp1, tmp2, tmp3, dst0; + __m256i shift1 = {0x0703070307030703, 0x0703070307030703, 0x0703070307030703, + 0x0703070307030703}; + __m256i shift2 = {0x0200020002000200, 0x0200020002000200, 0x0200020002000200, + 0x0200020002000200}; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_argb, 0, src_argb, 32, src0, src1); + tmp0 = __lasx_xvpickev_b(src1, src0); + tmp1 = __lasx_xvpickod_b(src1, src0); + tmp0 = __lasx_xvsrli_b(tmp0, 3); + tmp1 = __lasx_xvsrl_b(tmp1, shift1); + tmp0 = __lasx_xvsll_b(tmp0, shift2); + tmp2 = __lasx_xvpackev_b(zero, tmp1); + tmp3 = __lasx_xvpackod_b(zero, tmp1); + tmp2 = __lasx_xvslli_h(tmp2, 5); + tmp3 = __lasx_xvslli_h(tmp3, 15); + dst0 = __lasx_xvor_v(tmp0, tmp2); + dst0 = __lasx_xvor_v(dst0, tmp3); + dst0 = __lasx_xvpermi_d(dst0, 0xD8); + __lasx_xvst(dst0, dst_rgb, 0); + dst_rgb += 32; + src_argb += 64; + } +} + +void ARGBToARGB4444Row_LASX(const uint8_t* src_argb, + uint8_t* dst_rgb, + int width) { + int x; + int len = width / 16; + __m256i src0, src1, tmp0, tmp1, dst0; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_argb, 0, src_argb, 32, src0, src1); + tmp0 = __lasx_xvpickev_b(src1, src0); + tmp1 = __lasx_xvpickod_b(src1, src0); + tmp1 = __lasx_xvandi_b(tmp1, 0xF0); + tmp0 = __lasx_xvsrli_b(tmp0, 4); + dst0 = __lasx_xvor_v(tmp1, tmp0); + dst0 = __lasx_xvpermi_d(dst0, 0xD8); + __lasx_xvst(dst0, dst_rgb, 0); + dst_rgb += 32; + src_argb += 64; + } +} + +void ARGBToUV444Row_LASX(const uint8_t* src_argb, + 
uint8_t* dst_u, + uint8_t* dst_v, + int32_t width) { + int x; + int len = width / 32; + __m256i src0, src1, src2, src3; + __m256i tmp0, tmp1, tmp2, tmp3; + __m256i reg0, reg1, reg2, reg3, dst0, dst1; + __m256i const_112 = __lasx_xvldi(112); + __m256i const_74 = __lasx_xvldi(74); + __m256i const_38 = __lasx_xvldi(38); + __m256i const_94 = __lasx_xvldi(94); + __m256i const_18 = __lasx_xvldi(18); + __m256i const_0x8080 = {0x8080808080808080, 0x8080808080808080, + 0x8080808080808080, 0x8080808080808080}; + __m256i control = {0x0000000400000000, 0x0000000500000001, 0x0000000600000002, + 0x0000000700000003}; + for (x = 0; x < len; x++) { + DUP4_ARG2(__lasx_xvld, src_argb, 0, src_argb, 32, src_argb, 64, src_argb, + 96, src0, src1, src2, src3); + tmp0 = __lasx_xvpickev_h(src1, src0); + tmp1 = __lasx_xvpickod_h(src1, src0); + tmp2 = __lasx_xvpickev_h(src3, src2); + tmp3 = __lasx_xvpickod_h(src3, src2); + reg0 = __lasx_xvmaddwev_h_bu(const_0x8080, tmp0, const_112); + reg1 = __lasx_xvmaddwev_h_bu(const_0x8080, tmp2, const_112); + reg2 = __lasx_xvmulwod_h_bu(tmp0, const_74); + reg3 = __lasx_xvmulwod_h_bu(tmp2, const_74); + reg2 = __lasx_xvmaddwev_h_bu(reg2, tmp1, const_38); + reg3 = __lasx_xvmaddwev_h_bu(reg3, tmp3, const_38); + reg0 = __lasx_xvsub_h(reg0, reg2); + reg1 = __lasx_xvsub_h(reg1, reg3); + dst0 = __lasx_xvssrani_b_h(reg1, reg0, 8); + dst0 = __lasx_xvperm_w(dst0, control); + reg0 = __lasx_xvmaddwev_h_bu(const_0x8080, tmp1, const_112); + reg1 = __lasx_xvmaddwev_h_bu(const_0x8080, tmp3, const_112); + reg2 = __lasx_xvmulwev_h_bu(tmp0, const_18); + reg3 = __lasx_xvmulwev_h_bu(tmp2, const_18); + reg2 = __lasx_xvmaddwod_h_bu(reg2, tmp0, const_94); + reg3 = __lasx_xvmaddwod_h_bu(reg3, tmp2, const_94); + reg0 = __lasx_xvsub_h(reg0, reg2); + reg1 = __lasx_xvsub_h(reg1, reg3); + dst1 = __lasx_xvssrani_b_h(reg1, reg0, 8); + dst1 = __lasx_xvperm_w(dst1, control); + __lasx_xvst(dst0, dst_u, 0); + __lasx_xvst(dst1, dst_v, 0); + dst_u += 32; + dst_v += 32; + src_argb += 128; + } +} + +void ARGBMultiplyRow_LASX(const uint8_t* src_argb0, + const uint8_t* src_argb1, + uint8_t* dst_argb, + int width) { + int x; + int len = width / 8; + __m256i zero = __lasx_xvldi(0); + __m256i src0, src1, dst0, dst1; + __m256i tmp0, tmp1, tmp2, tmp3; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_argb0, 0, src_argb1, 0, src0, src1); + tmp0 = __lasx_xvilvl_b(src0, src0); + tmp1 = __lasx_xvilvh_b(src0, src0); + tmp2 = __lasx_xvilvl_b(zero, src1); + tmp3 = __lasx_xvilvh_b(zero, src1); + dst0 = __lasx_xvmuh_hu(tmp0, tmp2); + dst1 = __lasx_xvmuh_hu(tmp1, tmp3); + dst0 = __lasx_xvpickev_b(dst1, dst0); + __lasx_xvst(dst0, dst_argb, 0); + src_argb0 += 32; + src_argb1 += 32; + dst_argb += 32; + } +} + +void ARGBAddRow_LASX(const uint8_t* src_argb0, + const uint8_t* src_argb1, + uint8_t* dst_argb, + int width) { + int x; + int len = width / 8; + __m256i src0, src1, dst0; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_argb0, 0, src_argb1, 0, src0, src1); + dst0 = __lasx_xvsadd_bu(src0, src1); + __lasx_xvst(dst0, dst_argb, 0); + src_argb0 += 32; + src_argb1 += 32; + dst_argb += 32; + } +} + +void ARGBSubtractRow_LASX(const uint8_t* src_argb0, + const uint8_t* src_argb1, + uint8_t* dst_argb, + int width) { + int x; + int len = width / 8; + __m256i src0, src1, dst0; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_argb0, 0, src_argb1, 0, src0, src1); + dst0 = __lasx_xvssub_bu(src0, src1); + __lasx_xvst(dst0, dst_argb, 0); + src_argb0 += 32; + src_argb1 += 32; + dst_argb += 32; + } +} + +void 
ARGBAttenuateRow_LASX(const uint8_t* src_argb, + uint8_t* dst_argb, + int width) { + int x; + int len = width / 16; + __m256i src0, src1, tmp0, tmp1; + __m256i reg0, reg1, reg2, reg3, reg4, reg5; + __m256i b, g, r, a, dst0, dst1; + __m256i control = {0x0005000100040000, 0x0007000300060002, 0x0005000100040000, + 0x0007000300060002}; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_argb, 0, src_argb, 32, src0, src1); + tmp0 = __lasx_xvpickev_b(src1, src0); + tmp1 = __lasx_xvpickod_b(src1, src0); + b = __lasx_xvpackev_b(tmp0, tmp0); + r = __lasx_xvpackod_b(tmp0, tmp0); + g = __lasx_xvpackev_b(tmp1, tmp1); + a = __lasx_xvpackod_b(tmp1, tmp1); + reg0 = __lasx_xvmulwev_w_hu(b, a); + reg1 = __lasx_xvmulwod_w_hu(b, a); + reg2 = __lasx_xvmulwev_w_hu(r, a); + reg3 = __lasx_xvmulwod_w_hu(r, a); + reg4 = __lasx_xvmulwev_w_hu(g, a); + reg5 = __lasx_xvmulwod_w_hu(g, a); + reg0 = __lasx_xvssrani_h_w(reg1, reg0, 24); + reg2 = __lasx_xvssrani_h_w(reg3, reg2, 24); + reg4 = __lasx_xvssrani_h_w(reg5, reg4, 24); + reg0 = __lasx_xvshuf_h(control, reg0, reg0); + reg2 = __lasx_xvshuf_h(control, reg2, reg2); + reg4 = __lasx_xvshuf_h(control, reg4, reg4); + tmp0 = __lasx_xvpackev_b(reg4, reg0); + tmp1 = __lasx_xvpackev_b(a, reg2); + dst0 = __lasx_xvilvl_h(tmp1, tmp0); + dst1 = __lasx_xvilvh_h(tmp1, tmp0); + __lasx_xvst(dst0, dst_argb, 0); + __lasx_xvst(dst1, dst_argb, 32); + dst_argb += 64; + src_argb += 64; + } +} + +void ARGBToRGB565DitherRow_LASX(const uint8_t* src_argb, + uint8_t* dst_rgb, + const uint32_t dither4, + int width) { + int x; + int len = width / 16; + __m256i src0, src1, tmp0, tmp1, dst0; + __m256i b, g, r; + __m256i zero = __lasx_xvldi(0); + __m256i vec_dither = __lasx_xvldrepl_w(&dither4, 0); + + vec_dither = __lasx_xvilvl_b(zero, vec_dither); + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_argb, 0, src_argb, 32, src0, src1); + tmp0 = __lasx_xvpickev_b(src1, src0); + tmp1 = __lasx_xvpickod_b(src1, src0); + b = __lasx_xvpackev_b(zero, tmp0); + r = __lasx_xvpackod_b(zero, tmp0); + g = __lasx_xvpackev_b(zero, tmp1); + b = __lasx_xvadd_h(b, vec_dither); + g = __lasx_xvadd_h(g, vec_dither); + r = __lasx_xvadd_h(r, vec_dither); + DUP2_ARG1(__lasx_xvclip255_h, b, g, b, g); + r = __lasx_xvclip255_h(r); + b = __lasx_xvsrai_h(b, 3); + g = __lasx_xvsrai_h(g, 2); + r = __lasx_xvsrai_h(r, 3); + g = __lasx_xvslli_h(g, 5); + r = __lasx_xvslli_h(r, 11); + dst0 = __lasx_xvor_v(b, g); + dst0 = __lasx_xvor_v(dst0, r); + dst0 = __lasx_xvpermi_d(dst0, 0xD8); + __lasx_xvst(dst0, dst_rgb, 0); + src_argb += 64; + dst_rgb += 32; + } +} + +void ARGBShuffleRow_LASX(const uint8_t* src_argb, + uint8_t* dst_argb, + const uint8_t* shuffler, + int width) { + int x; + int len = width / 16; + __m256i src0, src1, dst0, dst1; + __m256i shuf = {0x0404040400000000, 0x0C0C0C0C08080808, 0x0404040400000000, + 0x0C0C0C0C08080808}; + __m256i temp = __lasx_xvldrepl_w(shuffler, 0); + + shuf = __lasx_xvadd_b(shuf, temp); + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_argb, 0, src_argb, 32, src0, src1); + dst0 = __lasx_xvshuf_b(src0, src0, shuf); + dst1 = __lasx_xvshuf_b(src1, src1, shuf); + __lasx_xvst(dst0, dst_argb, 0); + __lasx_xvst(dst1, dst_argb, 32); + src_argb += 64; + dst_argb += 64; + } +} + +void ARGBShadeRow_LASX(const uint8_t* src_argb, + uint8_t* dst_argb, + int width, + uint32_t value) { + int x; + int len = width / 8; + __m256i src0, dst0, tmp0, tmp1; + __m256i vec_value = __lasx_xvreplgr2vr_w(value); + + vec_value = __lasx_xvilvl_b(vec_value, vec_value); + for (x = 0; x < len; x++) { + src0 
= __lasx_xvld(src_argb, 0); + tmp0 = __lasx_xvilvl_b(src0, src0); + tmp1 = __lasx_xvilvh_b(src0, src0); + tmp0 = __lasx_xvmuh_hu(tmp0, vec_value); + tmp1 = __lasx_xvmuh_hu(tmp1, vec_value); + dst0 = __lasx_xvpickod_b(tmp1, tmp0); + __lasx_xvst(dst0, dst_argb, 0); + src_argb += 32; + dst_argb += 32; + } +} + +void ARGBGrayRow_LASX(const uint8_t* src_argb, uint8_t* dst_argb, int width) { + int x; + int len = width / 16; + __m256i src0, src1, tmp0, tmp1; + __m256i reg0, reg1, reg2, dst0, dst1; + __m256i const_128 = __lasx_xvldi(0x480); + __m256i const_150 = __lasx_xvldi(0x96); + __m256i const_br = {0x4D1D4D1D4D1D4D1D, 0x4D1D4D1D4D1D4D1D, + 0x4D1D4D1D4D1D4D1D, 0x4D1D4D1D4D1D4D1D}; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, src_argb, 0, src_argb, 32, src0, src1); + tmp0 = __lasx_xvpickev_b(src1, src0); + tmp1 = __lasx_xvpickod_b(src1, src0); + reg0 = __lasx_xvdp2_h_bu(tmp0, const_br); + reg1 = __lasx_xvmaddwev_h_bu(const_128, tmp1, const_150); + reg2 = __lasx_xvadd_h(reg0, reg1); + tmp0 = __lasx_xvpackod_b(reg2, reg2); + tmp1 = __lasx_xvpackod_b(tmp1, reg2); + dst0 = __lasx_xvilvl_h(tmp1, tmp0); + dst1 = __lasx_xvilvh_h(tmp1, tmp0); + __lasx_xvst(dst0, dst_argb, 0); + __lasx_xvst(dst1, dst_argb, 32); + src_argb += 64; + dst_argb += 64; + } +} + +void ARGBSepiaRow_LASX(uint8_t* dst_argb, int width) { + int x; + int len = width / 16; + __m256i src0, src1, tmp0, tmp1; + __m256i reg0, reg1, spb, spg, spr; + __m256i dst0, dst1; + __m256i spb_g = __lasx_xvldi(68); + __m256i spg_g = __lasx_xvldi(88); + __m256i spr_g = __lasx_xvldi(98); + __m256i spb_br = {0x2311231123112311, 0x2311231123112311, 0x2311231123112311, + 0x2311231123112311}; + __m256i spg_br = {0x2D162D162D162D16, 0x2D162D162D162D16, 0x2D162D162D162D16, + 0x2D162D162D162D16}; + __m256i spr_br = {0x3218321832183218, 0x3218321832183218, 0x3218321832183218, + 0x3218321832183218}; + __m256i shuff = {0x1706150413021100, 0x1F0E1D0C1B0A1908, 0x1706150413021100, + 0x1F0E1D0C1B0A1908}; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lasx_xvld, dst_argb, 0, dst_argb, 32, src0, src1); + tmp0 = __lasx_xvpickev_b(src1, src0); + tmp1 = __lasx_xvpickod_b(src1, src0); + DUP2_ARG2(__lasx_xvdp2_h_bu, tmp0, spb_br, tmp0, spg_br, spb, spg); + spr = __lasx_xvdp2_h_bu(tmp0, spr_br); + spb = __lasx_xvmaddwev_h_bu(spb, tmp1, spb_g); + spg = __lasx_xvmaddwev_h_bu(spg, tmp1, spg_g); + spr = __lasx_xvmaddwev_h_bu(spr, tmp1, spr_g); + spb = __lasx_xvsrli_h(spb, 7); + spg = __lasx_xvsrli_h(spg, 7); + spr = __lasx_xvsrli_h(spr, 7); + spg = __lasx_xvsat_hu(spg, 7); + spr = __lasx_xvsat_hu(spr, 7); + reg0 = __lasx_xvpackev_b(spg, spb); + reg1 = __lasx_xvshuf_b(tmp1, spr, shuff); + dst0 = __lasx_xvilvl_h(reg1, reg0); + dst1 = __lasx_xvilvh_h(reg1, reg0); + __lasx_xvst(dst0, dst_argb, 0); + __lasx_xvst(dst1, dst_argb, 32); + dst_argb += 64; + } +} + +void ARGB4444ToARGBRow_LASX(const uint8_t* src_argb4444, + uint8_t* dst_argb, + int width) { + int x; + int len = width / 32; + __m256i src0, src1; + __m256i tmp0, tmp1, tmp2, tmp3; + __m256i reg0, reg1, reg2, reg3; + __m256i dst0, dst1, dst2, dst3; + + for (x = 0; x < len; x++) { + src0 = __lasx_xvld(src_argb4444, 0); + src1 = __lasx_xvld(src_argb4444, 32); + DUP4_ARG2(__lasx_xvandi_b, src0, 0x0F, src0, 0xF0, src1, 0x0F, src1, 0xF0, + tmp0, tmp1, tmp2, tmp3); + DUP2_ARG2(__lasx_xvslli_b, tmp0, 4, tmp2, 4, reg0, reg2); + DUP2_ARG2(__lasx_xvsrli_b, tmp1, 4, tmp3, 4, reg1, reg3); + DUP4_ARG2(__lasx_xvor_v, tmp0, reg0, tmp1, reg1, tmp2, reg2, tmp3, reg3, + tmp0, tmp1, tmp2, tmp3); + DUP2_ARG2(__lasx_xvilvl_b, tmp1, 
tmp0, tmp3, tmp2, reg0, reg2); + DUP2_ARG2(__lasx_xvilvh_b, tmp1, tmp0, tmp3, tmp2, reg1, reg3); + DUP4_ARG3(__lasx_xvpermi_q, reg1, reg0, 0x20, reg1, reg0, 0x31, reg3, reg2, + 0x20, reg3, reg2, 0x31, dst0, dst1, dst2, dst3); + __lasx_xvst(dst0, dst_argb, 0); + __lasx_xvst(dst1, dst_argb, 32); + __lasx_xvst(dst2, dst_argb, 64); + __lasx_xvst(dst3, dst_argb, 96); + src_argb4444 += 64; + dst_argb += 128; + } +} + +void ARGB1555ToARGBRow_LASX(const uint8_t* src_argb1555, + uint8_t* dst_argb, + int width) { + int x; + int len = width / 32; + __m256i src0, src1; + __m256i tmp0, tmp1, tmpb, tmpg, tmpr, tmpa; + __m256i reg0, reg1, reg2, reg3; + __m256i dst0, dst1, dst2, dst3; + + for (x = 0; x < len; x++) { + src0 = __lasx_xvld(src_argb1555, 0); + src1 = __lasx_xvld(src_argb1555, 32); + tmp0 = __lasx_xvpickev_b(src1, src0); + tmp1 = __lasx_xvpickod_b(src1, src0); + tmpb = __lasx_xvandi_b(tmp0, 0x1F); + tmpg = __lasx_xvsrli_b(tmp0, 5); + reg0 = __lasx_xvandi_b(tmp1, 0x03); + reg0 = __lasx_xvslli_b(reg0, 3); + tmpg = __lasx_xvor_v(tmpg, reg0); + reg1 = __lasx_xvandi_b(tmp1, 0x7C); + tmpr = __lasx_xvsrli_b(reg1, 2); + tmpa = __lasx_xvsrli_b(tmp1, 7); + tmpa = __lasx_xvneg_b(tmpa); + reg0 = __lasx_xvslli_b(tmpb, 3); + reg1 = __lasx_xvslli_b(tmpg, 3); + reg2 = __lasx_xvslli_b(tmpr, 3); + tmpb = __lasx_xvsrli_b(tmpb, 2); + tmpg = __lasx_xvsrli_b(tmpg, 2); + tmpr = __lasx_xvsrli_b(tmpr, 2); + tmpb = __lasx_xvor_v(reg0, tmpb); + tmpg = __lasx_xvor_v(reg1, tmpg); + tmpr = __lasx_xvor_v(reg2, tmpr); + DUP2_ARG2(__lasx_xvilvl_b, tmpg, tmpb, tmpa, tmpr, reg0, reg1); + DUP2_ARG2(__lasx_xvilvh_b, tmpg, tmpb, tmpa, tmpr, reg2, reg3); + dst0 = __lasx_xvilvl_h(reg1, reg0); + dst1 = __lasx_xvilvh_h(reg1, reg0); + dst2 = __lasx_xvilvl_h(reg3, reg2); + dst3 = __lasx_xvilvh_h(reg3, reg2); + DUP4_ARG3(__lasx_xvpermi_q, dst1, dst0, 0x20, dst1, dst0, 0x31, dst3, dst2, + 0x20, dst3, dst2, 0x31, reg0, reg1, reg2, reg3); + __lasx_xvst(reg0, dst_argb, 0); + __lasx_xvst(reg1, dst_argb, 32); + __lasx_xvst(reg2, dst_argb, 64); + __lasx_xvst(reg3, dst_argb, 96); + src_argb1555 += 64; + dst_argb += 128; + } +} + +void RGB565ToARGBRow_LASX(const uint8_t* src_rgb565, + uint8_t* dst_argb, + int width) { + int x; + int len = width / 32; + __m256i src0, src1; + __m256i tmp0, tmp1, tmpb, tmpg, tmpr; + __m256i reg0, reg1, reg2, reg3, dst0, dst1, dst2, dst3; + __m256i alpha = __lasx_xvldi(0xFF); + + for (x = 0; x < len; x++) { + src0 = __lasx_xvld(src_rgb565, 0); + src1 = __lasx_xvld(src_rgb565, 32); + tmp0 = __lasx_xvpickev_b(src1, src0); + tmp1 = __lasx_xvpickod_b(src1, src0); + tmpb = __lasx_xvandi_b(tmp0, 0x1F); + tmpr = __lasx_xvandi_b(tmp1, 0xF8); + reg1 = __lasx_xvandi_b(tmp1, 0x07); + reg0 = __lasx_xvsrli_b(tmp0, 5); + reg1 = __lasx_xvslli_b(reg1, 3); + tmpg = __lasx_xvor_v(reg1, reg0); + reg0 = __lasx_xvslli_b(tmpb, 3); + reg1 = __lasx_xvsrli_b(tmpb, 2); + tmpb = __lasx_xvor_v(reg1, reg0); + reg0 = __lasx_xvslli_b(tmpg, 2); + reg1 = __lasx_xvsrli_b(tmpg, 4); + tmpg = __lasx_xvor_v(reg1, reg0); + reg0 = __lasx_xvsrli_b(tmpr, 5); + tmpr = __lasx_xvor_v(tmpr, reg0); + DUP2_ARG2(__lasx_xvilvl_b, tmpg, tmpb, alpha, tmpr, reg0, reg1); + dst0 = __lasx_xvilvl_h(reg1, reg0); + dst1 = __lasx_xvilvh_h(reg1, reg0); + DUP2_ARG2(__lasx_xvilvh_b, tmpg, tmpb, alpha, tmpr, reg0, reg1); + dst2 = __lasx_xvilvl_h(reg1, reg0); + dst3 = __lasx_xvilvh_h(reg1, reg0); + DUP4_ARG3(__lasx_xvpermi_q, dst1, dst0, 0x20, dst1, dst0, 0x31, dst3, dst2, + 0x20, dst3, dst2, 0x31, reg0, reg1, reg2, reg3); + __lasx_xvst(reg0, dst_argb, 0); + __lasx_xvst(reg1, 
dst_argb, 32); + __lasx_xvst(reg2, dst_argb, 64); + __lasx_xvst(reg3, dst_argb, 96); + src_rgb565 += 64; + dst_argb += 128; + } +} + +void RGB24ToARGBRow_LASX(const uint8_t* src_rgb24, + uint8_t* dst_argb, + int width) { + int x; + int len = width / 32; + __m256i src0, src1, src2; + __m256i tmp0, tmp1, tmp2; + __m256i dst0, dst1, dst2, dst3; + __m256i reg0, reg1, reg2, reg3; + __m256i alpha = __lasx_xvldi(0xFF); + __m256i shuf0 = {0x131211100F0E0D0C, 0x1B1A191817161514, 0x131211100F0E0D0C, + 0x1B1A191817161514}; + __m256i shuf1 = {0x1F1E1D1C1B1A1918, 0x0706050403020100, 0x1F1E1D1C1B1A1918, + 0x0706050403020100}; + __m256i shuf2 = {0x0B0A090807060504, 0x131211100F0E0D0C, 0x0B0A090807060504, + 0x131211100F0E0D0C}; + __m256i shuf3 = {0x1005040310020100, 0x100B0A0910080706, 0x1005040310020100, + 0x100B0A0910080706}; + + for (x = 0; x < len; x++) { + reg0 = __lasx_xvld(src_rgb24, 0); + reg1 = __lasx_xvld(src_rgb24, 32); + reg2 = __lasx_xvld(src_rgb24, 64); + src0 = __lasx_xvpermi_q(reg1, reg0, 0x30); + src1 = __lasx_xvpermi_q(reg2, reg0, 0x21); + src2 = __lasx_xvpermi_q(reg2, reg1, 0x30); + DUP2_ARG3(__lasx_xvshuf_b, src1, src0, shuf0, src1, src2, shuf1, tmp0, + tmp1); + tmp2 = __lasx_xvshuf_b(src1, src2, shuf2); + DUP4_ARG3(__lasx_xvshuf_b, alpha, src0, shuf3, alpha, tmp0, shuf3, alpha, + tmp1, shuf3, alpha, tmp2, shuf3, reg0, reg1, reg2, reg3); + DUP4_ARG3(__lasx_xvpermi_q, reg1, reg0, 0x20, reg3, reg2, 0x20, reg1, reg0, + 0x31, reg3, reg2, 0x31, dst0, dst1, dst2, dst3); + __lasx_xvst(dst0, dst_argb, 0); + __lasx_xvst(dst1, dst_argb, 32); + __lasx_xvst(dst2, dst_argb, 64); + __lasx_xvst(dst3, dst_argb, 96); + src_rgb24 += 96; + dst_argb += 128; + } +} + +void RAWToARGBRow_LASX(const uint8_t* src_raw, uint8_t* dst_argb, int width) { + int x; + int len = width / 32; + __m256i src0, src1, src2; + __m256i tmp0, tmp1, tmp2, reg0, reg1, reg2, reg3; + __m256i dst0, dst1, dst2, dst3; + __m256i alpha = __lasx_xvldi(0xFF); + __m256i shuf0 = {0x131211100F0E0D0C, 0x1B1A191817161514, 0x131211100F0E0D0C, + 0x1B1A191817161514}; + __m256i shuf1 = {0x1F1E1D1C1B1A1918, 0x0706050403020100, 0x1F1E1D1C1B1A1918, + 0x0706050403020100}; + __m256i shuf2 = {0x0B0A090807060504, 0x131211100F0E0D0C, 0x0B0A090807060504, + 0x131211100F0E0D0C}; + __m256i shuf3 = {0x1003040510000102, 0x10090A0B10060708, 0x1003040510000102, + 0x10090A0B10060708}; + + for (x = 0; x < len; x++) { + reg0 = __lasx_xvld(src_raw, 0); + reg1 = __lasx_xvld(src_raw, 32); + reg2 = __lasx_xvld(src_raw, 64); + src0 = __lasx_xvpermi_q(reg1, reg0, 0x30); + src1 = __lasx_xvpermi_q(reg2, reg0, 0x21); + src2 = __lasx_xvpermi_q(reg2, reg1, 0x30); + DUP2_ARG3(__lasx_xvshuf_b, src1, src0, shuf0, src1, src2, shuf1, tmp0, + tmp1); + tmp2 = __lasx_xvshuf_b(src1, src2, shuf2); + DUP4_ARG3(__lasx_xvshuf_b, alpha, src0, shuf3, alpha, tmp0, shuf3, alpha, + tmp1, shuf3, alpha, tmp2, shuf3, reg0, reg1, reg2, reg3); + DUP4_ARG3(__lasx_xvpermi_q, reg1, reg0, 0x20, reg3, reg2, 0x20, reg1, reg0, + 0x31, reg3, reg2, 0x31, dst0, dst1, dst2, dst3); + __lasx_xvst(dst0, dst_argb, 0); + __lasx_xvst(dst1, dst_argb, 32); + __lasx_xvst(dst2, dst_argb, 64); + __lasx_xvst(dst3, dst_argb, 96); + src_raw += 96; + dst_argb += 128; + } +} + +void ARGB1555ToYRow_LASX(const uint8_t* src_argb1555, + uint8_t* dst_y, + int width) { + int x; + int len = width / 32; + __m256i src0, src1; + __m256i tmp0, tmp1, tmpb, tmpg, tmpr; + __m256i reg0, reg1, reg2, dst0; + __m256i const_66 = __lasx_xvldi(66); + __m256i const_129 = __lasx_xvldi(129); + __m256i const_25 = __lasx_xvldi(25); + __m256i 
const_1080 = {0x1080108010801080, 0x1080108010801080, + 0x1080108010801080, 0x1080108010801080}; + + for (x = 0; x < len; x++) { + src0 = __lasx_xvld(src_argb1555, 0); + src1 = __lasx_xvld(src_argb1555, 32); + tmp0 = __lasx_xvpickev_b(src1, src0); + tmp1 = __lasx_xvpickod_b(src1, src0); + tmpb = __lasx_xvandi_b(tmp0, 0x1F); + tmpg = __lasx_xvsrli_b(tmp0, 5); + reg0 = __lasx_xvandi_b(tmp1, 0x03); + reg0 = __lasx_xvslli_b(reg0, 3); + tmpg = __lasx_xvor_v(tmpg, reg0); + reg1 = __lasx_xvandi_b(tmp1, 0x7C); + tmpr = __lasx_xvsrli_b(reg1, 2); + reg0 = __lasx_xvslli_b(tmpb, 3); + reg1 = __lasx_xvslli_b(tmpg, 3); + reg2 = __lasx_xvslli_b(tmpr, 3); + tmpb = __lasx_xvsrli_b(tmpb, 2); + tmpg = __lasx_xvsrli_b(tmpg, 2); + tmpr = __lasx_xvsrli_b(tmpr, 2); + tmpb = __lasx_xvor_v(reg0, tmpb); + tmpg = __lasx_xvor_v(reg1, tmpg); + tmpr = __lasx_xvor_v(reg2, tmpr); + reg0 = __lasx_xvmaddwev_h_bu(const_1080, tmpb, const_25); + reg1 = __lasx_xvmaddwod_h_bu(const_1080, tmpb, const_25); + reg0 = __lasx_xvmaddwev_h_bu(reg0, tmpg, const_129); + reg1 = __lasx_xvmaddwod_h_bu(reg1, tmpg, const_129); + reg0 = __lasx_xvmaddwev_h_bu(reg0, tmpr, const_66); + reg1 = __lasx_xvmaddwod_h_bu(reg1, tmpr, const_66); + dst0 = __lasx_xvpackod_b(reg1, reg0); + dst0 = __lasx_xvpermi_d(dst0, 0xD8); + __lasx_xvst(dst0, dst_y, 0); + src_argb1555 += 64; + dst_y += 32; + } +} + +void ARGB1555ToUVRow_LASX(const uint8_t* src_argb1555, + int src_stride_argb1555, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + int len = width / 32; + const uint8_t* next_argb1555 = src_argb1555 + src_stride_argb1555; + __m256i src0, src1, src2, src3; + __m256i tmp0, tmp1, tmp2, tmp3; + __m256i tmpb, tmpg, tmpr, nexb, nexg, nexr; + __m256i reg0, reg1, reg2, reg3, dst0; + __m256i const_112 = __lasx_xvldi(0x438); + __m256i const_74 = __lasx_xvldi(0x425); + __m256i const_38 = __lasx_xvldi(0x413); + __m256i const_94 = __lasx_xvldi(0x42F); + __m256i const_18 = __lasx_xvldi(0x409); + __m256i const_8080 = {0x8080808080808080, 0x8080808080808080, + 0x8080808080808080, 0x8080808080808080}; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lasx_xvld, src_argb1555, 0, src_argb1555, 32, next_argb1555, 0, + next_argb1555, 32, src0, src1, src2, src3); + DUP2_ARG2(__lasx_xvpickev_b, src1, src0, src3, src2, tmp0, tmp2); + DUP2_ARG2(__lasx_xvpickod_b, src1, src0, src3, src2, tmp1, tmp3); + tmpb = __lasx_xvandi_b(tmp0, 0x1F); + nexb = __lasx_xvandi_b(tmp2, 0x1F); + tmpg = __lasx_xvsrli_b(tmp0, 5); + nexg = __lasx_xvsrli_b(tmp2, 5); + reg0 = __lasx_xvandi_b(tmp1, 0x03); + reg2 = __lasx_xvandi_b(tmp3, 0x03); + reg0 = __lasx_xvslli_b(reg0, 3); + reg2 = __lasx_xvslli_b(reg2, 3); + tmpg = __lasx_xvor_v(tmpg, reg0); + nexg = __lasx_xvor_v(nexg, reg2); + reg1 = __lasx_xvandi_b(tmp1, 0x7C); + reg3 = __lasx_xvandi_b(tmp3, 0x7C); + tmpr = __lasx_xvsrli_b(reg1, 2); + nexr = __lasx_xvsrli_b(reg3, 2); + reg0 = __lasx_xvslli_b(tmpb, 3); + reg1 = __lasx_xvslli_b(tmpg, 3); + reg2 = __lasx_xvslli_b(tmpr, 3); + tmpb = __lasx_xvsrli_b(tmpb, 2); + tmpg = __lasx_xvsrli_b(tmpg, 2); + tmpr = __lasx_xvsrli_b(tmpr, 2); + tmpb = __lasx_xvor_v(reg0, tmpb); + tmpg = __lasx_xvor_v(reg1, tmpg); + tmpr = __lasx_xvor_v(reg2, tmpr); + reg0 = __lasx_xvslli_b(nexb, 3); + reg1 = __lasx_xvslli_b(nexg, 3); + reg2 = __lasx_xvslli_b(nexr, 3); + nexb = __lasx_xvsrli_b(nexb, 2); + nexg = __lasx_xvsrli_b(nexg, 2); + nexr = __lasx_xvsrli_b(nexr, 2); + nexb = __lasx_xvor_v(reg0, nexb); + nexg = __lasx_xvor_v(reg1, nexg); + nexr = __lasx_xvor_v(reg2, nexr); + RGBTOUV(tmpb, tmpg, tmpr, nexb, nexg, nexr, reg0, 
reg1); + reg0 = __lasx_xvpermi_d(reg0, 0xD8); + reg1 = __lasx_xvpermi_d(reg1, 0xD8); + dst0 = __lasx_xvpickod_b(reg1, reg0); + __lasx_xvstelm_d(dst0, dst_u, 0, 0); + __lasx_xvstelm_d(dst0, dst_v, 0, 1); + __lasx_xvstelm_d(dst0, dst_u, 8, 2); + __lasx_xvstelm_d(dst0, dst_v, 8, 3); + src_argb1555 += 64; + next_argb1555 += 64; + dst_u += 16; + dst_v += 16; + } +} + +void RGB565ToYRow_LASX(const uint8_t* src_rgb565, uint8_t* dst_y, int width) { + int x; + int len = width / 32; + __m256i src0, src1; + __m256i tmp0, tmp1, tmpb, tmpg, tmpr; + __m256i reg0, reg1, dst0; + __m256i const_66 = __lasx_xvldi(66); + __m256i const_129 = __lasx_xvldi(129); + __m256i const_25 = __lasx_xvldi(25); + __m256i const_1080 = {0x1080108010801080, 0x1080108010801080, + 0x1080108010801080, 0x1080108010801080}; + + for (x = 0; x < len; x++) { + src0 = __lasx_xvld(src_rgb565, 0); + src1 = __lasx_xvld(src_rgb565, 32); + tmp0 = __lasx_xvpickev_b(src1, src0); + tmp1 = __lasx_xvpickod_b(src1, src0); + tmpb = __lasx_xvandi_b(tmp0, 0x1F); + tmpr = __lasx_xvandi_b(tmp1, 0xF8); + reg1 = __lasx_xvandi_b(tmp1, 0x07); + reg0 = __lasx_xvsrli_b(tmp0, 5); + reg1 = __lasx_xvslli_b(reg1, 3); + tmpg = __lasx_xvor_v(reg1, reg0); + reg0 = __lasx_xvslli_b(tmpb, 3); + reg1 = __lasx_xvsrli_b(tmpb, 2); + tmpb = __lasx_xvor_v(reg1, reg0); + reg0 = __lasx_xvslli_b(tmpg, 2); + reg1 = __lasx_xvsrli_b(tmpg, 4); + tmpg = __lasx_xvor_v(reg1, reg0); + reg0 = __lasx_xvsrli_b(tmpr, 5); + tmpr = __lasx_xvor_v(tmpr, reg0); + reg0 = __lasx_xvmaddwev_h_bu(const_1080, tmpb, const_25); + reg1 = __lasx_xvmaddwod_h_bu(const_1080, tmpb, const_25); + reg0 = __lasx_xvmaddwev_h_bu(reg0, tmpg, const_129); + reg1 = __lasx_xvmaddwod_h_bu(reg1, tmpg, const_129); + reg0 = __lasx_xvmaddwev_h_bu(reg0, tmpr, const_66); + reg1 = __lasx_xvmaddwod_h_bu(reg1, tmpr, const_66); + dst0 = __lasx_xvpackod_b(reg1, reg0); + dst0 = __lasx_xvpermi_d(dst0, 0xD8); + __lasx_xvst(dst0, dst_y, 0); + dst_y += 32; + src_rgb565 += 64; + } +} + +void RGB565ToUVRow_LASX(const uint8_t* src_rgb565, + int src_stride_rgb565, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + int len = width / 32; + const uint8_t* next_rgb565 = src_rgb565 + src_stride_rgb565; + __m256i src0, src1, src2, src3; + __m256i tmp0, tmp1, tmp2, tmp3; + __m256i tmpb, tmpg, tmpr, nexb, nexg, nexr; + __m256i reg0, reg1, reg2, reg3, dst0; + __m256i const_112 = __lasx_xvldi(0x438); + __m256i const_74 = __lasx_xvldi(0x425); + __m256i const_38 = __lasx_xvldi(0x413); + __m256i const_94 = __lasx_xvldi(0x42F); + __m256i const_18 = __lasx_xvldi(0x409); + __m256i const_8080 = {0x8080808080808080, 0x8080808080808080, + 0x8080808080808080, 0x8080808080808080}; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lasx_xvld, src_rgb565, 0, src_rgb565, 32, next_rgb565, 0, + next_rgb565, 32, src0, src1, src2, src3); + DUP2_ARG2(__lasx_xvpickev_b, src1, src0, src3, src2, tmp0, tmp2); + DUP2_ARG2(__lasx_xvpickod_b, src1, src0, src3, src2, tmp1, tmp3); + tmpb = __lasx_xvandi_b(tmp0, 0x1F); + tmpr = __lasx_xvandi_b(tmp1, 0xF8); + nexb = __lasx_xvandi_b(tmp2, 0x1F); + nexr = __lasx_xvandi_b(tmp3, 0xF8); + reg1 = __lasx_xvandi_b(tmp1, 0x07); + reg3 = __lasx_xvandi_b(tmp3, 0x07); + reg0 = __lasx_xvsrli_b(tmp0, 5); + reg1 = __lasx_xvslli_b(reg1, 3); + reg2 = __lasx_xvsrli_b(tmp2, 5); + reg3 = __lasx_xvslli_b(reg3, 3); + tmpg = __lasx_xvor_v(reg1, reg0); + nexg = __lasx_xvor_v(reg2, reg3); + reg0 = __lasx_xvslli_b(tmpb, 3); + reg1 = __lasx_xvsrli_b(tmpb, 2); + reg2 = __lasx_xvslli_b(nexb, 3); + reg3 = __lasx_xvsrli_b(nexb, 2); + tmpb = 
__lasx_xvor_v(reg1, reg0); + nexb = __lasx_xvor_v(reg2, reg3); + reg0 = __lasx_xvslli_b(tmpg, 2); + reg1 = __lasx_xvsrli_b(tmpg, 4); + reg2 = __lasx_xvslli_b(nexg, 2); + reg3 = __lasx_xvsrli_b(nexg, 4); + tmpg = __lasx_xvor_v(reg1, reg0); + nexg = __lasx_xvor_v(reg2, reg3); + reg0 = __lasx_xvsrli_b(tmpr, 5); + reg2 = __lasx_xvsrli_b(nexr, 5); + tmpr = __lasx_xvor_v(tmpr, reg0); + nexr = __lasx_xvor_v(nexr, reg2); + RGBTOUV(tmpb, tmpg, tmpr, nexb, nexg, nexr, reg0, reg1); + reg0 = __lasx_xvpermi_d(reg0, 0xD8); + reg1 = __lasx_xvpermi_d(reg1, 0xD8); + dst0 = __lasx_xvpickod_b(reg1, reg0); + __lasx_xvstelm_d(dst0, dst_u, 0, 0); + __lasx_xvstelm_d(dst0, dst_v, 0, 1); + __lasx_xvstelm_d(dst0, dst_u, 8, 2); + __lasx_xvstelm_d(dst0, dst_v, 8, 3); + dst_u += 16; + dst_v += 16; + src_rgb565 += 64; + next_rgb565 += 64; + } +} + +void RGB24ToUVRow_LASX(const uint8_t* src_rgb24, + int src_stride_rgb24, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + const uint8_t* next_rgb24 = src_rgb24 + src_stride_rgb24; + int len = width / 32; + __m256i src0, src1, src2, reg0, reg1, reg2; + __m256i nex0, nex1, nex2, dst0, tmp0, tmp1, tmp2; + __m256i tmpb, tmpg, tmpr, nexb, nexg, nexr; + __m256i const_112 = __lasx_xvldi(0x438); + __m256i const_74 = __lasx_xvldi(0x425); + __m256i const_38 = __lasx_xvldi(0x413); + __m256i const_94 = __lasx_xvldi(0x42F); + __m256i const_18 = __lasx_xvldi(0x409); + __m256i const_8080 = {0x8080808080808080, 0x8080808080808080, + 0x8080808080808080, 0x8080808080808080}; + __m256i shuff0_b = {0x15120F0C09060300, 0x00000000001E1B18, + 0x15120F0C09060300, 0x00000000001E1B18}; + __m256i shuff1_b = {0x0706050403020100, 0x1D1A1714110A0908, + 0x0706050403020100, 0x1D1A1714110A0908}; + __m256i shuff0_g = {0x1613100D0A070401, 0x00000000001F1C19, + 0x1613100D0A070401, 0x00000000001F1C19}; + __m256i shuff1_g = {0x0706050403020100, 0x1E1B1815120A0908, + 0x0706050403020100, 0x1E1B1815120A0908}; + __m256i shuff0_r = {0x1714110E0B080502, 0x0000000000001D1A, + 0x1714110E0B080502, 0x0000000000001D1A}; + __m256i shuff1_r = {0x0706050403020100, 0x1F1C191613100908, + 0x0706050403020100, 0x1F1C191613100908}; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lasx_xvld, src_rgb24, 0, src_rgb24, 32, src_rgb24, 64, + next_rgb24, 0, reg0, reg1, reg2, tmp0); + DUP2_ARG2(__lasx_xvld, next_rgb24, 32, next_rgb24, 64, tmp1, tmp2); + DUP4_ARG3(__lasx_xvpermi_q, reg1, reg0, 0x30, reg2, reg0, 0x21, reg2, reg1, + 0x30, tmp1, tmp0, 0x30, src0, src1, src2, nex0); + DUP2_ARG3(__lasx_xvpermi_q, tmp2, tmp0, 0x21, tmp2, tmp1, 0x30, nex1, nex2); + DUP2_ARG3(__lasx_xvshuf_b, src1, src0, shuff0_b, nex1, nex0, shuff0_b, tmpb, + nexb); + DUP2_ARG3(__lasx_xvshuf_b, src1, src0, shuff0_g, nex1, nex0, shuff0_g, tmpg, + nexg); + DUP2_ARG3(__lasx_xvshuf_b, src1, src0, shuff0_r, nex1, nex0, shuff0_r, tmpr, + nexr); + DUP2_ARG3(__lasx_xvshuf_b, src2, tmpb, shuff1_b, nex2, nexb, shuff1_b, tmpb, + nexb); + DUP2_ARG3(__lasx_xvshuf_b, src2, tmpg, shuff1_g, nex2, nexg, shuff1_g, tmpg, + nexg); + DUP2_ARG3(__lasx_xvshuf_b, src2, tmpr, shuff1_r, nex2, nexr, shuff1_r, tmpr, + nexr); + RGBTOUV(tmpb, tmpg, tmpr, nexb, nexg, nexr, reg0, reg1); + dst0 = __lasx_xvpickod_b(reg1, reg0); + __lasx_xvstelm_d(dst0, dst_u, 0, 0); + __lasx_xvstelm_d(dst0, dst_v, 0, 1); + __lasx_xvstelm_d(dst0, dst_u, 8, 2); + __lasx_xvstelm_d(dst0, dst_v, 8, 3); + src_rgb24 += 96; + next_rgb24 += 96; + dst_u += 16; + dst_v += 16; + } +} + +void RAWToUVRow_LASX(const uint8_t* src_raw, + int src_stride_raw, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + 
const uint8_t* next_raw = src_raw + src_stride_raw; + int len = width / 32; + __m256i src0, src1, src2, reg0, reg1, reg2; + __m256i nex0, nex1, nex2, dst0, tmp0, tmp1, tmp2; + __m256i tmpb, tmpg, tmpr, nexb, nexg, nexr; + __m256i const_112 = __lasx_xvldi(0x438); + __m256i const_74 = __lasx_xvldi(0x425); + __m256i const_38 = __lasx_xvldi(0x413); + __m256i const_94 = __lasx_xvldi(0x42F); + __m256i const_18 = __lasx_xvldi(0x409); + __m256i const_8080 = {0x8080808080808080, 0x8080808080808080, + 0x8080808080808080, 0x8080808080808080}; + __m256i shuff0_r = {0x15120F0C09060300, 0x00000000001E1B18, + 0x15120F0C09060300, 0x00000000001E1B18}; + __m256i shuff1_r = {0x0706050403020100, 0x1D1A1714110A0908, + 0x0706050403020100, 0x1D1A1714110A0908}; + __m256i shuff0_g = {0x1613100D0A070401, 0x00000000001F1C19, + 0x1613100D0A070401, 0x00000000001F1C19}; + __m256i shuff1_g = {0x0706050403020100, 0x1E1B1815120A0908, + 0x0706050403020100, 0x1E1B1815120A0908}; + __m256i shuff0_b = {0x1714110E0B080502, 0x0000000000001D1A, + 0x1714110E0B080502, 0x0000000000001D1A}; + __m256i shuff1_b = {0x0706050403020100, 0x1F1C191613100908, + 0x0706050403020100, 0x1F1C191613100908}; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lasx_xvld, src_raw, 0, src_raw, 32, src_raw, 64, next_raw, 0, + reg0, reg1, reg2, tmp0); + DUP2_ARG2(__lasx_xvld, next_raw, 32, next_raw, 64, tmp1, tmp2); + DUP4_ARG3(__lasx_xvpermi_q, reg1, reg0, 0x30, reg2, reg0, 0x21, reg2, reg1, + 0x30, tmp1, tmp0, 0x30, src0, src1, src2, nex0); + DUP2_ARG3(__lasx_xvpermi_q, tmp2, tmp0, 0x21, tmp2, tmp1, 0x30, nex1, nex2); + DUP2_ARG3(__lasx_xvshuf_b, src1, src0, shuff0_b, nex1, nex0, shuff0_b, tmpb, + nexb); + DUP2_ARG3(__lasx_xvshuf_b, src1, src0, shuff0_g, nex1, nex0, shuff0_g, tmpg, + nexg); + DUP2_ARG3(__lasx_xvshuf_b, src1, src0, shuff0_r, nex1, nex0, shuff0_r, tmpr, + nexr); + DUP2_ARG3(__lasx_xvshuf_b, src2, tmpb, shuff1_b, nex2, nexb, shuff1_b, tmpb, + nexb); + DUP2_ARG3(__lasx_xvshuf_b, src2, tmpg, shuff1_g, nex2, nexg, shuff1_g, tmpg, + nexg); + DUP2_ARG3(__lasx_xvshuf_b, src2, tmpr, shuff1_r, nex2, nexr, shuff1_r, tmpr, + nexr); + RGBTOUV(tmpb, tmpg, tmpr, nexb, nexg, nexr, reg0, reg1); + dst0 = __lasx_xvpickod_b(reg1, reg0); + __lasx_xvstelm_d(dst0, dst_u, 0, 0); + __lasx_xvstelm_d(dst0, dst_v, 0, 1); + __lasx_xvstelm_d(dst0, dst_u, 8, 2); + __lasx_xvstelm_d(dst0, dst_v, 8, 3); + src_raw += 96; + next_raw += 96; + dst_u += 16; + dst_v += 16; + } +} + +void NV12ToARGBRow_LASX(const uint8_t* src_y, + const uint8_t* src_uv, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 16; + __m256i vec_yg, vec_yb, vec_ub, vec_vr, vec_ug, vec_vg; + __m256i vec_vrub, vec_vgug, vec_y, vec_vu; + __m256i out_b, out_g, out_r; + __m256i const_0x80 = __lasx_xvldi(0x80); + __m256i alpha = __lasx_xvldi(0xFF); + + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); + vec_vrub = __lasx_xvilvl_h(vec_vr, vec_ub); + vec_vgug = __lasx_xvilvl_h(vec_vg, vec_ug); + + for (x = 0; x < len; x++) { + vec_y = __lasx_xvld(src_y, 0); + vec_vu = __lasx_xvld(src_uv, 0); + vec_vu = __lasx_xvsub_b(vec_vu, const_0x80); + vec_vu = __lasx_vext2xv_h_b(vec_vu); + YUVTORGB(vec_y, vec_vu, vec_vrub, vec_vgug, vec_yg, vec_yb, out_r, out_g, + out_b); + STOREARGB(alpha, out_r, out_g, out_b, dst_argb); + src_y += 16; + src_uv += 16; + } +} + +void NV12ToRGB565Row_LASX(const uint8_t* src_y, + const uint8_t* src_uv, + uint8_t* dst_rgb565, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 
16; + __m256i vec_yg, vec_yb, vec_ub, vec_vr, vec_ug, vec_vg; + __m256i vec_vrub, vec_vgug, vec_y, vec_vu; + __m256i out_b, out_g, out_r; + __m256i const_0x80 = __lasx_xvldi(0x80); + + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); + vec_vrub = __lasx_xvilvl_h(vec_vr, vec_ub); + vec_vgug = __lasx_xvilvl_h(vec_vg, vec_ug); + + for (x = 0; x < len; x++) { + vec_y = __lasx_xvld(src_y, 0); + vec_vu = __lasx_xvld(src_uv, 0); + vec_vu = __lasx_xvsub_b(vec_vu, const_0x80); + vec_vu = __lasx_vext2xv_h_b(vec_vu); + YUVTORGB(vec_y, vec_vu, vec_vrub, vec_vgug, vec_yg, vec_yb, out_r, out_g, + out_b); + out_b = __lasx_xvsrli_h(out_b, 3); + out_g = __lasx_xvsrli_h(out_g, 2); + out_r = __lasx_xvsrli_h(out_r, 3); + out_g = __lasx_xvslli_h(out_g, 5); + out_r = __lasx_xvslli_h(out_r, 11); + out_r = __lasx_xvor_v(out_r, out_g); + out_r = __lasx_xvor_v(out_r, out_b); + __lasx_xvst(out_r, dst_rgb565, 0); + src_y += 16; + src_uv += 16; + dst_rgb565 += 32; + } +} + +void NV21ToARGBRow_LASX(const uint8_t* src_y, + const uint8_t* src_uv, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 16; + __m256i vec_yg, vec_yb, vec_ub, vec_vr, vec_ug, vec_vg; + __m256i vec_ubvr, vec_ugvg, vec_y, vec_uv; + __m256i out_b, out_g, out_r; + __m256i const_0x80 = __lasx_xvldi(0x80); + __m256i alpha = __lasx_xvldi(0xFF); + + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); + vec_ubvr = __lasx_xvilvl_h(vec_ub, vec_vr); + vec_ugvg = __lasx_xvilvl_h(vec_ug, vec_vg); + + for (x = 0; x < len; x++) { + vec_y = __lasx_xvld(src_y, 0); + vec_uv = __lasx_xvld(src_uv, 0); + vec_uv = __lasx_xvsub_b(vec_uv, const_0x80); + vec_uv = __lasx_vext2xv_h_b(vec_uv); + YUVTORGB(vec_y, vec_uv, vec_ubvr, vec_ugvg, vec_yg, vec_yb, out_b, out_g, + out_r); + STOREARGB(alpha, out_r, out_g, out_b, dst_argb); + src_y += 16; + src_uv += 16; + } +} + +struct RgbConstants { + uint8_t kRGBToY[4]; + uint16_t kAddY; + uint16_t pad; +}; + +// RGB to JPeg coefficients +// B * 0.1140 coefficient = 29 +// G * 0.5870 coefficient = 150 +// R * 0.2990 coefficient = 77 +// Add 0.5 = 0x80 +static const struct RgbConstants kRgb24JPEGConstants = {{29, 150, 77, 0}, + 128, + 0}; + +static const struct RgbConstants kRawJPEGConstants = {{77, 150, 29, 0}, 128, 0}; + +// RGB to BT.601 coefficients +// B * 0.1016 coefficient = 25 +// G * 0.5078 coefficient = 129 +// R * 0.2578 coefficient = 66 +// Add 16.5 = 0x1080 + +static const struct RgbConstants kRgb24I601Constants = {{25, 129, 66, 0}, + 0x1080, + 0}; + +static const struct RgbConstants kRawI601Constants = {{66, 129, 25, 0}, + 0x1080, + 0}; + +// ARGB expects first 3 values to contain RGB and 4th value is ignored. +static void ARGBToYMatrixRow_LASX(const uint8_t* src_argb, + uint8_t* dst_y, + int width, + const struct RgbConstants* rgbconstants) { + int32_t shuff[8] = {0, 4, 1, 5, 2, 6, 3, 7}; + asm volatile( + "xvldrepl.b $xr0, %3, 0 \n\t" // load rgbconstants + "xvldrepl.b $xr1, %3, 1 \n\t" // load rgbconstants + "xvldrepl.b $xr2, %3, 2 \n\t" // load rgbconstants + "xvldrepl.h $xr3, %3, 4 \n\t" // load rgbconstants + "xvld $xr20, %4, 0 \n\t" // load shuff + "1: \n\t" + "xvld $xr4, %0, 0 \n\t" + "xvld $xr5, %0, 32 \n\t" + "xvld $xr6, %0, 64 \n\t" + "xvld $xr7, %0, 96 \n\t" // load 32 pixels of ARGB + "xvor.v $xr12, $xr3, $xr3 \n\t" + "xvor.v $xr13, $xr3, $xr3 \n\t" + "addi.d %2, %2, -32 \n\t" // 32 processed per loop. 
+ "xvpickev.b $xr8, $xr5, $xr4 \n\t" //BR + "xvpickev.b $xr10, $xr7, $xr6 \n\t" + "xvpickod.b $xr9, $xr5, $xr4 \n\t" //GA + "xvpickod.b $xr11, $xr7, $xr6 \n\t" + "xvmaddwev.h.bu $xr12, $xr8, $xr0 \n\t" //B + "xvmaddwev.h.bu $xr13, $xr10, $xr0 \n\t" + "xvmaddwev.h.bu $xr12, $xr9, $xr1 \n\t" //G + "xvmaddwev.h.bu $xr13, $xr11, $xr1 \n\t" + "xvmaddwod.h.bu $xr12, $xr8, $xr2 \n\t" //R + "xvmaddwod.h.bu $xr13, $xr10, $xr2 \n\t" + "addi.d %0, %0, 128 \n\t" + "xvpickod.b $xr10, $xr13, $xr12 \n\t" + "xvperm.w $xr11, $xr10, $xr20 \n\t" + "xvst $xr11, %1, 0 \n\t" + "addi.d %1, %1, 32 \n\t" + "bnez %2, 1b \n\t" + : "+&r"(src_argb), // %0 + "+&r"(dst_y), // %1 + "+&r"(width) // %2 + : "r"(rgbconstants), + "r"(shuff) + : "memory" + ); +} + +void ARGBToYRow_LASX(const uint8_t* src_argb, uint8_t* dst_y, int width) { + ARGBToYMatrixRow_LASX(src_argb, dst_y, width, &kRgb24I601Constants); +} + +void ARGBToYJRow_LASX(const uint8_t* src_argb, uint8_t* dst_yj, int width) { + ARGBToYMatrixRow_LASX(src_argb, dst_yj, width, &kRgb24JPEGConstants); +} + +void ABGRToYRow_LASX(const uint8_t* src_abgr, uint8_t* dst_y, int width) { + ARGBToYMatrixRow_LASX(src_abgr, dst_y, width, &kRawI601Constants); +} + +void ABGRToYJRow_LASX(const uint8_t* src_abgr, uint8_t* dst_yj, int width) { + ARGBToYMatrixRow_LASX(src_abgr, dst_yj, width, &kRawJPEGConstants); +} + +// RGBA expects first value to be A and ignored, then 3 values to contain RGB. +// Same code as ARGB, except the LD4 +static void RGBAToYMatrixRow_LASX(const uint8_t* src_rgba, + uint8_t* dst_y, + int width, + const struct RgbConstants* rgbconstants) { + int32_t shuff[8] = {0, 4, 1, 5, 2, 6, 3, 7}; + asm volatile( + "xvldrepl.b $xr0, %3, 0 \n\t" // load rgbconstants + "xvldrepl.b $xr1, %3, 1 \n\t" // load rgbconstants + "xvldrepl.b $xr2, %3, 2 \n\t" // load rgbconstants + "xvldrepl.h $xr3, %3, 4 \n\t" // load rgbconstants + "xvld $xr20, %4, 0 \n\t" // load shuff + "1: \n\t" + "xvld $xr4, %0, 0 \n\t" + "xvld $xr5, %0, 32 \n\t" + "xvld $xr6, %0, 64 \n\t" + "xvld $xr7, %0, 96 \n\t" // load 32 pixels of RGBA + "xvor.v $xr12, $xr3, $xr3 \n\t" + "xvor.v $xr13, $xr3, $xr3 \n\t" + "addi.d %2, %2, -32 \n\t" // 32 processed per loop. 
+ "xvpickev.b $xr8, $xr5, $xr4 \n\t" //AG + "xvpickev.b $xr10, $xr7, $xr6 \n\t" + "xvpickod.b $xr9, $xr5, $xr4 \n\t" //BR + "xvpickod.b $xr11, $xr7, $xr6 \n\t" + "xvmaddwev.h.bu $xr12, $xr9, $xr0 \n\t" //B + "xvmaddwev.h.bu $xr13, $xr11, $xr0 \n\t" + "xvmaddwod.h.bu $xr12, $xr8, $xr1 \n\t" //G + "xvmaddwod.h.bu $xr13, $xr10, $xr1 \n\t" + "xvmaddwod.h.bu $xr12, $xr9, $xr2 \n\t" //R + "xvmaddwod.h.bu $xr13, $xr11, $xr2 \n\t" + "addi.d %0, %0, 128 \n\t" + "xvpickod.b $xr10, $xr13, $xr12 \n\t" + "xvperm.w $xr11, $xr10, $xr20 \n\t" + "xvst $xr11, %1, 0 \n\t" + "addi.d %1, %1, 32 \n\t" + "bnez %2, 1b \n\t" + : "+&r"(src_rgba), // %0 + "+&r"(dst_y), // %1 + "+&r"(width) // %2 + : "r"(rgbconstants), + "r"(shuff) + : "memory" + ); +} + +void RGBAToYRow_LASX(const uint8_t* src_rgba, uint8_t* dst_y, int width) { + RGBAToYMatrixRow_LASX(src_rgba, dst_y, width, &kRgb24I601Constants); +} + +void RGBAToYJRow_LASX(const uint8_t* src_rgba, uint8_t* dst_yj, int width) { + RGBAToYMatrixRow_LASX(src_rgba, dst_yj, width, &kRgb24JPEGConstants); +} + +void BGRAToYRow_LASX(const uint8_t* src_bgra, uint8_t* dst_y, int width) { + RGBAToYMatrixRow_LASX(src_bgra, dst_y, width, &kRawI601Constants); +} + +static void RGBToYMatrixRow_LASX(const uint8_t* src_rgba, + uint8_t* dst_y, + int width, + const struct RgbConstants* rgbconstants) { + int8_t shuff[128] = {0, 2, 3, 5, 6, 8, 9, 11, 12, 14, 15, 17, 18, 20, 21, 23, + 0, 2, 3, 5, 6, 8, 9, 11, 12, 14, 15, 17, 18, 20, 21, 23, + 24, 26, 27, 29, 30, 0, 1, 3, 4, 6, 7, 9, 10, 12, 13, 15, + 24, 26, 27, 29, 30, 0, 1, 3, 4, 6, 7, 9, 10, 12, 13, 15, + 1, 0, 4, 0, 7, 0, 10, 0, 13, 0, 16, 0, 19, 0, 22, 0, + 1, 0, 4, 0, 7, 0, 10, 0, 13, 0, 16, 0, 19, 0, 22, 0, + 25, 0, 28, 0, 31, 0, 2, 0, 5, 0, 8, 0, 11, 0, 14, 0, + 25, 0, 28, 0, 31, 0, 2, 0, 5, 0, 8, 0, 11, 0, 14, 0}; + asm volatile( + "xvldrepl.b $xr0, %3, 0 \n\t" // load rgbconstants + "xvldrepl.b $xr1, %3, 1 \n\t" // load rgbconstants + "xvldrepl.b $xr2, %3, 2 \n\t" // load rgbconstants + "xvldrepl.h $xr3, %3, 4 \n\t" // load rgbconstants + "xvld $xr4, %4, 0 \n\t" // load shuff + "xvld $xr5, %4, 32 \n\t" + "xvld $xr6, %4, 64 \n\t" + "xvld $xr7, %4, 96 \n\t" + "1: \n\t" + "xvld $xr8, %0, 0 \n\t" + "xvld $xr9, %0, 32 \n\t" + "xvld $xr10, %0, 64 \n\t" // load 32 pixels of RGB + "xvor.v $xr12, $xr3, $xr3 \n\t" + "xvor.v $xr13, $xr3, $xr3 \n\t" + "xvor.v $xr11, $xr9, $xr9 \n\t" + "addi.d %2, %2, -32 \n\t" // 32 processed per loop. 
+ "xvpermi.q $xr9, $xr8, 0x30 \n\t" //src0 + "xvpermi.q $xr8, $xr10, 0x03 \n\t" //src1 + "xvpermi.q $xr10, $xr11, 0x30 \n\t" //src2 + "xvshuf.b $xr14, $xr8, $xr9, $xr4 \n\t" + "xvshuf.b $xr15, $xr8, $xr10, $xr5 \n\t" + "xvshuf.b $xr16, $xr8, $xr9, $xr6 \n\t" + "xvshuf.b $xr17, $xr8, $xr10, $xr7 \n\t" + "xvmaddwev.h.bu $xr12, $xr16, $xr1 \n\t" //G + "xvmaddwev.h.bu $xr13, $xr17, $xr1 \n\t" + "xvmaddwev.h.bu $xr12, $xr14, $xr0 \n\t" //B + "xvmaddwev.h.bu $xr13, $xr15, $xr0 \n\t" + "xvmaddwod.h.bu $xr12, $xr14, $xr2 \n\t" //R + "xvmaddwod.h.bu $xr13, $xr15, $xr2 \n\t" + "addi.d %0, %0, 96 \n\t" + "xvpickod.b $xr10, $xr13, $xr12 \n\t" + "xvst $xr10, %1, 0 \n\t" + "addi.d %1, %1, 32 \n\t" + "bnez %2, 1b \n\t" + : "+&r"(src_rgba), // %0 + "+&r"(dst_y), // %1 + "+&r"(width) // %2 + : "r"(rgbconstants), // %3 + "r"(shuff) // %4 + : "memory" + ); +} + +void RGB24ToYJRow_LASX(const uint8_t* src_rgb24, uint8_t* dst_yj, int width) { + RGBToYMatrixRow_LASX(src_rgb24, dst_yj, width, &kRgb24JPEGConstants); +} + +void RAWToYJRow_LASX(const uint8_t* src_raw, uint8_t* dst_yj, int width) { + RGBToYMatrixRow_LASX(src_raw, dst_yj, width, &kRawJPEGConstants); +} + +void RGB24ToYRow_LASX(const uint8_t* src_rgb24, uint8_t* dst_y, int width) { + RGBToYMatrixRow_LASX(src_rgb24, dst_y, width, &kRgb24I601Constants); +} + +void RAWToYRow_LASX(const uint8_t* src_raw, uint8_t* dst_y, int width) { + RGBToYMatrixRow_LASX(src_raw, dst_y, width, &kRawI601Constants); +} + +void ARGBToUVJRow_LASX(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + const uint8_t* next_argb = src_argb + src_stride_argb; + int len = width / 32; + __m256i src0, src1, src2, src3; + __m256i nex0, nex1, nex2, nex3; + __m256i tmp0, tmp1, tmp2, tmp3; + __m256i reg0, reg1, dst0; + __m256i tmpb, tmpg, tmpr, nexb, nexg, nexr; + __m256i const_63 = __lasx_xvldi(0x43F); + __m256i const_42 = __lasx_xvldi(0x42A); + __m256i const_21 = __lasx_xvldi(0x415); + __m256i const_53 = __lasx_xvldi(0x435); + __m256i const_10 = __lasx_xvldi(0x40A); + __m256i const_8080 = {0x8080808080808080, 0x8080808080808080, + 0x8080808080808080, 0x8080808080808080}; + __m256i shuff = {0x1614060412100200, 0x1E1C0E0C1A180A08, 0x1715070513110301, + 0x1F1D0F0D1B190B09}; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lasx_xvld, src_argb, 0, src_argb, 32, src_argb, 64, src_argb, + 96, src0, src1, src2, src3); + DUP4_ARG2(__lasx_xvld, next_argb, 0, next_argb, 32, next_argb, 64, + next_argb, 96, nex0, nex1, nex2, nex3); + tmp0 = __lasx_xvpickev_b(src1, src0); + tmp1 = __lasx_xvpickod_b(src1, src0); + tmp2 = __lasx_xvpickev_b(src3, src2); + tmp3 = __lasx_xvpickod_b(src3, src2); + tmpr = __lasx_xvpickod_b(tmp2, tmp0); + tmpb = __lasx_xvpickev_b(tmp2, tmp0); + tmpg = __lasx_xvpickev_b(tmp3, tmp1); + tmp0 = __lasx_xvpickev_b(nex1, nex0); + tmp1 = __lasx_xvpickod_b(nex1, nex0); + tmp2 = __lasx_xvpickev_b(nex3, nex2); + tmp3 = __lasx_xvpickod_b(nex3, nex2); + nexr = __lasx_xvpickod_b(tmp2, tmp0); + nexb = __lasx_xvpickev_b(tmp2, tmp0); + nexg = __lasx_xvpickev_b(tmp3, tmp1); + tmp0 = __lasx_xvaddwev_h_bu(tmpb, nexb); + tmp1 = __lasx_xvaddwod_h_bu(tmpb, nexb); + tmp2 = __lasx_xvaddwev_h_bu(tmpg, nexg); + tmp3 = __lasx_xvaddwod_h_bu(tmpg, nexg); + reg0 = __lasx_xvaddwev_h_bu(tmpr, nexr); + reg1 = __lasx_xvaddwod_h_bu(tmpr, nexr); + tmpb = __lasx_xvavgr_hu(tmp0, tmp1); + tmpg = __lasx_xvavgr_hu(tmp2, tmp3); + tmpr = __lasx_xvavgr_hu(reg0, reg1); + reg0 = __lasx_xvmadd_h(const_8080, const_63, tmpb); + reg1 = __lasx_xvmadd_h(const_8080, const_63, 
tmpr); + reg0 = __lasx_xvmsub_h(reg0, const_42, tmpg); + reg1 = __lasx_xvmsub_h(reg1, const_53, tmpg); + reg0 = __lasx_xvmsub_h(reg0, const_21, tmpr); + reg1 = __lasx_xvmsub_h(reg1, const_10, tmpb); + dst0 = __lasx_xvpackod_b(reg1, reg0); + tmp0 = __lasx_xvpermi_d(dst0, 0x44); + tmp1 = __lasx_xvpermi_d(dst0, 0xEE); + dst0 = __lasx_xvshuf_b(tmp1, tmp0, shuff); + __lasx_xvstelm_d(dst0, dst_u, 0, 0); + __lasx_xvstelm_d(dst0, dst_v, 0, 2); + __lasx_xvstelm_d(dst0, dst_u, 8, 1); + __lasx_xvstelm_d(dst0, dst_v, 8, 3); + dst_u += 16; + dst_v += 16; + src_argb += 128; + next_argb += 128; + } +} + +#ifdef __cplusplus +} // extern "C" +} // namespace libyuv +#endif + +#endif // !defined(LIBYUV_DISABLE_LASX) && defined(__loongarch_asx) diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_lsx.cc b/TMessagesProj/jni/third_party/libyuv/source/row_lsx.cc new file mode 100644 index 0000000000..0825b63359 --- /dev/null +++ b/TMessagesProj/jni/third_party/libyuv/source/row_lsx.cc @@ -0,0 +1,1857 @@ +/* + * Copyright 2022 The LibYuv Project Authors. All rights reserved. + * + * Copyright (c) 2022 Loongson Technology Corporation Limited + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "libyuv/row.h" + +#if !defined(LIBYUV_DISABLE_LSX) && defined(__loongarch_sx) +#include "libyuv/loongson_intrinsics.h" + +#ifdef __cplusplus +namespace libyuv { +extern "C" { +#endif + +// Fill YUV -> RGB conversion constants into vectors +#define YUVTORGB_SETUP(yuvconst, vr, ub, vg, ug, yg, yb) \ + { \ + ub = __lsx_vreplgr2vr_h(yuvconst->kUVToB[0]); \ + vr = __lsx_vreplgr2vr_h(yuvconst->kUVToR[1]); \ + ug = __lsx_vreplgr2vr_h(yuvconst->kUVToG[0]); \ + vg = __lsx_vreplgr2vr_h(yuvconst->kUVToG[1]); \ + yg = __lsx_vreplgr2vr_h(yuvconst->kYToRgb[0]); \ + yb = __lsx_vreplgr2vr_w(yuvconst->kYBiasToRgb[0]); \ + } + +// Convert 8 pixels of YUV420 to RGB. 
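For readers checking the fixed-point math, here is a minimal scalar sketch of what the YUVTORGB macro below computes for one pixel. It is an illustration only, not part of the patch; `clamp255`, `yuv_pixel_sketch` and the argument names are assumptions, and yg, yb, ub, vr, ug, vg stand for the per-lane values loaded by YUVTORGB_SETUP from struct YuvConstants.

#include <stdint.h>

// Illustrative scalar equivalent of one YUVTORGB pixel (assumed helper names).
static inline uint8_t clamp255(int32_t v) {
  return (uint8_t)(v < 0 ? 0 : (v > 255 ? 255 : v));
}

static void yuv_pixel_sketch(uint8_t y, uint8_t u, uint8_t v,
                             int32_t yg, int32_t yb, int32_t ub, int32_t vr,
                             int32_t ug, int32_t vg,
                             uint8_t* b, uint8_t* g, uint8_t* r) {
  // Luma term: y is widened to y*0x0101, scaled by yg (>>16) and biased by yb.
  int32_t y1 = (int32_t)(((uint32_t)y * 0x0101 * (uint32_t)yg) >> 16) + yb;
  // Chroma is centred on 128 before applying the mixing coefficients.
  int32_t du = (int32_t)u - 128;
  int32_t dv = (int32_t)v - 128;
  *b = clamp255((y1 + du * ub) >> 6);              // out_b path
  *g = clamp255((y1 - (du * ug + dv * vg)) >> 6);  // out_g path (vdp2 term)
  *r = clamp255((y1 + dv * vr) >> 6);              // out_r path
}

The vector code evaluates the same expressions for eight pixels at a time, splitting even and odd lanes so the 32-bit intermediates never overflow.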
+#define YUVTORGB(in_y, in_vu, vrub, vgug, yg, yb, out_b, out_g, out_r) \ + { \ + __m128i y_ev, y_od, u_l, v_l; \ + __m128i tmp0, tmp1, tmp2, tmp3; \ + \ + tmp0 = __lsx_vilvl_b(in_y, in_y); \ + y_ev = __lsx_vmulwev_w_hu_h(tmp0, yg); \ + y_od = __lsx_vmulwod_w_hu_h(tmp0, yg); \ + y_ev = __lsx_vsrai_w(y_ev, 16); \ + y_od = __lsx_vsrai_w(y_od, 16); \ + y_ev = __lsx_vadd_w(y_ev, yb); \ + y_od = __lsx_vadd_w(y_od, yb); \ + in_vu = __lsx_vilvl_b(zero, in_vu); \ + in_vu = __lsx_vsub_h(in_vu, const_80); \ + u_l = __lsx_vmulwev_w_h(in_vu, vrub); \ + v_l = __lsx_vmulwod_w_h(in_vu, vrub); \ + tmp0 = __lsx_vadd_w(y_ev, u_l); \ + tmp1 = __lsx_vadd_w(y_od, u_l); \ + tmp2 = __lsx_vadd_w(y_ev, v_l); \ + tmp3 = __lsx_vadd_w(y_od, v_l); \ + tmp0 = __lsx_vsrai_w(tmp0, 6); \ + tmp1 = __lsx_vsrai_w(tmp1, 6); \ + tmp2 = __lsx_vsrai_w(tmp2, 6); \ + tmp3 = __lsx_vsrai_w(tmp3, 6); \ + tmp0 = __lsx_vclip255_w(tmp0); \ + tmp1 = __lsx_vclip255_w(tmp1); \ + tmp2 = __lsx_vclip255_w(tmp2); \ + tmp3 = __lsx_vclip255_w(tmp3); \ + out_b = __lsx_vpackev_h(tmp1, tmp0); \ + out_r = __lsx_vpackev_h(tmp3, tmp2); \ + tmp0 = __lsx_vdp2_w_h(in_vu, vgug); \ + tmp1 = __lsx_vsub_w(y_ev, tmp0); \ + tmp2 = __lsx_vsub_w(y_od, tmp0); \ + tmp1 = __lsx_vsrai_w(tmp1, 6); \ + tmp2 = __lsx_vsrai_w(tmp2, 6); \ + tmp1 = __lsx_vclip255_w(tmp1); \ + tmp2 = __lsx_vclip255_w(tmp2); \ + out_g = __lsx_vpackev_h(tmp2, tmp1); \ + } + +// Convert I444 pixels of YUV420 to RGB. +#define I444TORGB(in_yy, in_u, in_v, ub, vr, ugvg, yg, yb, out_b, out_g, \ + out_r) \ + { \ + __m128i y_ev, y_od, u_ev, v_ev, u_od, v_od; \ + __m128i tmp0, tmp1, tmp2, tmp3; \ + \ + y_ev = __lsx_vmulwev_w_hu_h(in_yy, yg); \ + y_od = __lsx_vmulwod_w_hu_h(in_yy, yg); \ + y_ev = __lsx_vsrai_w(y_ev, 16); \ + y_od = __lsx_vsrai_w(y_od, 16); \ + y_ev = __lsx_vadd_w(y_ev, yb); \ + y_od = __lsx_vadd_w(y_od, yb); \ + in_u = __lsx_vsub_h(in_u, const_80); \ + in_v = __lsx_vsub_h(in_v, const_80); \ + u_ev = __lsx_vmulwev_w_h(in_u, ub); \ + u_od = __lsx_vmulwod_w_h(in_u, ub); \ + v_ev = __lsx_vmulwev_w_h(in_v, vr); \ + v_od = __lsx_vmulwod_w_h(in_v, vr); \ + tmp0 = __lsx_vadd_w(y_ev, u_ev); \ + tmp1 = __lsx_vadd_w(y_od, u_od); \ + tmp2 = __lsx_vadd_w(y_ev, v_ev); \ + tmp3 = __lsx_vadd_w(y_od, v_od); \ + tmp0 = __lsx_vsrai_w(tmp0, 6); \ + tmp1 = __lsx_vsrai_w(tmp1, 6); \ + tmp2 = __lsx_vsrai_w(tmp2, 6); \ + tmp3 = __lsx_vsrai_w(tmp3, 6); \ + tmp0 = __lsx_vclip255_w(tmp0); \ + tmp1 = __lsx_vclip255_w(tmp1); \ + tmp2 = __lsx_vclip255_w(tmp2); \ + tmp3 = __lsx_vclip255_w(tmp3); \ + out_b = __lsx_vpackev_h(tmp1, tmp0); \ + out_r = __lsx_vpackev_h(tmp3, tmp2); \ + u_ev = __lsx_vpackev_h(in_u, in_v); \ + u_od = __lsx_vpackod_h(in_u, in_v); \ + v_ev = __lsx_vdp2_w_h(u_ev, ugvg); \ + v_od = __lsx_vdp2_w_h(u_od, ugvg); \ + tmp0 = __lsx_vsub_w(y_ev, v_ev); \ + tmp1 = __lsx_vsub_w(y_od, v_od); \ + tmp0 = __lsx_vsrai_w(tmp0, 6); \ + tmp1 = __lsx_vsrai_w(tmp1, 6); \ + tmp0 = __lsx_vclip255_w(tmp0); \ + tmp1 = __lsx_vclip255_w(tmp1); \ + out_g = __lsx_vpackev_h(tmp1, tmp0); \ + } + +// Pack and Store 8 ARGB values. 
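+// In memory libyuv "ARGB" is the byte sequence B,G,R,A per pixel (a
+// little-endian 0xAARRGGBB word), so the scalar equivalent is simply
+//   dst[4*i + 0] = B; dst[4*i + 1] = G; dst[4*i + 2] = R; dst[4*i + 3] = A;
+// the packev/ilvl steps below interleave the low bytes of the 16-bit lanes
+// into that order.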
+#define STOREARGB(in_a, in_r, in_g, in_b, pdst_argb) \ + { \ + __m128i temp0, temp1; \ + __m128i dst0, dst1; \ + \ + temp0 = __lsx_vpackev_b(in_g, in_b); \ + temp1 = __lsx_vpackev_b(in_a, in_r); \ + dst0 = __lsx_vilvl_h(temp1, temp0); \ + dst1 = __lsx_vilvh_h(temp1, temp0); \ + __lsx_vst(dst0, pdst_argb, 0); \ + __lsx_vst(dst1, pdst_argb, 16); \ + pdst_argb += 32; \ + } + +#define RGBTOUV(_tmpb, _tmpg, _tmpr, _nexb, _nexg, _nexr, _dst0) \ + { \ + __m128i _tmp0, _tmp1, _tmp2, _tmp3; \ + __m128i _reg0, _reg1; \ + _tmp0 = __lsx_vaddwev_h_bu(_tmpb, _nexb); \ + _tmp1 = __lsx_vaddwod_h_bu(_tmpb, _nexb); \ + _tmp2 = __lsx_vaddwev_h_bu(_tmpg, _nexg); \ + _tmp3 = __lsx_vaddwod_h_bu(_tmpg, _nexg); \ + _reg0 = __lsx_vaddwev_h_bu(_tmpr, _nexr); \ + _reg1 = __lsx_vaddwod_h_bu(_tmpr, _nexr); \ + _tmpb = __lsx_vavgr_hu(_tmp0, _tmp1); \ + _tmpg = __lsx_vavgr_hu(_tmp2, _tmp3); \ + _tmpr = __lsx_vavgr_hu(_reg0, _reg1); \ + _reg0 = __lsx_vmadd_h(const_8080, const_112, _tmpb); \ + _reg1 = __lsx_vmadd_h(const_8080, const_112, _tmpr); \ + _reg0 = __lsx_vmsub_h(_reg0, const_74, _tmpg); \ + _reg1 = __lsx_vmsub_h(_reg1, const_94, _tmpg); \ + _reg0 = __lsx_vmsub_h(_reg0, const_38, _tmpr); \ + _reg1 = __lsx_vmsub_h(_reg1, const_18, _tmpb); \ + _dst0 = __lsx_vpickod_b(_reg1, _reg0); \ + } + +void ARGB4444ToARGBRow_LSX(const uint8_t* src_argb4444, + uint8_t* dst_argb, + int width) { + int x; + int len = width / 16; + __m128i src0, src1; + __m128i tmp0, tmp1, tmp2, tmp3; + __m128i reg0, reg1, reg2, reg3; + __m128i dst0, dst1, dst2, dst3; + + for (x = 0; x < len; x++) { + src0 = __lsx_vld(src_argb4444, 0); + src1 = __lsx_vld(src_argb4444, 16); + tmp0 = __lsx_vandi_b(src0, 0x0F); + tmp1 = __lsx_vandi_b(src0, 0xF0); + tmp2 = __lsx_vandi_b(src1, 0x0F); + tmp3 = __lsx_vandi_b(src1, 0xF0); + reg0 = __lsx_vslli_b(tmp0, 4); + reg2 = __lsx_vslli_b(tmp2, 4); + reg1 = __lsx_vsrli_b(tmp1, 4); + reg3 = __lsx_vsrli_b(tmp3, 4); + DUP4_ARG2(__lsx_vor_v, tmp0, reg0, tmp1, reg1, tmp2, reg2, tmp3, reg3, tmp0, + tmp1, tmp2, tmp3); + dst0 = __lsx_vilvl_b(tmp1, tmp0); + dst2 = __lsx_vilvl_b(tmp3, tmp2); + dst1 = __lsx_vilvh_b(tmp1, tmp0); + dst3 = __lsx_vilvh_b(tmp3, tmp2); + __lsx_vst(dst0, dst_argb, 0); + __lsx_vst(dst1, dst_argb, 16); + __lsx_vst(dst2, dst_argb, 32); + __lsx_vst(dst3, dst_argb, 48); + dst_argb += 64; + src_argb4444 += 32; + } +} + +void ARGB1555ToARGBRow_LSX(const uint8_t* src_argb1555, + uint8_t* dst_argb, + int width) { + int x; + int len = width / 16; + __m128i src0, src1; + __m128i tmp0, tmp1, tmpb, tmpg, tmpr, tmpa; + __m128i reg0, reg1, reg2; + __m128i dst0, dst1, dst2, dst3; + + for (x = 0; x < len; x++) { + src0 = __lsx_vld(src_argb1555, 0); + src1 = __lsx_vld(src_argb1555, 16); + tmp0 = __lsx_vpickev_b(src1, src0); + tmp1 = __lsx_vpickod_b(src1, src0); + tmpb = __lsx_vandi_b(tmp0, 0x1F); + tmpg = __lsx_vsrli_b(tmp0, 5); + reg0 = __lsx_vandi_b(tmp1, 0x03); + reg0 = __lsx_vslli_b(reg0, 3); + tmpg = __lsx_vor_v(tmpg, reg0); + reg1 = __lsx_vandi_b(tmp1, 0x7C); + tmpr = __lsx_vsrli_b(reg1, 2); + tmpa = __lsx_vsrli_b(tmp1, 7); + tmpa = __lsx_vneg_b(tmpa); + reg0 = __lsx_vslli_b(tmpb, 3); + reg1 = __lsx_vslli_b(tmpg, 3); + reg2 = __lsx_vslli_b(tmpr, 3); + tmpb = __lsx_vsrli_b(tmpb, 2); + tmpg = __lsx_vsrli_b(tmpg, 2); + tmpr = __lsx_vsrli_b(tmpr, 2); + tmpb = __lsx_vor_v(reg0, tmpb); + tmpg = __lsx_vor_v(reg1, tmpg); + tmpr = __lsx_vor_v(reg2, tmpr); + DUP2_ARG2(__lsx_vilvl_b, tmpg, tmpb, tmpa, tmpr, reg0, reg1); + dst0 = __lsx_vilvl_h(reg1, reg0); + dst1 = __lsx_vilvh_h(reg1, reg0); + DUP2_ARG2(__lsx_vilvh_b, tmpg, 
tmpb, tmpa, tmpr, reg0, reg1); + dst2 = __lsx_vilvl_h(reg1, reg0); + dst3 = __lsx_vilvh_h(reg1, reg0); + __lsx_vst(dst0, dst_argb, 0); + __lsx_vst(dst1, dst_argb, 16); + __lsx_vst(dst2, dst_argb, 32); + __lsx_vst(dst3, dst_argb, 48); + dst_argb += 64; + src_argb1555 += 32; + } +} + +void RGB565ToARGBRow_LSX(const uint8_t* src_rgb565, + uint8_t* dst_argb, + int width) { + int x; + int len = width / 16; + __m128i src0, src1; + __m128i tmp0, tmp1, tmpb, tmpg, tmpr; + __m128i reg0, reg1, dst0, dst1, dst2, dst3; + __m128i alpha = __lsx_vldi(0xFF); + + for (x = 0; x < len; x++) { + src0 = __lsx_vld(src_rgb565, 0); + src1 = __lsx_vld(src_rgb565, 16); + tmp0 = __lsx_vpickev_b(src1, src0); + tmp1 = __lsx_vpickod_b(src1, src0); + tmpb = __lsx_vandi_b(tmp0, 0x1F); + tmpr = __lsx_vandi_b(tmp1, 0xF8); + reg1 = __lsx_vandi_b(tmp1, 0x07); + reg0 = __lsx_vsrli_b(tmp0, 5); + reg1 = __lsx_vslli_b(reg1, 3); + tmpg = __lsx_vor_v(reg1, reg0); + reg0 = __lsx_vslli_b(tmpb, 3); + reg1 = __lsx_vsrli_b(tmpb, 2); + tmpb = __lsx_vor_v(reg1, reg0); + reg0 = __lsx_vslli_b(tmpg, 2); + reg1 = __lsx_vsrli_b(tmpg, 4); + tmpg = __lsx_vor_v(reg1, reg0); + reg0 = __lsx_vsrli_b(tmpr, 5); + tmpr = __lsx_vor_v(tmpr, reg0); + DUP2_ARG2(__lsx_vilvl_b, tmpg, tmpb, alpha, tmpr, reg0, reg1); + dst0 = __lsx_vilvl_h(reg1, reg0); + dst1 = __lsx_vilvh_h(reg1, reg0); + DUP2_ARG2(__lsx_vilvh_b, tmpg, tmpb, alpha, tmpr, reg0, reg1); + dst2 = __lsx_vilvl_h(reg1, reg0); + dst3 = __lsx_vilvh_h(reg1, reg0); + __lsx_vst(dst0, dst_argb, 0); + __lsx_vst(dst1, dst_argb, 16); + __lsx_vst(dst2, dst_argb, 32); + __lsx_vst(dst3, dst_argb, 48); + dst_argb += 64; + src_rgb565 += 32; + } +} + +void RGB24ToARGBRow_LSX(const uint8_t* src_rgb24, + uint8_t* dst_argb, + int width) { + int x; + int len = width / 16; + __m128i src0, src1, src2; + __m128i tmp0, tmp1, tmp2; + __m128i dst0, dst1, dst2, dst3; + __m128i alpha = __lsx_vldi(0xFF); + __m128i shuf0 = {0x131211100F0E0D0C, 0x1B1A191817161514}; + __m128i shuf1 = {0x1F1E1D1C1B1A1918, 0x0706050403020100}; + __m128i shuf2 = {0x0B0A090807060504, 0x131211100F0E0D0C}; + __m128i shuf3 = {0x1005040310020100, 0x100B0A0910080706}; + + for (x = 0; x < len; x++) { + src0 = __lsx_vld(src_rgb24, 0); + src1 = __lsx_vld(src_rgb24, 16); + src2 = __lsx_vld(src_rgb24, 32); + DUP2_ARG3(__lsx_vshuf_b, src1, src0, shuf0, src1, src2, shuf1, tmp0, tmp1); + tmp2 = __lsx_vshuf_b(src1, src2, shuf2); + DUP4_ARG3(__lsx_vshuf_b, alpha, src0, shuf3, alpha, tmp0, shuf3, alpha, + tmp1, shuf3, alpha, tmp2, shuf3, dst0, dst1, dst2, dst3); + __lsx_vst(dst0, dst_argb, 0); + __lsx_vst(dst1, dst_argb, 16); + __lsx_vst(dst2, dst_argb, 32); + __lsx_vst(dst3, dst_argb, 48); + dst_argb += 64; + src_rgb24 += 48; + } +} + +void RAWToARGBRow_LSX(const uint8_t* src_raw, uint8_t* dst_argb, int width) { + int x; + int len = width / 16; + __m128i src0, src1, src2; + __m128i tmp0, tmp1, tmp2; + __m128i dst0, dst1, dst2, dst3; + __m128i alpha = __lsx_vldi(0xFF); + __m128i shuf0 = {0x131211100F0E0D0C, 0x1B1A191817161514}; + __m128i shuf1 = {0x1F1E1D1C1B1A1918, 0x0706050403020100}; + __m128i shuf2 = {0x0B0A090807060504, 0x131211100F0E0D0C}; + __m128i shuf3 = {0x1003040510000102, 0x10090A0B10060708}; + + for (x = 0; x < len; x++) { + src0 = __lsx_vld(src_raw, 0); + src1 = __lsx_vld(src_raw, 16); + src2 = __lsx_vld(src_raw, 32); + DUP2_ARG3(__lsx_vshuf_b, src1, src0, shuf0, src1, src2, shuf1, tmp0, tmp1); + tmp2 = __lsx_vshuf_b(src1, src2, shuf2); + DUP4_ARG3(__lsx_vshuf_b, alpha, src0, shuf3, alpha, tmp0, shuf3, alpha, + tmp1, shuf3, alpha, tmp2, shuf3, dst0, 
dst1, dst2, dst3); + __lsx_vst(dst0, dst_argb, 0); + __lsx_vst(dst1, dst_argb, 16); + __lsx_vst(dst2, dst_argb, 32); + __lsx_vst(dst3, dst_argb, 48); + dst_argb += 64; + src_raw += 48; + } +} + +void ARGB1555ToYRow_LSX(const uint8_t* src_argb1555, + uint8_t* dst_y, + int width) { + int x; + int len = width / 16; + __m128i src0, src1; + __m128i tmp0, tmp1, tmpb, tmpg, tmpr; + __m128i reg0, reg1, reg2, dst0; + __m128i const_66 = __lsx_vldi(66); + __m128i const_129 = __lsx_vldi(129); + __m128i const_25 = __lsx_vldi(25); + __m128i const_1080 = {0x1080108010801080, 0x1080108010801080}; + + for (x = 0; x < len; x++) { + src0 = __lsx_vld(src_argb1555, 0); + src1 = __lsx_vld(src_argb1555, 16); + tmp0 = __lsx_vpickev_b(src1, src0); + tmp1 = __lsx_vpickod_b(src1, src0); + tmpb = __lsx_vandi_b(tmp0, 0x1F); + tmpg = __lsx_vsrli_b(tmp0, 5); + reg0 = __lsx_vandi_b(tmp1, 0x03); + reg0 = __lsx_vslli_b(reg0, 3); + tmpg = __lsx_vor_v(tmpg, reg0); + reg1 = __lsx_vandi_b(tmp1, 0x7C); + tmpr = __lsx_vsrli_b(reg1, 2); + reg0 = __lsx_vslli_b(tmpb, 3); + reg1 = __lsx_vslli_b(tmpg, 3); + reg2 = __lsx_vslli_b(tmpr, 3); + tmpb = __lsx_vsrli_b(tmpb, 2); + tmpg = __lsx_vsrli_b(tmpg, 2); + tmpr = __lsx_vsrli_b(tmpr, 2); + tmpb = __lsx_vor_v(reg0, tmpb); + tmpg = __lsx_vor_v(reg1, tmpg); + tmpr = __lsx_vor_v(reg2, tmpr); + reg0 = __lsx_vmaddwev_h_bu(const_1080, tmpb, const_25); + reg1 = __lsx_vmaddwod_h_bu(const_1080, tmpb, const_25); + reg0 = __lsx_vmaddwev_h_bu(reg0, tmpg, const_129); + reg1 = __lsx_vmaddwod_h_bu(reg1, tmpg, const_129); + reg0 = __lsx_vmaddwev_h_bu(reg0, tmpr, const_66); + reg1 = __lsx_vmaddwod_h_bu(reg1, tmpr, const_66); + dst0 = __lsx_vpackod_b(reg1, reg0); + __lsx_vst(dst0, dst_y, 0); + dst_y += 16; + src_argb1555 += 32; + } +} + +void ARGB1555ToUVRow_LSX(const uint8_t* src_argb1555, + int src_stride_argb1555, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + int len = width / 16; + const uint8_t* next_argb1555 = src_argb1555 + src_stride_argb1555; + __m128i src0, src1, src2, src3; + __m128i tmp0, tmp1, tmp2, tmp3; + __m128i tmpb, tmpg, tmpr, nexb, nexg, nexr; + __m128i reg0, reg1, reg2, reg3, dst0; + __m128i const_112 = __lsx_vldi(0x438); + __m128i const_74 = __lsx_vldi(0x425); + __m128i const_38 = __lsx_vldi(0x413); + __m128i const_94 = __lsx_vldi(0x42F); + __m128i const_18 = __lsx_vldi(0x409); + __m128i const_8080 = {0x8080808080808080, 0x8080808080808080}; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_argb1555, 0, src_argb1555, 16, next_argb1555, 0, + next_argb1555, 16, src0, src1, src2, src3); + DUP2_ARG2(__lsx_vpickev_b, src1, src0, src3, src2, tmp0, tmp2); + DUP2_ARG2(__lsx_vpickod_b, src1, src0, src3, src2, tmp1, tmp3); + tmpb = __lsx_vandi_b(tmp0, 0x1F); + nexb = __lsx_vandi_b(tmp2, 0x1F); + tmpg = __lsx_vsrli_b(tmp0, 5); + nexg = __lsx_vsrli_b(tmp2, 5); + reg0 = __lsx_vandi_b(tmp1, 0x03); + reg2 = __lsx_vandi_b(tmp3, 0x03); + reg0 = __lsx_vslli_b(reg0, 3); + reg2 = __lsx_vslli_b(reg2, 3); + tmpg = __lsx_vor_v(tmpg, reg0); + nexg = __lsx_vor_v(nexg, reg2); + reg1 = __lsx_vandi_b(tmp1, 0x7C); + reg3 = __lsx_vandi_b(tmp3, 0x7C); + tmpr = __lsx_vsrli_b(reg1, 2); + nexr = __lsx_vsrli_b(reg3, 2); + reg0 = __lsx_vslli_b(tmpb, 3); + reg1 = __lsx_vslli_b(tmpg, 3); + reg2 = __lsx_vslli_b(tmpr, 3); + tmpb = __lsx_vsrli_b(tmpb, 2); + tmpg = __lsx_vsrli_b(tmpg, 2); + tmpr = __lsx_vsrli_b(tmpr, 2); + tmpb = __lsx_vor_v(reg0, tmpb); + tmpg = __lsx_vor_v(reg1, tmpg); + tmpr = __lsx_vor_v(reg2, tmpr); + reg0 = __lsx_vslli_b(nexb, 3); + reg1 = __lsx_vslli_b(nexg, 3); + reg2 = 
__lsx_vslli_b(nexr, 3); + nexb = __lsx_vsrli_b(nexb, 2); + nexg = __lsx_vsrli_b(nexg, 2); + nexr = __lsx_vsrli_b(nexr, 2); + nexb = __lsx_vor_v(reg0, nexb); + nexg = __lsx_vor_v(reg1, nexg); + nexr = __lsx_vor_v(reg2, nexr); + RGBTOUV(tmpb, tmpg, tmpr, nexb, nexg, nexr, dst0); + __lsx_vstelm_d(dst0, dst_u, 0, 0); + __lsx_vstelm_d(dst0, dst_v, 0, 1); + dst_u += 8; + dst_v += 8; + src_argb1555 += 32; + next_argb1555 += 32; + } +} + +void RGB565ToYRow_LSX(const uint8_t* src_rgb565, uint8_t* dst_y, int width) { + int x; + int len = width / 16; + __m128i src0, src1; + __m128i tmp0, tmp1, tmpb, tmpg, tmpr; + __m128i reg0, reg1, dst0; + __m128i const_66 = __lsx_vldi(66); + __m128i const_129 = __lsx_vldi(129); + __m128i const_25 = __lsx_vldi(25); + __m128i const_1080 = {0x1080108010801080, 0x1080108010801080}; + + for (x = 0; x < len; x++) { + src0 = __lsx_vld(src_rgb565, 0); + src1 = __lsx_vld(src_rgb565, 16); + tmp0 = __lsx_vpickev_b(src1, src0); + tmp1 = __lsx_vpickod_b(src1, src0); + tmpb = __lsx_vandi_b(tmp0, 0x1F); + tmpr = __lsx_vandi_b(tmp1, 0xF8); + reg1 = __lsx_vandi_b(tmp1, 0x07); + reg0 = __lsx_vsrli_b(tmp0, 5); + reg1 = __lsx_vslli_b(reg1, 3); + tmpg = __lsx_vor_v(reg1, reg0); + reg0 = __lsx_vslli_b(tmpb, 3); + reg1 = __lsx_vsrli_b(tmpb, 2); + tmpb = __lsx_vor_v(reg1, reg0); + reg0 = __lsx_vslli_b(tmpg, 2); + reg1 = __lsx_vsrli_b(tmpg, 4); + tmpg = __lsx_vor_v(reg1, reg0); + reg0 = __lsx_vsrli_b(tmpr, 5); + tmpr = __lsx_vor_v(tmpr, reg0); + reg0 = __lsx_vmaddwev_h_bu(const_1080, tmpb, const_25); + reg1 = __lsx_vmaddwod_h_bu(const_1080, tmpb, const_25); + reg0 = __lsx_vmaddwev_h_bu(reg0, tmpg, const_129); + reg1 = __lsx_vmaddwod_h_bu(reg1, tmpg, const_129); + reg0 = __lsx_vmaddwev_h_bu(reg0, tmpr, const_66); + reg1 = __lsx_vmaddwod_h_bu(reg1, tmpr, const_66); + dst0 = __lsx_vpackod_b(reg1, reg0); + __lsx_vst(dst0, dst_y, 0); + dst_y += 16; + src_rgb565 += 32; + } +} + +void RGB565ToUVRow_LSX(const uint8_t* src_rgb565, + int src_stride_rgb565, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + int len = width / 16; + const uint8_t* next_rgb565 = src_rgb565 + src_stride_rgb565; + __m128i src0, src1, src2, src3; + __m128i tmp0, tmp1, tmp2, tmp3; + __m128i tmpb, tmpg, tmpr, nexb, nexg, nexr; + __m128i reg0, reg1, reg2, reg3, dst0; + __m128i const_112 = __lsx_vldi(0x438); + __m128i const_74 = __lsx_vldi(0x425); + __m128i const_38 = __lsx_vldi(0x413); + __m128i const_94 = __lsx_vldi(0x42F); + __m128i const_18 = __lsx_vldi(0x409); + __m128i const_8080 = {0x8080808080808080, 0x8080808080808080}; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_rgb565, 0, src_rgb565, 16, next_rgb565, 0, + next_rgb565, 16, src0, src1, src2, src3); + DUP2_ARG2(__lsx_vpickev_b, src1, src0, src3, src2, tmp0, tmp2); + DUP2_ARG2(__lsx_vpickod_b, src1, src0, src3, src2, tmp1, tmp3); + tmpb = __lsx_vandi_b(tmp0, 0x1F); + tmpr = __lsx_vandi_b(tmp1, 0xF8); + nexb = __lsx_vandi_b(tmp2, 0x1F); + nexr = __lsx_vandi_b(tmp3, 0xF8); + reg1 = __lsx_vandi_b(tmp1, 0x07); + reg3 = __lsx_vandi_b(tmp3, 0x07); + reg0 = __lsx_vsrli_b(tmp0, 5); + reg1 = __lsx_vslli_b(reg1, 3); + reg2 = __lsx_vsrli_b(tmp2, 5); + reg3 = __lsx_vslli_b(reg3, 3); + tmpg = __lsx_vor_v(reg1, reg0); + nexg = __lsx_vor_v(reg2, reg3); + reg0 = __lsx_vslli_b(tmpb, 3); + reg1 = __lsx_vsrli_b(tmpb, 2); + reg2 = __lsx_vslli_b(nexb, 3); + reg3 = __lsx_vsrli_b(nexb, 2); + tmpb = __lsx_vor_v(reg1, reg0); + nexb = __lsx_vor_v(reg2, reg3); + reg0 = __lsx_vslli_b(tmpg, 2); + reg1 = __lsx_vsrli_b(tmpg, 4); + reg2 = __lsx_vslli_b(nexg, 2); + reg3 = 
__lsx_vsrli_b(nexg, 4); + tmpg = __lsx_vor_v(reg1, reg0); + nexg = __lsx_vor_v(reg2, reg3); + reg0 = __lsx_vsrli_b(tmpr, 5); + reg2 = __lsx_vsrli_b(nexr, 5); + tmpr = __lsx_vor_v(tmpr, reg0); + nexr = __lsx_vor_v(nexr, reg2); + RGBTOUV(tmpb, tmpg, tmpr, nexb, nexg, nexr, dst0); + __lsx_vstelm_d(dst0, dst_u, 0, 0); + __lsx_vstelm_d(dst0, dst_v, 0, 1); + dst_u += 8; + dst_v += 8; + src_rgb565 += 32; + next_rgb565 += 32; + } +} + +void RGB24ToUVRow_LSX(const uint8_t* src_rgb24, + int src_stride_rgb24, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + const uint8_t* next_rgb24 = src_rgb24 + src_stride_rgb24; + int len = width / 16; + __m128i src0, src1, src2; + __m128i nex0, nex1, nex2, dst0; + __m128i tmpb, tmpg, tmpr, nexb, nexg, nexr; + __m128i const_112 = __lsx_vldi(0x438); + __m128i const_74 = __lsx_vldi(0x425); + __m128i const_38 = __lsx_vldi(0x413); + __m128i const_94 = __lsx_vldi(0x42F); + __m128i const_18 = __lsx_vldi(0x409); + __m128i const_8080 = {0x8080808080808080, 0x8080808080808080}; + __m128i shuff0_b = {0x15120F0C09060300, 0x00000000001E1B18}; + __m128i shuff1_b = {0x0706050403020100, 0x1D1A1714110A0908}; + __m128i shuff0_g = {0x1613100D0A070401, 0x00000000001F1C19}; + __m128i shuff1_g = {0x0706050403020100, 0x1E1B1815120A0908}; + __m128i shuff0_r = {0x1714110E0B080502, 0x0000000000001D1A}; + __m128i shuff1_r = {0x0706050403020100, 0x1F1C191613100908}; + + for (x = 0; x < len; x++) { + src0 = __lsx_vld(src_rgb24, 0); + src1 = __lsx_vld(src_rgb24, 16); + src2 = __lsx_vld(src_rgb24, 32); + nex0 = __lsx_vld(next_rgb24, 0); + nex1 = __lsx_vld(next_rgb24, 16); + nex2 = __lsx_vld(next_rgb24, 32); + DUP2_ARG3(__lsx_vshuf_b, src1, src0, shuff0_b, nex1, nex0, shuff0_b, tmpb, + nexb); + DUP2_ARG3(__lsx_vshuf_b, src1, src0, shuff0_g, nex1, nex0, shuff0_g, tmpg, + nexg); + DUP2_ARG3(__lsx_vshuf_b, src1, src0, shuff0_r, nex1, nex0, shuff0_r, tmpr, + nexr); + DUP2_ARG3(__lsx_vshuf_b, src2, tmpb, shuff1_b, nex2, nexb, shuff1_b, tmpb, + nexb); + DUP2_ARG3(__lsx_vshuf_b, src2, tmpg, shuff1_g, nex2, nexg, shuff1_g, tmpg, + nexg); + DUP2_ARG3(__lsx_vshuf_b, src2, tmpr, shuff1_r, nex2, nexr, shuff1_r, tmpr, + nexr); + RGBTOUV(tmpb, tmpg, tmpr, nexb, nexg, nexr, dst0); + __lsx_vstelm_d(dst0, dst_u, 0, 0); + __lsx_vstelm_d(dst0, dst_v, 0, 1); + dst_u += 8; + dst_v += 8; + src_rgb24 += 48; + next_rgb24 += 48; + } +} + +void RAWToUVRow_LSX(const uint8_t* src_raw, + int src_stride_raw, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + const uint8_t* next_raw = src_raw + src_stride_raw; + int len = width / 16; + __m128i src0, src1, src2; + __m128i nex0, nex1, nex2, dst0; + __m128i tmpb, tmpg, tmpr, nexb, nexg, nexr; + __m128i const_112 = __lsx_vldi(0x438); + __m128i const_74 = __lsx_vldi(0x425); + __m128i const_38 = __lsx_vldi(0x413); + __m128i const_94 = __lsx_vldi(0x42F); + __m128i const_18 = __lsx_vldi(0x409); + __m128i const_8080 = {0x8080808080808080, 0x8080808080808080}; + __m128i shuff0_r = {0x15120F0C09060300, 0x00000000001E1B18}; + __m128i shuff1_r = {0x0706050403020100, 0x1D1A1714110A0908}; + __m128i shuff0_g = {0x1613100D0A070401, 0x00000000001F1C19}; + __m128i shuff1_g = {0x0706050403020100, 0x1E1B1815120A0908}; + __m128i shuff0_b = {0x1714110E0B080502, 0x0000000000001D1A}; + __m128i shuff1_b = {0x0706050403020100, 0x1F1C191613100908}; + + for (x = 0; x < len; x++) { + src0 = __lsx_vld(src_raw, 0); + src1 = __lsx_vld(src_raw, 16); + src2 = __lsx_vld(src_raw, 32); + nex0 = __lsx_vld(next_raw, 0); + nex1 = __lsx_vld(next_raw, 16); + nex2 = __lsx_vld(next_raw, 32); + 
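+    /* Reference (scalar sketch, names illustrative only): the shuff*_b/g/r
+     * tables used below deinterleave 48 packed 3-byte pixels into 16 bytes
+     * per channel before the shared RGBTOUV step. In scalar form the gather
+     * is simply every third byte; RGB24 stores B,G,R and RAW stores R,G,B,
+     * so only the table naming swaps between the two kernels.
+     *
+     *   static void Deinterleave16x3_sketch(const uint8_t* src, uint8_t* c0,
+     *                                       uint8_t* c1, uint8_t* c2) {
+     *     for (int i = 0; i < 16; ++i) {
+     *       c0[i] = src[3 * i + 0];  // B for RGB24, R for RAW
+     *       c1[i] = src[3 * i + 1];  // G in both layouts
+     *       c2[i] = src[3 * i + 2];  // R for RGB24, B for RAW
+     *     }
+     *   }
+     */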
DUP2_ARG3(__lsx_vshuf_b, src1, src0, shuff0_b, nex1, nex0, shuff0_b, tmpb, + nexb); + DUP2_ARG3(__lsx_vshuf_b, src1, src0, shuff0_g, nex1, nex0, shuff0_g, tmpg, + nexg); + DUP2_ARG3(__lsx_vshuf_b, src1, src0, shuff0_r, nex1, nex0, shuff0_r, tmpr, + nexr); + DUP2_ARG3(__lsx_vshuf_b, src2, tmpb, shuff1_b, nex2, nexb, shuff1_b, tmpb, + nexb); + DUP2_ARG3(__lsx_vshuf_b, src2, tmpg, shuff1_g, nex2, nexg, shuff1_g, tmpg, + nexg); + DUP2_ARG3(__lsx_vshuf_b, src2, tmpr, shuff1_r, nex2, nexr, shuff1_r, tmpr, + nexr); + RGBTOUV(tmpb, tmpg, tmpr, nexb, nexg, nexr, dst0); + __lsx_vstelm_d(dst0, dst_u, 0, 0); + __lsx_vstelm_d(dst0, dst_v, 0, 1); + dst_u += 8; + dst_v += 8; + src_raw += 48; + next_raw += 48; + } +} + +void NV12ToARGBRow_LSX(const uint8_t* src_y, + const uint8_t* src_uv, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 8; + __m128i vec_y, vec_vu; + __m128i vec_vr, vec_ub, vec_vg, vec_ug, vec_yg, vec_yb; + __m128i vec_vrub, vec_vgug; + __m128i out_b, out_g, out_r; + __m128i const_80 = __lsx_vldi(0x480); + __m128i alpha = __lsx_vldi(0xFF); + __m128i zero = __lsx_vldi(0); + + YUVTORGB_SETUP(yuvconstants, vec_vr, vec_ub, vec_vg, vec_ug, vec_yg, vec_yb); + vec_vrub = __lsx_vilvl_h(vec_vr, vec_ub); + vec_vgug = __lsx_vilvl_h(vec_vg, vec_ug); + + for (x = 0; x < len; x++) { + vec_y = __lsx_vld(src_y, 0); + vec_vu = __lsx_vld(src_uv, 0); + YUVTORGB(vec_y, vec_vu, vec_vrub, vec_vgug, vec_yg, vec_yb, out_b, out_g, + out_r); + STOREARGB(alpha, out_r, out_g, out_b, dst_argb); + src_y += 8; + src_uv += 8; + } +} + +void NV12ToRGB565Row_LSX(const uint8_t* src_y, + const uint8_t* src_uv, + uint8_t* dst_rgb565, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 8; + __m128i vec_y, vec_vu; + __m128i vec_vr, vec_ub, vec_vg, vec_ug, vec_yg, vec_yb; + __m128i vec_vrub, vec_vgug; + __m128i out_b, out_g, out_r; + __m128i const_80 = __lsx_vldi(0x480); + __m128i zero = __lsx_vldi(0); + + YUVTORGB_SETUP(yuvconstants, vec_vr, vec_ub, vec_vg, vec_ug, vec_yg, vec_yb); + vec_vrub = __lsx_vilvl_h(vec_vr, vec_ub); + vec_vgug = __lsx_vilvl_h(vec_vg, vec_ug); + + for (x = 0; x < len; x++) { + vec_y = __lsx_vld(src_y, 0); + vec_vu = __lsx_vld(src_uv, 0); + YUVTORGB(vec_y, vec_vu, vec_vrub, vec_vgug, vec_yg, vec_yb, out_b, out_g, + out_r); + out_b = __lsx_vsrli_h(out_b, 3); + out_g = __lsx_vsrli_h(out_g, 2); + out_r = __lsx_vsrli_h(out_r, 3); + out_g = __lsx_vslli_h(out_g, 5); + out_r = __lsx_vslli_h(out_r, 11); + out_r = __lsx_vor_v(out_r, out_g); + out_r = __lsx_vor_v(out_r, out_b); + __lsx_vst(out_r, dst_rgb565, 0); + src_y += 8; + src_uv += 8; + dst_rgb565 += 16; + } +} + +void NV21ToARGBRow_LSX(const uint8_t* src_y, + const uint8_t* src_vu, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 8; + __m128i vec_y, vec_uv; + __m128i vec_vr, vec_ub, vec_vg, vec_ug, vec_yg, vec_yb; + __m128i vec_ubvr, vec_ugvg; + __m128i out_b, out_g, out_r; + __m128i const_80 = __lsx_vldi(0x480); + __m128i alpha = __lsx_vldi(0xFF); + __m128i zero = __lsx_vldi(0); + + YUVTORGB_SETUP(yuvconstants, vec_vr, vec_ub, vec_vg, vec_ug, vec_yg, vec_yb); + vec_ubvr = __lsx_vilvl_h(vec_ub, vec_vr); + vec_ugvg = __lsx_vilvl_h(vec_ug, vec_vg); + + for (x = 0; x < len; x++) { + vec_y = __lsx_vld(src_y, 0); + vec_uv = __lsx_vld(src_vu, 0); + YUVTORGB(vec_y, vec_uv, vec_ubvr, vec_ugvg, vec_yg, vec_yb, out_r, out_g, + out_b); + STOREARGB(alpha, out_r, out_g, out_b, dst_argb); + src_y += 8; + src_vu += 8; + } 
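+  /* Reference (scalar sketch, not the yuvconstants path): NV12ToARGBRow_LSX
+   * and NV21ToARGBRow_LSX share the YUVTORGB macro; NV21 absorbs the reversed
+   * V,U byte order by interleaving the coefficient pairs the other way round
+   * and swapping the out_r/out_b operands. The actual coefficients come from
+   * yuvconstants (layout not shown in this hunk); the textbook BT.601
+   * limited-range fixed-point form of the per-pixel math looks like this:
+   *
+   *   static uint8_t clamp_u8(int v) {
+   *     return (uint8_t)(v < 0 ? 0 : v > 255 ? 255 : v);
+   *   }
+   *   static void YuvToRgb_sketch(uint8_t y, uint8_t u, uint8_t v,
+   *                               uint8_t* r, uint8_t* g, uint8_t* b) {
+   *     int c = 298 * (y - 16);                                 // 1.164 in 8.8
+   *     *r = clamp_u8((c + 409 * (v - 128) + 128) >> 8);
+   *     *g = clamp_u8((c - 100 * (u - 128) - 208 * (v - 128) + 128) >> 8);
+   *     *b = clamp_u8((c + 516 * (u - 128) + 128) >> 8);
+   *   }
+   */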
+} + +void SobelRow_LSX(const uint8_t* src_sobelx, + const uint8_t* src_sobely, + uint8_t* dst_argb, + int width) { + int x; + int len = width / 16; + __m128i src0, src1, tmp0; + __m128i out0, out1, out2, out3; + __m128i alpha = __lsx_vldi(0xFF); + __m128i shuff0 = {0x1001010110000000, 0x1003030310020202}; + __m128i shuff1 = __lsx_vaddi_bu(shuff0, 0x04); + __m128i shuff2 = __lsx_vaddi_bu(shuff1, 0x04); + __m128i shuff3 = __lsx_vaddi_bu(shuff2, 0x04); + + for (x = 0; x < len; x++) { + src0 = __lsx_vld(src_sobelx, 0); + src1 = __lsx_vld(src_sobely, 0); + tmp0 = __lsx_vsadd_bu(src0, src1); + DUP4_ARG3(__lsx_vshuf_b, alpha, tmp0, shuff0, alpha, tmp0, shuff1, alpha, + tmp0, shuff2, alpha, tmp0, shuff3, out0, out1, out2, out3); + __lsx_vst(out0, dst_argb, 0); + __lsx_vst(out1, dst_argb, 16); + __lsx_vst(out2, dst_argb, 32); + __lsx_vst(out3, dst_argb, 48); + src_sobelx += 16; + src_sobely += 16; + dst_argb += 64; + } +} + +void SobelToPlaneRow_LSX(const uint8_t* src_sobelx, + const uint8_t* src_sobely, + uint8_t* dst_y, + int width) { + int x; + int len = width / 32; + __m128i src0, src1, src2, src3, dst0, dst1; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lsx_vld, src_sobelx, 0, src_sobelx, 16, src0, src1); + DUP2_ARG2(__lsx_vld, src_sobely, 0, src_sobely, 16, src2, src3); + dst0 = __lsx_vsadd_bu(src0, src2); + dst1 = __lsx_vsadd_bu(src1, src3); + __lsx_vst(dst0, dst_y, 0); + __lsx_vst(dst1, dst_y, 16); + src_sobelx += 32; + src_sobely += 32; + dst_y += 32; + } +} + +void SobelXYRow_LSX(const uint8_t* src_sobelx, + const uint8_t* src_sobely, + uint8_t* dst_argb, + int width) { + int x; + int len = width / 16; + __m128i src_r, src_b, src_g; + __m128i tmp0, tmp1, tmp2, tmp3; + __m128i dst0, dst1, dst2, dst3; + __m128i alpha = __lsx_vldi(0xFF); + + for (x = 0; x < len; x++) { + src_r = __lsx_vld(src_sobelx, 0); + src_b = __lsx_vld(src_sobely, 0); + src_g = __lsx_vsadd_bu(src_r, src_b); + tmp0 = __lsx_vilvl_b(src_g, src_b); + tmp1 = __lsx_vilvh_b(src_g, src_b); + tmp2 = __lsx_vilvl_b(alpha, src_r); + tmp3 = __lsx_vilvh_b(alpha, src_r); + dst0 = __lsx_vilvl_h(tmp2, tmp0); + dst1 = __lsx_vilvh_h(tmp2, tmp0); + dst2 = __lsx_vilvl_h(tmp3, tmp1); + dst3 = __lsx_vilvh_h(tmp3, tmp1); + __lsx_vst(dst0, dst_argb, 0); + __lsx_vst(dst1, dst_argb, 16); + __lsx_vst(dst2, dst_argb, 32); + __lsx_vst(dst3, dst_argb, 48); + src_sobelx += 16; + src_sobely += 16; + dst_argb += 64; + } +} + +void BGRAToUVRow_LSX(const uint8_t* src_bgra, + int src_stride_bgra, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + const uint8_t* next_bgra = src_bgra + src_stride_bgra; + int len = width / 16; + __m128i src0, src1, src2, src3; + __m128i nex0, nex1, nex2, nex3; + __m128i tmp0, tmp1, tmp2, tmp3, dst0; + __m128i tmpb, tmpg, tmpr, nexb, nexg, nexr; + __m128i const_112 = __lsx_vldi(0x438); + __m128i const_74 = __lsx_vldi(0x425); + __m128i const_38 = __lsx_vldi(0x413); + __m128i const_94 = __lsx_vldi(0x42F); + __m128i const_18 = __lsx_vldi(0x409); + __m128i const_8080 = {0x8080808080808080, 0x8080808080808080}; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_bgra, 0, src_bgra, 16, src_bgra, 32, src_bgra, 48, + src0, src1, src2, src3); + DUP4_ARG2(__lsx_vld, next_bgra, 0, next_bgra, 16, next_bgra, 32, next_bgra, + 48, nex0, nex1, nex2, nex3); + tmp0 = __lsx_vpickod_b(src1, src0); + tmp1 = __lsx_vpickev_b(src1, src0); + tmp2 = __lsx_vpickod_b(src3, src2); + tmp3 = __lsx_vpickev_b(src3, src2); + tmpb = __lsx_vpickod_b(tmp2, tmp0); + tmpr = __lsx_vpickev_b(tmp2, tmp0); + tmpg = __lsx_vpickod_b(tmp3, tmp1); + 
tmp0 = __lsx_vpickod_b(nex1, nex0); + tmp1 = __lsx_vpickev_b(nex1, nex0); + tmp2 = __lsx_vpickod_b(nex3, nex2); + tmp3 = __lsx_vpickev_b(nex3, nex2); + nexb = __lsx_vpickod_b(tmp2, tmp0); + nexr = __lsx_vpickev_b(tmp2, tmp0); + nexg = __lsx_vpickod_b(tmp3, tmp1); + RGBTOUV(tmpb, tmpg, tmpr, nexb, nexg, nexr, dst0); + __lsx_vstelm_d(dst0, dst_u, 0, 0); + __lsx_vstelm_d(dst0, dst_v, 0, 1); + dst_u += 8; + dst_v += 8; + src_bgra += 64; + next_bgra += 64; + } +} + +void ABGRToUVRow_LSX(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + const uint8_t* next_abgr = src_abgr + src_stride_abgr; + int len = width / 16; + __m128i src0, src1, src2, src3; + __m128i nex0, nex1, nex2, nex3; + __m128i tmp0, tmp1, tmp2, tmp3, dst0; + __m128i tmpb, tmpg, tmpr, nexb, nexg, nexr; + __m128i const_112 = __lsx_vldi(0x438); + __m128i const_74 = __lsx_vldi(0x425); + __m128i const_38 = __lsx_vldi(0x413); + __m128i const_94 = __lsx_vldi(0x42F); + __m128i const_18 = __lsx_vldi(0x409); + __m128i const_8080 = {0x8080808080808080, 0x8080808080808080}; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_abgr, 0, src_abgr, 16, src_abgr, 32, src_abgr, 48, + src0, src1, src2, src3); + DUP4_ARG2(__lsx_vld, next_abgr, 0, next_abgr, 16, next_abgr, 32, next_abgr, + 48, nex0, nex1, nex2, nex3); + tmp0 = __lsx_vpickev_b(src1, src0); + tmp1 = __lsx_vpickod_b(src1, src0); + tmp2 = __lsx_vpickev_b(src3, src2); + tmp3 = __lsx_vpickod_b(src3, src2); + tmpb = __lsx_vpickod_b(tmp2, tmp0); + tmpr = __lsx_vpickev_b(tmp2, tmp0); + tmpg = __lsx_vpickev_b(tmp3, tmp1); + tmp0 = __lsx_vpickev_b(nex1, nex0); + tmp1 = __lsx_vpickod_b(nex1, nex0); + tmp2 = __lsx_vpickev_b(nex3, nex2); + tmp3 = __lsx_vpickod_b(nex3, nex2); + nexb = __lsx_vpickod_b(tmp2, tmp0); + nexr = __lsx_vpickev_b(tmp2, tmp0); + nexg = __lsx_vpickev_b(tmp3, tmp1); + RGBTOUV(tmpb, tmpg, tmpr, nexb, nexg, nexr, dst0); + __lsx_vstelm_d(dst0, dst_u, 0, 0); + __lsx_vstelm_d(dst0, dst_v, 0, 1); + dst_u += 8; + dst_v += 8; + src_abgr += 64; + next_abgr += 64; + } +} + +void RGBAToUVRow_LSX(const uint8_t* src_rgba, + int src_stride_rgba, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + const uint8_t* next_rgba = src_rgba + src_stride_rgba; + int len = width / 16; + __m128i src0, src1, src2, src3; + __m128i nex0, nex1, nex2, nex3; + __m128i tmp0, tmp1, tmp2, tmp3, dst0; + __m128i tmpb, tmpg, tmpr, nexb, nexg, nexr; + __m128i const_112 = __lsx_vldi(0x438); + __m128i const_74 = __lsx_vldi(0x425); + __m128i const_38 = __lsx_vldi(0x413); + __m128i const_94 = __lsx_vldi(0x42F); + __m128i const_18 = __lsx_vldi(0x409); + __m128i const_8080 = {0x8080808080808080, 0x8080808080808080}; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_rgba, 0, src_rgba, 16, src_rgba, 32, src_rgba, 48, + src0, src1, src2, src3); + DUP4_ARG2(__lsx_vld, next_rgba, 0, next_rgba, 16, next_rgba, 32, next_rgba, + 48, nex0, nex1, nex2, nex3); + tmp0 = __lsx_vpickod_b(src1, src0); + tmp1 = __lsx_vpickev_b(src1, src0); + tmp2 = __lsx_vpickod_b(src3, src2); + tmp3 = __lsx_vpickev_b(src3, src2); + tmpr = __lsx_vpickod_b(tmp2, tmp0); + tmpb = __lsx_vpickev_b(tmp2, tmp0); + tmpg = __lsx_vpickod_b(tmp3, tmp1); + tmp0 = __lsx_vpickod_b(nex1, nex0); + tmp1 = __lsx_vpickev_b(nex1, nex0); + tmp2 = __lsx_vpickod_b(nex3, nex2); + tmp3 = __lsx_vpickev_b(nex3, nex2); + nexr = __lsx_vpickod_b(tmp2, tmp0); + nexb = __lsx_vpickev_b(tmp2, tmp0); + nexg = __lsx_vpickod_b(tmp3, tmp1); + RGBTOUV(tmpb, tmpg, tmpr, nexb, nexg, nexr, dst0); + 
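+    /* Reference (scalar sketch, helper names illustrative only): RGBTOUV, as
+     * used by all *ToUVRow_LSX kernels in this file, implements the BT.601
+     * U/V formula whose constants are replicated into const_112/74/38/94/18
+     * and const_8080 above; b/g/r stand for the 2x2-subsampled averages.
+     *
+     *   static uint8_t RGBToU_sketch(int r, int g, int b) {
+     *     return (uint8_t)((112 * b - 74 * g - 38 * r + 0x8080) >> 8);
+     *   }
+     *   static uint8_t RGBToV_sketch(int r, int g, int b) {
+     *     return (uint8_t)((112 * r - 94 * g - 18 * b + 0x8080) >> 8);
+     *   }
+     */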
__lsx_vstelm_d(dst0, dst_u, 0, 0); + __lsx_vstelm_d(dst0, dst_v, 0, 1); + dst_u += 8; + dst_v += 8; + src_rgba += 64; + next_rgba += 64; + } +} + +void ARGBToUVJRow_LSX(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + const uint8_t* next_argb = src_argb + src_stride_argb; + int len = width / 16; + __m128i src0, src1, src2, src3; + __m128i nex0, nex1, nex2, nex3; + __m128i tmp0, tmp1, tmp2, tmp3; + __m128i reg0, reg1, dst0; + __m128i tmpb, tmpg, tmpr, nexb, nexg, nexr; + __m128i const_63 = __lsx_vldi(0x43F); + __m128i const_42 = __lsx_vldi(0x42A); + __m128i const_21 = __lsx_vldi(0x415); + __m128i const_53 = __lsx_vldi(0x435); + __m128i const_10 = __lsx_vldi(0x40A); + __m128i const_8080 = {0x8080808080808080, 0x8080808080808080}; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_argb, 0, src_argb, 16, src_argb, 32, src_argb, 48, + src0, src1, src2, src3); + DUP4_ARG2(__lsx_vld, next_argb, 0, next_argb, 16, next_argb, 32, next_argb, + 48, nex0, nex1, nex2, nex3); + tmp0 = __lsx_vpickev_b(src1, src0); + tmp1 = __lsx_vpickod_b(src1, src0); + tmp2 = __lsx_vpickev_b(src3, src2); + tmp3 = __lsx_vpickod_b(src3, src2); + tmpr = __lsx_vpickod_b(tmp2, tmp0); + tmpb = __lsx_vpickev_b(tmp2, tmp0); + tmpg = __lsx_vpickev_b(tmp3, tmp1); + tmp0 = __lsx_vpickev_b(nex1, nex0); + tmp1 = __lsx_vpickod_b(nex1, nex0); + tmp2 = __lsx_vpickev_b(nex3, nex2); + tmp3 = __lsx_vpickod_b(nex3, nex2); + nexr = __lsx_vpickod_b(tmp2, tmp0); + nexb = __lsx_vpickev_b(tmp2, tmp0); + nexg = __lsx_vpickev_b(tmp3, tmp1); + tmp0 = __lsx_vaddwev_h_bu(tmpb, nexb); + tmp1 = __lsx_vaddwod_h_bu(tmpb, nexb); + tmp2 = __lsx_vaddwev_h_bu(tmpg, nexg); + tmp3 = __lsx_vaddwod_h_bu(tmpg, nexg); + reg0 = __lsx_vaddwev_h_bu(tmpr, nexr); + reg1 = __lsx_vaddwod_h_bu(tmpr, nexr); + tmpb = __lsx_vavgr_hu(tmp0, tmp1); + tmpg = __lsx_vavgr_hu(tmp2, tmp3); + tmpr = __lsx_vavgr_hu(reg0, reg1); + reg0 = __lsx_vmadd_h(const_8080, const_63, tmpb); + reg1 = __lsx_vmadd_h(const_8080, const_63, tmpr); + reg0 = __lsx_vmsub_h(reg0, const_42, tmpg); + reg1 = __lsx_vmsub_h(reg1, const_53, tmpg); + reg0 = __lsx_vmsub_h(reg0, const_21, tmpr); + reg1 = __lsx_vmsub_h(reg1, const_10, tmpb); + dst0 = __lsx_vpickod_b(reg1, reg0); + __lsx_vstelm_d(dst0, dst_u, 0, 0); + __lsx_vstelm_d(dst0, dst_v, 0, 1); + dst_u += 8; + dst_v += 8; + src_argb += 64; + next_argb += 64; + } +} + +void I444ToARGBRow_LSX(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 16; + __m128i vec_y, vec_u, vec_v, out_b, out_g, out_r; + __m128i vec_yl, vec_yh, vec_ul, vec_vl, vec_uh, vec_vh; + __m128i vec_vr, vec_ub, vec_vg, vec_ug, vec_yg, vec_yb, vec_ugvg; + __m128i const_80 = __lsx_vldi(0x480); + __m128i alpha = __lsx_vldi(0xFF); + __m128i zero = __lsx_vldi(0); + + YUVTORGB_SETUP(yuvconstants, vec_vr, vec_ub, vec_vg, vec_ug, vec_yg, vec_yb); + vec_ugvg = __lsx_vilvl_h(vec_ug, vec_vg); + + for (x = 0; x < len; x++) { + vec_y = __lsx_vld(src_y, 0); + vec_u = __lsx_vld(src_u, 0); + vec_v = __lsx_vld(src_v, 0); + vec_yl = __lsx_vilvl_b(vec_y, vec_y); + vec_ul = __lsx_vilvl_b(zero, vec_u); + vec_vl = __lsx_vilvl_b(zero, vec_v); + I444TORGB(vec_yl, vec_ul, vec_vl, vec_ub, vec_vr, vec_ugvg, vec_yg, vec_yb, + out_b, out_g, out_r); + STOREARGB(alpha, out_r, out_g, out_b, dst_argb); + vec_yh = __lsx_vilvh_b(vec_y, vec_y); + vec_uh = __lsx_vilvh_b(zero, vec_u); + vec_vh = __lsx_vilvh_b(zero, vec_v); + 
I444TORGB(vec_yh, vec_uh, vec_vh, vec_ub, vec_vr, vec_ugvg, vec_yg, vec_yb, + out_b, out_g, out_r); + STOREARGB(alpha, out_r, out_g, out_b, dst_argb); + src_y += 16; + src_u += 16; + src_v += 16; + } +} + +void I400ToARGBRow_LSX(const uint8_t* src_y, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 16; + __m128i vec_y, vec_yl, vec_yh, out0; + __m128i y_ev, y_od, dst0, dst1, dst2, dst3; + __m128i temp0, temp1; + __m128i alpha = __lsx_vldi(0xFF); + __m128i vec_yg = __lsx_vreplgr2vr_h(yuvconstants->kYToRgb[0]); + __m128i vec_yb = __lsx_vreplgr2vr_w(yuvconstants->kYBiasToRgb[0]); + + for (x = 0; x < len; x++) { + vec_y = __lsx_vld(src_y, 0); + vec_yl = __lsx_vilvl_b(vec_y, vec_y); + y_ev = __lsx_vmulwev_w_hu_h(vec_yl, vec_yg); + y_od = __lsx_vmulwod_w_hu_h(vec_yl, vec_yg); + y_ev = __lsx_vsrai_w(y_ev, 16); + y_od = __lsx_vsrai_w(y_od, 16); + y_ev = __lsx_vadd_w(y_ev, vec_yb); + y_od = __lsx_vadd_w(y_od, vec_yb); + y_ev = __lsx_vsrai_w(y_ev, 6); + y_od = __lsx_vsrai_w(y_od, 6); + y_ev = __lsx_vclip255_w(y_ev); + y_od = __lsx_vclip255_w(y_od); + out0 = __lsx_vpackev_h(y_od, y_ev); + temp0 = __lsx_vpackev_b(out0, out0); + temp1 = __lsx_vpackev_b(alpha, out0); + dst0 = __lsx_vilvl_h(temp1, temp0); + dst1 = __lsx_vilvh_h(temp1, temp0); + vec_yh = __lsx_vilvh_b(vec_y, vec_y); + y_ev = __lsx_vmulwev_w_hu_h(vec_yh, vec_yg); + y_od = __lsx_vmulwod_w_hu_h(vec_yh, vec_yg); + y_ev = __lsx_vsrai_w(y_ev, 16); + y_od = __lsx_vsrai_w(y_od, 16); + y_ev = __lsx_vadd_w(y_ev, vec_yb); + y_od = __lsx_vadd_w(y_od, vec_yb); + y_ev = __lsx_vsrai_w(y_ev, 6); + y_od = __lsx_vsrai_w(y_od, 6); + y_ev = __lsx_vclip255_w(y_ev); + y_od = __lsx_vclip255_w(y_od); + out0 = __lsx_vpackev_h(y_od, y_ev); + temp0 = __lsx_vpackev_b(out0, out0); + temp1 = __lsx_vpackev_b(alpha, out0); + dst2 = __lsx_vilvl_h(temp1, temp0); + dst3 = __lsx_vilvh_h(temp1, temp0); + __lsx_vst(dst0, dst_argb, 0); + __lsx_vst(dst1, dst_argb, 16); + __lsx_vst(dst2, dst_argb, 32); + __lsx_vst(dst3, dst_argb, 48); + dst_argb += 64; + src_y += 16; + } +} + +void J400ToARGBRow_LSX(const uint8_t* src_y, uint8_t* dst_argb, int width) { + int x; + int len = width / 16; + __m128i vec_y, dst0, dst1, dst2, dst3; + __m128i tmp0, tmp1, tmp2, tmp3; + __m128i alpha = __lsx_vldi(0xFF); + + for (x = 0; x < len; x++) { + vec_y = __lsx_vld(src_y, 0); + tmp0 = __lsx_vilvl_b(vec_y, vec_y); + tmp1 = __lsx_vilvh_b(vec_y, vec_y); + tmp2 = __lsx_vilvl_b(alpha, vec_y); + tmp3 = __lsx_vilvh_b(alpha, vec_y); + dst0 = __lsx_vilvl_h(tmp2, tmp0); + dst1 = __lsx_vilvh_h(tmp2, tmp0); + dst2 = __lsx_vilvl_h(tmp3, tmp1); + dst3 = __lsx_vilvh_h(tmp3, tmp1); + __lsx_vst(dst0, dst_argb, 0); + __lsx_vst(dst1, dst_argb, 16); + __lsx_vst(dst2, dst_argb, 32); + __lsx_vst(dst3, dst_argb, 48); + dst_argb += 64; + src_y += 16; + } +} + +void YUY2ToARGBRow_LSX(const uint8_t* src_yuy2, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 8; + __m128i src0, vec_y, vec_vu; + __m128i vec_vr, vec_ub, vec_vg, vec_ug, vec_yg, vec_yb; + __m128i vec_vrub, vec_vgug; + __m128i out_b, out_g, out_r; + __m128i const_80 = __lsx_vldi(0x480); + __m128i zero = __lsx_vldi(0); + __m128i alpha = __lsx_vldi(0xFF); + + YUVTORGB_SETUP(yuvconstants, vec_vr, vec_ub, vec_vg, vec_ug, vec_yg, vec_yb); + vec_vrub = __lsx_vilvl_h(vec_vr, vec_ub); + vec_vgug = __lsx_vilvl_h(vec_vg, vec_ug); + + for (x = 0; x < len; x++) { + src0 = __lsx_vld(src_yuy2, 0); + vec_y = __lsx_vpickev_b(src0, src0); + vec_vu = 
__lsx_vpickod_b(src0, src0); + YUVTORGB(vec_y, vec_vu, vec_vrub, vec_vgug, vec_yg, vec_yb, out_b, out_g, + out_r); + STOREARGB(alpha, out_r, out_g, out_b, dst_argb); + src_yuy2 += 16; + } +} + +void UYVYToARGBRow_LSX(const uint8_t* src_uyvy, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + int x; + int len = width / 8; + __m128i src0, vec_y, vec_vu; + __m128i vec_vr, vec_ub, vec_vg, vec_ug, vec_yg, vec_yb; + __m128i vec_vrub, vec_vgug; + __m128i out_b, out_g, out_r; + __m128i const_80 = __lsx_vldi(0x480); + __m128i zero = __lsx_vldi(0); + __m128i alpha = __lsx_vldi(0xFF); + + YUVTORGB_SETUP(yuvconstants, vec_vr, vec_ub, vec_vg, vec_ug, vec_yg, vec_yb); + vec_vrub = __lsx_vilvl_h(vec_vr, vec_ub); + vec_vgug = __lsx_vilvl_h(vec_vg, vec_ug); + + for (x = 0; x < len; x++) { + src0 = __lsx_vld(src_uyvy, 0); + vec_y = __lsx_vpickod_b(src0, src0); + vec_vu = __lsx_vpickev_b(src0, src0); + YUVTORGB(vec_y, vec_vu, vec_vrub, vec_vgug, vec_yg, vec_yb, out_b, out_g, + out_r); + STOREARGB(alpha, out_r, out_g, out_b, dst_argb); + src_uyvy += 16; + } +} + +void InterpolateRow_LSX(uint8_t* dst_ptr, + const uint8_t* src_ptr, + ptrdiff_t src_stride, + int width, + int32_t source_y_fraction) { + int x; + int y1_fraction = source_y_fraction; + int y0_fraction = 256 - y1_fraction; + const uint8_t* nex_ptr = src_ptr + src_stride; + uint16_t y_fractions; + int len = width / 32; + __m128i src0, src1, nex0, nex1; + __m128i dst0, dst1, y_frac; + __m128i tmp0, tmp1, tmp2, tmp3; + __m128i const_128 = __lsx_vldi(0x480); + + if (y1_fraction == 0) { + for (x = 0; x < len; x++) { + DUP2_ARG2(__lsx_vld, src_ptr, 0, src_ptr, 16, src0, src1); + __lsx_vst(src0, dst_ptr, 0); + __lsx_vst(src1, dst_ptr, 16); + src_ptr += 32; + dst_ptr += 32; + } + return; + } + + if (y1_fraction == 128) { + for (x = 0; x < len; x++) { + DUP2_ARG2(__lsx_vld, src_ptr, 0, src_ptr, 16, src0, src1); + DUP2_ARG2(__lsx_vld, nex_ptr, 0, nex_ptr, 16, nex0, nex1); + dst0 = __lsx_vavgr_bu(src0, nex0); + dst1 = __lsx_vavgr_bu(src1, nex1); + __lsx_vst(dst0, dst_ptr, 0); + __lsx_vst(dst1, dst_ptr, 16); + src_ptr += 32; + nex_ptr += 32; + dst_ptr += 32; + } + return; + } + + y_fractions = (uint16_t)(y0_fraction + (y1_fraction << 8)); + y_frac = __lsx_vreplgr2vr_h(y_fractions); + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lsx_vld, src_ptr, 0, src_ptr, 16, src0, src1); + DUP2_ARG2(__lsx_vld, nex_ptr, 0, nex_ptr, 16, nex0, nex1); + tmp0 = __lsx_vilvl_b(nex0, src0); + tmp1 = __lsx_vilvh_b(nex0, src0); + tmp2 = __lsx_vilvl_b(nex1, src1); + tmp3 = __lsx_vilvh_b(nex1, src1); + tmp0 = __lsx_vdp2add_h_bu(const_128, tmp0, y_frac); + tmp1 = __lsx_vdp2add_h_bu(const_128, tmp1, y_frac); + tmp2 = __lsx_vdp2add_h_bu(const_128, tmp2, y_frac); + tmp3 = __lsx_vdp2add_h_bu(const_128, tmp3, y_frac); + dst0 = __lsx_vsrlni_b_h(tmp1, tmp0, 8); + dst1 = __lsx_vsrlni_b_h(tmp3, tmp2, 8); + __lsx_vst(dst0, dst_ptr, 0); + __lsx_vst(dst1, dst_ptr, 16); + src_ptr += 32; + nex_ptr += 32; + dst_ptr += 32; + } +} + +void ARGBSetRow_LSX(uint8_t* dst_argb, uint32_t v32, int width) { + int x; + int len = width / 4; + __m128i dst0 = __lsx_vreplgr2vr_w(v32); + + for (x = 0; x < len; x++) { + __lsx_vst(dst0, dst_argb, 0); + dst_argb += 16; + } +} + +void RAWToRGB24Row_LSX(const uint8_t* src_raw, uint8_t* dst_rgb24, int width) { + int x; + int len = width / 16; + __m128i src0, src1, src2; + __m128i dst0, dst1, dst2; + __m128i shuf0 = {0x0708030405000102, 0x110C0D0E090A0B06}; + __m128i shuf1 = {0x1516171213140F10, 0x1F1E1B1C1D18191A}; + __m128i shuf2 = 
{0x090405060102031E, 0x0D0E0F0A0B0C0708}; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lsx_vld, src_raw, 0, src_raw, 16, src0, src1); + src2 = __lsx_vld(src_raw, 32); + DUP2_ARG3(__lsx_vshuf_b, src1, src0, shuf0, src1, src0, shuf1, dst0, dst1); + dst2 = __lsx_vshuf_b(src1, src2, shuf2); + dst1 = __lsx_vinsgr2vr_b(dst1, src_raw[32], 0x0E); + __lsx_vst(dst0, dst_rgb24, 0); + __lsx_vst(dst1, dst_rgb24, 16); + __lsx_vst(dst2, dst_rgb24, 32); + dst_rgb24 += 48; + src_raw += 48; + } +} + +void MergeUVRow_LSX(const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_uv, + int width) { + int x; + int len = width / 16; + __m128i src0, src1, dst0, dst1; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lsx_vld, src_u, 0, src_v, 0, src0, src1); + dst0 = __lsx_vilvl_b(src1, src0); + dst1 = __lsx_vilvh_b(src1, src0); + __lsx_vst(dst0, dst_uv, 0); + __lsx_vst(dst1, dst_uv, 16); + src_u += 16; + src_v += 16; + dst_uv += 32; + } +} + +void ARGBExtractAlphaRow_LSX(const uint8_t* src_argb, + uint8_t* dst_a, + int width) { + int x; + int len = width / 16; + __m128i src0, src1, src2, src3, tmp0, tmp1, dst0; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_argb, 0, src_argb, 16, src_argb, 32, src_argb, 48, + src0, src1, src2, src3); + tmp0 = __lsx_vpickod_b(src1, src0); + tmp1 = __lsx_vpickod_b(src3, src2); + dst0 = __lsx_vpickod_b(tmp1, tmp0); + __lsx_vst(dst0, dst_a, 0); + src_argb += 64; + dst_a += 16; + } +} + +void ARGBBlendRow_LSX(const uint8_t* src_argb, + const uint8_t* src_argb1, + uint8_t* dst_argb, + int width) { + int x; + int len = width / 8; + __m128i src0, src1, src2, src3; + __m128i tmp0, tmp1, dst0, dst1; + __m128i reg0, reg1, reg2, reg3; + __m128i a0, a1, a2, a3; + __m128i const_256 = __lsx_vldi(0x500); + __m128i zero = __lsx_vldi(0); + __m128i alpha = __lsx_vldi(0xFF); + __m128i control = {0xFF000000FF000000, 0xFF000000FF000000}; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_argb, 0, src_argb, 16, src_argb1, 0, src_argb1, 16, + src0, src1, src2, src3); + tmp0 = __lsx_vshuf4i_b(src0, 0xFF); + tmp1 = __lsx_vshuf4i_b(src1, 0xFF); + a0 = __lsx_vilvl_b(zero, tmp0); + a1 = __lsx_vilvh_b(zero, tmp0); + a2 = __lsx_vilvl_b(zero, tmp1); + a3 = __lsx_vilvh_b(zero, tmp1); + reg0 = __lsx_vilvl_b(zero, src2); + reg1 = __lsx_vilvh_b(zero, src2); + reg2 = __lsx_vilvl_b(zero, src3); + reg3 = __lsx_vilvh_b(zero, src3); + DUP4_ARG2(__lsx_vsub_h, const_256, a0, const_256, a1, const_256, a2, + const_256, a3, a0, a1, a2, a3); + DUP4_ARG2(__lsx_vmul_h, a0, reg0, a1, reg1, a2, reg2, a3, reg3, reg0, reg1, + reg2, reg3); + DUP2_ARG3(__lsx_vsrani_b_h, reg1, reg0, 8, reg3, reg2, 8, dst0, dst1); + dst0 = __lsx_vsadd_bu(dst0, src0); + dst1 = __lsx_vsadd_bu(dst1, src1); + dst0 = __lsx_vbitsel_v(dst0, alpha, control); + dst1 = __lsx_vbitsel_v(dst1, alpha, control); + __lsx_vst(dst0, dst_argb, 0); + __lsx_vst(dst1, dst_argb, 16); + src_argb += 32; + src_argb1 += 32; + dst_argb += 32; + } +} + +void ARGBQuantizeRow_LSX(uint8_t* dst_argb, + int scale, + int interval_size, + int interval_offset, + int width) { + int x; + int len = width / 16; + __m128i src0, src1, src2, src3, dst0, dst1, dst2, dst3; + __m128i tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7; + __m128i reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7; + __m128i vec_size = __lsx_vreplgr2vr_b(interval_size); + __m128i vec_offset = __lsx_vreplgr2vr_b(interval_offset); + __m128i vec_scale = __lsx_vreplgr2vr_w(scale); + __m128i zero = __lsx_vldi(0); + __m128i control = {0xFF000000FF000000, 0xFF000000FF000000}; + + for (x = 0; x < len; x++) { + 
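+    /* Reference (scalar sketch, name illustrative only): per pixel the body
+     * of this loop posterizes B, G and R with a 16.16 fixed-point scale and
+     * leaves the alpha byte untouched (that is what the 0xFF000000 `control`
+     * mask selects in the final vbitsel_v).
+     *
+     *   static void QuantizePixel_sketch(uint8_t* bgra, int scale,
+     *                                    int interval_size,
+     *                                    int interval_offset) {
+     *     for (int ch = 0; ch < 3; ++ch) {   // index 3 (alpha) is preserved
+     *       bgra[ch] = (uint8_t)(((bgra[ch] * scale) >> 16) * interval_size +
+     *                            interval_offset);
+     *     }
+     *   }
+     */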
DUP4_ARG2(__lsx_vld, dst_argb, 0, dst_argb, 16, dst_argb, 32, dst_argb, 48, + src0, src1, src2, src3); + reg0 = __lsx_vilvl_b(zero, src0); + reg1 = __lsx_vilvh_b(zero, src0); + reg2 = __lsx_vilvl_b(zero, src1); + reg3 = __lsx_vilvh_b(zero, src1); + reg4 = __lsx_vilvl_b(zero, src2); + reg5 = __lsx_vilvh_b(zero, src2); + reg6 = __lsx_vilvl_b(zero, src3); + reg7 = __lsx_vilvh_b(zero, src3); + tmp0 = __lsx_vilvl_h(zero, reg0); + tmp1 = __lsx_vilvh_h(zero, reg0); + tmp2 = __lsx_vilvl_h(zero, reg1); + tmp3 = __lsx_vilvh_h(zero, reg1); + tmp4 = __lsx_vilvl_h(zero, reg2); + tmp5 = __lsx_vilvh_h(zero, reg2); + tmp6 = __lsx_vilvl_h(zero, reg3); + tmp7 = __lsx_vilvh_h(zero, reg3); + DUP4_ARG2(__lsx_vmul_w, tmp0, vec_scale, tmp1, vec_scale, tmp2, vec_scale, + tmp3, vec_scale, tmp0, tmp1, tmp2, tmp3); + DUP4_ARG2(__lsx_vmul_w, tmp4, vec_scale, tmp5, vec_scale, tmp6, vec_scale, + tmp7, vec_scale, tmp4, tmp5, tmp6, tmp7); + DUP4_ARG3(__lsx_vsrani_h_w, tmp1, tmp0, 16, tmp3, tmp2, 16, tmp5, tmp4, 16, + tmp7, tmp6, 16, reg0, reg1, reg2, reg3); + dst0 = __lsx_vpickev_b(reg1, reg0); + dst1 = __lsx_vpickev_b(reg3, reg2); + tmp0 = __lsx_vilvl_h(zero, reg4); + tmp1 = __lsx_vilvh_h(zero, reg4); + tmp2 = __lsx_vilvl_h(zero, reg5); + tmp3 = __lsx_vilvh_h(zero, reg5); + tmp4 = __lsx_vilvl_h(zero, reg6); + tmp5 = __lsx_vilvh_h(zero, reg6); + tmp6 = __lsx_vilvl_h(zero, reg7); + tmp7 = __lsx_vilvh_h(zero, reg7); + DUP4_ARG2(__lsx_vmul_w, tmp0, vec_scale, tmp1, vec_scale, tmp2, vec_scale, + tmp3, vec_scale, tmp0, tmp1, tmp2, tmp3); + DUP4_ARG2(__lsx_vmul_w, tmp4, vec_scale, tmp5, vec_scale, tmp6, vec_scale, + tmp7, vec_scale, tmp4, tmp5, tmp6, tmp7); + DUP4_ARG3(__lsx_vsrani_h_w, tmp1, tmp0, 16, tmp3, tmp2, 16, tmp5, tmp4, 16, + tmp7, tmp6, 16, reg0, reg1, reg2, reg3); + dst2 = __lsx_vpickev_b(reg1, reg0); + dst3 = __lsx_vpickev_b(reg3, reg2); + DUP4_ARG2(__lsx_vmul_b, dst0, vec_size, dst1, vec_size, dst2, vec_size, + dst3, vec_size, dst0, dst1, dst2, dst3); + DUP4_ARG2(__lsx_vadd_b, dst0, vec_offset, dst1, vec_offset, dst2, + vec_offset, dst3, vec_offset, dst0, dst1, dst2, dst3); + DUP4_ARG3(__lsx_vbitsel_v, dst0, src0, control, dst1, src1, control, dst2, + src2, control, dst3, src3, control, dst0, dst1, dst2, dst3); + __lsx_vst(dst0, dst_argb, 0); + __lsx_vst(dst1, dst_argb, 16); + __lsx_vst(dst2, dst_argb, 32); + __lsx_vst(dst3, dst_argb, 48); + dst_argb += 64; + } +} + +void ARGBColorMatrixRow_LSX(const uint8_t* src_argb, + uint8_t* dst_argb, + const int8_t* matrix_argb, + int width) { + int x; + int len = width / 8; + __m128i src0, src1, tmp0, tmp1, dst0, dst1; + __m128i tmp_b, tmp_g, tmp_r, tmp_a; + __m128i reg_b, reg_g, reg_r, reg_a; + __m128i matrix_b = __lsx_vldrepl_w(matrix_argb, 0); + __m128i matrix_g = __lsx_vldrepl_w(matrix_argb, 4); + __m128i matrix_r = __lsx_vldrepl_w(matrix_argb, 8); + __m128i matrix_a = __lsx_vldrepl_w(matrix_argb, 12); + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lsx_vld, src_argb, 0, src_argb, 16, src0, src1); + DUP4_ARG2(__lsx_vdp2_h_bu_b, src0, matrix_b, src0, matrix_g, src0, matrix_r, + src0, matrix_a, tmp_b, tmp_g, tmp_r, tmp_a); + DUP4_ARG2(__lsx_vdp2_h_bu_b, src1, matrix_b, src1, matrix_g, src1, matrix_r, + src1, matrix_a, reg_b, reg_g, reg_r, reg_a); + DUP4_ARG2(__lsx_vhaddw_w_h, tmp_b, tmp_b, tmp_g, tmp_g, tmp_r, tmp_r, tmp_a, + tmp_a, tmp_b, tmp_g, tmp_r, tmp_a); + DUP4_ARG2(__lsx_vhaddw_w_h, reg_b, reg_b, reg_g, reg_g, reg_r, reg_r, reg_a, + reg_a, reg_b, reg_g, reg_r, reg_a); + DUP4_ARG2(__lsx_vsrai_w, tmp_b, 6, tmp_g, 6, tmp_r, 6, tmp_a, 6, tmp_b, + tmp_g, tmp_r, tmp_a); 
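+    /* Reference (scalar sketch, names illustrative only): each output channel
+     * is a signed dot product of the (B,G,R,A) bytes with one row of
+     * matrix_argb, shifted right by 6 and clamped, matching the
+     * vdp2/vhaddw/vsrai/vclip255 sequence in this loop.
+     *
+     *   static uint8_t clip255_sketch(int v) {
+     *     return (uint8_t)(v < 0 ? 0 : v > 255 ? 255 : v);
+     *   }
+     *   static void ColorMatrixPixel_sketch(const uint8_t* bgra,
+     *                                       const int8_t* m,
+     *                                       uint8_t* out_bgra) {
+     *     for (int ch = 0; ch < 4; ++ch) {
+     *       int v = (bgra[0] * m[4 * ch + 0] + bgra[1] * m[4 * ch + 1] +
+     *                bgra[2] * m[4 * ch + 2] + bgra[3] * m[4 * ch + 3]) >> 6;
+     *       out_bgra[ch] = clip255_sketch(v);
+     *     }
+     *   }
+     */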
+ DUP4_ARG2(__lsx_vsrai_w, reg_b, 6, reg_g, 6, reg_r, 6, reg_a, 6, reg_b, + reg_g, reg_r, reg_a); + DUP4_ARG1(__lsx_vclip255_w, tmp_b, tmp_g, tmp_r, tmp_a, tmp_b, tmp_g, tmp_r, + tmp_a) + DUP4_ARG1(__lsx_vclip255_w, reg_b, reg_g, reg_r, reg_a, reg_b, reg_g, reg_r, + reg_a) + DUP4_ARG2(__lsx_vpickev_h, reg_b, tmp_b, reg_g, tmp_g, reg_r, tmp_r, reg_a, + tmp_a, tmp_b, tmp_g, tmp_r, tmp_a); + tmp0 = __lsx_vpackev_b(tmp_g, tmp_b); + tmp1 = __lsx_vpackev_b(tmp_a, tmp_r); + dst0 = __lsx_vilvl_h(tmp1, tmp0); + dst1 = __lsx_vilvh_h(tmp1, tmp0); + __lsx_vst(dst0, dst_argb, 0); + __lsx_vst(dst1, dst_argb, 16); + src_argb += 32; + dst_argb += 32; + } +} + +void SplitUVRow_LSX(const uint8_t* src_uv, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + int len = width / 32; + __m128i src0, src1, src2, src3; + __m128i dst0, dst1, dst2, dst3; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_uv, 0, src_uv, 16, src_uv, 32, src_uv, 48, src0, + src1, src2, src3); + DUP2_ARG2(__lsx_vpickev_b, src1, src0, src3, src2, dst0, dst1); + DUP2_ARG2(__lsx_vpickod_b, src1, src0, src3, src2, dst2, dst3); + __lsx_vst(dst0, dst_u, 0); + __lsx_vst(dst1, dst_u, 16); + __lsx_vst(dst2, dst_v, 0); + __lsx_vst(dst3, dst_v, 16); + src_uv += 64; + dst_u += 32; + dst_v += 32; + } +} + +void SetRow_LSX(uint8_t* dst, uint8_t v8, int width) { + int x; + int len = width / 16; + __m128i dst0 = __lsx_vreplgr2vr_b(v8); + + for (x = 0; x < len; x++) { + __lsx_vst(dst0, dst, 0); + dst += 16; + } +} + +void MirrorSplitUVRow_LSX(const uint8_t* src_uv, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + int x; + int len = width / 32; + __m128i src0, src1, src2, src3; + __m128i dst0, dst1, dst2, dst3; + __m128i shuff0 = {0x10121416181A1C1E, 0x00020406080A0C0E}; + __m128i shuff1 = {0x11131517191B1D1F, 0x01030507090B0D0F}; + + src_uv += (width << 1); + for (x = 0; x < len; x++) { + src_uv -= 64; + DUP4_ARG2(__lsx_vld, src_uv, 0, src_uv, 16, src_uv, 32, src_uv, 48, src2, + src3, src0, src1); + DUP4_ARG3(__lsx_vshuf_b, src1, src0, shuff1, src3, src2, shuff1, src1, src0, + shuff0, src3, src2, shuff0, dst0, dst1, dst2, dst3); + __lsx_vst(dst0, dst_v, 0); + __lsx_vst(dst1, dst_v, 16); + __lsx_vst(dst2, dst_u, 0); + __lsx_vst(dst3, dst_u, 16); + dst_u += 32; + dst_v += 32; + } +} + +void HalfFloatRow_LSX(const uint16_t* src, + uint16_t* dst, + float scale, + int width) { + int x; + int len = width / 32; + float mult = 1.9259299444e-34f * scale; + __m128i src0, src1, src2, src3, dst0, dst1, dst2, dst3; + __m128i tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7; + __m128 reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7; + __m128 vec_mult = (__m128)__lsx_vldrepl_w(&mult, 0); + __m128i zero = __lsx_vldi(0); + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src, 0, src, 16, src, 32, src, 48, src0, src1, src2, + src3); + DUP4_ARG2(__lsx_vilvl_h, zero, src0, zero, src1, zero, src2, zero, src3, + tmp0, tmp2, tmp4, tmp6); + DUP4_ARG2(__lsx_vilvh_h, zero, src0, zero, src1, zero, src2, zero, src3, + tmp1, tmp3, tmp5, tmp7); + DUP4_ARG1(__lsx_vffint_s_wu, tmp0, tmp2, tmp4, tmp6, reg0, reg2, reg4, + reg6); + DUP4_ARG1(__lsx_vffint_s_wu, tmp1, tmp3, tmp5, tmp7, reg1, reg3, reg5, + reg7); + DUP4_ARG2(__lsx_vfmul_s, reg0, vec_mult, reg1, vec_mult, reg2, vec_mult, + reg3, vec_mult, reg0, reg1, reg2, reg3); + DUP4_ARG2(__lsx_vfmul_s, reg4, vec_mult, reg5, vec_mult, reg6, vec_mult, + reg7, vec_mult, reg4, reg5, reg6, reg7); + DUP4_ARG2(__lsx_vsrli_w, (v4u32)reg0, 13, (v4u32)reg1, 13, (v4u32)reg2, 13, + (v4u32)reg3, 13, tmp0, tmp1, tmp2, tmp3); + 
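+    /* Reference (scalar sketch, name illustrative only): the constant
+     * 1.9259299444e-34f is 2^-112. Pre-scaling by 2^-112 * scale re-biases
+     * the float exponent so that, after discarding 13 mantissa bits, the
+     * remaining bits are exactly the IEEE-754 binary16 encoding; the
+     * vpickev_h below then keeps the low 16 bits of each 32-bit lane.
+     *
+     *   static uint16_t U16ToHalf_sketch(uint16_t v, float scale) {
+     *     // assumes scale keeps the product within half-float range,
+     *     // as HalfFloatRow's callers guarantee
+     *     float f = (float)v * (1.9259299444e-34f * scale);
+     *     uint32_t bits;
+     *     memcpy(&bits, &f, sizeof(bits));   // requires <string.h>
+     *     return (uint16_t)(bits >> 13);
+     *   }
+     */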
DUP4_ARG2(__lsx_vsrli_w, (v4u32)reg4, 13, (v4u32)reg5, 13, (v4u32)reg6, 13, + (v4u32)reg7, 13, tmp4, tmp5, tmp6, tmp7); + DUP4_ARG2(__lsx_vpickev_h, tmp1, tmp0, tmp3, tmp2, tmp5, tmp4, tmp7, tmp6, + dst0, dst1, dst2, dst3); + __lsx_vst(dst0, dst, 0); + __lsx_vst(dst1, dst, 16); + __lsx_vst(dst2, dst, 32); + __lsx_vst(dst3, dst, 48); + src += 32; + dst += 32; + } +} + +struct RgbConstants { + uint8_t kRGBToY[4]; + uint16_t kAddY; + uint16_t pad; +}; + +// RGB to JPeg coefficients +// B * 0.1140 coefficient = 29 +// G * 0.5870 coefficient = 150 +// R * 0.2990 coefficient = 77 +// Add 0.5 = 0x80 +static const struct RgbConstants kRgb24JPEGConstants = {{29, 150, 77, 0}, + 128, + 0}; + +static const struct RgbConstants kRawJPEGConstants = {{77, 150, 29, 0}, 128, 0}; + +// RGB to BT.601 coefficients +// B * 0.1016 coefficient = 25 +// G * 0.5078 coefficient = 129 +// R * 0.2578 coefficient = 66 +// Add 16.5 = 0x1080 + +static const struct RgbConstants kRgb24I601Constants = {{25, 129, 66, 0}, + 0x1080, + 0}; + +static const struct RgbConstants kRawI601Constants = {{66, 129, 25, 0}, + 0x1080, + 0}; + +// ARGB expects first 3 values to contain RGB and 4th value is ignored. +static void ARGBToYMatrixRow_LSX(const uint8_t* src_argb, + uint8_t* dst_y, + int width, + const struct RgbConstants* rgbconstants) { + asm volatile( + "vldrepl.b $vr0, %3, 0 \n\t" // load rgbconstants + "vldrepl.b $vr1, %3, 1 \n\t" // load rgbconstants + "vldrepl.b $vr2, %3, 2 \n\t" // load rgbconstants + "vldrepl.h $vr3, %3, 4 \n\t" // load rgbconstants + "1: \n\t" + "vld $vr4, %0, 0 \n\t" + "vld $vr5, %0, 16 \n\t" + "vld $vr6, %0, 32 \n\t" + "vld $vr7, %0, 48 \n\t" // load 16 pixels of ARGB + "vor.v $vr12, $vr3, $vr3 \n\t" + "vor.v $vr13, $vr3, $vr3 \n\t" + "addi.d %2, %2, -16 \n\t" // 16 processed per loop. + "vpickev.b $vr8, $vr5, $vr4 \n\t" //BR + "vpickev.b $vr10, $vr7, $vr6 \n\t" + "vpickod.b $vr9, $vr5, $vr4 \n\t" //GA + "vpickod.b $vr11, $vr7, $vr6 \n\t" + "vmaddwev.h.bu $vr12, $vr8, $vr0 \n\t" //B + "vmaddwev.h.bu $vr13, $vr10, $vr0 \n\t" + "vmaddwev.h.bu $vr12, $vr9, $vr1 \n\t" //G + "vmaddwev.h.bu $vr13, $vr11, $vr1 \n\t" + "vmaddwod.h.bu $vr12, $vr8, $vr2 \n\t" //R + "vmaddwod.h.bu $vr13, $vr10, $vr2 \n\t" + "addi.d %0, %0, 64 \n\t" + "vpickod.b $vr10, $vr13, $vr12 \n\t" + "vst $vr10, %1, 0 \n\t" + "addi.d %1, %1, 16 \n\t" + "bnez %2, 1b \n\t" + : "+&r"(src_argb), // %0 + "+&r"(dst_y), // %1 + "+&r"(width) // %2 + : "r"(rgbconstants) + : "memory" + ); +} + +void ARGBToYRow_LSX(const uint8_t* src_argb, uint8_t* dst_y, int width) { + ARGBToYMatrixRow_LSX(src_argb, dst_y, width, &kRgb24I601Constants); +} + +void ARGBToYJRow_LSX(const uint8_t* src_argb, uint8_t* dst_yj, int width) { + ARGBToYMatrixRow_LSX(src_argb, dst_yj, width, &kRgb24JPEGConstants); +} + +void ABGRToYRow_LSX(const uint8_t* src_abgr, uint8_t* dst_y, int width) { + ARGBToYMatrixRow_LSX(src_abgr, dst_y, width, &kRawI601Constants); +} + +void ABGRToYJRow_LSX(const uint8_t* src_abgr, uint8_t* dst_yj, int width) { + ARGBToYMatrixRow_LSX(src_abgr, dst_yj, width, &kRawJPEGConstants); +} + +// RGBA expects first value to be A and ignored, then 3 values to contain RGB. 
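+// Reference (scalar sketch, name illustrative only): ARGBToYMatrixRow_LSX
+// seeds each 16-bit accumulator with kAddY (vldrepl.h at offset 4 of
+// RgbConstants) and keeps the high byte of the sum, i.e. per pixel, with
+// ARGB stored B,G,R,A in memory:
+//
+//   static uint8_t PixelToY_sketch(uint8_t b, uint8_t g, uint8_t r,
+//                                  const struct RgbConstants* k) {
+//     // Worst case is 256*255 + 128 = 65408, so the 16-bit sum cannot
+//     // overflow for either the I601 or the JPEG coefficient set.
+//     return (uint8_t)((k->kRGBToY[0] * b + k->kRGBToY[1] * g +
+//                       k->kRGBToY[2] * r + k->kAddY) >> 8);
+//   }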
+// Same code as ARGB, except the LD4 +static void RGBAToYMatrixRow_LSX(const uint8_t* src_rgba, + uint8_t* dst_y, + int width, + const struct RgbConstants* rgbconstants) { + asm volatile( + "vldrepl.b $vr0, %3, 0 \n\t" // load rgbconstants + "vldrepl.b $vr1, %3, 1 \n\t" // load rgbconstants + "vldrepl.b $vr2, %3, 2 \n\t" // load rgbconstants + "vldrepl.h $vr3, %3, 4 \n\t" // load rgbconstants + "1: \n\t" + "vld $vr4, %0, 0 \n\t" + "vld $vr5, %0, 16 \n\t" + "vld $vr6, %0, 32 \n\t" + "vld $vr7, %0, 48 \n\t" // load 16 pixels of RGBA + "vor.v $vr12, $vr3, $vr3 \n\t" + "vor.v $vr13, $vr3, $vr3 \n\t" + "addi.d %2, %2, -16 \n\t" // 16 processed per loop. + "vpickev.b $vr8, $vr5, $vr4 \n\t" //AG + "vpickev.b $vr10, $vr7, $vr6 \n\t" + "vpickod.b $vr9, $vr5, $vr4 \n\t" //BR + "vpickod.b $vr11, $vr7, $vr6 \n\t" + "vmaddwev.h.bu $vr12, $vr9, $vr0 \n\t" //B + "vmaddwev.h.bu $vr13, $vr11, $vr0 \n\t" + "vmaddwod.h.bu $vr12, $vr8, $vr1 \n\t" //G + "vmaddwod.h.bu $vr13, $vr10, $vr1 \n\t" + "vmaddwod.h.bu $vr12, $vr9, $vr2 \n\t" //R + "vmaddwod.h.bu $vr13, $vr11, $vr2 \n\t" + "addi.d %0, %0, 64 \n\t" + "vpickod.b $vr10, $vr13, $vr12 \n\t" + "vst $vr10, %1, 0 \n\t" + "addi.d %1, %1, 16 \n\t" + "bnez %2, 1b \n\t" + : "+&r"(src_rgba), // %0 + "+&r"(dst_y), // %1 + "+&r"(width) // %2 + : "r"(rgbconstants) + : "memory" + ); +} + +void RGBAToYRow_LSX(const uint8_t* src_rgba, uint8_t* dst_y, int width) { + RGBAToYMatrixRow_LSX(src_rgba, dst_y, width, &kRgb24I601Constants); +} + +void RGBAToYJRow_LSX(const uint8_t* src_rgba, uint8_t* dst_yj, int width) { + RGBAToYMatrixRow_LSX(src_rgba, dst_yj, width, &kRgb24JPEGConstants); +} + +void BGRAToYRow_LSX(const uint8_t* src_bgra, uint8_t* dst_y, int width) { + RGBAToYMatrixRow_LSX(src_bgra, dst_y, width, &kRawI601Constants); +} + +static void RGBToYMatrixRow_LSX(const uint8_t* src_rgba, + uint8_t* dst_y, + int width, + const struct RgbConstants* rgbconstants) { + int8_t shuff[64] = {0, 2, 3, 5, 6, 8, 9, 11, 12, 14, 15, 17, 18, 20, 21, 23, + 24, 26, 27, 29, 30, 0, 1, 3, 4, 6, 7, 9, 10, 12, 13, 15, + 1, 0, 4, 0, 7, 0, 10, 0, 13, 0, 16, 0, 19, 0, 22, 0, + 25, 0, 28, 0, 31, 0, 2, 0, 5, 0, 8, 0, 11, 0, 14, 0}; + asm volatile( + "vldrepl.b $vr0, %3, 0 \n\t" // load rgbconstants + "vldrepl.b $vr1, %3, 1 \n\t" // load rgbconstants + "vldrepl.b $vr2, %3, 2 \n\t" // load rgbconstants + "vldrepl.h $vr3, %3, 4 \n\t" // load rgbconstants + "vld $vr4, %4, 0 \n\t" // load shuff + "vld $vr5, %4, 16 \n\t" + "vld $vr6, %4, 32 \n\t" + "vld $vr7, %4, 48 \n\t" + "1: \n\t" + "vld $vr8, %0, 0 \n\t" + "vld $vr9, %0, 16 \n\t" + "vld $vr10, %0, 32 \n\t" // load 16 pixels of RGB + "vor.v $vr12, $vr3, $vr3 \n\t" + "vor.v $vr13, $vr3, $vr3 \n\t" + "addi.d %2, %2, -16 \n\t" // 16 processed per loop. 
+ "vshuf.b $vr14, $vr9, $vr8, $vr4 \n\t" + "vshuf.b $vr15, $vr9, $vr10, $vr5 \n\t" + "vshuf.b $vr16, $vr9, $vr8, $vr6 \n\t" + "vshuf.b $vr17, $vr9, $vr10, $vr7 \n\t" + "vmaddwev.h.bu $vr12, $vr16, $vr1 \n\t" //G + "vmaddwev.h.bu $vr13, $vr17, $vr1 \n\t" + "vmaddwev.h.bu $vr12, $vr14, $vr0 \n\t" //B + "vmaddwev.h.bu $vr13, $vr15, $vr0 \n\t" + "vmaddwod.h.bu $vr12, $vr14, $vr2 \n\t" //R + "vmaddwod.h.bu $vr13, $vr15, $vr2 \n\t" + "addi.d %0, %0, 48 \n\t" + "vpickod.b $vr10, $vr13, $vr12 \n\t" + "vst $vr10, %1, 0 \n\t" + "addi.d %1, %1, 16 \n\t" + "bnez %2, 1b \n\t" + : "+&r"(src_rgba), // %0 + "+&r"(dst_y), // %1 + "+&r"(width) // %2 + : "r"(rgbconstants), // %3 + "r"(shuff) // %4 + : "memory" + ); +} + +void RGB24ToYJRow_LSX(const uint8_t* src_rgb24, uint8_t* dst_yj, int width) { + RGBToYMatrixRow_LSX(src_rgb24, dst_yj, width, &kRgb24JPEGConstants); +} + +void RAWToYJRow_LSX(const uint8_t* src_raw, uint8_t* dst_yj, int width) { + RGBToYMatrixRow_LSX(src_raw, dst_yj, width, &kRawJPEGConstants); +} + +void RGB24ToYRow_LSX(const uint8_t* src_rgb24, uint8_t* dst_y, int width) { + RGBToYMatrixRow_LSX(src_rgb24, dst_y, width, &kRgb24I601Constants); +} + +void RAWToYRow_LSX(const uint8_t* src_raw, uint8_t* dst_y, int width) { + RGBToYMatrixRow_LSX(src_raw, dst_y, width, &kRawI601Constants); +} + +#ifdef __cplusplus +} // extern "C" +} // namespace libyuv +#endif + +#endif // !defined(LIBYUV_DISABLE_LSX) && defined(__loongarch_sx) diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_mmi.cc b/TMessagesProj/jni/third_party/libyuv/source/row_mmi.cc index 9a8e2cb2d1..362fd1cfcc 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_mmi.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/row_mmi.cc @@ -605,7 +605,7 @@ void ARGBToARGB4444Row_MMI(const uint8_t* src_argb, : "memory"); } -void ARGBToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { +void ARGBToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width) { uint64_t src, src_hi, src_lo; uint64_t dest0, dest1, dest2, dest3; const uint64_t value = 0x1080; @@ -613,8 +613,8 @@ void ARGBToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { __asm__ volatile( "1: \n\t" - "gsldlc1 %[src], 0x07(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x00(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x07(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x00(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -626,8 +626,8 @@ void ARGBToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest0], %[dest0], %[src] \n\t" "psrlw %[dest0], %[dest0], %[eight] \n\t" - "gsldlc1 %[src], 0x0f(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x08(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x0f(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x08(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -639,8 +639,8 @@ void ARGBToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest1], %[dest1], %[src] \n\t" "psrlw %[dest1], %[dest1], %[eight] \n\t" - "gsldlc1 %[src], 0x17(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x10(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x17(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x10(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -652,8 +652,8 @@ void ARGBToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, 
int width) { "paddw %[dest2], %[dest2], %[src] \n\t" "psrlw %[dest2], %[dest2], %[eight] \n\t" - "gsldlc1 %[src], 0x1f(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x18(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x1f(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x18(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -671,20 +671,20 @@ void ARGBToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "gssdlc1 %[dest0], 0x07(%[dst_y]) \n\t" "gssdrc1 %[dest0], 0x00(%[dst_y]) \n\t" - "daddiu %[src_argb0], %[src_argb0], 0x20 \n\t" + "daddiu %[src_argb], %[src_argb], 0x20 \n\t" "daddiu %[dst_y], %[dst_y], 0x08 \n\t" "daddi %[width], %[width], -0x08 \n\t" "bnez %[width], 1b \n\t" : [src] "=&f"(src), [src_hi] "=&f"(src_hi), [src_lo] "=&f"(src_lo), [dest0] "=&f"(dest0), [dest1] "=&f"(dest1), [dest2] "=&f"(dest2), [dest3] "=&f"(dest3) - : [src_argb0] "r"(src_argb0), [dst_y] "r"(dst_y), [width] "r"(width), + : [src_argb] "r"(src_argb), [dst_y] "r"(dst_y), [width] "r"(width), [mask] "f"(mask), [value] "f"(value), [eight] "f"(0x08), [zero] "f"(0x00) : "memory"); } -void ARGBToUVRow_MMI(const uint8_t* src_rgb0, +void ARGBToUVRow_MMI(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, uint8_t* dst_v, @@ -700,9 +700,9 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "dli %[tmp0], 0x0001000100010001 \n\t" "dmtc1 %[tmp0], %[ftmp12] \n\t" "1: \n\t" - "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t" - "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x07(%[src_rgb0]) \n\t" + "daddu %[src_rgb1], %[src_rgb], %[src_stride_rgb] \n\t" + "gsldrc1 %[src0], 0x00(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x07(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x00(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x07(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -720,8 +720,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest0_u], %[dest0_u], %[mask_u] \n\t" "pmaddhw %[dest0_v], %[dest0_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x08(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x0f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x08(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x0f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x08(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x0f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -748,8 +748,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest0_v], %[src1], %[src0] \n\t" "psraw %[dest0_v], %[dest0_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x10(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x17(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x10(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x17(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x10(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x17(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -767,8 +767,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest1_u], %[dest1_u], %[mask_u] \n\t" "pmaddhw %[dest1_v], %[dest1_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x18(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x1f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x18(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x1f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x18(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x1f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -795,8 +795,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest1_v], %[src1], %[src0] \n\t" "psraw %[dest1_v], %[dest1_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x20(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x27(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x20(%[src_rgb]) \n\t" 
+ "gsldlc1 %[src0], 0x27(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x20(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x27(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -814,8 +814,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest2_u], %[dest2_u], %[mask_u] \n\t" "pmaddhw %[dest2_v], %[dest2_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x28(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x2f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x28(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x2f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x28(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x2f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -842,8 +842,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest2_v], %[src1], %[src0] \n\t" "psraw %[dest2_v], %[dest2_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x30(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x37(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x30(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x37(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x30(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x37(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -861,8 +861,8 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest3_u], %[dest3_u], %[mask_u] \n\t" "pmaddhw %[dest3_v], %[dest3_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x38(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x3f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x38(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x3f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x38(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x3f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -901,7 +901,7 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, "gssdlc1 %[dest0_v], 0x07(%[dst_v]) \n\t" "gssdrc1 %[dest0_v], 0x00(%[dst_v]) \n\t" - "daddiu %[src_rgb0], %[src_rgb0], 0x40 \n\t" + "daddiu %[src_rgb], %[src_rgb], 0x40 \n\t" "daddiu %[dst_u], %[dst_u], 0x08 \n\t" "daddiu %[dst_v], %[dst_v], 0x08 \n\t" "daddi %[width], %[width], -0x10 \n\t" @@ -913,7 +913,7 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, [dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]), [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]), [ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0]) - : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb), + : [src_rgb] "r"(src_rgb), [src_stride_rgb] "r"(src_stride_rgb), [dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width), [mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value), [zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01), @@ -921,7 +921,7 @@ void ARGBToUVRow_MMI(const uint8_t* src_rgb0, : "memory"); } -void BGRAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { +void BGRAToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width) { uint64_t src, src_hi, src_lo; uint64_t dest0, dest1, dest2, dest3; const uint64_t value = 0x1080; @@ -929,8 +929,8 @@ void BGRAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { __asm__ volatile( "1: \n\t" - "gsldlc1 %[src], 0x07(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x00(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x07(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x00(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -942,8 +942,8 @@ void BGRAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest0], %[dest0], %[src] \n\t" "psrlw %[dest0], %[dest0], %[eight] \n\t" - "gsldlc1 %[src], 0x0f(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x08(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x0f(%[src_argb]) \n\t" + "gsldrc1 %[src], 
0x08(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -955,8 +955,8 @@ void BGRAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest1], %[dest1], %[src] \n\t" "psrlw %[dest1], %[dest1], %[eight] \n\t" - "gsldlc1 %[src], 0x17(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x10(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x17(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x10(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -968,8 +968,8 @@ void BGRAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest2], %[dest2], %[src] \n\t" "psrlw %[dest2], %[dest2], %[eight] \n\t" - "gsldlc1 %[src], 0x1f(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x18(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x1f(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x18(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -987,20 +987,20 @@ void BGRAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "gssdlc1 %[dest0], 0x07(%[dst_y]) \n\t" "gssdrc1 %[dest0], 0x00(%[dst_y]) \n\t" - "daddiu %[src_argb0], %[src_argb0], 0x20 \n\t" + "daddiu %[src_argb], %[src_argb], 0x20 \n\t" "daddiu %[dst_y], %[dst_y], 0x08 \n\t" "daddi %[width], %[width], -0x08 \n\t" "bnez %[width], 1b \n\t" : [src] "=&f"(src), [src_hi] "=&f"(src_hi), [src_lo] "=&f"(src_lo), [dest0] "=&f"(dest0), [dest1] "=&f"(dest1), [dest2] "=&f"(dest2), [dest3] "=&f"(dest3) - : [src_argb0] "r"(src_argb0), [dst_y] "r"(dst_y), [width] "r"(width), + : [src_argb] "r"(src_argb), [dst_y] "r"(dst_y), [width] "r"(width), [mask] "f"(mask), [value] "f"(value), [eight] "f"(0x08), [zero] "f"(0x00) : "memory"); } -void BGRAToUVRow_MMI(const uint8_t* src_rgb0, +void BGRAToUVRow_MMI(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, uint8_t* dst_v, @@ -1016,9 +1016,9 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "dli %[tmp0], 0x0001000100010001 \n\t" "dmtc1 %[tmp0], %[ftmp12] \n\t" "1: \n\t" - "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t" - "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x07(%[src_rgb0]) \n\t" + "daddu %[src_rgb1], %[src_rgb], %[src_stride_rgb] \n\t" + "gsldrc1 %[src0], 0x00(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x07(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x00(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x07(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1036,8 +1036,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest0_u], %[dest0_u], %[mask_u] \n\t" "pmaddhw %[dest0_v], %[dest0_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x08(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x0f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x08(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x0f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x08(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x0f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1064,8 +1064,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest0_v], %[src0], %[src1] \n\t" "psraw %[dest0_v], %[dest0_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x10(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x17(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x10(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x17(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x10(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x17(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1083,8 
+1083,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest1_u], %[dest1_u], %[mask_u] \n\t" "pmaddhw %[dest1_v], %[dest1_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x18(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x1f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x18(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x1f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x18(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x1f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1111,8 +1111,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest1_v], %[src0], %[src1] \n\t" "psraw %[dest1_v], %[dest1_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x20(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x27(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x20(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x27(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x20(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x27(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1130,8 +1130,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest2_u], %[dest2_u], %[mask_u] \n\t" "pmaddhw %[dest2_v], %[dest2_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x28(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x2f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x28(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x2f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x28(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x2f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1158,8 +1158,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest2_v], %[src0], %[src1] \n\t" "psraw %[dest2_v], %[dest2_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x30(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x37(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x30(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x37(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x30(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x37(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1177,8 +1177,8 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest3_u], %[dest3_u], %[mask_u] \n\t" "pmaddhw %[dest3_v], %[dest3_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x38(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x3f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x38(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x3f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x38(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x3f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1217,7 +1217,7 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, "gssdlc1 %[dest0_v], 0x07(%[dst_v]) \n\t" "gssdrc1 %[dest0_v], 0x00(%[dst_v]) \n\t" - "daddiu %[src_rgb0], %[src_rgb0], 0x40 \n\t" + "daddiu %[src_rgb], %[src_rgb], 0x40 \n\t" "daddiu %[dst_u], %[dst_u], 0x08 \n\t" "daddiu %[dst_v], %[dst_v], 0x08 \n\t" "daddi %[width], %[width], -0x10 \n\t" @@ -1229,7 +1229,7 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, [dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]), [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]), [ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0]) - : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb), + : [src_rgb] "r"(src_rgb), [src_stride_rgb] "r"(src_stride_rgb), [dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width), [mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value), [zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01), @@ -1237,7 +1237,7 @@ void BGRAToUVRow_MMI(const uint8_t* src_rgb0, : "memory"); } -void ABGRToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { +void ABGRToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width) { uint64_t src, src_hi, src_lo; uint64_t dest0, dest1, dest2, dest3; const uint64_t value = 0x1080; @@ -1245,8 
+1245,8 @@ void ABGRToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { __asm__ volatile( "1: \n\t" - "gsldlc1 %[src], 0x07(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x00(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x07(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x00(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -1258,8 +1258,8 @@ void ABGRToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest0], %[dest0], %[src] \n\t" "psrlw %[dest0], %[dest0], %[eight] \n\t" - "gsldlc1 %[src], 0x0f(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x08(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x0f(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x08(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -1271,8 +1271,8 @@ void ABGRToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest1], %[dest1], %[src] \n\t" "psrlw %[dest1], %[dest1], %[eight] \n\t" - "gsldlc1 %[src], 0x17(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x10(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x17(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x10(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -1284,8 +1284,8 @@ void ABGRToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest2], %[dest2], %[src] \n\t" "psrlw %[dest2], %[dest2], %[eight] \n\t" - "gsldlc1 %[src], 0x1f(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x18(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x1f(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x18(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -1303,20 +1303,20 @@ void ABGRToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "gssdlc1 %[dest0], 0x07(%[dst_y]) \n\t" "gssdrc1 %[dest0], 0x00(%[dst_y]) \n\t" - "daddiu %[src_argb0], %[src_argb0], 0x20 \n\t" + "daddiu %[src_argb], %[src_argb], 0x20 \n\t" "daddiu %[dst_y], %[dst_y], 0x08 \n\t" "daddi %[width], %[width], -0x08 \n\t" "bnez %[width], 1b \n\t" : [src] "=&f"(src), [src_hi] "=&f"(src_hi), [src_lo] "=&f"(src_lo), [dest0] "=&f"(dest0), [dest1] "=&f"(dest1), [dest2] "=&f"(dest2), [dest3] "=&f"(dest3) - : [src_argb0] "r"(src_argb0), [dst_y] "r"(dst_y), [width] "r"(width), + : [src_argb] "r"(src_argb), [dst_y] "r"(dst_y), [width] "r"(width), [mask] "f"(mask), [value] "f"(value), [eight] "f"(0x08), [zero] "f"(0x00) : "memory"); } -void ABGRToUVRow_MMI(const uint8_t* src_rgb0, +void ABGRToUVRow_MMI(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, uint8_t* dst_v, @@ -1332,9 +1332,9 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "dli %[tmp0], 0x0001000100010001 \n\t" "dmtc1 %[tmp0], %[ftmp12] \n\t" "1: \n\t" - "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t" - "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x07(%[src_rgb0]) \n\t" + "daddu %[src_rgb1], %[src_rgb], %[src_stride_rgb] \n\t" + "gsldrc1 %[src0], 0x00(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x07(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x00(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x07(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1352,8 +1352,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest0_u], %[dest0_u], %[mask_u] \n\t" "pmaddhw %[dest0_v], %[dest0_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 
0x08(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x0f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x08(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x0f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x08(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x0f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1380,8 +1380,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest0_v], %[src0], %[src1] \n\t" "psraw %[dest0_v], %[dest0_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x10(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x17(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x10(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x17(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x10(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x17(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1399,8 +1399,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest1_u], %[dest1_u], %[mask_u] \n\t" "pmaddhw %[dest1_v], %[dest1_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x18(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x1f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x18(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x1f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x18(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x1f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1427,8 +1427,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest1_v], %[src0], %[src1] \n\t" "psraw %[dest1_v], %[dest1_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x20(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x27(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x20(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x27(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x20(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x27(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1446,8 +1446,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest2_u], %[dest2_u], %[mask_u] \n\t" "pmaddhw %[dest2_v], %[dest2_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x28(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x2f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x28(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x2f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x28(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x2f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1474,8 +1474,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest2_v], %[src0], %[src1] \n\t" "psraw %[dest2_v], %[dest2_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x30(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x37(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x30(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x37(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x30(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x37(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1493,8 +1493,8 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest3_u], %[dest3_u], %[mask_u] \n\t" "pmaddhw %[dest3_v], %[dest3_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x38(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x3f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x38(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x3f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x38(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x3f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1533,7 +1533,7 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, "gssdlc1 %[dest0_v], 0x07(%[dst_v]) \n\t" "gssdrc1 %[dest0_v], 0x00(%[dst_v]) \n\t" - "daddiu %[src_rgb0], %[src_rgb0], 0x40 \n\t" + "daddiu %[src_rgb], %[src_rgb], 0x40 \n\t" "daddiu %[dst_u], %[dst_u], 0x08 \n\t" "daddiu %[dst_v], %[dst_v], 0x08 \n\t" "daddi %[width], %[width], -0x10 \n\t" @@ -1545,7 +1545,7 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, [dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]), 
[dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]), [ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0]) - : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb), + : [src_rgb] "r"(src_rgb), [src_stride_rgb] "r"(src_stride_rgb), [dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width), [mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value), [zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01), @@ -1553,7 +1553,7 @@ void ABGRToUVRow_MMI(const uint8_t* src_rgb0, : "memory"); } -void RGBAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { +void RGBAToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width) { uint64_t src, src_hi, src_lo; uint64_t dest0, dest1, dest2, dest3; const uint64_t value = 0x1080; @@ -1561,8 +1561,8 @@ void RGBAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { __asm__ volatile( "1: \n\t" - "gsldlc1 %[src], 0x07(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x00(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x07(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x00(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -1574,8 +1574,8 @@ void RGBAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest0], %[dest0], %[src] \n\t" "psrlw %[dest0], %[dest0], %[eight] \n\t" - "gsldlc1 %[src], 0x0f(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x08(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x0f(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x08(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -1587,8 +1587,8 @@ void RGBAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest1], %[dest1], %[src] \n\t" "psrlw %[dest1], %[dest1], %[eight] \n\t" - "gsldlc1 %[src], 0x17(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x10(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x17(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x10(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -1600,8 +1600,8 @@ void RGBAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest2], %[dest2], %[src] \n\t" "psrlw %[dest2], %[dest2], %[eight] \n\t" - "gsldlc1 %[src], 0x1f(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x18(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x1f(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x18(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_0 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -1619,20 +1619,20 @@ void RGBAToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "gssdlc1 %[dest0], 0x07(%[dst_y]) \n\t" "gssdrc1 %[dest0], 0x00(%[dst_y]) \n\t" - "daddiu %[src_argb0], %[src_argb0], 0x20 \n\t" + "daddiu %[src_argb], %[src_argb], 0x20 \n\t" "daddiu %[dst_y], %[dst_y], 0x08 \n\t" "daddi %[width], %[width], -0x08 \n\t" "bnez %[width], 1b \n\t" : [src] "=&f"(src), [src_hi] "=&f"(src_hi), [src_lo] "=&f"(src_lo), [dest0] "=&f"(dest0), [dest1] "=&f"(dest1), [dest2] "=&f"(dest2), [dest3] "=&f"(dest3) - : [src_argb0] "r"(src_argb0), [dst_y] "r"(dst_y), [width] "r"(width), + : [src_argb] "r"(src_argb), [dst_y] "r"(dst_y), [width] "r"(width), [mask] "f"(mask), [value] "f"(value), [eight] "f"(0x08), [zero] "f"(0x00) : "memory"); } -void RGBAToUVRow_MMI(const uint8_t* src_rgb0, +void RGBAToUVRow_MMI(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, uint8_t* 
dst_v, @@ -1648,9 +1648,9 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "dli %[tmp0], 0x0001000100010001 \n\t" "dmtc1 %[tmp0], %[ftmp12] \n\t" "1: \n\t" - "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t" - "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x07(%[src_rgb0]) \n\t" + "daddu %[src_rgb1], %[src_rgb], %[src_stride_rgb] \n\t" + "gsldrc1 %[src0], 0x00(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x07(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x00(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x07(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1668,8 +1668,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest0_u], %[dest0_u], %[mask_u] \n\t" "pmaddhw %[dest0_v], %[dest0_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x08(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x0f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x08(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x0f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x08(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x0f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1696,8 +1696,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest0_v], %[src1], %[src0] \n\t" "psraw %[dest0_v], %[dest0_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x10(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x17(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x10(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x17(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x10(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x17(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1715,8 +1715,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest1_u], %[dest1_u], %[mask_u] \n\t" "pmaddhw %[dest1_v], %[dest1_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x18(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x1f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x18(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x1f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x18(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x1f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1743,8 +1743,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest1_v], %[src1], %[src0] \n\t" "psraw %[dest1_v], %[dest1_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x20(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x27(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x20(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x27(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x20(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x27(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1762,8 +1762,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest2_u], %[dest2_u], %[mask_u] \n\t" "pmaddhw %[dest2_v], %[dest2_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x28(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x2f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x28(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x2f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x28(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x2f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1790,8 +1790,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest2_v], %[src1], %[src0] \n\t" "psraw %[dest2_v], %[dest2_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x30(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x37(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x30(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x37(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x30(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x37(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1809,8 +1809,8 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest3_u], %[dest3_u], %[mask_u] \n\t" "pmaddhw %[dest3_v], %[dest3_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 
0x38(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x3f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x38(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x3f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x38(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x3f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1849,7 +1849,7 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, "gssdlc1 %[dest0_v], 0x07(%[dst_v]) \n\t" "gssdrc1 %[dest0_v], 0x00(%[dst_v]) \n\t" - "daddiu %[src_rgb0], %[src_rgb0], 0x40 \n\t" + "daddiu %[src_rgb], %[src_rgb], 0x40 \n\t" "daddiu %[dst_u], %[dst_u], 0x08 \n\t" "daddiu %[dst_v], %[dst_v], 0x08 \n\t" "daddi %[width], %[width], -0x10 \n\t" @@ -1861,7 +1861,7 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, [dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]), [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]), [ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0]) - : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb), + : [src_rgb] "r"(src_rgb), [src_stride_rgb] "r"(src_stride_rgb), [dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width), [mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value), [zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01), @@ -1869,7 +1869,7 @@ void RGBAToUVRow_MMI(const uint8_t* src_rgb0, : "memory"); } -void RGB24ToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { +void RGB24ToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width) { uint64_t src, src_hi, src_lo; uint64_t dest0, dest1, dest2, dest3; const uint64_t value = 0x1080; @@ -1877,8 +1877,8 @@ void RGB24ToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { __asm__ volatile( "1: \n\t" - "gsldlc1 %[src], 0x07(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x00(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x07(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x00(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -1891,8 +1891,8 @@ void RGB24ToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest0], %[dest0], %[src] \n\t" "psrlw %[dest0], %[dest0], %[eight] \n\t" - "gsldlc1 %[src], 0x0d(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x06(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x0d(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x06(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -1905,8 +1905,8 @@ void RGB24ToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest1], %[dest1], %[src] \n\t" "psrlw %[dest1], %[dest1], %[eight] \n\t" - "gsldlc1 %[src], 0x13(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x0c(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x13(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x0c(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -1919,8 +1919,8 @@ void RGB24ToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest2], %[dest2], %[src] \n\t" "psrlw %[dest2], %[dest2], %[eight] \n\t" - "gsldlc1 %[src], 0x19(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x12(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x19(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x12(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -1939,20 +1939,20 @@ void RGB24ToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "gssdlc1 %[dest0], 0x07(%[dst_y]) 
\n\t" "gssdrc1 %[dest0], 0x00(%[dst_y]) \n\t" - "daddiu %[src_argb0], %[src_argb0], 0x18 \n\t" + "daddiu %[src_argb], %[src_argb], 0x18 \n\t" "daddiu %[dst_y], %[dst_y], 0x08 \n\t" "daddi %[width], %[width], -0x08 \n\t" "bnez %[width], 1b \n\t" : [src] "=&f"(src), [src_hi] "=&f"(src_hi), [src_lo] "=&f"(src_lo), [dest0] "=&f"(dest0), [dest1] "=&f"(dest1), [dest2] "=&f"(dest2), [dest3] "=&f"(dest3) - : [src_argb0] "r"(src_argb0), [dst_y] "r"(dst_y), [width] "r"(width), + : [src_argb] "r"(src_argb), [dst_y] "r"(dst_y), [width] "r"(width), [mask] "f"(mask), [value] "f"(value), [eight] "f"(0x08), [zero] "f"(0x00) : "memory"); } -void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, +void RGB24ToUVRow_MMI(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, uint8_t* dst_v, @@ -1968,9 +1968,9 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "dli %[tmp0], 0x0001000100010001 \n\t" "dmtc1 %[tmp0], %[ftmp12] \n\t" "1: \n\t" - "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t" - "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x07(%[src_rgb0]) \n\t" + "daddu %[src_rgb1], %[src_rgb], %[src_stride_rgb] \n\t" + "gsldrc1 %[src0], 0x00(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x07(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x00(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x07(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -1990,8 +1990,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest0_u], %[dest0_u], %[mask_u] \n\t" "pmaddhw %[dest0_v], %[dest0_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x06(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x0d(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x06(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x0d(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x06(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x0d(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2020,8 +2020,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest0_v], %[src1], %[src0] \n\t" "psraw %[dest0_v], %[dest0_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x0c(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x13(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x0c(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x13(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x0c(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x13(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2041,8 +2041,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest1_u], %[dest1_u], %[mask_u] \n\t" "pmaddhw %[dest1_v], %[dest1_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x12(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x19(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x12(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x19(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x12(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x19(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2071,8 +2071,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest1_v], %[src1], %[src0] \n\t" "psraw %[dest1_v], %[dest1_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x18(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x1f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x18(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x1f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x18(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x1f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2092,8 +2092,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest2_u], %[dest2_u], %[mask_u] \n\t" "pmaddhw %[dest2_v], %[dest2_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x1e(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x25(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x1e(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x25(%[src_rgb]) \n\t" 
"gsldrc1 %[src1], 0x1e(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x25(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2122,8 +2122,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest2_v], %[src1], %[src0] \n\t" "psraw %[dest2_v], %[dest2_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x24(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x2b(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x24(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x2b(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x24(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x2b(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2143,8 +2143,8 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest3_u], %[dest3_u], %[mask_u] \n\t" "pmaddhw %[dest3_v], %[dest3_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x2a(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x31(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x2a(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x31(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x2a(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x31(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2185,7 +2185,7 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, "gssdlc1 %[dest0_v], 0x07(%[dst_v]) \n\t" "gssdrc1 %[dest0_v], 0x00(%[dst_v]) \n\t" - "daddiu %[src_rgb0], %[src_rgb0], 0x30 \n\t" + "daddiu %[src_rgb], %[src_rgb], 0x30 \n\t" "daddiu %[dst_u], %[dst_u], 0x08 \n\t" "daddiu %[dst_v], %[dst_v], 0x08 \n\t" "daddi %[width], %[width], -0x10 \n\t" @@ -2197,7 +2197,7 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, [dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]), [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]), [ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0]) - : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb), + : [src_rgb] "r"(src_rgb), [src_stride_rgb] "r"(src_stride_rgb), [dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width), [mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value), [zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01), @@ -2205,7 +2205,7 @@ void RGB24ToUVRow_MMI(const uint8_t* src_rgb0, : "memory"); } -void RAWToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { +void RAWToYRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width) { uint64_t src, src_hi, src_lo; uint64_t dest0, dest1, dest2, dest3; const uint64_t value = 0x1080; @@ -2213,8 +2213,8 @@ void RAWToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { __asm__ volatile( "1: \n\t" - "gsldlc1 %[src], 0x07(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x00(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x07(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x00(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -2227,8 +2227,8 @@ void RAWToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest0], %[dest0], %[src] \n\t" "psrlw %[dest0], %[dest0], %[eight] \n\t" - "gsldlc1 %[src], 0x0d(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x06(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x0d(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x06(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -2241,8 +2241,8 @@ void RAWToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest1], %[dest1], %[src] \n\t" "psrlw %[dest1], %[dest1], %[eight] \n\t" - "gsldlc1 %[src], 0x13(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x0c(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x13(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x0c(%[src_argb]) 
\n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -2255,8 +2255,8 @@ void RAWToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "paddw %[dest2], %[dest2], %[src] \n\t" "psrlw %[dest2], %[dest2], %[eight] \n\t" - "gsldlc1 %[src], 0x19(%[src_argb0]) \n\t" - "gsldrc1 %[src], 0x12(%[src_argb0]) \n\t" + "gsldlc1 %[src], 0x19(%[src_argb]) \n\t" + "gsldrc1 %[src], 0x12(%[src_argb]) \n\t" "punpcklbh %[src_lo], %[src], %[zero] \n\t" "pinsrh_3 %[src_lo], %[src_lo], %[value] \n\t" "pmaddhw %[src_lo], %[src_lo], %[mask] \n\t" @@ -2275,20 +2275,20 @@ void RAWToYRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { "gssdlc1 %[dest0], 0x07(%[dst_y]) \n\t" "gssdrc1 %[dest0], 0x00(%[dst_y]) \n\t" - "daddiu %[src_argb0], %[src_argb0], 0x18 \n\t" + "daddiu %[src_argb], %[src_argb], 0x18 \n\t" "daddiu %[dst_y], %[dst_y], 0x08 \n\t" "daddi %[width], %[width], -0x08 \n\t" "bnez %[width], 1b \n\t" : [src] "=&f"(src), [src_hi] "=&f"(src_hi), [src_lo] "=&f"(src_lo), [dest0] "=&f"(dest0), [dest1] "=&f"(dest1), [dest2] "=&f"(dest2), [dest3] "=&f"(dest3) - : [src_argb0] "r"(src_argb0), [dst_y] "r"(dst_y), [width] "r"(width), + : [src_argb] "r"(src_argb), [dst_y] "r"(dst_y), [width] "r"(width), [mask] "f"(mask), [value] "f"(value), [eight] "f"(0x08), [zero] "f"(0x00) : "memory"); } -void RAWToUVRow_MMI(const uint8_t* src_rgb0, +void RAWToUVRow_MMI(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, uint8_t* dst_v, @@ -2304,9 +2304,9 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "dli %[tmp0], 0x0001000100010001 \n\t" "dmtc1 %[tmp0], %[ftmp12] \n\t" "1: \n\t" - "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t" - "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x07(%[src_rgb0]) \n\t" + "daddu %[src_rgb1], %[src_rgb], %[src_stride_rgb] \n\t" + "gsldrc1 %[src0], 0x00(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x07(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x00(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x07(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2326,8 +2326,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest0_u], %[dest0_u], %[mask_u] \n\t" "pmaddhw %[dest0_v], %[dest0_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x06(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x0d(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x06(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x0d(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x06(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x0d(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2356,8 +2356,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest0_v], %[src0], %[src1] \n\t" "psraw %[dest0_v], %[dest0_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x0c(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x13(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x0c(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x13(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x0c(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x13(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2377,8 +2377,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest1_u], %[dest1_u], %[mask_u] \n\t" "pmaddhw %[dest1_v], %[dest1_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x12(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x19(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x12(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x19(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x12(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x19(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2407,8 +2407,8 @@ void RAWToUVRow_MMI(const uint8_t* 
src_rgb0, "psubw %[dest1_v], %[src0], %[src1] \n\t" "psraw %[dest1_v], %[dest1_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x18(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x1f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x18(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x1f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x18(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x1f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2428,8 +2428,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest2_u], %[dest2_u], %[mask_u] \n\t" "pmaddhw %[dest2_v], %[dest2_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x1e(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x25(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x1e(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x25(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x1e(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x25(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2458,8 +2458,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "psubw %[dest2_v], %[src0], %[src1] \n\t" "psraw %[dest2_v], %[dest2_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x24(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x2b(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x24(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x2b(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x24(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x2b(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2479,8 +2479,8 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest3_u], %[dest3_u], %[mask_u] \n\t" "pmaddhw %[dest3_v], %[dest3_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x2a(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x31(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x2a(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x31(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x2a(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x31(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2521,7 +2521,7 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, "gssdlc1 %[dest0_v], 0x07(%[dst_v]) \n\t" "gssdrc1 %[dest0_v], 0x00(%[dst_v]) \n\t" - "daddiu %[src_rgb0], %[src_rgb0], 0x30 \n\t" + "daddiu %[src_rgb], %[src_rgb], 0x30 \n\t" "daddiu %[dst_u], %[dst_u], 0x08 \n\t" "daddiu %[dst_v], %[dst_v], 0x08 \n\t" "daddi %[width], %[width], -0x10 \n\t" @@ -2533,7 +2533,7 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, [dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]), [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]), [ftmp12] "=&f"(ftmp[12]), [tmp0] "=&r"(tmp[0]) - : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb), + : [src_rgb] "r"(src_rgb), [src_stride_rgb] "r"(src_stride_rgb), [dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width), [mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value), [zero] "f"(0x00), [eight] "f"(0x08), [one] "f"(0x01), @@ -2541,7 +2541,7 @@ void RAWToUVRow_MMI(const uint8_t* src_rgb0, : "memory"); } -void ARGBToYJRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { +void ARGBToYJRow_MMI(const uint8_t* src_argb, uint8_t* dst_y, int width) { uint64_t src, src_hi, src_lo; uint64_t dest, dest0, dest1, dest2, dest3; uint64_t tmp0, tmp1; @@ -2618,13 +2618,13 @@ void ARGBToYJRow_MMI(const uint8_t* src_argb0, uint8_t* dst_y, int width) { [src_lo] "=&f"(src_lo), [dest0] "=&f"(dest0), [dest1] "=&f"(dest1), [dest2] "=&f"(dest2), [dest3] "=&f"(dest3), [tmp0] "=&f"(tmp0), [tmp1] "=&f"(tmp1) - : [src_ptr] "r"(src_argb0), [dst_ptr] "r"(dst_y), [mask0] "f"(mask0), + : [src_ptr] "r"(src_argb), [dst_ptr] "r"(dst_y), [mask0] "f"(mask0), [mask1] "f"(mask1), [shift] "f"(shift), [value] "f"(value), [width] "r"(width) : "memory"); } -void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, 
+void ARGBToUVJRow_MMI(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, uint8_t* dst_v, @@ -2637,9 +2637,9 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, __asm__ volatile( "1: \n\t" - "daddu %[src_rgb1], %[src_rgb0], %[src_stride_rgb] \n\t" - "gsldrc1 %[src0], 0x00(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x07(%[src_rgb0]) \n\t" + "daddu %[src_rgb1], %[src_rgb], %[src_stride_rgb] \n\t" + "gsldrc1 %[src0], 0x00(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x07(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x00(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x07(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2655,8 +2655,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest0_u], %[dest0_u], %[mask_u] \n\t" "pmaddhw %[dest0_v], %[dest0_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x08(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x0f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x08(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x0f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x08(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x0f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2681,8 +2681,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "psubw %[dest0_v], %[src1], %[src0] \n\t" "psraw %[dest0_v], %[dest0_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x10(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x17(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x10(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x17(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x10(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x17(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2698,8 +2698,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest1_u], %[dest1_u], %[mask_u] \n\t" "pmaddhw %[dest1_v], %[dest1_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x18(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x1f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x18(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x1f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x18(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x1f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2724,8 +2724,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "psubw %[dest1_v], %[src1], %[src0] \n\t" "psraw %[dest1_v], %[dest1_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x20(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x27(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x20(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x27(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x20(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x27(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2741,8 +2741,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest2_u], %[dest2_u], %[mask_u] \n\t" "pmaddhw %[dest2_v], %[dest2_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x28(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x2f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x28(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x2f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x28(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x2f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2767,8 +2767,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "psubw %[dest2_v], %[src1], %[src0] \n\t" "psraw %[dest2_v], %[dest2_v], %[eight] \n\t" - "gsldrc1 %[src0], 0x30(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x37(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x30(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x37(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x30(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x37(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2784,8 +2784,8 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "pmaddhw %[dest3_u], %[dest3_u], %[mask_u] \n\t" "pmaddhw %[dest3_v], 
%[dest3_v], %[mask_v] \n\t" - "gsldrc1 %[src0], 0x38(%[src_rgb0]) \n\t" - "gsldlc1 %[src0], 0x3f(%[src_rgb0]) \n\t" + "gsldrc1 %[src0], 0x38(%[src_rgb]) \n\t" + "gsldlc1 %[src0], 0x3f(%[src_rgb]) \n\t" "gsldrc1 %[src1], 0x38(%[src_rgb1]) \n\t" "gsldlc1 %[src1], 0x3f(%[src_rgb1]) \n\t" "punpcklbh %[src_lo], %[src0], %[zero] \n\t" @@ -2822,7 +2822,7 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, "gssdlc1 %[dest0_v], 0x07(%[dst_v]) \n\t" "gssdrc1 %[dest0_v], 0x00(%[dst_v]) \n\t" - "daddiu %[src_rgb0], %[src_rgb0], 0x40 \n\t" + "daddiu %[src_rgb], %[src_rgb], 0x40 \n\t" "daddiu %[dst_u], %[dst_u], 0x08 \n\t" "daddiu %[dst_v], %[dst_v], 0x08 \n\t" "daddi %[width], %[width], -0x10 \n\t" @@ -2833,7 +2833,7 @@ void ARGBToUVJRow_MMI(const uint8_t* src_rgb0, [dest1_u] "=&f"(ftmp[6]), [dest1_v] "=&f"(ftmp[7]), [dest2_u] "=&f"(ftmp[8]), [dest2_v] "=&f"(ftmp[9]), [dest3_u] "=&f"(ftmp[10]), [dest3_v] "=&f"(ftmp[11]) - : [src_rgb0] "r"(src_rgb0), [src_stride_rgb] "r"(src_stride_rgb), + : [src_rgb] "r"(src_rgb), [src_stride_rgb] "r"(src_stride_rgb), [dst_u] "r"(dst_u), [dst_v] "r"(dst_v), [width] "r"(width), [mask_u] "f"(mask_u), [mask_v] "f"(mask_v), [value] "f"(value), [zero] "f"(0x00), [eight] "f"(0x08), @@ -4386,7 +4386,7 @@ void ARGBShadeRow_MMI(const uint8_t* src_argb, : "memory"); } -void ARGBMultiplyRow_MMI(const uint8_t* src_argb0, +void ARGBMultiplyRow_MMI(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -4422,12 +4422,12 @@ void ARGBMultiplyRow_MMI(const uint8_t* src_argb0, [src1_hi] "=&f"(src1_hi), [src1_lo] "=&f"(src1_lo), [dest_hi] "=&f"(dest_hi), [dest_lo] "=&f"(dest_lo), [src0] "=&f"(src0), [src1] "=&f"(src1), [dest] "=&f"(dest) - : [src0_ptr] "r"(src_argb0), [src1_ptr] "r"(src_argb1), + : [src0_ptr] "r"(src_argb), [src1_ptr] "r"(src_argb1), [dst_ptr] "r"(dst_argb), [width] "r"(width), [mask] "f"(mask) : "memory"); } -void ARGBAddRow_MMI(const uint8_t* src_argb0, +void ARGBAddRow_MMI(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -4449,12 +4449,12 @@ void ARGBAddRow_MMI(const uint8_t* src_argb0, "daddi %[width], %[width], -0x02 \n\t" "bnez %[width], 1b \n\t" : [src0] "=&f"(src0), [src1] "=&f"(src1), [dest] "=&f"(dest) - : [src0_ptr] "r"(src_argb0), [src1_ptr] "r"(src_argb1), + : [src0_ptr] "r"(src_argb), [src1_ptr] "r"(src_argb1), [dst_ptr] "r"(dst_argb), [width] "r"(width) : "memory"); } -void ARGBSubtractRow_MMI(const uint8_t* src_argb0, +void ARGBSubtractRow_MMI(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -4476,7 +4476,7 @@ void ARGBSubtractRow_MMI(const uint8_t* src_argb0, "daddi %[width], %[width], -0x02 \n\t" "bnez %[width], 1b \n\t" : [src0] "=&f"(src0), [src1] "=&f"(src1), [dest] "=&f"(dest) - : [src0_ptr] "r"(src_argb0), [src1_ptr] "r"(src_argb1), + : [src0_ptr] "r"(src_argb), [src1_ptr] "r"(src_argb1), [dst_ptr] "r"(dst_argb), [width] "r"(width) : "memory"); } @@ -5552,10 +5552,10 @@ void UYVYToYRow_MMI(const uint8_t* src_uyvy, uint8_t* dst_y, int width) { : "memory"); } -// Blend src_argb0 over src_argb1 and store to dst_argb. -// dst_argb may be src_argb0 or src_argb1. +// Blend src_argb over src_argb1 and store to dst_argb. +// dst_argb may be src_argb or src_argb1. // This code mimics the SSSE3 version for better testability. 
-void ARGBBlendRow_MMI(const uint8_t* src_argb0, +void ARGBBlendRow_MMI(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -5608,7 +5608,7 @@ void ARGBBlendRow_MMI(const uint8_t* src_argb0, [dest] "=&f"(dest), [src0_hi] "=&f"(src0_hi), [src0_lo] "=&f"(src0_lo), [src1_hi] "=&f"(src1_hi), [src1_lo] "=&f"(src1_lo), [dest_hi] "=&f"(dest_hi), [dest_lo] "=&f"(dest_lo) - : [src0_ptr] "r"(src_argb0), [src1_ptr] "r"(src_argb1), + : [src0_ptr] "r"(src_argb), [src1_ptr] "r"(src_argb1), [dst_ptr] "r"(dst_argb), [mask0] "f"(mask0), [mask1] "f"(mask1), [mask2] "f"(mask2), [mask3] "f"(mask3), [mask4] "f"(mask4), [shift] "f"(shift), [width] "r"(width) diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_msa.cc b/TMessagesProj/jni/third_party/libyuv/source/row_msa.cc index fe6df93a60..b7d5bb5ecf 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_msa.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/row_msa.cc @@ -24,16 +24,14 @@ extern "C" { #define ALPHA_VAL (-1) // Fill YUV -> RGB conversion constants into vectors -#define YUVTORGB_SETUP(yuvconst, ub, vr, ug, vg, bb, bg, br, yg) \ - { \ - ub = __msa_fill_w(yuvconst->kUVToB[0]); \ - vr = __msa_fill_w(yuvconst->kUVToR[1]); \ - ug = __msa_fill_w(yuvconst->kUVToG[0]); \ - vg = __msa_fill_w(yuvconst->kUVToG[1]); \ - bb = __msa_fill_w(yuvconst->kUVBiasB[0]); \ - bg = __msa_fill_w(yuvconst->kUVBiasG[0]); \ - br = __msa_fill_w(yuvconst->kUVBiasR[0]); \ - yg = __msa_fill_w(yuvconst->kYToRgb[0]); \ +#define YUVTORGB_SETUP(yuvconst, ub, vr, ug, vg, yg, yb) \ + { \ + ub = __msa_fill_w(yuvconst->kUVToB[0]); \ + vr = __msa_fill_w(yuvconst->kUVToR[1]); \ + ug = __msa_fill_w(yuvconst->kUVToG[0]); \ + vg = __msa_fill_w(yuvconst->kUVToG[1]); \ + yg = __msa_fill_w(yuvconst->kYToRgb[0]); \ + yb = __msa_fill_w(yuvconst->kYBiasToRgb[0]); \ } // Load YUV 422 pixel data @@ -70,54 +68,52 @@ extern "C" { } // Convert 8 pixels of YUV 420 to RGB. 
-#define YUVTORGB(in_y, in_uv, ubvr, ugvg, bb, bg, br, yg, out_b, out_g, out_r) \ - { \ - v8i16 vec0_m, vec1_m; \ - v4i32 reg0_m, reg1_m, reg2_m, reg3_m, reg4_m; \ - v4i32 reg5_m, reg6_m, reg7_m; \ - v16i8 zero_m = {0}; \ - \ - vec0_m = (v8i16)__msa_ilvr_b((v16i8)in_y, (v16i8)in_y); \ - vec1_m = (v8i16)__msa_ilvr_b((v16i8)zero_m, (v16i8)in_uv); \ - reg0_m = (v4i32)__msa_ilvr_h((v8i16)zero_m, (v8i16)vec0_m); \ - reg1_m = (v4i32)__msa_ilvl_h((v8i16)zero_m, (v8i16)vec0_m); \ - reg2_m = (v4i32)__msa_ilvr_h((v8i16)zero_m, (v8i16)vec1_m); \ - reg3_m = (v4i32)__msa_ilvl_h((v8i16)zero_m, (v8i16)vec1_m); \ - reg0_m *= yg; \ - reg1_m *= yg; \ - reg2_m *= ubvr; \ - reg3_m *= ubvr; \ - reg0_m = __msa_srai_w(reg0_m, 16); \ - reg1_m = __msa_srai_w(reg1_m, 16); \ - reg4_m = __msa_dotp_s_w((v8i16)vec1_m, (v8i16)ugvg); \ - reg5_m = __msa_ilvev_w(reg2_m, reg2_m); \ - reg6_m = __msa_ilvev_w(reg3_m, reg3_m); \ - reg7_m = __msa_ilvr_w(reg4_m, reg4_m); \ - reg2_m = __msa_ilvod_w(reg2_m, reg2_m); \ - reg3_m = __msa_ilvod_w(reg3_m, reg3_m); \ - reg4_m = __msa_ilvl_w(reg4_m, reg4_m); \ - reg5_m = reg0_m - reg5_m; \ - reg6_m = reg1_m - reg6_m; \ - reg2_m = reg0_m - reg2_m; \ - reg3_m = reg1_m - reg3_m; \ - reg7_m = reg0_m - reg7_m; \ - reg4_m = reg1_m - reg4_m; \ - reg5_m += bb; \ - reg6_m += bb; \ - reg7_m += bg; \ - reg4_m += bg; \ - reg2_m += br; \ - reg3_m += br; \ - reg5_m = __msa_srai_w(reg5_m, 6); \ - reg6_m = __msa_srai_w(reg6_m, 6); \ - reg7_m = __msa_srai_w(reg7_m, 6); \ - reg4_m = __msa_srai_w(reg4_m, 6); \ - reg2_m = __msa_srai_w(reg2_m, 6); \ - reg3_m = __msa_srai_w(reg3_m, 6); \ - CLIP_0TO255(reg5_m, reg6_m, reg7_m, reg4_m, reg2_m, reg3_m); \ - out_b = __msa_pckev_h((v8i16)reg6_m, (v8i16)reg5_m); \ - out_g = __msa_pckev_h((v8i16)reg4_m, (v8i16)reg7_m); \ - out_r = __msa_pckev_h((v8i16)reg3_m, (v8i16)reg2_m); \ +#define YUVTORGB(in_y, in_uv, ubvr, ugvg, yg, yb, out_b, out_g, out_r) \ + { \ + v8i16 vec0_m, vec1_m; \ + v4i32 reg0_m, reg1_m, reg2_m, reg3_m, reg4_m; \ + v4i32 reg5_m, reg6_m, reg7_m; \ + v16i8 temp_m, zero_m = {0}; \ + \ + vec0_m = (v8i16)__msa_ilvr_b((v16i8)in_y, (v16i8)in_y); \ + vec1_m = (v8i16)__msa_ilvr_b((v16i8)zero_m, (v16i8)in_uv); \ + reg0_m = (v4i32)__msa_ilvr_h((v8i16)zero_m, (v8i16)vec0_m); \ + reg1_m = (v4i32)__msa_ilvl_h((v8i16)zero_m, (v8i16)vec0_m); \ + vec1_m = (v8i16)__msa_subv_h(vec1_m, const_0x80); \ + temp_m = (v16i8)__msa_clti_s_h(vec1_m, 0); \ + reg2_m = (v4i32)__msa_ilvr_h((v8i16)temp_m, (v8i16)vec1_m); \ + reg3_m = (v4i32)__msa_ilvl_h((v8i16)temp_m, (v8i16)vec1_m); \ + reg0_m *= yg; \ + reg1_m *= yg; \ + reg2_m *= ubvr; \ + reg3_m *= ubvr; \ + reg0_m = __msa_srai_w(reg0_m, 16); \ + reg1_m = __msa_srai_w(reg1_m, 16); \ + reg0_m += yb; \ + reg1_m += yb; \ + reg4_m = __msa_dotp_s_w((v8i16)vec1_m, (v8i16)ugvg); \ + reg5_m = __msa_ilvev_w(reg2_m, reg2_m); \ + reg6_m = __msa_ilvev_w(reg3_m, reg3_m); \ + reg7_m = __msa_ilvr_w(reg4_m, reg4_m); \ + reg2_m = __msa_ilvod_w(reg2_m, reg2_m); \ + reg3_m = __msa_ilvod_w(reg3_m, reg3_m); \ + reg4_m = __msa_ilvl_w(reg4_m, reg4_m); \ + reg5_m = reg0_m + reg5_m; \ + reg6_m = reg1_m + reg6_m; \ + reg2_m = reg0_m + reg2_m; \ + reg3_m = reg1_m + reg3_m; \ + reg7_m = reg0_m - reg7_m; \ + reg4_m = reg1_m - reg4_m; \ + reg5_m = __msa_srai_w(reg5_m, 6); \ + reg6_m = __msa_srai_w(reg6_m, 6); \ + reg7_m = __msa_srai_w(reg7_m, 6); \ + reg4_m = __msa_srai_w(reg4_m, 6); \ + reg2_m = __msa_srai_w(reg2_m, 6); \ + reg3_m = __msa_srai_w(reg3_m, 6); \ + CLIP_0TO255(reg5_m, reg6_m, reg7_m, reg4_m, reg2_m, reg3_m); \ + out_b = 
__msa_pckev_h((v8i16)reg6_m, (v8i16)reg5_m); \ + out_g = __msa_pckev_h((v8i16)reg4_m, (v8i16)reg7_m); \ + out_r = __msa_pckev_h((v8i16)reg3_m, (v8i16)reg2_m); \ } // Pack and Store 8 ARGB values. @@ -284,6 +280,34 @@ extern "C" { out_v = (v16u8)__msa_insert_d(zero_m, 0, (int64_t)v_m); \ } +#define RGBTOUV(_tmpb, _tmpg, _tmpr, _nexb, _nexg, _nexr, _dst0) \ + { \ + v16u8 _tmp0, _tmp1, _tmp2, _tmp3, _tmp4, _tmp5; \ + v8i16 _reg0, _reg1, _reg2, _reg3, _reg4, _reg5; \ + _tmp0 = (v16u8)__msa_ilvev_b(_tmpb, _nexb); \ + _tmp1 = (v16u8)__msa_ilvod_b(_tmpb, _nexb); \ + _tmp2 = (v16u8)__msa_ilvev_b(_tmpg, _nexg); \ + _tmp3 = (v16u8)__msa_ilvod_b(_tmpg, _nexg); \ + _tmp4 = (v16u8)__msa_ilvev_b(_tmpr, _nexr); \ + _tmp5 = (v16u8)__msa_ilvod_b(_tmpr, _nexr); \ + _reg0 = (v8i16)__msa_hadd_u_h(_tmp0, _tmp0); \ + _reg1 = (v8i16)__msa_hadd_u_h(_tmp1, _tmp1); \ + _reg2 = (v8i16)__msa_hadd_u_h(_tmp2, _tmp2); \ + _reg3 = (v8i16)__msa_hadd_u_h(_tmp3, _tmp3); \ + _reg4 = (v8i16)__msa_hadd_u_h(_tmp4, _tmp4); \ + _reg5 = (v8i16)__msa_hadd_u_h(_tmp5, _tmp5); \ + _reg0 = (v8i16)__msa_aver_u_h(_reg0, _reg1); \ + _reg2 = (v8i16)__msa_aver_u_h(_reg2, _reg3); \ + _reg4 = (v8i16)__msa_aver_u_h(_reg4, _reg5); \ + _reg1 = const_8080 + const_112 * _reg0; \ + _reg3 = const_8080 + const_112 * _reg4; \ + _reg1 = (v8i16)__msa_msubv_h(_reg1, const_74, _reg2); \ + _reg3 = (v8i16)__msa_msubv_h(_reg3, const_94, _reg2); \ + _reg1 = (v8i16)__msa_msubv_h(_reg1, const_38, _reg4); \ + _reg3 = (v8i16)__msa_msubv_h(_reg3, const_18, _reg0); \ + _dst0 = (v16u8)__msa_pckod_b(_reg3, _reg1); \ + } + void MirrorRow_MSA(const uint8_t* src, uint8_t* dst, int width) { int x; v16u8 src0, src1, src2, src3; @@ -389,20 +413,19 @@ void I422ToARGBRow_MSA(const uint8_t* src_y, int x; v16u8 src0, src1, src2; v8i16 vec0, vec1, vec2; - v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg; + v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb; v4i32 vec_ubvr, vec_ugvg; v16u8 alpha = (v16u8)__msa_ldi_b(ALPHA_VAL); + v8i16 const_0x80 = __msa_ldi_h(0x80); - YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, - vec_br, vec_yg); + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); vec_ubvr = __msa_ilvr_w(vec_vr, vec_ub); vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug); for (x = 0; x < width; x += 8) { READYUV422(src_y, src_u, src_v, src0, src1, src2); src1 = (v16u8)__msa_ilvr_b((v16i8)src2, (v16i8)src1); - YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_bb, vec_bg, vec_br, vec_yg, - vec0, vec1, vec2); + YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_yg, vec_yb, vec0, vec1, vec2); STOREARGB(vec0, vec1, vec2, alpha, dst_argb); src_y += 8; src_u += 4; @@ -420,20 +443,19 @@ void I422ToRGBARow_MSA(const uint8_t* src_y, int x; v16u8 src0, src1, src2; v8i16 vec0, vec1, vec2; - v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg; + v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb; v4i32 vec_ubvr, vec_ugvg; v16u8 alpha = (v16u8)__msa_ldi_b(ALPHA_VAL); + v8i16 const_0x80 = __msa_ldi_h(0x80); - YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, - vec_br, vec_yg); + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); vec_ubvr = __msa_ilvr_w(vec_vr, vec_ub); vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug); for (x = 0; x < width; x += 8) { READYUV422(src_y, src_u, src_v, src0, src1, src2); src1 = (v16u8)__msa_ilvr_b((v16i8)src2, (v16i8)src1); - YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_bb, vec_bg, vec_br, vec_yg, 
- vec0, vec1, vec2); + YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_yg, vec_yb, vec0, vec1, vec2); STOREARGB(alpha, vec0, vec1, vec2, dst_argb); src_y += 8; src_u += 4; @@ -453,12 +475,12 @@ void I422AlphaToARGBRow_MSA(const uint8_t* src_y, int64_t data_a; v16u8 src0, src1, src2, src3; v8i16 vec0, vec1, vec2; - v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg; + v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb; v4i32 vec_ubvr, vec_ugvg; v4i32 zero = {0}; + v8i16 const_0x80 = __msa_ldi_h(0x80); - YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, - vec_br, vec_yg); + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); vec_ubvr = __msa_ilvr_w(vec_vr, vec_ub); vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug); @@ -467,8 +489,7 @@ void I422AlphaToARGBRow_MSA(const uint8_t* src_y, READYUV422(src_y, src_u, src_v, src0, src1, src2); src1 = (v16u8)__msa_ilvr_b((v16i8)src2, (v16i8)src1); src3 = (v16u8)__msa_insert_d((v2i64)zero, 0, data_a); - YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_bb, vec_bg, vec_br, vec_yg, - vec0, vec1, vec2); + YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_yg, vec_yb, vec0, vec1, vec2); src3 = (v16u8)__msa_ilvr_b((v16i8)src3, (v16i8)src3); STOREARGB(vec0, vec1, vec2, src3, dst_argb); src_y += 8; @@ -489,17 +510,17 @@ void I422ToRGB24Row_MSA(const uint8_t* src_y, int64_t data_u, data_v; v16u8 src0, src1, src2, src3, src4, dst0, dst1, dst2; v8i16 vec0, vec1, vec2, vec3, vec4, vec5; - v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg; + v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb; v4i32 vec_ubvr, vec_ugvg; v16u8 reg0, reg1, reg2, reg3; v2i64 zero = {0}; + v8i16 const_0x80 = __msa_ldi_h(0x80); v16i8 shuffler0 = {0, 1, 16, 2, 3, 17, 4, 5, 18, 6, 7, 19, 8, 9, 20, 10}; v16i8 shuffler1 = {0, 21, 1, 2, 22, 3, 4, 23, 5, 6, 24, 7, 8, 25, 9, 10}; v16i8 shuffler2 = {26, 6, 7, 27, 8, 9, 28, 10, 11, 29, 12, 13, 30, 14, 15, 31}; - YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, - vec_br, vec_yg); + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); vec_ubvr = __msa_ilvr_w(vec_vr, vec_ub); vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug); @@ -512,10 +533,8 @@ void I422ToRGB24Row_MSA(const uint8_t* src_y, src1 = (v16u8)__msa_ilvr_b((v16i8)src2, (v16i8)src1); src3 = (v16u8)__msa_sldi_b((v16i8)src0, (v16i8)src0, 8); src4 = (v16u8)__msa_sldi_b((v16i8)src1, (v16i8)src1, 8); - YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_bb, vec_bg, vec_br, vec_yg, - vec0, vec1, vec2); - YUVTORGB(src3, src4, vec_ubvr, vec_ugvg, vec_bb, vec_bg, vec_br, vec_yg, - vec3, vec4, vec5); + YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_yg, vec_yb, vec0, vec1, vec2); + YUVTORGB(src3, src4, vec_ubvr, vec_ugvg, vec_yg, vec_yb, vec3, vec4, vec5); reg0 = (v16u8)__msa_ilvev_b((v16i8)vec1, (v16i8)vec0); reg2 = (v16u8)__msa_ilvev_b((v16i8)vec4, (v16i8)vec3); reg3 = (v16u8)__msa_pckev_b((v16i8)vec5, (v16i8)vec2); @@ -542,24 +561,23 @@ void I422ToRGB565Row_MSA(const uint8_t* src_y, int x; v16u8 src0, src1, src2, dst0; v8i16 vec0, vec1, vec2; - v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg; + v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb; v4i32 vec_ubvr, vec_ugvg; + v8i16 const_0x80 = __msa_ldi_h(0x80); - YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, - vec_br, vec_yg); + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); vec_ubvr = __msa_ilvr_w(vec_vr, vec_ub); 
vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug); for (x = 0; x < width; x += 8) { READYUV422(src_y, src_u, src_v, src0, src1, src2); src1 = (v16u8)__msa_ilvr_b((v16i8)src2, (v16i8)src1); - YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_bb, vec_bg, vec_br, vec_yg, - vec0, vec2, vec1); - vec0 = __msa_srai_h(vec0, 3); - vec1 = __msa_srai_h(vec1, 3); - vec2 = __msa_srai_h(vec2, 2); - vec1 = __msa_slli_h(vec1, 11); - vec2 = __msa_slli_h(vec2, 5); + YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_yg, vec_yb, vec0, vec1, vec2); + vec0 = __msa_srli_h(vec0, 3); + vec1 = __msa_srli_h(vec1, 2); + vec2 = __msa_srli_h(vec2, 3); + vec2 = __msa_slli_h(vec2, 11); + vec1 = __msa_slli_h(vec1, 5); vec0 |= vec1; dst0 = (v16u8)(vec2 | vec0); ST_UB(dst0, dst_rgb565); @@ -581,25 +599,24 @@ void I422ToARGB4444Row_MSA(const uint8_t* src_y, v16u8 src0, src1, src2, dst0; v8i16 vec0, vec1, vec2; v8u16 reg0, reg1, reg2; - v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg; + v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb; v4i32 vec_ubvr, vec_ugvg; v8u16 const_0xF000 = (v8u16)__msa_fill_h(0xF000); + v8u16 mask = (v8u16)__msa_fill_h(0x00F0); + v8i16 const_0x80 = __msa_ldi_h(0x80); - YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, - vec_br, vec_yg); + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); vec_ubvr = __msa_ilvr_w(vec_vr, vec_ub); vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug); for (x = 0; x < width; x += 8) { READYUV422(src_y, src_u, src_v, src0, src1, src2); src1 = (v16u8)__msa_ilvr_b((v16i8)src2, (v16i8)src1); - YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_bb, vec_bg, vec_br, vec_yg, - vec0, vec1, vec2); - reg0 = (v8u16)__msa_srai_h(vec0, 4); - reg1 = (v8u16)__msa_srai_h(vec1, 4); - reg2 = (v8u16)__msa_srai_h(vec2, 4); - reg1 = (v8u16)__msa_slli_h((v8i16)reg1, 4); - reg2 = (v8u16)__msa_slli_h((v8i16)reg2, 8); + YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_yg, vec_yb, vec0, vec1, vec2); + reg0 = (v8u16)__msa_srli_h(vec0, 4); + reg2 = (v8u16)__msa_srli_h(vec2, 4); + reg1 = (v8u16)__msa_and_v(vec1, mask); + reg2 = (v8u16)__msa_slli_h(reg2, 8); reg1 |= const_0xF000; reg0 |= reg2; dst0 = (v16u8)(reg1 | reg0); @@ -621,23 +638,22 @@ void I422ToARGB1555Row_MSA(const uint8_t* src_y, v16u8 src0, src1, src2, dst0; v8i16 vec0, vec1, vec2; v8u16 reg0, reg1, reg2; - v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg; + v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb; v4i32 vec_ubvr, vec_ugvg; v8u16 const_0x8000 = (v8u16)__msa_fill_h(0x8000); + v8i16 const_0x80 = __msa_ldi_h(0x80); - YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, - vec_br, vec_yg); + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); vec_ubvr = __msa_ilvr_w(vec_vr, vec_ub); vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug); for (x = 0; x < width; x += 8) { READYUV422(src_y, src_u, src_v, src0, src1, src2); src1 = (v16u8)__msa_ilvr_b((v16i8)src2, (v16i8)src1); - YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_bb, vec_bg, vec_br, vec_yg, - vec0, vec1, vec2); - reg0 = (v8u16)__msa_srai_h(vec0, 3); - reg1 = (v8u16)__msa_srai_h(vec1, 3); - reg2 = (v8u16)__msa_srai_h(vec2, 3); + YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_yg, vec_yb, vec0, vec1, vec2); + reg0 = (v8u16)__msa_srli_h(vec0, 3); + reg1 = (v8u16)__msa_srli_h(vec1, 3); + reg2 = (v8u16)__msa_srli_h(vec2, 3); reg1 = (v8u16)__msa_slli_h((v8i16)reg1, 5); reg2 = (v8u16)__msa_slli_h((v8i16)reg2, 10); reg1 |= 
const_0x8000; @@ -781,7 +797,7 @@ void UYVYToUV422Row_MSA(const uint8_t* src_uyvy, } } -void ARGBToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { +void ARGBToYRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width) { int x; v16u8 src0, src1, src2, src3, vec0, vec1, vec2, vec3, dst0; v8u16 reg0, reg1, reg2, reg3, reg4, reg5; @@ -792,10 +808,10 @@ void ARGBToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { v8u16 const_0x1080 = (v8u16)__msa_fill_h(0x1080); for (x = 0; x < width; x += 16) { - src0 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 0); - src1 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 16); - src2 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 32); - src3 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 48); + src0 = (v16u8)__msa_ld_b((v16u8*)src_argb, 0); + src1 = (v16u8)__msa_ld_b((v16u8*)src_argb, 16); + src2 = (v16u8)__msa_ld_b((v16u8*)src_argb, 32); + src3 = (v16u8)__msa_ld_b((v16u8*)src_argb, 48); vec0 = (v16u8)__msa_pckev_b((v16i8)src1, (v16i8)src0); vec1 = (v16u8)__msa_pckev_b((v16i8)src3, (v16i8)src2); vec2 = (v16u8)__msa_pckod_b((v16i8)src1, (v16i8)src0); @@ -822,18 +838,18 @@ void ARGBToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { reg1 = (v8u16)__msa_srai_h((v8i16)reg1, 8); dst0 = (v16u8)__msa_pckev_b((v16i8)reg1, (v16i8)reg0); ST_UB(dst0, dst_y); - src_argb0 += 64; + src_argb += 64; dst_y += 16; } } -void ARGBToUVRow_MSA(const uint8_t* src_argb0, +void ARGBToUVRow_MSA(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, uint8_t* dst_v, int width) { int x; - const uint8_t* src_argb0_next = src_argb0 + src_stride_argb; + const uint8_t* src_argb_next = src_argb + src_stride_argb; v16u8 src0, src1, src2, src3, src4, src5, src6, src7; v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, vec8, vec9; v8u16 reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7, reg8, reg9; @@ -847,14 +863,14 @@ void ARGBToUVRow_MSA(const uint8_t* src_argb0, v8u16 const_0x0001 = (v8u16)__msa_fill_h(0x0001); for (x = 0; x < width; x += 32) { - src0 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 0); - src1 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 16); - src2 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 32); - src3 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 48); - src4 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 64); - src5 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 80); - src6 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 96); - src7 = (v16u8)__msa_ld_b((v16u8*)src_argb0, 112); + src0 = (v16u8)__msa_ld_b((v16u8*)src_argb, 0); + src1 = (v16u8)__msa_ld_b((v16u8*)src_argb, 16); + src2 = (v16u8)__msa_ld_b((v16u8*)src_argb, 32); + src3 = (v16u8)__msa_ld_b((v16u8*)src_argb, 48); + src4 = (v16u8)__msa_ld_b((v16u8*)src_argb, 64); + src5 = (v16u8)__msa_ld_b((v16u8*)src_argb, 80); + src6 = (v16u8)__msa_ld_b((v16u8*)src_argb, 96); + src7 = (v16u8)__msa_ld_b((v16u8*)src_argb, 112); vec0 = (v16u8)__msa_pckev_b((v16i8)src1, (v16i8)src0); vec1 = (v16u8)__msa_pckev_b((v16i8)src3, (v16i8)src2); vec2 = (v16u8)__msa_pckev_b((v16i8)src5, (v16i8)src4); @@ -875,14 +891,14 @@ void ARGBToUVRow_MSA(const uint8_t* src_argb0, reg3 = __msa_hadd_u_h(vec5, vec5); reg4 = __msa_hadd_u_h(vec0, vec0); reg5 = __msa_hadd_u_h(vec1, vec1); - src0 = (v16u8)__msa_ld_b((v16u8*)src_argb0_next, 0); - src1 = (v16u8)__msa_ld_b((v16u8*)src_argb0_next, 16); - src2 = (v16u8)__msa_ld_b((v16u8*)src_argb0_next, 32); - src3 = (v16u8)__msa_ld_b((v16u8*)src_argb0_next, 48); - src4 = (v16u8)__msa_ld_b((v16u8*)src_argb0_next, 64); - src5 = (v16u8)__msa_ld_b((v16u8*)src_argb0_next, 80); - src6 = (v16u8)__msa_ld_b((v16u8*)src_argb0_next, 96); - src7 = 
(v16u8)__msa_ld_b((v16u8*)src_argb0_next, 112); + src0 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 0); + src1 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 16); + src2 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 32); + src3 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 48); + src4 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 64); + src5 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 80); + src6 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 96); + src7 = (v16u8)__msa_ld_b((v16u8*)src_argb_next, 112); vec0 = (v16u8)__msa_pckev_b((v16i8)src1, (v16i8)src0); vec1 = (v16u8)__msa_pckev_b((v16i8)src3, (v16i8)src2); vec2 = (v16u8)__msa_pckev_b((v16i8)src5, (v16i8)src4); @@ -945,8 +961,8 @@ void ARGBToUVRow_MSA(const uint8_t* src_argb0, dst1 = (v16u8)__msa_pckev_b((v16i8)reg5, (v16i8)reg4); ST_UB(dst0, dst_u); ST_UB(dst1, dst_v); - src_argb0 += 128; - src_argb0_next += 128; + src_argb += 128; + src_argb_next += 128; dst_u += 16; dst_v += 16; } @@ -1173,7 +1189,7 @@ void ARGBToUV444Row_MSA(const uint8_t* src_argb, } } -void ARGBMultiplyRow_MSA(const uint8_t* src_argb0, +void ARGBMultiplyRow_MSA(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -1184,7 +1200,7 @@ void ARGBMultiplyRow_MSA(const uint8_t* src_argb0, v8i16 zero = {0}; for (x = 0; x < width; x += 4) { - src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0); + src0 = (v16u8)__msa_ld_b((void*)src_argb, 0); src1 = (v16u8)__msa_ld_b((void*)src_argb1, 0); vec0 = (v8u16)__msa_ilvr_b((v16i8)src0, (v16i8)src0); vec1 = (v8u16)__msa_ilvl_b((v16i8)src0, (v16i8)src0); @@ -1206,13 +1222,13 @@ void ARGBMultiplyRow_MSA(const uint8_t* src_argb0, vec1 = (v8u16)__msa_pckev_h((v8i16)reg3, (v8i16)reg2); dst0 = (v16u8)__msa_pckev_b((v16i8)vec1, (v16i8)vec0); ST_UB(dst0, dst_argb); - src_argb0 += 16; + src_argb += 16; src_argb1 += 16; dst_argb += 16; } } -void ARGBAddRow_MSA(const uint8_t* src_argb0, +void ARGBAddRow_MSA(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -1220,20 +1236,20 @@ void ARGBAddRow_MSA(const uint8_t* src_argb0, v16u8 src0, src1, src2, src3, dst0, dst1; for (x = 0; x < width; x += 8) { - src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0); - src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16); + src0 = (v16u8)__msa_ld_b((void*)src_argb, 0); + src1 = (v16u8)__msa_ld_b((void*)src_argb, 16); src2 = (v16u8)__msa_ld_b((void*)src_argb1, 0); src3 = (v16u8)__msa_ld_b((void*)src_argb1, 16); dst0 = __msa_adds_u_b(src0, src2); dst1 = __msa_adds_u_b(src1, src3); ST_UB2(dst0, dst1, dst_argb, 16); - src_argb0 += 32; + src_argb += 32; src_argb1 += 32; dst_argb += 32; } } -void ARGBSubtractRow_MSA(const uint8_t* src_argb0, +void ARGBSubtractRow_MSA(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -1241,14 +1257,14 @@ void ARGBSubtractRow_MSA(const uint8_t* src_argb0, v16u8 src0, src1, src2, src3, dst0, dst1; for (x = 0; x < width; x += 8) { - src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0); - src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16); + src0 = (v16u8)__msa_ld_b((void*)src_argb, 0); + src1 = (v16u8)__msa_ld_b((void*)src_argb, 16); src2 = (v16u8)__msa_ld_b((void*)src_argb1, 0); src3 = (v16u8)__msa_ld_b((void*)src_argb1, 16); dst0 = __msa_subs_u_b(src0, src2); dst1 = __msa_subs_u_b(src1, src3); ST_UB2(dst0, dst1, dst_argb, 16); - src_argb0 += 32; + src_argb += 32; src_argb1 += 32; dst_argb += 32; } @@ -1676,56 +1692,51 @@ void ARGB1555ToYRow_MSA(const uint8_t* src_argb1555, uint8_t* dst_y, int width) { int x; - v8u16 src0, src1, vec0, vec1, vec2, vec3, vec4, vec5; - v8u16 reg0, 
reg1, reg2, reg3, reg4, reg5; - v16u8 dst0; - v8u16 const_0x19 = (v8u16)__msa_ldi_h(0x19); - v8u16 const_0x81 = (v8u16)__msa_ldi_h(0x81); - v8u16 const_0x42 = (v8u16)__msa_ldi_h(0x42); - v8u16 const_0x1F = (v8u16)__msa_ldi_h(0x1F); - v8u16 const_0x1080 = (v8u16)__msa_fill_h(0x1080); + v16u8 src0, src1, tmp0, tmp1, tmpb, tmpg, tmpr; + v16u8 reg0, reg1, reg2, dst; + v8i16 tmpr_l, tmpr_r, tmpg_l, tmpg_r, tmpb_l, tmpb_r; + v8i16 res0, res1; + v8i16 const_66 = (v8i16)__msa_ldi_h(66); + v8i16 const_129 = (v8i16)__msa_ldi_h(129); + v8i16 const_25 = (v8i16)__msa_ldi_h(25); + v8u16 const_1080 = (v8u16)__msa_fill_h(0x1080); + v16u8 zero = (v16u8)__msa_ldi_b(0); for (x = 0; x < width; x += 16) { - src0 = (v8u16)__msa_ld_b((void*)src_argb1555, 0); - src1 = (v8u16)__msa_ld_b((void*)src_argb1555, 16); - vec0 = src0 & const_0x1F; - vec1 = src1 & const_0x1F; - src0 = (v8u16)__msa_srai_h((v8i16)src0, 5); - src1 = (v8u16)__msa_srai_h((v8i16)src1, 5); - vec2 = src0 & const_0x1F; - vec3 = src1 & const_0x1F; - src0 = (v8u16)__msa_srai_h((v8i16)src0, 5); - src1 = (v8u16)__msa_srai_h((v8i16)src1, 5); - vec4 = src0 & const_0x1F; - vec5 = src1 & const_0x1F; - reg0 = (v8u16)__msa_slli_h((v8i16)vec0, 3); - reg1 = (v8u16)__msa_slli_h((v8i16)vec1, 3); - reg0 |= (v8u16)__msa_srai_h((v8i16)vec0, 2); - reg1 |= (v8u16)__msa_srai_h((v8i16)vec1, 2); - reg2 = (v8u16)__msa_slli_h((v8i16)vec2, 3); - reg3 = (v8u16)__msa_slli_h((v8i16)vec3, 3); - reg2 |= (v8u16)__msa_srai_h((v8i16)vec2, 2); - reg3 |= (v8u16)__msa_srai_h((v8i16)vec3, 2); - reg4 = (v8u16)__msa_slli_h((v8i16)vec4, 3); - reg5 = (v8u16)__msa_slli_h((v8i16)vec5, 3); - reg4 |= (v8u16)__msa_srai_h((v8i16)vec4, 2); - reg5 |= (v8u16)__msa_srai_h((v8i16)vec5, 2); - reg0 *= const_0x19; - reg1 *= const_0x19; - reg2 *= const_0x81; - reg3 *= const_0x81; - reg4 *= const_0x42; - reg5 *= const_0x42; - reg0 += reg2; - reg1 += reg3; - reg0 += reg4; - reg1 += reg5; - reg0 += const_0x1080; - reg1 += const_0x1080; - reg0 = (v8u16)__msa_srai_h((v8i16)reg0, 8); - reg1 = (v8u16)__msa_srai_h((v8i16)reg1, 8); - dst0 = (v16u8)__msa_pckev_b((v16i8)reg1, (v16i8)reg0); - ST_UB(dst0, dst_y); + src0 = (v16u8)__msa_ld_b((void*)src_argb1555, 0); + src1 = (v16u8)__msa_ld_b((void*)src_argb1555, 16); + tmp0 = (v16u8)__msa_pckev_b(src1, src0); + tmp1 = (v16u8)__msa_pckod_b(src1, src0); + tmpb = (v16u8)__msa_andi_b(tmp0, 0x1F); + tmpg = (v16u8)__msa_srli_b(tmp0, 5); + reg0 = (v16u8)__msa_andi_b(tmp1, 0x03); + reg0 = (v16u8)__msa_slli_b(reg0, 3); + tmpg = (v16u8)__msa_or_v(tmpg, reg0); + reg1 = (v16u8)__msa_andi_b(tmp1, 0x7C); + tmpr = (v16u8)__msa_srli_b(reg1, 2); + reg0 = (v16u8)__msa_slli_b(tmpb, 3); + reg1 = (v16u8)__msa_slli_b(tmpg, 3); + reg2 = (v16u8)__msa_slli_b(tmpr, 3); + tmpb = (v16u8)__msa_srli_b(tmpb, 2); + tmpg = (v16u8)__msa_srli_b(tmpg, 2); + tmpr = (v16u8)__msa_srli_b(tmpr, 2); + tmpb = (v16u8)__msa_or_v(reg0, tmpb); + tmpg = (v16u8)__msa_or_v(reg1, tmpg); + tmpr = (v16u8)__msa_or_v(reg2, tmpr); + tmpb_r = (v8i16)__msa_ilvr_b(zero, tmpb); + tmpb_l = (v8i16)__msa_ilvl_b(zero, tmpb); + tmpg_r = (v8i16)__msa_ilvr_b(zero, tmpg); + tmpg_l = (v8i16)__msa_ilvl_b(zero, tmpg); + tmpr_r = (v8i16)__msa_ilvr_b(zero, tmpr); + tmpr_l = (v8i16)__msa_ilvl_b(zero, tmpr); + res0 = const_1080 + const_25 * tmpb_r; + res1 = const_1080 + const_25 * tmpb_l; + res0 += const_129 * tmpg_r; + res1 += const_129 * tmpg_l; + res0 += const_66 * tmpr_r; + res1 += const_66 * tmpr_l; + dst = (v16u8)__msa_pckod_b(res1, res0); + ST_UB(dst, dst_y); src_argb1555 += 32; dst_y += 16; } @@ -1733,68 +1744,55 @@ void 
ARGB1555ToYRow_MSA(const uint8_t* src_argb1555, void RGB565ToYRow_MSA(const uint8_t* src_rgb565, uint8_t* dst_y, int width) { int x; - v8u16 src0, src1, vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; - v8u16 reg0, reg1, reg2, reg3, reg4, reg5; - v4u32 res0, res1, res2, res3; - v16u8 dst0; - v4u32 const_0x810019 = (v4u32)__msa_fill_w(0x810019); - v4u32 const_0x010042 = (v4u32)__msa_fill_w(0x010042); - v8i16 const_0x1080 = __msa_fill_h(0x1080); - v8u16 const_0x1F = (v8u16)__msa_ldi_h(0x1F); - v8u16 const_0x7E0 = (v8u16)__msa_fill_h(0x7E0); - v8u16 const_0xF800 = (v8u16)__msa_fill_h(0xF800); + v16u8 src0, src1, tmp0, tmp1, tmpb, tmpg, tmpr; + v16u8 reg0, reg1, dst; + v8i16 tmpr_l, tmpr_r, tmpg_l, tmpg_r, tmpb_l, tmpb_r; + v8i16 res0, res1; + v8i16 const_66 = (v8i16)__msa_ldi_h(66); + v8i16 const_129 = (v8i16)__msa_ldi_h(129); + v8i16 const_25 = (v8i16)__msa_ldi_h(25); + v8i16 const_1080 = (v8i16)__msa_fill_h(0x1080); + v16u8 zero = __msa_ldi_b(0); for (x = 0; x < width; x += 16) { - src0 = (v8u16)__msa_ld_b((void*)src_rgb565, 0); - src1 = (v8u16)__msa_ld_b((void*)src_rgb565, 16); - vec0 = src0 & const_0x1F; - vec1 = src0 & const_0x7E0; - vec2 = src0 & const_0xF800; - vec3 = src1 & const_0x1F; - vec4 = src1 & const_0x7E0; - vec5 = src1 & const_0xF800; - reg0 = (v8u16)__msa_slli_h((v8i16)vec0, 3); - reg1 = (v8u16)__msa_srli_h((v8i16)vec1, 3); - reg2 = (v8u16)__msa_srli_h((v8i16)vec2, 8); - reg3 = (v8u16)__msa_slli_h((v8i16)vec3, 3); - reg4 = (v8u16)__msa_srli_h((v8i16)vec4, 3); - reg5 = (v8u16)__msa_srli_h((v8i16)vec5, 8); - reg0 |= (v8u16)__msa_srli_h((v8i16)vec0, 2); - reg1 |= (v8u16)__msa_srli_h((v8i16)vec1, 9); - reg2 |= (v8u16)__msa_srli_h((v8i16)vec2, 13); - reg3 |= (v8u16)__msa_srli_h((v8i16)vec3, 2); - reg4 |= (v8u16)__msa_srli_h((v8i16)vec4, 9); - reg5 |= (v8u16)__msa_srli_h((v8i16)vec5, 13); - vec0 = (v8u16)__msa_ilvr_h((v8i16)reg1, (v8i16)reg0); - vec1 = (v8u16)__msa_ilvl_h((v8i16)reg1, (v8i16)reg0); - vec2 = (v8u16)__msa_ilvr_h((v8i16)reg4, (v8i16)reg3); - vec3 = (v8u16)__msa_ilvl_h((v8i16)reg4, (v8i16)reg3); - vec4 = (v8u16)__msa_ilvr_h(const_0x1080, (v8i16)reg2); - vec5 = (v8u16)__msa_ilvl_h(const_0x1080, (v8i16)reg2); - vec6 = (v8u16)__msa_ilvr_h(const_0x1080, (v8i16)reg5); - vec7 = (v8u16)__msa_ilvl_h(const_0x1080, (v8i16)reg5); - res0 = __msa_dotp_u_w(vec0, (v8u16)const_0x810019); - res1 = __msa_dotp_u_w(vec1, (v8u16)const_0x810019); - res2 = __msa_dotp_u_w(vec2, (v8u16)const_0x810019); - res3 = __msa_dotp_u_w(vec3, (v8u16)const_0x810019); - res0 = __msa_dpadd_u_w(res0, vec4, (v8u16)const_0x010042); - res1 = __msa_dpadd_u_w(res1, vec5, (v8u16)const_0x010042); - res2 = __msa_dpadd_u_w(res2, vec6, (v8u16)const_0x010042); - res3 = __msa_dpadd_u_w(res3, vec7, (v8u16)const_0x010042); - res0 = (v4u32)__msa_srai_w((v4i32)res0, 8); - res1 = (v4u32)__msa_srai_w((v4i32)res1, 8); - res2 = (v4u32)__msa_srai_w((v4i32)res2, 8); - res3 = (v4u32)__msa_srai_w((v4i32)res3, 8); - vec0 = (v8u16)__msa_pckev_h((v8i16)res1, (v8i16)res0); - vec1 = (v8u16)__msa_pckev_h((v8i16)res3, (v8i16)res2); - dst0 = (v16u8)__msa_pckev_b((v16i8)vec1, (v16i8)vec0); - ST_UB(dst0, dst_y); + src0 = (v16u8)__msa_ld_b((void*)src_rgb565, 0); + src1 = (v16u8)__msa_ld_b((void*)src_rgb565, 16); + tmp0 = (v16u8)__msa_pckev_b(src1, src0); + tmp1 = (v16u8)__msa_pckod_b(src1, src0); + tmpb = (v16u8)__msa_andi_b(tmp0, 0x1F); + tmpr = (v16u8)__msa_andi_b(tmp1, 0xF8); + reg1 = (v16u8)__msa_andi_b(tmp1, 0x07); + reg0 = (v16u8)__msa_srli_b(tmp0, 5); + reg1 = (v16u8)__msa_slli_b(reg1, 3); + tmpg = (v16u8)__msa_or_v(reg1, reg0); + 
reg0 = (v16u8)__msa_slli_b(tmpb, 3); + reg1 = (v16u8)__msa_srli_b(tmpb, 2); + tmpb = (v16u8)__msa_or_v(reg1, reg0); + reg0 = (v16u8)__msa_slli_b(tmpg, 2); + reg1 = (v16u8)__msa_srli_b(tmpg, 4); + tmpg = (v16u8)__msa_or_v(reg1, reg0); + reg0 = (v16u8)__msa_srli_b(tmpr, 5); + tmpr = (v16u8)__msa_or_v(tmpr, reg0); + tmpb_r = (v8i16)__msa_ilvr_b(zero, tmpb); + tmpb_l = (v8i16)__msa_ilvl_b(zero, tmpb); + tmpg_r = (v8i16)__msa_ilvr_b(zero, tmpg); + tmpg_l = (v8i16)__msa_ilvl_b(zero, tmpg); + tmpr_r = (v8i16)__msa_ilvr_b(zero, tmpr); + tmpr_l = (v8i16)__msa_ilvl_b(zero, tmpr); + res0 = const_1080 + const_25 * tmpb_r; + res1 = const_1080 + const_25 * tmpb_l; + res0 += const_129 * tmpg_r; + res1 += const_129 * tmpg_l; + res0 += const_66 * tmpr_r; + res1 += const_66 * tmpr_l; + dst = (v16u8)__msa_pckod_b(res1, res0); + ST_UB(dst, dst_y); src_rgb565 += 32; dst_y += 16; } } -void RGB24ToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { +void RGB24ToYRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width) { int x; v16u8 src0, src1, src2, reg0, reg1, reg2, reg3, dst0; v8u16 vec0, vec1, vec2, vec3; @@ -1809,9 +1807,9 @@ void RGB24ToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { v16i8 zero = {0}; for (x = 0; x < width; x += 16) { - src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0); - src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16); - src2 = (v16u8)__msa_ld_b((void*)src_argb0, 32); + src0 = (v16u8)__msa_ld_b((void*)src_argb, 0); + src1 = (v16u8)__msa_ld_b((void*)src_argb, 16); + src2 = (v16u8)__msa_ld_b((void*)src_argb, 32); reg0 = (v16u8)__msa_vshf_b(mask0, zero, (v16i8)src0); reg1 = (v16u8)__msa_vshf_b(mask1, (v16i8)src1, (v16i8)src0); reg2 = (v16u8)__msa_vshf_b(mask2, (v16i8)src2, (v16i8)src1); @@ -1830,12 +1828,12 @@ void RGB24ToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { vec1 = (v8u16)__msa_srai_h((v8i16)vec1, 8); dst0 = (v16u8)__msa_pckev_b((v16i8)vec1, (v16i8)vec0); ST_UB(dst0, dst_y); - src_argb0 += 48; + src_argb += 48; dst_y += 16; } } -void RAWToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { +void RAWToYRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width) { int x; v16u8 src0, src1, src2, reg0, reg1, reg2, reg3, dst0; v8u16 vec0, vec1, vec2, vec3; @@ -1850,9 +1848,9 @@ void RAWToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { v16i8 zero = {0}; for (x = 0; x < width; x += 16) { - src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0); - src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16); - src2 = (v16u8)__msa_ld_b((void*)src_argb0, 32); + src0 = (v16u8)__msa_ld_b((void*)src_argb, 0); + src1 = (v16u8)__msa_ld_b((void*)src_argb, 16); + src2 = (v16u8)__msa_ld_b((void*)src_argb, 32); reg0 = (v16u8)__msa_vshf_b(mask0, zero, (v16i8)src0); reg1 = (v16u8)__msa_vshf_b(mask1, (v16i8)src1, (v16i8)src0); reg2 = (v16u8)__msa_vshf_b(mask2, (v16i8)src2, (v16i8)src1); @@ -1871,7 +1869,7 @@ void RAWToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { vec1 = (v8u16)__msa_srai_h((v8i16)vec1, 8); dst0 = (v16u8)__msa_pckev_b((v16i8)vec1, (v16i8)vec0); ST_UB(dst0, dst_y); - src_argb0 += 48; + src_argb += 48; dst_y += 16; } } @@ -1885,69 +1883,61 @@ void ARGB1555ToUVRow_MSA(const uint8_t* src_argb1555, const uint16_t* s = (const uint16_t*)src_argb1555; const uint16_t* t = (const uint16_t*)(src_argb1555 + src_stride_argb1555); int64_t res0, res1; - v8u16 src0, src1, src2, src3, reg0, reg1, reg2, reg3; - v8u16 vec0, vec1, vec2, vec3, vec4, vec5, vec6; - v16u8 dst0; - v8u16 const_0x70 = (v8u16)__msa_ldi_h(0x70); - v8u16 const_0x4A = 
(v8u16)__msa_ldi_h(0x4A); - v8u16 const_0x26 = (v8u16)__msa_ldi_h(0x26); - v8u16 const_0x5E = (v8u16)__msa_ldi_h(0x5E); - v8u16 const_0x12 = (v8u16)__msa_ldi_h(0x12); - v8u16 const_0x8080 = (v8u16)__msa_fill_h(0x8080); - v8u16 const_0x1F = (v8u16)__msa_ldi_h(0x1F); + v16u8 src0, src1, src2, src3, dst; + v16u8 tmp0, tmp1, tmp2, tmp3; + v16u8 reg0, reg1, reg2, reg3; + v16u8 tmpb, tmpg, tmpr, nexb, nexg, nexr; + v8i16 const_112 = (v8i16)__msa_ldi_h(0x38); + v8i16 const_74 = (v8i16)__msa_ldi_h(0x25); + v8i16 const_38 = (v8i16)__msa_ldi_h(0x13); + v8i16 const_94 = (v8i16)__msa_ldi_h(0x2F); + v8i16 const_18 = (v8i16)__msa_ldi_h(0x09); + v8u16 const_8080 = (v8u16)__msa_fill_h(0x8080); for (x = 0; x < width; x += 16) { src0 = (v8u16)__msa_ld_b((void*)s, 0); src1 = (v8u16)__msa_ld_b((void*)s, 16); src2 = (v8u16)__msa_ld_b((void*)t, 0); src3 = (v8u16)__msa_ld_b((void*)t, 16); - vec0 = src0 & const_0x1F; - vec1 = src1 & const_0x1F; - vec0 += src2 & const_0x1F; - vec1 += src3 & const_0x1F; - vec0 = (v8u16)__msa_pckev_b((v16i8)vec1, (v16i8)vec0); - src0 = (v8u16)__msa_srai_h((v8i16)src0, 5); - src1 = (v8u16)__msa_srai_h((v8i16)src1, 5); - src2 = (v8u16)__msa_srai_h((v8i16)src2, 5); - src3 = (v8u16)__msa_srai_h((v8i16)src3, 5); - vec2 = src0 & const_0x1F; - vec3 = src1 & const_0x1F; - vec2 += src2 & const_0x1F; - vec3 += src3 & const_0x1F; - vec2 = (v8u16)__msa_pckev_b((v16i8)vec3, (v16i8)vec2); - src0 = (v8u16)__msa_srai_h((v8i16)src0, 5); - src1 = (v8u16)__msa_srai_h((v8i16)src1, 5); - src2 = (v8u16)__msa_srai_h((v8i16)src2, 5); - src3 = (v8u16)__msa_srai_h((v8i16)src3, 5); - vec4 = src0 & const_0x1F; - vec5 = src1 & const_0x1F; - vec4 += src2 & const_0x1F; - vec5 += src3 & const_0x1F; - vec4 = (v8u16)__msa_pckev_b((v16i8)vec5, (v16i8)vec4); - vec0 = __msa_hadd_u_h((v16u8)vec0, (v16u8)vec0); - vec2 = __msa_hadd_u_h((v16u8)vec2, (v16u8)vec2); - vec4 = __msa_hadd_u_h((v16u8)vec4, (v16u8)vec4); - vec6 = (v8u16)__msa_slli_h((v8i16)vec0, 1); - vec6 |= (v8u16)__msa_srai_h((v8i16)vec0, 6); - vec0 = (v8u16)__msa_slli_h((v8i16)vec2, 1); - vec0 |= (v8u16)__msa_srai_h((v8i16)vec2, 6); - vec2 = (v8u16)__msa_slli_h((v8i16)vec4, 1); - vec2 |= (v8u16)__msa_srai_h((v8i16)vec4, 6); - reg0 = vec6 * const_0x70; - reg1 = vec0 * const_0x4A; - reg2 = vec2 * const_0x70; - reg3 = vec0 * const_0x5E; - reg0 += const_0x8080; - reg1 += vec2 * const_0x26; - reg2 += const_0x8080; - reg3 += vec6 * const_0x12; - reg0 -= reg1; - reg2 -= reg3; - reg0 = (v8u16)__msa_srai_h((v8i16)reg0, 8); - reg2 = (v8u16)__msa_srai_h((v8i16)reg2, 8); - dst0 = (v16u8)__msa_pckev_b((v16i8)reg2, (v16i8)reg0); - res0 = __msa_copy_u_d((v2i64)dst0, 0); - res1 = __msa_copy_u_d((v2i64)dst0, 1); + tmp0 = (v16u8)__msa_pckev_b(src1, src0); + tmp1 = (v16u8)__msa_pckod_b(src1, src0); + tmp2 = (v16u8)__msa_pckev_b(src3, src2); + tmp3 = (v16u8)__msa_pckod_b(src3, src2); + tmpb = (v16u8)__msa_andi_b(tmp0, 0x1F); + nexb = (v16u8)__msa_andi_b(tmp2, 0x1F); + tmpg = (v16u8)__msa_srli_b(tmp0, 5); + nexg = (v16u8)__msa_srli_b(tmp2, 5); + reg0 = (v16u8)__msa_andi_b(tmp1, 0x03); + reg2 = (v16u8)__msa_andi_b(tmp3, 0x03); + reg0 = (v16u8)__msa_slli_b(reg0, 3); + reg2 = (v16u8)__msa_slli_b(reg2, 3); + tmpg = (v16u8)__msa_or_v(tmpg, reg0); + nexg = (v16u8)__msa_or_v(nexg, reg2); + reg1 = (v16u8)__msa_andi_b(tmp1, 0x7C); + reg3 = (v16u8)__msa_andi_b(tmp3, 0x7C); + tmpr = (v16u8)__msa_srli_b(reg1, 2); + nexr = (v16u8)__msa_srli_b(reg3, 2); + reg0 = (v16u8)__msa_slli_b(tmpb, 3); + reg1 = (v16u8)__msa_slli_b(tmpg, 3); + reg2 = (v16u8)__msa_slli_b(tmpr, 3); + tmpb = 
(v16u8)__msa_srli_b(tmpb, 2); + tmpg = (v16u8)__msa_srli_b(tmpg, 2); + tmpr = (v16u8)__msa_srli_b(tmpr, 2); + tmpb = (v16u8)__msa_or_v(reg0, tmpb); + tmpg = (v16u8)__msa_or_v(reg1, tmpg); + tmpr = (v16u8)__msa_or_v(reg2, tmpr); + reg0 = (v16u8)__msa_slli_b(nexb, 3); + reg1 = (v16u8)__msa_slli_b(nexg, 3); + reg2 = (v16u8)__msa_slli_b(nexr, 3); + nexb = (v16u8)__msa_srli_b(nexb, 2); + nexg = (v16u8)__msa_srli_b(nexg, 2); + nexr = (v16u8)__msa_srli_b(nexr, 2); + nexb = (v16u8)__msa_or_v(reg0, nexb); + nexg = (v16u8)__msa_or_v(reg1, nexg); + nexr = (v16u8)__msa_or_v(reg2, nexr); + RGBTOUV(tmpb, tmpg, tmpr, nexb, nexg, nexr, dst); + res0 = __msa_copy_u_d((v2i64)dst, 0); + res1 = __msa_copy_u_d((v2i64)dst, 1); SD(res0, dst_u); SD(res1, dst_v); s += 16; @@ -1966,68 +1956,57 @@ void RGB565ToUVRow_MSA(const uint8_t* src_rgb565, const uint16_t* s = (const uint16_t*)src_rgb565; const uint16_t* t = (const uint16_t*)(src_rgb565 + src_stride_rgb565); int64_t res0, res1; - v8u16 src0, src1, src2, src3, reg0, reg1, reg2, reg3; - v8u16 vec0, vec1, vec2, vec3, vec4, vec5; - v16u8 dst0; - v8u16 const_0x70 = (v8u16)__msa_ldi_h(0x70); - v8u16 const_0x4A = (v8u16)__msa_ldi_h(0x4A); - v8u16 const_0x26 = (v8u16)__msa_ldi_h(0x26); - v8u16 const_0x5E = (v8u16)__msa_ldi_h(0x5E); - v8u16 const_0x12 = (v8u16)__msa_ldi_h(0x12); - v8u16 const_32896 = (v8u16)__msa_fill_h(0x8080); - v8u16 const_0x1F = (v8u16)__msa_ldi_h(0x1F); - v8u16 const_0x3F = (v8u16)__msa_fill_h(0x3F); + v16u8 src0, src1, src2, src3, dst; + v16u8 tmp0, tmp1, tmp2, tmp3; + v16u8 reg0, reg1, reg2, reg3; + v16u8 tmpb, tmpg, tmpr, nexb, nexg, nexr; + v8i16 const_112 = (v8i16)__msa_ldi_h(0x38); + v8i16 const_74 = (v8i16)__msa_ldi_h(0x25); + v8i16 const_38 = (v8i16)__msa_ldi_h(0x13); + v8i16 const_94 = (v8i16)__msa_ldi_h(0x2F); + v8i16 const_18 = (v8i16)__msa_ldi_h(0x09); + v8u16 const_8080 = (v8u16)__msa_fill_h(0x8080); for (x = 0; x < width; x += 16) { - src0 = (v8u16)__msa_ld_b((void*)s, 0); - src1 = (v8u16)__msa_ld_b((void*)s, 16); - src2 = (v8u16)__msa_ld_b((void*)t, 0); - src3 = (v8u16)__msa_ld_b((void*)t, 16); - vec0 = src0 & const_0x1F; - vec1 = src1 & const_0x1F; - vec0 += src2 & const_0x1F; - vec1 += src3 & const_0x1F; - vec0 = (v8u16)__msa_pckev_b((v16i8)vec1, (v16i8)vec0); - src0 = (v8u16)__msa_srai_h((v8i16)src0, 5); - src1 = (v8u16)__msa_srai_h((v8i16)src1, 5); - src2 = (v8u16)__msa_srai_h((v8i16)src2, 5); - src3 = (v8u16)__msa_srai_h((v8i16)src3, 5); - vec2 = src0 & const_0x3F; - vec3 = src1 & const_0x3F; - vec2 += src2 & const_0x3F; - vec3 += src3 & const_0x3F; - vec1 = (v8u16)__msa_pckev_b((v16i8)vec3, (v16i8)vec2); - src0 = (v8u16)__msa_srai_h((v8i16)src0, 6); - src1 = (v8u16)__msa_srai_h((v8i16)src1, 6); - src2 = (v8u16)__msa_srai_h((v8i16)src2, 6); - src3 = (v8u16)__msa_srai_h((v8i16)src3, 6); - vec4 = src0 & const_0x1F; - vec5 = src1 & const_0x1F; - vec4 += src2 & const_0x1F; - vec5 += src3 & const_0x1F; - vec2 = (v8u16)__msa_pckev_b((v16i8)vec5, (v16i8)vec4); - vec0 = __msa_hadd_u_h((v16u8)vec0, (v16u8)vec0); - vec1 = __msa_hadd_u_h((v16u8)vec1, (v16u8)vec1); - vec2 = __msa_hadd_u_h((v16u8)vec2, (v16u8)vec2); - vec3 = (v8u16)__msa_slli_h((v8i16)vec0, 1); - vec3 |= (v8u16)__msa_srai_h((v8i16)vec0, 6); - vec4 = (v8u16)__msa_slli_h((v8i16)vec2, 1); - vec4 |= (v8u16)__msa_srai_h((v8i16)vec2, 6); - reg0 = vec3 * const_0x70; - reg1 = vec1 * const_0x4A; - reg2 = vec4 * const_0x70; - reg3 = vec1 * const_0x5E; - reg0 += const_32896; - reg1 += vec4 * const_0x26; - reg2 += const_32896; - reg3 += vec3 * const_0x12; - reg0 -= reg1; - reg2 -= reg3; 
- reg0 = (v8u16)__msa_srai_h((v8i16)reg0, 8); - reg2 = (v8u16)__msa_srai_h((v8i16)reg2, 8); - dst0 = (v16u8)__msa_pckev_b((v16i8)reg2, (v16i8)reg0); - res0 = __msa_copy_u_d((v2i64)dst0, 0); - res1 = __msa_copy_u_d((v2i64)dst0, 1); + src0 = (v16u8)__msa_ld_b((void*)s, 0); + src1 = (v16u8)__msa_ld_b((void*)s, 16); + src2 = (v16u8)__msa_ld_b((void*)t, 0); + src3 = (v16u8)__msa_ld_b((void*)t, 16); + tmp0 = (v16u8)__msa_pckev_b(src1, src0); + tmp1 = (v16u8)__msa_pckod_b(src1, src0); + tmp2 = (v16u8)__msa_pckev_b(src3, src2); + tmp3 = (v16u8)__msa_pckod_b(src3, src2); + tmpb = (v16u8)__msa_andi_b(tmp0, 0x1F); + tmpr = (v16u8)__msa_andi_b(tmp1, 0xF8); + nexb = (v16u8)__msa_andi_b(tmp2, 0x1F); + nexr = (v16u8)__msa_andi_b(tmp3, 0xF8); + reg1 = (v16u8)__msa_andi_b(tmp1, 0x07); + reg3 = (v16u8)__msa_andi_b(tmp3, 0x07); + reg0 = (v16u8)__msa_srli_b(tmp0, 5); + reg1 = (v16u8)__msa_slli_b(reg1, 3); + reg2 = (v16u8)__msa_srli_b(tmp2, 5); + reg3 = (v16u8)__msa_slli_b(reg3, 3); + tmpg = (v16u8)__msa_or_v(reg1, reg0); + nexg = (v16u8)__msa_or_v(reg2, reg3); + reg0 = (v16u8)__msa_slli_b(tmpb, 3); + reg1 = (v16u8)__msa_srli_b(tmpb, 2); + reg2 = (v16u8)__msa_slli_b(nexb, 3); + reg3 = (v16u8)__msa_srli_b(nexb, 2); + tmpb = (v16u8)__msa_or_v(reg1, reg0); + nexb = (v16u8)__msa_or_v(reg2, reg3); + reg0 = (v16u8)__msa_slli_b(tmpg, 2); + reg1 = (v16u8)__msa_srli_b(tmpg, 4); + reg2 = (v16u8)__msa_slli_b(nexg, 2); + reg3 = (v16u8)__msa_srli_b(nexg, 4); + tmpg = (v16u8)__msa_or_v(reg1, reg0); + nexg = (v16u8)__msa_or_v(reg2, reg3); + reg0 = (v16u8)__msa_srli_b(tmpr, 5); + reg2 = (v16u8)__msa_srli_b(nexr, 5); + tmpr = (v16u8)__msa_or_v(tmpr, reg0); + nexr = (v16u8)__msa_or_v(nexr, reg2); + RGBTOUV(tmpb, tmpg, tmpr, nexb, nexg, nexr, dst); + res0 = __msa_copy_u_d((v2i64)dst, 0); + res1 = __msa_copy_u_d((v2i64)dst, 1); SD(res0, dst_u); SD(res1, dst_v); s += 16; @@ -2037,14 +2016,14 @@ void RGB565ToUVRow_MSA(const uint8_t* src_rgb565, } } -void RGB24ToUVRow_MSA(const uint8_t* src_rgb0, +void RGB24ToUVRow_MSA(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, uint8_t* dst_v, int width) { int x; - const uint8_t* s = src_rgb0; - const uint8_t* t = src_rgb0 + src_stride_rgb; + const uint8_t* s = src_rgb; + const uint8_t* t = src_rgb + src_stride_rgb; int64_t res0, res1; v16u8 src0, src1, src2, src3, src4, src5, src6, src7; v16u8 inp0, inp1, inp2, inp3, inp4, inp5; @@ -2147,14 +2126,14 @@ void RGB24ToUVRow_MSA(const uint8_t* src_rgb0, } } -void RAWToUVRow_MSA(const uint8_t* src_rgb0, +void RAWToUVRow_MSA(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, uint8_t* dst_v, int width) { int x; - const uint8_t* s = src_rgb0; - const uint8_t* t = src_rgb0 + src_stride_rgb; + const uint8_t* s = src_rgb; + const uint8_t* t = src_rgb + src_stride_rgb; int64_t res0, res1; v16u8 inp0, inp1, inp2, inp3, inp4, inp5; v16u8 src0, src1, src2, src3, src4, src5, src6, src7; @@ -2266,13 +2245,13 @@ void NV12ToARGBRow_MSA(const uint8_t* src_y, uint64_t val0, val1; v16u8 src0, src1, res0, res1, dst0, dst1; v8i16 vec0, vec1, vec2; - v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg; + v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb; v4i32 vec_ubvr, vec_ugvg; v16u8 zero = {0}; v16u8 alpha = (v16u8)__msa_ldi_b(ALPHA_VAL); + v8i16 const_0x80 = __msa_ldi_h(0x80); - YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, - vec_br, vec_yg); + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); vec_ubvr = __msa_ilvr_w(vec_vr, vec_ub); vec_ugvg = 
(v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug); @@ -2281,8 +2260,7 @@ void NV12ToARGBRow_MSA(const uint8_t* src_y, val1 = LD(src_uv); src0 = (v16u8)__msa_insert_d((v2i64)zero, 0, val0); src1 = (v16u8)__msa_insert_d((v2i64)zero, 0, val1); - YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_bb, vec_bg, vec_br, vec_yg, - vec0, vec1, vec2); + YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_yg, vec_yb, vec0, vec1, vec2); res0 = (v16u8)__msa_ilvev_b((v16i8)vec2, (v16i8)vec0); res1 = (v16u8)__msa_ilvev_b((v16i8)alpha, (v16i8)vec1); dst0 = (v16u8)__msa_ilvr_b((v16i8)res1, (v16i8)res0); @@ -2303,12 +2281,12 @@ void NV12ToRGB565Row_MSA(const uint8_t* src_y, uint64_t val0, val1; v16u8 src0, src1, dst0; v8i16 vec0, vec1, vec2; - v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg; + v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb; v4i32 vec_ubvr, vec_ugvg; + v8i16 const_0x80 = __msa_ldi_h(0x80); v16u8 zero = {0}; - YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, - vec_br, vec_yg); + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); vec_ubvr = __msa_ilvr_w(vec_vr, vec_ub); vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug); @@ -2317,8 +2295,7 @@ void NV12ToRGB565Row_MSA(const uint8_t* src_y, val1 = LD(src_uv); src0 = (v16u8)__msa_insert_d((v2i64)zero, 0, val0); src1 = (v16u8)__msa_insert_d((v2i64)zero, 0, val1); - YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_bb, vec_bg, vec_br, vec_yg, - vec0, vec1, vec2); + YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_yg, vec_yb, vec0, vec1, vec2); vec0 = vec0 >> 3; vec1 = (vec1 >> 2) << 5; vec2 = (vec2 >> 3) << 11; @@ -2339,14 +2316,14 @@ void NV21ToARGBRow_MSA(const uint8_t* src_y, uint64_t val0, val1; v16u8 src0, src1, res0, res1, dst0, dst1; v8i16 vec0, vec1, vec2; - v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg; + v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb; v4i32 vec_ubvr, vec_ugvg; v16u8 alpha = (v16u8)__msa_ldi_b(ALPHA_VAL); v16u8 zero = {0}; v16i8 shuffler = {1, 0, 3, 2, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 15, 14}; + v8i16 const_0x80 = __msa_ldi_h(0x80); - YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, - vec_br, vec_yg); + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); vec_ubvr = __msa_ilvr_w(vec_vr, vec_ub); vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug); @@ -2356,8 +2333,7 @@ void NV21ToARGBRow_MSA(const uint8_t* src_y, src0 = (v16u8)__msa_insert_d((v2i64)zero, 0, val0); src1 = (v16u8)__msa_insert_d((v2i64)zero, 0, val1); src1 = (v16u8)__msa_vshf_b(shuffler, (v16i8)src1, (v16i8)src1); - YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_bb, vec_bg, vec_br, vec_yg, - vec0, vec1, vec2); + YUVTORGB(src0, src1, vec_ubvr, vec_ugvg, vec_yg, vec_yb, vec0, vec1, vec2); res0 = (v16u8)__msa_ilvev_b((v16i8)vec2, (v16i8)vec0); res1 = (v16u8)__msa_ilvev_b((v16i8)alpha, (v16i8)vec1); dst0 = (v16u8)__msa_ilvr_b((v16i8)res1, (v16i8)res0); @@ -2446,7 +2422,7 @@ void SobelXYRow_MSA(const uint8_t* src_sobelx, } } -void ARGBToYJRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { +void ARGBToYJRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width) { int x; v16u8 src0, src1, src2, src3, dst0; v16u8 const_0x961D = (v16u8)__msa_fill_h(0x961D); @@ -2454,19 +2430,19 @@ void ARGBToYJRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { v8u16 const_0x80 = (v8u16)__msa_fill_h(0x80); for (x = 0; x < width; x += 16) { - src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0); - src1 = 
(v16u8)__msa_ld_b((void*)src_argb0, 16); - src2 = (v16u8)__msa_ld_b((void*)src_argb0, 32); - src3 = (v16u8)__msa_ld_b((void*)src_argb0, 48); + src0 = (v16u8)__msa_ld_b((void*)src_argb, 0); + src1 = (v16u8)__msa_ld_b((void*)src_argb, 16); + src2 = (v16u8)__msa_ld_b((void*)src_argb, 32); + src3 = (v16u8)__msa_ld_b((void*)src_argb, 48); ARGBTOY(src0, src1, src2, src3, const_0x961D, const_0x4D, const_0x80, 8, dst0); ST_UB(dst0, dst_y); - src_argb0 += 64; + src_argb += 64; dst_y += 16; } } -void BGRAToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { +void BGRAToYRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width) { int x; v16u8 src0, src1, src2, src3, dst0; v16u8 const_0x4200 = (v16u8)__msa_fill_h(0x4200); @@ -2474,19 +2450,19 @@ void BGRAToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { v8u16 const_0x1080 = (v8u16)__msa_fill_h(0x1080); for (x = 0; x < width; x += 16) { - src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0); - src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16); - src2 = (v16u8)__msa_ld_b((void*)src_argb0, 32); - src3 = (v16u8)__msa_ld_b((void*)src_argb0, 48); + src0 = (v16u8)__msa_ld_b((void*)src_argb, 0); + src1 = (v16u8)__msa_ld_b((void*)src_argb, 16); + src2 = (v16u8)__msa_ld_b((void*)src_argb, 32); + src3 = (v16u8)__msa_ld_b((void*)src_argb, 48); ARGBTOY(src0, src1, src2, src3, const_0x4200, const_0x1981, const_0x1080, 8, dst0); ST_UB(dst0, dst_y); - src_argb0 += 64; + src_argb += 64; dst_y += 16; } } -void ABGRToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { +void ABGRToYRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width) { int x; v16u8 src0, src1, src2, src3, dst0; v16u8 const_0x8142 = (v16u8)__msa_fill_h(0x8142); @@ -2494,19 +2470,19 @@ void ABGRToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { v8u16 const_0x1080 = (v8u16)__msa_fill_h(0x1080); for (x = 0; x < width; x += 16) { - src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0); - src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16); - src2 = (v16u8)__msa_ld_b((void*)src_argb0, 32); - src3 = (v16u8)__msa_ld_b((void*)src_argb0, 48); + src0 = (v16u8)__msa_ld_b((void*)src_argb, 0); + src1 = (v16u8)__msa_ld_b((void*)src_argb, 16); + src2 = (v16u8)__msa_ld_b((void*)src_argb, 32); + src3 = (v16u8)__msa_ld_b((void*)src_argb, 48); ARGBTOY(src0, src1, src2, src3, const_0x8142, const_0x19, const_0x1080, 8, dst0); ST_UB(dst0, dst_y); - src_argb0 += 64; + src_argb += 64; dst_y += 16; } } -void RGBAToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { +void RGBAToYRow_MSA(const uint8_t* src_argb, uint8_t* dst_y, int width) { int x; v16u8 src0, src1, src2, src3, dst0; v16u8 const_0x1900 = (v16u8)__msa_fill_h(0x1900); @@ -2514,26 +2490,26 @@ void RGBAToYRow_MSA(const uint8_t* src_argb0, uint8_t* dst_y, int width) { v8u16 const_0x1080 = (v8u16)__msa_fill_h(0x1080); for (x = 0; x < width; x += 16) { - src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0); - src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16); - src2 = (v16u8)__msa_ld_b((void*)src_argb0, 32); - src3 = (v16u8)__msa_ld_b((void*)src_argb0, 48); + src0 = (v16u8)__msa_ld_b((void*)src_argb, 0); + src1 = (v16u8)__msa_ld_b((void*)src_argb, 16); + src2 = (v16u8)__msa_ld_b((void*)src_argb, 32); + src3 = (v16u8)__msa_ld_b((void*)src_argb, 48); ARGBTOY(src0, src1, src2, src3, const_0x1900, const_0x4281, const_0x1080, 8, dst0); ST_UB(dst0, dst_y); - src_argb0 += 64; + src_argb += 64; dst_y += 16; } } -void ARGBToUVJRow_MSA(const uint8_t* src_rgb0, +void ARGBToUVJRow_MSA(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* 
dst_u, uint8_t* dst_v, int width) { int x; - const uint8_t* s = src_rgb0; - const uint8_t* t = src_rgb0 + src_stride_rgb; + const uint8_t* s = src_rgb; + const uint8_t* t = src_rgb + src_stride_rgb; v8u16 src0, src1, src2, src3, src4, src5, src6, src7; v8u16 vec0, vec1, vec2, vec3; v8u16 dst0, dst1, dst2, dst3; @@ -2658,14 +2634,14 @@ void ARGBToUVJRow_MSA(const uint8_t* src_rgb0, } } -void BGRAToUVRow_MSA(const uint8_t* src_rgb0, +void BGRAToUVRow_MSA(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, uint8_t* dst_v, int width) { int x; - const uint8_t* s = src_rgb0; - const uint8_t* t = src_rgb0 + src_stride_rgb; + const uint8_t* s = src_rgb; + const uint8_t* t = src_rgb + src_stride_rgb; const uint8_t unused = 0xf; v8u16 src0, src1, src2, src3; v16u8 dst0, dst1; @@ -2693,14 +2669,14 @@ void BGRAToUVRow_MSA(const uint8_t* src_rgb0, } } -void ABGRToUVRow_MSA(const uint8_t* src_rgb0, +void ABGRToUVRow_MSA(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, uint8_t* dst_v, int width) { int x; - const uint8_t* s = src_rgb0; - const uint8_t* t = src_rgb0 + src_stride_rgb; + const uint8_t* s = src_rgb; + const uint8_t* t = src_rgb + src_stride_rgb; const uint8_t unused = 0xf; v8u16 src0, src1, src2, src3; v16u8 dst0, dst1; @@ -2728,14 +2704,14 @@ void ABGRToUVRow_MSA(const uint8_t* src_rgb0, } } -void RGBAToUVRow_MSA(const uint8_t* src_rgb0, +void RGBAToUVRow_MSA(const uint8_t* src_rgb, int src_stride_rgb, uint8_t* dst_u, uint8_t* dst_v, int width) { int x; - const uint8_t* s = src_rgb0; - const uint8_t* t = src_rgb0 + src_stride_rgb; + const uint8_t* s = src_rgb; + const uint8_t* t = src_rgb + src_stride_rgb; const uint8_t unused = 0xf; v8u16 src0, src1, src2, src3; v16u8 dst0, dst1; @@ -2771,54 +2747,57 @@ void I444ToARGBRow_MSA(const uint8_t* src_y, int width) { int x; v16u8 src0, src1, src2, dst0, dst1; - v8u16 vec0, vec1, vec2; + v8i16 vec0, vec1, vec2; v4i32 reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7, reg8, reg9; - v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg; + v4i32 tmp0, tmp1, tmp2, tmp3, tmp4, tmp5; + v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb; v16u8 alpha = (v16u8)__msa_ldi_b(ALPHA_VAL); v8i16 zero = {0}; + v4i32 const_0x80 = __msa_fill_w(0x80); - YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, - vec_br, vec_yg); + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); for (x = 0; x < width; x += 8) { READI444(src_y, src_u, src_v, src0, src1, src2); - vec0 = (v8u16)__msa_ilvr_b((v16i8)src0, (v16i8)src0); + vec0 = (v8i16)__msa_ilvr_b((v16i8)src0, (v16i8)src0); reg0 = (v4i32)__msa_ilvr_h((v8i16)zero, (v8i16)vec0); reg1 = (v4i32)__msa_ilvl_h((v8i16)zero, (v8i16)vec0); reg0 *= vec_yg; reg1 *= vec_yg; reg0 = __msa_srai_w(reg0, 16); reg1 = __msa_srai_w(reg1, 16); - reg4 = reg0 + vec_br; - reg5 = reg1 + vec_br; - reg2 = reg0 + vec_bg; - reg3 = reg1 + vec_bg; - reg0 += vec_bb; - reg1 += vec_bb; + reg0 += vec_yb; + reg1 += vec_yb; vec0 = (v8u16)__msa_ilvr_b((v16i8)zero, (v16i8)src1); vec1 = (v8u16)__msa_ilvr_b((v16i8)zero, (v16i8)src2); reg6 = (v4i32)__msa_ilvr_h((v8i16)zero, (v8i16)vec0); reg7 = (v4i32)__msa_ilvl_h((v8i16)zero, (v8i16)vec0); reg8 = (v4i32)__msa_ilvr_h((v8i16)zero, (v8i16)vec1); reg9 = (v4i32)__msa_ilvl_h((v8i16)zero, (v8i16)vec1); - reg0 -= reg6 * vec_ub; - reg1 -= reg7 * vec_ub; - reg2 -= reg6 * vec_ug; - reg3 -= reg7 * vec_ug; - reg4 -= reg8 * vec_vr; - reg5 -= reg9 * vec_vr; - reg2 -= reg8 * vec_vg; - reg3 -= reg9 * vec_vg; - reg0 = __msa_srai_w(reg0, 6); - reg1 = 
__msa_srai_w(reg1, 6); - reg2 = __msa_srai_w(reg2, 6); - reg3 = __msa_srai_w(reg3, 6); - reg4 = __msa_srai_w(reg4, 6); - reg5 = __msa_srai_w(reg5, 6); + reg6 -= const_0x80; + reg7 -= const_0x80; + reg8 -= const_0x80; + reg9 -= const_0x80; + tmp0 = reg0 + reg6 * vec_ub; + tmp1 = reg1 + reg7 * vec_ub; + tmp2 = reg0 + reg8 * vec_vr; + tmp3 = reg1 + reg9 * vec_vr; + tmp4 = reg6 * vec_ug; + tmp5 = reg7 * vec_ug; + tmp4 += reg8 * vec_vg; + tmp5 += reg9 * vec_vg; + tmp4 = reg0 - tmp4; + tmp5 = reg1 - tmp5; + reg0 = __msa_srai_w(tmp0, 6); + reg1 = __msa_srai_w(tmp1, 6); + reg2 = __msa_srai_w(tmp2, 6); + reg3 = __msa_srai_w(tmp3, 6); + reg4 = __msa_srai_w(tmp4, 6); + reg5 = __msa_srai_w(tmp5, 6); CLIP_0TO255(reg0, reg1, reg2, reg3, reg4, reg5); vec0 = (v8u16)__msa_pckev_h((v8i16)reg1, (v8i16)reg0); - vec1 = (v8u16)__msa_pckev_h((v8i16)reg3, (v8i16)reg2); - vec2 = (v8u16)__msa_pckev_h((v8i16)reg5, (v8i16)reg4); + vec1 = (v8u16)__msa_pckev_h((v8i16)reg5, (v8i16)reg4); + vec2 = (v8u16)__msa_pckev_h((v8i16)reg3, (v8i16)reg2); vec0 = (v8u16)__msa_ilvev_b((v16i8)vec1, (v16i8)vec0); vec1 = (v8u16)__msa_ilvev_b((v16i8)alpha, (v16i8)vec2); dst0 = (v16u8)__msa_ilvr_h((v8i16)vec1, (v8i16)vec0); @@ -2922,12 +2901,12 @@ void YUY2ToARGBRow_MSA(const uint8_t* src_yuy2, int x; v16u8 src0, src1, src2; v8i16 vec0, vec1, vec2; - v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg; + v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb; v4i32 vec_ubvr, vec_ugvg; v16u8 alpha = (v16u8)__msa_ldi_b(ALPHA_VAL); + v8i16 const_0x80 = __msa_ldi_h(0x80); - YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, - vec_br, vec_yg); + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); vec_ubvr = __msa_ilvr_w(vec_vr, vec_ub); vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug); @@ -2935,8 +2914,7 @@ void YUY2ToARGBRow_MSA(const uint8_t* src_yuy2, src0 = (v16u8)__msa_ld_b((void*)src_yuy2, 0); src1 = (v16u8)__msa_pckev_b((v16i8)src0, (v16i8)src0); src2 = (v16u8)__msa_pckod_b((v16i8)src0, (v16i8)src0); - YUVTORGB(src1, src2, vec_ubvr, vec_ugvg, vec_bb, vec_bg, vec_br, vec_yg, - vec0, vec1, vec2); + YUVTORGB(src1, src2, vec_ubvr, vec_ugvg, vec_yg, vec_yb, vec0, vec1, vec2); STOREARGB(vec0, vec1, vec2, alpha, dst_argb); src_yuy2 += 16; dst_argb += 32; @@ -2950,12 +2928,12 @@ void UYVYToARGBRow_MSA(const uint8_t* src_uyvy, int x; v16u8 src0, src1, src2; v8i16 vec0, vec1, vec2; - v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, vec_br, vec_yg; + v4i32 vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb; v4i32 vec_ubvr, vec_ugvg; + v8i16 const_0x80 = __msa_ldi_h(0x80); v16u8 alpha = (v16u8)__msa_ldi_b(ALPHA_VAL); - YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_bb, vec_bg, - vec_br, vec_yg); + YUVTORGB_SETUP(yuvconstants, vec_ub, vec_vr, vec_ug, vec_vg, vec_yg, vec_yb); vec_ubvr = __msa_ilvr_w(vec_vr, vec_ub); vec_ugvg = (v4i32)__msa_ilvev_h((v8i16)vec_vg, (v8i16)vec_ug); @@ -2963,8 +2941,7 @@ void UYVYToARGBRow_MSA(const uint8_t* src_uyvy, src0 = (v16u8)__msa_ld_b((void*)src_uyvy, 0); src1 = (v16u8)__msa_pckod_b((v16i8)src0, (v16i8)src0); src2 = (v16u8)__msa_pckev_b((v16i8)src0, (v16i8)src0); - YUVTORGB(src1, src2, vec_ubvr, vec_ugvg, vec_bb, vec_bg, vec_br, vec_yg, - vec0, vec1, vec2); + YUVTORGB(src1, src2, vec_ubvr, vec_ugvg, vec_yg, vec_yb, vec0, vec1, vec2); STOREARGB(vec0, vec1, vec2, alpha, dst_argb); src_uyvy += 16; dst_argb += 32; @@ -3109,7 +3086,7 @@ void ARGBExtractAlphaRow_MSA(const uint8_t* src_argb, } } -void ARGBBlendRow_MSA(const 
uint8_t* src_argb0, +void ARGBBlendRow_MSA(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -3123,8 +3100,8 @@ void ARGBBlendRow_MSA(const uint8_t* src_argb0, v16i8 zero = {0}; for (x = 0; x < width; x += 8) { - src0 = (v16u8)__msa_ld_b((void*)src_argb0, 0); - src1 = (v16u8)__msa_ld_b((void*)src_argb0, 16); + src0 = (v16u8)__msa_ld_b((void*)src_argb, 0); + src1 = (v16u8)__msa_ld_b((void*)src_argb, 16); src2 = (v16u8)__msa_ld_b((void*)src_argb1, 0); src3 = (v16u8)__msa_ld_b((void*)src_argb1, 16); vec0 = (v8u16)__msa_ilvr_b(zero, (v16i8)src0); @@ -3168,7 +3145,7 @@ void ARGBBlendRow_MSA(const uint8_t* src_argb0, dst0 = __msa_bmnz_v(dst0, const_255, mask); dst1 = __msa_bmnz_v(dst1, const_255, mask); ST_UB2(dst0, dst1, dst_argb, 16); - src_argb0 += 32; + src_argb += 32; src_argb1 += 32; dst_argb += 32; } diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_neon.cc b/TMessagesProj/jni/third_party/libyuv/source/row_neon.cc index a5aeaabfbd..37f6db0cd9 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_neon.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/row_neon.cc @@ -10,8 +10,6 @@ #include "libyuv/row.h" -#include - #ifdef __cplusplus namespace libyuv { extern "C" { @@ -21,90 +19,118 @@ extern "C" { #if !defined(LIBYUV_DISABLE_NEON) && defined(__ARM_NEON__) && \ !defined(__aarch64__) +// d8-d15, r4-r11,r14(lr) need to be preserved if used. r13(sp),r15(pc) are +// reserved. + +// q0: Y uint16x8_t +// d2: U uint8x8_t +// d3: V uint8x8_t + // Read 8 Y, 4 U and 4 V from 422 #define READYUV422 \ - "vld1.8 {d0}, [%0]! \n" \ - "vld1.32 {d2[0]}, [%1]! \n" \ - "vld1.32 {d2[1]}, [%2]! \n" + "vld1.8 {d0}, [%[src_y]]! \n" \ + "vld1.32 {d2[0]}, [%[src_u]]! \n" \ + "vld1.32 {d2[1]}, [%[src_v]]! \n" \ + "vmov.u8 d1, d0 \n" \ + "vmovl.u8 q1, d2 \n" \ + "vzip.u8 d0, d1 \n" \ + "vsli.u16 q1, q1, #8 \n" // Read 8 Y, 8 U and 8 V from 444 #define READYUV444 \ - "vld1.8 {d0}, [%0]! \n" \ - "vld1.8 {d2}, [%1]! \n" \ - "vld1.8 {d3}, [%2]! \n" \ - "vpaddl.u8 q1, q1 \n" \ - "vrshrn.u16 d2, q1, #1 \n" + "vld1.8 {d0}, [%[src_y]]! \n" \ + "vld1.8 {d2}, [%[src_u]]! \n" \ + "vmovl.u8 q0, d0 \n" \ + "vld1.8 {d3}, [%[src_v]]! \n" \ + "vsli.u16 q0, q0, #8 \n" // Read 8 Y, and set 4 U and 4 V to 128 #define READYUV400 \ - "vld1.8 {d0}, [%0]! \n" \ - "vmov.u8 d2, #128 \n" + "vld1.8 {d0}, [%[src_y]]! \n" \ + "vmov.u8 q1, #128 \n" \ + "vmovl.u8 q0, d0 \n" \ + "vsli.u16 q0, q0, #8 \n" // Read 8 Y and 4 UV from NV12 -#define READNV12 \ - "vld1.8 {d0}, [%0]! \n" \ - "vld1.8 {d2}, [%1]! \n" \ - "vmov.u8 d3, d2 \n" /* split odd/even uv apart */ \ - "vuzp.u8 d2, d3 \n" \ - "vtrn.u32 d2, d3 \n" +#define READNV12 \ + "vld1.8 {d0}, [%[src_y]]! \n" \ + "vld1.8 {d2}, [%[src_uv]]! \n" \ + "vmov.u8 d1, d0 \n" \ + "vmov.u8 d3, d2 \n" \ + "vzip.u8 d0, d1 \n" \ + "vsli.u16 d2, d2, #8 \n" /* Duplicate low byte (U) */ \ + "vsri.u16 d3, d3, #8 \n" /* Duplicate high byte (V) */ // Read 8 Y and 4 VU from NV21 #define READNV21 \ - "vld1.8 {d0}, [%0]! \n" \ - "vld1.8 {d2}, [%1]! \n" \ - "vmov.u8 d3, d2 \n" /* split odd/even uv apart */ \ - "vuzp.u8 d3, d2 \n" \ - "vtrn.u32 d2, d3 \n" + "vld1.8 {d0}, [%[src_y]]! \n" \ + "vld1.8 {d2}, [%[src_vu]]! \n" \ + "vmov.u8 d1, d0 \n" \ + "vmov.u8 d3, d2 \n" \ + "vzip.u8 d0, d1 \n" \ + "vsri.u16 d2, d2, #8 \n" /* Duplicate high byte (U) */ \ + "vsli.u16 d3, d3, #8 \n" /* Duplicate low byte (V) */ // Read 8 YUY2 #define READYUY2 \ - "vld2.8 {d0, d2}, [%0]! \n" \ + "vld2.8 {d0, d2}, [%[src_yuy2]]! 
\n" \ + "vmovl.u8 q0, d0 \n" \ "vmov.u8 d3, d2 \n" \ - "vuzp.u8 d2, d3 \n" \ - "vtrn.u32 d2, d3 \n" + "vsli.u16 q0, q0, #8 \n" \ + "vsli.u16 d2, d2, #8 \n" \ + "vsri.u16 d3, d3, #8 \n" // Read 8 UYVY #define READUYVY \ - "vld2.8 {d2, d3}, [%0]! \n" \ - "vmov.u8 d0, d3 \n" \ + "vld2.8 {d2, d3}, [%[src_uyvy]]! \n" \ + "vmovl.u8 q0, d3 \n" \ "vmov.u8 d3, d2 \n" \ - "vuzp.u8 d2, d3 \n" \ - "vtrn.u32 d2, d3 \n" - -#define YUVTORGB_SETUP \ - "vld1.8 {d24}, [%[kUVToRB]] \n" \ - "vld1.8 {d25}, [%[kUVToG]] \n" \ - "vld1.16 {d26[], d27[]}, [%[kUVBiasBGR]]! \n" \ - "vld1.16 {d8[], d9[]}, [%[kUVBiasBGR]]! \n" \ - "vld1.16 {d28[], d29[]}, [%[kUVBiasBGR]] \n" \ - "vld1.32 {d30[], d31[]}, [%[kYToRgb]] \n" - -#define YUVTORGB \ - "vmull.u8 q8, d2, d24 \n" /* u/v B/R component */ \ - "vmull.u8 q9, d2, d25 \n" /* u/v G component */ \ - "vmovl.u8 q0, d0 \n" /* Y */ \ - "vmovl.s16 q10, d1 \n" \ - "vmovl.s16 q0, d0 \n" \ - "vmul.s32 q10, q10, q15 \n" \ - "vmul.s32 q0, q0, q15 \n" \ - "vqshrun.s32 d0, q0, #16 \n" \ - "vqshrun.s32 d1, q10, #16 \n" /* Y */ \ - "vadd.s16 d18, d19 \n" \ - "vshll.u16 q1, d16, #16 \n" /* Replicate u * UB */ \ - "vshll.u16 q10, d17, #16 \n" /* Replicate v * VR */ \ - "vshll.u16 q3, d18, #16 \n" /* Replicate (v*VG + u*UG)*/ \ - "vaddw.u16 q1, q1, d16 \n" \ - "vaddw.u16 q10, q10, d17 \n" \ - "vaddw.u16 q3, q3, d18 \n" \ - "vqadd.s16 q8, q0, q13 \n" /* B */ \ - "vqadd.s16 q9, q0, q14 \n" /* R */ \ - "vqadd.s16 q0, q0, q4 \n" /* G */ \ - "vqadd.s16 q8, q8, q1 \n" /* B */ \ - "vqadd.s16 q9, q9, q10 \n" /* R */ \ - "vqsub.s16 q0, q0, q3 \n" /* G */ \ - "vqshrun.s16 d20, q8, #6 \n" /* B */ \ - "vqshrun.s16 d22, q9, #6 \n" /* R */ \ - "vqshrun.s16 d21, q0, #6 \n" /* G */ + "vsli.u16 q0, q0, #8 \n" \ + "vsli.u16 d2, d2, #8 \n" \ + "vsri.u16 d3, d3, #8 \n" + +#define YUVTORGB_SETUP \ + "vld4.8 {d26[], d27[], d28[], d29[]}, [%[kUVCoeff]] \n" \ + "vld1.16 {d31[]}, [%[kRGBCoeffBias]]! \n" \ + "vld1.16 {d20[], d21[]}, [%[kRGBCoeffBias]]! \n" \ + "vld1.16 {d22[], d23[]}, [%[kRGBCoeffBias]]! \n" \ + "vld1.16 {d24[], d25[]}, [%[kRGBCoeffBias]] \n" + +// q0: B uint16x8_t +// q1: G uint16x8_t +// q2: R uint16x8_t + +// Convert from YUV to 2.14 fixed point RGB +#define YUVTORGB \ + "vmull.u16 q2, d1, d31 \n" \ + "vmull.u8 q8, d3, d29 \n" /* DGV */ \ + "vmull.u16 q0, d0, d31 \n" \ + "vmlal.u8 q8, d2, d28 \n" /* DG */ \ + "vqshrn.u32 d0, q0, #16 \n" \ + "vqshrn.u32 d1, q2, #16 \n" /* Y */ \ + "vmull.u8 q9, d2, d26 \n" /* DB */ \ + "vmull.u8 q2, d3, d27 \n" /* DR */ \ + "vadd.u16 q4, q0, q11 \n" /* G */ \ + "vadd.u16 q2, q0, q2 \n" /* R */ \ + "vadd.u16 q0, q0, q9 \n" /* B */ \ + "vqsub.u16 q1, q4, q8 \n" /* G */ \ + "vqsub.u16 q0, q0, q10 \n" /* B */ \ + "vqsub.u16 q2, q2, q12 \n" /* R */ + +// Convert from 2.14 fixed point RGB To 8 bit RGB +#define RGBTORGB8 \ + "vqshrn.u16 d4, q2, #6 \n" /* R */ \ + "vqshrn.u16 d2, q1, #6 \n" /* G */ \ + "vqshrn.u16 d0, q0, #6 \n" /* B */ + +#define YUVTORGB_REGS \ + "q0", "q1", "q2", "q4", "q8", "q9", "q10", "q11", "q12", "q13", "q14", "d31" + +#define STORERGBA \ + "vmov.u8 d1, d0 \n" \ + "vmov.u8 d3, d4 \n" \ + "vmov.u8 d0, d6 \n" \ + "vst4.8 {d0, d1, d2, d3}, [%[dst_rgba]]! \n" void I444ToARGBRow_NEON(const uint8_t* src_y, const uint8_t* src_u, @@ -114,22 +140,43 @@ void I444ToARGBRow_NEON(const uint8_t* src_y, int width) { asm volatile( YUVTORGB_SETUP - "vmov.u8 d23, #255 \n" + "vmov.u8 d6, #255 \n" "1: \n" READYUV444 YUVTORGB - "subs %4, %4, #8 \n" - "vst4.8 {d20, d21, d22, d23}, [%3]! 
\n" + RGBTORGB8 + "subs %[width], %[width], #8 \n" + "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n" "bgt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(dst_argb), // %3 - "+r"(width) // %4 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11", - "q12", "q13", "q14", "q15"); + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "d6"); +} + +void I444ToRGB24Row_NEON(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_rgb24, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile( + YUVTORGB_SETUP + "1: \n" READYUV444 YUVTORGB + RGBTORGB8 + "subs %[width], %[width], #8 \n" + "vst3.8 {d0, d2, d4}, [%[dst_rgb24]]! \n" + "bgt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_rgb24] "+r"(dst_rgb24), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS); } void I422ToARGBRow_NEON(const uint8_t* src_y, @@ -140,22 +187,46 @@ void I422ToARGBRow_NEON(const uint8_t* src_y, int width) { asm volatile( YUVTORGB_SETUP - "vmov.u8 d23, #255 \n" + "vmov.u8 d6, #255 \n" "1: \n" READYUV422 YUVTORGB - "subs %4, %4, #8 \n" - "vst4.8 {d20, d21, d22, d23}, [%3]! \n" + RGBTORGB8 + "subs %[width], %[width], #8 \n" + "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n" "bgt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(dst_argb), // %3 - "+r"(width) // %4 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11", - "q12", "q13", "q14", "q15"); + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "d6"); +} + +void I444AlphaToARGBRow_NEON(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + const uint8_t* src_a, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile( + YUVTORGB_SETUP + "1: \n" READYUV444 YUVTORGB + RGBTORGB8 + "vld1.8 {d6}, [%[src_a]]! \n" + "subs %[width], %[width], #8 \n" + "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! 
\n" + "bgt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [src_a] "+r"(src_a), // %[src_a] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "d6"); } void I422AlphaToARGBRow_NEON(const uint8_t* src_y, @@ -168,22 +239,20 @@ void I422AlphaToARGBRow_NEON(const uint8_t* src_y, asm volatile( YUVTORGB_SETUP "1: \n" READYUV422 YUVTORGB - "subs %5, %5, #8 \n" - "vld1.8 {d23}, [%3]! \n" - "vst4.8 {d20, d21, d22, d23}, [%4]! \n" - "bgt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(src_a), // %3 - "+r"(dst_argb), // %4 - "+r"(width) // %5 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11", - "q12", "q13", "q14", "q15"); + RGBTORGB8 + "vld1.8 {d6}, [%[src_a]]! \n" + "subs %[width], %[width], #8 \n" + "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n" + "bgt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [src_a] "+r"(src_a), // %[src_a] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "d6"); } void I422ToRGBARow_NEON(const uint8_t* src_y, @@ -194,22 +263,18 @@ void I422ToRGBARow_NEON(const uint8_t* src_y, int width) { asm volatile( YUVTORGB_SETUP + "vmov.u8 d6, #255 \n" "1: \n" READYUV422 YUVTORGB - "subs %4, %4, #8 \n" - "vmov.u8 d19, #255 \n" // YUVTORGB modified d19 - "vst4.8 {d19, d20, d21, d22}, [%3]! \n" + RGBTORGB8 "subs %[width], %[width], #8 \n" STORERGBA "bgt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(dst_rgba), // %3 - "+r"(width) // %4 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11", - "q12", "q13", "q14", "q15"); + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_rgba] "+r"(dst_rgba), // %[dst_rgba] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "d6"); } void I422ToRGB24Row_NEON(const uint8_t* src_y, @@ -220,29 +285,28 @@ void I422ToRGB24Row_NEON(const uint8_t* src_y, int width) { asm volatile( YUVTORGB_SETUP + "vmov.u8 d6, #255 \n" "1: \n" READYUV422 YUVTORGB - "subs %4, %4, #8 \n" - "vst3.8 {d20, d21, d22}, [%3]! \n" + RGBTORGB8 + "subs %[width], %[width], #8 \n" + "vst3.8 {d0, d2, d4}, [%[dst_rgb24]]! 
\n" "bgt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(dst_rgb24), // %3 - "+r"(width) // %4 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11", - "q12", "q13", "q14", "q15"); + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_rgb24] "+r"(dst_rgb24), // %[dst_rgb24] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS); } #define ARGBTORGB565 \ - "vshll.u8 q0, d22, #8 \n" /* R */ \ - "vshll.u8 q8, d21, #8 \n" /* G */ \ - "vshll.u8 q9, d20, #8 \n" /* B */ \ - "vsri.16 q0, q8, #5 \n" /* RG */ \ - "vsri.16 q0, q9, #11 \n" /* RGB */ + "vshll.u8 q2, d4, #8 \n" /* R */ \ + "vshll.u8 q1, d2, #8 \n" /* G */ \ + "vshll.u8 q0, d0, #8 \n" /* B */ \ + "vsri.16 q2, q1, #5 \n" /* RG */ \ + "vsri.16 q2, q0, #11 \n" /* RGB */ void I422ToRGB565Row_NEON(const uint8_t* src_y, const uint8_t* src_u, @@ -252,31 +316,29 @@ void I422ToRGB565Row_NEON(const uint8_t* src_y, int width) { asm volatile( YUVTORGB_SETUP + "vmov.u8 d6, #255 \n" "1: \n" READYUV422 YUVTORGB - "subs %4, %4, #8 \n" ARGBTORGB565 - "vst1.8 {q0}, [%3]! \n" // store 8 pixels RGB565. - "bgt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(dst_rgb565), // %3 - "+r"(width) // %4 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11", - "q12", "q13", "q14", "q15"); + RGBTORGB8 "subs %[width], %[width], #8 \n" ARGBTORGB565 + "vst1.8 {q2}, [%[dst_rgb565]]! \n" // store 8 pixels RGB565. + "bgt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_rgb565] "+r"(dst_rgb565), // %[dst_rgb565] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS); } #define ARGBTOARGB1555 \ - "vshll.u8 q0, d23, #8 \n" /* A */ \ - "vshll.u8 q8, d22, #8 \n" /* R */ \ - "vshll.u8 q9, d21, #8 \n" /* G */ \ - "vshll.u8 q10, d20, #8 \n" /* B */ \ - "vsri.16 q0, q8, #1 \n" /* AR */ \ - "vsri.16 q0, q9, #6 \n" /* ARG */ \ - "vsri.16 q0, q10, #11 \n" /* ARGB */ + "vshll.u8 q3, d6, #8 \n" /* A */ \ + "vshll.u8 q2, d4, #8 \n" /* R */ \ + "vshll.u8 q1, d2, #8 \n" /* G */ \ + "vshll.u8 q0, d0, #8 \n" /* B */ \ + "vsri.16 q3, q2, #1 \n" /* AR */ \ + "vsri.16 q3, q1, #6 \n" /* ARG */ \ + "vsri.16 q3, q0, #11 \n" /* ARGB */ void I422ToARGB1555Row_NEON(const uint8_t* src_y, const uint8_t* src_u, @@ -287,30 +349,28 @@ void I422ToARGB1555Row_NEON(const uint8_t* src_y, asm volatile( YUVTORGB_SETUP "1: \n" READYUV422 YUVTORGB - "subs %4, %4, #8 \n" - "vmov.u8 d23, #255 \n" ARGBTOARGB1555 - "vst1.8 {q0}, [%3]! 
\n" // store 8 pixels - "bgt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(dst_argb1555), // %3 - "+r"(width) // %4 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11", - "q12", "q13", "q14", "q15"); + RGBTORGB8 + "subs %[width], %[width], #8 \n" + "vmov.u8 d6, #0xff \n" ARGBTOARGB1555 + "vst1.8 {q3}, [%[dst_argb1555]]! \n" // store 8 pixels RGB1555. + "bgt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_argb1555] "+r"(dst_argb1555), // %[dst_argb1555] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "q3"); } #define ARGBTOARGB4444 \ - "vshr.u8 d20, d20, #4 \n" /* B */ \ - "vbic.32 d21, d21, d4 \n" /* G */ \ - "vshr.u8 d22, d22, #4 \n" /* R */ \ - "vbic.32 d23, d23, d4 \n" /* A */ \ - "vorr d0, d20, d21 \n" /* BG */ \ - "vorr d1, d22, d23 \n" /* RA */ \ + "vshr.u8 d0, d0, #4 \n" /* B */ \ + "vbic.32 d2, d2, d7 \n" /* G */ \ + "vshr.u8 d4, d4, #4 \n" /* R */ \ + "vbic.32 d6, d6, d7 \n" /* A */ \ + "vorr d0, d0, d2 \n" /* BG */ \ + "vorr d1, d4, d6 \n" /* RA */ \ "vzip.u8 d0, d1 \n" /* BGRA */ void I422ToARGB4444Row_NEON(const uint8_t* src_y, @@ -321,25 +381,21 @@ void I422ToARGB4444Row_NEON(const uint8_t* src_y, int width) { asm volatile( YUVTORGB_SETUP - "vmov.u8 d4, #0x0f \n" // vbic bits to clear - "1: \n" - - READYUV422 YUVTORGB - "subs %4, %4, #8 \n" - "vmov.u8 d23, #255 \n" ARGBTOARGB4444 - "vst1.8 {q0}, [%3]! \n" // store 8 pixels - "bgt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(dst_argb4444), // %3 - "+r"(width) // %4 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11", - "q12", "q13", "q14", "q15"); + "vmov.u8 d6, #255 \n" + "vmov.u8 d7, #0x0f \n" // vbic bits to clear + "1: \n" READYUV422 YUVTORGB + RGBTORGB8 + "subs %[width], %[width], #8 \n" ARGBTOARGB4444 + "vst1.8 {q0}, [%[dst_argb4444]]! \n" // store 8 pixels + "bgt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_argb4444] "+r"(dst_argb4444), // %[dst_argb4444] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "q3"); } void I400ToARGBRow_NEON(const uint8_t* src_y, @@ -348,20 +404,18 @@ void I400ToARGBRow_NEON(const uint8_t* src_y, int width) { asm volatile( YUVTORGB_SETUP - "vmov.u8 d23, #255 \n" + "vmov.u8 d6, #255 \n" "1: \n" READYUV400 YUVTORGB - "subs %2, %2, #8 \n" - "vst4.8 {d20, d21, d22, d23}, [%1]! \n" + RGBTORGB8 + "subs %[width], %[width], #8 \n" + "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! 
\n" "bgt 1b \n" - : "+r"(src_y), // %0 - "+r"(dst_argb), // %1 - "+r"(width) // %2 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11", - "q12", "q13", "q14", "q15"); + : [src_y] "+r"(src_y), // %[src_y] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "d6"); } void J400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width) { @@ -386,22 +440,20 @@ void NV12ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width) { - asm volatile(YUVTORGB_SETUP - "vmov.u8 d23, #255 \n" - "1: \n" READNV12 YUVTORGB - "subs %3, %3, #8 \n" - "vst4.8 {d20, d21, d22, d23}, [%2]! \n" - "bgt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_uv), // %1 - "+r"(dst_argb), // %2 - "+r"(width) // %3 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", - "q10", "q11", "q12", "q13", "q14", "q15"); + asm volatile( + YUVTORGB_SETUP + "vmov.u8 d6, #255 \n" + "1: \n" READNV12 YUVTORGB RGBTORGB8 + "subs %[width], %[width], #8 \n" + "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n" + "bgt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_uv] "+r"(src_uv), // %[src_uv] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "d6"); } void NV21ToARGBRow_NEON(const uint8_t* src_y, @@ -409,22 +461,20 @@ void NV21ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width) { - asm volatile(YUVTORGB_SETUP - "vmov.u8 d23, #255 \n" - "1: \n" READNV21 YUVTORGB - "subs %3, %3, #8 \n" - "vst4.8 {d20, d21, d22, d23}, [%2]! \n" - "bgt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_vu), // %1 - "+r"(dst_argb), // %2 - "+r"(width) // %3 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", - "q10", "q11", "q12", "q13", "q14", "q15"); + asm volatile( + YUVTORGB_SETUP + "vmov.u8 d6, #255 \n" + "1: \n" READNV21 YUVTORGB RGBTORGB8 + "subs %[width], %[width], #8 \n" + "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n" + "bgt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_vu] "+r"(src_vu), // %[src_vu] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "d6"); } void NV12ToRGB24Row_NEON(const uint8_t* src_y, @@ -433,25 +483,19 @@ void NV12ToRGB24Row_NEON(const uint8_t* src_y, const struct YuvConstants* yuvconstants, int width) { asm volatile( - YUVTORGB_SETUP - - "1: \n" - - READNV12 YUVTORGB - "subs %3, %3, #8 \n" - "vst3.8 {d20, d21, d22}, [%2]! 
\n" + "vmov.u8 d6, #255 \n" + "1: \n" READNV12 YUVTORGB RGBTORGB8 + "subs %[width], %[width], #8 \n" + "vst3.8 {d0, d2, d4}, [%[dst_rgb24]]! \n" "bgt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_uv), // %1 - "+r"(dst_rgb24), // %2 - "+r"(width) // %3 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11", - "q12", "q13", "q14", "q15"); + : [src_y] "+r"(src_y), // %[src_y] + [src_uv] "+r"(src_uv), // %[src_uv] + [dst_rgb24] "+r"(dst_rgb24), // %[dst_rgb24] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS); } void NV21ToRGB24Row_NEON(const uint8_t* src_y, @@ -460,25 +504,19 @@ void NV21ToRGB24Row_NEON(const uint8_t* src_y, const struct YuvConstants* yuvconstants, int width) { asm volatile( - YUVTORGB_SETUP - - "1: \n" - - READNV21 YUVTORGB - "subs %3, %3, #8 \n" - "vst3.8 {d20, d21, d22}, [%2]! \n" + "vmov.u8 d6, #255 \n" + "1: \n" READNV21 YUVTORGB RGBTORGB8 + "subs %[width], %[width], #8 \n" + "vst3.8 {d0, d2, d4}, [%[dst_rgb24]]! \n" "bgt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_vu), // %1 - "+r"(dst_rgb24), // %2 - "+r"(width) // %3 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11", - "q12", "q13", "q14", "q15"); + : [src_y] "+r"(src_y), // %[src_y] + [src_vu] "+r"(src_vu), // %[src_vu] + [dst_rgb24] "+r"(dst_rgb24), // %[dst_rgb24] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS); } void NV12ToRGB565Row_NEON(const uint8_t* src_y, @@ -488,62 +526,56 @@ void NV12ToRGB565Row_NEON(const uint8_t* src_y, int width) { asm volatile( YUVTORGB_SETUP - "1: \n" READNV12 YUVTORGB - "subs %3, %3, #8 \n" ARGBTORGB565 - "vst1.8 {q0}, [%2]! \n" // store 8 pixels RGB565. + "vmov.u8 d6, #255 \n" + "1: \n" READNV12 YUVTORGB RGBTORGB8 + "subs %[width], %[width], #8 \n" ARGBTORGB565 + "vst1.8 {q2}, [%[dst_rgb565]]! \n" // store 8 pixels RGB565. "bgt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_uv), // %1 - "+r"(dst_rgb565), // %2 - "+r"(width) // %3 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11", - "q12", "q13", "q14", "q15"); + : [src_y] "+r"(src_y), // %[src_y] + [src_uv] "+r"(src_uv), // %[src_uv] + [dst_rgb565] "+r"(dst_rgb565), // %[dst_rgb565] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS); } void YUY2ToARGBRow_NEON(const uint8_t* src_yuy2, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width) { - asm volatile(YUVTORGB_SETUP - "vmov.u8 d23, #255 \n" - "1: \n" READYUY2 YUVTORGB - "subs %2, %2, #8 \n" - "vst4.8 {d20, d21, d22, d23}, [%1]! 
\n" - "bgt 1b \n" - : "+r"(src_yuy2), // %0 - "+r"(dst_argb), // %1 - "+r"(width) // %2 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", - "q10", "q11", "q12", "q13", "q14", "q15"); + asm volatile( + YUVTORGB_SETUP + "vmov.u8 d6, #255 \n" + "1: \n" READYUY2 YUVTORGB RGBTORGB8 + "subs %[width], %[width], #8 \n" + "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n" + "bgt 1b \n" + : [src_yuy2] "+r"(src_yuy2), // %[src_yuy2] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "d6"); } void UYVYToARGBRow_NEON(const uint8_t* src_uyvy, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width) { - asm volatile(YUVTORGB_SETUP - "vmov.u8 d23, #255 \n" - "1: \n" READUYVY YUVTORGB - "subs %2, %2, #8 \n" - "vst4.8 {d20, d21, d22, d23}, [%1]! \n" - "bgt 1b \n" - : "+r"(src_uyvy), // %0 - "+r"(dst_argb), // %1 - "+r"(width) // %2 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", - "q10", "q11", "q12", "q13", "q14", "q15"); + asm volatile( + YUVTORGB_SETUP + "vmov.u8 d6, #255 \n" + "1: \n" READUYVY YUVTORGB RGBTORGB8 + "subs %[width], %[width], #8 \n" + "vst4.8 {d0, d2, d4, d6}, [%[dst_argb]]! \n" + "bgt 1b \n" + : [src_uyvy] "+r"(src_uyvy), // %[src_uyvy] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "d6"); } // Reads 16 pairs of UV and write even values to dst_u and odd to dst_v. @@ -567,6 +599,181 @@ void SplitUVRow_NEON(const uint8_t* src_uv, ); } +// Reads 16 byte Y's from tile and writes out 16 Y's. +// MM21 Y tiles are 16x32 so src_tile_stride = 512 bytes +// MM21 UV tiles are 8x16 so src_tile_stride = 256 bytes +// width measured in bytes so 8 UV = 16. +void DetileRow_NEON(const uint8_t* src, + ptrdiff_t src_tile_stride, + uint8_t* dst, + int width) { + asm volatile( + "1: \n" + "vld1.8 {q0}, [%0], %3 \n" // load 16 bytes + "subs %2, %2, #16 \n" // 16 processed per loop + "pld [%0, #1792] \n" + "vst1.8 {q0}, [%1]! \n" // store 16 bytes + "bgt 1b \n" + : "+r"(src), // %0 + "+r"(dst), // %1 + "+r"(width) // %2 + : "r"(src_tile_stride) // %3 + : "cc", "memory", "q0" // Clobber List + ); +} + +// Reads 16 byte Y's of 16 bits from tile and writes out 16 Y's. +void DetileRow_16_NEON(const uint16_t* src, + ptrdiff_t src_tile_stride, + uint16_t* dst, + int width) { + asm volatile( + "1: \n" + "vld1.16 {q0, q1}, [%0], %3 \n" // load 16 pixels + "subs %2, %2, #16 \n" // 16 processed per loop + "pld [%0, #3584] \n" + "vst1.16 {q0, q1}, [%1]! \n" // store 16 pixels + "bgt 1b \n" + : "+r"(src), // %0 + "+r"(dst), // %1 + "+r"(width) // %2 + : "r"(src_tile_stride * 2) // %3 + : "cc", "memory", "q0", "q1" // Clobber List + ); +} + +// Read 16 bytes of UV, detile, and write 8 bytes of U and 8 bytes of V. 
+void DetileSplitUVRow_NEON(const uint8_t* src_uv, + ptrdiff_t src_tile_stride, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + asm volatile( + "1: \n" + "vld2.8 {d0, d1}, [%0], %4 \n" + "subs %3, %3, #16 \n" + "pld [%0, #1792] \n" + "vst1.8 {d0}, [%1]! \n" + "vst1.8 {d1}, [%2]! \n" + "bgt 1b \n" + : "+r"(src_uv), // %0 + "+r"(dst_u), // %1 + "+r"(dst_v), // %2 + "+r"(width) // %3 + : "r"(src_tile_stride) // %4 + : "cc", "memory", "d0", "d1" // Clobber List + ); +} + +#if LIBYUV_USE_ST2 +// Read 16 Y, 8 UV, and write 8 YUYV. +void DetileToYUY2_NEON(const uint8_t* src_y, + ptrdiff_t src_y_tile_stride, + const uint8_t* src_uv, + ptrdiff_t src_uv_tile_stride, + uint8_t* dst_yuy2, + int width) { + asm volatile( + "1: \n" + "vld1.8 {q0}, [%0], %4 \n" // Load 16 Y + "pld [%0, #1792] \n" + "vld1.8 {q1}, [%1], %5 \n" // Load 8 UV + "pld [%1, #1792] \n" + "subs %3, %3, #16 \n" + "vst2.8 {q0, q1}, [%2]! \n" + "bgt 1b \n" + : "+r"(src_y), // %0 + "+r"(src_uv), // %1 + "+r"(dst_yuy2), // %2 + "+r"(width) // %3 + : "r"(src_y_tile_stride), // %4 + "r"(src_uv_tile_stride) // %5 + : "cc", "memory", "d0", "d1", "d2", "d3" // Clobber list + ); +} +#else +// Read 16 Y, 8 UV, and write 8 YUYV. +void DetileToYUY2_NEON(const uint8_t* src_y, + ptrdiff_t src_y_tile_stride, + const uint8_t* src_uv, + ptrdiff_t src_uv_tile_stride, + uint8_t* dst_yuy2, + int width) { + asm volatile( + "1: \n" + "vld1.8 {q0}, [%0], %4 \n" // Load 16 Y + "vld1.8 {q1}, [%1], %5 \n" // Load 8 UV + "subs %3, %3, #16 \n" + "pld [%0, #1792] \n" + "vzip.8 q0, q1 \n" + "pld [%1, #1792] \n" + "vst1.8 {q0, q1}, [%2]! \n" + "bgt 1b \n" + : "+r"(src_y), // %0 + "+r"(src_uv), // %1 + "+r"(dst_yuy2), // %2 + "+r"(width) // %3 + : "r"(src_y_tile_stride), // %4 + "r"(src_uv_tile_stride) // %5 + : "cc", "memory", "q0", "q1", "q2", "q3" // Clobber list + ); +} +#endif + +void UnpackMT2T_NEON(const uint8_t* src, uint16_t* dst, size_t size) { + const uint8_t* src_lower_bits = src; + const uint8_t* src_upper_bits = src + 16; + asm volatile( + "1: \n" + "vld4.8 {d1, d3, d5, d7}, [%1]! \n" // Load 32 bytes of upper + // bits. + "vld1.8 {d6}, [%0]! \n" // Load 8 bytes of lower + // bits. + "vshl.u8 d4, d6, #2 \n" // Align lower bits. + "vshl.u8 d2, d6, #4 \n" + "vshl.u8 d0, d6, #6 \n" + "vzip.u8 d0, d1 \n" // Zip lower and upper + // bits together. + "vzip.u8 d2, d3 \n" + "vzip.u8 d4, d5 \n" + "vzip.u8 d6, d7 \n" + "vsri.u16 q0, q0, #10 \n" // Copy upper 6 bits into + // lower 6 bits for better + // accuracy in + // conversions. + "vsri.u16 q1, q1, #10 \n" + "vsri.u16 q2, q2, #10 \n" + "vsri.u16 q3, q3, #10 \n" + "vst4.16 {d0, d2, d4, d6}, [%2]! \n" // Store 32 pixels + "vst4.16 {d1, d3, d5, d7}, [%2]! \n" + "vld4.8 {d1, d3, d5, d7}, [%1]! \n" // Process last 32 pixels + // in the block + "vld1.8 {d6}, [%0]! \n" + "vshl.u8 d4, d6, #2 \n" + "vshl.u8 d2, d6, #4 \n" + "vshl.u8 d0, d6, #6 \n" + "vzip.u8 d0, d1 \n" + "vzip.u8 d2, d3 \n" + "vzip.u8 d4, d5 \n" + "vzip.u8 d6, d7 \n" + "vsri.u16 q0, q0, #10 \n" + "vsri.u16 q1, q1, #10 \n" + "vsri.u16 q2, q2, #10 \n" + "vsri.u16 q3, q3, #10 \n" + "vst4.16 {d0, d2, d4, d6}, [%2]! \n" + "vst4.16 {d1, d3, d5, d7}, [%2]! \n" + "mov %0, %1 \n" + "add %1, %0, #16 \n" + "subs %3, %3, #80 \n" + "bgt 1b \n" + : "+r"(src_lower_bits), // %0 + "+r"(src_upper_bits), // %1 + "+r"(dst), // %2 + "+r"(size) // %3 + : + : "cc", "memory", "q0", "q1", "q2", "q3"); +} + // Reads 16 U's and V's and writes out 16 pairs of UV. 
void MergeUVRow_NEON(const uint8_t* src_u, const uint8_t* src_v, @@ -638,6 +845,333 @@ void MergeRGBRow_NEON(const uint8_t* src_r, ); } +// Reads 16 packed ARGB and write to planar dst_r, dst_g, dst_b, dst_a. +void SplitARGBRow_NEON(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + uint8_t* dst_a, + int width) { + asm volatile( + "1: \n" + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // next 8 ARGB + "subs %5, %5, #16 \n" // 16 processed per loop + "vst1.8 {q0}, [%3]! \n" // store B + "vst1.8 {q1}, [%2]! \n" // store G + "vst1.8 {q2}, [%1]! \n" // store R + "vst1.8 {q3}, [%4]! \n" // store A + "bgt 1b \n" + : "+r"(src_argb), // %0 + "+r"(dst_r), // %1 + "+r"(dst_g), // %2 + "+r"(dst_b), // %3 + "+r"(dst_a), // %4 + "+r"(width) // %5 + : // Input registers + : "cc", "memory", "q0", "q1", "q2", "q3" // Clobber List + ); +} + +// Reads 16 planar R's, G's and B's and writes out 16 packed ARGB at a time +void MergeARGBRow_NEON(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + const uint8_t* src_a, + uint8_t* dst_argb, + int width) { + asm volatile( + "1: \n" + "vld1.8 {q2}, [%0]! \n" // load R + "vld1.8 {q1}, [%1]! \n" // load G + "vld1.8 {q0}, [%2]! \n" // load B + "vld1.8 {q3}, [%3]! \n" // load A + "subs %5, %5, #16 \n" // 16 processed per loop + "vst4.8 {d0, d2, d4, d6}, [%4]! \n" // store 8 ARGB + "vst4.8 {d1, d3, d5, d7}, [%4]! \n" // next 8 ARGB + "bgt 1b \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(src_a), // %3 + "+r"(dst_argb), // %4 + "+r"(width) // %5 + : // Input registers + : "cc", "memory", "q0", "q1", "q2", "q3" // Clobber List + ); +} + +// Reads 16 packed ARGB and write to planar dst_r, dst_g, dst_b. +void SplitXRGBRow_NEON(const uint8_t* src_argb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width) { + asm volatile( + "1: \n" + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // next 8 ARGB + "subs %4, %4, #16 \n" // 16 processed per loop + "vst1.8 {q0}, [%3]! \n" // store B + "vst1.8 {q1}, [%2]! \n" // store G + "vst1.8 {q2}, [%1]! \n" // store R + "bgt 1b \n" + : "+r"(src_argb), // %0 + "+r"(dst_r), // %1 + "+r"(dst_g), // %2 + "+r"(dst_b), // %3 + "+r"(width) // %4 + : // Input registers + : "cc", "memory", "q0", "q1", "q2", "q3" // Clobber List + ); +} + +// Reads 16 planar R's, G's, B's and A's and writes out 16 packed ARGB at a time +void MergeXRGBRow_NEON(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + uint8_t* dst_argb, + int width) { + asm volatile( + "vmov.u8 q3, #255 \n" // load A(255) + "1: \n" + "vld1.8 {q2}, [%0]! \n" // load R + "vld1.8 {q1}, [%1]! \n" // load G + "vld1.8 {q0}, [%2]! \n" // load B + "subs %4, %4, #16 \n" // 16 processed per loop + "vst4.8 {d0, d2, d4, d6}, [%3]! \n" // store 8 ARGB + "vst4.8 {d1, d3, d5, d7}, [%3]! \n" // next 8 ARGB + "bgt 1b \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_argb), // %3 + "+r"(width) // %4 + : // Input registers + : "cc", "memory", "q0", "q1", "q2", "q3" // Clobber List + ); +} + +void MergeXR30Row_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_ar30, + int depth, + int width) { + int shift = 10 - depth; + asm volatile( + "vmov.u32 q14, #1023 \n" + "vdup.32 q15, %5 \n" + "1: \n" + "vld1.16 {d4}, [%2]! \n" // B + "vld1.16 {d2}, [%1]! \n" // G + "vld1.16 {d0}, [%0]! 
\n" // R + "vmovl.u16 q2, d4 \n" // B + "vmovl.u16 q1, d2 \n" // G + "vmovl.u16 q0, d0 \n" // R + "vshl.u32 q2, q2, q15 \n" // 000B + "vshl.u32 q1, q1, q15 \n" + "vshl.u32 q0, q0, q15 \n" + "vmin.u32 q2, q2, q14 \n" + "vmin.u32 q1, q1, q14 \n" + "vmin.u32 q0, q0, q14 \n" + "vsli.u32 q2, q1, #10 \n" // 00GB + "vsli.u32 q2, q0, #20 \n" // 0RGB + "vorr.u32 q2, #0xc0000000 \n" // ARGB (AR30) + "subs %4, %4, #4 \n" + "vst1.8 {q2}, [%3]! \n" + "bgt 1b \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_ar30), // %3 + "+r"(width) // %4 + : "r"(shift) // %5 + : "memory", "cc", "q0", "q1", "q2", "q14", "q15"); +} + +void MergeXR30Row_10_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_ar30, + int /* depth */, + int width) { + asm volatile( + "vmov.u32 q14, #1023 \n" + "1: \n" + "vld1.16 {d4}, [%2]! \n" // B + "vld1.16 {d2}, [%1]! \n" // G + "vld1.16 {d0}, [%0]! \n" // R + "vmovl.u16 q2, d4 \n" // 000B + "vmovl.u16 q1, d2 \n" // G + "vmovl.u16 q0, d0 \n" // R + "vmin.u32 q2, q2, q14 \n" + "vmin.u32 q1, q1, q14 \n" + "vmin.u32 q0, q0, q14 \n" + "vsli.u32 q2, q1, #10 \n" // 00GB + "vsli.u32 q2, q0, #20 \n" // 0RGB + "vorr.u32 q2, #0xc0000000 \n" // ARGB (AR30) + "subs %4, %4, #4 \n" + "vst1.8 {q2}, [%3]! \n" + "bgt 1b \n" + "3: \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_ar30), // %3 + "+r"(width) // %4 + : + : "memory", "cc", "q0", "q1", "q2", "q14"); +} + +void MergeAR64Row_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + const uint16_t* src_a, + uint16_t* dst_ar64, + int depth, + int width) { + int shift = 16 - depth; + int mask = (1 << depth) - 1; + asm volatile( + + "vdup.u16 q15, %6 \n" + "vdup.u16 q14, %7 \n" + "1: \n" + "vld1.16 {q2}, [%0]! \n" // R + "vld1.16 {q1}, [%1]! \n" // G + "vld1.16 {q0}, [%2]! \n" // B + "vld1.16 {q3}, [%3]! \n" // A + "vmin.u16 q2, q2, q14 \n" + "vmin.u16 q1, q1, q14 \n" + "vmin.u16 q0, q0, q14 \n" + "vmin.u16 q3, q3, q14 \n" + "vshl.u16 q2, q2, q15 \n" + "vshl.u16 q1, q1, q15 \n" + "vshl.u16 q0, q0, q15 \n" + "vshl.u16 q3, q3, q15 \n" + "subs %5, %5, #8 \n" + "vst4.16 {d0, d2, d4, d6}, [%4]! \n" + "vst4.16 {d1, d3, d5, d7}, [%4]! \n" + "bgt 1b \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(src_a), // %3 + "+r"(dst_ar64), // %4 + "+r"(width) // %5 + : "r"(shift), // %6 + "r"(mask) // %7 + : "memory", "cc", "q0", "q1", "q2", "q3", "q15"); +} + +void MergeXR64Row_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint16_t* dst_ar64, + int depth, + int width) { + int shift = 16 - depth; + int mask = (1 << depth) - 1; + asm volatile( + + "vmov.u8 q3, #0xff \n" // A (0xffff) + "vdup.u16 q15, %5 \n" + "vdup.u16 q14, %6 \n" + "1: \n" + "vld1.16 {q2}, [%0]! \n" // R + "vld1.16 {q1}, [%1]! \n" // G + "vld1.16 {q0}, [%2]! \n" // B + "vmin.u16 q2, q2, q14 \n" + "vmin.u16 q1, q1, q14 \n" + "vmin.u16 q0, q0, q14 \n" + "vshl.u16 q2, q2, q15 \n" + "vshl.u16 q1, q1, q15 \n" + "vshl.u16 q0, q0, q15 \n" + "subs %4, %4, #8 \n" + "vst4.16 {d0, d2, d4, d6}, [%3]! \n" + "vst4.16 {d1, d3, d5, d7}, [%3]! 
\n" + "bgt 1b \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_ar64), // %3 + "+r"(width) // %4 + : "r"(shift), // %5 + "r"(mask) // %6 + : "memory", "cc", "q0", "q1", "q2", "q3", "q15"); +} + +void MergeARGB16To8Row_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + const uint16_t* src_a, + uint8_t* dst_argb, + int depth, + int width) { + int shift = 8 - depth; + asm volatile( + + "vdup.16 q15, %6 \n" + "1: \n" + "vld1.16 {q2}, [%0]! \n" // R + "vld1.16 {q1}, [%1]! \n" // G + "vld1.16 {q0}, [%2]! \n" // B + "vld1.16 {q3}, [%3]! \n" // A + "vshl.u16 q2, q2, q15 \n" + "vshl.u16 q1, q1, q15 \n" + "vshl.u16 q0, q0, q15 \n" + "vshl.u16 q3, q3, q15 \n" + "vqmovn.u16 d0, q0 \n" + "vqmovn.u16 d1, q1 \n" + "vqmovn.u16 d2, q2 \n" + "vqmovn.u16 d3, q3 \n" + "subs %5, %5, #8 \n" + "vst4.8 {d0, d1, d2, d3}, [%4]! \n" + "bgt 1b \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(src_a), // %3 + "+r"(dst_argb), // %4 + "+r"(width) // %5 + : "r"(shift) // %6 + : "memory", "cc", "q0", "q1", "q2", "q3", "q15"); +} + +void MergeXRGB16To8Row_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_argb, + int depth, + int width) { + int shift = 8 - depth; + asm volatile( + + "vdup.16 q15, %5 \n" + "vmov.u8 d6, #0xff \n" // A (0xff) + "1: \n" + "vld1.16 {q2}, [%0]! \n" // R + "vld1.16 {q1}, [%1]! \n" // G + "vld1.16 {q0}, [%2]! \n" // B + "vshl.u16 q2, q2, q15 \n" + "vshl.u16 q1, q1, q15 \n" + "vshl.u16 q0, q0, q15 \n" + "vqmovn.u16 d5, q2 \n" + "vqmovn.u16 d4, q1 \n" + "vqmovn.u16 d3, q0 \n" + "subs %4, %4, #8 \n" + "vst4.u8 {d3, d4, d5, d6}, [%3]! \n" + "bgt 1b \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_argb), // %3 + "+r"(width) // %4 + : "r"(shift) // %5 + : "memory", "cc", "q0", "q1", "q2", "d6", "q15"); +} + // Copy multiple of 32. vld4.8 allow unaligned and is fastest on a15. void CopyRow_NEON(const uint8_t* src, uint8_t* dst, int width) { asm volatile( @@ -969,16 +1503,17 @@ void ARGBToRGB24Row_NEON(const uint8_t* src_argb, int width) { asm volatile( "1: \n" - "vld4.8 {d1, d2, d3, d4}, [%0]! \n" // load 8 pixels of ARGB. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vst3.8 {d1, d2, d3}, [%1]! \n" // store 8 pixels of - // RGB24. + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 16 pixels of ARGB. + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" + "subs %2, %2, #16 \n" // 16 processed per loop. + "vst3.8 {d0, d2, d4}, [%1]! \n" // store 16 RGB24 pixels. + "vst3.8 {d1, d3, d5}, [%1]! \n" "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_rgb24), // %1 "+r"(width) // %2 : - : "cc", "memory", "d1", "d2", "d3", "d4" // Clobber List + : "cc", "memory", "q0", "q1", "q2", "q3" // Clobber List ); } @@ -1122,6 +1657,29 @@ void UYVYToUVRow_NEON(const uint8_t* src_uyvy, ); } +void YUY2ToNVUVRow_NEON(const uint8_t* src_yuy2, + int stride_yuy2, + uint8_t* dst_uv, + int width) { + asm volatile( + "add %1, %0, %1 \n" // stride + src_yuy2 + "1: \n" + "vld2.8 {q0, q1}, [%0]! \n" // load 16 pixels of YUY2. + "subs %3, %3, #16 \n" // 16 pixels = 8 UVs. + "vld2.8 {q2, q3}, [%1]! \n" // load next row YUY2. + "vrhadd.u8 q4, q1, q3 \n" // average rows of UV + "vst1.8 {q4}, [%2]! \n" // store 8 UV. + "bgt 1b \n" + : "+r"(src_yuy2), // %0 + "+r"(stride_yuy2), // %1 + "+r"(dst_uv), // %2 + "+r"(width) // %3 + : + : "cc", "memory", "d0", "d1", "d2", "d3", "d4", "d5", "d6", + "d7" // Clobber List + ); +} + // For BGRAToARGB, ABGRToARGB, RGBAToARGB, and ARGBToRGBA. 
void ARGBShuffleRow_NEON(const uint8_t* src_argb, uint8_t* dst_argb, @@ -1193,16 +1751,16 @@ void ARGBToRGB565Row_NEON(const uint8_t* src_argb, int width) { asm volatile( "1: \n" - "vld4.8 {d20, d21, d22, d23}, [%0]! \n" // load 8 pixels of ARGB. + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 pixels of ARGB. "subs %2, %2, #8 \n" // 8 processed per loop. ARGBTORGB565 - "vst1.8 {q0}, [%1]! \n" // store 8 pixels RGB565. + "vst1.8 {q2}, [%1]! \n" // store 8 pixels RGB565. "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_rgb565), // %1 "+r"(width) // %2 : - : "cc", "memory", "q0", "q8", "q9", "q10", "q11"); + : "cc", "memory", "q0", "q1", "q2", "d6"); } void ARGBToRGB565DitherRow_NEON(const uint8_t* src_argb, @@ -1210,21 +1768,21 @@ void ARGBToRGB565DitherRow_NEON(const uint8_t* src_argb, const uint32_t dither4, int width) { asm volatile( - "vdup.32 d2, %2 \n" // dither4 + "vdup.32 d7, %2 \n" // dither4 "1: \n" - "vld4.8 {d20, d21, d22, d23}, [%1]! \n" // load 8 pixels of ARGB. + "vld4.8 {d0, d2, d4, d6}, [%1]! \n" // load 8 pixels of ARGB. "subs %3, %3, #8 \n" // 8 processed per loop. - "vqadd.u8 d20, d20, d2 \n" - "vqadd.u8 d21, d21, d2 \n" - "vqadd.u8 d22, d22, d2 \n" // add for dither + "vqadd.u8 d0, d0, d7 \n" + "vqadd.u8 d2, d2, d7 \n" + "vqadd.u8 d4, d4, d7 \n" // add for dither ARGBTORGB565 - "vst1.8 {q0}, [%0]! \n" // store 8 RGB565. + "vst1.8 {q2}, [%0]! \n" // store 8 RGB565. "bgt 1b \n" : "+r"(dst_rgb) // %0 : "r"(src_argb), // %1 "r"(dither4), // %2 "r"(width) // %3 - : "cc", "memory", "q0", "q1", "q8", "q9", "q10", "q11"); + : "cc", "memory", "q0", "q1", "q2", "q3"); } void ARGBToARGB1555Row_NEON(const uint8_t* src_argb, @@ -1232,26 +1790,26 @@ void ARGBToARGB1555Row_NEON(const uint8_t* src_argb, int width) { asm volatile( "1: \n" - "vld4.8 {d20, d21, d22, d23}, [%0]! \n" // load 8 pixels of ARGB. + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 pixels of ARGB. "subs %2, %2, #8 \n" // 8 processed per loop. ARGBTOARGB1555 - "vst1.8 {q0}, [%1]! \n" // store 8 ARGB1555. + "vst1.8 {q3}, [%1]! \n" // store 8 ARGB1555. "bgt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb1555), // %1 "+r"(width) // %2 : - : "cc", "memory", "q0", "q8", "q9", "q10", "q11"); + : "cc", "memory", "q0", "q1", "q2", "q3"); } void ARGBToARGB4444Row_NEON(const uint8_t* src_argb, uint8_t* dst_argb4444, int width) { asm volatile( - "vmov.u8 d4, #0x0f \n" // bits to clear with + "vmov.u8 d7, #0x0f \n" // bits to clear with // vbic. "1: \n" - "vld4.8 {d20, d21, d22, d23}, [%0]! \n" // load 8 pixels of ARGB. + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 pixels of ARGB. "subs %2, %2, #8 \n" // 8 processed per loop. ARGBTOARGB4444 "vst1.8 {q0}, [%1]! \n" // store 8 ARGB4444. @@ -1260,30 +1818,7 @@ void ARGBToARGB4444Row_NEON(const uint8_t* src_argb, "+r"(dst_argb4444), // %1 "+r"(width) // %2 : - : "cc", "memory", "q0", "q8", "q9", "q10", "q11"); -} - -void ARGBToYRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { - asm volatile( - "vmov.u8 d24, #25 \n" // B * 0.1016 coefficient - "vmov.u8 d25, #129 \n" // G * 0.5078 coefficient - "vmov.u8 d26, #66 \n" // R * 0.2578 coefficient - "vmov.u8 d27, #16 \n" // Add 16 constant - "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 ARGB pixels. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q2, d0, d24 \n" // B - "vmlal.u8 q2, d1, d25 \n" // G - "vmlal.u8 q2, d2, d26 \n" // R - "vqrshrn.u16 d0, q2, #8 \n" // 16 bit to 8 bit Y - "vqadd.u8 d0, d27 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. 
- "bgt 1b \n" - : "+r"(src_argb), // %0 - "+r"(dst_y), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "q0", "q1", "q2", "q12", "q13"); + : "cc", "memory", "q0", "q1", "q2", "q3"); } void ARGBExtractAlphaRow_NEON(const uint8_t* src_argb, @@ -1304,48 +1839,6 @@ void ARGBExtractAlphaRow_NEON(const uint8_t* src_argb, ); } -void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { - asm volatile( - "vmov.u8 d24, #29 \n" // B * 0.1140 coefficient - "vmov.u8 d25, #150 \n" // G * 0.5870 coefficient - "vmov.u8 d26, #77 \n" // R * 0.2990 coefficient - "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 ARGB pixels. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q2, d0, d24 \n" // B - "vmlal.u8 q2, d1, d25 \n" // G - "vmlal.u8 q2, d2, d26 \n" // R - "vqrshrn.u16 d0, q2, #8 \n" // 16 bit to 8 bit Y - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" - : "+r"(src_argb), // %0 - "+r"(dst_y), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "q0", "q1", "q2", "q12", "q13"); -} - -void RGBAToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { - asm volatile( - "vmov.u8 d24, #29 \n" // B * 0.1140 coefficient - "vmov.u8 d25, #150 \n" // G * 0.5870 coefficient - "vmov.u8 d26, #77 \n" // R * 0.2990 coefficient - "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 RGBA pixels. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q2, d1, d24 \n" // B - "vmlal.u8 q2, d2, d25 \n" // G - "vmlal.u8 q2, d3, d26 \n" // R - "vqrshrn.u16 d0, q2, #8 \n" // 16 bit to 8 bit Y - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" - : "+r"(src_argb), // %0 - "+r"(dst_y), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "q0", "q1", "q2", "q12", "q13"); -} - // 8x1 pixels. void ARGBToUV444Row_NEON(const uint8_t* src_argb, uint8_t* dst_u, @@ -1365,65 +1858,198 @@ void ARGBToUV444Row_NEON(const uint8_t* src_argb, "vmull.u8 q2, d0, d24 \n" // B "vmlsl.u8 q2, d1, d25 \n" // G "vmlsl.u8 q2, d2, d26 \n" // R - "vadd.u16 q2, q2, q15 \n" // +128 -> unsigned "vmull.u8 q3, d2, d24 \n" // R "vmlsl.u8 q3, d1, d28 \n" // G "vmlsl.u8 q3, d0, d27 \n" // B - "vadd.u16 q3, q3, q15 \n" // +128 -> unsigned - "vqshrn.u16 d0, q2, #8 \n" // 16 bit to 8 bit U - "vqshrn.u16 d1, q3, #8 \n" // 16 bit to 8 bit V + "vaddhn.u16 d0, q2, q15 \n" // +128 -> unsigned + "vaddhn.u16 d1, q3, q15 \n" // +128 -> unsigned + + "vst1.8 {d0}, [%1]! \n" // store 8 pixels U. + "vst1.8 {d1}, [%2]! \n" // store 8 pixels V. + "bgt 1b \n" + : "+r"(src_argb), // %0 + "+r"(dst_u), // %1 + "+r"(dst_v), // %2 + "+r"(width) // %3 + : + : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q12", "q13", "q14", + "q15"); +} + +// clang-format off +// 16x2 pixels -> 8x1. width is number of argb pixels. e.g. 16. +#define RGBTOUV(QB, QG, QR) \ + "vmul.s16 q8, " #QB ", q10 \n" /* B */ \ + "vmls.s16 q8, " #QG ", q11 \n" /* G */ \ + "vmls.s16 q8, " #QR ", q12 \n" /* R */ \ + "vmul.s16 q9, " #QR ", q10 \n" /* R */ \ + "vmls.s16 q9, " #QG ", q14 \n" /* G */ \ + "vmls.s16 q9, " #QB ", q13 \n" /* B */ \ + "vaddhn.u16 d0, q8, q15 \n" /* +128 -> unsigned */ \ + "vaddhn.u16 d1, q9, q15 \n" /* +128 -> unsigned */ +// clang-format on + +// TODO(fbarchard): Consider vhadd vertical, then vpaddl horizontal, avoid shr. 
+void ARGBToUVRow_NEON(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + asm volatile ( + "add %1, %0, %1 \n" // src_stride + src_argb + "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 coefficient + "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient + "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient + "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient + "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient + "vmov.u16 q15, #0x8080 \n" // 128.5 + "1: \n" + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ARGB pixels. + "vpaddl.u8 q0, q0 \n" // B 16 bytes -> 8 shorts. + "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. + "vpaddl.u8 q2, q2 \n" // R 16 bytes -> 8 shorts. + "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more ARGB pixels. + "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 ARGB pixels. + "vpadal.u8 q0, q4 \n" // B 16 bytes -> 8 shorts. + "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. + "vpadal.u8 q2, q6 \n" // R 16 bytes -> 8 shorts. + + "vrshr.u16 q0, q0, #1 \n" // 2x average + "vrshr.u16 q1, q1, #1 \n" + "vrshr.u16 q2, q2, #1 \n" + + "subs %4, %4, #16 \n" // 16 processed per loop. + RGBTOUV(q0, q1, q2) + "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. + "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. + "bgt 1b \n" + : "+r"(src_argb), // %0 + "+r"(src_stride_argb), // %1 + "+r"(dst_u), // %2 + "+r"(dst_v), // %3 + "+r"(width) // %4 + : + : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7", + "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15" + ); +} + +// TODO(fbarchard): Subsample match Intel code. +void ARGBToUVJRow_NEON(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + asm volatile ( + "add %1, %0, %1 \n" // src_stride + src_argb + "vmov.s16 q10, #127 / 2 \n" // UB / VR 0.500 coefficient + "vmov.s16 q11, #84 / 2 \n" // UG -0.33126 coefficient + "vmov.s16 q12, #43 / 2 \n" // UR -0.16874 coefficient + "vmov.s16 q13, #20 / 2 \n" // VB -0.08131 coefficient + "vmov.s16 q14, #107 / 2 \n" // VG -0.41869 coefficient + "vmov.u16 q15, #0x8080 \n" // 128.5 + "1: \n" + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ARGB pixels. + "vpaddl.u8 q0, q0 \n" // B 16 bytes -> 8 shorts. + "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. + "vpaddl.u8 q2, q2 \n" // R 16 bytes -> 8 shorts. + "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more ARGB pixels. + "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 ARGB pixels. + "vpadal.u8 q0, q4 \n" // B 16 bytes -> 8 shorts. + "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. + "vpadal.u8 q2, q6 \n" // R 16 bytes -> 8 shorts. + + "vrshr.u16 q0, q0, #1 \n" // 2x average + "vrshr.u16 q1, q1, #1 \n" + "vrshr.u16 q2, q2, #1 \n" + + "subs %4, %4, #16 \n" // 16 processed per loop. + RGBTOUV(q0, q1, q2) + "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. + "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. 
+ "bgt 1b \n" + : "+r"(src_argb), // %0 + "+r"(src_stride_argb), // %1 + "+r"(dst_u), // %2 + "+r"(dst_v), // %3 + "+r"(width) // %4 + : + : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7", + "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15" + ); +} + +void ABGRToUVJRow_NEON(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_uj, + uint8_t* dst_vj, + int width) { + asm volatile ( + "add %1, %0, %1 \n" // src_stride + src_argb + "vmov.s16 q10, #127 / 2 \n" // UB / VR 0.500 coefficient + "vmov.s16 q11, #84 / 2 \n" // UG -0.33126 coefficient + "vmov.s16 q12, #43 / 2 \n" // UR -0.16874 coefficient + "vmov.s16 q13, #20 / 2 \n" // VB -0.08131 coefficient + "vmov.s16 q14, #107 / 2 \n" // VG -0.41869 coefficient + "vmov.u16 q15, #0x8080 \n" // 128.5 + "1: \n" + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ABGR pixels. + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ABGR pixels. + "vpaddl.u8 q0, q0 \n" // R 16 bytes -> 8 shorts. + "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. + "vpaddl.u8 q2, q2 \n" // B 16 bytes -> 8 shorts. + "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more ABGR pixels. + "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 ABGR pixels. + "vpadal.u8 q0, q4 \n" // R 16 bytes -> 8 shorts. + "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. + "vpadal.u8 q2, q6 \n" // B 16 bytes -> 8 shorts. + + "vrshr.u16 q0, q0, #1 \n" // 2x average + "vrshr.u16 q1, q1, #1 \n" + "vrshr.u16 q2, q2, #1 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels U. - "vst1.8 {d1}, [%2]! \n" // store 8 pixels V. + "subs %4, %4, #16 \n" // 16 processed per loop. + RGBTOUV(q2, q1, q0) + "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. + "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. "bgt 1b \n" - : "+r"(src_argb), // %0 - "+r"(dst_u), // %1 - "+r"(dst_v), // %2 - "+r"(width) // %3 - : - : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q12", "q13", "q14", - "q15"); + : "+r"(src_abgr), // %0 + "+r"(src_stride_abgr), // %1 + "+r"(dst_uj), // %2 + "+r"(dst_vj), // %3 + "+r"(width) // %4 + : + : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7", + "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15" + ); } -// clang-format off -// 16x2 pixels -> 8x1. width is number of argb pixels. e.g. 16. -#define RGBTOUV(QB, QG, QR) \ - "vmul.s16 q8, " #QB ", q10 \n" /* B */ \ - "vmls.s16 q8, " #QG ", q11 \n" /* G */ \ - "vmls.s16 q8, " #QR ", q12 \n" /* R */ \ - "vadd.u16 q8, q8, q15 \n" /* +128 -> unsigned */ \ - "vmul.s16 q9, " #QR ", q10 \n" /* R */ \ - "vmls.s16 q9, " #QG ", q14 \n" /* G */ \ - "vmls.s16 q9, " #QB ", q13 \n" /* B */ \ - "vadd.u16 q9, q9, q15 \n" /* +128 -> unsigned */ \ - "vqshrn.u16 d0, q8, #8 \n" /* 16 bit to 8 bit U */ \ - "vqshrn.u16 d1, q9, #8 \n" /* 16 bit to 8 bit V */ -// clang-format on - -// TODO(fbarchard): Consider vhadd vertical, then vpaddl horizontal, avoid shr. -void ARGBToUVRow_NEON(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width) { +// TODO(fbarchard): Subsample match C code. 
+void RGB24ToUVJRow_NEON(const uint8_t* src_rgb24, + int src_stride_rgb24, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { asm volatile ( - "add %1, %0, %1 \n" // src_stride + src_argb - "vmov.s16 q10, #112 / 2 \n" // UB / VR 0.875 coefficient - "vmov.s16 q11, #74 / 2 \n" // UG -0.5781 coefficient - "vmov.s16 q12, #38 / 2 \n" // UR -0.2969 coefficient - "vmov.s16 q13, #18 / 2 \n" // VB -0.1406 coefficient - "vmov.s16 q14, #94 / 2 \n" // VG -0.7344 coefficient + "add %1, %0, %1 \n" // src_stride + src_rgb24 + "vmov.s16 q10, #127 / 2 \n" // UB / VR 0.500 coefficient + "vmov.s16 q11, #84 / 2 \n" // UG -0.33126 coefficient + "vmov.s16 q12, #43 / 2 \n" // UR -0.16874 coefficient + "vmov.s16 q13, #20 / 2 \n" // VB -0.08131 coefficient + "vmov.s16 q14, #107 / 2 \n" // VG -0.41869 coefficient "vmov.u16 q15, #0x8080 \n" // 128.5 "1: \n" - "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. - "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ARGB pixels. + "vld3.8 {d0, d2, d4}, [%0]! \n" // load 8 RGB24 pixels. + "vld3.8 {d1, d3, d5}, [%0]! \n" // load next 8 RGB24 pixels. "vpaddl.u8 q0, q0 \n" // B 16 bytes -> 8 shorts. "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. "vpaddl.u8 q2, q2 \n" // R 16 bytes -> 8 shorts. - "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more ARGB pixels. - "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 ARGB pixels. + "vld3.8 {d8, d10, d12}, [%1]! \n" // load 8 more RGB24 pixels. + "vld3.8 {d9, d11, d13}, [%1]! \n" // load last 8 RGB24 pixels. "vpadal.u8 q0, q4 \n" // B 16 bytes -> 8 shorts. "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. "vpadal.u8 q2, q6 \n" // R 16 bytes -> 8 shorts. @@ -1437,8 +2063,8 @@ void ARGBToUVRow_NEON(const uint8_t* src_argb, "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. "bgt 1b \n" - : "+r"(src_argb), // %0 - "+r"(src_stride_argb), // %1 + : "+r"(src_rgb24), // %0 + "+r"(src_stride_rgb24), // %1 "+r"(dst_u), // %2 "+r"(dst_v), // %3 "+r"(width) // %4 @@ -1449,13 +2075,13 @@ void ARGBToUVRow_NEON(const uint8_t* src_argb, } // TODO(fbarchard): Subsample match C code. -void ARGBToUVJRow_NEON(const uint8_t* src_argb, - int src_stride_argb, - uint8_t* dst_u, - uint8_t* dst_v, - int width) { +void RAWToUVJRow_NEON(const uint8_t* src_raw, + int src_stride_raw, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { asm volatile ( - "add %1, %0, %1 \n" // src_stride + src_argb + "add %1, %0, %1 \n" // src_stride + src_raw "vmov.s16 q10, #127 / 2 \n" // UB / VR 0.500 coefficient "vmov.s16 q11, #84 / 2 \n" // UG -0.33126 coefficient "vmov.s16 q12, #43 / 2 \n" // UR -0.16874 coefficient @@ -1463,13 +2089,13 @@ void ARGBToUVJRow_NEON(const uint8_t* src_argb, "vmov.s16 q14, #107 / 2 \n" // VG -0.41869 coefficient "vmov.u16 q15, #0x8080 \n" // 128.5 "1: \n" - "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 8 ARGB pixels. - "vld4.8 {d1, d3, d5, d7}, [%0]! \n" // load next 8 ARGB pixels. + "vld3.8 {d0, d2, d4}, [%0]! \n" // load 8 RAW pixels. + "vld3.8 {d1, d3, d5}, [%0]! \n" // load next 8 RAW pixels. "vpaddl.u8 q0, q0 \n" // B 16 bytes -> 8 shorts. "vpaddl.u8 q1, q1 \n" // G 16 bytes -> 8 shorts. "vpaddl.u8 q2, q2 \n" // R 16 bytes -> 8 shorts. - "vld4.8 {d8, d10, d12, d14}, [%1]! \n" // load 8 more ARGB pixels. - "vld4.8 {d9, d11, d13, d15}, [%1]! \n" // load last 8 ARGB pixels. + "vld3.8 {d8, d10, d12}, [%1]! \n" // load 8 more RAW pixels. + "vld3.8 {d9, d11, d13}, [%1]! \n" // load last 8 RAW pixels. "vpadal.u8 q0, q4 \n" // B 16 bytes -> 8 shorts. "vpadal.u8 q1, q5 \n" // G 16 bytes -> 8 shorts. 
"vpadal.u8 q2, q6 \n" // R 16 bytes -> 8 shorts. @@ -1479,12 +2105,12 @@ void ARGBToUVJRow_NEON(const uint8_t* src_argb, "vrshr.u16 q2, q2, #1 \n" "subs %4, %4, #16 \n" // 16 processed per loop. - RGBTOUV(q0, q1, q2) + RGBTOUV(q2, q1, q0) "vst1.8 {d0}, [%2]! \n" // store 8 pixels U. "vst1.8 {d1}, [%3]! \n" // store 8 pixels V. "bgt 1b \n" - : "+r"(src_argb), // %0 - "+r"(src_stride_argb), // %1 + : "+r"(src_raw), // %0 + "+r"(src_stride_raw), // %1 "+r"(dst_u), // %2 "+r"(dst_v), // %3 "+r"(width) // %4 @@ -1984,161 +2610,280 @@ void ARGB4444ToYRow_NEON(const uint8_t* src_argb4444, : "cc", "memory", "q0", "q1", "q2", "q3", "q12", "q13"); } -void BGRAToYRow_NEON(const uint8_t* src_bgra, uint8_t* dst_y, int width) { +void ARGBToAR64Row_NEON(const uint8_t* src_argb, + uint16_t* dst_ar64, + int width) { asm volatile( - "vmov.u8 d6, #25 \n" // B * 0.1016 coefficient - "vmov.u8 d5, #129 \n" // G * 0.5078 coefficient - "vmov.u8 d4, #66 \n" // R * 0.2578 coefficient - "vmov.u8 d7, #16 \n" // Add 16 constant "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 pixels of BGRA. + "vld1.8 {q0}, [%0]! \n" + "vld1.8 {q2}, [%0]! \n" + "vmov.u8 q1, q0 \n" + "vmov.u8 q3, q2 \n" "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q8, d1, d4 \n" // R - "vmlal.u8 q8, d2, d5 \n" // G - "vmlal.u8 q8, d3, d6 \n" // B - "vqrshrn.u16 d0, q8, #8 \n" // 16 bit to 8 bit Y - "vqadd.u8 d0, d7 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. + "vst2.8 {q0, q1}, [%1]! \n" // store 4 pixels + "vst2.8 {q2, q3}, [%1]! \n" // store 4 pixels "bgt 1b \n" - : "+r"(src_bgra), // %0 - "+r"(dst_y), // %1 + : "+r"(src_argb), // %0 + "+r"(dst_ar64), // %1 "+r"(width) // %2 : - : "cc", "memory", "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7", "q8"); + : "cc", "memory", "q0", "q1", "q2", "q3"); } -void ABGRToYRow_NEON(const uint8_t* src_abgr, uint8_t* dst_y, int width) { +static const uvec8 kShuffleARGBToABGR = {2, 1, 0, 3, 6, 5, 4, 7, + 10, 9, 8, 11, 14, 13, 12, 15}; + +void ARGBToAB64Row_NEON(const uint8_t* src_argb, + uint16_t* dst_ab64, + int width) { asm volatile( - "vmov.u8 d6, #25 \n" // B * 0.1016 coefficient - "vmov.u8 d5, #129 \n" // G * 0.5078 coefficient - "vmov.u8 d4, #66 \n" // R * 0.2578 coefficient - "vmov.u8 d7, #16 \n" // Add 16 constant + "vld1.8 {q4}, [%3] \n" // shuffler + "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 pixels of ABGR. + "vld1.8 {q0}, [%0]! \n" + "vld1.8 {q2}, [%0]! \n" + "vtbl.8 d2, {d0, d1}, d8 \n" + "vtbl.8 d3, {d0, d1}, d9 \n" + "vtbl.8 d6, {d4, d5}, d8 \n" + "vtbl.8 d7, {d4, d5}, d9 \n" + "vmov.u8 q0, q1 \n" + "vmov.u8 q2, q3 \n" "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q8, d0, d4 \n" // R - "vmlal.u8 q8, d1, d5 \n" // G - "vmlal.u8 q8, d2, d6 \n" // B - "vqrshrn.u16 d0, q8, #8 \n" // 16 bit to 8 bit Y - "vqadd.u8 d0, d7 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. + "vst2.8 {q0, q1}, [%1]! \n" // store 4 pixels + "vst2.8 {q2, q3}, [%1]! 
\n" // store 4 pixels "bgt 1b \n" - : "+r"(src_abgr), // %0 - "+r"(dst_y), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7", "q8"); + : "+r"(src_argb), // %0 + "+r"(dst_ab64), // %1 + "+r"(width) // %2 + : "r"(&kShuffleARGBToABGR) // %3 + : "cc", "memory", "q0", "q1", "q2", "q3", "q4"); } -void RGBAToYRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width) { +void AR64ToARGBRow_NEON(const uint16_t* src_ar64, + uint8_t* dst_argb, + int width) { asm volatile( - "vmov.u8 d4, #25 \n" // B * 0.1016 coefficient - "vmov.u8 d5, #129 \n" // G * 0.5078 coefficient - "vmov.u8 d6, #66 \n" // R * 0.2578 coefficient - "vmov.u8 d7, #16 \n" // Add 16 constant "1: \n" - "vld4.8 {d0, d1, d2, d3}, [%0]! \n" // load 8 pixels of RGBA. + "vld1.16 {q0}, [%0]! \n" + "vld1.16 {q1}, [%0]! \n" + "vld1.16 {q2}, [%0]! \n" + "vld1.16 {q3}, [%0]! \n" + "vshrn.u16 d0, q0, #8 \n" + "vshrn.u16 d1, q1, #8 \n" + "vshrn.u16 d4, q2, #8 \n" + "vshrn.u16 d5, q3, #8 \n" "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q8, d1, d4 \n" // B - "vmlal.u8 q8, d2, d5 \n" // G - "vmlal.u8 q8, d3, d6 \n" // R - "vqrshrn.u16 d0, q8, #8 \n" // 16 bit to 8 bit Y - "vqadd.u8 d0, d7 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. + "vst1.8 {q0}, [%1]! \n" // store 4 pixels + "vst1.8 {q2}, [%1]! \n" // store 4 pixels "bgt 1b \n" - : "+r"(src_rgba), // %0 - "+r"(dst_y), // %1 + : "+r"(src_ar64), // %0 + "+r"(dst_argb), // %1 "+r"(width) // %2 : - : "cc", "memory", "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7", "q8"); + : "cc", "memory", "q0", "q1", "q2", "q3"); } -void RGB24ToYRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_y, int width) { +static const uvec8 kShuffleAB64ToARGB = {5, 3, 1, 7, 13, 11, 9, 15}; + +void AB64ToARGBRow_NEON(const uint16_t* src_ab64, + uint8_t* dst_argb, + int width) { asm volatile( - "vmov.u8 d4, #25 \n" // B * 0.1016 coefficient - "vmov.u8 d5, #129 \n" // G * 0.5078 coefficient - "vmov.u8 d6, #66 \n" // R * 0.2578 coefficient - "vmov.u8 d7, #16 \n" // Add 16 constant + "vld1.8 {d8}, [%3] \n" // shuffler + "1: \n" - "vld3.8 {d0, d1, d2}, [%0]! \n" // load 8 pixels of RGB24. + "vld1.16 {q0}, [%0]! \n" + "vld1.16 {q1}, [%0]! \n" + "vld1.16 {q2}, [%0]! \n" + "vld1.16 {q3}, [%0]! \n" + "vtbl.8 d0, {d0, d1}, d8 \n" + "vtbl.8 d1, {d2, d3}, d8 \n" + "vtbl.8 d4, {d4, d5}, d8 \n" + "vtbl.8 d5, {d6, d7}, d8 \n" "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q8, d0, d4 \n" // B - "vmlal.u8 q8, d1, d5 \n" // G - "vmlal.u8 q8, d2, d6 \n" // R - "vqrshrn.u16 d0, q8, #8 \n" // 16 bit to 8 bit Y - "vqadd.u8 d0, d7 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. + "vst1.8 {q0}, [%1]! \n" // store 4 pixels + "vst1.8 {q2}, [%1]! 
\n" // store 4 pixels + "bgt 1b \n" + : "+r"(src_ab64), // %0 + "+r"(dst_argb), // %1 + "+r"(width) // %2 + : "r"(&kShuffleAB64ToARGB) // %3 + : "cc", "memory", "q0", "q1", "q2", "q3", "q4"); +} + +struct RgbConstants { + uint8_t kRGBToY[4]; + uint16_t kAddY; + uint16_t pad; +}; + +// RGB to JPeg coefficients +// B * 0.1140 coefficient = 29 +// G * 0.5870 coefficient = 150 +// R * 0.2990 coefficient = 77 +// Add 0.5 = 0x80 +static const struct RgbConstants kRgb24JPEGConstants = {{29, 150, 77, 0}, + 128, + 0}; + +static const struct RgbConstants kRawJPEGConstants = {{77, 150, 29, 0}, 128, 0}; + +// RGB to BT.601 coefficients +// B * 0.1016 coefficient = 25 +// G * 0.5078 coefficient = 129 +// R * 0.2578 coefficient = 66 +// Add 16.5 = 0x1080 + +static const struct RgbConstants kRgb24I601Constants = {{25, 129, 66, 0}, + 0x1080, + 0}; + +static const struct RgbConstants kRawI601Constants = {{66, 129, 25, 0}, + 0x1080, + 0}; + +// ARGB expects first 3 values to contain RGB and 4th value is ignored. +void ARGBToYMatrixRow_NEON(const uint8_t* src_argb, + uint8_t* dst_y, + int width, + const struct RgbConstants* rgbconstants) { + asm volatile( + "vld1.8 {d0}, [%3] \n" // load rgbconstants + "vdup.u8 d20, d0[0] \n" + "vdup.u8 d21, d0[1] \n" + "vdup.u8 d22, d0[2] \n" + "vdup.u16 q12, d0[2] \n" + "1: \n" + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 16 pixels of ARGB + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" + "subs %2, %2, #16 \n" // 16 processed per loop. + "vmull.u8 q8, d0, d20 \n" // B + "vmull.u8 q9, d1, d20 \n" + "vmlal.u8 q8, d2, d21 \n" // G + "vmlal.u8 q9, d3, d21 \n" + "vmlal.u8 q8, d4, d22 \n" // R + "vmlal.u8 q9, d5, d22 \n" + "vaddhn.u16 d0, q8, q12 \n" // 16 bit to 8 bit Y + "vaddhn.u16 d1, q9, q12 \n" + "vst1.8 {d0, d1}, [%1]! \n" // store 16 pixels Y. "bgt 1b \n" - : "+r"(src_rgb24), // %0 - "+r"(dst_y), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7", "q8"); + : "+r"(src_argb), // %0 + "+r"(dst_y), // %1 + "+r"(width) // %2 + : "r"(rgbconstants) // %3 + : "cc", "memory", "q0", "q1", "q2", "q3", "q8", "q9", "d20", "d21", "d22", + "q12"); } -void RAWToYRow_NEON(const uint8_t* src_raw, uint8_t* dst_y, int width) { +void ARGBToYRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { + ARGBToYMatrixRow_NEON(src_argb, dst_y, width, &kRgb24I601Constants); +} + +void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_yj, int width) { + ARGBToYMatrixRow_NEON(src_argb, dst_yj, width, &kRgb24JPEGConstants); +} + +void ABGRToYRow_NEON(const uint8_t* src_abgr, uint8_t* dst_y, int width) { + ARGBToYMatrixRow_NEON(src_abgr, dst_y, width, &kRawI601Constants); +} + +void ABGRToYJRow_NEON(const uint8_t* src_abgr, uint8_t* dst_yj, int width) { + ARGBToYMatrixRow_NEON(src_abgr, dst_yj, width, &kRawJPEGConstants); +} + +// RGBA expects first value to be A and ignored, then 3 values to contain RGB. +// Same code as ARGB, except the LD4 +void RGBAToYMatrixRow_NEON(const uint8_t* src_rgba, + uint8_t* dst_y, + int width, + const struct RgbConstants* rgbconstants) { asm volatile( - "vmov.u8 d6, #25 \n" // B * 0.1016 coefficient - "vmov.u8 d5, #129 \n" // G * 0.5078 coefficient - "vmov.u8 d4, #66 \n" // R * 0.2578 coefficient - "vmov.u8 d7, #16 \n" // Add 16 constant + "vld1.8 {d0}, [%3] \n" // load rgbconstants + "vdup.u8 d20, d0[0] \n" + "vdup.u8 d21, d0[1] \n" + "vdup.u8 d22, d0[2] \n" + "vdup.u16 q12, d0[2] \n" "1: \n" - "vld3.8 {d0, d1, d2}, [%0]! \n" // load 8 pixels of RAW. - "subs %2, %2, #8 \n" // 8 processed per loop. 
- "vmull.u8 q8, d0, d4 \n" // B - "vmlal.u8 q8, d1, d5 \n" // G - "vmlal.u8 q8, d2, d6 \n" // R - "vqrshrn.u16 d0, q8, #8 \n" // 16 bit to 8 bit Y - "vqadd.u8 d0, d7 \n" - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" - : "+r"(src_raw), // %0 - "+r"(dst_y), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7", "q8"); + "vld4.8 {d0, d2, d4, d6}, [%0]! \n" // load 16 pixels of RGBA + "vld4.8 {d1, d3, d5, d7}, [%0]! \n" + "subs %2, %2, #16 \n" // 16 processed per loop. + "vmull.u8 q8, d2, d20 \n" // B + "vmull.u8 q9, d3, d20 \n" + "vmlal.u8 q8, d4, d21 \n" // G + "vmlal.u8 q9, d5, d21 \n" + "vmlal.u8 q8, d6, d22 \n" // R + "vmlal.u8 q9, d7, d22 \n" + "vaddhn.u16 d0, q8, q12 \n" // 16 bit to 8 bit Y + "vaddhn.u16 d1, q9, q12 \n" + "vst1.8 {d0, d1}, [%1]! \n" // store 16 pixels Y. + "bgt 1b \n" + : "+r"(src_rgba), // %0 + "+r"(dst_y), // %1 + "+r"(width) // %2 + : "r"(rgbconstants) // %3 + : "cc", "memory", "q0", "q1", "q2", "q3", "q8", "q9", "d20", "d21", "d22", + "q12"); } -void RGB24ToYJRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_yj, int width) { +void RGBAToYRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width) { + RGBAToYMatrixRow_NEON(src_rgba, dst_y, width, &kRgb24I601Constants); +} + +void RGBAToYJRow_NEON(const uint8_t* src_rgba, uint8_t* dst_yj, int width) { + RGBAToYMatrixRow_NEON(src_rgba, dst_yj, width, &kRgb24JPEGConstants); +} + +void BGRAToYRow_NEON(const uint8_t* src_bgra, uint8_t* dst_y, int width) { + RGBAToYMatrixRow_NEON(src_bgra, dst_y, width, &kRawI601Constants); +} + +void RGBToYMatrixRow_NEON(const uint8_t* src_rgb, + uint8_t* dst_y, + int width, + const struct RgbConstants* rgbconstants) { asm volatile( - "vmov.u8 d4, #29 \n" // B * 0.1140 coefficient - "vmov.u8 d5, #150 \n" // G * 0.5870 coefficient - "vmov.u8 d6, #77 \n" // R * 0.2990 coefficient + "vld1.8 {d0}, [%3] \n" // load rgbconstants + "vdup.u8 d20, d0[0] \n" + "vdup.u8 d21, d0[1] \n" + "vdup.u8 d22, d0[2] \n" + "vdup.u16 q12, d0[2] \n" "1: \n" - "vld3.8 {d0, d1, d2}, [%0]! \n" // load 8 pixels of RGB24. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q4, d0, d4 \n" // B - "vmlal.u8 q4, d1, d5 \n" // G - "vmlal.u8 q4, d2, d6 \n" // R - "vqrshrn.u16 d0, q4, #8 \n" // 16 bit to 8 bit Y - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" - : "+r"(src_rgb24), // %0 - "+r"(dst_yj), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "d0", "d1", "d2", "d3", "d4", "d5", "d6", "q4"); + "vld3.8 {d2, d4, d6}, [%0]! \n" // load 16 pixels of + // RGB24. + "vld3.8 {d3, d5, d7}, [%0]! \n" + "subs %2, %2, #16 \n" // 16 processed per loop. + "vmull.u8 q8, d2, d20 \n" // B + "vmull.u8 q9, d3, d20 \n" + "vmlal.u8 q8, d4, d21 \n" // G + "vmlal.u8 q9, d5, d21 \n" + "vmlal.u8 q8, d6, d22 \n" // R + "vmlal.u8 q9, d7, d22 \n" + "vaddhn.u16 d0, q8, q12 \n" // 16 bit to 8 bit Y + "vaddhn.u16 d1, q9, q12 \n" + "vst1.8 {d0, d1}, [%1]! \n" // store 16 pixels Y. 
+ "bgt 1b \n" + : "+r"(src_rgb), // %0 + "+r"(dst_y), // %1 + "+r"(width) // %2 + : "r"(rgbconstants) // %3 + : "cc", "memory", "q0", "q1", "q2", "q3", "q8", "q9", "d20", "d21", "d22", + "q12"); +} + +void RGB24ToYJRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_yj, int width) { + RGBToYMatrixRow_NEON(src_rgb24, dst_yj, width, &kRgb24JPEGConstants); } void RAWToYJRow_NEON(const uint8_t* src_raw, uint8_t* dst_yj, int width) { - asm volatile( - "vmov.u8 d6, #29 \n" // B * 0.1140 coefficient - "vmov.u8 d5, #150 \n" // G * 0.5870 coefficient - "vmov.u8 d4, #77 \n" // R * 0.2990 coefficient - "1: \n" - "vld3.8 {d0, d1, d2}, [%0]! \n" // load 8 pixels of RAW. - "subs %2, %2, #8 \n" // 8 processed per loop. - "vmull.u8 q4, d0, d4 \n" // B - "vmlal.u8 q4, d1, d5 \n" // G - "vmlal.u8 q4, d2, d6 \n" // R - "vqrshrn.u16 d0, q4, #8 \n" // 16 bit to 8 bit Y - "vst1.8 {d0}, [%1]! \n" // store 8 pixels Y. - "bgt 1b \n" - : "+r"(src_raw), // %0 - "+r"(dst_yj), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "d0", "d1", "d2", "d3", "d4", "d5", "d6", "q4"); + RGBToYMatrixRow_NEON(src_raw, dst_yj, width, &kRawJPEGConstants); +} + +void RGB24ToYRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_y, int width) { + RGBToYMatrixRow_NEON(src_rgb24, dst_y, width, &kRgb24I601Constants); +} + +void RAWToYRow_NEON(const uint8_t* src_raw, uint8_t* dst_y, int width) { + RGBToYMatrixRow_NEON(src_raw, dst_y, width, &kRawI601Constants); } // Bilinear filter 16x2 -> 16x1 @@ -2200,8 +2945,68 @@ void InterpolateRow_NEON(uint8_t* dst_ptr, : "cc", "memory", "q0", "q1", "d4", "d5", "q13", "q14"); } +// Bilinear filter 8x2 -> 8x1 +void InterpolateRow_16_NEON(uint16_t* dst_ptr, + const uint16_t* src_ptr, + ptrdiff_t src_stride, + int dst_width, + int source_y_fraction) { + int y1_fraction = source_y_fraction; + int y0_fraction = 256 - y1_fraction; + const uint16_t* src_ptr1 = src_ptr + src_stride; + + asm volatile( + "cmp %4, #0 \n" + "beq 100f \n" + "cmp %4, #128 \n" + "beq 50f \n" + + "vdup.16 d17, %4 \n" + "vdup.16 d16, %5 \n" + // General purpose row blend. + "1: \n" + "vld1.16 {q0}, [%1]! \n" + "vld1.16 {q1}, [%2]! \n" + "subs %3, %3, #8 \n" + "vmull.u16 q2, d0, d16 \n" + "vmull.u16 q3, d1, d16 \n" + "vmlal.u16 q2, d2, d17 \n" + "vmlal.u16 q3, d3, d17 \n" + "vrshrn.u32 d0, q2, #8 \n" + "vrshrn.u32 d1, q3, #8 \n" + "vst1.16 {q0}, [%0]! \n" + "bgt 1b \n" + "b 99f \n" + + // Blend 50 / 50. + "50: \n" + "vld1.16 {q0}, [%1]! \n" + "vld1.16 {q1}, [%2]! \n" + "subs %3, %3, #8 \n" + "vrhadd.u16 q0, q1 \n" + "vst1.16 {q0}, [%0]! \n" + "bgt 50b \n" + "b 99f \n" + + // Blend 100 / 0 - Copy row unchanged. + "100: \n" + "vld1.16 {q0}, [%1]! \n" + "subs %3, %3, #8 \n" + "vst1.16 {q0}, [%0]! \n" + "bgt 100b \n" + + "99: \n" + : "+r"(dst_ptr), // %0 + "+r"(src_ptr), // %1 + "+r"(src_ptr1), // %2 + "+r"(dst_width) // %3 + : "r"(y1_fraction), // %4 + "r"(y0_fraction) // %5 + : "cc", "memory", "q0", "q1", "q2", "q3", "q8"); +} + // dr * (256 - sa) / 256 + sr = dr - dr * sa / 256 + sr -void ARGBBlendRow_NEON(const uint8_t* src_argb0, +void ARGBBlendRow_NEON(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -2252,7 +3057,7 @@ void ARGBBlendRow_NEON(const uint8_t* src_argb0, "99: \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 "+r"(width) // %3 @@ -2490,7 +3295,7 @@ void ARGBColorMatrixRow_NEON(const uint8_t* src_argb, } // Multiply 2 rows of ARGB pixels together, 8 pixels at a time. 
-void ARGBMultiplyRow_NEON(const uint8_t* src_argb0, +void ARGBMultiplyRow_NEON(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -2510,7 +3315,7 @@ void ARGBMultiplyRow_NEON(const uint8_t* src_argb0, "vrshrn.u16 d3, q3, #8 \n" // 16 bit to 8 bit A "vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels. "bgt 1b \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 "+r"(width) // %3 @@ -2519,7 +3324,7 @@ void ARGBMultiplyRow_NEON(const uint8_t* src_argb0, } // Add 2 rows of ARGB pixels together, 8 pixels at a time. -void ARGBAddRow_NEON(const uint8_t* src_argb0, +void ARGBAddRow_NEON(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -2533,7 +3338,7 @@ void ARGBAddRow_NEON(const uint8_t* src_argb0, "vqadd.u8 q1, q1, q3 \n" // add R, A "vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels. "bgt 1b \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 "+r"(width) // %3 @@ -2542,7 +3347,7 @@ void ARGBAddRow_NEON(const uint8_t* src_argb0, } // Subtract 2 rows of ARGB pixels, 8 pixels at a time. -void ARGBSubtractRow_NEON(const uint8_t* src_argb0, +void ARGBSubtractRow_NEON(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -2556,7 +3361,7 @@ void ARGBSubtractRow_NEON(const uint8_t* src_argb0, "vqsub.u8 q1, q1, q3 \n" // subtract R, A "vst4.8 {d0, d1, d2, d3}, [%2]! \n" // store 8 ARGB pixels. "bgt 1b \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 "+r"(width) // %3 @@ -3031,6 +3836,131 @@ void HalfMergeUVRow_NEON(const uint8_t* src_u, : "cc", "memory", "q0", "q1", "q2", "q3"); } +void SplitUVRow_16_NEON(const uint16_t* src_uv, + uint16_t* dst_u, + uint16_t* dst_v, + int depth, + int width) { + int shift = depth - 16; // Negative for right shift. + asm volatile( + "vdup.16 q2, %4 \n" + "1: \n" + "vld2.16 {q0, q1}, [%0]! \n" // load 8 UV + "vshl.u16 q0, q0, q2 \n" + "vshl.u16 q1, q1, q2 \n" + "subs %3, %3, #8 \n" // 8 src pixels per loop + "vst1.16 {q0}, [%1]! \n" // store 8 U pixels + "vst1.16 {q1}, [%2]! \n" // store 8 V pixels + "bgt 1b \n" + : "+r"(src_uv), // %0 + "+r"(dst_u), // %1 + "+r"(dst_v), // %2 + "+r"(width) // %3 + : "r"(shift) // %4 + : "cc", "memory", "q0", "q1", "q2", "q3", "q4"); +} + +void MergeUVRow_16_NEON(const uint16_t* src_u, + const uint16_t* src_v, + uint16_t* dst_uv, + int depth, + int width) { + int shift = 16 - depth; + asm volatile( + "vdup.16 q2, %4 \n" + "1: \n" + "vld1.16 {q0}, [%0]! \n" // load 8 U + "vld1.16 {q1}, [%1]! \n" // load 8 V + "vshl.u16 q0, q0, q2 \n" + "vshl.u16 q1, q1, q2 \n" + "subs %3, %3, #8 \n" // 8 src pixels per loop + "vst2.16 {q0, q1}, [%2]! \n" // store 8 UV pixels + "bgt 1b \n" + : "+r"(src_u), // %0 + "+r"(src_v), // %1 + "+r"(dst_uv), // %2 + "+r"(width) // %3 + : "r"(shift) // %4 + : "cc", "memory", "q0", "q1", "q2"); +} + +void MultiplyRow_16_NEON(const uint16_t* src_y, + uint16_t* dst_y, + int scale, + int width) { + asm volatile( + "vdup.16 q2, %3 \n" + "1: \n" + "vld1.16 {q0}, [%0]! \n" + "vld1.16 {q1}, [%0]! \n" + "vmul.u16 q0, q0, q2 \n" + "vmul.u16 q1, q1, q2 \n" + "vst1.16 {q0}, [%1]! \n" + "vst1.16 {q1}, [%1]! 
\n" + "subs %2, %2, #16 \n" // 16 src pixels per loop + "bgt 1b \n" + : "+r"(src_y), // %0 + "+r"(dst_y), // %1 + "+r"(width) // %2 + : "r"(scale) // %3 + : "cc", "memory", "q0", "q1", "q2"); +} + +void DivideRow_16_NEON(const uint16_t* src_y, + uint16_t* dst_y, + int scale, + int width) { + asm volatile( + "vdup.16 d8, %3 \n" + "1: \n" + "vld1.16 {q2, q3}, [%0]! \n" + "vmull.u16 q0, d4, d8 \n" + "vmull.u16 q1, d5, d8 \n" + "vmull.u16 q2, d6, d8 \n" + "vmull.u16 q3, d7, d8 \n" + "vshrn.u32 d0, q0, #16 \n" + "vshrn.u32 d1, q1, #16 \n" + "vshrn.u32 d2, q2, #16 \n" + "vshrn.u32 d3, q3, #16 \n" + "vst1.16 {q0, q1}, [%1]! \n" // store 16 pixels + "subs %2, %2, #16 \n" // 16 src pixels per loop + "bgt 1b \n" + : "+r"(src_y), // %0 + "+r"(dst_y), // %1 + "+r"(width) // %2 + : "r"(scale) // %3 + : "cc", "memory", "q0", "q1", "q2", "q3", "d8"); +} + +// Use scale to convert lsb formats to msb, depending how many bits there are: +// 32768 = 9 bits = shr 1 +// 16384 = 10 bits = shr 2 +// 4096 = 12 bits = shr 4 +// 256 = 16 bits = shr 8 +void Convert16To8Row_NEON(const uint16_t* src_y, + uint8_t* dst_y, + int scale, + int width) { + int shift = 15 - __builtin_clz((int32_t)scale); // Negative shl is shr + asm volatile( + "vdup.16 q2, %3 \n" + "1: \n" + "vld1.16 {q0}, [%0]! \n" + "vld1.16 {q1}, [%0]! \n" + "vshl.u16 q0, q0, q2 \n" // shr = q2 is negative + "vshl.u16 q1, q1, q2 \n" + "vqmovn.u16 d0, q0 \n" + "vqmovn.u16 d1, q1 \n" + "subs %2, %2, #16 \n" // 16 src pixels per loop + "vst1.8 {q0}, [%1]! \n" + "bgt 1b \n" + : "+r"(src_y), // %0 + "+r"(dst_y), // %1 + "+r"(width) // %2 + : "r"(shift) // %3 + : "cc", "memory", "q0", "q1", "q2"); +} + #endif // !defined(LIBYUV_DISABLE_NEON) && defined(__ARM_NEON__).. #ifdef __cplusplus diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_neon64.cc b/TMessagesProj/jni/third_party/libyuv/source/row_neon64.cc index d5258a3aef..7f04b60687 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_neon64.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/row_neon64.cc @@ -15,96 +15,108 @@ namespace libyuv { extern "C" { #endif +// Enable LIBYUV_USE_ST2, LIBYUV_USE_ST3, LIBYUV_USE_ST4 for CPUs that prefer +// STn over ZIP1+ST1 +// Exynos M1, M2, M3 are slow with ST2, ST3 and ST4 instructions. + // This module is for GCC Neon armv8 64 bit. 
#if !defined(LIBYUV_DISABLE_NEON) && defined(__aarch64__) +// v0.8h: Y +// v1.16b: 8U, 8V + // Read 8 Y, 4 U and 4 V from 422 #define READYUV422 \ - "ld1 {v0.8b}, [%0], #8 \n" \ - "ld1 {v1.s}[0], [%1], #4 \n" \ - "ld1 {v1.s}[1], [%2], #4 \n" + "ldr d0, [%[src_y]], #8 \n" \ + "ld1 {v1.s}[0], [%[src_u]], #4 \n" \ + "ld1 {v1.s}[1], [%[src_v]], #4 \n" \ + "zip1 v0.16b, v0.16b, v0.16b \n" \ + "prfm pldl1keep, [%[src_y], 448] \n" \ + "zip1 v1.16b, v1.16b, v1.16b \n" \ + "prfm pldl1keep, [%[src_u], 128] \n" \ + "prfm pldl1keep, [%[src_v], 128] \n" // Read 8 Y, 8 U and 8 V from 444 #define READYUV444 \ - "ld1 {v0.8b}, [%0], #8 \n" \ - "ld1 {v1.d}[0], [%1], #8 \n" \ - "ld1 {v1.d}[1], [%2], #8 \n" \ - "uaddlp v1.8h, v1.16b \n" \ - "rshrn v1.8b, v1.8h, #1 \n" + "ldr d0, [%[src_y]], #8 \n" \ + "ld1 {v1.d}[0], [%[src_u]], #8 \n" \ + "prfm pldl1keep, [%[src_y], 448] \n" \ + "ld1 {v1.d}[1], [%[src_v]], #8 \n" \ + "prfm pldl1keep, [%[src_u], 448] \n" \ + "zip1 v0.16b, v0.16b, v0.16b \n" \ + "prfm pldl1keep, [%[src_v], 448] \n" // Read 8 Y, and set 4 U and 4 V to 128 #define READYUV400 \ - "ld1 {v0.8b}, [%0], #8 \n" \ - "movi v1.8b , #128 \n" + "ldr d0, [%[src_y]], #8 \n" \ + "movi v1.16b, #128 \n" \ + "prfm pldl1keep, [%[src_y], 448] \n" \ + "zip1 v0.16b, v0.16b, v0.16b \n" -// Read 8 Y and 4 UV from NV12 +static const uvec8 kNV12Table = {0, 0, 2, 2, 4, 4, 6, 6, + 1, 1, 3, 3, 5, 5, 7, 7}; +static const uvec8 kNV21Table = {1, 1, 3, 3, 5, 5, 7, 7, + 0, 0, 2, 2, 4, 4, 6, 6}; + +// Read 8 Y and 4 UV from NV12 or NV21 #define READNV12 \ - "ld1 {v0.8b}, [%0], #8 \n" \ - "ld1 {v2.8b}, [%1], #8 \n" \ - "uzp1 v1.8b, v2.8b, v2.8b \n" \ - "uzp2 v3.8b, v2.8b, v2.8b \n" \ - "ins v1.s[1], v3.s[0] \n" - -// Read 8 Y and 4 VU from NV21 -#define READNV21 \ - "ld1 {v0.8b}, [%0], #8 \n" \ - "ld1 {v2.8b}, [%1], #8 \n" \ - "uzp1 v3.8b, v2.8b, v2.8b \n" \ - "uzp2 v1.8b, v2.8b, v2.8b \n" \ - "ins v1.s[1], v3.s[0] \n" + "ldr d0, [%[src_y]], #8 \n" \ + "ldr d1, [%[src_uv]], #8 \n" \ + "zip1 v0.16b, v0.16b, v0.16b \n" \ + "prfm pldl1keep, [%[src_y], 448] \n" \ + "tbl v1.16b, {v1.16b}, v2.16b \n" \ + "prfm pldl1keep, [%[src_uv], 448] \n" // Read 8 YUY2 -#define READYUY2 \ - "ld2 {v0.8b, v1.8b}, [%0], #16 \n" \ - "uzp2 v3.8b, v1.8b, v1.8b \n" \ - "uzp1 v1.8b, v1.8b, v1.8b \n" \ - "ins v1.s[1], v3.s[0] \n" +#define READYUY2 \ + "ld2 {v0.8b, v1.8b}, [%[src_yuy2]], #16 \n" \ + "zip1 v0.16b, v0.16b, v0.16b \n" \ + "prfm pldl1keep, [%[src_yuy2], 448] \n" \ + "tbl v1.16b, {v1.16b}, v2.16b \n" // Read 8 UYVY -#define READUYVY \ - "ld2 {v2.8b, v3.8b}, [%0], #16 \n" \ - "orr v0.8b, v3.8b, v3.8b \n" \ - "uzp1 v1.8b, v2.8b, v2.8b \n" \ - "uzp2 v3.8b, v2.8b, v2.8b \n" \ - "ins v1.s[1], v3.s[0] \n" - -#define YUVTORGB_SETUP \ - "ld3r {v24.8h, v25.8h, v26.8h}, [%[kUVBiasBGR]] \n" \ - "ld1r {v31.4s}, [%[kYToRgb]] \n" \ - "ld2 {v27.8h, v28.8h}, [%[kUVToRB]] \n" \ - "ld2 {v29.8h, v30.8h}, [%[kUVToG]] \n" - -// clang-format off - -#define YUVTORGB(vR, vG, vB) \ - "uxtl v0.8h, v0.8b \n" /* Extract Y */ \ - "shll v2.8h, v1.8b, #8 \n" /* Replicate UV */ \ - "ushll2 v3.4s, v0.8h, #0 \n" /* Y */ \ - "ushll v0.4s, v0.4h, #0 \n" \ - "mul v3.4s, v3.4s, v31.4s \n" \ - "mul v0.4s, v0.4s, v31.4s \n" \ - "sqshrun v0.4h, v0.4s, #16 \n" \ - "sqshrun2 v0.8h, v3.4s, #16 \n" /* Y */ \ - "uaddw v1.8h, v2.8h, v1.8b \n" /* Replicate UV */ \ - "mov v2.d[0], v1.d[1] \n" /* Extract V */ \ - "uxtl v2.8h, v2.8b \n" \ - "uxtl v1.8h, v1.8b \n" /* Extract U */ \ - "mul v3.8h, v27.8h, v1.8h \n" \ - "mul v5.8h, v29.8h, v1.8h \n" \ - "mul v6.8h, v30.8h, v2.8h \n" \ - "mul v7.8h, 
v28.8h, v2.8h \n" \ - "sqadd v6.8h, v6.8h, v5.8h \n" \ - "sqadd " #vB ".8h, v24.8h, v0.8h \n" /* B */ \ - "sqadd " #vG ".8h, v25.8h, v0.8h \n" /* G */ \ - "sqadd " #vR ".8h, v26.8h, v0.8h \n" /* R */ \ - "sqadd " #vB ".8h, " #vB ".8h, v3.8h \n" /* B */ \ - "sqsub " #vG ".8h, " #vG ".8h, v6.8h \n" /* G */ \ - "sqadd " #vR ".8h, " #vR ".8h, v7.8h \n" /* R */ \ - "sqshrun " #vB ".8b, " #vB ".8h, #6 \n" /* B */ \ - "sqshrun " #vG ".8b, " #vG ".8h, #6 \n" /* G */ \ - "sqshrun " #vR ".8b, " #vR ".8h, #6 \n" /* R */ - -// clang-format on +#define READUYVY \ + "ld2 {v3.8b, v4.8b}, [%[src_uyvy]], #16 \n" \ + "zip1 v0.16b, v4.16b, v4.16b \n" \ + "prfm pldl1keep, [%[src_uyvy], 448] \n" \ + "tbl v1.16b, {v3.16b}, v2.16b \n" + +// UB VR UG VG +// YG BB BG BR +#define YUVTORGB_SETUP \ + "ld4r {v28.16b, v29.16b, v30.16b, v31.16b}, [%[kUVCoeff]] \n" \ + "ld4r {v24.8h, v25.8h, v26.8h, v27.8h}, [%[kRGBCoeffBias]] \n" + +// v16.8h: B +// v17.8h: G +// v18.8h: R + +// Convert from YUV to 2.14 fixed point RGB +#define YUVTORGB \ + "umull2 v3.4s, v0.8h, v24.8h \n" \ + "umull v6.8h, v1.8b, v30.8b \n" \ + "umull v0.4s, v0.4h, v24.4h \n" \ + "umlal2 v6.8h, v1.16b, v31.16b \n" /* DG */ \ + "uqshrn v0.4h, v0.4s, #16 \n" \ + "uqshrn2 v0.8h, v3.4s, #16 \n" /* Y */ \ + "umull v4.8h, v1.8b, v28.8b \n" /* DB */ \ + "umull2 v5.8h, v1.16b, v29.16b \n" /* DR */ \ + "add v17.8h, v0.8h, v26.8h \n" /* G */ \ + "add v16.8h, v0.8h, v4.8h \n" /* B */ \ + "add v18.8h, v0.8h, v5.8h \n" /* R */ \ + "uqsub v17.8h, v17.8h, v6.8h \n" /* G */ \ + "uqsub v16.8h, v16.8h, v25.8h \n" /* B */ \ + "uqsub v18.8h, v18.8h, v27.8h \n" /* R */ + +// Convert from 2.14 fixed point RGB To 8 bit RGB +#define RGBTORGB8 \ + "uqshrn v17.8b, v17.8h, #6 \n" \ + "uqshrn v16.8b, v16.8h, #6 \n" \ + "uqshrn v18.8b, v18.8h, #6 \n" + +#define YUVTORGB_REGS \ + "v0", "v1", "v3", "v4", "v5", "v6", "v7", "v16", "v17", "v18", "v24", "v25", \ + "v26", "v27", "v28", "v29", "v30", "v31" void I444ToARGBRow_NEON(const uint8_t* src_y, const uint8_t* src_u, @@ -112,30 +124,45 @@ void I444ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width) { - asm volatile ( - YUVTORGB_SETUP - "movi v23.8b, #255 \n" /* A */ - "1: \n" - READYUV444 - "prfm pldl1keep, [%0, 448] \n" - YUVTORGB(v22, v21, v20) - "prfm pldl1keep, [%1, 448] \n" - "prfm pldl1keep, [%2, 448] \n" - "subs %w4, %w4, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%3], #32 \n" - "b.gt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(dst_argb), // %3 - "+r"(width) // %4 - : [kUVToRB]"r"(&yuvconstants->kUVToRB), - [kUVToG]"r"(&yuvconstants->kUVToG), - [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR), - [kYToRgb]"r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30" - ); + asm volatile( + YUVTORGB_SETUP + "movi v19.8b, #255 \n" /* A */ + "1: \n" READYUV444 YUVTORGB + RGBTORGB8 + "subs %w[width], %w[width], #8 \n" + "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n" + "b.gt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "v19"); +} + +void I444ToRGB24Row_NEON(const uint8_t* src_y, + const uint8_t* src_u, + const 
uint8_t* src_v, + uint8_t* dst_rgb24, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile( + YUVTORGB_SETUP + "1: \n" READYUV444 YUVTORGB + RGBTORGB8 + "subs %w[width], %w[width], #8 \n" + "st3 {v16.8b,v17.8b,v18.8b}, [%[dst_rgb24]], #24 \n" + "b.gt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_rgb24] "+r"(dst_rgb24), // %[dst_rgb24] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS); } void I422ToARGBRow_NEON(const uint8_t* src_y, @@ -144,31 +171,48 @@ void I422ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width) { - asm volatile ( - YUVTORGB_SETUP - "movi v23.8b, #255 \n" /* A */ + asm volatile( + YUVTORGB_SETUP + "movi v19.8b, #255 \n" /* A */ + "1: \n" READYUV422 YUVTORGB + RGBTORGB8 + "subs %w[width], %w[width], #8 \n" + "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n" + "b.gt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "v19"); +} +void I444AlphaToARGBRow_NEON(const uint8_t* src_y, + const uint8_t* src_u, + const uint8_t* src_v, + const uint8_t* src_a, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile( + YUVTORGB_SETUP "1: \n" - READYUV422 - "prfm pldl1keep, [%0, 448] \n" - YUVTORGB(v22, v21, v20) - "prfm pldl1keep, [%1, 128] \n" - "prfm pldl1keep, [%2, 128] \n" - "subs %w4, %w4, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%3], #32 \n" - "b.gt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(dst_argb), // %3 - "+r"(width) // %4 - : [kUVToRB]"r"(&yuvconstants->kUVToRB), - [kUVToG]"r"(&yuvconstants->kUVToG), - [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR), - [kYToRgb]"r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30" - ); + "ld1 {v19.8b}, [%[src_a]], #8 \n" READYUV444 + "prfm pldl1keep, [%[src_a], 448] \n" YUVTORGB RGBTORGB8 + "subs %w[width], %w[width], #8 \n" + "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n" + "b.gt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [src_a] "+r"(src_a), // %[src_a] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "v19"); } void I422AlphaToARGBRow_NEON(const uint8_t* src_y, @@ -178,32 +222,23 @@ void I422AlphaToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width) { - asm volatile ( - YUVTORGB_SETUP + asm volatile( + YUVTORGB_SETUP "1: \n" - READYUV422 - "prfm pldl1keep, [%0, 448] \n" - YUVTORGB(v22, v21, v20) - "ld1 {v23.8b}, [%3], #8 \n" - "prfm pldl1keep, [%1, 128] \n" - "prfm pldl1keep, [%2, 128] \n" - "prfm pldl1keep, [%3, 448] \n" - "subs %w5, %w5, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%4], #32 \n" - "b.gt 1b \n" - 
: "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(src_a), // %3 - "+r"(dst_argb), // %4 - "+r"(width) // %5 - : [kUVToRB]"r"(&yuvconstants->kUVToRB), - [kUVToG]"r"(&yuvconstants->kUVToG), - [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR), - [kYToRgb]"r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30" - ); + "ld1 {v19.8b}, [%[src_a]], #8 \n" READYUV422 + "prfm pldl1keep, [%[src_a], 448] \n" YUVTORGB RGBTORGB8 + "subs %w[width], %w[width], #8 \n" + "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n" + "b.gt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [src_a] "+r"(src_a), // %[src_a] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "v19"); } void I422ToRGBARow_NEON(const uint8_t* src_y, @@ -212,30 +247,22 @@ void I422ToRGBARow_NEON(const uint8_t* src_y, uint8_t* dst_rgba, const struct YuvConstants* yuvconstants, int width) { - asm volatile ( - YUVTORGB_SETUP - "movi v20.8b, #255 \n" /* A */ - "1: \n" - READYUV422 - "prfm pldl1keep, [%0, 448] \n" - YUVTORGB(v23, v22, v21) - "prfm pldl1keep, [%1, 128] \n" - "prfm pldl1keep, [%2, 128] \n" - "subs %w4, %w4, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%3], #32 \n" - "b.gt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(dst_rgba), // %3 - "+r"(width) // %4 - : [kUVToRB]"r"(&yuvconstants->kUVToRB), - [kUVToG]"r"(&yuvconstants->kUVToG), - [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR), - [kYToRgb]"r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30" - ); + asm volatile( + YUVTORGB_SETUP + "movi v15.8b, #255 \n" /* A */ + "1: \n" READYUV422 YUVTORGB + RGBTORGB8 + "subs %w[width], %w[width], #8 \n" + "st4 {v15.8b,v16.8b,v17.8b,v18.8b}, [%[dst_rgba]], #32 \n" + "b.gt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_rgba] "+r"(dst_rgba), // %[dst_rgba] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "v15"); } void I422ToRGB24Row_NEON(const uint8_t* src_y, @@ -244,39 +271,29 @@ void I422ToRGB24Row_NEON(const uint8_t* src_y, uint8_t* dst_rgb24, const struct YuvConstants* yuvconstants, int width) { - asm volatile ( - YUVTORGB_SETUP - "1: \n" - READYUV422 - "prfm pldl1keep, [%0, 448] \n" - YUVTORGB(v22, v21, v20) - "prfm pldl1keep, [%1, 128] \n" - "prfm pldl1keep, [%2, 128] \n" - "subs %w4, %w4, #8 \n" - "st3 {v20.8b,v21.8b,v22.8b}, [%3], #24 \n" - "b.gt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(dst_rgb24), // %3 - "+r"(width) // %4 - : [kUVToRB]"r"(&yuvconstants->kUVToRB), - [kUVToG]"r"(&yuvconstants->kUVToG), - [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR), - [kYToRgb]"r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30" - ); + asm volatile( + YUVTORGB_SETUP + "1: \n" READYUV422 YUVTORGB + RGBTORGB8 + "subs %w[width], %w[width], #8 
\n" + "st3 {v16.8b,v17.8b,v18.8b}, [%[dst_rgb24]], #24 \n" + "b.gt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_rgb24] "+r"(dst_rgb24), // %[dst_rgb24] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS); } #define ARGBTORGB565 \ - "shll v0.8h, v22.8b, #8 \n" /* R */ \ - "shll v21.8h, v21.8b, #8 \n" /* G */ \ - "shll v20.8h, v20.8b, #8 \n" /* B */ \ - "sri v0.8h, v21.8h, #5 \n" /* RG */ \ - "sri v0.8h, v20.8h, #11 \n" /* RGB */ - -// clang-format off + "shll v18.8h, v18.8b, #8 \n" /* R */ \ + "shll v17.8h, v17.8b, #8 \n" /* G */ \ + "shll v16.8h, v16.8b, #8 \n" /* B */ \ + "sri v18.8h, v17.8h, #5 \n" /* RG */ \ + "sri v18.8h, v16.8h, #11 \n" /* RGB */ void I422ToRGB565Row_NEON(const uint8_t* src_y, const uint8_t* src_u, @@ -285,38 +302,29 @@ void I422ToRGB565Row_NEON(const uint8_t* src_y, const struct YuvConstants* yuvconstants, int width) { asm volatile( - YUVTORGB_SETUP - "1: \n" - READYUV422 - YUVTORGB(v22, v21, v20) - "prfm pldl1keep, [%0, 448] \n" - "subs %w4, %w4, #8 \n" - ARGBTORGB565 - "prfm pldl1keep, [%1, 128] \n" - "prfm pldl1keep, [%2, 128] \n" - "st1 {v0.8h}, [%3], #16 \n" // store 8 pixels RGB565. - "b.gt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(dst_rgb565), // %3 - "+r"(width) // %4 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"); + YUVTORGB_SETUP + "1: \n" READYUV422 YUVTORGB + RGBTORGB8 "subs %w[width], %w[width], #8 \n" ARGBTORGB565 + "st1 {v18.8h}, [%[dst_rgb565]], #16 \n" // store 8 pixels RGB565. + "b.gt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_rgb565] "+r"(dst_rgb565), // %[dst_rgb565] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS); } #define ARGBTOARGB1555 \ - "shll v0.8h, v23.8b, #8 \n" /* A */ \ - "shll v22.8h, v22.8b, #8 \n" /* R */ \ - "shll v21.8h, v21.8b, #8 \n" /* G */ \ - "shll v20.8h, v20.8b, #8 \n" /* B */ \ - "sri v0.8h, v22.8h, #1 \n" /* AR */ \ - "sri v0.8h, v21.8h, #6 \n" /* ARG */ \ - "sri v0.8h, v20.8h, #11 \n" /* ARGB */ + "shll v0.8h, v19.8b, #8 \n" /* A */ \ + "shll v18.8h, v18.8b, #8 \n" /* R */ \ + "shll v17.8h, v17.8b, #8 \n" /* G */ \ + "shll v16.8h, v16.8b, #8 \n" /* B */ \ + "sri v0.8h, v18.8h, #1 \n" /* AR */ \ + "sri v0.8h, v17.8h, #6 \n" /* ARG */ \ + "sri v0.8h, v16.8h, #11 \n" /* ARGB */ void I422ToARGB1555Row_NEON(const uint8_t* src_y, const uint8_t* src_u, @@ -325,40 +333,32 @@ void I422ToARGB1555Row_NEON(const uint8_t* src_y, const struct YuvConstants* yuvconstants, int width) { asm volatile( - YUVTORGB_SETUP - "movi v23.8b, #255 \n" - "1: \n" - READYUV422 - YUVTORGB(v22, v21, v20) - "prfm pldl1keep, [%0, 448] \n" - "subs %w4, %w4, #8 \n" - ARGBTOARGB1555 - "prfm pldl1keep, [%1, 128] \n" - "prfm pldl1keep, [%2, 128] \n" - "st1 {v0.8h}, [%3], #16 \n" // store 8 pixels RGB565. 
- "b.gt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(dst_argb1555), // %3 - "+r"(width) // %4 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"); + YUVTORGB_SETUP + "movi v19.8b, #255 \n" + "1: \n" READYUV422 YUVTORGB + RGBTORGB8 + "subs %w[width], %w[width], #8 \n" ARGBTOARGB1555 + "st1 {v0.8h}, [%[dst_argb1555]], #16 \n" // store 8 pixels + // RGB565. + "b.gt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_argb1555] "+r"(dst_argb1555), // %[dst_argb1555] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "v19"); } -// clang-format on #define ARGBTOARGB4444 \ - /* Input v20.8b<=B, v21.8b<=G, v22.8b<=R, v23.8b<=A, v4.8b<=0x0f */ \ - "ushr v20.8b, v20.8b, #4 \n" /* B */ \ - "bic v21.8b, v21.8b, v4.8b \n" /* G */ \ - "ushr v22.8b, v22.8b, #4 \n" /* R */ \ - "bic v23.8b, v23.8b, v4.8b \n" /* A */ \ - "orr v0.8b, v20.8b, v21.8b \n" /* BG */ \ - "orr v1.8b, v22.8b, v23.8b \n" /* RA */ \ + /* Input v16.8b<=B, v17.8b<=G, v18.8b<=R, v19.8b<=A, v23.8b<=0x0f */ \ + "ushr v16.8b, v16.8b, #4 \n" /* B */ \ + "bic v17.8b, v17.8b, v23.8b \n" /* G */ \ + "ushr v18.8b, v18.8b, #4 \n" /* R */ \ + "bic v19.8b, v19.8b, v23.8b \n" /* A */ \ + "orr v0.8b, v16.8b, v17.8b \n" /* BG */ \ + "orr v1.8b, v18.8b, v19.8b \n" /* RA */ \ "zip1 v0.16b, v0.16b, v1.16b \n" /* BGRA */ void I422ToARGB4444Row_NEON(const uint8_t* src_y, @@ -367,60 +367,49 @@ void I422ToARGB4444Row_NEON(const uint8_t* src_y, uint8_t* dst_argb4444, const struct YuvConstants* yuvconstants, int width) { - asm volatile ( - YUVTORGB_SETUP - "movi v4.16b, #0x0f \n" // bits to clear with vbic. - "1: \n" - READYUV422 - YUVTORGB(v22, v21, v20) - "prfm pldl1keep, [%0, 448] \n" - "subs %w4, %w4, #8 \n" - "movi v23.8b, #255 \n" - ARGBTOARGB4444 - "prfm pldl1keep, [%1, 128] \n" - "prfm pldl1keep, [%2, 128] \n" - "st1 {v0.8h}, [%3], #16 \n" // store 8 pixels ARGB4444. - "b.gt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_u), // %1 - "+r"(src_v), // %2 - "+r"(dst_argb4444), // %3 - "+r"(width) // %4 - : [kUVToRB]"r"(&yuvconstants->kUVToRB), - [kUVToG]"r"(&yuvconstants->kUVToG), - [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR), - [kYToRgb]"r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30" - ); + asm volatile( + YUVTORGB_SETUP + "movi v23.16b, #0x0f \n" // bits to clear with + // vbic. + "1: \n" READYUV422 YUVTORGB + RGBTORGB8 + "subs %w[width], %w[width], #8 \n" + "movi v19.8b, #255 \n" ARGBTOARGB4444 + "st1 {v0.8h}, [%[dst_argb4444]], #16 \n" // store 8 + // pixels + // ARGB4444. 
+ "b.gt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_u] "+r"(src_u), // %[src_u] + [src_v] "+r"(src_v), // %[src_v] + [dst_argb4444] "+r"(dst_argb4444), // %[dst_argb4444] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "v19", "v23"); } void I400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width) { - asm volatile ( - YUVTORGB_SETUP - "movi v23.8b, #255 \n" - "1: \n" - READYUV400 - YUVTORGB(v22, v21, v20) - "prfm pldl1keep, [%0, 448] \n" - "subs %w2, %w2, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], #32 \n" + asm volatile( + YUVTORGB_SETUP + "movi v19.8b, #255 \n" + "1: \n" READYUV400 YUVTORGB + RGBTORGB8 + "subs %w[width], %w[width], #8 \n" + "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n" "b.gt 1b \n" - : "+r"(src_y), // %0 - "+r"(dst_argb), // %1 - "+r"(width) // %2 - : [kUVToRB]"r"(&yuvconstants->kUVToRB), - [kUVToG]"r"(&yuvconstants->kUVToG), - [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR), - [kYToRgb]"r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30" - ); + : [src_y] "+r"(src_y), // %[src_y] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias) // %[kRGBCoeffBias] + : "cc", "memory", YUVTORGB_REGS, "v19"); } +#if LIBYUV_USE_ST4 void J400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width) { asm volatile( "movi v23.8b, #255 \n" @@ -438,34 +427,49 @@ void J400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width) { : : "cc", "memory", "v20", "v21", "v22", "v23"); } +#else +void J400ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, int width) { + asm volatile( + "movi v20.8b, #255 \n" + "1: \n" + "ldr d16, [%0], #8 \n" + "subs %w2, %w2, #8 \n" + "zip1 v18.16b, v16.16b, v16.16b \n" // YY + "zip1 v19.16b, v16.16b, v20.16b \n" // YA + "prfm pldl1keep, [%0, 448] \n" + "zip1 v16.16b, v18.16b, v19.16b \n" // YYYA + "zip2 v17.16b, v18.16b, v19.16b \n" + "stp q16, q17, [%1], #32 \n" + "b.gt 1b \n" + : "+r"(src_y), // %0 + "+r"(dst_argb), // %1 + "+r"(width) // %2 + : + : "cc", "memory", "v16", "v17", "v18", "v19", "v20"); +} +#endif // LIBYUV_USE_ST4 void NV12ToARGBRow_NEON(const uint8_t* src_y, const uint8_t* src_uv, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width) { - asm volatile ( - YUVTORGB_SETUP - "movi v23.8b, #255 \n" - "1: \n" - READNV12 - "prfm pldl1keep, [%0, 448] \n" - YUVTORGB(v22, v21, v20) - "prfm pldl1keep, [%1, 256] \n" - "subs %w3, %w3, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%2], #32 \n" - "b.gt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_uv), // %1 - "+r"(dst_argb), // %2 - "+r"(width) // %3 - : [kUVToRB]"r"(&yuvconstants->kUVToRB), - [kUVToG]"r"(&yuvconstants->kUVToG), - [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR), - [kYToRgb]"r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30" - ); + asm volatile( + YUVTORGB_SETUP + "movi v19.8b, #255 \n" + "ldr q2, [%[kNV12Table]] \n" + "1: \n" READNV12 YUVTORGB RGBTORGB8 + "subs %w[width], %w[width], #8 \n" + "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n" + "b.gt 1b \n" + : 
[src_y] "+r"(src_y), // %[src_y] + [src_uv] "+r"(src_uv), // %[src_uv] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias), // %[kRGBCoeffBias] + [kNV12Table] "r"(&kNV12Table) + : "cc", "memory", YUVTORGB_REGS, "v2", "v19"); } void NV21ToARGBRow_NEON(const uint8_t* src_y, @@ -473,28 +477,22 @@ void NV21ToARGBRow_NEON(const uint8_t* src_y, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width) { - asm volatile ( - YUVTORGB_SETUP - "movi v23.8b, #255 \n" - "1: \n" - READNV21 - "prfm pldl1keep, [%0, 448] \n" - YUVTORGB(v22, v21, v20) - "prfm pldl1keep, [%1, 256] \n" - "subs %w3, %w3, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%2], #32 \n" - "b.gt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_vu), // %1 - "+r"(dst_argb), // %2 - "+r"(width) // %3 - : [kUVToRB]"r"(&yuvconstants->kUVToRB), - [kUVToG]"r"(&yuvconstants->kUVToG), - [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR), - [kYToRgb]"r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30" - ); + asm volatile( + YUVTORGB_SETUP + "movi v19.8b, #255 \n" + "ldr q2, [%[kNV12Table]] \n" + "1: \n" READNV12 YUVTORGB RGBTORGB8 + "subs %w[width], %w[width], #8 \n" + "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n" + "b.gt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_uv] "+r"(src_vu), // %[src_uv] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias), // %[kRGBCoeffBias] + [kNV12Table] "r"(&kNV21Table) + : "cc", "memory", YUVTORGB_REGS, "v2", "v19"); } void NV12ToRGB24Row_NEON(const uint8_t* src_y, @@ -502,27 +500,21 @@ void NV12ToRGB24Row_NEON(const uint8_t* src_y, uint8_t* dst_rgb24, const struct YuvConstants* yuvconstants, int width) { - asm volatile ( - YUVTORGB_SETUP - "1: \n" - READNV12 - "prfm pldl1keep, [%0, 448] \n" - YUVTORGB(v22, v21, v20) - "prfm pldl1keep, [%1, 256] \n" - "subs %w3, %w3, #8 \n" - "st3 {v20.8b,v21.8b,v22.8b}, [%2], #24 \n" - "b.gt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_uv), // %1 - "+r"(dst_rgb24), // %2 - "+r"(width) // %3 - : [kUVToRB]"r"(&yuvconstants->kUVToRB), - [kUVToG]"r"(&yuvconstants->kUVToG), - [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR), - [kYToRgb]"r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30" - ); + asm volatile( + YUVTORGB_SETUP + "ldr q2, [%[kNV12Table]] \n" + "1: \n" READNV12 YUVTORGB RGBTORGB8 + "subs %w[width], %w[width], #8 \n" + "st3 {v16.8b,v17.8b,v18.8b}, [%[dst_rgb24]], #24 \n" + "b.gt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_uv] "+r"(src_uv), // %[src_uv] + [dst_rgb24] "+r"(dst_rgb24), // %[dst_rgb24] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias), // %[kRGBCoeffBias] + [kNV12Table] "r"(&kNV12Table) + : "cc", "memory", YUVTORGB_REGS, "v2"); } void NV21ToRGB24Row_NEON(const uint8_t* src_y, @@ -530,27 +522,21 @@ void NV21ToRGB24Row_NEON(const uint8_t* src_y, uint8_t* dst_rgb24, const struct YuvConstants* yuvconstants, int width) { - asm volatile ( - YUVTORGB_SETUP - "1: \n" - READNV21 - "prfm pldl1keep, [%0, 448] \n" - YUVTORGB(v22, v21, v20) - "prfm pldl1keep, 
[%1, 256] \n" - "subs %w3, %w3, #8 \n" - "st3 {v20.8b,v21.8b,v22.8b}, [%2], #24 \n" - "b.gt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_vu), // %1 - "+r"(dst_rgb24), // %2 - "+r"(width) // %3 - : [kUVToRB]"r"(&yuvconstants->kUVToRB), - [kUVToG]"r"(&yuvconstants->kUVToG), - [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR), - [kYToRgb]"r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30" - ); + asm volatile( + YUVTORGB_SETUP + "ldr q2, [%[kNV12Table]] \n" + "1: \n" READNV12 YUVTORGB RGBTORGB8 + "subs %w[width], %w[width], #8 \n" + "st3 {v16.8b,v17.8b,v18.8b}, [%[dst_rgb24]], #24 \n" + "b.gt 1b \n" + : [src_y] "+r"(src_y), // %[src_y] + [src_uv] "+r"(src_vu), // %[src_uv] + [dst_rgb24] "+r"(dst_rgb24), // %[dst_rgb24] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias), // %[kRGBCoeffBias] + [kNV12Table] "r"(&kNV21Table) + : "cc", "memory", YUVTORGB_REGS, "v2"); } void NV12ToRGB565Row_NEON(const uint8_t* src_y, @@ -559,173 +545,800 @@ void NV12ToRGB565Row_NEON(const uint8_t* src_y, const struct YuvConstants* yuvconstants, int width) { asm volatile( - YUVTORGB_SETUP "1: \n" READNV12 - "prfm pldl1keep, [%0, 448] \n" YUVTORGB( - v22, v21, v20) ARGBTORGB565 - "prfm pldl1keep, [%1, 256] \n" - "subs %w3, %w3, #8 \n" - "st1 {v0.8h}, [%2], 16 \n" // store 8 pixels + YUVTORGB_SETUP + "ldr q2, [%[kNV12Table]] \n" + "1: \n" READNV12 YUVTORGB RGBTORGB8 + "subs %w[width], %w[width], #8 \n" ARGBTORGB565 + "st1 {v18.8h}, [%[dst_rgb565]], #16 \n" // store 8 + // pixels + // RGB565. "b.gt 1b \n" - : "+r"(src_y), // %0 - "+r"(src_uv), // %1 - "+r"(dst_rgb565), // %2 - "+r"(width) // %3 - : [kUVToRB] "r"(&yuvconstants->kUVToRB), - [kUVToG] "r"(&yuvconstants->kUVToG), - [kUVBiasBGR] "r"(&yuvconstants->kUVBiasBGR), - [kYToRgb] "r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30"); + : [src_y] "+r"(src_y), // %[src_y] + [src_uv] "+r"(src_uv), // %[src_uv] + [dst_rgb565] "+r"(dst_rgb565), // %[dst_rgb565] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias), // %[kRGBCoeffBias] + [kNV12Table] "r"(&kNV12Table) + : "cc", "memory", YUVTORGB_REGS, "v2"); } void YUY2ToARGBRow_NEON(const uint8_t* src_yuy2, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width) { - asm volatile ( - YUVTORGB_SETUP - "movi v23.8b, #255 \n" + asm volatile( + YUVTORGB_SETUP + "movi v19.8b, #255 \n" + "ldr q2, [%[kNV12Table]] \n" + "1: \n" READYUY2 YUVTORGB RGBTORGB8 + "subs %w[width], %w[width], #8 \n" + "st4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n" + "b.gt 1b \n" + : [src_yuy2] "+r"(src_yuy2), // %[src_yuy2] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias), // %[kRGBCoeffBias] + [kNV12Table] "r"(&kNV12Table) + : "cc", "memory", YUVTORGB_REGS, "v2", "v19"); +} + +void UYVYToARGBRow_NEON(const uint8_t* src_uyvy, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + asm volatile( + YUVTORGB_SETUP + "movi v19.8b, #255 \n" + "ldr q2, [%[kNV12Table]] \n" + "1: \n" READUYVY YUVTORGB RGBTORGB8 + "subs %w[width], %w[width], #8 \n" + "st4 
{v16.8b,v17.8b,v18.8b,v19.8b}, [%[dst_argb]], #32 \n" + "b.gt 1b \n" + : [src_uyvy] "+r"(src_uyvy), // %[src_yuy2] + [dst_argb] "+r"(dst_argb), // %[dst_argb] + [width] "+r"(width) // %[width] + : [kUVCoeff] "r"(&yuvconstants->kUVCoeff), // %[kUVCoeff] + [kRGBCoeffBias] "r"(&yuvconstants->kRGBCoeffBias), // %[kRGBCoeffBias] + [kNV12Table] "r"(&kNV12Table) + : "cc", "memory", YUVTORGB_REGS, "v2", "v19"); +} + +// Reads 16 pairs of UV and write even values to dst_u and odd to dst_v. +void SplitUVRow_NEON(const uint8_t* src_uv, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + asm volatile( "1: \n" - READYUY2 + "ld2 {v0.16b,v1.16b}, [%0], #32 \n" // load 16 pairs of UV + "subs %w3, %w3, #16 \n" // 16 processed per loop "prfm pldl1keep, [%0, 448] \n" - YUVTORGB(v22, v21, v20) - "subs %w2, %w2, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], #32 \n" + "st1 {v0.16b}, [%1], #16 \n" // store U + "st1 {v1.16b}, [%2], #16 \n" // store V + "b.gt 1b \n" + : "+r"(src_uv), // %0 + "+r"(dst_u), // %1 + "+r"(dst_v), // %2 + "+r"(width) // %3 // Output registers + : // Input registers + : "cc", "memory", "v0", "v1" // Clobber List + ); +} + +// Reads 16 byte Y's from tile and writes out 16 Y's. +// MM21 Y tiles are 16x32 so src_tile_stride = 512 bytes +// MM21 UV tiles are 8x16 so src_tile_stride = 256 bytes +// width measured in bytes so 8 UV = 16. +void DetileRow_NEON(const uint8_t* src, + ptrdiff_t src_tile_stride, + uint8_t* dst, + int width) { + asm volatile( + "1: \n" + "ld1 {v0.16b}, [%0], %3 \n" // load 16 bytes + "subs %w2, %w2, #16 \n" // 16 processed per loop + "prfm pldl1keep, [%0, 1792] \n" // 7 tiles of 256b ahead + "st1 {v0.16b}, [%1], #16 \n" // store 16 bytes + "b.gt 1b \n" + : "+r"(src), // %0 + "+r"(dst), // %1 + "+r"(width) // %2 + : "r"(src_tile_stride) // %3 + : "cc", "memory", "v0" // Clobber List + ); +} + +// Reads 16 byte Y's of 16 bits from tile and writes out 16 Y's. +void DetileRow_16_NEON(const uint16_t* src, + ptrdiff_t src_tile_stride, + uint16_t* dst, + int width) { + asm volatile( + "1: \n" + "ld1 {v0.8h,v1.8h}, [%0], %3 \n" // load 16 pixels + "subs %w2, %w2, #16 \n" // 16 processed per loop + "prfm pldl1keep, [%0, 3584] \n" // 7 tiles of 512b ahead + "st1 {v0.8h,v1.8h}, [%1], #32 \n" // store 16 pixels + "b.gt 1b \n" + : "+r"(src), // %0 + "+r"(dst), // %1 + "+r"(width) // %2 + : "r"(src_tile_stride * 2) // %3 + : "cc", "memory", "v0", "v1" // Clobber List + ); +} + +// Read 16 bytes of UV, detile, and write 8 bytes of U and 8 bytes of V. 
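A rough scalar sketch of the detile-and-split step implemented by DetileSplitUVRow_NEON below (illustrative only, not from the patch; the helper name is invented). Each iteration appears to consume one 16-byte row of a UV tile, split it into 8 U and 8 V bytes, and hop to the matching row of the next tile by src_tile_stride; width is counted in input bytes.

#include <stddef.h>
#include <stdint.h>

// Hypothetical scalar equivalent of DetileSplitUVRow_NEON, for illustration only.
static void DetileSplitUVRow_Sketch(const uint8_t* src_uv,
                                    ptrdiff_t src_tile_stride,
                                    uint8_t* dst_u,
                                    uint8_t* dst_v,
                                    int width) {
  for (int x = 0; x < width; x += 16) {
    for (int i = 0; i < 8; ++i) {
      dst_u[i] = src_uv[2 * i];      // even bytes are U
      dst_v[i] = src_uv[2 * i + 1];  // odd bytes are V
    }
    src_uv += src_tile_stride;  // jump to the same row of the next tile
    dst_u += 8;
    dst_v += 8;
  }
}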
+void DetileSplitUVRow_NEON(const uint8_t* src_uv, + ptrdiff_t src_tile_stride, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + asm volatile( + "1: \n" + "ld2 {v0.8b,v1.8b}, [%0], %4 \n" + "subs %w3, %w3, #16 \n" + "prfm pldl1keep, [%0, 1792] \n" + "st1 {v0.8b}, [%1], #8 \n" + "st1 {v1.8b}, [%2], #8 \n" + "b.gt 1b \n" + : "+r"(src_uv), // %0 + "+r"(dst_u), // %1 + "+r"(dst_v), // %2 + "+r"(width) // %3 + : "r"(src_tile_stride) // %4 + : "cc", "memory", "v0", "v1" // Clobber List + ); +} + +#if LIBYUV_USE_ST2 +// Read 16 Y, 8 UV, and write 8 YUY2 +void DetileToYUY2_NEON(const uint8_t* src_y, + ptrdiff_t src_y_tile_stride, + const uint8_t* src_uv, + ptrdiff_t src_uv_tile_stride, + uint8_t* dst_yuy2, + int width) { + asm volatile( + "1: \n" + "ld1 {v0.16b}, [%0], %4 \n" // load 16 Ys + "prfm pldl1keep, [%0, 1792] \n" + "ld1 {v1.16b}, [%1], %5 \n" // load 8 UVs + "prfm pldl1keep, [%1, 1792] \n" + "subs %w3, %w3, #16 \n" // store 8 YUY2 + "st2 {v0.16b,v1.16b}, [%2], #32 \n" + "b.gt 1b \n" + : "+r"(src_y), // %0 + "+r"(src_uv), // %1 + "+r"(dst_yuy2), // %2 + "+r"(width) // %3 + : "r"(src_y_tile_stride), // %4 + "r"(src_uv_tile_stride) // %5 + : "cc", "memory", "v0", "v1" // Clobber list + ); +} +#else +// Read 16 Y, 8 UV, and write 8 YUY2 +void DetileToYUY2_NEON(const uint8_t* src_y, + ptrdiff_t src_y_tile_stride, + const uint8_t* src_uv, + ptrdiff_t src_uv_tile_stride, + uint8_t* dst_yuy2, + int width) { + asm volatile( + "1: \n" + "ld1 {v0.16b}, [%0], %4 \n" // load 16 Ys + "ld1 {v1.16b}, [%1], %5 \n" // load 8 UVs + "subs %w3, %w3, #16 \n" + "prfm pldl1keep, [%0, 1792] \n" + "zip1 v2.16b, v0.16b, v1.16b \n" + "prfm pldl1keep, [%1, 1792] \n" + "zip2 v3.16b, v0.16b, v1.16b \n" + "st1 {v2.16b,v3.16b}, [%2], #32 \n" // store 8 YUY2 + "b.gt 1b \n" + : "+r"(src_y), // %0 + "+r"(src_uv), // %1 + "+r"(dst_yuy2), // %2 + "+r"(width) // %3 + : "r"(src_y_tile_stride), // %4 + "r"(src_uv_tile_stride) // %5 + : "cc", "memory", "v0", "v1", "v2", "v3" // Clobber list + ); +} +#endif + +// Unpack MT2T into tiled P010 64 pixels at a time. See +// tinyurl.com/mtk-10bit-video-format for format documentation. 
+void UnpackMT2T_NEON(const uint8_t* src, uint16_t* dst, size_t size) { + const uint8_t* src_lower_bits = src; + const uint8_t* src_upper_bits = src + 16; + asm volatile( + "1: \n" + "ld4 {v0.8b, v1.8b, v2.8b, v3.8b}, [%1], #32 \n" + "ld1 {v7.8b}, [%0], #8 \n" + "shl v6.8b, v7.8b, #2 \n" + "shl v5.8b, v7.8b, #4 \n" + "shl v4.8b, v7.8b, #6 \n" + "zip1 v0.16b, v4.16b, v0.16b \n" + "zip1 v1.16b, v5.16b, v1.16b \n" + "zip1 v2.16b, v6.16b, v2.16b \n" + "zip1 v3.16b, v7.16b, v3.16b \n" + "sri v0.8h, v0.8h, #10 \n" + "sri v1.8h, v1.8h, #10 \n" + "sri v2.8h, v2.8h, #10 \n" + "sri v3.8h, v3.8h, #10 \n" + "st4 {v0.8h, v1.8h, v2.8h, v3.8h}, [%2], #64 \n" + "ld4 {v0.8b, v1.8b, v2.8b, v3.8b}, [%1], #32 \n" + "ld1 {v7.8b}, [%0], #8 \n" + "shl v6.8b, v7.8b, #2 \n" + "shl v5.8b, v7.8b, #4 \n" + "shl v4.8b, v7.8b, #6 \n" + "zip1 v0.16b, v4.16b, v0.16b \n" + "zip1 v1.16b, v5.16b, v1.16b \n" + "zip1 v2.16b, v6.16b, v2.16b \n" + "zip1 v3.16b, v7.16b, v3.16b \n" + "sri v0.8h, v0.8h, #10 \n" + "sri v1.8h, v1.8h, #10 \n" + "sri v2.8h, v2.8h, #10 \n" + "sri v3.8h, v3.8h, #10 \n" + "st4 {v0.8h, v1.8h, v2.8h, v3.8h}, [%2], #64 \n" + "mov %0, %1 \n" + "add %1, %0, #16 \n" + "subs %3, %3, #80 \n" + "b.gt 1b \n" + : "+r"(src_lower_bits), // %0 + "+r"(src_upper_bits), // %1 + "+r"(dst), // %2 + "+r"(size) // %3 + : + : "cc", "memory", "w0", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", + "v8", "v9", "v10", "v11", "v12"); +} + +#if LIBYUV_USE_ST2 +// Reads 16 U's and V's and writes out 16 pairs of UV. +void MergeUVRow_NEON(const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_uv, + int width) { + asm volatile( + "1: \n" + "ld1 {v0.16b}, [%0], #16 \n" // load U + "ld1 {v1.16b}, [%1], #16 \n" // load V + "subs %w3, %w3, #16 \n" // 16 processed per loop + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "st2 {v0.16b,v1.16b}, [%2], #32 \n" // store 16 pairs of UV + "b.gt 1b \n" + : "+r"(src_u), // %0 + "+r"(src_v), // %1 + "+r"(dst_uv), // %2 + "+r"(width) // %3 // Output registers + : // Input registers + : "cc", "memory", "v0", "v1" // Clobber List + ); +} + +void MergeUVRow_16_NEON(const uint16_t* src_u, + const uint16_t* src_v, + uint16_t* dst_uv, + int depth, + int width) { + int shift = 16 - depth; + asm volatile( + "dup v2.8h, %w4 \n" + "1: \n" + "ld1 {v0.8h}, [%0], #16 \n" // load 8 U + "subs %w3, %w3, #8 \n" // 8 src pixels per loop + "ld1 {v1.8h}, [%1], #16 \n" // load 8 V + "ushl v0.8h, v0.8h, v2.8h \n" + "prfm pldl1keep, [%0, 448] \n" + "ushl v1.8h, v1.8h, v2.8h \n" + "prfm pldl1keep, [%1, 448] \n" + "st2 {v0.8h, v1.8h}, [%2], #32 \n" // store 8 UV pixels + "b.gt 1b \n" + : "+r"(src_u), // %0 + "+r"(src_v), // %1 + "+r"(dst_uv), // %2 + "+r"(width) // %3 + : "r"(shift) // %4 + : "cc", "memory", "v0", "v1", "v2"); +} +#else +// Reads 16 U's and V's and writes out 16 pairs of UV. 
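Both MergeUVRow_NEON variants, the st2 version under LIBYUV_USE_ST2 above and the zip1/zip2 + st1 version that follows, produce the same byte interleaving; the switch only selects whichever instruction sequence the target core runs faster, per the Exynos note at the top of the file. A minimal scalar sketch of that interleaving (illustrative only, not from the patch; the helper name is invented):

#include <stdint.h>

// Hypothetical scalar equivalent of MergeUVRow_NEON, for illustration only.
static void MergeUVRow_Sketch(const uint8_t* src_u,
                              const uint8_t* src_v,
                              uint8_t* dst_uv,
                              int width) {
  for (int i = 0; i < width; ++i) {
    dst_uv[2 * i] = src_u[i];      // U lands on the even byte
    dst_uv[2 * i + 1] = src_v[i];  // V lands on the odd byte
  }
}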
+void MergeUVRow_NEON(const uint8_t* src_u, + const uint8_t* src_v, + uint8_t* dst_uv, + int width) { + asm volatile( + "1: \n" + "ld1 {v0.16b}, [%0], #16 \n" // load U + "ld1 {v1.16b}, [%1], #16 \n" // load V + "subs %w3, %w3, #16 \n" // 16 processed per loop + "zip1 v2.16b, v0.16b, v1.16b \n" + "prfm pldl1keep, [%0, 448] \n" + "zip2 v3.16b, v0.16b, v1.16b \n" + "prfm pldl1keep, [%1, 448] \n" + "st1 {v2.16b,v3.16b}, [%2], #32 \n" // store 16 pairs of UV + "b.gt 1b \n" + : "+r"(src_u), // %0 + "+r"(src_v), // %1 + "+r"(dst_uv), // %2 + "+r"(width) // %3 // Output registers + : // Input registers + : "cc", "memory", "v0", "v1", "v2", "v3" // Clobber List + ); +} + +void MergeUVRow_16_NEON(const uint16_t* src_u, + const uint16_t* src_v, + uint16_t* dst_uv, + int depth, + int width) { + int shift = 16 - depth; + asm volatile( + "dup v4.8h, %w4 \n" + "1: \n" + "ld1 {v0.8h}, [%0], #16 \n" // load 8 U + "subs %w3, %w3, #8 \n" // 8 src pixels per loop + "ld1 {v1.8h}, [%1], #16 \n" // load 8 V + "ushl v0.8h, v0.8h, v4.8h \n" + "ushl v1.8h, v1.8h, v4.8h \n" + "prfm pldl1keep, [%0, 448] \n" + "zip1 v2.8h, v0.8h, v1.8h \n" + "zip2 v3.8h, v0.8h, v1.8h \n" + "prfm pldl1keep, [%1, 448] \n" + "st1 {v2.8h, v3.8h}, [%2], #32 \n" // store 8 UV pixels + "b.gt 1b \n" + : "+r"(src_u), // %0 + "+r"(src_v), // %1 + "+r"(dst_uv), // %2 + "+r"(width) // %3 + : "r"(shift) // %4 + : "cc", "memory", "v0", "v1", "v2", "v1", "v2", "v3", "v4"); +} +#endif // LIBYUV_USE_ST2 + +// Reads 16 packed RGB and write to planar dst_r, dst_g, dst_b. +void SplitRGBRow_NEON(const uint8_t* src_rgb, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width) { + asm volatile( + "1: \n" + "ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 16 RGB + "subs %w4, %w4, #16 \n" // 16 processed per loop + "prfm pldl1keep, [%0, 448] \n" + "st1 {v0.16b}, [%1], #16 \n" // store R + "st1 {v1.16b}, [%2], #16 \n" // store G + "st1 {v2.16b}, [%3], #16 \n" // store B + "b.gt 1b \n" + : "+r"(src_rgb), // %0 + "+r"(dst_r), // %1 + "+r"(dst_g), // %2 + "+r"(dst_b), // %3 + "+r"(width) // %4 + : // Input registers + : "cc", "memory", "v0", "v1", "v2" // Clobber List + ); +} + +// Reads 16 planar R's, G's and B's and writes out 16 packed RGB at a time +void MergeRGBRow_NEON(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + uint8_t* dst_rgb, + int width) { + asm volatile( + "1: \n" + "ld1 {v0.16b}, [%0], #16 \n" // load R + "ld1 {v1.16b}, [%1], #16 \n" // load G + "ld1 {v2.16b}, [%2], #16 \n" // load B + "subs %w4, %w4, #16 \n" // 16 processed per loop + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "prfm pldl1keep, [%2, 448] \n" + "st3 {v0.16b,v1.16b,v2.16b}, [%3], #48 \n" // store 16 RGB + "b.gt 1b \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_rgb), // %3 + "+r"(width) // %4 + : // Input registers + : "cc", "memory", "v0", "v1", "v2" // Clobber List + ); +} + +// Reads 16 packed ARGB and write to planar dst_r, dst_g, dst_b, dst_a. 
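A minimal scalar sketch of the plane split performed by SplitARGBRow_NEON below (illustrative only, not from the patch; the helper name is invented). It assumes libyuv's ARGB byte order of B, G, R, A in memory, which is why the four ld4 lanes are stored to dst_b, dst_g, dst_r and dst_a respectively.

#include <stdint.h>

// Hypothetical scalar equivalent of SplitARGBRow_NEON, for illustration only.
static void SplitARGBRow_Sketch(const uint8_t* src_argb,
                                uint8_t* dst_r,
                                uint8_t* dst_g,
                                uint8_t* dst_b,
                                uint8_t* dst_a,
                                int width) {
  for (int i = 0; i < width; ++i) {
    dst_b[i] = src_argb[4 * i + 0];  // B
    dst_g[i] = src_argb[4 * i + 1];  // G
    dst_r[i] = src_argb[4 * i + 2];  // R
    dst_a[i] = src_argb[4 * i + 3];  // A
  }
}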
+void SplitARGBRow_NEON(const uint8_t* src_rgba, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + uint8_t* dst_a, + int width) { + asm volatile( + "1: \n" + "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 ARGB + "subs %w5, %w5, #16 \n" // 16 processed per loop + "prfm pldl1keep, [%0, 448] \n" + "st1 {v0.16b}, [%3], #16 \n" // store B + "st1 {v1.16b}, [%2], #16 \n" // store G + "st1 {v2.16b}, [%1], #16 \n" // store R + "st1 {v3.16b}, [%4], #16 \n" // store A + "b.gt 1b \n" + : "+r"(src_rgba), // %0 + "+r"(dst_r), // %1 + "+r"(dst_g), // %2 + "+r"(dst_b), // %3 + "+r"(dst_a), // %4 + "+r"(width) // %5 + : // Input registers + : "cc", "memory", "v0", "v1", "v2", "v3" // Clobber List + ); +} + +#if LIBYUV_USE_ST4 +// Reads 16 planar R's, G's, B's and A's and writes out 16 packed ARGB at a time +void MergeARGBRow_NEON(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + const uint8_t* src_a, + uint8_t* dst_argb, + int width) { + asm volatile( + "1: \n" + "ld1 {v0.16b}, [%2], #16 \n" // load B + "ld1 {v1.16b}, [%1], #16 \n" // load G + "ld1 {v2.16b}, [%0], #16 \n" // load R + "ld1 {v3.16b}, [%3], #16 \n" // load A + "subs %w5, %w5, #16 \n" // 16 processed per loop + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "prfm pldl1keep, [%2, 448] \n" + "prfm pldl1keep, [%3, 448] \n" + "st4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%4], #64 \n" // store 16ARGB + "b.gt 1b \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(src_a), // %3 + "+r"(dst_argb), // %4 + "+r"(width) // %5 + : // Input registers + : "cc", "memory", "v0", "v1", "v2", "v3" // Clobber List + ); +} +#else +// Reads 16 planar R's, G's, B's and A's and writes out 16 packed ARGB at a time +void MergeARGBRow_NEON(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + const uint8_t* src_a, + uint8_t* dst_argb, + int width) { + asm volatile( + "1: \n" + "ld1 {v0.16b}, [%2], #16 \n" // load B + "ld1 {v1.16b}, [%1], #16 \n" // load G + "ld1 {v2.16b}, [%0], #16 \n" // load R + "ld1 {v3.16b}, [%3], #16 \n" // load A + "subs %w5, %w5, #16 \n" // 16 processed per loop + "prfm pldl1keep, [%2, 448] \n" + "zip1 v4.16b, v0.16b, v1.16b \n" // BG + "zip1 v5.16b, v2.16b, v3.16b \n" // RA + "prfm pldl1keep, [%1, 448] \n" + "zip2 v6.16b, v0.16b, v1.16b \n" // BG + "zip2 v7.16b, v2.16b, v3.16b \n" // RA + "prfm pldl1keep, [%0, 448] \n" + "zip1 v0.8h, v4.8h, v5.8h \n" // BGRA + "zip2 v1.8h, v4.8h, v5.8h \n" + "prfm pldl1keep, [%3, 448] \n" + "zip1 v2.8h, v6.8h, v7.8h \n" + "zip2 v3.8h, v6.8h, v7.8h \n" + "st1 {v0.16b,v1.16b,v2.16b,v3.16b}, [%4], #64 \n" // store 16ARGB + "b.gt 1b \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(src_a), // %3 + "+r"(dst_argb), // %4 + "+r"(width) // %5 + : // Input registers + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", + "v7" // Clobber List + ); +} +#endif // LIBYUV_USE_ST4 + +// Reads 16 packed ARGB and write to planar dst_r, dst_g, dst_b. 
+void SplitXRGBRow_NEON(const uint8_t* src_rgba, + uint8_t* dst_r, + uint8_t* dst_g, + uint8_t* dst_b, + int width) { + asm volatile( + "1: \n" + "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 ARGB + "subs %w4, %w4, #16 \n" // 16 processed per loop + "prfm pldl1keep, [%0, 448] \n" + "st1 {v0.16b}, [%3], #16 \n" // store B + "st1 {v1.16b}, [%2], #16 \n" // store G + "st1 {v2.16b}, [%1], #16 \n" // store R + "b.gt 1b \n" + : "+r"(src_rgba), // %0 + "+r"(dst_r), // %1 + "+r"(dst_g), // %2 + "+r"(dst_b), // %3 + "+r"(width) // %4 + : // Input registers + : "cc", "memory", "v0", "v1", "v2", "v3" // Clobber List + ); +} + +// Reads 16 planar R's, G's and B's and writes out 16 packed ARGB at a time +void MergeXRGBRow_NEON(const uint8_t* src_r, + const uint8_t* src_g, + const uint8_t* src_b, + uint8_t* dst_argb, + int width) { + asm volatile( + "movi v3.16b, #255 \n" // load A(255) + "1: \n" + "ld1 {v2.16b}, [%0], #16 \n" // load R + "ld1 {v1.16b}, [%1], #16 \n" // load G + "ld1 {v0.16b}, [%2], #16 \n" // load B + "subs %w4, %w4, #16 \n" // 16 processed per loop + "prfm pldl1keep, [%0, 448] \n" + "prfm pldl1keep, [%1, 448] \n" + "prfm pldl1keep, [%2, 448] \n" + "st4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%3], #64 \n" // store 16ARGB "b.gt 1b \n" - : "+r"(src_yuy2), // %0 - "+r"(dst_argb), // %1 - "+r"(width) // %2 - : [kUVToRB]"r"(&yuvconstants->kUVToRB), - [kUVToG]"r"(&yuvconstants->kUVToG), - [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR), - [kYToRgb]"r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_argb), // %3 + "+r"(width) // %4 + : // Input registers + : "cc", "memory", "v0", "v1", "v2", "v3" // Clobber List ); } -void UYVYToARGBRow_NEON(const uint8_t* src_uyvy, - uint8_t* dst_argb, - const struct YuvConstants* yuvconstants, - int width) { - asm volatile ( - YUVTORGB_SETUP - "movi v23.8b, #255 \n" +void MergeXR30Row_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_ar30, + int depth, + int width) { + int shift = 10 - depth; + asm volatile( + "movi v30.16b, #255 \n" + "ushr v30.4s, v30.4s, #22 \n" // 1023 + "dup v31.4s, %w5 \n" "1: \n" - READUYVY - YUVTORGB(v22, v21, v20) - "prfm pldl1keep, [%0, 448] \n" - "subs %w2, %w2, #8 \n" - "st4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], 32 \n" - "b.gt 1b \n" - : "+r"(src_uyvy), // %0 - "+r"(dst_argb), // %1 - "+r"(width) // %2 - : [kUVToRB]"r"(&yuvconstants->kUVToRB), - [kUVToG]"r"(&yuvconstants->kUVToG), - [kUVBiasBGR]"r"(&yuvconstants->kUVBiasBGR), - [kYToRgb]"r"(&yuvconstants->kYToRgb) - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v20", - "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30" - ); + "ldr d2, [%2], #8 \n" // B + "ldr d1, [%1], #8 \n" // G + "ldr d0, [%0], #8 \n" // R + "ushll v2.4s, v2.4h, #0 \n" // B + "ushll v1.4s, v1.4h, #0 \n" // G + "ushll v0.4s, v0.4h, #0 \n" // R + "ushl v2.4s, v2.4s, v31.4s \n" // 000B + "ushl v1.4s, v1.4s, v31.4s \n" // G + "ushl v0.4s, v0.4s, v31.4s \n" // R + "umin v2.4s, v2.4s, v30.4s \n" + "umin v1.4s, v1.4s, v30.4s \n" + "umin v0.4s, v0.4s, v30.4s \n" + "sli v2.4s, v1.4s, #10 \n" // 00GB + "sli v2.4s, v0.4s, #20 \n" // 0RGB + "orr v2.4s, #0xc0, lsl #24 \n" // ARGB (AR30) + "subs %w4, %w4, #4 \n" + "str q2, [%3], #16 \n" + "b.gt 1b \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_ar30), // %3 + 
"+r"(width) // %4 + : "r"(shift) // %5 + : "memory", "cc", "v0", "v1", "v2", "v30", "v31"); } -// Reads 16 pairs of UV and write even values to dst_u and odd to dst_v. -void SplitUVRow_NEON(const uint8_t* src_uv, - uint8_t* dst_u, - uint8_t* dst_v, - int width) { +void MergeXR30Row_10_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_ar30, + int /* depth */, + int width) { asm volatile( + "movi v30.16b, #255 \n" + "ushr v30.4s, v30.4s, #22 \n" // 1023 "1: \n" - "ld2 {v0.16b,v1.16b}, [%0], #32 \n" // load 16 pairs of UV - "prfm pldl1keep, [%0, 448] \n" - "subs %w3, %w3, #16 \n" // 16 processed per loop - "st1 {v0.16b}, [%1], #16 \n" // store U - "st1 {v1.16b}, [%2], #16 \n" // store V + "ldr d2, [%2], #8 \n" // B + "ldr d1, [%1], #8 \n" // G + "ldr d0, [%0], #8 \n" // R + "ushll v2.4s, v2.4h, #0 \n" // 000B + "ushll v1.4s, v1.4h, #0 \n" // G + "ushll v0.4s, v0.4h, #0 \n" // R + "umin v2.4s, v2.4s, v30.4s \n" + "umin v1.4s, v1.4s, v30.4s \n" + "umin v0.4s, v0.4s, v30.4s \n" + "sli v2.4s, v1.4s, #10 \n" // 00GB + "sli v2.4s, v0.4s, #20 \n" // 0RGB + "orr v2.4s, #0xc0, lsl #24 \n" // ARGB (AR30) + "subs %w4, %w4, #4 \n" + "str q2, [%3], #16 \n" "b.gt 1b \n" - : "+r"(src_uv), // %0 - "+r"(dst_u), // %1 - "+r"(dst_v), // %2 - "+r"(width) // %3 // Output registers - : // Input registers - : "cc", "memory", "v0", "v1" // Clobber List - ); + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_ar30), // %3 + "+r"(width) // %4 + : + : "memory", "cc", "v0", "v1", "v2", "v30"); } -// Reads 16 U's and V's and writes out 16 pairs of UV. -void MergeUVRow_NEON(const uint8_t* src_u, - const uint8_t* src_v, - uint8_t* dst_uv, - int width) { +void MergeAR64Row_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + const uint16_t* src_a, + uint16_t* dst_ar64, + int depth, + int width) { + int shift = 16 - depth; + int mask = (1 << depth) - 1; asm volatile( + + "dup v30.8h, %w7 \n" + "dup v31.8h, %w6 \n" "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load U - "ld1 {v1.16b}, [%1], #16 \n" // load V + "ldr q2, [%0], #16 \n" // R + "ldr q1, [%1], #16 \n" // G + "ldr q0, [%2], #16 \n" // B + "ldr q3, [%3], #16 \n" // A + "umin v2.8h, v2.8h, v30.8h \n" "prfm pldl1keep, [%0, 448] \n" + "umin v1.8h, v1.8h, v30.8h \n" "prfm pldl1keep, [%1, 448] \n" - "subs %w3, %w3, #16 \n" // 16 processed per loop - "st2 {v0.16b,v1.16b}, [%2], #32 \n" // store 16 pairs of UV + "umin v0.8h, v0.8h, v30.8h \n" + "prfm pldl1keep, [%2, 448] \n" + "umin v3.8h, v3.8h, v30.8h \n" + "prfm pldl1keep, [%3, 448] \n" + "ushl v2.8h, v2.8h, v31.8h \n" + "ushl v1.8h, v1.8h, v31.8h \n" + "ushl v0.8h, v0.8h, v31.8h \n" + "ushl v3.8h, v3.8h, v31.8h \n" + "subs %w5, %w5, #8 \n" + "st4 {v0.8h, v1.8h, v2.8h, v3.8h}, [%4], #64 \n" "b.gt 1b \n" - : "+r"(src_u), // %0 - "+r"(src_v), // %1 - "+r"(dst_uv), // %2 - "+r"(width) // %3 // Output registers - : // Input registers - : "cc", "memory", "v0", "v1" // Clobber List - ); + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(src_a), // %3 + "+r"(dst_ar64), // %4 + "+r"(width) // %5 + : "r"(shift), // %6 + "r"(mask) // %7 + : "memory", "cc", "v0", "v1", "v2", "v3", "v31"); } -// Reads 16 packed RGB and write to planar dst_r, dst_g, dst_b. 
-void SplitRGBRow_NEON(const uint8_t* src_rgb, - uint8_t* dst_r, - uint8_t* dst_g, - uint8_t* dst_b, - int width) { +void MergeXR64Row_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint16_t* dst_ar64, + int depth, + int width) { + int shift = 16 - depth; + int mask = (1 << depth) - 1; asm volatile( + + "movi v3.16b, #0xff \n" // A (0xffff) + "dup v30.8h, %w6 \n" + "dup v31.8h, %w5 \n" + "1: \n" - "ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 16 RGB + "ldr q2, [%0], #16 \n" // R + "ldr q1, [%1], #16 \n" // G + "ldr q0, [%2], #16 \n" // B + "umin v2.8h, v2.8h, v30.8h \n" "prfm pldl1keep, [%0, 448] \n" - "subs %w4, %w4, #16 \n" // 16 processed per loop - "st1 {v0.16b}, [%1], #16 \n" // store R - "st1 {v1.16b}, [%2], #16 \n" // store G - "st1 {v2.16b}, [%3], #16 \n" // store B + "umin v1.8h, v1.8h, v30.8h \n" + "prfm pldl1keep, [%1, 448] \n" + "umin v0.8h, v0.8h, v30.8h \n" + "prfm pldl1keep, [%2, 448] \n" + "ushl v2.8h, v2.8h, v31.8h \n" + "ushl v1.8h, v1.8h, v31.8h \n" + "ushl v0.8h, v0.8h, v31.8h \n" + "subs %w4, %w4, #8 \n" + "st4 {v0.8h, v1.8h, v2.8h, v3.8h}, [%3], #64 \n" "b.gt 1b \n" - : "+r"(src_rgb), // %0 - "+r"(dst_r), // %1 - "+r"(dst_g), // %2 - "+r"(dst_b), // %3 - "+r"(width) // %4 - : // Input registers - : "cc", "memory", "v0", "v1", "v2" // Clobber List - ); + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_ar64), // %3 + "+r"(width) // %4 + : "r"(shift), // %5 + "r"(mask) // %6 + : "memory", "cc", "v0", "v1", "v2", "v3", "v31"); } -// Reads 16 planar R's, G's and B's and writes out 16 packed RGB at a time -void MergeRGBRow_NEON(const uint8_t* src_r, - const uint8_t* src_g, - const uint8_t* src_b, - uint8_t* dst_rgb, - int width) { +void MergeARGB16To8Row_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + const uint16_t* src_a, + uint8_t* dst_argb, + int depth, + int width) { + int shift = 8 - depth; asm volatile( + + "dup v31.8h, %w6 \n" "1: \n" - "ld1 {v0.16b}, [%0], #16 \n" // load R - "ld1 {v1.16b}, [%1], #16 \n" // load G - "ld1 {v2.16b}, [%2], #16 \n" // load B + "ldr q2, [%0], #16 \n" // R + "ldr q1, [%1], #16 \n" // G + "ldr q0, [%2], #16 \n" // B + "ldr q3, [%3], #16 \n" // A + "ushl v2.8h, v2.8h, v31.8h \n" "prfm pldl1keep, [%0, 448] \n" + "ushl v1.8h, v1.8h, v31.8h \n" "prfm pldl1keep, [%1, 448] \n" + "ushl v0.8h, v0.8h, v31.8h \n" "prfm pldl1keep, [%2, 448] \n" - "subs %w4, %w4, #16 \n" // 16 processed per loop - "st3 {v0.16b,v1.16b,v2.16b}, [%3], #48 \n" // store 16 RGB + "ushl v3.8h, v3.8h, v31.8h \n" + "prfm pldl1keep, [%3, 448] \n" + "uqxtn v2.8b, v2.8h \n" + "uqxtn v1.8b, v1.8h \n" + "uqxtn v0.8b, v0.8h \n" + "uqxtn v3.8b, v3.8h \n" + "subs %w5, %w5, #8 \n" + "st4 {v0.8b, v1.8b, v2.8b, v3.8b}, [%4], #32 \n" + "b.gt 1b \n" + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(src_a), // %3 + "+r"(dst_argb), // %4 + "+r"(width) // %5 + : "r"(shift) // %6 + : "memory", "cc", "v0", "v1", "v2", "v3", "v31"); +} + +void MergeXRGB16To8Row_NEON(const uint16_t* src_r, + const uint16_t* src_g, + const uint16_t* src_b, + uint8_t* dst_argb, + int depth, + int width) { + int shift = 8 - depth; + asm volatile( + + "dup v31.8h, %w5 \n" + "movi v3.8b, #0xff \n" // A (0xff) + "1: \n" + "ldr q2, [%0], #16 \n" // R + "ldr q1, [%1], #16 \n" // G + "ldr q0, [%2], #16 \n" // B + "ushl v2.8h, v2.8h, v31.8h \n" "prfm pldl1keep, [%0, 448] \n" + "ushl v1.8h, v1.8h, v31.8h \n" + "prfm pldl1keep, [%1, 448] \n" + "ushl v0.8h, v0.8h, v31.8h \n" + "prfm pldl1keep, [%2, 448] \n" + 
"uqxtn v2.8b, v2.8h \n" + "uqxtn v1.8b, v1.8h \n" + "uqxtn v0.8b, v0.8h \n" + "subs %w4, %w4, #8 \n" + "st4 {v0.8b, v1.8b, v2.8b, v3.8b}, [%3], #32 \n" "b.gt 1b \n" - : "+r"(src_r), // %0 - "+r"(src_g), // %1 - "+r"(src_b), // %2 - "+r"(dst_rgb), // %3 - "+r"(width) // %4 - : // Input registers - : "cc", "memory", "v0", "v1", "v2" // Clobber List - ); + : "+r"(src_r), // %0 + "+r"(src_g), // %1 + "+r"(src_b), // %2 + "+r"(dst_argb), // %3 + "+r"(width) // %4 + : "r"(shift) // %5 + : "memory", "cc", "v0", "v1", "v2", "v3", "v31"); } // Copy multiple of 32. @@ -925,10 +1538,10 @@ void RAWToARGBRow_NEON(const uint8_t* src_raw, uint8_t* dst_argb, int width) { "movi v5.8b, #255 \n" // Alpha "1: \n" "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // read r g b + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "orr v3.8b, v1.8b, v1.8b \n" // move g "prfm pldl1keep, [%0, 448] \n" - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "orr v3.8b, v1.8b, v1.8b \n" // move g - "orr v4.8b, v0.8b, v0.8b \n" // move r + "orr v4.8b, v0.8b, v0.8b \n" // move r "st4 {v2.8b,v3.8b,v4.8b,v5.8b}, [%1], #32 \n" // store b g r a "b.gt 1b \n" : "+r"(src_raw), // %0 @@ -944,10 +1557,10 @@ void RAWToRGBARow_NEON(const uint8_t* src_raw, uint8_t* dst_rgba, int width) { "movi v0.8b, #255 \n" // Alpha "1: \n" "ld3 {v3.8b,v4.8b,v5.8b}, [%0], #24 \n" // read r g b + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "orr v2.8b, v4.8b, v4.8b \n" // move g "prfm pldl1keep, [%0, 448] \n" - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "orr v2.8b, v4.8b, v4.8b \n" // move g - "orr v1.8b, v5.8b, v5.8b \n" // move r + "orr v1.8b, v5.8b, v5.8b \n" // move r "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store a b g r "b.gt 1b \n" : "+r"(src_raw), // %0 @@ -962,9 +1575,9 @@ void RAWToRGB24Row_NEON(const uint8_t* src_raw, uint8_t* dst_rgb24, int width) { asm volatile( "1: \n" "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // read r g b - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 processed per loop. "orr v3.8b, v1.8b, v1.8b \n" // move g + "prfm pldl1keep, [%0, 448] \n" "orr v4.8b, v0.8b, v0.8b \n" // move r "st3 {v2.8b,v3.8b,v4.8b}, [%1], #24 \n" // store b g r "b.gt 1b \n" @@ -996,9 +1609,8 @@ void RGB565ToARGBRow_NEON(const uint8_t* src_rgb565, "movi v3.8b, #255 \n" // Alpha "1: \n" "ld1 {v0.16b}, [%0], #16 \n" // load 8 RGB565 pixels. - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 processed per loop. - RGB565TOARGB + "prfm pldl1keep, [%0, 448] \n" RGB565TOARGB "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 ARGB "b.gt 1b \n" : "+r"(src_rgb565), // %0 @@ -1086,9 +1698,8 @@ void ARGB4444ToARGBRow_NEON(const uint8_t* src_argb4444, asm volatile( "1: \n" "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB4444 pixels. - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 processed per loop. - ARGB4444TOARGB + "prfm pldl1keep, [%0, 448] \n" ARGB4444TOARGB "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 ARGB "b.gt 1b \n" : "+r"(src_argb4444), // %0 @@ -1104,17 +1715,16 @@ void ARGBToRGB24Row_NEON(const uint8_t* src_argb, int width) { asm volatile( "1: \n" - "ld4 {v1.8b,v2.8b,v3.8b,v4.8b}, [%0], #32 \n" // load 8 ARGB + "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 ARGB + "subs %w2, %w2, #16 \n" // 16 pixels per loop. "prfm pldl1keep, [%0, 448] \n" - "subs %w2, %w2, #8 \n" // 8 processed per loop. 
- "st3 {v1.8b,v2.8b,v3.8b}, [%1], #24 \n" // store 8 pixels of - // RGB24 + "st3 {v0.16b,v1.16b,v2.16b}, [%1], #48 \n" // store 8 RGB24 "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_rgb24), // %1 "+r"(width) // %2 : - : "cc", "memory", "v1", "v2", "v3", "v4" // Clobber List + : "cc", "memory", "v0", "v1", "v2", "v3" // Clobber List ); } @@ -1122,9 +1732,9 @@ void ARGBToRAWRow_NEON(const uint8_t* src_argb, uint8_t* dst_raw, int width) { asm volatile( "1: \n" "ld4 {v1.8b,v2.8b,v3.8b,v4.8b}, [%0], #32 \n" // load b g r a + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "orr v4.8b, v2.8b, v2.8b \n" // mov g "prfm pldl1keep, [%0, 448] \n" - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "orr v4.8b, v2.8b, v2.8b \n" // mov g "orr v5.8b, v1.8b, v1.8b \n" // mov b "st3 {v3.8b,v4.8b,v5.8b}, [%1], #24 \n" // store r g b "b.gt 1b \n" @@ -1140,8 +1750,8 @@ void YUY2ToYRow_NEON(const uint8_t* src_yuy2, uint8_t* dst_y, int width) { asm volatile( "1: \n" "ld2 {v0.16b,v1.16b}, [%0], #32 \n" // load 16 pixels of YUY2. - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #16 \n" // 16 processed per loop. + "prfm pldl1keep, [%0, 448] \n" "st1 {v0.16b}, [%1], #16 \n" // store 16 pixels of Y. "b.gt 1b \n" : "+r"(src_yuy2), // %0 @@ -1156,8 +1766,8 @@ void UYVYToYRow_NEON(const uint8_t* src_uyvy, uint8_t* dst_y, int width) { asm volatile( "1: \n" "ld2 {v0.16b,v1.16b}, [%0], #32 \n" // load 16 pixels of UYVY. - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #16 \n" // 16 processed per loop. + "prfm pldl1keep, [%0, 448] \n" "st1 {v1.16b}, [%1], #16 \n" // store 16 pixels of Y. "b.gt 1b \n" : "+r"(src_uyvy), // %0 @@ -1175,8 +1785,8 @@ void YUY2ToUV422Row_NEON(const uint8_t* src_yuy2, asm volatile( "1: \n" "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 YUY2 - "prfm pldl1keep, [%0, 448] \n" "subs %w3, %w3, #16 \n" // 16 pixels = 8 UVs. + "prfm pldl1keep, [%0, 448] \n" "st1 {v1.8b}, [%1], #8 \n" // store 8 U. "st1 {v3.8b}, [%2], #8 \n" // store 8 V. "b.gt 1b \n" @@ -1196,8 +1806,8 @@ void UYVYToUV422Row_NEON(const uint8_t* src_uyvy, asm volatile( "1: \n" "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 UYVY - "prfm pldl1keep, [%0, 448] \n" "subs %w3, %w3, #16 \n" // 16 pixels = 8 UVs. + "prfm pldl1keep, [%0, 448] \n" "st1 {v0.8b}, [%1], #8 \n" // store 8 U. "st1 {v2.8b}, [%2], #8 \n" // store 8 V. "b.gt 1b \n" @@ -1219,10 +1829,10 @@ void YUY2ToUVRow_NEON(const uint8_t* src_yuy2, asm volatile( "1: \n" "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 pixels - "prfm pldl1keep, [%0, 448] \n" "subs %w4, %w4, #16 \n" // 16 pixels = 8 UVs. "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load next row "urhadd v1.8b, v1.8b, v5.8b \n" // average rows of U + "prfm pldl1keep, [%0, 448] \n" "urhadd v3.8b, v3.8b, v7.8b \n" // average rows of V "st1 {v1.8b}, [%2], #8 \n" // store 8 U. "st1 {v3.8b}, [%3], #8 \n" // store 8 V. @@ -1247,10 +1857,10 @@ void UYVYToUVRow_NEON(const uint8_t* src_uyvy, asm volatile( "1: \n" "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 16 pixels - "prfm pldl1keep, [%0, 448] \n" "subs %w4, %w4, #16 \n" // 16 pixels = 8 UVs. "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load next row "urhadd v0.8b, v0.8b, v4.8b \n" // average rows of U + "prfm pldl1keep, [%0, 448] \n" "urhadd v2.8b, v2.8b, v6.8b \n" // average rows of V "st1 {v0.8b}, [%2], #8 \n" // store 8 U. "st1 {v2.8b}, [%3], #8 \n" // store 8 V. 
@@ -1266,6 +1876,29 @@ void UYVYToUVRow_NEON(const uint8_t* src_uyvy, ); } +void YUY2ToNVUVRow_NEON(const uint8_t* src_yuy2, + int stride_yuy2, + uint8_t* dst_uv, + int width) { + const uint8_t* src_yuy2b = src_yuy2 + stride_yuy2; + asm volatile( + "1: \n" + "ld2 {v0.16b,v1.16b}, [%0], #32 \n" // load 16 pixels + "subs %w3, %w3, #16 \n" // 16 pixels = 8 UVs. + "ld2 {v2.16b,v3.16b}, [%1], #32 \n" // load next row + "urhadd v4.16b, v1.16b, v3.16b \n" // average rows of UV + "prfm pldl1keep, [%0, 448] \n" + "st1 {v4.16b}, [%2], #16 \n" // store 8 UV. + "b.gt 1b \n" + : "+r"(src_yuy2), // %0 + "+r"(src_yuy2b), // %1 + "+r"(dst_uv), // %2 + "+r"(width) // %3 + : + : "cc", "memory", "v0", "v1", "v2", "v3", "v4" // Clobber List + ); +} + // For BGRAToARGB, ABGRToARGB, RGBAToARGB, and ARGBToRGBA. void ARGBShuffleRow_NEON(const uint8_t* src_argb, uint8_t* dst_argb, @@ -1275,8 +1908,8 @@ void ARGBShuffleRow_NEON(const uint8_t* src_argb, "ld1 {v2.16b}, [%3] \n" // shuffler "1: \n" "ld1 {v0.16b}, [%0], #16 \n" // load 4 pixels. - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #4 \n" // 4 processed per loop + "prfm pldl1keep, [%0, 448] \n" "tbl v1.16b, {v0.16b}, v2.16b \n" // look up 4 pixels "st1 {v1.16b}, [%1], #16 \n" // store 4. "b.gt 1b \n" @@ -1296,11 +1929,11 @@ void I422ToYUY2Row_NEON(const uint8_t* src_y, asm volatile( "1: \n" "ld2 {v0.8b, v1.8b}, [%0], #16 \n" // load 16 Ys - "prfm pldl1keep, [%0, 448] \n" + "subs %w4, %w4, #16 \n" // 16 pixels "orr v2.8b, v1.8b, v1.8b \n" + "prfm pldl1keep, [%0, 448] \n" "ld1 {v1.8b}, [%1], #8 \n" // load 8 Us "ld1 {v3.8b}, [%2], #8 \n" // load 8 Vs - "subs %w4, %w4, #16 \n" // 16 pixels "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%3], #32 \n" // Store 16 pixels. "b.gt 1b \n" : "+r"(src_y), // %0 @@ -1320,8 +1953,8 @@ void I422ToUYVYRow_NEON(const uint8_t* src_y, asm volatile( "1: \n" "ld2 {v1.8b,v2.8b}, [%0], #16 \n" // load 16 Ys - "prfm pldl1keep, [%0, 448] \n" "orr v3.8b, v2.8b, v2.8b \n" + "prfm pldl1keep, [%0, 448] \n" "ld1 {v0.8b}, [%1], #8 \n" // load 8 Us "ld1 {v2.8b}, [%2], #8 \n" // load 8 Vs "subs %w4, %w4, #16 \n" // 16 pixels @@ -1341,18 +1974,17 @@ void ARGBToRGB565Row_NEON(const uint8_t* src_argb, int width) { asm volatile( "1: \n" - "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%0], #32 \n" // load 8 + "ld4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%0], #32 \n" // load 8 // pixels - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 processed per loop. - ARGBTORGB565 - "st1 {v0.16b}, [%1], #16 \n" // store 8 pixels RGB565. + "prfm pldl1keep, [%0, 448] \n" ARGBTORGB565 + "st1 {v18.16b}, [%1], #16 \n" // store 8 pixels RGB565. "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_rgb565), // %1 "+r"(width) // %2 : - : "cc", "memory", "v0", "v20", "v21", "v22", "v23"); + : "cc", "memory", "v16", "v17", "v18", "v19"); } void ARGBToRGB565DitherRow_NEON(const uint8_t* src_argb, @@ -1362,20 +1994,20 @@ void ARGBToRGB565DitherRow_NEON(const uint8_t* src_argb, asm volatile( "dup v1.4s, %w2 \n" // dither4 "1: \n" - "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%1], #32 \n" // load 8 + "ld4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%1], #32 \n" // load 8 // pixels - "prfm pldl1keep, [%0, 448] \n" "subs %w3, %w3, #8 \n" // 8 processed per loop. - "uqadd v20.8b, v20.8b, v1.8b \n" - "uqadd v21.8b, v21.8b, v1.8b \n" - "uqadd v22.8b, v22.8b, v1.8b \n" ARGBTORGB565 - "st1 {v0.16b}, [%0], #16 \n" // store 8 pixels RGB565. 
+ "uqadd v16.8b, v16.8b, v1.8b \n" + "prfm pldl1keep, [%0, 448] \n" + "uqadd v17.8b, v17.8b, v1.8b \n" + "uqadd v18.8b, v18.8b, v1.8b \n" ARGBTORGB565 + "st1 {v18.16b}, [%0], #16 \n" // store 8 pixels RGB565. "b.gt 1b \n" : "+r"(dst_rgb) // %0 : "r"(src_argb), // %1 "r"(dither4), // %2 "r"(width) // %3 - : "cc", "memory", "v0", "v1", "v20", "v21", "v22", "v23"); + : "cc", "memory", "v1", "v16", "v17", "v18", "v19"); } void ARGBToARGB1555Row_NEON(const uint8_t* src_argb, @@ -1383,125 +2015,198 @@ void ARGBToARGB1555Row_NEON(const uint8_t* src_argb, int width) { asm volatile( "1: \n" - "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%0], #32 \n" // load 8 + "ld4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%0], #32 \n" // load 8 // pixels - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 processed per loop. - ARGBTOARGB1555 + "prfm pldl1keep, [%0, 448] \n" ARGBTOARGB1555 "st1 {v0.16b}, [%1], #16 \n" // store 8 pixels "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb1555), // %1 "+r"(width) // %2 : - : "cc", "memory", "v0", "v20", "v21", "v22", "v23"); + : "cc", "memory", "v0", "v16", "v17", "v18", "v19"); } void ARGBToARGB4444Row_NEON(const uint8_t* src_argb, uint8_t* dst_argb4444, int width) { asm volatile( - "movi v4.16b, #0x0f \n" // bits to clear with + "movi v23.16b, #0x0f \n" // bits to clear with // vbic. "1: \n" - "ld4 {v20.8b,v21.8b,v22.8b,v23.8b}, [%0], #32 \n" // load 8 + "ld4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%0], #32 \n" // load 8 // pixels - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 processed per loop. - ARGBTOARGB4444 + "prfm pldl1keep, [%0, 448] \n" ARGBTOARGB4444 "st1 {v0.16b}, [%1], #16 \n" // store 8 pixels "b.gt 1b \n" : "+r"(src_argb), // %0 "+r"(dst_argb4444), // %1 "+r"(width) // %2 : - : "cc", "memory", "v0", "v1", "v4", "v20", "v21", "v22", "v23"); + : "cc", "memory", "v0", "v1", "v16", "v17", "v18", "v19", "v23"); } -void ARGBToYRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { +#if LIBYUV_USE_ST2 +void ARGBToAR64Row_NEON(const uint8_t* src_argb, + uint16_t* dst_ar64, + int width) { asm volatile( - "movi v4.8b, #25 \n" // B * 0.1016 coefficient - "movi v5.8b, #129 \n" // G * 0.5078 coefficient - "movi v6.8b, #66 \n" // R * 0.2578 coefficient - "movi v7.8b, #16 \n" // Add 16 constant "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB + "ldp q0, q2, [%0], #32 \n" // load 8 pixels + "mov v1.16b, v0.16b \n" "prfm pldl1keep, [%0, 448] \n" + "mov v3.16b, v2.16b \n" "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v3.8h, v0.8b, v4.8b \n" // B - "umlal v3.8h, v1.8b, v5.8b \n" // G - "umlal v3.8h, v2.8b, v6.8b \n" // R - "uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y - "uqadd v0.8b, v0.8b, v7.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. 
+ "st2 {v0.16b, v1.16b}, [%1], #32 \n" // store 4 pixels + "st2 {v2.16b, v3.16b}, [%1], #32 \n" // store 4 pixels "b.gt 1b \n" : "+r"(src_argb), // %0 - "+r"(dst_y), // %1 + "+r"(dst_ar64), // %1 "+r"(width) // %2 : - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7"); + : "cc", "memory", "v0", "v1", "v2", "v3"); } -void ARGBExtractAlphaRow_NEON(const uint8_t* src_argb, - uint8_t* dst_a, - int width) { +static const uvec8 kShuffleARGBToABGR = {2, 1, 0, 3, 6, 5, 4, 7, + 10, 9, 8, 11, 14, 13, 12, 15}; + +void ARGBToAB64Row_NEON(const uint8_t* src_argb, + uint16_t* dst_ab64, + int width) { asm volatile( + "ldr q4, [%3] \n" // shuffler "1: \n" - "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 + "ldp q0, q2, [%0], #32 \n" // load 8 pixels + "tbl v0.16b, {v0.16b}, v4.16b \n" + "tbl v2.16b, {v2.16b}, v4.16b \n" "prfm pldl1keep, [%0, 448] \n" - "subs %w2, %w2, #16 \n" // 16 processed per loop - "st1 {v3.16b}, [%1], #16 \n" // store 16 A's. + "mov v1.16b, v0.16b \n" + "mov v3.16b, v2.16b \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "st2 {v0.16b, v1.16b}, [%1], #32 \n" // store 4 pixels + "st2 {v2.16b, v3.16b}, [%1], #32 \n" // store 4 pixels + "b.gt 1b \n" + : "+r"(src_argb), // %0 + "+r"(dst_ab64), // %1 + "+r"(width) // %2 + : "r"(&kShuffleARGBToABGR) // %3 + : "cc", "memory", "v0", "v1", "v2", "v3", "v4"); +} +#else +void ARGBToAR64Row_NEON(const uint8_t* src_argb, + uint16_t* dst_ar64, + int width) { + asm volatile( + "1: \n" + "ldp q0, q1, [%0], #32 \n" // load 8 ARGB pixels + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "zip1 v2.16b, v0.16b, v0.16b \n" + "zip2 v3.16b, v0.16b, v0.16b \n" + "prfm pldl1keep, [%0, 448] \n" + "zip1 v4.16b, v1.16b, v1.16b \n" + "zip2 v5.16b, v1.16b, v1.16b \n" + "st1 {v2.8h, v3.8h, v4.8h, v5.8h}, [%1], #64 \n" // 8 AR64 "b.gt 1b \n" : "+r"(src_argb), // %0 - "+r"(dst_a), // %1 + "+r"(dst_ar64), // %1 "+r"(width) // %2 : - : "cc", "memory", "v0", "v1", "v2", "v3" // Clobber List - ); + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5"); } -void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { +static const uvec8 kShuffleARGBToAB64[2] = { + {2, 2, 1, 1, 0, 0, 3, 3, 6, 6, 5, 5, 4, 4, 7, 7}, + {10, 10, 9, 9, 8, 8, 11, 11, 14, 14, 13, 13, 12, 12, 15, 15}}; + +void ARGBToAB64Row_NEON(const uint8_t* src_argb, + uint16_t* dst_ab64, + int width) { asm volatile( - "movi v4.8b, #29 \n" // B * 0.1140 coefficient - "movi v5.8b, #150 \n" // G * 0.5870 coefficient - "movi v6.8b, #77 \n" // R * 0.2990 coefficient + "ldp q6, q7, [%3] \n" // 2 shufflers "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB + "ldp q0, q1, [%0], #32 \n" // load 8 pixels + "subs %w2, %w2, #8 \n" // 8 processed per loop. 
+ "tbl v2.16b, {v0.16b}, v6.16b \n" // ARGB to AB64 + "tbl v3.16b, {v0.16b}, v7.16b \n" + "prfm pldl1keep, [%0, 448] \n" + "tbl v4.16b, {v1.16b}, v6.16b \n" + "tbl v5.16b, {v1.16b}, v7.16b \n" + "st1 {v2.8h, v3.8h, v4.8h, v5.8h}, [%1], #64 \n" // 8 AR64 + "b.gt 1b \n" + : "+r"(src_argb), // %0 + "+r"(dst_ab64), // %1 + "+r"(width) // %2 + : "r"(&kShuffleARGBToAB64[0]) // %3 + : "cc", "memory", "v0", "v1", "v2", "v3", "v4"); +} +#endif // LIBYUV_USE_ST2 + +static const uvec8 kShuffleAR64ToARGB = {1, 3, 5, 7, 9, 11, 13, 15, + 17, 19, 21, 23, 25, 27, 29, 31}; + +void AR64ToARGBRow_NEON(const uint16_t* src_ar64, + uint8_t* dst_argb, + int width) { + asm volatile( + "ldr q4, [%3] \n" // shuffler + "1: \n" + "ldp q0, q1, [%0], #32 \n" // load 4 pixels + "ldp q2, q3, [%0], #32 \n" // load 4 pixels + "tbl v0.16b, {v0.16b, v1.16b}, v4.16b \n" "prfm pldl1keep, [%0, 448] \n" + "tbl v2.16b, {v2.16b, v3.16b}, v4.16b \n" "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v3.8h, v0.8b, v4.8b \n" // B - "umlal v3.8h, v1.8b, v5.8b \n" // G - "umlal v3.8h, v2.8b, v6.8b \n" // R - "uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. + "stp q0, q2, [%1], #32 \n" // store 8 pixels "b.gt 1b \n" - : "+r"(src_argb), // %0 - "+r"(dst_y), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6"); + : "+r"(src_ar64), // %0 + "+r"(dst_argb), // %1 + "+r"(width) // %2 + : "r"(&kShuffleAR64ToARGB) // %3 + : "cc", "memory", "v0", "v1", "v2", "v3", "v4"); +} + +static const uvec8 kShuffleAB64ToARGB = {5, 3, 1, 7, 13, 11, 9, 15, + 21, 19, 17, 23, 29, 27, 25, 31}; + +void AB64ToARGBRow_NEON(const uint16_t* src_ab64, + uint8_t* dst_argb, + int width) { + asm volatile( + "ldr q4, [%3] \n" // shuffler + "1: \n" + "ldp q0, q1, [%0], #32 \n" // load 4 pixels + "ldp q2, q3, [%0], #32 \n" // load 4 pixels + "tbl v0.16b, {v0.16b, v1.16b}, v4.16b \n" + "prfm pldl1keep, [%0, 448] \n" + "tbl v2.16b, {v2.16b, v3.16b}, v4.16b \n" + "subs %w2, %w2, #8 \n" // 8 processed per loop. + "stp q0, q2, [%1], #32 \n" // store 8 pixels + "b.gt 1b \n" + : "+r"(src_ab64), // %0 + "+r"(dst_argb), // %1 + "+r"(width) // %2 + : "r"(&kShuffleAB64ToARGB) // %3 + : "cc", "memory", "v0", "v1", "v2", "v3", "v4"); } -void RGBAToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { +void ARGBExtractAlphaRow_NEON(const uint8_t* src_argb, + uint8_t* dst_a, + int width) { asm volatile( - "movi v4.8b, #29 \n" // B * 0.1140 coefficient - "movi v5.8b, #150 \n" // G * 0.5870 coefficient - "movi v6.8b, #77 \n" // R * 0.2990 coefficient "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 RGBA + "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 "prfm pldl1keep, [%0, 448] \n" - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v0.8h, v1.8b, v4.8b \n" // B - "umlal v0.8h, v2.8b, v5.8b \n" // G - "umlal v0.8h, v3.8b, v6.8b \n" // R - "uqrshrn v3.8b, v0.8h, #8 \n" // 16 bit to 8 bit Y - "st1 {v3.8b}, [%1], #8 \n" // store 8 pixels Y. + "subs %w2, %w2, #16 \n" // 16 processed per loop + "st1 {v3.16b}, [%1], #16 \n" // store 16 A's. "b.gt 1b \n" : "+r"(src_argb), // %0 - "+r"(dst_y), // %1 + "+r"(dst_a), // %1 "+r"(width) // %2 : - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6"); + : "cc", "memory", "v0", "v1", "v2", "v3" // Clobber List + ); } // 8x1 pixels. 
@@ -1519,20 +2224,18 @@ void ARGBToUV444Row_NEON(const uint8_t* src_argb, "movi v29.16b,#0x80 \n" // 128.5 "1: \n" "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB - "prfm pldl1keep, [%0, 448] \n" "subs %w3, %w3, #8 \n" // 8 processed per loop. "umull v4.8h, v0.8b, v24.8b \n" // B "umlsl v4.8h, v1.8b, v25.8b \n" // G "umlsl v4.8h, v2.8b, v26.8b \n" // R - "add v4.8h, v4.8h, v29.8h \n" // +128 -> unsigned + "prfm pldl1keep, [%0, 448] \n" "umull v3.8h, v2.8b, v24.8b \n" // R "umlsl v3.8h, v1.8b, v28.8b \n" // G "umlsl v3.8h, v0.8b, v27.8b \n" // B - "add v3.8h, v3.8h, v29.8h \n" // +128 -> unsigned - "uqshrn v0.8b, v4.8h, #8 \n" // 16 bit to 8 bit U - "uqshrn v1.8b, v3.8h, #8 \n" // 16 bit to 8 bit V + "addhn v0.8b, v4.8h, v29.8h \n" // +128 -> unsigned + "addhn v1.8b, v3.8h, v29.8h \n" // +128 -> unsigned "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels U. "st1 {v1.8b}, [%2], #8 \n" // store 8 pixels V. @@ -1563,10 +2266,8 @@ void ARGBToUV444Row_NEON(const uint8_t* src_argb, "mls v4.8h, " #QG ",v24.8h \n" /* G */ \ "mls v3.8h, " #QR ",v22.8h \n" /* R */ \ "mls v4.8h, " #QB ",v23.8h \n" /* B */ \ - "add v3.8h, v3.8h, v25.8h \n" /* +128 -> unsigned */ \ - "add v4.8h, v4.8h, v25.8h \n" /* +128 -> unsigned */ \ - "uqshrn v0.8b, v3.8h, #8 \n" /* 16 bit to 8 bit U */ \ - "uqshrn v1.8b, v4.8h, #8 \n" /* 16 bit to 8 bit V */ + "addhn v0.8b, v3.8h, v25.8h \n" /* +128 -> unsigned */ \ + "addhn v1.8b, v4.8h, v25.8h \n" /* +128 -> unsigned */ // clang-format on // TODO(fbarchard): Consider vhadd vertical, then vpaddl horizontal, avoid shr. @@ -1582,14 +2283,14 @@ void ARGBToUVRow_NEON(const uint8_t* src_argb, RGBTOUV_SETUP_REG "1: \n" "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. - "prfm pldl1keep, [%0, 448] \n" "uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%0, 448] \n" "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. "uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts. "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16 - "prfm pldl1keep, [%1, 448] \n" "uadalp v0.8h, v4.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%1, 448] \n" "uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts. "uadalp v2.8h, v6.16b \n" // R 16 bytes -> 8 shorts. @@ -1613,6 +2314,7 @@ void ARGBToUVRow_NEON(const uint8_t* src_argb, ); } +// TODO(fbarchard): Subsample match Intel code. void ARGBToUVJRow_NEON(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, @@ -1628,13 +2330,13 @@ void ARGBToUVJRow_NEON(const uint8_t* src_argb, "movi v25.16b, #0x80 \n" // 128.5 (0x8080 in 16-bit) "1: \n" "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. - "prfm pldl1keep, [%0, 448] \n" "uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%0, 448] \n" "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. "uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts. "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16 - "prfm pldl1keep, [%1, 448] \n" "uadalp v0.8h, v4.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%1, 448] \n" "uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts. "uadalp v2.8h, v6.16b \n" // R 16 bytes -> 8 shorts. @@ -1642,7 +2344,7 @@ void ARGBToUVJRow_NEON(const uint8_t* src_argb, "urshr v1.8h, v1.8h, #1 \n" "urshr v2.8h, v2.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 32 processed per loop. + "subs %w4, %w4, #16 \n" // 16 processed per loop. RGBTOUV(v0.8h, v1.8h, v2.8h) "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. 
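Editor's note: the addhn change in the hunk above replaces the old add + uqshrn #8 pair in the U/V paths. A rough scalar sketch of what both forms compute, illustrative only and not part of the patch: the chroma accumulator is effectively 8.8 fixed point, and the 0x8080 constant (the "128.5" in the comments) supplies both the +128 bias to unsigned in the high byte and a +0.5 rounding term in the low byte, while addhn fuses the add with taking the high half of each 16-bit lane. For the in-range sums produced here the truncating addhn and the saturating uqshrn agree.

#include <stdint.h>

/* Equivalent of "add v, v, #0x8080; uqshrn #8" and of "addhn v, v, #0x8080"
 * for in-range 8.8 fixed-point chroma sums. */
static inline uint8_t chroma_narrow(uint16_t sum_8p8) {
  return (uint8_t)((sum_8p8 + 0x8080u) >> 8);
}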
@@ -1658,6 +2360,141 @@ void ARGBToUVJRow_NEON(const uint8_t* src_argb, ); } +void ABGRToUVJRow_NEON(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_uj, + uint8_t* dst_vj, + int width) { + const uint8_t* src_abgr_1 = src_abgr + src_stride_abgr; + asm volatile ( + "movi v20.8h, #63, lsl #0 \n" // UB/VR coeff (0.500) / 2 + "movi v21.8h, #42, lsl #0 \n" // UG coeff (-0.33126) / 2 + "movi v22.8h, #21, lsl #0 \n" // UR coeff (-0.16874) / 2 + "movi v23.8h, #10, lsl #0 \n" // VB coeff (-0.08131) / 2 + "movi v24.8h, #53, lsl #0 \n" // VG coeff (-0.41869) / 2 + "movi v25.16b, #0x80 \n" // 128.5 (0x8080 in 16-bit) + "1: \n" + "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. + "uaddlp v0.8h, v0.16b \n" // R 16 bytes -> 8 shorts. + "prfm pldl1keep, [%0, 448] \n" + "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. + "uaddlp v2.8h, v2.16b \n" // B 16 bytes -> 8 shorts. + "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16 + "uadalp v0.8h, v4.16b \n" // R 16 bytes -> 8 shorts. + "prfm pldl1keep, [%1, 448] \n" + "uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts. + "uadalp v2.8h, v6.16b \n" // B 16 bytes -> 8 shorts. + + "urshr v0.8h, v0.8h, #1 \n" // 2x average + "urshr v1.8h, v1.8h, #1 \n" + "urshr v2.8h, v2.8h, #1 \n" + + "subs %w4, %w4, #16 \n" // 16 processed per loop. + RGBTOUV(v2.8h, v1.8h, v0.8h) + "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. + "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. + "b.gt 1b \n" + : "+r"(src_abgr), // %0 + "+r"(src_abgr_1), // %1 + "+r"(dst_uj), // %2 + "+r"(dst_vj), // %3 + "+r"(width) // %4 + : + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", + "v20", "v21", "v22", "v23", "v24", "v25" + ); +} + +void RGB24ToUVJRow_NEON(const uint8_t* src_rgb24, + int src_stride_rgb24, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + const uint8_t* src_rgb24_1 = src_rgb24 + src_stride_rgb24; + asm volatile ( + "movi v20.8h, #63, lsl #0 \n" // UB/VR coeff (0.500) / 2 + "movi v21.8h, #42, lsl #0 \n" // UG coeff (-0.33126) / 2 + "movi v22.8h, #21, lsl #0 \n" // UR coeff (-0.16874) / 2 + "movi v23.8h, #10, lsl #0 \n" // VB coeff (-0.08131) / 2 + "movi v24.8h, #53, lsl #0 \n" // VG coeff (-0.41869) / 2 + "movi v25.16b, #0x80 \n" // 128.5 (0x8080 in 16-bit) + "1: \n" + "ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 16 pixels. + "uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%0, 448] \n" + "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. + "uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts. + "ld3 {v4.16b,v5.16b,v6.16b}, [%1], #48 \n" // load next 16 + "uadalp v0.8h, v4.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%1, 448] \n" + "uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts. + "uadalp v2.8h, v6.16b \n" // R 16 bytes -> 8 shorts. + + "urshr v0.8h, v0.8h, #1 \n" // 2x average + "urshr v1.8h, v1.8h, #1 \n" + "urshr v2.8h, v2.8h, #1 \n" + + "subs %w4, %w4, #16 \n" // 16 processed per loop. + RGBTOUV(v0.8h, v1.8h, v2.8h) + "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. + "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. 
+ "b.gt 1b \n" + : "+r"(src_rgb24), // %0 + "+r"(src_rgb24_1), // %1 + "+r"(dst_u), // %2 + "+r"(dst_v), // %3 + "+r"(width) // %4 + : + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", + "v20", "v21", "v22", "v23", "v24", "v25" + ); +} + +void RAWToUVJRow_NEON(const uint8_t* src_raw, + int src_stride_raw, + uint8_t* dst_u, + uint8_t* dst_v, + int width) { + const uint8_t* src_raw_1 = src_raw + src_stride_raw; + asm volatile ( + "movi v20.8h, #63, lsl #0 \n" // UB/VR coeff (0.500) / 2 + "movi v21.8h, #42, lsl #0 \n" // UG coeff (-0.33126) / 2 + "movi v22.8h, #21, lsl #0 \n" // UR coeff (-0.16874) / 2 + "movi v23.8h, #10, lsl #0 \n" // VB coeff (-0.08131) / 2 + "movi v24.8h, #53, lsl #0 \n" // VG coeff (-0.41869) / 2 + "movi v25.16b, #0x80 \n" // 128.5 (0x8080 in 16-bit) + "1: \n" + "ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 16 pixels. + "uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%0, 448] \n" + "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. + "uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts. + "ld3 {v4.16b,v5.16b,v6.16b}, [%1], #48 \n" // load next 16 + "uadalp v0.8h, v4.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%1, 448] \n" + "uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts. + "uadalp v2.8h, v6.16b \n" // R 16 bytes -> 8 shorts. + + "urshr v0.8h, v0.8h, #1 \n" // 2x average + "urshr v1.8h, v1.8h, #1 \n" + "urshr v2.8h, v2.8h, #1 \n" + + "subs %w4, %w4, #16 \n" // 16 processed per loop. + RGBTOUV(v2.8h, v1.8h, v0.8h) + "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. + "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. + "b.gt 1b \n" + : "+r"(src_raw), // %0 + "+r"(src_raw_1), // %1 + "+r"(dst_u), // %2 + "+r"(dst_v), // %3 + "+r"(width) // %4 + : + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", + "v20", "v21", "v22", "v23", "v24", "v25" + ); +} + void BGRAToUVRow_NEON(const uint8_t* src_bgra, int src_stride_bgra, uint8_t* dst_u, @@ -1668,13 +2505,13 @@ void BGRAToUVRow_NEON(const uint8_t* src_bgra, RGBTOUV_SETUP_REG "1: \n" "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. - "prfm pldl1keep, [%0, 448] \n" "uaddlp v0.8h, v3.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%0, 448] \n" "uaddlp v3.8h, v2.16b \n" // G 16 bytes -> 8 shorts. "uaddlp v2.8h, v1.16b \n" // R 16 bytes -> 8 shorts. "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load 16 more - "prfm pldl1keep, [%1, 448] \n" "uadalp v0.8h, v7.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%1, 448] \n" "uadalp v3.8h, v6.16b \n" // G 16 bytes -> 8 shorts. "uadalp v2.8h, v5.16b \n" // R 16 bytes -> 8 shorts. @@ -1682,7 +2519,7 @@ void BGRAToUVRow_NEON(const uint8_t* src_bgra, "urshr v1.8h, v3.8h, #1 \n" "urshr v2.8h, v2.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 32 processed per loop. + "subs %w4, %w4, #16 \n" // 16 processed per loop. RGBTOUV(v0.8h, v1.8h, v2.8h) "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. @@ -1708,13 +2545,13 @@ void ABGRToUVRow_NEON(const uint8_t* src_abgr, RGBTOUV_SETUP_REG "1: \n" "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. - "prfm pldl1keep, [%0, 448] \n" "uaddlp v3.8h, v2.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%0, 448] \n" "uaddlp v2.8h, v1.16b \n" // G 16 bytes -> 8 shorts. "uaddlp v1.8h, v0.16b \n" // R 16 bytes -> 8 shorts. "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load 16 more. - "prfm pldl1keep, [%1, 448] \n" "uadalp v3.8h, v6.16b \n" // B 16 bytes -> 8 shorts. 
+ "prfm pldl1keep, [%1, 448] \n" "uadalp v2.8h, v5.16b \n" // G 16 bytes -> 8 shorts. "uadalp v1.8h, v4.16b \n" // R 16 bytes -> 8 shorts. @@ -1722,7 +2559,7 @@ void ABGRToUVRow_NEON(const uint8_t* src_abgr, "urshr v2.8h, v2.8h, #1 \n" "urshr v1.8h, v1.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 32 processed per loop. + "subs %w4, %w4, #16 \n" // 16 processed per loop. RGBTOUV(v0.8h, v2.8h, v1.8h) "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. @@ -1748,13 +2585,13 @@ void RGBAToUVRow_NEON(const uint8_t* src_rgba, RGBTOUV_SETUP_REG "1: \n" "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 pixels. - "prfm pldl1keep, [%0, 448] \n" "uaddlp v0.8h, v1.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%0, 448] \n" "uaddlp v1.8h, v2.16b \n" // G 16 bytes -> 8 shorts. "uaddlp v2.8h, v3.16b \n" // R 16 bytes -> 8 shorts. "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load 16 more. - "prfm pldl1keep, [%1, 448] \n" "uadalp v0.8h, v5.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%1, 448] \n" "uadalp v1.8h, v6.16b \n" // G 16 bytes -> 8 shorts. "uadalp v2.8h, v7.16b \n" // R 16 bytes -> 8 shorts. @@ -1762,7 +2599,7 @@ void RGBAToUVRow_NEON(const uint8_t* src_rgba, "urshr v1.8h, v1.8h, #1 \n" "urshr v2.8h, v2.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 32 processed per loop. + "subs %w4, %w4, #16 \n" // 16 processed per loop. RGBTOUV(v0.8h, v1.8h, v2.8h) "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. @@ -1788,13 +2625,13 @@ void RGB24ToUVRow_NEON(const uint8_t* src_rgb24, RGBTOUV_SETUP_REG "1: \n" "ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 16 pixels. - "prfm pldl1keep, [%0, 448] \n" "uaddlp v0.8h, v0.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%0, 448] \n" "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. "uaddlp v2.8h, v2.16b \n" // R 16 bytes -> 8 shorts. "ld3 {v4.16b,v5.16b,v6.16b}, [%1], #48 \n" // load 16 more. - "prfm pldl1keep, [%1, 448] \n" "uadalp v0.8h, v4.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%1, 448] \n" "uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts. "uadalp v2.8h, v6.16b \n" // R 16 bytes -> 8 shorts. @@ -1802,7 +2639,7 @@ void RGB24ToUVRow_NEON(const uint8_t* src_rgb24, "urshr v1.8h, v1.8h, #1 \n" "urshr v2.8h, v2.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 32 processed per loop. + "subs %w4, %w4, #16 \n" // 16 processed per loop. RGBTOUV(v0.8h, v1.8h, v2.8h) "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. @@ -1827,14 +2664,14 @@ void RAWToUVRow_NEON(const uint8_t* src_raw, asm volatile ( RGBTOUV_SETUP_REG "1: \n" - "ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 8 RAW pixels. - "prfm pldl1keep, [%0, 448] \n" + "ld3 {v0.16b,v1.16b,v2.16b}, [%0], #48 \n" // load 16 RAW pixels. "uaddlp v2.8h, v2.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%0, 448] \n" "uaddlp v1.8h, v1.16b \n" // G 16 bytes -> 8 shorts. "uaddlp v0.8h, v0.16b \n" // R 16 bytes -> 8 shorts. "ld3 {v4.16b,v5.16b,v6.16b}, [%1], #48 \n" // load 8 more RAW pixels - "prfm pldl1keep, [%1, 448] \n" "uadalp v2.8h, v6.16b \n" // B 16 bytes -> 8 shorts. + "prfm pldl1keep, [%1, 448] \n" "uadalp v1.8h, v5.16b \n" // G 16 bytes -> 8 shorts. "uadalp v0.8h, v4.16b \n" // R 16 bytes -> 8 shorts. @@ -1842,7 +2679,7 @@ void RAWToUVRow_NEON(const uint8_t* src_raw, "urshr v1.8h, v1.8h, #1 \n" "urshr v0.8h, v0.8h, #1 \n" - "subs %w4, %w4, #16 \n" // 32 processed per loop. + "subs %w4, %w4, #16 \n" // 16 processed per loop. 
RGBTOUV(v2.8h, v1.8h, v0.8h) "st1 {v0.8b}, [%2], #8 \n" // store 8 pixels U. "st1 {v1.8b}, [%3], #8 \n" // store 8 pixels V. @@ -1869,9 +2706,9 @@ void RGB565ToUVRow_NEON(const uint8_t* src_rgb565, RGBTOUV_SETUP_REG "1: \n" "ld1 {v0.16b}, [%0], #16 \n" // load 8 RGB565 pixels. - "prfm pldl1keep, [%0, 448] \n" RGB565TOARGB "uaddlp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "prfm pldl1keep, [%0, 448] \n" "uaddlp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. "uaddlp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. "ld1 {v0.16b}, [%0], #16 \n" // next 8 RGB565 pixels. @@ -1881,9 +2718,9 @@ void RGB565ToUVRow_NEON(const uint8_t* src_rgb565, "uaddlp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts. "ld1 {v0.16b}, [%1], #16 \n" // load 8 RGB565 pixels. - "prfm pldl1keep, [%1, 448] \n" RGB565TOARGB "uadalp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "prfm pldl1keep, [%1, 448] \n" "uadalp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. "uadalp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. "ld1 {v0.16b}, [%1], #16 \n" // next 8 RGB565 pixels. @@ -1927,9 +2764,9 @@ void ARGB1555ToUVRow_NEON(const uint8_t* src_argb1555, RGBTOUV_SETUP_REG "1: \n" "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB1555 pixels. - "prfm pldl1keep, [%0, 448] \n" RGB555TOARGB "uaddlp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "prfm pldl1keep, [%0, 448] \n" "uaddlp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. "uaddlp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. "ld1 {v0.16b}, [%0], #16 \n" // next 8 ARGB1555 pixels. @@ -1939,9 +2776,9 @@ void ARGB1555ToUVRow_NEON(const uint8_t* src_argb1555, "uaddlp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts. "ld1 {v0.16b}, [%1], #16 \n" // load 8 ARGB1555 pixels. - "prfm pldl1keep, [%1, 448] \n" RGB555TOARGB "uadalp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "prfm pldl1keep, [%1, 448] \n" "uadalp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. "uadalp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. "ld1 {v0.16b}, [%1], #16 \n" // next 8 ARGB1555 pixels. @@ -1985,9 +2822,9 @@ void ARGB4444ToUVRow_NEON(const uint8_t* src_argb4444, RGBTOUV_SETUP_REG // sets v20-v25 "1: \n" "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB4444 pixels. - "prfm pldl1keep, [%0, 448] \n" ARGB4444TOARGB "uaddlp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "prfm pldl1keep, [%0, 448] \n" "uaddlp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. "uaddlp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. "ld1 {v0.16b}, [%0], #16 \n" // next 8 ARGB4444 pixels. @@ -1997,9 +2834,9 @@ void ARGB4444ToUVRow_NEON(const uint8_t* src_argb4444, "uaddlp v28.4h, v2.8b \n" // R 8 bytes -> 4 shorts. "ld1 {v0.16b}, [%1], #16 \n" // load 8 ARGB4444 pixels. - "prfm pldl1keep, [%1, 448] \n" ARGB4444TOARGB "uadalp v16.4h, v0.8b \n" // B 8 bytes -> 4 shorts. + "prfm pldl1keep, [%1, 448] \n" "uadalp v17.4h, v1.8b \n" // G 8 bytes -> 4 shorts. "uadalp v18.4h, v2.8b \n" // R 8 bytes -> 4 shorts. "ld1 {v0.16b}, [%1], #16 \n" // next 8 ARGB4444 pixels. @@ -2042,10 +2879,10 @@ void RGB565ToYRow_NEON(const uint8_t* src_rgb565, uint8_t* dst_y, int width) { "movi v27.8b, #16 \n" // Add 16 constant "1: \n" "ld1 {v0.16b}, [%0], #16 \n" // load 8 RGB565 pixels. - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 processed per loop. 
RGB565TOARGB "umull v3.8h, v0.8b, v24.8b \n" // B + "prfm pldl1keep, [%0, 448] \n" "umlal v3.8h, v1.8b, v25.8b \n" // G "umlal v3.8h, v2.8b, v26.8b \n" // R "uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y @@ -2070,10 +2907,10 @@ void ARGB1555ToYRow_NEON(const uint8_t* src_argb1555, "movi v7.8b, #16 \n" // Add 16 constant "1: \n" "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB1555 pixels. - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 processed per loop. ARGB1555TOARGB "umull v3.8h, v0.8b, v4.8b \n" // B + "prfm pldl1keep, [%0, 448] \n" "umlal v3.8h, v1.8b, v5.8b \n" // G "umlal v3.8h, v2.8b, v6.8b \n" // R "uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y @@ -2097,10 +2934,10 @@ void ARGB4444ToYRow_NEON(const uint8_t* src_argb4444, "movi v27.8b, #16 \n" // Add 16 constant "1: \n" "ld1 {v0.16b}, [%0], #16 \n" // load 8 ARGB4444 pixels. - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 processed per loop. ARGB4444TOARGB "umull v3.8h, v0.8b, v24.8b \n" // B + "prfm pldl1keep, [%0, 448] \n" "umlal v3.8h, v1.8b, v25.8b \n" // G "umlal v3.8h, v2.8b, v26.8b \n" // R "uqrshrn v0.8b, v3.8h, #8 \n" // 16 bit to 8 bit Y @@ -2114,168 +2951,179 @@ void ARGB4444ToYRow_NEON(const uint8_t* src_argb4444, : "cc", "memory", "v0", "v1", "v2", "v3", "v24", "v25", "v26", "v27"); } -void BGRAToYRow_NEON(const uint8_t* src_bgra, uint8_t* dst_y, int width) { +struct RgbConstants { + uint8_t kRGBToY[4]; + uint16_t kAddY; + uint16_t pad; +}; + +// RGB to JPeg coefficients +// B * 0.1140 coefficient = 29 +// G * 0.5870 coefficient = 150 +// R * 0.2990 coefficient = 77 +// Add 0.5 = 0x80 +static const struct RgbConstants kRgb24JPEGConstants = {{29, 150, 77, 0}, + 128, + 0}; + +static const struct RgbConstants kRawJPEGConstants = {{77, 150, 29, 0}, 128, 0}; + +// RGB to BT.601 coefficients +// B * 0.1016 coefficient = 25 +// G * 0.5078 coefficient = 129 +// R * 0.2578 coefficient = 66 +// Add 16.5 = 0x1080 + +static const struct RgbConstants kRgb24I601Constants = {{25, 129, 66, 0}, + 0x1080, + 0}; + +static const struct RgbConstants kRawI601Constants = {{66, 129, 25, 0}, + 0x1080, + 0}; + +// ARGB expects first 3 values to contain RGB and 4th value is ignored. +void ARGBToYMatrixRow_NEON(const uint8_t* src_argb, + uint8_t* dst_y, + int width, + const struct RgbConstants* rgbconstants) { asm volatile( - "movi v4.8b, #66 \n" // R * 0.2578 coefficient - "movi v5.8b, #129 \n" // G * 0.5078 coefficient - "movi v6.8b, #25 \n" // B * 0.1016 coefficient - "movi v7.8b, #16 \n" // Add 16 constant + "ldr d0, [%3] \n" // load rgbconstants + "dup v6.16b, v0.b[0] \n" + "dup v7.16b, v0.b[1] \n" + "dup v16.16b, v0.b[2] \n" + "dup v17.8h, v0.h[2] \n" "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 pixels. + "ld4 {v2.16b,v3.16b,v4.16b,v5.16b}, [%0], #64 \n" // load 16 + // pixels. + "subs %w2, %w2, #16 \n" // 16 processed per loop. + "umull v0.8h, v2.8b, v6.8b \n" // B + "umull2 v1.8h, v2.16b, v6.16b \n" "prfm pldl1keep, [%0, 448] \n" - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v16.8h, v1.8b, v4.8b \n" // R - "umlal v16.8h, v2.8b, v5.8b \n" // G - "umlal v16.8h, v3.8b, v6.8b \n" // B - "uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y - "uqadd v0.8b, v0.8b, v7.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. 
+ "umlal v0.8h, v3.8b, v7.8b \n" // G + "umlal2 v1.8h, v3.16b, v7.16b \n" + "umlal v0.8h, v4.8b, v16.8b \n" // R + "umlal2 v1.8h, v4.16b, v16.16b \n" + "addhn v0.8b, v0.8h, v17.8h \n" // 16 bit to 8 bit Y + "addhn v1.8b, v1.8h, v17.8h \n" + "st1 {v0.8b, v1.8b}, [%1], #16 \n" // store 16 pixels Y. "b.gt 1b \n" - : "+r"(src_bgra), // %0 - "+r"(dst_y), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v16"); + : "+r"(src_argb), // %0 + "+r"(dst_y), // %1 + "+r"(width) // %2 + : "r"(rgbconstants) // %3 + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v16", + "v17"); +} + +void ARGBToYRow_NEON(const uint8_t* src_argb, uint8_t* dst_y, int width) { + ARGBToYMatrixRow_NEON(src_argb, dst_y, width, &kRgb24I601Constants); +} + +void ARGBToYJRow_NEON(const uint8_t* src_argb, uint8_t* dst_yj, int width) { + ARGBToYMatrixRow_NEON(src_argb, dst_yj, width, &kRgb24JPEGConstants); } void ABGRToYRow_NEON(const uint8_t* src_abgr, uint8_t* dst_y, int width) { - asm volatile( - "movi v6.8b, #25 \n" // B * 0.1016 coefficient - "movi v5.8b, #129 \n" // G * 0.5078 coefficient - "movi v4.8b, #66 \n" // R * 0.2578 coefficient - "movi v7.8b, #16 \n" // Add 16 constant - "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 pixels. - "prfm pldl1keep, [%0, 448] \n" - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v16.8h, v0.8b, v4.8b \n" // R - "umlal v16.8h, v1.8b, v5.8b \n" // G - "umlal v16.8h, v2.8b, v6.8b \n" // B - "uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y - "uqadd v0.8b, v0.8b, v7.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. - "b.gt 1b \n" - : "+r"(src_abgr), // %0 - "+r"(dst_y), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v16"); + ARGBToYMatrixRow_NEON(src_abgr, dst_y, width, &kRawI601Constants); } -void RGBAToYRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width) { - asm volatile( - "movi v4.8b, #25 \n" // B * 0.1016 coefficient - "movi v5.8b, #129 \n" // G * 0.5078 coefficient - "movi v6.8b, #66 \n" // R * 0.2578 coefficient - "movi v7.8b, #16 \n" // Add 16 constant - "1: \n" - "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 pixels. - "prfm pldl1keep, [%0, 448] \n" - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v16.8h, v1.8b, v4.8b \n" // B - "umlal v16.8h, v2.8b, v5.8b \n" // G - "umlal v16.8h, v3.8b, v6.8b \n" // R - "uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y - "uqadd v0.8b, v0.8b, v7.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. - "b.gt 1b \n" - : "+r"(src_rgba), // %0 - "+r"(dst_y), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v16"); +void ABGRToYJRow_NEON(const uint8_t* src_abgr, uint8_t* dst_yj, int width) { + ARGBToYMatrixRow_NEON(src_abgr, dst_yj, width, &kRawJPEGConstants); } -void RGB24ToYRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_y, int width) { +// RGBA expects first value to be A and ignored, then 3 values to contain RGB. 
+// Same code as ARGB, except the LD4 +void RGBAToYMatrixRow_NEON(const uint8_t* src_rgba, + uint8_t* dst_y, + int width, + const struct RgbConstants* rgbconstants) { asm volatile( - "movi v4.8b, #25 \n" // B * 0.1016 coefficient - "movi v5.8b, #129 \n" // G * 0.5078 coefficient - "movi v6.8b, #66 \n" // R * 0.2578 coefficient - "movi v7.8b, #16 \n" // Add 16 constant + "ldr d0, [%3] \n" // load rgbconstants + "dup v6.16b, v0.b[0] \n" + "dup v7.16b, v0.b[1] \n" + "dup v16.16b, v0.b[2] \n" + "dup v17.8h, v0.h[2] \n" "1: \n" - "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // load 8 pixels. + "ld4 {v1.16b,v2.16b,v3.16b,v4.16b}, [%0], #64 \n" // load 16 + // pixels. + "subs %w2, %w2, #16 \n" // 16 processed per loop. + "umull v0.8h, v2.8b, v6.8b \n" // B + "umull2 v1.8h, v2.16b, v6.16b \n" "prfm pldl1keep, [%0, 448] \n" - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v16.8h, v0.8b, v4.8b \n" // B - "umlal v16.8h, v1.8b, v5.8b \n" // G - "umlal v16.8h, v2.8b, v6.8b \n" // R - "uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y - "uqadd v0.8b, v0.8b, v7.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. + "umlal v0.8h, v3.8b, v7.8b \n" // G + "umlal2 v1.8h, v3.16b, v7.16b \n" + "umlal v0.8h, v4.8b, v16.8b \n" // R + "umlal2 v1.8h, v4.16b, v16.16b \n" + "addhn v0.8b, v0.8h, v17.8h \n" // 16 bit to 8 bit Y + "addhn v1.8b, v1.8h, v17.8h \n" + "st1 {v0.8b, v1.8b}, [%1], #16 \n" // store 16 pixels Y. "b.gt 1b \n" - : "+r"(src_rgb24), // %0 - "+r"(dst_y), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v16"); + : "+r"(src_rgba), // %0 + "+r"(dst_y), // %1 + "+r"(width) // %2 + : "r"(rgbconstants) // %3 + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v16", + "v17"); } -void RAWToYRow_NEON(const uint8_t* src_raw, uint8_t* dst_y, int width) { +void RGBAToYRow_NEON(const uint8_t* src_rgba, uint8_t* dst_y, int width) { + RGBAToYMatrixRow_NEON(src_rgba, dst_y, width, &kRgb24I601Constants); +} + +void RGBAToYJRow_NEON(const uint8_t* src_rgba, uint8_t* dst_yj, int width) { + RGBAToYMatrixRow_NEON(src_rgba, dst_yj, width, &kRgb24JPEGConstants); +} + +void BGRAToYRow_NEON(const uint8_t* src_bgra, uint8_t* dst_y, int width) { + RGBAToYMatrixRow_NEON(src_bgra, dst_y, width, &kRawI601Constants); +} + +void RGBToYMatrixRow_NEON(const uint8_t* src_rgb, + uint8_t* dst_y, + int width, + const struct RgbConstants* rgbconstants) { asm volatile( - "movi v6.8b, #25 \n" // B * 0.1016 coefficient - "movi v5.8b, #129 \n" // G * 0.5078 coefficient - "movi v4.8b, #66 \n" // R * 0.2578 coefficient - "movi v7.8b, #16 \n" // Add 16 constant + "ldr d0, [%3] \n" // load rgbconstants + "dup v5.16b, v0.b[0] \n" + "dup v6.16b, v0.b[1] \n" + "dup v7.16b, v0.b[2] \n" + "dup v16.8h, v0.h[2] \n" "1: \n" - "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // load 8 pixels. + "ld3 {v2.16b,v3.16b,v4.16b}, [%0], #48 \n" // load 16 pixels. + "subs %w2, %w2, #16 \n" // 16 processed per loop. + "umull v0.8h, v2.8b, v5.8b \n" // B + "umull2 v1.8h, v2.16b, v5.16b \n" "prfm pldl1keep, [%0, 448] \n" - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v16.8h, v0.8b, v4.8b \n" // B - "umlal v16.8h, v1.8b, v5.8b \n" // G - "umlal v16.8h, v2.8b, v6.8b \n" // R - "uqrshrn v0.8b, v16.8h, #8 \n" // 16 bit to 8 bit Y - "uqadd v0.8b, v0.8b, v7.8b \n" - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. 
+ "umlal v0.8h, v3.8b, v6.8b \n" // G + "umlal2 v1.8h, v3.16b, v6.16b \n" + "umlal v0.8h, v4.8b, v7.8b \n" // R + "umlal2 v1.8h, v4.16b, v7.16b \n" + "addhn v0.8b, v0.8h, v16.8h \n" // 16 bit to 8 bit Y + "addhn v1.8b, v1.8h, v16.8h \n" + "st1 {v0.8b, v1.8b}, [%1], #16 \n" // store 16 pixels Y. "b.gt 1b \n" - : "+r"(src_raw), // %0 - "+r"(dst_y), // %1 - "+r"(width) // %2 - : + : "+r"(src_rgb), // %0 + "+r"(dst_y), // %1 + "+r"(width) // %2 + : "r"(rgbconstants) // %3 : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v16"); } void RGB24ToYJRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_yj, int width) { - asm volatile( - "movi v4.8b, #29 \n" // B * 0.1140 coefficient - "movi v5.8b, #150 \n" // G * 0.5870 coefficient - "movi v6.8b, #77 \n" // R * 0.2990 coefficient - "1: \n" - "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // load 8 pixels. - "prfm pldl1keep, [%0, 448] \n" - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v0.8h, v0.8b, v4.8b \n" // B - "umlal v0.8h, v1.8b, v5.8b \n" // G - "umlal v0.8h, v2.8b, v6.8b \n" // R - "uqrshrn v0.8b, v0.8h, #8 \n" // 16 bit to 8 bit Y - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. - "b.gt 1b \n" - : "+r"(src_rgb24), // %0 - "+r"(dst_yj), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6"); + RGBToYMatrixRow_NEON(src_rgb24, dst_yj, width, &kRgb24JPEGConstants); } void RAWToYJRow_NEON(const uint8_t* src_raw, uint8_t* dst_yj, int width) { - asm volatile( - "movi v6.8b, #29 \n" // B * 0.1140 coefficient - "movi v5.8b, #150 \n" // G * 0.5870 coefficient - "movi v4.8b, #77 \n" // R * 0.2990 coefficient - "1: \n" - "ld3 {v0.8b,v1.8b,v2.8b}, [%0], #24 \n" // load 8 pixels. - "prfm pldl1keep, [%0, 448] \n" - "subs %w2, %w2, #8 \n" // 8 processed per loop. - "umull v0.8h, v0.8b, v4.8b \n" // B - "umlal v0.8h, v1.8b, v5.8b \n" // G - "umlal v0.8h, v2.8b, v6.8b \n" // R - "uqrshrn v0.8b, v0.8h, #8 \n" // 16 bit to 8 bit Y - "st1 {v0.8b}, [%1], #8 \n" // store 8 pixels Y. - "b.gt 1b \n" - : "+r"(src_raw), // %0 - "+r"(dst_yj), // %1 - "+r"(width) // %2 - : - : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6"); + RGBToYMatrixRow_NEON(src_raw, dst_yj, width, &kRawJPEGConstants); +} + +void RGB24ToYRow_NEON(const uint8_t* src_rgb24, uint8_t* dst_y, int width) { + RGBToYMatrixRow_NEON(src_rgb24, dst_y, width, &kRgb24I601Constants); +} + +void RAWToYRow_NEON(const uint8_t* src_raw, uint8_t* dst_y, int width) { + RGBToYMatrixRow_NEON(src_raw, dst_y, width, &kRawI601Constants); } // Bilinear filter 16x2 -> 16x1 @@ -2299,11 +3147,11 @@ void InterpolateRow_NEON(uint8_t* dst_ptr, "1: \n" "ld1 {v0.16b}, [%1], #16 \n" "ld1 {v1.16b}, [%2], #16 \n" - "prfm pldl1keep, [%1, 448] \n" - "prfm pldl1keep, [%2, 448] \n" "subs %w3, %w3, #16 \n" "umull v2.8h, v0.8b, v4.8b \n" + "prfm pldl1keep, [%1, 448] \n" "umull2 v3.8h, v0.16b, v4.16b \n" + "prfm pldl1keep, [%2, 448] \n" "umlal v2.8h, v1.8b, v5.8b \n" "umlal2 v3.8h, v1.16b, v5.16b \n" "rshrn v0.8b, v2.8h, #8 \n" @@ -2316,10 +3164,10 @@ void InterpolateRow_NEON(uint8_t* dst_ptr, "50: \n" "ld1 {v0.16b}, [%1], #16 \n" "ld1 {v1.16b}, [%2], #16 \n" - "prfm pldl1keep, [%1, 448] \n" - "prfm pldl1keep, [%2, 448] \n" "subs %w3, %w3, #16 \n" + "prfm pldl1keep, [%1, 448] \n" "urhadd v0.16b, v0.16b, v1.16b \n" + "prfm pldl1keep, [%2, 448] \n" "st1 {v0.16b}, [%0], #16 \n" "b.gt 50b \n" "b 99f \n" @@ -2327,8 +3175,8 @@ void InterpolateRow_NEON(uint8_t* dst_ptr, // Blend 100 / 0 - Copy row unchanged. 
"100: \n" "ld1 {v0.16b}, [%1], #16 \n" - "prfm pldl1keep, [%1, 448] \n" "subs %w3, %w3, #16 \n" + "prfm pldl1keep, [%1, 448] \n" "st1 {v0.16b}, [%0], #16 \n" "b.gt 100b \n" @@ -2343,8 +3191,153 @@ void InterpolateRow_NEON(uint8_t* dst_ptr, : "cc", "memory", "v0", "v1", "v3", "v4", "v5"); } +// Bilinear filter 8x2 -> 8x1 +void InterpolateRow_16_NEON(uint16_t* dst_ptr, + const uint16_t* src_ptr, + ptrdiff_t src_stride, + int dst_width, + int source_y_fraction) { + int y1_fraction = source_y_fraction; + int y0_fraction = 256 - y1_fraction; + const uint16_t* src_ptr1 = src_ptr + src_stride; + + asm volatile( + "cmp %w4, #0 \n" + "b.eq 100f \n" + "cmp %w4, #128 \n" + "b.eq 50f \n" + + "dup v5.8h, %w4 \n" + "dup v4.8h, %w5 \n" + // General purpose row blend. + "1: \n" + "ld1 {v0.8h}, [%1], #16 \n" + "ld1 {v1.8h}, [%2], #16 \n" + "subs %w3, %w3, #8 \n" + "umull v2.4s, v0.4h, v4.4h \n" + "prfm pldl1keep, [%1, 448] \n" + "umull2 v3.4s, v0.8h, v4.8h \n" + "prfm pldl1keep, [%2, 448] \n" + "umlal v2.4s, v1.4h, v5.4h \n" + "umlal2 v3.4s, v1.8h, v5.8h \n" + "rshrn v0.4h, v2.4s, #8 \n" + "rshrn2 v0.8h, v3.4s, #8 \n" + "st1 {v0.8h}, [%0], #16 \n" + "b.gt 1b \n" + "b 99f \n" + + // Blend 50 / 50. + "50: \n" + "ld1 {v0.8h}, [%1], #16 \n" + "ld1 {v1.8h}, [%2], #16 \n" + "subs %w3, %w3, #8 \n" + "prfm pldl1keep, [%1, 448] \n" + "urhadd v0.8h, v0.8h, v1.8h \n" + "prfm pldl1keep, [%2, 448] \n" + "st1 {v0.8h}, [%0], #16 \n" + "b.gt 50b \n" + "b 99f \n" + + // Blend 100 / 0 - Copy row unchanged. + "100: \n" + "ld1 {v0.8h}, [%1], #16 \n" + "subs %w3, %w3, #8 \n" + "prfm pldl1keep, [%1, 448] \n" + "st1 {v0.8h}, [%0], #16 \n" + "b.gt 100b \n" + + "99: \n" + : "+r"(dst_ptr), // %0 + "+r"(src_ptr), // %1 + "+r"(src_ptr1), // %2 + "+r"(dst_width) // %3 + : "r"(y1_fraction), // %4 + "r"(y0_fraction) // %5 + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5"); +} + +// Bilinear filter 8x2 -> 8x1 +// Use scale to convert lsb formats to msb, depending how many bits there are: +// 32768 = 9 bits +// 16384 = 10 bits +// 4096 = 12 bits +// 256 = 16 bits +void InterpolateRow_16To8_NEON(uint8_t* dst_ptr, + const uint16_t* src_ptr, + ptrdiff_t src_stride, + int scale, + int dst_width, + int source_y_fraction) { + int y1_fraction = source_y_fraction; + int y0_fraction = 256 - y1_fraction; + const uint16_t* src_ptr1 = src_ptr + src_stride; + int shift = 15 - __builtin_clz((int32_t)scale); // Negative shl is shr + + asm volatile( + "dup v6.8h, %w6 \n" + "cmp %w4, #0 \n" + "b.eq 100f \n" + "cmp %w4, #128 \n" + "b.eq 50f \n" + + "dup v5.8h, %w4 \n" + "dup v4.8h, %w5 \n" + // General purpose row blend. + "1: \n" + "ld1 {v0.8h}, [%1], #16 \n" + "ld1 {v1.8h}, [%2], #16 \n" + "subs %w3, %w3, #8 \n" + "umull v2.4s, v0.4h, v4.4h \n" + "prfm pldl1keep, [%1, 448] \n" + "umull2 v3.4s, v0.8h, v4.8h \n" + "prfm pldl1keep, [%2, 448] \n" + "umlal v2.4s, v1.4h, v5.4h \n" + "umlal2 v3.4s, v1.8h, v5.8h \n" + "rshrn v0.4h, v2.4s, #8 \n" + "rshrn2 v0.8h, v3.4s, #8 \n" + "ushl v0.8h, v0.8h, v6.8h \n" + "uqxtn v0.8b, v0.8h \n" + "st1 {v0.8b}, [%0], #8 \n" + "b.gt 1b \n" + "b 99f \n" + + // Blend 50 / 50. + "50: \n" + "ld1 {v0.8h}, [%1], #16 \n" + "ld1 {v1.8h}, [%2], #16 \n" + "subs %w3, %w3, #8 \n" + "prfm pldl1keep, [%1, 448] \n" + "urhadd v0.8h, v0.8h, v1.8h \n" + "prfm pldl1keep, [%2, 448] \n" + "ushl v0.8h, v0.8h, v6.8h \n" + "uqxtn v0.8b, v0.8h \n" + "st1 {v0.8b}, [%0], #8 \n" + "b.gt 50b \n" + "b 99f \n" + + // Blend 100 / 0 - Copy row unchanged. 
+ "100: \n" + "ldr q0, [%1], #16 \n" + "ushl v0.8h, v0.8h, v2.8h \n" // shr = v2 is negative + "prfm pldl1keep, [%1, 448] \n" + "uqxtn v0.8b, v0.8h \n" + "subs %w3, %w3, #8 \n" // 8 src pixels per loop + "str d0, [%0], #8 \n" // store 8 pixels + "b.gt 100b \n" + + "99: \n" + : "+r"(dst_ptr), // %0 + "+r"(src_ptr), // %1 + "+r"(src_ptr1), // %2 + "+r"(dst_width) // %3 + : "r"(y1_fraction), // %4 + "r"(y0_fraction), // %5 + "r"(shift) // %6 + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6"); +} + // dr * (256 - sa) / 256 + sr = dr - dr * sa / 256 + sr -void ARGBBlendRow_NEON(const uint8_t* src_argb0, +void ARGBBlendRow_NEON(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -2355,11 +3348,11 @@ void ARGBBlendRow_NEON(const uint8_t* src_argb0, "8: \n" "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB0 "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 ARGB1 - "prfm pldl1keep, [%0, 448] \n" - "prfm pldl1keep, [%1, 448] \n" "subs %w3, %w3, #8 \n" // 8 processed per loop. "umull v16.8h, v4.8b, v3.8b \n" // db * a + "prfm pldl1keep, [%0, 448] \n" "umull v17.8h, v5.8b, v3.8b \n" // dg * a + "prfm pldl1keep, [%1, 448] \n" "umull v18.8h, v6.8b, v3.8b \n" // dr * a "uqrshrn v16.8b, v16.8h, #8 \n" // db >>= 8 "uqrshrn v17.8b, v17.8h, #8 \n" // dg >>= 8 @@ -2385,11 +3378,11 @@ void ARGBBlendRow_NEON(const uint8_t* src_argb0, // ARGB0. "ld4 {v4.b,v5.b,v6.b,v7.b}[0], [%1], #4 \n" // load 1 pixel // ARGB1. - "prfm pldl1keep, [%0, 448] \n" - "prfm pldl1keep, [%1, 448] \n" "subs %w3, %w3, #1 \n" // 1 processed per loop. "umull v16.8h, v4.8b, v3.8b \n" // db * a + "prfm pldl1keep, [%0, 448] \n" "umull v17.8h, v5.8b, v3.8b \n" // dg * a + "prfm pldl1keep, [%1, 448] \n" "umull v18.8h, v6.8b, v3.8b \n" // dr * a "uqrshrn v16.8b, v16.8h, #8 \n" // db >>= 8 "uqrshrn v17.8b, v17.8h, #8 \n" // dg >>= 8 @@ -2406,7 +3399,7 @@ void ARGBBlendRow_NEON(const uint8_t* src_argb0, "99: \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 "+r"(width) // %3 @@ -2423,14 +3416,14 @@ void ARGBAttenuateRow_NEON(const uint8_t* src_argb, // Attenuate 8 pixels. "1: \n" "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 processed per loop. "umull v4.8h, v0.8b, v3.8b \n" // b * a - "umull v5.8h, v1.8b, v3.8b \n" // g * a - "umull v6.8h, v2.8b, v3.8b \n" // r * a - "uqrshrn v0.8b, v4.8h, #8 \n" // b >>= 8 - "uqrshrn v1.8b, v5.8h, #8 \n" // g >>= 8 - "uqrshrn v2.8b, v6.8h, #8 \n" // r >>= 8 + "prfm pldl1keep, [%0, 448] \n" + "umull v5.8h, v1.8b, v3.8b \n" // g * a + "umull v6.8h, v2.8b, v3.8b \n" // r * a + "uqrshrn v0.8b, v4.8h, #8 \n" // b >>= 8 + "uqrshrn v1.8b, v5.8h, #8 \n" // g >>= 8 + "uqrshrn v2.8b, v6.8h, #8 \n" // r >>= 8 "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%1], #32 \n" // store 8 ARGB "b.gt 1b \n" : "+r"(src_argb), // %0 @@ -2456,9 +3449,9 @@ void ARGBQuantizeRow_NEON(uint8_t* dst_argb, // 8 pixel loop. "1: \n" "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0] \n" // load 8 ARGB. + "subs %w1, %w1, #8 \n" // 8 processed per loop. + "uxtl v0.8h, v0.8b \n" // b (0 .. 255) "prfm pldl1keep, [%0, 448] \n" - "subs %w1, %w1, #8 \n" // 8 processed per loop. - "uxtl v0.8h, v0.8b \n" // b (0 .. 255) "uxtl v1.8h, v1.8b \n" "uxtl v2.8h, v2.8b \n" "sqdmulh v0.8h, v0.8h, v4.8h \n" // b * scale @@ -2498,9 +3491,9 @@ void ARGBShadeRow_NEON(const uint8_t* src_argb, // 8 pixel loop. 
"1: \n" "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%0], #32 \n" // load 8 ARGB - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 processed per loop. "uxtl v4.8h, v4.8b \n" // b (0 .. 255) + "prfm pldl1keep, [%0, 448] \n" "uxtl v5.8h, v5.8b \n" "uxtl v6.8h, v6.8b \n" "uxtl v7.8h, v7.8b \n" @@ -2531,9 +3524,9 @@ void ARGBGrayRow_NEON(const uint8_t* src_argb, uint8_t* dst_argb, int width) { "movi v26.8b, #77 \n" // R * 0.2990 coefficient "1: \n" "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 processed per loop. "umull v4.8h, v0.8b, v24.8b \n" // B + "prfm pldl1keep, [%0, 448] \n" "umlal v4.8h, v1.8b, v25.8b \n" // G "umlal v4.8h, v2.8b, v26.8b \n" // R "uqrshrn v0.8b, v4.8h, #8 \n" // 16 bit to 8 bit B @@ -2566,9 +3559,9 @@ void ARGBSepiaRow_NEON(uint8_t* dst_argb, int width) { "movi v30.8b, #50 \n" // BR coefficient "1: \n" "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0] \n" // load 8 ARGB pixels. + "subs %w1, %w1, #8 \n" // 8 processed per loop. + "umull v4.8h, v0.8b, v20.8b \n" // B to Sepia B "prfm pldl1keep, [%0, 448] \n" - "subs %w1, %w1, #8 \n" // 8 processed per loop. - "umull v4.8h, v0.8b, v20.8b \n" // B to Sepia B "umlal v4.8h, v1.8b, v21.8b \n" // G "umlal v4.8h, v2.8b, v22.8b \n" // R "umull v5.8h, v0.8b, v24.8b \n" // B to Sepia G @@ -2603,9 +3596,9 @@ void ARGBColorMatrixRow_NEON(const uint8_t* src_argb, "1: \n" "ld4 {v16.8b,v17.8b,v18.8b,v19.8b}, [%0], #32 \n" // load 8 ARGB - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 processed per loop. "uxtl v16.8h, v16.8b \n" // b (0 .. 255) 16 bit + "prfm pldl1keep, [%0, 448] \n" "uxtl v17.8h, v17.8b \n" // g "uxtl v18.8h, v18.8b \n" // r "uxtl v19.8h, v19.8b \n" // a @@ -2653,7 +3646,7 @@ void ARGBColorMatrixRow_NEON(const uint8_t* src_argb, // TODO(fbarchard): fix vqshrun in ARGBMultiplyRow_NEON and reenable. // Multiply 2 rows of ARGB pixels together, 8 pixels at a time. -void ARGBMultiplyRow_NEON(const uint8_t* src_argb0, +void ARGBMultiplyRow_NEON(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -2662,11 +3655,11 @@ void ARGBMultiplyRow_NEON(const uint8_t* src_argb0, "1: \n" "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 more - "prfm pldl1keep, [%0, 448] \n" - "prfm pldl1keep, [%1, 448] \n" "subs %w3, %w3, #8 \n" // 8 processed per loop. "umull v0.8h, v0.8b, v4.8b \n" // multiply B + "prfm pldl1keep, [%0, 448] \n" "umull v1.8h, v1.8b, v5.8b \n" // multiply G + "prfm pldl1keep, [%1, 448] \n" "umull v2.8h, v2.8b, v6.8b \n" // multiply R "umull v3.8h, v3.8b, v7.8b \n" // multiply A "rshrn v0.8b, v0.8h, #8 \n" // 16 bit to 8 bit B @@ -2675,7 +3668,7 @@ void ARGBMultiplyRow_NEON(const uint8_t* src_argb0, "rshrn v3.8b, v3.8h, #8 \n" // 16 bit to 8 bit A "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB "b.gt 1b \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 "+r"(width) // %3 @@ -2684,7 +3677,7 @@ void ARGBMultiplyRow_NEON(const uint8_t* src_argb0, } // Add 2 rows of ARGB pixels together, 8 pixels at a time. 
-void ARGBAddRow_NEON(const uint8_t* src_argb0, +void ARGBAddRow_NEON(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -2693,16 +3686,16 @@ void ARGBAddRow_NEON(const uint8_t* src_argb0, "1: \n" "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 more - "prfm pldl1keep, [%0, 448] \n" - "prfm pldl1keep, [%1, 448] \n" "subs %w3, %w3, #8 \n" // 8 processed per loop. "uqadd v0.8b, v0.8b, v4.8b \n" + "prfm pldl1keep, [%0, 448] \n" "uqadd v1.8b, v1.8b, v5.8b \n" + "prfm pldl1keep, [%1, 448] \n" "uqadd v2.8b, v2.8b, v6.8b \n" "uqadd v3.8b, v3.8b, v7.8b \n" "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB "b.gt 1b \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 "+r"(width) // %3 @@ -2711,7 +3704,7 @@ void ARGBAddRow_NEON(const uint8_t* src_argb0, } // Subtract 2 rows of ARGB pixels, 8 pixels at a time. -void ARGBSubtractRow_NEON(const uint8_t* src_argb0, +void ARGBSubtractRow_NEON(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { @@ -2720,16 +3713,16 @@ void ARGBSubtractRow_NEON(const uint8_t* src_argb0, "1: \n" "ld4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%0], #32 \n" // load 8 ARGB "ld4 {v4.8b,v5.8b,v6.8b,v7.8b}, [%1], #32 \n" // load 8 more - "prfm pldl1keep, [%0, 448] \n" - "prfm pldl1keep, [%1, 448] \n" "subs %w3, %w3, #8 \n" // 8 processed per loop. "uqsub v0.8b, v0.8b, v4.8b \n" + "prfm pldl1keep, [%0, 448] \n" "uqsub v1.8b, v1.8b, v5.8b \n" + "prfm pldl1keep, [%1, 448] \n" "uqsub v2.8b, v2.8b, v6.8b \n" "uqsub v3.8b, v3.8b, v7.8b \n" "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB "b.gt 1b \n" - : "+r"(src_argb0), // %0 + : "+r"(src_argb), // %0 "+r"(src_argb1), // %1 "+r"(dst_argb), // %2 "+r"(width) // %3 @@ -2752,11 +3745,11 @@ void SobelRow_NEON(const uint8_t* src_sobelx, "1: \n" "ld1 {v0.8b}, [%0], #8 \n" // load 8 sobelx. "ld1 {v1.8b}, [%1], #8 \n" // load 8 sobely. - "prfm pldl1keep, [%0, 448] \n" - "prfm pldl1keep, [%1, 448] \n" "subs %w3, %w3, #8 \n" // 8 processed per loop. "uqadd v0.8b, v0.8b, v1.8b \n" // add + "prfm pldl1keep, [%0, 448] \n" "orr v1.8b, v0.8b, v0.8b \n" + "prfm pldl1keep, [%1, 448] \n" "orr v2.8b, v0.8b, v0.8b \n" "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB "b.gt 1b \n" @@ -2778,10 +3771,10 @@ void SobelToPlaneRow_NEON(const uint8_t* src_sobelx, "1: \n" "ld1 {v0.16b}, [%0], #16 \n" // load 16 sobelx. "ld1 {v1.16b}, [%1], #16 \n" // load 16 sobely. - "prfm pldl1keep, [%0, 448] \n" - "prfm pldl1keep, [%1, 448] \n" "subs %w3, %w3, #16 \n" // 16 processed per loop. + "prfm pldl1keep, [%0, 448] \n" "uqadd v0.16b, v0.16b, v1.16b \n" // add + "prfm pldl1keep, [%1, 448] \n" "st1 {v0.16b}, [%2], #16 \n" // store 16 pixels. "b.gt 1b \n" : "+r"(src_sobelx), // %0 @@ -2807,10 +3800,10 @@ void SobelXYRow_NEON(const uint8_t* src_sobelx, "1: \n" "ld1 {v2.8b}, [%0], #8 \n" // load 8 sobelx. "ld1 {v0.8b}, [%1], #8 \n" // load 8 sobely. - "prfm pldl1keep, [%0, 448] \n" - "prfm pldl1keep, [%1, 448] \n" "subs %w3, %w3, #8 \n" // 8 processed per loop. 
+ "prfm pldl1keep, [%0, 448] \n" "uqadd v1.8b, v0.8b, v2.8b \n" // add + "prfm pldl1keep, [%1, 448] \n" "st4 {v0.8b,v1.8b,v2.8b,v3.8b}, [%2], #32 \n" // store 8 ARGB "b.gt 1b \n" : "+r"(src_sobelx), // %0 @@ -2834,18 +3827,18 @@ void SobelXRow_NEON(const uint8_t* src_y0, "1: \n" "ld1 {v0.8b}, [%0],%5 \n" // top "ld1 {v1.8b}, [%0],%6 \n" - "prfm pldl1keep, [%0, 448] \n" "usubl v0.8h, v0.8b, v1.8b \n" + "prfm pldl1keep, [%0, 448] \n" "ld1 {v2.8b}, [%1],%5 \n" // center * 2 "ld1 {v3.8b}, [%1],%6 \n" - "prfm pldl1keep, [%1, 448] \n" "usubl v1.8h, v2.8b, v3.8b \n" + "prfm pldl1keep, [%1, 448] \n" "add v0.8h, v0.8h, v1.8h \n" "add v0.8h, v0.8h, v1.8h \n" "ld1 {v2.8b}, [%2],%5 \n" // bottom "ld1 {v3.8b}, [%2],%6 \n" - "prfm pldl1keep, [%2, 448] \n" "subs %w4, %w4, #8 \n" // 8 pixels + "prfm pldl1keep, [%2, 448] \n" "usubl v1.8h, v2.8b, v3.8b \n" "add v0.8h, v0.8h, v1.8h \n" "abs v0.8h, v0.8h \n" @@ -2883,11 +3876,11 @@ void SobelYRow_NEON(const uint8_t* src_y0, "add v0.8h, v0.8h, v1.8h \n" "ld1 {v2.8b}, [%0],%5 \n" // right "ld1 {v3.8b}, [%1],%5 \n" - "prfm pldl1keep, [%0, 448] \n" - "prfm pldl1keep, [%1, 448] \n" "subs %w3, %w3, #8 \n" // 8 pixels "usubl v1.8h, v2.8b, v3.8b \n" + "prfm pldl1keep, [%0, 448] \n" "add v0.8h, v0.8h, v1.8h \n" + "prfm pldl1keep, [%1, 448] \n" "abs v0.8h, v0.8h \n" "uqxtn v0.8b, v0.8h \n" "st1 {v0.8b}, [%2], #8 \n" // store 8 sobely @@ -2910,9 +3903,9 @@ void HalfFloat1Row_NEON(const uint16_t* src, asm volatile( "1: \n" "ld1 {v1.16b}, [%0], #16 \n" // load 8 shorts - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 pixels per loop "uxtl v2.4s, v1.4h \n" // 8 int's + "prfm pldl1keep, [%0, 448] \n" "uxtl2 v3.4s, v1.8h \n" "scvtf v2.4s, v2.4s \n" // 8 floats "scvtf v3.4s, v3.4s \n" @@ -2934,9 +3927,9 @@ void HalfFloatRow_NEON(const uint16_t* src, asm volatile( "1: \n" "ld1 {v1.16b}, [%0], #16 \n" // load 8 shorts - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 pixels per loop "uxtl v2.4s, v1.4h \n" // 8 int's + "prfm pldl1keep, [%0, 448] \n" "uxtl2 v3.4s, v1.8h \n" "scvtf v2.4s, v2.4s \n" // 8 floats "scvtf v3.4s, v3.4s \n" @@ -2960,9 +3953,9 @@ void ByteToFloatRow_NEON(const uint8_t* src, asm volatile( "1: \n" "ld1 {v1.8b}, [%0], #8 \n" // load 8 bytes - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 pixels per loop "uxtl v1.8h, v1.8b \n" // 8 shorts + "prfm pldl1keep, [%0, 448] \n" "uxtl v2.4s, v1.4h \n" // 8 ints "uxtl2 v3.4s, v1.8h \n" "scvtf v2.4s, v2.4s \n" // 8 floats @@ -2989,9 +3982,9 @@ float ScaleMaxSamples_NEON(const float* src, "1: \n" "ld1 {v1.4s, v2.4s}, [%0], #32 \n" // load 8 samples - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 processed per loop "fmul v3.4s, v1.4s, %4.s[0] \n" // scale + "prfm pldl1keep, [%0, 448] \n" "fmul v4.4s, v2.4s, %4.s[0] \n" // scale "fmax v5.4s, v5.4s, v1.4s \n" // max "fmax v6.4s, v6.4s, v2.4s \n" @@ -3019,9 +4012,9 @@ float ScaleSumSamples_NEON(const float* src, "1: \n" "ld1 {v1.4s, v2.4s}, [%0], #32 \n" // load 8 samples - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #8 \n" // 8 processed per loop "fmul v3.4s, v1.4s, %4.s[0] \n" // scale + "prfm pldl1keep, [%0, 448] \n" "fmul v4.4s, v2.4s, %4.s[0] \n" "fmla v5.4s, v1.4s, v1.4s \n" // sum of squares "fmla v6.4s, v2.4s, v2.4s \n" @@ -3220,6 +4213,7 @@ void GaussRow_F32_NEON(const float* src, float* dst, int width) { : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8"); } +#if LIBYUV_USE_ST3 // Convert biplanar NV21 to packed YUV24 void NV21ToYUV24Row_NEON(const uint8_t* src_y, const uint8_t* src_vu, @@ 
-3229,10 +4223,10 @@ void NV21ToYUV24Row_NEON(const uint8_t* src_y, "1: \n" "ld1 {v2.16b}, [%0], #16 \n" // load 16 Y values "ld2 {v0.8b, v1.8b}, [%1], #16 \n" // load 8 VU values + "zip1 v0.16b, v0.16b, v0.16b \n" // replicate V values "prfm pldl1keep, [%0, 448] \n" + "zip1 v1.16b, v1.16b, v1.16b \n" // replicate U values "prfm pldl1keep, [%1, 448] \n" - "zip1 v0.16b, v0.16b, v0.16b \n" // replicate V values - "zip1 v1.16b, v1.16b, v1.16b \n" // replicate U values "subs %w3, %w3, #16 \n" // 16 pixels per loop "st3 {v0.16b,v1.16b,v2.16b}, [%2], #48 \n" // store 16 YUV pixels "b.gt 1b \n" @@ -3243,7 +4237,44 @@ void NV21ToYUV24Row_NEON(const uint8_t* src_y, : : "cc", "memory", "v0", "v1", "v2"); } +#else +static const uvec8 kYUV24Shuffle[3] = { + {16, 17, 0, 16, 17, 1, 18, 19, 2, 18, 19, 3, 20, 21, 4, 20}, + {21, 5, 22, 23, 6, 22, 23, 7, 24, 25, 8, 24, 25, 9, 26, 27}, + {10, 26, 27, 11, 28, 29, 12, 28, 29, 13, 30, 31, 14, 30, 31, 15}}; + +// Convert biplanar NV21 to packed YUV24 +// NV21 has VU in memory for chroma. +// YUV24 is VUY in memory +void NV21ToYUV24Row_NEON(const uint8_t* src_y, + const uint8_t* src_vu, + uint8_t* dst_yuv24, + int width) { + asm volatile( + "ld1 {v5.16b,v6.16b,v7.16b}, [%4] \n" // 3 shuffler constants + "1: \n" + "ld1 {v0.16b}, [%0], #16 \n" // load 16 Y values + "ld1 {v1.16b}, [%1], #16 \n" // load 8 VU values + "tbl v2.16b, {v0.16b,v1.16b}, v5.16b \n" // weave into YUV24 + "prfm pldl1keep, [%0, 448] \n" + "tbl v3.16b, {v0.16b,v1.16b}, v6.16b \n" + "prfm pldl1keep, [%1, 448] \n" + "tbl v4.16b, {v0.16b,v1.16b}, v7.16b \n" + "subs %w3, %w3, #16 \n" // 16 pixels per loop + "st1 {v2.16b,v3.16b,v4.16b}, [%2], #48 \n" // store 16 YUV pixels + "b.gt 1b \n" + : "+r"(src_y), // %0 + "+r"(src_vu), // %1 + "+r"(dst_yuv24), // %2 + "+r"(width) // %3 + : "r"(&kYUV24Shuffle[0]) // %4 + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7"); +} +#endif // LIBYUV_USE_ST3 +// Note ST2 8b version is faster than zip+ST1 + +// AYUV is VUYA in memory. UV for NV12 is UV order in memory. void AYUVToUVRow_NEON(const uint8_t* src_ayuv, int src_stride_ayuv, uint8_t* dst_uv, @@ -3253,13 +4284,13 @@ void AYUVToUVRow_NEON(const uint8_t* src_ayuv, "1: \n" "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 ayuv - "prfm pldl1keep, [%0, 448] \n" "uaddlp v0.8h, v0.16b \n" // V 16 bytes -> 8 shorts. + "prfm pldl1keep, [%0, 448] \n" "uaddlp v1.8h, v1.16b \n" // U 16 bytes -> 8 shorts. "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16 - "prfm pldl1keep, [%1, 448] \n" "uadalp v0.8h, v4.16b \n" // V 16 bytes -> 8 shorts. "uadalp v1.8h, v5.16b \n" // U 16 bytes -> 8 shorts. + "prfm pldl1keep, [%1, 448] \n" "uqrshrn v3.8b, v0.8h, #2 \n" // 2x2 average "uqrshrn v2.8b, v1.8h, #2 \n" "subs %w3, %w3, #16 \n" // 16 processed per loop. @@ -3282,13 +4313,13 @@ void AYUVToVURow_NEON(const uint8_t* src_ayuv, "1: \n" "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 ayuv - "prfm pldl1keep, [%0, 448] \n" "uaddlp v0.8h, v0.16b \n" // V 16 bytes -> 8 shorts. + "prfm pldl1keep, [%0, 448] \n" "uaddlp v1.8h, v1.16b \n" // U 16 bytes -> 8 shorts. "ld4 {v4.16b,v5.16b,v6.16b,v7.16b}, [%1], #64 \n" // load next 16 - "prfm pldl1keep, [%1, 448] \n" "uadalp v0.8h, v4.16b \n" // V 16 bytes -> 8 shorts. "uadalp v1.8h, v5.16b \n" // U 16 bytes -> 8 shorts. + "prfm pldl1keep, [%1, 448] \n" "uqrshrn v0.8b, v0.8h, #2 \n" // 2x2 average "uqrshrn v1.8b, v1.8h, #2 \n" "subs %w3, %w3, #16 \n" // 16 processed per loop. 
@@ -3307,8 +4338,8 @@ void AYUVToYRow_NEON(const uint8_t* src_ayuv, uint8_t* dst_y, int width) { asm volatile( "1: \n" "ld4 {v0.16b,v1.16b,v2.16b,v3.16b}, [%0], #64 \n" // load 16 - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #16 \n" // 16 pixels per loop + "prfm pldl1keep, [%0, 448] \n" "st1 {v2.16b}, [%1], #16 \n" // store 16 Y pixels "b.gt 1b \n" : "+r"(src_ayuv), // %0 @@ -3329,9 +4360,9 @@ void SwapUVRow_NEON(const uint8_t* src_uv, uint8_t* dst_vu, int width) { "1: \n" "ld1 {v0.16b}, [%0], 16 \n" // load 16 UV values "ld1 {v1.16b}, [%0], 16 \n" - "prfm pldl1keep, [%0, 448] \n" "subs %w2, %w2, #16 \n" // 16 pixels per loop "tbl v0.16b, {v0.16b}, v2.16b \n" + "prfm pldl1keep, [%0, 448] \n" "tbl v1.16b, {v1.16b}, v2.16b \n" "stp q0, q1, [%1], 32 \n" // store 16 VU pixels "b.gt 1b \n" @@ -3379,6 +4410,108 @@ void HalfMergeUVRow_NEON(const uint8_t* src_u, : "cc", "memory", "v0", "v1", "v2", "v3"); } +void SplitUVRow_16_NEON(const uint16_t* src_uv, + uint16_t* dst_u, + uint16_t* dst_v, + int depth, + int width) { + int shift = depth - 16; // Negative for right shift. + asm volatile( + "dup v2.8h, %w4 \n" + "1: \n" + "ld2 {v0.8h, v1.8h}, [%0], #32 \n" // load 8 UV + "subs %w3, %w3, #8 \n" // 8 src pixels per loop + "ushl v0.8h, v0.8h, v2.8h \n" + "prfm pldl1keep, [%0, 448] \n" + "ushl v1.8h, v1.8h, v2.8h \n" + "st1 {v0.8h}, [%1], #16 \n" // store 8 U pixels + "st1 {v1.8h}, [%2], #16 \n" // store 8 V pixels + "b.gt 1b \n" + : "+r"(src_uv), // %0 + "+r"(dst_u), // %1 + "+r"(dst_v), // %2 + "+r"(width) // %3 + : "r"(shift) // %4 + : "cc", "memory", "v0", "v1", "v2"); +} + +void MultiplyRow_16_NEON(const uint16_t* src_y, + uint16_t* dst_y, + int scale, + int width) { + asm volatile( + "dup v2.8h, %w3 \n" + "1: \n" + "ldp q0, q1, [%0], #32 \n" + "mul v0.8h, v0.8h, v2.8h \n" + "prfm pldl1keep, [%0, 448] \n" + "mul v1.8h, v1.8h, v2.8h \n" + "stp q0, q1, [%1], #32 \n" // store 16 pixels + "subs %w2, %w2, #16 \n" // 16 src pixels per loop + "b.gt 1b \n" + : "+r"(src_y), // %0 + "+r"(dst_y), // %1 + "+r"(width) // %2 + : "r"(scale) // %3 + : "cc", "memory", "v0", "v1", "v2"); +} + +void DivideRow_16_NEON(const uint16_t* src_y, + uint16_t* dst_y, + int scale, + int width) { + asm volatile( + "dup v4.8h, %w3 \n" + "1: \n" + "ldp q2, q3, [%0], #32 \n" + "umull v0.4s, v2.4h, v4.4h \n" + "umull2 v1.4s, v2.8h, v4.8h \n" + "umull v2.4s, v3.4h, v4.4h \n" + "umull2 v3.4s, v3.8h, v4.8h \n" + "prfm pldl1keep, [%0, 448] \n" + "shrn v0.4h, v0.4s, #16 \n" + "shrn2 v0.8h, v1.4s, #16 \n" + "shrn v1.4h, v2.4s, #16 \n" + "shrn2 v1.8h, v3.4s, #16 \n" + "stp q0, q1, [%1], #32 \n" // store 16 pixels + "subs %w2, %w2, #16 \n" // 16 src pixels per loop + "b.gt 1b \n" + : "+r"(src_y), // %0 + "+r"(dst_y), // %1 + "+r"(width) // %2 + : "r"(scale) // %3 + : "cc", "memory", "v0", "v1", "v2", "v3", "v4"); +} + +// Use scale to convert lsb formats to msb, depending how many bits there are: +// 32768 = 9 bits = shr 1 +// 16384 = 10 bits = shr 2 +// 4096 = 12 bits = shr 4 +// 256 = 16 bits = shr 8 +void Convert16To8Row_NEON(const uint16_t* src_y, + uint8_t* dst_y, + int scale, + int width) { + int shift = 15 - __builtin_clz((int32_t)scale); // Negative shl is shr + asm volatile( + "dup v2.8h, %w3 \n" + "1: \n" + "ldp q0, q1, [%0], #32 \n" + "ushl v0.8h, v0.8h, v2.8h \n" // shr = v2 is negative + "ushl v1.8h, v1.8h, v2.8h \n" + "prfm pldl1keep, [%0, 448] \n" + "uqxtn v0.8b, v0.8h \n" + "uqxtn2 v0.16b, v1.8h \n" + "subs %w2, %w2, #16 \n" // 16 src pixels per loop + "str q0, [%1], #16 \n" // store 16 pixels + "b.gt 1b \n" + : 
"+r"(src_y), // %0 + "+r"(dst_y), // %1 + "+r"(width) // %2 + : "r"(shift) // %3 + : "cc", "memory", "v0", "v1", "v2"); +} + #endif // !defined(LIBYUV_DISABLE_NEON) && defined(__aarch64__) #ifdef __cplusplus diff --git a/TMessagesProj/jni/third_party/libyuv/source/row_win.cc b/TMessagesProj/jni/third_party/libyuv/source/row_win.cc index 9afcf060a4..c5a14f86fb 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/row_win.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/row_win.cc @@ -10,9 +10,9 @@ #include "libyuv/row.h" -// This module is for Visual C 32/64 bit and clangcl 32 bit +// This module is for Visual C 32/64 bit #if !defined(LIBYUV_DISABLE_X86) && defined(_MSC_VER) && \ - (defined(_M_IX86) || (defined(_M_X64) && !defined(__clang__))) + !defined(__clang__) && (defined(_M_IX86) || defined(_M_X64)) #if defined(_M_X64) #include @@ -27,12 +27,34 @@ extern "C" { // 64 bit #if defined(_M_X64) +// Read 8 UV from 444 +#define READYUV444 \ + xmm3 = _mm_loadl_epi64((__m128i*)u_buf); \ + xmm1 = _mm_loadl_epi64((__m128i*)(u_buf + offset)); \ + xmm3 = _mm_unpacklo_epi8(xmm3, xmm1); \ + u_buf += 8; \ + xmm4 = _mm_loadl_epi64((__m128i*)y_buf); \ + xmm4 = _mm_unpacklo_epi8(xmm4, xmm4); \ + y_buf += 8; + +// Read 8 UV from 444, With 8 Alpha. +#define READYUVA444 \ + xmm3 = _mm_loadl_epi64((__m128i*)u_buf); \ + xmm1 = _mm_loadl_epi64((__m128i*)(u_buf + offset)); \ + xmm3 = _mm_unpacklo_epi8(xmm3, xmm1); \ + u_buf += 8; \ + xmm4 = _mm_loadl_epi64((__m128i*)y_buf); \ + xmm4 = _mm_unpacklo_epi8(xmm4, xmm4); \ + y_buf += 8; \ + xmm5 = _mm_loadl_epi64((__m128i*)a_buf); \ + a_buf += 8; + // Read 4 UV from 422, upsample to 8 UV. #define READYUV422 \ - xmm0 = _mm_cvtsi32_si128(*(uint32_t*)u_buf); \ + xmm3 = _mm_cvtsi32_si128(*(uint32_t*)u_buf); \ xmm1 = _mm_cvtsi32_si128(*(uint32_t*)(u_buf + offset)); \ - xmm0 = _mm_unpacklo_epi8(xmm0, xmm1); \ - xmm0 = _mm_unpacklo_epi16(xmm0, xmm0); \ + xmm3 = _mm_unpacklo_epi8(xmm3, xmm1); \ + xmm3 = _mm_unpacklo_epi16(xmm3, xmm3); \ u_buf += 4; \ xmm4 = _mm_loadl_epi64((__m128i*)y_buf); \ xmm4 = _mm_unpacklo_epi8(xmm4, xmm4); \ @@ -40,10 +62,10 @@ extern "C" { // Read 4 UV from 422, upsample to 8 UV. With 8 Alpha. #define READYUVA422 \ - xmm0 = _mm_cvtsi32_si128(*(uint32_t*)u_buf); \ + xmm3 = _mm_cvtsi32_si128(*(uint32_t*)u_buf); \ xmm1 = _mm_cvtsi32_si128(*(uint32_t*)(u_buf + offset)); \ - xmm0 = _mm_unpacklo_epi8(xmm0, xmm1); \ - xmm0 = _mm_unpacklo_epi16(xmm0, xmm0); \ + xmm3 = _mm_unpacklo_epi8(xmm3, xmm1); \ + xmm3 = _mm_unpacklo_epi16(xmm3, xmm3); \ u_buf += 4; \ xmm4 = _mm_loadl_epi64((__m128i*)y_buf); \ xmm4 = _mm_unpacklo_epi8(xmm4, xmm4); \ @@ -52,24 +74,21 @@ extern "C" { a_buf += 8; // Convert 8 pixels: 8 UV and 8 Y. 
-#define YUVTORGB(yuvconstants) \ - xmm1 = _mm_loadu_si128(&xmm0); \ - xmm2 = _mm_loadu_si128(&xmm0); \ - xmm0 = _mm_maddubs_epi16(xmm0, *(__m128i*)yuvconstants->kUVToB); \ - xmm1 = _mm_maddubs_epi16(xmm1, *(__m128i*)yuvconstants->kUVToG); \ - xmm2 = _mm_maddubs_epi16(xmm2, *(__m128i*)yuvconstants->kUVToR); \ - xmm0 = _mm_sub_epi16(*(__m128i*)yuvconstants->kUVBiasB, xmm0); \ - xmm1 = _mm_sub_epi16(*(__m128i*)yuvconstants->kUVBiasG, xmm1); \ - xmm2 = _mm_sub_epi16(*(__m128i*)yuvconstants->kUVBiasR, xmm2); \ - xmm4 = _mm_mulhi_epu16(xmm4, *(__m128i*)yuvconstants->kYToRgb); \ - xmm0 = _mm_adds_epi16(xmm0, xmm4); \ - xmm1 = _mm_adds_epi16(xmm1, xmm4); \ - xmm2 = _mm_adds_epi16(xmm2, xmm4); \ - xmm0 = _mm_srai_epi16(xmm0, 6); \ - xmm1 = _mm_srai_epi16(xmm1, 6); \ - xmm2 = _mm_srai_epi16(xmm2, 6); \ - xmm0 = _mm_packus_epi16(xmm0, xmm0); \ - xmm1 = _mm_packus_epi16(xmm1, xmm1); \ +#define YUVTORGB(yuvconstants) \ + xmm3 = _mm_sub_epi8(xmm3, _mm_set1_epi8((char)0x80)); \ + xmm4 = _mm_mulhi_epu16(xmm4, *(__m128i*)yuvconstants->kYToRgb); \ + xmm4 = _mm_add_epi16(xmm4, *(__m128i*)yuvconstants->kYBiasToRgb); \ + xmm0 = _mm_maddubs_epi16(*(__m128i*)yuvconstants->kUVToB, xmm3); \ + xmm1 = _mm_maddubs_epi16(*(__m128i*)yuvconstants->kUVToG, xmm3); \ + xmm2 = _mm_maddubs_epi16(*(__m128i*)yuvconstants->kUVToR, xmm3); \ + xmm0 = _mm_adds_epi16(xmm4, xmm0); \ + xmm1 = _mm_subs_epi16(xmm4, xmm1); \ + xmm2 = _mm_adds_epi16(xmm4, xmm2); \ + xmm0 = _mm_srai_epi16(xmm0, 6); \ + xmm1 = _mm_srai_epi16(xmm1, 6); \ + xmm2 = _mm_srai_epi16(xmm2, 6); \ + xmm0 = _mm_packus_epi16(xmm0, xmm0); \ + xmm1 = _mm_packus_epi16(xmm1, xmm1); \ xmm2 = _mm_packus_epi16(xmm2, xmm2); // Store 8 ARGB values. @@ -90,7 +109,7 @@ void I422ToARGBRow_SSSE3(const uint8_t* y_buf, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width) { - __m128i xmm0, xmm1, xmm2, xmm4; + __m128i xmm0, xmm1, xmm2, xmm3, xmm4; const __m128i xmm5 = _mm_set1_epi8(-1); const ptrdiff_t offset = (uint8_t*)v_buf - (uint8_t*)u_buf; while (width > 0) { @@ -110,7 +129,7 @@ void I422AlphaToARGBRow_SSSE3(const uint8_t* y_buf, uint8_t* dst_argb, const struct YuvConstants* yuvconstants, int width) { - __m128i xmm0, xmm1, xmm2, xmm4, xmm5; + __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5; const ptrdiff_t offset = (uint8_t*)v_buf - (uint8_t*)u_buf; while (width > 0) { READYUVA422 @@ -121,6 +140,44 @@ void I422AlphaToARGBRow_SSSE3(const uint8_t* y_buf, } #endif +#if defined(HAS_I444TOARGBROW_SSSE3) +void I444ToARGBRow_SSSE3(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + __m128i xmm0, xmm1, xmm2, xmm3, xmm4; + const __m128i xmm5 = _mm_set1_epi8(-1); + const ptrdiff_t offset = (uint8_t*)v_buf - (uint8_t*)u_buf; + while (width > 0) { + READYUV444 + YUVTORGB(yuvconstants) + STOREARGB + width -= 8; + } +} +#endif + +#if defined(HAS_I444ALPHATOARGBROW_SSSE3) +void I444AlphaToARGBRow_SSSE3(const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + const uint8_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5; + const ptrdiff_t offset = (uint8_t*)v_buf - (uint8_t*)u_buf; + while (width > 0) { + READYUVA444 + YUVTORGB(yuvconstants) + STOREARGB + width -= 8; + } +} +#endif + // 32 bit #else // defined(_M_X64) #ifdef HAS_ARGBTOYROW_SSSE3 @@ -187,11 +244,11 @@ static const uvec8 kAddY16 = {16u, 16u, 16u, 16u, 16u, 16u, 16u, 16u, // 7 bit fixed point 0.5. 
static const vec16 kAddYJ64 = {64, 64, 64, 64, 64, 64, 64, 64}; -static const uvec8 kAddUV128 = {128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u, - 128u, 128u, 128u, 128u, 128u, 128u, 128u, 128u}; - -static const uvec16 kAddUVJ128 = {0x8080u, 0x8080u, 0x8080u, 0x8080u, - 0x8080u, 0x8080u, 0x8080u, 0x8080u}; +// 8 bit fixed point 0.5, for bias of UV. +static const ulvec8 kBiasUV128 = { + 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, + 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, + 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80}; // Shuffle table for converting RGB24 to ARGB. static const uvec8 kShuffleMaskRGB24ToARGB = { @@ -1367,7 +1424,7 @@ __declspec(naked) void RGBAToYRow_SSSE3(const uint8_t* src_argb, } } -__declspec(naked) void ARGBToUVRow_SSSE3(const uint8_t* src_argb0, +__declspec(naked) void ARGBToUVRow_SSSE3(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, uint8_t* dst_v, @@ -1380,7 +1437,7 @@ __declspec(naked) void ARGBToUVRow_SSSE3(const uint8_t* src_argb0, mov edx, [esp + 8 + 12] // dst_u mov edi, [esp + 8 + 16] // dst_v mov ecx, [esp + 8 + 20] // width - movdqa xmm5, xmmword ptr kAddUV128 + movdqa xmm5, xmmword ptr kBiasUV128 movdqa xmm6, xmmword ptr kARGBToV movdqa xmm7, xmmword ptr kARGBToU sub edi, edx // stride from u to v @@ -1439,7 +1496,7 @@ __declspec(naked) void ARGBToUVRow_SSSE3(const uint8_t* src_argb0, } } -__declspec(naked) void ARGBToUVJRow_SSSE3(const uint8_t* src_argb0, +__declspec(naked) void ARGBToUVJRow_SSSE3(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, uint8_t* dst_v, @@ -1452,7 +1509,7 @@ __declspec(naked) void ARGBToUVJRow_SSSE3(const uint8_t* src_argb0, mov edx, [esp + 8 + 12] // dst_u mov edi, [esp + 8 + 16] // dst_v mov ecx, [esp + 8 + 20] // width - movdqa xmm5, xmmword ptr kAddUVJ128 + movdqa xmm5, xmmword ptr kBiasUV128 movdqa xmm6, xmmword ptr kARGBToVJ movdqa xmm7, xmmword ptr kARGBToUJ sub edi, edx // stride from u to v @@ -1513,7 +1570,7 @@ __declspec(naked) void ARGBToUVJRow_SSSE3(const uint8_t* src_argb0, } #ifdef HAS_ARGBTOUVROW_AVX2 -__declspec(naked) void ARGBToUVRow_AVX2(const uint8_t* src_argb0, +__declspec(naked) void ARGBToUVRow_AVX2(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, uint8_t* dst_v, @@ -1526,7 +1583,7 @@ __declspec(naked) void ARGBToUVRow_AVX2(const uint8_t* src_argb0, mov edx, [esp + 8 + 12] // dst_u mov edi, [esp + 8 + 16] // dst_v mov ecx, [esp + 8 + 20] // width - vbroadcastf128 ymm5, xmmword ptr kAddUV128 + vbroadcastf128 ymm5, xmmword ptr kBiasUV128 vbroadcastf128 ymm6, xmmword ptr kARGBToV vbroadcastf128 ymm7, xmmword ptr kARGBToU sub edi, edx // stride from u to v @@ -1581,7 +1638,7 @@ __declspec(naked) void ARGBToUVRow_AVX2(const uint8_t* src_argb0, #endif // HAS_ARGBTOUVROW_AVX2 #ifdef HAS_ARGBTOUVJROW_AVX2 -__declspec(naked) void ARGBToUVJRow_AVX2(const uint8_t* src_argb0, +__declspec(naked) void ARGBToUVJRow_AVX2(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, uint8_t* dst_v, @@ -1594,7 +1651,7 @@ __declspec(naked) void ARGBToUVJRow_AVX2(const uint8_t* src_argb0, mov edx, [esp + 8 + 12] // dst_u mov edi, [esp + 8 + 16] // dst_v mov ecx, [esp + 8 + 20] // width - vbroadcastf128 ymm5, xmmword ptr kAddUVJ128 + vbroadcastf128 ymm5, xmmword ptr kBiasUV128 vbroadcastf128 ymm6, xmmword ptr kARGBToVJ vbroadcastf128 ymm7, xmmword ptr kARGBToUJ sub edi, edx // stride from u to v @@ -1649,7 +1706,7 @@ __declspec(naked) void ARGBToUVJRow_AVX2(const uint8_t* src_argb0, } #endif // HAS_ARGBTOUVJROW_AVX2 
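For orientation, the rewritten YUVTORGB paths in this patch (the x64 intrinsic macro above and the kBiasUV128 constant that replaces kAddUV128/kAddUVJ128) re-center UV by subtracting 128 and then add the UV products to a pre-scaled, pre-biased Y term: add for B and R, subtract for G, with 6-bit fixed-point rounding and saturation at the end. A minimal scalar sketch of that per-pixel math follows; the YuvCoeffs struct, its field names, and both helper names are illustrative stand-ins for the per-colorspace entries of YuvConstants, not the library's actual API.

/* Scalar sketch of the rewritten YUVTORGB step (illustrative only). */
#include <stdint.h>

typedef struct {
  int ub, ug, vg, vr; /* UV contributions to B, G, G, R (6-bit fixed point) */
  int yg, yb;         /* Y scale (16-bit fixed point) and Y bias            */
} YuvCoeffs;          /* stand-in for the selected YuvConstants table       */

static uint8_t Clamp255(int v) {
  return (uint8_t)(v < 0 ? 0 : (v > 255 ? 255 : v));
}

static void YuvToRgbPixel(const YuvCoeffs* c, uint8_t y, uint8_t u, uint8_t v,
                          uint8_t* b, uint8_t* g, uint8_t* r) {
  /* punpcklbw xmm4, xmm4 duplicates Y into both bytes (y * 0x0101); pmulhuw
   * keeps the high 16 bits of the product, then kYBiasToRgb is added. */
  int y1 = (int)(((uint32_t)y * 0x0101u * (uint32_t)c->yg) >> 16) + c->yb;
  /* psubb / vpsubb with kBiasUV128 re-centers U and V around zero. */
  int ui = (int)u - 128;
  int vi = (int)v - 128;
  *b = Clamp255((y1 + c->ub * ui) >> 6);                /* paddsw, psraw 6 */
  *g = Clamp255((y1 - (c->ug * ui + c->vg * vi)) >> 6); /* psubsw, psraw 6 */
  *r = Clamp255((y1 + c->vr * vi) >> 6);                /* paddsw, psraw 6 */
}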
-__declspec(naked) void ARGBToUV444Row_SSSE3(const uint8_t* src_argb0, +__declspec(naked) void ARGBToUV444Row_SSSE3(const uint8_t* src_argb, uint8_t* dst_u, uint8_t* dst_v, int width) { @@ -1659,7 +1716,7 @@ __declspec(naked) void ARGBToUV444Row_SSSE3(const uint8_t* src_argb0, mov edx, [esp + 4 + 8] // dst_u mov edi, [esp + 4 + 12] // dst_v mov ecx, [esp + 4 + 16] // width - movdqa xmm5, xmmword ptr kAddUV128 + movdqa xmm5, xmmword ptr kBiasUV128 movdqa xmm6, xmmword ptr kARGBToV movdqa xmm7, xmmword ptr kARGBToU sub edi, edx // stride from u to v @@ -1707,7 +1764,7 @@ __declspec(naked) void ARGBToUV444Row_SSSE3(const uint8_t* src_argb0, } } -__declspec(naked) void BGRAToUVRow_SSSE3(const uint8_t* src_argb0, +__declspec(naked) void BGRAToUVRow_SSSE3(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, uint8_t* dst_v, @@ -1720,7 +1777,7 @@ __declspec(naked) void BGRAToUVRow_SSSE3(const uint8_t* src_argb0, mov edx, [esp + 8 + 12] // dst_u mov edi, [esp + 8 + 16] // dst_v mov ecx, [esp + 8 + 20] // width - movdqa xmm5, xmmword ptr kAddUV128 + movdqa xmm5, xmmword ptr kBiasUV128 movdqa xmm6, xmmword ptr kBGRAToV movdqa xmm7, xmmword ptr kBGRAToU sub edi, edx // stride from u to v @@ -1779,7 +1836,7 @@ __declspec(naked) void BGRAToUVRow_SSSE3(const uint8_t* src_argb0, } } -__declspec(naked) void ABGRToUVRow_SSSE3(const uint8_t* src_argb0, +__declspec(naked) void ABGRToUVRow_SSSE3(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, uint8_t* dst_v, @@ -1792,7 +1849,7 @@ __declspec(naked) void ABGRToUVRow_SSSE3(const uint8_t* src_argb0, mov edx, [esp + 8 + 12] // dst_u mov edi, [esp + 8 + 16] // dst_v mov ecx, [esp + 8 + 20] // width - movdqa xmm5, xmmword ptr kAddUV128 + movdqa xmm5, xmmword ptr kBiasUV128 movdqa xmm6, xmmword ptr kABGRToV movdqa xmm7, xmmword ptr kABGRToU sub edi, edx // stride from u to v @@ -1851,7 +1908,7 @@ __declspec(naked) void ABGRToUVRow_SSSE3(const uint8_t* src_argb0, } } -__declspec(naked) void RGBAToUVRow_SSSE3(const uint8_t* src_argb0, +__declspec(naked) void RGBAToUVRow_SSSE3(const uint8_t* src_argb, int src_stride_argb, uint8_t* dst_u, uint8_t* dst_v, @@ -1864,7 +1921,7 @@ __declspec(naked) void RGBAToUVRow_SSSE3(const uint8_t* src_argb0, mov edx, [esp + 8 + 12] // dst_u mov edi, [esp + 8 + 16] // dst_v mov ecx, [esp + 8 + 20] // width - movdqa xmm5, xmmword ptr kAddUV128 + movdqa xmm5, xmmword ptr kBiasUV128 movdqa xmm6, xmmword ptr kRGBAToV movdqa xmm7, xmmword ptr kRGBAToU sub edi, edx // stride from u to v @@ -1926,137 +1983,153 @@ __declspec(naked) void RGBAToUVRow_SSSE3(const uint8_t* src_argb0, // Read 16 UV from 444 #define READYUV444_AVX2 \ - __asm { \ - __asm vmovdqu xmm0, [esi] /* U */ \ - __asm vmovdqu xmm1, [esi + edi] /* V */ \ + __asm { \ + __asm vmovdqu xmm3, [esi] /* U */ \ + __asm vmovdqu xmm1, [esi + edi] /* V */ \ __asm lea esi, [esi + 16] \ - __asm vpermq ymm0, ymm0, 0xd8 \ + __asm vpermq ymm3, ymm3, 0xd8 \ __asm vpermq ymm1, ymm1, 0xd8 \ - __asm vpunpcklbw ymm0, ymm0, ymm1 /* UV */ \ - __asm vmovdqu xmm4, [eax] /* Y */ \ + __asm vpunpcklbw ymm3, ymm3, ymm1 /* UV */ \ + __asm vmovdqu xmm4, [eax] /* Y */ \ __asm vpermq ymm4, ymm4, 0xd8 \ __asm vpunpcklbw ymm4, ymm4, ymm4 \ __asm lea eax, [eax + 16]} +// Read 16 UV from 444. With 16 Alpha. 
+#define READYUVA444_AVX2 \ + __asm { \ + __asm vmovdqu xmm3, [esi] /* U */ \ + __asm vmovdqu xmm1, [esi + edi] /* V */ \ + __asm lea esi, [esi + 16] \ + __asm vpermq ymm3, ymm3, 0xd8 \ + __asm vpermq ymm1, ymm1, 0xd8 \ + __asm vpunpcklbw ymm3, ymm3, ymm1 /* UV */ \ + __asm vmovdqu xmm4, [eax] /* Y */ \ + __asm vpermq ymm4, ymm4, 0xd8 \ + __asm vpunpcklbw ymm4, ymm4, ymm4 \ + __asm lea eax, [eax + 16] \ + __asm vmovdqu xmm5, [ebp] /* A */ \ + __asm vpermq ymm5, ymm5, 0xd8 \ + __asm lea ebp, [ebp + 16]} + // Read 8 UV from 422, upsample to 16 UV. #define READYUV422_AVX2 \ - __asm { \ - __asm vmovq xmm0, qword ptr [esi] /* U */ \ - __asm vmovq xmm1, qword ptr [esi + edi] /* V */ \ + __asm { \ + __asm vmovq xmm3, qword ptr [esi] /* U */ \ + __asm vmovq xmm1, qword ptr [esi + edi] /* V */ \ __asm lea esi, [esi + 8] \ - __asm vpunpcklbw ymm0, ymm0, ymm1 /* UV */ \ - __asm vpermq ymm0, ymm0, 0xd8 \ - __asm vpunpcklwd ymm0, ymm0, ymm0 /* UVUV (upsample) */ \ - __asm vmovdqu xmm4, [eax] /* Y */ \ + __asm vpunpcklbw ymm3, ymm3, ymm1 /* UV */ \ + __asm vpermq ymm3, ymm3, 0xd8 \ + __asm vpunpcklwd ymm3, ymm3, ymm3 /* UVUV (upsample) */ \ + __asm vmovdqu xmm4, [eax] /* Y */ \ __asm vpermq ymm4, ymm4, 0xd8 \ __asm vpunpcklbw ymm4, ymm4, ymm4 \ __asm lea eax, [eax + 16]} // Read 8 UV from 422, upsample to 16 UV. With 16 Alpha. #define READYUVA422_AVX2 \ - __asm { \ - __asm vmovq xmm0, qword ptr [esi] /* U */ \ - __asm vmovq xmm1, qword ptr [esi + edi] /* V */ \ + __asm { \ + __asm vmovq xmm3, qword ptr [esi] /* U */ \ + __asm vmovq xmm1, qword ptr [esi + edi] /* V */ \ __asm lea esi, [esi + 8] \ - __asm vpunpcklbw ymm0, ymm0, ymm1 /* UV */ \ - __asm vpermq ymm0, ymm0, 0xd8 \ - __asm vpunpcklwd ymm0, ymm0, ymm0 /* UVUV (upsample) */ \ - __asm vmovdqu xmm4, [eax] /* Y */ \ + __asm vpunpcklbw ymm3, ymm3, ymm1 /* UV */ \ + __asm vpermq ymm3, ymm3, 0xd8 \ + __asm vpunpcklwd ymm3, ymm3, ymm3 /* UVUV (upsample) */ \ + __asm vmovdqu xmm4, [eax] /* Y */ \ __asm vpermq ymm4, ymm4, 0xd8 \ __asm vpunpcklbw ymm4, ymm4, ymm4 \ __asm lea eax, [eax + 16] \ - __asm vmovdqu xmm5, [ebp] /* A */ \ + __asm vmovdqu xmm5, [ebp] /* A */ \ __asm vpermq ymm5, ymm5, 0xd8 \ __asm lea ebp, [ebp + 16]} // Read 8 UV from NV12, upsample to 16 UV. #define READNV12_AVX2 \ - __asm { \ - __asm vmovdqu xmm0, [esi] /* UV */ \ + __asm { \ + __asm vmovdqu xmm3, [esi] /* UV */ \ __asm lea esi, [esi + 16] \ - __asm vpermq ymm0, ymm0, 0xd8 \ - __asm vpunpcklwd ymm0, ymm0, ymm0 /* UVUV (upsample) */ \ - __asm vmovdqu xmm4, [eax] /* Y */ \ + __asm vpermq ymm3, ymm3, 0xd8 \ + __asm vpunpcklwd ymm3, ymm3, ymm3 /* UVUV (upsample) */ \ + __asm vmovdqu xmm4, [eax] /* Y */ \ __asm vpermq ymm4, ymm4, 0xd8 \ __asm vpunpcklbw ymm4, ymm4, ymm4 \ __asm lea eax, [eax + 16]} // Read 8 UV from NV21, upsample to 16 UV. #define READNV21_AVX2 \ - __asm { \ - __asm vmovdqu xmm0, [esi] /* UV */ \ + __asm { \ + __asm vmovdqu xmm3, [esi] /* UV */ \ __asm lea esi, [esi + 16] \ - __asm vpermq ymm0, ymm0, 0xd8 \ - __asm vpshufb ymm0, ymm0, ymmword ptr kShuffleNV21 \ - __asm vmovdqu xmm4, [eax] /* Y */ \ + __asm vpermq ymm3, ymm3, 0xd8 \ + __asm vpshufb ymm3, ymm3, ymmword ptr kShuffleNV21 \ + __asm vmovdqu xmm4, [eax] /* Y */ \ __asm vpermq ymm4, ymm4, 0xd8 \ __asm vpunpcklbw ymm4, ymm4, ymm4 \ __asm lea eax, [eax + 16]} // Read 8 YUY2 with 16 Y and upsample 8 UV to 16 UV. 
#define READYUY2_AVX2 \ - __asm { \ - __asm vmovdqu ymm4, [eax] /* YUY2 */ \ + __asm { \ + __asm vmovdqu ymm4, [eax] /* YUY2 */ \ __asm vpshufb ymm4, ymm4, ymmword ptr kShuffleYUY2Y \ - __asm vmovdqu ymm0, [eax] /* UV */ \ - __asm vpshufb ymm0, ymm0, ymmword ptr kShuffleYUY2UV \ + __asm vmovdqu ymm3, [eax] /* UV */ \ + __asm vpshufb ymm3, ymm3, ymmword ptr kShuffleYUY2UV \ __asm lea eax, [eax + 32]} // Read 8 UYVY with 16 Y and upsample 8 UV to 16 UV. #define READUYVY_AVX2 \ - __asm { \ - __asm vmovdqu ymm4, [eax] /* UYVY */ \ + __asm { \ + __asm vmovdqu ymm4, [eax] /* UYVY */ \ __asm vpshufb ymm4, ymm4, ymmword ptr kShuffleUYVYY \ - __asm vmovdqu ymm0, [eax] /* UV */ \ - __asm vpshufb ymm0, ymm0, ymmword ptr kShuffleUYVYUV \ + __asm vmovdqu ymm3, [eax] /* UV */ \ + __asm vpshufb ymm3, ymm3, ymmword ptr kShuffleUYVYUV \ __asm lea eax, [eax + 32]} // Convert 16 pixels: 16 UV and 16 Y. #define YUVTORGB_AVX2(YuvConstants) \ - __asm { \ - __asm vpmaddubsw ymm2, ymm0, ymmword ptr [YuvConstants + KUVTOR] /* R UV */\ - __asm vpmaddubsw ymm1, ymm0, ymmword ptr [YuvConstants + KUVTOG] /* G UV */\ - __asm vpmaddubsw ymm0, ymm0, ymmword ptr [YuvConstants + KUVTOB] /* B UV */\ - __asm vmovdqu ymm3, ymmword ptr [YuvConstants + KUVBIASR] \ - __asm vpsubw ymm2, ymm3, ymm2 \ - __asm vmovdqu ymm3, ymmword ptr [YuvConstants + KUVBIASG] \ - __asm vpsubw ymm1, ymm3, ymm1 \ - __asm vmovdqu ymm3, ymmword ptr [YuvConstants + KUVBIASB] \ - __asm vpsubw ymm0, ymm3, ymm0 /* Step 2: Find Y contribution to 16 R,G,B values */ \ + __asm { \ + __asm vpsubb ymm3, ymm3, ymmword ptr kBiasUV128 \ __asm vpmulhuw ymm4, ymm4, ymmword ptr [YuvConstants + KYTORGB] \ - __asm vpaddsw ymm0, ymm0, ymm4 /* B += Y */ \ - __asm vpaddsw ymm1, ymm1, ymm4 /* G += Y */ \ - __asm vpaddsw ymm2, ymm2, ymm4 /* R += Y */ \ + __asm vmovdqa ymm0, ymmword ptr [YuvConstants + KUVTOB] \ + __asm vmovdqa ymm1, ymmword ptr [YuvConstants + KUVTOG] \ + __asm vmovdqa ymm2, ymmword ptr [YuvConstants + KUVTOR] \ + __asm vpmaddubsw ymm0, ymm0, ymm3 /* B UV */ \ + __asm vpmaddubsw ymm1, ymm1, ymm3 /* G UV */ \ + __asm vpmaddubsw ymm2, ymm2, ymm3 /* B UV */ \ + __asm vmovdqu ymm3, ymmword ptr [YuvConstants + KYBIASTORGB] \ + __asm vpaddw ymm4, ymm3, ymm4 \ + __asm vpaddsw ymm0, ymm0, ymm4 \ + __asm vpsubsw ymm1, ymm4, ymm1 \ + __asm vpaddsw ymm2, ymm2, ymm4 \ __asm vpsraw ymm0, ymm0, 6 \ __asm vpsraw ymm1, ymm1, 6 \ __asm vpsraw ymm2, ymm2, 6 \ - __asm vpackuswb ymm0, ymm0, ymm0 /* B */ \ - __asm vpackuswb ymm1, ymm1, ymm1 /* G */ \ - __asm vpackuswb ymm2, ymm2, ymm2 /* R */ \ - } + __asm vpackuswb ymm0, ymm0, ymm0 \ + __asm vpackuswb ymm1, ymm1, ymm1 \ + __asm vpackuswb ymm2, ymm2, ymm2} // Store 16 ARGB values. #define STOREARGB_AVX2 \ - __asm { \ - __asm vpunpcklbw ymm0, ymm0, ymm1 /* BG */ \ + __asm { \ + __asm vpunpcklbw ymm0, ymm0, ymm1 /* BG */ \ __asm vpermq ymm0, ymm0, 0xd8 \ - __asm vpunpcklbw ymm2, ymm2, ymm5 /* RA */ \ + __asm vpunpcklbw ymm2, ymm2, ymm5 /* RA */ \ __asm vpermq ymm2, ymm2, 0xd8 \ - __asm vpunpcklwd ymm1, ymm0, ymm2 /* BGRA first 8 pixels */ \ - __asm vpunpckhwd ymm0, ymm0, ymm2 /* BGRA next 8 pixels */ \ + __asm vpunpcklwd ymm1, ymm0, ymm2 /* BGRA first 8 pixels */ \ + __asm vpunpckhwd ymm0, ymm0, ymm2 /* BGRA next 8 pixels */ \ __asm vmovdqu 0[edx], ymm1 \ __asm vmovdqu 32[edx], ymm0 \ __asm lea edx, [edx + 64]} // Store 16 RGBA values. 
#define STORERGBA_AVX2 \ - __asm { \ - __asm vpunpcklbw ymm1, ymm1, ymm2 /* GR */ \ + __asm { \ + __asm vpunpcklbw ymm1, ymm1, ymm2 /* GR */ \ __asm vpermq ymm1, ymm1, 0xd8 \ - __asm vpunpcklbw ymm2, ymm5, ymm0 /* AB */ \ + __asm vpunpcklbw ymm2, ymm5, ymm0 /* AB */ \ __asm vpermq ymm2, ymm2, 0xd8 \ - __asm vpunpcklwd ymm0, ymm2, ymm1 /* ABGR first 8 pixels */ \ - __asm vpunpckhwd ymm1, ymm2, ymm1 /* ABGR next 8 pixels */ \ + __asm vpunpcklwd ymm0, ymm2, ymm1 /* ABGR first 8 pixels */ \ + __asm vpunpckhwd ymm1, ymm2, ymm1 /* ABGR next 8 pixels */ \ __asm vmovdqu [edx], ymm0 \ __asm vmovdqu [edx + 32], ymm1 \ __asm lea edx, [edx + 64]} @@ -2183,6 +2256,48 @@ __declspec(naked) void I444ToARGBRow_AVX2( } #endif // HAS_I444TOARGBROW_AVX2 +#ifdef HAS_I444ALPHATOARGBROW_AVX2 +// 16 pixels +// 16 UV values with 16 Y producing 16 ARGB (64 bytes). +__declspec(naked) void I444AlphaToARGBRow_AVX2( + const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + const uint8_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + __asm { + push esi + push edi + push ebx + push ebp + mov eax, [esp + 16 + 4] // Y + mov esi, [esp + 16 + 8] // U + mov edi, [esp + 16 + 12] // V + mov ebp, [esp + 16 + 16] // A + mov edx, [esp + 16 + 20] // argb + mov ebx, [esp + 16 + 24] // yuvconstants + mov ecx, [esp + 16 + 28] // width + sub edi, esi + convertloop: + READYUVA444_AVX2 + YUVTORGB_AVX2(ebx) + STOREARGB_AVX2 + + sub ecx, 16 + jg convertloop + + pop ebp + pop ebx + pop edi + pop esi + vzeroupper + ret + } +} +#endif // HAS_I444AlphaTOARGBROW_AVX2 + #ifdef HAS_NV12TOARGBROW_AVX2 // 16 pixels. // 8 UV values upsampled to 16 UV, mixed with 16 Y producing 16 ARGB (64 bytes). @@ -2361,191 +2476,202 @@ __declspec(naked) void I422ToRGBARow_AVX2( // Read 8 UV from 444. #define READYUV444 \ - __asm { \ - __asm movq xmm0, qword ptr [esi] /* U */ \ + __asm { \ + __asm movq xmm3, qword ptr [esi] /* U */ \ __asm movq xmm1, qword ptr [esi + edi] /* V */ \ __asm lea esi, [esi + 8] \ - __asm punpcklbw xmm0, xmm1 /* UV */ \ + __asm punpcklbw xmm3, xmm1 /* UV */ \ __asm movq xmm4, qword ptr [eax] \ __asm punpcklbw xmm4, xmm4 \ __asm lea eax, [eax + 8]} +// Read 4 UV from 444. With 8 Alpha. +#define READYUVA444 \ + __asm { \ + __asm movq xmm3, qword ptr [esi] /* U */ \ + __asm movq xmm1, qword ptr [esi + edi] /* V */ \ + __asm lea esi, [esi + 8] \ + __asm punpcklbw xmm3, xmm1 /* UV */ \ + __asm movq xmm4, qword ptr [eax] \ + __asm punpcklbw xmm4, xmm4 \ + __asm lea eax, [eax + 8] \ + __asm movq xmm5, qword ptr [ebp] /* A */ \ + __asm lea ebp, [ebp + 8]} + // Read 4 UV from 422, upsample to 8 UV. #define READYUV422 \ - __asm { \ - __asm movd xmm0, [esi] /* U */ \ - __asm movd xmm1, [esi + edi] /* V */ \ + __asm { \ + __asm movd xmm3, [esi] /* U */ \ + __asm movd xmm1, [esi + edi] /* V */ \ __asm lea esi, [esi + 4] \ - __asm punpcklbw xmm0, xmm1 /* UV */ \ - __asm punpcklwd xmm0, xmm0 /* UVUV (upsample) */ \ + __asm punpcklbw xmm3, xmm1 /* UV */ \ + __asm punpcklwd xmm3, xmm3 /* UVUV (upsample) */ \ __asm movq xmm4, qword ptr [eax] \ __asm punpcklbw xmm4, xmm4 \ __asm lea eax, [eax + 8]} // Read 4 UV from 422, upsample to 8 UV. With 8 Alpha. 
#define READYUVA422 \ - __asm { \ - __asm movd xmm0, [esi] /* U */ \ - __asm movd xmm1, [esi + edi] /* V */ \ + __asm { \ + __asm movd xmm3, [esi] /* U */ \ + __asm movd xmm1, [esi + edi] /* V */ \ __asm lea esi, [esi + 4] \ - __asm punpcklbw xmm0, xmm1 /* UV */ \ - __asm punpcklwd xmm0, xmm0 /* UVUV (upsample) */ \ - __asm movq xmm4, qword ptr [eax] /* Y */ \ + __asm punpcklbw xmm3, xmm1 /* UV */ \ + __asm punpcklwd xmm3, xmm3 /* UVUV (upsample) */ \ + __asm movq xmm4, qword ptr [eax] /* Y */ \ __asm punpcklbw xmm4, xmm4 \ __asm lea eax, [eax + 8] \ - __asm movq xmm5, qword ptr [ebp] /* A */ \ + __asm movq xmm5, qword ptr [ebp] /* A */ \ __asm lea ebp, [ebp + 8]} // Read 4 UV from NV12, upsample to 8 UV. #define READNV12 \ - __asm { \ - __asm movq xmm0, qword ptr [esi] /* UV */ \ + __asm { \ + __asm movq xmm3, qword ptr [esi] /* UV */ \ __asm lea esi, [esi + 8] \ - __asm punpcklwd xmm0, xmm0 /* UVUV (upsample) */ \ + __asm punpcklwd xmm3, xmm3 /* UVUV (upsample) */ \ __asm movq xmm4, qword ptr [eax] \ __asm punpcklbw xmm4, xmm4 \ __asm lea eax, [eax + 8]} // Read 4 VU from NV21, upsample to 8 UV. #define READNV21 \ - __asm { \ - __asm movq xmm0, qword ptr [esi] /* UV */ \ + __asm { \ + __asm movq xmm3, qword ptr [esi] /* UV */ \ __asm lea esi, [esi + 8] \ - __asm pshufb xmm0, xmmword ptr kShuffleNV21 \ + __asm pshufb xmm3, xmmword ptr kShuffleNV21 \ __asm movq xmm4, qword ptr [eax] \ __asm punpcklbw xmm4, xmm4 \ __asm lea eax, [eax + 8]} // Read 4 YUY2 with 8 Y and upsample 4 UV to 8 UV. #define READYUY2 \ - __asm { \ - __asm movdqu xmm4, [eax] /* YUY2 */ \ + __asm { \ + __asm movdqu xmm4, [eax] /* YUY2 */ \ __asm pshufb xmm4, xmmword ptr kShuffleYUY2Y \ - __asm movdqu xmm0, [eax] /* UV */ \ - __asm pshufb xmm0, xmmword ptr kShuffleYUY2UV \ + __asm movdqu xmm3, [eax] /* UV */ \ + __asm pshufb xmm3, xmmword ptr kShuffleYUY2UV \ __asm lea eax, [eax + 16]} // Read 4 UYVY with 8 Y and upsample 4 UV to 8 UV. #define READUYVY \ - __asm { \ - __asm movdqu xmm4, [eax] /* UYVY */ \ + __asm { \ + __asm movdqu xmm4, [eax] /* UYVY */ \ __asm pshufb xmm4, xmmword ptr kShuffleUYVYY \ - __asm movdqu xmm0, [eax] /* UV */ \ - __asm pshufb xmm0, xmmword ptr kShuffleUYVYUV \ + __asm movdqu xmm3, [eax] /* UV */ \ + __asm pshufb xmm3, xmmword ptr kShuffleUYVYUV \ __asm lea eax, [eax + 16]} // Convert 8 pixels: 8 UV and 8 Y. 
#define YUVTORGB(YuvConstants) \ - __asm { \ - __asm movdqa xmm1, xmm0 \ - __asm movdqa xmm2, xmm0 \ - __asm movdqa xmm3, xmm0 \ - __asm movdqa xmm0, xmmword ptr [YuvConstants + KUVBIASB] \ - __asm pmaddubsw xmm1, xmmword ptr [YuvConstants + KUVTOB] \ - __asm psubw xmm0, xmm1 \ - __asm movdqa xmm1, xmmword ptr [YuvConstants + KUVBIASG] \ - __asm pmaddubsw xmm2, xmmword ptr [YuvConstants + KUVTOG] \ - __asm psubw xmm1, xmm2 \ - __asm movdqa xmm2, xmmword ptr [YuvConstants + KUVBIASR] \ - __asm pmaddubsw xmm3, xmmword ptr [YuvConstants + KUVTOR] \ - __asm psubw xmm2, xmm3 \ + __asm { \ + __asm psubb xmm3, xmmword ptr kBiasUV128 \ __asm pmulhuw xmm4, xmmword ptr [YuvConstants + KYTORGB] \ - __asm paddsw xmm0, xmm4 /* B += Y */ \ - __asm paddsw xmm1, xmm4 /* G += Y */ \ - __asm paddsw xmm2, xmm4 /* R += Y */ \ + __asm movdqa xmm0, xmmword ptr [YuvConstants + KUVTOB] \ + __asm movdqa xmm1, xmmword ptr [YuvConstants + KUVTOG] \ + __asm movdqa xmm2, xmmword ptr [YuvConstants + KUVTOR] \ + __asm pmaddubsw xmm0, xmm3 \ + __asm pmaddubsw xmm1, xmm3 \ + __asm pmaddubsw xmm2, xmm3 \ + __asm movdqa xmm3, xmmword ptr [YuvConstants + KYBIASTORGB] \ + __asm paddw xmm4, xmm3 \ + __asm paddsw xmm0, xmm4 \ + __asm paddsw xmm2, xmm4 \ + __asm psubsw xmm4, xmm1 \ + __asm movdqa xmm1, xmm4 \ __asm psraw xmm0, 6 \ __asm psraw xmm1, 6 \ __asm psraw xmm2, 6 \ - __asm packuswb xmm0, xmm0 /* B */ \ - __asm packuswb xmm1, xmm1 /* G */ \ + __asm packuswb xmm0, xmm0 /* B */ \ + __asm packuswb xmm1, xmm1 /* G */ \ __asm packuswb xmm2, xmm2 /* R */ \ } // Store 8 ARGB values. #define STOREARGB \ - __asm { \ - __asm punpcklbw xmm0, xmm1 /* BG */ \ - __asm punpcklbw xmm2, xmm5 /* RA */ \ + __asm { \ + __asm punpcklbw xmm0, xmm1 /* BG */ \ + __asm punpcklbw xmm2, xmm5 /* RA */ \ __asm movdqa xmm1, xmm0 \ - __asm punpcklwd xmm0, xmm2 /* BGRA first 4 pixels */ \ - __asm punpckhwd xmm1, xmm2 /* BGRA next 4 pixels */ \ + __asm punpcklwd xmm0, xmm2 /* BGRA first 4 pixels */ \ + __asm punpckhwd xmm1, xmm2 /* BGRA next 4 pixels */ \ __asm movdqu 0[edx], xmm0 \ __asm movdqu 16[edx], xmm1 \ __asm lea edx, [edx + 32]} // Store 8 BGRA values. #define STOREBGRA \ - __asm { \ - __asm pcmpeqb xmm5, xmm5 /* generate 0xffffffff for alpha */ \ - __asm punpcklbw xmm1, xmm0 /* GB */ \ - __asm punpcklbw xmm5, xmm2 /* AR */ \ + __asm { \ + __asm pcmpeqb xmm5, xmm5 /* generate 0xffffffff for alpha */ \ + __asm punpcklbw xmm1, xmm0 /* GB */ \ + __asm punpcklbw xmm5, xmm2 /* AR */ \ __asm movdqa xmm0, xmm5 \ - __asm punpcklwd xmm5, xmm1 /* BGRA first 4 pixels */ \ - __asm punpckhwd xmm0, xmm1 /* BGRA next 4 pixels */ \ + __asm punpcklwd xmm5, xmm1 /* BGRA first 4 pixels */ \ + __asm punpckhwd xmm0, xmm1 /* BGRA next 4 pixels */ \ __asm movdqu 0[edx], xmm5 \ __asm movdqu 16[edx], xmm0 \ __asm lea edx, [edx + 32]} // Store 8 RGBA values. #define STORERGBA \ - __asm { \ - __asm pcmpeqb xmm5, xmm5 /* generate 0xffffffff for alpha */ \ - __asm punpcklbw xmm1, xmm2 /* GR */ \ - __asm punpcklbw xmm5, xmm0 /* AB */ \ + __asm { \ + __asm pcmpeqb xmm5, xmm5 /* generate 0xffffffff for alpha */ \ + __asm punpcklbw xmm1, xmm2 /* GR */ \ + __asm punpcklbw xmm5, xmm0 /* AB */ \ __asm movdqa xmm0, xmm5 \ - __asm punpcklwd xmm5, xmm1 /* RGBA first 4 pixels */ \ - __asm punpckhwd xmm0, xmm1 /* RGBA next 4 pixels */ \ + __asm punpcklwd xmm5, xmm1 /* RGBA first 4 pixels */ \ + __asm punpckhwd xmm0, xmm1 /* RGBA next 4 pixels */ \ __asm movdqu 0[edx], xmm5 \ __asm movdqu 16[edx], xmm0 \ __asm lea edx, [edx + 32]} // Store 8 RGB24 values. 
#define STORERGB24 \ - __asm {/* Weave into RRGB */ \ - __asm punpcklbw xmm0, xmm1 /* BG */ \ - __asm punpcklbw xmm2, xmm2 /* RR */ \ + __asm {/* Weave into RRGB */ \ + __asm punpcklbw xmm0, xmm1 /* BG */ \ + __asm punpcklbw xmm2, xmm2 /* RR */ \ __asm movdqa xmm1, xmm0 \ - __asm punpcklwd xmm0, xmm2 /* BGRR first 4 pixels */ \ - __asm punpckhwd xmm1, xmm2 /* BGRR next 4 pixels */ /* RRGB -> RGB24 */ \ - __asm pshufb xmm0, xmm5 /* Pack first 8 and last 4 bytes. */ \ - __asm pshufb xmm1, xmm6 /* Pack first 12 bytes. */ \ - __asm palignr xmm1, xmm0, 12 /* last 4 bytes of xmm0 + 12 xmm1 */ \ - __asm movq qword ptr 0[edx], xmm0 /* First 8 bytes */ \ - __asm movdqu 8[edx], xmm1 /* Last 16 bytes */ \ + __asm punpcklwd xmm0, xmm2 /* BGRR first 4 pixels */ \ + __asm punpckhwd xmm1, xmm2 /* BGRR next 4 pixels */ /* RRGB -> RGB24 */ \ + __asm pshufb xmm0, xmm5 /* Pack first 8 and last 4 bytes. */ \ + __asm pshufb xmm1, xmm6 /* Pack first 12 bytes. */ \ + __asm palignr xmm1, xmm0, 12 /* last 4 bytes of xmm0 + 12 xmm1 */ \ + __asm movq qword ptr 0[edx], xmm0 /* First 8 bytes */ \ + __asm movdqu 8[edx], xmm1 /* Last 16 bytes */ \ __asm lea edx, [edx + 24]} // Store 8 RGB565 values. #define STORERGB565 \ - __asm {/* Weave into RRGB */ \ - __asm punpcklbw xmm0, xmm1 /* BG */ \ - __asm punpcklbw xmm2, xmm2 /* RR */ \ + __asm {/* Weave into RRGB */ \ + __asm punpcklbw xmm0, xmm1 /* BG */ \ + __asm punpcklbw xmm2, xmm2 /* RR */ \ __asm movdqa xmm1, xmm0 \ - __asm punpcklwd xmm0, xmm2 /* BGRR first 4 pixels */ \ - __asm punpckhwd xmm1, xmm2 /* BGRR next 4 pixels */ /* RRGB -> RGB565 */ \ - __asm movdqa xmm3, xmm0 /* B first 4 pixels of argb */ \ - __asm movdqa xmm2, xmm0 /* G */ \ - __asm pslld xmm0, 8 /* R */ \ - __asm psrld xmm3, 3 /* B */ \ - __asm psrld xmm2, 5 /* G */ \ - __asm psrad xmm0, 16 /* R */ \ - __asm pand xmm3, xmm5 /* B */ \ - __asm pand xmm2, xmm6 /* G */ \ - __asm pand xmm0, xmm7 /* R */ \ - __asm por xmm3, xmm2 /* BG */ \ - __asm por xmm0, xmm3 /* BGR */ \ - __asm movdqa xmm3, xmm1 /* B next 4 pixels of argb */ \ - __asm movdqa xmm2, xmm1 /* G */ \ - __asm pslld xmm1, 8 /* R */ \ - __asm psrld xmm3, 3 /* B */ \ - __asm psrld xmm2, 5 /* G */ \ - __asm psrad xmm1, 16 /* R */ \ - __asm pand xmm3, xmm5 /* B */ \ - __asm pand xmm2, xmm6 /* G */ \ - __asm pand xmm1, xmm7 /* R */ \ - __asm por xmm3, xmm2 /* BG */ \ - __asm por xmm1, xmm3 /* BGR */ \ + __asm punpcklwd xmm0, xmm2 /* BGRR first 4 pixels */ \ + __asm punpckhwd xmm1, xmm2 /* BGRR next 4 pixels */ /* RRGB -> RGB565 */ \ + __asm movdqa xmm3, xmm0 /* B first 4 pixels of argb */ \ + __asm movdqa xmm2, xmm0 /* G */ \ + __asm pslld xmm0, 8 /* R */ \ + __asm psrld xmm3, 3 /* B */ \ + __asm psrld xmm2, 5 /* G */ \ + __asm psrad xmm0, 16 /* R */ \ + __asm pand xmm3, xmm5 /* B */ \ + __asm pand xmm2, xmm6 /* G */ \ + __asm pand xmm0, xmm7 /* R */ \ + __asm por xmm3, xmm2 /* BG */ \ + __asm por xmm0, xmm3 /* BGR */ \ + __asm movdqa xmm3, xmm1 /* B next 4 pixels of argb */ \ + __asm movdqa xmm2, xmm1 /* G */ \ + __asm pslld xmm1, 8 /* R */ \ + __asm psrld xmm3, 3 /* B */ \ + __asm psrld xmm2, 5 /* G */ \ + __asm psrad xmm1, 16 /* R */ \ + __asm pand xmm3, xmm5 /* B */ \ + __asm pand xmm2, xmm6 /* G */ \ + __asm pand xmm1, xmm7 /* R */ \ + __asm por xmm3, xmm2 /* BG */ \ + __asm por xmm1, xmm3 /* BGR */ \ __asm packssdw xmm0, xmm1 \ - __asm movdqu 0[edx], xmm0 /* store 8 pixels of RGB565 */ \ + __asm movdqu 0[edx], xmm0 /* store 8 pixels of RGB565 */ \ __asm lea edx, [edx + 16]} // 8 pixels. 
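For reference, the reindented STORERGB565 macro above reduces each weaved BGRR dword to a 565 value by shifting the whole dword and masking out the surviving field bits. Per pixel that is the usual 5-6-5 packing, sketched below; the helper name is made up, and the sketch assumes xmm5/xmm6/xmm7 hold the customary 0x001F, 0x07E0 and 0xF800 lane masks, which are loaded outside the macro and not visible in this hunk.

#include <stdint.h>

/* Per-pixel model of STORERGB565 (sketch; mask values assumed as noted). */
static uint16_t PackRGB565(uint8_t b, uint8_t g, uint8_t r) {
  uint16_t b5 = (uint16_t)(b >> 3);         /* psrld 3,  pand 0x001F          */
  uint16_t g6 = (uint16_t)((g >> 2) << 5);  /* psrld 5,  pand 0x07E0          */
  uint16_t r5 = (uint16_t)((r >> 3) << 11); /* pslld 8, psrad 16, pand 0xF800 */
  return (uint16_t)(r5 | g6 | b5);          /* por, por, then packssdw        */
}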
@@ -2585,6 +2711,46 @@ __declspec(naked) void I444ToARGBRow_SSSE3( } } +// 8 pixels. +// 8 UV values, mixed with 8 Y and 8A producing 8 ARGB (32 bytes). +__declspec(naked) void I444AlphaToARGBRow_SSSE3( + const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + const uint8_t* a_buf, + uint8_t* dst_argb, + const struct YuvConstants* yuvconstants, + int width) { + __asm { + push esi + push edi + push ebx + push ebp + mov eax, [esp + 16 + 4] // Y + mov esi, [esp + 16 + 8] // U + mov edi, [esp + 16 + 12] // V + mov ebp, [esp + 16 + 16] // A + mov edx, [esp + 16 + 20] // argb + mov ebx, [esp + 16 + 24] // yuvconstants + mov ecx, [esp + 16 + 28] // width + sub edi, esi + + convertloop: + READYUVA444 + YUVTORGB(ebx) + STOREARGB + + sub ecx, 8 + jg convertloop + + pop ebp + pop ebx + pop edi + pop esi + ret + } +} + // 8 pixels. // 4 UV values upsampled to 8 UV, mixed with 8 Y producing 8 RGB24 (24 bytes). __declspec(naked) void I422ToRGB24Row_SSSE3( @@ -2623,6 +2789,44 @@ __declspec(naked) void I422ToRGB24Row_SSSE3( } } +// 8 pixels. +// 8 UV values, mixed with 8 Y producing 8 RGB24 (24 bytes). +__declspec(naked) void I444ToRGB24Row_SSSE3( + const uint8_t* y_buf, + const uint8_t* u_buf, + const uint8_t* v_buf, + uint8_t* dst_rgb24, + const struct YuvConstants* yuvconstants, + int width) { + __asm { + push esi + push edi + push ebx + mov eax, [esp + 12 + 4] // Y + mov esi, [esp + 12 + 8] // U + mov edi, [esp + 12 + 12] // V + mov edx, [esp + 12 + 16] // argb + mov ebx, [esp + 12 + 20] // yuvconstants + mov ecx, [esp + 12 + 24] // width + sub edi, esi + movdqa xmm5, xmmword ptr kShuffleMaskARGBToRGB24_0 + movdqa xmm6, xmmword ptr kShuffleMaskARGBToRGB24 + + convertloop: + READYUV444 + YUVTORGB(ebx) + STORERGB24 + + sub ecx, 8 + jg convertloop + + pop ebx + pop edi + pop esi + ret + } +} + // 8 pixels // 4 UV values upsampled to 8 UV, mixed with 8 Y producing 8 RGB565 (16 bytes). __declspec(naked) void I422ToRGB565Row_SSSE3( @@ -4175,13 +4379,13 @@ static const uvec8 kShuffleAlpha = {3u, 0x80, 3u, 0x80, 7u, 0x80, 7u, 0x80, 11u, 0x80, 11u, 0x80, 15u, 0x80, 15u, 0x80}; // Blend 8 pixels at a time. 
-__declspec(naked) void ARGBBlendRow_SSSE3(const uint8_t* src_argb0, +__declspec(naked) void ARGBBlendRow_SSSE3(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { __asm { push esi - mov eax, [esp + 4 + 4] // src_argb0 + mov eax, [esp + 4 + 4] // src_argb mov esi, [esp + 4 + 8] // src_argb1 mov edx, [esp + 4 + 12] // dst_argb mov ecx, [esp + 4 + 16] // width @@ -4270,7 +4474,7 @@ __declspec(naked) void ARGBAttenuateRow_SSSE3(const uint8_t* src_argb, uint8_t* dst_argb, int width) { __asm { - mov eax, [esp + 4] // src_argb0 + mov eax, [esp + 4] // src_argb mov edx, [esp + 8] // dst_argb mov ecx, [esp + 12] // width pcmpeqb xmm3, xmm3 // generate mask 0xff000000 @@ -4315,7 +4519,7 @@ __declspec(naked) void ARGBAttenuateRow_AVX2(const uint8_t* src_argb, uint8_t* dst_argb, int width) { __asm { - mov eax, [esp + 4] // src_argb0 + mov eax, [esp + 4] // src_argb mov edx, [esp + 8] // dst_argb mov ecx, [esp + 12] // width sub edx, eax @@ -4409,7 +4613,7 @@ __declspec(naked) void ARGBUnattenuateRow_AVX2(const uint8_t* src_argb, uint8_t* dst_argb, int width) { __asm { - mov eax, [esp + 4] // src_argb0 + mov eax, [esp + 4] // src_argb mov edx, [esp + 8] // dst_argb mov ecx, [esp + 12] // width sub edx, eax @@ -4765,20 +4969,20 @@ __declspec(naked) void ARGBShadeRow_SSE2(const uint8_t* src_argb, #ifdef HAS_ARGBMULTIPLYROW_SSE2 // Multiply 2 rows of ARGB pixels together, 4 pixels at a time. -__declspec(naked) void ARGBMultiplyRow_SSE2(const uint8_t* src_argb0, +__declspec(naked) void ARGBMultiplyRow_SSE2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { __asm { push esi - mov eax, [esp + 4 + 4] // src_argb0 + mov eax, [esp + 4 + 4] // src_argb mov esi, [esp + 4 + 8] // src_argb1 mov edx, [esp + 4 + 12] // dst_argb mov ecx, [esp + 4 + 16] // width pxor xmm5, xmm5 // constant 0 convertloop: - movdqu xmm0, [eax] // read 4 pixels from src_argb0 + movdqu xmm0, [eax] // read 4 pixels from src_argb movdqu xmm2, [esi] // read 4 pixels from src_argb1 movdqu xmm1, xmm0 movdqu xmm3, xmm2 @@ -4786,8 +4990,8 @@ __declspec(naked) void ARGBMultiplyRow_SSE2(const uint8_t* src_argb0, punpckhbw xmm1, xmm1 // next 2 punpcklbw xmm2, xmm5 // first 2 punpckhbw xmm3, xmm5 // next 2 - pmulhuw xmm0, xmm2 // src_argb0 * src_argb1 first 2 - pmulhuw xmm1, xmm3 // src_argb0 * src_argb1 next 2 + pmulhuw xmm0, xmm2 // src_argb * src_argb1 first 2 + pmulhuw xmm1, xmm3 // src_argb * src_argb1 next 2 lea eax, [eax + 16] lea esi, [esi + 16] packuswb xmm0, xmm1 @@ -4805,13 +5009,13 @@ __declspec(naked) void ARGBMultiplyRow_SSE2(const uint8_t* src_argb0, #ifdef HAS_ARGBADDROW_SSE2 // Add 2 rows of ARGB pixels together, 4 pixels at a time. // TODO(fbarchard): Port this to posix, neon and other math functions. 
-__declspec(naked) void ARGBAddRow_SSE2(const uint8_t* src_argb0, +__declspec(naked) void ARGBAddRow_SSE2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { __asm { push esi - mov eax, [esp + 4 + 4] // src_argb0 + mov eax, [esp + 4 + 4] // src_argb mov esi, [esp + 4 + 8] // src_argb1 mov edx, [esp + 4 + 12] // dst_argb mov ecx, [esp + 4 + 16] // width @@ -4820,11 +5024,11 @@ __declspec(naked) void ARGBAddRow_SSE2(const uint8_t* src_argb0, jl convertloop49 convertloop4: - movdqu xmm0, [eax] // read 4 pixels from src_argb0 + movdqu xmm0, [eax] // read 4 pixels from src_argb lea eax, [eax + 16] movdqu xmm1, [esi] // read 4 pixels from src_argb1 lea esi, [esi + 16] - paddusb xmm0, xmm1 // src_argb0 + src_argb1 + paddusb xmm0, xmm1 // src_argb + src_argb1 movdqu [edx], xmm0 lea edx, [edx + 16] sub ecx, 4 @@ -4835,11 +5039,11 @@ __declspec(naked) void ARGBAddRow_SSE2(const uint8_t* src_argb0, jl convertloop19 convertloop1: - movd xmm0, [eax] // read 1 pixels from src_argb0 + movd xmm0, [eax] // read 1 pixels from src_argb lea eax, [eax + 4] movd xmm1, [esi] // read 1 pixels from src_argb1 lea esi, [esi + 4] - paddusb xmm0, xmm1 // src_argb0 + src_argb1 + paddusb xmm0, xmm1 // src_argb + src_argb1 movd [edx], xmm0 lea edx, [edx + 4] sub ecx, 1 @@ -4854,23 +5058,23 @@ __declspec(naked) void ARGBAddRow_SSE2(const uint8_t* src_argb0, #ifdef HAS_ARGBSUBTRACTROW_SSE2 // Subtract 2 rows of ARGB pixels together, 4 pixels at a time. -__declspec(naked) void ARGBSubtractRow_SSE2(const uint8_t* src_argb0, +__declspec(naked) void ARGBSubtractRow_SSE2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { __asm { push esi - mov eax, [esp + 4 + 4] // src_argb0 + mov eax, [esp + 4 + 4] // src_argb mov esi, [esp + 4 + 8] // src_argb1 mov edx, [esp + 4 + 12] // dst_argb mov ecx, [esp + 4 + 16] // width convertloop: - movdqu xmm0, [eax] // read 4 pixels from src_argb0 + movdqu xmm0, [eax] // read 4 pixels from src_argb lea eax, [eax + 16] movdqu xmm1, [esi] // read 4 pixels from src_argb1 lea esi, [esi + 16] - psubusb xmm0, xmm1 // src_argb0 - src_argb1 + psubusb xmm0, xmm1 // src_argb - src_argb1 movdqu [edx], xmm0 lea edx, [edx + 16] sub ecx, 4 @@ -4884,20 +5088,20 @@ __declspec(naked) void ARGBSubtractRow_SSE2(const uint8_t* src_argb0, #ifdef HAS_ARGBMULTIPLYROW_AVX2 // Multiply 2 rows of ARGB pixels together, 8 pixels at a time. 
-__declspec(naked) void ARGBMultiplyRow_AVX2(const uint8_t* src_argb0, +__declspec(naked) void ARGBMultiplyRow_AVX2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { __asm { push esi - mov eax, [esp + 4 + 4] // src_argb0 + mov eax, [esp + 4 + 4] // src_argb mov esi, [esp + 4 + 8] // src_argb1 mov edx, [esp + 4 + 12] // dst_argb mov ecx, [esp + 4 + 16] // width vpxor ymm5, ymm5, ymm5 // constant 0 convertloop: - vmovdqu ymm1, [eax] // read 8 pixels from src_argb0 + vmovdqu ymm1, [eax] // read 8 pixels from src_argb lea eax, [eax + 32] vmovdqu ymm3, [esi] // read 8 pixels from src_argb1 lea esi, [esi + 32] @@ -4905,8 +5109,8 @@ __declspec(naked) void ARGBMultiplyRow_AVX2(const uint8_t* src_argb0, vpunpckhbw ymm1, ymm1, ymm1 // high 4 vpunpcklbw ymm2, ymm3, ymm5 // low 4 vpunpckhbw ymm3, ymm3, ymm5 // high 4 - vpmulhuw ymm0, ymm0, ymm2 // src_argb0 * src_argb1 low 4 - vpmulhuw ymm1, ymm1, ymm3 // src_argb0 * src_argb1 high 4 + vpmulhuw ymm0, ymm0, ymm2 // src_argb * src_argb1 low 4 + vpmulhuw ymm1, ymm1, ymm3 // src_argb * src_argb1 high 4 vpackuswb ymm0, ymm0, ymm1 vmovdqu [edx], ymm0 lea edx, [edx + 32] @@ -4922,19 +5126,19 @@ __declspec(naked) void ARGBMultiplyRow_AVX2(const uint8_t* src_argb0, #ifdef HAS_ARGBADDROW_AVX2 // Add 2 rows of ARGB pixels together, 8 pixels at a time. -__declspec(naked) void ARGBAddRow_AVX2(const uint8_t* src_argb0, +__declspec(naked) void ARGBAddRow_AVX2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { __asm { push esi - mov eax, [esp + 4 + 4] // src_argb0 + mov eax, [esp + 4 + 4] // src_argb mov esi, [esp + 4 + 8] // src_argb1 mov edx, [esp + 4 + 12] // dst_argb mov ecx, [esp + 4 + 16] // width convertloop: - vmovdqu ymm0, [eax] // read 8 pixels from src_argb0 + vmovdqu ymm0, [eax] // read 8 pixels from src_argb lea eax, [eax + 32] vpaddusb ymm0, ymm0, [esi] // add 8 pixels from src_argb1 lea esi, [esi + 32] @@ -4952,21 +5156,21 @@ __declspec(naked) void ARGBAddRow_AVX2(const uint8_t* src_argb0, #ifdef HAS_ARGBSUBTRACTROW_AVX2 // Subtract 2 rows of ARGB pixels together, 8 pixels at a time. -__declspec(naked) void ARGBSubtractRow_AVX2(const uint8_t* src_argb0, +__declspec(naked) void ARGBSubtractRow_AVX2(const uint8_t* src_argb, const uint8_t* src_argb1, uint8_t* dst_argb, int width) { __asm { push esi - mov eax, [esp + 4 + 4] // src_argb0 + mov eax, [esp + 4 + 4] // src_argb mov esi, [esp + 4 + 8] // src_argb1 mov edx, [esp + 4 + 12] // dst_argb mov ecx, [esp + 4 + 16] // width convertloop: - vmovdqu ymm0, [eax] // read 8 pixels from src_argb0 + vmovdqu ymm0, [eax] // read 8 pixels from src_argb lea eax, [eax + 32] - vpsubusb ymm0, ymm0, [esi] // src_argb0 - src_argb1 + vpsubusb ymm0, ymm0, [esi] // src_argb - src_argb1 lea esi, [esi + 32] vmovdqu [edx], ymm0 lea edx, [edx + 32] diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale.cc b/TMessagesProj/jni/third_party/libyuv/source/scale.cc index cf3c033257..65a4685fc0 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale.cc @@ -29,6 +29,7 @@ static __inline int Abs(int v) { } #define SUBSAMPLE(v, a, s) (v < 0) ? (-((-v + a) >> s)) : ((v + a) >> s) +#define CENTERSTART(dx, s) (dx < 0) ? -((-dx >> 1) + s) : ((dx >> 1) + s) // Scale plane, 1/2 // This is an optimized version for scaling down a plane to 1/2 of @@ -50,7 +51,7 @@ static void ScalePlaneDown2(int src_width, ? ScaleRowDown2_C : (filtering == kFilterLinear ? 
ScaleRowDown2Linear_C : ScaleRowDown2Box_C); - int row_stride = src_stride << 1; + int row_stride = src_stride * 2; (void)src_width; (void)src_height; if (!filtering) { @@ -104,21 +105,6 @@ static void ScalePlaneDown2(int src_width, } } #endif -#if defined(HAS_SCALEROWDOWN2_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ScaleRowDown2 = - filtering == kFilterNone - ? ScaleRowDown2_Any_MMI - : (filtering == kFilterLinear ? ScaleRowDown2Linear_Any_MMI - : ScaleRowDown2Box_Any_MMI); - if (IS_ALIGNED(dst_width, 8)) { - ScaleRowDown2 = filtering == kFilterNone ? ScaleRowDown2_MMI - : (filtering == kFilterLinear - ? ScaleRowDown2Linear_MMI - : ScaleRowDown2Box_MMI); - } - } -#endif #if defined(HAS_SCALEROWDOWN2_MSA) if (TestCpuFlag(kCpuHasMSA)) { ScaleRowDown2 = @@ -134,6 +120,21 @@ static void ScalePlaneDown2(int src_width, } } #endif +#if defined(HAS_SCALEROWDOWN2_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ScaleRowDown2 = + filtering == kFilterNone + ? ScaleRowDown2_Any_LSX + : (filtering == kFilterLinear ? ScaleRowDown2Linear_Any_LSX + : ScaleRowDown2Box_Any_LSX); + if (IS_ALIGNED(dst_width, 32)) { + ScaleRowDown2 = filtering == kFilterNone ? ScaleRowDown2_LSX + : (filtering == kFilterLinear + ? ScaleRowDown2Linear_LSX + : ScaleRowDown2Box_LSX); + } + } +#endif if (filtering == kFilterLinear) { src_stride = 0; @@ -162,7 +163,7 @@ static void ScalePlaneDown2_16(int src_width, ? ScaleRowDown2_16_C : (filtering == kFilterLinear ? ScaleRowDown2Linear_16_C : ScaleRowDown2Box_16_C); - int row_stride = src_stride << 1; + int row_stride = src_stride * 2; (void)src_width; (void)src_height; if (!filtering) { @@ -185,14 +186,6 @@ static void ScalePlaneDown2_16(int src_width, : ScaleRowDown2Box_16_SSE2); } #endif -#if defined(HAS_SCALEROWDOWN2_16_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(dst_width, 4)) { - ScaleRowDown2 = filtering == kFilterNone ? ScaleRowDown2_16_MMI - : (filtering == kFilterLinear - ? ScaleRowDown2Linear_16_MMI - : ScaleRowDown2Box_16_MMI); - } -#endif if (filtering == kFilterLinear) { src_stride = 0; @@ -205,6 +198,51 @@ static void ScalePlaneDown2_16(int src_width, } } +void ScalePlaneDown2_16To8(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint16_t* src_ptr, + uint8_t* dst_ptr, + int scale, + enum FilterMode filtering) { + int y; + void (*ScaleRowDown2)(const uint16_t* src_ptr, ptrdiff_t src_stride, + uint8_t* dst_ptr, int dst_width, int scale) = + (src_width & 1) + ? (filtering == kFilterNone + ? ScaleRowDown2_16To8_Odd_C + : (filtering == kFilterLinear ? ScaleRowDown2Linear_16To8_Odd_C + : ScaleRowDown2Box_16To8_Odd_C)) + : (filtering == kFilterNone + ? ScaleRowDown2_16To8_C + : (filtering == kFilterLinear ? ScaleRowDown2Linear_16To8_C + : ScaleRowDown2Box_16To8_C)); + int row_stride = src_stride * 2; + (void)dst_height; + if (!filtering) { + src_ptr += src_stride; // Point to odd rows. + src_stride = 0; + } + + if (filtering == kFilterLinear) { + src_stride = 0; + } + for (y = 0; y < src_height / 2; ++y) { + ScaleRowDown2(src_ptr, src_stride, dst_ptr, dst_width, scale); + src_ptr += row_stride; + dst_ptr += dst_stride; + } + if (src_height & 1) { + if (!filtering) { + src_ptr -= src_stride; // Point to last row. + } + ScaleRowDown2(src_ptr, 0, dst_ptr, dst_width, scale); + } +} + // Scale plane, 1/4 // This is an optimized version for scaling down a plane to 1/4 of // its original size. 
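The new ScalePlaneDown2_16To8 above forwards a power-of-two scale value to the 16-to-8 row helpers, and the Convert16To8Row_NEON comment earlier in this patch spells out the mapping: 32768 means shift right by 1 for 9-bit sources, 16384 by 2, 4096 by 4, and 256 by 8 for full 16-bit sources. A scalar sketch of that mapping follows, one element at a time; the function name and the plain loop are illustrative and ignore the odd-width and box-filter row variants that ScalePlaneDown2_16To8 selects from.

#include <stdint.h>

/* Scalar model of the scale-to-shift mapping used by Convert16To8Row_NEON
 * and the 16To8 scale rows (sketch; name and loop are illustrative only). */
static void Convert16To8Row_Sketch(const uint16_t* src, uint8_t* dst,
                                   int scale, int width) {
  /* The asm computes 15 - clz(scale), i.e. the negated amount, and feeds it
   * to ushl, where a negative count shifts right. */
  int shift = __builtin_clz((unsigned)scale) - 15; /* 32768 -> 1 ... 256 -> 8 */
  int x;
  for (x = 0; x < width; ++x) {
    int v = src[x] >> shift;
    dst[x] = (uint8_t)(v > 255 ? 255 : v); /* uqxtn saturates to 8 bits */
  }
}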
@@ -222,7 +260,7 @@ static void ScalePlaneDown4(int src_width, void (*ScaleRowDown4)(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst_ptr, int dst_width) = filtering ? ScaleRowDown4Box_C : ScaleRowDown4_C; - int row_stride = src_stride << 2; + int row_stride = src_stride * 4; (void)src_width; (void)src_height; if (!filtering) { @@ -256,15 +294,6 @@ static void ScalePlaneDown4(int src_width, } } #endif -#if defined(HAS_SCALEROWDOWN4_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ScaleRowDown4 = - filtering ? ScaleRowDown4Box_Any_MMI : ScaleRowDown4_Any_MMI; - if (IS_ALIGNED(dst_width, 8)) { - ScaleRowDown4 = filtering ? ScaleRowDown4Box_MMI : ScaleRowDown4_MMI; - } - } -#endif #if defined(HAS_SCALEROWDOWN4_MSA) if (TestCpuFlag(kCpuHasMSA)) { ScaleRowDown4 = @@ -274,6 +303,15 @@ static void ScalePlaneDown4(int src_width, } } #endif +#if defined(HAS_SCALEROWDOWN4_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ScaleRowDown4 = + filtering ? ScaleRowDown4Box_Any_LSX : ScaleRowDown4_Any_LSX; + if (IS_ALIGNED(dst_width, 16)) { + ScaleRowDown4 = filtering ? ScaleRowDown4Box_LSX : ScaleRowDown4_LSX; + } + } +#endif if (filtering == kFilterLinear) { src_stride = 0; @@ -298,7 +336,7 @@ static void ScalePlaneDown4_16(int src_width, void (*ScaleRowDown4)(const uint16_t* src_ptr, ptrdiff_t src_stride, uint16_t* dst_ptr, int dst_width) = filtering ? ScaleRowDown4Box_16_C : ScaleRowDown4_16_C; - int row_stride = src_stride << 2; + int row_stride = src_stride * 4; (void)src_width; (void)src_height; if (!filtering) { @@ -317,11 +355,6 @@ static void ScalePlaneDown4_16(int src_width, filtering ? ScaleRowDown4Box_16_SSE2 : ScaleRowDown4_16_SSE2; } #endif -#if defined(HAS_SCALEROWDOWN4_16_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(dst_width, 8)) { - ScaleRowDown4 = filtering ? 
ScaleRowDown4Box_16_MMI : ScaleRowDown4_16_MMI; - } -#endif if (filtering == kFilterLinear) { src_stride = 0; @@ -379,18 +412,6 @@ static void ScalePlaneDown34(int src_width, } } #endif -#if defined(HAS_SCALEROWDOWN34_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - if (!filtering) { - ScaleRowDown34_0 = ScaleRowDown34_Any_MMI; - ScaleRowDown34_1 = ScaleRowDown34_Any_MMI; - if (dst_width % 24 == 0) { - ScaleRowDown34_0 = ScaleRowDown34_MMI; - ScaleRowDown34_1 = ScaleRowDown34_MMI; - } - } - } -#endif #if defined(HAS_SCALEROWDOWN34_MSA) if (TestCpuFlag(kCpuHasMSA)) { if (!filtering) { @@ -411,6 +432,26 @@ static void ScalePlaneDown34(int src_width, } } #endif +#if defined(HAS_SCALEROWDOWN34_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + if (!filtering) { + ScaleRowDown34_0 = ScaleRowDown34_Any_LSX; + ScaleRowDown34_1 = ScaleRowDown34_Any_LSX; + } else { + ScaleRowDown34_0 = ScaleRowDown34_0_Box_Any_LSX; + ScaleRowDown34_1 = ScaleRowDown34_1_Box_Any_LSX; + } + if (dst_width % 48 == 0) { + if (!filtering) { + ScaleRowDown34_0 = ScaleRowDown34_LSX; + ScaleRowDown34_1 = ScaleRowDown34_LSX; + } else { + ScaleRowDown34_0 = ScaleRowDown34_0_Box_LSX; + ScaleRowDown34_1 = ScaleRowDown34_1_Box_LSX; + } + } + } +#endif #if defined(HAS_SCALEROWDOWN34_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { if (!filtering) { @@ -626,6 +667,26 @@ static void ScalePlaneDown38(int src_width, } } #endif +#if defined(HAS_SCALEROWDOWN38_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + if (!filtering) { + ScaleRowDown38_3 = ScaleRowDown38_Any_LSX; + ScaleRowDown38_2 = ScaleRowDown38_Any_LSX; + } else { + ScaleRowDown38_3 = ScaleRowDown38_3_Box_Any_LSX; + ScaleRowDown38_2 = ScaleRowDown38_2_Box_Any_LSX; + } + if (dst_width % 12 == 0) { + if (!filtering) { + ScaleRowDown38_3 = ScaleRowDown38_LSX; + ScaleRowDown38_2 = ScaleRowDown38_LSX; + } else { + ScaleRowDown38_3 = ScaleRowDown38_3_Box_LSX; + ScaleRowDown38_2 = ScaleRowDown38_2_Box_LSX; + } + } + } +#endif for (y = 0; y < dst_height - 2; y += 3) { ScaleRowDown38_3(src_ptr, filter_stride, dst_ptr, dst_width); @@ -759,9 +820,9 @@ static void ScaleAddCols2_C(int dst_width, int ix = x >> 16; x += dx; boxwidth = MIN1((x >> 16) - ix); - *dst_ptr++ = - SumPixels(boxwidth, src_ptr + ix) * scaletbl[boxwidth - minboxwidth] >> - 16; + *dst_ptr++ = (uint8_t)(SumPixels(boxwidth, src_ptr + ix) * + scaletbl[boxwidth - minboxwidth] >> + 16); } } @@ -798,7 +859,7 @@ static void ScaleAddCols0_C(int dst_width, (void)dx; src_ptr += (x >> 16); for (i = 0; i < dst_width; ++i) { - *dst_ptr++ = src_ptr[i] * scaleval >> 16; + *dst_ptr++ = (uint8_t)(src_ptr[i] * scaleval >> 16); } } @@ -813,7 +874,7 @@ static void ScaleAddCols1_C(int dst_width, int i; x >>= 16; for (i = 0; i < dst_width; ++i) { - *dst_ptr++ = SumPixels(boxwidth, src_ptr + x) * scaleval >> 16; + *dst_ptr++ = (uint8_t)(SumPixels(boxwidth, src_ptr + x) * scaleval >> 16); x += boxwidth; } } @@ -891,14 +952,6 @@ static void ScalePlaneBox(int src_width, } } #endif -#if defined(HAS_SCALEADDROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ScaleAddRow = ScaleAddRow_Any_MMI; - if (IS_ALIGNED(src_width, 8)) { - ScaleAddRow = ScaleAddRow_MMI; - } - } -#endif #if defined(HAS_SCALEADDROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { ScaleAddRow = ScaleAddRow_Any_MSA; @@ -907,11 +960,19 @@ static void ScalePlaneBox(int src_width, } } #endif +#if defined(HAS_SCALEADDROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ScaleAddRow = ScaleAddRow_Any_LSX; + if (IS_ALIGNED(src_width, 16)) { + ScaleAddRow = ScaleAddRow_LSX; + } + } +#endif for (j = 0; j < dst_height; ++j) { int boxheight; int iy = y >> 
16; - const uint8_t* src = src_ptr + iy * src_stride; + const uint8_t* src = src_ptr + iy * (int64_t)src_stride; y += dy; if (y > max_y) { y = max_y; @@ -962,15 +1023,10 @@ static void ScalePlaneBox_16(int src_width, } #endif -#if defined(HAS_SCALEADDROW_16_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(src_width, 4)) { - ScaleAddRow = ScaleAddRow_16_MMI; - } -#endif for (j = 0; j < dst_height; ++j) { int boxheight; int iy = y >> 16; - const uint16_t* src = src_ptr + iy * src_stride; + const uint16_t* src = src_ptr + iy * (int64_t)src_stride; y += dy; if (y > max_y) { y = max_y; @@ -1043,14 +1099,6 @@ void ScalePlaneBilinearDown(int src_width, } } #endif -#if defined(HAS_INTERPOLATEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - InterpolateRow = InterpolateRow_Any_MMI; - if (IS_ALIGNED(src_width, 16)) { - InterpolateRow = InterpolateRow_MMI; - } - } -#endif #if defined(HAS_INTERPOLATEROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { InterpolateRow = InterpolateRow_Any_MSA; @@ -1059,6 +1107,14 @@ void ScalePlaneBilinearDown(int src_width, } } #endif +#if defined(HAS_INTERPOLATEROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + InterpolateRow = InterpolateRow_Any_LSX; + if (IS_ALIGNED(src_width, 32)) { + InterpolateRow = InterpolateRow_LSX; + } + } +#endif #if defined(HAS_SCALEFILTERCOLS_SSSE3) if (TestCpuFlag(kCpuHasSSSE3) && src_width < 32768) { @@ -1080,6 +1136,14 @@ void ScalePlaneBilinearDown(int src_width, ScaleFilterCols = ScaleFilterCols_MSA; } } +#endif +#if defined(HAS_SCALEFILTERCOLS_LSX) + if (TestCpuFlag(kCpuHasLSX) && src_width < 32768) { + ScaleFilterCols = ScaleFilterCols_Any_LSX; + if (IS_ALIGNED(dst_width, 16)) { + ScaleFilterCols = ScaleFilterCols_LSX; + } + } #endif if (y > max_y) { y = max_y; @@ -1087,7 +1151,7 @@ void ScalePlaneBilinearDown(int src_width, for (j = 0; j < dst_height; ++j) { int yi = y >> 16; - const uint8_t* src = src_ptr + yi * src_stride; + const uint8_t* src = src_ptr + yi * (int64_t)src_stride; if (filtering == kFilterLinear) { ScaleFilterCols(dst_ptr, src, dst_width, x, dx); } else { @@ -1136,7 +1200,7 @@ void ScalePlaneBilinearDown_16(int src_width, #if defined(HAS_INTERPOLATEROW_16_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { - InterpolateRow = InterpolateRow_Any_16_SSE2; + InterpolateRow = InterpolateRow_16_Any_SSE2; if (IS_ALIGNED(src_width, 16)) { InterpolateRow = InterpolateRow_16_SSE2; } @@ -1144,7 +1208,7 @@ void ScalePlaneBilinearDown_16(int src_width, #endif #if defined(HAS_INTERPOLATEROW_16_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - InterpolateRow = InterpolateRow_Any_16_SSSE3; + InterpolateRow = InterpolateRow_16_Any_SSSE3; if (IS_ALIGNED(src_width, 16)) { InterpolateRow = InterpolateRow_16_SSSE3; } @@ -1152,7 +1216,7 @@ void ScalePlaneBilinearDown_16(int src_width, #endif #if defined(HAS_INTERPOLATEROW_16_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - InterpolateRow = InterpolateRow_Any_16_AVX2; + InterpolateRow = InterpolateRow_16_Any_AVX2; if (IS_ALIGNED(src_width, 32)) { InterpolateRow = InterpolateRow_16_AVX2; } @@ -1160,7 +1224,7 @@ void ScalePlaneBilinearDown_16(int src_width, #endif #if defined(HAS_INTERPOLATEROW_16_NEON) if (TestCpuFlag(kCpuHasNEON)) { - InterpolateRow = InterpolateRow_Any_16_NEON; + InterpolateRow = InterpolateRow_16_Any_NEON; if (IS_ALIGNED(src_width, 16)) { InterpolateRow = InterpolateRow_16_NEON; } @@ -1178,7 +1242,7 @@ void ScalePlaneBilinearDown_16(int src_width, for (j = 0; j < dst_height; ++j) { int yi = y >> 16; - const uint16_t* src = src_ptr + yi * src_stride; + const uint16_t* src = src_ptr + yi * (int64_t)src_stride; if (filtering 
== kFilterLinear) { ScaleFilterCols(dst_ptr, src, dst_width, x, dx); } else { @@ -1270,6 +1334,14 @@ void ScalePlaneBilinearUp(int src_width, ScaleFilterCols = ScaleFilterCols_MSA; } } +#endif +#if defined(HAS_SCALEFILTERCOLS_LSX) + if (filtering && TestCpuFlag(kCpuHasLSX) && src_width < 32768) { + ScaleFilterCols = ScaleFilterCols_Any_LSX; + if (IS_ALIGNED(dst_width, 16)) { + ScaleFilterCols = ScaleFilterCols_LSX; + } + } #endif if (!filtering && src_width * 2 == dst_width && x < 0x8000) { ScaleFilterCols = ScaleColsUp2_C; @@ -1277,11 +1349,6 @@ void ScalePlaneBilinearUp(int src_width, if (TestCpuFlag(kCpuHasSSE2) && IS_ALIGNED(dst_width, 8)) { ScaleFilterCols = ScaleColsUp2_SSE2; } -#endif -#if defined(HAS_SCALECOLS_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(dst_width, 8)) { - ScaleFilterCols = ScaleColsUp2_MMI; - } #endif } @@ -1290,14 +1357,14 @@ void ScalePlaneBilinearUp(int src_width, } { int yi = y >> 16; - const uint8_t* src = src_ptr + yi * src_stride; + const uint8_t* src = src_ptr + yi * (int64_t)src_stride; // Allocate 2 row buffers. - const int kRowSize = (dst_width + 31) & ~31; - align_buffer_64(row, kRowSize * 2); + const int row_size = (dst_width + 31) & ~31; + align_buffer_64(row, row_size * 2); uint8_t* rowptr = row; - int rowstride = kRowSize; + int rowstride = row_size; int lasty = yi; ScaleFilterCols(rowptr, src, dst_width, x, dx); @@ -1305,7 +1372,9 @@ void ScalePlaneBilinearUp(int src_width, src += src_stride; } ScaleFilterCols(rowptr + rowstride, src, dst_width, x, dx); - src += src_stride; + if (src_height > 2) { + src += src_stride; + } for (j = 0; j < dst_height; ++j) { yi = y >> 16; @@ -1313,14 +1382,16 @@ void ScalePlaneBilinearUp(int src_width, if (y > max_y) { y = max_y; yi = y >> 16; - src = src_ptr + yi * src_stride; + src = src_ptr + yi * (int64_t)src_stride; } if (yi != lasty) { ScaleFilterCols(rowptr, src, dst_width, x, dx); rowptr += rowstride; rowstride = -rowstride; lasty = yi; - src += src_stride; + if ((y + 65536) < max_y) { + src += src_stride; + } } } if (filtering == kFilterLinear) { @@ -1336,6 +1407,327 @@ void ScalePlaneBilinearUp(int src_width, } } +// Scale plane, horizontally up by 2 times. +// Uses linear filter horizontally, nearest vertically. +// This is an optimized version for scaling up a plane to 2 times of +// its original width, using linear interpolation. +// This is used to scale U and V planes of I422 to I444. +void ScalePlaneUp2_Linear(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint8_t* src_ptr, + uint8_t* dst_ptr) { + void (*ScaleRowUp)(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width) = + ScaleRowUp2_Linear_Any_C; + int i; + int y; + int dy; + + // This function can only scale up by 2 times horizontally. 
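Editor's note: a change that recurs throughout these hunks is `yi * src_stride` becoming `yi * (int64_t)src_stride`. Both factors are 32-bit ints, so for tall planes with wide strides the product can exceed INT_MAX before it ever reaches the pointer arithmetic; widening one operand forces the multiplication itself into 64 bits. A small demonstration of the magnitude involved:

#include <limits.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
  int yi = 20000;          /* row index in a tall plane */
  int src_stride = 120000; /* bytes per row, e.g. a wide high-bit-depth plane */
  int64_t offset = yi * (int64_t)src_stride;  /* widen before multiplying */
  printf("offset = %lld, INT_MAX = %d\n", (long long)offset, INT_MAX);
  /* 2,400,000,000 > INT_MAX: the plain int*int form would overflow
     (undefined behaviour) before the pointer addition happens. */
  return 0;
}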
+ assert(src_width == ((dst_width + 1) / 2)); + +#ifdef HAS_SCALEROWUP2_LINEAR_SSE2 + if (TestCpuFlag(kCpuHasSSE2)) { + ScaleRowUp = ScaleRowUp2_Linear_Any_SSE2; + } +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_SSSE3 + if (TestCpuFlag(kCpuHasSSSE3)) { + ScaleRowUp = ScaleRowUp2_Linear_Any_SSSE3; + } +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_AVX2 + if (TestCpuFlag(kCpuHasAVX2)) { + ScaleRowUp = ScaleRowUp2_Linear_Any_AVX2; + } +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_NEON + if (TestCpuFlag(kCpuHasNEON)) { + ScaleRowUp = ScaleRowUp2_Linear_Any_NEON; + } +#endif + + if (dst_height == 1) { + ScaleRowUp(src_ptr + ((src_height - 1) / 2) * (int64_t)src_stride, dst_ptr, + dst_width); + } else { + dy = FixedDiv(src_height - 1, dst_height - 1); + y = (1 << 15) - 1; + for (i = 0; i < dst_height; ++i) { + ScaleRowUp(src_ptr + (y >> 16) * (int64_t)src_stride, dst_ptr, dst_width); + dst_ptr += dst_stride; + y += dy; + } + } +} + +// Scale plane, up by 2 times. +// This is an optimized version for scaling up a plane to 2 times of +// its original size, using bilinear interpolation. +// This is used to scale U and V planes of I420 to I444. +void ScalePlaneUp2_Bilinear(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint8_t* src_ptr, + uint8_t* dst_ptr) { + void (*Scale2RowUp)(const uint8_t* src_ptr, ptrdiff_t src_stride, + uint8_t* dst_ptr, ptrdiff_t dst_stride, int dst_width) = + ScaleRowUp2_Bilinear_Any_C; + int x; + + // This function can only scale up by 2 times. + assert(src_width == ((dst_width + 1) / 2)); + assert(src_height == ((dst_height + 1) / 2)); + +#ifdef HAS_SCALEROWUP2_BILINEAR_SSE2 + if (TestCpuFlag(kCpuHasSSE2)) { + Scale2RowUp = ScaleRowUp2_Bilinear_Any_SSE2; + } +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_SSSE3 + if (TestCpuFlag(kCpuHasSSSE3)) { + Scale2RowUp = ScaleRowUp2_Bilinear_Any_SSSE3; + } +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_AVX2 + if (TestCpuFlag(kCpuHasAVX2)) { + Scale2RowUp = ScaleRowUp2_Bilinear_Any_AVX2; + } +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_NEON + if (TestCpuFlag(kCpuHasNEON)) { + Scale2RowUp = ScaleRowUp2_Bilinear_Any_NEON; + } +#endif + + Scale2RowUp(src_ptr, 0, dst_ptr, 0, dst_width); + dst_ptr += dst_stride; + for (x = 0; x < src_height - 1; ++x) { + Scale2RowUp(src_ptr, src_stride, dst_ptr, dst_stride, dst_width); + src_ptr += src_stride; + // TODO(fbarchard): Test performance of writing one row of destination at a + // time. + dst_ptr += 2 * dst_stride; + } + if (!(dst_height & 1)) { + Scale2RowUp(src_ptr, 0, dst_ptr, 0, dst_width); + } +} + +// Scale at most 14 bit plane, horizontally up by 2 times. +// This is an optimized version for scaling up a plane to 2 times of +// its original width, using linear interpolation. +// stride is in count of uint16_t. +// This is used to scale U and V planes of I210 to I410 and I212 to I412. +void ScalePlaneUp2_12_Linear(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint16_t* src_ptr, + uint16_t* dst_ptr) { + void (*ScaleRowUp)(const uint16_t* src_ptr, uint16_t* dst_ptr, + int dst_width) = ScaleRowUp2_Linear_16_Any_C; + int i; + int y; + int dy; + + // This function can only scale up by 2 times horizontally. 
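Editor's note: ScalePlaneUp2_Linear and ScalePlaneUp2_Bilinear above double the sample count per row. Each output pair straddles two source samples at 1/4 and 3/4 phase, which is where the 3:1 weights with +2 rounding come from (the same weights appear, applied vertically, in the SU2BLANY boundary code later in this patch). A hedged reference version of the horizontal kernel, interior samples only and assumed to match ScaleRowUp2_Linear_C in spirit; the function name here is illustrative and edge samples are the Any wrapper's job:

#include <stdint.h>

/* dst holds 2*src_width samples.
 * dst[2i]   sits 1/4 of the way from src[i] to src[i+1] -> (3*a + b + 2) >> 2
 * dst[2i+1] sits 3/4 of the way                         -> (a + 3*b + 2) >> 2
 */
void RowUp2LinearRef(const uint8_t* src, uint8_t* dst, int src_width) {
  for (int i = 0; i + 1 < src_width; ++i) {
    int a = src[i], b = src[i + 1];
    dst[2 * i]     = (uint8_t)((3 * a + b + 2) >> 2);
    dst[2 * i + 1] = (uint8_t)((a + 3 * b + 2) >> 2);
  }
}

The bilinear variant applies the same weighting vertically as well, which is why its driver emits two destination rows per pass and advances dst_ptr by 2 * dst_stride.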
+ assert(src_width == ((dst_width + 1) / 2)); + +#ifdef HAS_SCALEROWUP2_LINEAR_12_SSSE3 + if (TestCpuFlag(kCpuHasSSSE3)) { + ScaleRowUp = ScaleRowUp2_Linear_12_Any_SSSE3; + } +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_12_AVX2 + if (TestCpuFlag(kCpuHasAVX2)) { + ScaleRowUp = ScaleRowUp2_Linear_12_Any_AVX2; + } +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_12_NEON + if (TestCpuFlag(kCpuHasNEON)) { + ScaleRowUp = ScaleRowUp2_Linear_12_Any_NEON; + } +#endif + + if (dst_height == 1) { + ScaleRowUp(src_ptr + ((src_height - 1) / 2) * (int64_t)src_stride, dst_ptr, + dst_width); + } else { + dy = FixedDiv(src_height - 1, dst_height - 1); + y = (1 << 15) - 1; + for (i = 0; i < dst_height; ++i) { + ScaleRowUp(src_ptr + (y >> 16) * (int64_t)src_stride, dst_ptr, dst_width); + dst_ptr += dst_stride; + y += dy; + } + } +} + +// Scale at most 12 bit plane, up by 2 times. +// This is an optimized version for scaling up a plane to 2 times of +// its original size, using bilinear interpolation. +// stride is in count of uint16_t. +// This is used to scale U and V planes of I010 to I410 and I012 to I412. +void ScalePlaneUp2_12_Bilinear(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint16_t* src_ptr, + uint16_t* dst_ptr) { + void (*Scale2RowUp)(const uint16_t* src_ptr, ptrdiff_t src_stride, + uint16_t* dst_ptr, ptrdiff_t dst_stride, int dst_width) = + ScaleRowUp2_Bilinear_16_Any_C; + int x; + + // This function can only scale up by 2 times. + assert(src_width == ((dst_width + 1) / 2)); + assert(src_height == ((dst_height + 1) / 2)); + +#ifdef HAS_SCALEROWUP2_BILINEAR_12_SSSE3 + if (TestCpuFlag(kCpuHasSSSE3)) { + Scale2RowUp = ScaleRowUp2_Bilinear_12_Any_SSSE3; + } +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_12_AVX2 + if (TestCpuFlag(kCpuHasAVX2)) { + Scale2RowUp = ScaleRowUp2_Bilinear_12_Any_AVX2; + } +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_12_NEON + if (TestCpuFlag(kCpuHasNEON)) { + Scale2RowUp = ScaleRowUp2_Bilinear_12_Any_NEON; + } +#endif + + Scale2RowUp(src_ptr, 0, dst_ptr, 0, dst_width); + dst_ptr += dst_stride; + for (x = 0; x < src_height - 1; ++x) { + Scale2RowUp(src_ptr, src_stride, dst_ptr, dst_stride, dst_width); + src_ptr += src_stride; + dst_ptr += 2 * dst_stride; + } + if (!(dst_height & 1)) { + Scale2RowUp(src_ptr, 0, dst_ptr, 0, dst_width); + } +} + +void ScalePlaneUp2_16_Linear(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint16_t* src_ptr, + uint16_t* dst_ptr) { + void (*ScaleRowUp)(const uint16_t* src_ptr, uint16_t* dst_ptr, + int dst_width) = ScaleRowUp2_Linear_16_Any_C; + int i; + int y; + int dy; + + // This function can only scale up by 2 times horizontally. 
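Editor's note: the bit-depth limits called out in the comments above ("at most 14 bit" for the linear path, "at most 12 bit" for the bilinear one) follow from lane headroom, assuming the SIMD kernels keep the weighted sums in unsigned 16-bit lanes: the linear weights sum to 4 and the separable bilinear weights to 16. The worst cases, checked at compile time:

/* linear   : 3*a + b + 2              -> at most 4*16383 + 2  for 14-bit input
 * bilinear : 9*a + 3*b + 3*c + d + 8  -> at most 16*4095 + 8  for 12-bit input
 */
_Static_assert(4 * 16383 + 2 < 65536, "14-bit input fits the linear sum");
_Static_assert(16 * 4095 + 8 < 65536, "12-bit input fits the bilinear sum");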
+ assert(src_width == ((dst_width + 1) / 2)); + +#ifdef HAS_SCALEROWUP2_LINEAR_16_SSE2 + if (TestCpuFlag(kCpuHasSSE2)) { + ScaleRowUp = ScaleRowUp2_Linear_16_Any_SSE2; + } +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_16_AVX2 + if (TestCpuFlag(kCpuHasAVX2)) { + ScaleRowUp = ScaleRowUp2_Linear_16_Any_AVX2; + } +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_16_NEON + if (TestCpuFlag(kCpuHasNEON)) { + ScaleRowUp = ScaleRowUp2_Linear_16_Any_NEON; + } +#endif + + if (dst_height == 1) { + ScaleRowUp(src_ptr + ((src_height - 1) / 2) * (int64_t)src_stride, dst_ptr, + dst_width); + } else { + dy = FixedDiv(src_height - 1, dst_height - 1); + y = (1 << 15) - 1; + for (i = 0; i < dst_height; ++i) { + ScaleRowUp(src_ptr + (y >> 16) * (int64_t)src_stride, dst_ptr, dst_width); + dst_ptr += dst_stride; + y += dy; + } + } +} + +void ScalePlaneUp2_16_Bilinear(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint16_t* src_ptr, + uint16_t* dst_ptr) { + void (*Scale2RowUp)(const uint16_t* src_ptr, ptrdiff_t src_stride, + uint16_t* dst_ptr, ptrdiff_t dst_stride, int dst_width) = + ScaleRowUp2_Bilinear_16_Any_C; + int x; + + // This function can only scale up by 2 times. + assert(src_width == ((dst_width + 1) / 2)); + assert(src_height == ((dst_height + 1) / 2)); + +#ifdef HAS_SCALEROWUP2_BILINEAR_16_SSE2 + if (TestCpuFlag(kCpuHasSSE2)) { + Scale2RowUp = ScaleRowUp2_Bilinear_16_Any_SSE2; + } +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_16_AVX2 + if (TestCpuFlag(kCpuHasAVX2)) { + Scale2RowUp = ScaleRowUp2_Bilinear_16_Any_AVX2; + } +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_16_NEON + if (TestCpuFlag(kCpuHasNEON)) { + Scale2RowUp = ScaleRowUp2_Bilinear_16_Any_NEON; + } +#endif + + Scale2RowUp(src_ptr, 0, dst_ptr, 0, dst_width); + dst_ptr += dst_stride; + for (x = 0; x < src_height - 1; ++x) { + Scale2RowUp(src_ptr, src_stride, dst_ptr, dst_stride, dst_width); + src_ptr += src_stride; + dst_ptr += 2 * dst_stride; + } + if (!(dst_height & 1)) { + Scale2RowUp(src_ptr, 0, dst_ptr, 0, dst_width); + } +} + void ScalePlaneBilinearUp_16(int src_width, int src_height, int dst_width, @@ -1364,7 +1756,7 @@ void ScalePlaneBilinearUp_16(int src_width, #if defined(HAS_INTERPOLATEROW_16_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { - InterpolateRow = InterpolateRow_Any_16_SSE2; + InterpolateRow = InterpolateRow_16_Any_SSE2; if (IS_ALIGNED(dst_width, 16)) { InterpolateRow = InterpolateRow_16_SSE2; } @@ -1372,7 +1764,7 @@ void ScalePlaneBilinearUp_16(int src_width, #endif #if defined(HAS_INTERPOLATEROW_16_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - InterpolateRow = InterpolateRow_Any_16_SSSE3; + InterpolateRow = InterpolateRow_16_Any_SSSE3; if (IS_ALIGNED(dst_width, 16)) { InterpolateRow = InterpolateRow_16_SSSE3; } @@ -1380,7 +1772,7 @@ void ScalePlaneBilinearUp_16(int src_width, #endif #if defined(HAS_INTERPOLATEROW_16_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - InterpolateRow = InterpolateRow_Any_16_AVX2; + InterpolateRow = InterpolateRow_16_Any_AVX2; if (IS_ALIGNED(dst_width, 32)) { InterpolateRow = InterpolateRow_16_AVX2; } @@ -1388,7 +1780,7 @@ void ScalePlaneBilinearUp_16(int src_width, #endif #if defined(HAS_INTERPOLATEROW_16_NEON) if (TestCpuFlag(kCpuHasNEON)) { - InterpolateRow = InterpolateRow_Any_16_NEON; + InterpolateRow = InterpolateRow_16_Any_NEON; if (IS_ALIGNED(dst_width, 16)) { InterpolateRow = InterpolateRow_16_NEON; } @@ -1409,27 +1801,21 @@ void ScalePlaneBilinearUp_16(int src_width, if (TestCpuFlag(kCpuHasSSE2) && IS_ALIGNED(dst_width, 8)) { ScaleFilterCols = 
ScaleColsUp2_16_SSE2; } -#endif -#if defined(HAS_SCALECOLS_16_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(dst_width, 8)) { - ScaleFilterCols = ScaleColsUp2_16_MMI; - } #endif } - if (y > max_y) { y = max_y; } { int yi = y >> 16; - const uint16_t* src = src_ptr + yi * src_stride; + const uint16_t* src = src_ptr + yi * (int64_t)src_stride; // Allocate 2 row buffers. - const int kRowSize = (dst_width + 31) & ~31; - align_buffer_64(row, kRowSize * 4); + const int row_size = (dst_width + 31) & ~31; + align_buffer_64(row, row_size * 4); uint16_t* rowptr = (uint16_t*)row; - int rowstride = kRowSize; + int rowstride = row_size; int lasty = yi; ScaleFilterCols(rowptr, src, dst_width, x, dx); @@ -1437,7 +1823,9 @@ void ScalePlaneBilinearUp_16(int src_width, src += src_stride; } ScaleFilterCols(rowptr + rowstride, src, dst_width, x, dx); - src += src_stride; + if (src_height > 2) { + src += src_stride; + } for (j = 0; j < dst_height; ++j) { yi = y >> 16; @@ -1445,14 +1833,16 @@ void ScalePlaneBilinearUp_16(int src_width, if (y > max_y) { y = max_y; yi = y >> 16; - src = src_ptr + yi * src_stride; + src = src_ptr + yi * (int64_t)src_stride; } if (yi != lasty) { ScaleFilterCols(rowptr, src, dst_width, x, dx); rowptr += rowstride; rowstride = -rowstride; lasty = yi; - src += src_stride; + if ((y + 65536) < max_y) { + src += src_stride; + } } } if (filtering == kFilterLinear) { @@ -1499,16 +1889,12 @@ static void ScalePlaneSimple(int src_width, if (TestCpuFlag(kCpuHasSSE2) && IS_ALIGNED(dst_width, 8)) { ScaleCols = ScaleColsUp2_SSE2; } -#endif -#if defined(HAS_SCALECOLS_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(dst_width, 8)) { - ScaleCols = ScaleColsUp2_MMI; - } #endif } for (i = 0; i < dst_height; ++i) { - ScaleCols(dst_ptr, src_ptr + (y >> 16) * src_stride, dst_width, x, dx); + ScaleCols(dst_ptr, src_ptr + (y >> 16) * (int64_t)src_stride, dst_width, x, + dx); dst_ptr += dst_stride; y += dy; } @@ -1540,16 +1926,12 @@ static void ScalePlaneSimple_16(int src_width, if (TestCpuFlag(kCpuHasSSE2) && IS_ALIGNED(dst_width, 8)) { ScaleCols = ScaleColsUp2_16_SSE2; } -#endif -#if defined(HAS_SCALECOLS_16_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(dst_width, 8)) { - ScaleCols = ScaleColsUp2_16_MMI; - } #endif } for (i = 0; i < dst_height; ++i) { - ScaleCols(dst_ptr, src_ptr + (y >> 16) * src_stride, dst_width, x, dx); + ScaleCols(dst_ptr, src_ptr + (y >> 16) * (int64_t)src_stride, dst_width, x, + dx); dst_ptr += dst_stride; y += dy; } @@ -1557,7 +1939,6 @@ static void ScalePlaneSimple_16(int src_width, // Scale a plane. // This function dispatches to a specialized scaler based on scale factor. - LIBYUV_API void ScalePlane(const uint8_t* src, int src_stride, @@ -1575,10 +1956,9 @@ void ScalePlane(const uint8_t* src, // Negative height means invert the image. if (src_height < 0) { src_height = -src_height; - src = src + (src_height - 1) * src_stride; + src = src + (src_height - 1) * (int64_t)src_stride; src_stride = -src_stride; } - // Use specialized scales to improve performance for common resolutions. // For example, all the 1/2 scalings will use ScalePlaneDown2() if (dst_width == src_width && dst_height == src_height) { @@ -1587,10 +1967,19 @@ void ScalePlane(const uint8_t* src, return; } if (dst_width == src_width && filtering != kFilterBox) { - int dy = FixedDiv(src_height, dst_height); + int dy = 0; + int y = 0; + // When scaling down, use the center 2 rows to filter. + // When scaling up, last row of destination uses the last 2 source rows. 
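Editor's note: CENTERSTART, used in the branch that follows, offsets the initial 16.16 source position by half a step so the 2-tap vertical filter is centred rather than biased toward the top row. For an exact 2:1 vertical reduction the numbers work out as below (a small sketch; the expansion of CENTERSTART(dy, -32768) for positive dy is written out inline):

#include <stdint.h>
#include <stdio.h>

int main(void) {
  int src_height = 1080, dst_height = 540;
  int dy = (int)(((int64_t)src_height << 16) / dst_height); /* 2.0 -> 0x20000 */
  int y = (dy >> 1) - 32768;      /* CENTERSTART(dy, -32768): 0.5 -> 0x8000 */
  printf("step %.2f rows, first tap at source row %.2f\n",
         dy / 65536.0, y / 65536.0);
  /* Destination row 0 therefore blends source rows 0 and 1 with equal weight. */
  return 0;
}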
+ if (dst_height <= src_height) { + dy = FixedDiv(src_height, dst_height); + y = CENTERSTART(dy, -32768); // Subtract 0.5 (32768) to center filter. + } else if (src_height > 1 && dst_height > 1) { + dy = FixedDiv1(src_height, dst_height); + } // Arbitrary scale vertically, but unscaled horizontally. ScalePlaneVertical(src_height, dst_width, dst_height, src_stride, - dst_stride, src, dst, 0, 0, dy, 1, filtering); + dst_stride, src, dst, 0, y, dy, /*bpp=*/1, filtering); return; } if (dst_width <= Abs(src_width) && dst_height <= src_height) { @@ -1627,6 +2016,17 @@ void ScalePlane(const uint8_t* src, dst_stride, src, dst); return; } + if ((dst_width + 1) / 2 == src_width && filtering == kFilterLinear) { + ScalePlaneUp2_Linear(src_width, src_height, dst_width, dst_height, + src_stride, dst_stride, src, dst); + return; + } + if ((dst_height + 1) / 2 == src_height && (dst_width + 1) / 2 == src_width && + (filtering == kFilterBilinear || filtering == kFilterBox)) { + ScalePlaneUp2_Bilinear(src_width, src_height, dst_width, dst_height, + src_stride, dst_stride, src, dst); + return; + } if (filtering && dst_height > src_height) { ScalePlaneBilinearUp(src_width, src_height, dst_width, dst_height, src_stride, dst_stride, src, dst, filtering); @@ -1658,10 +2058,9 @@ void ScalePlane_16(const uint16_t* src, // Negative height means invert the image. if (src_height < 0) { src_height = -src_height; - src = src + (src_height - 1) * src_stride; + src = src + (src_height - 1) * (int64_t)src_stride; src_stride = -src_stride; } - // Use specialized scales to improve performance for common resolutions. // For example, all the 1/2 scalings will use ScalePlaneDown2() if (dst_width == src_width && dst_height == src_height) { @@ -1670,10 +2069,22 @@ void ScalePlane_16(const uint16_t* src, return; } if (dst_width == src_width && filtering != kFilterBox) { - int dy = FixedDiv(src_height, dst_height); + int dy = 0; + int y = 0; + // When scaling down, use the center 2 rows to filter. + // When scaling up, last row of destination uses the last 2 source rows. + if (dst_height <= src_height) { + dy = FixedDiv(src_height, dst_height); + y = CENTERSTART(dy, -32768); // Subtract 0.5 (32768) to center filter. + // When scaling up, ensure the last row of destination uses the last + // source. Avoid divide by zero for dst_height but will do no scaling + // later. + } else if (src_height > 1 && dst_height > 1) { + dy = FixedDiv1(src_height, dst_height); + } // Arbitrary scale vertically, but unscaled horizontally. 
ScalePlaneVertical_16(src_height, dst_width, dst_height, src_stride, - dst_stride, src, dst, 0, 0, dy, 1, filtering); + dst_stride, src, dst, 0, y, dy, /*bpp=*/1, filtering); return; } if (dst_width <= Abs(src_width) && dst_height <= src_height) { @@ -1710,6 +2121,17 @@ void ScalePlane_16(const uint16_t* src, dst_stride, src, dst); return; } + if ((dst_width + 1) / 2 == src_width && filtering == kFilterLinear) { + ScalePlaneUp2_16_Linear(src_width, src_height, dst_width, dst_height, + src_stride, dst_stride, src, dst); + return; + } + if ((dst_height + 1) / 2 == src_height && (dst_width + 1) / 2 == src_width && + (filtering == kFilterBilinear || filtering == kFilterBox)) { + ScalePlaneUp2_16_Bilinear(src_width, src_height, dst_width, dst_height, + src_stride, dst_stride, src, dst); + return; + } if (filtering && dst_height > src_height) { ScalePlaneBilinearUp_16(src_width, src_height, dst_width, dst_height, src_stride, dst_stride, src, dst, filtering); @@ -1724,6 +2146,43 @@ void ScalePlane_16(const uint16_t* src, dst_stride, src, dst); } +LIBYUV_API +void ScalePlane_12(const uint16_t* src, + int src_stride, + int src_width, + int src_height, + uint16_t* dst, + int dst_stride, + int dst_width, + int dst_height, + enum FilterMode filtering) { + // Simplify filtering when possible. + filtering = ScaleFilterReduce(src_width, src_height, dst_width, dst_height, + filtering); + + // Negative height means invert the image. + if (src_height < 0) { + src_height = -src_height; + src = src + (src_height - 1) * (int64_t)src_stride; + src_stride = -src_stride; + } + + if ((dst_width + 1) / 2 == src_width && filtering == kFilterLinear) { + ScalePlaneUp2_12_Linear(src_width, src_height, dst_width, dst_height, + src_stride, dst_stride, src, dst); + return; + } + if ((dst_height + 1) / 2 == src_height && (dst_width + 1) / 2 == src_width && + (filtering == kFilterBilinear || filtering == kFilterBox)) { + ScalePlaneUp2_12_Bilinear(src_width, src_height, dst_width, dst_height, + src_stride, dst_stride, src, dst); + return; + } + + ScalePlane_16(src, src_stride, src_width, src_height, dst, dst_stride, + dst_width, dst_height, filtering); +} + // Scale an I420 image. // This function in turn calls a scaling function for each plane. 
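Editor's note: ScalePlane_12 above inherits the same negative-height convention as ScalePlane and ScalePlane_16: a negative src_height means the caller wants the plane consumed flipped, which is handled by pointing at the last row and negating the stride so every later `src += src_stride` walks upward. A hypothetical helper mirroring that inline handling:

#include <stdint.h>

const uint8_t* FlipIfNegative(const uint8_t* src, int* src_stride,
                              int* src_height) {
  if (*src_height < 0) {
    *src_height = -*src_height;
    src += (*src_height - 1) * (int64_t)(*src_stride);  /* last row */
    *src_stride = -*src_stride;                         /* walk upward */
  }
  return src;
}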
@@ -1749,7 +2208,8 @@ int I420Scale(const uint8_t* src_y, int src_halfheight = SUBSAMPLE(src_height, 1, 1); int dst_halfwidth = SUBSAMPLE(dst_width, 1, 1); int dst_halfheight = SUBSAMPLE(dst_height, 1, 1); - if (!src_y || !src_u || !src_v || src_width == 0 || src_height == 0 || + + if (!src_y || !src_u || !src_v || src_width <= 0 || src_height == 0 || src_width > 32768 || src_height > 32768 || !dst_y || !dst_u || !dst_v || dst_width <= 0 || dst_height <= 0) { return -1; @@ -1786,7 +2246,8 @@ int I420Scale_16(const uint16_t* src_y, int src_halfheight = SUBSAMPLE(src_height, 1, 1); int dst_halfwidth = SUBSAMPLE(dst_width, 1, 1); int dst_halfheight = SUBSAMPLE(dst_height, 1, 1); - if (!src_y || !src_u || !src_v || src_width == 0 || src_height == 0 || + + if (!src_y || !src_u || !src_v || src_width <= 0 || src_height == 0 || src_width > 32768 || src_height > 32768 || !dst_y || !dst_u || !dst_v || dst_width <= 0 || dst_height <= 0) { return -1; @@ -1801,6 +2262,44 @@ int I420Scale_16(const uint16_t* src_y, return 0; } +LIBYUV_API +int I420Scale_12(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + enum FilterMode filtering) { + int src_halfwidth = SUBSAMPLE(src_width, 1, 1); + int src_halfheight = SUBSAMPLE(src_height, 1, 1); + int dst_halfwidth = SUBSAMPLE(dst_width, 1, 1); + int dst_halfheight = SUBSAMPLE(dst_height, 1, 1); + + if (!src_y || !src_u || !src_v || src_width <= 0 || src_height == 0 || + src_width > 32768 || src_height > 32768 || !dst_y || !dst_u || !dst_v || + dst_width <= 0 || dst_height <= 0) { + return -1; + } + + ScalePlane_12(src_y, src_stride_y, src_width, src_height, dst_y, dst_stride_y, + dst_width, dst_height, filtering); + ScalePlane_12(src_u, src_stride_u, src_halfwidth, src_halfheight, dst_u, + dst_stride_u, dst_halfwidth, dst_halfheight, filtering); + ScalePlane_12(src_v, src_stride_v, src_halfwidth, src_halfheight, dst_v, + dst_stride_v, dst_halfwidth, dst_halfheight, filtering); + return 0; +} + // Scale an I444 image. // This function in turn calls a scaling function for each plane. 
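Editor's note: the validation in I420Scale, I420Scale_16 and the new I420Scale_12 rejects dimensions above 32768 while still allowing a negative (flipped) source height. The limit lines up with the 16.16 fixed-point coordinates used for stepping: a width of 32768 already pushes the end position to 2^31, which is also why the SIMD column scalers in these hunks are gated on src_width < 32768 and a 64-bit _C fallback handles wider sources. A quick check of the bound:

#include <stdint.h>
#include <stdio.h>

int main(void) {
  int64_t end_pos = (int64_t)32768 << 16;  /* 2147483648 */
  printf("%lld > INT32_MAX (%d)\n", (long long)end_pos, INT32_MAX);
  return 0;
}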
@@ -1822,7 +2321,7 @@ int I444Scale(const uint8_t* src_y, int dst_width, int dst_height, enum FilterMode filtering) { - if (!src_y || !src_u || !src_v || src_width == 0 || src_height == 0 || + if (!src_y || !src_u || !src_v || src_width <= 0 || src_height == 0 || src_width > 32768 || src_height > 32768 || !dst_y || !dst_u || !dst_v || dst_width <= 0 || dst_height <= 0) { return -1; @@ -1855,7 +2354,7 @@ int I444Scale_16(const uint16_t* src_y, int dst_width, int dst_height, enum FilterMode filtering) { - if (!src_y || !src_u || !src_v || src_width == 0 || src_height == 0 || + if (!src_y || !src_u || !src_v || src_width <= 0 || src_height == 0 || src_width > 32768 || src_height > 32768 || !dst_y || !dst_u || !dst_v || dst_width <= 0 || dst_height <= 0) { return -1; @@ -1870,6 +2369,150 @@ int I444Scale_16(const uint16_t* src_y, return 0; } +LIBYUV_API +int I444Scale_12(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + enum FilterMode filtering) { + if (!src_y || !src_u || !src_v || src_width <= 0 || src_height == 0 || + src_width > 32768 || src_height > 32768 || !dst_y || !dst_u || !dst_v || + dst_width <= 0 || dst_height <= 0) { + return -1; + } + + ScalePlane_12(src_y, src_stride_y, src_width, src_height, dst_y, dst_stride_y, + dst_width, dst_height, filtering); + ScalePlane_12(src_u, src_stride_u, src_width, src_height, dst_u, dst_stride_u, + dst_width, dst_height, filtering); + ScalePlane_12(src_v, src_stride_v, src_width, src_height, dst_v, dst_stride_v, + dst_width, dst_height, filtering); + return 0; +} + +// Scale an I422 image. +// This function in turn calls a scaling function for each plane. 
+ +LIBYUV_API +int I422Scale(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + enum FilterMode filtering) { + int src_halfwidth = SUBSAMPLE(src_width, 1, 1); + int dst_halfwidth = SUBSAMPLE(dst_width, 1, 1); + + if (!src_y || !src_u || !src_v || src_width <= 0 || src_height == 0 || + src_width > 32768 || src_height > 32768 || !dst_y || !dst_u || !dst_v || + dst_width <= 0 || dst_height <= 0) { + return -1; + } + + ScalePlane(src_y, src_stride_y, src_width, src_height, dst_y, dst_stride_y, + dst_width, dst_height, filtering); + ScalePlane(src_u, src_stride_u, src_halfwidth, src_height, dst_u, + dst_stride_u, dst_halfwidth, dst_height, filtering); + ScalePlane(src_v, src_stride_v, src_halfwidth, src_height, dst_v, + dst_stride_v, dst_halfwidth, dst_height, filtering); + return 0; +} + +LIBYUV_API +int I422Scale_16(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + enum FilterMode filtering) { + int src_halfwidth = SUBSAMPLE(src_width, 1, 1); + int dst_halfwidth = SUBSAMPLE(dst_width, 1, 1); + + if (!src_y || !src_u || !src_v || src_width <= 0 || src_height == 0 || + src_width > 32768 || src_height > 32768 || !dst_y || !dst_u || !dst_v || + dst_width <= 0 || dst_height <= 0) { + return -1; + } + + ScalePlane_16(src_y, src_stride_y, src_width, src_height, dst_y, dst_stride_y, + dst_width, dst_height, filtering); + ScalePlane_16(src_u, src_stride_u, src_halfwidth, src_height, dst_u, + dst_stride_u, dst_halfwidth, dst_height, filtering); + ScalePlane_16(src_v, src_stride_v, src_halfwidth, src_height, dst_v, + dst_stride_v, dst_halfwidth, dst_height, filtering); + return 0; +} + +LIBYUV_API +int I422Scale_12(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + enum FilterMode filtering) { + int src_halfwidth = SUBSAMPLE(src_width, 1, 1); + int dst_halfwidth = SUBSAMPLE(dst_width, 1, 1); + + if (!src_y || !src_u || !src_v || src_width <= 0 || src_height == 0 || + src_width > 32768 || src_height > 32768 || !dst_y || !dst_u || !dst_v || + dst_width <= 0 || dst_height <= 0) { + return -1; + } + + ScalePlane_12(src_y, src_stride_y, src_width, src_height, dst_y, dst_stride_y, + dst_width, dst_height, filtering); + ScalePlane_12(src_u, src_stride_u, src_halfwidth, src_height, dst_u, + dst_stride_u, dst_halfwidth, dst_height, filtering); + ScalePlane_12(src_v, src_stride_v, src_halfwidth, src_height, dst_v, + dst_stride_v, dst_halfwidth, dst_height, filtering); + return 0; +} + // Scale an NV12 image. // This function in turn calls a scaling function for each plane. 
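Editor's note: NV12Scale below, like the planar entry points above, derives chroma plane sizes with SUBSAMPLE(v, 1, 1), which rounds half resolution up and keeps the sign for flipped (negative) heights. A quick check of that rounding, with the macro re-parenthesised so it stands alone:

#include <stdio.h>

#define SUBSAMPLE(v, a, s) \
  ((v) < 0 ? -((-(v) + (a)) >> (s)) : ((v) + (a)) >> (s))

int main(void) {
  printf("%d\n", SUBSAMPLE(1920, 1, 1));   /* 960                       */
  printf("%d\n", SUBSAMPLE(1919, 1, 1));   /* 960: odd widths round up  */
  printf("%d\n", SUBSAMPLE(-1080, 1, 1));  /* -540: sign kept for flips */
  return 0;
}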
@@ -1891,7 +2534,8 @@ int NV12Scale(const uint8_t* src_y, int src_halfheight = SUBSAMPLE(src_height, 1, 1); int dst_halfwidth = SUBSAMPLE(dst_width, 1, 1); int dst_halfheight = SUBSAMPLE(dst_height, 1, 1); - if (!src_y || !src_uv || src_width == 0 || src_height == 0 || + + if (!src_y || !src_uv || src_width <= 0 || src_height == 0 || src_width > 32768 || src_height > 32768 || !dst_y || !dst_uv || dst_width <= 0 || dst_height <= 0) { return -1; diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_any.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_any.cc index c93d70c5fc..317041f806 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale_any.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_any.cc @@ -76,7 +76,7 @@ SDANY(ScaleUVRowDown2Box_Any_SSSE3, ScaleUVRowDown2Box_C, 2, 2, - 4) + 3) #endif #ifdef HAS_SCALEUVROWDOWN2BOX_AVX2 SDANY(ScaleUVRowDown2Box_Any_AVX2, @@ -84,7 +84,7 @@ SDANY(ScaleUVRowDown2Box_Any_AVX2, ScaleUVRowDown2Box_C, 2, 2, - 8) + 7) #endif #ifdef HAS_SCALEROWDOWN2_AVX2 SDANY(ScaleRowDown2_Any_AVX2, ScaleRowDown2_AVX2, ScaleRowDown2_C, 2, 1, 31) @@ -134,7 +134,7 @@ SDANY(ScaleUVRowDown2Box_Any_NEON, ScaleUVRowDown2Box_C, 2, 2, - 8) + 7) #endif #ifdef HAS_SCALEROWDOWN2_MSA @@ -152,26 +152,20 @@ SDANY(ScaleRowDown2Box_Any_MSA, 1, 31) #endif -#ifdef HAS_SCALEROWDOWN2_MMI -SDANY(ScaleRowDown2_Any_MMI, ScaleRowDown2_MMI, ScaleRowDown2_C, 2, 1, 7) -SDANY(ScaleRowDown2Linear_Any_MMI, - ScaleRowDown2Linear_MMI, +#ifdef HAS_SCALEROWDOWN2_LSX +SDANY(ScaleRowDown2_Any_LSX, ScaleRowDown2_LSX, ScaleRowDown2_C, 2, 1, 31) +SDANY(ScaleRowDown2Linear_Any_LSX, + ScaleRowDown2Linear_LSX, ScaleRowDown2Linear_C, 2, 1, - 7) -SDANY(ScaleRowDown2Box_Any_MMI, - ScaleRowDown2Box_MMI, + 31) +SDANY(ScaleRowDown2Box_Any_LSX, + ScaleRowDown2Box_LSX, ScaleRowDown2Box_C, 2, 1, - 7) -SDODD(ScaleRowDown2Box_Odd_MMI, - ScaleRowDown2Box_MMI, - ScaleRowDown2Box_Odd_C, - 2, - 1, - 7) + 31) #endif #ifdef HAS_SCALEROWDOWN4_SSSE3 SDANY(ScaleRowDown4_Any_SSSE3, ScaleRowDown4_SSSE3, ScaleRowDown4_C, 4, 1, 7) @@ -209,14 +203,14 @@ SDANY(ScaleRowDown4Box_Any_MSA, 1, 15) #endif -#ifdef HAS_SCALEROWDOWN4_MMI -SDANY(ScaleRowDown4_Any_MMI, ScaleRowDown4_MMI, ScaleRowDown4_C, 4, 1, 7) -SDANY(ScaleRowDown4Box_Any_MMI, - ScaleRowDown4Box_MMI, +#ifdef HAS_SCALEROWDOWN4_LSX +SDANY(ScaleRowDown4_Any_LSX, ScaleRowDown4_LSX, ScaleRowDown4_C, 4, 1, 15) +SDANY(ScaleRowDown4Box_Any_LSX, + ScaleRowDown4Box_LSX, ScaleRowDown4Box_C, 4, 1, - 7) + 15) #endif #ifdef HAS_SCALEROWDOWN34_SSSE3 SDANY(ScaleRowDown34_Any_SSSE3, @@ -278,13 +272,25 @@ SDANY(ScaleRowDown34_1_Box_Any_MSA, 1, 47) #endif -#ifdef HAS_SCALEROWDOWN34_MMI -SDANY(ScaleRowDown34_Any_MMI, - ScaleRowDown34_MMI, +#ifdef HAS_SCALEROWDOWN34_LSX +SDANY(ScaleRowDown34_Any_LSX, + ScaleRowDown34_LSX, ScaleRowDown34_C, 4 / 3, 1, - 23) + 47) +SDANY(ScaleRowDown34_0_Box_Any_LSX, + ScaleRowDown34_0_Box_LSX, + ScaleRowDown34_0_Box_C, + 4 / 3, + 1, + 47) +SDANY(ScaleRowDown34_1_Box_Any_LSX, + ScaleRowDown34_1_Box_LSX, + ScaleRowDown34_1_Box_C, + 4 / 3, + 1, + 47) #endif #ifdef HAS_SCALEROWDOWN38_SSSE3 SDANY(ScaleRowDown38_Any_SSSE3, @@ -346,6 +352,26 @@ SDANY(ScaleRowDown38_2_Box_Any_MSA, 1, 11) #endif +#ifdef HAS_SCALEROWDOWN38_LSX +SDANY(ScaleRowDown38_Any_LSX, + ScaleRowDown38_LSX, + ScaleRowDown38_C, + 8 / 3, + 1, + 11) +SDANY(ScaleRowDown38_3_Box_Any_LSX, + ScaleRowDown38_3_Box_LSX, + ScaleRowDown38_3_Box_C, + 8 / 3, + 1, + 11) +SDANY(ScaleRowDown38_2_Box_Any_LSX, + ScaleRowDown38_2_Box_LSX, + ScaleRowDown38_2_Box_C, + 8 / 3, + 1, + 11) +#endif 
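Editor's note: every *_Any_* symbol swapped in above (the LSX rows replacing the removed MMI ones) comes from these wrapper macros: run the SIMD kernel on the largest width it can handle in whole registers and let the C kernel mop up the ragged tail, with the trailing macro argument being that granularity minus one (hence 31/15/7/3 for the wider LSX kernels). A simplified sketch of the idea for a /2 downscaler, with hypothetical names:

#include <stdint.h>
#include <stddef.h>

typedef void (*RowDown2Fn)(const uint8_t* src, ptrdiff_t stride,
                           uint8_t* dst, int dst_width);

/* Bulk of the row via SIMD, remainder via C, so any width is accepted. */
void RowDown2Any(const uint8_t* src, ptrdiff_t stride, uint8_t* dst,
                 int dst_width, RowDown2Fn simd, RowDown2Fn c_version,
                 int mask) {
  int r = dst_width & mask;   /* tail the SIMD kernel cannot finish */
  int n = dst_width - r;      /* aligned bulk                       */
  if (n > 0) {
    simd(src, stride, dst, n);
  }
  if (r > 0) {
    c_version(src + 2 * n, stride, dst + n, r);  /* /2: two source px per output */
  }
}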
#ifdef HAS_SCALEARGBROWDOWN2_SSE2 SDANY(ScaleARGBRowDown2_Any_SSE2, @@ -407,25 +433,25 @@ SDANY(ScaleARGBRowDown2Box_Any_MSA, 4, 3) #endif -#ifdef HAS_SCALEARGBROWDOWN2_MMI -SDANY(ScaleARGBRowDown2_Any_MMI, - ScaleARGBRowDown2_MMI, +#ifdef HAS_SCALEARGBROWDOWN2_LSX +SDANY(ScaleARGBRowDown2_Any_LSX, + ScaleARGBRowDown2_LSX, ScaleARGBRowDown2_C, 2, 4, - 1) -SDANY(ScaleARGBRowDown2Linear_Any_MMI, - ScaleARGBRowDown2Linear_MMI, + 3) +SDANY(ScaleARGBRowDown2Linear_Any_LSX, + ScaleARGBRowDown2Linear_LSX, ScaleARGBRowDown2Linear_C, 2, 4, - 1) -SDANY(ScaleARGBRowDown2Box_Any_MMI, - ScaleARGBRowDown2Box_MMI, + 3) +SDANY(ScaleARGBRowDown2Box_Any_LSX, + ScaleARGBRowDown2Box_LSX, ScaleARGBRowDown2Box_C, 2, 4, - 1) + 3) #endif #undef SDANY @@ -478,17 +504,17 @@ SDAANY(ScaleARGBRowDownEvenBox_Any_MSA, 4, 3) #endif -#ifdef HAS_SCALEARGBROWDOWNEVEN_MMI -SDAANY(ScaleARGBRowDownEven_Any_MMI, - ScaleARGBRowDownEven_MMI, +#ifdef HAS_SCALEARGBROWDOWNEVEN_LSX +SDAANY(ScaleARGBRowDownEven_Any_LSX, + ScaleARGBRowDownEven_LSX, ScaleARGBRowDownEven_C, 4, - 1) -SDAANY(ScaleARGBRowDownEvenBox_Any_MMI, - ScaleARGBRowDownEvenBox_MMI, + 3) +SDAANY(ScaleARGBRowDownEvenBox_Any_LSX, + ScaleARGBRowDownEvenBox_LSX, ScaleARGBRowDownEvenBox_C, 4, - 1) + 3) #endif #ifdef HAS_SCALEUVROWDOWNEVEN_NEON SDAANY(ScaleUVRowDownEven_Any_NEON, @@ -530,8 +556,8 @@ SAROW(ScaleAddRow_Any_NEON, ScaleAddRow_NEON, 1, 2, 15) #ifdef HAS_SCALEADDROW_MSA SAROW(ScaleAddRow_Any_MSA, ScaleAddRow_MSA, 1, 2, 15) #endif -#ifdef HAS_SCALEADDROW_MMI -SAROW(ScaleAddRow_Any_MMI, ScaleAddRow_MMI, 1, 2, 7) +#ifdef HAS_SCALEADDROW_LSX +SAROW(ScaleAddRow_Any_LSX, ScaleAddRow_LSX, 1, 2, 15) #endif #undef SAANY @@ -559,8 +585,8 @@ SAANY(ScaleAddRow_Any_NEON, ScaleAddRow_NEON, ScaleAddRow_C, 15) #ifdef HAS_SCALEADDROW_MSA SAANY(ScaleAddRow_Any_MSA, ScaleAddRow_MSA, ScaleAddRow_C, 15) #endif -#ifdef HAS_SCALEADDROW_MMI -SAANY(ScaleAddRow_Any_MMI, ScaleAddRow_MMI, ScaleAddRow_C, 7) +#ifdef HAS_SCALEADDROW_LSX +SAANY(ScaleAddRow_Any_LSX, ScaleAddRow_LSX, ScaleAddRow_C, 15) #endif #undef SAANY @@ -584,14 +610,17 @@ CANY(ScaleFilterCols_Any_NEON, ScaleFilterCols_NEON, ScaleFilterCols_C, 1, 7) #ifdef HAS_SCALEFILTERCOLS_MSA CANY(ScaleFilterCols_Any_MSA, ScaleFilterCols_MSA, ScaleFilterCols_C, 1, 15) #endif +#ifdef HAS_SCALEFILTERCOLS_LSX +CANY(ScaleFilterCols_Any_LSX, ScaleFilterCols_LSX, ScaleFilterCols_C, 1, 15) +#endif #ifdef HAS_SCALEARGBCOLS_NEON CANY(ScaleARGBCols_Any_NEON, ScaleARGBCols_NEON, ScaleARGBCols_C, 4, 7) #endif #ifdef HAS_SCALEARGBCOLS_MSA CANY(ScaleARGBCols_Any_MSA, ScaleARGBCols_MSA, ScaleARGBCols_C, 4, 3) #endif -#ifdef HAS_SCALEARGBCOLS_MMI -CANY(ScaleARGBCols_Any_MMI, ScaleARGBCols_MMI, ScaleARGBCols_C, 4, 0) +#ifdef HAS_SCALEARGBCOLS_LSX +CANY(ScaleARGBCols_Any_LSX, ScaleARGBCols_LSX, ScaleARGBCols_C, 4, 3) #endif #ifdef HAS_SCALEARGBFILTERCOLS_NEON CANY(ScaleARGBFilterCols_Any_NEON, @@ -607,8 +636,426 @@ CANY(ScaleARGBFilterCols_Any_MSA, 4, 7) #endif +#ifdef HAS_SCALEARGBFILTERCOLS_LSX +CANY(ScaleARGBFilterCols_Any_LSX, + ScaleARGBFilterCols_LSX, + ScaleARGBFilterCols_C, + 4, + 7) +#endif #undef CANY +// Scale up horizontally 2 times using linear filter. 
+#define SUH2LANY(NAME, SIMD, C, MASK, PTYPE) \ + void NAME(const PTYPE* src_ptr, PTYPE* dst_ptr, int dst_width) { \ + int work_width = (dst_width - 1) & ~1; \ + int r = work_width & MASK; \ + int n = work_width & ~MASK; \ + dst_ptr[0] = src_ptr[0]; \ + if (work_width > 0) { \ + if (n != 0) { \ + SIMD(src_ptr, dst_ptr + 1, n); \ + } \ + C(src_ptr + (n / 2), dst_ptr + n + 1, r); \ + } \ + dst_ptr[dst_width - 1] = src_ptr[(dst_width - 1) / 2]; \ + } + +// Even the C versions need to be wrapped, because boundary pixels have to +// be handled differently + +SUH2LANY(ScaleRowUp2_Linear_Any_C, + ScaleRowUp2_Linear_C, + ScaleRowUp2_Linear_C, + 0, + uint8_t) + +SUH2LANY(ScaleRowUp2_Linear_16_Any_C, + ScaleRowUp2_Linear_16_C, + ScaleRowUp2_Linear_16_C, + 0, + uint16_t) + +#ifdef HAS_SCALEROWUP2_LINEAR_SSE2 +SUH2LANY(ScaleRowUp2_Linear_Any_SSE2, + ScaleRowUp2_Linear_SSE2, + ScaleRowUp2_Linear_C, + 15, + uint8_t) +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_SSSE3 +SUH2LANY(ScaleRowUp2_Linear_Any_SSSE3, + ScaleRowUp2_Linear_SSSE3, + ScaleRowUp2_Linear_C, + 15, + uint8_t) +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_12_SSSE3 +SUH2LANY(ScaleRowUp2_Linear_12_Any_SSSE3, + ScaleRowUp2_Linear_12_SSSE3, + ScaleRowUp2_Linear_16_C, + 15, + uint16_t) +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_16_SSE2 +SUH2LANY(ScaleRowUp2_Linear_16_Any_SSE2, + ScaleRowUp2_Linear_16_SSE2, + ScaleRowUp2_Linear_16_C, + 7, + uint16_t) +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_AVX2 +SUH2LANY(ScaleRowUp2_Linear_Any_AVX2, + ScaleRowUp2_Linear_AVX2, + ScaleRowUp2_Linear_C, + 31, + uint8_t) +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_12_AVX2 +SUH2LANY(ScaleRowUp2_Linear_12_Any_AVX2, + ScaleRowUp2_Linear_12_AVX2, + ScaleRowUp2_Linear_16_C, + 31, + uint16_t) +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_16_AVX2 +SUH2LANY(ScaleRowUp2_Linear_16_Any_AVX2, + ScaleRowUp2_Linear_16_AVX2, + ScaleRowUp2_Linear_16_C, + 15, + uint16_t) +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_NEON +SUH2LANY(ScaleRowUp2_Linear_Any_NEON, + ScaleRowUp2_Linear_NEON, + ScaleRowUp2_Linear_C, + 15, + uint8_t) +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_12_NEON +SUH2LANY(ScaleRowUp2_Linear_12_Any_NEON, + ScaleRowUp2_Linear_12_NEON, + ScaleRowUp2_Linear_16_C, + 15, + uint16_t) +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_16_NEON +SUH2LANY(ScaleRowUp2_Linear_16_Any_NEON, + ScaleRowUp2_Linear_16_NEON, + ScaleRowUp2_Linear_16_C, + 15, + uint16_t) +#endif + +#undef SUH2LANY + +// Scale up 2 times using bilinear filter. +// This function produces 2 rows at a time. 
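Editor's note: before the bilinear wrapper below, it is worth spelling out what the SUH2LANY wrapper above buys: the first and last outputs are pinned to the nearest source sample instead of interpolating past the ends of the row, and only the even-sized interior is split between the SIMD body and the C tail. A quick trace of those index computations:

#include <stdio.h>

int main(void) {
  int dst_width = 9, mask = 15;           /* e.g. the SSSE3/NEON instantiations     */
  int work_width = (dst_width - 1) & ~1;  /* 8 interior outputs                     */
  int r = work_width & mask;              /* 8 -> all interior goes to the C kernel */
  int n = work_width & ~mask;             /* 0 -> SIMD body skipped for this width  */
  printf("interior=%d simd=%d c_tail=%d\n", work_width, n, r);
  /* dst[0] and dst[8] are then written directly from src[0] and src[4]. */
  return 0;
}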
+#define SU2BLANY(NAME, SIMD, C, MASK, PTYPE) \ + void NAME(const PTYPE* src_ptr, ptrdiff_t src_stride, PTYPE* dst_ptr, \ + ptrdiff_t dst_stride, int dst_width) { \ + int work_width = (dst_width - 1) & ~1; \ + int r = work_width & MASK; \ + int n = work_width & ~MASK; \ + const PTYPE* sa = src_ptr; \ + const PTYPE* sb = src_ptr + src_stride; \ + PTYPE* da = dst_ptr; \ + PTYPE* db = dst_ptr + dst_stride; \ + da[0] = (3 * sa[0] + sb[0] + 2) >> 2; \ + db[0] = (sa[0] + 3 * sb[0] + 2) >> 2; \ + if (work_width > 0) { \ + if (n != 0) { \ + SIMD(sa, sb - sa, da + 1, db - da, n); \ + } \ + C(sa + (n / 2), sb - sa, da + n + 1, db - da, r); \ + } \ + da[dst_width - 1] = \ + (3 * sa[(dst_width - 1) / 2] + sb[(dst_width - 1) / 2] + 2) >> 2; \ + db[dst_width - 1] = \ + (sa[(dst_width - 1) / 2] + 3 * sb[(dst_width - 1) / 2] + 2) >> 2; \ + } + +SU2BLANY(ScaleRowUp2_Bilinear_Any_C, + ScaleRowUp2_Bilinear_C, + ScaleRowUp2_Bilinear_C, + 0, + uint8_t) + +SU2BLANY(ScaleRowUp2_Bilinear_16_Any_C, + ScaleRowUp2_Bilinear_16_C, + ScaleRowUp2_Bilinear_16_C, + 0, + uint16_t) + +#ifdef HAS_SCALEROWUP2_BILINEAR_SSE2 +SU2BLANY(ScaleRowUp2_Bilinear_Any_SSE2, + ScaleRowUp2_Bilinear_SSE2, + ScaleRowUp2_Bilinear_C, + 15, + uint8_t) +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_12_SSSE3 +SU2BLANY(ScaleRowUp2_Bilinear_12_Any_SSSE3, + ScaleRowUp2_Bilinear_12_SSSE3, + ScaleRowUp2_Bilinear_16_C, + 15, + uint16_t) +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_16_SSE2 +SU2BLANY(ScaleRowUp2_Bilinear_16_Any_SSE2, + ScaleRowUp2_Bilinear_16_SSE2, + ScaleRowUp2_Bilinear_16_C, + 7, + uint16_t) +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_SSSE3 +SU2BLANY(ScaleRowUp2_Bilinear_Any_SSSE3, + ScaleRowUp2_Bilinear_SSSE3, + ScaleRowUp2_Bilinear_C, + 15, + uint8_t) +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_AVX2 +SU2BLANY(ScaleRowUp2_Bilinear_Any_AVX2, + ScaleRowUp2_Bilinear_AVX2, + ScaleRowUp2_Bilinear_C, + 31, + uint8_t) +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_12_AVX2 +SU2BLANY(ScaleRowUp2_Bilinear_12_Any_AVX2, + ScaleRowUp2_Bilinear_12_AVX2, + ScaleRowUp2_Bilinear_16_C, + 15, + uint16_t) +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_16_AVX2 +SU2BLANY(ScaleRowUp2_Bilinear_16_Any_AVX2, + ScaleRowUp2_Bilinear_16_AVX2, + ScaleRowUp2_Bilinear_16_C, + 15, + uint16_t) +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_NEON +SU2BLANY(ScaleRowUp2_Bilinear_Any_NEON, + ScaleRowUp2_Bilinear_NEON, + ScaleRowUp2_Bilinear_C, + 15, + uint8_t) +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_12_NEON +SU2BLANY(ScaleRowUp2_Bilinear_12_Any_NEON, + ScaleRowUp2_Bilinear_12_NEON, + ScaleRowUp2_Bilinear_16_C, + 15, + uint16_t) +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_16_NEON +SU2BLANY(ScaleRowUp2_Bilinear_16_Any_NEON, + ScaleRowUp2_Bilinear_16_NEON, + ScaleRowUp2_Bilinear_16_C, + 7, + uint16_t) +#endif + +#undef SU2BLANY + +// Scale bi-planar plane up horizontally 2 times using linear filter. 
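Editor's note: in the SU2BLANY boundary code above, the two output rows produced from one source row pair sit a quarter step above and below the midpoint of the source rows, which is exactly what the asymmetric 3:1 weights encode. With sa[0] = 100 and sb[0] = 200, for example:

#include <stdio.h>

int main(void) {
  int sa = 100, sb = 200;            /* vertically adjacent source samples  */
  int da = (3 * sa + sb + 2) >> 2;   /* upper output row -> 125             */
  int db = (sa + 3 * sb + 2) >> 2;   /* lower output row -> 175             */
  printf("da=%d db=%d\n", da, db);   /* 1/4 and 3/4 of the way from sa to sb */
  return 0;
}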
+#define SBUH2LANY(NAME, SIMD, C, MASK, PTYPE) \ + void NAME(const PTYPE* src_ptr, PTYPE* dst_ptr, int dst_width) { \ + int work_width = (dst_width - 1) & ~1; \ + int r = work_width & MASK; \ + int n = work_width & ~MASK; \ + dst_ptr[0] = src_ptr[0]; \ + dst_ptr[1] = src_ptr[1]; \ + if (work_width > 0) { \ + if (n != 0) { \ + SIMD(src_ptr, dst_ptr + 2, n); \ + } \ + C(src_ptr + n, dst_ptr + 2 * n + 2, r); \ + } \ + dst_ptr[2 * dst_width - 2] = src_ptr[((dst_width + 1) & ~1) - 2]; \ + dst_ptr[2 * dst_width - 1] = src_ptr[((dst_width + 1) & ~1) - 1]; \ + } + +SBUH2LANY(ScaleUVRowUp2_Linear_Any_C, + ScaleUVRowUp2_Linear_C, + ScaleUVRowUp2_Linear_C, + 0, + uint8_t) + +SBUH2LANY(ScaleUVRowUp2_Linear_16_Any_C, + ScaleUVRowUp2_Linear_16_C, + ScaleUVRowUp2_Linear_16_C, + 0, + uint16_t) + +#ifdef HAS_SCALEUVROWUP2_LINEAR_SSSE3 +SBUH2LANY(ScaleUVRowUp2_Linear_Any_SSSE3, + ScaleUVRowUp2_Linear_SSSE3, + ScaleUVRowUp2_Linear_C, + 7, + uint8_t) +#endif + +#ifdef HAS_SCALEUVROWUP2_LINEAR_AVX2 +SBUH2LANY(ScaleUVRowUp2_Linear_Any_AVX2, + ScaleUVRowUp2_Linear_AVX2, + ScaleUVRowUp2_Linear_C, + 15, + uint8_t) +#endif + +#ifdef HAS_SCALEUVROWUP2_LINEAR_16_SSE41 +SBUH2LANY(ScaleUVRowUp2_Linear_16_Any_SSE41, + ScaleUVRowUp2_Linear_16_SSE41, + ScaleUVRowUp2_Linear_16_C, + 3, + uint16_t) +#endif + +#ifdef HAS_SCALEUVROWUP2_LINEAR_16_AVX2 +SBUH2LANY(ScaleUVRowUp2_Linear_16_Any_AVX2, + ScaleUVRowUp2_Linear_16_AVX2, + ScaleUVRowUp2_Linear_16_C, + 7, + uint16_t) +#endif + +#ifdef HAS_SCALEUVROWUP2_LINEAR_NEON +SBUH2LANY(ScaleUVRowUp2_Linear_Any_NEON, + ScaleUVRowUp2_Linear_NEON, + ScaleUVRowUp2_Linear_C, + 15, + uint8_t) +#endif + +#ifdef HAS_SCALEUVROWUP2_LINEAR_16_NEON +SBUH2LANY(ScaleUVRowUp2_Linear_16_Any_NEON, + ScaleUVRowUp2_Linear_16_NEON, + ScaleUVRowUp2_Linear_16_C, + 15, + uint16_t) +#endif + +#undef SBUH2LANY + +// Scale bi-planar plane up 2 times using bilinear filter. +// This function produces 2 rows at a time. 
+#define SBU2BLANY(NAME, SIMD, C, MASK, PTYPE) \ + void NAME(const PTYPE* src_ptr, ptrdiff_t src_stride, PTYPE* dst_ptr, \ + ptrdiff_t dst_stride, int dst_width) { \ + int work_width = (dst_width - 1) & ~1; \ + int r = work_width & MASK; \ + int n = work_width & ~MASK; \ + const PTYPE* sa = src_ptr; \ + const PTYPE* sb = src_ptr + src_stride; \ + PTYPE* da = dst_ptr; \ + PTYPE* db = dst_ptr + dst_stride; \ + da[0] = (3 * sa[0] + sb[0] + 2) >> 2; \ + db[0] = (sa[0] + 3 * sb[0] + 2) >> 2; \ + da[1] = (3 * sa[1] + sb[1] + 2) >> 2; \ + db[1] = (sa[1] + 3 * sb[1] + 2) >> 2; \ + if (work_width > 0) { \ + if (n != 0) { \ + SIMD(sa, sb - sa, da + 2, db - da, n); \ + } \ + C(sa + n, sb - sa, da + 2 * n + 2, db - da, r); \ + } \ + da[2 * dst_width - 2] = (3 * sa[((dst_width + 1) & ~1) - 2] + \ + sb[((dst_width + 1) & ~1) - 2] + 2) >> \ + 2; \ + db[2 * dst_width - 2] = (sa[((dst_width + 1) & ~1) - 2] + \ + 3 * sb[((dst_width + 1) & ~1) - 2] + 2) >> \ + 2; \ + da[2 * dst_width - 1] = (3 * sa[((dst_width + 1) & ~1) - 1] + \ + sb[((dst_width + 1) & ~1) - 1] + 2) >> \ + 2; \ + db[2 * dst_width - 1] = (sa[((dst_width + 1) & ~1) - 1] + \ + 3 * sb[((dst_width + 1) & ~1) - 1] + 2) >> \ + 2; \ + } + +SBU2BLANY(ScaleUVRowUp2_Bilinear_Any_C, + ScaleUVRowUp2_Bilinear_C, + ScaleUVRowUp2_Bilinear_C, + 0, + uint8_t) + +SBU2BLANY(ScaleUVRowUp2_Bilinear_16_Any_C, + ScaleUVRowUp2_Bilinear_16_C, + ScaleUVRowUp2_Bilinear_16_C, + 0, + uint16_t) + +#ifdef HAS_SCALEUVROWUP2_BILINEAR_SSSE3 +SBU2BLANY(ScaleUVRowUp2_Bilinear_Any_SSSE3, + ScaleUVRowUp2_Bilinear_SSSE3, + ScaleUVRowUp2_Bilinear_C, + 7, + uint8_t) +#endif + +#ifdef HAS_SCALEUVROWUP2_BILINEAR_AVX2 +SBU2BLANY(ScaleUVRowUp2_Bilinear_Any_AVX2, + ScaleUVRowUp2_Bilinear_AVX2, + ScaleUVRowUp2_Bilinear_C, + 15, + uint8_t) +#endif + +#ifdef HAS_SCALEUVROWUP2_BILINEAR_16_SSE41 +SBU2BLANY(ScaleUVRowUp2_Bilinear_16_Any_SSE41, + ScaleUVRowUp2_Bilinear_16_SSE41, + ScaleUVRowUp2_Bilinear_16_C, + 7, + uint16_t) +#endif + +#ifdef HAS_SCALEUVROWUP2_BILINEAR_16_AVX2 +SBU2BLANY(ScaleUVRowUp2_Bilinear_16_Any_AVX2, + ScaleUVRowUp2_Bilinear_16_AVX2, + ScaleUVRowUp2_Bilinear_16_C, + 7, + uint16_t) +#endif + +#ifdef HAS_SCALEUVROWUP2_BILINEAR_NEON +SBU2BLANY(ScaleUVRowUp2_Bilinear_Any_NEON, + ScaleUVRowUp2_Bilinear_NEON, + ScaleUVRowUp2_Bilinear_C, + 7, + uint8_t) +#endif + +#ifdef HAS_SCALEUVROWUP2_BILINEAR_16_NEON +SBU2BLANY(ScaleUVRowUp2_Bilinear_16_Any_NEON, + ScaleUVRowUp2_Bilinear_16_NEON, + ScaleUVRowUp2_Bilinear_16_C, + 7, + uint16_t) +#endif + +#undef SBU2BLANY + #ifdef __cplusplus } // extern "C" } // namespace libyuv diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_argb.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_argb.cc index 451d4ec4d1..3e6f54776a 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale_argb.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_argb.cc @@ -58,9 +58,9 @@ static void ScaleARGBDown2(int src_width, assert((dy & 0x1ffff) == 0); // Test vertical scale is multiple of 2. // Advance to odd row, even column. 
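Editor's note: the assert visible at the top of ScaleARGBDown2 checks that the 16.16 vertical step really is a multiple of two by masking its low 17 bits; any fractional or odd step leaves residue there. A quick illustration:

#include <stdio.h>

int main(void) {
  int dy_half = 2 << 16;             /* exact 1/2 downscale: step 2.0 */
  int dy_frac = 3 << 15;             /* step 1.5                      */
  int dy_odd  = 3 << 16;             /* step 3.0                      */
  printf("%#x %#x %#x\n", dy_half & 0x1ffff, dy_frac & 0x1ffff,
         dy_odd & 0x1ffff);          /* only the first masks to zero  */
  return 0;
}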
if (filtering == kFilterBilinear) { - src_argb += (y >> 16) * src_stride + (x >> 16) * 4; + src_argb += (y >> 16) * (intptr_t)src_stride + (x >> 16) * 4; } else { - src_argb += (y >> 16) * src_stride + ((x >> 16) - 1) * 4; + src_argb += (y >> 16) * (intptr_t)src_stride + ((x >> 16) - 1) * 4; } #if defined(HAS_SCALEARGBROWDOWN2_SSE2) @@ -95,22 +95,6 @@ static void ScaleARGBDown2(int src_width, } } #endif -#if defined(HAS_SCALEARGBROWDOWN2_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ScaleARGBRowDown2 = - filtering == kFilterNone - ? ScaleARGBRowDown2_Any_MMI - : (filtering == kFilterLinear ? ScaleARGBRowDown2Linear_Any_MMI - : ScaleARGBRowDown2Box_Any_MMI); - if (IS_ALIGNED(dst_width, 2)) { - ScaleARGBRowDown2 = - filtering == kFilterNone - ? ScaleARGBRowDown2_MMI - : (filtering == kFilterLinear ? ScaleARGBRowDown2Linear_MMI - : ScaleARGBRowDown2Box_MMI); - } - } -#endif #if defined(HAS_SCALEARGBROWDOWN2_MSA) if (TestCpuFlag(kCpuHasMSA)) { ScaleARGBRowDown2 = @@ -127,6 +111,22 @@ static void ScaleARGBDown2(int src_width, } } #endif +#if defined(HAS_SCALEARGBROWDOWN2_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ScaleARGBRowDown2 = + filtering == kFilterNone + ? ScaleARGBRowDown2_Any_LSX + : (filtering == kFilterLinear ? ScaleARGBRowDown2Linear_Any_LSX + : ScaleARGBRowDown2Box_Any_LSX); + if (IS_ALIGNED(dst_width, 4)) { + ScaleARGBRowDown2 = + filtering == kFilterNone + ? ScaleARGBRowDown2_LSX + : (filtering == kFilterLinear ? ScaleARGBRowDown2Linear_LSX + : ScaleARGBRowDown2Box_LSX); + } + } +#endif if (filtering == kFilterLinear) { src_stride = 0; @@ -155,14 +155,14 @@ static void ScaleARGBDown4Box(int src_width, int dy) { int j; // Allocate 2 rows of ARGB. - const int kRowSize = (dst_width * 2 * 4 + 31) & ~31; - align_buffer_64(row, kRowSize * 2); + const int row_size = (dst_width * 2 * 4 + 31) & ~31; + align_buffer_64(row, row_size * 2); int row_stride = src_stride * (dy >> 16); void (*ScaleARGBRowDown2)(const uint8_t* src_argb, ptrdiff_t src_stride, uint8_t* dst_argb, int dst_width) = ScaleARGBRowDown2Box_C; // Advance to odd row, even column. - src_argb += (y >> 16) * src_stride + (x >> 16) * 4; + src_argb += (y >> 16) * (intptr_t)src_stride + (x >> 16) * 4; (void)src_width; (void)src_height; (void)dx; @@ -187,9 +187,9 @@ static void ScaleARGBDown4Box(int src_width, for (j = 0; j < dst_height; ++j) { ScaleARGBRowDown2(src_argb, src_stride, row, dst_width * 2); - ScaleARGBRowDown2(src_argb + src_stride * 2, src_stride, row + kRowSize, + ScaleARGBRowDown2(src_argb + src_stride * 2, src_stride, row + row_size, dst_width * 2); - ScaleARGBRowDown2(row, kRowSize, dst_argb, dst_width); + ScaleARGBRowDown2(row, row_size, dst_argb, dst_width); src_argb += row_stride; dst_argb += dst_stride; } @@ -214,7 +214,7 @@ static void ScaleARGBDownEven(int src_width, enum FilterMode filtering) { int j; int col_step = dx >> 16; - int row_stride = (dy >> 16) * src_stride; + ptrdiff_t row_stride = (ptrdiff_t)((dy >> 16) * (intptr_t)src_stride); void (*ScaleARGBRowDownEven)(const uint8_t* src_argb, ptrdiff_t src_stride, int src_step, uint8_t* dst_argb, int dst_width) = filtering ? ScaleARGBRowDownEvenBox_C : ScaleARGBRowDownEven_C; @@ -222,7 +222,7 @@ static void ScaleARGBDownEven(int src_width, (void)src_height; assert(IS_ALIGNED(src_width, 2)); assert(IS_ALIGNED(src_height, 2)); - src_argb += (y >> 16) * src_stride + (x >> 16) * 4; + src_argb += (y >> 16) * (intptr_t)src_stride + (x >> 16) * 4; #if defined(HAS_SCALEARGBROWDOWNEVEN_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { ScaleARGBRowDownEven = filtering ? 
ScaleARGBRowDownEvenBox_Any_SSE2 @@ -243,16 +243,6 @@ static void ScaleARGBDownEven(int src_width, } } #endif -#if defined(HAS_SCALEARGBROWDOWNEVEN_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ScaleARGBRowDownEven = filtering ? ScaleARGBRowDownEvenBox_Any_MMI - : ScaleARGBRowDownEven_Any_MMI; - if (IS_ALIGNED(dst_width, 2)) { - ScaleARGBRowDownEven = - filtering ? ScaleARGBRowDownEvenBox_MMI : ScaleARGBRowDownEven_MMI; - } - } -#endif #if defined(HAS_SCALEARGBROWDOWNEVEN_MSA) if (TestCpuFlag(kCpuHasMSA)) { ScaleARGBRowDownEven = filtering ? ScaleARGBRowDownEvenBox_Any_MSA @@ -263,6 +253,16 @@ static void ScaleARGBDownEven(int src_width, } } #endif +#if defined(HAS_SCALEARGBROWDOWNEVEN_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ScaleARGBRowDownEven = filtering ? ScaleARGBRowDownEvenBox_Any_LSX + : ScaleARGBRowDownEven_Any_LSX; + if (IS_ALIGNED(dst_width, 4)) { + ScaleARGBRowDownEven = + filtering ? ScaleARGBRowDownEvenBox_LSX : ScaleARGBRowDownEven_LSX; + } + } +#endif if (filtering == kFilterLinear) { src_stride = 0; @@ -289,10 +289,10 @@ static void ScaleARGBBilinearDown(int src_width, int dy, enum FilterMode filtering) { int j; - void (*InterpolateRow)(uint8_t * dst_argb, const uint8_t* src_argb, + void (*InterpolateRow)(uint8_t* dst_argb, const uint8_t* src_argb, ptrdiff_t src_stride, int dst_width, int source_y_fraction) = InterpolateRow_C; - void (*ScaleARGBFilterCols)(uint8_t * dst_argb, const uint8_t* src_argb, + void (*ScaleARGBFilterCols)(uint8_t* dst_argb, const uint8_t* src_argb, int dst_width, int x, int dx) = (src_width >= 32768) ? ScaleARGBFilterCols64_C : ScaleARGBFilterCols_C; int64_t xlast = x + (int64_t)(dst_width - 1) * dx; @@ -340,6 +340,14 @@ static void ScaleARGBBilinearDown(int src_width, } } #endif +#if defined(HAS_INTERPOLATEROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + InterpolateRow = InterpolateRow_Any_LSX; + if (IS_ALIGNED(clip_src_width, 32)) { + InterpolateRow = InterpolateRow_LSX; + } + } +#endif #if defined(HAS_SCALEARGBFILTERCOLS_SSSE3) if (TestCpuFlag(kCpuHasSSSE3) && src_width < 32768) { ScaleARGBFilterCols = ScaleARGBFilterCols_SSSE3; @@ -360,6 +368,14 @@ static void ScaleARGBBilinearDown(int src_width, ScaleARGBFilterCols = ScaleARGBFilterCols_MSA; } } +#endif +#if defined(HAS_SCALEARGBFILTERCOLS_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ScaleARGBFilterCols = ScaleARGBFilterCols_Any_LSX; + if (IS_ALIGNED(dst_width, 8)) { + ScaleARGBFilterCols = ScaleARGBFilterCols_LSX; + } + } #endif // TODO(fbarchard): Consider not allocating row buffer for kFilterLinear. // Allocate a row of ARGB. @@ -372,7 +388,7 @@ static void ScaleARGBBilinearDown(int src_width, } for (j = 0; j < dst_height; ++j) { int yi = y >> 16; - const uint8_t* src = src_argb + yi * src_stride; + const uint8_t* src = src_argb + yi * (intptr_t)src_stride; if (filtering == kFilterLinear) { ScaleARGBFilterCols(dst_argb, src, dst_width, x, dx); } else { @@ -405,10 +421,10 @@ static void ScaleARGBBilinearUp(int src_width, int dy, enum FilterMode filtering) { int j; - void (*InterpolateRow)(uint8_t * dst_argb, const uint8_t* src_argb, + void (*InterpolateRow)(uint8_t* dst_argb, const uint8_t* src_argb, ptrdiff_t src_stride, int dst_width, int source_y_fraction) = InterpolateRow_C; - void (*ScaleARGBFilterCols)(uint8_t * dst_argb, const uint8_t* src_argb, + void (*ScaleARGBFilterCols)(uint8_t* dst_argb, const uint8_t* src_argb, int dst_width, int x, int dx) = filtering ? 
ScaleARGBFilterCols_C : ScaleARGBCols_C; const int max_y = (src_height - 1) << 16; @@ -436,14 +452,6 @@ static void ScaleARGBBilinearUp(int src_width, } } #endif -#if defined(HAS_INTERPOLATEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - InterpolateRow = InterpolateRow_Any_MMI; - if (IS_ALIGNED(dst_width, 2)) { - InterpolateRow = InterpolateRow_MMI; - } - } -#endif #if defined(HAS_INTERPOLATEROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { InterpolateRow = InterpolateRow_Any_MSA; @@ -451,6 +459,14 @@ static void ScaleARGBBilinearUp(int src_width, InterpolateRow = InterpolateRow_MSA; } } +#endif +#if defined(HAS_INTERPOLATEROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + InterpolateRow = InterpolateRow_Any_LSX; + if (IS_ALIGNED(dst_width, 8)) { + InterpolateRow = InterpolateRow_LSX; + } + } #endif if (src_width >= 32768) { ScaleARGBFilterCols = @@ -477,6 +493,14 @@ static void ScaleARGBBilinearUp(int src_width, } } #endif +#if defined(HAS_SCALEARGBFILTERCOLS_LSX) + if (filtering && TestCpuFlag(kCpuHasLSX)) { + ScaleARGBFilterCols = ScaleARGBFilterCols_Any_LSX; + if (IS_ALIGNED(dst_width, 8)) { + ScaleARGBFilterCols = ScaleARGBFilterCols_LSX; + } + } +#endif #if defined(HAS_SCALEARGBCOLS_SSE2) if (!filtering && TestCpuFlag(kCpuHasSSE2) && src_width < 32768) { ScaleARGBFilterCols = ScaleARGBCols_SSE2; @@ -490,14 +514,6 @@ static void ScaleARGBBilinearUp(int src_width, } } #endif -#if defined(HAS_SCALEARGBCOLS_MMI) - if (!filtering && TestCpuFlag(kCpuHasMMI)) { - ScaleARGBFilterCols = ScaleARGBCols_Any_MMI; - if (IS_ALIGNED(dst_width, 1)) { - ScaleARGBFilterCols = ScaleARGBCols_MMI; - } - } -#endif #if defined(HAS_SCALEARGBCOLS_MSA) if (!filtering && TestCpuFlag(kCpuHasMSA)) { ScaleARGBFilterCols = ScaleARGBCols_Any_MSA; @@ -505,6 +521,14 @@ static void ScaleARGBBilinearUp(int src_width, ScaleARGBFilterCols = ScaleARGBCols_MSA; } } +#endif +#if defined(HAS_SCALEARGBCOLS_LSX) + if (!filtering && TestCpuFlag(kCpuHasLSX)) { + ScaleARGBFilterCols = ScaleARGBCols_Any_LSX; + if (IS_ALIGNED(dst_width, 4)) { + ScaleARGBFilterCols = ScaleARGBCols_LSX; + } + } #endif if (!filtering && src_width * 2 == dst_width && x < 0x8000) { ScaleARGBFilterCols = ScaleARGBColsUp2_C; @@ -512,11 +536,6 @@ static void ScaleARGBBilinearUp(int src_width, if (TestCpuFlag(kCpuHasSSE2) && IS_ALIGNED(dst_width, 8)) { ScaleARGBFilterCols = ScaleARGBColsUp2_SSE2; } -#endif -#if defined(HAS_SCALEARGBCOLSUP2_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(dst_width, 4)) { - ScaleARGBFilterCols = ScaleARGBColsUp2_MMI; - } #endif } @@ -526,14 +545,14 @@ static void ScaleARGBBilinearUp(int src_width, { int yi = y >> 16; - const uint8_t* src = src_argb + yi * src_stride; + const uint8_t* src = src_argb + yi * (intptr_t)src_stride; // Allocate 2 rows of ARGB. 
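/*
 * [Editor's sketch, not part of this patch] Each TestCpuFlag/IS_ALIGNED block
 * above has the same shape: probe a CPU feature at run time, fall back to the
 * "Any" variant (SIMD body plus scalar tail), and only commit to the full
 * SIMD kernel when dst_width meets its alignment requirement. A condensed,
 * hypothetical version of that selection ladder:
 */
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

typedef void (*InterpRowFn)(uint8_t* dst, const uint8_t* src,
                            ptrdiff_t src_stride, int width, int frac);

/* Hypothetical stand-ins for the real kernels and for TestCpuFlag(). */
static void InterpRow_C_Stub(uint8_t* d, const uint8_t* s, ptrdiff_t st,
                             int w, int f) { (void)d; (void)s; (void)st; (void)w; (void)f; }
static void InterpRow_Any_Simd_Stub(uint8_t* d, const uint8_t* s, ptrdiff_t st,
                                    int w, int f) { (void)d; (void)s; (void)st; (void)w; (void)f; }
static void InterpRow_Simd_Stub(uint8_t* d, const uint8_t* s, ptrdiff_t st,
                                int w, int f) { (void)d; (void)s; (void)st; (void)w; (void)f; }
static bool HasSimd(void) { return false; }

static InterpRowFn PickInterpRow(int dst_width) {
  InterpRowFn fn = InterpRow_C_Stub;     /* portable default             */
  if (HasSimd()) {
    fn = InterpRow_Any_Simd_Stub;        /* any width, handles the tail  */
    if ((dst_width & 31) == 0) {         /* kernel wants multiples of 32 */
      fn = InterpRow_Simd_Stub;          /* full-width kernel only       */
    }
  }
  return fn;
}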
- const int kRowSize = (dst_width * 4 + 31) & ~31; - align_buffer_64(row, kRowSize * 2); + const int row_size = (dst_width * 4 + 31) & ~31; + align_buffer_64(row, row_size * 2); uint8_t* rowptr = row; - int rowstride = kRowSize; + int rowstride = row_size; int lasty = yi; ScaleARGBFilterCols(rowptr, src, dst_width, x, dx); @@ -541,7 +560,9 @@ static void ScaleARGBBilinearUp(int src_width, src += src_stride; } ScaleARGBFilterCols(rowptr + rowstride, src, dst_width, x, dx); - src += src_stride; + if (src_height > 2) { + src += src_stride; + } for (j = 0; j < dst_height; ++j) { yi = y >> 16; @@ -549,14 +570,16 @@ static void ScaleARGBBilinearUp(int src_width, if (y > max_y) { y = max_y; yi = y >> 16; - src = src_argb + yi * src_stride; + src = src_argb + yi * (intptr_t)src_stride; } if (yi != lasty) { ScaleARGBFilterCols(rowptr, src, dst_width, x, dx); rowptr += rowstride; rowstride = -rowstride; lasty = yi; - src += src_stride; + if ((y + 65536) < max_y) { + src += src_stride; + } } } if (filtering == kFilterLinear) { @@ -611,6 +634,15 @@ static void ScaleYUVToARGBBilinearUp(int src_width, } } #endif +#if defined(HAS_I422TOARGBROW_AVX512BW) + if (TestCpuFlag(kCpuHasAVX512BW | kCpuHasAVX512VL) == + (kCpuHasAVX512BW | kCpuHasAVX512VL)) { + I422ToARGBRow = I422ToARGBRow_Any_AVX512BW; + if (IS_ALIGNED(src_width, 32)) { + I422ToARGBRow = I422ToARGBRow_AVX512BW; + } + } +#endif #if defined(HAS_I422TOARGBROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { I422ToARGBRow = I422ToARGBRow_Any_NEON; @@ -619,14 +651,6 @@ static void ScaleYUVToARGBBilinearUp(int src_width, } } #endif -#if defined(HAS_I422TOARGBROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - I422ToARGBRow = I422ToARGBRow_Any_MMI; - if (IS_ALIGNED(src_width, 4)) { - I422ToARGBRow = I422ToARGBRow_MMI; - } - } -#endif #if defined(HAS_I422TOARGBROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { I422ToARGBRow = I422ToARGBRow_Any_MSA; @@ -635,8 +659,16 @@ static void ScaleYUVToARGBBilinearUp(int src_width, } } #endif +#if defined(HAS_I422TOARGBROW_LASX) + if (TestCpuFlag(kCpuHasLASX)) { + I422ToARGBRow = I422ToARGBRow_Any_LASX; + if (IS_ALIGNED(src_width, 32)) { + I422ToARGBRow = I422ToARGBRow_LASX; + } + } +#endif - void (*InterpolateRow)(uint8_t * dst_argb, const uint8_t* src_argb, + void (*InterpolateRow)(uint8_t* dst_argb, const uint8_t* src_argb, ptrdiff_t src_stride, int dst_width, int source_y_fraction) = InterpolateRow_C; #if defined(HAS_INTERPOLATEROW_SSSE3) @@ -671,8 +703,16 @@ static void ScaleYUVToARGBBilinearUp(int src_width, } } #endif +#if defined(HAS_INTERPOLATEROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + InterpolateRow = InterpolateRow_Any_LSX; + if (IS_ALIGNED(dst_width, 8)) { + InterpolateRow = InterpolateRow_LSX; + } + } +#endif - void (*ScaleARGBFilterCols)(uint8_t * dst_argb, const uint8_t* src_argb, + void (*ScaleARGBFilterCols)(uint8_t* dst_argb, const uint8_t* src_argb, int dst_width, int x, int dx) = filtering ? 
ScaleARGBFilterCols_C : ScaleARGBCols_C; if (src_width >= 32768) { @@ -700,6 +740,14 @@ static void ScaleYUVToARGBBilinearUp(int src_width, } } #endif +#if defined(HAS_SCALEARGBFILTERCOLS_LSX) + if (filtering && TestCpuFlag(kCpuHasLSX)) { + ScaleARGBFilterCols = ScaleARGBFilterCols_Any_LSX; + if (IS_ALIGNED(dst_width, 8)) { + ScaleARGBFilterCols = ScaleARGBFilterCols_LSX; + } + } +#endif #if defined(HAS_SCALEARGBCOLS_SSE2) if (!filtering && TestCpuFlag(kCpuHasSSE2) && src_width < 32768) { ScaleARGBFilterCols = ScaleARGBCols_SSE2; @@ -713,14 +761,6 @@ static void ScaleYUVToARGBBilinearUp(int src_width, } } #endif -#if defined(HAS_SCALEARGBCOLS_MMI) - if (!filtering && TestCpuFlag(kCpuHasMMI)) { - ScaleARGBFilterCols = ScaleARGBCols_Any_MMI; - if (IS_ALIGNED(dst_width, 1)) { - ScaleARGBFilterCols = ScaleARGBCols_MMI; - } - } -#endif #if defined(HAS_SCALEARGBCOLS_MSA) if (!filtering && TestCpuFlag(kCpuHasMSA)) { ScaleARGBFilterCols = ScaleARGBCols_Any_MSA; @@ -728,6 +768,14 @@ static void ScaleYUVToARGBBilinearUp(int src_width, ScaleARGBFilterCols = ScaleARGBCols_MSA; } } +#endif +#if defined(HAS_SCALEARGBCOLS_LSX) + if (!filtering && TestCpuFlag(kCpuHasLSX)) { + ScaleARGBFilterCols = ScaleARGBCols_Any_LSX; + if (IS_ALIGNED(dst_width, 4)) { + ScaleARGBFilterCols = ScaleARGBCols_LSX; + } + } #endif if (!filtering && src_width * 2 == dst_width && x < 0x8000) { ScaleARGBFilterCols = ScaleARGBColsUp2_C; @@ -735,11 +783,6 @@ static void ScaleYUVToARGBBilinearUp(int src_width, if (TestCpuFlag(kCpuHasSSE2) && IS_ALIGNED(dst_width, 8)) { ScaleARGBFilterCols = ScaleARGBColsUp2_SSE2; } -#endif -#if defined(HAS_SCALEARGBCOLSUP2_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(dst_width, 4)) { - ScaleARGBFilterCols = ScaleARGBColsUp2_MMI; - } #endif } @@ -750,19 +793,19 @@ static void ScaleYUVToARGBBilinearUp(int src_width, const int kYShift = 1; // Shift Y by 1 to convert Y plane to UV coordinate. int yi = y >> 16; int uv_yi = yi >> kYShift; - const uint8_t* src_row_y = src_y + yi * src_stride_y; - const uint8_t* src_row_u = src_u + uv_yi * src_stride_u; - const uint8_t* src_row_v = src_v + uv_yi * src_stride_v; + const uint8_t* src_row_y = src_y + yi * (intptr_t)src_stride_y; + const uint8_t* src_row_u = src_u + uv_yi * (intptr_t)src_stride_u; + const uint8_t* src_row_v = src_v + uv_yi * (intptr_t)src_stride_v; // Allocate 2 rows of ARGB. - const int kRowSize = (dst_width * 4 + 31) & ~31; - align_buffer_64(row, kRowSize * 2); + const int row_size = (dst_width * 4 + 31) & ~31; + align_buffer_64(row, row_size * 2); // Allocate 1 row of ARGB for source conversion. align_buffer_64(argb_row, src_width * 4); uint8_t* rowptr = row; - int rowstride = kRowSize; + int rowstride = row_size; int lasty = yi; // TODO(fbarchard): Convert first 2 rows of YUV to ARGB. @@ -790,9 +833,9 @@ static void ScaleYUVToARGBBilinearUp(int src_width, y = max_y; yi = y >> 16; uv_yi = yi >> kYShift; - src_row_y = src_y + yi * src_stride_y; - src_row_u = src_u + uv_yi * src_stride_u; - src_row_v = src_v + uv_yi * src_stride_v; + src_row_y = src_y + yi * (intptr_t)src_stride_y; + src_row_u = src_u + uv_yi * (intptr_t)src_stride_u; + src_row_v = src_v + uv_yi * (intptr_t)src_stride_v; } if (yi != lasty) { // TODO(fbarchard): Convert the clipped region of row. @@ -840,7 +883,7 @@ static void ScaleARGBSimple(int src_width, int y, int dy) { int j; - void (*ScaleARGBCols)(uint8_t * dst_argb, const uint8_t* src_argb, + void (*ScaleARGBCols)(uint8_t* dst_argb, const uint8_t* src_argb, int dst_width, int x, int dx) = (src_width >= 32768) ? 
ScaleARGBCols64_C : ScaleARGBCols_C; (void)src_height; @@ -857,14 +900,6 @@ static void ScaleARGBSimple(int src_width, } } #endif -#if defined(HAS_SCALEARGBCOLS_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ScaleARGBCols = ScaleARGBCols_Any_MMI; - if (IS_ALIGNED(dst_width, 1)) { - ScaleARGBCols = ScaleARGBCols_MMI; - } - } -#endif #if defined(HAS_SCALEARGBCOLS_MSA) if (TestCpuFlag(kCpuHasMSA)) { ScaleARGBCols = ScaleARGBCols_Any_MSA; @@ -872,6 +907,14 @@ static void ScaleARGBSimple(int src_width, ScaleARGBCols = ScaleARGBCols_MSA; } } +#endif +#if defined(HAS_SCALEARGBCOLS_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + ScaleARGBCols = ScaleARGBCols_Any_LSX; + if (IS_ALIGNED(dst_width, 4)) { + ScaleARGBCols = ScaleARGBCols_LSX; + } + } #endif if (src_width * 2 == dst_width && x < 0x8000) { ScaleARGBCols = ScaleARGBColsUp2_C; @@ -879,17 +922,12 @@ static void ScaleARGBSimple(int src_width, if (TestCpuFlag(kCpuHasSSE2) && IS_ALIGNED(dst_width, 8)) { ScaleARGBCols = ScaleARGBColsUp2_SSE2; } -#endif -#if defined(HAS_SCALEARGBCOLSUP2_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(dst_width, 4)) { - ScaleARGBCols = ScaleARGBColsUp2_MMI; - } #endif } for (j = 0; j < dst_height; ++j) { - ScaleARGBCols(dst_argb, src_argb + (y >> 16) * src_stride, dst_width, x, - dx); + ScaleARGBCols(dst_argb, src_argb + (y >> 16) * (intptr_t)src_stride, + dst_width, x, dx); dst_argb += dst_stride; y += dy; } @@ -924,7 +962,7 @@ static void ScaleARGB(const uint8_t* src, // Negative src_height means invert the image. if (src_height < 0) { src_height = -src_height; - src = src + (src_height - 1) * src_stride; + src = src + (src_height - 1) * (intptr_t)src_stride; src_stride = -src_stride; } ScaleSlope(src_width, src_height, dst_width, dst_height, filtering, &x, &y, @@ -939,7 +977,7 @@ static void ScaleARGB(const uint8_t* src, if (clip_y) { int64_t clipf = (int64_t)(clip_y)*dy; y += (clipf & 0xffff); - src += (clipf >> 16) * src_stride; + src += (clipf >> 16) * (intptr_t)src_stride; dst += clip_y * dst_stride; } @@ -973,8 +1011,8 @@ static void ScaleARGB(const uint8_t* src, filtering = kFilterNone; if (dx == 0x10000 && dy == 0x10000) { // Straight copy. - ARGBCopy(src + (y >> 16) * src_stride + (x >> 16) * 4, src_stride, - dst, dst_stride, clip_width, clip_height); + ARGBCopy(src + (y >> 16) * (intptr_t)src_stride + (x >> 16) * 4, + src_stride, dst, dst_stride, clip_width, clip_height); return; } } @@ -983,7 +1021,7 @@ static void ScaleARGB(const uint8_t* src, if (dx == 0x10000 && (x & 0xffff) == 0) { // Arbitrary scale vertically, but unscaled horizontally. ScalePlaneVertical(src_height, clip_width, clip_height, src_stride, - dst_stride, src, dst, x, y, dy, 4, filtering); + dst_stride, src, dst, x, y, dy, /*bpp=*/4, filtering); return; } if (filtering && dy < 65536) { diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_common.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_common.cc index fd4cbd0386..f183240343 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale_common.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_common.cc @@ -23,6 +23,25 @@ namespace libyuv { extern "C" { #endif +#ifdef __cplusplus +#define STATIC_CAST(type, expr) static_cast(expr) +#else +#define STATIC_CAST(type, expr) (type)(expr) +#endif + +// TODO(fbarchard): make clamp255 preserve negative values. 
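/*
 * [Editor's sketch, not part of this patch] The recurring edit in scale_argb.cc
 * above promotes src_stride to intptr_t before multiplying by the 16.16
 * fixed-point row index, so the byte offset is computed in pointer-width
 * arithmetic instead of 32-bit int (which could wrap near libyuv's 32768-pixel
 * limits for tall ARGB surfaces). A small illustration of the row stepping and
 * the cast:
 */
#include <stddef.h>
#include <stdint.h>

static const uint8_t* RowAt(const uint8_t* src, int src_stride, int y) {
  int yi = y >> 16;                       /* integer part of the 16.16 row   */
  /* Without the cast, yi * src_stride would be an int * int multiply and    */
  /* could overflow once the product exceeds INT_MAX.                        */
  return src + yi * (intptr_t)src_stride;
}

static void WalkRows(int dst_height, int y, int dy) {
  for (int j = 0; j < dst_height; ++j) {
    int yi = y >> 16;                     /* source row index                */
    int yf = (y >> 8) & 255;              /* 8-bit blend fraction            */
    (void)yi; (void)yf;                   /* a real loop interpolates rows   */
    y += dy;                              /* advance by the 16.16 step       */
  }
}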
+static __inline int32_t clamp255(int32_t v) { + return (-(v >= 255) | v) & 255; +} + +// Use scale to convert lsb formats to msb, depending how many bits there are: +// 32768 = 9 bits +// 16384 = 10 bits +// 4096 = 12 bits +// 256 = 16 bits +// TODO(fbarchard): change scale to bits +#define C16TO8(v, scale) clamp255(((v) * (scale)) >> 16) + static __inline int Abs(int v) { return v >= 0 ? v : -v; } @@ -62,6 +81,50 @@ void ScaleRowDown2_16_C(const uint16_t* src_ptr, } } +void ScaleRowDown2_16To8_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width, + int scale) { + int x; + (void)src_stride; + assert(scale >= 256); + assert(scale <= 32768); + for (x = 0; x < dst_width - 1; x += 2) { + dst[0] = STATIC_CAST(uint8_t, C16TO8(src_ptr[1], scale)); + dst[1] = STATIC_CAST(uint8_t, C16TO8(src_ptr[3], scale)); + dst += 2; + src_ptr += 4; + } + if (dst_width & 1) { + dst[0] = STATIC_CAST(uint8_t, C16TO8(src_ptr[1], scale)); + } +} + +void ScaleRowDown2_16To8_Odd_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width, + int scale) { + int x; + (void)src_stride; + assert(scale >= 256); + assert(scale <= 32768); + dst_width -= 1; + for (x = 0; x < dst_width - 1; x += 2) { + dst[0] = STATIC_CAST(uint8_t, C16TO8(src_ptr[1], scale)); + dst[1] = STATIC_CAST(uint8_t, C16TO8(src_ptr[3], scale)); + dst += 2; + src_ptr += 4; + } + if (dst_width & 1) { + dst[0] = STATIC_CAST(uint8_t, C16TO8(src_ptr[1], scale)); + dst += 1; + src_ptr += 2; + } + dst[0] = STATIC_CAST(uint8_t, C16TO8(src_ptr[0], scale)); +} + void ScaleRowDown2Linear_C(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst, @@ -98,6 +161,52 @@ void ScaleRowDown2Linear_16_C(const uint16_t* src_ptr, } } +void ScaleRowDown2Linear_16To8_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width, + int scale) { + const uint16_t* s = src_ptr; + int x; + (void)src_stride; + assert(scale >= 256); + assert(scale <= 32768); + for (x = 0; x < dst_width - 1; x += 2) { + dst[0] = STATIC_CAST(uint8_t, C16TO8((s[0] + s[1] + 1) >> 1, scale)); + dst[1] = STATIC_CAST(uint8_t, C16TO8((s[2] + s[3] + 1) >> 1, scale)); + dst += 2; + s += 4; + } + if (dst_width & 1) { + dst[0] = STATIC_CAST(uint8_t, C16TO8((s[0] + s[1] + 1) >> 1, scale)); + } +} + +void ScaleRowDown2Linear_16To8_Odd_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width, + int scale) { + const uint16_t* s = src_ptr; + int x; + (void)src_stride; + assert(scale >= 256); + assert(scale <= 32768); + dst_width -= 1; + for (x = 0; x < dst_width - 1; x += 2) { + dst[0] = STATIC_CAST(uint8_t, C16TO8((s[0] + s[1] + 1) >> 1, scale)); + dst[1] = STATIC_CAST(uint8_t, C16TO8((s[2] + s[3] + 1) >> 1, scale)); + dst += 2; + s += 4; + } + if (dst_width & 1) { + dst[0] = STATIC_CAST(uint8_t, C16TO8((s[0] + s[1] + 1) >> 1, scale)); + dst += 1; + s += 2; + } + dst[0] = STATIC_CAST(uint8_t, C16TO8(s[0], scale)); +} + void ScaleRowDown2Box_C(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst, @@ -160,6 +269,61 @@ void ScaleRowDown2Box_16_C(const uint16_t* src_ptr, } } +void ScaleRowDown2Box_16To8_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width, + int scale) { + const uint16_t* s = src_ptr; + const uint16_t* t = src_ptr + src_stride; + int x; + assert(scale >= 256); + assert(scale <= 32768); + for (x = 0; x < dst_width - 1; x += 2) { + dst[0] = STATIC_CAST(uint8_t, + C16TO8((s[0] + s[1] + t[0] + t[1] + 2) >> 2, scale)); + dst[1] = STATIC_CAST(uint8_t, + C16TO8((s[2] + s[3] 
+ t[2] + t[3] + 2) >> 2, scale)); + dst += 2; + s += 4; + t += 4; + } + if (dst_width & 1) { + dst[0] = STATIC_CAST(uint8_t, + C16TO8((s[0] + s[1] + t[0] + t[1] + 2) >> 2, scale)); + } +} + +void ScaleRowDown2Box_16To8_Odd_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width, + int scale) { + const uint16_t* s = src_ptr; + const uint16_t* t = src_ptr + src_stride; + int x; + assert(scale >= 256); + assert(scale <= 32768); + dst_width -= 1; + for (x = 0; x < dst_width - 1; x += 2) { + dst[0] = STATIC_CAST(uint8_t, + C16TO8((s[0] + s[1] + t[0] + t[1] + 2) >> 2, scale)); + dst[1] = STATIC_CAST(uint8_t, + C16TO8((s[2] + s[3] + t[2] + t[3] + 2) >> 2, scale)); + dst += 2; + s += 4; + t += 4; + } + if (dst_width & 1) { + dst[0] = STATIC_CAST(uint8_t, + C16TO8((s[0] + s[1] + t[0] + t[1] + 2) >> 2, scale)); + dst += 1; + s += 2; + t += 2; + } + dst[0] = STATIC_CAST(uint8_t, C16TO8((s[0] + t[0] + 1) >> 1, scale)); +} + void ScaleRowDown4_C(const uint8_t* src_ptr, ptrdiff_t src_stride, uint8_t* dst, @@ -400,6 +564,95 @@ void ScaleRowDown34_1_Box_16_C(const uint16_t* src_ptr, } } +// Sample position: (O is src sample position, X is dst sample position) +// +// v dst_ptr at here v stop at here +// X O X X O X X O X X O X X O X +// ^ src_ptr at here +void ScaleRowUp2_Linear_C(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width) { + int src_width = dst_width >> 1; + int x; + assert((dst_width % 2 == 0) && (dst_width >= 0)); + for (x = 0; x < src_width; ++x) { + dst_ptr[2 * x + 0] = (src_ptr[x + 0] * 3 + src_ptr[x + 1] * 1 + 2) >> 2; + dst_ptr[2 * x + 1] = (src_ptr[x + 0] * 1 + src_ptr[x + 1] * 3 + 2) >> 2; + } +} + +// Sample position: (O is src sample position, X is dst sample position) +// +// src_ptr at here +// X v X X X X X X X X X +// O O O O O +// X X X X X X X X X X +// ^ dst_ptr at here ^ stop at here +// X X X X X X X X X X +// O O O O O +// X X X X X X X X X X +void ScaleRowUp2_Bilinear_C(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + const uint8_t* s = src_ptr; + const uint8_t* t = src_ptr + src_stride; + uint8_t* d = dst_ptr; + uint8_t* e = dst_ptr + dst_stride; + int src_width = dst_width >> 1; + int x; + assert((dst_width % 2 == 0) && (dst_width >= 0)); + for (x = 0; x < src_width; ++x) { + d[2 * x + 0] = + (s[x + 0] * 9 + s[x + 1] * 3 + t[x + 0] * 3 + t[x + 1] * 1 + 8) >> 4; + d[2 * x + 1] = + (s[x + 0] * 3 + s[x + 1] * 9 + t[x + 0] * 1 + t[x + 1] * 3 + 8) >> 4; + e[2 * x + 0] = + (s[x + 0] * 3 + s[x + 1] * 1 + t[x + 0] * 9 + t[x + 1] * 3 + 8) >> 4; + e[2 * x + 1] = + (s[x + 0] * 1 + s[x + 1] * 3 + t[x + 0] * 3 + t[x + 1] * 9 + 8) >> 4; + } +} + +// Only suitable for at most 14 bit range. +void ScaleRowUp2_Linear_16_C(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width) { + int src_width = dst_width >> 1; + int x; + assert((dst_width % 2 == 0) && (dst_width >= 0)); + for (x = 0; x < src_width; ++x) { + dst_ptr[2 * x + 0] = (src_ptr[x + 0] * 3 + src_ptr[x + 1] * 1 + 2) >> 2; + dst_ptr[2 * x + 1] = (src_ptr[x + 0] * 1 + src_ptr[x + 1] * 3 + 2) >> 2; + } +} + +// Only suitable for at most 12bit range. 
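/*
 * [Editor's sketch, not part of this patch] The 2x upsamplers above blend the
 * four nearest source samples with 9:3:3:1 weights plus 8 for rounding, then
 * shift right by 4. The "at most 14 bit" / "at most 12 bit" notes are
 * consistent with keeping these weighted sums inside 16-bit lanes: the
 * worst-case bilinear sum is 16*max + 8, which only fits in a uint16_t while
 * max <= 4095. A tiny check of that arithmetic:
 */
#include <assert.h>
#include <stdint.h>

/* d[2x] = (9*s_near + 3*s_far + 3*t_near + 1*t_far + 8) >> 4 */
static uint16_t Bilinear9331(uint16_t s_near, uint16_t s_far,
                             uint16_t t_near, uint16_t t_far) {
  return (uint16_t)((s_near * 9 + s_far * 3 + t_near * 3 + t_far * 1 + 8) >> 4);
}

int main(void) {
  assert(4095u * 16u + 8u <= 65535u);                   /* 12-bit input: fits */
  assert(8191u * 16u + 8u > 65535u);                    /* 13-bit would not   */
  assert(Bilinear9331(4095, 4095, 4095, 4095) == 4095); /* flat area is exact */
  return 0;
}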
+void ScaleRowUp2_Bilinear_16_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + const uint16_t* s = src_ptr; + const uint16_t* t = src_ptr + src_stride; + uint16_t* d = dst_ptr; + uint16_t* e = dst_ptr + dst_stride; + int src_width = dst_width >> 1; + int x; + assert((dst_width % 2 == 0) && (dst_width >= 0)); + for (x = 0; x < src_width; ++x) { + d[2 * x + 0] = + (s[x + 0] * 9 + s[x + 1] * 3 + t[x + 0] * 3 + t[x + 1] * 1 + 8) >> 4; + d[2 * x + 1] = + (s[x + 0] * 3 + s[x + 1] * 9 + t[x + 0] * 1 + t[x + 1] * 3 + 8) >> 4; + e[2 * x + 0] = + (s[x + 0] * 3 + s[x + 1] * 1 + t[x + 0] * 9 + t[x + 1] * 3 + 8) >> 4; + e[2 * x + 1] = + (s[x + 0] * 1 + s[x + 1] * 3 + t[x + 0] * 3 + t[x + 1] * 9 + 8) >> 4; + } +} + // Scales a single row of pixels using point sampling. void ScaleCols_C(uint8_t* dst_ptr, const uint8_t* src_ptr, @@ -677,18 +930,18 @@ void ScaleRowDown38_3_Box_16_C(const uint16_t* src_ptr, (src_ptr[0] + src_ptr[1] + src_ptr[2] + src_ptr[stride + 0] + src_ptr[stride + 1] + src_ptr[stride + 2] + src_ptr[stride * 2 + 0] + src_ptr[stride * 2 + 1] + src_ptr[stride * 2 + 2]) * - (65536 / 9) >> + (65536u / 9u) >> 16; dst_ptr[1] = (src_ptr[3] + src_ptr[4] + src_ptr[5] + src_ptr[stride + 3] + src_ptr[stride + 4] + src_ptr[stride + 5] + src_ptr[stride * 2 + 3] + src_ptr[stride * 2 + 4] + src_ptr[stride * 2 + 5]) * - (65536 / 9) >> + (65536u / 9u) >> 16; dst_ptr[2] = (src_ptr[6] + src_ptr[7] + src_ptr[stride + 6] + src_ptr[stride + 7] + src_ptr[stride * 2 + 6] + src_ptr[stride * 2 + 7]) * - (65536 / 6) >> + (65536u / 6u) >> 16; src_ptr += 8; dst_ptr += 3; @@ -731,15 +984,15 @@ void ScaleRowDown38_2_Box_16_C(const uint16_t* src_ptr, for (i = 0; i < dst_width; i += 3) { dst_ptr[0] = (src_ptr[0] + src_ptr[1] + src_ptr[2] + src_ptr[stride + 0] + src_ptr[stride + 1] + src_ptr[stride + 2]) * - (65536 / 6) >> + (65536u / 6u) >> 16; dst_ptr[1] = (src_ptr[3] + src_ptr[4] + src_ptr[5] + src_ptr[stride + 3] + src_ptr[stride + 4] + src_ptr[stride + 5]) * - (65536 / 6) >> + (65536u / 6u) >> 16; dst_ptr[2] = (src_ptr[6] + src_ptr[7] + src_ptr[stride + 6] + src_ptr[stride + 7]) * - (65536 / 4) >> + (65536u / 4u) >> 16; src_ptr += 8; dst_ptr += 3; @@ -1111,6 +1364,122 @@ void ScaleUVRowDownEvenBox_C(const uint8_t* src_uv, } } +void ScaleUVRowUp2_Linear_C(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width) { + int src_width = dst_width >> 1; + int x; + assert((dst_width % 2 == 0) && (dst_width >= 0)); + for (x = 0; x < src_width; ++x) { + dst_ptr[4 * x + 0] = + (src_ptr[2 * x + 0] * 3 + src_ptr[2 * x + 2] * 1 + 2) >> 2; + dst_ptr[4 * x + 1] = + (src_ptr[2 * x + 1] * 3 + src_ptr[2 * x + 3] * 1 + 2) >> 2; + dst_ptr[4 * x + 2] = + (src_ptr[2 * x + 0] * 1 + src_ptr[2 * x + 2] * 3 + 2) >> 2; + dst_ptr[4 * x + 3] = + (src_ptr[2 * x + 1] * 1 + src_ptr[2 * x + 3] * 3 + 2) >> 2; + } +} + +void ScaleUVRowUp2_Bilinear_C(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + const uint8_t* s = src_ptr; + const uint8_t* t = src_ptr + src_stride; + uint8_t* d = dst_ptr; + uint8_t* e = dst_ptr + dst_stride; + int src_width = dst_width >> 1; + int x; + assert((dst_width % 2 == 0) && (dst_width >= 0)); + for (x = 0; x < src_width; ++x) { + d[4 * x + 0] = (s[2 * x + 0] * 9 + s[2 * x + 2] * 3 + t[2 * x + 0] * 3 + + t[2 * x + 2] * 1 + 8) >> + 4; + d[4 * x + 1] = (s[2 * x + 1] * 9 + s[2 * x + 3] * 3 + t[2 * x + 1] * 3 + + t[2 * x + 3] * 1 + 8) >> + 4; + d[4 * x + 2] = (s[2 * x + 0] * 3 + s[2 * x + 2] * 9 
+ t[2 * x + 0] * 1 + + t[2 * x + 2] * 3 + 8) >> + 4; + d[4 * x + 3] = (s[2 * x + 1] * 3 + s[2 * x + 3] * 9 + t[2 * x + 1] * 1 + + t[2 * x + 3] * 3 + 8) >> + 4; + e[4 * x + 0] = (s[2 * x + 0] * 3 + s[2 * x + 2] * 1 + t[2 * x + 0] * 9 + + t[2 * x + 2] * 3 + 8) >> + 4; + e[4 * x + 1] = (s[2 * x + 1] * 3 + s[2 * x + 3] * 1 + t[2 * x + 1] * 9 + + t[2 * x + 3] * 3 + 8) >> + 4; + e[4 * x + 2] = (s[2 * x + 0] * 1 + s[2 * x + 2] * 3 + t[2 * x + 0] * 3 + + t[2 * x + 2] * 9 + 8) >> + 4; + e[4 * x + 3] = (s[2 * x + 1] * 1 + s[2 * x + 3] * 3 + t[2 * x + 1] * 3 + + t[2 * x + 3] * 9 + 8) >> + 4; + } +} + +void ScaleUVRowUp2_Linear_16_C(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width) { + int src_width = dst_width >> 1; + int x; + assert((dst_width % 2 == 0) && (dst_width >= 0)); + for (x = 0; x < src_width; ++x) { + dst_ptr[4 * x + 0] = + (src_ptr[2 * x + 0] * 3 + src_ptr[2 * x + 2] * 1 + 2) >> 2; + dst_ptr[4 * x + 1] = + (src_ptr[2 * x + 1] * 3 + src_ptr[2 * x + 3] * 1 + 2) >> 2; + dst_ptr[4 * x + 2] = + (src_ptr[2 * x + 0] * 1 + src_ptr[2 * x + 2] * 3 + 2) >> 2; + dst_ptr[4 * x + 3] = + (src_ptr[2 * x + 1] * 1 + src_ptr[2 * x + 3] * 3 + 2) >> 2; + } +} + +void ScaleUVRowUp2_Bilinear_16_C(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + const uint16_t* s = src_ptr; + const uint16_t* t = src_ptr + src_stride; + uint16_t* d = dst_ptr; + uint16_t* e = dst_ptr + dst_stride; + int src_width = dst_width >> 1; + int x; + assert((dst_width % 2 == 0) && (dst_width >= 0)); + for (x = 0; x < src_width; ++x) { + d[4 * x + 0] = (s[2 * x + 0] * 9 + s[2 * x + 2] * 3 + t[2 * x + 0] * 3 + + t[2 * x + 2] * 1 + 8) >> + 4; + d[4 * x + 1] = (s[2 * x + 1] * 9 + s[2 * x + 3] * 3 + t[2 * x + 1] * 3 + + t[2 * x + 3] * 1 + 8) >> + 4; + d[4 * x + 2] = (s[2 * x + 0] * 3 + s[2 * x + 2] * 9 + t[2 * x + 0] * 1 + + t[2 * x + 2] * 3 + 8) >> + 4; + d[4 * x + 3] = (s[2 * x + 1] * 3 + s[2 * x + 3] * 9 + t[2 * x + 1] * 1 + + t[2 * x + 3] * 3 + 8) >> + 4; + e[4 * x + 0] = (s[2 * x + 0] * 3 + s[2 * x + 2] * 1 + t[2 * x + 0] * 9 + + t[2 * x + 2] * 3 + 8) >> + 4; + e[4 * x + 1] = (s[2 * x + 1] * 3 + s[2 * x + 3] * 1 + t[2 * x + 1] * 9 + + t[2 * x + 3] * 3 + 8) >> + 4; + e[4 * x + 2] = (s[2 * x + 0] * 1 + s[2 * x + 2] * 3 + t[2 * x + 0] * 3 + + t[2 * x + 2] * 9 + 8) >> + 4; + e[4 * x + 3] = (s[2 * x + 1] * 1 + s[2 * x + 3] * 3 + t[2 * x + 1] * 3 + + t[2 * x + 3] * 9 + 8) >> + 4; + } +} + // Scales a single row of pixels using point sampling. void ScaleUVCols_C(uint8_t* dst_uv, const uint8_t* src_uv, @@ -1260,7 +1629,7 @@ void ScalePlaneVertical(int src_height, int x, int y, int dy, - int bpp, + int bpp, // bytes per pixel. 4 for ARGB. enum FilterMode filtering) { // TODO(fbarchard): Allow higher bpp. 
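/*
 * [Editor's sketch, not part of this patch] ScalePlaneVertical walks output
 * rows with a 16.16 accumulator: yi = y >> 16 picks the upper source row and
 * yf = (y >> 8) & 255 is the 8-bit fraction handed to InterpolateRow. A
 * plausible scalar version of that per-row blend (a hypothetical helper, not
 * the library's own InterpolateRow_C) is:
 */
#include <stdint.h>

static void BlendRows(uint8_t* dst, const uint8_t* row0, const uint8_t* row1,
                      int width_bytes, int yf /* 0..255, 0 = row0 only */) {
  for (int i = 0; i < width_bytes; ++i) {
    /* weighted average of the two bracketing rows, with rounding */
    dst[i] = (uint8_t)((row0[i] * (256 - yf) + row1[i] * yf + 128) >> 8);
  }
}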
int dst_width_bytes = dst_width * bpp; @@ -1298,14 +1667,6 @@ void ScalePlaneVertical(int src_height, } } #endif -#if defined(HAS_INTERPOLATEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - InterpolateRow = InterpolateRow_Any_MMI; - if (IS_ALIGNED(dst_width_bytes, 8)) { - InterpolateRow = InterpolateRow_MMI; - } - } -#endif #if defined(HAS_INTERPOLATEROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { InterpolateRow = InterpolateRow_Any_MSA; @@ -1313,6 +1674,14 @@ void ScalePlaneVertical(int src_height, InterpolateRow = InterpolateRow_MSA; } } +#endif +#if defined(HAS_INTERPOLATEROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + InterpolateRow = InterpolateRow_Any_LSX; + if (IS_ALIGNED(dst_width_bytes, 32)) { + InterpolateRow = InterpolateRow_LSX; + } + } #endif for (j = 0; j < dst_height; ++j) { int yi; @@ -1328,6 +1697,7 @@ void ScalePlaneVertical(int src_height, y += dy; } } + void ScalePlaneVertical_16(int src_height, int dst_width, int dst_height, @@ -1338,7 +1708,7 @@ void ScalePlaneVertical_16(int src_height, int x, int y, int dy, - int wpp, + int wpp, /* words per pixel. normally 1 */ enum FilterMode filtering) { // TODO(fbarchard): Allow higher wpp. int dst_width_words = dst_width * wpp; @@ -1354,32 +1724,32 @@ void ScalePlaneVertical_16(int src_height, src_argb += (x >> 16) * wpp; #if defined(HAS_INTERPOLATEROW_16_SSE2) if (TestCpuFlag(kCpuHasSSE2)) { - InterpolateRow = InterpolateRow_Any_16_SSE2; - if (IS_ALIGNED(dst_width_bytes, 16)) { + InterpolateRow = InterpolateRow_16_Any_SSE2; + if (IS_ALIGNED(dst_width_words, 16)) { InterpolateRow = InterpolateRow_16_SSE2; } } #endif #if defined(HAS_INTERPOLATEROW_16_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { - InterpolateRow = InterpolateRow_Any_16_SSSE3; - if (IS_ALIGNED(dst_width_bytes, 16)) { + InterpolateRow = InterpolateRow_16_Any_SSSE3; + if (IS_ALIGNED(dst_width_words, 16)) { InterpolateRow = InterpolateRow_16_SSSE3; } } #endif #if defined(HAS_INTERPOLATEROW_16_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { - InterpolateRow = InterpolateRow_Any_16_AVX2; - if (IS_ALIGNED(dst_width_bytes, 32)) { + InterpolateRow = InterpolateRow_16_Any_AVX2; + if (IS_ALIGNED(dst_width_words, 32)) { InterpolateRow = InterpolateRow_16_AVX2; } } #endif #if defined(HAS_INTERPOLATEROW_16_NEON) if (TestCpuFlag(kCpuHasNEON)) { - InterpolateRow = InterpolateRow_Any_16_NEON; - if (IS_ALIGNED(dst_width_bytes, 16)) { + InterpolateRow = InterpolateRow_16_Any_NEON; + if (IS_ALIGNED(dst_width_words, 8)) { InterpolateRow = InterpolateRow_16_NEON; } } @@ -1399,6 +1769,70 @@ void ScalePlaneVertical_16(int src_height, } } +// Use scale to convert lsb formats to msb, depending how many bits there are: +// 32768 = 9 bits +// 16384 = 10 bits +// 4096 = 12 bits +// 256 = 16 bits +// TODO(fbarchard): change scale to bits +void ScalePlaneVertical_16To8(int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint16_t* src_argb, + uint8_t* dst_argb, + int x, + int y, + int dy, + int wpp, /* words per pixel. normally 1 */ + int scale, + enum FilterMode filtering) { + // TODO(fbarchard): Allow higher wpp. + int dst_width_words = dst_width * wpp; + // TODO(https://crbug.com/libyuv/931): Add NEON 32 bit and AVX2 versions. + void (*InterpolateRow_16To8)(uint8_t * dst_argb, const uint16_t* src_argb, + ptrdiff_t src_stride, int scale, int dst_width, + int source_y_fraction) = InterpolateRow_16To8_C; + const int max_y = (src_height > 1) ? 
((src_height - 1) << 16) - 1 : 0; + int j; + assert(wpp >= 1 && wpp <= 2); + assert(src_height != 0); + assert(dst_width > 0); + assert(dst_height > 0); + src_argb += (x >> 16) * wpp; + +#if defined(HAS_INTERPOLATEROW_16TO8_NEON) + if (TestCpuFlag(kCpuHasNEON)) { + InterpolateRow_16To8 = InterpolateRow_16To8_Any_NEON; + if (IS_ALIGNED(dst_width, 8)) { + InterpolateRow_16To8 = InterpolateRow_16To8_NEON; + } + } +#endif +#if defined(HAS_INTERPOLATEROW_16TO8_AVX2) + if (TestCpuFlag(kCpuHasAVX2)) { + InterpolateRow_16To8 = InterpolateRow_16To8_Any_AVX2; + if (IS_ALIGNED(dst_width, 32)) { + InterpolateRow_16To8 = InterpolateRow_16To8_AVX2; + } + } +#endif + for (j = 0; j < dst_height; ++j) { + int yi; + int yf; + if (y > max_y) { + y = max_y; + } + yi = y >> 16; + yf = filtering ? ((y >> 8) & 255) : 0; + InterpolateRow_16To8(dst_argb, src_argb + yi * src_stride, src_stride, + scale, dst_width_words, yf); + dst_argb += dst_stride; + y += dy; + } +} + // Simplify the filtering based on scale factors. enum FilterMode ScaleFilterReduce(int src_width, int src_height, @@ -1412,8 +1846,8 @@ enum FilterMode ScaleFilterReduce(int src_width, src_height = -src_height; } if (filtering == kFilterBox) { - // If scaling both axis to 0.5 or larger, switch from Box to Bilinear. - if (dst_width * 2 >= src_width && dst_height * 2 >= src_height) { + // If scaling either axis to 0.5 or larger, switch from Box to Bilinear. + if (dst_width * 2 >= src_width || dst_height * 2 >= src_height) { filtering = kFilterBilinear; } } @@ -1448,7 +1882,7 @@ int FixedDiv_C(int num, int div) { return (int)(((int64_t)(num) << 16) / div); } -// Divide num by div and return as 16.16 fixed point result. +// Divide num - 1 by div - 1 and return as 16.16 fixed point result. int FixedDiv1_C(int num, int div) { return (int)((((int64_t)(num) << 16) - 0x00010001) / (div - 1)); } @@ -1491,14 +1925,14 @@ void ScaleSlope(int src_width, if (dst_width <= Abs(src_width)) { *dx = FixedDiv(Abs(src_width), dst_width); *x = CENTERSTART(*dx, -32768); // Subtract 0.5 (32768) to center filter. - } else if (dst_width > 1) { + } else if (src_width > 1 && dst_width > 1) { *dx = FixedDiv1(Abs(src_width), dst_width); *x = 0; } if (dst_height <= src_height) { *dy = FixedDiv(src_height, dst_height); *y = CENTERSTART(*dy, -32768); // Subtract 0.5 (32768) to center filter. - } else if (dst_height > 1) { + } else if (src_height > 1 && dst_height > 1) { *dy = FixedDiv1(src_height, dst_height); *y = 0; } @@ -1507,7 +1941,7 @@ void ScaleSlope(int src_width, if (dst_width <= Abs(src_width)) { *dx = FixedDiv(Abs(src_width), dst_width); *x = CENTERSTART(*dx, -32768); // Subtract 0.5 (32768) to center filter. - } else if (dst_width > 1) { + } else if (src_width > 1 && dst_width > 1) { *dx = FixedDiv1(Abs(src_width), dst_width); *x = 0; } diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_gcc.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_gcc.cc index e575ee18bc..17eeffadfb 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale_gcc.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_gcc.cc @@ -17,8 +17,7 @@ extern "C" { #endif // This module is for GCC x86 and x64. 
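/*
 * [Editor's sketch, not part of this patch] FixedDiv_C and FixedDiv1_C in
 * scale_common.cc above build the 16.16 steps used throughout these files:
 * FixedDiv is (num << 16) / div, and FixedDiv1 (used on the upscale paths in
 * ScaleSlope) divides num - 1 by div - 1, nudged down by one 16.16 ulp,
 * presumably so the final tap cannot land on or past the last source index.
 * A small worked check of that bias:
 */
#include <assert.h>
#include <stdint.h>

static int FixedDivEx(int num, int div) {        /* same shape as FixedDiv_C  */
  return (int)(((int64_t)num << 16) / div);
}
static int FixedDiv1Ex(int num, int div) {       /* same shape as FixedDiv1_C */
  return (int)((((int64_t)num << 16) - 0x00010001) / (div - 1));
}

int main(void) {
  assert(FixedDivEx(4, 2) == 0x20000);           /* 4 -> 2: step of 2.0 px    */
  int dx = FixedDiv1Ex(4, 8);                    /* 4 -> 8 upscale            */
  int last = 7 * dx;                             /* position of output col 7  */
  assert(last < (3 << 16));                      /* stays below src index 3   */
  return 0;
}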
-#if !defined(LIBYUV_DISABLE_X86) && \ - (defined(__x86_64__) || (defined(__i386__) && !defined(_MSC_VER))) +#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__)) // Offsets for source bytes 0 to 9 static const uvec8 kShuf0 = {0, 1, 3, 4, 5, 7, 8, 9, @@ -196,9 +195,7 @@ void ScaleRowDown2_AVX2(const uint8_t* src_ptr, uint8_t* dst_ptr, int dst_width) { (void)src_stride; - asm volatile( - - LABELALIGN + asm volatile(LABELALIGN "1: \n" "vmovdqu (%0),%%ymm0 \n" "vmovdqu 0x20(%0),%%ymm1 \n" @@ -212,11 +209,11 @@ void ScaleRowDown2_AVX2(const uint8_t* src_ptr, "sub $0x20,%2 \n" "jg 1b \n" "vzeroupper \n" - : "+r"(src_ptr), // %0 - "+r"(dst_ptr), // %1 - "+r"(dst_width) // %2 - ::"memory", - "cc", "xmm0", "xmm1"); + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + ::"memory", + "cc", "xmm0", "xmm1"); } void ScaleRowDown2Linear_AVX2(const uint8_t* src_ptr, @@ -484,9 +481,7 @@ void ScaleRowDown34_SSSE3(const uint8_t* src_ptr, "m"(kShuf1), // %1 "m"(kShuf2) // %2 ); - asm volatile( - - LABELALIGN + asm volatile(LABELALIGN "1: \n" "movdqu (%0),%%xmm0 \n" "movdqu 0x10(%0),%%xmm2 \n" @@ -502,11 +497,11 @@ void ScaleRowDown34_SSSE3(const uint8_t* src_ptr, "lea 0x18(%1),%1 \n" "sub $0x18,%2 \n" "jg 1b \n" - : "+r"(src_ptr), // %0 - "+r"(dst_ptr), // %1 - "+r"(dst_width) // %2 - ::"memory", - "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"); + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + ::"memory", + "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"); } void ScaleRowDown34_1_Box_SSSE3(const uint8_t* src_ptr, @@ -531,9 +526,7 @@ void ScaleRowDown34_1_Box_SSSE3(const uint8_t* src_ptr, "m"(kMadd11), // %1 "m"(kRound34) // %2 ); - asm volatile( - - LABELALIGN + asm volatile(LABELALIGN "1: \n" "movdqu (%0),%%xmm6 \n" "movdqu 0x00(%0,%3,1),%%xmm7 \n" @@ -566,13 +559,13 @@ void ScaleRowDown34_1_Box_SSSE3(const uint8_t* src_ptr, "lea 0x18(%1),%1 \n" "sub $0x18,%2 \n" "jg 1b \n" - : "+r"(src_ptr), // %0 - "+r"(dst_ptr), // %1 - "+r"(dst_width) // %2 - : "r"((intptr_t)(src_stride)), // %3 - "m"(kMadd21) // %4 - : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", - "xmm7"); + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)), // %3 + "m"(kMadd21) // %4 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", + "xmm6", "xmm7"); } void ScaleRowDown34_0_Box_SSSE3(const uint8_t* src_ptr, @@ -598,9 +591,7 @@ void ScaleRowDown34_0_Box_SSSE3(const uint8_t* src_ptr, "m"(kRound34) // %2 ); - asm volatile( - - LABELALIGN + asm volatile(LABELALIGN "1: \n" "movdqu (%0),%%xmm6 \n" "movdqu 0x00(%0,%3,1),%%xmm7 \n" @@ -636,13 +627,13 @@ void ScaleRowDown34_0_Box_SSSE3(const uint8_t* src_ptr, "lea 0x18(%1),%1 \n" "sub $0x18,%2 \n" "jg 1b \n" - : "+r"(src_ptr), // %0 - "+r"(dst_ptr), // %1 - "+r"(dst_width) // %2 - : "r"((intptr_t)(src_stride)), // %3 - "m"(kMadd21) // %4 - : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", - "xmm7"); + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)), // %3 + "m"(kMadd21) // %4 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", + "xmm6", "xmm7"); } void ScaleRowDown38_SSSE3(const uint8_t* src_ptr, @@ -691,9 +682,7 @@ void ScaleRowDown38_2_Box_SSSE3(const uint8_t* src_ptr, "m"(kShufAb2), // %2 "m"(kScaleAb2) // %3 ); - asm volatile( - - LABELALIGN + asm volatile(LABELALIGN "1: \n" "movdqu (%0),%%xmm0 \n" "movdqu 0x00(%0,%3,1),%%xmm1 \n" @@ 
-714,11 +703,12 @@ void ScaleRowDown38_2_Box_SSSE3(const uint8_t* src_ptr, "lea 0x6(%1),%1 \n" "sub $0x6,%2 \n" "jg 1b \n" - : "+r"(src_ptr), // %0 - "+r"(dst_ptr), // %1 - "+r"(dst_width) // %2 - : "r"((intptr_t)(src_stride)) // %3 - : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6"); + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)) // %3 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", + "xmm6"); } void ScaleRowDown38_3_Box_SSSE3(const uint8_t* src_ptr, @@ -735,9 +725,7 @@ void ScaleRowDown38_3_Box_SSSE3(const uint8_t* src_ptr, "m"(kShufAc3), // %1 "m"(kScaleAc33) // %2 ); - asm volatile( - - LABELALIGN + asm volatile(LABELALIGN "1: \n" "movdqu (%0),%%xmm0 \n" "movdqu 0x00(%0,%3,1),%%xmm6 \n" @@ -751,50 +739,1034 @@ void ScaleRowDown38_3_Box_SSSE3(const uint8_t* src_ptr, "paddusw %%xmm7,%%xmm1 \n" "movdqu 0x00(%0,%3,2),%%xmm6 \n" "lea 0x10(%0),%0 \n" - "movhlps %%xmm6,%%xmm7 \n" - "punpcklbw %%xmm5,%%xmm6 \n" - "punpcklbw %%xmm5,%%xmm7 \n" - "paddusw %%xmm6,%%xmm0 \n" - "paddusw %%xmm7,%%xmm1 \n" - "movdqa %%xmm0,%%xmm6 \n" - "psrldq $0x2,%%xmm0 \n" - "paddusw %%xmm0,%%xmm6 \n" - "psrldq $0x2,%%xmm0 \n" - "paddusw %%xmm0,%%xmm6 \n" - "pshufb %%xmm2,%%xmm6 \n" - "movdqa %%xmm1,%%xmm7 \n" - "psrldq $0x2,%%xmm1 \n" - "paddusw %%xmm1,%%xmm7 \n" - "psrldq $0x2,%%xmm1 \n" - "paddusw %%xmm1,%%xmm7 \n" - "pshufb %%xmm3,%%xmm7 \n" - "paddusw %%xmm7,%%xmm6 \n" - "pmulhuw %%xmm4,%%xmm6 \n" - "packuswb %%xmm6,%%xmm6 \n" - "movd %%xmm6,(%1) \n" - "psrlq $0x10,%%xmm6 \n" - "movd %%xmm6,0x2(%1) \n" - "lea 0x6(%1),%1 \n" - "sub $0x6,%2 \n" + "movhlps %%xmm6,%%xmm7 \n" + "punpcklbw %%xmm5,%%xmm6 \n" + "punpcklbw %%xmm5,%%xmm7 \n" + "paddusw %%xmm6,%%xmm0 \n" + "paddusw %%xmm7,%%xmm1 \n" + "movdqa %%xmm0,%%xmm6 \n" + "psrldq $0x2,%%xmm0 \n" + "paddusw %%xmm0,%%xmm6 \n" + "psrldq $0x2,%%xmm0 \n" + "paddusw %%xmm0,%%xmm6 \n" + "pshufb %%xmm2,%%xmm6 \n" + "movdqa %%xmm1,%%xmm7 \n" + "psrldq $0x2,%%xmm1 \n" + "paddusw %%xmm1,%%xmm7 \n" + "psrldq $0x2,%%xmm1 \n" + "paddusw %%xmm1,%%xmm7 \n" + "pshufb %%xmm3,%%xmm7 \n" + "paddusw %%xmm7,%%xmm6 \n" + "pmulhuw %%xmm4,%%xmm6 \n" + "packuswb %%xmm6,%%xmm6 \n" + "movd %%xmm6,(%1) \n" + "psrlq $0x10,%%xmm6 \n" + "movd %%xmm6,0x2(%1) \n" + "lea 0x6(%1),%1 \n" + "sub $0x6,%2 \n" + "jg 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)) // %3 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", + "xmm6", "xmm7"); +} + +static const uvec8 kLinearShuffleFar = {2, 3, 0, 1, 6, 7, 4, 5, + 10, 11, 8, 9, 14, 15, 12, 13}; + +static const uvec8 kLinearMadd31 = {3, 1, 1, 3, 3, 1, 1, 3, + 3, 1, 1, 3, 3, 1, 1, 3}; + +#ifdef HAS_SCALEROWUP2_LINEAR_SSE2 +void ScaleRowUp2_Linear_SSE2(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width) { + asm volatile( + "pxor %%xmm0,%%xmm0 \n" // 0 + "pcmpeqw %%xmm6,%%xmm6 \n" + "psrlw $15,%%xmm6 \n" + "psllw $1,%%xmm6 \n" // all 2 + + LABELALIGN + "1: \n" + "movq (%0),%%xmm1 \n" // 01234567 + "movq 1(%0),%%xmm2 \n" // 12345678 + "movdqa %%xmm1,%%xmm3 \n" + "punpcklbw %%xmm2,%%xmm3 \n" // 0112233445566778 + "punpcklbw %%xmm1,%%xmm1 \n" // 0011223344556677 + "punpcklbw %%xmm2,%%xmm2 \n" // 1122334455667788 + "movdqa %%xmm1,%%xmm4 \n" + "punpcklbw %%xmm0,%%xmm4 \n" // 00112233 (16) + "movdqa %%xmm2,%%xmm5 \n" + "punpcklbw %%xmm0,%%xmm5 \n" // 11223344 (16) + "paddw %%xmm5,%%xmm4 \n" + "movdqa %%xmm3,%%xmm5 \n" + "paddw %%xmm6,%%xmm4 \n" + "punpcklbw %%xmm0,%%xmm5 \n" // 
01122334 (16) + "paddw %%xmm5,%%xmm5 \n" + "paddw %%xmm4,%%xmm5 \n" // 3*near+far+2 (lo) + "psrlw $2,%%xmm5 \n" // 3/4*near+1/4*far (lo) + + "punpckhbw %%xmm0,%%xmm1 \n" // 44556677 (16) + "punpckhbw %%xmm0,%%xmm2 \n" // 55667788 (16) + "paddw %%xmm2,%%xmm1 \n" + "punpckhbw %%xmm0,%%xmm3 \n" // 45566778 (16) + "paddw %%xmm6,%%xmm1 \n" + "paddw %%xmm3,%%xmm3 \n" + "paddw %%xmm3,%%xmm1 \n" // 3*near+far+2 (hi) + "psrlw $2,%%xmm1 \n" // 3/4*near+1/4*far (hi) + + "packuswb %%xmm1,%%xmm5 \n" + "movdqu %%xmm5,(%1) \n" + + "lea 0x8(%0),%0 \n" + "lea 0x10(%1),%1 \n" // 8 sample to 16 sample + "sub $0x10,%2 \n" + "jg 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6"); +} +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_SSE2 +void ScaleRowUp2_Bilinear_SSE2(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + asm volatile( + LABELALIGN + "1: \n" + "pxor %%xmm0,%%xmm0 \n" // 0 + // above line + "movq (%0),%%xmm1 \n" // 01234567 + "movq 1(%0),%%xmm2 \n" // 12345678 + "movdqa %%xmm1,%%xmm3 \n" + "punpcklbw %%xmm2,%%xmm3 \n" // 0112233445566778 + "punpcklbw %%xmm1,%%xmm1 \n" // 0011223344556677 + "punpcklbw %%xmm2,%%xmm2 \n" // 1122334455667788 + + "movdqa %%xmm1,%%xmm4 \n" + "punpcklbw %%xmm0,%%xmm4 \n" // 00112233 (16) + "movdqa %%xmm2,%%xmm5 \n" + "punpcklbw %%xmm0,%%xmm5 \n" // 11223344 (16) + "paddw %%xmm5,%%xmm4 \n" // near+far + "movdqa %%xmm3,%%xmm5 \n" + "punpcklbw %%xmm0,%%xmm5 \n" // 01122334 (16) + "paddw %%xmm5,%%xmm5 \n" // 2*near + "paddw %%xmm5,%%xmm4 \n" // 3*near+far (1, lo) + + "punpckhbw %%xmm0,%%xmm1 \n" // 44556677 (16) + "punpckhbw %%xmm0,%%xmm2 \n" // 55667788 (16) + "paddw %%xmm2,%%xmm1 \n" + "punpckhbw %%xmm0,%%xmm3 \n" // 45566778 (16) + "paddw %%xmm3,%%xmm3 \n" // 2*near + "paddw %%xmm3,%%xmm1 \n" // 3*near+far (1, hi) + + // below line + "movq (%0,%3),%%xmm6 \n" // 01234567 + "movq 1(%0,%3),%%xmm2 \n" // 12345678 + "movdqa %%xmm6,%%xmm3 \n" + "punpcklbw %%xmm2,%%xmm3 \n" // 0112233445566778 + "punpcklbw %%xmm6,%%xmm6 \n" // 0011223344556677 + "punpcklbw %%xmm2,%%xmm2 \n" // 1122334455667788 + + "movdqa %%xmm6,%%xmm5 \n" + "punpcklbw %%xmm0,%%xmm5 \n" // 00112233 (16) + "movdqa %%xmm2,%%xmm7 \n" + "punpcklbw %%xmm0,%%xmm7 \n" // 11223344 (16) + "paddw %%xmm7,%%xmm5 \n" // near+far + "movdqa %%xmm3,%%xmm7 \n" + "punpcklbw %%xmm0,%%xmm7 \n" // 01122334 (16) + "paddw %%xmm7,%%xmm7 \n" // 2*near + "paddw %%xmm7,%%xmm5 \n" // 3*near+far (2, lo) + + "punpckhbw %%xmm0,%%xmm6 \n" // 44556677 (16) + "punpckhbw %%xmm0,%%xmm2 \n" // 55667788 (16) + "paddw %%xmm6,%%xmm2 \n" // near+far + "punpckhbw %%xmm0,%%xmm3 \n" // 45566778 (16) + "paddw %%xmm3,%%xmm3 \n" // 2*near + "paddw %%xmm3,%%xmm2 \n" // 3*near+far (2, hi) + + // xmm4 xmm1 + // xmm5 xmm2 + "pcmpeqw %%xmm0,%%xmm0 \n" + "psrlw $15,%%xmm0 \n" + "psllw $3,%%xmm0 \n" // all 8 + + "movdqa %%xmm4,%%xmm3 \n" + "movdqa %%xmm5,%%xmm6 \n" + "paddw %%xmm3,%%xmm3 \n" // 6*near+2*far (1, lo) + "paddw %%xmm0,%%xmm6 \n" // 3*near+far+8 (2, lo) + "paddw %%xmm4,%%xmm3 \n" // 9*near+3*far (1, lo) + "paddw %%xmm6,%%xmm3 \n" // 9 3 3 1 + 8 (1, lo) + "psrlw $4,%%xmm3 \n" // ^ div by 16 + + "movdqa %%xmm1,%%xmm7 \n" + "movdqa %%xmm2,%%xmm6 \n" + "paddw %%xmm7,%%xmm7 \n" // 6*near+2*far (1, hi) + "paddw %%xmm0,%%xmm6 \n" // 3*near+far+8 (2, hi) + "paddw %%xmm1,%%xmm7 \n" // 9*near+3*far (1, hi) + "paddw %%xmm6,%%xmm7 \n" // 9 3 3 1 + 8 (1, hi) + "psrlw $4,%%xmm7 \n" // ^ div by 16 + + 
"packuswb %%xmm7,%%xmm3 \n" + "movdqu %%xmm3,(%1) \n" // save above line + + "movdqa %%xmm5,%%xmm3 \n" + "paddw %%xmm0,%%xmm4 \n" // 3*near+far+8 (1, lo) + "paddw %%xmm3,%%xmm3 \n" // 6*near+2*far (2, lo) + "paddw %%xmm3,%%xmm5 \n" // 9*near+3*far (2, lo) + "paddw %%xmm4,%%xmm5 \n" // 9 3 3 1 + 8 (lo) + "psrlw $4,%%xmm5 \n" // ^ div by 16 + + "movdqa %%xmm2,%%xmm3 \n" + "paddw %%xmm0,%%xmm1 \n" // 3*near+far+8 (1, hi) + "paddw %%xmm3,%%xmm3 \n" // 6*near+2*far (2, hi) + "paddw %%xmm3,%%xmm2 \n" // 9*near+3*far (2, hi) + "paddw %%xmm1,%%xmm2 \n" // 9 3 3 1 + 8 (hi) + "psrlw $4,%%xmm2 \n" // ^ div by 16 + + "packuswb %%xmm2,%%xmm5 \n" + "movdqu %%xmm5,(%1,%4) \n" // save below line + + "lea 0x8(%0),%0 \n" + "lea 0x10(%1),%1 \n" // 8 sample to 16 sample + "sub $0x10,%2 \n" + "jg 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)), // %3 + "r"((intptr_t)(dst_stride)) // %4 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", + "xmm7"); +} +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_12_SSSE3 +void ScaleRowUp2_Linear_12_SSSE3(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width) { + asm volatile( + "movdqa %3,%%xmm5 \n" + "pcmpeqw %%xmm4,%%xmm4 \n" + "psrlw $15,%%xmm4 \n" + "psllw $1,%%xmm4 \n" // all 2 + + LABELALIGN + "1: \n" + "movdqu (%0),%%xmm0 \n" // 01234567 (16) + "movdqu 2(%0),%%xmm1 \n" // 12345678 (16) + + "movdqa %%xmm0,%%xmm2 \n" + "punpckhwd %%xmm1,%%xmm2 \n" // 45566778 (16) + "punpcklwd %%xmm1,%%xmm0 \n" // 01122334 (16) + + "movdqa %%xmm2,%%xmm3 \n" + "movdqa %%xmm0,%%xmm1 \n" + "pshufb %%xmm5,%%xmm3 \n" // 54657687 (far) + "pshufb %%xmm5,%%xmm1 \n" // 10213243 (far) + + "paddw %%xmm4,%%xmm1 \n" // far+2 + "paddw %%xmm4,%%xmm3 \n" // far+2 + "paddw %%xmm0,%%xmm1 \n" // near+far+2 + "paddw %%xmm2,%%xmm3 \n" // near+far+2 + "paddw %%xmm0,%%xmm0 \n" // 2*near + "paddw %%xmm2,%%xmm2 \n" // 2*near + "paddw %%xmm1,%%xmm0 \n" // 3*near+far+2 (lo) + "paddw %%xmm3,%%xmm2 \n" // 3*near+far+2 (hi) + + "psrlw $2,%%xmm0 \n" // 3/4*near+1/4*far + "psrlw $2,%%xmm2 \n" // 3/4*near+1/4*far + "movdqu %%xmm0,(%1) \n" + "movdqu %%xmm2,16(%1) \n" + + "lea 0x10(%0),%0 \n" + "lea 0x20(%1),%1 \n" // 8 sample to 16 sample + "sub $0x10,%2 \n" + "jg 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "m"(kLinearShuffleFar) // %3 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"); +} +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_12_SSSE3 +void ScaleRowUp2_Bilinear_12_SSSE3(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + asm volatile( + "pcmpeqw %%xmm7,%%xmm7 \n" + "psrlw $15,%%xmm7 \n" + "psllw $3,%%xmm7 \n" // all 8 + "movdqa %5,%%xmm6 \n" + + LABELALIGN + "1: \n" + // above line + "movdqu (%0),%%xmm0 \n" // 01234567 (16) + "movdqu 2(%0),%%xmm1 \n" // 12345678 (16) + "movdqa %%xmm0,%%xmm2 \n" + "punpckhwd %%xmm1,%%xmm2 \n" // 45566778 (16) + "punpcklwd %%xmm1,%%xmm0 \n" // 01122334 (16) + "movdqa %%xmm2,%%xmm3 \n" + "movdqa %%xmm0,%%xmm1 \n" + "pshufb %%xmm6,%%xmm3 \n" // 54657687 (far) + "pshufb %%xmm6,%%xmm1 \n" // 10213243 (far) + "paddw %%xmm0,%%xmm1 \n" // near+far + "paddw %%xmm2,%%xmm3 \n" // near+far + "paddw %%xmm0,%%xmm0 \n" // 2*near + "paddw %%xmm2,%%xmm2 \n" // 2*near + "paddw %%xmm1,%%xmm0 \n" // 3*near+far (1, lo) + "paddw %%xmm3,%%xmm2 \n" // 3*near+far (1, hi) + + // below line + "movdqu (%0,%3,2),%%xmm1 \n" // 01234567 (16) + "movdqu 2(%0,%3,2),%%xmm4 \n" // 12345678 (16) + "movdqa 
%%xmm1,%%xmm3 \n" + "punpckhwd %%xmm4,%%xmm3 \n" // 45566778 (16) + "punpcklwd %%xmm4,%%xmm1 \n" // 01122334 (16) + "movdqa %%xmm3,%%xmm5 \n" + "movdqa %%xmm1,%%xmm4 \n" + "pshufb %%xmm6,%%xmm5 \n" // 54657687 (far) + "pshufb %%xmm6,%%xmm4 \n" // 10213243 (far) + "paddw %%xmm1,%%xmm4 \n" // near+far + "paddw %%xmm3,%%xmm5 \n" // near+far + "paddw %%xmm1,%%xmm1 \n" // 2*near + "paddw %%xmm3,%%xmm3 \n" // 2*near + "paddw %%xmm4,%%xmm1 \n" // 3*near+far (2, lo) + "paddw %%xmm5,%%xmm3 \n" // 3*near+far (2, hi) + + // xmm0 xmm2 + // xmm1 xmm3 + + "movdqa %%xmm0,%%xmm4 \n" + "movdqa %%xmm1,%%xmm5 \n" + "paddw %%xmm4,%%xmm4 \n" // 6*near+2*far (1, lo) + "paddw %%xmm7,%%xmm5 \n" // 3*near+far+8 (2, lo) + "paddw %%xmm0,%%xmm4 \n" // 9*near+3*far (1, lo) + "paddw %%xmm5,%%xmm4 \n" // 9 3 3 1 + 8 (1, lo) + "psrlw $4,%%xmm4 \n" // ^ div by 16 + "movdqu %%xmm4,(%1) \n" + + "movdqa %%xmm2,%%xmm4 \n" + "movdqa %%xmm3,%%xmm5 \n" + "paddw %%xmm4,%%xmm4 \n" // 6*near+2*far (1, hi) + "paddw %%xmm7,%%xmm5 \n" // 3*near+far+8 (2, hi) + "paddw %%xmm2,%%xmm4 \n" // 9*near+3*far (1, hi) + "paddw %%xmm5,%%xmm4 \n" // 9 3 3 1 + 8 (1, hi) + "psrlw $4,%%xmm4 \n" // ^ div by 16 + "movdqu %%xmm4,0x10(%1) \n" + + "movdqa %%xmm1,%%xmm4 \n" + "paddw %%xmm7,%%xmm0 \n" // 3*near+far+8 (1, lo) + "paddw %%xmm4,%%xmm4 \n" // 6*near+2*far (2, lo) + "paddw %%xmm4,%%xmm1 \n" // 9*near+3*far (2, lo) + "paddw %%xmm0,%%xmm1 \n" // 9 3 3 1 + 8 (2, lo) + "psrlw $4,%%xmm1 \n" // ^ div by 16 + "movdqu %%xmm1,(%1,%4,2) \n" + + "movdqa %%xmm3,%%xmm4 \n" + "paddw %%xmm7,%%xmm2 \n" // 3*near+far+8 (1, hi) + "paddw %%xmm4,%%xmm4 \n" // 6*near+2*far (2, hi) + "paddw %%xmm4,%%xmm3 \n" // 9*near+3*far (2, hi) + "paddw %%xmm2,%%xmm3 \n" // 9 3 3 1 + 8 (2, hi) + "psrlw $4,%%xmm3 \n" // ^ div by 16 + "movdqu %%xmm3,0x10(%1,%4,2) \n" + + "lea 0x10(%0),%0 \n" + "lea 0x20(%1),%1 \n" // 8 sample to 16 sample + "sub $0x10,%2 \n" + "jg 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)), // %3 + "r"((intptr_t)(dst_stride)), // %4 + "m"(kLinearShuffleFar) // %5 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", + "xmm7"); +} +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_16_SSE2 +void ScaleRowUp2_Linear_16_SSE2(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width) { + asm volatile( + "pxor %%xmm5,%%xmm5 \n" + "pcmpeqd %%xmm4,%%xmm4 \n" + "psrld $31,%%xmm4 \n" + "pslld $1,%%xmm4 \n" // all 2 + + LABELALIGN + "1: \n" + "movq (%0),%%xmm0 \n" // 0123 (16b) + "movq 2(%0),%%xmm1 \n" // 1234 (16b) + + "punpcklwd %%xmm5,%%xmm0 \n" // 0123 (32b) + "punpcklwd %%xmm5,%%xmm1 \n" // 1234 (32b) + + "movdqa %%xmm0,%%xmm2 \n" + "movdqa %%xmm1,%%xmm3 \n" + + "pshufd $0b10110001,%%xmm2,%%xmm2 \n" // 1032 (even, far) + "pshufd $0b10110001,%%xmm3,%%xmm3 \n" // 2143 (odd, far) + + "paddd %%xmm4,%%xmm2 \n" // far+2 (lo) + "paddd %%xmm4,%%xmm3 \n" // far+2 (hi) + "paddd %%xmm0,%%xmm2 \n" // near+far+2 (lo) + "paddd %%xmm1,%%xmm3 \n" // near+far+2 (hi) + "paddd %%xmm0,%%xmm0 \n" // 2*near (lo) + "paddd %%xmm1,%%xmm1 \n" // 2*near (hi) + "paddd %%xmm2,%%xmm0 \n" // 3*near+far+2 (lo) + "paddd %%xmm3,%%xmm1 \n" // 3*near+far+2 (hi) + + "psrld $2,%%xmm0 \n" // 3/4*near+1/4*far (lo) + "psrld $2,%%xmm1 \n" // 3/4*near+1/4*far (hi) + "packssdw %%xmm1,%%xmm0 \n" + "pshufd $0b11011000,%%xmm0,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + + "lea 0x8(%0),%0 \n" + "lea 0x10(%1),%1 \n" // 4 pixel to 8 pixel + "sub $0x8,%2 \n" + "jg 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : 
+ : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"); +} +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_16_SSE2 +void ScaleRowUp2_Bilinear_16_SSE2(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + asm volatile( + "pxor %%xmm7,%%xmm7 \n" + "pcmpeqd %%xmm6,%%xmm6 \n" + "psrld $31,%%xmm6 \n" + "pslld $3,%%xmm6 \n" // all 8 + + LABELALIGN + "1: \n" + "movq (%0),%%xmm0 \n" // 0011 (16b, 1u1v) + "movq 4(%0),%%xmm1 \n" // 1122 (16b, 1u1v) + "punpcklwd %%xmm7,%%xmm0 \n" // 0011 (near) (32b, 1u1v) + "punpcklwd %%xmm7,%%xmm1 \n" // 1122 (near) (32b, 1u1v) + "movdqa %%xmm0,%%xmm2 \n" + "movdqa %%xmm1,%%xmm3 \n" + "pshufd $0b01001110,%%xmm2,%%xmm2 \n" // 1100 (far) (1, lo) + "pshufd $0b01001110,%%xmm3,%%xmm3 \n" // 2211 (far) (1, hi) + "paddd %%xmm0,%%xmm2 \n" // near+far (1, lo) + "paddd %%xmm1,%%xmm3 \n" // near+far (1, hi) + "paddd %%xmm0,%%xmm0 \n" // 2*near (1, lo) + "paddd %%xmm1,%%xmm1 \n" // 2*near (1, hi) + "paddd %%xmm2,%%xmm0 \n" // 3*near+far (1, lo) + "paddd %%xmm3,%%xmm1 \n" // 3*near+far (1, hi) + + "movq (%0),%%xmm0 \n" // 0123 (16b) + "movq 2(%0),%%xmm1 \n" // 1234 (16b) + "punpcklwd %%xmm7,%%xmm0 \n" // 0123 (32b) + "punpcklwd %%xmm7,%%xmm1 \n" // 1234 (32b) + "movdqa %%xmm0,%%xmm2 \n" + "movdqa %%xmm1,%%xmm3 \n" + "pshufd $0b10110001,%%xmm2,%%xmm2 \n" // 1032 (even, far) + "pshufd $0b10110001,%%xmm3,%%xmm3 \n" // 2143 (odd, far) + "paddd %%xmm0,%%xmm2 \n" // near+far (lo) + "paddd %%xmm1,%%xmm3 \n" // near+far (hi) + "paddd %%xmm0,%%xmm0 \n" // 2*near (lo) + "paddd %%xmm1,%%xmm1 \n" // 2*near (hi) + "paddd %%xmm2,%%xmm0 \n" // 3*near+far (1, lo) + "paddd %%xmm3,%%xmm1 \n" // 3*near+far (1, hi) + + "movq (%0,%3,2),%%xmm2 \n" + "movq 2(%0,%3,2),%%xmm3 \n" + "punpcklwd %%xmm7,%%xmm2 \n" // 0123 (32b) + "punpcklwd %%xmm7,%%xmm3 \n" // 1234 (32b) + "movdqa %%xmm2,%%xmm4 \n" + "movdqa %%xmm3,%%xmm5 \n" + "pshufd $0b10110001,%%xmm4,%%xmm4 \n" // 1032 (even, far) + "pshufd $0b10110001,%%xmm5,%%xmm5 \n" // 2143 (odd, far) + "paddd %%xmm2,%%xmm4 \n" // near+far (lo) + "paddd %%xmm3,%%xmm5 \n" // near+far (hi) + "paddd %%xmm2,%%xmm2 \n" // 2*near (lo) + "paddd %%xmm3,%%xmm3 \n" // 2*near (hi) + "paddd %%xmm4,%%xmm2 \n" // 3*near+far (2, lo) + "paddd %%xmm5,%%xmm3 \n" // 3*near+far (2, hi) + + "movdqa %%xmm0,%%xmm4 \n" + "movdqa %%xmm2,%%xmm5 \n" + "paddd %%xmm0,%%xmm4 \n" // 6*near+2*far (1, lo) + "paddd %%xmm6,%%xmm5 \n" // 3*near+far+8 (2, lo) + "paddd %%xmm0,%%xmm4 \n" // 9*near+3*far (1, lo) + "paddd %%xmm5,%%xmm4 \n" // 9 3 3 1 + 8 (1, lo) + "psrld $4,%%xmm4 \n" // ^ div by 16 (1, lo) + + "movdqa %%xmm2,%%xmm5 \n" + "paddd %%xmm2,%%xmm5 \n" // 6*near+2*far (2, lo) + "paddd %%xmm6,%%xmm0 \n" // 3*near+far+8 (1, lo) + "paddd %%xmm2,%%xmm5 \n" // 9*near+3*far (2, lo) + "paddd %%xmm0,%%xmm5 \n" // 9 3 3 1 + 8 (2, lo) + "psrld $4,%%xmm5 \n" // ^ div by 16 (2, lo) + + "movdqa %%xmm1,%%xmm0 \n" + "movdqa %%xmm3,%%xmm2 \n" + "paddd %%xmm1,%%xmm0 \n" // 6*near+2*far (1, hi) + "paddd %%xmm6,%%xmm2 \n" // 3*near+far+8 (2, hi) + "paddd %%xmm1,%%xmm0 \n" // 9*near+3*far (1, hi) + "paddd %%xmm2,%%xmm0 \n" // 9 3 3 1 + 8 (1, hi) + "psrld $4,%%xmm0 \n" // ^ div by 16 (1, hi) + + "movdqa %%xmm3,%%xmm2 \n" + "paddd %%xmm3,%%xmm2 \n" // 6*near+2*far (2, hi) + "paddd %%xmm6,%%xmm1 \n" // 3*near+far+8 (1, hi) + "paddd %%xmm3,%%xmm2 \n" // 9*near+3*far (2, hi) + "paddd %%xmm1,%%xmm2 \n" // 9 3 3 1 + 8 (2, hi) + "psrld $4,%%xmm2 \n" // ^ div by 16 (2, hi) + + "packssdw %%xmm0,%%xmm4 \n" + "pshufd $0b11011000,%%xmm4,%%xmm4 \n" + "movdqu 
%%xmm4,(%1) \n" // store above + "packssdw %%xmm2,%%xmm5 \n" + "pshufd $0b11011000,%%xmm5,%%xmm5 \n" + "movdqu %%xmm5,(%1,%4,2) \n" // store below + + "lea 0x8(%0),%0 \n" + "lea 0x10(%1),%1 \n" // 4 pixel to 8 pixel + "sub $0x8,%2 \n" + "jg 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)), // %3 + "r"((intptr_t)(dst_stride)) // %4 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", + "xmm7"); +} +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_SSSE3 +void ScaleRowUp2_Linear_SSSE3(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width) { + asm volatile( + "pcmpeqw %%xmm4,%%xmm4 \n" + "psrlw $15,%%xmm4 \n" + "psllw $1,%%xmm4 \n" // all 2 + "movdqa %3,%%xmm3 \n" + + LABELALIGN + "1: \n" + "movq (%0),%%xmm0 \n" // 01234567 + "movq 1(%0),%%xmm1 \n" // 12345678 + "punpcklwd %%xmm0,%%xmm0 \n" // 0101232345456767 + "punpcklwd %%xmm1,%%xmm1 \n" // 1212343456567878 + "movdqa %%xmm0,%%xmm2 \n" + "punpckhdq %%xmm1,%%xmm2 \n" // 4545565667677878 + "punpckldq %%xmm1,%%xmm0 \n" // 0101121223233434 + "pmaddubsw %%xmm3,%%xmm2 \n" // 3*near+far (hi) + "pmaddubsw %%xmm3,%%xmm0 \n" // 3*near+far (lo) + "paddw %%xmm4,%%xmm0 \n" // 3*near+far+2 (lo) + "paddw %%xmm4,%%xmm2 \n" // 3*near+far+2 (hi) + "psrlw $2,%%xmm0 \n" // 3/4*near+1/4*far (lo) + "psrlw $2,%%xmm2 \n" // 3/4*near+1/4*far (hi) + "packuswb %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + "lea 0x8(%0),%0 \n" + "lea 0x10(%1),%1 \n" // 8 sample to 16 sample + "sub $0x10,%2 \n" + "jg 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "m"(kLinearMadd31) // %3 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4"); +} +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_SSSE3 +void ScaleRowUp2_Bilinear_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + asm volatile( + "pcmpeqw %%xmm6,%%xmm6 \n" + "psrlw $15,%%xmm6 \n" + "psllw $3,%%xmm6 \n" // all 8 + "movdqa %5,%%xmm7 \n" + + LABELALIGN + "1: \n" + "movq (%0),%%xmm0 \n" // 01234567 + "movq 1(%0),%%xmm1 \n" // 12345678 + "punpcklwd %%xmm0,%%xmm0 \n" // 0101232345456767 + "punpcklwd %%xmm1,%%xmm1 \n" // 1212343456567878 + "movdqa %%xmm0,%%xmm2 \n" + "punpckhdq %%xmm1,%%xmm2 \n" // 4545565667677878 + "punpckldq %%xmm1,%%xmm0 \n" // 0101121223233434 + "pmaddubsw %%xmm7,%%xmm2 \n" // 3*near+far (1, hi) + "pmaddubsw %%xmm7,%%xmm0 \n" // 3*near+far (1, lo) + + "movq (%0,%3),%%xmm1 \n" + "movq 1(%0,%3),%%xmm4 \n" + "punpcklwd %%xmm1,%%xmm1 \n" + "punpcklwd %%xmm4,%%xmm4 \n" + "movdqa %%xmm1,%%xmm3 \n" + "punpckhdq %%xmm4,%%xmm3 \n" + "punpckldq %%xmm4,%%xmm1 \n" + "pmaddubsw %%xmm7,%%xmm3 \n" // 3*near+far (2, hi) + "pmaddubsw %%xmm7,%%xmm1 \n" // 3*near+far (2, lo) + + // xmm0 xmm2 + // xmm1 xmm3 + + "movdqa %%xmm0,%%xmm4 \n" + "movdqa %%xmm1,%%xmm5 \n" + "paddw %%xmm0,%%xmm4 \n" // 6*near+2*far (1, lo) + "paddw %%xmm6,%%xmm5 \n" // 3*near+far+8 (2, lo) + "paddw %%xmm0,%%xmm4 \n" // 9*near+3*far (1, lo) + "paddw %%xmm5,%%xmm4 \n" // 9 3 3 1 + 8 (1, lo) + "psrlw $4,%%xmm4 \n" // ^ div by 16 (1, lo) + + "movdqa %%xmm1,%%xmm5 \n" + "paddw %%xmm1,%%xmm5 \n" // 6*near+2*far (2, lo) + "paddw %%xmm6,%%xmm0 \n" // 3*near+far+8 (1, lo) + "paddw %%xmm1,%%xmm5 \n" // 9*near+3*far (2, lo) + "paddw %%xmm0,%%xmm5 \n" // 9 3 3 1 + 8 (2, lo) + "psrlw $4,%%xmm5 \n" // ^ div by 16 (2, lo) + + "movdqa %%xmm2,%%xmm0 \n" + "movdqa %%xmm3,%%xmm1 \n" + "paddw %%xmm2,%%xmm0 \n" // 6*near+2*far (1, hi) + "paddw %%xmm6,%%xmm1 \n" // 3*near+far+8 (2, hi) + "paddw 
%%xmm2,%%xmm0 \n" // 9*near+3*far (1, hi) + "paddw %%xmm1,%%xmm0 \n" // 9 3 3 1 + 8 (1, hi) + "psrlw $4,%%xmm0 \n" // ^ div by 16 (1, hi) + + "movdqa %%xmm3,%%xmm1 \n" + "paddw %%xmm3,%%xmm1 \n" // 6*near+2*far (2, hi) + "paddw %%xmm6,%%xmm2 \n" // 3*near+far+8 (1, hi) + "paddw %%xmm3,%%xmm1 \n" // 9*near+3*far (2, hi) + "paddw %%xmm2,%%xmm1 \n" // 9 3 3 1 + 8 (2, hi) + "psrlw $4,%%xmm1 \n" // ^ div by 16 (2, hi) + + "packuswb %%xmm0,%%xmm4 \n" + "movdqu %%xmm4,(%1) \n" // store above + "packuswb %%xmm1,%%xmm5 \n" + "movdqu %%xmm5,(%1,%4) \n" // store below + + "lea 0x8(%0),%0 \n" + "lea 0x10(%1),%1 \n" // 8 sample to 16 sample + "sub $0x10,%2 \n" + "jg 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)), // %3 + "r"((intptr_t)(dst_stride)), // %4 + "m"(kLinearMadd31) // %5 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", + "xmm7"); +} +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_AVX2 +void ScaleRowUp2_Linear_AVX2(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width) { + asm volatile( + "vpcmpeqw %%ymm4,%%ymm4,%%ymm4 \n" + "vpsrlw $15,%%ymm4,%%ymm4 \n" + "vpsllw $1,%%ymm4,%%ymm4 \n" // all 2 + "vbroadcastf128 %3,%%ymm3 \n" + + LABELALIGN + "1: \n" + "vmovdqu (%0),%%xmm0 \n" // 0123456789ABCDEF + "vmovdqu 1(%0),%%xmm1 \n" // 123456789ABCDEF0 + "vpermq $0b11011000,%%ymm0,%%ymm0 \n" + "vpermq $0b11011000,%%ymm1,%%ymm1 \n" + "vpunpcklwd %%ymm0,%%ymm0,%%ymm0 \n" + "vpunpcklwd %%ymm1,%%ymm1,%%ymm1 \n" + "vpunpckhdq %%ymm1,%%ymm0,%%ymm2 \n" + "vpunpckldq %%ymm1,%%ymm0,%%ymm0 \n" + "vpmaddubsw %%ymm3,%%ymm2,%%ymm1 \n" // 3*near+far (hi) + "vpmaddubsw %%ymm3,%%ymm0,%%ymm0 \n" // 3*near+far (lo) + "vpaddw %%ymm4,%%ymm0,%%ymm0 \n" // 3*near+far+2 (lo) + "vpaddw %%ymm4,%%ymm1,%%ymm1 \n" // 3*near+far+2 (hi) + "vpsrlw $2,%%ymm0,%%ymm0 \n" // 3/4*near+1/4*far (lo) + "vpsrlw $2,%%ymm1,%%ymm1 \n" // 3/4*near+1/4*far (hi) + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + + "lea 0x10(%0),%0 \n" + "lea 0x20(%1),%1 \n" // 16 sample to 32 sample + "sub $0x20,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "m"(kLinearMadd31) // %3 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4"); +} +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_AVX2 +void ScaleRowUp2_Bilinear_AVX2(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + asm volatile( + "vpcmpeqw %%ymm6,%%ymm6,%%ymm6 \n" + "vpsrlw $15,%%ymm6,%%ymm6 \n" + "vpsllw $3,%%ymm6,%%ymm6 \n" // all 8 + "vbroadcastf128 %5,%%ymm7 \n" + + LABELALIGN + "1: \n" + "vmovdqu (%0),%%xmm0 \n" // 0123456789ABCDEF + "vmovdqu 1(%0),%%xmm1 \n" // 123456789ABCDEF0 + "vpermq $0b11011000,%%ymm0,%%ymm0 \n" + "vpermq $0b11011000,%%ymm1,%%ymm1 \n" + "vpunpcklwd %%ymm0,%%ymm0,%%ymm0 \n" + "vpunpcklwd %%ymm1,%%ymm1,%%ymm1 \n" + "vpunpckhdq %%ymm1,%%ymm0,%%ymm2 \n" + "vpunpckldq %%ymm1,%%ymm0,%%ymm0 \n" + "vpmaddubsw %%ymm7,%%ymm2,%%ymm1 \n" // 3*near+far (1, hi) + "vpmaddubsw %%ymm7,%%ymm0,%%ymm0 \n" // 3*near+far (1, lo) + + "vmovdqu (%0,%3),%%xmm2 \n" // 0123456789ABCDEF + "vmovdqu 1(%0,%3),%%xmm3 \n" // 123456789ABCDEF0 + "vpermq $0b11011000,%%ymm2,%%ymm2 \n" + "vpermq $0b11011000,%%ymm3,%%ymm3 \n" + "vpunpcklwd %%ymm2,%%ymm2,%%ymm2 \n" + "vpunpcklwd %%ymm3,%%ymm3,%%ymm3 \n" + "vpunpckhdq %%ymm3,%%ymm2,%%ymm4 \n" + "vpunpckldq %%ymm3,%%ymm2,%%ymm2 \n" + "vpmaddubsw %%ymm7,%%ymm4,%%ymm3 \n" // 3*near+far (2, hi) + "vpmaddubsw %%ymm7,%%ymm2,%%ymm2 \n" // 
3*near+far (2, lo) + + // ymm0 ymm1 + // ymm2 ymm3 + + "vpaddw %%ymm0,%%ymm0,%%ymm4 \n" // 6*near+2*far (1, lo) + "vpaddw %%ymm6,%%ymm2,%%ymm5 \n" // 3*near+far+8 (2, lo) + "vpaddw %%ymm4,%%ymm0,%%ymm4 \n" // 9*near+3*far (1, lo) + "vpaddw %%ymm4,%%ymm5,%%ymm4 \n" // 9 3 3 1 + 8 (1, lo) + "vpsrlw $4,%%ymm4,%%ymm4 \n" // ^ div by 16 (1, lo) + + "vpaddw %%ymm2,%%ymm2,%%ymm5 \n" // 6*near+2*far (2, lo) + "vpaddw %%ymm6,%%ymm0,%%ymm0 \n" // 3*near+far+8 (1, lo) + "vpaddw %%ymm5,%%ymm2,%%ymm5 \n" // 9*near+3*far (2, lo) + "vpaddw %%ymm5,%%ymm0,%%ymm5 \n" // 9 3 3 1 + 8 (2, lo) + "vpsrlw $4,%%ymm5,%%ymm5 \n" // ^ div by 16 (2, lo) + + "vpaddw %%ymm1,%%ymm1,%%ymm0 \n" // 6*near+2*far (1, hi) + "vpaddw %%ymm6,%%ymm3,%%ymm2 \n" // 3*near+far+8 (2, hi) + "vpaddw %%ymm0,%%ymm1,%%ymm0 \n" // 9*near+3*far (1, hi) + "vpaddw %%ymm0,%%ymm2,%%ymm0 \n" // 9 3 3 1 + 8 (1, hi) + "vpsrlw $4,%%ymm0,%%ymm0 \n" // ^ div by 16 (1, hi) + + "vpaddw %%ymm3,%%ymm3,%%ymm2 \n" // 6*near+2*far (2, hi) + "vpaddw %%ymm6,%%ymm1,%%ymm1 \n" // 3*near+far+8 (1, hi) + "vpaddw %%ymm2,%%ymm3,%%ymm2 \n" // 9*near+3*far (2, hi) + "vpaddw %%ymm2,%%ymm1,%%ymm2 \n" // 9 3 3 1 + 8 (2, hi) + "vpsrlw $4,%%ymm2,%%ymm2 \n" // ^ div by 16 (2, hi) + + "vpackuswb %%ymm0,%%ymm4,%%ymm4 \n" + "vmovdqu %%ymm4,(%1) \n" // store above + "vpackuswb %%ymm2,%%ymm5,%%ymm5 \n" + "vmovdqu %%ymm5,(%1,%4) \n" // store below + + "lea 0x10(%0),%0 \n" + "lea 0x20(%1),%1 \n" // 16 sample to 32 sample + "sub $0x20,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)), // %3 + "r"((intptr_t)(dst_stride)), // %4 + "m"(kLinearMadd31) // %5 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", + "xmm7"); +} +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_12_AVX2 +void ScaleRowUp2_Linear_12_AVX2(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width) { + asm volatile( + "vbroadcastf128 %3,%%ymm5 \n" + "vpcmpeqw %%ymm4,%%ymm4,%%ymm4 \n" + "vpsrlw $15,%%ymm4,%%ymm4 \n" + "vpsllw $1,%%ymm4,%%ymm4 \n" // all 2 + + LABELALIGN + "1: \n" + "vmovdqu (%0),%%ymm0 \n" // 0123456789ABCDEF (16b) + "vmovdqu 2(%0),%%ymm1 \n" // 123456789ABCDEF0 (16b) + + "vpermq $0b11011000,%%ymm0,%%ymm0 \n" // 012389AB4567CDEF + "vpermq $0b11011000,%%ymm1,%%ymm1 \n" // 12349ABC5678DEF0 + + "vpunpckhwd %%ymm1,%%ymm0,%%ymm2 \n" // 899AABBCCDDEEFF0 (near) + "vpunpcklwd %%ymm1,%%ymm0,%%ymm0 \n" // 0112233445566778 (near) + "vpshufb %%ymm5,%%ymm2,%%ymm3 \n" // 98A9BACBDCEDFE0F (far) + "vpshufb %%ymm5,%%ymm0,%%ymm1 \n" // 1021324354657687 (far) + + "vpaddw %%ymm4,%%ymm1,%%ymm1 \n" // far+2 + "vpaddw %%ymm4,%%ymm3,%%ymm3 \n" // far+2 + "vpaddw %%ymm0,%%ymm1,%%ymm1 \n" // near+far+2 + "vpaddw %%ymm2,%%ymm3,%%ymm3 \n" // near+far+2 + "vpaddw %%ymm0,%%ymm0,%%ymm0 \n" // 2*near + "vpaddw %%ymm2,%%ymm2,%%ymm2 \n" // 2*near + "vpaddw %%ymm0,%%ymm1,%%ymm0 \n" // 3*near+far+2 + "vpaddw %%ymm2,%%ymm3,%%ymm2 \n" // 3*near+far+2 + + "vpsrlw $2,%%ymm0,%%ymm0 \n" // 3/4*near+1/4*far + "vpsrlw $2,%%ymm2,%%ymm2 \n" // 3/4*near+1/4*far + "vmovdqu %%ymm0,(%1) \n" + "vmovdqu %%ymm2,32(%1) \n" + + "lea 0x20(%0),%0 \n" + "lea 0x40(%1),%1 \n" // 16 sample to 32 sample + "sub $0x20,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "m"(kLinearShuffleFar) // %3 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"); +} +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_12_AVX2 +void ScaleRowUp2_Bilinear_12_AVX2(const uint16_t* src_ptr, + ptrdiff_t 
src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + asm volatile( + "vbroadcastf128 %5,%%ymm5 \n" + "vpcmpeqw %%ymm4,%%ymm4,%%ymm4 \n" + "vpsrlw $15,%%ymm4,%%ymm4 \n" + "vpsllw $3,%%ymm4,%%ymm4 \n" // all 8 + + LABELALIGN + "1: \n" + + "vmovdqu (%0),%%xmm0 \n" // 01234567 (16b) + "vmovdqu 2(%0),%%xmm1 \n" // 12345678 (16b) + "vpermq $0b11011000,%%ymm0,%%ymm0 \n" // 0123000045670000 + "vpermq $0b11011000,%%ymm1,%%ymm1 \n" // 1234000056780000 + "vpunpcklwd %%ymm1,%%ymm0,%%ymm0 \n" // 0112233445566778 (near) + "vpshufb %%ymm5,%%ymm0,%%ymm1 \n" // 1021324354657687 (far) + "vpaddw %%ymm0,%%ymm1,%%ymm1 \n" // near+far + "vpaddw %%ymm0,%%ymm0,%%ymm0 \n" // 2*near + "vpaddw %%ymm0,%%ymm1,%%ymm2 \n" // 3*near+far (1) + + "vmovdqu (%0,%3,2),%%xmm0 \n" // 01234567 (16b) + "vmovdqu 2(%0,%3,2),%%xmm1 \n" // 12345678 (16b) + "vpermq $0b11011000,%%ymm0,%%ymm0 \n" // 0123000045670000 + "vpermq $0b11011000,%%ymm1,%%ymm1 \n" // 1234000056780000 + "vpunpcklwd %%ymm1,%%ymm0,%%ymm0 \n" // 0112233445566778 (near) + "vpshufb %%ymm5,%%ymm0,%%ymm1 \n" // 1021324354657687 (far) + "vpaddw %%ymm0,%%ymm1,%%ymm1 \n" // near+far + "vpaddw %%ymm0,%%ymm0,%%ymm0 \n" // 2*near + "vpaddw %%ymm0,%%ymm1,%%ymm3 \n" // 3*near+far (2) + + "vpaddw %%ymm2,%%ymm2,%%ymm0 \n" // 6*near+2*far (1) + "vpaddw %%ymm4,%%ymm3,%%ymm1 \n" // 3*near+far+8 (2) + "vpaddw %%ymm0,%%ymm2,%%ymm0 \n" // 9*near+3*far (1) + "vpaddw %%ymm0,%%ymm1,%%ymm0 \n" // 9 3 3 1 + 8 (1) + "vpsrlw $4,%%ymm0,%%ymm0 \n" // ^ div by 16 + "vmovdqu %%ymm0,(%1) \n" // store above + + "vpaddw %%ymm3,%%ymm3,%%ymm0 \n" // 6*near+2*far (2) + "vpaddw %%ymm4,%%ymm2,%%ymm1 \n" // 3*near+far+8 (1) + "vpaddw %%ymm0,%%ymm3,%%ymm0 \n" // 9*near+3*far (2) + "vpaddw %%ymm0,%%ymm1,%%ymm0 \n" // 9 3 3 1 + 8 (2) + "vpsrlw $4,%%ymm0,%%ymm0 \n" // ^ div by 16 + "vmovdqu %%ymm0,(%1,%4,2) \n" // store below + + "lea 0x10(%0),%0 \n" + "lea 0x20(%1),%1 \n" // 8 sample to 16 sample + "sub $0x10,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)), // %3 + "r"((intptr_t)(dst_stride)), // %4 + "m"(kLinearShuffleFar) // %5 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"); +} +#endif + +#ifdef HAS_SCALEROWUP2_LINEAR_16_AVX2 +void ScaleRowUp2_Linear_16_AVX2(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width) { + asm volatile( + "vpcmpeqd %%ymm4,%%ymm4,%%ymm4 \n" + "vpsrld $31,%%ymm4,%%ymm4 \n" + "vpslld $1,%%ymm4,%%ymm4 \n" // all 2 + + LABELALIGN + "1: \n" + "vmovdqu (%0),%%xmm0 \n" // 01234567 (16b, 1u1v) + "vmovdqu 2(%0),%%xmm1 \n" // 12345678 (16b, 1u1v) + + "vpmovzxwd %%xmm0,%%ymm0 \n" // 01234567 (32b, 1u1v) + "vpmovzxwd %%xmm1,%%ymm1 \n" // 12345678 (32b, 1u1v) + + "vpshufd $0b10110001,%%ymm0,%%ymm2 \n" // 10325476 (lo, far) + "vpshufd $0b10110001,%%ymm1,%%ymm3 \n" // 21436587 (hi, far) + + "vpaddd %%ymm4,%%ymm2,%%ymm2 \n" // far+2 (lo) + "vpaddd %%ymm4,%%ymm3,%%ymm3 \n" // far+2 (hi) + "vpaddd %%ymm0,%%ymm2,%%ymm2 \n" // near+far+2 (lo) + "vpaddd %%ymm1,%%ymm3,%%ymm3 \n" // near+far+2 (hi) + "vpaddd %%ymm0,%%ymm0,%%ymm0 \n" // 2*near (lo) + "vpaddd %%ymm1,%%ymm1,%%ymm1 \n" // 2*near (hi) + "vpaddd %%ymm0,%%ymm2,%%ymm0 \n" // 3*near+far+2 (lo) + "vpaddd %%ymm1,%%ymm3,%%ymm1 \n" // 3*near+far+2 (hi) + + "vpsrld $2,%%ymm0,%%ymm0 \n" // 3/4*near+1/4*far (lo) + "vpsrld $2,%%ymm1,%%ymm1 \n" // 3/4*near+1/4*far (hi) + "vpackusdw %%ymm1,%%ymm0,%%ymm0 \n" + "vpshufd $0b11011000,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + + "lea 0x10(%0),%0 \n" + "lea 
0x20(%1),%1 \n" // 8 pixel to 16 pixel + "sub $0x10,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4"); +} +#endif + +#ifdef HAS_SCALEROWUP2_BILINEAR_16_AVX2 +void ScaleRowUp2_Bilinear_16_AVX2(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + asm volatile( + "vpcmpeqd %%ymm6,%%ymm6,%%ymm6 \n" + "vpsrld $31,%%ymm6,%%ymm6 \n" + "vpslld $3,%%ymm6,%%ymm6 \n" // all 8 + + LABELALIGN + "1: \n" + + "vmovdqu (%0),%%xmm0 \n" // 01234567 (16b, 1u1v) + "vmovdqu 2(%0),%%xmm1 \n" // 12345678 (16b, 1u1v) + "vpmovzxwd %%xmm0,%%ymm0 \n" // 01234567 (32b, 1u1v) + "vpmovzxwd %%xmm1,%%ymm1 \n" // 12345678 (32b, 1u1v) + "vpshufd $0b10110001,%%ymm0,%%ymm2 \n" // 10325476 (lo, far) + "vpshufd $0b10110001,%%ymm1,%%ymm3 \n" // 21436587 (hi, far) + "vpaddd %%ymm0,%%ymm2,%%ymm2 \n" // near+far (lo) + "vpaddd %%ymm1,%%ymm3,%%ymm3 \n" // near+far (hi) + "vpaddd %%ymm0,%%ymm0,%%ymm0 \n" // 2*near (lo) + "vpaddd %%ymm1,%%ymm1,%%ymm1 \n" // 2*near (hi) + "vpaddd %%ymm0,%%ymm2,%%ymm0 \n" // 3*near+far (1, lo) + "vpaddd %%ymm1,%%ymm3,%%ymm1 \n" // 3*near+far (1, hi) + + "vmovdqu (%0,%3,2),%%xmm2 \n" // 01234567 (16b, 1u1v) + "vmovdqu 2(%0,%3,2),%%xmm3 \n" // 12345678 (16b, 1u1v) + "vpmovzxwd %%xmm2,%%ymm2 \n" // 01234567 (32b, 1u1v) + "vpmovzxwd %%xmm3,%%ymm3 \n" // 12345678 (32b, 1u1v) + "vpshufd $0b10110001,%%ymm2,%%ymm4 \n" // 10325476 (lo, far) + "vpshufd $0b10110001,%%ymm3,%%ymm5 \n" // 21436587 (hi, far) + "vpaddd %%ymm2,%%ymm4,%%ymm4 \n" // near+far (lo) + "vpaddd %%ymm3,%%ymm5,%%ymm5 \n" // near+far (hi) + "vpaddd %%ymm2,%%ymm2,%%ymm2 \n" // 2*near (lo) + "vpaddd %%ymm3,%%ymm3,%%ymm3 \n" // 2*near (hi) + "vpaddd %%ymm2,%%ymm4,%%ymm2 \n" // 3*near+far (2, lo) + "vpaddd %%ymm3,%%ymm5,%%ymm3 \n" // 3*near+far (2, hi) + + "vpaddd %%ymm0,%%ymm0,%%ymm4 \n" // 6*near+2*far (1, lo) + "vpaddd %%ymm6,%%ymm2,%%ymm5 \n" // 3*near+far+8 (2, lo) + "vpaddd %%ymm4,%%ymm0,%%ymm4 \n" // 9*near+3*far (1, lo) + "vpaddd %%ymm4,%%ymm5,%%ymm4 \n" // 9 3 3 1 + 8 (1, lo) + "vpsrld $4,%%ymm4,%%ymm4 \n" // ^ div by 16 (1, lo) + + "vpaddd %%ymm2,%%ymm2,%%ymm5 \n" // 6*near+2*far (2, lo) + "vpaddd %%ymm6,%%ymm0,%%ymm0 \n" // 3*near+far+8 (1, lo) + "vpaddd %%ymm5,%%ymm2,%%ymm5 \n" // 9*near+3*far (2, lo) + "vpaddd %%ymm5,%%ymm0,%%ymm5 \n" // 9 3 3 1 + 8 (2, lo) + "vpsrld $4,%%ymm5,%%ymm5 \n" // ^ div by 16 (2, lo) + + "vpaddd %%ymm1,%%ymm1,%%ymm0 \n" // 6*near+2*far (1, hi) + "vpaddd %%ymm6,%%ymm3,%%ymm2 \n" // 3*near+far+8 (2, hi) + "vpaddd %%ymm0,%%ymm1,%%ymm0 \n" // 9*near+3*far (1, hi) + "vpaddd %%ymm0,%%ymm2,%%ymm0 \n" // 9 3 3 1 + 8 (1, hi) + "vpsrld $4,%%ymm0,%%ymm0 \n" // ^ div by 16 (1, hi) + + "vpaddd %%ymm3,%%ymm3,%%ymm2 \n" // 6*near+2*far (2, hi) + "vpaddd %%ymm6,%%ymm1,%%ymm1 \n" // 3*near+far+8 (1, hi) + "vpaddd %%ymm2,%%ymm3,%%ymm2 \n" // 9*near+3*far (2, hi) + "vpaddd %%ymm2,%%ymm1,%%ymm2 \n" // 9 3 3 1 + 8 (2, hi) + "vpsrld $4,%%ymm2,%%ymm2 \n" // ^ div by 16 (2, hi) + + "vpackusdw %%ymm0,%%ymm4,%%ymm4 \n" + "vpshufd $0b11011000,%%ymm4,%%ymm4 \n" + "vmovdqu %%ymm4,(%1) \n" // store above + "vpackusdw %%ymm2,%%ymm5,%%ymm5 \n" + "vpshufd $0b11011000,%%ymm5,%%ymm5 \n" + "vmovdqu %%ymm5,(%1,%4,2) \n" // store below + + "lea 0x10(%0),%0 \n" + "lea 0x20(%1),%1 \n" // 8 pixel to 16 pixel + "sub $0x10,%2 \n" "jg 1b \n" - : "+r"(src_ptr), // %0 - "+r"(dst_ptr), // %1 - "+r"(dst_width) // %2 - : "r"((intptr_t)(src_stride)) // %3 - : "memory", "cc", 
"xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", - "xmm7"); + "vzeroupper \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)), // %3 + "r"((intptr_t)(dst_stride)) // %4 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6"); } +#endif // Reads 16xN bytes and produces 16 shorts at a time. void ScaleAddRow_SSE2(const uint8_t* src_ptr, uint16_t* dst_ptr, int src_width) { - asm volatile( - - "pxor %%xmm5,%%xmm5 \n" + asm volatile("pxor %%xmm5,%%xmm5 \n" - // 16 pixel loop. - LABELALIGN + // 16 pixel loop. + LABELALIGN "1: \n" "movdqu (%0),%%xmm3 \n" "lea 0x10(%0),%0 \n" // src_ptr += 16 @@ -810,11 +1782,11 @@ void ScaleAddRow_SSE2(const uint8_t* src_ptr, "lea 0x20(%1),%1 \n" "sub $0x10,%2 \n" "jg 1b \n" - : "+r"(src_ptr), // %0 - "+r"(dst_ptr), // %1 - "+r"(src_width) // %2 - : - : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm5"); + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(src_width) // %2 + : + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm5"); } #ifdef HAS_SCALEADDROW_AVX2 @@ -822,11 +1794,9 @@ void ScaleAddRow_SSE2(const uint8_t* src_ptr, void ScaleAddRow_AVX2(const uint8_t* src_ptr, uint16_t* dst_ptr, int src_width) { - asm volatile( + asm volatile("vpxor %%ymm5,%%ymm5,%%ymm5 \n" - "vpxor %%ymm5,%%ymm5,%%ymm5 \n" - - LABELALIGN + LABELALIGN "1: \n" "vmovdqu (%0),%%ymm3 \n" "lea 0x20(%0),%0 \n" // src_ptr += 32 @@ -841,11 +1811,11 @@ void ScaleAddRow_AVX2(const uint8_t* src_ptr, "sub $0x20,%2 \n" "jg 1b \n" "vzeroupper \n" - : "+r"(src_ptr), // %0 - "+r"(dst_ptr), // %1 - "+r"(src_width) // %2 - : - : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm5"); + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(src_width) // %2 + : + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm5"); } #endif // HAS_SCALEADDROW_AVX2 @@ -962,9 +1932,7 @@ void ScaleColsUp2_SSE2(uint8_t* dst_ptr, int dx) { (void)x; (void)dx; - asm volatile( - - LABELALIGN + asm volatile(LABELALIGN "1: \n" "movdqu (%1),%%xmm0 \n" "lea 0x10(%1),%1 \n" @@ -977,11 +1945,11 @@ void ScaleColsUp2_SSE2(uint8_t* dst_ptr, "sub $0x20,%2 \n" "jg 1b \n" - : "+r"(dst_ptr), // %0 - "+r"(src_ptr), // %1 - "+r"(dst_width) // %2 - ::"memory", - "cc", "xmm0", "xmm1"); + : "+r"(dst_ptr), // %0 + "+r"(src_ptr), // %1 + "+r"(dst_width) // %2 + ::"memory", + "cc", "xmm0", "xmm1"); } void ScaleARGBRowDown2_SSE2(const uint8_t* src_argb, @@ -989,9 +1957,7 @@ void ScaleARGBRowDown2_SSE2(const uint8_t* src_argb, uint8_t* dst_argb, int dst_width) { (void)src_stride; - asm volatile( - - LABELALIGN + asm volatile(LABELALIGN "1: \n" "movdqu (%0),%%xmm0 \n" "movdqu 0x10(%0),%%xmm1 \n" @@ -1001,11 +1967,11 @@ void ScaleARGBRowDown2_SSE2(const uint8_t* src_argb, "lea 0x10(%1),%1 \n" "sub $0x4,%2 \n" "jg 1b \n" - : "+r"(src_argb), // %0 - "+r"(dst_argb), // %1 - "+r"(dst_width) // %2 - ::"memory", - "cc", "xmm0", "xmm1"); + : "+r"(src_argb), // %0 + "+r"(dst_argb), // %1 + "+r"(dst_width) // %2 + ::"memory", + "cc", "xmm0", "xmm1"); } void ScaleARGBRowDown2Linear_SSE2(const uint8_t* src_argb, @@ -1013,9 +1979,7 @@ void ScaleARGBRowDown2Linear_SSE2(const uint8_t* src_argb, uint8_t* dst_argb, int dst_width) { (void)src_stride; - asm volatile( - - LABELALIGN + asm volatile(LABELALIGN "1: \n" "movdqu (%0),%%xmm0 \n" "movdqu 0x10(%0),%%xmm1 \n" @@ -1028,20 +1992,18 @@ void ScaleARGBRowDown2Linear_SSE2(const uint8_t* src_argb, "lea 0x10(%1),%1 \n" "sub $0x4,%2 \n" "jg 1b \n" - : "+r"(src_argb), // %0 - "+r"(dst_argb), // %1 - "+r"(dst_width) // %2 - 
::"memory", - "cc", "xmm0", "xmm1"); + : "+r"(src_argb), // %0 + "+r"(dst_argb), // %1 + "+r"(dst_width) // %2 + ::"memory", + "cc", "xmm0", "xmm1"); } void ScaleARGBRowDown2Box_SSE2(const uint8_t* src_argb, ptrdiff_t src_stride, uint8_t* dst_argb, int dst_width) { - asm volatile( - - LABELALIGN + asm volatile(LABELALIGN "1: \n" "movdqu (%0),%%xmm0 \n" "movdqu 0x10(%0),%%xmm1 \n" @@ -1058,11 +2020,11 @@ void ScaleARGBRowDown2Box_SSE2(const uint8_t* src_argb, "lea 0x10(%1),%1 \n" "sub $0x4,%2 \n" "jg 1b \n" - : "+r"(src_argb), // %0 - "+r"(dst_argb), // %1 - "+r"(dst_width) // %2 - : "r"((intptr_t)(src_stride)) // %3 - : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3"); + : "+r"(src_argb), // %0 + "+r"(dst_argb), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)) // %3 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3"); } // Reads 4 pixels at a time. @@ -1226,9 +2188,7 @@ void ScaleARGBColsUp2_SSE2(uint8_t* dst_argb, int dx) { (void)x; (void)dx; - asm volatile( - - LABELALIGN + asm volatile(LABELALIGN "1: \n" "movdqu (%1),%%xmm0 \n" "lea 0x10(%1),%1 \n" @@ -1241,11 +2201,11 @@ void ScaleARGBColsUp2_SSE2(uint8_t* dst_argb, "sub $0x8,%2 \n" "jg 1b \n" - : "+r"(dst_argb), // %0 - "+r"(src_argb), // %1 - "+r"(dst_width) // %2 - ::"memory", - "cc", "xmm0", "xmm1"); + : "+r"(dst_argb), // %0 + "+r"(src_argb), // %1 + "+r"(dst_width) // %2 + ::"memory", + "cc", "xmm0", "xmm1"); } // Shuffle table for arranging 2 pixels into pairs for pmaddubsw @@ -1367,13 +2327,18 @@ int FixedDiv1_X86(int num, int div) { return num; } -#ifdef HAS_SCALEUVROWDOWN2BOX_SSSE3 +#if defined(HAS_SCALEUVROWDOWN2BOX_SSSE3) || \ + defined(HAS_SCALEUVROWDOWN2BOX_AVX2) + // Shuffle table for splitting UV into upper and lower part of register. static const uvec8 kShuffleSplitUV = {0u, 2u, 4u, 6u, 8u, 10u, 12u, 14u, 1u, 3u, 5u, 7u, 9u, 11u, 13u, 15u}; static const uvec8 kShuffleMergeUV = {0u, 8u, 2u, 10u, 4u, 12u, 6u, 14u, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80}; +#endif + +#ifdef HAS_SCALEUVROWDOWN2BOX_SSSE3 void ScaleUVRowDown2Box_SSSE3(const uint8_t* src_ptr, ptrdiff_t src_stride, @@ -1456,6 +2421,530 @@ void ScaleUVRowDown2Box_AVX2(const uint8_t* src_ptr, } #endif // HAS_SCALEUVROWDOWN2BOX_AVX2 +static const uvec8 kUVLinearMadd31 = {3, 1, 3, 1, 1, 3, 1, 3, + 3, 1, 3, 1, 1, 3, 1, 3}; + +#ifdef HAS_SCALEUVROWUP2_LINEAR_SSSE3 +void ScaleUVRowUp2_Linear_SSSE3(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width) { + asm volatile( + "pcmpeqw %%xmm4,%%xmm4 \n" + "psrlw $15,%%xmm4 \n" + "psllw $1,%%xmm4 \n" // all 2 + "movdqa %3,%%xmm3 \n" + + LABELALIGN + "1: \n" + "movq (%0),%%xmm0 \n" // 00112233 (1u1v) + "movq 2(%0),%%xmm1 \n" // 11223344 (1u1v) + "punpcklbw %%xmm1,%%xmm0 \n" // 0101121223233434 (2u2v) + "movdqa %%xmm0,%%xmm2 \n" + "punpckhdq %%xmm0,%%xmm2 \n" // 2323232334343434 (2u2v) + "punpckldq %%xmm0,%%xmm0 \n" // 0101010112121212 (2u2v) + "pmaddubsw %%xmm3,%%xmm2 \n" // 3*near+far (1u1v16, hi) + "pmaddubsw %%xmm3,%%xmm0 \n" // 3*near+far (1u1v16, lo) + "paddw %%xmm4,%%xmm0 \n" // 3*near+far+2 (lo) + "paddw %%xmm4,%%xmm2 \n" // 3*near+far+2 (hi) + "psrlw $2,%%xmm0 \n" // 3/4*near+1/4*far (lo) + "psrlw $2,%%xmm2 \n" // 3/4*near+1/4*far (hi) + "packuswb %%xmm2,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + + "lea 0x8(%0),%0 \n" + "lea 0x10(%1),%1 \n" // 4 uv to 8 uv + "sub $0x8,%2 \n" + "jg 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "m"(kUVLinearMadd31) // %3 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6"); +} +#endif + +#ifdef 
HAS_SCALEUVROWUP2_BILINEAR_SSSE3 +void ScaleUVRowUp2_Bilinear_SSSE3(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + asm volatile( + "pcmpeqw %%xmm6,%%xmm6 \n" + "psrlw $15,%%xmm6 \n" + "psllw $3,%%xmm6 \n" // all 8 + "movdqa %5,%%xmm7 \n" + + LABELALIGN + "1: \n" + "movq (%0),%%xmm0 \n" // 00112233 (1u1v) + "movq 2(%0),%%xmm1 \n" // 11223344 (1u1v) + "punpcklbw %%xmm1,%%xmm0 \n" // 0101121223233434 (2u2v) + "movdqa %%xmm0,%%xmm2 \n" + "punpckhdq %%xmm0,%%xmm2 \n" // 2323232334343434 (2u2v) + "punpckldq %%xmm0,%%xmm0 \n" // 0101010112121212 (2u2v) + "pmaddubsw %%xmm7,%%xmm2 \n" // 3*near+far (1u1v16, hi) + "pmaddubsw %%xmm7,%%xmm0 \n" // 3*near+far (1u1v16, lo) + + "movq (%0,%3),%%xmm1 \n" + "movq 2(%0,%3),%%xmm4 \n" + "punpcklbw %%xmm4,%%xmm1 \n" + "movdqa %%xmm1,%%xmm3 \n" + "punpckhdq %%xmm1,%%xmm3 \n" + "punpckldq %%xmm1,%%xmm1 \n" + "pmaddubsw %%xmm7,%%xmm3 \n" // 3*near+far (2, hi) + "pmaddubsw %%xmm7,%%xmm1 \n" // 3*near+far (2, lo) + + // xmm0 xmm2 + // xmm1 xmm3 + + "movdqa %%xmm0,%%xmm4 \n" + "movdqa %%xmm1,%%xmm5 \n" + "paddw %%xmm0,%%xmm4 \n" // 6*near+2*far (1, lo) + "paddw %%xmm6,%%xmm5 \n" // 3*near+far+8 (2, lo) + "paddw %%xmm0,%%xmm4 \n" // 9*near+3*far (1, lo) + "paddw %%xmm5,%%xmm4 \n" // 9 3 3 1 + 8 (1, lo) + "psrlw $4,%%xmm4 \n" // ^ div by 16 (1, lo) + + "movdqa %%xmm1,%%xmm5 \n" + "paddw %%xmm1,%%xmm5 \n" // 6*near+2*far (2, lo) + "paddw %%xmm6,%%xmm0 \n" // 3*near+far+8 (1, lo) + "paddw %%xmm1,%%xmm5 \n" // 9*near+3*far (2, lo) + "paddw %%xmm0,%%xmm5 \n" // 9 3 3 1 + 8 (2, lo) + "psrlw $4,%%xmm5 \n" // ^ div by 16 (2, lo) + + "movdqa %%xmm2,%%xmm0 \n" + "movdqa %%xmm3,%%xmm1 \n" + "paddw %%xmm2,%%xmm0 \n" // 6*near+2*far (1, hi) + "paddw %%xmm6,%%xmm1 \n" // 3*near+far+8 (2, hi) + "paddw %%xmm2,%%xmm0 \n" // 9*near+3*far (1, hi) + "paddw %%xmm1,%%xmm0 \n" // 9 3 3 1 + 8 (1, hi) + "psrlw $4,%%xmm0 \n" // ^ div by 16 (1, hi) + + "movdqa %%xmm3,%%xmm1 \n" + "paddw %%xmm3,%%xmm1 \n" // 6*near+2*far (2, hi) + "paddw %%xmm6,%%xmm2 \n" // 3*near+far+8 (1, hi) + "paddw %%xmm3,%%xmm1 \n" // 9*near+3*far (2, hi) + "paddw %%xmm2,%%xmm1 \n" // 9 3 3 1 + 8 (2, hi) + "psrlw $4,%%xmm1 \n" // ^ div by 16 (2, hi) + + "packuswb %%xmm0,%%xmm4 \n" + "movdqu %%xmm4,(%1) \n" // store above + "packuswb %%xmm1,%%xmm5 \n" + "movdqu %%xmm5,(%1,%4) \n" // store below + + "lea 0x8(%0),%0 \n" + "lea 0x10(%1),%1 \n" // 4 uv to 8 uv + "sub $0x8,%2 \n" + "jg 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)), // %3 + "r"((intptr_t)(dst_stride)), // %4 + "m"(kUVLinearMadd31) // %5 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", + "xmm7"); +} +#endif + +#ifdef HAS_SCALEUVROWUP2_LINEAR_AVX2 + +void ScaleUVRowUp2_Linear_AVX2(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width) { + asm volatile( + "vpcmpeqw %%ymm4,%%ymm4,%%ymm4 \n" + "vpsrlw $15,%%ymm4,%%ymm4 \n" + "vpsllw $1,%%ymm4,%%ymm4 \n" // all 2 + "vbroadcastf128 %3,%%ymm3 \n" + + LABELALIGN + "1: \n" + "vmovdqu (%0),%%xmm0 \n" + "vmovdqu 2(%0),%%xmm1 \n" + "vpermq $0b11011000,%%ymm0,%%ymm0 \n" + "vpermq $0b11011000,%%ymm1,%%ymm1 \n" + "vpunpcklbw %%ymm1,%%ymm0,%%ymm0 \n" + "vpunpckhdq %%ymm0,%%ymm0,%%ymm2 \n" + "vpunpckldq %%ymm0,%%ymm0,%%ymm0 \n" + "vpmaddubsw %%ymm3,%%ymm2,%%ymm1 \n" // 3*near+far (hi) + "vpmaddubsw %%ymm3,%%ymm0,%%ymm0 \n" // 3*near+far (lo) + "vpaddw %%ymm4,%%ymm0,%%ymm0 \n" // 3*near+far+2 (lo) + "vpaddw %%ymm4,%%ymm1,%%ymm1 \n" // 3*near+far+2 (hi) + "vpsrlw 
$2,%%ymm0,%%ymm0 \n" // 3/4*near+1/4*far (lo) + "vpsrlw $2,%%ymm1,%%ymm1 \n" // 3/4*near+1/4*far (hi) + "vpackuswb %%ymm1,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + + "lea 0x10(%0),%0 \n" + "lea 0x20(%1),%1 \n" // 8 uv to 16 uv + "sub $0x10,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "m"(kUVLinearMadd31) // %3 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4"); +} +#endif + +#ifdef HAS_SCALEUVROWUP2_BILINEAR_AVX2 +void ScaleUVRowUp2_Bilinear_AVX2(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + asm volatile( + "vpcmpeqw %%ymm6,%%ymm6,%%ymm6 \n" + "vpsrlw $15,%%ymm6,%%ymm6 \n" + "vpsllw $3,%%ymm6,%%ymm6 \n" // all 8 + "vbroadcastf128 %5,%%ymm7 \n" + + LABELALIGN + "1: \n" + "vmovdqu (%0),%%xmm0 \n" + "vmovdqu 2(%0),%%xmm1 \n" + "vpermq $0b11011000,%%ymm0,%%ymm0 \n" + "vpermq $0b11011000,%%ymm1,%%ymm1 \n" + "vpunpcklbw %%ymm1,%%ymm0,%%ymm0 \n" + "vpunpckhdq %%ymm0,%%ymm0,%%ymm2 \n" + "vpunpckldq %%ymm0,%%ymm0,%%ymm0 \n" + "vpmaddubsw %%ymm7,%%ymm2,%%ymm1 \n" // 3*near+far (1, hi) + "vpmaddubsw %%ymm7,%%ymm0,%%ymm0 \n" // 3*near+far (1, lo) + + "vmovdqu (%0,%3),%%xmm2 \n" // 0123456789ABCDEF + "vmovdqu 2(%0,%3),%%xmm3 \n" // 123456789ABCDEF0 + "vpermq $0b11011000,%%ymm2,%%ymm2 \n" + "vpermq $0b11011000,%%ymm3,%%ymm3 \n" + "vpunpcklbw %%ymm3,%%ymm2,%%ymm2 \n" + "vpunpckhdq %%ymm2,%%ymm2,%%ymm4 \n" + "vpunpckldq %%ymm2,%%ymm2,%%ymm2 \n" + "vpmaddubsw %%ymm7,%%ymm4,%%ymm3 \n" // 3*near+far (2, hi) + "vpmaddubsw %%ymm7,%%ymm2,%%ymm2 \n" // 3*near+far (2, lo) + + // ymm0 ymm1 + // ymm2 ymm3 + + "vpaddw %%ymm0,%%ymm0,%%ymm4 \n" // 6*near+2*far (1, lo) + "vpaddw %%ymm6,%%ymm2,%%ymm5 \n" // 3*near+far+8 (2, lo) + "vpaddw %%ymm4,%%ymm0,%%ymm4 \n" // 9*near+3*far (1, lo) + "vpaddw %%ymm4,%%ymm5,%%ymm4 \n" // 9 3 3 1 + 8 (1, lo) + "vpsrlw $4,%%ymm4,%%ymm4 \n" // ^ div by 16 (1, lo) + + "vpaddw %%ymm2,%%ymm2,%%ymm5 \n" // 6*near+2*far (2, lo) + "vpaddw %%ymm6,%%ymm0,%%ymm0 \n" // 3*near+far+8 (1, lo) + "vpaddw %%ymm5,%%ymm2,%%ymm5 \n" // 9*near+3*far (2, lo) + "vpaddw %%ymm5,%%ymm0,%%ymm5 \n" // 9 3 3 1 + 8 (2, lo) + "vpsrlw $4,%%ymm5,%%ymm5 \n" // ^ div by 16 (2, lo) + + "vpaddw %%ymm1,%%ymm1,%%ymm0 \n" // 6*near+2*far (1, hi) + "vpaddw %%ymm6,%%ymm3,%%ymm2 \n" // 3*near+far+8 (2, hi) + "vpaddw %%ymm0,%%ymm1,%%ymm0 \n" // 9*near+3*far (1, hi) + "vpaddw %%ymm0,%%ymm2,%%ymm0 \n" // 9 3 3 1 + 8 (1, hi) + "vpsrlw $4,%%ymm0,%%ymm0 \n" // ^ div by 16 (1, hi) + + "vpaddw %%ymm3,%%ymm3,%%ymm2 \n" // 6*near+2*far (2, hi) + "vpaddw %%ymm6,%%ymm1,%%ymm1 \n" // 3*near+far+8 (1, hi) + "vpaddw %%ymm2,%%ymm3,%%ymm2 \n" // 9*near+3*far (2, hi) + "vpaddw %%ymm2,%%ymm1,%%ymm2 \n" // 9 3 3 1 + 8 (2, hi) + "vpsrlw $4,%%ymm2,%%ymm2 \n" // ^ div by 16 (2, hi) + + "vpackuswb %%ymm0,%%ymm4,%%ymm4 \n" + "vmovdqu %%ymm4,(%1) \n" // store above + "vpackuswb %%ymm2,%%ymm5,%%ymm5 \n" + "vmovdqu %%ymm5,(%1,%4) \n" // store below + + "lea 0x10(%0),%0 \n" + "lea 0x20(%1),%1 \n" // 8 uv to 16 uv + "sub $0x10,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)), // %3 + "r"((intptr_t)(dst_stride)), // %4 + "m"(kUVLinearMadd31) // %5 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", + "xmm7"); +} +#endif + +#ifdef HAS_SCALEUVROWUP2_LINEAR_16_SSE41 +void ScaleUVRowUp2_Linear_16_SSE41(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width) { + asm volatile( + "pxor 
%%xmm5,%%xmm5 \n" + "pcmpeqd %%xmm4,%%xmm4 \n" + "psrld $31,%%xmm4 \n" + "pslld $1,%%xmm4 \n" // all 2 + + LABELALIGN + "1: \n" + "movq (%0),%%xmm0 \n" // 0011 (16b, 1u1v) + "movq 4(%0),%%xmm1 \n" // 1122 (16b, 1u1v) + + "punpcklwd %%xmm5,%%xmm0 \n" // 0011 (32b, 1u1v) + "punpcklwd %%xmm5,%%xmm1 \n" // 1122 (32b, 1u1v) + + "movdqa %%xmm0,%%xmm2 \n" + "movdqa %%xmm1,%%xmm3 \n" + + "pshufd $0b01001110,%%xmm2,%%xmm2 \n" // 1100 (lo, far) + "pshufd $0b01001110,%%xmm3,%%xmm3 \n" // 2211 (hi, far) + + "paddd %%xmm4,%%xmm2 \n" // far+2 (lo) + "paddd %%xmm4,%%xmm3 \n" // far+2 (hi) + "paddd %%xmm0,%%xmm2 \n" // near+far+2 (lo) + "paddd %%xmm1,%%xmm3 \n" // near+far+2 (hi) + "paddd %%xmm0,%%xmm0 \n" // 2*near (lo) + "paddd %%xmm1,%%xmm1 \n" // 2*near (hi) + "paddd %%xmm2,%%xmm0 \n" // 3*near+far+2 (lo) + "paddd %%xmm3,%%xmm1 \n" // 3*near+far+2 (hi) + + "psrld $2,%%xmm0 \n" // 3/4*near+1/4*far (lo) + "psrld $2,%%xmm1 \n" // 3/4*near+1/4*far (hi) + "packusdw %%xmm1,%%xmm0 \n" + "movdqu %%xmm0,(%1) \n" + + "lea 0x8(%0),%0 \n" + "lea 0x10(%1),%1 \n" // 2 uv to 4 uv + "sub $0x4,%2 \n" + "jg 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5"); +} +#endif + +#ifdef HAS_SCALEUVROWUP2_BILINEAR_16_SSE41 +void ScaleUVRowUp2_Bilinear_16_SSE41(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + asm volatile( + "pxor %%xmm7,%%xmm7 \n" + "pcmpeqd %%xmm6,%%xmm6 \n" + "psrld $31,%%xmm6 \n" + "pslld $3,%%xmm6 \n" // all 8 + + LABELALIGN + "1: \n" + "movq (%0),%%xmm0 \n" // 0011 (16b, 1u1v) + "movq 4(%0),%%xmm1 \n" // 1122 (16b, 1u1v) + "punpcklwd %%xmm7,%%xmm0 \n" // 0011 (near) (32b, 1u1v) + "punpcklwd %%xmm7,%%xmm1 \n" // 1122 (near) (32b, 1u1v) + "movdqa %%xmm0,%%xmm2 \n" + "movdqa %%xmm1,%%xmm3 \n" + "pshufd $0b01001110,%%xmm2,%%xmm2 \n" // 1100 (far) (1, lo) + "pshufd $0b01001110,%%xmm3,%%xmm3 \n" // 2211 (far) (1, hi) + "paddd %%xmm0,%%xmm2 \n" // near+far (1, lo) + "paddd %%xmm1,%%xmm3 \n" // near+far (1, hi) + "paddd %%xmm0,%%xmm0 \n" // 2*near (1, lo) + "paddd %%xmm1,%%xmm1 \n" // 2*near (1, hi) + "paddd %%xmm2,%%xmm0 \n" // 3*near+far (1, lo) + "paddd %%xmm3,%%xmm1 \n" // 3*near+far (1, hi) + + "movq (%0,%3,2),%%xmm2 \n" + "movq 4(%0,%3,2),%%xmm3 \n" + "punpcklwd %%xmm7,%%xmm2 \n" + "punpcklwd %%xmm7,%%xmm3 \n" + "movdqa %%xmm2,%%xmm4 \n" + "movdqa %%xmm3,%%xmm5 \n" + "pshufd $0b01001110,%%xmm4,%%xmm4 \n" // 1100 (far) (2, lo) + "pshufd $0b01001110,%%xmm5,%%xmm5 \n" // 2211 (far) (2, hi) + "paddd %%xmm2,%%xmm4 \n" // near+far (2, lo) + "paddd %%xmm3,%%xmm5 \n" // near+far (2, hi) + "paddd %%xmm2,%%xmm2 \n" // 2*near (2, lo) + "paddd %%xmm3,%%xmm3 \n" // 2*near (2, hi) + "paddd %%xmm4,%%xmm2 \n" // 3*near+far (2, lo) + "paddd %%xmm5,%%xmm3 \n" // 3*near+far (2, hi) + + "movdqa %%xmm0,%%xmm4 \n" + "movdqa %%xmm2,%%xmm5 \n" + "paddd %%xmm0,%%xmm4 \n" // 6*near+2*far (1, lo) + "paddd %%xmm6,%%xmm5 \n" // 3*near+far+8 (2, lo) + "paddd %%xmm0,%%xmm4 \n" // 9*near+3*far (1, lo) + "paddd %%xmm5,%%xmm4 \n" // 9 3 3 1 + 8 (1, lo) + "psrld $4,%%xmm4 \n" // ^ div by 16 (1, lo) + + "movdqa %%xmm2,%%xmm5 \n" + "paddd %%xmm2,%%xmm5 \n" // 6*near+2*far (2, lo) + "paddd %%xmm6,%%xmm0 \n" // 3*near+far+8 (1, lo) + "paddd %%xmm2,%%xmm5 \n" // 9*near+3*far (2, lo) + "paddd %%xmm0,%%xmm5 \n" // 9 3 3 1 + 8 (2, lo) + "psrld $4,%%xmm5 \n" // ^ div by 16 (2, lo) + + "movdqa %%xmm1,%%xmm0 \n" + "movdqa %%xmm3,%%xmm2 \n" + "paddd %%xmm1,%%xmm0 \n" // 6*near+2*far (1, hi) 
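+ // Note on the weighting in these bilinear kernels: each source row first
+ // yields 3*near+far; taking 3*(3*near+far) for the closer row and adding the
+ // other row's (3*near+far)+8 gives taps of 9, 3, 3 and 1 (sum 16), so the
+ // final >>4 is a rounded 2-D bilinear blend at quarter-sample offsets.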
+ "paddd %%xmm6,%%xmm2 \n" // 3*near+far+8 (2, hi) + "paddd %%xmm1,%%xmm0 \n" // 9*near+3*far (1, hi) + "paddd %%xmm2,%%xmm0 \n" // 9 3 3 1 + 8 (1, hi) + "psrld $4,%%xmm0 \n" // ^ div by 16 (1, hi) + + "movdqa %%xmm3,%%xmm2 \n" + "paddd %%xmm3,%%xmm2 \n" // 6*near+2*far (2, hi) + "paddd %%xmm6,%%xmm1 \n" // 3*near+far+8 (1, hi) + "paddd %%xmm3,%%xmm2 \n" // 9*near+3*far (2, hi) + "paddd %%xmm1,%%xmm2 \n" // 9 3 3 1 + 8 (2, hi) + "psrld $4,%%xmm2 \n" // ^ div by 16 (2, hi) + + "packusdw %%xmm0,%%xmm4 \n" + "movdqu %%xmm4,(%1) \n" // store above + "packusdw %%xmm2,%%xmm5 \n" + "movdqu %%xmm5,(%1,%4,2) \n" // store below + + "lea 0x8(%0),%0 \n" + "lea 0x10(%1),%1 \n" // 2 uv to 4 uv + "sub $0x4,%2 \n" + "jg 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)), // %3 + "r"((intptr_t)(dst_stride)) // %4 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", + "xmm7"); +} +#endif + +#ifdef HAS_SCALEUVROWUP2_LINEAR_16_AVX2 +void ScaleUVRowUp2_Linear_16_AVX2(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width) { + asm volatile( + "vpcmpeqd %%ymm4,%%ymm4,%%ymm4 \n" + "vpsrld $31,%%ymm4,%%ymm4 \n" + "vpslld $1,%%ymm4,%%ymm4 \n" // all 2 + + LABELALIGN + "1: \n" + "vmovdqu (%0),%%xmm0 \n" // 00112233 (16b, 1u1v) + "vmovdqu 4(%0),%%xmm1 \n" // 11223344 (16b, 1u1v) + + "vpmovzxwd %%xmm0,%%ymm0 \n" // 01234567 (32b, 1u1v) + "vpmovzxwd %%xmm1,%%ymm1 \n" // 12345678 (32b, 1u1v) + + "vpshufd $0b01001110,%%ymm0,%%ymm2 \n" // 11003322 (lo, far) + "vpshufd $0b01001110,%%ymm1,%%ymm3 \n" // 22114433 (hi, far) + + "vpaddd %%ymm4,%%ymm2,%%ymm2 \n" // far+2 (lo) + "vpaddd %%ymm4,%%ymm3,%%ymm3 \n" // far+2 (hi) + "vpaddd %%ymm0,%%ymm2,%%ymm2 \n" // near+far+2 (lo) + "vpaddd %%ymm1,%%ymm3,%%ymm3 \n" // near+far+2 (hi) + "vpaddd %%ymm0,%%ymm0,%%ymm0 \n" // 2*near (lo) + "vpaddd %%ymm1,%%ymm1,%%ymm1 \n" // 2*near (hi) + "vpaddd %%ymm0,%%ymm2,%%ymm0 \n" // 3*near+far+2 (lo) + "vpaddd %%ymm1,%%ymm3,%%ymm1 \n" // 3*near+far+2 (hi) + + "vpsrld $2,%%ymm0,%%ymm0 \n" // 3/4*near+1/4*far (lo) + "vpsrld $2,%%ymm1,%%ymm1 \n" // 3/4*near+1/4*far (hi) + "vpackusdw %%ymm1,%%ymm0,%%ymm0 \n" + "vmovdqu %%ymm0,(%1) \n" + + "lea 0x10(%0),%0 \n" + "lea 0x20(%1),%1 \n" // 4 uv to 8 uv + "sub $0x8,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4"); +} +#endif + +#ifdef HAS_SCALEUVROWUP2_BILINEAR_16_AVX2 +void ScaleUVRowUp2_Bilinear_16_AVX2(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + asm volatile( + "vpcmpeqd %%ymm6,%%ymm6,%%ymm6 \n" + "vpsrld $31,%%ymm6,%%ymm6 \n" + "vpslld $3,%%ymm6,%%ymm6 \n" // all 8 + + LABELALIGN + "1: \n" + + "vmovdqu (%0),%%xmm0 \n" // 00112233 (16b, 1u1v) + "vmovdqu 4(%0),%%xmm1 \n" // 11223344 (16b, 1u1v) + "vpmovzxwd %%xmm0,%%ymm0 \n" // 01234567 (32b, 1u1v) + "vpmovzxwd %%xmm1,%%ymm1 \n" // 12345678 (32b, 1u1v) + "vpshufd $0b01001110,%%ymm0,%%ymm2 \n" // 11003322 (lo, far) + "vpshufd $0b01001110,%%ymm1,%%ymm3 \n" // 22114433 (hi, far) + "vpaddd %%ymm0,%%ymm2,%%ymm2 \n" // near+far (lo) + "vpaddd %%ymm1,%%ymm3,%%ymm3 \n" // near+far (hi) + "vpaddd %%ymm0,%%ymm0,%%ymm0 \n" // 2*near (lo) + "vpaddd %%ymm1,%%ymm1,%%ymm1 \n" // 2*near (hi) + "vpaddd %%ymm0,%%ymm2,%%ymm0 \n" // 3*near+far (lo) + "vpaddd %%ymm1,%%ymm3,%%ymm1 \n" // 3*near+far (hi) + + "vmovdqu (%0,%3,2),%%xmm2 \n" // 00112233 (16b, 1u1v) + "vmovdqu 4(%0,%3,2),%%xmm3 \n" // 
11223344 (16b, 1u1v) + "vpmovzxwd %%xmm2,%%ymm2 \n" // 01234567 (32b, 1u1v) + "vpmovzxwd %%xmm3,%%ymm3 \n" // 12345678 (32b, 1u1v) + "vpshufd $0b01001110,%%ymm2,%%ymm4 \n" // 11003322 (lo, far) + "vpshufd $0b01001110,%%ymm3,%%ymm5 \n" // 22114433 (hi, far) + "vpaddd %%ymm2,%%ymm4,%%ymm4 \n" // near+far (lo) + "vpaddd %%ymm3,%%ymm5,%%ymm5 \n" // near+far (hi) + "vpaddd %%ymm2,%%ymm2,%%ymm2 \n" // 2*near (lo) + "vpaddd %%ymm3,%%ymm3,%%ymm3 \n" // 2*near (hi) + "vpaddd %%ymm2,%%ymm4,%%ymm2 \n" // 3*near+far (lo) + "vpaddd %%ymm3,%%ymm5,%%ymm3 \n" // 3*near+far (hi) + + "vpaddd %%ymm0,%%ymm0,%%ymm4 \n" // 6*near+2*far (1, lo) + "vpaddd %%ymm6,%%ymm2,%%ymm5 \n" // 3*near+far+8 (2, lo) + "vpaddd %%ymm4,%%ymm0,%%ymm4 \n" // 9*near+3*far (1, lo) + "vpaddd %%ymm4,%%ymm5,%%ymm4 \n" // 9 3 3 1 + 8 (1, lo) + "vpsrld $4,%%ymm4,%%ymm4 \n" // ^ div by 16 (1, lo) + + "vpaddd %%ymm2,%%ymm2,%%ymm5 \n" // 6*near+2*far (2, lo) + "vpaddd %%ymm6,%%ymm0,%%ymm0 \n" // 3*near+far+8 (1, lo) + "vpaddd %%ymm5,%%ymm2,%%ymm5 \n" // 9*near+3*far (2, lo) + "vpaddd %%ymm5,%%ymm0,%%ymm5 \n" // 9 3 3 1 + 8 (2, lo) + "vpsrld $4,%%ymm5,%%ymm5 \n" // ^ div by 16 (2, lo) + + "vpaddd %%ymm1,%%ymm1,%%ymm0 \n" // 6*near+2*far (1, hi) + "vpaddd %%ymm6,%%ymm3,%%ymm2 \n" // 3*near+far+8 (2, hi) + "vpaddd %%ymm0,%%ymm1,%%ymm0 \n" // 9*near+3*far (1, hi) + "vpaddd %%ymm0,%%ymm2,%%ymm0 \n" // 9 3 3 1 + 8 (1, hi) + "vpsrld $4,%%ymm0,%%ymm0 \n" // ^ div by 16 (1, hi) + + "vpaddd %%ymm3,%%ymm3,%%ymm2 \n" // 6*near+2*far (2, hi) + "vpaddd %%ymm6,%%ymm1,%%ymm1 \n" // 3*near+far+8 (1, hi) + "vpaddd %%ymm2,%%ymm3,%%ymm2 \n" // 9*near+3*far (2, hi) + "vpaddd %%ymm2,%%ymm1,%%ymm2 \n" // 9 3 3 1 + 8 (2, hi) + "vpsrld $4,%%ymm2,%%ymm2 \n" // ^ div by 16 (2, hi) + + "vpackusdw %%ymm0,%%ymm4,%%ymm4 \n" + "vmovdqu %%ymm4,(%1) \n" // store above + "vpackusdw %%ymm2,%%ymm5,%%ymm5 \n" + "vmovdqu %%ymm5,(%1,%4,2) \n" // store below + + "lea 0x10(%0),%0 \n" + "lea 0x20(%1),%1 \n" // 4 uv to 8 uv + "sub $0x8,%2 \n" + "jg 1b \n" + "vzeroupper \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width) // %2 + : "r"((intptr_t)(src_stride)), // %3 + "r"((intptr_t)(dst_stride)) // %4 + : "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6"); +} +#endif + #endif // defined(__x86_64__) || defined(__i386__) #ifdef __cplusplus diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_lsx.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_lsx.cc new file mode 100644 index 0000000000..bfe5e9fbba --- /dev/null +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_lsx.cc @@ -0,0 +1,739 @@ +/* + * Copyright 2022 The LibYuv Project Authors. All rights reserved. + * + * Copyright (c) 2022 Loongson Technology Corporation Limited + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include <assert.h> + +#include "libyuv/scale_row.h" + +#if !defined(LIBYUV_DISABLE_LSX) && defined(__loongarch_sx) +#include "libyuv/loongson_intrinsics.h" + +#ifdef __cplusplus +namespace libyuv { +extern "C" { +#endif + +#define LOAD_DATA(_src, _in, _out) \ + { \ + int _tmp1, _tmp2, _tmp3, _tmp4; \ + DUP4_ARG2(__lsx_vpickve2gr_w, _in, 0, _in, 1, _in, 2, _in, 3, _tmp1, \ + _tmp2, _tmp3, _tmp4); \ + _out = __lsx_vinsgr2vr_w(_out, _src[_tmp1], 0); \ + _out = __lsx_vinsgr2vr_w(_out, _src[_tmp2], 1); \ + _out = __lsx_vinsgr2vr_w(_out, _src[_tmp3], 2); \ + _out = __lsx_vinsgr2vr_w(_out, _src[_tmp4], 3); \ + } + +void ScaleARGBRowDown2_LSX(const uint8_t* src_argb, + ptrdiff_t src_stride, + uint8_t* dst_argb, + int dst_width) { + int x; + int len = dst_width / 4; + (void)src_stride; + __m128i src0, src1, dst0; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lsx_vld, src_argb, 0, src_argb, 16, src0, src1); + dst0 = __lsx_vpickod_w(src1, src0); + __lsx_vst(dst0, dst_argb, 0); + src_argb += 32; + dst_argb += 16; + } +} + +void ScaleARGBRowDown2Linear_LSX(const uint8_t* src_argb, + ptrdiff_t src_stride, + uint8_t* dst_argb, + int dst_width) { + int x; + int len = dst_width / 4; + (void)src_stride; + __m128i src0, src1, tmp0, tmp1, dst0; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lsx_vld, src_argb, 0, src_argb, 16, src0, src1); + tmp0 = __lsx_vpickev_w(src1, src0); + tmp1 = __lsx_vpickod_w(src1, src0); + dst0 = __lsx_vavgr_bu(tmp1, tmp0); + __lsx_vst(dst0, dst_argb, 0); + src_argb += 32; + dst_argb += 16; + } +} + +void ScaleARGBRowDown2Box_LSX(const uint8_t* src_argb, + ptrdiff_t src_stride, + uint8_t* dst_argb, + int dst_width) { + int x; + int len = dst_width / 4; + const uint8_t* s = src_argb; + const uint8_t* t = src_argb + src_stride; + __m128i src0, src1, src2, src3, tmp0, tmp1, tmp2, tmp3, dst0; + __m128i reg0, reg1, reg2, reg3; + __m128i shuff = {0x0703060205010400, 0x0F0B0E0A0D090C08}; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lsx_vld, s, 0, s, 16, src0, src1); + DUP2_ARG2(__lsx_vld, t, 0, t, 16, src2, src3); + DUP4_ARG3(__lsx_vshuf_b, src0, src0, shuff, src1, src1, shuff, src2, src2, + shuff, src3, src3, shuff, tmp0, tmp1, tmp2, tmp3); + DUP4_ARG2(__lsx_vhaddw_hu_bu, tmp0, tmp0, tmp1, tmp1, tmp2, tmp2, tmp3, + tmp3, reg0, reg1, reg2, reg3); + DUP2_ARG2(__lsx_vsadd_hu, reg0, reg2, reg1, reg3, reg0, reg1); + dst0 = __lsx_vsrarni_b_h(reg1, reg0, 2); + __lsx_vst(dst0, dst_argb, 0); + s += 32; + t += 32; + dst_argb += 16; + } +} + +void ScaleARGBRowDownEven_LSX(const uint8_t* src_argb, + ptrdiff_t src_stride, + int32_t src_stepx, + uint8_t* dst_argb, + int dst_width) { + int x; + int len = dst_width / 4; + int32_t stepx = src_stepx << 2; + (void)src_stride; + __m128i dst0, dst1, dst2, dst3; + + for (x = 0; x < len; x++) { + dst0 = __lsx_vldrepl_w(src_argb, 0); + src_argb += stepx; + dst1 = __lsx_vldrepl_w(src_argb, 0); + src_argb += stepx; + dst2 = __lsx_vldrepl_w(src_argb, 0); + src_argb += stepx; + dst3 = __lsx_vldrepl_w(src_argb, 0); + src_argb += stepx; + __lsx_vstelm_w(dst0, dst_argb, 0, 0); + __lsx_vstelm_w(dst1, dst_argb, 4, 0); + __lsx_vstelm_w(dst2, dst_argb, 8, 0); + __lsx_vstelm_w(dst3, dst_argb, 12, 0); + dst_argb += 16; + } +} + +void ScaleARGBRowDownEvenBox_LSX(const uint8_t* src_argb, + ptrdiff_t src_stride, + int src_stepx, + uint8_t* dst_argb, + int dst_width) { + int x; + int len = dst_width / 4; + int32_t stepx = src_stepx * 4; + const uint8_t* next_argb = src_argb + src_stride; + __m128i src0, src1, src2, src3; + __m128i tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7; + 
__m128i reg0, reg1, dst0; + + for (x = 0; x < len; x++) { + tmp0 = __lsx_vldrepl_d(src_argb, 0); + src_argb += stepx; + tmp1 = __lsx_vldrepl_d(src_argb, 0); + src_argb += stepx; + tmp2 = __lsx_vldrepl_d(src_argb, 0); + src_argb += stepx; + tmp3 = __lsx_vldrepl_d(src_argb, 0); + src_argb += stepx; + tmp4 = __lsx_vldrepl_d(next_argb, 0); + next_argb += stepx; + tmp5 = __lsx_vldrepl_d(next_argb, 0); + next_argb += stepx; + tmp6 = __lsx_vldrepl_d(next_argb, 0); + next_argb += stepx; + tmp7 = __lsx_vldrepl_d(next_argb, 0); + next_argb += stepx; + DUP4_ARG2(__lsx_vilvl_d, tmp1, tmp0, tmp3, tmp2, tmp5, tmp4, tmp7, tmp6, + src0, src1, src2, src3); + DUP2_ARG2(__lsx_vaddwev_h_bu, src0, src2, src1, src3, tmp0, tmp2); + DUP2_ARG2(__lsx_vaddwod_h_bu, src0, src2, src1, src3, tmp1, tmp3); + DUP2_ARG2(__lsx_vpackev_w, tmp1, tmp0, tmp3, tmp2, reg0, reg1); + DUP2_ARG2(__lsx_vpackod_w, tmp1, tmp0, tmp3, tmp2, tmp4, tmp5); + DUP2_ARG2(__lsx_vadd_h, reg0, tmp4, reg1, tmp5, reg0, reg1); + dst0 = __lsx_vsrarni_b_h(reg1, reg0, 2); + dst0 = __lsx_vshuf4i_b(dst0, 0xD8); + __lsx_vst(dst0, dst_argb, 0); + dst_argb += 16; + } +} + +void ScaleRowDown2_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width) { + int x; + int len = dst_width / 32; + __m128i src0, src1, src2, src3, dst0, dst1; + (void)src_stride; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_ptr, 0, src_ptr, 16, src_ptr, 32, src_ptr, 48, + src0, src1, src2, src3); + DUP2_ARG2(__lsx_vpickod_b, src1, src0, src3, src2, dst0, dst1); + __lsx_vst(dst0, dst, 0); + __lsx_vst(dst1, dst, 16); + src_ptr += 64; + dst += 32; + } +} + +void ScaleRowDown2Linear_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width) { + int x; + int len = dst_width / 32; + __m128i src0, src1, src2, src3; + __m128i tmp0, tmp1, tmp2, tmp3, dst0, dst1; + (void)src_stride; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_ptr, 0, src_ptr, 16, src_ptr, 32, src_ptr, 48, + src0, src1, src2, src3); + DUP2_ARG2(__lsx_vpickev_b, src1, src0, src3, src2, tmp0, tmp2); + DUP2_ARG2(__lsx_vpickod_b, src1, src0, src3, src2, tmp1, tmp3); + DUP2_ARG2(__lsx_vavgr_bu, tmp0, tmp1, tmp2, tmp3, dst0, dst1); + __lsx_vst(dst0, dst, 0); + __lsx_vst(dst1, dst, 16); + src_ptr += 64; + dst += 32; + } +} + +void ScaleRowDown2Box_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width) { + int x; + int len = dst_width / 32; + const uint8_t* src_nex = src_ptr + src_stride; + __m128i src0, src1, src2, src3, src4, src5, src6, src7; + __m128i tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7; + __m128i dst0, dst1; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_ptr, 0, src_ptr, 16, src_ptr, 32, src_ptr, 48, + src0, src1, src2, src3); + DUP4_ARG2(__lsx_vld, src_nex, 0, src_nex, 16, src_nex, 32, src_nex, 48, + src4, src5, src6, src7); + DUP4_ARG2(__lsx_vaddwev_h_bu, src0, src4, src1, src5, src2, src6, src3, + src7, tmp0, tmp2, tmp4, tmp6); + DUP4_ARG2(__lsx_vaddwod_h_bu, src0, src4, src1, src5, src2, src6, src3, + src7, tmp1, tmp3, tmp5, tmp7); + DUP4_ARG2(__lsx_vadd_h, tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7, + tmp0, tmp1, tmp2, tmp3); + DUP2_ARG3(__lsx_vsrarni_b_h, tmp1, tmp0, 2, tmp3, tmp2, 2, dst0, dst1); + __lsx_vst(dst0, dst, 0); + __lsx_vst(dst1, dst, 16); + src_ptr += 64; + src_nex += 64; + dst += 32; + } +} + +void ScaleRowDown4_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width) { + int x; + int len = dst_width / 16; + __m128i src0, src1, src2, src3, tmp0, 
tmp1, dst0; + (void)src_stride; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_ptr, 0, src_ptr, 16, src_ptr, 32, src_ptr, 48, + src0, src1, src2, src3); + DUP2_ARG2(__lsx_vpickev_b, src1, src0, src3, src2, tmp0, tmp1); + dst0 = __lsx_vpickod_b(tmp1, tmp0); + __lsx_vst(dst0, dst, 0); + src_ptr += 64; + dst += 16; + } +} + +void ScaleRowDown4Box_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width) { + int x; + int len = dst_width / 16; + const uint8_t* ptr1 = src_ptr + src_stride; + const uint8_t* ptr2 = ptr1 + src_stride; + const uint8_t* ptr3 = ptr2 + src_stride; + __m128i src0, src1, src2, src3, src4, src5, src6, src7; + __m128i tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7; + __m128i reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7, dst0; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_ptr, 0, src_ptr, 16, src_ptr, 32, src_ptr, 48, + src0, src1, src2, src3); + DUP4_ARG2(__lsx_vld, ptr1, 0, ptr1, 16, ptr1, 32, ptr1, 48, src4, src5, + src6, src7); + DUP4_ARG2(__lsx_vaddwev_h_bu, src0, src4, src1, src5, src2, src6, src3, + src7, tmp0, tmp2, tmp4, tmp6); + DUP4_ARG2(__lsx_vaddwod_h_bu, src0, src4, src1, src5, src2, src6, src3, + src7, tmp1, tmp3, tmp5, tmp7); + DUP4_ARG2(__lsx_vadd_h, tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7, + reg0, reg1, reg2, reg3); + DUP4_ARG2(__lsx_vld, ptr2, 0, ptr2, 16, ptr2, 32, ptr2, 48, src0, src1, + src2, src3); + DUP4_ARG2(__lsx_vld, ptr3, 0, ptr3, 16, ptr3, 32, ptr3, 48, src4, src5, + src6, src7); + DUP4_ARG2(__lsx_vaddwev_h_bu, src0, src4, src1, src5, src2, src6, src3, + src7, tmp0, tmp2, tmp4, tmp6); + DUP4_ARG2(__lsx_vaddwod_h_bu, src0, src4, src1, src5, src2, src6, src3, + src7, tmp1, tmp3, tmp5, tmp7); + DUP4_ARG2(__lsx_vadd_h, tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7, + reg4, reg5, reg6, reg7); + DUP4_ARG2(__lsx_vadd_h, reg0, reg4, reg1, reg5, reg2, reg6, reg3, reg7, + reg0, reg1, reg2, reg3); + DUP4_ARG2(__lsx_vhaddw_wu_hu, reg0, reg0, reg1, reg1, reg2, reg2, reg3, + reg3, reg0, reg1, reg2, reg3); + DUP2_ARG3(__lsx_vsrarni_h_w, reg1, reg0, 4, reg3, reg2, 4, tmp0, tmp1); + dst0 = __lsx_vpickev_b(tmp1, tmp0); + __lsx_vst(dst0, dst, 0); + src_ptr += 64; + ptr1 += 64; + ptr2 += 64; + ptr3 += 64; + dst += 16; + } +} + +void ScaleRowDown38_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width) { + int x, len; + __m128i src0, src1, tmp0; + __m128i shuff = {0x13100E0B08060300, 0x000000001E1B1816}; + + assert(dst_width % 3 == 0); + len = dst_width / 12; + (void)src_stride; + + for (x = 0; x < len; x++) { + DUP2_ARG2(__lsx_vld, src_ptr, 0, src_ptr, 16, src0, src1); + tmp0 = __lsx_vshuf_b(src1, src0, shuff); + __lsx_vstelm_d(tmp0, dst, 0, 0); + __lsx_vstelm_w(tmp0, dst, 8, 2); + src_ptr += 32; + dst += 12; + } +} + +void ScaleRowDown38_2_Box_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width) { + int x, len; + const uint8_t* src_nex = src_ptr + src_stride; + __m128i src0, src1, src2, src3, dst0; + __m128i tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7; + __m128i reg0, reg1, reg2, reg3; + __m128i shuff = {0x0A08160604120200, 0x000000001E0E0C1A}; + __m128i const_0x2AAA = __lsx_vreplgr2vr_h(0x2AAA); + __m128i const_0x4000 = __lsx_vreplgr2vr_w(0x4000); + + assert((dst_width % 3 == 0) && (dst_width > 0)); + len = dst_width / 12; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_ptr, 0, src_ptr, 16, src_nex, 0, src_nex, 16, src0, + src1, src2, src3); + DUP2_ARG2(__lsx_vaddwev_h_bu, src0, src2, src1, src3, tmp0, tmp2); + 
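+ // The widening adds above and below sum the two source rows; of every 8x2
+ // input block the kernel then emits 3 bytes: two means of 3x2 sub-blocks
+ // (via multiply-high by 0x2AAA, roughly 1/6) and one mean of a 2x2
+ // sub-block (roughly 1/4).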
DUP2_ARG2(__lsx_vaddwod_h_bu, src0, src2, src1, src3, tmp1, tmp3); + DUP2_ARG2(__lsx_vpickev_h, tmp2, tmp0, tmp3, tmp1, reg0, reg1); + DUP2_ARG2(__lsx_vpackod_h, tmp1, tmp0, tmp3, tmp2, reg2, reg3); + tmp4 = __lsx_vpickev_w(reg3, reg2); + tmp5 = __lsx_vadd_h(reg0, reg1); + tmp6 = __lsx_vadd_h(tmp5, tmp4); + tmp7 = __lsx_vmuh_h(tmp6, const_0x2AAA); + tmp0 = __lsx_vpickod_w(reg3, reg2); + tmp1 = __lsx_vhaddw_wu_hu(tmp0, tmp0); + tmp2 = __lsx_vmul_w(tmp1, const_0x4000); + dst0 = __lsx_vshuf_b(tmp2, tmp7, shuff); + __lsx_vstelm_d(dst0, dst_ptr, 0, 0); + __lsx_vstelm_w(dst0, dst_ptr, 8, 2); + src_ptr += 32; + src_nex += 32; + dst_ptr += 12; + } +} + +void ScaleRowDown38_3_Box_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + int dst_width) { + int x, len; + const uint8_t* ptr1 = src_ptr + src_stride; + const uint8_t* ptr2 = ptr1 + src_stride; + __m128i src0, src1, src2, src3, src4, src5; + __m128i tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7; + __m128i reg0, reg1, reg2, reg3, dst0; + __m128i zero = __lsx_vldi(0); + __m128i shuff = {0x0A08160604120200, 0x000000001E0E0C1A}; + __m128i const_0x1C71 = __lsx_vreplgr2vr_h(0x1C71); + __m128i const_0x2AAA = __lsx_vreplgr2vr_w(0x2AAA); + + assert((dst_width % 3 == 0) && (dst_width > 0)); + len = dst_width / 12; + + for (x = 0; x < len; x++) { + DUP4_ARG2(__lsx_vld, src_ptr, 0, src_ptr, 16, ptr1, 0, ptr1, 16, src0, src1, + src2, src3); + DUP2_ARG2(__lsx_vld, ptr2, 0, ptr2, 16, src4, src5); + DUP2_ARG2(__lsx_vaddwev_h_bu, src0, src2, src1, src3, tmp0, tmp2); + DUP2_ARG2(__lsx_vaddwod_h_bu, src0, src2, src1, src3, tmp1, tmp3); + DUP2_ARG2(__lsx_vpackev_b, zero, src4, zero, src5, tmp4, tmp6); + DUP2_ARG2(__lsx_vpackod_b, zero, src4, zero, src5, tmp5, tmp7); + DUP4_ARG2(__lsx_vadd_h, tmp0, tmp4, tmp1, tmp5, tmp2, tmp6, tmp3, tmp7, + tmp0, tmp1, tmp2, tmp3); + DUP2_ARG2(__lsx_vpickev_h, tmp2, tmp0, tmp3, tmp1, reg0, reg1); + DUP2_ARG2(__lsx_vpackod_h, tmp1, tmp0, tmp3, tmp2, reg2, reg3); + tmp4 = __lsx_vpickev_w(reg3, reg2); + tmp5 = __lsx_vadd_h(reg0, reg1); + tmp6 = __lsx_vadd_h(tmp5, tmp4); + tmp7 = __lsx_vmuh_h(tmp6, const_0x1C71); + tmp0 = __lsx_vpickod_w(reg3, reg2); + tmp1 = __lsx_vhaddw_wu_hu(tmp0, tmp0); + tmp2 = __lsx_vmul_w(tmp1, const_0x2AAA); + dst0 = __lsx_vshuf_b(tmp2, tmp7, shuff); + __lsx_vstelm_d(dst0, dst_ptr, 0, 0); + __lsx_vstelm_w(dst0, dst_ptr, 8, 2); + src_ptr += 32; + ptr1 += 32; + ptr2 += 32; + dst_ptr += 12; + } +} + +void ScaleAddRow_LSX(const uint8_t* src_ptr, uint16_t* dst_ptr, int src_width) { + int x; + int len = src_width / 16; + __m128i src0, tmp0, tmp1, dst0, dst1; + __m128i zero = __lsx_vldi(0); + + assert(src_width > 0); + + for (x = 0; x < len; x++) { + src0 = __lsx_vld(src_ptr, 0); + DUP2_ARG2(__lsx_vld, dst_ptr, 0, dst_ptr, 16, dst0, dst1); + tmp0 = __lsx_vilvl_b(zero, src0); + tmp1 = __lsx_vilvh_b(zero, src0); + DUP2_ARG2(__lsx_vadd_h, dst0, tmp0, dst1, tmp1, dst0, dst1); + __lsx_vst(dst0, dst_ptr, 0); + __lsx_vst(dst1, dst_ptr, 16); + src_ptr += 16; + dst_ptr += 16; + } +} + +void ScaleFilterCols_LSX(uint8_t* dst_ptr, + const uint8_t* src_ptr, + int dst_width, + int x, + int dx) { + int j; + int len = dst_width / 16; + __m128i tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7; + __m128i reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7; + __m128i vec0, vec1, dst0; + __m128i vec_x = __lsx_vreplgr2vr_w(x); + __m128i vec_dx = __lsx_vreplgr2vr_w(dx); + __m128i const1 = __lsx_vreplgr2vr_w(0xFFFF); + __m128i const2 = __lsx_vreplgr2vr_w(0x40); + __m128i const_tmp = {0x0000000100000000, 0x0000000300000002}; 
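+ // The setup below spreads x into four 16.16 fixed-point source positions
+ // (x, x+dx, x+2*dx, x+3*dx) and advances them by 4*dx per group of four; in
+ // the loop, >>16 extracts the integer sample index, (x & 0xFFFF) >> 9 a
+ // 7-bit fraction f, and each output byte is a + (((b - a) * f + 64) >> 7).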
+ + vec0 = __lsx_vmul_w(vec_dx, const_tmp); + vec1 = __lsx_vslli_w(vec_dx, 2); + vec_x = __lsx_vadd_w(vec_x, vec0); + + for (j = 0; j < len; j++) { + tmp0 = __lsx_vsrai_w(vec_x, 16); + tmp4 = __lsx_vand_v(vec_x, const1); + vec_x = __lsx_vadd_w(vec_x, vec1); + tmp1 = __lsx_vsrai_w(vec_x, 16); + tmp5 = __lsx_vand_v(vec_x, const1); + vec_x = __lsx_vadd_w(vec_x, vec1); + tmp2 = __lsx_vsrai_w(vec_x, 16); + tmp6 = __lsx_vand_v(vec_x, const1); + vec_x = __lsx_vadd_w(vec_x, vec1); + tmp3 = __lsx_vsrai_w(vec_x, 16); + tmp7 = __lsx_vand_v(vec_x, const1); + vec_x = __lsx_vadd_w(vec_x, vec1); + DUP4_ARG2(__lsx_vsrai_w, tmp4, 9, tmp5, 9, tmp6, 9, tmp7, 9, tmp4, tmp5, + tmp6, tmp7); + LOAD_DATA(src_ptr, tmp0, reg0); + LOAD_DATA(src_ptr, tmp1, reg1); + LOAD_DATA(src_ptr, tmp2, reg2); + LOAD_DATA(src_ptr, tmp3, reg3); + DUP4_ARG2(__lsx_vaddi_wu, tmp0, 1, tmp1, 1, tmp2, 1, tmp3, 1, tmp0, tmp1, + tmp2, tmp3); + LOAD_DATA(src_ptr, tmp0, reg4); + LOAD_DATA(src_ptr, tmp1, reg5); + LOAD_DATA(src_ptr, tmp2, reg6); + LOAD_DATA(src_ptr, tmp3, reg7); + DUP4_ARG2(__lsx_vsub_w, reg4, reg0, reg5, reg1, reg6, reg2, reg7, reg3, + reg4, reg5, reg6, reg7); + DUP4_ARG2(__lsx_vmul_w, reg4, tmp4, reg5, tmp5, reg6, tmp6, reg7, tmp7, + reg4, reg5, reg6, reg7); + DUP4_ARG2(__lsx_vadd_w, reg4, const2, reg5, const2, reg6, const2, reg7, + const2, reg4, reg5, reg6, reg7); + DUP4_ARG2(__lsx_vsrai_w, reg4, 7, reg5, 7, reg6, 7, reg7, 7, reg4, reg5, + reg6, reg7); + DUP4_ARG2(__lsx_vadd_w, reg0, reg4, reg1, reg5, reg2, reg6, reg3, reg7, + reg0, reg1, reg2, reg3); + DUP2_ARG2(__lsx_vpickev_h, reg1, reg0, reg3, reg2, tmp0, tmp1); + dst0 = __lsx_vpickev_b(tmp1, tmp0); + __lsx_vst(dst0, dst_ptr, 0); + dst_ptr += 16; + } +} + +void ScaleARGBCols_LSX(uint8_t* dst_argb, + const uint8_t* src_argb, + int dst_width, + int x, + int dx) { + const uint32_t* src = (const uint32_t*)src_argb; + uint32_t* dst = (uint32_t*)dst_argb; + int j; + int len = dst_width / 4; + __m128i tmp0, tmp1, tmp2, dst0; + __m128i vec_x = __lsx_vreplgr2vr_w(x); + __m128i vec_dx = __lsx_vreplgr2vr_w(dx); + __m128i const_tmp = {0x0000000100000000, 0x0000000300000002}; + + tmp0 = __lsx_vmul_w(vec_dx, const_tmp); + tmp1 = __lsx_vslli_w(vec_dx, 2); + vec_x = __lsx_vadd_w(vec_x, tmp0); + + for (j = 0; j < len; j++) { + tmp2 = __lsx_vsrai_w(vec_x, 16); + vec_x = __lsx_vadd_w(vec_x, tmp1); + LOAD_DATA(src, tmp2, dst0); + __lsx_vst(dst0, dst, 0); + dst += 4; + } +} + +void ScaleARGBFilterCols_LSX(uint8_t* dst_argb, + const uint8_t* src_argb, + int dst_width, + int x, + int dx) { + const uint32_t* src = (const uint32_t*)src_argb; + int j; + int len = dst_width / 8; + __m128i src0, src1, src2, src3; + __m128i tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7; + __m128i reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7; + __m128i vec0, vec1, dst0, dst1; + __m128i vec_x = __lsx_vreplgr2vr_w(x); + __m128i vec_dx = __lsx_vreplgr2vr_w(dx); + __m128i const_tmp = {0x0000000100000000, 0x0000000300000002}; + __m128i const_7f = __lsx_vldi(0x7F); + + vec0 = __lsx_vmul_w(vec_dx, const_tmp); + vec1 = __lsx_vslli_w(vec_dx, 2); + vec_x = __lsx_vadd_w(vec_x, vec0); + + for (j = 0; j < len; j++) { + tmp0 = __lsx_vsrai_w(vec_x, 16); + reg0 = __lsx_vsrai_w(vec_x, 9); + vec_x = __lsx_vadd_w(vec_x, vec1); + tmp1 = __lsx_vsrai_w(vec_x, 16); + reg1 = __lsx_vsrai_w(vec_x, 9); + vec_x = __lsx_vadd_w(vec_x, vec1); + DUP2_ARG2(__lsx_vand_v, reg0, const_7f, reg1, const_7f, reg0, reg1); + DUP2_ARG2(__lsx_vshuf4i_b, reg0, 0, reg1, 0, reg0, reg1); + DUP2_ARG2(__lsx_vxor_v, reg0, const_7f, reg1, const_7f, reg2, reg3); + 
DUP2_ARG2(__lsx_vilvl_b, reg0, reg2, reg1, reg3, reg4, reg6); + DUP2_ARG2(__lsx_vilvh_b, reg0, reg2, reg1, reg3, reg5, reg7); + LOAD_DATA(src, tmp0, src0); + LOAD_DATA(src, tmp1, src1); + DUP2_ARG2(__lsx_vaddi_wu, tmp0, 1, tmp1, 1, tmp0, tmp1); + LOAD_DATA(src, tmp0, src2); + LOAD_DATA(src, tmp1, src3); + DUP2_ARG2(__lsx_vilvl_b, src2, src0, src3, src1, tmp4, tmp6); + DUP2_ARG2(__lsx_vilvh_b, src2, src0, src3, src1, tmp5, tmp7); + DUP4_ARG2(__lsx_vdp2_h_bu, tmp4, reg4, tmp5, reg5, tmp6, reg6, tmp7, reg7, + tmp0, tmp1, tmp2, tmp3); + DUP2_ARG3(__lsx_vsrani_b_h, tmp1, tmp0, 7, tmp3, tmp2, 7, dst0, dst1); + __lsx_vst(dst0, dst_argb, 0); + __lsx_vst(dst1, dst_argb, 16); + dst_argb += 32; + } +} + +void ScaleRowDown34_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst, + int dst_width) { + int x; + (void)src_stride; + __m128i src0, src1, src2, src3; + __m128i dst0, dst1, dst2; + __m128i shuff0 = {0x0908070504030100, 0x141311100F0D0C0B}; + __m128i shuff1 = {0x0F0D0C0B09080705, 0x1918171514131110}; + __m128i shuff2 = {0x141311100F0D0C0B, 0x1F1D1C1B19181715}; + + assert((dst_width % 3 == 0) && (dst_width > 0)); + + for (x = 0; x < dst_width; x += 48) { + DUP4_ARG2(__lsx_vld, src_ptr, 0, src_ptr, 16, src_ptr, 32, src_ptr, 48, + src0, src1, src2, src3); + DUP2_ARG3(__lsx_vshuf_b, src1, src0, shuff0, src2, src1, shuff1, dst0, + dst1); + dst2 = __lsx_vshuf_b(src3, src2, shuff2); + __lsx_vst(dst0, dst, 0); + __lsx_vst(dst1, dst, 16); + __lsx_vst(dst2, dst, 32); + src_ptr += 64; + dst += 48; + } +} + +void ScaleRowDown34_0_Box_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* d, + int dst_width) { + const uint8_t* src_nex = src_ptr + src_stride; + int x; + __m128i src0, src1, src2, src3, src4, src5, src6, src7; + __m128i tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7, tmp8, tmp9; + __m128i tmp10, tmp11, dst0, dst1, dst2; + __m128i const0 = {0x0103030101010103, 0x0101010303010101}; + __m128i const1 = {0x0301010101030301, 0x0103030101010103}; + __m128i const2 = {0x0101010303010101, 0x0301010101030301}; + __m128i shuff0 = {0x0504030202010100, 0x0A09090807060605}; + __m128i shuff1 = {0x0F0E0E0D0D0C0B0A, 0x1514131212111110}; + __m128i shuff2 = {0x0A09090807060605, 0x0F0E0E0D0D0C0B0A}; + __m128i shift0 = {0x0002000200010002, 0x0001000200020001}; + __m128i shift1 = {0x0002000100020002, 0x0002000200010002}; + __m128i shift2 = {0x0001000200020001, 0x0002000100020002}; + + assert((dst_width % 3 == 0) && (dst_width > 0)); + + for (x = 0; x < dst_width; x += 48) { + DUP4_ARG2(__lsx_vld, src_ptr, 0, src_ptr, 16, src_ptr, 32, src_ptr, 48, + src0, src1, src2, src3); + DUP4_ARG2(__lsx_vld, src_nex, 0, src_nex, 16, src_nex, 32, src_nex, 48, + src4, src5, src6, src7); + DUP4_ARG3(__lsx_vshuf_b, src0, src0, shuff0, src1, src0, shuff1, src1, src1, + shuff2, src2, src2, shuff0, tmp0, tmp1, tmp2, tmp3); + DUP4_ARG3(__lsx_vshuf_b, src3, src2, shuff1, src3, src3, shuff2, src4, src4, + shuff0, src5, src4, shuff1, tmp4, tmp5, tmp6, tmp7); + DUP4_ARG3(__lsx_vshuf_b, src5, src5, shuff2, src6, src6, shuff0, src7, src6, + shuff1, src7, src7, shuff2, tmp8, tmp9, tmp10, tmp11); + DUP4_ARG2(__lsx_vdp2_h_bu, tmp0, const0, tmp1, const1, tmp2, const2, tmp3, + const0, src0, src1, src2, src3); + DUP4_ARG2(__lsx_vdp2_h_bu, tmp4, const1, tmp5, const2, tmp6, const0, tmp7, + const1, src4, src5, src6, src7); + DUP4_ARG2(__lsx_vdp2_h_bu, tmp8, const2, tmp9, const0, tmp10, const1, tmp11, + const2, tmp0, tmp1, tmp2, tmp3); + DUP4_ARG2(__lsx_vsrar_h, src0, shift0, src1, shift1, src2, shift2, src3, + shift0, src0, src1, 
src2, src3); + DUP4_ARG2(__lsx_vsrar_h, src4, shift1, src5, shift2, src6, shift0, src7, + shift1, src4, src5, src6, src7); + DUP4_ARG2(__lsx_vsrar_h, tmp0, shift2, tmp1, shift0, tmp2, shift1, tmp3, + shift2, tmp0, tmp1, tmp2, tmp3); + DUP4_ARG2(__lsx_vslli_h, src0, 1, src1, 1, src2, 1, src3, 1, tmp5, tmp6, + tmp7, tmp8); + DUP2_ARG2(__lsx_vslli_h, src4, 1, src5, 1, tmp9, tmp10); + DUP4_ARG2(__lsx_vadd_h, src0, tmp5, src1, tmp6, src2, tmp7, src3, tmp8, + src0, src1, src2, src3); + DUP2_ARG2(__lsx_vadd_h, src4, tmp9, src5, tmp10, src4, src5); + DUP4_ARG2(__lsx_vadd_h, src0, src6, src1, src7, src2, tmp0, src3, tmp1, + src0, src1, src2, src3); + DUP2_ARG2(__lsx_vadd_h, src4, tmp2, src5, tmp3, src4, src5); + DUP2_ARG3(__lsx_vsrarni_b_h, src1, src0, 2, src3, src2, 2, dst0, dst1); + dst2 = __lsx_vsrarni_b_h(src5, src4, 2); + __lsx_vst(dst0, d, 0); + __lsx_vst(dst1, d, 16); + __lsx_vst(dst2, d, 32); + src_ptr += 64; + src_nex += 64; + d += 48; + } +} + +void ScaleRowDown34_1_Box_LSX(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* d, + int dst_width) { + const uint8_t* src_nex = src_ptr + src_stride; + int x; + __m128i src0, src1, src2, src3, src4, src5, src6, src7; + __m128i tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7, tmp8, tmp9; + __m128i tmp10, tmp11, dst0, dst1, dst2; + __m128i const0 = {0x0103030101010103, 0x0101010303010101}; + __m128i const1 = {0x0301010101030301, 0x0103030101010103}; + __m128i const2 = {0x0101010303010101, 0x0301010101030301}; + __m128i shuff0 = {0x0504030202010100, 0x0A09090807060605}; + __m128i shuff1 = {0x0F0E0E0D0D0C0B0A, 0x1514131212111110}; + __m128i shuff2 = {0x0A09090807060605, 0x0F0E0E0D0D0C0B0A}; + __m128i shift0 = {0x0002000200010002, 0x0001000200020001}; + __m128i shift1 = {0x0002000100020002, 0x0002000200010002}; + __m128i shift2 = {0x0001000200020001, 0x0002000100020002}; + + assert((dst_width % 3 == 0) && (dst_width > 0)); + + for (x = 0; x < dst_width; x += 48) { + DUP4_ARG2(__lsx_vld, src_ptr, 0, src_ptr, 16, src_ptr, 32, src_ptr, 48, + src0, src1, src2, src3); + DUP4_ARG2(__lsx_vld, src_nex, 0, src_nex, 16, src_nex, 32, src_nex, 48, + src4, src5, src6, src7); + DUP4_ARG3(__lsx_vshuf_b, src0, src0, shuff0, src1, src0, shuff1, src1, src1, + shuff2, src2, src2, shuff0, tmp0, tmp1, tmp2, tmp3); + DUP4_ARG3(__lsx_vshuf_b, src3, src2, shuff1, src3, src3, shuff2, src4, src4, + shuff0, src5, src4, shuff1, tmp4, tmp5, tmp6, tmp7); + DUP4_ARG3(__lsx_vshuf_b, src5, src5, shuff2, src6, src6, shuff0, src7, src6, + shuff1, src7, src7, shuff2, tmp8, tmp9, tmp10, tmp11); + DUP4_ARG2(__lsx_vdp2_h_bu, tmp0, const0, tmp1, const1, tmp2, const2, tmp3, + const0, src0, src1, src2, src3); + DUP4_ARG2(__lsx_vdp2_h_bu, tmp4, const1, tmp5, const2, tmp6, const0, tmp7, + const1, src4, src5, src6, src7); + DUP4_ARG2(__lsx_vdp2_h_bu, tmp8, const2, tmp9, const0, tmp10, const1, tmp11, + const2, tmp0, tmp1, tmp2, tmp3); + DUP4_ARG2(__lsx_vsrar_h, src0, shift0, src1, shift1, src2, shift2, src3, + shift0, src0, src1, src2, src3); + DUP4_ARG2(__lsx_vsrar_h, src4, shift1, src5, shift2, src6, shift0, src7, + shift1, src4, src5, src6, src7); + DUP4_ARG2(__lsx_vsrar_h, tmp0, shift2, tmp1, shift0, tmp2, shift1, tmp3, + shift2, tmp0, tmp1, tmp2, tmp3); + DUP4_ARG2(__lsx_vadd_h, src0, src6, src1, src7, src2, tmp0, src3, tmp1, + src0, src1, src2, src3); + DUP2_ARG2(__lsx_vadd_h, src4, tmp2, src5, tmp3, src4, src5); + DUP2_ARG3(__lsx_vsrarni_b_h, src1, src0, 1, src3, src2, 1, dst0, dst1); + dst2 = __lsx_vsrarni_b_h(src5, src4, 1); + __lsx_vst(dst0, d, 0); + __lsx_vst(dst1, d, 16); + 
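/*
 * Illustrative scalar sketch (not part of this patch): the two-row 3/4-width
 * box kernels here and in the NEON files below reduce every 4 source columns
 * to 3 output columns with 3:1 / 1:1 / 1:3 weights after combining the two
 * source rows.  Rounding order differs between the scalar and vector paths,
 * so results may not be bit-identical to this sketch.
 */
#if 0 /* guarded sketch; edge handling omitted */
static void ScaleRowDown34_1_Box_Sketch(const uint8_t* src_ptr,
                                        ptrdiff_t src_stride,
                                        uint8_t* dst,
                                        int dst_width) {
  const uint8_t* s = src_ptr;
  const uint8_t* t = src_ptr + src_stride;
  int x;
  for (x = 0; x < dst_width; x += 3) {
    /* Rounded average of the two source rows. */
    int p0 = (s[0] + t[0] + 1) >> 1;
    int p1 = (s[1] + t[1] + 1) >> 1;
    int p2 = (s[2] + t[2] + 1) >> 1;
    int p3 = (s[3] + t[3] + 1) >> 1;
    /* 4 columns -> 3 columns, matching the a0/a1/a2 comments in scale_neon. */
    dst[0] = (uint8_t)((p0 * 3 + p1 + 2) >> 2);
    dst[1] = (uint8_t)((p1 + p2 + 1) >> 1);
    dst[2] = (uint8_t)((p2 + p3 * 3 + 2) >> 2);
    s += 4;
    t += 4;
    dst += 3;
  }
}
#endif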
__lsx_vst(dst2, d, 32); + src_ptr += 64; + src_nex += 64; + d += 48; + } +} + +#ifdef __cplusplus +} // extern "C" +} // namespace libyuv +#endif + +#endif // !defined(LIBYUV_DISABLE_LSX) && defined(__loongarch_sx) diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_neon.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_neon.cc index 572b4bfa9b..6a0d6e1b49 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale_neon.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_neon.cc @@ -194,21 +194,21 @@ void ScaleRowDown34_0_Box_NEON(const uint8_t* src_ptr, "vmlal.u8 q10, d2, d24 \n" "vmlal.u8 q11, d3, d24 \n" - // (3 * line_0 + line_1) >> 2 + // (3 * line_0 + line_1 + 2) >> 2 "vqrshrn.u16 d0, q8, #2 \n" "vqrshrn.u16 d1, q9, #2 \n" "vqrshrn.u16 d2, q10, #2 \n" "vqrshrn.u16 d3, q11, #2 \n" - // a0 = (src[0] * 3 + s[1] * 1) >> 2 + // a0 = (src[0] * 3 + s[1] * 1 + 2) >> 2 "vmovl.u8 q8, d1 \n" "vmlal.u8 q8, d0, d24 \n" "vqrshrn.u16 d0, q8, #2 \n" - // a1 = (src[1] * 1 + s[2] * 1) >> 1 + // a1 = (src[1] * 1 + s[2] * 1 + 1) >> 1 "vrhadd.u8 d1, d1, d2 \n" - // a2 = (src[2] * 1 + s[3] * 3) >> 2 + // a2 = (src[2] * 1 + s[3] * 3 + 2) >> 2 "vmovl.u8 q8, d2 \n" "vmlal.u8 q8, d3, d24 \n" "vqrshrn.u16 d2, q8, #2 \n" @@ -240,15 +240,15 @@ void ScaleRowDown34_1_Box_NEON(const uint8_t* src_ptr, "vrhadd.u8 q0, q0, q2 \n" "vrhadd.u8 q1, q1, q3 \n" - // a0 = (src[0] * 3 + s[1] * 1) >> 2 + // a0 = (src[0] * 3 + s[1] * 1 + 2) >> 2 "vmovl.u8 q3, d1 \n" "vmlal.u8 q3, d0, d24 \n" "vqrshrn.u16 d0, q3, #2 \n" - // a1 = (src[1] * 1 + s[2] * 1) >> 1 + // a1 = (src[1] * 1 + s[2] * 1 + 1) >> 1 "vrhadd.u8 d1, d1, d2 \n" - // a2 = (src[2] * 1 + s[3] * 3) >> 2 + // a2 = (src[2] * 1 + s[3] * 3 + 2) >> 2 "vmovl.u8 q3, d2 \n" "vmlal.u8 q3, d3, d24 \n" "vqrshrn.u16 d2, q3, #2 \n" @@ -504,6 +504,484 @@ void ScaleRowDown38_2_Box_NEON(const uint8_t* src_ptr, : "q0", "q1", "q2", "q3", "q13", "q14", "memory", "cc"); } +void ScaleRowUp2_Linear_NEON(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width) { + const uint8_t* src_temp = src_ptr + 1; + asm volatile( + "vmov.u8 d30, #3 \n" + + "1: \n" + "vld1.8 {d4}, [%0]! \n" // 01234567 + "vld1.8 {d5}, [%3]! \n" // 12345678 + + "vmovl.u8 q0, d4 \n" // 01234567 (16b) + "vmovl.u8 q1, d5 \n" // 12345678 (16b) + "vmlal.u8 q0, d5, d30 \n" // 3*near+far (odd) + "vmlal.u8 q1, d4, d30 \n" // 3*near+far (even) + + "vrshrn.u16 d1, q0, #2 \n" // 3/4*near+1/4*far (odd) + "vrshrn.u16 d0, q1, #2 \n" // 3/4*near+1/4*far (even) + + "vst2.8 {d0, d1}, [%1]! \n" // store + "subs %2, %2, #16 \n" // 8 sample -> 16 sample + "bgt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width), // %2 + "+r"(src_temp) // %3 + : + : "memory", "cc", "q0", "q1", "q2", "q15" // Clobber List + ); +} + +void ScaleRowUp2_Bilinear_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + const uint8_t* src_ptr1 = src_ptr + src_stride; + uint8_t* dst_ptr1 = dst_ptr + dst_stride; + const uint8_t* src_temp = src_ptr + 1; + const uint8_t* src_temp1 = src_ptr1 + 1; + + asm volatile( + "vmov.u16 q15, #3 \n" + "vmov.u8 d28, #3 \n" + + "1: \n" + "vld1.8 {d4}, [%0]! \n" // 01234567 + "vld1.8 {d5}, [%5]! \n" // 12345678 + + "vmovl.u8 q0, d4 \n" // 01234567 (16b) + "vmovl.u8 q1, d5 \n" // 12345678 (16b) + "vmlal.u8 q0, d5, d28 \n" // 3*near+far (1, odd) + "vmlal.u8 q1, d4, d28 \n" // 3*near+far (1, even) + + "vld1.8 {d8}, [%1]! \n" + "vld1.8 {d9}, [%6]! 
\n" + + "vmovl.u8 q2, d8 \n" + "vmovl.u8 q3, d9 \n" + "vmlal.u8 q2, d9, d28 \n" // 3*near+far (2, odd) + "vmlal.u8 q3, d8, d28 \n" // 3*near+far (2, even) + + // e o + // q1 q0 + // q3 q2 + + "vmovq q4, q2 \n" + "vmovq q5, q3 \n" + "vmla.u16 q4, q0, q15 \n" // 9 3 3 1 (1, odd) + "vmla.u16 q5, q1, q15 \n" // 9 3 3 1 (1, even) + "vmla.u16 q0, q2, q15 \n" // 9 3 3 1 (2, odd) + "vmla.u16 q1, q3, q15 \n" // 9 3 3 1 (2, even) + + // e o + // q5 q4 + // q1 q0 + + "vrshrn.u16 d2, q1, #4 \n" // 2, even + "vrshrn.u16 d3, q0, #4 \n" // 2, odd + "vrshrn.u16 d0, q5, #4 \n" // 1, even + "vrshrn.u16 d1, q4, #4 \n" // 1, odd + + "vst2.8 {d0, d1}, [%2]! \n" // store + "vst2.8 {d2, d3}, [%3]! \n" // store + "subs %4, %4, #16 \n" // 8 sample -> 16 sample + "bgt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_ptr1), // %1 + "+r"(dst_ptr), // %2 + "+r"(dst_ptr1), // %3 + "+r"(dst_width), // %4 + "+r"(src_temp), // %5 + "+r"(src_temp1) // %6 + : + : "memory", "cc", "q0", "q1", "q2", "q3", "q4", "q5", "d28", + "q15" // Clobber List + ); +} + +void ScaleRowUp2_Linear_12_NEON(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width) { + const uint16_t* src_temp = src_ptr + 1; + asm volatile( + "vmov.u16 q15, #3 \n" + + "1: \n" + "vld1.16 {q1}, [%0]! \n" // 01234567 (16b) + "vld1.16 {q0}, [%3]! \n" // 12345678 (16b) + + "vmovq q2, q0 \n" + "vmla.u16 q0, q1, q15 \n" // 3*near+far (odd) + "vmla.u16 q1, q2, q15 \n" // 3*near+far (even) + + "vrshr.u16 q0, q0, #2 \n" // 3/4*near+1/4*far (odd) + "vrshr.u16 q1, q1, #2 \n" // 3/4*near+1/4*far (even) + + "vst2.16 {d0, d1, d2, d3}, [%1]! \n" // store + "subs %2, %2, #16 \n" // 8 sample -> 16 sample + "bgt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width), // %2 + "+r"(src_temp) // %3 + : + : "memory", "cc", "q0", "q1", "q2", "q15" // Clobber List + ); +} + +void ScaleRowUp2_Bilinear_12_NEON(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + const uint16_t* src_ptr1 = src_ptr + src_stride; + uint16_t* dst_ptr1 = dst_ptr + dst_stride; + const uint16_t* src_temp = src_ptr + 1; + const uint16_t* src_temp1 = src_ptr1 + 1; + + asm volatile( + "vmov.u16 q15, #3 \n" + + "1: \n" + "vld1.16 {q0}, [%0]! \n" // 01234567 (16b) + "vld1.16 {q1}, [%5]! \n" // 12345678 (16b) + + "vmovq q2, q0 \n" + "vmla.u16 q0, q1, q15 \n" // 3*near+far (odd) + "vmla.u16 q1, q2, q15 \n" // 3*near+far (even) + + "vld1.16 {q2}, [%1]! \n" // 01234567 (16b) + "vld1.16 {q3}, [%6]! \n" // 12345678 (16b) + + "vmovq q4, q2 \n" + "vmla.u16 q2, q3, q15 \n" // 3*near+far (odd) + "vmla.u16 q3, q4, q15 \n" // 3*near+far (even) + + "vmovq q4, q2 \n" + "vmovq q5, q3 \n" + "vmla.u16 q4, q0, q15 \n" // 9 3 3 1 (1, odd) + "vmla.u16 q5, q1, q15 \n" // 9 3 3 1 (1, even) + "vmla.u16 q0, q2, q15 \n" // 9 3 3 1 (2, odd) + "vmla.u16 q1, q3, q15 \n" // 9 3 3 1 (2, even) + + "vrshr.u16 q2, q1, #4 \n" // 2, even + "vrshr.u16 q3, q0, #4 \n" // 2, odd + "vrshr.u16 q0, q5, #4 \n" // 1, even + "vrshr.u16 q1, q4, #4 \n" // 1, odd + + "vst2.16 {d0, d1, d2, d3}, [%2]! \n" // store + "vst2.16 {d4, d5, d6, d7}, [%3]! 
\n" // store + "subs %4, %4, #16 \n" // 8 sample -> 16 sample + "bgt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_ptr1), // %1 + "+r"(dst_ptr), // %2 + "+r"(dst_ptr1), // %3 + "+r"(dst_width), // %4 + "+r"(src_temp), // %5 + "+r"(src_temp1) // %6 + : + : "memory", "cc", "q0", "q1", "q2", "q3", "q4", "q5", + "q15" // Clobber List + ); +} + +void ScaleRowUp2_Linear_16_NEON(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width) { + const uint16_t* src_temp = src_ptr + 1; + asm volatile( + "vmov.u16 d31, #3 \n" + + "1: \n" + "vld1.16 {q0}, [%0]! \n" // 01234567 (16b) + "vld1.16 {q1}, [%3]! \n" // 12345678 (16b) + + "vmovl.u16 q2, d0 \n" // 0123 (32b) + "vmovl.u16 q3, d1 \n" // 4567 (32b) + "vmovl.u16 q4, d2 \n" // 1234 (32b) + "vmovl.u16 q5, d3 \n" // 5678 (32b) + + "vmlal.u16 q2, d2, d31 \n" + "vmlal.u16 q3, d3, d31 \n" + "vmlal.u16 q4, d0, d31 \n" + "vmlal.u16 q5, d1, d31 \n" + + "vrshrn.u32 d0, q4, #2 \n" + "vrshrn.u32 d1, q5, #2 \n" + "vrshrn.u32 d2, q2, #2 \n" + "vrshrn.u32 d3, q3, #2 \n" + + "vst2.16 {q0, q1}, [%1]! \n" // store + "subs %2, %2, #16 \n" // 8 sample -> 16 sample + "bgt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width), // %2 + "+r"(src_temp) // %3 + : + : "memory", "cc", "q0", "q1", "q2", "q15" // Clobber List + ); +} + +void ScaleRowUp2_Bilinear_16_NEON(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + const uint16_t* src_ptr1 = src_ptr + src_stride; + uint16_t* dst_ptr1 = dst_ptr + dst_stride; + const uint16_t* src_temp = src_ptr + 1; + const uint16_t* src_temp1 = src_ptr1 + 1; + + asm volatile( + "vmov.u16 d31, #3 \n" + "vmov.u32 q14, #3 \n" + + "1: \n" + "vld1.16 {d0}, [%0]! \n" // 0123 (16b) + "vld1.16 {d1}, [%5]! \n" // 1234 (16b) + "vmovl.u16 q2, d0 \n" // 0123 (32b) + "vmovl.u16 q3, d1 \n" // 1234 (32b) + "vmlal.u16 q2, d1, d31 \n" + "vmlal.u16 q3, d0, d31 \n" + + "vld1.16 {d0}, [%1]! \n" // 0123 (16b) + "vld1.16 {d1}, [%6]! \n" // 1234 (16b) + "vmovl.u16 q4, d0 \n" // 0123 (32b) + "vmovl.u16 q5, d1 \n" // 1234 (32b) + "vmlal.u16 q4, d1, d31 \n" + "vmlal.u16 q5, d0, d31 \n" + + "vmovq q0, q4 \n" + "vmovq q1, q5 \n" + "vmla.u32 q4, q2, q14 \n" + "vmla.u32 q5, q3, q14 \n" + "vmla.u32 q2, q0, q14 \n" + "vmla.u32 q3, q1, q14 \n" + + "vrshrn.u32 d1, q4, #4 \n" + "vrshrn.u32 d0, q5, #4 \n" + "vrshrn.u32 d3, q2, #4 \n" + "vrshrn.u32 d2, q3, #4 \n" + + "vst2.16 {d0, d1}, [%2]! \n" // store + "vst2.16 {d2, d3}, [%3]! \n" // store + "subs %4, %4, #8 \n" // 4 sample -> 8 sample + "bgt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_ptr1), // %1 + "+r"(dst_ptr), // %2 + "+r"(dst_ptr1), // %3 + "+r"(dst_width), // %4 + "+r"(src_temp), // %5 + "+r"(src_temp1) // %6 + : + : "memory", "cc", "q0", "q1", "q2", "q3", "q4", "q5", "q14", + "d31" // Clobber List + ); +} + +void ScaleUVRowUp2_Linear_NEON(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width) { + const uint8_t* src_temp = src_ptr + 2; + asm volatile( + "vmov.u8 d30, #3 \n" + + "1: \n" + "vld1.8 {d4}, [%0]! \n" // 00112233 (1u1v) + "vld1.8 {d5}, [%3]! \n" // 11223344 (1u1v) + + "vmovl.u8 q0, d4 \n" // 00112233 (1u1v, 16b) + "vmovl.u8 q1, d5 \n" // 11223344 (1u1v, 16b) + "vmlal.u8 q0, d5, d30 \n" // 3*near+far (odd) + "vmlal.u8 q1, d4, d30 \n" // 3*near+far (even) + + "vrshrn.u16 d1, q0, #2 \n" // 3/4*near+1/4*far (odd) + "vrshrn.u16 d0, q1, #2 \n" // 3/4*near+1/4*far (even) + + "vst2.16 {d0, d1}, [%1]! 
\n" // store + "subs %2, %2, #8 \n" // 4 uv -> 8 uv + "bgt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width), // %2 + "+r"(src_temp) // %3 + : + : "memory", "cc", "q0", "q1", "q2", "d30" // Clobber List + ); +} + +void ScaleUVRowUp2_Bilinear_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + const uint8_t* src_ptr1 = src_ptr + src_stride; + uint8_t* dst_ptr1 = dst_ptr + dst_stride; + const uint8_t* src_temp = src_ptr + 2; + const uint8_t* src_temp1 = src_ptr1 + 2; + + asm volatile( + "vmov.u16 q15, #3 \n" + "vmov.u8 d28, #3 \n" + + "1: \n" + "vld1.8 {d4}, [%0]! \n" // 00112233 (1u1v) + "vld1.8 {d5}, [%5]! \n" // 11223344 (1u1v) + + "vmovl.u8 q0, d4 \n" // 00112233 (1u1v, 16b) + "vmovl.u8 q1, d5 \n" // 11223344 (1u1v, 16b) + "vmlal.u8 q0, d5, d28 \n" // 3*near+far (1, odd) + "vmlal.u8 q1, d4, d28 \n" // 3*near+far (1, even) + + "vld1.8 {d8}, [%1]! \n" // 00112233 (1u1v) + "vld1.8 {d9}, [%6]! \n" // 11223344 (1u1v) + + "vmovl.u8 q2, d8 \n" // 00112233 (1u1v, 16b) + "vmovl.u8 q3, d9 \n" // 11223344 (1u1v, 16b) + "vmlal.u8 q2, d9, d28 \n" // 3*near+far (2, odd) + "vmlal.u8 q3, d8, d28 \n" // 3*near+far (2, even) + + // e o + // q1 q0 + // q3 q2 + + "vmovq q4, q2 \n" + "vmovq q5, q3 \n" + "vmla.u16 q4, q0, q15 \n" // 9 3 3 1 (1, odd) + "vmla.u16 q5, q1, q15 \n" // 9 3 3 1 (1, even) + "vmla.u16 q0, q2, q15 \n" // 9 3 3 1 (2, odd) + "vmla.u16 q1, q3, q15 \n" // 9 3 3 1 (2, even) + + // e o + // q5 q4 + // q1 q0 + + "vrshrn.u16 d2, q1, #4 \n" // 2, even + "vrshrn.u16 d3, q0, #4 \n" // 2, odd + "vrshrn.u16 d0, q5, #4 \n" // 1, even + "vrshrn.u16 d1, q4, #4 \n" // 1, odd + + "vst2.16 {d0, d1}, [%2]! \n" // store + "vst2.16 {d2, d3}, [%3]! \n" // store + "subs %4, %4, #8 \n" // 4 uv -> 8 uv + "bgt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_ptr1), // %1 + "+r"(dst_ptr), // %2 + "+r"(dst_ptr1), // %3 + "+r"(dst_width), // %4 + "+r"(src_temp), // %5 + "+r"(src_temp1) // %6 + : + : "memory", "cc", "q0", "q1", "q2", "q3", "q4", "q5", "d28", + "q15" // Clobber List + ); +} + +void ScaleUVRowUp2_Linear_16_NEON(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width) { + const uint16_t* src_temp = src_ptr + 2; + asm volatile( + "vmov.u16 d30, #3 \n" + + "1: \n" + "vld1.16 {q0}, [%0]! \n" // 00112233 (1u1v, 16) + "vld1.16 {q1}, [%3]! \n" // 11223344 (1u1v, 16) + + "vmovl.u16 q2, d0 \n" // 0011 (1u1v, 32b) + "vmovl.u16 q3, d2 \n" // 1122 (1u1v, 32b) + "vmovl.u16 q4, d1 \n" // 2233 (1u1v, 32b) + "vmovl.u16 q5, d3 \n" // 3344 (1u1v, 32b) + "vmlal.u16 q2, d2, d30 \n" // 3*near+far (odd) + "vmlal.u16 q3, d0, d30 \n" // 3*near+far (even) + "vmlal.u16 q4, d3, d30 \n" // 3*near+far (odd) + "vmlal.u16 q5, d1, d30 \n" // 3*near+far (even) + + "vrshrn.u32 d1, q2, #2 \n" // 3/4*near+1/4*far (odd) + "vrshrn.u32 d0, q3, #2 \n" // 3/4*near+1/4*far (even) + "vrshrn.u32 d3, q4, #2 \n" // 3/4*near+1/4*far (odd) + "vrshrn.u32 d2, q5, #2 \n" // 3/4*near+1/4*far (even) + + "vst2.32 {d0, d1}, [%1]! \n" // store + "vst2.32 {d2, d3}, [%1]! 
\n" // store + "subs %2, %2, #8 \n" // 4 uv -> 8 uv + "bgt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(dst_ptr), // %1 + "+r"(dst_width), // %2 + "+r"(src_temp) // %3 + : + : "memory", "cc", "q0", "q1", "q2", "q3", "q4", "q5", + "d30" // Clobber List + ); +} + +void ScaleUVRowUp2_Bilinear_16_NEON(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + const uint16_t* src_ptr1 = src_ptr + src_stride; + uint16_t* dst_ptr1 = dst_ptr + dst_stride; + const uint16_t* src_temp = src_ptr + 2; + const uint16_t* src_temp1 = src_ptr1 + 2; + + asm volatile( + "vmov.u16 d30, #3 \n" + "vmov.u32 q14, #3 \n" + + "1: \n" + "vld1.8 {d0}, [%0]! \n" // 0011 (1u1v) + "vld1.8 {d1}, [%5]! \n" // 1122 (1u1v) + "vmovl.u16 q2, d0 \n" // 0011 (1u1v, 32b) + "vmovl.u16 q3, d1 \n" // 1122 (1u1v, 32b) + "vmlal.u16 q2, d1, d30 \n" // 3*near+far (1, odd) + "vmlal.u16 q3, d0, d30 \n" // 3*near+far (1, even) + + "vld1.8 {d0}, [%1]! \n" // 0011 (1u1v) + "vld1.8 {d1}, [%6]! \n" // 1122 (1u1v) + "vmovl.u16 q4, d0 \n" // 0011 (1u1v, 32b) + "vmovl.u16 q5, d1 \n" // 1122 (1u1v, 32b) + "vmlal.u16 q4, d1, d30 \n" // 3*near+far (2, odd) + "vmlal.u16 q5, d0, d30 \n" // 3*near+far (2, even) + + "vmovq q0, q4 \n" + "vmovq q1, q5 \n" + "vmla.u32 q4, q2, q14 \n" // 9 3 3 1 (1, odd) + "vmla.u32 q5, q3, q14 \n" // 9 3 3 1 (1, even) + "vmla.u32 q2, q0, q14 \n" // 9 3 3 1 (2, odd) + "vmla.u32 q3, q1, q14 \n" // 9 3 3 1 (2, even) + + "vrshrn.u32 d1, q4, #4 \n" // 1, odd + "vrshrn.u32 d0, q5, #4 \n" // 1, even + "vrshrn.u32 d3, q2, #4 \n" // 2, odd + "vrshrn.u32 d2, q3, #4 \n" // 2, even + + "vst2.32 {d0, d1}, [%2]! \n" // store + "vst2.32 {d2, d3}, [%3]! \n" // store + "subs %4, %4, #4 \n" // 2 uv -> 4 uv + "bgt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_ptr1), // %1 + "+r"(dst_ptr), // %2 + "+r"(dst_ptr1), // %3 + "+r"(dst_width), // %4 + "+r"(src_temp), // %5 + "+r"(src_temp1) // %6 + : + : "memory", "cc", "q0", "q1", "q2", "q3", "q4", "q5", "q14", + "d30" // Clobber List + ); +} + // Add a row of bytes to a row of shorts. Used for box filter. // Reads 16 bytes and accumulates to 16 shorts at a time. void ScaleAddRow_NEON(const uint8_t* src_ptr, @@ -991,20 +1469,20 @@ void ScaleUVRowDownEven_NEON(const uint8_t* src_ptr, (void)src_stride; asm volatile( "1: \n" - "vld1.16 {d0[0]}, [%0], %6 \n" - "vld1.16 {d0[1]}, [%1], %6 \n" - "vld1.16 {d0[2]}, [%2], %6 \n" - "vld1.16 {d0[3]}, [%3], %6 \n" - "subs %5, %5, #4 \n" // 4 pixels per loop. - "vst1.8 {d0}, [%4]! \n" - "bgt 1b \n" - : "+r"(src_ptr), // %0 - "+r"(src1_ptr), // %1 - "+r"(src2_ptr), // %2 - "+r"(src3_ptr), // %3 - "+r"(dst_ptr), // %4 - "+r"(dst_width) // %5 - : "r"(src_stepx * 8) // %6 + "vld1.16 {d0[0]}, [%0], %6 \n" + "vld1.16 {d0[1]}, [%1], %6 \n" + "vld1.16 {d0[2]}, [%2], %6 \n" + "vld1.16 {d0[3]}, [%3], %6 \n" + "subs %5, %5, #4 \n" // 4 pixels per loop. + "vst1.8 {d0}, [%4]! 
\n" + "bgt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src1_ptr), // %1 + "+r"(src2_ptr), // %2 + "+r"(src3_ptr), // %3 + "+r"(dst_ptr), // %4 + "+r"(dst_width) // %5 + : "r"(src_stepx * 8) // %6 : "memory", "cc", "d0"); } diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_neon64.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_neon64.cc index 185591cb55..9f9636e646 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale_neon64.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_neon64.cc @@ -201,22 +201,22 @@ void ScaleRowDown34_0_Box_NEON(const uint8_t* src_ptr, "umlal v19.8h, v3.8b, v20.8b \n" "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead - // (3 * line_0 + line_1) >> 2 + // (3 * line_0 + line_1 + 2) >> 2 "uqrshrn v0.8b, v16.8h, #2 \n" "uqrshrn v1.8b, v17.8h, #2 \n" "uqrshrn v2.8b, v18.8h, #2 \n" "uqrshrn v3.8b, v19.8h, #2 \n" "prfm pldl1keep, [%3, 448] \n" - // a0 = (src[0] * 3 + s[1] * 1) >> 2 + // a0 = (src[0] * 3 + s[1] * 1 + 2) >> 2 "ushll v16.8h, v1.8b, #0 \n" "umlal v16.8h, v0.8b, v20.8b \n" "uqrshrn v0.8b, v16.8h, #2 \n" - // a1 = (src[1] * 1 + s[2] * 1) >> 1 + // a1 = (src[1] * 1 + s[2] * 1 + 1) >> 1 "urhadd v1.8b, v1.8b, v2.8b \n" - // a2 = (src[2] * 1 + s[3] * 3) >> 2 + // a2 = (src[2] * 1 + s[3] * 3 + 2) >> 2 "ushll v16.8h, v2.8b, #0 \n" "umlal v16.8h, v3.8b, v20.8b \n" "uqrshrn v2.8b, v16.8h, #2 \n" @@ -251,16 +251,16 @@ void ScaleRowDown34_1_Box_NEON(const uint8_t* src_ptr, "urhadd v3.8b, v3.8b, v7.8b \n" "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead - // a0 = (src[0] * 3 + s[1] * 1) >> 2 + // a0 = (src[0] * 3 + s[1] * 1 + 2) >> 2 "ushll v4.8h, v1.8b, #0 \n" "umlal v4.8h, v0.8b, v20.8b \n" "uqrshrn v0.8b, v4.8h, #2 \n" "prfm pldl1keep, [%3, 448] \n" - // a1 = (src[1] * 1 + s[2] * 1) >> 1 + // a1 = (src[1] * 1 + s[2] * 1 + 1) >> 1 "urhadd v1.8b, v1.8b, v2.8b \n" - // a2 = (src[2] * 1 + s[3] * 3) >> 2 + // a2 = (src[2] * 1 + s[3] * 3 + 2) >> 2 "ushll v4.8h, v2.8b, #0 \n" "umlal v4.8h, v3.8b, v20.8b \n" "uqrshrn v2.8b, v4.8h, #2 \n" @@ -535,6 +535,488 @@ void ScaleRowDown38_2_Box_NEON(const uint8_t* src_ptr, "v19", "v30", "v31", "memory", "cc"); } +void ScaleRowUp2_Linear_NEON(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width) { + const uint8_t* src_temp = src_ptr + 1; + asm volatile( + "movi v31.8b, #3 \n" + + "1: \n" + "ldr d0, [%0], #8 \n" // 01234567 + "ldr d1, [%1], #8 \n" // 12345678 + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + + "ushll v2.8h, v0.8b, #0 \n" // 01234567 (16b) + "ushll v3.8h, v1.8b, #0 \n" // 12345678 (16b) + + "umlal v2.8h, v1.8b, v31.8b \n" // 3*near+far (odd) + "umlal v3.8h, v0.8b, v31.8b \n" // 3*near+far (even) + + "rshrn v2.8b, v2.8h, #2 \n" // 3/4*near+1/4*far (odd) + "rshrn v1.8b, v3.8h, #2 \n" // 3/4*near+1/4*far (even) + + "st2 {v1.8b, v2.8b}, [%2], #16 \n" // store + "subs %w3, %w3, #16 \n" // 8 sample -> 16 sample + "b.gt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_temp), // %1 + "+r"(dst_ptr), // %2 + "+r"(dst_width) // %3 + : + : "memory", "cc", "v0", "v1", "v2", "v3", "v31" // Clobber List + ); +} + +void ScaleRowUp2_Bilinear_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + const uint8_t* src_ptr1 = src_ptr + src_stride; + uint8_t* dst_ptr1 = dst_ptr + dst_stride; + const uint8_t* src_temp = src_ptr + 1; + const uint8_t* src_temp1 = src_ptr1 + 1; + + asm volatile( + "movi v31.8b, #3 \n" + "movi v30.8h, #3 \n" + + "1: \n" + "ldr d0, [%0], #8 \n" // 01234567 + "ldr d1, [%2], #8 \n" // 12345678 + "prfm 
pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + + "ushll v2.8h, v0.8b, #0 \n" // 01234567 (16b) + "ushll v3.8h, v1.8b, #0 \n" // 12345678 (16b) + "umlal v2.8h, v1.8b, v31.8b \n" // 3*near+far (1, odd) + "umlal v3.8h, v0.8b, v31.8b \n" // 3*near+far (1, even) + + "ldr d0, [%1], #8 \n" + "ldr d1, [%3], #8 \n" + "prfm pldl1keep, [%1, 448] \n" // prefetch 7 lines ahead + + "ushll v4.8h, v0.8b, #0 \n" // 01234567 (16b) + "ushll v5.8h, v1.8b, #0 \n" // 12345678 (16b) + "umlal v4.8h, v1.8b, v31.8b \n" // 3*near+far (2, odd) + "umlal v5.8h, v0.8b, v31.8b \n" // 3*near+far (2, even) + + "mov v0.16b, v4.16b \n" + "mov v1.16b, v5.16b \n" + "mla v4.8h, v2.8h, v30.8h \n" // 9 3 3 1 (1, odd) + "mla v5.8h, v3.8h, v30.8h \n" // 9 3 3 1 (1, even) + "mla v2.8h, v0.8h, v30.8h \n" // 9 3 3 1 (2, odd) + "mla v3.8h, v1.8h, v30.8h \n" // 9 3 3 1 (2, even) + + "rshrn v2.8b, v2.8h, #4 \n" // 2, odd + "rshrn v1.8b, v3.8h, #4 \n" // 2, even + "rshrn v4.8b, v4.8h, #4 \n" // 1, odd + "rshrn v3.8b, v5.8h, #4 \n" // 1, even + + "st2 {v1.8b, v2.8b}, [%5], #16 \n" // store 1 + "st2 {v3.8b, v4.8b}, [%4], #16 \n" // store 2 + "subs %w6, %w6, #16 \n" // 8 sample -> 16 sample + "b.gt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_ptr1), // %1 + "+r"(src_temp), // %2 + "+r"(src_temp1), // %3 + "+r"(dst_ptr), // %4 + "+r"(dst_ptr1), // %5 + "+r"(dst_width) // %6 + : + : "memory", "cc", "v0", "v1", "v2", "v3", "v4", "v5", "v30", + "v31" // Clobber List + ); +} + +void ScaleRowUp2_Linear_12_NEON(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width) { + const uint16_t* src_temp = src_ptr + 1; + asm volatile( + "movi v31.8h, #3 \n" + + "1: \n" + "ld1 {v0.8h}, [%0], #16 \n" // 01234567 (16b) + "ld1 {v1.8h}, [%1], #16 \n" // 12345678 (16b) + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + + "mov v2.16b, v0.16b \n" + "mla v0.8h, v1.8h, v31.8h \n" // 3*near+far (odd) + "mla v1.8h, v2.8h, v31.8h \n" // 3*near+far (even) + + "urshr v2.8h, v0.8h, #2 \n" // 3/4*near+1/4*far (odd) + "urshr v1.8h, v1.8h, #2 \n" // 3/4*near+1/4*far (even) + + "st2 {v1.8h, v2.8h}, [%2], #32 \n" // store + "subs %w3, %w3, #16 \n" // 8 sample -> 16 sample + "b.gt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_temp), // %1 + "+r"(dst_ptr), // %2 + "+r"(dst_width) // %3 + : + : "memory", "cc", "v0", "v1", "v2", "v31" // Clobber List + ); +} + +void ScaleRowUp2_Bilinear_12_NEON(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + const uint16_t* src_ptr1 = src_ptr + src_stride; + uint16_t* dst_ptr1 = dst_ptr + dst_stride; + const uint16_t* src_temp = src_ptr + 1; + const uint16_t* src_temp1 = src_ptr1 + 1; + + asm volatile( + "movi v31.8h, #3 \n" + + "1: \n" + "ld1 {v2.8h}, [%0], #16 \n" // 01234567 (16b) + "ld1 {v3.8h}, [%2], #16 \n" // 12345678 (16b) + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + + "mov v0.16b, v2.16b \n" + "mla v2.8h, v3.8h, v31.8h \n" // 3*near+far (odd) + "mla v3.8h, v0.8h, v31.8h \n" // 3*near+far (even) + + "ld1 {v4.8h}, [%1], #16 \n" // 01234567 (16b) + "ld1 {v5.8h}, [%3], #16 \n" // 12345678 (16b) + "prfm pldl1keep, [%1, 448] \n" // prefetch 7 lines ahead + + "mov v0.16b, v4.16b \n" + "mla v4.8h, v5.8h, v31.8h \n" // 3*near+far (odd) + "mla v5.8h, v0.8h, v31.8h \n" // 3*near+far (even) + + "mov v0.16b, v4.16b \n" + "mov v1.16b, v5.16b \n" + "mla v4.8h, v2.8h, v31.8h \n" // 9 3 3 1 (1, odd) + "mla v5.8h, v3.8h, v31.8h \n" // 9 3 3 1 (1, even) + "mla v2.8h, v0.8h, v31.8h \n" // 9 3 3 1 (2, odd) + "mla v3.8h, v1.8h, v31.8h \n" // 9 3 3 1 (2, even) + + 
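/*
 * Note on the "9 3 3 1" weights (editorial, not part of this patch): each
 * output sample combines the per-row (3*near + far) terms with another 3:1
 * weighting across the two rows, so after the rounded >> 4 it is effectively
 *   (9*near_row_near_col + 3*near_row_far_col
 *    + 3*far_row_near_col + 1*far_row_far_col + 8) >> 4,
 * i.e. a 2x bilinear upsample evaluated at quarter-pixel offsets.
 */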
"urshr v2.8h, v2.8h, #4 \n" // 2, odd + "urshr v1.8h, v3.8h, #4 \n" // 2, even + "urshr v4.8h, v4.8h, #4 \n" // 1, odd + "urshr v3.8h, v5.8h, #4 \n" // 1, even + + "st2 {v3.8h, v4.8h}, [%4], #32 \n" // store 1 + "st2 {v1.8h, v2.8h}, [%5], #32 \n" // store 2 + + "subs %w6, %w6, #16 \n" // 8 sample -> 16 sample + "b.gt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_ptr1), // %1 + "+r"(src_temp), // %2 + "+r"(src_temp1), // %3 + "+r"(dst_ptr), // %4 + "+r"(dst_ptr1), // %5 + "+r"(dst_width) // %6 + : + : "memory", "cc", "v0", "v1", "v2", "v3", "v4", "v5", + "v31" // Clobber List + ); +} + +void ScaleRowUp2_Linear_16_NEON(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width) { + const uint16_t* src_temp = src_ptr + 1; + asm volatile( + "movi v31.8h, #3 \n" + + "1: \n" + "ld1 {v0.8h}, [%0], #16 \n" // 01234567 (16b) + "ld1 {v1.8h}, [%1], #16 \n" // 12345678 (16b) + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + + "ushll v2.4s, v0.4h, #0 \n" // 0123 (32b) + "ushll2 v3.4s, v0.8h, #0 \n" // 4567 (32b) + "ushll v4.4s, v1.4h, #0 \n" // 1234 (32b) + "ushll2 v5.4s, v1.8h, #0 \n" // 5678 (32b) + + "umlal v2.4s, v1.4h, v31.4h \n" // 3*near+far (1, odd) + "umlal2 v3.4s, v1.8h, v31.8h \n" // 3*near+far (2, odd) + "umlal v4.4s, v0.4h, v31.4h \n" // 3*near+far (1, even) + "umlal2 v5.4s, v0.8h, v31.8h \n" // 3*near+far (2, even) + + "rshrn v0.4h, v4.4s, #2 \n" // 3/4*near+1/4*far + "rshrn2 v0.8h, v5.4s, #2 \n" // 3/4*near+1/4*far (even) + "rshrn v1.4h, v2.4s, #2 \n" // 3/4*near+1/4*far + "rshrn2 v1.8h, v3.4s, #2 \n" // 3/4*near+1/4*far (odd) + + "st2 {v0.8h, v1.8h}, [%2], #32 \n" // store + "subs %w3, %w3, #16 \n" // 8 sample -> 16 sample + "b.gt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_temp), // %1 + "+r"(dst_ptr), // %2 + "+r"(dst_width) // %3 + : + : "memory", "cc", "v0", "v1", "v2", "v31" // Clobber List + ); +} + +void ScaleRowUp2_Bilinear_16_NEON(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + const uint16_t* src_ptr1 = src_ptr + src_stride; + uint16_t* dst_ptr1 = dst_ptr + dst_stride; + const uint16_t* src_temp = src_ptr + 1; + const uint16_t* src_temp1 = src_ptr1 + 1; + + asm volatile( + "movi v31.4h, #3 \n" + "movi v30.4s, #3 \n" + + "1: \n" + "ldr d0, [%0], #8 \n" // 0123 (16b) + "ldr d1, [%2], #8 \n" // 1234 (16b) + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "ushll v2.4s, v0.4h, #0 \n" // 0123 (32b) + "ushll v3.4s, v1.4h, #0 \n" // 1234 (32b) + "umlal v2.4s, v1.4h, v31.4h \n" // 3*near+far (1, odd) + "umlal v3.4s, v0.4h, v31.4h \n" // 3*near+far (1, even) + + "ldr d0, [%1], #8 \n" // 0123 (16b) + "ldr d1, [%3], #8 \n" // 1234 (16b) + "prfm pldl1keep, [%1, 448] \n" // prefetch 7 lines ahead + "ushll v4.4s, v0.4h, #0 \n" // 0123 (32b) + "ushll v5.4s, v1.4h, #0 \n" // 1234 (32b) + "umlal v4.4s, v1.4h, v31.4h \n" // 3*near+far (2, odd) + "umlal v5.4s, v0.4h, v31.4h \n" // 3*near+far (2, even) + + "mov v0.16b, v4.16b \n" + "mov v1.16b, v5.16b \n" + "mla v4.4s, v2.4s, v30.4s \n" // 9 3 3 1 (1, odd) + "mla v5.4s, v3.4s, v30.4s \n" // 9 3 3 1 (1, even) + "mla v2.4s, v0.4s, v30.4s \n" // 9 3 3 1 (2, odd) + "mla v3.4s, v1.4s, v30.4s \n" // 9 3 3 1 (2, even) + + "rshrn v1.4h, v4.4s, #4 \n" // 3/4*near+1/4*far + "rshrn v0.4h, v5.4s, #4 \n" // 3/4*near+1/4*far + "rshrn v5.4h, v2.4s, #4 \n" // 3/4*near+1/4*far + "rshrn v4.4h, v3.4s, #4 \n" // 3/4*near+1/4*far + + "st2 {v0.4h, v1.4h}, [%4], #16 \n" // store 1 + "st2 {v4.4h, v5.4h}, [%5], #16 \n" // store 2 + + "subs %w6, %w6, #8 \n" // 4 sample -> 8 sample + 
"b.gt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_ptr1), // %1 + "+r"(src_temp), // %2 + "+r"(src_temp1), // %3 + "+r"(dst_ptr), // %4 + "+r"(dst_ptr1), // %5 + "+r"(dst_width) // %6 + : + : "memory", "cc", "v0", "v1", "v2", "v3", "v4", "v5", "v30", + "v31" // Clobber List + ); +} + +void ScaleUVRowUp2_Linear_NEON(const uint8_t* src_ptr, + uint8_t* dst_ptr, + int dst_width) { + const uint8_t* src_temp = src_ptr + 2; + asm volatile( + "movi v31.8b, #3 \n" + + "1: \n" + "ldr d0, [%0], #8 \n" // 00112233 (1u1v) + "ldr d1, [%1], #8 \n" // 11223344 (1u1v) + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + + "ushll v2.8h, v0.8b, #0 \n" // 00112233 (1u1v, 16b) + "ushll v3.8h, v1.8b, #0 \n" // 11223344 (1u1v, 16b) + + "umlal v2.8h, v1.8b, v31.8b \n" // 3*near+far (odd) + "umlal v3.8h, v0.8b, v31.8b \n" // 3*near+far (even) + + "rshrn v2.8b, v2.8h, #2 \n" // 3/4*near+1/4*far (odd) + "rshrn v1.8b, v3.8h, #2 \n" // 3/4*near+1/4*far (even) + + "st2 {v1.4h, v2.4h}, [%2], #16 \n" // store + "subs %w3, %w3, #8 \n" // 4 uv -> 8 uv + "b.gt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_temp), // %1 + "+r"(dst_ptr), // %2 + "+r"(dst_width) // %3 + : + : "memory", "cc", "v0", "v1", "v2", "v3", "v31" // Clobber List + ); +} + +void ScaleUVRowUp2_Bilinear_NEON(const uint8_t* src_ptr, + ptrdiff_t src_stride, + uint8_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + const uint8_t* src_ptr1 = src_ptr + src_stride; + uint8_t* dst_ptr1 = dst_ptr + dst_stride; + const uint8_t* src_temp = src_ptr + 2; + const uint8_t* src_temp1 = src_ptr1 + 2; + + asm volatile( + "movi v31.8b, #3 \n" + "movi v30.8h, #3 \n" + + "1: \n" + "ldr d0, [%0], #8 \n" + "ldr d1, [%2], #8 \n" + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + + "ushll v2.8h, v0.8b, #0 \n" + "ushll v3.8h, v1.8b, #0 \n" + "umlal v2.8h, v1.8b, v31.8b \n" // 3*near+far (1, odd) + "umlal v3.8h, v0.8b, v31.8b \n" // 3*near+far (1, even) + + "ldr d0, [%1], #8 \n" + "ldr d1, [%3], #8 \n" + "prfm pldl1keep, [%1, 448] \n" // prefetch 7 lines ahead + + "ushll v4.8h, v0.8b, #0 \n" + "ushll v5.8h, v1.8b, #0 \n" + "umlal v4.8h, v1.8b, v31.8b \n" // 3*near+far (2, odd) + "umlal v5.8h, v0.8b, v31.8b \n" // 3*near+far (2, even) + + "mov v0.16b, v4.16b \n" + "mov v1.16b, v5.16b \n" + "mla v4.8h, v2.8h, v30.8h \n" // 9 3 3 1 (1, odd) + "mla v5.8h, v3.8h, v30.8h \n" // 9 3 3 1 (1, even) + "mla v2.8h, v0.8h, v30.8h \n" // 9 3 3 1 (2, odd) + "mla v3.8h, v1.8h, v30.8h \n" // 9 3 3 1 (2, even) + + "rshrn v2.8b, v2.8h, #4 \n" // 2, odd + "rshrn v1.8b, v3.8h, #4 \n" // 2, even + "rshrn v4.8b, v4.8h, #4 \n" // 1, odd + "rshrn v3.8b, v5.8h, #4 \n" // 1, even + + "st2 {v1.4h, v2.4h}, [%5], #16 \n" // store 2 + "st2 {v3.4h, v4.4h}, [%4], #16 \n" // store 1 + "subs %w6, %w6, #8 \n" // 4 uv -> 8 uv + "b.gt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_ptr1), // %1 + "+r"(src_temp), // %2 + "+r"(src_temp1), // %3 + "+r"(dst_ptr), // %4 + "+r"(dst_ptr1), // %5 + "+r"(dst_width) // %6 + : + : "memory", "cc", "v0", "v1", "v2", "v3", "v4", "v5", "v30", + "v31" // Clobber List + ); +} + +void ScaleUVRowUp2_Linear_16_NEON(const uint16_t* src_ptr, + uint16_t* dst_ptr, + int dst_width) { + const uint16_t* src_temp = src_ptr + 2; + asm volatile( + "movi v31.8h, #3 \n" + + "1: \n" + "ld1 {v0.8h}, [%0], #16 \n" // 01234567 (16b) + "ld1 {v1.8h}, [%1], #16 \n" // 12345678 (16b) + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + + "ushll v2.4s, v0.4h, #0 \n" // 0011 (1u1v, 32b) + "ushll v3.4s, v1.4h, #0 \n" // 1122 (1u1v, 32b) + "ushll2 v4.4s, v0.8h, #0 \n" // 2233 (1u1v, 32b) + 
"ushll2 v5.4s, v1.8h, #0 \n" // 3344 (1u1v, 32b) + + "umlal v2.4s, v1.4h, v31.4h \n" // 3*near+far (odd) + "umlal v3.4s, v0.4h, v31.4h \n" // 3*near+far (even) + "umlal2 v4.4s, v1.8h, v31.8h \n" // 3*near+far (odd) + "umlal2 v5.4s, v0.8h, v31.8h \n" // 3*near+far (even) + + "rshrn v2.4h, v2.4s, #2 \n" // 3/4*near+1/4*far (odd) + "rshrn v1.4h, v3.4s, #2 \n" // 3/4*near+1/4*far (even) + "rshrn v4.4h, v4.4s, #2 \n" // 3/4*near+1/4*far (odd) + "rshrn v3.4h, v5.4s, #2 \n" // 3/4*near+1/4*far (even) + + "st2 {v1.2s, v2.2s}, [%2], #16 \n" // store + "st2 {v3.2s, v4.2s}, [%2], #16 \n" // store + "subs %w3, %w3, #8 \n" // 4 uv -> 8 uv + "b.gt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_temp), // %1 + "+r"(dst_ptr), // %2 + "+r"(dst_width) // %3 + : + : "memory", "cc", "v0", "v1", "v2", "v3", "v4", "v5", + "v31" // Clobber List + ); +} + +void ScaleUVRowUp2_Bilinear_16_NEON(const uint16_t* src_ptr, + ptrdiff_t src_stride, + uint16_t* dst_ptr, + ptrdiff_t dst_stride, + int dst_width) { + const uint16_t* src_ptr1 = src_ptr + src_stride; + uint16_t* dst_ptr1 = dst_ptr + dst_stride; + const uint16_t* src_temp = src_ptr + 2; + const uint16_t* src_temp1 = src_ptr1 + 2; + + asm volatile( + "movi v31.4h, #3 \n" + "movi v30.4s, #3 \n" + + "1: \n" + "ldr d0, [%0], #8 \n" + "ldr d1, [%2], #8 \n" + "prfm pldl1keep, [%0, 448] \n" // prefetch 7 lines ahead + "ushll v2.4s, v0.4h, #0 \n" // 0011 (1u1v, 32b) + "ushll v3.4s, v1.4h, #0 \n" // 1122 (1u1v, 32b) + "umlal v2.4s, v1.4h, v31.4h \n" // 3*near+far (1, odd) + "umlal v3.4s, v0.4h, v31.4h \n" // 3*near+far (1, even) + + "ldr d0, [%1], #8 \n" + "ldr d1, [%3], #8 \n" + "prfm pldl1keep, [%1, 448] \n" // prefetch 7 lines ahead + "ushll v4.4s, v0.4h, #0 \n" // 0011 (1u1v, 32b) + "ushll v5.4s, v1.4h, #0 \n" // 1122 (1u1v, 32b) + "umlal v4.4s, v1.4h, v31.4h \n" // 3*near+far (2, odd) + "umlal v5.4s, v0.4h, v31.4h \n" // 3*near+far (2, even) + + "mov v0.16b, v4.16b \n" + "mov v1.16b, v5.16b \n" + "mla v4.4s, v2.4s, v30.4s \n" // 9 3 3 1 (1, odd) + "mla v5.4s, v3.4s, v30.4s \n" // 9 3 3 1 (1, even) + "mla v2.4s, v0.4s, v30.4s \n" // 9 3 3 1 (2, odd) + "mla v3.4s, v1.4s, v30.4s \n" // 9 3 3 1 (2, even) + + "rshrn v1.4h, v2.4s, #4 \n" // 2, odd + "rshrn v0.4h, v3.4s, #4 \n" // 2, even + "rshrn v3.4h, v4.4s, #4 \n" // 1, odd + "rshrn v2.4h, v5.4s, #4 \n" // 1, even + + "st2 {v0.2s, v1.2s}, [%5], #16 \n" // store 2 + "st2 {v2.2s, v3.2s}, [%4], #16 \n" // store 1 + "subs %w6, %w6, #4 \n" // 2 uv -> 4 uv + "b.gt 1b \n" + : "+r"(src_ptr), // %0 + "+r"(src_ptr1), // %1 + "+r"(src_temp), // %2 + "+r"(src_temp1), // %3 + "+r"(dst_ptr), // %4 + "+r"(dst_ptr1), // %5 + "+r"(dst_width) // %6 + : + : "memory", "cc", "v0", "v1", "v2", "v3", "v4", "v5", "v30", + "v31" // Clobber List + ); +} + // Add a row of bytes to a row of shorts. Used for box filter. // Reads 16 bytes and accumulates to 16 shorts at a time. void ScaleAddRow_NEON(const uint8_t* src_ptr, @@ -1127,13 +1609,13 @@ void ScaleUVRowDownEven_NEON(const uint8_t* src_ptr, (void)src_stride; asm volatile( "1: \n" - "ld1 {v0.h}[0], [%0], %6 \n" - "ld1 {v1.h}[0], [%1], %6 \n" - "ld1 {v2.h}[0], [%2], %6 \n" - "ld1 {v3.h}[0], [%3], %6 \n" - "subs %w5, %w5, #4 \n" // 4 pixels per loop. - "st4 {v0.h, v1.h, v2.h, v3.h}[0], [%4], #8 \n" - "b.gt 1b \n" + "ld1 {v0.h}[0], [%0], %6 \n" + "ld1 {v1.h}[0], [%1], %6 \n" + "ld1 {v2.h}[0], [%2], %6 \n" + "ld1 {v3.h}[0], [%3], %6 \n" + "subs %w5, %w5, #4 \n" // 4 pixels per loop. 
+ "st4 {v0.h, v1.h, v2.h, v3.h}[0], [%4], #8 \n" + "b.gt 1b \n" : "+r"(src_ptr), // %0 "+r"(src1_ptr), // %1 "+r"(src2_ptr), // %2 diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_rgb.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_rgb.cc new file mode 100644 index 0000000000..8db59b56fc --- /dev/null +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_rgb.cc @@ -0,0 +1,66 @@ +/* + * Copyright 2022 The LibYuv Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "libyuv/scale.h" /* For FilterMode */ + +#include +#include + +#include "libyuv/convert_argb.h" +#include "libyuv/convert_from_argb.h" +#include "libyuv/row.h" +#include "libyuv/scale_argb.h" +#include "libyuv/scale_rgb.h" + +#ifdef __cplusplus +namespace libyuv { +extern "C" { +#endif + +// Scale a 24 bit image. +// Converts to ARGB as intermediate step + +LIBYUV_API +int RGBScale(const uint8_t* src_rgb, + int src_stride_rgb, + int src_width, + int src_height, + uint8_t* dst_rgb, + int dst_stride_rgb, + int dst_width, + int dst_height, + enum FilterMode filtering) { + int r; + uint8_t* src_argb = + (uint8_t*)malloc(src_width * src_height * 4 + dst_width * dst_height * 4); + uint8_t* dst_argb = src_argb + src_width * src_height * 4; + + if (!src_argb) { + return 1; + } + + r = RGB24ToARGB(src_rgb, src_stride_rgb, src_argb, src_width * 4, src_width, + src_height); + if (!r) { + r = ARGBScale(src_argb, src_width * 4, src_width, src_height, dst_argb, + dst_width * 4, dst_width, dst_height, filtering); + if (!r) { + r = ARGBToRGB24(dst_argb, dst_width * 4, dst_rgb, dst_stride_rgb, + dst_width, dst_height); + } + } + free(src_argb); + return r; +} + +#ifdef __cplusplus +} // extern "C" +} // namespace libyuv +#endif diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_uv.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_uv.cc index b0469f09b8..5b92d04321 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale_uv.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_uv.cc @@ -83,9 +83,9 @@ static void ScaleUVDown2(int src_width, assert((dy & 0x1ffff) == 0); // Test vertical scale is multiple of 2. // Advance to odd row, even column. if (filtering == kFilterBilinear) { - src_uv += (y >> 16) * src_stride + (x >> 16) * 2; + src_uv += (y >> 16) * (intptr_t)src_stride + (x >> 16) * 2; } else { - src_uv += (y >> 16) * src_stride + ((x >> 16) - 1) * 2; + src_uv += (y >> 16) * (intptr_t)src_stride + ((x >> 16) - 1) * 2; } #if defined(HAS_SCALEUVROWDOWN2BOX_SSSE3) @@ -147,22 +147,6 @@ static void ScaleUVDown2(int src_width, } } #endif -#if defined(HAS_SCALEUVROWDOWN2_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ScaleUVRowDown2 = - filtering == kFilterNone - ? ScaleUVRowDown2_Any_MMI - : (filtering == kFilterLinear ? ScaleUVRowDown2Linear_Any_MMI - : ScaleUVRowDown2Box_Any_MMI); - if (IS_ALIGNED(dst_width, 2)) { - ScaleUVRowDown2 = - filtering == kFilterNone - ? ScaleUVRowDown2_MMI - : (filtering == kFilterLinear ? 
ScaleUVRowDown2Linear_MMI - : ScaleUVRowDown2Box_MMI); - } - } -#endif #if defined(HAS_SCALEUVROWDOWN2_MSA) if (TestCpuFlag(kCpuHasMSA)) { ScaleUVRowDown2 = @@ -209,14 +193,14 @@ static void ScaleUVDown4Box(int src_width, int dy) { int j; // Allocate 2 rows of UV. - const int kRowSize = (dst_width * 2 * 2 + 15) & ~15; - align_buffer_64(row, kRowSize * 2); + const int row_size = (dst_width * 2 * 2 + 15) & ~15; + align_buffer_64(row, row_size * 2); int row_stride = src_stride * (dy >> 16); void (*ScaleUVRowDown2)(const uint8_t* src_uv, ptrdiff_t src_stride, uint8_t* dst_uv, int dst_width) = ScaleUVRowDown2Box_C; // Advance to odd row, even column. - src_uv += (y >> 16) * src_stride + (x >> 16) * 2; + src_uv += (y >> 16) * (intptr_t)src_stride + (x >> 16) * 2; (void)src_width; (void)src_height; (void)dx; @@ -250,9 +234,9 @@ static void ScaleUVDown4Box(int src_width, for (j = 0; j < dst_height; ++j) { ScaleUVRowDown2(src_uv, src_stride, row, dst_width * 2); - ScaleUVRowDown2(src_uv + src_stride * 2, src_stride, row + kRowSize, + ScaleUVRowDown2(src_uv + src_stride * 2, src_stride, row + row_size, dst_width * 2); - ScaleUVRowDown2(row, kRowSize, dst_uv, dst_width); + ScaleUVRowDown2(row, row_size, dst_uv, dst_width); src_uv += row_stride; dst_uv += dst_stride; } @@ -279,7 +263,7 @@ static void ScaleUVDownEven(int src_width, enum FilterMode filtering) { int j; int col_step = dx >> 16; - int row_stride = (dy >> 16) * src_stride; + ptrdiff_t row_stride = (ptrdiff_t)((dy >> 16) * (intptr_t)src_stride); void (*ScaleUVRowDownEven)(const uint8_t* src_uv, ptrdiff_t src_stride, int src_step, uint8_t* dst_uv, int dst_width) = filtering ? ScaleUVRowDownEvenBox_C : ScaleUVRowDownEven_C; @@ -287,7 +271,7 @@ static void ScaleUVDownEven(int src_width, (void)src_height; assert(IS_ALIGNED(src_width, 2)); assert(IS_ALIGNED(src_height, 2)); - src_uv += (y >> 16) * src_stride + (x >> 16) * 2; + src_uv += (y >> 16) * (intptr_t)src_stride + (x >> 16) * 2; #if defined(HAS_SCALEUVROWDOWNEVEN_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { ScaleUVRowDownEven = filtering ? ScaleUVRowDownEvenBox_Any_SSSE3 @@ -305,7 +289,7 @@ static void ScaleUVDownEven(int src_width, ScaleUVRowDownEven = ScaleUVRowDownEven_NEON; } } -#endif// TODO(fbarchard): Enable Box filter +#endif // TODO(fbarchard): Enable Box filter #if defined(HAS_SCALEUVROWDOWNEVENBOX_NEON) if (TestCpuFlag(kCpuHasNEON)) { ScaleUVRowDownEven = filtering ? ScaleUVRowDownEvenBox_Any_NEON @@ -316,16 +300,6 @@ static void ScaleUVDownEven(int src_width, } } #endif -#if defined(HAS_SCALEUVROWDOWNEVEN_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ScaleUVRowDownEven = - filtering ? ScaleUVRowDownEvenBox_Any_MMI : ScaleUVRowDownEven_Any_MMI; - if (IS_ALIGNED(dst_width, 2)) { - ScaleUVRowDownEven = - filtering ? ScaleUVRowDownEvenBox_MMI : ScaleUVRowDownEven_MMI; - } - } -#endif #if defined(HAS_SCALEUVROWDOWNEVEN_MSA) if (TestCpuFlag(kCpuHasMSA)) { ScaleUVRowDownEven = @@ -364,10 +338,10 @@ static void ScaleUVBilinearDown(int src_width, int dy, enum FilterMode filtering) { int j; - void (*InterpolateRow)(uint8_t * dst_uv, const uint8_t* src_uv, + void (*InterpolateRow)(uint8_t* dst_uv, const uint8_t* src_uv, ptrdiff_t src_stride, int dst_width, int source_y_fraction) = InterpolateRow_C; - void (*ScaleUVFilterCols)(uint8_t * dst_uv, const uint8_t* src_uv, + void (*ScaleUVFilterCols)(uint8_t* dst_uv, const uint8_t* src_uv, int dst_width, int x, int dx) = (src_width >= 32768) ? 
ScaleUVFilterCols64_C : ScaleUVFilterCols_C; int64_t xlast = x + (int64_t)(dst_width - 1) * dx; @@ -415,6 +389,14 @@ static void ScaleUVBilinearDown(int src_width, } } #endif +#if defined(HAS_INTERPOLATEROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + InterpolateRow = InterpolateRow_Any_LSX; + if (IS_ALIGNED(clip_src_width, 32)) { + InterpolateRow = InterpolateRow_LSX; + } + } +#endif #if defined(HAS_SCALEUVFILTERCOLS_SSSE3) if (TestCpuFlag(kCpuHasSSSE3) && src_width < 32768) { ScaleUVFilterCols = ScaleUVFilterCols_SSSE3; @@ -447,7 +429,7 @@ static void ScaleUVBilinearDown(int src_width, } for (j = 0; j < dst_height; ++j) { int yi = y >> 16; - const uint8_t* src = src_uv + yi * src_stride; + const uint8_t* src = src_uv + yi * (intptr_t)src_stride; if (filtering == kFilterLinear) { ScaleUVFilterCols(dst_uv, src, dst_width, x, dx); } else { @@ -482,17 +464,17 @@ static void ScaleUVBilinearUp(int src_width, int dy, enum FilterMode filtering) { int j; - void (*InterpolateRow)(uint8_t * dst_uv, const uint8_t* src_uv, + void (*InterpolateRow)(uint8_t* dst_uv, const uint8_t* src_uv, ptrdiff_t src_stride, int dst_width, int source_y_fraction) = InterpolateRow_C; - void (*ScaleUVFilterCols)(uint8_t * dst_uv, const uint8_t* src_uv, + void (*ScaleUVFilterCols)(uint8_t* dst_uv, const uint8_t* src_uv, int dst_width, int x, int dx) = filtering ? ScaleUVFilterCols_C : ScaleUVCols_C; const int max_y = (src_height - 1) << 16; #if defined(HAS_INTERPOLATEROW_SSSE3) if (TestCpuFlag(kCpuHasSSSE3)) { InterpolateRow = InterpolateRow_Any_SSSE3; - if (IS_ALIGNED(dst_width, 4)) { + if (IS_ALIGNED(dst_width, 8)) { InterpolateRow = InterpolateRow_SSSE3; } } @@ -500,7 +482,7 @@ static void ScaleUVBilinearUp(int src_width, #if defined(HAS_INTERPOLATEROW_AVX2) if (TestCpuFlag(kCpuHasAVX2)) { InterpolateRow = InterpolateRow_Any_AVX2; - if (IS_ALIGNED(dst_width, 8)) { + if (IS_ALIGNED(dst_width, 16)) { InterpolateRow = InterpolateRow_AVX2; } } @@ -508,26 +490,26 @@ static void ScaleUVBilinearUp(int src_width, #if defined(HAS_INTERPOLATEROW_NEON) if (TestCpuFlag(kCpuHasNEON)) { InterpolateRow = InterpolateRow_Any_NEON; - if (IS_ALIGNED(dst_width, 4)) { + if (IS_ALIGNED(dst_width, 8)) { InterpolateRow = InterpolateRow_NEON; } } #endif -#if defined(HAS_INTERPOLATEROW_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - InterpolateRow = InterpolateRow_Any_MMI; - if (IS_ALIGNED(dst_width, 2)) { - InterpolateRow = InterpolateRow_MMI; - } - } -#endif #if defined(HAS_INTERPOLATEROW_MSA) if (TestCpuFlag(kCpuHasMSA)) { InterpolateRow = InterpolateRow_Any_MSA; - if (IS_ALIGNED(dst_width, 8)) { + if (IS_ALIGNED(dst_width, 16)) { InterpolateRow = InterpolateRow_MSA; } } +#endif +#if defined(HAS_INTERPOLATEROW_LSX) + if (TestCpuFlag(kCpuHasLSX)) { + InterpolateRow = InterpolateRow_Any_LSX; + if (IS_ALIGNED(dst_width, 16)) { + InterpolateRow = InterpolateRow_LSX; + } + } #endif if (src_width >= 32768) { ScaleUVFilterCols = filtering ? 
ScaleUVFilterCols64_C : ScaleUVCols64_C; @@ -540,7 +522,7 @@ static void ScaleUVBilinearUp(int src_width, #if defined(HAS_SCALEUVFILTERCOLS_NEON) if (filtering && TestCpuFlag(kCpuHasNEON)) { ScaleUVFilterCols = ScaleUVFilterCols_Any_NEON; - if (IS_ALIGNED(dst_width, 4)) { + if (IS_ALIGNED(dst_width, 8)) { ScaleUVFilterCols = ScaleUVFilterCols_NEON; } } @@ -548,7 +530,7 @@ static void ScaleUVBilinearUp(int src_width, #if defined(HAS_SCALEUVFILTERCOLS_MSA) if (filtering && TestCpuFlag(kCpuHasMSA)) { ScaleUVFilterCols = ScaleUVFilterCols_Any_MSA; - if (IS_ALIGNED(dst_width, 8)) { + if (IS_ALIGNED(dst_width, 16)) { ScaleUVFilterCols = ScaleUVFilterCols_MSA; } } @@ -561,23 +543,15 @@ static void ScaleUVBilinearUp(int src_width, #if defined(HAS_SCALEUVCOLS_NEON) if (!filtering && TestCpuFlag(kCpuHasNEON)) { ScaleUVFilterCols = ScaleUVCols_Any_NEON; - if (IS_ALIGNED(dst_width, 8)) { + if (IS_ALIGNED(dst_width, 16)) { ScaleUVFilterCols = ScaleUVCols_NEON; } } #endif -#if defined(HAS_SCALEUVCOLS_MMI) - if (!filtering && TestCpuFlag(kCpuHasMMI)) { - ScaleUVFilterCols = ScaleUVCols_Any_MMI; - if (IS_ALIGNED(dst_width, 1)) { - ScaleUVFilterCols = ScaleUVCols_MMI; - } - } -#endif #if defined(HAS_SCALEUVCOLS_MSA) if (!filtering && TestCpuFlag(kCpuHasMSA)) { ScaleUVFilterCols = ScaleUVCols_Any_MSA; - if (IS_ALIGNED(dst_width, 4)) { + if (IS_ALIGNED(dst_width, 8)) { ScaleUVFilterCols = ScaleUVCols_MSA; } } @@ -588,11 +562,6 @@ static void ScaleUVBilinearUp(int src_width, if (TestCpuFlag(kCpuHasSSSE3) && IS_ALIGNED(dst_width, 8)) { ScaleUVFilterCols = ScaleUVColsUp2_SSSE3; } -#endif -#if defined(HAS_SCALEUVCOLSUP2_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(dst_width, 4)) { - ScaleUVFilterCols = ScaleUVColsUp2_MMI; - } #endif } @@ -602,14 +571,14 @@ static void ScaleUVBilinearUp(int src_width, { int yi = y >> 16; - const uint8_t* src = src_uv + yi * src_stride; + const uint8_t* src = src_uv + yi * (intptr_t)src_stride; // Allocate 2 rows of UV. - const int kRowSize = (dst_width * 2 + 15) & ~15; - align_buffer_64(row, kRowSize * 2); + const int row_size = (dst_width * 2 + 15) & ~15; + align_buffer_64(row, row_size * 2); uint8_t* rowptr = row; - int rowstride = kRowSize; + int rowstride = row_size; int lasty = yi; ScaleUVFilterCols(rowptr, src, dst_width, x, dx); @@ -617,7 +586,9 @@ static void ScaleUVBilinearUp(int src_width, src += src_stride; } ScaleUVFilterCols(rowptr + rowstride, src, dst_width, x, dx); - src += src_stride; + if (src_height > 2) { + src += src_stride; + } for (j = 0; j < dst_height; ++j) { yi = y >> 16; @@ -625,14 +596,16 @@ static void ScaleUVBilinearUp(int src_width, if (y > max_y) { y = max_y; yi = y >> 16; - src = src_uv + yi * src_stride; + src = src_uv + yi * (intptr_t)src_stride; } if (yi != lasty) { ScaleUVFilterCols(rowptr, src, dst_width, x, dx); rowptr += rowstride; rowstride = -rowstride; lasty = yi; - src += src_stride; + if ((y + 65536) < max_y) { + src += src_stride; + } } } if (filtering == kFilterLinear) { @@ -649,6 +622,220 @@ static void ScaleUVBilinearUp(int src_width, } #endif // HAS_SCALEUVBILINEARUP +// Scale UV, horizontally up by 2 times. +// Uses linear filter horizontally, nearest vertically. +// This is an optimized version for scaling up a plane to 2 times of +// its original width, using linear interpolation. +// This is used to scale U and V planes of NV16 to NV24. 
+void ScaleUVLinearUp2(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint8_t* src_uv, + uint8_t* dst_uv) { + void (*ScaleRowUp)(const uint8_t* src_uv, uint8_t* dst_uv, int dst_width) = + ScaleUVRowUp2_Linear_Any_C; + int i; + int y; + int dy; + + // This function can only scale up by 2 times horizontally. + assert(src_width == ((dst_width + 1) / 2)); + +#ifdef HAS_SCALEUVROWUP2LINEAR_SSSE3 + if (TestCpuFlag(kCpuHasSSSE3)) { + ScaleRowUp = ScaleUVRowUp2_Linear_Any_SSSE3; + } +#endif + +#ifdef HAS_SCALEUVROWUP2LINEAR_AVX2 + if (TestCpuFlag(kCpuHasAVX2)) { + ScaleRowUp = ScaleUVRowUp2_Linear_Any_AVX2; + } +#endif + +#ifdef HAS_SCALEUVROWUP2LINEAR_NEON + if (TestCpuFlag(kCpuHasNEON)) { + ScaleRowUp = ScaleUVRowUp2_Linear_Any_NEON; + } +#endif + + if (dst_height == 1) { + ScaleRowUp(src_uv + ((src_height - 1) / 2) * (intptr_t)src_stride, dst_uv, + dst_width); + } else { + dy = FixedDiv(src_height - 1, dst_height - 1); + y = (1 << 15) - 1; + for (i = 0; i < dst_height; ++i) { + ScaleRowUp(src_uv + (y >> 16) * (intptr_t)src_stride, dst_uv, dst_width); + dst_uv += dst_stride; + y += dy; + } + } +} + +// Scale plane, up by 2 times. +// This is an optimized version for scaling up a plane to 2 times of +// its original size, using bilinear interpolation. +// This is used to scale U and V planes of NV12 to NV24. +void ScaleUVBilinearUp2(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint8_t* src_ptr, + uint8_t* dst_ptr) { + void (*Scale2RowUp)(const uint8_t* src_ptr, ptrdiff_t src_stride, + uint8_t* dst_ptr, ptrdiff_t dst_stride, int dst_width) = + ScaleUVRowUp2_Bilinear_Any_C; + int x; + + // This function can only scale up by 2 times. + assert(src_width == ((dst_width + 1) / 2)); + assert(src_height == ((dst_height + 1) / 2)); + +#ifdef HAS_SCALEUVROWUP2BILINEAR_SSSE3 + if (TestCpuFlag(kCpuHasSSSE3)) { + Scale2RowUp = ScaleUVRowUp2_Bilinear_Any_SSSE3; + } +#endif + +#ifdef HAS_SCALEUVROWUP2BILINEAR_AVX2 + if (TestCpuFlag(kCpuHasAVX2)) { + Scale2RowUp = ScaleUVRowUp2_Bilinear_Any_AVX2; + } +#endif + +#ifdef HAS_SCALEUVROWUP2BILINEAR_NEON + if (TestCpuFlag(kCpuHasNEON)) { + Scale2RowUp = ScaleUVRowUp2_Bilinear_Any_NEON; + } +#endif + + Scale2RowUp(src_ptr, 0, dst_ptr, 0, dst_width); + dst_ptr += dst_stride; + for (x = 0; x < src_height - 1; ++x) { + Scale2RowUp(src_ptr, src_stride, dst_ptr, dst_stride, dst_width); + src_ptr += src_stride; + // TODO(fbarchard): Test performance of writing one row of destination at a + // time. + dst_ptr += 2 * dst_stride; + } + if (!(dst_height & 1)) { + Scale2RowUp(src_ptr, 0, dst_ptr, 0, dst_width); + } +} + +// Scale 16 bit UV, horizontally up by 2 times. +// Uses linear filter horizontally, nearest vertically. +// This is an optimized version for scaling up a plane to 2 times of +// its original width, using linear interpolation. +// This is used to scale U and V planes of P210 to P410. +void ScaleUVLinearUp2_16(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint16_t* src_uv, + uint16_t* dst_uv) { + void (*ScaleRowUp)(const uint16_t* src_uv, uint16_t* dst_uv, int dst_width) = + ScaleUVRowUp2_Linear_16_Any_C; + int i; + int y; + int dy; + + // This function can only scale up by 2 times horizontally. 
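/*
 * Illustrative scalar sketch (not part of this patch): the row functions
 * selected below all implement the same 2x horizontal upsample on interleaved
 * UV, blending each U with the U of the next pair and each V with the V of
 * the next pair using (3*near + far + 2) >> 2.  Parameter names here are
 * illustrative only; the real kernels also handle odd-width tails.
 */
#if 0 /* guarded sketch */
static void ScaleUVRowUp2_Linear_16_Sketch(const uint16_t* src_uv,
                                           uint16_t* dst_uv,
                                           int src_pairs) {
  int x;
  for (x = 0; x < src_pairs - 1; ++x) {
    int c;
    for (c = 0; c < 2; ++c) { /* c = 0: U, c = 1: V */
      int near = src_uv[2 * x + c];
      int far = src_uv[2 * (x + 1) + c];
      dst_uv[4 * x + c] = (uint16_t)((3 * near + far + 2) >> 2);
      dst_uv[4 * x + 2 + c] = (uint16_t)((near + 3 * far + 2) >> 2);
    }
  }
}
#endif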
+ assert(src_width == ((dst_width + 1) / 2)); + +#ifdef HAS_SCALEUVROWUP2LINEAR_16_SSE41 + if (TestCpuFlag(kCpuHasSSE41)) { + ScaleRowUp = ScaleUVRowUp2_Linear_16_Any_SSE41; + } +#endif + +#ifdef HAS_SCALEUVROWUP2LINEAR_16_AVX2 + if (TestCpuFlag(kCpuHasAVX2)) { + ScaleRowUp = ScaleUVRowUp2_Linear_16_Any_AVX2; + } +#endif + +#ifdef HAS_SCALEUVROWUP2LINEAR_16_NEON + if (TestCpuFlag(kCpuHasNEON)) { + ScaleRowUp = ScaleUVRowUp2_Linear_16_Any_NEON; + } +#endif + + if (dst_height == 1) { + ScaleRowUp(src_uv + ((src_height - 1) / 2) * (intptr_t)src_stride, dst_uv, + dst_width); + } else { + dy = FixedDiv(src_height - 1, dst_height - 1); + y = (1 << 15) - 1; + for (i = 0; i < dst_height; ++i) { + ScaleRowUp(src_uv + (y >> 16) * (intptr_t)src_stride, dst_uv, dst_width); + dst_uv += dst_stride; + y += dy; + } + } +} + +// Scale 16 bit UV, up by 2 times. +// This is an optimized version for scaling up a plane to 2 times of +// its original size, using bilinear interpolation. +// This is used to scale U and V planes of P010 to P410. +void ScaleUVBilinearUp2_16(int src_width, + int src_height, + int dst_width, + int dst_height, + int src_stride, + int dst_stride, + const uint16_t* src_ptr, + uint16_t* dst_ptr) { + void (*Scale2RowUp)(const uint16_t* src_ptr, ptrdiff_t src_stride, + uint16_t* dst_ptr, ptrdiff_t dst_stride, int dst_width) = + ScaleUVRowUp2_Bilinear_16_Any_C; + int x; + + // This function can only scale up by 2 times. + assert(src_width == ((dst_width + 1) / 2)); + assert(src_height == ((dst_height + 1) / 2)); + +#ifdef HAS_SCALEUVROWUP2BILINEAR_16_SSE41 + if (TestCpuFlag(kCpuHasSSE41)) { + Scale2RowUp = ScaleUVRowUp2_Bilinear_16_Any_SSE41; + } +#endif + +#ifdef HAS_SCALEUVROWUP2BILINEAR_16_AVX2 + if (TestCpuFlag(kCpuHasAVX2)) { + Scale2RowUp = ScaleUVRowUp2_Bilinear_16_Any_AVX2; + } +#endif + +#ifdef HAS_SCALEUVROWUP2BILINEAR_16_NEON + if (TestCpuFlag(kCpuHasNEON)) { + Scale2RowUp = ScaleUVRowUp2_Bilinear_16_Any_NEON; + } +#endif + + Scale2RowUp(src_ptr, 0, dst_ptr, 0, dst_width); + dst_ptr += dst_stride; + for (x = 0; x < src_height - 1; ++x) { + Scale2RowUp(src_ptr, src_stride, dst_ptr, dst_stride, dst_width); + src_ptr += src_stride; + // TODO(fbarchard): Test performance of writing one row of destination at a + // time. + dst_ptr += 2 * dst_stride; + } + if (!(dst_height & 1)) { + Scale2RowUp(src_ptr, 0, dst_ptr, 0, dst_width); + } +} + // Scale UV to/from any dimensions, without interpolation. // Fixed point math is used for performance: The upper 16 bits // of x and dx is the integer part of the source position and @@ -667,7 +854,7 @@ static void ScaleUVSimple(int src_width, int y, int dy) { int j; - void (*ScaleUVCols)(uint8_t * dst_uv, const uint8_t* src_uv, int dst_width, + void (*ScaleUVCols)(uint8_t* dst_uv, const uint8_t* src_uv, int dst_width, int x, int dx) = (src_width >= 32768) ? 
ScaleUVCols64_C : ScaleUVCols_C; (void)src_height; @@ -684,14 +871,6 @@ static void ScaleUVSimple(int src_width, } } #endif -#if defined(HAS_SCALEUVCOLS_MMI) - if (TestCpuFlag(kCpuHasMMI)) { - ScaleUVCols = ScaleUVCols_Any_MMI; - if (IS_ALIGNED(dst_width, 1)) { - ScaleUVCols = ScaleUVCols_MMI; - } - } -#endif #if defined(HAS_SCALEUVCOLS_MSA) if (TestCpuFlag(kCpuHasMSA)) { ScaleUVCols = ScaleUVCols_Any_MSA; @@ -706,16 +885,12 @@ static void ScaleUVSimple(int src_width, if (TestCpuFlag(kCpuHasSSSE3) && IS_ALIGNED(dst_width, 8)) { ScaleUVCols = ScaleUVColsUp2_SSSE3; } -#endif -#if defined(HAS_SCALEUVCOLSUP2_MMI) - if (TestCpuFlag(kCpuHasMMI) && IS_ALIGNED(dst_width, 4)) { - ScaleUVCols = ScaleUVColsUp2_MMI; - } #endif } for (j = 0; j < dst_height; ++j) { - ScaleUVCols(dst_uv, src_uv + (y >> 16) * src_stride, dst_width, x, dx); + ScaleUVCols(dst_uv, src_uv + (y >> 16) * (intptr_t)src_stride, dst_width, x, + dx); dst_uv += dst_stride; y += dy; } @@ -723,23 +898,43 @@ static void ScaleUVSimple(int src_width, // Copy UV with optional flipping #if HAS_UVCOPY -static int UVCopy(const uint8_t* src_UV, - int src_stride_UV, - uint8_t* dst_UV, - int dst_stride_UV, +static int UVCopy(const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_uv, + int dst_stride_uv, int width, int height) { - if (!src_UV || !dst_UV || width <= 0 || height == 0) { + if (!src_uv || !dst_uv || width <= 0 || height == 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_uv = src_uv + (height - 1) * (intptr_t)src_stride_uv; + src_stride_uv = -src_stride_uv; + } + + CopyPlane(src_uv, src_stride_uv, dst_uv, dst_stride_uv, width * 2, height); + return 0; +} + +static int UVCopy_16(const uint16_t* src_uv, + int src_stride_uv, + uint16_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + if (!src_uv || !dst_uv || width <= 0 || height == 0) { return -1; } // Negative height means invert the image. if (height < 0) { height = -height; - src_UV = src_UV + (height - 1) * src_stride_UV; - src_stride_UV = -src_stride_UV; + src_uv = src_uv + (height - 1) * (intptr_t)src_stride_uv; + src_stride_uv = -src_stride_uv; } - CopyPlane(src_UV, src_stride_UV, dst_UV, dst_stride_UV, width * 2, height); + CopyPlane_16(src_uv, src_stride_uv, dst_uv, dst_stride_uv, width * 2, height); return 0; } #endif // HAS_UVCOPY @@ -773,7 +968,7 @@ static void ScaleUV(const uint8_t* src, // Negative src_height means invert the image. if (src_height < 0) { src_height = -src_height; - src = src + (src_height - 1) * src_stride; + src = src + (src_height - 1) * (intptr_t)src_stride; src_stride = -src_stride; } ScaleSlope(src_width, src_height, dst_width, dst_height, filtering, &x, &y, @@ -788,7 +983,7 @@ static void ScaleUV(const uint8_t* src, if (clip_y) { int64_t clipf = (int64_t)(clip_y)*dy; y += (clipf & 0xffff); - src += (clipf >> 16) * src_stride; + src += (clipf >> 16) * (intptr_t)src_stride; dst += clip_y * dst_stride; } @@ -829,8 +1024,8 @@ static void ScaleUV(const uint8_t* src, #ifdef HAS_UVCOPY if (dx == 0x10000 && dy == 0x10000) { // Straight copy. - UVCopy(src + (y >> 16) * src_stride + (x >> 16) * 2, src_stride, dst, - dst_stride, clip_width, clip_height); + UVCopy(src + (y >> 16) * (intptr_t)src_stride + (x >> 16) * 2, + src_stride, dst, dst_stride, clip_width, clip_height); return; } #endif @@ -841,10 +1036,21 @@ static void ScaleUV(const uint8_t* src, if (dx == 0x10000 && (x & 0xffff) == 0) { // Arbitrary scale vertically, but unscaled horizontally. 
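// Illustration (editorial, not part of the patch): x, y, dx and dy are 16.16
// fixed point, so 0x10000 is exactly one source pixel per destination pixel;
// that is why the dx == 0x10000 checks above select the straight-copy and
// vertical-only paths. A ratio of 100 source columns to 40 destination
// columns corresponds to dx = (100 << 16) / 40 = 0x28000, i.e. 2.5 source
// pixels per step:
//   int x = x0;                 // start offset chosen by ScaleSlope()
//   for (int j = 0; j < dst_width; ++j) {
//     int xi = x >> 16;         // integer source column
//     int xf = x & 0xffff;      // fraction consumed by the filter kernels
//     x += dx;                  // advance 2.5 source columns per output pixel
//   }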
ScalePlaneVertical(src_height, clip_width, clip_height, src_stride, - dst_stride, src, dst, x, y, dy, 4, filtering); + dst_stride, src, dst, x, y, dy, /*bpp=*/2, filtering); + return; + } + if (filtering && (dst_width + 1) / 2 == src_width) { + ScaleUVLinearUp2(src_width, src_height, clip_width, clip_height, src_stride, + dst_stride, src, dst); + return; + } + if ((clip_height + 1) / 2 == src_height && + (clip_width + 1) / 2 == src_width && + (filtering == kFilterBilinear || filtering == kFilterBox)) { + ScaleUVBilinearUp2(src_width, src_height, clip_width, clip_height, + src_stride, dst_stride, src, dst); return; } - #if HAS_SCALEUVBILINEARUP if (filtering && dy < 65536) { ScaleUVBilinearUp(src_width, src_height, clip_width, clip_height, @@ -876,7 +1082,7 @@ int UVScale(const uint8_t* src_uv, int dst_width, int dst_height, enum FilterMode filtering) { - if (!src_uv || src_width == 0 || src_height == 0 || src_width > 32768 || + if (!src_uv || src_width <= 0 || src_height == 0 || src_width > 32768 || src_height > 32768 || !dst_uv || dst_width <= 0 || dst_height <= 0) { return -1; } @@ -885,6 +1091,70 @@ int UVScale(const uint8_t* src_uv, return 0; } +// Scale a 16 bit UV image. +// This function is currently incomplete, it can't handle all cases. +LIBYUV_API +int UVScale_16(const uint16_t* src_uv, + int src_stride_uv, + int src_width, + int src_height, + uint16_t* dst_uv, + int dst_stride_uv, + int dst_width, + int dst_height, + enum FilterMode filtering) { + int dy = 0; + + if (!src_uv || src_width <= 0 || src_height == 0 || src_width > 32768 || + src_height > 32768 || !dst_uv || dst_width <= 0 || dst_height <= 0) { + return -1; + } + + // UV does not support box filter yet, but allow the user to pass it. + // Simplify filtering when possible. + filtering = ScaleFilterReduce(src_width, src_height, dst_width, dst_height, + filtering); + + // Negative src_height means invert the image. 
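// Worked illustration (editorial): with src_height = -4 and stride S, the block
// below moves the source pointer to the last row (src_uv + 3 * S) and negates
// the stride, so a later read of row yi lands on original row 3 - yi; the image
// is scaled and vertically flipped in one pass, without an extra copy.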
+ if (src_height < 0) { + src_height = -src_height; + src_uv = src_uv + (src_height - 1) * (intptr_t)src_stride_uv; + src_stride_uv = -src_stride_uv; + } + src_width = Abs(src_width); + +#ifdef HAS_UVCOPY + if (!filtering && src_width == dst_width && (src_height % dst_height == 0)) { + if (dst_height == 1) { + UVCopy_16(src_uv + ((src_height - 1) / 2) * (intptr_t)src_stride_uv, + src_stride_uv, dst_uv, dst_stride_uv, dst_width, dst_height); + } else { + dy = src_height / dst_height; + UVCopy_16(src_uv + ((dy - 1) / 2) * (intptr_t)src_stride_uv, + (int)(dy * (intptr_t)src_stride_uv), dst_uv, dst_stride_uv, + dst_width, dst_height); + } + + return 0; + } +#endif + + if (filtering && (dst_width + 1) / 2 == src_width) { + ScaleUVLinearUp2_16(src_width, src_height, dst_width, dst_height, + src_stride_uv, dst_stride_uv, src_uv, dst_uv); + return 0; + } + + if ((dst_height + 1) / 2 == src_height && (dst_width + 1) / 2 == src_width && + (filtering == kFilterBilinear || filtering == kFilterBox)) { + ScaleUVBilinearUp2_16(src_width, src_height, dst_width, dst_height, + src_stride_uv, dst_stride_uv, src_uv, dst_uv); + return 0; + } + + return -1; +} + #ifdef __cplusplus } // extern "C" } // namespace libyuv diff --git a/TMessagesProj/jni/third_party/libyuv/source/scale_win.cc b/TMessagesProj/jni/third_party/libyuv/source/scale_win.cc index c5fc86f3e9..ea1f95c6c3 100644 --- a/TMessagesProj/jni/third_party/libyuv/source/scale_win.cc +++ b/TMessagesProj/jni/third_party/libyuv/source/scale_win.cc @@ -16,8 +16,9 @@ namespace libyuv { extern "C" { #endif -// This module is for 32 bit Visual C x86 and clangcl -#if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && defined(_MSC_VER) +// This module is for 32 bit Visual C x86 +#if !defined(LIBYUV_DISABLE_X86) && defined(_MSC_VER) && \ + !defined(__clang__) && defined(_M_IX86) // Offsets for source bytes 0 to 9 static const uvec8 kShuf0 = {0, 1, 3, 4, 5, 7, 8, 9, diff --git a/TMessagesProj/jni/utilities.cpp b/TMessagesProj/jni/utilities.cpp index 2b49b6b417..147c78a784 100644 --- a/TMessagesProj/jni/utilities.cpp +++ b/TMessagesProj/jni/utilities.cpp @@ -4,6 +4,11 @@ #include #include +#include +#include "breakpad/src/client/linux/handler/exception_handler.h" +#include "breakpad/src/client/linux/handler/minidump_descriptor.h" + + thread_local static char buf[PATH_MAX + 1]; extern "C" JNIEXPORT jstring Java_org_telegram_messenger_Utilities_readlink(JNIEnv *env, jclass clazz, jstring path) { @@ -29,3 +34,23 @@ extern "C" JNIEXPORT jstring Java_org_telegram_messenger_Utilities_readlinkFd(JN } return value; } + +bool dumpCallback(const google_breakpad::MinidumpDescriptor &descriptor, + void *context, + bool succeeded) { + + __android_log_print(ANDROID_LOG_DEBUG, "tmessages", + "Wrote breakpad minidump at %s succeeded=%d\n", descriptor.path(), + succeeded); + return false; +} + +extern "C" +JNIEXPORT void JNICALL +Java_org_telegram_messenger_Utilities_setupNativeCrashesListener(JNIEnv *env, jclass clazz, + jstring path) { + const char *dumpPath = (char *) env->GetStringUTFChars(path, NULL); + google_breakpad::MinidumpDescriptor descriptor(dumpPath); + new google_breakpad::ExceptionHandler(descriptor, NULL, dumpCallback, NULL, true, -1); + env->ReleaseStringUTFChars(path, dumpPath); +} diff --git a/TMessagesProj/jni/voip/CMakeLists.txt b/TMessagesProj/jni/voip/CMakeLists.txt index be045d0c02..1c210b6caa 100644 --- a/TMessagesProj/jni/voip/CMakeLists.txt +++ b/TMessagesProj/jni/voip/CMakeLists.txt @@ -43,7 +43,7 @@ set_target_properties(tgvoip PROPERTIES 
target_compile_definitions(tgvoip PUBLIC HAVE_PTHREAD __STDC_LIMIT_MACROS BSD=1 USE_KISS_FFT TGVOIP_NO_VIDEO NULL=0 SOCKLEN_T=socklen_t LOCALE_NOT_USED _LARGEFILE_SOURCE=1 _FILE_OFFSET_BITS=64 restrict= __EMX__ OPUS_BUILD FIXED_POINT USE_ALLOCA HAVE_LRINT HAVE_LRINTF TGVOIP_NO_DSP) target_compile_definitions(tgvoip PUBLIC - RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID WEBRTC_USE_H264 NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux) + WEBRTC_HAVE_DCSCTP RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID WEBRTC_USE_H264 NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux) target_include_directories(tgvoip PUBLIC ./ voip @@ -56,7 +56,8 @@ target_include_directories(tgvoip PUBLIC boringssl/include voip/webrtc voip/tgcalls - voip/libtgvoip) + voip/libtgvoip + voip/webrtc/third_party/crc32c/src/include) if (${ANDROID_ABI} STREQUAL "armeabi-v7a") target_compile_definitions(tgvoip PUBLIC @@ -240,7 +241,7 @@ target_compile_definitions(rnnoise PRIVATE pitch_search=rnnoise_pitch_search remove_doubling=rnnoise_remove_doubling) target_compile_definitions(rnnoise PUBLIC - RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID WEBRTC_USE_H264 NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux) + WEBRTC_HAVE_DCSCTP RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID WEBRTC_USE_H264 NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux) target_include_directories(rnnoise PUBLIC voip/rnnoise/include) @@ -291,9 +292,7 @@ add_library(tgcalls_tp STATIC third_party/usrsctplib/user_recv_thread.c third_party/usrsctplib/user_socket.c voip/webrtc/absl/base/internal/cycleclock.cc - voip/webrtc/absl/base/internal/exponential_biased.cc voip/webrtc/absl/base/internal/low_level_alloc.cc - voip/webrtc/absl/base/internal/periodic_sampler.cc voip/webrtc/absl/base/internal/raw_logging.cc voip/webrtc/absl/base/internal/scoped_set_env.cc voip/webrtc/absl/base/internal/spinlock.cc @@ -317,7 +316,6 @@ add_library(tgcalls_tp STATIC voip/webrtc/absl/debugging/internal/stack_consumption.cc voip/webrtc/absl/debugging/internal/vdso_support.cc voip/webrtc/absl/debugging/leak_check.cc - voip/webrtc/absl/debugging/leak_check_disable.cc voip/webrtc/absl/debugging/stacktrace.cc 
voip/webrtc/absl/debugging/symbolize.cc voip/webrtc/absl/flags/flag.cc @@ -404,7 +402,7 @@ target_compile_options(tgcalls_tp PUBLIC set_target_properties(tgcalls_tp PROPERTIES ANDROID_ARM_MODE arm) target_compile_definitions(tgcalls_tp PUBLIC - RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID WEBRTC_USE_H264 NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux HAVE_WEBRTC_VIDEO __ANDROID__ TGVOIP_NO_DSP) + WEBRTC_HAVE_DCSCTP RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID WEBRTC_USE_H264 NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux HAVE_WEBRTC_VIDEO __ANDROID__ TGVOIP_NO_DSP) target_include_directories(tgcalls_tp PUBLIC ./ voip @@ -423,10 +421,13 @@ target_include_directories(tgcalls_tp PUBLIC third_party/libsrtp/config third_party/libsrtp/crypto/include libvpx/build/${ANDROID_ABI}/include - third_party) + third_party + voip/webrtc/third_party/crc32c/src/include) #tgcalls+webrtc add_library(tgcalls STATIC + voip/tgcalls/FieldTrialsConfig.cpp + voip/tgcalls/ChannelManager.cpp voip/tgcalls/CodecSelectHelper.cpp voip/tgcalls/CryptoHelper.cpp voip/tgcalls/EncryptedConnection.cpp @@ -460,10 +461,15 @@ add_library(tgcalls STATIC voip/tgcalls/v2/SignalingEncryption.cpp voip/tgcalls/v2/ContentNegotiation.cpp voip/tgcalls/v2/InstanceV2ReferenceImpl.cpp + voip/tgcalls/v2/ExternalSignalingConnection.cpp + voip/tgcalls/v2/ReflectorPort.cpp + voip/tgcalls/v2/ReflectorRelayPortFactory.cpp + voip/tgcalls/v2/SignalingConnection.cpp + voip/tgcalls/v2/SignalingSctpConnection.cpp voip/tgcalls/v2_4_0_0/InstanceV2_4_0_0Impl.cpp voip/tgcalls/v2_4_0_0/Signaling_4_0_0.cpp + voip/tgcalls/utils/gzip.cpp voip/webrtc/rtc_base/bitstream_reader.cc - voip/webrtc/rtc_base/async_invoker.cc voip/webrtc/rtc_base/system_time.cc voip/webrtc/rtc_base/async_resolver.cc voip/webrtc/rtc_base/async_packet_socket.cc @@ -491,7 +497,6 @@ add_library(tgcalls STATIC voip/webrtc/rtc_base/experiments/field_trial_list.cc voip/webrtc/rtc_base/experiments/field_trial_parser.cc voip/webrtc/rtc_base/experiments/field_trial_units.cc - voip/webrtc/rtc_base/experiments/jitter_upper_bound_experiment.cc voip/webrtc/rtc_base/experiments/keyframe_interval_settings.cc voip/webrtc/rtc_base/experiments/min_video_bitrate_experiment.cc voip/webrtc/rtc_base/experiments/normalize_simulcast_size_experiment.cc @@ -510,13 +515,11 @@ add_library(tgcalls STATIC voip/webrtc/rtc_base/ifaddrs_android.cc voip/webrtc/rtc_base/ifaddrs_converter.cc voip/webrtc/rtc_base/ip_address.cc - voip/webrtc/rtc_base/location.cc voip/webrtc/rtc_base/log_sinks.cc voip/webrtc/rtc_base/logging.cc voip/webrtc/rtc_base/memory/aligned_malloc.cc voip/webrtc/rtc_base/memory/fifo_buffer.cc voip/webrtc/rtc_base/message_digest.cc - voip/webrtc/rtc_base/message_handler.cc voip/webrtc/rtc_base/net_helper.cc voip/webrtc/rtc_base/net_helpers.cc voip/webrtc/rtc_base/network.cc @@ 
-569,22 +572,18 @@ add_library(tgcalls STATIC voip/webrtc/rtc_base/strings/audio_format_to_string.cc voip/webrtc/rtc_base/strings/string_builder.cc voip/webrtc/rtc_base/strings/string_format.cc - voip/webrtc/rtc_base/synchronization/mutex.cc voip/webrtc/rtc_base/synchronization/yield.cc voip/webrtc/rtc_base/synchronization/sequence_checker_internal.cc voip/webrtc/rtc_base/synchronization/yield_policy.cc voip/webrtc/rtc_base/system/file_wrapper.cc - voip/webrtc/rtc_base/system/thread_registry.cc voip/webrtc/rtc_base/system/warn_current_thread_is_deadlocked.cc voip/webrtc/rtc_base/task_queue.cc voip/webrtc/rtc_base/task_queue_libevent.cc voip/webrtc/rtc_base/task_queue_stdlib.cc - voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.cc voip/webrtc/rtc_base/task_utils/repeating_task.cc voip/webrtc/rtc_base/third_party/base64/base64.cc voip/webrtc/rtc_base/third_party/sigslot/sigslot.cc voip/webrtc/rtc_base/thread.cc - voip/webrtc/rtc_base/time/timestamp_extrapolator.cc voip/webrtc/rtc_base/time_utils.cc voip/webrtc/rtc_base/timestamp_aligner.cc voip/webrtc/rtc_base/unique_id_generator.cc @@ -627,6 +626,8 @@ add_library(tgcalls STATIC voip/webrtc/api/audio_options.cc voip/webrtc/api/call/transport.cc voip/webrtc/api/candidate.cc + voip/webrtc/api/field_trials.cc + voip/webrtc/api/field_trials_registry.cc voip/webrtc/api/create_peerconnection_factory.cc voip/webrtc/api/crypto/crypto_options.cc voip/webrtc/api/data_channel_interface.cc @@ -651,11 +652,11 @@ add_library(tgcalls STATIC voip/webrtc/api/rtp_packet_info.cc voip/webrtc/api/rtp_parameters.cc voip/webrtc/api/rtp_receiver_interface.cc - voip/webrtc/api/rtp_sender_interface.cc voip/webrtc/api/rtp_transceiver_interface.cc voip/webrtc/api/sctp_transport_interface.cc voip/webrtc/api/stats_types.cc voip/webrtc/api/task_queue/default_task_queue_factory_libevent.cc + voip/webrtc/api/task_queue/pending_task_safety_flag.cc voip/webrtc/api/task_queue/task_queue_base.cc voip/webrtc/api/transport/bitrate_settings.cc voip/webrtc/api/transport/field_trial_based_config.cc @@ -667,6 +668,10 @@ add_library(tgcalls STATIC voip/webrtc/api/video/video_frame_metadata.cc voip/webrtc/api/voip/voip_engine_factory.cc voip/webrtc/api/video/rtp_video_frame_assembler.cc + voip/webrtc/api/video/frame_buffer.cc + voip/webrtc/api/video/i210_buffer.cc + voip/webrtc/api/video/i422_buffer.cc + voip/webrtc/api/video/i444_buffer.cc voip/webrtc/api/numerics/samples_stats_counter.cc voip/webrtc/api/wrapping_async_dns_resolver.cc voip/webrtc/call/adaptation/adaptation_constraint.cc @@ -710,7 +715,6 @@ add_library(tgcalls STATIC voip/webrtc/api/video_codecs/video_decoder.cc voip/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc voip/webrtc/api/video_codecs/video_encoder.cc - voip/webrtc/api/video_codecs/video_encoder_config.cc voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc voip/webrtc/api/video_codecs/vp8_frame_config.cc voip/webrtc/api/video_codecs/vp8_temporal_layers.cc @@ -718,10 +722,13 @@ add_library(tgcalls STATIC voip/webrtc/api/video_codecs/spatial_layer.cc voip/webrtc/api/video_codecs/h264_profile_level_id.cc voip/webrtc/api/video_codecs/vp9_profile.cc + voip/webrtc/api/video_codecs/av1_profile.cc + voip/webrtc/api/video_codecs/scalability_mode.cc + voip/webrtc/api/video_codecs/scalability_mode_helper.cc + voip/webrtc/api/video_codecs/simulcast_stream.cc voip/webrtc/pc/audio_rtp_receiver.cc voip/webrtc/pc/audio_track.cc voip/webrtc/pc/channel.cc - voip/webrtc/pc/channel_manager.cc voip/webrtc/pc/data_channel_controller.cc 
voip/webrtc/pc/data_channel_utils.cc voip/webrtc/pc/dtls_srtp_transport.cc @@ -736,6 +743,7 @@ add_library(tgcalls STATIC voip/webrtc/pc/jsep_transport.cc voip/webrtc/pc/jsep_transport_collection.cc voip/webrtc/pc/jsep_transport_controller.cc + voip/webrtc/pc/legacy_stats_collector.cc voip/webrtc/pc/local_audio_source.cc voip/webrtc/pc/media_protocol_names.cc voip/webrtc/pc/media_session.cc @@ -753,7 +761,6 @@ add_library(tgcalls STATIC voip/webrtc/pc/rtp_sender.cc voip/webrtc/pc/rtp_transceiver.cc voip/webrtc/pc/rtp_transport.cc - voip/webrtc/pc/sctp_data_channel_transport.cc voip/webrtc/pc/sctp_transport.cc voip/webrtc/pc/sctp_utils.cc voip/webrtc/pc/sctp_data_channel.cc @@ -764,7 +771,6 @@ add_library(tgcalls STATIC voip/webrtc/pc/srtp_filter.cc voip/webrtc/pc/srtp_session.cc voip/webrtc/pc/srtp_transport.cc - voip/webrtc/pc/stats_collector.cc voip/webrtc/pc/track_media_info_map.cc voip/webrtc/pc/transport_stats.cc voip/webrtc/pc/video_rtp_receiver.cc @@ -801,7 +807,6 @@ add_library(tgcalls STATIC voip/webrtc/media/engine/internal_encoder_factory.cc voip/webrtc/media/engine/multiplex_codec_factory.cc voip/webrtc/media/engine/payload_type_mapper.cc - voip/webrtc/media/engine/simulcast.cc voip/webrtc/media/engine/simulcast_encoder_adapter.cc voip/webrtc/media/engine/unhandled_packets_buffer.cc voip/webrtc/media/engine/webrtc_media_engine.cc @@ -810,7 +815,6 @@ add_library(tgcalls STATIC voip/webrtc/media/engine/webrtc_voice_engine.cc voip/webrtc/media/sctp/dcsctp_transport.cc voip/webrtc/media/sctp/sctp_transport_factory.cc - voip/webrtc/media/sctp/usrsctp_transport.cc voip/webrtc/system_wrappers/source/clock.cc voip/webrtc/system_wrappers/source/cpu_features.cc voip/webrtc/system_wrappers/source/cpu_info.cc @@ -1024,7 +1028,7 @@ add_library(tgcalls STATIC voip/webrtc/modules/audio_coding/neteq/timestamp_scaler.cc voip/webrtc/modules/audio_coding/neteq/reorder_optimizer.cc voip/webrtc/modules/audio_coding/neteq/underrun_optimizer.cc - voip/webrtc/modules/audio_coding/neteq/relative_arrival_delay_tracker.cc + voip/webrtc/modules/audio_coding/neteq/packet_arrival_history.cc voip/webrtc/modules/audio_device/audio_device_buffer.cc voip/webrtc/modules/audio_device/audio_device_data_observer.cc voip/webrtc/modules/audio_device/audio_device_generic.cc @@ -1096,6 +1100,8 @@ add_library(tgcalls STATIC voip/webrtc/modules/audio_processing/aec3/suppression_filter.cc voip/webrtc/modules/audio_processing/aec3/suppression_gain.cc voip/webrtc/modules/audio_processing/aec3/transparent_mode.cc + voip/webrtc/modules/audio_processing/aec3/config_selector.cc + voip/webrtc/modules/audio_processing/aec3/multi_channel_content_detector.cc voip/webrtc/modules/audio_processing/aec_dump/null_aec_dump_factory.cc voip/webrtc/modules/audio_processing/aecm/aecm_core.cc voip/webrtc/modules/audio_processing/aecm/aecm_core_c.cc @@ -1106,15 +1112,10 @@ add_library(tgcalls STATIC voip/webrtc/modules/audio_processing/agc/legacy/digital_agc.cc voip/webrtc/modules/audio_processing/agc/loudness_histogram.cc voip/webrtc/modules/audio_processing/agc/utility.cc - voip/webrtc/modules/audio_processing/agc/clipping_predictor.cc - voip/webrtc/modules/audio_processing/agc/clipping_predictor_evaluator.cc - voip/webrtc/modules/audio_processing/agc/clipping_predictor_level_buffer.cc - voip/webrtc/modules/audio_processing/agc/analog_gain_stats_reporter.cc voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc voip/webrtc/modules/audio_processing/agc2/vad_wrapper.cc 
voip/webrtc/modules/audio_processing/agc2/cpu_features.cc voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc - voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc voip/webrtc/modules/audio_processing/agc2/agc2_testing_common.cc voip/webrtc/modules/audio_processing/agc2/biquad_filter.cc voip/webrtc/modules/audio_processing/agc2/compute_interpolated_gain_curve.cc @@ -1137,6 +1138,11 @@ add_library(tgcalls STATIC voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_fc.cc voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_gru.cc + voip/webrtc/modules/audio_processing/agc2/speech_level_estimator.cc + voip/webrtc/modules/audio_processing/agc2/speech_probability_buffer.cc + voip/webrtc/modules/audio_processing/agc2/clipping_predictor.cc + voip/webrtc/modules/audio_processing/agc2/clipping_predictor_level_buffer.cc + voip/webrtc/modules/audio_processing/agc2/input_volume_stats_reporter.cc voip/webrtc/modules/audio_processing/audio_buffer.cc voip/webrtc/modules/audio_processing/audio_processing_impl.cc voip/webrtc/modules/audio_processing/audio_processing_builder_impl.cc @@ -1176,7 +1182,7 @@ add_library(tgcalls STATIC voip/webrtc/modules/audio_processing/transient/transient_suppressor_impl.cc voip/webrtc/modules/audio_processing/transient/wpd_node.cc voip/webrtc/modules/audio_processing/transient/wpd_tree.cc - voip/webrtc/modules/audio_processing/typing_detection.cc + voip/webrtc/modules/audio_processing/transient/voice_probability_delay_unit.cc voip/webrtc/modules/audio_processing/utility/cascaded_biquad_filter.cc voip/webrtc/modules/audio_processing/utility/delay_estimator.cc voip/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.cc @@ -1189,7 +1195,6 @@ add_library(tgcalls STATIC voip/webrtc/modules/audio_processing/vad/vad_audio_proc.cc voip/webrtc/modules/audio_processing/vad/vad_circular_buffer.cc voip/webrtc/modules/audio_processing/vad/voice_activity_detector.cc - voip/webrtc/modules/audio_processing/voice_detection.cc voip/webrtc/modules/audio_processing/optionally_built_submodule_creators.cc voip/webrtc/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.cc voip/webrtc/modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.cc @@ -1223,11 +1228,10 @@ add_library(tgcalls STATIC voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc voip/webrtc/modules/pacing/bitrate_prober.cc voip/webrtc/modules/pacing/interval_budget.cc - voip/webrtc/modules/pacing/paced_sender.cc voip/webrtc/modules/pacing/pacing_controller.cc voip/webrtc/modules/pacing/packet_router.cc - voip/webrtc/modules/pacing/round_robin_packet_queue.cc voip/webrtc/modules/pacing/task_queue_paced_sender.cc + voip/webrtc/modules/pacing/prioritized_packet_queue.cc voip/webrtc/modules/rtp_rtcp/include/report_block_data.cc voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.cc voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc @@ -1253,7 +1257,6 @@ add_library(tgcalls STATIC voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/common_header.cc voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.cc - voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.cc voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_reports.cc voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/fir.cc 
voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/loss_notification.cc @@ -1312,7 +1315,7 @@ add_library(tgcalls STATIC voip/webrtc/modules/rtp_rtcp/source/tmmbr_help.cc voip/webrtc/modules/rtp_rtcp/source/ulpfec_generator.cc voip/webrtc/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc - voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc + voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver.cc voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer.cc voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc @@ -1328,15 +1331,13 @@ add_library(tgcalls STATIC voip/webrtc/modules/rtp_rtcp/source/rtp_util.cc voip/webrtc/modules/utility/source/helpers_android.cc voip/webrtc/modules/utility/source/jvm_android.cc - voip/webrtc/modules/utility/source/process_thread_impl.cc + voip/webrtc/modules/utility/maybe_worker_thread.cc voip/webrtc/modules/video_capture/device_info_impl.cc voip/webrtc/modules/video_capture/linux/device_info_linux.cc voip/webrtc/modules/video_capture/linux/video_capture_linux.cc voip/webrtc/modules/video_capture/video_capture_factory.cc + voip/webrtc/modules/video_capture/video_capture_factory_null.cc voip/webrtc/modules/video_capture/video_capture_impl.cc - voip/webrtc/modules/video_coding/codec_timer.cc - voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder_absent.cc - voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_absent.cc voip/webrtc/modules/video_coding/codecs/h264/h264.cc voip/webrtc/modules/video_coding/codecs/h264/h264_color_space.cc voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc @@ -1361,20 +1362,21 @@ add_library(tgcalls STATIC voip/webrtc/modules/video_coding/h265_vps_sps_pps_tracker.cc voip/webrtc/modules/video_coding/histogram.cc voip/webrtc/modules/video_coding/include/video_codec_interface.cc - voip/webrtc/modules/video_coding/inter_frame_delay.cc voip/webrtc/modules/video_coding/jitter_buffer.cc - voip/webrtc/modules/video_coding/jitter_estimator.cc voip/webrtc/modules/video_coding/loss_notification_controller.cc voip/webrtc/modules/video_coding/media_opt_util.cc voip/webrtc/modules/video_coding/packet.cc voip/webrtc/modules/video_coding/packet_buffer.cc voip/webrtc/modules/video_coding/receiver.cc voip/webrtc/modules/video_coding/rtp_frame_reference_finder.cc - voip/webrtc/modules/video_coding/rtt_filter.cc + voip/webrtc/modules/video_coding/timing/rtt_filter.cc voip/webrtc/modules/video_coding/session_info.cc - voip/webrtc/modules/video_coding/timestamp_map.cc - voip/webrtc/modules/video_coding/timing.cc - voip/webrtc/modules/video_coding/unique_timestamp_counter.cc + voip/webrtc/modules/video_coding/timing/timing.cc + voip/webrtc/modules/video_coding/timing/codec_timer.cc + voip/webrtc/modules/video_coding/timing/frame_delay_variation_kalman_filter.cc + voip/webrtc/modules/video_coding/timing/inter_frame_delay.cc + voip/webrtc/modules/video_coding/timing/jitter_estimator.cc + voip/webrtc/modules/video_coding/timing/timestamp_extrapolator.cc voip/webrtc/modules/video_coding/utility/decoded_frames_history.cc voip/webrtc/modules/video_coding/utility/qp_parser.cc voip/webrtc/modules/video_coding/utility/frame_dropper.cc @@ -1398,6 +1400,7 @@ add_library(tgcalls STATIC voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc voip/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc voip/webrtc/modules/video_coding/codecs/vp8/temporal_layers_checker.cc + 
voip/webrtc/modules/video_coding/codecs/vp8/vp8_scalability.cc voip/webrtc/modules/video_coding/codecs/vp9/svc_config.cc voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc @@ -1410,6 +1413,8 @@ add_library(tgcalls STATIC voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.cc voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.cc voip/webrtc/modules/video_coding/svc/svc_rate_allocator.cc + voip/webrtc/modules/video_coding/svc/scalability_mode_util.cc + voip/webrtc/modules/video_coding/frame_helpers.cc voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.cc voip/webrtc/modules/video_coding/rtp_frame_id_only_ref_finder.cc voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.cc @@ -1417,13 +1422,7 @@ add_library(tgcalls STATIC voip/webrtc/modules/video_coding/rtp_generic_ref_finder.cc voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.cc voip/webrtc/modules/video_coding/nack_requester.cc - voip/webrtc/modules/video_coding/frame_buffer3.cc voip/webrtc/modules/video_coding/h264_packet_buffer.cc - voip/webrtc/modules/video_processing/util/denoiser_filter.cc - voip/webrtc/modules/video_processing/util/denoiser_filter_c.cc - voip/webrtc/modules/video_processing/util/noise_estimation.cc - voip/webrtc/modules/video_processing/util/skin_detection.cc - voip/webrtc/modules/video_processing/video_denoiser.cc voip/webrtc/call/adaptation/resource_adaptation_processor_interface.cc voip/webrtc/call/adaptation/video_source_restrictions.cc voip/webrtc/call/audio_receive_stream.cc @@ -1522,10 +1521,8 @@ add_library(tgcalls STATIC voip/webrtc/common_video/h265/h265_pps_parser.cc voip/webrtc/common_video/h265/h265_sps_parser.cc voip/webrtc/common_video/h265/h265_vps_parser.cc - voip/webrtc/common_video/incoming_video_stream.cc voip/webrtc/common_video/libyuv/webrtc_libyuv.cc voip/webrtc/common_video/video_frame_buffer.cc - voip/webrtc/common_video/video_render_frames.cc voip/webrtc/common_video/video_frame_buffer_pool.cc voip/webrtc/common_video/framerate_controller.cc voip/webrtc/p2p/base/async_stun_tcp_socket.cc @@ -1557,10 +1554,11 @@ add_library(tgcalls STATIC voip/webrtc/p2p/base/transport_description_factory.cc voip/webrtc/p2p/base/turn_port.cc voip/webrtc/p2p/base/turn_server.cc + voip/webrtc/p2p/base/ice_switch_reason.cc + voip/webrtc/p2p/base/wrapping_active_ice_controller.cc voip/webrtc/p2p/client/basic_port_allocator.cc voip/webrtc/p2p/client/turn_port_factory.cc voip/webrtc/p2p/stunprober/stun_prober.cc - voip/webrtc/modules/video_coding/deprecated/nack_module.cc voip/webrtc/modules/async_audio_processing/async_audio_processing.cc voip/webrtc/logging/rtc_event_log/encoder/blob_encoding.cc voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.cc @@ -1610,7 +1608,6 @@ add_library(tgcalls STATIC voip/webrtc/video/adaptation/quality_rampup_experiment_helper.cc voip/webrtc/video/adaptation/bandwidth_quality_scaler_resource.cc voip/webrtc/video/buffered_frame_decryptor.cc - voip/webrtc/video/call_stats.cc voip/webrtc/video/encoder_bitrate_adjuster.cc voip/webrtc/video/encoder_overshoot_detector.cc voip/webrtc/video/encoder_rtcp_feedback.cc @@ -1618,9 +1615,7 @@ add_library(tgcalls STATIC voip/webrtc/video/frame_encode_metadata_writer.cc voip/webrtc/video/quality_limitation_reason_tracker.cc voip/webrtc/video/quality_threshold.cc - voip/webrtc/video/receive_statistics_proxy.cc voip/webrtc/video/report_block_stats.cc - voip/webrtc/video/rtp_streams_synchronizer.cc 
voip/webrtc/video/rtp_video_stream_receiver.cc voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc voip/webrtc/video/send_delay_stats.cc @@ -1628,11 +1623,9 @@ add_library(tgcalls STATIC voip/webrtc/video/stats_counter.cc voip/webrtc/video/stream_synchronization.cc voip/webrtc/video/transport_adapter.cc - voip/webrtc/video/video_quality_observer.cc voip/webrtc/video/video_send_stream.cc voip/webrtc/video/video_send_stream_impl.cc voip/webrtc/video/video_source_sink_controller.cc - voip/webrtc/video/video_stream_decoder.cc voip/webrtc/video/video_stream_decoder_impl.cc voip/webrtc/video/video_stream_encoder.cc voip/webrtc/video/video_stream_decoder2.cc @@ -1644,6 +1637,19 @@ add_library(tgcalls STATIC voip/webrtc/video/call_stats2.cc voip/webrtc/video/alignment_adjuster.cc voip/webrtc/video/frame_cadence_adapter.cc + voip/webrtc/video/unique_timestamp_counter.cc + voip/webrtc/video/decode_synchronizer.cc + voip/webrtc/video/frame_decode_timing.cc + voip/webrtc/video/task_queue_frame_decode_scheduler.cc + voip/webrtc/video/unique_timestamp_counter.cc + voip/webrtc/video/unique_timestamp_counter.cc + voip/webrtc/video/video_receive_stream_timeout_tracker.cc + voip/webrtc/video/video_stream_buffer_controller.cc + voip/webrtc/video/render/incoming_video_stream.cc + voip/webrtc/video/render/video_render_frames.cc + voip/webrtc/video/config/encoder_stream_factory.cc + voip/webrtc/video/config/simulcast.cc + voip/webrtc/video/config/video_encoder_config.cc voip/webrtc/audio/audio_level.cc voip/webrtc/audio/audio_receive_stream.cc voip/webrtc/audio/audio_send_stream.cc @@ -1653,7 +1659,6 @@ add_library(tgcalls STATIC voip/webrtc/audio/channel_receive_frame_transformer_delegate.cc voip/webrtc/audio/channel_send.cc voip/webrtc/audio/channel_send_frame_transformer_delegate.cc - voip/webrtc/audio/null_audio_poller.cc voip/webrtc/audio/remix_resample.cc voip/webrtc/audio/utility/audio_frame_operations.cc voip/webrtc/audio/utility/channel_mixer.cc @@ -1686,13 +1691,84 @@ add_library(tgcalls STATIC voip/webrtc/base/third_party/libevent/poll.c voip/webrtc/base/third_party/libevent/select.c voip/webrtc/base/third_party/libevent/signal.c - voip/webrtc/base/third_party/libevent/strlcpy.c) + voip/webrtc/base/third_party/libevent/strlcpy.c + voip/webrtc/net/dcsctp/public/dcsctp_socket_factory.cc + voip/webrtc/net/dcsctp/public/dcsctp_handover_state.cc + voip/webrtc/net/dcsctp/public/text_pcap_packet_observer.cc + voip/webrtc/net/dcsctp/socket/callback_deferrer.cc + voip/webrtc/net/dcsctp/socket/dcsctp_socket.cc + voip/webrtc/net/dcsctp/socket/heartbeat_handler.cc + voip/webrtc/net/dcsctp/socket/packet_sender.cc + voip/webrtc/net/dcsctp/socket/state_cookie.cc + voip/webrtc/net/dcsctp/socket/stream_reset_handler.cc + voip/webrtc/net/dcsctp/socket/transmission_control_block.cc + voip/webrtc/net/dcsctp/packet/chunk_validators.cc + voip/webrtc/net/dcsctp/packet/crc32c.cc + voip/webrtc/net/dcsctp/packet/sctp_packet.cc + voip/webrtc/net/dcsctp/packet/tlv_trait.cc + voip/webrtc/net/dcsctp/packet/chunk/abort_chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/cookie_ack_chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/cookie_echo_chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/data_chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/error_chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/forward_tsn_chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/heartbeat_ack_chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/heartbeat_request_chunk.cc + 
voip/webrtc/net/dcsctp/packet/chunk/idata_chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/iforward_tsn_chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/init_ack_chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/init_chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/reconfig_chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/sack_chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/shutdown_ack_chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/shutdown_chunk.cc + voip/webrtc/net/dcsctp/packet/chunk/shutdown_complete_chunk.cc + voip/webrtc/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.cc + voip/webrtc/net/dcsctp/packet/error_cause/error_cause.cc + voip/webrtc/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.cc + voip/webrtc/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.cc + voip/webrtc/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc + voip/webrtc/net/dcsctp/packet/error_cause/no_user_data_cause.cc + voip/webrtc/net/dcsctp/packet/error_cause/out_of_resource_error_cause.cc + voip/webrtc/net/dcsctp/packet/error_cause/protocol_violation_cause.cc + voip/webrtc/net/dcsctp/packet/error_cause/stale_cookie_error_cause.cc + voip/webrtc/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.cc + voip/webrtc/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.cc + voip/webrtc/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.cc + voip/webrtc/net/dcsctp/packet/error_cause/unresolvable_address_cause.cc + voip/webrtc/net/dcsctp/packet/error_cause/user_initiated_abort_cause.cc + voip/webrtc/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.cc + voip/webrtc/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.cc + voip/webrtc/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.cc + voip/webrtc/net/dcsctp/packet/parameter/heartbeat_info_parameter.cc + voip/webrtc/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.cc + voip/webrtc/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.cc + voip/webrtc/net/dcsctp/packet/parameter/parameter.cc + voip/webrtc/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc + voip/webrtc/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.cc + voip/webrtc/net/dcsctp/packet/parameter/state_cookie_parameter.cc + voip/webrtc/net/dcsctp/packet/parameter/supported_extensions_parameter.cc + voip/webrtc/net/dcsctp/rx/data_tracker.cc + voip/webrtc/net/dcsctp/rx/reassembly_queue.cc + voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.cc + voip/webrtc/net/dcsctp/rx/interleaved_reassembly_streams.cc + voip/webrtc/net/dcsctp/tx/outstanding_data.cc + voip/webrtc/net/dcsctp/tx/retransmission_error_counter.cc + voip/webrtc/net/dcsctp/tx/retransmission_queue.cc + voip/webrtc/net/dcsctp/tx/retransmission_timeout.cc + voip/webrtc/net/dcsctp/tx/rr_send_queue.cc + voip/webrtc/net/dcsctp/tx/stream_scheduler.cc + voip/webrtc/net/dcsctp/timer/task_queue_timeout.cc + voip/webrtc/net/dcsctp/timer/timer.cc + voip/webrtc/third_party/crc32c/src/crc32c_portable.cc + voip/webrtc/third_party/crc32c/src/crc32c.cc) target_compile_options(tgcalls PUBLIC -Wall -finline-functions -ffast-math) set_target_properties(tgcalls PROPERTIES ANDROID_ARM_MODE arm) target_compile_definitions(tgcalls PUBLIC - RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 WEBRTC_NS_FLOAT HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID WEBRTC_USE_H264 NDEBUG 
WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE HAVE_WEBRTC_VIDEO) + WEBRTC_HAVE_DCSCTP RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 WEBRTC_NS_FLOAT HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID WEBRTC_USE_H264 NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE HAVE_WEBRTC_VIDEO) target_include_directories(tgcalls PUBLIC ./ voip @@ -1714,7 +1790,8 @@ target_include_directories(tgcalls PUBLIC third_party voip/libtgvoip libvpx/build/${ANDROID_ABI}/include - ffmpeg/build/${ANDROID_ABI}/include) + ffmpeg/build/${ANDROID_ABI}/include + voip/webrtc/third_party/crc32c/src/include) if (${ANDROID_ABI} STREQUAL "armeabi-v7a") target_sources(tgcalls PRIVATE @@ -1746,7 +1823,6 @@ if (${ANDROID_ABI} STREQUAL "armeabi-v7a" OR ${ANDROID_ABI} STREQUAL "arm64-v8a" voip/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice_neon.c voip/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.c voip/webrtc/modules/audio_processing/aecm/aecm_core_neon.cc - voip/webrtc/modules/video_processing/util/denoiser_filter_neon.cc voip/webrtc/common_audio/fir_filter_neon.cc voip/webrtc/common_audio/signal_processing/cross_correlation_neon.c voip/webrtc/common_audio/signal_processing/downsample_fast_neon.c @@ -1755,7 +1831,6 @@ if (${ANDROID_ABI} STREQUAL "armeabi-v7a" OR ${ANDROID_ABI} STREQUAL "arm64-v8a" voip/webrtc/common_audio/third_party/ooura/fft_size_128/ooura_fft_neon.cc) else() target_sources(tgcalls PRIVATE - voip/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc voip/webrtc/common_audio/fir_filter_sse.cc voip/webrtc/common_audio/resampler/sinc_resampler_sse.cc voip/webrtc/common_audio/signal_processing/complex_bit_reverse.c @@ -1790,7 +1865,6 @@ add_library(voipandroid STATIC voip/webrtc/sdk/android/native_api/video/wrapper.cc voip/webrtc/sdk/android/native_api/network_monitor/network_monitor.cc voip/webrtc/sdk/android/src/jni/android_histogram.cc - voip/webrtc/sdk/android/src/jni/av1_codec.cc voip/webrtc/sdk/android/src/jni/egl_base_10_impl.cc voip/webrtc/sdk/android/src/jni/android_metrics.cc voip/webrtc/sdk/android/src/jni/android_network_monitor.cc @@ -1866,7 +1940,7 @@ target_compile_options(voipandroid PUBLIC set_target_properties(voipandroid PROPERTIES ANDROID_ARM_MODE arm) target_compile_definitions(voipandroid PUBLIC - RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 WEBRTC_NS_FLOAT HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID WEBRTC_USE_H264 NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE HAVE_WEBRTC_VIDEO) + WEBRTC_HAVE_DCSCTP RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 WEBRTC_NS_FLOAT HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID WEBRTC_USE_H264 NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE HAVE_WEBRTC_VIDEO) 
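One recurring change in these CMake hunks is the WEBRTC_HAVE_DCSCTP definition, added to every target alongside the net/dcsctp sources added just above. Inside the WebRTC tree such a flag is consumed as an ordinary compile-time gate, roughly of this shape (a sketch only; the real call sites are not taken from this patch):

#ifdef WEBRTC_HAVE_DCSCTP
// Build (and prefer) the dcSCTP-based data-channel transport.
#else
// Fall back to the usrsctp-based transport guarded by WEBRTC_HAVE_USRSCTP.
#endif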
target_include_directories(voipandroid PUBLIC ./ voip diff --git a/TMessagesProj/jni/voip/libtgvoip/os/android/AudioInputAndroid.cpp b/TMessagesProj/jni/voip/libtgvoip/os/android/AudioInputAndroid.cpp index 313a244526..536e14966f 100644 --- a/TMessagesProj/jni/voip/libtgvoip/os/android/AudioInputAndroid.cpp +++ b/TMessagesProj/jni/voip/libtgvoip/os/android/AudioInputAndroid.cpp @@ -8,6 +8,7 @@ #include #include "../../logging.h" #include "JNIUtilities.h" +#include "tgnet/FileLog.h" extern JavaVM* sharedJVM; @@ -25,6 +26,7 @@ AudioInputAndroid::AudioInputAndroid(){ jni::DoWithJNI([this](JNIEnv* env){ jmethodID ctor=env->GetMethodID(jniClass, "", "(J)V"); jobject obj=env->NewObject(jniClass, ctor, (jlong)(intptr_t)this); + DEBUG_REF("AudioInputAndroid"); javaObject=env->NewGlobalRef(obj); env->CallVoidMethod(javaObject, initMethod, 48000, 16, 1, 960*2); @@ -38,6 +40,7 @@ AudioInputAndroid::~AudioInputAndroid(){ MutexGuard guard(mutex); jni::DoWithJNI([this](JNIEnv* env){ env->CallVoidMethod(javaObject, releaseMethod); + DEBUG_DELREF("AudioInputAndroid"); env->DeleteGlobalRef(javaObject); javaObject=NULL; }); diff --git a/TMessagesProj/jni/voip/libtgvoip/os/android/AudioOutputAndroid.cpp b/TMessagesProj/jni/voip/libtgvoip/os/android/AudioOutputAndroid.cpp index c1dc2d5df1..96702f034c 100644 --- a/TMessagesProj/jni/voip/libtgvoip/os/android/AudioOutputAndroid.cpp +++ b/TMessagesProj/jni/voip/libtgvoip/os/android/AudioOutputAndroid.cpp @@ -7,6 +7,7 @@ #include "AudioOutputAndroid.h" #include #include "../../logging.h" +#include "tgnet/FileLog.h" extern JavaVM* sharedJVM; @@ -30,6 +31,7 @@ AudioOutputAndroid::AudioOutputAndroid(){ jmethodID ctor=env->GetMethodID(jniClass, "", "(J)V"); jobject obj=env->NewObject(jniClass, ctor, (jlong)(intptr_t)this); + DEBUG_REF("AudioOutputAndroid"); javaObject=env->NewGlobalRef(obj); env->CallVoidMethod(javaObject, initMethod, 48000, 16, 1, 960*2); @@ -50,6 +52,7 @@ AudioOutputAndroid::~AudioOutputAndroid(){ } env->CallVoidMethod(javaObject, releaseMethod); + DEBUG_DELREF("AudioOutputAndroid"); env->DeleteGlobalRef(javaObject); javaObject=NULL; diff --git a/TMessagesProj/jni/voip/libtgvoip/os/android/VideoSourceAndroid.cpp b/TMessagesProj/jni/voip/libtgvoip/os/android/VideoSourceAndroid.cpp index 8431f50054..5b297f7705 100644 --- a/TMessagesProj/jni/voip/libtgvoip/os/android/VideoSourceAndroid.cpp +++ b/TMessagesProj/jni/voip/libtgvoip/os/android/VideoSourceAndroid.cpp @@ -6,6 +6,7 @@ #include "JNIUtilities.h" #include "../../logging.h" #include "../../PrivateDefines.h" +#include "tgnet/FileLog.h" using namespace tgvoip; using namespace tgvoip::video; @@ -27,6 +28,7 @@ VideoSourceAndroid::VideoSourceAndroid(jobject jobj) : javaObject(jobj){ VideoSourceAndroid::~VideoSourceAndroid(){ jni::DoWithJNI([this](JNIEnv* env){ + DEBUG_DELREF("VideoSourceAndroid"); env->DeleteGlobalRef(javaObject); }); } diff --git a/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp b/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp index 6101038644..164e943c55 100644 --- a/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp +++ b/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp @@ -381,9 +381,13 @@ void initWebRTC(JNIEnv *env) { rtc::InitializeSSL(); webrtcLoaded = true; + DEBUG_REF("NativeInstanceClass"); NativeInstanceClass = static_cast(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/NativeInstance"))); + DEBUG_REF("TrafficStatsClass"); TrafficStatsClass = 
static_cast(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$TrafficStats"))); + DEBUG_REF("FingerprintClass"); FingerprintClass = static_cast(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$Fingerprint"))); + DEBUG_REF("FinalStateClass"); FinalStateClass = static_cast(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$FinalState"))); FinalStateInitMethod = env->GetMethodID(FinalStateClass, "", "([BLjava/lang/String;Lorg/telegram/messenger/voip/Instance$TrafficStats;Z)V"); } @@ -688,15 +692,16 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onSignalBarsUpdated", "(I)V"), count); }); }, - .audioLevelUpdated = [platformContext](float level) { - tgvoip::jni::DoWithJNI([platformContext, level](JNIEnv *env) { + .audioLevelsUpdated = [platformContext](float myAudioLevel, float audioLevel) { + tgvoip::jni::DoWithJNI([platformContext, myAudioLevel, audioLevel](JNIEnv *env) { jintArray intArray = nullptr; - jfloatArray floatArray = env->NewFloatArray(1); + jfloatArray floatArray = env->NewFloatArray(2); jbooleanArray boolArray = nullptr; - jfloat floatFill[1]; - floatFill[0] = level; - env->SetFloatArrayRegion(floatArray, 0, 1, floatFill); + jfloat floatFill[2]; + floatFill[0] = myAudioLevel; + floatFill[1] = audioLevel; + env->SetFloatArrayRegion(floatArray, 0, 2, floatFill); jobject globalRef = ((AndroidContext *) platformContext.get())->getJavaInstance(); env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onAudioLevelsUpdated", "([I[F[Z)V"), intArray, floatArray, boolArray); @@ -719,12 +724,14 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati }, .platformContext = platformContext, }; + descriptor.version = v; for (int i = 0, size = env->GetArrayLength(endpoints); i < size; i++) { JavaObject endpointObject(env, env->GetObjectArrayElement(endpoints, i)); bool isRtc = endpointObject.getBooleanField("isRtc"); if (isRtc) { RtcServer rtcServer; + rtcServer.id = static_cast(endpointObject.getIntField("reflectorId")); rtcServer.host = tgvoip::jni::JavaStringToStdString(env, endpointObject.getStringField("ipv4")); rtcServer.port = static_cast(endpointObject.getIntField("port")); rtcServer.login = tgvoip::jni::JavaStringToStdString(env, endpointObject.getStringField("username")); @@ -732,6 +739,16 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati rtcServer.isTurn = endpointObject.getBooleanField("turn"); descriptor.rtcServers.push_back(std::move(rtcServer)); } else { + RtcServer rtcServer; + rtcServer.id = static_cast(endpointObject.getIntField("reflectorId")); + rtcServer.host = tgvoip::jni::JavaStringToStdString(env, endpointObject.getStringField("ipv4")); + rtcServer.port = static_cast(endpointObject.getIntField("port")); + rtcServer.login = tgvoip::jni::JavaStringToStdString(env, endpointObject.getStringField("username")); + rtcServer.password = tgvoip::jni::JavaStringToStdString(env, endpointObject.getStringField("password")); + rtcServer.isTurn = true; + rtcServer.isTcp = endpointObject.getBooleanField("tcp"); + descriptor.rtcServers.push_back(std::move(rtcServer)); + Endpoint endpoint; endpoint.endpointId = endpointObject.getLongField("id"); endpoint.host = EndpointHost{tgvoip::jni::JavaStringToStdString(env, endpointObject.getStringField("ipv4")), tgvoip::jni::JavaStringToStdString(env, endpointObject.getStringField("ipv6"))}; @@ -743,7 +760,7 
@@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati memcpy(endpoint.peerTag, peerTagBytes, 16); env->ReleaseByteArrayElements(peerTag, peerTagBytes, JNI_ABORT); } - descriptor.endpoints.push_back(std::move(endpoint)); + descriptor.endpoints.push_back(std::move(endpoint)); } } diff --git a/TMessagesProj/jni/voip/tg_voip_jni.cpp b/TMessagesProj/jni/voip/tg_voip_jni.cpp index 05f021873b..5b3725957a 100644 --- a/TMessagesProj/jni/voip/tg_voip_jni.cpp +++ b/TMessagesProj/jni/voip/tg_voip_jni.cpp @@ -20,6 +20,7 @@ #include "libtgvoip/PrivateDefines.h" #include "libtgvoip/logging.h" #include "../c_utils.h" +#include "tgnet/FileLog.h" #ifdef TGVOIP_HAS_CONFIG #include @@ -63,6 +64,7 @@ namespace tgvoip { jlong VoIPController_nativeInit(JNIEnv *env, jobject thiz, jstring persistentStateFile) { ImplDataAndroid *impl = new ImplDataAndroid(); + DEBUG_REF("VoIPController_nativeInit"); impl->javaObject = env->NewGlobalRef(thiz); if (persistentStateFile) { impl->persistentStateFile = jni::JavaStringToStdString(env, persistentStateFile); } @@ -123,6 +125,7 @@ namespace tgvoip { ctlr->Stop(); std::vector<uint8_t> state = ctlr->GetPersistentState(); delete ctlr; + DEBUG_DELREF("VoIPController_nativeRelease"); env->DeleteGlobalRef(impl->javaObject); if (!impl->persistentStateFile.empty()) { FILE *f = fopen(impl->persistentStateFile.c_str(), "w"); @@ -294,6 +297,7 @@ int tgvoipOnJNILoad(JavaVM *vm, JNIEnv *env) { env->GetJavaVM(&sharedJVM); if (!AudioInputAndroid::jniClass) { jclass cls = env->FindClass(TGVOIP_PACKAGE_PATH "/AudioRecordJNI"); + DEBUG_REF("AudioRecordJNI"); AudioInputAndroid::jniClass = (jclass) env->NewGlobalRef(cls); AudioInputAndroid::initMethod = env->GetMethodID(cls, "init", "(IIII)V"); AudioInputAndroid::releaseMethod = env->GetMethodID(cls, "release", "()V"); @@ -302,6 +306,7 @@ int tgvoipOnJNILoad(JavaVM *vm, JNIEnv *env) { AudioInputAndroid::getEnabledEffectsMaskMethod = env->GetMethodID(cls, "getEnabledEffectsMask", "()I"); cls = env->FindClass(TGVOIP_PACKAGE_PATH "/AudioTrackJNI"); + DEBUG_REF("AudioTrackJNI"); AudioOutputAndroid::jniClass = (jclass) env->NewGlobalRef(cls); AudioOutputAndroid::initMethod = env->GetMethodID(cls, "init", "(IIII)V"); AudioOutputAndroid::releaseMethod = env->GetMethodID(cls, "release", "()V"); @@ -313,6 +318,7 @@ int tgvoipOnJNILoad(JavaVM *vm, JNIEnv *env) { setSignalBarsMethod = env->GetMethodID(controller, "handleSignalBarsChange", "(I)V"); if (!jniUtilitiesClass) { + DEBUG_REF("JNIUtilities"); jniUtilitiesClass = (jclass) env->NewGlobalRef(env->FindClass(TGVOIP_PACKAGE_PATH "/JNIUtilities")); } diff --git a/TMessagesProj/jni/voip/tgcalls/ChannelManager.cpp b/TMessagesProj/jni/voip/tgcalls/ChannelManager.cpp new file mode 100644 index 0000000000..37e2ce134c --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/ChannelManager.cpp @@ -0,0 +1,151 @@ +#include "ChannelManager.h" +#include +#include "absl/algorithm/container.h" +#include "absl/memory/memory.h" +#include "absl/strings/match.h" +#include "api/media_types.h" +#include "api/sequence_checker.h" +#include "media/base/media_constants.h" +#include "rtc_base/checks.h" +#include "rtc_base/trace_event.h" +namespace tgcalls { +// static +std::unique_ptr<ChannelManager> ChannelManager::Create( + std::unique_ptr<cricket::MediaEngineInterface> media_engine, + rtc::Thread* worker_thread, + rtc::Thread* network_thread) { + RTC_DCHECK(network_thread); + RTC_DCHECK(worker_thread); + return absl::WrapUnique(new ChannelManager( + std::move(media_engine), worker_thread, network_thread)); +} +ChannelManager::ChannelManager(
std::unique_ptr<cricket::MediaEngineInterface> media_engine, + rtc::Thread* worker_thread, + rtc::Thread* network_thread) + : media_engine_(std::move(media_engine)), + signaling_thread_(rtc::Thread::Current()), + worker_thread_(worker_thread), + network_thread_(network_thread) { + RTC_DCHECK_RUN_ON(signaling_thread_); + RTC_DCHECK(worker_thread_); + RTC_DCHECK(network_thread_); + if (media_engine_) { + // TODO(tommi): Change VoiceEngine to do ctor time initialization so that + // this isn't necessary. + worker_thread_->BlockingCall([&] { media_engine_->Init(); }); + } +} +ChannelManager::~ChannelManager() { + RTC_DCHECK_RUN_ON(signaling_thread_); + worker_thread_->BlockingCall([&] { + RTC_DCHECK_RUN_ON(worker_thread_); + RTC_DCHECK(voice_channels_.empty()); + RTC_DCHECK(video_channels_.empty()); + // While `media_engine_` is const throughout the ChannelManager's lifetime, + // it requires destruction to happen on the worker thread. Instead of + // marking the pointer as non-const, we live with this const_cast<> in the + // destructor. + const_cast<std::unique_ptr<cricket::MediaEngineInterface>&>(media_engine_).reset(); + }); +} +cricket::VoiceChannel* ChannelManager::CreateVoiceChannel( + webrtc::Call* call, + const cricket::MediaConfig& media_config, + const std::string& mid, + bool srtp_required, + const webrtc::CryptoOptions& crypto_options, + const cricket::AudioOptions& options) { + RTC_DCHECK(call); + RTC_DCHECK(media_engine_); + // TODO(bugs.webrtc.org/11992): Remove this workaround after updates in + // PeerConnection and add the expectation that we're already on the right + // thread. + if (!worker_thread_->IsCurrent()) { + cricket::VoiceChannel* temp = nullptr; + worker_thread_->BlockingCall([&] { + temp = CreateVoiceChannel(call, media_config, mid, srtp_required, + crypto_options, options); + }); + return temp; + } + RTC_DCHECK_RUN_ON(worker_thread_); + cricket::VoiceMediaChannel* media_channel = media_engine_->voice().CreateMediaChannel( + call, media_config, options, crypto_options); + if (!media_channel) { + return nullptr; + } + auto voice_channel = std::make_unique<cricket::VoiceChannel>( + worker_thread_, network_thread_, signaling_thread_, + absl::WrapUnique(media_channel), mid, srtp_required, crypto_options, + &ssrc_generator_); + cricket::VoiceChannel* voice_channel_ptr = voice_channel.get(); + voice_channels_.push_back(std::move(voice_channel)); + return voice_channel_ptr; +} +void ChannelManager::DestroyVoiceChannel(cricket::VoiceChannel* channel) { + TRACE_EVENT0("webrtc", "ChannelManager::DestroyVoiceChannel"); + RTC_DCHECK_RUN_ON(worker_thread_); + voice_channels_.erase(absl::c_find_if( + voice_channels_, [&](const auto& p) { return p.get() == channel; })); +} +cricket::VideoChannel* ChannelManager::CreateVideoChannel( + webrtc::Call* call, + const cricket::MediaConfig& media_config, + const std::string& mid, + bool srtp_required, + const webrtc::CryptoOptions& crypto_options, + const cricket::VideoOptions& options, + webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) { + RTC_DCHECK(call); + RTC_DCHECK(media_engine_); + // TODO(bugs.webrtc.org/11992): Remove this workaround after updates in + // PeerConnection and add the expectation that we're already on the right + // thread.
+ if (!worker_thread_->IsCurrent()) { + cricket::VideoChannel* temp = nullptr; + worker_thread_->BlockingCall([&] { + temp = CreateVideoChannel(call, media_config, mid, srtp_required, + crypto_options, options, + video_bitrate_allocator_factory); + }); + return temp; + } + RTC_DCHECK_RUN_ON(worker_thread_); + cricket::VideoMediaChannel* media_channel = media_engine_->video().CreateMediaChannel( + call, media_config, options, crypto_options, + video_bitrate_allocator_factory); + if (!media_channel) { + return nullptr; + } + auto video_channel = std::make_unique<cricket::VideoChannel>( + worker_thread_, network_thread_, signaling_thread_, + absl::WrapUnique(media_channel), mid, srtp_required, crypto_options, + &ssrc_generator_); + cricket::VideoChannel* video_channel_ptr = video_channel.get(); + video_channels_.push_back(std::move(video_channel)); + return video_channel_ptr; +} +void ChannelManager::DestroyVideoChannel(cricket::VideoChannel* channel) { + TRACE_EVENT0("webrtc", "ChannelManager::DestroyVideoChannel"); + RTC_DCHECK_RUN_ON(worker_thread_); + video_channels_.erase(absl::c_find_if( + video_channels_, [&](const auto& p) { return p.get() == channel; })); +} +void ChannelManager::DestroyChannel(cricket::ChannelInterface* channel) { + RTC_DCHECK(channel); + if (!worker_thread_->IsCurrent()) { + // TODO(tommi): Do this asynchronously when we have a way to make sure that + // the call to DestroyChannel runs before ~Call() runs, which today happens + // inside an Invoke from the signaling thread in PeerConnection::Close(). + worker_thread_->BlockingCall([&] { DestroyChannel(channel); }); + return; + } + if (channel->media_type() == cricket::MEDIA_TYPE_AUDIO) { + DestroyVoiceChannel(static_cast<cricket::VoiceChannel*>(channel)); + } else { + RTC_DCHECK_EQ(channel->media_type(), cricket::MEDIA_TYPE_VIDEO); + DestroyVideoChannel(static_cast<cricket::VideoChannel*>(channel)); + } +} +} // namespace tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/ChannelManager.h b/TMessagesProj/jni/voip/tgcalls/ChannelManager.h new file mode 100644 index 0000000000..150d374f0e --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/ChannelManager.h @@ -0,0 +1,95 @@ +#ifndef TGCALLS_CHANNEL_MANAGER_H_ +#define TGCALLS_CHANNEL_MANAGER_H_ +#include +#include +#include +#include +#include "api/audio_options.h" +#include "api/crypto/crypto_options.h" +#include "api/rtp_parameters.h" +#include "api/video/video_bitrate_allocator_factory.h" +#include "call/call.h" +#include "media/base/codec.h" +#include "media/base/media_channel.h" +#include "media/base/media_config.h" +#include "media/base/media_engine.h" +#include "pc/channel.h" +#include "pc/channel_interface.h" +#include "pc/rtp_transport_internal.h" +#include "pc/session_description.h" +#include "rtc_base/system/file_wrapper.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/unique_id_generator.h" +namespace tgcalls { +// ChannelManager allows the MediaEngine to run on a separate thread, and takes +// care of marshalling calls between threads. It also creates and keeps track of +// voice and video channels; by doing so, it can temporarily pause all the +// channels when a new audio or video device is chosen. The voice and video +// channels are stored in separate vectors, to easily allow operations on just +// voice or just video channels. +// ChannelManager also allows the application to discover what devices it has +// using device manager. +class ChannelManager { + public: + // Returns an initialized instance of ChannelManager.
+ // If media_engine is non-nullptr, then the returned ChannelManager instance + // will own that reference and media engine initialization will happen on the worker thread. + static std::unique_ptr<ChannelManager> Create( + std::unique_ptr<cricket::MediaEngineInterface> media_engine, + rtc::Thread* worker_thread, + rtc::Thread* network_thread); + ChannelManager() = delete; + ~ChannelManager(); + rtc::Thread* worker_thread() const { return worker_thread_; } + rtc::Thread* network_thread() const { return network_thread_; } + cricket::MediaEngineInterface* media_engine() { return media_engine_.get(); } + rtc::UniqueRandomIdGenerator& ssrc_generator() { return ssrc_generator_; } + // The operations below all occur on the worker thread. + // ChannelManager retains ownership of the created channels, so clients should + // call the appropriate Destroy*Channel method when done. + // Creates a voice channel, to be associated with the specified session. + cricket::VoiceChannel* CreateVoiceChannel(webrtc::Call* call, + const cricket::MediaConfig& media_config, + const std::string& mid, + bool srtp_required, + const webrtc::CryptoOptions& crypto_options, + const cricket::AudioOptions& options); + // Creates a video channel, synced with the specified voice channel, and + // associated with the specified session. + // Version of the above that takes PacketTransportInternal. + cricket::VideoChannel* CreateVideoChannel( + webrtc::Call* call, + const cricket::MediaConfig& media_config, + const std::string& mid, + bool srtp_required, + const webrtc::CryptoOptions& crypto_options, + const cricket::VideoOptions& options, + webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory); + void DestroyChannel(cricket::ChannelInterface* channel); + protected: + ChannelManager(std::unique_ptr<cricket::MediaEngineInterface> media_engine, + rtc::Thread* worker_thread, + rtc::Thread* network_thread); + // Destroys a voice channel created by CreateVoiceChannel. + void DestroyVoiceChannel(cricket::VoiceChannel* voice_channel); + // Destroys a video channel created by CreateVideoChannel. + void DestroyVideoChannel(cricket::VideoChannel* video_channel); + private: + const std::unique_ptr<cricket::MediaEngineInterface> media_engine_; // Nullable. + rtc::Thread* const signaling_thread_; + rtc::Thread* const worker_thread_; + rtc::Thread* const network_thread_; + // This object should be used to generate any SSRC that is not explicitly + // specified by the user (or by the remote party). + // TODO(bugs.webrtc.org/12666): This variable is used from both the signaling + // and worker threads. See if we can't restrict usage to a single thread. + rtc::UniqueRandomIdGenerator ssrc_generator_; + // Vector contents are non-null.
+ std::vector<std::unique_ptr<cricket::VoiceChannel>> voice_channels_ + RTC_GUARDED_BY(worker_thread_); + std::vector<std::unique_ptr<cricket::VideoChannel>> video_channels_ + RTC_GUARDED_BY(worker_thread_); +}; +} // namespace tgcalls +#endif // TGCALLS_CHANNEL_MANAGER_H_ \ No newline at end of file diff --git a/TMessagesProj/jni/voip/tgcalls/CodecSelectHelper.cpp b/TMessagesProj/jni/voip/tgcalls/CodecSelectHelper.cpp index af00efdd82..abdc9f6e3a 100644 --- a/TMessagesProj/jni/voip/tgcalls/CodecSelectHelper.cpp +++ b/TMessagesProj/jni/voip/tgcalls/CodecSelectHelper.cpp @@ -129,13 +129,37 @@ void AddDefaultFeedbackParams(cricket::VideoCodec *codec) { } } +std::vector<webrtc::SdpVideoFormat> RemoveScalabilityModes( + std::vector<webrtc::SdpVideoFormat> list) { + auto changed = false; + for (auto &entry : list) { + if (!entry.scalability_modes.empty()) { + entry.scalability_modes = {}; + changed = true; + } + } + if (changed && list.size() > 1) { + for (auto i = list.end() - 1; i != list.begin(); --i) { + if (std::find(list.begin(), i, *i) != i) { + i = list.erase(i); + } + } + } + return list; +} + } // namespace VideoFormatsMessage ComposeSupportedFormats( std::vector<webrtc::SdpVideoFormat> encoders, std::vector<webrtc::SdpVideoFormat> decoders, const std::vector<std::string> &preferredCodecs, - std::shared_ptr<PlatformContext> platformContext) { + std::shared_ptr<PlatformContext> platformContext) { + // We don't pass scalability_modes through signaling, + // So we have to remove them here, otherwise lists are different. + encoders = RemoveScalabilityModes(std::move(encoders)); + decoders = RemoveScalabilityModes(std::move(decoders)); + encoders = FilterAndSortEncoders(std::move(encoders), preferredCodecs, platformContext); auto result = VideoFormatsMessage(); diff --git a/TMessagesProj/jni/voip/tgcalls/FakeVideoTrackSource.cpp b/TMessagesProj/jni/voip/tgcalls/FakeVideoTrackSource.cpp index a740308f72..2c1c6eb283 100644 --- a/TMessagesProj/jni/voip/tgcalls/FakeVideoTrackSource.cpp +++ b/TMessagesProj/jni/voip/tgcalls/FakeVideoTrackSource.cpp @@ -126,7 +126,7 @@ class FakeVideoSource : public rtc::VideoSourceInterface<webrtc::VideoFrame> { } // RemoveSink must guarantee that at the time the method returns, // there is no current and no future calls to VideoSinkInterface::OnFrame.
- void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame> *sink) { + void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame> *sink) override { RTC_LOG(LS_WARNING) << "REMOVE"; data_->broadcaster_.RemoveSink(sink); } @@ -166,7 +166,7 @@ std::unique_ptr<FrameSource> FrameSource::chess(){ } void FrameSource::video_frame_to_rgb0(const webrtc::VideoFrame & src, char *dest){ - auto buffer = src.video_frame_buffer()->GetI420(); + auto buffer = src.video_frame_buffer()->ToI420(); libyuv::I420ToABGR(buffer->DataY(), buffer->StrideY(), buffer->DataU(), buffer->StrideU(), buffer->DataV(), buffer->StrideV(), reinterpret_cast<uint8_t *>(dest), src.width() * 4, src.width(), src.height()); } diff --git a/TMessagesProj/jni/voip/tgcalls/FieldTrialsConfig.cpp b/TMessagesProj/jni/voip/tgcalls/FieldTrialsConfig.cpp new file mode 100644 index 0000000000..42c986b39b --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/FieldTrialsConfig.cpp @@ -0,0 +1,7 @@ +#include "FieldTrialsConfig.h" + +namespace tgcalls { + +webrtc::FieldTrialBasedConfig fieldTrialsBasedConfig; + +} diff --git a/TMessagesProj/jni/voip/tgcalls/FieldTrialsConfig.h b/TMessagesProj/jni/voip/tgcalls/FieldTrialsConfig.h new file mode 100644 index 0000000000..1e9c4b88e5 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/FieldTrialsConfig.h @@ -0,0 +1,12 @@ +#ifndef TGCALLS_FIELD_TRIALS_CONFIG_H +#define TGCALLS_FIELD_TRIALS_CONFIG_H + +#include "api/transport/field_trial_based_config.h" + +namespace tgcalls { + +extern webrtc::FieldTrialBasedConfig fieldTrialsBasedConfig; + +} // namespace tgcalls + +#endif diff --git a/TMessagesProj/jni/voip/tgcalls/Instance.cpp b/TMessagesProj/jni/voip/tgcalls/Instance.cpp index 56619bb96b..8b0977105f 100644 --- a/TMessagesProj/jni/voip/tgcalls/Instance.cpp +++ b/TMessagesProj/jni/voip/tgcalls/Instance.cpp @@ -41,7 +41,7 @@ std::unique_ptr<Instance> Meta::Create( // Enforce correct protocol version.
if (version == "2.7.7") { descriptor.config.protocolVersion = ProtocolVersion::V0; - } else if (version == "3.0.0") { + } else if (version == "5.0.0") { descriptor.config.protocolVersion = ProtocolVersion::V1; } diff --git a/TMessagesProj/jni/voip/tgcalls/Instance.h b/TMessagesProj/jni/voip/tgcalls/Instance.h index fc11163655..3a43c29904 100644 --- a/TMessagesProj/jni/voip/tgcalls/Instance.h +++ b/TMessagesProj/jni/voip/tgcalls/Instance.h @@ -43,11 +43,13 @@ struct Proxy { }; struct RtcServer { + uint8_t id = 0; std::string host; uint16_t port = 0; std::string login; std::string password; bool isTurn = false; + bool isTcp = false; }; enum class EndpointType { @@ -227,12 +229,14 @@ struct Descriptor { std::shared_ptr videoCapture; std::function stateUpdated; std::function signalBarsUpdated; - std::function audioLevelUpdated; + std::function audioLevelsUpdated; std::function remoteBatteryLevelIsLowUpdated; std::function remoteMediaStateUpdated; std::function remotePrefferedAspectRatioUpdated; std::function &)> signalingDataEmitted; std::function(webrtc::TaskQueueFactory*)> createAudioDeviceModule; + std::string initialInputDeviceId; + std::string initialOutputDeviceId; std::shared_ptr platformContext; }; diff --git a/TMessagesProj/jni/voip/tgcalls/InstanceImpl.cpp b/TMessagesProj/jni/voip/tgcalls/InstanceImpl.cpp index aff9033f61..bec8963345 100644 --- a/TMessagesProj/jni/voip/tgcalls/InstanceImpl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/InstanceImpl.cpp @@ -7,6 +7,7 @@ #include "VideoCapturerInterface.h" namespace tgcalls { + namespace { rtc::Thread *makeManagerThread() { @@ -35,7 +36,7 @@ InstanceImpl::InstanceImpl(Descriptor &&descriptor) _manager.reset(new ThreadLocalObject(getManagerThread(), [descriptor = std::move(descriptor)]() mutable { return new Manager(getManagerThread(), std::move(descriptor)); })); - _manager->perform(RTC_FROM_HERE, [](Manager *manager) { + _manager->perform([](Manager *manager) { manager->start(); }); @@ -47,25 +48,25 @@ InstanceImpl::~InstanceImpl() { } void InstanceImpl::receiveSignalingData(const std::vector &data) { - _manager->perform(RTC_FROM_HERE, [data](Manager *manager) { + _manager->perform([data](Manager *manager) { manager->receiveSignalingData(data); }); }; void InstanceImpl::setVideoCapture(std::shared_ptr videoCapture) { - _manager->perform(RTC_FROM_HERE, [videoCapture](Manager *manager) { + _manager->perform([videoCapture](Manager *manager) { manager->setVideoCapture(videoCapture); }); } void InstanceImpl::sendVideoDeviceUpdated() { - _manager->perform(RTC_FROM_HERE, [](Manager *manager) { + _manager->perform([](Manager *manager) { manager->sendVideoDeviceUpdated(); }); } void InstanceImpl::setRequestedVideoAspect(float aspect) { - _manager->perform(RTC_FROM_HERE, [aspect](Manager *manager) { + _manager->perform([aspect](Manager *manager) { manager->setRequestedVideoAspect(aspect); }); } @@ -81,19 +82,19 @@ void InstanceImpl::setNetworkType(NetworkType networkType) { break; } - _manager->perform(RTC_FROM_HERE, [isLowCostNetwork](Manager *manager) { + _manager->perform([isLowCostNetwork](Manager *manager) { manager->setIsLocalNetworkLowCost(isLowCostNetwork); }); } void InstanceImpl::setMuteMicrophone(bool muteMicrophone) { - _manager->perform(RTC_FROM_HERE, [muteMicrophone](Manager *manager) { + _manager->perform([muteMicrophone](Manager *manager) { manager->setMuteOutgoingAudio(muteMicrophone); }); } void InstanceImpl::setIncomingVideoOutput(std::shared_ptr> sink) { - _manager->perform(RTC_FROM_HERE, [sink](Manager *manager) { + 
_manager->perform([sink](Manager *manager) { manager->setIncomingVideoOutput(sink); }); } @@ -105,25 +106,25 @@ void InstanceImpl::setEchoCancellationStrength(int strength) { } void InstanceImpl::setAudioInputDevice(std::string id) { - _manager->perform(RTC_FROM_HERE, [id](Manager *manager) { + _manager->perform([id](Manager *manager) { manager->setAudioInputDevice(id); }); } void InstanceImpl::setAudioOutputDevice(std::string id) { - _manager->perform(RTC_FROM_HERE, [id](Manager *manager) { + _manager->perform([id](Manager *manager) { manager->setAudioOutputDevice(id); }); } void InstanceImpl::setInputVolume(float level) { - _manager->perform(RTC_FROM_HERE, [level](Manager *manager) { + _manager->perform([level](Manager *manager) { manager->setInputVolume(level); }); } void InstanceImpl::setOutputVolume(float level) { - _manager->perform(RTC_FROM_HERE, [level](Manager *manager) { + _manager->perform([level](Manager *manager) { manager->setOutputVolume(level); }); } @@ -133,13 +134,13 @@ void InstanceImpl::setAudioOutputDuckingEnabled(bool enabled) { } void InstanceImpl::addExternalAudioSamples(std::vector &&samples) { - _manager->perform(RTC_FROM_HERE, [samples = std::move(samples)](Manager *manager) mutable { + _manager->perform([samples = std::move(samples)](Manager *manager) mutable { manager->addExternalAudioSamples(std::move(samples)); }); } void InstanceImpl::setIsLowBatteryLevel(bool isLowBatteryLevel) { - _manager->perform(RTC_FROM_HERE, [isLowBatteryLevel](Manager *manager) { + _manager->perform([isLowBatteryLevel](Manager *manager) { manager->setIsLowBatteryLevel(isLowBatteryLevel); }); } @@ -167,7 +168,7 @@ PersistentState InstanceImpl::getPersistentState() { void InstanceImpl::stop(std::function completion) { std::string debugLog = _logSink->result(); - _manager->perform(RTC_FROM_HERE, [completion, debugLog = std::move(debugLog)](Manager *manager) { + _manager->perform([completion, debugLog = std::move(debugLog)](Manager *manager) { manager->getNetworkStats([completion, debugLog = std::move(debugLog)](TrafficStats stats, CallStats callStats) { FinalState finalState; finalState.debugLog = debugLog; @@ -187,7 +188,7 @@ int InstanceImpl::GetConnectionMaxLayer() { std::vector InstanceImpl::GetVersions() { std::vector result; result.push_back("2.7.7"); - result.push_back("3.0.0"); + result.push_back("5.0.0"); return result; } diff --git a/TMessagesProj/jni/voip/tgcalls/Manager.cpp b/TMessagesProj/jni/voip/tgcalls/Manager.cpp index a384064567..9ba5794bc3 100644 --- a/TMessagesProj/jni/voip/tgcalls/Manager.cpp +++ b/TMessagesProj/jni/voip/tgcalls/Manager.cpp @@ -98,7 +98,7 @@ _remoteBatteryLevelIsLowUpdated(std::move(descriptor.remoteBatteryLevelIsLowUpda _remotePrefferedAspectRatioUpdated(std::move(descriptor.remotePrefferedAspectRatioUpdated)), _signalingDataEmitted(std::move(descriptor.signalingDataEmitted)), _signalBarsUpdated(std::move(descriptor.signalBarsUpdated)), -_audioLevelUpdated(std::move(descriptor.audioLevelUpdated)), +_audioLevelsUpdated(std::move(descriptor.audioLevelsUpdated)), _createAudioDeviceModule(std::move(descriptor.createAudioDeviceModule)), _enableHighBitrateVideo(descriptor.config.enableHighBitrateVideo), _dataSaving(descriptor.config.dataSaving), @@ -118,7 +118,7 @@ _platformContext(descriptor.platformContext) { return uint32_t(0); }; _sendTransportMessage = [=](Message &&message) { - _networkManager->perform(RTC_FROM_HERE, [message = std::move(message)](NetworkManager *networkManager) { + _networkManager->perform([message = 
std::move(message)](NetworkManager *networkManager) { networkManager->sendMessage(message); }); }; @@ -139,9 +139,9 @@ void Manager::sendSignalingAsync(int delayMs, int cause) { } }; if (delayMs) { - _thread->PostDelayedTask(RTC_FROM_HERE, std::move(task), delayMs); + _thread->PostDelayedTask(std::move(task), webrtc::TimeDelta::Millis(delayMs)); } else { - _thread->PostTask(RTC_FROM_HERE, std::move(task)); + _thread->PostTask(std::move(task)); } } @@ -149,7 +149,7 @@ void Manager::start() { const auto weak = std::weak_ptr(shared_from_this()); const auto thread = _thread; const auto sendSignalingMessage = [=](Message &&message) { - thread->PostTask(RTC_FROM_HERE, [=, message = std::move(message)]() mutable { + thread->PostTask([=, message = std::move(message)]() mutable { const auto strong = weak.lock(); if (!strong) { return; @@ -167,7 +167,7 @@ void Manager::start() { rtcServers, std::move(proxy), [=](const NetworkManager::State &state) { - thread->PostTask(RTC_FROM_HERE, [=] { + thread->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -190,7 +190,7 @@ void Manager::start() { strong->_state = mappedState; strong->_stateUpdated(mappedState); - strong->_mediaManager->perform(RTC_FROM_HERE, [=](MediaManager *mediaManager) { + strong->_mediaManager->perform([=](MediaManager *mediaManager) { mediaManager->setIsConnected(state.isReadyToSendData); }); @@ -200,7 +200,7 @@ void Manager::start() { }); }, [=](DecryptedMessage &&message) { - thread->PostTask(RTC_FROM_HERE, [=, message = std::move(message)]() mutable { + thread->PostTask([=, message = std::move(message)]() mutable { if (const auto strong = weak.lock()) { strong->receiveMessage(std::move(message)); } @@ -210,20 +210,20 @@ void Manager::start() { [=](int delayMs, int cause) { const auto task = [=] { if (const auto strong = weak.lock()) { - strong->_networkManager->perform(RTC_FROM_HERE, [=](NetworkManager *networkManager) { + strong->_networkManager->perform([=](NetworkManager *networkManager) { networkManager->sendTransportService(cause); }); } }; if (delayMs) { - thread->PostDelayedTask(RTC_FROM_HERE, task, delayMs); + thread->PostDelayedTask(task, webrtc::TimeDelta::Millis(delayMs)); } else { - thread->PostTask(RTC_FROM_HERE, task); + thread->PostTask(task); } }); })); bool isOutgoing = _encryptionKey.isOutgoing; - _mediaManager.reset(new ThreadLocalObject(StaticThreads::getMediaThread(), [weak, isOutgoing, protocolVersion = _protocolVersion, thread, sendSignalingMessage, videoCapture = _videoCapture, mediaDevicesConfig = _mediaDevicesConfig, enableHighBitrateVideo = _enableHighBitrateVideo, signalBarsUpdated = _signalBarsUpdated, audioLevelUpdated = _audioLevelUpdated, preferredCodecs = _preferredCodecs, createAudioDeviceModule = _createAudioDeviceModule, platformContext = _platformContext]() { + _mediaManager.reset(new ThreadLocalObject(StaticThreads::getMediaThread(), [weak, isOutgoing, protocolVersion = _protocolVersion, thread, sendSignalingMessage, videoCapture = _videoCapture, mediaDevicesConfig = _mediaDevicesConfig, enableHighBitrateVideo = _enableHighBitrateVideo, signalBarsUpdated = _signalBarsUpdated, audioLevelsUpdated = _audioLevelsUpdated, preferredCodecs = _preferredCodecs, createAudioDeviceModule = _createAudioDeviceModule, platformContext = _platformContext]() { return new MediaManager( StaticThreads::getMediaThread(), isOutgoing, @@ -232,7 +232,7 @@ void Manager::start() { videoCapture, sendSignalingMessage, [=](Message &&message) { - thread->PostTask(RTC_FROM_HERE, [=, message = 
std::move(message)]() mutable { + thread->PostTask([=, message = std::move(message)]() mutable { const auto strong = weak.lock(); if (!strong) { return; @@ -241,16 +241,16 @@ void Manager::start() { }); }, signalBarsUpdated, - audioLevelUpdated, + audioLevelsUpdated, createAudioDeviceModule, enableHighBitrateVideo, preferredCodecs, platformContext); })); - _networkManager->perform(RTC_FROM_HERE, [](NetworkManager *networkManager) { + _networkManager->perform([](NetworkManager *networkManager) { networkManager->start(); }); - _mediaManager->perform(RTC_FROM_HERE, [](MediaManager *mediaManager) { + _mediaManager->perform([](MediaManager *mediaManager) { mediaManager->start(); }); } @@ -267,11 +267,11 @@ void Manager::receiveSignalingData(const std::vector &data) { void Manager::receiveMessage(DecryptedMessage &&message) { const auto data = &message.message.data; if (const auto candidatesList = absl::get_if(data)) { - _networkManager->perform(RTC_FROM_HERE, [message = std::move(message)](NetworkManager *networkManager) mutable { + _networkManager->perform([message = std::move(message)](NetworkManager *networkManager) mutable { networkManager->receiveSignalingMessage(std::move(message)); }); } else if (const auto videoFormats = absl::get_if(data)) { - _mediaManager->perform(RTC_FROM_HERE, [message = std::move(message)](MediaManager *mediaManager) mutable { + _mediaManager->perform([message = std::move(message)](MediaManager *mediaManager) mutable { mediaManager->receiveMessage(std::move(message)); }); } else if (const auto remoteMediaState = absl::get_if(data)) { @@ -280,7 +280,7 @@ void Manager::receiveMessage(DecryptedMessage &&message) { remoteMediaState->audio, remoteMediaState->video); } - _mediaManager->perform(RTC_FROM_HERE, [video = remoteMediaState->video](MediaManager *mediaManager) { + _mediaManager->perform([video = remoteMediaState->video](MediaManager *mediaManager) { mediaManager->remoteVideoStateUpdated(video); }); } else if (const auto remoteBatteryLevelIsLow = absl::get_if(data)) { @@ -298,7 +298,7 @@ void Manager::receiveMessage(DecryptedMessage &&message) { _remotePrefferedAspectRatioUpdated(value); } } - _mediaManager->perform(RTC_FROM_HERE, [=, message = std::move(message)](MediaManager *mediaManager) mutable { + _mediaManager->perform([=, message = std::move(message)](MediaManager *mediaManager) mutable { mediaManager->receiveMessage(std::move(message)); }); } @@ -311,31 +311,31 @@ void Manager::setVideoCapture(std::shared_ptr videoCaptur return; } _videoCapture = videoCapture; - _mediaManager->perform(RTC_FROM_HERE, [videoCapture](MediaManager *mediaManager) { + _mediaManager->perform([videoCapture](MediaManager *mediaManager) { mediaManager->setSendVideo(videoCapture); }); } void Manager::sendVideoDeviceUpdated() { - _mediaManager->perform(RTC_FROM_HERE, [](MediaManager *mediaManager) { + _mediaManager->perform([](MediaManager *mediaManager) { mediaManager->sendVideoDeviceUpdated(); }); } void Manager::setRequestedVideoAspect(float aspect) { - _mediaManager->perform(RTC_FROM_HERE, [aspect](MediaManager *mediaManager) { + _mediaManager->perform([aspect](MediaManager *mediaManager) { mediaManager->setRequestedVideoAspect(aspect); }); } void Manager::setMuteOutgoingAudio(bool mute) { - _mediaManager->perform(RTC_FROM_HERE, [mute](MediaManager *mediaManager) { + _mediaManager->perform([mute](MediaManager *mediaManager) { mediaManager->setMuteOutgoingAudio(mute); }); } void Manager::setIncomingVideoOutput(std::weak_ptr> sink) { - _mediaManager->perform(RTC_FROM_HERE, 
[sink](MediaManager *mediaManager) { + _mediaManager->perform([sink](MediaManager *mediaManager) { mediaManager->setIncomingVideoOutput(sink); }); } @@ -346,7 +346,7 @@ void Manager::setIsLowBatteryLevel(bool isLowBatteryLevel) { void Manager::setIsLocalNetworkLowCost(bool isLocalNetworkLowCost) { if (isLocalNetworkLowCost != _localNetworkIsLowCost) { - _networkManager->perform(RTC_FROM_HERE, [isLocalNetworkLowCost](NetworkManager *networkManager) { + _networkManager->perform([isLocalNetworkLowCost](NetworkManager *networkManager) { networkManager->setIsLocalNetworkLowCost(isLocalNetworkLowCost); }); @@ -356,19 +356,19 @@ void Manager::setIsLocalNetworkLowCost(bool isLocalNetworkLowCost) { } void Manager::getNetworkStats(std::function completion) { - _networkManager->perform(RTC_FROM_HERE, [thread = _thread, weak = std::weak_ptr(shared_from_this()), completion = std::move(completion), statsLogPath = _statsLogPath](NetworkManager *networkManager) { + _networkManager->perform([thread = _thread, weak = std::weak_ptr(shared_from_this()), completion = std::move(completion), statsLogPath = _statsLogPath](NetworkManager *networkManager) { auto networkStats = networkManager->getNetworkStats(); CallStats callStats; networkManager->fillCallStats(callStats); - thread->PostTask(RTC_FROM_HERE, [weak, networkStats, completion = std::move(completion), callStats = std::move(callStats), statsLogPath = statsLogPath] { + thread->PostTask([weak, networkStats, completion = std::move(completion), callStats = std::move(callStats), statsLogPath = statsLogPath] { const auto strong = weak.lock(); if (!strong) { return; } - strong->_mediaManager->perform(RTC_FROM_HERE, [networkStats, completion = std::move(completion), callStatsValue = std::move(callStats), statsLogPath = statsLogPath](MediaManager *mediaManager) { + strong->_mediaManager->perform([networkStats, completion = std::move(completion), callStatsValue = std::move(callStats), statsLogPath = statsLogPath](MediaManager *mediaManager) { CallStats callStats = std::move(callStatsValue); mediaManager->fillCallStats(callStats); dumpStatsLog(statsLogPath, callStats); @@ -417,7 +417,7 @@ void Manager::updateCurrentResolvedNetworkStatus() { if (!_currentResolvedNetworkStatus.has_value() || *_currentResolvedNetworkStatus != status) { _currentResolvedNetworkStatus = status; - _mediaManager->perform(RTC_FROM_HERE, [status](MediaManager *mediaManager) { + _mediaManager->perform([status](MediaManager *mediaManager) { mediaManager->setNetworkParameters(status.isLowCost, status.isLowDataRequested); }); } @@ -436,31 +436,31 @@ void Manager::sendInitialSignalingMessages() { } void Manager::setAudioInputDevice(std::string id) { - _mediaManager->perform(RTC_FROM_HERE, [id](MediaManager *mediaManager) { + _mediaManager->perform([id](MediaManager *mediaManager) { mediaManager->setAudioInputDevice(id); }); } void Manager::setAudioOutputDevice(std::string id) { - _mediaManager->perform(RTC_FROM_HERE, [id](MediaManager *mediaManager) { + _mediaManager->perform([id](MediaManager *mediaManager) { mediaManager->setAudioOutputDevice(id); }); } void Manager::setInputVolume(float level) { - _mediaManager->perform(RTC_FROM_HERE, [level](MediaManager *mediaManager) { + _mediaManager->perform([level](MediaManager *mediaManager) { mediaManager->setInputVolume(level); }); } void Manager::setOutputVolume(float level) { - _mediaManager->perform(RTC_FROM_HERE, [level](MediaManager *mediaManager) { + _mediaManager->perform([level](MediaManager *mediaManager) { 
mediaManager->setOutputVolume(level); }); } void Manager::addExternalAudioSamples(std::vector &&samples) { - _mediaManager->perform(RTC_FROM_HERE, [samples = std::move(samples)](MediaManager *mediaManager) mutable { + _mediaManager->perform([samples = std::move(samples)](MediaManager *mediaManager) mutable { mediaManager->addExternalAudioSamples(std::move(samples)); }); } diff --git a/TMessagesProj/jni/voip/tgcalls/Manager.h b/TMessagesProj/jni/voip/tgcalls/Manager.h index a4a4a8f6d3..bf449d519e 100644 --- a/TMessagesProj/jni/voip/tgcalls/Manager.h +++ b/TMessagesProj/jni/voip/tgcalls/Manager.h @@ -69,7 +69,7 @@ class Manager final : public std::enable_shared_from_this { std::function _remotePrefferedAspectRatioUpdated; std::function &)> _signalingDataEmitted; std::function _signalBarsUpdated; - std::function _audioLevelUpdated; + std::function _audioLevelsUpdated; std::function(webrtc::TaskQueueFactory*)> _createAudioDeviceModule; std::function _sendSignalingMessage; std::function _sendTransportMessage; diff --git a/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp b/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp index 0f5d5558f8..3aa19ae855 100644 --- a/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp +++ b/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp @@ -25,6 +25,9 @@ #ifdef WEBRTC_IOS #include "platform/darwin/iOS/tgcalls_audio_device_module_ios.h" #endif + +#include "FieldTrialsConfig.h" + namespace tgcalls { namespace { @@ -242,7 +245,7 @@ MediaManager::MediaManager( std::function sendSignalingMessage, std::function sendTransportMessage, std::function signalBarsUpdated, - std::function audioLevelUpdated, + std::function audioLevelsUpdated, std::function(webrtc::TaskQueueFactory*)> createAudioDeviceModule, bool enableHighBitrateVideo, std::vector preferredCodecs, @@ -253,7 +256,7 @@ _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()), _sendSignalingMessage(std::move(sendSignalingMessage)), _sendTransportMessage(std::move(sendTransportMessage)), _signalBarsUpdated(std::move(signalBarsUpdated)), -_audioLevelUpdated(std::move(audioLevelUpdated)), +_audioLevelsUpdated(std::move(audioLevelsUpdated)), _createAudioDeviceModule(std::move(createAudioDeviceModule)), _protocolVersion(protocolVersion), _outgoingVideoState(videoCapture ? 
VideoState::Active : VideoState::Inactive), @@ -292,6 +295,7 @@ _platformContext(platformContext) { "WebRTC-FlexFEC-03/Enabled/" "WebRTC-FlexFEC-03-Advertised/Enabled/" "WebRTC-Turn-AllowSystemPorts/Enabled/" + "WebRTC-Audio-iOS-Holding/Enabled/" ); PlatformInterface::SharedInstance()->configurePlatformAudio(); @@ -314,7 +318,7 @@ _platformContext(platformContext) { webrtc::AudioProcessingBuilder builder; std::unique_ptr audioProcessor = std::make_unique([this](float level) { - this->_thread->PostTask(RTC_FROM_HERE, [this, level](){ + this->_thread->PostTask([this, level](){ auto strong = this; strong->_currentMyAudioLevel = level; }); @@ -322,85 +326,95 @@ _platformContext(platformContext) { builder.SetCapturePostProcessing(std::move(audioProcessor)); mediaDeps.audio_processing = builder.Create(); - _audioDeviceModule = this->createAudioDeviceModule(); - if (!_audioDeviceModule) { - return; - } - mediaDeps.adm = _audioDeviceModule; - - _mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps)); - _mediaEngine->Init(); - - /*setAudioInputDevice(devicesConfig.audioInputId); - setAudioOutputDevice(devicesConfig.audioOutputId); - setInputVolume(devicesConfig.inputVolume); - setOutputVolume(devicesConfig.outputVolume);*/ - - webrtc::Call::Config callConfig(_eventLog.get()); - callConfig.task_queue_factory = _taskQueueFactory.get(); - callConfig.trials = &_fieldTrials; - callConfig.audio_state = _mediaEngine->voice().GetAudioState(); - _call.reset(webrtc::Call::Create(callConfig)); - - cricket::AudioOptions audioOptions; - audioOptions.echo_cancellation = true; - audioOptions.noise_suppression = true; - audioOptions.audio_jitter_buffer_fast_accelerate = true; - - std::vector streamIds; - streamIds.push_back("1"); - - _audioChannel.reset(_mediaEngine->voice().CreateMediaChannel(_call.get(), cricket::MediaConfig(), audioOptions, webrtc::CryptoOptions::NoGcm())); - _videoChannel.reset(_mediaEngine->video().CreateMediaChannel(_call.get(), cricket::MediaConfig(), cricket::VideoOptions(), webrtc::CryptoOptions::NoGcm(), _videoBitrateAllocatorFactory.get())); - - const uint32_t opusClockrate = 48000; - const uint16_t opusSdpPayload = 111; - const char *opusSdpName = "opus"; - const uint8_t opusSdpChannels = 2; - const uint32_t opusSdpBitrate = 0; - - const uint8_t opusMinBitrateKbps = 6; - const uint8_t opusMaxBitrateKbps = 32; - const uint8_t opusStartBitrateKbps = 8; - const uint8_t opusPTimeMs = 120; - - cricket::AudioCodec opusCodec(opusSdpPayload, opusSdpName, opusClockrate, opusSdpBitrate, opusSdpChannels); - opusCodec.AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc)); - opusCodec.SetParam(cricket::kCodecParamMinBitrate, opusMinBitrateKbps); - opusCodec.SetParam(cricket::kCodecParamStartBitrate, opusStartBitrateKbps); - opusCodec.SetParam(cricket::kCodecParamMaxBitrate, opusMaxBitrateKbps); - opusCodec.SetParam(cricket::kCodecParamUseInbandFec, 1); - opusCodec.SetParam(cricket::kCodecParamPTime, opusPTimeMs); - - cricket::AudioSendParameters audioSendPrameters; - audioSendPrameters.codecs.push_back(opusCodec); - audioSendPrameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, 1); - audioSendPrameters.options.echo_cancellation = true; - //audioSendPrameters.options.experimental_ns = false; - audioSendPrameters.options.noise_suppression = true; - audioSendPrameters.options.auto_gain_control = true; - //audioSendPrameters.options.highpass_filter = false; - audioSendPrameters.options.typing_detection = false; - 
//audioSendPrameters.max_bandwidth_bps = 16000; - audioSendPrameters.rtcp.reduced_size = true; - audioSendPrameters.rtcp.remote_estimate = true; - _audioChannel->SetSendParameters(audioSendPrameters); - _audioChannel->AddSendStream(cricket::StreamParams::CreateLegacy(_ssrcAudio.outgoing)); - _audioChannel->SetInterface(_audioNetworkInterface.get()); - - cricket::AudioRecvParameters audioRecvParameters; - audioRecvParameters.codecs.emplace_back(opusSdpPayload, opusSdpName, opusClockrate, opusSdpBitrate, opusSdpChannels); - audioRecvParameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, 1); - audioRecvParameters.rtcp.reduced_size = true; - audioRecvParameters.rtcp.remote_estimate = true; - - _audioChannel->SetRecvParameters(audioRecvParameters); - cricket::StreamParams audioRecvStreamParams = cricket::StreamParams::CreateLegacy(_ssrcAudio.incoming); - audioRecvStreamParams.set_stream_ids(streamIds); - _audioChannel->AddRecvStream(audioRecvStreamParams); - _audioChannel->SetPlayout(true); - - _videoChannel->SetInterface(_videoNetworkInterface.get()); + StaticThreads::getWorkerThread()->BlockingCall([&] { + _audioDeviceModule = this->createAudioDeviceModule(); + + /*if (!_audioDeviceModule) { + return; + }*/ + mediaDeps.adm = _audioDeviceModule; + + _mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps)); + _mediaEngine->Init(); + }); + + StaticThreads::getWorkerThread()->BlockingCall([&] { + /*setAudioInputDevice(devicesConfig.audioInputId); + setAudioOutputDevice(devicesConfig.audioOutputId); + setInputVolume(devicesConfig.inputVolume); + setOutputVolume(devicesConfig.outputVolume);*/ + + webrtc::Call::Config callConfig(_eventLog.get()); + callConfig.task_queue_factory = _taskQueueFactory.get(); + callConfig.trials = &fieldTrialsBasedConfig; + callConfig.audio_state = _mediaEngine->voice().GetAudioState(); + _call.reset(webrtc::Call::Create(callConfig)); + + cricket::AudioOptions audioOptions; + audioOptions.echo_cancellation = true; + audioOptions.noise_suppression = true; + audioOptions.audio_jitter_buffer_fast_accelerate = true; + + std::vector streamIds; + streamIds.push_back("1"); + + _audioChannel.reset(_mediaEngine->voice().CreateMediaChannel(_call.get(), cricket::MediaConfig(), audioOptions, webrtc::CryptoOptions::NoGcm())); + _videoChannel.reset(_mediaEngine->video().CreateMediaChannel(_call.get(), cricket::MediaConfig(), cricket::VideoOptions(), webrtc::CryptoOptions::NoGcm(), _videoBitrateAllocatorFactory.get())); + + const uint32_t opusClockrate = 48000; + const uint16_t opusSdpPayload = 111; + const char *opusSdpName = "opus"; + const uint8_t opusSdpChannels = 2; + const uint32_t opusSdpBitrate = 0; + + const uint8_t opusMinBitrateKbps = 6; + const uint8_t opusMaxBitrateKbps = 32; + const uint8_t opusStartBitrateKbps = 8; + const uint8_t opusPTimeMs = 120; + + cricket::AudioCodec opusCodec(opusSdpPayload, opusSdpName, opusClockrate, opusSdpBitrate, opusSdpChannels); + opusCodec.AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc)); + opusCodec.SetParam(cricket::kCodecParamMinBitrate, opusMinBitrateKbps); + opusCodec.SetParam(cricket::kCodecParamStartBitrate, opusStartBitrateKbps); + opusCodec.SetParam(cricket::kCodecParamMaxBitrate, opusMaxBitrateKbps); + opusCodec.SetParam(cricket::kCodecParamUseInbandFec, 1); + opusCodec.SetParam(cricket::kCodecParamPTime, opusPTimeMs); + + cricket::AudioSendParameters audioSendPrameters; + audioSendPrameters.codecs.push_back(opusCodec); + 
audioSendPrameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, 1); + #if WEBRTC_IOS + audioSendPrameters.options.echo_cancellation = false; + audioSendPrameters.options.auto_gain_control = false; + #else + audioSendPrameters.options.echo_cancellation = true; + audioSendPrameters.options.auto_gain_control = true; + #endif + //audioSendPrameters.options.experimental_ns = false; + audioSendPrameters.options.noise_suppression = true; + //audioSendPrameters.options.highpass_filter = false; + //audioSendPrameters.options.typing_detection = false; + //audioSendPrameters.max_bandwidth_bps = 16000; + audioSendPrameters.rtcp.reduced_size = true; + audioSendPrameters.rtcp.remote_estimate = true; + _audioChannel->SetSendParameters(audioSendPrameters); + _audioChannel->AddSendStream(cricket::StreamParams::CreateLegacy(_ssrcAudio.outgoing)); + _audioChannel->SetInterface(_audioNetworkInterface.get()); + + cricket::AudioRecvParameters audioRecvParameters; + audioRecvParameters.codecs.emplace_back(opusSdpPayload, opusSdpName, opusClockrate, opusSdpBitrate, opusSdpChannels); + audioRecvParameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, 1); + audioRecvParameters.rtcp.reduced_size = true; + audioRecvParameters.rtcp.remote_estimate = true; + + _audioChannel->SetRecvParameters(audioRecvParameters); + cricket::StreamParams audioRecvStreamParams = cricket::StreamParams::CreateLegacy(_ssrcAudio.incoming); + audioRecvStreamParams.set_stream_ids(streamIds); + _audioChannel->AddRecvStream(audioRecvStreamParams); + _audioChannel->SetPlayout(true); + + _videoChannel->SetInterface(_videoNetworkInterface.get()); + }); adjustBitratePreferences(true); } @@ -436,13 +450,15 @@ void MediaManager::start() { // Here we hope that thread outlives the sink rtc::Thread *thread = _thread; std::unique_ptr incomingSink(new AudioTrackSinkInterfaceImpl([weak, thread](float level) { - thread->PostTask(RTC_FROM_HERE, [weak, level] { + thread->PostTask([weak, level] { if (const auto strong = weak.lock()) { strong->_currentAudioLevel = level; } }); })); - _audioChannel->SetRawAudioSink(_ssrcAudio.incoming, std::move(incomingSink)); + StaticThreads::getWorkerThread()->BlockingCall([&] { + _audioChannel->SetRawAudioSink(_ssrcAudio.incoming, std::move(incomingSink)); + }); _sendSignalingMessage({ _myVideoFormats }); @@ -451,7 +467,7 @@ void MediaManager::start() { } beginStatsTimer(3000); - if (_audioLevelUpdated != nullptr) { + if (_audioLevelsUpdated != nullptr) { beginLevelsTimer(100); } } @@ -461,44 +477,59 @@ MediaManager::~MediaManager() { RTC_LOG(LS_INFO) << "MediaManager::~MediaManager()"; - _call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkDown); - _call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkDown); + StaticThreads::getWorkerThread()->BlockingCall([&] { + _call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkDown); + _call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkDown); - _audioChannel->OnReadyToSend(false); - _audioChannel->SetSend(false); - _audioChannel->SetAudioSend(_ssrcAudio.outgoing, false, nullptr, &_audioSource); + _audioChannel->OnReadyToSend(false); + _audioChannel->SetSend(false); + _audioChannel->SetAudioSend(_ssrcAudio.outgoing, false, nullptr, &_audioSource); - _audioChannel->SetPlayout(false); + _audioChannel->SetPlayout(false); - _audioChannel->RemoveRecvStream(_ssrcAudio.incoming); - _audioChannel->RemoveSendStream(_ssrcAudio.outgoing); + 
_audioChannel->RemoveRecvStream(_ssrcAudio.incoming); + _audioChannel->RemoveSendStream(_ssrcAudio.outgoing); - _audioChannel->SetInterface(nullptr); + _audioChannel->SetInterface(nullptr); + + _audioChannel.reset(); + }); setSendVideo(nullptr); if (computeIsReceivingVideo()) { - _videoChannel->RemoveRecvStream(_ssrcVideo.incoming); - if (_enableFlexfec) { - _videoChannel->RemoveRecvStream(_ssrcVideo.fecIncoming); - } + StaticThreads::getWorkerThread()->BlockingCall([&] { + _videoChannel->RemoveRecvStream(_ssrcVideo.incoming); + if (_enableFlexfec) { + _videoChannel->RemoveRecvStream(_ssrcVideo.fecIncoming); + } + }); } if (_didConfigureVideo) { - _videoChannel->OnReadyToSend(false); - _videoChannel->SetSend(false); + StaticThreads::getWorkerThread()->BlockingCall([&] { + _videoChannel->OnReadyToSend(false); + _videoChannel->SetSend(false); - if (_enableFlexfec) { - _videoChannel->RemoveSendStream(_ssrcVideo.outgoing); - _videoChannel->RemoveSendStream(_ssrcVideo.fecOutgoing); - } else { - _videoChannel->RemoveSendStream(_ssrcVideo.outgoing); - } + if (_enableFlexfec) { + _videoChannel->RemoveSendStream(_ssrcVideo.outgoing); + _videoChannel->RemoveSendStream(_ssrcVideo.fecOutgoing); + } else { + _videoChannel->RemoveSendStream(_ssrcVideo.outgoing); + } + }); } - _videoChannel->SetInterface(nullptr); + StaticThreads::getWorkerThread()->BlockingCall([&] { + _videoChannel->SetInterface(nullptr); - _audioDeviceModule = nullptr; + _videoChannel.reset(); + + _audioDeviceModule = nullptr; + + _call.reset(); + _mediaEngine.reset(); + }); } void MediaManager::setIsConnected(bool isConnected) { @@ -512,22 +543,24 @@ void MediaManager::setIsConnected(bool isConnected) { } _isConnected = isConnected; - if (_isConnected) { - _call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkUp); - _call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp); - } else { - _call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkDown); - _call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkDown); - } - if (_audioChannel) { - _audioChannel->OnReadyToSend(_isConnected); - _audioChannel->SetSend(_isConnected); - _audioChannel->SetAudioSend(_ssrcAudio.outgoing, _isConnected && (_outgoingAudioState == AudioState::Active), nullptr, &_audioSource); - } - if (computeIsSendingVideo() && _videoChannel) { - _videoChannel->OnReadyToSend(_isConnected); - _videoChannel->SetSend(_isConnected); - } + StaticThreads::getWorkerThread()->BlockingCall([&] { + if (_isConnected) { + _call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkUp); + _call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp); + } else { + _call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkDown); + _call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkDown); + } + if (_audioChannel) { + _audioChannel->OnReadyToSend(_isConnected); + _audioChannel->SetSend(_isConnected); + _audioChannel->SetAudioSend(_ssrcAudio.outgoing, _isConnected && (_outgoingAudioState == AudioState::Active), nullptr, &_audioSource); + } + if (computeIsSendingVideo() && _videoChannel) { + _videoChannel->OnReadyToSend(_isConnected); + _videoChannel->SetSend(_isConnected); + } + }); if (isFirstConnection) { sendVideoParametersMessage(); sendOutgoingMediaStateMessage(); @@ -545,32 +578,35 @@ void MediaManager::sendOutgoingMediaStateMessage() { void MediaManager::beginStatsTimer(int timeoutMs) { const auto weak = 
std::weak_ptr(shared_from_this()); - _thread->PostDelayedTask(RTC_FROM_HERE, [weak]() { + _thread->PostDelayedTask([weak]() { auto strong = weak.lock(); if (!strong) { return; } strong->collectStats(); - }, timeoutMs); + }, webrtc::TimeDelta::Millis(timeoutMs)); } void MediaManager::beginLevelsTimer(int timeoutMs) { const auto weak = std::weak_ptr(shared_from_this()); - _thread->PostDelayedTask(RTC_FROM_HERE, [weak]() { + _thread->PostDelayedTask([weak]() { auto strong = weak.lock(); if (!strong) { return; } - float effectiveLevel = fmaxf(strong->_currentAudioLevel, strong->_currentMyAudioLevel); - strong->_audioLevelUpdated(effectiveLevel); + strong->_audioLevelsUpdated(strong->_currentMyAudioLevel, strong->_currentAudioLevel); strong->beginLevelsTimer(100); - }, timeoutMs); + }, webrtc::TimeDelta::Millis(timeoutMs)); } void MediaManager::collectStats() { - auto stats = _call->GetStats(); + webrtc::Call::Stats stats; + StaticThreads::getWorkerThread()->BlockingCall([&] { + stats = _call->GetStats(); + }); + float bitrateNorm = 16.0f; switch (_outgoingVideoState) { case VideoState::Active: @@ -642,20 +678,27 @@ void MediaManager::setSendVideo(std::shared_ptr videoCapt const auto wasReceiving = computeIsReceivingVideo(); if (_videoCapture) { - GetVideoCaptureAssumingSameThread(_videoCapture.get())->setStateUpdated(nullptr); + _videoCaptureGuard = nullptr; + GetVideoCaptureAssumingSameThread(_videoCapture.get())->setStateUpdated(nullptr); } _videoCapture = videoCapture; if (_videoCapture) { _videoCapture->setPreferredAspectRatio(_preferredAspectRatio); const auto thread = _thread; - const auto weak = std::weak_ptr(shared_from_this()); const auto object = GetVideoCaptureAssumingSameThread(_videoCapture.get()); _isScreenCapture = object->isScreenCapture(); + _videoCaptureGuard = std::make_shared(true); + const auto guard = std::weak_ptr{_videoCaptureGuard}; object->setStateUpdated([=](VideoState state) { - thread->PostTask(RTC_FROM_HERE, [=] { - if (const auto strong = weak.lock()) { - strong->setOutgoingVideoState(state); + thread->PostTask([=] { + // Checking this special guard instead of weak_ptr(this) + // ensures that we won't call setOutgoingVideoState after + // the _videoCapture was already changed and the old + // stateUpdated was already null-ed, but the event + // at that time was already posted. 
+ if (guard.lock()) { + setOutgoingVideoState(state); } }); }); @@ -666,23 +709,27 @@ void MediaManager::setSendVideo(std::shared_ptr videoCapt setOutgoingVideoState(VideoState::Inactive); } - if (_enableFlexfec) { - _videoChannel->RemoveSendStream(_ssrcVideo.outgoing); - _videoChannel->RemoveSendStream(_ssrcVideo.fecOutgoing); - } else { - _videoChannel->RemoveSendStream(_ssrcVideo.outgoing); - } + StaticThreads::getWorkerThread()->BlockingCall([&] { + if (_enableFlexfec) { + _videoChannel->RemoveSendStream(_ssrcVideo.outgoing); + _videoChannel->RemoveSendStream(_ssrcVideo.fecOutgoing); + } else { + _videoChannel->RemoveSendStream(_ssrcVideo.outgoing); + } - if (_enableFlexfec) { - cricket::StreamParams videoSendStreamParams; - cricket::SsrcGroup videoSendSsrcGroup(cricket::kFecFrSsrcGroupSemantics, {_ssrcVideo.outgoing, _ssrcVideo.fecOutgoing}); - videoSendStreamParams.ssrcs = {_ssrcVideo.outgoing}; - videoSendStreamParams.ssrc_groups.push_back(videoSendSsrcGroup); - videoSendStreamParams.cname = "cname"; - _videoChannel->AddSendStream(videoSendStreamParams); - } else { - _videoChannel->AddSendStream(cricket::StreamParams::CreateLegacy(_ssrcVideo.outgoing)); - } + if (videoCapture) { + if (_enableFlexfec) { + cricket::StreamParams videoSendStreamParams; + cricket::SsrcGroup videoSendSsrcGroup(cricket::kFecFrSsrcGroupSemantics, {_ssrcVideo.outgoing, _ssrcVideo.fecOutgoing}); + videoSendStreamParams.ssrcs = {_ssrcVideo.outgoing}; + videoSendStreamParams.ssrc_groups.push_back(videoSendSsrcGroup); + videoSendStreamParams.cname = "cname"; + _videoChannel->AddSendStream(videoSendStreamParams); + } else { + _videoChannel->AddSendStream(cricket::StreamParams::CreateLegacy(_ssrcVideo.outgoing)); + } + } + }); checkIsSendingVideoChanged(wasSending); checkIsReceivingVideoChanged(wasReceiving); @@ -747,18 +794,20 @@ void MediaManager::configureSendingVideoIfNeeded() { break; } videoSendParameters.rtcp.remote_estimate = true; - _videoChannel->SetSendParameters(videoSendParameters); + StaticThreads::getWorkerThread()->BlockingCall([&] { + _videoChannel->SetSendParameters(videoSendParameters); - if (_enableFlexfec) { - cricket::StreamParams videoSendStreamParams; - cricket::SsrcGroup videoSendSsrcGroup(cricket::kFecFrSsrcGroupSemantics, {_ssrcVideo.outgoing, _ssrcVideo.fecOutgoing}); - videoSendStreamParams.ssrcs = {_ssrcVideo.outgoing}; - videoSendStreamParams.ssrc_groups.push_back(videoSendSsrcGroup); - videoSendStreamParams.cname = "cname"; - _videoChannel->AddSendStream(videoSendStreamParams); - } else { - _videoChannel->AddSendStream(cricket::StreamParams::CreateLegacy(_ssrcVideo.outgoing)); - } + if (_enableFlexfec) { + cricket::StreamParams videoSendStreamParams; + cricket::SsrcGroup videoSendSsrcGroup(cricket::kFecFrSsrcGroupSemantics, {_ssrcVideo.outgoing, _ssrcVideo.fecOutgoing}); + videoSendStreamParams.ssrcs = {_ssrcVideo.outgoing}; + videoSendStreamParams.ssrc_groups.push_back(videoSendSsrcGroup); + videoSendStreamParams.cname = "cname"; + _videoChannel->AddSendStream(videoSendStreamParams); + } else { + _videoChannel->AddSendStream(cricket::StreamParams::CreateLegacy(_ssrcVideo.outgoing)); + } + }); adjustBitratePreferences(true); } @@ -770,18 +819,24 @@ void MediaManager::checkIsSendingVideoChanged(bool wasSending) { } else if (sending) { configureSendingVideoIfNeeded(); - if (_enableFlexfec) { - _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, GetVideoCaptureAssumingSameThread(_videoCapture.get())->source()); - _videoChannel->SetVideoSend(_ssrcVideo.fecOutgoing, NULL, nullptr); - 
} else { - _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, GetVideoCaptureAssumingSameThread(_videoCapture.get())->source()); - } + rtc::scoped_refptr source = GetVideoCaptureAssumingSameThread(_videoCapture.get())->source(); - _videoChannel->OnReadyToSend(_isConnected); - _videoChannel->SetSend(_isConnected); + StaticThreads::getWorkerThread()->BlockingCall([&] { + if (_enableFlexfec) { + _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, source.get()); + _videoChannel->SetVideoSend(_ssrcVideo.fecOutgoing, NULL, nullptr); + } else { + _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, source.get()); + } + + _videoChannel->OnReadyToSend(_isConnected); + _videoChannel->SetSend(_isConnected); + }); } else { - _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, nullptr); - _videoChannel->SetVideoSend(_ssrcVideo.fecOutgoing, NULL, nullptr); + StaticThreads::getWorkerThread()->BlockingCall([&] { + _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, nullptr); + _videoChannel->SetVideoSend(_ssrcVideo.fecOutgoing, NULL, nullptr); + }); } adjustBitratePreferences(true); @@ -886,16 +941,21 @@ void MediaManager::checkIsReceivingVideoChanged(bool wasReceiving) { streamIds.push_back("1"); videoRecvStreamParams.set_stream_ids(streamIds); - _videoChannel->SetRecvParameters(videoRecvParameters); - _videoChannel->AddRecvStream(videoRecvStreamParams); _readyToReceiveVideo = true; - _videoChannel->SetSink(_ssrcVideo.incoming, _incomingVideoSinkProxy.get()); + StaticThreads::getWorkerThread()->BlockingCall([&] { + _videoChannel->SetRecvParameters(videoRecvParameters); + _videoChannel->AddRecvStream(videoRecvStreamParams); + _videoChannel->SetSink(_ssrcVideo.incoming, _incomingVideoSinkProxy.get()); + }); } } void MediaManager::setMuteOutgoingAudio(bool mute) { setOutgoingAudioState(mute ? 
AudioState::Muted : AudioState::Active); - _audioChannel->SetAudioSend(_ssrcAudio.outgoing, _isConnected && (_outgoingAudioState == AudioState::Active), nullptr, &_audioSource); + + StaticThreads::getWorkerThread()->BlockingCall([&] { + _audioChannel->SetAudioSend(_ssrcAudio.outgoing, _isConnected && (_outgoingAudioState == AudioState::Active), nullptr, &_audioSource); + }); } void MediaManager::setOutgoingAudioState(AudioState state) { @@ -926,19 +986,23 @@ void MediaManager::receiveMessage(DecryptedMessage &&message) { if (webrtc::IsRtcpPacket(audio->data)) { RTC_LOG(LS_VERBOSE) << "Deliver audio RTCP"; } - if (webrtc::IsRtcpPacket(audio->data)) { - _call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, audio->data, -1); - } else { - _call->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO, audio->data, -1); - } + StaticThreads::getWorkerThread()->BlockingCall([&] { + if (webrtc::IsRtcpPacket(audio->data)) { + _call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, audio->data, -1); + } else { + _call->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO, audio->data, -1); + } + }); } else if (const auto video = absl::get_if(data)) { if (_videoChannel) { if (_readyToReceiveVideo) { - if (webrtc::IsRtcpPacket(video->data)) { - _call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, video->data, -1); - } else { - _call->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO, video->data, -1); - } + StaticThreads::getWorkerThread()->BlockingCall([&] { + if (webrtc::IsRtcpPacket(video->data)) { + _call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, video->data, -1); + } else { + _call->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO, video->data, -1); + } + }); } else { // maybe we need to queue packets for some time? } diff --git a/TMessagesProj/jni/voip/tgcalls/MediaManager.h b/TMessagesProj/jni/voip/tgcalls/MediaManager.h index f4e321506e..6eb9a1367c 100644 --- a/TMessagesProj/jni/voip/tgcalls/MediaManager.h +++ b/TMessagesProj/jni/voip/tgcalls/MediaManager.h @@ -47,7 +47,7 @@ class MediaManager : public sigslot::has_slots<>, public std::enable_shared_from std::function sendSignalingMessage, std::function sendTransportMessage, std::function signalBarsUpdated, - std::function audioLevelUpdated, + std::function audioLevelsUpdated, std::function(webrtc::TaskQueueFactory*)> createAudioDeviceModule, bool enableHighBitrateVideo, std::vector preferredCodecs, @@ -130,7 +130,7 @@ class MediaManager : public sigslot::has_slots<>, public std::enable_shared_from std::function _sendSignalingMessage; std::function _sendTransportMessage; std::function _signalBarsUpdated; - std::function _audioLevelUpdated; + std::function _audioLevelsUpdated; std::function(webrtc::TaskQueueFactory*)> _createAudioDeviceModule; SSRC _ssrcAudio; @@ -152,13 +152,13 @@ class MediaManager : public sigslot::has_slots<>, public std::enable_shared_from std::unique_ptr _mediaEngine; std::unique_ptr _call; - webrtc::FieldTrialBasedConfig _fieldTrials; webrtc::LocalAudioSinkAdapter _audioSource; rtc::scoped_refptr _audioDeviceModule; std::unique_ptr _audioChannel; std::unique_ptr _videoChannel; std::unique_ptr _videoBitrateAllocatorFactory; std::shared_ptr _videoCapture; + std::shared_ptr _videoCaptureGuard; bool _isScreenCapture = false; std::shared_ptr _incomingVideoSinkProxy; diff --git a/TMessagesProj/jni/voip/tgcalls/Message.cpp b/TMessagesProj/jni/voip/tgcalls/Message.cpp index 86a53bb257..113579b110 100644 --- a/TMessagesProj/jni/voip/tgcalls/Message.cpp +++ b/TMessagesProj/jni/voip/tgcalls/Message.cpp @@ -384,22 
+384,22 @@ absl::optional DeserializeRawMessage( if (!reader.Length()) { return absl::nullopt; } - + uint32_t length = 0; if (!reader.ReadUInt32(&length)) { return absl::nullopt; } - - if (length < 0 || length > 1024 * 1024) { + + if (/*length < 0 || */length > 1024 * 1024) { return absl::nullopt; } - + rtc::CopyOnWriteBuffer result; result.SetSize(length); if (!reader.ReadBytes((char *)result.MutableData(), result.size())) { return absl::nullopt; } - + return result; } diff --git a/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp b/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp index e8053db566..dd74752ef6 100644 --- a/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp +++ b/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp @@ -3,11 +3,11 @@ #include "Message.h" #include "p2p/base/basic_packet_socket_factory.h" +#include "v2/ReflectorRelayPortFactory.h" #include "p2p/client/basic_port_allocator.h" #include "p2p/base/p2p_transport_channel.h" #include "p2p/base/basic_async_resolver_factory.h" #include "api/packet_socket_factory.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "p2p/base/ice_credentials_iterator.h" #include "api/jsep_ice_candidate.h" #include "rtc_base/network_monitor_factory.h" @@ -110,13 +110,15 @@ NetworkManager::~NetworkManager() { void NetworkManager::start() { _socketFactory.reset(new rtc::BasicPacketSocketFactory(_thread->socketserver())); - _networkManager = std::make_unique(_networkMonitorFactory.get()); + _networkManager = std::make_unique(_networkMonitorFactory.get(), _thread->socketserver()); if (_enableStunMarking) { _turnCustomizer.reset(new TurnCustomizerImpl()); } - _portAllocator.reset(new cricket::BasicPortAllocator(_networkManager.get(), _socketFactory.get(), _turnCustomizer.get(), nullptr)); + _relayPortFactory.reset(new ReflectorRelayPortFactory(_rtcServers)); + + _portAllocator.reset(new cricket::BasicPortAllocator(_networkManager.get(), _socketFactory.get(), _turnCustomizer.get(), _relayPortFactory.get())); uint32_t flags = _portAllocator->flags(); @@ -154,6 +156,10 @@ void NetworkManager::start() { std::vector turnServers; for (auto &server : _rtcServers) { + if (server.isTcp) { + continue; + } + if (server.isTurn) { turnServers.push_back(cricket::RelayServerConfig( rtc::SocketAddress(server.host, server.port), @@ -170,7 +176,12 @@ void NetworkManager::start() { _portAllocator->SetConfiguration(stunServers, turnServers, 2, webrtc::NO_PRUNE, _turnCustomizer.get()); _asyncResolverFactory = std::make_unique(); - _transportChannel.reset(new cricket::P2PTransportChannel("transport", 0, _portAllocator.get(), _asyncResolverFactory.get(), nullptr)); + + webrtc::IceTransportInit iceTransportInit; + iceTransportInit.set_port_allocator(_portAllocator.get()); + iceTransportInit.set_async_resolver_factory(_asyncResolverFactory.get()); + + _transportChannel = cricket::P2PTransportChannel::Create("transport", 0, std::move(iceTransportInit)); cricket::IceConfig iceConfig; iceConfig.continual_gathering_policy = cricket::GATHER_CONTINUALLY; @@ -275,7 +286,7 @@ void NetworkManager::logCurrentNetworkState() { void NetworkManager::checkConnectionTimeout() { const auto weak = std::weak_ptr(shared_from_this()); - _thread->PostDelayedTask(RTC_FROM_HERE, [weak]() { + _thread->PostDelayedTask([weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -292,7 +303,7 @@ void NetworkManager::checkConnectionTimeout() { } strong->checkConnectionTimeout(); - }, 1000); + }, webrtc::TimeDelta::Millis(1000)); } void 
NetworkManager::candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate) { diff --git a/TMessagesProj/jni/voip/tgcalls/NetworkManager.h b/TMessagesProj/jni/voip/tgcalls/NetworkManager.h index b508500460..c6c1cb5a21 100644 --- a/TMessagesProj/jni/voip/tgcalls/NetworkManager.h +++ b/TMessagesProj/jni/voip/tgcalls/NetworkManager.h @@ -26,6 +26,7 @@ namespace cricket { class BasicPortAllocator; class P2PTransportChannel; class IceTransportInternal; +class RelayPortFactoryInterface; } // namespace cricket namespace webrtc { @@ -98,6 +99,7 @@ class NetworkManager : public sigslot::has_slots<>, public std::enable_shared_fr std::unique_ptr _socketFactory; std::unique_ptr _networkManager; std::unique_ptr _turnCustomizer; + std::unique_ptr _relayPortFactory; std::unique_ptr _portAllocator; std::unique_ptr _asyncResolverFactory; std::unique_ptr _transportChannel; diff --git a/TMessagesProj/jni/voip/tgcalls/SctpDataChannelProviderInterfaceImpl.cpp b/TMessagesProj/jni/voip/tgcalls/SctpDataChannelProviderInterfaceImpl.cpp index fea07b8a1d..7ac3cc6f71 100644 --- a/TMessagesProj/jni/voip/tgcalls/SctpDataChannelProviderInterfaceImpl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/SctpDataChannelProviderInterfaceImpl.cpp @@ -1,6 +1,8 @@ #include "SctpDataChannelProviderInterfaceImpl.h" #include "p2p/base/dtls_transport.h" +#include "api/transport/field_trial_based_config.h" +#include "FieldTrialsConfig.h" namespace tgcalls { @@ -21,9 +23,9 @@ _onMessageReceived(onMessageReceived) { _sctpTransportFactory.reset(new cricket::SctpTransportFactory(_threads->getNetworkThread())); _sctpTransport = _sctpTransportFactory->CreateSctpTransport(transportChannel); - _sctpTransport->SignalReadyToSendData.connect(this, &SctpDataChannelProviderInterfaceImpl::sctpReadyToSendData); - _sctpTransport->SignalDataReceived.connect(this, &SctpDataChannelProviderInterfaceImpl::sctpDataReceived); - _sctpTransport->SignalClosedAbruptly.connect(this, &SctpDataChannelProviderInterfaceImpl::sctpClosedAbruptly); + _sctpTransport->SetDataChannelSink(this); + + // TODO: should we disconnect the data channel sink? 
webrtc::InternalDataChannelInit dataChannelInit; dataChannelInit.id = 0; @@ -97,13 +99,13 @@ void SctpDataChannelProviderInterfaceImpl::updateIsConnected(bool isConnected) { } } -void SctpDataChannelProviderInterfaceImpl::sctpReadyToSendData() { +void SctpDataChannelProviderInterfaceImpl::OnReadyToSend() { assert(_threads->getNetworkThread()->IsCurrent()); _dataChannel->OnTransportReady(true); } -void SctpDataChannelProviderInterfaceImpl::sctpClosedAbruptly(webrtc::RTCError error) { +void SctpDataChannelProviderInterfaceImpl::OnTransportClosed(webrtc::RTCError error) { assert(_threads->getNetworkThread()->IsCurrent()); if (_onTerminated) { @@ -111,10 +113,13 @@ void SctpDataChannelProviderInterfaceImpl::sctpClosedAbruptly(webrtc::RTCError e } } -void SctpDataChannelProviderInterfaceImpl::sctpDataReceived(const cricket::ReceiveDataParams& params, const rtc::CopyOnWriteBuffer& buffer) { +void SctpDataChannelProviderInterfaceImpl::OnDataReceived(int channel_id, webrtc::DataMessageType type, const rtc::CopyOnWriteBuffer& buffer) { assert(_threads->getNetworkThread()->IsCurrent()); - _dataChannel->OnDataReceived(params, buffer); + _dataChannel->OnDataReceived(cricket::ReceiveDataParams { + .sid = channel_id, + .type = type + }, buffer); } bool SctpDataChannelProviderInterfaceImpl::SendData( @@ -149,7 +154,7 @@ void SctpDataChannelProviderInterfaceImpl::AddSctpDataStream(int sid) { void SctpDataChannelProviderInterfaceImpl::RemoveSctpDataStream(int sid) { assert(_threads->getNetworkThread()->IsCurrent()); - _threads->getNetworkThread()->Invoke(RTC_FROM_HERE, [this, sid]() { + _threads->getNetworkThread()->BlockingCall([this, sid]() { _sctpTransport->ResetStream(sid); }); } diff --git a/TMessagesProj/jni/voip/tgcalls/SctpDataChannelProviderInterfaceImpl.h b/TMessagesProj/jni/voip/tgcalls/SctpDataChannelProviderInterfaceImpl.h index 59904afc68..5da150a29e 100644 --- a/TMessagesProj/jni/voip/tgcalls/SctpDataChannelProviderInterfaceImpl.h +++ b/TMessagesProj/jni/voip/tgcalls/SctpDataChannelProviderInterfaceImpl.h @@ -16,7 +16,7 @@ class DtlsTransport; namespace tgcalls { -class SctpDataChannelProviderInterfaceImpl : public sigslot::has_slots<>, public webrtc::SctpDataChannelProviderInterface, public webrtc::DataChannelObserver { +class SctpDataChannelProviderInterfaceImpl : public sigslot::has_slots<>, public webrtc::SctpDataChannelControllerInterface, public webrtc::DataChannelObserver, public webrtc::DataChannelSink { public: SctpDataChannelProviderInterfaceImpl( cricket::DtlsTransport *transportChannel, @@ -44,10 +44,15 @@ class SctpDataChannelProviderInterfaceImpl : public sigslot::has_slots<>, public virtual void RemoveSctpDataStream(int sid) override; virtual bool ReadyToSendData() const override; -private: - void sctpReadyToSendData(); - void sctpClosedAbruptly(webrtc::RTCError error); - void sctpDataReceived(const cricket::ReceiveDataParams& params, const rtc::CopyOnWriteBuffer& buffer); + virtual void OnDataReceived(int channel_id, + webrtc::DataMessageType type, + const rtc::CopyOnWriteBuffer& buffer) override; + virtual void OnReadyToSend() override; + virtual void OnTransportClosed(webrtc::RTCError error) override; + + // Unused + virtual void OnChannelClosing(int channel_id) override{} + virtual void OnChannelClosed(int channel_id) override{} private: std::shared_ptr _threads; diff --git a/TMessagesProj/jni/voip/tgcalls/StaticThreads.cpp b/TMessagesProj/jni/voip/tgcalls/StaticThreads.cpp index 0f34ae7e3a..db11059cc6 100644 --- a/TMessagesProj/jni/voip/tgcalls/StaticThreads.cpp +++ 
b/TMessagesProj/jni/voip/tgcalls/StaticThreads.cpp @@ -82,22 +82,11 @@ class ThreadsImpl : public Threads { rtc::Thread *getWorkerThread() override { return worker_.get(); } - rtc::scoped_refptr getSharedModuleThread() override { - // This function must be called from a single thread because of SharedModuleThread implementation - // So we don't care about making it thread safe - if (!shared_module_thread_) { - shared_module_thread_ = webrtc::SharedModuleThread::Create( - webrtc::ProcessThread::Create("tgc-module"), - [=] { shared_module_thread_ = nullptr; }); - } - return shared_module_thread_; - } private: Thread network_; Thread media_; Thread worker_; - rtc::scoped_refptr shared_module_thread_; static Thread create(const std::string &name) { return init(std::unique_ptr(rtc::Thread::Create()), name); diff --git a/TMessagesProj/jni/voip/tgcalls/StaticThreads.h b/TMessagesProj/jni/voip/tgcalls/StaticThreads.h index eb1c0358bd..1767a8198a 100644 --- a/TMessagesProj/jni/voip/tgcalls/StaticThreads.h +++ b/TMessagesProj/jni/voip/tgcalls/StaticThreads.h @@ -8,9 +8,6 @@ class Thread; template class scoped_refptr; } -namespace webrtc { -class SharedModuleThread; -} namespace tgcalls { @@ -20,7 +17,6 @@ class Threads { virtual rtc::Thread *getNetworkThread() = 0; virtual rtc::Thread *getMediaThread() = 0; virtual rtc::Thread *getWorkerThread() = 0; - virtual rtc::scoped_refptr getSharedModuleThread() = 0; // it is not possible to decrease pool size static void setPoolSize(size_t size); @@ -31,7 +27,6 @@ namespace StaticThreads { rtc::Thread *getNetworkThread(); rtc::Thread *getMediaThread(); rtc::Thread *getWorkerThread(); -rtc::scoped_refptr getSharedMoudleThread(); std::shared_ptr &getThreads(); } diff --git a/TMessagesProj/jni/voip/tgcalls/ThreadLocalObject.h b/TMessagesProj/jni/voip/tgcalls/ThreadLocalObject.h index 3c6c51cf59..baa49ba5c3 100644 --- a/TMessagesProj/jni/voip/tgcalls/ThreadLocalObject.h +++ b/TMessagesProj/jni/voip/tgcalls/ThreadLocalObject.h @@ -2,7 +2,6 @@ #define TGCALLS_THREAD_LOCAL_OBJECT_H #include "rtc_base/thread.h" -#include "rtc_base/location.h" #include #include @@ -19,20 +18,20 @@ class ThreadLocalObject { _thread(thread), _valueHolder(std::make_unique()) { assert(_thread != nullptr); - _thread->PostTask(RTC_FROM_HERE, [valueHolder = _valueHolder.get(), generator = std::forward(generator)]() mutable { + _thread->PostTask([valueHolder = _valueHolder.get(), generator = std::forward(generator)]() mutable { valueHolder->_value.reset(generator()); }); } ~ThreadLocalObject() { - _thread->PostTask(RTC_FROM_HERE, [valueHolder = std::move(_valueHolder)](){ + _thread->PostTask([valueHolder = std::move(_valueHolder)](){ valueHolder->_value.reset(); }); } template - void perform(const rtc::Location& posted_from, FunctorT &&functor) { - _thread->PostTask(posted_from, [valueHolder = _valueHolder.get(), f = std::forward(functor)]() mutable { + void perform(FunctorT &&functor) { + _thread->PostTask([valueHolder = _valueHolder.get(), f = std::forward(functor)]() mutable { assert(valueHolder->_value != nullptr); f(valueHolder->_value.get()); }); diff --git a/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.cpp b/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.cpp index 04a8f92555..c5d4e3495a 100644 --- a/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.cpp @@ -22,7 +22,7 @@ VideoCaptureInterfaceObject::~VideoCaptureInterfaceObject() { } } -webrtc::VideoTrackSourceInterface 
*VideoCaptureInterfaceObject::source() { +rtc::scoped_refptr VideoCaptureInterfaceObject::source() { return _videoSource; } @@ -177,47 +177,47 @@ _impl(threads->getMediaThread(), [deviceId, isScreenCapture, platformContext, th VideoCaptureInterfaceImpl::~VideoCaptureInterfaceImpl() = default; void VideoCaptureInterfaceImpl::switchToDevice(std::string deviceId, bool isScreenCapture) { - _impl.perform(RTC_FROM_HERE, [deviceId, isScreenCapture](VideoCaptureInterfaceObject *impl) { + _impl.perform([deviceId, isScreenCapture](VideoCaptureInterfaceObject *impl) { impl->switchToDevice(deviceId, isScreenCapture); }); } void VideoCaptureInterfaceImpl::withNativeImplementation(std::function completion) { - _impl.perform(RTC_FROM_HERE, [completion](VideoCaptureInterfaceObject *impl) { + _impl.perform([completion](VideoCaptureInterfaceObject *impl) { impl->withNativeImplementation(completion); }); } void VideoCaptureInterfaceImpl::setState(VideoState state) { - _impl.perform(RTC_FROM_HERE, [state](VideoCaptureInterfaceObject *impl) { + _impl.perform([state](VideoCaptureInterfaceObject *impl) { impl->setState(state); }); } void VideoCaptureInterfaceImpl::setPreferredAspectRatio(float aspectRatio) { - _impl.perform(RTC_FROM_HERE, [aspectRatio](VideoCaptureInterfaceObject *impl) { + _impl.perform([aspectRatio](VideoCaptureInterfaceObject *impl) { impl->setPreferredAspectRatio(aspectRatio); }); } void VideoCaptureInterfaceImpl::setOnFatalError(std::function error) { - _impl.perform(RTC_FROM_HERE, [error](VideoCaptureInterfaceObject *impl) { + _impl.perform([error](VideoCaptureInterfaceObject *impl) { impl->setOnFatalError(error); }); } void VideoCaptureInterfaceImpl::setOnPause(std::function pause) { - _impl.perform(RTC_FROM_HERE, [pause](VideoCaptureInterfaceObject *impl) { + _impl.perform([pause](VideoCaptureInterfaceObject *impl) { impl->setOnPause(pause); }); } void VideoCaptureInterfaceImpl::setOnIsActiveUpdated(std::function onIsActiveUpdated) { - _impl.perform(RTC_FROM_HERE, [onIsActiveUpdated](VideoCaptureInterfaceObject *impl) { + _impl.perform([onIsActiveUpdated](VideoCaptureInterfaceObject *impl) { impl->setOnIsActiveUpdated(onIsActiveUpdated); }); } void VideoCaptureInterfaceImpl::setOutput(std::shared_ptr> sink) { - _impl.perform(RTC_FROM_HERE, [sink](VideoCaptureInterfaceObject *impl) { + _impl.perform([sink](VideoCaptureInterfaceObject *impl) { impl->setOutput(sink); }); } diff --git a/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.h b/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.h index a296c2cec3..ebe23ed9cd 100644 --- a/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.h +++ b/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.h @@ -27,7 +27,7 @@ class VideoCaptureInterfaceObject { void setOnFatalError(std::function error); void setOnPause(std::function pause); void setOnIsActiveUpdated(std::function onIsActiveUpdated); - webrtc::VideoTrackSourceInterface *source(); + rtc::scoped_refptr source(); int getRotation(); bool isScreenCapture(); diff --git a/TMessagesProj/jni/voip/tgcalls/group/AVIOContextImpl.cpp b/TMessagesProj/jni/voip/tgcalls/group/AVIOContextImpl.cpp index 337efa9b27..a620e92f1c 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/AVIOContextImpl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/group/AVIOContextImpl.cpp @@ -54,7 +54,7 @@ _fileData(std::move(fileData)) { } AVIOContextImpl::~AVIOContextImpl() { - av_free(_context); + avio_context_free(&_context); } AVIOContext *AVIOContextImpl::getContext() const { diff --git 
a/TMessagesProj/jni/voip/tgcalls/group/AudioStreamingPartInternal.cpp b/TMessagesProj/jni/voip/tgcalls/group/AudioStreamingPartInternal.cpp index 56390368a4..178091618a 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/AudioStreamingPartInternal.cpp +++ b/TMessagesProj/jni/voip/tgcalls/group/AudioStreamingPartInternal.cpp @@ -291,10 +291,12 @@ void AudioStreamingPartInternal::fillPcmBuffer(AudioStreamingPartPersistentDecod } if (_packet.stream_index != _streamId) { + av_packet_unref(&_packet); continue; } ret = persistentDecoder.decode(_audioCodecParameters, _inputFormatContext->streams[_streamId]->time_base, _packet, _frame); + av_packet_unref(&_packet); if (ret == AVERROR(EAGAIN)) { continue; diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.cpp b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.cpp index 93c2288258..b21ad4969c 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.cpp @@ -30,7 +30,8 @@ #include "absl/strings/match.h" #include "modules/audio_processing/agc2/cpu_features.h" #include "modules/audio_processing/agc2/vad_wrapper.h" -#include "pc/channel_manager.h" +#include "pc/channel.h" +#include "pc/rtp_transport.h" #include "audio/audio_state.h" #include "modules/audio_coding/neteq/default_neteq_factory.h" #include "modules/audio_coding/include/audio_coding_module.h" @@ -39,6 +40,7 @@ #include "common_audio/resampler/include/resampler.h" #include "modules/rtp_rtcp/source/rtp_util.h" +#include "ChannelManager.h" #include "AudioFrame.h" #include "ThreadLocalObject.h" #include "Manager.h" @@ -70,6 +72,7 @@ #endif #include "GroupJoinPayloadInternal.h" +#include "FieldTrialsConfig.h" #include "third-party/json11.hpp" @@ -907,7 +910,7 @@ class ExternalAudioRecorder : public FakeAudioDeviceModule::Recorder { class IncomingAudioChannel : public sigslot::has_slots<> { public: IncomingAudioChannel( - cricket::ChannelManager *channelManager, + ChannelManager *channelManager, webrtc::Call *call, webrtc::RtpTransport *rtpTransport, rtc::UniqueRandomIdGenerator *randomIdGenerator, @@ -922,14 +925,18 @@ class IncomingAudioChannel : public sigslot::has_slots<> { _call(call) { _creationTimestamp = rtc::TimeMillis(); - threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this, rtpTransport, ssrc, onAudioFrame = std::move(onAudioFrame), onAudioLevelUpdated = std::move(onAudioLevelUpdated), randomIdGenerator, isRawPcm]() mutable { + threads->getWorkerThread()->BlockingCall([this, rtpTransport, ssrc, onAudioFrame = std::move(onAudioFrame), onAudioLevelUpdated = std::move(onAudioLevelUpdated), isRawPcm]() mutable { cricket::AudioOptions audioOptions; audioOptions.audio_jitter_buffer_fast_accelerate = true; audioOptions.audio_jitter_buffer_min_delay_ms = 50; std::string streamId = std::string("stream") + ssrc.name(); - _audioChannel = _channelManager->CreateVoiceChannel(_call, cricket::MediaConfig(), rtpTransport, _threads->getWorkerThread(), std::string("audio") + uint32ToString(ssrc.networkSsrc), false, GroupNetworkManager::getDefaulCryptoOptions(), randomIdGenerator, audioOptions); + _audioChannel = _channelManager->CreateVoiceChannel(_call, cricket::MediaConfig(), std::string("audio") + uint32ToString(ssrc.networkSsrc), false, GroupNetworkManager::getDefaulCryptoOptions(), audioOptions); + + _threads->getNetworkThread()->BlockingCall([&]() { + _audioChannel->SetRtpTransport(rtpTransport); + }); const uint8_t opusPTimeMs = 120; @@ -966,9 +973,10 @@ class 
IncomingAudioChannel : public sigslot::has_slots<> { streamParams.set_stream_ids({ streamId }); incomingAudioDescription->AddStream(streamParams); - _audioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr); - _audioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr); + std::string errorDesc; _audioChannel->SetPayloadTypeDemuxingEnabled(false); + _audioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, errorDesc); + _audioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, errorDesc); outgoingAudioDescription.reset(); incomingAudioDescription.reset(); @@ -977,20 +985,23 @@ class IncomingAudioChannel : public sigslot::has_slots<> { std::unique_ptr audioLevelSink(new AudioSinkImpl(std::move(onAudioLevelUpdated), _ssrc, std::move(onAudioFrame))); _audioChannel->media_channel()->SetRawAudioSink(ssrc.networkSsrc, std::move(audioLevelSink)); } - - _audioChannel->Enable(true); }); + + _audioChannel->Enable(true); } ~IncomingAudioChannel() { - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { - _channelManager->DestroyVoiceChannel(_audioChannel); + _threads->getNetworkThread()->BlockingCall([&]() { + _audioChannel->SetRtpTransport(nullptr); + }); + _threads->getWorkerThread()->BlockingCall([this]() { + _channelManager->DestroyChannel(_audioChannel); _audioChannel = nullptr; }); } void setVolume(double value) { - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this, value]() { + _threads->getWorkerThread()->BlockingCall([this, value]() { _audioChannel->media_channel()->SetOutputVolume(_ssrc.networkSsrc, value); }); } @@ -1009,7 +1020,7 @@ class IncomingAudioChannel : public sigslot::has_slots<> { // Memory is managed by _channelManager cricket::VoiceChannel *_audioChannel = nullptr; // Memory is managed externally - cricket::ChannelManager *_channelManager = nullptr; + ChannelManager *_channelManager = nullptr; webrtc::Call *_call = nullptr; int64_t _creationTimestamp = 0; int64_t _activityTimestamp = 0; @@ -1018,7 +1029,7 @@ class IncomingAudioChannel : public sigslot::has_slots<> { class IncomingVideoChannel : public sigslot::has_slots<> { public: IncomingVideoChannel( - cricket::ChannelManager *channelManager, + ChannelManager *channelManager, webrtc::Call *call, webrtc::RtpTransport *rtpTransport, rtc::UniqueRandomIdGenerator *randomIdGenerator, @@ -1037,7 +1048,7 @@ class IncomingVideoChannel : public sigslot::has_slots<> { _requestedMaxQuality(maxQuality) { _videoSink.reset(new VideoSinkImpl(_endpointId)); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this, rtpTransport, &availableVideoFormats, &description, randomIdGenerator]() mutable { + _threads->getWorkerThread()->BlockingCall([this, rtpTransport, &availableVideoFormats, &description, randomIdGenerator]() mutable { uint32_t mid = randomIdGenerator->GenerateId(); std::string streamId = std::string("video") + uint32ToString(mid); @@ -1102,22 +1113,29 @@ class IncomingVideoChannel : public sigslot::has_slots<> { incomingVideoDescription->AddStream(videoRecvStreamParams); - _videoChannel = _channelManager->CreateVideoChannel(_call, cricket::MediaConfig(), rtpTransport, _threads->getWorkerThread(), std::string("video") + uint32ToString(mid), false, GroupNetworkManager::getDefaulCryptoOptions(), randomIdGenerator, cricket::VideoOptions(), _videoBitrateAllocatorFactory.get()); + _videoChannel = _channelManager->CreateVideoChannel(_call, cricket::MediaConfig(), std::string("video") 
+ uint32ToString(mid), false, GroupNetworkManager::getDefaulCryptoOptions(), cricket::VideoOptions(), _videoBitrateAllocatorFactory.get()); - _videoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr); - _videoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr); + _threads->getNetworkThread()->BlockingCall([&]() { + _videoChannel->SetRtpTransport(rtpTransport); + }); + + std::string errorDesc; + _videoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, errorDesc); + _videoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, errorDesc); _videoChannel->SetPayloadTypeDemuxingEnabled(false); _videoChannel->media_channel()->SetSink(_mainVideoSsrc, _videoSink.get()); - - _videoChannel->Enable(true); }); + + _videoChannel->Enable(true); } ~IncomingVideoChannel() { - //_videoChannel->SignalSentPacket().disconnect(this); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { - _videoChannel->Enable(false); - _channelManager->DestroyVideoChannel(_videoChannel); + _videoChannel->Enable(false); + _threads->getNetworkThread()->BlockingCall([&]() { + _videoChannel->SetRtpTransport(nullptr); + }); + _threads->getWorkerThread()->BlockingCall([this]() { + _channelManager->DestroyChannel(_videoChannel); _videoChannel = nullptr; }); } @@ -1168,7 +1186,7 @@ class IncomingVideoChannel : public sigslot::has_slots<> { // Memory is managed by _channelManager cricket::VideoChannel *_videoChannel; // Memory is managed externally - cricket::ChannelManager *_channelManager = nullptr; + ChannelManager *_channelManager = nullptr; webrtc::Call *_call = nullptr; VideoChannelDescription::Quality _requestedMinQuality = VideoChannelDescription::Quality::Thumbnail; @@ -1236,7 +1254,7 @@ struct DecodedBroadcastPart { std::vector channels; }; -std::function videoCaptureToGetVideoSource(std::shared_ptr videoCapture) { +std::function()> videoCaptureToGetVideoSource(std::shared_ptr videoCapture) { return [videoCapture]() { VideoCaptureInterfaceObject *videoCaptureImpl = GetVideoCaptureAssumingSameThread(videoCapture.get()); return videoCaptureImpl ? 
videoCaptureImpl->source() : nullptr; @@ -1418,10 +1436,10 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena GroupInstanceCustomImpl::customAudioBitrate = _outgoingAudioBitrateKbit * 1000; - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this] { + _threads->getWorkerThread()->BlockingCall([this] { _workerThreadSafery = webrtc::PendingTaskSafetyFlag::Create(); }); - _threads->getNetworkThread()->Invoke(RTC_FROM_HERE, [this] { + _threads->getNetworkThread()->BlockingCall([this] { _networkThreadSafery = webrtc::PendingTaskSafetyFlag::Create(); }); @@ -1447,13 +1465,14 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena destroyOutgoingAudioChannel(); destroyOutgoingVideoChannel(); - _threads->getNetworkThread()->Invoke(RTC_FROM_HERE, [this]() { + _threads->getNetworkThread()->BlockingCall([this]() { _rtpTransport->SignalSentPacket.disconnect(this); _rtpTransport->SignalRtcpPacketReceived.disconnect(this); }); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { - _channelManager = nullptr; + _channelManager = nullptr; + + _threads->getWorkerThread()->BlockingCall([this]() { if (_audioDeviceModule) { _audioDeviceModule->Stop(); _audioDeviceModule = nullptr; @@ -1466,7 +1485,8 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena const auto weak = std::weak_ptr(shared_from_this()); std::stringstream stringStream; - stringStream << "WebRTC-Audio-Allocation/min:" << _outgoingAudioBitrateKbit << "kbps,max:" << _outgoingAudioBitrateKbit << "kbps/" + stringStream << "WebRTC-DataChannel-Dcsctp/Enabled/" + "WebRTC-Audio-Allocation/min:" << _outgoingAudioBitrateKbit << "kbps,max:" << _outgoingAudioBitrateKbit << "kbps/" << "WebRTC-Audio-OpusMinPacketLossRate/Enabled-1/" << "WebRTC-TaskQueuePacer/Enabled/" << "WebRTC-VP8ConferenceTemporalLayers/1/" @@ -1480,8 +1500,9 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena _networkManager.reset(new ThreadLocalObject(_threads->getNetworkThread(), [weak, threads = _threads] () mutable { return new GroupNetworkManager( + fieldTrialsBasedConfig, [=](const GroupNetworkManager::State &state) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -1489,32 +1510,29 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena strong->setIsRtcConnected(state.isReadyToSendData); }); }, - [=](rtc::CopyOnWriteBuffer const &message, bool isUnresolved) { - if (!isUnresolved) { - return; - } - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, message, isUnresolved]() mutable { + [=](uint32_t ssrc, int payloadType) { + threads->getMediaThread()->PostTask([weak, ssrc, payloadType]() mutable { if (const auto strong = weak.lock()) { - strong->receivePacket(message, isUnresolved); + strong->receiveUnknownSsrcPacket(ssrc, payloadType); } }); }, [=](bool isDataChannelOpen) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, isDataChannelOpen]() mutable { + threads->getMediaThread()->PostTask([weak, isDataChannelOpen]() mutable { if (const auto strong = weak.lock()) { strong->updateIsDataChannelOpen(isDataChannelOpen); } }); }, [=](std::string const &message) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, message]() { + threads->getMediaThread()->PostTask([weak, message]() { if (const auto strong = weak.lock()) { strong->receiveDataChannelMessage(message); } }); }, [=](uint32_t 
ssrc, uint8_t audioLevel, bool isSpeech) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, ssrc, audioLevel, isSpeech]() { + threads->getMediaThread()->PostTask([weak, ssrc, audioLevel, isSpeech]() { if (const auto strong = weak.lock()) { strong->updateSsrcAudioLevel(ssrc, audioLevel, isSpeech); } @@ -1536,7 +1554,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena #if USE_RNNOISE audioProcessor = std::make_unique([weak, threads = _threads](GroupLevelValue const &level) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, level](){ + threads->getMediaThread()->PostTask([weak, level](){ auto strong = weak.lock(); if (!strong) { return; @@ -1547,65 +1565,61 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena #endif } - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this - #if USE_RNNOISE - , audioProcessor = std::move(audioProcessor) - #endif - ]() mutable { - cricket::MediaEngineDependencies mediaDeps; - mediaDeps.task_queue_factory = _taskQueueFactory.get(); - mediaDeps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory(); - mediaDeps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory(); + _audioDeviceDataObserverShared = std::make_shared(); + + _threads->getWorkerThread()->BlockingCall([this]() mutable { + _audioDeviceModule = createAudioDeviceModule(); + if (!_audioDeviceModule) { + return; + } + }); - mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(_platformContext, false, _videoContentType == VideoContentType::Screencast); - mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(_platformContext); + cricket::MediaEngineDependencies mediaDeps; + mediaDeps.task_queue_factory = _taskQueueFactory.get(); + mediaDeps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory(); + mediaDeps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory(); - #if USE_RNNOISE - if (_audioLevelsUpdated && audioProcessor) { - webrtc::AudioProcessingBuilder builder; - builder.SetCapturePostProcessing(std::move(audioProcessor)); + mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(_platformContext, false, _videoContentType == VideoContentType::Screencast); + mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(_platformContext); - builder.SetEchoDetector(rtc::make_ref_counted()); +#if USE_RNNOISE + if (_audioLevelsUpdated && audioProcessor) { + webrtc::AudioProcessingBuilder builder; + builder.SetCapturePostProcessing(std::move(audioProcessor)); - mediaDeps.audio_processing = builder.Create(); - } - #endif + builder.SetEchoDetector(rtc::make_ref_counted()); - _audioDeviceDataObserverShared = std::make_shared(); + mediaDeps.audio_processing = builder.Create(); + } +#endif - _audioDeviceModule = createAudioDeviceModule(); - if (!_audioDeviceModule) { - return; - } - mediaDeps.adm = _audioDeviceModule; + mediaDeps.adm = _audioDeviceModule; - _availableVideoFormats = filterSupportedVideoFormats(mediaDeps.video_encoder_factory->GetSupportedFormats()); + _availableVideoFormats = filterSupportedVideoFormats(mediaDeps.video_encoder_factory->GetSupportedFormats()); - std::unique_ptr mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps)); + std::unique_ptr mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps)); - _channelManager = cricket::ChannelManager::Create( - std::move(mediaEngine), - true, - 
_threads->getWorkerThread(), - _threads->getNetworkThread() - ); - }); + _channelManager = ChannelManager::Create( + std::move(mediaEngine), + _threads->getWorkerThread(), + _threads->getNetworkThread() + ); setAudioInputDevice(_initialInputDeviceId); setAudioOutputDevice(_initialOutputDeviceId); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { + _threads->getWorkerThread()->BlockingCall([this]() { webrtc::Call::Config callConfig(_eventLog.get(), _threads->getNetworkThread()); callConfig.neteq_factory = _netEqFactory.get(); callConfig.task_queue_factory = _taskQueueFactory.get(); - callConfig.trials = &_fieldTrials; + callConfig.trials = &fieldTrialsBasedConfig; callConfig.audio_state = _channelManager->media_engine()->voice().GetAudioState(); - _call.reset(webrtc::Call::Create(callConfig, webrtc::Clock::GetRealTimeClock(), _threads->getSharedModuleThread(), webrtc::ProcessThread::Create("PacerThread"))); + _call.reset(webrtc::Call::Create(callConfig)); }); _uniqueRandomIdGenerator.reset(new rtc::UniqueRandomIdGenerator()); - _threads->getNetworkThread()->Invoke(RTC_FROM_HERE, [this]() { + _threads->getNetworkThread()->BlockingCall([this]() { _rtpTransport = _networkManager->getSyncAssumingSameThread()->getRtpTransport(); _rtpTransport->SignalSentPacket.connect(this, &GroupInstanceCustomInternal::OnSentPacket_w); _rtpTransport->SignalRtcpPacketReceived.connect(this, &GroupInstanceCustomInternal::OnRtcpPacketReceived_n); @@ -1645,10 +1659,13 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena if (!_outgoingVideoChannel) { return; } - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { - _outgoingVideoChannel->Enable(false); + _outgoingVideoChannel->Enable(false); + _threads->getNetworkThread()->BlockingCall([&]() { + _outgoingVideoChannel->SetRtpTransport(nullptr); + }); + _threads->getWorkerThread()->BlockingCall([this]() { _outgoingVideoChannel->media_channel()->SetVideoSend(_outgoingVideoSsrcs.simulcastLayers[0].ssrc, nullptr, nullptr); - _channelManager->DestroyVideoChannel(_outgoingVideoChannel); + _channelManager->DestroyChannel(_outgoingVideoChannel); }); _outgoingVideoChannel = nullptr; } @@ -1669,7 +1686,10 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena if (_videoContentType == VideoContentType::Screencast) { videoOptions.is_screencast = true; } - _outgoingVideoChannel = _channelManager->CreateVideoChannel(_call.get(), cricket::MediaConfig(), _rtpTransport, _threads->getWorkerThread(), "1", false, GroupNetworkManager::getDefaulCryptoOptions(), _uniqueRandomIdGenerator.get(), videoOptions, _videoBitrateAllocatorFactory.get()); + _outgoingVideoChannel = _channelManager->CreateVideoChannel(_call.get(), cricket::MediaConfig(), "1", false, GroupNetworkManager::getDefaulCryptoOptions(), videoOptions, _videoBitrateAllocatorFactory.get()); + _threads->getNetworkThread()->BlockingCall([&]() { + _outgoingVideoChannel->SetRtpTransport(_rtpTransport); + }); if (!_outgoingVideoChannel) { RTC_LOG(LS_ERROR) << "Could not create outgoing video channel."; @@ -1729,11 +1749,12 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena incomingVideoDescription->set_codecs({ _selectedPayloadType->videoCodec, _selectedPayloadType->rtxCodec }); incomingVideoDescription->set_bandwidth(1300000); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { - _outgoingVideoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr); - 
_outgoingVideoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr); - _outgoingVideoChannel->SetPayloadTypeDemuxingEnabled(false); - }); + _threads->getWorkerThread()->BlockingCall([&]() { + std::string errorDesc; + _outgoingVideoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, errorDesc); + _outgoingVideoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, errorDesc); + _outgoingVideoChannel->SetPayloadTypeDemuxingEnabled(false); + }); adjustVideoSendParams(); updateVideoSend(); @@ -1745,7 +1766,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena } if (_videoContentType == VideoContentType::Screencast) { - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { + _threads->getWorkerThread()->BlockingCall([this]() { webrtc::RtpParameters rtpParameters = _outgoingVideoChannel->media_channel()->GetRtpSendParameters(_outgoingVideoSsrcs.simulcastLayers[0].ssrc); if (rtpParameters.encodings.size() == 3) { for (int i = 0; i < (int)rtpParameters.encodings.size(); i++) { @@ -1783,7 +1804,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena _outgoingVideoChannel->media_channel()->SetRtpSendParameters(_outgoingVideoSsrcs.simulcastLayers[0].ssrc, rtpParameters); }); } else { - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { + _threads->getWorkerThread()->BlockingCall([this]() { webrtc::RtpParameters rtpParameters = _outgoingVideoChannel->media_channel()->GetRtpSendParameters(_outgoingVideoSsrcs.simulcastLayers[0].ssrc); if (rtpParameters.encodings.size() == 3) { for (int i = 0; i < (int)rtpParameters.encodings.size(); i++) { @@ -1828,13 +1849,16 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena return; } - webrtc::VideoTrackSourceInterface *videoSource = _getVideoSource ? _getVideoSource() : nullptr; - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this, videoSource]() { + rtc::scoped_refptr videoSource = _getVideoSource ? 
_getVideoSource() : nullptr; + if (_getVideoSource) { + _outgoingVideoChannel->Enable(true); + } else { + _outgoingVideoChannel->Enable(false); + } + _threads->getWorkerThread()->BlockingCall([this, videoSource]() { if (_getVideoSource) { - _outgoingVideoChannel->Enable(true); - _outgoingVideoChannel->media_channel()->SetVideoSend(_outgoingVideoSsrcs.simulcastLayers[0].ssrc, nullptr, videoSource); + _outgoingVideoChannel->media_channel()->SetVideoSend(_outgoingVideoSsrcs.simulcastLayers[0].ssrc, nullptr, videoSource.get()); } else { - _outgoingVideoChannel->Enable(false); _outgoingVideoChannel->media_channel()->SetVideoSend(_outgoingVideoSsrcs.simulcastLayers[0].ssrc, nullptr, nullptr); } }); @@ -1845,10 +1869,13 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena return; } - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { + _outgoingAudioChannel->Enable(false); + _threads->getNetworkThread()->BlockingCall([&]() { + _outgoingAudioChannel->SetRtpTransport(nullptr); + }); + _threads->getWorkerThread()->BlockingCall([this]() { _outgoingAudioChannel->media_channel()->SetAudioSend(_outgoingAudioSsrc, false, nullptr, &_audioSource); - _outgoingAudioChannel->Enable(false); - _channelManager->DestroyVoiceChannel(_outgoingAudioChannel); + _channelManager->DestroyChannel(_outgoingAudioChannel); }); _outgoingAudioChannel = nullptr; } @@ -1865,21 +1892,21 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena audioOptions.noise_suppression = false; audioOptions.auto_gain_control = false; audioOptions.highpass_filter = false; - audioOptions.typing_detection = false; - audioOptions.experimental_agc = false; - audioOptions.experimental_ns = false; - audioOptions.residual_echo_detector = false; + //audioOptions.typing_detection = false; + //audioOptions.residual_echo_detector = false; } else { audioOptions.echo_cancellation = true; audioOptions.noise_suppression = true; - audioOptions.experimental_ns = true; - audioOptions.residual_echo_detector = true; + //audioOptions.residual_echo_detector = true; } std::vector streamIds; streamIds.push_back("1"); - _outgoingAudioChannel = _channelManager->CreateVoiceChannel(_call.get(), cricket::MediaConfig(), _rtpTransport, _threads->getWorkerThread(), "0", false, GroupNetworkManager::getDefaulCryptoOptions(), _uniqueRandomIdGenerator.get(), audioOptions); + _outgoingAudioChannel = _channelManager->CreateVoiceChannel(_call.get(), cricket::MediaConfig(), "0", false, GroupNetworkManager::getDefaulCryptoOptions(), audioOptions); + _threads->getNetworkThread()->BlockingCall([&]() { + _outgoingAudioChannel->SetRtpTransport(_rtpTransport); + }); int opusMinBitrateKbps = _outgoingAudioBitrateKbit; int opusMaxBitrateKbps = _outgoingAudioBitrateKbit; @@ -1915,19 +1942,24 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena incomingAudioDescription->set_codecs({ opusCodec }); incomingAudioDescription->set_bandwidth(1300000); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { - _outgoingAudioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr); - _outgoingAudioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr); + _threads->getWorkerThread()->BlockingCall([&]() { + std::string errorDesc; + _outgoingAudioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, errorDesc); + _outgoingAudioChannel->SetRemoteContent(incomingAudioDescription.get(), 
webrtc::SdpType::kAnswer, errorDesc); _outgoingAudioChannel->SetPayloadTypeDemuxingEnabled(false); - _outgoingAudioChannel->Enable(true); }); + _outgoingAudioChannel->Enable(true); + onUpdatedIsMuted(); adjustBitratePreferences(false); } void stop() { + _networkManager->perform([](GroupNetworkManager *networkManager) { + networkManager->stop(); + }); } void updateSsrcAudioLevel(uint32_t ssrc, uint8_t audioLevel, bool isSpeech) { @@ -1963,7 +1995,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena void beginLevelsTimer(int timeoutMs) { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + _threads->getMediaThread()->PostDelayedTask([weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -2014,17 +2046,17 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena } bool isSpeech = myAudioLevel.voice && !myAudioLevel.isMuted; - strong->_networkManager->perform(RTC_FROM_HERE, [isSpeech = isSpeech](GroupNetworkManager *networkManager) { + strong->_networkManager->perform([isSpeech = isSpeech](GroupNetworkManager *networkManager) { networkManager->setOutgoingVoiceActivity(isSpeech); }); strong->beginLevelsTimer(100); - }, timeoutMs); + }, webrtc::TimeDelta::Millis(timeoutMs)); } void beginAudioChannelCleanupTimer(int delayMs) { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + _threads->getMediaThread()->PostDelayedTask([weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -2048,12 +2080,12 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena } strong->beginAudioChannelCleanupTimer(500); - }, delayMs); + }, webrtc::TimeDelta::Millis(delayMs)); } void beginRemoteConstraintsUpdateTimer(int delayMs) { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + _threads->getMediaThread()->PostDelayedTask([weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -2062,12 +2094,12 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena strong->maybeUpdateRemoteVideoConstraints(); strong->beginRemoteConstraintsUpdateTimer(5000); - }, delayMs); + }, webrtc::TimeDelta::Millis(delayMs)); } void beginNetworkStatusTimer(int delayMs) { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + _threads->getMediaThread()->PostDelayedTask([weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -2078,7 +2110,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena } strong->beginNetworkStatusTimer(500); - }, delayMs); + }, webrtc::TimeDelta::Millis(delayMs)); } void updateBroadcastNetworkStatus() { @@ -2252,7 +2284,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena settings.max_bitrate_bps = preferences.max_bitrate_bps; _call->GetTransportControllerSend()->SetSdpBitrateParameters(preferences); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + _threads->getWorkerThread()->BlockingCall([&]() { _call->SetClientBitratePreferences(settings); }); } @@ -2327,56 +2359,39 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena } } - void receivePacket(rtc::CopyOnWriteBuffer const &packet, bool isUnresolved) { - if (packet.size() >= 4) { - if (packet.data()[0] == 0x13 && packet.data()[1] == 
0x88 && packet.data()[2] == 0x13 && packet.data()[3] == 0x88) { - // SCTP packet header (source port 5000, destination port 5000) - return; - } + void receiveUnknownSsrcPacket(uint32_t ssrc, int payloadType) { + if (ssrc == _outgoingAudioSsrc) { + return; } - if (webrtc::IsRtcpPacket(packet)) { - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this, packet]() { - _call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, packet, -1); - }); - } else { - uint32_t ssrc = webrtc::ParseRtpSsrc(packet); - int payloadType = webrtc::ParseRtpPayloadType(packet); - - if (ssrc == _outgoingAudioSsrc) { - return; + auto ssrcInfo = _channelBySsrc.find(ssrc); + if (ssrcInfo == _channelBySsrc.end()) { + // opus + if (payloadType == 111) { + maybeRequestUnknownSsrc(ssrc); } + } else { + switch (ssrcInfo->second.type) { + case ChannelSsrcInfo::Type::Audio: { + const auto it = _incomingAudioChannels.find(ChannelId(ssrc)); + if (it != _incomingAudioChannels.end()) { + it->second->updateActivity(); + } - auto ssrcInfo = _channelBySsrc.find(ssrc); - if (ssrcInfo == _channelBySsrc.end()) { - // opus - if (payloadType == 111) { - maybeRequestUnknownSsrc(ssrc); - _missingPacketBuffer.add(ssrc, packet); + break; } - } else { - switch (ssrcInfo->second.type) { - case ChannelSsrcInfo::Type::Audio: { - const auto it = _incomingAudioChannels.find(ChannelId(ssrc)); - if (it != _incomingAudioChannels.end()) { - it->second->updateActivity(); - } - - break; - } - case ChannelSsrcInfo::Type::Video: { - break; - } - default: { - break; - } + case ChannelSsrcInfo::Type::Video: { + break; + } + default: { + break; } } } } void receiveRtcpPacket(rtc::CopyOnWriteBuffer const &packet, int64_t timestamp) { - _threads->getWorkerThread()->PostTask(RTC_FROM_HERE, [this, packet, timestamp]() { + _threads->getWorkerThread()->PostTask([this, packet, timestamp]() { _call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, packet, timestamp); }); } @@ -2407,7 +2422,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena _pendingOutgoingVideoConstraintRequestId += 1; const auto weak = std::weak_ptr(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak, requestId]() { + _threads->getMediaThread()->PostDelayedTask([weak, requestId]() { auto strong = weak.lock(); if (!strong) { return; @@ -2419,7 +2434,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena } strong->_pendingOutgoingVideoConstraint = -1; } - }, 2000); + }, webrtc::TimeDelta::Millis(2000)); } else { _pendingOutgoingVideoConstraint = -1; _pendingOutgoingVideoConstraintRequestId += 1; @@ -2507,7 +2522,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena const auto weak = std::weak_ptr(shared_from_this()); auto task = _requestMediaChannelDescriptions(requestSsrcs, [weak, threads = _threads, requestId](std::vector &&descriptions) { - threads->getWorkerThread()->PostTask(RTC_FROM_HERE, [weak, requestId, descriptions = std::move(descriptions)]() mutable { + threads->getMediaThread()->PostTask([weak, requestId, descriptions = std::move(descriptions)]() mutable { auto strong = weak.lock(); if (!strong) { return; @@ -2551,7 +2566,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena auto it = _ssrcMapping.find(ssrc); if (it != _ssrcMapping.end()) { for (const auto &packet : packets) { - _threads->getNetworkThread()->Invoke(RTC_FROM_HERE, [this, packet]() { + _threads->getNetworkThread()->BlockingCall([this, packet]() { 
_rtpTransport->DemuxPacketInternal(packet, -1); }); } @@ -2622,7 +2637,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena json.insert(std::make_pair("constraints", json11::Json(std::move(constraints)))); std::string result = json11::Json(std::move(json)).dump(); - _networkManager->perform(RTC_FROM_HERE, [result = std::move(result)](GroupNetworkManager *networkManager) { + _networkManager->perform([result = std::move(result)](GroupNetworkManager *networkManager) { networkManager->sendDataChannelMessage(result); }); } @@ -2640,7 +2655,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena RTC_CHECK(_connectionMode != previousMode || _connectionMode == GroupConnectionMode::GroupConnectionModeNone); if (previousMode == GroupConnectionMode::GroupConnectionModeRtc) { - _networkManager->perform(RTC_FROM_HERE, [](GroupNetworkManager *networkManager) { + _networkManager->perform([](GroupNetworkManager *networkManager) { networkManager->stop(); }); } else if (previousMode == GroupConnectionMode::GroupConnectionModeBroadcast) { @@ -2672,7 +2687,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena break; } case GroupConnectionMode::GroupConnectionModeRtc: { - _networkManager->perform(RTC_FROM_HERE, [](GroupNetworkManager *networkManager) { + _networkManager->perform([](GroupNetworkManager *networkManager) { networkManager->start(); }); break; @@ -2780,7 +2795,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena } void emitJoinPayload(std::function completion) { - _networkManager->perform(RTC_FROM_HERE, [outgoingAudioSsrc = _outgoingAudioSsrc, /*videoPayloadTypes = _videoPayloadTypes, videoExtensionMap = _videoExtensionMap, */videoSourceGroups = _videoSourceGroups, videoContentType = _videoContentType, completion](GroupNetworkManager *networkManager) { + _networkManager->perform([outgoingAudioSsrc = _outgoingAudioSsrc, /*videoPayloadTypes = _videoPayloadTypes, videoExtensionMap = _videoExtensionMap, */videoSourceGroups = _videoSourceGroups, videoContentType = _videoContentType, completion](GroupNetworkManager *networkManager) { GroupJoinInternalPayload payload; payload.audioSsrc = outgoingAudioSsrc; @@ -2815,7 +2830,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena }); } - void setVideoSource(std::function getVideoSource, bool isInitializing) { + void setVideoSource(std::function()> getVideoSource, bool isInitializing) { bool resetBitrate = (!_getVideoSource) != (!getVideoSource) && !isInitializing; if (!isInitializing && _getVideoSource && getVideoSource && getVideoSource() == _getVideoSource()) { return; @@ -2835,16 +2850,16 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena void setAudioOutputDevice(const std::string &id) { #if not defined(WEBRTC_IOS) && not defined(WEBRTC_ANDROID) - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&] { - SetAudioOutputDeviceById(_audioDeviceModule.get(), id); + _threads->getWorkerThread()->BlockingCall([&] { + SetAudioOutputDeviceById(_audioDeviceModule.get(), id); }); #endif // WEBRTC_IOS } void setAudioInputDevice(const std::string &id) { #if not defined(WEBRTC_IOS) && not defined(WEBRTC_ANDROID) - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&] { - SetAudioInputDeviceById(_audioDeviceModule.get(), id); + _threads->getWorkerThread()->BlockingCall([&] { + SetAudioInputDeviceById(_audioDeviceModule.get(), id); }); #endif // WEBRTC_IOS } @@ -2883,7 +2898,7 @@ 
class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena setServerBandwidthProbingChannelSsrc(parsedPayload->videoInformation->serverVideoBandwidthProbingSsrc); } - _networkManager->perform(RTC_FROM_HERE, [parsedTransport = parsedPayload->transport](GroupNetworkManager *networkManager) { + _networkManager->perform([parsedTransport = parsedPayload->transport](GroupNetworkManager *networkManager) { PeerIceParameters remoteIceParameters; remoteIceParameters.ufrag = parsedTransport.ufrag; remoteIceParameters.pwd = parsedTransport.pwd; @@ -2983,10 +2998,11 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena void onUpdatedIsMuted() { if (_outgoingAudioChannel) { - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { + _threads->getWorkerThread()->BlockingCall([this]() { _outgoingAudioChannel->media_channel()->SetAudioSend(_outgoingAudioSsrc, !_isMuted, nullptr, &_audioSource); - _outgoingAudioChannel->Enable(!_isMuted); }); + + _outgoingAudioChannel->Enable(!_isMuted); } } @@ -3060,7 +3076,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena if (ssrc.actualSsrc != ssrc.networkSsrc) { if (_audioLevelsUpdated) { onAudioSinkUpdate = [weak, ssrc = ssrc, threads = _threads](AudioSinkImpl::Update update) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, ssrc, update]() { + threads->getMediaThread()->PostTask([weak, ssrc, update]() { auto strong = weak.lock(); if (!strong) { return; @@ -3367,7 +3383,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena std::function(std::shared_ptr, int64_t, int64_t, int32_t, VideoChannelDescription::Quality, std::function)> _requestVideoBroadcastPart; std::shared_ptr _videoCapture; std::shared_ptr _videoCaptureSink; - std::function _getVideoSource; + std::function()> _getVideoSource; bool _disableIncomingChannels = false; bool _useDummyChannel{true}; int _outgoingAudioBitrateKbit{32}; @@ -3387,9 +3403,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena std::unique_ptr _eventLog; std::unique_ptr _taskQueueFactory; std::unique_ptr _netEqFactory; - std::unique_ptr _mediaEngine; std::unique_ptr _call; - webrtc::FieldTrialBasedConfig _fieldTrials; webrtc::LocalAudioSinkAdapter _audioSource; std::shared_ptr _audioDeviceDataObserverShared; rtc::scoped_refptr _audioDeviceModule; @@ -3411,7 +3425,7 @@ class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::ena std::unique_ptr _uniqueRandomIdGenerator; webrtc::RtpTransport *_rtpTransport = nullptr; - std::unique_ptr _channelManager; + std::unique_ptr _channelManager; std::unique_ptr _videoBitrateAllocatorFactory; // _outgoingVideoChannel memory is managed by _channelManager @@ -3474,7 +3488,7 @@ GroupInstanceCustomImpl::GroupInstanceCustomImpl(GroupInstanceDescriptor &&descr _internal.reset(new ThreadLocalObject(_threads->getMediaThread(), [descriptor = std::move(descriptor), threads = _threads]() mutable { return new GroupInstanceCustomInternal(std::move(descriptor), threads); })); - _internal->perform(RTC_FROM_HERE, [](GroupInstanceCustomInternal *internal) { + _internal->perform([](GroupInstanceCustomInternal *internal) { internal->start(); }); } @@ -3486,113 +3500,113 @@ GroupInstanceCustomImpl::~GroupInstanceCustomImpl() { _internal.reset(); // Wait until _internal is destroyed - _threads->getMediaThread()->Invoke(RTC_FROM_HERE, [] {}); + _threads->getMediaThread()->BlockingCall([] {}); } void GroupInstanceCustomImpl::stop() { - 
_internal->perform(RTC_FROM_HERE, [](GroupInstanceCustomInternal *internal) { + _internal->perform([](GroupInstanceCustomInternal *internal) { internal->stop(); }); } void GroupInstanceCustomImpl::setConnectionMode(GroupConnectionMode connectionMode, bool keepBroadcastIfWasEnabled, bool isUnifiedBroadcast) { - _internal->perform(RTC_FROM_HERE, [connectionMode, keepBroadcastIfWasEnabled, isUnifiedBroadcast](GroupInstanceCustomInternal *internal) { + _internal->perform([connectionMode, keepBroadcastIfWasEnabled, isUnifiedBroadcast](GroupInstanceCustomInternal *internal) { internal->setConnectionMode(connectionMode, keepBroadcastIfWasEnabled, isUnifiedBroadcast); }); } void GroupInstanceCustomImpl::emitJoinPayload(std::function completion) { - _internal->perform(RTC_FROM_HERE, [completion](GroupInstanceCustomInternal *internal) { + _internal->perform([completion](GroupInstanceCustomInternal *internal) { internal->emitJoinPayload(completion); }); } void GroupInstanceCustomImpl::setJoinResponsePayload(std::string const &payload) { - _internal->perform(RTC_FROM_HERE, [payload](GroupInstanceCustomInternal *internal) { + _internal->perform([payload](GroupInstanceCustomInternal *internal) { internal->setJoinResponsePayload(payload); }); } void GroupInstanceCustomImpl::removeSsrcs(std::vector ssrcs) { - _internal->perform(RTC_FROM_HERE, [ssrcs = std::move(ssrcs)](GroupInstanceCustomInternal *internal) mutable { + _internal->perform([ssrcs = std::move(ssrcs)](GroupInstanceCustomInternal *internal) mutable { internal->removeSsrcs(ssrcs); }); } void GroupInstanceCustomImpl::removeIncomingVideoSource(uint32_t ssrc) { - _internal->perform(RTC_FROM_HERE, [ssrc](GroupInstanceCustomInternal *internal) mutable { + _internal->perform([ssrc](GroupInstanceCustomInternal *internal) mutable { internal->removeIncomingVideoSource(ssrc); }); } void GroupInstanceCustomImpl::setIsMuted(bool isMuted) { - _internal->perform(RTC_FROM_HERE, [isMuted](GroupInstanceCustomInternal *internal) { + _internal->perform([isMuted](GroupInstanceCustomInternal *internal) { internal->setIsMuted(isMuted); }); } void GroupInstanceCustomImpl::setIsNoiseSuppressionEnabled(bool isNoiseSuppressionEnabled) { - _internal->perform(RTC_FROM_HERE, [isNoiseSuppressionEnabled](GroupInstanceCustomInternal *internal) { + _internal->perform([isNoiseSuppressionEnabled](GroupInstanceCustomInternal *internal) { internal->setIsNoiseSuppressionEnabled(isNoiseSuppressionEnabled); }); } void GroupInstanceCustomImpl::setVideoCapture(std::shared_ptr videoCapture) { - _internal->perform(RTC_FROM_HERE, [videoCapture](GroupInstanceCustomInternal *internal) { + _internal->perform([videoCapture](GroupInstanceCustomInternal *internal) { internal->setVideoCapture(videoCapture, false); }); } -void GroupInstanceCustomImpl::setVideoSource(std::function getVideoSource) { - _internal->perform(RTC_FROM_HERE, [getVideoSource](GroupInstanceCustomInternal *internal) { +void GroupInstanceCustomImpl::setVideoSource(std::function()> getVideoSource) { + _internal->perform([getVideoSource](GroupInstanceCustomInternal *internal) { internal->setVideoSource(getVideoSource, false); }); } void GroupInstanceCustomImpl::setAudioOutputDevice(std::string id) { - _internal->perform(RTC_FROM_HERE, [id](GroupInstanceCustomInternal *internal) { + _internal->perform([id](GroupInstanceCustomInternal *internal) { internal->setAudioOutputDevice(id); }); } void GroupInstanceCustomImpl::setAudioInputDevice(std::string id) { - _internal->perform(RTC_FROM_HERE, [id](GroupInstanceCustomInternal 
*internal) { + _internal->perform([id](GroupInstanceCustomInternal *internal) { internal->setAudioInputDevice(id); }); } void GroupInstanceCustomImpl::addExternalAudioSamples(std::vector &&samples) { - _internal->perform(RTC_FROM_HERE, [samples = std::move(samples)](GroupInstanceCustomInternal *internal) mutable { + _internal->perform([samples = std::move(samples)](GroupInstanceCustomInternal *internal) mutable { internal->addExternalAudioSamples(std::move(samples)); }); } void GroupInstanceCustomImpl::addOutgoingVideoOutput(std::weak_ptr> sink) { - _internal->perform(RTC_FROM_HERE, [sink](GroupInstanceCustomInternal *internal) mutable { + _internal->perform([sink](GroupInstanceCustomInternal *internal) mutable { internal->addOutgoingVideoOutput(sink); }); } void GroupInstanceCustomImpl::addIncomingVideoOutput(std::string const &endpointId, std::weak_ptr> sink) { - _internal->perform(RTC_FROM_HERE, [endpointId, sink](GroupInstanceCustomInternal *internal) mutable { + _internal->perform([endpointId, sink](GroupInstanceCustomInternal *internal) mutable { internal->addIncomingVideoOutput(endpointId, sink); }); } void GroupInstanceCustomImpl::setVolume(uint32_t ssrc, double volume) { - _internal->perform(RTC_FROM_HERE, [ssrc, volume](GroupInstanceCustomInternal *internal) { + _internal->perform([ssrc, volume](GroupInstanceCustomInternal *internal) { internal->setVolume(ssrc, volume); }); } void GroupInstanceCustomImpl::setRequestedVideoChannels(std::vector &&requestedVideoChannels) { - _internal->perform(RTC_FROM_HERE, [requestedVideoChannels = std::move(requestedVideoChannels)](GroupInstanceCustomInternal *internal) mutable { + _internal->perform([requestedVideoChannels = std::move(requestedVideoChannels)](GroupInstanceCustomInternal *internal) mutable { internal->setRequestedVideoChannels(std::move(requestedVideoChannels)); }); } void GroupInstanceCustomImpl::getStats(std::function completion) { - _internal->perform(RTC_FROM_HERE, [completion = std::move(completion)](GroupInstanceCustomInternal *internal) mutable { + _internal->perform([completion = std::move(completion)](GroupInstanceCustomInternal *internal) mutable { internal->getStats(completion); }); } diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.h b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.h index 299a4f7d9a..2ba3a02e6f 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.h +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.h @@ -34,7 +34,7 @@ class GroupInstanceCustomImpl final : public GroupInstanceInterface { void setIsMuted(bool isMuted); void setIsNoiseSuppressionEnabled(bool isNoiseSuppressionEnabled); void setVideoCapture(std::shared_ptr videoCapture); - void setVideoSource(std::function getVideoSource); + void setVideoSource(std::function()> getVideoSource); void setAudioOutputDevice(std::string id); void setAudioInputDevice(std::string id); void addExternalAudioSamples(std::vector &&samples); diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.h b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.h index 83dd540f21..60c95415ce 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.h +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.h @@ -152,7 +152,7 @@ struct GroupInstanceDescriptor { bool disableIncomingChannels{false}; std::function(webrtc::TaskQueueFactory*)> createAudioDeviceModule; std::shared_ptr videoCapture; // deprecated - std::function getVideoSource; + std::function()> getVideoSource; 
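
The hunk above changes the shape of the video-source getter used by group calls: judging from the upstream tgcalls headers, getVideoSource moves from returning a raw webrtc::VideoTrackSourceInterface* to returning an rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> (the exact template arguments are an assumption here, since only the outer std::function is visible in this patch view). A minimal caller sketch under that assumption; the useVideoSource helper below is illustrative only, not part of the patch:

#include <functional>

#include "api/media_stream_interface.h"  // webrtc::VideoTrackSourceInterface
#include "api/scoped_refptr.h"           // rtc::scoped_refptr

// Hypothetical helper showing how a ref-counted getter would be consumed.
void useVideoSource(std::function<rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>()> const &getVideoSource) {
    // Holding the scoped_refptr keeps the source alive while it is in use.
    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source = getVideoSource();
    if (source.get() != nullptr) {
        // Pass source.get() only to APIs that still take a raw pointer, e.g.
        // media_channel()->SetVideoSend(ssrc, nullptr, source.get());
    }
}
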
std::function(std::function)> requestCurrentTime; std::function(std::shared_ptr, int64_t, int64_t, std::function)> requestAudioBroadcastPart; std::function(std::shared_ptr, int64_t, int64_t, int32_t, VideoChannelDescription::Quality, std::function)> requestVideoBroadcastPart; @@ -190,7 +190,7 @@ class GroupInstanceInterface { virtual void setIsMuted(bool isMuted) = 0; virtual void setIsNoiseSuppressionEnabled(bool isNoiseSuppressionEnabled) = 0; virtual void setVideoCapture(std::shared_ptr videoCapture) = 0; - virtual void setVideoSource(std::function getVideoSource) = 0; + virtual void setVideoSource(std::function()> getVideoSource) = 0; virtual void setAudioOutputDevice(std::string id) = 0; virtual void setAudioInputDevice(std::string id) = 0; virtual void addExternalAudioSamples(std::vector &&samples) = 0; diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.cpp b/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.cpp index ecdf6c2013..c96da3bbaf 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.cpp +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.cpp @@ -5,7 +5,6 @@ #include "p2p/base/p2p_transport_channel.h" #include "p2p/base/basic_async_resolver_factory.h" #include "api/packet_socket_factory.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/rtc_certificate_generator.h" #include "p2p/base/ice_credentials_iterator.h" #include "api/jsep_ice_candidate.h" @@ -20,6 +19,9 @@ #include "TurnCustomizerImpl.h" #include "SctpDataChannelProviderInterfaceImpl.h" #include "StaticThreads.h" +#include "call/rtp_packet_sink_interface.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" namespace tgcalls { @@ -81,6 +83,7 @@ static void updateHeaderWithVoiceActivity(rtc::CopyOnWriteBuffer *packet, const } } +#if 0 // Currently unused. static void readHeaderVoiceActivity(const uint8_t* ptrRTPDataExtensionEnd, const uint8_t* ptr, bool &didRead, uint8_t &audioLevel, bool &voiceActivity) { while (ptrRTPDataExtensionEnd - ptr > 0) { // 0 @@ -124,6 +127,7 @@ static void readHeaderVoiceActivity(const uint8_t* ptrRTPDataExtensionEnd, const ptr += (len + 1); } } +#endif static void maybeUpdateRtpVoiceActivity(rtc::CopyOnWriteBuffer *packet, bool voiceActivity) { @@ -201,6 +205,7 @@ static void maybeUpdateRtpVoiceActivity(rtc::CopyOnWriteBuffer *packet, bool voi } } +#if 0 // Currently unused. 
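
Nearly every hunk in this part of the patch applies the same mechanical WebRTC threading-API migration: rtc::Thread::Invoke(RTC_FROM_HERE, ...) becomes BlockingCall(...), PostTask/PostDelayedTask drop the RTC_FROM_HERE location argument, and delays are passed as webrtc::TimeDelta values instead of raw millisecond integers. A minimal sketch of the pattern, assuming a current WebRTC checkout; the scheduleOnThread helper and the lambda bodies are illustrative, not code from this patch:

#include "api/units/time_delta.h"
#include "rtc_base/thread.h"

// Hypothetical helper demonstrating the old -> new task-posting calls.
void scheduleOnThread(rtc::Thread *thread) {
    // before: thread->Invoke<void>(RTC_FROM_HERE, [] { /* work */ });
    thread->BlockingCall([] { /* runs synchronously on |thread| */ });

    // before: thread->PostTask(RTC_FROM_HERE, [] { /* work */ });
    thread->PostTask([] { /* runs asynchronously on |thread| */ });

    // before: thread->PostDelayedTask(RTC_FROM_HERE, [] { /* work */ }, 1000);
    thread->PostDelayedTask([] { /* runs roughly one second later */ },
                            webrtc::TimeDelta::Millis(1000));
}
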
static void maybeReadRtpVoiceActivity(rtc::CopyOnWriteBuffer *packet, bool &didRead, uint32_t &ssrc, uint8_t &audioLevel, bool &voiceActivity) { const uint8_t *_ptrRTPDataBegin = packet->data(); const uint8_t *_ptrRTPDataEnd = packet->data() + packet->size(); @@ -276,15 +281,16 @@ static void maybeReadRtpVoiceActivity(rtc::CopyOnWriteBuffer *packet, bool &didR } } } +#endif class WrappedDtlsSrtpTransport : public webrtc::DtlsSrtpTransport { public: bool _voiceActivity = false; public: - WrappedDtlsSrtpTransport(bool rtcp_mux_enabled) : - webrtc::DtlsSrtpTransport(rtcp_mux_enabled) { - + WrappedDtlsSrtpTransport(bool rtcp_mux_enabled, const webrtc::WebRtcKeyValueConfig& fieldTrials, std::function &&processRtpPacket) : + webrtc::DtlsSrtpTransport(rtcp_mux_enabled, fieldTrials), + _processRtpPacket(std::move(processRtpPacket)) { } virtual ~WrappedDtlsSrtpTransport() { @@ -294,6 +300,13 @@ class WrappedDtlsSrtpTransport : public webrtc::DtlsSrtpTransport { maybeUpdateRtpVoiceActivity(packet, _voiceActivity); return webrtc::DtlsSrtpTransport::SendRtpPacket(packet, options, flags); } + + void ProcessRtpPacket(webrtc::RtpPacketReceived const &packet, bool isUnresolved) override { + _processRtpPacket(packet, isUnresolved); + } + +private: + std::function _processRtpPacket; }; webrtc::CryptoOptions GroupNetworkManager::getDefaulCryptoOptions() { @@ -304,15 +317,16 @@ webrtc::CryptoOptions GroupNetworkManager::getDefaulCryptoOptions() { } GroupNetworkManager::GroupNetworkManager( + const webrtc::WebRtcKeyValueConfig& fieldTrials, std::function stateUpdated, - std::function transportMessageReceived, + std::function unknownSsrcPacketReceived, std::function dataChannelStateUpdated, std::function dataChannelMessageReceived, std::function audioActivityUpdated, std::shared_ptr threads) : _threads(std::move(threads)), _stateUpdated(std::move(stateUpdated)), -_transportMessageReceived(std::move(transportMessageReceived)), +_unknownSsrcPacketReceived(std::move(unknownSsrcPacketReceived)), _dataChannelStateUpdated(dataChannelStateUpdated), _dataChannelMessageReceived(dataChannelMessageReceived), _audioActivityUpdated(audioActivityUpdated) { @@ -325,14 +339,16 @@ _audioActivityUpdated(audioActivityUpdated) { _networkMonitorFactory = PlatformInterface::SharedInstance()->createNetworkMonitorFactory(); _socketFactory.reset(new rtc::BasicPacketSocketFactory(_threads->getNetworkThread()->socketserver())); - _networkManager = std::make_unique(_networkMonitorFactory.get()); + _networkManager = std::make_unique(_networkMonitorFactory.get(), _threads->getNetworkThread()->socketserver()); _asyncResolverFactory = std::make_unique(); - _dtlsSrtpTransport = std::make_unique(true); + _dtlsSrtpTransport = std::make_unique(true, fieldTrials, [this](webrtc::RtpPacketReceived const &packet, bool isUnresolved) { + this->RtpPacketReceived_n(packet, isUnresolved); + }); _dtlsSrtpTransport->SetDtlsTransports(nullptr, nullptr); _dtlsSrtpTransport->SetActiveResetSrtpParams(false); _dtlsSrtpTransport->SignalReadyToSend.connect(this, &GroupNetworkManager::DtlsReadyToSend); - _dtlsSrtpTransport->SignalRtpPacketReceived.connect(this, &GroupNetworkManager::RtpPacketReceived_n); + //_dtlsSrtpTransport->SignalRtpPacketReceived.connect(this, &GroupNetworkManager::RtpPacketReceived_n); resetDtlsSrtpTransport(); } @@ -342,9 +358,9 @@ GroupNetworkManager::~GroupNetworkManager() { RTC_LOG(LS_INFO) << "GroupNetworkManager::~GroupNetworkManager()"; + _dataChannelInterface.reset(); _dtlsSrtpTransport.reset(); _dtlsTransport.reset(); - 
_dataChannelInterface.reset(); _transportChannel.reset(); _asyncResolverFactory.reset(); _portAllocator.reset(); @@ -353,19 +369,23 @@ GroupNetworkManager::~GroupNetworkManager() { } void GroupNetworkManager::resetDtlsSrtpTransport() { - _portAllocator.reset(new cricket::BasicPortAllocator(_networkManager.get(), _socketFactory.get(), _turnCustomizer.get(), nullptr)); - _portAllocator->set_flags(_portAllocator->flags()); - _portAllocator->Initialize(); + std::unique_ptr portAllocator = std::make_unique(_networkManager.get(), _socketFactory.get(), _turnCustomizer.get(), nullptr); + portAllocator->set_flags(portAllocator->flags()); + portAllocator->Initialize(); + + portAllocator->SetConfiguration({}, {}, 2, webrtc::NO_PRUNE, _turnCustomizer.get()); - _portAllocator->SetConfiguration({}, {}, 2, webrtc::NO_PRUNE, _turnCustomizer.get()); + webrtc::IceTransportInit iceTransportInit; + iceTransportInit.set_port_allocator(portAllocator.get()); + iceTransportInit.set_async_resolver_factory(_asyncResolverFactory.get()); - _transportChannel.reset(new cricket::P2PTransportChannel("transport", 0, _portAllocator.get(), _asyncResolverFactory.get(), nullptr)); + auto transportChannel = cricket::P2PTransportChannel::Create("transport", 0, std::move(iceTransportInit)); cricket::IceConfig iceConfig; iceConfig.continual_gathering_policy = cricket::GATHER_CONTINUALLY; iceConfig.prioritize_most_likely_candidate_pairs = true; iceConfig.regather_on_failed_networks_interval = 2000; - _transportChannel->SetIceConfig(iceConfig); + transportChannel->SetIceConfig(iceConfig); cricket::IceParameters localIceParameters( _localIceParameters.ufrag, @@ -373,26 +393,31 @@ void GroupNetworkManager::resetDtlsSrtpTransport() { false ); - _transportChannel->SetIceParameters(localIceParameters); + transportChannel->SetIceParameters(localIceParameters); const bool isOutgoing = false; - _transportChannel->SetIceRole(isOutgoing ? cricket::ICEROLE_CONTROLLING : cricket::ICEROLE_CONTROLLED); - _transportChannel->SetRemoteIceMode(cricket::ICEMODE_LITE); + transportChannel->SetIceRole(isOutgoing ? 
cricket::ICEROLE_CONTROLLING : cricket::ICEROLE_CONTROLLED); + transportChannel->SetRemoteIceMode(cricket::ICEMODE_LITE); - _transportChannel->SignalIceTransportStateChanged.connect(this, &GroupNetworkManager::transportStateChanged); - _transportChannel->SignalReadPacket.connect(this, &GroupNetworkManager::transportPacketReceived); + transportChannel->SignalIceTransportStateChanged.connect(this, &GroupNetworkManager::transportStateChanged); + transportChannel->SignalReadPacket.connect(this, &GroupNetworkManager::transportPacketReceived); webrtc::CryptoOptions cryptoOptions = GroupNetworkManager::getDefaulCryptoOptions(); - _dtlsTransport.reset(new cricket::DtlsTransport(_transportChannel.get(), cryptoOptions, nullptr)); - _dtlsTransport->SignalWritableState.connect( + auto dtlsTransport = std::make_unique(transportChannel.get(), cryptoOptions, nullptr); + + dtlsTransport->SignalWritableState.connect( this, &GroupNetworkManager::OnTransportWritableState_n); - _dtlsTransport->SignalReceivingState.connect( + dtlsTransport->SignalReceivingState.connect( this, &GroupNetworkManager::OnTransportReceivingState_n); - _dtlsTransport->SetDtlsRole(rtc::SSLRole::SSL_SERVER); - _dtlsTransport->SetLocalCertificate(_localCertificate); + dtlsTransport->SetDtlsRole(rtc::SSLRole::SSL_SERVER); + dtlsTransport->SetLocalCertificate(_localCertificate); + + _dtlsSrtpTransport->SetDtlsTransports(dtlsTransport.get(), nullptr); - _dtlsSrtpTransport->SetDtlsTransports(_dtlsTransport.get(), nullptr); + _dtlsTransport = std::move(dtlsTransport); + _transportChannel = std::move(transportChannel); + _portAllocator = std::move(portAllocator); } void GroupNetworkManager::start() { @@ -445,12 +470,7 @@ void GroupNetworkManager::stop() { _dtlsTransport->SignalWritableState.disconnect(this); _dtlsTransport->SignalReceivingState.disconnect(this); - _dtlsSrtpTransport->SetDtlsTransports(nullptr, nullptr); - _dataChannelInterface.reset(); - _dtlsTransport.reset(); - _transportChannel.reset(); - _portAllocator.reset(); _localIceParameters = PeerIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::CreateRandomString(cricket::ICE_PWD_LENGTH), false); @@ -509,7 +529,7 @@ webrtc::RtpTransport *GroupNetworkManager::getRtpTransport() { void GroupNetworkManager::checkConnectionTimeout() { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getNetworkThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + _threads->getNetworkThread()->PostDelayedTask([weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -526,7 +546,7 @@ void GroupNetworkManager::checkConnectionTimeout() { } strong->checkConnectionTimeout(); - }, 1000); + }, webrtc::TimeDelta::Millis(1000)); } void GroupNetworkManager::candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate) { @@ -553,7 +573,7 @@ void GroupNetworkManager::DtlsReadyToSend(bool isReadyToSend) { if (isReadyToSend) { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getNetworkThread()->PostTask(RTC_FROM_HERE, [weak]() { + _threads->getNetworkThread()->PostTask([weak]() { const auto strong = weak.lock(); if (!strong) { return; @@ -577,20 +597,23 @@ void GroupNetworkManager::transportPacketReceived(rtc::PacketTransportInternal * _lastNetworkActivityMs = rtc::TimeMillis(); } -void GroupNetworkManager::RtpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, int64_t packet_time_us, bool isUnresolved) { - bool didRead = false; - uint32_t ssrc = 0; - uint8_t audioLevel = 0; - bool isSpeech = false; - 
maybeReadRtpVoiceActivity(packet, didRead, ssrc, audioLevel, isSpeech); - if (didRead && ssrc != 0) { - if (_audioActivityUpdated) { - _audioActivityUpdated(ssrc, audioLevel, isSpeech); +void GroupNetworkManager::RtpPacketReceived_n(webrtc::RtpPacketReceived const &packet, bool isUnresolved) { + if (packet.HasExtension(webrtc::kRtpExtensionAudioLevel)) { + uint8_t audioLevel = 0; + bool isSpeech = false; + + if (packet.GetExtension(&isSpeech, &audioLevel)) { + if (_audioActivityUpdated) { + _audioActivityUpdated(packet.Ssrc(), audioLevel, isSpeech); + } } } - if (_transportMessageReceived) { - _transportMessageReceived(*packet, isUnresolved); + if (isUnresolved && _unknownSsrcPacketReceived) { + uint32_t ssrc = packet.Ssrc(); + int payloadType = packet.PayloadType(); + + _unknownSsrcPacketReceived(ssrc, payloadType); } } diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.h b/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.h index 059ca4785e..5f68d5f70f 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.h +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.h @@ -14,6 +14,7 @@ #include "pc/sctp_transport.h" #include "rtc_base/ssl_fingerprint.h" #include "pc/sctp_data_channel.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include #include @@ -58,8 +59,9 @@ class GroupNetworkManager : public sigslot::has_slots<>, public std::enable_shar static webrtc::CryptoOptions getDefaulCryptoOptions(); GroupNetworkManager( + const webrtc::WebRtcKeyValueConfig& fieldTrials, std::function stateUpdated, - std::function transportMessageReceived, + std::function unknownSsrcPacketReceived, std::function dataChannelStateUpdated, std::function dataChannelMessageReceived, std::function audioActivityUpdated, @@ -92,7 +94,7 @@ class GroupNetworkManager : public sigslot::has_slots<>, public std::enable_shar void transportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t ×tamp, int unused); void DtlsReadyToSend(bool DtlsReadyToSend); void UpdateAggregateStates_n(); - void RtpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, int64_t packet_time_us, bool isUnresolved); + void RtpPacketReceived_n(webrtc::RtpPacketReceived const &packet, bool isUnresolved); void OnRtcpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, int64_t packet_time_us); void sctpReadyToSendData(); @@ -100,7 +102,7 @@ class GroupNetworkManager : public sigslot::has_slots<>, public std::enable_shar std::shared_ptr _threads; std::function _stateUpdated; - std::function _transportMessageReceived; + std::function _unknownSsrcPacketReceived; std::function _dataChannelStateUpdated; std::function _dataChannelMessageReceived; std::function _audioActivityUpdated; diff --git a/TMessagesProj/jni/voip/tgcalls/group/StreamingMediaContext.cpp b/TMessagesProj/jni/voip/tgcalls/group/StreamingMediaContext.cpp index 9b566735c0..6d8f74361c 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/StreamingMediaContext.cpp +++ b/TMessagesProj/jni/voip/tgcalls/group/StreamingMediaContext.cpp @@ -258,7 +258,7 @@ class StreamingMediaContextPrivate : public std::enable_shared_from_this(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + _threads->getMediaThread()->PostDelayedTask([weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -267,7 +267,7 @@ class StreamingMediaContextPrivate : public std::enable_shared_from_thisrender(); strong->beginRenderTimer((int)(1.0 * 1000.0 / 120.0)); - }, timeoutMs); + }, 
webrtc::TimeDelta::Millis(timeoutMs)); } void render() { @@ -596,7 +596,7 @@ class StreamingMediaContextPrivate : public std::enable_shared_from_this(shared_from_this()); _pendingRequestTimeTask = _requestCurrentTime([weak, threads = _threads](int64_t timestamp) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, timestamp]() { + threads->getMediaThread()->PostTask([weak, timestamp]() { auto strong = weak.lock(); if (!strong) { return; @@ -614,7 +614,7 @@ class StreamingMediaContextPrivate : public std::enable_shared_from_this_pendingRequestTimeDelayTaskId = taskId; strong->_nextPendingRequestTimeDelayTaskId++; - strong->_threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak, taskId]() { + strong->_threads->getMediaThread()->PostDelayedTask([weak, taskId]() { auto strong = weak.lock(); if (!strong) { return; @@ -626,7 +626,7 @@ class StreamingMediaContextPrivate : public std::enable_shared_from_this_pendingRequestTimeDelayTaskId = 0; strong->requestSegmentsIfNeeded(); - }, 1000); + }, webrtc::TimeDelta::Millis(1000)); } else { strong->_nextSegmentTimestamp = adjustedTimestamp; strong->requestSegmentsIfNeeded(); @@ -794,7 +794,7 @@ class StreamingMediaContextPrivate : public std::enable_shared_from_this(part); std::function handleResult = [weak, weakSegment, weakPart, threads = _threads, segmentTimestamp](BroadcastPart &&part) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, weakSegment, weakPart, part = std::move(part), segmentTimestamp]() mutable { + threads->getMediaThread()->PostTask([weak, weakSegment, weakPart, part = std::move(part), segmentTimestamp]() mutable { auto strong = weak.lock(); if (!strong) { return; @@ -909,13 +909,13 @@ class StreamingMediaContextPrivate : public std::enable_shared_from_this(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + _threads->getMediaThread()->PostDelayedTask([weak]() { auto strong = weak.lock(); if (!strong) { return; } strong->checkPendingSegments(); - }, std::max((int32_t)minDelayedRequestTimeout, 10)); + }, webrtc::TimeDelta::Millis(std::max((int32_t)minDelayedRequestTimeout, 10))); } if (shouldRequestMoreSegments) { @@ -928,7 +928,7 @@ class StreamingMediaContextPrivate : public std::enable_shared_from_this(part); std::function handleResult = [weak, weakPart, threads = _threads, completion](BroadcastPart &&part) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, weakPart, part = std::move(part), completion]() mutable { + threads->getMediaThread()->PostTask([weak, weakPart, part = std::move(part), completion]() mutable { auto strong = weak.lock(); if (!strong) { return; diff --git a/TMessagesProj/jni/voip/tgcalls/platform/PlatformInterface.h b/TMessagesProj/jni/voip/tgcalls/platform/PlatformInterface.h index 234a91a4bb..2f6e11ea27 100644 --- a/TMessagesProj/jni/voip/tgcalls/platform/PlatformInterface.h +++ b/TMessagesProj/jni/voip/tgcalls/platform/PlatformInterface.h @@ -311,7 +311,7 @@ class PlatformInterface { virtual void adaptVideoSource(rtc::scoped_refptr videoSource, int width, int height, int fps) = 0; virtual std::unique_ptr makeVideoCapturer(rtc::scoped_refptr source, std::string deviceId, std::function stateUpdated, std::function captureInfoUpdated, std::shared_ptr platformContext, std::pair &outResolution) = 0; virtual rtc::scoped_refptr wrapAudioDeviceModule(rtc::scoped_refptr module) { - return new rtc::RefCountedObject(module); + return rtc::make_ref_counted(module); } public: diff --git 
a/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidContext.cpp b/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidContext.cpp index 7cf031dc87..3feb47ee7e 100644 --- a/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidContext.cpp +++ b/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidContext.cpp @@ -6,9 +6,12 @@ namespace tgcalls { AndroidContext::AndroidContext(JNIEnv *env, jobject instance, bool screencast) { + DEBUG_REF("VideoCapturerDevice"); VideoCapturerDeviceClass = (jclass) env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/VideoCapturerDevice")); jmethodID initMethodId = env->GetMethodID(VideoCapturerDeviceClass, "", "(Z)V"); + DEBUG_REF("VideoCapturerDevice javaCapturer"); javaCapturer = env->NewGlobalRef(env->NewObject(VideoCapturerDeviceClass, initMethodId, screencast)); + DEBUG_REF("VideoCapturerDevice javaInstance"); javaInstance = env->NewGlobalRef(instance); } @@ -17,17 +20,21 @@ AndroidContext::~AndroidContext() { jmethodID onDestroyMethodId = env->GetMethodID(VideoCapturerDeviceClass, "onDestroy", "()V"); env->CallVoidMethod(javaCapturer, onDestroyMethodId); + DEBUG_DELREF("javaCapturer"); env->DeleteGlobalRef(javaCapturer); javaCapturer = nullptr; + DEBUG_DELREF("VideoCapturerDeviceClass"); env->DeleteGlobalRef(VideoCapturerDeviceClass); if (javaInstance) { + DEBUG_DELREF("javaInstance"); env->DeleteGlobalRef(javaInstance); } } void AndroidContext::setJavaInstance(JNIEnv *env, jobject instance) { + DEBUG_REF("setJavaInstance"); javaInstance = env->NewGlobalRef(instance); } diff --git a/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidInterface.cpp b/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidInterface.cpp index 85067628ea..307bd02f6f 100644 --- a/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidInterface.cpp +++ b/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidInterface.cpp @@ -60,7 +60,7 @@ void AndroidInterface::adaptVideoSource(rtc::scoped_refptr AndroidInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread, bool screencapture) { JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); _source[screencapture ? 1 : 0] = webrtc::CreateJavaVideoSource(env, signalingThread, false, false); - return webrtc::CreateVideoTrackSourceProxy(signalingThread, workerThread, _source[screencapture ? 1 : 0]); + return webrtc::CreateVideoTrackSourceProxy(signalingThread, workerThread, _source[screencapture ? 
1 : 0].get()); } bool AndroidInterface::supportsEncoding(const std::string &codecName, std::shared_ptr platformContext) { diff --git a/TMessagesProj/jni/voip/tgcalls/utils/gzip.cpp b/TMessagesProj/jni/voip/tgcalls/utils/gzip.cpp new file mode 100644 index 0000000000..2ae6e0708a --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/utils/gzip.cpp @@ -0,0 +1,183 @@ +#include "utils/gzip.h" + +#include + +#include "rtc_base/copy_on_write_buffer.h" + +namespace tgcalls { +namespace { + +using uint = decltype(z_stream::avail_in); + +} // namespace + +bool isGzip(std::vector const &data) { + if (data.size() < 2) { + return false; + } + + if ((data[0] == 0x1f && data[1] == 0x8b) || (data[0] == 0x78 && data[1] == 0x9c)) { + return true; + } else { + return false; + } +} + +absl::optional> gzipData(std::vector const &data) { + z_stream stream; + stream.zalloc = Z_NULL; + stream.zfree = Z_NULL; + stream.opaque = Z_NULL; + stream.avail_in = (uint)data.size(); + stream.next_in = (Bytef *)(void *)data.data(); + stream.total_out = 0; + stream.avail_out = 0; + + static const uint ChunkSize = 16384; + + std::vector output; + int compression = 9; + if (deflateInit2(&stream, compression, Z_DEFLATED, 31, 8, Z_DEFAULT_STRATEGY) == Z_OK) { + output.resize(ChunkSize); + + while (stream.avail_out == 0) { + if (stream.total_out >= output.size()) { + output.resize(output.size() + ChunkSize); + } + stream.next_out = (uint8_t *)output.data() + stream.total_out; + stream.avail_out = (uInt)(output.size() - stream.total_out); + deflate(&stream, Z_FINISH); + } + deflateEnd(&stream); + output.resize(stream.total_out); + } + + return output; +} + +absl::optional> gunzipData(std::vector const &data, size_t sizeLimit) { + if (!isGzip(data)) { + return absl::nullopt; + } + + z_stream stream; + stream.zalloc = Z_NULL; + stream.zfree = Z_NULL; + stream.avail_in = (uint)data.size(); + stream.next_in = (Bytef *)data.data(); + stream.total_out = 0; + stream.avail_out = 0; + + std::vector output; + if (inflateInit2(&stream, 47) == Z_OK) { + int status = Z_OK; + output.resize(data.size() * 2); + while (status == Z_OK) { + if (sizeLimit > 0 && stream.total_out > sizeLimit) { + return absl::nullopt; + } + + if (stream.total_out >= output.size()) { + output.resize(output.size() + data.size() / 2); + } + stream.next_out = (uint8_t *)output.data() + stream.total_out; + stream.avail_out = (uInt)(output.size() - stream.total_out); + status = inflate(&stream, Z_SYNC_FLUSH); + } + if (inflateEnd(&stream) == Z_OK) { + if (status == Z_STREAM_END) { + output.resize(stream.total_out); + } else if (sizeLimit > 0 && output.size() > sizeLimit) { + return absl::nullopt; + } + } + } + + return output; +} + +} + +/*bool TGIsGzippedData(NSData *data) { + const UInt8 *bytes = (const UInt8 *)data.bytes; + return data.length >= 2 && ((bytes[0] == 0x1f && bytes[1] == 0x8b) || (bytes[0] == 0x78 && bytes[1] == 0x9c)); +} + +NSData *TGGZipData(NSData *data, float level) { + if (data.length == 0 || TGIsGzippedData(data)) { + return data; + } + + z_stream stream; + stream.zalloc = Z_NULL; + stream.zfree = Z_NULL; + stream.opaque = Z_NULL; + stream.avail_in = (uint)data.length; + stream.next_in = (Bytef *)(void *)data.bytes; + stream.total_out = 0; + stream.avail_out = 0; + + static const NSUInteger ChunkSize = 16384; + + NSMutableData *output = nil; + int compression = (level < 0.0f) ? 
Z_DEFAULT_COMPRESSION : (int)(roundf(level * 9)); + if (deflateInit2(&stream, compression, Z_DEFLATED, 31, 8, Z_DEFAULT_STRATEGY) == Z_OK) { + output = [NSMutableData dataWithLength:ChunkSize]; + while (stream.avail_out == 0) { + if (stream.total_out >= output.length) { + output.length += ChunkSize; + } + stream.next_out = (uint8_t *)output.mutableBytes + stream.total_out; + stream.avail_out = (uInt)(output.length - stream.total_out); + deflate(&stream, Z_FINISH); + } + deflateEnd(&stream); + output.length = stream.total_out; + } + + return output; +} + +NSData * _Nullable TGGUnzipData(NSData *data, uint sizeLimit) +{ + if (data.length == 0 || !TGIsGzippedData(data)) { + return nil; + } + + z_stream stream; + stream.zalloc = Z_NULL; + stream.zfree = Z_NULL; + stream.avail_in = (uint)data.length; + stream.next_in = (Bytef *)data.bytes; + stream.total_out = 0; + stream.avail_out = 0; + + NSMutableData *output = nil; + if (inflateInit2(&stream, 47) == Z_OK) { + int status = Z_OK; + output = [NSMutableData dataWithCapacity:data.length * 2]; + while (status == Z_OK) { + if (sizeLimit > 0 && stream.total_out > sizeLimit) { + return nil; + } + + if (stream.total_out >= output.length) { + output.length = output.length + data.length / 2; + } + stream.next_out = (uint8_t *)output.mutableBytes + stream.total_out; + stream.avail_out = (uInt)(output.length - stream.total_out); + status = inflate(&stream, Z_SYNC_FLUSH); + } + if (inflateEnd(&stream) == Z_OK) { + if (status == Z_STREAM_END) { + output.length = stream.total_out; + } else if (sizeLimit > 0 && output.length > sizeLimit) { + return nil; + } + } + } + + return output; +} + +*/ diff --git a/TMessagesProj/jni/voip/tgcalls/utils/gzip.h b/TMessagesProj/jni/voip/tgcalls/utils/gzip.h new file mode 100644 index 0000000000..1746cd984c --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/utils/gzip.h @@ -0,0 +1,16 @@ +#ifndef TGCALLS_UTILS_GZIP_H +#define TGCALLS_UTILS_GZIP_H + +#include +#include +#include + +namespace tgcalls { + +bool isGzip(std::vector const &data); +absl::optional> gzipData(std::vector const &data); +absl::optional> gunzipData(std::vector const &data, size_t sizeLimit); + +} + +#endif // TGCALLS_UTILS_GZIP_H diff --git a/TMessagesProj/jni/voip/tgcalls/v2/ContentNegotiation.cpp b/TMessagesProj/jni/voip/tgcalls/v2/ContentNegotiation.cpp index 4ee37c2498..726b1fbfa2 100644 --- a/TMessagesProj/jni/voip/tgcalls/v2/ContentNegotiation.cpp +++ b/TMessagesProj/jni/voip/tgcalls/v2/ContentNegotiation.cpp @@ -1,6 +1,7 @@ #include "v2/ContentNegotiation.h" #include "rtc_base/rtc_certificate_generator.h" +#include "media/base/media_engine.h" #include @@ -195,10 +196,10 @@ std::string contentIdBySsrc(uint32_t ssrc) { } -ContentNegotiationContext::ContentNegotiationContext(bool isOutgoing, rtc::UniqueRandomIdGenerator *uniqueRandomIdGenerator) : +ContentNegotiationContext::ContentNegotiationContext(const webrtc::WebRtcKeyValueConfig& fieldTrials, bool isOutgoing, rtc::UniqueRandomIdGenerator *uniqueRandomIdGenerator) : _isOutgoing(isOutgoing), _uniqueRandomIdGenerator(uniqueRandomIdGenerator) { - _transportDescriptionFactory = std::make_unique(); + _transportDescriptionFactory = std::make_unique(fieldTrials); // tempCertificate is only used to fill in the local SDP auto tempCertificate = rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(rtc::KT_ECDSA), absl::nullopt); @@ -214,16 +215,11 @@ ContentNegotiationContext::~ContentNegotiationContext() { } -void ContentNegotiationContext::copyCodecsFromChannelManager(cricket::ChannelManager 
*channelManager, bool randomize) { - cricket::AudioCodecs audioSendCodecs; - cricket::AudioCodecs audioRecvCodecs; - cricket::VideoCodecs videoSendCodecs; - cricket::VideoCodecs videoRecvCodecs; - - channelManager->GetSupportedAudioSendCodecs(&audioSendCodecs); - channelManager->GetSupportedAudioReceiveCodecs(&audioRecvCodecs); - channelManager->GetSupportedVideoSendCodecs(&videoSendCodecs); - channelManager->GetSupportedVideoReceiveCodecs(&videoRecvCodecs); +void ContentNegotiationContext::copyCodecsFromChannelManager(cricket::MediaEngineInterface *mediaEngine, bool randomize) { + cricket::AudioCodecs audioSendCodecs = mediaEngine->voice().send_codecs(); + cricket::AudioCodecs audioRecvCodecs = mediaEngine->voice().recv_codecs(); + cricket::VideoCodecs videoSendCodecs = mediaEngine->video().send_codecs(); + cricket::VideoCodecs videoRecvCodecs = mediaEngine->video().recv_codecs(); for (const auto &codec : audioSendCodecs) { if (codec.name == "opus") { diff --git a/TMessagesProj/jni/voip/tgcalls/v2/ContentNegotiation.h b/TMessagesProj/jni/voip/tgcalls/v2/ContentNegotiation.h index 53e16d73e1..6eafec9318 100644 --- a/TMessagesProj/jni/voip/tgcalls/v2/ContentNegotiation.h +++ b/TMessagesProj/jni/voip/tgcalls/v2/ContentNegotiation.h @@ -3,7 +3,7 @@ #include -#include "pc/channel_manager.h" +#include "media/base/media_engine.h" #include "pc/media_session.h" #include "pc/session_description.h" #include "p2p/base/transport_description_factory.h" @@ -49,10 +49,10 @@ class ContentNegotiationContext { }; public: - ContentNegotiationContext(bool isOutgoing, rtc::UniqueRandomIdGenerator *uniqueRandomIdGenerator); + ContentNegotiationContext(const webrtc::WebRtcKeyValueConfig& fieldTrials, bool isOutgoing, rtc::UniqueRandomIdGenerator *uniqueRandomIdGenerator); ~ContentNegotiationContext(); - void copyCodecsFromChannelManager(cricket::ChannelManager *channelManager, bool randomize); + void copyCodecsFromChannelManager(cricket::MediaEngineInterface *mediaEngine, bool randomize); std::string addOutgoingChannel(signaling::MediaContent::Type mediaType); void removeOutgoingChannel(std::string const &id); diff --git a/TMessagesProj/jni/voip/tgcalls/v2/ExternalSignalingConnection.cpp b/TMessagesProj/jni/voip/tgcalls/v2/ExternalSignalingConnection.cpp new file mode 100644 index 0000000000..d28c94dadf --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/ExternalSignalingConnection.cpp @@ -0,0 +1,30 @@ +#include "v2/ExternalSignalingConnection.h" + +namespace tgcalls { + +namespace { + +} + +ExternalSignalingConnection::ExternalSignalingConnection(std::function &)> onIncomingData, std::function &)> emitData) : +_onIncomingData(onIncomingData), +_emitData(emitData) { +} + +ExternalSignalingConnection::~ExternalSignalingConnection() { + +} + +void ExternalSignalingConnection::start() { + +} + +void ExternalSignalingConnection::send(const std::vector &data) { + _emitData(data); +} + +void ExternalSignalingConnection::receiveExternal(const std::vector &data) { + _onIncomingData(data); +} + +} diff --git a/TMessagesProj/jni/voip/tgcalls/v2/ExternalSignalingConnection.h b/TMessagesProj/jni/voip/tgcalls/v2/ExternalSignalingConnection.h new file mode 100644 index 0000000000..51240979ef --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/ExternalSignalingConnection.h @@ -0,0 +1,32 @@ +#ifndef TGCALLS_EXTERNAL_SIGNALING_CONNECTION_H_ +#define TGCALLS_EXTERNAL_SIGNALING_CONNECTION_H_ + +#include +#include + +#include "SignalingConnection.h" + +namespace rtc { +class AsyncPacketSocket; +} + +namespace tgcalls { + +class 
ExternalSignalingConnection : public SignalingConnection { +public: + ExternalSignalingConnection(std::function &)> onIncomingData, std::function &)> emitData); + virtual ~ExternalSignalingConnection(); + + virtual void start() override; + + virtual void send(const std::vector &data) override; + virtual void receiveExternal(const std::vector &data) override; + +private: + std::function &)> _onIncomingData; + std::function &)> _emitData; +}; + +} // namespace tgcalls + +#endif // TGCALLS_EXTERNAL_SIGNALING_CONNECTION_H_ diff --git a/TMessagesProj/jni/voip/tgcalls/v2/InstanceV2Impl.cpp b/TMessagesProj/jni/voip/tgcalls/v2/InstanceV2Impl.cpp index c3752ba0c7..59170a4b49 100644 --- a/TMessagesProj/jni/voip/tgcalls/v2/InstanceV2Impl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/v2/InstanceV2Impl.cpp @@ -25,7 +25,7 @@ #include "api/call/audio_sink.h" #include "modules/audio_processing/audio_buffer.h" #include "absl/strings/match.h" -#include "pc/channel_manager.h" +#include "pc/channel.h" #include "audio/audio_state.h" #include "modules/audio_coding/neteq/default_neteq_factory.h" #include "modules/audio_coding/include/audio_coding_module.h" @@ -52,28 +52,53 @@ #include #include +#include "FieldTrialsConfig.h" + #include "third-party/json11.hpp" +#include "ChannelManager.h" +#include "SignalingConnection.h" +#include "ExternalSignalingConnection.h" +#include "SignalingSctpConnection.h" +#include "utils/gzip.h" + namespace tgcalls { namespace { enum class SignalingProtocolVersion { V1, - V2 + V2, + V3 }; SignalingProtocolVersion signalingProtocolVersion(std::string const &version) { - if (version == "4.0.1") { + if (version == "7.0.0") { return SignalingProtocolVersion::V1; - } else if (version == "4.0.2") { + } else if (version == "8.0.0") { return SignalingProtocolVersion::V2; + } else if (version == "9.0.0") { + return SignalingProtocolVersion::V3; } else { RTC_LOG(LS_ERROR) << "signalingProtocolVersion: unknown version " << version; - + return SignalingProtocolVersion::V2; } } +bool signalingProtocolSupportsCompression(SignalingProtocolVersion version) { + switch (version) { + case SignalingProtocolVersion::V1: + case SignalingProtocolVersion::V2: + return false; + case SignalingProtocolVersion::V3: + return true; + default: + RTC_DCHECK_NOTREACHED(); + break; + } + return false; +} + static VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(VideoCaptureInterface *videoCapture) { return videoCapture ? 
static_cast(videoCapture)->object()->getSyncAssumingSameThread() @@ -84,7 +109,7 @@ class OutgoingAudioChannel : public sigslot::has_slots<> { public: OutgoingAudioChannel( webrtc::Call *call, - cricket::ChannelManager *channelManager, + ChannelManager *channelManager, rtc::UniqueRandomIdGenerator *uniqueRandomIdGenerator, webrtc::LocalAudioSinkAdapter *audioSource, webrtc::RtpTransport *rtpTransport, @@ -104,22 +129,23 @@ class OutgoingAudioChannel : public sigslot::has_slots<> { audioOptions.noise_suppression = false; audioOptions.auto_gain_control = false; audioOptions.highpass_filter = false; - audioOptions.typing_detection = false; - audioOptions.experimental_agc = false; - audioOptions.experimental_ns = false; - audioOptions.residual_echo_detector = false; + //audioOptions.typing_detection = false; + //audioOptions.residual_echo_detector = false; } else { audioOptions.echo_cancellation = true; audioOptions.noise_suppression = true; } - + std::ostringstream contentId; contentId << _ssrc; std::vector streamIds; streamIds.push_back(contentId.str()); - _outgoingAudioChannel = _channelManager->CreateVoiceChannel(call, cricket::MediaConfig(), rtpTransport, threads->getMediaThread(), contentId.str(), false, NativeNetworkingImpl::getDefaulCryptoOptions(), uniqueRandomIdGenerator, audioOptions); + _outgoingAudioChannel = _channelManager->CreateVoiceChannel(call, cricket::MediaConfig(), contentId.str(), false, NativeNetworkingImpl::getDefaulCryptoOptions(), audioOptions); + _threads->getNetworkThread()->BlockingCall([&]() { + _outgoingAudioChannel->SetRtpTransport(rtpTransport); + }); std::vector codecs; for (const auto &payloadType : mediaContent.payloadTypes) { @@ -128,13 +154,13 @@ class OutgoingAudioChannel : public sigslot::has_slots<> { codec.SetParam(cricket::kCodecParamUseInbandFec, 1); codec.SetParam(cricket::kCodecParamPTime, 60); - + for (const auto &feedbackType : payloadType.feedbackTypes) { codec.AddFeedbackParam(cricket::FeedbackParam(feedbackType.type, feedbackType.subtype)); } codecs.push_back(std::move(codec)); - + break; } } @@ -160,10 +186,11 @@ class OutgoingAudioChannel : public sigslot::has_slots<> { incomingAudioDescription->set_codecs(codecs); incomingAudioDescription->set_bandwidth(-1); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + _threads->getWorkerThread()->BlockingCall([&]() { _outgoingAudioChannel->SetPayloadTypeDemuxingEnabled(false); - _outgoingAudioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr); - _outgoingAudioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr); + std::string errorDesc; + _outgoingAudioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, errorDesc); + _outgoingAudioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, errorDesc); }); setIsMuted(false); @@ -171,7 +198,10 @@ class OutgoingAudioChannel : public sigslot::has_slots<> { ~OutgoingAudioChannel() { _outgoingAudioChannel->Enable(false); - _channelManager->DestroyVoiceChannel(_outgoingAudioChannel); + _threads->getNetworkThread()->BlockingCall([&]() { + _outgoingAudioChannel->SetRtpTransport(nullptr); + }); + _channelManager->DestroyChannel(_outgoingAudioChannel); _outgoingAudioChannel = nullptr; } @@ -180,27 +210,27 @@ class OutgoingAudioChannel : public sigslot::has_slots<> { _isMuted = isMuted; _outgoingAudioChannel->Enable(!_isMuted); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + 
_threads->getWorkerThread()->BlockingCall([&]() { _outgoingAudioChannel->media_channel()->SetAudioSend(_ssrc, !_isMuted, nullptr, _audioSource); }); } } - + uint32_t ssrc() const { return _ssrc; } - + void setMaxBitrate(int bitrate) { - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + _threads->getWorkerThread()->BlockingCall([&]() { webrtc::RtpParameters initialParameters = _outgoingAudioChannel->media_channel()->GetRtpSendParameters(_ssrc); webrtc::RtpParameters updatedParameters = initialParameters; - + if (updatedParameters.encodings.empty()) { updatedParameters.encodings.push_back(webrtc::RtpEncodingParameters()); } - + updatedParameters.encodings[0].max_bitrate_bps = bitrate; - + if (initialParameters != updatedParameters) { _outgoingAudioChannel->media_channel()->SetRtpSendParameters(_ssrc, updatedParameters); } @@ -216,7 +246,7 @@ class OutgoingAudioChannel : public sigslot::has_slots<> { std::shared_ptr _threads; uint32_t _ssrc = 0; webrtc::Call *_call = nullptr; - cricket::ChannelManager *_channelManager = nullptr; + ChannelManager *_channelManager = nullptr; webrtc::LocalAudioSinkAdapter *_audioSource = nullptr; cricket::VoiceChannel *_outgoingAudioChannel = nullptr; @@ -226,12 +256,13 @@ class OutgoingAudioChannel : public sigslot::has_slots<> { class IncomingV2AudioChannel : public sigslot::has_slots<> { public: IncomingV2AudioChannel( - cricket::ChannelManager *channelManager, + ChannelManager *channelManager, webrtc::Call *call, webrtc::RtpTransport *rtpTransport, rtc::UniqueRandomIdGenerator *randomIdGenerator, signaling::MediaContent const &mediaContent, std::shared_ptr threads) : + _threads(threads), _ssrc(mediaContent.ssrc), _channelManager(channelManager), _call(call) { @@ -240,13 +271,18 @@ class IncomingV2AudioChannel : public sigslot::has_slots<> { cricket::AudioOptions audioOptions; audioOptions.audio_jitter_buffer_fast_accelerate = true; audioOptions.audio_jitter_buffer_min_delay_ms = 50; - + std::ostringstream contentId; contentId << _ssrc; std::string streamId = contentId.str(); - _audioChannel = _channelManager->CreateVoiceChannel(call, cricket::MediaConfig(), rtpTransport, threads->getMediaThread(), contentId.str(), false, NativeNetworkingImpl::getDefaulCryptoOptions(), randomIdGenerator, audioOptions); + _audioChannel = _channelManager->CreateVoiceChannel(call, cricket::MediaConfig(), contentId.str(), false, NativeNetworkingImpl::getDefaulCryptoOptions(), audioOptions); + _threads->getNetworkThread()->BlockingCall([&]() { + _audioChannel->SetRtpTransport(rtpTransport); + }); + + std::vector codecs; for (const auto &payloadType : mediaContent.payloadTypes) { @@ -264,6 +300,7 @@ class IncomingV2AudioChannel : public sigslot::has_slots<> { for (const auto &rtpExtension : mediaContent.rtpExtensions) { outgoingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(rtpExtension.uri, rtpExtension.id)); } + outgoingAudioDescription->set_rtcp_mux(true); outgoingAudioDescription->set_rtcp_reduced_size(true); outgoingAudioDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly); @@ -283,24 +320,30 @@ class IncomingV2AudioChannel : public sigslot::has_slots<> { streamParams.set_stream_ids({ streamId }); incomingAudioDescription->AddStream(streamParams); - threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + threads->getWorkerThread()->BlockingCall([&]() { _audioChannel->SetPayloadTypeDemuxingEnabled(false); - _audioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr); - 
_audioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr); + std::string errorDesc; + _audioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, errorDesc); + _audioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, errorDesc); }); outgoingAudioDescription.reset(); incomingAudioDescription.reset(); + //std::unique_ptr audioLevelSink(new AudioSinkImpl(onAudioLevelUpdated, _ssrc, std::move(onAudioFrame))); //_audioChannel->media_channel()->SetRawAudioSink(ssrc.networkSsrc, std::move(audioLevelSink)); _audioChannel->Enable(true); + } ~IncomingV2AudioChannel() { _audioChannel->Enable(false); - _channelManager->DestroyVoiceChannel(_audioChannel); + _threads->getNetworkThread()->BlockingCall([&]() { + _audioChannel->SetRtpTransport(nullptr); + }); + _channelManager->DestroyChannel(_audioChannel); _audioChannel = nullptr; } @@ -315,7 +358,7 @@ class IncomingV2AudioChannel : public sigslot::has_slots<> { int64_t getActivity() { return _activityTimestamp; } - + uint32_t ssrc() const { return _ssrc; } @@ -326,11 +369,12 @@ class IncomingV2AudioChannel : public sigslot::has_slots<> { } private: + std::shared_ptr _threads; uint32_t _ssrc = 0; // Memory is managed by _channelManager cricket::VoiceChannel *_audioChannel = nullptr; // Memory is managed externally - cricket::ChannelManager *_channelManager = nullptr; + ChannelManager *_channelManager = nullptr; webrtc::Call *_call = nullptr; int64_t _creationTimestamp = 0; int64_t _activityTimestamp = 0; @@ -340,7 +384,7 @@ class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_sha public: OutgoingVideoChannel( std::shared_ptr threads, - cricket::ChannelManager *channelManager, + ChannelManager *channelManager, webrtc::Call *call, webrtc::RtpTransport *rtpTransport, rtc::UniqueRandomIdGenerator *randomIdGenerator, @@ -356,11 +400,14 @@ class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_sha _rotationUpdated(rotationUpdated) { cricket::VideoOptions videoOptions; videoOptions.is_screencast = isScreencast; - + std::ostringstream contentId; contentId << mediaContent.ssrc; - - _outgoingVideoChannel = _channelManager->CreateVideoChannel(call, cricket::MediaConfig(), rtpTransport, threads->getMediaThread(), contentId.str(), false, NativeNetworkingImpl::getDefaulCryptoOptions(), randomIdGenerator, videoOptions, videoBitrateAllocatorFactory); + + _outgoingVideoChannel = _channelManager->CreateVideoChannel(call, cricket::MediaConfig(), contentId.str(), false, NativeNetworkingImpl::getDefaulCryptoOptions(), videoOptions, videoBitrateAllocatorFactory); + _threads->getNetworkThread()->BlockingCall([&]() { + _outgoingVideoChannel->SetRtpTransport(rtpTransport); + }); std::vector unsortedCodecs; for (const auto &payloadType : mediaContent.payloadTypes) { @@ -373,14 +420,14 @@ class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_sha } unsortedCodecs.push_back(std::move(codec)); } - + std::vector codecPreferences = { #ifndef WEBRTC_DISABLE_H265 cricket::kH265CodecName, #endif cricket::kH264CodecName }; - + std::vector codecs; for (const auto &name : codecPreferences) { for (const auto &codec : unsortedCodecs) { @@ -433,13 +480,14 @@ class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_sha incomingVideoDescription->set_codecs(codecs); incomingVideoDescription->set_bandwidth(-1); - threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + 
threads->getWorkerThread()->BlockingCall([&]() { _outgoingVideoChannel->SetPayloadTypeDemuxingEnabled(false); - _outgoingVideoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr); - _outgoingVideoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr); + std::string errorDesc; + _outgoingVideoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, errorDesc); + _outgoingVideoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, errorDesc); webrtc::RtpParameters rtpParameters = _outgoingVideoChannel->media_channel()->GetRtpSendParameters(mediaContent.ssrc); - + if (isScreencast) { rtpParameters.degradation_preference = webrtc::DegradationPreference::MAINTAIN_RESOLUTION; } @@ -449,14 +497,17 @@ class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_sha _outgoingVideoChannel->Enable(false); - threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + threads->getWorkerThread()->BlockingCall([&]() { _outgoingVideoChannel->media_channel()->SetVideoSend(mediaContent.ssrc, NULL, nullptr); }); } ~OutgoingVideoChannel() { _outgoingVideoChannel->Enable(false); - _channelManager->DestroyVideoChannel(_outgoingVideoChannel); + _threads->getNetworkThread()->BlockingCall([&]() { + _outgoingVideoChannel->SetRtpTransport(nullptr); + }); + _channelManager->DestroyChannel(_outgoingVideoChannel); _outgoingVideoChannel = nullptr; } @@ -467,13 +518,13 @@ class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_sha _outgoingVideoChannel->Enable(true); auto videoCaptureImpl = GetVideoCaptureAssumingSameThread(_videoCapture.get()); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { - _outgoingVideoChannel->media_channel()->SetVideoSend(_mainSsrc, NULL, videoCaptureImpl->source()); + _threads->getWorkerThread()->BlockingCall([&]() { + _outgoingVideoChannel->media_channel()->SetVideoSend(_mainSsrc, NULL, videoCaptureImpl->source().get()); }); const auto weak = std::weak_ptr(shared_from_this()); videoCaptureImpl->setRotationUpdated([threads = _threads, weak](int angle) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -534,27 +585,27 @@ class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_sha _videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0; _outgoingVideoChannel->Enable(false); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + _threads->getWorkerThread()->BlockingCall([&]() { _outgoingVideoChannel->media_channel()->SetVideoSend(_mainSsrc, NULL, nullptr); }); } } - + uint32_t ssrc() const { return _mainSsrc; } - + void setMaxBitrate(int bitrate) { - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + _threads->getWorkerThread()->BlockingCall([&]() { webrtc::RtpParameters initialParameters = _outgoingVideoChannel->media_channel()->GetRtpSendParameters(_mainSsrc); webrtc::RtpParameters updatedParameters = initialParameters; - + if (updatedParameters.encodings.empty()) { updatedParameters.encodings.push_back(webrtc::RtpEncodingParameters()); } - + updatedParameters.encodings[0].max_bitrate_bps = bitrate; - + if (initialParameters != updatedParameters) { _outgoingVideoChannel->media_channel()->SetRtpSendParameters(_mainSsrc, updatedParameters); } @@ -580,7 +631,7 @@ class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_sha uint32_t 
_mainSsrc = 0; webrtc::Call *_call = nullptr; - cricket::ChannelManager *_channelManager = nullptr; + ChannelManager *_channelManager = nullptr; cricket::VideoChannel *_outgoingVideoChannel = nullptr; std::function _rotationUpdated; @@ -638,22 +689,26 @@ class VideoSinkImpl : public rtc::VideoSinkInterface { class IncomingV2VideoChannel : public sigslot::has_slots<> { public: IncomingV2VideoChannel( - cricket::ChannelManager *channelManager, + ChannelManager *channelManager, webrtc::Call *call, webrtc::RtpTransport *rtpTransport, rtc::UniqueRandomIdGenerator *randomIdGenerator, signaling::MediaContent const &mediaContent, std::shared_ptr threads) : + _threads(threads), _channelManager(channelManager), _call(call) { _videoSink.reset(new VideoSinkImpl()); _videoBitrateAllocatorFactory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory(); - + std::ostringstream contentId; contentId << mediaContent.ssrc; - _videoChannel = _channelManager->CreateVideoChannel(call, cricket::MediaConfig(), rtpTransport, threads->getMediaThread(), contentId.str(), false, NativeNetworkingImpl::getDefaulCryptoOptions(), randomIdGenerator, cricket::VideoOptions(), _videoBitrateAllocatorFactory.get()); + _videoChannel = _channelManager->CreateVideoChannel(call, cricket::MediaConfig(), contentId.str(), false, NativeNetworkingImpl::getDefaulCryptoOptions(), cricket::VideoOptions(), _videoBitrateAllocatorFactory.get()); + _threads->getNetworkThread()->BlockingCall([&]() { + _videoChannel->SetRtpTransport(rtpTransport); + }); std::vector codecs; for (const auto &payloadType : mediaContent.payloadTypes) { @@ -709,10 +764,11 @@ class IncomingV2VideoChannel : public sigslot::has_slots<> { incomingVideoDescription->AddStream(videoRecvStreamParams); - threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + threads->getWorkerThread()->BlockingCall([&]() { _videoChannel->SetPayloadTypeDemuxingEnabled(false); - _videoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr); - _videoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr); + std::string errorDesc; + _videoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, errorDesc); + _videoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, errorDesc); _videoChannel->media_channel()->SetSink(_mainVideoSsrc, _videoSink.get()); }); @@ -722,14 +778,17 @@ class IncomingV2VideoChannel : public sigslot::has_slots<> { ~IncomingV2VideoChannel() { _videoChannel->Enable(false); - _channelManager->DestroyVideoChannel(_videoChannel); + _threads->getNetworkThread()->BlockingCall([&]() { + _videoChannel->SetRtpTransport(nullptr); + }); + _channelManager->DestroyChannel(_videoChannel); _videoChannel = nullptr; } void addSink(std::weak_ptr> impl) { _videoSink->addSink(impl); } - + uint32_t ssrc() const { return _mainVideoSsrc; } @@ -740,13 +799,14 @@ class IncomingV2VideoChannel : public sigslot::has_slots<> { } private: + std::shared_ptr _threads; uint32_t _mainVideoSsrc = 0; std::unique_ptr _videoSink; std::unique_ptr _videoBitrateAllocatorFactory; // Memory is managed by _channelManager cricket::VideoChannel *_videoChannel; // Memory is managed externally - cricket::ChannelManager *_channelManager = nullptr; + ChannelManager *_channelManager = nullptr; webrtc::Call *_call = nullptr; }; @@ -754,7 +814,7 @@ template struct StateLogRecord { int64_t timestamp = 0; T record; - + explicit StateLogRecord(int32_t timestamp_, T &&record_) : timestamp(timestamp_), 
record(std::move(record_)) { @@ -766,7 +826,7 @@ struct NetworkStateLogRecord { bool isFailed = false; absl::optional route; absl::optional connection; - + bool operator==(NetworkStateLogRecord const &rhs) const { if (isConnected != rhs.isConnected) { return false; @@ -780,7 +840,7 @@ struct NetworkStateLogRecord { if (connection != rhs.connection) { return false; } - + return true; } }; @@ -802,7 +862,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this()), _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()), + _initialInputDeviceId(std::move(descriptor.initialInputDeviceId)), + _initialOutputDeviceId(std::move(descriptor.initialOutputDeviceId)), _videoCapture(descriptor.videoCapture), - _platformContext(descriptor.platformContext) { + _platformContext(descriptor.platformContext) { + webrtc::field_trial::InitFieldTrialsFromString( + "WebRTC-DataChannel-Dcsctp/Enabled/" + "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/" + "WebRTC-Audio-iOS-Holding/Enabled/" + "WebRTC-IceFieldTrials/skip_relay_to_non_relay_connections:true/" + ); } ~InstanceV2ImplInternal() { - _networking->perform(RTC_FROM_HERE, [](NativeNetworkingImpl *networking) { - networking->stop(); - }); - _incomingAudioChannel.reset(); _incomingVideoChannel.reset(); _incomingScreencastChannel.reset(); @@ -828,23 +892,66 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisgetWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { - _channelManager.reset(); + _channelManager.reset(); + + _threads->getWorkerThread()->BlockingCall([&]() { _call.reset(); _audioDeviceModule = nullptr; }); - + _contentNegotiationContext.reset(); - - _threads->getNetworkThread()->Invoke(RTC_FROM_HERE, []() { + + _networking->perform([](NativeNetworkingImpl *networking) { + networking->stop(); + }); + + _threads->getNetworkThread()->BlockingCall([]() { }); } void start() { _startTimestamp = rtc::TimeMillis(); - + const auto weak = std::weak_ptr(shared_from_this()); - + + if (_signalingProtocolVersion == SignalingProtocolVersion::V3) { + _signalingConnection = std::make_unique( + _threads, + [threads = _threads, weak](const std::vector &data) { + threads->getMediaThread()->PostTask([weak, data] { + const auto strong = weak.lock(); + if (!strong) { + return; + } + + strong->onSignalingData(data); + }); + }, + [signalingDataEmitted = _signalingDataEmitted](const std::vector &data) { + signalingDataEmitted(data); + } + ); + } + if (!_signalingConnection) { + _signalingConnection = std::make_unique( + [threads = _threads, weak](const std::vector &data) { + threads->getMediaThread()->PostTask([weak, data] { + const auto strong = weak.lock(); + if (!strong) { + return; + } + + strong->onSignalingData(data); + }); + }, + [signalingDataEmitted = _signalingDataEmitted](const std::vector &data) { + signalingDataEmitted(data); + } + ); + } + + _signalingConnection->start(); + absl::optional proxy; if (_proxy) { proxy = *(_proxy.get()); @@ -859,7 +966,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -868,7 +975,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -878,7 +985,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [=] { + 
threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -893,7 +1000,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this_call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, packet, timestamp); }, .dataChannelStateUpdated = [threads, weak](bool isDataChannelOpen) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -902,7 +1009,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -916,58 +1023,56 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisconfigurePlatformAudio(); - //setAudioInputDevice(_initialInputDeviceId); - //setAudioOutputDevice(_initialOutputDeviceId); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { - cricket::MediaEngineDependencies mediaDeps; - mediaDeps.task_queue_factory = _taskQueueFactory.get(); - mediaDeps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory(); - mediaDeps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory(); + _threads->getWorkerThread()->BlockingCall([&]() { + _audioDeviceModule = createAudioDeviceModule(); + }); - mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(_platformContext, true); - mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(_platformContext); + cricket::MediaEngineDependencies mediaDeps; + mediaDeps.task_queue_factory = _taskQueueFactory.get(); + mediaDeps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory(); + mediaDeps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory(); - _audioDeviceModule = createAudioDeviceModule(); - /*if (!_audioDeviceModule) { - return; - }*/ - mediaDeps.adm = _audioDeviceModule; + mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(_platformContext, true); + mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(_platformContext); - _availableVideoFormats = mediaDeps.video_encoder_factory->GetSupportedFormats(); + mediaDeps.adm = _audioDeviceModule; - std::unique_ptr mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps)); - _channelManager = cricket::ChannelManager::Create( - std::move(mediaEngine), - true, - _threads->getWorkerThread(), - _threads->getNetworkThread() - ); - webrtc::Call::Config callConfig(_eventLog.get(), _threads->getNetworkThread()); - callConfig.task_queue_factory = _taskQueueFactory.get(); - callConfig.trials = &_fieldTrials; - callConfig.audio_state = _channelManager->media_engine()->voice().GetAudioState(); + _availableVideoFormats = mediaDeps.video_encoder_factory->GetSupportedFormats(); - _call.reset(webrtc::Call::Create(callConfig, webrtc::Clock::GetRealTimeClock(), _threads->getSharedModuleThread(), webrtc::ProcessThread::Create("PacerThread"))); - }); + std::unique_ptr mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps)); - _uniqueRandomIdGenerator.reset(new rtc::UniqueRandomIdGenerator()); - - _contentNegotiationContext = std::make_unique(_encryptionKey.isOutgoing, _uniqueRandomIdGenerator.get()); - _contentNegotiationContext->copyCodecsFromChannelManager(_channelManager.get(), false); - - _outgoingAudioChannelId = 
_contentNegotiationContext->addOutgoingChannel(signaling::MediaContent::Type::Audio); - //_contentNegotiationContext->addOutgoingChannel(signaling::MediaContent::Type::Video); + _channelManager = ChannelManager::Create( + std::move(mediaEngine), + _threads->getWorkerThread(), + _threads->getNetworkThread() + ); + + webrtc::Call::Config callConfig(_eventLog.get(), _threads->getNetworkThread()); + callConfig.task_queue_factory = _taskQueueFactory.get(); + callConfig.trials = &fieldTrialsBasedConfig; - _threads->getNetworkThread()->Invoke(RTC_FROM_HERE, [this]() { + _threads->getNetworkThread()->BlockingCall([&]() { _rtpTransport = _networking->getSyncAssumingSameThread()->getRtpTransport(); }); + _threads->getWorkerThread()->BlockingCall([&]() { + callConfig.audio_state = _channelManager->media_engine()->voice().GetAudioState(); + _call.reset(webrtc::Call::Create(callConfig)); + }); + + _uniqueRandomIdGenerator.reset(new rtc::UniqueRandomIdGenerator()); + + _contentNegotiationContext = std::make_unique(fieldTrialsBasedConfig, _encryptionKey.isOutgoing, _uniqueRandomIdGenerator.get()); + _contentNegotiationContext->copyCodecsFromChannelManager(_channelManager->media_engine(), false); + + _outgoingAudioChannelId = _contentNegotiationContext->addOutgoingChannel(signaling::MediaContent::Type::Audio); + _videoBitrateAllocatorFactory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory(); - _networking->perform(RTC_FROM_HERE, [](NativeNetworkingImpl *networking) { + _networking->perform([](NativeNetworkingImpl *networking) { networking->start(); }); @@ -978,28 +1083,32 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + _threads->getMediaThread()->PostDelayedTask([weak]() { auto strong = weak.lock(); if (!strong) { return; } - - + + strong->beginQualityTimer(500); - }, delayMs); + }, webrtc::TimeDelta::Millis(delayMs)); } - + void beginLogTimer(int delayMs) { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + _threads->getMediaThread()->PostDelayedTask([weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -1008,31 +1117,31 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thiswriteStateLogRecords(); strong->beginLogTimer(1000); - }, delayMs); + }, webrtc::TimeDelta::Millis(delayMs)); } - + void writeStateLogRecords() { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getWorkerThread()->PostTask(RTC_FROM_HERE, [weak]() { + _threads->getWorkerThread()->PostTask([weak]() { auto strong = weak.lock(); if (!strong) { return; } - + auto stats = strong->_call->GetStats(); float sendBitrateKbps = ((float)stats.send_bandwidth_bps / 1000.0f); - - strong->_threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, sendBitrateKbps]() { + + strong->_threads->getMediaThread()->PostTask([weak, sendBitrateKbps]() { auto strong = weak.lock(); if (!strong) { return; } - + float bitrateNorm = 16.0f; if (strong->_outgoingVideoChannel) { bitrateNorm = 600.0f; } - + float signalBarsNorm = 4.0f; float adjustedQuality = sendBitrateKbps / bitrateNorm; adjustedQuality = fmaxf(0.0f, adjustedQuality); @@ -1040,10 +1149,10 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this_signalBarsUpdated) { strong->_signalBarsUpdated((int)(adjustedQuality * signalBarsNorm)); } - + NetworkBitrateLogRecord networkBitrateLogRecord; networkBitrateLogRecord.bitrate = (int32_t)sendBitrateKbps; - + 
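The timer hunks above also migrate the delay argument from a plain int to webrtc::TimeDelta. A generic sketch of the weak-pointer guarded delayed task they use; postGuardedDelayed is a hypothetical helper, not part of the patch:

// Post `method` on `thread` after delayMs, but only run it if the owner is
// still alive; this mirrors the weak_ptr + TimeDelta pattern used by
// beginQualityTimer()/beginLogTimer() above.
template <typename Owner>
void postGuardedDelayed(rtc::Thread *thread,
                        const std::shared_ptr<Owner> &owner,
                        int delayMs,
                        void (Owner::*method)()) {
    const auto weak = std::weak_ptr<Owner>(owner);
    thread->PostDelayedTask([weak, method]() {
        if (const auto strong = weak.lock()) {
            ((*strong).*method)();
        }
    }, webrtc::TimeDelta::Millis(delayMs));
}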
strong->_networkBitrateLogRecords.emplace_back(rtc::TimeMillis(), std::move(networkBitrateLogRecord)); }); }); @@ -1053,15 +1162,27 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this const &data) { RTC_LOG(LS_INFO) << "sendSignalingMessage: " << std::string(data.begin(), data.end()); - if (_signalingEncryptedConnection) { + if (_signalingConnection && _signalingEncryptedConnection) { switch (_signalingProtocolVersion) { - case SignalingProtocolVersion::V1: { - if (const auto message = _signalingEncryptedConnection->encryptRawPacket(rtc::CopyOnWriteBuffer(data.data(), data.size()))) { - _signalingDataEmitted(std::vector(message.value().data(), message.value().data() + message.value().size())); + case SignalingProtocolVersion::V1: + case SignalingProtocolVersion::V3: { + std::vector packetData; + if (signalingProtocolSupportsCompression(_signalingProtocolVersion)) { + if (const auto compressedData = gzipData(data)) { + packetData = std::move(compressedData.value()); + } else { + RTC_LOG(LS_ERROR) << "Could not gzip signaling message"; + } + } else { + packetData = data; + } + + if (const auto message = _signalingEncryptedConnection->encryptRawPacket(rtc::CopyOnWriteBuffer(packetData.data(), packetData.size()))) { + _signalingConnection->send(std::vector(message.value().data(), message.value().data() + message.value().size())); } else { RTC_LOG(LS_ERROR) << "Could not encrypt signaling message"; } @@ -1070,14 +1191,14 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisprepareForSendingRawMessage(message, true)); - + break; } default: { RTC_DCHECK_NOTREACHED(); - + break; } } @@ -1085,40 +1206,42 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this packet) { if (!packet) { return; } - - _signalingDataEmitted(packet.value().bytes); + + if (_signalingConnection) { + _signalingConnection->send(packet.value().bytes); + } } void beginSignaling() { const auto weak = std::weak_ptr(shared_from_this()); - + _signalingEncryptedConnection = std::make_unique( EncryptedConnection::Type::Signaling, _encryptionKey, [weak, threads = _threads](int delayMs, int cause) { if (delayMs == 0) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, cause]() { + threads->getMediaThread()->PostTask([weak, cause]() { const auto strong = weak.lock(); if (!strong) { return; } - + strong->sendPendingSignalingServiceData(cause); }); } else { - threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak, cause]() { + threads->getMediaThread()->PostDelayedTask([weak, cause]() { const auto strong = weak.lock(); if (!strong) { return; } - + strong->sendPendingSignalingServiceData(cause); - }, delayMs); + }, webrtc::TimeDelta::Millis(delayMs)); } } ); @@ -1127,7 +1250,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisprepareForSendingService(cause)); } @@ -1137,14 +1260,14 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisoutgoingChannelSsrc(_outgoingAudioChannelId.value()); if (audioSsrc) { if (_outgoingAudioChannel && _outgoingAudioChannel->ssrc() != audioSsrc.value()) { _outgoingAudioChannel.reset(); } - + absl::optional outgoingAudioContent; for (const auto &content : coordinatedState->outgoingContents) { if (content.type == signaling::MediaContent::Type::Audio && content.ssrc == audioSsrc.value()) { @@ -1152,7 +1275,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisoutgoingChannelSsrc(_outgoingVideoChannelId.value()); if (videoSsrc) { if (_outgoingVideoChannel && _outgoingVideoChannel->ssrc() != 
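A sketch of the compress-then-encrypt send path introduced above, pulled out of the switch for readability. gzipData(), encryptRawPacket(), signalingProtocolSupportsCompression() and SignalingConnection::send() are the calls from this patch; the free function and the explicit EncryptedConnection/SignalingConnection parameter types are my reading of the stripped template arguments, so treat them as assumptions:

void sendCompressedSignaling(SignalingProtocolVersion version,
                             EncryptedConnection &encryption,
                             SignalingConnection &connection,
                             const std::vector<uint8_t> &data) {
    std::vector<uint8_t> packetData;
    if (signalingProtocolSupportsCompression(version)) {
        if (const auto compressed = gzipData(data)) {
            packetData = std::move(compressed.value());
        } else {
            RTC_LOG(LS_ERROR) << "Could not gzip signaling message";
            return;
        }
    } else {
        packetData = data;  // older protocol versions stay uncompressed
    }
    if (const auto message = encryption.encryptRawPacket(
            rtc::CopyOnWriteBuffer(packetData.data(), packetData.size()))) {
        connection.send(std::vector<uint8_t>(
            message.value().data(),
            message.value().data() + message.value().size()));
    } else {
        RTC_LOG(LS_ERROR) << "Could not encrypt signaling message";
    }
}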
videoSsrc.value()) { _outgoingVideoChannel.reset(); } - + absl::optional outgoingVideoContent; for (const auto &content : coordinatedState->outgoingContents) { if (content.type == signaling::MediaContent::Type::Video && content.ssrc == videoSsrc.value()) { @@ -1183,7 +1306,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this(shared_from_this()); @@ -1196,7 +1319,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -1215,14 +1338,14 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisoutgoingChannelSsrc(_outgoingScreencastChannelId.value()); if (screencastSsrc) { if (_outgoingScreencastChannel && _outgoingScreencastChannel->ssrc() != screencastSsrc.value()) { _outgoingScreencastChannel.reset(); } - + absl::optional outgoingScreencastContent; for (const auto &content : coordinatedState->outgoingContents) { if (content.type == signaling::MediaContent::Type::Video && content.ssrc == screencastSsrc.value()) { @@ -1230,7 +1353,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this(shared_from_this()); @@ -1243,7 +1366,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -1262,14 +1385,14 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisincomingContents) { switch (content.type) { case signaling::MediaContent::Type::Audio: { if (_incomingAudioChannel && _incomingAudioChannel->ssrc() != content.ssrc) { _incomingAudioChannel.reset(); } - + if (!_incomingAudioChannel) { _incomingAudioChannel.reset(new IncomingV2AudioChannel( _channelManager.get(), @@ -1280,14 +1403,14 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisssrc() != content.ssrc) { _incomingVideoChannel.reset(); } - + if (!_incomingVideoChannel) { _incomingVideoChannel.reset(new IncomingV2VideoChannel( _channelManager.get(), @@ -1299,7 +1422,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisaddSink(_currentSink); } - + break; } default: { @@ -1308,7 +1431,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -1358,7 +1481,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this(shared_from_this()); - _networking->perform(RTC_FROM_HERE, [weak, threads = _threads, isOutgoing = _encryptionKey.isOutgoing](NativeNetworkingImpl *networking) { + _networking->perform([weak, threads = _threads, isOutgoing = _encryptionKey.isOutgoing](NativeNetworkingImpl *networking) { auto localFingerprint = networking->getLocalFingerprint(); std::string hash = localFingerprint->algorithm; std::string fingerprint = localFingerprint->GetRfc4572Fingerprint(); @@ -1374,7 +1497,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [weak, ufrag, pwd, supportsRenomination, hash, fingerprint, setup, localIceParams]() { + threads->getMediaThread()->PostTask([weak, ufrag, pwd, supportsRenomination, hash, fingerprint, setup, localIceParams]() { const auto strong = weak.lock(); if (!strong) { return; @@ -1398,13 +1521,13 @@ class InstanceV2ImplInternal : public 
std::enable_shared_from_thisgetPendingOffer()) { signaling::NegotiateChannelsMessage data; data.exchangeId = offer->exchangeId; - + data.contents = offer->contents; signaling::Message message; @@ -1414,31 +1537,40 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this &data) { + if (_signalingConnection) { + _signalingConnection->receiveExternal(data); + } else { + RTC_LOG(LS_ERROR) << "receiveSignalingData: signalingConnection is not available"; + } + } + + void onSignalingData(const std::vector &data) { if (_signalingEncryptedConnection) { switch (_signalingProtocolVersion) { - case SignalingProtocolVersion::V1: { + case SignalingProtocolVersion::V1: + case SignalingProtocolVersion::V3: { if (const auto message = _signalingEncryptedConnection->decryptRawPacket(rtc::CopyOnWriteBuffer(data.data(), data.size()))) { processSignalingMessage(message.value()); } else { RTC_LOG(LS_ERROR) << "receiveSignalingData could not decrypt signaling data"; } - + break; } case SignalingProtocolVersion::V2: { if (const auto packet = _signalingEncryptedConnection->handleIncomingRawPacket((const char *)data.data(), data.size())) { processSignalingMessage(packet.value().main.message); - + for (const auto &additional : packet.value().additional) { processSignalingMessage(additional.message); } } - + break; } default: { RTC_DCHECK_NOTREACHED(); - + break; } } @@ -1446,10 +1578,19 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this decryptedData = std::vector(data.data(), data.data() + data.size()); - processSignalingData(decryptedData); + + if (isGzip(decryptedData)) { + if (const auto decompressedData = gunzipData(decryptedData, 2 * 1024 * 1024)) { + processSignalingData(decompressedData.value()); + } else { + RTC_LOG(LS_ERROR) << "receiveSignalingData could not decompress gzipped data"; + } + } else { + processSignalingData(decryptedData); + } } void processSignalingData(const std::vector &data) { @@ -1473,10 +1614,10 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisfingerprints[0].setup; } - _networking->perform(RTC_FROM_HERE, [threads = _threads, remoteIceParameters = std::move(remoteIceParameters), fingerprint = std::move(fingerprint), sslSetup = std::move(sslSetup)](NativeNetworkingImpl *networking) { + _networking->perform([threads = _threads, remoteIceParameters = std::move(remoteIceParameters), fingerprint = std::move(fingerprint), sslSetup = std::move(sslSetup)](NativeNetworkingImpl *networking) { networking->setRemoteParams(remoteIceParameters, fingerprint.get(), sslSetup); }); - + _handshakeCompleted = true; if (_encryptionKey.isOutgoing) { @@ -1490,7 +1631,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this(); negotiationContents->exchangeId = offerAnwer->exchangeId; negotiationContents->contents = offerAnwer->contents; - + if (const auto response = _contentNegotiationContext->setRemoteNegotiationContent(std::move(negotiationContents))) { signaling::NegotiateChannelsMessage data; @@ -1501,9 +1642,9 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this(messageData)) { for (const auto &candidate : candidatesList->iceCandidates) { @@ -1585,7 +1726,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisperform(RTC_FROM_HERE, [threads = _threads, parsedCandidates = _pendingIceCandidates](NativeNetworkingImpl *networking) { + _networking->perform([threads = _threads, parsedCandidates = _pendingIceCandidates](NativeNetworkingImpl *networking) { networking->addCandidates(parsedCandidates); }); 
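The incoming path above mirrors the outgoing one: decrypt first, then inflate only when the payload looks gzipped. isGzip()/gunzipData() come from the new utils/gzip.h; a minimal standalone sketch of the detection half, assuming the helper simply checks the standard gzip magic bytes (the patch's real implementation may differ):

#include <cstdint>
#include <vector>

// 0x1f 0x8b is the standard two-byte gzip magic; anything shorter or with a
// different prefix is treated as an uncompressed signaling payload.
bool looksLikeGzip(const std::vector<uint8_t> &data) {
    return data.size() >= 2 && data[0] == 0x1f && data[1] == 0x8b;
}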
_pendingIceCandidates.clear(); @@ -1600,18 +1741,18 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisperform(RTC_FROM_HERE, [stringData = std::move(stringData)](NativeNetworkingImpl *networking) { + _networking->perform([stringData = std::move(stringData)](NativeNetworkingImpl *networking) { networking->sendDataChannelMessage(stringData); }); } @@ -1752,12 +1893,12 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thissetVideoCapture(nullptr); } - + if (_outgoingVideoChannelId) { _contentNegotiationContext->removeOutgoingChannel(_outgoingVideoChannelId.value()); _outgoingVideoChannelId.reset(); } - + if (_outgoingScreencastChannelId) { _contentNegotiationContext->removeOutgoingChannel(_outgoingScreencastChannelId.value()); _outgoingScreencastChannelId.reset(); @@ -1802,11 +1943,15 @@ class InstanceV2ImplInternal : public std::enable_shared_from_thisgetWorkerThread()->BlockingCall([&]() { + SetAudioInputDeviceById(_audioDeviceModule.get(), id); + }); } void setAudioOutputDevice(std::string id) { - + _threads->getWorkerThread()->BlockingCall([&]() { + SetAudioOutputDeviceById(_audioDeviceModule.get(), id); + }); } void setIsLowBatteryLevel(bool isLowBatteryLevel) { @@ -1818,28 +1963,28 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this completion) { FinalState finalState; - + json11::Json::object statsLog; - + for (int i = (int)_networkStateLogRecords.size() - 1; i >= 1; i--) { // coalesce events within 5ms if (_networkStateLogRecords[i].timestamp - _networkStateLogRecords[i - 1].timestamp < 5) { _networkStateLogRecords.erase(_networkStateLogRecords.begin() + i - 1); } } - + json11::Json::array jsonNetworkStateLogRecords; int64_t baseTimestamp = 0; for (const auto &record : _networkStateLogRecords) { json11::Json::object jsonRecord; - + std::ostringstream timestampString; - + if (baseTimestamp == 0) { baseTimestamp = record.timestamp; } timestampString << (record.timestamp - baseTimestamp); - + jsonRecord.insert(std::make_pair("t", json11::Json(timestampString.str()))); jsonRecord.insert(std::make_pair("c", json11::Json(record.record.isConnected ? 
1 : 0))); if (record.record.route) { @@ -1848,51 +1993,51 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this json11::Json::object { json11::Json::object jsonCandidate; - + jsonCandidate.insert(std::make_pair("type", json11::Json(candidate.type))); jsonCandidate.insert(std::make_pair("protocol", json11::Json(candidate.protocol))); jsonCandidate.insert(std::make_pair("address", json11::Json(candidate.address))); - + return jsonCandidate; }; - + jsonConnection.insert(std::make_pair("local", serializeCandidate(record.record.connection->local))); jsonConnection.insert(std::make_pair("remote", serializeCandidate(record.record.connection->remote))); - + jsonRecord.insert(std::make_pair("network", std::move(jsonConnection))); } if (record.record.isFailed) { jsonRecord.insert(std::make_pair("failed", json11::Json(1))); } - + jsonNetworkStateLogRecords.push_back(std::move(jsonRecord)); } statsLog.insert(std::make_pair("network", std::move(jsonNetworkStateLogRecords))); - + json11::Json::array jsonNetworkBitrateLogRecords; for (const auto &record : _networkBitrateLogRecords) { json11::Json::object jsonRecord; - + jsonRecord.insert(std::make_pair("b", json11::Json(record.record.bitrate))); - + jsonNetworkBitrateLogRecords.push_back(std::move(jsonRecord)); } statsLog.insert(std::make_pair("bitrate", std::move(jsonNetworkBitrateLogRecords))); - + auto jsonStatsLog = json11::Json(std::move(statsLog)); - + if (!_statsLogPath.data.empty()) { std::ofstream file; file.open(_statsLogPath.data); - + file << jsonStatsLog.dump(); - + file.close(); } - + completion(finalState); } @@ -1936,7 +2081,7 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this _stateUpdated; std::function _signalBarsUpdated; - std::function _audioLevelUpdated; + std::function _audioLevelsUpdated; std::function _remoteBatteryLevelIsLowUpdated; std::function _remoteMediaStateUpdated; std::function _remotePrefferedAspectRatioUpdated; @@ -1944,14 +2089,15 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this(webrtc::TaskQueueFactory*)> _createAudioDeviceModule; FilePath _statsLogPath; + std::unique_ptr _signalingConnection; std::unique_ptr _signalingEncryptedConnection; - + int64_t _startTimestamp = 0; - + absl::optional _currentNetworkStateLogRecord; std::vector> _networkStateLogRecords; std::vector> _networkBitrateLogRecords; - + absl::optional _networkState; bool _handshakeCompleted = false; @@ -1960,17 +2106,17 @@ class InstanceV2ImplInternal : public std::enable_shared_from_this _eventLog; std::unique_ptr _taskQueueFactory; - std::unique_ptr _mediaEngine; std::unique_ptr _call; - webrtc::FieldTrialBasedConfig _fieldTrials; webrtc::LocalAudioSinkAdapter _audioSource; rtc::scoped_refptr _audioDeviceModule; std::unique_ptr _uniqueRandomIdGenerator; webrtc::RtpTransport *_rtpTransport = nullptr; - std::unique_ptr _channelManager; + std::unique_ptr _channelManager; std::unique_ptr _videoBitrateAllocatorFactory; - + std::string _initialInputDeviceId; + std::string _initialOutputDeviceId; + std::unique_ptr _contentNegotiationContext; std::shared_ptr> _networking; @@ -2003,7 +2149,11 @@ InstanceV2Impl::InstanceV2Impl(Descriptor &&descriptor) { if (descriptor.config.logPath.data.size() != 0) { _logSink = std::make_unique(descriptor.config.logPath); } +#ifdef DEBUG + rtc::LogMessage::LogToDebug(rtc::LS_VERBOSE); +#else rtc::LogMessage::LogToDebug(rtc::LS_INFO); +#endif rtc::LogMessage::SetLogToStderr(false); if (_logSink) { rtc::LogMessage::AddLogToStream(_logSink.get(), rtc::LS_INFO); @@ -2013,7 
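The stop() hunk above serializes the collected log records with json11 and writes them to the stats path. A compact sketch of that serialization shape; BitrateSample and writeBitrateLog are illustrative stand-ins, while the field names ("t", "b", "bitrate") are the ones from the hunk:

#include <cstdint>
#include <fstream>
#include <string>
#include <vector>
#include "third-party/json11.hpp"

struct BitrateSample {
    int64_t timestampMs = 0;
    int bitrateKbps = 0;
};

void writeBitrateLog(const std::string &path, const std::vector<BitrateSample> &samples) {
    json11::Json::array records;
    for (const auto &sample : samples) {
        json11::Json::object record;
        record.insert(std::make_pair("t", json11::Json(std::to_string(sample.timestampMs))));
        record.insert(std::make_pair("b", json11::Json(sample.bitrateKbps)));
        records.push_back(std::move(record));
    }
    json11::Json::object statsLog;
    statsLog.insert(std::make_pair("bitrate", json11::Json(std::move(records))));

    std::ofstream file(path);
    file << json11::Json(std::move(statsLog)).dump();
}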
+2163,7 @@ InstanceV2Impl::InstanceV2Impl(Descriptor &&descriptor) { _internal.reset(new ThreadLocalObject(_threads->getMediaThread(), [descriptor = std::move(descriptor), threads = _threads]() mutable { return new InstanceV2ImplInternal(std::move(descriptor), threads); })); - _internal->perform(RTC_FROM_HERE, [](InstanceV2ImplInternal *internal) { + _internal->perform([](InstanceV2ImplInternal *internal) { internal->start(); }); } @@ -2023,55 +2173,55 @@ InstanceV2Impl::~InstanceV2Impl() { } void InstanceV2Impl::receiveSignalingData(const std::vector &data) { - _internal->perform(RTC_FROM_HERE, [data](InstanceV2ImplInternal *internal) { + _internal->perform([data](InstanceV2ImplInternal *internal) { internal->receiveSignalingData(data); }); } void InstanceV2Impl::setVideoCapture(std::shared_ptr videoCapture) { - _internal->perform(RTC_FROM_HERE, [videoCapture](InstanceV2ImplInternal *internal) { + _internal->perform([videoCapture](InstanceV2ImplInternal *internal) { internal->setVideoCapture(videoCapture); }); } void InstanceV2Impl::setRequestedVideoAspect(float aspect) { - _internal->perform(RTC_FROM_HERE, [aspect](InstanceV2ImplInternal *internal) { + _internal->perform([aspect](InstanceV2ImplInternal *internal) { internal->setRequestedVideoAspect(aspect); }); } void InstanceV2Impl::setNetworkType(NetworkType networkType) { - _internal->perform(RTC_FROM_HERE, [networkType](InstanceV2ImplInternal *internal) { + _internal->perform([networkType](InstanceV2ImplInternal *internal) { internal->setNetworkType(networkType); }); } void InstanceV2Impl::setMuteMicrophone(bool muteMicrophone) { - _internal->perform(RTC_FROM_HERE, [muteMicrophone](InstanceV2ImplInternal *internal) { + _internal->perform([muteMicrophone](InstanceV2ImplInternal *internal) { internal->setMuteMicrophone(muteMicrophone); }); } void InstanceV2Impl::setIncomingVideoOutput(std::shared_ptr> sink) { - _internal->perform(RTC_FROM_HERE, [sink](InstanceV2ImplInternal *internal) { + _internal->perform([sink](InstanceV2ImplInternal *internal) { internal->setIncomingVideoOutput(sink); }); } void InstanceV2Impl::setAudioInputDevice(std::string id) { - _internal->perform(RTC_FROM_HERE, [id](InstanceV2ImplInternal *internal) { + _internal->perform([id](InstanceV2ImplInternal *internal) { internal->setAudioInputDevice(id); }); } void InstanceV2Impl::setAudioOutputDevice(std::string id) { - _internal->perform(RTC_FROM_HERE, [id](InstanceV2ImplInternal *internal) { + _internal->perform([id](InstanceV2ImplInternal *internal) { internal->setAudioOutputDevice(id); }); } void InstanceV2Impl::setIsLowBatteryLevel(bool isLowBatteryLevel) { - _internal->perform(RTC_FROM_HERE, [isLowBatteryLevel](InstanceV2ImplInternal *internal) { + _internal->perform([isLowBatteryLevel](InstanceV2ImplInternal *internal) { internal->setIsLowBatteryLevel(isLowBatteryLevel); }); } @@ -2093,8 +2243,9 @@ void InstanceV2Impl::setEchoCancellationStrength(int strength) { std::vector InstanceV2Impl::GetVersions() { std::vector result; - result.push_back("4.0.1"); - result.push_back("4.0.2"); + result.push_back("7.0.0"); + result.push_back("8.0.0"); + result.push_back("9.0.0"); return result; } @@ -2127,7 +2278,7 @@ void InstanceV2Impl::stop(std::function completion) { if (_logSink) { debugLog = _logSink->result(); } - _internal->perform(RTC_FROM_HERE, [completion, debugLog = std::move(debugLog)](InstanceV2ImplInternal *internal) mutable { + _internal->perform([completion, debugLog = std::move(debugLog)](InstanceV2ImplInternal *internal) mutable { 
internal->stop([completion, debugLog = std::move(debugLog)](FinalState finalState) mutable { finalState.debugLog = debugLog; completion(finalState); diff --git a/TMessagesProj/jni/voip/tgcalls/v2/InstanceV2ReferenceImpl.cpp b/TMessagesProj/jni/voip/tgcalls/v2/InstanceV2ReferenceImpl.cpp index 7505762614..82455c644f 100644 --- a/TMessagesProj/jni/voip/tgcalls/v2/InstanceV2ReferenceImpl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/v2/InstanceV2ReferenceImpl.cpp @@ -25,7 +25,6 @@ #include "api/call/audio_sink.h" #include "modules/audio_processing/audio_buffer.h" #include "absl/strings/match.h" -#include "pc/channel_manager.h" #include "audio/audio_state.h" #include "modules/audio_coding/neteq/default_neteq_factory.h" #include "modules/audio_coding/include/audio_coding_module.h" @@ -50,6 +49,10 @@ #include "CodecSelectHelper.h" #include "AudioDeviceHelper.h" #include "SignalingEncryption.h" +#include "ReflectorRelayPortFactory.h" +#include "v2/SignalingConnection.h" +#include "v2/ExternalSignalingConnection.h" +#include "v2/SignalingSctpConnection.h" #ifdef WEBRTC_IOS #include "platform/darwin/iOS/tgcalls_audio_device_module_ios.h" #endif @@ -58,10 +61,41 @@ #include #include "third-party/json11.hpp" +#include "utils/gzip.h" namespace tgcalls { namespace { +enum class SignalingProtocolVersion { + V1, + V2 +}; + +SignalingProtocolVersion signalingProtocolVersion(std::string const &version) { + if (version == "10.0.0") { + return SignalingProtocolVersion::V1; + } else if (version == "11.0.0") { + return SignalingProtocolVersion::V2; + } else { + RTC_LOG(LS_ERROR) << "signalingProtocolVersion: unknown version " << version; + + return SignalingProtocolVersion::V2; + } +} + +bool signalingProtocolSupportsCompression(SignalingProtocolVersion version) { + switch (version) { + case SignalingProtocolVersion::V1: + return false; + case SignalingProtocolVersion::V2: + return true; + default: + RTC_DCHECK_NOTREACHED(); + break; + } + return false; +} + static VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(VideoCaptureInterface *videoCapture) { return videoCapture ? 
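A quick usage check of the mapping defined above for InstanceV2ReferenceImpl: the version string carried in Descriptor::version (presumably one of the strings GetVersions() advertises) selects the signaling protocol, and only the newer protocol gets gzip-compressed signaling. checkVersionMapping is just an illustrative assertion block:

#include <cassert>

void checkVersionMapping() {
    assert(signalingProtocolVersion("10.0.0") == SignalingProtocolVersion::V1);
    assert(signalingProtocolVersion("11.0.0") == SignalingProtocolVersion::V2);
    assert(!signalingProtocolSupportsCompression(SignalingProtocolVersion::V1));
    assert(signalingProtocolSupportsCompression(SignalingProtocolVersion::V2));
}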
static_cast(videoCapture)->object()->getSyncAssumingSameThread() @@ -321,6 +355,7 @@ struct NetworkBitrateLogRecord { class InstanceV2ReferenceImplInternal : public std::enable_shared_from_this { public: InstanceV2ReferenceImplInternal(Descriptor &&descriptor, std::shared_ptr threads) : + _signalingProtocolVersion(signalingProtocolVersion(descriptor.version)), _threads(threads), _rtcServers(descriptor.rtcServers), _proxy(std::move(descriptor.proxy)), @@ -328,7 +363,7 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_this()), _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()), _videoCapture(descriptor.videoCapture), - _platformContext(descriptor.platformContext) { + _platformContext(descriptor.platformContext) { + webrtc::field_trial::InitFieldTrialsFromString( + "WebRTC-DataChannel-Dcsctp/Enabled/" + "WebRTC-Audio-iOS-Holding/Enabled/" + ); } ~InstanceV2ReferenceImplInternal() { _currentStrongSink.reset(); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + _threads->getWorkerThread()->BlockingCall([&]() { _audioDeviceModule = nullptr; }); @@ -363,15 +402,55 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_thisconfigurePlatformAudio(); const auto weak = std::weak_ptr(shared_from_this()); - + PlatformInterface::SharedInstance()->configurePlatformAudio(); - + RTC_DCHECK(_threads->getMediaThread()->IsCurrent()); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + if (_signalingProtocolVersion == SignalingProtocolVersion::V2) { + _signalingConnection = std::make_unique( + _threads, + [threads = _threads, weak](const std::vector &data) { + threads->getMediaThread()->PostTask([weak, data] { + const auto strong = weak.lock(); + if (!strong) { + return; + } + + strong->onSignalingData(data); + }); + }, + [signalingDataEmitted = _signalingDataEmitted](const std::vector &data) { + signalingDataEmitted(data); + } + ); + } + if (!_signalingConnection) { + _signalingConnection = std::make_unique( + [threads = _threads, weak](const std::vector &data) { + threads->getMediaThread()->PostTask([weak, data] { + const auto strong = weak.lock(); + if (!strong) { + return; + } + + strong->onSignalingData(data); + }); + }, + [signalingDataEmitted = _signalingDataEmitted](const std::vector &data) { + signalingDataEmitted(data); + } + ); + } + + _signalingConnection->start(); + + _threads->getWorkerThread()->BlockingCall([&]() { _audioDeviceModule = createAudioDeviceModule(); }); + _relayPortFactory.reset(new ReflectorRelayPortFactory(_rtcServers)); + webrtc::PeerConnectionFactoryDependencies peerConnectionFactoryDependencies; peerConnectionFactoryDependencies.signaling_thread = _threads->getMediaThread(); peerConnectionFactoryDependencies.worker_thread = _threads->getWorkerThread(); @@ -389,18 +468,17 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_this mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps)); peerConnectionFactoryDependencies.media_engine = std::move(mediaEngine); - peerConnectionFactoryDependencies.call_factory = webrtc::CreateCallFactory(); peerConnectionFactoryDependencies.event_log_factory = std::make_unique(peerConnectionFactoryDependencies.task_queue_factory.get()); _peerConnectionFactory = webrtc::CreateModularPeerConnectionFactory(std::move(peerConnectionFactoryDependencies)); - + webrtc::PeerConnectionDependencies peerConnectionDependencies(nullptr); - + PeerConnectionDelegateAdapter::Parameters delegateParameters; delegateParameters.onRenegotiationNeeded = [weak, threads = 
_threads]() { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak]() { + threads->getMediaThread()->PostTask([weak]() { const auto strong = weak.lock(); if (!strong) { return; @@ -558,6 +636,10 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_this audioSource = _peerConnectionFactory->CreateAudioSource(audioSourceOptions); - rtc::scoped_refptr audioTrack = _peerConnectionFactory->CreateAudioTrack("0", audioSource); + rtc::scoped_refptr audioTrack = _peerConnectionFactory->CreateAudioTrack("0", audioSource.get()); webrtc::RTCErrorOr> audioTransceiverOrError = _peerConnection->AddTransceiver(audioTrack, transceiverInit); if (audioTransceiverOrError.ok()) { _outgoingAudioTrack = audioTrack; @@ -635,7 +717,7 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [weak, cause]() { + threads->getMediaThread()->PostTask([weak, cause]() { const auto strong = weak.lock(); if (!strong) { return; @@ -644,14 +726,14 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_thissendPendingSignalingServiceData(cause); }); } else { - threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak, cause]() { + threads->getMediaThread()->PostDelayedTask([weak, cause]() { const auto strong = weak.lock(); if (!strong) { return; } strong->sendPendingSignalingServiceData(cause); - }, delayMs); + }, webrtc::TimeDelta::Millis(delayMs)); } } ); @@ -673,10 +755,41 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_this const &data) { RTC_LOG(LS_INFO) << "sendSignalingMessage: " << std::string(data.begin(), data.end()); - if (_signalingEncryptedConnection) { - rtc::CopyOnWriteBuffer message; - message.AppendData(data.data(), data.size()); - commitSendSignalingMessage(_signalingEncryptedConnection->prepareForSendingRawMessage(message, true)); + if (_signalingConnection && _signalingEncryptedConnection) { + switch (_signalingProtocolVersion) { + case SignalingProtocolVersion::V1: { + rtc::CopyOnWriteBuffer message; + message.AppendData(data.data(), data.size()); + + commitSendSignalingMessage(_signalingEncryptedConnection->prepareForSendingRawMessage(message, true)); + + break; + } + case SignalingProtocolVersion::V2: { + std::vector packetData; + if (signalingProtocolSupportsCompression(_signalingProtocolVersion)) { + if (const auto compressedData = gzipData(data)) { + packetData = std::move(compressedData.value()); + } else { + RTC_LOG(LS_ERROR) << "Could not gzip signaling message"; + } + } else { + packetData = data; + } + + if (const auto message = _signalingEncryptedConnection->encryptRawPacket(rtc::CopyOnWriteBuffer(packetData.data(), packetData.size()))) { + _signalingConnection->send(std::vector(message.value().data(), message.value().data() + message.value().size())); + } else { + RTC_LOG(LS_ERROR) << "Could not encrypt signaling message"; + } + break; + } + default: { + RTC_DCHECK_NOTREACHED(); + + break; + } + } } else { RTC_LOG(LS_ERROR) << "sendSignalingMessage encryption not available"; } @@ -686,13 +799,15 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_thissend(packet.value().bytes); + } } void beginLogTimer(int delayMs) { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + _threads->getMediaThread()->PostDelayedTask([weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -701,7 +816,7 @@ class InstanceV2ReferenceImplInternal : public 
std::enable_shared_from_thiswriteStateLogRecords(); strong->beginLogTimer(1000); - }, delayMs); + }, webrtc::TimeDelta::Millis(delayMs)); } void writeStateLogRecords() { @@ -711,7 +826,7 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_thisgetWorkerThread()->PostTask(RTC_FROM_HERE, [weak, call]() { + _threads->getWorkerThread()->PostTask([weak, call]() { auto strong = weak.lock(); if (!strong) { return; @@ -720,7 +835,7 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_thisGetStats(); float sendBitrateKbps = ((float)stats.send_bandwidth_bps / 1024.0f); - strong->_threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, sendBitrateKbps]() { + strong->_threads->getMediaThread()->PostTask([weak, sendBitrateKbps]() { auto strong = weak.lock(); if (!strong) { return; @@ -753,7 +868,7 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_this observer(new rtc::RefCountedObject([threads = _threads, weak](webrtc::RTCError error) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak]() { + threads->getMediaThread()->PostTask([weak]() { const auto strong = weak.lock(); if (!strong) { return; @@ -782,7 +897,7 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_this(jsonResult.begin(), jsonResult.end())); } - + void sentLocalDescription() { auto localDescription = _peerConnection->local_description(); if (localDescription) { @@ -793,7 +908,7 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_this(jsonResult.begin(), jsonResult.end())); @@ -808,12 +923,38 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_this &data) { + if (_signalingConnection) { + _signalingConnection->receiveExternal(data); + } + } + + void onSignalingData(const std::vector &data) { if (_signalingEncryptedConnection) { - if (const auto packet = _signalingEncryptedConnection->handleIncomingRawPacket((const char *)data.data(), data.size())) { - processSignalingMessage(packet.value().main.message); + switch (_signalingProtocolVersion) { + case SignalingProtocolVersion::V1: { + if (const auto packet = _signalingEncryptedConnection->handleIncomingRawPacket((const char *)data.data(), data.size())) { + processSignalingMessage(packet.value().main.message); - for (const auto &additional : packet.value().additional) { - processSignalingMessage(additional.message); + for (const auto &additional : packet.value().additional) { + processSignalingMessage(additional.message); + } + } + + break; + } + case SignalingProtocolVersion::V2: { + if (const auto message = _signalingEncryptedConnection->decryptRawPacket(rtc::CopyOnWriteBuffer(data.data(), data.size()))) { + processSignalingMessage(message.value()); + } else { + RTC_LOG(LS_ERROR) << "receiveSignalingData could not decrypt signaling data"; + } + + break; + } + default: { + RTC_DCHECK_NOTREACHED(); + + break; } } } else { @@ -823,7 +964,16 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_this decryptedData = std::vector(data.data(), data.data() + data.size()); - processSignalingData(decryptedData); + + if (isGzip(decryptedData)) { + if (const auto decompressedData = gunzipData(decryptedData, 2 * 1024 * 1024)) { + processSignalingData(decompressedData.value()); + } else { + RTC_LOG(LS_ERROR) << "receiveSignalingData could not decompress gzipped data"; + } + } else { + processSignalingData(decryptedData); + } } void processSignalingData(const std::vector &data) { @@ -892,7 +1042,7 @@ class InstanceV2ReferenceImplInternal : 
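A small usage sketch of the bounded decompression above: gunzipData() takes an explicit output cap (2 MiB here) so a malformed or hostile packet cannot inflate into an arbitrarily large buffer. unpackSignaling is a hypothetical wrapper around the calls from the hunk:

std::vector<uint8_t> unpackSignaling(const std::vector<uint8_t> &decrypted) {
    if (!isGzip(decrypted)) {
        return decrypted;  // uncompressed payloads pass through unchanged
    }
    if (const auto inflated = gunzipData(decrypted, 2 * 1024 * 1024)) {
        return inflated.value();
    }
    RTC_LOG(LS_ERROR) << "could not decompress gzipped signaling data";
    return {};
}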
public std::enable_shared_from_thisdata; - + if (const auto mediaState = absl::get_if(messageData)) { AudioState mappedAudioState; if (mediaState->isMuted) { @@ -962,7 +1112,7 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_this remoteDescription(webrtc::CreateSessionDescription(type, sdp, &sdpParseError)); const auto weak = std::weak_ptr(shared_from_this()); rtc::scoped_refptr observer(new rtc::RefCountedObject([threads = _threads, weak, type](webrtc::RTCError error) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, type]() { + threads->getMediaThread()->PostTask([weak, type]() { const auto strong = weak.lock(); if (!strong) { return; @@ -1025,7 +1175,7 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [weak]() { + threads->getMediaThread()->PostTask([weak]() { const auto strong = weak.lock(); if (!strong) { return; @@ -1145,7 +1295,7 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_thisRemoveTrackNew(_outgoingVideoTransceiver->sender()); + _peerConnection->RemoveTrackOrError(_outgoingVideoTransceiver->sender()); } if (_outgoingVideoTrack) { _outgoingVideoTrack = nullptr; @@ -1158,7 +1308,7 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_thisCreateVideoTrack("1", videoCaptureImpl->source()); + auto videoTrack = _peerConnectionFactory->CreateVideoTrack("1", videoCaptureImpl->source().get()); if (videoTrack) { webrtc::RtpTransceiverInit transceiverInit; transceiverInit.stream_ids = { "0" }; @@ -1266,9 +1416,11 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_this _threads; std::vector _rtcServers; std::unique_ptr _proxy; @@ -1401,7 +1554,7 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_this _stateUpdated; std::function _signalBarsUpdated; - std::function _audioLevelUpdated; + std::function _audioLevelsUpdated; std::function _remoteBatteryLevelIsLowUpdated; std::function _remoteMediaStateUpdated; std::function _remotePrefferedAspectRatioUpdated; @@ -1409,6 +1562,7 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_this(webrtc::TaskQueueFactory*)> _createAudioDeviceModule; FilePath _statsLogPath; + std::unique_ptr _signalingConnection; std::unique_ptr _signalingEncryptedConnection; bool _isConnected = false; @@ -1441,13 +1595,13 @@ class InstanceV2ReferenceImplInternal : public std::enable_shared_from_this _eventLog; std::unique_ptr _taskQueueFactory; + std::unique_ptr _relayPortFactory; rtc::scoped_refptr _peerConnectionFactory; std::unique_ptr _peerConnectionObserver; rtc::scoped_refptr _peerConnection; - webrtc::FieldTrialBasedConfig _fieldTrials; - + webrtc::LocalAudioSinkAdapter _audioSource; - + rtc::scoped_refptr _audioDeviceModule; bool _isBatteryLow = false; @@ -1472,7 +1626,7 @@ InstanceV2ReferenceImpl::InstanceV2ReferenceImpl(Descriptor &&descriptor) { _internal.reset(new ThreadLocalObject(_threads->getMediaThread(), [descriptor = std::move(descriptor), threads = _threads]() mutable { return new InstanceV2ReferenceImplInternal(std::move(descriptor), threads); })); - _internal->perform(RTC_FROM_HERE, [](InstanceV2ReferenceImplInternal *internal) { + _internal->perform([](InstanceV2ReferenceImplInternal *internal) { internal->start(); }); } @@ -1482,55 +1636,55 @@ InstanceV2ReferenceImpl::~InstanceV2ReferenceImpl() { } void InstanceV2ReferenceImpl::receiveSignalingData(const std::vector &data) { - _internal->perform(RTC_FROM_HERE, 
[data](InstanceV2ReferenceImplInternal *internal) { + _internal->perform([data](InstanceV2ReferenceImplInternal *internal) { internal->receiveSignalingData(data); }); } void InstanceV2ReferenceImpl::setVideoCapture(std::shared_ptr videoCapture) { - _internal->perform(RTC_FROM_HERE, [videoCapture](InstanceV2ReferenceImplInternal *internal) { + _internal->perform([videoCapture](InstanceV2ReferenceImplInternal *internal) { internal->setVideoCapture(videoCapture); }); } void InstanceV2ReferenceImpl::setRequestedVideoAspect(float aspect) { - _internal->perform(RTC_FROM_HERE, [aspect](InstanceV2ReferenceImplInternal *internal) { + _internal->perform([aspect](InstanceV2ReferenceImplInternal *internal) { internal->setRequestedVideoAspect(aspect); }); } void InstanceV2ReferenceImpl::setNetworkType(NetworkType networkType) { - _internal->perform(RTC_FROM_HERE, [networkType](InstanceV2ReferenceImplInternal *internal) { + _internal->perform([networkType](InstanceV2ReferenceImplInternal *internal) { internal->setNetworkType(networkType); }); } void InstanceV2ReferenceImpl::setMuteMicrophone(bool muteMicrophone) { - _internal->perform(RTC_FROM_HERE, [muteMicrophone](InstanceV2ReferenceImplInternal *internal) { + _internal->perform([muteMicrophone](InstanceV2ReferenceImplInternal *internal) { internal->setMuteMicrophone(muteMicrophone); }); } void InstanceV2ReferenceImpl::setIncomingVideoOutput(std::shared_ptr> sink) { - _internal->perform(RTC_FROM_HERE, [sink](InstanceV2ReferenceImplInternal *internal) { + _internal->perform([sink](InstanceV2ReferenceImplInternal *internal) { internal->setIncomingVideoOutput(sink); }); } void InstanceV2ReferenceImpl::setAudioInputDevice(std::string id) { - _internal->perform(RTC_FROM_HERE, [id](InstanceV2ReferenceImplInternal *internal) { + _internal->perform([id](InstanceV2ReferenceImplInternal *internal) { internal->setAudioInputDevice(id); }); } void InstanceV2ReferenceImpl::setAudioOutputDevice(std::string id) { - _internal->perform(RTC_FROM_HERE, [id](InstanceV2ReferenceImplInternal *internal) { + _internal->perform([id](InstanceV2ReferenceImplInternal *internal) { internal->setAudioOutputDevice(id); }); } void InstanceV2ReferenceImpl::setIsLowBatteryLevel(bool isLowBatteryLevel) { - _internal->perform(RTC_FROM_HERE, [isLowBatteryLevel](InstanceV2ReferenceImplInternal *internal) { + _internal->perform([isLowBatteryLevel](InstanceV2ReferenceImplInternal *internal) { internal->setIsLowBatteryLevel(isLowBatteryLevel); }); } @@ -1552,7 +1706,8 @@ void InstanceV2ReferenceImpl::setEchoCancellationStrength(int strength) { std::vector InstanceV2ReferenceImpl::GetVersions() { std::vector result; - result.push_back("4.1.2"); + result.push_back("10.0.0"); + result.push_back("11.0.0"); return result; } @@ -1585,7 +1740,7 @@ void InstanceV2ReferenceImpl::stop(std::function completion) { if (_logSink) { debugLog = _logSink->result(); } - _internal->perform(RTC_FROM_HERE, [completion, debugLog = std::move(debugLog)](InstanceV2ReferenceImplInternal *internal) mutable { + _internal->perform([completion, debugLog = std::move(debugLog)](InstanceV2ReferenceImplInternal *internal) mutable { internal->stop([completion, debugLog = std::move(debugLog)](FinalState finalState) mutable { finalState.debugLog = debugLog; completion(finalState); diff --git a/TMessagesProj/jni/voip/tgcalls/v2/NativeNetworkingImpl.cpp b/TMessagesProj/jni/voip/tgcalls/v2/NativeNetworkingImpl.cpp index 4f20a7b8aa..7398a06113 100644 --- a/TMessagesProj/jni/voip/tgcalls/v2/NativeNetworkingImpl.cpp +++ 
b/TMessagesProj/jni/voip/tgcalls/v2/NativeNetworkingImpl.cpp @@ -5,7 +5,6 @@ #include "p2p/base/p2p_transport_channel.h" #include "p2p/base/basic_async_resolver_factory.h" #include "api/packet_socket_factory.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/rtc_certificate_generator.h" #include "p2p/base/ice_credentials_iterator.h" #include "api/jsep_ice_candidate.h" @@ -17,11 +16,15 @@ #include "api/async_dns_resolver.h" #include "TurnCustomizerImpl.h" +#include "ReflectorRelayPortFactory.h" #include "SctpDataChannelProviderInterfaceImpl.h" #include "StaticThreads.h" #include "platform/PlatformInterface.h" #include "p2p/base/turn_port.h" +#include "ReflectorPort.h" +#include "FieldTrialsConfig.h" + namespace tgcalls { namespace { @@ -70,7 +73,7 @@ class WrappedAsyncPacketSocket : public rtc::AsyncPacketSocket { _wrappedSocket->SignalReadyToSend.connect(this, &WrappedAsyncPacketSocket::onReadyToSend); _wrappedSocket->SignalAddressReady.connect(this, &WrappedAsyncPacketSocket::onAddressReady); _wrappedSocket->SignalConnect.connect(this, &WrappedAsyncPacketSocket::onConnect); - _wrappedSocket->SignalClose.connect(this, &WrappedAsyncPacketSocket::onClose); + _wrappedSocket->SubscribeClose(this, [this](AsyncPacketSocket* socket, int error) { onClose(socket, error); }); } virtual ~WrappedAsyncPacketSocket() override { @@ -79,7 +82,7 @@ class WrappedAsyncPacketSocket : public rtc::AsyncPacketSocket { _wrappedSocket->SignalReadyToSend.disconnect(this); _wrappedSocket->SignalAddressReady.disconnect(this); _wrappedSocket->SignalConnect.disconnect(this); - _wrappedSocket->SignalClose.disconnect(this); + _wrappedSocket->UnsubscribeClose(this); _wrappedSocket.reset(); } @@ -222,15 +225,15 @@ _dataChannelMessageReceived(configuration.dataChannelMessageReceived) { _networkMonitorFactory = PlatformInterface::SharedInstance()->createNetworkMonitorFactory(); _socketFactory.reset(new rtc::BasicPacketSocketFactory(_threads->getNetworkThread()->socketserver())); - _networkManager = std::make_unique(_networkMonitorFactory.get(), nullptr); + _networkManager = std::make_unique(_networkMonitorFactory.get(), _threads->getNetworkThread()->socketserver()); _asyncResolverFactory = std::make_unique(std::make_unique()); - _dtlsSrtpTransport = std::make_unique(true); + _dtlsSrtpTransport = std::make_unique(true, fieldTrialsBasedConfig); _dtlsSrtpTransport->SetDtlsTransports(nullptr, nullptr); _dtlsSrtpTransport->SetActiveResetSrtpParams(false); _dtlsSrtpTransport->SignalReadyToSend.connect(this, &NativeNetworkingImpl::DtlsReadyToSend); - _dtlsSrtpTransport->SignalRtpPacketReceived.connect(this, &NativeNetworkingImpl::RtpPacketReceived_n); + //_dtlsSrtpTransport->SignalRtpPacketReceived.connect(this, &NativeNetworkingImpl::RtpPacketReceived_n); _dtlsSrtpTransport->SignalRtcpPacketReceived.connect(this, &NativeNetworkingImpl::OnRtcpPacketReceived_n); resetDtlsSrtpTransport(); @@ -256,8 +259,10 @@ void NativeNetworkingImpl::resetDtlsSrtpTransport() { if (_enableStunMarking) { _turnCustomizer.reset(new TurnCustomizerImpl()); } + + _relayPortFactory.reset(new ReflectorRelayPortFactory(_rtcServers)); - _portAllocator.reset(new cricket::BasicPortAllocator(_networkManager.get(), _socketFactory.get(), _turnCustomizer.get(), nullptr)); + _portAllocator.reset(new cricket::BasicPortAllocator(_networkManager.get(), _socketFactory.get(), _turnCustomizer.get(), _relayPortFactory.get())); uint32_t flags = _portAllocator->flags(); @@ -298,6 +303,10 @@ void NativeNetworkingImpl::resetDtlsSrtpTransport() { std::vector 
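The WrappedAsyncPacketSocket hunk above swaps the removed SignalClose sigslot signal for the SubscribeClose/UnsubscribeClose pair, which takes a tag pointer plus a callback. A small sketch of that pairing; CloseObserverSketch is an illustrative class, the subscribe calls are the ones from the hunk:

class CloseObserverSketch {
public:
    explicit CloseObserverSketch(rtc::AsyncPacketSocket *socket) : _socket(socket) {
        // The first argument is an opaque tag used later to unsubscribe.
        _socket->SubscribeClose(this, [this](rtc::AsyncPacketSocket *s, int error) {
            onClose(s, error);
        });
    }
    ~CloseObserverSketch() {
        _socket->UnsubscribeClose(this);  // must use the same tag as SubscribeClose
    }

private:
    void onClose(rtc::AsyncPacketSocket *, int error) {
        RTC_LOG(LS_INFO) << "wrapped socket closed, error=" << error;
    }

    rtc::AsyncPacketSocket *_socket = nullptr;
};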
turnServers; for (auto &server : _rtcServers) { + if (server.isTcp) { + continue; + } + if (server.isTurn) { turnServers.push_back(cricket::RelayServerConfig( rtc::SocketAddress(server.host, server.port), @@ -313,8 +322,11 @@ void NativeNetworkingImpl::resetDtlsSrtpTransport() { _portAllocator->SetConfiguration(stunServers, turnServers, 0, webrtc::NO_PRUNE, _turnCustomizer.get()); + webrtc::IceTransportInit iceTransportInit; + iceTransportInit.set_port_allocator(_portAllocator.get()); + iceTransportInit.set_async_dns_resolver_factory(_asyncResolverFactory.get()); - _transportChannel = cricket::P2PTransportChannel::Create("transport", 0, _portAllocator.get(), _asyncResolverFactory.get()); + _transportChannel = cricket::P2PTransportChannel::Create("transport", 0, std::move(iceTransportInit)); cricket::IceConfig iceConfig; iceConfig.continual_gathering_policy = cricket::GATHER_CONTINUALLY; @@ -407,8 +419,6 @@ void NativeNetworkingImpl::stop() { _localIceParameters = PeerIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::CreateRandomString(cricket::ICE_PWD_LENGTH), true); _localCertificate = rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(rtc::KT_ECDSA), absl::nullopt); - - resetDtlsSrtpTransport(); } PeerIceParameters NativeNetworkingImpl::getLocalIceParameters() { @@ -465,7 +475,7 @@ webrtc::RtpTransport *NativeNetworkingImpl::getRtpTransport() { void NativeNetworkingImpl::checkConnectionTimeout() { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getNetworkThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + _threads->getNetworkThread()->PostDelayedTask([weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -482,7 +492,7 @@ void NativeNetworkingImpl::checkConnectionTimeout() { } strong->checkConnectionTimeout(); - }, 1000); + }, webrtc::TimeDelta::Millis(1000)); } void NativeNetworkingImpl::candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate) { @@ -511,7 +521,7 @@ void NativeNetworkingImpl::DtlsReadyToSend(bool isReadyToSend) { if (isReadyToSend) { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getNetworkThread()->PostTask(RTC_FROM_HERE, [weak]() { + _threads->getNetworkThread()->PostTask([weak]() { const auto strong = weak.lock(); if (!strong) { return; diff --git a/TMessagesProj/jni/voip/tgcalls/v2/NativeNetworkingImpl.h b/TMessagesProj/jni/voip/tgcalls/v2/NativeNetworkingImpl.h index 69b06e218d..f64e3467f7 100644 --- a/TMessagesProj/jni/voip/tgcalls/v2/NativeNetworkingImpl.h +++ b/TMessagesProj/jni/voip/tgcalls/v2/NativeNetworkingImpl.h @@ -13,6 +13,7 @@ #include "rtc_base/ssl_fingerprint.h" #include "pc/sctp_data_channel.h" #include "p2p/base/port.h" +#include "api/transport/field_trial_based_config.h" #include #include @@ -33,6 +34,7 @@ class BasicPortAllocator; class P2PTransportChannel; class IceTransportInternal; class DtlsTransport; +class RelayPortFactoryInterface; } // namespace cricket namespace webrtc { @@ -200,6 +202,7 @@ class NativeNetworkingImpl : public sigslot::has_slots<>, public std::enable_sha std::unique_ptr _socketFactory; std::unique_ptr _networkManager; std::unique_ptr _turnCustomizer; + std::unique_ptr _relayPortFactory; std::unique_ptr _portAllocator; std::unique_ptr _asyncResolverFactory; std::unique_ptr _transportChannel; diff --git a/TMessagesProj/jni/voip/tgcalls/v2/ReflectorPort.cpp b/TMessagesProj/jni/voip/tgcalls/v2/ReflectorPort.cpp new file mode 100644 index 0000000000..0827b9a0a9 --- /dev/null +++ 
b/TMessagesProj/jni/voip/tgcalls/v2/ReflectorPort.cpp @@ -0,0 +1,788 @@ +#include "v2/ReflectorPort.h" + +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/strings/match.h" +#include "absl/types/optional.h" +#include "api/transport/stun.h" +#include "p2p/base/connection.h" +#include "p2p/base/p2p_constants.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/byte_order.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/strings/string_builder.h" +#include "system_wrappers/include/field_trial.h" +#include "rtc_base/byte_order.h" + +namespace tgcalls { + +namespace { + +rtc::CopyOnWriteBuffer parseHex(std::string const &string) { + rtc::CopyOnWriteBuffer result; + + for (size_t i = 0; i < string.length(); i += 2) { + std::string byteString = string.substr(i, 2); + char byte = (char)strtol(byteString.c_str(), NULL, 16); + result.AppendData(&byte, 1); + } + + return result; +} + +} + +static int GetRelayPreference(cricket::ProtocolType proto) { + switch (proto) { + case cricket::PROTO_TCP: + return cricket::ICE_TYPE_PREFERENCE_RELAY_TCP; + case cricket::PROTO_TLS: + return cricket::ICE_TYPE_PREFERENCE_RELAY_TLS; + default: + RTC_DCHECK(proto == cricket::PROTO_UDP); + return cricket::ICE_TYPE_PREFERENCE_RELAY_UDP; + } +} + +ReflectorPort::ReflectorPort(const cricket::CreateRelayPortArgs& args, + rtc::AsyncPacketSocket* socket, + uint8_t serverId) +: Port(args.network_thread, + cricket::RELAY_PORT_TYPE, + args.socket_factory, + args.network, + args.username, + args.password), +server_address_(*args.server_address), +credentials_(args.config->credentials), +socket_(socket), +error_(0), +stun_dscp_value_(rtc::DSCP_NO_CHANGE), +state_(STATE_CONNECTING), +server_priority_(args.config->priority) { + serverId_ = serverId; + + auto rawPeerTag = parseHex(args.config->credentials.password); + auto generator = std::mt19937(std::random_device()()); + auto distribution = std::uniform_int_distribution(); + do { + randomTag_ = distribution(generator); + } while (!randomTag_); + peer_tag_.AppendData(rawPeerTag.data(), rawPeerTag.size() - 4); + peer_tag_.AppendData((uint8_t *)&randomTag_, 4); +} + +ReflectorPort::ReflectorPort(const cricket::CreateRelayPortArgs& args, + uint16_t min_port, + uint16_t max_port, + uint8_t serverId) +: Port(args.network_thread, + cricket::RELAY_PORT_TYPE, + args.socket_factory, + args.network, + min_port, + max_port, + args.username, + args.password), +server_address_(*args.server_address), +credentials_(args.config->credentials), +socket_(NULL), +error_(0), +stun_dscp_value_(rtc::DSCP_NO_CHANGE), +state_(STATE_CONNECTING), +server_priority_(args.config->priority) { + serverId_ = serverId; + + auto rawPeerTag = parseHex(args.config->credentials.password); + auto generator = std::mt19937(std::random_device()()); + auto distribution = std::uniform_int_distribution(); + do { + randomTag_ = distribution(generator); + } while (!randomTag_); + peer_tag_.AppendData(rawPeerTag.data(), rawPeerTag.size() - 4); + peer_tag_.AppendData((uint8_t *)&randomTag_, 4); +} + +ReflectorPort::~ReflectorPort() { + // TODO(juberti): Should this even be necessary? + + // release the allocation by sending a refresh with + // lifetime 0. + if (ready()) { + Release(); + } + + if (!SharedSocket()) { + delete socket_; + } +} + +rtc::SocketAddress ReflectorPort::GetLocalAddress() const { + return socket_ ? 
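Both ReflectorPort constructors above derive the 16-byte peer tag the same way: the hex credential password is decoded, its last four bytes are dropped, and a random non-zero 32-bit tag is appended (PrepareAddress() later insists on exactly 16 bytes). A standalone sketch of that derivation; buildPeerTag is hypothetical and assumes the decoded password is at least four bytes long and that the stripped distribution template argument is uint32_t:

#include <cstdint>
#include <cstdlib>
#include <random>
#include <string>
#include <vector>

std::vector<uint8_t> buildPeerTag(const std::string &hexPassword, uint32_t &randomTagOut) {
    // Decode two hex characters per byte, as parseHex() above does.
    std::vector<uint8_t> raw;
    for (size_t i = 0; i + 1 < hexPassword.size(); i += 2) {
        raw.push_back((uint8_t)strtol(hexPassword.substr(i, 2).c_str(), nullptr, 16));
    }

    // Pick a non-zero random tag, matching the do/while in the constructors.
    std::mt19937 generator(std::random_device{}());
    std::uniform_int_distribution<uint32_t> distribution;
    do {
        randomTagOut = distribution(generator);
    } while (!randomTagOut);

    // Keep all but the last four decoded bytes, then append the 4-byte tag.
    std::vector<uint8_t> peerTag(raw.begin(), raw.end() - 4);
    const uint8_t *tagBytes = (const uint8_t *)&randomTagOut;
    peerTag.insert(peerTag.end(), tagBytes, tagBytes + 4);
    return peerTag;  // 16 bytes for a 32-character hex password
}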
socket_->GetLocalAddress() : rtc::SocketAddress(); +} + +cricket::ProtocolType ReflectorPort::GetProtocol() const { + return server_address_.proto; +} + +void ReflectorPort::PrepareAddress() { + if (peer_tag_.size() != 16) { + RTC_LOG(LS_ERROR) << "Allocation can't be started without setting the" + " peer tag."; + OnAllocateError(cricket::STUN_ERROR_UNAUTHORIZED, + "Missing REFLECTOR server credentials."); + return; + } + if (serverId_ == 0) { + RTC_LOG(LS_ERROR) << "Allocation can't be started without setting the" + " server id."; + OnAllocateError(cricket::STUN_ERROR_UNAUTHORIZED, + "Missing REFLECTOR server id."); + return; + } + + if (!server_address_.address.port()) { + // We will set default REFLECTOR port, if no port is set in the address. + server_address_.address.SetPort(599); + } + + if (!AllowedReflectorPort(server_address_.address.port())) { + // This can only happen after a 300 ALTERNATE SERVER, since the port can't + // be created with a disallowed port number. + RTC_LOG(LS_ERROR) << "Attempt to start allocation with disallowed port# " + << server_address_.address.port(); + OnAllocateError(cricket::STUN_ERROR_SERVER_ERROR, + "Attempt to start allocation to a disallowed port"); + return; + } + if (server_address_.address.IsUnresolvedIP()) { + ResolveTurnAddress(server_address_.address); + } else { + // If protocol family of server address doesn't match with local, return. + if (!IsCompatibleAddress(server_address_.address)) { + RTC_LOG(LS_ERROR) << "IP address family does not match. server: " + << server_address_.address.family() + << " local: " << Network()->GetBestIP().family(); + OnAllocateError(cricket::STUN_ERROR_GLOBAL_FAILURE, + "IP address family does not match."); + return; + } + + // Insert the current address to prevent redirection pingpong. 
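+ // Rough sketch of the allocation flow from this point on (for orientation
+ // only; the authoritative logic is in the methods below):
+ //   PrepareAddress()
+ //     -> CreateReflectorClientSocket()   // UDP, or TCP/TLS via OnSocketConnect()
+ //     -> SendReflectorHello()            // UDP path: ping until the relay answers
+ //   HandleIncomingPacket()               // first valid reply switches to STATE_READY
+ //     -> AddAddress(...)                 // publishes the relay candidate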
+ attempted_server_addresses_.insert(server_address_.address); + + RTC_LOG(LS_INFO) << ToString() << ": Trying to connect to REFLECTOR server via " + << ProtoToString(server_address_.proto) << " @ " + << server_address_.address.ToSensitiveString(); + if (!CreateReflectorClientSocket()) { + RTC_LOG(LS_ERROR) << "Failed to create REFLECTOR client socket"; + OnAllocateError(cricket::SERVER_NOT_REACHABLE_ERROR, + "Failed to create REFLECTOR client socket."); + return; + } + if (server_address_.proto == cricket::PROTO_UDP) { + SendReflectorHello(); + } + } +} + +void ReflectorPort::SendReflectorHello() { + if (!(state_ == STATE_CONNECTED || state_ == STATE_READY)) { + return; + } + + RTC_LOG(LS_WARNING) + << ToString() + << ": REFLECTOR sending ping to " << server_address_.address.ToString(); + + rtc::ByteBufferWriter bufferWriter; + bufferWriter.WriteBytes((const char *)peer_tag_.data(), peer_tag_.size()); + for (int i = 0; i < 12; i++) { + bufferWriter.WriteUInt8(0xffu); + } + bufferWriter.WriteUInt8(0xfeu); + for (int i = 0; i < 3; i++) { + bufferWriter.WriteUInt8(0xffu); + } + bufferWriter.WriteUInt64(123); + + while (bufferWriter.Length() % 4 != 0) { + bufferWriter.WriteUInt8(0); + } + + rtc::PacketOptions options; + Send(bufferWriter.Data(), bufferWriter.Length(), options); + + if (!is_running_ping_task_) { + is_running_ping_task_ = true; + + int timeoutMs = 10000; + // Send pings faster until response arrives + if (state_ == STATE_CONNECTED) { + timeoutMs = 500; + } + + thread()->PostDelayedTask(SafeTask(task_safety_.flag(), [this] { + is_running_ping_task_ = false; + SendReflectorHello(); + }), webrtc::TimeDelta::Millis(timeoutMs)); + } +} + +bool ReflectorPort::CreateReflectorClientSocket() { + RTC_DCHECK(!socket_ || SharedSocket()); + + if (server_address_.proto == cricket::PROTO_UDP && !SharedSocket()) { + socket_ = socket_factory()->CreateUdpSocket( + rtc::SocketAddress(Network()->GetBestIP(), 0), min_port(), max_port()); + } else if (server_address_.proto == cricket::PROTO_TCP) { + RTC_DCHECK(!SharedSocket()); + int opts = rtc::PacketSocketFactory::OPT_STUN; + + rtc::PacketSocketTcpOptions tcp_options; + tcp_options.opts = opts; + socket_ = socket_factory()->CreateClientTcpSocket( + rtc::SocketAddress(Network()->GetBestIP(), 0), server_address_.address, + proxy(), user_agent(), tcp_options); + } + + if (!socket_) { + error_ = SOCKET_ERROR; + return false; + } + + // Apply options if any. + for (SocketOptionsMap::iterator iter = socket_options_.begin(); + iter != socket_options_.end(); ++iter) { + socket_->SetOption(iter->first, iter->second); + } + + if (!SharedSocket()) { + // If socket is shared, AllocationSequence will receive the packet. + socket_->SignalReadPacket.connect(this, &ReflectorPort::OnReadPacket); + } + + socket_->SignalReadyToSend.connect(this, &ReflectorPort::OnReadyToSend); + + socket_->SignalSentPacket.connect(this, &ReflectorPort::OnSentPacket); + + // TCP port is ready to send stun requests after the socket is connected, + // while UDP port is ready to do so once the socket is created. 
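+ // Illustrative summary of the two paths taken below:
+ //   UDP:     state_ becomes STATE_CONNECTED right away, and PrepareAddress()
+ //            follows up with SendReflectorHello().
+ //   TCP/TLS: stay in STATE_CONNECTING until OnSocketConnect() fires;
+ //            OnSocketClose() tears the port down via Close().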
+ if (server_address_.proto == cricket::PROTO_TCP || + server_address_.proto == cricket::PROTO_TLS) { + socket_->SignalConnect.connect(this, &ReflectorPort::OnSocketConnect); + socket_->SubscribeClose(this, [this](rtc::AsyncPacketSocket* socket, int error) { OnSocketClose(socket, error); }); + } else { + state_ = STATE_CONNECTED; + } + return true; +} + +void ReflectorPort::OnSocketConnect(rtc::AsyncPacketSocket* socket) { + // This slot should only be invoked if we're using a connection-oriented + // protocol. + RTC_DCHECK(server_address_.proto == cricket::PROTO_TCP || + server_address_.proto == cricket::PROTO_TLS); + + // Do not use this port if the socket bound to an address not associated with + // the desired network interface. This is seen in Chrome, where TCP sockets + // cannot be given a binding address, and the platform is expected to pick + // the correct local address. + // + // However, there are two situations in which we allow the bound address to + // not be one of the addresses of the requested interface: + // 1. The bound address is the loopback address. This happens when a proxy + // forces TCP to bind to only the localhost address (see issue 3927). + // 2. The bound address is the "any address". This happens when + // multiple_routes is disabled (see issue 4780). + // + // Note that, aside from minor differences in log statements, this logic is + // identical to that in TcpPort. + const rtc::SocketAddress& socket_address = socket->GetLocalAddress(); + if (absl::c_none_of(Network()->GetIPs(), + [socket_address](const rtc::InterfaceAddress& addr) { + return socket_address.ipaddr() == addr; + })) { + if (socket->GetLocalAddress().IsLoopbackIP()) { + RTC_LOG(LS_WARNING) << "Socket is bound to the address:" + << socket_address.ipaddr().ToSensitiveString() + << ", rather than an address associated with network:" + << Network()->ToString() + << ". Still allowing it since it's localhost."; + } else if (IPIsAny(Network()->GetBestIP())) { + RTC_LOG(LS_WARNING) + << "Socket is bound to the address:" + << socket_address.ipaddr().ToSensitiveString() + << ", rather than an address associated with network:" + << Network()->ToString() + << ". Still allowing it since it's the 'any' address" + ", possibly caused by multiple_routes being disabled."; + } else { + RTC_LOG(LS_WARNING) << "Socket is bound to the address:" + << socket_address.ipaddr().ToSensitiveString() + << ", rather than an address associated with network:" + << Network()->ToString() << ". Discarding REFLECTOR port."; + OnAllocateError( + cricket::STUN_ERROR_GLOBAL_FAILURE, + "Address not associated with the desired network interface."); + return; + } + } + + state_ = STATE_CONNECTED; // It is ready to send stun requests. + if (server_address_.address.IsUnresolvedIP()) { + server_address_.address = socket_->GetRemoteAddress(); + } + + RTC_LOG(LS_INFO) << "ReflectorPort connected to " + << socket->GetRemoteAddress().ToSensitiveString() + << " using tcp."; + + //TODO: Initiate server ping +} + +void ReflectorPort::OnSocketClose(rtc::AsyncPacketSocket* socket, int error) { + RTC_LOG(LS_WARNING) << ToString() + << ": Connection with server failed with error: " + << error; + RTC_DCHECK(socket == socket_); + Close(); +} + +cricket::Connection* ReflectorPort::CreateConnection(const cricket::Candidate& remote_candidate, + CandidateOrigin origin) { + // REFLECTOR-UDP can only connect to UDP candidates. 
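+ // Remote reflector peers are identified by a synthetic hostname rather than
+ // a real IP. Hypothetical example for serverId_ == 1 and a peer tag of
+ // 305419896:
+ //   "reflector-1-305419896.reflector"
+ // The checks below only accept candidates whose hostname follows this
+ // "reflector-<serverId>-<tag>.reflector" pattern, mirroring the addresses
+ // synthesized in HandleIncomingPacket() and parsed back in SendTo().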
+ if (!SupportsProtocol(remote_candidate.protocol())) { + return nullptr; + } + + auto remoteHostname = remote_candidate.address().hostname(); + if (remoteHostname.empty()) { + return nullptr; + } + std::ostringstream ipFormat; + ipFormat << "reflector-" << (uint32_t)serverId_ << "-"; + if (!absl::StartsWith(remoteHostname, ipFormat.str()) || !absl::EndsWith(remoteHostname, ".reflector")) { + return nullptr; + } + if (remote_candidate.address().port() != server_address_.address.port()) { + return nullptr; + } + + if (state_ == STATE_DISCONNECTED || state_ == STATE_RECEIVEONLY) { + return nullptr; + } + + cricket::ProxyConnection* conn = new cricket::ProxyConnection(NewWeakPtr(), 0, remote_candidate); + AddOrReplaceConnection(conn); + + return conn; +} + +bool ReflectorPort::FailAndPruneConnection(const rtc::SocketAddress& address) { + cricket::Connection* conn = GetConnection(address); + if (conn != nullptr) { + conn->FailAndPrune(); + return true; + } + return false; +} + +int ReflectorPort::SetOption(rtc::Socket::Option opt, int value) { + // Remember the last requested DSCP value, for STUN traffic. + if (opt == rtc::Socket::OPT_DSCP) + stun_dscp_value_ = static_cast(value); + + if (!socket_) { + // If socket is not created yet, these options will be applied during socket + // creation. + socket_options_[opt] = value; + return 0; + } + return socket_->SetOption(opt, value); +} + +int ReflectorPort::GetOption(rtc::Socket::Option opt, int* value) { + if (!socket_) { + SocketOptionsMap::const_iterator it = socket_options_.find(opt); + if (it == socket_options_.end()) { + return -1; + } + *value = it->second; + return 0; + } + + return socket_->GetOption(opt, value); +} + +int ReflectorPort::GetError() { + return error_; +} + +int ReflectorPort::SendTo(const void* data, + size_t size, + const rtc::SocketAddress& addr, + const rtc::PacketOptions& options, + bool payload) { + rtc::CopyOnWriteBuffer targetPeerTag; + + auto syntheticHostname = addr.hostname(); + + uint32_t resolvedPeerTag = 0; + auto resolvedPeerTagIt = resolved_peer_tags_by_hostname_.find(syntheticHostname); + if (resolvedPeerTagIt != resolved_peer_tags_by_hostname_.end()) { + resolvedPeerTag = resolvedPeerTagIt->second; + } else { + std::ostringstream prefixFormat; + prefixFormat << "reflector-" << (uint32_t)serverId_ << "-"; + std::string suffixFormat = ".reflector"; + if (!absl::StartsWith(syntheticHostname, prefixFormat.str()) || !absl::EndsWith(syntheticHostname, suffixFormat)) { + RTC_LOG(LS_ERROR) << ToString() + << ": Discarding SendTo request with destination " + << addr.ToString(); + + return -1; + } + + auto startPosition = prefixFormat.str().size(); + auto tagString = syntheticHostname.substr(startPosition, syntheticHostname.size() - suffixFormat.size() - startPosition); + + std::stringstream tagStringStream(tagString); + tagStringStream >> resolvedPeerTag; + + if (resolvedPeerTag == 0) { + RTC_LOG(LS_ERROR) << ToString() + << ": Discarding SendTo request with destination " + << addr.ToString() << " (could not parse peer tag)"; + + return -1; + } + + resolved_peer_tags_by_hostname_.insert(std::make_pair(syntheticHostname, resolvedPeerTag)); + } + + targetPeerTag.AppendData(peer_tag_.data(), peer_tag_.size() - 4); + targetPeerTag.AppendData((uint8_t *)&resolvedPeerTag, 4); + + rtc::ByteBufferWriter bufferWriter; + bufferWriter.WriteBytes((const char *)targetPeerTag.data(), targetPeerTag.size()); + + bufferWriter.WriteBytes((const char *)&randomTag_, 4); + + bufferWriter.WriteUInt32((uint32_t)size); + 
bufferWriter.WriteBytes((const char *)data, size); + + while (bufferWriter.Length() % 4 != 0) { + bufferWriter.WriteUInt8(0); + } + + rtc::PacketOptions modified_options(options); + CopyPortInformationToPacketInfo(&modified_options.info_signaled_after_sent); + + modified_options.info_signaled_after_sent.turn_overhead_bytes = bufferWriter.Length() - size; + + Send(bufferWriter.Data(), bufferWriter.Length(), modified_options); + + return static_cast(size); +} + +bool ReflectorPort::CanHandleIncomingPacketsFrom( + const rtc::SocketAddress& addr) const { + return server_address_.address == addr; + } + +bool ReflectorPort::HandleIncomingPacket(rtc::AsyncPacketSocket* socket, + const char* data, + size_t size, + const rtc::SocketAddress& remote_addr, + int64_t packet_time_us) { + if (socket != socket_) { + // The packet was received on a shared socket after we've allocated a new + // socket for this REFLECTOR port. + return false; + } + + // This is to guard against a STUN response from previous server after + // alternative server redirection. TODO(guoweis): add a unit test for this + // race condition. + if (remote_addr != server_address_.address) { + RTC_LOG(LS_WARNING) << ToString() + << ": Discarding REFLECTOR message from unknown address: " + << remote_addr.ToSensitiveString() + << " server_address_: " + << server_address_.address.ToSensitiveString(); + return false; + } + + // The message must be at least 16 bytes (peer tag). + if (size < 16) { + RTC_LOG(LS_WARNING) << ToString() + << ": Received REFLECTOR message that was too short (" << size << ")"; + return false; + } + + if (state_ == STATE_DISCONNECTED) { + RTC_LOG(LS_WARNING) + << ToString() + << ": Received REFLECTOR message while the REFLECTOR port is disconnected"; + return false; + } + + uint8_t receivedPeerTag[16]; + memcpy(receivedPeerTag, data, 16); + + if (memcmp(receivedPeerTag, peer_tag_.data(), 16 - 4) != 0) { + RTC_LOG(LS_WARNING) + << ToString() + << ": Received REFLECTOR message with incorrect peer_tag"; + return false; + } + + if (state_ != STATE_READY) { + state_ = STATE_READY; + + RTC_LOG(LS_INFO) + << ToString() + << ": REFLECTOR " << server_address_.address.ToString() << " is now ready"; + + std::ostringstream ipFormat; + ipFormat << "reflector-" << (uint32_t)serverId_ << "-" << randomTag_ << ".reflector"; + rtc::SocketAddress candidateAddress(ipFormat.str(), server_address_.address.port()); + + // For relayed candidate, Base is the candidate itself. + AddAddress(candidateAddress, // Candidate address. + server_address_.address, // Base address. + rtc::SocketAddress(), // Related address. + cricket::UDP_PROTOCOL_NAME, + ProtoToString(server_address_.proto), // The first hop protocol. + "", // TCP candidate type, empty for turn candidates.
+ cricket::RELAY_PORT_TYPE, GetRelayPreference(server_address_.proto), + server_priority_, ReconstructedServerUrl(false /* use_hostname */), + true); + } + + if (size > 16 + 4 + 4) { + bool isSpecialPacket = false; + if (size >= 16 + 12) { + uint8_t specialTag[12]; + memcpy(specialTag, data + 16, 12); + + uint8_t expectedSpecialTag[12]; + memset(expectedSpecialTag, 0xff, 12); + + if (memcmp(specialTag, expectedSpecialTag, 12) == 0) { + isSpecialPacket = true; + } + } + + if (!isSpecialPacket) { + uint32_t senderTag = 0; + memcpy(&senderTag, data + 16, 4); + + uint32_t dataSize = 0; + memcpy(&dataSize, data + 16 + 4, 4); + dataSize = be32toh(dataSize); + if (dataSize > size - 16 - 4 - 4) { + RTC_LOG(LS_WARNING) + << ToString() + << ": Received data packet with invalid size tag"; + } else { + std::ostringstream ipFormat; + ipFormat << "reflector-" << (uint32_t)serverId_ << "-" << senderTag << ".reflector"; + rtc::SocketAddress candidateAddress(ipFormat.str(), server_address_.address.port()); + candidateAddress.SetResolvedIP(server_address_.address.ipaddr()); + + DispatchPacket(data + 16 + 4 + 4, dataSize, candidateAddress, cricket::ProtocolType::PROTO_UDP, packet_time_us); + } + } + } + + return true; +} + +void ReflectorPort::OnReadPacket(rtc::AsyncPacketSocket* socket, + const char* data, + size_t size, + const rtc::SocketAddress& remote_addr, + const int64_t& packet_time_us) { + HandleIncomingPacket(socket, data, size, remote_addr, packet_time_us); +} + +void ReflectorPort::OnSentPacket(rtc::AsyncPacketSocket* socket, + const rtc::SentPacket& sent_packet) { + PortInterface::SignalSentPacket(sent_packet); +} + +void ReflectorPort::OnReadyToSend(rtc::AsyncPacketSocket* socket) { + if (ready()) { + Port::OnReadyToSend(); + } +} + +bool ReflectorPort::SupportsProtocol(absl::string_view protocol) const { + // Turn port only connects to UDP candidates. + return protocol == cricket::UDP_PROTOCOL_NAME; +} + +void ReflectorPort::ResolveTurnAddress(const rtc::SocketAddress& address) { + if (resolver_) + return; + + RTC_LOG(LS_INFO) << ToString() << ": Starting TURN host lookup for " + << address.ToSensitiveString(); + resolver_ = socket_factory()->CreateAsyncDnsResolver(); + resolver_->Start(address, [this] { + // If DNS resolve is failed when trying to connect to the server using TCP, + // one of the reason could be due to DNS queries blocked by firewall. + // In such cases we will try to connect to the server with hostname, + // assuming socket layer will resolve the hostname through a HTTP proxy (if + // any). + auto& result = resolver_->result(); + if (result.GetError() != 0 && (server_address_.proto == cricket::PROTO_TCP || + server_address_.proto == cricket::PROTO_TLS)) { + if (!CreateReflectorClientSocket()) { + OnAllocateError(cricket::SERVER_NOT_REACHABLE_ERROR, + "TURN host lookup received error."); + } + return; + } + + // Copy the original server address in `resolved_address`. For TLS based + // sockets we need hostname along with resolved address. + rtc::SocketAddress resolved_address = server_address_.address; + if (result.GetError() != 0 || + !result.GetResolvedAddress(Network()->GetBestIP().family(), + &resolved_address)) { + RTC_LOG(LS_WARNING) << ToString() << ": TURN host lookup received error " + << result.GetError(); + error_ = result.GetError(); + OnAllocateError(cricket::SERVER_NOT_REACHABLE_ERROR, + "TURN host lookup received error."); + return; + } + // Signal needs both resolved and unresolved address. 
After signal is sent + // we can copy resolved address back into `server_address_`. + SignalResolvedServerAddress(this, server_address_.address, + resolved_address); + server_address_.address = resolved_address; + PrepareAddress(); + }); +} + +void ReflectorPort::OnSendStunPacket(const void* data, + size_t size, + cricket::StunRequest* request) { + RTC_DCHECK(connected()); + rtc::PacketOptions options(StunDscpValue()); + options.info_signaled_after_sent.packet_type = rtc::PacketType::kTurnMessage; + CopyPortInformationToPacketInfo(&options.info_signaled_after_sent); + if (Send(data, size, options) < 0) { + RTC_LOG(LS_ERROR) << ToString() << ": Failed to send TURN message, error: " + << socket_->GetError(); + } +} + +void ReflectorPort::OnAllocateError(int error_code, const std::string& reason) { + // We will send SignalPortError asynchronously as this can be sent during + // port initialization. This way it will not be blocking other port + // creation. + thread()->PostTask( + SafeTask(task_safety_.flag(), [this] { SignalPortError(this); })); + std::string address = GetLocalAddress().HostAsSensitiveURIString(); + int port = GetLocalAddress().port(); + if (server_address_.proto == cricket::PROTO_TCP && + server_address_.address.IsPrivateIP()) { + address.clear(); + port = 0; + } + SignalCandidateError(this, cricket::IceCandidateErrorEvent(address, port, ReconstructedServerUrl(true /* use_hostname */), error_code, reason)); +} + +void ReflectorPort::Release() { + state_ = STATE_RECEIVEONLY; +} + +void ReflectorPort::Close() { + if (!ready()) { + OnAllocateError(cricket::SERVER_NOT_REACHABLE_ERROR, ""); + } + // Stop the port from creating new connections. + state_ = STATE_DISCONNECTED; + // Delete all existing connections; stop sending data. + for (auto kv : connections()) { + kv.second->Destroy(); + } + + SignalReflectorPortClosed(this); +} + +rtc::DiffServCodePoint ReflectorPort::StunDscpValue() const { + return stun_dscp_value_; +} + +// static +bool ReflectorPort::AllowedReflectorPort(int port) { + return true; +} + +void ReflectorPort::DispatchPacket(const char* data, + size_t size, + const rtc::SocketAddress& remote_addr, + cricket::ProtocolType proto, + int64_t packet_time_us) { + if (cricket::Connection* conn = GetConnection(remote_addr)) { + conn->OnReadPacket(data, size, packet_time_us); + } else { + Port::OnReadPacket(data, size, remote_addr, proto); + } +} + +int ReflectorPort::Send(const void* data, + size_t len, + const rtc::PacketOptions& options) { + return socket_->SendTo(data, len, server_address_.address, options); +} + +void ReflectorPort::HandleConnectionDestroyed(cricket::Connection* conn) { +} + +std::string ReflectorPort::ReconstructedServerUrl(bool use_hostname) { + // draft-petithuguenin-behave-turn-uris-01 + // turnURI = scheme ":" turn-host [ ":" turn-port ] + // [ "?transport=" transport ] + // scheme = "turn" / "turns" + // transport = "udp" / "tcp" / transport-ext + // transport-ext = 1*unreserved + // turn-host = IP-literal / IPv4address / reg-name + // turn-port = *DIGIT + std::string scheme = "turn"; + std::string transport = "tcp"; + switch (server_address_.proto) { + case cricket::PROTO_SSLTCP: + case cricket::PROTO_TLS: + scheme = "turns"; + break; + case cricket::PROTO_UDP: + transport = "udp"; + break; + case cricket::PROTO_TCP: + break; + } + rtc::StringBuilder url; + url << scheme << ":" + << (use_hostname ? 
server_address_.address.hostname() + : server_address_.address.ipaddr().ToString()) + << ":" << server_address_.address.port() << "?transport=" << transport; + return url.Release(); +} + +} // namespace tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/v2/ReflectorPort.h b/TMessagesProj/jni/voip/tgcalls/v2/ReflectorPort.h new file mode 100644 index 0000000000..8f0a3a12e2 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/ReflectorPort.h @@ -0,0 +1,240 @@ +#ifndef TGCALLS_REFLECTOR_PORT_H_ +#define TGCALLS_REFLECTOR_PORT_H_ + +#include + +#include +#include +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "api/async_dns_resolver.h" +#include "p2p/base/port.h" +#include "p2p/client/basic_port_allocator.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/ssl_certificate.h" + +namespace webrtc { +class TurnCustomizer; +} + +namespace tgcalls { + +extern const int STUN_ATTR_TURN_LOGGING_ID; +extern const char TURN_PORT_TYPE[]; +class TurnAllocateRequest; +class TurnEntry; + +class ReflectorPort : public cricket::Port { +public: + enum PortState { + STATE_CONNECTING, // Initial state, cannot send any packets. + STATE_CONNECTED, // Socket connected, ready to send stun requests. + STATE_READY, // Received allocate success, can send any packets. + STATE_RECEIVEONLY, // Had REFRESH_REQUEST error, cannot send any packets. + STATE_DISCONNECTED, // TCP connection died, cannot send/receive any + // packets. + }; + + // Create a REFLECTOR port using the shared UDP socket, `socket`. + static std::unique_ptr Create( + const cricket::CreateRelayPortArgs& args, + rtc::AsyncPacketSocket* socket, + uint8_t serverId) { + // Do basic parameter validation. + if (args.config->credentials.username.size() > 32) { + RTC_LOG(LS_ERROR) << "Attempt to use REFLECTOR with a too long username " + << "of length " << args.config->credentials.username.size(); + return nullptr; + } + // Do not connect to low-numbered ports. The default STUN port is 3478. + if (!AllowedReflectorPort(args.server_address->address.port())) { + RTC_LOG(LS_ERROR) << "Attempt to use REFLECTOR to connect to port " + << args.server_address->address.port(); + return nullptr; + } + // Using `new` to access a non-public constructor. + return absl::WrapUnique(new ReflectorPort(args, socket, serverId)); + } + + // Create a REFLECTOR port that will use a new socket, bound to `network` and + // using a port in the range between `min_port` and `max_port`. + static std::unique_ptr Create( + const cricket::CreateRelayPortArgs& args, + uint16_t min_port, + uint16_t max_port, + uint8_t serverId) { + // Do basic parameter validation. + if (args.config->credentials.username.size() > 32) { + RTC_LOG(LS_ERROR) << "Attempt to use TURN with a too long username " + << "of length " << args.config->credentials.username.size(); + return nullptr; + } + // Do not connect to low-numbered ports. The default STUN port is 3478. + if (!AllowedReflectorPort(args.server_address->address.port())) { + RTC_LOG(LS_ERROR) << "Attempt to use TURN to connect to port " + << args.server_address->address.port(); + return nullptr; + } + // Using `new` to access a non-public constructor. + return absl::WrapUnique(new ReflectorPort(args, min_port, max_port, serverId)); + } + + ~ReflectorPort() override; + + const cricket::ProtocolAddress& server_address() const { return server_address_; } + // Returns an empty address if the local address has not been assigned.
+ rtc::SocketAddress GetLocalAddress() const; + + bool ready() const { return state_ == STATE_READY; } + bool connected() const { + return state_ == STATE_READY || state_ == STATE_CONNECTED; + } + const cricket::RelayCredentials& credentials() const { return credentials_; } + + cricket::ProtocolType GetProtocol() const override; + + // Sets state to STATE_RECEIVEONLY. + void Release(); + + void PrepareAddress() override; + cricket::Connection* CreateConnection(const cricket::Candidate& c, + PortInterface::CandidateOrigin origin) override; + int SendTo(const void* data, + size_t size, + const rtc::SocketAddress& addr, + const rtc::PacketOptions& options, + bool payload) override; + int SetOption(rtc::Socket::Option opt, int value) override; + int GetOption(rtc::Socket::Option opt, int* value) override; + int GetError() override; + + bool HandleIncomingPacket(rtc::AsyncPacketSocket* socket, + const char* data, + size_t size, + const rtc::SocketAddress& remote_addr, + int64_t packet_time_us) override; + bool CanHandleIncomingPacketsFrom( + const rtc::SocketAddress& addr) const override; + virtual void OnReadPacket(rtc::AsyncPacketSocket* socket, + const char* data, + size_t size, + const rtc::SocketAddress& remote_addr, + const int64_t& packet_time_us); + + void OnSentPacket(rtc::AsyncPacketSocket* socket, + const rtc::SentPacket& sent_packet) override; + virtual void OnReadyToSend(rtc::AsyncPacketSocket* socket); + bool SupportsProtocol(absl::string_view protocol) const override; + + void OnSocketConnect(rtc::AsyncPacketSocket* socket); + void OnSocketClose(rtc::AsyncPacketSocket* socket, int error); + + int error() const { return error_; } + + rtc::AsyncPacketSocket* socket() const { return socket_; } + + // Signal with resolved server address. + // Parameters are port, server address and resolved server address. + // This signal will be sent only if server address is resolved successfully. + sigslot:: + signal3 + SignalResolvedServerAddress; + + // Signal when ReflectorPort is closed, + // e.g. remote socket closed (TCP) + // or receiving a REFRESH response with lifetime 0. + sigslot::signal1 SignalReflectorPortClosed; + + // All public methods/signals below are for testing only. + sigslot::signal2 SignalTurnRefreshResult; + sigslot::signal3 + SignalCreatePermissionResult; + + // Visible for testing. + // Shuts down the reflector port, usually because of some fatal errors.
+ void Close(); + + void HandleConnectionDestroyed(cricket::Connection* conn) override; + +protected: + ReflectorPort(const cricket::CreateRelayPortArgs& args, + rtc::AsyncPacketSocket* socket, + uint8_t serverId); + + ReflectorPort(const cricket::CreateRelayPortArgs& args, + uint16_t min_port, + uint16_t max_port, + uint8_t serverId); + + rtc::DiffServCodePoint StunDscpValue() const override; + +private: + typedef std::map SocketOptionsMap; + typedef std::set AttemptedServerSet; + + static bool AllowedReflectorPort(int port); + + bool CreateReflectorClientSocket(); + + void ResolveTurnAddress(const rtc::SocketAddress& address); + void OnResolveResult(rtc::AsyncResolverInterface* resolver); + + void OnSendStunPacket(const void* data, size_t size, cricket::StunRequest* request); + + void OnAllocateError(int error_code, const std::string& reason); + + void DispatchPacket(const char* data, + size_t size, + const rtc::SocketAddress& remote_addr, + cricket::ProtocolType proto, + int64_t packet_time_us); + + int Send(const void* data, size_t size, const rtc::PacketOptions& options); + + // Marks the connection with remote address `address` failed and + // pruned (a.k.a. write-timed-out). Returns true if a connection is found. + bool FailAndPruneConnection(const rtc::SocketAddress& address); + + // Reconstruct the URL of the server which the candidate is gathered from. + std::string ReconstructedServerUrl(bool use_hostname); + + void SendReflectorHello(); + + rtc::CopyOnWriteBuffer peer_tag_; + uint32_t randomTag_ = 0; + + cricket::ProtocolAddress server_address_; + uint8_t serverId_ = 0; + + std::map resolved_peer_tags_by_hostname_; + + cricket::RelayCredentials credentials_; + AttemptedServerSet attempted_server_addresses_; + + rtc::AsyncPacketSocket* socket_; + SocketOptionsMap socket_options_; + std::unique_ptr resolver_; + int error_; + rtc::DiffServCodePoint stun_dscp_value_; + + PortState state_; + // By default the value will be set to 0. This value will be used in + // calculating the candidate priority. + int server_priority_; + + // Optional TurnCustomizer that can modify outgoing messages. Once set, this + // must outlive the ReflectorPort's lifetime. 
+ webrtc::TurnCustomizer* turn_customizer_ = nullptr; + + webrtc::ScopedTaskSafety task_safety_; + + bool is_running_ping_task_ = false; +}; + +} // namespace tgcalls + +#endif // TGCALLS_REFLECTOR_PORT_H_ diff --git a/TMessagesProj/jni/voip/tgcalls/v2/ReflectorRelayPortFactory.cpp b/TMessagesProj/jni/voip/tgcalls/v2/ReflectorRelayPortFactory.cpp new file mode 100644 index 0000000000..86db5c9f3a --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/ReflectorRelayPortFactory.cpp @@ -0,0 +1,78 @@ +#include "v2/ReflectorRelayPortFactory.h" + +#include "p2p/base/turn_port.h" + +#include "v2/ReflectorPort.h" + +namespace tgcalls { + +ReflectorRelayPortFactory::ReflectorRelayPortFactory(std::vector servers) : +_servers(servers) { +} + +ReflectorRelayPortFactory::~ReflectorRelayPortFactory() { +} + +std::unique_ptr ReflectorRelayPortFactory::Create(const cricket::CreateRelayPortArgs& args, rtc::AsyncPacketSocket* udp_socket) { + if (args.config->credentials.username == "reflector") { + uint8_t id = 0; + for (const auto &server : _servers) { + rtc::SocketAddress serverAddress(server.host, server.port); + if (args.server_address->address == serverAddress) { + id = server.id; + break; + } + } + + if (id == 0) { + return nullptr; + } + + auto port = ReflectorPort::Create(args, udp_socket, id); + if (!port) { + return nullptr; + } + return port; + } else { + auto port = cricket::TurnPort::Create(args, udp_socket); + if (!port) { + return nullptr; + } + port->SetTlsCertPolicy(args.config->tls_cert_policy); + port->SetTurnLoggingId(args.config->turn_logging_id); + return port; + } +} + +std::unique_ptr ReflectorRelayPortFactory::Create(const cricket::CreateRelayPortArgs& args, int min_port, int max_port) { + if (args.config->credentials.username == "reflector") { + uint8_t id = 0; + for (const auto &server : _servers) { + rtc::SocketAddress serverAddress(server.host, server.port); + if (args.server_address->address == serverAddress) { + id = server.id; + break; + } + } + + if (id == 0) { + return nullptr; + } + + auto port = ReflectorPort::Create(args, min_port, max_port, id); + if (!port) { + return nullptr; + } + return port; + } else { + auto port = cricket::TurnPort::Create(args, min_port, max_port); + if (!port) { + return nullptr; + } + port->SetTlsCertPolicy(args.config->tls_cert_policy); + port->SetTurnLoggingId(args.config->turn_logging_id); + return port; + } +} + +} // namespace tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/v2/ReflectorRelayPortFactory.h b/TMessagesProj/jni/voip/tgcalls/v2/ReflectorRelayPortFactory.h new file mode 100644 index 0000000000..d1cda7b7c2 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/ReflectorRelayPortFactory.h @@ -0,0 +1,28 @@ +#ifndef TGCALLS_REFLECTOR_RELAY_PORT_FACTORY_H +#define TGCALLS_REFLECTOR_RELAY_PORT_FACTORY_H + +#include "p2p/client/relay_port_factory_interface.h" + +#include "Instance.h" + +namespace tgcalls { + +class ReflectorRelayPortFactory : public cricket::RelayPortFactoryInterface { +public: + ReflectorRelayPortFactory(std::vector servers); + ~ReflectorRelayPortFactory() override; + + // This variant is used for UDP connection to the relay server + // using a already existing shared socket. + virtual std::unique_ptr Create(const cricket::CreateRelayPortArgs& args, rtc::AsyncPacketSocket* udp_socket) override; + + // This variant is used for the other cases. 
+ virtual std::unique_ptr Create(const cricket::CreateRelayPortArgs& args, int min_port, int max_port) override; + +private: + std::vector _servers; +}; + +} // namespace tgcalls + +#endif diff --git a/TMessagesProj/jni/voip/tgcalls/v2/SignalingConnection.cpp b/TMessagesProj/jni/voip/tgcalls/v2/SignalingConnection.cpp new file mode 100644 index 0000000000..848c54e1f7 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/SignalingConnection.cpp @@ -0,0 +1,8 @@ +#include "v2/SignalingConnection.h" + +namespace tgcalls { + +SignalingConnection::SignalingConnection() { +} + +} diff --git a/TMessagesProj/jni/voip/tgcalls/v2/SignalingConnection.h b/TMessagesProj/jni/voip/tgcalls/v2/SignalingConnection.h new file mode 100644 index 0000000000..490d41083e --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/SignalingConnection.h @@ -0,0 +1,26 @@ +#ifndef TGCALLS_SIGNALING_CONNECTION_H_ +#define TGCALLS_SIGNALING_CONNECTION_H_ + +#include +#include + +namespace webrtc { +} + +namespace tgcalls { + +class SignalingConnection : public std::enable_shared_from_this { +public: + SignalingConnection(); + virtual ~SignalingConnection() = default; + + virtual void start() = 0; + + virtual void send(const std::vector &data) = 0; + virtual void receiveExternal(const std::vector &data) { + } +}; + +} // namespace tgcalls + +#endif // TGCALLS_SIGNALING_CONNECTION_H_ diff --git a/TMessagesProj/jni/voip/tgcalls/v2/SignalingSctpConnection.cpp b/TMessagesProj/jni/voip/tgcalls/v2/SignalingSctpConnection.cpp new file mode 100644 index 0000000000..fdef34f4d3 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/SignalingSctpConnection.cpp @@ -0,0 +1,189 @@ +#include "v2/SignalingSctpConnection.h" + +#include + +#include "rtc_base/async_tcp_socket.h" +#include "p2p/base/basic_packet_socket_factory.h" +#include "rtc_base/logging.h" +#include "p2p/base/packet_transport_internal.h" +#include "media/sctp/sctp_transport_factory.h" + +#include "FieldTrialsConfig.h" + +namespace tgcalls { + +class SignalingPacketTransport : public rtc::PacketTransportInternal { +public: + SignalingPacketTransport(std::shared_ptr threads, std::function &)> emitData) : + _threads(threads), + _emitData(emitData), + _transportName("signaling") { + } + + virtual ~SignalingPacketTransport() { + } + + void receiveData(std::vector const &data) { + RTC_LOG(LS_INFO) << "SignalingPacketTransport: adding data of " << data.size() << " bytes"; + SignalReadPacket.emit(this, (const char *)data.data(), data.size(), -1, 0); + } + + virtual const std::string& transport_name() const override { + return _transportName; + } + + virtual bool writable() const override { + return true; + } + + virtual bool receiving() const override { + return false; + } + + // Attempts to send the given packet. + // The return value is < 0 on failure. The return value in failure case is not + // descriptive. Depending on failure cause and implementation details + // GetError() returns an descriptive errno.h error value. + // This mimics posix socket send() or sendto() behavior. + // TODO(johan): Reliable, meaningful, consistent error codes for all + // implementations would be nice. + // TODO(johan): Remove the default argument once channel code is updated. 
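+ // Sketch of how this transport is wired up by SignalingSctpConnection below
+ // (no real socket is involved): SendPacket() hands outgoing SCTP bytes to
+ // _emitData() and synthesizes SignalSentPacket, while bytes arriving over the
+ // external signaling channel re-enter through receiveData(), which raises
+ // SignalReadPacket for the SCTP transport to parse.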
+ virtual int SendPacket(const char* data, + size_t len, + const rtc::PacketOptions& options, + int flags = 0) override { + _emitData(std::vector(data, data + len)); + + rtc::SentPacket sentPacket; + sentPacket.packet_id = options.packet_id; + SignalSentPacket.emit(this, sentPacket); + + return (int)len; + } + + virtual int SetOption(rtc::Socket::Option opt, int value) override { + return 0; + } + + virtual bool GetOption(rtc::Socket::Option opt, int* value) override { + return 0; + } + + virtual int GetError() override { + return 0; + } + + virtual absl::optional network_route() const override { + return absl::nullopt; + } + +private: + std::shared_ptr _threads; + std::function &)> _onIncomingData; + std::function &)> _emitData; + std::string _transportName; +}; + +SignalingSctpConnection::SignalingSctpConnection(std::shared_ptr threads, std::function &)> onIncomingData, std::function &)> emitData) : +_threads(threads), +_emitData(emitData), +_onIncomingData(onIncomingData) { + _threads->getNetworkThread()->BlockingCall([&]() { + _packetTransport = std::make_unique(threads, emitData); + + _sctpTransportFactory.reset(new cricket::SctpTransportFactory(_threads->getNetworkThread())); + + _sctpTransport = _sctpTransportFactory->CreateSctpTransport(_packetTransport.get()); + _sctpTransport->OpenStream(0); + _sctpTransport->SetDataChannelSink(this); + + // TODO: should we disconnect the data channel sink? + + _sctpTransport->Start(5000, 5000, 262144); + }); +} + +void SignalingSctpConnection::OnReadyToSend() { + assert(_threads->getNetworkThread()->IsCurrent()); + + _isReadyToSend = true; + + auto pendingData = _pendingData; + _pendingData.clear(); + + for (const auto &data : pendingData) { + webrtc::SendDataParams params; + params.type = webrtc::DataMessageType::kBinary; + params.ordered = true; + + rtc::CopyOnWriteBuffer payload; + payload.AppendData(data.data(), data.size()); + + cricket::SendDataResult result; + _sctpTransport->SendData(0, params, payload, &result); + + if (result == cricket::SendDataResult::SDR_SUCCESS) { + RTC_LOG(LS_INFO) << "SignalingSctpConnection: sent data of " << data.size() << " bytes"; + } else { + _isReadyToSend = false; + _pendingData.push_back(data); + RTC_LOG(LS_INFO) << "SignalingSctpConnection: send error, storing data until ready to send (" << _pendingData.size() << " items)"; + } + } +} + +void SignalingSctpConnection::OnTransportClosed(webrtc::RTCError error) { + assert(_threads->getNetworkThread()->IsCurrent()); +} + +void SignalingSctpConnection::OnDataReceived(int channel_id, webrtc::DataMessageType type, const rtc::CopyOnWriteBuffer& buffer) { + assert(_threads->getNetworkThread()->IsCurrent()); + + _onIncomingData(std::vector(buffer.data(), buffer.data() + buffer.size())); +} + +SignalingSctpConnection::~SignalingSctpConnection() { + _threads->getNetworkThread()->BlockingCall([&]() { + _sctpTransport.reset(); + _sctpTransportFactory.reset(); + _packetTransport.reset(); + }); +} + +void SignalingSctpConnection::start() { +} + +void SignalingSctpConnection::receiveExternal(const std::vector &data) { + _threads->getNetworkThread()->BlockingCall([&]() { + _packetTransport->receiveData(data); + }); +} + +void SignalingSctpConnection::send(const std::vector &data) { + _threads->getNetworkThread()->BlockingCall([&]() { + if (_isReadyToSend) { + webrtc::SendDataParams params; + params.type = webrtc::DataMessageType::kBinary; + params.ordered = true; + + rtc::CopyOnWriteBuffer payload; + payload.AppendData(data.data(), data.size()); + + 
cricket::SendDataResult result; + _sctpTransport->SendData(0, params, payload, &result); + + if (result == cricket::SendDataResult::SDR_ERROR) { + _isReadyToSend = false; + _pendingData.push_back(data); + RTC_LOG(LS_INFO) << "SignalingSctpConnection: send error, storing data until ready to send (" << _pendingData.size() << " items)"; + } else { + RTC_LOG(LS_INFO) << "SignalingSctpConnection: sent data of " << data.size() << " bytes"; + } + } else { + _pendingData.push_back(data); + RTC_LOG(LS_INFO) << "SignalingSctpConnection: not ready to send, storing data until ready to send (" << _pendingData.size() << " items)"; + } + }); +} + +} diff --git a/TMessagesProj/jni/voip/tgcalls/v2/SignalingSctpConnection.h b/TMessagesProj/jni/voip/tgcalls/v2/SignalingSctpConnection.h new file mode 100644 index 0000000000..c4a96c9f76 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/SignalingSctpConnection.h @@ -0,0 +1,79 @@ +#ifndef TGCALLS_SIGNALING_SCTP_CONNECTION_H_ +#define TGCALLS_SIGNALING_SCTP_CONNECTION_H_ + +#ifdef WEBRTC_WIN +#include +#endif // WEBRTC_WIN + +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/byte_buffer.h" +#include "media/base/media_channel.h" + +#include + +#include + +#include "StaticThreads.h" +#include "SignalingConnection.h" + +namespace rtc { +class Socket; +} + +namespace cricket { +class SctpTransportFactory; +class SctpTransportInternal; +}; + +namespace tgcalls { + +class SignalingPacketTransport; + +class SignalingSctpConnection : public sigslot::has_slots<>, public SignalingConnection, public webrtc::DataChannelSink { +private: + struct PacketReadState { + rtc::CopyOnWriteBuffer headerData; + int remainingHeaderSize = 0; + bool isHeaderCompleted = false; + + rtc::CopyOnWriteBuffer data; + int remainingDataSize = 0; + bool isDataCompleted = false; + }; + +public: + SignalingSctpConnection(std::shared_ptr threads, std::function &)> onIncomingData, std::function &)> emitData); + virtual ~SignalingSctpConnection(); + + virtual void receiveExternal(const std::vector &data) override; + virtual void start() override; + virtual void send(const std::vector &data) override; + + virtual void OnDataReceived(int channel_id, + webrtc::DataMessageType type, + const rtc::CopyOnWriteBuffer& buffer) override; + virtual void OnReadyToSend() override; + virtual void OnTransportClosed(webrtc::RTCError error) override; + + // Unused + virtual void OnChannelClosing(int channel_id) override{} + virtual void OnChannelClosed(int channel_id) override{} + +private: + std::shared_ptr _threads; + std::function &)> _emitData; + std::function &)> _onIncomingData; + + std::unique_ptr _packetTransport; + std::unique_ptr _sctpTransportFactory; + std::unique_ptr _sctpTransport; + + bool _isReadyToSend = false; + std::vector> _pendingData; +}; + +} // namespace tgcalls + +#endif // TGCALLS_SIGNALING_SCTP_CONNECTION_H_ diff --git a/TMessagesProj/jni/voip/tgcalls/v2_4_0_0/InstanceV2_4_0_0Impl.cpp b/TMessagesProj/jni/voip/tgcalls/v2_4_0_0/InstanceV2_4_0_0Impl.cpp index 8a822c723e..69ce1e6e6a 100644 --- a/TMessagesProj/jni/voip/tgcalls/v2_4_0_0/InstanceV2_4_0_0Impl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/v2_4_0_0/InstanceV2_4_0_0Impl.cpp @@ -24,7 +24,8 @@ #include "api/call/audio_sink.h" #include "modules/audio_processing/audio_buffer.h" #include "absl/strings/match.h" -#include "pc/channel_manager.h" +#include "pc/rtp_transport.h" +#include "pc/channel.h" #include "audio/audio_state.h" #include 
"modules/audio_coding/neteq/default_neteq_factory.h" #include "modules/audio_coding/include/audio_coding_module.h" @@ -33,6 +34,7 @@ #include "pc/used_ids.h" #include "media/base/sdp_video_format_utils.h" +#include "ChannelManager.h" #include "AudioFrame.h" #include "ThreadLocalObject.h" #include "Manager.h" @@ -49,6 +51,8 @@ #include #include +#include "FieldTrialsConfig.h" + namespace tgcalls { namespace { @@ -525,7 +529,7 @@ class OutgoingAudioChannel : public sigslot::has_slots<> { public: OutgoingAudioChannel( webrtc::Call *call, - cricket::ChannelManager *channelManager, + ChannelManager *channelManager, rtc::UniqueRandomIdGenerator *uniqueRandomIdGenerator, webrtc::LocalAudioSinkAdapter *audioSource, webrtc::RtpTransport *rtpTransport, @@ -545,10 +549,8 @@ class OutgoingAudioChannel : public sigslot::has_slots<> { audioOptions.noise_suppression = false; audioOptions.auto_gain_control = false; audioOptions.highpass_filter = false; - audioOptions.typing_detection = false; - audioOptions.experimental_agc = false; - audioOptions.experimental_ns = false; - audioOptions.residual_echo_detector = false; + //audioOptions.typing_detection = false; + //audioOptions.residual_echo_detector = false; } else { audioOptions.echo_cancellation = true; audioOptions.noise_suppression = true; @@ -557,7 +559,10 @@ class OutgoingAudioChannel : public sigslot::has_slots<> { std::vector streamIds; streamIds.push_back("1"); - _outgoingAudioChannel = _channelManager->CreateVoiceChannel(call, cricket::MediaConfig(), rtpTransport, threads->getMediaThread(), "audio0", false, NativeNetworkingImpl::getDefaulCryptoOptions(), uniqueRandomIdGenerator, audioOptions); + _outgoingAudioChannel = _channelManager->CreateVoiceChannel(call, cricket::MediaConfig(), "audio0", false, NativeNetworkingImpl::getDefaulCryptoOptions(), audioOptions); + _threads->getNetworkThread()->BlockingCall([&]() { + _outgoingAudioChannel->SetRtpTransport(rtpTransport); + }); std::vector codecs; for (const auto &codec : mediaContent.codecs) { @@ -600,22 +605,22 @@ class OutgoingAudioChannel : public sigslot::has_slots<> { incomingAudioDescription->set_codecs(codecs); incomingAudioDescription->set_bandwidth(-1); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + _threads->getWorkerThread()->BlockingCall([&]() { _outgoingAudioChannel->SetPayloadTypeDemuxingEnabled(false); - _outgoingAudioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr); - _outgoingAudioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr); + std::string errorDesc; + _outgoingAudioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, errorDesc); + _outgoingAudioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, errorDesc); }); - //_outgoingAudioChannel->SignalSentPacket().connect(this, &OutgoingAudioChannel::OnSentPacket_w); - //_outgoingAudioChannel->UpdateRtpTransport(nullptr); - setIsMuted(false); } ~OutgoingAudioChannel() { - //_outgoingAudioChannel->SignalSentPacket().disconnect(this); _outgoingAudioChannel->Enable(false); - _channelManager->DestroyVoiceChannel(_outgoingAudioChannel); + _threads->getNetworkThread()->BlockingCall([&]() { + _outgoingAudioChannel->SetRtpTransport(nullptr); + }); + _channelManager->DestroyChannel(_outgoingAudioChannel); _outgoingAudioChannel = nullptr; } @@ -624,7 +629,7 @@ class OutgoingAudioChannel : public sigslot::has_slots<> { _isMuted = isMuted; _outgoingAudioChannel->Enable(!_isMuted); - 
_threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + _threads->getWorkerThread()->BlockingCall([&]() { _outgoingAudioChannel->media_channel()->SetAudioSend(_ssrc, !_isMuted, nullptr, _audioSource); }); } @@ -639,7 +644,7 @@ class OutgoingAudioChannel : public sigslot::has_slots<> { std::shared_ptr _threads; uint32_t _ssrc = 0; webrtc::Call *_call = nullptr; - cricket::ChannelManager *_channelManager = nullptr; + ChannelManager *_channelManager = nullptr; webrtc::LocalAudioSinkAdapter *_audioSource = nullptr; cricket::VoiceChannel *_outgoingAudioChannel = nullptr; @@ -649,12 +654,13 @@ class OutgoingAudioChannel : public sigslot::has_slots<> { class IncomingV2AudioChannel : public sigslot::has_slots<> { public: IncomingV2AudioChannel( - cricket::ChannelManager *channelManager, + ChannelManager *channelManager, webrtc::Call *call, webrtc::RtpTransport *rtpTransport, rtc::UniqueRandomIdGenerator *randomIdGenerator, NegotiatedMediaContent const &mediaContent, std::shared_ptr threads) : + _threads(threads), _ssrc(mediaContent.ssrc), _channelManager(channelManager), _call(call) { @@ -666,7 +672,10 @@ class IncomingV2AudioChannel : public sigslot::has_slots<> { std::string streamId = std::string("stream1"); - _audioChannel = _channelManager->CreateVoiceChannel(call, cricket::MediaConfig(), rtpTransport, threads->getMediaThread(), "0", false, NativeNetworkingImpl::getDefaulCryptoOptions(), randomIdGenerator, audioOptions); + _audioChannel = _channelManager->CreateVoiceChannel(call, cricket::MediaConfig(), "0", false, NativeNetworkingImpl::getDefaulCryptoOptions(), audioOptions); + _threads->getNetworkThread()->BlockingCall([&]() { + _audioChannel->SetRtpTransport(rtpTransport); + }); auto audioCodecs = mediaContent.codecs; @@ -693,10 +702,11 @@ class IncomingV2AudioChannel : public sigslot::has_slots<> { streamParams.set_stream_ids({ streamId }); incomingAudioDescription->AddStream(streamParams); - threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + threads->getWorkerThread()->BlockingCall([&]() { _audioChannel->SetPayloadTypeDemuxingEnabled(false); - _audioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr); - _audioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr); + std::string errorDesc; + _audioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, errorDesc); + _audioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, errorDesc); }); outgoingAudioDescription.reset(); @@ -710,7 +720,10 @@ class IncomingV2AudioChannel : public sigslot::has_slots<> { ~IncomingV2AudioChannel() { _audioChannel->Enable(false); - _channelManager->DestroyVoiceChannel(_audioChannel); + _threads->getNetworkThread()->BlockingCall([&]() { + _audioChannel->SetRtpTransport(nullptr); + }); + _channelManager->DestroyChannel(_audioChannel); _audioChannel = nullptr; } @@ -732,11 +745,12 @@ class IncomingV2AudioChannel : public sigslot::has_slots<> { } private: + std::shared_ptr _threads; uint32_t _ssrc = 0; // Memory is managed by _channelManager cricket::VoiceChannel *_audioChannel = nullptr; // Memory is managed externally - cricket::ChannelManager *_channelManager = nullptr; + ChannelManager *_channelManager = nullptr; webrtc::Call *_call = nullptr; int64_t _creationTimestamp = 0; int64_t _activityTimestamp = 0; @@ -841,7 +855,7 @@ class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_sha public: OutgoingVideoChannel( std::shared_ptr threads, - 
cricket::ChannelManager *channelManager, + ChannelManager *channelManager, webrtc::Call *call, webrtc::RtpTransport *rtpTransport, rtc::UniqueRandomIdGenerator *randomIdGenerator, @@ -857,7 +871,10 @@ class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_sha _rotationUpdated(rotationUpdated) { cricket::VideoOptions videoOptions; videoOptions.is_screencast = isScreencast; - _outgoingVideoChannel = _channelManager->CreateVideoChannel(call, cricket::MediaConfig(), rtpTransport, threads->getMediaThread(), "out" + intToString(mediaContent.ssrc), false, NativeNetworkingImpl::getDefaulCryptoOptions(), randomIdGenerator, videoOptions, videoBitrateAllocatorFactory); + _outgoingVideoChannel = _channelManager->CreateVideoChannel(call, cricket::MediaConfig(), "out" + intToString(mediaContent.ssrc), false, NativeNetworkingImpl::getDefaulCryptoOptions(), videoOptions, videoBitrateAllocatorFactory); + _threads->getNetworkThread()->BlockingCall([&]() { + _outgoingVideoChannel->SetRtpTransport(rtpTransport); + }); auto videoCodecs = mediaContent.codecs; @@ -897,10 +914,11 @@ class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_sha incomingVideoDescription->set_codecs(videoCodecs); incomingVideoDescription->set_bandwidth(-1); - threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + threads->getWorkerThread()->BlockingCall([&]() { _outgoingVideoChannel->SetPayloadTypeDemuxingEnabled(false); - _outgoingVideoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr); - _outgoingVideoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr); + std::string errorDesc; + _outgoingVideoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, errorDesc); + _outgoingVideoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, errorDesc); webrtc::RtpParameters rtpParameters = _outgoingVideoChannel->media_channel()->GetRtpSendParameters(mediaContent.ssrc); @@ -909,14 +927,17 @@ class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_sha _outgoingVideoChannel->Enable(false); - threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + threads->getWorkerThread()->BlockingCall([&]() { _outgoingVideoChannel->media_channel()->SetVideoSend(mediaContent.ssrc, NULL, nullptr); }); } ~OutgoingVideoChannel() { _outgoingVideoChannel->Enable(false); - _channelManager->DestroyVideoChannel(_outgoingVideoChannel); + _threads->getNetworkThread()->BlockingCall([&]() { + _outgoingVideoChannel->SetRtpTransport(nullptr); + }); + _channelManager->DestroyChannel(_outgoingVideoChannel); _outgoingVideoChannel = nullptr; } @@ -927,13 +948,13 @@ class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_sha _outgoingVideoChannel->Enable(true); auto videoCaptureImpl = GetVideoCaptureAssumingSameThread(_videoCapture.get()); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { - _outgoingVideoChannel->media_channel()->SetVideoSend(_mainSsrc, NULL, videoCaptureImpl->source()); + _threads->getWorkerThread()->BlockingCall([&]() { + _outgoingVideoChannel->media_channel()->SetVideoSend(_mainSsrc, NULL, videoCaptureImpl->source().get()); }); const auto weak = std::weak_ptr(shared_from_this()); videoCaptureImpl->setRotationUpdated([threads = _threads, weak](int angle) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -994,7 
+1015,7 @@ class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_sha _videoRotation = signaling_4_0_0::MediaStateMessage::VideoRotation::Rotation0; _outgoingVideoChannel->Enable(false); - _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + _threads->getWorkerThread()->BlockingCall([&]() { _outgoingVideoChannel->media_channel()->SetVideoSend(_mainSsrc, NULL, nullptr); }); } @@ -1019,7 +1040,7 @@ class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_sha uint32_t _mainSsrc = 0; webrtc::Call *_call = nullptr; - cricket::ChannelManager *_channelManager = nullptr; + ChannelManager *_channelManager = nullptr; cricket::VideoChannel *_outgoingVideoChannel = nullptr; std::function _rotationUpdated; @@ -1077,20 +1098,24 @@ class VideoSinkImpl : public rtc::VideoSinkInterface { class IncomingV2VideoChannel : public sigslot::has_slots<> { public: IncomingV2VideoChannel( - cricket::ChannelManager *channelManager, + ChannelManager *channelManager, webrtc::Call *call, webrtc::RtpTransport *rtpTransport, rtc::UniqueRandomIdGenerator *randomIdGenerator, NegotiatedMediaContent const &mediaContent, std::string const &streamId, std::shared_ptr threads) : + _threads(threads), _channelManager(channelManager), _call(call) { _videoSink.reset(new VideoSinkImpl()); _videoBitrateAllocatorFactory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory(); - _videoChannel = _channelManager->CreateVideoChannel(call, cricket::MediaConfig(), rtpTransport, threads->getMediaThread(), streamId, false, NativeNetworkingImpl::getDefaulCryptoOptions(), randomIdGenerator, cricket::VideoOptions(), _videoBitrateAllocatorFactory.get()); + _videoChannel = _channelManager->CreateVideoChannel(call, cricket::MediaConfig(), streamId, false, NativeNetworkingImpl::getDefaulCryptoOptions(), cricket::VideoOptions(), _videoBitrateAllocatorFactory.get()); + _threads->getNetworkThread()->BlockingCall([&]() { + _videoChannel->SetRtpTransport(rtpTransport); + }); std::vector videoCodecs = mediaContent.codecs; @@ -1136,10 +1161,11 @@ class IncomingV2VideoChannel : public sigslot::has_slots<> { incomingVideoDescription->AddStream(videoRecvStreamParams); - threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + threads->getWorkerThread()->BlockingCall([&]() { _videoChannel->SetPayloadTypeDemuxingEnabled(false); - _videoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr); - _videoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr); + std::string errorDesc; + _videoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, errorDesc); + _videoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, errorDesc); _videoChannel->media_channel()->SetSink(_mainVideoSsrc, _videoSink.get()); }); @@ -1149,7 +1175,10 @@ class IncomingV2VideoChannel : public sigslot::has_slots<> { ~IncomingV2VideoChannel() { _videoChannel->Enable(false); - _channelManager->DestroyVideoChannel(_videoChannel); + _threads->getNetworkThread()->BlockingCall([&]() { + _videoChannel->SetRtpTransport(nullptr); + }); + _channelManager->DestroyChannel(_videoChannel); _videoChannel = nullptr; } @@ -1163,13 +1192,14 @@ class IncomingV2VideoChannel : public sigslot::has_slots<> { } private: + std::shared_ptr _threads; uint32_t _mainVideoSsrc = 0; std::unique_ptr _videoSink; std::unique_ptr _videoBitrateAllocatorFactory; // Memory is managed by _channelManager cricket::VideoChannel *_videoChannel; // Memory is 
managed externally - cricket::ChannelManager *_channelManager = nullptr; + ChannelManager *_channelManager = nullptr; webrtc::Call *_call = nullptr; }; @@ -1183,7 +1213,7 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_thisperform(RTC_FROM_HERE, [](NativeNetworkingImpl *networking) { - networking->stop(); - }); - _incomingAudioChannel.reset(); _incomingVideoChannel.reset(); _incomingScreencastChannel.reset(); @@ -1208,12 +1234,18 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_thisgetWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { - _channelManager.reset(); + _channelManager.reset(); + + _threads->getWorkerThread()->BlockingCall([&]() { _call.reset(); _audioDeviceModule = nullptr; }); - _threads->getNetworkThread()->Invoke(RTC_FROM_HERE, []() { + + _networking->perform([](NativeNetworkingImpl *networking) { + networking->stop(); + }); + + _threads->getNetworkThread()->BlockingCall([]() { }); } @@ -1228,7 +1260,7 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -1237,7 +1269,7 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -1247,7 +1279,7 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -1262,7 +1294,7 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_this_call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, packet, timestamp); }, .dataChannelStateUpdated = [threads, weak](bool isDataChannelOpen) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -1271,7 +1303,7 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -1288,49 +1320,52 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_thisgetWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { - cricket::MediaEngineDependencies mediaDeps; - mediaDeps.task_queue_factory = _taskQueueFactory.get(); - mediaDeps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory(); - mediaDeps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory(); + _threads->getWorkerThread()->BlockingCall([&]() { + _audioDeviceModule = createAudioDeviceModule(); + }); + + cricket::MediaEngineDependencies mediaDeps; + mediaDeps.task_queue_factory = _taskQueueFactory.get(); + mediaDeps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory(); + mediaDeps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory(); - mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(_platformContext, true); - mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(_platformContext); + mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(_platformContext, true); + mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(_platformContext); - 
_audioDeviceModule = createAudioDeviceModule(); - /*if (!_audioDeviceModule) { - return; - }*/ - mediaDeps.adm = _audioDeviceModule; + mediaDeps.adm = _audioDeviceModule; - _availableVideoFormats = mediaDeps.video_encoder_factory->GetSupportedFormats(); + _availableVideoFormats = mediaDeps.video_encoder_factory->GetSupportedFormats(); - std::unique_ptr mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps)); + std::unique_ptr mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps)); - _channelManager = cricket::ChannelManager::Create( - std::move(mediaEngine), - true, - _threads->getWorkerThread(), - _threads->getNetworkThread() - ); + _channelManager = ChannelManager::Create( + std::move(mediaEngine), + _threads->getWorkerThread(), + _threads->getNetworkThread() + ); - webrtc::Call::Config callConfig(_eventLog.get()); - callConfig.task_queue_factory = _taskQueueFactory.get(); - callConfig.trials = &_fieldTrials; - callConfig.audio_state = _channelManager->media_engine()->voice().GetAudioState(); + webrtc::Call::Config callConfig(_eventLog.get(), _threads->getNetworkThread()); + callConfig.task_queue_factory = _taskQueueFactory.get(); + callConfig.trials = &fieldTrialsBasedConfig; - _call.reset(webrtc::Call::Create(callConfig, webrtc::Clock::GetRealTimeClock(), _threads->getSharedModuleThread(), webrtc::ProcessThread::Create("PacerThread"))); + _threads->getNetworkThread()->BlockingCall([&]() { + _rtpTransport = _networking->getSyncAssumingSameThread()->getRtpTransport(); + }); + + _threads->getWorkerThread()->BlockingCall([&]() { + callConfig.audio_state = _channelManager->media_engine()->voice().GetAudioState(); + _call.reset(webrtc::Call::Create(callConfig)); }); _uniqueRandomIdGenerator.reset(new rtc::UniqueRandomIdGenerator()); - _threads->getNetworkThread()->Invoke(RTC_FROM_HERE, [this]() { + _threads->getNetworkThread()->BlockingCall([this]() { _rtpTransport = _networking->getSyncAssumingSameThread()->getRtpTransport(); }); _videoBitrateAllocatorFactory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory(); - _networking->perform(RTC_FROM_HERE, [](NativeNetworkingImpl *networking) { + _networking->perform([](NativeNetworkingImpl *networking) { networking->start(); }); @@ -1383,7 +1418,7 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -1411,7 +1446,7 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [=] { + threads->getMediaThread()->PostTask([=] { const auto strong = weak.lock(); if (!strong) { return; @@ -1446,7 +1481,7 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_this(shared_from_this()); - _networking->perform(RTC_FROM_HERE, [weak, threads = _threads, isOutgoing = _encryptionKey.isOutgoing](NativeNetworkingImpl *networking) { + _networking->perform([weak, threads = _threads, isOutgoing = _encryptionKey.isOutgoing](NativeNetworkingImpl *networking) { auto localFingerprint = networking->getLocalFingerprint(); std::string hash = localFingerprint->algorithm; std::string fingerprint = localFingerprint->GetRfc4572Fingerprint(); @@ -1461,7 +1496,7 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_thisgetMediaThread()->PostTask(RTC_FROM_HERE, [weak, ufrag, pwd, hash, fingerprint, setup, localIceParams]() { + threads->getMediaThread()->PostTask([weak, ufrag, pwd, hash, fingerprint, setup, 
localIceParams]() { const auto strong = weak.lock(); if (!strong) { return; @@ -1534,7 +1569,7 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_thisfingerprints[0].setup; } - _networking->perform(RTC_FROM_HERE, [threads = _threads, remoteIceParameters = std::move(remoteIceParameters), fingerprint = std::move(fingerprint), sslSetup = std::move(sslSetup)](NativeNetworkingImpl *networking) { + _networking->perform([threads = _threads, remoteIceParameters = std::move(remoteIceParameters), fingerprint = std::move(fingerprint), sslSetup = std::move(sslSetup)](NativeNetworkingImpl *networking) { networking->setRemoteParams(remoteIceParameters, fingerprint.get(), sslSetup); }); @@ -1796,7 +1831,7 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_thisperform(RTC_FROM_HERE, [threads = _threads, parsedCandidates = _pendingIceCandidates](NativeNetworkingImpl *networking) { + _networking->perform([threads = _threads, parsedCandidates = _pendingIceCandidates](NativeNetworkingImpl *networking) { networking->addCandidates(parsedCandidates); }); _pendingIceCandidates.clear(); @@ -1830,7 +1865,7 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_thisperform(RTC_FROM_HERE, [stringData = std::move(stringData)](NativeNetworkingImpl *networking) { + _networking->perform([stringData = std::move(stringData)](NativeNetworkingImpl *networking) { networking->sendDataChannelMessage(stringData); }); } @@ -2041,7 +2076,7 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_this _stateUpdated; std::function _signalBarsUpdated; - std::function _audioLevelUpdated; + std::function _audioLevelsUpdated; std::function _remoteBatteryLevelIsLowUpdated; std::function _remoteMediaStateUpdated; std::function _remotePrefferedAspectRatioUpdated; @@ -2056,15 +2091,13 @@ class InstanceV2_4_0_0ImplInternal : public std::enable_shared_from_this _eventLog; std::unique_ptr _taskQueueFactory; - std::unique_ptr _mediaEngine; std::unique_ptr _call; - webrtc::FieldTrialBasedConfig _fieldTrials; webrtc::LocalAudioSinkAdapter _audioSource; rtc::scoped_refptr _audioDeviceModule; std::unique_ptr _uniqueRandomIdGenerator; webrtc::RtpTransport *_rtpTransport = nullptr; - std::unique_ptr _channelManager; + std::unique_ptr _channelManager; std::unique_ptr _videoBitrateAllocatorFactory; std::shared_ptr> _networking; @@ -2113,7 +2146,7 @@ InstanceV2_4_0_0Impl::InstanceV2_4_0_0Impl(Descriptor &&descriptor) { _internal.reset(new ThreadLocalObject(_threads->getMediaThread(), [descriptor = std::move(descriptor), threads = _threads]() mutable { return new InstanceV2_4_0_0ImplInternal(std::move(descriptor), threads); })); - _internal->perform(RTC_FROM_HERE, [](InstanceV2_4_0_0ImplInternal *internal) { + _internal->perform([](InstanceV2_4_0_0ImplInternal *internal) { internal->start(); }); } @@ -2123,55 +2156,55 @@ InstanceV2_4_0_0Impl::~InstanceV2_4_0_0Impl() { } void InstanceV2_4_0_0Impl::receiveSignalingData(const std::vector &data) { - _internal->perform(RTC_FROM_HERE, [data](InstanceV2_4_0_0ImplInternal *internal) { + _internal->perform([data](InstanceV2_4_0_0ImplInternal *internal) { internal->receiveSignalingData(data); }); } void InstanceV2_4_0_0Impl::setVideoCapture(std::shared_ptr videoCapture) { - _internal->perform(RTC_FROM_HERE, [videoCapture](InstanceV2_4_0_0ImplInternal *internal) { + _internal->perform([videoCapture](InstanceV2_4_0_0ImplInternal *internal) { internal->setVideoCapture(videoCapture); }); } void InstanceV2_4_0_0Impl::setRequestedVideoAspect(float 
aspect) { - _internal->perform(RTC_FROM_HERE, [aspect](InstanceV2_4_0_0ImplInternal *internal) { + _internal->perform([aspect](InstanceV2_4_0_0ImplInternal *internal) { internal->setRequestedVideoAspect(aspect); }); } void InstanceV2_4_0_0Impl::setNetworkType(NetworkType networkType) { - _internal->perform(RTC_FROM_HERE, [networkType](InstanceV2_4_0_0ImplInternal *internal) { + _internal->perform([networkType](InstanceV2_4_0_0ImplInternal *internal) { internal->setNetworkType(networkType); }); } void InstanceV2_4_0_0Impl::setMuteMicrophone(bool muteMicrophone) { - _internal->perform(RTC_FROM_HERE, [muteMicrophone](InstanceV2_4_0_0ImplInternal *internal) { + _internal->perform([muteMicrophone](InstanceV2_4_0_0ImplInternal *internal) { internal->setMuteMicrophone(muteMicrophone); }); } void InstanceV2_4_0_0Impl::setIncomingVideoOutput(std::shared_ptr> sink) { - _internal->perform(RTC_FROM_HERE, [sink](InstanceV2_4_0_0ImplInternal *internal) { + _internal->perform([sink](InstanceV2_4_0_0ImplInternal *internal) { internal->setIncomingVideoOutput(sink); }); } void InstanceV2_4_0_0Impl::setAudioInputDevice(std::string id) { - _internal->perform(RTC_FROM_HERE, [id](InstanceV2_4_0_0ImplInternal *internal) { + _internal->perform([id](InstanceV2_4_0_0ImplInternal *internal) { internal->setAudioInputDevice(id); }); } void InstanceV2_4_0_0Impl::setAudioOutputDevice(std::string id) { - _internal->perform(RTC_FROM_HERE, [id](InstanceV2_4_0_0ImplInternal *internal) { + _internal->perform([id](InstanceV2_4_0_0ImplInternal *internal) { internal->setAudioOutputDevice(id); }); } void InstanceV2_4_0_0Impl::setIsLowBatteryLevel(bool isLowBatteryLevel) { - _internal->perform(RTC_FROM_HERE, [isLowBatteryLevel](InstanceV2_4_0_0ImplInternal *internal) { + _internal->perform([isLowBatteryLevel](InstanceV2_4_0_0ImplInternal *internal) { internal->setIsLowBatteryLevel(isLowBatteryLevel); }); } @@ -2193,7 +2226,7 @@ void InstanceV2_4_0_0Impl::setEchoCancellationStrength(int strength) { std::vector InstanceV2_4_0_0Impl::GetVersions() { std::vector result; - result.push_back("4.0.0"); + result.push_back("6.0.0"); return result; } @@ -2226,7 +2259,7 @@ void InstanceV2_4_0_0Impl::stop(std::function completion) { if (_logSink) { debugLog = _logSink->result(); } - _internal->perform(RTC_FROM_HERE, [completion, debugLog = std::move(debugLog)](InstanceV2_4_0_0ImplInternal *internal) mutable { + _internal->perform([completion, debugLog = std::move(debugLog)](InstanceV2_4_0_0ImplInternal *internal) mutable { internal->stop([completion, debugLog = std::move(debugLog)](FinalState finalState) mutable { finalState.debugLog = debugLog; completion(finalState); diff --git a/TMessagesProj/jni/voip/webrtc/absl/ABSEIL_ISSUE_TEMPLATE.md b/TMessagesProj/jni/voip/webrtc/absl/ABSEIL_ISSUE_TEMPLATE.md deleted file mode 100644 index ed5461f166..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/ABSEIL_ISSUE_TEMPLATE.md +++ /dev/null @@ -1,22 +0,0 @@ -Please submit a new Abseil Issue using the template below: - -## [Short title of proposed API change(s)] - --------------------------------------------------------------------------------- --------------------------------------------------------------------------------- - -## Background - -[Provide the background information that is required in order to evaluate the -proposed API changes. No controversial claims should be made here. If there are -design constraints that need to be considered, they should be presented here -**along with justification for those constraints**. 
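The InstanceV2_4_0_0Impl.cpp hunks above repeat one migration pattern: `rtc::Thread::Invoke(RTC_FROM_HERE, ...)` becomes `BlockingCall(...)`, the RTP transport is attached to (and later detached from) a channel explicitly on the network thread instead of being passed to `CreateVideoChannel()`, and `SetLocalContent`/`SetRemoteContent` now report errors through a `std::string&`. A minimal sketch of that pattern follows; the function name, signature and parameter names are invented for illustration and are not code from the patch, only the calls shown in the hunks above are assumed.

```
// Illustration of the threading pattern used by the updated channel setup.
void AttachOutgoingVideoChannel(
    const std::shared_ptr<Threads> &threads,
    cricket::VideoChannel *channel,
    webrtc::RtpTransport *rtpTransport,
    cricket::VideoContentDescription *localDescription,
    cricket::VideoContentDescription *remoteDescription) {
  // The transport is no longer a CreateVideoChannel() argument; it is wired up
  // on the network thread and cleared there again before DestroyChannel().
  threads->getNetworkThread()->BlockingCall([&] {
    channel->SetRtpTransport(rtpTransport);
  });

  // Invoke(RTC_FROM_HERE, ...) is replaced by BlockingCall(...); the content
  // setters take a std::string& for the error description instead of a pointer.
  threads->getWorkerThread()->BlockingCall([&] {
    channel->SetPayloadTypeDemuxingEnabled(false);
    std::string errorDesc;
    channel->SetLocalContent(localDescription, webrtc::SdpType::kOffer, errorDesc);
    channel->SetRemoteContent(remoteDescription, webrtc::SdpType::kAnswer, errorDesc);
  });
}
```

The same attach/detach order shows up in the destructors above, where `SetRtpTransport(nullptr)` runs on the network thread before `DestroyChannel()`.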
Linking to other docs is -good, but please keep the **pertinent information as self contained** as -possible in this section.] - -## Proposed API Change (s) - -[Please clearly describe the API change(s) being proposed. If multiple changes, -please keep them clearly distinguished. When possible, **use example code -snippets to illustrate before-after API usages**. List pros-n-cons. Highlight -the main questions that you want to be answered. Given the Abseil project compatibility requirements, describe why the API change is safe.] diff --git a/TMessagesProj/jni/voip/webrtc/absl/CONTRIBUTING.md b/TMessagesProj/jni/voip/webrtc/absl/CONTRIBUTING.md deleted file mode 100644 index 9dadae9376..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/CONTRIBUTING.md +++ /dev/null @@ -1,141 +0,0 @@ -# How to Contribute to Abseil - -We'd love to accept your patches and contributions to this project. There are -just a few small guidelines you need to follow. - -NOTE: If you are new to GitHub, please start by reading [Pull Request -howto](https://help.github.com/articles/about-pull-requests/) - -## Contributor License Agreement - -Contributions to this project must be accompanied by a Contributor License -Agreement. You (or your employer) retain the copyright to your contribution, -this simply gives us permission to use and redistribute your contributions as -part of the project. Head over to to see -your current agreements on file or to sign a new one. - -You generally only need to submit a CLA once, so if you've already submitted one -(even if it was for a different project), you probably don't need to do it -again. - -## Contribution Guidelines - -Potential contributors sometimes ask us if the Abseil project is the appropriate -home for their utility library code or for specific functions implementing -missing portions of the standard. Often, the answer to this question is "no". -We’d like to articulate our thinking on this issue so that our choices can be -understood by everyone and so that contributors can have a better intuition -about whether Abseil might be interested in adopting a new library. - -### Priorities - -Although our mission is to augment the C++ standard library, our goal is not to -provide a full forward-compatible implementation of the latest standard. For us -to consider a library for inclusion in Abseil, it is not enough that a library -is useful. We generally choose to release a library when it meets at least one -of the following criteria: - -* **Widespread usage** - Using our internal codebase to help gauge usage, most - of the libraries we've released have tens of thousands of users. -* **Anticipated widespread usage** - Pre-adoption of some standard-compliant - APIs may not have broad adoption initially but can be expected to pick up - usage when it replaces legacy APIs. `absl::from_chars`, for example, - replaces existing code that converts strings to numbers and will therefore - likely see usage growth. -* **High impact** - APIs that provide a key solution to a specific problem, - such as `absl::FixedArray`, have higher impact than usage numbers may signal - and are released because of their importance. -* **Direct support for a library that falls under one of the above** - When we - want access to a smaller library as an implementation detail for a - higher-priority library we plan to release, we may release it, as we did - with portions of `absl/meta/type_traits.h`. 
One consequence of this is that - the presence of a library in Abseil does not necessarily mean that other - similar libraries would be a high priority. - -### API Freeze Consequences - -Via the -[Abseil Compatibility Guidelines](https://abseil.io/about/compatibility), we -have promised a large degree of API stability. In particular, we will not make -backward-incompatible changes to released APIs without also shipping a tool or -process that can upgrade our users' code. We are not yet at the point of easily -releasing such tools. Therefore, at this time, shipping a library establishes an -API contract which is borderline unchangeable. (We can add new functionality, -but we cannot easily change existing behavior.) This constraint forces us to -very carefully review all APIs that we ship. - - -## Coding Style - -To keep the source consistent, readable, diffable and easy to merge, we use a -fairly rigid coding style, as defined by the -[google-styleguide](https://github.com/google/styleguide) project. All patches -will be expected to conform to the style outlined -[here](https://google.github.io/styleguide/cppguide.html). - -## Guidelines for Pull Requests - -* If you are a Googler, it is preferable to first create an internal CL and - have it reviewed and submitted. The code propagation process will deliver - the change to GitHub. - -* Create **small PRs** that are narrowly focused on **addressing a single - concern**. We often receive PRs that are trying to fix several things at a - time, but if only one fix is considered acceptable, nothing gets merged and - both author's & review's time is wasted. Create more PRs to address - different concerns and everyone will be happy. - -* For speculative changes, consider opening an [Abseil - issue](https://github.com/abseil/abseil-cpp/issues) and discussing it first. - If you are suggesting a behavioral or API change, consider starting with an - [Abseil proposal template](ABSEIL_ISSUE_TEMPLATE.md). - -* Provide a good **PR description** as a record of **what** change is being - made and **why** it was made. Link to a GitHub issue if it exists. - -* Don't fix code style and formatting unless you are already changing that - line to address an issue. Formatting of modified lines may be done using - `git clang-format`. PRs with irrelevant changes won't be merged. If - you do want to fix formatting or style, do that in a separate PR. - -* Unless your PR is trivial, you should expect there will be reviewer comments - that you'll need to address before merging. We expect you to be reasonably - responsive to those comments, otherwise the PR will be closed after 2-3 - weeks of inactivity. - -* Maintain **clean commit history** and use **meaningful commit messages**. - PRs with messy commit history are difficult to review and won't be merged. - Use `rebase -i upstream/master` to curate your commit history and/or to - bring in latest changes from master (but avoid rebasing in the middle of a - code review). - -* Keep your PR up to date with upstream/master (if there are merge conflicts, - we can't really merge your change). - -* **All tests need to be passing** before your change can be merged. We - recommend you **run tests locally** (see below) - -* Exceptions to the rules can be made if there's a compelling reason for doing - so. That is - the rules are here to serve us, not the other way around, and - the rules need to be serving their intended purpose to be valuable. - -* All submissions, including submissions by project members, require review. 
- -## Running Tests - -If you have [Bazel](https://bazel.build/) installed, use `bazel test ---test_tag_filters="-benchmark" ...` to run the unit tests. - -If you are running the Linux operating system and have -[Docker](https://www.docker.com/) installed, you can also run the `linux_*.sh` -scripts under the [`ci/`](https://github.com/abseil/abseil-cpp/tree/master/ci) -directory to test Abseil under a variety of conditions. - -## Abseil Committers - -The current members of the Abseil engineering team are the only committers at -present. - -## Release Process - -Abseil lives at head, where latest-and-greatest code can be found. diff --git a/TMessagesProj/jni/voip/webrtc/absl/FAQ.md b/TMessagesProj/jni/voip/webrtc/absl/FAQ.md deleted file mode 100644 index 78028fc09f..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/FAQ.md +++ /dev/null @@ -1,164 +0,0 @@ -# Abseil FAQ - -## Is Abseil the right home for my utility library? - -Most often the answer to the question is "no." As both the [About -Abseil](https://abseil.io/about/) page and our [contributing -guidelines](https://github.com/abseil/abseil-cpp/blob/master/CONTRIBUTING.md#contribution-guidelines) -explain, Abseil contains a variety of core C++ library code that is widely used -at [Google](https://www.google.com/). As such, Abseil's primary purpose is to be -used as a dependency by Google's open source C++ projects. While we do hope that -Abseil is also useful to the C++ community at large, this added constraint also -means that we are unlikely to accept a contribution of utility code that isn't -already widely used by Google. - -## How do I set the C++ dialect used to build Abseil? - -The short answer is that whatever mechanism you choose, you need to make sure -that you set this option consistently at the global level for your entire -project. If, for example, you want to set the C++ dialect to C++17, with -[Bazel](https://bazel.build/) as the build system and `gcc` or `clang` as the -compiler, there are several ways to do this: -* Pass `--cxxopt=-std=c++17` on the command line (for example, `bazel build - --cxxopt=-std=c++17 ...`) -* Set the environment variable `BAZEL_CXXOPTS` (for example, - `BAZEL_CXXOPTS=-std=c++17`) -* Add `build --cxxopt=-std=c++17` to your [`.bazelrc` - file](https://docs.bazel.build/versions/master/guide.html#bazelrc) - -If you are using CMake as the build system, you'll need to add a line like -`set(CMAKE_CXX_STANDARD 17)` to your top level `CMakeLists.txt` file. See the -[CMake build -instructions](https://github.com/abseil/abseil-cpp/blob/master/CMake/README.md) -for more information. - -For a longer answer to this question and to understand why some other approaches -don't work, see the answer to ["What is ABI and why don't you recommend using a -pre-compiled version of -Abseil?"](#what-is-abi-and-why-dont-you-recommend-using-a-pre-compiled-version-of-abseil) - -## What is ABI and why don't you recommend using a pre-compiled version of Abseil? - -For the purposes of this discussion, you can think of -[ABI](https://en.wikipedia.org/wiki/Application_binary_interface) as the -compiled representation of the interfaces in code. This is in contrast to -[API](https://en.wikipedia.org/wiki/Application_programming_interface), which -you can think of as the interfaces as defined by the code itself. [Abseil has a -strong promise of API compatibility, but does not make any promise of ABI -compatibility](https://abseil.io/about/compatibility). Let's take a look at what -this means in practice. 
- -You might be tempted to do something like this in a -[Bazel](https://bazel.build/) `BUILD` file: - -``` -# DON'T DO THIS!!! -cc_library( - name = "my_library", - srcs = ["my_library.cc"], - copts = ["-std=c++17"], # May create a mixed-mode compile! - deps = ["@com_google_absl//absl/strings"], -) -``` - -Applying `-std=c++17` to an individual target in your `BUILD` file is going to -compile that specific target in C++17 mode, but it isn't going to ensure the -Abseil library is built in C++17 mode, since the Abseil library itself is a -different build target. If your code includes an Abseil header, then your -program may contain conflicting definitions of the same -class/function/variable/enum, etc. As a rule, all compile options that affect -the ABI of a program need to be applied to the entire build on a global basis. - -C++ has something called the [One Definition -Rule](https://en.wikipedia.org/wiki/One_Definition_Rule) (ODR). C++ doesn't -allow multiple definitions of the same class/function/variable/enum, etc. ODR -violations sometimes result in linker errors, but linkers do not always catch -violations. Uncaught ODR violations can result in strange runtime behaviors or -crashes that can be hard to debug. - -If you build the Abseil library and your code using different compile options -that affect ABI, there is a good chance you will run afoul of the One Definition -Rule. Examples of GCC compile options that affect ABI include (but aren't -limited to) language dialect (e.g. `-std=`), optimization level (e.g. `-O2`), -code generation flags (e.g. `-fexceptions`), and preprocessor defines -(e.g. `-DNDEBUG`). - -If you use a pre-compiled version of Abseil, (for example, from your Linux -distribution package manager or from something like -[vcpkg](https://github.com/microsoft/vcpkg)) you have to be very careful to -ensure ABI compatibility across the components of your program. The only way you -can be sure your program is going to be correct regarding ABI is to ensure -you've used the exact same compile options as were used to build the -pre-compiled library. This does not mean that Abseil cannot work as part of a -Linux distribution since a knowledgeable binary packager will have ensured that -all packages have been built with consistent compile options. This is one of the -reasons we warn against - though do not outright reject - using Abseil as a -pre-compiled library. - -Another possible way that you might run afoul of ABI issues is if you accidentally -include two versions of Abseil in your program. Multiple versions of Abseil can -end up within the same binary if your program uses the Abseil library and -another library also transitively depends on Abseil (resulting in what is -sometimes called the diamond dependency problem). In cases such as this you must -structure your build so that all libraries use the same version of Abseil. -[Abseil's strong promise of API compatibility between -releases](https://abseil.io/about/compatibility) means the latest "HEAD" release -of Abseil is almost certainly the right choice if you are doing as we recommend -and building all of your code from source. - -For these reasons we recommend you avoid pre-compiled code and build the Abseil -library yourself in a consistent manner with the rest of your code. - -## What is "live at head" and how do I do it?
- -From Abseil's point-of-view, "live at head" means that every Abseil source -release (which happens on an almost daily basis) is either API compatible with -the previous release, or comes with an automated tool that you can run over code -to make it compatible. In practice, the need to use an automated tool is -extremely rare. This means that upgrading from one source release to another -should be a routine practice that can and should be performed often. - -We recommend you update to the [latest commit in the `master` branch of -Abseil](https://github.com/abseil/abseil-cpp/commits/master) as often as -possible. Not only will you pick up bug fixes more quickly, but if you have good -automated testing, you will catch and be able to fix any [Hyrum's -Law](https://www.hyrumslaw.com/) dependency problems on an incremental basis -instead of being overwhelmed by them and having difficulty isolating them if you -wait longer between updates. - -If you are using the [Bazel](https://bazel.build/) build system and its -[external dependencies](https://docs.bazel.build/versions/master/external.html) -feature, updating the -[`http_archive`](https://docs.bazel.build/versions/master/repo/http.html#http_archive) -rule in your -[`WORKSPACE`](https://docs.bazel.build/versions/master/be/workspace.html) for -`com_google_abseil` to point to the [latest commit in the `master` branch of -Abseil](https://github.com/abseil/abseil-cpp/commits/master) is all you need to -do. For example, on February 11, 2020, the latest commit to the master branch -was `98eb410c93ad059f9bba1bf43f5bb916fc92a5ea`. To update to this commit, you -would add the following snippet to your `WORKSPACE` file: - -``` -http_archive( - name = "com_google_absl", - urls = ["https://github.com/abseil/abseil-cpp/archive/98eb410c93ad059f9bba1bf43f5bb916fc92a5ea.zip"], # 2020-02-11T18:50:53Z - strip_prefix = "abseil-cpp-98eb410c93ad059f9bba1bf43f5bb916fc92a5ea", - sha256 = "aabf6c57e3834f8dc3873a927f37eaf69975d4b28117fc7427dfb1c661542a87", -) -``` - -To get the `sha256` of this URL, run `curl -sL --output - -https://github.com/abseil/abseil-cpp/archive/98eb410c93ad059f9bba1bf43f5bb916fc92a5ea.zip -| sha256sum -`. - -You can commit the updated `WORKSPACE` file to your source control every time -you update, and if you have good automated testing, you might even consider -automating this. - -One thing we don't recommend is using GitHub's `master.zip` files (for example -[https://github.com/abseil/abseil-cpp/archive/master.zip](https://github.com/abseil/abseil-cpp/archive/master.zip)), -which are always the latest commit in the `master` branch, to implement live at -head. Since these `master.zip` URLs are not versioned, you will lose build -reproducibility. In addition, some build systems, including Bazel, will simply -cache this file, which means you won't actually be updating to the latest -release until your cache is cleared or invalidated. diff --git a/TMessagesProj/jni/voip/webrtc/absl/LICENSE b/TMessagesProj/jni/voip/webrtc/absl/LICENSE deleted file mode 100644 index ccd61dcfe3..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/LICENSE +++ /dev/null @@ -1,203 +0,0 @@ - - Apache License - Version 2.0, January 2004 - https://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. 
- - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - https://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- diff --git a/TMessagesProj/jni/voip/webrtc/absl/LTS.md b/TMessagesProj/jni/voip/webrtc/absl/LTS.md deleted file mode 100644 index ade8b17c73..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/LTS.md +++ /dev/null @@ -1,16 +0,0 @@ -# Long Term Support (LTS) Branches - -This repository contains periodic snapshots of the Abseil codebase that are -Long Term Support (LTS) branches. An LTS branch allows you to use a known -version of Abseil without interfering with other projects which may also, in -turn, use Abseil. (For more information about our releases, see the -[Abseil Release Management](https://abseil.io/about/releases) guide.) - -## LTS Branches - -The following lists LTS branches and the dates on which they have been released: - -* [LTS Branch December 18, 2018](https://github.com/abseil/abseil-cpp/tree/lts_2018_12_18/) -* [LTS Branch June 20, 2018](https://github.com/abseil/abseil-cpp/tree/lts_2018_06_20/) -* [LTS Branch August 8, 2019](https://github.com/abseil/abseil-cpp/tree/lts_2019_08_08/) -* [LTS Branch February 25, 2020](https://github.com/abseil/abseil-cpp/tree/lts_2020_02_25/) diff --git a/TMessagesProj/jni/voip/webrtc/absl/OWNERS b/TMessagesProj/jni/voip/webrtc/absl/OWNERS deleted file mode 100644 index 68650639fd..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/OWNERS +++ /dev/null @@ -1,6 +0,0 @@ -danilchap@chromium.org -kwiberg@chromium.org -mbonadei@chromium.org -phoglund@chromium.org - -# COMPONENT: Internals>Core diff --git a/TMessagesProj/jni/voip/webrtc/absl/README.chromium b/TMessagesProj/jni/voip/webrtc/absl/README.chromium deleted file mode 100644 index 142914f978..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/README.chromium +++ /dev/null @@ -1,36 +0,0 @@ -Name: Abseil -Short Name: absl -URL: https://github.com/abseil/abseil-cpp -License: Apache 2.0 -License File: LICENSE -Version: 0 -Revision: fba8a316c30690097de5d6127ad307d84a1b74ca -Security Critical: yes - -Description: -This directory contains the source code of Abseil for C++. This can be used by -Chromium's dependencies, but shouldn't be used by Chromium itself. -See: https://goo.gl/TgnJb8. - -How to update Abseil: - -1. Download the code from the Abseil git repository (see URL). - -2. Copy the content of the Abseil git repo to //third_party/abseil-cpp. - -3. From //third_party/abseil-cpp/ launch ./rename_annotations.sh. - This script will rewrite dynamic_annotations and thread_annotations - macros and function inside Abseil in order to avoid ODR violations - and macro clashing with Chromium - (see: https://github.com/abseil/abseil-cpp/issues/122). - -Local Modifications: - -* absl/copts.bzl has been translated to //third_party/absl-cpp/BUILD.gn. Both - files contain lists of compiler flags in order to reduce duplication. - -* All the BUILD.bazel files has been translated to BUILD.gn files. - -* Functions and macros in absl/base/dynamic_annotations.{h,cc} and - absl/base/thread_annotations.h have been renamed to avoid ODR - violations and macro clashes with Chromium (see step 3). diff --git a/TMessagesProj/jni/voip/webrtc/absl/README.md b/TMessagesProj/jni/voip/webrtc/absl/README.md deleted file mode 100644 index 85de569658..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/README.md +++ /dev/null @@ -1,114 +0,0 @@ -# Abseil - C++ Common Libraries - -The repository contains the Abseil C++ library code. Abseil is an open-source -collection of C++ code (compliant to C++11) designed to augment the C++ -standard library. 
- -## Table of Contents - -- [About Abseil](#about) -- [Quickstart](#quickstart) -- [Building Abseil](#build) -- [Codemap](#codemap) -- [License](#license) -- [Links](#links) - - -## About Abseil - -Abseil is an open-source collection of C++ library code designed to augment -the C++ standard library. The Abseil library code is collected from Google's -own C++ code base, has been extensively tested and used in production, and -is the same code we depend on in our daily coding lives. - -In some cases, Abseil provides pieces missing from the C++ standard; in -others, Abseil provides alternatives to the standard for special needs -we've found through usage in the Google code base. We denote those cases -clearly within the library code we provide you. - -Abseil is not meant to be a competitor to the standard library; we've -just found that many of these utilities serve a purpose within our code -base, and we now want to provide those resources to the C++ community as -a whole. - - -## Quickstart - -If you want to just get started, make sure you at least run through the -[Abseil Quickstart](https://abseil.io/docs/cpp/quickstart). The Quickstart -contains information about setting up your development environment, downloading -the Abseil code, running tests, and getting a simple binary working. - - -## Building Abseil - -[Bazel](https://bazel.build) is the official build system for Abseil, -which is supported on most major platforms (Linux, Windows, macOS, for example) -and compilers. See the [quickstart](https://abseil.io/docs/cpp/quickstart) for -more information on building Abseil using the Bazel build system. - - -If you require CMake support, please check the -[CMake build instructions](CMake/README.md). - -## Codemap - -Abseil contains the following C++ library components: - -* [`base`](absl/base/) Abseil Fundamentals -
The `base` library contains initialization code and other code which - all other Abseil code depends on. Code within `base` may not depend on any - other code (other than the C++ standard library). -* [`algorithm`](absl/algorithm/) -
The `algorithm` library contains additions to the C++ `` - library and container-based versions of such algorithms. -* [`container`](absl/container/) -
The `container` library contains additional STL-style containers, - including Abseil's unordered "Swiss table" containers. -* [`debugging`](absl/debugging/) -
The `debugging` library contains code useful for enabling leak - checks, and stacktrace and symbolization utilities. -* [`hash`](absl/hash/) -
The `hash` library contains the hashing framework and default hash - functor implementations for hashable types in Abseil. -* [`memory`](absl/memory/) -
The `memory` library contains C++11-compatible versions of - `std::make_unique()` and related memory management facilities. -* [`meta`](absl/meta/) -
The `meta` library contains C++11-compatible versions of type checks - available within C++14 and C++17 versions of the C++ `` library. -* [`numeric`](absl/numeric/) -
The `numeric` library contains C++11-compatible 128-bit integers. -* [`strings`](absl/strings/) -
The `strings` library contains a variety of strings routines and - utilities, including a C++11-compatible version of the C++17 - `std::string_view` type. -* [`synchronization`](absl/synchronization/) -
The `synchronization` library contains concurrency primitives (Abseil's - `absl::Mutex` class, an alternative to `std::mutex`) and a variety of - synchronization abstractions. -* [`time`](absl/time/) -
The `time` library contains abstractions for computing with absolute - points in time, durations of time, and formatting and parsing time within - time zones. -* [`types`](absl/types/) -
The `types` library contains non-container utility types, like a - C++11-compatible version of the C++17 `std::optional` type. -* [`utility`](absl/utility/) -
The `utility` library contains utility and helper code. - -## License - -The Abseil C++ library is licensed under the terms of the Apache -license. See [LICENSE](LICENSE) for more information. - -## Links - -For more information about Abseil: - -* Consult our [Abseil Introduction](https://abseil.io/about/intro) -* Read [Why Adopt Abseil](https://abseil.io/about/philosophy) to understand our - design philosophy. -* Peruse our - [Abseil Compatibility Guarantees](https://abseil.io/about/compatibility) to - understand both what we promise to you, and what we expect of you in return. diff --git a/TMessagesProj/jni/voip/webrtc/absl/UPGRADES.md b/TMessagesProj/jni/voip/webrtc/absl/UPGRADES.md deleted file mode 100644 index 35599d0878..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/UPGRADES.md +++ /dev/null @@ -1,17 +0,0 @@ -# C++ Upgrade Tools - -Abseil may occasionally release API-breaking changes. As noted in our -[Compatibility Guidelines][compatibility-guide], we will aim to provide a tool -to do the work of effecting such API-breaking changes, when absolutely -necessary. - -These tools will be listed on the [C++ Upgrade Tools][upgrade-tools] guide on -https://abseil.io. - -For more information, the [C++ Automated Upgrade Guide][api-upgrades-guide] -outlines this process. - -[compatibility-guide]: https://abseil.io/about/compatibility -[api-upgrades-guide]: https://abseil.io/docs/cpp/tools/api-upgrades -[upgrade-tools]: https://abseil.io/docs/cpp/tools/upgrades/ - diff --git a/TMessagesProj/jni/voip/webrtc/absl/WORKSPACE b/TMessagesProj/jni/voip/webrtc/absl/WORKSPACE deleted file mode 100644 index f2b1046446..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/WORKSPACE +++ /dev/null @@ -1,45 +0,0 @@ -# -# Copyright 2019 The Abseil Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -workspace(name = "com_google_absl") -load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") - -# GoogleTest/GoogleMock framework. Used by most unit-tests. -http_archive( - name = "com_google_googletest", - urls = ["https://github.com/google/googletest/archive/b6cd405286ed8635ece71c72f118e659f4ade3fb.zip"], # 2019-01-07 - strip_prefix = "googletest-b6cd405286ed8635ece71c72f118e659f4ade3fb", - sha256 = "ff7a82736e158c077e76188232eac77913a15dac0b22508c390ab3f88e6d6d86", -) - -# Google benchmark. -http_archive( - name = "com_github_google_benchmark", - urls = ["https://github.com/google/benchmark/archive/16703ff83c1ae6d53e5155df3bb3ab0bc96083be.zip"], - strip_prefix = "benchmark-16703ff83c1ae6d53e5155df3bb3ab0bc96083be", - sha256 = "59f918c8ccd4d74b6ac43484467b500f1d64b40cc1010daa055375b322a43ba3", -) - -# C++ rules for Bazel.
-http_archive( - name = "rules_cc", - sha256 = "9a446e9dd9c1bb180c86977a8dc1e9e659550ae732ae58bd2e8fd51e15b2c91d", - strip_prefix = "rules_cc-262ebec3c2296296526740db4aefce68c80de7fa", - urls = [ - "https://mirror.bazel.build/github.com/bazelbuild/rules_cc/archive/262ebec3c2296296526740db4aefce68c80de7fa.zip", - "https://github.com/bazelbuild/rules_cc/archive/262ebec3c2296296526740db4aefce68c80de7fa.zip", - ], -) diff --git a/TMessagesProj/jni/voip/webrtc/absl/algorithm/equal_benchmark.cc b/TMessagesProj/jni/voip/webrtc/absl/algorithm/equal_benchmark.cc index 7bf62c9a7f..948cd65c54 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/algorithm/equal_benchmark.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/algorithm/equal_benchmark.cc @@ -15,8 +15,8 @@ #include #include -#include "benchmark/benchmark.h" #include "absl/algorithm/algorithm.h" +#include "benchmark/benchmark.h" namespace { diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/attributes.h b/TMessagesProj/jni/voip/webrtc/absl/base/attributes.h index 5721356d46..e11a064add 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/attributes.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/attributes.h @@ -213,6 +213,9 @@ // https://gcc.gnu.org/gcc-4.8/changes.html #if ABSL_HAVE_ATTRIBUTE(no_sanitize_address) #define ABSL_ATTRIBUTE_NO_SANITIZE_ADDRESS __attribute__((no_sanitize_address)) +#elif defined(_MSC_VER) && _MSC_VER >= 1928 +// https://docs.microsoft.com/en-us/cpp/cpp/no-sanitize-address +#define ABSL_ATTRIBUTE_NO_SANITIZE_ADDRESS __declspec(no_sanitize_address) #else #define ABSL_ATTRIBUTE_NO_SANITIZE_ADDRESS #endif @@ -679,9 +682,18 @@ // not compile (on supported platforms) unless the variable has a constant // initializer. This is useful for variables with static and thread storage // duration, because it guarantees that they will not suffer from the so-called -// "static init order fiasco". Prefer to put this attribute on the most visible -// declaration of the variable, if there's more than one, because code that -// accesses the variable can then use the attribute for optimization. +// "static init order fiasco". +// +// This attribute must be placed on the initializing declaration of the +// variable. Some compilers will give a -Wmissing-constinit warning when this +// attribute is placed on some other declaration but missing from the +// initializing declaration. +// +// In some cases (notably with thread_local variables), `ABSL_CONST_INIT` can +// also be used in a non-initializing declaration to tell the compiler that a +// variable is already initialized, reducing overhead that would otherwise be +// incurred by a hidden guard variable. Thus annotating all declarations with +// this attribute is recommended to potentially enhance optimization. // // Example: // @@ -690,14 +702,19 @@ // ABSL_CONST_INIT static MyType my_var; // }; // -// MyType MyClass::my_var = MakeMyType(...); +// ABSL_CONST_INIT MyType MyClass::my_var = MakeMyType(...); +// +// For code or headers that are assured to only build with C++20 and up, prefer +// just using the standard `constinit` keyword directly over this macro. // // Note that this attribute is redundant if the variable is declared constexpr. 
-#if ABSL_HAVE_CPP_ATTRIBUTE(clang::require_constant_initialization) +#if defined(__cpp_constinit) && __cpp_constinit >= 201907L +#define ABSL_CONST_INIT constinit +#elif ABSL_HAVE_CPP_ATTRIBUTE(clang::require_constant_initialization) #define ABSL_CONST_INIT [[clang::require_constant_initialization]] #else #define ABSL_CONST_INIT -#endif // ABSL_HAVE_CPP_ATTRIBUTE(clang::require_constant_initialization) +#endif // ABSL_ATTRIBUTE_PURE_FUNCTION // @@ -742,4 +759,41 @@ #define ABSL_ATTRIBUTE_LIFETIME_BOUND #endif +// ABSL_ATTRIBUTE_TRIVIAL_ABI +// Indicates that a type is "trivially relocatable" -- meaning it can be +// relocated without invoking the constructor/destructor, using a form of move +// elision. +// +// From a memory safety point of view, putting aside destructor ordering, it's +// safe to apply ABSL_ATTRIBUTE_TRIVIAL_ABI if an object's location +// can change over the course of its lifetime: if a constructor can be run one +// place, and then the object magically teleports to another place where some +// methods are run, and then the object teleports to yet another place where it +// is destroyed. This is notably not true for self-referential types, where the +// move-constructor must keep the self-reference up to date. If the type changed +// location without invoking the move constructor, it would have a dangling +// self-reference. +// +// The use of this teleporting machinery means that the number of paired +// move/destroy operations can change, and so it is a bad idea to apply this to +// a type meant to count the number of moves. +// +// Warning: applying this can, rarely, break callers. Objects passed by value +// will be destroyed at the end of the call, instead of the end of the +// full-expression containing the call. In addition, it changes the ABI +// of functions accepting this type by value (e.g. to pass in registers). +// +// See also the upstream documentation: +// https://clang.llvm.org/docs/AttributeReference.html#trivial-abi +// +#if ABSL_HAVE_CPP_ATTRIBUTE(clang::trivial_abi) +#define ABSL_ATTRIBUTE_TRIVIAL_ABI [[clang::trivial_abi]] +#define ABSL_HAVE_ATTRIBUTE_TRIVIAL_ABI 1 +#elif ABSL_HAVE_ATTRIBUTE(trivial_abi) +#define ABSL_ATTRIBUTE_TRIVIAL_ABI __attribute__((trivial_abi)) +#define ABSL_HAVE_ATTRIBUTE_TRIVIAL_ABI 1 +#else +#define ABSL_ATTRIBUTE_TRIVIAL_ABI +#endif + #endif // ABSL_BASE_ATTRIBUTES_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/config.h b/TMessagesProj/jni/voip/webrtc/absl/base/config.h index 373aa0ccbe..fe1d7c7d98 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/config.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/config.h @@ -56,6 +56,25 @@ #include #endif // __cplusplus +// ABSL_INTERNAL_CPLUSPLUS_LANG +// +// MSVC does not set the value of __cplusplus correctly, but instead uses +// _MSVC_LANG as a stand-in. +// https://docs.microsoft.com/en-us/cpp/preprocessor/predefined-macros +// +// However, there are reports that MSVC even sets _MSVC_LANG incorrectly at +// times, for example: +// https://github.com/microsoft/vscode-cpptools/issues/1770 +// https://reviews.llvm.org/D70996 +// +// For this reason, this symbol is considered INTERNAL and code outside of +// Abseil must not use it. +#if defined(_MSVC_LANG) +#define ABSL_INTERNAL_CPLUSPLUS_LANG _MSVC_LANG +#elif defined(__cplusplus) +#define ABSL_INTERNAL_CPLUSPLUS_LANG __cplusplus +#endif + #if defined(__APPLE__) // Included for TARGET_OS_IPHONE, __IPHONE_OS_VERSION_MIN_REQUIRED, // __IPHONE_8_0. 
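The attributes.h changes above pin down two usage rules: `ABSL_CONST_INIT` belongs on the initializing declaration (and now expands to the standard `constinit` keyword when the compiler provides it), and `ABSL_ATTRIBUTE_TRIVIAL_ABI` marks a type as trivially relocatable, which is only safe for types that are not self-referential. A minimal usage sketch under those rules; `request_depth` and `FdHandle` are illustrative names, not identifiers from the patch.

```
#include "absl/base/attributes.h"

// On the initializing declaration; expands to `constinit` under C++20, or to
// [[clang::require_constant_initialization]] where that attribute is available.
ABSL_CONST_INIT thread_local int request_depth = 0;

// A small, non-self-referential handle type. Marking it trivial_abi lets it be
// passed in registers and relocated without running its move constructor.
struct ABSL_ATTRIBUTE_TRIVIAL_ABI FdHandle {
  explicit FdHandle(int fd) : fd_(fd) {}
  FdHandle(FdHandle &&other) noexcept : fd_(other.fd_) { other.fd_ = -1; }
  ~FdHandle() { /* close(fd_) in real code */ }
  int fd_ = -1;
};
```

As the warning in the comment above notes, a type that counts its moves or stores a pointer into itself should not be annotated this way.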
@@ -183,12 +202,6 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' || #define ABSL_HAVE_BUILTIN(x) 0 #endif -#if defined(__is_identifier) -#define ABSL_INTERNAL_HAS_KEYWORD(x) !(__is_identifier(x)) -#else -#define ABSL_INTERNAL_HAS_KEYWORD(x) 0 -#endif - #ifdef __has_feature #define ABSL_HAVE_FEATURE(f) __has_feature(f) #else @@ -212,11 +225,12 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' || #endif // ABSL_HAVE_TLS is defined to 1 when __thread should be supported. -// We assume __thread is supported on Linux when compiled with Clang or compiled -// against libstdc++ with _GLIBCXX_HAVE_TLS defined. +// We assume __thread is supported on Linux or Asylo when compiled with Clang or +// compiled against libstdc++ with _GLIBCXX_HAVE_TLS defined. #ifdef ABSL_HAVE_TLS #error ABSL_HAVE_TLS cannot be directly set -#elif defined(__linux__) && (defined(__clang__) || defined(_GLIBCXX_HAVE_TLS)) +#elif (defined(__linux__) || defined(__ASYLO__)) && \ + (defined(__clang__) || defined(_GLIBCXX_HAVE_TLS)) #define ABSL_HAVE_TLS 1 #endif @@ -229,6 +243,7 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' || #ifdef ABSL_HAVE_STD_IS_TRIVIALLY_DESTRUCTIBLE #error ABSL_HAVE_STD_IS_TRIVIALLY_DESTRUCTIBLE cannot be directly set #elif defined(_LIBCPP_VERSION) || defined(_MSC_VER) || \ + (defined(__clang__) && __clang_major__ >= 15) || \ (!defined(__clang__) && defined(__GLIBCXX__) && \ ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(4, 8)) #define ABSL_HAVE_STD_IS_TRIVIALLY_DESTRUCTIBLE 1 @@ -250,26 +265,25 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' || #elif defined(ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE) #error ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE cannot directly set #elif (defined(__clang__) && defined(_LIBCPP_VERSION)) || \ + (defined(__clang__) && __clang_major__ >= 15) || \ (!defined(__clang__) && \ ((ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(7, 4) && defined(__GLIBCXX__)) || \ (ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(8, 2) && \ defined(_LIBCPP_VERSION)))) || \ - (defined(_MSC_VER) && !defined(__NVCC__)) + (defined(_MSC_VER) && !defined(__NVCC__) && !defined(__clang__)) #define ABSL_HAVE_STD_IS_TRIVIALLY_CONSTRUCTIBLE 1 #define ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE 1 #endif -// ABSL_HAVE_SOURCE_LOCATION_CURRENT +// ABSL_HAVE_STD_IS_TRIVIALLY_COPYABLE // -// Indicates whether `absl::SourceLocation::current()` will return useful -// information in some contexts. -#ifndef ABSL_HAVE_SOURCE_LOCATION_CURRENT -#if ABSL_INTERNAL_HAS_KEYWORD(__builtin_LINE) && \ - ABSL_INTERNAL_HAS_KEYWORD(__builtin_FILE) -#define ABSL_HAVE_SOURCE_LOCATION_CURRENT 1 -#elif ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(5, 0) -#define ABSL_HAVE_SOURCE_LOCATION_CURRENT 1 -#endif +// Checks whether `std::is_trivially_copyable` is supported. +// +// Notes: Clang 15+ with libc++ supports these features, GCC hasn't been tested. 
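Editor's note: a hedged sketch of how a feature-test macro like the one defined just below is typically consumed downstream; BitwiseCopy is a hypothetical helper, not something introduced by this patch.

#include <cstring>
#include <type_traits>

#include "absl/base/config.h"

// Copies the object representation of `src` into `dst`. The static_assert is
// only enforced where the standard trait is known to exist, which is exactly
// what ABSL_HAVE_STD_IS_TRIVIALLY_COPYABLE advertises.
template <typename T>
void BitwiseCopy(T& dst, const T& src) {
#ifdef ABSL_HAVE_STD_IS_TRIVIALLY_COPYABLE
  static_assert(std::is_trivially_copyable<T>::value,
                "BitwiseCopy requires a trivially copyable type");
#endif
  std::memcpy(&dst, &src, sizeof(T));
}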
+#if defined(ABSL_HAVE_STD_IS_TRIVIALLY_COPYABLE) +#error ABSL_HAVE_STD_IS_TRIVIALLY_COPYABLE cannot be directly set +#elif defined(__clang__) && (__clang_major__ >= 15) +#define ABSL_HAVE_STD_IS_TRIVIALLY_COPYABLE 1 #endif // ABSL_HAVE_THREAD_LOCAL @@ -414,7 +428,8 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' || defined(_AIX) || defined(__ros__) || defined(__native_client__) || \ defined(__asmjs__) || defined(__wasm__) || defined(__Fuchsia__) || \ defined(__sun) || defined(__ASYLO__) || defined(__myriad2__) || \ - defined(__HAIKU__) + defined(__HAIKU__) || defined(__OpenBSD__) || defined(__NetBSD__) || \ + defined(__QNX__) #define ABSL_HAVE_MMAP 1 #endif @@ -425,7 +440,8 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' || #ifdef ABSL_HAVE_PTHREAD_GETSCHEDPARAM #error ABSL_HAVE_PTHREAD_GETSCHEDPARAM cannot be directly set #elif defined(__linux__) || defined(__APPLE__) || defined(__FreeBSD__) || \ - defined(_AIX) || defined(__ros__) + defined(_AIX) || defined(__ros__) || defined(__OpenBSD__) || \ + defined(__NetBSD__) #define ABSL_HAVE_PTHREAD_GETSCHEDPARAM 1 #endif @@ -560,6 +576,9 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' || #define ABSL_INTERNAL_APPLE_CXX17_TYPES_UNAVAILABLE 0 #endif +#undef ABSL_INTERNAL_APPLE_CXX17_TYPES_UNAVAILABLE +#define ABSL_INTERNAL_APPLE_CXX17_TYPES_UNAVAILABLE 1 + // ABSL_HAVE_STD_ANY // // Checks whether C++17 std::any is available by checking whether exists. @@ -724,8 +743,6 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' || #endif #endif -#undef ABSL_INTERNAL_HAS_KEYWORD - // ABSL_DLL // // When building Abseil as a DLL, this macro expands to `__declspec(dllexport)` @@ -793,10 +810,27 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' || // ABSL_HAVE_LEAK_SANITIZER // // LeakSanitizer (or lsan) is a detector of memory leaks. +// https://clang.llvm.org/docs/LeakSanitizer.html +// https://github.com/google/sanitizers/wiki/AddressSanitizerLeakSanitizer +// +// The macro ABSL_HAVE_LEAK_SANITIZER can be used to detect at compile-time +// whether the LeakSanitizer is potentially available. However, just because the +// LeakSanitizer is available does not mean it is active. Use the +// always-available run-time interface in //absl/debugging/leak_check.h for +// interacting with LeakSanitizer. #ifdef ABSL_HAVE_LEAK_SANITIZER #error "ABSL_HAVE_LEAK_SANITIZER cannot be directly set." +#elif defined(LEAK_SANITIZER) +// GCC provides no method for detecting the presense of the standalone +// LeakSanitizer (-fsanitize=leak), so GCC users of -fsanitize=leak should also +// use -DLEAK_SANITIZER. +#define ABSL_HAVE_LEAK_SANITIZER 1 +// Clang standalone LeakSanitizer (-fsanitize=leak) #elif ABSL_HAVE_FEATURE(leak_sanitizer) #define ABSL_HAVE_LEAK_SANITIZER 1 +#elif defined(ABSL_HAVE_ADDRESS_SANITIZER) +// GCC or Clang using the LeakSanitizer integrated into AddressSanitizer. 
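Editor's note: for context on the run-time interface the leak-sanitizer comment above points to, a sketch of the usual deliberately-leaked-singleton pattern; Config and GlobalConfig are invented names, and absl::IgnoreLeak is the leak_check.h API as I understand it, not something this patch touches.

#include "absl/base/config.h"
#include "absl/debugging/leak_check.h"

struct Config {
  int verbosity = 0;
};

// A long-lived singleton that is intentionally never freed. IgnoreLeak tells
// an active LeakSanitizer not to report the allocation; without a sanitizer
// it is a no-op, so the call is safe to leave in unconditionally.
Config* GlobalConfig() {
  static Config* config = absl::IgnoreLeak(new Config);
  return config;
}

// Compile-time signal only: "potentially available", as the comment above
// stresses, not proof that leak checking is active at run time.
constexpr bool kBuiltWithLeakSanitizer =
#ifdef ABSL_HAVE_LEAK_SANITIZER
    true;
#else
    false;
#endif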
+#define ABSL_HAVE_LEAK_SANITIZER 1 #endif // ABSL_HAVE_CLASS_TEMPLATE_ARGUMENT_DEDUCTION @@ -808,6 +842,29 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' || #define ABSL_HAVE_CLASS_TEMPLATE_ARGUMENT_DEDUCTION 1 #endif +// ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL +// +// Prior to C++17, static constexpr variables defined in classes required a +// separate definition outside of the class body, for example: +// +// class Foo { +// static constexpr int kBar = 0; +// }; +// constexpr int Foo::kBar; +// +// In C++17, these variables defined in classes are considered inline variables, +// and the extra declaration is redundant. Since some compilers warn on the +// extra declarations, ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL can be used +// conditionally ignore them: +// +// #ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL +// constexpr int Foo::kBar; +// #endif +#if defined(ABSL_INTERNAL_CPLUSPLUS_LANG) && \ + ABSL_INTERNAL_CPLUSPLUS_LANG < 201703L +#define ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL 1 +#endif + // `ABSL_INTERNAL_HAS_RTTI` determines whether abseil is being compiled with // RTTI support. #ifdef ABSL_INTERNAL_HAS_RTTI @@ -816,4 +873,62 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' || #define ABSL_INTERNAL_HAS_RTTI 1 #endif // !defined(__GNUC__) || defined(__GXX_RTTI) +// ABSL_INTERNAL_HAVE_SSE is used for compile-time detection of SSE support. +// See https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html for an overview of +// which architectures support the various x86 instruction sets. +#ifdef ABSL_INTERNAL_HAVE_SSE +#error ABSL_INTERNAL_HAVE_SSE cannot be directly set +#elif defined(__SSE__) +#define ABSL_INTERNAL_HAVE_SSE 1 +#elif defined(_M_X64) || (defined(_M_IX86_FP) && _M_IX86_FP >= 1) +// MSVC only defines _M_IX86_FP for x86 32-bit code, and _M_IX86_FP >= 1 +// indicates that at least SSE was targeted with the /arch:SSE option. +// All x86-64 processors support SSE, so support can be assumed. +// https://docs.microsoft.com/en-us/cpp/preprocessor/predefined-macros +#define ABSL_INTERNAL_HAVE_SSE 1 +#endif + +// ABSL_INTERNAL_HAVE_SSE2 is used for compile-time detection of SSE2 support. +// See https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html for an overview of +// which architectures support the various x86 instruction sets. +#ifdef ABSL_INTERNAL_HAVE_SSE2 +#error ABSL_INTERNAL_HAVE_SSE2 cannot be directly set +#elif defined(__SSE2__) +#define ABSL_INTERNAL_HAVE_SSE2 1 +#elif defined(_M_X64) || (defined(_M_IX86_FP) && _M_IX86_FP >= 2) +// MSVC only defines _M_IX86_FP for x86 32-bit code, and _M_IX86_FP >= 2 +// indicates that at least SSE2 was targeted with the /arch:SSE2 option. +// All x86-64 processors support SSE2, so support can be assumed. +// https://docs.microsoft.com/en-us/cpp/preprocessor/predefined-macros +#define ABSL_INTERNAL_HAVE_SSE2 1 +#endif + +// ABSL_INTERNAL_HAVE_SSSE3 is used for compile-time detection of SSSE3 support. +// See https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html for an overview of +// which architectures support the various x86 instruction sets. +// +// MSVC does not have a mode that targets SSSE3 at compile-time. To use SSSE3 +// with MSVC requires either assuming that the code will only every run on CPUs +// that support SSSE3, otherwise __cpuid() can be used to detect support at +// runtime and fallback to a non-SSSE3 implementation when SSSE3 is unsupported +// by the CPU. 
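Editor's note: the MSVC caveat in the SSSE3 comment above usually translates into a small runtime probe; this sketch uses the documented __cpuid intrinsic, and CpuSupportsSsse3 is an invented helper name, not part of the patch.

#include "absl/base/config.h"

#if defined(_MSC_VER)
#include <intrin.h>
#endif

// Returns true when SSSE3 code paths may be used. On toolchains that target
// SSSE3 at compile time the macro defined just below makes this a constant;
// on MSVC the check falls back to CPUID leaf 1, ECX bit 9 (SSSE3), as the
// comment above suggests.
inline bool CpuSupportsSsse3() {
#if defined(ABSL_INTERNAL_HAVE_SSSE3)
  return true;
#elif defined(_MSC_VER)
  int regs[4] = {0, 0, 0, 0};
  __cpuid(regs, 1);
  return (regs[2] & (1 << 9)) != 0;
#else
  return false;  // conservative default for this sketch
#endif
}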
+#ifdef ABSL_INTERNAL_HAVE_SSSE3 +#error ABSL_INTERNAL_HAVE_SSSE3 cannot be directly set +#elif defined(__SSSE3__) +#define ABSL_INTERNAL_HAVE_SSSE3 1 +#endif + +// ABSL_INTERNAL_HAVE_ARM_NEON is used for compile-time detection of NEON (ARM +// SIMD). +// +// If __CUDA_ARCH__ is defined, then we are compiling CUDA code in device mode. +// In device mode, NEON intrinsics are not available, regardless of host +// platform. +// https://llvm.org/docs/CompileCudaWithLLVM.html#detecting-clang-vs-nvcc-from-code +#ifdef ABSL_INTERNAL_HAVE_ARM_NEON +#error ABSL_INTERNAL_HAVE_ARM_NEON cannot be directly set +#elif defined(__ARM_NEON) && !defined(__CUDA_ARCH__) +#define ABSL_INTERNAL_HAVE_ARM_NEON 1 +#endif + #endif // ABSL_BASE_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/dynamic_annotations.h b/TMessagesProj/jni/voip/webrtc/absl/base/dynamic_annotations.h index 1ebf1d124b..3ea7c1568c 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/dynamic_annotations.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/dynamic_annotations.h @@ -53,6 +53,9 @@ #include "absl/base/macros.h" #endif +// TODO(rogeeff): Remove after the backward compatibility period. +#include "absl/base/internal/dynamic_annotations.h" // IWYU pragma: export + // ------------------------------------------------------------------------- // Decide which features are enabled. diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/cycleclock.cc b/TMessagesProj/jni/voip/webrtc/absl/base/internal/cycleclock.cc index 0e65005b89..902e3f5ef1 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/cycleclock.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/cycleclock.cc @@ -25,6 +25,8 @@ #include #include // NOLINT(build/c++11) +#include "absl/base/attributes.h" +#include "absl/base/config.h" #include "absl/base/internal/unscaledcycleclock.h" namespace absl { @@ -33,44 +35,20 @@ namespace base_internal { #if ABSL_USE_UNSCALED_CYCLECLOCK -namespace { - -#ifdef NDEBUG -#ifdef ABSL_INTERNAL_UNSCALED_CYCLECLOCK_FREQUENCY_IS_CPU_FREQUENCY -// Not debug mode and the UnscaledCycleClock frequency is the CPU -// frequency. Scale the CycleClock to prevent overflow if someone -// tries to represent the time as cycles since the Unix epoch. -static constexpr int32_t kShift = 1; -#else -// Not debug mode and the UnscaledCycleClock isn't operating at the -// raw CPU frequency. There is no need to do any scaling, so don't -// needlessly sacrifice precision. -static constexpr int32_t kShift = 0; -#endif -#else -// In debug mode use a different shift to discourage depending on a -// particular shift value. -static constexpr int32_t kShift = 2; +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL +constexpr int32_t CycleClock::kShift; +constexpr double CycleClock::kFrequencyScale; #endif -static constexpr double kFrequencyScale = 1.0 / (1 << kShift); -static std::atomic cycle_clock_source; +ABSL_CONST_INIT std::atomic + CycleClock::cycle_clock_source_{nullptr}; -CycleClockSourceFunc LoadCycleClockSource() { - // Optimize for the common case (no callback) by first doing a relaxed load; - // this is significantly faster on non-x86 platforms. - if (cycle_clock_source.load(std::memory_order_relaxed) == nullptr) { - return nullptr; - } - // This corresponds to the store(std::memory_order_release) in - // CycleClockSource::Register, and makes sure that any updates made prior to - // registering the callback are visible to this thread before the callback is - // invoked. 
- return cycle_clock_source.load(std::memory_order_acquire); +void CycleClockSource::Register(CycleClockSourceFunc source) { + // Corresponds to the load(std::memory_order_acquire) in LoadCycleClockSource. + CycleClock::cycle_clock_source_.store(source, std::memory_order_release); } -} // namespace - +#ifdef _WIN32 int64_t CycleClock::Now() { auto fn = LoadCycleClockSource(); if (fn == nullptr) { @@ -78,15 +56,7 @@ int64_t CycleClock::Now() { } return fn() >> kShift; } - -double CycleClock::Frequency() { - return kFrequencyScale * base_internal::UnscaledCycleClock::Frequency(); -} - -void CycleClockSource::Register(CycleClockSourceFunc source) { - // Corresponds to the load(std::memory_order_acquire) in LoadCycleClockSource. - cycle_clock_source.store(source, std::memory_order_release); -} +#endif #else diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/cycleclock.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/cycleclock.h index a18b584445..cbfdf57998 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/cycleclock.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/cycleclock.h @@ -42,14 +42,20 @@ #ifndef ABSL_BASE_INTERNAL_CYCLECLOCK_H_ #define ABSL_BASE_INTERNAL_CYCLECLOCK_H_ +#include #include +#include "absl/base/attributes.h" #include "absl/base/config.h" +#include "absl/base/internal/cycleclock_config.h" +#include "absl/base/internal/unscaledcycleclock.h" namespace absl { ABSL_NAMESPACE_BEGIN namespace base_internal { +using CycleClockSourceFunc = int64_t (*)(); + // ----------------------------------------------------------------------------- // CycleClock // ----------------------------------------------------------------------------- @@ -68,12 +74,21 @@ class CycleClock { static double Frequency(); private: +#if ABSL_USE_UNSCALED_CYCLECLOCK + static CycleClockSourceFunc LoadCycleClockSource(); + + static constexpr int32_t kShift = kCycleClockShift; + static constexpr double kFrequencyScale = kCycleClockFrequencyScale; + + ABSL_CONST_INIT static std::atomic cycle_clock_source_; +#endif // ABSL_USE_UNSCALED_CYCLECLOC + CycleClock() = delete; // no instances CycleClock(const CycleClock&) = delete; CycleClock& operator=(const CycleClock&) = delete; -}; -using CycleClockSourceFunc = int64_t (*)(); + friend class CycleClockSource; +}; class CycleClockSource { private: @@ -87,6 +102,41 @@ class CycleClockSource { static void Register(CycleClockSourceFunc source); }; +#if ABSL_USE_UNSCALED_CYCLECLOCK + +inline CycleClockSourceFunc CycleClock::LoadCycleClockSource() { +#if !defined(__x86_64__) + // Optimize for the common case (no callback) by first doing a relaxed load; + // this is significantly faster on non-x86 platforms. + if (cycle_clock_source_.load(std::memory_order_relaxed) == nullptr) { + return nullptr; + } +#endif // !defined(__x86_64__) + + // This corresponds to the store(std::memory_order_release) in + // CycleClockSource::Register, and makes sure that any updates made prior to + // registering the callback are visible to this thread before the callback + // is invoked. + return cycle_clock_source_.load(std::memory_order_acquire); +} + +// Accessing globals in inlined code in Window DLLs is problematic. 
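Editor's note: the Register/LoadCycleClockSource pairing above relies on a release store matched with an acquire load, plus a relaxed fast path for the common no-callback case; here is a standalone sketch of that pattern with invented names (g_callback, RegisterCallback, LoadCallback).

#include <atomic>
#include <cstdint>

using Callback = int64_t (*)();

std::atomic<Callback> g_callback{nullptr};

// Publishes `cb` and everything written before this call.
void RegisterCallback(Callback cb) {
  g_callback.store(cb, std::memory_order_release);
}

// Cheap relaxed check first (no callback is the common case), then an
// acquire load that pairs with the release store above so the callback's
// initialization is visible before it runs.
Callback LoadCallback() {
  if (g_callback.load(std::memory_order_relaxed) == nullptr) {
    return nullptr;
  }
  return g_callback.load(std::memory_order_acquire);
}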
+#ifndef _WIN32 +inline int64_t CycleClock::Now() { + auto fn = LoadCycleClockSource(); + if (fn == nullptr) { + return base_internal::UnscaledCycleClock::Now() >> kShift; + } + return fn() >> kShift; +} +#endif + +inline double CycleClock::Frequency() { + return kFrequencyScale * base_internal::UnscaledCycleClock::Frequency(); +} + +#endif // ABSL_USE_UNSCALED_CYCLECLOCK + } // namespace base_internal ABSL_NAMESPACE_END } // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/cycleclock_config.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/cycleclock_config.h new file mode 100644 index 0000000000..191112b58e --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/cycleclock_config.h @@ -0,0 +1,55 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef ABSL_BASE_INTERNAL_CYCLECLOCK_CONFIG_H_ +#define ABSL_BASE_INTERNAL_CYCLECLOCK_CONFIG_H_ + +#include + +#include "absl/base/config.h" +#include "absl/base/internal/inline_variable.h" +#include "absl/base/internal/unscaledcycleclock_config.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace base_internal { + +#if ABSL_USE_UNSCALED_CYCLECLOCK +#ifdef NDEBUG +#ifdef ABSL_INTERNAL_UNSCALED_CYCLECLOCK_FREQUENCY_IS_CPU_FREQUENCY +// Not debug mode and the UnscaledCycleClock frequency is the CPU +// frequency. Scale the CycleClock to prevent overflow if someone +// tries to represent the time as cycles since the Unix epoch. +ABSL_INTERNAL_INLINE_CONSTEXPR(int32_t, kCycleClockShift, 1); +#else +// Not debug mode and the UnscaledCycleClock isn't operating at the +// raw CPU frequency. There is no need to do any scaling, so don't +// needlessly sacrifice precision. +ABSL_INTERNAL_INLINE_CONSTEXPR(int32_t, kCycleClockShift, 0); +#endif +#else // NDEBUG +// In debug mode use a different shift to discourage depending on a +// particular shift value. +ABSL_INTERNAL_INLINE_CONSTEXPR(int32_t, kCycleClockShift, 2); +#endif // NDEBUG + +ABSL_INTERNAL_INLINE_CONSTEXPR(double, kCycleClockFrequencyScale, + 1.0 / (1 << kCycleClockShift)); +#endif // ABSL_USE_UNSCALED_CYCLECLOC + +} // namespace base_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_BASE_INTERNAL_CYCLECLOCK_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/direct_mmap.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/direct_mmap.h index 274054cd5a..815b8d23ba 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/direct_mmap.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/direct_mmap.h @@ -20,7 +20,7 @@ #include "absl/base/config.h" -#if ABSL_HAVE_MMAP +#ifdef ABSL_HAVE_MMAP #include @@ -41,13 +41,13 @@ #ifdef __mips__ // Include definitions of the ABI currently in use. -#ifdef __BIONIC__ +#if defined(__BIONIC__) || !defined(__GLIBC__) // Android doesn't have sgidefs.h, but does have asm/sgidefs.h, which has the // definitions we need. 
#include #else #include -#endif // __BIONIC__ +#endif // __BIONIC__ || !__GLIBC__ #endif // __mips__ // SYS_mmap and SYS_munmap are not defined in Android. @@ -97,7 +97,8 @@ inline void* DirectMmap(void* start, size_t length, int prot, int flags, int fd, #ifdef __BIONIC__ // SYS_mmap2 has problems on Android API level <= 16. // Workaround by invoking __mmap2() instead. - return __mmap2(start, length, prot, flags, fd, offset / pagesize); + return __mmap2(start, length, prot, flags, fd, + static_cast(offset / pagesize)); #else return reinterpret_cast( syscall(SYS_mmap2, start, length, prot, flags, fd, diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/endian.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/endian.h index dad0e9aeb0..50747d75ec 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/endian.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/endian.h @@ -16,16 +16,9 @@ #ifndef ABSL_BASE_INTERNAL_ENDIAN_H_ #define ABSL_BASE_INTERNAL_ENDIAN_H_ -// The following guarantees declaration of the byte swap functions -#ifdef _MSC_VER -#include // NOLINT(build/include) -#elif defined(__FreeBSD__) -#include -#elif defined(__GLIBC__) -#include // IWYU pragma: export -#endif - #include +#include + #include "absl/base/casts.h" #include "absl/base/config.h" #include "absl/base/internal/unaligned_access.h" @@ -34,47 +27,11 @@ namespace absl { ABSL_NAMESPACE_BEGIN -// Use compiler byte-swapping intrinsics if they are available. 32-bit -// and 64-bit versions are available in Clang and GCC as of GCC 4.3.0. -// The 16-bit version is available in Clang and GCC only as of GCC 4.8.0. -// For simplicity, we enable them all only for GCC 4.8.0 or later. -#if defined(__clang__) || \ - (defined(__GNUC__) && \ - ((__GNUC__ == 4 && __GNUC_MINOR__ >= 8) || __GNUC__ >= 5)) inline uint64_t gbswap_64(uint64_t host_int) { +#if ABSL_HAVE_BUILTIN(__builtin_bswap64) || defined(__GNUC__) return __builtin_bswap64(host_int); -} -inline uint32_t gbswap_32(uint32_t host_int) { - return __builtin_bswap32(host_int); -} -inline uint16_t gbswap_16(uint16_t host_int) { - return __builtin_bswap16(host_int); -} - #elif defined(_MSC_VER) -inline uint64_t gbswap_64(uint64_t host_int) { return _byteswap_uint64(host_int); -} -inline uint32_t gbswap_32(uint32_t host_int) { - return _byteswap_ulong(host_int); -} -inline uint16_t gbswap_16(uint16_t host_int) { - return _byteswap_ushort(host_int); -} - -#else -inline uint64_t gbswap_64(uint64_t host_int) { -#if defined(__GNUC__) && defined(__x86_64__) && !defined(__APPLE__) - // Adapted from /usr/include/byteswap.h. Not available on Mac. 
- if (__builtin_constant_p(host_int)) { - return __bswap_constant_64(host_int); - } else { - uint64_t result; - __asm__("bswap %0" : "=r"(result) : "0"(host_int)); - return result; - } -#elif defined(__GLIBC__) - return bswap_64(host_int); #else return (((host_int & uint64_t{0xFF}) << 56) | ((host_int & uint64_t{0xFF00}) << 40) | @@ -84,12 +41,14 @@ inline uint64_t gbswap_64(uint64_t host_int) { ((host_int & uint64_t{0xFF0000000000}) >> 24) | ((host_int & uint64_t{0xFF000000000000}) >> 40) | ((host_int & uint64_t{0xFF00000000000000}) >> 56)); -#endif // bswap_64 +#endif } inline uint32_t gbswap_32(uint32_t host_int) { -#if defined(__GLIBC__) - return bswap_32(host_int); +#if ABSL_HAVE_BUILTIN(__builtin_bswap32) || defined(__GNUC__) + return __builtin_bswap32(host_int); +#elif defined(_MSC_VER) + return _byteswap_ulong(host_int); #else return (((host_int & uint32_t{0xFF}) << 24) | ((host_int & uint32_t{0xFF00}) << 8) | @@ -99,33 +58,29 @@ inline uint32_t gbswap_32(uint32_t host_int) { } inline uint16_t gbswap_16(uint16_t host_int) { -#if defined(__GLIBC__) - return bswap_16(host_int); +#if ABSL_HAVE_BUILTIN(__builtin_bswap16) || defined(__GNUC__) + return __builtin_bswap16(host_int); +#elif defined(_MSC_VER) + return _byteswap_ushort(host_int); #else return (((host_int & uint16_t{0xFF}) << 8) | ((host_int & uint16_t{0xFF00}) >> 8)); #endif } -#endif // intrinsics available - #ifdef ABSL_IS_LITTLE_ENDIAN -// Definitions for ntohl etc. that don't require us to include -// netinet/in.h. We wrap gbswap_32 and gbswap_16 in functions rather -// than just #defining them because in debug mode, gcc doesn't -// correctly handle the (rather involved) definitions of bswap_32. -// gcc guarantees that inline functions are as fast as macros, so -// this isn't a performance hit. +// Portable definitions for htonl (host-to-network) and friends on little-endian +// architectures. inline uint16_t ghtons(uint16_t x) { return gbswap_16(x); } inline uint32_t ghtonl(uint32_t x) { return gbswap_32(x); } inline uint64_t ghtonll(uint64_t x) { return gbswap_64(x); } #elif defined ABSL_IS_BIG_ENDIAN -// These definitions are simpler on big-endian machines -// These are functions instead of macros to avoid self-assignment warnings -// on calls such as "i = ghtnol(i);". This also provides type checking. +// Portable definitions for htonl (host-to-network) etc on big-endian +// architectures. These definitions are simpler since the host byte order is the +// same as network byte order. inline uint16_t ghtons(uint16_t x) { return x; } inline uint32_t ghtonl(uint32_t x) { return x; } inline uint64_t ghtonll(uint64_t x) { return x; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/exception_safety_testing.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/exception_safety_testing.h index d19863fd6b..77a5aec642 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/exception_safety_testing.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/exception_safety_testing.h @@ -30,6 +30,7 @@ #include #include +#include "gtest/gtest.h" #include "absl/base/internal/pretty_function.h" #include "absl/memory/memory.h" #include "absl/meta/type_traits.h" diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/exponential_biased.cc b/TMessagesProj/jni/voip/webrtc/absl/base/internal/exponential_biased.cc deleted file mode 100644 index 05aeea566c..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/exponential_biased.cc +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2019 The Abseil Authors. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "absl/base/internal/exponential_biased.h" - -#include - -#include -#include -#include -#include - -#include "absl/base/attributes.h" -#include "absl/base/optimization.h" - -namespace absl { -ABSL_NAMESPACE_BEGIN -namespace base_internal { - -// The algorithm generates a random number between 0 and 1 and applies the -// inverse cumulative distribution function for an exponential. Specifically: -// Let m be the inverse of the sample period, then the probability -// distribution function is m*exp(-mx) so the CDF is -// p = 1 - exp(-mx), so -// q = 1 - p = exp(-mx) -// log_e(q) = -mx -// -log_e(q)/m = x -// log_2(q) * (-log_e(2) * 1/m) = x -// In the code, q is actually in the range 1 to 2**26, hence the -26 below -int64_t ExponentialBiased::GetSkipCount(int64_t mean) { - if (ABSL_PREDICT_FALSE(!initialized_)) { - Initialize(); - } - - uint64_t rng = NextRandom(rng_); - rng_ = rng; - - // Take the top 26 bits as the random number - // (This plus the 1<<58 sampling bound give a max possible step of - // 5194297183973780480 bytes.) - // The uint32_t cast is to prevent a (hard-to-reproduce) NAN - // under piii debug for some binaries. - double q = static_cast(rng >> (kPrngNumBits - 26)) + 1.0; - // Put the computed p-value through the CDF of a geometric. - double interval = bias_ + (std::log2(q) - 26) * (-std::log(2.0) * mean); - // Very large values of interval overflow int64_t. To avoid that, we will - // cheat and clamp any huge values to (int64_t max)/2. This is a potential - // source of bias, but the mean would need to be such a large value that it's - // not likely to come up. For example, with a mean of 1e18, the probability of - // hitting this condition is about 1/1000. For a mean of 1e17, standard - // calculators claim that this event won't happen. - if (interval > static_cast(std::numeric_limits::max() / 2)) { - // Assume huge values are bias neutral, retain bias for next call. - return std::numeric_limits::max() / 2; - } - double value = std::rint(interval); - bias_ = interval - value; - return value; -} - -int64_t ExponentialBiased::GetStride(int64_t mean) { - return GetSkipCount(mean - 1) + 1; -} - -void ExponentialBiased::Initialize() { - // We don't get well distributed numbers from `this` so we call NextRandom() a - // bunch to mush the bits around. We use a global_rand to handle the case - // where the same thread (by memory address) gets created and destroyed - // repeatedly. 
- ABSL_CONST_INIT static std::atomic global_rand(0); - uint64_t r = reinterpret_cast(this) + - global_rand.fetch_add(1, std::memory_order_relaxed); - for (int i = 0; i < 20; ++i) { - r = NextRandom(r); - } - rng_ = r; - initialized_ = true; -} - -} // namespace base_internal -ABSL_NAMESPACE_END -} // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/exponential_biased.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/exponential_biased.h deleted file mode 100644 index a81f10e230..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/exponential_biased.h +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright 2019 The Abseil Authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef ABSL_BASE_INTERNAL_EXPONENTIAL_BIASED_H_ -#define ABSL_BASE_INTERNAL_EXPONENTIAL_BIASED_H_ - -#include - -#include "absl/base/config.h" -#include "absl/base/macros.h" - -namespace absl { -ABSL_NAMESPACE_BEGIN -namespace base_internal { - -// ExponentialBiased provides a small and fast random number generator for a -// rounded exponential distribution. This generator manages very little state, -// and imposes no synchronization overhead. This makes it useful in specialized -// scenarios requiring minimum overhead, such as stride based periodic sampling. -// -// ExponentialBiased provides two closely related functions, GetSkipCount() and -// GetStride(), both returning a rounded integer defining a number of events -// required before some event with a given mean probability occurs. -// -// The distribution is useful to generate a random wait time or some periodic -// event with a given mean probability. For example, if an action is supposed to -// happen on average once every 'N' events, then we can get a random 'stride' -// counting down how long before the event to happen. For example, if we'd want -// to sample one in every 1000 'Frobber' calls, our code could look like this: -// -// Frobber::Frobber() { -// stride_ = exponential_biased_.GetStride(1000); -// } -// -// void Frobber::Frob(int arg) { -// if (--stride == 0) { -// SampleFrob(arg); -// stride_ = exponential_biased_.GetStride(1000); -// } -// ... -// } -// -// The rounding of the return value creates a bias, especially for smaller means -// where the distribution of the fraction is not evenly distributed. We correct -// this bias by tracking the fraction we rounded up or down on each iteration, -// effectively tracking the distance between the cumulative value, and the -// rounded cumulative value. For example, given a mean of 2: -// -// raw = 1.63076, cumulative = 1.63076, rounded = 2, bias = -0.36923 -// raw = 0.14624, cumulative = 1.77701, rounded = 2, bias = 0.14624 -// raw = 4.93194, cumulative = 6.70895, rounded = 7, bias = -0.06805 -// raw = 0.24206, cumulative = 6.95101, rounded = 7, bias = 0.24206 -// etc... 
-// -// Adjusting with rounding bias is relatively trivial: -// -// double value = bias_ + exponential_distribution(mean)(); -// double rounded_value = std::rint(value); -// bias_ = value - rounded_value; -// return rounded_value; -// -// This class is thread-compatible. -class ExponentialBiased { - public: - // The number of bits set by NextRandom. - static constexpr int kPrngNumBits = 48; - - // `GetSkipCount()` returns the number of events to skip before some chosen - // event happens. For example, randomly tossing a coin, we will on average - // throw heads once before we get tails. We can simulate random coin tosses - // using GetSkipCount() as: - // - // ExponentialBiased eb; - // for (...) { - // int number_of_heads_before_tail = eb.GetSkipCount(1); - // for (int flips = 0; flips < number_of_heads_before_tail; ++flips) { - // printf("head..."); - // } - // printf("tail\n"); - // } - // - int64_t GetSkipCount(int64_t mean); - - // GetStride() returns the number of events required for a specific event to - // happen. See the class comments for a usage example. `GetStride()` is - // equivalent to `GetSkipCount(mean - 1) + 1`. When to use `GetStride()` or - // `GetSkipCount()` depends mostly on what best fits the use case. - int64_t GetStride(int64_t mean); - - // Computes a random number in the range [0, 1<<(kPrngNumBits+1) - 1] - // - // This is public to enable testing. - static uint64_t NextRandom(uint64_t rnd); - - private: - void Initialize(); - - uint64_t rng_{0}; - double bias_{0}; - bool initialized_{false}; -}; - -// Returns the next prng value. -// pRNG is: aX+b mod c with a = 0x5DEECE66D, b = 0xB, c = 1<<48 -// This is the lrand64 generator. -inline uint64_t ExponentialBiased::NextRandom(uint64_t rnd) { - const uint64_t prng_mult = uint64_t{0x5DEECE66D}; - const uint64_t prng_add = 0xB; - const uint64_t prng_mod_power = 48; - const uint64_t prng_mod_mask = - ~((~static_cast(0)) << prng_mod_power); - return (prng_mult * rnd + prng_add) & prng_mod_mask; -} - -} // namespace base_internal -ABSL_NAMESPACE_END -} // namespace absl - -#endif // ABSL_BASE_INTERNAL_EXPONENTIAL_BIASED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/fast_type_id.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/fast_type_id.h index 3db59e8374..a547b3a8bc 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/fast_type_id.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/fast_type_id.h @@ -28,8 +28,10 @@ struct FastTypeTag { constexpr static char dummy_var = 0; }; +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL template constexpr char FastTypeTag::dummy_var; +#endif // FastTypeId() evaluates at compile/link-time to a unique pointer for the // passed-in type. These are meant to be good match for keys into maps or diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/inline_variable.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/inline_variable.h index 130d8c2476..df933faff5 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/inline_variable.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/inline_variable.h @@ -12,8 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
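Editor's note: since the fast_type_id.h hunk above only adds a guard around the out-of-class dummy_var definition, a brief sketch of what FastTypeId is used for may help; the A and B types are placeholders, and the call shape assumes the absl::base_internal::FastTypeId<T>() helper declared in that header.

#include "absl/base/internal/fast_type_id.h"

struct A {};
struct B {};

// Each instantiation of FastTypeTag<T> owns a distinct dummy_var, so the
// address FastTypeId<T>() returns is unique per type and can serve as a
// cheap map key or runtime type token.
bool DistinctIds() {
  return absl::base_internal::FastTypeId<A>() !=
         absl::base_internal::FastTypeId<B>();  // true
}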
-#ifndef ABSL_BASE_INTERNAL_INLINE_VARIABLE_EMULATION_H_ -#define ABSL_BASE_INTERNAL_INLINE_VARIABLE_EMULATION_H_ +#ifndef ABSL_BASE_INTERNAL_INLINE_VARIABLE_H_ +#define ABSL_BASE_INTERNAL_INLINE_VARIABLE_H_ #include @@ -104,4 +104,4 @@ #endif // __cpp_inline_variables -#endif // ABSL_BASE_INTERNAL_INLINE_VARIABLE_EMULATION_H_ +#endif // ABSL_BASE_INTERNAL_INLINE_VARIABLE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/inline_variable_testing.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/inline_variable_testing.h index 3856b9f80f..f3c81459fa 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/inline_variable_testing.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/inline_variable_testing.h @@ -12,8 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -#ifndef ABSL_BASE_INLINE_VARIABLE_TESTING_H_ -#define ABSL_BASE_INLINE_VARIABLE_TESTING_H_ +#ifndef ABSL_BASE_INTERNAL_INLINE_VARIABLE_TESTING_H_ +#define ABSL_BASE_INTERNAL_INLINE_VARIABLE_TESTING_H_ #include "absl/base/internal/inline_variable.h" @@ -43,4 +43,4 @@ const int& get_int_b(); ABSL_NAMESPACE_END } // namespace absl -#endif // ABSL_BASE_INLINE_VARIABLE_TESTING_H_ +#endif // ABSL_BASE_INTERNAL_INLINE_VARIABLE_TESTING_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/invoke.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/invoke.h index 5c71f32823..643c2a42f0 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/invoke.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/invoke.h @@ -14,6 +14,8 @@ // // absl::base_internal::invoke(f, args...) is an implementation of // INVOKE(f, args...) from section [func.require] of the C++ standard. +// When compiled as C++17 and later versions, it is implemented as an alias of +// std::invoke. // // [func.require] // Define INVOKE (f, t1, t2, ..., tN) as follows: @@ -35,6 +37,26 @@ #ifndef ABSL_BASE_INTERNAL_INVOKE_H_ #define ABSL_BASE_INTERNAL_INVOKE_H_ +#include "absl/base/config.h" + +#if ABSL_INTERNAL_CPLUSPLUS_LANG >= 201703L + +#include + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace base_internal { + +using std::invoke; +using std::invoke_result_t; +using std::is_invocable_r; + +} // namespace base_internal +ABSL_NAMESPACE_END +} // namespace absl + +#else // ABSL_INTERNAL_CPLUSPLUS_LANG >= 201703L + #include #include #include @@ -80,8 +102,18 @@ struct MemFunAndRef : StrippedAccept { static decltype((std::declval().* std::declval())(std::declval()...)) Invoke(MemFun&& mem_fun, Obj&& obj, Args&&... args) { +// Ignore bogus GCC warnings on this line. +// See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=101436 for similar example. +#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(11, 0) +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Warray-bounds" +#pragma GCC diagnostic ignored "-Wmaybe-uninitialized" +#endif return (std::forward(obj).* std::forward(mem_fun))(std::forward(args)...); +#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(11, 0) +#pragma GCC diagnostic pop +#endif } }; @@ -180,8 +212,30 @@ invoke_result_t invoke(F&& f, Args&&... 
args) { return Invoker::type::Invoke(std::forward(f), std::forward(args)...); } + +template +struct IsInvocableRImpl : std::false_type {}; + +template +struct IsInvocableRImpl< + absl::void_t >, R, F, + Args...> + : std::integral_constant< + bool, + std::is_convertible, + R>::value || + std::is_void::value> {}; + +// Type trait whose member `value` is true if invoking `F` with `Args` is valid, +// and either the return type is convertible to `R`, or `R` is void. +// C++11-compatible version of `std::is_invocable_r`. +template +using is_invocable_r = IsInvocableRImpl; + } // namespace base_internal ABSL_NAMESPACE_END } // namespace absl +#endif // ABSL_INTERNAL_CPLUSPLUS_LANG >= 201703L + #endif // ABSL_BASE_INTERNAL_INVOKE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/low_level_alloc.cc b/TMessagesProj/jni/voip/webrtc/absl/base/internal/low_level_alloc.cc index 229ab9162d..662167b08a 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/low_level_alloc.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/low_level_alloc.cc @@ -332,7 +332,7 @@ size_t GetPageSize() { #elif defined(__wasm__) || defined(__asmjs__) return getpagesize(); #else - return sysconf(_SC_PAGESIZE); + return static_cast(sysconf(_SC_PAGESIZE)); #endif } @@ -364,7 +364,7 @@ LowLevelAlloc::Arena::Arena(uint32_t flags_value) } // L < meta_data_arena->mu -LowLevelAlloc::Arena *LowLevelAlloc::NewArena(int32_t flags) { +LowLevelAlloc::Arena *LowLevelAlloc::NewArena(uint32_t flags) { Arena *meta_data_arena = DefaultArena(); #ifndef ABSL_LOW_LEVEL_ALLOC_ASYNC_SIGNAL_SAFE_MISSING if ((flags & LowLevelAlloc::kAsyncSignalSafe) != 0) { diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/low_level_alloc.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/low_level_alloc.h index db91951c82..eabb14a9b4 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/low_level_alloc.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/low_level_alloc.h @@ -103,7 +103,7 @@ class LowLevelAlloc { // the provided flags. For example, the call NewArena(kAsyncSignalSafe) // is itself async-signal-safe, as well as generatating an arena that provides // async-signal-safe Alloc/Free. - static Arena *NewArena(int32_t flags); + static Arena *NewArena(uint32_t flags); // Destroys an arena allocated by NewArena and returns true, // provided no allocated blocks remain in the arena. diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/periodic_sampler.cc b/TMessagesProj/jni/voip/webrtc/absl/base/internal/periodic_sampler.cc deleted file mode 100644 index 520dabbaa0..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/periodic_sampler.cc +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2019 The Abseil Authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
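Editor's note: to show how the is_invocable_r backport added in invoke.h above is meant to be used (its C++17 branch simply aliases the standard trait), a sketch with an invented ApplyToString wrapper.

#include <string>
#include <utility>

#include "absl/base/internal/invoke.h"

// Accepts any callable that can be invoked with a const std::string& and
// whose result converts to std::string, mirroring std::is_invocable_r.
template <typename F>
std::string ApplyToString(F&& f, const std::string& s) {
  static_assert(absl::base_internal::is_invocable_r<
                    std::string, F, const std::string&>::value,
                "f must accept const std::string& and return std::string");
  return absl::base_internal::invoke(std::forward<F>(f), s);
}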
- -#include "absl/base/internal/periodic_sampler.h" - -#include - -#include "absl/base/internal/exponential_biased.h" - -namespace absl { -ABSL_NAMESPACE_BEGIN -namespace base_internal { - -int64_t PeriodicSamplerBase::GetExponentialBiased(int period) noexcept { - return rng_.GetStride(period); -} - -bool PeriodicSamplerBase::SubtleConfirmSample() noexcept { - int current_period = period(); - - // Deal with period case 0 (always off) and 1 (always on) - if (ABSL_PREDICT_FALSE(current_period < 2)) { - stride_ = 0; - return current_period == 1; - } - - // Check if this is the first call to Sample() - if (ABSL_PREDICT_FALSE(stride_ == 1)) { - stride_ = static_cast(-GetExponentialBiased(current_period)); - if (static_cast(stride_) < -1) { - ++stride_; - return false; - } - } - - stride_ = static_cast(-GetExponentialBiased(current_period)); - return true; -} - -} // namespace base_internal -ABSL_NAMESPACE_END -} // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/periodic_sampler.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/periodic_sampler.h deleted file mode 100644 index f8a86796b1..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/periodic_sampler.h +++ /dev/null @@ -1,211 +0,0 @@ -// Copyright 2019 The Abseil Authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef ABSL_BASE_INTERNAL_PERIODIC_SAMPLER_H_ -#define ABSL_BASE_INTERNAL_PERIODIC_SAMPLER_H_ - -#include - -#include - -#include "absl/base/internal/exponential_biased.h" -#include "absl/base/optimization.h" - -namespace absl { -ABSL_NAMESPACE_BEGIN -namespace base_internal { - -// PeriodicSamplerBase provides the basic period sampler implementation. -// -// This is the base class for the templated PeriodicSampler class, which holds -// a global std::atomic value identified by a user defined tag, such that -// each specific PeriodSampler implementation holds its own global period. -// -// PeriodicSamplerBase is thread-compatible except where stated otherwise. -class PeriodicSamplerBase { - public: - // PeriodicSamplerBase is trivial / copyable / movable / destructible. - PeriodicSamplerBase() = default; - PeriodicSamplerBase(PeriodicSamplerBase&&) = default; - PeriodicSamplerBase(const PeriodicSamplerBase&) = default; - - // Returns true roughly once every `period` calls. This is established by a - // randomly picked `stride` that is counted down on each call to `Sample`. - // This stride is picked such that the probability of `Sample()` returning - // true is 1 in `period`. - inline bool Sample() noexcept; - - // The below methods are intended for optimized use cases where the - // size of the inlined fast path code is highly important. Applications - // should use the `Sample()` method unless they have proof that their - // specific use case requires the optimizations offered by these methods. - // - // An example of such a use case is SwissTable sampling. All sampling checks - // are in inlined SwissTable methods, and the number of call sites is huge. 
- // In this case, the inlined code size added to each translation unit calling - // SwissTable methods is non-trivial. - // - // The `SubtleMaybeSample()` function spuriously returns true even if the - // function should not be sampled, applications MUST match each call to - // 'SubtleMaybeSample()' returning true with a `SubtleConfirmSample()` call, - // and use the result of the latter as the sampling decision. - // In other words: the code should logically be equivalent to: - // - // if (SubtleMaybeSample() && SubtleConfirmSample()) { - // // Sample this call - // } - // - // In the 'inline-size' optimized case, the `SubtleConfirmSample()` call can - // be placed out of line, for example, the typical use case looks as follows: - // - // // --- frobber.h ----------- - // void FrobberSampled(); - // - // inline void FrobberImpl() { - // // ... - // } - // - // inline void Frobber() { - // if (ABSL_PREDICT_FALSE(sampler.SubtleMaybeSample())) { - // FrobberSampled(); - // } else { - // FrobberImpl(); - // } - // } - // - // // --- frobber.cc ----------- - // void FrobberSampled() { - // if (!sampler.SubtleConfirmSample())) { - // // Spurious false positive - // FrobberImpl(); - // return; - // } - // - // // Sampled execution - // // ... - // } - inline bool SubtleMaybeSample() noexcept; - bool SubtleConfirmSample() noexcept; - - protected: - // We explicitly don't use a virtual destructor as this class is never - // virtually destroyed, and it keeps the class trivial, which avoids TLS - // prologue and epilogue code for our TLS instances. - ~PeriodicSamplerBase() = default; - - // Returns the next stride for our sampler. - // This function is virtual for testing purposes only. - virtual int64_t GetExponentialBiased(int period) noexcept; - - private: - // Returns the current period of this sampler. Thread-safe. - virtual int period() const noexcept = 0; - - // Keep and decrement stride_ as an unsigned integer, but compare the value - // to zero casted as a signed int. clang and msvc do not create optimum code - // if we use signed for the combined decrement and sign comparison. - // - // Below 3 alternative options, all compiles generate the best code - // using the unsigned increment <---> signed int comparison option. - // - // Option 1: - // int64_t stride_; - // if (ABSL_PREDICT_TRUE(++stride_ < 0)) { ... } - // - // GCC x64 (OK) : https://gcc.godbolt.org/z/R5MzzA - // GCC ppc (OK) : https://gcc.godbolt.org/z/z7NZAt - // Clang x64 (BAD): https://gcc.godbolt.org/z/t4gPsd - // ICC x64 (OK) : https://gcc.godbolt.org/z/rE6s8W - // MSVC x64 (OK) : https://gcc.godbolt.org/z/ARMXqS - // - // Option 2: - // int64_t stride_ = 0; - // if (ABSL_PREDICT_TRUE(--stride_ >= 0)) { ... } - // - // GCC x64 (OK) : https://gcc.godbolt.org/z/jSQxYK - // GCC ppc (OK) : https://gcc.godbolt.org/z/VJdYaA - // Clang x64 (BAD): https://gcc.godbolt.org/z/Xm4NjX - // ICC x64 (OK) : https://gcc.godbolt.org/z/4snaFd - // MSVC x64 (BAD): https://gcc.godbolt.org/z/BgnEKE - // - // Option 3: - // uint64_t stride_; - // if (ABSL_PREDICT_TRUE(static_cast(++stride_) < 0)) { ... 
} - // - // GCC x64 (OK) : https://gcc.godbolt.org/z/bFbfPy - // GCC ppc (OK) : https://gcc.godbolt.org/z/S9KkUE - // Clang x64 (OK) : https://gcc.godbolt.org/z/UYzRb4 - // ICC x64 (OK) : https://gcc.godbolt.org/z/ptTNfD - // MSVC x64 (OK) : https://gcc.godbolt.org/z/76j4-5 - uint64_t stride_ = 0; - ExponentialBiased rng_; -}; - -inline bool PeriodicSamplerBase::SubtleMaybeSample() noexcept { - // See comments on `stride_` for the unsigned increment / signed compare. - if (ABSL_PREDICT_TRUE(static_cast(++stride_) < 0)) { - return false; - } - return true; -} - -inline bool PeriodicSamplerBase::Sample() noexcept { - return ABSL_PREDICT_FALSE(SubtleMaybeSample()) ? SubtleConfirmSample() - : false; -} - -// PeriodicSampler is a concreted periodic sampler implementation. -// The user provided Tag identifies the implementation, and is required to -// isolate the global state of this instance from other instances. -// -// Typical use case: -// -// struct HashTablezTag {}; -// thread_local PeriodicSampler sampler; -// -// void HashTableSamplingLogic(...) { -// if (sampler.Sample()) { -// HashTableSlowSamplePath(...); -// } -// } -// -template -class PeriodicSampler final : public PeriodicSamplerBase { - public: - ~PeriodicSampler() = default; - - int period() const noexcept final { - return period_.load(std::memory_order_relaxed); - } - - // Sets the global period for this sampler. Thread-safe. - // Setting a period of 0 disables the sampler, i.e., every call to Sample() - // will return false. Setting a period of 1 puts the sampler in 'always on' - // mode, i.e., every call to Sample() returns true. - static void SetGlobalPeriod(int period) { - period_.store(period, std::memory_order_relaxed); - } - - private: - static std::atomic period_; -}; - -template -std::atomic PeriodicSampler::period_(default_period); - -} // namespace base_internal -ABSL_NAMESPACE_END -} // namespace absl - -#endif // ABSL_BASE_INTERNAL_PERIODIC_SAMPLER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/prefetch.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/prefetch.h new file mode 100644 index 0000000000..06419283ba --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/prefetch.h @@ -0,0 +1,138 @@ +// Copyright 2022 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef ABSL_BASE_INTERNAL_PREFETCH_H_ +#define ABSL_BASE_INTERNAL_PREFETCH_H_ + +#include "absl/base/config.h" + +#ifdef __SSE__ +#include +#endif + +#if defined(_MSC_VER) && defined(ABSL_INTERNAL_HAVE_SSE) +#include +#pragma intrinsic(_mm_prefetch) +#endif + +// Compatibility wrappers around __builtin_prefetch, to prefetch data +// for read if supported by the toolchain. + +// Move data into the cache before it is read, or "prefetch" it. +// +// The value of `addr` is the address of the memory to prefetch. If +// the target and compiler support it, data prefetch instructions are +// generated. 
If the prefetch is done some time before the memory is +// read, it may be in the cache by the time the read occurs. +// +// The function names specify the temporal locality heuristic applied, +// using the names of Intel prefetch instructions: +// +// T0 - high degree of temporal locality; data should be left in as +// many levels of the cache possible +// T1 - moderate degree of temporal locality +// T2 - low degree of temporal locality +// Nta - no temporal locality, data need not be left in the cache +// after the read +// +// Incorrect or gratuitous use of these functions can degrade +// performance, so use them only when representative benchmarks show +// an improvement. +// +// Example usage: +// +// absl::base_internal::PrefetchT0(addr); +// +// Currently, the different prefetch calls behave on some Intel +// architectures as follows: +// +// SNB..SKL SKX +// PrefetchT0() L1/L2/L3 L1/L2 +// PrefetchT1() L2/L3 L2 +// PrefetchT2() L2/L3 L2 +// PrefetchNta() L1/--/L3 L1* +// +// * On SKX PrefetchNta() will bring the line into L1 but will evict +// from L3 cache. This might result in surprising behavior. +// +// SNB = Sandy Bridge, SKL = Skylake, SKX = Skylake Xeon. +// +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace base_internal { + +void PrefetchT0(const void* addr); +void PrefetchT1(const void* addr); +void PrefetchT2(const void* addr); +void PrefetchNta(const void* addr); + +// Implementation details follow. + +#if ABSL_HAVE_BUILTIN(__builtin_prefetch) || defined(__GNUC__) + +#define ABSL_INTERNAL_HAVE_PREFETCH 1 + +// See __builtin_prefetch: +// https://gcc.gnu.org/onlinedocs/gcc/Other-Builtins.html. +// +// These functions speculatively load for read only. This is +// safe for all currently supported platforms. However, prefetch for +// store may have problems depending on the target platform. +// +inline void PrefetchT0(const void* addr) { + // Note: this uses prefetcht0 on Intel. + __builtin_prefetch(addr, 0, 3); +} +inline void PrefetchT1(const void* addr) { + // Note: this uses prefetcht1 on Intel. + __builtin_prefetch(addr, 0, 2); +} +inline void PrefetchT2(const void* addr) { + // Note: this uses prefetcht2 on Intel. + __builtin_prefetch(addr, 0, 1); +} +inline void PrefetchNta(const void* addr) { + // Note: this uses prefetchtnta on Intel. 
+ __builtin_prefetch(addr, 0, 0); +} + +#elif defined(ABSL_INTERNAL_HAVE_SSE) + +#define ABSL_INTERNAL_HAVE_PREFETCH 1 + +inline void PrefetchT0(const void* addr) { + _mm_prefetch(reinterpret_cast(addr), _MM_HINT_T0); +} +inline void PrefetchT1(const void* addr) { + _mm_prefetch(reinterpret_cast(addr), _MM_HINT_T1); +} +inline void PrefetchT2(const void* addr) { + _mm_prefetch(reinterpret_cast(addr), _MM_HINT_T2); +} +inline void PrefetchNta(const void* addr) { + _mm_prefetch(reinterpret_cast(addr), _MM_HINT_NTA); +} + +#else +inline void PrefetchT0(const void*) {} +inline void PrefetchT1(const void*) {} +inline void PrefetchT2(const void*) {} +inline void PrefetchNta(const void*) {} +#endif + +} // namespace base_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_BASE_INTERNAL_PREFETCH_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/raw_logging.cc b/TMessagesProj/jni/voip/webrtc/absl/base/internal/raw_logging.cc index 074e026adb..6273e8471b 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/raw_logging.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/raw_logging.cc @@ -14,15 +14,17 @@ #include "absl/base/internal/raw_logging.h" -#include #include +#include #include #include #include +#include #include "absl/base/attributes.h" #include "absl/base/config.h" #include "absl/base/internal/atomic_hook.h" +#include "absl/base/internal/errno_saver.h" #include "absl/base/log_severity.h" // We know how to perform low-level writes to stderr in POSIX and Windows. For @@ -36,8 +38,8 @@ // This preprocessor token is also defined in raw_io.cc. If you need to copy // this, consider moving both to config.h instead. #if defined(__linux__) || defined(__APPLE__) || defined(__FreeBSD__) || \ - defined(__Fuchsia__) || defined(__native_client__) || \ - defined(__EMSCRIPTEN__) || defined(__ASYLO__) + defined(__Fuchsia__) || defined(__native_client__) || \ + defined(__OpenBSD__) || defined(__EMSCRIPTEN__) || defined(__ASYLO__) #include @@ -50,7 +52,8 @@ // ABSL_HAVE_SYSCALL_WRITE is defined when the platform provides the syscall // syscall(SYS_write, /*int*/ fd, /*char* */ buf, /*size_t*/ len); // for low level operations that want to avoid libc. -#if (defined(__linux__) || defined(__FreeBSD__)) && !defined(__ANDROID__) +#if (defined(__linux__) || defined(__FreeBSD__) || defined(__OpenBSD__)) && \ + !defined(__ANDROID__) #include #define ABSL_HAVE_SYSCALL_WRITE 1 #define ABSL_LOW_LEVEL_WRITE_SUPPORTED 1 @@ -69,20 +72,13 @@ namespace absl { ABSL_NAMESPACE_BEGIN -namespace raw_logging_internal { +namespace raw_log_internal { namespace { // TODO(gfalcon): We want raw-logging to work on as many platforms as possible. // Explicitly `#error` out when not `ABSL_LOW_LEVEL_WRITE_SUPPORTED`, except for // a selected set of platforms for which we expect not to be able to raw log. -ABSL_INTERNAL_ATOMIC_HOOK_ATTRIBUTES - absl::base_internal::AtomicHook - log_prefix_hook; -ABSL_INTERNAL_ATOMIC_HOOK_ATTRIBUTES - absl::base_internal::AtomicHook - abort_hook; - #ifdef ABSL_LOW_LEVEL_WRITE_SUPPORTED constexpr char kTruncated[] = " ... (message truncated)\n"; @@ -93,12 +89,14 @@ constexpr char kTruncated[] = " ... 
(message truncated)\n"; bool VADoRawLog(char** buf, int* size, const char* format, va_list ap) ABSL_PRINTF_ATTRIBUTE(3, 0); bool VADoRawLog(char** buf, int* size, const char* format, va_list ap) { - int n = vsnprintf(*buf, *size, format, ap); + if (*size < 0) + return false; + int n = vsnprintf(*buf, static_cast(*size), format, ap); bool result = true; if (n < 0 || n > *size) { result = false; if (static_cast(*size) > sizeof(kTruncated)) { - n = *size - sizeof(kTruncated); // room for truncation message + n = *size - static_cast(sizeof(kTruncated)); } else { n = 0; // no room for truncation message } @@ -120,9 +118,11 @@ constexpr int kLogBufSize = 3000; bool DoRawLog(char** buf, int* size, const char* format, ...) ABSL_PRINTF_ATTRIBUTE(3, 4); bool DoRawLog(char** buf, int* size, const char* format, ...) { + if (*size < 0) + return false; va_list ap; va_start(ap, format); - int n = vsnprintf(*buf, *size, format, ap); + int n = vsnprintf(*buf, static_cast(*size), format, ap); va_end(ap); if (n < 0 || n > *size) return false; *size -= n; @@ -130,6 +130,18 @@ bool DoRawLog(char** buf, int* size, const char* format, ...) { return true; } +bool DefaultLogFilterAndPrefix(absl::LogSeverity, const char* file, int line, + char** buf, int* buf_size) { + DoRawLog(buf, buf_size, "[%s : %d] RAW: ", file, line); + return true; +} + +ABSL_INTERNAL_ATOMIC_HOOK_ATTRIBUTES +absl::base_internal::AtomicHook + log_filter_and_prefix_hook(DefaultLogFilterAndPrefix); +ABSL_INTERNAL_ATOMIC_HOOK_ATTRIBUTES +absl::base_internal::AtomicHook abort_hook; + void RawLogVA(absl::LogSeverity severity, const char* file, int line, const char* format, va_list ap) ABSL_PRINTF_ATTRIBUTE(4, 0); void RawLogVA(absl::LogSeverity severity, const char* file, int line, @@ -150,14 +162,7 @@ void RawLogVA(absl::LogSeverity severity, const char* file, int line, } #endif - auto log_prefix_hook_ptr = log_prefix_hook.Load(); - if (log_prefix_hook_ptr) { - enabled = log_prefix_hook_ptr(severity, file, line, &buf, &size); - } else { - if (enabled) { - DoRawLog(&buf, &size, "[%s : %d] RAW: ", file, line); - } - } + enabled = log_filter_and_prefix_hook(severity, file, line, &buf, &size); const char* const prefix_end = buf; #ifdef ABSL_LOW_LEVEL_WRITE_SUPPORTED @@ -168,11 +173,12 @@ void RawLogVA(absl::LogSeverity severity, const char* file, int line, } else { DoRawLog(&buf, &size, "%s", kTruncated); } - SafeWriteToStderr(buffer, strlen(buffer)); + AsyncSignalSafeWriteToStderr(buffer, strlen(buffer)); } #else static_cast(format); static_cast(ap); + static_cast(enabled); #endif // Abort the process after logging a FATAL message, even if the output itself @@ -195,13 +201,16 @@ void DefaultInternalLog(absl::LogSeverity severity, const char* file, int line, } // namespace -void SafeWriteToStderr(const char *s, size_t len) { +void AsyncSignalSafeWriteToStderr(const char* s, size_t len) { + absl::base_internal::ErrnoSaver errno_saver; #if defined(ABSL_HAVE_SYSCALL_WRITE) + // We prefer calling write via `syscall` to minimize the risk of libc doing + // something "helpful". 
syscall(SYS_write, STDERR_FILENO, s, len); #elif defined(ABSL_HAVE_POSIX_WRITE) write(STDERR_FILENO, s, len); #elif defined(ABSL_HAVE_RAW_IO) - _write(/* stderr */ 2, s, len); + _write(/* stderr */ 2, s, static_cast(len)); #else // stderr logging unsupported on this platform (void) s; @@ -229,7 +238,9 @@ ABSL_INTERNAL_ATOMIC_HOOK_ATTRIBUTES ABSL_DLL absl::base_internal::AtomicHook internal_log_function(DefaultInternalLog); -void RegisterLogPrefixHook(LogPrefixHook func) { log_prefix_hook.Store(func); } +void RegisterLogFilterAndPrefixHook(LogFilterAndPrefixHook func) { + log_filter_and_prefix_hook.Store(func); +} void RegisterAbortHook(AbortHook func) { abort_hook.Store(func); } @@ -237,6 +248,6 @@ void RegisterInternalLogFunction(InternalLogFunction func) { internal_log_function.Store(func); } -} // namespace raw_logging_internal +} // namespace raw_log_internal ABSL_NAMESPACE_END } // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/raw_logging.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/raw_logging.h index 2bf7aabac1..db2ef38e07 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/raw_logging.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/raw_logging.h @@ -43,12 +43,11 @@ #define ABSL_RAW_LOG(severity, ...) \ do { \ - constexpr const char* absl_raw_logging_internal_basename = \ - ::absl::raw_logging_internal::Basename(__FILE__, \ - sizeof(__FILE__) - 1); \ - ::absl::raw_logging_internal::RawLog(ABSL_RAW_LOGGING_INTERNAL_##severity, \ - absl_raw_logging_internal_basename, \ - __LINE__, __VA_ARGS__); \ + constexpr const char* absl_raw_log_internal_basename = \ + ::absl::raw_log_internal::Basename(__FILE__, sizeof(__FILE__) - 1); \ + ::absl::raw_log_internal::RawLog(ABSL_RAW_LOG_INTERNAL_##severity, \ + absl_raw_log_internal_basename, __LINE__, \ + __VA_ARGS__); \ } while (0) // Similar to CHECK(condition) << message, but for low-level modules: @@ -72,14 +71,14 @@ // // The API is a subset of the above: each macro only takes two arguments. Use // StrCat if you need to build a richer message. 
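A usage sketch of the two-argument low-level form described above, building the message with absl::StrCat as the comment suggests. CheckBufferSize is an illustrative helper, not part of Abseil; it only assumes the macros and headers shown in this diff.

#include <cstddef>

#include "absl/base/internal/raw_logging.h"
#include "absl/strings/str_cat.h"

// Hypothetical helper: rejects oversized buffers with the low-level check
// macro; nothing is allocated unless the check actually fires.
void CheckBufferSize(size_t size, size_t limit) {
  ABSL_INTERNAL_CHECK(size <= limit,
                      absl::StrCat("buffer too large: ", size, " > ", limit));
}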
-#define ABSL_INTERNAL_LOG(severity, message) \ - do { \ - constexpr const char* absl_raw_logging_internal_filename = __FILE__; \ - ::absl::raw_logging_internal::internal_log_function( \ - ABSL_RAW_LOGGING_INTERNAL_##severity, \ - absl_raw_logging_internal_filename, __LINE__, message); \ - if (ABSL_RAW_LOGGING_INTERNAL_##severity == ::absl::LogSeverity::kFatal) \ - ABSL_INTERNAL_UNREACHABLE; \ +#define ABSL_INTERNAL_LOG(severity, message) \ + do { \ + constexpr const char* absl_raw_log_internal_filename = __FILE__; \ + ::absl::raw_log_internal::internal_log_function( \ + ABSL_RAW_LOG_INTERNAL_##severity, absl_raw_log_internal_filename, \ + __LINE__, message); \ + if (ABSL_RAW_LOG_INTERNAL_##severity == ::absl::LogSeverity::kFatal) \ + ABSL_INTERNAL_UNREACHABLE; \ } while (0) #define ABSL_INTERNAL_CHECK(condition, message) \ @@ -91,16 +90,16 @@ } \ } while (0) -#define ABSL_RAW_LOGGING_INTERNAL_INFO ::absl::LogSeverity::kInfo -#define ABSL_RAW_LOGGING_INTERNAL_WARNING ::absl::LogSeverity::kWarning -#define ABSL_RAW_LOGGING_INTERNAL_ERROR ::absl::LogSeverity::kError -#define ABSL_RAW_LOGGING_INTERNAL_FATAL ::absl::LogSeverity::kFatal -#define ABSL_RAW_LOGGING_INTERNAL_LEVEL(severity) \ +#define ABSL_RAW_LOG_INTERNAL_INFO ::absl::LogSeverity::kInfo +#define ABSL_RAW_LOG_INTERNAL_WARNING ::absl::LogSeverity::kWarning +#define ABSL_RAW_LOG_INTERNAL_ERROR ::absl::LogSeverity::kError +#define ABSL_RAW_LOG_INTERNAL_FATAL ::absl::LogSeverity::kFatal +#define ABSL_RAW_LOG_INTERNAL_LEVEL(severity) \ ::absl::NormalizeLogSeverity(severity) namespace absl { ABSL_NAMESPACE_BEGIN -namespace raw_logging_internal { +namespace raw_log_internal { // Helper function to implement ABSL_RAW_LOG // Logs format... at "severity" level, reporting it @@ -109,12 +108,9 @@ namespace raw_logging_internal { void RawLog(absl::LogSeverity severity, const char* file, int line, const char* format, ...) ABSL_PRINTF_ATTRIBUTE(4, 5); -// Writes the provided buffer directly to stderr, in a safe, low-level manner. -// -// In POSIX this means calling write(), which is async-signal safe and does -// not malloc. If the platform supports the SYS_write syscall, we invoke that -// directly to side-step any libc interception. -void SafeWriteToStderr(const char *s, size_t len); +// Writes the provided buffer directly to stderr, in a signal-safe, low-level +// manner. +void AsyncSignalSafeWriteToStderr(const char* s, size_t len); // compile-time function to get the "base" filename, that is, the part of // a filename after the last "/" or "\" path separator. The search starts at @@ -133,7 +129,7 @@ constexpr const char* Basename(const char* fname, int offset) { // TODO(gfalcon): Come up with a better name for this method. bool RawLoggingFullySupported(); -// Function type for a raw_logging customization hook for suppressing messages +// Function type for a raw_log customization hook for suppressing messages // by severity, and for writing custom prefixes on non-suppressed messages. // // The installed hook is called for every raw log invocation. The message will @@ -142,19 +138,20 @@ bool RawLoggingFullySupported(); // also provided with an output buffer, where it can write a custom log message // prefix. // -// The raw_logging system does not allocate memory or grab locks. User-provided +// The raw_log system does not allocate memory or grab locks. User-provided // hooks must avoid these operations, and must not throw exceptions. // // 'severity' is the severity level of the message being written. 
// 'file' and 'line' are the file and line number where the ABSL_RAW_LOG macro // was located. -// 'buffer' and 'buf_size' are pointers to the buffer and buffer size. If the -// hook writes a prefix, it must increment *buffer and decrement *buf_size +// 'buf' and 'buf_size' are pointers to the buffer and buffer size. If the +// hook writes a prefix, it must increment *buf and decrement *buf_size // accordingly. -using LogPrefixHook = bool (*)(absl::LogSeverity severity, const char* file, - int line, char** buffer, int* buf_size); +using LogFilterAndPrefixHook = bool (*)(absl::LogSeverity severity, + const char* file, int line, char** buf, + int* buf_size); -// Function type for a raw_logging customization hook called to abort a process +// Function type for a raw_log customization hook called to abort a process // when a FATAL message is logged. If the provided AbortHook() returns, the // logging system will call abort(). // @@ -162,7 +159,10 @@ using LogPrefixHook = bool (*)(absl::LogSeverity severity, const char* file, // was located. // The NUL-terminated logged message lives in the buffer between 'buf_start' // and 'buf_end'. 'prefix_end' points to the first non-prefix character of the -// buffer (as written by the LogPrefixHook.) +// buffer (as written by the LogFilterAndPrefixHook.) +// +// The lifetime of the filename and message buffers will not end while the +// process remains alive. using AbortHook = void (*)(const char* file, int line, const char* buf_start, const char* prefix_end, const char* buf_end); @@ -184,11 +184,11 @@ ABSL_INTERNAL_ATOMIC_HOOK_ATTRIBUTES ABSL_DLL extern base_internal::AtomicHook< // // These functions are safe to call at any point during initialization; they do // not block or malloc, and are async-signal safe. -void RegisterLogPrefixHook(LogPrefixHook func); +void RegisterLogFilterAndPrefixHook(LogFilterAndPrefixHook func); void RegisterAbortHook(AbortHook func); void RegisterInternalLogFunction(InternalLogFunction func); -} // namespace raw_logging_internal +} // namespace raw_log_internal ABSL_NAMESPACE_END } // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock.cc b/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock.cc index 35c0696a34..381b913b29 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock.cc @@ -19,6 +19,7 @@ #include #include "absl/base/attributes.h" +#include "absl/base/config.h" #include "absl/base/internal/atomic_hook.h" #include "absl/base/internal/cycleclock.h" #include "absl/base/internal/spinlock_wait.h" @@ -66,12 +67,14 @@ void RegisterSpinLockProfiler(void (*fn)(const void *contendedlock, submit_profile_data.Store(fn); } +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL // Static member variable definitions. constexpr uint32_t SpinLock::kSpinLockHeld; constexpr uint32_t SpinLock::kSpinLockCooperative; constexpr uint32_t SpinLock::kSpinLockDisabledScheduling; constexpr uint32_t SpinLock::kSpinLockSleeper; constexpr uint32_t SpinLock::kWaitTimeMask; +#endif // Uncommon constructors. SpinLock::SpinLock(base_internal::SchedulingMode mode) @@ -175,7 +178,7 @@ void SpinLock::SlowUnlock(uint32_t lock_value) { // reserve a unitary wait time to represent that a waiter exists without our // own acquisition having been contended. 
if ((lock_value & kWaitTimeMask) != kSpinLockSleeper) { - const uint64_t wait_cycles = DecodeWaitCycles(lock_value); + const int64_t wait_cycles = DecodeWaitCycles(lock_value); ABSL_TSAN_MUTEX_PRE_DIVERT(this, 0); submit_profile_data(this, wait_cycles); ABSL_TSAN_MUTEX_POST_DIVERT(this, 0); @@ -217,9 +220,9 @@ uint32_t SpinLock::EncodeWaitCycles(int64_t wait_start_time, return clamped; } -uint64_t SpinLock::DecodeWaitCycles(uint32_t lock_value) { +int64_t SpinLock::DecodeWaitCycles(uint32_t lock_value) { // Cast to uint32_t first to ensure bits [63:32] are cleared. - const uint64_t scaled_wait_time = + const int64_t scaled_wait_time = static_cast(lock_value & kWaitTimeMask); return scaled_wait_time << (kProfileTimestampShift - kLockwordReservedShift); } diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock.h index 6d8d8dddd4..09ba5824b1 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock.h @@ -29,10 +29,8 @@ #ifndef ABSL_BASE_INTERNAL_SPINLOCK_H_ #define ABSL_BASE_INTERNAL_SPINLOCK_H_ -#include -#include - #include +#include #include "absl/base/attributes.h" #include "absl/base/const_init.h" @@ -41,8 +39,6 @@ #include "absl/base/internal/raw_logging.h" #include "absl/base/internal/scheduling_mode.h" #include "absl/base/internal/tsan_mutex_interface.h" -#include "absl/base/macros.h" -#include "absl/base/port.h" #include "absl/base/thread_annotations.h" namespace absl { @@ -137,7 +133,7 @@ class ABSL_LOCKABLE SpinLock { int64_t wait_end_time); // Extract number of wait cycles in a lock value. - static uint64_t DecodeWaitCycles(uint32_t lock_value); + static int64_t DecodeWaitCycles(uint32_t lock_value); // Provide access to protected method above. Use for testing only. friend struct SpinLockTest; diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock_linux.inc b/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock_linux.inc index 202f7cdfc8..fe8ba674f5 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock_linux.inc +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock_linux.inc @@ -57,13 +57,10 @@ static_assert(sizeof(std::atomic) == sizeof(int), extern "C" { ABSL_ATTRIBUTE_WEAK void ABSL_INTERNAL_C_SYMBOL(AbslInternalSpinLockDelay)( - std::atomic *w, uint32_t value, int loop, + std::atomic *w, uint32_t value, int, absl::base_internal::SchedulingMode) { absl::base_internal::ErrnoSaver errno_saver; - struct timespec tm; - tm.tv_sec = 0; - tm.tv_nsec = absl::base_internal::SpinLockSuggestedDelayNS(loop); - syscall(SYS_futex, w, FUTEX_WAIT | FUTEX_PRIVATE_FLAG, value, &tm); + syscall(SYS_futex, w, FUTEX_WAIT | FUTEX_PRIVATE_FLAG, value, nullptr); } ABSL_ATTRIBUTE_WEAK void ABSL_INTERNAL_C_SYMBOL(AbslInternalSpinLockWake)( diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock_win32.inc b/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock_win32.inc index 9d224813a5..934c2016fb 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock_win32.inc +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/spinlock_win32.inc @@ -27,7 +27,10 @@ void ABSL_INTERNAL_C_SYMBOL(AbslInternalSpinLockDelay)( } else if (loop == 1) { Sleep(0); } else { - Sleep(absl::base_internal::SpinLockSuggestedDelayNS(loop) / 1000000); + // SpinLockSuggestedDelayNS() always returns a positive integer, so this + // static_cast is safe. 
+ Sleep(static_cast( + absl::base_internal::SpinLockSuggestedDelayNS(loop) / 1000000)); } } diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/strerror.cc b/TMessagesProj/jni/voip/webrtc/absl/base/internal/strerror.cc index 0d6226fd0a..de91c05e07 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/strerror.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/strerror.cc @@ -66,8 +66,8 @@ constexpr int kSysNerr = 135; std::array* NewStrErrorTable() { auto* table = new std::array; - for (int i = 0; i < static_cast(table->size()); ++i) { - (*table)[i] = StrErrorInternal(i); + for (size_t i = 0; i < table->size(); ++i) { + (*table)[i] = StrErrorInternal(static_cast(i)); } return table; } @@ -77,8 +77,8 @@ std::array* NewStrErrorTable() { std::string StrError(int errnum) { absl::base_internal::ErrnoSaver errno_saver; static const auto* table = NewStrErrorTable(); - if (errnum >= 0 && errnum < static_cast(table->size())) { - return (*table)[errnum]; + if (errnum >= 0 && static_cast(errnum) < table->size()) { + return (*table)[static_cast(errnum)]; } return StrErrorInternal(errnum); } diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/sysinfo.cc b/TMessagesProj/jni/voip/webrtc/absl/base/internal/sysinfo.cc index a7cfb461f3..da499d3a77 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/sysinfo.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/sysinfo.cc @@ -117,19 +117,18 @@ int Win32NumCPUs() { } } free(info); - return logicalProcessorCount; + return static_cast(logicalProcessorCount); } #endif } // namespace - static int GetNumCPUs() { #if defined(__myriad2__) return 1; #elif defined(_WIN32) - const unsigned hardware_concurrency = Win32NumCPUs(); + const int hardware_concurrency = Win32NumCPUs(); return hardware_concurrency ? hardware_concurrency : 1; #elif defined(_AIX) return sysconf(_SC_NPROCESSORS_ONLN); @@ -137,7 +136,7 @@ static int GetNumCPUs() { // Other possibilities: // - Read /sys/devices/system/cpu/online and use cpumask_parse() // - sysconf(_SC_NPROCESSORS_ONLN) - return std::thread::hardware_concurrency(); + return static_cast(std::thread::hardware_concurrency()); #endif } @@ -190,12 +189,15 @@ static double GetNominalCPUFrequency() { // and the memory location pointed to by value is set to the value read. static bool ReadLongFromFile(const char *file, long *value) { bool ret = false; - int fd = open(file, O_RDONLY); + int fd = open(file, O_RDONLY | O_CLOEXEC); if (fd != -1) { char line[1024]; char *err; memset(line, '\0', sizeof(line)); - int len = read(fd, line, sizeof(line) - 1); + ssize_t len; + do { + len = read(fd, line, sizeof(line) - 1); + } while (len < 0 && errno == EINTR); if (len <= 0) { ret = false; } else { @@ -377,7 +379,7 @@ pid_t GetTID() { #endif pid_t GetTID() { - return syscall(SYS_gettid); + return static_cast(syscall(SYS_gettid)); } #elif defined(__akaros__) @@ -430,11 +432,11 @@ static constexpr int kBitsPerWord = 32; // tid_array is uint32_t. // Returns the TID to tid_array. 
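FreeTID() below returns a thread ID to the shared bitmap by clearing its bit, and GetTID() claims the first clear bit. A simplified standalone sketch of the same word/bit arithmetic follows; the helper names are illustrative and, unlike the real code, no SpinLock guards the bitmap.

#include <cassert>
#include <cstdint>
#include <vector>

constexpr int kBitsPerWord = 32;  // one uint32_t word holds 32 IDs

int AllocateId(std::vector<uint32_t>& words) {
  for (size_t word = 0; word < words.size(); ++word) {
    for (int bit = 0; bit < kBitsPerWord; ++bit) {
      if (((words[word] >> bit) & 1u) == 0) {
        words[word] |= 1u << bit;  // mark the ID as allocated
        return static_cast<int>(word) * kBitsPerWord + bit;
      }
    }
  }
  words.push_back(1u);  // all words full: grow and take bit 0 of the new word
  return static_cast<int>(words.size() - 1) * kBitsPerWord;
}

void FreeId(std::vector<uint32_t>& words, int id) {
  const size_t word = static_cast<size_t>(id) / kBitsPerWord;
  assert(word < words.size());
  words[word] &= ~(1u << (id % kBitsPerWord));  // clear the ID's bit
}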
static void FreeTID(void *v) { intptr_t tid = reinterpret_cast(v); - int word = tid / kBitsPerWord; + intptr_t word = tid / kBitsPerWord; uint32_t mask = ~(1u << (tid % kBitsPerWord)); absl::base_internal::SpinLockHolder lock(&tid_lock); assert(0 <= word && static_cast(word) < tid_array->size()); - (*tid_array)[word] &= mask; + (*tid_array)[static_cast(word)] &= mask; } static void InitGetTID() { @@ -456,7 +458,7 @@ pid_t GetTID() { intptr_t tid = reinterpret_cast(pthread_getspecific(tid_key)); if (tid != 0) { - return tid; + return static_cast(tid); } int bit; // tid_array[word] = 1u << bit; @@ -477,7 +479,8 @@ pid_t GetTID() { while (bit < kBitsPerWord && (((*tid_array)[word] >> bit) & 1) != 0) { ++bit; } - tid = (word * kBitsPerWord) + bit; + tid = + static_cast((word * kBitsPerWord) + static_cast(bit)); (*tid_array)[word] |= 1u << bit; // Mark the TID as allocated. } diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/thread_annotations.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/thread_annotations.h index 4dab6a9c15..8c5c67e0df 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/thread_annotations.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/thread_annotations.h @@ -38,6 +38,13 @@ #ifndef ABSL_BASE_INTERNAL_THREAD_ANNOTATIONS_H_ #define ABSL_BASE_INTERNAL_THREAD_ANNOTATIONS_H_ +// ABSL_LEGACY_THREAD_ANNOTATIONS is a *temporary* compatibility macro that can +// be defined on the compile command-line to restore the legacy spellings of the +// thread annotations macros/functions. The macros in this file are available +// under ABSL_ prefixed spellings in absl/base/thread_annotations.h. This macro +// and the legacy spellings will be removed in the future. +#ifdef ABSL_LEGACY_THREAD_ANNOTATIONS + #if defined(__clang__) #define THREAD_ANNOTATION_ATTRIBUTE__(x) __attribute__((x)) #else @@ -268,4 +275,6 @@ inline T& ts_unchecked_read(T& v) NO_THREAD_SAFETY_ANALYSIS { } // namespace thread_safety_analysis +#endif // defined(ABSL_LEGACY_THREAD_ANNOTATIONS) + #endif // ABSL_BASE_INTERNAL_THREAD_ANNOTATIONS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/thread_identity.cc b/TMessagesProj/jni/voip/webrtc/absl/base/internal/thread_identity.cc index 9950e63a79..79853f09f5 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/thread_identity.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/thread_identity.cc @@ -14,7 +14,7 @@ #include "absl/base/internal/thread_identity.h" -#ifndef _WIN32 +#if !defined(_WIN32) || defined(__MINGW32__) #include #include #endif @@ -56,6 +56,7 @@ void AllocateThreadIdentityKey(ThreadIdentityReclaimerFunction reclaimer) { // *different* instances of this ptr. // Apple platforms have the visibility attribute, but issue a compile warning // that protected visibility is unsupported. 
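With the legacy, unprefixed annotation macros now gated behind ABSL_LEGACY_THREAD_ANNOTATIONS (see the thread_annotations.h hunk above), new code is expected to use the ABSL_-prefixed spellings. A minimal sketch, assuming only absl::Mutex and the public annotation macros:

#include "absl/base/thread_annotations.h"
#include "absl/synchronization/mutex.h"

class Counter {
 public:
  // Callers must not already hold mu_; this method acquires it itself.
  void Increment() ABSL_LOCKS_EXCLUDED(mu_) {
    absl::MutexLock lock(&mu_);
    ++value_;
  }

 private:
  absl::Mutex mu_;
  int value_ ABSL_GUARDED_BY(mu_) = 0;  // checked by Clang's thread-safety analysis
};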
+ABSL_CONST_INIT // Must come before __attribute__((visibility("protected"))) #if ABSL_HAVE_ATTRIBUTE(visibility) && !defined(__APPLE__) __attribute__((visibility("protected"))) #endif // ABSL_HAVE_ATTRIBUTE(visibility) && !defined(__APPLE__) diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/unscaledcycleclock.cc b/TMessagesProj/jni/voip/webrtc/absl/base/internal/unscaledcycleclock.cc index 4d352bd110..b1c396c69c 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/unscaledcycleclock.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/unscaledcycleclock.cc @@ -24,8 +24,13 @@ #ifdef __GLIBC__ #include #elif defined(__FreeBSD__) -#include +// clang-format off +// This order does actually matter =(. #include +#include +// clang-format on + +#include "absl/base/call_once.h" #endif #endif @@ -49,12 +54,6 @@ double UnscaledCycleClock::Frequency() { #elif defined(__x86_64__) -int64_t UnscaledCycleClock::Now() { - uint64_t low, high; - __asm__ volatile("rdtsc" : "=a"(low), "=d"(high)); - return (high << 32) | low; -} - double UnscaledCycleClock::Frequency() { return base_internal::NominalCPUFrequency(); } diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/unscaledcycleclock.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/unscaledcycleclock.h index 681ff8f996..cc1276ba08 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/unscaledcycleclock.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/unscaledcycleclock.h @@ -42,49 +42,11 @@ #include #endif -#include "absl/base/port.h" - -// The following platforms have an implementation of a hardware counter. -#if defined(__i386__) || defined(__x86_64__) || defined(__aarch64__) || \ - defined(__powerpc__) || defined(__ppc__) || defined(__riscv) || \ - defined(_M_IX86) || defined(_M_X64) -#define ABSL_HAVE_UNSCALED_CYCLECLOCK_IMPLEMENTATION 1 -#else -#define ABSL_HAVE_UNSCALED_CYCLECLOCK_IMPLEMENTATION 0 -#endif - -// The following platforms often disable access to the hardware -// counter (through a sandbox) even if the underlying hardware has a -// usable counter. The CycleTimer interface also requires a *scaled* -// CycleClock that runs at atleast 1 MHz. We've found some Android -// ARM64 devices where this is not the case, so we disable it by -// default on Android ARM64. -#if defined(__native_client__) || \ - (defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE) || \ - (defined(__ANDROID__) && defined(__aarch64__)) -#define ABSL_USE_UNSCALED_CYCLECLOCK_DEFAULT 0 -#else -#define ABSL_USE_UNSCALED_CYCLECLOCK_DEFAULT 1 -#endif - -// UnscaledCycleClock is an optional internal feature. -// Use "#if ABSL_USE_UNSCALED_CYCLECLOCK" to test for its presence. -// Can be overridden at compile-time via -DABSL_USE_UNSCALED_CYCLECLOCK=0|1 -#if !defined(ABSL_USE_UNSCALED_CYCLECLOCK) -#define ABSL_USE_UNSCALED_CYCLECLOCK \ - (ABSL_HAVE_UNSCALED_CYCLECLOCK_IMPLEMENTATION && \ - ABSL_USE_UNSCALED_CYCLECLOCK_DEFAULT) -#endif +#include "absl/base/config.h" +#include "absl/base/internal/unscaledcycleclock_config.h" #if ABSL_USE_UNSCALED_CYCLECLOCK -// This macro can be used to test if UnscaledCycleClock::Frequency() -// is NominalCPUFrequency() on a particular platform. 
-#if (defined(__i386__) || defined(__x86_64__) || defined(__riscv) || \ - defined(_M_IX86) || defined(_M_X64)) -#define ABSL_INTERNAL_UNSCALED_CYCLECLOCK_FREQUENCY_IS_CPU_FREQUENCY -#endif - namespace absl { ABSL_NAMESPACE_BEGIN namespace time_internal { @@ -115,6 +77,16 @@ class UnscaledCycleClock { friend class base_internal::UnscaledCycleClockWrapperForInitializeFrequency; }; +#if defined(__x86_64__) + +inline int64_t UnscaledCycleClock::Now() { + uint64_t low, high; + __asm__ volatile("rdtsc" : "=a"(low), "=d"(high)); + return static_cast((high << 32) | low); +} + +#endif + } // namespace base_internal ABSL_NAMESPACE_END } // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/unscaledcycleclock_config.h b/TMessagesProj/jni/voip/webrtc/absl/base/internal/unscaledcycleclock_config.h new file mode 100644 index 0000000000..24b324ac99 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/base/internal/unscaledcycleclock_config.h @@ -0,0 +1,62 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef ABSL_BASE_INTERNAL_UNSCALEDCYCLECLOCK_CONFIG_H_ +#define ABSL_BASE_INTERNAL_UNSCALEDCYCLECLOCK_CONFIG_H_ + +#if defined(__APPLE__) +#include +#endif + +// The following platforms have an implementation of a hardware counter. +#if defined(__i386__) || defined(__x86_64__) || defined(__aarch64__) || \ + defined(__powerpc__) || defined(__ppc__) || defined(__riscv) || \ + defined(_M_IX86) || (defined(_M_X64) && !defined(_M_ARM64EC)) +#define ABSL_HAVE_UNSCALED_CYCLECLOCK_IMPLEMENTATION 1 +#else +#define ABSL_HAVE_UNSCALED_CYCLECLOCK_IMPLEMENTATION 0 +#endif + +// The following platforms often disable access to the hardware +// counter (through a sandbox) even if the underlying hardware has a +// usable counter. The CycleTimer interface also requires a *scaled* +// CycleClock that runs at atleast 1 MHz. We've found some Android +// ARM64 devices where this is not the case, so we disable it by +// default on Android ARM64. +#if defined(__native_client__) || (defined(__APPLE__)) || \ + (defined(__ANDROID__) && defined(__aarch64__)) +#define ABSL_USE_UNSCALED_CYCLECLOCK_DEFAULT 0 +#else +#define ABSL_USE_UNSCALED_CYCLECLOCK_DEFAULT 1 +#endif + +// UnscaledCycleClock is an optional internal feature. +// Use "#if ABSL_USE_UNSCALED_CYCLECLOCK" to test for its presence. +// Can be overridden at compile-time via -DABSL_USE_UNSCALED_CYCLECLOCK=0|1 +#if !defined(ABSL_USE_UNSCALED_CYCLECLOCK) +#define ABSL_USE_UNSCALED_CYCLECLOCK \ + (ABSL_HAVE_UNSCALED_CYCLECLOCK_IMPLEMENTATION && \ + ABSL_USE_UNSCALED_CYCLECLOCK_DEFAULT) +#endif + +#if ABSL_USE_UNSCALED_CYCLECLOCK +// This macro can be used to test if UnscaledCycleClock::Frequency() +// is NominalCPUFrequency() on a particular platform. 
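The inline UnscaledCycleClock::Now() above reads the x86-64 timestamp counter with rdtsc. A standalone sketch of the same measurement idea using the __rdtsc() intrinsic; it assumes GCC or Clang on x86-64, and the result is only meaningful with an invariant TSC on a single core. CyclesSpent is an illustrative helper, not an Abseil API.

#include <cstdint>
#include <utility>
#include <x86intrin.h>

// Returns roughly how many TSC ticks fn() takes to run.
template <typename Fn>
uint64_t CyclesSpent(Fn&& fn) {
  const uint64_t start = __rdtsc();  // unscaled cycle counter, like Now() above
  std::forward<Fn>(fn)();
  return __rdtsc() - start;
}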
+#if (defined(__i386__) || defined(__x86_64__) || defined(__riscv) || \ + defined(_M_IX86) || defined(_M_X64)) +#define ABSL_INTERNAL_UNSCALED_CYCLECLOCK_FREQUENCY_IS_CPU_FREQUENCY +#endif +#endif + +#endif // ABSL_BASE_INTERNAL_UNSCALEDCYCLECLOCK_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/log_severity.cc b/TMessagesProj/jni/voip/webrtc/absl/base/log_severity.cc index de26b06e49..60a8fc1f89 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/log_severity.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/base/log_severity.cc @@ -16,6 +16,8 @@ #include +#include "absl/base/attributes.h" + namespace absl { ABSL_NAMESPACE_BEGIN diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/optimization.h b/TMessagesProj/jni/voip/webrtc/absl/base/optimization.h index d090be1286..d706100cca 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/optimization.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/optimization.h @@ -91,6 +91,7 @@ #define ABSL_CACHELINE_SIZE 64 #endif #endif +#endif #ifndef ABSL_CACHELINE_SIZE // A reasonable default guess. Note that overestimates tend to waste more @@ -141,12 +142,11 @@ // the generated machine code. // 3) Prefer applying this attribute to individual variables. Avoid // applying it to types. This tends to localize the effect. +#if defined(__clang__) || defined(__GNUC__) #define ABSL_CACHELINE_ALIGNED __attribute__((aligned(ABSL_CACHELINE_SIZE))) #elif defined(_MSC_VER) -#define ABSL_CACHELINE_SIZE 64 #define ABSL_CACHELINE_ALIGNED __declspec(align(ABSL_CACHELINE_SIZE)) #else -#define ABSL_CACHELINE_SIZE 64 #define ABSL_CACHELINE_ALIGNED #endif @@ -181,35 +181,43 @@ #define ABSL_PREDICT_TRUE(x) (x) #endif -// ABSL_INTERNAL_ASSUME(cond) +// ABSL_ASSUME(cond) +// // Informs the compiler that a condition is always true and that it can assume -// it to be true for optimization purposes. The call has undefined behavior if -// the condition is false. +// it to be true for optimization purposes. +// +// WARNING: If the condition is false, the program can produce undefined and +// potentially dangerous behavior. +// // In !NDEBUG mode, the condition is checked with an assert(). -// NOTE: The expression must not have side effects, as it will only be evaluated -// in some compilation modes and not others. +// +// NOTE: The expression must not have side effects, as it may only be evaluated +// in some compilation modes and not others. Some compilers may issue a warning +// if the compiler cannot prove the expression has no side effects. For example, +// the expression should not use a function call since the compiler cannot prove +// that a function call does not have side effects. // // Example: // // int x = ...; -// ABSL_INTERNAL_ASSUME(x >= 0); +// ABSL_ASSUME(x >= 0); // // The compiler can optimize the division to a simple right shift using the // // assumption specified above. 
// int y = x / 16; // #if !defined(NDEBUG) -#define ABSL_INTERNAL_ASSUME(cond) assert(cond) +#define ABSL_ASSUME(cond) assert(cond) #elif ABSL_HAVE_BUILTIN(__builtin_assume) -#define ABSL_INTERNAL_ASSUME(cond) __builtin_assume(cond) +#define ABSL_ASSUME(cond) __builtin_assume(cond) #elif defined(__GNUC__) || ABSL_HAVE_BUILTIN(__builtin_unreachable) -#define ABSL_INTERNAL_ASSUME(cond) \ +#define ABSL_ASSUME(cond) \ do { \ if (!(cond)) __builtin_unreachable(); \ } while (0) #elif defined(_MSC_VER) -#define ABSL_INTERNAL_ASSUME(cond) __assume(cond) +#define ABSL_ASSUME(cond) __assume(cond) #else -#define ABSL_INTERNAL_ASSUME(cond) \ +#define ABSL_ASSUME(cond) \ do { \ static_cast(false && (cond)); \ } while (0) diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/options.h b/TMessagesProj/jni/voip/webrtc/absl/base/options.h index 1641271cd3..5c162a3891 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/options.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/options.h @@ -67,12 +67,6 @@ #ifndef ABSL_BASE_OPTIONS_H_ #define ABSL_BASE_OPTIONS_H_ -// Include a standard library header to allow configuration based on the -// standard library in use. -#ifdef __cplusplus -#include -#endif - // ----------------------------------------------------------------------------- // Type Compatibility Options // ----------------------------------------------------------------------------- @@ -100,7 +94,7 @@ // User code should not inspect this macro. To check in the preprocessor if // absl::any is a typedef of std::any, use the feature macro ABSL_USES_STD_ANY. -#define ABSL_OPTION_USE_STD_ANY 0 +#define ABSL_OPTION_USE_STD_ANY 2 // ABSL_OPTION_USE_STD_OPTIONAL @@ -127,7 +121,7 @@ // absl::optional is a typedef of std::optional, use the feature macro // ABSL_USES_STD_OPTIONAL. -#define ABSL_OPTION_USE_STD_OPTIONAL 0 +#define ABSL_OPTION_USE_STD_OPTIONAL 2 // ABSL_OPTION_USE_STD_STRING_VIEW @@ -154,7 +148,7 @@ // absl::string_view is a typedef of std::string_view, use the feature macro // ABSL_USES_STD_STRING_VIEW. -#define ABSL_OPTION_USE_STD_STRING_VIEW 0 +#define ABSL_OPTION_USE_STD_STRING_VIEW 2 // ABSL_OPTION_USE_STD_VARIANT // @@ -180,7 +174,7 @@ // absl::variant is a typedef of std::variant, use the feature macro // ABSL_USES_STD_VARIANT. -#define ABSL_OPTION_USE_STD_VARIANT 0 +#define ABSL_OPTION_USE_STD_VARIANT 2 // ABSL_OPTION_USE_INLINE_NAMESPACE @@ -233,6 +227,6 @@ // checks enabled by this option may abort the program in a different way and // log additional information when `NDEBUG` is not defined. -#define ABSL_OPTION_HARDENED 1 +#define ABSL_OPTION_HARDENED 0 #endif // ABSL_BASE_OPTIONS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/policy_checks.h b/TMessagesProj/jni/voip/webrtc/absl/base/policy_checks.h index 06b3243916..2626fb6a36 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/policy_checks.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/policy_checks.h @@ -44,17 +44,17 @@ // Toolchain Check // ----------------------------------------------------------------------------- -// We support MSVC++ 14.0 update 2 and later. +// We support Visual Studio 2017 (MSVC++ 15.0) and later. // This minimum will go up. -#if defined(_MSC_FULL_VER) && _MSC_FULL_VER < 190023918 && !defined(__clang__) -#error "This package requires Visual Studio 2015 Update 2 or higher." +#if defined(_MSC_VER) && _MSC_VER < 1910 && !defined(__clang__) +#error "This package requires Visual Studio 2017 (MSVC++ 15.0) or higher." #endif -// We support gcc 4.7 and later. +// We support gcc 5 and later. 
// This minimum will go up. #if defined(__GNUC__) && !defined(__clang__) -#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 7) -#error "This package requires gcc 4.7 or higher." +#if __GNUC__ < 5 +#error "This package requires gcc 5 or higher." #endif #endif @@ -69,13 +69,15 @@ // C++ Version Check // ----------------------------------------------------------------------------- -// Enforce C++11 as the minimum. Note that Visual Studio has not -// advanced __cplusplus despite being good enough for our purposes, so -// so we exempt it from the check. -#if defined(__cplusplus) && !defined(_MSC_VER) -#if __cplusplus < 201103L -#error "C++ versions less than C++11 are not supported." -#endif +// Enforce C++14 as the minimum. +#if defined(_MSVC_LANG) +#if _MSVC_LANG < 201402L +#error "C++ versions less than C++14 are not supported." +#endif // _MSVC_LANG < 201402L +#elif defined(__cplusplus) +#if __cplusplus < 201402L +#error "C++ versions less than C++14 are not supported." +#endif // __cplusplus < 201402L #endif // ----------------------------------------------------------------------------- diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/spinlock_test_common.cc b/TMessagesProj/jni/voip/webrtc/absl/base/spinlock_test_common.cc index 2b572c5b3f..52ecf58012 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/spinlock_test_common.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/base/spinlock_test_common.cc @@ -34,7 +34,7 @@ #include "absl/synchronization/blocking_counter.h" #include "absl/synchronization/notification.h" -constexpr int32_t kNumThreads = 10; +constexpr uint32_t kNumThreads = 10; constexpr int32_t kIters = 1000; namespace absl { @@ -48,14 +48,14 @@ struct SpinLockTest { int64_t wait_end_time) { return SpinLock::EncodeWaitCycles(wait_start_time, wait_end_time); } - static uint64_t DecodeWaitCycles(uint32_t lock_value) { + static int64_t DecodeWaitCycles(uint32_t lock_value) { return SpinLock::DecodeWaitCycles(lock_value); } }; namespace { -static constexpr int kArrayLength = 10; +static constexpr size_t kArrayLength = 10; static uint32_t values[kArrayLength]; ABSL_CONST_INIT static SpinLock static_cooperative_spinlock( @@ -79,11 +79,11 @@ static uint32_t Hash32(uint32_t a, uint32_t c) { return c; } -static void TestFunction(int thread_salt, SpinLock* spinlock) { +static void TestFunction(uint32_t thread_salt, SpinLock* spinlock) { for (int i = 0; i < kIters; i++) { SpinLockHolder h(spinlock); - for (int j = 0; j < kArrayLength; j++) { - const int index = (j + thread_salt) % kArrayLength; + for (size_t j = 0; j < kArrayLength; j++) { + const size_t index = (j + thread_salt) % kArrayLength; values[index] = Hash32(values[index], thread_salt); std::this_thread::yield(); } @@ -93,7 +93,7 @@ static void TestFunction(int thread_salt, SpinLock* spinlock) { static void ThreadedTest(SpinLock* spinlock) { std::vector threads; threads.reserve(kNumThreads); - for (int i = 0; i < kNumThreads; ++i) { + for (uint32_t i = 0; i < kNumThreads; ++i) { threads.push_back(std::thread(TestFunction, i, spinlock)); } for (auto& thread : threads) { @@ -101,7 +101,7 @@ static void ThreadedTest(SpinLock* spinlock) { } SpinLockHolder h(spinlock); - for (int i = 1; i < kArrayLength; i++) { + for (size_t i = 1; i < kArrayLength; i++) { EXPECT_EQ(values[0], values[i]); } } @@ -133,28 +133,28 @@ TEST(SpinLock, WaitCyclesEncoding) { // but the lower kProfileTimestampShift will be dropped. 
const int kMaxCyclesShift = 32 - kLockwordReservedShift + kProfileTimestampShift; - const uint64_t kMaxCycles = (int64_t{1} << kMaxCyclesShift) - 1; + const int64_t kMaxCycles = (int64_t{1} << kMaxCyclesShift) - 1; // These bits should be zero after encoding. const uint32_t kLockwordReservedMask = (1 << kLockwordReservedShift) - 1; // These bits are dropped when wait cycles are encoded. - const uint64_t kProfileTimestampMask = (1 << kProfileTimestampShift) - 1; + const int64_t kProfileTimestampMask = (1 << kProfileTimestampShift) - 1; // Test a bunch of random values std::default_random_engine generator; // Shift to avoid overflow below. - std::uniform_int_distribution time_distribution( - 0, std::numeric_limits::max() >> 4); - std::uniform_int_distribution cycle_distribution(0, kMaxCycles); + std::uniform_int_distribution time_distribution( + 0, std::numeric_limits::max() >> 3); + std::uniform_int_distribution cycle_distribution(0, kMaxCycles); for (int i = 0; i < 100; i++) { int64_t start_time = time_distribution(generator); int64_t cycles = cycle_distribution(generator); int64_t end_time = start_time + cycles; uint32_t lock_value = SpinLockTest::EncodeWaitCycles(start_time, end_time); - EXPECT_EQ(0, lock_value & kLockwordReservedMask); - uint64_t decoded = SpinLockTest::DecodeWaitCycles(lock_value); + EXPECT_EQ(0u, lock_value & kLockwordReservedMask); + int64_t decoded = SpinLockTest::DecodeWaitCycles(lock_value); EXPECT_EQ(0, decoded & kProfileTimestampMask); EXPECT_EQ(cycles & ~kProfileTimestampMask, decoded); } @@ -178,21 +178,21 @@ TEST(SpinLock, WaitCyclesEncoding) { // Test clamping uint32_t max_value = SpinLockTest::EncodeWaitCycles(start_time, start_time + kMaxCycles); - uint64_t max_value_decoded = SpinLockTest::DecodeWaitCycles(max_value); - uint64_t expected_max_value_decoded = kMaxCycles & ~kProfileTimestampMask; + int64_t max_value_decoded = SpinLockTest::DecodeWaitCycles(max_value); + int64_t expected_max_value_decoded = kMaxCycles & ~kProfileTimestampMask; EXPECT_EQ(expected_max_value_decoded, max_value_decoded); const int64_t step = (1 << kProfileTimestampShift); uint32_t after_max_value = SpinLockTest::EncodeWaitCycles(start_time, start_time + kMaxCycles + step); - uint64_t after_max_value_decoded = + int64_t after_max_value_decoded = SpinLockTest::DecodeWaitCycles(after_max_value); EXPECT_EQ(expected_max_value_decoded, after_max_value_decoded); uint32_t before_max_value = SpinLockTest::EncodeWaitCycles( start_time, start_time + kMaxCycles - step); - uint64_t before_max_value_decoded = - SpinLockTest::DecodeWaitCycles(before_max_value); + int64_t before_max_value_decoded = + SpinLockTest::DecodeWaitCycles(before_max_value); EXPECT_GT(expected_max_value_decoded, before_max_value_decoded); } diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/thread_annotations.h b/TMessagesProj/jni/voip/webrtc/absl/base/thread_annotations.h index 531e4f7a4f..bc8a620347 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/thread_annotations.h +++ b/TMessagesProj/jni/voip/webrtc/absl/base/thread_annotations.h @@ -36,6 +36,8 @@ #include "absl/base/attributes.h" #include "absl/base/config.h" +// TODO(mbonadei): Remove after the backward compatibility period. +#include "absl/base/internal/thread_annotations.h" // IWYU pragma: export // ABSL_GUARDED_BY() // @@ -152,8 +154,8 @@ // ABSL_LOCKS_EXCLUDED() // -// Documents the locks acquired in the body of the function. 
These locks -// cannot be held when calling this function (as Abseil's `Mutex` locks are +// Documents the locks that cannot be held by callers of this function, as they +// might be acquired by this function (Abseil's `Mutex` locks are // non-reentrant). #if ABSL_HAVE_ATTRIBUTE(locks_excluded) #define ABSL_LOCKS_EXCLUDED(...) __attribute__((locks_excluded(__VA_ARGS__))) diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/btree_benchmark.cc b/TMessagesProj/jni/voip/webrtc/absl/container/btree_benchmark.cc index 0ca497c81b..0d26fd424c 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/btree_benchmark.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/container/btree_benchmark.cc @@ -27,6 +27,7 @@ #include #include "benchmark/benchmark.h" +#include "absl/algorithm/container.h" #include "absl/base/internal/raw_logging.h" #include "absl/container/btree_map.h" #include "absl/container/btree_set.h" @@ -34,9 +35,10 @@ #include "absl/container/flat_hash_map.h" #include "absl/container/flat_hash_set.h" #include "absl/container/internal/hashtable_debug.h" -#include "absl/flags/flag.h" #include "absl/hash/hash.h" +#include "absl/log/log.h" #include "absl/memory/memory.h" +#include "absl/random/random.h" #include "absl/strings/cord.h" #include "absl/strings/str_format.h" #include "absl/time/time.h" @@ -733,6 +735,29 @@ double ContainerInfo(const btree_map>& b) { BIG_TYPE_PTR_BENCHMARKS(32); +void BM_BtreeSet_IteratorSubtraction(benchmark::State& state) { + absl::InsecureBitGen bitgen; + std::vector vec; + // Randomize the set's insertion order so the nodes aren't all full. + vec.reserve(state.range(0)); + for (int i = 0; i < state.range(0); ++i) vec.push_back(i); + absl::c_shuffle(vec, bitgen); + + absl::btree_set set; + for (int i : vec) set.insert(i); + + size_t distance = absl::Uniform(bitgen, 0u, set.size()); + while (state.KeepRunningBatch(distance)) { + size_t end = absl::Uniform(bitgen, distance, set.size()); + size_t begin = end - distance; + benchmark::DoNotOptimize(set.find(static_cast(end)) - + set.find(static_cast(begin))); + distance = absl::Uniform(bitgen, 0u, set.size()); + } +} + +BENCHMARK(BM_BtreeSet_IteratorSubtraction)->Range(1 << 10, 1 << 20); + } // namespace } // namespace container_internal ABSL_NAMESPACE_END diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/btree_map.h b/TMessagesProj/jni/voip/webrtc/absl/container/btree_map.h index ad484ce020..cd3ee2b422 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/btree_map.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/btree_map.h @@ -42,10 +42,13 @@ // Importantly, insertions and deletions may invalidate outstanding iterators, // pointers, and references to elements. Such invalidations are typically only // an issue if insertion and deletion operations are interleaved with the use of -// more than one iterator, pointer, or reference simultaneously. For this -// reason, `insert()` and `erase()` return a valid iterator at the current -// position. Another important difference is that key-types must be -// copy-constructible. +// more than one iterator, pointer, or reference simultaneously. For this +// reason, `insert()`, `erase()`, and `extract_and_get_next()` return a valid +// iterator at the current position. Another important difference is that +// key-types must be copy-constructible. +// +// Another API difference is that btree iterators can be subtracted, and this +// is faster than using std::distance. 
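A brief usage sketch of the iterator subtraction mentioned above and exercised by BM_BtreeSet_IteratorSubtraction; it assumes Abseil's btree headers, and the element values are illustrative.

#include <cassert>

#include "absl/container/btree_set.h"

void IteratorSubtractionExample() {
  absl::btree_set<int> s = {1, 2, 3, 4, 5, 6, 7, 8};
  auto lo = s.find(3);
  auto hi = s.find(7);
  // btree iterators support operator-, which walks the tree structure rather
  // than incrementing element by element as std::distance would.
  assert(hi - lo == 4);
}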
#ifndef ABSL_CONTAINER_BTREE_MAP_H_ #define ABSL_CONTAINER_BTREE_MAP_H_ @@ -59,7 +62,7 @@ ABSL_NAMESPACE_BEGIN namespace container_internal { template + int TargetNodeSize, bool IsMulti> struct map_params; } // namespace container_internal @@ -85,7 +88,7 @@ class btree_map : public container_internal::btree_map_container< container_internal::btree>> { + /*IsMulti=*/false>>> { using Base = typename btree_map::btree_map_container; public: @@ -322,7 +325,8 @@ class btree_map // btree_map::extract() // // Extracts the indicated element, erasing it in the process, and returns it - // as a C++17-compatible node handle. Overloads are listed below. + // as a C++17-compatible node handle. Any references, pointers, or iterators + // are invalidated. Overloads are listed below. // // node_type extract(const_iterator position): // @@ -347,6 +351,21 @@ class btree_map // It does NOT refer to the data layout of the underlying btree. using Base::extract; + // btree_map::extract_and_get_next() + // + // Extracts the indicated element, erasing it in the process, and returns it + // as a C++17-compatible node handle along with an iterator to the next + // element. + // + // extract_and_get_next_return_type extract_and_get_next( + // const_iterator position): + // + // Extracts the element at the indicated position, returns a struct + // containing a member named `node`: a node handle owning that extracted + // data and a member named `next`: an iterator pointing to the next element + // in the btree. + using Base::extract_and_get_next; + // btree_map::merge() // // Extracts elements from a given `source` btree_map into this @@ -507,7 +526,7 @@ class btree_multimap : public container_internal::btree_multimap_container< container_internal::btree>> { + /*IsMulti=*/true>>> { using Base = typename btree_multimap::btree_multimap_container; public: @@ -698,6 +717,21 @@ class btree_multimap // It does NOT refer to the data layout of the underlying btree. using Base::extract; + // btree_multimap::extract_and_get_next() + // + // Extracts the indicated element, erasing it in the process, and returns it + // as a C++17-compatible node handle along with an iterator to the next + // element. + // + // extract_and_get_next_return_type extract_and_get_next( + // const_iterator position): + // + // Extracts the element at the indicated position, returns a struct + // containing a member named `node`: a node handle owning that extracted + // data and a member named `next`: an iterator pointing to the next element + // in the btree. + using Base::extract_and_get_next; + // btree_multimap::merge() // // Extracts all elements from a given `source` btree_multimap into this @@ -817,9 +851,9 @@ namespace container_internal { // A parameters structure for holding the type parameters for a btree_map. // Compare and Alloc should be nothrow copy-constructible. template -struct map_params : common_params> { + int TargetNodeSize, bool IsMulti> +struct map_params : common_params> { using super_type = typename map_params::common_params; using mapped_type = Data; // This type allows us to move keys when it is safe to do so. 
It is safe @@ -829,25 +863,6 @@ struct map_params : common_params - friend class btree; - - protected: - explicit value_compare(original_key_compare c) : comp(std::move(c)) {} - - original_key_compare comp; // NOLINT - - public: - auto operator()(const value_type &lhs, const value_type &rhs) const - -> decltype(comp(lhs.first, rhs.first)) { - return comp(lhs.first, rhs.first); - } - }; - using is_map_container = std::true_type; - template static auto key(const V &value) -> decltype(value.first) { return value.first; diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/btree_set.h b/TMessagesProj/jni/voip/webrtc/absl/container/btree_set.h index 78826830f4..51dc42b797 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/btree_set.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/btree_set.h @@ -43,8 +43,11 @@ // pointers, and references to elements. Such invalidations are typically only // an issue if insertion and deletion operations are interleaved with the use of // more than one iterator, pointer, or reference simultaneously. For this -// reason, `insert()` and `erase()` return a valid iterator at the current -// position. +// reason, `insert()`, `erase()`, and `extract_and_get_next()` return a valid +// iterator at the current position. +// +// Another API difference is that btree iterators can be subtracted, and this +// is faster than using std::distance. #ifndef ABSL_CONTAINER_BTREE_SET_H_ #define ABSL_CONTAINER_BTREE_SET_H_ @@ -61,7 +64,7 @@ template struct set_slot_policy; template + bool IsMulti> struct set_params; } // namespace container_internal @@ -87,7 +90,7 @@ class btree_set : public container_internal::btree_set_container< container_internal::btree>> { + /*IsMulti=*/false>>> { using Base = typename btree_set::btree_set_container; public: @@ -269,7 +272,8 @@ class btree_set // btree_set::extract() // // Extracts the indicated element, erasing it in the process, and returns it - // as a C++17-compatible node handle. Overloads are listed below. + // as a C++17-compatible node handle. Any references, pointers, or iterators + // are invalidated. Overloads are listed below. // // node_type extract(const_iterator position): // @@ -289,6 +293,21 @@ class btree_set // It does NOT refer to the data layout of the underlying btree. using Base::extract; + // btree_set::extract_and_get_next() + // + // Extracts the indicated element, erasing it in the process, and returns it + // as a C++17-compatible node handle along with an iterator to the next + // element. + // + // extract_and_get_next_return_type extract_and_get_next( + // const_iterator position): + // + // Extracts the element at the indicated position, returns a struct + // containing a member named `node`: a node handle owning that extracted + // data and a member named `next`: an iterator pointing to the next element + // in the btree. + using Base::extract_and_get_next; + // btree_set::merge() // // Extracts elements from a given `source` btree_set into this @@ -427,7 +446,7 @@ class btree_multiset : public container_internal::btree_multiset_container< container_internal::btree>> { + /*IsMulti=*/true>>> { using Base = typename btree_multiset::btree_multiset_container; public: @@ -611,6 +630,21 @@ class btree_multiset // It does NOT refer to the data layout of the underlying btree. 
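The extract_and_get_next() API documented above also allows erasing while iterating without re-finding the position, since it hands back a valid iterator to the next element. A minimal sketch, assuming Abseil's btree headers; RemoveEvens is an illustrative helper.

#include "absl/container/btree_set.h"

void RemoveEvens(absl::btree_set<int>& s) {
  for (auto it = s.begin(); it != s.end();) {
    if (*it % 2 == 0) {
      auto extracted = s.extract_and_get_next(it);
      it = extracted.next;  // stay valid via the returned .next iterator
      // extracted.node owns the removed element and frees it on destruction.
    } else {
      ++it;
    }
  }
}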
using Base::extract; + // btree_multiset::extract_and_get_next() + // + // Extracts the indicated element, erasing it in the process, and returns it + // as a C++17-compatible node handle along with an iterator to the next + // element. + // + // extract_and_get_next_return_type extract_and_get_next( + // const_iterator position): + // + // Extracts the element at the indicated position, returns a struct + // containing a member named `node`: a node handle owning that extracted + // data and a member named `next`: an iterator pointing to the next element + // in the btree. + using Base::extract_and_get_next; + // btree_multiset::merge() // // Extracts all elements from a given `source` btree_multiset into this @@ -752,33 +786,24 @@ struct set_slot_policy { } template - static void destroy(Alloc *alloc, slot_type *slot) { - absl::allocator_traits::destroy(*alloc, slot); + static void construct(Alloc *alloc, slot_type *slot, const slot_type *other) { + absl::allocator_traits::construct(*alloc, slot, *other); } template - static void swap(Alloc * /*alloc*/, slot_type *a, slot_type *b) { - using std::swap; - swap(*a, *b); - } - - template - static void move(Alloc * /*alloc*/, slot_type *src, slot_type *dest) { - *dest = std::move(*src); + static void destroy(Alloc *alloc, slot_type *slot) { + absl::allocator_traits::destroy(*alloc, slot); } }; // A parameters structure for holding the type parameters for a btree_set. // Compare and Alloc should be nothrow copy-constructible. template -struct set_params : common_params> { + bool IsMulti> +struct set_params : common_params> { using value_type = Key; using slot_type = typename set_params::common_params::slot_type; - using value_compare = - typename set_params::common_params::original_key_compare; - using is_map_container = std::false_type; template static const V &key(const V &value) { diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/btree_test.cc b/TMessagesProj/jni/voip/webrtc/absl/container/btree_test.cc index e829e0bab4..28dda8a6fd 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/btree_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/container/btree_test.cc @@ -14,18 +14,24 @@ #include "absl/container/btree_test.h" +#include +#include #include #include +#include #include #include #include +#include #include #include #include #include +#include #include "gmock/gmock.h" #include "gtest/gtest.h" +#include "absl/algorithm/container.h" #include "absl/base/internal/raw_logging.h" #include "absl/base/macros.h" #include "absl/container/btree_map.h" @@ -35,7 +41,7 @@ #include "absl/flags/flag.h" #include "absl/hash/hash_testing.h" #include "absl/memory/memory.h" -#include "absl/meta/type_traits.h" +#include "absl/random/random.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_split.h" #include "absl/strings/string_view.h" @@ -68,6 +74,16 @@ void CheckPairEquals(const std::pair &x, const std::pair &y) { CheckPairEquals(x.first, y.first); CheckPairEquals(x.second, y.second); } + +bool IsAssertEnabled() { + // Use an assert with side-effects to figure out if they are actually enabled. + bool assert_enabled = false; + assert([&]() { // NOLINT + assert_enabled = true; + return true; + }()); + return assert_enabled; +} } // namespace // The base class for a sorted associative container checker. 
TreeType is the @@ -1291,7 +1307,7 @@ TEST(Btree, BtreeMapCanHoldMoveOnlyTypes) { std::unique_ptr &v = m["A"]; EXPECT_TRUE(v == nullptr); - v.reset(new std::string("X")); + v = absl::make_unique("X"); auto iter = m.find("A"); EXPECT_EQ("X", *iter->second); @@ -1645,10 +1661,9 @@ TEST(Btree, BtreeMultisetEmplace) { auto iter = s.emplace(value_to_insert); ASSERT_NE(iter, s.end()); EXPECT_EQ(*iter, value_to_insert); - auto iter2 = s.emplace(value_to_insert); - EXPECT_NE(iter2, iter); - ASSERT_NE(iter2, s.end()); - EXPECT_EQ(*iter2, value_to_insert); + iter = s.emplace(value_to_insert); + ASSERT_NE(iter, s.end()); + EXPECT_EQ(*iter, value_to_insert); auto result = s.equal_range(value_to_insert); EXPECT_EQ(std::distance(result.first, result.second), 2); } @@ -1659,44 +1674,45 @@ TEST(Btree, BtreeMultisetEmplaceHint) { auto iter = s.emplace(value_to_insert); ASSERT_NE(iter, s.end()); EXPECT_EQ(*iter, value_to_insert); - auto emplace_iter = s.emplace_hint(iter, value_to_insert); - EXPECT_NE(emplace_iter, iter); - ASSERT_NE(emplace_iter, s.end()); - EXPECT_EQ(*emplace_iter, value_to_insert); + iter = s.emplace_hint(iter, value_to_insert); + // The new element should be before the previously inserted one. + EXPECT_EQ(iter, s.lower_bound(value_to_insert)); + ASSERT_NE(iter, s.end()); + EXPECT_EQ(*iter, value_to_insert); } TEST(Btree, BtreeMultimapEmplace) { const int key_to_insert = 123456; const char value0[] = "a"; - absl::btree_multimap s; - auto iter = s.emplace(key_to_insert, value0); - ASSERT_NE(iter, s.end()); + absl::btree_multimap m; + auto iter = m.emplace(key_to_insert, value0); + ASSERT_NE(iter, m.end()); EXPECT_EQ(iter->first, key_to_insert); EXPECT_EQ(iter->second, value0); const char value1[] = "b"; - auto iter2 = s.emplace(key_to_insert, value1); - EXPECT_NE(iter2, iter); - ASSERT_NE(iter2, s.end()); - EXPECT_EQ(iter2->first, key_to_insert); - EXPECT_EQ(iter2->second, value1); - auto result = s.equal_range(key_to_insert); + iter = m.emplace(key_to_insert, value1); + ASSERT_NE(iter, m.end()); + EXPECT_EQ(iter->first, key_to_insert); + EXPECT_EQ(iter->second, value1); + auto result = m.equal_range(key_to_insert); EXPECT_EQ(std::distance(result.first, result.second), 2); } TEST(Btree, BtreeMultimapEmplaceHint) { const int key_to_insert = 123456; const char value0[] = "a"; - absl::btree_multimap s; - auto iter = s.emplace(key_to_insert, value0); - ASSERT_NE(iter, s.end()); + absl::btree_multimap m; + auto iter = m.emplace(key_to_insert, value0); + ASSERT_NE(iter, m.end()); EXPECT_EQ(iter->first, key_to_insert); EXPECT_EQ(iter->second, value0); const char value1[] = "b"; - auto emplace_iter = s.emplace_hint(iter, key_to_insert, value1); - EXPECT_NE(emplace_iter, iter); - ASSERT_NE(emplace_iter, s.end()); - EXPECT_EQ(emplace_iter->first, key_to_insert); - EXPECT_EQ(emplace_iter->second, value1); + iter = m.emplace_hint(iter, key_to_insert, value1); + // The new element should be before the previously inserted one. + EXPECT_EQ(iter, m.lower_bound(key_to_insert)); + ASSERT_NE(iter, m.end()); + EXPECT_EQ(iter->first, key_to_insert); + EXPECT_EQ(iter->second, value1); } TEST(Btree, ConstIteratorAccessors) { @@ -1762,6 +1778,22 @@ TEST(Btree, ValueComp) { EXPECT_FALSE(m2.value_comp()(std::make_pair("b", 0), std::make_pair("a", 0))); } +// Test that we have the protected members from the std::map::value_compare API. +// See https://en.cppreference.com/w/cpp/container/map/value_compare. 
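The multiset and multimap emplace_hint() tests above rely on a hinted insert of an equal key landing immediately before the hint, i.e. becoming the first element of the equal range. A small sketch of that behavior; HintedEmplaceExample is an illustrative helper.

#include <cassert>
#include <iterator>

#include "absl/container/btree_set.h"

void HintedEmplaceExample() {
  absl::btree_multiset<int> s;
  auto first = s.emplace(42);
  auto hinted = s.emplace_hint(first, 42);  // inserted just before `first`
  assert(hinted == s.lower_bound(42));
  auto range = s.equal_range(42);
  assert(std::distance(range.first, range.second) == 2);
}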
+TEST(Btree, MapValueCompProtected) { + struct key_compare { + bool operator()(int l, int r) const { return l < r; } + int id; + }; + using value_compare = absl::btree_map::value_compare; + struct value_comp_child : public value_compare { + explicit value_comp_child(key_compare kc) : value_compare(kc) {} + int GetId() const { return comp.id; } + }; + value_comp_child c(key_compare{10}); + EXPECT_EQ(c.GetId(), 10); +} + TEST(Btree, DefaultConstruction) { absl::btree_set s; absl::btree_map m; @@ -2091,6 +2123,79 @@ TEST(Btree, ExtractMultiMapEquivalentKeys) { } } +TEST(Btree, ExtractAndGetNextSet) { + absl::btree_set src = {1, 2, 3, 4, 5}; + auto it = src.find(3); + auto extracted_and_next = src.extract_and_get_next(it); + EXPECT_THAT(src, ElementsAre(1, 2, 4, 5)); + EXPECT_EQ(extracted_and_next.node.value(), 3); + EXPECT_EQ(*extracted_and_next.next, 4); +} + +TEST(Btree, ExtractAndGetNextMultiSet) { + absl::btree_multiset src = {1, 2, 3, 4, 5}; + auto it = src.find(3); + auto extracted_and_next = src.extract_and_get_next(it); + EXPECT_THAT(src, ElementsAre(1, 2, 4, 5)); + EXPECT_EQ(extracted_and_next.node.value(), 3); + EXPECT_EQ(*extracted_and_next.next, 4); +} + +TEST(Btree, ExtractAndGetNextMap) { + absl::btree_map src = {{1, 2}, {3, 4}, {5, 6}}; + auto it = src.find(3); + auto extracted_and_next = src.extract_and_get_next(it); + EXPECT_THAT(src, ElementsAre(Pair(1, 2), Pair(5, 6))); + EXPECT_EQ(extracted_and_next.node.key(), 3); + EXPECT_EQ(extracted_and_next.node.mapped(), 4); + EXPECT_THAT(*extracted_and_next.next, Pair(5, 6)); +} + +TEST(Btree, ExtractAndGetNextMultiMap) { + absl::btree_multimap src = {{1, 2}, {3, 4}, {5, 6}}; + auto it = src.find(3); + auto extracted_and_next = src.extract_and_get_next(it); + EXPECT_THAT(src, ElementsAre(Pair(1, 2), Pair(5, 6))); + EXPECT_EQ(extracted_and_next.node.key(), 3); + EXPECT_EQ(extracted_and_next.node.mapped(), 4); + EXPECT_THAT(*extracted_and_next.next, Pair(5, 6)); +} + +TEST(Btree, ExtractAndGetNextEndIter) { + absl::btree_set src = {1, 2, 3, 4, 5}; + auto it = src.find(5); + auto extracted_and_next = src.extract_and_get_next(it); + EXPECT_THAT(src, ElementsAre(1, 2, 3, 4)); + EXPECT_EQ(extracted_and_next.node.value(), 5); + EXPECT_EQ(extracted_and_next.next, src.end()); +} + +TEST(Btree, ExtractDoesntCauseExtraMoves) { +#ifdef _MSC_VER + GTEST_SKIP() << "This test fails on MSVC."; +#endif + + using Set = absl::btree_set; + std::array, 3> extracters = { + [](Set &s) { auto node = s.extract(s.begin()); }, + [](Set &s) { auto ret = s.extract_and_get_next(s.begin()); }, + [](Set &s) { auto node = s.extract(MovableOnlyInstance(0)); }}; + + InstanceTracker tracker; + for (int i = 0; i < 3; ++i) { + Set s; + s.insert(MovableOnlyInstance(0)); + tracker.ResetCopiesMovesSwaps(); + + extracters[i](s); + // We expect to see exactly 1 move: from the original slot into the + // extracted node. + EXPECT_EQ(tracker.copies(), 0) << i; + EXPECT_EQ(tracker.moves(), 1) << i; + EXPECT_EQ(tracker.swaps(), 0) << i; + } +} + // For multisets, insert with hint also affects correctness because we need to // insert immediately before the hint if possible. 
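The ExtractAndGetNext tests above show the shape of the returned struct; here is a standalone usage sketch of erasing while iterating with the new API, not part of the diff:

#include <iostream>
#include "absl/container/btree_set.h"

int main() {
  absl::btree_set<int> s = {1, 2, 3, 4, 5, 6};
  for (auto it = s.begin(); it != s.end();) {
    if (*it % 2 == 0) {
      // `node` owns the detached value and `next` already points at the
      // element after the hole, so the loop never touches a stale iterator.
      auto res = s.extract_and_get_next(it);
      std::cout << "extracted " << res.node.value() << "\n";
      it = res.next;
    } else {
      ++it;
    }
  }
  // s now holds {1, 3, 5}.
}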
struct InsertMultiHintData { @@ -2983,8 +3088,9 @@ TEST(Btree, ConstructImplicitlyWithUnadaptedComparator) { absl::btree_set set = {{}, MultiKeyComp{}}; } -#ifndef NDEBUG TEST(Btree, InvalidComparatorsCaught) { + if (!IsAssertEnabled()) GTEST_SKIP() << "Assertions not enabled."; + { struct ZeroAlwaysLessCmp { bool operator()(int lhs, int rhs) const { @@ -3032,7 +3138,6 @@ TEST(Btree, InvalidComparatorsCaught) { EXPECT_DEATH(set.insert({0, 1, 2}), "lhs_comp_rhs < 0 -> rhs_comp_lhs > 0"); } } -#endif #ifndef _MSC_VER // This test crashes on MSVC. @@ -3065,6 +3170,292 @@ TEST(Btree, InvalidIteratorUse) { } #endif +class OnlyConstructibleByAllocator { + explicit OnlyConstructibleByAllocator(int i) : i_(i) {} + + public: + OnlyConstructibleByAllocator(const OnlyConstructibleByAllocator &other) + : i_(other.i_) {} + OnlyConstructibleByAllocator &operator=( + const OnlyConstructibleByAllocator &other) { + i_ = other.i_; + return *this; + } + int Get() const { return i_; } + bool operator==(int i) const { return i_ == i; } + + private: + template + friend class OnlyConstructibleAllocator; + + int i_; +}; + +template +class OnlyConstructibleAllocator : public std::allocator { + public: + OnlyConstructibleAllocator() = default; + template + explicit OnlyConstructibleAllocator(const OnlyConstructibleAllocator &) {} + + void construct(OnlyConstructibleByAllocator *p, int i) { + new (p) OnlyConstructibleByAllocator(i); + } + template + void construct(Pair *p, const int i) { + OnlyConstructibleByAllocator only(i); + new (p) Pair(std::move(only), i); + } + + template + struct rebind { + using other = OnlyConstructibleAllocator; + }; +}; + +struct OnlyConstructibleByAllocatorComp { + using is_transparent = void; + bool operator()(OnlyConstructibleByAllocator a, + OnlyConstructibleByAllocator b) const { + return a.Get() < b.Get(); + } + bool operator()(int a, OnlyConstructibleByAllocator b) const { + return a < b.Get(); + } + bool operator()(OnlyConstructibleByAllocator a, int b) const { + return a.Get() < b; + } +}; + +TEST(Btree, OnlyConstructibleByAllocatorType) { + const std::array arr = {3, 4}; + { + absl::btree_set> + set; + set.emplace(1); + set.emplace_hint(set.end(), 2); + set.insert(arr.begin(), arr.end()); + EXPECT_THAT(set, ElementsAre(1, 2, 3, 4)); + } + { + absl::btree_multiset> + set; + set.emplace(1); + set.emplace_hint(set.end(), 2); + // TODO(ezb): fix insert_multi to allow this to compile. + // set.insert(arr.begin(), arr.end()); + EXPECT_THAT(set, ElementsAre(1, 2)); + } + { + absl::btree_map> + map; + map.emplace(1); + map.emplace_hint(map.end(), 2); + map.insert(arr.begin(), arr.end()); + EXPECT_THAT(map, + ElementsAre(Pair(1, 1), Pair(2, 2), Pair(3, 3), Pair(4, 4))); + } + { + absl::btree_multimap> + map; + map.emplace(1); + map.emplace_hint(map.end(), 2); + // TODO(ezb): fix insert_multi to allow this to compile. 
+ // map.insert(arr.begin(), arr.end()); + EXPECT_THAT(map, ElementsAre(Pair(1, 1), Pair(2, 2))); + } +} + +class NotAssignable { + public: + explicit NotAssignable(int i) : i_(i) {} + NotAssignable(const NotAssignable &other) : i_(other.i_) {} + NotAssignable &operator=(NotAssignable &&other) = delete; + int Get() const { return i_; } + bool operator==(int i) const { return i_ == i; } + friend bool operator<(NotAssignable a, NotAssignable b) { + return a.i_ < b.i_; + } + + private: + int i_; +}; + +TEST(Btree, NotAssignableType) { + { + absl::btree_set set; + set.emplace(1); + set.emplace_hint(set.end(), 2); + set.insert(NotAssignable(3)); + set.insert(set.end(), NotAssignable(4)); + EXPECT_THAT(set, ElementsAre(1, 2, 3, 4)); + set.erase(set.begin()); + EXPECT_THAT(set, ElementsAre(2, 3, 4)); + } + { + absl::btree_multiset set; + set.emplace(1); + set.emplace_hint(set.end(), 2); + set.insert(NotAssignable(2)); + set.insert(set.end(), NotAssignable(3)); + EXPECT_THAT(set, ElementsAre(1, 2, 2, 3)); + set.erase(set.begin()); + EXPECT_THAT(set, ElementsAre(2, 2, 3)); + } + { + absl::btree_map map; + map.emplace(NotAssignable(1), 1); + map.emplace_hint(map.end(), NotAssignable(2), 2); + map.insert({NotAssignable(3), 3}); + map.insert(map.end(), {NotAssignable(4), 4}); + EXPECT_THAT(map, + ElementsAre(Pair(1, 1), Pair(2, 2), Pair(3, 3), Pair(4, 4))); + map.erase(map.begin()); + EXPECT_THAT(map, ElementsAre(Pair(2, 2), Pair(3, 3), Pair(4, 4))); + } + { + absl::btree_multimap map; + map.emplace(NotAssignable(1), 1); + map.emplace_hint(map.end(), NotAssignable(2), 2); + map.insert({NotAssignable(2), 3}); + map.insert(map.end(), {NotAssignable(3), 3}); + EXPECT_THAT(map, + ElementsAre(Pair(1, 1), Pair(2, 2), Pair(2, 3), Pair(3, 3))); + map.erase(map.begin()); + EXPECT_THAT(map, ElementsAre(Pair(2, 2), Pair(2, 3), Pair(3, 3))); + } +} + +struct ArenaLike { + void* recycled = nullptr; + size_t recycled_size = 0; +}; + +// A very simple implementation of arena allocation. +template +class ArenaLikeAllocator : public std::allocator { + public: + // Standard library containers require the ability to allocate objects of + // different types which they can do so via rebind.other. + template + struct rebind { + using other = ArenaLikeAllocator; + }; + + explicit ArenaLikeAllocator(ArenaLike* arena) noexcept : arena_(arena) {} + + ~ArenaLikeAllocator() { + if (arena_->recycled != nullptr) { + delete [] static_cast(arena_->recycled); + arena_->recycled = nullptr; + } + } + + template + explicit ArenaLikeAllocator(const ArenaLikeAllocator& other) noexcept + : arena_(other.arena_) {} + + T* allocate(size_t num_objects, const void* = nullptr) { + size_t size = num_objects * sizeof(T); + if (arena_->recycled != nullptr && arena_->recycled_size == size) { + T* result = static_cast(arena_->recycled); + arena_->recycled = nullptr; + return result; + } + return new T[num_objects]; + } + + void deallocate(T* p, size_t num_objects) { + size_t size = num_objects * sizeof(T); + + // Simulate writing to the freed memory as an actual arena allocator might + // do. This triggers an error report if the memory is poisoned. + memset(p, 0xde, size); + + if (arena_->recycled == nullptr) { + arena_->recycled = p; + arena_->recycled_size = size; + } else { + delete [] p; + } + } + + ArenaLike* arena_; +}; + +// This test verifies that an arena allocator that reuses memory will not be +// asked to free poisoned BTree memory. 
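ArenaLikeAllocator above and the ReusePoisonMemory test that follows depend on btree cooperating with stateful, rebinding allocators. A smaller standalone sketch in the same spirit, not part of the diff (CountingAllocator and the byte counter are illustrative), which simply records how many bytes a btree_set requests:

#include <cstddef>
#include <functional>
#include <iostream>
#include <memory>
#include "absl/container/btree_set.h"

template <typename T>
class CountingAllocator : public std::allocator<T> {
 public:
  // Containers rebind the allocator to allocate their internal node types.
  template <typename U>
  struct rebind {
    using other = CountingAllocator<U>;
  };

  explicit CountingAllocator(std::size_t* bytes) noexcept : bytes_(bytes) {}
  template <typename U>
  CountingAllocator(const CountingAllocator<U>& other) noexcept
      : bytes_(other.bytes_) {}

  T* allocate(std::size_t n) {
    *bytes_ += n * sizeof(T);  // count the bytes, then defer to std::allocator
    return std::allocator<T>::allocate(n);
  }

  std::size_t* bytes_;
};

int main() {
  std::size_t bytes = 0;
  using Set = absl::btree_set<int, std::less<int>, CountingAllocator<int>>;
  Set s(std::less<int>(), CountingAllocator<int>(&bytes));
  for (int i = 0; i < 1000; ++i) s.insert(i);
  std::cout << "bytes requested: " << bytes << "\n";
}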
+TEST(Btree, ReusePoisonMemory) { + using Alloc = ArenaLikeAllocator; + using Set = absl::btree_set, Alloc>; + ArenaLike arena; + Alloc alloc(&arena); + Set set(alloc); + + set.insert(0); + set.erase(0); + set.insert(0); +} + +TEST(Btree, IteratorSubtraction) { + absl::BitGen bitgen; + std::vector vec; + // Randomize the set's insertion order so the nodes aren't all full. + for (int i = 0; i < 1000000; ++i) vec.push_back(i); + absl::c_shuffle(vec, bitgen); + + absl::btree_set set; + for (int i : vec) set.insert(i); + + for (int i = 0; i < 1000; ++i) { + size_t begin = absl::Uniform(bitgen, 0u, set.size()); + size_t end = absl::Uniform(bitgen, begin, set.size()); + ASSERT_EQ(end - begin, set.find(end) - set.find(begin)) + << begin << " " << end; + } +} + +TEST(Btree, DereferencingEndIterator) { + if (!IsAssertEnabled()) GTEST_SKIP() << "Assertions not enabled."; + + absl::btree_set set; + for (int i = 0; i < 1000; ++i) set.insert(i); + EXPECT_DEATH(*set.end(), R"regex(Dereferencing end\(\) iterator)regex"); +} + +TEST(Btree, InvalidIteratorComparison) { + if (!IsAssertEnabled()) GTEST_SKIP() << "Assertions not enabled."; + + absl::btree_set set1, set2; + for (int i = 0; i < 1000; ++i) { + set1.insert(i); + set2.insert(i); + } + + constexpr const char *kValueInitDeathMessage = + "Comparing default-constructed iterator with .*non-default-constructed " + "iterator"; + typename absl::btree_set::iterator iter1, iter2; + EXPECT_EQ(iter1, iter2); + EXPECT_DEATH(void(set1.begin() == iter1), kValueInitDeathMessage); + EXPECT_DEATH(void(iter1 == set1.begin()), kValueInitDeathMessage); + + constexpr const char *kDifferentContainerDeathMessage = + "Comparing iterators from different containers"; + iter1 = set1.begin(); + iter2 = set2.begin(); + EXPECT_DEATH(void(iter1 == iter2), kDifferentContainerDeathMessage); + EXPECT_DEATH(void(iter2 == iter1), kDifferentContainerDeathMessage); +} + } // namespace } // namespace container_internal ABSL_NAMESPACE_END diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/fixed_array.h b/TMessagesProj/jni/voip/webrtc/absl/container/fixed_array.h index 839ba0bc16..5543243042 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/fixed_array.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/fixed_array.h @@ -471,6 +471,9 @@ class FixedArray { return n <= inline_elements; } +#ifdef ABSL_HAVE_ADDRESS_SANITIZER + ABSL_ATTRIBUTE_NOINLINE +#endif // ABSL_HAVE_ADDRESS_SANITIZER StorageElement* InitializeData() { if (UsingInlinedStorage(size())) { InlinedStorage::AnnotateConstruct(size()); @@ -489,12 +492,14 @@ class FixedArray { Storage storage_; }; +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL template constexpr size_t FixedArray::kInlineBytesDefault; template constexpr typename FixedArray::size_type FixedArray::inline_elements; +#endif template void FixedArray::NonEmptyInlinedStorage::AnnotateConstruct( diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/fixed_array_benchmark.cc b/TMessagesProj/jni/voip/webrtc/absl/container/fixed_array_benchmark.cc index 3c7a5a7234..db6663e60d 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/fixed_array_benchmark.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/container/fixed_array_benchmark.cc @@ -16,8 +16,8 @@ #include -#include "benchmark/benchmark.h" #include "absl/container/fixed_array.h" +#include "benchmark/benchmark.h" namespace { diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/flat_hash_map.h b/TMessagesProj/jni/voip/webrtc/absl/container/flat_hash_map.h index 83c71029d4..e6bdbd9e4f 100644 --- 
a/TMessagesProj/jni/voip/webrtc/absl/container/flat_hash_map.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/flat_hash_map.h @@ -76,6 +76,10 @@ struct FlatHashMapPolicy; // absl/hash/hash.h for information on extending Abseil hashing to user-defined // types. // +// Using `absl::flat_hash_map` at interface boundaries in dynamically loaded +// libraries (e.g. .dll, .so) is unsupported due to way `absl::Hash` values may +// be randomized across dynamically loaded libraries. +// // NOTE: A `flat_hash_map` stores its value types directly inside its // implementation array to avoid memory indirection. Because a `flat_hash_map` // is designed to move data when rehashed, map values will not retain pointer @@ -357,8 +361,8 @@ class flat_hash_map : public absl::container_internal::raw_hash_map< // `flat_hash_map`. // // iterator try_emplace(const_iterator hint, - // const init_type& k, Args&&... args): - // iterator try_emplace(const_iterator hint, init_type&& k, Args&&... args): + // const key_type& k, Args&&... args): + // iterator try_emplace(const_iterator hint, key_type&& k, Args&&... args): // // Inserts (via copy or move) the element of the specified key into the // `flat_hash_map` using the position of `hint` as a non-binding suggestion diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/flat_hash_set.h b/TMessagesProj/jni/voip/webrtc/absl/container/flat_hash_set.h index 0fb2ae6fe9..f5376f991a 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/flat_hash_set.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/flat_hash_set.h @@ -72,6 +72,10 @@ struct FlatHashSetPolicy; // absl/hash/hash.h for information on extending Abseil hashing to user-defined // types. // +// Using `absl::flat_hash_set` at interface boundaries in dynamically loaded +// libraries (e.g. .dll, .so) is unsupported due to way `absl::Hash` values may +// be randomized across dynamically loaded libraries. +// // NOTE: A `flat_hash_set` stores its keys directly inside its implementation // array to avoid memory indirection. Because a `flat_hash_set` is designed to // move data when rehashed, set keys will not retain pointer stability. 
If you @@ -470,13 +474,6 @@ struct FlatHashSetPolicy { absl::allocator_traits::destroy(*alloc, slot); } - template - static void transfer(Allocator* alloc, slot_type* new_slot, - slot_type* old_slot) { - construct(alloc, new_slot, std::move(*old_slot)); - destroy(alloc, old_slot); - } - static T& element(slot_type* slot) { return *slot; } template diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/inlined_vector.h b/TMessagesProj/jni/voip/webrtc/absl/container/inlined_vector.h index 711b29c18c..7058f375e7 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/inlined_vector.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/inlined_vector.h @@ -52,6 +52,7 @@ #include "absl/base/port.h" #include "absl/container/internal/inlined_vector.h" #include "absl/memory/memory.h" +#include "absl/meta/type_traits.h" namespace absl { ABSL_NAMESPACE_BEGIN @@ -77,6 +78,8 @@ class InlinedVector { using MoveIterator = inlined_vector_internal::MoveIterator; template using IsMemcpyOk = inlined_vector_internal::IsMemcpyOk; + template + using IsMoveAssignOk = inlined_vector_internal::IsMoveAssignOk; template using IteratorValueAdapter = @@ -94,6 +97,12 @@ class InlinedVector { using DisableIfAtLeastForwardIterator = absl::enable_if_t< !inlined_vector_internal::IsAtLeastForwardIterator::value, int>; + using MemcpyPolicy = typename Storage::MemcpyPolicy; + using ElementwiseAssignPolicy = typename Storage::ElementwiseAssignPolicy; + using ElementwiseConstructPolicy = + typename Storage::ElementwiseConstructPolicy; + using MoveAssignmentPolicy = typename Storage::MoveAssignmentPolicy; + public: using allocator_type = A; using value_type = inlined_vector_internal::ValueType; @@ -275,8 +284,10 @@ class InlinedVector { size_type max_size() const noexcept { // One bit of the size storage is used to indicate whether the inlined // vector contains allocated memory. As a result, the maximum size that the - // inlined vector can express is half of the max for `size_type`. - return (std::numeric_limits::max)() / 2; + // inlined vector can express is the minimum of the limit of how many + // objects we can allocate and std::numeric_limits::max() / 2. + return (std::min)(AllocatorTraits::max_size(storage_.GetAllocator()), + (std::numeric_limits::max)() / 2); } // `InlinedVector::capacity()` @@ -484,18 +495,7 @@ class InlinedVector { // unspecified state. InlinedVector& operator=(InlinedVector&& other) { if (ABSL_PREDICT_TRUE(this != std::addressof(other))) { - if (IsMemcpyOk::value || other.storage_.GetIsAllocated()) { - inlined_vector_internal::DestroyAdapter::DestroyElements( - storage_.GetAllocator(), data(), size()); - storage_.DeallocateIfAllocated(); - storage_.MemcpyFrom(other.storage_); - - other.storage_.SetInlinedSize(0); - } else { - storage_.Assign(IteratorValueAdapter>( - MoveIterator(other.storage_.GetInlinedData())), - other.size()); - } + MoveAssignment(MoveAssignmentPolicy{}, std::move(other)); } return *this; @@ -585,8 +585,20 @@ class InlinedVector { if (ABSL_PREDICT_TRUE(n != 0)) { value_type dealias = v; + // https://gcc.gnu.org/bugzilla/show_bug.cgi?id=102329#c2 + // It appears that GCC thinks that since `pos` is a const pointer and may + // point to uninitialized memory at this point, a warning should be + // issued. But `pos` is actually only used to compute an array index to + // write to. 
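The comment above describes a GCC-only false positive, so the suppression that follows is guarded to skip Clang (which also defines __GNUC__). The bare pattern, shown as a sketch rather than as part of the diff:

#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
#endif
  // ... code that GCC flags spuriously goes here ...
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic pop
#endif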
+#if !defined(__clang__) && defined(__GNUC__) +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wmaybe-uninitialized" +#endif return storage_.Insert(pos, CopyValueAdapter(std::addressof(dealias)), n); +#if !defined(__clang__) && defined(__GNUC__) +#pragma GCC diagnostic pop +#endif } else { return const_cast(pos); } @@ -612,9 +624,9 @@ class InlinedVector { ABSL_HARDENING_ASSERT(pos <= end()); if (ABSL_PREDICT_TRUE(first != last)) { - return storage_.Insert(pos, - IteratorValueAdapter(first), - std::distance(first, last)); + return storage_.Insert( + pos, IteratorValueAdapter(first), + static_cast(std::distance(first, last))); } else { return const_cast(pos); } @@ -631,7 +643,7 @@ class InlinedVector { ABSL_HARDENING_ASSERT(pos >= begin()); ABSL_HARDENING_ASSERT(pos <= end()); - size_type index = std::distance(cbegin(), pos); + size_type index = static_cast(std::distance(cbegin(), pos)); for (size_type i = index; first != last; ++i, static_cast(++first)) { insert(data() + i, *first); } @@ -649,10 +661,22 @@ class InlinedVector { ABSL_HARDENING_ASSERT(pos <= end()); value_type dealias(std::forward(args)...); + // https://gcc.gnu.org/bugzilla/show_bug.cgi?id=102329#c2 + // It appears that GCC thinks that since `pos` is a const pointer and may + // point to uninitialized memory at this point, a warning should be + // issued. But `pos` is actually only used to compute an array index to + // write to. +#if !defined(__clang__) && defined(__GNUC__) +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wmaybe-uninitialized" +#endif return storage_.Insert(pos, IteratorValueAdapter>( MoveIterator(std::addressof(dealias))), 1); +#if !defined(__clang__) && defined(__GNUC__) +#pragma GCC diagnostic pop +#endif } // `InlinedVector::emplace_back(...)` @@ -759,6 +783,42 @@ class InlinedVector { template friend H AbslHashValue(H h, const absl::InlinedVector& a); + void MoveAssignment(MemcpyPolicy, InlinedVector&& other) { + inlined_vector_internal::DestroyAdapter::DestroyElements( + storage_.GetAllocator(), data(), size()); + storage_.DeallocateIfAllocated(); + storage_.MemcpyFrom(other.storage_); + + other.storage_.SetInlinedSize(0); + } + + void MoveAssignment(ElementwiseAssignPolicy, InlinedVector&& other) { + if (other.storage_.GetIsAllocated()) { + MoveAssignment(MemcpyPolicy{}, std::move(other)); + } else { + storage_.Assign(IteratorValueAdapter>( + MoveIterator(other.storage_.GetInlinedData())), + other.size()); + } + } + + void MoveAssignment(ElementwiseConstructPolicy, InlinedVector&& other) { + if (other.storage_.GetIsAllocated()) { + MoveAssignment(MemcpyPolicy{}, std::move(other)); + } else { + inlined_vector_internal::DestroyAdapter::DestroyElements( + storage_.GetAllocator(), data(), size()); + storage_.DeallocateIfAllocated(); + + IteratorValueAdapter> other_values( + MoveIterator(other.storage_.GetInlinedData())); + inlined_vector_internal::ConstructElements( + storage_.GetAllocator(), storage_.GetInlinedData(), other_values, + other.storage_.GetSize()); + storage_.SetInlinedSize(other.storage_.GetSize()); + } + } + Storage storage_; }; diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/inlined_vector_benchmark.cc b/TMessagesProj/jni/voip/webrtc/absl/container/inlined_vector_benchmark.cc index e256fad60f..56a6bfd23a 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/inlined_vector_benchmark.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/container/inlined_vector_benchmark.cc @@ -16,11 +16,11 @@ #include #include -#include "benchmark/benchmark.h" #include 
"absl/base/internal/raw_logging.h" #include "absl/base/macros.h" #include "absl/container/inlined_vector.h" #include "absl/strings/str_cat.h" +#include "benchmark/benchmark.h" namespace { diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/btree.h b/TMessagesProj/jni/voip/webrtc/absl/container/internal/btree.h index 6c10b00f47..ab75afb403 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/btree.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/btree.h @@ -61,6 +61,7 @@ #include "absl/base/internal/raw_logging.h" #include "absl/base/macros.h" #include "absl/container/internal/common.h" +#include "absl/container/internal/common_policy_traits.h" #include "absl/container/internal/compressed_tuple.h" #include "absl/container/internal/container_memory.h" #include "absl/container/internal/layout.h" @@ -100,7 +101,7 @@ struct StringBtreeDefaultLess { StringBtreeDefaultLess() = default; // Compatibility constructor. - StringBtreeDefaultLess(std::less) {} // NOLINT + StringBtreeDefaultLess(std::less) {} // NOLINT StringBtreeDefaultLess(std::less) {} // NOLINT // Allow converting to std::less for use in key_comp()/value_comp(). @@ -132,7 +133,7 @@ struct StringBtreeDefaultGreater { StringBtreeDefaultGreater() = default; - StringBtreeDefaultGreater(std::greater) {} // NOLINT + StringBtreeDefaultGreater(std::greater) {} // NOLINT StringBtreeDefaultGreater(std::greater) {} // NOLINT // Allow converting to std::greater for use in key_comp()/value_comp(). @@ -191,8 +192,8 @@ struct key_compare_adapter { // Inherit from checked_compare_base to support function pointers and also // keep empty-base-optimization (EBO) support for classes. // Note: we can't use CompressedTuple here because that would interfere - // with the EBO for `btree::root_`. `btree::root_` is itself a CompressedTuple - // and nested `CompressedTuple`s don't support EBO. + // with the EBO for `btree::rightmost_`. `btree::rightmost_` is itself a + // CompressedTuple and nested `CompressedTuple`s don't support EBO. // TODO(b/214288561): use CompressedTuple instead once it supports EBO for // nested `CompressedTuple`s. struct checked_compare : checked_compare_base { @@ -335,9 +336,28 @@ constexpr bool compare_has_valid_result_type() { std::is_convertible::value; } +template +class map_value_compare { + template + friend class btree; + + // Note: this `protected` is part of the API of std::map::value_compare. See + // https://en.cppreference.com/w/cpp/container/map/value_compare. 
+ protected: + explicit map_value_compare(original_key_compare c) : comp(std::move(c)) {} + + original_key_compare comp; // NOLINT + + public: + auto operator()(const value_type &lhs, const value_type &rhs) const + -> decltype(comp(lhs.first, rhs.first)) { + return comp(lhs.first, rhs.first); + } +}; + template -struct common_params { + bool IsMulti, bool IsMap, typename SlotPolicy> +struct common_params : common_policy_traits { using original_key_compare = Compare; // If Compare is a common comparator for a string-like type, then we adapt it @@ -357,8 +377,7 @@ struct common_params { std::is_same::value || std::is_same::value; static constexpr bool kIsKeyCompareTransparent = - IsTransparent::value || - kIsKeyCompareStringAdapted; + IsTransparent::value || kIsKeyCompareStringAdapted; static constexpr bool kEnableGenerations = #ifdef ABSL_BTREE_ENABLE_GENERATIONS true; @@ -384,6 +403,12 @@ struct common_params { using reference = value_type &; using const_reference = const value_type &; + using value_compare = + absl::conditional_t, + original_key_compare>; + using is_map_container = std::integral_constant; + // For the given lookup key type, returns whether we can have multiple // equivalent keys in the btree. If this is a multi-container, then we can. // Otherwise, we can have multiple equivalent keys only if all of the @@ -394,9 +419,9 @@ struct common_params { // that we know has the same equivalence classes for all lookup types. template constexpr static bool can_have_multiple_equivalent_keys() { - return Multi || (IsTransparent::value && - !std::is_same::value && - !kIsKeyCompareStringAdapted); + return IsMulti || (IsTransparent::value && + !std::is_same::value && + !kIsKeyCompareStringAdapted); } enum { @@ -405,8 +430,7 @@ struct common_params { // Upper bound for the available space for slots. This is largest for leaf // nodes, which have overhead of at least a pointer + 4 bytes (for storing // 3 field_types and an enum). - kNodeSlotSpace = - TargetNodeSize - /*minimum overhead=*/(sizeof(void *) + 4), + kNodeSlotSpace = TargetNodeSize - /*minimum overhead=*/(sizeof(void *) + 4), }; // This is an integral type large enough to hold as many slots as will fit a @@ -415,35 +439,6 @@ struct common_params { absl::conditional_t<(kNodeSlotSpace / sizeof(slot_type) > (std::numeric_limits::max)()), uint16_t, uint8_t>; // NOLINT - - // The following methods are necessary for passing this struct as PolicyTraits - // for node_handle and/or are used within btree. - static value_type &element(slot_type *slot) { - return slot_policy::element(slot); - } - static const value_type &element(const slot_type *slot) { - return slot_policy::element(slot); - } - template - static void construct(Alloc *alloc, slot_type *slot, Args &&... 
args) { - slot_policy::construct(alloc, slot, std::forward(args)...); - } - static void construct(Alloc *alloc, slot_type *slot, slot_type *other) { - slot_policy::construct(alloc, slot, other); - } - static void destroy(Alloc *alloc, slot_type *slot) { - slot_policy::destroy(alloc, slot); - } - static void transfer(Alloc *alloc, slot_type *new_slot, slot_type *old_slot) { - construct(alloc, new_slot, old_slot); - destroy(alloc, old_slot); - } - static void swap(Alloc *alloc, slot_type *a, slot_type *b) { - slot_policy::swap(alloc, a, b); - } - static void move(Alloc *alloc, slot_type *src, slot_type *dest) { - slot_policy::move(alloc, src, dest); - } }; // An adapter class that converts a lower-bound compare into an upper-bound @@ -480,8 +475,8 @@ struct SearchResult { template struct SearchResult { SearchResult() {} - explicit SearchResult(V value) : value(value) {} - SearchResult(V value, MatchKind /*match*/) : value(value) {} + explicit SearchResult(V v) : value(v) {} + SearchResult(V v, MatchKind /*match*/) : value(v) {} V value; @@ -607,35 +602,36 @@ class btree_node { // Compute how many values we can fit onto a leaf node taking into account // padding. - constexpr static size_type NodeTargetSlots(const int begin, const int end) { + constexpr static size_type NodeTargetSlots(const size_type begin, + const size_type end) { return begin == end ? begin - : SizeWithNSlots((begin + end) / 2 + 1) > - params_type::kTargetNodeSize - ? NodeTargetSlots(begin, (begin + end) / 2) - : NodeTargetSlots((begin + end) / 2 + 1, end); + : SizeWithNSlots((begin + end) / 2 + 1) > + params_type::kTargetNodeSize + ? NodeTargetSlots(begin, (begin + end) / 2) + : NodeTargetSlots((begin + end) / 2 + 1, end); } - enum { - kTargetNodeSize = params_type::kTargetNodeSize, - kNodeTargetSlots = NodeTargetSlots(0, params_type::kTargetNodeSize), - - // We need a minimum of 3 slots per internal node in order to perform - // splitting (1 value for the two nodes involved in the split and 1 value - // propagated to the parent as the delimiter for the split). For performance - // reasons, we don't allow 3 slots-per-node due to bad worst case occupancy - // of 1/3 (for a node, not a b-tree). - kMinNodeSlots = 4, - - kNodeSlots = - kNodeTargetSlots >= kMinNodeSlots ? kNodeTargetSlots : kMinNodeSlots, - - // The node is internal (i.e. is not a leaf node) if and only if `max_count` - // has this value. - kInternalNodeMaxCount = 0, - }; + constexpr static size_type kTargetNodeSize = params_type::kTargetNodeSize; + constexpr static size_type kNodeTargetSlots = + NodeTargetSlots(0, kTargetNodeSize); + + // We need a minimum of 3 slots per internal node in order to perform + // splitting (1 value for the two nodes involved in the split and 1 value + // propagated to the parent as the delimiter for the split). For performance + // reasons, we don't allow 3 slots-per-node due to bad worst case occupancy of + // 1/3 (for a node, not a b-tree). + constexpr static size_type kMinNodeSlots = 4; + + constexpr static size_type kNodeSlots = + kNodeTargetSlots >= kMinNodeSlots ? kNodeTargetSlots : kMinNodeSlots; + + // The node is internal (i.e. is not a leaf node) if and only if `max_count` + // has this value. + constexpr static field_type kInternalNodeMaxCount = 0; // Leaves can have less than kNodeSlots values. 
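NodeTargetSlots above performs a compile-time binary search for the largest slot count whose node layout still fits kTargetNodeSize. A simplified standalone sketch of the same idea, not part of the diff (the flat overhead constant and the TargetSlots name are illustrative; the real layout is computed by LeafLayout below):

#include <cstddef>
#include <cstdio>

constexpr std::size_t kIllustrativeOverhead = sizeof(void*) + 4;

// Binary search for the largest n in [lo, hi] whose simplified node size
// (overhead + n * slot_size) still fits in node_size, assuming it fits at lo.
constexpr std::size_t TargetSlots(std::size_t slot_size, std::size_t node_size,
                                  std::size_t lo, std::size_t hi) {
  return lo == hi
             ? lo
             : (kIllustrativeOverhead + ((lo + hi) / 2 + 1) * slot_size >
                        node_size
                    ? TargetSlots(slot_size, node_size, lo, (lo + hi) / 2)
                    : TargetSlots(slot_size, node_size, (lo + hi) / 2 + 1, hi));
}

int main() {
  // How many 8-byte slots fit in a 256-byte node under this simplified model?
  constexpr std::size_t n =
      TargetSlots(/*slot_size=*/8, /*node_size=*/256, /*lo=*/0, /*hi=*/256);
  static_assert(kIllustrativeOverhead + n * 8 <= 256, "node must fit");
  std::printf("slots: %zu\n", n);
}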
- constexpr static layout_type LeafLayout(const int slot_count = kNodeSlots) { + constexpr static layout_type LeafLayout( + const size_type slot_count = kNodeSlots) { return layout_type( /*parent*/ 1, /*generation*/ params_type::kEnableGenerations ? 1 : 0, @@ -651,7 +647,7 @@ class btree_node { /*slots*/ kNodeSlots, /*children*/ kNodeSlots + 1); } - constexpr static size_type LeafSize(const int slot_count = kNodeSlots) { + constexpr static size_type LeafSize(const size_type slot_count = kNodeSlots) { return LeafLayout(slot_count).AllocSize(); } constexpr static size_type InternalSize() { @@ -674,10 +670,10 @@ class btree_node { } void set_parent(btree_node *p) { *GetField<0>() = p; } field_type &mutable_finish() { return GetField<2>()[2]; } - slot_type *slot(int i) { return &GetField<3>()[i]; } + slot_type *slot(size_type i) { return &GetField<3>()[i]; } slot_type *start_slot() { return slot(start()); } slot_type *finish_slot() { return slot(finish()); } - const slot_type *slot(int i) const { return &GetField<3>()[i]; } + const slot_type *slot(size_type i) const { return &GetField<3>()[i]; } void set_position(field_type v) { GetField<2>()[0] = v; } void set_start(field_type v) { GetField<2>()[1] = v; } void set_finish(field_type v) { GetField<2>()[2] = v; } @@ -754,51 +750,53 @@ class btree_node { } // Getters for the key/value at position i in the node. - const key_type &key(int i) const { return params_type::key(slot(i)); } - reference value(int i) { return params_type::element(slot(i)); } - const_reference value(int i) const { return params_type::element(slot(i)); } + const key_type &key(size_type i) const { return params_type::key(slot(i)); } + reference value(size_type i) { return params_type::element(slot(i)); } + const_reference value(size_type i) const { + return params_type::element(slot(i)); + } // Getters/setter for the child at position i in the node. - btree_node *child(int i) const { return GetField<4>()[i]; } + btree_node *child(field_type i) const { return GetField<4>()[i]; } btree_node *start_child() const { return child(start()); } - btree_node *&mutable_child(int i) { return GetField<4>()[i]; } - void clear_child(int i) { + btree_node *&mutable_child(field_type i) { return GetField<4>()[i]; } + void clear_child(field_type i) { absl::container_internal::SanitizerPoisonObject(&mutable_child(i)); } - void set_child(int i, btree_node *c) { + void set_child(field_type i, btree_node *c) { absl::container_internal::SanitizerUnpoisonObject(&mutable_child(i)); mutable_child(i) = c; c->set_position(i); } - void init_child(int i, btree_node *c) { + void init_child(field_type i, btree_node *c) { set_child(i, c); c->set_parent(this); } // Returns the position of the first value whose key is not less than k. template - SearchResult lower_bound( + SearchResult lower_bound( const K &k, const key_compare &comp) const { return use_linear_search::value ? linear_search(k, comp) : binary_search(k, comp); } // Returns the position of the first value whose key is greater than k. template - int upper_bound(const K &k, const key_compare &comp) const { + size_type upper_bound(const K &k, const key_compare &comp) const { auto upper_compare = upper_bound_adapter(comp); return use_linear_search::value ? 
linear_search(k, upper_compare).value : binary_search(k, upper_compare).value; } template - SearchResult::value> + SearchResult::value> linear_search(const K &k, const Compare &comp) const { return linear_search_impl(k, start(), finish(), comp, btree_is_key_compare_to()); } template - SearchResult::value> + SearchResult::value> binary_search(const K &k, const Compare &comp) const { return binary_search_impl(k, start(), finish(), comp, btree_is_key_compare_to()); @@ -807,8 +805,8 @@ class btree_node { // Returns the position of the first value whose key is not less than k using // linear search performed using plain compare. template - SearchResult linear_search_impl( - const K &k, int s, const int e, const Compare &comp, + SearchResult linear_search_impl( + const K &k, size_type s, const size_type e, const Compare &comp, std::false_type /* IsCompareTo */) const { while (s < e) { if (!comp(key(s), k)) { @@ -816,14 +814,14 @@ class btree_node { } ++s; } - return SearchResult{s}; + return SearchResult{s}; } // Returns the position of the first value whose key is not less than k using // linear search performed using compare-to. template - SearchResult linear_search_impl( - const K &k, int s, const int e, const Compare &comp, + SearchResult linear_search_impl( + const K &k, size_type s, const size_type e, const Compare &comp, std::true_type /* IsCompareTo */) const { while (s < e) { const absl::weak_ordering c = comp(key(s), k); @@ -840,30 +838,30 @@ class btree_node { // Returns the position of the first value whose key is not less than k using // binary search performed using plain compare. template - SearchResult binary_search_impl( - const K &k, int s, int e, const Compare &comp, + SearchResult binary_search_impl( + const K &k, size_type s, size_type e, const Compare &comp, std::false_type /* IsCompareTo */) const { while (s != e) { - const int mid = (s + e) >> 1; + const size_type mid = (s + e) >> 1; if (comp(key(mid), k)) { s = mid + 1; } else { e = mid; } } - return SearchResult{s}; + return SearchResult{s}; } // Returns the position of the first value whose key is not less than k using // binary search performed using compare-to. template - SearchResult binary_search_impl( - const K &k, int s, int e, const CompareTo &comp, + SearchResult binary_search_impl( + const K &k, size_type s, size_type e, const CompareTo &comp, std::true_type /* IsCompareTo */) const { if (params_type::template can_have_multiple_equivalent_keys()) { MatchKind exact_match = MatchKind::kNe; while (s != e) { - const int mid = (s + e) >> 1; + const size_type mid = (s + e) >> 1; const absl::weak_ordering c = comp(key(mid), k); if (c < 0) { s = mid + 1; @@ -880,7 +878,7 @@ class btree_node { return {s, exact_match}; } else { // Can't have multiple equivalent keys. while (s != e) { - const int mid = (s + e) >> 1; + const size_type mid = (s + e) >> 1; const absl::weak_ordering c = comp(key(mid), k); if (c < 0) { s = mid + 1; @@ -897,7 +895,7 @@ class btree_node { // Emplaces a value at position i, shifting all existing values and // children at positions >= i to the right by 1. template - void emplace_value(size_type i, allocator_type *alloc, Args &&... args); + void emplace_value(field_type i, allocator_type *alloc, Args &&...args); // Removes the values at positions [i, i + to_erase), shifting all existing // values and children after that range to the left by to_erase. 
Clears all @@ -905,9 +903,9 @@ class btree_node { void remove_values(field_type i, field_type to_erase, allocator_type *alloc); // Rebalances a node with its right sibling. - void rebalance_right_to_left(int to_move, btree_node *right, + void rebalance_right_to_left(field_type to_move, btree_node *right, allocator_type *alloc); - void rebalance_left_to_right(int to_move, btree_node *right, + void rebalance_left_to_right(field_type to_move, btree_node *right, allocator_type *alloc); // Splits a node, moving a portion of the node's values to its right sibling. @@ -918,7 +916,7 @@ class btree_node { void merge(btree_node *src, allocator_type *alloc); // Node allocation/deletion routines. - void init_leaf(int max_count, btree_node *parent) { + void init_leaf(field_type max_count, btree_node *parent) { set_generation(0); set_parent(parent); set_position(0); @@ -939,6 +937,7 @@ class btree_node { static void deallocate(const size_type size, btree_node *node, allocator_type *alloc) { + absl::container_internal::SanitizerUnpoisonMemoryRegion(node, size); absl::container_internal::Deallocate(alloc, node, size); } @@ -947,7 +946,7 @@ class btree_node { private: template - void value_init(const field_type i, allocator_type *alloc, Args &&... args) { + void value_init(const field_type i, allocator_type *alloc, Args &&...args) { next_generation(); absl::container_internal::SanitizerUnpoisonObject(slot(i)); params_type::construct(alloc, slot(i), std::forward(args)...); @@ -998,10 +997,15 @@ class btree_node { const size_type src_i, btree_node *src_node, allocator_type *alloc) { next_generation(); - for (slot_type *src = src_node->slot(src_i + n - 1), *end = src - n, - *dest = slot(dest_i + n - 1); + for (slot_type *src = src_node->slot(src_i + n), *end = src - n, + *dest = slot(dest_i + n); src != end; --src, --dest) { - transfer(dest, src, alloc); + // If we modified the loop index calculations above to avoid the -1s here, + // it would result in UB in the computation of `end` (and possibly `src` + // as well, if n == 0), since slot() is effectively an array index and it + // is UB to compute the address of any out-of-bounds array element except + // for one-past-the-end. + transfer(dest - 1, src - 1, alloc); } } @@ -1013,8 +1017,20 @@ class btree_node { friend struct btree_access; }; +template +bool AreNodesFromSameContainer(const Node *node_a, const Node *node_b) { + // If either node is null, then give up on checking whether they're from the + // same container. (If exactly one is null, then we'll trigger the + // default-constructed assert in Equals.) 
+ if (node_a == nullptr || node_b == nullptr) return true; + while (!node_a->is_root()) node_a = node_a->parent(); + while (!node_b->is_root()) node_b = node_b->parent(); + return node_a == node_b; +} + template class btree_iterator { + using field_type = typename Node::field_type; using key_type = typename Node::key_type; using size_type = typename Node::size_type; using params_type = typename Node::params_type; @@ -1068,25 +1084,38 @@ class btree_iterator { } bool operator==(const iterator &other) const { - return node_ == other.node_ && position_ == other.position_; + return Equals(other.node_, other.position_); } bool operator==(const const_iterator &other) const { - return node_ == other.node_ && position_ == other.position_; + return Equals(other.node_, other.position_); } bool operator!=(const iterator &other) const { - return node_ != other.node_ || position_ != other.position_; + return !Equals(other.node_, other.position_); } bool operator!=(const const_iterator &other) const { - return node_ != other.node_ || position_ != other.position_; + return !Equals(other.node_, other.position_); + } + + // Returns n such that n calls to ++other yields *this. + // Precondition: n exists. + difference_type operator-(const_iterator other) const { + if (node_ == other.node_) { + if (node_->is_leaf()) return position_ - other.position_; + if (position_ == other.position_) return 0; + } + return distance_slow(other); } // Accessors for the key/value the iterator is pointing at. reference operator*() const { ABSL_HARDENING_ASSERT(node_ != nullptr); - ABSL_HARDENING_ASSERT(node_->start() <= position_); - ABSL_HARDENING_ASSERT(node_->finish() > position_); assert_valid_generation(); - return node_->value(position_); + ABSL_HARDENING_ASSERT(position_ >= node_->start()); + if (position_ >= node_->finish()) { + ABSL_HARDENING_ASSERT(!IsEndIterator() && "Dereferencing end() iterator"); + ABSL_HARDENING_ASSERT(position_ < node_->finish()); + } + return node_->value(static_cast(position_)); } pointer operator->() const { return &operator*(); } @@ -1143,6 +1172,34 @@ class btree_iterator { #endif } + bool Equals(const node_type *other_node, int other_position) const { + ABSL_HARDENING_ASSERT(((node_ == nullptr && other_node == nullptr) || + (node_ != nullptr && other_node != nullptr)) && + "Comparing default-constructed iterator with " + "non-default-constructed iterator."); + // Note: we use assert instead of ABSL_HARDENING_ASSERT here because this + // changes the complexity of Equals from O(1) to O(log(N) + log(M)) where + // N/M are sizes of the containers containing node_/other_node. + assert(AreNodesFromSameContainer(node_, other_node) && + "Comparing iterators from different containers."); + return node_ == other_node && position_ == other_position; + } + + bool IsEndIterator() const { + if (position_ != node_->finish()) return false; + node_type *node = node_; + while (!node->is_root()) { + if (node->position() != node->parent()->finish()) return false; + node = node->parent(); + } + return true; + } + + // Returns n such that n calls to ++other yields *this. + // Precondition: n exists && (this->node_ != other.node_ || + // !this->node_->is_leaf() || this->position_ != other.position_). + difference_type distance_slow(const_iterator other) const; + // Increment/decrement the iterator. 
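The operator-() declared above returns the number of increments that take one iterator to the other within the same container. A standalone usage sketch, not part of the diff:

#include <iostream>
#include "absl/container/btree_set.h"

int main() {
  absl::btree_set<int> s;
  for (int i = 0; i < 100; ++i) s.insert(i);

  auto lo = s.find(10);
  auto hi = s.find(42);
  // Count of elements in the half-open range [10, 42).
  std::cout << (hi - lo) << "\n";  // prints 32
}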
void increment() { assert_valid_generation(); @@ -1170,8 +1227,12 @@ class btree_iterator { #endif } - const key_type &key() const { return node_->key(position_); } - slot_type *slot() { return node_->slot(position_); } + const key_type &key() const { + return node_->key(static_cast(position_)); + } + decltype(std::declval()->slot(0)) slot() { + return node_->slot(static_cast(position_)); + } void assert_valid_generation() const { #ifdef ABSL_BTREE_ENABLE_GENERATIONS @@ -1200,7 +1261,6 @@ template class btree { using node_type = btree_node; using is_key_compare_to = typename Params::is_key_compare_to; - using init_type = typename Params::init_type; using field_type = typename node_type::field_type; // We use a static empty node for the root/leftmost/rightmost of empty btrees @@ -1222,7 +1282,7 @@ class btree { // MSVC has constexpr code generations bugs here. EmptyNodeType() : parent(this) {} #else - constexpr EmptyNodeType(node_type *p) : parent(p) {} + explicit constexpr EmptyNodeType(node_type *p) : parent(p) {} #endif }; @@ -1284,14 +1344,6 @@ class btree { using slot_type = typename Params::slot_type; private: - // For use in copy_or_move_values_in_order. - const value_type &maybe_move_from_iterator(const_iterator it) { return *it; } - value_type &&maybe_move_from_iterator(iterator it) { - // This is a destructive operation on the other container so it's safe for - // us to const_cast and move from the keys here even if it's a set. - return std::move(const_cast(*it)); - } - // Copies or moves (depending on the template parameter) the values in // other into this btree in their order in other. This btree must be empty // before this method is called. This method is used in copy construction, @@ -1304,7 +1356,7 @@ class btree { public: btree(const key_compare &comp, const allocator_type &alloc) - : root_(comp, alloc, EmptyNode()), rightmost_(EmptyNode()), size_(0) {} + : root_(EmptyNode()), rightmost_(comp, alloc, EmptyNode()), size_(0) {} btree(const btree &other) : btree(other, other.allocator()) {} btree(const btree &other, const allocator_type &alloc) @@ -1312,10 +1364,10 @@ class btree { copy_or_move_values_in_order(other); } btree(btree &&other) noexcept - : root_(std::move(other.root_)), - rightmost_(absl::exchange(other.rightmost_, EmptyNode())), - size_(absl::exchange(other.size_, 0)) { - other.mutable_root() = EmptyNode(); + : root_(absl::exchange(other.root_, EmptyNode())), + rightmost_(std::move(other.rightmost_)), + size_(absl::exchange(other.size_, 0u)) { + other.mutable_rightmost() = EmptyNode(); } btree(btree &&other, const allocator_type &alloc) : btree(other.key_comp(), alloc) { @@ -1340,9 +1392,9 @@ class btree { iterator begin() { return iterator(leftmost()); } const_iterator begin() const { return const_iterator(leftmost()); } - iterator end() { return iterator(rightmost_, rightmost_->finish()); } + iterator end() { return iterator(rightmost(), rightmost()->finish()); } const_iterator end() const { - return const_iterator(rightmost_, rightmost_->finish()); + return const_iterator(rightmost(), rightmost()->finish()); } reverse_iterator rbegin() { return reverse_iterator(end()); } const_reverse_iterator rbegin() const { @@ -1393,7 +1445,7 @@ class btree { // Requirement: if `key` already exists in the btree, does not consume `args`. // Requirement: `key` is never referenced after consuming `args`. template - std::pair insert_unique(const K &key, Args &&... args); + std::pair insert_unique(const K &key, Args &&...args); // Inserts with hint. 
Checks to see if the value should be placed immediately // before `position` in the tree. If so, then the insertion will take @@ -1402,9 +1454,8 @@ class btree { // Requirement: if `key` already exists in the btree, does not consume `args`. // Requirement: `key` is never referenced after consuming `args`. template - std::pair insert_hint_unique(iterator position, - const K &key, - Args &&... args); + std::pair insert_hint_unique(iterator position, const K &key, + Args &&...args); // Insert a range of values into the btree. // Note: the first overload avoids constructing a value_type if the key @@ -1468,7 +1519,7 @@ class btree { void swap(btree &other); const key_compare &key_comp() const noexcept { - return root_.template get<0>(); + return rightmost_.template get<0>(); } template bool compare_keys(const K1 &a, const K2 &b) const { @@ -1531,8 +1582,7 @@ class btree { static double average_bytes_per_value() { // The expected number of values per node with random insertion order is the // average of the maximum and minimum numbers of values per node. - const double expected_values_per_node = - (kNodeSlots + kMinNodeValues) / 2.0; + const double expected_values_per_node = (kNodeSlots + kMinNodeValues) / 2.0; return node_type::LeafSize() / expected_values_per_node; } @@ -1562,10 +1612,17 @@ class btree { friend struct btree_access; // Internal accessor routines. - node_type *root() { return root_.template get<2>(); } - const node_type *root() const { return root_.template get<2>(); } - node_type *&mutable_root() noexcept { return root_.template get<2>(); } - key_compare *mutable_key_comp() noexcept { return &root_.template get<0>(); } + node_type *root() { return root_; } + const node_type *root() const { return root_; } + node_type *&mutable_root() noexcept { return root_; } + node_type *rightmost() { return rightmost_.template get<2>(); } + const node_type *rightmost() const { return rightmost_.template get<2>(); } + node_type *&mutable_rightmost() noexcept { + return rightmost_.template get<2>(); + } + key_compare *mutable_key_comp() noexcept { + return &rightmost_.template get<0>(); + } // The leftmost node is stored as the parent of the root node. node_type *leftmost() { return root()->parent(); } @@ -1573,15 +1630,15 @@ class btree { // Allocator routines. allocator_type *mutable_allocator() noexcept { - return &root_.template get<1>(); + return &rightmost_.template get<1>(); } const allocator_type &allocator() const noexcept { - return root_.template get<1>(); + return rightmost_.template get<1>(); } // Allocates a correctly aligned node of at least size bytes using the // allocator. - node_type *allocate(const size_type size) { + node_type *allocate(size_type size) { return reinterpret_cast( absl::container_internal::Allocate( mutable_allocator(), size)); @@ -1598,7 +1655,7 @@ class btree { n->init_leaf(kNodeSlots, parent); return n; } - node_type *new_leaf_root_node(const int max_count) { + node_type *new_leaf_root_node(field_type max_count) { node_type *n = allocate(node_type::LeafSize(max_count)); n->init_leaf(max_count, /*parent=*/n); return n; @@ -1633,7 +1690,7 @@ class btree { // Emplaces a value into the btree immediately before iter. Requires that // key(v) <= iter.key() and (--iter).key() <= key(v). template - iterator internal_emplace(iterator iter, Args &&... args); + iterator internal_emplace(iterator iter, Args &&...args); // Returns an iterator pointing to the first value >= the value "iter" is // pointing at. 
Note that "iter" might be pointing to an invalid location such @@ -1666,8 +1723,8 @@ class btree { iterator internal_find(const K &key) const; // Verifies the tree structure of node. - int internal_verify(const node_type *node, const key_type *lo, - const key_type *hi) const; + size_type internal_verify(const node_type *node, const key_type *lo, + const key_type *hi) const; node_stats internal_stats(const node_type *node) const { // The root can be a static empty node. @@ -1684,15 +1741,14 @@ class btree { return res; } - // We use compressed tuple in order to save space because key_compare and - // allocator_type are usually empty. - absl::container_internal::CompressedTuple - root_; + node_type *root_; // A pointer to the rightmost node. Note that the leftmost node is stored as - // the root's parent. - node_type *rightmost_; + // the root's parent. We use compressed tuple in order to save space because + // key_compare and allocator_type are usually empty. + absl::container_internal::CompressedTuple + rightmost_; // Number of values. size_type size_; @@ -1702,9 +1758,9 @@ class btree { // btree_node methods template template -inline void btree_node

<P>::emplace_value(const size_type i, +inline void btree_node
<P>
::emplace_value(const field_type i, allocator_type *alloc, - Args &&... args) { + Args &&...args) { assert(i >= start()); assert(i <= finish()); // Shift old values to create space for new value and then construct it in @@ -1713,7 +1769,7 @@ inline void btree_node
<P>
::emplace_value(const size_type i, transfer_n_backward(finish() - i, /*dest_i=*/i + 1, /*src_i=*/i, this, alloc); } - value_init(i, alloc, std::forward(args)...); + value_init(static_cast(i), alloc, std::forward(args)...); set_finish(finish() + 1); if (is_internal() && finish() > i + 1) { @@ -1736,11 +1792,11 @@ inline void btree_node
<P>
::remove_values(const field_type i, if (is_internal()) { // Delete all children between begin and end. - for (int j = 0; j < to_erase; ++j) { + for (field_type j = 0; j < to_erase; ++j) { clear_and_delete(child(i + j + 1), alloc); } // Rotate children after end into new positions. - for (int j = i + to_erase + 1; j <= orig_finish; ++j) { + for (field_type j = i + to_erase + 1; j <= orig_finish; ++j) { set_child(j - to_erase, child(j)); clear_child(j); } @@ -1749,7 +1805,7 @@ inline void btree_node

<P>::remove_values(const field_type i, } template <typename P> -void btree_node

<P>::rebalance_right_to_left(const int to_move, +void btree_node
<P>
::rebalance_right_to_left(field_type to_move, btree_node *right, allocator_type *alloc) { assert(parent() == right->parent()); @@ -1773,10 +1829,10 @@ void btree_node
<P>
::rebalance_right_to_left(const int to_move, if (is_internal()) { // Move the child pointers from the right to the left node. - for (int i = 0; i < to_move; ++i) { + for (field_type i = 0; i < to_move; ++i) { init_child(finish() + i + 1, right->child(i)); } - for (int i = right->start(); i <= right->finish() - to_move; ++i) { + for (field_type i = right->start(); i <= right->finish() - to_move; ++i) { assert(i + to_move <= right->max_count()); right->init_child(i, right->child(i + to_move)); right->clear_child(i + to_move); @@ -1789,7 +1845,7 @@ void btree_node

<P>::rebalance_right_to_left(const int to_move, } template <typename P> -void btree_node

<P>::rebalance_left_to_right(const int to_move, +void btree_node
<P>
::rebalance_left_to_right(field_type to_move, btree_node *right, allocator_type *alloc) { assert(parent() == right->parent()); @@ -1820,11 +1876,11 @@ void btree_node
<P>
::rebalance_left_to_right(const int to_move, if (is_internal()) { // Move the child pointers from the left to the right node. - for (int i = right->finish(); i >= right->start(); --i) { - right->init_child(i + to_move, right->child(i)); - right->clear_child(i); + for (field_type i = right->finish() + 1; i > right->start(); --i) { + right->init_child(i - 1 + to_move, right->child(i - 1)); + right->clear_child(i - 1); } - for (int i = 1; i <= to_move; ++i) { + for (field_type i = 1; i <= to_move; ++i) { right->init_child(i - 1, child(finish() - to_move + i)); clear_child(finish() - to_move + i); } @@ -1865,7 +1921,7 @@ void btree_node
<P>
::split(const int insert_position, btree_node *dest, parent()->init_child(position() + 1, dest); if (is_internal()) { - for (int i = dest->start(), j = finish() + 1; i <= dest->finish(); + for (field_type i = dest->start(), j = finish() + 1; i <= dest->finish(); ++i, ++j) { assert(child(j) != nullptr); dest->init_child(i, child(j)); @@ -1887,7 +1943,8 @@ void btree_node
<P>
::merge(btree_node *src, allocator_type *alloc) { if (is_internal()) { // Move the child pointers from the right to the left node. - for (int i = src->start(), j = finish() + 1; i <= src->finish(); ++i, ++j) { + for (field_type i = src->start(), j = finish() + 1; i <= src->finish(); + ++i, ++j) { init_child(j, src->child(i)); src->clear_child(i); } @@ -1926,15 +1983,15 @@ void btree_node
<P>
::clear_and_delete(btree_node *node, allocator_type *alloc) { // instead of checking whether the parent is a leaf, we can remove this logic. btree_node *leftmost_leaf = node; #endif - // Use `int` because `pos` needs to be able to hold `kNodeSlots+1`, which - // isn't guaranteed to be a valid `field_type`. - int pos = node->position(); + // Use `size_type` because `pos` needs to be able to hold `kNodeSlots+1`, + // which isn't guaranteed to be a valid `field_type`. + size_type pos = node->position(); btree_node *parent = node->parent(); for (;;) { // In each iteration of the next loop, we delete one leaf node and go right. assert(pos <= parent->finish()); do { - node = parent->child(pos); + node = parent->child(static_cast(pos)); if (node->is_internal()) { // Navigate to the leftmost leaf under node. while (node->is_internal()) node = node->start_child(); @@ -1970,6 +2027,64 @@ void btree_node
<P>
::clear_and_delete(btree_node *node, allocator_type *alloc) { //// // btree_iterator methods + +// Note: the implementation here is based on btree_node::clear_and_delete. +template +auto btree_iterator::distance_slow(const_iterator other) const + -> difference_type { + const_iterator begin = other; + const_iterator end = *this; + assert(begin.node_ != end.node_ || !begin.node_->is_leaf() || + begin.position_ != end.position_); + + const node_type *node = begin.node_; + // We need to compensate for double counting if begin.node_ is a leaf node. + difference_type count = node->is_leaf() ? -begin.position_ : 0; + + // First navigate to the leftmost leaf node past begin. + if (node->is_internal()) { + ++count; + node = node->child(begin.position_ + 1); + } + while (node->is_internal()) node = node->start_child(); + + // Use `size_type` because `pos` needs to be able to hold `kNodeSlots+1`, + // which isn't guaranteed to be a valid `field_type`. + size_type pos = node->position(); + const node_type *parent = node->parent(); + for (;;) { + // In each iteration of the next loop, we count one leaf node and go right. + assert(pos <= parent->finish()); + do { + node = parent->child(static_cast(pos)); + if (node->is_internal()) { + // Navigate to the leftmost leaf under node. + while (node->is_internal()) node = node->start_child(); + pos = node->position(); + parent = node->parent(); + } + if (node == end.node_) return count + end.position_; + if (parent == end.node_ && pos == static_cast(end.position_)) + return count + node->count(); + // +1 is for the next internal node value. + count += node->count() + 1; + ++pos; + } while (pos <= parent->finish()); + + // Once we've counted all children of parent, go up/right. + assert(pos > parent->finish()); + do { + node = parent; + pos = node->position(); + parent = node->parent(); + // -1 because we counted the value at end and shouldn't. + if (parent == end.node_ && pos == static_cast(end.position_)) + return count - 1; + ++pos; + } while (pos > parent->finish()); + } +} + template void btree_iterator::increment_slow() { if (node_->is_leaf()) { @@ -1986,7 +2101,7 @@ void btree_iterator::increment_slow() { } } else { assert(position_ < node_->finish()); - node_ = node_->child(position_ + 1); + node_ = node_->child(static_cast(position_ + 1)); while (node_->is_internal()) { node_ = node_->start_child(); } @@ -2010,7 +2125,7 @@ void btree_iterator::decrement_slow() { } } else { assert(position_ >= node_->start()); - node_ = node_->child(position_); + node_ = node_->child(static_cast(position_)); while (node_->is_internal()) { node_ = node_->child(node_->finish()); } @@ -2032,12 +2147,12 @@ void btree

::copy_or_move_values_in_order(Btree &other) { // values is the same order we'll store them in. auto iter = other.begin(); if (iter == other.end()) return; - insert_multi(maybe_move_from_iterator(iter)); + insert_multi(iter.slot()); ++iter; for (; iter != other.end(); ++iter) { // If the btree is not empty, we can just insert the new value at the end // of the tree. - internal_emplace(end(), maybe_move_from_iterator(iter)); + internal_emplace(end(), iter.slot()); } } @@ -2113,10 +2228,10 @@ auto btree

::equal_range(const K &key) -> std::pair { template template -auto btree

::insert_unique(const K &key, Args &&... args) +auto btree

::insert_unique(const K &key, Args &&...args) -> std::pair { if (empty()) { - mutable_root() = rightmost_ = new_leaf_root_node(1); + mutable_root() = mutable_rightmost() = new_leaf_root_node(1); } SearchResult res = internal_locate(key); @@ -2140,7 +2255,7 @@ auto btree

::insert_unique(const K &key, Args &&... args) template template inline auto btree

::insert_hint_unique(iterator position, const K &key, - Args &&... args) + Args &&...args) -> std::pair { if (!empty()) { if (position == end() || compare_keys(key, position.key())) { @@ -2174,8 +2289,11 @@ template template void btree

::insert_iterator_unique(InputIterator b, InputIterator e, char) { for (; b != e; ++b) { - init_type value(*b); - insert_hint_unique(end(), params_type::key(value), std::move(value)); + // Use a node handle to manage a temp slot. + auto node_handle = + CommonAccess::Construct(get_allocator(), *b); + slot_type *slot = CommonAccess::GetSlot(node_handle); + insert_hint_unique(end(), params_type::key(slot), slot); } } @@ -2183,7 +2301,7 @@ template template auto btree
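The temp-slot pattern above constructs the element once inside a node handle and hands the insertion path a raw slot pointer, instead of building an `init_type` value and moving it. A rough analogue outside Abseil internals, using `std::optional` as the owning temporary (the helper name is made up for illustration):

#include <optional>
#include <set>
#include <string>
#include <utility>

// Hypothetical stand-in: construct the element once in an owning temporary,
// then let the insertion path consume it; the owner cleans up whatever is left.
template <typename Set, typename... Args>
void emplace_via_temp_slot(Set& s, Args&&... args) {
  std::optional<typename Set::value_type> slot;  // owns the temporary element
  slot.emplace(std::forward<Args>(args)...);     // construct in place exactly once
  s.insert(std::move(*slot));                    // insertion consumes the slot
}                                                // the optional destroys any leftover

int main() {
  std::set<std::string> s;
  emplace_via_temp_slot(s, 5, 'x');  // constructs "xxxxx" once
  return s.count("xxxxx") ? 0 : 1;
}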

::insert_multi(const key_type &key, ValueType &&v) -> iterator { if (empty()) { - mutable_root() = rightmost_ = new_leaf_root_node(1); + mutable_root() = mutable_rightmost() = new_leaf_root_node(1); } iterator iter = internal_upper_bound(key); @@ -2247,15 +2365,15 @@ auto btree

::operator=(btree &&other) noexcept -> btree & { using std::swap; if (absl::allocator_traits< allocator_type>::propagate_on_container_copy_assignment::value) { - // Note: `root_` also contains the allocator and the key comparator. swap(root_, other.root_); + // Note: `rightmost_` also contains the allocator and the key comparator. swap(rightmost_, other.rightmost_); swap(size_, other.size_); } else { if (allocator() == other.allocator()) { swap(mutable_root(), other.mutable_root()); swap(*mutable_key_comp(), *other.mutable_key_comp()); - swap(rightmost_, other.rightmost_); + swap(mutable_rightmost(), other.mutable_rightmost()); swap(size_, other.size_); } else { // We aren't allowed to propagate the allocator and the allocator is @@ -2273,23 +2391,34 @@ auto btree

::operator=(btree &&other) noexcept -> btree & { template auto btree

::erase(iterator iter) -> iterator { - bool internal_delete = false; - if (iter.node_->is_internal()) { - // Deletion of a value on an internal node. First, move the largest value - // from our left child here, then delete that position (in remove_values() - // below). We can get to the largest value from our left child by - // decrementing iter. + iter.node_->value_destroy(static_cast(iter.position_), + mutable_allocator()); + iter.update_generation(); + + const bool internal_delete = iter.node_->is_internal(); + if (internal_delete) { + // Deletion of a value on an internal node. First, transfer the largest + // value from our left child here, then erase/rebalance from that position. + // We can get to the largest value from our left child by decrementing iter. iterator internal_iter(iter); --iter; assert(iter.node_->is_leaf()); - params_type::move(mutable_allocator(), iter.node_->slot(iter.position_), - internal_iter.node_->slot(internal_iter.position_)); - internal_delete = true; - } - - // Delete the key from the leaf. - iter.node_->remove_values(iter.position_, /*to_erase=*/1, - mutable_allocator()); + internal_iter.node_->transfer( + static_cast(internal_iter.position_), + static_cast(iter.position_), iter.node_, + mutable_allocator()); + } else { + // Shift values after erased position in leaf. In the internal case, we + // don't need to do this because the leaf position is the end of the node. + const field_type transfer_from = + static_cast(iter.position_ + 1); + const field_type num_to_transfer = iter.node_->finish() - transfer_from; + iter.node_->transfer_n(num_to_transfer, + static_cast(iter.position_), + transfer_from, iter.node_, mutable_allocator()); + } + // Update node finish and container size. + iter.node_->set_finish(iter.node_->finish() - 1); --size_; // We want to return the next value after the one we just erased. If we @@ -2352,7 +2481,7 @@ auto btree
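For a leaf node, the rewritten erase destroys the slot and then relocates the tail of the node one position to the left with `transfer_n`, instead of going through `remove_values`. The same shift on a plain array, as a rough model:

#include <algorithm>
#include <cstddef>
#include <iostream>

int main() {
  int slots[8] = {10, 20, 30, 40, 50, 0, 0, 0};
  std::size_t finish = 5;     // number of occupied slots in this "node"
  std::size_t erase_pos = 1;  // erase the value 20

  // Shift [erase_pos + 1, finish) left by one, then shrink the node.
  std::move(slots + erase_pos + 1, slots + finish, slots + erase_pos);
  --finish;

  for (std::size_t i = 0; i < finish; ++i) std::cout << slots[i] << ' ';
  std::cout << '\n';  // 10 30 40 50
  return 0;
}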

::rebalance_after_delete(iterator iter) -> iterator { template auto btree

::erase_range(iterator begin, iterator end) -> std::pair { - difference_type count = std::distance(begin, end); + size_type count = static_cast(end - begin); assert(count >= 0); if (count == 0) { @@ -2366,8 +2495,10 @@ auto btree

::erase_range(iterator begin, iterator end) if (begin.node_ == end.node_) { assert(end.position_ > begin.position_); - begin.node_->remove_values(begin.position_, end.position_ - begin.position_, - mutable_allocator()); + begin.node_->remove_values( + static_cast(begin.position_), + static_cast(end.position_ - begin.position_), + mutable_allocator()); size_ -= count; return {count, rebalance_after_delete(begin)}; } @@ -2377,11 +2508,11 @@ auto btree

::erase_range(iterator begin, iterator end) if (begin.node_->is_leaf()) { const size_type remaining_to_erase = size_ - target_size; const size_type remaining_in_node = - begin.node_->finish() - begin.position_; - const size_type to_erase = - (std::min)(remaining_to_erase, remaining_in_node); - begin.node_->remove_values(begin.position_, to_erase, - mutable_allocator()); + static_cast(begin.node_->finish() - begin.position_); + const field_type to_erase = static_cast( + (std::min)(remaining_to_erase, remaining_in_node)); + begin.node_->remove_values(static_cast(begin.position_), + to_erase, mutable_allocator()); size_ -= to_erase; begin = rebalance_after_delete(begin); } else { @@ -2397,8 +2528,7 @@ void btree

::clear() {
  if (!empty()) {
    node_type::clear_and_delete(root(), mutable_allocator());
  }
-  mutable_root() = EmptyNode();
-  rightmost_ = EmptyNode();
+  mutable_root() = mutable_rightmost() = EmptyNode();
  size_ = 0;
}

@@ -2407,15 +2537,15 @@ void btree

::swap(btree &other) { using std::swap; if (absl::allocator_traits< allocator_type>::propagate_on_container_swap::value) { - // Note: `root_` also contains the allocator and the key comparator. - swap(root_, other.root_); + // Note: `rightmost_` also contains the allocator and the key comparator. + swap(rightmost_, other.rightmost_); } else { // It's undefined behavior if the allocators are unequal here. assert(allocator() == other.allocator()); - swap(mutable_root(), other.mutable_root()); + swap(mutable_rightmost(), other.mutable_rightmost()); swap(*mutable_key_comp(), *other.mutable_key_comp()); } - swap(rightmost_, other.rightmost_); + swap(mutable_root(), other.mutable_root()); swap(size_, other.size_); } @@ -2423,12 +2553,12 @@ template void btree

::verify() const { assert(root() != nullptr); assert(leftmost() != nullptr); - assert(rightmost_ != nullptr); + assert(rightmost() != nullptr); assert(empty() || size() == internal_verify(root(), nullptr, nullptr)); assert(leftmost() == (++const_iterator(root(), -1)).node_); - assert(rightmost_ == (--const_iterator(root(), root()->finish())).node_); + assert(rightmost() == (--const_iterator(root(), root()->finish())).node_); assert(leftmost()->is_leaf()); - assert(rightmost_->is_leaf()); + assert(rightmost()->is_leaf()); } template @@ -2449,16 +2579,19 @@ void btree

::rebalance_or_split(iterator *iter) { // We bias rebalancing based on the position being inserted. If we're // inserting at the end of the right node then we bias rebalancing to // fill up the left node. - int to_move = (kNodeSlots - left->count()) / - (1 + (insert_position < static_cast(kNodeSlots))); - to_move = (std::max)(1, to_move); - - if (insert_position - to_move >= node->start() || - left->count() + to_move < static_cast(kNodeSlots)) { + field_type to_move = + (kNodeSlots - left->count()) / + (1 + (static_cast(insert_position) < kNodeSlots)); + to_move = (std::max)(field_type{1}, to_move); + + if (static_cast(insert_position) - to_move >= + node->start() || + left->count() + to_move < kNodeSlots) { left->rebalance_right_to_left(to_move, node, mutable_allocator()); assert(node->max_count() - node->count() == to_move); - insert_position = insert_position - to_move; + insert_position = static_cast( + static_cast(insert_position) - to_move); if (insert_position < node->start()) { insert_position = insert_position + left->count() + 1; node = left; @@ -2478,12 +2611,13 @@ void btree
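The rebalancing bias reads more easily with concrete numbers. A hedged example of the `to_move` computation above, assuming a node capacity of 6 (the real `kNodeSlots` depends on the node layout):

#include <algorithm>
#include <cstdint>
#include <iostream>

int main() {
  using field_type = std::uint8_t;
  const field_type kNodeSlots = 6;  // assumed capacity, for illustration only
  field_type left_count = 2;        // left sibling currently holds 2 values
  field_type insert_position = 1;   // inserting near the front of `node`

  // Mirrors the field_type arithmetic above: move roughly half of the free
  // space, at least one value, and halve again unless inserting at the end.
  field_type to_move =
      (kNodeSlots - left_count) / (1 + (insert_position < kNodeSlots));
  to_move = (std::max)(field_type{1}, to_move);

  std::cout << int(to_move) << '\n';  // (6 - 2) / 2 == 2
  return 0;
}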

::rebalance_or_split(iterator *iter) { // We bias rebalancing based on the position being inserted. If we're // inserting at the beginning of the left node then we bias rebalancing // to fill up the right node. - int to_move = (static_cast(kNodeSlots) - right->count()) / - (1 + (insert_position > node->start())); - to_move = (std::max)(1, to_move); + field_type to_move = (kNodeSlots - right->count()) / + (1 + (insert_position > node->start())); + to_move = (std::max)(field_type{1}, to_move); - if (insert_position <= node->finish() - to_move || - right->count() + to_move < static_cast(kNodeSlots)) { + if (static_cast(insert_position) <= + node->finish() - to_move || + right->count() + to_move < kNodeSlots) { node->rebalance_left_to_right(to_move, right, mutable_allocator()); if (insert_position > node->finish()) { @@ -2514,7 +2648,7 @@ void btree

::rebalance_or_split(iterator *iter) { mutable_root() = parent; // If the former root was a leaf node, then it's now the rightmost node. assert(parent->start_child()->is_internal() || - parent->start_child() == rightmost_); + parent->start_child() == rightmost()); } // Split the node. @@ -2522,7 +2656,7 @@ void btree

::rebalance_or_split(iterator *iter) { if (node->is_leaf()) { split_node = new_leaf_node(parent); node->split(insert_position, split_node, mutable_allocator()); - if (rightmost_ == node) rightmost_ = split_node; + if (rightmost() == node) mutable_rightmost() = split_node; } else { split_node = new_internal_node(parent); node->split(insert_position, split_node, mutable_allocator()); @@ -2537,7 +2671,7 @@ void btree

::rebalance_or_split(iterator *iter) { template void btree

::merge_nodes(node_type *left, node_type *right) { left->merge(right, mutable_allocator()); - if (rightmost_ == right) rightmost_ = left; + if (rightmost() == right) mutable_rightmost() = left; } template @@ -2568,8 +2702,9 @@ bool btree

::try_merge_or_rebalance(iterator *iter) { // from the front of the tree. if (right->count() > kMinNodeValues && (iter->node_->count() == 0 || iter->position_ > iter->node_->start())) { - int to_move = (right->count() - iter->node_->count()) / 2; - to_move = (std::min)(to_move, right->count() - 1); + field_type to_move = (right->count() - iter->node_->count()) / 2; + to_move = + (std::min)(to_move, static_cast(right->count() - 1)); iter->node_->rebalance_right_to_left(to_move, right, mutable_allocator()); return false; } @@ -2583,8 +2718,8 @@ bool btree

::try_merge_or_rebalance(iterator *iter) { if (left->count() > kMinNodeValues && (iter->node_->count() == 0 || iter->position_ < iter->node_->finish())) { - int to_move = (left->count() - iter->node_->count()) / 2; - to_move = (std::min)(to_move, left->count() - 1); + field_type to_move = (left->count() - iter->node_->count()) / 2; + to_move = (std::min)(to_move, static_cast(left->count() - 1)); left->rebalance_left_to_right(to_move, iter->node_, mutable_allocator()); iter->position_ += to_move; return false; @@ -2602,7 +2737,7 @@ void btree

::try_shrink() { // Deleted the last item on the root node, shrink the height of the tree. if (orig_root->is_leaf()) { assert(size() == 0); - mutable_root() = rightmost_ = EmptyNode(); + mutable_root() = mutable_rightmost() = EmptyNode(); } else { node_type *child = orig_root->start_child(); child->make_root(); @@ -2629,7 +2764,7 @@ inline IterType btree

::internal_last(IterType iter) { template template -inline auto btree

::internal_emplace(iterator iter, Args &&... args) +inline auto btree

::internal_emplace(iterator iter, Args &&...args) -> iterator { if (iter.node_->is_internal()) { // We can't insert on an internal node. Instead, we'll insert after the @@ -2645,8 +2780,8 @@ inline auto btree

::internal_emplace(iterator iter, Args &&... args) // Insertion into the root where the root is smaller than the full node // size. Simply grow the size of the root node. assert(iter.node_ == root()); - iter.node_ = - new_leaf_root_node((std::min)(kNodeSlots, 2 * max_count)); + iter.node_ = new_leaf_root_node(static_cast( + (std::min)(static_cast(kNodeSlots), 2 * max_count))); // Transfer the values from the old root to the new root. node_type *old_root = root(); node_type *new_root = iter.node_; @@ -2656,12 +2791,13 @@ inline auto btree

::internal_emplace(iterator iter, Args &&... args) old_root->set_finish(old_root->start()); new_root->set_generation(old_root->generation()); node_type::clear_and_delete(old_root, alloc); - mutable_root() = rightmost_ = new_root; + mutable_root() = mutable_rightmost() = new_root; } else { rebalance_or_split(&iter); } } - iter.node_->emplace_value(iter.position_, alloc, std::forward(args)...); + iter.node_->emplace_value(static_cast(iter.position_), alloc, + std::forward(args)...); ++size_; iter.update_generation(); return iter; @@ -2673,9 +2809,9 @@ inline auto btree

::internal_locate(const K &key) const -> SearchResult { iterator iter(const_cast(root())); for (;;) { - SearchResult res = + SearchResult res = iter.node_->lower_bound(key, key_comp()); - iter.position_ = res.value; + iter.position_ = static_cast(res.value); if (res.IsEq()) { return {iter, MatchKind::kEq}; } @@ -2686,7 +2822,7 @@ inline auto btree

::internal_locate(const K &key) const if (iter.node_->is_leaf()) { break; } - iter.node_ = iter.node_->child(iter.position_); + iter.node_ = iter.node_->child(static_cast(iter.position_)); } // Note: in the non-key-compare-to case, the key may actually be equivalent // here (and the MatchKind::kNe is ignored). @@ -2703,16 +2839,16 @@ auto btree

::internal_lower_bound(const K &key) const return ret; } iterator iter(const_cast(root())); - SearchResult res; + SearchResult res; bool seen_eq = false; for (;;) { res = iter.node_->lower_bound(key, key_comp()); - iter.position_ = res.value; + iter.position_ = static_cast(res.value); if (iter.node_->is_leaf()) { break; } seen_eq = seen_eq || res.IsEq(); - iter.node_ = iter.node_->child(iter.position_); + iter.node_ = iter.node_->child(static_cast(iter.position_)); } if (res.IsEq()) return {iter, MatchKind::kEq}; return {internal_last(iter), seen_eq ? MatchKind::kEq : MatchKind::kNe}; @@ -2723,11 +2859,11 @@ template auto btree

::internal_upper_bound(const K &key) const -> iterator { iterator iter(const_cast(root())); for (;;) { - iter.position_ = iter.node_->upper_bound(key, key_comp()); + iter.position_ = static_cast(iter.node_->upper_bound(key, key_comp())); if (iter.node_->is_leaf()) { break; } - iter.node_ = iter.node_->child(iter.position_); + iter.node_ = iter.node_->child(static_cast(iter.position_)); } return internal_last(iter); } @@ -2750,8 +2886,8 @@ auto btree

::internal_find(const K &key) const -> iterator { } template -int btree

::internal_verify(const node_type *node, const key_type *lo, - const key_type *hi) const { +typename btree

::size_type btree

::internal_verify( + const node_type *node, const key_type *lo, const key_type *hi) const { assert(node->count() > 0); assert(node->count() <= node->max_count()); if (lo) { @@ -2763,9 +2899,9 @@ int btree

::internal_verify(const node_type *node, const key_type *lo, for (int i = node->start() + 1; i < node->finish(); ++i) { assert(!compare_keys(node->key(i), node->key(i - 1))); } - int count = node->count(); + size_type count = node->count(); if (node->is_internal()) { - for (int i = node->start(); i <= node->finish(); ++i) { + for (field_type i = node->start(); i <= node->finish(); ++i) { assert(node->child(i) != nullptr); assert(node->child(i)->parent() == node); assert(node->child(i)->position() == i); @@ -2779,8 +2915,8 @@ int btree

::internal_verify(const node_type *node, const key_type *lo, struct btree_access { template - static auto erase_if(BtreeContainer &container, Pred pred) - -> typename BtreeContainer::size_type { + static auto erase_if(BtreeContainer &container, Pred pred) -> + typename BtreeContainer::size_type { const auto initial_size = container.size(); auto &tree = container.tree_; auto *alloc = tree.mutable_allocator(); diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/btree_container.h b/TMessagesProj/jni/voip/webrtc/absl/container/internal/btree_container.h index cc2e1793a5..2bff11db04 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/btree_container.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/btree_container.h @@ -65,6 +65,11 @@ class btree_container { using const_reverse_iterator = typename Tree::const_reverse_iterator; using node_type = typename Tree::node_handle_type; + struct extract_and_get_next_return_type { + node_type node; + iterator next; + }; + // Constructors/assignments. btree_container() : tree_(key_compare(), allocator_type()) {} explicit btree_container(const key_compare &comp, @@ -107,7 +112,7 @@ class btree_container { template size_type count(const key_arg &key) const { auto equal_range = this->equal_range(key); - return std::distance(equal_range.first, equal_range.second); + return equal_range.second - equal_range.first; } template iterator find(const key_arg &key) { @@ -165,10 +170,20 @@ class btree_container { } // Extract routines. + extract_and_get_next_return_type extract_and_get_next( + const_iterator position) { + // Use Construct instead of Transfer because the rebalancing code will + // destroy the slot later. + // Note: we rely on erase() taking place after Construct(). + return {CommonAccess::Construct(get_allocator(), + iterator(position).slot()), + erase(position)}; + } node_type extract(iterator position) { - // Use Move instead of Transfer, because the rebalancing code expects to - // have a valid object to scribble metadata bits on top of. - auto node = CommonAccess::Move(get_allocator(), position.slot()); + // Use Construct instead of Transfer because the rebalancing code will + // destroy the slot later. + auto node = + CommonAccess::Construct(get_allocator(), position.slot()); erase(position); return node; } @@ -291,8 +306,11 @@ class btree_set_container : public btree_container { } template std::pair emplace(Args &&... args) { - init_type v(std::forward(args)...); - return this->tree_.insert_unique(params_type::key(v), std::move(v)); + // Use a node handle to manage a temp slot. + auto node = CommonAccess::Construct(this->get_allocator(), + std::forward(args)...); + auto *slot = CommonAccess::GetSlot(node); + return this->tree_.insert_unique(params_type::key(slot), slot); } iterator insert(const_iterator hint, const value_type &v) { return this->tree_ @@ -306,9 +324,12 @@ class btree_set_container : public btree_container { } template iterator emplace_hint(const_iterator hint, Args &&... args) { - init_type v(std::forward(args)...); + // Use a node handle to manage a temp slot. + auto node = CommonAccess::Construct(this->get_allocator(), + std::forward(args)...); + auto *slot = CommonAccess::GetSlot(node); return this->tree_ - .insert_hint_unique(iterator(hint), params_type::key(v), std::move(v)) + .insert_hint_unique(iterator(hint), params_type::key(slot), slot) .first; } template @@ -598,12 +619,18 @@ class btree_multiset_container : public btree_container { } template iterator emplace(Args &&... 
args) { - return this->tree_.insert_multi(init_type(std::forward(args)...)); + // Use a node handle to manage a temp slot. + auto node = CommonAccess::Construct(this->get_allocator(), + std::forward(args)...); + return this->tree_.insert_multi(CommonAccess::GetSlot(node)); } template iterator emplace_hint(const_iterator hint, Args &&... args) { - return this->tree_.insert_hint_multi( - iterator(hint), init_type(std::forward(args)...)); + // Use a node handle to manage a temp slot. + auto node = CommonAccess::Construct(this->get_allocator(), + std::forward(args)...); + return this->tree_.insert_hint_multi(iterator(hint), + CommonAccess::GetSlot(node)); } iterator insert(node_type &&node) { if (!node) return this->end(); diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/common.h b/TMessagesProj/jni/voip/webrtc/absl/container/internal/common.h index 030e9d4ab0..9239bb4d09 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/common.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/common.h @@ -12,8 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -#ifndef ABSL_CONTAINER_INTERNAL_CONTAINER_H_ -#define ABSL_CONTAINER_INTERNAL_CONTAINER_H_ +#ifndef ABSL_CONTAINER_INTERNAL_COMMON_H_ +#define ABSL_CONTAINER_INTERNAL_COMMON_H_ #include #include @@ -84,10 +84,11 @@ class node_handle_base { PolicyTraits::transfer(alloc(), slot(), s); } - struct move_tag_t {}; - node_handle_base(move_tag_t, const allocator_type& a, slot_type* s) + struct construct_tag_t {}; + template + node_handle_base(construct_tag_t, const allocator_type& a, Args&&... args) : alloc_(a) { - PolicyTraits::construct(alloc(), slot(), s); + PolicyTraits::construct(alloc(), slot(), std::forward(args)...); } void destroy() { @@ -186,8 +187,8 @@ struct CommonAccess { } template - static T Move(Args&&... args) { - return T(typename T::move_tag_t{}, std::forward(args)...); + static T Construct(Args&&... args) { + return T(typename T::construct_tag_t{}, std::forward(args)...); } }; @@ -203,4 +204,4 @@ struct InsertReturnType { ABSL_NAMESPACE_END } // namespace absl -#endif // ABSL_CONTAINER_INTERNAL_CONTAINER_H_ +#endif // ABSL_CONTAINER_INTERNAL_COMMON_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/common_policy_traits.h b/TMessagesProj/jni/voip/webrtc/absl/container/internal/common_policy_traits.h new file mode 100644 index 0000000000..0fd4866e38 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/common_policy_traits.h @@ -0,0 +1,115 @@ +// Copyright 2022 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef ABSL_CONTAINER_INTERNAL_COMMON_POLICY_TRAITS_H_ +#define ABSL_CONTAINER_INTERNAL_COMMON_POLICY_TRAITS_H_ + +#include +#include +#include +#include +#include +#include + +#include "absl/meta/type_traits.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace container_internal { + +// Defines how slots are initialized/destroyed/moved. 
+template +struct common_policy_traits { + // The actual object stored in the container. + using slot_type = typename Policy::slot_type; + using reference = decltype(Policy::element(std::declval())); + using value_type = typename std::remove_reference::type; + + // PRECONDITION: `slot` is UNINITIALIZED + // POSTCONDITION: `slot` is INITIALIZED + template + static void construct(Alloc* alloc, slot_type* slot, Args&&... args) { + Policy::construct(alloc, slot, std::forward(args)...); + } + + // PRECONDITION: `slot` is INITIALIZED + // POSTCONDITION: `slot` is UNINITIALIZED + template + static void destroy(Alloc* alloc, slot_type* slot) { + Policy::destroy(alloc, slot); + } + + // Transfers the `old_slot` to `new_slot`. Any memory allocated by the + // allocator inside `old_slot` to `new_slot` can be transferred. + // + // OPTIONAL: defaults to: + // + // clone(new_slot, std::move(*old_slot)); + // destroy(old_slot); + // + // PRECONDITION: `new_slot` is UNINITIALIZED and `old_slot` is INITIALIZED + // POSTCONDITION: `new_slot` is INITIALIZED and `old_slot` is + // UNINITIALIZED + template + static void transfer(Alloc* alloc, slot_type* new_slot, slot_type* old_slot) { + transfer_impl(alloc, new_slot, old_slot, 0); + } + + // PRECONDITION: `slot` is INITIALIZED + // POSTCONDITION: `slot` is INITIALIZED + // Note: we use remove_const_t so that the two overloads have different args + // in the case of sets with explicitly const value_types. + template + static auto element(absl::remove_const_t* slot) + -> decltype(P::element(slot)) { + return P::element(slot); + } + template + static auto element(const slot_type* slot) -> decltype(P::element(slot)) { + return P::element(slot); + } + + private: + // Use auto -> decltype as an enabler. + template + static auto transfer_impl(Alloc* alloc, slot_type* new_slot, + slot_type* old_slot, int) + -> decltype((void)P::transfer(alloc, new_slot, old_slot)) { + P::transfer(alloc, new_slot, old_slot); + } + template + static void transfer_impl(Alloc* alloc, slot_type* new_slot, + slot_type* old_slot, char) { +#if defined(__cpp_lib_launder) && __cpp_lib_launder >= 201606 + if (absl::is_trivially_relocatable()) { + // TODO(b/247130232,b/251814870): remove casts after fixing warnings. + std::memcpy(static_cast( + std::launder(const_cast*>( + &element(new_slot)))), + static_cast(&element(old_slot)), + sizeof(value_type)); + return; + } +#endif + + construct(alloc, new_slot, std::move(element(old_slot))); + destroy(alloc, old_slot); + } +}; + +} // namespace container_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_CONTAINER_INTERNAL_COMMON_POLICY_TRAITS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/container_memory.h b/TMessagesProj/jni/voip/webrtc/absl/container/internal/container_memory.h index e67529ecb6..bfa4ff93d7 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/container_memory.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/container_memory.h @@ -17,6 +17,7 @@ #include #include +#include #include #include #include @@ -174,7 +175,7 @@ decltype(std::declval()(std::declval())) WithConstructed( // // 2. 
auto a = PairArgs(args...); // std::pair p(std::piecewise_construct, -// std::move(p.first), std::move(p.second)); +// std::move(a.first), std::move(a.second)); inline std::pair, std::tuple<>> PairArgs() { return {}; } template std::pair, std::tuple> PairArgs(F&& f, S&& s) { @@ -340,7 +341,8 @@ template struct map_slot_policy { using slot_type = map_slot_type; using value_type = std::pair; - using mutable_value_type = std::pair; + using mutable_value_type = + std::pair, absl::remove_const_t>; private: static void emplace(slot_type* slot) { @@ -402,6 +404,15 @@ struct map_slot_policy { } } + // Construct this slot by copying from another slot. + template + static void construct(Allocator* alloc, slot_type* slot, + const slot_type* other) { + emplace(slot); + absl::allocator_traits::construct(*alloc, &slot->value, + other->value); + } + template static void destroy(Allocator* alloc, slot_type* slot) { if (kMutableKeys::value) { @@ -415,6 +426,16 @@ struct map_slot_policy { static void transfer(Allocator* alloc, slot_type* new_slot, slot_type* old_slot) { emplace(new_slot); +#if defined(__cpp_lib_launder) && __cpp_lib_launder >= 201606 + if (absl::is_trivially_relocatable()) { + // TODO(b/247130232,b/251814870): remove casts after fixing warnings. + std::memcpy(static_cast(std::launder(&new_slot->value)), + static_cast(&old_slot->value), + sizeof(value_type)); + return; + } +#endif + if (kMutableKeys::value) { absl::allocator_traits::construct( *alloc, &new_slot->mutable_value, std::move(old_slot->mutable_value)); @@ -424,33 +445,6 @@ struct map_slot_policy { } destroy(alloc, old_slot); } - - template - static void swap(Allocator* alloc, slot_type* a, slot_type* b) { - if (kMutableKeys::value) { - using std::swap; - swap(a->mutable_value, b->mutable_value); - } else { - value_type tmp = std::move(a->value); - absl::allocator_traits::destroy(*alloc, &a->value); - absl::allocator_traits::construct(*alloc, &a->value, - std::move(b->value)); - absl::allocator_traits::destroy(*alloc, &b->value); - absl::allocator_traits::construct(*alloc, &b->value, - std::move(tmp)); - } - } - - template - static void move(Allocator* alloc, slot_type* src, slot_type* dest) { - if (kMutableKeys::value) { - dest->mutable_value = std::move(src->mutable_value); - } else { - absl::allocator_traits::destroy(*alloc, &dest->value); - absl::allocator_traits::construct(*alloc, &dest->value, - std::move(src->value)); - } - } }; } // namespace container_internal diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/counting_allocator.h b/TMessagesProj/jni/voip/webrtc/absl/container/internal/counting_allocator.h index 927cf08255..66068a5a0d 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/counting_allocator.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/counting_allocator.h @@ -80,7 +80,15 @@ class CountingAllocator { template void destroy(U* p) { Allocator allocator; + // Ignore GCC warning bug. 
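Both `common_policy_traits::transfer` and `map_slot_policy::transfer` above gain a byte-copy fast path for trivially relocatable element types, guarded by `__cpp_lib_launder`. A simplified model of the idea, with `std::is_trivially_copyable` standing in for `absl::is_trivially_relocatable` (a stricter but safe approximation):

#include <cstring>
#include <new>
#include <type_traits>
#include <utility>

// If the element can be relocated by copying its bytes, memcpy replaces the
// move-construct + destroy pair; otherwise fall back to the generic path.
template <typename T>
void transfer_slot(T* new_slot, T* old_slot) {
  if constexpr (std::is_trivially_copyable_v<T>) {
    std::memcpy(static_cast<void*>(new_slot),
                static_cast<const void*>(old_slot), sizeof(T));
  } else {
    ::new (static_cast<void*>(new_slot)) T(std::move(*old_slot));
    old_slot->~T();
  }
}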
+#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0) +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wuse-after-free" +#endif AllocatorTraits::destroy(allocator, p); +#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0) +#pragma GCC diagnostic pop +#endif if (instance_count_ != nullptr) { *instance_count_ -= 1; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/hash_policy_traits.h b/TMessagesProj/jni/voip/webrtc/absl/container/internal/hash_policy_traits.h index 46c97b18a2..164ec12316 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/hash_policy_traits.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/hash_policy_traits.h @@ -21,6 +21,7 @@ #include #include +#include "absl/container/internal/common_policy_traits.h" #include "absl/meta/type_traits.h" namespace absl { @@ -29,7 +30,7 @@ namespace container_internal { // Defines how slots are initialized/destroyed/moved. template -struct hash_policy_traits { +struct hash_policy_traits : common_policy_traits { // The type of the keys stored in the hashtable. using key_type = typename Policy::key_type; @@ -87,43 +88,6 @@ struct hash_policy_traits { // Defaults to false if not provided by the policy. using constant_iterators = ConstantIteratorsImpl<>; - // PRECONDITION: `slot` is UNINITIALIZED - // POSTCONDITION: `slot` is INITIALIZED - template - static void construct(Alloc* alloc, slot_type* slot, Args&&... args) { - Policy::construct(alloc, slot, std::forward(args)...); - } - - // PRECONDITION: `slot` is INITIALIZED - // POSTCONDITION: `slot` is UNINITIALIZED - template - static void destroy(Alloc* alloc, slot_type* slot) { - Policy::destroy(alloc, slot); - } - - // Transfers the `old_slot` to `new_slot`. Any memory allocated by the - // allocator inside `old_slot` to `new_slot` can be transferred. - // - // OPTIONAL: defaults to: - // - // clone(new_slot, std::move(*old_slot)); - // destroy(old_slot); - // - // PRECONDITION: `new_slot` is UNINITIALIZED and `old_slot` is INITIALIZED - // POSTCONDITION: `new_slot` is INITIALIZED and `old_slot` is - // UNINITIALIZED - template - static void transfer(Alloc* alloc, slot_type* new_slot, slot_type* old_slot) { - transfer_impl(alloc, new_slot, old_slot, 0); - } - - // PRECONDITION: `slot` is INITIALIZED - // POSTCONDITION: `slot` is INITIALIZED - template - static auto element(slot_type* slot) -> decltype(P::element(slot)) { - return P::element(slot); - } - // Returns the amount of memory owned by `slot`, exclusive of `sizeof(*slot)`. // // If `slot` is nullptr, returns the constant amount of memory owned by any @@ -174,8 +138,8 @@ struct hash_policy_traits { // Used for node handle manipulation. template static auto mutable_key(slot_type* slot) - -> decltype(P::apply(ReturnKey(), element(slot))) { - return P::apply(ReturnKey(), element(slot)); + -> decltype(P::apply(ReturnKey(), hash_policy_traits::element(slot))) { + return P::apply(ReturnKey(), hash_policy_traits::element(slot)); } // Returns the "value" (as opposed to the "key") portion of the element. Used @@ -184,21 +148,6 @@ struct hash_policy_traits { static auto value(T* elem) -> decltype(P::value(elem)) { return P::value(elem); } - - private: - // Use auto -> decltype as an enabler. 
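The pragmas added to CountingAllocator::destroy scope the suppression of a spurious GCC 12 -Wuse-after-free diagnostic to a single call rather than disabling the warning globally. A self-contained sketch of the same pattern (the helper names here are invented):

#include <cstddef>

inline void destroy_object(int* p) { delete p; }  // stand-in for AllocatorTraits::destroy

void destroy_and_count(int* p, std::size_t* instance_count) {
#if defined(__GNUC__) && !defined(__clang__) && (__GNUC__ >= 12)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wuse-after-free"
#endif
  destroy_object(p);
#if defined(__GNUC__) && !defined(__clang__) && (__GNUC__ >= 12)
#pragma GCC diagnostic pop
#endif
  if (instance_count != nullptr) --*instance_count;  // mirrors the allocator's bookkeeping
}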
- template - static auto transfer_impl(Alloc* alloc, slot_type* new_slot, - slot_type* old_slot, int) - -> decltype((void)P::transfer(alloc, new_slot, old_slot)) { - P::transfer(alloc, new_slot, old_slot); - } - template - static void transfer_impl(Alloc* alloc, slot_type* new_slot, - slot_type* old_slot, char) { - construct(alloc, new_slot, std::move(element(old_slot))); - destroy(alloc, old_slot); - } }; } // namespace container_internal diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/hash_policy_traits_test.cc b/TMessagesProj/jni/voip/webrtc/absl/container/internal/hash_policy_traits_test.cc index 6ef8b9e05f..82d7cc3a70 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/hash_policy_traits_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/hash_policy_traits_test.cc @@ -38,81 +38,31 @@ struct PolicyWithoutOptionalOps { using key_type = Slot; using init_type = Slot; - static std::function construct; - static std::function destroy; - static std::function element; static int apply(int v) { return apply_impl(v); } static std::function apply_impl; static std::function value; }; -std::function PolicyWithoutOptionalOps::construct; -std::function PolicyWithoutOptionalOps::destroy; - -std::function PolicyWithoutOptionalOps::element; std::function PolicyWithoutOptionalOps::apply_impl; std::function PolicyWithoutOptionalOps::value; -struct PolicyWithOptionalOps : PolicyWithoutOptionalOps { - static std::function transfer; -}; - -std::function PolicyWithOptionalOps::transfer; - struct Test : ::testing::Test { Test() { - PolicyWithoutOptionalOps::construct = [&](void* a1, Slot* a2, Slot a3) { - construct.Call(a1, a2, std::move(a3)); - }; - PolicyWithoutOptionalOps::destroy = [&](void* a1, Slot* a2) { - destroy.Call(a1, a2); - }; - - PolicyWithoutOptionalOps::element = [&](Slot* a1) -> Slot& { - return element.Call(a1); - }; PolicyWithoutOptionalOps::apply_impl = [&](int a1) -> int { return apply.Call(a1); }; PolicyWithoutOptionalOps::value = [&](Slot* a1) -> Slot& { return value.Call(a1); }; - - PolicyWithOptionalOps::transfer = [&](void* a1, Slot* a2, Slot* a3) { - return transfer.Call(a1, a2, a3); - }; } std::allocator alloc; int a = 53; - - MockFunction construct; - MockFunction destroy; - - MockFunction element; MockFunction apply; MockFunction value; - - MockFunction transfer; }; -TEST_F(Test, construct) { - EXPECT_CALL(construct, Call(&alloc, &a, 53)); - hash_policy_traits::construct(&alloc, &a, 53); -} - -TEST_F(Test, destroy) { - EXPECT_CALL(destroy, Call(&alloc, &a)); - hash_policy_traits::destroy(&alloc, &a); -} - -TEST_F(Test, element) { - int b = 0; - EXPECT_CALL(element, Call(&a)).WillOnce(ReturnRef(b)); - EXPECT_EQ(&b, &hash_policy_traits::element(&a)); -} - TEST_F(Test, apply) { EXPECT_CALL(apply, Call(42)).WillOnce(Return(1337)); EXPECT_EQ(1337, (hash_policy_traits::apply(42))); @@ -124,20 +74,6 @@ TEST_F(Test, value) { EXPECT_EQ(&b, &hash_policy_traits::value(&a)); } -TEST_F(Test, without_transfer) { - int b = 42; - EXPECT_CALL(element, Call(&b)).WillOnce(::testing::ReturnRef(b)); - EXPECT_CALL(construct, Call(&alloc, &a, b)); - EXPECT_CALL(destroy, Call(&alloc, &b)); - hash_policy_traits::transfer(&alloc, &a, &b); -} - -TEST_F(Test, with_transfer) { - int b = 42; - EXPECT_CALL(transfer, Call(&alloc, &a, &b)); - hash_policy_traits::transfer(&alloc, &a, &b); -} - } // namespace } // namespace container_internal ABSL_NAMESPACE_END diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/hashtablez_sampler.cc 
b/TMessagesProj/jni/voip/webrtc/absl/container/internal/hashtablez_sampler.cc index 322e0547e2..5b8cf341da 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/hashtablez_sampler.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/hashtablez_sampler.cc @@ -21,7 +21,7 @@ #include #include "absl/base/attributes.h" -#include "absl/container/internal/have_sse.h" +#include "absl/base/config.h" #include "absl/debugging/stacktrace.h" #include "absl/memory/memory.h" #include "absl/profiling/internal/exponential_biased.h" @@ -32,7 +32,10 @@ namespace absl { ABSL_NAMESPACE_BEGIN namespace container_internal { + +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL constexpr int HashtablezInfo::kMaxStackDepth; +#endif namespace { ABSL_CONST_INIT std::atomic g_hashtablez_enabled{ @@ -160,7 +163,7 @@ void RecordInsertSlow(HashtablezInfo* info, size_t hash, // SwissTables probe in groups of 16, so scale this to count items probes and // not offset from desired. size_t probe_length = distance_from_desired; -#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2 +#ifdef ABSL_INTERNAL_HAVE_SSE2 probe_length /= 16; #else probe_length /= 8; @@ -212,21 +215,20 @@ void SetHashtablezSampleParameterInternal(int32_t rate) { } } -int32_t GetHashtablezMaxSamples() { +size_t GetHashtablezMaxSamples() { return GlobalHashtablezSampler().GetMaxSamples(); } -void SetHashtablezMaxSamples(int32_t max) { +void SetHashtablezMaxSamples(size_t max) { SetHashtablezMaxSamplesInternal(max); TriggerHashtablezConfigListener(); } -void SetHashtablezMaxSamplesInternal(int32_t max) { +void SetHashtablezMaxSamplesInternal(size_t max) { if (max > 0) { GlobalHashtablezSampler().SetMaxSamples(max); } else { - ABSL_RAW_LOG(ERROR, "Invalid hashtablez max samples: %lld", - static_cast(max)); // NOLINT(runtime/int) + ABSL_RAW_LOG(ERROR, "Invalid hashtablez max samples: 0"); } } diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/hashtablez_sampler.h b/TMessagesProj/jni/voip/webrtc/absl/container/internal/hashtablez_sampler.h index e7c204eea4..a89518bb03 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/hashtablez_sampler.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/hashtablez_sampler.h @@ -44,9 +44,9 @@ #include #include +#include "absl/base/config.h" #include "absl/base/internal/per_thread_tls.h" #include "absl/base/optimization.h" -#include "absl/container/internal/have_sse.h" #include "absl/profiling/internal/sample_recorder.h" #include "absl/synchronization/mutex.h" #include "absl/utility/utility.h" @@ -96,7 +96,7 @@ struct HashtablezInfo : public profiling_internal::Sample { }; inline void RecordRehashSlow(HashtablezInfo* info, size_t total_probe_length) { -#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2 +#ifdef ABSL_INTERNAL_HAVE_SSE2 total_probe_length /= 16; #else total_probe_length /= 8; @@ -281,9 +281,9 @@ void SetHashtablezSampleParameter(int32_t rate); void SetHashtablezSampleParameterInternal(int32_t rate); // Sets a soft max for the number of samples that will be kept. -int32_t GetHashtablezMaxSamples(); -void SetHashtablezMaxSamples(int32_t max); -void SetHashtablezMaxSamplesInternal(int32_t max); +size_t GetHashtablezMaxSamples(); +void SetHashtablezMaxSamples(size_t max); +void SetHashtablezMaxSamplesInternal(size_t max); // Configuration override. 
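The sampler records probe lengths in units of whole groups, which is why the divisor follows the group width selected by ABSL_INTERNAL_HAVE_SSE2. A small arithmetic sketch with assumed numbers:

#include <cstddef>
#include <iostream>

int main() {
  const std::size_t kGroupWidth = 16;      // assumed SSE2 group width (8 without SSE2)
  std::size_t distance_from_desired = 40;  // hypothetical displacement in slots
  std::cout << distance_from_desired / kGroupWidth << "\n";  // 2 full groups probed
  return 0;
}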
// This allows process-wide sampling without depending on order of diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/hashtablez_sampler_test.cc b/TMessagesProj/jni/voip/webrtc/absl/container/internal/hashtablez_sampler_test.cc index 77cdf2fd93..665d518fc7 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/hashtablez_sampler_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/hashtablez_sampler_test.cc @@ -21,7 +21,7 @@ #include "gmock/gmock.h" #include "gtest/gtest.h" #include "absl/base/attributes.h" -#include "absl/container/internal/have_sse.h" +#include "absl/base/config.h" #include "absl/profiling/internal/sample_recorder.h" #include "absl/synchronization/blocking_counter.h" #include "absl/synchronization/internal/thread_pool.h" @@ -30,7 +30,7 @@ #include "absl/time/clock.h" #include "absl/time/time.h" -#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2 +#ifdef ABSL_INTERNAL_HAVE_SSE2 constexpr int kProbeLength = 16; #else constexpr int kProbeLength = 8; diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/have_sse.h b/TMessagesProj/jni/voip/webrtc/absl/container/internal/have_sse.h deleted file mode 100644 index e75e1a16d3..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/have_sse.h +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2018 The Abseil Authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// Shared config probing for SSE instructions used in Swiss tables. -#ifndef ABSL_CONTAINER_INTERNAL_HAVE_SSE_H_ -#define ABSL_CONTAINER_INTERNAL_HAVE_SSE_H_ - -#ifndef ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2 -#if defined(__SSE2__) || \ - (defined(_MSC_VER) && \ - (defined(_M_X64) || (defined(_M_IX86) && _M_IX86_FP >= 2))) -#define ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2 1 -#else -#define ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2 0 -#endif -#endif - -#ifndef ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3 -#ifdef __SSSE3__ -#define ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3 1 -#else -#define ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3 0 -#endif -#endif - -#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3 && \ - !ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2 -#error "Bad configuration!" -#endif - -#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2 -#include -#endif - -#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3 -#include -#endif - -#endif // ABSL_CONTAINER_INTERNAL_HAVE_SSE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/inlined_vector.h b/TMessagesProj/jni/voip/webrtc/absl/container/internal/inlined_vector.h index 2baf26f3ee..0398f53008 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/inlined_vector.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/inlined_vector.h @@ -12,8 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-#ifndef ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_INTERNAL_H_ -#define ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_INTERNAL_H_ +#ifndef ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ +#define ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ #include #include @@ -40,7 +40,6 @@ namespace inlined_vector_internal { #if !defined(__clang__) && defined(__GNUC__) #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Warray-bounds" -#pragma GCC diagnostic ignored "-Wmaybe-uninitialized" #endif template @@ -84,6 +83,11 @@ using IsMemcpyOk = absl::is_trivially_copy_assignable>, absl::is_trivially_destructible>>; +template +using IsMoveAssignOk = std::is_move_assignable>; +template +using IsSwapOk = absl::type_traits_internal::IsSwappable>; + template struct TypeIdentity { using type = T; @@ -121,8 +125,8 @@ struct DestroyAdapter { template struct Allocation { - Pointer data; - SizeType capacity; + Pointer data = nullptr; + SizeType capacity = 0; }; template class Storage { public: + struct MemcpyPolicy {}; + struct ElementwiseAssignPolicy {}; + struct ElementwiseSwapPolicy {}; + struct ElementwiseConstructPolicy {}; + + using MoveAssignmentPolicy = absl::conditional_t< + IsMemcpyOk::value, MemcpyPolicy, + absl::conditional_t::value, ElementwiseAssignPolicy, + ElementwiseConstructPolicy>>; + using SwapPolicy = absl::conditional_t< + IsMemcpyOk::value, MemcpyPolicy, + absl::conditional_t::value, ElementwiseSwapPolicy, + ElementwiseConstructPolicy>>; + static SizeType NextCapacity(SizeType current_capacity) { return current_capacity * 2; } @@ -361,7 +379,9 @@ class Storage { return data_.allocated.allocated_capacity; } - SizeType GetInlinedCapacity() const { return static_cast>(N); } + SizeType GetInlinedCapacity() const { + return static_cast>(kOptimalInlinedSize); + } StorageView MakeStorageView() { return GetIsAllocated() ? StorageView{GetAllocatedData(), GetSize(), @@ -465,8 +485,15 @@ class Storage { SizeType allocated_capacity; }; + // `kOptimalInlinedSize` is an automatically adjusted inlined capacity of the + // `InlinedVector`. Sometimes, it is possible to increase the capacity (from + // the user requested `N`) without increasing the size of the `InlinedVector`. + static constexpr size_t kOptimalInlinedSize = + (std::max)(N, sizeof(Allocated) / sizeof(ValueType)); + struct Inlined { - alignas(ValueType) char inlined_data[sizeof(ValueType[N])]; + alignas(ValueType) char inlined_data[sizeof( + ValueType[kOptimalInlinedSize])]; }; union Data { @@ -474,6 +501,13 @@ class Storage { Inlined inlined; }; + void SwapN(ElementwiseSwapPolicy, Storage* other, SizeType n); + void SwapN(ElementwiseConstructPolicy, Storage* other, SizeType n); + + void SwapInlinedElements(MemcpyPolicy, Storage* other); + template + void SwapInlinedElements(NotMemcpyPolicy, Storage* other); + template ABSL_ATTRIBUTE_NOINLINE Reference EmplaceBackSlow(Args&&... args); @@ -611,7 +645,7 @@ auto Storage::Resize(ValueAdapter values, SizeType new_size) // Steps: // a. Allocate new backing store. // b. Construct new elements in new backing store. - // c. Move existing elements from old backing store to now. + // c. Move existing elements from old backing store to new backing store. // d. Destroy all elements in old backing store. // Use transactional wrappers for the first two steps so we can roll // back if necessary due to exceptions. 
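`kOptimalInlinedSize` grows the inline capacity for free whenever the `N` requested elements occupy less space than the heap-allocated representation. Worked numbers under an assumed 64-bit layout, where `Allocated` holds a pointer plus a capacity (16 bytes):

#include <algorithm>
#include <cstddef>
#include <iostream>

int main() {
  const std::size_t kSizeofAllocated = 16;       // assumed: pointer + size on 64-bit
  const std::size_t N = 1;                       // user-requested inline capacity
  const std::size_t kSizeofValue = sizeof(int);  // element type int, 4 bytes

  const std::size_t kOptimalInlinedSize =
      (std::max)(N, kSizeofAllocated / kSizeofValue);
  std::cout << kOptimalInlinedSize << "\n";  // 4: three extra inline slots for free
  return 0;
}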
@@ -642,8 +676,8 @@ auto Storage::Insert(ConstIterator pos, ValueAdapter values, SizeType insert_count) -> Iterator { StorageView storage_view = MakeStorageView(); - SizeType insert_index = - std::distance(ConstIterator(storage_view.data), pos); + auto insert_index = static_cast>( + std::distance(ConstIterator(storage_view.data), pos)); SizeType insert_end_index = insert_index + insert_count; SizeType new_size = storage_view.size + insert_count; @@ -785,9 +819,9 @@ auto Storage::Erase(ConstIterator from, ConstIterator to) -> Iterator { StorageView storage_view = MakeStorageView(); - SizeType erase_size = std::distance(from, to); - SizeType erase_index = - std::distance(ConstIterator(storage_view.data), from); + auto erase_size = static_cast>(std::distance(from, to)); + auto erase_index = static_cast>( + std::distance(ConstIterator(storage_view.data), from)); SizeType erase_end_index = erase_index + erase_size; IteratorValueAdapter> move_values( @@ -887,26 +921,7 @@ auto Storage::Swap(Storage* other_storage_ptr) -> void { if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) { swap(data_.allocated, other_storage_ptr->data_.allocated); } else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) { - Storage* small_ptr = this; - Storage* large_ptr = other_storage_ptr; - if (small_ptr->GetSize() > large_ptr->GetSize()) swap(small_ptr, large_ptr); - - for (SizeType i = 0; i < small_ptr->GetSize(); ++i) { - swap(small_ptr->GetInlinedData()[i], large_ptr->GetInlinedData()[i]); - } - - IteratorValueAdapter> move_values( - MoveIterator(large_ptr->GetInlinedData() + small_ptr->GetSize())); - - ConstructElements(large_ptr->GetAllocator(), - small_ptr->GetInlinedData() + small_ptr->GetSize(), - move_values, - large_ptr->GetSize() - small_ptr->GetSize()); - - DestroyAdapter::DestroyElements( - large_ptr->GetAllocator(), - large_ptr->GetInlinedData() + small_ptr->GetSize(), - large_ptr->GetSize() - small_ptr->GetSize()); + SwapInlinedElements(SwapPolicy{}, other_storage_ptr); } else { Storage* allocated_ptr = this; Storage* inlined_ptr = other_storage_ptr; @@ -942,7 +957,69 @@ auto Storage::Swap(Storage* other_storage_ptr) -> void { swap(GetAllocator(), other_storage_ptr->GetAllocator()); } -// End ignore "array-bounds" and "maybe-uninitialized" +template +void Storage::SwapN(ElementwiseSwapPolicy, Storage* other, + SizeType n) { + std::swap_ranges(GetInlinedData(), GetInlinedData() + n, + other->GetInlinedData()); +} + +template +void Storage::SwapN(ElementwiseConstructPolicy, Storage* other, + SizeType n) { + Pointer a = GetInlinedData(); + Pointer b = other->GetInlinedData(); + // see note on allocators in `SwapInlinedElements`. + A& allocator_a = GetAllocator(); + A& allocator_b = other->GetAllocator(); + for (SizeType i = 0; i < n; ++i, ++a, ++b) { + ValueType tmp(std::move(*a)); + + AllocatorTraits::destroy(allocator_a, a); + AllocatorTraits::construct(allocator_b, a, std::move(*b)); + + AllocatorTraits::destroy(allocator_b, b); + AllocatorTraits::construct(allocator_a, b, std::move(tmp)); + } +} + +template +void Storage::SwapInlinedElements(MemcpyPolicy, Storage* other) { + Data tmp = data_; + data_ = other->data_; + other->data_ = tmp; +} + +template +template +void Storage::SwapInlinedElements(NotMemcpyPolicy policy, + Storage* other) { + // Note: `destroy` needs to use pre-swap allocator while `construct` - + // post-swap allocator. Allocators will be swaped later on outside of + // `SwapInlinedElements`. 
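SwapInlinedElements dispatches on a policy: memcpy the whole inline buffer, swap element-wise, or destroy-and-reconstruct when the type is not swappable. A rough model of the element-wise path on plain arrays:

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <string>
#include <utility>

int main() {
  // Swap the common prefix element-wise, then move the larger side's tail
  // across; the real code destroys the moved-from tail before swapping sizes.
  std::string a[4] = {"a0", "a1"};
  std::string b[4] = {"b0", "b1", "b2", "b3"};
  std::size_t a_size = 2, b_size = 4;

  std::swap_ranges(a, a + a_size, b);             // ElementwiseSwapPolicy on the prefix
  std::move(b + a_size, b + b_size, a + a_size);  // relocate the tail b -> a
  std::swap(a_size, b_size);

  std::cout << a[0] << ' ' << a[2] << ' ' << a[3] << "\n";  // b0 b2 b3
  return 0;
}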
+ Storage* small_ptr = this; + Storage* large_ptr = other; + if (small_ptr->GetSize() > large_ptr->GetSize()) { + std::swap(small_ptr, large_ptr); + } + + auto small_size = small_ptr->GetSize(); + auto diff = large_ptr->GetSize() - small_size; + SwapN(policy, other, small_size); + + IteratorValueAdapter> move_values( + MoveIterator(large_ptr->GetInlinedData() + small_size)); + + ConstructElements(large_ptr->GetAllocator(), + small_ptr->GetInlinedData() + small_size, move_values, + diff); + + DestroyAdapter::DestroyElements(large_ptr->GetAllocator(), + large_ptr->GetInlinedData() + small_size, + diff); +} + +// End ignore "array-bounds" #if !defined(__clang__) && defined(__GNUC__) #pragma GCC diagnostic pop #endif @@ -951,4 +1028,4 @@ auto Storage::Swap(Storage* other_storage_ptr) -> void { ABSL_NAMESPACE_END } // namespace absl -#endif // ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_INTERNAL_H_ +#endif // ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/node_hash_policy.h b/TMessagesProj/jni/voip/webrtc/absl/container/internal/node_hash_policy.h deleted file mode 100644 index 4617162f0b..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/node_hash_policy.h +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright 2018 The Abseil Authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// Adapts a policy for nodes. -// -// The node policy should model: -// -// struct Policy { -// // Returns a new node allocated and constructed using the allocator, using -// // the specified arguments. -// template -// value_type* new_element(Alloc* alloc, Args&&... args) const; -// -// // Destroys and deallocates node using the allocator. -// template -// void delete_element(Alloc* alloc, value_type* node) const; -// }; -// -// It may also optionally define `value()` and `apply()`. For documentation on -// these, see hash_policy_traits.h. - -#ifndef ABSL_CONTAINER_INTERNAL_NODE_HASH_POLICY_H_ -#define ABSL_CONTAINER_INTERNAL_NODE_HASH_POLICY_H_ - -#include -#include -#include -#include -#include - -#include "absl/base/config.h" - -namespace absl { -ABSL_NAMESPACE_BEGIN -namespace container_internal { - -template -struct node_hash_policy { - static_assert(std::is_lvalue_reference::value, ""); - - using slot_type = typename std::remove_cv< - typename std::remove_reference::type>::type*; - - template - static void construct(Alloc* alloc, slot_type* slot, Args&&... 
args) { - *slot = Policy::new_element(alloc, std::forward(args)...); - } - - template - static void destroy(Alloc* alloc, slot_type* slot) { - Policy::delete_element(alloc, *slot); - } - - template - static void transfer(Alloc*, slot_type* new_slot, slot_type* old_slot) { - *new_slot = *old_slot; - } - - static size_t space_used(const slot_type* slot) { - if (slot == nullptr) return Policy::element_space_used(nullptr); - return Policy::element_space_used(*slot); - } - - static Reference element(slot_type* slot) { return **slot; } - - template - static auto value(T* elem) -> decltype(P::value(elem)) { - return P::value(elem); - } - - template - static auto apply(Ts&&... ts) -> decltype(P::apply(std::forward(ts)...)) { - return P::apply(std::forward(ts)...); - } -}; - -} // namespace container_internal -ABSL_NAMESPACE_END -} // namespace absl - -#endif // ABSL_CONTAINER_INTERNAL_NODE_HASH_POLICY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/node_hash_policy_test.cc b/TMessagesProj/jni/voip/webrtc/absl/container/internal/node_hash_policy_test.cc deleted file mode 100644 index 84aabba968..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/node_hash_policy_test.cc +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2018 The Abseil Authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "absl/container/internal/node_hash_policy.h" - -#include - -#include "gmock/gmock.h" -#include "gtest/gtest.h" -#include "absl/container/internal/hash_policy_traits.h" - -namespace absl { -ABSL_NAMESPACE_BEGIN -namespace container_internal { -namespace { - -using ::testing::Pointee; - -struct Policy : node_hash_policy { - using key_type = int; - using init_type = int; - - template - static int* new_element(Alloc* alloc, int value) { - return new int(value); - } - - template - static void delete_element(Alloc* alloc, int* elem) { - delete elem; - } -}; - -using NodePolicy = hash_policy_traits; - -struct NodeTest : ::testing::Test { - std::allocator alloc; - int n = 53; - int* a = &n; -}; - -TEST_F(NodeTest, ConstructDestroy) { - NodePolicy::construct(&alloc, &a, 42); - EXPECT_THAT(a, Pointee(42)); - NodePolicy::destroy(&alloc, &a); -} - -TEST_F(NodeTest, transfer) { - int s = 42; - int* b = &s; - NodePolicy::transfer(&alloc, &a, &b); - EXPECT_EQ(&s, a); -} - -} // namespace -} // namespace container_internal -ABSL_NAMESPACE_END -} // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/node_slot_policy.h b/TMessagesProj/jni/voip/webrtc/absl/container/internal/node_slot_policy.h new file mode 100644 index 0000000000..baba5743c8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/node_slot_policy.h @@ -0,0 +1,92 @@ +// Copyright 2018 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Adapts a policy for nodes. +// +// The node policy should model: +// +// struct Policy { +// // Returns a new node allocated and constructed using the allocator, using +// // the specified arguments. +// template +// value_type* new_element(Alloc* alloc, Args&&... args) const; +// +// // Destroys and deallocates node using the allocator. +// template +// void delete_element(Alloc* alloc, value_type* node) const; +// }; +// +// It may also optionally define `value()` and `apply()`. For documentation on +// these, see hash_policy_traits.h. + +#ifndef ABSL_CONTAINER_INTERNAL_NODE_SLOT_POLICY_H_ +#define ABSL_CONTAINER_INTERNAL_NODE_SLOT_POLICY_H_ + +#include +#include +#include +#include +#include + +#include "absl/base/config.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace container_internal { + +template +struct node_slot_policy { + static_assert(std::is_lvalue_reference::value, ""); + + using slot_type = typename std::remove_cv< + typename std::remove_reference::type>::type*; + + template + static void construct(Alloc* alloc, slot_type* slot, Args&&... args) { + *slot = Policy::new_element(alloc, std::forward(args)...); + } + + template + static void destroy(Alloc* alloc, slot_type* slot) { + Policy::delete_element(alloc, *slot); + } + + template + static void transfer(Alloc*, slot_type* new_slot, slot_type* old_slot) { + *new_slot = *old_slot; + } + + static size_t space_used(const slot_type* slot) { + if (slot == nullptr) return Policy::element_space_used(nullptr); + return Policy::element_space_used(*slot); + } + + static Reference element(slot_type* slot) { return **slot; } + + template + static auto value(T* elem) -> decltype(P::value(elem)) { + return P::value(elem); + } + + template + static auto apply(Ts&&... ts) -> decltype(P::apply(std::forward(ts)...)) { + return P::apply(std::forward(ts)...); + } +}; + +} // namespace container_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_CONTAINER_INTERNAL_NODE_SLOT_POLICY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set.cc b/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set.cc index 687bcb8a4d..c63a2e02d1 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set.cc @@ -23,13 +23,17 @@ namespace absl { ABSL_NAMESPACE_BEGIN namespace container_internal { +// A single block of empty control bytes for tables without any slots allocated. +// This enables removing a branch in the hot path of find(). alignas(16) ABSL_CONST_INIT ABSL_DLL const ctrl_t kEmptyGroup[16] = { ctrl_t::kSentinel, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty}; +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL constexpr size_t Group::kWidth; +#endif // Returns "random" seed. 
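node_slot_policy (the renamed node_hash_policy) adapts node-based containers by making the slot a pointer to a heap-allocated element, so a policy only needs new_element/delete_element and transfer degenerates to a pointer copy. A self-contained model of that adaptation; the names are illustrative, not the real API:

#include <cassert>
#include <utility>

struct IntNodePolicy {
  static int* new_element(void* /*alloc*/, int value) { return new int(value); }
  static void delete_element(void* /*alloc*/, int* elem) { delete elem; }
};

template <typename Policy>
struct NodeSlotModel {
  using slot_type = int*;  // the slot stores a pointer to the element
  template <typename Alloc, typename... Args>
  static void construct(Alloc* alloc, slot_type* slot, Args&&... args) {
    *slot = Policy::new_element(alloc, std::forward<Args>(args)...);
  }
  template <typename Alloc>
  static void destroy(Alloc* alloc, slot_type* slot) {
    Policy::delete_element(alloc, *slot);
  }
  static void transfer(void*, slot_type* new_slot, slot_type* old_slot) {
    *new_slot = *old_slot;  // ownership moves with the pointer
  }
};

int main() {
  void* alloc = nullptr;  // the model ignores the allocator
  int* a = nullptr;
  int* b = nullptr;
  NodeSlotModel<IntNodePolicy>::construct(&alloc, &a, 42);
  assert(*a == 42);
  NodeSlotModel<IntNodePolicy>::transfer(nullptr, &b, &a);
  NodeSlotModel<IntNodePolicy>::destroy(&alloc, &b);
  return 0;
}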
inline size_t RandomSeed() { diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set.h b/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set.h index 7409d5eca0..de455d6cb5 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set.h @@ -53,51 +53,121 @@ // // IMPLEMENTATION DETAILS // -// The table stores elements inline in a slot array. In addition to the slot -// array the table maintains some control state per slot. The extra state is one -// byte per slot and stores empty or deleted marks, or alternatively 7 bits from -// the hash of an occupied slot. The table is split into logical groups of -// slots, like so: +// # Table Layout +// +// A raw_hash_set's backing array consists of control bytes followed by slots +// that may or may not contain objects. +// +// The layout of the backing array, for `capacity` slots, is thus, as a +// pseudo-struct: +// +// struct BackingArray { +// // Control bytes for the "real" slots. +// ctrl_t ctrl[capacity]; +// // Always `ctrl_t::kSentinel`. This is used by iterators to find when to +// // stop and serves no other purpose. +// ctrl_t sentinel; +// // A copy of the first `kWidth - 1` elements of `ctrl`. This is used so +// // that if a probe sequence picks a value near the end of `ctrl`, +// // `Group` will have valid control bytes to look at. +// ctrl_t clones[kWidth - 1]; +// // The actual slot data. +// slot_type slots[capacity]; +// }; +// +// The length of this array is computed by `AllocSize()` below. +// +// Control bytes (`ctrl_t`) are bytes (collected into groups of a +// platform-specific size) that define the state of the corresponding slot in +// the slot array. Group manipulation is tightly optimized to be as efficient +// as possible: SSE and friends on x86, clever bit operations on other arches. // // Group 1 Group 2 Group 3 // +---------------+---------------+---------------+ // | | | | | | | | | | | | | | | | | | | | | | | | | // +---------------+---------------+---------------+ // -// On lookup the hash is split into two parts: -// - H2: 7 bits (those stored in the control bytes) -// - H1: the rest of the bits -// The groups are probed using H1. For each group the slots are matched to H2 in -// parallel. Because H2 is 7 bits (128 states) and the number of slots per group -// is low (8 or 16) in almost all cases a match in H2 is also a lookup hit. +// Each control byte is either a special value for empty slots, deleted slots +// (sometimes called *tombstones*), and a special end-of-table marker used by +// iterators, or, if occupied, seven bits (H2) from the hash of the value in the +// corresponding slot. +// +// Storing control bytes in a separate array also has beneficial cache effects, +// since more logical slots will fit into a cache line. +// +// # Hashing +// +// We compute two separate hashes, `H1` and `H2`, from the hash of an object. +// `H1(hash(x))` is an index into `slots`, and essentially the starting point +// for the probe sequence. `H2(hash(x))` is a 7-bit value used to filter out +// objects that cannot possibly be the one we are looking for. +// +// # Table operations. // -// On insert, once the right group is found (as in lookup), its slots are -// filled in order. +// The key operations are `insert`, `find`, and `erase`. // -// On erase a slot is cleared. In case the group did not have any empty slots -// before the erase, the erased slot is marked as deleted. 
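To make the H1/H2 split described above concrete, a stand-alone sketch (the real H1 additionally mixes in a per-table salt, as shown further down in this file):

#include <cstdint>
#include <cstdio>

// H1: the probe starting point (the upper 57 bits on 64-bit platforms).
// H2: the 7-bit tag stored in the control byte of an occupied slot.
inline uint64_t SketchH1(uint64_t hash) { return hash >> 7; }
inline uint8_t SketchH2(uint64_t hash) { return hash & 0x7F; }

int main() {
  uint64_t hash = 0x9E3779B97F4A7C15ULL;  // an arbitrary example value
  std::printf("H1 = %llu, H2 = %u\n",
              static_cast<unsigned long long>(SketchH1(hash)),
              static_cast<unsigned>(SketchH2(hash)));
  return 0;
}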
+// Since `insert` and `erase` are implemented in terms of `find`, we describe +// `find` first. To `find` a value `x`, we compute `hash(x)`. From +// `H1(hash(x))` and the capacity, we construct a `probe_seq` that visits every +// group of slots in some interesting order. // -// Groups without empty slots (but maybe with deleted slots) extend the probe -// sequence. The probing algorithm is quadratic. Given N the number of groups, -// the probing function for the i'th probe is: +// We now walk through these indices. At each index, we select the entire group +// starting with that index and extract potential candidates: occupied slots +// with a control byte equal to `H2(hash(x))`. If we find an empty slot in the +// group, we stop and return an error. Each candidate slot `y` is compared with +// `x`; if `x == y`, we are done and return `&y`; otherwise we contine to the +// next probe index. Tombstones effectively behave like full slots that never +// match the value we're looking for. // -// P(0) = H1 % N +// The `H2` bits ensure when we compare a slot to an object with `==`, we are +// likely to have actually found the object. That is, the chance is low that +// `==` is called and returns `false`. Thus, when we search for an object, we +// are unlikely to call `==` many times. This likelyhood can be analyzed as +// follows (assuming that H2 is a random enough hash function). // -// P(i) = (P(i - 1) + i) % N +// Let's assume that there are `k` "wrong" objects that must be examined in a +// probe sequence. For example, when doing a `find` on an object that is in the +// table, `k` is the number of objects between the start of the probe sequence +// and the final found object (not including the final found object). The +// expected number of objects with an H2 match is then `k/128`. Measurements +// and analysis indicate that even at high load factors, `k` is less than 32, +// meaning that the number of "false positive" comparisons we must perform is +// less than 1/8 per `find`. + +// `insert` is implemented in terms of `unchecked_insert`, which inserts a +// value presumed to not be in the table (violating this requirement will cause +// the table to behave erratically). Given `x` and its hash `hash(x)`, to insert +// it, we construct a `probe_seq` once again, and use it to find the first +// group with an unoccupied (empty *or* deleted) slot. We place `x` into the +// first such slot in the group and mark it as full with `x`'s H2. // -// This probing function guarantees that after N probes, all the groups of the -// table will be probed exactly once. +// To `insert`, we compose `unchecked_insert` with `find`. We compute `h(x)` and +// perform a `find` to see if it's already present; if it is, we're done. If +// it's not, we may decide the table is getting overcrowded (i.e. the load +// factor is greater than 7/8 for big tables; `is_small()` tables use a max load +// factor of 1); in this case, we allocate a bigger array, `unchecked_insert` +// each element of the table into the new array (we know that no insertion here +// will insert an already-present value), and discard the old backing array. At +// this point, we may `unchecked_insert` the value `x`. // -// The control state and slot array are stored contiguously in a shared heap -// allocation. The layout of this allocation is: `capacity()` control bytes, -// one sentinel control byte, `Group::kWidth - 1` cloned control bytes, -// , `capacity()` slots. 
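The false-positive analysis above reduces to simple arithmetic; a short sketch (the 128 comes from the 7 bits of H2):

#include <cstdio>

int main() {
  // Walking past k "wrong" occupied slots, each one passes the H2 filter with
  // probability 1/128, so roughly k/128 full key comparisons are wasted.
  for (double k : {4.0, 8.0, 16.0}) {
    std::printf("k = %2.0f -> expected spurious == calls per find: %.4f\n", k,
                k / 128.0);
  }
  return 0;
}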
The sentinel control byte is used in -// iteration so we know when we reach the end of the table. The cloned control -// bytes at the end of the table are cloned from the beginning of the table so -// groups that begin near the end of the table can see a full group. In cases in -// which there are more than `capacity()` cloned control bytes, the extra bytes -// are `kEmpty`, and these ensure that we always see at least one empty slot and -// can stop an unsuccessful search. +// Below, `unchecked_insert` is partly implemented by `prepare_insert`, which +// presents a viable, initialized slot pointee to the caller. +// +// `erase` is implemented in terms of `erase_at`, which takes an index to a +// slot. Given an offset, we simply create a tombstone and destroy its contents. +// If we can prove that the slot would not appear in a probe sequence, we can +// make the slot as empty, instead. We can prove this by observing that if a +// group has any empty slots, it has never been full (assuming we never create +// an empty slot in a group with no empties, which this heuristic guarantees we +// never do) and find would stop at this group anyways (since it does not probe +// beyond groups with empties). +// +// `erase` is `erase_at` composed with `find`: if we +// have a value `x`, we can perform a `find`, and then `erase_at` the resulting +// slot. +// +// To iterate, we simply traverse the array, skipping empty and deleted slots +// and stopping when we hit a `kSentinel`. #ifndef ABSL_CONTAINER_INTERNAL_RAW_HASH_SET_H_ #define ABSL_CONTAINER_INTERNAL_RAW_HASH_SET_H_ @@ -113,7 +183,9 @@ #include #include +#include "absl/base/config.h" #include "absl/base/internal/endian.h" +#include "absl/base/internal/prefetch.h" #include "absl/base/optimization.h" #include "absl/base/port.h" #include "absl/container/internal/common.h" @@ -122,12 +194,27 @@ #include "absl/container/internal/hash_policy_traits.h" #include "absl/container/internal/hashtable_debug_hooks.h" #include "absl/container/internal/hashtablez_sampler.h" -#include "absl/container/internal/have_sse.h" #include "absl/memory/memory.h" #include "absl/meta/type_traits.h" #include "absl/numeric/bits.h" #include "absl/utility/utility.h" +#ifdef ABSL_INTERNAL_HAVE_SSE2 +#include +#endif + +#ifdef ABSL_INTERNAL_HAVE_SSSE3 +#include +#endif + +#ifdef _MSC_VER +#include +#endif + +#ifdef ABSL_INTERNAL_HAVE_ARM_NEON +#include +#endif + namespace absl { ABSL_NAMESPACE_BEGIN namespace container_internal { @@ -142,14 +229,40 @@ template void SwapAlloc(AllocType& /*lhs*/, AllocType& /*rhs*/, std::false_type /* propagate_on_container_swap */) {} +// The state for a probe sequence. +// +// Currently, the sequence is a triangular progression of the form +// +// p(i) := Width * (i^2 + i)/2 + hash (mod mask + 1) +// +// The use of `Width` ensures that each probe step does not overlap groups; +// the sequence effectively outputs the addresses of *groups* (although not +// necessarily aligned to any boundary). The `Group` machinery allows us +// to check an entire group with minimal branching. +// +// Wrapping around at `mask + 1` is important, but not for the obvious reason. +// As described above, the first few entries of the control byte array +// are mirrored at the end of the array, which `Group` will find and use +// for selecting candidates. However, when those candidates' slots are +// actually inspected, there are no corresponding slots for the cloned bytes, +// so we need to make sure we've treated those offsets as "wrapping around". 
+// +// It turns out that this probe sequence visits every group exactly once if the +// number of groups is a power of two, since (i^2+i)/2 is a bijection in +// Z/(2^m). See https://en.wikipedia.org/wiki/Quadratic_probing template class probe_seq { public: + // Creates a new probe sequence using `hash` as the initial value of the + // sequence and `mask` (usually the capacity of the table) as the mask to + // apply to each value in the progression. probe_seq(size_t hash, size_t mask) { assert(((mask + 1) & mask) == 0 && "not a mask"); mask_ = mask; offset_ = hash & mask_; } + + // The offset within the table, i.e., the value `p(i)` above. size_t offset() const { return offset_; } size_t offset(size_t i) const { return (offset_ + i) & mask_; } @@ -158,7 +271,7 @@ class probe_seq { offset_ += index_; offset_ &= mask_; } - // 0-based probe index. The i-th probe in the probe sequence. + // 0-based probe index, a multiple of `Width`. size_t index() const { return index_; } private: @@ -182,9 +295,9 @@ struct IsDecomposable : std::false_type {}; template struct IsDecomposable< - absl::void_t(), - std::declval()...))>, + absl::void_t(), + std::declval()...))>, Policy, Hash, Eq, Ts...> : std::true_type {}; // TODO(alkis): Switch to std::is_nothrow_swappable when gcc/clang supports it. @@ -200,57 +313,84 @@ constexpr bool IsNoThrowSwappable(std::false_type /* is_swappable */) { template uint32_t TrailingZeros(T x) { - ABSL_INTERNAL_ASSUME(x != 0); + ABSL_ASSUME(x != 0); return static_cast(countr_zero(x)); } -// An abstraction over a bitmask. It provides an easy way to iterate through the -// indexes of the set bits of a bitmask. When Shift=0 (platforms with SSE), -// this is a true bitmask. On non-SSE, platforms the arithematic used to -// emulate the SSE behavior works in bytes (Shift=3) and leaves each bytes as -// either 0x00 or 0x80. +// An abstract bitmask, such as that emitted by a SIMD instruction. // -// For example: -// for (int i : BitMask(0x5)) -> yields 0, 2 -// for (int i : BitMask(0x0000000080800000)) -> yields 2, 3 +// Specifically, this type implements a simple bitset whose representation is +// controlled by `SignificantBits` and `Shift`. `SignificantBits` is the number +// of abstract bits in the bitset, while `Shift` is the log-base-two of the +// width of an abstract bit in the representation. +// This mask provides operations for any number of real bits set in an abstract +// bit. To add iteration on top of that, implementation must guarantee no more +// than one real bit is set in an abstract bit. template -class BitMask { - static_assert(std::is_unsigned::value, ""); - static_assert(Shift == 0 || Shift == 3, ""); - +class NonIterableBitMask { public: - // These are useful for unit tests (gunit). - using value_type = int; - using iterator = BitMask; - using const_iterator = BitMask; + explicit NonIterableBitMask(T mask) : mask_(mask) {} - explicit BitMask(T mask) : mask_(mask) {} - BitMask& operator++() { - mask_ &= (mask_ - 1); - return *this; - } - explicit operator bool() const { return mask_ != 0; } - uint32_t operator*() const { return LowestBitSet(); } + explicit operator bool() const { return this->mask_ != 0; } + + // Returns the index of the lowest *abstract* bit set in `self`. uint32_t LowestBitSet() const { return container_internal::TrailingZeros(mask_) >> Shift; } + + // Returns the index of the highest *abstract* bit set in `self`. 
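// The triangular progression implemented by probe_seq above, as a
// stand-alone sketch (kWidth = 16 is assumed, as on SSE2 builds; this is
// illustrative only, not the real class):
#include <cstddef>
#include <cstdio>

int main() {
  const std::size_t kWidth = 16;
  const std::size_t mask = 63;           // a valid capacity: 2^6 - 1
  std::size_t offset = 0x12345 & mask;   // H1 & mask, as in the constructor
  std::size_t index = 0;
  for (int probe = 0; probe < 6; ++probe) {
    std::printf("probe %d -> group starting at offset %zu\n", probe, offset);
    index += kWidth;                     // step i adds i*kWidth in total,
    offset = (offset + index) & mask;    // giving the (i^2+i)/2 progression
  }
  return 0;
}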
uint32_t HighestBitSet() const { return static_cast((bit_width(mask_) - 1) >> Shift); } - BitMask begin() const { return *this; } - BitMask end() const { return BitMask(0); } - + // Return the number of trailing zero *abstract* bits. uint32_t TrailingZeros() const { return container_internal::TrailingZeros(mask_) >> Shift; } + // Return the number of leading zero *abstract* bits. uint32_t LeadingZeros() const { constexpr int total_significant_bits = SignificantBits << Shift; constexpr int extra_bits = sizeof(T) * 8 - total_significant_bits; return static_cast(countl_zero(mask_ << extra_bits)) >> Shift; } + T mask_; +}; + +// Mask that can be iterable +// +// For example, when `SignificantBits` is 16 and `Shift` is zero, this is just +// an ordinary 16-bit bitset occupying the low 16 bits of `mask`. When +// `SignificantBits` is 8 and `Shift` is 3, abstract bits are represented as +// the bytes `0x00` and `0x80`, and it occupies all 64 bits of the bitmask. +// +// For example: +// for (int i : BitMask(0b101)) -> yields 0, 2 +// for (int i : BitMask(0x0000000080800000)) -> yields 2, 3 +template +class BitMask : public NonIterableBitMask { + using Base = NonIterableBitMask; + static_assert(std::is_unsigned::value, ""); + static_assert(Shift == 0 || Shift == 3, ""); + + public: + explicit BitMask(T mask) : Base(mask) {} + // BitMask is an iterator over the indices of its abstract bits. + using value_type = int; + using iterator = BitMask; + using const_iterator = BitMask; + + BitMask& operator++() { + this->mask_ &= (this->mask_ - 1); + return *this; + } + + uint32_t operator*() const { return Base::LowestBitSet(); } + + BitMask begin() const { return *this; } + BitMask end() const { return BitMask(0); } + private: friend bool operator==(const BitMask& a, const BitMask& b) { return a.mask_ == b.mask_; @@ -258,15 +398,27 @@ class BitMask { friend bool operator!=(const BitMask& a, const BitMask& b) { return a.mask_ != b.mask_; } - - T mask_; }; using h2_t = uint8_t; // The values here are selected for maximum performance. See the static asserts -// below for details. We use an enum class so that when strict aliasing is -// enabled, the compiler knows ctrl_t doesn't alias other types. +// below for details. + +// A `ctrl_t` is a single control byte, which can have one of four +// states: empty, deleted, full (which has an associated seven-bit h2_t value) +// and the sentinel. They have the following bit patterns: +// +// empty: 1 0 0 0 0 0 0 0 +// deleted: 1 1 1 1 1 1 1 0 +// full: 0 h h h h h h h // h represents the hash bits. +// sentinel: 1 1 1 1 1 1 1 1 +// +// These values are specifically tuned for SSE-flavored SIMD. +// The static_asserts below detail the source of these choices. +// +// We use an enum class so that when strict aliasing is enabled, the compiler +// knows ctrl_t doesn't alias other types. enum class ctrl_t : int8_t { kEmpty = -128, // 0b10000000 kDeleted = -2, // 0b11111110 @@ -294,15 +446,17 @@ static_assert( static_cast(ctrl_t::kSentinel) & 0x7F) != 0, "ctrl_t::kEmpty and ctrl_t::kDeleted must share an unset bit that is not " "shared by ctrl_t::kSentinel to make the scalar test for " - "MatchEmptyOrDeleted() efficient"); + "MaskEmptyOrDeleted() efficient"); static_assert(ctrl_t::kDeleted == static_cast(-2), "ctrl_t::kDeleted must be -2 to make the implementation of " "ConvertSpecialToEmptyAndFullToDeleted efficient"); -// A single block of empty control bytes for tables without any slots allocated. -// This enables removing a branch in the hot path of find(). 
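The BitMask iteration above is a classic clear-the-lowest-set-bit loop; a stand-alone sketch of the Shift = 0 (SSE) flavour, assuming the GCC/Clang __builtin_ctz builtin:

#include <cstdint>
#include <cstdio>

int main() {
  uint16_t mask = 0b101;             // abstract bits 0 and 2 set, as in the
                                     // documentation example above
  while (mask != 0) {
    int slot = __builtin_ctz(mask);  // index of the lowest set bit
    std::printf("candidate slot %d\n", slot);
    mask &= (mask - 1);              // clear it, as BitMask::operator++ does
  }
  return 0;
}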
ABSL_DLL extern const ctrl_t kEmptyGroup[16]; + +// Returns a pointer to a control byte group that can be used by empty tables. inline ctrl_t* EmptyGroup() { + // Const must be cast away here; no uses of this function will actually write + // to it, because it is only used for empty tables. return const_cast(kEmptyGroup); } @@ -310,28 +464,61 @@ inline ctrl_t* EmptyGroup() { // randomize insertion order within groups. bool ShouldInsertBackwards(size_t hash, const ctrl_t* ctrl); -// Returns a hash seed. +// Returns a per-table, hash salt, which changes on resize. This gets mixed into +// H1 to randomize iteration order per-table. // // The seed consists of the ctrl_ pointer, which adds enough entropy to ensure // non-determinism of iteration order in most cases. -inline size_t HashSeed(const ctrl_t* ctrl) { +inline size_t PerTableSalt(const ctrl_t* ctrl) { // The low bits of the pointer have little or no entropy because of // alignment. We shift the pointer to try to use higher entropy bits. A // good number seems to be 12 bits, because that aligns with page size. return reinterpret_cast(ctrl) >> 12; } - +// Extracts the H1 portion of a hash: 57 bits mixed with a per-table salt. inline size_t H1(size_t hash, const ctrl_t* ctrl) { - return (hash >> 7) ^ HashSeed(ctrl); + return (hash >> 7) ^ PerTableSalt(ctrl); } + +// Extracts the H2 portion of a hash: the 7 bits not used for H1. +// +// These are used as an occupied control byte. inline h2_t H2(size_t hash) { return hash & 0x7F; } +// Helpers for checking the state of a control byte. inline bool IsEmpty(ctrl_t c) { return c == ctrl_t::kEmpty; } inline bool IsFull(ctrl_t c) { return c >= static_cast(0); } inline bool IsDeleted(ctrl_t c) { return c == ctrl_t::kDeleted; } inline bool IsEmptyOrDeleted(ctrl_t c) { return c < ctrl_t::kSentinel; } -#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2 +#ifdef ABSL_INTERNAL_HAVE_SSE2 +// Quick reference guide for intrinsics used below: +// +// * __m128i: An XMM (128-bit) word. +// +// * _mm_setzero_si128: Returns a zero vector. +// * _mm_set1_epi8: Returns a vector with the same i8 in each lane. +// +// * _mm_subs_epi8: Saturating-subtracts two i8 vectors. +// * _mm_and_si128: Ands two i128s together. +// * _mm_or_si128: Ors two i128s together. +// * _mm_andnot_si128: And-nots two i128s together. +// +// * _mm_cmpeq_epi8: Component-wise compares two i8 vectors for equality, +// filling each lane with 0x00 or 0xff. +// * _mm_cmpgt_epi8: Same as above, but using > rather than ==. +// +// * _mm_loadu_si128: Performs an unaligned load of an i128. +// * _mm_storeu_si128: Performs an unaligned store of an i128. +// +// * _mm_sign_epi8: Retains, negates, or zeroes each i8 lane of the first +// argument if the corresponding lane of the second +// argument is positive, negative, or zero, respectively. +// * _mm_movemask_epi8: Selects the sign bit out of each i8 lane and produces a +// bitmask consisting of those bits. +// * _mm_shuffle_epi8: Selects i8s from the first argument, using the low +// four bits of each i8 lane in the second argument as +// indices. // https://github.com/abseil/abseil-cpp/issues/209 // https://gcc.gnu.org/bugzilla/show_bug.cgi?id=87853 @@ -358,33 +545,34 @@ struct GroupSse2Impl { // Returns a bitmask representing the positions of slots that match hash. 
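// A stand-alone illustration of the intrinsics listed above, mirroring what
// GroupSse2Impl::Match immediately below does (a sketch, not a drop-in):
#include <cstdint>
#include <cstdio>
#include <emmintrin.h>  // SSE2

int main() {
  // 16 control bytes: -128 = kEmpty, -2 = kDeleted, -1 = kSentinel,
  // non-negative values are the H2 tags of full slots.
  alignas(16) int8_t ctrl[16] = {-128, 7, -2, 7, -128, 7, -1, 7,
                                 7,    5, 3,  1, 1,    1, 1,  1};
  __m128i group = _mm_loadu_si128(reinterpret_cast<const __m128i*>(ctrl));
  __m128i tag = _mm_set1_epi8(7);  // the H2 value being looked up
  unsigned mask = static_cast<unsigned>(
      _mm_movemask_epi8(_mm_cmpeq_epi8(group, tag)));
  std::printf("match bitmask: 0x%04x\n", mask);  // one bit per matching slot
  return 0;
}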
BitMask Match(h2_t hash) const { - auto match = _mm_set1_epi8(hash); + auto match = _mm_set1_epi8(static_cast(hash)); return BitMask( static_cast(_mm_movemask_epi8(_mm_cmpeq_epi8(match, ctrl)))); } // Returns a bitmask representing the positions of empty slots. - BitMask MatchEmpty() const { -#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3 + NonIterableBitMask MaskEmpty() const { +#ifdef ABSL_INTERNAL_HAVE_SSSE3 // This only works because ctrl_t::kEmpty is -128. - return BitMask( + return NonIterableBitMask( static_cast(_mm_movemask_epi8(_mm_sign_epi8(ctrl, ctrl)))); #else - return Match(static_cast(ctrl_t::kEmpty)); + auto match = _mm_set1_epi8(static_cast(ctrl_t::kEmpty)); + return NonIterableBitMask( + static_cast(_mm_movemask_epi8(_mm_cmpeq_epi8(match, ctrl)))); #endif } // Returns a bitmask representing the positions of empty or deleted slots. - BitMask MatchEmptyOrDeleted() const { - auto special = _mm_set1_epi8(static_cast(ctrl_t::kSentinel)); - return BitMask( - static_cast( - _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl)))); + NonIterableBitMask MaskEmptyOrDeleted() const { + auto special = _mm_set1_epi8(static_cast(ctrl_t::kSentinel)); + return NonIterableBitMask(static_cast( + _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl)))); } // Returns the number of trailing empty or deleted elements in the group. uint32_t CountLeadingEmptyOrDeleted() const { - auto special = _mm_set1_epi8(static_cast(ctrl_t::kSentinel)); + auto special = _mm_set1_epi8(static_cast(ctrl_t::kSentinel)); return TrailingZeros(static_cast( _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl)) + 1)); } @@ -392,7 +580,7 @@ struct GroupSse2Impl { void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const { auto msbs = _mm_set1_epi8(static_cast(-128)); auto x126 = _mm_set1_epi8(126); -#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3 +#ifdef ABSL_INTERNAL_HAVE_SSSE3 auto res = _mm_or_si128(_mm_shuffle_epi8(x126, ctrl), msbs); #else auto zero = _mm_setzero_si128(); @@ -406,6 +594,64 @@ struct GroupSse2Impl { }; #endif // ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2 +#if defined(ABSL_INTERNAL_HAVE_ARM_NEON) && defined(ABSL_IS_LITTLE_ENDIAN) +struct GroupAArch64Impl { + static constexpr size_t kWidth = 8; + + explicit GroupAArch64Impl(const ctrl_t* pos) { + ctrl = vld1_u8(reinterpret_cast(pos)); + } + + BitMask Match(h2_t hash) const { + uint8x8_t dup = vdup_n_u8(hash); + auto mask = vceq_u8(ctrl, dup); + constexpr uint64_t msbs = 0x8080808080808080ULL; + return BitMask( + vget_lane_u64(vreinterpret_u64_u8(mask), 0) & msbs); + } + + NonIterableBitMask MaskEmpty() const { + uint64_t mask = + vget_lane_u64(vreinterpret_u64_u8(vceq_s8( + vdup_n_s8(static_cast(ctrl_t::kEmpty)), + vreinterpret_s8_u8(ctrl))), + 0); + return NonIterableBitMask(mask); + } + + NonIterableBitMask MaskEmptyOrDeleted() const { + uint64_t mask = + vget_lane_u64(vreinterpret_u64_u8(vcgt_s8( + vdup_n_s8(static_cast(ctrl_t::kSentinel)), + vreinterpret_s8_u8(ctrl))), + 0); + return NonIterableBitMask(mask); + } + + uint32_t CountLeadingEmptyOrDeleted() const { + uint64_t mask = vget_lane_u64(vreinterpret_u64_u8(ctrl), 0); + // ctrl | ~(ctrl >> 7) will have the lowest bit set to zero for kEmpty and + // kDeleted. We lower all other bits and count number of trailing zeros. + // Clang and GCC optimize countr_zero to rbit+clz without any check for 0, + // so we should be fine. 
+ constexpr uint64_t bits = 0x0101010101010101ULL; + return static_cast(countr_zero((mask | ~(mask >> 7)) & bits) >> + 3); + } + + void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const { + uint64_t mask = vget_lane_u64(vreinterpret_u64_u8(ctrl), 0); + constexpr uint64_t msbs = 0x8080808080808080ULL; + constexpr uint64_t lsbs = 0x0101010101010101ULL; + auto x = mask & msbs; + auto res = (~x + (x >> 7)) & ~lsbs; + little_endian::Store64(dst, res); + } + + uint8x8_t ctrl; +}; +#endif // ABSL_INTERNAL_HAVE_ARM_NEON && ABSL_IS_LITTLE_ENDIAN + struct GroupPortableImpl { static constexpr size_t kWidth = 8; @@ -432,19 +678,24 @@ struct GroupPortableImpl { return BitMask((x - lsbs) & ~x & msbs); } - BitMask MatchEmpty() const { + NonIterableBitMask MaskEmpty() const { constexpr uint64_t msbs = 0x8080808080808080ULL; - return BitMask((ctrl & (~ctrl << 6)) & msbs); + return NonIterableBitMask((ctrl & (~ctrl << 6)) & + msbs); } - BitMask MatchEmptyOrDeleted() const { + NonIterableBitMask MaskEmptyOrDeleted() const { constexpr uint64_t msbs = 0x8080808080808080ULL; - return BitMask((ctrl & (~ctrl << 7)) & msbs); + return NonIterableBitMask((ctrl & (~ctrl << 7)) & + msbs); } uint32_t CountLeadingEmptyOrDeleted() const { - constexpr uint64_t gaps = 0x00FEFEFEFEFEFEFEULL; - return (TrailingZeros(((~ctrl & (ctrl >> 7)) | gaps) + 1) + 7) >> 3; + // ctrl | ~(ctrl >> 7) will have the lowest bit set to zero for kEmpty and + // kDeleted. We lower all other bits and count number of trailing zeros. + constexpr uint64_t bits = 0x0101010101010101ULL; + return static_cast(countr_zero((ctrl | ~(ctrl >> 7)) & bits) >> + 3); } void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const { @@ -458,32 +709,40 @@ struct GroupPortableImpl { uint64_t ctrl; }; -#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2 +#ifdef ABSL_INTERNAL_HAVE_SSE2 using Group = GroupSse2Impl; +#elif defined(ABSL_INTERNAL_HAVE_ARM_NEON) && defined(ABSL_IS_LITTLE_ENDIAN) +using Group = GroupAArch64Impl; #else using Group = GroupPortableImpl; #endif -// The number of cloned control bytes that we copy from the beginning to the -// end of the control bytes array. +// Returns he number of "cloned control bytes". +// +// This is the number of control bytes that are present both at the beginning +// of the control byte array and at the end, such that we can create a +// `Group::kWidth`-width probe window starting from any control byte. constexpr size_t NumClonedBytes() { return Group::kWidth - 1; } template class raw_hash_set; +// Returns whether `n` is a valid capacity (i.e., number of slots). +// +// A valid capacity is a non-zero integer `2^m - 1`. inline bool IsValidCapacity(size_t n) { return ((n + 1) & n) == 0 && n > 0; } +// Applies the following mapping to every byte in the control array: +// * kDeleted -> kEmpty +// * kEmpty -> kEmpty +// * _ -> kDeleted // PRECONDITION: // IsValidCapacity(capacity) // ctrl[capacity] == ctrl_t::kSentinel // ctrl[i] != ctrl_t::kSentinel for all i < capacity -// Applies mapping for every byte in ctrl: -// DELETED -> EMPTY -// EMPTY -> EMPTY -// FULL -> DELETED void ConvertDeletedToEmptyAndFullToDeleted(ctrl_t* ctrl, size_t capacity); -// Rounds up the capacity to the next power of 2 minus 1, with a minimum of 1. +// Converts `n` into the next valid capacity, per `IsValidCapacity`. inline size_t NormalizeCapacity(size_t n) { return n ? 
~size_t{} >> countl_zero(n) : 1; } @@ -496,8 +755,8 @@ inline size_t NormalizeCapacity(size_t n) { // never need to probe (the whole table fits in one group) so we don't need a // load factor less than 1. -// Given `capacity` of the table, returns the size (i.e. number of full slots) -// at which we should grow the capacity. +// Given `capacity`, applies the load factor; i.e., it returns the maximum +// number of values we should put into the table before a resizing rehash. inline size_t CapacityToGrowth(size_t capacity) { assert(IsValidCapacity(capacity)); // `capacity*7/8` @@ -507,8 +766,12 @@ inline size_t CapacityToGrowth(size_t capacity) { } return capacity - capacity / 8; } -// From desired "growth" to a lowerbound of the necessary capacity. -// Might not be a valid one and requires NormalizeCapacity(). + +// Given `growth`, "unapplies" the load factor to find how large the capacity +// should be to stay within the load factor. +// +// This might not be a valid capacity and `NormalizeCapacity()` should be +// called on this. inline size_t GrowthToLowerboundCapacity(size_t growth) { // `growth*8/7` if (Group::kWidth == 8 && growth == 7) { @@ -534,20 +797,54 @@ size_t SelectBucketCountForIterRange(InputIter first, InputIter last, return 0; } -inline void AssertIsFull(ctrl_t* ctrl) { - ABSL_HARDENING_ASSERT( - (ctrl != nullptr && IsFull(*ctrl)) && - "Invalid operation on iterator. The element might have " - "been erased, the table might have rehashed, or this may " - "be an end() iterator."); +#define ABSL_INTERNAL_ASSERT_IS_FULL(ctrl, operation) \ + do { \ + ABSL_HARDENING_ASSERT( \ + (ctrl != nullptr) && operation \ + " called on invalid iterator. The iterator might be an end() " \ + "iterator or may have been default constructed."); \ + ABSL_HARDENING_ASSERT( \ + (IsFull(*ctrl)) && operation \ + " called on invalid iterator. The element might have been erased or " \ + "the table might have rehashed."); \ + } while (0) + +// Note that for comparisons, null/end iterators are valid. +inline void AssertIsValidForComparison(const ctrl_t* ctrl) { + ABSL_HARDENING_ASSERT((ctrl == nullptr || IsFull(*ctrl)) && + "Invalid iterator comparison. The element might have " + "been erased or the table might have rehashed."); +} + +// If the two iterators come from the same container, then their pointers will +// interleave such that ctrl_a <= ctrl_b < slot_a <= slot_b or vice/versa. +// Note: we take slots by reference so that it's not UB if they're uninitialized +// as long as we don't read them (when ctrl is null). +inline bool AreItersFromSameContainer(const ctrl_t* ctrl_a, + const ctrl_t* ctrl_b, + const void* const& slot_a, + const void* const& slot_b) { + // If either control byte is null, then we can't tell. + if (ctrl_a == nullptr || ctrl_b == nullptr) return true; + const void* low_slot = slot_a; + const void* hi_slot = slot_b; + if (ctrl_a > ctrl_b) { + std::swap(ctrl_a, ctrl_b); + std::swap(low_slot, hi_slot); + } + return ctrl_b < low_slot && low_slot <= hi_slot; } -inline void AssertIsValid(ctrl_t* ctrl) { +// Asserts that two iterators come from the same container. +// Note: we take slots by reference so that it's not UB if they're uninitialized +// as long as we don't read them (when ctrl is null). +inline void AssertSameContainer(const ctrl_t* ctrl_a, const ctrl_t* ctrl_b, + const void* const& slot_a, + const void* const& slot_b) { ABSL_HARDENING_ASSERT( - (ctrl == nullptr || IsFull(*ctrl)) && - "Invalid operation on iterator. 
The element might have " - "been erased, the table might have rehashed, or this may " - "be an end() iterator."); + AreItersFromSameContainer(ctrl_a, ctrl_b, slot_a, slot_b) && + "Invalid iterator comparison. The iterators may be from different " + "containers or the container might have rehashed."); } struct FindInfo { @@ -555,44 +852,40 @@ struct FindInfo { size_t probe_length; }; -// The representation of the object has two modes: -// - small: For capacities < kWidth-1 -// - large: For the rest. +// Whether a table is "small". A small table fits entirely into a probing +// group, i.e., has a capacity < `Group::kWidth`. // -// Differences: -// - In small mode we are able to use the whole capacity. The extra control -// bytes give us at least one "empty" control byte to stop the iteration. -// This is important to make 1 a valid capacity. +// In small mode we are able to use the whole capacity. The extra control +// bytes give us at least one "empty" control byte to stop the iteration. +// This is important to make 1 a valid capacity. // -// - In small mode only the first `capacity()` control bytes after the -// sentinel are valid. The rest contain dummy ctrl_t::kEmpty values that do not -// represent a real slot. This is important to take into account on -// find_first_non_full(), where we never try ShouldInsertBackwards() for -// small tables. +// In small mode only the first `capacity` control bytes after the sentinel +// are valid. The rest contain dummy ctrl_t::kEmpty values that do not +// represent a real slot. This is important to take into account on +// `find_first_non_full()`, where we never try +// `ShouldInsertBackwards()` for small tables. inline bool is_small(size_t capacity) { return capacity < Group::kWidth - 1; } +// Begins a probing operation on `ctrl`, using `hash`. inline probe_seq probe(const ctrl_t* ctrl, size_t hash, size_t capacity) { return probe_seq(H1(hash, ctrl), capacity); } -// Probes the raw_hash_set with the probe sequence for hash and returns the -// pointer to the first empty or deleted slot. -// NOTE: this function must work with tables having both ctrl_t::kEmpty and -// ctrl_t::kDeleted in one group. Such tables appears during -// drop_deletes_without_resize. +// Probes an array of control bits using a probe sequence derived from `hash`, +// and returns the offset corresponding to the first deleted or empty slot. +// +// Behavior when the entire table is full is undefined. // -// This function is very useful when insertions happen and: -// - the input is already a set -// - there are enough slots -// - the element with the hash is not in the table +// NOTE: this function must work with tables having both empty and deleted +// slots in the same group. Such tables appear during `erase()`. template inline FindInfo find_first_non_full(const ctrl_t* ctrl, size_t hash, size_t capacity) { auto seq = probe(ctrl, hash, capacity); while (true) { Group g{ctrl + seq.offset()}; - auto mask = g.MatchEmptyOrDeleted(); + auto mask = g.MaskEmptyOrDeleted(); if (mask) { #if !defined(NDEBUG) // We want to add entropy even when ASLR is not enabled. @@ -615,7 +908,8 @@ inline FindInfo find_first_non_full(const ctrl_t* ctrl, size_t hash, // corresponding translation unit. extern template FindInfo find_first_non_full(const ctrl_t*, size_t, size_t); -// Reset all ctrl bytes back to ctrl_t::kEmpty, except the sentinel. +// Sets `ctrl` to `{kEmpty, kSentinel, ..., kEmpty}`, marking the entire +// array as marked as empty. 
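// A quick illustration of the "small table" special case described above (a
// sketch; 16 is Group::kWidth on SSE2 builds, 8 on the portable and NEON ones):
#include <cstddef>

constexpr std::size_t kSketchGroupWidth = 16;

constexpr bool IsSmallSketch(std::size_t capacity) {
  return capacity < kSketchGroupWidth - 1;
}

static_assert(IsSmallSketch(1), "capacity 1 is valid and fits in one group");
static_assert(IsSmallSketch(7), "so does 7");
static_assert(!IsSmallSketch(15),
              "15 slots already take the general probing path");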
inline void ResetCtrl(size_t capacity, ctrl_t* ctrl, const void* slot, size_t slot_size) { std::memset(ctrl, static_cast(ctrl_t::kEmpty), @@ -624,8 +918,10 @@ inline void ResetCtrl(size_t capacity, ctrl_t* ctrl, const void* slot, SanitizerPoisonMemoryRegion(slot, slot_size * capacity); } -// Sets the control byte, and if `i < NumClonedBytes()`, set the cloned byte -// at the end too. +// Sets `ctrl[i]` to `h`. +// +// Unlike setting it directly, this function will perform bounds checks and +// mirror the value to the cloned tail if necessary. inline void SetCtrl(size_t i, ctrl_t h, size_t capacity, ctrl_t* ctrl, const void* slot, size_t slot_size) { assert(i < capacity); @@ -641,25 +937,28 @@ inline void SetCtrl(size_t i, ctrl_t h, size_t capacity, ctrl_t* ctrl, ctrl[((i - NumClonedBytes()) & capacity) + (NumClonedBytes() & capacity)] = h; } +// Overload for setting to an occupied `h2_t` rather than a special `ctrl_t`. inline void SetCtrl(size_t i, h2_t h, size_t capacity, ctrl_t* ctrl, const void* slot, size_t slot_size) { SetCtrl(i, static_cast(h), capacity, ctrl, slot, slot_size); } -// The allocated block consists of `capacity + 1 + NumClonedBytes()` control -// bytes followed by `capacity` slots, which must be aligned to `slot_align`. -// SlotOffset returns the offset of the slots into the allocated block. +// Given the capacity of a table, computes the offset (from the start of the +// backing allocation) at which the slots begin. inline size_t SlotOffset(size_t capacity, size_t slot_align) { assert(IsValidCapacity(capacity)); const size_t num_control_bytes = capacity + 1 + NumClonedBytes(); return (num_control_bytes + slot_align - 1) & (~slot_align + 1); } -// Returns the size of the allocated block. See also above comment. +// Given the capacity of a table, computes the total size of the backing +// array. inline size_t AllocSize(size_t capacity, size_t slot_size, size_t slot_align) { return SlotOffset(capacity, slot_align) + capacity * slot_size; } +// A SwissTable. +// // Policy: a policy defines how to perform different operations on // the slots of the hashtable (see hash_policy_traits.h for the full interface // of policy). @@ -774,16 +1073,19 @@ class raw_hash_set { // PRECONDITION: not an end() iterator. reference operator*() const { - AssertIsFull(ctrl_); + ABSL_INTERNAL_ASSERT_IS_FULL(ctrl_, "operator*()"); return PolicyTraits::element(slot_); } // PRECONDITION: not an end() iterator. - pointer operator->() const { return &operator*(); } + pointer operator->() const { + ABSL_INTERNAL_ASSERT_IS_FULL(ctrl_, "operator->"); + return &operator*(); + } // PRECONDITION: not an end() iterator. iterator& operator++() { - AssertIsFull(ctrl_); + ABSL_INTERNAL_ASSERT_IS_FULL(ctrl_, "operator++"); ++ctrl_; ++slot_; skip_empty_or_deleted(); @@ -797,8 +1099,9 @@ class raw_hash_set { } friend bool operator==(const iterator& a, const iterator& b) { - AssertIsValid(a.ctrl_); - AssertIsValid(b.ctrl_); + AssertSameContainer(a.ctrl_, b.ctrl_, a.slot_, b.slot_); + AssertIsValidForComparison(a.ctrl_); + AssertIsValidForComparison(b.ctrl_); return a.ctrl_ == b.ctrl_; } friend bool operator!=(const iterator& a, const iterator& b) { @@ -809,9 +1112,13 @@ class raw_hash_set { iterator(ctrl_t* ctrl, slot_type* slot) : ctrl_(ctrl), slot_(slot) { // This assumption helps the compiler know that any non-end iterator is // not equal to any end iterator. 
- ABSL_INTERNAL_ASSUME(ctrl != nullptr); + ABSL_ASSUME(ctrl != nullptr); } + // Fixes up `ctrl_` to point to a full by advancing it and `slot_` until + // they reach one. + // + // If a sentinel is reached, we null `ctrl_` out instead. void skip_empty_or_deleted() { while (IsEmptyOrDeleted(*ctrl_)) { uint32_t shift = Group{ctrl_}.CountLeadingEmptyOrDeleted(); @@ -874,11 +1181,12 @@ class raw_hash_set { std::is_nothrow_default_constructible::value&& std::is_nothrow_default_constructible::value) {} - explicit raw_hash_set(size_t bucket_count, const hasher& hash = hasher(), + explicit raw_hash_set(size_t bucket_count, + const hasher& hash = hasher(), const key_equal& eq = key_equal(), const allocator_type& alloc = allocator_type()) : ctrl_(EmptyGroup()), - settings_(0, HashtablezInfoHandle(), hash, eq, alloc) { + settings_(0u, HashtablezInfoHandle(), hash, eq, alloc) { if (bucket_count) { capacity_ = NormalizeCapacity(bucket_count); initialize_slots(); @@ -1003,14 +1311,16 @@ class raw_hash_set { std::is_nothrow_copy_constructible::value) : ctrl_(absl::exchange(that.ctrl_, EmptyGroup())), slots_(absl::exchange(that.slots_, nullptr)), - size_(absl::exchange(that.size_, 0)), - capacity_(absl::exchange(that.capacity_, 0)), + size_(absl::exchange(that.size_, size_t{0})), + capacity_(absl::exchange(that.capacity_, size_t{0})), // Hash, equality and allocator are copied instead of moved because // `that` must be left valid. If Hash is std::function, moving it // would create a nullptr functor that cannot be called. - settings_(absl::exchange(that.growth_left(), 0), + settings_(absl::exchange(that.growth_left(), size_t{0}), absl::exchange(that.infoz(), HashtablezInfoHandle()), - that.hash_ref(), that.eq_ref(), that.alloc_ref()) {} + that.hash_ref(), + that.eq_ref(), + that.alloc_ref()) {} raw_hash_set(raw_hash_set&& that, const allocator_type& a) : ctrl_(EmptyGroup()), @@ -1054,7 +1364,7 @@ class raw_hash_set { typename AllocTraits::propagate_on_container_move_assignment()); } - ~raw_hash_set() { destroy_slots(); } + ~raw_hash_set() { destroy_slots(/*reset=*/false); } iterator begin() { auto it = iterator_at(0); @@ -1084,7 +1394,7 @@ class raw_hash_set { // largest bucket_count() threshold for which iteration is still fast and // past that we simply deallocate the array. if (capacity_ > 127) { - destroy_slots(); + destroy_slots(/*reset=*/true); infoz().RecordClearedReservation(); } else if (capacity_) { @@ -1108,8 +1418,7 @@ class raw_hash_set { // m.insert(std::make_pair("abc", 42)); // TODO(cheshire): A type alias T2 is introduced as a workaround for the nvcc // bug. - template = 0, - class T2 = T, + template = 0, class T2 = T, typename std::enable_if::value, int>::type = 0, T* = nullptr> std::pair insert(T&& value) { @@ -1329,7 +1638,7 @@ class raw_hash_set { // This overload is necessary because otherwise erase(const K&) would be // a better match if non-const iterator is passed as an argument. 
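// Usage sketch for the two erase flavours as seen from a public container
// such as absl::flat_hash_set (illustrative only):
#include "absl/container/flat_hash_set.h"

inline void EraseDemo() {
  absl::flat_hash_set<int> s = {1, 2, 3};
  auto it = s.find(2);
  if (it != s.end()) s.erase(it);  // by position: no second lookup, returns void
  s.erase(3);                      // by key: returns the number of elements erased
}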
void erase(iterator it) { - AssertIsFull(it.ctrl_); + ABSL_INTERNAL_ASSERT_IS_FULL(it.ctrl_, "erase()"); PolicyTraits::destroy(&alloc_ref(), it.slot_); erase_meta_only(it); } @@ -1363,7 +1672,7 @@ class raw_hash_set { } node_type extract(const_iterator position) { - AssertIsFull(position.inner_.ctrl_); + ABSL_INTERNAL_ASSERT_IS_FULL(position.inner_.ctrl_, "extract()"); auto node = CommonAccess::Transfer(alloc_ref(), position.inner_.slot_); erase_meta_only(position); @@ -1398,7 +1707,7 @@ class raw_hash_set { void rehash(size_t n) { if (n == 0 && capacity_ == 0) return; if (n == 0 && size_ == 0) { - destroy_slots(); + destroy_slots(/*reset=*/true); infoz().RecordStorageChanged(0, 0); infoz().RecordClearedReservation(); return; @@ -1450,12 +1759,13 @@ class raw_hash_set { template void prefetch(const key_arg& key) const { (void)key; -#if defined(__GNUC__) + // Avoid probing if we won't be able to prefetch the addresses received. +#ifdef ABSL_INTERNAL_HAVE_PREFETCH prefetch_heap_block(); auto seq = probe(ctrl_, hash_ref()(key), capacity_); - __builtin_prefetch(static_cast(ctrl_ + seq.offset())); - __builtin_prefetch(static_cast(slots_ + seq.offset())); -#endif // __GNUC__ + base_internal::PrefetchT0(ctrl_ + seq.offset()); + base_internal::PrefetchT0(slots_ + seq.offset()); +#endif // ABSL_INTERNAL_HAVE_PREFETCH } // The API of find() has two extensions. @@ -1476,7 +1786,7 @@ class raw_hash_set { PolicyTraits::element(slots_ + seq.offset(i))))) return iterator_at(seq.offset(i)); } - if (ABSL_PREDICT_TRUE(g.MatchEmpty())) return end(); + if (ABSL_PREDICT_TRUE(g.MaskEmpty())) return end(); seq.next(); assert(seq.index() <= capacity_ && "full table!"); } @@ -1616,17 +1926,17 @@ class raw_hash_set { slot_type&& slot; }; - // "erases" the object from the container, except that it doesn't actually - // destroy the object. It only updates all the metadata of the class. - // This can be used in conjunction with Policy::transfer to move the object to - // another place. + // Erases, but does not destroy, the value pointed to by `it`. + // + // This merely updates the pertinent control byte. This can be used in + // conjunction with Policy::transfer to move the object to another place. void erase_meta_only(const_iterator it) { assert(IsFull(*it.inner_.ctrl_) && "erasing a dangling iterator"); --size_; const size_t index = static_cast(it.inner_.ctrl_ - ctrl_); const size_t index_before = (index - Group::kWidth) & capacity_; - const auto empty_after = Group(it.inner_.ctrl_).MatchEmpty(); - const auto empty_before = Group(ctrl_ + index_before).MatchEmpty(); + const auto empty_after = Group(it.inner_.ctrl_).MaskEmpty(); + const auto empty_before = Group(ctrl_ + index_before).MaskEmpty(); // We count how many consecutive non empties we have to the right and to the // left of `it`. If the sum is >= kWidth then there is at least one probe @@ -1642,6 +1952,11 @@ class raw_hash_set { infoz().RecordErase(); } + // Allocates a backing array for `self` and initializes its control bytes. + // This reads `capacity_` and updates all other fields based on the result of + // the allocation. + // + // This does not free the currently held array; `capacity_` must be nonzero. 
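// The tombstone-vs-empty decision that erase_meta_only() above describes,
// restated as a tiny stand-alone predicate (a sketch, not the real code):
#include <cstddef>

// `nonempty_left` / `nonempty_right`: consecutive non-empty slots immediately
// to the left / right of the erased slot. If both neighbouring groups contain
// an empty slot and that run is shorter than one group, no probe window can
// ever have seen this group as full, so the erased slot may be reset to
// kEmpty instead of becoming a kDeleted tombstone.
inline bool CanResetToEmpty(bool empty_before, bool empty_after,
                            std::size_t nonempty_left,
                            std::size_t nonempty_right,
                            std::size_t group_width) {
  return empty_before && empty_after &&
         nonempty_left + nonempty_right < group_width;
}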
void initialize_slots() { assert(capacity_); // Folks with custom allocators often make unwarranted assumptions about the @@ -1670,7 +1985,11 @@ class raw_hash_set { infoz().RecordStorageChanged(size_, capacity_); } - void destroy_slots() { + // Destroys all slots in the backing array, frees the backing array, + // If reset is true, also clears all top-level book-keeping data. + // + // This essentially implements `map = raw_hash_set();`. + void destroy_slots(bool reset) { if (!capacity_) return; for (size_t i = 0; i != capacity_; ++i) { if (IsFull(ctrl_[i])) { @@ -1683,11 +2002,13 @@ class raw_hash_set { Deallocate( &alloc_ref(), ctrl_, AllocSize(capacity_, sizeof(slot_type), alignof(slot_type))); - ctrl_ = EmptyGroup(); - slots_ = nullptr; - size_ = 0; - capacity_ = 0; - growth_left() = 0; + if (reset) { + ctrl_ = EmptyGroup(); + slots_ = nullptr; + size_ = 0; + capacity_ = 0; + growth_left() = 0; + } } void resize(size_t new_capacity) { @@ -1720,6 +2041,9 @@ class raw_hash_set { infoz().RecordRehash(total_probe_length); } + // Prunes control bytes to remove as many tombstones as possible. + // + // See the comment on `rehash_and_grow_if_necessary()`. void drop_deletes_without_resize() ABSL_ATTRIBUTE_NOINLINE { assert(IsValidCapacity(capacity_)); assert(!is_small(capacity_)); @@ -1786,6 +2110,11 @@ class raw_hash_set { infoz().RecordRehash(total_probe_length); } + // Called whenever the table *might* need to conditionally grow. + // + // This function is an optimization opportunity to perform a rehash even when + // growth is unnecessary, because vacating tombstones is beneficial for + // performance in the long-run. void rehash_and_grow_if_necessary() { if (capacity_ == 0) { resize(1); @@ -1850,7 +2179,7 @@ class raw_hash_set { elem)) return true; } - if (ABSL_PREDICT_TRUE(g.MatchEmpty())) return false; + if (ABSL_PREDICT_TRUE(g.MaskEmpty())) return false; seq.next(); assert(seq.index() <= capacity_ && "full table!"); } @@ -1870,6 +2199,9 @@ class raw_hash_set { } protected: + // Attempts to find `key` in the table; if it isn't found, returns a slot that + // the value can be inserted into, with the control byte already set to + // `key`'s H2. template std::pair find_or_prepare_insert(const K& key) { prefetch_heap_block(); @@ -1883,13 +2215,17 @@ class raw_hash_set { PolicyTraits::element(slots_ + seq.offset(i))))) return {seq.offset(i), false}; } - if (ABSL_PREDICT_TRUE(g.MatchEmpty())) break; + if (ABSL_PREDICT_TRUE(g.MaskEmpty())) break; seq.next(); assert(seq.index() <= capacity_ && "full table!"); } return {prepare_insert(hash), true}; } + // Given the hash of a value not currently in the table, finds the next + // viable slot index to insert it at. + // + // REQUIRES: At least one non-full slot available. size_t prepare_insert(size_t hash) ABSL_ATTRIBUTE_NOINLINE { auto target = find_first_non_full(ctrl_, hash, capacity_); if (ABSL_PREDICT_FALSE(growth_left() == 0 && @@ -1933,15 +2269,23 @@ class raw_hash_set { growth_left() = CapacityToGrowth(capacity()) - size_; } + // The number of slots we can still fill without needing to rehash. + // + // This is stored separately due to tombstones: we do not include tombstones + // in the growth capacity, because we'd like to rehash when the table is + // otherwise filled with tombstones: otherwise, probe sequences might get + // unacceptably long without triggering a rehash. Callers can also force a + // rehash via the standard `rehash(0)`, which will recompute this value as a + // side-effect. + // + // See `CapacityToGrowth()`. 
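// The 7/8 load-factor arithmetic that growth_left() tracks, as a stand-alone
// sketch of the CapacityToGrowth() formula shown earlier in this header:
#include <cstddef>
#include <cstdio>

int main() {
  for (std::size_t capacity : {15, 31, 63, 127, 255}) {
    // capacity * 7/8, computed as capacity - capacity/8 to stay in integers.
    std::printf("capacity %3zu -> rehash after %3zu filled slots\n", capacity,
                capacity - capacity / 8);
  }
  return 0;
}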
size_t& growth_left() { return settings_.template get<0>(); } + // Prefetch the heap-allocated memory region to resolve potential TLB misses. + // This is intended to overlap with execution of calculating the hash for a + // key. void prefetch_heap_block() const { - // Prefetch the heap-allocated memory region to resolve potential TLB - // misses. This is intended to overlap with execution of calculating the - // hash for a key. -#if defined(__GNUC__) - __builtin_prefetch(static_cast(ctrl_), 0, 1); -#endif // __GNUC__ + base_internal::PrefetchT2(ctrl_); } HashtablezInfoHandle& infoz() { return settings_.template get<1>(); } @@ -1958,10 +2302,21 @@ class raw_hash_set { // TODO(alkis): Investigate removing some of these fields: // - ctrl/slots can be derived from each other // - size can be moved into the slot array - ctrl_t* ctrl_ = EmptyGroup(); // [(capacity + 1 + NumClonedBytes()) * ctrl_t] - slot_type* slots_ = nullptr; // [capacity * slot_type] - size_t size_ = 0; // number of full slots - size_t capacity_ = 0; // total number of slots + + // The control bytes (and, also, a pointer to the base of the backing array). + // + // This contains `capacity_ + 1 + NumClonedBytes()` entries, even + // when the table is empty (hence EmptyGroup). + ctrl_t* ctrl_ = EmptyGroup(); + // The beginning of the slots, located at `SlotOffset()` bytes after + // `ctrl_`. May be null for empty tables. + slot_type* slots_ = nullptr; + + // The number of filled slots. + size_t size_ = 0; + + // The total number of available slots. + size_t capacity_ = 0; absl::container_internal::CompressedTuple @@ -2005,7 +2360,7 @@ struct HashtableDebugAccess> { return num_probes; ++num_probes; } - if (g.MatchEmpty()) return num_probes; + if (g.MaskEmpty()) return num_probes; seq.next(); ++num_probes; } @@ -2047,4 +2402,6 @@ struct HashtableDebugAccess> { ABSL_NAMESPACE_END } // namespace absl +#undef ABSL_INTERNAL_ASSERT_IS_FULL + #endif // ABSL_CONTAINER_INTERNAL_RAW_HASH_SET_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set_benchmark.cc b/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set_benchmark.cc index 146ef433c2..e17ba9b43f 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set_benchmark.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set_benchmark.cc @@ -12,13 +12,16 @@ // See the License for the specific language governing permissions and // limitations under the License. 
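The prefetch() extension that the hunk above migrates to base_internal::PrefetchT0/T2 is also available on the public SwissTable containers; a usage sketch (the map, key, and helper below are illustrative, and the call is purely a performance hint):

#include "absl/container/flat_hash_map.h"

inline int LookupWithPrefetch(const absl::flat_hash_map<int, int>& m,
                              int next_key) {
  m.prefetch(next_key);  // start fetching next_key's probe target early
  // ... unrelated work could overlap with the memory fetch here ...
  auto it = m.find(next_key);
  return it == m.end() ? -1 : it->second;
}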
-#include "absl/container/internal/raw_hash_set.h" - +#include +#include #include #include +#include +#include #include "absl/base/internal/raw_logging.h" #include "absl/container/internal/hash_function_defaults.h" +#include "absl/container/internal/raw_hash_set.h" #include "absl/strings/str_format.h" #include "benchmark/benchmark.h" @@ -202,40 +205,113 @@ void CacheInSteadyStateArgs(Benchmark* bm) { BENCHMARK(BM_CacheInSteadyState)->Apply(CacheInSteadyStateArgs); void BM_EndComparison(benchmark::State& state) { + StringTable t = {{"a", "a"}, {"b", "b"}}; + auto it = t.begin(); + for (auto i : state) { + benchmark::DoNotOptimize(t); + benchmark::DoNotOptimize(it); + benchmark::DoNotOptimize(it != t.end()); + } +} +BENCHMARK(BM_EndComparison); + +void BM_Iteration(benchmark::State& state) { std::random_device rd; std::mt19937 rng(rd()); string_generator gen{12}; StringTable t; - while (t.size() < state.range(0)) { + + size_t capacity = state.range(0); + size_t size = state.range(1); + t.reserve(capacity); + + while (t.size() < size) { t.emplace(gen(rng), gen(rng)); } - for (auto _ : state) { + for (auto i : state) { + benchmark::DoNotOptimize(t); for (auto it = t.begin(); it != t.end(); ++it) { - benchmark::DoNotOptimize(it); - benchmark::DoNotOptimize(t); - benchmark::DoNotOptimize(it != t.end()); + benchmark::DoNotOptimize(*it); } } } -BENCHMARK(BM_EndComparison)->Arg(400); -void BM_CopyCtor(benchmark::State& state) { +BENCHMARK(BM_Iteration) + ->ArgPair(1, 1) + ->ArgPair(2, 2) + ->ArgPair(4, 4) + ->ArgPair(7, 7) + ->ArgPair(10, 10) + ->ArgPair(15, 15) + ->ArgPair(16, 16) + ->ArgPair(54, 54) + ->ArgPair(100, 100) + ->ArgPair(400, 400) + // empty + ->ArgPair(0, 0) + ->ArgPair(10, 0) + ->ArgPair(100, 0) + ->ArgPair(1000, 0) + ->ArgPair(10000, 0) + // sparse + ->ArgPair(100, 1) + ->ArgPair(1000, 10); + +void BM_CopyCtorSparseInt(benchmark::State& state) { std::random_device rd; std::mt19937 rng(rd()); IntTable t; std::uniform_int_distribution dist(0, ~uint64_t{}); - while (t.size() < state.range(0)) { + size_t size = state.range(0); + t.reserve(size * 10); + while (t.size() < size) { t.emplace(dist(rng)); } - for (auto _ : state) { + for (auto i : state) { + IntTable t2 = t; + benchmark::DoNotOptimize(t2); + } +} +BENCHMARK(BM_CopyCtorSparseInt)->Range(128, 4096); + +void BM_CopyCtorInt(benchmark::State& state) { + std::random_device rd; + std::mt19937 rng(rd()); + IntTable t; + std::uniform_int_distribution dist(0, ~uint64_t{}); + + size_t size = state.range(0); + while (t.size() < size) { + t.emplace(dist(rng)); + } + + for (auto i : state) { IntTable t2 = t; benchmark::DoNotOptimize(t2); } } -BENCHMARK(BM_CopyCtor)->Range(128, 4096); +BENCHMARK(BM_CopyCtorInt)->Range(128, 4096); + +void BM_CopyCtorString(benchmark::State& state) { + std::random_device rd; + std::mt19937 rng(rd()); + StringTable t; + std::uniform_int_distribution dist(0, ~uint64_t{}); + + size_t size = state.range(0); + while (t.size() < size) { + t.emplace(std::to_string(dist(rng)), std::to_string(dist(rng))); + } + + for (auto i : state) { + StringTable t2 = t; + benchmark::DoNotOptimize(t2); + } +} +BENCHMARK(BM_CopyCtorString)->Range(128, 4096); void BM_CopyAssign(benchmark::State& state) { std::random_device rd; @@ -336,27 +412,27 @@ void BM_Group_Match(benchmark::State& state) { } BENCHMARK(BM_Group_Match); -void BM_Group_MatchEmpty(benchmark::State& state) { +void BM_Group_MaskEmpty(benchmark::State& state) { std::array group; Iota(group.begin(), group.end(), -4); Group g{group.data()}; for (auto _ : state) { 
::benchmark::DoNotOptimize(g); - ::benchmark::DoNotOptimize(g.MatchEmpty()); + ::benchmark::DoNotOptimize(g.MaskEmpty()); } } -BENCHMARK(BM_Group_MatchEmpty); +BENCHMARK(BM_Group_MaskEmpty); -void BM_Group_MatchEmptyOrDeleted(benchmark::State& state) { +void BM_Group_MaskEmptyOrDeleted(benchmark::State& state) { std::array group; Iota(group.begin(), group.end(), -4); Group g{group.data()}; for (auto _ : state) { ::benchmark::DoNotOptimize(g); - ::benchmark::DoNotOptimize(g.MatchEmptyOrDeleted()); + ::benchmark::DoNotOptimize(g.MaskEmptyOrDeleted()); } } -BENCHMARK(BM_Group_MatchEmptyOrDeleted); +BENCHMARK(BM_Group_MaskEmptyOrDeleted); void BM_Group_CountLeadingEmptyOrDeleted(benchmark::State& state) { std::array group; @@ -375,7 +451,7 @@ void BM_Group_MatchFirstEmptyOrDeleted(benchmark::State& state) { Group g{group.data()}; for (auto _ : state) { ::benchmark::DoNotOptimize(g); - ::benchmark::DoNotOptimize(*g.MatchEmptyOrDeleted()); + ::benchmark::DoNotOptimize(g.MaskEmptyOrDeleted().LowestBitSet()); } } BENCHMARK(BM_Group_MatchFirstEmptyOrDeleted); @@ -437,7 +513,6 @@ void CodegenAbslRawHashSetInt64Iterate( int odr = (::benchmark::DoNotOptimize(std::make_tuple( &CodegenAbslRawHashSetInt64Find, &CodegenAbslRawHashSetInt64FindNeEnd, - &CodegenAbslRawHashSetInt64Insert, - &CodegenAbslRawHashSetInt64Contains, + &CodegenAbslRawHashSetInt64Insert, &CodegenAbslRawHashSetInt64Contains, &CodegenAbslRawHashSetInt64Iterate)), 1); diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set_test.cc b/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set_test.cc index e7732f671e..daa3281450 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/container/internal/raw_hash_set_test.cc @@ -14,28 +14,40 @@ #include "absl/container/internal/raw_hash_set.h" +#include #include #include #include #include #include +#include +#include +#include #include #include +#include #include #include +#include #include #include +#include +#include #include "gmock/gmock.h" #include "gtest/gtest.h" #include "absl/base/attributes.h" #include "absl/base/config.h" #include "absl/base/internal/cycleclock.h" +#include "absl/base/internal/prefetch.h" #include "absl/base/internal/raw_logging.h" +#include "absl/container/flat_hash_map.h" +#include "absl/container/flat_hash_set.h" #include "absl/container/internal/container_memory.h" #include "absl/container/internal/hash_function_defaults.h" #include "absl/container/internal/hash_policy_testing.h" #include "absl/container/internal/hashtable_debug.h" +#include "absl/log/log.h" #include "absl/strings/string_view.h" namespace absl { @@ -194,35 +206,39 @@ TEST(Group, Match) { } } -TEST(Group, MatchEmpty) { +TEST(Group, MaskEmpty) { if (Group::kWidth == 16) { ctrl_t group[] = {ctrl_t::kEmpty, CtrlT(1), ctrl_t::kDeleted, CtrlT(3), ctrl_t::kEmpty, CtrlT(5), ctrl_t::kSentinel, CtrlT(7), CtrlT(7), CtrlT(5), CtrlT(3), CtrlT(1), CtrlT(1), CtrlT(1), CtrlT(1), CtrlT(1)}; - EXPECT_THAT(Group{group}.MatchEmpty(), ElementsAre(0, 4)); + EXPECT_THAT(Group{group}.MaskEmpty().LowestBitSet(), 0); + EXPECT_THAT(Group{group}.MaskEmpty().HighestBitSet(), 4); } else if (Group::kWidth == 8) { ctrl_t group[] = {ctrl_t::kEmpty, CtrlT(1), CtrlT(2), ctrl_t::kDeleted, CtrlT(2), CtrlT(1), ctrl_t::kSentinel, CtrlT(1)}; - EXPECT_THAT(Group{group}.MatchEmpty(), ElementsAre(0)); + EXPECT_THAT(Group{group}.MaskEmpty().LowestBitSet(), 0); + EXPECT_THAT(Group{group}.MaskEmpty().HighestBitSet(), 0); } else { FAIL() 
<< "No test coverage for Group::kWidth==" << Group::kWidth; } } -TEST(Group, MatchEmptyOrDeleted) { +TEST(Group, MaskEmptyOrDeleted) { if (Group::kWidth == 16) { - ctrl_t group[] = {ctrl_t::kEmpty, CtrlT(1), ctrl_t::kDeleted, CtrlT(3), - ctrl_t::kEmpty, CtrlT(5), ctrl_t::kSentinel, CtrlT(7), - CtrlT(7), CtrlT(5), CtrlT(3), CtrlT(1), - CtrlT(1), CtrlT(1), CtrlT(1), CtrlT(1)}; - EXPECT_THAT(Group{group}.MatchEmptyOrDeleted(), ElementsAre(0, 2, 4)); + ctrl_t group[] = {ctrl_t::kEmpty, CtrlT(1), ctrl_t::kEmpty, CtrlT(3), + ctrl_t::kDeleted, CtrlT(5), ctrl_t::kSentinel, CtrlT(7), + CtrlT(7), CtrlT(5), CtrlT(3), CtrlT(1), + CtrlT(1), CtrlT(1), CtrlT(1), CtrlT(1)}; + EXPECT_THAT(Group{group}.MaskEmptyOrDeleted().LowestBitSet(), 0); + EXPECT_THAT(Group{group}.MaskEmptyOrDeleted().HighestBitSet(), 4); } else if (Group::kWidth == 8) { ctrl_t group[] = {ctrl_t::kEmpty, CtrlT(1), CtrlT(2), ctrl_t::kDeleted, CtrlT(2), CtrlT(1), ctrl_t::kSentinel, CtrlT(1)}; - EXPECT_THAT(Group{group}.MatchEmptyOrDeleted(), ElementsAre(0, 3)); + EXPECT_THAT(Group{group}.MaskEmptyOrDeleted().LowestBitSet(), 0); + EXPECT_THAT(Group{group}.MaskEmptyOrDeleted().HighestBitSet(), 3); } else { FAIL() << "No test coverage for Group::kWidth==" << Group::kWidth; } @@ -334,7 +350,7 @@ class StringPolicy { struct ctor {}; template - slot_type(ctor, Ts&&... ts) : pair(std::forward(ts)...) {} + explicit slot_type(ctor, Ts&&... ts) : pair(std::forward(ts)...) {} std::pair pair; }; @@ -406,7 +422,7 @@ struct CustomAlloc : std::allocator { CustomAlloc() {} template - CustomAlloc(const CustomAlloc& other) {} + explicit CustomAlloc(const CustomAlloc& /*other*/) {} template struct rebind { using other = CustomAlloc; @@ -1270,6 +1286,7 @@ TEST(Table, DISABLED_EnsureNonQuadraticTopNXorSeedByProbeSeqLength) { for (size_t size : sizes) { auto& stat = stats[size]; VerifyStats(size, expected, stat); + LOG(INFO) << size << " " << stat; } } @@ -1365,6 +1382,7 @@ TEST(Table, DISABLED_EnsureNonQuadraticTopNLinearTransformByProbeSeqLength) { for (size_t size : sizes) { auto& stat = stats[size]; VerifyStats(size, expected, stat); + LOG(INFO) << size << " " << stat; } } @@ -1499,7 +1517,7 @@ TEST(Table, RehashZeroForcesRehash) { TEST(Table, ConstructFromInitList) { using P = std::pair; struct Q { - operator P() const { return {}; } + operator P() const { return {}; } // NOLINT }; StringTable t = {P(), Q(), {}, {{}, {}}}; } @@ -2018,20 +2036,75 @@ TEST(Table, UnstablePointers) { EXPECT_NE(old_ptr, addr(0)); } -// Confirm that we assert if we try to erase() end(). -TEST(TableDeathTest, EraseOfEndAsserts) { +bool IsAssertEnabled() { // Use an assert with side-effects to figure out if they are actually enabled. bool assert_enabled = false; - assert([&]() { + assert([&]() { // NOLINT assert_enabled = true; return true; }()); - if (!assert_enabled) return; + return assert_enabled; +} + +TEST(TableDeathTest, InvalidIteratorAsserts) { + if (!IsAssertEnabled()) GTEST_SKIP() << "Assertions not enabled."; + + IntTable t; + // Extra simple "regexp" as regexp support is highly varied across platforms. + EXPECT_DEATH_IF_SUPPORTED( + t.erase(t.end()), + "erase.* called on invalid iterator. The iterator might be an " + "end.*iterator or may have been default constructed."); + typename IntTable::iterator iter; + EXPECT_DEATH_IF_SUPPORTED( + ++iter, + "operator.* called on invalid iterator. 
The iterator might be an " + "end.*iterator or may have been default constructed."); + t.insert(0); + iter = t.begin(); + t.erase(iter); + EXPECT_DEATH_IF_SUPPORTED( + ++iter, + "operator.* called on invalid iterator. The element might have been " + "erased or .*the table might have rehashed."); +} + +TEST(TableDeathTest, IteratorInvalidAssertsEqualityOperator) { + if (!IsAssertEnabled()) GTEST_SKIP() << "Assertions not enabled."; IntTable t; + t.insert(1); + t.insert(2); + t.insert(3); + auto iter1 = t.begin(); + auto iter2 = std::next(iter1); + ASSERT_NE(iter1, t.end()); + ASSERT_NE(iter2, t.end()); + t.erase(iter1); // Extra simple "regexp" as regexp support is highly varied across platforms. - constexpr char kDeathMsg[] = "Invalid operation on iterator"; - EXPECT_DEATH_IF_SUPPORTED(t.erase(t.end()), kDeathMsg); + const char* const kErasedDeathMessage = + "Invalid iterator comparison. The element might have .*been erased or " + "the table might have rehashed."; + EXPECT_DEATH_IF_SUPPORTED(void(iter1 == iter2), kErasedDeathMessage); + EXPECT_DEATH_IF_SUPPORTED(void(iter2 != iter1), kErasedDeathMessage); + t.erase(iter2); + EXPECT_DEATH_IF_SUPPORTED(void(iter1 == iter2), kErasedDeathMessage); + + IntTable t1, t2; + t1.insert(0); + t2.insert(0); + iter1 = t1.begin(); + iter2 = t2.begin(); + const char* const kContainerDiffDeathMessage = + "Invalid iterator comparison. The iterators may be from different " + ".*containers or the container might have rehashed."; + EXPECT_DEATH_IF_SUPPORTED(void(iter1 == iter2), kContainerDiffDeathMessage); + EXPECT_DEATH_IF_SUPPORTED(void(iter2 == iter1), kContainerDiffDeathMessage); + + for (int i = 0; i < 10; ++i) t1.insert(i); + // There should have been a rehash in t1. + EXPECT_DEATH_IF_SUPPORTED(void(iter1 == t1.begin()), + kContainerDiffDeathMessage); } #if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE) @@ -2042,7 +2115,7 @@ TEST(RawHashSamplerTest, Sample) { auto& sampler = GlobalHashtablezSampler(); size_t start_size = 0; - std::unordered_set preexisting_info; + absl::flat_hash_set preexisting_info; start_size += sampler.Iterate([&](const HashtablezInfo& info) { preexisting_info.insert(&info); ++start_size; @@ -2069,8 +2142,8 @@ TEST(RawHashSamplerTest, Sample) { } } size_t end_size = 0; - std::unordered_map observed_checksums; - std::unordered_map reservations; + absl::flat_hash_map observed_checksums; + absl::flat_hash_map reservations; end_size += sampler.Iterate([&](const HashtablezInfo& info) { if (preexisting_info.count(&info) == 0) { observed_checksums[info.hashes_bitwise_xor.load( diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/node_hash_map.h b/TMessagesProj/jni/voip/webrtc/absl/container/node_hash_map.h index ca1ed408c6..6868e63a42 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/node_hash_map.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/node_hash_map.h @@ -78,6 +78,10 @@ class NodeHashMapPolicy; // absl/hash/hash.h for information on extending Abseil hashing to user-defined // types. // +// Using `absl::node_hash_map` at interface boundaries in dynamically loaded +// libraries (e.g. .dll, .so) is unsupported due to way `absl::Hash` values may +// be randomized across dynamically loaded libraries. +// // Example: // // // Create a node hash map of three strings (that map to strings) @@ -348,8 +352,8 @@ class node_hash_map // `node_hash_map`. // // iterator try_emplace(const_iterator hint, - // const init_type& k, Args&&... args): - // iterator try_emplace(const_iterator hint, init_type&& k, Args&&... 
args): + // const key_type& k, Args&&... args): + // iterator try_emplace(const_iterator hint, key_type&& k, Args&&... args): // // Inserts (via copy or move) the element of the specified key into the // `node_hash_map` using the position of `hint` as a non-binding suggestion diff --git a/TMessagesProj/jni/voip/webrtc/absl/container/node_hash_set.h b/TMessagesProj/jni/voip/webrtc/absl/container/node_hash_set.h index 9421e11e61..f2cc70c3f6 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/container/node_hash_set.h +++ b/TMessagesProj/jni/voip/webrtc/absl/container/node_hash_set.h @@ -74,6 +74,10 @@ struct NodeHashSetPolicy; // absl/hash/hash.h for information on extending Abseil hashing to user-defined // types. // +// Using `absl::node_hash_set` at interface boundaries in dynamically loaded +// libraries (e.g. .dll, .so) is unsupported due to way `absl::Hash` values may +// be randomized across dynamically loaded libraries. +// // Example: // // // Create a node hash set of three strings diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/crc32c.cc b/TMessagesProj/jni/voip/webrtc/absl/crc/crc32c.cc new file mode 100644 index 0000000000..82865df5c2 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/crc32c.cc @@ -0,0 +1,100 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "absl/crc/crc32c.h" + +#include + +#include "absl/crc/internal/crc.h" +#include "absl/crc/internal/crc32c.h" +#include "absl/crc/internal/crc_memcpy.h" +#include "absl/strings/string_view.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN + +namespace { + +const crc_internal::CRC* CrcEngine() { + static const crc_internal::CRC* engine = crc_internal::CRC::Crc32c(); + return engine; +} + +constexpr uint32_t kCRC32Xor = 0xffffffffU; + +} // namespace + +namespace crc_internal { + +crc32c_t UnextendCrc32cByZeroes(crc32c_t initial_crc, size_t length) { + uint32_t crc = static_cast(initial_crc) ^ kCRC32Xor; + CrcEngine()->UnextendByZeroes(&crc, length); + return static_cast(crc ^ kCRC32Xor); +} + +// Called by `absl::ExtendCrc32c()` on strings with size > 64 or when hardware +// CRC32C support is missing. 
+crc32c_t ExtendCrc32cInternal(crc32c_t initial_crc, + absl::string_view buf_to_add) { + uint32_t crc = static_cast(initial_crc) ^ kCRC32Xor; + CrcEngine()->Extend(&crc, buf_to_add.data(), buf_to_add.size()); + return static_cast(crc ^ kCRC32Xor); +} + +} // namespace crc_internal + +crc32c_t ComputeCrc32c(absl::string_view buf) { + return ExtendCrc32c(ToCrc32c(0), buf); +} + +crc32c_t ExtendCrc32cByZeroes(crc32c_t initial_crc, size_t length) { + uint32_t crc = static_cast(initial_crc) ^ kCRC32Xor; + CrcEngine()->ExtendByZeroes(&crc, length); + return static_cast(crc ^ kCRC32Xor); +} + +crc32c_t ConcatCrc32c(crc32c_t lhs_crc, crc32c_t rhs_crc, size_t rhs_len) { + uint32_t result = static_cast(lhs_crc); + CrcEngine()->ExtendByZeroes(&result, rhs_len); + return static_cast(result) ^ rhs_crc; +} + +crc32c_t RemoveCrc32cPrefix(crc32c_t crc_a, crc32c_t crc_ab, size_t length_b) { + return ConcatCrc32c(crc_a, crc_ab, length_b); +} + +crc32c_t MemcpyCrc32c(void* dest, const void* src, size_t count, + crc32c_t initial_crc) { + return static_cast( + crc_internal::Crc32CAndCopy(dest, src, count, initial_crc, false)); +} + +// Remove a Suffix of given size from a buffer +// +// Given a CRC32C of an existing buffer, `full_string_crc`; the CRC32C of a +// suffix of that buffer to remove, `suffix_crc`; and suffix buffer's length, +// `suffix_len` return the CRC32C of the buffer with suffix removed +// +// This operation has a runtime cost of O(log(`suffix_len`)) +crc32c_t RemoveCrc32cSuffix(crc32c_t full_string_crc, crc32c_t suffix_crc, + size_t suffix_len) { + crc32c_t crc_with_suffix_zeroed = + suffix_crc ^ full_string_crc ^ + ExtendCrc32cByZeroes(ToCrc32c(0), suffix_len); + return crc_internal::UnextendCrc32cByZeroes( + crc_with_suffix_zeroed, suffix_len); +} + +ABSL_NAMESPACE_END +} // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/crc32c.h b/TMessagesProj/jni/voip/webrtc/absl/crc/crc32c.h new file mode 100644 index 0000000000..8b03073265 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/crc32c.h @@ -0,0 +1,176 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ----------------------------------------------------------------------------- +// File: crc32c.h +// ----------------------------------------------------------------------------- +// +// This header file defines the API for computing CRC32C values as checksums +// for arbitrary sequences of bytes provided as a string buffer. +// +// The API includes the basic functions for computing such CRC32C values and +// some utility functions for performing more efficient mathematical +// computations using an existing checksum. 
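// Illustrative usage sketch -- not part of the imported Abseil sources -- of
// the CRC32C arithmetic helpers defined above. It assumes only the public
// declarations from "absl/crc/crc32c.h"; the function name and buffer
// contents are arbitrary, and the asserts exercise the documented identities.
#include <cassert>
#include <string>

#include "absl/crc/crc32c.h"

static void Crc32cArithmeticSketch() {
  const std::string a = "hello ";
  const std::string b = "world";
  const absl::crc32c_t crc_a = absl::ComputeCrc32c(a);
  const absl::crc32c_t crc_b = absl::ComputeCrc32c(b);
  const absl::crc32c_t crc_ab = absl::ComputeCrc32c(a + b);

  // Extending CRC(a) by the bytes of b is the same as hashing "a + b".
  assert(absl::ExtendCrc32c(crc_a, b) == crc_ab);
  // ConcatCrc32c() reaches the same value from the two independent CRCs in
  // O(log |b|) time instead of rehashing b.
  assert(absl::ConcatCrc32c(crc_a, crc_b, b.size()) == crc_ab);
  // The prefix/suffix removal helpers invert the concatenation.
  assert(absl::RemoveCrc32cPrefix(crc_a, crc_ab, b.size()) == crc_b);
  assert(absl::RemoveCrc32cSuffix(crc_ab, crc_b, b.size()) == crc_a);
}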
+#ifndef ABSL_CRC_CRC32C_H_ +#define ABSL_CRC_CRC32C_H_ + +#include +#include +#include + +#include "absl/crc/internal/crc32c_inline.h" +#include "absl/strings/string_view.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN + +//----------------------------------------------------------------------------- +// crc32c_t +//----------------------------------------------------------------------------- + +// `crc32c_t` defines a strongly typed integer type for holding a CRC32C value. +enum class crc32c_t : uint32_t {}; + +// ToCrc32c() +// +// Converts a uint32_t value to crc32c_t. This API is necessary in C++14 +// and earlier. Code targeting C++17-or-later can instead use `crc32c_t{n}`. +inline crc32c_t ToCrc32c(uint32_t n) { + return static_cast(n); +} +// operator^ +// +// Performs a bitwise XOR on two CRC32C values +inline crc32c_t operator^(crc32c_t lhs, crc32c_t rhs) { + const auto lhs_int = static_cast(lhs); + const auto rhs_int = static_cast(rhs); + return ToCrc32c(lhs_int ^ rhs_int); +} + +namespace crc_internal { +// Non-inline code path for `absl::ExtendCrc32c()`. Do not call directly. +// Call `absl::ExtendCrc32c()` (defined below) instead. +crc32c_t ExtendCrc32cInternal(crc32c_t initial_crc, + absl::string_view buf_to_add); +} // namespace crc_internal + +// ----------------------------------------------------------------------------- +// CRC32C Computation Functions +// ----------------------------------------------------------------------------- + +// ComputeCrc32c() +// +// Returns the CRC32C value of the provided string. +crc32c_t ComputeCrc32c(absl::string_view buf); + +// ExtendCrc32c() +// +// Computes a CRC32C value from an `initial_crc` CRC32C value including the +// `buf_to_add` bytes of an additional buffer. Using this function is more +// efficient than computing a CRC32C value for the combined buffer from +// scratch. +// +// Note: `ExtendCrc32c` with an initial_crc of 0 is equivalent to +// `ComputeCrc32c`. +// +// This operation has a runtime cost of O(`buf_to_add.size()`) +inline crc32c_t ExtendCrc32c(crc32c_t initial_crc, + absl::string_view buf_to_add) { + // Approximately 75% of calls have size <= 64. + if (buf_to_add.size() <= 64) { + uint32_t crc = static_cast(initial_crc); + if (crc_internal::ExtendCrc32cInline(&crc, buf_to_add.data(), + buf_to_add.size())) { + return ToCrc32c(crc); + } + } + return crc_internal::ExtendCrc32cInternal(initial_crc, buf_to_add); +} + +// ExtendCrc32cByZeroes() +// +// Computes a CRC32C value for a buffer with an `initial_crc` CRC32C value, +// where `length` bytes with a value of 0 are appended to the buffer. Using this +// function is more efficient than computing a CRC32C value for the combined +// buffer from scratch. +// +// This operation has a runtime cost of O(log(`length`)) +crc32c_t ExtendCrc32cByZeroes(crc32c_t initial_crc, size_t length); + +// MemcpyCrc32c() +// +// Copies `src` to `dest` using `memcpy()` semantics, returning the CRC32C +// value of the copied buffer. +// +// Using `MemcpyCrc32c()` is potentially faster than performing the `memcpy()` +// and `ComputeCrc32c()` operations separately. 
+crc32c_t MemcpyCrc32c(void* dest, const void* src, size_t count, + crc32c_t initial_crc = ToCrc32c(0)); + +// ----------------------------------------------------------------------------- +// CRC32C Arithmetic Functions +// ----------------------------------------------------------------------------- + +// The following functions perform arithmetic on CRC32C values, which are +// generally more efficient than recalculating any given result's CRC32C value. + +// ConcatCrc32c() +// +// Calculates the CRC32C value of two buffers with known CRC32C values +// concatenated together. +// +// Given a buffer with CRC32C value `crc1` and a buffer with +// CRC32C value `crc2` and length, `crc2_length`, returns the CRC32C value of +// the concatenation of these two buffers. +// +// This operation has a runtime cost of O(log(`crc2_length`)). +crc32c_t ConcatCrc32c(crc32c_t crc1, crc32c_t crc2, size_t crc2_length); + +// RemoveCrc32cPrefix() +// +// Calculates the CRC32C value of an existing buffer with a series of bytes +// (the prefix) removed from the beginning of that buffer. +// +// Given the CRC32C value of an existing buffer, `full_string_crc`; The CRC32C +// value of a prefix of that buffer, `prefix_crc`; and the length of the buffer +// with the prefix removed, `remaining_string_length` , return the CRC32C +// value of the buffer with the prefix removed. +// +// This operation has a runtime cost of O(log(`remaining_string_length`)). +crc32c_t RemoveCrc32cPrefix(crc32c_t prefix_crc, crc32c_t full_string_crc, + size_t remaining_string_length); +// RemoveCrc32cSuffix() +// +// Calculates the CRC32C value of an existing buffer with a series of bytes +// (the suffix) removed from the end of that buffer. +// +// Given a CRC32C value of an existing buffer `full_string_crc`, the CRC32C +// value of the suffix to remove `suffix_crc`, and the length of that suffix +// `suffix_len`, returns the CRC32C value of the buffer with suffix removed. +// +// This operation has a runtime cost of O(log(`suffix_len`)) +crc32c_t RemoveCrc32cSuffix(crc32c_t full_string_crc, crc32c_t suffix_crc, + size_t suffix_length); + +// operator<< +// +// Streams the CRC32C value `crc` to the stream `os`. +inline std::ostream& operator<<(std::ostream& os, crc32c_t crc) { + return os << static_cast(crc); +} + +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_CRC_CRC32C_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/cpu_detect.cc b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/cpu_detect.cc new file mode 100644 index 0000000000..339b7cc796 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/cpu_detect.cc @@ -0,0 +1,251 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
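// Illustrative sketch -- not part of the imported sources -- of the
// single-pass copy-and-checksum pattern offered by MemcpyCrc32c() above;
// the helper name, buffer size, and contents are arbitrary.
#include <cassert>
#include <string>

#include "absl/crc/crc32c.h"

static void MemcpyCrc32cSketch() {
  const std::string src(1024, 'x');
  std::string dest(src.size(), '\0');
  // Copies src into dest and accumulates the CRC32C of the copied bytes in
  // the same pass over the data.
  const absl::crc32c_t crc =
      absl::MemcpyCrc32c(&dest[0], src.data(), src.size());
  assert(dest == src);
  // Equivalent result to a separate memcpy() followed by ComputeCrc32c().
  assert(crc == absl::ComputeCrc32c(src));
}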
+ +#include "absl/crc/internal/cpu_detect.h" + +#include +#include + +#include "absl/base/config.h" + +#if defined(__aarch64__) && defined(__linux__) +#include +#include +#endif + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace crc_internal { + +#if defined(__x86_64__) + +// Inline cpuid instruction. %rbx is occasionally used to address stack +// variables in presence of dynamic allocas. Preserve the %rbx register via +// %rdi to work around a clang bug https://bugs.llvm.org/show_bug.cgi?id=17907 +// (%rbx in an output constraint is not considered a clobbered register). +// +// a_inp and c_inp are the input parameters eax and ecx of the CPUID +// instruction. +// a, b, c, and d contain the contents of eax, ebx, ecx, and edx as returned by +// the CPUID instruction +#define ABSL_INTERNAL_GETCPUID(a, b, c, d, a_inp, c_inp) \ + asm("mov %%rbx, %%rdi\n" \ + "cpuid\n" \ + "xchg %%rdi, %%rbx\n" \ + : "=a"(a), "=D"(b), "=c"(c), "=d"(d) \ + : "a"(a_inp), "2"(c_inp)) + +namespace { + +enum class Vendor { + kUnknown, + kIntel, + kAmd, +}; + +Vendor GetVendor() { + uint32_t eax, ebx, ecx, edx; + + // Get vendor string (issue CPUID with eax = 0) + ABSL_INTERNAL_GETCPUID(eax, ebx, ecx, edx, 0, 0); + std::string vendor; + vendor.append(reinterpret_cast(&ebx), 4); + vendor.append(reinterpret_cast(&edx), 4); + vendor.append(reinterpret_cast(&ecx), 4); + if (vendor == "GenuineIntel") { + return Vendor::kIntel; + } else if (vendor == "AuthenticAmd") { + return Vendor::kAmd; + } else { + return Vendor::kUnknown; + } +} + +CpuType GetIntelCpuType() { + uint32_t eax, ebx, ecx, edx; + // to get general information and extended features we send eax = 1 and + // ecx = 0 to cpuid. The response is returned in eax, ebx, ecx and edx. + // (See Intel 64 and IA-32 Architectures Software Developer's Manual + // Volume 2A: Instruction Set Reference, A-M CPUID). 
+ // https://www.intel.com/content/www/us/en/architecture-and-technology/64-ia-32-architectures-software-developer-vol-2a-manual.html + ABSL_INTERNAL_GETCPUID(eax, ebx, ecx, edx, 1, 0); + + // Response in eax bits as follows: + // 0-3 (stepping id) + // 4-7 (model number), + // 8-11 (family code), + // 12-13 (processor type), + // 16-19 (extended model) + // 20-27 (extended family) + + int family = (eax >> 8) & 0x0f; + int model_num = (eax >> 4) & 0x0f; + int ext_family = (eax >> 20) & 0xff; + int ext_model_num = (eax >> 16) & 0x0f; + + int brand_id = ebx & 0xff; + + // Process the extended family and model info if necessary + if (family == 0x0f) { + family += ext_family; + } + + if (family == 0x0f || family == 0x6) { + model_num += (ext_model_num << 4); + } + + switch (brand_id) { + case 0: // no brand ID, so parse CPU family/model + switch (family) { + case 6: // Most PentiumIII processors are in this category + switch (model_num) { + case 0x2c: // Westmere: Gulftown + return CpuType::kIntelWestmere; + case 0x2d: // Sandybridge + return CpuType::kIntelSandybridge; + case 0x3e: // Ivybridge + return CpuType::kIntelIvybridge; + case 0x3c: // Haswell (client) + case 0x3f: // Haswell + return CpuType::kIntelHaswell; + case 0x4f: // Broadwell + case 0x56: // BroadwellDE + return CpuType::kIntelBroadwell; + case 0x55: // Skylake Xeon + if ((eax & 0x0f) < 5) { // stepping < 5 is skylake + return CpuType::kIntelSkylakeXeon; + } else { // stepping >= 5 is cascadelake + return CpuType::kIntelCascadelakeXeon; + } + case 0x5e: // Skylake (client) + return CpuType::kIntelSkylake; + default: + return CpuType::kUnknown; + } + default: + return CpuType::kUnknown; + } + default: + return CpuType::kUnknown; + } +} + +CpuType GetAmdCpuType() { + uint32_t eax, ebx, ecx, edx; + // to get general information and extended features we send eax = 1 and + // ecx = 0 to cpuid. The response is returned in eax, ebx, ecx and edx. + // (See Intel 64 and IA-32 Architectures Software Developer's Manual + // Volume 2A: Instruction Set Reference, A-M CPUID). 
+ ABSL_INTERNAL_GETCPUID(eax, ebx, ecx, edx, 1, 0); + + // Response in eax bits as follows: + // 0-3 (stepping id) + // 4-7 (model number), + // 8-11 (family code), + // 12-13 (processor type), + // 16-19 (extended model) + // 20-27 (extended family) + + int family = (eax >> 8) & 0x0f; + int model_num = (eax >> 4) & 0x0f; + int ext_family = (eax >> 20) & 0xff; + int ext_model_num = (eax >> 16) & 0x0f; + + if (family == 0x0f) { + family += ext_family; + model_num += (ext_model_num << 4); + } + + switch (family) { + case 0x17: + switch (model_num) { + case 0x0: // Stepping Ax + case 0x1: // Stepping Bx + return CpuType::kAmdNaples; + case 0x30: // Stepping Ax + case 0x31: // Stepping Bx + return CpuType::kAmdRome; + default: + return CpuType::kUnknown; + } + break; + case 0x19: + switch (model_num) { + case 0x1: // Stepping B0 + return CpuType::kAmdMilan; + default: + return CpuType::kUnknown; + } + break; + default: + return CpuType::kUnknown; + } +} + +} // namespace + +CpuType GetCpuType() { + switch (GetVendor()) { + case Vendor::kIntel: + return GetIntelCpuType(); + case Vendor::kAmd: + return GetAmdCpuType(); + default: + return CpuType::kUnknown; + } +} + +bool SupportsArmCRC32PMULL() { return false; } + +#elif defined(__aarch64__) && defined(__linux__) + +#define ABSL_INTERNAL_AARCH64_ID_REG_READ(id, val) \ + asm("mrs %0, " #id : "=r"(val)) + +CpuType GetCpuType() { + // MIDR_EL1 is not visible to EL0, however the access will be emulated by + // linux if AT_HWCAP has HWCAP_CPUID set. + // + // This method will be unreliable on heterogeneous computing systems (ex: + // big.LITTLE) since the value of MIDR_EL1 will change based on the calling + // thread. + uint64_t hwcaps = getauxval(AT_HWCAP); + if (hwcaps & HWCAP_CPUID) { + uint64_t midr = 0; + ABSL_INTERNAL_AARCH64_ID_REG_READ(MIDR_EL1, midr); + uint32_t implementer = (midr >> 24) & 0xff; + uint32_t part_number = (midr >> 4) & 0xfff; + if (implementer == 0x41 && part_number == 0xd0c) { + return CpuType::kArmNeoverseN1; + } + } + return CpuType::kUnknown; +} + +bool SupportsArmCRC32PMULL() { + uint64_t hwcaps = getauxval(AT_HWCAP); + return (hwcaps & HWCAP_CRC32) && (hwcaps & HWCAP_PMULL); +} + +#else + +CpuType GetCpuType() { return CpuType::kUnknown; } + +bool SupportsArmCRC32PMULL() { return false; } + +#endif + +} // namespace crc_internal +ABSL_NAMESPACE_END +} // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/cpu_detect.h b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/cpu_detect.h new file mode 100644 index 0000000000..6054f6960d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/cpu_detect.h @@ -0,0 +1,57 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef ABSL_CRC_INTERNAL_CPU_DETECT_H_ +#define ABSL_CRC_INTERNAL_CPU_DETECT_H_ + +#include "absl/base/config.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace crc_internal { + +// Enumeration of architectures that we have special-case tuning parameters for. 
+// This set may change over time. +enum class CpuType { + kUnknown, + kIntelHaswell, + kAmdRome, + kAmdNaples, + kAmdMilan, + kIntelCascadelakeXeon, + kIntelSkylakeXeon, + kIntelBroadwell, + kIntelSkylake, + kIntelIvybridge, + kIntelSandybridge, + kIntelWestmere, + kArmNeoverseN1, +}; + +// Returns the type of host CPU this code is running on. Returns kUnknown if +// the host CPU is of unknown type, or if detection otherwise fails. +CpuType GetCpuType(); + +// Returns whether the host CPU supports the CPU features needed for our +// accelerated implementations. The CpuTypes enumerated above apart from +// kUnknown support the required features. On unknown CPUs, we can use +// this to see if it's safe to use hardware acceleration, though without any +// tuning. +bool SupportsArmCRC32PMULL(); + +} // namespace crc_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_CRC_INTERNAL_CPU_DETECT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc.cc b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc.cc new file mode 100644 index 0000000000..bb8936e373 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc.cc @@ -0,0 +1,468 @@ +// Copyright 2022 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Implementation of CRCs (aka Rabin Fingerprints). +// Treats the input as a polynomial with coefficients in Z(2), +// and finds the remainder when divided by an irreducible polynomial +// of the appropriate length. +// It handles all CRC sizes from 8 to 128 bits. +// It's somewhat complicated by having separate implementations optimized for +// CRC's <=32 bits, <= 64 bits, and <= 128 bits. +// The input string is prefixed with a "1" bit, and has "degree" "0" bits +// appended to it before the remainder is found. This ensures that +// short strings are scrambled somewhat and that strings consisting +// of all nulls have a non-zero CRC. +// +// Uses the "interleaved word-by-word" method from +// "Everything we know about CRC but afraid to forget" by Andrew Kadatch +// and Bob Jenkins, +// http://crcutil.googlecode.com/files/crc-doc.1.0.pdf +// +// The idea is to compute kStride CRCs simultaneously, allowing the +// processor to more effectively use multiple execution units. Each of +// the CRCs is calculated on one word of data followed by kStride - 1 +// words of zeroes; the CRC starting points are staggered by one word. +// Assuming a stride of 4 with data words "ABCDABCDABCD", the first +// CRC is over A000A000A, the second over 0B000B000B, and so on. +// The CRC of the whole data is then calculated by properly aligning the +// CRCs by appending zeroes until the data lengths agree then XORing +// the CRCs. 
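// Self-contained model -- not the library implementation above -- of why the
// interleaved word-by-word scheme just described works: a raw CRC with zero
// initial value and no output XOR is linear over XOR, so the CRC of the whole
// buffer equals the XOR of the CRCs of the zero-padded, staggered streams.
// The stride, word size, and data below are illustrative.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

namespace {

// Bit-at-a-time reflected CRC32C with no init/final inversion (the plain
// polynomial remainder, which is what makes the linearity argument hold).
uint32_t RawCrc32c(const std::vector<uint8_t>& data) {
  uint32_t crc = 0;
  for (uint8_t byte : data) {
    crc ^= byte;
    for (int i = 0; i < 8; ++i) {
      crc = (crc >> 1) ^ (0x82f63b78u & (0u - (crc & 1u)));
    }
  }
  return crc;
}

void InterleavedCrcModel() {
  constexpr size_t kStride = 4;  // number of independent CRC streams
  constexpr size_t kWord = 4;    // bytes per data word
  std::vector<uint8_t> data(kStride * kWord * 3);
  for (size_t i = 0; i < data.size(); ++i) {
    data[i] = static_cast<uint8_t>(i * 37 + 1);
  }

  uint32_t combined = 0;
  for (size_t s = 0; s < kStride; ++s) {
    // Stream s keeps words s, s + kStride, ... and zeroes everywhere else,
    // padded to the full data length so every stream lines up.
    std::vector<uint8_t> stream(data.size(), 0);
    for (size_t i = 0; i < data.size(); ++i) {
      if ((i / kWord) % kStride == s) stream[i] = data[i];
    }
    combined ^= RawCrc32c(stream);
  }
  assert(combined == RawCrc32c(data));
}

}  // namespace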
+ +#include "absl/crc/internal/crc.h" + +#include + +#include "absl/base/internal/endian.h" +#include "absl/base/internal/prefetch.h" +#include "absl/base/internal/raw_logging.h" +#include "absl/crc/internal/crc_internal.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace crc_internal { + +namespace { + +// Constants +#if defined(__i386__) || defined(__x86_64__) +constexpr bool kNeedAlignedLoads = false; +#else +constexpr bool kNeedAlignedLoads = true; +#endif + +// We express the number of zeroes as a number in base ZEROES_BASE. By +// pre-computing the zero extensions for all possible components of such an +// expression (numbers in a form a*ZEROES_BASE**b), we can calculate the +// resulting extension by multiplying the extensions for individual components +// using log_{ZEROES_BASE}(num_zeroes) polynomial multiplications. The tables of +// zero extensions contain (ZEROES_BASE - 1) * (log_{ZEROES_BASE}(64)) entries. +constexpr int ZEROES_BASE_LG = 4; // log_2(ZEROES_BASE) +constexpr int ZEROES_BASE = (1 << ZEROES_BASE_LG); // must be a power of 2 + +constexpr uint32_t kCrc32cPoly = 0x82f63b78; + +uint32_t ReverseBits(uint32_t bits) { + bits = (bits & 0xaaaaaaaau) >> 1 | (bits & 0x55555555u) << 1; + bits = (bits & 0xccccccccu) >> 2 | (bits & 0x33333333u) << 2; + bits = (bits & 0xf0f0f0f0u) >> 4 | (bits & 0x0f0f0f0fu) << 4; + return absl::gbswap_32(bits); +} + +// Polynomial long multiplication mod the polynomial of degree 32. +void PolyMultiply(uint32_t* val, uint32_t m, uint32_t poly) { + uint32_t l = *val; + uint32_t result = 0; + auto onebit = uint32_t{0x80000000u}; + for (uint32_t one = onebit; one != 0; one >>= 1) { + if ((l & one) != 0) { + result ^= m; + } + if (m & 1) { + m = (m >> 1) ^ poly; + } else { + m >>= 1; + } + } + *val = result; +} +} // namespace + +void CRCImpl::FillWordTable(uint32_t poly, uint32_t last, int word_size, + Uint32By256* t) { + for (int j = 0; j != word_size; j++) { // for each byte of extension.... + t[j][0] = 0; // a zero has no effect + for (int i = 128; i != 0; i >>= 1) { // fill in entries for powers of 2 + if (j == 0 && i == 128) { + t[j][i] = last; // top bit in last byte is given + } else { + // each successive power of two is derived from the previous + // one, either in this table, or the last table + uint32_t pred; + if (i == 128) { + pred = t[j - 1][1]; + } else { + pred = t[j][i << 1]; + } + // Advance the CRC by one bit (multiply by X, and take remainder + // through one step of polynomial long division) + if (pred & 1) { + t[j][i] = (pred >> 1) ^ poly; + } else { + t[j][i] = pred >> 1; + } + } + } + // CRCs have the property that CRC(a xor b) == CRC(a) xor CRC(b) + // so we can make all the tables for non-powers of two by + // xoring previously created entries. + for (int i = 2; i != 256; i <<= 1) { + for (int k = i + 1; k != (i << 1); k++) { + t[j][k] = t[j][i] ^ t[j][k - i]; + } + } + } +} + +int CRCImpl::FillZeroesTable(uint32_t poly, Uint32By256* t) { + uint32_t inc = 1; + inc <<= 31; + + // Extend by one zero bit. We know degree > 1 so (inc & 1) == 0. + inc >>= 1; + + // Now extend by 2, 4, and 8 bits, so now `inc` is extended by one zero byte. + for (int i = 0; i < 3; ++i) { + PolyMultiply(&inc, inc, poly); + } + + int j = 0; + for (uint64_t inc_len = 1; inc_len != 0; inc_len <<= ZEROES_BASE_LG) { + // Every entry in the table adds an additional inc_len zeroes. 
+ uint32_t v = inc; + for (int a = 1; a != ZEROES_BASE; a++) { + t[0][j] = v; + PolyMultiply(&v, inc, poly); + j++; + } + inc = v; + } + ABSL_RAW_CHECK(j <= 256, ""); + return j; +} + +// Internal version of the "constructor". +CRCImpl* CRCImpl::NewInternal() { + // Find an accelearated implementation first. + CRCImpl* result = TryNewCRC32AcceleratedX86ARMCombined(); + + // Fall back to generic implementions if no acceleration is available. + if (result == nullptr) { + result = new CRC32(); + } + + result->InitTables(); + + return result; +} + +// The CRC of the empty string is always the CRC polynomial itself. +void CRCImpl::Empty(uint32_t* crc) const { *crc = kCrc32cPoly; } + +// The 32-bit implementation + +void CRC32::InitTables() { + // Compute the table for extending a CRC by one byte. + Uint32By256* t = new Uint32By256[4]; + FillWordTable(kCrc32cPoly, kCrc32cPoly, 1, t); + for (int i = 0; i != 256; i++) { + this->table0_[i] = t[0][i]; + } + + // Construct a table for updating the CRC by 4 bytes data followed by + // 12 bytes of zeroes. + // + // Note: the data word size could be larger than the CRC size; it might + // be slightly faster to use a 64-bit data word, but doing so doubles the + // table size. + uint32_t last = kCrc32cPoly; + const size_t size = 12; + for (size_t i = 0; i < size; ++i) { + last = (last >> 8) ^ this->table0_[last & 0xff]; + } + FillWordTable(kCrc32cPoly, last, 4, t); + for (size_t b = 0; b < 4; ++b) { + for (int i = 0; i < 256; ++i) { + this->table_[b][i] = t[b][i]; + } + } + + int j = FillZeroesTable(kCrc32cPoly, t); + ABSL_RAW_CHECK(j <= static_cast(ABSL_ARRAYSIZE(this->zeroes_)), ""); + for (int i = 0; i < j; i++) { + this->zeroes_[i] = t[0][i]; + } + + delete[] t; + + // Build up tables for _reversing_ the operation of doing CRC operations on + // zero bytes. + + // In C++, extending `crc` by a single zero bit is done by the following: + // (A) bool low_bit_set = (crc & 1); + // crc >>= 1; + // if (low_bit_set) crc ^= kCrc32cPoly; + // + // In particular note that the high bit of `crc` after this operation will be + // set if and only if the low bit of `crc` was set before it. This means that + // no information is lost, and the operation can be reversed, as follows: + // (B) bool high_bit_set = (crc & 0x80000000u); + // if (high_bit_set) crc ^= kCrc32cPoly; + // crc <<= 1; + // if (high_bit_set) crc ^= 1; + // + // Or, equivalently: + // (C) bool high_bit_set = (crc & 0x80000000u); + // crc <<= 1; + // if (high_bit_set) crc ^= ((kCrc32cPoly << 1) ^ 1); + // + // The last observation is, if we store our checksums in variable `rcrc`, + // with order of the bits reversed, the inverse operation becomes: + // (D) bool low_bit_set = (rcrc & 1); + // rcrc >>= 1; + // if (low_bit_set) rcrc ^= ReverseBits((kCrc32cPoly << 1) ^ 1) + // + // This is the same algorithm (A) that we started with, only with a different + // polynomial bit pattern. This means that by building up our tables with + // this alternate polynomial, we can apply the CRC algorithms to a + // bit-reversed CRC checksum to perform inverse zero-extension. 
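// Standalone check -- not library code -- of the bit-level identity derived in
// the comment above: step (B) exactly undoes step (A) for the CRC32C
// polynomial, which is what justifies the reverse (unextend) tables built
// right after this point. The starting value and iteration count are
// arbitrary.
#include <cassert>
#include <cstdint>

namespace {

constexpr uint32_t kPoly = 0x82f63b78u;  // kCrc32cPoly

uint32_t ExtendByOneZeroBit(uint32_t crc) {  // step (A)
  const bool low_bit_set = (crc & 1u) != 0;
  crc >>= 1;
  if (low_bit_set) crc ^= kPoly;
  return crc;
}

uint32_t UnextendByOneZeroBit(uint32_t crc) {  // step (B)
  const bool high_bit_set = (crc & 0x80000000u) != 0;
  if (high_bit_set) crc ^= kPoly;
  crc <<= 1;
  if (high_bit_set) crc ^= 1u;
  return crc;
}

void OneBitRoundTrip() {
  const uint32_t original = 0x12345678u;
  uint32_t crc = original;
  for (int i = 0; i < 64; ++i) crc = ExtendByOneZeroBit(crc);
  for (int i = 0; i < 64; ++i) crc = UnextendByOneZeroBit(crc);
  assert(crc == original);
}

}  // namespace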
+ + const uint32_t kCrc32cUnextendPoly = + ReverseBits(static_cast((kCrc32cPoly << 1) ^ 1)); + FillWordTable(kCrc32cUnextendPoly, kCrc32cUnextendPoly, 1, &reverse_table0_); + + j = FillZeroesTable(kCrc32cUnextendPoly, &reverse_zeroes_); + ABSL_RAW_CHECK(j <= static_cast(ABSL_ARRAYSIZE(this->reverse_zeroes_)), + ""); +} + +void CRC32::Extend(uint32_t* crc, const void* bytes, size_t length) const { + const uint8_t* p = static_cast(bytes); + const uint8_t* e = p + length; + uint32_t l = *crc; + + auto step_one_byte = [this, &p, &l] () { + int c = (l & 0xff) ^ *p++; + l = this->table0_[c] ^ (l >> 8); + }; + + if (kNeedAlignedLoads) { + // point x at first 4-byte aligned byte in string. this might be past the + // end of the string. + const uint8_t* x = RoundUp<4>(p); + if (x <= e) { + // Process bytes until finished or p is 4-byte aligned + while (p != x) { + step_one_byte(); + } + } + } + + const size_t kSwathSize = 16; + if (static_cast(e - p) >= kSwathSize) { + // Load one swath of data into the operating buffers. + uint32_t buf0 = absl::little_endian::Load32(p) ^ l; + uint32_t buf1 = absl::little_endian::Load32(p + 4); + uint32_t buf2 = absl::little_endian::Load32(p + 8); + uint32_t buf3 = absl::little_endian::Load32(p + 12); + p += kSwathSize; + + // Increment a CRC value by a "swath"; this combines the four bytes + // starting at `ptr` and twelve zero bytes, so that four CRCs can be + // built incrementally and combined at the end. + const auto step_swath = [this](uint32_t crc_in, const std::uint8_t* ptr) { + return absl::little_endian::Load32(ptr) ^ + this->table_[3][crc_in & 0xff] ^ + this->table_[2][(crc_in >> 8) & 0xff] ^ + this->table_[1][(crc_in >> 16) & 0xff] ^ + this->table_[0][crc_in >> 24]; + }; + + // Run one CRC calculation step over all swaths in one 16-byte stride + const auto step_stride = [&]() { + buf0 = step_swath(buf0, p); + buf1 = step_swath(buf1, p + 4); + buf2 = step_swath(buf2, p + 8); + buf3 = step_swath(buf3, p + 12); + p += 16; + }; + + // Process kStride interleaved swaths through the data in parallel. + while ((e - p) > kPrefetchHorizon) { + base_internal::PrefetchNta( + reinterpret_cast(p + kPrefetchHorizon)); + // Process 64 bytes at a time + step_stride(); + step_stride(); + step_stride(); + step_stride(); + } + while (static_cast(e - p) >= kSwathSize) { + step_stride(); + } + + // Now advance one word at a time as far as possible. This isn't worth + // doing if we have word-advance tables. + while (static_cast(e - p) >= 4) { + buf0 = step_swath(buf0, p); + uint32_t tmp = buf0; + buf0 = buf1; + buf1 = buf2; + buf2 = buf3; + buf3 = tmp; + p += 4; + } + + // Combine the results from the different swaths. This is just a CRC + // on the data values in the bufX words. 
+ auto combine_one_word = [this](uint32_t crc_in, uint32_t w) { + w ^= crc_in; + for (size_t i = 0; i < 4; ++i) { + w = (w >> 8) ^ this->table0_[w & 0xff]; + } + return w; + }; + + l = combine_one_word(0, buf0); + l = combine_one_word(l, buf1); + l = combine_one_word(l, buf2); + l = combine_one_word(l, buf3); + } + + // Process the last few bytes + while (p != e) { + step_one_byte(); + } + + *crc = l; +} + +void CRC32::ExtendByZeroesImpl(uint32_t* crc, size_t length, + const uint32_t zeroes_table[256], + const uint32_t poly_table[256]) const { + if (length != 0) { + uint32_t l = *crc; + // For each ZEROES_BASE_LG bits in length + // (after the low-order bits have been removed) + // we lookup the appropriate polynomial in the zeroes_ array + // and do a polynomial long multiplication (mod the CRC polynomial) + // to extend the CRC by the appropriate number of bits. + for (int i = 0; length != 0; + i += ZEROES_BASE - 1, length >>= ZEROES_BASE_LG) { + int c = length & (ZEROES_BASE - 1); // pick next ZEROES_BASE_LG bits + if (c != 0) { // if they are not zero, + // multiply by entry in table + // Build a table to aid in multiplying 2 bits at a time. + // It takes too long to build tables for more bits. + uint64_t m = zeroes_table[c + i - 1]; + m <<= 1; + uint64_t m2 = m << 1; + uint64_t mtab[4] = {0, m, m2, m2 ^ m}; + + // Do the multiply one byte at a time. + uint64_t result = 0; + for (int x = 0; x < 32; x += 8) { + // The carry-less multiply. + result ^= mtab[l & 3] ^ (mtab[(l >> 2) & 3] << 2) ^ + (mtab[(l >> 4) & 3] << 4) ^ (mtab[(l >> 6) & 3] << 6); + l >>= 8; + + // Reduce modulo the polynomial + result = (result >> 8) ^ poly_table[result & 0xff]; + } + l = static_cast(result); + } + } + *crc = l; + } +} + +void CRC32::ExtendByZeroes(uint32_t* crc, size_t length) const { + return CRC32::ExtendByZeroesImpl(crc, length, zeroes_, table0_); +} + +void CRC32::UnextendByZeroes(uint32_t* crc, size_t length) const { + // See the comment in CRC32::InitTables() for an explanation of the algorithm + // below. + *crc = ReverseBits(*crc); + ExtendByZeroesImpl(crc, length, reverse_zeroes_, reverse_table0_); + *crc = ReverseBits(*crc); +} + +void CRC32::Scramble(uint32_t* crc) const { + // Rotate by near half the word size plus 1. See the scramble comment in + // crc_internal.h for an explanation. + constexpr int scramble_rotate = (32 / 2) + 1; + *crc = RotateRight(static_cast(*crc + kScrambleLo), + 32, scramble_rotate) & + MaskOfLength(32); +} + +void CRC32::Unscramble(uint32_t* crc) const { + constexpr int scramble_rotate = (32 / 2) + 1; + uint64_t rotated = RotateRight(static_cast(*crc), 32, + 32 - scramble_rotate); + *crc = (rotated - kScrambleLo) & MaskOfLength(32); +} + +// Constructor and destructor for base class CRC. +CRC::~CRC() {} +CRC::CRC() {} + +// The "constructor" for a CRC32C with a standard polynomial. +CRC* CRC::Crc32c() { + static CRC* singleton = CRCImpl::NewInternal(); + return singleton; +} + +// This Concat implementation works for arbitrary polynomials. 
+void CRC::Concat(uint32_t* px, uint32_t y, size_t ylen) { + // https://en.wikipedia.org/wiki/Mathematics_of_cyclic_redundancy_checks + // The CRC of a message M is the remainder of polynomial divison modulo G, + // where the coefficient arithmetic is performed modulo 2 (so +/- are XOR): + // R(x) = M(x) x**n (mod G) + // (n is the degree of G) + // In practice, we use an initial value A and a bitmask B to get + // R = (A ^ B)x**|M| ^ Mx**n ^ B (mod G) + // If M is the concatenation of two strings S and T, and Z is the string of + // len(T) 0s, then the remainder CRC(ST) can be expressed as: + // R = (A ^ B)x**|ST| ^ STx**n ^ B + // = (A ^ B)x**|SZ| ^ SZx**n ^ B ^ Tx**n + // = CRC(SZ) ^ Tx**n + // CRC(Z) = (A ^ B)x**|T| ^ B + // CRC(T) = (A ^ B)x**|T| ^ Tx**n ^ B + // So R = CRC(SZ) ^ CRC(Z) ^ CRC(T) + // + // And further, since CRC(SZ) = Extend(CRC(S), Z), + // CRC(SZ) ^ CRC(Z) = Extend(CRC(S) ^ CRC(''), Z). + uint32_t z; + uint32_t t; + Empty(&z); + t = *px ^ z; + ExtendByZeroes(&t, ylen); + *px = t ^ y; +} + +} // namespace crc_internal +ABSL_NAMESPACE_END +} // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc.h b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc.h new file mode 100644 index 0000000000..72515b061d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc.h @@ -0,0 +1,91 @@ +// Copyright 2022 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef ABSL_CRC_INTERNAL_CRC_H_ +#define ABSL_CRC_INTERNAL_CRC_H_ + +#include + +#include "absl/base/config.h" + +// This class implements CRCs (aka Rabin Fingerprints). +// Treats the input as a polynomial with coefficients in Z(2), +// and finds the remainder when divided by an primitive polynomial +// of the appropriate length. + +// A polynomial is represented by the bit pattern formed by its coefficients, +// but with the highest order bit not stored. +// The highest degree coefficient is stored in the lowest numbered bit +// in the lowest addressed byte. Thus, in what follows, the highest degree +// coefficient that is stored is in the low order bit of "lo" or "*lo". + +// Hardware acceleration is used when available. + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace crc_internal { + +class CRC { + public: + virtual ~CRC(); + + // Place the CRC of the empty string in "*crc" + virtual void Empty(uint32_t* crc) const = 0; + + // If "*crc" is the CRC of bytestring A, place the CRC of + // the bytestring formed from the concatenation of A and the "length" + // bytes at "bytes" into "*crc". + virtual void Extend(uint32_t* crc, const void* bytes, + size_t length) const = 0; + + // Equivalent to Extend(crc, bytes, length) where "bytes" + // points to an array of "length" zero bytes. + virtual void ExtendByZeroes(uint32_t* crc, size_t length) const = 0; + + // Inverse opration of ExtendByZeroes. If `crc` is the CRC value of a string + // ending in `length` zero bytes, this returns a CRC value of that string + // with those zero bytes removed. 
+ virtual void UnextendByZeroes(uint32_t* crc, size_t length) const = 0; + + // If *px is the CRC (as defined by *crc) of some string X, + // and y is the CRC of some string Y that is ylen bytes long, set + // *px to the CRC of the concatenation of X followed by Y. + virtual void Concat(uint32_t* px, uint32_t y, size_t ylen); + + // Apply a non-linear transformation to "*crc" so that + // it is safe to CRC the result with the same polynomial without + // any reduction of error-detection ability in the outer CRC. + // Unscramble() performs the inverse transformation. + // It is strongly recommended that CRCs be scrambled before storage or + // transmission, and unscrambled at the other end before futher manipulation. + virtual void Scramble(uint32_t* crc) const = 0; + virtual void Unscramble(uint32_t* crc) const = 0; + + // Crc32c() returns the singleton implementation of CRC for the CRC32C + // polynomial. Returns a handle that MUST NOT be destroyed with delete. + static CRC* Crc32c(); + + protected: + CRC(); // Clients may not call constructor; use Crc32c() instead. + + private: + CRC(const CRC&) = delete; + CRC& operator=(const CRC&) = delete; +}; + +} // namespace crc_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_CRC_INTERNAL_CRC_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc32_x86_arm_combined_simd.h b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc32_x86_arm_combined_simd.h new file mode 100644 index 0000000000..59d71fd466 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc32_x86_arm_combined_simd.h @@ -0,0 +1,260 @@ +// Copyright 2022 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef ABSL_CRC_INTERNAL_CRC32_X86_ARM_COMBINED_SIMD_H_ +#define ABSL_CRC_INTERNAL_CRC32_X86_ARM_COMBINED_SIMD_H_ + +#include + +#include "absl/base/config.h" + +// ------------------------------------------------------------------------- +// Many x86 and ARM machines have CRC acceleration hardware. +// We can do a faster version of Extend() on such machines. +// We define a translation layer for both x86 and ARM for the ease of use and +// most performance gains. + +// We need CRC (part of sse4.2) and PCLMULQDQ instructions. +#if defined(__SSE4_2__) && defined(__PCLMUL__) + +#include +#define ABSL_CRC_INTERNAL_HAVE_X86_SIMD + +#elif defined(__aarch64__) && defined(__LITTLE_ENDIAN__) && \ + defined(__ARM_FEATURE_CRC32) && defined(__ARM_NEON) + +#include +#include +#define ABSL_CRC_INTERNAL_HAVE_ARM_SIMD + +#endif + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace crc_internal { + +#if defined(ABSL_CRC_INTERNAL_HAVE_ARM_SIMD) || \ + defined(ABSL_CRC_INTERNAL_HAVE_X86_SIMD) + +#if defined(ABSL_CRC_INTERNAL_HAVE_ARM_SIMD) +using V128 = uint64x2_t; +#else +using V128 = __m128i; +#endif + +// Starting with the initial value in |crc|, accumulates a CRC32 value for +// unsigned integers of different sizes. 
+uint32_t CRC32_u8(uint32_t crc, uint8_t v); + +uint32_t CRC32_u16(uint32_t crc, uint16_t v); + +uint32_t CRC32_u32(uint32_t crc, uint32_t v); + +uint32_t CRC32_u64(uint32_t crc, uint64_t v); + +// Loads 128 bits of integer data. |src| must be 16-byte aligned. +V128 V128_Load(const V128* src); + +// Load 128 bits of integer data. |src| does not need to be aligned. +V128 V128_LoadU(const V128* src); + +// Polynomially multiplies the high 64 bits of |l| and |r|. +V128 V128_PMulHi(const V128 l, const V128 r); + +// Polynomially multiplies the low 64 bits of |l| and |r|. +V128 V128_PMulLow(const V128 l, const V128 r); + +// Polynomially multiplies the low 64 bits of |r| and high 64 bits of |l|. +V128 V128_PMul01(const V128 l, const V128 r); + +// Polynomially multiplies the low 64 bits of |l| and high 64 bits of |r|. +V128 V128_PMul10(const V128 l, const V128 r); + +// Produces a XOR operation of |l| and |r|. +V128 V128_Xor(const V128 l, const V128 r); + +// Produces an AND operation of |l| and |r|. +V128 V128_And(const V128 l, const V128 r); + +// Sets two 64 bit integers to one 128 bit vector. The order is reverse. +// dst[63:0] := |r| +// dst[127:64] := |l| +V128 V128_From2x64(const uint64_t l, const uint64_t r); + +// Shift |l| right by |imm| bytes while shifting in zeros. +template +V128 V128_ShiftRight(const V128 l); + +// Extracts a 32-bit integer from |l|, selected with |imm|. +template +int V128_Extract32(const V128 l); + +// Extracts the low 64 bits from V128. +int64_t V128_Low64(const V128 l); + +// Left-shifts packed 64-bit integers in l by r. +V128 V128_ShiftLeft64(const V128 l, const V128 r); + +#endif + +#if defined(ABSL_CRC_INTERNAL_HAVE_X86_SIMD) + +inline uint32_t CRC32_u8(uint32_t crc, uint8_t v) { + return _mm_crc32_u8(crc, v); +} + +inline uint32_t CRC32_u16(uint32_t crc, uint16_t v) { + return _mm_crc32_u16(crc, v); +} + +inline uint32_t CRC32_u32(uint32_t crc, uint32_t v) { + return _mm_crc32_u32(crc, v); +} + +inline uint32_t CRC32_u64(uint32_t crc, uint64_t v) { + return _mm_crc32_u64(crc, v); +} + +inline V128 V128_Load(const V128* src) { return _mm_load_si128(src); } + +inline V128 V128_LoadU(const V128* src) { return _mm_loadu_si128(src); } + +inline V128 V128_PMulHi(const V128 l, const V128 r) { + return _mm_clmulepi64_si128(l, r, 0x11); +} + +inline V128 V128_PMulLow(const V128 l, const V128 r) { + return _mm_clmulepi64_si128(l, r, 0x00); +} + +inline V128 V128_PMul01(const V128 l, const V128 r) { + return _mm_clmulepi64_si128(l, r, 0x01); +} + +inline V128 V128_PMul10(const V128 l, const V128 r) { + return _mm_clmulepi64_si128(l, r, 0x10); +} + +inline V128 V128_Xor(const V128 l, const V128 r) { return _mm_xor_si128(l, r); } + +inline V128 V128_And(const V128 l, const V128 r) { return _mm_and_si128(l, r); } + +inline V128 V128_From2x64(const uint64_t l, const uint64_t r) { + return _mm_set_epi64x(l, r); +} + +template +inline V128 V128_ShiftRight(const V128 l) { + return _mm_srli_si128(l, imm); +} + +template +inline int V128_Extract32(const V128 l) { + return _mm_extract_epi32(l, imm); +} + +inline int64_t V128_Low64(const V128 l) { return _mm_cvtsi128_si64(l); } + +inline V128 V128_ShiftLeft64(const V128 l, const V128 r) { + return _mm_sll_epi64(l, r); +} + +#elif defined(ABSL_CRC_INTERNAL_HAVE_ARM_SIMD) + +inline uint32_t CRC32_u8(uint32_t crc, uint8_t v) { return __crc32cb(crc, v); } + +inline uint32_t CRC32_u16(uint32_t crc, uint16_t v) { + return __crc32ch(crc, v); +} + +inline uint32_t CRC32_u32(uint32_t crc, uint32_t v) { + return __crc32cw(crc, v); +} + 
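// Small illustrative check -- not part of the imported header -- of the lane
// ordering documented for V128_From2x64() above. It assumes a build where one
// of this header's SIMD paths is enabled (SSE4.2+PCLMUL or ARMv8 with
// CRC32/NEON); the constants and function name are arbitrary.
#include <cassert>
#include <cstdint>

#include "absl/crc/internal/crc32_x86_arm_combined_simd.h"

static void V128LaneOrderSketch() {
  using absl::crc_internal::V128;
  const V128 v = absl::crc_internal::V128_From2x64(0x1111222233334444u,
                                                   0x5555666677778888u);
  // The second argument lands in the low 64 bits...
  assert(static_cast<uint64_t>(absl::crc_internal::V128_Low64(v)) ==
         0x5555666677778888u);
  // ...and 32-bit lane 2 (bits 64..95) holds the low half of the first one.
  assert(static_cast<uint32_t>(absl::crc_internal::V128_Extract32<2>(v)) ==
         0x33334444u);
}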
+inline uint32_t CRC32_u64(uint32_t crc, uint64_t v) { + return __crc32cd(crc, v); +} + +inline V128 V128_Load(const V128* src) { + return vld1q_u64(reinterpret_cast(src)); +} + +inline V128 V128_LoadU(const V128* src) { + return vld1q_u64(reinterpret_cast(src)); +} + +// Using inline assembly as clang does not generate the pmull2 instruction and +// performance drops by 15-20%. +// TODO(b/193678732): Investigate why the compiler decides not to generate +// such instructions and why it becomes so much worse. +inline V128 V128_PMulHi(const V128 l, const V128 r) { + uint64x2_t res; + __asm__ __volatile__("pmull2 %0.1q, %1.2d, %2.2d \n\t" + : "=w"(res) + : "w"(l), "w"(r)); + return res; +} + +inline V128 V128_PMulLow(const V128 l, const V128 r) { + return reinterpret_cast(vmull_p64( + reinterpret_cast(vget_low_p64(vreinterpretq_p64_u64(l))), + reinterpret_cast(vget_low_p64(vreinterpretq_p64_u64(r))))); +} + +inline V128 V128_PMul01(const V128 l, const V128 r) { + return reinterpret_cast(vmull_p64( + reinterpret_cast(vget_high_p64(vreinterpretq_p64_u64(l))), + reinterpret_cast(vget_low_p64(vreinterpretq_p64_u64(r))))); +} + +inline V128 V128_PMul10(const V128 l, const V128 r) { + return reinterpret_cast(vmull_p64( + reinterpret_cast(vget_low_p64(vreinterpretq_p64_u64(l))), + reinterpret_cast(vget_high_p64(vreinterpretq_p64_u64(r))))); +} + +inline V128 V128_Xor(const V128 l, const V128 r) { return veorq_u64(l, r); } + +inline V128 V128_And(const V128 l, const V128 r) { return vandq_u64(l, r); } + +inline V128 V128_From2x64(const uint64_t l, const uint64_t r) { + return vcombine_u64(vcreate_u64(r), vcreate_u64(l)); +} + +template +inline V128 V128_ShiftRight(const V128 l) { + return vreinterpretq_u64_s8( + vextq_s8(vreinterpretq_s8_u64(l), vdupq_n_s8(0), imm)); +} + +template +inline int V128_Extract32(const V128 l) { + return vgetq_lane_s32(vreinterpretq_s32_u64(l), imm); +} + +inline int64_t V128_Low64(const V128 l) { + return vgetq_lane_s64(vreinterpretq_s64_u64(l), 0); +} + +inline V128 V128_ShiftLeft64(const V128 l, const V128 r) { + return vshlq_u64(l, r); +} + +#endif + +} // namespace crc_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_CRC_INTERNAL_CRC32_X86_ARM_COMBINED_SIMD_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc32c.h b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc32c.h new file mode 100644 index 0000000000..34027c55e4 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc32c.h @@ -0,0 +1,39 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef ABSL_CRC_INTERNAL_CRC32C_H_ +#define ABSL_CRC_INTERNAL_CRC32C_H_ + +#include "absl/base/config.h" +#include "absl/crc/crc32c.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace crc_internal { + +// Modifies a CRC32 value by removing `length` bytes with a value of 0 from +// the end of the string. +// +// This is the inverse operation of ExtendCrc32cByZeroes(). 
+// +// This operation has a runtime cost of O(log(`length`)) +// +// Internal implementation detail, exposed for testing only. +crc32c_t UnextendCrc32cByZeroes(crc32c_t initial_crc, size_t length); + +} // namespace crc_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_CRC_INTERNAL_CRC32C_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc32c_inline.h b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc32c_inline.h new file mode 100644 index 0000000000..43ad14f4dc --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc32c_inline.h @@ -0,0 +1,72 @@ +// Copyright 2022 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef ABSL_CRC_INTERNAL_CRC32C_INLINE_H_ +#define ABSL_CRC_INTERNAL_CRC32C_INLINE_H_ + +#include + +#include "absl/base/config.h" +#include "absl/base/internal/endian.h" +#include "absl/crc/internal/crc32_x86_arm_combined_simd.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace crc_internal { + +// CRC32C implementation optimized for small inputs. +// Either computes crc and return true, or if there is +// no hardware support does nothing and returns false. +inline bool ExtendCrc32cInline(uint32_t* crc, const char* p, size_t n) { +#if defined(ABSL_CRC_INTERNAL_HAVE_ARM_SIMD) || \ + defined(ABSL_CRC_INTERNAL_HAVE_X86_SIMD) + constexpr uint32_t kCrc32Xor = 0xffffffffU; + *crc ^= kCrc32Xor; + if (n & 1) { + *crc = CRC32_u8(*crc, *p); + n--; + p++; + } + if (n & 2) { + *crc = CRC32_u16(*crc, absl::little_endian::Load16(p)); + n -= 2; + p += 2; + } + if (n & 4) { + *crc = CRC32_u32(*crc, absl::little_endian::Load32(p)); + n -= 4; + p += 4; + } + while (n) { + *crc = CRC32_u64(*crc, absl::little_endian::Load64(p)); + n -= 8; + p += 8; + } + *crc ^= kCrc32Xor; + return true; +#else + // No hardware support, signal the need to fallback. + static_cast(crc); + static_cast(p); + static_cast(n); + return false; +#endif // defined(ABSL_CRC_INTERNAL_HAVE_ARM_SIMD) || + // defined(ABSL_CRC_INTERNAL_HAVE_X86_SIMD) +} + +} // namespace crc_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_CRC_INTERNAL_CRC32C_INLINE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_internal.h b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_internal.h new file mode 100644 index 0000000000..7a503433a5 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_internal.h @@ -0,0 +1,177 @@ +// Copyright 2022 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef ABSL_CRC_INTERNAL_CRC_INTERNAL_H_ +#define ABSL_CRC_INTERNAL_CRC_INTERNAL_H_ + +#include +#include +#include + +#include "absl/base/internal/raw_logging.h" +#include "absl/crc/internal/crc.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN + +namespace crc_internal { + +// Prefetch constants used in some Extend() implementations +constexpr int kPrefetchHorizon = ABSL_CACHELINE_SIZE * 4; // Prefetch this far +static_assert(kPrefetchHorizon >= 64, "CRCPrefetchHorizon less than loop len"); + +// We require the Scramble() function: +// - to be reversible (Unscramble() must exist) +// - to be non-linear in the polynomial's Galois field (so the CRC of a +// scrambled CRC is not linearly affected by the scrambled CRC, even if +// using the same polynomial) +// - not to be its own inverse. Preferably, if X=Scramble^N(X) and N!=0, then +// N is large. +// - to be fast. +// - not to change once defined. +// We introduce non-linearity in two ways: +// Addition of a constant. +// - The carries introduce non-linearity; we use bits of an irrational +// (phi) to make it unlikely that we introduce no carries. +// Rotate by a constant number of bits. +// - We use floor(degree/2)+1, which does not divide the degree, and +// splits the bits nearly evenly, which makes it less likely the +// halves will be the same or one will be all zeroes. +// We do both things to improve the chances of non-linearity in the face of +// bit patterns with low numbers of bits set, while still being fast. +// Below is the constant that we add. The bits are the first 128 bits of the +// fractional part of phi, with a 1 ored into the bottom bit to maximize the +// cycle length of repeated adds. +constexpr uint64_t kScrambleHi = (static_cast(0x4f1bbcdcU) << 32) | + static_cast(0xbfa53e0aU); +constexpr uint64_t kScrambleLo = (static_cast(0xf9ce6030U) << 32) | + static_cast(0x2e76e41bU); + +class CRCImpl : public CRC { // Implemention of the abstract class CRC + public: + using Uint32By256 = uint32_t[256]; + + CRCImpl() {} + ~CRCImpl() override = default; + + // The internal version of CRC::New(). + static CRCImpl* NewInternal(); + + void Empty(uint32_t* crc) const override; + + // Fill in a table for updating a CRC by one word of 'word_size' bytes + // [last_lo, last_hi] contains the answer if the last bit in the word + // is set. + static void FillWordTable(uint32_t poly, uint32_t last, int word_size, + Uint32By256* t); + + // Build the table for extending by zeroes, returning the number of entries. + // For a in {1, 2, ..., ZEROES_BASE-1}, b in {0, 1, 2, 3, ...}, + // entry j=a-1+(ZEROES_BASE-1)*b + // contains a polynomial Pi such that multiplying + // a CRC by Pi mod P, where P is the CRC polynomial, is equivalent to + // appending a*2**(ZEROES_BASE_LG*b) zero bytes to the original string. + static int FillZeroesTable(uint32_t poly, Uint32By256* t); + + virtual void InitTables() = 0; + + private: + CRCImpl(const CRCImpl&) = delete; + CRCImpl& operator=(const CRCImpl&) = delete; +}; + +// This is the 32-bit implementation. It handles all sizes from 8 to 32. 
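The scramble recipe described above (add a constant with plenty of carries, then rotate by floor(degree/2) + 1 bits) is easy to see in miniature. The sketch below only illustrates that recipe for a 32-bit value, using a stand-in odd constant rather than the phi-derived kScrambleHi/kScrambleLo pair; it is not the actual CRC32::Scramble() implementation.

    #include <cstdint>

    // Stand-in constant (floor(2^32 / phi), which happens to be odd) and the
    // rotation amount floor(32 / 2) + 1 described in the comment above.
    constexpr uint32_t kStandInConstant = 0x9e3779b9u;
    constexpr int kRotate = 32 / 2 + 1;  // 17, does not divide the degree

    constexpr uint32_t RotR32(uint32_t x, int r) {
      return (x >> r) | (x << (32 - r));
    }
    constexpr uint32_t RotL32(uint32_t x, int r) {
      return (x << r) | (x >> (32 - r));
    }

    // Addition introduces carries (non-linear over GF(2)); rotation mixes halves.
    constexpr uint32_t ScrambleSketch(uint32_t crc) {
      return RotR32(crc + kStandInConstant, kRotate);
    }
    // Exact inverse: rotate back, then subtract the constant (mod 2^32).
    constexpr uint32_t UnscrambleSketch(uint32_t scrambled) {
      return RotL32(scrambled, kRotate) - kStandInConstant;
    }

    static_assert(UnscrambleSketch(ScrambleSketch(0xdeadbeefu)) == 0xdeadbeefu,
                  "scramble must be reversible");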
+class CRC32 : public CRCImpl { + public: + CRC32() {} + ~CRC32() override {} + + void Extend(uint32_t* crc, const void* bytes, size_t length) const override; + void ExtendByZeroes(uint32_t* crc, size_t length) const override; + void Scramble(uint32_t* crc) const override; + void Unscramble(uint32_t* crc) const override; + void UnextendByZeroes(uint32_t* crc, size_t length) const override; + + void InitTables() override; + + private: + // Common implementation guts for ExtendByZeroes and UnextendByZeroes(). + // + // zeroes_table is a table as returned by FillZeroesTable(), containing + // polynomials representing CRCs of strings-of-zeros of various lenghts, + // and which can be combined by polynomial multiplication. poly_table is + // a table of CRC byte extension values. These tables are determined by + // the generator polynomial. + // + // These will be set to reverse_zeroes_ and reverse_table0_ for Unextend, and + // CRC32::zeroes_ and CRC32::table0_ for Extend. + void ExtendByZeroesImpl(uint32_t* crc, size_t length, + const uint32_t zeroes_table[256], + const uint32_t poly_table[256]) const; + + uint32_t table0_[256]; // table of byte extensions + uint32_t zeroes_[256]; // table of zero extensions + + // table of 4-byte extensions shifted by 12 bytes of zeroes + uint32_t table_[4][256]; + + // Reverse lookup tables, using the alternate polynomial used by + // UnextendByZeroes(). + uint32_t reverse_table0_[256]; // table of reverse byte extensions + uint32_t reverse_zeroes_[256]; // table of reverse zero extensions + + CRC32(const CRC32&) = delete; + CRC32& operator=(const CRC32&) = delete; +}; + +// Helpers + +// Return a bit mask containing len 1-bits. +// Requires 0 < len <= sizeof(T) +template +T MaskOfLength(int len) { + // shift 2 by len-1 rather than 1 by len because shifts of wordsize + // are undefined. + return (T(2) << (len - 1)) - 1; +} + +// Rotate low-order "width" bits of "in" right by "r" bits, +// setting other bits in word to arbitrary values. +template +T RotateRight(T in, int width, int r) { + return (in << (width - r)) | ((in >> r) & MaskOfLength(width - r)); +} + +// RoundUp(p) returns the lowest address >= p aligned to an N-byte +// boundary. Requires that N is a power of 2. +template +const uint8_t* RoundUp(const uint8_t* p) { + static_assert((alignment & (alignment - 1)) == 0, "alignment is not 2^n"); + constexpr uintptr_t mask = alignment - 1; + const uintptr_t as_uintptr = reinterpret_cast(p); + return reinterpret_cast((as_uintptr + mask) & ~mask); +} + +// Return a newly created CRC32AcceleratedX86ARMCombined if we can use Intel's +// or ARM's CRC acceleration for a given polynomial. Return nullptr otherwise. +CRCImpl* TryNewCRC32AcceleratedX86ARMCombined(); + +// Return all possible hardware accelerated implementations. For testing only. +std::vector> NewCRC32AcceleratedX86ARMCombinedAll(); + +} // namespace crc_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_CRC_INTERNAL_CRC_INTERNAL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_memcpy.h b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_memcpy.h new file mode 100644 index 0000000000..8e728a6ea3 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_memcpy.h @@ -0,0 +1,112 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef ABSL_CRC_INTERNAL_CRC_MEMCPY_H_ +#define ABSL_CRC_INTERNAL_CRC_MEMCPY_H_ + +#include +#include + +#include "absl/base/config.h" +#include "absl/crc/crc32c.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace crc_internal { + +class CrcMemcpyEngine { + public: + virtual ~CrcMemcpyEngine() = default; + + virtual crc32c_t Compute(void* __restrict dst, const void* __restrict src, + std::size_t length, crc32c_t initial_crc) const = 0; + + protected: + CrcMemcpyEngine() = default; +}; + +class CrcMemcpy { + public: + static crc32c_t CrcAndCopy(void* __restrict dst, const void* __restrict src, + std::size_t length, + crc32c_t initial_crc = ToCrc32c(0), + bool non_temporal = false) { + static const ArchSpecificEngines engines = GetArchSpecificEngines(); + auto* engine = non_temporal ? engines.non_temporal : engines.temporal; + return engine->Compute(dst, src, length, initial_crc); + } + + // For testing only: get an architecture-specific engine for tests. + static std::unique_ptr GetTestEngine(int vector, + int integer); + + private: + struct ArchSpecificEngines { + CrcMemcpyEngine* temporal; + CrcMemcpyEngine* non_temporal; + }; + + static ArchSpecificEngines GetArchSpecificEngines(); +}; + +// Fallback CRC-memcpy engine. +class FallbackCrcMemcpyEngine : public CrcMemcpyEngine { + public: + FallbackCrcMemcpyEngine() = default; + FallbackCrcMemcpyEngine(const FallbackCrcMemcpyEngine&) = delete; + FallbackCrcMemcpyEngine operator=(const FallbackCrcMemcpyEngine&) = delete; + + crc32c_t Compute(void* __restrict dst, const void* __restrict src, + std::size_t length, crc32c_t initial_crc) const override; +}; + +// CRC Non-Temporal-Memcpy engine. +class CrcNonTemporalMemcpyEngine : public CrcMemcpyEngine { + public: + CrcNonTemporalMemcpyEngine() = default; + CrcNonTemporalMemcpyEngine(const CrcNonTemporalMemcpyEngine&) = delete; + CrcNonTemporalMemcpyEngine operator=(const CrcNonTemporalMemcpyEngine&) = + delete; + + crc32c_t Compute(void* __restrict dst, const void* __restrict src, + std::size_t length, crc32c_t initial_crc) const override; +}; + +// CRC Non-Temporal-Memcpy AVX engine. +class CrcNonTemporalMemcpyAVXEngine : public CrcMemcpyEngine { + public: + CrcNonTemporalMemcpyAVXEngine() = default; + CrcNonTemporalMemcpyAVXEngine(const CrcNonTemporalMemcpyAVXEngine&) = delete; + CrcNonTemporalMemcpyAVXEngine operator=( + const CrcNonTemporalMemcpyAVXEngine&) = delete; + + crc32c_t Compute(void* __restrict dst, const void* __restrict src, + std::size_t length, crc32c_t initial_crc) const override; +}; + +// Copy source to destination and return the CRC32C of the data copied. If an +// accelerated version is available, use the accelerated version, otherwise use +// the generic fallback version. 
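A usage sketch for the dispatcher declared above. CrcMemcpy::CrcAndCopy() and the ToCrc32c() helper it defaults to are taken as used in this header; the Crc32CAndCopy() convenience wrapper defined just below forwards to the same call. Buffers must not overlap, as with memcpy.

    #include <cstddef>
    #include <vector>

    #include "absl/crc/crc32c.h"
    #include "absl/crc/internal/crc_memcpy.h"

    // Copy src into dst and obtain the CRC32C of the copied bytes in one pass.
    absl::crc32c_t CopyWithCrcSketch(std::vector<char>& dst,
                                     const std::vector<char>& src) {
      dst.resize(src.size());
      // non_temporal = true requests the cache-bypassing engine where available;
      // the default (false) keeps the copied data in cache.
      return absl::crc_internal::CrcMemcpy::CrcAndCopy(
          dst.data(), src.data(), src.size(), absl::ToCrc32c(0),
          /*non_temporal=*/false);
    }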
+inline crc32c_t Crc32CAndCopy(void* __restrict dst, const void* __restrict src, + std::size_t length, + crc32c_t initial_crc = ToCrc32c(0), + bool non_temporal = false) { + return CrcMemcpy::CrcAndCopy(dst, src, length, initial_crc, non_temporal); +} + +} // namespace crc_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_CRC_INTERNAL_CRC_MEMCPY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_memcpy_fallback.cc b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_memcpy_fallback.cc new file mode 100644 index 0000000000..4579c164d8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_memcpy_fallback.cc @@ -0,0 +1,75 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include + +#include "absl/base/config.h" +#include "absl/crc/crc32c.h" +#include "absl/crc/internal/crc_memcpy.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace crc_internal { + +absl::crc32c_t FallbackCrcMemcpyEngine::Compute(void* __restrict dst, + const void* __restrict src, + std::size_t length, + crc32c_t initial_crc) const { + constexpr size_t kBlockSize = 8192; + absl::crc32c_t crc = initial_crc; + + const char* src_bytes = reinterpret_cast(src); + char* dst_bytes = reinterpret_cast(dst); + + // Copy + CRC loop - run 8k chunks until we are out of full chunks. CRC + // then copy was found to be slightly more efficient in our test cases. + std::size_t offset = 0; + for (; offset + kBlockSize < length; offset += kBlockSize) { + crc = absl::ExtendCrc32c(crc, + absl::string_view(src_bytes + offset, kBlockSize)); + memcpy(dst_bytes + offset, src_bytes + offset, kBlockSize); + } + + // Save some work if length is 0. + if (offset < length) { + std::size_t final_copy_size = length - offset; + crc = absl::ExtendCrc32c( + crc, absl::string_view(src_bytes + offset, final_copy_size)); + memcpy(dst_bytes + offset, src_bytes + offset, final_copy_size); + } + + return crc; +} + +// Compile the following only if we don't have +#ifndef __SSE4_2__ + +CrcMemcpy::ArchSpecificEngines CrcMemcpy::GetArchSpecificEngines() { + CrcMemcpy::ArchSpecificEngines engines; + engines.temporal = new FallbackCrcMemcpyEngine(); + engines.non_temporal = new FallbackCrcMemcpyEngine(); + return engines; +} + +std::unique_ptr CrcMemcpy::GetTestEngine(int /*vector*/, + int /*integer*/) { + return std::make_unique(); +} + +#endif + +} // namespace crc_internal +ABSL_NAMESPACE_END +} // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_memcpy_test.cc b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_memcpy_test.cc new file mode 100644 index 0000000000..708e866612 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_memcpy_test.cc @@ -0,0 +1,169 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "absl/crc/internal/crc_memcpy.h" + +#include +#include +#include +#include +#include +#include +#include + +#include "gtest/gtest.h" +#include "absl/crc/crc32c.h" +#include "absl/memory/memory.h" +#include "absl/random/distributions.h" +#include "absl/random/random.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/string_view.h" + +namespace { + +enum CrcEngine { + X86 = 0, + NONTEMPORAL = 1, + FALLBACK = 2, +}; + +// Correctness tests: +// - Every source/destination byte alignment 0-15, every size 0-511 bytes +// - Arbitrarily aligned source, large size +template +class CrcMemcpyTest : public testing::Test { + protected: + CrcMemcpyTest() { + source_ = std::make_unique(kSize); + destination_ = std::make_unique(kSize); + } + static constexpr size_t kAlignment = 16; + static constexpr size_t kMaxCopySize = max_size; + static constexpr size_t kSize = kAlignment + kMaxCopySize; + std::unique_ptr source_; + std::unique_ptr destination_; + + absl::BitGen gen_; +}; + +// Small test is slightly larger 4096 bytes to allow coverage of the "large" +// copy function. The minimum size to exercise all code paths in that function +// would be around 256 consecutive tests (getting every possible tail value +// and 0-2 small copy loops after the main block), so testing from 4096-4500 +// will cover all of those code paths multiple times. +typedef CrcMemcpyTest<4500> CrcSmallTest; +typedef CrcMemcpyTest<(1 << 24)> CrcLargeTest; +// Parametrize the small test so that it can be done with all configurations. +template +class x86ParamTestTemplate : public CrcSmallTest, + public ::testing::WithParamInterface { + protected: + x86ParamTestTemplate() { + if (GetParam().crc_engine_selector == FALLBACK) { + engine_ = std::make_unique(); + } else if (GetParam().crc_engine_selector == NONTEMPORAL) { + engine_ = + std::make_unique(); + } else { + engine_ = absl::crc_internal::CrcMemcpy::GetTestEngine( + GetParam().vector_lanes, GetParam().integer_lanes); + } + } + + // Convenience method. + ParamsT GetParam() const { + return ::testing::WithParamInterface::GetParam(); + } + + std::unique_ptr engine_; +}; +struct TestParams { + CrcEngine crc_engine_selector = X86; + int vector_lanes = 0; + int integer_lanes = 0; +}; +using x86ParamTest = x86ParamTestTemplate; +// SmallCorrectness is designed to exercise every possible set of code paths +// in the memcpy code, not including the loop. 
+TEST_P(x86ParamTest, SmallCorrectnessCheckSourceAlignment) { + constexpr size_t kTestSizes[] = {0, 100, 255, 512, 1024, 4000, kMaxCopySize}; + + for (size_t source_alignment = 0; source_alignment < kAlignment; + source_alignment++) { + for (auto size : kTestSizes) { + char* base_data = static_cast(source_.get()) + source_alignment; + for (size_t i = 0; i < size; i++) { + *(base_data + i) = + static_cast(absl::Uniform(gen_)); + } + absl::crc32c_t initial_crc = + absl::ToCrc32c(absl::Uniform(gen_)); + absl::crc32c_t experiment_crc = + engine_->Compute(destination_.get(), source_.get() + source_alignment, + size, initial_crc); + // Check the memory region to make sure it is the same + int mem_comparison = + memcmp(destination_.get(), source_.get() + source_alignment, size); + SCOPED_TRACE(absl::StrCat("Error in memcpy of size: ", size, + " with source alignment: ", source_alignment)); + ASSERT_EQ(mem_comparison, 0); + absl::crc32c_t baseline_crc = absl::ExtendCrc32c( + initial_crc, + absl::string_view( + static_cast(source_.get()) + source_alignment, size)); + ASSERT_EQ(baseline_crc, experiment_crc); + } + } +} + +TEST_P(x86ParamTest, SmallCorrectnessCheckDestAlignment) { + constexpr size_t kTestSizes[] = {0, 100, 255, 512, 1024, 4000, kMaxCopySize}; + + for (size_t dest_alignment = 0; dest_alignment < kAlignment; + dest_alignment++) { + for (auto size : kTestSizes) { + char* base_data = static_cast(source_.get()); + for (size_t i = 0; i < size; i++) { + *(base_data + i) = + static_cast(absl::Uniform(gen_)); + } + absl::crc32c_t initial_crc = + absl::ToCrc32c(absl::Uniform(gen_)); + absl::crc32c_t experiment_crc = + engine_->Compute(destination_.get() + dest_alignment, source_.get(), + size, initial_crc); + // Check the memory region to make sure it is the same + int mem_comparison = + memcmp(destination_.get() + dest_alignment, source_.get(), size); + SCOPED_TRACE(absl::StrCat("Error in memcpy of size: ", size, + " with dest alignment: ", dest_alignment)); + ASSERT_EQ(mem_comparison, 0); + absl::crc32c_t baseline_crc = absl::ExtendCrc32c( + initial_crc, + absl::string_view(static_cast(source_.get()), size)); + ASSERT_EQ(baseline_crc, experiment_crc); + } + } +} + +INSTANTIATE_TEST_SUITE_P(x86ParamTest, x86ParamTest, + ::testing::Values( + // Tests for configurations that may occur in prod. + TestParams{X86, 3, 0}, TestParams{X86, 1, 2}, + // Fallback test. + TestParams{FALLBACK, 0, 0}, + // Non Temporal + TestParams{NONTEMPORAL, 0, 0})); + +} // namespace diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_memcpy_x86_64.cc b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_memcpy_x86_64.cc new file mode 100644 index 0000000000..4680fbce46 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_memcpy_x86_64.cc @@ -0,0 +1,435 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Simultaneous memcopy and CRC-32C for x86-64. 
Uses integer registers because +// XMM registers do not support the CRC instruction (yet). While copying, +// compute the running CRC of the data being copied. +// +// It is assumed that any CPU running this code has SSE4.2 instructions +// available (for CRC32C). This file will do nothing if that is not true. +// +// The CRC instruction has a 3-byte latency, and we are stressing the ALU ports +// here (unlike a traditional memcopy, which has almost no ALU use), so we will +// need to copy in such a way that the CRC unit is used efficiently. We have two +// regimes in this code: +// 1. For operations of size < kCrcSmallSize, do the CRC then the memcpy +// 2. For operations of size > kCrcSmallSize: +// a) compute an initial CRC + copy on a small amount of data to align the +// destination pointer on a 16-byte boundary. +// b) Split the data into 3 main regions and a tail (smaller than 48 bytes) +// c) Do the copy and CRC of the 3 main regions, interleaving (start with +// full cache line copies for each region, then move to single 16 byte +// pieces per region). +// d) Combine the CRCs with CRC32C::Concat. +// e) Copy the tail and extend the CRC with the CRC of the tail. +// This method is not ideal for op sizes between ~1k and ~8k because CRC::Concat +// takes a significant amount of time. A medium-sized approach could be added +// using 3 CRCs over fixed-size blocks where the zero-extensions required for +// CRC32C::Concat can be precomputed. + +#include +#include + +#include "absl/crc/crc32c.h" +#include "absl/strings/string_view.h" + +#ifdef __SSE4_2__ + +#include +#include + +#include + +#include "absl/base/dynamic_annotations.h" +#include "absl/base/internal/prefetch.h" +#include "absl/base/optimization.h" +#include "absl/crc/internal/cpu_detect.h" +#include "absl/crc/internal/crc_memcpy.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace crc_internal { + +namespace { + +inline crc32c_t ShortCrcCopy(char* dst, const char* src, std::size_t length, + crc32c_t crc) { + // Small copy: just go 1 byte at a time: being nice to the branch predictor + // is more important here than anything else + uint32_t crc_uint32 = static_cast(crc); + for (std::size_t i = 0; i < length; i++) { + uint8_t data = *reinterpret_cast(src); + crc_uint32 = _mm_crc32_u8(crc_uint32, data); + *reinterpret_cast(dst) = data; + ++src; + ++dst; + } + return ToCrc32c(crc_uint32); +} + +constexpr int kIntLoadsPerVec = sizeof(__m128i) / sizeof(uint64_t); + +// Common function for copying the tails of multiple large regions. 
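Step d) of the large-copy strategy above merges the per-region CRCs. A minimal sketch of the concatenation property it relies on, expressed with the public ExtendCrc32c()/ConcatCrc32c() calls this file already uses (two halves instead of three regions, for brevity):

    #include <cassert>
    #include <cstddef>

    #include "absl/crc/crc32c.h"
    #include "absl/strings/string_view.h"

    // Sketch: CRCs of two pieces computed independently (as the regions in this
    // file are) can be merged into the CRC of the whole buffer.
    void ConcatSketch(absl::string_view data) {
      const size_t half = data.size() / 2;
      const absl::string_view a = data.substr(0, half);
      const absl::string_view b = data.substr(half);

      const absl::crc32c_t crc_a = absl::ExtendCrc32c(absl::crc32c_t{0}, a);
      const absl::crc32c_t crc_b = absl::ExtendCrc32c(absl::crc32c_t{0}, b);

      // Equivalent to CRCing the whole buffer in one pass.
      const absl::crc32c_t whole = absl::ExtendCrc32c(absl::crc32c_t{0}, data);
      assert(absl::ConcatCrc32c(crc_a, crc_b, b.size()) == whole);
    }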
+template +inline void LargeTailCopy(crc32c_t* crcs, char** dst, const char** src, + size_t region_size, size_t copy_rounds) { + __m128i data[vec_regions]; + uint64_t int_data[kIntLoadsPerVec * int_regions]; + + while (copy_rounds > 0) { +#pragma unroll_completely + for (int i = 0; i < vec_regions; i++) { + int region = i; + + auto* vsrc = + reinterpret_cast(*src + region_size * region); + auto* vdst = reinterpret_cast<__m128i*>(*dst + region_size * region); + + // Load the blocks, unaligned + data[i] = _mm_loadu_si128(vsrc); + + // Store the blocks, aligned + _mm_store_si128(vdst, data[i]); + + // Compute the running CRC + crcs[region] = ToCrc32c(_mm_crc32_u64(static_cast(crcs[region]), + _mm_extract_epi64(data[i], 0))); + crcs[region] = ToCrc32c(_mm_crc32_u64(static_cast(crcs[region]), + _mm_extract_epi64(data[i], 1))); + } + +#pragma unroll_completely + for (int i = 0; i < int_regions; i++) { + int region = vec_regions + i; + + auto* usrc = + reinterpret_cast(*src + region_size * region); + auto* udst = reinterpret_cast(*dst + region_size * region); + +#pragma unroll_completely + for (int j = 0; j < kIntLoadsPerVec; j++) { + int data_index = i * kIntLoadsPerVec + j; + + int_data[data_index] = *(usrc + j); + crcs[region] = ToCrc32c(_mm_crc32_u64( + static_cast(crcs[region]), int_data[data_index])); + + *(udst + j) = int_data[data_index]; + } + } + + // Increment pointers + *src += sizeof(__m128i); + *dst += sizeof(__m128i); + --copy_rounds; + } +} + +} // namespace + +template +class AcceleratedCrcMemcpyEngine : public CrcMemcpyEngine { + public: + AcceleratedCrcMemcpyEngine() = default; + AcceleratedCrcMemcpyEngine(const AcceleratedCrcMemcpyEngine&) = delete; + AcceleratedCrcMemcpyEngine operator=(const AcceleratedCrcMemcpyEngine&) = + delete; + + crc32c_t Compute(void* __restrict dst, const void* __restrict src, + std::size_t length, crc32c_t initial_crc) const override; +}; + +template +crc32c_t AcceleratedCrcMemcpyEngine::Compute( + void* __restrict dst, const void* __restrict src, std::size_t length, + crc32c_t initial_crc) const { + constexpr std::size_t kRegions = vec_regions + int_regions; + constexpr crc32c_t kCrcDataXor = crc32c_t{0xffffffff}; + constexpr std::size_t kBlockSize = sizeof(__m128i); + constexpr std::size_t kCopyRoundSize = kRegions * kBlockSize; + + // Number of blocks per cacheline. + constexpr std::size_t kBlocksPerCacheLine = ABSL_CACHELINE_SIZE / kBlockSize; + + char* dst_bytes = static_cast(dst); + const char* src_bytes = static_cast(src); + + // Make sure that one prefetch per big block is enough to cover the whole + // dataset, and we don't prefetch too much. + static_assert(ABSL_CACHELINE_SIZE % kBlockSize == 0, + "Cache lines are not divided evenly into blocks, may have " + "unintended behavior!"); + + // Experimentally-determined boundary between a small and large copy. + // Below this number, spin-up and concatenation of CRCs takes enough time that + // it kills the throughput gains of using 3 regions and wide vectors. + constexpr size_t kCrcSmallSize = 256; + + // Experimentally-determined prefetch distance. Main loop copies will + // prefeth data 2 cache lines ahead. 
+ constexpr std::size_t kPrefetchAhead = 2 * ABSL_CACHELINE_SIZE; + + // Small-size CRC-memcpy : just do CRC + memcpy + if (length < kCrcSmallSize) { + crc32c_t crc = + ExtendCrc32c(initial_crc, absl::string_view(src_bytes, length)); + memcpy(dst, src, length); + return crc; + } + + // Start work on the CRC: undo the XOR from the previous calculation or set up + // the initial value of the CRC. + // initial_crc ^= kCrcDataXor; + initial_crc = initial_crc ^ kCrcDataXor; + + // Do an initial alignment copy, so we can use aligned store instructions to + // the destination pointer. We align the destination pointer because the + // penalty for an unaligned load is small compared to the penalty of an + // unaligned store on modern CPUs. + std::size_t bytes_from_last_aligned = + reinterpret_cast(dst) & (kBlockSize - 1); + if (bytes_from_last_aligned != 0) { + std::size_t bytes_for_alignment = kBlockSize - bytes_from_last_aligned; + + // Do the short-sized copy and CRC. + initial_crc = + ShortCrcCopy(dst_bytes, src_bytes, bytes_for_alignment, initial_crc); + src_bytes += bytes_for_alignment; + dst_bytes += bytes_for_alignment; + length -= bytes_for_alignment; + } + + // We are going to do the copy and CRC in kRegions regions to make sure that + // we can saturate the CRC unit. The CRCs will be combined at the end of the + // run. Copying will use the SSE registers, and we will extract words from + // the SSE registers to add to the CRC. Initially, we run the loop one full + // cache line per region at a time, in order to insert prefetches. + + // Initialize CRCs for kRegions regions. + crc32c_t crcs[kRegions]; + crcs[0] = initial_crc; + for (int i = 1; i < kRegions; i++) { + crcs[i] = kCrcDataXor; + } + + // Find the number of rounds to copy and the region size. Also compute the + // tail size here. + int64_t copy_rounds = length / kCopyRoundSize; + + // Find the size of each region and the size of the tail. + const std::size_t region_size = copy_rounds * kBlockSize; + const std::size_t tail_size = length - (kRegions * region_size); + + // Holding registers for data in each region. + __m128i vec_data[vec_regions]; + uint64_t int_data[int_regions * kIntLoadsPerVec]; + + // Main loop. + while (copy_rounds > kBlocksPerCacheLine) { + // Prefetch kPrefetchAhead bytes ahead of each pointer. +#pragma unroll_completely + for (int i = 0; i < kRegions; i++) { + absl::base_internal::PrefetchT0(src_bytes + kPrefetchAhead + + region_size * i); + absl::base_internal::PrefetchT0(dst_bytes + kPrefetchAhead + + region_size * i); + } + + // Load and store data, computing CRC on the way. +#pragma unroll_completely + for (int i = 0; i < kBlocksPerCacheLine; i++) { + // Copy and CRC the data for the CRC regions. +#pragma unroll_completely + for (int j = 0; j < vec_regions; j++) { + // Cycle which regions get vector load/store and integer load/store, to + // engage prefetching logic around vector load/stores and save issue + // slots by using the integer registers. + int region = (j + i) % kRegions; + + auto* src = reinterpret_cast(src_bytes + + region_size * region); + auto* dst = + reinterpret_cast<__m128i*>(dst_bytes + region_size * region); + + // Load and CRC data. + vec_data[j] = _mm_loadu_si128(src + i); + crcs[region] = + ToCrc32c(_mm_crc32_u64(static_cast(crcs[region]), + _mm_extract_epi64(vec_data[j], 0))); + crcs[region] = + ToCrc32c(_mm_crc32_u64(static_cast(crcs[region]), + _mm_extract_epi64(vec_data[j], 1))); + + // Store the data. 
+ _mm_store_si128(dst + i, vec_data[j]); + } + + // Preload the partial CRCs for the CLMUL subregions. +#pragma unroll_completely + for (int j = 0; j < int_regions; j++) { + // Cycle which regions get vector load/store and integer load/store, to + // engage prefetching logic around vector load/stores and save issue + // slots by using the integer registers. + int region = (j + vec_regions + i) % kRegions; + + auto* usrc = + reinterpret_cast(src_bytes + region_size * region); + auto* udst = + reinterpret_cast(dst_bytes + region_size * region); + +#pragma unroll_completely + for (int k = 0; k < kIntLoadsPerVec; k++) { + int data_index = j * kIntLoadsPerVec + k; + + // Load and CRC the data. + int_data[data_index] = *(usrc + i * kIntLoadsPerVec + k); + crcs[region] = ToCrc32c(_mm_crc32_u64( + static_cast(crcs[region]), int_data[data_index])); + + // Store the data. + *(udst + i * kIntLoadsPerVec + k) = int_data[data_index]; + } + } + } + + // Increment pointers + src_bytes += kBlockSize * kBlocksPerCacheLine; + dst_bytes += kBlockSize * kBlocksPerCacheLine; + copy_rounds -= kBlocksPerCacheLine; + } + + // Copy and CRC the tails of each region. + LargeTailCopy(crcs, &dst_bytes, &src_bytes, + region_size, copy_rounds); + + // Move the source and destination pointers to the end of the region + src_bytes += region_size * (kRegions - 1); + dst_bytes += region_size * (kRegions - 1); + + // Finalize the first CRCs: XOR the internal CRCs by the XOR mask to undo the + // XOR done before doing block copy + CRCs. + for (int i = 0; i < kRegions - 1; i++) { + crcs[i] = crcs[i] ^ kCrcDataXor; + } + + // Build a CRC of the first kRegions - 1 regions. + crc32c_t full_crc = crcs[0]; + for (int i = 1; i < kRegions - 1; i++) { + full_crc = ConcatCrc32c(full_crc, crcs[i], region_size); + } + + // Copy and CRC the tail through the XMM registers. + std::size_t tail_blocks = tail_size / kBlockSize; + LargeTailCopy<0, 1>(&crcs[kRegions - 1], &dst_bytes, &src_bytes, 0, + tail_blocks); + + // Final tail copy for under 16 bytes. + crcs[kRegions - 1] = + ShortCrcCopy(dst_bytes, src_bytes, tail_size - tail_blocks * kBlockSize, + crcs[kRegions - 1]); + + // Finalize and concatenate the final CRC, then return. + crcs[kRegions - 1] = crcs[kRegions - 1] ^ kCrcDataXor; + return ConcatCrc32c(full_crc, crcs[kRegions - 1], region_size + tail_size); +} + +CrcMemcpy::ArchSpecificEngines CrcMemcpy::GetArchSpecificEngines() { +#ifdef UNDEFINED_BEHAVIOR_SANITIZER + // UBSAN does not play nicely with unaligned loads (which we use a lot). + // Get the underlying architecture. + CpuType cpu_type = GetCpuType(); + switch (cpu_type) { + case CpuType::kUnknown: + case CpuType::kAmdRome: + case CpuType::kAmdNaples: + case CpuType::kIntelCascadelakeXeon: + case CpuType::kIntelSkylakeXeon: + case CpuType::kIntelSkylake: + case CpuType::kIntelBroadwell: + case CpuType::kIntelHaswell: + case CpuType::kIntelIvybridge: + return { + .temporal = new FallbackCrcMemcpyEngine(), + .non_temporal = new CrcNonTemporalMemcpyAVXEngine(), + }; + // INTEL_SANDYBRIDGE performs better with SSE than AVX. + case CpuType::kIntelSandybridge: + return { + .temporal = new FallbackCrcMemcpyEngine(), + .non_temporal = new CrcNonTemporalMemcpyEngine(), + }; + default: + return {.temporal = new FallbackCrcMemcpyEngine(), + .non_temporal = new FallbackCrcMemcpyEngine()}; + } +#else + // Get the underlying architecture. 
+ CpuType cpu_type = GetCpuType(); + switch (cpu_type) { + // On Zen 2, PEXTRQ uses 2 micro-ops, including one on the vector store port + // which data movement from the vector registers to the integer registers + // (where CRC32C happens) to crowd the same units as vector stores. As a + // result, using that path exclusively causes bottlenecking on this port. + // We can avoid this bottleneck by using the integer side of the CPU for + // most operations rather than the vector side. We keep a vector region to + // engage some of the prefetching logic in the cache hierarchy which seems + // to give vector instructions special treatment. These prefetch units see + // strided access to each region, and do the right thing. + case CpuType::kAmdRome: + case CpuType::kAmdNaples: + return { + .temporal = new AcceleratedCrcMemcpyEngine<1, 2>(), + .non_temporal = new CrcNonTemporalMemcpyAVXEngine(), + }; + // PCLMULQDQ is slow and we don't have wide enough issue width to take + // advantage of it. For an unknown architecture, don't risk using CLMULs. + case CpuType::kIntelCascadelakeXeon: + case CpuType::kIntelSkylakeXeon: + case CpuType::kIntelSkylake: + case CpuType::kIntelBroadwell: + case CpuType::kIntelHaswell: + case CpuType::kIntelIvybridge: + return { + .temporal = new AcceleratedCrcMemcpyEngine<3, 0>(), + .non_temporal = new CrcNonTemporalMemcpyAVXEngine(), + }; + // INTEL_SANDYBRIDGE performs better with SSE than AVX. + case CpuType::kIntelSandybridge: + return { + .temporal = new AcceleratedCrcMemcpyEngine<3, 0>(), + .non_temporal = new CrcNonTemporalMemcpyEngine(), + }; + default: + return {.temporal = new FallbackCrcMemcpyEngine(), + .non_temporal = new FallbackCrcMemcpyEngine()}; + } +#endif // UNDEFINED_BEHAVIOR_SANITIZER +} + +// For testing, allow the user to specify which engine they want. +std::unique_ptr CrcMemcpy::GetTestEngine(int vector, + int integer) { + if (vector == 3 && integer == 0) { + return std::make_unique>(); + } else if (vector == 1 && integer == 2) { + return std::make_unique>(); + } + return nullptr; +} + +} // namespace crc_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // __SSE4_2__ diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_non_temporal_memcpy.cc b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_non_temporal_memcpy.cc new file mode 100644 index 0000000000..adc867f6b7 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_non_temporal_memcpy.cc @@ -0,0 +1,93 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
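The engines implemented next lean on non_temporal_store_memcpy(). The core idea of a non-temporal (streaming) copy can be sketched with plain SSE2 intrinsics; this is illustrative only, x86-specific, and far less careful than the library's non_temporal_memcpy.h, which also provides ARM shims for the same intrinsics at the end of this change.

    #include <emmintrin.h>  // _mm_loadu_si128, _mm_stream_si128, _mm_sfence

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Streaming stores write around the cache, which helps when the destination
    // will not be read again soon. dst is assumed 16-byte aligned here.
    inline void NonTemporalCopySketch(void* dst, const void* src, size_t n) {
      assert(reinterpret_cast<uintptr_t>(dst) % 16 == 0);
      char* d = static_cast<char*>(dst);
      const char* s = static_cast<const char*>(src);
      size_t i = 0;
      for (; i + sizeof(__m128i) <= n; i += sizeof(__m128i)) {
        const __m128i v =
            _mm_loadu_si128(reinterpret_cast<const __m128i*>(s + i));
        _mm_stream_si128(reinterpret_cast<__m128i*>(d + i), v);
      }
      std::memcpy(d + i, s + i, n - i);  // tail, regular (cached) stores
      _mm_sfence();  // order the streaming stores before returning
    }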
+ +#include + +#include "absl/base/config.h" +#include "absl/crc/crc32c.h" +#include "absl/crc/internal/crc_memcpy.h" +#include "absl/crc/internal/non_temporal_memcpy.h" +#include "absl/strings/string_view.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace crc_internal { + +crc32c_t CrcNonTemporalMemcpyEngine::Compute(void* __restrict dst, + const void* __restrict src, + std::size_t length, + crc32c_t initial_crc) const { + constexpr size_t kBlockSize = 8192; + crc32c_t crc = initial_crc; + + const char* src_bytes = reinterpret_cast(src); + char* dst_bytes = reinterpret_cast(dst); + + // Copy + CRC loop - run 8k chunks until we are out of full chunks. + std::size_t offset = 0; + for (; offset + kBlockSize < length; offset += kBlockSize) { + crc = absl::ExtendCrc32c(crc, + absl::string_view(src_bytes + offset, kBlockSize)); + non_temporal_store_memcpy(dst_bytes + offset, src_bytes + offset, + kBlockSize); + } + + // Save some work if length is 0. + if (offset < length) { + std::size_t final_copy_size = length - offset; + crc = ExtendCrc32c(crc, + absl::string_view(src_bytes + offset, final_copy_size)); + + non_temporal_store_memcpy(dst_bytes + offset, src_bytes + offset, + final_copy_size); + } + + return crc; +} + +crc32c_t CrcNonTemporalMemcpyAVXEngine::Compute(void* __restrict dst, + const void* __restrict src, + std::size_t length, + crc32c_t initial_crc) const { + constexpr size_t kBlockSize = 8192; + crc32c_t crc = initial_crc; + + const char* src_bytes = reinterpret_cast(src); + char* dst_bytes = reinterpret_cast(dst); + + // Copy + CRC loop - run 8k chunks until we are out of full chunks. + std::size_t offset = 0; + for (; offset + kBlockSize < length; offset += kBlockSize) { + crc = ExtendCrc32c(crc, absl::string_view(src_bytes + offset, kBlockSize)); + + non_temporal_store_memcpy_avx(dst_bytes + offset, src_bytes + offset, + kBlockSize); + } + + // Save some work if length is 0. + if (offset < length) { + std::size_t final_copy_size = length - offset; + crc = ExtendCrc32c(crc, + absl::string_view(src_bytes + offset, final_copy_size)); + + non_temporal_store_memcpy_avx(dst_bytes + offset, src_bytes + offset, + final_copy_size); + } + + return crc; +} + +} // namespace crc_internal +ABSL_NAMESPACE_END +} // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_x86_arm_combined.cc b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_x86_arm_combined.cc new file mode 100644 index 0000000000..06f9c69cd3 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/crc_x86_arm_combined.cc @@ -0,0 +1,691 @@ +// Copyright 2022 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Hardware accelerated CRC32 computation on Intel and ARM architecture. 
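Whichever mix of CRC32 instructions and PCLMULQDQ the implementation below selects, the observable result has to match the portable definition. In particular, the zero-extension fast path (ComputeZeroConstant() and the power table further down) must agree with feeding literal zero bytes. A sketch of that contract in terms of the public API, assumed available from absl/crc/crc32c.h:

    #include <cassert>
    #include <cstddef>
    #include <string>

    #include "absl/crc/crc32c.h"

    // Sketch: extending by n zero bytes equals CRCing an explicit buffer of n
    // zero bytes; the table-driven path below just does it in O(log n).
    void ExtendByZeroesContractSketch() {
      const absl::crc32c_t base = absl::ExtendCrc32c(absl::crc32c_t{0}, "abc");
      const size_t n = 1 << 20;
      const std::string zeros(n, '\0');
      assert(absl::ExtendCrc32cByZeroes(base, n) ==
             absl::ExtendCrc32c(base, zeros));
    }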
+ +#include + +#include + +#include "absl/base/attributes.h" +#include "absl/base/call_once.h" +#include "absl/base/dynamic_annotations.h" +#include "absl/base/internal/endian.h" +#include "absl/base/internal/prefetch.h" +#include "absl/crc/internal/cpu_detect.h" +#include "absl/crc/internal/crc.h" +#include "absl/crc/internal/crc32_x86_arm_combined_simd.h" +#include "absl/crc/internal/crc_internal.h" +#include "absl/memory/memory.h" +#include "absl/numeric/bits.h" + +#if defined(__aarch64__) && defined(__LITTLE_ENDIAN__) && \ + defined(__ARM_FEATURE_CRC32) && defined(__ARM_NEON) +#define ABSL_INTERNAL_CAN_USE_SIMD_CRC32C +#elif defined(__SSE4_2__) && defined(__PCLMUL__) +#define ABSL_INTERNAL_CAN_USE_SIMD_CRC32C +#endif + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace crc_internal { + +#if defined(ABSL_INTERNAL_CAN_USE_SIMD_CRC32C) + +// Implementation details not exported outside of file +namespace { + +// Some machines have CRC acceleration hardware. +// We can do a faster version of Extend() on such machines. +class CRC32AcceleratedX86ARMCombined : public CRC32 { + public: + CRC32AcceleratedX86ARMCombined() {} + ~CRC32AcceleratedX86ARMCombined() override {} + void ExtendByZeroes(uint32_t* crc, size_t length) const override; + uint32_t ComputeZeroConstant(size_t length) const; + + private: + CRC32AcceleratedX86ARMCombined(const CRC32AcceleratedX86ARMCombined&) = + delete; + CRC32AcceleratedX86ARMCombined& operator=( + const CRC32AcceleratedX86ARMCombined&) = delete; +}; + +// Constants for switching between algorithms. +// Chosen by comparing speed at different powers of 2. +constexpr int kSmallCutoff = 256; +constexpr int kMediumCutoff = 2048; + +#define ABSL_INTERNAL_STEP1(crc) \ + do { \ + crc = CRC32_u8(crc, *p++); \ + } while (0) +#define ABSL_INTERNAL_STEP2(crc) \ + do { \ + crc = CRC32_u16(crc, absl::little_endian::Load16(p)); \ + p += 2; \ + } while (0) +#define ABSL_INTERNAL_STEP4(crc) \ + do { \ + crc = CRC32_u32(crc, absl::little_endian::Load32(p)); \ + p += 4; \ + } while (0) +#define ABSL_INTERNAL_STEP8(crc, data) \ + do { \ + crc = CRC32_u64(crc, absl::little_endian::Load64(data)); \ + data += 8; \ + } while (0) +#define ABSL_INTERNAL_STEP8BY2(crc0, crc1, p0, p1) \ + do { \ + ABSL_INTERNAL_STEP8(crc0, p0); \ + ABSL_INTERNAL_STEP8(crc1, p1); \ + } while (0) +#define ABSL_INTERNAL_STEP8BY3(crc0, crc1, crc2, p0, p1, p2) \ + do { \ + ABSL_INTERNAL_STEP8(crc0, p0); \ + ABSL_INTERNAL_STEP8(crc1, p1); \ + ABSL_INTERNAL_STEP8(crc2, p2); \ + } while (0) + +uint32_t multiply(uint32_t a, uint32_t b) { + V128 shifts = V128_From2x64(0, 1); + V128 power = V128_From2x64(0, a); + V128 crc = V128_From2x64(0, b); + V128 res = V128_PMulLow(power, crc); + + // Combine crc values + res = V128_ShiftLeft64(res, shifts); + return V128_Extract32<1>(res) ^ CRC32_u32(0, V128_Low64(res)); +} + +namespace { + +// Powers of crc32c polynomial, for faster ExtendByZeros. 
+// Verified against folly: +// folly/hash/detail/Crc32CombineDetail.cpp +constexpr uint32_t kCRC32CPowers[] = { + 0x82f63b78, 0x6ea2d55c, 0x18b8ea18, 0x510ac59a, 0xb82be955, 0xb8fdb1e7, + 0x88e56f72, 0x74c360a4, 0xe4172b16, 0x0d65762a, 0x35d73a62, 0x28461564, + 0xbf455269, 0xe2ea32dc, 0xfe7740e6, 0xf946610b, 0x3c204f8f, 0x538586e3, + 0x59726915, 0x734d5309, 0xbc1ac763, 0x7d0722cc, 0xd289cabe, 0xe94ca9bc, + 0x05b74f3f, 0xa51e1f42, 0x40000000, 0x20000000, 0x08000000, 0x00800000, + 0x00008000, 0x82f63b78, 0x6ea2d55c, 0x18b8ea18, 0x510ac59a, 0xb82be955, + 0xb8fdb1e7, 0x88e56f72, 0x74c360a4, 0xe4172b16, 0x0d65762a, 0x35d73a62, + 0x28461564, 0xbf455269, 0xe2ea32dc, 0xfe7740e6, 0xf946610b, 0x3c204f8f, + 0x538586e3, 0x59726915, 0x734d5309, 0xbc1ac763, 0x7d0722cc, 0xd289cabe, + 0xe94ca9bc, 0x05b74f3f, 0xa51e1f42, 0x40000000, 0x20000000, 0x08000000, + 0x00800000, 0x00008000, +}; + +} // namespace + +// Compute a magic constant, so that multiplying by it is the same as +// extending crc by length zeros. +uint32_t CRC32AcceleratedX86ARMCombined::ComputeZeroConstant( + size_t length) const { + // Lowest 2 bits are handled separately in ExtendByZeroes + length >>= 2; + + int index = absl::countr_zero(length); + uint32_t prev = kCRC32CPowers[index]; + length &= length - 1; + + while (length) { + // For each bit of length, extend by 2**n zeros. + index = absl::countr_zero(length); + prev = multiply(prev, kCRC32CPowers[index]); + length &= length - 1; + } + return prev; +} + +void CRC32AcceleratedX86ARMCombined::ExtendByZeroes(uint32_t* crc, + size_t length) const { + uint32_t val = *crc; + // Don't bother with multiplication for small length. + switch (length & 3) { + case 0: + break; + case 1: + val = CRC32_u8(val, 0); + break; + case 2: + val = CRC32_u16(val, 0); + break; + case 3: + val = CRC32_u8(val, 0); + val = CRC32_u16(val, 0); + break; + } + if (length > 3) { + val = multiply(val, ComputeZeroConstant(length)); + } + *crc = val; +} + +// Taken from Intel paper "Fast CRC Computation for iSCSI Polynomial Using CRC32 +// Instruction" +// https://www.intel.com/content/dam/www/public/us/en/documents/white-papers/crc-iscsi-polynomial-crc32-instruction-paper.pdf +// We only need every 4th value, because we unroll loop by 4. +constexpr uint64_t kClmulConstants[] = { + 0x09e4addf8, 0x0ba4fc28e, 0x00d3b6092, 0x09e4addf8, 0x0ab7aff2a, + 0x102f9b8a2, 0x0b9e02b86, 0x00d3b6092, 0x1bf2e8b8a, 0x18266e456, + 0x0d270f1a2, 0x0ab7aff2a, 0x11eef4f8e, 0x083348832, 0x0dd7e3b0c, + 0x0b9e02b86, 0x0271d9844, 0x1b331e26a, 0x06b749fb2, 0x1bf2e8b8a, + 0x0e6fc4e6a, 0x0ce7f39f4, 0x0d7a4825c, 0x0d270f1a2, 0x026f6a60a, + 0x12ed0daac, 0x068bce87a, 0x11eef4f8e, 0x1329d9f7e, 0x0b3e32c28, + 0x0170076fa, 0x0dd7e3b0c, 0x1fae1cc66, 0x010746f3c, 0x086d8e4d2, + 0x0271d9844, 0x0b3af077a, 0x093a5f730, 0x1d88abd4a, 0x06b749fb2, + 0x0c9c8b782, 0x0cec3662e, 0x1ddffc5d4, 0x0e6fc4e6a, 0x168763fa6, + 0x0b0cd4768, 0x19b1afbc4, 0x0d7a4825c, 0x123888b7a, 0x00167d312, + 0x133d7a042, 0x026f6a60a, 0x000bcf5f6, 0x19d34af3a, 0x1af900c24, + 0x068bce87a, 0x06d390dec, 0x16cba8aca, 0x1f16a3418, 0x1329d9f7e, + 0x19fb2a8b0, 0x02178513a, 0x1a0f717c4, 0x0170076fa, +}; + +enum class CutoffStrategy { + // Use 3 CRC streams to fold into 1. + Fold3, + // Unroll CRC instructions for 64 bytes. 
+ Unroll64CRC, +}; + +template +class CRC32AcceleratedX86ARMCombinedMultipleStreams + : public CRC32AcceleratedX86ARMCombined { + ABSL_ATTRIBUTE_HOT + void Extend(uint32_t* crc, const void* bytes, size_t length) const override { + static_assert(num_crc_streams >= 1 && num_crc_streams <= kMaxStreams, + "Invalid number of crc streams"); + static_assert(num_pclmul_streams >= 0 && num_pclmul_streams <= kMaxStreams, + "Invalid number of pclmul streams"); + const uint8_t* p = static_cast(bytes); + const uint8_t* e = p + length; + uint32_t l = *crc; + uint64_t l64; + + // We have dedicated instruction for 1,2,4 and 8 bytes. + if (length & 8) { + ABSL_INTERNAL_STEP8(l, p); + length &= ~8LL; + } + if (length & 4) { + ABSL_INTERNAL_STEP4(l); + length &= ~4LL; + } + if (length & 2) { + ABSL_INTERNAL_STEP2(l); + length &= ~2LL; + } + if (length & 1) { + ABSL_INTERNAL_STEP1(l); + length &= ~1LL; + } + if (length == 0) { + *crc = l; + return; + } + // length is now multiple of 16. + + // For small blocks just run simple loop, because cost of combining multiple + // streams is significant. + if (strategy != CutoffStrategy::Unroll64CRC) { + if (length < kSmallCutoff) { + while (length >= 16) { + ABSL_INTERNAL_STEP8(l, p); + ABSL_INTERNAL_STEP8(l, p); + length -= 16; + } + *crc = l; + return; + } + } + + // For medium blocks we run 3 crc streams and combine them as described in + // Intel paper above. Running 4th stream doesn't help, because crc + // instruction has latency 3 and throughput 1. + if (length < kMediumCutoff) { + l64 = l; + if (strategy == CutoffStrategy::Fold3) { + uint64_t l641 = 0; + uint64_t l642 = 0; + const int blockSize = 32; + int64_t bs = (e - p) / kGroupsSmall / blockSize; + const uint8_t* p1 = p + bs * blockSize; + const uint8_t* p2 = p1 + bs * blockSize; + + for (int64_t i = 0; i < bs - 1; ++i) { + ABSL_INTERNAL_STEP8BY3(l64, l641, l642, p, p1, p2); + ABSL_INTERNAL_STEP8BY3(l64, l641, l642, p, p1, p2); + ABSL_INTERNAL_STEP8BY3(l64, l641, l642, p, p1, p2); + ABSL_INTERNAL_STEP8BY3(l64, l641, l642, p, p1, p2); + } + // Don't run crc on last 8 bytes. + ABSL_INTERNAL_STEP8BY3(l64, l641, l642, p, p1, p2); + ABSL_INTERNAL_STEP8BY3(l64, l641, l642, p, p1, p2); + ABSL_INTERNAL_STEP8BY3(l64, l641, l642, p, p1, p2); + ABSL_INTERNAL_STEP8BY2(l64, l641, p, p1); + + V128 magic = *(reinterpret_cast(kClmulConstants) + bs - 1); + + V128 tmp = V128_From2x64(0, l64); + + V128 res1 = V128_PMulLow(tmp, magic); + + tmp = V128_From2x64(0, l641); + + V128 res2 = V128_PMul10(tmp, magic); + V128 x = V128_Xor(res1, res2); + l64 = V128_Low64(x) ^ absl::little_endian::Load64(p2); + l64 = CRC32_u64(l642, l64); + + p = p2 + 8; + } else if (strategy == CutoffStrategy::Unroll64CRC) { + while ((e - p) >= 64) { + l64 = Process64BytesCRC(p, l64); + p += 64; + } + } + } else { + // There is a lot of data, we can ignore combine costs and run all + // requested streams (num_crc_streams + num_pclmul_streams), + // using prefetch. CRC and PCLMULQDQ use different cpu execution units, + // so on some cpus it makes sense to execute both of them for different + // streams. + + // Point x at first 8-byte aligned byte in string. + const uint8_t* x = RoundUp<8>(p); + // Process bytes until p is 8-byte aligned, if that isn't past the end. + while (p != x) { + ABSL_INTERNAL_STEP1(l); + } + + int64_t bs = (e - p) / (num_crc_streams + num_pclmul_streams) / 64; + const uint8_t* crc_streams[kMaxStreams]; + const uint8_t* pclmul_streams[kMaxStreams]; + // We are guaranteed to have at least one crc stream. 
+ crc_streams[0] = p; + for (int i = 1; i < num_crc_streams; i++) { + crc_streams[i] = crc_streams[i - 1] + bs * 64; + } + pclmul_streams[0] = crc_streams[num_crc_streams - 1] + bs * 64; + for (int i = 1; i < num_pclmul_streams; i++) { + pclmul_streams[i] = pclmul_streams[i - 1] + bs * 64; + } + + // Per stream crc sums. + uint64_t l64_crc[kMaxStreams] = {l}; + uint64_t l64_pclmul[kMaxStreams] = {0}; + + // Peel first iteration, because PCLMULQDQ stream, needs setup. + for (int i = 0; i < num_crc_streams; i++) { + l64_crc[i] = Process64BytesCRC(crc_streams[i], l64_crc[i]); + crc_streams[i] += 16 * 4; + } + + V128 partialCRC[kMaxStreams][4]; + for (int i = 0; i < num_pclmul_streams; i++) { + partialCRC[i][0] = V128_LoadU( + reinterpret_cast(pclmul_streams[i] + 16 * 0)); + partialCRC[i][1] = V128_LoadU( + reinterpret_cast(pclmul_streams[i] + 16 * 1)); + partialCRC[i][2] = V128_LoadU( + reinterpret_cast(pclmul_streams[i] + 16 * 2)); + partialCRC[i][3] = V128_LoadU( + reinterpret_cast(pclmul_streams[i] + 16 * 3)); + pclmul_streams[i] += 16 * 4; + } + + for (int64_t i = 1; i < bs; i++) { + // Prefetch data for next itterations. + for (int j = 0; j < num_crc_streams; j++) { + base_internal::PrefetchT0( + reinterpret_cast(crc_streams[j] + kPrefetchHorizon)); + } + for (int j = 0; j < num_pclmul_streams; j++) { + base_internal::PrefetchT0(reinterpret_cast( + pclmul_streams[j] + kPrefetchHorizon)); + } + + // We process each stream in 64 byte blocks. This can be written as + // for (int i = 0; i < num_pclmul_streams; i++) { + // Process64BytesPclmul(pclmul_streams[i], partialCRC[i]); + // pclmul_streams[i] += 16 * 4; + // } + // for (int i = 0; i < num_crc_streams; i++) { + // l64_crc[i] = Process64BytesCRC(crc_streams[i], l64_crc[i]); + // crc_streams[i] += 16*4; + // } + // But unrolling and interleaving PCLMULQDQ and CRC blocks manually + // gives ~2% performance boost. + l64_crc[0] = Process64BytesCRC(crc_streams[0], l64_crc[0]); + crc_streams[0] += 16 * 4; + if (num_pclmul_streams > 0) { + Process64BytesPclmul(pclmul_streams[0], partialCRC[0]); + pclmul_streams[0] += 16 * 4; + } + if (num_crc_streams > 1) { + l64_crc[1] = Process64BytesCRC(crc_streams[1], l64_crc[1]); + crc_streams[1] += 16 * 4; + } + if (num_pclmul_streams > 1) { + Process64BytesPclmul(pclmul_streams[1], partialCRC[1]); + pclmul_streams[1] += 16 * 4; + } + if (num_crc_streams > 2) { + l64_crc[2] = Process64BytesCRC(crc_streams[2], l64_crc[2]); + crc_streams[2] += 16 * 4; + } + if (num_pclmul_streams > 2) { + Process64BytesPclmul(pclmul_streams[2], partialCRC[2]); + pclmul_streams[2] += 16 * 4; + } + } + + // PCLMULQDQ based streams require special final step; + // CRC based don't. + for (int i = 0; i < num_pclmul_streams; i++) { + l64_pclmul[i] = FinalizePclmulStream(partialCRC[i]); + } + + // Combine all streams into single result. + uint32_t magic = ComputeZeroConstant(bs * 64); + l64 = l64_crc[0]; + for (int i = 1; i < num_crc_streams; i++) { + l64 = multiply(l64, magic); + l64 ^= l64_crc[i]; + } + for (int i = 0; i < num_pclmul_streams; i++) { + l64 = multiply(l64, magic); + l64 ^= l64_pclmul[i]; + } + + // Update p. 
+ if (num_pclmul_streams > 0) { + p = pclmul_streams[num_pclmul_streams - 1]; + } else { + p = crc_streams[num_crc_streams - 1]; + } + } + l = l64; + + while ((e - p) >= 16) { + ABSL_INTERNAL_STEP8(l, p); + ABSL_INTERNAL_STEP8(l, p); + } + // Process the last few bytes + while (p != e) { + ABSL_INTERNAL_STEP1(l); + } + +#undef ABSL_INTERNAL_STEP8BY3 +#undef ABSL_INTERNAL_STEP8BY2 +#undef ABSL_INTERNAL_STEP8 +#undef ABSL_INTERNAL_STEP4 +#undef ABSL_INTERNAL_STEP2 +#undef ABSL_INTERNAL_STEP1 + + *crc = l; + } + + private: + // Update partialCRC with crc of 64 byte block. Calling FinalizePclmulStream + // would produce a single crc checksum, but it is expensive. PCLMULQDQ has a + // high latency, so we run 4 128-bit partial checksums that can be reduced to + // a single value by FinalizePclmulStream later. Computing crc for arbitrary + // polynomialas with PCLMULQDQ is described in Intel paper "Fast CRC + // Computation for Generic Polynomials Using PCLMULQDQ Instruction" + // https://www.intel.com/content/dam/www/public/us/en/documents/white-papers/fast-crc-computation-generic-polynomials-pclmulqdq-paper.pdf + // We are applying it to CRC32C polynomial. + ABSL_ATTRIBUTE_ALWAYS_INLINE void Process64BytesPclmul( + const uint8_t* p, V128* partialCRC) const { + V128 loopMultiplicands = V128_Load(reinterpret_cast(k1k2)); + + V128 partialCRC1 = partialCRC[0]; + V128 partialCRC2 = partialCRC[1]; + V128 partialCRC3 = partialCRC[2]; + V128 partialCRC4 = partialCRC[3]; + + V128 tmp1 = V128_PMulHi(partialCRC1, loopMultiplicands); + V128 tmp2 = V128_PMulHi(partialCRC2, loopMultiplicands); + V128 tmp3 = V128_PMulHi(partialCRC3, loopMultiplicands); + V128 tmp4 = V128_PMulHi(partialCRC4, loopMultiplicands); + V128 data1 = V128_LoadU(reinterpret_cast(p + 16 * 0)); + V128 data2 = V128_LoadU(reinterpret_cast(p + 16 * 1)); + V128 data3 = V128_LoadU(reinterpret_cast(p + 16 * 2)); + V128 data4 = V128_LoadU(reinterpret_cast(p + 16 * 3)); + partialCRC1 = V128_PMulLow(partialCRC1, loopMultiplicands); + partialCRC2 = V128_PMulLow(partialCRC2, loopMultiplicands); + partialCRC3 = V128_PMulLow(partialCRC3, loopMultiplicands); + partialCRC4 = V128_PMulLow(partialCRC4, loopMultiplicands); + partialCRC1 = V128_Xor(tmp1, partialCRC1); + partialCRC2 = V128_Xor(tmp2, partialCRC2); + partialCRC3 = V128_Xor(tmp3, partialCRC3); + partialCRC4 = V128_Xor(tmp4, partialCRC4); + partialCRC1 = V128_Xor(partialCRC1, data1); + partialCRC2 = V128_Xor(partialCRC2, data2); + partialCRC3 = V128_Xor(partialCRC3, data3); + partialCRC4 = V128_Xor(partialCRC4, data4); + partialCRC[0] = partialCRC1; + partialCRC[1] = partialCRC2; + partialCRC[2] = partialCRC3; + partialCRC[3] = partialCRC4; + } + + // Reduce partialCRC produced by Process64BytesPclmul into a single value, + // that represents crc checksum of all the processed bytes. + ABSL_ATTRIBUTE_ALWAYS_INLINE uint64_t + FinalizePclmulStream(V128* partialCRC) const { + V128 partialCRC1 = partialCRC[0]; + V128 partialCRC2 = partialCRC[1]; + V128 partialCRC3 = partialCRC[2]; + V128 partialCRC4 = partialCRC[3]; + + // Combine 4 vectors of partial crc into a single vector. 
+ V128 reductionMultiplicands = + V128_Load(reinterpret_cast(k5k6)); + + V128 low = V128_PMulLow(reductionMultiplicands, partialCRC1); + V128 high = V128_PMulHi(reductionMultiplicands, partialCRC1); + + partialCRC1 = V128_Xor(low, high); + partialCRC1 = V128_Xor(partialCRC1, partialCRC2); + + low = V128_PMulLow(reductionMultiplicands, partialCRC3); + high = V128_PMulHi(reductionMultiplicands, partialCRC3); + + partialCRC3 = V128_Xor(low, high); + partialCRC3 = V128_Xor(partialCRC3, partialCRC4); + + reductionMultiplicands = V128_Load(reinterpret_cast(k3k4)); + + low = V128_PMulLow(reductionMultiplicands, partialCRC1); + high = V128_PMulHi(reductionMultiplicands, partialCRC1); + V128 fullCRC = V128_Xor(low, high); + fullCRC = V128_Xor(fullCRC, partialCRC3); + + // Reduce fullCRC into scalar value. + reductionMultiplicands = V128_Load(reinterpret_cast(k5k6)); + + V128 mask = V128_Load(reinterpret_cast(kMask)); + + V128 tmp = V128_PMul01(reductionMultiplicands, fullCRC); + fullCRC = V128_ShiftRight<8>(fullCRC); + fullCRC = V128_Xor(fullCRC, tmp); + + reductionMultiplicands = V128_Load(reinterpret_cast(k7k0)); + + tmp = V128_ShiftRight<4>(fullCRC); + fullCRC = V128_And(fullCRC, mask); + fullCRC = V128_PMulLow(reductionMultiplicands, fullCRC); + fullCRC = V128_Xor(tmp, fullCRC); + + reductionMultiplicands = V128_Load(reinterpret_cast(kPoly)); + + tmp = V128_And(fullCRC, mask); + tmp = V128_PMul01(reductionMultiplicands, tmp); + tmp = V128_And(tmp, mask); + tmp = V128_PMulLow(reductionMultiplicands, tmp); + + fullCRC = V128_Xor(tmp, fullCRC); + + return V128_Extract32<1>(fullCRC); + } + + // Update crc with 64 bytes of data from p. + ABSL_ATTRIBUTE_ALWAYS_INLINE uint64_t Process64BytesCRC(const uint8_t* p, + uint64_t crc) const { + for (int i = 0; i < 8; i++) { + crc = CRC32_u64(crc, absl::little_endian::Load64(p)); + p += 8; + } + return crc; + } + + // Generated by crc32c_x86_test --crc32c_generate_constants=true + // and verified against constants in linux kernel for S390: + // https://github.com/torvalds/linux/blob/master/arch/s390/crypto/crc32le-vx.S + alignas(16) static constexpr uint64_t k1k2[2] = {0x0740eef02, 0x09e4addf8}; + alignas(16) static constexpr uint64_t k3k4[2] = {0x1384aa63a, 0x0ba4fc28e}; + alignas(16) static constexpr uint64_t k5k6[2] = {0x0f20c0dfe, 0x14cd00bd6}; + alignas(16) static constexpr uint64_t k7k0[2] = {0x0dd45aab8, 0x000000000}; + alignas(16) static constexpr uint64_t kPoly[2] = {0x105ec76f0, 0x0dea713f1}; + alignas(16) static constexpr uint32_t kMask[4] = {~0u, 0u, ~0u, 0u}; + + // Medium runs of bytes are broken into groups of kGroupsSmall blocks of same + // size. Each group is CRCed in parallel then combined at the end of the + // block. + static constexpr int kGroupsSmall = 3; + // For large runs we use up to kMaxStreams blocks computed with CRC + // instruction, and up to kMaxStreams blocks computed with PCLMULQDQ, which + // are combined in the end. + static constexpr int kMaxStreams = 3; +}; + +} // namespace + +// Intel processors with SSE4.2 have an instruction for one particular +// 32-bit CRC polynomial: crc32c +CRCImpl* TryNewCRC32AcceleratedX86ARMCombined() { + CpuType type = GetCpuType(); + switch (type) { + case CpuType::kIntelHaswell: + case CpuType::kAmdRome: + case CpuType::kAmdNaples: + case CpuType::kAmdMilan: + return new CRC32AcceleratedX86ARMCombinedMultipleStreams< + 3, 1, CutoffStrategy::Fold3>(); + // PCLMULQDQ is fast, use combined PCLMULQDQ + CRC implementation. 
+ case CpuType::kIntelCascadelakeXeon: + case CpuType::kIntelSkylakeXeon: + case CpuType::kIntelBroadwell: + case CpuType::kIntelSkylake: + return new CRC32AcceleratedX86ARMCombinedMultipleStreams< + 3, 2, CutoffStrategy::Fold3>(); + // PCLMULQDQ is slow, don't use it. + case CpuType::kIntelIvybridge: + case CpuType::kIntelSandybridge: + case CpuType::kIntelWestmere: + return new CRC32AcceleratedX86ARMCombinedMultipleStreams< + 3, 0, CutoffStrategy::Fold3>(); + case CpuType::kArmNeoverseN1: + return new CRC32AcceleratedX86ARMCombinedMultipleStreams< + 1, 1, CutoffStrategy::Unroll64CRC>(); +#if defined(__aarch64__) + default: + // Not all ARM processors support the needed instructions, so check here + // before trying to use an accelerated implementation. + if (SupportsArmCRC32PMULL()) { + return new CRC32AcceleratedX86ARMCombinedMultipleStreams< + 1, 1, CutoffStrategy::Unroll64CRC>(); + } else { + return nullptr; + } +#else + default: + // Something else, play it safe and assume slow PCLMULQDQ. + return new CRC32AcceleratedX86ARMCombinedMultipleStreams< + 3, 0, CutoffStrategy::Fold3>(); +#endif + } +} + +std::vector> NewCRC32AcceleratedX86ARMCombinedAll() { + auto ret = std::vector>(); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + ret.push_back(absl::make_unique>()); + + return ret; +} + +#else // !ABSL_INTERNAL_CAN_USE_SIMD_CRC32C + +std::vector> NewCRC32AcceleratedX86ARMCombinedAll() { + return std::vector>(); +} + +// no hardware acceleration available +CRCImpl* TryNewCRC32AcceleratedX86ARMCombined() { return nullptr; } + +#endif + +} // namespace crc_internal +ABSL_NAMESPACE_END +} // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/non_temporal_arm_intrinsics.h b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/non_temporal_arm_intrinsics.h new file mode 100644 index 0000000000..92632a3341 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/non_temporal_arm_intrinsics.h @@ -0,0 +1,77 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
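Before the body of the new aarch64 intrinsics header begins, a brief note on the `SupportsArmCRC32PMULL()` call in the aarch64 default branch above: not every ARMv8 core ships the optional CRC32 and PMULL extensions, so the check has to ask the kernel at runtime. The sketch below shows one common way to do that on Linux/aarch64 via `getauxval`; it is an illustration only, not Abseil's actual implementation, which may differ.

```cpp
// Hedged sketch of a runtime capability probe on Linux/aarch64; Abseil's own
// SupportsArmCRC32PMULL() may be implemented differently.
#if defined(__aarch64__) && defined(__linux__)
#include <asm/hwcap.h>  // HWCAP_CRC32, HWCAP_PMULL
#include <sys/auxv.h>   // getauxval, AT_HWCAP

inline bool HasArmCrc32AndPmull() {
  const unsigned long hwcaps = getauxval(AT_HWCAP);
  return (hwcaps & HWCAP_CRC32) != 0 && (hwcaps & HWCAP_PMULL) != 0;
}
#endif
```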
+
+#ifndef ABSL_CRC_INTERNAL_NON_TEMPORAL_ARM_INTRINSICS_H_
+#define ABSL_CRC_INTERNAL_NON_TEMPORAL_ARM_INTRINSICS_H_
+
+#ifdef __aarch64__
+#include <arm_neon.h>
+
+typedef int64x2_t __m128i; /* 128-bit vector containing integers */
+#define vreinterpretq_m128i_s32(x) vreinterpretq_s64_s32(x)
+#define vreinterpretq_s64_m128i(x) (x)
+
+// Guarantees that every preceding store is globally visible before any
+// subsequent store.
+// https://msdn.microsoft.com/en-us/library/5h2w73d1%28v=vs.90%29.aspx
+static inline __attribute__((always_inline)) void _mm_sfence(void) {
+  __sync_synchronize();
+}
+
+// Load 128-bits of integer data from unaligned memory into dst. This intrinsic
+// may perform better than _mm_loadu_si128 when the data crosses a cache line
+// boundary.
+//
+//   dst[127:0] := MEM[mem_addr+127:mem_addr]
+//
+// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_lddqu_si128
+#define _mm_lddqu_si128 _mm_loadu_si128
+
+// Loads 128-bit value. :
+// https://msdn.microsoft.com/zh-cn/library/f4k12ae8(v=vs.90).aspx
+static inline __attribute__((always_inline)) __m128i _mm_loadu_si128(
+    const __m128i *p) {
+  return vreinterpretq_m128i_s32(vld1q_s32((const int32_t *)p));
+}
+
+// Stores the data in a to the address p without polluting the caches. If the
+// cache line containing address p is already in the cache, the cache will be
+// updated.
+// https://msdn.microsoft.com/en-us/library/ba08y07y%28v=vs.90%29.aspx
+static inline __attribute__((always_inline)) void _mm_stream_si128(__m128i *p,
+                                                                   __m128i a) {
+#if __has_builtin(__builtin_nontemporal_store)
+  __builtin_nontemporal_store(a, p);
+#else
+  vst1q_s64((int64_t *)p, vreinterpretq_s64_m128i(a));
+#endif
+}
+
+// Sets the 16 signed 8-bit integer values.
+// https://msdn.microsoft.com/en-us/library/x0cx8zd3(v=vs.90).aspx
+static inline __attribute__((always_inline)) __m128i _mm_set_epi8(
+    signed char b15, signed char b14, signed char b13, signed char b12,
+    signed char b11, signed char b10, signed char b9, signed char b8,
+    signed char b7, signed char b6, signed char b5, signed char b4,
+    signed char b3, signed char b2, signed char b1, signed char b0) {
+  int8_t __attribute__((aligned(16)))
+      data[16] = {(int8_t)b0,  (int8_t)b1,  (int8_t)b2,  (int8_t)b3,
+                  (int8_t)b4,  (int8_t)b5,  (int8_t)b6,  (int8_t)b7,
+                  (int8_t)b8,  (int8_t)b9,  (int8_t)b10, (int8_t)b11,
+                  (int8_t)b12, (int8_t)b13, (int8_t)b14, (int8_t)b15};
+  return (__m128i)vld1q_s8(data);
+}
+#endif  // __aarch64__
+
+#endif  // ABSL_CRC_INTERNAL_NON_TEMPORAL_ARM_INTRINSICS_H_
diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/non_temporal_memcpy.h b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/non_temporal_memcpy.h
new file mode 100644
index 0000000000..0c6d7655bb
--- /dev/null
+++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/non_temporal_memcpy.h
@@ -0,0 +1,172 @@
+// Copyright 2022 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
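Before the body of non_temporal_memcpy.h begins, here is the core pattern it is built around, shown in isolation: read a whole 64-byte cache line with unaligned loads, write it back with streaming stores that bypass the cache, and fence so the stores become visible in order. On x86 these are the real SSE/SSE2 intrinsics; on aarch64 the shim header above maps the same names onto NEON. The sketch is illustrative only and not part of the diff.

```cpp
// Illustrative sketch of one cache-line copy with streaming stores.
// dst must be 16-byte aligned; src may be unaligned.
#include <emmintrin.h>  // _mm_loadu_si128, _mm_stream_si128, _mm_sfence

inline void StreamOneCacheLine(__m128i* dst, const __m128i* src) {
  const __m128i a = _mm_loadu_si128(src + 0);
  const __m128i b = _mm_loadu_si128(src + 1);
  const __m128i c = _mm_loadu_si128(src + 2);
  const __m128i d = _mm_loadu_si128(src + 3);
  _mm_stream_si128(dst + 0, a);  // non-temporal: does not pollute the cache
  _mm_stream_si128(dst + 1, b);
  _mm_stream_si128(dst + 2, c);
  _mm_stream_si128(dst + 3, d);
  _mm_sfence();  // make the streaming stores globally visible
}
```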
+
+#ifndef ABSL_CRC_INTERNAL_NON_TEMPORAL_MEMCPY_H_
+#define ABSL_CRC_INTERNAL_NON_TEMPORAL_MEMCPY_H_
+
+#include <algorithm>
+#include <cassert>
+#include <cstdint>
+#include <cstring>
+
+#include "absl/base/config.h"
+#include "absl/base/optimization.h"
+
+#ifdef __SSE__
+// Only include if we're running on a CPU that supports SSE ISA, needed for
+// sfence
+#include <xmmintrin.h>  // IWYU pragma: keep
+#endif
+#ifdef __SSE2__
+// Only include if we're running on a CPU that supports SSE2 ISA, needed for
+// movdqa, movdqu, movntdq
+#include <emmintrin.h>  // IWYU pragma: keep
+#endif
+#ifdef __aarch64__
+// Only include if we're running on a CPU that supports ARM NEON ISA, needed for
+// sfence, movdqa, movdqu, movntdq
+#include "absl/crc/internal/non_temporal_arm_intrinsics.h"
+#endif
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace crc_internal {
+// This non-temporal memcpy does regular load and non-temporal store memory
+// copy. It is compatible to both 16-byte aligned and unaligned addresses. If
+// data at the destination is not immediately accessed, using non-temporal
+// memcpy can save 1 DRAM load of the destination cacheline.
+
+constexpr int kCacheLineSize = ABSL_CACHELINE_SIZE;
+
+// If the objects overlap, the behavior is undefined.
+// MSVC does not have proper header support for some of these intrinsics,
+// so it should go to fallback
+inline void *non_temporal_store_memcpy(void *__restrict dst,
+                                       const void *__restrict src, size_t len) {
+#if (defined(__SSE3__) || defined(__aarch64__)) && !defined(_MSC_VER)
+  uint8_t *d = reinterpret_cast<uint8_t *>(dst);
+  const uint8_t *s = reinterpret_cast<const uint8_t *>(src);
+
+  // memcpy() the misaligned header. At the end of this if block, <d> is
+  // aligned to a 64-byte cacheline boundary or <len> == 0.
+  if (reinterpret_cast<uintptr_t>(d) & (kCacheLineSize - 1)) {
+    uintptr_t bytes_before_alignment_boundary =
+        kCacheLineSize -
+        (reinterpret_cast<uintptr_t>(d) & (kCacheLineSize - 1));
+    int header_len = (std::min)(bytes_before_alignment_boundary, len);
+    assert(bytes_before_alignment_boundary < kCacheLineSize);
+    memcpy(d, s, header_len);
+    d += header_len;
+    s += header_len;
+    len -= header_len;
+  }
+
+  if (len >= kCacheLineSize) {
+    _mm_sfence();
+    __m128i *dst_cacheline = reinterpret_cast<__m128i *>(d);
+    const __m128i *src_cacheline = reinterpret_cast<const __m128i *>(s);
+    constexpr int kOpsPerCacheLine = kCacheLineSize / sizeof(__m128i);
+    uint64_t loops = len / kCacheLineSize;
+
+    while (len >= kCacheLineSize) {
+      __m128i temp1, temp2, temp3, temp4;
+      temp1 = _mm_lddqu_si128(src_cacheline + 0);
+      temp2 = _mm_lddqu_si128(src_cacheline + 1);
+      temp3 = _mm_lddqu_si128(src_cacheline + 2);
+      temp4 = _mm_lddqu_si128(src_cacheline + 3);
+      _mm_stream_si128(dst_cacheline + 0, temp1);
+      _mm_stream_si128(dst_cacheline + 1, temp2);
+      _mm_stream_si128(dst_cacheline + 2, temp3);
+      _mm_stream_si128(dst_cacheline + 3, temp4);
+      src_cacheline += kOpsPerCacheLine;
+      dst_cacheline += kOpsPerCacheLine;
+      len -= kCacheLineSize;
+    }
+    d += loops * kCacheLineSize;
+    s += loops * kCacheLineSize;
+    _mm_sfence();
+  }
+
+  // memcpy the tail.
+  if (len) {
+    memcpy(d, s, len);
+  }
+  return dst;
+#else
+  // Fallback to regular memcpy when SSE2/3 & aarch64 is not available.
+ return memcpy(dst, src, len); +#endif // __SSE3__ || __aarch64__ +} + +// MSVC does not have proper header support for some of these intrinsics, +// so it should go to fallback +inline void *non_temporal_store_memcpy_avx(void *__restrict dst, + const void *__restrict src, + size_t len) { +#if defined(__AVX__) && !defined(_MSC_VER) + uint8_t *d = reinterpret_cast(dst); + const uint8_t *s = reinterpret_cast(src); + + // memcpy() the misaligned header. At the end of this if block, is + // aligned to a 64-byte cacheline boundary or == 0. + if (reinterpret_cast(d) & (kCacheLineSize - 1)) { + uintptr_t bytes_before_alignment_boundary = + kCacheLineSize - + (reinterpret_cast(d) & (kCacheLineSize - 1)); + int header_len = (std::min)(bytes_before_alignment_boundary, len); + assert(bytes_before_alignment_boundary < kCacheLineSize); + memcpy(d, s, header_len); + d += header_len; + s += header_len; + len -= header_len; + } + + if (len >= kCacheLineSize) { + _mm_sfence(); + __m256i *dst_cacheline = reinterpret_cast<__m256i *>(d); + const __m256i *src_cacheline = reinterpret_cast(s); + constexpr int kOpsPerCacheLine = kCacheLineSize / sizeof(__m256i); + int loops = len / kCacheLineSize; + + while (len >= kCacheLineSize) { + __m256i temp1, temp2; + temp1 = _mm256_lddqu_si256(src_cacheline + 0); + temp2 = _mm256_lddqu_si256(src_cacheline + 1); + _mm256_stream_si256(dst_cacheline + 0, temp1); + _mm256_stream_si256(dst_cacheline + 1, temp2); + src_cacheline += kOpsPerCacheLine; + dst_cacheline += kOpsPerCacheLine; + len -= kCacheLineSize; + } + d += loops * kCacheLineSize; + s += loops * kCacheLineSize; + _mm_sfence(); + } + + // memcpy the tail. + if (len) { + memcpy(d, s, len); + } + return dst; +#else + // Fallback to regular memcpy when AVX is not available. + return memcpy(dst, src, len); +#endif // __AVX__ +} + +} // namespace crc_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_CRC_INTERNAL_NON_TEMPORAL_MEMCPY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/crc/internal/non_temporal_memcpy_test.cc b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/non_temporal_memcpy_test.cc new file mode 100644 index 0000000000..eb07a559a5 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/crc/internal/non_temporal_memcpy_test.cc @@ -0,0 +1,88 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "absl/crc/internal/non_temporal_memcpy.h" + +#include +#include +#include +#include + +#include "gtest/gtest.h" + +namespace { + +struct TestParam { + size_t copy_size; + uint32_t src_offset; + uint32_t dst_offset; +}; + +class NonTemporalMemcpyTest : public testing::TestWithParam { + protected: + void SetUp() override { + // Make buf_size multiple of 16 bytes. 
+ size_t buf_size = ((std::max(GetParam().src_offset, GetParam().dst_offset) + + GetParam().copy_size) + + 15) / + 16 * 16; + a_.resize(buf_size); + b_.resize(buf_size); + for (size_t i = 0; i < buf_size; i++) { + a_[i] = static_cast(i % 256); + b_[i] = ~a_[i]; + } + } + + std::vector a_, b_; +}; + +TEST_P(NonTemporalMemcpyTest, SSEEquality) { + uint8_t *src = a_.data() + GetParam().src_offset; + uint8_t *dst = b_.data() + GetParam().dst_offset; + absl::crc_internal::non_temporal_store_memcpy(dst, src, GetParam().copy_size); + for (size_t i = 0; i < GetParam().copy_size; i++) { + EXPECT_EQ(src[i], dst[i]); + } +} + +TEST_P(NonTemporalMemcpyTest, AVXEquality) { + uint8_t* src = a_.data() + GetParam().src_offset; + uint8_t* dst = b_.data() + GetParam().dst_offset; + + absl::crc_internal::non_temporal_store_memcpy_avx(dst, src, + GetParam().copy_size); + for (size_t i = 0; i < GetParam().copy_size; i++) { + EXPECT_EQ(src[i], dst[i]); + } +} + +// 63B is smaller than one cacheline operation thus the non-temporal routine +// will not be called. +// 4352B is sufficient for testing 4092B data copy with room for offsets. +constexpr TestParam params[] = { + {63, 0, 0}, {58, 5, 5}, {61, 2, 0}, {61, 0, 2}, + {58, 5, 2}, {4096, 0, 0}, {4096, 0, 1}, {4096, 0, 2}, + {4096, 0, 3}, {4096, 0, 4}, {4096, 0, 5}, {4096, 0, 6}, + {4096, 0, 7}, {4096, 0, 8}, {4096, 0, 9}, {4096, 0, 10}, + {4096, 0, 11}, {4096, 0, 12}, {4096, 0, 13}, {4096, 0, 14}, + {4096, 0, 15}, {4096, 7, 7}, {4096, 3, 0}, {4096, 1, 0}, + {4096, 9, 3}, {4096, 9, 11}, {8192, 0, 0}, {8192, 5, 2}, + {1024768, 7, 11}, {1, 0, 0}, {1, 0, 1}, {1, 1, 0}, + {1, 1, 1}}; + +INSTANTIATE_TEST_SUITE_P(ParameterizedNonTemporalMemcpyTest, + NonTemporalMemcpyTest, testing::ValuesIn(params)); + +} // namespace diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/failure_signal_handler.cc b/TMessagesProj/jni/voip/webrtc/absl/debugging/failure_signal_handler.cc index 689e5979e7..ef8ab9e5a8 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/failure_signal_handler.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/failure_signal_handler.cc @@ -42,7 +42,6 @@ #include #include "absl/base/attributes.h" -#include "absl/base/internal/errno_saver.h" #include "absl/base/internal/raw_logging.h" #include "absl/base/internal/sysinfo.h" #include "absl/debugging/internal/examine_stack.h" @@ -51,8 +50,10 @@ #ifndef _WIN32 #define ABSL_HAVE_SIGACTION // Apple WatchOS and TVOS don't allow sigaltstack -#if !(defined(TARGET_OS_WATCH) && TARGET_OS_WATCH) && \ - !(defined(TARGET_OS_TV) && TARGET_OS_TV) +// Apple macOS has sigaltstack, but using it makes backtrace() unusable. +#if !(defined(TARGET_OS_OSX) && TARGET_OS_OSX) && \ + !(defined(TARGET_OS_WATCH) && TARGET_OS_WATCH) && \ + !(defined(TARGET_OS_TV) && TARGET_OS_TV) && !defined(__QNX__) #define ABSL_HAVE_SIGALTSTACK #endif #endif @@ -134,10 +135,11 @@ static bool SetupAlternateStackOnce() { #if defined(__wasm__) || defined (__asjms__) const size_t page_mask = getpagesize() - 1; #else - const size_t page_mask = sysconf(_SC_PAGESIZE) - 1; + const size_t page_mask = static_cast(sysconf(_SC_PAGESIZE)) - 1; #endif size_t stack_size = - (std::max(SIGSTKSZ, 65536) + page_mask) & ~page_mask; + (std::max(static_cast(SIGSTKSZ), size_t{65536}) + page_mask) & + ~page_mask; #if defined(ABSL_HAVE_ADDRESS_SANITIZER) || \ defined(ABSL_HAVE_MEMORY_SANITIZER) || defined(ABSL_HAVE_THREAD_SANITIZER) // Account for sanitizer instrumentation requiring additional stack space. 
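A quick worked check of the rounding idiom used for the alternate signal stack size above: adding `page_mask` and clearing the low bits with `~page_mask` rounds the requested size up to the next page boundary. The snippet below is illustrative and not part of the diff.

```cpp
#include <cstddef>

// (value + page_mask) & ~page_mask == value rounded up to a page multiple.
constexpr size_t RoundUpToPage(size_t value, size_t page_mask) {
  return (value + page_mask) & ~page_mask;
}

// With 4 KiB pages, page_mask == 0xFFF.
static_assert(RoundUpToPage(65536, 0xFFF) == 65536, "already page-aligned");
static_assert(RoundUpToPage(70000, 0xFFF) == 73728, "rounded up to 18 pages");
```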
@@ -217,8 +219,7 @@ static void InstallOneFailureHandler(FailureSignalData* data, #endif static void WriteToStderr(const char* data) { - absl::base_internal::ErrnoSaver errno_saver; - absl::raw_logging_internal::SafeWriteToStderr(data, strlen(data)); + absl::raw_log_internal::AsyncSignalSafeWriteToStderr(data, strlen(data)); } static void WriteSignalMessage(int signo, int cpu, @@ -291,7 +292,7 @@ static void WriteFailureInfo(int signo, void* ucontext, int cpu, // some platforms. static void PortableSleepForSeconds(int seconds) { #ifdef _WIN32 - Sleep(seconds * 1000); + Sleep(static_cast(seconds * 1000)); #else struct timespec sleep_time; sleep_time.tv_sec = seconds; @@ -325,9 +326,9 @@ static void AbslFailureSignalHandler(int signo, siginfo_t*, void* ucontext) { const GetTidType this_tid = absl::base_internal::GetTID(); GetTidType previous_failed_tid = 0; - if (!failed_tid.compare_exchange_strong( - previous_failed_tid, static_cast(this_tid), - std::memory_order_acq_rel, std::memory_order_relaxed)) { + if (!failed_tid.compare_exchange_strong(previous_failed_tid, this_tid, + std::memory_order_acq_rel, + std::memory_order_relaxed)) { ABSL_RAW_LOG( ERROR, "Signal %d raised at PC=%p while already in AbslFailureSignalHandler()", @@ -356,7 +357,7 @@ static void AbslFailureSignalHandler(int signo, siginfo_t*, void* ucontext) { if (fsh_options.alarm_on_failure_secs > 0) { alarm(0); // Cancel any existing alarms. signal(SIGALRM, ImmediateAbortSignalHandler); - alarm(fsh_options.alarm_on_failure_secs); + alarm(static_cast(fsh_options.alarm_on_failure_secs)); } #endif diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/address_is_readable.cc b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/address_is_readable.cc index 4be6256bfb..91eaa76f8a 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/address_is_readable.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/address_is_readable.cc @@ -52,7 +52,7 @@ namespace debugging_internal { bool AddressIsReadable(const void *addr) { // Align address on 8-byte boundary. On aarch64, checking last // byte before inaccessible page returned unexpected EFAULT. - const uintptr_t u_addr = reinterpret_cast(addr) & ~7; + const uintptr_t u_addr = reinterpret_cast(addr) & ~uintptr_t{7}; addr = reinterpret_cast(u_addr); // rt_sigprocmask below will succeed for this input. diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/demangle.cc b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/demangle.cc index 93ae32796c..f2832915bf 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/demangle.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/demangle.cc @@ -151,12 +151,12 @@ static const AbbrevPair kSubstitutionList[] = { // State needed for demangling. This struct is copied in almost every stack // frame, so every byte counts. typedef struct { - int mangled_idx; // Cursor of mangled name. - int out_cur_idx; // Cursor of output string. - int prev_name_idx; // For constructors/destructors. - signed int prev_name_length : 16; // For constructors/destructors. - signed int nest_level : 15; // For nested names. - unsigned int append : 1; // Append flag. + int mangled_idx; // Cursor of mangled name. + int out_cur_idx; // Cursor of output string. + int prev_name_idx; // For constructors/destructors. + unsigned int prev_name_length : 16; // For constructors/destructors. + signed int nest_level : 15; // For nested names. + unsigned int append : 1; // Append flag. 
// Note: for some reason MSVC can't pack "bool append : 1" into the same int // with the above two fields, so we use an int instead. Amusingly it can pack // "signed bool" as expected, but relying on that to continue to be a legal @@ -235,8 +235,8 @@ static size_t StrLen(const char *str) { } // Returns true if "str" has at least "n" characters remaining. -static bool AtLeastNumCharsRemaining(const char *str, int n) { - for (int i = 0; i < n; ++i) { +static bool AtLeastNumCharsRemaining(const char *str, size_t n) { + for (size_t i = 0; i < n; ++i) { if (str[i] == '\0') { return false; } @@ -253,18 +253,20 @@ static bool StrPrefix(const char *str, const char *prefix) { return prefix[i] == '\0'; // Consumed everything in "prefix". } -static void InitState(State *state, const char *mangled, char *out, - int out_size) { +static void InitState(State* state, + const char* mangled, + char* out, + size_t out_size) { state->mangled_begin = mangled; state->out = out; - state->out_end_idx = out_size; + state->out_end_idx = static_cast(out_size); state->recursion_depth = 0; state->steps = 0; state->parse_state.mangled_idx = 0; state->parse_state.out_cur_idx = 0; state->parse_state.prev_name_idx = 0; - state->parse_state.prev_name_length = -1; + state->parse_state.prev_name_length = 0; state->parse_state.nest_level = -1; state->parse_state.append = true; } @@ -356,8 +358,8 @@ static bool ZeroOrMore(ParseFunc parse_func, State *state) { // Append "str" at "out_cur_idx". If there is an overflow, out_cur_idx is // set to out_end_idx+1. The output string is ensured to // always terminate with '\0' as long as there is no overflow. -static void Append(State *state, const char *const str, const int length) { - for (int i = 0; i < length; ++i) { +static void Append(State *state, const char *const str, const size_t length) { + for (size_t i = 0; i < length; ++i) { if (state->parse_state.out_cur_idx + 1 < state->out_end_idx) { // +1 for '\0' state->out[state->parse_state.out_cur_idx++] = str[i]; @@ -420,7 +422,7 @@ static bool EndsWith(State *state, const char chr) { // Append "str" with some tweaks, iff "append" state is true. static void MaybeAppendWithLength(State *state, const char *const str, - const int length) { + const size_t length) { if (state->parse_state.append && length > 0) { // Append a space if the output buffer ends with '<' and "str" // starts with '<' to avoid <<<. @@ -432,14 +434,14 @@ static void MaybeAppendWithLength(State *state, const char *const str, if (state->parse_state.out_cur_idx < state->out_end_idx && (IsAlpha(str[0]) || str[0] == '_')) { state->parse_state.prev_name_idx = state->parse_state.out_cur_idx; - state->parse_state.prev_name_length = length; + state->parse_state.prev_name_length = static_cast(length); } Append(state, str, length); } } // Appends a positive decimal number to the output if appending is enabled. -static bool MaybeAppendDecimal(State *state, unsigned int val) { +static bool MaybeAppendDecimal(State *state, int val) { // Max {32-64}-bit unsigned int is 20 digits. constexpr size_t kMaxLength = 20; char buf[kMaxLength]; @@ -451,12 +453,12 @@ static bool MaybeAppendDecimal(State *state, unsigned int val) { // one-past-the-end and manipulate one character before the pointer. char *p = &buf[kMaxLength]; do { // val=0 is the only input that should write a leading zero digit. - *--p = (val % 10) + '0'; + *--p = static_cast((val % 10) + '0'); val /= 10; } while (p > buf && val != 0); // 'p' landed on the last character we set. How convenient. 
- Append(state, p, kMaxLength - (p - buf)); + Append(state, p, kMaxLength - static_cast(p - buf)); } return true; @@ -466,7 +468,7 @@ static bool MaybeAppendDecimal(State *state, unsigned int val) { // Returns true so that it can be placed in "if" conditions. static bool MaybeAppend(State *state, const char *const str) { if (state->parse_state.append) { - int length = StrLen(str); + size_t length = StrLen(str); MaybeAppendWithLength(state, str, length); } return true; @@ -521,10 +523,10 @@ static void MaybeCancelLastSeparator(State *state) { // Returns true if the identifier of the given length pointed to by // "mangled_cur" is anonymous namespace. -static bool IdentifierIsAnonymousNamespace(State *state, int length) { +static bool IdentifierIsAnonymousNamespace(State *state, size_t length) { // Returns true if "anon_prefix" is a proper prefix of "mangled_cur". static const char anon_prefix[] = "_GLOBAL__N_"; - return (length > static_cast(sizeof(anon_prefix) - 1) && + return (length > (sizeof(anon_prefix) - 1) && StrPrefix(RemainingInput(state), anon_prefix)); } @@ -542,12 +544,13 @@ static bool ParseUnnamedTypeName(State *state); static bool ParseNumber(State *state, int *number_out); static bool ParseFloatNumber(State *state); static bool ParseSeqId(State *state); -static bool ParseIdentifier(State *state, int length); +static bool ParseIdentifier(State *state, size_t length); static bool ParseOperatorName(State *state, int *arity); static bool ParseSpecialName(State *state); static bool ParseCallOffset(State *state); static bool ParseNVOffset(State *state); static bool ParseVOffset(State *state); +static bool ParseAbiTags(State *state); static bool ParseCtorDtorName(State *state); static bool ParseDecltype(State *state); static bool ParseType(State *state); @@ -601,7 +604,7 @@ static bool ParseSubstitution(State *state, bool accept_std); // // Reference: // - Itanium C++ ABI -// +// // ::= _Z static bool ParseMangledName(State *state) { @@ -741,17 +744,42 @@ static bool ParsePrefix(State *state) { return true; } -// ::= -// ::= -// ::= -// ::= // GCC extension; see below. -// ::= +// ::= [] +// ::= [] +// ::= [] +// ::= [] +// ::= [] +// +// is a GCC extension; see below. 
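The new optional abi-tags suffix in the grammar above is what makes names such as `_Z1aB3abc` demangle to `a[abi:abc]`; the demangle_test.cc changes later in this diff cover exactly these cases. The snippet below is an illustrative call through the internal API with its updated `size_t` buffer-size parameter, not part of the diff.

```cpp
#include <cstdio>

#include "absl/debugging/internal/demangle.h"

int main() {
  char out[80];
  // Mangled name of `a` from: struct [[gnu::abi_tag("abc")]] A {}; A a;
  if (absl::debugging_internal::Demangle("_Z1aB3abc", out, sizeof(out))) {
    std::printf("%s\n", out);  // prints "a[abi:abc]"
  }
  return 0;
}
```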
static bool ParseUnqualifiedName(State *state) { ComplexityGuard guard(state); if (guard.IsTooComplex()) return false; - return (ParseOperatorName(state, nullptr) || ParseCtorDtorName(state) || - ParseSourceName(state) || ParseLocalSourceName(state) || - ParseUnnamedTypeName(state)); + if (ParseOperatorName(state, nullptr) || ParseCtorDtorName(state) || + ParseSourceName(state) || ParseLocalSourceName(state) || + ParseUnnamedTypeName(state)) { + return ParseAbiTags(state); + } + return false; +} + +// ::= [] +// ::= B +static bool ParseAbiTags(State *state) { + ComplexityGuard guard(state); + if (guard.IsTooComplex()) return false; + + while (ParseOneCharToken(state, 'B')) { + ParseState copy = state->parse_state; + MaybeAppend(state, "[abi:"); + + if (!ParseSourceName(state)) { + state->parse_state = copy; + return false; + } + MaybeAppend(state, "]"); + } + + return true; } // ::= @@ -760,7 +788,8 @@ static bool ParseSourceName(State *state) { if (guard.IsTooComplex()) return false; ParseState copy = state->parse_state; int length = -1; - if (ParseNumber(state, &length) && ParseIdentifier(state, length)) { + if (ParseNumber(state, &length) && + ParseIdentifier(state, static_cast(length))) { return true; } state->parse_state = copy; @@ -838,7 +867,7 @@ static bool ParseNumber(State *state, int *number_out) { uint64_t number = 0; for (; *p != '\0'; ++p) { if (IsDigit(*p)) { - number = number * 10 + (*p - '0'); + number = number * 10 + static_cast(*p - '0'); } else { break; } @@ -853,7 +882,7 @@ static bool ParseNumber(State *state, int *number_out) { state->parse_state.mangled_idx += p - RemainingInput(state); if (number_out != nullptr) { // Note: possibly truncate "number". - *number_out = number; + *number_out = static_cast(number); } return true; } @@ -897,10 +926,10 @@ static bool ParseSeqId(State *state) { } // ::= (of given length) -static bool ParseIdentifier(State *state, int length) { +static bool ParseIdentifier(State *state, size_t length) { ComplexityGuard guard(state); if (guard.IsTooComplex()) return false; - if (length < 0 || !AtLeastNumCharsRemaining(RemainingInput(state), length)) { + if (!AtLeastNumCharsRemaining(RemainingInput(state), length)) { return false; } if (IdentifierIsAnonymousNamespace(state, length)) { @@ -1947,7 +1976,7 @@ static bool Overflowed(const State *state) { } // The demangler entry point. -bool Demangle(const char *mangled, char *out, int out_size) { +bool Demangle(const char* mangled, char* out, size_t out_size) { State state; InitState(&state, mangled, out, out_size); return ParseTopLevelMangledName(&state) && !Overflowed(&state) && diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/demangle.h b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/demangle.h index c314d9bc23..e1f156989f 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/demangle.h +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/demangle.h @@ -62,7 +62,7 @@ namespace debugging_internal { // Demangle `mangled`. On success, return true and write the // demangled symbol name to `out`. Otherwise, return false. // `out` is modified even if demangling is unsuccessful. 
-bool Demangle(const char *mangled, char *out, int out_size); +bool Demangle(const char* mangled, char* out, size_t out_size); } // namespace debugging_internal ABSL_NAMESPACE_END diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/demangle_test.cc b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/demangle_test.cc index 6b142902ca..8463a2b7d1 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/demangle_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/demangle_test.cc @@ -102,6 +102,30 @@ TEST(Demangle, Clones) { EXPECT_FALSE(Demangle("_ZL3Foov.isra.2.constprop.", tmp, sizeof(tmp))); } +// Test the GNU abi_tag extension. +TEST(Demangle, AbiTags) { + char tmp[80]; + + // Mangled name generated via: + // struct [[gnu::abi_tag("abc")]] A{}; + // A a; + EXPECT_TRUE(Demangle("_Z1aB3abc", tmp, sizeof(tmp))); + EXPECT_STREQ("a[abi:abc]", tmp); + + // Mangled name generated via: + // struct B { + // B [[gnu::abi_tag("xyz")]] (){}; + // }; + // B b; + EXPECT_TRUE(Demangle("_ZN1BC2B3xyzEv", tmp, sizeof(tmp))); + EXPECT_STREQ("B::B[abi:xyz]()", tmp); + + // Mangled name generated via: + // [[gnu::abi_tag("foo", "bar")]] void C() {} + EXPECT_TRUE(Demangle("_Z1CB3barB3foov", tmp, sizeof(tmp))); + EXPECT_STREQ("C[abi:bar][abi:foo]()", tmp); +} + // Tests that verify that Demangle footprint is within some limit. // They are not to be run under sanitizers as the sanitizers increase // stack consumption by about 4x. diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/elf_mem_image.cc b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/elf_mem_image.cc index 29a281812b..42dcd3cde9 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/elf_mem_image.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/elf_mem_image.cc @@ -91,7 +91,7 @@ int ElfMemImage::GetNumSymbols() const { return 0; } // See http://www.caldera.com/developers/gabi/latest/ch5.dynamic.html#hash - return hash_[1]; + return static_cast(hash_[1]); } const ElfW(Sym) *ElfMemImage::GetDynsym(int index) const { @@ -105,11 +105,9 @@ const ElfW(Versym) *ElfMemImage::GetVersym(int index) const { } const ElfW(Phdr) *ElfMemImage::GetPhdr(int index) const { - ABSL_RAW_CHECK(index < ehdr_->e_phnum, "index out of range"); - return GetTableElement(ehdr_, - ehdr_->e_phoff, - ehdr_->e_phentsize, - index); + ABSL_RAW_CHECK(index >= 0 && index < ehdr_->e_phnum, "index out of range"); + return GetTableElement(ehdr_, ehdr_->e_phoff, ehdr_->e_phentsize, + static_cast(index)); } const char *ElfMemImage::GetDynstr(ElfW(Word) offset) const { @@ -159,7 +157,8 @@ void ElfMemImage::Init(const void *base) { hash_ = nullptr; strsize_ = 0; verdefnum_ = 0; - link_base_ = ~0L; // Sentinel: PT_LOAD .p_vaddr can't possibly be this. + // Sentinel: PT_LOAD .p_vaddr can't possibly be this. 
+ link_base_ = ~ElfW(Addr){0}; // NOLINT(readability/braces) if (!base) { return; } @@ -218,11 +217,11 @@ void ElfMemImage::Init(const void *base) { } ptrdiff_t relocation = base_as_char - reinterpret_cast(link_base_); - ElfW(Dyn) *dynamic_entry = - reinterpret_cast(dynamic_program_header->p_vaddr + - relocation); + ElfW(Dyn)* dynamic_entry = reinterpret_cast( + static_cast(dynamic_program_header->p_vaddr) + relocation); for (; dynamic_entry->d_tag != DT_NULL; ++dynamic_entry) { - const auto value = dynamic_entry->d_un.d_val + relocation; + const auto value = + static_cast(dynamic_entry->d_un.d_val) + relocation; switch (dynamic_entry->d_tag) { case DT_HASH: hash_ = reinterpret_cast(value); @@ -240,10 +239,10 @@ void ElfMemImage::Init(const void *base) { verdef_ = reinterpret_cast(value); break; case DT_VERDEFNUM: - verdefnum_ = dynamic_entry->d_un.d_val; + verdefnum_ = static_cast(dynamic_entry->d_un.d_val); break; case DT_STRSZ: - strsize_ = dynamic_entry->d_un.d_val; + strsize_ = static_cast(dynamic_entry->d_un.d_val); break; default: // Unrecognized entries explicitly ignored. @@ -351,7 +350,11 @@ void ElfMemImage::SymbolIterator::Update(int increment) { const ElfW(Versym) *version_symbol = image->GetVersym(index_); ABSL_RAW_CHECK(symbol && version_symbol, ""); const char *const symbol_name = image->GetDynstr(symbol->st_name); +#if defined(__NetBSD__) + const int version_index = version_symbol->vs_vers & VERSYM_VERSION; +#else const ElfW(Versym) version_index = version_symbol[0] & VERSYM_VERSION; +#endif const ElfW(Verdef) *version_definition = nullptr; const char *version_name = ""; if (symbol->st_shndx == SHN_UNDEF) { diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/elf_mem_image.h b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/elf_mem_image.h index a894bd423e..113071a9d1 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/elf_mem_image.h +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/elf_mem_image.h @@ -31,8 +31,9 @@ #error ABSL_HAVE_ELF_MEM_IMAGE cannot be directly set #endif -#if defined(__ELF__) && !defined(__native_client__) && !defined(__asmjs__) && \ - !defined(__wasm__) +#if defined(__ELF__) && !defined(__OpenBSD__) && !defined(__QNX__) && \ + !defined(__native_client__) && !defined(__asmjs__) && \ + !defined(__wasm__) && !defined(__HAIKU__) #define ABSL_HAVE_ELF_MEM_IMAGE 1 #endif diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/examine_stack.cc b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/examine_stack.cc index 589a3ef367..57863228d8 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/examine_stack.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/examine_stack.cc @@ -20,7 +20,13 @@ #include #endif -#ifdef __APPLE__ +#include "absl/base/config.h" + +#ifdef ABSL_HAVE_MMAP +#include +#endif + +#if defined(__linux__) || defined(__APPLE__) #include #endif @@ -37,10 +43,115 @@ namespace absl { ABSL_NAMESPACE_BEGIN namespace debugging_internal { +namespace { +constexpr int kDefaultDumpStackFramesLimit = 64; +// The %p field width for printf() functions is two characters per byte, +// and two extra for the leading "0x". +constexpr int kPrintfPointerFieldWidth = 2 + 2 * sizeof(void*); + +ABSL_CONST_INIT SymbolizeUrlEmitter debug_stack_trace_hook = nullptr; + +// Async-signal safe mmap allocator. 
+void* Allocate(size_t num_bytes) { +#ifdef ABSL_HAVE_MMAP + void* p = ::mmap(nullptr, num_bytes, PROT_READ | PROT_WRITE, + MAP_PRIVATE | MAP_ANONYMOUS, -1, 0); + return p == MAP_FAILED ? nullptr : p; +#else + (void)num_bytes; + return nullptr; +#endif // ABSL_HAVE_MMAP +} + +void Deallocate(void* p, size_t size) { +#ifdef ABSL_HAVE_MMAP + ::munmap(p, size); +#else + (void)p; + (void)size; +#endif // ABSL_HAVE_MMAP +} + +// Print a program counter only. +void DumpPC(OutputWriter* writer, void* writer_arg, void* const pc, + const char* const prefix) { + char buf[100]; + snprintf(buf, sizeof(buf), "%s@ %*p\n", prefix, kPrintfPointerFieldWidth, pc); + writer(buf, writer_arg); +} + +// Print a program counter and the corresponding stack frame size. +void DumpPCAndFrameSize(OutputWriter* writer, void* writer_arg, void* const pc, + int framesize, const char* const prefix) { + char buf[100]; + if (framesize <= 0) { + snprintf(buf, sizeof(buf), "%s@ %*p (unknown)\n", prefix, + kPrintfPointerFieldWidth, pc); + } else { + snprintf(buf, sizeof(buf), "%s@ %*p %9d\n", prefix, + kPrintfPointerFieldWidth, pc, framesize); + } + writer(buf, writer_arg); +} + +// Print a program counter and the corresponding symbol. +void DumpPCAndSymbol(OutputWriter* writer, void* writer_arg, void* const pc, + const char* const prefix) { + char tmp[1024]; + const char* symbol = "(unknown)"; + // Symbolizes the previous address of pc because pc may be in the + // next function. The overrun happens when the function ends with + // a call to a function annotated noreturn (e.g. CHECK). + // If symbolization of pc-1 fails, also try pc on the off-chance + // that we crashed on the first instruction of a function (that + // actually happens very often for e.g. __restore_rt). + const uintptr_t prev_pc = reinterpret_cast(pc) - 1; + if (absl::Symbolize(reinterpret_cast(prev_pc), tmp, + sizeof(tmp)) || + absl::Symbolize(pc, tmp, sizeof(tmp))) { + symbol = tmp; + } + char buf[1024]; + snprintf(buf, sizeof(buf), "%s@ %*p %s\n", prefix, kPrintfPointerFieldWidth, + pc, symbol); + writer(buf, writer_arg); +} + +// Print a program counter, its stack frame size, and its symbol name. +// Note that there is a separate symbolize_pc argument. Return addresses may be +// at the end of the function, and this allows the caller to back up from pc if +// appropriate. +void DumpPCAndFrameSizeAndSymbol(OutputWriter* writer, void* writer_arg, + void* const pc, void* const symbolize_pc, + int framesize, const char* const prefix) { + char tmp[1024]; + const char* symbol = "(unknown)"; + if (absl::Symbolize(symbolize_pc, tmp, sizeof(tmp))) { + symbol = tmp; + } + char buf[1024]; + if (framesize <= 0) { + snprintf(buf, sizeof(buf), "%s@ %*p (unknown) %s\n", prefix, + kPrintfPointerFieldWidth, pc, symbol); + } else { + snprintf(buf, sizeof(buf), "%s@ %*p %9d %s\n", prefix, + kPrintfPointerFieldWidth, pc, framesize, symbol); + } + writer(buf, writer_arg); +} + +} // namespace + +void RegisterDebugStackTraceHook(SymbolizeUrlEmitter hook) { + debug_stack_trace_hook = hook; +} + +SymbolizeUrlEmitter GetDebugStackTraceHook() { return debug_stack_trace_hook; } + // Returns the program counter from signal context, nullptr if // unknown. vuc is a ucontext_t*. We use void* to avoid the use of // ucontext_t on non-POSIX systems. 
-void* GetProgramCounter(void* vuc) { +void* GetProgramCounter(void* const vuc) { #ifdef __linux__ if (vuc != nullptr) { ucontext_t* context = reinterpret_cast(vuc); @@ -82,6 +193,8 @@ void* GetProgramCounter(void* vuc) { return reinterpret_cast(context->uc_mcontext.gregs[16]); #elif defined(__e2k__) return reinterpret_cast(context->uc_mcontext.cr0_hi); +#elif defined(__loongarch__) + return reinterpret_cast(context->uc_mcontext.__pc); #else #error "Undefined Architecture." #endif @@ -120,59 +233,17 @@ void* GetProgramCounter(void* vuc) { return nullptr; } -// The %p field width for printf() functions is two characters per byte, -// and two extra for the leading "0x". -static constexpr int kPrintfPointerFieldWidth = 2 + 2 * sizeof(void*); - -// Print a program counter, its stack frame size, and its symbol name. -// Note that there is a separate symbolize_pc argument. Return addresses may be -// at the end of the function, and this allows the caller to back up from pc if -// appropriate. -static void DumpPCAndFrameSizeAndSymbol(void (*writerfn)(const char*, void*), - void* writerfn_arg, void* pc, - void* symbolize_pc, int framesize, - const char* const prefix) { - char tmp[1024]; - const char* symbol = "(unknown)"; - if (absl::Symbolize(symbolize_pc, tmp, sizeof(tmp))) { - symbol = tmp; - } - char buf[1024]; - if (framesize <= 0) { - snprintf(buf, sizeof(buf), "%s@ %*p (unknown) %s\n", prefix, - kPrintfPointerFieldWidth, pc, symbol); - } else { - snprintf(buf, sizeof(buf), "%s@ %*p %9d %s\n", prefix, - kPrintfPointerFieldWidth, pc, framesize, symbol); - } - writerfn(buf, writerfn_arg); -} - -// Print a program counter and the corresponding stack frame size. -static void DumpPCAndFrameSize(void (*writerfn)(const char*, void*), - void* writerfn_arg, void* pc, int framesize, - const char* const prefix) { - char buf[100]; - if (framesize <= 0) { - snprintf(buf, sizeof(buf), "%s@ %*p (unknown)\n", prefix, - kPrintfPointerFieldWidth, pc); - } else { - snprintf(buf, sizeof(buf), "%s@ %*p %9d\n", prefix, - kPrintfPointerFieldWidth, pc, framesize); - } - writerfn(buf, writerfn_arg); -} - -void DumpPCAndFrameSizesAndStackTrace( - void* pc, void* const stack[], int frame_sizes[], int depth, - int min_dropped_frames, bool symbolize_stacktrace, - void (*writerfn)(const char*, void*), void* writerfn_arg) { +void DumpPCAndFrameSizesAndStackTrace(void* const pc, void* const stack[], + int frame_sizes[], int depth, + int min_dropped_frames, + bool symbolize_stacktrace, + OutputWriter* writer, void* writer_arg) { if (pc != nullptr) { // We don't know the stack frame size for PC, use 0. if (symbolize_stacktrace) { - DumpPCAndFrameSizeAndSymbol(writerfn, writerfn_arg, pc, pc, 0, "PC: "); + DumpPCAndFrameSizeAndSymbol(writer, writer_arg, pc, pc, 0, "PC: "); } else { - DumpPCAndFrameSize(writerfn, writerfn_arg, pc, 0, "PC: "); + DumpPCAndFrameSize(writer, writer_arg, pc, 0, "PC: "); } } for (int i = 0; i < depth; i++) { @@ -182,20 +253,63 @@ void DumpPCAndFrameSizesAndStackTrace( // call to a function annotated noreturn (e.g. CHECK). Note that we don't // do this for pc above, as the adjustment is only correct for return // addresses. 
- DumpPCAndFrameSizeAndSymbol(writerfn, writerfn_arg, stack[i], + DumpPCAndFrameSizeAndSymbol(writer, writer_arg, stack[i], reinterpret_cast(stack[i]) - 1, frame_sizes[i], " "); } else { - DumpPCAndFrameSize(writerfn, writerfn_arg, stack[i], frame_sizes[i], - " "); + DumpPCAndFrameSize(writer, writer_arg, stack[i], frame_sizes[i], " "); } } if (min_dropped_frames > 0) { char buf[100]; snprintf(buf, sizeof(buf), " @ ... and at least %d more frames\n", min_dropped_frames); - writerfn(buf, writerfn_arg); + writer(buf, writer_arg); + } +} + +// Dump current stack trace as directed by writer. +// Make sure this function is not inlined to avoid skipping too many top frames. +ABSL_ATTRIBUTE_NOINLINE +void DumpStackTrace(int min_dropped_frames, int max_num_frames, + bool symbolize_stacktrace, OutputWriter* writer, + void* writer_arg) { + // Print stack trace + void* stack_buf[kDefaultDumpStackFramesLimit]; + void** stack = stack_buf; + int num_stack = kDefaultDumpStackFramesLimit; + size_t allocated_bytes = 0; + + if (num_stack >= max_num_frames) { + // User requested fewer frames than we already have space for. + num_stack = max_num_frames; + } else { + const size_t needed_bytes = + static_cast(max_num_frames) * sizeof(stack[0]); + void* p = Allocate(needed_bytes); + if (p != nullptr) { // We got the space. + num_stack = max_num_frames; + stack = reinterpret_cast(p); + allocated_bytes = needed_bytes; + } } + + int depth = absl::GetStackTrace(stack, num_stack, min_dropped_frames + 1); + for (int i = 0; i < depth; i++) { + if (symbolize_stacktrace) { + DumpPCAndSymbol(writer, writer_arg, stack[static_cast(i)], + " "); + } else { + DumpPC(writer, writer_arg, stack[static_cast(i)], " "); + } + } + + auto hook = GetDebugStackTraceHook(); + if (hook != nullptr) { + (*hook)(stack, depth, writer, writer_arg); + } + + if (allocated_bytes != 0) Deallocate(stack, allocated_bytes); } } // namespace debugging_internal diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/examine_stack.h b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/examine_stack.h index 393369131f..190af87f1c 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/examine_stack.h +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/examine_stack.h @@ -23,17 +23,39 @@ namespace absl { ABSL_NAMESPACE_BEGIN namespace debugging_internal { +// Type of function used for printing in stack trace dumping, etc. +// We avoid closures to keep things simple. +typedef void OutputWriter(const char*, void*); + +// RegisterDebugStackTraceHook() allows to register a single routine +// `hook` that is called each time DumpStackTrace() is called. +// `hook` may be called from a signal handler. +typedef void (*SymbolizeUrlEmitter)(void* const stack[], int depth, + OutputWriter* writer, void* writer_arg); + +// Registration of SymbolizeUrlEmitter for use inside of a signal handler. +// This is inherently unsafe and must be signal safe code. +void RegisterDebugStackTraceHook(SymbolizeUrlEmitter hook); +SymbolizeUrlEmitter GetDebugStackTraceHook(); + // Returns the program counter from signal context, or nullptr if // unknown. `vuc` is a ucontext_t*. We use void* to avoid the use of // ucontext_t on non-POSIX systems. -void* GetProgramCounter(void* vuc); +void* GetProgramCounter(void* const vuc); -// Uses `writerfn` to dump the program counter, stack trace, and stack +// Uses `writer` to dump the program counter, stack trace, and stack // frame sizes. 
-void DumpPCAndFrameSizesAndStackTrace( - void* pc, void* const stack[], int frame_sizes[], int depth, - int min_dropped_frames, bool symbolize_stacktrace, - void (*writerfn)(const char*, void*), void* writerfn_arg); +void DumpPCAndFrameSizesAndStackTrace(void* const pc, void* const stack[], + int frame_sizes[], int depth, + int min_dropped_frames, + bool symbolize_stacktrace, + OutputWriter* writer, void* writer_arg); + +// Dump current stack trace omitting the topmost `min_dropped_frames` stack +// frames. +void DumpStackTrace(int min_dropped_frames, int max_num_frames, + bool symbolize_stacktrace, OutputWriter* writer, + void* writer_arg); } // namespace debugging_internal ABSL_NAMESPACE_END diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_aarch64-inl.inc b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_aarch64-inl.inc index 4f9db9d66b..71cdaf0940 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_aarch64-inl.inc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_aarch64-inl.inc @@ -19,7 +19,7 @@ #include "absl/debugging/internal/vdso_support.h" // a no-op on non-elf or non-glibc systems #include "absl/debugging/stacktrace.h" -static const uintptr_t kUnknownFrameSize = 0; +static const size_t kUnknownFrameSize = 0; #if defined(__linux__) // Returns the address of the VDSO __kernel_rt_sigreturn function, if present. @@ -65,11 +65,12 @@ static const unsigned char* GetKernelRtSigreturnAddress() { // Compute the size of a stack frame in [low..high). We assume that // low < high. Return size of kUnknownFrameSize. template -static inline uintptr_t ComputeStackFrameSize(const T* low, - const T* high) { +static inline size_t ComputeStackFrameSize(const T* low, + const T* high) { const char* low_char_ptr = reinterpret_cast(low); const char* high_char_ptr = reinterpret_cast(high); - return low < high ? high_char_ptr - low_char_ptr : kUnknownFrameSize; + return low < high ? static_cast(high_char_ptr - low_char_ptr) + : kUnknownFrameSize; } // Given a pointer to a stack frame, locate and return the calling @@ -110,15 +111,15 @@ static void **NextStackFrame(void **old_frame_pointer, const void *uc) { } #endif - // aarch64 ABI requires stack pointer to be 16-byte-aligned. - if ((reinterpret_cast(new_frame_pointer) & 15) != 0) + // The frame pointer should be 8-byte aligned. + if ((reinterpret_cast(new_frame_pointer) & 7) != 0) return nullptr; // Check frame size. In strict mode, we assume frames to be under // 100,000 bytes. In non-strict mode, we relax the limit to 1MB. if (check_frame_size) { - const uintptr_t max_size = STRICT_UNWINDING ? 100000 : 1000000; - const uintptr_t frame_size = + const size_t max_size = STRICT_UNWINDING ? 
100000 : 1000000; + const size_t frame_size = ComputeStackFrameSize(old_frame_pointer, new_frame_pointer); if (frame_size == kUnknownFrameSize || frame_size > max_size) return nullptr; @@ -165,7 +166,8 @@ static int UnwindImpl(void** result, int* sizes, int max_depth, int skip_count, } else { result[n] = prev_return_address; if (IS_STACK_FRAMES) { - sizes[n] = ComputeStackFrameSize(frame_pointer, next_frame_pointer); + sizes[n] = static_cast( + ComputeStackFrameSize(frame_pointer, next_frame_pointer)); } n++; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_generic-inl.inc b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_generic-inl.inc index b94c61233b..5fa169a7ec 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_generic-inl.inc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_generic-inl.inc @@ -43,6 +43,17 @@ static __thread int recursive = 0; // glibc implementation itself will trigger malloc the first time it is called. // As such, we suppress usage of backtrace during this early stage of execution. static std::atomic disable_stacktraces(true); // Disabled until healthy. +// Waiting until static initializers run seems to be late enough. +// This file is included into stacktrace.cc so this will only run once. +ABSL_ATTRIBUTE_UNUSED static int stacktraces_enabler = []() { + void* unused_stack[1]; + // Force the first backtrace to happen early to get the one-time shared lib + // loading (allocation) out of the way. After the first call it is much safer + // to use backtrace from a signal handler if we crash somewhere later. + backtrace(unused_stack, 1); + disable_stacktraces.store(false, std::memory_order_relaxed); + return 0; +}(); template static int UnwindImpl(void** result, int* sizes, int max_depth, int skip_count, @@ -69,7 +80,7 @@ static int UnwindImpl(void** result, int* sizes, int max_depth, int skip_count, if (IS_STACK_FRAMES) { // No implementation for finding out the stack frame sizes yet. - memset(sizes, 0, sizeof(*sizes) * result_count); + memset(sizes, 0, sizeof(*sizes) * static_cast(result_count)); } if (min_dropped_frames != nullptr) { if (size - skip_count - max_depth > 0) { @@ -88,7 +99,7 @@ namespace absl { ABSL_NAMESPACE_BEGIN namespace debugging_internal { bool StackTraceWorksForTest() { - return false; + return true; } } // namespace debugging_internal ABSL_NAMESPACE_END diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_riscv-inl.inc b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_riscv-inl.inc index b4bdb5f15a..20183fa321 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_riscv-inl.inc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_riscv-inl.inc @@ -30,56 +30,14 @@ #include #include #include +#include +#include #include "absl/base/attributes.h" -#include "absl/debugging/internal/address_is_readable.h" -#include "absl/debugging/internal/vdso_support.h" #include "absl/debugging/stacktrace.h" static const uintptr_t kUnknownFrameSize = 0; -#if defined(__linux__) -// Returns the address of the VDSO __kernel_rt_sigreturn function, if present. 
-static const unsigned char *GetKernelRtSigreturnAddress() { - constexpr uintptr_t kImpossibleAddress = 0; - ABSL_CONST_INIT static std::atomic memoized(kImpossibleAddress); - uintptr_t address = memoized.load(std::memory_order_relaxed); - if (address != kImpossibleAddress) { - return reinterpret_cast(address); - } - - address = reinterpret_cast(nullptr); - -#if ABSL_HAVE_VDSO_SUPPORT - absl::debugging_internal::VDSOSupport vdso; - if (vdso.IsPresent()) { - absl::debugging_internal::VDSOSupport::SymbolInfo symbol_info; - // Symbol versioning pulled from arch/riscv/kernel/vdso/vdso.lds at v5.10. - auto lookup = [&](int type) { - return vdso.LookupSymbol("__kernel_rt_sigreturn", "LINUX_4.15", type, - &symbol_info); - }; - if ((!lookup(STT_FUNC) && !lookup(STT_NOTYPE)) || - symbol_info.address == nullptr) { - // Unexpected: VDSO is present, yet the expected symbol is missing or - // null. - assert(false && "VDSO is present, but doesn't have expected symbol"); - } else { - if (reinterpret_cast(symbol_info.address) != - kImpossibleAddress) { - address = reinterpret_cast(symbol_info.address); - } else { - assert(false && "VDSO returned invalid address"); - } - } - } -#endif - - memoized.store(address, std::memory_order_relaxed); - return reinterpret_cast(address); -} -#endif // __linux__ - // Compute the size of a stack frame in [low..high). We assume that low < high. // Return size of kUnknownFrameSize. template @@ -96,7 +54,8 @@ static inline uintptr_t ComputeStackFrameSize(const T *low, const T *high) { template ABSL_ATTRIBUTE_NO_SANITIZE_ADDRESS // May read random elements from stack. ABSL_ATTRIBUTE_NO_SANITIZE_MEMORY // May read random elements from stack. -static void ** NextStackFrame(void **old_frame_pointer, const void *uc) { +static void ** NextStackFrame(void **old_frame_pointer, const void *uc, + const std::pair range) { // . // . // . @@ -114,55 +73,43 @@ static void ** NextStackFrame(void **old_frame_pointer, const void *uc) { // $sp ->| ... | // +----------------+ void **new_frame_pointer = reinterpret_cast(old_frame_pointer[-2]); - bool check_frame_size = true; - -#if defined(__linux__) - if (WITH_CONTEXT && uc != nullptr) { - // Check to see if next frame's return address is __kernel_rt_sigreturn. - if (old_frame_pointer[-1] == GetKernelRtSigreturnAddress()) { - const ucontext_t *ucv = static_cast(uc); - // old_frame_pointer is not suitable for unwinding, look at ucontext to - // discover frame pointer before signal. - // - // RISCV ELF psABI has the frame pointer at x8/fp/s0. - // -- RISCV psABI Table 18.2 - void **const pre_signal_frame_pointer = - reinterpret_cast(ucv->uc_mcontext.__gregs[8]); - - // Check the alleged frame pointer is actually readable. This is to - // prevent "double fault" in case we hit the first fault due to stack - // corruption. - if (!absl::debugging_internal::AddressIsReadable( - pre_signal_frame_pointer)) - return nullptr; - - // Alleged frame pointer is readable, use it for further unwinding. - new_frame_pointer = pre_signal_frame_pointer; - - // Skip frame size check if we return from a signal. We may be using an - // alterate stack for signals. - check_frame_size = false; - } - } -#endif + uintptr_t frame_pointer = reinterpret_cast(new_frame_pointer); // The RISCV ELF psABI mandates that the stack pointer is always 16-byte // aligned. - // FIXME(abdulras) this doesn't hold for ILP32E which only mandates a 4-byte + // TODO(#1236) this doesn't hold for ILP32E which only mandates a 4-byte // alignment. 
- if ((reinterpret_cast(new_frame_pointer) & 15) != 0) + if (frame_pointer & 15) return nullptr; + // If the new frame pointer matches the signal context, avoid terminating + // early to deal with alternate signal stacks. + if (WITH_CONTEXT) + if (const ucontext_t *ucv = static_cast(uc)) + // RISCV ELF psABI has the frame pointer at x8/fp/s0. + // -- RISCV psABI Table 18.2 + if (ucv->uc_mcontext.__gregs[8] == frame_pointer) + return new_frame_pointer; + // Check frame size. In strict mode, we assume frames to be under 100,000 // bytes. In non-strict mode, we relax the limit to 1MB. - if (check_frame_size) { - const uintptr_t max_size = STRICT_UNWINDING ? 100000 : 1000000; - const uintptr_t frame_size = - ComputeStackFrameSize(old_frame_pointer, new_frame_pointer); - if (frame_size == kUnknownFrameSize || frame_size > max_size) + const uintptr_t max_size = STRICT_UNWINDING ? 100000 : 1000000; + const uintptr_t frame_size = + ComputeStackFrameSize(old_frame_pointer, new_frame_pointer); + if (frame_size == kUnknownFrameSize) { + if (STRICT_UNWINDING) + return nullptr; + + // In non-strict mode permit non-contiguous stacks (e.g. alternate signal + // frame handling). + if (reinterpret_cast(new_frame_pointer) < range.first || + reinterpret_cast(new_frame_pointer) > range.second) return nullptr; } + if (frame_size > max_size) + return nullptr; + return new_frame_pointer; } @@ -171,44 +118,47 @@ ABSL_ATTRIBUTE_NO_SANITIZE_ADDRESS // May read random elements from stack. ABSL_ATTRIBUTE_NO_SANITIZE_MEMORY // May read random elements from stack. static int UnwindImpl(void **result, int *sizes, int max_depth, int skip_count, const void *ucp, int *min_dropped_frames) { + // The `frame_pointer` that is computed here points to the top of the frame. + // The two words preceding the address are the return address and the previous + // frame pointer. #if defined(__GNUC__) void **frame_pointer = reinterpret_cast(__builtin_frame_address(0)); #else #error reading stack pointer not yet supported on this platform #endif - skip_count++; // Skip the frame for this function. - int n = 0; - - // The `frame_pointer` that is computed here points to the top of the frame. - // The two words preceding the address are the return address and the previous - // frame pointer. To find a PC value associated with the current frame, we - // need to go down a level in the call chain. So we remember the return - // address of the last frame seen. This does not work for the first stack - // frame, which belongs to `UnwindImp()` but we skip the frame for - // `UnwindImp()` anyway. - void *prev_return_address = nullptr; + std::pair stack = { + // assume that the first page is not the stack. + static_cast(sysconf(_SC_PAGESIZE)), + std::numeric_limits::max() - sizeof(void *) + }; + int n = 0; + void *return_address = nullptr; while (frame_pointer && n < max_depth) { - // The absl::GetStackFrames routine si called when we are in some + return_address = frame_pointer[-1]; + + // The absl::GetStackFrames routine is called when we are in some // informational context (the failure signal handler for example). Use the // non-strict unwinding rules to produce a stack trace that is as complete // as possible (even if it contains a few bogus entries in some rare cases). 
void **next_frame_pointer = - NextStackFrame(frame_pointer, ucp); + NextStackFrame(frame_pointer, ucp, + stack); if (skip_count > 0) { skip_count--; } else { - result[n] = prev_return_address; + result[n] = return_address; if (IS_STACK_FRAMES) { sizes[n] = ComputeStackFrameSize(frame_pointer, next_frame_pointer); } n++; } - prev_return_address = frame_pointer[-1]; + frame_pointer = next_frame_pointer; } + if (min_dropped_frames != nullptr) { // Implementation detail: we clamp the max of frames we are willing to // count, so as not to spend too much time in the loop below. @@ -221,10 +171,12 @@ static int UnwindImpl(void **result, int *sizes, int max_depth, int skip_count, num_dropped_frames++; } frame_pointer = - NextStackFrame(frame_pointer, ucp); + NextStackFrame(frame_pointer, ucp, + stack); } *min_dropped_frames = num_dropped_frames; } + return n; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_win32-inl.inc b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_win32-inl.inc index 1c666c8b56..ef2b973ec3 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_win32-inl.inc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_win32-inl.inc @@ -63,11 +63,12 @@ static RtlCaptureStackBackTrace_Function* const RtlCaptureStackBackTrace_fn = template static int UnwindImpl(void** result, int* sizes, int max_depth, int skip_count, const void*, int* min_dropped_frames) { - int n = 0; - if (!RtlCaptureStackBackTrace_fn) { - // can't find a stacktrace with no function to call + USHORT n = 0; + if (!RtlCaptureStackBackTrace_fn || skip_count < 0 || max_depth < 0) { + // can't get a stacktrace with no function/invalid args } else { - n = (int)RtlCaptureStackBackTrace_fn(skip_count + 2, max_depth, result, 0); + n = RtlCaptureStackBackTrace_fn(static_cast(skip_count) + 2, + static_cast(max_depth), result, 0); } if (IS_STACK_FRAMES) { // No implementation for finding out the stack frame sizes yet. diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_x86-inl.inc b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_x86-inl.inc index 1b5d8235a8..2f8bf428fb 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_x86-inl.inc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/stacktrace_x86-inl.inc @@ -29,14 +29,13 @@ #include #include +#include "absl/base/attributes.h" #include "absl/base/macros.h" #include "absl/base/port.h" #include "absl/debugging/internal/address_is_readable.h" #include "absl/debugging/internal/vdso_support.h" // a no-op on non-elf or non-glibc systems #include "absl/debugging/stacktrace.h" -#include "absl/base/internal/raw_logging.h" - using absl::debugging_internal::AddressIsReadable; #if defined(__linux__) && defined(__i386__) @@ -140,13 +139,14 @@ static uintptr_t GetFP(const void *vuc) { // TODO(bcmills): -momit-leaf-frame-pointer is currently the default // behavior when building with clang. Talk to the C++ toolchain team about // fixing that. - if (bp >= sp && bp - sp <= kMaxFrameBytes) return bp; + if (bp >= sp && bp - sp <= kMaxFrameBytes) + return static_cast(bp); // If bp isn't a plausible frame pointer, return the stack pointer instead. // If we're lucky, it points to the start of a stack frame; otherwise, we'll // get one frame of garbage in the stack trace and fail the sanity check on // the next iteration. 
- return sp; + return static_cast(sp); } #endif return 0; @@ -310,7 +310,8 @@ static int UnwindImpl(void **result, int *sizes, int max_depth, int skip_count, int n = 0; void **fp = reinterpret_cast(__builtin_frame_address(0)); - size_t stack_low = getpagesize(); // Assume that the first page is not stack. + // Assume that the first page is not stack. + size_t stack_low = static_cast(getpagesize()); size_t stack_high = std::numeric_limits::max() - sizeof(void *); while (fp && n < max_depth) { @@ -327,7 +328,9 @@ static int UnwindImpl(void **result, int *sizes, int max_depth, int skip_count, result[n] = *(fp + 1); if (IS_STACK_FRAMES) { if (next_fp > fp) { - sizes[n] = (uintptr_t)next_fp - (uintptr_t)fp; + sizes[n] = static_cast( + reinterpret_cast(next_fp) - + reinterpret_cast(fp)); } else { // A frame-size of 0 is used to indicate unknown frame size. sizes[n] = 0; diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/vdso_support.cc b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/vdso_support.cc index 8a015d5580..8a588eaffe 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/vdso_support.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/internal/vdso_support.cc @@ -33,7 +33,7 @@ #endif #include -#if defined(__GLIBC__) && \ +#if !defined(__UCLIBC__) && defined(__GLIBC__) && \ (__GLIBC__ > 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ >= 16)) #define ABSL_HAVE_GETAUXVAL #endif @@ -50,6 +50,10 @@ #define AT_SYSINFO_EHDR 33 // for crosstoolv10 #endif +#if defined(__NetBSD__) +using Elf32_auxv_t = Aux32Info; +using Elf64_auxv_t = Aux64Info; +#endif #if defined(__FreeBSD__) #if defined(__ELF_WORD_SIZE) && __ELF_WORD_SIZE == 64 using Elf64_auxv_t = Elf64_Auxinfo; @@ -65,7 +69,9 @@ ABSL_CONST_INIT std::atomic VDSOSupport::vdso_base_( debugging_internal::ElfMemImage::kInvalidBase); -std::atomic VDSOSupport::getcpu_fn_(&InitAndGetCPU); +ABSL_CONST_INIT std::atomic VDSOSupport::getcpu_fn_( + &InitAndGetCPU); + VDSOSupport::VDSOSupport() // If vdso_base_ is still set to kInvalidBase, we got here // before VDSOSupport::Init has been called. Call it now. @@ -106,8 +112,13 @@ const void *VDSOSupport::Init() { ElfW(auxv_t) aux; while (read(fd, &aux, sizeof(aux)) == sizeof(aux)) { if (aux.a_type == AT_SYSINFO_EHDR) { +#if defined(__NetBSD__) + vdso_base_.store(reinterpret_cast(aux.a_v), + std::memory_order_relaxed); +#else vdso_base_.store(reinterpret_cast(aux.a_un.a_val), std::memory_order_relaxed); +#endif break; } } @@ -182,8 +193,9 @@ long VDSOSupport::InitAndGetCPU(unsigned *cpu, // NOLINT(runtime/int) ABSL_ATTRIBUTE_NO_SANITIZE_MEMORY int GetCPU() { unsigned cpu; - int ret_code = (*VDSOSupport::getcpu_fn_)(&cpu, nullptr, nullptr); - return ret_code == 0 ? cpu : ret_code; + long ret_code = // NOLINT(runtime/int) + (*VDSOSupport::getcpu_fn_)(&cpu, nullptr, nullptr); + return ret_code == 0 ? static_cast(cpu) : static_cast(ret_code); } } // namespace debugging_internal diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/leak_check.cc b/TMessagesProj/jni/voip/webrtc/absl/debugging/leak_check.cc index 764ca0ad00..195e82bf16 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/leak_check.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/leak_check.cc @@ -11,29 +11,19 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. - +// // Wrappers around lsan_interface functions. 
-// When lsan is not linked in, these functions are not available, -// therefore Abseil code which depends on these functions is conditioned on the -// definition of LEAK_SANITIZER. -#include "absl/base/attributes.h" -#include "absl/debugging/leak_check.h" +// +// These are always-available run-time functions manipulating the LeakSanitizer, +// even when the lsan_interface (and LeakSanitizer) is not available. When +// LeakSanitizer is not linked in, these functions become no-op stubs. -#ifndef LEAK_SANITIZER +#include "absl/debugging/leak_check.h" -namespace absl { -ABSL_NAMESPACE_BEGIN -bool HaveLeakSanitizer() { return false; } -bool LeakCheckerIsActive() { return false; } -void DoIgnoreLeak(const void*) { } -void RegisterLivePointers(const void*, size_t) { } -void UnRegisterLivePointers(const void*, size_t) { } -LeakCheckDisabler::LeakCheckDisabler() { } -LeakCheckDisabler::~LeakCheckDisabler() { } -ABSL_NAMESPACE_END -} // namespace absl +#include "absl/base/attributes.h" +#include "absl/base/config.h" -#else +#if defined(ABSL_HAVE_LEAK_SANITIZER) #include @@ -66,4 +56,18 @@ LeakCheckDisabler::~LeakCheckDisabler() { __lsan_enable(); } ABSL_NAMESPACE_END } // namespace absl -#endif // LEAK_SANITIZER +#else // defined(ABSL_HAVE_LEAK_SANITIZER) + +namespace absl { +ABSL_NAMESPACE_BEGIN +bool HaveLeakSanitizer() { return false; } +bool LeakCheckerIsActive() { return false; } +void DoIgnoreLeak(const void*) { } +void RegisterLivePointers(const void*, size_t) { } +void UnRegisterLivePointers(const void*, size_t) { } +LeakCheckDisabler::LeakCheckDisabler() { } +LeakCheckDisabler::~LeakCheckDisabler() { } +ABSL_NAMESPACE_END +} // namespace absl + +#endif // defined(ABSL_HAVE_LEAK_SANITIZER) diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/leak_check.h b/TMessagesProj/jni/voip/webrtc/absl/debugging/leak_check.h index 5fc2b052e4..eff162f67f 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/leak_check.h +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/leak_check.h @@ -24,7 +24,24 @@ // Note: this leak checking API is not yet supported in MSVC. // Leak checking is enabled by default in all ASan builds. // -// See https://github.com/google/sanitizers/wiki/AddressSanitizerLeakSanitizer +// https://clang.llvm.org/docs/LeakSanitizer.html +// https://github.com/google/sanitizers/wiki/AddressSanitizerLeakSanitizer +// +// GCC and Clang both automatically enable LeakSanitizer when AddressSanitizer +// is enabled. To use the mode, simply pass `-fsanitize=address` to both the +// compiler and linker. An example Bazel command could be +// +// $ bazel test --copt=-fsanitize=address --linkopt=-fsanitize=address ... +// +// GCC and Clang auto support a standalone LeakSanitizer mode (a mode which does +// not also use AddressSanitizer). To use the mode, simply pass +// `-fsanitize=leak` to both the compiler and linker. Since GCC does not +// currently provide a way of detecting this mode at compile-time, GCC users +// must also pass -DLEAK_SANIITIZER to the compiler. An example Bazel command +// could be +// +// $ bazel test --copt=-DLEAK_SANITIZER --copt=-fsanitize=leak +// --linkopt=-fsanitize=leak ... 
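Reviewer note: for orientation, this is how callers exercise the API whose stubs are being rearranged in this hunk. A small usage sketch; the absl::* calls are the public leak-check entry points, and per the new comment they degrade to no-ops when no LeakSanitizer is linked in:

    #include <cstdio>

    #include "absl/debugging/leak_check.h"

    int* AllocateAndForget() { return new int(42); }

    int main() {
      std::printf("leak checker active: %d\n",
                  static_cast<int>(absl::LeakCheckerIsActive()));

      // Tell the checker that this particular allocation is intentional.
      absl::IgnoreLeak(AllocateAndForget());

      {
        // Every allocation made while a LeakCheckDisabler is alive is ignored.
        absl::LeakCheckDisabler disabler;
        (void)AllocateAndForget();
      }
      return 0;
    }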
// // ----------------------------------------------------------------------------- #ifndef ABSL_DEBUGGING_LEAK_CHECK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/leak_check_disable.cc b/TMessagesProj/jni/voip/webrtc/absl/debugging/leak_check_disable.cc deleted file mode 100644 index 924d6e3d54..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/leak_check_disable.cc +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2017 The Abseil Authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Disable LeakSanitizer when this file is linked in. -// This function overrides __lsan_is_turned_off from sanitizer/lsan_interface.h -extern "C" int __lsan_is_turned_off(); -extern "C" int __lsan_is_turned_off() { - return 1; -} diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/symbolize_darwin.inc b/TMessagesProj/jni/voip/webrtc/absl/debugging/symbolize_darwin.inc index 443ce9efc4..cf63d1919b 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/symbolize_darwin.inc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/symbolize_darwin.inc @@ -83,13 +83,14 @@ bool Symbolize(const void* pc, char* out, int out_size) { memmove(out, tmp_buf, len + 1); } } else { - strncpy(out, symbol.c_str(), out_size); + strncpy(out, symbol.c_str(), static_cast(out_size)); } if (out[out_size - 1] != '\0') { // strncpy() does not '\0' terminate when it truncates. static constexpr char kEllipsis[] = "..."; - int ellipsis_size = std::min(sizeof(kEllipsis) - 1, out_size - 1); + size_t ellipsis_size = + std::min(sizeof(kEllipsis) - 1, static_cast(out_size) - 1); memcpy(out + out_size - ellipsis_size - 1, kEllipsis, ellipsis_size); out[out_size - 1] = '\0'; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/symbolize_elf.inc b/TMessagesProj/jni/voip/webrtc/absl/debugging/symbolize_elf.inc index ddccd59003..ffb4eecfb9 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/symbolize_elf.inc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/symbolize_elf.inc @@ -205,7 +205,8 @@ struct ObjFile { // PT_LOAD program header describing executable code. // Normally we expect just one, but SWIFT binaries have two. - std::array phdr; + // CUDA binaries have 3 (see cr/473913254 description). + std::array phdr; }; // Build 4-way associative cache for symbols. 
Within each cache line, symbols @@ -252,21 +253,21 @@ class AddrMap { public: AddrMap() : size_(0), allocated_(0), obj_(nullptr) {} ~AddrMap() { base_internal::LowLevelAlloc::Free(obj_); } - int Size() const { return size_; } - ObjFile *At(int i) { return &obj_[i]; } + size_t Size() const { return size_; } + ObjFile *At(size_t i) { return &obj_[i]; } ObjFile *Add(); void Clear(); private: - int size_; // count of valid elements (<= allocated_) - int allocated_; // count of allocated elements - ObjFile *obj_; // array of allocated_ elements + size_t size_; // count of valid elements (<= allocated_) + size_t allocated_; // count of allocated elements + ObjFile *obj_; // array of allocated_ elements AddrMap(const AddrMap &) = delete; AddrMap &operator=(const AddrMap &) = delete; }; void AddrMap::Clear() { - for (int i = 0; i != size_; i++) { + for (size_t i = 0; i != size_; i++) { At(i)->~ObjFile(); } size_ = 0; @@ -274,7 +275,7 @@ void AddrMap::Clear() { ObjFile *AddrMap::Add() { if (size_ == allocated_) { - int new_allocated = allocated_ * 2 + 50; + size_t new_allocated = allocated_ * 2 + 50; ObjFile *new_obj_ = static_cast(base_internal::LowLevelAlloc::AllocWithArena( new_allocated * sizeof(*new_obj_), SigSafeArena())); @@ -300,7 +301,7 @@ class Symbolizer { private: char *CopyString(const char *s) { - int len = strlen(s); + size_t len = strlen(s); char *dst = static_cast( base_internal::LowLevelAlloc::AllocWithArena(len + 1, SigSafeArena())); ABSL_RAW_CHECK(dst != nullptr, "out of memory"); @@ -321,8 +322,8 @@ class Symbolizer { FindSymbolResult GetSymbolFromObjectFile(const ObjFile &obj, const void *const pc, const ptrdiff_t relocation, - char *out, int out_size, - char *tmp_buf, int tmp_buf_size); + char *out, size_t out_size, + char *tmp_buf, size_t tmp_buf_size); const char *GetUncachedSymbol(const void *pc); enum { @@ -353,11 +354,11 @@ static std::atomic g_cached_symbolizer; } // namespace -static int SymbolizerSize() { +static size_t SymbolizerSize() { #if defined(__wasm__) || defined(__asmjs__) - int pagesize = getpagesize(); + auto pagesize = static_cast(getpagesize()); #else - int pagesize = sysconf(_SC_PAGESIZE); + auto pagesize = static_cast(sysconf(_SC_PAGESIZE)); #endif return ((sizeof(Symbolizer) - 1) / pagesize + 1) * pagesize; } @@ -429,7 +430,7 @@ static ssize_t ReadPersistent(int fd, void *buf, size_t count) { if (len == 0) { // Reached EOF. break; } - num_bytes += len; + num_bytes += static_cast(len); } SAFE_ASSERT(num_bytes <= count); return static_cast(num_bytes); @@ -442,8 +443,8 @@ static ssize_t ReadFromOffset(const int fd, void *buf, const size_t count, const off_t offset) { off_t off = lseek(fd, offset, SEEK_SET); if (off == (off_t)-1) { - ABSL_RAW_LOG(WARNING, "lseek(%d, %ju, SEEK_SET) failed: errno=%d", fd, - static_cast(offset), errno); + ABSL_RAW_LOG(WARNING, "lseek(%d, %jd, SEEK_SET) failed: errno=%d", fd, + static_cast(offset), errno); return -1; } return ReadPersistent(fd, buf, count); @@ -478,29 +479,37 @@ static int FileGetElfType(const int fd) { // inlined. 
static ABSL_ATTRIBUTE_NOINLINE bool GetSectionHeaderByType( const int fd, ElfW(Half) sh_num, const off_t sh_offset, ElfW(Word) type, - ElfW(Shdr) * out, char *tmp_buf, int tmp_buf_size) { + ElfW(Shdr) * out, char *tmp_buf, size_t tmp_buf_size) { ElfW(Shdr) *buf = reinterpret_cast(tmp_buf); - const int buf_entries = tmp_buf_size / sizeof(buf[0]); - const int buf_bytes = buf_entries * sizeof(buf[0]); + const size_t buf_entries = tmp_buf_size / sizeof(buf[0]); + const size_t buf_bytes = buf_entries * sizeof(buf[0]); - for (int i = 0; i < sh_num;) { - const ssize_t num_bytes_left = (sh_num - i) * sizeof(buf[0]); - const ssize_t num_bytes_to_read = + for (size_t i = 0; static_cast(i) < sh_num;) { + const size_t num_bytes_left = + (static_cast(sh_num) - i) * sizeof(buf[0]); + const size_t num_bytes_to_read = (buf_bytes > num_bytes_left) ? num_bytes_left : buf_bytes; - const off_t offset = sh_offset + i * sizeof(buf[0]); + const off_t offset = sh_offset + static_cast(i * sizeof(buf[0])); const ssize_t len = ReadFromOffset(fd, buf, num_bytes_to_read, offset); - if (len % sizeof(buf[0]) != 0) { + if (len < 0) { ABSL_RAW_LOG( WARNING, - "Reading %zd bytes from offset %ju returned %zd which is not a " + "Reading %zu bytes from offset %ju returned %zd which is negative.", + num_bytes_to_read, static_cast(offset), len); + return false; + } + if (static_cast(len) % sizeof(buf[0]) != 0) { + ABSL_RAW_LOG( + WARNING, + "Reading %zu bytes from offset %jd returned %zd which is not a " "multiple of %zu.", - num_bytes_to_read, static_cast(offset), len, + num_bytes_to_read, static_cast(offset), len, sizeof(buf[0])); return false; } - const ssize_t num_headers_in_buf = len / sizeof(buf[0]); + const size_t num_headers_in_buf = static_cast(len) / sizeof(buf[0]); SAFE_ASSERT(num_headers_in_buf <= buf_entries); - for (int j = 0; j < num_headers_in_buf; ++j) { + for (size_t j = 0; j < num_headers_in_buf; ++j) { if (buf[j].sh_type == type) { *out = buf[j]; return true; @@ -524,8 +533,8 @@ bool ForEachSection(int fd, } ElfW(Shdr) shstrtab; - off_t shstrtab_offset = - (elf_header.e_shoff + elf_header.e_shentsize * elf_header.e_shstrndx); + off_t shstrtab_offset = static_cast(elf_header.e_shoff) + + elf_header.e_shentsize * elf_header.e_shstrndx; if (!ReadFromOffsetExact(fd, &shstrtab, sizeof(shstrtab), shstrtab_offset)) { return false; } @@ -533,22 +542,23 @@ bool ForEachSection(int fd, for (int i = 0; i < elf_header.e_shnum; ++i) { ElfW(Shdr) out; off_t section_header_offset = - (elf_header.e_shoff + elf_header.e_shentsize * i); + static_cast(elf_header.e_shoff) + elf_header.e_shentsize * i; if (!ReadFromOffsetExact(fd, &out, sizeof(out), section_header_offset)) { return false; } - off_t name_offset = shstrtab.sh_offset + out.sh_name; + off_t name_offset = static_cast(shstrtab.sh_offset) + out.sh_name; char header_name[kMaxSectionNameLen]; ssize_t n_read = ReadFromOffset(fd, &header_name, kMaxSectionNameLen, name_offset); - if (n_read == -1) { + if (n_read < 0) { return false; } else if (n_read > kMaxSectionNameLen) { // Long read? 
return false; } - absl::string_view name(header_name, strnlen(header_name, n_read)); + absl::string_view name(header_name, + strnlen(header_name, static_cast(n_read))); if (!callback(name, out)) { break; } @@ -575,19 +585,19 @@ bool GetSectionHeaderByName(int fd, const char *name, size_t name_len, } ElfW(Shdr) shstrtab; - off_t shstrtab_offset = - (elf_header.e_shoff + elf_header.e_shentsize * elf_header.e_shstrndx); + off_t shstrtab_offset = static_cast(elf_header.e_shoff) + + elf_header.e_shentsize * elf_header.e_shstrndx; if (!ReadFromOffsetExact(fd, &shstrtab, sizeof(shstrtab), shstrtab_offset)) { return false; } for (int i = 0; i < elf_header.e_shnum; ++i) { off_t section_header_offset = - (elf_header.e_shoff + elf_header.e_shentsize * i); + static_cast(elf_header.e_shoff) + elf_header.e_shentsize * i; if (!ReadFromOffsetExact(fd, out, sizeof(*out), section_header_offset)) { return false; } - off_t name_offset = shstrtab.sh_offset + out->sh_name; + off_t name_offset = static_cast(shstrtab.sh_offset) + out->sh_name; ssize_t n_read = ReadFromOffset(fd, &header_name, name_len, name_offset); if (n_read < 0) { return false; @@ -645,10 +655,10 @@ static bool InSection(const void *address, const ElfW(Shdr) * section) { } static const char *ComputeOffset(const char *base, ptrdiff_t offset) { - // Note: cast to uintptr_t to avoid undefined behavior when base evaluates to + // Note: cast to intptr_t to avoid undefined behavior when base evaluates to // zero and offset is non-zero. - return reinterpret_cast( - reinterpret_cast(base) + offset); + return reinterpret_cast(reinterpret_cast(base) + + offset); } // Read a symbol table and look for the symbol containing the @@ -661,18 +671,18 @@ static const char *ComputeOffset(const char *base, ptrdiff_t offset) { // To keep stack consumption low, we would like this function to not get // inlined. static ABSL_ATTRIBUTE_NOINLINE FindSymbolResult FindSymbol( - const void *const pc, const int fd, char *out, int out_size, + const void *const pc, const int fd, char *out, size_t out_size, ptrdiff_t relocation, const ElfW(Shdr) * strtab, const ElfW(Shdr) * symtab, - const ElfW(Shdr) * opd, char *tmp_buf, int tmp_buf_size) { + const ElfW(Shdr) * opd, char *tmp_buf, size_t tmp_buf_size) { if (symtab == nullptr) { return SYMBOL_NOT_FOUND; } // Read multiple symbols at once to save read() calls. 
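Reviewer note: the FindSymbol loop above keeps the existing chunked-read pattern while moving its arithmetic to size_t. A generic sketch of that pattern under the same constraints (fixed scratch buffer, plain POSIX lseek/read, hypothetical helper name; not the ELF-specific code):

    #include <sys/types.h>
    #include <unistd.h>

    #include <algorithm>
    #include <cstddef>

    // Visits `num_entries` fixed-size records starting at `offset`, reading at
    // most `buf_size` bytes per read() call. Returns false on a failed or
    // misaligned read.
    template <typename Visitor>
    bool ForEachEntryChunk(int fd, off_t offset, size_t entry_size,
                           size_t num_entries, char* buf, size_t buf_size,
                           Visitor visit) {
      const size_t buf_entries = buf_size / entry_size;
      if (buf_entries == 0) return false;
      for (size_t i = 0; i < num_entries;) {
        const size_t chunk = std::min(num_entries - i, buf_entries);
        const off_t pos = offset + static_cast<off_t>(i * entry_size);
        if (lseek(fd, pos, SEEK_SET) == static_cast<off_t>(-1)) return false;
        const ssize_t n = read(fd, buf, chunk * entry_size);
        if (n <= 0 || static_cast<size_t>(n) % entry_size != 0) return false;
        const size_t got = static_cast<size_t>(n) / entry_size;
        visit(buf, got);  // hand the caller `got` complete records
        i += got;
      }
      return true;
    }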
ElfW(Sym) *buf = reinterpret_cast(tmp_buf); - const int buf_entries = tmp_buf_size / sizeof(buf[0]); + const size_t buf_entries = tmp_buf_size / sizeof(buf[0]); - const int num_symbols = symtab->sh_size / symtab->sh_entsize; + const size_t num_symbols = symtab->sh_size / symtab->sh_entsize; // On platforms using an .opd section (PowerPC & IA64), a function symbol // has the address of a function descriptor, which contains the real @@ -687,16 +697,19 @@ static ABSL_ATTRIBUTE_NOINLINE FindSymbolResult FindSymbol( ElfW(Sym) best_match; SafeMemZero(&best_match, sizeof(best_match)); bool found_match = false; - for (int i = 0; i < num_symbols;) { - off_t offset = symtab->sh_offset + i * symtab->sh_entsize; - const int num_remaining_symbols = num_symbols - i; - const int entries_in_chunk = std::min(num_remaining_symbols, buf_entries); - const int bytes_in_chunk = entries_in_chunk * sizeof(buf[0]); + for (size_t i = 0; i < num_symbols;) { + off_t offset = + static_cast(symtab->sh_offset + i * symtab->sh_entsize); + const size_t num_remaining_symbols = num_symbols - i; + const size_t entries_in_chunk = + std::min(num_remaining_symbols, buf_entries); + const size_t bytes_in_chunk = entries_in_chunk * sizeof(buf[0]); const ssize_t len = ReadFromOffset(fd, buf, bytes_in_chunk, offset); - SAFE_ASSERT(len % sizeof(buf[0]) == 0); - const ssize_t num_symbols_in_buf = len / sizeof(buf[0]); + SAFE_ASSERT(len >= 0); + SAFE_ASSERT(static_cast(len) % sizeof(buf[0]) == 0); + const size_t num_symbols_in_buf = static_cast(len) / sizeof(buf[0]); SAFE_ASSERT(num_symbols_in_buf <= entries_in_chunk); - for (int j = 0; j < num_symbols_in_buf; ++j) { + for (size_t j = 0; j < num_symbols_in_buf; ++j) { const ElfW(Sym) &symbol = buf[j]; // For a DSO, a symbol address is relocated by the loading address. @@ -713,7 +726,7 @@ static ABSL_ATTRIBUTE_NOINLINE FindSymbolResult FindSymbol( // about what encoding is being used; we just want the real start address // of the function. start_address = reinterpret_cast( - reinterpret_cast(start_address) & ~1); + reinterpret_cast(start_address) & ~1u); #endif if (deref_function_descriptor_pointer && @@ -726,7 +739,8 @@ static ABSL_ATTRIBUTE_NOINLINE FindSymbolResult FindSymbol( // If pc is inside the .opd section, it points to a function descriptor. const size_t size = pc_in_opd ? kFunctionDescriptorSize : symbol.st_size; - const void *const end_address = ComputeOffset(start_address, size); + const void *const end_address = + ComputeOffset(start_address, static_cast(size)); if (symbol.st_value != 0 && // Skip null value symbols. symbol.st_shndx != 0 && // Skip undefined symbols. #ifdef STT_TLS @@ -744,16 +758,18 @@ static ABSL_ATTRIBUTE_NOINLINE FindSymbolResult FindSymbol( } if (found_match) { - const size_t off = strtab->sh_offset + best_match.st_name; + const off_t off = + static_cast(strtab->sh_offset) + best_match.st_name; const ssize_t n_read = ReadFromOffset(fd, out, out_size, off); if (n_read <= 0) { // This should never happen. ABSL_RAW_LOG(WARNING, - "Unable to read from fd %d at offset %zu: n_read = %zd", fd, - off, n_read); + "Unable to read from fd %d at offset %lld: n_read = %zd", fd, + static_cast(off), n_read); return SYMBOL_NOT_FOUND; } - ABSL_RAW_CHECK(n_read <= out_size, "ReadFromOffset read too much data."); + ABSL_RAW_CHECK(static_cast(n_read) <= out_size, + "ReadFromOffset read too much data."); // strtab->sh_offset points into .strtab-like section that contains // NUL-terminated strings: '\0foo\0barbaz\0...". 
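Reviewer note: the memchr guard in the next hunk protects against a string table whose bytes, as actually read, contain no terminating NUL. The same check in isolation, as a hypothetical helper (std::string_view used here purely for brevity; the vendored code works on raw buffers):

    #include <cstring>
    #include <string_view>

    // Returns the NUL-terminated name at the start of `buf`, or an empty view
    // if the `bytes_read` bytes taken from .strtab contain no terminator
    // (truncated read or corrupt string table).
    std::string_view NameFromStrtabChunk(const char* buf, size_t bytes_read) {
      const void* nul = std::memchr(buf, '\0', bytes_read);
      if (nul == nullptr) return {};
      return std::string_view(
          buf, static_cast<size_t>(static_cast<const char*>(nul) - buf));
    }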
@@ -761,7 +777,7 @@ static ABSL_ATTRIBUTE_NOINLINE FindSymbolResult FindSymbol( // sh_offset+st_name points to the start of symbol name, but we don't know // how long the symbol is, so we try to read as much as we have space for, // and usually over-read (i.e. there is a NUL somewhere before n_read). - if (memchr(out, '\0', n_read) == nullptr) { + if (memchr(out, '\0', static_cast(n_read)) == nullptr) { // Either out_size was too small (n_read == out_size and no NUL), or // we tried to read past the EOF (n_read < out_size) and .strtab is // corrupt (missing terminating NUL; should never happen for valid ELF). @@ -779,7 +795,7 @@ static ABSL_ATTRIBUTE_NOINLINE FindSymbolResult FindSymbol( // See FindSymbol() comment for description of return value. FindSymbolResult Symbolizer::GetSymbolFromObjectFile( const ObjFile &obj, const void *const pc, const ptrdiff_t relocation, - char *out, int out_size, char *tmp_buf, int tmp_buf_size) { + char *out, size_t out_size, char *tmp_buf, size_t tmp_buf_size) { ElfW(Shdr) symtab; ElfW(Shdr) strtab; ElfW(Shdr) opd; @@ -802,13 +818,15 @@ FindSymbolResult Symbolizer::GetSymbolFromObjectFile( // Consult a regular symbol table, then fall back to the dynamic symbol table. for (const auto symbol_table_type : {SHT_SYMTAB, SHT_DYNSYM}) { if (!GetSectionHeaderByType(obj.fd, obj.elf_header.e_shnum, - obj.elf_header.e_shoff, symbol_table_type, + static_cast(obj.elf_header.e_shoff), + static_cast(symbol_table_type), &symtab, tmp_buf, tmp_buf_size)) { continue; } if (!ReadFromOffsetExact( obj.fd, &strtab, sizeof(strtab), - obj.elf_header.e_shoff + symtab.sh_link * sizeof(symtab))) { + static_cast(obj.elf_header.e_shoff + + symtab.sh_link * sizeof(symtab)))) { continue; } const FindSymbolResult rc = @@ -833,7 +851,7 @@ class FileDescriptor { ~FileDescriptor() { if (fd_ >= 0) { - NO_INTR(close(fd_)); + close(fd_); } } @@ -850,7 +868,7 @@ class FileDescriptor { // and snprintf(). class LineReader { public: - explicit LineReader(int fd, char *buf, int buf_len) + explicit LineReader(int fd, char *buf, size_t buf_len) : fd_(fd), buf_len_(buf_len), buf_(buf), @@ -878,12 +896,12 @@ class LineReader { bol_ = eol_ + 1; // Advance to the next line in the buffer. SAFE_ASSERT(bol_ <= eod_); // "bol_" can point to "eod_". if (!HasCompleteLine()) { - const int incomplete_line_length = eod_ - bol_; + const auto incomplete_line_length = static_cast(eod_ - bol_); // Move the trailing incomplete line to the beginning. memmove(buf_, bol_, incomplete_line_length); // Read text from file and append it. char *const append_pos = buf_ + incomplete_line_length; - const int capacity_left = buf_len_ - incomplete_line_length; + const size_t capacity_left = buf_len_ - incomplete_line_length; const ssize_t num_bytes = ReadPersistent(fd_, append_pos, capacity_left); if (num_bytes <= 0) { // EOF or error. @@ -906,7 +924,8 @@ class LineReader { private: char *FindLineFeed() const { - return reinterpret_cast(memchr(bol_, '\n', eod_ - bol_)); + return reinterpret_cast( + memchr(bol_, '\n', static_cast(eod_ - bol_))); } bool BufferIsEmpty() const { return buf_ == eod_; } @@ -916,7 +935,7 @@ class LineReader { } const int fd_; - const int buf_len_; + const size_t buf_len_; char *const buf_; char *bol_; char *eol_; @@ -934,7 +953,8 @@ static const char *GetHex(const char *start, const char *end, int ch = *p; if ((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'F') || (ch >= 'a' && ch <= 'f')) { - hex = (hex << 4) | (ch < 'A' ? ch - '0' : (ch & 0xF) + 9); + hex = (hex << 4) | + static_cast(ch < 'A' ? 
ch - '0' : (ch & 0xF) + 9); } else { // Encountered the first non-hex character. break; } @@ -966,7 +986,7 @@ static bool ShouldUseMapping(const char *const flags) { static ABSL_ATTRIBUTE_NOINLINE bool ReadAddrMap( bool (*callback)(const char *filename, const void *const start_addr, const void *const end_addr, uint64_t offset, void *arg), - void *arg, void *tmp_buf, int tmp_buf_size) { + void *arg, void *tmp_buf, size_t tmp_buf_size) { // Use /proc/self/task//maps instead of /proc/self/maps. The latter // requires kernel to stop all threads, and is significantly slower when there // are 1000s of threads. @@ -1081,10 +1101,10 @@ ObjFile *Symbolizer::FindObjFile(const void *const addr, size_t len) { } } - int lo = 0; - int hi = addr_map_.Size(); + size_t lo = 0; + size_t hi = addr_map_.Size(); while (lo < hi) { - int mid = (lo + hi) / 2; + size_t mid = (lo + hi) / 2; if (addr < addr_map_.At(mid)->end_addr) { hi = mid; } else { @@ -1106,11 +1126,11 @@ ObjFile *Symbolizer::FindObjFile(const void *const addr, size_t len) { } void Symbolizer::ClearAddrMap() { - for (int i = 0; i != addr_map_.Size(); i++) { + for (size_t i = 0; i != addr_map_.Size(); i++) { ObjFile *o = addr_map_.At(i); base_internal::LowLevelAlloc::Free(o->filename); if (o->fd >= 0) { - NO_INTR(close(o->fd)); + close(o->fd); } } addr_map_.Clear(); @@ -1126,7 +1146,7 @@ bool Symbolizer::RegisterObjFile(const char *filename, // Files are supposed to be added in the increasing address order. Make // sure that's the case. - int addr_map_size = impl->addr_map_.Size(); + size_t addr_map_size = impl->addr_map_.Size(); if (addr_map_size != 0) { ObjFile *old = impl->addr_map_.At(addr_map_size - 1); if (old->end_addr > end_addr) { @@ -1146,6 +1166,14 @@ bool Symbolizer::RegisterObjFile(const char *filename, reinterpret_cast(old->end_addr), old->filename); } return true; + } else if (old->end_addr == start_addr && + reinterpret_cast(old->start_addr) - old->offset == + reinterpret_cast(start_addr) - offset && + strcmp(old->filename, filename) == 0) { + // Two contiguous map entries that span a contiguous region of the file, + // perhaps because some part of the file was mlock()ed. Combine them. + old->end_addr = end_addr; + return true; } } ObjFile *obj = impl->addr_map_.Add(); @@ -1162,12 +1190,12 @@ bool Symbolizer::RegisterObjFile(const char *filename, // where the input symbol is demangled in-place. // To keep stack consumption low, we would like this function to not // get inlined. -static ABSL_ATTRIBUTE_NOINLINE void DemangleInplace(char *out, int out_size, +static ABSL_ATTRIBUTE_NOINLINE void DemangleInplace(char *out, size_t out_size, char *tmp_buf, - int tmp_buf_size) { + size_t tmp_buf_size) { if (Demangle(out, tmp_buf, tmp_buf_size)) { // Demangling succeeded. Copy to out if the space allows. - int len = strlen(tmp_buf); + size_t len = strlen(tmp_buf); if (len + 1 <= out_size) { // +1 for '\0'. 
SAFE_ASSERT(len < tmp_buf_size); memmove(out, tmp_buf, len + 1); @@ -1210,7 +1238,8 @@ const char *Symbolizer::InsertSymbolInCache(const void *const pc, SymbolCacheLine *line = GetCacheLine(pc); uint32_t max_age = 0; - int oldest_index = -1; + size_t oldest_index = 0; + bool found_oldest_index = false; for (size_t i = 0; i < ABSL_ARRAYSIZE(line->pc); ++i) { if (line->pc[i] == nullptr) { AgeSymbols(line); @@ -1222,11 +1251,12 @@ const char *Symbolizer::InsertSymbolInCache(const void *const pc, if (line->age[i] >= max_age) { max_age = line->age[i]; oldest_index = i; + found_oldest_index = true; } } AgeSymbols(line); - ABSL_RAW_CHECK(oldest_index >= 0, "Corrupt cache"); + ABSL_RAW_CHECK(found_oldest_index, "Corrupt cache"); base_internal::LowLevelAlloc::Free(line->name[oldest_index]); line->pc[oldest_index] = pc; line->name[oldest_index] = CopyString(name); @@ -1295,7 +1325,7 @@ static bool MaybeInitializeObjFile(ObjFile *obj) { } const int phnum = obj->elf_header.e_phnum; const int phentsize = obj->elf_header.e_phentsize; - size_t phoff = obj->elf_header.e_phoff; + auto phoff = static_cast(obj->elf_header.e_phoff); size_t num_executable_load_segments = 0; for (int j = 0; j < phnum; j++) { ElfW(Phdr) phdr; @@ -1313,8 +1343,9 @@ static bool MaybeInitializeObjFile(ObjFile *obj) { if (num_executable_load_segments < obj->phdr.size()) { memcpy(&obj->phdr[num_executable_load_segments++], &phdr, sizeof(phdr)); } else { - ABSL_RAW_LOG(WARNING, "%s: too many executable LOAD segments", - obj->filename); + ABSL_RAW_LOG( + WARNING, "%s: too many executable LOAD segments: %zu >= %zu", + obj->filename, num_executable_load_segments, obj->phdr.size()); break; } } @@ -1346,7 +1377,7 @@ const char *Symbolizer::GetUncachedSymbol(const void *pc) { // // For obj->offset > 0, adjust the relocation since a mapping at offset // X in the file will have a start address of [true relocation]+X. - relocation = start_addr - obj->offset; + relocation = static_cast(start_addr - obj->offset); // Note: some binaries have multiple "rx" LOAD segments. We must // find the right one. @@ -1521,7 +1552,7 @@ bool RegisterFileMappingHint(const void *start, const void *end, uint64_t offset ret = false; } else { // TODO(ckennelly): Move this into a string copy routine. - int len = strlen(filename); + size_t len = strlen(filename); char *dst = static_cast( base_internal::LowLevelAlloc::AllocWithArena(len + 1, SigSafeArena())); ABSL_RAW_CHECK(dst != nullptr, "out of memory"); @@ -1577,16 +1608,17 @@ bool Symbolize(const void *pc, char *out, int out_size) { const char *name = s->GetSymbol(pc); bool ok = false; if (name != nullptr && out_size > 0) { - strncpy(out, name, out_size); + strncpy(out, name, static_cast(out_size)); ok = true; - if (out[out_size - 1] != '\0') { + if (out[static_cast(out_size) - 1] != '\0') { // strncpy() does not '\0' terminate when it truncates. Do so, with // trailing ellipsis. 
static constexpr char kEllipsis[] = "..."; - int ellipsis_size = - std::min(implicit_cast(strlen(kEllipsis)), out_size - 1); - memcpy(out + out_size - ellipsis_size - 1, kEllipsis, ellipsis_size); - out[out_size - 1] = '\0'; + size_t ellipsis_size = + std::min(strlen(kEllipsis), static_cast(out_size) - 1); + memcpy(out + static_cast(out_size) - ellipsis_size - 1, kEllipsis, + ellipsis_size); + out[static_cast(out_size) - 1] = '\0'; } } debugging_internal::FreeSymbolizer(s); diff --git a/TMessagesProj/jni/voip/webrtc/absl/debugging/symbolize_win32.inc b/TMessagesProj/jni/voip/webrtc/absl/debugging/symbolize_win32.inc index c3df46f606..53a099a181 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/debugging/symbolize_win32.inc +++ b/TMessagesProj/jni/voip/webrtc/absl/debugging/symbolize_win32.inc @@ -65,14 +65,15 @@ bool Symbolize(const void* pc, char* out, int out_size) { if (!SymFromAddr(process, reinterpret_cast(pc), nullptr, symbol)) { return false; } - strncpy(out, symbol->Name, out_size); - if (out[out_size - 1] != '\0') { + const size_t out_size_t = static_cast(out_size); + strncpy(out, symbol->Name, out_size_t); + if (out[out_size_t - 1] != '\0') { // strncpy() does not '\0' terminate when it truncates. static constexpr char kEllipsis[] = "..."; - int ellipsis_size = - std::min(sizeof(kEllipsis) - 1, out_size - 1); - memcpy(out + out_size - ellipsis_size - 1, kEllipsis, ellipsis_size); - out[out_size - 1] = '\0'; + size_t ellipsis_size = + std::min(sizeof(kEllipsis) - 1, out_size_t - 1); + memcpy(out + out_size_t - ellipsis_size - 1, kEllipsis, ellipsis_size); + out[out_size_t - 1] = '\0'; } return true; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/flags/declare.h b/TMessagesProj/jni/voip/webrtc/absl/flags/declare.h index a791b66711..d1437bb9f6 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/flags/declare.h +++ b/TMessagesProj/jni/voip/webrtc/absl/flags/declare.h @@ -60,7 +60,11 @@ ABSL_NAMESPACE_END // The ABSL_DECLARE_FLAG(type, name) macro expands to: // // extern absl::Flag FLAGS_name; -#define ABSL_DECLARE_FLAG(type, name) \ +#define ABSL_DECLARE_FLAG(type, name) ABSL_DECLARE_FLAG_INTERNAL(type, name) + +// Internal implementation of ABSL_DECLARE_FLAG to allow macro expansion of its +// arguments. Clients must use ABSL_DECLARE_FLAG instead. +#define ABSL_DECLARE_FLAG_INTERNAL(type, name) \ extern absl::Flag FLAGS_##name; \ namespace absl /* block flags in namespaces */ {} \ /* second redeclaration is to allow applying attributes */ \ diff --git a/TMessagesProj/jni/voip/webrtc/absl/flags/flag.h b/TMessagesProj/jni/voip/webrtc/absl/flags/flag.h index 5010608232..b7f94be7c5 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/flags/flag.h +++ b/TMessagesProj/jni/voip/webrtc/absl/flags/flag.h @@ -67,6 +67,10 @@ ABSL_NAMESPACE_BEGIN // ABSL_FLAG(int, count, 0, "Count of items to process"); // // No public methods of `absl::Flag` are part of the Abseil Flags API. +// +// For type support of Abseil Flags, see the marshalling.h header file, which +// discusses supported standard types, optional flags, and additional Abseil +// type support. #if !defined(_MSC_VER) || defined(__clang__) template using Flag = flags_internal::Flag; @@ -163,7 +167,6 @@ ABSL_NAMESPACE_END // Note: do not construct objects of type `absl::Flag` directly. Only use the // `ABSL_FLAG()` macro for such construction. 
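Reviewer note: since this hunk moves the extern declaration out of ABSL_FLAG and into ABSL_FLAG_IMPL, it is worth recalling the intended split between the two macros. A usage sketch with a hypothetical flag (declare in a header, define in exactly one translation unit):

    // retry_flag.h -- declaration only, safe to include from many TUs.
    #include "absl/flags/declare.h"
    ABSL_DECLARE_FLAG(int, retry_count);

    // retry_flag.cc -- the single definition.
    #include "absl/flags/flag.h"
    ABSL_FLAG(int, retry_count, 3, "How many times to retry a failed request.");

    // any_other_file.cc -- read and write through the typed accessors.
    #include "retry_flag.h"
    #include "absl/flags/flag.h"
    int Retries() { return absl::GetFlag(FLAGS_retry_count); }
    void SetRetries(int n) { absl::SetFlag(&FLAGS_retry_count, n); }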
#define ABSL_FLAG(Type, name, default_value, help) \ - extern ::absl::Flag FLAGS_##name; \ ABSL_FLAG_IMPL(Type, name, default_value, help) // ABSL_FLAG().OnUpdate() @@ -266,6 +269,7 @@ ABSL_NAMESPACE_END // global name for FLAGS_no symbol, thus preventing the possibility // of defining two flags with names foo and nofoo. #define ABSL_FLAG_IMPL(Type, name, default_value, help) \ + extern ::absl::Flag FLAGS_##name; \ namespace absl /* block flags in namespaces */ {} \ ABSL_FLAG_IMPL_DECLARE_DEF_VAL_WRAPPER(name, Type, default_value) \ ABSL_FLAG_IMPL_DECLARE_HELP_WRAPPER(name, help) \ diff --git a/TMessagesProj/jni/voip/webrtc/absl/flags/flag_test.cc b/TMessagesProj/jni/voip/webrtc/absl/flags/flag_test.cc index 6e974a5b5e..845b4ebac6 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/flags/flag_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/flags/flag_test.cc @@ -854,7 +854,9 @@ ABSL_RETIRED_FLAG(bool, old_bool_flag, true, "old descr"); ABSL_RETIRED_FLAG(int, old_int_flag, (int)std::sqrt(10), "old descr"); ABSL_RETIRED_FLAG(std::string, old_str_flag, "", absl::StrCat("old ", "descr")); -bool initializaion_order_fiasco_test = [] { +namespace { + +bool initialization_order_fiasco_test ABSL_ATTRIBUTE_UNUSED = [] { // Iterate over all the flags during static initialization. // This should not trigger ASan's initialization-order-fiasco. auto* handle1 = absl::FindCommandLineFlag("flag_on_separate_file"); @@ -865,8 +867,6 @@ bool initializaion_order_fiasco_test = [] { return true; }(); -namespace { - TEST_F(FlagTest, TestRetiredFlagRegistration) { auto* handle = absl::FindCommandLineFlag("old_bool_flag"); EXPECT_TRUE(handle->IsOfType()); @@ -977,3 +977,190 @@ TEST_F(FlagTest, TesTypeWrappingEnum) { value = absl::GetFlag(FLAGS_test_enum_wrapper_flag); EXPECT_EQ(value.e, B); } + +// This is a compile test to ensure macros are expanded within ABSL_FLAG and +// ABSL_DECLARE_FLAG. 
+#define FLAG_NAME_MACRO(name) prefix_ ## name +ABSL_DECLARE_FLAG(int, FLAG_NAME_MACRO(test_macro_named_flag)); +ABSL_FLAG(int, FLAG_NAME_MACRO(test_macro_named_flag), 0, + "Testing macro expansion within ABSL_FLAG"); + +TEST_F(FlagTest, MacroWithinAbslFlag) { + EXPECT_EQ(absl::GetFlag(FLAGS_prefix_test_macro_named_flag), 0); + absl::SetFlag(&FLAGS_prefix_test_macro_named_flag, 1); + EXPECT_EQ(absl::GetFlag(FLAGS_prefix_test_macro_named_flag), 1); +} + +// -------------------------------------------------------------------- + +#if defined(__GNUC__) && !defined(__clang__) && __GNUC__ <= 5 +#define ABSL_SKIP_OPTIONAL_BOOL_TEST_DUE_TO_GCC_BUG +#endif + +#ifndef ABSL_SKIP_OPTIONAL_BOOL_TEST_DUE_TO_GCC_BUG +ABSL_FLAG(absl::optional, optional_bool, absl::nullopt, "help"); +#endif +ABSL_FLAG(absl::optional, optional_int, {}, "help"); +ABSL_FLAG(absl::optional, optional_double, 9.3, "help"); +ABSL_FLAG(absl::optional, optional_string, absl::nullopt, "help"); +ABSL_FLAG(absl::optional, optional_duration, absl::nullopt, + "help"); +ABSL_FLAG(absl::optional>, optional_optional_int, + absl::nullopt, "help"); +#if defined(ABSL_HAVE_STD_OPTIONAL) && !defined(ABSL_USES_STD_OPTIONAL) +ABSL_FLAG(std::optional, std_optional_int64, std::nullopt, "help"); +#endif + +namespace { + +#ifndef ABSL_SKIP_OPTIONAL_BOOL_TEST_DUE_TO_GCC_BUG +TEST_F(FlagTest, TestOptionalBool) { + EXPECT_FALSE(absl::GetFlag(FLAGS_optional_bool).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_bool), absl::nullopt); + + absl::SetFlag(&FLAGS_optional_bool, false); + EXPECT_TRUE(absl::GetFlag(FLAGS_optional_bool).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_bool), false); + + absl::SetFlag(&FLAGS_optional_bool, true); + EXPECT_TRUE(absl::GetFlag(FLAGS_optional_bool).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_bool), true); + + absl::SetFlag(&FLAGS_optional_bool, absl::nullopt); + EXPECT_FALSE(absl::GetFlag(FLAGS_optional_bool).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_bool), absl::nullopt); +} + +// -------------------------------------------------------------------- +#endif + +TEST_F(FlagTest, TestOptionalInt) { + EXPECT_FALSE(absl::GetFlag(FLAGS_optional_int).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_int), absl::nullopt); + + absl::SetFlag(&FLAGS_optional_int, 0); + EXPECT_TRUE(absl::GetFlag(FLAGS_optional_int).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_int), 0); + + absl::SetFlag(&FLAGS_optional_int, 10); + EXPECT_TRUE(absl::GetFlag(FLAGS_optional_int).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_int), 10); + + absl::SetFlag(&FLAGS_optional_int, absl::nullopt); + EXPECT_FALSE(absl::GetFlag(FLAGS_optional_int).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_int), absl::nullopt); +} + +// -------------------------------------------------------------------- + +TEST_F(FlagTest, TestOptionalDouble) { + EXPECT_TRUE(absl::GetFlag(FLAGS_optional_double).has_value()); + EXPECT_DOUBLE_EQ(*absl::GetFlag(FLAGS_optional_double), 9.3); + + absl::SetFlag(&FLAGS_optional_double, 0.0); + EXPECT_TRUE(absl::GetFlag(FLAGS_optional_double).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_double), 0.0); + + absl::SetFlag(&FLAGS_optional_double, 1.234); + EXPECT_TRUE(absl::GetFlag(FLAGS_optional_double).has_value()); + EXPECT_DOUBLE_EQ(*absl::GetFlag(FLAGS_optional_double), 1.234); + + absl::SetFlag(&FLAGS_optional_double, absl::nullopt); + EXPECT_FALSE(absl::GetFlag(FLAGS_optional_double).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_double), absl::nullopt); +} 
+ +// -------------------------------------------------------------------- + +TEST_F(FlagTest, TestOptionalString) { + EXPECT_FALSE(absl::GetFlag(FLAGS_optional_string).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_string), absl::nullopt); + + // Setting optional string to "" leads to undefined behavior. + + absl::SetFlag(&FLAGS_optional_string, " "); + EXPECT_TRUE(absl::GetFlag(FLAGS_optional_string).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_string), " "); + + absl::SetFlag(&FLAGS_optional_string, "QWERTY"); + EXPECT_TRUE(absl::GetFlag(FLAGS_optional_string).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_string), "QWERTY"); + + absl::SetFlag(&FLAGS_optional_string, absl::nullopt); + EXPECT_FALSE(absl::GetFlag(FLAGS_optional_string).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_string), absl::nullopt); +} + +// -------------------------------------------------------------------- + +TEST_F(FlagTest, TestOptionalDuration) { + EXPECT_FALSE(absl::GetFlag(FLAGS_optional_duration).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_duration), absl::nullopt); + + absl::SetFlag(&FLAGS_optional_duration, absl::ZeroDuration()); + EXPECT_TRUE(absl::GetFlag(FLAGS_optional_duration).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_duration), absl::Seconds(0)); + + absl::SetFlag(&FLAGS_optional_duration, absl::Hours(3)); + EXPECT_TRUE(absl::GetFlag(FLAGS_optional_duration).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_duration), absl::Hours(3)); + + absl::SetFlag(&FLAGS_optional_duration, absl::nullopt); + EXPECT_FALSE(absl::GetFlag(FLAGS_optional_duration).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_duration), absl::nullopt); +} + +// -------------------------------------------------------------------- + +TEST_F(FlagTest, TestOptionalOptional) { + EXPECT_FALSE(absl::GetFlag(FLAGS_optional_optional_int).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_optional_int), absl::nullopt); + + absl::optional nullint{absl::nullopt}; + + absl::SetFlag(&FLAGS_optional_optional_int, nullint); + EXPECT_TRUE(absl::GetFlag(FLAGS_optional_optional_int).has_value()); + EXPECT_NE(absl::GetFlag(FLAGS_optional_optional_int), nullint); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_optional_int), + absl::optional>{nullint}); + + absl::SetFlag(&FLAGS_optional_optional_int, 0); + EXPECT_TRUE(absl::GetFlag(FLAGS_optional_optional_int).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_optional_int), 0); + + absl::SetFlag(&FLAGS_optional_optional_int, absl::optional{0}); + EXPECT_TRUE(absl::GetFlag(FLAGS_optional_optional_int).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_optional_int), 0); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_optional_int), absl::optional{0}); + + absl::SetFlag(&FLAGS_optional_optional_int, absl::nullopt); + EXPECT_FALSE(absl::GetFlag(FLAGS_optional_optional_int).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_optional_optional_int), absl::nullopt); +} + +// -------------------------------------------------------------------- + +#if defined(ABSL_HAVE_STD_OPTIONAL) && !defined(ABSL_USES_STD_OPTIONAL) + +TEST_F(FlagTest, TestStdOptional) { + EXPECT_FALSE(absl::GetFlag(FLAGS_std_optional_int64).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_std_optional_int64), std::nullopt); + + absl::SetFlag(&FLAGS_std_optional_int64, 0); + EXPECT_TRUE(absl::GetFlag(FLAGS_std_optional_int64).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_std_optional_int64), 0); + + absl::SetFlag(&FLAGS_std_optional_int64, 0xFFFFFFFFFF16); + 
EXPECT_TRUE(absl::GetFlag(FLAGS_std_optional_int64).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_std_optional_int64), 0xFFFFFFFFFF16); + + absl::SetFlag(&FLAGS_std_optional_int64, std::nullopt); + EXPECT_FALSE(absl::GetFlag(FLAGS_std_optional_int64).has_value()); + EXPECT_EQ(absl::GetFlag(FLAGS_std_optional_int64), std::nullopt); +} + +// -------------------------------------------------------------------- + +#endif + +} // namespace diff --git a/TMessagesProj/jni/voip/webrtc/absl/flags/internal/flag.cc b/TMessagesProj/jni/voip/webrtc/absl/flags/internal/flag.cc index 55892d77dd..cc656f9d13 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/flags/internal/flag.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/flags/internal/flag.cc @@ -406,7 +406,7 @@ template StorageT* FlagImpl::OffsetValue() const { char* p = reinterpret_cast(const_cast(this)); // The offset is deduced via Flag value type specific op_. - size_t offset = flags_internal::ValueOffset(op_); + ptrdiff_t offset = flags_internal::ValueOffset(op_); return reinterpret_cast(p + offset); } @@ -486,7 +486,7 @@ bool FlagImpl::ReadOneBool() const { } void FlagImpl::ReadSequenceLockedData(void* dst) const { - int size = Sizeof(op_); + size_t size = Sizeof(op_); // Attempt to read using the sequence lock. if (ABSL_PREDICT_TRUE(seq_lock_.TryRead(dst, AtomicBufferValue(), size))) { return; diff --git a/TMessagesProj/jni/voip/webrtc/absl/flags/internal/flag_msvc.inc b/TMessagesProj/jni/voip/webrtc/absl/flags/internal/flag_msvc.inc new file mode 100644 index 0000000000..c31bd27fd8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/flags/internal/flag_msvc.inc @@ -0,0 +1,116 @@ +// +// Copyright 2021 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Do not include this file directly. +// Include absl/flags/flag.h instead. + +// MSVC debug builds do not implement initialization with constexpr constructors +// correctly. To work around this we add a level of indirection, so that the +// class `absl::Flag` contains an `internal::Flag*` (instead of being an alias +// to that class) and dynamically allocates an instance when necessary. We also +// forward all calls to internal::Flag methods via trampoline methods. In this +// setup the `absl::Flag` class does not have constructor and virtual methods, +// all the data members are public and thus MSVC is able to initialize it at +// link time. To deal with multiple threads accessing the flag for the first +// time concurrently we use an atomic boolean indicating if flag object is +// initialized. We also employ the double-checked locking pattern where the +// second level of protection is a global Mutex, so if two threads attempt to +// construct the flag concurrently only one wins. 
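Reviewer note: the indirection described in the flag_msvc.inc comment block above is ordinary double-checked locking. A condensed, portable sketch of the same shape using standard-library primitives (the real code uses absl::Mutex and the flags-internal flag type; all names here are illustrative):

    #include <atomic>
    #include <mutex>

    struct Impl { int value = 0; };

    class LazyFlag {
     public:
      Impl& Get() {
        // Fast path: no lock once the object has been published.
        if (!inited_.load(std::memory_order_acquire)) {
          std::lock_guard<std::mutex> lock(mu_);
          // Re-check under the lock: another thread may have constructed it.
          if (!inited_.load(std::memory_order_acquire)) {
            impl_ = new Impl();
            inited_.store(true, std::memory_order_release);  // publish
          }
        }
        return *impl_;
      }

     private:
      std::mutex mu_;
      std::atomic<bool> inited_{false};
      Impl* impl_ = nullptr;
    };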
+// +// This solution is based on a recomendation here: +// https://developercommunity.visualstudio.com/content/problem/336946/class-with-constexpr-constructor-not-using-static.html?childToView=648454#comment-648454 + +namespace flags_internal { +absl::Mutex* GetGlobalConstructionGuard(); +} // namespace flags_internal + +// Public methods of `absl::Flag` are NOT part of the Abseil Flags API. +// See https://abseil.io/docs/cpp/guides/flags +template +class Flag { + public: + // No constructor and destructor to ensure this is an aggregate type. + // Visual Studio 2015 still requires the constructor for class to be + // constexpr initializable. +#if _MSC_VER <= 1900 + constexpr Flag(const char* name, const char* filename, + const flags_internal::HelpGenFunc help_gen, + const flags_internal::FlagDfltGenFunc default_value_gen) + : name_(name), + filename_(filename), + help_gen_(help_gen), + default_value_gen_(default_value_gen), + inited_(false), + impl_(nullptr) {} +#endif + + flags_internal::Flag& GetImpl() const { + if (!inited_.load(std::memory_order_acquire)) { + absl::MutexLock l(flags_internal::GetGlobalConstructionGuard()); + + if (inited_.load(std::memory_order_acquire)) { + return *impl_; + } + + impl_ = new flags_internal::Flag( + name_, filename_, + {flags_internal::FlagHelpMsg(help_gen_), + flags_internal::FlagHelpKind::kGenFunc}, + {flags_internal::FlagDefaultSrc(default_value_gen_), + flags_internal::FlagDefaultKind::kGenFunc}); + inited_.store(true, std::memory_order_release); + } + + return *impl_; + } + + // Public methods of `absl::Flag` are NOT part of the Abseil Flags API. + // See https://abseil.io/docs/cpp/guides/flags + bool IsRetired() const { return GetImpl().IsRetired(); } + absl::string_view Name() const { return GetImpl().Name(); } + std::string Help() const { return GetImpl().Help(); } + bool IsModified() const { return GetImpl().IsModified(); } + bool IsSpecifiedOnCommandLine() const { + return GetImpl().IsSpecifiedOnCommandLine(); + } + std::string Filename() const { return GetImpl().Filename(); } + std::string DefaultValue() const { return GetImpl().DefaultValue(); } + std::string CurrentValue() const { return GetImpl().CurrentValue(); } + template + inline bool IsOfType() const { + return GetImpl().template IsOfType(); + } + T Get() const { + return flags_internal::FlagImplPeer::InvokeGet(GetImpl()); + } + void Set(const T& v) { + flags_internal::FlagImplPeer::InvokeSet(GetImpl(), v); + } + void InvokeCallback() { GetImpl().InvokeCallback(); } + + const CommandLineFlag& Reflect() const { + return flags_internal::FlagImplPeer::InvokeReflect(GetImpl()); + } + + // The data members are logically private, but they need to be public for + // this to be an aggregate type. 
+ const char* name_; + const char* filename_; + const flags_internal::HelpGenFunc help_gen_; + const flags_internal::FlagDfltGenFunc default_value_gen_; + + mutable std::atomic inited_; + mutable flags_internal::Flag* impl_; +}; diff --git a/TMessagesProj/jni/voip/webrtc/absl/flags/internal/usage.cc b/TMessagesProj/jni/voip/webrtc/absl/flags/internal/usage.cc index 949709e883..5efc7b07a3 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/flags/internal/usage.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/flags/internal/usage.cc @@ -17,7 +17,9 @@ #include +#include #include +#include #include #include #include @@ -33,6 +35,7 @@ #include "absl/flags/internal/program_name.h" #include "absl/flags/internal/registry.h" #include "absl/flags/usage_config.h" +#include "absl/strings/match.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_split.h" #include "absl/strings/string_view.h" @@ -148,8 +151,7 @@ class FlagHelpPrettyPrinter { } // Write the token, ending the string first if necessary/possible. - if (!new_line && - (line_len_ + static_cast(token.size()) >= max_line_len_)) { + if (!new_line && (line_len_ + token.size() >= max_line_len_)) { EndLine(); new_line = true; } @@ -344,7 +346,7 @@ void FlagHelp(std::ostream& out, const CommandLineFlag& flag, void FlagsHelp(std::ostream& out, absl::string_view filter, HelpFormat format, absl::string_view program_usage_message) { flags_internal::FlagKindFilter filter_cb = [&](absl::string_view filename) { - return filter.empty() || filename.find(filter) != absl::string_view::npos; + return filter.empty() || absl::StrContains(filename, filter); }; flags_internal::FlagsHelpImpl(out, filter_cb, format, program_usage_message); } @@ -466,7 +468,7 @@ void SetFlagsHelpFormat(HelpFormat format) { // function. bool DeduceUsageFlags(absl::string_view name, absl::string_view value) { if (absl::ConsumePrefix(&name, "help")) { - if (name == "") { + if (name.empty()) { if (value.empty()) { SetFlagsHelpMode(HelpMode::kImportant); } else { diff --git a/TMessagesProj/jni/voip/webrtc/absl/flags/internal/usage_test.cc b/TMessagesProj/jni/voip/webrtc/absl/flags/internal/usage_test.cc index 044d71c87d..209a7be9b6 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/flags/internal/usage_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/flags/internal/usage_test.cc @@ -20,6 +20,7 @@ #include #include +#include "gmock/gmock.h" #include "gtest/gtest.h" #include "absl/flags/flag.h" #include "absl/flags/internal/parse.h" @@ -47,8 +48,10 @@ struct UDT { UDT(const UDT&) = default; UDT& operator=(const UDT&) = default; }; -bool AbslParseFlag(absl::string_view, UDT*, std::string*) { return true; } -std::string AbslUnparseFlag(const UDT&) { return "UDT{}"; } +static bool AbslParseFlag(absl::string_view, UDT*, std::string*) { + return true; +} +static std::string AbslUnparseFlag(const UDT&) { return "UDT{}"; } ABSL_FLAG(UDT, usage_reporting_test_flag_05, {}, "usage_reporting_test_flag_05 help message"); @@ -103,14 +106,19 @@ class UsageReportingTest : public testing::Test { using UsageReportingDeathTest = UsageReportingTest; TEST_F(UsageReportingDeathTest, TestSetProgramUsageMessage) { +#if !defined(GTEST_HAS_ABSL) || !GTEST_HAS_ABSL + // Check for kTestUsageMessage set in main() below. EXPECT_EQ(absl::ProgramUsageMessage(), kTestUsageMessage); +#else + // Check for part of the usage message set by GoogleTest. 
+ EXPECT_THAT(absl::ProgramUsageMessage(), + ::testing::HasSubstr( + "This program contains tests written using Google Test")); +#endif -#ifndef _WIN32 - // TODO(rogeeff): figure out why this does not work on Windows. EXPECT_DEATH_IF_SUPPORTED( absl::SetProgramUsageMessage("custom usage message"), - ".*SetProgramUsageMessage\\(\\) called twice.*"); -#endif + ::testing::HasSubstr("SetProgramUsageMessage() called twice")); } // -------------------------------------------------------------------- @@ -487,8 +495,10 @@ path. int main(int argc, char* argv[]) { (void)absl::GetFlag(FLAGS_undefok); // Force linking of parse.cc flags::SetProgramInvocationName("usage_test"); +#if !defined(GTEST_HAS_ABSL) || !GTEST_HAS_ABSL + // GoogleTest calls absl::SetProgramUsageMessage() already. absl::SetProgramUsageMessage(kTestUsageMessage); +#endif ::testing::InitGoogleTest(&argc, argv); - return RUN_ALL_TESTS(); } diff --git a/TMessagesProj/jni/voip/webrtc/absl/flags/marshalling.h b/TMessagesProj/jni/voip/webrtc/absl/flags/marshalling.h index 7cbc136d57..325e75e516 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/flags/marshalling.h +++ b/TMessagesProj/jni/voip/webrtc/absl/flags/marshalling.h @@ -33,6 +33,7 @@ // * `double` // * `std::string` // * `std::vector` +// * `std::optional` // * `absl::LogSeverity` (provided natively for layering reasons) // // Note that support for integral types is implemented using overloads for @@ -65,6 +66,42 @@ // below.) // // ----------------------------------------------------------------------------- +// Optional Flags +// ----------------------------------------------------------------------------- +// +// The Abseil flags library supports flags of type `std::optional` where +// `T` is a type of one of the supported flags. We refer to this flag type as +// an "optional flag." An optional flag is either "valueless", holding no value +// of type `T` (indicating that the flag has not been set) or a value of type +// `T`. The valueless state in C++ code is represented by a value of +// `std::nullopt` for the optional flag. +// +// Using `std::nullopt` as an optional flag's default value allows you to check +// whether such a flag was ever specified on the command line: +// +// if (absl::GetFlag(FLAGS_foo).has_value()) { +// // flag was set on command line +// } else { +// // flag was not passed on command line +// } +// +// Using an optional flag in this manner avoids common workarounds for +// indicating such an unset flag (such as using sentinel values to indicate this +// state). +// +// An optional flag also allows a developer to pass a flag in an "unset" +// valueless state on the command line, allowing the flag to later be set in +// binary logic. An optional flag's valueless state is indicated by the special +// notation of passing the value as an empty string through the syntax `--flag=` +// or `--flag ""`. +// +// $ binary_with_optional --flag_in_unset_state= +// $ binary_with_optional --flag_in_unset_state "" +// +// Note: as a result of the above syntax requirements, an optional flag cannot +// be set to a `T` of any value which unparses to the empty string. 
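Reviewer note: a short sketch of what the optional-flag support described in the marshalling.h comments above looks like at a definition and call site (hypothetical flag name; `--port=8080` sets it, a bare `--port=` leaves it valueless):

    #include <cstdio>

    #include "absl/flags/flag.h"
    #include "absl/types/optional.h"

    ABSL_FLAG(absl::optional<int>, port, absl::nullopt,
              "Listening port; unset means pick one automatically.");

    int EffectivePort() {
      absl::optional<int> port = absl::GetFlag(FLAGS_port);
      if (port.has_value()) return *port;
      std::puts("no --port on the command line, choosing automatically");
      return 0;  // placeholder for an auto-selected port
    }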
+// +// ----------------------------------------------------------------------------- // Adding Type Support for Abseil Flags // ----------------------------------------------------------------------------- // @@ -162,14 +199,27 @@ #ifndef ABSL_FLAGS_MARSHALLING_H_ #define ABSL_FLAGS_MARSHALLING_H_ +#include "absl/base/config.h" + +#if defined(ABSL_HAVE_STD_OPTIONAL) && !defined(ABSL_USES_STD_OPTIONAL) +#include +#endif #include #include -#include "absl/base/config.h" #include "absl/strings/string_view.h" +#include "absl/types/optional.h" namespace absl { ABSL_NAMESPACE_BEGIN + +// Forward declaration to be used inside composable flag parse/unparse +// implementations +template +inline bool ParseFlag(absl::string_view input, T* dst, std::string* error); +template +inline std::string UnparseFlag(const T& v); + namespace flags_internal { // Overloads of `AbslParseFlag()` and `AbslUnparseFlag()` for fundamental types. @@ -188,6 +238,36 @@ bool AbslParseFlag(absl::string_view, double*, std::string*); bool AbslParseFlag(absl::string_view, std::string*, std::string*); bool AbslParseFlag(absl::string_view, std::vector*, std::string*); +template +bool AbslParseFlag(absl::string_view text, absl::optional* f, + std::string* err) { + if (text.empty()) { + *f = absl::nullopt; + return true; + } + T value; + if (!absl::ParseFlag(text, &value, err)) return false; + + *f = std::move(value); + return true; +} + +#if defined(ABSL_HAVE_STD_OPTIONAL) && !defined(ABSL_USES_STD_OPTIONAL) +template +bool AbslParseFlag(absl::string_view text, std::optional* f, + std::string* err) { + if (text.empty()) { + *f = std::nullopt; + return true; + } + T value; + if (!absl::ParseFlag(text, &value, err)) return false; + + *f = std::move(value); + return true; +} +#endif + template bool InvokeParseFlag(absl::string_view input, T* dst, std::string* err) { // Comment on next line provides a good compiler error message if T @@ -201,6 +281,18 @@ bool InvokeParseFlag(absl::string_view input, T* dst, std::string* err) { std::string AbslUnparseFlag(absl::string_view v); std::string AbslUnparseFlag(const std::vector&); +template +std::string AbslUnparseFlag(const absl::optional& f) { + return f.has_value() ? absl::UnparseFlag(*f) : ""; +} + +#if defined(ABSL_HAVE_STD_OPTIONAL) && !defined(ABSL_USES_STD_OPTIONAL) +template +std::string AbslUnparseFlag(const std::optional& f) { + return f.has_value() ? absl::UnparseFlag(*f) : ""; +} +#endif + template std::string Unparse(const T& v) { // Comment on next line provides a good compiler error message if T does not diff --git a/TMessagesProj/jni/voip/webrtc/absl/flags/parse.cc b/TMessagesProj/jni/voip/webrtc/absl/flags/parse.cc index dd1a6796ca..2851c0f788 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/flags/parse.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/flags/parse.cc @@ -159,14 +159,14 @@ class ArgsList { // Returns success status: true if parsing successful, false otherwise. 
bool ReadFromFlagfile(const std::string& flag_file_name); - int Size() const { return args_.size() - next_arg_; } - int FrontIndex() const { return next_arg_; } + size_t Size() const { return args_.size() - next_arg_; } + size_t FrontIndex() const { return next_arg_; } absl::string_view Front() const { return args_[next_arg_]; } void PopFront() { next_arg_++; } private: std::vector args_; - int next_arg_; + size_t next_arg_; }; bool ArgsList::ReadFromFlagfile(const std::string& flag_file_name) { @@ -626,7 +626,7 @@ std::vector ParseCommandLineImpl(int argc, char* argv[], std::vector output_args; std::vector positional_args; - output_args.reserve(argc); + output_args.reserve(static_cast(argc)); // This is the list of undefined flags. The element of the list is the pair // consisting of boolean indicating if flag came from command line (vs from @@ -795,8 +795,8 @@ std::vector ParseCommandLineImpl(int argc, char* argv[], // All the remaining arguments are positional. if (!input_args.empty()) { - for (int arg_index = input_args.back().FrontIndex(); arg_index < argc; - ++arg_index) { + for (size_t arg_index = input_args.back().FrontIndex(); + arg_index < static_cast(argc); ++arg_index) { output_args.push_back(argv[arg_index]); } } diff --git a/TMessagesProj/jni/voip/webrtc/absl/functional/any_invocable.h b/TMessagesProj/jni/voip/webrtc/absl/functional/any_invocable.h new file mode 100644 index 0000000000..3e783c871d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/functional/any_invocable.h @@ -0,0 +1,316 @@ +// Copyright 2022 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ----------------------------------------------------------------------------- +// File: any_invocable.h +// ----------------------------------------------------------------------------- +// +// This header file defines an `absl::AnyInvocable` type that assumes ownership +// and wraps an object of an invocable type. (Invocable types adhere to the +// concept specified in https://en.cppreference.com/w/cpp/concepts/invocable.) +// +// In general, prefer `absl::AnyInvocable` when you need a type-erased +// function parameter that needs to take ownership of the type. +// +// NOTE: `absl::AnyInvocable` is similar to the C++23 `std::move_only_function` +// abstraction, but has a slightly different API and is not designed to be a +// drop-in replacement or C++11-compatible backfill of that type. +// +// Credits to Matt Calabrese (https://github.com/mattcalabrese) for the original +// implementation. + +#ifndef ABSL_FUNCTIONAL_ANY_INVOCABLE_H_ +#define ABSL_FUNCTIONAL_ANY_INVOCABLE_H_ + +#include +#include +#include +#include + +#include "absl/base/config.h" +#include "absl/functional/internal/any_invocable.h" +#include "absl/meta/type_traits.h" +#include "absl/utility/utility.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN + +// absl::AnyInvocable +// +// `absl::AnyInvocable` is a functional wrapper type, like `std::function`, that +// assumes ownership of an invocable object. 
Unlike `std::function`, an +// `absl::AnyInvocable` is more type-safe and provides the following additional +// benefits: +// +// * Properly adheres to const correctness of the underlying type +// * Is move-only so avoids concurrency problems with copied invocables and +// unnecessary copies in general. +// * Supports reference qualifiers allowing it to perform unique actions (noted +// below). +// +// `absl::AnyInvocable` is a template, and an `absl::AnyInvocable` instantiation +// may wrap any invocable object with a compatible function signature, e.g. +// having arguments and return types convertible to types matching the +// `absl::AnyInvocable` signature, and also matching any stated reference +// qualifiers, as long as that type is moveable. It therefore provides broad +// type erasure for functional objects. +// +// An `absl::AnyInvocable` is typically used as a type-erased function parameter +// for accepting various functional objects: +// +// // Define a function taking an AnyInvocable parameter. +// void my_func(absl::AnyInvocable f) { +// ... +// }; +// +// // That function can accept any invocable type: +// +// // Accept a function reference. We don't need to move a reference. +// int func1() { return 0; }; +// my_func(func1); +// +// // Accept a lambda. We use std::move here because otherwise my_func would +// // copy the lambda. +// auto lambda = []() { return 0; }; +// my_func(std::move(lambda)); +// +// // Accept a function pointer. We don't need to move a function pointer. +// func2 = &func1; +// my_func(func2); +// +// // Accept an std::function by moving it. Note that the lambda is copyable +// // (satisfying std::function requirements) and moveable (satisfying +// // absl::AnyInvocable requirements). +// std::function func6 = []() { return 0; }; +// my_func(std::move(func6)); +// +// `AnyInvocable` also properly respects `const` qualifiers, reference +// qualifiers, and the `noexcept` specification (only in C++ 17 and beyond) as +// part of the user-specified function type (e.g. +// `AnyInvocable`). These qualifiers will be applied to +// the `AnyInvocable` object's `operator()`, and the underlying invocable must +// be compatible with those qualifiers. +// +// Comparison of const and non-const function types: +// +// // Store a closure inside of `func` with the function type `int()`. +// // Note that we have made `func` itself `const`. +// const AnyInvocable func = [](){ return 0; }; +// +// func(); // Compile-error: the passed type `int()` isn't `const`. +// +// // Store a closure inside of `const_func` with the function type +// // `int() const`. +// // Note that we have also made `const_func` itself `const`. +// const AnyInvocable const_func = [](){ return 0; }; +// +// const_func(); // Fine: `int() const` is `const`. +// +// In the above example, the call `func()` would have compiled if +// `std::function` were used even though the types are not const compatible. +// This is a bug, and using `absl::AnyInvocable` properly detects that bug. +// +// In addition to affecting the signature of `operator()`, the `const` and +// reference qualifiers of the function type also appropriately constrain which +// kinds of invocable objects you are allowed to place into the `AnyInvocable` +// instance. If you specify a function type that is const-qualified, then +// anything that you attempt to put into the `AnyInvocable` must be callable on +// a `const` instance of that type. +// +// Constraint example: +// +// // Fine because the lambda is callable when `const`. 
+// AnyInvocable func = [=](){ return 0; }; +// +// // This is a compile-error because the lambda isn't callable when `const`. +// AnyInvocable error = [=]() mutable { return 0; }; +// +// An `&&` qualifier can be used to express that an `absl::AnyInvocable` +// instance should be invoked at most once: +// +// // Invokes `continuation` with the logical result of an operation when +// // that operation completes (common in asynchronous code). +// void CallOnCompletion(AnyInvocable continuation) { +// int result_of_foo = foo(); +// +// // `std::move` is required because the `operator()` of `continuation` is +// // rvalue-reference qualified. +// std::move(continuation)(result_of_foo); +// } +// +// Attempting to call `absl::AnyInvocable` multiple times in such a case +// results in undefined behavior. +template +class AnyInvocable : private internal_any_invocable::Impl { + private: + static_assert( + std::is_function::value, + "The template argument of AnyInvocable must be a function type."); + + using Impl = internal_any_invocable::Impl; + + public: + // The return type of Sig + using result_type = typename Impl::result_type; + + // Constructors + + // Constructs the `AnyInvocable` in an empty state. + AnyInvocable() noexcept = default; + AnyInvocable(std::nullptr_t) noexcept {} // NOLINT + + // Constructs the `AnyInvocable` from an existing `AnyInvocable` by a move. + // Note that `f` is not guaranteed to be empty after move-construction, + // although it may be. + AnyInvocable(AnyInvocable&& /*f*/) noexcept = default; + + // Constructs an `AnyInvocable` from an invocable object. + // + // Upon construction, `*this` is only empty if `f` is a function pointer or + // member pointer type and is null, or if `f` is an `AnyInvocable` that is + // empty. + template ::value>> + AnyInvocable(F&& f) // NOLINT + : Impl(internal_any_invocable::ConversionConstruct(), + std::forward(f)) {} + + // Constructs an `AnyInvocable` that holds an invocable object of type `T`, + // which is constructed in-place from the given arguments. + // + // Example: + // + // AnyInvocable func( + // absl::in_place_type, arg1, arg2); + // + template ::value>> + explicit AnyInvocable(absl::in_place_type_t, Args&&... args) + : Impl(absl::in_place_type>, + std::forward(args)...) { + static_assert(std::is_same>::value, + "The explicit template argument of in_place_type is required " + "to be an unqualified object type."); + } + + // Overload of the above constructor to support list-initialization. + template &, Args...>::value>> + explicit AnyInvocable(absl::in_place_type_t, + std::initializer_list ilist, Args&&... args) + : Impl(absl::in_place_type>, ilist, + std::forward(args)...) { + static_assert(std::is_same>::value, + "The explicit template argument of in_place_type is required " + "to be an unqualified object type."); + } + + // Assignment Operators + + // Assigns an `AnyInvocable` through move-assignment. + // Note that `f` is not guaranteed to be empty after move-assignment + // although it may be. + AnyInvocable& operator=(AnyInvocable&& /*f*/) noexcept = default; + + // Assigns an `AnyInvocable` from a nullptr, clearing the `AnyInvocable`. If + // not empty, destroys the target, putting `*this` into an empty state. + AnyInvocable& operator=(std::nullptr_t) noexcept { + this->Clear(); + return *this; + } + + // Assigns an `AnyInvocable` from an existing `AnyInvocable` instance. 
+ // + // Upon assignment, `*this` is only empty if `f` is a function pointer or + // member pointer type and is null, or if `f` is an `AnyInvocable` that is + // empty. + template ::value>> + AnyInvocable& operator=(F&& f) { + *this = AnyInvocable(std::forward(f)); + return *this; + } + + // Assigns an `AnyInvocable` from a reference to an invocable object. + // Upon assignment, stores a reference to the invocable object in the + // `AnyInvocable` instance. + template < + class F, + typename = absl::enable_if_t< + internal_any_invocable::CanAssignReferenceWrapper::value>> + AnyInvocable& operator=(std::reference_wrapper f) noexcept { + *this = AnyInvocable(f); + return *this; + } + + // Destructor + + // If not empty, destroys the target. + ~AnyInvocable() = default; + + // absl::AnyInvocable::swap() + // + // Exchanges the targets of `*this` and `other`. + void swap(AnyInvocable& other) noexcept { std::swap(*this, other); } + + // abl::AnyInvocable::operator bool() + // + // Returns `true` if `*this` is not empty. + explicit operator bool() const noexcept { return this->HasValue(); } + + // Invokes the target object of `*this`. `*this` must not be empty. + // + // Note: The signature of this function call operator is the same as the + // template parameter `Sig`. + using Impl::operator(); + + // Equality operators + + // Returns `true` if `*this` is empty. + friend bool operator==(const AnyInvocable& f, std::nullptr_t) noexcept { + return !f.HasValue(); + } + + // Returns `true` if `*this` is empty. + friend bool operator==(std::nullptr_t, const AnyInvocable& f) noexcept { + return !f.HasValue(); + } + + // Returns `false` if `*this` is empty. + friend bool operator!=(const AnyInvocable& f, std::nullptr_t) noexcept { + return f.HasValue(); + } + + // Returns `false` if `*this` is empty. + friend bool operator!=(std::nullptr_t, const AnyInvocable& f) noexcept { + return f.HasValue(); + } + + // swap() + // + // Exchanges the targets of `f1` and `f2`. + friend void swap(AnyInvocable& f1, AnyInvocable& f2) noexcept { f1.swap(f2); } + + private: + // Friending other instantiations is necessary for conversions. + template + friend class internal_any_invocable::CoreImpl; +}; + +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_FUNCTIONAL_ANY_INVOCABLE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/functional/bind_front.h b/TMessagesProj/jni/voip/webrtc/absl/functional/bind_front.h index 5b47970e35..f9075bd1d5 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/functional/bind_front.h +++ b/TMessagesProj/jni/voip/webrtc/absl/functional/bind_front.h @@ -30,6 +30,10 @@ #ifndef ABSL_FUNCTIONAL_BIND_FRONT_H_ #define ABSL_FUNCTIONAL_BIND_FRONT_H_ +#if defined(__cpp_lib_bind_front) && __cpp_lib_bind_front >= 201907L +#include // For std::bind_front. +#endif // defined(__cpp_lib_bind_front) && __cpp_lib_bind_front >= 201907L + #include "absl/functional/internal/front_binder.h" #include "absl/utility/utility.h" @@ -46,7 +50,8 @@ ABSL_NAMESPACE_BEGIN // specified. More importantly, it provides more reliable correctness guarantees // than `std::bind()`; while `std::bind()` will silently ignore passing more // parameters than expected, for example, `absl::bind_front()` will report such -// mis-uses as errors. +// mis-uses as errors. In C++20, `absl::bind_front` is replaced by +// `std::bind_front`. // // absl::bind_front(a...) can be seen as storing the results of // std::make_tuple(a...). @@ -170,6 +175,9 @@ ABSL_NAMESPACE_BEGIN // // Doesn't copy "hi". 
// absl::bind_front(Print, absl::string_view(hi))("Chuk"); // +#if defined(__cpp_lib_bind_front) && __cpp_lib_bind_front >= 201907L +using std::bind_front; +#else // defined(__cpp_lib_bind_front) && __cpp_lib_bind_front >= 201907L template constexpr functional_internal::bind_front_t bind_front( F&& func, BoundArgs&&... args) { @@ -177,6 +185,7 @@ constexpr functional_internal::bind_front_t bind_front( absl::in_place, absl::forward(func), absl::forward(args)...); } +#endif // defined(__cpp_lib_bind_front) && __cpp_lib_bind_front >= 201907L ABSL_NAMESPACE_END } // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/functional/function_ref_benchmark.cc b/TMessagesProj/jni/voip/webrtc/absl/functional/function_ref_benchmark.cc deleted file mode 100644 index 045305bfef..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/functional/function_ref_benchmark.cc +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright 2019 The Abseil Authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "absl/functional/function_ref.h" - -#include - -#include "benchmark/benchmark.h" -#include "absl/base/attributes.h" - -namespace absl { -ABSL_NAMESPACE_BEGIN -namespace { - -int dummy = 0; - -void FreeFunction() { benchmark::DoNotOptimize(dummy); } - -struct TrivialFunctor { - void operator()() const { benchmark::DoNotOptimize(dummy); } -}; - -struct LargeFunctor { - void operator()() const { benchmark::DoNotOptimize(this); } - std::string a, b, c; -}; - -template -void ABSL_ATTRIBUTE_NOINLINE CallFunction(Function f, Args&&... args) { - f(std::forward(args)...); -} - -template -void ConstructAndCallFunctionBenchmark(benchmark::State& state, - const Callable& c, Args&&... args) { - for (auto _ : state) { - CallFunction(c, std::forward(args)...); - } -} - -void BM_TrivialStdFunction(benchmark::State& state) { - ConstructAndCallFunctionBenchmark>(state, - TrivialFunctor{}); -} -BENCHMARK(BM_TrivialStdFunction); - -void BM_TrivialFunctionRef(benchmark::State& state) { - ConstructAndCallFunctionBenchmark>(state, - TrivialFunctor{}); -} -BENCHMARK(BM_TrivialFunctionRef); - -void BM_LargeStdFunction(benchmark::State& state) { - ConstructAndCallFunctionBenchmark>(state, - LargeFunctor{}); -} -BENCHMARK(BM_LargeStdFunction); - -void BM_LargeFunctionRef(benchmark::State& state) { - ConstructAndCallFunctionBenchmark>(state, LargeFunctor{}); -} -BENCHMARK(BM_LargeFunctionRef); - -void BM_FunPtrStdFunction(benchmark::State& state) { - ConstructAndCallFunctionBenchmark>(state, FreeFunction); -} -BENCHMARK(BM_FunPtrStdFunction); - -void BM_FunPtrFunctionRef(benchmark::State& state) { - ConstructAndCallFunctionBenchmark>(state, FreeFunction); -} -BENCHMARK(BM_FunPtrFunctionRef); - -// Doesn't include construction or copy overhead in the loop. -template -void CallFunctionBenchmark(benchmark::State& state, const Callable& c, - Args... 
args) { - Function f = c; - for (auto _ : state) { - benchmark::DoNotOptimize(&f); - f(args...); - } -} - -struct FunctorWithTrivialArgs { - void operator()(int a, int b, int c) const { - benchmark::DoNotOptimize(a); - benchmark::DoNotOptimize(b); - benchmark::DoNotOptimize(c); - } -}; - -void BM_TrivialArgsStdFunction(benchmark::State& state) { - CallFunctionBenchmark>( - state, FunctorWithTrivialArgs{}, 1, 2, 3); -} -BENCHMARK(BM_TrivialArgsStdFunction); - -void BM_TrivialArgsFunctionRef(benchmark::State& state) { - CallFunctionBenchmark>( - state, FunctorWithTrivialArgs{}, 1, 2, 3); -} -BENCHMARK(BM_TrivialArgsFunctionRef); - -struct FunctorWithNonTrivialArgs { - void operator()(std::string a, std::string b, std::string c) const { - benchmark::DoNotOptimize(&a); - benchmark::DoNotOptimize(&b); - benchmark::DoNotOptimize(&c); - } -}; - -void BM_NonTrivialArgsStdFunction(benchmark::State& state) { - std::string a, b, c; - CallFunctionBenchmark< - std::function>( - state, FunctorWithNonTrivialArgs{}, a, b, c); -} -BENCHMARK(BM_NonTrivialArgsStdFunction); - -void BM_NonTrivialArgsFunctionRef(benchmark::State& state) { - std::string a, b, c; - CallFunctionBenchmark< - FunctionRef>( - state, FunctorWithNonTrivialArgs{}, a, b, c); -} -BENCHMARK(BM_NonTrivialArgsFunctionRef); - -} // namespace -ABSL_NAMESPACE_END -} // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/functional/function_type_benchmark.cc b/TMessagesProj/jni/voip/webrtc/absl/functional/function_type_benchmark.cc new file mode 100644 index 0000000000..03dc31d8cd --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/functional/function_type_benchmark.cc @@ -0,0 +1,176 @@ +// Copyright 2022 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include +#include + +#include "benchmark/benchmark.h" +#include "absl/base/attributes.h" +#include "absl/functional/any_invocable.h" +#include "absl/functional/function_ref.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace { + +int dummy = 0; + +void FreeFunction() { benchmark::DoNotOptimize(dummy); } + +struct TrivialFunctor { + void operator()() const { benchmark::DoNotOptimize(dummy); } +}; + +struct LargeFunctor { + void operator()() const { benchmark::DoNotOptimize(this); } + std::string a, b, c; +}; + +template +void ABSL_ATTRIBUTE_NOINLINE CallFunction(Function f, Args&&... args) { + f(std::forward(args)...); +} + +template +void ConstructAndCallFunctionBenchmark(benchmark::State& state, + const Callable& c, Args&&... 
args) { + for (auto _ : state) { + CallFunction(c, std::forward(args)...); + } +} + +void BM_TrivialStdFunction(benchmark::State& state) { + ConstructAndCallFunctionBenchmark>(state, + TrivialFunctor{}); +} +BENCHMARK(BM_TrivialStdFunction); + +void BM_TrivialFunctionRef(benchmark::State& state) { + ConstructAndCallFunctionBenchmark>(state, + TrivialFunctor{}); +} +BENCHMARK(BM_TrivialFunctionRef); + +void BM_TrivialAnyInvocable(benchmark::State& state) { + ConstructAndCallFunctionBenchmark>(state, + TrivialFunctor{}); +} +BENCHMARK(BM_TrivialAnyInvocable); + +void BM_LargeStdFunction(benchmark::State& state) { + ConstructAndCallFunctionBenchmark>(state, + LargeFunctor{}); +} +BENCHMARK(BM_LargeStdFunction); + +void BM_LargeFunctionRef(benchmark::State& state) { + ConstructAndCallFunctionBenchmark>(state, LargeFunctor{}); +} +BENCHMARK(BM_LargeFunctionRef); + + +void BM_LargeAnyInvocable(benchmark::State& state) { + ConstructAndCallFunctionBenchmark>(state, + LargeFunctor{}); +} +BENCHMARK(BM_LargeAnyInvocable); + +void BM_FunPtrStdFunction(benchmark::State& state) { + ConstructAndCallFunctionBenchmark>(state, FreeFunction); +} +BENCHMARK(BM_FunPtrStdFunction); + +void BM_FunPtrFunctionRef(benchmark::State& state) { + ConstructAndCallFunctionBenchmark>(state, FreeFunction); +} +BENCHMARK(BM_FunPtrFunctionRef); + +void BM_FunPtrAnyInvocable(benchmark::State& state) { + ConstructAndCallFunctionBenchmark>(state, FreeFunction); +} +BENCHMARK(BM_FunPtrAnyInvocable); + +// Doesn't include construction or copy overhead in the loop. +template +void CallFunctionBenchmark(benchmark::State& state, const Callable& c, + Args... args) { + Function f = c; + for (auto _ : state) { + benchmark::DoNotOptimize(&f); + f(args...); + } +} + +struct FunctorWithTrivialArgs { + void operator()(int a, int b, int c) const { + benchmark::DoNotOptimize(a); + benchmark::DoNotOptimize(b); + benchmark::DoNotOptimize(c); + } +}; + +void BM_TrivialArgsStdFunction(benchmark::State& state) { + CallFunctionBenchmark>( + state, FunctorWithTrivialArgs{}, 1, 2, 3); +} +BENCHMARK(BM_TrivialArgsStdFunction); + +void BM_TrivialArgsFunctionRef(benchmark::State& state) { + CallFunctionBenchmark>( + state, FunctorWithTrivialArgs{}, 1, 2, 3); +} +BENCHMARK(BM_TrivialArgsFunctionRef); + +void BM_TrivialArgsAnyInvocable(benchmark::State& state) { + CallFunctionBenchmark>( + state, FunctorWithTrivialArgs{}, 1, 2, 3); +} +BENCHMARK(BM_TrivialArgsAnyInvocable); + +struct FunctorWithNonTrivialArgs { + void operator()(std::string a, std::string b, std::string c) const { + benchmark::DoNotOptimize(&a); + benchmark::DoNotOptimize(&b); + benchmark::DoNotOptimize(&c); + } +}; + +void BM_NonTrivialArgsStdFunction(benchmark::State& state) { + std::string a, b, c; + CallFunctionBenchmark< + std::function>( + state, FunctorWithNonTrivialArgs{}, a, b, c); +} +BENCHMARK(BM_NonTrivialArgsStdFunction); + +void BM_NonTrivialArgsFunctionRef(benchmark::State& state) { + std::string a, b, c; + CallFunctionBenchmark< + FunctionRef>( + state, FunctorWithNonTrivialArgs{}, a, b, c); +} +BENCHMARK(BM_NonTrivialArgsFunctionRef); + +void BM_NonTrivialArgsAnyInvocable(benchmark::State& state) { + std::string a, b, c; + CallFunctionBenchmark< + AnyInvocable>( + state, FunctorWithNonTrivialArgs{}, a, b, c); +} +BENCHMARK(BM_NonTrivialArgsAnyInvocable); + +} // namespace +ABSL_NAMESPACE_END +} // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/functional/internal/any_invocable.h b/TMessagesProj/jni/voip/webrtc/absl/functional/internal/any_invocable.h new 
file mode 100644 index 0000000000..8fce4bf614 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/functional/internal/any_invocable.h @@ -0,0 +1,877 @@ +// Copyright 2022 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Implementation details for `absl::AnyInvocable` + +#ifndef ABSL_FUNCTIONAL_INTERNAL_ANY_INVOCABLE_H_ +#define ABSL_FUNCTIONAL_INTERNAL_ANY_INVOCABLE_H_ + +//////////////////////////////////////////////////////////////////////////////// +// // +// This implementation of the proposed `any_invocable` uses an approach that // +// chooses between local storage and remote storage for the contained target // +// object based on the target object's size, alignment requirements, and // +// whether or not it has a nothrow move constructor. Additional optimizations // +// are performed when the object is a trivially copyable type [basic.types]. // +// // +// There are three datamembers per `AnyInvocable` instance // +// // +// 1) A union containing either // +// - A pointer to the target object referred to via a void*, or // +// - the target object, emplaced into a raw char buffer // +// // +// 2) A function pointer to a "manager" function operation that takes a // +// discriminator and logically branches to either perform a move operation // +// or destroy operation based on that discriminator. // +// // +// 3) A function pointer to an "invoker" function operation that invokes the // +// target object, directly returning the result. // +// // +// When in the logically empty state, the manager function is an empty // +// function and the invoker function is one that would be undefined-behavior // +// to call. // +// // +// An additional optimization is performed when converting from one // +// AnyInvocable to another where only the noexcept specification and/or the // +// cv/ref qualifiers of the function type differ. In these cases, the // +// conversion works by "moving the guts", similar to if they were the same // +// exact type, as opposed to having to perform an additional layer of // +// wrapping through remote storage. // +// // +//////////////////////////////////////////////////////////////////////////////// + +// IWYU pragma: private, include "absl/functional/any_invocable.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/base/config.h" +#include "absl/base/internal/invoke.h" +#include "absl/base/macros.h" +#include "absl/meta/type_traits.h" +#include "absl/utility/utility.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN + +// Helper macro used to prevent spelling `noexcept` in language versions older +// than C++17, where it is not part of the type system, in order to avoid +// compilation failures and internal compiler errors. 
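The small-object criterion spelled out in the overview comment above (a two-pointer buffer, max_align_t alignment, and a nothrow move constructor) can be restated as a standalone trait to see which callables stay inline. The sketch below is illustrative only and uses invented names (FitsInLocalStorage, ThreeStrings); the header's own trait, IsStoredLocally, appears further down.

#include <cstddef>
#include <string>
#include <type_traits>

// Illustrative restatement of the local-storage rule described above.
constexpr std::size_t kSketchStorageSize = 2 * sizeof(void*);
constexpr std::size_t kSketchAlignment = alignof(std::max_align_t);

template <class T>
constexpr bool FitsInLocalStorage() {
  return sizeof(T) <= kSketchStorageSize &&
         alignof(T) <= kSketchAlignment &&
         kSketchAlignment % alignof(T) == 0 &&
         std::is_nothrow_move_constructible<T>::value;
}

// A function pointer is small, suitably aligned, and nothrow-movable.
static_assert(FitsInLocalStorage<int (*)(int)>(), "stored inline");

// A callable dragging three std::string members is larger than two pointers
// on mainstream standard libraries, so it would use remote (heap) storage.
struct ThreeStrings {
  std::string a, b, c;
  void operator()() const {}
};
static_assert(!FitsInLocalStorage<ThreeStrings>(), "stored remotely");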
+#if ABSL_INTERNAL_CPLUSPLUS_LANG >= 201703L +#define ABSL_INTERNAL_NOEXCEPT_SPEC(noex) noexcept(noex) +#else +#define ABSL_INTERNAL_NOEXCEPT_SPEC(noex) +#endif + +// Defined in functional/any_invocable.h +template +class AnyInvocable; + +namespace internal_any_invocable { + +// Constants relating to the small-object-storage for AnyInvocable +enum StorageProperty : std::size_t { + kAlignment = alignof(std::max_align_t), // The alignment of the storage + kStorageSize = sizeof(void*) * 2 // The size of the storage +}; + +//////////////////////////////////////////////////////////////////////////////// +// +// A metafunction for checking if a type is an AnyInvocable instantiation. +// This is used during conversion operations. +template +struct IsAnyInvocable : std::false_type {}; + +template +struct IsAnyInvocable> : std::true_type {}; +// +//////////////////////////////////////////////////////////////////////////////// + +// A type trait that tells us whether or not a target function type should be +// stored locally in the small object optimization storage +template +using IsStoredLocally = std::integral_constant< + bool, sizeof(T) <= kStorageSize && alignof(T) <= kAlignment && + kAlignment % alignof(T) == 0 && + std::is_nothrow_move_constructible::value>; + +// An implementation of std::remove_cvref_t of C++20. +template +using RemoveCVRef = + typename std::remove_cv::type>::type; + +//////////////////////////////////////////////////////////////////////////////// +// +// An implementation of the C++ standard INVOKE pseudo-macro, operation is +// equivalent to std::invoke except that it forces an implicit conversion to the +// specified return type. If "R" is void, the function is executed and the +// return value is simply ignored. +template ::value>> +void InvokeR(F&& f, P&&... args) { + absl::base_internal::invoke(std::forward(f), std::forward
<P>
(args)...); +} + +template ::value, int> = 0> +ReturnType InvokeR(F&& f, P&&... args) { + return absl::base_internal::invoke(std::forward(f), + std::forward
<P>
(args)...); +} + +// +//////////////////////////////////////////////////////////////////////////////// + +//////////////////////////////////////////////////////////////////////////////// +/// +// A metafunction that takes a "T" corresponding to a parameter type of the +// user's specified function type, and yields the parameter type to use for the +// type-erased invoker. In order to prevent observable moves, this must be +// either a reference or, if the type is trivial, the original parameter type +// itself. Since the parameter type may be incomplete at the point that this +// metafunction is used, we can only do this optimization for scalar types +// rather than for any trivial type. +template +T ForwardImpl(std::true_type); + +template +T&& ForwardImpl(std::false_type); + +// NOTE: We deliberately use an intermediate struct instead of a direct alias, +// as a workaround for b/206991861 on MSVC versions < 1924. +template +struct ForwardedParameter { + using type = decltype(( + ForwardImpl)(std::integral_constant::value>())); +}; + +template +using ForwardedParameterType = typename ForwardedParameter::type; +// +//////////////////////////////////////////////////////////////////////////////// + +// A discriminator when calling the "manager" function that describes operation +// type-erased operation should be invoked. +// +// "relocate_from_to" specifies that the manager should perform a move. +// +// "dispose" specifies that the manager should perform a destroy. +enum class FunctionToCall : bool { relocate_from_to, dispose }; + +// The portion of `AnyInvocable` state that contains either a pointer to the +// target object or the object itself in local storage +union TypeErasedState { + struct { + // A pointer to the type-erased object when remotely stored + void* target; + // The size of the object for `RemoteManagerTrivial` + std::size_t size; + } remote; + + // Local-storage for the type-erased object when small and trivial enough + alignas(kAlignment) char storage[kStorageSize]; +}; + +// A typed accessor for the object in `TypeErasedState` storage +template +T& ObjectInLocalStorage(TypeErasedState* const state) { + // We launder here because the storage may be reused with the same type. +#if ABSL_INTERNAL_CPLUSPLUS_LANG >= 201703L + return *std::launder(reinterpret_cast(&state->storage)); +#elif ABSL_HAVE_BUILTIN(__builtin_launder) + return *__builtin_launder(reinterpret_cast(&state->storage)); +#else + + // When `std::launder` or equivalent are not available, we rely on undefined + // behavior, which works as intended on Abseil's officially supported + // platforms as of Q2 2022. +#if !defined(__clang__) && defined(__GNUC__) +#pragma GCC diagnostic ignored "-Wstrict-aliasing" +#pragma GCC diagnostic push +#endif + return *reinterpret_cast(&state->storage); +#if !defined(__clang__) && defined(__GNUC__) +#pragma GCC diagnostic pop +#endif + +#endif +} + +// The type for functions issuing lifetime-related operations: move and dispose +// A pointer to such a function is contained in each `AnyInvocable` instance. +// NOTE: When specifying `FunctionToCall::`dispose, the same state must be +// passed as both "from" and "to". +using ManagerType = void(FunctionToCall /*operation*/, + TypeErasedState* /*from*/, TypeErasedState* /*to*/) + ABSL_INTERNAL_NOEXCEPT_SPEC(true); + +// The type for functions issuing the actual invocation of the object +// A pointer to such a function is contained in each AnyInvocable instance. 
+template +using InvokerType = ReturnType(TypeErasedState*, ForwardedParameterType
<P>
...) + ABSL_INTERNAL_NOEXCEPT_SPEC(SigIsNoexcept); + +// The manager that is used when AnyInvocable is empty +inline void EmptyManager(FunctionToCall /*operation*/, + TypeErasedState* /*from*/, + TypeErasedState* /*to*/) noexcept {} + +// The manager that is used when a target function is in local storage and is +// a trivially copyable type. +inline void LocalManagerTrivial(FunctionToCall /*operation*/, + TypeErasedState* const from, + TypeErasedState* const to) noexcept { + // This single statement without branching handles both possible operations. + // + // For FunctionToCall::dispose, "from" and "to" point to the same state, and + // so this assignment logically would do nothing. + // + // Note: Correctness here relies on http://wg21.link/p0593, which has only + // become standard in C++20, though implementations do not break it in + // practice for earlier versions of C++. + // + // The correct way to do this without that paper is to first placement-new a + // default-constructed T in "to->storage" prior to the memmove, but doing so + // requires a different function to be created for each T that is stored + // locally, which can cause unnecessary bloat and be less cache friendly. + *to = *from; + + // Note: Because the type is trivially copyable, the destructor does not need + // to be called ("trivially copyable" requires a trivial destructor). +} + +// The manager that is used when a target function is in local storage and is +// not a trivially copyable type. +template +void LocalManagerNontrivial(FunctionToCall operation, + TypeErasedState* const from, + TypeErasedState* const to) noexcept { + static_assert(IsStoredLocally::value, + "Local storage must only be used for supported types."); + static_assert(!std::is_trivially_copyable::value, + "Locally stored types must be trivially copyable."); + + T& from_object = (ObjectInLocalStorage)(from); + + switch (operation) { + case FunctionToCall::relocate_from_to: + // NOTE: Requires that the left-hand operand is already empty. + ::new (static_cast(&to->storage)) T(std::move(from_object)); + ABSL_FALLTHROUGH_INTENDED; + case FunctionToCall::dispose: + from_object.~T(); // Must not throw. // NOLINT + return; + } + ABSL_INTERNAL_UNREACHABLE; +} + +// The invoker that is used when a target function is in local storage +// Note: QualTRef here is the target function type along with cv and reference +// qualifiers that must be used when calling the function. +template +ReturnType LocalInvoker( + TypeErasedState* const state, + ForwardedParameterType
<P>
... args) noexcept(SigIsNoexcept) { + using RawT = RemoveCVRef; + static_assert( + IsStoredLocally::value, + "Target object must be in local storage in order to be invoked from it."); + + auto& f = (ObjectInLocalStorage)(state); + return (InvokeR)(static_cast(f), + static_cast>(args)...); +} + +// The manager that is used when a target function is in remote storage and it +// has a trivial destructor +inline void RemoteManagerTrivial(FunctionToCall operation, + TypeErasedState* const from, + TypeErasedState* const to) noexcept { + switch (operation) { + case FunctionToCall::relocate_from_to: + // NOTE: Requires that the left-hand operand is already empty. + to->remote = from->remote; + return; + case FunctionToCall::dispose: +#if defined(__cpp_sized_deallocation) + ::operator delete(from->remote.target, from->remote.size); +#else // __cpp_sized_deallocation + ::operator delete(from->remote.target); +#endif // __cpp_sized_deallocation + return; + } + ABSL_INTERNAL_UNREACHABLE; +} + +// The manager that is used when a target function is in remote storage and the +// destructor of the type is not trivial +template +void RemoteManagerNontrivial(FunctionToCall operation, + TypeErasedState* const from, + TypeErasedState* const to) noexcept { + static_assert(!IsStoredLocally::value, + "Remote storage must only be used for types that do not " + "qualify for local storage."); + + switch (operation) { + case FunctionToCall::relocate_from_to: + // NOTE: Requires that the left-hand operand is already empty. + to->remote.target = from->remote.target; + return; + case FunctionToCall::dispose: + ::delete static_cast(from->remote.target); // Must not throw. + return; + } + ABSL_INTERNAL_UNREACHABLE; +} + +// The invoker that is used when a target function is in remote storage +template +ReturnType RemoteInvoker( + TypeErasedState* const state, + ForwardedParameterType
<P>
... args) noexcept(SigIsNoexcept) { + using RawT = RemoveCVRef; + static_assert(!IsStoredLocally::value, + "Target object must be in remote storage in order to be " + "invoked from it."); + + auto& f = *static_cast(state->remote.target); + return (InvokeR)(static_cast(f), + static_cast>(args)...); +} + +//////////////////////////////////////////////////////////////////////////////// +// +// A metafunction that checks if a type T is an instantiation of +// absl::in_place_type_t (needed for constructor constraints of AnyInvocable). +template +struct IsInPlaceType : std::false_type {}; + +template +struct IsInPlaceType> : std::true_type {}; +// +//////////////////////////////////////////////////////////////////////////////// + +// A constructor name-tag used with CoreImpl (below) to request the +// conversion-constructor. QualDecayedTRef is the decayed-type of the object to +// wrap, along with the cv and reference qualifiers that must be applied when +// performing an invocation of the wrapped object. +template +struct TypedConversionConstruct {}; + +// A helper base class for all core operations of AnyInvocable. Most notably, +// this class creates the function call operator and constraint-checkers so that +// the top-level class does not have to be a series of partial specializations. +// +// Note: This definition exists (as opposed to being a declaration) so that if +// the user of the top-level template accidentally passes a template argument +// that is not a function type, they will get a static_assert in AnyInvocable's +// class body rather than an error stating that Impl is not defined. +template +class Impl {}; // Note: This is partially-specialized later. + +// A std::unique_ptr deleter that deletes memory allocated via ::operator new. +#if defined(__cpp_sized_deallocation) +class TrivialDeleter { + public: + explicit TrivialDeleter(std::size_t size) : size_(size) {} + + void operator()(void* target) const { + ::operator delete(target, size_); + } + + private: + std::size_t size_; +}; +#else // __cpp_sized_deallocation +class TrivialDeleter { + public: + explicit TrivialDeleter(std::size_t) {} + + void operator()(void* target) const { ::operator delete(target); } +}; +#endif // __cpp_sized_deallocation + +template +class CoreImpl; + +constexpr bool IsCompatibleConversion(void*, void*) { return false; } +template +constexpr bool IsCompatibleConversion(CoreImpl*, + CoreImpl*) { + return !NoExceptDest || NoExceptSrc; +} + +// A helper base class for all core operations of AnyInvocable that do not +// depend on the cv/ref qualifiers of the function type. +template +class CoreImpl { + public: + using result_type = ReturnType; + + CoreImpl() noexcept : manager_(EmptyManager), invoker_(nullptr) {} + + enum class TargetType : int { + kPointer = 0, + kCompatibleAnyInvocable = 1, + kIncompatibleAnyInvocable = 2, + kOther = 3, + }; + + // Note: QualDecayedTRef here includes the cv-ref qualifiers associated with + // the invocation of the Invocable. The unqualified type is the target object + // type to be stored. + template + explicit CoreImpl(TypedConversionConstruct, F&& f) { + using DecayedT = RemoveCVRef; + + constexpr TargetType kTargetType = + (std::is_pointer::value || + std::is_member_pointer::value) + ? TargetType::kPointer + : IsCompatibleAnyInvocable::value + ? TargetType::kCompatibleAnyInvocable + : IsAnyInvocable::value + ? 
TargetType::kIncompatibleAnyInvocable + : TargetType::kOther; + // NOTE: We only use integers instead of enums as template parameters in + // order to work around a bug on C++14 under MSVC 2017. + // See b/236131881. + Initialize(kTargetType), QualDecayedTRef>( + std::forward(f)); + } + + // Note: QualTRef here includes the cv-ref qualifiers associated with the + // invocation of the Invocable. The unqualified type is the target object + // type to be stored. + template + explicit CoreImpl(absl::in_place_type_t, Args&&... args) { + InitializeStorage(std::forward(args)...); + } + + CoreImpl(CoreImpl&& other) noexcept { + other.manager_(FunctionToCall::relocate_from_to, &other.state_, &state_); + manager_ = other.manager_; + invoker_ = other.invoker_; + other.manager_ = EmptyManager; + other.invoker_ = nullptr; + } + + CoreImpl& operator=(CoreImpl&& other) noexcept { + // Put the left-hand operand in an empty state. + // + // Note: A full reset that leaves us with an object that has its invariants + // intact is necessary in order to handle self-move. This is required by + // types that are used with certain operations of the standard library, such + // as the default definition of std::swap when both operands target the same + // object. + Clear(); + + // Perform the actual move/destory operation on the target function. + other.manager_(FunctionToCall::relocate_from_to, &other.state_, &state_); + manager_ = other.manager_; + invoker_ = other.invoker_; + other.manager_ = EmptyManager; + other.invoker_ = nullptr; + + return *this; + } + + ~CoreImpl() { manager_(FunctionToCall::dispose, &state_, &state_); } + + // Check whether or not the AnyInvocable is in the empty state. + bool HasValue() const { return invoker_ != nullptr; } + + // Effects: Puts the object into its empty state. + void Clear() { + manager_(FunctionToCall::dispose, &state_, &state_); + manager_ = EmptyManager; + invoker_ = nullptr; + } + + template = 0> + void Initialize(F&& f) { +// This condition handles types that decay into pointers, which includes +// function references. Since function references cannot be null, GCC warns +// against comparing their decayed form with nullptr. +// Since this is template-heavy code, we prefer to disable these warnings +// locally instead of adding yet another overload of this function. +#if !defined(__clang__) && defined(__GNUC__) +#pragma GCC diagnostic ignored "-Wpragmas" +#pragma GCC diagnostic ignored "-Waddress" +#pragma GCC diagnostic ignored "-Wnonnull-compare" +#pragma GCC diagnostic push +#endif + if (static_cast>(f) == nullptr) { +#if !defined(__clang__) && defined(__GNUC__) +#pragma GCC diagnostic pop +#endif + manager_ = EmptyManager; + invoker_ = nullptr; + return; + } + InitializeStorage(std::forward(f)); + } + + template = 0> + void Initialize(F&& f) { + // In this case we can "steal the guts" of the other AnyInvocable. + f.manager_(FunctionToCall::relocate_from_to, &f.state_, &state_); + manager_ = f.manager_; + invoker_ = f.invoker_; + + f.manager_ = EmptyManager; + f.invoker_ = nullptr; + } + + template = 0> + void Initialize(F&& f) { + if (f.HasValue()) { + InitializeStorage(std::forward(f)); + } else { + manager_ = EmptyManager; + invoker_ = nullptr; + } + } + + template > + void Initialize(F&& f) { + InitializeStorage(std::forward(f)); + } + + // Use local (inline) storage for applicable target object types. + template >::value>> + void InitializeStorage(Args&&... 
args) { + using RawT = RemoveCVRef; + ::new (static_cast(&state_.storage)) + RawT(std::forward(args)...); + + invoker_ = LocalInvoker; + // We can simplify our manager if we know the type is trivially copyable. + InitializeLocalManager(); + } + + // Use remote storage for target objects that cannot be stored locally. + template >::value, + int> = 0> + void InitializeStorage(Args&&... args) { + InitializeRemoteManager>(std::forward(args)...); + // This is set after everything else in case an exception is thrown in an + // earlier step of the initialization. + invoker_ = RemoteInvoker; + } + + template ::value>> + void InitializeLocalManager() { + manager_ = LocalManagerTrivial; + } + + template ::value, int> = 0> + void InitializeLocalManager() { + manager_ = LocalManagerNontrivial; + } + + template + using HasTrivialRemoteStorage = + std::integral_constant::value && + alignof(T) <= + ABSL_INTERNAL_DEFAULT_NEW_ALIGNMENT>; + + template ::value>> + void InitializeRemoteManager(Args&&... args) { + // unique_ptr is used for exception-safety in case construction throws. + std::unique_ptr uninitialized_target( + ::operator new(sizeof(T)), TrivialDeleter(sizeof(T))); + ::new (uninitialized_target.get()) T(std::forward(args)...); + state_.remote.target = uninitialized_target.release(); + state_.remote.size = sizeof(T); + manager_ = RemoteManagerTrivial; + } + + template ::value, int> = 0> + void InitializeRemoteManager(Args&&... args) { + state_.remote.target = ::new T(std::forward(args)...); + manager_ = RemoteManagerNontrivial; + } + + ////////////////////////////////////////////////////////////////////////////// + // + // Type trait to determine if the template argument is an AnyInvocable whose + // function type is compatible enough with ours such that we can + // "move the guts" out of it when moving, rather than having to place a new + // object into remote storage. + + template + struct IsCompatibleAnyInvocable { + static constexpr bool value = false; + }; + + template + struct IsCompatibleAnyInvocable> { + static constexpr bool value = + (IsCompatibleConversion)(static_cast< + typename AnyInvocable::CoreImpl*>( + nullptr), + static_cast(nullptr)); + }; + + // + ////////////////////////////////////////////////////////////////////////////// + + TypeErasedState state_; + ManagerType* manager_; + InvokerType* invoker_; +}; + +// A constructor name-tag used with Impl to request the +// conversion-constructor +struct ConversionConstruct {}; + +//////////////////////////////////////////////////////////////////////////////// +// +// A metafunction that is normally an identity metafunction except that when +// given a std::reference_wrapper, it yields T&. This is necessary because +// currently std::reference_wrapper's operator() is not conditionally noexcept, +// so when checking if such an Invocable is nothrow-invocable, we must pull out +// the underlying type. +template +struct UnwrapStdReferenceWrapperImpl { + using type = T; +}; + +template +struct UnwrapStdReferenceWrapperImpl> { + using type = T&; +}; + +template +using UnwrapStdReferenceWrapper = + typename UnwrapStdReferenceWrapperImpl::type; +// +//////////////////////////////////////////////////////////////////////////////// + +// An alias that always yields std::true_type (used with constraints) where +// substitution failures happen when forming the template arguments. 
+template +using TrueAlias = + std::integral_constant*) != 0>; + +/*SFINAE constraints for the conversion-constructor.*/ +template , AnyInvocable>::value>> +using CanConvert = TrueAlias< + absl::enable_if_t>::value>, + absl::enable_if_t::template CallIsValid::value>, + absl::enable_if_t< + Impl::template CallIsNoexceptIfSigIsNoexcept::value>, + absl::enable_if_t, F>::value>>; + +/*SFINAE constraints for the std::in_place constructors.*/ +template +using CanEmplace = TrueAlias< + absl::enable_if_t::template CallIsValid::value>, + absl::enable_if_t< + Impl::template CallIsNoexceptIfSigIsNoexcept::value>, + absl::enable_if_t, Args...>::value>>; + +/*SFINAE constraints for the conversion-assign operator.*/ +template , AnyInvocable>::value>> +using CanAssign = TrueAlias< + absl::enable_if_t::template CallIsValid::value>, + absl::enable_if_t< + Impl::template CallIsNoexceptIfSigIsNoexcept::value>, + absl::enable_if_t, F>::value>>; + +/*SFINAE constraints for the reference-wrapper conversion-assign operator.*/ +template +using CanAssignReferenceWrapper = TrueAlias< + absl::enable_if_t< + Impl::template CallIsValid>::value>, + absl::enable_if_t::template CallIsNoexceptIfSigIsNoexcept< + std::reference_wrapper>::value>>; + +//////////////////////////////////////////////////////////////////////////////// +// +// The constraint for checking whether or not a call meets the noexcept +// callability requirements. This is a preprocessor macro because specifying it +// this way as opposed to a disjunction/branch can improve the user-side error +// messages and avoids an instantiation of std::is_nothrow_invocable_r in the +// cases where the user did not specify a noexcept function type. +// +#define ABSL_INTERNAL_ANY_INVOCABLE_NOEXCEPT_CONSTRAINT(inv_quals, noex) \ + ABSL_INTERNAL_ANY_INVOCABLE_NOEXCEPT_CONSTRAINT_##noex(inv_quals) + +// The disjunction below is because we can't rely on std::is_nothrow_invocable_r +// to give the right result when ReturnType is non-moveable in toolchains that +// don't treat non-moveable result types correctly. For example this was the +// case in libc++ before commit c3a24882 (2022-05). +#define ABSL_INTERNAL_ANY_INVOCABLE_NOEXCEPT_CONSTRAINT_true(inv_quals) \ + absl::enable_if_t> inv_quals, \ + P...>, \ + std::conjunction< \ + std::is_nothrow_invocable< \ + UnwrapStdReferenceWrapper> inv_quals, P...>, \ + std::is_same< \ + ReturnType, \ + absl::base_internal::invoke_result_t< \ + UnwrapStdReferenceWrapper> inv_quals, \ + P...>>>>::value> + +#define ABSL_INTERNAL_ANY_INVOCABLE_NOEXCEPT_CONSTRAINT_false(inv_quals) +// +//////////////////////////////////////////////////////////////////////////////// + +// A macro to generate partial specializations of Impl with the different +// combinations of supported cv/reference qualifiers and noexcept specifier. +// +// Here, `cv` are the cv-qualifiers if any, `ref` is the ref-qualifier if any, +// inv_quals is the reference type to be used when invoking the target, and +// noex is "true" if the function type is noexcept, or false if it is not. +// +// The CallIsValid condition is more complicated than simply using +// absl::base_internal::is_invocable_r because we can't rely on it to give the +// right result when ReturnType is non-moveable in toolchains that don't treat +// non-moveable result types correctly. For example this was the case in libc++ +// before commit c3a24882 (2022-05). 
+#define ABSL_INTERNAL_ANY_INVOCABLE_IMPL_(cv, ref, inv_quals, noex) \ + template \ + class Impl \ + : public CoreImpl { \ + public: \ + /*The base class, which contains the datamembers and core operations*/ \ + using Core = CoreImpl; \ + \ + /*SFINAE constraint to check if F is invocable with the proper signature*/ \ + template \ + using CallIsValid = TrueAlias inv_quals, P...>, \ + std::is_same inv_quals, P...>>>::value>>; \ + \ + /*SFINAE constraint to check if F is nothrow-invocable when necessary*/ \ + template \ + using CallIsNoexceptIfSigIsNoexcept = \ + TrueAlias; \ + \ + /*Put the AnyInvocable into an empty state.*/ \ + Impl() = default; \ + \ + /*The implementation of a conversion-constructor from "f*/ \ + /*This forwards to Core, attaching inv_quals so that the base class*/ \ + /*knows how to properly type-erase the invocation.*/ \ + template \ + explicit Impl(ConversionConstruct, F&& f) \ + : Core(TypedConversionConstruct< \ + typename std::decay::type inv_quals>(), \ + std::forward(f)) {} \ + \ + /*Forward along the in-place construction parameters.*/ \ + template \ + explicit Impl(absl::in_place_type_t, Args&&... args) \ + : Core(absl::in_place_type inv_quals>, \ + std::forward(args)...) {} \ + \ + InvokerType* ExtractInvoker() cv { \ + using QualifiedTestType = int cv ref; \ + auto* invoker = this->invoker_; \ + if (!std::is_const::value && \ + std::is_rvalue_reference::value) { \ + ABSL_HARDENING_ASSERT([this]() { \ + /* We checked that this isn't const above, so const_cast is safe */ \ + const_cast(this)->invoker_ = \ + [](TypeErasedState*, \ + ForwardedParameterType
<P>
...) noexcept(noex) -> ReturnType { \ + ABSL_HARDENING_ASSERT(false && "AnyInvocable use-after-move"); \ + std::terminate(); \ + }; \ + return this->HasValue(); \ + }()); \ + } \ + return invoker; \ + } \ + \ + /*The actual invocation operation with the proper signature*/ \ + ReturnType operator()(P... args) cv ref noexcept(noex) { \ + assert(this->invoker_ != nullptr); \ + return this->ExtractInvoker()( \ + const_cast(&this->state_), \ + static_cast>(args)...); \ + } \ + } + +// Define the `noexcept(true)` specialization only for C++17 and beyond, when +// `noexcept` is part of the type system. +#if ABSL_INTERNAL_CPLUSPLUS_LANG >= 201703L +// A convenience macro that defines specializations for the noexcept(true) and +// noexcept(false) forms, given the other properties. +#define ABSL_INTERNAL_ANY_INVOCABLE_IMPL(cv, ref, inv_quals) \ + ABSL_INTERNAL_ANY_INVOCABLE_IMPL_(cv, ref, inv_quals, false); \ + ABSL_INTERNAL_ANY_INVOCABLE_IMPL_(cv, ref, inv_quals, true) +#else +#define ABSL_INTERNAL_ANY_INVOCABLE_IMPL(cv, ref, inv_quals) \ + ABSL_INTERNAL_ANY_INVOCABLE_IMPL_(cv, ref, inv_quals, false) +#endif + +// Non-ref-qualified partial specializations +ABSL_INTERNAL_ANY_INVOCABLE_IMPL(, , &); +ABSL_INTERNAL_ANY_INVOCABLE_IMPL(const, , const&); + +// Lvalue-ref-qualified partial specializations +ABSL_INTERNAL_ANY_INVOCABLE_IMPL(, &, &); +ABSL_INTERNAL_ANY_INVOCABLE_IMPL(const, &, const&); + +// Rvalue-ref-qualified partial specializations +ABSL_INTERNAL_ANY_INVOCABLE_IMPL(, &&, &&); +ABSL_INTERNAL_ANY_INVOCABLE_IMPL(const, &&, const&&); + +// Undef the detail-only macros. +#undef ABSL_INTERNAL_ANY_INVOCABLE_IMPL +#undef ABSL_INTERNAL_ANY_INVOCABLE_IMPL_ +#undef ABSL_INTERNAL_ANY_INVOCABLE_NOEXCEPT_CONSTRAINT_false +#undef ABSL_INTERNAL_ANY_INVOCABLE_NOEXCEPT_CONSTRAINT_true +#undef ABSL_INTERNAL_ANY_INVOCABLE_NOEXCEPT_CONSTRAINT +#undef ABSL_INTERNAL_NOEXCEPT_SPEC + +} // namespace internal_any_invocable +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_FUNCTIONAL_INTERNAL_ANY_INVOCABLE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/hash/hash.h b/TMessagesProj/jni/voip/webrtc/absl/hash/hash.h index f31fde4059..74e2d7c053 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/hash/hash.h +++ b/TMessagesProj/jni/voip/webrtc/absl/hash/hash.h @@ -40,6 +40,11 @@ // each process. E.g., `absl::Hash{}(9)` in one process and // `absl::Hash{}(9)` in another process are likely to differ. // +// `absl::Hash` may also produce different values from different dynamically +// loaded libraries. For this reason, `absl::Hash` values must never cross +// boundries in dynamically loaded libraries (including when used in types like +// hash containers.) +// // `absl::Hash` is intended to strongly mix input bits with a target of passing // an [Avalanche Test](https://en.wikipedia.org/wiki/Avalanche_effect). // diff --git a/TMessagesProj/jni/voip/webrtc/absl/hash/hash_test.cc b/TMessagesProj/jni/voip/webrtc/absl/hash/hash_test.cc index 39ff8f525b..744a2e54c8 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/hash/hash_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/hash/hash_test.cc @@ -185,6 +185,8 @@ TEST(HashValueTest, FloatingPoint) { TEST(HashValueTest, Pointer) { EXPECT_TRUE((is_hashable::value)); + EXPECT_TRUE((is_hashable::value)); + EXPECT_TRUE((is_hashable::value)); int i; int* ptr = &i; @@ -220,10 +222,89 @@ TEST(HashValueTest, PointerAlignment) { // Limit the scope to the bits we would be using for Swisstable. 
constexpr size_t kMask = (1 << (kLog2NumValues + 7)) - 1; size_t stuck_bits = (~bits_or | bits_and) & kMask; - EXPECT_EQ(stuck_bits, 0) << "0x" << std::hex << stuck_bits; + EXPECT_EQ(stuck_bits, 0u) << "0x" << std::hex << stuck_bits; } } +TEST(HashValueTest, PointerToMember) { + struct Bass { + void q() {} + }; + + struct A : Bass { + virtual ~A() = default; + virtual void vfa() {} + + static auto pq() -> void (A::*)() { return &A::q; } + }; + + struct B : Bass { + virtual ~B() = default; + virtual void vfb() {} + + static auto pq() -> void (B::*)() { return &B::q; } + }; + + struct Foo : A, B { + void f1() {} + void f2() const {} + + int g1() & { return 0; } + int g2() const & { return 0; } + int g3() && { return 0; } + int g4() const && { return 0; } + + int h1() & { return 0; } + int h2() const & { return 0; } + int h3() && { return 0; } + int h4() const && { return 0; } + + int a; + int b; + + const int c = 11; + const int d = 22; + }; + + EXPECT_TRUE((is_hashable::value)); + EXPECT_TRUE((is_hashable::value)); + + EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly( + std::make_tuple(&Foo::a, &Foo::b, static_cast(nullptr)))); + + EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly( + std::make_tuple(&Foo::c, &Foo::d, static_cast(nullptr), + &Foo::a, &Foo::b))); + + EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly(std::make_tuple( + &Foo::f1, static_cast(nullptr)))); + + EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly(std::make_tuple( + &Foo::f2, static_cast(nullptr)))); + + EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly(std::make_tuple( + &Foo::g1, &Foo::h1, static_cast(nullptr)))); + + EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly(std::make_tuple( + &Foo::g2, &Foo::h2, static_cast(nullptr)))); + + EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly(std::make_tuple( + &Foo::g3, &Foo::h3, static_cast(nullptr)))); + + EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly(std::make_tuple( + &Foo::g4, &Foo::h4, static_cast(nullptr)))); + + EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly( + std::make_tuple(static_cast(&Foo::vfa), + static_cast(&Foo::vfb), + static_cast(nullptr)))); + + EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly( + std::make_tuple(static_cast(Foo::A::pq()), + static_cast(Foo::B::pq()), + static_cast(nullptr)))); +} + TEST(HashValueTest, PairAndTuple) { EXPECT_TRUE((is_hashable>::value)); EXPECT_TRUE((is_hashable>::value)); @@ -656,10 +737,10 @@ TEST(HashValueTest, CombinePiecewiseBuffer) { // // This test is run on a buffer that is a multiple of the stride size, and one // that isn't. 
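A brief usage sketch of the pointer-to-member hashing exercised by the new PointerToMember test above. Foo here is a hypothetical type, not the one from the test, and the values are only meaningful inside the current process and dynamically loaded library, per the note added to absl/hash/hash.h earlier in this diff.

#include <cstddef>
#include "absl/hash/hash.h"

struct Foo {
  int a = 0;
  int method() const { return a; }
};

int main() {
  // Pointers to data members and to member functions are now hashable.
  const size_t h1 = absl::Hash<int Foo::*>{}(&Foo::a);
  const size_t h2 = absl::Hash<int (Foo::*)() const>{}(&Foo::method);
  // Do not persist or transmit these values; they are not stable across
  // processes or across dynamically loaded libraries.
  return static_cast<int>((h1 ^ h2) & 1);
}

Similarly, the any_invocable.h macro expansions earlier in this diff define the cv- and ref-qualified call operators plus the use-after-move trap. Assuming the vendored copy also ships absl/functional/any_invocable.h, typical usage looks roughly like this sketch:

#include <utility>
#include "absl/functional/any_invocable.h"

int main() {
  // const-qualified signature: the wrapped callable is invoked as const.
  absl::AnyInvocable<int(int) const> add_one = [](int x) { return x + 1; };
  const int a = add_one(41);  // 42

  // rvalue-ref-qualified signature: must be invoked as an rvalue, and only
  // once; a second call is the "AnyInvocable use-after-move" the hardening
  // assert above traps.
  absl::AnyInvocable<int() &&> once = [] { return 7; };
  const int b = std::move(once)();

  return a + b;
}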
- for (size_t big_buffer_size : {1024 * 2 + 512, 1024 * 3}) { + for (size_t big_buffer_size : {1024u * 2 + 512u, 1024u * 3}) { SCOPED_TRACE(big_buffer_size); std::string big_buffer; - for (int i = 0; i < big_buffer_size; ++i) { + for (size_t i = 0; i < big_buffer_size; ++i) { // Arbitrary string big_buffer.push_back(32 + (i * (i / 3)) % 64); } @@ -1054,10 +1135,10 @@ TEST(HashTest, HashNonUniquelyRepresentedType) { unsigned char buffer2[kNumStructs * sizeof(StructWithPadding)]; std::memset(buffer2, 255, sizeof(buffer2)); auto* s2 = reinterpret_cast(buffer2); - for (int i = 0; i < kNumStructs; ++i) { + for (size_t i = 0; i < kNumStructs; ++i) { SCOPED_TRACE(i); - s1[i].c = s2[i].c = '0' + i; - s1[i].i = s2[i].i = i; + s1[i].c = s2[i].c = static_cast('0' + i); + s1[i].i = s2[i].i = static_cast(i); ASSERT_FALSE(memcmp(buffer1 + i * sizeof(StructWithPadding), buffer2 + i * sizeof(StructWithPadding), sizeof(StructWithPadding)) == 0) @@ -1145,7 +1226,9 @@ struct ValueWithBoolConversion { namespace std { template <> struct hash { - size_t operator()(ValueWithBoolConversion v) { return v.i; } + size_t operator()(ValueWithBoolConversion v) { + return static_cast(v.i); + } }; } // namespace std diff --git a/TMessagesProj/jni/voip/webrtc/absl/hash/internal/city.cc b/TMessagesProj/jni/voip/webrtc/absl/hash/internal/city.cc index 5460134e57..f0d3196470 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/hash/internal/city.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/hash/internal/city.cc @@ -97,7 +97,7 @@ static uint32_t Hash32Len13to24(const char *s, size_t len) { uint32_t d = Fetch32(s + (len >> 1)); uint32_t e = Fetch32(s); uint32_t f = Fetch32(s + len - 4); - uint32_t h = len; + uint32_t h = static_cast(len); return fmix(Mur(f, Mur(e, Mur(d, Mur(c, Mur(b, Mur(a, h))))))); } @@ -106,15 +106,15 @@ static uint32_t Hash32Len0to4(const char *s, size_t len) { uint32_t b = 0; uint32_t c = 9; for (size_t i = 0; i < len; i++) { - signed char v = s[i]; - b = b * c1 + v; + signed char v = static_cast(s[i]); + b = b * c1 + static_cast(v); c ^= b; } - return fmix(Mur(b, Mur(len, c))); + return fmix(Mur(b, Mur(static_cast(len), c))); } static uint32_t Hash32Len5to12(const char *s, size_t len) { - uint32_t a = len, b = len * 5, c = 9, d = b; + uint32_t a = static_cast(len), b = a * 5, c = 9, d = b; a += Fetch32(s); b += Fetch32(s + len - 4); c += Fetch32(s + ((len >> 1) & 4)); @@ -129,7 +129,7 @@ uint32_t CityHash32(const char *s, size_t len) { } // len > 24 - uint32_t h = len, g = c1 * len, f = g; + uint32_t h = static_cast(len), g = c1 * h, f = g; uint32_t a0 = Rotate32(Fetch32(s + len - 4) * c1, 17) * c2; uint32_t a1 = Rotate32(Fetch32(s + len - 8) * c1, 17) * c2; @@ -230,11 +230,11 @@ static uint64_t HashLen0to16(const char *s, size_t len) { return HashLen16(len + (a << 3), Fetch32(s + len - 4), mul); } if (len > 0) { - uint8_t a = s[0]; - uint8_t b = s[len >> 1]; - uint8_t c = s[len - 1]; + uint8_t a = static_cast(s[0]); + uint8_t b = static_cast(s[len >> 1]); + uint8_t c = static_cast(s[len - 1]); uint32_t y = static_cast(a) + (static_cast(b) << 8); - uint32_t z = len + (static_cast(c) << 2); + uint32_t z = static_cast(len) + (static_cast(c) << 2); return ShiftMix(y * k2 ^ z * k0) * k2; } return k2; diff --git a/TMessagesProj/jni/voip/webrtc/absl/hash/internal/hash.h b/TMessagesProj/jni/voip/webrtc/absl/hash/internal/hash.h index a424e0149a..dbdc20504f 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/hash/internal/hash.h +++ b/TMessagesProj/jni/voip/webrtc/absl/hash/internal/hash.h @@ -421,6 +421,39 @@ H 
AbslHashValue(H hash_state, std::nullptr_t) { return H::combine(std::move(hash_state), static_cast(nullptr)); } +// AbslHashValue() for hashing pointers-to-member +template +H AbslHashValue(H hash_state, T C::* ptr) { + auto salient_ptm_size = [](std::size_t n) -> std::size_t { +#if defined(_MSC_VER) + // Pointers-to-member-function on MSVC consist of one pointer plus 0, 1, 2, + // or 3 ints. In 64-bit mode, they are 8-byte aligned and thus can contain + // padding (namely when they have 1 or 3 ints). The value below is a lower + // bound on the number of salient, non-padding bytes that we use for + // hashing. + if (alignof(T C::*) == alignof(int)) { + // No padding when all subobjects have the same size as the total + // alignment. This happens in 32-bit mode. + return n; + } else { + // Padding for 1 int (size 16) or 3 ints (size 24). + // With 2 ints, the size is 16 with no padding, which we pessimize. + return n == 24 ? 20 : n == 16 ? 12 : n; + } +#else + // On other platforms, we assume that pointers-to-members do not have + // padding. +#ifdef __cpp_lib_has_unique_object_representations + static_assert(std::has_unique_object_representations::value); +#endif // __cpp_lib_has_unique_object_representations + return n; +#endif + }; + return H::combine_contiguous(std::move(hash_state), + reinterpret_cast(&ptr), + salient_ptm_size(sizeof ptr)); +} + // ----------------------------------------------------------------------------- // AbslHashValue for Composite Types // ----------------------------------------------------------------------------- @@ -1057,15 +1090,10 @@ class ABSL_DLL MixingHashState : public HashStateBase { } ABSL_ATTRIBUTE_ALWAYS_INLINE static uint64_t Mix(uint64_t state, uint64_t v) { -#if defined(__aarch64__) - // On AArch64, calculating a 128-bit product is inefficient, because it - // requires a sequence of two instructions to calculate the upper and lower - // halves of the result. - using MultType = uint64_t; -#else + // Though the 128-bit product on AArch64 needs two instructions, it is + // still a good balance between speed and hash quality. using MultType = absl::conditional_t; -#endif // We do the addition in 64-bit space to make sure the 128-bit // multiplication is fast. If we were to do it as MultType the compiler has // to assume that the high word is non-zero and needs to perform 2 diff --git a/TMessagesProj/jni/voip/webrtc/absl/hash/internal/low_level_hash.cc b/TMessagesProj/jni/voip/webrtc/absl/hash/internal/low_level_hash.cc index 6f9cb9c7bf..e05e7885ad 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/hash/internal/low_level_hash.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/hash/internal/low_level_hash.cc @@ -40,7 +40,7 @@ static uint64_t Mix(uint64_t v0, uint64_t v1) { } uint64_t LowLevelHash(const void* data, size_t len, uint64_t seed, - const uint64_t salt[]) { + const uint64_t salt[5]) { const uint8_t* ptr = static_cast(data); uint64_t starting_length = static_cast(len); uint64_t current_state = seed ^ salt[0]; @@ -106,7 +106,8 @@ uint64_t LowLevelHash(const void* data, size_t len, uint64_t seed, } else if (len > 0) { // If we have at least 1 and at most 3 bytes, read all of the provided // bits into A, with some adjustments. 
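The Mix() change above drops the AArch64 special case and always widens to a 128-bit product when size_t is 64-bit. A simplified sketch of that multiply-and-fold idea using absl::uint128 follows; MultiplyFoldMix and kMul are illustrative names, not Abseil's.

#include <cstdint>
#include "absl/numeric/int128.h"

// Add in 64-bit space, widen the multiply to 128 bits, then fold the halves
// together so high and low bits of the product both affect the result.
inline uint64_t MultiplyFoldMix(uint64_t state, uint64_t v, uint64_t kMul) {
  const absl::uint128 m = absl::uint128(state + v) * kMul;
  return absl::Uint128High64(m) ^ absl::Uint128Low64(m);
}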
- a = ((ptr[0] << 16) | (ptr[len >> 1] << 8) | ptr[len - 1]); + a = static_cast((ptr[0] << 16) | (ptr[len >> 1] << 8) | + ptr[len - 1]); b = 0; } else { a = 0; diff --git a/TMessagesProj/jni/voip/webrtc/absl/memory/CMakeLists.txt b/TMessagesProj/jni/voip/webrtc/absl/memory/CMakeLists.txt new file mode 100644 index 0000000000..c5ed4b4255 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/memory/CMakeLists.txt @@ -0,0 +1,41 @@ +# +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +absl_cc_library( + NAME + memory + HDRS + "memory.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::core_headers + absl::meta + PUBLIC +) + +absl_cc_test( + NAME + memory_test + SRCS + "memory_test.cc" + COPTS + ${ABSL_TEST_COPTS} + DEPS + absl::memory + absl::core_headers + GTest::gmock_main +) diff --git a/TMessagesProj/jni/voip/webrtc/absl/memory/memory.h b/TMessagesProj/jni/voip/webrtc/absl/memory/memory.h index d63326068f..e5ff0e6563 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/memory/memory.h +++ b/TMessagesProj/jni/voip/webrtc/absl/memory/memory.h @@ -75,32 +75,6 @@ std::unique_ptr WrapUnique(T* ptr) { return std::unique_ptr(ptr); } -namespace memory_internal { - -// Traits to select proper overload and return type for `absl::make_unique<>`. -template -struct MakeUniqueResult { - using scalar = std::unique_ptr; -}; -template -struct MakeUniqueResult { - using array = std::unique_ptr; -}; -template -struct MakeUniqueResult { - using invalid = void; -}; - -} // namespace memory_internal - -// gcc 4.8 has __cplusplus at 201301 but the libstdc++ shipped with it doesn't -// define make_unique. Other supported compilers either just define __cplusplus -// as 201103 but have make_unique (msvc), or have make_unique whenever -// __cplusplus > 201103 (clang). -#if (__cplusplus > 201103L || defined(_MSC_VER)) && \ - !(defined(__GLIBCXX__) && !defined(__cpp_lib_make_unique)) -using std::make_unique; -#else // ----------------------------------------------------------------------------- // Function Template: make_unique() // ----------------------------------------------------------------------------- @@ -109,82 +83,18 @@ using std::make_unique; // during the construction process. `absl::make_unique<>` also avoids redundant // type declarations, by avoiding the need to explicitly use the `new` operator. // -// This implementation of `absl::make_unique<>` is designed for C++11 code and -// will be replaced in C++14 by the equivalent `std::make_unique<>` abstraction. -// `absl::make_unique<>` is designed to be 100% compatible with -// `std::make_unique<>` so that the eventual migration will involve a simple -// rename operation. +// https://en.cppreference.com/w/cpp/memory/unique_ptr/make_unique // // For more background on why `std::unique_ptr(new T(a,b))` is problematic, // see Herb Sutter's explanation on // (Exception-Safe Function Calls)[https://herbsutter.com/gotw/_102/]. // (In general, reviewers should treat `new T(a,b)` with scrutiny.) 
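The low_level_hash.cc change above only adds a cast, but the underlying 1-3 byte trick is worth spelling out: the first, middle and last bytes are packed into one word so every input byte participates without branching on the exact length. PackShortInput is an illustrative helper, not part of Abseil.

#include <cstddef>
#include <cstdint>

// For len == 3 this reads bytes 0, 1, 2; for len == 2 it reads 0, 1, 1; for
// len == 1 all three reads hit byte 0. Valid only for 1 <= len <= 3.
inline uint64_t PackShortInput(const uint8_t* ptr, size_t len) {
  return static_cast<uint64_t>((ptr[0] << 16) | (ptr[len >> 1] << 8) |
                               ptr[len - 1]);
}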
// -// Example usage: -// -// auto p = make_unique(args...); // 'p' is a std::unique_ptr -// auto pa = make_unique(5); // 'pa' is a std::unique_ptr -// -// Three overloads of `absl::make_unique` are required: -// -// - For non-array T: -// -// Allocates a T with `new T(std::forward args...)`, -// forwarding all `args` to T's constructor. -// Returns a `std::unique_ptr` owning that object. -// -// - For an array of unknown bounds T[]: -// -// `absl::make_unique<>` will allocate an array T of type U[] with -// `new U[n]()` and return a `std::unique_ptr` owning that array. -// -// Note that 'U[n]()' is different from 'U[n]', and elements will be -// value-initialized. Note as well that `std::unique_ptr` will perform its -// own destruction of the array elements upon leaving scope, even though -// the array [] does not have a default destructor. -// -// NOTE: an array of unknown bounds T[] may still be (and often will be) -// initialized to have a size, and will still use this overload. E.g: -// -// auto my_array = absl::make_unique(10); -// -// - For an array of known bounds T[N]: -// -// `absl::make_unique<>` is deleted (like with `std::make_unique<>`) as -// this overload is not useful. -// -// NOTE: an array of known bounds T[N] is not considered a useful -// construction, and may cause undefined behavior in templates. E.g: -// -// auto my_array = absl::make_unique(); -// -// In those cases, of course, you can still use the overload above and -// simply initialize it to its desired size: -// -// auto my_array = absl::make_unique(10); - -// `absl::make_unique` overload for non-array types. -template -typename memory_internal::MakeUniqueResult::scalar make_unique( - Args&&... args) { - return std::unique_ptr(new T(std::forward(args)...)); -} - -// `absl::make_unique` overload for an array T[] of unknown bounds. -// The array allocation needs to use the `new T[size]` form and cannot take -// element constructor arguments. The `std::unique_ptr` will manage destructing -// these array elements. -template -typename memory_internal::MakeUniqueResult::array make_unique(size_t n) { - return std::unique_ptr(new typename absl::remove_extent_t[n]()); -} - -// `absl::make_unique` overload for an array T[N] of known bounds. -// This construction will be rejected. -template -typename memory_internal::MakeUniqueResult::invalid make_unique( - Args&&... /* args */) = delete; -#endif +// Historical note: Abseil once provided a C++11 compatible implementation of +// the C++14's `std::make_unique`. Now that C++11 support has been sunsetted, +// `absl::make_unique` simply uses the STL-provided implementation. New code +// should use `std::make_unique`. +using std::make_unique; // ----------------------------------------------------------------------------- // Function Template: RawPtr() diff --git a/TMessagesProj/jni/voip/webrtc/absl/memory/memory_test.cc b/TMessagesProj/jni/voip/webrtc/absl/memory/memory_test.cc new file mode 100644 index 0000000000..6f01cdff9a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/memory/memory_test.cc @@ -0,0 +1,554 @@ +// Copyright 2017 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
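With the C++11 fallback removed, absl::make_unique is now only an alias for std::make_unique, so the two are interchangeable; a minimal sketch:

#include <memory>
#include "absl/memory/memory.h"

int main() {
  auto p = absl::make_unique<int>(5);       // std::unique_ptr<int>
  auto arr = absl::make_unique<int[]>(10);  // std::unique_ptr<int[]>, value-initialized
  return *p + arr[0];                       // 5
}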
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Tests for pointer utilities. + +#include "absl/memory/memory.h" + +#include + +#include +#include +#include +#include +#include +#include + +#include "gmock/gmock.h" +#include "gtest/gtest.h" + +namespace { + +using ::testing::ElementsAre; +using ::testing::Return; + +// This class creates observable behavior to verify that a destructor has +// been called, via the instance_count variable. +class DestructorVerifier { + public: + DestructorVerifier() { ++instance_count_; } + DestructorVerifier(const DestructorVerifier&) = delete; + DestructorVerifier& operator=(const DestructorVerifier&) = delete; + ~DestructorVerifier() { --instance_count_; } + + // The number of instances of this class currently active. + static int instance_count() { return instance_count_; } + + private: + // The number of instances of this class currently active. + static int instance_count_; +}; + +int DestructorVerifier::instance_count_ = 0; + +TEST(WrapUniqueTest, WrapUnique) { + // Test that the unique_ptr is constructed properly by verifying that the + // destructor for its payload gets called at the proper time. + { + auto dv = new DestructorVerifier; + EXPECT_EQ(1, DestructorVerifier::instance_count()); + std::unique_ptr ptr = absl::WrapUnique(dv); + EXPECT_EQ(1, DestructorVerifier::instance_count()); + } + EXPECT_EQ(0, DestructorVerifier::instance_count()); +} + +// InitializationVerifier fills in a pattern when allocated so we can +// distinguish between its default and value initialized states (without +// accessing truly uninitialized memory). 
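The WrapUnique test above checks destruction timing; for reference, the utility itself simply adopts an already-allocated object. Widget is a hypothetical type used only for illustration.

#include <memory>
#include "absl/memory/memory.h"

struct Widget {
  explicit Widget(int id) : id_(id) {}
  int id_;
};

int main() {
  // Unlike make_unique, WrapUnique is for pointers that were allocated
  // elsewhere, e.g. returned by a legacy factory with owning semantics.
  std::unique_ptr<Widget> w = absl::WrapUnique(new Widget(3));
  return w->id_ == 3 ? 0 : 1;
}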
+struct InitializationVerifier { + static constexpr int kDefaultScalar = 0x43; + static constexpr int kDefaultArray = 0x4B; + + static void* operator new(size_t n) { + void* ret = ::operator new(n); + memset(ret, kDefaultScalar, n); + return ret; + } + + static void* operator new[](size_t n) { + void* ret = ::operator new[](n); + memset(ret, kDefaultArray, n); + return ret; + } + + int a; + int b; +}; + +struct ArrayWatch { + void* operator new[](size_t n) { + allocs().push_back(n); + return ::operator new[](n); + } + void operator delete[](void* p) { return ::operator delete[](p); } + static std::vector& allocs() { + static auto& v = *new std::vector; + return v; + } +}; + +TEST(RawPtrTest, RawPointer) { + int i = 5; + EXPECT_EQ(&i, absl::RawPtr(&i)); +} + +TEST(RawPtrTest, SmartPointer) { + int* o = new int(5); + std::unique_ptr p(o); + EXPECT_EQ(o, absl::RawPtr(p)); +} + +class IntPointerNonConstDeref { + public: + explicit IntPointerNonConstDeref(int* p) : p_(p) {} + friend bool operator!=(const IntPointerNonConstDeref& a, std::nullptr_t) { + return a.p_ != nullptr; + } + int& operator*() { return *p_; } + + private: + std::unique_ptr p_; +}; + +TEST(RawPtrTest, SmartPointerNonConstDereference) { + int* o = new int(5); + IntPointerNonConstDeref p(o); + EXPECT_EQ(o, absl::RawPtr(p)); +} + +TEST(RawPtrTest, NullValuedRawPointer) { + int* p = nullptr; + EXPECT_EQ(nullptr, absl::RawPtr(p)); +} + +TEST(RawPtrTest, NullValuedSmartPointer) { + std::unique_ptr p; + EXPECT_EQ(nullptr, absl::RawPtr(p)); +} + +TEST(RawPtrTest, Nullptr) { + auto p = absl::RawPtr(nullptr); + EXPECT_TRUE((std::is_same::value)); + EXPECT_EQ(nullptr, p); +} + +TEST(RawPtrTest, Null) { + auto p = absl::RawPtr(nullptr); + EXPECT_TRUE((std::is_same::value)); + EXPECT_EQ(nullptr, p); +} + +TEST(RawPtrTest, Zero) { + auto p = absl::RawPtr(nullptr); + EXPECT_TRUE((std::is_same::value)); + EXPECT_EQ(nullptr, p); +} + +TEST(ShareUniquePtrTest, Share) { + auto up = absl::make_unique(); + int* rp = up.get(); + auto sp = absl::ShareUniquePtr(std::move(up)); + EXPECT_EQ(sp.get(), rp); +} + +TEST(ShareUniquePtrTest, ShareNull) { + struct NeverDie { + using pointer = void*; + void operator()(pointer) { + ASSERT_TRUE(false) << "Deleter should not have been called."; + } + }; + + std::unique_ptr up; + auto sp = absl::ShareUniquePtr(std::move(up)); +} + +TEST(WeakenPtrTest, Weak) { + auto sp = std::make_shared(); + auto wp = absl::WeakenPtr(sp); + EXPECT_EQ(sp.get(), wp.lock().get()); + sp.reset(); + EXPECT_TRUE(wp.expired()); +} + +// Should not compile. 
+/* +TEST(RawPtrTest, NotAPointer) { + absl::RawPtr(1.5); +} +*/ + +template +struct SmartPointer { + using difference_type = char; +}; + +struct PointerWith { + using element_type = int32_t; + using difference_type = int16_t; + template + using rebind = SmartPointer; + + static PointerWith pointer_to( + element_type& r) { // NOLINT(runtime/references) + return PointerWith{&r}; + } + + element_type* ptr; +}; + +template +struct PointerWithout {}; + +TEST(PointerTraits, Types) { + using TraitsWith = absl::pointer_traits; + EXPECT_TRUE((std::is_same::value)); + EXPECT_TRUE((std::is_same::value)); + EXPECT_TRUE((std::is_same::value)); + EXPECT_TRUE(( + std::is_same, SmartPointer>::value)); + + using TraitsWithout = absl::pointer_traits>; + EXPECT_TRUE((std::is_same>::value)); + EXPECT_TRUE((std::is_same::value)); + EXPECT_TRUE( + (std::is_same::value)); + EXPECT_TRUE((std::is_same, + PointerWithout>::value)); + + using TraitsRawPtr = absl::pointer_traits; + EXPECT_TRUE((std::is_same::value)); + EXPECT_TRUE((std::is_same::value)); + EXPECT_TRUE( + (std::is_same::value)); + EXPECT_TRUE((std::is_same, int64_t*>::value)); +} + +TEST(PointerTraits, Functions) { + int i; + EXPECT_EQ(&i, absl::pointer_traits::pointer_to(i).ptr); + EXPECT_EQ(&i, absl::pointer_traits::pointer_to(i)); +} + +TEST(AllocatorTraits, Typedefs) { + struct A { + struct value_type {}; + }; + EXPECT_TRUE(( + std::is_same::allocator_type>::value)); + EXPECT_TRUE( + (std::is_same::value_type>::value)); + + struct X {}; + struct HasPointer { + using value_type = X; + using pointer = SmartPointer; + }; + EXPECT_TRUE((std::is_same, typename absl::allocator_traits< + HasPointer>::pointer>::value)); + EXPECT_TRUE( + (std::is_same::pointer>::value)); + + EXPECT_TRUE( + (std::is_same< + SmartPointer, + typename absl::allocator_traits::const_pointer>::value)); + EXPECT_TRUE( + (std::is_same::const_pointer>::value)); + + struct HasVoidPointer { + using value_type = X; + struct void_pointer {}; + }; + + EXPECT_TRUE((std::is_same::void_pointer>::value)); + EXPECT_TRUE( + (std::is_same, typename absl::allocator_traits< + HasPointer>::void_pointer>::value)); + + struct HasConstVoidPointer { + using value_type = X; + struct const_void_pointer {}; + }; + + EXPECT_TRUE( + (std::is_same::const_void_pointer>::value)); + EXPECT_TRUE((std::is_same, + typename absl::allocator_traits< + HasPointer>::const_void_pointer>::value)); + + struct HasDifferenceType { + using value_type = X; + using difference_type = int; + }; + EXPECT_TRUE( + (std::is_same::difference_type>::value)); + EXPECT_TRUE((std::is_same::difference_type>::value)); + + struct HasSizeType { + using value_type = X; + using size_type = unsigned int; + }; + EXPECT_TRUE((std::is_same::size_type>::value)); + EXPECT_TRUE((std::is_same::size_type>::value)); + + struct HasPropagateOnCopy { + using value_type = X; + struct propagate_on_container_copy_assignment {}; + }; + + EXPECT_TRUE( + (std::is_same:: + propagate_on_container_copy_assignment>::value)); + EXPECT_TRUE( + (std::is_same::propagate_on_container_copy_assignment>::value)); + + struct HasPropagateOnMove { + using value_type = X; + struct propagate_on_container_move_assignment {}; + }; + + EXPECT_TRUE( + (std::is_same:: + propagate_on_container_move_assignment>::value)); + EXPECT_TRUE( + (std::is_same::propagate_on_container_move_assignment>::value)); + + struct HasPropagateOnSwap { + using value_type = X; + struct propagate_on_container_swap {}; + }; + + EXPECT_TRUE( + (std::is_same:: + propagate_on_container_swap>::value)); + 
EXPECT_TRUE( + (std::is_same:: + propagate_on_container_swap>::value)); + + struct HasIsAlwaysEqual { + using value_type = X; + struct is_always_equal {}; + }; + + EXPECT_TRUE((std::is_same::is_always_equal>::value)); + EXPECT_TRUE((std::is_same::is_always_equal>::value)); + struct NonEmpty { + using value_type = X; + int i; + }; + EXPECT_TRUE( + (std::is_same::is_always_equal>::value)); +} + +template +struct AllocWithPrivateInheritance : private std::allocator { + using value_type = T; +}; + +TEST(AllocatorTraits, RebindWithPrivateInheritance) { + // Regression test for some versions of gcc that do not like the sfinae we + // used in combination with private inheritance. + EXPECT_TRUE( + (std::is_same, + absl::allocator_traits>:: + rebind_alloc>::value)); +} + +template +struct Rebound {}; + +struct AllocWithRebind { + using value_type = int; + template + struct rebind { + using other = Rebound; + }; +}; + +template +struct AllocWithoutRebind { + using value_type = int; +}; + +TEST(AllocatorTraits, Rebind) { + EXPECT_TRUE( + (std::is_same, + typename absl::allocator_traits< + AllocWithRebind>::template rebind_alloc>::value)); + EXPECT_TRUE( + (std::is_same>, + typename absl::allocator_traits< + AllocWithRebind>::template rebind_traits>::value)); + + EXPECT_TRUE( + (std::is_same, + typename absl::allocator_traits>::template rebind_alloc>::value)); + EXPECT_TRUE( + (std::is_same>, + typename absl::allocator_traits>::template rebind_traits>::value)); +} + +struct TestValue { + TestValue() {} + explicit TestValue(int* trace) : trace(trace) { ++*trace; } + ~TestValue() { + if (trace) --*trace; + } + int* trace = nullptr; +}; + +struct MinimalMockAllocator { + MinimalMockAllocator() : value(0) {} + explicit MinimalMockAllocator(int value) : value(value) {} + MinimalMockAllocator(const MinimalMockAllocator& other) + : value(other.value) {} + using value_type = TestValue; + MOCK_METHOD(value_type*, allocate, (size_t)); + MOCK_METHOD(void, deallocate, (value_type*, size_t)); + + int value; +}; + +TEST(AllocatorTraits, FunctionsMinimal) { + int trace = 0; + int hint; + alignas(TestValue) char buffer[sizeof(TestValue)]; + auto* x = reinterpret_cast(buffer); + MinimalMockAllocator mock; + using Traits = absl::allocator_traits; + EXPECT_CALL(mock, allocate(7)).WillRepeatedly(Return(x)); + EXPECT_CALL(mock, deallocate(x, 7)); + + EXPECT_EQ(x, Traits::allocate(mock, 7)); + static_cast(Traits::allocate(mock, 7, static_cast(&hint))); + EXPECT_EQ(x, Traits::allocate(mock, 7, static_cast(&hint))); + Traits::deallocate(mock, x, 7); + + EXPECT_EQ(0, trace); + Traits::construct(mock, x, &trace); + EXPECT_EQ(1, trace); + Traits::destroy(mock, x); + EXPECT_EQ(0, trace); + + EXPECT_EQ(std::numeric_limits::max() / sizeof(TestValue), + Traits::max_size(mock)); + + EXPECT_EQ(0, mock.value); + EXPECT_EQ(0, Traits::select_on_container_copy_construction(mock).value); +} + +struct FullMockAllocator { + FullMockAllocator() : value(0) {} + explicit FullMockAllocator(int value) : value(value) {} + FullMockAllocator(const FullMockAllocator& other) : value(other.value) {} + using value_type = TestValue; + MOCK_METHOD(value_type*, allocate, (size_t)); + MOCK_METHOD(value_type*, allocate, (size_t, const void*)); + MOCK_METHOD(void, construct, (value_type*, int*)); + MOCK_METHOD(void, destroy, (value_type*)); + MOCK_METHOD(size_t, max_size, (), + (const)); + MOCK_METHOD(FullMockAllocator, select_on_container_copy_construction, (), + (const)); + + int value; +}; + +TEST(AllocatorTraits, FunctionsFull) { + int trace = 0; + int 
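MinimalMockAllocator above relies on allocator_traits filling in defaults for everything it does not mock. The same behavior can be seen with a plain minimal allocator; this sketch uses std::allocator_traits, which absl::allocator_traits mirrors, and MinimalAllocator is an illustrative name.

#include <cstddef>
#include <memory>

// Only value_type, allocate and deallocate are provided; the traits supply
// construct, destroy, max_size and select_on_container_copy_construction.
template <typename T>
struct MinimalAllocator {
  using value_type = T;
  T* allocate(std::size_t n) {
    return static_cast<T*>(::operator new(n * sizeof(T)));
  }
  void deallocate(T* p, std::size_t) { ::operator delete(p); }
};

int main() {
  MinimalAllocator<int> alloc;
  using Traits = std::allocator_traits<MinimalAllocator<int>>;
  int* p = Traits::allocate(alloc, 1);
  Traits::construct(alloc, p, 42);  // default: placement new
  const int v = *p;
  Traits::destroy(alloc, p);        // default: calls the destructor
  Traits::deallocate(alloc, p, 1);
  return v == 42 ? 0 : 1;
}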
hint; + TestValue x(&trace), y; + FullMockAllocator mock; + using Traits = absl::allocator_traits; + EXPECT_CALL(mock, allocate(7)).WillRepeatedly(Return(&x)); + EXPECT_CALL(mock, allocate(13, &hint)).WillRepeatedly(Return(&y)); + EXPECT_CALL(mock, construct(&x, &trace)); + EXPECT_CALL(mock, destroy(&x)); + EXPECT_CALL(mock, max_size()).WillRepeatedly(Return(17u)); + EXPECT_CALL(mock, select_on_container_copy_construction()) + .WillRepeatedly(Return(FullMockAllocator(23))); + + EXPECT_EQ(&x, Traits::allocate(mock, 7)); + EXPECT_EQ(&y, Traits::allocate(mock, 13, static_cast(&hint))); + + EXPECT_EQ(1, trace); + Traits::construct(mock, &x, &trace); + EXPECT_EQ(1, trace); + Traits::destroy(mock, &x); + EXPECT_EQ(1, trace); + + EXPECT_EQ(17u, Traits::max_size(mock)); + + EXPECT_EQ(0, mock.value); + EXPECT_EQ(23, Traits::select_on_container_copy_construction(mock).value); +} + +TEST(AllocatorNoThrowTest, DefaultAllocator) { +#if defined(ABSL_ALLOCATOR_NOTHROW) && ABSL_ALLOCATOR_NOTHROW + EXPECT_TRUE(absl::default_allocator_is_nothrow::value); +#else + EXPECT_FALSE(absl::default_allocator_is_nothrow::value); +#endif +} + +TEST(AllocatorNoThrowTest, StdAllocator) { +#if defined(ABSL_ALLOCATOR_NOTHROW) && ABSL_ALLOCATOR_NOTHROW + EXPECT_TRUE(absl::allocator_is_nothrow>::value); +#else + EXPECT_FALSE(absl::allocator_is_nothrow>::value); +#endif +} + +TEST(AllocatorNoThrowTest, CustomAllocator) { + struct NoThrowAllocator { + using is_nothrow = std::true_type; + }; + struct CanThrowAllocator { + using is_nothrow = std::false_type; + }; + struct UnspecifiedAllocator {}; + EXPECT_TRUE(absl::allocator_is_nothrow::value); + EXPECT_FALSE(absl::allocator_is_nothrow::value); + EXPECT_FALSE(absl::allocator_is_nothrow::value); +} + +} // namespace diff --git a/TMessagesProj/jni/voip/webrtc/absl/meta/type_traits.h b/TMessagesProj/jni/voip/webrtc/absl/meta/type_traits.h index d886cb30a8..6e6001fe8e 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/meta/type_traits.h +++ b/TMessagesProj/jni/voip/webrtc/absl/meta/type_traits.h @@ -298,8 +298,12 @@ struct is_function // https://gcc.gnu.org/onlinedocs/gcc/Type-Traits.html#Type-Traits. template struct is_trivially_destructible +#ifdef ABSL_HAVE_STD_IS_TRIVIALLY_DESTRUCTIBLE + : std::is_trivially_destructible { +#else : std::integral_constant::value> { +#endif #ifdef ABSL_HAVE_STD_IS_TRIVIALLY_DESTRUCTIBLE private: static constexpr bool compliant = std::is_trivially_destructible::value == @@ -347,9 +351,13 @@ struct is_trivially_destructible // Nontrivially destructible types will cause the expression to be nontrivial. template struct is_trivially_default_constructible +#if defined(ABSL_HAVE_STD_IS_TRIVIALLY_CONSTRUCTIBLE) + : std::is_trivially_default_constructible { +#else : std::integral_constant::value && is_trivially_destructible::value> { +#endif #if defined(ABSL_HAVE_STD_IS_TRIVIALLY_CONSTRUCTIBLE) && \ !defined( \ ABSL_META_INTERNAL_STD_CONSTRUCTION_TRAITS_DONT_CHECK_DESTRUCTION) @@ -381,10 +389,14 @@ struct is_trivially_default_constructible // expression to be nontrivial. 
template struct is_trivially_move_constructible +#if defined(ABSL_HAVE_STD_IS_TRIVIALLY_CONSTRUCTIBLE) + : std::is_trivially_move_constructible { +#else : std::conditional< std::is_object::value && !std::is_array::value, type_traits_internal::IsTriviallyMoveConstructibleObject, std::is_reference>::type::type { +#endif #if defined(ABSL_HAVE_STD_IS_TRIVIALLY_CONSTRUCTIBLE) && \ !defined( \ ABSL_META_INTERNAL_STD_CONSTRUCTION_TRAITS_DONT_CHECK_DESTRUCTION) @@ -490,9 +502,13 @@ struct is_trivially_move_assignable // `is_trivially_assignable`. template struct is_trivially_copy_assignable +#ifdef ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE + : std::is_trivially_copy_assignable { +#else : std::integral_constant< bool, __has_trivial_assign(typename std::remove_reference::type) && absl::is_copy_assignable::value> { +#endif #ifdef ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE private: static constexpr bool compliant = @@ -544,6 +560,11 @@ namespace type_traits_internal { // destructible. Arrays of trivially copyable types are trivially copyable. // // We expose this metafunction only for internal use within absl. + +#if defined(ABSL_HAVE_STD_IS_TRIVIALLY_COPYABLE) +template +struct is_trivially_copyable : std::is_trivially_copyable {}; +#else template class is_trivially_copyable_impl { using ExtentsRemoved = typename std::remove_all_extents::type; @@ -569,6 +590,7 @@ template struct is_trivially_copyable : std::integral_constant< bool, type_traits_internal::is_trivially_copyable_impl::kValue> {}; +#endif } // namespace type_traits_internal // ----------------------------------------------------------------------------- @@ -791,6 +813,34 @@ using swap_internal::Swap; using swap_internal::StdSwapIsUnconstrained; } // namespace type_traits_internal + +// absl::is_trivially_relocatable +// Detects whether a type is "trivially relocatable" -- meaning it can be +// relocated without invoking the constructor/destructor, using a form of move +// elision. +// +// Example: +// +// if constexpr (absl::is_trivially_relocatable::value) { +// memcpy(new_location, old_location, sizeof(T)); +// } else { +// new(new_location) T(std::move(*old_location)); +// old_location->~T(); +// } +// +// Upstream documentation: +// +// https://clang.llvm.org/docs/LanguageExtensions.html#:~:text=__is_trivially_relocatable +// +#if ABSL_HAVE_BUILTIN(__is_trivially_relocatable) +template +struct is_trivially_relocatable + : std::integral_constant {}; +#else +template +struct is_trivially_relocatable : std::integral_constant {}; +#endif + ABSL_NAMESPACE_END } // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/numeric/bits.h b/TMessagesProj/jni/voip/webrtc/absl/numeric/bits.h index 628cdf50f1..df81b9a929 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/numeric/bits.h +++ b/TMessagesProj/jni/voip/webrtc/absl/numeric/bits.h @@ -131,10 +131,9 @@ has_single_bit(T x) noexcept { // fractional part discarded. 
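The comment block for the new absl::is_trivially_relocatable trait already describes the intended pattern; here it is as a compilable helper. Relocate is an illustrative name, and the if constexpr assumes C++17.

#include <cstring>
#include <new>
#include <utility>
#include "absl/meta/type_traits.h"

template <typename T>
void Relocate(T* dst, T* src) {
  if constexpr (absl::is_trivially_relocatable<T>::value) {
    // Move the object representation; no constructor or destructor runs.
    std::memcpy(static_cast<void*>(dst), static_cast<void*>(src), sizeof(T));
  } else {
    ::new (static_cast<void*>(dst)) T(std::move(*src));
    src->~T();
  }
}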
template ABSL_INTERNAL_CONSTEXPR_CLZ inline - typename std::enable_if::value, T>::type + typename std::enable_if::value, int>::type bit_width(T x) noexcept { - return std::numeric_limits::digits - - static_cast(countl_zero(x)); + return std::numeric_limits::digits - countl_zero(x); } // Returns: If x == 0, 0; otherwise the maximal value y such that diff --git a/TMessagesProj/jni/voip/webrtc/absl/numeric/int128.cc b/TMessagesProj/jni/voip/webrtc/absl/numeric/int128.cc index 17d88744ae..e5526c6f59 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/numeric/int128.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/numeric/int128.cc @@ -42,11 +42,11 @@ namespace { // Returns: 2 inline ABSL_ATTRIBUTE_ALWAYS_INLINE int Fls128(uint128 n) { if (uint64_t hi = Uint128High64(n)) { - ABSL_INTERNAL_ASSUME(hi != 0); + ABSL_ASSUME(hi != 0); return 127 - countl_zero(hi); } const uint64_t low = Uint128Low64(n); - ABSL_INTERNAL_ASSUME(low != 0); + ABSL_ASSUME(low != 0); return 63 - countl_zero(low); } @@ -209,15 +209,16 @@ std::ostream& operator<<(std::ostream& os, uint128 v) { // Add the requisite padding. std::streamsize width = os.width(0); if (static_cast(width) > rep.size()) { + const size_t count = static_cast(width) - rep.size(); std::ios::fmtflags adjustfield = flags & std::ios::adjustfield; if (adjustfield == std::ios::left) { - rep.append(width - rep.size(), os.fill()); + rep.append(count, os.fill()); } else if (adjustfield == std::ios::internal && (flags & std::ios::showbase) && (flags & std::ios::basefield) == std::ios::hex && v != 0) { - rep.insert(2, width - rep.size(), os.fill()); + rep.insert(2, count, os.fill()); } else { - rep.insert(0, width - rep.size(), os.fill()); + rep.insert(0, count, os.fill()); } } @@ -306,22 +307,23 @@ std::ostream& operator<<(std::ostream& os, int128 v) { // Add the requisite padding. std::streamsize width = os.width(0); if (static_cast(width) > rep.size()) { + const size_t count = static_cast(width) - rep.size(); switch (flags & std::ios::adjustfield) { case std::ios::left: - rep.append(width - rep.size(), os.fill()); + rep.append(count, os.fill()); break; case std::ios::internal: if (print_as_decimal && (rep[0] == '+' || rep[0] == '-')) { - rep.insert(1, width - rep.size(), os.fill()); + rep.insert(1, count, os.fill()); } else if ((flags & std::ios::basefield) == std::ios::hex && (flags & std::ios::showbase) && v != 0) { - rep.insert(2, width - rep.size(), os.fill()); + rep.insert(2, count, os.fill()); } else { - rep.insert(0, width - rep.size(), os.fill()); + rep.insert(0, count, os.fill()); } break; default: // std::ios::right - rep.insert(0, width - rep.size(), os.fill()); + rep.insert(0, count, os.fill()); break; } } @@ -332,6 +334,7 @@ std::ostream& operator<<(std::ostream& os, int128 v) { ABSL_NAMESPACE_END } // namespace absl +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL namespace std { constexpr bool numeric_limits::is_specialized; constexpr bool numeric_limits::is_signed; @@ -381,3 +384,4 @@ constexpr int numeric_limits::max_exponent10; constexpr bool numeric_limits::traps; constexpr bool numeric_limits::tinyness_before; } // namespace std +#endif diff --git a/TMessagesProj/jni/voip/webrtc/absl/numeric/int128.h b/TMessagesProj/jni/voip/webrtc/absl/numeric/int128.h index c7ad96befd..7a899eec84 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/numeric/int128.h +++ b/TMessagesProj/jni/voip/webrtc/absl/numeric/int128.h @@ -44,7 +44,7 @@ // builtin type. 
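The bits.h change above makes absl::bit_width return an int count rather than T, which matches the current std::bit_width signature. A quick sketch of the expected values:

#include <cstdint>
#include "absl/numeric/bits.h"

int main() {
  const int w0 = absl::bit_width(0u);                  // 0
  const int w16 = absl::bit_width(16u);                // 5 (0b10000)
  const int w40 = absl::bit_width(uint64_t{1} << 40);  // 41
  return (w0 == 0 && w16 == 5 && w40 == 41) ? 0 : 1;
}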
We need to make sure not to define operator wchar_t() // alongside operator unsigned short() in these instances. #define ABSL_INTERNAL_WCHAR_T __wchar_t -#if defined(_M_X64) +#if defined(_M_X64) && !defined(_M_ARM64EC) #include #pragma intrinsic(_umul128) #endif // defined(_M_X64) @@ -980,7 +980,7 @@ inline uint128 operator*(uint128 lhs, uint128 rhs) { // can be used for uint128 storage. return static_cast(lhs) * static_cast(rhs); -#elif defined(_MSC_VER) && defined(_M_X64) +#elif defined(_MSC_VER) && defined(_M_X64) && !defined(_M_ARM64EC) uint64_t carry; uint64_t low = _umul128(Uint128Low64(lhs), Uint128Low64(rhs), &carry); return MakeUint128(Uint128Low64(lhs) * Uint128High64(rhs) + diff --git a/TMessagesProj/jni/voip/webrtc/absl/base/internal/periodic_sampler_benchmark.cc b/TMessagesProj/jni/voip/webrtc/absl/profiling/internal/periodic_sampler_benchmark.cc similarity index 94% rename from TMessagesProj/jni/voip/webrtc/absl/base/internal/periodic_sampler_benchmark.cc rename to TMessagesProj/jni/voip/webrtc/absl/profiling/internal/periodic_sampler_benchmark.cc index 5ad469ce79..8f0e5574c3 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/base/internal/periodic_sampler_benchmark.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/profiling/internal/periodic_sampler_benchmark.cc @@ -12,12 +12,12 @@ // See the License for the specific language governing permissions and // limitations under the License. +#include "absl/profiling/internal/periodic_sampler.h" #include "benchmark/benchmark.h" -#include "absl/base/internal/periodic_sampler.h" namespace absl { ABSL_NAMESPACE_BEGIN -namespace base_internal { +namespace profiling_internal { namespace { template @@ -74,6 +74,6 @@ void BM_PeriodicSampler_Disabled(benchmark::State& state) { BENCHMARK(BM_PeriodicSampler_Disabled); } // namespace -} // namespace base_internal +} // namespace profiling_internal ABSL_NAMESPACE_END } // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/profiling/internal/sample_recorder.h b/TMessagesProj/jni/voip/webrtc/absl/profiling/internal/sample_recorder.h index 5f65983bc8..ef1489b1f6 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/profiling/internal/sample_recorder.h +++ b/TMessagesProj/jni/voip/webrtc/absl/profiling/internal/sample_recorder.h @@ -77,8 +77,8 @@ class SampleRecorder { // samples that have been dropped. int64_t Iterate(const std::function& f); - int32_t GetMaxSamples() const; - void SetMaxSamples(int32_t max); + size_t GetMaxSamples() const; + void SetMaxSamples(size_t max); private: void PushNew(T* sample); @@ -88,7 +88,7 @@ class SampleRecorder { std::atomic dropped_samples_; std::atomic size_estimate_; - std::atomic max_samples_{1 << 20}; + std::atomic max_samples_{1 << 20}; // Intrusive lock free linked lists for tracking samples. // @@ -186,7 +186,7 @@ T* SampleRecorder::PopDead(Targs... args) { template template T* SampleRecorder::Register(Targs&&... 
args) { - int64_t size = size_estimate_.fetch_add(1, std::memory_order_relaxed); + size_t size = size_estimate_.fetch_add(1, std::memory_order_relaxed); if (size > max_samples_.load(std::memory_order_relaxed)) { size_estimate_.fetch_sub(1, std::memory_order_relaxed); dropped_samples_.fetch_add(1, std::memory_order_relaxed); @@ -229,12 +229,12 @@ int64_t SampleRecorder::Iterate( } template -void SampleRecorder::SetMaxSamples(int32_t max) { +void SampleRecorder::SetMaxSamples(size_t max) { max_samples_.store(max, std::memory_order_release); } template -int32_t SampleRecorder::GetMaxSamples() const { +size_t SampleRecorder::GetMaxSamples() const { return max_samples_.load(std::memory_order_acquire); } diff --git a/TMessagesProj/jni/voip/webrtc/absl/random/bit_gen_ref.h b/TMessagesProj/jni/voip/webrtc/absl/random/bit_gen_ref.h index 9555460fd4..e475221a15 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/random/bit_gen_ref.h +++ b/TMessagesProj/jni/voip/webrtc/absl/random/bit_gen_ref.h @@ -24,6 +24,10 @@ #ifndef ABSL_RANDOM_BIT_GEN_REF_H_ #define ABSL_RANDOM_BIT_GEN_REF_H_ +#include +#include +#include + #include "absl/base/internal/fast_type_id.h" #include "absl/base/macros.h" #include "absl/meta/type_traits.h" diff --git a/TMessagesProj/jni/voip/webrtc/absl/random/internal/chi_square.cc b/TMessagesProj/jni/voip/webrtc/absl/random/internal/chi_square.cc index 640d48cea6..fbe0173299 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/random/internal/chi_square.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/random/internal/chi_square.cc @@ -125,7 +125,8 @@ double ChiSquareValue(int dof, double p) { const double variance = 2.0 / (9 * dof); // Cannot use this method if the variance is 0. if (variance != 0) { - return std::pow(z * std::sqrt(variance) + mean, 3.0) * dof; + double term = z * std::sqrt(variance) + mean; + return dof * (term * term * term); } } diff --git a/TMessagesProj/jni/voip/webrtc/absl/random/internal/distribution_caller.h b/TMessagesProj/jni/voip/webrtc/absl/random/internal/distribution_caller.h index f1ad5ccdb4..0f162a4e29 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/random/internal/distribution_caller.h +++ b/TMessagesProj/jni/voip/webrtc/absl/random/internal/distribution_caller.h @@ -18,6 +18,7 @@ #define ABSL_RANDOM_INTERNAL_DISTRIBUTION_CALLER_H_ #include +#include #include "absl/base/config.h" #include "absl/base/internal/fast_type_id.h" diff --git a/TMessagesProj/jni/voip/webrtc/absl/random/internal/fast_uniform_bits.h b/TMessagesProj/jni/voip/webrtc/absl/random/internal/fast_uniform_bits.h index f3a5c00f39..8d8ed04515 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/random/internal/fast_uniform_bits.h +++ b/TMessagesProj/jni/voip/webrtc/absl/random/internal/fast_uniform_bits.h @@ -151,7 +151,8 @@ FastUniformBits::Generate(URBG& g, // NOLINT(runtime/references) result_type r = static_cast(g() - kMin); for (size_t n = 1; n < kIters; ++n) { - r = (r << kShift) + static_cast(g() - kMin); + r = static_cast(r << kShift) + + static_cast(g() - kMin); } return r; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/random/internal/generate_real.h b/TMessagesProj/jni/voip/webrtc/absl/random/internal/generate_real.h index d5fbb44c24..b569450cf7 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/random/internal/generate_real.h +++ b/TMessagesProj/jni/voip/webrtc/absl/random/internal/generate_real.h @@ -50,10 +50,10 @@ struct GenerateSignedTag {}; // inputs, otherwise it never returns 0. 
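The chi_square.cc change above replaces std::pow(term, 3.0) with an explicit cube; for a small integer exponent, repeated multiplication avoids a libm call and agrees with the pow result up to rounding. ScaledCube is an illustrative helper restating that branch, not Abseil's function.

#include <cmath>

// Same value as std::pow(z * std::sqrt(variance) + mean, 3.0) * dof,
// computed by plain multiplication as in ChiSquareValue() above.
inline double ScaledCube(double dof, double z, double variance, double mean) {
  const double term = z * std::sqrt(variance) + mean;
  return dof * (term * term * term);
}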
// // When a value in U(0,1) is required, use: -// Uniform64ToReal; +// GenerateRealFromBits; // // When a value in U(-1,1) is required, use: -// Uniform64ToReal; +// GenerateRealFromBits; // // This generates more distinct values than the mathematical equivalent // `U(0, 1) * 2.0 - 1.0`. diff --git a/TMessagesProj/jni/voip/webrtc/absl/random/internal/mock_helpers.h b/TMessagesProj/jni/voip/webrtc/absl/random/internal/mock_helpers.h new file mode 100644 index 0000000000..882b0518ca --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/random/internal/mock_helpers.h @@ -0,0 +1,135 @@ +// +// Copyright 2019 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef ABSL_RANDOM_INTERNAL_MOCK_HELPERS_H_ +#define ABSL_RANDOM_INTERNAL_MOCK_HELPERS_H_ + +#include +#include +#include + +#include "absl/base/internal/fast_type_id.h" +#include "absl/types/optional.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace random_internal { + +// MockHelpers works in conjunction with MockOverloadSet, MockingBitGen, and +// BitGenRef to enable the mocking capability for absl distribution functions. +// +// MockingBitGen registers mocks based on the typeid of a mock signature, KeyT, +// which is used to generate a unique id. +// +// KeyT is a signature of the form: +// result_type(discriminator_type, std::tuple) +// The mocked function signature will be composed from KeyT as: +// result_type(args...) +// +class MockHelpers { + using IdType = ::absl::base_internal::FastTypeIdType; + + // Given a key signature type used to index the mock, extract the components. + // KeyT is expected to have the form: + // result_type(discriminator_type, arg_tuple_type) + template + struct KeySignature; + + template + struct KeySignature { + using result_type = ResultT; + using discriminator_type = DiscriminatorT; + using arg_tuple_type = ArgTupleT; + }; + + // Detector for InvokeMock. + template + using invoke_mock_t = decltype(std::declval()->InvokeMock( + std::declval(), std::declval(), std::declval())); + + // Empty implementation of InvokeMock. + template + static absl::optional InvokeMockImpl(char, URBG*, Args&&...) { + return absl::nullopt; + } + + // Non-empty implementation of InvokeMock. + template , typename... Args> + static absl::optional InvokeMockImpl(int, URBG* urbg, + Args&&... args) { + ArgTupleT arg_tuple(std::forward(args)...); + ReturnT result; + if (urbg->InvokeMock(::absl::base_internal::FastTypeId(), &arg_tuple, + &result)) { + return result; + } + return absl::nullopt; + } + + public: + // InvokeMock is private; this provides access for some specialized use cases. + template + static inline bool PrivateInvokeMock(URBG* urbg, IdType type, + void* args_tuple, void* result) { + return urbg->InvokeMock(type, args_tuple, result); + } + + // Invoke a mock for the KeyT (may or may not be a signature). + // + // KeyT is used to generate a typeid-based lookup key for the mock. 
+ // KeyT is a signature of the form: + // result_type(discriminator_type, std::tuple) + // The mocked function signature will be composed from KeyT as: + // result_type(args...) + // + // An instance of arg_tuple_type must be constructable from Args..., since + // the underlying mechanism requires a pointer to an argument tuple. + template + static auto MaybeInvokeMock(URBG* urbg, Args&&... args) + -> absl::optional::result_type> { + // Use function overloading to dispatch to the implemenation since + // more modern patterns (e.g. require + constexpr) are not supported in all + // compiler configurations. + return InvokeMockImpl::result_type, + typename KeySignature::arg_tuple_type, URBG>( + 0, urbg, std::forward(args)...); + } + + // Acquire a mock for the KeyT (may or may not be a signature). + // + // KeyT is used to generate a typeid-based lookup for the mock. + // KeyT is a signature of the form: + // result_type(discriminator_type, std::tuple) + // The mocked function signature will be composed from KeyT as: + // result_type(args...) + template + static auto MockFor(MockURBG& m) + -> decltype(m.template RegisterMock< + typename KeySignature::result_type, + typename KeySignature::arg_tuple_type>( + m, std::declval())) { + return m.template RegisterMock::result_type, + typename KeySignature::arg_tuple_type>( + m, ::absl::base_internal::FastTypeId()); + } +}; + +} // namespace random_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_RANDOM_INTERNAL_MOCK_HELPERS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/random/internal/nonsecure_base.h b/TMessagesProj/jni/voip/webrtc/absl/random/internal/nonsecure_base.h index 730fa2ea12..c3b80335ae 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/random/internal/nonsecure_base.h +++ b/TMessagesProj/jni/voip/webrtc/absl/random/internal/nonsecure_base.h @@ -17,28 +17,82 @@ #include #include -#include #include -#include -#include #include +#include #include #include "absl/base/macros.h" +#include "absl/container/inlined_vector.h" #include "absl/meta/type_traits.h" #include "absl/random/internal/pool_urbg.h" #include "absl/random/internal/salted_seed_seq.h" #include "absl/random/internal/seed_material.h" -#include "absl/types/optional.h" #include "absl/types/span.h" namespace absl { ABSL_NAMESPACE_BEGIN namespace random_internal { +// RandenPoolSeedSeq is a custom seed sequence type where generate() fills the +// provided buffer via the RandenPool entropy source. +class RandenPoolSeedSeq { + private: + struct ContiguousTag {}; + struct BufferTag {}; + + // Generate random unsigned values directly into the buffer. + template + void generate_impl(ContiguousTag, Contiguous begin, Contiguous end) { + const size_t n = static_cast(std::distance(begin, end)); + auto* a = &(*begin); + RandenPool::Fill( + absl::MakeSpan(reinterpret_cast(a), sizeof(*a) * n)); + } + + // Construct a buffer of size n and fill it with values, then copy + // those values into the seed iterators. 
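MockHelpers above is the plumbing; the public surface built on it is absl::MockingBitGen together with the matchers in absl/random/mock_distributions.h. Assuming those headers are also present in this vendored copy, a typical gMock-style test looks roughly like this sketch:

#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "absl/random/mock_distributions.h"
#include "absl/random/mocking_bit_gen.h"
#include "absl/random/random.h"

TEST(MockingBitGenExample, ForcesUniformResult) {
  absl::MockingBitGen gen;
  // The typeid-based key described above routes this expectation to the
  // absl::Uniform<int> call below.
  EXPECT_CALL(absl::MockUniform<int>(), Call(gen, 1, 1000))
      .WillOnce(testing::Return(42));
  EXPECT_EQ(absl::Uniform<int>(gen, 1, 1000), 42);
}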
+ template + void generate_impl(BufferTag, RandomAccessIterator begin, + RandomAccessIterator end) { + const size_t n = std::distance(begin, end); + absl::InlinedVector data(n, 0); + RandenPool::Fill(absl::MakeSpan(data.begin(), data.end())); + std::copy(std::begin(data), std::end(data), begin); + } + + public: + using result_type = uint32_t; + + size_t size() { return 0; } + + template + void param(OutIterator) const {} + + template + void generate(RandomAccessIterator begin, RandomAccessIterator end) { + // RandomAccessIterator must be assignable from uint32_t + if (begin != end) { + using U = typename std::iterator_traits::value_type; + // ContiguousTag indicates the common case of a known contiguous buffer, + // which allows directly filling the buffer. In C++20, + // std::contiguous_iterator_tag provides a mechanism for testing this + // capability, however until Abseil's support requirements allow us to + // assume C++20, limit checks to a few common cases. + using TagType = absl::conditional_t< + (std::is_pointer::value || + std::is_same::iterator>::value), + ContiguousTag, BufferTag>; + + generate_impl(TagType{}, begin, end); + } + } +}; + // Each instance of NonsecureURBGBase will be seeded by variates produced // by a thread-unique URBG-instance. -template +template class NonsecureURBGBase { public: using result_type = typename URBG::result_type; @@ -85,49 +139,6 @@ class NonsecureURBGBase { } private: - // Seeder is a custom seed sequence type where generate() fills the provided - // buffer via the RandenPool entropy source. - struct Seeder { - using result_type = uint32_t; - - size_t size() { return 0; } - - template - void param(OutIterator) const {} - - template - void generate(RandomAccessIterator begin, RandomAccessIterator end) { - if (begin != end) { - // begin, end must be random access iterators assignable from uint32_t. - generate_impl( - std::integral_constant{}, - begin, end); - } - } - - // Commonly, generate is invoked with a pointer to a buffer which - // can be cast to a uint32_t. - template - void generate_impl(std::integral_constant, - RandomAccessIterator begin, RandomAccessIterator end) { - auto buffer = absl::MakeSpan(begin, end); - auto target = absl::MakeSpan(reinterpret_cast(buffer.data()), - buffer.size()); - RandenPool::Fill(target); - } - - // The non-uint32_t case should be uncommon, and involves an extra copy, - // filling the uint32_t buffer and then mixing into the output. 
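RandenPoolSeedSeq above implements the standard seed-sequence protocol: the engine only ever calls generate() to fill its state. A deterministic toy sequence shows the same shape; FixedSeedSeq is illustrative only and must not be used for real seeding.

#include <cstddef>
#include <cstdint>
#include <random>

struct FixedSeedSeq {
  using result_type = uint32_t;
  std::size_t size() const { return 0; }
  template <typename OutIterator>
  void param(OutIterator) const {}
  template <typename RandomAccessIterator>
  void generate(RandomAccessIterator begin, RandomAccessIterator end) {
    uint32_t v = 0x9E3779B9u;  // arbitrary constant
    for (; begin != end; ++begin) *begin = v++;
  }
};

int main() {
  FixedSeedSeq seq;
  std::mt19937 gen(seq);  // the engine fills its state via seq.generate()
  return static_cast<int>(gen() & 1u);
}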
- template - void generate_impl(std::integral_constant, - RandomAccessIterator begin, RandomAccessIterator end) { - const size_t n = std::distance(begin, end); - absl::InlinedVector data(n, 0); - RandenPool::Fill(absl::MakeSpan(data.begin(), data.end())); - std::copy(std::begin(data), std::end(data), begin); - } - }; - static URBG ConstructURBG() { Seeder seeder; return URBG(seeder); diff --git a/TMessagesProj/jni/voip/webrtc/absl/random/internal/pcg_engine.h b/TMessagesProj/jni/voip/webrtc/absl/random/internal/pcg_engine.h index 4ab44c94af..e1f4ef3317 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/random/internal/pcg_engine.h +++ b/TMessagesProj/jni/voip/webrtc/absl/random/internal/pcg_engine.h @@ -221,47 +221,26 @@ class pcg_engine { template class pcg128_params { public: -#if ABSL_HAVE_INTRINSIC_INT128 - using state_type = __uint128_t; - static inline constexpr state_type make_u128(uint64_t a, uint64_t b) { - return (static_cast<__uint128_t>(a) << 64) | b; - } -#else using state_type = absl::uint128; - static inline constexpr state_type make_u128(uint64_t a, uint64_t b) { - return absl::MakeUint128(a, b); - } -#endif - static inline constexpr state_type multiplier() { - return make_u128(kMultA, kMultB); + return absl::MakeUint128(kMultA, kMultB); } static inline constexpr state_type increment() { - return make_u128(kIncA, kIncB); + return absl::MakeUint128(kIncA, kIncB); } }; // Implementation of the PCG xsl_rr_128_64 128-bit mixing function, which // accepts an input of state_type and mixes it into an output of result_type. struct pcg_xsl_rr_128_64 { -#if ABSL_HAVE_INTRINSIC_INT128 - using state_type = __uint128_t; -#else using state_type = absl::uint128; -#endif using result_type = uint64_t; inline uint64_t operator()(state_type state) { // This is equivalent to the xsl_rr_128_64 mixing function. -#if ABSL_HAVE_INTRINSIC_INT128 uint64_t rotate = static_cast(state >> 122u); state ^= state >> 64; uint64_t s = static_cast(state); -#else - uint64_t h = Uint128High64(state); - uint64_t rotate = h >> 58u; - uint64_t s = Uint128Low64(state) ^ h; -#endif return rotr(s, static_cast(rotate)); } }; diff --git a/TMessagesProj/jni/voip/webrtc/absl/random/internal/pool_urbg.cc b/TMessagesProj/jni/voip/webrtc/absl/random/internal/pool_urbg.cc index 725100a415..5aefa7d97b 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/random/internal/pool_urbg.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/random/internal/pool_urbg.cc @@ -131,7 +131,7 @@ void RandenPoolEntry::Fill(uint8_t* out, size_t bytes) { } // Number of pooled urbg entries. -static constexpr int kPoolSize = 8; +static constexpr size_t kPoolSize = 8; // Shared pool entries. static absl::once_flag pool_once; @@ -147,15 +147,15 @@ ABSL_CACHELINE_ALIGNED static RandenPoolEntry* shared_pools[kPoolSize]; // on subsequent runs the order within the same program may be significantly // different. However, as other thread IDs are not assigned sequentially, // this is not expected to matter. 
-int GetPoolID() { +size_t GetPoolID() { static_assert(kPoolSize >= 1, "At least one urbg instance is required for PoolURBG"); - ABSL_CONST_INIT static std::atomic sequence{0}; + ABSL_CONST_INIT static std::atomic sequence{0}; #ifdef ABSL_HAVE_THREAD_LOCAL - static thread_local int my_pool_id = -1; - if (ABSL_PREDICT_FALSE(my_pool_id < 0)) { + static thread_local size_t my_pool_id = kPoolSize; + if (ABSL_PREDICT_FALSE(my_pool_id == kPoolSize)) { my_pool_id = (sequence++ % kPoolSize); } return my_pool_id; @@ -171,8 +171,8 @@ int GetPoolID() { // Store the value in the pthread_{get/set}specific. However an uninitialized // value is 0, so add +1 to distinguish from the null value. - intptr_t my_pool_id = - reinterpret_cast(pthread_getspecific(tid_key)); + uintptr_t my_pool_id = + reinterpret_cast(pthread_getspecific(tid_key)); if (ABSL_PREDICT_FALSE(my_pool_id == 0)) { // No allocated ID, allocate the next value, cache it, and return. my_pool_id = (sequence++ % kPoolSize) + 1; @@ -194,7 +194,7 @@ RandenPoolEntry* PoolAlignedAlloc() { // Not all the platforms that we build for have std::aligned_alloc, however // since we never free these objects, we can over allocate and munge the // pointers to the correct alignment. - intptr_t x = reinterpret_cast( + uintptr_t x = reinterpret_cast( new char[sizeof(RandenPoolEntry) + kAlignment]); auto y = x % kAlignment; void* aligned = reinterpret_cast(y == 0 ? x : (x + kAlignment - y)); @@ -215,7 +215,7 @@ void InitPoolURBG() { absl::MakeSpan(seed_material))) { random_internal::ThrowSeedGenException(); } - for (int i = 0; i < kPoolSize; i++) { + for (size_t i = 0; i < kPoolSize; i++) { shared_pools[i] = PoolAlignedAlloc(); shared_pools[i]->Init( absl::MakeSpan(&seed_material[i * kSeedSize], kSeedSize)); diff --git a/TMessagesProj/jni/voip/webrtc/absl/random/internal/randen_detect.cc b/TMessagesProj/jni/voip/webrtc/absl/random/internal/randen_detect.cc index 9bb58fc68c..6dababa351 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/random/internal/randen_detect.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/random/internal/randen_detect.cc @@ -24,6 +24,11 @@ #include "absl/random/internal/platform.h" +#if !defined(__UCLIBC__) && defined(__GLIBC__) && \ + (__GLIBC__ > 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ >= 16)) +#define ABSL_HAVE_GETAUXVAL +#endif + #if defined(ABSL_ARCH_X86_64) #define ABSL_INTERNAL_USE_X86_CPUID #elif defined(ABSL_ARCH_PPC) || defined(ABSL_ARCH_ARM) || \ @@ -31,7 +36,7 @@ #if defined(__ANDROID__) #define ABSL_INTERNAL_USE_ANDROID_GETAUXVAL #define ABSL_INTERNAL_USE_GETAUXVAL -#elif defined(__linux__) +#elif defined(__linux__) && defined(ABSL_HAVE_GETAUXVAL) #define ABSL_INTERNAL_USE_LINUX_GETAUXVAL #define ABSL_INTERNAL_USE_GETAUXVAL #endif diff --git a/TMessagesProj/jni/voip/webrtc/absl/random/internal/salted_seed_seq.h b/TMessagesProj/jni/voip/webrtc/absl/random/internal/salted_seed_seq.h index 5953a090f8..06291865e3 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/random/internal/salted_seed_seq.h +++ b/TMessagesProj/jni/voip/webrtc/absl/random/internal/salted_seed_seq.h @@ -22,6 +22,7 @@ #include #include #include +#include #include "absl/container/inlined_vector.h" #include "absl/meta/type_traits.h" @@ -65,15 +66,19 @@ class SaltedSeedSeq { template void generate(RandomAccessIterator begin, RandomAccessIterator end) { + using U = typename std::iterator_traits::value_type; + // The common case is that generate is called with ContiguousIterators // to uint arrays. Such contiguous memory regions may be optimized, // which we detect here. 
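GetPoolID() above round-robins threads over a fixed pool and caches the assignment in a thread_local. The same pattern in isolation, with MyPoolSlot and kSlots as illustrative names:

#include <atomic>
#include <cstddef>

constexpr std::size_t kSlots = 8;

std::size_t MyPoolSlot() {
  static std::atomic<std::size_t> sequence{0};
  // kSlots doubles as the "unassigned" sentinel, mirroring the diff's use of
  // kPoolSize instead of -1 now that the ID is unsigned.
  static thread_local std::size_t slot = kSlots;
  if (slot == kSlots) {
    slot = sequence.fetch_add(1, std::memory_order_relaxed) % kSlots;
  }
  return slot;
}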
- using tag = absl::conditional_t< - (std::is_pointer::value && - std::is_same, uint32_t>::value), + using TagType = absl::conditional_t< + (std::is_same::value && + (std::is_pointer::value || + std::is_same::iterator>::value)), ContiguousAndUint32Tag, DefaultTag>; if (begin != end) { - generate_impl(begin, end, tag{}); + generate_impl(TagType{}, begin, end, std::distance(begin, end)); } } @@ -89,8 +94,15 @@ class SaltedSeedSeq { struct DefaultTag {}; // Generate which requires the iterators are contiguous pointers to uint32_t. - void generate_impl(uint32_t* begin, uint32_t* end, ContiguousAndUint32Tag) { - generate_contiguous(absl::MakeSpan(begin, end)); + // Fills the initial seed buffer the underlying SSeq::generate() call, + // then mixes in the salt material. + template + void generate_impl(ContiguousAndUint32Tag, Contiguous begin, Contiguous end, + size_t n) { + seq_->generate(begin, end); + const uint32_t salt = absl::random_internal::GetSaltMaterial().value_or(0); + auto span = absl::Span(&*begin, n); + MixIntoSeedMaterial(absl::MakeConstSpan(&salt, 1), span); } // The uncommon case for generate is that it is called with iterators over @@ -98,27 +110,13 @@ class SaltedSeedSeq { // case we allocate a temporary 32-bit buffer and then copy-assign back // to the initial inputs. template - void generate_impl(RandomAccessIterator begin, RandomAccessIterator end, - DefaultTag) { - return generate_and_copy(std::distance(begin, end), begin); - } - - // Fills the initial seed buffer the underlying SSeq::generate() call, - // mixing in the salt material. - void generate_contiguous(absl::Span buffer) { - seq_->generate(buffer.begin(), buffer.end()); - const uint32_t salt = absl::random_internal::GetSaltMaterial().value_or(0); - MixIntoSeedMaterial(absl::MakeConstSpan(&salt, 1), buffer); - } - - // Allocates a seed buffer of `n` elements, generates the seed, then - // copies the result into the `out` iterator. - template - void generate_and_copy(size_t n, Iterator out) { - // Allocate a temporary buffer, generate, and then copy. + void generate_impl(DefaultTag, RandomAccessIterator begin, + RandomAccessIterator, size_t n) { + // Allocates a seed buffer of `n` elements, generates the seed, then + // copies the result into the `out` iterator. absl::InlinedVector data(n, 0); - generate_contiguous(absl::MakeSpan(data.data(), data.size())); - std::copy(data.begin(), data.end(), out); + generate_impl(ContiguousAndUint32Tag{}, data.begin(), data.end(), n); + std::copy(data.begin(), data.end(), begin); } // Because [rand.req.seedseq] is not required to be copy-constructible, diff --git a/TMessagesProj/jni/voip/webrtc/absl/random/internal/seed_material.cc b/TMessagesProj/jni/voip/webrtc/absl/random/internal/seed_material.cc index c03cad8502..1041302b58 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/random/internal/seed_material.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/random/internal/seed_material.cc @@ -173,12 +173,12 @@ bool ReadSeedMaterialFromDevURandom(absl::Span values) { } while (success && buffer_size > 0) { - int bytes_read = read(dev_urandom, buffer, buffer_size); + ssize_t bytes_read = read(dev_urandom, buffer, buffer_size); int read_error = errno; success = (bytes_read > 0); if (success) { buffer += bytes_read; - buffer_size -= bytes_read; + buffer_size -= static_cast(bytes_read); } else if (bytes_read == -1 && read_error == EINTR) { success = true; // Need to try again. 
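The SaltedSeedSeq change above keeps the same two-path shape: contiguous uint32_t output is generated in place and salted, anything else goes through a temporary buffer and a copy. A simplified standalone sketch of that tag dispatch follows; it uses std::vector instead of absl::InlinedVector, checks only the pointer case for contiguity, and mixes in a fixed XOR in place of MixIntoSeedMaterial, so it is an illustration of the pattern rather than the Abseil code.

// Sketch only: tag dispatch between a contiguous fast path and a generic path.
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <list>
#include <type_traits>
#include <vector>

struct ContiguousAndUint32Tag {};
struct DefaultTag {};

// Fast path: write directly through the range, then mix in "salt".
template <typename Contiguous>
void GenerateImpl(ContiguousAndUint32Tag, Contiguous begin, Contiguous end) {
  uint32_t next = 1;
  for (auto it = begin; it != end; ++it) *it = next++;
  const uint32_t salt = 0x5a5a5a5a;  // stand-in for real salt material
  for (auto it = begin; it != end; ++it) *it ^= salt;
}

// Generic path: generate into a temporary uint32_t buffer, then copy out.
template <typename Iterator>
void GenerateImpl(DefaultTag, Iterator begin, Iterator end) {
  std::vector<uint32_t> data(
      static_cast<std::size_t>(std::distance(begin, end)), 0);
  GenerateImpl(ContiguousAndUint32Tag{}, data.begin(), data.end());
  std::copy(data.begin(), data.end(), begin);
}

template <typename Iterator>
void Generate(Iterator begin, Iterator end) {
  using U = typename std::iterator_traits<Iterator>::value_type;
  using Tag = std::conditional_t<
      std::is_same<U, uint32_t>::value && std::is_pointer<Iterator>::value,
      ContiguousAndUint32Tag, DefaultTag>;
  if (begin != end) GenerateImpl(Tag{}, begin, end);
}

int main() {
  uint32_t buf[4];
  Generate(buf, buf + 4);                // contiguous fast path
  std::list<uint32_t> other(4, 0);
  Generate(other.begin(), other.end());  // generic path via temporary buffer
  return 0;
}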
} diff --git a/TMessagesProj/jni/voip/webrtc/absl/random/seed_sequences.h b/TMessagesProj/jni/voip/webrtc/absl/random/seed_sequences.h index ff1340cc8e..c3af4b00a4 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/random/seed_sequences.h +++ b/TMessagesProj/jni/voip/webrtc/absl/random/seed_sequences.h @@ -28,6 +28,7 @@ #include #include +#include "absl/base/config.h" #include "absl/random/internal/salted_seed_seq.h" #include "absl/random/internal/seed_material.h" #include "absl/random/seed_gen_exception.h" diff --git a/TMessagesProj/jni/voip/webrtc/absl/random/uniform_real_distribution.h b/TMessagesProj/jni/voip/webrtc/absl/random/uniform_real_distribution.h index 5ba17b2341..196833415e 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/random/uniform_real_distribution.h +++ b/TMessagesProj/jni/voip/webrtc/absl/random/uniform_real_distribution.h @@ -73,12 +73,12 @@ class uniform_real_distribution { : lo_(lo), hi_(hi), range_(hi - lo) { // [rand.dist.uni.real] preconditions 2 & 3 assert(lo <= hi); + // NOTE: For integral types, we can promote the range to an unsigned type, // which gives full width of the range. However for real (fp) types, this // is not possible, so value generation cannot use the full range of the // real type. assert(range_ <= (std::numeric_limits::max)()); - assert(std::isfinite(range_)); } result_type a() const { return lo_; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/status/internal/status_internal.h b/TMessagesProj/jni/voip/webrtc/absl/status/internal/status_internal.h index 34914d2e5f..873eb5c245 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/status/internal/status_internal.h +++ b/TMessagesProj/jni/voip/webrtc/absl/status/internal/status_internal.h @@ -14,7 +14,9 @@ #ifndef ABSL_STATUS_INTERNAL_STATUS_INTERNAL_H_ #define ABSL_STATUS_INTERNAL_STATUS_INTERNAL_H_ +#include #include +#include #include "absl/base/attributes.h" #include "absl/container/inlined_vector.h" @@ -69,6 +71,14 @@ struct StatusRep { }; absl::StatusCode MapToLocalCode(int value); + +// Returns a pointer to a newly-allocated string with the given `prefix`, +// suitable for output as an error message in assertion/`CHECK()` failures. +// +// This is an internal implementation detail for Abseil logging. +std::string* MakeCheckFailString(const absl::Status* status, + const char* prefix); + } // namespace status_internal ABSL_NAMESPACE_END diff --git a/TMessagesProj/jni/voip/webrtc/absl/status/status.cc b/TMessagesProj/jni/voip/webrtc/absl/status/status.cc index 6b316ac694..bbf2335d85 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/status/status.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/status/status.cc @@ -13,9 +13,13 @@ // limitations under the License. 
#include "absl/status/status.h" +#include + #include +#include #include "absl/base/internal/raw_logging.h" +#include "absl/base/internal/strerror.h" #include "absl/debugging/stacktrace.h" #include "absl/debugging/symbolize.h" #include "absl/status/status_payload_printer.h" @@ -74,15 +78,17 @@ std::ostream& operator<<(std::ostream& os, StatusCode code) { namespace status_internal { -static int FindPayloadIndexByUrl(const Payloads* payloads, - absl::string_view type_url) { - if (payloads == nullptr) return -1; +static absl::optional FindPayloadIndexByUrl( + const Payloads* payloads, + absl::string_view type_url) { + if (payloads == nullptr) + return absl::nullopt; for (size_t i = 0; i < payloads->size(); ++i) { if ((*payloads)[i].type_url == type_url) return i; } - return -1; + return absl::nullopt; } // Convert canonical code to a value known to this binary. @@ -116,8 +122,10 @@ absl::StatusCode MapToLocalCode(int value) { absl::optional Status::GetPayload( absl::string_view type_url) const { const auto* payloads = GetPayloads(); - int index = status_internal::FindPayloadIndexByUrl(payloads, type_url); - if (index != -1) return (*payloads)[index].payload; + absl::optional index = + status_internal::FindPayloadIndexByUrl(payloads, type_url); + if (index.has_value()) + return (*payloads)[index.value()].payload; return absl::nullopt; } @@ -132,10 +140,10 @@ void Status::SetPayload(absl::string_view type_url, absl::Cord payload) { rep->payloads = absl::make_unique(); } - int index = + absl::optional index = status_internal::FindPayloadIndexByUrl(rep->payloads.get(), type_url); - if (index != -1) { - (*rep->payloads)[index].payload = std::move(payload); + if (index.has_value()) { + (*rep->payloads)[index.value()].payload = std::move(payload); return; } @@ -143,10 +151,11 @@ void Status::SetPayload(absl::string_view type_url, absl::Cord payload) { } bool Status::ErasePayload(absl::string_view type_url) { - int index = status_internal::FindPayloadIndexByUrl(GetPayloads(), type_url); - if (index != -1) { + absl::optional index = + status_internal::FindPayloadIndexByUrl(GetPayloads(), type_url); + if (index.has_value()) { PrepareToModify(); - GetPayloads()->erase(GetPayloads()->begin() + index); + GetPayloads()->erase(GetPayloads()->begin() + index.value()); if (GetPayloads()->empty() && message().empty()) { // Special case: If this can be represented inlined, it MUST be // inlined (EqualsSlow depends on this behavior). 
@@ -192,7 +201,9 @@ const std::string* Status::EmptyString() { return &empty.str; } +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL constexpr const char Status::kMovedFromString[]; +#endif const std::string* Status::MovedFromString() { static std::string* moved_from_string = new std::string(kMovedFromString); @@ -443,5 +454,169 @@ bool IsUnknown(const Status& status) { return status.code() == absl::StatusCode::kUnknown; } +StatusCode ErrnoToStatusCode(int error_number) { + switch (error_number) { + case 0: + return StatusCode::kOk; + case EINVAL: // Invalid argument + case ENAMETOOLONG: // Filename too long + case E2BIG: // Argument list too long + case EDESTADDRREQ: // Destination address required + case EDOM: // Mathematics argument out of domain of function + case EFAULT: // Bad address + case EILSEQ: // Illegal byte sequence + case ENOPROTOOPT: // Protocol not available + case ENOSTR: // Not a STREAM + case ENOTSOCK: // Not a socket + case ENOTTY: // Inappropriate I/O control operation + case EPROTOTYPE: // Protocol wrong type for socket + case ESPIPE: // Invalid seek + return StatusCode::kInvalidArgument; + case ETIMEDOUT: // Connection timed out + case ETIME: // Timer expired + return StatusCode::kDeadlineExceeded; + case ENODEV: // No such device + case ENOENT: // No such file or directory +#ifdef ENOMEDIUM + case ENOMEDIUM: // No medium found +#endif + case ENXIO: // No such device or address + case ESRCH: // No such process + return StatusCode::kNotFound; + case EEXIST: // File exists + case EADDRNOTAVAIL: // Address not available + case EALREADY: // Connection already in progress +#ifdef ENOTUNIQ + case ENOTUNIQ: // Name not unique on network +#endif + return StatusCode::kAlreadyExists; + case EPERM: // Operation not permitted + case EACCES: // Permission denied +#ifdef ENOKEY + case ENOKEY: // Required key not available +#endif + case EROFS: // Read only file system + return StatusCode::kPermissionDenied; + case ENOTEMPTY: // Directory not empty + case EISDIR: // Is a directory + case ENOTDIR: // Not a directory + case EADDRINUSE: // Address already in use + case EBADF: // Invalid file descriptor +#ifdef EBADFD + case EBADFD: // File descriptor in bad state +#endif + case EBUSY: // Device or resource busy + case ECHILD: // No child processes + case EISCONN: // Socket is connected +#ifdef EISNAM + case EISNAM: // Is a named type file +#endif +#ifdef ENOTBLK + case ENOTBLK: // Block device required +#endif + case ENOTCONN: // The socket is not connected + case EPIPE: // Broken pipe +#ifdef ESHUTDOWN + case ESHUTDOWN: // Cannot send after transport endpoint shutdown +#endif + case ETXTBSY: // Text file busy +#ifdef EUNATCH + case EUNATCH: // Protocol driver not attached +#endif + return StatusCode::kFailedPrecondition; + case ENOSPC: // No space left on device +#ifdef EDQUOT + case EDQUOT: // Disk quota exceeded +#endif + case EMFILE: // Too many open files + case EMLINK: // Too many links + case ENFILE: // Too many open files in system + case ENOBUFS: // No buffer space available + case ENODATA: // No message is available on the STREAM read queue + case ENOMEM: // Not enough space + case ENOSR: // No STREAM resources +#ifdef EUSERS + case EUSERS: // Too many users +#endif + return StatusCode::kResourceExhausted; +#ifdef ECHRNG + case ECHRNG: // Channel number out of range +#endif + case EFBIG: // File too large + case EOVERFLOW: // Value too large to be stored in data type + case ERANGE: // Result too large + return StatusCode::kOutOfRange; +#ifdef ENOPKG + case ENOPKG: // 
Package not installed +#endif + case ENOSYS: // Function not implemented + case ENOTSUP: // Operation not supported + case EAFNOSUPPORT: // Address family not supported +#ifdef EPFNOSUPPORT + case EPFNOSUPPORT: // Protocol family not supported +#endif + case EPROTONOSUPPORT: // Protocol not supported +#ifdef ESOCKTNOSUPPORT + case ESOCKTNOSUPPORT: // Socket type not supported +#endif + case EXDEV: // Improper link + return StatusCode::kUnimplemented; + case EAGAIN: // Resource temporarily unavailable +#ifdef ECOMM + case ECOMM: // Communication error on send +#endif + case ECONNREFUSED: // Connection refused + case ECONNABORTED: // Connection aborted + case ECONNRESET: // Connection reset + case EINTR: // Interrupted function call +#ifdef EHOSTDOWN + case EHOSTDOWN: // Host is down +#endif + case EHOSTUNREACH: // Host is unreachable + case ENETDOWN: // Network is down + case ENETRESET: // Connection aborted by network + case ENETUNREACH: // Network unreachable + case ENOLCK: // No locks available + case ENOLINK: // Link has been severed +#ifdef ENONET + case ENONET: // Machine is not on the network +#endif + return StatusCode::kUnavailable; + case EDEADLK: // Resource deadlock avoided +#ifdef ESTALE + case ESTALE: // Stale file handle +#endif + return StatusCode::kAborted; + case ECANCELED: // Operation cancelled + return StatusCode::kCancelled; + default: + return StatusCode::kUnknown; + } +} + +namespace { +std::string MessageForErrnoToStatus(int error_number, + absl::string_view message) { + return absl::StrCat(message, ": ", + absl::base_internal::StrError(error_number)); +} +} // namespace + +Status ErrnoToStatus(int error_number, absl::string_view message) { + return Status(ErrnoToStatusCode(error_number), + MessageForErrnoToStatus(error_number, message)); +} + +namespace status_internal { + +std::string* MakeCheckFailString(const absl::Status* status, + const char* prefix) { + return new std::string( + absl::StrCat(prefix, " (", + status->ToString(StatusToStringMode::kWithEverything), ")")); +} + +} // namespace status_internal + ABSL_NAMESPACE_END } // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/status/status.h b/TMessagesProj/jni/voip/webrtc/absl/status/status.h index db4b340a91..4e8292fc0e 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/status/status.h +++ b/TMessagesProj/jni/voip/webrtc/absl/status/status.h @@ -24,11 +24,11 @@ // * A set of helper functions for creating status codes and checking their // values // -// Within Google, `absl::Status` is the primary mechanism for gracefully -// handling errors across API boundaries (and in particular across RPC -// boundaries). Some of these errors may be recoverable, but others may not. -// Most functions that can produce a recoverable error should be designed to -// return an `absl::Status` (or `absl::StatusOr`). +// Within Google, `absl::Status` is the primary mechanism for communicating +// errors in C++, and is used to represent error state in both in-process +// library calls as well as RPC calls. Some of these errors may be recoverable, +// but others may not. Most functions that can produce a recoverable error +// should be designed to return an `absl::Status` (or `absl::StatusOr`). 
// // Example: // @@ -51,10 +51,10 @@ #ifndef ABSL_STATUS_STATUS_H_ #define ABSL_STATUS_STATUS_H_ -#include +#include #include +#include -#include "absl/container/inlined_vector.h" #include "absl/functional/function_ref.h" #include "absl/status/internal/status_internal.h" #include "absl/strings/cord.h" @@ -613,10 +613,6 @@ class Status final { const status_internal::Payloads* GetPayloads() const; status_internal::Payloads* GetPayloads(); - // Takes ownership of payload. - static uintptr_t NewRep( - absl::StatusCode code, absl::string_view msg, - std::unique_ptr payload); static bool EqualsSlow(const absl::Status& a, const absl::Status& b); // MSVC 14.0 limitation requires the const. @@ -742,6 +738,19 @@ Status UnavailableError(absl::string_view message); Status UnimplementedError(absl::string_view message); Status UnknownError(absl::string_view message); +// ErrnoToStatusCode() +// +// Returns the StatusCode for `error_number`, which should be an `errno` value. +// See https://en.cppreference.com/w/cpp/error/errno_macros and similar +// references. +absl::StatusCode ErrnoToStatusCode(int error_number); + +// ErrnoToStatus() +// +// Convenience function that creates a `absl::Status` using an `error_number`, +// which should be an `errno` value. +Status ErrnoToStatus(int error_number, absl::string_view message); + //------------------------------------------------------------------------------ // Implementation details follow //------------------------------------------------------------------------------ diff --git a/TMessagesProj/jni/voip/webrtc/absl/status/statusor.h b/TMessagesProj/jni/voip/webrtc/absl/status/statusor.h index d6ebdc2b7b..a76e720153 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/status/statusor.h +++ b/TMessagesProj/jni/voip/webrtc/absl/status/statusor.h @@ -162,8 +162,8 @@ class ABSL_MUST_USE_RESULT StatusOr; // A `absl::StatusOr` can be constructed from a null pointer like any other // pointer value, and the result will be that `ok()` returns `true` and // `value()` returns `nullptr`. Checking the value of pointer in an -// `absl::StatusOr` generally requires a bit more care, to ensure both that a -// value is present and that value is not null: +// `absl::StatusOr` generally requires a bit more care, to ensure both that +// a value is present and that value is not null: // // StatusOr> result = FooFactory::MakeNewFoo(arg); // if (!result.ok()) { @@ -477,7 +477,7 @@ class StatusOr : private internal_statusor::StatusOrData, // StatusOr::ok() // // Returns whether or not this `absl::StatusOr` holds a `T` value. This - // member function is analagous to `absl::Status::ok()` and should be used + // member function is analogous to `absl::Status::ok()` and should be used // similarly to check the status of return values. 
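The ErrnoToStatusCode()/ErrnoToStatus() additions declared above are typically called right after a failed POSIX call, while errno is still fresh. A short usage sketch (the file path and helper name are illustrative):

// Sketch only: wrapping a failed POSIX call into an absl::Status.
#include <cerrno>
#include <fcntl.h>
#include <iostream>
#include <unistd.h>

#include "absl/status/status.h"

absl::Status OpenForRead(const char* path, int* out_fd) {
  int fd = open(path, O_RDONLY);
  if (fd < 0) {
    // ENOENT maps to kNotFound, EACCES to kPermissionDenied, and so on,
    // per the ErrnoToStatusCode() switch in this patch.
    return absl::ErrnoToStatus(errno, "open() failed");
  }
  *out_fd = fd;
  return absl::OkStatus();
}

int main() {
  int fd = -1;
  absl::Status s = OpenForRead("/no/such/file", &fd);
  if (!s.ok()) {
    std::cout << s.ToString() << "\n";  // e.g. NOT_FOUND: open() failed: ...
  } else {
    close(fd);
  }
  return 0;
}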
// // Example: diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/ascii.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/ascii.cc index 93bb03e958..868df2d102 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/ascii.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/ascii.cc @@ -157,13 +157,13 @@ ABSL_DLL const char kToUpper[256] = { void AsciiStrToLower(std::string* s) { for (auto& ch : *s) { - ch = absl::ascii_tolower(ch); + ch = absl::ascii_tolower(static_cast(ch)); } } void AsciiStrToUpper(std::string* s) { for (auto& ch : *s) { - ch = absl::ascii_toupper(ch); + ch = absl::ascii_toupper(static_cast(ch)); } } @@ -183,17 +183,17 @@ void RemoveExtraAsciiWhitespace(std::string* str) { for (; input_it < input_end; ++input_it) { if (is_ws) { // Consecutive whitespace? Keep only the last. - is_ws = absl::ascii_isspace(*input_it); + is_ws = absl::ascii_isspace(static_cast(*input_it)); if (is_ws) --output_it; } else { - is_ws = absl::ascii_isspace(*input_it); + is_ws = absl::ascii_isspace(static_cast(*input_it)); } *output_it = *input_it; ++output_it; } - str->erase(output_it - &(*str)[0]); + str->erase(static_cast(output_it - &(*str)[0])); } ABSL_NAMESPACE_END diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/ascii_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/ascii_test.cc index 83af7825e1..dfed114c21 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/ascii_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/ascii_test.cc @@ -27,103 +27,99 @@ namespace { TEST(AsciiIsFoo, All) { for (int i = 0; i < 256; i++) { - if ((i >= 'a' && i <= 'z') || (i >= 'A' && i <= 'Z')) - EXPECT_TRUE(absl::ascii_isalpha(i)) << ": failed on " << i; + const auto c = static_cast(i); + if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')) + EXPECT_TRUE(absl::ascii_isalpha(c)) << ": failed on " << c; else - EXPECT_TRUE(!absl::ascii_isalpha(i)) << ": failed on " << i; + EXPECT_TRUE(!absl::ascii_isalpha(c)) << ": failed on " << c; } for (int i = 0; i < 256; i++) { - if ((i >= '0' && i <= '9')) - EXPECT_TRUE(absl::ascii_isdigit(i)) << ": failed on " << i; + const auto c = static_cast(i); + if ((c >= '0' && c <= '9')) + EXPECT_TRUE(absl::ascii_isdigit(c)) << ": failed on " << c; else - EXPECT_TRUE(!absl::ascii_isdigit(i)) << ": failed on " << i; + EXPECT_TRUE(!absl::ascii_isdigit(c)) << ": failed on " << c; } for (int i = 0; i < 256; i++) { - if (absl::ascii_isalpha(i) || absl::ascii_isdigit(i)) - EXPECT_TRUE(absl::ascii_isalnum(i)) << ": failed on " << i; + const auto c = static_cast(i); + if (absl::ascii_isalpha(c) || absl::ascii_isdigit(c)) + EXPECT_TRUE(absl::ascii_isalnum(c)) << ": failed on " << c; else - EXPECT_TRUE(!absl::ascii_isalnum(i)) << ": failed on " << i; + EXPECT_TRUE(!absl::ascii_isalnum(c)) << ": failed on " << c; } for (int i = 0; i < 256; i++) { + const auto c = static_cast(i); if (i != '\0' && strchr(" \r\n\t\v\f", i)) - EXPECT_TRUE(absl::ascii_isspace(i)) << ": failed on " << i; + EXPECT_TRUE(absl::ascii_isspace(c)) << ": failed on " << c; else - EXPECT_TRUE(!absl::ascii_isspace(i)) << ": failed on " << i; + EXPECT_TRUE(!absl::ascii_isspace(c)) << ": failed on " << c; } for (int i = 0; i < 256; i++) { + const auto c = static_cast(i); if (i >= 32 && i < 127) - EXPECT_TRUE(absl::ascii_isprint(i)) << ": failed on " << i; + EXPECT_TRUE(absl::ascii_isprint(c)) << ": failed on " << c; else - EXPECT_TRUE(!absl::ascii_isprint(i)) << ": failed on " << i; + EXPECT_TRUE(!absl::ascii_isprint(c)) << ": failed on " << c; } for (int i = 0; i < 256; i++) { - if (absl::ascii_isprint(i) 
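The static_cast<unsigned char> added throughout ascii.cc above is the usual fix for passing a plain char (which may be signed, and negative for bytes at or above 0x80) into a character-classification or conversion function. A minimal standalone sketch of the idiom with the standard <cctype> functions, which carry the same precondition:

// Sketch only: cast char to unsigned char before <cctype>-style calls.
#include <cctype>
#include <iostream>
#include <string>

std::string ToLowerAscii(std::string s) {
  for (char& ch : s) {
    // Passing a negative char directly to std::tolower is undefined
    // behavior; the unsigned char cast keeps the value in [0, 255].
    ch = static_cast<char>(std::tolower(static_cast<unsigned char>(ch)));
  }
  return s;
}

int main() {
  std::cout << ToLowerAscii("Hello, ABSL!") << "\n";  // hello, absl!
  return 0;
}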
&& !absl::ascii_isspace(i) && - !absl::ascii_isalnum(i)) - EXPECT_TRUE(absl::ascii_ispunct(i)) << ": failed on " << i; - else - EXPECT_TRUE(!absl::ascii_ispunct(i)) << ": failed on " << i; + const auto c = static_cast(i); + if (absl::ascii_isprint(c) && !absl::ascii_isspace(c) && + !absl::ascii_isalnum(c)) { + EXPECT_TRUE(absl::ascii_ispunct(c)) << ": failed on " << c; + } else { + EXPECT_TRUE(!absl::ascii_ispunct(c)) << ": failed on " << c; + } } for (int i = 0; i < 256; i++) { + const auto c = static_cast(i); if (i == ' ' || i == '\t') - EXPECT_TRUE(absl::ascii_isblank(i)) << ": failed on " << i; + EXPECT_TRUE(absl::ascii_isblank(c)) << ": failed on " << c; else - EXPECT_TRUE(!absl::ascii_isblank(i)) << ": failed on " << i; + EXPECT_TRUE(!absl::ascii_isblank(c)) << ": failed on " << c; } for (int i = 0; i < 256; i++) { + const auto c = static_cast(i); if (i < 32 || i == 127) - EXPECT_TRUE(absl::ascii_iscntrl(i)) << ": failed on " << i; + EXPECT_TRUE(absl::ascii_iscntrl(c)) << ": failed on " << c; else - EXPECT_TRUE(!absl::ascii_iscntrl(i)) << ": failed on " << i; + EXPECT_TRUE(!absl::ascii_iscntrl(c)) << ": failed on " << c; } for (int i = 0; i < 256; i++) { - if (absl::ascii_isdigit(i) || (i >= 'A' && i <= 'F') || - (i >= 'a' && i <= 'f')) - EXPECT_TRUE(absl::ascii_isxdigit(i)) << ": failed on " << i; - else - EXPECT_TRUE(!absl::ascii_isxdigit(i)) << ": failed on " << i; + const auto c = static_cast(i); + if (absl::ascii_isdigit(c) || (i >= 'A' && i <= 'F') || + (i >= 'a' && i <= 'f')) { + EXPECT_TRUE(absl::ascii_isxdigit(c)) << ": failed on " << c; + } else { + EXPECT_TRUE(!absl::ascii_isxdigit(c)) << ": failed on " << c; + } } for (int i = 0; i < 256; i++) { + const auto c = static_cast(i); if (i > 32 && i < 127) - EXPECT_TRUE(absl::ascii_isgraph(i)) << ": failed on " << i; + EXPECT_TRUE(absl::ascii_isgraph(c)) << ": failed on " << c; else - EXPECT_TRUE(!absl::ascii_isgraph(i)) << ": failed on " << i; + EXPECT_TRUE(!absl::ascii_isgraph(c)) << ": failed on " << c; } for (int i = 0; i < 256; i++) { + const auto c = static_cast(i); if (i >= 'A' && i <= 'Z') - EXPECT_TRUE(absl::ascii_isupper(i)) << ": failed on " << i; + EXPECT_TRUE(absl::ascii_isupper(c)) << ": failed on " << c; else - EXPECT_TRUE(!absl::ascii_isupper(i)) << ": failed on " << i; + EXPECT_TRUE(!absl::ascii_isupper(c)) << ": failed on " << c; } for (int i = 0; i < 256; i++) { + const auto c = static_cast(i); if (i >= 'a' && i <= 'z') - EXPECT_TRUE(absl::ascii_islower(i)) << ": failed on " << i; + EXPECT_TRUE(absl::ascii_islower(c)) << ": failed on " << c; else - EXPECT_TRUE(!absl::ascii_islower(i)) << ": failed on " << i; + EXPECT_TRUE(!absl::ascii_islower(c)) << ": failed on " << c; } - for (int i = 0; i < 128; i++) { - EXPECT_TRUE(absl::ascii_isascii(i)) << ": failed on " << i; + for (unsigned char c = 0; c < 128; c++) { + EXPECT_TRUE(absl::ascii_isascii(c)) << ": failed on " << c; } for (int i = 128; i < 256; i++) { - EXPECT_TRUE(!absl::ascii_isascii(i)) << ": failed on " << i; - } - - // The official is* functions don't accept negative signed chars, but - // our absl::ascii_is* functions do. 
- for (int i = 0; i < 256; i++) { - signed char sc = static_cast(static_cast(i)); - EXPECT_EQ(absl::ascii_isalpha(i), absl::ascii_isalpha(sc)) << i; - EXPECT_EQ(absl::ascii_isdigit(i), absl::ascii_isdigit(sc)) << i; - EXPECT_EQ(absl::ascii_isalnum(i), absl::ascii_isalnum(sc)) << i; - EXPECT_EQ(absl::ascii_isspace(i), absl::ascii_isspace(sc)) << i; - EXPECT_EQ(absl::ascii_ispunct(i), absl::ascii_ispunct(sc)) << i; - EXPECT_EQ(absl::ascii_isblank(i), absl::ascii_isblank(sc)) << i; - EXPECT_EQ(absl::ascii_iscntrl(i), absl::ascii_iscntrl(sc)) << i; - EXPECT_EQ(absl::ascii_isxdigit(i), absl::ascii_isxdigit(sc)) << i; - EXPECT_EQ(absl::ascii_isprint(i), absl::ascii_isprint(sc)) << i; - EXPECT_EQ(absl::ascii_isgraph(i), absl::ascii_isgraph(sc)) << i; - EXPECT_EQ(absl::ascii_isupper(i), absl::ascii_isupper(sc)) << i; - EXPECT_EQ(absl::ascii_islower(i), absl::ascii_islower(sc)) << i; - EXPECT_EQ(absl::ascii_isascii(i), absl::ascii_isascii(sc)) << i; + const auto c = static_cast(i); + EXPECT_TRUE(!absl::ascii_isascii(c)) << ": failed on " << c; } } @@ -137,19 +133,20 @@ TEST(AsciiIsFoo, SameAsIsFoo) { #endif for (int i = 0; i < 256; i++) { - EXPECT_EQ(isalpha(i) != 0, absl::ascii_isalpha(i)) << i; - EXPECT_EQ(isdigit(i) != 0, absl::ascii_isdigit(i)) << i; - EXPECT_EQ(isalnum(i) != 0, absl::ascii_isalnum(i)) << i; - EXPECT_EQ(isspace(i) != 0, absl::ascii_isspace(i)) << i; - EXPECT_EQ(ispunct(i) != 0, absl::ascii_ispunct(i)) << i; - EXPECT_EQ(isblank(i) != 0, absl::ascii_isblank(i)) << i; - EXPECT_EQ(iscntrl(i) != 0, absl::ascii_iscntrl(i)) << i; - EXPECT_EQ(isxdigit(i) != 0, absl::ascii_isxdigit(i)) << i; - EXPECT_EQ(isprint(i) != 0, absl::ascii_isprint(i)) << i; - EXPECT_EQ(isgraph(i) != 0, absl::ascii_isgraph(i)) << i; - EXPECT_EQ(isupper(i) != 0, absl::ascii_isupper(i)) << i; - EXPECT_EQ(islower(i) != 0, absl::ascii_islower(i)) << i; - EXPECT_EQ(isascii(i) != 0, absl::ascii_isascii(i)) << i; + const auto c = static_cast(i); + EXPECT_EQ(isalpha(c) != 0, absl::ascii_isalpha(c)) << c; + EXPECT_EQ(isdigit(c) != 0, absl::ascii_isdigit(c)) << c; + EXPECT_EQ(isalnum(c) != 0, absl::ascii_isalnum(c)) << c; + EXPECT_EQ(isspace(c) != 0, absl::ascii_isspace(c)) << c; + EXPECT_EQ(ispunct(c) != 0, absl::ascii_ispunct(c)) << c; + EXPECT_EQ(isblank(c) != 0, absl::ascii_isblank(c)) << c; + EXPECT_EQ(iscntrl(c) != 0, absl::ascii_iscntrl(c)) << c; + EXPECT_EQ(isxdigit(c) != 0, absl::ascii_isxdigit(c)) << c; + EXPECT_EQ(isprint(c) != 0, absl::ascii_isprint(c)) << c; + EXPECT_EQ(isgraph(c) != 0, absl::ascii_isgraph(c)) << c; + EXPECT_EQ(isupper(c) != 0, absl::ascii_isupper(c)) << c; + EXPECT_EQ(islower(c) != 0, absl::ascii_islower(c)) << c; + EXPECT_EQ(isascii(c) != 0, absl::ascii_isascii(c)) << c; } #ifndef __ANDROID__ @@ -166,25 +163,20 @@ TEST(AsciiToFoo, All) { #endif for (int i = 0; i < 256; i++) { - if (absl::ascii_islower(i)) - EXPECT_EQ(absl::ascii_toupper(i), 'A' + (i - 'a')) << i; + const auto c = static_cast(i); + if (absl::ascii_islower(c)) + EXPECT_EQ(absl::ascii_toupper(c), 'A' + (i - 'a')) << c; else - EXPECT_EQ(absl::ascii_toupper(i), static_cast(i)) << i; + EXPECT_EQ(absl::ascii_toupper(c), static_cast(i)) << c; - if (absl::ascii_isupper(i)) - EXPECT_EQ(absl::ascii_tolower(i), 'a' + (i - 'A')) << i; + if (absl::ascii_isupper(c)) + EXPECT_EQ(absl::ascii_tolower(c), 'a' + (i - 'A')) << c; else - EXPECT_EQ(absl::ascii_tolower(i), static_cast(i)) << i; + EXPECT_EQ(absl::ascii_tolower(c), static_cast(i)) << c; // These CHECKs only hold in a C locale. 
- EXPECT_EQ(static_cast(tolower(i)), absl::ascii_tolower(i)) << i; - EXPECT_EQ(static_cast(toupper(i)), absl::ascii_toupper(i)) << i; - - // The official to* functions don't accept negative signed chars, but - // our absl::ascii_to* functions do. - signed char sc = static_cast(static_cast(i)); - EXPECT_EQ(absl::ascii_tolower(i), absl::ascii_tolower(sc)) << i; - EXPECT_EQ(absl::ascii_toupper(i), absl::ascii_toupper(sc)) << i; + EXPECT_EQ(static_cast(tolower(i)), absl::ascii_tolower(c)) << c; + EXPECT_EQ(static_cast(toupper(i)), absl::ascii_toupper(c)) << c; } #ifndef __ANDROID__ // restore the old locale. diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/charconv.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/charconv.cc index fefcfc90a5..69d420bcea 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/charconv.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/charconv.cc @@ -18,6 +18,7 @@ #include #include #include +#include #include "absl/base/casts.h" #include "absl/numeric/bits.h" @@ -65,6 +66,14 @@ struct FloatTraits; template <> struct FloatTraits { + using mantissa_t = uint64_t; + + // The number of bits in the given float type. + static constexpr int kTargetBits = 64; + + // The number of exponent bits in the given float type. + static constexpr int kTargetExponentBits = 11; + // The number of mantissa bits in the given float type. This includes the // implied high bit. static constexpr int kTargetMantissaBits = 53; @@ -83,6 +92,31 @@ struct FloatTraits { // m * 2**kMinNormalExponent is exactly equal to DBL_MIN. static constexpr int kMinNormalExponent = -1074; + // The IEEE exponent bias. It equals ((1 << (kTargetExponentBits - 1)) - 1). + static constexpr int kExponentBias = 1023; + + // The Eisel-Lemire "Shifting to 54/25 Bits" adjustment. It equals (63 - 1 - + // kTargetMantissaBits). + static constexpr int kEiselLemireShift = 9; + + // The Eisel-Lemire high64_mask. It equals ((1 << kEiselLemireShift) - 1). + static constexpr uint64_t kEiselLemireMask = uint64_t{0x1FF}; + + // The smallest negative integer N (smallest negative means furthest from + // zero) such that parsing 9999999999999999999eN, with 19 nines, is still + // positive. Parsing a smaller (more negative) N will produce zero. + // + // Adjusting the decimal point and exponent, without adjusting the value, + // 9999999999999999999eN equals 9.999999999999999999eM where M = N + 18. + // + // 9999999999999999999, with 19 nines but no decimal point, is the largest + // "repeated nines" integer that fits in a uint64_t. + static constexpr int kEiselLemireMinInclusiveExp10 = -324 - 18; + + // The smallest positive integer N such that parsing 1eN produces infinity. + // Parsing a smaller N will produce something finite. + static constexpr int kEiselLemireMaxExclusiveExp10 = 309; + static double MakeNan(const char* tagp) { // Support nan no matter which namespace it's in. Some platforms // incorrectly don't put it in namespace std. @@ -103,7 +137,7 @@ struct FloatTraits { // a normal value is made, or it must be less narrow than that, in which case // `exponent` must be exactly kMinNormalExponent, and a subnormal value is // made. - static double Make(uint64_t mantissa, int exponent, bool sign) { + static double Make(mantissa_t mantissa, int exponent, bool sign) { #ifndef ABSL_BIT_PACK_FLOATS // Support ldexp no matter which namespace it's in. Some platforms // incorrectly don't put it in namespace std. @@ -116,8 +150,10 @@ struct FloatTraits { if (mantissa > kMantissaMask) { // Normal value. 
// Adjust by 1023 for the exponent representation bias, and an additional - // 52 due to the implied decimal point in the IEEE mantissa represenation. - dbl += uint64_t{exponent + 1023u + kTargetMantissaBits - 1} << 52; + // 52 due to the implied decimal point in the IEEE mantissa + // representation. + dbl += static_cast(exponent + 1023 + kTargetMantissaBits - 1) + << 52; mantissa &= kMantissaMask; } else { // subnormal value @@ -134,16 +170,27 @@ struct FloatTraits { // members and methods. template <> struct FloatTraits { + using mantissa_t = uint32_t; + + static constexpr int kTargetBits = 32; + static constexpr int kTargetExponentBits = 8; static constexpr int kTargetMantissaBits = 24; static constexpr int kMaxExponent = 104; static constexpr int kMinNormalExponent = -149; + static constexpr int kExponentBias = 127; + static constexpr int kEiselLemireShift = 38; + static constexpr uint64_t kEiselLemireMask = uint64_t{0x3FFFFFFFFF}; + static constexpr int kEiselLemireMinInclusiveExp10 = -46 - 18; + static constexpr int kEiselLemireMaxExclusiveExp10 = 39; + static float MakeNan(const char* tagp) { // Support nanf no matter which namespace it's in. Some platforms // incorrectly don't put it in namespace std. using namespace std; // NOLINT return nanf(tagp); } - static float Make(uint32_t mantissa, int exponent, bool sign) { + + static float Make(mantissa_t mantissa, int exponent, bool sign) { #ifndef ABSL_BIT_PACK_FLOATS // Support ldexpf no matter which namespace it's in. Some platforms // incorrectly don't put it in namespace std. @@ -157,7 +204,8 @@ struct FloatTraits { // Normal value. // Adjust by 127 for the exponent representation bias, and an additional // 23 due to the implied decimal point in the IEEE mantissa represenation. - flt += uint32_t{exponent + 127u + kTargetMantissaBits - 1} << 23; + flt += static_cast(exponent + 127 + kTargetMantissaBits - 1) + << 23; mantissa &= kMantissaMask; } else { // subnormal value @@ -181,39 +229,45 @@ struct FloatTraits { // // 2**63 <= Power10Mantissa(n) < 2**64. // +// See the "Table of powers of 10" comment below for a "1e60" example. +// // Lookups into the power-of-10 table must first check the Power10Overflow() and // Power10Underflow() functions, to avoid out-of-bounds table access. // -// Indexes into these tables are biased by -kPower10TableMin, and the table has -// values in the range [kPower10TableMin, kPower10TableMax]. -extern const uint64_t kPower10MantissaTable[]; -extern const int16_t kPower10ExponentTable[]; +// Indexes into these tables are biased by -kPower10TableMinInclusive. Valid +// indexes range from kPower10TableMinInclusive to kPower10TableMaxExclusive. +extern const uint64_t kPower10MantissaHighTable[]; // High 64 of 128 bits. +extern const uint64_t kPower10MantissaLowTable[]; // Low 64 of 128 bits. -// The smallest allowed value for use with the Power10Mantissa() and -// Power10Exponent() functions below. (If a smaller exponent is needed in +// The smallest (inclusive) allowed value for use with the Power10Mantissa() +// and Power10Exponent() functions below. (If a smaller exponent is needed in // calculations, the end result is guaranteed to underflow.) -constexpr int kPower10TableMin = -342; +constexpr int kPower10TableMinInclusive = -342; -// The largest allowed value for use with the Power10Mantissa() and -// Power10Exponent() functions below. (If a smaller exponent is needed in -// calculations, the end result is guaranteed to overflow.) 
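The Make() helpers above (and the bit-packing branch later in this file) rely on the standard IEEE-754 layout: for a double, 1 sign bit, 11 exponent bits biased by 1023, and 52 stored mantissa bits below an implied leading 1. A tiny standalone sketch assembling 1.5 from those fields; the helper name and values are illustrative, not Abseil's code.

// Sketch only: packing sign/exponent/mantissa bits into an IEEE-754 double.
#include <cstdint>
#include <cstring>
#include <iostream>

double PackDouble(bool negative, int unbiased_exponent, uint64_t fraction52) {
  const uint64_t sign = negative ? uint64_t{1} << 63 : 0;
  const uint64_t biased =
      static_cast<uint64_t>(unbiased_exponent + 1023) << 52;
  const uint64_t bits = sign | biased | (fraction52 & 0x000FFFFFFFFFFFFFu);
  double d;
  std::memcpy(&d, &bits, sizeof(d));  // same idea as absl::bit_cast<double>
  return d;
}

int main() {
  // 1.5 = 1.1b * 2**0: exponent 0, fraction 0.5, i.e. top fraction bit set.
  std::cout << PackDouble(false, 0, uint64_t{1} << 51) << "\n";  // prints 1.5
  return 0;
}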
-constexpr int kPower10TableMax = 308; +// The largest (exclusive) allowed value for use with the Power10Mantissa() and +// Power10Exponent() functions below. (If a larger-or-equal exponent is needed +// in calculations, the end result is guaranteed to overflow.) +constexpr int kPower10TableMaxExclusive = 309; uint64_t Power10Mantissa(int n) { - return kPower10MantissaTable[n - kPower10TableMin]; + return kPower10MantissaHighTable[n - kPower10TableMinInclusive]; } int Power10Exponent(int n) { - return kPower10ExponentTable[n - kPower10TableMin]; + // The 217706 etc magic numbers encode the results as a formula instead of a + // table. Their equivalence (over the kPower10TableMinInclusive .. + // kPower10TableMaxExclusive range) is confirmed by + // https://github.com/google/wuffs/blob/315b2e52625ebd7b02d8fac13e3cd85ea374fb80/script/print-mpb-powers-of-10.go + return (217706 * n >> 16) - 63; } // Returns true if n is large enough that 10**n always results in an IEEE // overflow. -bool Power10Overflow(int n) { return n > kPower10TableMax; } +bool Power10Overflow(int n) { return n >= kPower10TableMaxExclusive; } // Returns true if n is small enough that 10**n times a ParsedFloat mantissa // always results in an IEEE underflow. -bool Power10Underflow(int n) { return n < kPower10TableMin; } +bool Power10Underflow(int n) { return n < kPower10TableMinInclusive; } // Returns true if Power10Mantissa(n) * 2**Power10Exponent(n) is exactly equal // to 10**n numerically. Put another way, this returns true if there is no @@ -242,9 +296,11 @@ struct CalculatedFloat { // Returns the bit width of the given uint128. (Equivalently, returns 128 // minus the number of leading zero bits.) -unsigned BitWidth(uint128 value) { +int BitWidth(uint128 value) { if (Uint128High64(value) == 0) { - return static_cast(bit_width(Uint128Low64(value))); + // This static_cast is only needed when using a std::bit_width() + // implementation that does not have the fix for LWG 3656 applied. + return static_cast(bit_width(Uint128Low64(value))); } return 128 - countl_zero(Uint128High64(value)); } @@ -285,14 +341,19 @@ template bool HandleEdgeCase(const strings_internal::ParsedFloat& input, bool negative, FloatType* value) { if (input.type == strings_internal::FloatType::kNan) { - // A bug in both clang and gcc would cause the compiler to optimize away the - // buffer we are building below. Declaring the buffer volatile avoids the - // issue, and has no measurable performance impact in microbenchmarks. + // A bug in both clang < 7 and gcc would cause the compiler to optimize + // away the buffer we are building below. Declaring the buffer volatile + // avoids the issue, and has no measurable performance impact in + // microbenchmarks. // // https://bugs.llvm.org/show_bug.cgi?id=37778 // https://gcc.gnu.org/bugzilla/show_bug.cgi?id=86113 constexpr ptrdiff_t kNanBufferSize = 128; +#if defined(__GNUC__) || (defined(__clang__) && __clang_major__ < 7) volatile char n_char_sequence[kNanBufferSize]; +#else + char n_char_sequence[kNanBufferSize]; +#endif if (input.subrange_begin == nullptr) { n_char_sequence[0] = '\0'; } else { @@ -337,8 +398,10 @@ void EncodeResult(const CalculatedFloat& calculated, bool negative, *value = negative ? 
-0.0 : 0.0; return; } - *value = FloatTraits::Make(calculated.mantissa, - calculated.exponent, negative); + *value = FloatTraits::Make( + static_cast::mantissa_t>( + calculated.mantissa), + calculated.exponent, negative); } // Returns the given uint128 shifted to the right by `shift` bits, and rounds @@ -519,7 +582,9 @@ CalculatedFloat CalculateFromParsedHexadecimal( const strings_internal::ParsedFloat& parsed_hex) { uint64_t mantissa = parsed_hex.mantissa; int exponent = parsed_hex.exponent; - auto mantissa_width = static_cast(bit_width(mantissa)); + // This static_cast is only needed when using a std::bit_width() + // implementation that does not have the fix for LWG 3656 applied. + int mantissa_width = static_cast(bit_width(mantissa)); const int shift = NormalizedShiftSize(mantissa_width, exponent); bool result_exact; exponent += shift; @@ -595,6 +660,185 @@ CalculatedFloat CalculateFromParsedDecimal( binary_exponent); } +// As discussed in https://nigeltao.github.io/blog/2020/eisel-lemire.html the +// primary goal of the Eisel-Lemire algorithm is speed, for 99+% of the cases, +// not 100% coverage. As long as Eisel-Lemire doesn’t claim false positives, +// the combined approach (falling back to an alternative implementation when +// this function returns false) is both fast and correct. +template +bool EiselLemire(const strings_internal::ParsedFloat& input, bool negative, + FloatType* value, std::errc* ec) { + uint64_t man = input.mantissa; + int exp10 = input.exponent; + if (exp10 < FloatTraits::kEiselLemireMinInclusiveExp10) { + *value = negative ? -0.0 : 0.0; + *ec = std::errc::result_out_of_range; + return true; + } else if (exp10 >= FloatTraits::kEiselLemireMaxExclusiveExp10) { + // Return max (a finite value) consistent with from_chars and DR 3081. For + // SimpleAtod and SimpleAtof, post-processing will return infinity. + *value = negative ? -std::numeric_limits::max() + : std::numeric_limits::max(); + *ec = std::errc::result_out_of_range; + return true; + } + + // Assert kPower10TableMinInclusive <= exp10 < kPower10TableMaxExclusive. + // Equivalently, !Power10Underflow(exp10) and !Power10Overflow(exp10). + static_assert( + FloatTraits::kEiselLemireMinInclusiveExp10 >= + kPower10TableMinInclusive, + "(exp10-kPower10TableMinInclusive) in kPower10MantissaHighTable bounds"); + static_assert( + FloatTraits::kEiselLemireMaxExclusiveExp10 <= + kPower10TableMaxExclusive, + "(exp10-kPower10TableMinInclusive) in kPower10MantissaHighTable bounds"); + + // The terse (+) comments in this function body refer to sections of the + // https://nigeltao.github.io/blog/2020/eisel-lemire.html blog post. + // + // That blog post discusses double precision (11 exponent bits with a -1023 + // bias, 52 mantissa bits), but the same approach applies to single precision + // (8 exponent bits with a -127 bias, 23 mantissa bits). Either way, the + // computation here happens with 64-bit values (e.g. man) or 128-bit values + // (e.g. x) before finally converting to 64- or 32-bit floating point. + // + // See also "Number Parsing at a Gigabyte per Second, Software: Practice and + // Experience 51 (8), 2021" (https://arxiv.org/abs/2101.11408) for detail. + + // (+) Normalization. + int clz = countl_zero(man); + man <<= static_cast(clz); + // The 217706 etc magic numbers are from the Power10Exponent function. + uint64_t ret_exp2 = + static_cast((217706 * exp10 >> 16) + 64 + + FloatTraits::kExponentBias - clz); + + // (+) Multiplication. 
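The (+) Normalization step above shifts the decimal mantissa until its top bit is set and folds the shift into the binary exponent, before the multiplication that follows. A tiny standalone sketch of just that step, assuming absl::countl_zero from absl/numeric/bits.h (which this file already includes); the values are illustrative.

// Sketch only: the Eisel-Lemire normalization step in isolation.
#include <cstdint>
#include <iostream>

#include "absl/numeric/bits.h"

int main() {
  // Parsing "25" gives a decimal mantissa of 25; normalize it so the top bit
  // of the 64-bit word is set, and remember how far it was shifted.
  uint64_t man = 25;
  const int clz = absl::countl_zero(man);  // 59 leading zero bits for 25
  man <<= clz;                             // man is now 25 * 2**59
  // The shift is accounted for in the binary exponent, exactly as the patch
  // does when it subtracts clz while computing ret_exp2.
  std::cout << "clz=" << clz << " normalized=0x" << std::hex << man << "\n";
  return 0;
}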
+ uint128 x = static_cast(man) * + static_cast( + kPower10MantissaHighTable[exp10 - kPower10TableMinInclusive]); + + // (+) Wider Approximation. + static constexpr uint64_t high64_mask = + FloatTraits::kEiselLemireMask; + if (((Uint128High64(x) & high64_mask) == high64_mask) && + (man > (std::numeric_limits::max() - Uint128Low64(x)))) { + uint128 y = + static_cast(man) * + static_cast( + kPower10MantissaLowTable[exp10 - kPower10TableMinInclusive]); + x += Uint128High64(y); + // For example, parsing "4503599627370497.5" will take the if-true + // branch here (for double precision), since: + // - x = 0x8000000000000BFF_FFFFFFFFFFFFFFFF + // - y = 0x8000000000000BFF_7FFFFFFFFFFFF400 + // - man = 0xA000000000000F00 + // Likewise, when parsing "0.0625" for single precision: + // - x = 0x7FFFFFFFFFFFFFFF_FFFFFFFFFFFFFFFF + // - y = 0x813FFFFFFFFFFFFF_8A00000000000000 + // - man = 0x9C40000000000000 + if (((Uint128High64(x) & high64_mask) == high64_mask) && + ((Uint128Low64(x) + 1) == 0) && + (man > (std::numeric_limits::max() - Uint128Low64(y)))) { + return false; + } + } + + // (+) Shifting to 54 Bits (or for single precision, to 25 bits). + uint64_t msb = Uint128High64(x) >> 63; + uint64_t ret_man = + Uint128High64(x) >> (msb + FloatTraits::kEiselLemireShift); + ret_exp2 -= 1 ^ msb; + + // (+) Half-way Ambiguity. + // + // For example, parsing "1e+23" will take the if-true branch here (for double + // precision), since: + // - x = 0x54B40B1F852BDA00_0000000000000000 + // - ret_man = 0x002A5A058FC295ED + // Likewise, when parsing "20040229.0" for single precision: + // - x = 0x4C72894000000000_0000000000000000 + // - ret_man = 0x000000000131CA25 + if ((Uint128Low64(x) == 0) && ((Uint128High64(x) & high64_mask) == 0) && + ((ret_man & 3) == 1)) { + return false; + } + + // (+) From 54 to 53 Bits (or for single precision, from 25 to 24 bits). + ret_man += ret_man & 1; // Line From54a. + ret_man >>= 1; // Line From54b. + // Incrementing ret_man (at line From54a) may have overflowed 54 bits (53 + // bits after the right shift by 1 at line From54b), so adjust for that. + // + // For example, parsing "9223372036854775807" will take the if-true branch + // here (for double precision), since: + // - ret_man = 0x0020000000000000 = (1 << 53) + // Likewise, when parsing "2147483647.0" for single precision: + // - ret_man = 0x0000000001000000 = (1 << 24) + if ((ret_man >> FloatTraits::kTargetMantissaBits) > 0) { + ret_exp2 += 1; + // Conceptually, we need a "ret_man >>= 1" in this if-block to balance + // incrementing ret_exp2 in the line immediately above. However, we only + // get here when line From54a overflowed (after adding a 1), so ret_man + // here is (1 << 53). Its low 53 bits are therefore all zeroes. The only + // remaining use of ret_man is to mask it with ((1 << 52) - 1), so only its + // low 52 bits matter. A "ret_man >>= 1" would have no effect in practice. + // + // We omit the "ret_man >>= 1", even if it is cheap (and this if-branch is + // rarely taken) and technically 'more correct', so that mutation tests + // that would otherwise modify or omit that "ret_man >>= 1" don't complain + // that such code mutations have no observable effect. + } + + // ret_exp2 is a uint64_t. Zero or underflow means that we're in subnormal + // space. max_exp2 (0x7FF for double precision, 0xFF for single precision) or + // above means that we're in Inf/NaN space. 
+ // + // The if block is equivalent to (but has fewer branches than): + // if ((ret_exp2 <= 0) || (ret_exp2 >= max_exp2)) { etc } + // + // For example, parsing "4.9406564584124654e-324" will take the if-true + // branch here, since ret_exp2 = -51. + static constexpr uint64_t max_exp2 = + (1 << FloatTraits::kTargetExponentBits) - 1; + if ((ret_exp2 - 1) >= (max_exp2 - 1)) { + return false; + } + +#ifndef ABSL_BIT_PACK_FLOATS + if (FloatTraits::kTargetBits == 64) { + *value = FloatTraits::Make( + (ret_man & 0x000FFFFFFFFFFFFFu) | 0x0010000000000000u, + static_cast(ret_exp2) - 1023 - 52, negative); + return true; + } else if (FloatTraits::kTargetBits == 32) { + *value = FloatTraits::Make( + (static_cast(ret_man) & 0x007FFFFFu) | 0x00800000u, + static_cast(ret_exp2) - 127 - 23, negative); + return true; + } +#else + if (FloatTraits::kTargetBits == 64) { + uint64_t ret_bits = (ret_exp2 << 52) | (ret_man & 0x000FFFFFFFFFFFFFu); + if (negative) { + ret_bits |= 0x8000000000000000u; + } + *value = absl::bit_cast(ret_bits); + return true; + } else if (FloatTraits::kTargetBits == 32) { + uint32_t ret_bits = (static_cast(ret_exp2) << 23) | + (static_cast(ret_man) & 0x007FFFFFu); + if (negative) { + ret_bits |= 0x80000000u; + } + *value = absl::bit_cast(ret_bits); + return true; + } +#endif // ABSL_BIT_PACK_FLOATS + return false; +} + template from_chars_result FromCharsImpl(const char* first, const char* last, FloatType& value, chars_format fmt_flags) { @@ -668,6 +912,12 @@ from_chars_result FromCharsImpl(const char* first, const char* last, if (HandleEdgeCase(decimal_parse, negative, &value)) { return result; } + // A nullptr subrange_begin means that the decimal_parse.mantissa is exact + // (not truncated), a precondition of the Eisel-Lemire algorithm. + if ((decimal_parse.subrange_begin == nullptr) && + EiselLemire(decimal_parse, negative, &value, &result.ec)) { + return result; + } CalculatedFloat calculated = CalculateFromParsedDecimal(decimal_parse); EncodeResult(calculated, negative, &result, &value); @@ -688,15 +938,46 @@ from_chars_result from_chars(const char* first, const char* last, float& value, namespace { -// Table of powers of 10, from kPower10TableMin to kPower10TableMax. +// Table of powers of 10, from kPower10TableMinInclusive to +// kPower10TableMaxExclusive. +// +// kPower10MantissaHighTable[i - kPower10TableMinInclusive] stores the 64-bit +// mantissa. The high bit is always on. +// +// kPower10MantissaLowTable extends that 64-bit mantissa to 128 bits. +// +// Power10Exponent(i) calculates the power-of-two exponent. +// +// For a number i, this gives the unique mantissaHigh and exponent such that +// (mantissaHigh * 2**exponent) <= 10**i < ((mantissaHigh + 1) * 2**exponent). +// +// For example, Python can confirm that the exact hexadecimal value of 1e60 is: +// >>> a = 1000000000000000000000000000000000000000000000000000000000000 +// >>> hex(a) +// '0x9f4f2726179a224501d762422c946590d91000000000000000' +// Adding underscores at every 8th hex digit shows 50 hex digits: +// '0x9f4f2726_179a2245_01d76242_2c946590_d9100000_00000000_00'. +// In this case, the high bit of the first hex digit, 9, is coincidentally set, +// so we do not have to do further shifting to deduce the 128-bit mantissa: +// - kPower10MantissaHighTable[60 - kP10TMI] = 0x9f4f2726179a2245U +// - kPower10MantissaLowTable[ 60 - kP10TMI] = 0x01d762422c946590U +// where kP10TMI is kPower10TableMinInclusive. The low 18 of those 50 hex +// digits are truncated. 
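With EiselLemire() wired into FromCharsImpl() above (only for exact, non-truncated mantissas), typical callers still just use absl::from_chars and never see which internal path resolved the value. A short usage sketch; "1e+23" is one of the half-way examples discussed in the comments above and works the same from the caller's point of view.

// Sketch only: parsing a double with absl::from_chars (absl/strings/charconv.h).
#include <iostream>
#include <string>
#include <system_error>

#include "absl/strings/charconv.h"

int main() {
  const std::string text = "1e+23";
  double value = 0.0;
  absl::from_chars_result res =
      absl::from_chars(text.data(), text.data() + text.size(), value);
  if (res.ec == std::errc()) {
    std::cout << "parsed " << value << ", consumed "
              << (res.ptr - text.data()) << " chars\n";
  } else {
    std::cout << "parse failed\n";
  }
  return 0;
}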
+// +// 50 hex digits (with the high bit set) is 200 bits and mantissaHigh holds 64 +// bits, so Power10Exponent(60) = 200 - 64 = 136. Again, Python can confirm: +// >>> b = 0x9f4f2726179a2245 +// >>> ((b+0)<<136) <= a +// True +// >>> ((b+1)<<136) <= a +// False // -// kPower10MantissaTable[i - kPower10TableMin] stores the 64-bit mantissa (high -// bit always on), and kPower10ExponentTable[i - kPower10TableMin] stores the -// power-of-two exponent. For a given number i, this gives the unique mantissa -// and exponent such that mantissa * 2**exponent <= 10**i < (mantissa + 1) * -// 2**exponent. +// The tables were generated by +// https://github.com/google/wuffs/blob/315b2e52625ebd7b02d8fac13e3cd85ea374fb80/script/print-mpb-powers-of-10.go +// after re-formatting its output into two arrays of N uint64_t values (instead +// of an N element array of uint64_t pairs). -const uint64_t kPower10MantissaTable[] = { +const uint64_t kPower10MantissaHighTable[] = { 0xeef453d6923bd65aU, 0x9558b4661b6565f8U, 0xbaaee17fa23ebf76U, 0xe95a99df8ace6f53U, 0x91d8a02bb6c10594U, 0xb64ec836a47146f9U, 0xe3e27a444d8d98b7U, 0x8e6d8c6ab0787f72U, 0xb208ef855c969f4fU, @@ -916,67 +1197,224 @@ const uint64_t kPower10MantissaTable[] = { 0xb6472e511c81471dU, 0xe3d8f9e563a198e5U, 0x8e679c2f5e44ff8fU, }; -const int16_t kPower10ExponentTable[] = { - -1200, -1196, -1193, -1190, -1186, -1183, -1180, -1176, -1173, -1170, -1166, - -1163, -1160, -1156, -1153, -1150, -1146, -1143, -1140, -1136, -1133, -1130, - -1127, -1123, -1120, -1117, -1113, -1110, -1107, -1103, -1100, -1097, -1093, - -1090, -1087, -1083, -1080, -1077, -1073, -1070, -1067, -1063, -1060, -1057, - -1053, -1050, -1047, -1043, -1040, -1037, -1034, -1030, -1027, -1024, -1020, - -1017, -1014, -1010, -1007, -1004, -1000, -997, -994, -990, -987, -984, - -980, -977, -974, -970, -967, -964, -960, -957, -954, -950, -947, - -944, -940, -937, -934, -931, -927, -924, -921, -917, -914, -911, - -907, -904, -901, -897, -894, -891, -887, -884, -881, -877, -874, - -871, -867, -864, -861, -857, -854, -851, -847, -844, -841, -838, - -834, -831, -828, -824, -821, -818, -814, -811, -808, -804, -801, - -798, -794, -791, -788, -784, -781, -778, -774, -771, -768, -764, - -761, -758, -754, -751, -748, -744, -741, -738, -735, -731, -728, - -725, -721, -718, -715, -711, -708, -705, -701, -698, -695, -691, - -688, -685, -681, -678, -675, -671, -668, -665, -661, -658, -655, - -651, -648, -645, -642, -638, -635, -632, -628, -625, -622, -618, - -615, -612, -608, -605, -602, -598, -595, -592, -588, -585, -582, - -578, -575, -572, -568, -565, -562, -558, -555, -552, -549, -545, - -542, -539, -535, -532, -529, -525, -522, -519, -515, -512, -509, - -505, -502, -499, -495, -492, -489, -485, -482, -479, -475, -472, - -469, -465, -462, -459, -455, -452, -449, -446, -442, -439, -436, - -432, -429, -426, -422, -419, -416, -412, -409, -406, -402, -399, - -396, -392, -389, -386, -382, -379, -376, -372, -369, -366, -362, - -359, -356, -353, -349, -346, -343, -339, -336, -333, -329, -326, - -323, -319, -316, -313, -309, -306, -303, -299, -296, -293, -289, - -286, -283, -279, -276, -273, -269, -266, -263, -259, -256, -253, - -250, -246, -243, -240, -236, -233, -230, -226, -223, -220, -216, - -213, -210, -206, -203, -200, -196, -193, -190, -186, -183, -180, - -176, -173, -170, -166, -163, -160, -157, -153, -150, -147, -143, - -140, -137, -133, -130, -127, -123, -120, -117, -113, -110, -107, - -103, -100, -97, -93, -90, -87, -83, -80, -77, -73, -70, - -67, -63, -60, -57, -54, -50, -47, -44, -40, -37, -34, - 
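The closed-form Power10Exponent() introduced earlier in this file, (217706 * n >> 16) - 63, approximates log2(10) as 217706 / 65536 and replaces the removed int16_t exponent table. A quick standalone check of the formula against two easy anchors; the function here is copied from the patch, and the n = 60 expectation comes from the "Power10Exponent(60) = 200 - 64 = 136" worked example in the table comment above.

// Sketch only: sanity-checking the closed-form power-of-ten exponent.
#include <cassert>
#include <iostream>

// Same formula as Power10Exponent() in the patch above.
int Power10Exponent(int n) { return (217706 * n >> 16) - 63; }

int main() {
  // 10**0 == 1 is stored as mantissa 2**63 times 2**-63.
  assert(Power10Exponent(0) == -63);
  // 10**60 needs 200 bits; with a 64-bit mantissa the exponent is
  // 200 - 64 = 136, matching the table comment above.
  assert(Power10Exponent(60) == 136);
  std::cout << "formula checks out for n = 0 and n = 60\n";
  return 0;
}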
-30, -27, -24, -20, -17, -14, -10, -7, -4, 0, 3, - 6, 10, 13, 16, 20, 23, 26, 30, 33, 36, 39, - 43, 46, 49, 53, 56, 59, 63, 66, 69, 73, 76, - 79, 83, 86, 89, 93, 96, 99, 103, 106, 109, 113, - 116, 119, 123, 126, 129, 132, 136, 139, 142, 146, 149, - 152, 156, 159, 162, 166, 169, 172, 176, 179, 182, 186, - 189, 192, 196, 199, 202, 206, 209, 212, 216, 219, 222, - 226, 229, 232, 235, 239, 242, 245, 249, 252, 255, 259, - 262, 265, 269, 272, 275, 279, 282, 285, 289, 292, 295, - 299, 302, 305, 309, 312, 315, 319, 322, 325, 328, 332, - 335, 338, 342, 345, 348, 352, 355, 358, 362, 365, 368, - 372, 375, 378, 382, 385, 388, 392, 395, 398, 402, 405, - 408, 412, 415, 418, 422, 425, 428, 431, 435, 438, 441, - 445, 448, 451, 455, 458, 461, 465, 468, 471, 475, 478, - 481, 485, 488, 491, 495, 498, 501, 505, 508, 511, 515, - 518, 521, 524, 528, 531, 534, 538, 541, 544, 548, 551, - 554, 558, 561, 564, 568, 571, 574, 578, 581, 584, 588, - 591, 594, 598, 601, 604, 608, 611, 614, 617, 621, 624, - 627, 631, 634, 637, 641, 644, 647, 651, 654, 657, 661, - 664, 667, 671, 674, 677, 681, 684, 687, 691, 694, 697, - 701, 704, 707, 711, 714, 717, 720, 724, 727, 730, 734, - 737, 740, 744, 747, 750, 754, 757, 760, 764, 767, 770, - 774, 777, 780, 784, 787, 790, 794, 797, 800, 804, 807, - 810, 813, 817, 820, 823, 827, 830, 833, 837, 840, 843, - 847, 850, 853, 857, 860, 863, 867, 870, 873, 877, 880, - 883, 887, 890, 893, 897, 900, 903, 907, 910, 913, 916, - 920, 923, 926, 930, 933, 936, 940, 943, 946, 950, 953, - 956, 960, +const uint64_t kPower10MantissaLowTable[] = { + 0x113faa2906a13b3fU, 0x4ac7ca59a424c507U, 0x5d79bcf00d2df649U, + 0xf4d82c2c107973dcU, 0x79071b9b8a4be869U, 0x9748e2826cdee284U, + 0xfd1b1b2308169b25U, 0xfe30f0f5e50e20f7U, 0xbdbd2d335e51a935U, + 0xad2c788035e61382U, 0x4c3bcb5021afcc31U, 0xdf4abe242a1bbf3dU, + 0xd71d6dad34a2af0dU, 0x8672648c40e5ad68U, 0x680efdaf511f18c2U, + 0x0212bd1b2566def2U, 0x014bb630f7604b57U, 0x419ea3bd35385e2dU, + 0x52064cac828675b9U, 0x7343efebd1940993U, 0x1014ebe6c5f90bf8U, + 0xd41a26e077774ef6U, 0x8920b098955522b4U, 0x55b46e5f5d5535b0U, + 0xeb2189f734aa831dU, 0xa5e9ec7501d523e4U, 0x47b233c92125366eU, + 0x999ec0bb696e840aU, 0xc00670ea43ca250dU, 0x380406926a5e5728U, + 0xc605083704f5ecf2U, 0xf7864a44c633682eU, 0x7ab3ee6afbe0211dU, + 0x5960ea05bad82964U, 0x6fb92487298e33bdU, 0xa5d3b6d479f8e056U, + 0x8f48a4899877186cU, 0x331acdabfe94de87U, 0x9ff0c08b7f1d0b14U, + 0x07ecf0ae5ee44dd9U, 0xc9e82cd9f69d6150U, 0xbe311c083a225cd2U, + 0x6dbd630a48aaf406U, 0x092cbbccdad5b108U, 0x25bbf56008c58ea5U, + 0xaf2af2b80af6f24eU, 0x1af5af660db4aee1U, 0x50d98d9fc890ed4dU, + 0xe50ff107bab528a0U, 0x1e53ed49a96272c8U, 0x25e8e89c13bb0f7aU, + 0x77b191618c54e9acU, 0xd59df5b9ef6a2417U, 0x4b0573286b44ad1dU, + 0x4ee367f9430aec32U, 0x229c41f793cda73fU, 0x6b43527578c1110fU, + 0x830a13896b78aaa9U, 0x23cc986bc656d553U, 0x2cbfbe86b7ec8aa8U, + 0x7bf7d71432f3d6a9U, 0xdaf5ccd93fb0cc53U, 0xd1b3400f8f9cff68U, + 0x23100809b9c21fa1U, 0xabd40a0c2832a78aU, 0x16c90c8f323f516cU, + 0xae3da7d97f6792e3U, 0x99cd11cfdf41779cU, 0x40405643d711d583U, + 0x482835ea666b2572U, 0xda3243650005eecfU, 0x90bed43e40076a82U, + 0x5a7744a6e804a291U, 0x711515d0a205cb36U, 0x0d5a5b44ca873e03U, + 0xe858790afe9486c2U, 0x626e974dbe39a872U, 0xfb0a3d212dc8128fU, + 0x7ce66634bc9d0b99U, 0x1c1fffc1ebc44e80U, 0xa327ffb266b56220U, + 0x4bf1ff9f0062baa8U, 0x6f773fc3603db4a9U, 0xcb550fb4384d21d3U, + 0x7e2a53a146606a48U, 0x2eda7444cbfc426dU, 0xfa911155fefb5308U, + 0x793555ab7eba27caU, 0x4bc1558b2f3458deU, 0x9eb1aaedfb016f16U, + 0x465e15a979c1cadcU, 0x0bfacd89ec191ec9U, 
0xcef980ec671f667bU, + 0x82b7e12780e7401aU, 0xd1b2ecb8b0908810U, 0x861fa7e6dcb4aa15U, + 0x67a791e093e1d49aU, 0xe0c8bb2c5c6d24e0U, 0x58fae9f773886e18U, + 0xaf39a475506a899eU, 0x6d8406c952429603U, 0xc8e5087ba6d33b83U, + 0xfb1e4a9a90880a64U, 0x5cf2eea09a55067fU, 0xf42faa48c0ea481eU, + 0xf13b94daf124da26U, 0x76c53d08d6b70858U, 0x54768c4b0c64ca6eU, + 0xa9942f5dcf7dfd09U, 0xd3f93b35435d7c4cU, 0xc47bc5014a1a6dafU, + 0x359ab6419ca1091bU, 0xc30163d203c94b62U, 0x79e0de63425dcf1dU, + 0x985915fc12f542e4U, 0x3e6f5b7b17b2939dU, 0xa705992ceecf9c42U, + 0x50c6ff782a838353U, 0xa4f8bf5635246428U, 0x871b7795e136be99U, + 0x28e2557b59846e3fU, 0x331aeada2fe589cfU, 0x3ff0d2c85def7621U, + 0x0fed077a756b53a9U, 0xd3e8495912c62894U, 0x64712dd7abbbd95cU, + 0xbd8d794d96aacfb3U, 0xecf0d7a0fc5583a0U, 0xf41686c49db57244U, + 0x311c2875c522ced5U, 0x7d633293366b828bU, 0xae5dff9c02033197U, + 0xd9f57f830283fdfcU, 0xd072df63c324fd7bU, 0x4247cb9e59f71e6dU, + 0x52d9be85f074e608U, 0x67902e276c921f8bU, 0x00ba1cd8a3db53b6U, + 0x80e8a40eccd228a4U, 0x6122cd128006b2cdU, 0x796b805720085f81U, + 0xcbe3303674053bb0U, 0xbedbfc4411068a9cU, 0xee92fb5515482d44U, + 0x751bdd152d4d1c4aU, 0xd262d45a78a0635dU, 0x86fb897116c87c34U, + 0xd45d35e6ae3d4da0U, 0x8974836059cca109U, 0x2bd1a438703fc94bU, + 0x7b6306a34627ddcfU, 0x1a3bc84c17b1d542U, 0x20caba5f1d9e4a93U, + 0x547eb47b7282ee9cU, 0xe99e619a4f23aa43U, 0x6405fa00e2ec94d4U, + 0xde83bc408dd3dd04U, 0x9624ab50b148d445U, 0x3badd624dd9b0957U, + 0xe54ca5d70a80e5d6U, 0x5e9fcf4ccd211f4cU, 0x7647c3200069671fU, + 0x29ecd9f40041e073U, 0xf468107100525890U, 0x7182148d4066eeb4U, + 0xc6f14cd848405530U, 0xb8ada00e5a506a7cU, 0xa6d90811f0e4851cU, + 0x908f4a166d1da663U, 0x9a598e4e043287feU, 0x40eff1e1853f29fdU, + 0xd12bee59e68ef47cU, 0x82bb74f8301958ceU, 0xe36a52363c1faf01U, + 0xdc44e6c3cb279ac1U, 0x29ab103a5ef8c0b9U, 0x7415d448f6b6f0e7U, + 0x111b495b3464ad21U, 0xcab10dd900beec34U, 0x3d5d514f40eea742U, + 0x0cb4a5a3112a5112U, 0x47f0e785eaba72abU, 0x59ed216765690f56U, + 0x306869c13ec3532cU, 0x1e414218c73a13fbU, 0xe5d1929ef90898faU, + 0xdf45f746b74abf39U, 0x6b8bba8c328eb783U, 0x066ea92f3f326564U, + 0xc80a537b0efefebdU, 0xbd06742ce95f5f36U, 0x2c48113823b73704U, + 0xf75a15862ca504c5U, 0x9a984d73dbe722fbU, 0xc13e60d0d2e0ebbaU, + 0x318df905079926a8U, 0xfdf17746497f7052U, 0xfeb6ea8bedefa633U, + 0xfe64a52ee96b8fc0U, 0x3dfdce7aa3c673b0U, 0x06bea10ca65c084eU, + 0x486e494fcff30a62U, 0x5a89dba3c3efccfaU, 0xf89629465a75e01cU, + 0xf6bbb397f1135823U, 0x746aa07ded582e2cU, 0xa8c2a44eb4571cdcU, + 0x92f34d62616ce413U, 0x77b020baf9c81d17U, 0x0ace1474dc1d122eU, + 0x0d819992132456baU, 0x10e1fff697ed6c69U, 0xca8d3ffa1ef463c1U, + 0xbd308ff8a6b17cb2U, 0xac7cb3f6d05ddbdeU, 0x6bcdf07a423aa96bU, + 0x86c16c98d2c953c6U, 0xe871c7bf077ba8b7U, 0x11471cd764ad4972U, + 0xd598e40d3dd89bcfU, 0x4aff1d108d4ec2c3U, 0xcedf722a585139baU, + 0xc2974eb4ee658828U, 0x733d226229feea32U, 0x0806357d5a3f525fU, + 0xca07c2dcb0cf26f7U, 0xfc89b393dd02f0b5U, 0xbbac2078d443ace2U, + 0xd54b944b84aa4c0dU, 0x0a9e795e65d4df11U, 0x4d4617b5ff4a16d5U, + 0x504bced1bf8e4e45U, 0xe45ec2862f71e1d6U, 0x5d767327bb4e5a4cU, + 0x3a6a07f8d510f86fU, 0x890489f70a55368bU, 0x2b45ac74ccea842eU, + 0x3b0b8bc90012929dU, 0x09ce6ebb40173744U, 0xcc420a6a101d0515U, + 0x9fa946824a12232dU, 0x47939822dc96abf9U, 0x59787e2b93bc56f7U, + 0x57eb4edb3c55b65aU, 0xede622920b6b23f1U, 0xe95fab368e45ecedU, + 0x11dbcb0218ebb414U, 0xd652bdc29f26a119U, 0x4be76d3346f0495fU, + 0x6f70a4400c562ddbU, 0xcb4ccd500f6bb952U, 0x7e2000a41346a7a7U, + 0x8ed400668c0c28c8U, 0x728900802f0f32faU, 0x4f2b40a03ad2ffb9U, + 0xe2f610c84987bfa8U, 
0x0dd9ca7d2df4d7c9U, 0x91503d1c79720dbbU, + 0x75a44c6397ce912aU, 0xc986afbe3ee11abaU, 0xfbe85badce996168U, + 0xfae27299423fb9c3U, 0xdccd879fc967d41aU, 0x5400e987bbc1c920U, + 0x290123e9aab23b68U, 0xf9a0b6720aaf6521U, 0xf808e40e8d5b3e69U, + 0xb60b1d1230b20e04U, 0xb1c6f22b5e6f48c2U, 0x1e38aeb6360b1af3U, + 0x25c6da63c38de1b0U, 0x579c487e5a38ad0eU, 0x2d835a9df0c6d851U, + 0xf8e431456cf88e65U, 0x1b8e9ecb641b58ffU, 0xe272467e3d222f3fU, + 0x5b0ed81dcc6abb0fU, 0x98e947129fc2b4e9U, 0x3f2398d747b36224U, + 0x8eec7f0d19a03aadU, 0x1953cf68300424acU, 0x5fa8c3423c052dd7U, + 0x3792f412cb06794dU, 0xe2bbd88bbee40bd0U, 0x5b6aceaeae9d0ec4U, + 0xf245825a5a445275U, 0xeed6e2f0f0d56712U, 0x55464dd69685606bU, + 0xaa97e14c3c26b886U, 0xd53dd99f4b3066a8U, 0xe546a8038efe4029U, + 0xde98520472bdd033U, 0x963e66858f6d4440U, 0xdde7001379a44aa8U, + 0x5560c018580d5d52U, 0xaab8f01e6e10b4a6U, 0xcab3961304ca70e8U, + 0x3d607b97c5fd0d22U, 0x8cb89a7db77c506aU, 0x77f3608e92adb242U, + 0x55f038b237591ed3U, 0x6b6c46dec52f6688U, 0x2323ac4b3b3da015U, + 0xabec975e0a0d081aU, 0x96e7bd358c904a21U, 0x7e50d64177da2e54U, + 0xdde50bd1d5d0b9e9U, 0x955e4ec64b44e864U, 0xbd5af13bef0b113eU, + 0xecb1ad8aeacdd58eU, 0x67de18eda5814af2U, 0x80eacf948770ced7U, + 0xa1258379a94d028dU, 0x096ee45813a04330U, 0x8bca9d6e188853fcU, + 0x775ea264cf55347dU, 0x95364afe032a819dU, 0x3a83ddbd83f52204U, + 0xc4926a9672793542U, 0x75b7053c0f178293U, 0x5324c68b12dd6338U, + 0xd3f6fc16ebca5e03U, 0x88f4bb1ca6bcf584U, 0x2b31e9e3d06c32e5U, + 0x3aff322e62439fcfU, 0x09befeb9fad487c2U, 0x4c2ebe687989a9b3U, + 0x0f9d37014bf60a10U, 0x538484c19ef38c94U, 0x2865a5f206b06fb9U, + 0xf93f87b7442e45d3U, 0xf78f69a51539d748U, 0xb573440e5a884d1bU, + 0x31680a88f8953030U, 0xfdc20d2b36ba7c3dU, 0x3d32907604691b4cU, + 0xa63f9a49c2c1b10fU, 0x0fcf80dc33721d53U, 0xd3c36113404ea4a8U, + 0x645a1cac083126e9U, 0x3d70a3d70a3d70a3U, 0xccccccccccccccccU, + 0x0000000000000000U, 0x0000000000000000U, 0x0000000000000000U, + 0x0000000000000000U, 0x0000000000000000U, 0x0000000000000000U, + 0x0000000000000000U, 0x0000000000000000U, 0x0000000000000000U, + 0x0000000000000000U, 0x0000000000000000U, 0x0000000000000000U, + 0x0000000000000000U, 0x0000000000000000U, 0x0000000000000000U, + 0x0000000000000000U, 0x0000000000000000U, 0x0000000000000000U, + 0x0000000000000000U, 0x0000000000000000U, 0x0000000000000000U, + 0x0000000000000000U, 0x0000000000000000U, 0x0000000000000000U, + 0x0000000000000000U, 0x0000000000000000U, 0x0000000000000000U, + 0x0000000000000000U, 0x4000000000000000U, 0x5000000000000000U, + 0xa400000000000000U, 0x4d00000000000000U, 0xf020000000000000U, + 0x6c28000000000000U, 0xc732000000000000U, 0x3c7f400000000000U, + 0x4b9f100000000000U, 0x1e86d40000000000U, 0x1314448000000000U, + 0x17d955a000000000U, 0x5dcfab0800000000U, 0x5aa1cae500000000U, + 0xf14a3d9e40000000U, 0x6d9ccd05d0000000U, 0xe4820023a2000000U, + 0xdda2802c8a800000U, 0xd50b2037ad200000U, 0x4526f422cc340000U, + 0x9670b12b7f410000U, 0x3c0cdd765f114000U, 0xa5880a69fb6ac800U, + 0x8eea0d047a457a00U, 0x72a4904598d6d880U, 0x47a6da2b7f864750U, + 0x999090b65f67d924U, 0xfff4b4e3f741cf6dU, 0xbff8f10e7a8921a4U, + 0xaff72d52192b6a0dU, 0x9bf4f8a69f764490U, 0x02f236d04753d5b4U, + 0x01d762422c946590U, 0x424d3ad2b7b97ef5U, 0xd2e0898765a7deb2U, + 0x63cc55f49f88eb2fU, 0x3cbf6b71c76b25fbU, 0x8bef464e3945ef7aU, + 0x97758bf0e3cbb5acU, 0x3d52eeed1cbea317U, 0x4ca7aaa863ee4bddU, + 0x8fe8caa93e74ef6aU, 0xb3e2fd538e122b44U, 0x60dbbca87196b616U, + 0xbc8955e946fe31cdU, 0x6babab6398bdbe41U, 0xc696963c7eed2dd1U, + 0xfc1e1de5cf543ca2U, 0x3b25a55f43294bcbU, 0x49ef0eb713f39ebeU, + 
0x6e3569326c784337U, 0x49c2c37f07965404U, 0xdc33745ec97be906U, + 0x69a028bb3ded71a3U, 0xc40832ea0d68ce0cU, 0xf50a3fa490c30190U, + 0x792667c6da79e0faU, 0x577001b891185938U, 0xed4c0226b55e6f86U, + 0x544f8158315b05b4U, 0x696361ae3db1c721U, 0x03bc3a19cd1e38e9U, + 0x04ab48a04065c723U, 0x62eb0d64283f9c76U, 0x3ba5d0bd324f8394U, + 0xca8f44ec7ee36479U, 0x7e998b13cf4e1ecbU, 0x9e3fedd8c321a67eU, + 0xc5cfe94ef3ea101eU, 0xbba1f1d158724a12U, 0x2a8a6e45ae8edc97U, + 0xf52d09d71a3293bdU, 0x593c2626705f9c56U, 0x6f8b2fb00c77836cU, + 0x0b6dfb9c0f956447U, 0x4724bd4189bd5eacU, 0x58edec91ec2cb657U, + 0x2f2967b66737e3edU, 0xbd79e0d20082ee74U, 0xecd8590680a3aa11U, + 0xe80e6f4820cc9495U, 0x3109058d147fdcddU, 0xbd4b46f0599fd415U, + 0x6c9e18ac7007c91aU, 0x03e2cf6bc604ddb0U, 0x84db8346b786151cU, + 0xe612641865679a63U, 0x4fcb7e8f3f60c07eU, 0xe3be5e330f38f09dU, + 0x5cadf5bfd3072cc5U, 0x73d9732fc7c8f7f6U, 0x2867e7fddcdd9afaU, + 0xb281e1fd541501b8U, 0x1f225a7ca91a4226U, 0x3375788de9b06958U, + 0x0052d6b1641c83aeU, 0xc0678c5dbd23a49aU, 0xf840b7ba963646e0U, + 0xb650e5a93bc3d898U, 0xa3e51f138ab4cebeU, 0xc66f336c36b10137U, + 0xb80b0047445d4184U, 0xa60dc059157491e5U, 0x87c89837ad68db2fU, + 0x29babe4598c311fbU, 0xf4296dd6fef3d67aU, 0x1899e4a65f58660cU, + 0x5ec05dcff72e7f8fU, 0x76707543f4fa1f73U, 0x6a06494a791c53a8U, + 0x0487db9d17636892U, 0x45a9d2845d3c42b6U, 0x0b8a2392ba45a9b2U, + 0x8e6cac7768d7141eU, 0x3207d795430cd926U, 0x7f44e6bd49e807b8U, + 0x5f16206c9c6209a6U, 0x36dba887c37a8c0fU, 0xc2494954da2c9789U, + 0xf2db9baa10b7bd6cU, 0x6f92829494e5acc7U, 0xcb772339ba1f17f9U, + 0xff2a760414536efbU, 0xfef5138519684abaU, 0x7eb258665fc25d69U, + 0xef2f773ffbd97a61U, 0xaafb550ffacfd8faU, 0x95ba2a53f983cf38U, + 0xdd945a747bf26183U, 0x94f971119aeef9e4U, 0x7a37cd5601aab85dU, + 0xac62e055c10ab33aU, 0x577b986b314d6009U, 0xed5a7e85fda0b80bU, + 0x14588f13be847307U, 0x596eb2d8ae258fc8U, 0x6fca5f8ed9aef3bbU, + 0x25de7bb9480d5854U, 0xaf561aa79a10ae6aU, 0x1b2ba1518094da04U, + 0x90fb44d2f05d0842U, 0x353a1607ac744a53U, 0x42889b8997915ce8U, + 0x69956135febada11U, 0x43fab9837e699095U, 0x94f967e45e03f4bbU, + 0x1d1be0eebac278f5U, 0x6462d92a69731732U, 0x7d7b8f7503cfdcfeU, + 0x5cda735244c3d43eU, 0x3a0888136afa64a7U, 0x088aaa1845b8fdd0U, + 0x8aad549e57273d45U, 0x36ac54e2f678864bU, 0x84576a1bb416a7ddU, + 0x656d44a2a11c51d5U, 0x9f644ae5a4b1b325U, 0x873d5d9f0dde1feeU, + 0xa90cb506d155a7eaU, 0x09a7f12442d588f2U, 0x0c11ed6d538aeb2fU, + 0x8f1668c8a86da5faU, 0xf96e017d694487bcU, 0x37c981dcc395a9acU, + 0x85bbe253f47b1417U, 0x93956d7478ccec8eU, 0x387ac8d1970027b2U, + 0x06997b05fcc0319eU, 0x441fece3bdf81f03U, 0xd527e81cad7626c3U, + 0x8a71e223d8d3b074U, 0xf6872d5667844e49U, 0xb428f8ac016561dbU, + 0xe13336d701beba52U, 0xecc0024661173473U, 0x27f002d7f95d0190U, + 0x31ec038df7b441f4U, 0x7e67047175a15271U, 0x0f0062c6e984d386U, + 0x52c07b78a3e60868U, 0xa7709a56ccdf8a82U, 0x88a66076400bb691U, + 0x6acff893d00ea435U, 0x0583f6b8c4124d43U, 0xc3727a337a8b704aU, + 0x744f18c0592e4c5cU, 0x1162def06f79df73U, 0x8addcb5645ac2ba8U, + 0x6d953e2bd7173692U, 0xc8fa8db6ccdd0437U, 0x1d9c9892400a22a2U, + 0x2503beb6d00cab4bU, 0x2e44ae64840fd61dU, 0x5ceaecfed289e5d2U, + 0x7425a83e872c5f47U, 0xd12f124e28f77719U, 0x82bd6b70d99aaa6fU, + 0x636cc64d1001550bU, 0x3c47f7e05401aa4eU, 0x65acfaec34810a71U, + 0x7f1839a741a14d0dU, 0x1ede48111209a050U, 0x934aed0aab460432U, + 0xf81da84d5617853fU, 0x36251260ab9d668eU, 0xc1d72b7c6b426019U, + 0xb24cf65b8612f81fU, 0xdee033f26797b627U, 0x169840ef017da3b1U, + 0x8e1f289560ee864eU, 0xf1a6f2bab92a27e2U, 0xae10af696774b1dbU, + 0xacca6da1e0a8ef29U, 0x17fd090a58d32af3U, 
0xddfc4b4cef07f5b0U, + 0x4abdaf101564f98eU, 0x9d6d1ad41abe37f1U, 0x84c86189216dc5edU, + 0x32fd3cf5b4e49bb4U, 0x3fbc8c33221dc2a1U, 0x0fabaf3feaa5334aU, + 0x29cb4d87f2a7400eU, 0x743e20e9ef511012U, 0x914da9246b255416U, + 0x1ad089b6c2f7548eU, 0xa184ac2473b529b1U, 0xc9e5d72d90a2741eU, + 0x7e2fa67c7a658892U, 0xddbb901b98feeab7U, 0x552a74227f3ea565U, + 0xd53a88958f87275fU, 0x8a892abaf368f137U, 0x2d2b7569b0432d85U, + 0x9c3b29620e29fc73U, 0x8349f3ba91b47b8fU, 0x241c70a936219a73U, + 0xed238cd383aa0110U, 0xf4363804324a40aaU, 0xb143c6053edcd0d5U, + 0xdd94b7868e94050aU, 0xca7cf2b4191c8326U, 0xfd1c2f611f63a3f0U, + 0xbc633b39673c8cecU, 0xd5be0503e085d813U, 0x4b2d8644d8a74e18U, + 0xddf8e7d60ed1219eU, 0xcabb90e5c942b503U, 0x3d6a751f3b936243U, + 0x0cc512670a783ad4U, 0x27fb2b80668b24c5U, 0xb1f9f660802dedf6U, + 0x5e7873f8a0396973U, 0xdb0b487b6423e1e8U, 0x91ce1a9a3d2cda62U, + 0x7641a140cc7810fbU, 0xa9e904c87fcb0a9dU, 0x546345fa9fbdcd44U, + 0xa97c177947ad4095U, 0x49ed8eabcccc485dU, 0x5c68f256bfff5a74U, + 0x73832eec6fff3111U, 0xc831fd53c5ff7eabU, 0xba3e7ca8b77f5e55U, + 0x28ce1bd2e55f35ebU, 0x7980d163cf5b81b3U, 0xd7e105bcc332621fU, + 0x8dd9472bf3fefaa7U, 0xb14f98f6f0feb951U, 0x6ed1bf9a569f33d3U, + 0x0a862f80ec4700c8U, 0xcd27bb612758c0faU, 0x8038d51cb897789cU, + 0xe0470a63e6bd56c3U, 0x1858ccfce06cac74U, 0x0f37801e0c43ebc8U, + 0xd30560258f54e6baU, 0x47c6b82ef32a2069U, 0x4cdc331d57fa5441U, + 0xe0133fe4adf8e952U, 0x58180fddd97723a6U, 0x570f09eaa7ea7648U, }; } // namespace diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/cord.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/cord.cc index 6547c2daba..92822c0588 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/cord.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/cord.cc @@ -20,6 +20,7 @@ #include #include #include +#include #include #include #include @@ -34,6 +35,7 @@ #include "absl/base/port.h" #include "absl/container/fixed_array.h" #include "absl/container/inlined_vector.h" +#include "absl/strings/cord_buffer.h" #include "absl/strings/escaping.h" #include "absl/strings/internal/cord_data_edge.h" #include "absl/strings/internal/cord_internal.h" @@ -87,30 +89,6 @@ static inline CordRep* VerifyTree(CordRep* node) { return node; } -// Create a concatenation of the specified nodes. -// Does not change the refcounts of "left" and "right". -// The returned node has a refcount of 1. 
-static CordRep* RawConcat(CordRep* left, CordRep* right) { - // Avoid making degenerate concat nodes (one child is empty) - if (left == nullptr) return right; - if (right == nullptr) return left; - if (left->length == 0) { - CordRep::Unref(left); - return right; - } - if (right->length == 0) { - CordRep::Unref(right); - return left; - } - ABSL_INTERNAL_LOG(FATAL, "CordRepConcat is no longer supported"); - return nullptr; -} - -static CordRep* Concat(CordRep* left, CordRep* right) { - CordRep* rep = RawConcat(left, right); - return VerifyTree(rep); -} - static CordRepFlat* CreateFlat(const char* data, size_t length, size_t alloc_hint) { CordRepFlat* flat = CordRepFlat::New(length + alloc_hint); @@ -151,23 +129,6 @@ void InitializeCordRepExternal(absl::string_view data, CordRepExternal* rep) { } // namespace cord_internal -static CordRep* NewSubstring(CordRep* child, size_t offset, size_t length) { - // Never create empty substring nodes - if (length == 0) { - CordRep::Unref(child); - return nullptr; - } else { - CordRepSubstring* rep = new CordRepSubstring(); - assert(child->IsExternal() || child->IsFlat()); - assert((offset + length) <= child->length); - rep->length = length; - rep->tag = cord_internal::SUBSTRING; - rep->start = offset; - rep->child = child; - return VerifyTree(rep); - } -} - // Creates a CordRep from the provided string. If the string is large enough, // and not wasteful, we move the string into an external cord rep, preserving // the already allocated string contents. @@ -200,7 +161,9 @@ static CordRep* CordRepFromString(std::string&& src) { // -------------------------------------------------------------------- // Cord::InlineRep functions +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL constexpr unsigned char Cord::InlineRep::kMaxInline; +#endif inline void Cord::InlineRep::set_data(const char* data, size_t n) { static_assert(kMaxInline == 15, "set_data is hard-coded for a length of 15"); @@ -222,7 +185,7 @@ inline void Cord::InlineRep::reduce_size(size_t n) { assert(tag >= n); tag -= n; memset(data_.as_chars() + tag, 0, n); - set_inline_size(static_cast(tag)); + set_inline_size(tag); } inline void Cord::InlineRep::remove_prefix(size_t n) { @@ -457,6 +420,7 @@ Cord& Cord::operator=(absl::string_view src) { // we keep it here to make diffs easier. void Cord::InlineRep::AppendArray(absl::string_view src, MethodIdentifier method) { + MaybeRemoveEmptyCrcNode(); if (src.empty()) return; // memcpy(_, nullptr, 0) is undefined. size_t appended = 0; @@ -516,6 +480,10 @@ inline CordRep* Cord::TakeRep() && { template inline void Cord::AppendImpl(C&& src) { auto constexpr method = CordzUpdateTracker::kAppendCord; + + contents_.MaybeRemoveEmptyCrcNode(); + if (src.empty()) return; + if (empty()) { // Since destination is empty, we can avoid allocating a node, if (src.contents_.is_tree()) { @@ -561,6 +529,52 @@ inline void Cord::AppendImpl(C&& src) { contents_.AppendTree(rep, CordzUpdateTracker::kAppendCord); } +static CordRep::ExtractResult ExtractAppendBuffer(CordRep* rep, + size_t min_capacity) { + switch (rep->tag) { + case cord_internal::BTREE: + return CordRepBtree::ExtractAppendBuffer(rep->btree(), min_capacity); + default: + if (rep->IsFlat() && rep->refcount.IsOne() && + rep->flat()->Capacity() - rep->length >= min_capacity) { + return {nullptr, rep}; + } + return {rep, nullptr}; + } +} + +static CordBuffer CreateAppendBuffer(InlineData& data, size_t block_size, + size_t capacity) { + // Watch out for overflow, people can ask for size_t::max(). 
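  // For example: if the inline size is 7 and a caller asks for
  // std::numeric_limits<size_t>::max(), then max_capacity below is
  // SIZE_MAX - 7, so the clamped `capacity + size` lands exactly on
  // SIZE_MAX instead of wrapping around to a tiny value.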
+ const size_t size = data.inline_size(); + const size_t max_capacity = std::numeric_limits::max() - size; + capacity = (std::min)(max_capacity, capacity) + size; + CordBuffer buffer = + block_size ? CordBuffer::CreateWithCustomLimit(block_size, capacity) + : CordBuffer::CreateWithDefaultLimit(capacity); + cord_internal::SmallMemmove(buffer.data(), data.as_chars(), size); + buffer.SetLength(size); + data = {}; + return buffer; +} + +CordBuffer Cord::GetAppendBufferSlowPath(size_t block_size, size_t capacity, + size_t min_capacity) { + auto constexpr method = CordzUpdateTracker::kGetAppendBuffer; + CordRep* tree = contents_.tree(); + if (tree != nullptr) { + CordzUpdateScope scope(contents_.cordz_info(), method); + CordRep::ExtractResult result = ExtractAppendBuffer(tree, min_capacity); + if (result.extracted != nullptr) { + contents_.SetTreeOrEmpty(result.tree, scope); + return CordBuffer(result.extracted->flat()); + } + return block_size ? CordBuffer::CreateWithCustomLimit(block_size, capacity) + : CordBuffer::CreateWithDefaultLimit(capacity); + } + return CreateAppendBuffer(contents_.data_, block_size, capacity); +} + void Cord::Append(const Cord& src) { AppendImpl(src); } @@ -582,6 +596,9 @@ void Cord::Append(T&& src) { template void Cord::Append(std::string&& src); void Cord::Prepend(const Cord& src) { + contents_.MaybeRemoveEmptyCrcNode(); + if (src.empty()) return; + CordRep* src_tree = src.contents_.tree(); if (src_tree != nullptr) { CordRep::Ref(src_tree); @@ -596,16 +613,18 @@ void Cord::Prepend(const Cord& src) { } void Cord::PrependArray(absl::string_view src, MethodIdentifier method) { + contents_.MaybeRemoveEmptyCrcNode(); if (src.empty()) return; // memcpy(_, nullptr, 0) is undefined. + if (!contents_.is_tree()) { size_t cur_size = contents_.inline_size(); if (cur_size + src.size() <= InlineRep::kMaxInline) { // Use embedded storage. 
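  // The inline path below assembles `src` followed by the existing inline
  // bytes in a scratch InlineData, then stores the result into
  // contents_.data_ with a single assignment.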
- char data[InlineRep::kMaxInline + 1] = {0}; - memcpy(data, src.data(), src.size()); - memcpy(data + src.size(), contents_.data(), cur_size); - memcpy(contents_.data_.as_chars(), data, InlineRep::kMaxInline + 1); - contents_.set_inline_size(cur_size + src.size()); + InlineData data; + memcpy(data.as_chars(), src.data(), src.size()); + memcpy(data.as_chars() + src.size(), contents_.data(), cur_size); + data.set_inline_size(cur_size + src.size()); + contents_.data_ = data; return; } } @@ -613,84 +632,50 @@ void Cord::PrependArray(absl::string_view src, MethodIdentifier method) { contents_.PrependTree(rep, method); } -template > -inline void Cord::Prepend(T&& src) { - if (src.size() <= kMaxBytesToCopy) { - Prepend(absl::string_view(src)); +void Cord::AppendPrecise(absl::string_view src, MethodIdentifier method) { + assert(!src.empty()); + assert(src.size() <= cord_internal::kMaxFlatLength); + if (contents_.remaining_inline_capacity() >= src.size()) { + const size_t inline_length = contents_.inline_size(); + memcpy(contents_.data_.as_chars() + inline_length, src.data(), src.size()); + contents_.set_inline_size(inline_length + src.size()); } else { - CordRep* rep = CordRepFromString(std::forward(src)); - contents_.PrependTree(rep, CordzUpdateTracker::kPrependString); + contents_.AppendTree(CordRepFlat::Create(src), method); } } -template void Cord::Prepend(std::string&& src); - -static CordRep* RemovePrefixFrom(CordRep* node, size_t n) { - if (n >= node->length) return nullptr; - if (n == 0) return CordRep::Ref(node); - absl::InlinedVector rhs_stack; - - assert(!node->IsCrc()); - assert(n <= node->length); - - if (n == 0) { - CordRep::Ref(node); +void Cord::PrependPrecise(absl::string_view src, MethodIdentifier method) { + assert(!src.empty()); + assert(src.size() <= cord_internal::kMaxFlatLength); + if (contents_.remaining_inline_capacity() >= src.size()) { + const size_t cur_size = contents_.inline_size(); + InlineData data; + memcpy(data.as_chars(), src.data(), src.size()); + memcpy(data.as_chars() + src.size(), contents_.data(), cur_size); + data.set_inline_size(cur_size + src.size()); + contents_.data_ = data; } else { - size_t start = n; - size_t len = node->length - n; - if (node->IsSubstring()) { - // Consider in-place update of node, similar to in RemoveSuffixFrom(). - start += node->substring()->start; - node = node->substring()->child; - } - node = NewSubstring(CordRep::Ref(node), start, len); - } - while (!rhs_stack.empty()) { - node = Concat(node, CordRep::Ref(rhs_stack.back())); - rhs_stack.pop_back(); + contents_.PrependTree(CordRepFlat::Create(src), method); } - return node; } -// RemoveSuffixFrom() is very similar to RemovePrefixFrom(), with the -// exception that removing a suffix has an optimization where a node may be -// edited in place iff that node and all its ancestors have a refcount of 1. -static CordRep* RemoveSuffixFrom(CordRep* node, size_t n) { - if (n >= node->length) return nullptr; - if (n == 0) return CordRep::Ref(node); - absl::InlinedVector lhs_stack; - bool inplace_ok = node->refcount.IsOne(); - assert(!node->IsCrc()); - - assert(n <= node->length); - - if (n == 0) { - CordRep::Ref(node); - } else if (inplace_ok && !node->IsExternal()) { - // Consider making a new buffer if the current node capacity is much - // larger than the new length. 
- CordRep::Ref(node); - node->length -= n; +template > +inline void Cord::Prepend(T&& src) { + if (src.size() <= kMaxBytesToCopy) { + Prepend(absl::string_view(src)); } else { - size_t start = 0; - size_t len = node->length - n; - if (node->IsSubstring()) { - start = node->substring()->start; - node = node->substring()->child; - } - node = NewSubstring(CordRep::Ref(node), start, len); - } - while (!lhs_stack.empty()) { - node = Concat(CordRep::Ref(lhs_stack.back()), node); - lhs_stack.pop_back(); + CordRep* rep = CordRepFromString(std::forward(src)); + contents_.PrependTree(rep, CordzUpdateTracker::kPrependString); } - return node; } +template void Cord::Prepend(std::string&& src); + void Cord::RemovePrefix(size_t n) { ABSL_INTERNAL_CHECK(n <= size(), absl::StrCat("Requested prefix size ", n, " exceeds Cord's size ", size())); + contents_.MaybeRemoveEmptyCrcNode(); CordRep* tree = contents_.tree(); if (tree == nullptr) { contents_.remove_prefix(n); @@ -698,14 +683,20 @@ void Cord::RemovePrefix(size_t n) { auto constexpr method = CordzUpdateTracker::kRemovePrefix; CordzUpdateScope scope(contents_.cordz_info(), method); tree = cord_internal::RemoveCrcNode(tree); - if (tree->IsBtree()) { + if (n >= tree->length) { + CordRep::Unref(tree); + tree = nullptr; + } else if (tree->IsBtree()) { CordRep* old = tree; tree = tree->btree()->SubTree(n, tree->length - n); CordRep::Unref(old); + } else if (tree->IsSubstring() && tree->refcount.IsOne()) { + tree->substring()->start += n; + tree->length -= n; } else { - CordRep* newrep = RemovePrefixFrom(tree, n); + CordRep* rep = CordRepSubstring::Substring(tree, n, tree->length - n); CordRep::Unref(tree); - tree = VerifyTree(newrep); + tree = rep; } contents_.SetTreeOrEmpty(tree, scope); } @@ -715,6 +706,7 @@ void Cord::RemoveSuffix(size_t n) { ABSL_INTERNAL_CHECK(n <= size(), absl::StrCat("Requested suffix size ", n, " exceeds Cord's size ", size())); + contents_.MaybeRemoveEmptyCrcNode(); CordRep* tree = contents_.tree(); if (tree == nullptr) { contents_.reduce_size(n); @@ -722,59 +714,23 @@ void Cord::RemoveSuffix(size_t n) { auto constexpr method = CordzUpdateTracker::kRemoveSuffix; CordzUpdateScope scope(contents_.cordz_info(), method); tree = cord_internal::RemoveCrcNode(tree); - if (tree->IsBtree()) { + if (n >= tree->length) { + CordRep::Unref(tree); + tree = nullptr; + } else if (tree->IsBtree()) { tree = CordRepBtree::RemoveSuffix(tree->btree(), n); + } else if (!tree->IsExternal() && tree->refcount.IsOne()) { + assert(tree->IsFlat() || tree->IsSubstring()); + tree->length -= n; } else { - CordRep* newrep = RemoveSuffixFrom(tree, n); + CordRep* rep = CordRepSubstring::Substring(tree, 0, tree->length - n); CordRep::Unref(tree); - tree = VerifyTree(newrep); + tree = rep; } contents_.SetTreeOrEmpty(tree, scope); } } -// Work item for NewSubRange(). -struct SubRange { - SubRange(CordRep* a_node, size_t a_pos, size_t a_n) - : node(a_node), pos(a_pos), n(a_n) {} - CordRep* node; // nullptr means concat last 2 results. 
- size_t pos; - size_t n; -}; - -static CordRep* NewSubRange(CordRep* node, size_t pos, size_t n) { - absl::InlinedVector results; - absl::InlinedVector todo; - assert(!node->IsCrc()); - todo.push_back(SubRange(node, pos, n)); - do { - const SubRange& sr = todo.back(); - node = sr.node; - pos = sr.pos; - n = sr.n; - todo.pop_back(); - - if (node == nullptr) { - assert(results.size() >= 2); - CordRep* right = results.back(); - results.pop_back(); - CordRep* left = results.back(); - results.pop_back(); - results.push_back(Concat(left, right)); - } else if (pos == 0 && n == node->length) { - results.push_back(CordRep::Ref(node)); - } else { - if (node->IsSubstring()) { - pos += node->substring()->start; - node = node->substring()->child; - } - results.push_back(NewSubstring(CordRep::Ref(node), pos, n)); - } - } while (!todo.empty()); - assert(results.size() == 1); - return results[0]; -} - Cord Cord::Subcord(size_t pos, size_t new_size) const { Cord sub_cord; size_t length = size(); @@ -808,7 +764,7 @@ Cord Cord::Subcord(size_t pos, size_t new_size) const { if (tree->IsBtree()) { tree = tree->btree()->SubTree(pos, new_size); } else { - tree = NewSubRange(tree, pos, new_size); + tree = CordRepSubstring::Substring(tree, pos, new_size); } sub_cord.contents_.EmplaceTree(tree, contents_.data_, CordzUpdateTracker::kSubCord); @@ -900,9 +856,11 @@ inline absl::string_view Cord::InlineRep::FindFlatStartPiece() const { void Cord::SetExpectedChecksum(uint32_t crc) { auto constexpr method = CordzUpdateTracker::kSetExpectedChecksum; - if (empty()) return; - - if (!contents_.is_tree()) { + if (empty()) { + contents_.MaybeRemoveEmptyCrcNode(); + CordRep* rep = CordRepCrc::New(nullptr, crc); + contents_.EmplaceTree(rep, method); + } else if (!contents_.is_tree()) { CordRep* rep = contents_.MakeFlatWithExtraCapacity(0); rep = CordRepCrc::New(rep, crc); contents_.EmplaceTree(rep, method); @@ -985,6 +943,7 @@ inline int Cord::CompareSlowPath(const Cord& rhs, size_t compared_size, } inline absl::string_view Cord::GetFirstChunk(const Cord& c) { + if (c.empty()) return {}; return c.contents_.FindFlatStartPiece(); } inline absl::string_view Cord::GetFirstChunk(absl::string_view sv) { @@ -1155,14 +1114,9 @@ Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) { : current_leaf_; const char* data = payload->IsExternal() ? 
payload->external()->base : payload->flat()->Data(); - const size_t offset = current_chunk_.data() - data; - - CordRepSubstring* tree = new CordRepSubstring(); - tree->tag = cord_internal::SUBSTRING; - tree->length = n; - tree->start = offset; - tree->child = CordRep::Ref(payload); + const size_t offset = static_cast(current_chunk_.data() - data); + auto* tree = CordRepSubstring::Substring(payload, offset, n); subcord.contents_.EmplaceTree(VerifyTree(tree), method); bytes_remaining_ -= n; current_chunk_.remove_prefix(n); @@ -1227,6 +1181,10 @@ absl::string_view Cord::FlattenSlowPath() { /* static */ bool Cord::GetFlatAux(CordRep* rep, absl::string_view* fragment) { assert(rep != nullptr); + if (rep->length == 0) { + *fragment = absl::string_view(); + return true; + } rep = cord_internal::SkipCrcNode(rep); if (rep->IsFlat()) { *fragment = absl::string_view(rep->flat()->Data(), rep->length); @@ -1258,6 +1216,7 @@ absl::string_view Cord::FlattenSlowPath() { absl::cord_internal::CordRep* rep, absl::FunctionRef callback) { assert(rep != nullptr); + if (rep->length == 0) return; rep = cord_internal::SkipCrcNode(rep); if (rep->IsBtree()) { @@ -1291,7 +1250,11 @@ static void DumpNode(CordRep* rep, bool include_data, std::ostream* os, if (include_data) *os << static_cast(rep); *os << "]"; *os << " " << std::setw(indent) << ""; - if (rep->IsCrc()) { + bool leaf = false; + if (rep == nullptr) { + *os << "NULL\n"; + leaf = true; + } else if (rep->IsCrc()) { *os << "CRC crc=" << rep->crc()->crc << "\n"; indent += kIndentStep; rep = rep->crc()->child; @@ -1300,6 +1263,7 @@ static void DumpNode(CordRep* rep, bool include_data, std::ostream* os, indent += kIndentStep; rep = rep->substring()->child; } else { // Leaf or ring + leaf = true; if (rep->IsExternal()) { *os << "EXTERNAL ["; if (include_data) @@ -1313,6 +1277,8 @@ static void DumpNode(CordRep* rep, bool include_data, std::ostream* os, } else { CordRepBtree::Dump(rep, /*label=*/ "", include_data, *os); } + } + if (leaf) { if (stack.empty()) break; rep = stack.back(); stack.pop_back(); @@ -1358,11 +1324,14 @@ static bool VerifyNode(CordRep* root, CordRep* start_node, node->substring()->child->length, ReportError(root, node)); } else if (node->IsCrc()) { - ABSL_INTERNAL_CHECK(node->crc()->child != nullptr, - ReportError(root, node)); - ABSL_INTERNAL_CHECK(node->crc()->length == node->crc()->child->length, - ReportError(root, node)); - worklist.push_back(node->crc()->child); + ABSL_INTERNAL_CHECK( + node->crc()->child != nullptr || node->crc()->length == 0, + ReportError(root, node)); + if (node->crc()->child != nullptr) { + ABSL_INTERNAL_CHECK(node->crc()->length == node->crc()->child->length, + ReportError(root, node)); + worklist.push_back(node->crc()->child); + } } } while (!worklist.empty()); return true; @@ -1370,7 +1339,7 @@ static bool VerifyNode(CordRep* root, CordRep* start_node, std::ostream& operator<<(std::ostream& out, const Cord& cord) { for (absl::string_view chunk : cord.Chunks()) { - out.write(chunk.data(), chunk.size()); + out.write(chunk.data(), static_cast(chunk.size())); } return out; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/cord.h b/TMessagesProj/jni/voip/webrtc/absl/strings/cord.h index 081b6311e8..6e3da89e62 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/cord.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/cord.h @@ -20,8 +20,7 @@ // structure. A Cord is a string-like sequence of characters optimized for // specific use cases. 
Unlike a `std::string`, which stores an array of // contiguous characters, Cord data is stored in a structure consisting of -// separate, reference-counted "chunks." (Currently, this implementation is a -// tree structure, though that implementation may change.) +// separate, reference-counted "chunks." // // Because a Cord consists of these chunks, data can be added to or removed from // a Cord during its lifetime. Chunks may also be shared between Cords. Unlike a @@ -80,6 +79,7 @@ #include "absl/functional/function_ref.h" #include "absl/meta/type_traits.h" #include "absl/strings/cord_analysis.h" +#include "absl/strings/cord_buffer.h" #include "absl/strings/internal/cord_data_edge.h" #include "absl/strings/internal/cord_internal.h" #include "absl/strings/internal/cord_rep_btree.h" @@ -244,6 +244,58 @@ class Cord { template = 0> void Append(T&& src); + // Appends `buffer` to this cord, unless `buffer` has a zero length in which + // case this method has no effect on this cord instance. + // This method is guaranteed to consume `buffer`. + void Append(CordBuffer buffer); + + // Returns a CordBuffer, re-using potential existing capacity in this cord. + // + // Cord instances may have additional unused capacity in the last (or first) + // nodes of the underlying tree to facilitate amortized growth. This method + // allows applications to explicitly use this spare capacity if available, + // or create a new CordBuffer instance otherwise. + // If this cord has a final non-shared node with at least `min_capacity` + // available, then this method will return that buffer including its data + // contents. I.e.; the returned buffer will have a non-zero length, and + // a capacity of at least `buffer.length + min_capacity`. Otherwise, this + // method will return `CordBuffer::CreateWithDefaultLimit(capacity)`. + // + // Below an example of using GetAppendBuffer. Notice that in this example we + // use `GetAppendBuffer()` only on the first iteration. As we know nothing + // about any initial extra capacity in `cord`, we may be able to use the extra + // capacity. But as we add new buffers with fully utilized contents after that + // we avoid calling `GetAppendBuffer()` on subsequent iterations: while this + // works fine, it results in an unnecessary inspection of cord contents: + // + // void AppendRandomDataToCord(absl::Cord &cord, size_t n) { + // bool first = true; + // while (n > 0) { + // CordBuffer buffer = first ? cord.GetAppendBuffer(n) + // : CordBuffer::CreateWithDefaultLimit(n); + // absl::Span data = buffer.available_up_to(n); + // FillRandomValues(data.data(), data.size()); + // buffer.IncreaseLengthBy(data.size()); + // cord.Append(std::move(buffer)); + // n -= data.size(); + // first = false; + // } + // } + CordBuffer GetAppendBuffer(size_t capacity, size_t min_capacity = 16); + + // Returns a CordBuffer, re-using potential existing capacity in this cord. + // + // This function is identical to `GetAppendBuffer`, except that in the case + // where a new `CordBuffer` is allocated, it is allocated using the provided + // custom limit instead of the default limit. `GetAppendBuffer` will default + // to `CordBuffer::CreateWithDefaultLimit(capacity)` whereas this method + // will default to `CordBuffer::CreateWithCustomLimit(block_size, capacity)`. + // This method is equivalent to `GetAppendBuffer` if `block_size` is zero. + // See the documentation for `CreateWithCustomLimit` for more details on the + // restrictions and legal values for `block_size`. 
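The doc comment above introduces `GetCustomAppendBuffer` without its own example, so here is a minimal sketch of the intended call pattern, mirroring the `GetAppendBuffer` example earlier in this header. `AppendRandomDataWithLargeBuffers` and `FillRandomValues` are hypothetical names, and the 64 KiB block size is only an illustrative choice.

  void AppendRandomDataWithLargeBuffers(absl::Cord& cord, size_t n) {
    bool first = true;
    while (n > 0) {
      // Reuse spare capacity in `cord` only on the first iteration; later
      // buffers are added fully utilized, so allocate fresh 64 KiB blocks.
      absl::CordBuffer buffer =
          first ? cord.GetCustomAppendBuffer(64 << 10, n)
                : absl::CordBuffer::CreateWithCustomLimit(64 << 10, n);
      absl::Span<char> data = buffer.available_up_to(n);
      FillRandomValues(data.data(), data.size());  // hypothetical helper
      buffer.IncreaseLengthBy(data.size());
      cord.Append(std::move(buffer));
      n -= data.size();
      first = false;
    }
  }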
+ CordBuffer GetCustomAppendBuffer(size_t block_size, size_t capacity, + size_t min_capacity = 16); + // Cord::Prepend() // // Prepends data to the Cord, which may come from another Cord or other string @@ -253,6 +305,11 @@ class Cord { template = 0> void Prepend(T&& src); + // Prepends `buffer` to this cord, unless `buffer` has a zero length in which + // case this method has no effect on this cord instance. + // This method is guaranteed to consume `buffer`. + void Prepend(CordBuffer buffer); + // Cord::RemovePrefix() // // Removes the first `n` bytes of a Cord. @@ -289,7 +346,7 @@ class Cord { // Cord::EstimatedMemoryUsage() // // Returns the *approximate* number of bytes held by this cord. - // See CordMemoryAccounting for more information on accounting method used. + // See CordMemoryAccounting for more information on the accounting method. size_t EstimatedMemoryUsage(CordMemoryAccounting accounting_method = CordMemoryAccounting::kTotal) const; @@ -341,7 +398,7 @@ class Cord { //---------------------------------------------------------------------------- // // A `Cord::ChunkIterator` allows iteration over the constituent chunks of its - // Cord. Such iteration allows you to perform non-const operatons on the data + // Cord. Such iteration allows you to perform non-const operations on the data // of a Cord without modifying it. // // Generally, you do not instantiate a `Cord::ChunkIterator` directly; @@ -422,7 +479,7 @@ class Cord { CordRepBtreeReader btree_reader_; }; - // Cord::ChunkIterator::chunk_begin() + // Cord::chunk_begin() // // Returns an iterator to the first chunk of the `Cord`. // @@ -438,7 +495,7 @@ class Cord { // } ChunkIterator chunk_begin() const; - // Cord::ChunkItertator::chunk_end() + // Cord::chunk_end() // // Returns an iterator one increment past the last chunk of the `Cord`. // @@ -448,7 +505,7 @@ class Cord { ChunkIterator chunk_end() const; //---------------------------------------------------------------------------- - // Cord::ChunkIterator::ChunkRange + // Cord::ChunkRange //---------------------------------------------------------------------------- // // `ChunkRange` is a helper class for iterating over the chunks of the `Cord`, @@ -462,7 +519,7 @@ class Cord { class ChunkRange { public: // Fulfill minimum c++ container requirements [container.requirements] - // Theses (partial) container type definitions allow ChunkRange to be used + // These (partial) container type definitions allow ChunkRange to be used // in various utilities expecting a subset of [container.requirements]. // For example, the below enables using `::testing::ElementsAre(...)` using value_type = absl::string_view; @@ -482,9 +539,9 @@ class Cord { // Cord::Chunks() // - // Returns a `Cord::ChunkIterator::ChunkRange` for iterating over the chunks - // of a `Cord` with a range-based for-loop. For most iteration tasks on a - // Cord, use `Cord::Chunks()` to retrieve this iterator. + // Returns a `Cord::ChunkRange` for iterating over the chunks of a `Cord` with + // a range-based for-loop. For most iteration tasks on a Cord, use + // `Cord::Chunks()` to retrieve this iterator. // // Example: // @@ -550,7 +607,7 @@ class Cord { ChunkIterator chunk_iterator_; }; - // Cord::CharIterator::AdvanceAndRead() + // Cord::AdvanceAndRead() // // Advances the `Cord::CharIterator` by `n_bytes` and returns the bytes // advanced as a separate `Cord`. `n_bytes` must be less than or equal to the @@ -558,21 +615,21 @@ class Cord { // valid to pass `char_end()` and `0`. 
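Building on the `AdvanceAndRead` comment above, a small sketch of splitting off a prefix as its own Cord; `TakePrefix` is a hypothetical wrapper, and the caller must guarantee `n <= cord.size()` as the comment requires.

  absl::Cord TakePrefix(const absl::Cord& cord, size_t n) {
    absl::Cord::CharIterator it = cord.char_begin();
    // Returns the first n bytes as a separate Cord, leaving `it` positioned
    // n bytes into `cord`. Undefined behavior if n exceeds cord.size().
    return absl::Cord::AdvanceAndRead(&it, n);
  }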
static Cord AdvanceAndRead(CharIterator* it, size_t n_bytes); - // Cord::CharIterator::Advance() + // Cord::Advance() // // Advances the `Cord::CharIterator` by `n_bytes`. `n_bytes` must be less than // or equal to the number of bytes remaining within the Cord; otherwise, // behavior is undefined. It is valid to pass `char_end()` and `0`. static void Advance(CharIterator* it, size_t n_bytes); - // Cord::CharIterator::ChunkRemaining() + // Cord::ChunkRemaining() // // Returns the longest contiguous view starting at the iterator's position. // // `it` must be dereferenceable. static absl::string_view ChunkRemaining(const CharIterator& it); - // Cord::CharIterator::char_begin() + // Cord::char_begin() // // Returns an iterator to the first character of the `Cord`. // @@ -581,7 +638,7 @@ class Cord { // a `CharIterator` where range-based for-loops may not be available. CharIterator char_begin() const; - // Cord::CharIterator::char_end() + // Cord::char_end() // // Returns an iterator to one past the last character of the `Cord`. // @@ -590,13 +647,13 @@ class Cord { // a `CharIterator` where range-based for-loops are not useful. CharIterator char_end() const; - // Cord::CharIterator::CharRange + // Cord::CharRange // // `CharRange` is a helper class for iterating over the characters of a // producing an iterator which can be used within a range-based for loop. // Construction of a `CharRange` will return an iterator pointing to the first // character of the Cord. Generally, do not construct a `CharRange` directly; - // instead, prefer to use the `Cord::Chars()` method show below. + // instead, prefer to use the `Cord::Chars()` method shown below. // // Implementation note: `CharRange` is simply a convenience wrapper over // `Cord::char_begin()` and `Cord::char_end()`. @@ -621,11 +678,11 @@ class Cord { const Cord* cord_; }; - // Cord::CharIterator::Chars() + // Cord::Chars() // - // Returns a `Cord::CharIterator` for iterating over the characters of a - // `Cord` with a range-based for-loop. For most character-based iteration - // tasks on a Cord, use `Cord::Chars()` to retrieve this iterator. + // Returns a `Cord::CharRange` for iterating over the characters of a `Cord` + // with a range-based for-loop. For most character-based iteration tasks on a + // Cord, use `Cord::Chars()` to retrieve this iterator. // // Example: // @@ -768,6 +825,7 @@ class Cord { // Returns nullptr if holding bytes absl::cord_internal::CordRep* tree() const; absl::cord_internal::CordRep* as_tree() const; + const char* as_chars() const; // Returns non-null iff was holding a pointer absl::cord_internal::CordRep* clear(); // Converts to pointer if necessary. @@ -815,33 +873,15 @@ class Cord { void PrependTreeToTree(CordRep* tree, MethodIdentifier method); void PrependTree(CordRep* tree, MethodIdentifier method); - template - void GetAppendRegion(char** region, size_t* size, size_t length); - bool IsSame(const InlineRep& other) const { return memcmp(&data_, &other.data_, sizeof(data_)) == 0; } - int BitwiseCompare(const InlineRep& other) const { - uint64_t x, y; - // Use memcpy to avoid aliasing issues. - memcpy(&x, &data_, sizeof(x)); - memcpy(&y, &other.data_, sizeof(y)); - if (x == y) { - memcpy(&x, reinterpret_cast(&data_) + 8, sizeof(x)); - memcpy(&y, reinterpret_cast(&other.data_) + 8, sizeof(y)); - if (x == y) return 0; - } - return absl::big_endian::FromHost64(x) < absl::big_endian::FromHost64(y) - ? -1 - : 1; - } void CopyTo(std::string* dst) const { // memcpy is much faster when operating on a known size. 
On most supported // platforms, the small string optimization is large enough that resizing // to 15 bytes does not cause a memory allocation. - absl::strings_internal::STLStringResizeUninitialized(dst, - sizeof(data_) - 1); - memcpy(&(*dst)[0], &data_, sizeof(data_) - 1); + absl::strings_internal::STLStringResizeUninitialized(dst, kMaxInline); + memcpy(&(*dst)[0], data_.as_chars(), kMaxInline); // erase is faster than resize because the logic for memory allocation is // not needed. dst->erase(inline_size()); @@ -886,6 +926,13 @@ class Cord { void set_inline_size(size_t size) { data_.set_inline_size(size); } size_t inline_size() const { return data_.inline_size(); } + // Empty cords that carry a checksum have a CordRepCrc node with a null + // child node. The code can avoid lots of special cases where it would + // otherwise transition from tree to inline storage if we just remove the + // CordRepCrc node before mutations. Must never be called inside a + // CordzUpdateScope since it untracks the cordz info. + void MaybeRemoveEmptyCrcNode(); + cord_internal::InlineData data_; }; InlineRep contents_; @@ -927,6 +974,16 @@ class Cord { template void AppendImpl(C&& src); + // Appends / Prepends `src` to this instance, using precise sizing. + // This method does explicitly not attempt to use any spare capacity + // in any pending last added private owned flat. + // Requires `src` to be <= kMaxFlatLength. + void AppendPrecise(absl::string_view src, MethodIdentifier method); + void PrependPrecise(absl::string_view src, MethodIdentifier method); + + CordBuffer GetAppendBufferSlowPath(size_t block_size, size_t capacity, + size_t min_capacity); + // Prepends the provided data to this instance. `method` contains the public // API method for this action which is tracked for Cordz sampling purposes. void PrependArray(absl::string_view src, MethodIdentifier method); @@ -963,17 +1020,17 @@ namespace cord_internal { // Fast implementation of memmove for up to 15 bytes. This implementation is // safe for overlapping regions. If nullify_tail is true, the destination is -// padded with '\0' up to 16 bytes. +// padded with '\0' up to 15 bytes. template inline void SmallMemmove(char* dst, const char* src, size_t n) { if (n >= 8) { - assert(n <= 16); + assert(n <= 15); uint64_t buf1; uint64_t buf2; memcpy(&buf1, src, 8); memcpy(&buf2, src + n - 8, 8); if (nullify_tail) { - memset(dst + 8, 0, 8); + memset(dst + 7, 0, 8); } memcpy(dst, &buf1, 8); memcpy(dst + n - 8, &buf2, 8); @@ -984,7 +1041,7 @@ inline void SmallMemmove(char* dst, const char* src, size_t n) { memcpy(&buf2, src + n - 4, 4); if (nullify_tail) { memset(dst + 4, 0, 4); - memset(dst + 8, 0, 8); + memset(dst + 7, 0, 8); } memcpy(dst, &buf1, 4); memcpy(dst + n - 4, &buf2, 4); @@ -995,7 +1052,7 @@ inline void SmallMemmove(char* dst, const char* src, size_t n) { dst[n - 1] = src[n - 1]; } if (nullify_tail) { - memset(dst + 8, 0, 8); + memset(dst + 7, 0, 8); memset(dst + n, 0, 8); } } @@ -1094,6 +1151,11 @@ inline const char* Cord::InlineRep::data() const { return is_tree() ? 
nullptr : data_.as_chars(); } +inline const char* Cord::InlineRep::as_chars() const { + assert(!data_.is_tree()); + return data_.as_chars(); +} + inline absl::cord_internal::CordRep* Cord::InlineRep::as_tree() const { assert(data_.is_tree()); return data_.as_tree(); @@ -1119,7 +1181,7 @@ inline cord_internal::CordRepFlat* Cord::InlineRep::MakeFlatWithExtraCapacity( size_t len = data_.inline_size(); auto* result = CordRepFlat::New(len + extra); result->length = len; - memcpy(result->Data(), data_.as_chars(), sizeof(data_)); + memcpy(result->Data(), data_.as_chars(), InlineRep::kMaxInline); return result; } @@ -1181,6 +1243,18 @@ inline void Cord::InlineRep::CopyToArray(char* dst) const { cord_internal::SmallMemmove(dst, data_.as_chars(), n); } +inline void Cord::InlineRep::MaybeRemoveEmptyCrcNode() { + CordRep* rep = tree(); + if (rep == nullptr || ABSL_PREDICT_TRUE(rep->length > 0)) { + return; + } + assert(rep->IsCrc()); + assert(rep->crc()->child == nullptr); + CordzInfo::MaybeUntrackCord(cordz_info()); + CordRep::Unref(rep); + ResetToEmpty(); +} + constexpr inline Cord::Cord() noexcept {} inline Cord::Cord(absl::string_view src) @@ -1230,7 +1304,7 @@ inline size_t Cord::size() const { return contents_.size(); } -inline bool Cord::empty() const { return contents_.empty(); } +inline bool Cord::empty() const { return size() == 0; } inline size_t Cord::EstimatedMemoryUsage( CordMemoryAccounting accounting_method) const { @@ -1278,12 +1352,47 @@ inline void Cord::Prepend(absl::string_view src) { PrependArray(src, CordzUpdateTracker::kPrependString); } +inline void Cord::Append(CordBuffer buffer) { + if (ABSL_PREDICT_FALSE(buffer.length() == 0)) return; + absl::string_view short_value; + if (CordRep* rep = buffer.ConsumeValue(short_value)) { + contents_.AppendTree(rep, CordzUpdateTracker::kAppendCordBuffer); + } else { + AppendPrecise(short_value, CordzUpdateTracker::kAppendCordBuffer); + } +} + +inline void Cord::Prepend(CordBuffer buffer) { + if (ABSL_PREDICT_FALSE(buffer.length() == 0)) return; + absl::string_view short_value; + if (CordRep* rep = buffer.ConsumeValue(short_value)) { + contents_.PrependTree(rep, CordzUpdateTracker::kPrependCordBuffer); + } else { + PrependPrecise(short_value, CordzUpdateTracker::kPrependCordBuffer); + } +} + +inline CordBuffer Cord::GetAppendBuffer(size_t capacity, size_t min_capacity) { + if (empty()) return CordBuffer::CreateWithDefaultLimit(capacity); + return GetAppendBufferSlowPath(0, capacity, min_capacity); +} + +inline CordBuffer Cord::GetCustomAppendBuffer(size_t block_size, + size_t capacity, + size_t min_capacity) { + if (empty()) { + return block_size ? 
CordBuffer::CreateWithCustomLimit(block_size, capacity) + : CordBuffer::CreateWithDefaultLimit(capacity); + } + return GetAppendBufferSlowPath(block_size, capacity, min_capacity); +} + extern template void Cord::Append(std::string&& src); extern template void Cord::Prepend(std::string&& src); inline int Cord::Compare(const Cord& rhs) const { if (!contents_.is_tree() && !rhs.contents_.is_tree()) { - return contents_.BitwiseCompare(rhs.contents_); + return contents_.data_.Compare(rhs.contents_.data_); } return CompareImpl(rhs); @@ -1321,7 +1430,11 @@ inline Cord::ChunkIterator::ChunkIterator(cord_internal::CordRep* tree) { inline Cord::ChunkIterator::ChunkIterator(const Cord* cord) { if (CordRep* tree = cord->contents_.tree()) { bytes_remaining_ = tree->length; - InitTree(tree); + if (ABSL_PREDICT_TRUE(bytes_remaining_ != 0)) { + InitTree(tree); + } else { + current_chunk_ = {}; + } } else { bytes_remaining_ = cord->contents_.inline_size(); current_chunk_ = {cord->contents_.data(), bytes_remaining_}; @@ -1490,11 +1603,11 @@ inline void Cord::ForEachChunk( if (rep == nullptr) { callback(absl::string_view(contents_.data(), contents_.size())); } else { - return ForEachChunkAux(rep, callback); + ForEachChunkAux(rep, callback); } } -// Nonmember Cord-to-Cord relational operarators. +// Nonmember Cord-to-Cord relational operators. inline bool operator==(const Cord& lhs, const Cord& rhs) { if (lhs.contents_.IsSame(rhs.contents_)) return true; size_t rhs_size = rhs.size(); diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/cord_buffer.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/cord_buffer.cc new file mode 100644 index 0000000000..fad6269cb9 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/cord_buffer.cc @@ -0,0 +1,30 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "absl/strings/cord_buffer.h" + +#include + +#include "absl/base/config.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN + +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL +constexpr size_t CordBuffer::kDefaultLimit; +constexpr size_t CordBuffer::kCustomLimit; +#endif + +ABSL_NAMESPACE_END +} // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/cord_buffer.h b/TMessagesProj/jni/voip/webrtc/absl/strings/cord_buffer.h new file mode 100644 index 0000000000..15494b31e0 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/cord_buffer.h @@ -0,0 +1,575 @@ +// Copyright 2021 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// +// ----------------------------------------------------------------------------- +// File: cord_buffer.h +// ----------------------------------------------------------------------------- +// +// This file defines an `absl::CordBuffer` data structure to hold data for +// eventual inclusion within an existing `Cord` data structure. Cord buffers are +// useful for building large Cords that may require custom allocation of its +// associated memory. +// +#ifndef ABSL_STRINGS_CORD_BUFFER_H_ +#define ABSL_STRINGS_CORD_BUFFER_H_ + +#include +#include +#include +#include +#include +#include + +#include "absl/base/config.h" +#include "absl/base/macros.h" +#include "absl/numeric/bits.h" +#include "absl/strings/internal/cord_internal.h" +#include "absl/strings/internal/cord_rep_flat.h" +#include "absl/types/span.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN + +class Cord; +class CordBufferTestPeer; + +// CordBuffer +// +// CordBuffer manages memory buffers for purposes such as zero-copy APIs as well +// as applications building cords with large data requiring granular control +// over the allocation and size of cord data. For example, a function creating +// a cord of random data could use a CordBuffer as follows: +// +// absl::Cord CreateRandomCord(size_t length) { +// absl::Cord cord; +// while (length > 0) { +// CordBuffer buffer = CordBuffer::CreateWithDefaultLimit(length); +// absl::Span data = buffer.available_up_to(length); +// FillRandomValues(data.data(), data.size()); +// buffer.IncreaseLengthBy(data.size()); +// cord.Append(std::move(buffer)); +// length -= data.size(); +// } +// return cord; +// } +// +// CordBuffer instances are by default limited to a capacity of `kDefaultLimit` +// bytes. `kDefaultLimit` is currently just under 4KiB, but this default may +// change in the future and/or for specific architectures. The default limit is +// aimed to provide a good trade-off between performance and memory overhead. +// Smaller buffers typically incur more compute cost while larger buffers are +// more CPU efficient but create significant memory overhead because of such +// allocations being less granular. Using larger buffers may also increase the +// risk of memory fragmentation. +// +// Applications create a buffer using one of the `CreateWithDefaultLimit()` or +// `CreateWithCustomLimit()` methods. The returned instance will have a non-zero +// capacity and a zero length. Applications use the `data()` method to set the +// contents of the managed memory, and once done filling the buffer, use the +// `IncreaseLengthBy()` or 'SetLength()' method to specify the length of the +// initialized data before adding the buffer to a Cord. +// +// The `CreateWithCustomLimit()` method is intended for applications needing +// larger buffers than the default memory limit, allowing the allocation of up +// to a capacity of `kCustomLimit` bytes minus some minimum internal overhead. +// The usage of `CreateWithCustomLimit()` should be limited to only those use +// cases where the distribution of the input is relatively well known, and/or +// where the trade-off between the efficiency gains outweigh the risk of memory +// fragmentation. See the documentation for `CreateWithCustomLimit()` for more +// information on using larger custom limits. 
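To complement the `CreateRandomCord` example above, a minimal end-to-end sketch of the `data()` / `SetLength()` flow described in the class comment; `MakeGreetingCord` and its payload are purely illustrative, and `<cstring>` plus this header are assumed to be included.

  absl::Cord MakeGreetingCord() {
    static constexpr char kGreeting[] = "hello cord";  // 10 payload bytes
    absl::CordBuffer buffer =
        absl::CordBuffer::CreateWithDefaultLimit(sizeof(kGreeting) - 1);
    memcpy(buffer.data(), kGreeting, sizeof(kGreeting) - 1);
    buffer.SetLength(sizeof(kGreeting) - 1);
    absl::Cord cord;
    cord.Append(std::move(buffer));
    return cord;  // cord.size() == 10
  }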
+// +// The capacity of a `CordBuffer` returned by one of the `Create` methods may +// be larger than the requested capacity due to rounding, alignment and +// granularity of the memory allocator. Applications should use the `capacity` +// method to obtain the effective capacity of the returned instance as +// demonstrated in the provided example above. +// +// CordBuffer is a move-only class. All references into the managed memory are +// invalidated when an instance is moved into either another CordBuffer instance +// or a Cord. Writing to a location obtained by a previous call to `data()` +// after an instance was moved will lead to undefined behavior. +// +// A `moved from` CordBuffer instance will have a valid, but empty state. +// CordBuffer is thread compatible. +class CordBuffer { + public: + // kDefaultLimit + // + // Default capacity limits of allocated CordBuffers. + // See the class comments for more information on allocation limits. + static constexpr size_t kDefaultLimit = cord_internal::kMaxFlatLength; + + // kCustomLimit + // + // Maximum size for CreateWithCustomLimit() allocated buffers. + // Note that the effective capacity may be slightly less + // because of internal overhead of internal cord buffers. + static constexpr size_t kCustomLimit = 64U << 10; + + // Constructors, Destructors and Assignment Operators + + // Creates an empty CordBuffer. + CordBuffer() = default; + + // Destroys this CordBuffer instance and, if not empty, releases any memory + // managed by this instance, invalidating previously returned references. + ~CordBuffer(); + + // CordBuffer is move-only + CordBuffer(CordBuffer&& rhs) noexcept; + CordBuffer& operator=(CordBuffer&&) noexcept; + CordBuffer(const CordBuffer&) = delete; + CordBuffer& operator=(const CordBuffer&) = delete; + + // CordBuffer::MaximumPayload() + // + // Returns the guaranteed maximum payload for a CordBuffer returned by the + // `CreateWithDefaultLimit()` method. While small, each internal buffer inside + // a Cord incurs an overhead to manage the length, type and reference count + // for the buffer managed inside the cord tree. Applications can use this + // method to get approximate number of buffers required for a given byte + // size, etc. + // + // For example: + // const size_t payload = absl::CordBuffer::MaximumPayload(); + // const size_t buffer_count = (total_size + payload - 1) / payload; + // buffers.reserve(buffer_count); + static constexpr size_t MaximumPayload(); + + // Overload to the above `MaximumPayload()` except that it returns the + // maximum payload for a CordBuffer returned by the `CreateWithCustomLimit()` + // method given the provided `block_size`. + static constexpr size_t MaximumPayload(size_t block_size); + + // CordBuffer::CreateWithDefaultLimit() + // + // Creates a CordBuffer instance of the desired `capacity`, capped at the + // default limit `kDefaultLimit`. The returned buffer has a guaranteed + // capacity of at least `min(kDefaultLimit, capacity)`. See the class comments + // for more information on buffer capacities and intended usage. + static CordBuffer CreateWithDefaultLimit(size_t capacity); + + + // CordBuffer::CreateWithCustomLimit() + // + // Creates a CordBuffer instance of the desired `capacity` rounded to an + // appropriate power of 2 size less than, or equal to `block_size`. + // Requires `block_size` to be a power of 2. 
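A rough worked illustration of the rounding rules spelled out in the paragraphs that follow, assuming a 64-bit build with the 13-byte flat overhead mentioned below (exact sizes may differ):

  absl::CordBuffer::CreateWithCustomLimit(64 << 10, 80000).capacity();  // 65536 - 13 == 65523
  absl::CordBuffer::CreateWithCustomLimit(64 << 10, 20000).capacity();  // typically 16384 - 13 == 16371
  absl::CordBuffer::CreateWithCustomLimit(64 << 10, 3000).capacity();   // same as CreateWithDefaultLimit(3000)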
+ // + // If `capacity` is less than or equal to `kDefaultLimit`, then this method + // behaves identical to `CreateWithDefaultLimit`, which means that the caller + // is guaranteed to get a buffer of at least the requested capacity. + // + // If `capacity` is greater than or equal to `block_size`, then this method + // returns a buffer with an `allocated size` of `block_size` bytes. Otherwise, + // this methods returns a buffer with a suitable smaller power of 2 block size + // to satisfy the request. The actual size depends on a number of factors, and + // is typically (but not necessarily) the highest or second highest power of 2 + // value less than or equal to `capacity`. + // + // The 'allocated size' includes a small amount of overhead required for + // internal state, which is currently 13 bytes on 64-bit platforms. For + // example: a buffer created with `block_size` and `capacity' set to 8KiB + // will have an allocated size of 8KiB, and an effective internal `capacity` + // of 8KiB - 13 = 8179 bytes. + // + // To demonstrate this in practice, let's assume we want to read data from + // somewhat larger files using approximately 64KiB buffers: + // + // absl::Cord ReadFromFile(int fd, size_t n) { + // absl::Cord cord; + // while (n > 0) { + // CordBuffer buffer = CordBuffer::CreateWithCustomLimit(64 << 10, n); + // absl::Span data = buffer.available_up_to(n); + // ReadFileDataOrDie(fd, data.data(), data.size()); + // buffer.IncreaseLengthBy(data.size()); + // cord.Append(std::move(buffer)); + // n -= data.size(); + // } + // return cord; + // } + // + // If we'd use this function to read a file of 659KiB, we may get the + // following pattern of allocated cord buffer sizes: + // + // CreateWithCustomLimit(64KiB, 674816) --> ~64KiB (65523) + // CreateWithCustomLimit(64KiB, 674816) --> ~64KiB (65523) + // ... + // CreateWithCustomLimit(64KiB, 19586) --> ~16KiB (16371) + // CreateWithCustomLimit(64KiB, 3215) --> 3215 (at least 3215) + // + // The reason the method returns a 16K buffer instead of a roughly 19K buffer + // is to reduce memory overhead and fragmentation risks. Using carefully + // chosen power of 2 values reduces the entropy of allocated memory sizes. + // + // Additionally, let's assume we'd use the above function on files that are + // generally smaller than 64K. If we'd use 'precise' sized buffers for such + // files, than we'd get a very wide distribution of allocated memory sizes + // rounded to 4K page sizes, and we'd end up with a lot of unused capacity. + // + // In general, application should only use custom sizes if the data they are + // consuming or storing is expected to be many times the chosen block size, + // and be based on objective data and performance metrics. For example, a + // compress function may work faster and consume less CPU when using larger + // buffers. Such an application should pick a size offering a reasonable + // trade-off between expected data size, compute savings with larger buffers, + // and the cost or fragmentation effect of larger buffers. + // Applications must pick a reasonable spot on that curve, and make sure their + // data meets their expectations in size distributions such as "mostly large". + static CordBuffer CreateWithCustomLimit(size_t block_size, size_t capacity); + + // CordBuffer::available() + // + // Returns the span delineating the available capacity in this buffer + // which is defined as `{ data() + length(), capacity() - length() }`. 
+ absl::Span available(); + + // CordBuffer::available_up_to() + // + // Returns the span delineating the available capacity in this buffer limited + // to `size` bytes. This is equivalent to `available().subspan(0, size)`. + absl::Span available_up_to(size_t size); + + // CordBuffer::data() + // + // Returns a non-null reference to the data managed by this instance. + // Applications are allowed to write up to `capacity` bytes of instance data. + // CordBuffer data is uninitialized by default. Reading data from an instance + // that has not yet been initialized will lead to undefined behavior. + char* data(); + const char* data() const; + + // CordBuffer::length() + // + // Returns the length of this instance. The default length of a CordBuffer is + // 0, indicating an 'empty' CordBuffer. Applications must specify the length + // of the data in a CordBuffer before adding it to a Cord. + size_t length() const; + + // CordBuffer::capacity() + // + // Returns the capacity of this instance. All instances have a non-zero + // capacity: default and `moved from` instances have a small internal buffer. + size_t capacity() const; + + // CordBuffer::IncreaseLengthBy() + // + // Increases the length of this buffer by the specified 'n' bytes. + // Applications must make sure all data in this buffer up to the new length + // has been initialized before adding a CordBuffer to a Cord: failure to do so + // will lead to undefined behavior. Requires `length() + n <= capacity()`. + // Typically, applications will use 'available_up_to()` to get a span of the + // desired capacity, and use `span.size()` to increase the length as in: + // absl::Span span = buffer.available_up_to(desired); + // buffer.IncreaseLengthBy(span.size()); + // memcpy(span.data(), src, span.size()); + // etc... + void IncreaseLengthBy(size_t n); + + // CordBuffer::SetLength() + // + // Sets the data length of this instance. Applications must make sure all data + // of the specified length has been initialized before adding a CordBuffer to + // a Cord: failure to do so will lead to undefined behavior. + // Setting the length to a small value or zero does not release any memory + // held by this CordBuffer instance. Requires `length <= capacity()`. + // Applications should preferably use the `IncreaseLengthBy()` method above + // in combination with the 'available()` or `available_up_to()` methods. + void SetLength(size_t length); + + private: + // Make sure we don't accidentally over promise. + static_assert(kCustomLimit <= cord_internal::kMaxLargeFlatSize, ""); + + // Assume the cost of an 'uprounded' allocation to CeilPow2(size) versus + // the cost of allocating at least 1 extra flat <= 4KB: + // - Flat overhead = 13 bytes + // - Btree amortized cost / node =~ 13 bytes + // - 64 byte granularity of tcmalloc at 4K =~ 32 byte average + // CPU cost and efficiency requires we should at least 'save' something by + // splitting, as a poor man's measure, we say the slop needs to be + // at least double the cost offset to make it worth splitting: ~128 bytes. + static constexpr size_t kMaxPageSlop = 128; + + // Overhead for allocation a flat. + static constexpr size_t kOverhead = cord_internal::kFlatOverhead; + + using CordRepFlat = cord_internal::CordRepFlat; + + // `Rep` is the internal data representation of a CordBuffer. The internal + // representation has an internal small size optimization similar to + // std::string (SSO). 
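+  //
+  // Editorial sketch of the discriminator used by `Rep` (derived from the
+  // code below, not upstream commentary): heap-allocated CordRep pointers are
+  // at least two-byte aligned, so the byte overlapping `Short::raw_size` has
+  // a clear least significant bit for the `Long` (pointer) form, while the
+  // short form stores `(length << 1) | 1`, keeping that bit set. For example:
+  //
+  //   short form, length 5:  raw_size == (5 << 1) | 1 == 11  // is_short()
+  //   long form (CordRep*):  overlapping byte is even        // !is_short()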
+ struct Rep { + // Inline SSO size of a CordBuffer + static constexpr size_t kInlineCapacity = sizeof(intptr_t) * 2 - 1; + + // Creates a default instance with kInlineCapacity. + Rep() : short_rep{} {} + + // Creates an instance managing an allocated non zero CordRep. + explicit Rep(cord_internal::CordRepFlat* rep) : long_rep{rep} { + assert(rep != nullptr); + } + + // Returns true if this instance manages the SSO internal buffer. + bool is_short() const { + constexpr size_t offset = offsetof(Short, raw_size); + return (reinterpret_cast(this)[offset] & 1) != 0; + } + + // Returns the available area of the internal SSO data + absl::Span short_available() { + const size_t length = short_length(); + return absl::Span(short_rep.data + length, + kInlineCapacity - length); + } + + // Returns the available area of the internal SSO data + absl::Span long_available() { + assert(!is_short()); + const size_t length = long_rep.rep->length; + return absl::Span(long_rep.rep->Data() + length, + long_rep.rep->Capacity() - length); + } + + // Returns the length of the internal SSO data. + size_t short_length() const { + assert(is_short()); + return static_cast(short_rep.raw_size >> 1); + } + + // Sets the length of the internal SSO data. + // Disregards any previously set CordRep instance. + void set_short_length(size_t length) { + short_rep.raw_size = static_cast((length << 1) + 1); + } + + // Adds `n` to the current short length. + void add_short_length(size_t n) { + assert(is_short()); + short_rep.raw_size += static_cast(n << 1); + } + + // Returns reference to the internal SSO data buffer. + char* data() { + assert(is_short()); + return short_rep.data; + } + const char* data() const { + assert(is_short()); + return short_rep.data; + } + + // Returns a pointer the external CordRep managed by this instance. + cord_internal::CordRepFlat* rep() const { + assert(!is_short()); + return long_rep.rep; + } + + // The internal representation takes advantage of the fact that allocated + // memory is always on an even address, and uses the least significant bit + // of the first or last byte (depending on endianness) as the inline size + // indicator overlapping with the least significant byte of the CordRep*. +#if defined(ABSL_IS_BIG_ENDIAN) + struct Long { + explicit Long(cord_internal::CordRepFlat* rep_arg) : rep(rep_arg) {} + void* padding; + cord_internal::CordRepFlat* rep; + }; + struct Short { + char data[sizeof(Long) - 1]; + char raw_size = 1; + }; +#else + struct Long { + explicit Long(cord_internal::CordRepFlat* rep_arg) : rep(rep_arg) {} + cord_internal::CordRepFlat* rep; + void* padding; + }; + struct Short { + char raw_size = 1; + char data[sizeof(Long) - 1]; + }; +#endif + + union { + Long long_rep; + Short short_rep; + }; + }; + + // Power2 functions + static bool IsPow2(size_t size) { return absl::has_single_bit(size); } + static size_t Log2Floor(size_t size) { + return static_cast(absl::bit_width(size) - 1); + } + static size_t Log2Ceil(size_t size) { + return static_cast(absl::bit_width(size - 1)); + } + + // Implementation of `CreateWithCustomLimit()`. + // This implementation allows for future memory allocation hints to + // be passed down into the CordRepFlat allocation function. + template + static CordBuffer CreateWithCustomLimitImpl(size_t block_size, + size_t capacity, + AllocationHints... hints); + + // Consumes the value contained in this instance and resets the instance. 
+ // This method returns a non-null Cordrep* if the current instances manages a + // CordRep*, and resets the instance to an empty SSO instance. If the current + // instance is an SSO instance, then this method returns nullptr and sets + // `short_value` to the inlined data value. In either case, the current + // instance length is reset to zero. + // This method is intended to be used by Cord internal functions only. + cord_internal::CordRep* ConsumeValue(absl::string_view& short_value) { + cord_internal::CordRep* rep = nullptr; + if (rep_.is_short()) { + short_value = absl::string_view(rep_.data(), rep_.short_length()); + } else { + rep = rep_.rep(); + } + rep_.set_short_length(0); + return rep; + } + + // Internal constructor. + explicit CordBuffer(cord_internal::CordRepFlat* rep) : rep_(rep) { + assert(rep != nullptr); + } + + Rep rep_; + + friend class Cord; + friend class CordBufferTestPeer; +}; + +inline constexpr size_t CordBuffer::MaximumPayload() { + return cord_internal::kMaxFlatLength; +} + +inline constexpr size_t CordBuffer::MaximumPayload(size_t block_size) { + // TODO(absl-team): Use std::min when C++11 support is dropped. + return (kCustomLimit < block_size ? kCustomLimit : block_size) - + cord_internal::kFlatOverhead; +} + +inline CordBuffer CordBuffer::CreateWithDefaultLimit(size_t capacity) { + if (capacity > Rep::kInlineCapacity) { + auto* rep = cord_internal::CordRepFlat::New(capacity); + rep->length = 0; + return CordBuffer(rep); + } + return CordBuffer(); +} + +template +inline CordBuffer CordBuffer::CreateWithCustomLimitImpl( + size_t block_size, size_t capacity, AllocationHints... hints) { + assert(IsPow2(block_size)); + capacity = (std::min)(capacity, kCustomLimit); + block_size = (std::min)(block_size, kCustomLimit); + if (capacity + kOverhead >= block_size) { + capacity = block_size; + } else if (capacity <= kDefaultLimit) { + capacity = capacity + kOverhead; + } else if (!IsPow2(capacity)) { + // Check if rounded up to next power 2 is a good enough fit + // with limited waste making it an acceptable direct fit. + const size_t rounded_up = size_t{1} << Log2Ceil(capacity); + const size_t slop = rounded_up - capacity; + if (slop >= kOverhead && slop <= kMaxPageSlop + kOverhead) { + capacity = rounded_up; + } else { + // Round down to highest power of 2 <= capacity. + // Consider a more aggressive step down if that may reduce the + // risk of fragmentation where 'people are holding it wrong'. + const size_t rounded_down = size_t{1} << Log2Floor(capacity); + capacity = rounded_down; + } + } + const size_t length = capacity - kOverhead; + auto* rep = CordRepFlat::New(CordRepFlat::Large(), length, hints...); + rep->length = 0; + return CordBuffer(rep); +} + +inline CordBuffer CordBuffer::CreateWithCustomLimit(size_t block_size, + size_t capacity) { + return CreateWithCustomLimitImpl(block_size, capacity); +} + +inline CordBuffer::~CordBuffer() { + if (!rep_.is_short()) { + cord_internal::CordRepFlat::Delete(rep_.rep()); + } +} + +inline CordBuffer::CordBuffer(CordBuffer&& rhs) noexcept : rep_(rhs.rep_) { + rhs.rep_.set_short_length(0); +} + +inline CordBuffer& CordBuffer::operator=(CordBuffer&& rhs) noexcept { + if (!rep_.is_short()) cord_internal::CordRepFlat::Delete(rep_.rep()); + rep_ = rhs.rep_; + rhs.rep_.set_short_length(0); + return *this; +} + +inline absl::Span CordBuffer::available() { + return rep_.is_short() ? 
rep_.short_available() : rep_.long_available(); +} + +inline absl::Span CordBuffer::available_up_to(size_t size) { + return available().subspan(0, size); +} + +inline char* CordBuffer::data() { + return rep_.is_short() ? rep_.data() : rep_.rep()->Data(); +} + +inline const char* CordBuffer::data() const { + return rep_.is_short() ? rep_.data() : rep_.rep()->Data(); +} + +inline size_t CordBuffer::capacity() const { + return rep_.is_short() ? Rep::kInlineCapacity : rep_.rep()->Capacity(); +} + +inline size_t CordBuffer::length() const { + return rep_.is_short() ? rep_.short_length() : rep_.rep()->length; +} + +inline void CordBuffer::SetLength(size_t length) { + ABSL_HARDENING_ASSERT(length <= capacity()); + if (rep_.is_short()) { + rep_.set_short_length(length); + } else { + rep_.rep()->length = length; + } +} + +inline void CordBuffer::IncreaseLengthBy(size_t n) { + ABSL_HARDENING_ASSERT(n <= capacity() && length() + n <= capacity()); + if (rep_.is_short()) { + rep_.add_short_length(n); + } else { + rep_.rep()->length += n; + } +} + +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_STRINGS_CORD_BUFFER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/cord_ring_reader_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/cord_ring_reader_test.cc index d9a9a76d1e..8e7183bff0 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/cord_ring_reader_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/cord_ring_reader_test.cc @@ -126,7 +126,7 @@ TEST(CordRingReaderTest, SeekForward) { reader.Reset(ring); size_t consumed = 0; - size_t remaining = ring->length;; + size_t remaining = ring->length; for (int i = 0; i < flats.size(); ++i) { CordRepRing::index_type index = ring->advance(head, i); size_t offset = consumed; diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/cord_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/cord_test.cc index 9dcc4ce505..a4fa8955d3 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/cord_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/cord_test.cc @@ -28,7 +28,6 @@ #include "gmock/gmock.h" #include "gtest/gtest.h" -#include "absl/base/casts.h" #include "absl/base/config.h" #include "absl/base/internal/endian.h" #include "absl/base/internal/raw_logging.h" @@ -56,6 +55,7 @@ using absl::cord_internal::CordRepCrc; using absl::cord_internal::CordRepExternal; using absl::cord_internal::CordRepFlat; using absl::cord_internal::CordRepSubstring; +using absl::cord_internal::CordzUpdateTracker; using absl::cord_internal::kFlatOverhead; using absl::cord_internal::kMaxFlatLength; @@ -212,14 +212,9 @@ class CordTestPeer { ABSL_RAW_CHECK(src.ExpectedChecksum() == absl::nullopt, "Can not be hardened"); Cord cord; - auto* rep = new cord_internal::CordRepSubstring; - rep->tag = cord_internal::SUBSTRING; - rep->child = cord_internal::CordRep::Ref( - cord_internal::SkipCrcNode(src.contents_.tree())); - rep->start = offset; - rep->length = length; - cord.contents_.EmplaceTree(rep, - cord_internal::CordzUpdateTracker::kSubCord); + auto* tree = cord_internal::SkipCrcNode(src.contents_.tree()); + auto* rep = CordRepSubstring::Create(CordRep::Ref(tree), offset, length); + cord.contents_.EmplaceTree(rep, CordzUpdateTracker::kSubCord); return cord; } }; @@ -601,6 +596,315 @@ TEST_P(CordTest, CopyToString) { "copying ", "to ", "a ", "string."}))); } +TEST_P(CordTest, AppendEmptyBuffer) { + absl::Cord cord; + cord.Append(absl::CordBuffer()); + cord.Append(absl::CordBuffer::CreateWithDefaultLimit(2000)); +} + +TEST_P(CordTest, AppendEmptyBufferToFlat) { 
+ absl::Cord cord(std::string(2000, 'x')); + cord.Append(absl::CordBuffer()); + cord.Append(absl::CordBuffer::CreateWithDefaultLimit(2000)); +} + +TEST_P(CordTest, AppendEmptyBufferToTree) { + absl::Cord cord(std::string(2000, 'x')); + cord.Append(std::string(2000, 'y')); + cord.Append(absl::CordBuffer()); + cord.Append(absl::CordBuffer::CreateWithDefaultLimit(2000)); +} + +TEST_P(CordTest, AppendSmallBuffer) { + absl::Cord cord; + absl::CordBuffer buffer = absl::CordBuffer::CreateWithDefaultLimit(3); + ASSERT_THAT(buffer.capacity(), ::testing::Le(15)); + memcpy(buffer.data(), "Abc", 3); + buffer.SetLength(3); + cord.Append(std::move(buffer)); + EXPECT_EQ(buffer.length(), 0); // NOLINT + EXPECT_GT(buffer.capacity(), 0); // NOLINT + + buffer = absl::CordBuffer::CreateWithDefaultLimit(3); + memcpy(buffer.data(), "defgh", 5); + buffer.SetLength(5); + cord.Append(std::move(buffer)); + EXPECT_EQ(buffer.length(), 0); // NOLINT + EXPECT_GT(buffer.capacity(), 0); // NOLINT + + EXPECT_THAT(cord.Chunks(), ::testing::ElementsAre("Abcdefgh")); +} + +TEST_P(CordTest, AppendAndPrependBufferArePrecise) { + // Create a cord large enough to force 40KB flats. + std::string test_data(absl::cord_internal::kMaxFlatLength * 10, 'x'); + absl::Cord cord1(test_data); + absl::Cord cord2(test_data); + const size_t size1 = cord1.EstimatedMemoryUsage(); + const size_t size2 = cord2.EstimatedMemoryUsage(); + + absl::CordBuffer buffer = absl::CordBuffer::CreateWithDefaultLimit(3); + memcpy(buffer.data(), "Abc", 3); + buffer.SetLength(3); + cord1.Append(std::move(buffer)); + + buffer = absl::CordBuffer::CreateWithDefaultLimit(3); + memcpy(buffer.data(), "Abc", 3); + buffer.SetLength(3); + cord2.Prepend(std::move(buffer)); + +#ifndef NDEBUG + // Allow 32 bytes new CordRepFlat, and 128 bytes for 'glue nodes' + constexpr size_t kMaxDelta = 128 + 32; +#else + // Allow 256 bytes extra for 'allocation debug overhead' + constexpr size_t kMaxDelta = 128 + 32 + 256; +#endif + + EXPECT_LE(cord1.EstimatedMemoryUsage() - size1, kMaxDelta); + EXPECT_LE(cord2.EstimatedMemoryUsage() - size2, kMaxDelta); + + EXPECT_EQ(cord1, absl::StrCat(test_data, "Abc")); + EXPECT_EQ(cord2, absl::StrCat("Abc", test_data)); +} + +TEST_P(CordTest, PrependSmallBuffer) { + absl::Cord cord; + absl::CordBuffer buffer = absl::CordBuffer::CreateWithDefaultLimit(3); + ASSERT_THAT(buffer.capacity(), ::testing::Le(15)); + memcpy(buffer.data(), "Abc", 3); + buffer.SetLength(3); + cord.Prepend(std::move(buffer)); + EXPECT_EQ(buffer.length(), 0); // NOLINT + EXPECT_GT(buffer.capacity(), 0); // NOLINT + + buffer = absl::CordBuffer::CreateWithDefaultLimit(3); + memcpy(buffer.data(), "defgh", 5); + buffer.SetLength(5); + cord.Prepend(std::move(buffer)); + EXPECT_EQ(buffer.length(), 0); // NOLINT + EXPECT_GT(buffer.capacity(), 0); // NOLINT + + EXPECT_THAT(cord.Chunks(), ::testing::ElementsAre("defghAbc")); +} + +TEST_P(CordTest, AppendLargeBuffer) { + absl::Cord cord; + + std::string s1(700, '1'); + absl::CordBuffer buffer = absl::CordBuffer::CreateWithDefaultLimit(s1.size()); + memcpy(buffer.data(), s1.data(), s1.size()); + buffer.SetLength(s1.size()); + cord.Append(std::move(buffer)); + EXPECT_EQ(buffer.length(), 0); // NOLINT + EXPECT_GT(buffer.capacity(), 0); // NOLINT + + std::string s2(1000, '2'); + buffer = absl::CordBuffer::CreateWithDefaultLimit(s2.size()); + memcpy(buffer.data(), s2.data(), s2.size()); + buffer.SetLength(s2.size()); + cord.Append(std::move(buffer)); + EXPECT_EQ(buffer.length(), 0); // NOLINT + EXPECT_GT(buffer.capacity(), 0); // NOLINT + + 
EXPECT_THAT(cord.Chunks(), ::testing::ElementsAre(s1, s2)); +} + +TEST_P(CordTest, PrependLargeBuffer) { + absl::Cord cord; + + std::string s1(700, '1'); + absl::CordBuffer buffer = absl::CordBuffer::CreateWithDefaultLimit(s1.size()); + memcpy(buffer.data(), s1.data(), s1.size()); + buffer.SetLength(s1.size()); + cord.Prepend(std::move(buffer)); + EXPECT_EQ(buffer.length(), 0); // NOLINT + EXPECT_GT(buffer.capacity(), 0); // NOLINT + + std::string s2(1000, '2'); + buffer = absl::CordBuffer::CreateWithDefaultLimit(s2.size()); + memcpy(buffer.data(), s2.data(), s2.size()); + buffer.SetLength(s2.size()); + cord.Prepend(std::move(buffer)); + EXPECT_EQ(buffer.length(), 0); // NOLINT + EXPECT_GT(buffer.capacity(), 0); // NOLINT + + EXPECT_THAT(cord.Chunks(), ::testing::ElementsAre(s2, s1)); +} + +class CordAppendBufferTest : public testing::TestWithParam { + public: + size_t is_default() const { return GetParam(); } + + // Returns human readable string representation of the test parameter. + static std::string ToString(testing::TestParamInfo param) { + return param.param ? "DefaultLimit" : "CustomLimit"; + } + + size_t limit() const { + return is_default() ? absl::CordBuffer::kDefaultLimit + : absl::CordBuffer::kCustomLimit; + } + + size_t maximum_payload() const { + return is_default() ? absl::CordBuffer::MaximumPayload() + : absl::CordBuffer::MaximumPayload(limit()); + } + + absl::CordBuffer GetAppendBuffer(absl::Cord& cord, size_t capacity, + size_t min_capacity = 16) { + return is_default() + ? cord.GetAppendBuffer(capacity, min_capacity) + : cord.GetCustomAppendBuffer(limit(), capacity, min_capacity); + } +}; + +INSTANTIATE_TEST_SUITE_P(WithParam, CordAppendBufferTest, testing::Bool(), + CordAppendBufferTest::ToString); + +TEST_P(CordAppendBufferTest, GetAppendBufferOnEmptyCord) { + absl::Cord cord; + absl::CordBuffer buffer = GetAppendBuffer(cord, 1000); + EXPECT_GE(buffer.capacity(), 1000); + EXPECT_EQ(buffer.length(), 0); +} + +TEST_P(CordAppendBufferTest, GetAppendBufferOnInlinedCord) { + static constexpr int kInlinedSize = sizeof(absl::CordBuffer) - 1; + for (int size : {6, kInlinedSize - 3, kInlinedSize - 2, 1000}) { + absl::Cord cord("Abc"); + absl::CordBuffer buffer = GetAppendBuffer(cord, size, 1); + EXPECT_GE(buffer.capacity(), 3 + size); + EXPECT_EQ(buffer.length(), 3); + EXPECT_EQ(absl::string_view(buffer.data(), buffer.length()), "Abc"); + EXPECT_TRUE(cord.empty()); + } +} + +TEST_P(CordAppendBufferTest, GetAppendBufferOnInlinedCordCapacityCloseToMax) { + // Cover the use case where we have a non empty inlined cord with some size + // 'n', and ask for something like 'uint64_max - k', assuming internal logic + // could overflow on 'uint64_max - k + size', and return a valid, but + // inefficiently smaller buffer if it would provide is the max allowed size. 
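+  //
+  // Editorial illustration of the wraparound being guarded against (the
+  // numbers are hypothetical, not part of the upstream test): with an inlined
+  // size of 3 and a request of SIZE_MAX - 2, a naive sum wraps to zero,
+  //
+  //   size_t request = std::numeric_limits<size_t>::max() - 2;
+  //   size_t naive = request + 3;  // wraps around to 0 on overflow
+  //
+  // so the loop below checks that a valid buffer of at least
+  // maximum_payload() bytes is still returned.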
+ for (size_t dist_from_max = 0; dist_from_max <= 4; ++dist_from_max) { + absl::Cord cord("Abc"); + size_t size = std::numeric_limits::max() - dist_from_max; + absl::CordBuffer buffer = GetAppendBuffer(cord, size, 1); + EXPECT_GE(buffer.capacity(), maximum_payload()); + EXPECT_EQ(buffer.length(), 3); + EXPECT_EQ(absl::string_view(buffer.data(), buffer.length()), "Abc"); + EXPECT_TRUE(cord.empty()); + } +} + +TEST_P(CordAppendBufferTest, GetAppendBufferOnFlat) { + // Create a cord with a single flat and extra capacity + absl::Cord cord; + absl::CordBuffer buffer = absl::CordBuffer::CreateWithDefaultLimit(500); + const size_t expected_capacity = buffer.capacity(); + buffer.SetLength(3); + memcpy(buffer.data(), "Abc", 3); + cord.Append(std::move(buffer)); + + buffer = GetAppendBuffer(cord, 6); + EXPECT_EQ(buffer.capacity(), expected_capacity); + EXPECT_EQ(buffer.length(), 3); + EXPECT_EQ(absl::string_view(buffer.data(), buffer.length()), "Abc"); + EXPECT_TRUE(cord.empty()); +} + +TEST_P(CordAppendBufferTest, GetAppendBufferOnFlatWithoutMinCapacity) { + // Create a cord with a single flat and extra capacity + absl::Cord cord; + absl::CordBuffer buffer = absl::CordBuffer::CreateWithDefaultLimit(500); + buffer.SetLength(30); + memset(buffer.data(), 'x', 30); + cord.Append(std::move(buffer)); + + buffer = GetAppendBuffer(cord, 1000, 900); + EXPECT_GE(buffer.capacity(), 1000); + EXPECT_EQ(buffer.length(), 0); + EXPECT_EQ(cord, std::string(30, 'x')); +} + +TEST_P(CordAppendBufferTest, GetAppendBufferOnTree) { + RandomEngine rng; + for (int num_flats : {2, 3, 100}) { + // Create a cord with `num_flats` flats and extra capacity + absl::Cord cord; + std::string prefix; + std::string last; + for (int i = 0; i < num_flats - 1; ++i) { + prefix += last; + last = RandomLowercaseString(&rng, 10); + absl::CordBuffer buffer = absl::CordBuffer::CreateWithDefaultLimit(500); + buffer.SetLength(10); + memcpy(buffer.data(), last.data(), 10); + cord.Append(std::move(buffer)); + } + absl::CordBuffer buffer = GetAppendBuffer(cord, 6); + EXPECT_GE(buffer.capacity(), 500); + EXPECT_EQ(buffer.length(), 10); + EXPECT_EQ(absl::string_view(buffer.data(), buffer.length()), last); + EXPECT_EQ(cord, prefix); + } +} + +TEST_P(CordAppendBufferTest, GetAppendBufferOnTreeWithoutMinCapacity) { + absl::Cord cord; + for (int i = 0; i < 2; ++i) { + absl::CordBuffer buffer = absl::CordBuffer::CreateWithDefaultLimit(500); + buffer.SetLength(3); + memcpy(buffer.data(), i ? 
"def" : "Abc", 3); + cord.Append(std::move(buffer)); + } + absl::CordBuffer buffer = GetAppendBuffer(cord, 1000, 900); + EXPECT_GE(buffer.capacity(), 1000); + EXPECT_EQ(buffer.length(), 0); + EXPECT_EQ(cord, "Abcdef"); +} + +TEST_P(CordAppendBufferTest, GetAppendBufferOnSubstring) { + // Create a large cord with a single flat and some extra capacity + absl::Cord cord; + absl::CordBuffer buffer = absl::CordBuffer::CreateWithDefaultLimit(500); + buffer.SetLength(450); + memset(buffer.data(), 'x', 450); + cord.Append(std::move(buffer)); + cord.RemovePrefix(1); + + // Deny on substring + buffer = GetAppendBuffer(cord, 6); + EXPECT_EQ(buffer.length(), 0); + EXPECT_EQ(cord, std::string(449, 'x')); +} + +TEST_P(CordAppendBufferTest, GetAppendBufferOnSharedCord) { + // Create a shared cord with a single flat and extra capacity + absl::Cord cord; + absl::CordBuffer buffer = absl::CordBuffer::CreateWithDefaultLimit(500); + buffer.SetLength(3); + memcpy(buffer.data(), "Abc", 3); + cord.Append(std::move(buffer)); + absl::Cord shared_cord = cord; + + // Deny on flat + buffer = GetAppendBuffer(cord, 6); + EXPECT_EQ(buffer.length(), 0); + EXPECT_EQ(cord, "Abc"); + + buffer = absl::CordBuffer::CreateWithDefaultLimit(500); + buffer.SetLength(3); + memcpy(buffer.data(), "def", 3); + cord.Append(std::move(buffer)); + shared_cord = cord; + + // Deny on tree + buffer = GetAppendBuffer(cord, 6); + EXPECT_EQ(buffer.length(), 0); + EXPECT_EQ(cord, "Abcdef"); +} + TEST_P(CordTest, TryFlatEmpty) { absl::Cord c; EXPECT_EQ(c.TryFlat(), ""); @@ -645,15 +949,6 @@ TEST_P(CordTest, TryFlatSubstrExternal) { EXPECT_EQ(sub.TryFlat(), "ell"); } -TEST_P(CordTest, TryFlatSubstrConcat) { - absl::Cord c = absl::MakeFragmentedCord({"hello", " world"}); - absl::Cord sub = absl::CordTestPeer::MakeSubstring(c, 1, c.size() - 1); - MaybeHarden(sub); - EXPECT_EQ(sub.TryFlat(), absl::nullopt); - c.RemovePrefix(1); - EXPECT_EQ(c.TryFlat(), absl::nullopt); -} - TEST_P(CordTest, TryFlatCommonlyAssumedInvariants) { // The behavior tested below is not part of the API contract of Cord, but it's // something we intend to be true in our current implementation. This test @@ -1693,6 +1988,12 @@ TEST_P(CordTest, HugeCord) { // Tests that Append() works ok when handed a self reference TEST_P(CordTest, AppendSelf) { + // Test the empty case. + absl::Cord empty; + MaybeHarden(empty); + empty.Append(empty); + ASSERT_EQ(empty, ""); + // We run the test until data is ~16K // This guarantees it covers small, medium and large data. std::string control_data = "Abc"; @@ -2254,12 +2555,34 @@ class AfterExitCordTester { absl::string_view expected_; }; +// Deliberately prevents the destructor for an absl::Cord from running. The cord +// is accessible via the cord member during the lifetime of the CordLeaker. +// After the CordLeaker is destroyed, pointers to the cord will remain valid +// until the CordLeaker's memory is deallocated. +struct CordLeaker { + union { + absl::Cord cord; + }; + + template + constexpr explicit CordLeaker(const Str& str) : cord(str) {} + + ~CordLeaker() { + // Don't do anything, including running cord's destructor. (cord's + // destructor won't run automatically because cord is hidden inside a + // union.) + } +}; + template void TestConstinitConstructor(Str) { const auto expected = Str::value; // Defined before `cord` to be destroyed after it. 
static AfterExitCordTester exit_tester; // NOLINT - ABSL_CONST_INIT static absl::Cord cord(Str{}); // NOLINT + ABSL_CONST_INIT static CordLeaker cord_leaker(Str{}); // NOLINT + // cord_leaker is static, so this reference will remain valid through the end + // of program execution. + static absl::Cord& cord = cord_leaker.cord; static bool init_exit_tester = exit_tester.Set(&cord, expected); (void)init_exit_tester; @@ -2395,7 +2718,7 @@ class CordMutator { // clang-format off // This array is constant-initialized in conformant compilers. -CordMutator cord_mutators[] ={ +CordMutator cord_mutators[] = { {"clear", [](absl::Cord& c) { c.Clear(); }}, {"overwrite", [](absl::Cord& c) { c = "overwritten"; }}, { @@ -2424,6 +2747,25 @@ CordMutator cord_mutators[] ={ [](absl::Cord& c) { c.Append(c); }, [](absl::Cord& c) { c.RemoveSuffix(c.size() / 2); } }, + { + "append empty string", + [](absl::Cord& c) { c.Append(""); }, + [](absl::Cord& c) { } + }, + { + "append empty cord", + [](absl::Cord& c) { c.Append(absl::Cord()); }, + [](absl::Cord& c) { } + }, + { + "append empty checksummed cord", + [](absl::Cord& c) { + absl::Cord to_append; + to_append.SetExpectedChecksum(999); + c.Append(to_append); + }, + [](absl::Cord& c) { } + }, { "prepend string", [](absl::Cord& c) { c.Prepend("9876543210"); }, @@ -2445,13 +2787,34 @@ CordMutator cord_mutators[] ={ }, [](absl::Cord& c) { c.RemovePrefix(10); } }, + { + "prepend empty string", + [](absl::Cord& c) { c.Prepend(""); }, + [](absl::Cord& c) { } + }, + { + "prepend empty cord", + [](absl::Cord& c) { c.Prepend(absl::Cord()); }, + [](absl::Cord& c) { } + }, + { + "prepend empty checksummed cord", + [](absl::Cord& c) { + absl::Cord to_prepend; + to_prepend.SetExpectedChecksum(999); + c.Prepend(to_prepend); + }, + [](absl::Cord& c) { } + }, { "prepend self", [](absl::Cord& c) { c.Prepend(c); }, [](absl::Cord& c) { c.RemovePrefix(c.size() / 2); } }, - {"remove prefix", [](absl::Cord& c) { c.RemovePrefix(2); }}, - {"remove suffix", [](absl::Cord& c) { c.RemoveSuffix(2); }}, + {"remove prefix", [](absl::Cord& c) { c.RemovePrefix(c.size() / 2); }}, + {"remove suffix", [](absl::Cord& c) { c.RemoveSuffix(c.size() / 2); }}, + {"remove 0-prefix", [](absl::Cord& c) { c.RemovePrefix(0); }}, + {"remove 0-suffix", [](absl::Cord& c) { c.RemoveSuffix(0); }}, {"subcord", [](absl::Cord& c) { c = c.Subcord(1, c.size() - 2); }}, { "swap inline", @@ -2493,6 +2856,12 @@ TEST_P(CordTest, ExpectedChecksum) { EXPECT_EQ(c1.ExpectedChecksum().value_or(0), 12345); EXPECT_EQ(c1, base_value); + // Test that setting an expected checksum again doesn't crash or leak + // memory. + c1.SetExpectedChecksum(12345); + EXPECT_EQ(c1.ExpectedChecksum().value_or(0), 12345); + EXPECT_EQ(c1, base_value); + // CRC persists through copies, assignments, and moves: absl::Cord c1_copy_construct = c1; EXPECT_EQ(c1_copy_construct.ExpectedChecksum().value_or(0), 12345); @@ -2517,6 +2886,13 @@ TEST_P(CordTest, ExpectedChecksum) { c2.SetExpectedChecksum(24680); mutator.Mutate(c2); + + if (c1 == c2) { + // Not a mutation (for example, appending the empty string). + // Whether the checksum is removed is not defined. + continue; + } + EXPECT_EQ(c2.ExpectedChecksum(), absl::nullopt); if (mutator.CanUndo()) { @@ -2586,3 +2962,98 @@ TEST_P(CordTest, ExpectedChecksum) { } } } + +// Test the special cases encountered with an empty checksummed cord. +TEST_P(CordTest, ChecksummedEmptyCord) { + absl::Cord c1; + EXPECT_FALSE(c1.ExpectedChecksum().has_value()); + + // Setting an expected checksum works. 
+ c1.SetExpectedChecksum(12345); + EXPECT_EQ(c1.ExpectedChecksum().value_or(0), 12345); + EXPECT_EQ(c1, ""); + EXPECT_TRUE(c1.empty()); + + // Test that setting an expected checksum again doesn't crash or leak memory. + c1.SetExpectedChecksum(12345); + EXPECT_EQ(c1.ExpectedChecksum().value_or(0), 12345); + EXPECT_EQ(c1, ""); + EXPECT_TRUE(c1.empty()); + + // CRC persists through copies, assignments, and moves: + absl::Cord c1_copy_construct = c1; + EXPECT_EQ(c1_copy_construct.ExpectedChecksum().value_or(0), 12345); + + absl::Cord c1_copy_assign; + c1_copy_assign = c1; + EXPECT_EQ(c1_copy_assign.ExpectedChecksum().value_or(0), 12345); + + absl::Cord c1_move(std::move(c1_copy_assign)); + EXPECT_EQ(c1_move.ExpectedChecksum().value_or(0), 12345); + + EXPECT_EQ(c1.ExpectedChecksum().value_or(0), 12345); + + // A CRC Cord compares equal to its non-CRC value. + EXPECT_EQ(c1, absl::Cord()); + + for (const CordMutator& mutator : cord_mutators) { + SCOPED_TRACE(mutator.Name()); + + // Exercise mutating an empty checksummed cord to catch crashes and exercise + // memory sanitizers. + absl::Cord c2; + c2.SetExpectedChecksum(24680); + mutator.Mutate(c2); + + if (c2.empty()) { + // Not a mutation + continue; + } + EXPECT_EQ(c2.ExpectedChecksum(), absl::nullopt); + + if (mutator.CanUndo()) { + mutator.Undo(c2); + } + } + + absl::Cord c3; + c3.SetExpectedChecksum(999); + const absl::Cord& cc3 = c3; + + // Test that all cord reading operations function in the face of an + // expected checksum. + EXPECT_TRUE(cc3.StartsWith("")); + EXPECT_TRUE(cc3.EndsWith("")); + EXPECT_TRUE(cc3.empty()); + EXPECT_EQ(cc3, ""); + EXPECT_EQ(cc3, absl::Cord()); + EXPECT_EQ(cc3.size(), 0); + EXPECT_EQ(cc3.Compare(absl::Cord()), 0); + EXPECT_EQ(cc3.Compare(c1), 0); + EXPECT_EQ(cc3.Compare(cc3), 0); + EXPECT_EQ(cc3.Compare(""), 0); + EXPECT_EQ(cc3.Compare("wxyz"), -1); + EXPECT_EQ(cc3.Compare(absl::Cord("wxyz")), -1); + EXPECT_EQ(absl::Cord("wxyz").Compare(cc3), 1); + EXPECT_EQ(std::string(cc3), ""); + + std::string dest; + absl::CopyCordToString(cc3, &dest); + EXPECT_EQ(dest, ""); + + for (absl::string_view chunk : cc3.Chunks()) { // NOLINT(unreachable loop) + static_cast(chunk); + GTEST_FAIL() << "no chunks expected"; + } + EXPECT_TRUE(cc3.chunk_begin() == cc3.chunk_end()); + + for (char ch : cc3.Chars()) { // NOLINT(unreachable loop) + static_cast(ch); + GTEST_FAIL() << "no chars expected"; + } + EXPECT_TRUE(cc3.char_begin() == cc3.char_end()); + + EXPECT_EQ(cc3.TryFlat(), ""); + EXPECT_EQ(absl::HashOf(c3), absl::HashOf(absl::Cord())); + EXPECT_EQ(absl::HashOf(c3), absl::HashOf(absl::string_view())); +} diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/escaping.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/escaping.cc index 18b20b83fd..7d97944eba 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/escaping.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/escaping.cc @@ -42,11 +42,11 @@ constexpr bool kUnescapeNulls = false; inline bool is_octal_digit(char c) { return ('0' <= c) && (c <= '7'); } -inline int hex_digit_to_int(char c) { +inline unsigned int hex_digit_to_int(char c) { static_assert('0' == 0x30 && 'A' == 0x41 && 'a' == 0x61, "Character set must be ASCII."); - assert(absl::ascii_isxdigit(c)); - int x = static_cast(c); + assert(absl::ascii_isxdigit(static_cast(c))); + unsigned int x = static_cast(c); if (x > '9') { x += 9; } @@ -121,27 +121,29 @@ bool CUnescapeInternal(absl::string_view source, bool leave_nulls_escaped, case '7': { // octal digit: 1 to 3 digits const char* octal_start = p; - unsigned 
int ch = *p - '0'; - if (p < last_byte && is_octal_digit(p[1])) ch = ch * 8 + *++p - '0'; + unsigned int ch = static_cast(*p - '0'); // digit 1 if (p < last_byte && is_octal_digit(p[1])) - ch = ch * 8 + *++p - '0'; // now points at last digit + ch = ch * 8 + static_cast(*++p - '0'); // digit 2 + if (p < last_byte && is_octal_digit(p[1])) + ch = ch * 8 + static_cast(*++p - '0'); // digit 3 if (ch > 0xff) { if (error) { *error = "Value of \\" + - std::string(octal_start, p + 1 - octal_start) + + std::string(octal_start, + static_cast(p + 1 - octal_start)) + " exceeds 0xff"; } return false; } if ((ch == 0) && leave_nulls_escaped) { // Copy the escape sequence for the null character - const ptrdiff_t octal_size = p + 1 - octal_start; + const size_t octal_size = static_cast(p + 1 - octal_start); *d++ = '\\'; memmove(d, octal_start, octal_size); d += octal_size; break; } - *d++ = ch; + *d++ = static_cast(ch); break; } case 'x': @@ -149,32 +151,34 @@ bool CUnescapeInternal(absl::string_view source, bool leave_nulls_escaped, if (p >= last_byte) { if (error) *error = "String cannot end with \\x"; return false; - } else if (!absl::ascii_isxdigit(p[1])) { + } else if (!absl::ascii_isxdigit(static_cast(p[1]))) { if (error) *error = "\\x cannot be followed by a non-hex digit"; return false; } unsigned int ch = 0; const char* hex_start = p; - while (p < last_byte && absl::ascii_isxdigit(p[1])) + while (p < last_byte && + absl::ascii_isxdigit(static_cast(p[1]))) // Arbitrarily many hex digits ch = (ch << 4) + hex_digit_to_int(*++p); if (ch > 0xFF) { if (error) { *error = "Value of \\" + - std::string(hex_start, p + 1 - hex_start) + + std::string(hex_start, + static_cast(p + 1 - hex_start)) + " exceeds 0xff"; } return false; } if ((ch == 0) && leave_nulls_escaped) { // Copy the escape sequence for the null character - const ptrdiff_t hex_size = p + 1 - hex_start; + const size_t hex_size = static_cast(p + 1 - hex_start); *d++ = '\\'; memmove(d, hex_start, hex_size); d += hex_size; break; } - *d++ = ch; + *d++ = static_cast(ch); break; } case 'u': { @@ -184,18 +188,20 @@ bool CUnescapeInternal(absl::string_view source, bool leave_nulls_escaped, if (p + 4 >= end) { if (error) { *error = "\\u must be followed by 4 hex digits: \\" + - std::string(hex_start, p + 1 - hex_start); + std::string(hex_start, + static_cast(p + 1 - hex_start)); } return false; } for (int i = 0; i < 4; ++i) { // Look one char ahead. - if (absl::ascii_isxdigit(p[1])) { + if (absl::ascii_isxdigit(static_cast(p[1]))) { rune = (rune << 4) + hex_digit_to_int(*++p); // Advance p. } else { if (error) { *error = "\\u must be followed by 4 hex digits: \\" + - std::string(hex_start, p + 1 - hex_start); + std::string(hex_start, + static_cast(p + 1 - hex_start)); } return false; } @@ -220,20 +226,22 @@ bool CUnescapeInternal(absl::string_view source, bool leave_nulls_escaped, if (p + 8 >= end) { if (error) { *error = "\\U must be followed by 8 hex digits: \\" + - std::string(hex_start, p + 1 - hex_start); + std::string(hex_start, + static_cast(p + 1 - hex_start)); } return false; } for (int i = 0; i < 8; ++i) { // Look one char ahead. - if (absl::ascii_isxdigit(p[1])) { + if (absl::ascii_isxdigit(static_cast(p[1]))) { // Don't change rune until we're sure this // is within the Unicode limit, but do advance p. 
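+          // Editorial example (not in the original source): "\U0010FFFF" is
+          // the largest escape accepted here, while "\U00110000" trips the
+          // `newrune > 0x10FFFF` check below and reports "exceeds Unicode
+          // limit (0x10FFFF)".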
uint32_t newrune = (rune << 4) + hex_digit_to_int(*++p); if (newrune > 0x10FFFF) { if (error) { *error = "Value of \\" + - std::string(hex_start, p + 1 - hex_start) + + std::string(hex_start, + static_cast(p + 1 - hex_start)) + " exceeds Unicode limit (0x10FFFF)"; } return false; @@ -243,7 +251,8 @@ bool CUnescapeInternal(absl::string_view source, bool leave_nulls_escaped, } else { if (error) { *error = "\\U must be followed by 8 hex digits: \\" + - std::string(hex_start, p + 1 - hex_start); + std::string(hex_start, + static_cast(p + 1 - hex_start)); } return false; } @@ -291,7 +300,7 @@ bool CUnescapeInternal(absl::string_view source, bool leave_nulls_escaped, error)) { return false; } - dest->erase(dest_size); + dest->erase(static_cast(dest_size)); return true; } @@ -311,7 +320,7 @@ std::string CEscapeInternal(absl::string_view src, bool use_hex, std::string dest; bool last_hex_escape = false; // true if last output char was \xNN. - for (unsigned char c : src) { + for (char c : src) { bool is_hex_escape = false; switch (c) { case '\n': dest.append("\\" "n"); break; @@ -320,28 +329,30 @@ std::string CEscapeInternal(absl::string_view src, bool use_hex, case '\"': dest.append("\\" "\""); break; case '\'': dest.append("\\" "'"); break; case '\\': dest.append("\\" "\\"); break; - default: + default: { // Note that if we emit \xNN and the src character after that is a hex // digit then that digit must be escaped too to prevent it being // interpreted as part of the character code by C. - if ((!utf8_safe || c < 0x80) && - (!absl::ascii_isprint(c) || - (last_hex_escape && absl::ascii_isxdigit(c)))) { + const unsigned char uc = static_cast(c); + if ((!utf8_safe || uc < 0x80) && + (!absl::ascii_isprint(uc) || + (last_hex_escape && absl::ascii_isxdigit(uc)))) { if (use_hex) { dest.append("\\" "x"); - dest.push_back(numbers_internal::kHexChar[c / 16]); - dest.push_back(numbers_internal::kHexChar[c % 16]); + dest.push_back(numbers_internal::kHexChar[uc / 16]); + dest.push_back(numbers_internal::kHexChar[uc % 16]); is_hex_escape = true; } else { dest.append("\\"); - dest.push_back(numbers_internal::kHexChar[c / 64]); - dest.push_back(numbers_internal::kHexChar[(c % 64) / 8]); - dest.push_back(numbers_internal::kHexChar[c % 8]); + dest.push_back(numbers_internal::kHexChar[uc / 64]); + dest.push_back(numbers_internal::kHexChar[(uc % 64) / 8]); + dest.push_back(numbers_internal::kHexChar[uc % 8]); } } else { dest.push_back(c); break; } + } } last_hex_escape = is_hex_escape; } @@ -350,7 +361,7 @@ std::string CEscapeInternal(absl::string_view src, bool use_hex, } /* clang-format off */ -constexpr char c_escaped_len[256] = { +constexpr unsigned char c_escaped_len[256] = { 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 4, 4, 2, 4, 4, // \t, \n, \r 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 1, 1, 2, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, // ", ' @@ -375,7 +386,8 @@ constexpr char c_escaped_len[256] = { // that UTF-8 bytes are not handled specially. 
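+// Editorial worked example (derived from the table above, not upstream text):
+// for src = "a\n\x01" the per-byte costs are c_escaped_len['a'] == 1,
+// c_escaped_len['\n'] == 2 and c_escaped_len[0x01] == 4, so the total escaped
+// length is 1 + 2 + 4 = 7 (the escaped form being "a\\n\\001").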
inline size_t CEscapedLength(absl::string_view src) { size_t escaped_len = 0; - for (unsigned char c : src) escaped_len += c_escaped_len[c]; + for (char c : src) + escaped_len += c_escaped_len[static_cast(c)]; return escaped_len; } @@ -391,8 +403,8 @@ void CEscapeAndAppendInternal(absl::string_view src, std::string* dest) { cur_dest_len + escaped_len); char* append_ptr = &(*dest)[cur_dest_len]; - for (unsigned char c : src) { - int char_len = c_escaped_len[c]; + for (char c : src) { + size_t char_len = c_escaped_len[static_cast(c)]; if (char_len == 1) { *append_ptr++ = c; } else if (char_len == 2) { @@ -424,9 +436,9 @@ void CEscapeAndAppendInternal(absl::string_view src, std::string* dest) { } } else { *append_ptr++ = '\\'; - *append_ptr++ = '0' + c / 64; - *append_ptr++ = '0' + (c % 64) / 8; - *append_ptr++ = '0' + c % 8; + *append_ptr++ = '0' + static_cast(c) / 64; + *append_ptr++ = '0' + (static_cast(c) % 64) / 8; + *append_ptr++ = '0' + static_cast(c) % 8; } } } @@ -440,7 +452,7 @@ bool Base64UnescapeInternal(const char* src_param, size_t szsrc, char* dest, size_t destidx = 0; int decode = 0; int state = 0; - unsigned int ch = 0; + unsigned char ch = 0; unsigned int temp = 0; // If "char" is signed by default, using *src as an array index results in @@ -500,13 +512,13 @@ bool Base64UnescapeInternal(const char* src_param, size_t szsrc, char* dest, // how to handle those cases. GET_INPUT(first, 4); - temp = decode; + temp = static_cast(decode); GET_INPUT(second, 3); - temp = (temp << 6) | decode; + temp = (temp << 6) | static_cast(decode); GET_INPUT(third, 2); - temp = (temp << 6) | decode; + temp = (temp << 6) | static_cast(decode); GET_INPUT(fourth, 1); - temp = (temp << 6) | decode; + temp = (temp << 6) | static_cast(decode); } else { // We really did have four good data bytes, so advance four // characters in the string. @@ -518,11 +530,11 @@ bool Base64UnescapeInternal(const char* src_param, size_t szsrc, char* dest, // temp has 24 bits of input, so write that out as three bytes. if (destidx + 3 > szdest) return false; - dest[destidx + 2] = temp; + dest[destidx + 2] = static_cast(temp); temp >>= 8; - dest[destidx + 1] = temp; + dest[destidx + 1] = static_cast(temp); temp >>= 8; - dest[destidx] = temp; + dest[destidx] = static_cast(temp); destidx += 3; } } else { @@ -583,18 +595,18 @@ bool Base64UnescapeInternal(const char* src_param, size_t szsrc, char* dest, } // Each input character gives us six bits of output. - temp = (temp << 6) | decode; + temp = (temp << 6) | static_cast(decode); ++state; if (state == 4) { // If we've accumulated 24 bits of output, write that out as // three bytes. 
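+        // Editorial note (illustrative): each of the four characters decoded
+        // above contributes 6 bits, so `temp` holds 4 * 6 = 24 bits at this
+        // point; the stores below emit them as (temp >> 16), (temp >> 8) and
+        // temp, most significant byte first.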
if (dest) { if (destidx + 3 > szdest) return false; - dest[destidx + 2] = temp; + dest[destidx + 2] = static_cast(temp); temp >>= 8; - dest[destidx + 1] = temp; + dest[destidx + 1] = static_cast(temp); temp >>= 8; - dest[destidx] = temp; + dest[destidx] = static_cast(temp); } destidx += 3; state = 0; @@ -619,7 +631,7 @@ bool Base64UnescapeInternal(const char* src_param, size_t szsrc, char* dest, if (dest) { if (destidx + 1 > szdest) return false; temp >>= 4; - dest[destidx] = temp; + dest[destidx] = static_cast(temp); } ++destidx; expected_equals = 2; @@ -630,9 +642,9 @@ bool Base64UnescapeInternal(const char* src_param, size_t szsrc, char* dest, if (dest) { if (destidx + 2 > szdest) return false; temp >>= 2; - dest[destidx + 1] = temp; + dest[destidx + 1] = static_cast(temp); temp >>= 8; - dest[destidx] = temp; + dest[destidx] = static_cast(temp); } destidx += 2; expected_equals = 1; @@ -773,7 +785,8 @@ bool Base64UnescapeInternal(const char* src, size_t slen, String* dest, const signed char* unbase64) { // Determine the size of the output string. Base64 encodes every 3 bytes into // 4 characters. any leftover chars are added directly for good measure. - // This is documented in the base64 RFC: http://tools.ietf.org/html/rfc3548 + // This is documented in the base64 RFC: + // https://datatracker.ietf.org/doc/html/rfc3548 const size_t dest_len = 3 * (slen / 4) + (slen % 4); strings_internal::STLStringResizeUninitialized(dest, dest_len); @@ -821,9 +834,9 @@ constexpr char kHexValueLenient[256] = { // or a string. This works because we use the [] operator to access // individual characters at a time. template -void HexStringToBytesInternal(const char* from, T to, ptrdiff_t num) { - for (int i = 0; i < num; i++) { - to[i] = (kHexValueLenient[from[i * 2] & 0xFF] << 4) + +void HexStringToBytesInternal(const char* from, T to, size_t num) { + for (size_t i = 0; i < num; i++) { + to[i] = static_cast(kHexValueLenient[from[i * 2] & 0xFF] << 4) + (kHexValueLenient[from[i * 2 + 1] & 0xFF]); } } @@ -831,7 +844,7 @@ void HexStringToBytesInternal(const char* from, T to, ptrdiff_t num) { // This is a templated function so that T can be either a char* or a // std::string. template -void BytesToHexStringInternal(const unsigned char* src, T dest, ptrdiff_t num) { +void BytesToHexStringInternal(const unsigned char* src, T dest, size_t num) { auto dest_ptr = &dest[0]; for (auto src_ptr = src; src_ptr != (src + num); ++src_ptr, dest_ptr += 2) { const char* hex_p = &numbers_internal::kHexTable[*src_ptr * 2]; @@ -876,8 +889,8 @@ std::string Utf8SafeCHexEscape(absl::string_view src) { // WebSafeBase64Escape() - Google's variation of base64 encoder // // Check out -// http://tools.ietf.org/html/rfc2045 for formal description, but what we -// care about is that... +// https://datatracker.ietf.org/doc/html/rfc2045 for formal description, but +// what we care about is that... // Take the encoded stuff in groups of 4 characters and turn each // character into a code 0 to 63 thus: // A-Z map to 0 to 25 diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/escaping.h b/TMessagesProj/jni/voip/webrtc/absl/strings/escaping.h index f5ca26c5da..aa6d17508c 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/escaping.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/escaping.h @@ -122,6 +122,8 @@ std::string Utf8SafeCHexEscape(absl::string_view src); // Converts a `src` string encoded in Base64 to its binary equivalent, writing // it to a `dest` buffer, returning `true` on success. 
If `src` contains invalid // characters, `dest` is cleared and returns `false`. +// Padding is optional. If padding is included, it must be correct. In the +// padding, '=' and '.' are treated identically. bool Base64Unescape(absl::string_view src, std::string* dest); // WebSafeBase64Unescape() @@ -129,6 +131,8 @@ bool Base64Unescape(absl::string_view src, std::string* dest); // Converts a `src` string encoded in Base64 to its binary equivalent, writing // it to a `dest` buffer, but using '-' instead of '+', and '_' instead of '/'. // If `src` contains invalid characters, `dest` is cleared and returns `false`. +// Padding is optional. If padding is included, it must be correct. In the +// padding, '=' and '.' are treated identically. bool WebSafeBase64Unescape(absl::string_view src, std::string* dest); // Base64Escape() diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/escaping_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/escaping_test.cc index 45671a0ed5..44ffcba7e3 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/escaping_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/escaping_test.cc @@ -617,6 +617,48 @@ TEST(Base64, EscapeAndUnescape) { TestEscapeAndUnescape(); } +TEST(Base64, Padding) { + // Padding is optional. + // '.' is an acceptable padding character, just like '='. + std::initializer_list good_padding = { + "YQ", + "YQ==", + "YQ=.", + "YQ.=", + "YQ..", + }; + for (absl::string_view b64 : good_padding) { + std::string decoded; + EXPECT_TRUE(absl::Base64Unescape(b64, &decoded)); + EXPECT_EQ(decoded, "a"); + std::string websafe_decoded; + EXPECT_TRUE(absl::WebSafeBase64Unescape(b64, &websafe_decoded)); + EXPECT_EQ(websafe_decoded, "a"); + } + std::initializer_list bad_padding = { + "YQ=", + "YQ.", + "YQ===", + "YQ==.", + "YQ=.=", + "YQ=..", + "YQ.==", + "YQ.=.", + "YQ..=", + "YQ...", + "YQ====", + "YQ....", + "YQ=====", + "YQ.....", + }; + for (absl::string_view b64 : bad_padding) { + std::string decoded; + EXPECT_FALSE(absl::Base64Unescape(b64, &decoded)); + std::string websafe_decoded; + EXPECT_FALSE(absl::WebSafeBase64Unescape(b64, &websafe_decoded)); + } +} + TEST(Base64, DISABLED_HugeData) { const size_t kSize = size_t(3) * 1000 * 1000 * 1000; static_assert(kSize % 3 == 0, "kSize must be divisible by 3"); diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/char_map.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/char_map.h index 61484de0b7..5aabc1fc64 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/char_map.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/char_map.h @@ -103,10 +103,9 @@ class Charmap { constexpr Charmap(uint64_t b0, uint64_t b1, uint64_t b2, uint64_t b3) : m_{b0, b1, b2, b3} {} - static constexpr uint64_t RangeForWord(unsigned char lo, unsigned char hi, - uint64_t word) { - return OpenRangeFromZeroForWord(hi + 1, word) & - ~OpenRangeFromZeroForWord(lo, word); + static constexpr uint64_t RangeForWord(char lo, char hi, uint64_t word) { + return OpenRangeFromZeroForWord(static_cast(hi) + 1, word) & + ~OpenRangeFromZeroForWord(static_cast(lo), word); } // All the chars in the specified word of the range [0, upper). @@ -119,13 +118,16 @@ class Charmap { : (~static_cast(0) >> (64 - upper % 64)); } - static constexpr uint64_t CharMaskForWord(unsigned char x, uint64_t word) { - return (x / 64 == word) ? (static_cast(1) << (x % 64)) : 0; + static constexpr uint64_t CharMaskForWord(char x, uint64_t word) { + const auto unsigned_x = static_cast(x); + return (unsigned_x / 64 == word) + ? 
(static_cast(1) << (unsigned_x % 64)) + : 0; } - private: - void SetChar(unsigned char c) { - m_[c / 64] |= static_cast(1) << (c % 64); + void SetChar(char c) { + const auto unsigned_c = static_cast(c); + m_[unsigned_c / 64] |= static_cast(1) << (unsigned_c % 64); } uint64_t m_[4]; diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/charconv_bigint.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/charconv_bigint.cc index ebf8c0791a..282b639eb2 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/charconv_bigint.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/charconv_bigint.cc @@ -242,7 +242,7 @@ int BigUnsigned::ReadDigits(const char* begin, const char* end, // decimal exponent to compensate. --exponent_adjust; } - int digit = (*begin - '0'); + char digit = (*begin - '0'); --significant_digits; if (significant_digits == 0 && std::next(begin) != end && (digit == 0 || digit == 5)) { @@ -255,7 +255,7 @@ int BigUnsigned::ReadDigits(const char* begin, const char* end, // 500000...000000000001 to correctly round up, rather than to nearest. ++digit; } - queued = 10 * queued + digit; + queued = 10 * queued + static_cast(digit); ++digits_queued; if (digits_queued == kMaxSmallPowerOfTen) { MultiplyBy(kTenToNth[kMaxSmallPowerOfTen]); @@ -341,8 +341,8 @@ std::string BigUnsigned::ToString() const { std::string result; // Build result in reverse order while (copy.size() > 0) { - int next_digit = copy.DivMod<10>(); - result.push_back('0' + next_digit); + uint32_t next_digit = copy.DivMod<10>(); + result.push_back('0' + static_cast(next_digit)); } if (result.empty()) { result.push_back('0'); diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/charconv_parse.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/charconv_parse.cc index d29acaf462..98823def83 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/charconv_parse.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/charconv_parse.cc @@ -190,11 +190,11 @@ bool IsDigit<16>(char ch) { template <> unsigned ToDigit<10>(char ch) { - return ch - '0'; + return static_cast(ch - '0'); } template <> unsigned ToDigit<16>(char ch) { - return kAsciiToInt[static_cast(ch)]; + return static_cast(kAsciiToInt[static_cast(ch)]); } template <> diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_internal.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_internal.cc index 06119350ce..b6b06cfa2a 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_internal.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_internal.cc @@ -17,11 +17,13 @@ #include #include +#include "absl/base/internal/raw_logging.h" #include "absl/container/inlined_vector.h" #include "absl/strings/internal/cord_rep_btree.h" #include "absl/strings/internal/cord_rep_crc.h" #include "absl/strings/internal/cord_rep_flat.h" #include "absl/strings/internal/cord_rep_ring.h" +#include "absl/strings/str_cat.h" namespace absl { ABSL_NAMESPACE_BEGIN @@ -33,6 +35,11 @@ ABSL_CONST_INIT std::atomic shallow_subcords_enabled( kCordShallowSubcordsDefault); ABSL_CONST_INIT std::atomic cord_btree_exhaustive_validation(false); +void LogFatalNodeType(CordRep* rep) { + ABSL_INTERNAL_LOG(FATAL, absl::StrCat("Unexpected node type: ", + static_cast(rep->tag))); +} + void CordRep::Destroy(CordRep* rep) { assert(rep != nullptr); diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_internal.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_internal.h index 
8db6aa6d74..fcca3a28cd 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_internal.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_internal.h @@ -21,6 +21,7 @@ #include #include +#include "absl/base/attributes.h" #include "absl/base/config.h" #include "absl/base/internal/endian.h" #include "absl/base/internal/invoke.h" @@ -33,6 +34,19 @@ namespace absl { ABSL_NAMESPACE_BEGIN namespace cord_internal { +// The overhead of a vtable is too much for Cord, so we roll our own subclasses +// using only a single byte to differentiate classes from each other - the "tag" +// byte. Define the subclasses first so we can provide downcasting helper +// functions in the base class. +struct CordRep; +struct CordRepConcat; +struct CordRepExternal; +struct CordRepFlat; +struct CordRepSubstring; +struct CordRepCrc; +class CordRepRing; +class CordRepBtree; + class CordzInfo; // Default feature enable states for cord ring buffers @@ -74,6 +88,9 @@ enum Constants { kMaxBytesToCopy = 511 }; +// Emits a fatal error "Unexpected node type: xyz" and aborts the program. +ABSL_ATTRIBUTE_NORETURN void LogFatalNodeType(CordRep* rep); + // Compact class for tracking the reference count and state flags for CordRep // instances. Data is stored in an atomic int32_t for compactness and speed. class RefcountAndFlags { @@ -112,8 +129,9 @@ class RefcountAndFlags { } // Returns the current reference count using acquire semantics. - inline int32_t Get() const { - return count_.load(std::memory_order_acquire) >> kNumFlags; + inline size_t Get() const { + return static_cast(count_.load(std::memory_order_acquire) >> + kNumFlags); } // Returns whether the atomic integer is 1. @@ -139,7 +157,7 @@ class RefcountAndFlags { // used for the StringConstant constructor to avoid collecting immutable // constant cords. // kReservedFlag is reserved for future use. - enum { + enum Flags { kNumFlags = 2, kImmortalFlag = 0x1, @@ -156,19 +174,6 @@ class RefcountAndFlags { std::atomic count_; }; -// The overhead of a vtable is too much for Cord, so we roll our own subclasses -// using only a single byte to differentiate classes from each other - the "tag" -// byte. Define the subclasses first so we can provide downcasting helper -// functions in the base class. - -struct CordRepConcat; -struct CordRepExternal; -struct CordRepFlat; -struct CordRepSubstring; -struct CordRepCrc; -class CordRepRing; -class CordRepBtree; - // Various representations that we allow enum CordRepKind { UNUSED_0 = 0, @@ -276,6 +281,20 @@ struct CordRep { struct CordRepSubstring : public CordRep { size_t start; // Starting offset of substring in child CordRep* child; + + // Creates a substring on `child`, adopting a reference on `child`. + // Requires `child` to be either a flat or external node, and `pos` and `n` to + // form a non-empty partial sub range of `'child`, i.e.: + // `n > 0 && n < length && n + pos <= length` + static inline CordRepSubstring* Create(CordRep* child, size_t pos, size_t n); + + // Creates a substring of `rep`. Does not adopt a reference on `rep`. + // Requires `IsDataEdge(rep) && n > 0 && pos + n <= rep->length`. + // If `n == rep->length` then this method returns `CordRep::Ref(rep)` + // If `rep` is a substring of a flat or external node, then this method will + // return a new substring of that flat or external node with `pos` adjusted + // with the original `start` position. 
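+  //
+  // Editorial example (sketch, not upstream documentation): if `rep` is a
+  // substring with `start == 5` and `length == 10` over a flat node, then
+  // `Substring(rep, 2, 3)` yields a new substring of that flat node with
+  // `start == 7` and `length == 3`; the offsets are folded together instead
+  // of nesting substring nodes.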
+ static inline CordRep* Substring(CordRep* rep, size_t pos, size_t n); }; // Type for function pointer that will invoke the releaser function and also @@ -339,6 +358,47 @@ struct CordRepExternalImpl } }; +inline CordRepSubstring* CordRepSubstring::Create(CordRep* child, size_t pos, + size_t n) { + assert(child != nullptr); + assert(n > 0); + assert(n < child->length); + assert(pos < child->length); + assert(n <= child->length - pos); + + // TODO(b/217376272): Harden internal logic. + // Move to strategical places inside the Cord logic and make this an assert. + if (ABSL_PREDICT_FALSE(!(child->IsExternal() || child->IsFlat()))) { + LogFatalNodeType(child); + } + + CordRepSubstring* rep = new CordRepSubstring(); + rep->length = n; + rep->tag = SUBSTRING; + rep->start = pos; + rep->child = child; + return rep; +} + +inline CordRep* CordRepSubstring::Substring(CordRep* rep, size_t pos, + size_t n) { + assert(rep != nullptr); + assert(n != 0); + assert(pos < rep->length); + assert(n <= rep->length - pos); + if (n == rep->length) return CordRep::Ref(rep); + if (rep->IsSubstring()) { + pos += rep->substring()->start; + rep = rep->substring()->child; + } + CordRepSubstring* substr = new CordRepSubstring(); + substr->length = n; + substr->tag = SUBSTRING; + substr->start = pos; + substr->child = CordRep::Ref(rep); + return substr; +} + inline void CordRepExternal::Delete(CordRep* rep) { assert(rep != nullptr && rep->IsExternal()); auto* rep_external = static_cast(rep); @@ -352,7 +412,8 @@ struct ConstInitExternalStorage { }; template -CordRepExternal ConstInitExternalStorage::value(Str::value); +ABSL_CONST_INIT CordRepExternal + ConstInitExternalStorage::value(Str::value); enum { kMaxInline = 15, @@ -362,8 +423,8 @@ constexpr char GetOrNull(absl::string_view data, size_t pos) { return pos < data.size() ? data[pos] : '\0'; } -// We store cordz_info as 64 bit pointer value in big endian format. This -// guarantees that the least significant byte of cordz_info matches the last +// We store cordz_info as 64 bit pointer value in little endian format. This +// guarantees that the least significant byte of cordz_info matches the first // byte of the inline data representation in as_chars_, which holds the inlined // size or the 'is_tree' bit. using cordz_info_t = int64_t; @@ -373,14 +434,14 @@ using cordz_info_t = int64_t; static_assert(sizeof(cordz_info_t) * 2 == kMaxInline + 1, ""); static_assert(sizeof(cordz_info_t) >= sizeof(intptr_t), ""); -// BigEndianByte() creates a big endian representation of 'value', i.e.: a big -// endian value where the last byte in the host's representation holds 'value`, -// with all other bytes being 0. -static constexpr cordz_info_t BigEndianByte(unsigned char value) { +// LittleEndianByte() creates a little endian representation of 'value', i.e.: +// a little endian value where the first byte in the host's representation +// holds 'value`, with all other bytes being 0. +static constexpr cordz_info_t LittleEndianByte(unsigned char value) { #if defined(ABSL_IS_BIG_ENDIAN) - return value; -#else return static_cast(value) << ((sizeof(cordz_info_t) - 1) * 8); +#else + return value; #endif } @@ -389,25 +450,37 @@ class InlineData { // DefaultInitType forces the use of the default initialization constructor. enum DefaultInitType { kDefaultInit }; - // kNullCordzInfo holds the big endian representation of intptr_t(1) + // kNullCordzInfo holds the little endian representation of intptr_t(1) // This is the 'null' / initial value of 'cordz_info'. 
The null value // is specifically big endian 1 as with 64-bit pointers, the last // byte of cordz_info overlaps with the last byte holding the tag. - static constexpr cordz_info_t kNullCordzInfo = BigEndianByte(1); + static constexpr cordz_info_t kNullCordzInfo = LittleEndianByte(1); + + // kTagOffset contains the offset of the control byte / tag. This constant is + // intended mostly for debugging purposes: do not remove this constant as it + // is actively inspected and used by gdb pretty printing code. + static constexpr size_t kTagOffset = 0; constexpr InlineData() : as_chars_{0} {} explicit InlineData(DefaultInitType) {} explicit constexpr InlineData(CordRep* rep) : as_tree_(rep) {} explicit constexpr InlineData(absl::string_view chars) - : as_chars_{ - GetOrNull(chars, 0), GetOrNull(chars, 1), - GetOrNull(chars, 2), GetOrNull(chars, 3), - GetOrNull(chars, 4), GetOrNull(chars, 5), - GetOrNull(chars, 6), GetOrNull(chars, 7), - GetOrNull(chars, 8), GetOrNull(chars, 9), - GetOrNull(chars, 10), GetOrNull(chars, 11), - GetOrNull(chars, 12), GetOrNull(chars, 13), - GetOrNull(chars, 14), static_cast((chars.size() << 1))} {} + : as_chars_{static_cast((chars.size() << 1)), + GetOrNull(chars, 0), + GetOrNull(chars, 1), + GetOrNull(chars, 2), + GetOrNull(chars, 3), + GetOrNull(chars, 4), + GetOrNull(chars, 5), + GetOrNull(chars, 6), + GetOrNull(chars, 7), + GetOrNull(chars, 8), + GetOrNull(chars, 9), + GetOrNull(chars, 10), + GetOrNull(chars, 11), + GetOrNull(chars, 12), + GetOrNull(chars, 13), + GetOrNull(chars, 14)} {} // Returns true if the current instance is empty. // The 'empty value' is an inlined data value of zero length. @@ -438,8 +511,8 @@ class InlineData { // Requires the current instance to hold a tree value. CordzInfo* cordz_info() const { assert(is_tree()); - intptr_t info = static_cast( - absl::big_endian::ToHost64(static_cast(as_tree_.cordz_info))); + intptr_t info = static_cast(absl::little_endian::ToHost64( + static_cast(as_tree_.cordz_info))); assert(info & 1); return reinterpret_cast(info - 1); } @@ -451,7 +524,7 @@ class InlineData { assert(is_tree()); uintptr_t info = reinterpret_cast(cordz_info) | 1; as_tree_.cordz_info = - static_cast(absl::big_endian::FromHost64(info)); + static_cast(absl::little_endian::FromHost64(info)); } // Resets the current cordz_info to null / empty. @@ -464,7 +537,7 @@ class InlineData { // Requires the current instance to hold inline data. const char* as_chars() const { assert(!is_tree()); - return as_chars_; + return &as_chars_[1]; } // Returns a mutable pointer to the character data inside this instance. @@ -482,7 +555,7 @@ class InlineData { // // It's an error to read from the returned pointer without a preceding write // if the current instance does not hold inline data, i.e.: is_tree() == true. - char* as_chars() { return as_chars_; } + char* as_chars() { return &as_chars_[1]; } // Returns the tree value of this value. // Requires the current instance to hold a tree value. @@ -510,7 +583,7 @@ class InlineData { // Requires the current instance to hold inline data. size_t inline_size() const { assert(!is_tree()); - return tag() >> 1; + return static_cast(tag()) >> 1; } // Sets the size of the inlined character data inside this instance. @@ -518,26 +591,42 @@ class InlineData { // See the documentation on 'as_chars()' for more information and examples. void set_inline_size(size_t size) { ABSL_ASSERT(size <= kMaxInline); - tag() = static_cast(size << 1); + tag() = static_cast(size << 1); + } + + // Compares 'this' inlined data with rhs. 
The comparison is a straightforward + // lexicographic comparison. `Compare()` returns values as follows: + // + // -1 'this' InlineData instance is smaller + // 0 the InlineData instances are equal + // 1 'this' InlineData instance larger + int Compare(const InlineData& rhs) const { + uint64_t x, y; + memcpy(&x, as_chars(), sizeof(x)); + memcpy(&y, rhs.as_chars(), sizeof(y)); + if (x == y) { + memcpy(&x, as_chars() + 7, sizeof(x)); + memcpy(&y, rhs.as_chars() + 7, sizeof(y)); + if (x == y) { + if (inline_size() == rhs.inline_size()) return 0; + return inline_size() < rhs.inline_size() ? -1 : 1; + } + } + x = absl::big_endian::FromHost64(x); + y = absl::big_endian::FromHost64(y); + return x < y ? -1 : 1; } private: // See cordz_info_t for forced alignment and size of `cordz_info` details. struct AsTree { - explicit constexpr AsTree(absl::cord_internal::CordRep* tree) - : rep(tree), cordz_info(kNullCordzInfo) {} - // This union uses up extra space so that whether rep is 32 or 64 bits, - // cordz_info will still start at the eighth byte, and the last - // byte of cordz_info will still be the last byte of InlineData. - union { - absl::cord_internal::CordRep* rep; - cordz_info_t unused_aligner; - }; - cordz_info_t cordz_info; + explicit constexpr AsTree(absl::cord_internal::CordRep* tree) : rep(tree) {} + cordz_info_t cordz_info = kNullCordzInfo; + absl::cord_internal::CordRep* rep; }; - char& tag() { return reinterpret_cast(this)[kMaxInline]; } - char tag() const { return reinterpret_cast(this)[kMaxInline]; } + int8_t& tag() { return reinterpret_cast(this)[0]; } + int8_t tag() const { return reinterpret_cast(this)[0]; } // If the data has length <= kMaxInline, we store it in `as_chars_`, and // store the size in the last char of `as_chars_` shifted left + 1. @@ -572,7 +661,9 @@ inline const CordRepExternal* CordRep::external() const { } inline CordRep* CordRep::Ref(CordRep* rep) { - assert(rep != nullptr); + // ABSL_ASSUME is a workaround for + // https://gcc.gnu.org/bugzilla/show_bug.cgi?id=105585 + ABSL_ASSUME(rep != nullptr); rep->refcount.Increment(); return rep; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree.cc index 2b592b4726..7ce36128eb 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree.cc @@ -17,6 +17,7 @@ #include #include #include +#include #include #include "absl/base/attributes.h" @@ -33,7 +34,9 @@ namespace absl { ABSL_NAMESPACE_BEGIN namespace cord_internal { -constexpr size_t CordRepBtree::kMaxCapacity; // NOLINT: needed for c++ < c++17 +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL +constexpr size_t CordRepBtree::kMaxCapacity; +#endif namespace { @@ -53,8 +56,10 @@ inline bool exhaustive_validation() { // Prints the entire tree structure or 'rep'. External callers should // not specify 'depth' and leave it to its default (0) value. // Rep may be a CordRepBtree tree, or a SUBSTRING / EXTERNAL / FLAT node. -void DumpAll(const CordRep* rep, bool include_contents, std::ostream& stream, - int depth = 0) { +void DumpAll(const CordRep* rep, + bool include_contents, + std::ostream& stream, + size_t depth = 0) { // Allow for full height trees + substring -> flat / external nodes. 
assert(depth <= CordRepBtree::kMaxDepth + 2); std::string sharing = const_cast(rep)->refcount.IsOne() diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree.h index 0e78e12cd5..eed5609e55 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree.h @@ -95,8 +95,9 @@ class CordRepBtree : public CordRep { // local stack variable compared to Cord's current near 400 bytes stack use. // The maximum `height` value of a node is then `kMaxDepth - 1` as node height // values start with a value of 0 for leaf nodes. - static constexpr int kMaxDepth = 12; - static constexpr int kMaxHeight = kMaxDepth - 1; + static constexpr size_t kMaxDepth = 12; + // See comments on height() for why this is an int and not a size_t. + static constexpr int kMaxHeight = static_cast(kMaxDepth - 1); // `Action` defines the action for unwinding changes done at the btree's leaf // level that need to be propagated up to the parent node(s). Each operation @@ -716,7 +717,7 @@ inline void CordRepBtree::AlignBegin() { // size, and then do overlapping load/store of up to 4 pointers (inlined as // XMM, YMM or ZMM load/store) and up to 2 pointers (XMM / YMM), which is a) // compact and b) not clobbering any registers. - ABSL_INTERNAL_ASSUME(new_end <= kMaxCapacity); + ABSL_ASSUME(new_end <= kMaxCapacity); #ifdef __clang__ #pragma unroll 1 #endif @@ -734,7 +735,7 @@ inline void CordRepBtree::AlignEnd() { const size_t new_end = end() + delta; set_begin(new_begin); set_end(new_end); - ABSL_INTERNAL_ASSUME(new_end <= kMaxCapacity); + ABSL_ASSUME(new_end <= kMaxCapacity); #ifdef __clang__ #pragma unroll 1 #endif diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree_navigator.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree_navigator.cc index 9b896a3d09..6ed20c23a7 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree_navigator.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree_navigator.cc @@ -90,7 +90,7 @@ CordRepBtreeNavigator::Position CordRepBtreeNavigator::Skip(size_t n) { // edges that must be skipped. while (height > 0) { node = edge->btree(); - index_[height] = index; + index_[height] = static_cast(index); node_[--height] = node; index = node->begin(); edge = node->Edge(index); @@ -101,7 +101,7 @@ CordRepBtreeNavigator::Position CordRepBtreeNavigator::Skip(size_t n) { edge = node->Edge(index); } } - index_[0] = index; + index_[0] = static_cast(index); return {edge, n}; } @@ -126,7 +126,7 @@ ReadResult CordRepBtreeNavigator::Read(size_t edge_offset, size_t n) { do { length -= edge->length; while (++index == node->end()) { - index_[height] = index; + index_[height] = static_cast(index); if (++height > height_) { subtree->set_end(subtree_end); if (length == 0) return {subtree, 0}; @@ -154,7 +154,7 @@ ReadResult CordRepBtreeNavigator::Read(size_t edge_offset, size_t n) { // edges that must be read, adding 'down' nodes to `subtree`. 
while (height > 0) { node = edge->btree(); - index_[height] = index; + index_[height] = static_cast(index); node_[--height] = node; index = node->begin(); edge = node->Edge(index); @@ -178,7 +178,7 @@ ReadResult CordRepBtreeNavigator::Read(size_t edge_offset, size_t n) { subtree->edges_[subtree_end++] = Substring(edge, 0, length); } subtree->set_end(subtree_end); - index_[0] = index; + index_[0] = static_cast(index); return {tree, length}; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree_navigator.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree_navigator.h index 971b92eda6..3d581c877e 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree_navigator.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_btree_navigator.h @@ -143,8 +143,8 @@ class CordRepBtreeNavigator { // `index_` and `node_` contain the navigation state as the 'path' to the // current data edge which is at `node_[0]->Edge(index_[0])`. The contents // of these are undefined until the instance is initialized (`height_ >= 0`). - uint8_t index_[CordRepBtree::kMaxHeight]; - CordRepBtree* node_[CordRepBtree::kMaxHeight]; + uint8_t index_[CordRepBtree::kMaxDepth]; + CordRepBtree* node_[CordRepBtree::kMaxDepth]; }; // Returns true if this instance is not empty. @@ -173,6 +173,7 @@ template inline CordRep* CordRepBtreeNavigator::Init(CordRepBtree* tree) { assert(tree != nullptr); assert(tree->size() > 0); + assert(tree->height() <= CordRepBtree::kMaxHeight); int height = height_ = tree->height(); size_t index = tree->index(edge_type); node_[height] = tree; @@ -206,6 +207,7 @@ inline CordRepBtreeNavigator::Position CordRepBtreeNavigator::Seek( inline CordRepBtreeNavigator::Position CordRepBtreeNavigator::InitOffset( CordRepBtree* tree, size_t offset) { assert(tree != nullptr); + assert(tree->height() <= CordRepBtree::kMaxHeight); if (ABSL_PREDICT_FALSE(offset >= tree->length)) return {nullptr, 0}; height_ = tree->height(); node_[height_] = tree; diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_crc.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_crc.cc index ee14035410..7d7273ef8d 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_crc.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_crc.cc @@ -25,8 +25,7 @@ ABSL_NAMESPACE_BEGIN namespace cord_internal { CordRepCrc* CordRepCrc::New(CordRep* child, uint32_t crc) { - assert(child != nullptr); - if (child->IsCrc()) { + if (child != nullptr && child->IsCrc()) { if (child->refcount.IsOne()) { child->crc()->crc = crc; return child->crc(); @@ -37,7 +36,7 @@ CordRepCrc* CordRepCrc::New(CordRep* child, uint32_t crc) { CordRep::Unref(old); } auto* new_cordrep = new CordRepCrc; - new_cordrep->length = child->length; + new_cordrep->length = child != nullptr ? 
child->length : 0; new_cordrep->tag = cord_internal::CRC; new_cordrep->child = child; new_cordrep->crc = crc; @@ -45,7 +44,9 @@ CordRepCrc* CordRepCrc::New(CordRep* child, uint32_t crc) { } void CordRepCrc::Destroy(CordRepCrc* node) { - CordRep::Unref(node->child); + if (node->child != nullptr) { + CordRep::Unref(node->child); + } delete node; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_crc.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_crc.h index 5294b0d133..455a1127d6 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_crc.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_crc.h @@ -40,7 +40,7 @@ struct CordRepCrc : public CordRep { // If the specified `child` is itself a CordRepCrc node, then this method // either replaces the existing node, or directly updates the crc value in it // depending on the node being shared or not, i.e.: refcount.IsOne(). - // `child` must not be null. Never returns null. + // `child` must only be null if the Cord is empty. Never returns null. static CordRepCrc* New(CordRep* child, uint32_t crc); // Destroys (deletes) the provided node. `node` must not be null. diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_ring.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_ring.cc index db1f63fa67..af2fc7683d 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_ring.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cord_rep_ring.cc @@ -129,7 +129,9 @@ class CordRepRing::Filler { index_type pos_; }; -constexpr size_t CordRepRing::kMaxCapacity; // NOLINT: needed for c++11 +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL +constexpr size_t CordRepRing::kMaxCapacity; +#endif bool CordRepRing::IsValid(std::ostream& output) const { if (capacity_ == 0) { diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_functions.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_functions.h index c9ba14508a..93f46ec6fe 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_functions.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_functions.h @@ -12,8 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -#ifndef ABSL_STRINGS_CORDZ_FUNCTIONS_H_ -#define ABSL_STRINGS_CORDZ_FUNCTIONS_H_ +#ifndef ABSL_STRINGS_INTERNAL_CORDZ_FUNCTIONS_H_ +#define ABSL_STRINGS_INTERNAL_CORDZ_FUNCTIONS_H_ #include @@ -82,4 +82,4 @@ inline void cordz_set_next_sample_for_testing(int64_t) {} ABSL_NAMESPACE_END } // namespace absl -#endif // ABSL_STRINGS_CORDZ_FUNCTIONS_H_ +#endif // ABSL_STRINGS_INTERNAL_CORDZ_FUNCTIONS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_handle.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_handle.h index 5df53c782a..3c800b433f 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_handle.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_handle.h @@ -12,8 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-#ifndef ABSL_STRINGS_CORDZ_HANDLE_H_ -#define ABSL_STRINGS_CORDZ_HANDLE_H_ +#ifndef ABSL_STRINGS_INTERNAL_CORDZ_HANDLE_H_ +#define ABSL_STRINGS_INTERNAL_CORDZ_HANDLE_H_ #include #include @@ -128,4 +128,4 @@ class CordzSnapshot : public CordzHandle { ABSL_NAMESPACE_END } // namespace absl -#endif // ABSL_STRINGS_CORDZ_HANDLE_H_ +#endif // ABSL_STRINGS_INTERNAL_CORDZ_HANDLE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info.cc index c891d0ed0a..530f33bed4 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info.cc @@ -34,7 +34,9 @@ namespace cord_internal { using ::absl::base_internal::SpinLockHolder; -constexpr int CordzInfo::kMaxStackDepth; +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL +constexpr size_t CordzInfo::kMaxStackDepth; +#endif ABSL_CONST_INIT CordzInfo::List CordzInfo::global_list_{absl::kConstInit}; @@ -289,7 +291,7 @@ CordzInfo::MethodIdentifier CordzInfo::GetParentMethod(const CordzInfo* src) { : src->method_; } -int CordzInfo::FillParentStack(const CordzInfo* src, void** stack) { +size_t CordzInfo::FillParentStack(const CordzInfo* src, void** stack) { assert(stack); if (src == nullptr) return 0; if (src->parent_stack_depth_) { @@ -300,11 +302,14 @@ int CordzInfo::FillParentStack(const CordzInfo* src, void** stack) { return src->stack_depth_; } -CordzInfo::CordzInfo(CordRep* rep, const CordzInfo* src, +CordzInfo::CordzInfo(CordRep* rep, + const CordzInfo* src, MethodIdentifier method) : rep_(rep), - stack_depth_(absl::GetStackTrace(stack_, /*max_depth=*/kMaxStackDepth, - /*skip_count=*/1)), + stack_depth_( + static_cast(absl::GetStackTrace(stack_, + /*max_depth=*/kMaxStackDepth, + /*skip_count=*/1))), parent_stack_depth_(FillParentStack(src, parent_stack_)), method_(method), parent_method_(GetParentMethod(src)), diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info.h index 026d5b9981..17eaa91c77 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info.h @@ -12,8 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -#ifndef ABSL_STRINGS_CORDZ_INFO_H_ -#define ABSL_STRINGS_CORDZ_INFO_H_ +#ifndef ABSL_STRINGS_INTERNAL_CORDZ_INFO_H_ +#define ABSL_STRINGS_INTERNAL_CORDZ_INFO_H_ #include #include @@ -196,7 +196,7 @@ class ABSL_LOCKABLE CordzInfo : public CordzHandle { std::atomic head ABSL_GUARDED_BY(mutex){nullptr}; }; - static constexpr int kMaxStackDepth = 64; + static constexpr size_t kMaxStackDepth = 64; explicit CordzInfo(CordRep* rep, const CordzInfo* src, MethodIdentifier method); @@ -216,7 +216,7 @@ class ABSL_LOCKABLE CordzInfo : public CordzHandle { // `stack_` depending on `parent_stack_` being empty, returning the size of // the parent stack. // Returns 0 if `src` is null. 
- static int FillParentStack(const CordzInfo* src, void** stack); + static size_t FillParentStack(const CordzInfo* src, void** stack); void ODRCheck() const { #ifndef NDEBUG @@ -244,8 +244,8 @@ class ABSL_LOCKABLE CordzInfo : public CordzHandle { void* stack_[kMaxStackDepth]; void* parent_stack_[kMaxStackDepth]; - const int stack_depth_; - const int parent_stack_depth_; + const size_t stack_depth_; + const size_t parent_stack_depth_; const MethodIdentifier method_; const MethodIdentifier parent_method_; CordzUpdateTracker update_tracker_; @@ -295,4 +295,4 @@ inline CordRep* CordzInfo::RefCordRep() const ABSL_LOCKS_EXCLUDED(mutex_) { ABSL_NAMESPACE_END } // namespace absl -#endif // ABSL_STRINGS_CORDZ_INFO_H_ +#endif // ABSL_STRINGS_INTERNAL_CORDZ_INFO_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info_statistics_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info_statistics_test.cc index 476c38d2b2..6d6feb52bd 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info_statistics_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info_statistics_test.cc @@ -62,7 +62,7 @@ CordRepFlat* Flat(size_t size) { } // Creates an external of the specified length -CordRepExternal* External(int length = 512) { +CordRepExternal* External(size_t length = 512) { return static_cast( NewExternalRep(absl::string_view("", length), [](absl::string_view) {})); } @@ -352,7 +352,7 @@ TEST(CordzInfoStatisticsTest, SharedSubstringRing) { } TEST(CordzInfoStatisticsTest, BtreeLeaf) { - ASSERT_THAT(CordRepBtree::kMaxCapacity, Ge(3)); + ASSERT_THAT(CordRepBtree::kMaxCapacity, Ge(3u)); RefHelper ref; auto* flat1 = Flat(2000); auto* flat2 = Flat(200); @@ -392,7 +392,7 @@ TEST(CordzInfoStatisticsTest, BtreeNodeShared) { RefHelper ref; static constexpr int leaf_count = 3; const size_t flat3_count = CordRepBtree::kMaxCapacity - 3; - ASSERT_THAT(flat3_count, Ge(0)); + ASSERT_THAT(flat3_count, Ge(0u)); CordRepBtree* tree = nullptr; size_t mem_size = 0; diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info_test.cc index b98343ae79..cd226c3ed5 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_info_test.cc @@ -124,7 +124,7 @@ TEST(CordzInfoTest, UntrackCord) { CordzInfo* info = data.data.cordz_info(); info->Untrack(); - EXPECT_THAT(DeleteQueue(), SizeIs(0)); + EXPECT_THAT(DeleteQueue(), SizeIs(0u)); } TEST(CordzInfoTest, UntrackCordWithSnapshot) { @@ -263,8 +263,9 @@ TEST(CordzInfoTest, StackV2) { // resultant formatted stack will be "", but that still equals the stack // recorded in CordzInfo, which is also empty. The skip_count is 1 so that the // line number of the current stack isn't included in the HasSubstr check. 
- local_stack.resize(absl::GetStackTrace(local_stack.data(), kMaxStackDepth, - /*skip_count=*/1)); + local_stack.resize(static_cast( + absl::GetStackTrace(local_stack.data(), kMaxStackDepth, + /*skip_count=*/1))); std::string got_stack = FormatStack(info->GetStack()); std::string expected_stack = FormatStack(local_stack); diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_sample_token.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_sample_token.h index 28a1d70ccc..b58022c3f9 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_sample_token.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_sample_token.h @@ -16,8 +16,8 @@ #include "absl/strings/internal/cordz_handle.h" #include "absl/strings/internal/cordz_info.h" -#ifndef ABSL_STRINGS_CORDZ_SAMPLE_TOKEN_H_ -#define ABSL_STRINGS_CORDZ_SAMPLE_TOKEN_H_ +#ifndef ABSL_STRINGS_INTERNAL_CORDZ_SAMPLE_TOKEN_H_ +#define ABSL_STRINGS_INTERNAL_CORDZ_SAMPLE_TOKEN_H_ namespace absl { ABSL_NAMESPACE_BEGIN @@ -94,4 +94,4 @@ class CordzSampleToken : public CordzSnapshot { ABSL_NAMESPACE_END } // namespace absl -#endif // ABSL_STRINGS_CORDZ_SAMPLE_TOKEN_H_ +#endif // ABSL_STRINGS_INTERNAL_CORDZ_SAMPLE_TOKEN_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_sample_token_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_sample_token_test.cc index 9f54301d68..6be1770d59 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_sample_token_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_sample_token_test.cc @@ -167,7 +167,7 @@ TEST(CordzSampleTokenTest, MultiThreaded) { if (cord.data.is_profiled()) { // 1) Untrack cord.data.cordz_info()->Untrack(); - cord.data.clear_cordz_info();; + cord.data.clear_cordz_info(); } else { // 2) Track CordzInfo::TrackCord(cord.data, kTrackCordMethod); diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_statistics.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_statistics.h index 5707190577..9f558df494 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_statistics.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/cordz_statistics.h @@ -45,12 +45,12 @@ struct CordzStatistics { }; // The size of the cord in bytes. This matches the result of Cord::size(). - int64_t size = 0; + size_t size = 0; // The estimated memory used by the sampled cord. This value matches the // value as reported by Cord::EstimatedMemoryUsage(). // A value of 0 implies the property has not been recorded. - int64_t estimated_memory_usage = 0; + size_t estimated_memory_usage = 0; // The effective memory used by the sampled cord, inversely weighted by the // effective indegree of each allocated node. This is a representation of the @@ -59,14 +59,14 @@ struct CordzStatistics { // by multiple Cord instances, and for cases where a Cord includes the same // node multiple times (either directly or indirectly). // A value of 0 implies the property has not been recorded. - int64_t estimated_fair_share_memory_usage = 0; + size_t estimated_fair_share_memory_usage = 0; // The total number of nodes referenced by this cord. // For ring buffer Cords, this includes the 'ring buffer' node. // For btree Cords, this includes all 'CordRepBtree' tree nodes as well as all // the substring, flat and external nodes referenced by the tree. // A value of 0 implies the property has not been recorded. 
- int64_t node_count = 0; + size_t node_count = 0; // Detailed node counts per type NodeCounts node_counts; diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/damerau_levenshtein_distance.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/damerau_levenshtein_distance.cc new file mode 100644 index 0000000000..a084568fa8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/damerau_levenshtein_distance.cc @@ -0,0 +1,93 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "absl/strings/internal/damerau_levenshtein_distance.h" + +#include +#include +#include + +#include "absl/strings/string_view.h" +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace strings_internal { +// Calculate DamerauLevenshtein (adjacent transpositions) distance +// between two strings, +// https://en.wikipedia.org/wiki/Damerau%E2%80%93Levenshtein_distance. The +// algorithm follows the condition that no substring is edited more than once. +// While this can reduce is larger distance, it's a) a much simpler algorithm +// and b) more realistic for the case that typographic mistakes should be +// detected. +// When the distance is larger than cutoff, or one of the strings has more +// than MAX_SIZE=100 characters, the code returns min(MAX_SIZE, cutoff) + 1. +uint8_t CappedDamerauLevenshteinDistance(absl::string_view s1, + absl::string_view s2, uint8_t cutoff) { + const uint8_t MAX_SIZE = 100; + const uint8_t _cutoff = std::min(MAX_SIZE, cutoff); + const uint8_t cutoff_plus_1 = static_cast(_cutoff + 1); + + if (s1.size() > s2.size()) std::swap(s1, s2); + if (s1.size() + _cutoff < s2.size() || s2.size() > MAX_SIZE) + return cutoff_plus_1; + + if (s1.empty()) + return static_cast(s2.size()); + + // Lower diagonal bound: y = x - lower_diag + const uint8_t lower_diag = + _cutoff - static_cast(s2.size() - s1.size()); + // Upper diagonal bound: y = x + upper_diag + const uint8_t upper_diag = _cutoff; + + // d[i][j] is the number of edits required to convert s1[0, i] to s2[0, j] + std::array, MAX_SIZE + 2> d; + std::iota(d[0].begin(), d[0].begin() + upper_diag + 1, 0); + d[0][cutoff_plus_1] = cutoff_plus_1; + for (size_t i = 1; i <= s1.size(); ++i) { + // Deduce begin of relevant window. + size_t j_begin = 1; + if (i > lower_diag) { + j_begin = i - lower_diag; + d[i][j_begin - 1] = cutoff_plus_1; + } else { + d[i][0] = static_cast(i); + } + + // Deduce end of relevant window. + size_t j_end = i + upper_diag; + if (j_end > s2.size()) { + j_end = s2.size(); + } else { + d[i][j_end + 1] = cutoff_plus_1; + } + + for (size_t j = j_begin; j <= j_end; ++j) { + const uint8_t deletion_distance = d[i - 1][j] + 1; + const uint8_t insertion_distance = d[i][j - 1] + 1; + const uint8_t mismatched_tail_cost = s1[i - 1] == s2[j - 1] ? 
0 : 1; + const uint8_t mismatch_distance = d[i - 1][j - 1] + mismatched_tail_cost; + uint8_t transposition_distance = _cutoff + 1; + if (i > 1 && j > 1 && s1[i - 1] == s2[j - 2] && s1[i - 2] == s2[j - 1]) + transposition_distance = d[i - 2][j - 2] + 1; + d[i][j] = std::min({cutoff_plus_1, deletion_distance, insertion_distance, + mismatch_distance, transposition_distance}); + } + } + return d[s1.size()][s2.size()]; +} + +} // namespace strings_internal + +ABSL_NAMESPACE_END +} // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/damerau_levenshtein_distance.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/damerau_levenshtein_distance.h new file mode 100644 index 0000000000..1a9684254a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/damerau_levenshtein_distance.h @@ -0,0 +1,35 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef ABSL_STRINGS_INTERNAL_DAMERAU_LEVENSHTEIN_DISTANCE_H_ +#define ABSL_STRINGS_INTERNAL_DAMERAU_LEVENSHTEIN_DISTANCE_H_ + +#include +#include + +#include "absl/strings/string_view.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace strings_internal { +// Calculate DamerauLevenshtein distance between two strings. +// When the distance is larger than cutoff, the code just returns cutoff + 1. +uint8_t CappedDamerauLevenshteinDistance(absl::string_view s1, + absl::string_view s2, uint8_t cutoff); + +} // namespace strings_internal +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_STRINGS_INTERNAL_DAMERAU_LEVENSHTEIN_DISTANCE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/escaping.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/escaping.cc index 7f87e1249a..cfea096111 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/escaping.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/escaping.cc @@ -21,7 +21,7 @@ namespace absl { ABSL_NAMESPACE_BEGIN namespace strings_internal { -const char kBase64Chars[] = +ABSL_CONST_INIT const char kBase64Chars[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; size_t CalculateBase64EscapedLenInternal(size_t input_len, bool do_padding) { diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/has_absl_stringify.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/has_absl_stringify.h new file mode 100644 index 0000000000..55a0850829 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/has_absl_stringify.h @@ -0,0 +1,55 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef ABSL_STRINGS_INTERNAL_HAS_ABSL_STRINGIFY_H_ +#define ABSL_STRINGS_INTERNAL_HAS_ABSL_STRINGIFY_H_ +#include +#include +#include + +#include "absl/strings/string_view.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN + +namespace strings_internal { + +// This is an empty class not intended to be used. It exists so that +// `HasAbslStringify` can reference a universal class rather than needing to be +// copied for each new sink. +class UnimplementedSink { + public: + void Append(size_t count, char ch); + + void Append(string_view v); + + // Support `absl::Format(&sink, format, args...)`. + friend void AbslFormatFlush(UnimplementedSink* sink, absl::string_view v); +}; + +template +struct HasAbslStringify : std::false_type {}; + +template +struct HasAbslStringify< + T, std::enable_if_t(), + std::declval()))>::value>> : std::true_type {}; + +} // namespace strings_internal + +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_STRINGS_INTERNAL_HAS_ABSL_STRINGIFY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/memutil.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/memutil.cc index 2519c6881e..44996a7549 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/memutil.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/memutil.cc @@ -54,10 +54,11 @@ size_t memspn(const char* s, size_t slen, const char* accept) { cont: c = *p++; - if (slen-- == 0) return p - 1 - s; + if (slen-- == 0) + return static_cast(p - 1 - s); for (spanp = accept; (sc = *spanp++) != '\0';) if (sc == c) goto cont; - return p - 1 - s; + return static_cast(p - 1 - s); } size_t memcspn(const char* s, size_t slen, const char* reject) { @@ -68,9 +69,10 @@ size_t memcspn(const char* s, size_t slen, const char* reject) { while (slen-- != 0) { c = *p++; for (spanp = reject; (sc = *spanp++) != '\0';) - if (sc == c) return p - 1 - s; + if (sc == c) + return static_cast(p - 1 - s); } - return p - s; + return static_cast(p - s); } char* mempbrk(const char* s, size_t slen, const char* accept) { @@ -97,8 +99,9 @@ const char* memmatch(const char* phaystack, size_t haylen, const char* pneedle, const char* hayend = phaystack + haylen - neelen + 1; // A static cast is used here to work around the fact that memchr returns // a void* on Posix-compliant systems and const void* on Windows. 
- while ((match = static_cast( - memchr(phaystack, pneedle[0], hayend - phaystack)))) { + while ( + (match = static_cast(memchr( + phaystack, pneedle[0], static_cast(hayend - phaystack))))) { if (memcmp(match, pneedle, neelen) == 0) return match; else diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/ostringstream.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/ostringstream.cc index dc6cfe1686..a0e5ec08c2 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/ostringstream.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/ostringstream.cc @@ -14,20 +14,27 @@ #include "absl/strings/internal/ostringstream.h" +#include +#include +#include +#include + namespace absl { ABSL_NAMESPACE_BEGIN namespace strings_internal { -OStringStream::Buf::int_type OStringStream::overflow(int c) { - assert(s_); - if (!Buf::traits_type::eq_int_type(c, Buf::traits_type::eof())) - s_->push_back(static_cast(c)); +OStringStream::Streambuf::int_type OStringStream::Streambuf::overflow(int c) { + assert(str_); + if (!std::streambuf::traits_type::eq_int_type( + c, std::streambuf::traits_type::eof())) + str_->push_back(static_cast(c)); return 1; } -std::streamsize OStringStream::xsputn(const char* s, std::streamsize n) { - assert(s_); - s_->append(s, static_cast(n)); +std::streamsize OStringStream::Streambuf::xsputn(const char* s, + std::streamsize n) { + assert(str_); + str_->append(s, static_cast(n)); return n; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/ostringstream.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/ostringstream.h index d25d60473f..c0e237dbe8 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/ostringstream.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/ostringstream.h @@ -16,11 +16,13 @@ #define ABSL_STRINGS_INTERNAL_OSTRINGSTREAM_H_ #include +#include #include #include #include +#include -#include "absl/base/port.h" +#include "absl/base/config.h" namespace absl { ABSL_NAMESPACE_BEGIN @@ -60,26 +62,49 @@ namespace strings_internal { // strm << 3.14; // // Note: flush() has no effect. No reason to call it. -class OStringStream : private std::basic_streambuf, public std::ostream { +class OStringStream final : public std::ostream { public: // The argument can be null, in which case you'll need to call str(p) with a // non-null argument before you can write to the stream. // // The destructor of OStringStream doesn't use the std::string. It's OK to // destroy the std::string before the stream. 
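For illustration only (this snippet is not in the change set): the ostringstream.h/.cc hunks below replace OStringStream's private inheritance from std::basic_streambuf with a nested Streambuf member and re-bind rdbuf() after a move, which is what the newly added MoveConstruct/MoveAssign tests exercise. Existing usage of this internal helper stays the same; a minimal sketch, assuming "absl/strings/internal/ostringstream.h" is included:

    std::string s;
    absl::strings_internal::OStringStream strm(&s);  // characters written to strm append to s
    strm << "pi=" << 3.14;
    // s == "pi=3.14"; with this patch the stream object can also be moved,
    // and the moved-to stream keeps writing into the same std::string.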
- explicit OStringStream(std::string* s) : std::ostream(this), s_(s) {} + explicit OStringStream(std::string* str) + : std::ostream(&buf_), buf_(str) {} + OStringStream(OStringStream&& that) + : std::ostream(std::move(static_cast(that))), + buf_(that.buf_) { + rdbuf(&buf_); + } + OStringStream& operator=(OStringStream&& that) { + std::ostream::operator=(std::move(static_cast(that))); + buf_ = that.buf_; + rdbuf(&buf_); + return *this; + } - std::string* str() { return s_; } - const std::string* str() const { return s_; } - void str(std::string* s) { s_ = s; } + std::string* str() { return buf_.str(); } + const std::string* str() const { return buf_.str(); } + void str(std::string* str) { buf_.str(str); } private: - using Buf = std::basic_streambuf; + class Streambuf final : public std::streambuf { + public: + explicit Streambuf(std::string* str) : str_(str) {} + Streambuf(const Streambuf&) = default; + Streambuf& operator=(const Streambuf&) = default; - Buf::int_type overflow(int c) override; - std::streamsize xsputn(const char* s, std::streamsize n) override; + std::string* str() { return str_; } + const std::string* str() const { return str_; } + void str(std::string* str) { str_ = str; } - std::string* s_; + protected: + int_type overflow(int c) override; + std::streamsize xsputn(const char* s, std::streamsize n) override; + + private: + std::string* str_; + } buf_; }; } // namespace strings_internal diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/ostringstream_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/ostringstream_test.cc index 2879e50eb3..ef3ad573e9 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/ostringstream_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/ostringstream_test.cc @@ -14,10 +14,12 @@ #include "absl/strings/internal/ostringstream.h" +#include #include #include #include #include +#include #include "gtest/gtest.h" @@ -29,24 +31,51 @@ TEST(OStringStream, IsOStream) { ""); } -TEST(OStringStream, ConstructDestroy) { +TEST(OStringStream, ConstructNullptr) { + absl::strings_internal::OStringStream strm(nullptr); + EXPECT_EQ(nullptr, strm.str()); +} + +TEST(OStringStream, ConstructStr) { + std::string s = "abc"; { - absl::strings_internal::OStringStream strm(nullptr); - EXPECT_EQ(nullptr, strm.str()); + absl::strings_internal::OStringStream strm(&s); + EXPECT_EQ(&s, strm.str()); } + EXPECT_EQ("abc", s); +} + +TEST(OStringStream, Destroy) { + std::unique_ptr s(new std::string); + absl::strings_internal::OStringStream strm(s.get()); + s.reset(); +} + +TEST(OStringStream, MoveConstruct) { + std::string s = "abc"; { - std::string s = "abc"; - { - absl::strings_internal::OStringStream strm(&s); - EXPECT_EQ(&s, strm.str()); - } - EXPECT_EQ("abc", s); + absl::strings_internal::OStringStream strm1(&s); + strm1 << std::hex << 16; + EXPECT_EQ(&s, strm1.str()); + absl::strings_internal::OStringStream strm2(std::move(strm1)); + strm2 << 16; // We should still be in base 16. + EXPECT_EQ(&s, strm2.str()); } + EXPECT_EQ("abc1010", s); +} + +TEST(OStringStream, MoveAssign) { + std::string s = "abc"; { - std::unique_ptr s(new std::string); - absl::strings_internal::OStringStream strm(s.get()); - s.reset(); + absl::strings_internal::OStringStream strm1(&s); + strm1 << std::hex << 16; + EXPECT_EQ(&s, strm1.str()); + absl::strings_internal::OStringStream strm2(nullptr); + strm2 = std::move(strm1); + strm2 << 16; // We should still be in base 16. 
+ EXPECT_EQ(&s, strm2.str()); } + EXPECT_EQ("abc1010", s); } TEST(OStringStream, Str) { diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/arg.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/arg.cc index e28a29b171..967fe9ca26 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/arg.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/arg.cc @@ -77,7 +77,7 @@ class IntDigits { v >>= 3; } while (v); start_ = p; - size_ = storage_ + sizeof(storage_) - p; + size_ = static_cast(storage_ + sizeof(storage_) - p); } // Print the signed or unsigned integer as decimal. @@ -86,7 +86,8 @@ class IntDigits { void PrintAsDec(T v) { static_assert(std::is_integral::value, ""); start_ = storage_; - size_ = numbers_internal::FastIntToBuffer(v, storage_) - storage_; + size_ = static_cast(numbers_internal::FastIntToBuffer(v, storage_) - + storage_); } void PrintAsDec(int128 v) { @@ -115,7 +116,7 @@ class IntDigits { if (add_neg) { *--p = '-'; } - size_ = storage_ + sizeof(storage_) - p; + size_ = static_cast(storage_ + sizeof(storage_) - p); start_ = p; } @@ -138,7 +139,7 @@ class IntDigits { ++p; } start_ = p; - size_ = storage_ + sizeof(storage_) - p; + size_ = static_cast(storage_ + sizeof(storage_) - p); } // Print the unsigned integer as hex using uppercase. @@ -154,7 +155,7 @@ class IntDigits { v >>= 4; } while (v); start_ = p; - size_ = storage_ + sizeof(storage_) - p; + size_ = static_cast(storage_ + sizeof(storage_) - p); } // The printed value including the '-' sign if available. @@ -208,10 +209,12 @@ string_view SignColumn(bool neg, const FormatConversionSpecImpl conv) { return {}; } -bool ConvertCharImpl(unsigned char v, const FormatConversionSpecImpl conv, - FormatSinkImpl *sink) { +bool ConvertCharImpl(char v, + const FormatConversionSpecImpl conv, + FormatSinkImpl* sink) { size_t fill = 0; - if (conv.width() >= 0) fill = conv.width(); + if (conv.width() >= 0) + fill = static_cast(conv.width()); ReducePadding(1, &fill); if (!conv.has_left_flag()) sink->Append(fill, ' '); sink->Append(1, v); @@ -225,7 +228,8 @@ bool ConvertIntImplInnerSlow(const IntDigits &as_digits, // Print as a sequence of Substrings: // [left_spaces][sign][base_indicator][zeroes][formatted][right_spaces] size_t fill = 0; - if (conv.width() >= 0) fill = conv.width(); + if (conv.width() >= 0) + fill = static_cast(conv.width()); string_view formatted = as_digits.without_neg_or_zero(); ReducePadding(formatted, &fill); @@ -236,10 +240,9 @@ bool ConvertIntImplInnerSlow(const IntDigits &as_digits, string_view base_indicator = BaseIndicator(as_digits, conv); ReducePadding(base_indicator, &fill); - int precision = conv.precision(); - bool precision_specified = precision >= 0; - if (!precision_specified) - precision = 1; + bool precision_specified = conv.precision() >= 0; + size_t precision = + precision_specified ? static_cast(conv.precision()) : size_t{1}; if (conv.has_alt_flag() && conv.conversion_char() == FormatConversionCharInternal::o) { @@ -247,7 +250,7 @@ bool ConvertIntImplInnerSlow(const IntDigits &as_digits, // "For o conversion, it increases the precision (if necessary) to // force the first digit of the result to be zero." 
if (formatted.empty() || *formatted.begin() != '0') { - int needed = static_cast(formatted.size()) + 1; + size_t needed = formatted.size() + 1; precision = std::max(precision, needed); } } @@ -275,19 +278,40 @@ bool ConvertIntImplInnerSlow(const IntDigits &as_digits, return true; } +template ::value && + std::is_signed::value) || + std::is_same::value, + int>::type = 0> +constexpr auto ConvertV(T) { + return FormatConversionCharInternal::d; +} + +template ::value && + std::is_unsigned::value) || + std::is_same::value, + int>::type = 0> +constexpr auto ConvertV(T) { + return FormatConversionCharInternal::u; +} + template -bool ConvertIntArg(T v, const FormatConversionSpecImpl conv, - FormatSinkImpl *sink) { +bool ConvertIntArg(T v, FormatConversionSpecImpl conv, FormatSinkImpl *sink) { using U = typename MakeUnsigned::type; IntDigits as_digits; + if (conv.conversion_char() == FormatConversionCharInternal::v) { + conv.set_conversion_char(ConvertV(T{})); + } + // This odd casting is due to a bug in -Wswitch behavior in gcc49 which causes // it to complain about a switch/case type mismatch, even though both are // FormatConverionChar. Likely this is because at this point // FormatConversionChar is declared, but not defined. switch (static_cast(conv.conversion_char())) { case static_cast(FormatConversionCharInternal::c): - return ConvertCharImpl(static_cast(v), conv, sink); + return ConvertCharImpl(static_cast(v), conv, sink); case static_cast(FormatConversionCharInternal::o): as_digits.PrintAsOct(static_cast(v)); @@ -320,7 +344,7 @@ bool ConvertIntArg(T v, const FormatConversionSpecImpl conv, return ConvertFloatImpl(static_cast(v), conv, sink); default: - ABSL_INTERNAL_ASSUME(false); + ABSL_ASSUME(false); } if (conv.is_basic()) { @@ -331,8 +355,11 @@ bool ConvertIntArg(T v, const FormatConversionSpecImpl conv, } template -bool ConvertFloatArg(T v, const FormatConversionSpecImpl conv, - FormatSinkImpl *sink) { +bool ConvertFloatArg(T v, FormatConversionSpecImpl conv, FormatSinkImpl *sink) { + if (conv.conversion_char() == FormatConversionCharInternal::v) { + conv.set_conversion_char(FormatConversionCharInternal::g); + } + return FormatConversionCharIsFloat(conv.conversion_char()) && ConvertFloatImpl(v, conv, sink); } @@ -349,6 +376,15 @@ inline bool ConvertStringArg(string_view v, const FormatConversionSpecImpl conv, } // namespace +bool ConvertBoolArg(bool v, FormatSinkImpl *sink) { + if (v) { + sink->Append("true"); + } else { + sink->Append("false"); + } + return true; +} + // ==================== Strings ==================== StringConvertResult FormatConvertImpl(const std::string &v, const FormatConversionSpecImpl conv, @@ -375,7 +411,7 @@ FormatConvertImpl(const char *v, const FormatConversionSpecImpl conv, len = std::strlen(v); } else { // If precision is set, we look for the NUL-terminator on the valid range. 
- len = std::find(v, v + conv.precision(), '\0') - v; + len = static_cast(std::find(v, v + conv.precision(), '\0') - v); } return {ConvertStringArg(string_view(v, len), conv, sink)}; } @@ -410,19 +446,18 @@ FloatingConvertResult FormatConvertImpl(long double v, } // ==================== Chars ==================== -IntegralConvertResult FormatConvertImpl(char v, - const FormatConversionSpecImpl conv, - FormatSinkImpl *sink) { +CharConvertResult FormatConvertImpl(char v, const FormatConversionSpecImpl conv, + FormatSinkImpl *sink) { return {ConvertIntArg(v, conv, sink)}; } -IntegralConvertResult FormatConvertImpl(signed char v, - const FormatConversionSpecImpl conv, - FormatSinkImpl *sink) { +CharConvertResult FormatConvertImpl(signed char v, + const FormatConversionSpecImpl conv, + FormatSinkImpl *sink) { return {ConvertIntArg(v, conv, sink)}; } -IntegralConvertResult FormatConvertImpl(unsigned char v, - const FormatConversionSpecImpl conv, - FormatSinkImpl *sink) { +CharConvertResult FormatConvertImpl(unsigned char v, + const FormatConversionSpecImpl conv, + FormatSinkImpl *sink) { return {ConvertIntArg(v, conv, sink)}; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/arg.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/arg.h index b9dda90901..bc4cde9677 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/arg.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/arg.h @@ -18,6 +18,7 @@ #include #include +#include #include #include #include @@ -25,10 +26,12 @@ #include #include #include +#include #include "absl/base/port.h" #include "absl/meta/type_traits.h" #include "absl/numeric/int128.h" +#include "absl/strings/internal/has_absl_stringify.h" #include "absl/strings/internal/str_format/extension.h" #include "absl/strings/string_view.h" @@ -45,6 +48,11 @@ class FormatConversionSpec; namespace str_format_internal { +template +struct ArgConvertResult { + bool value; +}; + template struct HasUserDefinedConvert : std::false_type {}; @@ -55,7 +63,12 @@ struct HasUserDefinedConvert()))>> : std::true_type {}; -void AbslFormatConvert(); // Stops the lexical name lookup +// These declarations prevent ADL lookup from continuing in absl namespaces, +// we are deliberately using these as ADL hooks and want them to consider +// non-absl namespaces only. 
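For illustration only (this snippet is not in the change set): the arg.h/arg.cc hunks in this area introduce an AbslStringify extension point — HasAbslStringify detects a user-supplied AbslStringify(Sink&, const T&) overload found by ADL, and the new %v conversion routes through it. A minimal, hypothetical opt-in (the Point type and its fields are invented for illustration; assumes "absl/strings/str_format.h" is included):

    struct Point {
      int x = 0;
      int y = 0;

      // Found via ADL when a Point is formatted with %v.
      template <typename Sink>
      friend void AbslStringify(Sink& sink, const Point& p) {
        absl::Format(&sink, "(%d, %d)", p.x, p.y);
      }
    };

    // Usage sketch: absl::StrFormat("%v", Point{1, 2}) would yield "(1, 2)".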
+void AbslFormatConvert(); +void AbslStringify(); + template auto FormatConvertImpl(const T& v, FormatConversionSpecImpl conv, FormatSinkImpl* sink) @@ -71,6 +84,19 @@ auto FormatConvertImpl(const T& v, FormatConversionSpecImpl conv, return AbslFormatConvert(v, fcs, &fs); } +template +auto FormatConvertImpl(const T& v, FormatConversionSpecImpl, + FormatSinkImpl* sink) + -> std::enable_if_t(), v))>::value, + ArgConvertResult> { + using FormatSinkT = + absl::enable_if_t; + auto fs = sink->Wrap(); + AbslStringify(fs, v); + return {true}; +} + template class StreamedWrapper; @@ -95,11 +121,6 @@ struct VoidPtr { uintptr_t value; }; -template -struct ArgConvertResult { - bool value; -}; - template constexpr FormatConversionCharSet ExtractCharSet(FormatConvertResult) { return C; @@ -110,8 +131,8 @@ constexpr FormatConversionCharSet ExtractCharSet(ArgConvertResult) { return C; } -using StringConvertResult = - ArgConvertResult; +using StringConvertResult = ArgConvertResult; ArgConvertResult FormatConvertImpl( VoidPtr v, FormatConversionSpecImpl conv, FormatSinkImpl* sink); @@ -174,11 +195,19 @@ StringConvertResult FormatConvertImpl(const AbslCord& value, } using IntegralConvertResult = ArgConvertResult; +using FloatingConvertResult = ArgConvertResult; +using CharConvertResult = ArgConvertResult; -using FloatingConvertResult = - ArgConvertResult; + +bool ConvertBoolArg(bool v, FormatSinkImpl* sink); // Floats. FloatingConvertResult FormatConvertImpl(float v, FormatConversionSpecImpl conv, @@ -190,14 +219,14 @@ FloatingConvertResult FormatConvertImpl(long double v, FormatSinkImpl* sink); // Chars. -IntegralConvertResult FormatConvertImpl(char v, FormatConversionSpecImpl conv, - FormatSinkImpl* sink); -IntegralConvertResult FormatConvertImpl(signed char v, - FormatConversionSpecImpl conv, - FormatSinkImpl* sink); -IntegralConvertResult FormatConvertImpl(unsigned char v, - FormatConversionSpecImpl conv, - FormatSinkImpl* sink); +CharConvertResult FormatConvertImpl(char v, FormatConversionSpecImpl conv, + FormatSinkImpl* sink); +CharConvertResult FormatConvertImpl(signed char v, + FormatConversionSpecImpl conv, + FormatSinkImpl* sink); +CharConvertResult FormatConvertImpl(unsigned char v, + FormatConversionSpecImpl conv, + FormatSinkImpl* sink); // Ints. IntegralConvertResult FormatConvertImpl(short v, // NOLINT @@ -228,9 +257,16 @@ IntegralConvertResult FormatConvertImpl(int128 v, FormatConversionSpecImpl conv, IntegralConvertResult FormatConvertImpl(uint128 v, FormatConversionSpecImpl conv, FormatSinkImpl* sink); + +// This function needs to be a template due to ambiguity regarding type +// conversions. template ::value, int> = 0> IntegralConvertResult FormatConvertImpl(T v, FormatConversionSpecImpl conv, FormatSinkImpl* sink) { + if (conv.conversion_char() == FormatConversionCharInternal::v) { + return {ConvertBoolArg(v, sink)}; + } + return FormatConvertImpl(static_cast(v), conv, sink); } @@ -238,7 +274,8 @@ IntegralConvertResult FormatConvertImpl(T v, FormatConversionSpecImpl conv, // FormatArgImpl will use the underlying Convert functions instead. 
template typename std::enable_if::value && - !HasUserDefinedConvert::value, + !HasUserDefinedConvert::value && + !strings_internal::HasAbslStringify::value, IntegralConvertResult>::type FormatConvertImpl(T v, FormatConversionSpecImpl conv, FormatSinkImpl* sink); @@ -301,11 +338,11 @@ struct FormatArgImplFriend { template constexpr FormatConversionCharSet ArgumentToConv() { - return absl::str_format_internal::ExtractCharSet( - decltype(str_format_internal::FormatConvertImpl( - std::declval(), - std::declval(), - std::declval())){}); + using ConvResult = decltype(str_format_internal::FormatConvertImpl( + std::declval(), + std::declval(), + std::declval())); + return absl::str_format_internal::ExtractCharSet(ConvResult{}); } // A type-erased handle to a format argument. @@ -351,7 +388,8 @@ class FormatArgImpl { template struct DecayType { static constexpr bool kHasUserDefined = - str_format_internal::HasUserDefinedConvert::value; + str_format_internal::HasUserDefinedConvert::value || + strings_internal::HasAbslStringify::value; using type = typename std::conditional< !kHasUserDefined && std::is_convertible::value, const char*, @@ -363,6 +401,7 @@ class FormatArgImpl { struct DecayType::value && + !strings_internal::HasAbslStringify::value && std::is_enum::value>::type> { using type = typename std::underlying_type::type; }; diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/bind.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/bind.cc index c988ba8fd2..77a4222337 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/bind.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/bind.cc @@ -32,7 +32,8 @@ inline bool BindFromPosition(int position, int* value, return false; } // -1 because positions are 1-based - return FormatArgImplFriend::ToInt(pack[position - 1], value); + return FormatArgImplFriend::ToInt(pack[static_cast(position) - 1], + value); } class ArgContext { @@ -56,7 +57,7 @@ inline bool ArgContext::Bind(const UnboundConversion* unbound, const FormatArgImpl* arg = nullptr; int arg_position = unbound->arg_position; if (static_cast(arg_position - 1) >= pack_.size()) return false; - arg = &pack_[arg_position - 1]; // 1-based + arg = &pack_[static_cast(arg_position - 1)]; // 1-based if (unbound->flags != Flags::kBasic) { int width = unbound->width.value(); diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/bind.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/bind.h index b26cff6648..b73c50287c 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/bind.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/bind.h @@ -25,6 +25,7 @@ #include "absl/strings/internal/str_format/checker.h" #include "absl/strings/internal/str_format/parser.h" #include "absl/types/span.h" +#include "absl/utility/utility.h" namespace absl { ABSL_NAMESPACE_BEGIN @@ -87,6 +88,36 @@ class FormatSpecTemplate : public MakeDependent::type { using Base = typename MakeDependent::type; + template + struct ErrorMaker { + constexpr bool operator()(int) const { return res; } + }; + + template + static constexpr bool CheckArity(ErrorMaker SpecifierCount = {}, + ErrorMaker ParametersPassed = {}) { + static_assert(SpecifierCount(i) == ParametersPassed(j), + "Number of arguments passed must match the number of " + "conversion specifiers."); + return true; + } + + template + static constexpr bool CheckMatch( + ErrorMaker MismatchedArgumentNumber = {}) { + 
static_assert(MismatchedArgumentNumber(arg), + "Passed argument must match specified format."); + return true; + } + + template + static bool CheckMatches(absl::index_sequence) { + bool res[] = {true, CheckMatch()...}; + (void)res; + return true; + } + public: #ifdef ABSL_INTERNAL_ENABLE_FORMAT_CHECKER @@ -112,7 +143,8 @@ class FormatSpecTemplate template FormatSpecTemplate(string_view s) // NOLINT __attribute__((enable_if(str_format_internal::EnsureConstexpr(s), - "constexpr trap"))) { + "constexpr trap"))) + : Base("to avoid noise in the compiler error") { static_assert(sizeof(T*) == 0, "Format specified does not match the arguments passed."); } @@ -133,13 +165,12 @@ class FormatSpecTemplate #endif // ABSL_INTERNAL_ENABLE_FORMAT_CHECKER - template < - FormatConversionCharSet... C, - typename = typename std::enable_if::type, - typename = typename std::enable_if::type> + template FormatSpecTemplate(const ExtendedParsedFormat& pc) // NOLINT - : Base(&pc) {} + : Base(&pc) { + CheckArity(); + CheckMatches(absl::make_index_sequence{}); + } }; class Streamable { @@ -204,9 +235,10 @@ class StreamedWrapper { private: template - friend ArgConvertResult FormatConvertImpl( - const StreamedWrapper& v, FormatConversionSpecImpl conv, - FormatSinkImpl* out); + friend ArgConvertResult + FormatConvertImpl(const StreamedWrapper& v, FormatConversionSpecImpl conv, + FormatSinkImpl* out); const T& v_; }; diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/checker.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/checker.h index 4fd19d13ae..aeb9d48d39 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/checker.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/checker.h @@ -82,9 +82,10 @@ constexpr string_view ConsumeFront(string_view str, size_t len = 1) { } constexpr string_view ConsumeAnyOf(string_view format, const char* chars) { - return ContainsChar(chars, GetChar(format, 0)) - ? ConsumeAnyOf(ConsumeFront(format), chars) - : format; + while (ContainsChar(chars, GetChar(format, 0))) { + format = ConsumeFront(format); + } + return format; } constexpr bool IsDigit(char c) { return c >= '0' && c <= '9'; } @@ -98,16 +99,22 @@ struct Integer { // If the next character is a '$', consume it. // Otherwise, make `this` an invalid positional argument. constexpr Integer ConsumePositionalDollar() const { - return GetChar(format, 0) == '$' ? Integer{ConsumeFront(format), value} - : Integer{format, 0}; + if (GetChar(format, 0) == '$') { + return Integer{ConsumeFront(format), value}; + } else { + return Integer{format, 0}; + } } }; -constexpr Integer ParseDigits(string_view format, int value = 0) { - return IsDigit(GetChar(format, 0)) - ? ParseDigits(ConsumeFront(format), - 10 * value + GetChar(format, 0) - '0') - : Integer{format, value}; +constexpr Integer ParseDigits(string_view format) { + int value = 0; + while (IsDigit(GetChar(format, 0))) { + value = 10 * value + GetChar(format, 0) - '0'; + format = ConsumeFront(format); + } + + return Integer{format, value}; } // Parse digits for a positional argument. @@ -163,30 +170,36 @@ class ConvParser { // If it is '*', we verify that it matches `args_`. `error_` is set if it // doesn't match. constexpr ConvParser ParseWidth() const { - return IsDigit(GetChar(format_, 0)) - ? SetFormat(ParseDigits(format_).format) - : GetChar(format_, 0) == '*' - ? is_positional_ - ? 
VerifyPositional( - ParsePositional(ConsumeFront(format_)), '*') - : SetFormat(ConsumeFront(format_)) - .ConsumeNextArg('*') - : *this; + char first_char = GetChar(format_, 0); + + if (IsDigit(first_char)) { + return SetFormat(ParseDigits(format_).format); + } else if (first_char == '*') { + if (is_positional_) { + return VerifyPositional(ParsePositional(ConsumeFront(format_)), '*'); + } else { + return SetFormat(ConsumeFront(format_)).ConsumeNextArg('*'); + } + } else { + return *this; + } } // Consume the precision. // If it is '*', we verify that it matches `args_`. `error_` is set if it // doesn't match. constexpr ConvParser ParsePrecision() const { - return GetChar(format_, 0) != '.' - ? *this - : GetChar(format_, 1) == '*' - ? is_positional_ - ? VerifyPositional( - ParsePositional(ConsumeFront(format_, 2)), '*') - : SetFormat(ConsumeFront(format_, 2)) - .ConsumeNextArg('*') - : SetFormat(ParseDigits(ConsumeFront(format_)).format); + if (GetChar(format_, 0) != '.') { + return *this; + } else if (GetChar(format_, 1) == '*') { + if (is_positional_) { + return VerifyPositional(ParsePositional(ConsumeFront(format_, 2)), '*'); + } else { + return SetFormat(ConsumeFront(format_, 2)).ConsumeNextArg('*'); + } + } else { + return SetFormat(ParseDigits(ConsumeFront(format_)).format); + } } // Consume the length characters. @@ -197,11 +210,18 @@ class ConvParser { // Consume the conversion character and verify that it matches `args_`. // `error_` is set if it doesn't match. constexpr ConvParser ParseConversion() const { - return is_positional_ - ? VerifyPositional({ConsumeFront(format_), arg_position_}, - GetChar(format_, 0)) - : ConsumeNextArg(GetChar(format_, 0)) - .SetFormat(ConsumeFront(format_)); + char first_char = GetChar(format_, 0); + + if (first_char == 'v' && *(format_.data() - 1) != '%') { + return SetError(true); + } + + if (is_positional_) { + return VerifyPositional({ConsumeFront(format_), arg_position_}, + first_char); + } else { + return ConsumeNextArg(first_char).SetFormat(ConsumeFront(format_)); + } } constexpr ConvParser(string_view format, ConvList args, bool error, @@ -224,8 +244,13 @@ class ConvParser { // `format()` will be set to the character after the conversion character. // `error()` will be set if any of the arguments do not match. constexpr ConvParser Run() const { - return (is_positional_ ? ParseArgPosition(ParsePositional(format_)) : *this) - .ParseFlags() + ConvParser parser = *this; + + if (is_positional_) { + parser = ParseArgPosition(ParsePositional(format_)); + } + + return parser.ParseFlags() .ParseWidth() .ParsePrecision() .ParseLength() @@ -262,29 +287,40 @@ class FormatParser { // We use an inner function to increase the recursion limit. // The inner function consumes up to `limit` characters on every run. // This increases the limit from 512 to ~512*limit. - static constexpr string_view ConsumeNonPercentInner(string_view format, - int limit = 20) { - return FoundPercent(format) || !limit - ? format - : ConsumeNonPercentInner( - ConsumeFront(format, GetChar(format, 0) == '%' && - GetChar(format, 1) == '%' - ? 2 - : 1), - limit - 1); + static constexpr string_view ConsumeNonPercentInner(string_view format) { + int limit = 20; + while (!FoundPercent(format) && limit != 0) { + size_t len = 0; + + if (GetChar(format, 0) == '%' && GetChar(format, 1) == '%') { + len = 2; + } else { + len = 1; + } + + format = ConsumeFront(format, len); + --limit; + } + + return format; } // Consume characters until the next conversion spec %. // It skips %%. 
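// The checker rewrites above replace self-recursive constexpr ternary chains
// with plain loops, which C++14's relaxed constexpr rules allow while keeping
// the functions usable at compile time. A minimal, self-contained sketch of
// the same pattern (the name is illustrative, not Abseil's):

constexpr int ParseDigitsDemo(const char* s) {
  int value = 0;
  while (*s >= '0' && *s <= '9') {
    value = 10 * value + (*s - '0');
    ++s;
  }
  return value;
}
static_assert(ParseDigitsDemo("123x") == 123,
              "still evaluated at compile time despite the loop");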
static constexpr string_view ConsumeNonPercent(string_view format) { - return FoundPercent(format) - ? format - : ConsumeNonPercent(ConsumeNonPercentInner(format)); + while (!FoundPercent(format)) { + format = ConsumeNonPercentInner(format); + } + + return format; } static constexpr bool IsPositional(string_view format) { - return IsDigit(GetChar(format, 0)) ? IsPositional(ConsumeFront(format)) - : GetChar(format, 0) == '$'; + while (IsDigit(GetChar(format, 0))) { + format = ConsumeFront(format); + } + + return GetChar(format, 0) == '$'; } constexpr bool RunImpl(bool is_positional) const { diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/checker_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/checker_test.cc index 7c70f47d68..680517f7fc 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/checker_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/checker_test.cc @@ -39,16 +39,16 @@ std::string ConvToString(FormatConversionCharSet conv) { TEST(StrFormatChecker, ArgumentToConv) { FormatConversionCharSet conv = ArgumentToConv(); - EXPECT_EQ(ConvToString(conv), "s"); + EXPECT_EQ(ConvToString(conv), "sv"); conv = ArgumentToConv(); EXPECT_EQ(ConvToString(conv), "sp"); conv = ArgumentToConv(); - EXPECT_EQ(ConvToString(conv), "fFeEgGaA"); + EXPECT_EQ(ConvToString(conv), "fFeEgGaAv"); conv = ArgumentToConv(); - EXPECT_EQ(ConvToString(conv), "cdiouxXfFeEgGaA*"); + EXPECT_EQ(ConvToString(conv), "cdiouxXfFeEgGaAv*"); conv = ArgumentToConv(); EXPECT_EQ(ConvToString(conv), "p"); diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/convert_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/convert_test.cc index d9fbf61c5d..300612b7ba 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/convert_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/convert_test.cc @@ -24,6 +24,7 @@ #include "gmock/gmock.h" #include "gtest/gtest.h" +#include "absl/base/attributes.h" #include "absl/base/internal/raw_logging.h" #include "absl/strings/internal/str_format/bind.h" #include "absl/strings/match.h" @@ -124,6 +125,7 @@ void StrAppendV(std::string *dst, const char *format, va_list ap) { delete[] buf; } +void StrAppend(std::string *, const char *, ...) ABSL_PRINTF_ATTRIBUTE(2, 3); void StrAppend(std::string *out, const char *format, ...) { va_list ap; va_start(ap, format); @@ -131,6 +133,7 @@ void StrAppend(std::string *out, const char *format, ...) { va_end(ap); } +std::string StrPrint(const char *, ...) ABSL_PRINTF_ATTRIBUTE(1, 2); std::string StrPrint(const char *format, ...) { va_list ap; va_start(ap, format); @@ -455,21 +458,32 @@ TYPED_TEST_P(TypedFormatConvertTest, AllIntsWithFlags) { } TYPED_TEST_P(TypedFormatConvertTest, Char) { + // Pass a bunch of values of type TypeParam to both FormatPack and libc's + // vsnprintf("%c", ...) (wrapped in StrPrint) to make sure we get the same + // value. 
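// The extra "v" in the ArgumentToConv() expectations above corresponds to the
// new %v conversion threaded through this patch. A hedged usage sketch,
// assuming the public absl::StrFormat entry point of a release that ships this
// support (the Point type and its output are illustrative only):

#include "absl/strings/str_format.h"

struct Point {
  int x;
  int y;

  // Opt-in extension point: %v routes the value through this overload via a
  // type-erased sink such as the StringifySink added later in this patch.
  template <typename Sink>
  friend void AbslStringify(Sink& sink, const Point& p) {
    absl::Format(&sink, "(%d, %d)", p.x, p.y);
  }
};

// absl::StrFormat("%v", Point{1, 2}) is expected to yield "(1, 2)".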
typedef TypeParam T; using remove_volatile_t = typename std::remove_volatile::type; - static const T kMin = std::numeric_limits::min(); - static const T kMax = std::numeric_limits::max(); - T kVals[] = { - remove_volatile_t(1), remove_volatile_t(2), remove_volatile_t(10), - remove_volatile_t(-1), remove_volatile_t(-2), remove_volatile_t(-10), - remove_volatile_t(0), - kMin + remove_volatile_t(1), kMin, - kMax - remove_volatile_t(1), kMax + std::vector vals = { + remove_volatile_t(1), remove_volatile_t(2), remove_volatile_t(10), // + remove_volatile_t(-1), remove_volatile_t(-2), remove_volatile_t(-10), // + remove_volatile_t(0), }; - for (const T &c : kVals) { + + // We'd like to test values near std::numeric_limits::min() and + // std::numeric_limits::max(), too, but vsnprintf("%c", ...) can't handle + // anything larger than an int. Add in the most extreme values we can without + // exceeding that range. + static const T kMin = + static_cast(std::numeric_limits::min()); + static const T kMax = + static_cast(std::numeric_limits::max()); + vals.insert(vals.end(), {kMin + 1, kMin, kMax - 1, kMax}); + + for (const T c : vals) { const FormatArgImpl args[] = {FormatArgImpl(c)}; UntypedFormatSpecImpl format("%c"); - EXPECT_EQ(StrPrint("%c", c), FormatPack(format, absl::MakeSpan(args))); + EXPECT_EQ(StrPrint("%c", static_cast(c)), + FormatPack(format, absl::MakeSpan(args))); } } diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/extension.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/extension.cc index 484f6ebfc1..2a0ceb13d7 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/extension.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/extension.cc @@ -33,6 +33,8 @@ std::string FlagsToString(Flags v) { return s; } +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL + #define ABSL_INTERNAL_X_VAL(id) \ constexpr absl::FormatConversionChar FormatConversionCharInternal::id; ABSL_INTERNAL_CONVERSION_CHARS_EXPAND_(ABSL_INTERNAL_X_VAL, ) @@ -45,21 +47,19 @@ constexpr absl::FormatConversionChar FormatConversionCharInternal::kNone; ABSL_INTERNAL_CONVERSION_CHARS_EXPAND_(ABSL_INTERNAL_CHAR_SET_CASE, ) #undef ABSL_INTERNAL_CHAR_SET_CASE -// NOLINTNEXTLINE(readability-redundant-declaration) constexpr FormatConversionCharSet FormatConversionCharSetInternal::kStar; -// NOLINTNEXTLINE(readability-redundant-declaration) constexpr FormatConversionCharSet FormatConversionCharSetInternal::kIntegral; -// NOLINTNEXTLINE(readability-redundant-declaration) constexpr FormatConversionCharSet FormatConversionCharSetInternal::kFloating; -// NOLINTNEXTLINE(readability-redundant-declaration) constexpr FormatConversionCharSet FormatConversionCharSetInternal::kNumeric; -// NOLINTNEXTLINE(readability-redundant-declaration) constexpr FormatConversionCharSet FormatConversionCharSetInternal::kPointer; +#endif // ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL + bool FormatSinkImpl::PutPaddedString(string_view value, int width, int precision, bool left) { size_t space_remaining = 0; - if (width >= 0) space_remaining = width; + if (width >= 0) + space_remaining = static_cast(width); size_t n = value.size(); if (precision >= 0) n = std::min(n, static_cast(precision)); string_view shown(value.data(), n); diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/extension.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/extension.h index c47536d63e..603bd49d18 100644 --- 
a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/extension.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/extension.h @@ -19,6 +19,7 @@ #include #include +#include #include #include @@ -168,7 +169,7 @@ inline std::ostream& operator<<(std::ostream& os, Flags v) { X_VAL(f) X_SEP X_VAL(F) X_SEP X_VAL(e) X_SEP X_VAL(E) X_SEP \ X_VAL(g) X_SEP X_VAL(G) X_SEP X_VAL(a) X_SEP X_VAL(A) X_SEP \ /* misc */ \ - X_VAL(n) X_SEP X_VAL(p) + X_VAL(n) X_SEP X_VAL(p) X_SEP X_VAL(v) // clang-format on // This type should not be referenced, it exists only to provide labels @@ -190,7 +191,7 @@ struct FormatConversionCharInternal { c, s, // text d, i, o, u, x, X, // int f, F, e, E, g, G, a, A, // float - n, p, // misc + n, p, v, // misc kNone }; // clang-format on @@ -291,6 +292,8 @@ class FormatConversionSpecImpl { return conv_; } + void set_conversion_char(FormatConversionChar c) { conv_ = c; } + // Returns the specified width. If width is unspecfied, it returns a negative // value. int width() const { return width_; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/extension_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/extension_test.cc index 1c93fdb1c7..694c126406 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/extension_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/extension_test.cc @@ -19,6 +19,7 @@ #include #include +#include "gmock/gmock.h" #include "gtest/gtest.h" #include "absl/strings/str_format.h" #include "absl/strings/string_view.h" @@ -95,4 +96,14 @@ TEST(FormatExtensionTest, VerifyEnumEquality) { #undef X_VAL } +TEST(FormatExtensionTest, SetConversionChar) { + absl::str_format_internal::FormatConversionSpecImpl spec; + EXPECT_EQ(spec.conversion_char(), + absl::str_format_internal::FormatConversionCharInternal::kNone); + spec.set_conversion_char( + absl::str_format_internal::FormatConversionCharInternal::d); + EXPECT_EQ(spec.conversion_char(), + absl::str_format_internal::FormatConversionCharInternal::d); +} + } // namespace diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/float_conversion.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/float_conversion.cc index b1c4068475..8e497852bb 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/float_conversion.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/float_conversion.cc @@ -92,27 +92,30 @@ class StackArray { // Calculates `10 * (*v) + carry` and stores the result in `*v` and returns // the carry. +// Requires: `0 <= carry <= 9` template -inline Int MultiplyBy10WithCarry(Int *v, Int carry) { +inline char MultiplyBy10WithCarry(Int* v, char carry) { using BiggerInt = absl::conditional_t; - BiggerInt tmp = 10 * static_cast(*v) + carry; + BiggerInt tmp = + 10 * static_cast(*v) + static_cast(carry); *v = static_cast(tmp); - return static_cast(tmp >> (sizeof(Int) * 8)); + return static_cast(tmp >> (sizeof(Int) * 8)); } // Calculates `(2^64 * carry + *v) / 10`. // Stores the quotient in `*v` and returns the remainder. 
// Requires: `0 <= carry <= 9` -inline uint64_t DivideBy10WithCarry(uint64_t *v, uint64_t carry) { +inline char DivideBy10WithCarry(uint64_t* v, char carry) { constexpr uint64_t divisor = 10; // 2^64 / divisor = chunk_quotient + chunk_remainder / divisor constexpr uint64_t chunk_quotient = (uint64_t{1} << 63) / (divisor / 2); constexpr uint64_t chunk_remainder = uint64_t{} - chunk_quotient * divisor; + const uint64_t carry_u64 = static_cast(carry); const uint64_t mod = *v % divisor; - const uint64_t next_carry = chunk_remainder * carry + mod; - *v = *v / divisor + carry * chunk_quotient + next_carry / divisor; - return next_carry % divisor; + const uint64_t next_carry = chunk_remainder * carry_u64 + mod; + *v = *v / divisor + carry_u64 * chunk_quotient + next_carry / divisor; + return static_cast(next_carry % divisor); } using MaxFloatType = @@ -125,11 +128,11 @@ using MaxFloatType = // // Requires `0 <= exp` and `exp <= numeric_limits::max_exponent`. class BinaryToDecimal { - static constexpr int ChunksNeeded(int exp) { + static constexpr size_t ChunksNeeded(int exp) { // We will left shift a uint128 by `exp` bits, so we need `128+exp` total // bits. Round up to 32. // See constructor for details about adding `10%` to the value. - return (128 + exp + 31) / 32 * 11 / 10; + return static_cast((128 + exp + 31) / 32 * 11 / 10); } public: @@ -140,7 +143,7 @@ class BinaryToDecimal { assert(exp > 0); assert(exp <= std::numeric_limits::max_exponent); static_assert( - static_cast(StackArray::kMaxCapacity) >= + StackArray::kMaxCapacity >= ChunksNeeded(std::numeric_limits::max_exponent), ""); @@ -149,9 +152,9 @@ class BinaryToDecimal { [=](absl::Span input) { f(BinaryToDecimal(input, v, exp)); }); } - int TotalDigits() const { - return static_cast((decimal_end_ - decimal_start_) * kDigitsPerChunk + - CurrentDigits().size()); + size_t TotalDigits() const { + return (decimal_end_ - decimal_start_) * kDigitsPerChunk + + CurrentDigits().size(); } // See the current block of digits. @@ -190,30 +193,31 @@ class BinaryToDecimal { // the decimal representation is around 7% less efficient in space than the // binary one. We allocate an extra 10% memory to account for this. See // ChunksNeeded for this calculation. - int chunk_index = exp / 32; + size_t after_chunk_index = static_cast(exp / 32 + 1); decimal_start_ = decimal_end_ = ChunksNeeded(exp); const int offset = exp % 32; // Left shift v by exp bits. - data_[chunk_index] = static_cast(v << offset); + data_[after_chunk_index - 1] = static_cast(v << offset); for (v >>= (32 - offset); v; v >>= 32) - data_[++chunk_index] = static_cast(v); + data_[++after_chunk_index - 1] = static_cast(v); - while (chunk_index >= 0) { + while (after_chunk_index > 0) { // While we have more than one chunk available, go in steps of 1e9. - // `data_[chunk_index]` holds the highest non-zero binary chunk, so keep - // the variable updated. + // `data_[after_chunk_index - 1]` holds the highest non-zero binary chunk, + // so keep the variable updated. uint32_t carry = 0; - for (int i = chunk_index; i >= 0; --i) { - uint64_t tmp = uint64_t{data_[i]} + (uint64_t{carry} << 32); - data_[i] = static_cast(tmp / uint64_t{1000000000}); + for (size_t i = after_chunk_index; i > 0; --i) { + uint64_t tmp = uint64_t{data_[i - 1]} + (uint64_t{carry} << 32); + data_[i - 1] = static_cast(tmp / uint64_t{1000000000}); carry = static_cast(tmp % uint64_t{1000000000}); } // If the highest chunk is now empty, remove it from view. 
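      // Throughout this constructor, `after_chunk_index` is kept as one past
      // the highest chunk still in view, so the tests below compare against
      // zero and index `data_[after_chunk_index - 1]` instead of letting a
      // signed `chunk_index` run negative; that is what lets the index become
      // size_t without changing when the loop terminates.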
- if (data_[chunk_index] == 0) --chunk_index; + if (data_[after_chunk_index - 1] == 0) + --after_chunk_index; --decimal_start_; - assert(decimal_start_ != chunk_index); + assert(decimal_start_ != after_chunk_index - 1); data_[decimal_start_] = carry; } @@ -225,13 +229,13 @@ class BinaryToDecimal { } private: - static constexpr int kDigitsPerChunk = 9; + static constexpr size_t kDigitsPerChunk = 9; - int decimal_start_; - int decimal_end_; + size_t decimal_start_; + size_t decimal_end_; char digits_[kDigitsPerChunk]; - int size_ = 0; + size_t size_ = 0; absl::Span data_; }; @@ -251,25 +255,26 @@ class FractionalDigitGenerator { static_assert(StackArray::kMaxCapacity >= (Limits::digits + 128 - Limits::min_exponent + 31) / 32, ""); - StackArray::RunWithCapacity((Limits::digits + exp + 31) / 32, - [=](absl::Span input) { - f(FractionalDigitGenerator(input, v, exp)); - }); + StackArray::RunWithCapacity( + static_cast((Limits::digits + exp + 31) / 32), + [=](absl::Span input) { + f(FractionalDigitGenerator(input, v, exp)); + }); } // Returns true if there are any more non-zero digits left. - bool HasMoreDigits() const { return next_digit_ != 0 || chunk_index_ >= 0; } + bool HasMoreDigits() const { return next_digit_ != 0 || after_chunk_index_; } // Returns true if the remainder digits are greater than 5000... bool IsGreaterThanHalf() const { - return next_digit_ > 5 || (next_digit_ == 5 && chunk_index_ >= 0); + return next_digit_ > 5 || (next_digit_ == 5 && after_chunk_index_); } // Returns true if the remainder digits are exactly 5000... - bool IsExactlyHalf() const { return next_digit_ == 5 && chunk_index_ < 0; } + bool IsExactlyHalf() const { return next_digit_ == 5 && !after_chunk_index_; } struct Digits { - int digit_before_nine; - int num_nines; + char digit_before_nine; + size_t num_nines; }; // Get the next set of digits. @@ -288,35 +293,37 @@ class FractionalDigitGenerator { private: // Return the next digit. - int GetOneDigit() { - if (chunk_index_ < 0) return 0; + char GetOneDigit() { + if (!after_chunk_index_) + return 0; - uint32_t carry = 0; - for (int i = chunk_index_; i >= 0; --i) { - carry = MultiplyBy10WithCarry(&data_[i], carry); + char carry = 0; + for (size_t i = after_chunk_index_; i > 0; --i) { + carry = MultiplyBy10WithCarry(&data_[i - 1], carry); } // If the lowest chunk is now empty, remove it from view. - if (data_[chunk_index_] == 0) --chunk_index_; + if (data_[after_chunk_index_ - 1] == 0) + --after_chunk_index_; return carry; } FractionalDigitGenerator(absl::Span data, uint128 v, int exp) - : chunk_index_(exp / 32), data_(data) { + : after_chunk_index_(static_cast(exp / 32 + 1)), data_(data) { const int offset = exp % 32; // Right shift `v` by `exp` bits. - data_[chunk_index_] = static_cast(v << (32 - offset)); + data_[after_chunk_index_ - 1] = static_cast(v << (32 - offset)); v >>= offset; // Make sure we don't overflow the data. We already calculated that // non-zero bits fit, so we might not have space for leading zero bits. - for (int pos = chunk_index_; v; v >>= 32) + for (size_t pos = after_chunk_index_ - 1; v; v >>= 32) data_[--pos] = static_cast(v); // Fill next_digit_, as GetDigits expects it to be populated always. 
next_digit_ = GetOneDigit(); } - int next_digit_; - int chunk_index_; + char next_digit_; + size_t after_chunk_index_; absl::Span data_; }; @@ -362,7 +369,7 @@ char *PrintIntegralDigitsFromRightFast(uint128 v, char *p) { auto low = static_cast(v); while (high != 0) { - uint64_t carry = DivideBy10WithCarry(&high, 0); + char carry = DivideBy10WithCarry(&high, 0); carry = DivideBy10WithCarry(&low, carry); *--p = carry + '0'; } @@ -373,13 +380,15 @@ char *PrintIntegralDigitsFromRightFast(uint128 v, char *p) { // shifting. // Performs rounding if necessary to fit within `precision`. // Returns the pointer to one after the last character written. -char *PrintFractionalDigitsFast(uint64_t v, char *start, int exp, - int precision) { +char* PrintFractionalDigitsFast(uint64_t v, + char* start, + int exp, + size_t precision) { char *p = start; v <<= (64 - exp); while (precision > 0) { if (!v) return p; - *p++ = MultiplyBy10WithCarry(&v, uint64_t{0}) + '0'; + *p++ = MultiplyBy10WithCarry(&v, 0) + '0'; --precision; } @@ -393,8 +402,6 @@ char *PrintFractionalDigitsFast(uint64_t v, char *start, int exp, RoundToEven(p - 1); } - assert(precision == 0); - // Precision can only be zero here. return p; } @@ -402,8 +409,10 @@ char *PrintFractionalDigitsFast(uint64_t v, char *start, int exp, // after shifting. // Performs rounding if necessary to fit within `precision`. // Returns the pointer to one after the last character written. -char *PrintFractionalDigitsFast(uint128 v, char *start, int exp, - int precision) { +char* PrintFractionalDigitsFast(uint128 v, + char* start, + int exp, + size_t precision) { char *p = start; v <<= (128 - exp); auto high = static_cast(v >> 64); @@ -412,7 +421,7 @@ char *PrintFractionalDigitsFast(uint128 v, char *start, int exp, // While we have digits to print and `low` is not empty, do the long // multiplication. while (precision > 0 && low != 0) { - uint64_t carry = MultiplyBy10WithCarry(&low, uint64_t{0}); + char carry = MultiplyBy10WithCarry(&low, 0); carry = MultiplyBy10WithCarry(&high, carry); *p++ = carry + '0'; @@ -424,7 +433,7 @@ char *PrintFractionalDigitsFast(uint128 v, char *start, int exp, // above. while (precision > 0) { if (!high) return p; - *p++ = MultiplyBy10WithCarry(&high, uint64_t{0}) + '0'; + *p++ = MultiplyBy10WithCarry(&high, 0) + '0'; --precision; } @@ -438,14 +447,12 @@ char *PrintFractionalDigitsFast(uint128 v, char *start, int exp, RoundToEven(p - 1); } - assert(precision == 0); - // Precision can only be zero here. 
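// Both PrintFractionalDigitsFast overloads rely on the same idea: after the
// left shift, the value encodes a pure fraction value / 2^N, and multiplying
// by 10 pushes the next decimal digit out of the top of the limb as the carry.
// A self-contained sketch with a single 32-bit limb (demo-only names, not
// Abseil's API):

#include <cassert>
#include <cstdint>

static char NextFractionalDigit(uint32_t* v) {
  // *v encodes the fraction *v / 2^32; the high half of 10 * *v is the digit.
  const uint64_t tmp = uint64_t{10} * *v;
  *v = static_cast<uint32_t>(tmp);
  return static_cast<char>('0' + (tmp >> 32));
}

int main() {
  uint32_t v = 0xA0000000u;  // 0.625 * 2^32
  const char d1 = NextFractionalDigit(&v);
  const char d2 = NextFractionalDigit(&v);
  const char d3 = NextFractionalDigit(&v);
  assert(d1 == '6' && d2 == '2' && d3 == '5' && v == 0);
  return 0;
}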
return p; } struct FormatState { char sign_char; - int precision; + size_t precision; const FormatConversionSpecImpl &conv; FormatSinkImpl *sink; @@ -455,9 +462,9 @@ struct FormatState { }; struct Padding { - int left_spaces; - int zeros; - int right_spaces; + size_t left_spaces; + size_t zeros; + size_t right_spaces; }; Padding ExtraWidthToPadding(size_t total_size, const FormatState &state) { @@ -465,7 +472,7 @@ Padding ExtraWidthToPadding(size_t total_size, const FormatState &state) { static_cast(state.conv.width()) <= total_size) { return {0, 0, 0}; } - int missing_chars = state.conv.width() - total_size; + size_t missing_chars = static_cast(state.conv.width()) - total_size; if (state.conv.has_left_flag()) { return {0, 0, missing_chars}; } else if (state.conv.has_zero_flag()) { @@ -475,8 +482,10 @@ Padding ExtraWidthToPadding(size_t total_size, const FormatState &state) { } } -void FinalPrint(const FormatState &state, absl::string_view data, - int padding_offset, int trailing_zeros, +void FinalPrint(const FormatState& state, + absl::string_view data, + size_t padding_offset, + size_t trailing_zeros, absl::string_view data_postfix) { if (state.conv.width() < 0) { // No width specified. Fast-path. @@ -487,10 +496,10 @@ void FinalPrint(const FormatState &state, absl::string_view data, return; } - auto padding = ExtraWidthToPadding((state.sign_char != '\0' ? 1 : 0) + - data.size() + data_postfix.size() + - static_cast(trailing_zeros), - state); + auto padding = + ExtraWidthToPadding((state.sign_char != '\0' ? 1 : 0) + data.size() + + data_postfix.size() + trailing_zeros, + state); state.sink->Append(padding.left_spaces, ' '); if (state.sign_char != '\0') state.sink->Append(1, state.sign_char); @@ -547,15 +556,16 @@ void FormatFFast(Int v, int exp, const FormatState &state) { if (integral_digits_start[-1] != '0') --integral_digits_start; } - size_t size = fractional_digits_end - integral_digits_start; + size_t size = + static_cast(fractional_digits_end - integral_digits_start); // In `alt` mode (flag #) we keep the `.` even if there are no fractional // digits. In non-alt mode, we strip it. if (!state.ShouldPrintDot()) --size; FinalPrint(state, absl::string_view(integral_digits_start, size), /*padding_offset=*/0, - static_cast(state.precision - (fractional_digits_end - - fractional_digits_start)), + state.precision - static_cast(fractional_digits_end - + fractional_digits_start), /*data_postfix=*/""); } @@ -567,21 +577,22 @@ void FormatFFast(Int v, int exp, const FormatState &state) { void FormatFPositiveExpSlow(uint128 v, int exp, const FormatState &state) { BinaryToDecimal::RunConversion(v, exp, [&](BinaryToDecimal btd) { const size_t total_digits = - btd.TotalDigits() + - (state.ShouldPrintDot() ? static_cast(state.precision) + 1 : 0); + btd.TotalDigits() + (state.ShouldPrintDot() ? state.precision + 1 : 0); const auto padding = ExtraWidthToPadding( total_digits + (state.sign_char != '\0' ? 
1 : 0), state); state.sink->Append(padding.left_spaces, ' '); - if (state.sign_char != '\0') state.sink->Append(1, state.sign_char); + if (state.sign_char != '\0') + state.sink->Append(1, state.sign_char); state.sink->Append(padding.zeros, '0'); do { state.sink->Append(btd.CurrentDigits()); } while (btd.AdvanceDigits()); - if (state.ShouldPrintDot()) state.sink->Append(1, '.'); + if (state.ShouldPrintDot()) + state.sink->Append(1, '.'); state.sink->Append(state.precision, '0'); state.sink->Append(padding.right_spaces, ' '); }); @@ -594,8 +605,7 @@ void FormatFPositiveExpSlow(uint128 v, int exp, const FormatState &state) { // digits. void FormatFNegativeExpSlow(uint128 v, int exp, const FormatState &state) { const size_t total_digits = - /* 0 */ 1 + - (state.ShouldPrintDot() ? static_cast(state.precision) + 1 : 0); + /* 0 */ 1 + (state.ShouldPrintDot() ? state.precision + 1 : 0); auto padding = ExtraWidthToPadding(total_digits + (state.sign_char ? 1 : 0), state); padding.zeros += 1; @@ -606,7 +616,7 @@ void FormatFNegativeExpSlow(uint128 v, int exp, const FormatState &state) { if (state.ShouldPrintDot()) state.sink->Append(1, '.'); // Print digits - int digits_to_go = state.precision; + size_t digits_to_go = state.precision; FractionalDigitGenerator::RunConversion( v, exp, [&](FractionalDigitGenerator digit_gen) { @@ -666,7 +676,8 @@ void FormatFNegativeExpSlow(uint128 v, int exp, const FormatState &state) { template void FormatF(Int mantissa, int exp, const FormatState &state) { if (exp >= 0) { - const int total_bits = sizeof(Int) * 8 - LeadingZeros(mantissa) + exp; + const int total_bits = + static_cast(sizeof(Int) * 8) - LeadingZeros(mantissa) + exp; // Fallback to the slow stack-based approach if we can't do it in a 64 or // 128 bit state. @@ -686,9 +697,9 @@ void FormatF(Int mantissa, int exp, const FormatState &state) { // Grab the group of four bits (nibble) from `n`. E.g., nibble 1 corresponds to // bits 4-7. template -uint8_t GetNibble(Int n, int nibble_index) { +uint8_t GetNibble(Int n, size_t nibble_index) { constexpr Int mask_low_nibble = Int{0xf}; - int shift = nibble_index * 4; + int shift = static_cast(nibble_index * 4); n &= mask_low_nibble << shift; return static_cast((n >> shift) & 0xf); } @@ -696,9 +707,9 @@ uint8_t GetNibble(Int n, int nibble_index) { // Add one to the given nibble, applying carry to higher nibbles. Returns true // if overflow, false otherwise. template -bool IncrementNibble(int nibble_index, Int *n) { - constexpr int kShift = sizeof(Int) * 8 - 1; - constexpr int kNumNibbles = sizeof(Int) * 8 / 4; +bool IncrementNibble(size_t nibble_index, Int* n) { + constexpr size_t kShift = sizeof(Int) * 8 - 1; + constexpr size_t kNumNibbles = sizeof(Int) * 8 / 4; Int before = *n >> kShift; // Here we essentially want to take the number 1 and move it into the requsted // nibble, then add it to *n to effectively increment the nibble. However, @@ -706,28 +717,32 @@ bool IncrementNibble(int nibble_index, Int *n) { // i.e., if the nibble_index is out of range. So therefore we check for this // and if we are out of range we just add 0 which leaves *n unchanged, which // seems like the reasonable thing to do in that case. - *n += ((nibble_index >= kNumNibbles) ? 0 : (Int{1} << (nibble_index * 4))); + *n += ((nibble_index >= kNumNibbles) + ? 0 + : (Int{1} << static_cast(nibble_index * 4))); Int after = *n >> kShift; return (before && !after) || (nibble_index >= kNumNibbles); } // Return a mask with 1's in the given nibble and all lower nibbles. 
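// The %a path addresses the mantissa in 4-bit nibbles, now with size_t
// indices. A self-contained sketch of the addressing convention assumed above
// (nibble 0 is the least-significant hex digit; the helper name is demo-only):

#include <cassert>
#include <cstddef>
#include <cstdint>

static uint8_t GetNibbleDemo(uint64_t n, size_t nibble_index) {
  return static_cast<uint8_t>((n >> (4 * nibble_index)) & 0xF);
}

int main() {
  const uint64_t mantissa = 0x1A2B3C4Dull;
  assert(GetNibbleDemo(mantissa, 0) == 0xD);  // lowest hex digit
  assert(GetNibbleDemo(mantissa, 7) == 0x1);  // highest non-zero hex digit
  return 0;
}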
template -Int MaskUpToNibbleInclusive(int nibble_index) { - constexpr int kNumNibbles = sizeof(Int) * 8 / 4; +Int MaskUpToNibbleInclusive(size_t nibble_index) { + constexpr size_t kNumNibbles = sizeof(Int) * 8 / 4; static const Int ones = ~Int{0}; - return ones >> std::max(0, 4 * (kNumNibbles - nibble_index - 1)); + ++nibble_index; + return ones >> static_cast( + 4 * (std::max(kNumNibbles, nibble_index) - nibble_index)); } // Return a mask with 1's below the given nibble. template -Int MaskUpToNibbleExclusive(int nibble_index) { - return nibble_index <= 0 ? 0 : MaskUpToNibbleInclusive(nibble_index - 1); +Int MaskUpToNibbleExclusive(size_t nibble_index) { + return nibble_index == 0 ? 0 : MaskUpToNibbleInclusive(nibble_index - 1); } template -Int MoveToNibble(uint8_t nibble, int nibble_index) { - return Int{nibble} << (4 * nibble_index); +Int MoveToNibble(uint8_t nibble, size_t nibble_index) { + return Int{nibble} << static_cast(4 * nibble_index); } // Given mantissa size, find optimal # of mantissa bits to put in initial digit. @@ -744,10 +759,10 @@ Int MoveToNibble(uint8_t nibble, int nibble_index) { // a multiple of four. Once again, the goal is to have all fractional digits // represent real precision. template -constexpr int HexFloatLeadingDigitSizeInBits() { +constexpr size_t HexFloatLeadingDigitSizeInBits() { return std::numeric_limits::digits % 4 > 0 - ? std::numeric_limits::digits % 4 - : 4; + ? static_cast(std::numeric_limits::digits % 4) + : size_t{4}; } // This function captures the rounding behavior of glibc for hex float @@ -757,16 +772,17 @@ constexpr int HexFloatLeadingDigitSizeInBits() { // point that is not followed by 800000..., it disregards the parity and rounds // up if > 8 and rounds down if < 8. template -bool HexFloatNeedsRoundUp(Int mantissa, int final_nibble_displayed, +bool HexFloatNeedsRoundUp(Int mantissa, + size_t final_nibble_displayed, uint8_t leading) { // If the last nibble (hex digit) to be displayed is the lowest on in the // mantissa then that means that we don't have any further nibbles to inform // rounding, so don't round. - if (final_nibble_displayed <= 0) { + if (final_nibble_displayed == 0) { return false; } - int rounding_nibble_idx = final_nibble_displayed - 1; - constexpr int kTotalNibbles = sizeof(Int) * 8 / 4; + size_t rounding_nibble_idx = final_nibble_displayed - 1; + constexpr size_t kTotalNibbles = sizeof(Int) * 8 / 4; assert(final_nibble_displayed <= kTotalNibbles); Int mantissa_up_to_rounding_nibble_inclusive = mantissa & MaskUpToNibbleInclusive(rounding_nibble_idx); @@ -793,7 +809,7 @@ struct HexFloatTypeParams { } int min_exponent; - int leading_digit_size_bits; + size_t leading_digit_size_bits; }; // Hex Float Rounding. First check if we need to round; if so, then we do that @@ -803,10 +819,12 @@ struct HexFloatTypeParams { template void FormatARound(bool precision_specified, const FormatState &state, uint8_t *leading, Int *mantissa, int *exp) { - constexpr int kTotalNibbles = sizeof(Int) * 8 / 4; + constexpr size_t kTotalNibbles = sizeof(Int) * 8 / 4; // Index of the last nibble that we could display given precision. - int final_nibble_displayed = - precision_specified ? std::max(0, (kTotalNibbles - state.precision)) : 0; + size_t final_nibble_displayed = + precision_specified + ? (std::max(kTotalNibbles, state.precision) - state.precision) + : 0; if (HexFloatNeedsRoundUp(*mantissa, final_nibble_displayed, *leading)) { // Need to round up. 
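    // Note the recurring pattern above: with unsigned size_t operands,
    // `kTotalNibbles - state.precision` would wrap around whenever the
    // requested precision exceeds the nibble count, so the code clamps first
    // (`std::max(kTotalNibbles, state.precision) - state.precision`), the
    // unsigned equivalent of the old `std::max(0, kTotalNibbles - precision)`.
    // MaskUpToNibbleInclusive uses the same trick for its shift amount.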
bool overflow = IncrementNibble(final_nibble_displayed, mantissa); @@ -830,9 +848,9 @@ void FormatARound(bool precision_specified, const FormatState &state, template void FormatANormalize(const HexFloatTypeParams float_traits, uint8_t *leading, Int *mantissa, int *exp) { - constexpr int kIntBits = sizeof(Int) * 8; + constexpr size_t kIntBits = sizeof(Int) * 8; static const Int kHighIntBit = Int{1} << (kIntBits - 1); - const int kLeadDigitBitsCount = float_traits.leading_digit_size_bits; + const size_t kLeadDigitBitsCount = float_traits.leading_digit_size_bits; // Normalize mantissa so that highest bit set is in MSB position, unless we // get interrupted by the exponent threshold. while (*mantissa && !(*mantissa & kHighIntBit)) { @@ -846,18 +864,18 @@ void FormatANormalize(const HexFloatTypeParams float_traits, uint8_t *leading, } // Extract bits for leading digit then shift them away leaving the // fractional part. - *leading = - static_cast(*mantissa >> (kIntBits - kLeadDigitBitsCount)); - *exp -= (*mantissa != 0) ? kLeadDigitBitsCount : *exp; - *mantissa <<= kLeadDigitBitsCount; + *leading = static_cast( + *mantissa >> static_cast(kIntBits - kLeadDigitBitsCount)); + *exp -= (*mantissa != 0) ? static_cast(kLeadDigitBitsCount) : *exp; + *mantissa <<= static_cast(kLeadDigitBitsCount); } template void FormatA(const HexFloatTypeParams float_traits, Int mantissa, int exp, bool uppercase, const FormatState &state) { // Int properties. - constexpr int kIntBits = sizeof(Int) * 8; - constexpr int kTotalNibbles = sizeof(Int) * 8 / 4; + constexpr size_t kIntBits = sizeof(Int) * 8; + constexpr size_t kTotalNibbles = sizeof(Int) * 8 / 4; // Did the user specify a precision explicitly? const bool precision_specified = state.conv.precision() >= 0; @@ -903,16 +921,19 @@ void FormatA(const HexFloatTypeParams float_traits, Int mantissa, int exp, } // ============ Fractional Digits ============ - int digits_emitted = 0; + size_t digits_emitted = 0; while (mantissa > 0) { *digits_iter++ = digits[GetNibble(mantissa, kTotalNibbles - 1)]; mantissa <<= 4; ++digits_emitted; } - int trailing_zeros = - precision_specified ? state.precision - digits_emitted : 0; - assert(trailing_zeros >= 0); - auto digits_result = string_view(digits_buffer, digits_iter - digits_buffer); + size_t trailing_zeros = 0; + if (precision_specified) { + assert(state.precision >= digits_emitted); + trailing_zeros = state.precision - digits_emitted; + } + auto digits_result = string_view( + digits_buffer, static_cast(digits_iter - digits_buffer)); // =============== Exponent ================== constexpr size_t kBufSizeForExpDecRepr = @@ -925,11 +946,11 @@ void FormatA(const HexFloatTypeParams float_traits, Int mantissa, int exp, numbers_internal::FastIntToBuffer(exp < 0 ? -exp : exp, exp_buffer + 2); // ============ Assemble Result ============== - FinalPrint(state, // - digits_result, // 0xN.NNN... - 2, // offset in `data` to start padding if needed. - trailing_zeros, // num remaining mantissa padding zeros - exp_buffer); // exponent + FinalPrint(state, + digits_result, // 0xN.NNN... 
+ 2, // offset of any padding + static_cast(trailing_zeros), // remaining mantissa padding + exp_buffer); // exponent } char *CopyStringTo(absl::string_view v, char *out) { @@ -961,10 +982,10 @@ bool FallbackToSnprintf(const Float v, const FormatConversionSpecImpl &conv, int n = snprintf(&space[0], space.size(), fmt, w, p, v); if (n < 0) return false; if (static_cast(n) < space.size()) { - result = absl::string_view(space.data(), n); + result = absl::string_view(space.data(), static_cast(n)); break; } - space.resize(n + 1); + space.resize(static_cast(n) + 1); } sink->Append(result); return true; @@ -972,13 +993,13 @@ bool FallbackToSnprintf(const Float v, const FormatConversionSpecImpl &conv, // 128-bits in decimal: ceil(128*log(2)/log(10)) // or std::numeric_limits<__uint128_t>::digits10 -constexpr int kMaxFixedPrecision = 39; +constexpr size_t kMaxFixedPrecision = 39; -constexpr int kBufferLength = /*sign*/ 1 + - /*integer*/ kMaxFixedPrecision + - /*point*/ 1 + - /*fraction*/ kMaxFixedPrecision + - /*exponent e+123*/ 5; +constexpr size_t kBufferLength = /*sign*/ 1 + + /*integer*/ kMaxFixedPrecision + + /*point*/ 1 + + /*fraction*/ kMaxFixedPrecision + + /*exponent e+123*/ 5; struct Buffer { void push_front(char c) { @@ -1001,7 +1022,7 @@ struct Buffer { char last_digit() const { return end[-1] == '.' ? end[-2] : end[-1]; } - int size() const { return static_cast(end - begin); } + size_t size() const { return static_cast(end - begin); } char data[kBufferLength]; char *begin; @@ -1030,8 +1051,9 @@ bool ConvertNonNumericFloats(char sign_char, Float v, return false; } - return sink->PutPaddedString(string_view(text, ptr - text), conv.width(), -1, - conv.has_left_flag()); + return sink->PutPaddedString( + string_view(text, static_cast(ptr - text)), conv.width(), -1, + conv.has_left_flag()); } // Round up the last digit of the value. @@ -1068,12 +1090,12 @@ void PrintExponent(int exp, char e, Buffer *out) { } // Exponent digits. if (exp > 99) { - out->push_back(exp / 100 + '0'); - out->push_back(exp / 10 % 10 + '0'); - out->push_back(exp % 10 + '0'); + out->push_back(static_cast(exp / 100 + '0')); + out->push_back(static_cast(exp / 10 % 10 + '0')); + out->push_back(static_cast(exp % 10 + '0')); } else { - out->push_back(exp / 10 + '0'); - out->push_back(exp % 10 + '0'); + out->push_back(static_cast(exp / 10 + '0')); + out->push_back(static_cast(exp % 10 + '0')); } } @@ -1115,8 +1137,8 @@ Decomposed Decompose(Float v) { // In Fixed mode, we add a '.' at the end. // In Precision mode, we add a '.' after the first digit. template -int PrintIntegralDigits(Int digits, Buffer *out) { - int printed = 0; +size_t PrintIntegralDigits(Int digits, Buffer* out) { + size_t printed = 0; if (digits) { for (; digits; digits /= 10) out->push_front(digits % 10 + '0'); printed = out->size(); @@ -1135,10 +1157,10 @@ int PrintIntegralDigits(Int digits, Buffer *out) { } // Back out 'extra_digits' digits and round up if necessary. -bool RemoveExtraPrecision(int extra_digits, bool has_leftover_value, - Buffer *out, int *exp_out) { - if (extra_digits <= 0) return false; - +void RemoveExtraPrecision(size_t extra_digits, + bool has_leftover_value, + Buffer* out, + int* exp_out) { // Back out the extra digits out->end -= extra_digits; @@ -1158,15 +1180,17 @@ bool RemoveExtraPrecision(int extra_digits, bool has_leftover_value, if (needs_to_round_up) { RoundUp(out, exp_out); } - return true; } // Print the value into the buffer. 
// This will not include the exponent, which will be returned in 'exp_out' for // Precision mode. template -bool FloatToBufferImpl(Int int_mantissa, int exp, int precision, Buffer *out, - int *exp_out) { +bool FloatToBufferImpl(Int int_mantissa, + int exp, + size_t precision, + Buffer* out, + int* exp_out) { assert((CanFitMantissa())); const int int_bits = std::numeric_limits::digits; @@ -1182,14 +1206,16 @@ bool FloatToBufferImpl(Int int_mantissa, int exp, int precision, Buffer *out, // The value will overflow the Int return false; } - int digits_printed = PrintIntegralDigits(int_mantissa << exp, out); - int digits_to_zero_pad = precision; + size_t digits_printed = PrintIntegralDigits(int_mantissa << exp, out); + size_t digits_to_zero_pad = precision; if (mode == FormatStyle::Precision) { - *exp_out = digits_printed - 1; - digits_to_zero_pad -= digits_printed - 1; - if (RemoveExtraPrecision(-digits_to_zero_pad, false, out, exp_out)) { + *exp_out = static_cast(digits_printed - 1); + if (digits_to_zero_pad < digits_printed - 1) { + RemoveExtraPrecision(digits_printed - 1 - digits_to_zero_pad, false, + out, exp_out); return true; } + digits_to_zero_pad -= digits_printed - 1; } for (; digits_to_zero_pad-- > 0;) out->push_back('0'); return true; @@ -1203,10 +1229,10 @@ bool FloatToBufferImpl(Int int_mantissa, int exp, int precision, Buffer *out, const Int mask = (Int{1} << exp) - 1; // Print the integral part first. - int digits_printed = PrintIntegralDigits(int_mantissa >> exp, out); + size_t digits_printed = PrintIntegralDigits(int_mantissa >> exp, out); int_mantissa &= mask; - int fractional_count = precision; + size_t fractional_count = precision; if (mode == FormatStyle::Precision) { if (digits_printed == 0) { // Find the first non-zero digit, when in Precision mode. @@ -1222,20 +1248,21 @@ bool FloatToBufferImpl(Int int_mantissa, int exp, int precision, Buffer *out, int_mantissa &= mask; } else { // We already have a digit, and a '.' - *exp_out = digits_printed - 1; - fractional_count -= *exp_out; - if (RemoveExtraPrecision(-fractional_count, int_mantissa != 0, out, - exp_out)) { + *exp_out = static_cast(digits_printed - 1); + if (fractional_count < digits_printed - 1) { // If we had enough digits, return right away. // The code below will try to round again otherwise. + RemoveExtraPrecision(digits_printed - 1 - fractional_count, + int_mantissa != 0, out, exp_out); return true; } + fractional_count -= digits_printed - 1; } } auto get_next_digit = [&] { int_mantissa *= 10; - int digit = static_cast(int_mantissa >> exp); + char digit = static_cast(int_mantissa >> exp); int_mantissa &= mask; return digit; }; @@ -1245,7 +1272,7 @@ bool FloatToBufferImpl(Int int_mantissa, int exp, int precision, Buffer *out, out->push_back(get_next_digit() + '0'); } - int next_digit = get_next_digit(); + char next_digit = get_next_digit(); if (next_digit > 5 || (next_digit == 5 && (int_mantissa || out->last_digit() % 2 == 1))) { RoundUp(out, exp_out); @@ -1255,24 +1282,25 @@ bool FloatToBufferImpl(Int int_mantissa, int exp, int precision, Buffer *out, } template -bool FloatToBuffer(Decomposed decomposed, int precision, Buffer *out, - int *exp) { +bool FloatToBuffer(Decomposed decomposed, + size_t precision, + Buffer* out, + int* exp) { if (precision > kMaxFixedPrecision) return false; // Try with uint64_t. 
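// FloatToBuffer tries a 64-bit mantissa first and, as the continuation below
// shows, falls back to __uint128_t (and ultimately to snprintf) only when the
// shifted mantissa cannot fit, so the common case never touches 128-bit
// arithmetic. The decomposition it consumes appears to be the usual
// frexp/ldexp one (value == int_mantissa * 2^exp); a self-contained sketch,
// assuming only the standard library:

#include <cmath>
#include <cstdint>
#include <cstdio>
#include <limits>

int main() {
  const double v = 6.25;
  int exp = 0;
  const double frac = std::frexp(v, &exp);  // v == frac * 2^exp, 0.5 <= frac < 1
  const int digits = std::numeric_limits<double>::digits;    // 53 for double
  const uint64_t mantissa =
      static_cast<uint64_t>(std::ldexp(frac, digits));        // integral mantissa
  exp -= digits;                                              // v == mantissa * 2^exp
  std::printf("%llu * 2^%d\n", static_cast<unsigned long long>(mantissa), exp);
  return 0;
}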
if (CanFitMantissa() && FloatToBufferImpl( - static_cast(decomposed.mantissa), - static_cast(decomposed.exponent), precision, out, exp)) + static_cast(decomposed.mantissa), decomposed.exponent, + precision, out, exp)) return true; #if defined(ABSL_HAVE_INTRINSIC_INT128) // If that is not enough, try with __uint128_t. return CanFitMantissa() && FloatToBufferImpl<__uint128_t, Float, mode>( - static_cast<__uint128_t>(decomposed.mantissa), - static_cast<__uint128_t>(decomposed.exponent), precision, out, - exp); + static_cast<__uint128_t>(decomposed.mantissa), decomposed.exponent, + precision, out, exp); #endif return false; } @@ -1280,12 +1308,15 @@ bool FloatToBuffer(Decomposed decomposed, int precision, Buffer *out, void WriteBufferToSink(char sign_char, absl::string_view str, const FormatConversionSpecImpl &conv, FormatSinkImpl *sink) { - int left_spaces = 0, zeros = 0, right_spaces = 0; - int missing_chars = - conv.width() >= 0 ? std::max(conv.width() - static_cast(str.size()) - - static_cast(sign_char != 0), - 0) - : 0; + size_t left_spaces = 0, zeros = 0, right_spaces = 0; + size_t missing_chars = 0; + if (conv.width() >= 0) { + const size_t conv_width_size_t = static_cast(conv.width()); + const size_t existing_chars = + str.size() + static_cast(sign_char != 0); + if (conv_width_size_t > existing_chars) + missing_chars = conv_width_size_t - existing_chars; + } if (conv.has_left_flag()) { right_spaces = missing_chars; } else if (conv.has_zero_flag()) { @@ -1321,7 +1352,8 @@ bool FloatToSink(const Float v, const FormatConversionSpecImpl &conv, return true; } - int precision = conv.precision() < 0 ? 6 : conv.precision(); + size_t precision = + conv.precision() < 0 ? 6 : static_cast(conv.precision()); int exp = 0; @@ -1348,12 +1380,12 @@ bool FloatToSink(const Float v, const FormatConversionSpecImpl &conv, &buffer); } else if (c == FormatConversionCharInternal::g || c == FormatConversionCharInternal::G) { - precision = std::max(0, precision - 1); + precision = std::max(precision, size_t{1}) - 1; if (!FloatToBuffer(decomposed, precision, &buffer, &exp)) { return FallbackToSnprintf(v, conv, sink); } - if (precision + 1 > exp && exp >= -4) { + if ((exp < 0 || precision + 1 > static_cast(exp)) && exp >= -4) { if (exp < 0) { // Have 1.23456, needs 0.00123456 // Move the first digit @@ -1388,9 +1420,11 @@ bool FloatToSink(const Float v, const FormatConversionSpecImpl &conv, return false; } - WriteBufferToSink(sign_char, - absl::string_view(buffer.begin, buffer.end - buffer.begin), - conv, sink); + WriteBufferToSink( + sign_char, + absl::string_view(buffer.begin, + static_cast(buffer.end - buffer.begin)), + conv, sink); return true; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/parser.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/parser.cc index 2c9c07dacc..13731ee247 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/parser.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/parser.cc @@ -56,7 +56,7 @@ ABSL_CONST_INIT const ConvTag kTags[256] = { CC::X, {}, {}, {}, {}, {}, {}, {}, // XYZ[\]^_ {}, CC::a, {}, CC::c, CC::d, CC::e, CC::f, CC::g, // `abcdefg LM::h, CC::i, LM::j, {}, LM::l, {}, CC::n, CC::o, // hijklmno - CC::p, LM::q, {}, CC::s, LM::t, CC::u, {}, {}, // pqrstuvw + CC::p, LM::q, {}, CC::s, LM::t, CC::u, CC::v, {}, // pqrstuvw CC::x, {}, LM::z, {}, {}, {}, {}, {}, // xyz{|}! 
{}, {}, {}, {}, {}, {}, {}, {}, // 80-87 {}, {}, {}, {}, {}, {}, {}, {}, // 88-8f @@ -202,6 +202,8 @@ const char *ConsumeConversion(const char *pos, const char *const end, auto tag = GetTagForChar(c); + if (ABSL_PREDICT_FALSE(c == 'v' && (pos - original_pos) != 1)) return nullptr; + if (ABSL_PREDICT_FALSE(!tag.is_conv())) { if (ABSL_PREDICT_FALSE(!tag.is_length())) return nullptr; @@ -219,6 +221,8 @@ const char *ConsumeConversion(const char *pos, const char *const end, conv->length_mod = length_mod; } tag = GetTagForChar(c); + + if (ABSL_PREDICT_FALSE(c == 'v')) return nullptr; if (ABSL_PREDICT_FALSE(!tag.is_conv())) return nullptr; } @@ -312,11 +316,11 @@ bool ParsedFormatBase::MatchesConversions( std::initializer_list convs) const { std::unordered_set used; auto add_if_valid_conv = [&](int pos, char c) { - if (static_cast(pos) > convs.size() || - !Contains(convs.begin()[pos - 1], c)) - return false; - used.insert(pos); - return true; + if (static_cast(pos) > convs.size() || + !Contains(convs.begin()[pos - 1], c)) + return false; + used.insert(pos); + return true; }; for (const ConversionItem &item : items_) { if (!item.is_conversion) continue; diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/parser.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/parser.h index 32b91d034d..a81bac8333 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/parser.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/parser.h @@ -155,10 +155,11 @@ bool ParseFormatString(string_view src, Consumer consumer) { static_cast(memchr(p, '%', static_cast(end - p))); if (!percent) { // We found the last substring. - return consumer.Append(string_view(p, end - p)); + return consumer.Append(string_view(p, static_cast(end - p))); } // We found a percent, so push the text run then process the percent. - if (ABSL_PREDICT_FALSE(!consumer.Append(string_view(p, percent - p)))) { + if (ABSL_PREDICT_FALSE(!consumer.Append( + string_view(p, static_cast(percent - p))))) { return false; } if (ABSL_PREDICT_FALSE(percent + 1 >= end)) return false; @@ -189,7 +190,8 @@ bool ParseFormatString(string_view src, Consumer consumer) { p = ConsumeUnboundConversion(percent + 1, end, &conv, &next_arg); if (ABSL_PREDICT_FALSE(p == nullptr)) return false; if (ABSL_PREDICT_FALSE(!consumer.ConvertOne( - conv, string_view(percent + 1, p - (percent + 1))))) { + conv, string_view(percent + 1, + static_cast(p - (percent + 1)))))) { return false; } } else { diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/parser_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/parser_test.cc index fe0d296360..c3e825fe02 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/parser_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_format/parser_test.cc @@ -110,10 +110,13 @@ TEST_F(ConsumeUnboundConversionTest, ConsumeSpecification) { {__LINE__, "ba", "", "ba"}, // 'b' is invalid {__LINE__, "l", "", "l" }, // just length mod isn't okay {__LINE__, "d", "d", "" }, // basic + {__LINE__, "v", "v", "" }, // basic {__LINE__, "d ", "d", " " }, // leave suffix {__LINE__, "dd", "d", "d" }, // don't be greedy {__LINE__, "d9", "d", "9" }, // leave non-space suffix {__LINE__, "dzz", "d", "zz"}, // length mod as suffix + {__LINE__, "3v", "", "3v"}, // 'v' cannot have modifiers + {__LINE__, "hv", "", "hv"}, // 'v' cannot have modifiers {__LINE__, "1$*2$d", "1$*2$d", "" }, // arg indexing and * allowed. 
{__LINE__, "0-14.3hhd", "0-14.3hhd", ""}, // precision, width {__LINE__, " 0-+#14.3hhd", " 0-+#14.3hhd", ""}, // flags diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_split_internal.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_split_internal.h index e766421617..35edf3aa43 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_split_internal.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/str_split_internal.h @@ -132,7 +132,8 @@ class SplitIterator { const absl::string_view text = splitter_->text(); const absl::string_view d = delimiter_.Find(text, pos_); if (d.data() == text.data() + text.size()) state_ = kLastState; - curr_ = text.substr(pos_, d.data() - (text.data() + pos_)); + curr_ = text.substr(pos_, + static_cast(d.data() - (text.data() + pos_))); pos_ += curr_.size() + d.size(); } while (!predicate_(curr_)); return *this; diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/string_constant.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/string_constant.h index b358efddbd..f68b17d75e 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/string_constant.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/string_constant.h @@ -50,8 +50,10 @@ struct StringConstant { "The input string_view must point to constant data."); }; +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL template -constexpr absl::string_view StringConstant::value; // NOLINT +constexpr absl::string_view StringConstant::value; +#endif // Factory function for `StringConstant` instances. // It supports callables that have a constexpr default constructor and a diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/stringify_sink.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/stringify_sink.cc new file mode 100644 index 0000000000..7c6995abb1 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/stringify_sink.cc @@ -0,0 +1,28 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "absl/strings/internal/stringify_sink.h" +namespace absl { +ABSL_NAMESPACE_BEGIN +namespace strings_internal { + +void StringifySink::Append(size_t count, char ch) { buffer_.append(count, ch); } + +void StringifySink::Append(string_view v) { + buffer_.append(v.data(), v.size()); +} + +} // namespace strings_internal +ABSL_NAMESPACE_END +} // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/internal/stringify_sink.h b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/stringify_sink.h new file mode 100644 index 0000000000..fc3747bb72 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/internal/stringify_sink.h @@ -0,0 +1,57 @@ +// Copyright 2022 The Abseil Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef ABSL_STRINGS_INTERNAL_STRINGIFY_SINK_H_ +#define ABSL_STRINGS_INTERNAL_STRINGIFY_SINK_H_ + +#include +#include +#include + +#include "absl/strings/string_view.h" + +namespace absl { +ABSL_NAMESPACE_BEGIN + +namespace strings_internal { +class StringifySink { + public: + void Append(size_t count, char ch); + + void Append(string_view v); + + // Support `absl::Format(&sink, format, args...)`. + friend void AbslFormatFlush(StringifySink* sink, absl::string_view v) { + sink->Append(v); + } + + private: + template + friend string_view ExtractStringification(StringifySink& sink, const T& v); + + std::string buffer_; +}; + +template +string_view ExtractStringification(StringifySink& sink, const T& v) { + AbslStringify(sink, v); + return sink.buffer_; +} + +} // namespace strings_internal + +ABSL_NAMESPACE_END +} // namespace absl + +#endif // ABSL_STRINGS_INTERNAL_STRINGIFY_SINK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/numbers.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/numbers.cc index cbd84c918b..2987158e07 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/numbers.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/numbers.cc @@ -190,32 +190,32 @@ char* numbers_internal::FastIntToBuffer(uint32_t i, char* buffer) { if (i >= 1000) goto lt10_000; digits = i / 100; i -= digits * 100; - *buffer++ = '0' + digits; + *buffer++ = '0' + static_cast(digits); goto lt100; } if (i < 1000000) { // 1,000,000 if (i >= 100000) goto lt1_000_000; digits = i / 10000; // 10,000 i -= digits * 10000; - *buffer++ = '0' + digits; + *buffer++ = '0' + static_cast(digits); goto lt10_000; } if (i < 100000000) { // 100,000,000 if (i >= 10000000) goto lt100_000_000; digits = i / 1000000; // 1,000,000 i -= digits * 1000000; - *buffer++ = '0' + digits; + *buffer++ = '0' + static_cast(digits); goto lt1_000_000; } // we already know that i < 1,000,000,000 digits = i / 100000000; // 100,000,000 i -= digits * 100000000; - *buffer++ = '0' + digits; + *buffer++ = '0' + static_cast(digits); goto lt100_000_000; } char* numbers_internal::FastIntToBuffer(int32_t i, char* buffer) { - uint32_t u = i; + uint32_t u = static_cast(i); if (i < 0) { *buffer++ = '-'; // We need to do the negation in modular (i.e., "unsigned") @@ -268,7 +268,7 @@ char* numbers_internal::FastIntToBuffer(uint64_t i, char* buffer) { } char* numbers_internal::FastIntToBuffer(int64_t i, char* buffer) { - uint64_t u = i; + uint64_t u = static_cast(i); if (i < 0) { *buffer++ = '-'; u = 0 - u; @@ -329,7 +329,7 @@ static std::pair PowFive(uint64_t num, int expfive) { result = Mul32(result, 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5 * 5); expfive -= 13; } - constexpr int powers_of_five[13] = { + constexpr uint32_t powers_of_five[13] = { 1, 5, 5 * 5, @@ -404,14 +404,14 @@ static ExpDigits SplitToSix(const double value) { // we multiply it by 65536 and see if the fractional part is close to 32768. // (The number doesn't have to be a power of two,but powers of two are faster) uint64_t d64k = d * 65536; - int dddddd; // A 6-digit decimal integer. + uint32_t dddddd; // A 6-digit decimal integer. 
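  // The check below looks for values whose fractional part, scaled by 65536,
  // lands essentially on a rounding boundary; only those rare cases are redone
  // in 128-bit arithmetic, since everywhere else the double multiplication
  // above already carries enough precision to round the six digits correctly.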
if ((d64k % 65536) == 32767 || (d64k % 65536) == 32768) { // OK, it's fairly likely that precision was lost above, which is // not a surprise given only 52 mantissa bits are available. Therefore // redo the calculation using 128-bit numbers. (64 bits are not enough). // Start out with digits rounded down; maybe add one below. - dddddd = static_cast(d64k / 65536); + dddddd = static_cast(d64k / 65536); // mantissa is a 64-bit integer representing M.mmm... * 2^63. The actual // value we're representing, of course, is M.mmm... * 2^exp2. @@ -461,7 +461,7 @@ static ExpDigits SplitToSix(const double value) { } } else { // Here, we are not close to the edge. - dddddd = static_cast((d64k + 32768) / 65536); + dddddd = static_cast((d64k + 32768) / 65536); } if (dddddd == 1000000) { dddddd = 100000; @@ -469,7 +469,7 @@ static ExpDigits SplitToSix(const double value) { } exp_dig.exponent = exp; - int two_digits = dddddd / 10000; + uint32_t two_digits = dddddd / 10000; dddddd -= two_digits * 10000; numbers_internal::PutTwoDigits(two_digits, &exp_dig.digits[0]); @@ -499,7 +499,7 @@ size_t numbers_internal::SixDigitsToBuffer(double d, char* const buffer) { if (std::signbit(d)) *out++ = '-'; *out++ = '0'; *out = 0; - return out - buffer; + return static_cast(out - buffer); } if (d < 0) { *out++ = '-'; @@ -507,7 +507,7 @@ size_t numbers_internal::SixDigitsToBuffer(double d, char* const buffer) { } if (d > std::numeric_limits::max()) { strcpy(out, "inf"); // NOLINT(runtime/printf) - return out + 3 - buffer; + return static_cast(out + 3 - buffer); } auto exp_dig = SplitToSix(d); @@ -519,7 +519,7 @@ size_t numbers_internal::SixDigitsToBuffer(double d, char* const buffer) { case 5: memcpy(out, &digits[0], 6), out += 6; *out = 0; - return out - buffer; + return static_cast(out - buffer); case 4: memcpy(out, &digits[0], 5), out += 5; if (digits[5] != '0') { @@ -527,7 +527,7 @@ size_t numbers_internal::SixDigitsToBuffer(double d, char* const buffer) { *out++ = digits[5]; } *out = 0; - return out - buffer; + return static_cast(out - buffer); case 3: memcpy(out, &digits[0], 4), out += 4; if ((digits[5] | digits[4]) != '0') { @@ -536,7 +536,7 @@ size_t numbers_internal::SixDigitsToBuffer(double d, char* const buffer) { if (digits[5] != '0') *out++ = digits[5]; } *out = 0; - return out - buffer; + return static_cast(out - buffer); case 2: memcpy(out, &digits[0], 3), out += 3; *out++ = '.'; @@ -545,7 +545,7 @@ size_t numbers_internal::SixDigitsToBuffer(double d, char* const buffer) { while (out[-1] == '0') --out; if (out[-1] == '.') --out; *out = 0; - return out - buffer; + return static_cast(out - buffer); case 1: memcpy(out, &digits[0], 2), out += 2; *out++ = '.'; @@ -554,7 +554,7 @@ size_t numbers_internal::SixDigitsToBuffer(double d, char* const buffer) { while (out[-1] == '0') --out; if (out[-1] == '.') --out; *out = 0; - return out - buffer; + return static_cast(out - buffer); case 0: memcpy(out, &digits[0], 1), out += 1; *out++ = '.'; @@ -563,7 +563,7 @@ size_t numbers_internal::SixDigitsToBuffer(double d, char* const buffer) { while (out[-1] == '0') --out; if (out[-1] == '.') --out; *out = 0; - return out - buffer; + return static_cast(out - buffer); case -4: out[2] = '0'; ++out; @@ -582,7 +582,7 @@ size_t numbers_internal::SixDigitsToBuffer(double d, char* const buffer) { out += 6; while (out[-1] == '0') --out; *out = 0; - return out - buffer; + return static_cast(out - buffer); } assert(exp < -4 || exp >= 6); out[0] = digits[0]; @@ -601,12 +601,12 @@ size_t numbers_internal::SixDigitsToBuffer(double d, char* 
const buffer) { if (exp > 99) { int dig1 = exp / 100; exp -= dig1 * 100; - *out++ = '0' + dig1; + *out++ = '0' + static_cast(dig1); } - PutTwoDigits(exp, out); + PutTwoDigits(static_cast(exp), out); out += 2; *out = 0; - return out - buffer; + return static_cast(out - buffer); } namespace { @@ -642,10 +642,12 @@ inline bool safe_parse_sign_and_base(absl::string_view* text /*inout*/, int base = *base_ptr; // Consume whitespace. - while (start < end && absl::ascii_isspace(start[0])) { + while (start < end && + absl::ascii_isspace(static_cast(start[0]))) { ++start; } - while (start < end && absl::ascii_isspace(end[-1])) { + while (start < end && + absl::ascii_isspace(static_cast(end[-1]))) { --end; } if (start >= end) { @@ -694,7 +696,7 @@ inline bool safe_parse_sign_and_base(absl::string_view* text /*inout*/, } else { return false; } - *text = absl::string_view(start, end - start); + *text = absl::string_view(start, static_cast(end - start)); *base_ptr = base; return true; } @@ -757,8 +759,8 @@ struct LookupTables { // // uint128& operator/=(uint128) is not constexpr, so hardcode the resulting // array to avoid a static initializer. -template<> -const uint128 LookupTables::kVmaxOverBase[] = { +template <> +ABSL_CONST_INIT const uint128 LookupTables::kVmaxOverBase[] = { 0, 0, MakeUint128(9223372036854775807u, 18446744073709551615u), @@ -809,8 +811,8 @@ const uint128 LookupTables::kVmaxOverBase[] = { // // int128& operator/=(int128) is not constexpr, so hardcode the resulting array // to avoid a static initializer. -template<> -const int128 LookupTables::kVmaxOverBase[] = { +template <> +ABSL_CONST_INIT const int128 LookupTables::kVmaxOverBase[] = { 0, 0, MakeInt128(4611686018427387903, 18446744073709551615u), @@ -862,8 +864,8 @@ const int128 LookupTables::kVmaxOverBase[] = { // // int128& operator/=(int128) is not constexpr, so hardcode the resulting array // to avoid a static initializer. 
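// Standalone sketch (not from the patched sources): the two warning-silencing
// patterns this hunk applies throughout numbers.cc. (1) `out - buffer` and
// `end - start` are signed ptrdiff_t values converted to size_t, so the sign
// conversion is made explicit with static_cast. (2) plain `char` may be
// signed and the ASCII classifiers expect an unsigned value, so each byte is
// converted first (the patch uses absl::ascii_isspace; this sketch uses the
// standard <cctype> classifier).
#include <cctype>
#include <cstddef>
#include <string_view>

inline size_t WrittenLength(const char* begin, const char* end) {
  return static_cast<size_t>(end - begin);  // pattern (1)
}

inline std::string_view TrimAscii(std::string_view s) {
  while (!s.empty() && std::isspace(static_cast<unsigned char>(s.front()))) {
    s.remove_prefix(1);  // pattern (2): classify via unsigned char
  }
  while (!s.empty() && std::isspace(static_cast<unsigned char>(s.back()))) {
    s.remove_suffix(1);
  }
  return s;
}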
-template<> -const int128 LookupTables::kVminOverBase[] = { +template <> +ABSL_CONST_INIT const int128 LookupTables::kVminOverBase[] = { 0, 0, MakeInt128(-4611686018427387904, 0u), @@ -904,11 +906,11 @@ const int128 LookupTables::kVminOverBase[] = { }; template -const IntType LookupTables::kVmaxOverBase[] = +ABSL_CONST_INIT const IntType LookupTables::kVmaxOverBase[] = X_OVER_BASE_INITIALIZER(std::numeric_limits::max()); template -const IntType LookupTables::kVminOverBase[] = +ABSL_CONST_INIT const IntType LookupTables::kVminOverBase[] = X_OVER_BASE_INITIALIZER(std::numeric_limits::min()); #undef X_OVER_BASE_INITIALIZER @@ -920,17 +922,18 @@ inline bool safe_parse_positive_int(absl::string_view text, int base, const IntType vmax = std::numeric_limits::max(); assert(vmax > 0); assert(base >= 0); - assert(vmax >= static_cast(base)); + const IntType base_inttype = static_cast(base); + assert(vmax >= base_inttype); const IntType vmax_over_base = LookupTables::kVmaxOverBase[base]; assert(base < 2 || - std::numeric_limits::max() / base == vmax_over_base); + std::numeric_limits::max() / base_inttype == vmax_over_base); const char* start = text.data(); const char* end = start + text.size(); // loop over digits for (; start < end; ++start) { unsigned char c = static_cast(start[0]); - int digit = kAsciiToInt[c]; - if (digit >= base) { + IntType digit = static_cast(kAsciiToInt[c]); + if (digit >= base_inttype) { *value_p = value; return false; } @@ -938,7 +941,7 @@ inline bool safe_parse_positive_int(absl::string_view text, int base, *value_p = vmax; return false; } - value *= base; + value *= base_inttype; if (value > vmax - digit) { *value_p = vmax; return false; diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/numbers.h b/TMessagesProj/jni/voip/webrtc/absl/strings/numbers.h index 3ed2466928..86c84ed39b 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/numbers.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/numbers.h @@ -23,8 +23,12 @@ #ifndef ABSL_STRINGS_NUMBERS_H_ #define ABSL_STRINGS_NUMBERS_H_ -#ifdef __SSE4_2__ -#include +#ifdef __SSSE3__ +#include +#endif + +#ifdef _MSC_VER +#include #endif #include @@ -36,14 +40,7 @@ #include #include "absl/base/config.h" -#ifdef __SSE4_2__ -// TODO(jorg): Remove this when we figure out the right way -// to swap bytes on SSE 4.2 that works with the compilers -// we claim to support. Also, add tests for the compiler -// that doesn't support the Intel _bswap64 intrinsic but -// does support all the SSE 4.2 intrinsics #include "absl/base/internal/endian.h" -#endif #include "absl/base/macros.h" #include "absl/base/port.h" #include "absl/numeric/bits.h" @@ -246,7 +243,7 @@ ABSL_MUST_USE_RESULT bool safe_strtoi_base(absl::string_view s, int_type* out, // Returns the number of non-pad digits of the output (it can never be zero // since 0 has one digit). 
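// Standalone sketch (not from the patched sources) of the overflow guard used
// by safe_parse_positive_int() above: the accumulated value is compared
// against max/base before the multiply and against max - digit before the
// add, so `value * base + digit` can never wrap. ParseU64Decimal is a made-up
// name; absl's real entry point is absl::SimpleAtoi().
#include <cstdint>
#include <limits>
#include <string_view>

inline bool ParseU64Decimal(std::string_view s, uint64_t* out) {
  constexpr uint64_t kMax = std::numeric_limits<uint64_t>::max();
  constexpr uint64_t kMaxOverBase = kMax / 10;  // analogous to kVmaxOverBase[10]
  if (s.empty()) return false;
  uint64_t value = 0;
  for (char c : s) {
    if (c < '0' || c > '9') return false;
    const uint64_t digit = static_cast<uint64_t>(c - '0');
    if (value > kMaxOverBase) return false;  // value * 10 would overflow
    value *= 10;
    if (value > kMax - digit) return false;  // value + digit would overflow
    value += digit;
  }
  *out = value;
  return true;
}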
inline size_t FastHexToBufferZeroPad16(uint64_t val, char* out) { -#ifdef __SSE4_2__ +#ifdef ABSL_INTERNAL_HAVE_SSSE3 uint64_t be = absl::big_endian::FromHost64(val); const auto kNibbleMask = _mm_set1_epi8(0xf); const auto kHexDigits = _mm_setr_epi8('0', '1', '2', '3', '4', '5', '6', '7', diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/numbers_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/numbers_test.cc index 498c210d3b..b3c098d1a8 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/numbers_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/numbers_test.cc @@ -19,6 +19,7 @@ #include #include // NOLINT(build/c++11) +#include #include #include #include @@ -388,9 +389,209 @@ TEST(NumbersTest, Atoi) { } TEST(NumbersTest, Atod) { + // DBL_TRUE_MIN and FLT_TRUE_MIN were not mandated in before C++17. +#if !defined(DBL_TRUE_MIN) + static constexpr double DBL_TRUE_MIN = + 4.940656458412465441765687928682213723650598026143247644255856825e-324; +#endif +#if !defined(FLT_TRUE_MIN) + static constexpr float FLT_TRUE_MIN = + 1.401298464324817070923729583289916131280261941876515771757068284e-45f; +#endif + double d; - EXPECT_TRUE(absl::SimpleAtod("nan", &d)); + float f; + + // NaN can be spelled in multiple ways. + EXPECT_TRUE(absl::SimpleAtod("NaN", &d)); + EXPECT_TRUE(std::isnan(d)); + EXPECT_TRUE(absl::SimpleAtod("nAN", &d)); EXPECT_TRUE(std::isnan(d)); + EXPECT_TRUE(absl::SimpleAtod("-nan", &d)); + EXPECT_TRUE(std::isnan(d)); + + // Likewise for Infinity. + EXPECT_TRUE(absl::SimpleAtod("inf", &d)); + EXPECT_TRUE(std::isinf(d) && (d > 0)); + EXPECT_TRUE(absl::SimpleAtod("+Infinity", &d)); + EXPECT_TRUE(std::isinf(d) && (d > 0)); + EXPECT_TRUE(absl::SimpleAtod("-INF", &d)); + EXPECT_TRUE(std::isinf(d) && (d < 0)); + + // Parse DBL_MAX. Parsing something more than twice as big should also + // produce infinity. + EXPECT_TRUE(absl::SimpleAtod("1.7976931348623157e+308", &d)); + EXPECT_EQ(d, 1.7976931348623157e+308); + EXPECT_TRUE(absl::SimpleAtod("5e308", &d)); + EXPECT_TRUE(std::isinf(d) && (d > 0)); + // Ditto, but for FLT_MAX. + EXPECT_TRUE(absl::SimpleAtof("3.4028234663852886e+38", &f)); + EXPECT_EQ(f, 3.4028234663852886e+38f); + EXPECT_TRUE(absl::SimpleAtof("7e38", &f)); + EXPECT_TRUE(std::isinf(f) && (f > 0)); + + // Parse the largest N such that parsing 1eN produces a finite value and the + // smallest M = N + 1 such that parsing 1eM produces infinity. + // + // The 309 exponent (and 39) confirms the "definition of + // kEiselLemireMaxExclExp10" comment in charconv.cc. + EXPECT_TRUE(absl::SimpleAtod("1e308", &d)); + EXPECT_EQ(d, 1e308); + EXPECT_FALSE(std::isinf(d)); + EXPECT_TRUE(absl::SimpleAtod("1e309", &d)); + EXPECT_TRUE(std::isinf(d)); + // Ditto, but for Atof instead of Atod. + EXPECT_TRUE(absl::SimpleAtof("1e38", &f)); + EXPECT_EQ(f, 1e38f); + EXPECT_FALSE(std::isinf(f)); + EXPECT_TRUE(absl::SimpleAtof("1e39", &f)); + EXPECT_TRUE(std::isinf(f)); + + // Parse the largest N such that parsing 9.999999999999999999eN, with 19 + // nines, produces a finite value. + // + // 9999999999999999999, with 19 nines but no decimal point, is the largest + // "repeated nines" integer that fits in a uint64_t. + EXPECT_TRUE(absl::SimpleAtod("9.999999999999999999e307", &d)); + EXPECT_EQ(d, 9.999999999999999999e307); + EXPECT_FALSE(std::isinf(d)); + EXPECT_TRUE(absl::SimpleAtod("9.999999999999999999e308", &d)); + EXPECT_TRUE(std::isinf(d)); + // Ditto, but for Atof instead of Atod. 
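// Standalone sketch (not from the patched sources): a portable scalar
// equivalent of what numbers.h's FastHexToBufferZeroPad16() writes -- `val`
// as exactly 16 zero-padded lowercase hex digits, with no terminating NUL.
// The header change above only swaps its SIMD guard from the raw __SSE4_2__
// check to ABSL_INTERNAL_HAVE_SSSE3; the real function additionally returns
// the number of non-pad digits, which this sketch omits.
#include <cstdint>

inline void HexZeroPad16(uint64_t val, char out[16]) {
  static constexpr char kHexDigits[] = "0123456789abcdef";
  for (int i = 15; i >= 0; --i) {
    out[i] = kHexDigits[val & 0xF];  // lowest nibble goes to the last slot
    val >>= 4;
  }
}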
+ EXPECT_TRUE(absl::SimpleAtof("9.999999999999999999e37", &f)); + EXPECT_EQ(f, 9.999999999999999999e37f); + EXPECT_FALSE(std::isinf(f)); + EXPECT_TRUE(absl::SimpleAtof("9.999999999999999999e38", &f)); + EXPECT_TRUE(std::isinf(f)); + + // Parse DBL_MIN (normal), DBL_TRUE_MIN (subnormal) and (DBL_TRUE_MIN / 10) + // (effectively zero). + EXPECT_TRUE(absl::SimpleAtod("2.2250738585072014e-308", &d)); + EXPECT_EQ(d, 2.2250738585072014e-308); + EXPECT_TRUE(absl::SimpleAtod("4.9406564584124654e-324", &d)); + EXPECT_EQ(d, 4.9406564584124654e-324); + EXPECT_TRUE(absl::SimpleAtod("4.9406564584124654e-325", &d)); + EXPECT_EQ(d, 0); + // Ditto, but for FLT_MIN, FLT_TRUE_MIN and (FLT_TRUE_MIN / 10). + EXPECT_TRUE(absl::SimpleAtof("1.1754943508222875e-38", &f)); + EXPECT_EQ(f, 1.1754943508222875e-38f); + EXPECT_TRUE(absl::SimpleAtof("1.4012984643248171e-45", &f)); + EXPECT_EQ(f, 1.4012984643248171e-45f); + EXPECT_TRUE(absl::SimpleAtof("1.4012984643248171e-46", &f)); + EXPECT_EQ(f, 0); + + // Parse the largest N (the most negative -N) such that parsing 1e-N produces + // a normal or subnormal (but still positive) or zero value. + EXPECT_TRUE(absl::SimpleAtod("1e-307", &d)); + EXPECT_EQ(d, 1e-307); + EXPECT_GE(d, DBL_MIN); + EXPECT_LT(d, DBL_MIN * 10); + EXPECT_TRUE(absl::SimpleAtod("1e-323", &d)); + EXPECT_EQ(d, 1e-323); + EXPECT_GE(d, DBL_TRUE_MIN); + EXPECT_LT(d, DBL_TRUE_MIN * 10); + EXPECT_TRUE(absl::SimpleAtod("1e-324", &d)); + EXPECT_EQ(d, 0); + // Ditto, but for Atof instead of Atod. + EXPECT_TRUE(absl::SimpleAtof("1e-37", &f)); + EXPECT_EQ(f, 1e-37f); + EXPECT_GE(f, FLT_MIN); + EXPECT_LT(f, FLT_MIN * 10); + EXPECT_TRUE(absl::SimpleAtof("1e-45", &f)); + EXPECT_EQ(f, 1e-45f); + EXPECT_GE(f, FLT_TRUE_MIN); + EXPECT_LT(f, FLT_TRUE_MIN * 10); + EXPECT_TRUE(absl::SimpleAtof("1e-46", &f)); + EXPECT_EQ(f, 0); + + // Parse the largest N (the most negative -N) such that parsing + // 9.999999999999999999e-N, with 19 nines, produces a normal or subnormal + // (but still positive) or zero value. + // + // 9999999999999999999, with 19 nines but no decimal point, is the largest + // "repeated nines" integer that fits in a uint64_t. + // + // The -324/-325 exponents (and -46/-47) confirms the "definition of + // kEiselLemireMinInclExp10" comment in charconv.cc. + EXPECT_TRUE(absl::SimpleAtod("9.999999999999999999e-308", &d)); + EXPECT_EQ(d, 9.999999999999999999e-308); + EXPECT_GE(d, DBL_MIN); + EXPECT_LT(d, DBL_MIN * 10); + EXPECT_TRUE(absl::SimpleAtod("9.999999999999999999e-324", &d)); + EXPECT_EQ(d, 9.999999999999999999e-324); + EXPECT_GE(d, DBL_TRUE_MIN); + EXPECT_LT(d, DBL_TRUE_MIN * 10); + EXPECT_TRUE(absl::SimpleAtod("9.999999999999999999e-325", &d)); + EXPECT_EQ(d, 0); + // Ditto, but for Atof instead of Atod. + EXPECT_TRUE(absl::SimpleAtof("9.999999999999999999e-38", &f)); + EXPECT_EQ(f, 9.999999999999999999e-38f); + EXPECT_GE(f, FLT_MIN); + EXPECT_LT(f, FLT_MIN * 10); + EXPECT_TRUE(absl::SimpleAtof("9.999999999999999999e-46", &f)); + EXPECT_EQ(f, 9.999999999999999999e-46f); + EXPECT_GE(f, FLT_TRUE_MIN); + EXPECT_LT(f, FLT_TRUE_MIN * 10); + EXPECT_TRUE(absl::SimpleAtof("9.999999999999999999e-47", &f)); + EXPECT_EQ(f, 0); + + // Leading and/or trailing whitespace is OK. + EXPECT_TRUE(absl::SimpleAtod(" \t\r\n 2.718", &d)); + EXPECT_EQ(d, 2.718); + EXPECT_TRUE(absl::SimpleAtod(" 3.141 ", &d)); + EXPECT_EQ(d, 3.141); + + // Leading or trailing not-whitespace is not OK. + EXPECT_FALSE(absl::SimpleAtod("n 0", &d)); + EXPECT_FALSE(absl::SimpleAtod("0n ", &d)); + + // Multiple leading 0s are OK. 
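// Standalone usage sketch (not from the patched sources) of the parser these
// new assertions exercise: absl::SimpleAtod() reports failure by returning
// false rather than throwing, which makes a parse-or-fallback helper a
// one-liner. ParseOrDefault is a made-up name for illustration.
#include "absl/strings/numbers.h"
#include "absl/strings/string_view.h"

inline double ParseOrDefault(absl::string_view s, double fallback) {
  double d;
  return absl::SimpleAtod(s, &d) ? d : fallback;
}
// ParseOrDefault("2.718", 0.0) == 2.718; ParseOrDefault("8,9", 0.0) == 0.0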
+ EXPECT_TRUE(absl::SimpleAtod("000123", &d)); + EXPECT_EQ(d, 123); + EXPECT_TRUE(absl::SimpleAtod("000.456", &d)); + EXPECT_EQ(d, 0.456); + + // An absent leading 0 (for a fraction < 1) is OK. + EXPECT_TRUE(absl::SimpleAtod(".5", &d)); + EXPECT_EQ(d, 0.5); + EXPECT_TRUE(absl::SimpleAtod("-.707", &d)); + EXPECT_EQ(d, -0.707); + + // Unary + is OK. + EXPECT_TRUE(absl::SimpleAtod("+6.0221408e+23", &d)); + EXPECT_EQ(d, 6.0221408e+23); + + // Underscores are not OK. + EXPECT_FALSE(absl::SimpleAtod("123_456", &d)); + + // The decimal separator must be '.' and is never ','. + EXPECT_TRUE(absl::SimpleAtod("8.9", &d)); + EXPECT_FALSE(absl::SimpleAtod("8,9", &d)); + + // These examples are called out in the EiselLemire function's comments. + EXPECT_TRUE(absl::SimpleAtod("4503599627370497.5", &d)); + EXPECT_EQ(d, 4503599627370497.5); + EXPECT_TRUE(absl::SimpleAtod("1e+23", &d)); + EXPECT_EQ(d, 1e+23); + EXPECT_TRUE(absl::SimpleAtod("9223372036854775807", &d)); + EXPECT_EQ(d, 9223372036854775807); + // Ditto, but for Atof instead of Atod. + EXPECT_TRUE(absl::SimpleAtof("0.0625", &f)); + EXPECT_EQ(f, 0.0625f); + EXPECT_TRUE(absl::SimpleAtof("20040229.0", &f)); + EXPECT_EQ(f, 20040229.0f); + EXPECT_TRUE(absl::SimpleAtof("2147483647.0", &f)); + EXPECT_EQ(f, 2147483647.0f); + + // Some parsing algorithms don't always round correctly (but absl::SimpleAtod + // should). This test case comes from + // https://github.com/serde-rs/json/issues/707 + // + // See also atod_manual_test.cc for running many more test cases. + EXPECT_TRUE(absl::SimpleAtod("122.416294033786585", &d)); + EXPECT_EQ(d, 122.416294033786585); + EXPECT_TRUE(absl::SimpleAtof("122.416294033786585", &f)); + EXPECT_EQ(f, 122.416294033786585f); } TEST(NumbersTest, Prefixes) { diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/str_cat.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/str_cat.cc index f4a77493a4..e5cb6d84e8 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/str_cat.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/str_cat.cc @@ -17,12 +17,15 @@ #include #include +#include #include #include +#include #include "absl/strings/ascii.h" #include "absl/strings/internal/resize_uninitialized.h" #include "absl/strings/numbers.h" +#include "absl/strings/string_view.h" namespace absl { ABSL_NAMESPACE_BEGIN @@ -56,7 +59,7 @@ AlphaNum::AlphaNum(Dec dec) { *--writer = '0' + (value % 10); value /= 10; } - *--writer = '0' + value; + *--writer = '0' + static_cast(value); if (neg) *--writer = '-'; ptrdiff_t fillers = writer - minfill; @@ -73,7 +76,7 @@ AlphaNum::AlphaNum(Dec dec) { if (add_sign_again) *--writer = '-'; } - piece_ = absl::string_view(writer, end - writer); + piece_ = absl::string_view(writer, static_cast(end - writer)); } // ---------------------------------------------------------------------- diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/str_cat.h b/TMessagesProj/jni/voip/webrtc/absl/strings/str_cat.h index 4d228b09eb..5ee26db027 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/str_cat.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/str_cat.h @@ -48,6 +48,40 @@ // `StrCat()` or `StrAppend()`. You may specify a minimum hex field width using // a `PadSpec` enum. // +// User-defined types can be formatted with the `AbslStringify()` customization +// point. The API relies on detecting an overload in the user-defined type's +// namespace of a free (non-member) `AbslStringify()` function as a definition +// (typically declared as a friend and implemented in-line. 
+// with the following signature: +// +// class MyClass { ... }; +// +// template +// void AbslStringify(Sink& sink, const MyClass& value); +// +// An `AbslStringify()` overload for a type should only be declared in the same +// file and namespace as said type. +// +// Note that `AbslStringify()` also supports use with `absl::StrFormat()` and +// `absl::Substitute()`. +// +// Example: +// +// struct Point { +// // To add formatting support to `Point`, we simply need to add a free +// // (non-member) function `AbslStringify()`. This method specifies how +// // Point should be printed when absl::StrCat() is called on it. You can add +// // such a free function using a friend declaration within the body of the +// // class. The sink parameter is a templated type to avoid requiring +// // dependencies. +// template friend void AbslStringify(Sink& +// sink, const Point& p) { +// absl::Format(&sink, "(%v, %v)", p.x, p.y); +// } +// +// int x; +// int y; +// }; // ----------------------------------------------------------------------------- #ifndef ABSL_STRINGS_STR_CAT_H_ @@ -57,9 +91,12 @@ #include #include #include +#include #include #include "absl/base/port.h" +#include "absl/strings/internal/has_absl_stringify.h" +#include "absl/strings/internal/stringify_sink.h" #include "absl/strings/numbers.h" #include "absl/strings/string_view.h" @@ -251,9 +288,17 @@ class AlphaNum { const strings_internal::AlphaNumBuffer& buf) : piece_(&buf.data[0], buf.size) {} - AlphaNum(const char* c_str) : piece_(c_str) {} // NOLINT(runtime/explicit) + AlphaNum(const char* c_str) // NOLINT(runtime/explicit) + : piece_(NullSafeStringView(c_str)) {} // NOLINT(runtime/explicit) AlphaNum(absl::string_view pc) : piece_(pc) {} // NOLINT(runtime/explicit) + template ::value>::type> + AlphaNum( // NOLINT(runtime/explicit) + const T& v, // NOLINT(runtime/explicit) + strings_internal::StringifySink&& sink = {}) // NOLINT(runtime/explicit) + : piece_(strings_internal::ExtractStringification(sink, v)) {} + template AlphaNum( // NOLINT(runtime/explicit) const std::basic_string, Allocator>& str) @@ -273,7 +318,8 @@ class AlphaNum { // This overload matches only scoped enums. template {} && !std::is_convertible{}>::type> + std::is_enum{} && !std::is_convertible{} && + !strings_internal::HasAbslStringify::value>::type> AlphaNum(T e) // NOLINT(runtime/explicit) : AlphaNum(static_cast::type>(e)) {} diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/str_cat_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/str_cat_test.cc index f3770dc076..c3fb317023 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/str_cat_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/str_cat_test.cc @@ -21,6 +21,7 @@ #include #include "gtest/gtest.h" +#include "absl/strings/str_format.h" #include "absl/strings/substitute.h" #ifdef __ANDROID__ @@ -210,6 +211,11 @@ TEST(StrCat, CornerCases) { EXPECT_EQ(result, ""); } +TEST(StrCat, NullConstCharPtr) { + const char* null = nullptr; + EXPECT_EQ(absl::StrCat("mon", null, "key"), "monkey"); +} + // A minimal allocator that uses malloc(). 
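// Standalone sketch (not from the patched sources; the type name is made up):
// one AbslStringify() definition, written in the documented shape above,
// serves both absl::StrCat() and the new %v conversion in absl::StrFormat(),
// assuming the Abseil revision vendored by this patch.
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"

struct Size2D {
  int width;
  int height;

  template <typename Sink>
  friend void AbslStringify(Sink& sink, const Size2D& s) {
    absl::Format(&sink, "%vx%v", s.width, s.height);
  }
};

// Usage:
//   absl::StrCat("viewport=", Size2D{640, 480});       // "viewport=640x480"
//   absl::StrFormat("viewport=%v", Size2D{640, 480});  // "viewport=640x480"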
template struct Mallocator { @@ -607,4 +613,53 @@ TEST(Numbers, TestFunctionsMovedOverFromNumbersMain) { TestFastPrints(); } +struct PointStringify { + template + friend void AbslStringify(FormatSink& sink, const PointStringify& p) { + sink.Append("("); + sink.Append(absl::StrCat(p.x)); + sink.Append(", "); + sink.Append(absl::StrCat(p.y)); + sink.Append(")"); + } + + double x = 10.0; + double y = 20.0; +}; + +TEST(StrCat, AbslStringifyExample) { + PointStringify p; + EXPECT_EQ(absl::StrCat(p), "(10, 20)"); + EXPECT_EQ(absl::StrCat("a ", p, " z"), "a (10, 20) z"); +} + +struct PointStringifyUsingFormat { + template + friend void AbslStringify(FormatSink& sink, + const PointStringifyUsingFormat& p) { + absl::Format(&sink, "(%g, %g)", p.x, p.y); + } + + double x = 10.0; + double y = 20.0; +}; + +TEST(StrCat, AbslStringifyExampleUsingFormat) { + PointStringifyUsingFormat p; + EXPECT_EQ(absl::StrCat(p), "(10, 20)"); + EXPECT_EQ(absl::StrCat("a ", p, " z"), "a (10, 20) z"); +} + +enum class EnumWithStringify { Many = 0, Choices = 1 }; + +template +void AbslStringify(Sink& sink, EnumWithStringify e) { + absl::Format(&sink, "%s", e == EnumWithStringify::Many ? "Many" : "Choices"); +} + +TEST(StrCat, AbslStringifyWithEnum) { + const auto e = EnumWithStringify::Choices; + EXPECT_EQ(absl::StrCat(e), "Choices"); +} + } // namespace diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/str_format.h b/TMessagesProj/jni/voip/webrtc/absl/strings/str_format.h index 4b05c70c23..f4c98f415b 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/str_format.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/str_format.h @@ -191,7 +191,7 @@ class FormatCountCapture { // absl::StrFormat(formatString, "TheVillage", 6); // // A format string generally follows the POSIX syntax as used within the POSIX -// `printf` specification. +// `printf` specification. (Exceptions are noted below.) // // (See http://pubs.opengroup.org/onlinepubs/9699919799/functions/fprintf.html.) // @@ -211,6 +211,10 @@ class FormatCountCapture { // * `n` for the special case of writing out the number of characters // written to this point. The resulting value must be captured within an // `absl::FormatCountCapture` type. +// * `v` for values using the default format for a deduced type. These deduced +// types include many of the primitive types denoted here as well as +// user-defined types containing the proper extensions. (See below for more +// information.) // // Implementation-defined behavior: // * A null pointer provided to "%s" or "%p" is output as "(nil)". @@ -239,6 +243,15 @@ class FormatCountCapture { // "%s%d%n", "hello", 123, absl::FormatCountCapture(&n)); // EXPECT_EQ(8, n); // +// NOTE: the `v` specifier (for "value") is a type specifier not present in the +// POSIX specification. %v will format values according to their deduced type. +// `v` uses `d` for signed integer values, `u` for unsigned integer values, `g` +// for floating point values, and formats boolean values as "true"/"false" +// (instead of 1 or 0 for booleans formatted using d). `const char*` is not +// supported; please use `std:string` and `string_view`. `char` is also not +// supported due to ambiguity of the type. This specifier does not support +// modifiers. 
+// // The `FormatSpec` intrinsically supports all of these fundamental C++ types: // // * Characters: `char`, `signed char`, `unsigned char` @@ -570,6 +583,41 @@ ABSL_MUST_USE_RESULT inline bool FormatUntyped( // StrFormat Extensions //------------------------------------------------------------------------------ // +// AbslStringify() +// +// A simpler customization API for formatting user-defined types using +// absl::StrFormat(). The API relies on detecting an overload in the +// user-defined type's namespace of a free (non-member) `AbslStringify()` +// function as a friend definition with the following signature: +// +// template +// void AbslStringify(Sink& sink, const X& value); +// +// An `AbslStringify()` overload for a type should only be declared in the same +// file and namespace as said type. +// +// Note that unlike with AbslFormatConvert(), AbslStringify() does not allow +// customization of allowed conversion characters. AbslStringify() uses `%v` as +// the underlying conversion specififer. Additionally, AbslStringify() supports +// use with absl::StrCat while AbslFormatConvert() does not. +// +// Example: +// +// struct Point { +// // To add formatting support to `Point`, we simply need to add a free +// // (non-member) function `AbslStringify()`. This method prints in the +// // request format using the underlying `%v` specifier. You can add such a +// // free function using a friend declaration within the body of the class. +// // The sink parameter is a templated type to avoid requiring dependencies. +// template +// friend void AbslStringify(Sink& sink, const Point& p) { +// absl::Format(&sink, "(%v, %v)", p.x, p.y); +// } +// +// int x; +// int y; +// }; +// // AbslFormatConvert() // // The StrFormat library provides a customization API for formatting @@ -616,9 +664,9 @@ ABSL_MUST_USE_RESULT inline bool FormatUntyped( // AbslFormatConvert(const Point& p, const absl::FormatConversionSpec& spec, // absl::FormatSink* s) { // if (spec.conversion_char() == absl::FormatConversionChar::s) { -// s->Append(absl::StrCat("x=", p.x, " y=", p.y)); +// absl::Format(s, "x=%vy=%v", p.x, p.y); // } else { -// s->Append(absl::StrCat(p.x, ",", p.y)); +// absl::Format(s, "%v,%v", p.x, p.y); // } // return {true}; // } @@ -637,7 +685,7 @@ enum class FormatConversionChar : uint8_t { c, s, // text d, i, o, u, x, X, // int f, F, e, E, g, G, a, A, // float - n, p // misc + n, p, v // misc }; // clang-format on @@ -757,6 +805,7 @@ enum class FormatConversionCharSet : uint64_t { // misc n = str_format_internal::FormatConversionCharToConvInt('n'), p = str_format_internal::FormatConversionCharToConvInt('p'), + v = str_format_internal::FormatConversionCharToConvInt('v'), // Used for width/precision '*' specification. kStar = static_cast( @@ -771,23 +820,36 @@ enum class FormatConversionCharSet : uint64_t { // FormatSink // -// An abstraction to which conversions write their string data. +// A format sink is a generic abstraction to which conversions may write their +// formatted string data. `absl::FormatConvert()` uses this sink to write its +// formatted string. // class FormatSink { public: - // Appends `count` copies of `ch`. + // FormatSink::Append() + // + // Appends `count` copies of `ch` to the format sink. void Append(size_t count, char ch) { sink_->Append(count, ch); } + // Overload of FormatSink::Append() for appending the characters of a string + // view to a format sink. void Append(string_view v) { sink_->Append(v); } - // Appends the first `precision` bytes of `v`. 
If this is less than - // `width`, spaces will be appended first (if `left` is false), or + // FormatSink::PutPaddedString() + // + // Appends `precision` number of bytes of `v` to the format sink. If this is + // less than `width`, spaces will be appended first (if `left` is false), or // after (if `left` is true) to ensure the total amount appended is // at least `width`. bool PutPaddedString(string_view v, int width, int precision, bool left) { return sink_->PutPaddedString(v, width, precision, left); } + // Support `absl::Format(&sink, format, args...)`. + friend void AbslFormatFlush(FormatSink* sink, absl::string_view v) { + sink->Append(v); + } + private: friend str_format_internal::FormatSinkImpl; explicit FormatSink(str_format_internal::FormatSinkImpl* s) : sink_(s) {} diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/str_format_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/str_format_test.cc index c60027ad29..2aa22b0d06 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/str_format_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/str_format_test.cc @@ -42,6 +42,18 @@ TEST_F(FormatEntryPointTest, Format) { EXPECT_TRUE(Format(&sink, pc, 123)); EXPECT_EQ("A format 123", sink); } + +TEST_F(FormatEntryPointTest, FormatWithV) { + std::string sink; + EXPECT_TRUE(Format(&sink, "A format %v", 123)); + EXPECT_EQ("A format 123", sink); + sink.clear(); + + ParsedFormat<'v'> pc("A format %v"); + EXPECT_TRUE(Format(&sink, pc, 123)); + EXPECT_EQ("A format 123", sink); +} + TEST_F(FormatEntryPointTest, UntypedFormat) { constexpr const char* formats[] = { "", @@ -84,6 +96,14 @@ TEST_F(FormatEntryPointTest, StringFormat) { EXPECT_EQ("=123=", StrFormat(view, 123)); } +TEST_F(FormatEntryPointTest, StringFormatV) { + std::string hello = "hello"; + EXPECT_EQ("hello", StrFormat("%v", hello)); + EXPECT_EQ("123", StrFormat("%v", 123)); + constexpr absl::string_view view("=%v=", 4); + EXPECT_EQ("=123=", StrFormat(view, 123)); +} + TEST_F(FormatEntryPointTest, AppendFormat) { std::string s; std::string& r = StrAppendFormat(&s, "%d", 123); @@ -91,6 +111,13 @@ TEST_F(FormatEntryPointTest, AppendFormat) { EXPECT_EQ("123", r); } +TEST_F(FormatEntryPointTest, AppendFormatWithV) { + std::string s; + std::string& r = StrAppendFormat(&s, "%v", 123); + EXPECT_EQ(&s, &r); // should be same object + EXPECT_EQ("123", r); +} + TEST_F(FormatEntryPointTest, AppendFormatFail) { std::string s = "orig"; @@ -103,6 +130,17 @@ TEST_F(FormatEntryPointTest, AppendFormatFail) { {&arg, 1})); } +TEST_F(FormatEntryPointTest, AppendFormatFailWithV) { + std::string s = "orig"; + + UntypedFormatSpec format(" more %v"); + FormatArgImpl arg("not an int"); + + EXPECT_EQ("orig", + str_format_internal::AppendPack( + &s, str_format_internal::UntypedFormatSpecImpl::Extract(format), + {&arg, 1})); +} TEST_F(FormatEntryPointTest, ManyArgs) { EXPECT_EQ("24", StrFormat("%24$d", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, @@ -123,6 +161,15 @@ TEST_F(FormatEntryPointTest, Preparsed) { EXPECT_EQ("=123=", StrFormat(ParsedFormat<'d'>(view), 123)); } +TEST_F(FormatEntryPointTest, PreparsedWithV) { + ParsedFormat<'v'> pc("%v"); + EXPECT_EQ("123", StrFormat(pc, 123)); + // rvalue ok? 
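// Standalone sketch (not from the patched sources): the AbslFormatFlush()
// hook that the new FormatSink friend above is an instance of. Any type with
// a matching AbslFormatFlush() overload can be the destination of
// absl::Format(&sink, ...). LineBuffer is a made-up type for illustration.
#include <string>

#include "absl/strings/str_format.h"
#include "absl/strings/string_view.h"

struct LineBuffer {
  std::string data;

  friend void AbslFormatFlush(LineBuffer* buf, absl::string_view v) {
    buf->data.append(v.data(), v.size());
  }
};

// Usage:
//   LineBuffer buf;
//   absl::Format(&buf, "user=%s id=%d\n", "alice", 42);
//   // buf.data == "user=alice id=42\n"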
+ EXPECT_EQ("123", StrFormat(ParsedFormat<'v'>("%v"), 123)); + constexpr absl::string_view view("=%v=", 4); + EXPECT_EQ("=123=", StrFormat(ParsedFormat<'v'>(view), 123)); +} + TEST_F(FormatEntryPointTest, FormatCountCapture) { int n = 0; EXPECT_EQ("", StrFormat("%n", FormatCountCapture(&n))); @@ -131,6 +178,14 @@ TEST_F(FormatEntryPointTest, FormatCountCapture) { EXPECT_EQ(3, n); } +TEST_F(FormatEntryPointTest, FormatCountCaptureWithV) { + int n = 0; + EXPECT_EQ("", StrFormat("%n", FormatCountCapture(&n))); + EXPECT_EQ(0, n); + EXPECT_EQ("123", StrFormat("%v%n", 123, FormatCountCapture(&n))); + EXPECT_EQ(3, n); +} + TEST_F(FormatEntryPointTest, FormatCountCaptureWrongType) { // Should reject int*. int n = 0; @@ -143,6 +198,18 @@ TEST_F(FormatEntryPointTest, FormatCountCaptureWrongType) { absl::MakeSpan(args))); } +TEST_F(FormatEntryPointTest, FormatCountCaptureWrongTypeWithV) { + // Should reject int*. + int n = 0; + UntypedFormatSpec format("%v%n"); + int i = 123, *ip = &n; + FormatArgImpl args[2] = {FormatArgImpl(i), FormatArgImpl(ip)}; + + EXPECT_EQ("", str_format_internal::FormatPack( + str_format_internal::UntypedFormatSpecImpl::Extract(format), + absl::MakeSpan(args))); +} + TEST_F(FormatEntryPointTest, FormatCountCaptureMultiple) { int n1 = 0; int n2 = 0; @@ -165,6 +232,21 @@ TEST_F(FormatEntryPointTest, FormatCountCaptureExample) { s); } +TEST_F(FormatEntryPointTest, FormatCountCaptureExampleWithV) { + int n; + std::string s; + std::string a1 = "(1,1)"; + std::string a2 = "(1,2)"; + std::string a3 = "(2,2)"; + StrAppendFormat(&s, "%v: %n%v\n", a1, FormatCountCapture(&n), a2); + StrAppendFormat(&s, "%*s%v\n", n, "", a3); + EXPECT_EQ(7, n); + EXPECT_EQ( + "(1,1): (1,2)\n" + " (2,2)\n", + s); +} + TEST_F(FormatEntryPointTest, Stream) { const std::string formats[] = { "", @@ -183,7 +265,7 @@ TEST_F(FormatEntryPointTest, Stream) { std::ostringstream oss; oss << StreamFormat(*parsed, 123, 3, 49, "multistreaming!!!", 1.01, 1.01); int fmt_result = snprintf(&*buf.begin(), buf.size(), fmt.c_str(), // - 123, 3, 49, "multistreaming!!!", 1.01, 1.01); + 123, 3, 49, "multistreaming!!!", 1.01, 1.01); ASSERT_TRUE(oss) << fmt; ASSERT_TRUE(fmt_result >= 0 && static_cast(fmt_result) < buf.size()) << fmt_result; @@ -191,6 +273,36 @@ TEST_F(FormatEntryPointTest, Stream) { } } +TEST_F(FormatEntryPointTest, StreamWithV) { + const std::string formats[] = { + "", + "a", + "%v %u %c %v %f %v", + }; + + const std::string formats_for_buf[] = { + "", + "a", + "%d %u %c %s %f %g", + }; + + std::string buf(4096, '\0'); + for (auto i = 0; i < ABSL_ARRAYSIZE(formats); ++i) { + const auto parsed = + ParsedFormat<'v', 'u', 'c', 'v', 'f', 'v'>::NewAllowIgnored(formats[i]); + std::ostringstream oss; + oss << StreamFormat(*parsed, 123, 3, 49, + absl::string_view("multistreaming!!!"), 1.01, 1.01); + int fmt_result = + snprintf(&*buf.begin(), buf.size(), formats_for_buf[i].c_str(), // + 123, 3, 49, "multistreaming!!!", 1.01, 1.01); + ASSERT_TRUE(oss) << formats[i]; + ASSERT_TRUE(fmt_result >= 0 && static_cast(fmt_result) < buf.size()) + << fmt_result; + EXPECT_EQ(buf.c_str(), oss.str()); + } +} + TEST_F(FormatEntryPointTest, StreamOk) { std::ostringstream oss; oss << StreamFormat("hello %d", 123); @@ -198,6 +310,13 @@ TEST_F(FormatEntryPointTest, StreamOk) { EXPECT_TRUE(oss.good()); } +TEST_F(FormatEntryPointTest, StreamOkWithV) { + std::ostringstream oss; + oss << StreamFormat("hello %v", 123); + EXPECT_EQ("hello 123", oss.str()); + EXPECT_TRUE(oss.good()); +} + TEST_F(FormatEntryPointTest, StreamFail) { 
std::ostringstream oss; UntypedFormatSpec format("hello %d"); @@ -208,6 +327,16 @@ TEST_F(FormatEntryPointTest, StreamFail) { EXPECT_TRUE(oss.fail()); } +TEST_F(FormatEntryPointTest, StreamFailWithV) { + std::ostringstream oss; + UntypedFormatSpec format("hello %v"); + FormatArgImpl arg("non-numeric"); + oss << str_format_internal::Streamable( + str_format_internal::UntypedFormatSpecImpl::Extract(format), {&arg, 1}); + EXPECT_EQ("hello ", oss.str()); // partial write + EXPECT_TRUE(oss.fail()); +} + std::string WithSnprintf(const char* fmt, ...) { std::string buf; buf.resize(128); @@ -249,6 +378,12 @@ TEST_F(FormatEntryPointTest, FormatStreamed) { EXPECT_EQ("123", StrFormat("%s", FormatStreamed(StreamFormat("%d", 123)))); } +TEST_F(FormatEntryPointTest, FormatStreamedWithV) { + EXPECT_EQ("123", StrFormat("%v", FormatStreamed(123))); + EXPECT_EQ("X", StrFormat("%v", FormatStreamed(streamed_test::X()))); + EXPECT_EQ("123", StrFormat("%v", FormatStreamed(StreamFormat("%d", 123)))); +} + // Helper class that creates a temporary file and exposes a FILE* to it. // It will close the file on destruction. class TempFile { @@ -284,6 +419,14 @@ TEST_F(FormatEntryPointTest, FPrintF) { EXPECT_EQ(tmp.ReadFile(), "STRING: ABC NUMBER: -000000019"); } +TEST_F(FormatEntryPointTest, FPrintFWithV) { + TempFile tmp; + int result = + FPrintF(tmp.file(), "STRING: %v NUMBER: %010d", std::string("ABC"), -19); + EXPECT_EQ(result, 30); + EXPECT_EQ(tmp.ReadFile(), "STRING: ABC NUMBER: -000000019"); +} + TEST_F(FormatEntryPointTest, FPrintFError) { errno = 0; int result = FPrintF(stdin, "ABC"); @@ -318,6 +461,23 @@ TEST_F(FormatEntryPointTest, PrintF) { EXPECT_EQ(result, 30); EXPECT_EQ(tmp.ReadFile(), "STRING: ABC NUMBER: -000000019"); } + +TEST_F(FormatEntryPointTest, PrintFWithV) { + int stdout_tmp = dup(STDOUT_FILENO); + + TempFile tmp; + std::fflush(stdout); + dup2(fileno(tmp.file()), STDOUT_FILENO); + + int result = PrintF("STRING: %v NUMBER: %010d", std::string("ABC"), -19); + + std::fflush(stdout); + dup2(stdout_tmp, STDOUT_FILENO); + close(stdout_tmp); + + EXPECT_EQ(result, 30); + EXPECT_EQ(tmp.ReadFile(), "STRING: ABC NUMBER: -000000019"); +} #endif // __GLIBC__ TEST_F(FormatEntryPointTest, SNPrintF) { @@ -347,9 +507,41 @@ TEST_F(FormatEntryPointTest, SNPrintF) { EXPECT_EQ(result, 37); } +TEST_F(FormatEntryPointTest, SNPrintFWithV) { + char buffer[16]; + int result = + SNPrintF(buffer, sizeof(buffer), "STRING: %v", std::string("ABC")); + EXPECT_EQ(result, 11); + EXPECT_EQ(std::string(buffer), "STRING: ABC"); + + result = SNPrintF(buffer, sizeof(buffer), "NUMBER: %v", 123456); + EXPECT_EQ(result, 14); + EXPECT_EQ(std::string(buffer), "NUMBER: 123456"); + + result = SNPrintF(buffer, sizeof(buffer), "NUMBER: %v", 1234567); + EXPECT_EQ(result, 15); + EXPECT_EQ(std::string(buffer), "NUMBER: 1234567"); + + result = SNPrintF(buffer, sizeof(buffer), "NUMBER: %v", 12345678); + EXPECT_EQ(result, 16); + EXPECT_EQ(std::string(buffer), "NUMBER: 1234567"); + + result = SNPrintF(buffer, sizeof(buffer), "NUMBER: %v", 123456789); + EXPECT_EQ(result, 17); + EXPECT_EQ(std::string(buffer), "NUMBER: 1234567"); + + std::string size = "size"; + + result = SNPrintF(nullptr, 0, "Just checking the %v of the output.", size); + EXPECT_EQ(result, 37); +} + TEST(StrFormat, BehavesAsDocumented) { std::string s = absl::StrFormat("%s, %d!", "Hello", 123); EXPECT_EQ("Hello, 123!", s); + std::string hello = "Hello"; + std::string s2 = absl::StrFormat("%v, %v!", hello, 123); + EXPECT_EQ("Hello, 123!", s2); // The format of a replacement is // 
'%'[position][flags][width['.'precision]][length_modifier][format] EXPECT_EQ(absl::StrFormat("%1$+3.2Lf", 1.1), "+1.10"); @@ -364,22 +556,31 @@ TEST(StrFormat, BehavesAsDocumented) { // "s" - string Eg: "C" -> "C", std::string("C++") -> "C++" // Formats std::string, char*, string_view, and Cord. EXPECT_EQ(StrFormat("%s", "C"), "C"); + EXPECT_EQ(StrFormat("%v", std::string("C")), "C"); EXPECT_EQ(StrFormat("%s", std::string("C++")), "C++"); + EXPECT_EQ(StrFormat("%v", std::string("C++")), "C++"); EXPECT_EQ(StrFormat("%s", string_view("view")), "view"); + EXPECT_EQ(StrFormat("%v", string_view("view")), "view"); EXPECT_EQ(StrFormat("%s", absl::Cord("cord")), "cord"); + EXPECT_EQ(StrFormat("%v", absl::Cord("cord")), "cord"); // Integral Conversion // These format integral types: char, int, long, uint64_t, etc. EXPECT_EQ(StrFormat("%d", char{10}), "10"); EXPECT_EQ(StrFormat("%d", int{10}), "10"); EXPECT_EQ(StrFormat("%d", long{10}), "10"); // NOLINT EXPECT_EQ(StrFormat("%d", uint64_t{10}), "10"); + EXPECT_EQ(StrFormat("%v", int{10}), "10"); + EXPECT_EQ(StrFormat("%v", long{10}), "10"); // NOLINT + EXPECT_EQ(StrFormat("%v", uint64_t{10}), "10"); // d,i - signed decimal Eg: -10 -> "-10" EXPECT_EQ(StrFormat("%d", -10), "-10"); EXPECT_EQ(StrFormat("%i", -10), "-10"); + EXPECT_EQ(StrFormat("%v", -10), "-10"); // o - octal Eg: 10 -> "12" EXPECT_EQ(StrFormat("%o", 10), "12"); // u - unsigned decimal Eg: 10 -> "10" EXPECT_EQ(StrFormat("%u", 10), "10"); + EXPECT_EQ(StrFormat("%v", 10), "10"); // x/X - lower,upper case hex Eg: 10 -> "a"/"A" EXPECT_EQ(StrFormat("%x", 10), "a"); EXPECT_EQ(StrFormat("%X", 10), "A"); @@ -404,6 +605,8 @@ TEST(StrFormat, BehavesAsDocumented) { EXPECT_EQ(StrFormat("%g", .01), "0.01"); EXPECT_EQ(StrFormat("%g", 1e10), "1e+10"); EXPECT_EQ(StrFormat("%G", 1e10), "1E+10"); + EXPECT_EQ(StrFormat("%v", .01), "0.01"); + EXPECT_EQ(StrFormat("%v", 1e10), "1e+10"); // a/A - lower,upper case hex Eg: -3.0 -> "-0x1.8p+1"/"-0X1.8P+1" // On Android platform <=21, there is a regression in hexfloat formatting. 
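// Standalone sketch (not from the patched sources) of the POSIX positional
// syntax referenced in the replacement grammar above: arguments are selected
// by 1-based position, so they can be reordered or reused without changing
// the argument list.
#include <string>

#include "absl/strings/str_format.h"

inline std::string SwappedGreeting() {
  return absl::StrFormat("%2$s, %1$s!", "world", "Hello");  // "Hello, world!"
}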
@@ -441,6 +644,11 @@ TEST(StrFormat, BehavesAsDocumented) { EXPECT_EQ(StrFormat("%zd", int{1}), "1"); EXPECT_EQ(StrFormat("%td", int{1}), "1"); EXPECT_EQ(StrFormat("%qd", int{1}), "1"); + + // Bool is handled correctly depending on whether %v is used + EXPECT_EQ(StrFormat("%v", true), "true"); + EXPECT_EQ(StrFormat("%v", false), "false"); + EXPECT_EQ(StrFormat("%d", true), "1"); } using str_format_internal::ExtendedParsedFormat; @@ -490,6 +698,15 @@ TEST_F(ParsedFormatTest, SimpleChecked) { SummarizeParsedFormat(ParsedFormat<'s', '*', 'd'>("%s %.*d"))); } +TEST_F(ParsedFormatTest, SimpleCheckedWithV) { + EXPECT_EQ("[ABC]{v:1$v}[DEF]", + SummarizeParsedFormat(ParsedFormat<'v'>("ABC%vDEF"))); + EXPECT_EQ("{v:1$v}[FFF]{v:2$v}[ZZZ]{f:3$f}", + SummarizeParsedFormat(ParsedFormat<'v', 'v', 'f'>("%vFFF%vZZZ%f"))); + EXPECT_EQ("{v:1$v}[ ]{.*d:3$.2$*d}", + SummarizeParsedFormat(ParsedFormat<'v', '*', 'd'>("%v %.*d"))); +} + TEST_F(ParsedFormatTest, SimpleUncheckedCorrect) { auto f = ParsedFormat<'d'>::New("ABC%dDEF"); ASSERT_TRUE(f); @@ -520,6 +737,23 @@ TEST_F(ParsedFormatTest, SimpleUncheckedCorrect) { SummarizeParsedFormat(*dollar)); } +TEST_F(ParsedFormatTest, SimpleUncheckedCorrectWithV) { + auto f = ParsedFormat<'v'>::New("ABC%vDEF"); + ASSERT_TRUE(f); + EXPECT_EQ("[ABC]{v:1$v}[DEF]", SummarizeParsedFormat(*f)); + + std::string format = "%vFFF%vZZZ%f"; + auto f2 = ParsedFormat<'v', 'v', 'f'>::New(format); + + ASSERT_TRUE(f2); + EXPECT_EQ("{v:1$v}[FFF]{v:2$v}[ZZZ]{f:3$f}", SummarizeParsedFormat(*f2)); + + f2 = ParsedFormat<'v', 'v', 'f'>::New("%v %v %f"); + + ASSERT_TRUE(f2); + EXPECT_EQ("{v:1$v}[ ]{v:2$v}[ ]{f:3$f}", SummarizeParsedFormat(*f2)); +} + TEST_F(ParsedFormatTest, SimpleUncheckedIgnoredArgs) { EXPECT_FALSE((ParsedFormat<'d', 's'>::New("ABC"))); EXPECT_FALSE((ParsedFormat<'d', 's'>::New("%dABC"))); @@ -535,6 +769,18 @@ TEST_F(ParsedFormatTest, SimpleUncheckedIgnoredArgs) { EXPECT_EQ("[ABC]{2$s:2$s}", SummarizeParsedFormat(*f)); } +TEST_F(ParsedFormatTest, SimpleUncheckedIgnoredArgsWithV) { + EXPECT_FALSE((ParsedFormat<'v', 'v'>::New("ABC"))); + EXPECT_FALSE((ParsedFormat<'v', 'v'>::New("%vABC"))); + EXPECT_FALSE((ParsedFormat<'v', 's'>::New("ABC%2$s"))); + auto f = ParsedFormat<'v', 'v'>::NewAllowIgnored("ABC"); + ASSERT_TRUE(f); + EXPECT_EQ("[ABC]", SummarizeParsedFormat(*f)); + f = ParsedFormat<'v', 'v'>::NewAllowIgnored("%vABC"); + ASSERT_TRUE(f); + EXPECT_EQ("{v:1$v}[ABC]", SummarizeParsedFormat(*f)); +} + TEST_F(ParsedFormatTest, SimpleUncheckedUnsupported) { EXPECT_FALSE(ParsedFormat<'d'>::New("%1$d %1$x")); EXPECT_FALSE(ParsedFormat<'x'>::New("%1$d %1$x")); @@ -549,6 +795,15 @@ TEST_F(ParsedFormatTest, SimpleUncheckedIncorrect) { EXPECT_FALSE((ParsedFormat<'s', 'd', 'g'>::New(format))); } +TEST_F(ParsedFormatTest, SimpleUncheckedIncorrectWithV) { + EXPECT_FALSE(ParsedFormat<'v'>::New("")); + + EXPECT_FALSE(ParsedFormat<'v'>::New("ABC%vDEF%v")); + + std::string format = "%vFFF%vZZZ%f"; + EXPECT_FALSE((ParsedFormat<'v', 'v', 'g'>::New(format))); +} + #if defined(__cpp_nontype_template_parameter_auto) template @@ -595,6 +850,23 @@ TEST_F(ParsedFormatTest, ExtendedTyping) { 's'>::New("%s%s"); ASSERT_TRUE(v4); } + +TEST_F(ParsedFormatTest, ExtendedTypingWithV) { + EXPECT_FALSE(ParsedFormat::New("")); + ASSERT_TRUE(ParsedFormat::New("%v")); + auto v1 = ParsedFormat<'v', absl::FormatConversionCharSet::v>::New("%v%v"); + ASSERT_TRUE(v1); + auto v2 = ParsedFormat::New("%v%v"); + ASSERT_TRUE(v2); + auto v3 = ParsedFormat::New("%v%v"); + ASSERT_TRUE(v3); + auto v4 = 
ParsedFormat::New("%v%v"); + ASSERT_TRUE(v4); +} #endif TEST_F(ParsedFormatTest, UncheckedCorrect) { @@ -638,6 +910,28 @@ TEST_F(ParsedFormatTest, UncheckedCorrect) { SummarizeParsedFormat(*dollar)); } +TEST_F(ParsedFormatTest, UncheckedCorrectWithV) { + auto f = + ExtendedParsedFormat::New("ABC%vDEF"); + ASSERT_TRUE(f); + EXPECT_EQ("[ABC]{v:1$v}[DEF]", SummarizeParsedFormat(*f)); + + std::string format = "%vFFF%vZZZ%f"; + auto f2 = ExtendedParsedFormat< + absl::FormatConversionCharSet::v, absl::FormatConversionCharSet::v, + absl::FormatConversionCharSet::kFloating>::New(format); + + ASSERT_TRUE(f2); + EXPECT_EQ("{v:1$v}[FFF]{v:2$v}[ZZZ]{f:3$f}", SummarizeParsedFormat(*f2)); + + f2 = ExtendedParsedFormat< + absl::FormatConversionCharSet::v, absl::FormatConversionCharSet::v, + absl::FormatConversionCharSet::kFloating>::New("%v %v %f"); + + ASSERT_TRUE(f2); + EXPECT_EQ("{v:1$v}[ ]{v:2$v}[ ]{f:3$f}", SummarizeParsedFormat(*f2)); +} + TEST_F(ParsedFormatTest, UncheckedIgnoredArgs) { EXPECT_FALSE( (ExtendedParsedFormat::New("ABC"))); + EXPECT_FALSE( + (ExtendedParsedFormat::New("%vABC"))); + EXPECT_FALSE((ExtendedParsedFormat:: + New("ABC%2$s"))); + auto f = ExtendedParsedFormat< + absl::FormatConversionCharSet::v, + absl::FormatConversionCharSet::v>::NewAllowIgnored("ABC"); + ASSERT_TRUE(f); + EXPECT_EQ("[ABC]", SummarizeParsedFormat(*f)); + f = ExtendedParsedFormat< + absl::FormatConversionCharSet::v, + absl::FormatConversionCharSet::v>::NewAllowIgnored("%vABC"); + ASSERT_TRUE(f); + EXPECT_EQ("{v:1$v}[ABC]", SummarizeParsedFormat(*f)); +} + TEST_F(ParsedFormatTest, UncheckedMultipleTypes) { auto dx = ExtendedParsedFormat::New(format))); } +TEST_F(ParsedFormatTest, UncheckedIncorrectWithV) { + EXPECT_FALSE(ExtendedParsedFormat::New("")); + + EXPECT_FALSE(ExtendedParsedFormat::New( + "ABC%vDEF%v")); + + std::string format = "%vFFF%vZZZ%f"; + EXPECT_FALSE( + (ExtendedParsedFormat::New(format))); +} + TEST_F(ParsedFormatTest, RegressionMixPositional) { EXPECT_FALSE( (ExtendedParsedFormat::New("%1$d %o"))); } +TEST_F(ParsedFormatTest, DisallowModifiersWithV) { + auto f = ParsedFormat<'v'>::New("ABC%80vDEF"); + EXPECT_EQ(f, nullptr); + + f = ParsedFormat<'v'>::New("ABC%0vDEF"); + EXPECT_EQ(f, nullptr); + + f = ParsedFormat<'v'>::New("ABC%.1vDEF"); + EXPECT_EQ(f, nullptr); +} + using FormatWrapperTest = ::testing::Test; // Plain wrapper for StrFormat. 
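// Standalone sketch (not from the patched sources) of the "plain wrapper"
// pattern the surrounding FormatWrapperTest cases exercise: forwarding a
// compile-time checked format string (including the new %v) through your own
// API via absl::FormatSpec. LogLine is a made-up name for illustration.
#include <string>

#include "absl/strings/str_format.h"

template <typename... Args>
std::string LogLine(const absl::FormatSpec<Args...>& format,
                    const Args&... args) {
  return absl::StrFormat(format, args...);
}

// Usage:
//   LogLine("%s there", "hello");   // "hello there"
//   std::string who = "world";
//   LogLine("%v there", who);       // "world there"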
@@ -710,20 +1049,33 @@ TEST_F(FormatWrapperTest, ConstexprStringFormat) { EXPECT_EQ(WrappedFormat("%s there", "hello"), "hello there"); } +TEST_F(FormatWrapperTest, ConstexprStringFormatWithV) { + std::string hello = "hello"; + EXPECT_EQ(WrappedFormat("%v there", hello), "hello there"); +} + TEST_F(FormatWrapperTest, ParsedFormat) { ParsedFormat<'s'> format("%s there"); EXPECT_EQ(WrappedFormat(format, "hello"), "hello there"); } +TEST_F(FormatWrapperTest, ParsedFormatWithV) { + std::string hello = "hello"; + ParsedFormat<'v'> format("%v there"); + EXPECT_EQ(WrappedFormat(format, hello), "hello there"); +} + } // namespace ABSL_NAMESPACE_END } // namespace absl +namespace { using FormatExtensionTest = ::testing::Test; struct Point { friend absl::FormatConvertResult + absl::FormatConversionCharSet::kIntegral | + absl::FormatConversionCharSet::v> AbslFormatConvert(const Point& p, const absl::FormatConversionSpec& spec, absl::FormatSink* s) { if (spec.conversion_char() == absl::FormatConversionChar::s) { @@ -742,6 +1094,7 @@ TEST_F(FormatExtensionTest, AbslFormatConvertExample) { Point p; EXPECT_EQ(absl::StrFormat("a %s z", p), "a x=10 y=20 z"); EXPECT_EQ(absl::StrFormat("a %d z", p), "a 10,20 z"); + EXPECT_EQ(absl::StrFormat("a %v z", p), "a 10,20 z"); // Typed formatting will fail to compile an invalid format. // StrFormat("%f", p); // Does not compile. @@ -751,6 +1104,51 @@ TEST_F(FormatExtensionTest, AbslFormatConvertExample) { EXPECT_FALSE(absl::FormatUntyped(&actual, f1, {absl::FormatArg(p)})); } +struct PointStringify { + template + friend void AbslStringify(FormatSink& sink, const PointStringify& p) { + sink.Append(absl::StrCat("(", p.x, ", ", p.y, ")")); + } + + double x = 10.0; + double y = 20.0; +}; + +TEST_F(FormatExtensionTest, AbslStringifyExample) { + PointStringify p; + EXPECT_EQ(absl::StrFormat("a %v z", p), "a (10, 20) z"); +} + +struct PointStringifyUsingFormat { + template + friend void AbslStringify(FormatSink& sink, + const PointStringifyUsingFormat& p) { + absl::Format(&sink, "(%g, %g)", p.x, p.y); + } + + double x = 10.0; + double y = 20.0; +}; + +TEST_F(FormatExtensionTest, AbslStringifyExampleUsingFormat) { + PointStringifyUsingFormat p; + EXPECT_EQ(absl::StrFormat("a %v z", p), "a (10, 20) z"); +} + +enum class EnumWithStringify { Many = 0, Choices = 1 }; + +template +void AbslStringify(Sink& sink, EnumWithStringify e) { + absl::Format(&sink, "%s", e == EnumWithStringify::Many ? "Many" : "Choices"); +} + +TEST_F(FormatExtensionTest, AbslStringifyWithEnum) { + const auto e = EnumWithStringify::Choices; + EXPECT_EQ(absl::StrFormat("My choice is %v", e), "My choice is Choices"); +} + +} // namespace + // Some codegen thunks that we can use to easily dump the generated assembly for // different StrFormat calls. diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/str_join.h b/TMessagesProj/jni/voip/webrtc/absl/strings/str_join.h index 33534536cf..ee5ae7efdf 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/str_join.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/str_join.h @@ -72,21 +72,15 @@ ABSL_NAMESPACE_BEGIN // functions. You may provide your own Formatter to enable `absl::StrJoin()` to // work with arbitrary types. // -// The following is an example of a custom Formatter that simply uses -// `std::to_string()` to format an integer as a std::string. 
-// -// struct MyFormatter { -// void operator()(std::string* out, int i) const { -// out->append(std::to_string(i)); -// } -// }; -// -// You would use the above formatter by passing an instance of it as the final -// argument to `absl::StrJoin()`: -// -// std::vector v = {1, 2, 3, 4}; -// std::string s = absl::StrJoin(v, "-", MyFormatter()); -// EXPECT_EQ("1-2-3-4", s); +// The following is an example of a custom Formatter that uses +// `absl::FormatDuration` to join a list of `absl::Duration`s. +// +// std::vector v = {absl::Seconds(1), absl::Milliseconds(10)}; +// std::string s = +// absl::StrJoin(v, ", ", [](std::string* out, absl::Duration dur) { +// absl::StrAppend(out, absl::FormatDuration(dur)); +// }); +// EXPECT_EQ("1s, 10ms", s); // // The following standard formatters are provided within this file: // diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/string_view.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/string_view.cc index d596e08cde..e2261625f9 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/string_view.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/string_view.cc @@ -32,7 +32,7 @@ void WritePadding(std::ostream& o, size_t pad) { memset(fill_buf, o.fill(), sizeof(fill_buf)); while (pad) { size_t n = std::min(pad, sizeof(fill_buf)); - o.write(fill_buf, n); + o.write(fill_buf, static_cast(n)); pad -= n; } } @@ -63,7 +63,7 @@ std::ostream& operator<<(std::ostream& o, string_view piece) { size_t lpad = 0; size_t rpad = 0; if (static_cast(o.width()) > piece.size()) { - size_t pad = o.width() - piece.size(); + size_t pad = static_cast(o.width()) - piece.size(); if ((o.flags() & o.adjustfield) == o.left) { rpad = pad; } else { @@ -71,7 +71,7 @@ std::ostream& operator<<(std::ostream& o, string_view piece) { } } if (lpad) WritePadding(o, lpad); - o.write(piece.data(), piece.size()); + o.write(piece.data(), static_cast(piece.size())); if (rpad) WritePadding(o, rpad); o.width(0); } @@ -86,7 +86,7 @@ string_view::size_type string_view::find(string_view s, } const char* result = strings_internal::memmatch(ptr_ + pos, length_ - pos, s.ptr_, s.length_); - return result ? result - ptr_ : npos; + return result ? static_cast(result - ptr_) : npos; } string_view::size_type string_view::find(char c, size_type pos) const noexcept { @@ -95,7 +95,7 @@ string_view::size_type string_view::find(char c, size_type pos) const noexcept { } const char* result = static_cast(memchr(ptr_ + pos, c, length_ - pos)); - return result != nullptr ? result - ptr_ : npos; + return result != nullptr ? static_cast(result - ptr_) : npos; } string_view::size_type string_view::rfind(string_view s, @@ -104,7 +104,7 @@ string_view::size_type string_view::rfind(string_view s, if (s.empty()) return std::min(length_, pos); const char* last = ptr_ + std::min(length_ - s.length_, pos) + s.length_; const char* result = std::find_end(ptr_, last, s.ptr_, s.ptr_ + s.length_); - return result != last ? result - ptr_ : npos; + return result != last ? static_cast(result - ptr_) : npos; } // Search range is [0..pos] inclusive. If pos == npos, search everything. @@ -207,22 +207,11 @@ string_view::size_type string_view::find_last_not_of( return npos; } -// MSVC has non-standard behavior that implicitly creates definitions for static -// const members. These implicit definitions conflict with explicit out-of-class -// member definitions that are required by the C++ standard, resulting in -// LNK1169 "multiply defined" errors at link time. 
__declspec(selectany) asks -// MSVC to choose only one definition for the symbol it decorates. See details -// at https://msdn.microsoft.com/en-us/library/34h23df8(v=vs.100).aspx -#ifdef _MSC_VER -#define ABSL_STRING_VIEW_SELECTANY __declspec(selectany) -#else -#define ABSL_STRING_VIEW_SELECTANY -#endif -ABSL_STRING_VIEW_SELECTANY +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL constexpr string_view::size_type string_view::npos; -ABSL_STRING_VIEW_SELECTANY constexpr string_view::size_type string_view::kMaxSize; +#endif ABSL_NAMESPACE_END } // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/string_view.h b/TMessagesProj/jni/voip/webrtc/absl/strings/string_view.h index a4c9a6526c..eae11b2ab6 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/string_view.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/string_view.h @@ -55,19 +55,14 @@ ABSL_NAMESPACE_END #else // ABSL_USES_STD_STRING_VIEW -#if ABSL_HAVE_BUILTIN(__builtin_memcmp) || \ - (defined(__GNUC__) && !defined(__clang__)) +#if ABSL_HAVE_BUILTIN(__builtin_memcmp) || \ + (defined(__GNUC__) && !defined(__clang__)) || \ + (defined(_MSC_VER) && _MSC_VER >= 1928) #define ABSL_INTERNAL_STRING_VIEW_MEMCMP __builtin_memcmp #else // ABSL_HAVE_BUILTIN(__builtin_memcmp) #define ABSL_INTERNAL_STRING_VIEW_MEMCMP memcmp #endif // ABSL_HAVE_BUILTIN(__builtin_memcmp) -#if defined(__cplusplus) && __cplusplus >= 201402L -#define ABSL_INTERNAL_STRING_VIEW_CXX14_CONSTEXPR constexpr -#else -#define ABSL_INTERNAL_STRING_VIEW_CXX14_CONSTEXPR -#endif - namespace absl { ABSL_NAMESPACE_BEGIN @@ -340,7 +335,7 @@ class string_view { // // Removes the first `n` characters from the `string_view`. Note that the // underlying string is not changed, only the view. - ABSL_INTERNAL_STRING_VIEW_CXX14_CONSTEXPR void remove_prefix(size_type n) { + constexpr void remove_prefix(size_type n) { ABSL_HARDENING_ASSERT(n <= length_); ptr_ += n; length_ -= n; @@ -350,7 +345,7 @@ class string_view { // // Removes the last `n` characters from the `string_view`. Note that the // underlying string is not changed, only the view. - ABSL_INTERNAL_STRING_VIEW_CXX14_CONSTEXPR void remove_suffix(size_type n) { + constexpr void remove_suffix(size_type n) { ABSL_HARDENING_ASSERT(n <= length_); length_ -= n; } @@ -358,7 +353,7 @@ class string_view { // string_view::swap() // // Swaps this `string_view` with another `string_view`. - ABSL_INTERNAL_STRING_VIEW_CXX14_CONSTEXPR void swap(string_view& s) noexcept { + constexpr void swap(string_view& s) noexcept { auto t = *this; *this = s; s = t; @@ -677,7 +672,6 @@ std::ostream& operator<<(std::ostream& o, string_view piece); ABSL_NAMESPACE_END } // namespace absl -#undef ABSL_INTERNAL_STRING_VIEW_CXX14_CONSTEXPR #undef ABSL_INTERNAL_STRING_VIEW_MEMCMP #endif // ABSL_USES_STD_STRING_VIEW diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/string_view_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/string_view_test.cc index 2c13dd1c14..990c211a8e 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/string_view_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/string_view_test.cc @@ -82,7 +82,7 @@ TEST(StringViewTest, Ctor) { // Null. absl::string_view s10; EXPECT_TRUE(s10.data() == nullptr); - EXPECT_EQ(0, s10.length()); + EXPECT_EQ(0u, s10.length()); } { @@ -90,17 +90,17 @@ TEST(StringViewTest, Ctor) { const char* hello = "hello"; absl::string_view s20(hello); EXPECT_TRUE(s20.data() == hello); - EXPECT_EQ(5, s20.length()); + EXPECT_EQ(5u, s20.length()); // const char* with length. 
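// Standalone note (not from the patched sources) on the `u` suffixes added
// throughout the assertions below: size()/length() return an unsigned size_t,
// so comparing against a signed int literal draws sign-compare warnings
// inside EXPECT_EQ; an unsigned literal keeps both operands unsigned.
#include <string_view>

static_assert(std::string_view("hello").size() == 5u,
              "size() compares cleanly against an unsigned literal");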
absl::string_view s21(hello, 4); EXPECT_TRUE(s21.data() == hello); - EXPECT_EQ(4, s21.length()); + EXPECT_EQ(4u, s21.length()); // Not recommended, but valid C++ absl::string_view s22(hello, 6); EXPECT_TRUE(s22.data() == hello); - EXPECT_EQ(6, s22.length()); + EXPECT_EQ(6u, s22.length()); } { @@ -108,7 +108,7 @@ TEST(StringViewTest, Ctor) { std::string hola = "hola"; absl::string_view s30(hola); EXPECT_TRUE(s30.data() == hola.data()); - EXPECT_EQ(4, s30.length()); + EXPECT_EQ(4u, s30.length()); // std::string with embedded '\0'. hola.push_back('\0'); @@ -116,7 +116,7 @@ TEST(StringViewTest, Ctor) { hola.push_back('\0'); absl::string_view s31(hola); EXPECT_TRUE(s31.data() == hola.data()); - EXPECT_EQ(8, s31.length()); + EXPECT_EQ(8u, s31.length()); } { @@ -165,7 +165,7 @@ TEST(StringViewTest, STLComparator) { map.insert(std::make_pair(p1, 0)); map.insert(std::make_pair(p2, 1)); map.insert(std::make_pair(p3, 2)); - EXPECT_EQ(map.size(), 3); + EXPECT_EQ(map.size(), 3u); TestMap::const_iterator iter = map.begin(); EXPECT_EQ(iter->second, 1); @@ -183,7 +183,7 @@ TEST(StringViewTest, STLComparator) { EXPECT_TRUE(new_iter != map.end()); map.erase(new_iter); - EXPECT_EQ(map.size(), 2); + EXPECT_EQ(map.size(), 2u); iter = map.begin(); EXPECT_EQ(iter->second, 2); @@ -261,11 +261,11 @@ TEST(StringViewTest, ComparisonOperators) { TEST(StringViewTest, ComparisonOperatorsByCharacterPosition) { std::string x; - for (int i = 0; i < 256; i++) { + for (size_t i = 0; i < 256; i++) { x += 'a'; std::string y = x; COMPARE(true, ==, x, y); - for (int j = 0; j < i; j++) { + for (size_t j = 0; j < i; j++) { std::string z = x; z[j] = 'b'; // Differs in position 'j' COMPARE(false, ==, x, z); @@ -341,12 +341,12 @@ TEST(StringViewTest, STL1) { EXPECT_EQ(*(c.rend() - 1), 'x'); EXPECT_TRUE(a.rbegin() + 26 == a.rend()); - EXPECT_EQ(a.size(), 26); - EXPECT_EQ(b.size(), 3); - EXPECT_EQ(c.size(), 3); - EXPECT_EQ(d.size(), 6); - EXPECT_EQ(e.size(), 0); - EXPECT_EQ(f.size(), 7); + EXPECT_EQ(a.size(), 26u); + EXPECT_EQ(b.size(), 3u); + EXPECT_EQ(c.size(), 3u); + EXPECT_EQ(d.size(), 6u); + EXPECT_EQ(e.size(), 0u); + EXPECT_EQ(f.size(), 7u); EXPECT_TRUE(!d.empty()); EXPECT_TRUE(d.begin() != d.end()); @@ -356,17 +356,17 @@ TEST(StringViewTest, STL1) { EXPECT_TRUE(e.begin() == e.end()); char buf[4] = { '%', '%', '%', '%' }; - EXPECT_EQ(a.copy(buf, 4), 4); + EXPECT_EQ(a.copy(buf, 4), 4u); EXPECT_EQ(buf[0], a[0]); EXPECT_EQ(buf[1], a[1]); EXPECT_EQ(buf[2], a[2]); EXPECT_EQ(buf[3], a[3]); - EXPECT_EQ(a.copy(buf, 3, 7), 3); + EXPECT_EQ(a.copy(buf, 3, 7), 3u); EXPECT_EQ(buf[0], a[7]); EXPECT_EQ(buf[1], a[8]); EXPECT_EQ(buf[2], a[9]); EXPECT_EQ(buf[3], a[3]); - EXPECT_EQ(c.copy(buf, 99), 3); + EXPECT_EQ(c.copy(buf, 99), 3u); EXPECT_EQ(buf[0], c[0]); EXPECT_EQ(buf[1], c[1]); EXPECT_EQ(buf[2], c[2]); @@ -393,22 +393,22 @@ TEST(StringViewTest, STL2) { 7); d = absl::string_view(); - EXPECT_EQ(d.size(), 0); + EXPECT_EQ(d.size(), 0u); EXPECT_TRUE(d.empty()); EXPECT_TRUE(d.data() == nullptr); EXPECT_TRUE(d.begin() == d.end()); - EXPECT_EQ(a.find(b), 0); + EXPECT_EQ(a.find(b), 0u); EXPECT_EQ(a.find(b, 1), absl::string_view::npos); - EXPECT_EQ(a.find(c), 23); - EXPECT_EQ(a.find(c, 9), 23); + EXPECT_EQ(a.find(c), 23u); + EXPECT_EQ(a.find(c, 9), 23u); EXPECT_EQ(a.find(c, absl::string_view::npos), absl::string_view::npos); EXPECT_EQ(b.find(c), absl::string_view::npos); EXPECT_EQ(b.find(c, absl::string_view::npos), absl::string_view::npos); - EXPECT_EQ(a.find(d), 0); - EXPECT_EQ(a.find(e), 0); - EXPECT_EQ(a.find(d, 12), 12); - EXPECT_EQ(a.find(e, 
17), 17); + EXPECT_EQ(a.find(d), 0u); + EXPECT_EQ(a.find(e), 0u); + EXPECT_EQ(a.find(d, 12), 12u); + EXPECT_EQ(a.find(e, 17), 17u); absl::string_view g("xx not found bb"); EXPECT_EQ(a.find(g), absl::string_view::npos); // empty string nonsense @@ -427,17 +427,17 @@ TEST(StringViewTest, STL2) { EXPECT_EQ(e.find(d, 4), std::string().find(std::string(), 4)); EXPECT_EQ(e.find(e, 4), std::string().find(std::string(), 4)); - EXPECT_EQ(a.find('a'), 0); - EXPECT_EQ(a.find('c'), 2); - EXPECT_EQ(a.find('z'), 25); + EXPECT_EQ(a.find('a'), 0u); + EXPECT_EQ(a.find('c'), 2u); + EXPECT_EQ(a.find('z'), 25u); EXPECT_EQ(a.find('$'), absl::string_view::npos); EXPECT_EQ(a.find('\0'), absl::string_view::npos); - EXPECT_EQ(f.find('\0'), 3); - EXPECT_EQ(f.find('3'), 2); - EXPECT_EQ(f.find('5'), 5); - EXPECT_EQ(g.find('o'), 4); - EXPECT_EQ(g.find('o', 4), 4); - EXPECT_EQ(g.find('o', 5), 8); + EXPECT_EQ(f.find('\0'), 3u); + EXPECT_EQ(f.find('3'), 2u); + EXPECT_EQ(f.find('5'), 5u); + EXPECT_EQ(g.find('o'), 4u); + EXPECT_EQ(g.find('o', 4), 4u); + EXPECT_EQ(g.find('o', 5), 8u); EXPECT_EQ(a.find('b', 5), absl::string_view::npos); // empty string nonsense EXPECT_EQ(d.find('\0'), absl::string_view::npos); @@ -449,8 +449,8 @@ TEST(StringViewTest, STL2) { EXPECT_EQ(d.find('x', 4), absl::string_view::npos); EXPECT_EQ(e.find('x', 7), absl::string_view::npos); - EXPECT_EQ(a.find(b.data(), 1, 0), 1); - EXPECT_EQ(a.find(c.data(), 9, 0), 9); + EXPECT_EQ(a.find(b.data(), 1, 0), 1u); + EXPECT_EQ(a.find(c.data(), 9, 0), 9u); EXPECT_EQ(a.find(c.data(), absl::string_view::npos, 0), absl::string_view::npos); EXPECT_EQ(b.find(c.data(), absl::string_view::npos, 0), @@ -460,16 +460,16 @@ TEST(StringViewTest, STL2) { EXPECT_EQ(e.find(b.data(), 7, 0), absl::string_view::npos); EXPECT_EQ(a.find(b.data(), 1), absl::string_view::npos); - EXPECT_EQ(a.find(c.data(), 9), 23); + EXPECT_EQ(a.find(c.data(), 9), 23u); EXPECT_EQ(a.find(c.data(), absl::string_view::npos), absl::string_view::npos); EXPECT_EQ(b.find(c.data(), absl::string_view::npos), absl::string_view::npos); // empty string nonsense EXPECT_EQ(d.find(b.data(), 4), absl::string_view::npos); EXPECT_EQ(e.find(b.data(), 7), absl::string_view::npos); - EXPECT_EQ(a.rfind(b), 0); - EXPECT_EQ(a.rfind(b, 1), 0); - EXPECT_EQ(a.rfind(c), 23); + EXPECT_EQ(a.rfind(b), 0u); + EXPECT_EQ(a.rfind(b, 1), 0u); + EXPECT_EQ(a.rfind(c), 23u); EXPECT_EQ(a.rfind(c, 22), absl::string_view::npos); EXPECT_EQ(a.rfind(c, 1), absl::string_view::npos); EXPECT_EQ(a.rfind(c, 0), absl::string_view::npos); @@ -477,8 +477,8 @@ TEST(StringViewTest, STL2) { EXPECT_EQ(b.rfind(c, 0), absl::string_view::npos); EXPECT_EQ(a.rfind(d), std::string(a).rfind(std::string())); EXPECT_EQ(a.rfind(e), std::string(a).rfind(std::string())); - EXPECT_EQ(a.rfind(d, 12), 12); - EXPECT_EQ(a.rfind(e, 17), 17); + EXPECT_EQ(a.rfind(d, 12), 12u); + EXPECT_EQ(a.rfind(e, 17), 17u); EXPECT_EQ(a.rfind(g), absl::string_view::npos); EXPECT_EQ(d.rfind(b), absl::string_view::npos); EXPECT_EQ(e.rfind(b), absl::string_view::npos); @@ -494,28 +494,28 @@ TEST(StringViewTest, STL2) { EXPECT_EQ(d.rfind(e), std::string().rfind(std::string())); EXPECT_EQ(e.rfind(e), std::string().rfind(std::string())); - EXPECT_EQ(g.rfind('o'), 8); + EXPECT_EQ(g.rfind('o'), 8u); EXPECT_EQ(g.rfind('q'), absl::string_view::npos); - EXPECT_EQ(g.rfind('o', 8), 8); - EXPECT_EQ(g.rfind('o', 7), 4); + EXPECT_EQ(g.rfind('o', 8), 8u); + EXPECT_EQ(g.rfind('o', 7), 4u); EXPECT_EQ(g.rfind('o', 3), absl::string_view::npos); - EXPECT_EQ(f.rfind('\0'), 3); - EXPECT_EQ(f.rfind('\0', 12), 3); - 
EXPECT_EQ(f.rfind('3'), 2); - EXPECT_EQ(f.rfind('5'), 5); + EXPECT_EQ(f.rfind('\0'), 3u); + EXPECT_EQ(f.rfind('\0', 12), 3u); + EXPECT_EQ(f.rfind('3'), 2u); + EXPECT_EQ(f.rfind('5'), 5u); // empty string nonsense EXPECT_EQ(d.rfind('o'), absl::string_view::npos); EXPECT_EQ(e.rfind('o'), absl::string_view::npos); EXPECT_EQ(d.rfind('o', 4), absl::string_view::npos); EXPECT_EQ(e.rfind('o', 7), absl::string_view::npos); - EXPECT_EQ(a.rfind(b.data(), 1, 0), 1); - EXPECT_EQ(a.rfind(c.data(), 22, 0), 22); - EXPECT_EQ(a.rfind(c.data(), 1, 0), 1); - EXPECT_EQ(a.rfind(c.data(), 0, 0), 0); - EXPECT_EQ(b.rfind(c.data(), 0, 0), 0); - EXPECT_EQ(d.rfind(b.data(), 4, 0), 0); - EXPECT_EQ(e.rfind(b.data(), 7, 0), 0); + EXPECT_EQ(a.rfind(b.data(), 1, 0), 1u); + EXPECT_EQ(a.rfind(c.data(), 22, 0), 22u); + EXPECT_EQ(a.rfind(c.data(), 1, 0), 1u); + EXPECT_EQ(a.rfind(c.data(), 0, 0), 0u); + EXPECT_EQ(b.rfind(c.data(), 0, 0), 0u); + EXPECT_EQ(d.rfind(b.data(), 4, 0), 0u); + EXPECT_EQ(e.rfind(b.data(), 7, 0), 0u); } // Continued from STL2 @@ -533,18 +533,18 @@ TEST(StringViewTest, STL2FindFirst) { absl::string_view g("xx not found bb"); d = absl::string_view(); - EXPECT_EQ(a.find_first_of(b), 0); - EXPECT_EQ(a.find_first_of(b, 0), 0); - EXPECT_EQ(a.find_first_of(b, 1), 1); - EXPECT_EQ(a.find_first_of(b, 2), 2); + EXPECT_EQ(a.find_first_of(b), 0u); + EXPECT_EQ(a.find_first_of(b, 0), 0u); + EXPECT_EQ(a.find_first_of(b, 1), 1u); + EXPECT_EQ(a.find_first_of(b, 2), 2u); EXPECT_EQ(a.find_first_of(b, 3), absl::string_view::npos); - EXPECT_EQ(a.find_first_of(c), 23); - EXPECT_EQ(a.find_first_of(c, 23), 23); - EXPECT_EQ(a.find_first_of(c, 24), 24); - EXPECT_EQ(a.find_first_of(c, 25), 25); + EXPECT_EQ(a.find_first_of(c), 23u); + EXPECT_EQ(a.find_first_of(c, 23), 23u); + EXPECT_EQ(a.find_first_of(c, 24), 24u); + EXPECT_EQ(a.find_first_of(c, 25), 25u); EXPECT_EQ(a.find_first_of(c, 26), absl::string_view::npos); - EXPECT_EQ(g.find_first_of(b), 13); - EXPECT_EQ(g.find_first_of(c), 0); + EXPECT_EQ(g.find_first_of(b), 13u); + EXPECT_EQ(g.find_first_of(c), 0u); EXPECT_EQ(a.find_first_of(f), absl::string_view::npos); EXPECT_EQ(f.find_first_of(a), absl::string_view::npos); // empty string nonsense @@ -557,19 +557,19 @@ TEST(StringViewTest, STL2FindFirst) { EXPECT_EQ(d.find_first_of(e), absl::string_view::npos); EXPECT_EQ(e.find_first_of(e), absl::string_view::npos); - EXPECT_EQ(a.find_first_not_of(b), 3); - EXPECT_EQ(a.find_first_not_of(c), 0); + EXPECT_EQ(a.find_first_not_of(b), 3u); + EXPECT_EQ(a.find_first_not_of(c), 0u); EXPECT_EQ(b.find_first_not_of(a), absl::string_view::npos); EXPECT_EQ(c.find_first_not_of(a), absl::string_view::npos); - EXPECT_EQ(f.find_first_not_of(a), 0); - EXPECT_EQ(a.find_first_not_of(f), 0); - EXPECT_EQ(a.find_first_not_of(d), 0); - EXPECT_EQ(a.find_first_not_of(e), 0); + EXPECT_EQ(f.find_first_not_of(a), 0u); + EXPECT_EQ(a.find_first_not_of(f), 0u); + EXPECT_EQ(a.find_first_not_of(d), 0u); + EXPECT_EQ(a.find_first_not_of(e), 0u); // empty string nonsense - EXPECT_EQ(a.find_first_not_of(d), 0); - EXPECT_EQ(a.find_first_not_of(e), 0); - EXPECT_EQ(a.find_first_not_of(d, 1), 1); - EXPECT_EQ(a.find_first_not_of(e, 1), 1); + EXPECT_EQ(a.find_first_not_of(d), 0u); + EXPECT_EQ(a.find_first_not_of(e), 0u); + EXPECT_EQ(a.find_first_not_of(d, 1), 1u); + EXPECT_EQ(a.find_first_not_of(e, 1), 1u); EXPECT_EQ(a.find_first_not_of(d, a.size() - 1), a.size() - 1); EXPECT_EQ(a.find_first_not_of(e, a.size() - 1), a.size() - 1); EXPECT_EQ(a.find_first_not_of(d, a.size()), absl::string_view::npos); @@ -588,11 +588,11 @@ 
TEST(StringViewTest, STL2FindFirst) { absl::string_view h("===="); EXPECT_EQ(h.find_first_not_of('='), absl::string_view::npos); EXPECT_EQ(h.find_first_not_of('=', 3), absl::string_view::npos); - EXPECT_EQ(h.find_first_not_of('\0'), 0); - EXPECT_EQ(g.find_first_not_of('x'), 2); - EXPECT_EQ(f.find_first_not_of('\0'), 0); - EXPECT_EQ(f.find_first_not_of('\0', 3), 4); - EXPECT_EQ(f.find_first_not_of('\0', 2), 2); + EXPECT_EQ(h.find_first_not_of('\0'), 0u); + EXPECT_EQ(g.find_first_not_of('x'), 2u); + EXPECT_EQ(f.find_first_not_of('\0'), 0u); + EXPECT_EQ(f.find_first_not_of('\0', 3), 4u); + EXPECT_EQ(f.find_first_not_of('\0', 2), 2u); // empty string nonsense EXPECT_EQ(d.find_first_not_of('x'), absl::string_view::npos); EXPECT_EQ(e.find_first_not_of('x'), absl::string_view::npos); @@ -618,20 +618,20 @@ TEST(StringViewTest, STL2FindLast) { d = absl::string_view(); EXPECT_EQ(h.find_last_of(a), absl::string_view::npos); - EXPECT_EQ(g.find_last_of(a), g.size()-1); - EXPECT_EQ(a.find_last_of(b), 2); - EXPECT_EQ(a.find_last_of(c), a.size()-1); - EXPECT_EQ(f.find_last_of(i), 6); - EXPECT_EQ(a.find_last_of('a'), 0); - EXPECT_EQ(a.find_last_of('b'), 1); - EXPECT_EQ(a.find_last_of('z'), 25); - EXPECT_EQ(a.find_last_of('a', 5), 0); - EXPECT_EQ(a.find_last_of('b', 5), 1); + EXPECT_EQ(g.find_last_of(a), g.size() - 1); + EXPECT_EQ(a.find_last_of(b), 2u); + EXPECT_EQ(a.find_last_of(c), a.size() - 1); + EXPECT_EQ(f.find_last_of(i), 6u); + EXPECT_EQ(a.find_last_of('a'), 0u); + EXPECT_EQ(a.find_last_of('b'), 1u); + EXPECT_EQ(a.find_last_of('z'), 25u); + EXPECT_EQ(a.find_last_of('a', 5), 0u); + EXPECT_EQ(a.find_last_of('b', 5), 1u); EXPECT_EQ(a.find_last_of('b', 0), absl::string_view::npos); - EXPECT_EQ(a.find_last_of('z', 25), 25); + EXPECT_EQ(a.find_last_of('z', 25), 25u); EXPECT_EQ(a.find_last_of('z', 24), absl::string_view::npos); - EXPECT_EQ(f.find_last_of(i, 5), 5); - EXPECT_EQ(f.find_last_of(i, 6), 6); + EXPECT_EQ(f.find_last_of(i, 5), 5u); + EXPECT_EQ(f.find_last_of(i, 6), 6u); EXPECT_EQ(f.find_last_of(a, 4), absl::string_view::npos); // empty string nonsense EXPECT_EQ(f.find_last_of(d), absl::string_view::npos); @@ -651,19 +651,19 @@ TEST(StringViewTest, STL2FindLast) { EXPECT_EQ(d.find_last_of(f, 4), absl::string_view::npos); EXPECT_EQ(e.find_last_of(f, 4), absl::string_view::npos); - EXPECT_EQ(a.find_last_not_of(b), a.size()-1); - EXPECT_EQ(a.find_last_not_of(c), 22); + EXPECT_EQ(a.find_last_not_of(b), a.size() - 1); + EXPECT_EQ(a.find_last_not_of(c), 22u); EXPECT_EQ(b.find_last_not_of(a), absl::string_view::npos); EXPECT_EQ(b.find_last_not_of(b), absl::string_view::npos); - EXPECT_EQ(f.find_last_not_of(i), 4); - EXPECT_EQ(a.find_last_not_of(c, 24), 22); - EXPECT_EQ(a.find_last_not_of(b, 3), 3); + EXPECT_EQ(f.find_last_not_of(i), 4u); + EXPECT_EQ(a.find_last_not_of(c, 24), 22u); + EXPECT_EQ(a.find_last_not_of(b, 3), 3u); EXPECT_EQ(a.find_last_not_of(b, 2), absl::string_view::npos); // empty string nonsense - EXPECT_EQ(f.find_last_not_of(d), f.size()-1); - EXPECT_EQ(f.find_last_not_of(e), f.size()-1); - EXPECT_EQ(f.find_last_not_of(d, 4), 4); - EXPECT_EQ(f.find_last_not_of(e, 4), 4); + EXPECT_EQ(f.find_last_not_of(d), f.size() - 1); + EXPECT_EQ(f.find_last_not_of(e), f.size() - 1); + EXPECT_EQ(f.find_last_not_of(d, 4), 4u); + EXPECT_EQ(f.find_last_not_of(e, 4), 4u); EXPECT_EQ(d.find_last_not_of(d), absl::string_view::npos); EXPECT_EQ(d.find_last_not_of(e), absl::string_view::npos); EXPECT_EQ(e.find_last_not_of(d), absl::string_view::npos); @@ -679,10 +679,10 @@ TEST(StringViewTest, STL2FindLast) { 
EXPECT_EQ(h.find_last_not_of('x'), h.size() - 1); EXPECT_EQ(h.find_last_not_of('='), absl::string_view::npos); - EXPECT_EQ(b.find_last_not_of('c'), 1); - EXPECT_EQ(h.find_last_not_of('x', 2), 2); + EXPECT_EQ(b.find_last_not_of('c'), 1u); + EXPECT_EQ(h.find_last_not_of('x', 2), 2u); EXPECT_EQ(h.find_last_not_of('=', 2), absl::string_view::npos); - EXPECT_EQ(b.find_last_not_of('b', 1), 0); + EXPECT_EQ(b.find_last_not_of('b', 1), 0u); // empty string nonsense EXPECT_EQ(d.find_last_not_of('x'), absl::string_view::npos); EXPECT_EQ(e.find_last_not_of('x'), absl::string_view::npos); @@ -734,7 +734,7 @@ TEST(StringViewTest, TruncSubstr) { TEST(StringViewTest, UTF8) { std::string utf8 = "\u00E1"; std::string utf8_twice = utf8 + " " + utf8; - int utf8_len = strlen(utf8.data()); + size_t utf8_len = strlen(utf8.data()); EXPECT_EQ(utf8_len, absl::string_view(utf8_twice).find_first_of(" ")); EXPECT_EQ(utf8_len, absl::string_view(utf8_twice).find_first_of(" \t")); } @@ -879,12 +879,12 @@ TEST(StringViewTest, FrontBackEmpty) { TEST(StringViewTest, NULLInput) { absl::string_view s; EXPECT_EQ(s.data(), nullptr); - EXPECT_EQ(s.size(), 0); + EXPECT_EQ(s.size(), 0u); #ifdef ABSL_HAVE_STRING_VIEW_FROM_NULLPTR s = absl::string_view(nullptr); EXPECT_EQ(s.data(), nullptr); - EXPECT_EQ(s.size(), 0); + EXPECT_EQ(s.size(), 0u); // .ToString() on a absl::string_view with nullptr should produce the empty // string. @@ -959,7 +959,7 @@ TEST(StringViewTest, NullSafeStringView) { { absl::string_view s = absl::NullSafeStringView(nullptr); EXPECT_EQ(nullptr, s.data()); - EXPECT_EQ(0, s.size()); + EXPECT_EQ(0u, s.size()); EXPECT_EQ(absl::string_view(), s); } { @@ -975,7 +975,7 @@ TEST(StringViewTest, ConstexprNullSafeStringView) { { constexpr absl::string_view s = absl::NullSafeStringView(nullptr); EXPECT_EQ(nullptr, s.data()); - EXPECT_EQ(0, s.size()); + EXPECT_EQ(0u, s.size()); EXPECT_EQ(absl::string_view(), s); } #if !defined(_MSC_VER) || _MSC_VER >= 1910 @@ -990,7 +990,7 @@ TEST(StringViewTest, ConstexprNullSafeStringView) { } { constexpr absl::string_view s = absl::NullSafeStringView("hello"); - EXPECT_EQ(s.size(), 5); + EXPECT_EQ(s.size(), 5u); EXPECT_EQ("hello", s); } #endif @@ -1036,7 +1036,7 @@ TEST(StringViewTest, ConstexprCompiles) { #ifdef ABSL_HAVE_CONSTEXPR_STRING_VIEW_FROM_CSTR constexpr absl::string_view cstr_strlen("foo"); - EXPECT_EQ(cstr_strlen.length(), 3); + EXPECT_EQ(cstr_strlen.length(), 3u); constexpr absl::string_view cstr_strlen2 = "bar"; EXPECT_EQ(cstr_strlen2, "bar"); @@ -1111,7 +1111,7 @@ TEST(StringViewTest, ConstexprCompiles) { EXPECT_NE(cstr_ptr, nullptr); constexpr size_t sp_npos = sp.npos; - EXPECT_EQ(sp_npos, -1); + EXPECT_EQ(sp_npos, static_cast(-1)); } constexpr char ConstexprMethodsHelper() { @@ -1179,7 +1179,7 @@ TEST(StringViewTest, BoundsCheck) { // Abseil's string_view implementation has bounds-checking in debug mode. 
absl::string_view h = "hello"; ABSL_EXPECT_DEATH_IF_SUPPORTED(h[5], ""); - ABSL_EXPECT_DEATH_IF_SUPPORTED(h[-1], ""); + ABSL_EXPECT_DEATH_IF_SUPPORTED(h[static_cast(-1)], ""); #endif #endif } @@ -1189,7 +1189,7 @@ TEST(ComparisonOpsTest, StringCompareNotAmbiguous) { EXPECT_LT("hello", std::string("world")); } -TEST(ComparisonOpsTest, HeterogenousStringViewEquals) { +TEST(ComparisonOpsTest, HeterogeneousStringViewEquals) { EXPECT_EQ(absl::string_view("hello"), std::string("hello")); EXPECT_EQ("hello", absl::string_view("hello")); } @@ -1201,17 +1201,17 @@ TEST(FindOneCharTest, EdgeCases) { a.remove_prefix(1); a.remove_suffix(1); - EXPECT_EQ(0, a.find('x')); - EXPECT_EQ(0, a.find('x', 0)); - EXPECT_EQ(4, a.find('x', 1)); - EXPECT_EQ(4, a.find('x', 4)); + EXPECT_EQ(0u, a.find('x')); + EXPECT_EQ(0u, a.find('x', 0)); + EXPECT_EQ(4u, a.find('x', 1)); + EXPECT_EQ(4u, a.find('x', 4)); EXPECT_EQ(absl::string_view::npos, a.find('x', 5)); - EXPECT_EQ(4, a.rfind('x')); - EXPECT_EQ(4, a.rfind('x', 5)); - EXPECT_EQ(4, a.rfind('x', 4)); - EXPECT_EQ(0, a.rfind('x', 3)); - EXPECT_EQ(0, a.rfind('x', 0)); + EXPECT_EQ(4u, a.rfind('x')); + EXPECT_EQ(4u, a.rfind('x', 5)); + EXPECT_EQ(4u, a.rfind('x', 4)); + EXPECT_EQ(0u, a.rfind('x', 3)); + EXPECT_EQ(0u, a.rfind('x', 0)); // Set a = "yyy". a.remove_prefix(1); @@ -1239,8 +1239,8 @@ TEST(HugeStringView, TwoPointTwoGB) { #if !defined(NDEBUG) && !defined(ABSL_USES_STD_STRING_VIEW) TEST(NonNegativeLenTest, NonNegativeLen) { - ABSL_EXPECT_DEATH_IF_SUPPORTED(absl::string_view("xyz", -1), - "len <= kMaxSize"); + ABSL_EXPECT_DEATH_IF_SUPPORTED( + absl::string_view("xyz", static_cast(-1)), "len <= kMaxSize"); } TEST(LenExceedsMaxSizeTest, LenExceedsMaxSize) { diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/strip.h b/TMessagesProj/jni/voip/webrtc/absl/strings/strip.h index 111872ca54..341e66fc92 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/strip.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/strip.h @@ -34,8 +34,9 @@ ABSL_NAMESPACE_BEGIN // ConsumePrefix() // -// Strips the `expected` prefix from the start of the given string, returning -// `true` if the strip operation succeeded or false otherwise. +// Strips the `expected` prefix, if found, from the start of `str`. +// If the operation succeeded, `true` is returned. If not, `false` +// is returned and `str` is not modified. // // Example: // @@ -49,8 +50,9 @@ inline bool ConsumePrefix(absl::string_view* str, absl::string_view expected) { } // ConsumeSuffix() // -// Strips the `expected` suffix from the end of the given string, returning -// `true` if the strip operation succeeded or false otherwise. +// Strips the `expected` suffix, if found, from the end of `str`. +// If the operation succeeded, `true` is returned. If not, `false` +// is returned and `str` is not modified. // // Example: // @@ -65,7 +67,7 @@ inline bool ConsumeSuffix(absl::string_view* str, absl::string_view expected) { // StripPrefix() // -// Returns a view into the input string 'str' with the given 'prefix' removed, +// Returns a view into the input string `str` with the given `prefix` removed, // but leaving the original string intact. If the prefix does not match at the // start of the string, returns the original string instead. 
ABSL_MUST_USE_RESULT inline absl::string_view StripPrefix( @@ -76,7 +78,7 @@ ABSL_MUST_USE_RESULT inline absl::string_view StripPrefix( // StripSuffix() // -// Returns a view into the input string 'str' with the given 'suffix' removed, +// Returns a view into the input string `str` with the given `suffix` removed, // but leaving the original string intact. If the suffix does not match at the // end of the string, returns the original string instead. ABSL_MUST_USE_RESULT inline absl::string_view StripSuffix( diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/substitute.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/substitute.cc index 8980b198c2..33a39305db 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/substitute.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/substitute.cc @@ -40,7 +40,8 @@ void SubstituteAndAppendArray(std::string* output, absl::string_view format, absl::CEscape(format).c_str()); #endif return; - } else if (absl::ascii_isdigit(format[i + 1])) { + } else if (absl::ascii_isdigit( + static_cast(format[i + 1]))) { int index = format[i + 1] - '0'; if (static_cast(index) >= num_args) { #ifndef NDEBUG @@ -80,7 +81,7 @@ void SubstituteAndAppendArray(std::string* output, absl::string_view format, char* target = &(*output)[original_size]; for (size_t i = 0; i < format.size(); i++) { if (format[i] == '$') { - if (absl::ascii_isdigit(format[i + 1])) { + if (absl::ascii_isdigit(static_cast(format[i + 1]))) { const absl::string_view src = args_array[format[i + 1] - '0']; target = std::copy(src.begin(), src.end(), target); ++i; // Skip next char. @@ -110,7 +111,8 @@ Arg::Arg(const void* value) { } while (num != 0); *--ptr = 'x'; *--ptr = '0'; - piece_ = absl::string_view(ptr, scratch_ + sizeof(scratch_) - ptr); + piece_ = absl::string_view( + ptr, static_cast(scratch_ + sizeof(scratch_) - ptr)); } } @@ -132,7 +134,7 @@ Arg::Arg(Hex hex) { beg = writer; } - piece_ = absl::string_view(beg, end - beg); + piece_ = absl::string_view(beg, static_cast(end - beg)); } // TODO(jorg): Don't duplicate so much code between here and str_cat.cc @@ -147,7 +149,7 @@ Arg::Arg(Dec dec) { *--writer = '0' + (value % 10); value /= 10; } - *--writer = '0' + value; + *--writer = '0' + static_cast(value); if (neg) *--writer = '-'; ptrdiff_t fillers = writer - minfill; @@ -164,7 +166,7 @@ Arg::Arg(Dec dec) { if (add_sign_again) *--writer = '-'; } - piece_ = absl::string_view(writer, end - writer); + piece_ = absl::string_view(writer, static_cast(end - writer)); } } // namespace substitute_internal diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/substitute.h b/TMessagesProj/jni/voip/webrtc/absl/strings/substitute.h index 6d2b08abb9..5c3f6eff34 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/substitute.h +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/substitute.h @@ -55,6 +55,8 @@ // * bool (Printed as "true" or "false") // * pointer types other than char* (Printed as "0x", // except that null is printed as "NULL") +// * user-defined types via the `AbslStringify()` customization point. See the +// documentation for `absl::StrCat` for an explanation on how to use this. // // If an invalid format string is provided, Substitute returns an empty string // and SubstituteAndAppend does not change the provided output string. 
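The `AbslStringify()` support referenced in the comment block above can be exercised as in the following sketch; the Point type is illustrative and mirrors the MyStruct pattern added to substitute_test.cc further down.

#include <iostream>
#include <string>

#include "absl/strings/str_cat.h"
#include "absl/strings/substitute.h"

struct Point {
  // Any type providing an AbslStringify() friend can be passed directly to
  // absl::Substitute() (and absl::StrCat()), which route it through a sink.
  template <typename Sink>
  friend void AbslStringify(Sink& sink, const Point& p) {
    sink.Append(absl::StrCat("(", p.x, ", ", p.y, ")"));
  }
  int x;
  int y;
};

int main() {
  Point p{3, 4};
  std::string s = absl::Substitute("point: $0", p);
  std::cout << s << "\n";  // prints "point: (3, 4)"
  return 0;
}
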
@@ -79,6 +81,7 @@ #include "absl/base/port.h" #include "absl/strings/ascii.h" #include "absl/strings/escaping.h" +#include "absl/strings/internal/stringify_sink.h" #include "absl/strings/numbers.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_split.h" @@ -102,14 +105,14 @@ class Arg { // Overloads for string-y things // // Explicitly overload `const char*` so the compiler doesn't cast to `bool`. - Arg(const char* value) // NOLINT(runtime/explicit) + Arg(const char* value) // NOLINT(google-explicit-constructor) : piece_(absl::NullSafeStringView(value)) {} template Arg( // NOLINT const std::basic_string, Allocator>& value) noexcept : piece_(value) {} - Arg(absl::string_view value) // NOLINT(runtime/explicit) + Arg(absl::string_view value) // NOLINT(google-explicit-constructor) : piece_(value) {} // Overloads for primitives @@ -119,45 +122,67 @@ class Arg { // probably using them as 8-bit integers and would probably prefer an integer // representation. However, we can't really know, so we make the caller decide // what to do. - Arg(char value) // NOLINT(runtime/explicit) + Arg(char value) // NOLINT(google-explicit-constructor) : piece_(scratch_, 1) { scratch_[0] = value; } Arg(short value) // NOLINT(*) : piece_(scratch_, - numbers_internal::FastIntToBuffer(value, scratch_) - scratch_) {} + static_cast( + numbers_internal::FastIntToBuffer(value, scratch_) - + scratch_)) {} Arg(unsigned short value) // NOLINT(*) : piece_(scratch_, - numbers_internal::FastIntToBuffer(value, scratch_) - scratch_) {} - Arg(int value) // NOLINT(runtime/explicit) + static_cast( + numbers_internal::FastIntToBuffer(value, scratch_) - + scratch_)) {} + Arg(int value) // NOLINT(google-explicit-constructor) : piece_(scratch_, - numbers_internal::FastIntToBuffer(value, scratch_) - scratch_) {} - Arg(unsigned int value) // NOLINT(runtime/explicit) + static_cast( + numbers_internal::FastIntToBuffer(value, scratch_) - + scratch_)) {} + Arg(unsigned int value) // NOLINT(google-explicit-constructor) : piece_(scratch_, - numbers_internal::FastIntToBuffer(value, scratch_) - scratch_) {} + static_cast( + numbers_internal::FastIntToBuffer(value, scratch_) - + scratch_)) {} Arg(long value) // NOLINT(*) : piece_(scratch_, - numbers_internal::FastIntToBuffer(value, scratch_) - scratch_) {} + static_cast( + numbers_internal::FastIntToBuffer(value, scratch_) - + scratch_)) {} Arg(unsigned long value) // NOLINT(*) : piece_(scratch_, - numbers_internal::FastIntToBuffer(value, scratch_) - scratch_) {} + static_cast( + numbers_internal::FastIntToBuffer(value, scratch_) - + scratch_)) {} Arg(long long value) // NOLINT(*) : piece_(scratch_, - numbers_internal::FastIntToBuffer(value, scratch_) - scratch_) {} + static_cast( + numbers_internal::FastIntToBuffer(value, scratch_) - + scratch_)) {} Arg(unsigned long long value) // NOLINT(*) : piece_(scratch_, - numbers_internal::FastIntToBuffer(value, scratch_) - scratch_) {} - Arg(float value) // NOLINT(runtime/explicit) + static_cast( + numbers_internal::FastIntToBuffer(value, scratch_) - + scratch_)) {} + Arg(float value) // NOLINT(google-explicit-constructor) : piece_(scratch_, numbers_internal::SixDigitsToBuffer(value, scratch_)) { } - Arg(double value) // NOLINT(runtime/explicit) + Arg(double value) // NOLINT(google-explicit-constructor) : piece_(scratch_, numbers_internal::SixDigitsToBuffer(value, scratch_)) { } - Arg(bool value) // NOLINT(runtime/explicit) + Arg(bool value) // NOLINT(google-explicit-constructor) : piece_(value ? 
"true" : "false") {} - Arg(Hex hex); // NOLINT(runtime/explicit) - Arg(Dec dec); // NOLINT(runtime/explicit) + template ::value>::type> + Arg( // NOLINT(google-explicit-constructor) + const T& v, strings_internal::StringifySink&& sink = {}) + : piece_(strings_internal::ExtractStringification(sink, v)) {} + + Arg(Hex hex); // NOLINT(google-explicit-constructor) + Arg(Dec dec); // NOLINT(google-explicit-constructor) // vector::reference and const_reference require special help to convert // to `Arg` because it requires two user defined conversions. @@ -172,7 +197,7 @@ class Arg { // `void*` values, with the exception of `char*`, are printed as // "0x". However, in the case of `nullptr`, "NULL" is printed. - Arg(const void* value); // NOLINT(runtime/explicit) + Arg(const void* value); // NOLINT(google-explicit-constructor) // Normal enums are already handled by the integer formatters. // This overload matches only scoped enums. diff --git a/TMessagesProj/jni/voip/webrtc/absl/strings/substitute_test.cc b/TMessagesProj/jni/voip/webrtc/absl/strings/substitute_test.cc index 9e6b94039a..9f04545f89 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/strings/substitute_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/strings/substitute_test.cc @@ -22,6 +22,16 @@ namespace { +struct MyStruct { + template + friend void AbslStringify(Sink& sink, const MyStruct& s) { + sink.Append("MyStruct{.value = "); + sink.Append(absl::StrCat(s.value)); + sink.Append("}"); + } + int value; +}; + TEST(SubstituteTest, Substitute) { // Basic. EXPECT_EQ("Hello, world!", absl::Substitute("$0, $1!", "Hello", "world")); @@ -70,7 +80,7 @@ TEST(SubstituteTest, Substitute) { // Volatile Pointer. // Like C++ streamed I/O, such pointers implicitly become bool volatile int vol = 237; - volatile int *volatile volptr = &vol; + volatile int* volatile volptr = &vol; str = absl::Substitute("$0", volptr); EXPECT_EQ("true", str); @@ -128,6 +138,11 @@ TEST(SubstituteTest, Substitute) { const char* null_cstring = nullptr; EXPECT_EQ("Text: ''", absl::Substitute("Text: '$0'", null_cstring)); + + MyStruct s1 = MyStruct{17}; + MyStruct s2 = MyStruct{1043}; + EXPECT_EQ("MyStruct{.value = 17}, MyStruct{.value = 1043}", + absl::Substitute("$0, $1", s1, s2)); } TEST(SubstituteTest, SubstituteAndAppend) { @@ -171,6 +186,12 @@ TEST(SubstituteTest, SubstituteAndAppend) { absl::SubstituteAndAppend(&str, "$0 $1 $2 $3 $4 $5 $6 $7 $8 $9", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j"); EXPECT_EQ("a b c d e f g h i j", str); + + str.clear(); + MyStruct s1 = MyStruct{17}; + MyStruct s2 = MyStruct{1043}; + absl::SubstituteAndAppend(&str, "$0, $1", s1, s2); + EXPECT_EQ("MyStruct{.value = 17}, MyStruct{.value = 1043}", str); } TEST(SubstituteTest, VectorBoolRef) { diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/create_thread_identity.cc b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/create_thread_identity.cc index 53a71b342b..44e6129bb0 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/create_thread_identity.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/create_thread_identity.cc @@ -38,7 +38,7 @@ ABSL_CONST_INIT static base_internal::ThreadIdentity* thread_identity_freelist; // A per-thread destructor for reclaiming associated ThreadIdentity objects. // Since we must preserve their storage we cache them for re-use. 
-void ReclaimThreadIdentity(void* v) { +static void ReclaimThreadIdentity(void* v) { base_internal::ThreadIdentity* identity = static_cast(v); @@ -48,8 +48,6 @@ void ReclaimThreadIdentity(void* v) { base_internal::LowLevelAlloc::Free(identity->per_thread_synch.all_locks); } - PerThreadSem::Destroy(identity); - // We must explicitly clear the current thread's identity: // (a) Subsequent (unrelated) per-thread destructors may require an identity. // We must guarantee a new identity is used in this case (this instructor @@ -71,7 +69,12 @@ static intptr_t RoundUp(intptr_t addr, intptr_t align) { return (addr + align - 1) & ~(align - 1); } -static void ResetThreadIdentity(base_internal::ThreadIdentity* identity) { +void OneTimeInitThreadIdentity(base_internal::ThreadIdentity* identity) { + PerThreadSem::Init(identity); +} + +static void ResetThreadIdentityBetweenReuse( + base_internal::ThreadIdentity* identity) { base_internal::PerThreadSynch* pts = &identity->per_thread_synch; pts->next = nullptr; pts->skip = nullptr; @@ -116,8 +119,9 @@ static base_internal::ThreadIdentity* NewThreadIdentity() { identity = reinterpret_cast( RoundUp(reinterpret_cast(allocation), base_internal::PerThreadSynch::kAlignment)); + OneTimeInitThreadIdentity(identity); } - ResetThreadIdentity(identity); + ResetThreadIdentityBetweenReuse(identity); return identity; } @@ -127,7 +131,6 @@ static base_internal::ThreadIdentity* NewThreadIdentity() { // REQUIRES: CurrentThreadIdentity(false) == nullptr base_internal::ThreadIdentity* CreateThreadIdentity() { base_internal::ThreadIdentity* identity = NewThreadIdentity(); - PerThreadSem::Init(identity); // Associate the value with the current thread, and attach our destructor. base_internal::SetCurrentThreadIdentity(identity, ReclaimThreadIdentity); return identity; diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/create_thread_identity.h b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/create_thread_identity.h index e121f68377..4cfde0913c 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/create_thread_identity.h +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/create_thread_identity.h @@ -36,10 +36,6 @@ namespace synchronization_internal { // For private use only. base_internal::ThreadIdentity* CreateThreadIdentity(); -// A per-thread destructor for reclaiming associated ThreadIdentity objects. -// For private use only. -void ReclaimThreadIdentity(void* v); - // Returns the ThreadIdentity object representing the calling thread; guaranteed // to be unique for its lifetime. The returned object will remain valid for the // program's lifetime; although it may be re-assigned to a subsequent thread. diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/futex.h b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/futex.h index 06fbd6d072..cb97da09ce 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/futex.h +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/futex.h @@ -87,7 +87,7 @@ class FutexImpl { public: static int WaitUntil(std::atomic *v, int32_t val, KernelTimeout t) { - int err = 0; + long err = 0; // NOLINT(runtime/int) if (t.has_timeout()) { // https://locklessinc.com/articles/futex_cheat_sheet/ // Unlike FUTEX_WAIT, FUTEX_WAIT_BITSET uses absolute time. 
@@ -105,41 +105,44 @@ class FutexImpl { FUTEX_WAIT | FUTEX_PRIVATE_FLAG, val, nullptr); } if (ABSL_PREDICT_FALSE(err != 0)) { - err = -errno; + return -errno; } - return err; + return 0; } static int WaitBitsetAbsoluteTimeout(std::atomic *v, int32_t val, int32_t bits, const struct timespec *abstime) { - int err = syscall(SYS_futex, reinterpret_cast(v), - FUTEX_WAIT_BITSET | FUTEX_PRIVATE_FLAG, val, abstime, - nullptr, bits); + // NOLINTNEXTLINE(runtime/int) + long err = syscall(SYS_futex, reinterpret_cast(v), + FUTEX_WAIT_BITSET | FUTEX_PRIVATE_FLAG, val, abstime, + nullptr, bits); if (ABSL_PREDICT_FALSE(err != 0)) { - err = -errno; + return -errno; } - return err; + return 0; } static int Wake(std::atomic *v, int32_t count) { - int err = syscall(SYS_futex, reinterpret_cast(v), - FUTEX_WAKE | FUTEX_PRIVATE_FLAG, count); + // NOLINTNEXTLINE(runtime/int) + long err = syscall(SYS_futex, reinterpret_cast(v), + FUTEX_WAKE | FUTEX_PRIVATE_FLAG, count); if (ABSL_PREDICT_FALSE(err < 0)) { - err = -errno; + return -errno; } - return err; + return 0; } // FUTEX_WAKE_BITSET static int WakeBitset(std::atomic *v, int32_t count, int32_t bits) { - int err = syscall(SYS_futex, reinterpret_cast(v), - FUTEX_WAKE_BITSET | FUTEX_PRIVATE_FLAG, count, nullptr, - nullptr, bits); + // NOLINTNEXTLINE(runtime/int) + long err = syscall(SYS_futex, reinterpret_cast(v), + FUTEX_WAKE_BITSET | FUTEX_PRIVATE_FLAG, count, nullptr, + nullptr, bits); if (ABSL_PREDICT_FALSE(err < 0)) { - err = -errno; + return -errno; } - return err; + return 0; } }; diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/graphcycles.cc b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/graphcycles.cc index 27fec21681..feec4581fe 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/graphcycles.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/graphcycles.cc @@ -181,9 +181,9 @@ class NodeSet { return true; } - void erase(uint32_t v) { + void erase(int32_t v) { uint32_t i = FindIndex(v); - if (static_cast(table_[i]) == v) { + if (table_[i] == v) { table_[i] = kDel; } } @@ -195,7 +195,7 @@ class NodeSet { for (int32_t elem, _cursor = 0; (eset).Next(&_cursor, &elem); ) bool Next(int32_t* cursor, int32_t* elem) { while (static_cast(*cursor) < table_.size()) { - int32_t v = table_[*cursor]; + int32_t v = table_[static_cast(*cursor)]; (*cursor)++; if (v >= 0) { *elem = v; @@ -210,24 +210,26 @@ class NodeSet { Vec table_; uint32_t occupied_; // Count of non-empty slots (includes deleted slots) - static uint32_t Hash(uint32_t a) { return a * 41; } + static uint32_t Hash(int32_t a) { return static_cast(a * 41); } // Return index for storing v. May return an empty index or deleted index - int FindIndex(int32_t v) const { + uint32_t FindIndex(int32_t v) const { // Search starting at hash index. const uint32_t mask = table_.size() - 1; uint32_t i = Hash(v) & mask; - int deleted_index = -1; // If >= 0, index of first deleted element we see + uint32_t deleted_index = 0; // index of first deleted element we see + bool seen_deleted_element = false; while (true) { int32_t e = table_[i]; if (v == e) { return i; } else if (e == kEmpty) { // Return any previously encountered deleted slot. - return (deleted_index >= 0) ? deleted_index : i; - } else if (e == kDel && deleted_index < 0) { + return seen_deleted_element ? deleted_index : i; + } else if (e == kDel && !seen_deleted_element) { // Keep searching since v might be present later. 
deleted_index = i; + seen_deleted_element = true; } i = (i + 1) & mask; // Linear probing; quadratic is slightly slower. } @@ -268,7 +270,7 @@ inline GraphId MakeId(int32_t index, uint32_t version) { } inline int32_t NodeIndex(GraphId id) { - return static_cast(id.handle & 0xfffffffful); + return static_cast(id.handle); } inline uint32_t NodeVersion(GraphId id) { @@ -298,7 +300,7 @@ class PointerMap { int32_t Find(void* ptr) { auto masked = base_internal::HidePtr(ptr); for (int32_t i = table_[Hash(ptr)]; i != -1;) { - Node* n = (*nodes_)[i]; + Node* n = (*nodes_)[static_cast(i)]; if (n->masked_ptr == masked) return i; i = n->next_hash; } @@ -307,7 +309,7 @@ class PointerMap { void Add(void* ptr, int32_t i) { int32_t* head = &table_[Hash(ptr)]; - (*nodes_)[i]->next_hash = *head; + (*nodes_)[static_cast(i)]->next_hash = *head; *head = i; } @@ -317,7 +319,7 @@ class PointerMap { auto masked = base_internal::HidePtr(ptr); for (int32_t* slot = &table_[Hash(ptr)]; *slot != -1; ) { int32_t index = *slot; - Node* n = (*nodes_)[index]; + Node* n = (*nodes_)[static_cast(index)]; if (n->masked_ptr == masked) { *slot = n->next_hash; // Remove n from linked list n->next_hash = -1; @@ -358,7 +360,7 @@ struct GraphCycles::Rep { }; static Node* FindNode(GraphCycles::Rep* rep, GraphId id) { - Node* n = rep->nodes_[NodeIndex(id)]; + Node* n = rep->nodes_[static_cast(NodeIndex(id))]; return (n->version == NodeVersion(id)) ? n : nullptr; } @@ -393,7 +395,7 @@ bool GraphCycles::CheckInvariants() const { ABSL_RAW_LOG(FATAL, "Duplicate occurrence of rank %d", nx->rank); } HASH_FOR_EACH(y, nx->out) { - Node* ny = r->nodes_[y]; + Node* ny = r->nodes_[static_cast(y)]; if (nx->rank >= ny->rank) { ABSL_RAW_LOG(FATAL, "Edge %u->%d has bad rank assignment %d->%d", x, y, nx->rank, ny->rank); @@ -406,14 +408,14 @@ bool GraphCycles::CheckInvariants() const { GraphId GraphCycles::GetId(void* ptr) { int32_t i = rep_->ptrmap_.Find(ptr); if (i != -1) { - return MakeId(i, rep_->nodes_[i]->version); + return MakeId(i, rep_->nodes_[static_cast(i)]->version); } else if (rep_->free_nodes_.empty()) { Node* n = new (base_internal::LowLevelAlloc::AllocWithArena(sizeof(Node), arena)) Node; n->version = 1; // Avoid 0 since it is used by InvalidGraphId() n->visited = false; - n->rank = rep_->nodes_.size(); + n->rank = static_cast(rep_->nodes_.size()); n->masked_ptr = base_internal::HidePtr(ptr); n->nstack = 0; n->priority = 0; @@ -425,7 +427,7 @@ GraphId GraphCycles::GetId(void* ptr) { // a permutation of [0,rep_->nodes_.size()-1]. int32_t r = rep_->free_nodes_.back(); rep_->free_nodes_.pop_back(); - Node* n = rep_->nodes_[r]; + Node* n = rep_->nodes_[static_cast(r)]; n->masked_ptr = base_internal::HidePtr(ptr); n->nstack = 0; n->priority = 0; @@ -439,12 +441,12 @@ void GraphCycles::RemoveNode(void* ptr) { if (i == -1) { return; } - Node* x = rep_->nodes_[i]; + Node* x = rep_->nodes_[static_cast(i)]; HASH_FOR_EACH(y, x->out) { - rep_->nodes_[y]->in.erase(i); + rep_->nodes_[static_cast(y)]->in.erase(i); } HASH_FOR_EACH(y, x->in) { - rep_->nodes_[y]->out.erase(i); + rep_->nodes_[static_cast(y)]->out.erase(i); } x->in.clear(); x->out.clear(); @@ -520,7 +522,7 @@ bool GraphCycles::InsertEdge(GraphId idx, GraphId idy) { // Since we do not call Reorder() on this path, clear any visited // markers left by ForwardDFS. 
for (const auto& d : r->deltaf_) { - r->nodes_[d]->visited = false; + r->nodes_[static_cast(d)]->visited = false; } return false; } @@ -538,14 +540,14 @@ static bool ForwardDFS(GraphCycles::Rep* r, int32_t n, int32_t upper_bound) { while (!r->stack_.empty()) { n = r->stack_.back(); r->stack_.pop_back(); - Node* nn = r->nodes_[n]; + Node* nn = r->nodes_[static_cast(n)]; if (nn->visited) continue; nn->visited = true; r->deltaf_.push_back(n); HASH_FOR_EACH(w, nn->out) { - Node* nw = r->nodes_[w]; + Node* nw = r->nodes_[static_cast(w)]; if (nw->rank == upper_bound) { return false; // Cycle } @@ -564,14 +566,14 @@ static void BackwardDFS(GraphCycles::Rep* r, int32_t n, int32_t lower_bound) { while (!r->stack_.empty()) { n = r->stack_.back(); r->stack_.pop_back(); - Node* nn = r->nodes_[n]; + Node* nn = r->nodes_[static_cast(n)]; if (nn->visited) continue; nn->visited = true; r->deltab_.push_back(n); HASH_FOR_EACH(w, nn->in) { - Node* nw = r->nodes_[w]; + Node* nw = r->nodes_[static_cast(w)]; if (!nw->visited && lower_bound < nw->rank) { r->stack_.push_back(w); } @@ -596,7 +598,7 @@ static void Reorder(GraphCycles::Rep* r) { // Assign the ranks in order to the collected list. for (uint32_t i = 0; i < r->list_.size(); i++) { - r->nodes_[r->list_[i]]->rank = r->merged_[i]; + r->nodes_[static_cast(r->list_[i])]->rank = r->merged_[i]; } } @@ -604,7 +606,8 @@ static void Sort(const Vec& nodes, Vec* delta) { struct ByRank { const Vec* nodes; bool operator()(int32_t a, int32_t b) const { - return (*nodes)[a]->rank < (*nodes)[b]->rank; + return (*nodes)[static_cast(a)]->rank < + (*nodes)[static_cast(b)]->rank; } }; ByRank cmp; @@ -616,8 +619,10 @@ static void MoveToList( GraphCycles::Rep* r, Vec* src, Vec* dst) { for (auto& v : *src) { int32_t w = v; - v = r->nodes_[w]->rank; // Replace v entry with its rank - r->nodes_[w]->visited = false; // Prepare for future DFS calls + // Replace v entry with its rank + v = r->nodes_[static_cast(w)]->rank; + // Prepare for future DFS calls + r->nodes_[static_cast(w)]->visited = false; dst->push_back(w); } } @@ -647,7 +652,8 @@ int GraphCycles::FindPath(GraphId idx, GraphId idy, int max_path_len, } if (path_len < max_path_len) { - path[path_len] = MakeId(n, rep_->nodes_[n]->version); + path[path_len] = + MakeId(n, rep_->nodes_[static_cast(n)]->version); } path_len++; r->stack_.push_back(-1); // Will remove tentative path entry @@ -656,7 +662,7 @@ int GraphCycles::FindPath(GraphId idx, GraphId idy, int max_path_len, return path_len; } - HASH_FOR_EACH(w, r->nodes_[n]->out) { + HASH_FOR_EACH(w, r->nodes_[static_cast(n)]->out) { if (seen.insert(w)) { r->stack_.push_back(w); } diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/kernel_timeout.h b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/kernel_timeout.h index bbd4d2d70f..44a3a2e802 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/kernel_timeout.h +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/kernel_timeout.h @@ -111,7 +111,8 @@ class KernelTimeout { constexpr uint64_t max_nanos = (std::numeric_limits::max)() - 999999u; uint64_t ms_from_now = - (std::min(max_nanos, ns_ - now) + 999999u) / 1000000u; + ((std::min)(max_nanos, static_cast(ns_ - now)) + 999999u) / + 1000000u; if (ms_from_now > kInfinite) { return kInfinite; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/per_thread_sem.cc b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/per_thread_sem.cc index a6031787e0..469e8f3298 100644 --- 
a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/per_thread_sem.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/per_thread_sem.cc @@ -47,10 +47,6 @@ void PerThreadSem::Init(base_internal::ThreadIdentity *identity) { identity->is_idle.store(false, std::memory_order_relaxed); } -void PerThreadSem::Destroy(base_internal::ThreadIdentity *identity) { - Waiter::GetWaiter(identity)->~Waiter(); -} - void PerThreadSem::Tick(base_internal::ThreadIdentity *identity) { const int ticker = identity->ticker.fetch_add(1, std::memory_order_relaxed) + 1; diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/per_thread_sem.h b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/per_thread_sem.h index 7beae8ef1d..90a88809e4 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/per_thread_sem.h +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/per_thread_sem.h @@ -66,10 +66,6 @@ class PerThreadSem { // REQUIRES: May only be called by ThreadIdentity. static void Init(base_internal::ThreadIdentity* identity); - // Destroy the PerThreadSem associated with "identity". - // REQUIRES: May only be called by ThreadIdentity. - static void Destroy(base_internal::ThreadIdentity* identity); - // Increments "identity"'s count. static inline void Post(base_internal::ThreadIdentity* identity); @@ -81,8 +77,7 @@ class PerThreadSem { // Permitted callers. friend class PerThreadSemTest; friend class absl::Mutex; - friend absl::base_internal::ThreadIdentity* CreateThreadIdentity(); - friend void ReclaimThreadIdentity(void* v); + friend void OneTimeInitThreadIdentity(absl::base_internal::ThreadIdentity*); }; } // namespace synchronization_internal diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/per_thread_sem_test.cc b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/per_thread_sem_test.cc index db1184e679..24a6b54827 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/per_thread_sem_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/per_thread_sem_test.cc @@ -174,6 +174,15 @@ TEST_F(PerThreadSemTest, Timeouts) { EXPECT_TRUE(Wait(negative_timeout)); } +TEST_F(PerThreadSemTest, ThreadIdentityReuse) { + // Create a base_internal::ThreadIdentity object and keep reusing it. There + // should be no memory or resource leaks. + for (int i = 0; i < 10000; i++) { + std::thread t([]() { GetOrCreateCurrentThreadIdentity(); }); + t.join(); + } +} + } // namespace } // namespace synchronization_internal diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/thread_pool.h b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/thread_pool.h index 0cb96dacde..5eb0bb605e 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/thread_pool.h +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/thread_pool.h @@ -20,9 +20,11 @@ #include #include #include // NOLINT(build/c++11) +#include #include #include "absl/base/thread_annotations.h" +#include "absl/functional/any_invocable.h" #include "absl/synchronization/mutex.h" namespace absl { @@ -33,6 +35,7 @@ namespace synchronization_internal { class ThreadPool { public: explicit ThreadPool(int num_threads) { + threads_.reserve(num_threads); for (int i = 0; i < num_threads; ++i) { threads_.push_back(std::thread(&ThreadPool::WorkLoop, this)); } @@ -54,7 +57,7 @@ class ThreadPool { } // Schedule a function to be run on a ThreadPool thread immediately. 
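The Schedule() change just below swaps std::function for absl::AnyInvocable so that move-only callables can be queued; a minimal standalone sketch of that difference (not part of the patch):

#include <memory>
#include <queue>
#include <utility>

#include "absl/functional/any_invocable.h"

int main() {
  std::queue<absl::AnyInvocable<void()>> tasks;

  // A lambda with a move-only capture cannot be stored in std::function,
  // which requires a copyable target, but AnyInvocable accepts it.
  auto data = std::make_unique<int>(42);
  tasks.push([d = std::move(data)]() { ++*d; });

  // Tasks are moved out of the queue and then invoked, as WorkLoop() does.
  absl::AnyInvocable<void()> task = std::move(tasks.front());
  tasks.pop();
  task();
  return 0;
}
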
- void Schedule(std::function func) { + void Schedule(absl::AnyInvocable func) { assert(func != nullptr); absl::MutexLock l(&mu_); queue_.push(std::move(func)); @@ -67,7 +70,7 @@ class ThreadPool { void WorkLoop() { while (true) { - std::function func; + absl::AnyInvocable func; { absl::MutexLock l(&mu_); mu_.Await(absl::Condition(this, &ThreadPool::WorkAvailable)); @@ -82,7 +85,7 @@ class ThreadPool { } absl::Mutex mu_; - std::queue> queue_ ABSL_GUARDED_BY(mu_); + std::queue> queue_ ABSL_GUARDED_BY(mu_); std::vector threads_; }; diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/waiter.cc b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/waiter.cc index 28ef311e4a..f2051d6725 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/waiter.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/waiter.cc @@ -71,8 +71,6 @@ Waiter::Waiter() { futex_.store(0, std::memory_order_relaxed); } -Waiter::~Waiter() = default; - bool Waiter::Wait(KernelTimeout t) { // Loop until we can atomically decrement futex from a positive // value, waiting on a futex while we believe it is zero. @@ -161,18 +159,6 @@ Waiter::Waiter() { wakeup_count_ = 0; } -Waiter::~Waiter() { - const int err = pthread_mutex_destroy(&mu_); - if (err != 0) { - ABSL_RAW_LOG(FATAL, "pthread_mutex_destroy failed: %d", err); - } - - const int err2 = pthread_cond_destroy(&cv_); - if (err2 != 0) { - ABSL_RAW_LOG(FATAL, "pthread_cond_destroy failed: %d", err2); - } -} - bool Waiter::Wait(KernelTimeout t) { struct timespec abs_timeout; if (t.has_timeout()) { @@ -240,12 +226,6 @@ Waiter::Waiter() { wakeups_.store(0, std::memory_order_relaxed); } -Waiter::~Waiter() { - if (sem_destroy(&sem_) != 0) { - ABSL_RAW_LOG(FATAL, "sem_destroy failed with errno %d\n", errno); - } -} - bool Waiter::Wait(KernelTimeout t) { struct timespec abs_timeout; if (t.has_timeout()) { @@ -363,11 +343,6 @@ Waiter::Waiter() { wakeup_count_ = 0; } -// SRW locks and condition variables do not need to be explicitly destroyed. -// https://docs.microsoft.com/en-us/windows/win32/api/synchapi/nf-synchapi-initializesrwlock -// https://stackoverflow.com/questions/28975958/why-does-windows-have-no-deleteconditionvariable-function-to-go-together-with -Waiter::~Waiter() = default; - bool Waiter::Wait(KernelTimeout t) { SRWLOCK *mu = WinHelper::GetLock(this); CONDITION_VARIABLE *cv = WinHelper::GetCond(this); diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/waiter.h b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/waiter.h index be3df180d4..b8adfeb537 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/waiter.h +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/internal/waiter.h @@ -71,9 +71,6 @@ class Waiter { Waiter(const Waiter&) = delete; Waiter& operator=(const Waiter&) = delete; - // Destroy any data to track waits. - ~Waiter(); - // Blocks the calling thread until a matching call to `Post()` or // `t` has passed. Returns `true` if woken (`Post()` called), // `false` on timeout. @@ -106,6 +103,12 @@ class Waiter { #endif private: + // The destructor must not be called since Mutex/CondVar + // can use PerThreadSem/Waiter after the thread exits. + // Waiter objects are embedded in ThreadIdentity objects, + // which are reused via a freelist and are never destroyed. + ~Waiter() = delete; + #if ABSL_WAITER_MODE == ABSL_WAITER_MODE_FUTEX // Futexes are defined by specification to be 32-bits. 
// Thus std::atomic must be just an int32_t with lockfree methods. @@ -136,8 +139,11 @@ class Waiter { // REQUIRES: WinHelper::GetLock(this) must be held. void InternalCondVarPoke(); - // We can't include Windows.h in our headers, so we use aligned charachter + // We can't include Windows.h in our headers, so we use aligned character // buffers to define the storage of SRWLOCK and CONDITION_VARIABLE. + // SRW locks and condition variables do not need to be explicitly destroyed. + // https://docs.microsoft.com/en-us/windows/win32/api/synchapi/nf-synchapi-initializesrwlock + // https://stackoverflow.com/questions/28975958/why-does-windows-have-no-deleteconditionvariable-function-to-go-together-with alignas(void*) unsigned char mu_storage_[sizeof(void*)]; alignas(void*) unsigned char cv_storage_[sizeof(void*)]; int waiter_count_; diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/lifetime_test.cc b/TMessagesProj/jni/voip/webrtc/absl/synchronization/lifetime_test.cc index cc973a3290..e6274232f1 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/lifetime_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/lifetime_test.cc @@ -123,10 +123,10 @@ class OnDestruction { }; // These tests require that the compiler correctly supports C++11 constant -// initialization... but MSVC has a known regression since v19.10: +// initialization... but MSVC has a known regression since v19.10 till v19.25: // https://developercommunity.visualstudio.com/content/problem/336946/class-with-constexpr-constructor-not-using-static.html -// TODO(epastor): Limit the affected range once MSVC fixes this bug. -#if defined(__clang__) || !(defined(_MSC_VER) && _MSC_VER > 1900) +#if defined(__clang__) || \ + !(defined(_MSC_VER) && _MSC_VER > 1900 && _MSC_VER < 1925) // kConstInit // Test early usage. (Declaration comes first; definitions must appear after // the test runner.) diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/mutex.cc b/TMessagesProj/jni/voip/webrtc/absl/synchronization/mutex.cc index 8f25e3ddc6..dd771421f8 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/mutex.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/mutex.cc @@ -36,6 +36,9 @@ #include #include #include +#include +#include +#include #include // NOLINT(build/c++11) #include "absl/base/attributes.h" @@ -134,25 +137,42 @@ enum DelayMode { AGGRESSIVE, GENTLE }; struct ABSL_CACHELINE_ALIGNED MutexGlobals { absl::once_flag once; int spinloop_iterations = 0; - int32_t mutex_sleep_limit[2] = {}; + int32_t mutex_sleep_spins[2] = {}; + absl::Duration mutex_sleep_time; }; +absl::Duration MeasureTimeToYield() { + absl::Time before = absl::Now(); + ABSL_INTERNAL_C_SYMBOL(AbslInternalMutexYield)(); + return absl::Now() - before; +} + const MutexGlobals &GetMutexGlobals() { ABSL_CONST_INIT static MutexGlobals data; absl::base_internal::LowLevelCallOnce(&data.once, [&]() { const int num_cpus = absl::base_internal::NumCPUs(); data.spinloop_iterations = num_cpus > 1 ? 1500 : 0; - // If this a uniprocessor, only yield/sleep. Otherwise, if the mode is + // If this a uniprocessor, only yield/sleep. + // Real-time threads are often unable to yield, so the sleep time needs + // to be long enough to keep the calling thread asleep until scheduling + // happens. + // If this is multiprocessor, allow spinning. If the mode is // aggressive then spin many times before yielding. If the mode is // gentle then spin only a few times before yielding. 
Aggressive spinning // is used to ensure that an Unlock() call, which must get the spin lock // for any thread to make progress gets it without undue delay. if (num_cpus > 1) { - data.mutex_sleep_limit[AGGRESSIVE] = 5000; - data.mutex_sleep_limit[GENTLE] = 250; + data.mutex_sleep_spins[AGGRESSIVE] = 5000; + data.mutex_sleep_spins[GENTLE] = 250; + data.mutex_sleep_time = absl::Microseconds(10); } else { - data.mutex_sleep_limit[AGGRESSIVE] = 0; - data.mutex_sleep_limit[GENTLE] = 0; + data.mutex_sleep_spins[AGGRESSIVE] = 0; + data.mutex_sleep_spins[GENTLE] = 0; + data.mutex_sleep_time = MeasureTimeToYield() * 5; + data.mutex_sleep_time = + std::min(data.mutex_sleep_time, absl::Milliseconds(1)); + data.mutex_sleep_time = + std::max(data.mutex_sleep_time, absl::Microseconds(10)); } }); return data; @@ -163,7 +183,8 @@ namespace synchronization_internal { // Returns the Mutex delay on iteration `c` depending on the given `mode`. // The returned value should be used as `c` for the next call to `MutexDelay`. int MutexDelay(int32_t c, int mode) { - const int32_t limit = GetMutexGlobals().mutex_sleep_limit[mode]; + const int32_t limit = GetMutexGlobals().mutex_sleep_spins[mode]; + const absl::Duration sleep_time = GetMutexGlobals().mutex_sleep_time; if (c < limit) { // Spin. c++; @@ -176,7 +197,7 @@ int MutexDelay(int32_t c, int mode) { c++; } else { // Then wait. - absl::SleepFor(absl::Microseconds(10)); + absl::SleepFor(sleep_time); c = 0; } ABSL_TSAN_MUTEX_POST_DIVERT(nullptr, 0); @@ -325,7 +346,7 @@ static struct SynchEvent { // this is a trivial hash table for the events static SynchEvent *EnsureSynchEvent(std::atomic *addr, const char *name, intptr_t bits, intptr_t lockbit) { - uint32_t h = reinterpret_cast(addr) % kNSynchEvent; + uint32_t h = reinterpret_cast(addr) % kNSynchEvent; SynchEvent *e; // first look for existing SynchEvent struct.. synch_event_mu.Lock(); @@ -378,7 +399,7 @@ static void UnrefSynchEvent(SynchEvent *e) { // is clear before doing so). static void ForgetSynchEvent(std::atomic *addr, intptr_t bits, intptr_t lockbit) { - uint32_t h = reinterpret_cast(addr) % kNSynchEvent; + uint32_t h = reinterpret_cast(addr) % kNSynchEvent; SynchEvent **pe; SynchEvent *e; synch_event_mu.Lock(); @@ -402,7 +423,7 @@ static void ForgetSynchEvent(std::atomic *addr, intptr_t bits, // "addr", if any. The pointer returned is valid until the UnrefSynchEvent() is // called. static SynchEvent *GetSynchEvent(const void *addr) { - uint32_t h = reinterpret_cast(addr) % kNSynchEvent; + uint32_t h = reinterpret_cast(addr) % kNSynchEvent; SynchEvent *e; synch_event_mu.Lock(); for (e = synch_event[h]; @@ -430,7 +451,13 @@ static void PostSynchEvent(void *obj, int ev) { char buffer[ABSL_ARRAYSIZE(pcs) * 24]; int pos = snprintf(buffer, sizeof (buffer), " @"); for (int i = 0; i != n; i++) { - pos += snprintf(&buffer[pos], sizeof (buffer) - pos, " %p", pcs[i]); + int b = snprintf(&buffer[pos], sizeof(buffer) - static_cast(pos), + " %p", pcs[i]); + if (b < 0 || + static_cast(b) >= sizeof(buffer) - static_cast(pos)) { + break; + } + pos += b; } ABSL_RAW_LOG(INFO, "%s%p %s %s", event_properties[ev].msg, obj, (e == nullptr ? "" : e->name), buffer); @@ -486,7 +513,8 @@ struct SynchWaitParams { cvmu(cvmu_arg), thread(thread_arg), cv_word(cv_word_arg), - contention_start_cycles(base_internal::CycleClock::Now()) {} + contention_start_cycles(base_internal::CycleClock::Now()), + should_submit_contention_data(false) {} const Mutex::MuHow how; // How this thread needs to wait. 
const Condition *cond; // The condition that this thread is waiting for. @@ -504,6 +532,7 @@ struct SynchWaitParams { int64_t contention_start_cycles; // Time (in cycles) when this thread started // to contend for the mutex. + bool should_submit_contention_data; }; struct SynchLocksHeld { @@ -1273,15 +1302,17 @@ static char *StackString(void **pcs, int n, char *buf, int maxlen, char sym[kSymLen]; int len = 0; for (int i = 0; i != n; i++) { + if (len >= maxlen) + return buf; + size_t count = static_cast(maxlen - len); if (symbolize) { if (!symbolizer(pcs[i], sym, kSymLen)) { sym[0] = '\0'; } - snprintf(buf + len, maxlen - len, "%s\t@ %p %s\n", - (i == 0 ? "\n" : ""), - pcs[i], sym); + snprintf(buf + len, count, "%s\t@ %p %s\n", (i == 0 ? "\n" : ""), pcs[i], + sym); } else { - snprintf(buf + len, maxlen - len, " %p", pcs[i]); + snprintf(buf + len, count, " %p", pcs[i]); } len += strlen(&buf[len]); } @@ -1366,12 +1397,12 @@ static GraphId DeadlockCheck(Mutex *mu) { bool symbolize = number_of_reported_deadlocks <= 2; ABSL_RAW_LOG(ERROR, "Potential Mutex deadlock: %s", CurrentStackString(b->buf, sizeof (b->buf), symbolize)); - int len = 0; + size_t len = 0; for (int j = 0; j != all_locks->n; j++) { void* pr = deadlock_graph->Ptr(all_locks->locks[j].id); if (pr != nullptr) { snprintf(b->buf + len, sizeof (b->buf) - len, " %p", pr); - len += static_cast(strlen(&b->buf[len])); + len += strlen(&b->buf[len]); } } ABSL_RAW_LOG(ERROR, @@ -1790,8 +1821,8 @@ static inline bool EvalConditionAnnotated(const Condition *cond, Mutex *mu, // operation tsan considers that we've already released the mutex. bool res = false; #ifdef ABSL_INTERNAL_HAVE_TSAN_INTERFACE - const int flags = read_lock ? __tsan_mutex_read_lock : 0; - const int tryflags = flags | (trylock ? __tsan_mutex_try_lock : 0); + const uint32_t flags = read_lock ? __tsan_mutex_read_lock : 0; + const uint32_t tryflags = flags | (trylock ? __tsan_mutex_try_lock : 0); #endif if (locking) { // For lock we pretend that we have finished the operation, @@ -1904,7 +1935,7 @@ static void CheckForMutexCorruption(intptr_t v, const char* label) { // Test for either of two situations that should not occur in v: // kMuWriter and kMuReader // kMuWrWait and !kMuWait - const uintptr_t w = v ^ kMuWait; + const uintptr_t w = static_cast(v ^ kMuWait); // By flipping that bit, we can now test for: // kMuWriter and kMuReader in w // kMuWrWait and kMuWait in w @@ -2331,21 +2362,26 @@ ABSL_ATTRIBUTE_NOINLINE void Mutex::UnlockSlow(SynchWaitParams *waitp) { } // end of for(;;)-loop if (wake_list != kPerThreadSynchNull) { - int64_t wait_cycles = 0; + int64_t total_wait_cycles = 0; + int64_t max_wait_cycles = 0; int64_t now = base_internal::CycleClock::Now(); do { - // Sample lock contention events only if the waiter was trying to acquire + // Profile lock contention events only if the waiter was trying to acquire // the lock, not waiting on a condition variable or Condition. 
if (!wake_list->cond_waiter) { - wait_cycles += (now - wake_list->waitp->contention_start_cycles); + int64_t cycles_waited = + (now - wake_list->waitp->contention_start_cycles); + total_wait_cycles += cycles_waited; + if (max_wait_cycles == 0) max_wait_cycles = cycles_waited; wake_list->waitp->contention_start_cycles = now; + wake_list->waitp->should_submit_contention_data = true; } wake_list = Wakeup(wake_list); // wake waiters } while (wake_list != kPerThreadSynchNull); - if (wait_cycles > 0) { - mutex_tracer("slow release", this, wait_cycles); + if (total_wait_cycles > 0) { + mutex_tracer("slow release", this, total_wait_cycles); ABSL_TSAN_MUTEX_PRE_DIVERT(this, 0); - submit_profile_data(wait_cycles); + submit_profile_data(total_wait_cycles); ABSL_TSAN_MUTEX_POST_DIVERT(this, 0); } } @@ -2510,9 +2546,9 @@ void CondVar::Remove(PerThreadSynch *s) { // before calling Mutex::UnlockSlow(), the Mutex code might be re-entered (via // the logging code, or via a Condition function) and might potentially attempt // to block this thread. That would be a problem if the thread were already on -// a the condition variable waiter queue. Thus, we use the waitp->cv_word -// to tell the unlock code to call CondVarEnqueue() to queue the thread on the -// condition variable queue just before the mutex is to be unlocked, and (most +// a condition variable waiter queue. Thus, we use the waitp->cv_word to tell +// the unlock code to call CondVarEnqueue() to queue the thread on the condition +// variable queue just before the mutex is to be unlocked, and (most // importantly) after any call to an external routine that might re-enter the // mutex code. static void CondVarEnqueue(SynchWaitParams *waitp) { @@ -2575,6 +2611,23 @@ bool CondVar::WaitCommon(Mutex *mutex, KernelTimeout t) { while (waitp.thread->state.load(std::memory_order_acquire) == PerThreadSynch::kQueued) { if (!Mutex::DecrementSynchSem(mutex, waitp.thread, t)) { + // DecrementSynchSem returned due to timeout. + // Now we will either (1) remove ourselves from the wait list in Remove + // below, in which case Remove will set thread.state = kAvailable and + // we will not call DecrementSynchSem again; or (2) Signal/SignalAll + // has removed us concurrently and is calling Wakeup, which will set + // thread.state = kAvailable and post to the semaphore. + // It's important to reset the timeout for the case (2) because otherwise + // we can live-lock in this loop since DecrementSynchSem will always + // return immediately due to timeout, but Signal/SignalAll is not + // necessary set thread.state = kAvailable yet (and is not scheduled + // due to thread priorities or other scheduler artifacts). + // Note this could also be resolved if Signal/SignalAll would set + // thread.state = kAvailable while holding the wait list spin lock. + // But this can't be easily done for SignalAll since it grabs the whole + // wait list with a single compare-exchange and does not really grab + // the spin lock. 
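The comment above motivates the t = KernelTimeout::Never() reset on the next line. The sketch below models the same pattern with standard-library primitives only; Waiter, TimedWait and try_dequeue are hypothetical stand-ins for illustration, not the absl internals:

    #include <atomic>
    #include <chrono>
    #include <semaphore>  // C++20

    struct Waiter {
      std::binary_semaphore sem{0};    // models the per-thread semaphore
      std::atomic<bool> queued{true};  // models PerThreadSynch::kQueued
    };

    // Returns true if the wait ended in a timeout. try_dequeue(w) models
    // Remove(): it succeeds only if no signaler has already claimed the waiter,
    // and on success it clears w.queued.
    template <typename TryDequeue>
    bool TimedWait(Waiter& w, std::chrono::milliseconds timeout,
                   TryDequeue try_dequeue) {
      while (w.queued.load(std::memory_order_acquire)) {
        if (w.sem.try_acquire_for(timeout)) continue;  // a wakeup was posted
        if (try_dequeue(w)) return true;  // timed out and dequeued ourselves
        // Lost the race: a signaler already dequeued us and a wakeup is in
        // flight. Switch to an effectively unbounded wait (the analogue of
        // t = KernelTimeout::Never()); reusing the expired timeout here would
        // just spin in this loop.
        timeout = std::chrono::hours(24 * 365 * 100);
      }
      return false;
    }
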
+ t = KernelTimeout::Never(); this->Remove(waitp.thread); rc = true; } @@ -2729,25 +2782,32 @@ static bool Dereference(void *arg) { return *(static_cast(arg)); } -Condition::Condition() {} // null constructor, used for kTrue only +Condition::Condition() = default; // null constructor, used for kTrue only const Condition Condition::kTrue; Condition::Condition(bool (*func)(void *), void *arg) : eval_(&CallVoidPtrFunction), - function_(func), - method_(nullptr), - arg_(arg) {} + arg_(arg) { + static_assert(sizeof(&func) <= sizeof(callback_), + "An overlarge function pointer passed to Condition."); + StoreCallback(func); +} bool Condition::CallVoidPtrFunction(const Condition *c) { - return (*c->function_)(c->arg_); + using FunctionPointer = bool (*)(void *); + FunctionPointer function_pointer; + std::memcpy(&function_pointer, c->callback_, sizeof(function_pointer)); + return (*function_pointer)(c->arg_); } Condition::Condition(const bool *cond) : eval_(CallVoidPtrFunction), - function_(Dereference), - method_(nullptr), // const_cast is safe since Dereference does not modify arg - arg_(const_cast(cond)) {} + arg_(const_cast(cond)) { + using FunctionPointer = bool (*)(void *); + const FunctionPointer dereference = Dereference; + StoreCallback(dereference); +} bool Condition::Eval() const { // eval_ == null for kTrue @@ -2755,14 +2815,15 @@ bool Condition::Eval() const { } bool Condition::GuaranteedEqual(const Condition *a, const Condition *b) { - if (a == nullptr) { + // kTrue logic. + if (a == nullptr || a->eval_ == nullptr) { return b == nullptr || b->eval_ == nullptr; + }else if (b == nullptr || b->eval_ == nullptr) { + return false; } - if (b == nullptr || b->eval_ == nullptr) { - return a->eval_ == nullptr; - } - return a->eval_ == b->eval_ && a->function_ == b->function_ && - a->arg_ == b->arg_ && a->method_ == b->method_; + // Check equality of the representative fields. + return a->eval_ == b->eval_ && a->arg_ == b->arg_ && + !memcmp(a->callback_, b->callback_, sizeof(a->callback_)); } ABSL_NAMESPACE_END diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/mutex.h b/TMessagesProj/jni/voip/webrtc/absl/synchronization/mutex.h index 9a3e438f21..779aafa0ba 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/mutex.h +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/mutex.h @@ -60,6 +60,8 @@ #include #include +#include +#include #include #include "absl/base/const_init.h" @@ -174,9 +176,12 @@ class ABSL_LOCKABLE Mutex { // Mutex::AssertHeld() // - // Return immediately if this thread holds the `Mutex` exclusively (in write - // mode). Otherwise, may report an error (typically by crashing with a - // diagnostic), or may return immediately. + // Require that the mutex be held exclusively (write mode) by this thread. + // + // If the mutex is not currently held by this thread, this function may report + // an error (typically by crashing with a diagnostic) or it may do nothing. + // This function is intended only as a tool to assist debugging; it doesn't + // guarantee correctness. void AssertHeld() const ABSL_ASSERT_EXCLUSIVE_LOCK(); // --------------------------------------------------------------------------- @@ -236,9 +241,13 @@ class ABSL_LOCKABLE Mutex { // Mutex::AssertReaderHeld() // - // Returns immediately if this thread holds the `Mutex` in at least shared - // mode (read mode). Otherwise, may report an error (typically by - // crashing with a diagnostic), or may return immediately. 
+ // Require that the mutex be held at least in shared mode (read mode) by this + // thread. + // + // If the mutex is not currently held by this thread, this function may report + // an error (typically by crashing with a diagnostic) or it may do nothing. + // This function is intended only as a tool to assist debugging; it doesn't + // guarantee correctness. void AssertReaderHeld() const ABSL_ASSERT_SHARED_LOCK(); // Mutex::WriterLock() @@ -605,12 +614,12 @@ class ABSL_SCOPED_LOCKABLE WriterMutexLock { // Condition // ----------------------------------------------------------------------------- // -// As noted above, `Mutex` contains a number of member functions which take a -// `Condition` as an argument; clients can wait for conditions to become `true` -// before attempting to acquire the mutex. These sections are known as -// "condition critical" sections. To use a `Condition`, you simply need to -// construct it, and use within an appropriate `Mutex` member function; -// everything else in the `Condition` class is an implementation detail. +// `Mutex` contains a number of member functions which take a `Condition` as an +// argument; clients can wait for conditions to become `true` before attempting +// to acquire the mutex. These sections are known as "condition critical" +// sections. To use a `Condition`, you simply need to construct it, and use +// within an appropriate `Mutex` member function; everything else in the +// `Condition` class is an implementation detail. // // A `Condition` is specified as a function pointer which returns a boolean. // `Condition` functions should be pure functions -- their results should depend @@ -735,22 +744,53 @@ class Condition { static bool GuaranteedEqual(const Condition *a, const Condition *b); private: - typedef bool (*InternalFunctionType)(void * arg); - typedef bool (Condition::*InternalMethodType)(); - typedef bool (*InternalMethodCallerType)(void * arg, - InternalMethodType internal_method); - - bool (*eval_)(const Condition*); // Actual evaluator - InternalFunctionType function_; // function taking pointer returning bool - InternalMethodType method_; // method returning bool - void *arg_; // arg of function_ or object of method_ - - Condition(); // null constructor used only to create kTrue + // Sizing an allocation for a method pointer can be subtle. In the Itanium + // specifications, a method pointer has a predictable, uniform size. On the + // other hand, MSVC ABI, method pointer sizes vary based on the + // inheritance of the class. Specifically, method pointers from classes with + // multiple inheritance are bigger than those of classes with single + // inheritance. Other variations also exist. + +#ifndef _MSC_VER + // Allocation for a function pointer or method pointer. + // The {0} initializer ensures that all unused bytes of this buffer are + // always zeroed out. This is necessary, because GuaranteedEqual() compares + // all of the bytes, unaware of which bytes are relevant to a given `eval_`. + using MethodPtr = bool (Condition::*)(); + char callback_[sizeof(MethodPtr)] = {0}; +#else + // It is well known that the larget MSVC pointer-to-member is 24 bytes. This + // may be the largest known pointer-to-member of any platform. For this + // reason we will allocate 24 bytes for MSVC platform toolchains. + char callback_[24] = {0}; +#endif + + // Function with which to evaluate callbacks and/or arguments. + bool (*eval_)(const Condition*); + + // Either an argument for a function call or an object for a method call. 
+ void *arg_; // Various functions eval_ can point to: static bool CallVoidPtrFunction(const Condition*); template static bool CastAndCallFunction(const Condition* c); template static bool CastAndCallMethod(const Condition* c); + + // Helper methods for storing, validating, and reading callback arguments. + template + inline void StoreCallback(T callback) { + static_assert( + sizeof(callback) <= sizeof(callback_), + "An overlarge pointer was passed as a callback to Condition."); + std::memcpy(callback_, &callback, sizeof(callback)); + } + + template + inline void ReadCallback(T *callback) const { + std::memcpy(callback, callback_, sizeof(*callback)); + } + + Condition(); // null constructor used only to create kTrue }; // ----------------------------------------------------------------------------- @@ -942,46 +982,50 @@ inline CondVar::CondVar() : cv_(0) {} // static template bool Condition::CastAndCallMethod(const Condition *c) { - typedef bool (T::*MemberType)(); - MemberType rm = reinterpret_cast(c->method_); - T *x = static_cast(c->arg_); - return (x->*rm)(); + T *object = static_cast(c->arg_); + bool (T::*method_pointer)(); + c->ReadCallback(&method_pointer); + return (object->*method_pointer)(); } // static template bool Condition::CastAndCallFunction(const Condition *c) { - typedef bool (*FuncType)(T *); - FuncType fn = reinterpret_cast(c->function_); - T *x = static_cast(c->arg_); - return (*fn)(x); + bool (*function)(T *); + c->ReadCallback(&function); + T *argument = static_cast(c->arg_); + return (*function)(argument); } template inline Condition::Condition(bool (*func)(T *), T *arg) : eval_(&CastAndCallFunction), - function_(reinterpret_cast(func)), - method_(nullptr), - arg_(const_cast(static_cast(arg))) {} + arg_(const_cast(static_cast(arg))) { + static_assert(sizeof(&func) <= sizeof(callback_), + "An overlarge function pointer was passed to Condition."); + StoreCallback(func); +} template inline Condition::Condition(T *object, bool (absl::internal::identity::type::*method)()) : eval_(&CastAndCallMethod), - function_(nullptr), - method_(reinterpret_cast(method)), - arg_(object) {} + arg_(object) { + static_assert(sizeof(&method) <= sizeof(callback_), + "An overlarge method pointer was passed to Condition."); + StoreCallback(method); +} template inline Condition::Condition(const T *object, bool (absl::internal::identity::type::*method)() const) : eval_(&CastAndCallMethod), - function_(nullptr), - method_(reinterpret_cast(method)), - arg_(reinterpret_cast(const_cast(object))) {} + arg_(reinterpret_cast(const_cast(object))) { + StoreCallback(method); +} -// Register a hook for profiling support. +// Register hooks for profiling support. // // The function pointer registered here will be called whenever a mutex is // contended. 
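The callback_ buffer plus StoreCallback/ReadCallback above replace the old function_/method_ pair with type-erased byte storage: a function or member-function pointer is memcpy'd into a fixed char array and memcpy'd back out before the call, which is the portable way to round-trip a pointer through raw storage. A self-contained sketch of the technique (StoredPredicate and IsPositive are illustrative names, not absl API):

    #include <cstring>

    struct StoredPredicate {
      // Large enough for a plain function pointer; zero-filled so byte-wise
      // comparison of two instances is meaningful, as in GuaranteedEqual().
      char callback[sizeof(bool (*)(void*))] = {0};
      void* arg = nullptr;

      void Store(bool (*fn)(void*), void* a) {
        std::memcpy(callback, &fn, sizeof(fn));
        arg = a;
      }
      bool Eval() const {
        bool (*fn)(void*) = nullptr;
        std::memcpy(&fn, callback, sizeof(fn));
        return fn(arg);
      }
    };

    static bool IsPositive(void* p) { return *static_cast<int*>(p) > 0; }

    // Usage:
    //   int x = 3;
    //   StoredPredicate pred;
    //   pred.Store(&IsPositive, &x);
    //   bool ok = pred.Eval();  // true
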
The callback is given the cycles for which waiting happened (as diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/mutex_test.cc b/TMessagesProj/jni/voip/webrtc/absl/synchronization/mutex_test.cc index 4f40317684..34751cb1be 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/mutex_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/mutex_test.cc @@ -295,8 +295,9 @@ static void TestTime(TestContext *cxt, int c, bool use_cv) { "TestTime failed"); } elapsed = absl::Now() - start; - ABSL_RAW_CHECK(absl::Seconds(0.9) <= elapsed && - elapsed <= absl::Seconds(2.0), "TestTime failed"); + ABSL_RAW_CHECK( + absl::Seconds(0.9) <= elapsed && elapsed <= absl::Seconds(2.0), + "TestTime failed"); ABSL_RAW_CHECK(cxt->g0 == cxt->threads, "TestTime failed"); } else if (c == 1) { @@ -343,7 +344,7 @@ static void TestMuTime(TestContext *cxt, int c) { TestTime(cxt, c, false); } static void TestCVTime(TestContext *cxt, int c) { TestTime(cxt, c, true); } static void EndTest(int *c0, int *c1, absl::Mutex *mu, absl::CondVar *cv, - const std::function& cb) { + const std::function &cb) { mu->Lock(); int c = (*c0)++; mu->Unlock(); @@ -366,9 +367,9 @@ static int RunTestCommon(TestContext *cxt, void (*test)(TestContext *cxt, int), cxt->threads = threads; absl::synchronization_internal::ThreadPool tp(threads); for (int i = 0; i != threads; i++) { - tp.Schedule(std::bind(&EndTest, &c0, &c1, &mu2, &cv2, - std::function( - std::bind(test, cxt, std::placeholders::_1)))); + tp.Schedule(std::bind( + &EndTest, &c0, &c1, &mu2, &cv2, + std::function(std::bind(test, cxt, std::placeholders::_1)))); } mu2.Lock(); while (c1 != threads) { @@ -682,14 +683,14 @@ struct LockWhenTestStruct { bool waiting = false; }; -static bool LockWhenTestIsCond(LockWhenTestStruct* s) { +static bool LockWhenTestIsCond(LockWhenTestStruct *s) { s->mu2.Lock(); s->waiting = true; s->mu2.Unlock(); return s->cond; } -static void LockWhenTestWaitForIsCond(LockWhenTestStruct* s) { +static void LockWhenTestWaitForIsCond(LockWhenTestStruct *s) { s->mu1.LockWhen(absl::Condition(&LockWhenTestIsCond, s)); s->mu1.Unlock(); } @@ -1694,8 +1695,7 @@ TEST(Mutex, Timed) { TEST(Mutex, CVTime) { int threads = 10; // Use a fixed thread count of 10 int iterations = 1; - EXPECT_EQ(RunTest(&TestCVTime, threads, iterations, 1), - threads * iterations); + EXPECT_EQ(RunTest(&TestCVTime, threads, iterations, 1), threads * iterations); } TEST(Mutex, MuTime) { @@ -1704,4 +1704,30 @@ TEST(Mutex, MuTime) { EXPECT_EQ(RunTest(&TestMuTime, threads, iterations, 1), threads * iterations); } +TEST(Mutex, SignalExitedThread) { + // The test may expose a race when Mutex::Unlock signals a thread + // that has already exited. 
+#if defined(__wasm__) || defined(__asmjs__) + constexpr int kThreads = 1; // OOMs under WASM +#else + constexpr int kThreads = 100; +#endif + std::vector top; + for (unsigned i = 0; i < 2 * std::thread::hardware_concurrency(); i++) { + top.emplace_back([&]() { + for (int i = 0; i < kThreads; i++) { + absl::Mutex mu; + std::thread t([&]() { + mu.Lock(); + mu.Unlock(); + }); + mu.Lock(); + mu.Unlock(); + t.join(); + } + }); + } + for (auto &th : top) th.join(); +} + } // namespace diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/notification.cc b/TMessagesProj/jni/voip/webrtc/absl/synchronization/notification.cc index e91b903822..165ba669fb 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/notification.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/notification.cc @@ -16,7 +16,6 @@ #include -#include "absl/base/attributes.h" #include "absl/base/internal/raw_logging.h" #include "absl/synchronization/mutex.h" #include "absl/time/time.h" diff --git a/TMessagesProj/jni/voip/webrtc/absl/synchronization/notification.h b/TMessagesProj/jni/voip/webrtc/absl/synchronization/notification.h index 4bec2689b9..8986d9a408 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/synchronization/notification.h +++ b/TMessagesProj/jni/voip/webrtc/absl/synchronization/notification.h @@ -53,7 +53,6 @@ #include #include "absl/base/attributes.h" -#include "absl/base/macros.h" #include "absl/synchronization/mutex.h" #include "absl/time/time.h" diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/civil_time.cc b/TMessagesProj/jni/voip/webrtc/absl/time/civil_time.cc index 6a231edb2d..65df39d731 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/civil_time.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/time/civil_time.cc @@ -15,6 +15,7 @@ #include "absl/time/civil_time.h" #include +#include #include #include "absl/strings/str_cat.h" @@ -167,6 +168,31 @@ std::ostream& operator<<(std::ostream& os, CivilSecond s) { return os << FormatCivilTime(s); } +bool AbslParseFlag(string_view s, CivilSecond* c, std::string*) { + return ParseLenientCivilTime(s, c); +} +bool AbslParseFlag(string_view s, CivilMinute* c, std::string*) { + return ParseLenientCivilTime(s, c); +} +bool AbslParseFlag(string_view s, CivilHour* c, std::string*) { + return ParseLenientCivilTime(s, c); +} +bool AbslParseFlag(string_view s, CivilDay* c, std::string*) { + return ParseLenientCivilTime(s, c); +} +bool AbslParseFlag(string_view s, CivilMonth* c, std::string*) { + return ParseLenientCivilTime(s, c); +} +bool AbslParseFlag(string_view s, CivilYear* c, std::string*) { + return ParseLenientCivilTime(s, c); +} +std::string AbslUnparseFlag(CivilSecond c) { return FormatCivilTime(c); } +std::string AbslUnparseFlag(CivilMinute c) { return FormatCivilTime(c); } +std::string AbslUnparseFlag(CivilHour c) { return FormatCivilTime(c); } +std::string AbslUnparseFlag(CivilDay c) { return FormatCivilTime(c); } +std::string AbslUnparseFlag(CivilMonth c) { return FormatCivilTime(c); } +std::string AbslUnparseFlag(CivilYear c) { return FormatCivilTime(c); } + } // namespace time_internal ABSL_NAMESPACE_END diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/civil_time.h b/TMessagesProj/jni/voip/webrtc/absl/time/civil_time.h index bb46004434..5855bc73a1 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/civil_time.h +++ b/TMessagesProj/jni/voip/webrtc/absl/time/civil_time.h @@ -70,8 +70,10 @@ #ifndef ABSL_TIME_CIVIL_TIME_H_ #define ABSL_TIME_CIVIL_TIME_H_ +#include #include +#include "absl/base/config.h" #include 
"absl/strings/string_view.h" #include "absl/time/internal/cctz/include/cctz/civil_time.h" @@ -530,6 +532,29 @@ std::ostream& operator<<(std::ostream& os, CivilHour h); std::ostream& operator<<(std::ostream& os, CivilMinute m); std::ostream& operator<<(std::ostream& os, CivilSecond s); +// AbslParseFlag() +// +// Parses the command-line flag string representation `s` into a civil-time +// value. Flags must be specified in a format that is valid for +// `absl::ParseLenientCivilTime()`. +bool AbslParseFlag(absl::string_view s, CivilSecond* c, std::string* error); +bool AbslParseFlag(absl::string_view s, CivilMinute* c, std::string* error); +bool AbslParseFlag(absl::string_view s, CivilHour* c, std::string* error); +bool AbslParseFlag(absl::string_view s, CivilDay* c, std::string* error); +bool AbslParseFlag(absl::string_view s, CivilMonth* c, std::string* error); +bool AbslParseFlag(absl::string_view s, CivilYear* c, std::string* error); + +// AbslUnparseFlag() +// +// Unparses a civil-time value into a command-line string representation using +// the format specified by `absl::ParseCivilTime()`. +std::string AbslUnparseFlag(CivilSecond c); +std::string AbslUnparseFlag(CivilMinute c); +std::string AbslUnparseFlag(CivilHour c); +std::string AbslUnparseFlag(CivilDay c); +std::string AbslUnparseFlag(CivilMonth c); +std::string AbslUnparseFlag(CivilYear c); + } // namespace time_internal ABSL_NAMESPACE_END diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/clock.cc b/TMessagesProj/jni/voip/webrtc/absl/time/clock.cc index 7b204c4ee0..2bf53d9c61 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/clock.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/time/clock.cc @@ -196,7 +196,7 @@ struct ABSL_CACHELINE_ALIGNED TimeState { absl::base_internal::SpinLock lock{absl::kConstInit, base_internal::SCHEDULE_KERNEL_ONLY}; }; -ABSL_CONST_INIT static TimeState time_state{}; +ABSL_CONST_INIT static TimeState time_state; // Return the time in ns as told by the kernel interface. Place in *cycleclock // the value of the cycleclock at about the time of the syscall. @@ -217,9 +217,11 @@ static int64_t GetCurrentTimeNanosFromKernel(uint64_t last_cycleclock, uint64_t elapsed_cycles; int loops = 0; do { - before_cycles = GET_CURRENT_TIME_NANOS_CYCLECLOCK_NOW(); + before_cycles = + static_cast(GET_CURRENT_TIME_NANOS_CYCLECLOCK_NOW()); current_time_nanos_from_system = GET_CURRENT_TIME_NANOS_FROM_SYSTEM(); - after_cycles = GET_CURRENT_TIME_NANOS_CYCLECLOCK_NOW(); + after_cycles = + static_cast(GET_CURRENT_TIME_NANOS_CYCLECLOCK_NOW()); // elapsed_cycles is unsigned, so is large on overflow elapsed_cycles = after_cycles - before_cycles; if (elapsed_cycles >= local_approx_syscall_time_in_cycles && @@ -316,7 +318,8 @@ int64_t GetCurrentTimeNanos() { // contribute to register pressure - reading it early before initializing // the other pieces of the calculation minimizes spill/restore instructions, // minimizing icache cost. 
- uint64_t now_cycles = GET_CURRENT_TIME_NANOS_CYCLECLOCK_NOW(); + uint64_t now_cycles = + static_cast(GET_CURRENT_TIME_NANOS_CYCLECLOCK_NOW()); // Acquire pairs with the barrier in SeqRelease - if this load sees that // store, the shared-data reads necessarily see that SeqRelease's updates @@ -356,7 +359,8 @@ int64_t GetCurrentTimeNanos() { uint64_t delta_cycles; if (seq_read0 == seq_read1 && (seq_read0 & 1) == 0 && (delta_cycles = now_cycles - base_cycles) < min_cycles_per_sample) { - return base_ns + ((delta_cycles * nsscaled_per_cycle) >> kScale); + return static_cast( + base_ns + ((delta_cycles * nsscaled_per_cycle) >> kScale)); } return GetCurrentTimeNanosSlowPath(); } @@ -404,8 +408,8 @@ static int64_t GetCurrentTimeNanosSlowPath() // Sample the kernel time base. This is the definition of // "now" if we take the slow path. uint64_t now_cycles; - uint64_t now_ns = - GetCurrentTimeNanosFromKernel(time_state.last_now_cycles, &now_cycles); + uint64_t now_ns = static_cast( + GetCurrentTimeNanosFromKernel(time_state.last_now_cycles, &now_cycles)); time_state.last_now_cycles = now_cycles; uint64_t estimated_base_ns; @@ -432,7 +436,7 @@ static int64_t GetCurrentTimeNanosSlowPath() time_state.lock.Unlock(); - return estimated_base_ns; + return static_cast(estimated_base_ns); } // Main part of the algorithm. Locks out readers, updates the approximation @@ -489,7 +493,8 @@ static uint64_t UpdateLastSample(uint64_t now_cycles, uint64_t now_ns, uint64_t assumed_next_sample_delta_cycles = SafeDivideAndScale(kMinNSBetweenSamples, measured_nsscaled_per_cycle); - int64_t diff_ns = now_ns - estimated_base_ns; // estimate low by this much + // Estimate low by this much. + int64_t diff_ns = static_cast(now_ns - estimated_base_ns); // We want to set nsscaled_per_cycle so that our estimate of the ns time // at the assumed cycle time is the assumed ns time. @@ -500,7 +505,8 @@ static uint64_t UpdateLastSample(uint64_t now_cycles, uint64_t now_ns, // of our current error, by solving: // kMinNSBetweenSamples + diff_ns - (diff_ns / 16) == // (assumed_next_sample_delta_cycles * nsscaled_per_cycle) >> kScale - ns = kMinNSBetweenSamples + diff_ns - (diff_ns / 16); + ns = static_cast(static_cast(kMinNSBetweenSamples) + + diff_ns - (diff_ns / 16)); uint64_t new_nsscaled_per_cycle = SafeDivideAndScale(ns, assumed_next_sample_delta_cycles); if (new_nsscaled_per_cycle != 0 && @@ -558,7 +564,7 @@ constexpr absl::Duration MaxSleep() { // REQUIRES: to_sleep <= MaxSleep(). 
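The cast added inside SleepOnce() just below narrows the millisecond count for the Win32 Sleep() call, whose parameter is a 32-bit unsigned value. The underlying idiom is that dividing one absl::Duration by another performs integer division and yields an int64_t count; a small sketch under that assumption (ToSleepMilliseconds is an illustrative helper, not absl API):

    #include <cstdint>

    #include "absl/time/time.h"

    uint32_t ToSleepMilliseconds(absl::Duration d) {
      // Duration / Duration is integer division and returns int64_t.
      int64_t ms = d / absl::Milliseconds(1);
      // Explicit narrowing, mirroring the cast added below; the caller keeps
      // d <= MaxSleep() so the value fits.
      return static_cast<uint32_t>(ms);
    }
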
void SleepOnce(absl::Duration to_sleep) { #ifdef _WIN32 - Sleep(to_sleep / absl::Milliseconds(1)); + Sleep(static_cast(to_sleep / absl::Milliseconds(1))); #else struct timespec sleep_time = absl::ToTimespec(to_sleep); while (nanosleep(&sleep_time, &sleep_time) != 0 && errno == EINTR) { diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/duration.cc b/TMessagesProj/jni/voip/webrtc/absl/time/duration.cc index 4443109a51..911e80f834 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/duration.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/time/duration.cc @@ -617,7 +617,7 @@ timespec ToTimespec(Duration d) { rep_lo -= kTicksPerSecond; } } - ts.tv_sec = rep_hi; + ts.tv_sec = static_cast(rep_hi); if (ts.tv_sec == rep_hi) { // no time_t narrowing ts.tv_nsec = rep_lo / kTicksPerNanosecond; return ts; @@ -645,7 +645,7 @@ timeval ToTimeval(Duration d) { ts.tv_nsec -= 1000 * 1000 * 1000; } } - tv.tv_sec = ts.tv_sec; + tv.tv_sec = static_cast(ts.tv_sec); if (tv.tv_sec != ts.tv_sec) { // narrowing if (ts.tv_sec < 0) { tv.tv_sec = std::numeric_limits::min(); @@ -691,7 +691,7 @@ namespace { char* Format64(char* ep, int width, int64_t v) { do { --width; - *--ep = '0' + (v % 10); // contiguous digits + *--ep = static_cast('0' + (v % 10)); // contiguous digits } while (v /= 10); while (--width >= 0) *--ep = '0'; // zero pad return ep; @@ -728,7 +728,7 @@ void AppendNumberUnit(std::string* out, int64_t n, DisplayUnit unit) { char* const ep = buf + sizeof(buf); char* bp = Format64(ep, 0, n); if (*bp != '0' || bp + 1 != ep) { - out->append(bp, ep - bp); + out->append(bp, static_cast(ep - bp)); out->append(unit.abbr.data(), unit.abbr.size()); } } @@ -745,12 +745,12 @@ void AppendNumberUnit(std::string* out, double n, DisplayUnit unit) { int64_t int_part = d; if (int_part != 0 || frac_part != 0) { char* bp = Format64(ep, 0, int_part); // always < 1000 - out->append(bp, ep - bp); + out->append(bp, static_cast(ep - bp)); if (frac_part != 0) { out->push_back('.'); bp = Format64(ep, prec, frac_part); while (ep[-1] == '0') --ep; - out->append(bp, ep - bp); + out->append(bp, static_cast(ep - bp)); } out->append(unit.abbr.data(), unit.abbr.size()); } @@ -766,13 +766,14 @@ void AppendNumberUnit(std::string* out, double n, DisplayUnit unit) { // is non-zero. // Unlike Go, we format the zero duration as 0, with no unit. std::string FormatDuration(Duration d) { - const Duration min_duration = Seconds(kint64min); - if (d == min_duration) { + constexpr Duration kMinDuration = Seconds(kint64min); + std::string s; + if (d == kMinDuration) { // Avoid needing to negate kint64min by directly returning what the // following code should produce in that case. - return "-2562047788015215h30m8s"; + s = "-2562047788015215h30m8s"; + return s; } - std::string s; if (d < ZeroDuration()) { s.append("-"); d = -d; @@ -840,7 +841,7 @@ bool ConsumeDurationNumber(const char** dpp, const char* ep, int64_t* int_part, // in "*unit". The given string pointer is modified to point to the first // unconsumed char. 
bool ConsumeDurationUnit(const char** start, const char* end, Duration* unit) { - size_t size = end - *start; + size_t size = static_cast(end - *start); switch (size) { case 0: return false; diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/duration_test.cc b/TMessagesProj/jni/voip/webrtc/absl/time/duration_test.cc index b7209e1c0a..b7abf4baa2 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/duration_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/time/duration_test.cc @@ -349,6 +349,11 @@ TEST(Duration, ToChrono) { } TEST(Duration, FactoryOverloads) { +#if defined(ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT) && \ + ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT + GTEST_SKIP(); +#endif + enum E { kOne = 1 }; #define TEST_FACTORY_OVERLOADS(NAME) \ EXPECT_EQ(1, NAME(kOne) / NAME(kOne)); \ @@ -879,6 +884,11 @@ TEST(Duration, RelationalOperators) { } TEST(Duration, Addition) { +#if defined(ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT) && \ + ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT + GTEST_SKIP(); +#endif + #define TEST_ADD_OPS(UNIT) \ do { \ EXPECT_EQ(UNIT(2), UNIT(1) + UNIT(1)); \ @@ -972,6 +982,11 @@ TEST(Duration, Negation) { } TEST(Duration, AbsoluteValue) { +#if defined(ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT) && \ + ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT + GTEST_SKIP(); +#endif + EXPECT_EQ(absl::ZeroDuration(), AbsDuration(absl::ZeroDuration())); EXPECT_EQ(absl::Seconds(1), AbsDuration(absl::Seconds(1))); EXPECT_EQ(absl::Seconds(1), AbsDuration(absl::Seconds(-1))); @@ -989,6 +1004,11 @@ TEST(Duration, AbsoluteValue) { } TEST(Duration, Multiplication) { +#if defined(ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT) && \ + ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT + GTEST_SKIP(); +#endif + #define TEST_MUL_OPS(UNIT) \ do { \ EXPECT_EQ(UNIT(5), UNIT(2) * 2.5); \ @@ -1241,6 +1261,11 @@ TEST(Duration, RoundTripUnits) { } TEST(Duration, TruncConversions) { +#if defined(ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT) && \ + ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT + GTEST_SKIP(); +#endif + // Tests ToTimespec()/DurationFromTimespec() const struct { absl::Duration d; @@ -1537,6 +1562,11 @@ TEST(Duration, ConversionSaturation) { } TEST(Duration, FormatDuration) { +#if defined(ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT) && \ + ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT + GTEST_SKIP(); +#endif + // Example from Go's docs. EXPECT_EQ("72h3m0.5s", absl::FormatDuration(absl::Hours(72) + absl::Minutes(3) + @@ -1671,6 +1701,11 @@ TEST(Duration, FormatDuration) { } TEST(Duration, ParseDuration) { +#if defined(ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT) && \ + ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT + GTEST_SKIP(); +#endif + absl::Duration d; // No specified unit. Should only work for zero and infinity. diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/format.cc b/TMessagesProj/jni/voip/webrtc/absl/time/format.cc index 4005fb704c..15a26b14f7 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/format.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/time/format.cc @@ -64,7 +64,8 @@ cctz_parts Split(absl::Time t) { // details about rep_hi and rep_lo. 
absl::Time Join(const cctz_parts& parts) { const int64_t rep_hi = (parts.sec - unix_epoch()).count(); - const uint32_t rep_lo = parts.fem.count() / (1000 * 1000 / 4); + const uint32_t rep_lo = + static_cast(parts.fem.count() / (1000 * 1000 / 4)); const auto d = time_internal::MakeDuration(rep_hi, rep_lo); return time_internal::FromUnixDuration(d); } diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/cctz_benchmark.cc b/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/cctz_benchmark.cc index 6770ad6b97..c64f3801db 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/cctz_benchmark.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/cctz_benchmark.cc @@ -554,6 +554,7 @@ const char* const kTimeZoneNames[] = {"Africa/Abidjan", "Europe/Kaliningrad", "Europe/Kiev", "Europe/Kirov", + "Europe/Kyiv", "Europe/Lisbon", "Europe/Ljubljana", "Europe/London", @@ -593,6 +594,7 @@ const char* const kTimeZoneNames[] = {"Africa/Abidjan", "Europe/Zagreb", "Europe/Zaporozhye", "Europe/Zurich", + "Factory", "GB", "GB-Eire", "GMT", diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_format.cc b/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_format.cc index d8cb047425..2e5f532911 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_format.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_format.cc @@ -19,7 +19,7 @@ #endif #if defined(HAS_STRPTIME) && HAS_STRPTIME -#if !defined(_XOPEN_SOURCE) +#if !defined(_XOPEN_SOURCE) && !defined(__OpenBSD__) #define _XOPEN_SOURCE // Definedness suffices for strptime. #endif #endif diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_format_test.cc b/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_format_test.cc index 6487fa9373..f1f79a20fc 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_format_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_format_test.cc @@ -18,11 +18,15 @@ #include #include +#include "absl/base/config.h" +#include "absl/time/internal/cctz/include/cctz/time_zone.h" +#if defined(__linux__) +#include +#endif + #include "gmock/gmock.h" #include "gtest/gtest.h" -#include "absl/base/config.h" #include "absl/time/internal/cctz/include/cctz/civil_time.h" -#include "absl/time/internal/cctz/include/cctz/time_zone.h" namespace chrono = std::chrono; @@ -183,8 +187,10 @@ TEST(Format, PosixConversions) { TestFormatSpecifier(tp, tz, "%F", "1970-01-01"); TestFormatSpecifier(tp, tz, "%g", "70"); TestFormatSpecifier(tp, tz, "%G", "1970"); +#if defined(__GLIBC__) TestFormatSpecifier(tp, tz, "%k", " 0"); TestFormatSpecifier(tp, tz, "%l", "12"); +#endif TestFormatSpecifier(tp, tz, "%n", "\n"); TestFormatSpecifier(tp, tz, "%R", "00:00"); TestFormatSpecifier(tp, tz, "%t", "\t"); @@ -216,7 +222,9 @@ TEST(Format, LocaleSpecific) { #if defined(__linux__) // SU/C99/TZ extensions TestFormatSpecifier(tp, tz, "%h", "Jan"); // Same as %b +#if defined(__GLIBC__) TestFormatSpecifier(tp, tz, "%P", "am"); +#endif TestFormatSpecifier(tp, tz, "%r", "12:00:00 AM"); // Modified conversion specifiers %E_ @@ -1045,9 +1053,11 @@ TEST(Parse, LocaleSpecific) { EXPECT_TRUE(parse("%h", "Feb", tz, &tp)); EXPECT_EQ(2, convert(tp, tz).month()); // Equivalent to %b +#if defined(__GLIBC__) tp = reset; EXPECT_TRUE(parse("%l %p", "5 PM", tz, &tp)); EXPECT_EQ(17, convert(tp, tz).hour()); +#endif tp = reset; EXPECT_TRUE(parse("%r", 
"03:44:55 PM", tz, &tp)); @@ -1055,6 +1065,7 @@ TEST(Parse, LocaleSpecific) { EXPECT_EQ(44, convert(tp, tz).minute()); EXPECT_EQ(55, convert(tp, tz).second()); +#if defined(__GLIBC__) tp = reset; EXPECT_TRUE(parse("%Ec", "Tue Nov 19 05:06:07 2013", tz, &tp)); EXPECT_EQ(convert(civil_second(2013, 11, 19, 5, 6, 7), tz), tp); @@ -1126,6 +1137,7 @@ TEST(Parse, LocaleSpecific) { EXPECT_TRUE(parse("%Oy", "04", tz, &tp)); EXPECT_EQ(2004, convert(tp, tz).year()); #endif +#endif } TEST(Parse, ExtendedSeconds) { diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_info.cc b/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_info.cc index 4f175d95fc..787426f755 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_info.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_info.cc @@ -134,6 +134,21 @@ std::int_fast64_t Decode64(const char* cp) { return static_cast(v - s64maxU - 1) - s64max - 1; } +// Does the rule for future transitions call for year-round daylight time? +// See tz/zic.c:stringzone() for the details on how such rules are encoded. +bool AllYearDST(const PosixTimeZone& posix) { + if (posix.dst_start.date.fmt != PosixTransition::N) return false; + if (posix.dst_start.date.n.day != 0) return false; + if (posix.dst_start.time.offset != 0) return false; + + if (posix.dst_end.date.fmt != PosixTransition::J) return false; + if (posix.dst_end.date.j.day != kDaysPerYear[0]) return false; + const auto offset = posix.std_offset - posix.dst_offset; + if (posix.dst_end.time.offset + offset != kSecsPerDay) return false; + + return true; +} + // Generate a year-relative offset for a PosixTransition. std::int_fast64_t TransOffset(bool leap_year, int jan1_weekday, const PosixTransition& pt) { @@ -351,6 +366,12 @@ bool TimeZoneInfo::ExtendTransitions() { if (!GetTransitionType(posix.dst_offset, true, posix.dst_abbr, &dst_ti)) return false; + if (AllYearDST(posix)) { // dst only + // The future specification should match the last transition, and + // that means that handling the future will fall out naturally. + return EquivTransitions(transitions_.back().type_index, dst_ti); + } + // Extend the transitions for an additional 400 years using the // future specification. Years beyond those can be handled by // mapping back to a cycle-equivalent year within that range. @@ -481,9 +502,9 @@ bool TimeZoneInfo::Load(ZoneInfoSource* zip) { // encoded zoneinfo. The ttisstd/ttisgmt indicators only apply when // interpreting a POSIX spec that does not include start/end rules, and // that isn't the case here (see "zic -p"). - bp += (8 + 4) * hdr.leapcnt; // leap-time + TAI-UTC - bp += 1 * hdr.ttisstdcnt; // UTC/local indicators - bp += 1 * hdr.ttisutcnt; // standard/wall indicators + bp += (time_len + 4) * hdr.leapcnt; // leap-time + TAI-UTC + bp += 1 * hdr.ttisstdcnt; // UTC/local indicators + bp += 1 * hdr.ttisutcnt; // standard/wall indicators assert(bp == tbuf.data() + tbuf.size()); future_spec_.clear(); @@ -512,8 +533,8 @@ bool TimeZoneInfo::Load(ZoneInfoSource* zip) { // Trim redundant transitions. zic may have added these to work around // differences between the glibc and reference implementations (see - // zic.c:dontmerge) and the Qt library (see zic.c:WORK_AROUND_QTBUG_53071). - // For us, they just get in the way when we do future_spec_ extension. + // zic.c:dontmerge) or to avoid bugs in old readers. For us, they just + // get in the way when we do future_spec_ extension. 
while (hdr.timecnt > 1) { if (!EquivTransitions(transitions_[hdr.timecnt - 1].type_index, transitions_[hdr.timecnt - 2].type_index)) { diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_lookup.cc b/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_lookup.cc index 898d04c125..f6983aeb95 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_lookup.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_lookup.cc @@ -31,7 +31,7 @@ #if defined(__Fuchsia__) #include #include -#include +#include #include #endif @@ -140,8 +140,9 @@ time_zone local_time_zone() { if (CFStringRef tz_name = CFTimeZoneGetName(tz_default)) { CFStringEncoding encoding = kCFStringEncodingUTF8; CFIndex length = CFStringGetLength(tz_name); - buffer.resize(CFStringGetMaximumSizeForEncoding(length, encoding) + 1); - if (CFStringGetCString(tz_name, &buffer[0], buffer.size(), encoding)) { + CFIndex max_size = CFStringGetMaximumSizeForEncoding(length, encoding) + 1; + buffer.resize(static_cast(max_size)); + if (CFStringGetCString(tz_name, &buffer[0], max_size, encoding)) { zone = &buffer[0]; } } @@ -160,11 +161,11 @@ time_zone local_time_zone() { // would be set to null when the loop is destroyed, causing any other FIDL // code running on the same thread to crash. async::Loop loop(&kAsyncLoopConfigNeverAttachToThread); - std::unique_ptr context = - sys::ComponentContext::Create(); fuchsia::intl::PropertyProviderHandle handle; - zx_status_t status = context->svc()->Connect(handle.NewRequest()); + zx_status_t status = fdio_service_connect_by_name( + fuchsia::intl::PropertyProvider::Name_, + handle.NewRequest().TakeChannel().release()); if (status != ZX_OK) { return; } diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_lookup_test.cc b/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_lookup_test.cc index 0226ab71ab..ab461f0451 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_lookup_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/time_zone_lookup_test.cc @@ -21,10 +21,14 @@ #include #include -#include "gtest/gtest.h" #include "absl/base/config.h" -#include "absl/time/internal/cctz/include/cctz/civil_time.h" #include "absl/time/internal/cctz/include/cctz/time_zone.h" +#if defined(__linux__) +#include +#endif + +#include "gtest/gtest.h" +#include "absl/time/internal/cctz/include/cctz/civil_time.h" namespace chrono = std::chrono; @@ -485,6 +489,7 @@ const char* const kTimeZoneNames[] = {"Africa/Abidjan", "Europe/Kaliningrad", "Europe/Kiev", "Europe/Kirov", + "Europe/Kyiv", "Europe/Lisbon", "Europe/Ljubljana", "Europe/London", @@ -524,6 +529,7 @@ const char* const kTimeZoneNames[] = {"Africa/Abidjan", "Europe/Zagreb", "Europe/Zaporozhye", "Europe/Zurich", + "Factory", "GB", "GB-Eire", "GMT", @@ -1043,7 +1049,7 @@ TEST(MakeTime, LocalTimeLibC) { // 1) we know how to change the time zone used by localtime()/mktime(), // 2) cctz and localtime()/mktime() will use similar-enough tzdata, and // 3) we have some idea about how mktime() behaves during transitions. -#if defined(__linux__) && !defined(__ANDROID__) +#if defined(__linux__) && defined(__GLIBC__) && !defined(__ANDROID__) const char* const ep = getenv("TZ"); std::string tz_name = (ep != nullptr) ? ep : ""; for (const char* const* np = kTimeZoneNames; *np != nullptr; ++np) { @@ -1182,6 +1188,45 @@ TEST(PrevTransition, AmericaNewYork) { // We have a transition but we don't know which one. 
} +TEST(NextTransition, Scan) { + for (const char* const* np = kTimeZoneNames; *np != nullptr; ++np) { + SCOPED_TRACE(testing::Message() << "In " << *np); + time_zone tz; + // EXPECT_TRUE(load_time_zone(*np, &tz)); + if (!load_time_zone(*np, &tz)) { + continue; // tolerate kTimeZoneNames/zoneinfo skew + } + + auto tp = time_point::min(); + time_zone::civil_transition trans; + while (tz.next_transition(tp, &trans)) { + time_zone::civil_lookup from_cl = tz.lookup(trans.from); + EXPECT_NE(from_cl.kind, time_zone::civil_lookup::REPEATED); + time_zone::civil_lookup to_cl = tz.lookup(trans.to); + EXPECT_NE(to_cl.kind, time_zone::civil_lookup::SKIPPED); + + auto trans_tp = to_cl.trans; + time_zone::absolute_lookup trans_al = tz.lookup(trans_tp); + EXPECT_EQ(trans_al.cs, trans.to); + auto pre_trans_tp = trans_tp - absl::time_internal::cctz::seconds(1); + time_zone::absolute_lookup pre_trans_al = tz.lookup(pre_trans_tp); + EXPECT_EQ(pre_trans_al.cs + 1, trans.from); + + auto offset_delta = trans_al.offset - pre_trans_al.offset; + EXPECT_EQ(offset_delta, trans.to - trans.from); + if (offset_delta == 0) { + // This "transition" is only an is_dst or abbr change. + EXPECT_EQ(to_cl.kind, time_zone::civil_lookup::UNIQUE); + if (trans_al.is_dst == pre_trans_al.is_dst) { + EXPECT_STRNE(trans_al.abbr, pre_trans_al.abbr); + } + } + + tp = trans_tp; // continue scan from transition + } + } +} + TEST(TimeZoneEdgeCase, AmericaNewYork) { const time_zone tz = LoadZone("America/New_York"); diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/zone_info_source.cc b/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/zone_info_source.cc index 5ab5a59ecf..b818c21381 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/zone_info_source.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/time/internal/cctz/src/zone_info_source.cc @@ -66,41 +66,41 @@ extern ZoneInfoSourceFactory zone_info_source_factory; extern ZoneInfoSourceFactory default_factory; ZoneInfoSourceFactory default_factory = DefaultFactory; #if defined(_M_IX86) || defined(_M_ARM) -#pragma comment( \ - linker, \ - "/alternatename:?zone_info_source_factory@cctz_extension@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@3P6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@@std@@@std@@ABV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ - "@ABV?$function@$$A6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@@std@@@std@@ABV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@2@@Z@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ - "@@ZA=?default_factory@cctz_extension@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@3P6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@@std@@@std@@ABV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ - "@ABV?$function@$$A6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@@std@@@std@@ABV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@2@@Z@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ - "@@ZA") +#pragma comment( \ + linker, \ + 
"/alternatename:?zone_info_source_factory@cctz_extension@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@3P6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@@std@@@std@@ABV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ + "@ABV?$function@$$A6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@@std@@@std@@ABV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@2@@Z@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ + "@@ZA=?default_factory@cctz_extension@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@3P6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@@std@@@std@@ABV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ + "@ABV?$function@$$A6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@@std@@@std@@ABV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@2@@Z@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ + "@@ZA") #elif defined(_M_IA_64) || defined(_M_AMD64) || defined(_M_ARM64) -#pragma comment( \ - linker, \ - "/alternatename:?zone_info_source_factory@cctz_extension@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@3P6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@@std@@@std@@AEBV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ - "@AEBV?$function@$$A6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@@std@@@std@@AEBV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@2@@Z@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ - "@@ZEA=?default_factory@cctz_extension@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@3P6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@@std@@@std@@AEBV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ - "@AEBV?$function@$$A6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ - "@@@std@@@std@@AEBV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@2@@Z@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ - "@@ZEA") +#pragma comment( \ + linker, \ + "/alternatename:?zone_info_source_factory@cctz_extension@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@3P6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@@std@@@std@@AEBV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ + "@AEBV?$function@$$A6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + 
"@@@std@@@std@@AEBV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@2@@Z@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ + "@@ZEA=?default_factory@cctz_extension@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@3P6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@@std@@@std@@AEBV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ + "@AEBV?$function@$$A6A?AV?$unique_ptr@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@U?$default_delete@VZoneInfoSource@cctz@time_internal@" ABSL_INTERNAL_MANGLED_NS \ + "@@@std@@@std@@AEBV?$basic_string@DU?$char_traits@D@std@@V?$allocator@D@2@@2@@Z@" ABSL_INTERNAL_MANGLED_BACKREFERENCE \ + "@@ZEA") #else #error Unsupported MSVC platform #endif // _M_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/internal/test_util.cc b/TMessagesProj/jni/voip/webrtc/absl/time/internal/test_util.cc index 9a485a0750..3e2452e9c3 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/internal/test_util.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/time/internal/test_util.cc @@ -14,15 +14,8 @@ #include "absl/time/internal/test_util.h" -#include -#include -#include - #include "absl/base/config.h" #include "absl/base/internal/raw_logging.h" -#include "absl/time/internal/cctz/include/cctz/zone_info_source.h" - -namespace cctz = absl::time_internal::cctz; namespace absl { ABSL_NAMESPACE_BEGIN @@ -37,95 +30,3 @@ TimeZone LoadTimeZone(const std::string& name) { } // namespace time_internal ABSL_NAMESPACE_END } // namespace absl - -namespace absl { -ABSL_NAMESPACE_BEGIN -namespace time_internal { -namespace cctz_extension { -namespace { - -// Embed the zoneinfo data for time zones used during tests and benchmarks. -// The data was generated using "xxd -i zoneinfo-file". There is no need -// to update the data as long as the tests do not depend on recent changes -// (and the past rules remain the same). -#include "absl/time/internal/zoneinfo.inc" - -const struct ZoneInfo { - const char* name; - const char* data; - std::size_t length; -} kZoneInfo[] = { - // The three real time zones used by :time_test and :time_benchmark. - {"America/Los_Angeles", // - reinterpret_cast(America_Los_Angeles), America_Los_Angeles_len}, - {"America/New_York", // - reinterpret_cast(America_New_York), America_New_York_len}, - {"Australia/Sydney", // - reinterpret_cast(Australia_Sydney), Australia_Sydney_len}, - - // Other zones named in tests but which should fail to load. - {"Invalid/TimeZone", nullptr, 0}, - {"", nullptr, 0}, - - // Also allow for loading the local time zone under TZ=US/Pacific. - {"US/Pacific", // - reinterpret_cast(America_Los_Angeles), America_Los_Angeles_len}, - - // Allows use of the local time zone from a system-specific location. 
-#ifdef _MSC_VER - {"localtime", // - reinterpret_cast(America_Los_Angeles), America_Los_Angeles_len}, -#else - {"/etc/localtime", // - reinterpret_cast(America_Los_Angeles), America_Los_Angeles_len}, -#endif -}; - -class TestZoneInfoSource : public cctz::ZoneInfoSource { - public: - TestZoneInfoSource(const char* data, std::size_t size) - : data_(data), end_(data + size) {} - - std::size_t Read(void* ptr, std::size_t size) override { - const std::size_t len = std::min(size, end_ - data_); - memcpy(ptr, data_, len); - data_ += len; - return len; - } - - int Skip(std::size_t offset) override { - data_ += std::min(offset, end_ - data_); - return 0; - } - - private: - const char* data_; - const char* const end_; -}; - -std::unique_ptr TestFactory( - const std::string& name, - const std::function( - const std::string& name)>& /*fallback_factory*/) { - for (const ZoneInfo& zoneinfo : kZoneInfo) { - if (name == zoneinfo.name) { - if (zoneinfo.data == nullptr) return nullptr; - return std::unique_ptr( - new TestZoneInfoSource(zoneinfo.data, zoneinfo.length)); - } - } - ABSL_RAW_LOG(FATAL, "Unexpected time zone \"%s\" in test", name.c_str()); - return nullptr; -} - -} // namespace - -#if !defined(__MINGW32__) -// MinGW does not support the weak symbol extension mechanism. -ZoneInfoSourceFactory zone_info_source_factory = TestFactory; -#endif - -} // namespace cctz_extension -} // namespace time_internal -ABSL_NAMESPACE_END -} // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/internal/zoneinfo.inc b/TMessagesProj/jni/voip/webrtc/absl/time/internal/zoneinfo.inc deleted file mode 100644 index bfed82990d..0000000000 --- a/TMessagesProj/jni/voip/webrtc/absl/time/internal/zoneinfo.inc +++ /dev/null @@ -1,729 +0,0 @@ -unsigned char America_Los_Angeles[] = { - 0x54, 0x5a, 0x69, 0x66, 0x32, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, - 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xba, - 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x14, 0x80, 0x00, 0x00, 0x00, - 0x9e, 0xa6, 0x48, 0xa0, 0x9f, 0xbb, 0x15, 0x90, 0xa0, 0x86, 0x2a, 0xa0, - 0xa1, 0x9a, 0xf7, 0x90, 0xcb, 0x89, 0x1a, 0xa0, 0xd2, 0x23, 0xf4, 0x70, - 0xd2, 0x61, 0x26, 0x10, 0xd6, 0xfe, 0x74, 0x5c, 0xd8, 0x80, 0xad, 0x90, - 0xda, 0xfe, 0xc3, 0x90, 0xdb, 0xc0, 0x90, 0x10, 0xdc, 0xde, 0xa5, 0x90, - 0xdd, 0xa9, 0xac, 0x90, 0xde, 0xbe, 0x87, 0x90, 0xdf, 0x89, 0x8e, 0x90, - 0xe0, 0x9e, 0x69, 0x90, 0xe1, 0x69, 0x70, 0x90, 0xe2, 0x7e, 0x4b, 0x90, - 0xe3, 0x49, 0x52, 0x90, 0xe4, 0x5e, 0x2d, 0x90, 0xe5, 0x29, 0x34, 0x90, - 0xe6, 0x47, 0x4a, 0x10, 0xe7, 0x12, 0x51, 0x10, 0xe8, 0x27, 0x2c, 0x10, - 0xe8, 0xf2, 0x33, 0x10, 0xea, 0x07, 0x0e, 0x10, 0xea, 0xd2, 0x15, 0x10, - 0xeb, 0xe6, 0xf0, 0x10, 0xec, 0xb1, 0xf7, 0x10, 0xed, 0xc6, 0xd2, 0x10, - 0xee, 0x91, 0xd9, 0x10, 0xef, 0xaf, 0xee, 0x90, 0xf0, 0x71, 0xbb, 0x10, - 0xf1, 0x8f, 0xd0, 0x90, 0xf2, 0x7f, 0xc1, 0x90, 0xf3, 0x6f, 0xb2, 0x90, - 0xf4, 0x5f, 0xa3, 0x90, 0xf5, 0x4f, 0x94, 0x90, 0xf6, 0x3f, 0x85, 0x90, - 0xf7, 0x2f, 0x76, 0x90, 0xf8, 0x28, 0xa2, 0x10, 0xf9, 0x0f, 0x58, 0x90, - 0xfa, 0x08, 0x84, 0x10, 0xfa, 0xf8, 0x83, 0x20, 0xfb, 0xe8, 0x66, 0x10, - 0xfc, 0xd8, 0x65, 0x20, 0xfd, 0xc8, 0x48, 0x10, 0xfe, 0xb8, 0x47, 0x20, - 0xff, 0xa8, 0x2a, 0x10, 0x00, 0x98, 0x29, 0x20, 0x01, 0x88, 0x0c, 0x10, - 0x02, 0x78, 0x0b, 0x20, 0x03, 0x71, 0x28, 0x90, 0x04, 0x61, 0x27, 0xa0, - 0x05, 0x51, 0x0a, 0x90, 0x06, 0x41, 0x09, 0xa0, 0x07, 0x30, 0xec, 0x90, - 0x07, 0x8d, 0x43, 0xa0, 0x09, 0x10, 0xce, 0x90, 0x09, 
0xad, 0xbf, 0x20, - 0x0a, 0xf0, 0xb0, 0x90, 0x0b, 0xe0, 0xaf, 0xa0, 0x0c, 0xd9, 0xcd, 0x10, - 0x0d, 0xc0, 0x91, 0xa0, 0x0e, 0xb9, 0xaf, 0x10, 0x0f, 0xa9, 0xae, 0x20, - 0x10, 0x99, 0x91, 0x10, 0x11, 0x89, 0x90, 0x20, 0x12, 0x79, 0x73, 0x10, - 0x13, 0x69, 0x72, 0x20, 0x14, 0x59, 0x55, 0x10, 0x15, 0x49, 0x54, 0x20, - 0x16, 0x39, 0x37, 0x10, 0x17, 0x29, 0x36, 0x20, 0x18, 0x22, 0x53, 0x90, - 0x19, 0x09, 0x18, 0x20, 0x1a, 0x02, 0x35, 0x90, 0x1a, 0xf2, 0x34, 0xa0, - 0x1b, 0xe2, 0x17, 0x90, 0x1c, 0xd2, 0x16, 0xa0, 0x1d, 0xc1, 0xf9, 0x90, - 0x1e, 0xb1, 0xf8, 0xa0, 0x1f, 0xa1, 0xdb, 0x90, 0x20, 0x76, 0x2b, 0x20, - 0x21, 0x81, 0xbd, 0x90, 0x22, 0x56, 0x0d, 0x20, 0x23, 0x6a, 0xda, 0x10, - 0x24, 0x35, 0xef, 0x20, 0x25, 0x4a, 0xbc, 0x10, 0x26, 0x15, 0xd1, 0x20, - 0x27, 0x2a, 0x9e, 0x10, 0x27, 0xfe, 0xed, 0xa0, 0x29, 0x0a, 0x80, 0x10, - 0x29, 0xde, 0xcf, 0xa0, 0x2a, 0xea, 0x62, 0x10, 0x2b, 0xbe, 0xb1, 0xa0, - 0x2c, 0xd3, 0x7e, 0x90, 0x2d, 0x9e, 0x93, 0xa0, 0x2e, 0xb3, 0x60, 0x90, - 0x2f, 0x7e, 0x75, 0xa0, 0x30, 0x93, 0x42, 0x90, 0x31, 0x67, 0x92, 0x20, - 0x32, 0x73, 0x24, 0x90, 0x33, 0x47, 0x74, 0x20, 0x34, 0x53, 0x06, 0x90, - 0x35, 0x27, 0x56, 0x20, 0x36, 0x32, 0xe8, 0x90, 0x37, 0x07, 0x38, 0x20, - 0x38, 0x1c, 0x05, 0x10, 0x38, 0xe7, 0x1a, 0x20, 0x39, 0xfb, 0xe7, 0x10, - 0x3a, 0xc6, 0xfc, 0x20, 0x3b, 0xdb, 0xc9, 0x10, 0x3c, 0xb0, 0x18, 0xa0, - 0x3d, 0xbb, 0xab, 0x10, 0x3e, 0x8f, 0xfa, 0xa0, 0x3f, 0x9b, 0x8d, 0x10, - 0x40, 0x6f, 0xdc, 0xa0, 0x41, 0x84, 0xa9, 0x90, 0x42, 0x4f, 0xbe, 0xa0, - 0x43, 0x64, 0x8b, 0x90, 0x44, 0x2f, 0xa0, 0xa0, 0x45, 0x44, 0x6d, 0x90, - 0x45, 0xf3, 0xd3, 0x20, 0x47, 0x2d, 0x8a, 0x10, 0x47, 0xd3, 0xb5, 0x20, - 0x49, 0x0d, 0x6c, 0x10, 0x49, 0xb3, 0x97, 0x20, 0x4a, 0xed, 0x4e, 0x10, - 0x4b, 0x9c, 0xb3, 0xa0, 0x4c, 0xd6, 0x6a, 0x90, 0x4d, 0x7c, 0x95, 0xa0, - 0x4e, 0xb6, 0x4c, 0x90, 0x4f, 0x5c, 0x77, 0xa0, 0x50, 0x96, 0x2e, 0x90, - 0x51, 0x3c, 0x59, 0xa0, 0x52, 0x76, 0x10, 0x90, 0x53, 0x1c, 0x3b, 0xa0, - 0x54, 0x55, 0xf2, 0x90, 0x54, 0xfc, 0x1d, 0xa0, 0x56, 0x35, 0xd4, 0x90, - 0x56, 0xe5, 0x3a, 0x20, 0x58, 0x1e, 0xf1, 0x10, 0x58, 0xc5, 0x1c, 0x20, - 0x59, 0xfe, 0xd3, 0x10, 0x5a, 0xa4, 0xfe, 0x20, 0x5b, 0xde, 0xb5, 0x10, - 0x5c, 0x84, 0xe0, 0x20, 0x5d, 0xbe, 0x97, 0x10, 0x5e, 0x64, 0xc2, 0x20, - 0x5f, 0x9e, 0x79, 0x10, 0x60, 0x4d, 0xde, 0xa0, 0x61, 0x87, 0x95, 0x90, - 0x62, 0x2d, 0xc0, 0xa0, 0x63, 0x67, 0x77, 0x90, 0x64, 0x0d, 0xa2, 0xa0, - 0x65, 0x47, 0x59, 0x90, 0x65, 0xed, 0x84, 0xa0, 0x67, 0x27, 0x3b, 0x90, - 0x67, 0xcd, 0x66, 0xa0, 0x69, 0x07, 0x1d, 0x90, 0x69, 0xad, 0x48, 0xa0, - 0x6a, 0xe6, 0xff, 0x90, 0x6b, 0x96, 0x65, 0x20, 0x6c, 0xd0, 0x1c, 0x10, - 0x6d, 0x76, 0x47, 0x20, 0x6e, 0xaf, 0xfe, 0x10, 0x6f, 0x56, 0x29, 0x20, - 0x70, 0x8f, 0xe0, 0x10, 0x71, 0x36, 0x0b, 0x20, 0x72, 0x6f, 0xc2, 0x10, - 0x73, 0x15, 0xed, 0x20, 0x74, 0x4f, 0xa4, 0x10, 0x74, 0xff, 0x09, 0xa0, - 0x76, 0x38, 0xc0, 0x90, 0x76, 0xde, 0xeb, 0xa0, 0x78, 0x18, 0xa2, 0x90, - 0x78, 0xbe, 0xcd, 0xa0, 0x79, 0xf8, 0x84, 0x90, 0x7a, 0x9e, 0xaf, 0xa0, - 0x7b, 0xd8, 0x66, 0x90, 0x7c, 0x7e, 0x91, 0xa0, 0x7d, 0xb8, 0x48, 0x90, - 0x7e, 0x5e, 0x73, 0xa0, 0x7f, 0x98, 0x2a, 0x90, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x03, 0x04, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 
0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0xff, 0xff, 0x91, 0x26, 0x00, 0x00, 0xff, 0xff, 0x9d, 0x90, - 0x01, 0x04, 0xff, 0xff, 0x8f, 0x80, 0x00, 0x08, 0xff, 0xff, 0x9d, 0x90, - 0x01, 0x0c, 0xff, 0xff, 0x9d, 0x90, 0x01, 0x10, 0x4c, 0x4d, 0x54, 0x00, - 0x50, 0x44, 0x54, 0x00, 0x50, 0x53, 0x54, 0x00, 0x50, 0x57, 0x54, 0x00, - 0x50, 0x50, 0x54, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x00, 0x01, 0x54, 0x5a, 0x69, 0x66, 0x32, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x05, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0xbb, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x14, 0xf8, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0x5e, 0x04, - 0x1a, 0xc0, 0xff, 0xff, 0xff, 0xff, 0x9e, 0xa6, 0x48, 0xa0, 0xff, 0xff, - 0xff, 0xff, 0x9f, 0xbb, 0x15, 0x90, 0xff, 0xff, 0xff, 0xff, 0xa0, 0x86, - 0x2a, 0xa0, 0xff, 0xff, 0xff, 0xff, 0xa1, 0x9a, 0xf7, 0x90, 0xff, 0xff, - 0xff, 0xff, 0xcb, 0x89, 0x1a, 0xa0, 0xff, 0xff, 0xff, 0xff, 0xd2, 0x23, - 0xf4, 0x70, 0xff, 0xff, 0xff, 0xff, 0xd2, 0x61, 0x26, 0x10, 0xff, 0xff, - 0xff, 0xff, 0xd6, 0xfe, 0x74, 0x5c, 0xff, 0xff, 0xff, 0xff, 0xd8, 0x80, - 0xad, 0x90, 0xff, 0xff, 0xff, 0xff, 0xda, 0xfe, 0xc3, 0x90, 0xff, 0xff, - 0xff, 0xff, 0xdb, 0xc0, 0x90, 0x10, 0xff, 0xff, 0xff, 0xff, 0xdc, 0xde, - 0xa5, 0x90, 0xff, 0xff, 0xff, 0xff, 0xdd, 0xa9, 0xac, 0x90, 0xff, 0xff, - 0xff, 0xff, 0xde, 0xbe, 0x87, 0x90, 0xff, 0xff, 0xff, 0xff, 0xdf, 0x89, - 0x8e, 0x90, 0xff, 0xff, 0xff, 0xff, 0xe0, 0x9e, 0x69, 0x90, 0xff, 0xff, - 0xff, 0xff, 0xe1, 0x69, 0x70, 0x90, 0xff, 0xff, 0xff, 0xff, 0xe2, 0x7e, - 0x4b, 0x90, 0xff, 0xff, 0xff, 0xff, 0xe3, 0x49, 0x52, 0x90, 0xff, 0xff, - 0xff, 0xff, 0xe4, 0x5e, 0x2d, 0x90, 0xff, 0xff, 0xff, 0xff, 0xe5, 0x29, - 0x34, 0x90, 0xff, 0xff, 0xff, 0xff, 0xe6, 0x47, 0x4a, 0x10, 0xff, 0xff, - 0xff, 0xff, 0xe7, 0x12, 0x51, 0x10, 0xff, 0xff, 0xff, 0xff, 0xe8, 0x27, - 0x2c, 0x10, 0xff, 0xff, 0xff, 0xff, 0xe8, 0xf2, 0x33, 0x10, 0xff, 0xff, - 0xff, 0xff, 0xea, 0x07, 0x0e, 0x10, 0xff, 0xff, 0xff, 0xff, 0xea, 0xd2, - 0x15, 0x10, 0xff, 0xff, 0xff, 0xff, 0xeb, 0xe6, 0xf0, 0x10, 0xff, 0xff, - 0xff, 0xff, 0xec, 0xb1, 0xf7, 0x10, 0xff, 0xff, 0xff, 0xff, 0xed, 0xc6, - 0xd2, 0x10, 0xff, 0xff, 0xff, 0xff, 0xee, 0x91, 0xd9, 0x10, 0xff, 0xff, - 0xff, 0xff, 0xef, 0xaf, 0xee, 0x90, 0xff, 0xff, 0xff, 0xff, 0xf0, 0x71, - 0xbb, 0x10, 0xff, 0xff, 0xff, 0xff, 0xf1, 0x8f, 0xd0, 0x90, 0xff, 0xff, - 0xff, 0xff, 0xf2, 0x7f, 0xc1, 0x90, 0xff, 0xff, 0xff, 0xff, 0xf3, 0x6f, - 0xb2, 0x90, 0xff, 0xff, 0xff, 0xff, 0xf4, 0x5f, 0xa3, 0x90, 0xff, 0xff, - 0xff, 0xff, 0xf5, 0x4f, 0x94, 0x90, 0xff, 0xff, 0xff, 0xff, 0xf6, 0x3f, - 0x85, 0x90, 0xff, 0xff, 0xff, 0xff, 0xf7, 0x2f, 0x76, 0x90, 0xff, 0xff, - 0xff, 0xff, 0xf8, 0x28, 0xa2, 0x10, 0xff, 0xff, 0xff, 0xff, 0xf9, 0x0f, - 0x58, 0x90, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x08, 0x84, 
0x10, 0xff, 0xff, - 0xff, 0xff, 0xfa, 0xf8, 0x83, 0x20, 0xff, 0xff, 0xff, 0xff, 0xfb, 0xe8, - 0x66, 0x10, 0xff, 0xff, 0xff, 0xff, 0xfc, 0xd8, 0x65, 0x20, 0xff, 0xff, - 0xff, 0xff, 0xfd, 0xc8, 0x48, 0x10, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xb8, - 0x47, 0x20, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa8, 0x2a, 0x10, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x98, 0x29, 0x20, 0x00, 0x00, 0x00, 0x00, 0x01, 0x88, - 0x0c, 0x10, 0x00, 0x00, 0x00, 0x00, 0x02, 0x78, 0x0b, 0x20, 0x00, 0x00, - 0x00, 0x00, 0x03, 0x71, 0x28, 0x90, 0x00, 0x00, 0x00, 0x00, 0x04, 0x61, - 0x27, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x05, 0x51, 0x0a, 0x90, 0x00, 0x00, - 0x00, 0x00, 0x06, 0x41, 0x09, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x07, 0x30, - 0xec, 0x90, 0x00, 0x00, 0x00, 0x00, 0x07, 0x8d, 0x43, 0xa0, 0x00, 0x00, - 0x00, 0x00, 0x09, 0x10, 0xce, 0x90, 0x00, 0x00, 0x00, 0x00, 0x09, 0xad, - 0xbf, 0x20, 0x00, 0x00, 0x00, 0x00, 0x0a, 0xf0, 0xb0, 0x90, 0x00, 0x00, - 0x00, 0x00, 0x0b, 0xe0, 0xaf, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x0c, 0xd9, - 0xcd, 0x10, 0x00, 0x00, 0x00, 0x00, 0x0d, 0xc0, 0x91, 0xa0, 0x00, 0x00, - 0x00, 0x00, 0x0e, 0xb9, 0xaf, 0x10, 0x00, 0x00, 0x00, 0x00, 0x0f, 0xa9, - 0xae, 0x20, 0x00, 0x00, 0x00, 0x00, 0x10, 0x99, 0x91, 0x10, 0x00, 0x00, - 0x00, 0x00, 0x11, 0x89, 0x90, 0x20, 0x00, 0x00, 0x00, 0x00, 0x12, 0x79, - 0x73, 0x10, 0x00, 0x00, 0x00, 0x00, 0x13, 0x69, 0x72, 0x20, 0x00, 0x00, - 0x00, 0x00, 0x14, 0x59, 0x55, 0x10, 0x00, 0x00, 0x00, 0x00, 0x15, 0x49, - 0x54, 0x20, 0x00, 0x00, 0x00, 0x00, 0x16, 0x39, 0x37, 0x10, 0x00, 0x00, - 0x00, 0x00, 0x17, 0x29, 0x36, 0x20, 0x00, 0x00, 0x00, 0x00, 0x18, 0x22, - 0x53, 0x90, 0x00, 0x00, 0x00, 0x00, 0x19, 0x09, 0x18, 0x20, 0x00, 0x00, - 0x00, 0x00, 0x1a, 0x02, 0x35, 0x90, 0x00, 0x00, 0x00, 0x00, 0x1a, 0xf2, - 0x34, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x1b, 0xe2, 0x17, 0x90, 0x00, 0x00, - 0x00, 0x00, 0x1c, 0xd2, 0x16, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x1d, 0xc1, - 0xf9, 0x90, 0x00, 0x00, 0x00, 0x00, 0x1e, 0xb1, 0xf8, 0xa0, 0x00, 0x00, - 0x00, 0x00, 0x1f, 0xa1, 0xdb, 0x90, 0x00, 0x00, 0x00, 0x00, 0x20, 0x76, - 0x2b, 0x20, 0x00, 0x00, 0x00, 0x00, 0x21, 0x81, 0xbd, 0x90, 0x00, 0x00, - 0x00, 0x00, 0x22, 0x56, 0x0d, 0x20, 0x00, 0x00, 0x00, 0x00, 0x23, 0x6a, - 0xda, 0x10, 0x00, 0x00, 0x00, 0x00, 0x24, 0x35, 0xef, 0x20, 0x00, 0x00, - 0x00, 0x00, 0x25, 0x4a, 0xbc, 0x10, 0x00, 0x00, 0x00, 0x00, 0x26, 0x15, - 0xd1, 0x20, 0x00, 0x00, 0x00, 0x00, 0x27, 0x2a, 0x9e, 0x10, 0x00, 0x00, - 0x00, 0x00, 0x27, 0xfe, 0xed, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x29, 0x0a, - 0x80, 0x10, 0x00, 0x00, 0x00, 0x00, 0x29, 0xde, 0xcf, 0xa0, 0x00, 0x00, - 0x00, 0x00, 0x2a, 0xea, 0x62, 0x10, 0x00, 0x00, 0x00, 0x00, 0x2b, 0xbe, - 0xb1, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x2c, 0xd3, 0x7e, 0x90, 0x00, 0x00, - 0x00, 0x00, 0x2d, 0x9e, 0x93, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x2e, 0xb3, - 0x60, 0x90, 0x00, 0x00, 0x00, 0x00, 0x2f, 0x7e, 0x75, 0xa0, 0x00, 0x00, - 0x00, 0x00, 0x30, 0x93, 0x42, 0x90, 0x00, 0x00, 0x00, 0x00, 0x31, 0x67, - 0x92, 0x20, 0x00, 0x00, 0x00, 0x00, 0x32, 0x73, 0x24, 0x90, 0x00, 0x00, - 0x00, 0x00, 0x33, 0x47, 0x74, 0x20, 0x00, 0x00, 0x00, 0x00, 0x34, 0x53, - 0x06, 0x90, 0x00, 0x00, 0x00, 0x00, 0x35, 0x27, 0x56, 0x20, 0x00, 0x00, - 0x00, 0x00, 0x36, 0x32, 0xe8, 0x90, 0x00, 0x00, 0x00, 0x00, 0x37, 0x07, - 0x38, 0x20, 0x00, 0x00, 0x00, 0x00, 0x38, 0x1c, 0x05, 0x10, 0x00, 0x00, - 0x00, 0x00, 0x38, 0xe7, 0x1a, 0x20, 0x00, 0x00, 0x00, 0x00, 0x39, 0xfb, - 0xe7, 0x10, 0x00, 0x00, 0x00, 0x00, 0x3a, 0xc6, 0xfc, 0x20, 0x00, 0x00, - 0x00, 0x00, 0x3b, 0xdb, 0xc9, 0x10, 0x00, 0x00, 0x00, 0x00, 0x3c, 0xb0, - 0x18, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x3d, 0xbb, 0xab, 
0x10, 0x00, 0x00, - 0x00, 0x00, 0x3e, 0x8f, 0xfa, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x3f, 0x9b, - 0x8d, 0x10, 0x00, 0x00, 0x00, 0x00, 0x40, 0x6f, 0xdc, 0xa0, 0x00, 0x00, - 0x00, 0x00, 0x41, 0x84, 0xa9, 0x90, 0x00, 0x00, 0x00, 0x00, 0x42, 0x4f, - 0xbe, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x43, 0x64, 0x8b, 0x90, 0x00, 0x00, - 0x00, 0x00, 0x44, 0x2f, 0xa0, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x45, 0x44, - 0x6d, 0x90, 0x00, 0x00, 0x00, 0x00, 0x45, 0xf3, 0xd3, 0x20, 0x00, 0x00, - 0x00, 0x00, 0x47, 0x2d, 0x8a, 0x10, 0x00, 0x00, 0x00, 0x00, 0x47, 0xd3, - 0xb5, 0x20, 0x00, 0x00, 0x00, 0x00, 0x49, 0x0d, 0x6c, 0x10, 0x00, 0x00, - 0x00, 0x00, 0x49, 0xb3, 0x97, 0x20, 0x00, 0x00, 0x00, 0x00, 0x4a, 0xed, - 0x4e, 0x10, 0x00, 0x00, 0x00, 0x00, 0x4b, 0x9c, 0xb3, 0xa0, 0x00, 0x00, - 0x00, 0x00, 0x4c, 0xd6, 0x6a, 0x90, 0x00, 0x00, 0x00, 0x00, 0x4d, 0x7c, - 0x95, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x4e, 0xb6, 0x4c, 0x90, 0x00, 0x00, - 0x00, 0x00, 0x4f, 0x5c, 0x77, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x50, 0x96, - 0x2e, 0x90, 0x00, 0x00, 0x00, 0x00, 0x51, 0x3c, 0x59, 0xa0, 0x00, 0x00, - 0x00, 0x00, 0x52, 0x76, 0x10, 0x90, 0x00, 0x00, 0x00, 0x00, 0x53, 0x1c, - 0x3b, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x54, 0x55, 0xf2, 0x90, 0x00, 0x00, - 0x00, 0x00, 0x54, 0xfc, 0x1d, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x56, 0x35, - 0xd4, 0x90, 0x00, 0x00, 0x00, 0x00, 0x56, 0xe5, 0x3a, 0x20, 0x00, 0x00, - 0x00, 0x00, 0x58, 0x1e, 0xf1, 0x10, 0x00, 0x00, 0x00, 0x00, 0x58, 0xc5, - 0x1c, 0x20, 0x00, 0x00, 0x00, 0x00, 0x59, 0xfe, 0xd3, 0x10, 0x00, 0x00, - 0x00, 0x00, 0x5a, 0xa4, 0xfe, 0x20, 0x00, 0x00, 0x00, 0x00, 0x5b, 0xde, - 0xb5, 0x10, 0x00, 0x00, 0x00, 0x00, 0x5c, 0x84, 0xe0, 0x20, 0x00, 0x00, - 0x00, 0x00, 0x5d, 0xbe, 0x97, 0x10, 0x00, 0x00, 0x00, 0x00, 0x5e, 0x64, - 0xc2, 0x20, 0x00, 0x00, 0x00, 0x00, 0x5f, 0x9e, 0x79, 0x10, 0x00, 0x00, - 0x00, 0x00, 0x60, 0x4d, 0xde, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x61, 0x87, - 0x95, 0x90, 0x00, 0x00, 0x00, 0x00, 0x62, 0x2d, 0xc0, 0xa0, 0x00, 0x00, - 0x00, 0x00, 0x63, 0x67, 0x77, 0x90, 0x00, 0x00, 0x00, 0x00, 0x64, 0x0d, - 0xa2, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x65, 0x47, 0x59, 0x90, 0x00, 0x00, - 0x00, 0x00, 0x65, 0xed, 0x84, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x67, 0x27, - 0x3b, 0x90, 0x00, 0x00, 0x00, 0x00, 0x67, 0xcd, 0x66, 0xa0, 0x00, 0x00, - 0x00, 0x00, 0x69, 0x07, 0x1d, 0x90, 0x00, 0x00, 0x00, 0x00, 0x69, 0xad, - 0x48, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x6a, 0xe6, 0xff, 0x90, 0x00, 0x00, - 0x00, 0x00, 0x6b, 0x96, 0x65, 0x20, 0x00, 0x00, 0x00, 0x00, 0x6c, 0xd0, - 0x1c, 0x10, 0x00, 0x00, 0x00, 0x00, 0x6d, 0x76, 0x47, 0x20, 0x00, 0x00, - 0x00, 0x00, 0x6e, 0xaf, 0xfe, 0x10, 0x00, 0x00, 0x00, 0x00, 0x6f, 0x56, - 0x29, 0x20, 0x00, 0x00, 0x00, 0x00, 0x70, 0x8f, 0xe0, 0x10, 0x00, 0x00, - 0x00, 0x00, 0x71, 0x36, 0x0b, 0x20, 0x00, 0x00, 0x00, 0x00, 0x72, 0x6f, - 0xc2, 0x10, 0x00, 0x00, 0x00, 0x00, 0x73, 0x15, 0xed, 0x20, 0x00, 0x00, - 0x00, 0x00, 0x74, 0x4f, 0xa4, 0x10, 0x00, 0x00, 0x00, 0x00, 0x74, 0xff, - 0x09, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x76, 0x38, 0xc0, 0x90, 0x00, 0x00, - 0x00, 0x00, 0x76, 0xde, 0xeb, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x78, 0x18, - 0xa2, 0x90, 0x00, 0x00, 0x00, 0x00, 0x78, 0xbe, 0xcd, 0xa0, 0x00, 0x00, - 0x00, 0x00, 0x79, 0xf8, 0x84, 0x90, 0x00, 0x00, 0x00, 0x00, 0x7a, 0x9e, - 0xaf, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x7b, 0xd8, 0x66, 0x90, 0x00, 0x00, - 0x00, 0x00, 0x7c, 0x7e, 0x91, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x7d, 0xb8, - 0x48, 0x90, 0x00, 0x00, 0x00, 0x00, 0x7e, 0x5e, 0x73, 0xa0, 0x00, 0x00, - 0x00, 0x00, 0x7f, 0x98, 0x2a, 0x90, 0x00, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x03, 0x04, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 
0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0xff, 0xff, 0x91, 0x26, 0x00, 0x00, 0xff, 0xff, 0x9d, 0x90, 0x01, - 0x04, 0xff, 0xff, 0x8f, 0x80, 0x00, 0x08, 0xff, 0xff, 0x9d, 0x90, 0x01, - 0x0c, 0xff, 0xff, 0x9d, 0x90, 0x01, 0x10, 0x4c, 0x4d, 0x54, 0x00, 0x50, - 0x44, 0x54, 0x00, 0x50, 0x53, 0x54, 0x00, 0x50, 0x57, 0x54, 0x00, 0x50, - 0x50, 0x54, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, - 0x01, 0x0a, 0x50, 0x53, 0x54, 0x38, 0x50, 0x44, 0x54, 0x2c, 0x4d, 0x33, - 0x2e, 0x32, 0x2e, 0x30, 0x2c, 0x4d, 0x31, 0x31, 0x2e, 0x31, 0x2e, 0x30, - 0x0a -}; -unsigned int America_Los_Angeles_len = 2845; -unsigned char America_New_York[] = { - 0x54, 0x5a, 0x69, 0x66, 0x32, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, - 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xec, - 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x14, 0x80, 0x00, 0x00, 0x00, - 0x9e, 0xa6, 0x1e, 0x70, 0x9f, 0xba, 0xeb, 0x60, 0xa0, 0x86, 0x00, 0x70, - 0xa1, 0x9a, 0xcd, 0x60, 0xa2, 0x65, 0xe2, 0x70, 0xa3, 0x83, 0xe9, 0xe0, - 0xa4, 0x6a, 0xae, 0x70, 0xa5, 0x35, 0xa7, 0x60, 0xa6, 0x53, 0xca, 0xf0, - 0xa7, 0x15, 0x89, 0x60, 0xa8, 0x33, 0xac, 0xf0, 0xa8, 0xfe, 0xa5, 0xe0, - 0xaa, 0x13, 0x8e, 0xf0, 0xaa, 0xde, 0x87, 0xe0, 0xab, 0xf3, 0x70, 0xf0, - 0xac, 0xbe, 0x69, 0xe0, 0xad, 0xd3, 0x52, 0xf0, 0xae, 0x9e, 0x4b, 0xe0, - 0xaf, 0xb3, 0x34, 0xf0, 0xb0, 0x7e, 0x2d, 0xe0, 0xb1, 0x9c, 0x51, 0x70, - 0xb2, 0x67, 0x4a, 0x60, 0xb3, 0x7c, 0x33, 0x70, 0xb4, 0x47, 0x2c, 0x60, - 0xb5, 0x5c, 0x15, 0x70, 0xb6, 0x27, 0x0e, 0x60, 0xb7, 0x3b, 0xf7, 0x70, - 0xb8, 0x06, 0xf0, 0x60, 0xb9, 0x1b, 0xd9, 0x70, 0xb9, 0xe6, 0xd2, 0x60, - 0xbb, 0x04, 0xf5, 0xf0, 0xbb, 0xc6, 0xb4, 0x60, 0xbc, 0xe4, 0xd7, 0xf0, - 0xbd, 0xaf, 0xd0, 0xe0, 0xbe, 0xc4, 0xb9, 0xf0, 0xbf, 0x8f, 0xb2, 0xe0, - 0xc0, 0xa4, 0x9b, 0xf0, 0xc1, 0x6f, 0x94, 0xe0, 0xc2, 0x84, 0x7d, 0xf0, - 0xc3, 0x4f, 0x76, 0xe0, 0xc4, 0x64, 0x5f, 0xf0, 0xc5, 0x2f, 0x58, 0xe0, - 0xc6, 0x4d, 0x7c, 0x70, 0xc7, 0x0f, 0x3a, 0xe0, 0xc8, 0x2d, 0x5e, 0x70, - 0xc8, 0xf8, 0x57, 0x60, 0xca, 0x0d, 0x40, 0x70, 0xca, 0xd8, 0x39, 0x60, - 0xcb, 0x88, 0xf0, 0x70, 0xd2, 0x23, 0xf4, 0x70, 0xd2, 0x60, 0xfb, 0xe0, - 0xd3, 0x75, 0xe4, 0xf0, 0xd4, 0x40, 0xdd, 0xe0, 0xd5, 0x55, 0xc6, 0xf0, - 0xd6, 0x20, 0xbf, 0xe0, 0xd7, 0x35, 0xa8, 0xf0, 0xd8, 0x00, 0xa1, 0xe0, - 0xd9, 0x15, 0x8a, 0xf0, 0xd9, 0xe0, 0x83, 0xe0, 0xda, 0xfe, 0xa7, 0x70, - 0xdb, 0xc0, 0x65, 0xe0, 0xdc, 0xde, 0x89, 0x70, 0xdd, 0xa9, 0x82, 0x60, - 0xde, 0xbe, 0x6b, 0x70, 0xdf, 0x89, 
0x64, 0x60, 0xe0, 0x9e, 0x4d, 0x70, - 0xe1, 0x69, 0x46, 0x60, 0xe2, 0x7e, 0x2f, 0x70, 0xe3, 0x49, 0x28, 0x60, - 0xe4, 0x5e, 0x11, 0x70, 0xe5, 0x57, 0x2e, 0xe0, 0xe6, 0x47, 0x2d, 0xf0, - 0xe7, 0x37, 0x10, 0xe0, 0xe8, 0x27, 0x0f, 0xf0, 0xe9, 0x16, 0xf2, 0xe0, - 0xea, 0x06, 0xf1, 0xf0, 0xea, 0xf6, 0xd4, 0xe0, 0xeb, 0xe6, 0xd3, 0xf0, - 0xec, 0xd6, 0xb6, 0xe0, 0xed, 0xc6, 0xb5, 0xf0, 0xee, 0xbf, 0xd3, 0x60, - 0xef, 0xaf, 0xd2, 0x70, 0xf0, 0x9f, 0xb5, 0x60, 0xf1, 0x8f, 0xb4, 0x70, - 0xf2, 0x7f, 0x97, 0x60, 0xf3, 0x6f, 0x96, 0x70, 0xf4, 0x5f, 0x79, 0x60, - 0xf5, 0x4f, 0x78, 0x70, 0xf6, 0x3f, 0x5b, 0x60, 0xf7, 0x2f, 0x5a, 0x70, - 0xf8, 0x28, 0x77, 0xe0, 0xf9, 0x0f, 0x3c, 0x70, 0xfa, 0x08, 0x59, 0xe0, - 0xfa, 0xf8, 0x58, 0xf0, 0xfb, 0xe8, 0x3b, 0xe0, 0xfc, 0xd8, 0x3a, 0xf0, - 0xfd, 0xc8, 0x1d, 0xe0, 0xfe, 0xb8, 0x1c, 0xf0, 0xff, 0xa7, 0xff, 0xe0, - 0x00, 0x97, 0xfe, 0xf0, 0x01, 0x87, 0xe1, 0xe0, 0x02, 0x77, 0xe0, 0xf0, - 0x03, 0x70, 0xfe, 0x60, 0x04, 0x60, 0xfd, 0x70, 0x05, 0x50, 0xe0, 0x60, - 0x06, 0x40, 0xdf, 0x70, 0x07, 0x30, 0xc2, 0x60, 0x07, 0x8d, 0x19, 0x70, - 0x09, 0x10, 0xa4, 0x60, 0x09, 0xad, 0x94, 0xf0, 0x0a, 0xf0, 0x86, 0x60, - 0x0b, 0xe0, 0x85, 0x70, 0x0c, 0xd9, 0xa2, 0xe0, 0x0d, 0xc0, 0x67, 0x70, - 0x0e, 0xb9, 0x84, 0xe0, 0x0f, 0xa9, 0x83, 0xf0, 0x10, 0x99, 0x66, 0xe0, - 0x11, 0x89, 0x65, 0xf0, 0x12, 0x79, 0x48, 0xe0, 0x13, 0x69, 0x47, 0xf0, - 0x14, 0x59, 0x2a, 0xe0, 0x15, 0x49, 0x29, 0xf0, 0x16, 0x39, 0x0c, 0xe0, - 0x17, 0x29, 0x0b, 0xf0, 0x18, 0x22, 0x29, 0x60, 0x19, 0x08, 0xed, 0xf0, - 0x1a, 0x02, 0x0b, 0x60, 0x1a, 0xf2, 0x0a, 0x70, 0x1b, 0xe1, 0xed, 0x60, - 0x1c, 0xd1, 0xec, 0x70, 0x1d, 0xc1, 0xcf, 0x60, 0x1e, 0xb1, 0xce, 0x70, - 0x1f, 0xa1, 0xb1, 0x60, 0x20, 0x76, 0x00, 0xf0, 0x21, 0x81, 0x93, 0x60, - 0x22, 0x55, 0xe2, 0xf0, 0x23, 0x6a, 0xaf, 0xe0, 0x24, 0x35, 0xc4, 0xf0, - 0x25, 0x4a, 0x91, 0xe0, 0x26, 0x15, 0xa6, 0xf0, 0x27, 0x2a, 0x73, 0xe0, - 0x27, 0xfe, 0xc3, 0x70, 0x29, 0x0a, 0x55, 0xe0, 0x29, 0xde, 0xa5, 0x70, - 0x2a, 0xea, 0x37, 0xe0, 0x2b, 0xbe, 0x87, 0x70, 0x2c, 0xd3, 0x54, 0x60, - 0x2d, 0x9e, 0x69, 0x70, 0x2e, 0xb3, 0x36, 0x60, 0x2f, 0x7e, 0x4b, 0x70, - 0x30, 0x93, 0x18, 0x60, 0x31, 0x67, 0x67, 0xf0, 0x32, 0x72, 0xfa, 0x60, - 0x33, 0x47, 0x49, 0xf0, 0x34, 0x52, 0xdc, 0x60, 0x35, 0x27, 0x2b, 0xf0, - 0x36, 0x32, 0xbe, 0x60, 0x37, 0x07, 0x0d, 0xf0, 0x38, 0x1b, 0xda, 0xe0, - 0x38, 0xe6, 0xef, 0xf0, 0x39, 0xfb, 0xbc, 0xe0, 0x3a, 0xc6, 0xd1, 0xf0, - 0x3b, 0xdb, 0x9e, 0xe0, 0x3c, 0xaf, 0xee, 0x70, 0x3d, 0xbb, 0x80, 0xe0, - 0x3e, 0x8f, 0xd0, 0x70, 0x3f, 0x9b, 0x62, 0xe0, 0x40, 0x6f, 0xb2, 0x70, - 0x41, 0x84, 0x7f, 0x60, 0x42, 0x4f, 0x94, 0x70, 0x43, 0x64, 0x61, 0x60, - 0x44, 0x2f, 0x76, 0x70, 0x45, 0x44, 0x43, 0x60, 0x45, 0xf3, 0xa8, 0xf0, - 0x47, 0x2d, 0x5f, 0xe0, 0x47, 0xd3, 0x8a, 0xf0, 0x49, 0x0d, 0x41, 0xe0, - 0x49, 0xb3, 0x6c, 0xf0, 0x4a, 0xed, 0x23, 0xe0, 0x4b, 0x9c, 0x89, 0x70, - 0x4c, 0xd6, 0x40, 0x60, 0x4d, 0x7c, 0x6b, 0x70, 0x4e, 0xb6, 0x22, 0x60, - 0x4f, 0x5c, 0x4d, 0x70, 0x50, 0x96, 0x04, 0x60, 0x51, 0x3c, 0x2f, 0x70, - 0x52, 0x75, 0xe6, 0x60, 0x53, 0x1c, 0x11, 0x70, 0x54, 0x55, 0xc8, 0x60, - 0x54, 0xfb, 0xf3, 0x70, 0x56, 0x35, 0xaa, 0x60, 0x56, 0xe5, 0x0f, 0xf0, - 0x58, 0x1e, 0xc6, 0xe0, 0x58, 0xc4, 0xf1, 0xf0, 0x59, 0xfe, 0xa8, 0xe0, - 0x5a, 0xa4, 0xd3, 0xf0, 0x5b, 0xde, 0x8a, 0xe0, 0x5c, 0x84, 0xb5, 0xf0, - 0x5d, 0xbe, 0x6c, 0xe0, 0x5e, 0x64, 0x97, 0xf0, 0x5f, 0x9e, 0x4e, 0xe0, - 0x60, 0x4d, 0xb4, 0x70, 0x61, 0x87, 0x6b, 0x60, 0x62, 0x2d, 0x96, 0x70, - 0x63, 0x67, 0x4d, 0x60, 0x64, 0x0d, 0x78, 0x70, 0x65, 0x47, 0x2f, 0x60, - 0x65, 0xed, 0x5a, 0x70, 0x67, 0x27, 
0x11, 0x60, 0x67, 0xcd, 0x3c, 0x70, - 0x69, 0x06, 0xf3, 0x60, 0x69, 0xad, 0x1e, 0x70, 0x6a, 0xe6, 0xd5, 0x60, - 0x6b, 0x96, 0x3a, 0xf0, 0x6c, 0xcf, 0xf1, 0xe0, 0x6d, 0x76, 0x1c, 0xf0, - 0x6e, 0xaf, 0xd3, 0xe0, 0x6f, 0x55, 0xfe, 0xf0, 0x70, 0x8f, 0xb5, 0xe0, - 0x71, 0x35, 0xe0, 0xf0, 0x72, 0x6f, 0x97, 0xe0, 0x73, 0x15, 0xc2, 0xf0, - 0x74, 0x4f, 0x79, 0xe0, 0x74, 0xfe, 0xdf, 0x70, 0x76, 0x38, 0x96, 0x60, - 0x76, 0xde, 0xc1, 0x70, 0x78, 0x18, 0x78, 0x60, 0x78, 0xbe, 0xa3, 0x70, - 0x79, 0xf8, 0x5a, 0x60, 0x7a, 0x9e, 0x85, 0x70, 0x7b, 0xd8, 0x3c, 0x60, - 0x7c, 0x7e, 0x67, 0x70, 0x7d, 0xb8, 0x1e, 0x60, 0x7e, 0x5e, 0x49, 0x70, - 0x7f, 0x98, 0x00, 0x60, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x03, 0x04, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0xff, 0xff, 0xba, 0x9e, 0x00, 0x00, 0xff, 0xff, 0xc7, 0xc0, 0x01, 0x04, - 0xff, 0xff, 0xb9, 0xb0, 0x00, 0x08, 0xff, 0xff, 0xc7, 0xc0, 0x01, 0x0c, - 0xff, 0xff, 0xc7, 0xc0, 0x01, 0x10, 0x4c, 0x4d, 0x54, 0x00, 0x45, 0x44, - 0x54, 0x00, 0x45, 0x53, 0x54, 0x00, 0x45, 0x57, 0x54, 0x00, 0x45, 0x50, - 0x54, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x01, - 0x54, 0x5a, 0x69, 0x66, 0x32, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, - 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xed, - 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x14, 0xf8, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0x5e, 0x03, 0xf0, 0x90, - 0xff, 0xff, 0xff, 0xff, 0x9e, 0xa6, 0x1e, 0x70, 0xff, 0xff, 0xff, 0xff, - 0x9f, 0xba, 0xeb, 0x60, 0xff, 0xff, 0xff, 0xff, 0xa0, 0x86, 0x00, 0x70, - 0xff, 0xff, 0xff, 0xff, 0xa1, 0x9a, 0xcd, 0x60, 0xff, 0xff, 0xff, 0xff, - 0xa2, 0x65, 0xe2, 0x70, 0xff, 0xff, 0xff, 0xff, 0xa3, 0x83, 0xe9, 0xe0, - 0xff, 0xff, 0xff, 0xff, 0xa4, 0x6a, 0xae, 0x70, 0xff, 0xff, 0xff, 0xff, - 0xa5, 0x35, 0xa7, 0x60, 0xff, 0xff, 0xff, 0xff, 0xa6, 0x53, 0xca, 0xf0, - 0xff, 0xff, 0xff, 0xff, 0xa7, 0x15, 0x89, 0x60, 0xff, 0xff, 0xff, 0xff, - 0xa8, 0x33, 0xac, 0xf0, 0xff, 0xff, 0xff, 0xff, 0xa8, 0xfe, 0xa5, 0xe0, - 0xff, 0xff, 0xff, 0xff, 0xaa, 0x13, 0x8e, 0xf0, 0xff, 0xff, 0xff, 0xff, - 0xaa, 0xde, 0x87, 0xe0, 0xff, 0xff, 
0xff, 0xff, 0xab, 0xf3, 0x70, 0xf0, - 0xff, 0xff, 0xff, 0xff, 0xac, 0xbe, 0x69, 0xe0, 0xff, 0xff, 0xff, 0xff, - 0xad, 0xd3, 0x52, 0xf0, 0xff, 0xff, 0xff, 0xff, 0xae, 0x9e, 0x4b, 0xe0, - 0xff, 0xff, 0xff, 0xff, 0xaf, 0xb3, 0x34, 0xf0, 0xff, 0xff, 0xff, 0xff, - 0xb0, 0x7e, 0x2d, 0xe0, 0xff, 0xff, 0xff, 0xff, 0xb1, 0x9c, 0x51, 0x70, - 0xff, 0xff, 0xff, 0xff, 0xb2, 0x67, 0x4a, 0x60, 0xff, 0xff, 0xff, 0xff, - 0xb3, 0x7c, 0x33, 0x70, 0xff, 0xff, 0xff, 0xff, 0xb4, 0x47, 0x2c, 0x60, - 0xff, 0xff, 0xff, 0xff, 0xb5, 0x5c, 0x15, 0x70, 0xff, 0xff, 0xff, 0xff, - 0xb6, 0x27, 0x0e, 0x60, 0xff, 0xff, 0xff, 0xff, 0xb7, 0x3b, 0xf7, 0x70, - 0xff, 0xff, 0xff, 0xff, 0xb8, 0x06, 0xf0, 0x60, 0xff, 0xff, 0xff, 0xff, - 0xb9, 0x1b, 0xd9, 0x70, 0xff, 0xff, 0xff, 0xff, 0xb9, 0xe6, 0xd2, 0x60, - 0xff, 0xff, 0xff, 0xff, 0xbb, 0x04, 0xf5, 0xf0, 0xff, 0xff, 0xff, 0xff, - 0xbb, 0xc6, 0xb4, 0x60, 0xff, 0xff, 0xff, 0xff, 0xbc, 0xe4, 0xd7, 0xf0, - 0xff, 0xff, 0xff, 0xff, 0xbd, 0xaf, 0xd0, 0xe0, 0xff, 0xff, 0xff, 0xff, - 0xbe, 0xc4, 0xb9, 0xf0, 0xff, 0xff, 0xff, 0xff, 0xbf, 0x8f, 0xb2, 0xe0, - 0xff, 0xff, 0xff, 0xff, 0xc0, 0xa4, 0x9b, 0xf0, 0xff, 0xff, 0xff, 0xff, - 0xc1, 0x6f, 0x94, 0xe0, 0xff, 0xff, 0xff, 0xff, 0xc2, 0x84, 0x7d, 0xf0, - 0xff, 0xff, 0xff, 0xff, 0xc3, 0x4f, 0x76, 0xe0, 0xff, 0xff, 0xff, 0xff, - 0xc4, 0x64, 0x5f, 0xf0, 0xff, 0xff, 0xff, 0xff, 0xc5, 0x2f, 0x58, 0xe0, - 0xff, 0xff, 0xff, 0xff, 0xc6, 0x4d, 0x7c, 0x70, 0xff, 0xff, 0xff, 0xff, - 0xc7, 0x0f, 0x3a, 0xe0, 0xff, 0xff, 0xff, 0xff, 0xc8, 0x2d, 0x5e, 0x70, - 0xff, 0xff, 0xff, 0xff, 0xc8, 0xf8, 0x57, 0x60, 0xff, 0xff, 0xff, 0xff, - 0xca, 0x0d, 0x40, 0x70, 0xff, 0xff, 0xff, 0xff, 0xca, 0xd8, 0x39, 0x60, - 0xff, 0xff, 0xff, 0xff, 0xcb, 0x88, 0xf0, 0x70, 0xff, 0xff, 0xff, 0xff, - 0xd2, 0x23, 0xf4, 0x70, 0xff, 0xff, 0xff, 0xff, 0xd2, 0x60, 0xfb, 0xe0, - 0xff, 0xff, 0xff, 0xff, 0xd3, 0x75, 0xe4, 0xf0, 0xff, 0xff, 0xff, 0xff, - 0xd4, 0x40, 0xdd, 0xe0, 0xff, 0xff, 0xff, 0xff, 0xd5, 0x55, 0xc6, 0xf0, - 0xff, 0xff, 0xff, 0xff, 0xd6, 0x20, 0xbf, 0xe0, 0xff, 0xff, 0xff, 0xff, - 0xd7, 0x35, 0xa8, 0xf0, 0xff, 0xff, 0xff, 0xff, 0xd8, 0x00, 0xa1, 0xe0, - 0xff, 0xff, 0xff, 0xff, 0xd9, 0x15, 0x8a, 0xf0, 0xff, 0xff, 0xff, 0xff, - 0xd9, 0xe0, 0x83, 0xe0, 0xff, 0xff, 0xff, 0xff, 0xda, 0xfe, 0xa7, 0x70, - 0xff, 0xff, 0xff, 0xff, 0xdb, 0xc0, 0x65, 0xe0, 0xff, 0xff, 0xff, 0xff, - 0xdc, 0xde, 0x89, 0x70, 0xff, 0xff, 0xff, 0xff, 0xdd, 0xa9, 0x82, 0x60, - 0xff, 0xff, 0xff, 0xff, 0xde, 0xbe, 0x6b, 0x70, 0xff, 0xff, 0xff, 0xff, - 0xdf, 0x89, 0x64, 0x60, 0xff, 0xff, 0xff, 0xff, 0xe0, 0x9e, 0x4d, 0x70, - 0xff, 0xff, 0xff, 0xff, 0xe1, 0x69, 0x46, 0x60, 0xff, 0xff, 0xff, 0xff, - 0xe2, 0x7e, 0x2f, 0x70, 0xff, 0xff, 0xff, 0xff, 0xe3, 0x49, 0x28, 0x60, - 0xff, 0xff, 0xff, 0xff, 0xe4, 0x5e, 0x11, 0x70, 0xff, 0xff, 0xff, 0xff, - 0xe5, 0x57, 0x2e, 0xe0, 0xff, 0xff, 0xff, 0xff, 0xe6, 0x47, 0x2d, 0xf0, - 0xff, 0xff, 0xff, 0xff, 0xe7, 0x37, 0x10, 0xe0, 0xff, 0xff, 0xff, 0xff, - 0xe8, 0x27, 0x0f, 0xf0, 0xff, 0xff, 0xff, 0xff, 0xe9, 0x16, 0xf2, 0xe0, - 0xff, 0xff, 0xff, 0xff, 0xea, 0x06, 0xf1, 0xf0, 0xff, 0xff, 0xff, 0xff, - 0xea, 0xf6, 0xd4, 0xe0, 0xff, 0xff, 0xff, 0xff, 0xeb, 0xe6, 0xd3, 0xf0, - 0xff, 0xff, 0xff, 0xff, 0xec, 0xd6, 0xb6, 0xe0, 0xff, 0xff, 0xff, 0xff, - 0xed, 0xc6, 0xb5, 0xf0, 0xff, 0xff, 0xff, 0xff, 0xee, 0xbf, 0xd3, 0x60, - 0xff, 0xff, 0xff, 0xff, 0xef, 0xaf, 0xd2, 0x70, 0xff, 0xff, 0xff, 0xff, - 0xf0, 0x9f, 0xb5, 0x60, 0xff, 0xff, 0xff, 0xff, 0xf1, 0x8f, 0xb4, 0x70, - 0xff, 0xff, 0xff, 0xff, 0xf2, 0x7f, 0x97, 0x60, 0xff, 0xff, 0xff, 0xff, - 0xf3, 0x6f, 0x96, 0x70, 0xff, 0xff, 
0xff, 0xff, 0xf4, 0x5f, 0x79, 0x60, - 0xff, 0xff, 0xff, 0xff, 0xf5, 0x4f, 0x78, 0x70, 0xff, 0xff, 0xff, 0xff, - 0xf6, 0x3f, 0x5b, 0x60, 0xff, 0xff, 0xff, 0xff, 0xf7, 0x2f, 0x5a, 0x70, - 0xff, 0xff, 0xff, 0xff, 0xf8, 0x28, 0x77, 0xe0, 0xff, 0xff, 0xff, 0xff, - 0xf9, 0x0f, 0x3c, 0x70, 0xff, 0xff, 0xff, 0xff, 0xfa, 0x08, 0x59, 0xe0, - 0xff, 0xff, 0xff, 0xff, 0xfa, 0xf8, 0x58, 0xf0, 0xff, 0xff, 0xff, 0xff, - 0xfb, 0xe8, 0x3b, 0xe0, 0xff, 0xff, 0xff, 0xff, 0xfc, 0xd8, 0x3a, 0xf0, - 0xff, 0xff, 0xff, 0xff, 0xfd, 0xc8, 0x1d, 0xe0, 0xff, 0xff, 0xff, 0xff, - 0xfe, 0xb8, 0x1c, 0xf0, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa7, 0xff, 0xe0, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x97, 0xfe, 0xf0, 0x00, 0x00, 0x00, 0x00, - 0x01, 0x87, 0xe1, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x02, 0x77, 0xe0, 0xf0, - 0x00, 0x00, 0x00, 0x00, 0x03, 0x70, 0xfe, 0x60, 0x00, 0x00, 0x00, 0x00, - 0x04, 0x60, 0xfd, 0x70, 0x00, 0x00, 0x00, 0x00, 0x05, 0x50, 0xe0, 0x60, - 0x00, 0x00, 0x00, 0x00, 0x06, 0x40, 0xdf, 0x70, 0x00, 0x00, 0x00, 0x00, - 0x07, 0x30, 0xc2, 0x60, 0x00, 0x00, 0x00, 0x00, 0x07, 0x8d, 0x19, 0x70, - 0x00, 0x00, 0x00, 0x00, 0x09, 0x10, 0xa4, 0x60, 0x00, 0x00, 0x00, 0x00, - 0x09, 0xad, 0x94, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x0a, 0xf0, 0x86, 0x60, - 0x00, 0x00, 0x00, 0x00, 0x0b, 0xe0, 0x85, 0x70, 0x00, 0x00, 0x00, 0x00, - 0x0c, 0xd9, 0xa2, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x0d, 0xc0, 0x67, 0x70, - 0x00, 0x00, 0x00, 0x00, 0x0e, 0xb9, 0x84, 0xe0, 0x00, 0x00, 0x00, 0x00, - 0x0f, 0xa9, 0x83, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x10, 0x99, 0x66, 0xe0, - 0x00, 0x00, 0x00, 0x00, 0x11, 0x89, 0x65, 0xf0, 0x00, 0x00, 0x00, 0x00, - 0x12, 0x79, 0x48, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x13, 0x69, 0x47, 0xf0, - 0x00, 0x00, 0x00, 0x00, 0x14, 0x59, 0x2a, 0xe0, 0x00, 0x00, 0x00, 0x00, - 0x15, 0x49, 0x29, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x16, 0x39, 0x0c, 0xe0, - 0x00, 0x00, 0x00, 0x00, 0x17, 0x29, 0x0b, 0xf0, 0x00, 0x00, 0x00, 0x00, - 0x18, 0x22, 0x29, 0x60, 0x00, 0x00, 0x00, 0x00, 0x19, 0x08, 0xed, 0xf0, - 0x00, 0x00, 0x00, 0x00, 0x1a, 0x02, 0x0b, 0x60, 0x00, 0x00, 0x00, 0x00, - 0x1a, 0xf2, 0x0a, 0x70, 0x00, 0x00, 0x00, 0x00, 0x1b, 0xe1, 0xed, 0x60, - 0x00, 0x00, 0x00, 0x00, 0x1c, 0xd1, 0xec, 0x70, 0x00, 0x00, 0x00, 0x00, - 0x1d, 0xc1, 0xcf, 0x60, 0x00, 0x00, 0x00, 0x00, 0x1e, 0xb1, 0xce, 0x70, - 0x00, 0x00, 0x00, 0x00, 0x1f, 0xa1, 0xb1, 0x60, 0x00, 0x00, 0x00, 0x00, - 0x20, 0x76, 0x00, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x21, 0x81, 0x93, 0x60, - 0x00, 0x00, 0x00, 0x00, 0x22, 0x55, 0xe2, 0xf0, 0x00, 0x00, 0x00, 0x00, - 0x23, 0x6a, 0xaf, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x24, 0x35, 0xc4, 0xf0, - 0x00, 0x00, 0x00, 0x00, 0x25, 0x4a, 0x91, 0xe0, 0x00, 0x00, 0x00, 0x00, - 0x26, 0x15, 0xa6, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x27, 0x2a, 0x73, 0xe0, - 0x00, 0x00, 0x00, 0x00, 0x27, 0xfe, 0xc3, 0x70, 0x00, 0x00, 0x00, 0x00, - 0x29, 0x0a, 0x55, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x29, 0xde, 0xa5, 0x70, - 0x00, 0x00, 0x00, 0x00, 0x2a, 0xea, 0x37, 0xe0, 0x00, 0x00, 0x00, 0x00, - 0x2b, 0xbe, 0x87, 0x70, 0x00, 0x00, 0x00, 0x00, 0x2c, 0xd3, 0x54, 0x60, - 0x00, 0x00, 0x00, 0x00, 0x2d, 0x9e, 0x69, 0x70, 0x00, 0x00, 0x00, 0x00, - 0x2e, 0xb3, 0x36, 0x60, 0x00, 0x00, 0x00, 0x00, 0x2f, 0x7e, 0x4b, 0x70, - 0x00, 0x00, 0x00, 0x00, 0x30, 0x93, 0x18, 0x60, 0x00, 0x00, 0x00, 0x00, - 0x31, 0x67, 0x67, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x32, 0x72, 0xfa, 0x60, - 0x00, 0x00, 0x00, 0x00, 0x33, 0x47, 0x49, 0xf0, 0x00, 0x00, 0x00, 0x00, - 0x34, 0x52, 0xdc, 0x60, 0x00, 0x00, 0x00, 0x00, 0x35, 0x27, 0x2b, 0xf0, - 0x00, 0x00, 0x00, 0x00, 0x36, 0x32, 0xbe, 0x60, 0x00, 0x00, 0x00, 0x00, - 0x37, 0x07, 0x0d, 0xf0, 0x00, 0x00, 
0x00, 0x00, 0x38, 0x1b, 0xda, 0xe0, - 0x00, 0x00, 0x00, 0x00, 0x38, 0xe6, 0xef, 0xf0, 0x00, 0x00, 0x00, 0x00, - 0x39, 0xfb, 0xbc, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x3a, 0xc6, 0xd1, 0xf0, - 0x00, 0x00, 0x00, 0x00, 0x3b, 0xdb, 0x9e, 0xe0, 0x00, 0x00, 0x00, 0x00, - 0x3c, 0xaf, 0xee, 0x70, 0x00, 0x00, 0x00, 0x00, 0x3d, 0xbb, 0x80, 0xe0, - 0x00, 0x00, 0x00, 0x00, 0x3e, 0x8f, 0xd0, 0x70, 0x00, 0x00, 0x00, 0x00, - 0x3f, 0x9b, 0x62, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x40, 0x6f, 0xb2, 0x70, - 0x00, 0x00, 0x00, 0x00, 0x41, 0x84, 0x7f, 0x60, 0x00, 0x00, 0x00, 0x00, - 0x42, 0x4f, 0x94, 0x70, 0x00, 0x00, 0x00, 0x00, 0x43, 0x64, 0x61, 0x60, - 0x00, 0x00, 0x00, 0x00, 0x44, 0x2f, 0x76, 0x70, 0x00, 0x00, 0x00, 0x00, - 0x45, 0x44, 0x43, 0x60, 0x00, 0x00, 0x00, 0x00, 0x45, 0xf3, 0xa8, 0xf0, - 0x00, 0x00, 0x00, 0x00, 0x47, 0x2d, 0x5f, 0xe0, 0x00, 0x00, 0x00, 0x00, - 0x47, 0xd3, 0x8a, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x49, 0x0d, 0x41, 0xe0, - 0x00, 0x00, 0x00, 0x00, 0x49, 0xb3, 0x6c, 0xf0, 0x00, 0x00, 0x00, 0x00, - 0x4a, 0xed, 0x23, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x4b, 0x9c, 0x89, 0x70, - 0x00, 0x00, 0x00, 0x00, 0x4c, 0xd6, 0x40, 0x60, 0x00, 0x00, 0x00, 0x00, - 0x4d, 0x7c, 0x6b, 0x70, 0x00, 0x00, 0x00, 0x00, 0x4e, 0xb6, 0x22, 0x60, - 0x00, 0x00, 0x00, 0x00, 0x4f, 0x5c, 0x4d, 0x70, 0x00, 0x00, 0x00, 0x00, - 0x50, 0x96, 0x04, 0x60, 0x00, 0x00, 0x00, 0x00, 0x51, 0x3c, 0x2f, 0x70, - 0x00, 0x00, 0x00, 0x00, 0x52, 0x75, 0xe6, 0x60, 0x00, 0x00, 0x00, 0x00, - 0x53, 0x1c, 0x11, 0x70, 0x00, 0x00, 0x00, 0x00, 0x54, 0x55, 0xc8, 0x60, - 0x00, 0x00, 0x00, 0x00, 0x54, 0xfb, 0xf3, 0x70, 0x00, 0x00, 0x00, 0x00, - 0x56, 0x35, 0xaa, 0x60, 0x00, 0x00, 0x00, 0x00, 0x56, 0xe5, 0x0f, 0xf0, - 0x00, 0x00, 0x00, 0x00, 0x58, 0x1e, 0xc6, 0xe0, 0x00, 0x00, 0x00, 0x00, - 0x58, 0xc4, 0xf1, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x59, 0xfe, 0xa8, 0xe0, - 0x00, 0x00, 0x00, 0x00, 0x5a, 0xa4, 0xd3, 0xf0, 0x00, 0x00, 0x00, 0x00, - 0x5b, 0xde, 0x8a, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x5c, 0x84, 0xb5, 0xf0, - 0x00, 0x00, 0x00, 0x00, 0x5d, 0xbe, 0x6c, 0xe0, 0x00, 0x00, 0x00, 0x00, - 0x5e, 0x64, 0x97, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x5f, 0x9e, 0x4e, 0xe0, - 0x00, 0x00, 0x00, 0x00, 0x60, 0x4d, 0xb4, 0x70, 0x00, 0x00, 0x00, 0x00, - 0x61, 0x87, 0x6b, 0x60, 0x00, 0x00, 0x00, 0x00, 0x62, 0x2d, 0x96, 0x70, - 0x00, 0x00, 0x00, 0x00, 0x63, 0x67, 0x4d, 0x60, 0x00, 0x00, 0x00, 0x00, - 0x64, 0x0d, 0x78, 0x70, 0x00, 0x00, 0x00, 0x00, 0x65, 0x47, 0x2f, 0x60, - 0x00, 0x00, 0x00, 0x00, 0x65, 0xed, 0x5a, 0x70, 0x00, 0x00, 0x00, 0x00, - 0x67, 0x27, 0x11, 0x60, 0x00, 0x00, 0x00, 0x00, 0x67, 0xcd, 0x3c, 0x70, - 0x00, 0x00, 0x00, 0x00, 0x69, 0x06, 0xf3, 0x60, 0x00, 0x00, 0x00, 0x00, - 0x69, 0xad, 0x1e, 0x70, 0x00, 0x00, 0x00, 0x00, 0x6a, 0xe6, 0xd5, 0x60, - 0x00, 0x00, 0x00, 0x00, 0x6b, 0x96, 0x3a, 0xf0, 0x00, 0x00, 0x00, 0x00, - 0x6c, 0xcf, 0xf1, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x6d, 0x76, 0x1c, 0xf0, - 0x00, 0x00, 0x00, 0x00, 0x6e, 0xaf, 0xd3, 0xe0, 0x00, 0x00, 0x00, 0x00, - 0x6f, 0x55, 0xfe, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x70, 0x8f, 0xb5, 0xe0, - 0x00, 0x00, 0x00, 0x00, 0x71, 0x35, 0xe0, 0xf0, 0x00, 0x00, 0x00, 0x00, - 0x72, 0x6f, 0x97, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x73, 0x15, 0xc2, 0xf0, - 0x00, 0x00, 0x00, 0x00, 0x74, 0x4f, 0x79, 0xe0, 0x00, 0x00, 0x00, 0x00, - 0x74, 0xfe, 0xdf, 0x70, 0x00, 0x00, 0x00, 0x00, 0x76, 0x38, 0x96, 0x60, - 0x00, 0x00, 0x00, 0x00, 0x76, 0xde, 0xc1, 0x70, 0x00, 0x00, 0x00, 0x00, - 0x78, 0x18, 0x78, 0x60, 0x00, 0x00, 0x00, 0x00, 0x78, 0xbe, 0xa3, 0x70, - 0x00, 0x00, 0x00, 0x00, 0x79, 0xf8, 0x5a, 0x60, 0x00, 0x00, 0x00, 0x00, - 0x7a, 0x9e, 0x85, 0x70, 0x00, 0x00, 
0x00, 0x00, 0x7b, 0xd8, 0x3c, 0x60, - 0x00, 0x00, 0x00, 0x00, 0x7c, 0x7e, 0x67, 0x70, 0x00, 0x00, 0x00, 0x00, - 0x7d, 0xb8, 0x1e, 0x60, 0x00, 0x00, 0x00, 0x00, 0x7e, 0x5e, 0x49, 0x70, - 0x00, 0x00, 0x00, 0x00, 0x7f, 0x98, 0x00, 0x60, 0x00, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x03, 0x04, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, - 0x02, 0x01, 0x02, 0x01, 0x02, 0xff, 0xff, 0xba, 0x9e, 0x00, 0x00, 0xff, - 0xff, 0xc7, 0xc0, 0x01, 0x04, 0xff, 0xff, 0xb9, 0xb0, 0x00, 0x08, 0xff, - 0xff, 0xc7, 0xc0, 0x01, 0x0c, 0xff, 0xff, 0xc7, 0xc0, 0x01, 0x10, 0x4c, - 0x4d, 0x54, 0x00, 0x45, 0x44, 0x54, 0x00, 0x45, 0x53, 0x54, 0x00, 0x45, - 0x57, 0x54, 0x00, 0x45, 0x50, 0x54, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, - 0x00, 0x00, 0x00, 0x00, 0x01, 0x0a, 0x45, 0x53, 0x54, 0x35, 0x45, 0x44, - 0x54, 0x2c, 0x4d, 0x33, 0x2e, 0x32, 0x2e, 0x30, 0x2c, 0x4d, 0x31, 0x31, - 0x2e, 0x31, 0x2e, 0x30, 0x0a -}; -unsigned int America_New_York_len = 3545; -unsigned char Australia_Sydney[] = { - 0x54, 0x5a, 0x69, 0x66, 0x32, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, - 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8e, - 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x0e, 0x80, 0x00, 0x00, 0x00, - 0x9c, 0x4e, 0xa6, 0x9c, 0x9c, 0xbc, 0x20, 0xf0, 0xcb, 0x54, 0xb3, 0x00, - 0xcb, 0xc7, 0x57, 0x70, 0xcc, 0xb7, 0x56, 0x80, 0xcd, 0xa7, 0x39, 0x70, - 0xce, 0xa0, 0x73, 0x00, 0xcf, 0x87, 0x1b, 0x70, 0x03, 0x70, 0x39, 0x80, - 0x04, 0x0d, 0x1c, 0x00, 0x05, 0x50, 0x1b, 0x80, 0x05, 0xf6, 0x38, 0x80, - 0x07, 0x2f, 0xfd, 0x80, 0x07, 0xd6, 0x1a, 0x80, 0x09, 0x0f, 0xdf, 0x80, - 0x09, 0xb5, 0xfc, 0x80, 0x0a, 0xef, 0xc1, 0x80, 0x0b, 0x9f, 0x19, 0x00, - 0x0c, 0xd8, 0xde, 0x00, 0x0d, 0x7e, 0xfb, 0x00, 0x0e, 0xb8, 0xc0, 0x00, - 0x0f, 0x5e, 0xdd, 0x00, 0x10, 0x98, 0xa2, 0x00, 0x11, 0x3e, 0xbf, 0x00, - 0x12, 0x78, 0x84, 0x00, 0x13, 0x1e, 0xa1, 0x00, 0x14, 0x58, 0x66, 0x00, - 0x14, 0xfe, 0x83, 0x00, 0x16, 0x38, 0x48, 0x00, 0x17, 0x0c, 0x89, 0x80, - 0x18, 0x21, 0x64, 0x80, 0x18, 0xc7, 0x81, 0x80, 0x1a, 0x01, 0x46, 0x80, - 0x1a, 0xa7, 0x63, 0x80, 0x1b, 0xe1, 0x28, 0x80, 0x1c, 0x87, 0x45, 0x80, - 0x1d, 0xc1, 0x0a, 0x80, 0x1e, 0x79, 0x9c, 0x80, 0x1f, 0x97, 0xb2, 
0x00, - 0x20, 0x59, 0x7e, 0x80, 0x21, 0x80, 0xce, 0x80, 0x22, 0x42, 0x9b, 0x00, - 0x23, 0x69, 0xeb, 0x00, 0x24, 0x22, 0x7d, 0x00, 0x25, 0x49, 0xcd, 0x00, - 0x25, 0xef, 0xea, 0x00, 0x27, 0x29, 0xaf, 0x00, 0x27, 0xcf, 0xcc, 0x00, - 0x29, 0x09, 0x91, 0x00, 0x29, 0xaf, 0xae, 0x00, 0x2a, 0xe9, 0x73, 0x00, - 0x2b, 0x98, 0xca, 0x80, 0x2c, 0xd2, 0x8f, 0x80, 0x2d, 0x78, 0xac, 0x80, - 0x2e, 0xb2, 0x71, 0x80, 0x2f, 0x58, 0x8e, 0x80, 0x30, 0x92, 0x53, 0x80, - 0x31, 0x5d, 0x5a, 0x80, 0x32, 0x72, 0x35, 0x80, 0x33, 0x3d, 0x3c, 0x80, - 0x34, 0x52, 0x17, 0x80, 0x35, 0x1d, 0x1e, 0x80, 0x36, 0x31, 0xf9, 0x80, - 0x36, 0xfd, 0x00, 0x80, 0x38, 0x1b, 0x16, 0x00, 0x38, 0xdc, 0xe2, 0x80, - 0x39, 0xa7, 0xe9, 0x80, 0x3a, 0xbc, 0xc4, 0x80, 0x3b, 0xda, 0xda, 0x00, - 0x3c, 0xa5, 0xe1, 0x00, 0x3d, 0xba, 0xbc, 0x00, 0x3e, 0x85, 0xc3, 0x00, - 0x3f, 0x9a, 0x9e, 0x00, 0x40, 0x65, 0xa5, 0x00, 0x41, 0x83, 0xba, 0x80, - 0x42, 0x45, 0x87, 0x00, 0x43, 0x63, 0x9c, 0x80, 0x44, 0x2e, 0xa3, 0x80, - 0x45, 0x43, 0x7e, 0x80, 0x46, 0x05, 0x4b, 0x00, 0x47, 0x23, 0x60, 0x80, - 0x47, 0xf7, 0xa2, 0x00, 0x48, 0xe7, 0x93, 0x00, 0x49, 0xd7, 0x84, 0x00, - 0x4a, 0xc7, 0x75, 0x00, 0x4b, 0xb7, 0x66, 0x00, 0x4c, 0xa7, 0x57, 0x00, - 0x4d, 0x97, 0x48, 0x00, 0x4e, 0x87, 0x39, 0x00, 0x4f, 0x77, 0x2a, 0x00, - 0x50, 0x70, 0x55, 0x80, 0x51, 0x60, 0x46, 0x80, 0x52, 0x50, 0x37, 0x80, - 0x53, 0x40, 0x28, 0x80, 0x54, 0x30, 0x19, 0x80, 0x55, 0x20, 0x0a, 0x80, - 0x56, 0x0f, 0xfb, 0x80, 0x56, 0xff, 0xec, 0x80, 0x57, 0xef, 0xdd, 0x80, - 0x58, 0xdf, 0xce, 0x80, 0x59, 0xcf, 0xbf, 0x80, 0x5a, 0xbf, 0xb0, 0x80, - 0x5b, 0xb8, 0xdc, 0x00, 0x5c, 0xa8, 0xcd, 0x00, 0x5d, 0x98, 0xbe, 0x00, - 0x5e, 0x88, 0xaf, 0x00, 0x5f, 0x78, 0xa0, 0x00, 0x60, 0x68, 0x91, 0x00, - 0x61, 0x58, 0x82, 0x00, 0x62, 0x48, 0x73, 0x00, 0x63, 0x38, 0x64, 0x00, - 0x64, 0x28, 0x55, 0x00, 0x65, 0x18, 0x46, 0x00, 0x66, 0x11, 0x71, 0x80, - 0x67, 0x01, 0x62, 0x80, 0x67, 0xf1, 0x53, 0x80, 0x68, 0xe1, 0x44, 0x80, - 0x69, 0xd1, 0x35, 0x80, 0x6a, 0xc1, 0x26, 0x80, 0x6b, 0xb1, 0x17, 0x80, - 0x6c, 0xa1, 0x08, 0x80, 0x6d, 0x90, 0xf9, 0x80, 0x6e, 0x80, 0xea, 0x80, - 0x6f, 0x70, 0xdb, 0x80, 0x70, 0x6a, 0x07, 0x00, 0x71, 0x59, 0xf8, 0x00, - 0x72, 0x49, 0xe9, 0x00, 0x73, 0x39, 0xda, 0x00, 0x74, 0x29, 0xcb, 0x00, - 0x75, 0x19, 0xbc, 0x00, 0x76, 0x09, 0xad, 0x00, 0x76, 0xf9, 0x9e, 0x00, - 0x77, 0xe9, 0x8f, 0x00, 0x78, 0xd9, 0x80, 0x00, 0x79, 0xc9, 0x71, 0x00, - 0x7a, 0xb9, 0x62, 0x00, 0x7b, 0xb2, 0x8d, 0x80, 0x7c, 0xa2, 0x7e, 0x80, - 0x7d, 0x92, 0x6f, 0x80, 0x7e, 0x82, 0x60, 0x80, 0x7f, 0x72, 0x51, 0x80, - 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, 0x03, 0x04, 0x03, - 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, - 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, - 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, - 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, - 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, - 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, - 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, - 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, - 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, - 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, - 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x00, 0x00, - 0x8d, 0xc4, 0x00, 0x00, 0x00, 0x00, 0x9a, 0xb0, 0x01, 0x04, 0x00, 0x00, - 0x8c, 0xa0, 0x00, 0x09, 0x00, 0x00, 0x9a, 0xb0, 0x01, 0x04, 0x00, 
0x00, - 0x8c, 0xa0, 0x00, 0x09, 0x4c, 0x4d, 0x54, 0x00, 0x41, 0x45, 0x44, 0x54, - 0x00, 0x41, 0x45, 0x53, 0x54, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x54, 0x5a, 0x69, 0x66, 0x32, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x8f, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x0e, - 0xf8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, - 0x73, 0x16, 0x7f, 0x3c, 0xff, 0xff, 0xff, 0xff, 0x9c, 0x4e, 0xa6, 0x9c, - 0xff, 0xff, 0xff, 0xff, 0x9c, 0xbc, 0x20, 0xf0, 0xff, 0xff, 0xff, 0xff, - 0xcb, 0x54, 0xb3, 0x00, 0xff, 0xff, 0xff, 0xff, 0xcb, 0xc7, 0x57, 0x70, - 0xff, 0xff, 0xff, 0xff, 0xcc, 0xb7, 0x56, 0x80, 0xff, 0xff, 0xff, 0xff, - 0xcd, 0xa7, 0x39, 0x70, 0xff, 0xff, 0xff, 0xff, 0xce, 0xa0, 0x73, 0x00, - 0xff, 0xff, 0xff, 0xff, 0xcf, 0x87, 0x1b, 0x70, 0x00, 0x00, 0x00, 0x00, - 0x03, 0x70, 0x39, 0x80, 0x00, 0x00, 0x00, 0x00, 0x04, 0x0d, 0x1c, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x05, 0x50, 0x1b, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x05, 0xf6, 0x38, 0x80, 0x00, 0x00, 0x00, 0x00, 0x07, 0x2f, 0xfd, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x07, 0xd6, 0x1a, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x09, 0x0f, 0xdf, 0x80, 0x00, 0x00, 0x00, 0x00, 0x09, 0xb5, 0xfc, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x0a, 0xef, 0xc1, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x0b, 0x9f, 0x19, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0c, 0xd8, 0xde, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x0d, 0x7e, 0xfb, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x0e, 0xb8, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0f, 0x5e, 0xdd, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x10, 0x98, 0xa2, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x11, 0x3e, 0xbf, 0x00, 0x00, 0x00, 0x00, 0x00, 0x12, 0x78, 0x84, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x13, 0x1e, 0xa1, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x14, 0x58, 0x66, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0xfe, 0x83, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x16, 0x38, 0x48, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x17, 0x0c, 0x89, 0x80, 0x00, 0x00, 0x00, 0x00, 0x18, 0x21, 0x64, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x18, 0xc7, 0x81, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x1a, 0x01, 0x46, 0x80, 0x00, 0x00, 0x00, 0x00, 0x1a, 0xa7, 0x63, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x1b, 0xe1, 0x28, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x1c, 0x87, 0x45, 0x80, 0x00, 0x00, 0x00, 0x00, 0x1d, 0xc1, 0x0a, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x1e, 0x79, 0x9c, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x1f, 0x97, 0xb2, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x59, 0x7e, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x21, 0x80, 0xce, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x22, 0x42, 0x9b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x23, 0x69, 0xeb, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x24, 0x22, 0x7d, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x25, 0x49, 0xcd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x25, 0xef, 0xea, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x27, 0x29, 0xaf, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x27, 0xcf, 0xcc, 0x00, 0x00, 0x00, 0x00, 0x00, 0x29, 0x09, 0x91, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x29, 0xaf, 0xae, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x2a, 0xe9, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2b, 0x98, 0xca, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x2c, 0xd2, 0x8f, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x2d, 0x78, 0xac, 0x80, 0x00, 0x00, 0x00, 0x00, 0x2e, 0xb2, 0x71, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x2f, 0x58, 0x8e, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x30, 0x92, 0x53, 0x80, 0x00, 0x00, 0x00, 0x00, 0x31, 0x5d, 0x5a, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x32, 0x72, 0x35, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x33, 0x3d, 0x3c, 0x80, 0x00, 0x00, 0x00, 0x00, 0x34, 0x52, 0x17, 
0x80, - 0x00, 0x00, 0x00, 0x00, 0x35, 0x1d, 0x1e, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x36, 0x31, 0xf9, 0x80, 0x00, 0x00, 0x00, 0x00, 0x36, 0xfd, 0x00, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x38, 0x1b, 0x16, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x38, 0xdc, 0xe2, 0x80, 0x00, 0x00, 0x00, 0x00, 0x39, 0xa7, 0xe9, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x3a, 0xbc, 0xc4, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x3b, 0xda, 0xda, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3c, 0xa5, 0xe1, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x3d, 0xba, 0xbc, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x3e, 0x85, 0xc3, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3f, 0x9a, 0x9e, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x40, 0x65, 0xa5, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x41, 0x83, 0xba, 0x80, 0x00, 0x00, 0x00, 0x00, 0x42, 0x45, 0x87, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x43, 0x63, 0x9c, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x44, 0x2e, 0xa3, 0x80, 0x00, 0x00, 0x00, 0x00, 0x45, 0x43, 0x7e, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x46, 0x05, 0x4b, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x47, 0x23, 0x60, 0x80, 0x00, 0x00, 0x00, 0x00, 0x47, 0xf7, 0xa2, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x48, 0xe7, 0x93, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x49, 0xd7, 0x84, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4a, 0xc7, 0x75, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x4b, 0xb7, 0x66, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x4c, 0xa7, 0x57, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4d, 0x97, 0x48, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x4e, 0x87, 0x39, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x4f, 0x77, 0x2a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50, 0x70, 0x55, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x51, 0x60, 0x46, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x52, 0x50, 0x37, 0x80, 0x00, 0x00, 0x00, 0x00, 0x53, 0x40, 0x28, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x54, 0x30, 0x19, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x55, 0x20, 0x0a, 0x80, 0x00, 0x00, 0x00, 0x00, 0x56, 0x0f, 0xfb, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x56, 0xff, 0xec, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x57, 0xef, 0xdd, 0x80, 0x00, 0x00, 0x00, 0x00, 0x58, 0xdf, 0xce, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x59, 0xcf, 0xbf, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x5a, 0xbf, 0xb0, 0x80, 0x00, 0x00, 0x00, 0x00, 0x5b, 0xb8, 0xdc, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x5c, 0xa8, 0xcd, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x5d, 0x98, 0xbe, 0x00, 0x00, 0x00, 0x00, 0x00, 0x5e, 0x88, 0xaf, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x5f, 0x78, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x60, 0x68, 0x91, 0x00, 0x00, 0x00, 0x00, 0x00, 0x61, 0x58, 0x82, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x62, 0x48, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x63, 0x38, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x64, 0x28, 0x55, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x65, 0x18, 0x46, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x66, 0x11, 0x71, 0x80, 0x00, 0x00, 0x00, 0x00, 0x67, 0x01, 0x62, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x67, 0xf1, 0x53, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x68, 0xe1, 0x44, 0x80, 0x00, 0x00, 0x00, 0x00, 0x69, 0xd1, 0x35, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x6a, 0xc1, 0x26, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x6b, 0xb1, 0x17, 0x80, 0x00, 0x00, 0x00, 0x00, 0x6c, 0xa1, 0x08, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x6d, 0x90, 0xf9, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x6e, 0x80, 0xea, 0x80, 0x00, 0x00, 0x00, 0x00, 0x6f, 0x70, 0xdb, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x70, 0x6a, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x71, 0x59, 0xf8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x72, 0x49, 0xe9, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x73, 0x39, 0xda, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x74, 0x29, 0xcb, 0x00, 0x00, 0x00, 0x00, 0x00, 0x75, 0x19, 0xbc, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x76, 0x09, 0xad, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x76, 0xf9, 0x9e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x77, 0xe9, 0x8f, 
0x00, - 0x00, 0x00, 0x00, 0x00, 0x78, 0xd9, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x79, 0xc9, 0x71, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7a, 0xb9, 0x62, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x7b, 0xb2, 0x8d, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x7c, 0xa2, 0x7e, 0x80, 0x00, 0x00, 0x00, 0x00, 0x7d, 0x92, 0x6f, 0x80, - 0x00, 0x00, 0x00, 0x00, 0x7e, 0x82, 0x60, 0x80, 0x00, 0x00, 0x00, 0x00, - 0x7f, 0x72, 0x51, 0x80, 0x00, 0x02, 0x01, 0x02, 0x01, 0x02, 0x01, 0x02, - 0x01, 0x02, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, - 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, - 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, - 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, - 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, - 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, - 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, - 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, - 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, - 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, - 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, 0x03, 0x04, - 0x03, 0x04, 0x03, 0x00, 0x00, 0x8d, 0xc4, 0x00, 0x00, 0x00, 0x00, 0x9a, - 0xb0, 0x01, 0x04, 0x00, 0x00, 0x8c, 0xa0, 0x00, 0x09, 0x00, 0x00, 0x9a, - 0xb0, 0x01, 0x04, 0x00, 0x00, 0x8c, 0xa0, 0x00, 0x09, 0x4c, 0x4d, 0x54, - 0x00, 0x41, 0x45, 0x44, 0x54, 0x00, 0x41, 0x45, 0x53, 0x54, 0x00, 0x00, - 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a, 0x41, 0x45, - 0x53, 0x54, 0x2d, 0x31, 0x30, 0x41, 0x45, 0x44, 0x54, 0x2c, 0x4d, 0x31, - 0x30, 0x2e, 0x31, 0x2e, 0x30, 0x2c, 0x4d, 0x34, 0x2e, 0x31, 0x2e, 0x30, - 0x2f, 0x33, 0x0a -}; -unsigned int Australia_Sydney_len = 2223; diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/time.cc b/TMessagesProj/jni/voip/webrtc/absl/time/time.cc index 1ec2026e25..7256a699d2 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/time.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/time/time.cc @@ -297,7 +297,7 @@ timespec ToTimespec(Time t) { timespec ts; absl::Duration d = time_internal::ToUnixDuration(t); if (!time_internal::IsInfiniteDuration(d)) { - ts.tv_sec = time_internal::GetRepHi(d); + ts.tv_sec = static_cast(time_internal::GetRepHi(d)); if (ts.tv_sec == time_internal::GetRepHi(d)) { // no time_t narrowing ts.tv_nsec = time_internal::GetRepLo(d) / 4; // floor return ts; @@ -316,7 +316,7 @@ timespec ToTimespec(Time t) { timeval ToTimeval(Time t) { timeval tv; timespec ts = absl::ToTimespec(t); - tv.tv_sec = ts.tv_sec; + tv.tv_sec = static_cast(ts.tv_sec); if (tv.tv_sec != ts.tv_sec) { // narrowing if (ts.tv_sec < 0) { tv.tv_sec = std::numeric_limits::min(); diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/time.h b/TMessagesProj/jni/voip/webrtc/absl/time/time.h index 61fa159b9e..11796b4f0c 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/time.h +++ b/TMessagesProj/jni/voip/webrtc/absl/time/time.h @@ -162,7 +162,7 @@ class Duration { constexpr Duration() : rep_hi_(0), rep_lo_(0) {} // zero-length duration // Copyable. -#if !defined(__clang__) && defined(_MSC_VER) && _MSC_VER < 1910 +#if !defined(__clang__) && defined(_MSC_VER) && _MSC_VER < 1930 // Explicitly defining the constexpr copy constructor avoids an MSVC bug. 
constexpr Duration(const Duration& d) : rep_hi_(d.rep_hi_), rep_lo_(d.rep_lo_) {} @@ -495,7 +495,7 @@ ABSL_ATTRIBUTE_PURE_FUNCTION int64_t ToInt64Seconds(Duration d); ABSL_ATTRIBUTE_PURE_FUNCTION int64_t ToInt64Minutes(Duration d); ABSL_ATTRIBUTE_PURE_FUNCTION int64_t ToInt64Hours(Duration d); -// ToDoubleNanoSeconds() +// ToDoubleNanoseconds() // ToDoubleMicroseconds() // ToDoubleMilliseconds() // ToDoubleSeconds() @@ -579,7 +579,7 @@ bool ParseDuration(absl::string_view dur_string, Duration* d); // AbslParseFlag() // -// Parses a command-line flag string representation `text` into a a Duration +// Parses a command-line flag string representation `text` into a Duration // value. Duration flags must be specified in a format that is valid input for // `absl::ParseDuration()`. bool AbslParseFlag(absl::string_view text, Duration* dst, std::string* error); @@ -750,23 +750,24 @@ constexpr Time UnixEpoch() { return Time(); } constexpr Time UniversalEpoch() { // 719162 is the number of days from 0001-01-01 to 1970-01-01, // assuming the Gregorian calendar. - return Time(time_internal::MakeDuration(-24 * 719162 * int64_t{3600}, 0U)); + return Time( + time_internal::MakeDuration(-24 * 719162 * int64_t{3600}, uint32_t{0})); } // InfiniteFuture() // // Returns an `absl::Time` that is infinitely far in the future. constexpr Time InfiniteFuture() { - return Time( - time_internal::MakeDuration((std::numeric_limits::max)(), ~0U)); + return Time(time_internal::MakeDuration((std::numeric_limits::max)(), + ~uint32_t{0})); } // InfinitePast() // // Returns an `absl::Time` that is infinitely far in the past. constexpr Time InfinitePast() { - return Time( - time_internal::MakeDuration((std::numeric_limits::min)(), ~0U)); + return Time(time_internal::MakeDuration((std::numeric_limits::min)(), + ~uint32_t{0})); } // FromUnixNanos() @@ -1422,14 +1423,17 @@ constexpr int64_t GetRepHi(Duration d) { return d.rep_hi_; } constexpr uint32_t GetRepLo(Duration d) { return d.rep_lo_; } // Returns true iff d is positive or negative infinity. -constexpr bool IsInfiniteDuration(Duration d) { return GetRepLo(d) == ~0U; } +constexpr bool IsInfiniteDuration(Duration d) { + return GetRepLo(d) == ~uint32_t{0}; +} // Returns an infinite Duration with the opposite sign. // REQUIRES: IsInfiniteDuration(d) constexpr Duration OppositeInfinity(Duration d) { return GetRepHi(d) < 0 - ? MakeDuration((std::numeric_limits::max)(), ~0U) - : MakeDuration((std::numeric_limits::min)(), ~0U); + ? MakeDuration((std::numeric_limits::max)(), ~uint32_t{0}) + : MakeDuration((std::numeric_limits::min)(), + ~uint32_t{0}); } // Returns (-n)-1 (equivalently -(n+1)) without avoidable overflow. 
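A note on the time.h hunks above: they replace `~0U` with `~uint32_t{0}` wherever the infinite-duration sentinel is built or recognised. On the platforms this code targets both spellings produce an all-ones 32-bit value; the new one simply states the width and type explicitly instead of leaning on `unsigned int`. Below is a minimal standalone sketch of that sentinel pattern, written against plain <cstdint> rather than Abseil's internals; all names in it are illustrative, not taken from the patch.

#include <cstdint>
#include <limits>

// Mirrors the pattern in the hunks above: an "infinite" duration is encoded as
// an extreme hi part plus an all-ones 32-bit lo part.
constexpr std::int64_t kInfiniteHi = (std::numeric_limits<std::int64_t>::max)();
constexpr std::uint32_t kInfiniteLo = ~std::uint32_t{0};  // 0xFFFFFFFF

// The lo part alone is enough to identify the sentinel, which is what the
// IsInfiniteDuration() change above relies on.
static_assert(kInfiniteHi > 0, "hi part is the extreme int64_t value");
static_assert(kInfiniteLo == 0xFFFFFFFFu, "all-ones lo part marks infinity");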
@@ -1568,7 +1572,7 @@ constexpr Duration operator-(Duration d) { constexpr Duration InfiniteDuration() { return time_internal::MakeDuration((std::numeric_limits::max)(), - ~0U); + ~uint32_t{0}); } constexpr Duration FromChrono(const std::chrono::nanoseconds& d) { diff --git a/TMessagesProj/jni/voip/webrtc/absl/time/time_test.cc b/TMessagesProj/jni/voip/webrtc/absl/time/time_test.cc index cde9423feb..d235e9ad0a 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/time/time_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/time/time_test.cc @@ -377,6 +377,11 @@ TEST(Time, FloorConversion) { } TEST(Time, RoundtripConversion) { +#if defined(ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT) && \ + ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT + GTEST_SKIP(); +#endif + #define TEST_CONVERSION_ROUND_TRIP(SOURCE, FROM, TO, MATCHER) \ EXPECT_THAT(TO(FROM(SOURCE)), MATCHER(SOURCE)) @@ -558,6 +563,11 @@ TEST(Time, FromChrono) { } TEST(Time, ToChronoTime) { +#if defined(ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT) && \ + ABSL_SKIP_TIME_TESTS_BROKEN_ON_MSVC_OPT + GTEST_SKIP(); +#endif + EXPECT_EQ(std::chrono::system_clock::from_time_t(-1), absl::ToChronoTime(absl::FromTimeT(-1))); EXPECT_EQ(std::chrono::system_clock::from_time_t(0), diff --git a/TMessagesProj/jni/voip/webrtc/absl/types/any_test.cc b/TMessagesProj/jni/voip/webrtc/absl/types/any_test.cc index 70e4ba22b1..d382b927c2 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/types/any_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/types/any_test.cc @@ -754,26 +754,23 @@ TEST(AnyTest, FailedCopy) { // Test the guarantees regarding exceptions in emplace. TEST(AnyTest, FailedEmplace) { - { - BadCopyable bad; - absl::any target; - ABSL_ANY_TEST_EXPECT_BAD_COPY(target.emplace(bad)); - } + BadCopyable bad; + absl::any target; + ABSL_ANY_TEST_EXPECT_BAD_COPY(target.emplace(bad)); +} - { - BadCopyable bad; - absl::any target(absl::in_place_type); - ABSL_ANY_TEST_EXPECT_BAD_COPY(target.emplace(bad)); -#if defined(ABSL_USES_STD_ANY) && defined(__GLIBCXX__) - // libstdc++ std::any::emplace() implementation (as of 7.2) has a bug: if an - // exception is thrown, *this contains a value. -#define ABSL_GLIBCXX_ANY_EMPLACE_EXCEPTION_BUG 1 -#endif -#if defined(ABSL_HAVE_EXCEPTIONS) && \ - !defined(ABSL_GLIBCXX_ANY_EMPLACE_EXCEPTION_BUG) - EXPECT_FALSE(target.has_value()); +// GCC and Clang have a bug here. +// Ine some cases, the exception seems to be thrown at the wrong time, and +// target may contain a value. 
+#ifdef __GNUC__ +TEST(AnyTest, DISABLED_FailedEmplaceInPlace) { +#else +TEST(AnyTest, FailedEmplaceInPlace) { #endif - } + BadCopyable bad; + absl::any target(absl::in_place_type); + ABSL_ANY_TEST_EXPECT_BAD_COPY(target.emplace(bad)); + EXPECT_FALSE(target.has_value()); } } // namespace diff --git a/TMessagesProj/jni/voip/webrtc/absl/types/internal/conformance_profile.h b/TMessagesProj/jni/voip/webrtc/absl/types/internal/conformance_profile.h index cf64ff4fcd..37b017db47 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/types/internal/conformance_profile.h +++ b/TMessagesProj/jni/voip/webrtc/absl/types/internal/conformance_profile.h @@ -719,6 +719,7 @@ struct SyntacticConformanceProfileOf { type##_support); \ ABSL_INTERNAL_CONFORMANCE_TESTING_DATA_MEMBER_DEF_IMPL(bool, is_##type) +#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL ABSL_INTERNAL_CONFORMANCE_TESTING_DATA_MEMBER_DEF(default_constructible); ABSL_INTERNAL_CONFORMANCE_TESTING_DATA_MEMBER_DEF(move_constructible); ABSL_INTERNAL_CONFORMANCE_TESTING_DATA_MEMBER_DEF(copy_constructible); @@ -733,6 +734,7 @@ ABSL_INTERNAL_CONFORMANCE_TESTING_DATA_MEMBER_DEF(greater_equal_comparable); ABSL_INTERNAL_CONFORMANCE_TESTING_DATA_MEMBER_DEF(greater_than_comparable); ABSL_INTERNAL_CONFORMANCE_TESTING_DATA_MEMBER_DEF(swappable); ABSL_INTERNAL_CONFORMANCE_TESTING_DATA_MEMBER_DEF(hashable); +#endif #undef ABSL_INTERNAL_CONFORMANCE_TESTING_DATA_MEMBER_DEF #undef ABSL_INTERNAL_CONFORMANCE_TESTING_DATA_MEMBER_DEF_IMPL diff --git a/TMessagesProj/jni/voip/webrtc/absl/types/internal/optional.h b/TMessagesProj/jni/voip/webrtc/absl/types/internal/optional.h index 92932b6001..6ed0c6699c 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/types/internal/optional.h +++ b/TMessagesProj/jni/voip/webrtc/absl/types/internal/optional.h @@ -91,7 +91,15 @@ class optional_data_dtor_base { void destruct() noexcept { if (engaged_) { + // `data_` must be initialized if `engaged_` is true. +#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0) +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wmaybe-uninitialized" +#endif data_.~T(); +#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0) +#pragma GCC diagnostic pop +#endif engaged_ = false; } } diff --git a/TMessagesProj/jni/voip/webrtc/absl/types/internal/span.h b/TMessagesProj/jni/voip/webrtc/absl/types/internal/span.h index 112612f4bd..d653bb2c0c 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/types/internal/span.h +++ b/TMessagesProj/jni/voip/webrtc/absl/types/internal/span.h @@ -28,10 +28,10 @@ namespace absl { ABSL_NAMESPACE_BEGIN -namespace span_internal { -// A constexpr min function -constexpr size_t Min(size_t a, size_t b) noexcept { return a < b ? a : b; } +template +class Span; +namespace span_internal { // Wrappers for access to container data pointers. template constexpr auto GetDataImpl(C& c, char) noexcept // NOLINT(runtime/references) @@ -121,6 +121,36 @@ struct IsConvertible : IsConvertibleHelper::type {}; template using EnableIfConvertibleTo = typename std::enable_if::value>::type; + +// IsView is true for types where the return type of .data() is the same for +// mutable and const instances. This isn't foolproof, but it's only used to +// enable a compiler warning. 
+template +struct IsView { + static constexpr bool value = false; +}; + +template +struct IsView< + T, absl::void_t()))>, + absl::void_t()))>> { + private: + using Container = std::remove_const_t; + using ConstData = + decltype(span_internal::GetData(std::declval())); + using MutData = decltype(span_internal::GetData(std::declval())); + public: + static constexpr bool value = std::is_same::value; +}; + +// These enablers result in 'int' so they can be used as typenames or defaults +// in template paramters lists. +template +using EnableIfIsView = std::enable_if_t::value, int>; + +template +using EnableIfNotIsView = std::enable_if_t::value, int>; + } // namespace span_internal ABSL_NAMESPACE_END } // namespace absl diff --git a/TMessagesProj/jni/voip/webrtc/absl/types/internal/variant.h b/TMessagesProj/jni/voip/webrtc/absl/types/internal/variant.h index 772008c74e..c82ded44f8 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/types/internal/variant.h +++ b/TMessagesProj/jni/voip/webrtc/absl/types/internal/variant.h @@ -16,8 +16,8 @@ // separate file to avoid cluttering the top of the API header with // implementation details. -#ifndef ABSL_TYPES_variant_internal_H_ -#define ABSL_TYPES_variant_internal_H_ +#ifndef ABSL_TYPES_INTERNAL_VARIANT_H_ +#define ABSL_TYPES_INTERNAL_VARIANT_H_ #include #include @@ -449,7 +449,7 @@ struct FlattenIndices; template struct FlattenIndices { - template + template static constexpr std::size_t Run(std::size_t head, SizeType... tail) { return head + HeadSize * FlattenIndices::Run(tail...); } @@ -498,8 +498,8 @@ struct VisitIndicesVariadicImpl, EndIndices...> { }; template - static VisitIndicesResultT Run( - Op&& op, SizeType... i) { + static VisitIndicesResultT Run(Op&& op, + SizeType... i) { return VisitIndicesSwitch::value>::Run( FlattenedOp{absl::forward(op)}, FlattenIndices<(EndIndices + std::size_t{1})...>::Run( @@ -683,13 +683,13 @@ struct VariantCoreAccess { variant_internal::IndexOfConstructedType; void operator()(SizeT /*old_i*/ - ) const { + ) const { Access(*left) = absl::forward(other); } template void operator()(SizeT /*old_i*/ - ) const { + ) const { using New = typename absl::variant_alternative::type; if (std::is_nothrow_constructible::value || @@ -868,18 +868,6 @@ struct IsNeitherSelfNorInPlace> : std::false_type {}; template struct IsNeitherSelfNorInPlace> : std::false_type {}; -template -struct ConversionIsPossibleImpl : std::false_type {}; - -template -struct ConversionIsPossibleImpl< - Variant, T, - void_t::Run(std::declval(), {}))>> - : std::true_type {}; - -template -struct ConversionIsPossible : ConversionIsPossibleImpl::type {}; - template struct IndexOfConstructedType< Variant, T, @@ -1151,16 +1139,16 @@ struct VariantHelper> { // Type metafunction which returns the element type selected if // OverloadSet::Overload() is well-formed when called with argument type U. template - using BestMatch = decltype( - variant_internal::OverloadSet::Overload(std::declval())); + using BestMatch = decltype(variant_internal::OverloadSet::Overload( + std::declval())); // Type metafunction which returns true if OverloadSet::Overload() is // well-formed when called with argument type U. // CanAccept can't be just an alias because there is a MSVC bug on parameter // pack expansion involving decltype. 
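The EnableIfIsView/EnableIfNotIsView aliases above resolve to `int` precisely so they can sit in a template parameter list as a defaulted non-type parameter, letting two otherwise identical templates coexist (span.h uses this further down to add view-only constructor overloads). A self-contained sketch of that enabler idiom; ViewLike and Describe are illustrative names, not part of the patch:

#include <string_view>
#include <type_traits>
#include <vector>

template <typename T>
struct ViewLike : std::false_type {};
template <>
struct ViewLike<std::string_view> : std::true_type {};

// The int-valued enable_if lets two otherwise identical overloads coexist;
// which one participates in overload resolution depends on ViewLike<T>.
template <typename T, std::enable_if_t<ViewLike<T>::value, int> = 0>
const char* Describe(const T&) { return "view"; }
template <typename T, std::enable_if_t<!ViewLike<T>::value, int> = 0>
const char* Describe(const T&) { return "owning"; }

// Usage: Describe(std::string_view{"x"}) yields "view";
//        Describe(std::vector<int>{})    yields "owning".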
template - struct CanAccept : - std::integral_constant>::value> {}; + struct CanAccept + : std::integral_constant>::value> {}; // Type metafunction which returns true if Other is an instantiation of // variant, and variants's converting constructor from Other will be @@ -1183,8 +1171,8 @@ struct TrivialMoveOnly { // A union's defaulted copy/move constructor is deleted if any variant member's // copy/move constructor is nontrivial. template -struct IsTriviallyMoveConstructible: - std::is_move_constructible> {}; +struct IsTriviallyMoveConstructible + : std::is_move_constructible> {}; // To guarantee triviality of all special-member functions that can be trivial, // we use a chain of conditional bases for each one. @@ -1419,14 +1407,14 @@ class VariantMoveAssignBaseNontrivial : protected VariantCopyBase { VariantMoveAssignBaseNontrivial& operator=( VariantMoveAssignBaseNontrivial const&) = default; - VariantMoveAssignBaseNontrivial& - operator=(VariantMoveAssignBaseNontrivial&& other) noexcept( - absl::conjunction..., - std::is_nothrow_move_assignable...>::value) { - VisitIndices::Run( - VariantCoreAccess::MakeMoveAssignVisitor(this, &other), other.index_); - return *this; - } + VariantMoveAssignBaseNontrivial& + operator=(VariantMoveAssignBaseNontrivial&& other) noexcept( + absl::conjunction..., + std::is_nothrow_move_assignable...>::value) { + VisitIndices::Run( + VariantCoreAccess::MakeMoveAssignVisitor(this, &other), other.index_); + return *this; + } protected: using Base::index_; @@ -1450,12 +1438,12 @@ class VariantCopyAssignBaseNontrivial : protected VariantMoveAssignBase { VariantCopyAssignBaseNontrivial& operator=( VariantCopyAssignBaseNontrivial&&) = default; - VariantCopyAssignBaseNontrivial& operator=( - const VariantCopyAssignBaseNontrivial& other) { - VisitIndices::Run( - VariantCoreAccess::MakeCopyAssignVisitor(this, other), other.index_); - return *this; - } + VariantCopyAssignBaseNontrivial& operator=( + const VariantCopyAssignBaseNontrivial& other) { + VisitIndices::Run( + VariantCoreAccess::MakeCopyAssignVisitor(this, other), other.index_); + return *this; + } protected: using Base::index_; @@ -1643,4 +1631,4 @@ ABSL_NAMESPACE_END } // namespace absl #endif // !defined(ABSL_USES_STD_VARIANT) -#endif // ABSL_TYPES_variant_internal_H_ +#endif // ABSL_TYPES_INTERNAL_VARIANT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/absl/types/optional.h b/TMessagesProj/jni/voip/webrtc/absl/types/optional.h index 61540cfdb2..134b2aff42 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/types/optional.h +++ b/TMessagesProj/jni/voip/webrtc/absl/types/optional.h @@ -282,15 +282,16 @@ class optional : private optional_internal::optional_data, optional& operator=(optional&& src) = default; // Value assignment operators - template < - typename U = T, - typename = typename std::enable_if, typename std::decay::type>>, - absl::negation< - absl::conjunction, - std::is_same::type>>>, - std::is_constructible, std::is_assignable>::value>::type> + template , typename std::decay::type> >, + absl::negation, + std::is_same::type> > >, + std::is_constructible, + std::is_assignable >::value>::type> optional& operator=(U&& v) { this->assign(std::forward(v)); return *this; @@ -298,13 +299,14 @@ class optional : private optional_internal::optional_data, template < typename U, + int&..., // Workaround an internal compiler error in GCC 5 to 10. 
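The `int&...` in the optional assignment operator above is an unnamed non-type template parameter pack: it never deduces to anything and, per the comment, exists only to work around an internal compiler error in GCC 5 through 10. The same trick is also the standard way to keep two templates that differ only in their default template arguments from colliding, which this hedged sketch (with made-up Widget/Set names) shows:

#include <type_traits>

struct Widget {
  // Without a distinguishing template parameter these two declarations would
  // differ only in their default template arguments and collide.
  template <typename U,
            typename = typename std::enable_if<std::is_integral<U>::value>::type>
  void Set(U) { /* integral path */ }

  template <typename U,
            int&...,  // Dummy pack: stays empty, but makes the signature distinct.
            typename = typename std::enable_if<std::is_floating_point<U>::value>::type>
  void Set(U) { /* floating-point path */ }
};

// Usage: Widget{}.Set(1) selects the integral overload and Widget{}.Set(1.5)
// the floating-point one; the dummy pack deduces to empty in both calls.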
typename = typename std::enable_if>, + absl::negation >, std::is_constructible, std::is_assignable, absl::negation< optional_internal:: is_constructible_convertible_assignable_from_optional< - T, U>>>::value>::type> + T, U> > >::value>::type> optional& operator=(const optional& rhs) { if (rhs) { this->assign(*rhs); @@ -315,13 +317,14 @@ class optional : private optional_internal::optional_data, } template >, std::is_constructible, - std::is_assignable, + absl::negation >, + std::is_constructible, std::is_assignable, absl::negation< optional_internal:: is_constructible_convertible_assignable_from_optional< - T, U>>>::value>::type> + T, U> > >::value>::type> optional& operator=(optional&& rhs) { if (rhs) { this->assign(std::move(*rhs)); diff --git a/TMessagesProj/jni/voip/webrtc/absl/types/optional_test.cc b/TMessagesProj/jni/voip/webrtc/absl/types/optional_test.cc index 7ef142cb99..21653a903e 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/types/optional_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/types/optional_test.cc @@ -27,6 +27,37 @@ #include "absl/meta/type_traits.h" #include "absl/strings/string_view.h" +#if defined(__cplusplus) && __cplusplus >= 202002L +// In C++20, volatile-qualified return types are deprecated. +#define ABSL_VOLATILE_RETURN_TYPES_DEPRECATED 1 +#endif + +// The following types help test an internal compiler error in GCC5 though +// GCC10. The case OptionalTest.InternalCompilerErrorInGcc5ToGcc10 crashes the +// compiler without a workaround. This test case should remain at the beginning +// of the file as the internal compiler error is sensitive to other constructs +// in this file. +template +using GccIceHelper1 = T; +template +struct GccIceHelper2 {}; +template +class GccIce { + template &, U>> + GccIce& operator=(GccIceHelper2 const&) {} +}; + +TEST(OptionalTest, InternalCompilerErrorInGcc5ToGcc10) { + GccIce instantiate_ice_with_same_type_as_optional; + static_cast(instantiate_ice_with_same_type_as_optional); + absl::optional val1; + absl::optional val2; + val1 = val2; +} + struct Hashable {}; namespace std { @@ -205,6 +236,7 @@ TEST(optionalTest, CopyConstructor) { EXPECT_TRUE(opt42_copy); EXPECT_EQ(42, *opt42_copy); } +#if !defined(ABSL_VOLATILE_RETURN_TYPES_DEPRECATED) { absl::optional empty, opt42 = 42; absl::optional empty_copy(empty); @@ -213,6 +245,7 @@ TEST(optionalTest, CopyConstructor) { EXPECT_TRUE(opt42_copy); EXPECT_EQ(42, *opt42_copy); } +#endif // test copyablility EXPECT_TRUE(std::is_copy_constructible>::value); EXPECT_TRUE(std::is_copy_constructible>::value); @@ -224,18 +257,11 @@ TEST(optionalTest, CopyConstructor) { EXPECT_FALSE( absl::is_trivially_copy_constructible>::value); -#if defined(ABSL_USES_STD_OPTIONAL) && defined(__GLIBCXX__) - // libstdc++ std::optional implementation (as of 7.2) has a bug: when T is - // trivially copyable, optional is not trivially copyable (due to one of - // its base class is unconditionally nontrivial). -#define ABSL_GLIBCXX_OPTIONAL_TRIVIALITY_BUG 1 -#endif -#ifndef ABSL_GLIBCXX_OPTIONAL_TRIVIALITY_BUG EXPECT_TRUE( absl::is_trivially_copy_constructible>::value); EXPECT_TRUE( absl::is_trivially_copy_constructible>::value); -#ifndef _MSC_VER +#if !defined(_MSC_VER) && !defined(ABSL_VOLATILE_RETURN_TYPES_DEPRECATED) // See defect report "Trivial copy/move constructor for class with volatile // member" at // http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_defects.html#2094 @@ -244,8 +270,7 @@ TEST(optionalTest, CopyConstructor) { // Also a cv-qualified scalar type should be trivially copyable. 
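The optional_test.cc changes above compile the volatile-heavy assertions only when ABSL_VOLATILE_RETURN_TYPES_DEPRECATED is not set, because C++20 deprecates volatile-qualified return types (among other volatile uses). A minimal sketch of that gating pattern; the macro is renamed here and the Sample struct is illustrative:

#if defined(__cplusplus) && __cplusplus >= 202002L
// Mirrors the test-local macro the patch defines at the top of the file.
#define EXAMPLE_VOLATILE_RETURN_TYPES_DEPRECATED 1
#endif

struct Sample {
#if !defined(EXAMPLE_VOLATILE_RETURN_TYPES_DEPRECATED)
  // Only built on pre-C++20 dialects, where a volatile-qualified return type
  // does not trigger a deprecation warning.
  volatile int Get() const { return value; }
#endif
  int value = 42;
};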
EXPECT_TRUE(absl::is_trivially_copy_constructible< absl::optional>::value); -#endif // _MSC_VER -#endif // ABSL_GLIBCXX_OPTIONAL_TRIVIALITY_BUG +#endif // !defined(_MSC_VER) && !defined(ABSL_VOLATILE_RETURN_TYPES_DEPRECATED) // constexpr copy constructor for trivially copyable types { @@ -275,17 +300,10 @@ TEST(optionalTest, CopyConstructor) { EXPECT_TRUE(absl::is_trivially_copy_constructible< absl::optional>::value); #endif - // When testing with VS 2017 15.3, there seems to be a bug in MSVC - // std::optional when T is volatile-qualified. So skipping this test. - // Bug report: - // https://connect.microsoft.com/VisualStudio/feedback/details/3142534 -#if defined(ABSL_USES_STD_OPTIONAL) && defined(_MSC_VER) && _MSC_VER >= 1911 -#define ABSL_MSVC_OPTIONAL_VOLATILE_COPY_BUG 1 -#endif -#ifndef ABSL_MSVC_OPTIONAL_VOLATILE_COPY_BUG +#if !defined(ABSL_VOLATILE_RETURN_TYPES_DEPRECATED) EXPECT_FALSE(std::is_copy_constructible< absl::optional>::value); -#endif +#endif // !defined(ABSL_VOLATILE_RETURN_TYPES_DEPRECATED) } } @@ -305,11 +323,9 @@ TEST(optionalTest, MoveConstructor) { EXPECT_FALSE(std::is_move_constructible>::value); // test noexcept EXPECT_TRUE(std::is_nothrow_move_constructible>::value); -#ifndef ABSL_USES_STD_OPTIONAL EXPECT_EQ( absl::default_allocator_is_nothrow::value, std::is_nothrow_move_constructible>::value); -#endif EXPECT_TRUE(std::is_nothrow_move_constructible< absl::optional>::value); } @@ -638,8 +654,7 @@ TEST(optionalTest, CopyAssignment) { EXPECT_TRUE(absl::is_copy_assignable::value); EXPECT_FALSE(absl::is_trivially_copy_assignable::value); - // std::optional doesn't support volatile nontrivial types. -#ifndef ABSL_USES_STD_OPTIONAL +#if !defined(ABSL_VOLATILE_RETURN_TYPES_DEPRECATED) { StructorListener listener; Listenable::listener = &listener; @@ -658,7 +673,7 @@ TEST(optionalTest, CopyAssignment) { EXPECT_EQ(1, listener.destruct); EXPECT_EQ(1, listener.volatile_copy_assign); } -#endif // ABSL_USES_STD_OPTIONAL +#endif // !defined(ABSL_VOLATILE_RETURN_TYPES_DEPRECATED) } TEST(optionalTest, MoveAssignment) { @@ -681,8 +696,7 @@ TEST(optionalTest, MoveAssignment) { EXPECT_EQ(1, listener.destruct); EXPECT_EQ(1, listener.move_assign); } - // std::optional doesn't support volatile nontrivial types. 
-#ifndef ABSL_USES_STD_OPTIONAL +#if !defined(ABSL_VOLATILE_RETURN_TYPES_DEPRECATED) { StructorListener listener; Listenable::listener = &listener; @@ -702,7 +716,7 @@ TEST(optionalTest, MoveAssignment) { EXPECT_EQ(1, listener.destruct); EXPECT_EQ(1, listener.volatile_move_assign); } -#endif // ABSL_USES_STD_OPTIONAL +#endif // !defined(ABSL_VOLATILE_RETURN_TYPES_DEPRECATED) EXPECT_FALSE(absl::is_move_assignable>::value); EXPECT_TRUE(absl::is_move_assignable>::value); EXPECT_TRUE(absl::is_move_assignable>::value); @@ -974,8 +988,8 @@ TEST(optionalTest, PointerStuff) { EXPECT_EQ("foo", *opt); const auto& opt_const = opt; EXPECT_EQ("foo", *opt_const); - EXPECT_EQ(opt->size(), 3); - EXPECT_EQ(opt_const->size(), 3); + EXPECT_EQ(opt->size(), 3u); + EXPECT_EQ(opt_const->size(), 3u); constexpr absl::optional opt1(1); static_assert((*opt1).x == ConstexprType::kCtorInt, ""); @@ -1038,6 +1052,7 @@ TEST(optionalTest, Value) { #endif EXPECT_EQ("c&&", TypeQuals(OC(absl::in_place, "xvalue_c").value())); +#if !defined(ABSL_VOLATILE_RETURN_TYPES_DEPRECATED) // test on volatile type using OV = absl::optional; OV lvalue_v(absl::in_place, 42); @@ -1045,6 +1060,7 @@ TEST(optionalTest, Value) { EXPECT_EQ(42, OV(42).value()); EXPECT_TRUE((std::is_same::value)); EXPECT_TRUE((std::is_same::value)); +#endif // !defined(ABSL_VOLATILE_RETURN_TYPES_DEPRECATED) // test exception throw on value() absl::optional empty; @@ -1087,6 +1103,7 @@ TEST(optionalTest, DerefOperator) { #endif EXPECT_EQ("c&&", TypeQuals(*OC(absl::in_place, "xvalue_c"))); +#if !defined(ABSL_VOLATILE_RETURN_TYPES_DEPRECATED) // test on volatile type using OV = absl::optional; OV lvalue_v(absl::in_place, 42); @@ -1094,6 +1111,7 @@ TEST(optionalTest, DerefOperator) { EXPECT_EQ(42, *OV(42)); EXPECT_TRUE((std::is_same::value)); EXPECT_TRUE((std::is_same::value)); +#endif // !defined(ABSL_VOLATILE_RETURN_TYPES_DEPRECATED) constexpr absl::optional opt1(1); static_assert(*opt1 == 1, ""); @@ -1505,7 +1523,7 @@ TEST(optionalTest, Hash) { for (int i = 0; i < 100; ++i) { hashcodes.insert(hash(i)); } - EXPECT_GT(hashcodes.size(), 90); + EXPECT_GT(hashcodes.size(), 90u); static_assert(is_hash_enabled_for>::value, ""); static_assert(is_hash_enabled_for>::value, ""); @@ -1558,12 +1576,10 @@ TEST(optionalTest, NoExcept) { static_assert( std::is_nothrow_move_constructible>::value, ""); -#ifndef ABSL_USES_STD_OPTIONAL static_assert(absl::default_allocator_is_nothrow::value == std::is_nothrow_move_constructible< absl::optional>::value, ""); -#endif std::vector> v; for (int i = 0; i < 10; ++i) v.emplace_back(); } diff --git a/TMessagesProj/jni/voip/webrtc/absl/types/span.h b/TMessagesProj/jni/voip/webrtc/absl/types/span.h index fdfbd77c07..d7bdbb1fb5 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/types/span.h +++ b/TMessagesProj/jni/voip/webrtc/absl/types/span.h @@ -60,6 +60,7 @@ #include #include +#include "absl/base/attributes.h" #include "absl/base/internal/throw_delegate.h" #include "absl/base/macros.h" #include "absl/base/optimization.h" @@ -160,12 +161,12 @@ class Span { // Used to SFINAE-enable a function when the slice elements are const. template - using EnableIfConstView = + using EnableIfValueIsConst = typename std::enable_if::value, U>::type; // Used to SFINAE-enable a function when the slice elements are mutable. template - using EnableIfMutableView = + using EnableIfValueIsMutable = typename std::enable_if::value, U>::type; public: @@ -196,13 +197,34 @@ class Span { // Explicit reference constructor for a mutable `Span` type. 
Can be // replaced with MakeSpan() to infer the type parameter. template , - typename = EnableIfMutableView> - explicit Span(V& v) noexcept // NOLINT(runtime/references) + typename = EnableIfValueIsMutable, + typename = span_internal::EnableIfNotIsView> + explicit Span( + V& v + ABSL_ATTRIBUTE_LIFETIME_BOUND) noexcept // NOLINT(runtime/references) : Span(span_internal::GetData(v), v.size()) {} // Implicit reference constructor for a read-only `Span` type template , - typename = EnableIfConstView> + typename = EnableIfValueIsConst, + typename = span_internal::EnableIfNotIsView> + constexpr Span( + const V& v + ABSL_ATTRIBUTE_LIFETIME_BOUND) noexcept // NOLINT(runtime/explicit) + : Span(span_internal::GetData(v), v.size()) {} + + // Overloads of the above two functions that are only enabled for view types. + // This is so we can drop the ABSL_ATTRIBUTE_LIFETIME_BOUND annotation. These + // overloads must be made unique by using a different template parameter list + // (hence the = 0 for the IsView enabler). + template , + typename = EnableIfValueIsMutable, + span_internal::EnableIfIsView = 0> + explicit Span(V& v) noexcept // NOLINT(runtime/references) + : Span(span_internal::GetData(v), v.size()) {} + template , + typename = EnableIfValueIsConst, + span_internal::EnableIfIsView = 0> constexpr Span(const V& v) noexcept // NOLINT(runtime/explicit) : Span(span_internal::GetData(v), v.size()) {} @@ -242,7 +264,7 @@ class Span { // Process(ints); // template > + typename = EnableIfValueIsConst> Span(std::initializer_list v ABSL_ATTRIBUTE_LIFETIME_BOUND) noexcept // NOLINT(runtime/explicit) : Span(v.begin(), v.size()) {} @@ -398,7 +420,7 @@ class Span { // absl::MakeSpan(vec).subspan(5); // throws std::out_of_range constexpr Span subspan(size_type pos = 0, size_type len = npos) const { return (pos <= size()) - ? Span(data() + pos, span_internal::Min(size() - pos, len)) + ? 
Span(data() + pos, (std::min)(size() - pos, len)) : (base_internal::ThrowStdOutOfRange("pos > size()"), Span()); } diff --git a/TMessagesProj/jni/voip/webrtc/absl/types/variant_test.cc b/TMessagesProj/jni/voip/webrtc/absl/types/variant_test.cc index cf237334da..4cd5b7a358 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/types/variant_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/types/variant_test.cc @@ -281,7 +281,7 @@ TEST(VariantTest, TestDefaultConstructor) { using X = variant; constexpr variant x{}; ASSERT_FALSE(x.valueless_by_exception()); - ASSERT_EQ(0, x.index()); + ASSERT_EQ(0u, x.index()); EXPECT_EQ(0, absl::get<0>(x)); EXPECT_TRUE(std::is_nothrow_default_constructible::value); } @@ -290,7 +290,7 @@ TEST(VariantTest, TestDefaultConstructor) { using X = variant; X x{}; ASSERT_FALSE(x.valueless_by_exception()); - ASSERT_EQ(0, x.index()); + ASSERT_EQ(0u, x.index()); EXPECT_EQ(5, absl::get<0>(x).value); EXPECT_FALSE(std::is_nothrow_default_constructible::value); } @@ -299,7 +299,7 @@ TEST(VariantTest, TestDefaultConstructor) { using X = variant; X x{}; ASSERT_FALSE(x.valueless_by_exception()); - ASSERT_EQ(0, x.index()); + ASSERT_EQ(0u, x.index()); EXPECT_EQ(0, absl::get<0>(x)); EXPECT_TRUE(std::is_nothrow_default_constructible::value); } @@ -308,7 +308,7 @@ TEST(VariantTest, TestDefaultConstructor) { using X = variant; X x{}; ASSERT_FALSE(x.valueless_by_exception()); - ASSERT_EQ(0, x.index()); + ASSERT_EQ(0u, x.index()); EXPECT_EQ(5, absl::get<0>(x).value); EXPECT_FALSE(std::is_nothrow_default_constructible::value); } @@ -480,7 +480,7 @@ TEST(VariantTest, InPlaceType) { ASSERT_TRUE(absl::holds_alternative(v2)); EXPECT_EQ("ABC", absl::get(v2)); - Var v3(in_place_type_t(), "ABC", 2); + Var v3(in_place_type_t(), "ABC", 2u); ASSERT_TRUE(absl::holds_alternative(v3)); EXPECT_EQ("AB", absl::get(v3)); @@ -503,7 +503,7 @@ TEST(VariantTest, InPlaceTypeVariableTemplate) { ASSERT_TRUE(absl::holds_alternative(v2)); EXPECT_EQ("ABC", absl::get(v2)); - Var v3(in_place_type, "ABC", 2); + Var v3(in_place_type, "ABC", 2u); ASSERT_TRUE(absl::holds_alternative(v3)); EXPECT_EQ("AB", absl::get(v3)); @@ -544,7 +544,7 @@ TEST(VariantTest, InPlaceIndex) { ASSERT_TRUE(absl::holds_alternative(v2)); EXPECT_EQ("ABC", absl::get(v2)); - Var v3(in_place_index_t<1>(), "ABC", 2); + Var v3(in_place_index_t<1>(), "ABC", 2u); ASSERT_TRUE(absl::holds_alternative(v3)); EXPECT_EQ("AB", absl::get(v3)); @@ -571,7 +571,7 @@ TEST(VariantTest, InPlaceIndexVariableTemplate) { ASSERT_TRUE(absl::holds_alternative(v2)); EXPECT_EQ("ABC", absl::get(v2)); - Var v3(in_place_index<1>, "ABC", 2); + Var v3(in_place_index<1>, "ABC", 2u); ASSERT_TRUE(absl::holds_alternative(v3)); EXPECT_EQ("AB", absl::get(v3)); @@ -688,11 +688,11 @@ TEST(VariantTest, TestSelfAssignment) { EXPECT_EQ(long_str, foo); variant so = long_str; - ASSERT_EQ(1, so.index()); + ASSERT_EQ(1u, so.index()); EXPECT_EQ(long_str, absl::get<1>(so)); so = *&so; - ASSERT_EQ(1, so.index()); + ASSERT_EQ(1u, so.index()); EXPECT_EQ(long_str, absl::get<1>(so)); } @@ -968,16 +968,16 @@ TEST(VariantTest, Index) { using Var = variant; Var v = 1; - EXPECT_EQ(0, v.index()); + EXPECT_EQ(0u, v.index()); v = "str"; - EXPECT_EQ(1, v.index()); + EXPECT_EQ(1u, v.index()); v = 0.; - EXPECT_EQ(2, v.index()); + EXPECT_EQ(2u, v.index()); Var v2 = v; - EXPECT_EQ(2, v2.index()); + EXPECT_EQ(2u, v2.index()); v2.emplace(3); - EXPECT_EQ(0, v2.index()); + EXPECT_EQ(0u, v2.index()); } TEST(VariantTest, NotValuelessByException) { @@ -1002,11 +1002,11 @@ TEST(VariantTest, IndexValuelessByException) { 
using Var = variant; Var v(absl::in_place_index<0>); - EXPECT_EQ(0, v.index()); + EXPECT_EQ(0u, v.index()); ToValuelessByException(v); EXPECT_EQ(absl::variant_npos, v.index()); v = "str"; - EXPECT_EQ(1, v.index()); + EXPECT_EQ(1u, v.index()); } TEST(VariantTest, ValuelessByException) { @@ -1084,18 +1084,18 @@ TEST(VariantTest, MemberSwap) { TEST(VariantTest, VariantSize) { { using Size1Variant = absl::variant; - EXPECT_EQ(1, absl::variant_size::value); - EXPECT_EQ(1, absl::variant_size::value); - EXPECT_EQ(1, absl::variant_size::value); - EXPECT_EQ(1, absl::variant_size::value); + EXPECT_EQ(1u, absl::variant_size::value); + EXPECT_EQ(1u, absl::variant_size::value); + EXPECT_EQ(1u, absl::variant_size::value); + EXPECT_EQ(1u, absl::variant_size::value); } { using Size3Variant = absl::variant; - EXPECT_EQ(3, absl::variant_size::value); - EXPECT_EQ(3, absl::variant_size::value); - EXPECT_EQ(3, absl::variant_size::value); - EXPECT_EQ(3, absl::variant_size::value); + EXPECT_EQ(3u, absl::variant_size::value); + EXPECT_EQ(3u, absl::variant_size::value); + EXPECT_EQ(3u, absl::variant_size::value); + EXPECT_EQ(3u, absl::variant_size::value); } } @@ -1799,14 +1799,14 @@ TEST(VariantTest, VisitSimple) { EXPECT_EQ("B", piece); struct StrLen { - int operator()(const char* s) const { return strlen(s); } - int operator()(const std::string& s) const { return s.size(); } + size_t operator()(const char* s) const { return strlen(s); } + size_t operator()(const std::string& s) const { return s.size(); } }; v = "SomeStr"; - EXPECT_EQ(7, absl::visit(StrLen{}, v)); + EXPECT_EQ(7u, absl::visit(StrLen{}, v)); v = std::string("VeryLargeThisTime"); - EXPECT_EQ(17, absl::visit(StrLen{}, v)); + EXPECT_EQ(17u, absl::visit(StrLen{}, v)); } TEST(VariantTest, VisitRValue) { @@ -1979,7 +1979,7 @@ TEST(VariantTest, MonostateBasic) { TEST(VariantTest, VariantMonostateDefaultConstruction) { absl::variant var; - EXPECT_EQ(var.index(), 0); + EXPECT_EQ(var.index(), 0u); } //////////////////////////////// @@ -2100,7 +2100,7 @@ TEST(VariantTest, Hash) { for (int i = 0; i < 100; ++i) { hashcodes.insert(hash(i)); } - EXPECT_GT(hashcodes.size(), 90); + EXPECT_GT(hashcodes.size(), 90u); // test const-qualified static_assert(type_traits_internal::IsHashable>::value, @@ -2312,9 +2312,9 @@ TEST(VariantTest, TestRvalueConversion) { EXPECT_EQ(42, absl::get(variant2)); variant2 = - ConvertVariantTo>(variant(42)); + ConvertVariantTo>(variant(42u)); ASSERT_TRUE(absl::holds_alternative(variant2)); - EXPECT_EQ(42, absl::get(variant2)); + EXPECT_EQ(42u, absl::get(variant2)); #endif // !ABSL_USES_STD_VARIANT variant variant3( @@ -2361,10 +2361,10 @@ TEST(VariantTest, TestLvalueConversion) { ASSERT_TRUE(absl::holds_alternative(variant2)); EXPECT_EQ(42, absl::get(variant2)); - variant source6(42); + variant source6(42u); variant2 = ConvertVariantTo>(source6); ASSERT_TRUE(absl::holds_alternative(variant2)); - EXPECT_EQ(42, absl::get(variant2)); + EXPECT_EQ(42u, absl::get(variant2)); #endif variant source7((Convertible1())); @@ -2455,8 +2455,8 @@ TEST(VariantTest, TestRvalueConversionViaConvertVariantTo) { EXPECT_THAT(absl::get_if(&variant2), Pointee(42)); variant2 = - ConvertVariantTo>(variant(42)); - EXPECT_THAT(absl::get_if(&variant2), Pointee(42)); + ConvertVariantTo>(variant(42u)); + EXPECT_THAT(absl::get_if(&variant2), Pointee(42u)); #endif variant variant3( @@ -2499,9 +2499,9 @@ TEST(VariantTest, TestLvalueConversionViaConvertVariantTo) { ConvertVariantTo>(source5)); EXPECT_THAT(absl::get_if(&variant2), Pointee(42)); - variant source6(42); + 
variant source6(42u); variant2 = ConvertVariantTo>(source6); - EXPECT_THAT(absl::get_if(&variant2), Pointee(42)); + EXPECT_THAT(absl::get_if(&variant2), Pointee(42u)); #endif // !ABSL_USES_STD_VARIANT variant source7((Convertible1())); @@ -2570,7 +2570,7 @@ TEST(VariantTest, TestVectorOfMoveonlyVariant) { vec.reserve(3); auto another_vec = absl::move(vec); // As a sanity check, verify vector contents. - ASSERT_EQ(2, another_vec.size()); + ASSERT_EQ(2u, another_vec.size()); EXPECT_EQ(42, *absl::get>(another_vec[0])); EXPECT_EQ("Hello", absl::get(another_vec[1])); } diff --git a/TMessagesProj/jni/voip/webrtc/absl/utility/utility_test.cc b/TMessagesProj/jni/voip/webrtc/absl/utility/utility_test.cc index f044ad644a..2f0509aa4d 100644 --- a/TMessagesProj/jni/voip/webrtc/absl/utility/utility_test.cc +++ b/TMessagesProj/jni/voip/webrtc/absl/utility/utility_test.cc @@ -1,4 +1,4 @@ -// Copyright 2017 The Abseil Authors. +// Copyright 2022 The Abseil Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -14,10 +14,12 @@ #include "absl/utility/utility.h" +#include #include #include #include #include +#include #include #include "gmock/gmock.h" @@ -35,10 +37,10 @@ namespace { // Both the unused variables and the name length warnings are due to calls // to absl::make_index_sequence with very large values, creating very long type // names. The resulting warnings are so long they make build output unreadable. -#pragma warning( push ) -#pragma warning( disable : 4503 ) // decorated name length exceeded -#pragma warning( disable : 4101 ) // unreferenced local variable -#endif // _MSC_VER +#pragma warning(push) +#pragma warning(disable : 4503) // decorated name length exceeded +#pragma warning(disable : 4101) // unreferenced local variable +#endif // _MSC_VER using ::testing::ElementsAre; using ::testing::Pointee; @@ -227,8 +229,7 @@ TEST(ApplyTest, NonCopyableArgument) { } TEST(ApplyTest, NonCopyableResult) { - EXPECT_THAT(absl::apply(Factory, std::make_tuple(42)), - ::testing::Pointee(42)); + EXPECT_THAT(absl::apply(Factory, std::make_tuple(42)), Pointee(42)); } TEST(ApplyTest, VoidResult) { absl::apply(NoOp, std::tuple<>()); } @@ -373,4 +374,3 @@ TEST(MakeFromTupleTest, Pair) { } } // namespace - diff --git a/TMessagesProj/jni/voip/webrtc/api/OWNERS b/TMessagesProj/jni/voip/webrtc/api/OWNERS index 516ae17f5e..383ac8a3ed 100644 --- a/TMessagesProj/jni/voip/webrtc/api/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/api/OWNERS @@ -1,11 +1,12 @@ -crodbro@webrtc.org -deadbeef@webrtc.org hta@webrtc.org magjed@webrtc.org perkj@webrtc.org -tkchin@webrtc.org tommi@webrtc.org +# For approvals that absolutely must be done on US Pacific time +deadbeef@webrtc.org +tkchin@webrtc.org + per-file peer_connection*=hbos@webrtc.org per-file DEPS=mbonadei@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/api/async_dns_resolver.h b/TMessagesProj/jni/voip/webrtc/api/async_dns_resolver.h index 138503b59f..82d80de2c3 100644 --- a/TMessagesProj/jni/voip/webrtc/api/async_dns_resolver.h +++ b/TMessagesProj/jni/voip/webrtc/api/async_dns_resolver.h @@ -14,6 +14,7 @@ #include #include +#include "rtc_base/checks.h" #include "rtc_base/socket_address.h" #include "rtc_base/system/rtc_export.h" @@ -63,6 +64,10 @@ class RTC_EXPORT AsyncDnsResolverInterface { // Start address resolution of the hostname in `addr`. 
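The variant_test.cc and optional_test.cc edits above (0 to 0u, 3 to 3u, 90 to 90u, and the size_t-returning StrLen visitor) make both operands of EXPECT_EQ/EXPECT_GT unsigned, since index(), size() and variant_size report unsigned types and mixed-sign comparisons can trip -Wsign-compare under -Werror. A small hedged illustration; SizeOfSomething is a made-up function:

#include <cstddef>
#include "gtest/gtest.h"

std::size_t SizeOfSomething() { return 3; }  // Returns an unsigned type.

TEST(SignCompareExample, UseUnsignedLiterals) {
  // EXPECT_EQ(3, SizeOfSomething()) would compare int against size_t and can
  // emit -Wsign-compare; matching the literal's signedness avoids the warning.
  EXPECT_EQ(3u, SizeOfSomething());
}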
virtual void Start(const rtc::SocketAddress& addr, std::function callback) = 0; + // Start address resolution of the hostname in `addr` matching `family`. + virtual void Start(const rtc::SocketAddress& addr, + int family, + std::function callback) = 0; virtual const AsyncDnsResolverResult& result() const = 0; }; @@ -79,6 +84,14 @@ class AsyncDnsResolverFactoryInterface { virtual std::unique_ptr CreateAndResolve( const rtc::SocketAddress& addr, std::function callback) = 0; + // Creates an AsyncDnsResolver and starts resolving the name to an address + // matching the specified family. The callback will be called when resolution + // is finished. The callback will be called on the sequence that the caller + // runs on. + virtual std::unique_ptr CreateAndResolve( + const rtc::SocketAddress& addr, + int family, + std::function callback) = 0; // Creates an AsyncDnsResolver and does not start it. // For backwards compatibility, will be deprecated and removed. // One has to do a separate Start() call on the diff --git a/TMessagesProj/jni/voip/webrtc/api/audio/audio_frame.h b/TMessagesProj/jni/voip/webrtc/api/audio/audio_frame.h index 20b9d994b4..0f3ca80dd1 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio/audio_frame.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio/audio_frame.h @@ -18,7 +18,6 @@ #include "api/audio/channel_layout.h" #include "api/rtp_packet_infos.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -60,6 +59,9 @@ class AudioFrame { AudioFrame(); + AudioFrame(const AudioFrame&) = delete; + AudioFrame& operator=(const AudioFrame&) = delete; + // Resets all members to their default state. void Reset(); // Same as Reset(), but leaves mute state unchanged. Muting a frame requires @@ -166,8 +168,6 @@ class AudioFrame { // capture timestamp of a received frame is found in `packet_infos_`. // This timestamp MUST be based on the same clock as rtc::TimeMillis(). 
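The audio_frame.h hunk above (and audio_decoder.h below) replaces the RTC_DISALLOW_COPY_AND_ASSIGN macro with copy operations explicitly deleted in the public section, which also drops the rtc_base/constructor_magic.h include. A hedged before/after sketch with an illustrative class name:

// Before: macro-based, typically placed in the private section.
//   class Frame {
//    private:
//     RTC_DISALLOW_COPY_AND_ASSIGN(Frame);
//   };

// After: the intent is spelled out directly, next to the constructors.
class Frame {
 public:
  Frame() = default;
  Frame(const Frame&) = delete;
  Frame& operator=(const Frame&) = delete;
};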
absl::optional absolute_capture_timestamp_ms_; - - RTC_DISALLOW_COPY_AND_ASSIGN(AudioFrame); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h index c2ee797727..4b1c7fbc47 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h @@ -59,6 +59,7 @@ struct RTC_EXPORT EchoCanceller3Config { }; AlignmentMixing render_alignment_mixing = {false, true, 10000.f, true}; AlignmentMixing capture_alignment_mixing = {false, true, 10000.f, false}; + bool detect_pre_echo = true; } delay; struct Filter { @@ -112,7 +113,7 @@ struct RTC_EXPORT EchoCanceller3Config { bool echo_can_saturate = true; bool bounded_erl = false; bool erle_onset_compensation_in_dominant_nearend = false; - bool use_conservative_tail_frequency_response = false; + bool use_conservative_tail_frequency_response = true; } ep_strength; struct EchoAudibility { @@ -236,6 +237,13 @@ struct RTC_EXPORT EchoCanceller3Config { float floor_first_increase = 0.00001f; bool conservative_hf_suppression = false; } suppressor; + + struct MultiChannel { + bool detect_stereo_content = true; + float stereo_detection_threshold = 0.0f; + int stereo_detection_timeout_threshold_seconds = 300; + float stereo_detection_hysteresis_seconds = 2.0f; + } multi_channel; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config_json.cc b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config_json.cc index 71966c13b3..96e45ffe6d 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config_json.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config_json.cc @@ -220,6 +220,7 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, &cfg.delay.render_alignment_mixing); ReadParam(section, "capture_alignment_mixing", &cfg.delay.capture_alignment_mixing); + ReadParam(section, "detect_pre_echo", &cfg.delay.detect_pre_echo); } if (rtc::GetValueFromJsonObject(aec3_root, "filter", §ion)) { @@ -415,6 +416,17 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, ReadParam(section, "conservative_hf_suppression", &cfg.suppressor.conservative_hf_suppression); } + + if (rtc::GetValueFromJsonObject(aec3_root, "multi_channel", §ion)) { + ReadParam(section, "detect_stereo_content", + &cfg.multi_channel.detect_stereo_content); + ReadParam(section, "stereo_detection_threshold", + &cfg.multi_channel.stereo_detection_threshold); + ReadParam(section, "stereo_detection_timeout_threshold_seconds", + &cfg.multi_channel.stereo_detection_timeout_threshold_seconds); + ReadParam(section, "stereo_detection_hysteresis_seconds", + &cfg.multi_channel.stereo_detection_hysteresis_seconds); + } } EchoCanceller3Config Aec3ConfigFromJsonString(absl::string_view json_string) { @@ -494,7 +506,9 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) { << (config.delay.capture_alignment_mixing.prefer_first_two_channels ? "true" : "false"); - ost << "}"; + ost << "},"; + ost << "\"detect_pre_echo\": " + << (config.delay.detect_pre_echo ? "true" : "false"); ost << "},"; ost << "\"filter\": {"; @@ -574,7 +588,8 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) { ost << "\"erle_onset_compensation_in_dominant_nearend\": " << (config.ep_strength.erle_onset_compensation_in_dominant_nearend ? 
"true" - : "false") << ","; + : "false") + << ","; ost << "\"use_conservative_tail_frequency_response\": " << (config.ep_strength.use_conservative_tail_frequency_response ? "true" @@ -736,7 +751,19 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) { << ","; ost << "\"conservative_hf_suppression\": " << config.suppressor.conservative_hf_suppression; + ost << "},"; + + ost << "\"multi_channel\": {"; + ost << "\"detect_stereo_content\": " + << (config.multi_channel.detect_stereo_content ? "true" : "false") << ","; + ost << "\"stereo_detection_threshold\": " + << config.multi_channel.stereo_detection_threshold << ","; + ost << "\"stereo_detection_timeout_threshold_seconds\": " + << config.multi_channel.stereo_detection_timeout_threshold_seconds << ","; + ost << "\"stereo_detection_hysteresis_seconds\": " + << config.multi_channel.stereo_detection_hysteresis_seconds; ost << "}"; + ost << "}"; ost << "}"; diff --git a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_factory.cc b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_factory.cc index d65a7262fa..284b117bea 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_factory.cc @@ -25,7 +25,8 @@ std::unique_ptr EchoCanceller3Factory::Create( int num_render_channels, int num_capture_channels) { return std::make_unique( - config_, sample_rate_hz, num_render_channels, num_capture_channels); + config_, /*multichannel_config=*/absl::nullopt, sample_rate_hz, + num_render_channels, num_capture_channels); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio/echo_detector_creator.cc b/TMessagesProj/jni/voip/webrtc/api/audio/echo_detector_creator.cc index 04215b0deb..15b7c51dca 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio/echo_detector_creator.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio/echo_detector_creator.cc @@ -9,8 +9,8 @@ */ #include "api/audio/echo_detector_creator.h" +#include "api/make_ref_counted.h" #include "modules/audio_processing/residual_echo_detector.h" -#include "rtc_base/ref_counted_object.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_decoder_L16.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_decoder_L16.cc index 93863f1020..a03abe26f7 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_decoder_L16.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_decoder_L16.cc @@ -37,7 +37,8 @@ void AudioDecoderL16::AppendSupportedDecoders( std::unique_ptr AudioDecoderL16::MakeAudioDecoder( const Config& config, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { if (!config.IsOk()) { return nullptr; } diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_decoder_L16.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_decoder_L16.h index 581a5b82c1..5a01b7dc01 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_decoder_L16.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_decoder_L16.h @@ -18,6 +18,7 @@ #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -39,7 +40,8 @@ struct RTC_EXPORT AudioDecoderL16 { static void AppendSupportedDecoders(std::vector* specs); static std::unique_ptr MakeAudioDecoder( 
const Config& config, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_encoder_L16.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_encoder_L16.cc index 590d3e32d9..20259b9ad8 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_encoder_L16.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_encoder_L16.cc @@ -59,7 +59,8 @@ AudioCodecInfo AudioEncoderL16::QueryAudioEncoder( std::unique_ptr AudioEncoderL16::MakeAudioEncoder( const AudioEncoderL16::Config& config, int payload_type, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { AudioEncoderPcm16B::Config c; c.sample_rate_hz = config.sample_rate_hz; c.num_channels = config.num_channels; diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_encoder_L16.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_encoder_L16.h index 25d221148e..47509849de 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_encoder_L16.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/L16/audio_encoder_L16.h @@ -18,6 +18,7 @@ #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -44,7 +45,8 @@ struct RTC_EXPORT AudioEncoderL16 { static std::unique_ptr MakeAudioEncoder( const Config& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder.h index 336e38449b..41138741bb 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder.h @@ -20,7 +20,6 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -37,6 +36,9 @@ class AudioDecoder { AudioDecoder() = default; virtual ~AudioDecoder() = default; + AudioDecoder(const AudioDecoder&) = delete; + AudioDecoder& operator=(const AudioDecoder&) = delete; + class EncodedAudioFrame { public: struct DecodeResult { @@ -187,9 +189,6 @@ class AudioDecoder { int sample_rate_hz, int16_t* decoded, SpeechType* speech_type); - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoder); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder_factory_template.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder_factory_template.h index 976f9c62d7..7ea0c91372 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder_factory_template.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder_factory_template.h @@ -15,8 +15,9 @@ #include #include "api/audio_codecs/audio_decoder_factory.h" +#include "api/field_trials_view.h" +#include "api/make_ref_counted.h" #include "api/scoped_refptr.h" -#include "rtc_base/ref_counted_object.h" namespace webrtc { @@ -32,7 +33,8 @@ struct Helper<> { static bool IsSupportedDecoder(const SdpAudioFormat& format) { return false; } static std::unique_ptr 
MakeAudioDecoder( const SdpAudioFormat& format, - absl::optional codec_pair_id) { + absl::optional codec_pair_id, + const FieldTrialsView* field_trials) { return nullptr; } }; @@ -55,16 +57,22 @@ struct Helper { } static std::unique_ptr MakeAudioDecoder( const SdpAudioFormat& format, - absl::optional codec_pair_id) { + absl::optional codec_pair_id, + const FieldTrialsView* field_trials) { auto opt_config = T::SdpToConfig(format); return opt_config ? T::MakeAudioDecoder(*opt_config, codec_pair_id) - : Helper::MakeAudioDecoder(format, codec_pair_id); + : Helper::MakeAudioDecoder(format, codec_pair_id, + field_trials); } }; template class AudioDecoderFactoryT : public AudioDecoderFactory { public: + explicit AudioDecoderFactoryT(const FieldTrialsView* field_trials) { + field_trials_ = field_trials; + } + std::vector GetSupportedDecoders() override { std::vector specs; Helper::AppendSupportedDecoders(&specs); @@ -78,8 +86,11 @@ class AudioDecoderFactoryT : public AudioDecoderFactory { std::unique_ptr MakeAudioDecoder( const SdpAudioFormat& format, absl::optional codec_pair_id) override { - return Helper::MakeAudioDecoder(format, codec_pair_id); + return Helper::MakeAudioDecoder(format, codec_pair_id, + field_trials_); } + + const FieldTrialsView* field_trials_; }; } // namespace audio_decoder_factory_template_impl @@ -115,7 +126,8 @@ class AudioDecoderFactoryT : public AudioDecoderFactory { // TODO(kwiberg): Point at CreateBuiltinAudioDecoderFactory() for an example of // how it is used. template -rtc::scoped_refptr CreateAudioDecoderFactory() { +rtc::scoped_refptr CreateAudioDecoderFactory( + const FieldTrialsView* field_trials = nullptr) { // There's no technical reason we couldn't allow zero template parameters, // but such a factory couldn't create any decoders, and callers can do this // by mistake by simply forgetting the <> altogether. 
So we forbid it in @@ -124,7 +136,8 @@ rtc::scoped_refptr CreateAudioDecoderFactory() { "Caller must give at least one template parameter"); return rtc::make_ref_counted< - audio_decoder_factory_template_impl::AudioDecoderFactoryT>(); + audio_decoder_factory_template_impl::AudioDecoderFactoryT>( + field_trials); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder_factory_template.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder_factory_template.h index 4dc0672c46..8a70ba2268 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder_factory_template.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder_factory_template.h @@ -15,8 +15,9 @@ #include #include "api/audio_codecs/audio_encoder_factory.h" +#include "api/field_trials_view.h" +#include "api/make_ref_counted.h" #include "api/scoped_refptr.h" -#include "rtc_base/ref_counted_object.h" namespace webrtc { @@ -36,7 +37,8 @@ struct Helper<> { static std::unique_ptr MakeAudioEncoder( int payload_type, const SdpAudioFormat& format, - absl::optional codec_pair_id) { + absl::optional codec_pair_id, + const FieldTrialsView* field_trials) { return nullptr; } }; @@ -63,13 +65,14 @@ struct Helper { static std::unique_ptr MakeAudioEncoder( int payload_type, const SdpAudioFormat& format, - absl::optional codec_pair_id) { + absl::optional codec_pair_id, + const FieldTrialsView* field_trials) { auto opt_config = T::SdpToConfig(format); if (opt_config) { return T::MakeAudioEncoder(*opt_config, payload_type, codec_pair_id); } else { return Helper::MakeAudioEncoder(payload_type, format, - codec_pair_id); + codec_pair_id, field_trials); } } }; @@ -77,6 +80,10 @@ struct Helper { template class AudioEncoderFactoryT : public AudioEncoderFactory { public: + explicit AudioEncoderFactoryT(const FieldTrialsView* field_trials) { + field_trials_ = field_trials; + } + std::vector GetSupportedEncoders() override { std::vector specs; Helper::AppendSupportedEncoders(&specs); @@ -92,8 +99,11 @@ class AudioEncoderFactoryT : public AudioEncoderFactory { int payload_type, const SdpAudioFormat& format, absl::optional codec_pair_id) override { - return Helper::MakeAudioEncoder(payload_type, format, codec_pair_id); + return Helper::MakeAudioEncoder(payload_type, format, codec_pair_id, + field_trials_); } + + const FieldTrialsView* field_trials_; }; } // namespace audio_encoder_factory_template_impl @@ -134,7 +144,8 @@ class AudioEncoderFactoryT : public AudioEncoderFactory { // TODO(kwiberg): Point at CreateBuiltinAudioEncoderFactory() for an example of // how it is used. template -rtc::scoped_refptr CreateAudioEncoderFactory() { +rtc::scoped_refptr CreateAudioEncoderFactory( + const FieldTrialsView* field_trials = nullptr) { // There's no technical reason we couldn't allow zero template parameters, // but such a factory couldn't create any encoders, and callers can do this // by mistake by simply forgetting the <> altogether. 
So we forbid it in @@ -143,7 +154,8 @@ rtc::scoped_refptr CreateAudioEncoderFactory() { "Caller must give at least one template parameter"); return rtc::make_ref_counted< - audio_encoder_factory_template_impl::AudioEncoderFactoryT>(); + audio_encoder_factory_template_impl::AudioEncoderFactoryT>( + field_trials); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/builtin_audio_encoder_factory.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/builtin_audio_encoder_factory.cc index 99fac09a57..530d64b2ba 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/builtin_audio_encoder_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/builtin_audio_encoder_factory.cc @@ -47,8 +47,10 @@ struct NotAdvertised { static std::unique_ptr MakeAudioEncoder( const Config& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt) { - return T::MakeAudioEncoder(config, payload_type, codec_pair_id); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr) { + return T::MakeAudioEncoder(config, payload_type, codec_pair_id, + field_trials); } }; diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_decoder_g711.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_decoder_g711.cc index f3d3378cf2..838f7e9624 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_decoder_g711.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_decoder_g711.cc @@ -47,7 +47,8 @@ void AudioDecoderG711::AppendSupportedDecoders( std::unique_ptr AudioDecoderG711::MakeAudioDecoder( const Config& config, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); return nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_decoder_g711.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_decoder_g711.h index 18c15a8d60..0f7a98d345 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_decoder_g711.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_decoder_g711.h @@ -18,6 +18,7 @@ #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -39,7 +40,8 @@ struct RTC_EXPORT AudioDecoderG711 { static void AppendSupportedDecoders(std::vector* specs); static std::unique_ptr MakeAudioDecoder( const Config& config, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_encoder_g711.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_encoder_g711.cc index 4c1ce0f8e6..1dca3b80d3 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_encoder_g711.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_encoder_g711.cc @@ -64,7 +64,8 @@ AudioCodecInfo AudioEncoderG711::QueryAudioEncoder(const Config& config) { std::unique_ptr AudioEncoderG711::MakeAudioEncoder( const Config& config, int payload_type, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); return nullptr; diff --git 
a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_encoder_g711.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_encoder_g711.h index 29fe38f1a0..4b3eb845e0 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_encoder_g711.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g711/audio_encoder_g711.h @@ -18,6 +18,7 @@ #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -44,7 +45,8 @@ struct RTC_EXPORT AudioEncoderG711 { static std::unique_ptr MakeAudioEncoder( const Config& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_decoder_g722.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_decoder_g722.cc index 0049e5ab32..ed7163471a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_decoder_g722.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_decoder_g722.cc @@ -36,7 +36,8 @@ void AudioDecoderG722::AppendSupportedDecoders( std::unique_ptr AudioDecoderG722::MakeAudioDecoder( Config config, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); return nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_decoder_g722.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_decoder_g722.h index 2a674926db..6f7b253039 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_decoder_g722.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_decoder_g722.h @@ -18,6 +18,7 @@ #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -33,7 +34,8 @@ struct RTC_EXPORT AudioDecoderG722 { static void AppendSupportedDecoders(std::vector* specs); static std::unique_ptr MakeAudioDecoder( Config config, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_encoder_g722.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_encoder_g722.cc index 66cf9e19d6..56a6c4da6a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_encoder_g722.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_encoder_g722.cc @@ -62,7 +62,8 @@ AudioCodecInfo AudioEncoderG722::QueryAudioEncoder( std::unique_ptr AudioEncoderG722::MakeAudioEncoder( const AudioEncoderG722Config& config, int payload_type, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); return nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_encoder_g722.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_encoder_g722.h index 327c0af04a..78ceddd1e9 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_encoder_g722.h +++ 
b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_encoder_g722.h @@ -19,6 +19,7 @@ #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/g722/audio_encoder_g722_config.h" +#include "api/field_trials_view.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -34,7 +35,8 @@ struct RTC_EXPORT AudioEncoderG722 { static std::unique_ptr MakeAudioEncoder( const AudioEncoderG722Config& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_encoder_g722_config.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_encoder_g722_config.h index f85eef00a8..f3f3a9f016 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_encoder_g722_config.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/g722/audio_encoder_g722_config.h @@ -11,6 +11,8 @@ #ifndef API_AUDIO_CODECS_G722_AUDIO_ENCODER_G722_CONFIG_H_ #define API_AUDIO_CODECS_G722_AUDIO_ENCODER_G722_CONFIG_H_ +#include "api/audio_codecs/audio_encoder.h" + namespace webrtc { struct AudioEncoderG722Config { diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_decoder_ilbc.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_decoder_ilbc.cc index 237cef23c1..c58316903a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_decoder_ilbc.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_decoder_ilbc.cc @@ -34,7 +34,8 @@ void AudioDecoderIlbc::AppendSupportedDecoders( std::unique_ptr AudioDecoderIlbc::MakeAudioDecoder( Config config, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { return std::make_unique(); } diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_decoder_ilbc.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_decoder_ilbc.h index 9ab847977d..60566c88df 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_decoder_ilbc.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_decoder_ilbc.h @@ -18,6 +18,7 @@ #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" namespace webrtc { @@ -29,7 +30,8 @@ struct AudioDecoderIlbc { static void AppendSupportedDecoders(std::vector* specs); static std::unique_ptr MakeAudioDecoder( Config config, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_encoder_ilbc.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_encoder_ilbc.cc index 52ba8f6b88..b497948491 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_encoder_ilbc.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_encoder_ilbc.cc @@ -76,7 +76,8 @@ AudioCodecInfo AudioEncoderIlbc::QueryAudioEncoder( std::unique_ptr AudioEncoderIlbc::MakeAudioEncoder( const AudioEncoderIlbcConfig& config, int payload_type, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); return nullptr; diff --git 
a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_encoder_ilbc.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_encoder_ilbc.h index e4aeca70de..a5306841ce 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_encoder_ilbc.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/ilbc/audio_encoder_ilbc.h @@ -19,6 +19,7 @@ #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/ilbc/audio_encoder_ilbc_config.h" +#include "api/field_trials_view.h" namespace webrtc { @@ -33,7 +34,8 @@ struct AudioEncoderIlbc { static std::unique_ptr MakeAudioEncoder( const AudioEncoderIlbcConfig& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_fix.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_fix.cc index 305e15a525..b3ab91da47 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_fix.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_fix.cc @@ -33,7 +33,8 @@ void AudioDecoderIsacFix::AppendSupportedDecoders( std::unique_ptr AudioDecoderIsacFix::MakeAudioDecoder( Config config, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { AudioDecoderIsacFixImpl::Config c; c.sample_rate_hz = 16000; return std::make_unique(c); diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_fix.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_fix.h index 200914adfe..8f61d9ab0e 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_fix.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_fix.h @@ -18,6 +18,7 @@ #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -30,7 +31,8 @@ struct RTC_EXPORT AudioDecoderIsacFix { static void AppendSupportedDecoders(std::vector* specs); static std::unique_ptr MakeAudioDecoder( Config config, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_float.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_float.cc index 683eb6c0ad..98f672b468 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_float.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_float.cc @@ -42,7 +42,8 @@ void AudioDecoderIsacFloat::AppendSupportedDecoders( std::unique_ptr AudioDecoderIsacFloat::MakeAudioDecoder( Config config, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { AudioDecoderIsacFloatImpl::Config c; c.sample_rate_hz = config.sample_rate_hz; if (!config.IsOk()) { diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_float.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_float.h index e78f8b81ee..864c6b999f 100644 --- 
a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_float.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_decoder_isac_float.h @@ -18,6 +18,7 @@ #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -35,7 +36,8 @@ struct RTC_EXPORT AudioDecoderIsacFloat { static void AppendSupportedDecoders(std::vector* specs); static std::unique_ptr MakeAudioDecoder( Config config, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_fix.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_fix.cc index b590be1ea3..39603775a4 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_fix.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_fix.cc @@ -56,7 +56,8 @@ AudioCodecInfo AudioEncoderIsacFix::QueryAudioEncoder( std::unique_ptr AudioEncoderIsacFix::MakeAudioEncoder( AudioEncoderIsacFix::Config config, int payload_type, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { AudioEncoderIsacFixImpl::Config c; c.frame_size_ms = config.frame_size_ms; c.bit_rate = config.bit_rate; diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_fix.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_fix.h index e50d9f5112..de0f1d1308 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_fix.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_fix.h @@ -18,6 +18,7 @@ #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -44,7 +45,8 @@ struct RTC_EXPORT AudioEncoderIsacFix { static std::unique_ptr MakeAudioEncoder( Config config, int payload_type, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_float.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_float.cc index e2afeae84e..e3e50080fa 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_float.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_float.cc @@ -68,7 +68,8 @@ AudioCodecInfo AudioEncoderIsacFloat::QueryAudioEncoder( std::unique_ptr AudioEncoderIsacFloat::MakeAudioEncoder( const AudioEncoderIsacFloat::Config& config, int payload_type, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { AudioEncoderIsacFloatImpl::Config c; c.payload_type = payload_type; c.sample_rate_hz = config.sample_rate_hz; diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_float.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_float.h index 0cb9c17d71..d031d76db1 100644 --- 
a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_float.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/isac/audio_encoder_isac_float.h @@ -18,6 +18,7 @@ #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -58,7 +59,8 @@ struct RTC_EXPORT AudioEncoderIsacFloat { static std::unique_ptr MakeAudioEncoder( const Config& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_multi_channel_opus.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_multi_channel_opus.cc index 6ba2b6d9d3..0fb4e05511 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_multi_channel_opus.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_multi_channel_opus.cc @@ -64,7 +64,8 @@ void AudioDecoderMultiChannelOpus::AppendSupportedDecoders( std::unique_ptr AudioDecoderMultiChannelOpus::MakeAudioDecoder( AudioDecoderMultiChannelOpusConfig config, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { return AudioDecoderMultiChannelOpusImpl::MakeAudioDecoder(config); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_multi_channel_opus.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_multi_channel_opus.h index b5ca0fe41b..eafd6c6939 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_multi_channel_opus.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_multi_channel_opus.h @@ -19,6 +19,7 @@ #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h" +#include "api/field_trials_view.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -32,7 +33,8 @@ struct RTC_EXPORT AudioDecoderMultiChannelOpus { static void AppendSupportedDecoders(std::vector* specs); static std::unique_ptr MakeAudioDecoder( AudioDecoderMultiChannelOpusConfig config, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h index 7350045bf5..f97c5c3193 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h @@ -13,6 +13,8 @@ #include +#include "api/audio_codecs/audio_decoder.h" + namespace webrtc { struct AudioDecoderMultiChannelOpusConfig { // The number of channels that the decoder will output. 
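
Note on the codec-factory hunks above: each MakeAudioEncoder/MakeAudioDecoder gains a trailing `const FieldTrialsView* field_trials = nullptr` parameter, so existing call sites keep compiling unchanged. A minimal sketch of threading trials through one of these factories, using iLBC as covered above; the helper name and payload type are illustrative, and `trials` may be null.

#include <memory>
#include "absl/types/optional.h"
#include "api/audio_codecs/audio_encoder.h"
#include "api/audio_codecs/ilbc/audio_encoder_ilbc.h"
#include "api/field_trials_view.h"

// `trials` may be nullptr and is expected to outlive the returned encoder.
std::unique_ptr<webrtc::AudioEncoder> MakeIlbcEncoder(
    const webrtc::FieldTrialsView* trials) {
  webrtc::AudioEncoderIlbcConfig config;  // library defaults
  return webrtc::AudioEncoderIlbc::MakeAudioEncoder(
      config, /*payload_type=*/102, /*codec_pair_id=*/absl::nullopt, trials);
}
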
diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_opus.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_opus.cc index 7e0d88b7ad..efc9a73546 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_opus.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_opus.cc @@ -73,7 +73,8 @@ void AudioDecoderOpus::AppendSupportedDecoders( std::unique_ptr AudioDecoderOpus::MakeAudioDecoder( Config config, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); return nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_opus.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_opus.h index ec0f61d5bb..138c0377df 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_opus.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_decoder_opus.h @@ -18,6 +18,7 @@ #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -34,7 +35,8 @@ struct RTC_EXPORT AudioDecoderOpus { static void AppendSupportedDecoders(std::vector* specs); static std::unique_ptr MakeAudioDecoder( Config config, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_multi_channel_opus.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_multi_channel_opus.cc index 758eaaeebe..14f480b1ec 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_multi_channel_opus.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_multi_channel_opus.cc @@ -66,7 +66,8 @@ AudioCodecInfo AudioEncoderMultiChannelOpus::QueryAudioEncoder( std::unique_ptr AudioEncoderMultiChannelOpus::MakeAudioEncoder( const AudioEncoderMultiChannelOpusConfig& config, int payload_type, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { return AudioEncoderMultiChannelOpusImpl::MakeAudioEncoder(config, payload_type); } diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_multi_channel_opus.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_multi_channel_opus.h index 977a3a4b9c..c1c4db3577 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_multi_channel_opus.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_multi_channel_opus.h @@ -19,6 +19,7 @@ #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.h" +#include "api/field_trials_view.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -33,7 +34,8 @@ struct RTC_EXPORT AudioEncoderMultiChannelOpus { static std::unique_ptr MakeAudioEncoder( const Config& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_opus.cc 
b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_opus.cc index 6d950c5e74..5b6322da4c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_opus.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_opus.cc @@ -32,7 +32,8 @@ AudioCodecInfo AudioEncoderOpus::QueryAudioEncoder( std::unique_ptr AudioEncoderOpus::MakeAudioEncoder( const AudioEncoderOpusConfig& config, int payload_type, - absl::optional /*codec_pair_id*/) { + absl::optional /*codec_pair_id*/, + const FieldTrialsView* field_trials) { if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); return nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_opus.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_opus.h index 03cb0d6b38..df93ae5303 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_opus.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_opus.h @@ -19,6 +19,7 @@ #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/opus/audio_encoder_opus_config.h" +#include "api/field_trials_view.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -34,7 +35,8 @@ struct RTC_EXPORT AudioEncoderOpus { static std::unique_ptr MakeAudioEncoder( const AudioEncoderOpusConfig& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus_audio_encoder_factory.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus_audio_encoder_factory.cc index 5f0c7147f5..8c286f21e1 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus_audio_encoder_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus_audio_encoder_factory.cc @@ -37,8 +37,10 @@ struct NotAdvertised { static std::unique_ptr MakeAudioEncoder( const Config& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt) { - return T::MakeAudioEncoder(config, payload_type, codec_pair_id); + absl::optional codec_pair_id = absl::nullopt, + const FieldTrialsView* field_trials = nullptr) { + return T::MakeAudioEncoder(config, payload_type, codec_pair_id, + field_trials); } }; diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_options.cc b/TMessagesProj/jni/voip/webrtc/api/audio_options.cc index 6832bbe294..658515062c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_options.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_options.cc @@ -52,19 +52,10 @@ void AudioOptions::SetAll(const AudioOptions& change) { change.audio_jitter_buffer_fast_accelerate); SetFrom(&audio_jitter_buffer_min_delay_ms, change.audio_jitter_buffer_min_delay_ms); - SetFrom(&audio_jitter_buffer_enable_rtx_handling, - change.audio_jitter_buffer_enable_rtx_handling); - SetFrom(&typing_detection, change.typing_detection); - SetFrom(&experimental_agc, change.experimental_agc); - SetFrom(&experimental_ns, change.experimental_ns); - SetFrom(&residual_echo_detector, change.residual_echo_detector); - SetFrom(&tx_agc_target_dbov, change.tx_agc_target_dbov); - SetFrom(&tx_agc_digital_compression_gain, - change.tx_agc_digital_compression_gain); - SetFrom(&tx_agc_limiter, change.tx_agc_limiter); SetFrom(&combined_audio_video_bwe, change.combined_audio_video_bwe); SetFrom(&audio_network_adaptor, change.audio_network_adaptor); SetFrom(&audio_network_adaptor_config, 
change.audio_network_adaptor_config); + SetFrom(&init_recording_on_send, change.init_recording_on_send); } bool AudioOptions::operator==(const AudioOptions& o) const { @@ -81,18 +72,10 @@ bool AudioOptions::operator==(const AudioOptions& o) const { o.audio_jitter_buffer_fast_accelerate && audio_jitter_buffer_min_delay_ms == o.audio_jitter_buffer_min_delay_ms && - audio_jitter_buffer_enable_rtx_handling == - o.audio_jitter_buffer_enable_rtx_handling && - typing_detection == o.typing_detection && - experimental_agc == o.experimental_agc && - experimental_ns == o.experimental_ns && - residual_echo_detector == o.residual_echo_detector && - tx_agc_target_dbov == o.tx_agc_target_dbov && - tx_agc_digital_compression_gain == o.tx_agc_digital_compression_gain && - tx_agc_limiter == o.tx_agc_limiter && combined_audio_video_bwe == o.combined_audio_video_bwe && audio_network_adaptor == o.audio_network_adaptor && - audio_network_adaptor_config == o.audio_network_adaptor_config; + audio_network_adaptor_config == o.audio_network_adaptor_config && + init_recording_on_send == o.init_recording_on_send; } std::string AudioOptions::ToString() const { @@ -114,18 +97,9 @@ std::string AudioOptions::ToString() const { audio_jitter_buffer_fast_accelerate); ToStringIfSet(&result, "audio_jitter_buffer_min_delay_ms", audio_jitter_buffer_min_delay_ms); - ToStringIfSet(&result, "audio_jitter_buffer_enable_rtx_handling", - audio_jitter_buffer_enable_rtx_handling); - ToStringIfSet(&result, "typing", typing_detection); - ToStringIfSet(&result, "experimental_agc", experimental_agc); - ToStringIfSet(&result, "experimental_ns", experimental_ns); - ToStringIfSet(&result, "residual_echo_detector", residual_echo_detector); - ToStringIfSet(&result, "tx_agc_target_dbov", tx_agc_target_dbov); - ToStringIfSet(&result, "tx_agc_digital_compression_gain", - tx_agc_digital_compression_gain); - ToStringIfSet(&result, "tx_agc_limiter", tx_agc_limiter); ToStringIfSet(&result, "combined_audio_video_bwe", combined_audio_video_bwe); ToStringIfSet(&result, "audio_network_adaptor", audio_network_adaptor); + ToStringIfSet(&result, "init_recording_on_send", init_recording_on_send); result << "}"; return result.str(); } diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_options.h b/TMessagesProj/jni/voip/webrtc/api/audio_options.h index 1b0d1ad0bd..39ba3886ea 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_options.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_options.h @@ -58,17 +58,6 @@ struct RTC_EXPORT AudioOptions { absl::optional audio_jitter_buffer_fast_accelerate; // Audio receiver jitter buffer (NetEq) minimum target delay in milliseconds. absl::optional audio_jitter_buffer_min_delay_ms; - // Audio receiver jitter buffer (NetEq) should handle retransmitted packets. - absl::optional audio_jitter_buffer_enable_rtx_handling; - // Audio processing to detect typing. - absl::optional typing_detection; - absl::optional experimental_agc; - absl::optional experimental_ns; - // Note that tx_agc_* only applies to non-experimental AGC. - absl::optional residual_echo_detector; - absl::optional tx_agc_target_dbov; - absl::optional tx_agc_digital_compression_gain; - absl::optional tx_agc_limiter; // Enable combined audio+bandwidth BWE. // TODO(pthatcher): This flag is set from the // "googCombinedAudioVideoBwe", but not used anywhere. So delete it, @@ -80,6 +69,10 @@ struct RTC_EXPORT AudioOptions { absl::optional audio_network_adaptor; // Config string for audio network adaptor. 
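
The AudioOptions hunk above drops the long-deprecated AGC/NS/typing knobs and introduces `init_recording_on_send`, which (per the comment added to audio_options.h below) defaults to true. A short sketch of opting out; the surrounding call-site wiring is assumed.

#include "api/audio_options.h"

cricket::AudioOptions MakeSendOptions() {
  cricket::AudioOptions options;
  // Skip pre-initializing the ADM for recording when a send stream is
  // created; recording is then only set up when capture actually starts.
  options.init_recording_on_send = false;
  return options;
}
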
absl::optional audio_network_adaptor_config; + // Pre-initialize the ADM for recording when starting to send. Default to + // true. + // TODO(webrtc:13566): Remove this option. See issue for details. + absl::optional init_recording_on_send; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/api/call/audio_sink.h b/TMessagesProj/jni/voip/webrtc/api/call/audio_sink.h index fa4c3f6814..fec26593a6 100644 --- a/TMessagesProj/jni/voip/webrtc/api/call/audio_sink.h +++ b/TMessagesProj/jni/voip/webrtc/api/call/audio_sink.h @@ -11,13 +11,8 @@ #ifndef API_CALL_AUDIO_SINK_H_ #define API_CALL_AUDIO_SINK_H_ -#if defined(WEBRTC_POSIX) && !defined(__STDC_FORMAT_MACROS) -// Avoid conflict with format_macros.h. -#define __STDC_FORMAT_MACROS -#endif - -#include #include +#include namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/api/candidate.cc b/TMessagesProj/jni/voip/webrtc/api/candidate.cc index 4d17256c2e..a14dda350c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/candidate.cc +++ b/TMessagesProj/jni/voip/webrtc/api/candidate.cc @@ -22,6 +22,7 @@ Candidate::Candidate() component_(0), priority_(0), network_type_(rtc::ADAPTER_TYPE_UNKNOWN), + underlying_type_for_vpn_(rtc::ADAPTER_TYPE_UNKNOWN), generation_(0), network_id_(0), network_cost_(0) {} @@ -46,6 +47,7 @@ Candidate::Candidate(int component, password_(password), type_(type), network_type_(rtc::ADAPTER_TYPE_UNKNOWN), + underlying_type_for_vpn_(rtc::ADAPTER_TYPE_UNKNOWN), generation_(generation), foundation_(foundation), network_id_(network_id), diff --git a/TMessagesProj/jni/voip/webrtc/api/candidate.h b/TMessagesProj/jni/voip/webrtc/api/candidate.h index ecfdee3fcb..b8aaebc14a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/candidate.h +++ b/TMessagesProj/jni/voip/webrtc/api/candidate.h @@ -109,6 +109,13 @@ class RTC_EXPORT Candidate { network_type_ = network_type; } + rtc::AdapterType underlying_type_for_vpn() const { + return underlying_type_for_vpn_; + } + void set_underlying_type_for_vpn(rtc::AdapterType network_type) { + underlying_type_for_vpn_ = network_type; + } + // Candidates in a new generation replace those in the old generation. 
uint32_t generation() const { return generation_; } void set_generation(uint32_t generation) { generation_ = generation; } @@ -195,6 +202,7 @@ class RTC_EXPORT Candidate { std::string type_; std::string network_name_; rtc::AdapterType network_type_; + rtc::AdapterType underlying_type_for_vpn_; uint32_t generation_; std::string foundation_; rtc::SocketAddress related_address_; diff --git a/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.cc b/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.cc index 008fce3e80..f9cc7ad3e2 100644 --- a/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.cc @@ -38,17 +38,27 @@ rtc::scoped_refptr CreatePeerConnectionFactory( std::unique_ptr video_decoder_factory, rtc::scoped_refptr audio_mixer, rtc::scoped_refptr audio_processing, - AudioFrameProcessor* audio_frame_processor) { + AudioFrameProcessor* audio_frame_processor, + std::unique_ptr field_trials) { + if (!field_trials) { + field_trials = std::make_unique(); + } + PeerConnectionFactoryDependencies dependencies; dependencies.network_thread = network_thread; dependencies.worker_thread = worker_thread; dependencies.signaling_thread = signaling_thread; - dependencies.task_queue_factory = CreateDefaultTaskQueueFactory(); + dependencies.task_queue_factory = + CreateDefaultTaskQueueFactory(field_trials.get()); dependencies.call_factory = CreateCallFactory(); dependencies.event_log_factory = std::make_unique( dependencies.task_queue_factory.get()); - dependencies.trials = std::make_unique(); + dependencies.trials = std::move(field_trials); + if (network_thread) { + // TODO(bugs.webrtc.org/13145): Add an rtc::SocketFactory* argument. + dependencies.socket_factory = network_thread->socketserver(); + } cricket::MediaEngineDependencies media_dependencies; media_dependencies.task_queue_factory = dependencies.task_queue_factory.get(); media_dependencies.adm = std::move(default_adm); diff --git a/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.h b/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.h index 4eb0a00e54..efebc5f3ea 100644 --- a/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.h @@ -49,7 +49,8 @@ CreatePeerConnectionFactory( std::unique_ptr video_decoder_factory, rtc::scoped_refptr audio_mixer, rtc::scoped_refptr audio_processing, - AudioFrameProcessor* audio_frame_processor = nullptr); + AudioFrameProcessor* audio_frame_processor = nullptr, + std::unique_ptr field_trials = nullptr); } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/crypto_params.h b/TMessagesProj/jni/voip/webrtc/api/crypto_params.h index 5da352cbef..95bd892f9c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/crypto_params.h +++ b/TMessagesProj/jni/voip/webrtc/api/crypto_params.h @@ -13,6 +13,8 @@ #include +#include "absl/strings/string_view.h" + namespace cricket { // Parameters for SRTP negotiation, as described in RFC 4568. 
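
On the Candidate additions above: `underlying_type_for_vpn_` records the adapter type beneath a VPN and starts out as ADAPTER_TYPE_UNKNOWN in both constructors. A hedged sketch of consuming it; the helper is illustrative and assumes a candidate gathered elsewhere.

#include "api/candidate.h"
#include "rtc_base/network_constants.h"

// Returns the adapter type to use for cost/priority decisions, looking
// through a VPN when the underlying type is known.
rtc::AdapterType EffectiveAdapterType(const cricket::Candidate& candidate) {
  if (candidate.network_type() == rtc::ADAPTER_TYPE_VPN &&
      candidate.underlying_type_for_vpn() != rtc::ADAPTER_TYPE_UNKNOWN) {
    return candidate.underlying_type_for_vpn();
  }
  return candidate.network_type();
}
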
@@ -21,9 +23,9 @@ namespace cricket { struct CryptoParams { CryptoParams() : tag(0) {} CryptoParams(int t, - const std::string& cs, - const std::string& kp, - const std::string& sp) + absl::string_view cs, + absl::string_view kp, + absl::string_view sp) : tag(t), cipher_suite(cs), key_params(kp), session_params(sp) {} bool Matches(const CryptoParams& params) const { diff --git a/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.cc b/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.cc index a68ff8feb0..faebc0972f 100644 --- a/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.cc +++ b/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.cc @@ -20,11 +20,27 @@ DtlsTransportInformation::DtlsTransportInformation(DtlsTransportState state) DtlsTransportInformation::DtlsTransportInformation( DtlsTransportState state, + absl::optional role, absl::optional tls_version, absl::optional ssl_cipher_suite, absl::optional srtp_cipher_suite, std::unique_ptr remote_ssl_certificates) : state_(state), + role_(role), + tls_version_(tls_version), + ssl_cipher_suite_(ssl_cipher_suite), + srtp_cipher_suite_(srtp_cipher_suite), + remote_ssl_certificates_(std::move(remote_ssl_certificates)) {} + +// Deprecated version +DtlsTransportInformation::DtlsTransportInformation( + DtlsTransportState state, + absl::optional tls_version, + absl::optional ssl_cipher_suite, + absl::optional srtp_cipher_suite, + std::unique_ptr remote_ssl_certificates) + : state_(state), + role_(absl::nullopt), tls_version_(tls_version), ssl_cipher_suite_(ssl_cipher_suite), srtp_cipher_suite_(srtp_cipher_suite), @@ -33,6 +49,7 @@ DtlsTransportInformation::DtlsTransportInformation( DtlsTransportInformation::DtlsTransportInformation( const DtlsTransportInformation& c) : state_(c.state()), + role_(c.role_), tls_version_(c.tls_version_), ssl_cipher_suite_(c.ssl_cipher_suite_), srtp_cipher_suite_(c.srtp_cipher_suite_), @@ -43,6 +60,7 @@ DtlsTransportInformation::DtlsTransportInformation( DtlsTransportInformation& DtlsTransportInformation::operator=( const DtlsTransportInformation& c) { state_ = c.state(); + role_ = c.role_; tls_version_ = c.tls_version_; ssl_cipher_suite_ = c.ssl_cipher_suite_; srtp_cipher_suite_ = c.srtp_cipher_suite_; diff --git a/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.h b/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.h index 86715b0400..7b0151249c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.h @@ -36,6 +36,11 @@ enum class DtlsTransportState { kNumValues }; +enum class DtlsTransportTlsRole { + kServer, // Other end sends CLIENT_HELLO + kClient // This end sends CLIENT_HELLO +}; + // This object gives snapshot information about the changeable state of a // DTLSTransport. 
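
On the DTLS changes above: DtlsTransportInformation now carries an optional DtlsTransportTlsRole, and the role-less constructor is kept only as a deprecated overload. A minimal sketch of constructing and inspecting the role; the concrete values are illustrative.

#include <memory>
#include "absl/types/optional.h"
#include "api/dtls_transport_interface.h"

webrtc::DtlsTransportInformation MakeInfo() {
  return webrtc::DtlsTransportInformation(
      webrtc::DtlsTransportState::kConnected,
      webrtc::DtlsTransportTlsRole::kClient,  // this end sent CLIENT_HELLO
      /*tls_version=*/absl::nullopt,
      /*ssl_cipher_suite=*/absl::nullopt,
      /*srtp_cipher_suite=*/absl::nullopt,
      /*remote_ssl_certificates=*/nullptr);
}

bool IsDtlsClient(const webrtc::DtlsTransportInformation& info) {
  return info.role() == webrtc::DtlsTransportTlsRole::kClient;
}
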
class RTC_EXPORT DtlsTransportInformation { @@ -44,10 +49,19 @@ class RTC_EXPORT DtlsTransportInformation { explicit DtlsTransportInformation(DtlsTransportState state); DtlsTransportInformation( DtlsTransportState state, + absl::optional role, absl::optional tls_version, absl::optional ssl_cipher_suite, absl::optional srtp_cipher_suite, std::unique_ptr remote_ssl_certificates); + ABSL_DEPRECATED("Use version with role parameter") + DtlsTransportInformation( + DtlsTransportState state, + absl::optional tls_version, + absl::optional ssl_cipher_suite, + absl::optional srtp_cipher_suite, + std::unique_ptr remote_ssl_certificates); + // Copy and assign DtlsTransportInformation(const DtlsTransportInformation& c); DtlsTransportInformation& operator=(const DtlsTransportInformation& c); @@ -57,6 +71,7 @@ class RTC_EXPORT DtlsTransportInformation { default; DtlsTransportState state() const { return state_; } + absl::optional role() const { return role_; } absl::optional tls_version() const { return tls_version_; } absl::optional ssl_cipher_suite() const { return ssl_cipher_suite_; } absl::optional srtp_cipher_suite() const { return srtp_cipher_suite_; } @@ -67,6 +82,7 @@ class RTC_EXPORT DtlsTransportInformation { private: DtlsTransportState state_; + absl::optional role_; absl::optional tls_version_; absl::optional ssl_cipher_suite_; absl::optional srtp_cipher_suite_; diff --git a/TMessagesProj/jni/voip/webrtc/api/field_trials.cc b/TMessagesProj/jni/voip/webrtc/api/field_trials.cc new file mode 100644 index 0000000000..4bd11271dc --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/field_trials.cc @@ -0,0 +1,107 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/field_trials.h" + +#include + +#include "rtc_base/checks.h" +#include "system_wrappers/include/field_trial.h" + +namespace { + +// This part is copied from system_wrappers/field_trial.cc. +webrtc::flat_map InsertIntoMap(const std::string& s) { + std::string::size_type field_start = 0; + webrtc::flat_map key_value_map; + while (field_start < s.size()) { + std::string::size_type separator_pos = s.find('/', field_start); + RTC_CHECK_NE(separator_pos, std::string::npos) + << "Missing separator '/' after field trial key."; + RTC_CHECK_GT(separator_pos, field_start) + << "Field trial key cannot be empty."; + std::string key = s.substr(field_start, separator_pos - field_start); + field_start = separator_pos + 1; + + RTC_CHECK_LT(field_start, s.size()) + << "Missing value after field trial key. String ended."; + separator_pos = s.find('/', field_start); + RTC_CHECK_NE(separator_pos, std::string::npos) + << "Missing terminating '/' in field trial string."; + RTC_CHECK_GT(separator_pos, field_start) + << "Field trial value cannot be empty."; + std::string value = s.substr(field_start, separator_pos - field_start); + field_start = separator_pos + 1; + + // If a key is specified multiple times, only the value linked to the first + // key is stored. note: This will crash in debug build when calling + // InitFieldTrialsFromString(). + key_value_map.emplace(key, value); + } + // This check is technically redundant due to earlier checks. 
+ // We nevertheless keep the check to make it clear that the entire + // string has been processed, and without indexing past the end. + RTC_CHECK_EQ(field_start, s.size()); + + return key_value_map; +} + +// Makes sure that only one instance is created, since the usage +// of global string makes behaviour unpredicatable otherwise. +// TODO(bugs.webrtc.org/10335): Remove once global string is gone. +std::atomic instance_created_{false}; + +} // namespace + +namespace webrtc { + +FieldTrials::FieldTrials(const std::string& s) + : uses_global_(true), + field_trial_string_(s), + previous_field_trial_string_(webrtc::field_trial::GetFieldTrialString()), + key_value_map_(InsertIntoMap(s)) { + // TODO(bugs.webrtc.org/10335): Remove the global string! + field_trial::InitFieldTrialsFromString(field_trial_string_.c_str()); + RTC_CHECK(!instance_created_.exchange(true)) + << "Only one instance may be instanciated at any given time!"; +} + +std::unique_ptr FieldTrials::CreateNoGlobal(const std::string& s) { + return std::unique_ptr(new FieldTrials(s, true)); +} + +FieldTrials::FieldTrials(const std::string& s, bool) + : uses_global_(false), + previous_field_trial_string_(nullptr), + key_value_map_(InsertIntoMap(s)) {} + +FieldTrials::~FieldTrials() { + // TODO(bugs.webrtc.org/10335): Remove the global string! + if (uses_global_) { + field_trial::InitFieldTrialsFromString(previous_field_trial_string_); + RTC_CHECK(instance_created_.exchange(false)); + } +} + +std::string FieldTrials::GetValue(absl::string_view key) const { + auto it = key_value_map_.find(std::string(key)); + if (it != key_value_map_.end()) + return it->second; + + // Check the global string so that programs using + // a mix between FieldTrials and the global string continue to work + // TODO(bugs.webrtc.org/10335): Remove the global string! + if (uses_global_) { + return field_trial::FindFullName(std::string(key)); + } + return ""; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/field_trials.h b/TMessagesProj/jni/voip/webrtc/api/field_trials.h new file mode 100644 index 0000000000..bf7a7cc625 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/field_trials.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_FIELD_TRIALS_H_ +#define API_FIELD_TRIALS_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/field_trials_registry.h" +#include "rtc_base/containers/flat_map.h" + +namespace webrtc { + +// The FieldTrials class is used to inject field trials into webrtc. +// +// Field trials allow webrtc clients (such as Chromium) to turn on feature code +// in binaries out in the field and gather information with that. +// +// They are designed to be easy to use with Chromium field trials and to speed +// up developers by reducing the need to wire up APIs to control whether a +// feature is on/off. +// +// The field trials are injected into objects that use them at creation time. 
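
On the new api/field_trials.{h,cc} above: the regular constructor installs the string into the legacy global (so only one such instance may exist at a time), while CreateNoGlobal() parses the same '/'-terminated Key/Value format without touching the global. A usage sketch; the trial names are illustrative rather than registered keys.

#include <memory>
#include "api/field_trials.h"

void ConfigureTrials() {
  // Owns the global trial string for its lifetime; constructing a second
  // FieldTrials this way would hit the RTC_CHECK in the constructor.
  webrtc::FieldTrials trials("WebRTC-ExampleTrial/Enabled/");

  // Parses the same format but never writes the global string, so it can
  // coexist with the instance above.
  std::unique_ptr<webrtc::FieldTrials> local =
      webrtc::FieldTrials::CreateNoGlobal("WebRTC-OtherTrial/Disabled/");

  // FieldTrials is a FieldTrialsRegistry/FieldTrialsView (added later in
  // this diff), so the usual accessors apply.
  bool enabled = trials.IsEnabled("WebRTC-ExampleTrial");
  (void)enabled;
  (void)local;
}
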
+// +// NOTE: Creating multiple FieldTrials-object is currently prohibited +// until we remove the global string (TODO(bugs.webrtc.org/10335)) +// (unless using CreateNoGlobal): +class FieldTrials : public FieldTrialsRegistry { + public: + explicit FieldTrials(const std::string& s); + ~FieldTrials(); + + // Create a FieldTrials object that is not reading/writing from + // global variable (i.e can not be used for all parts of webrtc). + static std::unique_ptr CreateNoGlobal(const std::string& s); + + private: + explicit FieldTrials(const std::string& s, bool); + + std::string GetValue(absl::string_view key) const override; + + const bool uses_global_; + const std::string field_trial_string_; + const char* const previous_field_trial_string_; + const flat_map key_value_map_; +}; + +} // namespace webrtc + +#endif // API_FIELD_TRIALS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/field_trials_registry.cc b/TMessagesProj/jni/voip/webrtc/api/field_trials_registry.cc new file mode 100644 index 0000000000..14cbf31f0a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/field_trials_registry.cc @@ -0,0 +1,30 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/field_trials_registry.h" + +#include + +#include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" +#include "rtc_base/checks.h" +#include "rtc_base/containers/flat_set.h" + +namespace webrtc { + +std::string FieldTrialsRegistry::Lookup(absl::string_view key) const { +#if WEBRTC_STRICT_FIELD_TRIALS + RTC_DCHECK(absl::c_linear_search(kRegisteredFieldTrials, key) || + test_keys_.contains(key)) + << key << " is not registered."; +#endif + return GetValue(key); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/field_trials_registry.h b/TMessagesProj/jni/voip/webrtc/api/field_trials_registry.h new file mode 100644 index 0000000000..dc7e8445b1 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/field_trials_registry.h @@ -0,0 +1,54 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_FIELD_TRIALS_REGISTRY_H_ +#define API_FIELD_TRIALS_REGISTRY_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/field_trials_view.h" +#include "rtc_base/containers/flat_set.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Abstract base class for a field trial registry that verifies that any looked +// up key has been pre-registered in accordance with `g3doc/field-trials.md`. 
+class RTC_EXPORT FieldTrialsRegistry : public FieldTrialsView { + public: + FieldTrialsRegistry() = default; + + FieldTrialsRegistry(const FieldTrialsRegistry&) = default; + FieldTrialsRegistry& operator=(const FieldTrialsRegistry&) = default; + + ~FieldTrialsRegistry() override = default; + + // Verifies that `key` is a registered field trial and then returns the + // configured value for `key` or an empty string if the field trial isn't + // configured. + std::string Lookup(absl::string_view key) const override; + + // Register additional `keys` for testing. This should only be used for + // imaginary keys that are never used outside test code. + void RegisterKeysForTesting(flat_set keys) { + test_keys_ = std::move(keys); + } + + private: + virtual std::string GetValue(absl::string_view key) const = 0; + + // Imaginary keys only used for testing. + flat_set test_keys_; +}; + +} // namespace webrtc + +#endif // API_FIELD_TRIALS_REGISTRY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/field_trials_view.h b/TMessagesProj/jni/voip/webrtc/api/field_trials_view.h new file mode 100644 index 0000000000..45e6f7899b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/field_trials_view.h @@ -0,0 +1,49 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_FIELD_TRIALS_VIEW_H_ +#define API_FIELD_TRIALS_VIEW_H_ + +#include + +#include "absl/strings/string_view.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// An interface that provides the means to access field trials. +// +// Note that there are no guarantess that the meaning of a particular key-value +// mapping will be preserved over time and no announcements will be made if they +// are changed. It's up to the library user to ensure that the behavior does not +// break. +class RTC_EXPORT FieldTrialsView { + public: + virtual ~FieldTrialsView() = default; + + // Returns the configured value for `key` or an empty string if the field + // trial isn't configured. + virtual std::string Lookup(absl::string_view key) const = 0; + + bool IsEnabled(absl::string_view key) const { + return Lookup(key).find("Enabled") == 0; + } + + bool IsDisabled(absl::string_view key) const { + return Lookup(key).find("Disabled") == 0; + } +}; + +// TODO(bugs.webrtc.org/10335): Remove once all migrated to +// api/field_trials_view.h +typedef FieldTrialsView WebRtcKeyValueConfig; + +} // namespace webrtc + +#endif // API_FIELD_TRIALS_VIEW_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/frame_transformer_interface.h b/TMessagesProj/jni/voip/webrtc/api/frame_transformer_interface.h index de2c612ac0..5efd3ea051 100644 --- a/TMessagesProj/jni/voip/webrtc/api/frame_transformer_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/frame_transformer_interface.h @@ -73,6 +73,8 @@ class TransformableAudioFrameInterface : public TransformableFrameInterface { // information in the header as needed, for example to compile the list of // csrcs. virtual const RTPHeader& GetHeader() const = 0; + + virtual rtc::ArrayView GetContributingSources() const = 0; }; // Objects implement this interface to be notified with the transformed frame. 
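
Tying the two new interfaces above together: FieldTrialsView is the read-only accessor (IsEnabled/IsDisabled simply test for an "Enabled"/"Disabled" prefix of the looked-up value), and FieldTrialsRegistry layers the registration check on top while delegating storage to GetValue(). A hypothetical map-backed registry, e.g. for tests, might look like the following; it is not part of the diff.

#include <map>
#include <string>
#include <utility>
#include "absl/strings/string_view.h"
#include "api/field_trials_registry.h"

// Hypothetical helper: serves trial values from an in-memory map.
class MapFieldTrials : public webrtc::FieldTrialsRegistry {
 public:
  explicit MapFieldTrials(std::map<std::string, std::string> trials)
      : trials_(std::move(trials)) {}

 private:
  // Lookup() in the base class performs the registration check and then
  // calls this to fetch the configured value ("" when absent).
  std::string GetValue(absl::string_view key) const override {
    auto it = trials_.find(std::string(key));
    return it != trials_.end() ? it->second : std::string();
  }

  std::map<std::string, std::string> trials_;
};
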
diff --git a/TMessagesProj/jni/voip/webrtc/api/ice_transport_factory.cc b/TMessagesProj/jni/voip/webrtc/api/ice_transport_factory.cc index 26ef88bf1c..e88ac183fa 100644 --- a/TMessagesProj/jni/voip/webrtc/api/ice_transport_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/api/ice_transport_factory.cc @@ -13,6 +13,7 @@ #include #include +#include "api/make_ref_counted.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/p2p_transport_channel.h" @@ -58,18 +59,9 @@ rtc::scoped_refptr CreateIceTransport( rtc::scoped_refptr CreateIceTransport( IceTransportInit init) { - if (init.async_resolver_factory()) { - // Backwards compatibility mode - return rtc::make_ref_counted( - std::make_unique( - "", cricket::ICE_CANDIDATE_COMPONENT_RTP, init.port_allocator(), - init.async_resolver_factory(), init.event_log())); - } else { - return rtc::make_ref_counted( - cricket::P2PTransportChannel::Create( - "", cricket::ICE_CANDIDATE_COMPONENT_RTP, init.port_allocator(), - init.async_dns_resolver_factory(), init.event_log())); - } + return rtc::make_ref_counted( + cricket::P2PTransportChannel::Create( + "", cricket::ICE_CANDIDATE_COMPONENT_RTP, std::move(init))); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/ice_transport_interface.h b/TMessagesProj/jni/voip/webrtc/api/ice_transport_interface.h index a3b364c87a..2ec41aaa69 100644 --- a/TMessagesProj/jni/voip/webrtc/api/ice_transport_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/ice_transport_interface.h @@ -23,9 +23,12 @@ namespace cricket { class IceTransportInternal; class PortAllocator; +class IceControllerFactoryInterface; +class ActiveIceControllerFactoryInterface; } // namespace cricket namespace webrtc { +class FieldTrialsView; // An ICE transport, as represented to the outside world. // This object is refcounted, and is therefore alive until the @@ -74,12 +77,57 @@ struct IceTransportInit final { RtcEventLog* event_log() { return event_log_; } void set_event_log(RtcEventLog* event_log) { event_log_ = event_log; } + void set_ice_controller_factory( + cricket::IceControllerFactoryInterface* ice_controller_factory) { + ice_controller_factory_ = ice_controller_factory; + } + cricket::IceControllerFactoryInterface* ice_controller_factory() { + return ice_controller_factory_; + } + + // An active ICE controller actively manages the connection used by an ICE + // transport, in contrast with a legacy ICE controller that only picks the + // best connection to use or ping, and lets the transport decide when and + // whether to switch. + // + // Which ICE controller is used is determined based on the field trial + // "WebRTC-UseActiveIceController" as follows: + // + // 1. If the field trial is not enabled + // a. The legacy ICE controller factory is used if one is supplied. + // b. If not, a default ICE controller (BasicIceController) is + // constructed and used. + // + // 2. If the field trial is enabled + // a. If an active ICE controller factory is supplied, it is used and + // the legacy ICE controller factory is not used. + // b. If not, a default active ICE controller is used, wrapping over the + // supplied or the default legacy ICE controller. 
+ void set_active_ice_controller_factory( + cricket::ActiveIceControllerFactoryInterface* + active_ice_controller_factory) { + active_ice_controller_factory_ = active_ice_controller_factory; + } + cricket::ActiveIceControllerFactoryInterface* + active_ice_controller_factory() { + return active_ice_controller_factory_; + } + + const FieldTrialsView* field_trials() { return field_trials_; } + void set_field_trials(const FieldTrialsView* field_trials) { + field_trials_ = field_trials; + } + private: cricket::PortAllocator* port_allocator_ = nullptr; AsyncDnsResolverFactoryInterface* async_dns_resolver_factory_ = nullptr; // For backwards compatibility. Only one resolver factory can be set. AsyncResolverFactory* async_resolver_factory_ = nullptr; RtcEventLog* event_log_ = nullptr; + cricket::IceControllerFactoryInterface* ice_controller_factory_ = nullptr; + cricket::ActiveIceControllerFactoryInterface* active_ice_controller_factory_ = + nullptr; + const FieldTrialsView* field_trials_ = nullptr; // TODO(https://crbug.com/webrtc/12657): Redesign to have const members. }; diff --git a/TMessagesProj/jni/voip/webrtc/api/jsep_ice_candidate.h b/TMessagesProj/jni/voip/webrtc/api/jsep_ice_candidate.h index 40e2783457..8f47a102e7 100644 --- a/TMessagesProj/jni/voip/webrtc/api/jsep_ice_candidate.h +++ b/TMessagesProj/jni/voip/webrtc/api/jsep_ice_candidate.h @@ -22,7 +22,6 @@ #include "api/candidate.h" #include "api/jsep.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -64,6 +63,10 @@ class JsepCandidateCollection : public IceCandidateCollection { // Move constructor is defined so that a vector of JsepCandidateCollections // can be resized. JsepCandidateCollection(JsepCandidateCollection&& o); + + JsepCandidateCollection(const JsepCandidateCollection&) = delete; + JsepCandidateCollection& operator=(const JsepCandidateCollection&) = delete; + // Returns a copy of the candidate collection. JsepCandidateCollection Clone() const; size_t count() const override; @@ -80,8 +83,6 @@ class JsepCandidateCollection : public IceCandidateCollection { private: std::vector> candidates_; - - RTC_DISALLOW_COPY_AND_ASSIGN(JsepCandidateCollection); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/jsep_session_description.h b/TMessagesProj/jni/voip/webrtc/api/jsep_session_description.h index a4300eba98..0b65734ea9 100644 --- a/TMessagesProj/jni/voip/webrtc/api/jsep_session_description.h +++ b/TMessagesProj/jni/voip/webrtc/api/jsep_session_description.h @@ -22,7 +22,6 @@ #include "api/candidate.h" #include "api/jsep.h" #include "api/jsep_ice_candidate.h" -#include "rtc_base/constructor_magic.h" namespace cricket { class SessionDescription; @@ -43,6 +42,9 @@ class JsepSessionDescription : public SessionDescriptionInterface { absl::string_view session_version); virtual ~JsepSessionDescription(); + JsepSessionDescription(const JsepSessionDescription&) = delete; + JsepSessionDescription& operator=(const JsepSessionDescription&) = delete; + // Takes ownership of `description`. 
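
On the ICE changes above: CreateIceTransport(IceTransportInit) is now the single construction path (the AsyncResolverFactory branch is gone), and the init struct carries the optional ICE controller factories and field trials. A hedged sketch under the assumption that the allocator, factory, and trials are owned by the caller and outlive the transport; `set_port_allocator` is taken from the existing IceTransportInit API rather than from this hunk.

#include <utility>
#include "api/ice_transport_factory.h"
#include "api/ice_transport_interface.h"

rtc::scoped_refptr<webrtc::IceTransportInterface> MakeIceTransport(
    cricket::PortAllocator* allocator,
    cricket::ActiveIceControllerFactoryInterface* active_factory,
    const webrtc::FieldTrialsView* trials) {
  webrtc::IceTransportInit init;
  init.set_port_allocator(allocator);
  // Only consulted when "WebRTC-UseActiveIceController" is enabled in
  // `trials`, per the selection rules documented in the header above.
  init.set_active_ice_controller_factory(active_factory);
  init.set_field_trials(trials);
  return webrtc::CreateIceTransport(std::move(init));
}
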
bool Initialize(std::unique_ptr description, const std::string& session_id, @@ -82,8 +84,6 @@ class JsepSessionDescription : public SessionDescriptionInterface { bool GetMediasectionIndex(const IceCandidateInterface* candidate, size_t* index); int GetMediasectionIndex(const cricket::Candidate& candidate); - - RTC_DISALLOW_COPY_AND_ASSIGN(JsepSessionDescription); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/make_ref_counted.h b/TMessagesProj/jni/voip/webrtc/api/make_ref_counted.h new file mode 100644 index 0000000000..cc8871784a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/make_ref_counted.h @@ -0,0 +1,119 @@ +/* + * Copyright 2022 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_MAKE_REF_COUNTED_H_ +#define API_MAKE_REF_COUNTED_H_ + +#include + +#include "rtc_base/ref_counted_object.h" + +namespace rtc { + +namespace webrtc_make_ref_counted_internal { +// Determines if the given class has AddRef and Release methods. +template +class HasAddRefAndRelease { + private: + template ().AddRef())* = nullptr, + decltype(std::declval().Release())* = nullptr> + static int Test(int); + template + static char Test(...); + + public: + static constexpr bool value = std::is_same_v(0)), int>; +}; +} // namespace webrtc_make_ref_counted_internal + +// General utilities for constructing a reference counted class and the +// appropriate reference count implementation for that class. +// +// These utilities select either the `RefCountedObject` implementation or +// `FinalRefCountedObject` depending on whether the to-be-shared class is +// derived from the RefCountInterface interface or not (respectively). + +// `make_ref_counted`: +// +// Use this when you want to construct a reference counted object of type T and +// get a `scoped_refptr<>` back. Example: +// +// auto p = make_ref_counted("bar", 123); +// +// For a class that inherits from RefCountInterface, this is equivalent to: +// +// auto p = scoped_refptr(new RefCountedObject("bar", 123)); +// +// If the class does not inherit from RefCountInterface, but does have +// AddRef/Release methods (so a T* is convertible to rtc::scoped_refptr), this +// is equivalent to just +// +// auto p = scoped_refptr(new Foo("bar", 123)); +// +// Otherwise, the example is equivalent to: +// +// auto p = scoped_refptr>( +// new FinalRefCountedObject("bar", 123)); +// +// In these cases, `make_ref_counted` reduces the amount of boilerplate code but +// also helps with the most commonly intended usage of RefCountedObject whereby +// methods for reference counting, are virtual and designed to satisfy the need +// of an interface. When such a need does not exist, it is more efficient to use +// the `FinalRefCountedObject` template, which does not add the vtable overhead. +// +// Note that in some cases, using RefCountedObject directly may still be what's +// needed. + +// `make_ref_counted` for abstract classes that are convertible to +// RefCountInterface. The is_abstract requirement rejects classes that inherit +// both RefCountInterface and RefCounted object, which is a a discouraged +// pattern, and would result in double inheritance of RefCountedObject if this +// template was applied. 
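
A compact illustration of the last case described in the make_ref_counted comment above (a class with neither RefCountInterface nor its own AddRef/Release); Foo here is purely hypothetical.

#include <string>
#include <utility>
#include "api/make_ref_counted.h"

// Hypothetical payload type with no ref counting of its own.
class Foo {
 public:
  Foo(std::string name, int value) : name_(std::move(name)), value_(value) {}

 private:
  std::string name_;
  int value_;
};

void UseFoo() {
  // Deduces the FinalRefCountedObject<Foo> wrapper, so `p` is a
  // scoped_refptr to it, with non-virtual ref counting.
  auto p = rtc::make_ref_counted<Foo>("bar", 123);
  (void)p;
}
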
+template < + typename T, + typename... Args, + typename std::enable_if && + std::is_abstract_v, + T>::type* = nullptr> +scoped_refptr make_ref_counted(Args&&... args) { + return scoped_refptr(new RefCountedObject(std::forward(args)...)); +} + +// `make_ref_counted` for complete classes that are not convertible to +// RefCountInterface and already carry a ref count. +template < + typename T, + typename... Args, + typename std::enable_if< + !std::is_convertible_v && + webrtc_make_ref_counted_internal::HasAddRefAndRelease::value, + T>::type* = nullptr> +scoped_refptr make_ref_counted(Args&&... args) { + return scoped_refptr(new T(std::forward(args)...)); +} + +// `make_ref_counted` for complete classes that are not convertible to +// RefCountInterface and have no ref count of their own. +template < + typename T, + typename... Args, + typename std::enable_if< + !std::is_convertible_v && + !webrtc_make_ref_counted_internal::HasAddRefAndRelease::value, + + T>::type* = nullptr> +scoped_refptr> make_ref_counted(Args&&... args) { + return scoped_refptr>( + new FinalRefCountedObject(std::forward(args)...)); +} + +} // namespace rtc + +#endif // API_MAKE_REF_COUNTED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/media_stream_interface.h b/TMessagesProj/jni/voip/webrtc/api/media_stream_interface.h index d61dd98498..9d336739e4 100644 --- a/TMessagesProj/jni/voip/webrtc/api/media_stream_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/media_stream_interface.h @@ -333,10 +333,39 @@ class MediaStreamInterface : public rtc::RefCountInterface, virtual rtc::scoped_refptr FindVideoTrack( const std::string& track_id) = 0; - virtual bool AddTrack(AudioTrackInterface* track) = 0; - virtual bool AddTrack(VideoTrackInterface* track) = 0; - virtual bool RemoveTrack(AudioTrackInterface* track) = 0; - virtual bool RemoveTrack(VideoTrackInterface* track) = 0; + // Takes ownership of added tracks. + // Note: Default implementations are for avoiding link time errors in + // implementations that mock this API. + // TODO(bugs.webrtc.org/13980): Remove default implementations. + virtual bool AddTrack(rtc::scoped_refptr track) { + RTC_CHECK_NOTREACHED(); + } + virtual bool AddTrack(rtc::scoped_refptr track) { + RTC_CHECK_NOTREACHED(); + } + virtual bool RemoveTrack(rtc::scoped_refptr track) { + RTC_CHECK_NOTREACHED(); + } + virtual bool RemoveTrack(rtc::scoped_refptr track) { + RTC_CHECK_NOTREACHED(); + } + // Deprecated: Should use scoped_refptr versions rather than pointers. 
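
On the MediaStreamInterface change above: the new AddTrack/RemoveTrack overloads take ownership via scoped_refptr, and the raw-pointer versions that follow are kept only as deprecated shims forwarding to them. An updated call site simply hands over the reference-counted pointer, e.g.:

#include <utility>
#include "api/media_stream_interface.h"
#include "api/scoped_refptr.h"

bool AttachAudio(rtc::scoped_refptr<webrtc::MediaStreamInterface> stream,
                 rtc::scoped_refptr<webrtc::AudioTrackInterface> track) {
  // Resolves to the scoped_refptr overload; no raw pointers involved.
  return stream->AddTrack(std::move(track));
}
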
+ [[deprecated("Pass a scoped_refptr")]] virtual bool AddTrack( + AudioTrackInterface* track) { + return AddTrack(rtc::scoped_refptr(track)); + } + [[deprecated("Pass a scoped_refptr")]] virtual bool AddTrack( + VideoTrackInterface* track) { + return AddTrack(rtc::scoped_refptr(track)); + } + [[deprecated("Pass a scoped_refptr")]] virtual bool RemoveTrack( + AudioTrackInterface* track) { + return RemoveTrack(rtc::scoped_refptr(track)); + } + [[deprecated("Pass a scoped_refptr")]] virtual bool RemoveTrack( + VideoTrackInterface* track) { + return RemoveTrack(rtc::scoped_refptr(track)); + } protected: ~MediaStreamInterface() override = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/media_stream_track.h b/TMessagesProj/jni/voip/webrtc/api/media_stream_track.h index 738f034143..316dd788ef 100644 --- a/TMessagesProj/jni/voip/webrtc/api/media_stream_track.h +++ b/TMessagesProj/jni/voip/webrtc/api/media_stream_track.h @@ -13,6 +13,7 @@ #include +#include "absl/strings/string_view.h" #include "api/media_stream_interface.h" #include "api/notifier.h" @@ -41,7 +42,7 @@ class MediaStreamTrack : public Notifier { void set_ended() { set_state(MediaStreamTrackInterface::TrackState::kEnded); } protected: - explicit MediaStreamTrack(const std::string& id) + explicit MediaStreamTrack(absl::string_view id) : enabled_(true), id_(id), state_(MediaStreamTrackInterface::kLive) {} bool set_state(MediaStreamTrackInterface::TrackState new_state) { diff --git a/TMessagesProj/jni/voip/webrtc/api/metronome/metronome.h b/TMessagesProj/jni/voip/webrtc/api/metronome/metronome.h new file mode 100644 index 0000000000..fc5f350db2 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/metronome/metronome.h @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_METRONOME_METRONOME_H_ +#define API_METRONOME_METRONOME_H_ + +#include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// The Metronome posts OnTick() on task queues provided by its listeners' task +// queue periodically. The metronome can be used as an alternative to using +// PostDelayedTask on a thread or task queue for coalescing work and reducing +// the number of idle-wakeups. +// +// Listeners can be added and removed from any sequence, but it is illegal to +// remove a listener from an OnTick invocation. +// +// The metronome concept is still under experimentation, and may not be availble +// in all platforms or applications. See https://crbug.com/1253787 for more +// details. +// +// Metronome implementations must be thread-safe. +class RTC_EXPORT Metronome { + public: + class RTC_EXPORT TickListener { + public: + virtual ~TickListener() = default; + + // OnTick is run on the task queue provided by OnTickTaskQueue each time the + // metronome ticks. + virtual void OnTick() = 0; + + // The task queue that OnTick will run on. Must not be null. + virtual TaskQueueBase* OnTickTaskQueue() = 0; + }; + + virtual ~Metronome() = default; + + // Adds a tick listener to the metronome. Once this method has returned + // OnTick will be invoked on each metronome tick. 
A listener may + // only be added to the metronome once. + virtual void AddListener(TickListener* listener) = 0; + + // Removes the tick listener from the metronome. Once this method has returned + // OnTick will never be called again. This method must not be called from + // within OnTick. + virtual void RemoveListener(TickListener* listener) = 0; + + // Returns the current tick period of the metronome. + virtual TimeDelta TickPeriod() const = 0; +}; + +} // namespace webrtc + +#endif // API_METRONOME_METRONOME_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/neteq/neteq.h b/TMessagesProj/jni/voip/webrtc/api/neteq/neteq.h index 675742a1ce..ffc3958345 100644 --- a/TMessagesProj/jni/voip/webrtc/api/neteq/neteq.h +++ b/TMessagesProj/jni/voip/webrtc/api/neteq/neteq.h @@ -67,11 +67,13 @@ struct NetEqLifetimeStatistics { uint64_t jitter_buffer_delay_ms = 0; uint64_t jitter_buffer_emitted_count = 0; uint64_t jitter_buffer_target_delay_ms = 0; + uint64_t jitter_buffer_minimum_delay_ms = 0; uint64_t inserted_samples_for_deceleration = 0; uint64_t removed_samples_for_acceleration = 0; uint64_t silent_concealed_samples = 0; uint64_t fec_packets_received = 0; uint64_t fec_packets_discarded = 0; + uint64_t packets_discarded = 0; // Below stats are not part of the spec. uint64_t delayed_packet_outage_samples = 0; // This is sum of relative packet arrival delays of received packets so far. @@ -88,6 +90,8 @@ struct NetEqLifetimeStatistics { // these events. int32_t interruption_count = 0; int32_t total_interruption_duration_ms = 0; + // Total number of comfort noise samples generated during DTX. + uint64_t generated_noise_samples = 0; }; // Metrics that describe the operations performed in NetEq, and the internal @@ -100,8 +104,6 @@ struct NetEqOperationsAndState { uint64_t accelerate_samples = 0; // Count of the number of buffer flushes. uint64_t packet_buffer_flushes = 0; - // The number of primary packets that were discarded. - uint64_t discarded_primary_packets = 0; // The statistics below are not cumulative. // The waiting time of the last decoded packet. uint64_t last_waiting_time_ms = 0; @@ -311,12 +313,6 @@ class NetEq { virtual std::vector GetNackList( int64_t round_trip_time_ms) const = 0; - // Returns a vector containing the timestamps of the packets that were decoded - // in the last GetAudio call. If no packets were decoded in the last call, the - // vector is empty. - // Mainly intended for testing. - virtual std::vector LastDecodedTimestamps() const = 0; - // Returns the length of the audio yet to play in the sync buffer. // Mainly intended for testing. virtual int SyncBufferSizeMs() const = 0; diff --git a/TMessagesProj/jni/voip/webrtc/api/neteq/neteq_controller.h b/TMessagesProj/jni/voip/webrtc/api/neteq/neteq_controller.h index 2f203f4344..f0101d3d1a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/neteq/neteq_controller.h +++ b/TMessagesProj/jni/voip/webrtc/api/neteq/neteq_controller.h @@ -163,6 +163,12 @@ class NetEqController { // Returns the target buffer level in ms. virtual int TargetLevelMs() const = 0; + // Returns the target buffer level in ms as it would be if no minimum or + // maximum delay was set. + // TODO(bugs.webrtc.org/14270): Make pure virtual once all implementations are + // updated. + virtual int UnlimitedTargetLevelMs() const { return 0; } + // Notify the NetEqController that a packet has arrived. Returns the relative // arrival delay, if it can be computed. 
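
Looking back at the new Metronome interface a few hunks above: listeners supply both the callback and the task queue it must run on, and they must not remove themselves from within OnTick(). A hypothetical listener sketch; the Metronome implementation itself is assumed to be provided elsewhere, e.g. by the embedder.

#include "api/metronome/metronome.h"
#include "api/task_queue/task_queue_base.h"

// Hypothetical listener that coalesces periodic work onto `queue`.
class DecodeTickListener : public webrtc::Metronome::TickListener {
 public:
  explicit DecodeTickListener(webrtc::TaskQueueBase* queue) : queue_(queue) {}

  void OnTick() override {
    // Runs on `queue_` once per metronome tick; do the batched work here.
  }
  webrtc::TaskQueueBase* OnTickTaskQueue() override { return queue_; }

 private:
  webrtc::TaskQueueBase* const queue_;
};

void Attach(webrtc::Metronome& metronome, DecodeTickListener& listener) {
  metronome.AddListener(&listener);
  // ... later, from outside OnTick():
  // metronome.RemoveListener(&listener);
}
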
virtual absl::optional PacketArrived(int fs_hz, @@ -170,7 +176,7 @@ class NetEqController { const PacketArrivedInfo& info) = 0; // Notify the NetEqController that we are currently in muted state. - // TODO(ivoc): Make pure virtual when downstream is updated. + // TODO(bugs.webrtc.org/14270): Make pure virtual when downstream is updated. virtual void NotifyMutedState() {} // Returns true if a peak was found. diff --git a/TMessagesProj/jni/voip/webrtc/api/notifier.h b/TMessagesProj/jni/voip/webrtc/api/notifier.h index c03b1049eb..fc2480e00a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/notifier.h +++ b/TMessagesProj/jni/voip/webrtc/api/notifier.h @@ -14,7 +14,9 @@ #include #include "api/media_stream_interface.h" +#include "api/sequence_checker.h" #include "rtc_base/checks.h" +#include "rtc_base/system/no_unique_address.h" namespace webrtc { @@ -23,14 +25,16 @@ namespace webrtc { template class Notifier : public T { public: - Notifier() {} + Notifier() { sequence_checker_.Detach(); } virtual void RegisterObserver(ObserverInterface* observer) { + RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK(observer != nullptr); observers_.push_back(observer); } virtual void UnregisterObserver(ObserverInterface* observer) { + RTC_DCHECK_RUN_ON(&sequence_checker_); for (std::list::iterator it = observers_.begin(); it != observers_.end(); it++) { if (*it == observer) { @@ -41,6 +45,7 @@ class Notifier : public T { } void FireOnChanged() { + RTC_DCHECK_RUN_ON(&sequence_checker_); // Copy the list of observers to avoid a crash if the observer object // unregisters as a result of the OnChanged() call. If the same list is used // UnregisterObserver will affect the list make the iterator invalid. @@ -52,7 +57,10 @@ class Notifier : public T { } protected: - std::list observers_; + std::list observers_ RTC_GUARDED_BY(sequence_checker_); + + private: + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/numerics/samples_stats_counter.cc b/TMessagesProj/jni/voip/webrtc/api/numerics/samples_stats_counter.cc index 36871a6713..4eb0cde299 100644 --- a/TMessagesProj/jni/voip/webrtc/api/numerics/samples_stats_counter.cc +++ b/TMessagesProj/jni/voip/webrtc/api/numerics/samples_stats_counter.cc @@ -19,6 +19,10 @@ namespace webrtc { SamplesStatsCounter::SamplesStatsCounter() = default; +SamplesStatsCounter::SamplesStatsCounter(size_t expected_samples_count) { + samples_.reserve(expected_samples_count); +} + SamplesStatsCounter::~SamplesStatsCounter() = default; SamplesStatsCounter::SamplesStatsCounter(const SamplesStatsCounter&) = default; SamplesStatsCounter& SamplesStatsCounter::operator=( diff --git a/TMessagesProj/jni/voip/webrtc/api/numerics/samples_stats_counter.h b/TMessagesProj/jni/voip/webrtc/api/numerics/samples_stats_counter.h index 16d5d2a891..9d72296317 100644 --- a/TMessagesProj/jni/voip/webrtc/api/numerics/samples_stats_counter.h +++ b/TMessagesProj/jni/voip/webrtc/api/numerics/samples_stats_counter.h @@ -11,6 +11,8 @@ #ifndef API_NUMERICS_SAMPLES_STATS_COUNTER_H_ #define API_NUMERICS_SAMPLES_STATS_COUNTER_H_ +#include +#include #include #include "api/array_view.h" @@ -27,9 +29,12 @@ class SamplesStatsCounter { struct StatsSample { double value; Timestamp time; + // Sample's specific metadata. 
+ std::map metadata; }; SamplesStatsCounter(); + explicit SamplesStatsCounter(size_t expected_samples_count); ~SamplesStatsCounter(); SamplesStatsCounter(const SamplesStatsCounter&); SamplesStatsCounter& operator=(const SamplesStatsCounter&); diff --git a/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.cc b/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.cc index 230731c42d..d01d58d32b 100644 --- a/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.cc +++ b/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.cc @@ -41,17 +41,6 @@ PeerConnectionInterface::RTCConfiguration::RTCConfiguration( PeerConnectionInterface::RTCConfiguration::~RTCConfiguration() = default; -RTCError PeerConnectionInterface::RemoveTrackNew( - rtc::scoped_refptr sender) { - return RTCError(RemoveTrack(sender) ? RTCErrorType::NONE - : RTCErrorType::INTERNAL_ERROR); -} - -RTCError PeerConnectionInterface::SetConfiguration( - const PeerConnectionInterface::RTCConfiguration& config) { - return RTCError(); -} - PeerConnectionDependencies::PeerConnectionDependencies( PeerConnectionObserver* observer_in) : observer(observer_in) {} diff --git a/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h b/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h index 6d42b848b6..55300a58f1 100644 --- a/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h @@ -91,10 +91,12 @@ #include "api/data_channel_interface.h" #include "api/dtls_transport_interface.h" #include "api/fec_controller.h" +#include "api/field_trials_view.h" #include "api/ice_transport_interface.h" #include "api/jsep.h" #include "api/media_stream_interface.h" #include "api/media_types.h" +#include "api/metronome/metronome.h" #include "api/neteq/neteq_factory.h" #include "api/network_state_predictor.h" #include "api/packet_socket_factory.h" @@ -116,7 +118,6 @@ #include "api/transport/enums.h" #include "api/transport/network_control.h" #include "api/transport/sctp_transport_factory_interface.h" -#include "api/transport/webrtc_key_value_config.h" #include "api/turn_customizer.h" #include "api/video/video_bitrate_allocator_factory.h" #include "call/rtp_transport_controller_send_factory_interface.h" @@ -124,9 +125,8 @@ #include "media/base/media_engine.h" // TODO(bugs.webrtc.org/7447): We plan to provide a way to let applications // inject a PacketSocketFactory and/or NetworkManager, and not expose -// PortAllocator in the PeerConnection api. This will let us remove nogncheck. -#include "p2p/base/port.h" // nogncheck -#include "p2p/base/port_allocator.h" // nogncheck +// PortAllocator in the PeerConnection api. +#include "p2p/base/port_allocator.h" #include "rtc_base/network.h" #include "rtc_base/network_constants.h" #include "rtc_base/network_monitor_factory.h" @@ -169,9 +169,10 @@ class StatsObserver : public rtc::RefCountInterface { }; enum class SdpSemantics { + // TODO(https://crbug.com/webrtc/13528): Remove support for kPlanB. kPlanB_DEPRECATED, kPlanB [[deprecated]] = kPlanB_DEPRECATED, - kUnifiedPlan + kUnifiedPlan, }; class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { @@ -426,8 +427,7 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { ////////////////////////////////////////////////////////////////////////// // If set to true, don't gather IPv6 ICE candidates. - // TODO(deadbeef): Remove this? 
IPv6 support has long stopped being
- experimental
+ // TODO(https://crbug.com/webrtc/14608): Delete this flag.
 bool disable_ipv6 = false;
 // If set to true, don't gather IPv6 ICE candidates on Wi-Fi.
@@ -455,11 +455,14 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
 // Use new combined audio/video bandwidth estimation?
 absl::optional<bool> combined_audio_video_bwe;
+#if defined(WEBRTC_FUCHSIA)
+ // TODO(bugs.webrtc.org/11066): Remove entirely once Fuchsia does not use.
 // TODO(bugs.webrtc.org/9891) - Move to crypto_options
 // Can be used to disable DTLS-SRTP. This should never be done, but can be
 // useful for testing purposes, for example in setting up a loopback call
 // with a single PeerConnection.
 absl::optional<bool> enable_dtls_srtp;
+#endif
 /////////////////////////////////////////////////
 // The below fields are not part of the standard.
@@ -486,10 +489,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
 // The minimum delay in milliseconds for the audio jitter buffer.
 int audio_jitter_buffer_min_delay_ms = 0;
- // Whether the audio jitter buffer adapts the delay to retransmitted
- // packets.
- bool audio_jitter_buffer_enable_rtx_handling = false;
-
 // Timeout in milliseconds before an ICE candidate pair is considered to be
 // "not receiving", after which a lower priority candidate pair may be
 // selected.
@@ -622,27 +621,26 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
 // cost.
 absl::optional<rtc::AdapterType> network_preference;
- // Configure the SDP semantics used by this PeerConnection. Note that the
- // WebRTC 1.0 specification requires kUnifiedPlan semantics. The
- // RtpTransceiver API is only available with kUnifiedPlan semantics.
+ // Configure the SDP semantics used by this PeerConnection. By default, this
+ // is Unified Plan which is compliant to the WebRTC 1.0 specification. It is
+ // possible to overwrite this to the deprecated Plan B SDP format, but note
+ // that kPlanB will be deleted at some future date, see
+ // https://crbug.com/webrtc/13528.
 //
- // kUnifiedPlan will cause PeerConnection to create offers and answers with
- // multiple m= sections where each m= section maps to one RtpSender and one
- // RtpReceiver (an RtpTransceiver), either both audio or both video. This
- // will also cause PeerConnection to ignore all but the first a=ssrc lines
- // that form a Plan B stream.
+ // kUnifiedPlan will cause the PeerConnection to create offers and answers
+ // with multiple m= sections where each m= section maps to one RtpSender and
+ // one RtpReceiver (an RtpTransceiver), either both audio or both video.
+ // This will also cause the PeerConnection to ignore all but the first
+ // a=ssrc lines that form a Plan B stream (if the PeerConnection is given
+ // Plan B SDP to process).
+ //
- // kPlanB will cause PeerConnection to create offers and answers with at
+ // kPlanB will cause the PeerConnection to create offers and answers with at
 // most one audio and one video m= section with multiple RtpSenders and
 // RtpReceivers specified as multiple a=ssrc lines within the section. This
 // will also cause PeerConnection to ignore all but the first m= section of
- // the same media type.
- //
- // For users who have to interwork with legacy WebRTC implementations,
- // it is possible to specify kPlanB until the code is finally removed.
- //
- // For all other users, specify kUnifiedPlan.
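For illustration (not part of the patch), what the default flip in this hunk means for callers; MakeConfig() and its argument are hypothetical application code.

#include "api/peer_connection_interface.h"

webrtc::PeerConnectionInterface::RTCConfiguration MakeConfig(bool need_plan_b) {
  // A freshly constructed configuration now selects Unified Plan by default.
  webrtc::PeerConnectionInterface::RTCConfiguration config;
  if (need_plan_b) {
    // Legacy code that still requires Plan B must opt in explicitly and should
    // plan for its removal (https://crbug.com/webrtc/13528).
    config.sdp_semantics = webrtc::SdpSemantics::kPlanB_DEPRECATED;
  }
  return config;
}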
- SdpSemantics sdp_semantics = SdpSemantics::kPlanB_DEPRECATED; + // the same media type (if the PeerConnection is given Unified Plan SDP to + // process). + SdpSemantics sdp_semantics = SdpSemantics::kUnifiedPlan; // TODO(bugs.webrtc.org/9891) - Move to crypto_options or remove. // Actively reset the SRTP parameters whenever the DTLS transports @@ -805,23 +803,35 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { rtc::scoped_refptr track, const std::vector& stream_ids) = 0; - // Remove an RtpSender from this PeerConnection. - // Returns true on success. - // TODO(steveanton): Replace with signature that returns RTCError. - virtual bool RemoveTrack(RtpSenderInterface* sender) = 0; + // Add a new MediaStreamTrack as above, but with an additional parameter, + // `init_send_encodings` : initial RtpEncodingParameters for RtpSender, + // similar to init_send_encodings in RtpTransceiverInit. + // Note that a new transceiver will always be created. + // + virtual RTCErrorOr> AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector& init_send_encodings) = 0; - // Plan B semantics: Removes the RtpSender from this PeerConnection. - // Unified Plan semantics: Stop sending on the RtpSender and mark the + // Removes the connection between a MediaStreamTrack and the PeerConnection. + // Stops sending on the RtpSender and marks the // corresponding RtpTransceiver direction as no longer sending. + // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-removetrack // // Errors: // - INVALID_PARAMETER: `sender` is null or (Plan B only) the sender is not // associated with this PeerConnection. // - INVALID_STATE: PeerConnection is closed. + // + // Plan B semantics: Removes the RtpSender from this PeerConnection. + // // TODO(bugs.webrtc.org/9534): Rename to RemoveTrack once the other signature - // is removed. - virtual RTCError RemoveTrackNew( - rtc::scoped_refptr sender); + // is removed; remove default implementation once upstream is updated. + virtual RTCError RemoveTrackOrError( + rtc::scoped_refptr sender) { + RTC_CHECK_NOTREACHED(); + return RTCError(); + } // AddTransceiver creates a new RtpTransceiver and adds it to the set of // transceivers. Adding a transceiver will cause future calls to CreateOffer @@ -949,8 +959,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { rtc::scoped_refptr selector, rtc::scoped_refptr callback) = 0; // Clear cached stats in the RTCStatsCollector. - // Exposed for testing while waiting for automatic cache clear to work. - // https://bugs.webrtc.org/8693 virtual void ClearStatsCache() {} // Create a data channel with the provided config, or default config if none @@ -1105,11 +1113,8 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // - SYNTAX_ERROR if parsing an ICE server URL failed. // - INVALID_PARAMETER if a TURN server is missing `username` or `password`. // - INTERNAL_ERROR if an unexpected error occurred. - // - // TODO(nisse): Make this pure virtual once all Chrome subclasses of - // PeerConnectionInterface implement it. virtual RTCError SetConfiguration( - const PeerConnectionInterface::RTCConfiguration& config); + const PeerConnectionInterface::RTCConfiguration& config) = 0; // Provides a remote candidate to the ICE Agent. // A copy of the `candidate` will be created and added to the remote @@ -1295,14 +1300,6 @@ class PeerConnectionObserver { // A new ICE candidate has been gathered. 
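For illustration, a sketch of migrating a caller from the removed bool-returning RemoveTrack()/RemoveTrackNew() to the RemoveTrackOrError() method above; DropSender() and its arguments are hypothetical.

#include <utility>

#include "api/peer_connection_interface.h"
#include "rtc_base/logging.h"

void DropSender(webrtc::PeerConnectionInterface* pc,
                rtc::scoped_refptr<webrtc::RtpSenderInterface> sender) {
  webrtc::RTCError error = pc->RemoveTrackOrError(std::move(sender));
  if (!error.ok()) {
    RTC_LOG(LS_ERROR) << "RemoveTrack failed: " << error.message();
  }
}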
virtual void OnIceCandidate(const IceCandidateInterface* candidate) = 0; - // Gathering of an ICE candidate failed. - // See https://w3c.github.io/webrtc-pc/#event-icecandidateerror - // `host_candidate` is a stringified socket address. - virtual void OnIceCandidateError(const std::string& host_candidate, - const std::string& url, - int error_code, - const std::string& error_text) {} - // Gathering of an ICE candidate failed. // See https://w3c.github.io/webrtc-pc/#event-icecandidateerror virtual void OnIceCandidateError(const std::string& address, @@ -1386,10 +1383,9 @@ struct RTC_EXPORT PeerConnectionDependencies final { PeerConnectionObserver* observer = nullptr; // Optional dependencies // TODO(bugs.webrtc.org/7447): remove port allocator once downstream is - // updated. For now, you can only set one of allocator and - // packet_socket_factory, not both. + // updated. The recommended way to inject networking components is to pass a + // PacketSocketFactory when creating the PeerConnectionFactory. std::unique_ptr allocator; - std::unique_ptr packet_socket_factory; // Factory for creating resolvers that look up hostnames in DNS std::unique_ptr async_dns_resolver_factory; @@ -1400,6 +1396,9 @@ struct RTC_EXPORT PeerConnectionDependencies final { std::unique_ptr tls_cert_verifier; std::unique_ptr video_bitrate_allocator_factory; + // Optional field trials to use. + // Overrides those from PeerConnectionFactoryDependencies. + std::unique_ptr trials; }; // PeerConnectionFactoryDependencies holds all of the PeerConnectionFactory @@ -1425,6 +1424,10 @@ struct RTC_EXPORT PeerConnectionFactoryDependencies final { rtc::Thread* network_thread = nullptr; rtc::Thread* worker_thread = nullptr; rtc::Thread* signaling_thread = nullptr; + rtc::SocketFactory* socket_factory = nullptr; + // The `packet_socket_factory` will only be used if CreatePeerConnection is + // called without a `port_allocator`. + std::unique_ptr packet_socket_factory; std::unique_ptr task_queue_factory; std::unique_ptr media_engine; std::unique_ptr call_factory; @@ -1433,15 +1436,19 @@ struct RTC_EXPORT PeerConnectionFactoryDependencies final { std::unique_ptr network_state_predictor_factory; std::unique_ptr network_controller_factory; - // This will only be used if CreatePeerConnection is called without a - // `port_allocator`, causing the default allocator and network manager to be - // used. + // The `network_manager` will only be used if CreatePeerConnection is called + // without a `port_allocator`, causing the default allocator and network + // manager to be used. + std::unique_ptr network_manager; + // The `network_monitor_factory` will only be used if CreatePeerConnection is + // called without a `port_allocator`, and the above `network_manager' is null. std::unique_ptr network_monitor_factory; std::unique_ptr neteq_factory; std::unique_ptr sctp_factory; - std::unique_ptr trials; + std::unique_ptr trials; std::unique_ptr transport_controller_send_factory; + std::unique_ptr metronome; }; // PeerConnectionFactoryInterface is the factory interface used for creating @@ -1615,7 +1622,8 @@ inline constexpr absl::string_view PeerConnectionInterface::AsString( case SignalingState::kClosed: return "closed"; } - RTC_CHECK_NOTREACHED(); + // This cannot happen. + // Not using "RTC_CHECK_NOTREACHED()" because AsString() is constexpr. 
return ""; } @@ -1630,7 +1638,8 @@ inline constexpr absl::string_view PeerConnectionInterface::AsString( case IceGatheringState::kIceGatheringComplete: return "complete"; } - RTC_CHECK_NOTREACHED(); + // This cannot happen. + // Not using "RTC_CHECK_NOTREACHED()" because AsString() is constexpr. return ""; } @@ -1651,7 +1660,8 @@ inline constexpr absl::string_view PeerConnectionInterface::AsString( case PeerConnectionState::kClosed: return "closed"; } - RTC_CHECK_NOTREACHED(); + // This cannot happen. + // Not using "RTC_CHECK_NOTREACHED()" because AsString() is constexpr. return ""; } @@ -1673,10 +1683,12 @@ inline constexpr absl::string_view PeerConnectionInterface::AsString( case kIceConnectionClosed: return "closed"; case kIceConnectionMax: - RTC_CHECK_NOTREACHED(); + // This cannot happen. + // Not using "RTC_CHECK_NOTREACHED()" because AsString() is constexpr. return ""; } - RTC_CHECK_NOTREACHED(); + // This cannot happen. + // Not using "RTC_CHECK_NOTREACHED()" because AsString() is constexpr. return ""; } diff --git a/TMessagesProj/jni/voip/webrtc/api/ref_counted_base.h b/TMessagesProj/jni/voip/webrtc/api/ref_counted_base.h index 931cb20762..f20228b740 100644 --- a/TMessagesProj/jni/voip/webrtc/api/ref_counted_base.h +++ b/TMessagesProj/jni/voip/webrtc/api/ref_counted_base.h @@ -12,7 +12,6 @@ #include -#include "rtc_base/constructor_magic.h" #include "rtc_base/ref_counter.h" namespace rtc { @@ -21,6 +20,9 @@ class RefCountedBase { public: RefCountedBase() = default; + RefCountedBase(const RefCountedBase&) = delete; + RefCountedBase& operator=(const RefCountedBase&) = delete; + void AddRef() const { ref_count_.IncRef(); } RefCountReleaseStatus Release() const { const auto status = ref_count_.DecRef(); @@ -39,8 +41,6 @@ class RefCountedBase { private: mutable webrtc::webrtc_impl::RefCounter ref_count_{0}; - - RTC_DISALLOW_COPY_AND_ASSIGN(RefCountedBase); }; // Template based version of `RefCountedBase` for simple implementations that do @@ -61,6 +61,9 @@ class RefCountedNonVirtual { public: RefCountedNonVirtual() = default; + RefCountedNonVirtual(const RefCountedNonVirtual&) = delete; + RefCountedNonVirtual& operator=(const RefCountedNonVirtual&) = delete; + void AddRef() const { ref_count_.IncRef(); } RefCountReleaseStatus Release() const { // If you run into this assert, T has virtual methods. There are two @@ -88,8 +91,6 @@ class RefCountedNonVirtual { private: mutable webrtc::webrtc_impl::RefCounter ref_count_{0}; - - RTC_DISALLOW_COPY_AND_ASSIGN(RefCountedNonVirtual); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/api/rtc_error.h b/TMessagesProj/jni/voip/webrtc/api/rtc_error.h index 1376793a0b..42ceed18d9 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtc_error.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtc_error.h @@ -244,7 +244,7 @@ class RTCErrorOr { // // REQUIRES: !error.ok(). This requirement is DCHECKed. RTCErrorOr(RTCError&& error) : error_(std::move(error)) { // NOLINT - RTC_DCHECK(!error.ok()); + RTC_DCHECK(!error_.ok()); } // Constructs a new RTCErrorOr with the given value. After calling this diff --git a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event.h b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event.h index 51db8f0b4d..8697a25a74 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event.h @@ -27,7 +27,7 @@ class RtcEvent { // of Type. 
This leaks the information of existing subclasses into the // superclass, but the *actual* information - rtclog::StreamConfig, etc. - // is kept separate. - enum class Type { + enum class Type : uint32_t { AlrStateEvent, RouteChangeEvent, RemoteEstimateEvent, @@ -53,7 +53,9 @@ class RtcEvent { GenericPacketSent, GenericPacketReceived, GenericAckReceived, - FrameDecoded + FrameDecoded, + BeginV3Log = 0x2501580, + EndV3Log = 0x2501581 }; RtcEvent(); @@ -63,6 +65,13 @@ class RtcEvent { virtual bool IsConfigEvent() const = 0; + // Events are grouped by Type before being encoded. + // Optionally, `GetGroupKey` can be overloaded to group the + // events by a secondary key (in addition to the event type.) + // This can, in some cases, improve compression efficiency + // e.g. by grouping events by SSRC. + virtual uint32_t GetGroupKey() const { return 0; } + int64_t timestamp_ms() const { return timestamp_us_ / 1000; } int64_t timestamp_us() const { return timestamp_us_; } diff --git a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log.h b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log.h index 86613ddd85..7b42cdc028 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log.h @@ -29,7 +29,7 @@ class RtcEventLog { // TODO(eladalon): Get rid of the legacy encoding and this enum once all // clients have migrated to the new format. - enum class EncodingType { Legacy, NewFormat }; + enum class EncodingType { Legacy, NewFormat, ProtoFree }; virtual ~RtcEventLog() = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory.cc b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory.cc index fdf267b7ba..38a0e4ebd2 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory.cc @@ -27,8 +27,8 @@ RtcEventLogFactory::RtcEventLogFactory(TaskQueueFactory* task_queue_factory) RTC_DCHECK(task_queue_factory_); } -std::unique_ptr RtcEventLogFactory::CreateRtcEventLog( - RtcEventLog::EncodingType encoding_type) { +std::unique_ptr RtcEventLogFactory::Create( + RtcEventLog::EncodingType encoding_type) const { #ifdef WEBRTC_ENABLE_RTC_EVENT_LOG if (field_trial::IsEnabled("WebRTC-RtcEventLogKillSwitch")) { return std::make_unique(); @@ -39,4 +39,9 @@ std::unique_ptr RtcEventLogFactory::CreateRtcEventLog( #endif } +std::unique_ptr RtcEventLogFactory::CreateRtcEventLog( + RtcEventLog::EncodingType encoding_type) { + return Create(encoding_type); +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory.h b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory.h index 06cc074d20..fd1db3c728 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory.h @@ -25,6 +25,8 @@ class RTC_EXPORT RtcEventLogFactory : public RtcEventLogFactoryInterface { explicit RtcEventLogFactory(TaskQueueFactory* task_queue_factory); ~RtcEventLogFactory() override {} + std::unique_ptr Create( + RtcEventLog::EncodingType encoding_type) const override; std::unique_ptr CreateRtcEventLog( RtcEventLog::EncodingType encoding_type) override; diff --git a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory_interface.h b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory_interface.h index 
acc5bcb038..a6f4dee92f 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log_factory_interface.h @@ -24,7 +24,9 @@ class RtcEventLogFactoryInterface { public: virtual ~RtcEventLogFactoryInterface() = default; - virtual std::unique_ptr CreateRtcEventLog( + virtual std::unique_ptr Create( + RtcEventLog::EncodingType encoding_type) const = 0; + [[deprecated]] virtual std::unique_ptr CreateRtcEventLog( RtcEventLog::EncodingType encoding_type) = 0; }; diff --git a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log_output.h b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log_output.h index cd16b27501..f1f84a5f3a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log_output.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log_output.h @@ -13,6 +13,8 @@ #include +#include "absl/strings/string_view.h" + namespace webrtc { // NOTE: This class is still under development and may change without notice. @@ -31,7 +33,7 @@ class RtcEventLogOutput { // about how much data was written, if any. The output sink becomes inactive // after the first time `false` is returned. Write() may not be called on // an inactive output sink. - virtual bool Write(const std::string& output) = 0; + virtual bool Write(absl::string_view output) = 0; // Indicates that buffers should be written to disk if applicable. virtual void Flush() {} diff --git a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log_output_file.cc b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log_output_file.cc index 2e31c2df66..e1d4c7c711 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log_output_file.cc +++ b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log_output_file.cc @@ -54,15 +54,15 @@ bool RtcEventLogOutputFile::IsActive() const { return IsActiveInternal(); } -bool RtcEventLogOutputFile::Write(const std::string& output) { +bool RtcEventLogOutputFile::Write(absl::string_view output) { RTC_DCHECK(IsActiveInternal()); // No single write may be so big, that it would risk overflowing the // calculation of (written_bytes_ + output.length()). 
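For illustration, a sketch of a custom event-log sink updated to the absl::string_view signature introduced above; InMemoryLogOutput is a hypothetical class, not part of WebRTC.

#include <string>

#include "absl/strings/string_view.h"
#include "api/rtc_event_log_output.h"

class InMemoryLogOutput : public webrtc::RtcEventLogOutput {
 public:
  bool IsActive() const override { return true; }

  // The interface now takes absl::string_view; the old const std::string&
  // overload no longer exists, so overrides must be updated to match.
  bool Write(absl::string_view output) override {
    buffer_.append(output.data(), output.size());
    return true;
  }

 private:
  std::string buffer_;
};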
- RTC_DCHECK_LT(output.length(), kMaxReasonableFileSize); + RTC_DCHECK_LT(output.size(), kMaxReasonableFileSize); if (max_size_bytes_ == RtcEventLog::kUnlimitedOutput || - written_bytes_ + output.length() <= max_size_bytes_) { - if (file_.Write(output.c_str(), output.size())) { + written_bytes_ + output.size() <= max_size_bytes_) { + if (file_.Write(output.data(), output.size())) { written_bytes_ += output.size(); return true; } else { diff --git a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log_output_file.h b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log_output_file.h index d2901be1d0..c9ae0a8ede 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log_output_file.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log_output_file.h @@ -37,7 +37,7 @@ class RtcEventLogOutputFile final : public RtcEventLogOutput { bool IsActive() const override; - bool Write(const std::string& output) override; + bool Write(absl::string_view output) override; private: RtcEventLogOutputFile(FileWrapper file, size_t max_size_bytes); diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_headers.h b/TMessagesProj/jni/voip/webrtc/api/rtp_headers.h index cf3d909499..a640eb7d38 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_headers.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_headers.h @@ -103,15 +103,6 @@ struct RTPHeaderExtension { (1 << kAbsSendTimeFraction)); } - TimeDelta GetAbsoluteSendTimeDelta(uint32_t previous_sendtime) const { - RTC_DCHECK(hasAbsoluteSendTime); - RTC_DCHECK(absoluteSendTime < (1ul << 24)); - RTC_DCHECK(previous_sendtime < (1ul << 24)); - int32_t delta = - static_cast((absoluteSendTime - previous_sendtime) << 8) >> 8; - return TimeDelta::Micros((delta * 1000000ll) / (1 << kAbsSendTimeFraction)); - } - bool hasTransmissionTimeOffset; int32_t transmissionTimeOffset; bool hasAbsoluteSendTime; diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.cc b/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.cc index db818f7657..cba274ec38 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.cc +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.cc @@ -18,18 +18,13 @@ namespace webrtc { RtpPacketInfo::RtpPacketInfo() : ssrc_(0), rtp_timestamp_(0), receive_time_(Timestamp::MinusInfinity()) {} -RtpPacketInfo::RtpPacketInfo( - uint32_t ssrc, - std::vector csrcs, - uint32_t rtp_timestamp, - absl::optional audio_level, - absl::optional absolute_capture_time, - Timestamp receive_time) +RtpPacketInfo::RtpPacketInfo(uint32_t ssrc, + std::vector csrcs, + uint32_t rtp_timestamp, + Timestamp receive_time) : ssrc_(ssrc), csrcs_(std::move(csrcs)), rtp_timestamp_(rtp_timestamp), - audio_level_(audio_level), - absolute_capture_time_(absolute_capture_time), receive_time_(receive_time) {} RtpPacketInfo::RtpPacketInfo(const RTPHeader& rtp_header, @@ -49,31 +44,13 @@ RtpPacketInfo::RtpPacketInfo(const RTPHeader& rtp_header, absolute_capture_time_ = extension.absolute_capture_time; } -RtpPacketInfo::RtpPacketInfo( - uint32_t ssrc, - std::vector csrcs, - uint32_t rtp_timestamp, - absl::optional audio_level, - absl::optional absolute_capture_time, - int64_t receive_time_ms) - : RtpPacketInfo(ssrc, - csrcs, - rtp_timestamp, - audio_level, - absolute_capture_time, - Timestamp::Millis(receive_time_ms)) {} -RtpPacketInfo::RtpPacketInfo(const RTPHeader& rtp_header, - int64_t receive_time_ms) - : RtpPacketInfo(rtp_header, Timestamp::Millis(receive_time_ms)) {} - bool operator==(const RtpPacketInfo& lhs, const RtpPacketInfo& rhs) { return (lhs.ssrc() == rhs.ssrc()) && (lhs.csrcs() == rhs.csrcs()) 
&& (lhs.rtp_timestamp() == rhs.rtp_timestamp()) && + (lhs.receive_time() == rhs.receive_time()) && (lhs.audio_level() == rhs.audio_level()) && (lhs.absolute_capture_time() == rhs.absolute_capture_time()) && - (lhs.receive_time() == rhs.receive_time() && - (lhs.local_capture_clock_offset() == - rhs.local_capture_clock_offset())); + (lhs.local_capture_clock_offset() == rhs.local_capture_clock_offset()); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.h b/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.h index bc9839f479..8df12a36cf 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.h @@ -17,6 +17,7 @@ #include "absl/types/optional.h" #include "api/rtp_headers.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "rtc_base/system/rtc_export.h" @@ -34,22 +35,10 @@ class RTC_EXPORT RtpPacketInfo { RtpPacketInfo(uint32_t ssrc, std::vector csrcs, uint32_t rtp_timestamp, - absl::optional audio_level, - absl::optional absolute_capture_time, Timestamp receive_time); RtpPacketInfo(const RTPHeader& rtp_header, Timestamp receive_time); - // TODO(bugs.webrtc.org/12722): Deprecated, remove once downstream projects - // are updated. - RtpPacketInfo(uint32_t ssrc, - std::vector csrcs, - uint32_t rtp_timestamp, - absl::optional audio_level, - absl::optional absolute_capture_time, - int64_t receive_time_ms); - RtpPacketInfo(const RTPHeader& rtp_header, int64_t receive_time_ms); - RtpPacketInfo(const RtpPacketInfo& other) = default; RtpPacketInfo(RtpPacketInfo&& other) = default; RtpPacketInfo& operator=(const RtpPacketInfo& other) = default; @@ -64,31 +53,33 @@ class RTC_EXPORT RtpPacketInfo { uint32_t rtp_timestamp() const { return rtp_timestamp_; } void set_rtp_timestamp(uint32_t value) { rtp_timestamp_ = value; } + Timestamp receive_time() const { return receive_time_; } + void set_receive_time(Timestamp value) { receive_time_ = value; } + absl::optional audio_level() const { return audio_level_; } - void set_audio_level(absl::optional value) { audio_level_ = value; } + RtpPacketInfo& set_audio_level(absl::optional value) { + audio_level_ = value; + return *this; + } const absl::optional& absolute_capture_time() const { return absolute_capture_time_; } - void set_absolute_capture_time( + RtpPacketInfo& set_absolute_capture_time( const absl::optional& value) { absolute_capture_time_ = value; + return *this; } - const absl::optional& local_capture_clock_offset() const { + const absl::optional& local_capture_clock_offset() const { return local_capture_clock_offset_; } - - void set_local_capture_clock_offset(const absl::optional& value) { + RtpPacketInfo& set_local_capture_clock_offset( + absl::optional value) { local_capture_clock_offset_ = value; + return *this; } - Timestamp receive_time() const { return receive_time_; } - void set_receive_time(Timestamp value) { receive_time_ = value; } - // TODO(bugs.webrtc.org/12722): Deprecated, remove once downstream projects - // are updated. - int64_t receive_time_ms() const { return receive_time_.ms(); } - private: // Fields from the RTP header: // https://tools.ietf.org/html/rfc3550#section-5.1 @@ -96,25 +87,23 @@ class RTC_EXPORT RtpPacketInfo { std::vector csrcs_; uint32_t rtp_timestamp_; + // Local `webrtc::Clock`-based timestamp of when the packet was received. 
+ Timestamp receive_time_; + // Fields from the Audio Level header extension: // https://tools.ietf.org/html/rfc6464#section-3 absl::optional audio_level_; // Fields from the Absolute Capture Time header extension: // http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time - // To not be confused with `local_capture_clock_offset_`, the - // `estimated_capture_clock_offset` in `absolute_capture_time_` should - // represent the clock offset between a remote sender and the capturer, and - // thus equals to the corresponding values in the received RTP packets, - // subjected to possible interpolations. absl::optional absolute_capture_time_; - // Clock offset against capturer's clock. Should be derived from the estimated - // capture clock offset defined in the Absolute Capture Time header extension. - absl::optional local_capture_clock_offset_; - - // Local `webrtc::Clock`-based timestamp of when the packet was received. - Timestamp receive_time_; + // Clock offset between the local clock and the capturer's clock. + // Do not confuse with `AbsoluteCaptureTime::estimated_capture_clock_offset` + // which instead represents the clock offset between a remote sender and the + // capturer. The following holds: + // Capture's NTP Clock = Local NTP Clock + Local-Capture Clock Offset + absl::optional local_capture_clock_offset_; }; bool operator==(const RtpPacketInfo& lhs, const RtpPacketInfo& rhs); diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_packet_infos.h b/TMessagesProj/jni/voip/webrtc/api/rtp_packet_infos.h index 2ca3174037..7445729fbb 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_packet_infos.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_packet_infos.h @@ -15,6 +15,7 @@ #include #include +#include "api/make_ref_counted.h" #include "api/ref_counted_base.h" #include "api/rtp_packet_info.h" #include "api/scoped_refptr.h" @@ -79,7 +80,7 @@ class RTC_EXPORT RtpPacketInfos { size_type size() const { return entries().size(); } private: - class Data : public rtc::RefCountedBase { + class Data final : public rtc::RefCountedNonVirtual { public: static rtc::scoped_refptr Create(const vector_type& entries) { // Performance optimization for the empty case. @@ -87,7 +88,7 @@ class RTC_EXPORT RtpPacketInfos { return nullptr; } - return new Data(entries); + return rtc::make_ref_counted(entries); } static rtc::scoped_refptr Create(vector_type&& entries) { @@ -96,16 +97,16 @@ class RTC_EXPORT RtpPacketInfos { return nullptr; } - return new Data(std::move(entries)); + return rtc::make_ref_counted(std::move(entries)); } const vector_type& entries() const { return entries_; } - private: explicit Data(const vector_type& entries) : entries_(entries) {} explicit Data(vector_type&& entries) : entries_(std::move(entries)) {} - ~Data() override {} + ~Data() = default; + private: const vector_type entries_; }; diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc index feba39348d..c48b8da02c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc @@ -11,6 +11,7 @@ #include #include +#include #include #include "api/array_view.h" @@ -280,6 +281,14 @@ const std::vector RtpExtension::DeduplicateHeaderExtensions( } } + // Sort the returned vector to make comparisons of header extensions reliable. + // In order of priority, we sort by uri first, then encrypt and id last. 
+ std::sort(filtered.begin(), filtered.end(), + [](const RtpExtension& a, const RtpExtension& b) { + return std::tie(a.uri, a.encrypt, a.id) < + std::tie(b.uri, b.encrypt, b.id); + }); + return filtered; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h index 84f3a0e840..0d3c9dfd22 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h @@ -17,11 +17,14 @@ #include #include +#include "absl/container/inlined_vector.h" #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/media_types.h" #include "api/priority.h" #include "api/rtp_transceiver_direction.h" +#include "api/video/resolution.h" +#include "api/video_codecs/scalability_mode.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -186,6 +189,9 @@ struct RTC_EXPORT RtpCodecCapability { // TODO(deadbeef): Not implemented. bool svc_multi_stream_support = false; + // https://w3c.github.io/webrtc-svc/#dom-rtcrtpcodeccapability-scalabilitymodes + absl::InlinedVector scalability_modes; + bool operator==(const RtpCodecCapability& o) const { return name == o.name && kind == o.kind && clock_rate == o.clock_rate && preferred_payload_type == o.preferred_payload_type && @@ -194,7 +200,8 @@ struct RTC_EXPORT RtpCodecCapability { parameters == o.parameters && options == o.options && max_temporal_layer_extensions == o.max_temporal_layer_extensions && max_spatial_layer_extensions == o.max_spatial_layer_extensions && - svc_multi_stream_support == o.svc_multi_stream_support; + svc_multi_stream_support == o.svc_multi_stream_support && + scalability_modes == o.scalability_modes; } bool operator!=(const RtpCodecCapability& o) const { return !(*this == o); } }; @@ -286,6 +293,9 @@ struct RTC_EXPORT RtpExtension { bool encrypt); // Returns a list of extensions where any extension URI is unique. + // The returned list will be sorted by uri first, then encrypt and id last. + // Having the list sorted allows the caller fo compare filtered lists for + // equality to detect when changes have been made. static const std::vector DeduplicateHeaderExtensions( const std::vector& extensions, Filter filter); @@ -493,6 +503,24 @@ struct RTC_EXPORT RtpEncodingParameters { // https://w3c.github.io/webrtc-svc/#rtcrtpencodingparameters absl::optional scalability_mode; + // Requested encode resolution. + // + // This field provides an alternative to `scale_resolution_down_by` + // that is not dependent on the video source. + // + // When setting requested_resolution it is not necessary to adapt the + // video source using OnOutputFormatRequest, since the VideoStreamEncoder + // will apply downscaling if necessary. requested_resolution will also be + // propagated to the video source, this allows downscaling earlier in the + // pipeline which can be beneficial if the source is consumed by multiple + // encoders, but is not strictly necessary. + // + // The `requested_resolution` is subject to resource adaptation. + // + // It is an error to set both `requested_resolution` and + // `scale_resolution_down_by`. + absl::optional requested_resolution; + // For an RtpSender, set to true to cause this encoding to be encoded and // sent, and false for it not to be encoded and sent. 
This allows control // across multiple encodings of a sender for turning simulcast layers on and @@ -518,7 +546,8 @@ struct RTC_EXPORT RtpEncodingParameters { num_temporal_layers == o.num_temporal_layers && scale_resolution_down_by == o.scale_resolution_down_by && active == o.active && rid == o.rid && - adaptive_ptime == o.adaptive_ptime; + adaptive_ptime == o.adaptive_ptime && + requested_resolution == o.requested_resolution; } bool operator!=(const RtpEncodingParameters& o) const { return !(*this == o); diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_sender_interface.cc b/TMessagesProj/jni/voip/webrtc/api/rtp_sender_interface.cc deleted file mode 100644 index 57a5a10fb5..0000000000 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_sender_interface.cc +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/rtp_sender_interface.h" - -namespace webrtc { - -void RtpSenderInterface::SetFrameEncryptor( - rtc::scoped_refptr frame_encryptor) {} - -rtc::scoped_refptr -RtpSenderInterface::GetFrameEncryptor() const { - return nullptr; -} - -std::vector RtpSenderInterface::init_send_encodings() - const { - return {}; -} - -rtc::scoped_refptr RtpSenderInterface::dtls_transport() - const { - return nullptr; -} - -void RtpSenderInterface::SetEncoderToPacketizerFrameTransformer( - rtc::scoped_refptr frame_transformer) {} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_sender_interface.h b/TMessagesProj/jni/voip/webrtc/api/rtp_sender_interface.h index 9ffad68644..6fc658f9ee 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_sender_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_sender_interface.h @@ -14,6 +14,7 @@ #ifndef API_RTP_SENDER_INTERFACE_H_ #define API_RTP_SENDER_INTERFACE_H_ +#include #include #include @@ -26,6 +27,7 @@ #include "api/rtc_error.h" #include "api/rtp_parameters.h" #include "api/scoped_refptr.h" +#include "api/video_codecs/video_encoder_factory.h" #include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" @@ -41,8 +43,7 @@ class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { // The dtlsTransport attribute exposes the DTLS transport on which the // media is sent. It may be null. // https://w3c.github.io/webrtc-pc/#dom-rtcrtpsender-transport - // TODO(https://bugs.webrtc.org/907849) remove default implementation - virtual rtc::scoped_refptr dtls_transport() const; + virtual rtc::scoped_refptr dtls_transport() const = 0; // Returns primary SSRC used by this sender for sending media. // Returns 0 if not yet determined. @@ -65,13 +66,13 @@ class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { // Sets the IDs of the media streams associated with this sender's track. // These are signalled in the SDP so that the remote side can associate // tracks. - virtual void SetStreams(const std::vector& stream_ids) {} + virtual void SetStreams(const std::vector& stream_ids) = 0; // Returns the list of encoding parameters that will be applied when the SDP // local description is set. These initial encoding parameters can be set by // PeerConnection::AddTransceiver, and later updated with Get/SetParameters. 
// TODO(orphis): Make it pure virtual once Chrome has updated - virtual std::vector init_send_encodings() const; + virtual std::vector init_send_encodings() const = 0; virtual RtpParameters GetParameters() const = 0; // Note that only a subset of the parameters can currently be changed. See @@ -87,14 +88,24 @@ class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { // using the user provided encryption mechanism regardless of whether SRTP is // enabled or not. virtual void SetFrameEncryptor( - rtc::scoped_refptr frame_encryptor); + rtc::scoped_refptr frame_encryptor) = 0; // Returns a pointer to the frame encryptor set previously by the // user. This can be used to update the state of the object. - virtual rtc::scoped_refptr GetFrameEncryptor() const; + virtual rtc::scoped_refptr GetFrameEncryptor() + const = 0; virtual void SetEncoderToPacketizerFrameTransformer( - rtc::scoped_refptr frame_transformer); + rtc::scoped_refptr frame_transformer) = 0; + + // Sets a user defined encoder selector. + // Overrides selector that is (optionally) provided by VideoEncoderFactory. + virtual void SetEncoderSelector( + std::unique_ptr + encoder_selector) = 0; + + // TODO(crbug.com/1354101): make pure virtual again after Chrome roll. + virtual RTCError GenerateKeyFrame() { return RTCError::OK(); } protected: ~RtpSenderInterface() override = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.cc b/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.cc index 454e450c8d..7267b286be 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.cc +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.cc @@ -44,33 +44,6 @@ void RtpTransceiverInterface::StopInternal() { << "DEBUG: RtpTransceiverInterface::StopInternal called"; } -RTCError RtpTransceiverInterface::SetCodecPreferences( - rtc::ArrayView) { - RTC_DCHECK_NOTREACHED() << "Not implemented"; - return {}; -} - -std::vector RtpTransceiverInterface::codec_preferences() - const { - return {}; -} - -std::vector -RtpTransceiverInterface::HeaderExtensionsToOffer() const { - return {}; -} - -webrtc::RTCError RtpTransceiverInterface::SetOfferedRtpHeaderExtensions( - rtc::ArrayView - header_extensions_to_offer) { - return webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_OPERATION); -} - -std::vector -RtpTransceiverInterface::HeaderExtensionsNegotiated() const { - return {}; -} - // TODO(bugs.webrtc.org/11839) Remove default implementations when clients // are updated. void RtpTransceiverInterface::SetDirection( diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h b/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h index 4799c4b153..c9d911fac1 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h @@ -97,8 +97,7 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { // transceiver's stop() method has been called, but the negotiation with // the other end for shutting down the transceiver is not yet done. // https://w3c.github.io/webrtc-pc/#dfn-stopping-0 - // TODO(hta): Remove default implementation. - virtual bool stopping() const; + virtual bool stopping() const = 0; // The direction attribute indicates the preferred direction of this // transceiver, which will be used in calls to CreateOffer and CreateAnswer. @@ -147,28 +146,28 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { // by WebRTC for this transceiver. 
// https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-setcodecpreferences virtual RTCError SetCodecPreferences( - rtc::ArrayView codecs); - virtual std::vector codec_preferences() const; + rtc::ArrayView codecs) = 0; + virtual std::vector codec_preferences() const = 0; // Readonly attribute which contains the set of header extensions that was set // with SetOfferedRtpHeaderExtensions, or a default set if it has not been // called. // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface virtual std::vector HeaderExtensionsToOffer() - const; + const = 0; // Readonly attribute which is either empty if negotation has not yet // happened, or a vector of the negotiated header extensions. // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface virtual std::vector HeaderExtensionsNegotiated() - const; + const = 0; // The SetOfferedRtpHeaderExtensions method modifies the next SDP negotiation // so that it negotiates use of header extensions which are not kStopped. // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface virtual webrtc::RTCError SetOfferedRtpHeaderExtensions( rtc::ArrayView - header_extensions_to_offer); + header_extensions_to_offer) = 0; protected: ~RtpTransceiverInterface() override = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/scoped_refptr.h b/TMessagesProj/jni/voip/webrtc/api/scoped_refptr.h index 5b3a08541e..e145509127 100644 --- a/TMessagesProj/jni/voip/webrtc/api/scoped_refptr.h +++ b/TMessagesProj/jni/voip/webrtc/api/scoped_refptr.h @@ -74,8 +74,9 @@ class scoped_refptr { typedef T element_type; scoped_refptr() : ptr_(nullptr) {} + scoped_refptr(std::nullptr_t) : ptr_(nullptr) {} // NOLINT(runtime/explicit) - scoped_refptr(T* p) : ptr_(p) { // NOLINT(runtime/explicit) + explicit scoped_refptr(T* p) : ptr_(p) { if (ptr_) ptr_->AddRef(); } @@ -103,7 +104,7 @@ class scoped_refptr { } T* get() const { return ptr_; } - operator T*() const { return ptr_; } + explicit operator bool() const { return ptr_ != nullptr; } T& operator*() const { return *ptr_; } T* operator->() const { return ptr_; } @@ -160,6 +161,62 @@ class scoped_refptr { T* ptr_; }; +template +bool operator==(const rtc::scoped_refptr& a, + const rtc::scoped_refptr& b) { + return a.get() == b.get(); +} +template +bool operator!=(const rtc::scoped_refptr& a, + const rtc::scoped_refptr& b) { + return !(a == b); +} + +template +bool operator==(const rtc::scoped_refptr& a, std::nullptr_t) { + return a.get() == nullptr; +} + +template +bool operator!=(const rtc::scoped_refptr& a, std::nullptr_t) { + return !(a == nullptr); +} + +template +bool operator==(std::nullptr_t, const rtc::scoped_refptr& a) { + return a.get() == nullptr; +} + +template +bool operator!=(std::nullptr_t, const rtc::scoped_refptr& a) { + return !(a == nullptr); +} + +// Comparison with raw pointer. +template +bool operator==(const rtc::scoped_refptr& a, const U* b) { + return a.get() == b; +} +template +bool operator!=(const rtc::scoped_refptr& a, const U* b) { + return !(a == b); +} + +template +bool operator==(const T* a, const rtc::scoped_refptr& b) { + return a == b.get(); +} +template +bool operator!=(const T* a, const rtc::scoped_refptr& b) { + return !(a == b); +} + +// Ordered comparison, needed for use as a std::map key. 
+template +bool operator<(const rtc::scoped_refptr& a, const rtc::scoped_refptr& b) { + return a.get() < b.get(); +} + } // namespace rtc #endif // API_SCOPED_REFPTR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/sequence_checker.h b/TMessagesProj/jni/voip/webrtc/api/sequence_checker.h index 5db7b9e4df..a79d04f61f 100644 --- a/TMessagesProj/jni/voip/webrtc/api/sequence_checker.h +++ b/TMessagesProj/jni/voip/webrtc/api/sequence_checker.h @@ -107,10 +107,15 @@ class RTC_LOCKABLE SequenceChecker #define RTC_RUN_ON(x) \ RTC_THREAD_ANNOTATION_ATTRIBUTE__(exclusive_locks_required(x)) -#define RTC_DCHECK_RUN_ON(x) \ - webrtc::webrtc_sequence_checker_internal::SequenceCheckerScope \ - seq_check_scope(x); \ - RTC_DCHECK((x)->IsCurrent()) \ - << webrtc::webrtc_sequence_checker_internal::ExpectationToString(x) +// Checks current code is running on the desired sequence. +// +// First statement validates it is running on the sequence `x`. +// Second statement annotates for the thread safety analyzer the check was done. +// Such annotation has to be attached to a function, and that function has to be +// called. Thus current implementation creates a noop lambda and calls it. +#define RTC_DCHECK_RUN_ON(x) \ + RTC_DCHECK((x)->IsCurrent()) \ + << webrtc::webrtc_sequence_checker_internal::ExpectationToString(x); \ + []() RTC_ASSERT_EXCLUSIVE_LOCK(x) {}() #endif // API_SEQUENCE_CHECKER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats.h b/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats.h index a5fae52c29..273ea316cb 100644 --- a/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats.h +++ b/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats.h @@ -20,6 +20,7 @@ #include #include +#include "absl/types/optional.h" #include "rtc_base/checks.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/system/rtc_export_template.h" @@ -158,7 +159,7 @@ class RTC_EXPORT RTCStats { const char this_class::kType[] = type_str; \ \ std::unique_ptr this_class::copy() const { \ - return std::unique_ptr(new this_class(*this)); \ + return std::make_unique(*this); \ } \ \ const char* this_class::type() const { return this_class::kType; } \ @@ -189,7 +190,7 @@ class RTC_EXPORT RTCStats { const char this_class::kType[] = type_str; \ \ std::unique_ptr this_class::copy() const { \ - return std::unique_ptr(new this_class(*this)); \ + return std::make_unique(*this); \ } \ \ const char* this_class::type() const { return this_class::kType; } \ @@ -215,6 +216,17 @@ enum class NonStandardGroupId { kRtcStatsRelativePacketArrivalDelay, }; +// Certain stat members should only be exposed to the JavaScript API in +// certain circumstances as to avoid passive fingerprinting. +enum class StatExposureCriteria : uint8_t { + // The stat should always be exposed. This is the default. + kAlways, + // The stat exposes hardware capabilities and thus should has limited exposure + // to JavaScript. The requirements for exposure are written in the spec at + // https://w3c.github.io/webrtc-stats/#limiting-exposure-of-hardware-capabilities. + kHardwareCapability, +}; + // Interface for `RTCStats` members, which have a name and a value of a type // defined in a subclass. Only the types listed in `Type` are supported, these // are implemented by `RTCStatsMember`. 
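For illustration, the kind of call-site adjustment the scoped_refptr changes above require now that the raw-pointer constructor is explicit and the implicit conversion to T* is gone; AdoptTrack() and ConsumeTrack() are hypothetical helpers.

#include "api/media_stream_interface.h"
#include "api/scoped_refptr.h"

void ConsumeTrack(webrtc::AudioTrackInterface* track);  // hypothetical

void AdoptTrack(webrtc::AudioTrackInterface* raw_track) {
  // Construction from a raw pointer must now be explicit.
  rtc::scoped_refptr<webrtc::AudioTrackInterface> track(raw_track);

  // Null checks go through the new explicit operator bool().
  if (track) {
    // There is no implicit conversion back to T* anymore; pass it explicitly.
    ConsumeTrack(track.get());
  }
  // Equality against raw pointers/nullptr and operator< (for std::map keys)
  // now come from the free operators added above.
}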
The value of a member may be @@ -249,16 +261,24 @@ class RTCStatsMemberInterface { virtual Type type() const = 0; virtual bool is_sequence() const = 0; virtual bool is_string() const = 0; - bool is_defined() const { return is_defined_; } + virtual bool is_defined() const = 0; // Is this part of the stats spec? Used so that chromium can easily filter // out anything unstandardized. virtual bool is_standardized() const = 0; // Non-standard stats members can have group IDs in order to be exposed in // JavaScript through experiments. Standardized stats have no group IDs. virtual std::vector group_ids() const { return {}; } + // The conditions for exposing the statistic to JavaScript. Stats with + // criteria that is not kAlways has some restriction and should be filtered + // in accordance to the spec. + virtual StatExposureCriteria exposure_criteria() const { + return StatExposureCriteria::kAlways; + } // Type and value comparator. The names are not compared. These operators are // exposed for testing. - virtual bool operator==(const RTCStatsMemberInterface& other) const = 0; + bool operator==(const RTCStatsMemberInterface& other) const { + return IsEqual(other); + } bool operator!=(const RTCStatsMemberInterface& other) const { return !(*this == other); } @@ -277,11 +297,11 @@ class RTCStatsMemberInterface { } protected: - RTCStatsMemberInterface(const char* name, bool is_defined) - : name_(name), is_defined_(is_defined) {} + explicit RTCStatsMemberInterface(const char* name) : name_(name) {} + + virtual bool IsEqual(const RTCStatsMemberInterface& other) const = 0; const char* const name_; - bool is_defined_; }; // Template implementation of `RTCStatsMemberInterface`. @@ -291,80 +311,73 @@ template class RTCStatsMember : public RTCStatsMemberInterface { public: explicit RTCStatsMember(const char* name) - : RTCStatsMemberInterface(name, /*is_defined=*/false), value_() {} + : RTCStatsMemberInterface(name), value_() {} RTCStatsMember(const char* name, const T& value) - : RTCStatsMemberInterface(name, /*is_defined=*/true), value_(value) {} + : RTCStatsMemberInterface(name), value_(value) {} RTCStatsMember(const char* name, T&& value) - : RTCStatsMemberInterface(name, /*is_defined=*/true), - value_(std::move(value)) {} + : RTCStatsMemberInterface(name), value_(std::move(value)) {} explicit RTCStatsMember(const RTCStatsMember& other) - : RTCStatsMemberInterface(other.name_, other.is_defined_), - value_(other.value_) {} + : RTCStatsMemberInterface(other.name_), value_(other.value_) {} explicit RTCStatsMember(RTCStatsMember&& other) - : RTCStatsMemberInterface(other.name_, other.is_defined_), - value_(std::move(other.value_)) {} + : RTCStatsMemberInterface(other.name_), value_(std::move(other.value_)) {} static Type StaticType(); Type type() const override { return StaticType(); } bool is_sequence() const override; bool is_string() const override; + bool is_defined() const override { return value_.has_value(); } bool is_standardized() const override { return true; } - bool operator==(const RTCStatsMemberInterface& other) const override { - if (type() != other.type() || is_standardized() != other.is_standardized()) - return false; - const RTCStatsMember& other_t = - static_cast&>(other); - if (!is_defined_) - return !other_t.is_defined(); - if (!other.is_defined()) - return false; - return value_ == other_t.value_; - } std::string ValueToString() const override; std::string ValueToJson() const override; template inline T ValueOrDefault(U default_value) const { - if (is_defined()) { - return *(*this); 
- } - return default_value; + return value_.value_or(default_value); } // Assignment operators. T& operator=(const T& value) { value_ = value; - is_defined_ = true; - return value_; + return value_.value(); } T& operator=(const T&& value) { value_ = std::move(value); - is_defined_ = true; - return value_; + return value_.value(); } // Value getters. T& operator*() { - RTC_DCHECK(is_defined_); - return value_; + RTC_DCHECK(value_); + return *value_; } const T& operator*() const { - RTC_DCHECK(is_defined_); - return value_; + RTC_DCHECK(value_); + return *value_; } // Value getters, arrow operator. T* operator->() { - RTC_DCHECK(is_defined_); - return &value_; + RTC_DCHECK(value_); + return &(*value_); } const T* operator->() const { - RTC_DCHECK(is_defined_); - return &value_; + RTC_DCHECK(value_); + return &(*value_); + } + + protected: + bool IsEqual(const RTCStatsMemberInterface& other) const override { + if (type() != other.type() || + is_standardized() != other.is_standardized() || + exposure_criteria() != other.exposure_criteria()) + return false; + const RTCStatsMember& other_t = + static_cast&>(other); + return value_ == other_t.value_; } private: - T value_; + absl::optional value_; }; namespace rtc_stats_internal { @@ -405,6 +418,81 @@ WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector); WEBRTC_DECLARE_RTCSTATSMEMBER(rtc_stats_internal::MapStringUint64); WEBRTC_DECLARE_RTCSTATSMEMBER(rtc_stats_internal::MapStringDouble); +// For stats with restricted exposure. +template +class RTCRestrictedStatsMember : public RTCStatsMember { + public: + explicit RTCRestrictedStatsMember(const char* name) + : RTCStatsMember(name) {} + RTCRestrictedStatsMember(const char* name, const T& value) + : RTCStatsMember(name, value) {} + RTCRestrictedStatsMember(const char* name, T&& value) + : RTCStatsMember(name, std::move(value)) {} + RTCRestrictedStatsMember(const RTCRestrictedStatsMember& other) + : RTCStatsMember(other) {} + RTCRestrictedStatsMember(RTCRestrictedStatsMember&& other) + : RTCStatsMember(std::move(other)) {} + + StatExposureCriteria exposure_criteria() const override { return E; } + + T& operator=(const T& value) { return RTCStatsMember::operator=(value); } + T& operator=(const T&& value) { + return RTCStatsMember::operator=(std::move(value)); + } + + private: + static_assert(E != StatExposureCriteria::kAlways, + "kAlways is the default exposure criteria. 
Use " + "RTCStatMember instead."); +}; + +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; + // Using inheritance just so that it's obvious from the member's declaration // whether it's standardized or not. template diff --git a/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_report.h b/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_report.h index 2ced422370..1cc6293fec 100644 --- a/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_report.h +++ b/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_report.h @@ -17,11 +17,13 @@ #include #include #include +#include #include #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "api/stats/rtc_stats.h" +#include "api/units/timestamp.h" // TODO(tommi): Remove this include after fixing iwyu issue in chromium. // See: third_party/blink/renderer/platform/peerconnection/rtc_stats.cc #include "rtc_base/ref_counted_object.h" @@ -58,16 +60,34 @@ class RTC_EXPORT RTCStatsReport final StatsMap::const_iterator it_; }; - // TODO(hbos): Remove "= 0" once Chromium unittest has been updated to call - // with a parameter. crbug.com/627816 + // TODO(bugs.webrtc.org/13756): deprecate this in favor of Timestamp. + // TODO(hbos): Remove "= 0" once downstream has been updated to call with a + // parameter. static rtc::scoped_refptr Create(int64_t timestamp_us = 0); + static rtc::scoped_refptr Create(Timestamp timestamp); + // TODO(bugs.webrtc.org/13756): deprecate this in favor of Timestamp. 
explicit RTCStatsReport(int64_t timestamp_us); + explicit RTCStatsReport(Timestamp timestamp); + RTCStatsReport(const RTCStatsReport& other) = delete; rtc::scoped_refptr Copy() const; - int64_t timestamp_us() const { return timestamp_us_; } + int64_t timestamp_us() const { return timestamp_.us_or(-1); } + Timestamp timestamp() const { return timestamp_; } void AddStats(std::unique_ptr stats); + // On success, returns a non-owning pointer to `stats`. If the stats ID is not + // unique, `stats` is not inserted and nullptr is returned. + template + T* TryAddStats(std::unique_ptr stats) { + T* stats_ptr = stats.get(); + if (!stats_ + .insert(std::make_pair(std::string(stats->id()), std::move(stats))) + .second) { + return nullptr; + } + return stats_ptr; + } const RTCStats* Get(const std::string& id) const; size_t size() const { return stats_.size(); } @@ -115,7 +135,7 @@ class RTC_EXPORT RTCStatsReport final ~RTCStatsReport() = default; private: - int64_t timestamp_us_; + Timestamp timestamp_; StatsMap stats_; }; diff --git a/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h b/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h index dec3094b45..059fb0d68f 100644 --- a/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h +++ b/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h @@ -90,6 +90,31 @@ struct RTCContentType { static const char* const kScreenshare; }; +// https://w3c.github.io/webrtc-stats/#dom-rtcdtlsrole +struct RTCDtlsRole { + static const char* const kUnknown; + static const char* const kClient; + static const char* const kServer; +}; + +// https://www.w3.org/TR/webrtc/#rtcicerole +struct RTCIceRole { + static const char* const kUnknown; + static const char* const kControlled; + static const char* const kControlling; +}; + +// https://www.w3.org/TR/webrtc/#dom-rtcicetransportstate +struct RTCIceTransportState { + static const char* const kNew; + static const char* const kChecking; + static const char* const kConnected; + static const char* const kCompleted; + static const char* const kDisconnected; + static const char* const kFailed; + static const char* const kClosed; +}; + // https://w3c.github.io/webrtc-stats/#certificatestats-dict* class RTC_EXPORT RTCCertificateStats final : public RTCStats { public: @@ -106,6 +131,20 @@ class RTC_EXPORT RTCCertificateStats final : public RTCStats { RTCStatsMember issuer_certificate_id; }; +// Non standard extension mapping to rtc::AdapterType +struct RTCNetworkAdapterType { + static constexpr char kUnknown[] = "unknown"; + static constexpr char kEthernet[] = "ethernet"; + static constexpr char kWifi[] = "wifi"; + static constexpr char kCellular[] = "cellular"; + static constexpr char kLoopback[] = "loopback"; + static constexpr char kAny[] = "any"; + static constexpr char kCellular2g[] = "cellular2g"; + static constexpr char kCellular3g[] = "cellular3g"; + static constexpr char kCellular4g[] = "cellular4g"; + static constexpr char kCellular5g[] = "cellular5g"; +}; + // https://w3c.github.io/webrtc-stats/#codec-dict* class RTC_EXPORT RTCCodecStats final : public RTCStats { public: @@ -137,7 +176,7 @@ class RTC_EXPORT RTCDataChannelStats final : public RTCStats { RTCStatsMember label; RTCStatsMember protocol; RTCStatsMember data_channel_identifier; - // TODO(hbos): Support enum types? "RTCStatsMember"? + // Enum type RTCDataChannelState. 
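Before the member listings continue, a short usage sketch for the report additions above (Timestamp-based creation and the TryAddStats helper). The stats type, its constructor arguments and the timestamp value are illustrative and follow the pattern shown elsewhere in these headers:

#include <memory>
#include <utility>

#include "api/stats/rtc_stats_report.h"
#include "api/stats/rtcstats_objects.h"
#include "api/units/timestamp.h"

// Illustrative: build a report at a given Timestamp and insert a stats object,
// detecting a duplicate ID via the nullptr return of TryAddStats().
rtc::scoped_refptr<webrtc::RTCStatsReport> BuildExampleReport() {
  auto report = webrtc::RTCStatsReport::Create(webrtc::Timestamp::Micros(1));
  auto stats = std::make_unique<webrtc::RTCPeerConnectionStats>(
      "P", /*timestamp_us=*/1);
  if (report->TryAddStats(std::move(stats)) == nullptr) {
    // A stats object with id "P" was already present in the report.
  }
  return report;
}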
RTCStatsMember state; RTCStatsMember messages_sent; RTCStatsMember bytes_sent; @@ -146,7 +185,6 @@ class RTC_EXPORT RTCDataChannelStats final : public RTCStats { }; // https://w3c.github.io/webrtc-stats/#candidatepair-dict* -// TODO(hbos): Tracking bug https://bugs.webrtc.org/7062 class RTC_EXPORT RTCIceCandidatePairStats final : public RTCStats { public: WEBRTC_RTCSTATS_DECL(); @@ -159,17 +197,16 @@ class RTC_EXPORT RTCIceCandidatePairStats final : public RTCStats { RTCStatsMember transport_id; RTCStatsMember local_candidate_id; RTCStatsMember remote_candidate_id; - // TODO(hbos): Support enum types? - // "RTCStatsMember"? + // Enum type RTCStatsIceCandidatePairState. RTCStatsMember state; // Obsolete: priority RTCStatsMember priority; RTCStatsMember nominated; - // TODO(hbos): Collect this the way the spec describes it. We have a value for - // it but it is not spec-compliant. https://bugs.webrtc.org/7062 + // `writable` does not exist in the spec and old comments suggest it used to + // exist but was incorrectly implemented. + // TODO(https://crbug.com/webrtc/14171): Standardize and/or modify + // implementation. RTCStatsMember writable; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 - RTCStatsMember readable; RTCStatsMember packets_sent; RTCStatsMember packets_received; RTCStatsMember bytes_sent; @@ -177,35 +214,17 @@ class RTC_EXPORT RTCIceCandidatePairStats final : public RTCStats { RTCStatsMember total_round_trip_time; RTCStatsMember current_round_trip_time; RTCStatsMember available_outgoing_bitrate; - // TODO(hbos): Populate this value. It is wired up and collected the same way - // "VideoBwe.googAvailableReceiveBandwidth" is, but that value is always - // undefined. https://bugs.webrtc.org/7062 RTCStatsMember available_incoming_bitrate; RTCStatsMember requests_received; RTCStatsMember requests_sent; RTCStatsMember responses_received; RTCStatsMember responses_sent; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 - RTCStatsMember retransmissions_received; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 - RTCStatsMember retransmissions_sent; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 - RTCStatsMember consent_requests_received; RTCStatsMember consent_requests_sent; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 - RTCStatsMember consent_responses_received; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 - RTCStatsMember consent_responses_sent; RTCStatsMember packets_discarded_on_send; RTCStatsMember bytes_discarded_on_send; }; // https://w3c.github.io/webrtc-stats/#icecandidate-dict* -// TODO(hbos): `RTCStatsCollector` only collects candidates that are part of -// ice candidate pairs, but there could be candidates not paired with anything. -// crbug.com/632723 -// TODO(qingsi): Add the stats of STUN binding requests (keepalives) and collect -// them in the new PeerConnection::GetStats. class RTC_EXPORT RTCIceCandidateStats : public RTCStats { public: WEBRTC_RTCSTATS_DECL(); @@ -222,11 +241,19 @@ class RTC_EXPORT RTCIceCandidateStats : public RTCStats { RTCStatsMember port; RTCStatsMember protocol; RTCStatsMember relay_protocol; - // TODO(hbos): Support enum types? "RTCStatsMember"? + // Enum type RTCIceCandidateType. RTCStatsMember candidate_type; RTCStatsMember priority; - // TODO(hbos): Not collected by `RTCStatsCollector`. 
crbug.com/632723 RTCStatsMember url; + RTCStatsMember foundation; + RTCStatsMember related_address; + RTCStatsMember related_port; + RTCStatsMember username_fragment; + // Enum type RTCIceTcpCandidateType. + RTCStatsMember tcp_type; + + RTCNonStandardStatsMember vpn; + RTCNonStandardStatsMember network_adapter_type; protected: RTCIceCandidateStats(const std::string& id, @@ -259,63 +286,55 @@ class RTC_EXPORT RTCRemoteIceCandidateStats final const char* type() const override; }; -// https://w3c.github.io/webrtc-stats/#msstats-dict* -// TODO(hbos): Tracking bug crbug.com/660827 -class RTC_EXPORT RTCMediaStreamStats final : public RTCStats { +// TODO(https://crbug.com/webrtc/14419): Delete this class, it's deprecated. +class RTC_EXPORT DEPRECATED_RTCMediaStreamStats final : public RTCStats { public: WEBRTC_RTCSTATS_DECL(); - RTCMediaStreamStats(const std::string& id, int64_t timestamp_us); - RTCMediaStreamStats(std::string&& id, int64_t timestamp_us); - RTCMediaStreamStats(const RTCMediaStreamStats& other); - ~RTCMediaStreamStats() override; + DEPRECATED_RTCMediaStreamStats(const std::string& id, int64_t timestamp_us); + DEPRECATED_RTCMediaStreamStats(std::string&& id, int64_t timestamp_us); + DEPRECATED_RTCMediaStreamStats(const DEPRECATED_RTCMediaStreamStats& other); + ~DEPRECATED_RTCMediaStreamStats() override; RTCStatsMember stream_identifier; RTCStatsMember> track_ids; }; +using RTCMediaStreamStats [[deprecated("bugs.webrtc.org/14419")]] = + DEPRECATED_RTCMediaStreamStats; -// https://w3c.github.io/webrtc-stats/#mststats-dict* -// TODO(hbos): Tracking bug crbug.com/659137 -class RTC_EXPORT RTCMediaStreamTrackStats final : public RTCStats { +// TODO(https://crbug.com/webrtc/14175): Delete this class, it's deprecated. +class RTC_EXPORT DEPRECATED_RTCMediaStreamTrackStats final : public RTCStats { public: WEBRTC_RTCSTATS_DECL(); - RTCMediaStreamTrackStats(const std::string& id, - int64_t timestamp_us, - const char* kind); - RTCMediaStreamTrackStats(std::string&& id, - int64_t timestamp_us, - const char* kind); - RTCMediaStreamTrackStats(const RTCMediaStreamTrackStats& other); - ~RTCMediaStreamTrackStats() override; + DEPRECATED_RTCMediaStreamTrackStats(const std::string& id, + int64_t timestamp_us, + const char* kind); + DEPRECATED_RTCMediaStreamTrackStats(std::string&& id, + int64_t timestamp_us, + const char* kind); + DEPRECATED_RTCMediaStreamTrackStats( + const DEPRECATED_RTCMediaStreamTrackStats& other); + ~DEPRECATED_RTCMediaStreamTrackStats() override; RTCStatsMember track_identifier; RTCStatsMember media_source_id; RTCStatsMember remote_source; RTCStatsMember ended; - // TODO(hbos): `RTCStatsCollector` does not return stats for detached tracks. - // crbug.com/659137 + // TODO(https://crbug.com/webrtc/14173): Remove this obsolete metric. RTCStatsMember detached; - // See `RTCMediaStreamTrackKind` for valid values. + // Enum type RTCMediaStreamTrackKind. RTCStatsMember kind; RTCStatsMember jitter_buffer_delay; RTCStatsMember jitter_buffer_emitted_count; // Video-only members RTCStatsMember frame_width; RTCStatsMember frame_height; - // TODO(hbos): Not collected by `RTCStatsCollector`. crbug.com/659137 - RTCStatsMember frames_per_second; RTCStatsMember frames_sent; RTCStatsMember huge_frames_sent; RTCStatsMember frames_received; RTCStatsMember frames_decoded; RTCStatsMember frames_dropped; - // TODO(hbos): Not collected by `RTCStatsCollector`. crbug.com/659137 - RTCStatsMember frames_corrupted; - // TODO(hbos): Not collected by `RTCStatsCollector`. 
crbug.com/659137 - RTCStatsMember partial_frames_lost; - // TODO(hbos): Not collected by `RTCStatsCollector`. crbug.com/659137 - RTCStatsMember full_frames_lost; // Audio-only members RTCStatsMember audio_level; // Receive-only RTCStatsMember total_audio_energy; // Receive-only @@ -328,32 +347,24 @@ class RTC_EXPORT RTCMediaStreamTrackStats final : public RTCStats { RTCStatsMember concealment_events; RTCStatsMember inserted_samples_for_deceleration; RTCStatsMember removed_samples_for_acceleration; - // Non-standard audio-only member - // TODO(kuddai): Add description to standard. crbug.com/webrtc/10042 + // TODO(crbug.com/webrtc/14524): These metrics have been moved, delete them. RTCNonStandardStatsMember jitter_buffer_flushes; RTCNonStandardStatsMember delayed_packet_outage_samples; RTCNonStandardStatsMember relative_packet_arrival_delay; - // Non-standard metric showing target delay of jitter buffer. - // This value is increased by the target jitter buffer delay every time a - // sample is emitted by the jitter buffer. The added target is the target - // delay, in seconds, at the time that the sample was emitted from the jitter - // buffer. (https://github.com/w3c/webrtc-provisional-stats/pull/20) - // Currently it is implemented only for audio. - // TODO(titovartem) implement for video streams when will be requested. - RTCNonStandardStatsMember jitter_buffer_target_delay; - // TODO(henrik.lundin): Add description of the interruption metrics at - // https://github.com/henbos/webrtc-provisional-stats/issues/17 RTCNonStandardStatsMember interruption_count; RTCNonStandardStatsMember total_interruption_duration; // Non-standard video-only members. - // https://henbos.github.io/webrtc-provisional-stats/#RTCVideoReceiverStats-dict* + // https://w3c.github.io/webrtc-provisional-stats/#dom-rtcvideoreceiverstats + RTCNonStandardStatsMember total_frames_duration; + RTCNonStandardStatsMember sum_squared_frame_durations; + // TODO(crbug.com/webrtc/14521): These metrics have been moved, delete them. 
RTCNonStandardStatsMember freeze_count; RTCNonStandardStatsMember pause_count; RTCNonStandardStatsMember total_freezes_duration; RTCNonStandardStatsMember total_pauses_duration; - RTCNonStandardStatsMember total_frames_duration; - RTCNonStandardStatsMember sum_squared_frame_durations; }; +using RTCMediaStreamTrackStats [[deprecated("bugs.webrtc.org/14175")]] = + DEPRECATED_RTCMediaStreamTrackStats; // https://w3c.github.io/webrtc-stats/#pcstats-dict* class RTC_EXPORT RTCPeerConnectionStats final : public RTCStats { @@ -370,7 +381,6 @@ class RTC_EXPORT RTCPeerConnectionStats final : public RTCStats { }; // https://w3c.github.io/webrtc-stats/#streamstats-dict* -// TODO(hbos): Tracking bug crbug.com/657854 class RTC_EXPORT RTCRTPStreamStats : public RTCStats { public: WEBRTC_RTCSTATS_DECL(); @@ -401,16 +411,8 @@ class RTC_EXPORT RTCReceivedRtpStreamStats : public RTCRTPStreamStats { RTCReceivedRtpStreamStats(const RTCReceivedRtpStreamStats& other); ~RTCReceivedRtpStreamStats() override; - // TODO(hbos) The following fields need to be added and migrated - // both from RTCInboundRtpStreamStats and RTCRemoteInboundRtpStreamStats: - // packetsReceived, packetsRepaired, burstPacketsLost, - // burstPacketDiscarded, burstLossCount, burstDiscardCount, burstLossRate, - // burstDiscardRate, gapLossRate, gapDiscardRate, framesDropped, - // partialFramesLost, fullFramesLost - // crbug.com/webrtc/12532 RTCStatsMember jitter; RTCStatsMember packets_lost; // Signed per RFC 3550 - RTCStatsMember packets_discarded; protected: RTCReceivedRtpStreamStats(const std::string&& id, int64_t timestamp_us); @@ -434,8 +436,6 @@ class RTC_EXPORT RTCSentRtpStreamStats : public RTCRTPStreamStats { }; // https://w3c.github.io/webrtc-stats/#inboundrtpstats-dict* -// TODO(hbos): Support the remote case |is_remote = true|. -// https://bugs.webrtc.org/7065 class RTC_EXPORT RTCInboundRTPStreamStats final : public RTCReceivedRtpStreamStats { public: @@ -446,14 +446,21 @@ class RTC_EXPORT RTCInboundRTPStreamStats final RTCInboundRTPStreamStats(const RTCInboundRTPStreamStats& other); ~RTCInboundRTPStreamStats() override; + // TODO(https://crbug.com/webrtc/14174): Implement trackIdentifier and kind. + + RTCStatsMember track_identifier; + RTCStatsMember mid; RTCStatsMember remote_id; RTCStatsMember packets_received; + RTCStatsMember packets_discarded; RTCStatsMember fec_packets_received; RTCStatsMember fec_packets_discarded; RTCStatsMember bytes_received; RTCStatsMember header_bytes_received; RTCStatsMember last_packet_received_timestamp; RTCStatsMember jitter_buffer_delay; + RTCStatsMember jitter_buffer_target_delay; + RTCStatsMember jitter_buffer_minimum_delay; RTCStatsMember jitter_buffer_emitted_count; RTCStatsMember total_samples_received; RTCStatsMember concealed_samples; @@ -464,54 +471,56 @@ class RTC_EXPORT RTCInboundRTPStreamStats final RTCStatsMember audio_level; RTCStatsMember total_audio_energy; RTCStatsMember total_samples_duration; + // Stats below are only implemented or defined for video. RTCStatsMember frames_received; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 - RTCStatsMember round_trip_time; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 - RTCStatsMember packets_repaired; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 - RTCStatsMember burst_packets_lost; - // TODO(hbos): Collect and populate this value. 
https://bugs.webrtc.org/7065 - RTCStatsMember burst_packets_discarded; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 - RTCStatsMember burst_loss_count; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 - RTCStatsMember burst_discard_count; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 - RTCStatsMember burst_loss_rate; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 - RTCStatsMember burst_discard_rate; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 - RTCStatsMember gap_loss_rate; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 - RTCStatsMember gap_discard_rate; RTCStatsMember frame_width; RTCStatsMember frame_height; - RTCStatsMember frame_bit_depth; RTCStatsMember frames_per_second; RTCStatsMember frames_decoded; RTCStatsMember key_frames_decoded; RTCStatsMember frames_dropped; RTCStatsMember total_decode_time; + RTCStatsMember total_processing_delay; + RTCStatsMember total_assembly_time; + RTCStatsMember frames_assembled_from_multiple_packets; RTCStatsMember total_inter_frame_delay; RTCStatsMember total_squared_inter_frame_delay; - // https://henbos.github.io/webrtc-provisional-stats/#dom-rtcinboundrtpstreamstats-contenttype + RTCStatsMember pause_count; + RTCStatsMember total_pauses_duration; + RTCStatsMember freeze_count; + RTCStatsMember total_freezes_duration; + // https://w3c.github.io/webrtc-provisional-stats/#dom-rtcinboundrtpstreamstats-contenttype RTCStatsMember content_type; - // TODO(asapersson): Currently only populated if audio/video sync is enabled. + // Only populated if audio/video sync is enabled. + // TODO(https://crbug.com/webrtc/14177): Expose even if A/V sync is off? RTCStatsMember estimated_playout_timestamp; - // TODO(hbos): This is only implemented for video; implement it for audio as - // well. + // Only implemented for video. + // TODO(https://crbug.com/webrtc/14178): Also implement for audio. RTCStatsMember decoder_implementation; - // FIR and PLI counts are only defined for |media_type == "video"|. + // FIR and PLI counts are only defined for |kind == "video"|. RTCStatsMember fir_count; RTCStatsMember pli_count; RTCStatsMember nack_count; RTCStatsMember qp_sum; + // This is a remnant of the legacy getStats() API. When the "video-timing" + // header extension is used, + // https://webrtc.github.io/webrtc-org/experiments/rtp-hdrext/video-timing/, + // `googTimingFrameInfo` is exposed with the value of + // TimingFrameInfo::ToString(). + // TODO(https://crbug.com/webrtc/14586): Unship or standardize this metric. + RTCStatsMember goog_timing_frame_info; + // Non-standard audio metrics. + RTCNonStandardStatsMember jitter_buffer_flushes; + RTCNonStandardStatsMember delayed_packet_outage_samples; + RTCNonStandardStatsMember relative_packet_arrival_delay; + RTCNonStandardStatsMember interruption_count; + RTCNonStandardStatsMember total_interruption_duration; + + // The former googMinPlayoutDelayMs (in seconds). + RTCNonStandardStatsMember min_playout_delay; }; // https://w3c.github.io/webrtc-stats/#outboundrtpstats-dict* -// TODO(hbos): Support the remote case |is_remote = true|. 
-// https://bugs.webrtc.org/7066 class RTC_EXPORT RTCOutboundRTPStreamStats final : public RTCRTPStreamStats { public: WEBRTC_RTCSTATS_DECL(); @@ -523,13 +532,13 @@ class RTC_EXPORT RTCOutboundRTPStreamStats final : public RTCRTPStreamStats { RTCStatsMember media_source_id; RTCStatsMember remote_id; + RTCStatsMember mid; RTCStatsMember rid; RTCStatsMember packets_sent; RTCStatsMember retransmitted_packets_sent; RTCStatsMember bytes_sent; RTCStatsMember header_bytes_sent; RTCStatsMember retransmitted_bytes_sent; - // TODO(https://crbug.com/webrtc/13394): Also collect this metric for video. RTCStatsMember target_bitrate; RTCStatsMember frames_encoded; RTCStatsMember key_frames_encoded; @@ -540,24 +549,23 @@ class RTC_EXPORT RTCOutboundRTPStreamStats final : public RTCRTPStreamStats { RTCStatsMember frames_per_second; RTCStatsMember frames_sent; RTCStatsMember huge_frames_sent; - // TODO(https://crbug.com/webrtc/10635): This is only implemented for video; - // implement it for audio as well. RTCStatsMember total_packet_send_delay; // Enum type RTCQualityLimitationReason RTCStatsMember quality_limitation_reason; RTCStatsMember> quality_limitation_durations; // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-qualitylimitationresolutionchanges RTCStatsMember quality_limitation_resolution_changes; - // https://henbos.github.io/webrtc-provisional-stats/#dom-rtcoutboundrtpstreamstats-contenttype + // https://w3c.github.io/webrtc-provisional-stats/#dom-rtcoutboundrtpstreamstats-contenttype RTCStatsMember content_type; - // TODO(hbos): This is only implemented for video; implement it for audio as - // well. + // Only implemented for video. + // TODO(https://crbug.com/webrtc/14178): Implement for audio as well. RTCStatsMember encoder_implementation; - // FIR and PLI counts are only defined for |media_type == "video"|. + // FIR and PLI counts are only defined for |kind == "video"|. RTCStatsMember fir_count; RTCStatsMember pli_count; RTCStatsMember nack_count; RTCStatsMember qp_sum; + RTCStatsMember active; }; // https://w3c.github.io/webrtc-stats/#remoteinboundrtpstats-dict* @@ -571,11 +579,6 @@ class RTC_EXPORT RTCRemoteInboundRtpStreamStats final RTCRemoteInboundRtpStreamStats(const RTCRemoteInboundRtpStreamStats& other); ~RTCRemoteInboundRtpStreamStats() override; - // TODO(hbos): The following RTCReceivedRtpStreamStats metrics should also be - // implemented: packetsReceived, packetsRepaired, - // burstPacketsLost, burstPacketsDiscarded, burstLossCount, burstDiscardCount, - // burstLossRate, burstDiscardRate, gapLossRate and gapDiscardRate. - // RTCRemoteInboundRtpStreamStats RTCStatsMember local_id; RTCStatsMember round_trip_time; RTCStatsMember fraction_lost; @@ -666,15 +669,19 @@ class RTC_EXPORT RTCTransportStats final : public RTCStats { RTCStatsMember bytes_received; RTCStatsMember packets_received; RTCStatsMember rtcp_transport_stats_id; - // TODO(hbos): Support enum types? "RTCStatsMember"? + // Enum type RTCDtlsTransportState. 
RTCStatsMember dtls_state; RTCStatsMember selected_candidate_pair_id; RTCStatsMember local_certificate_id; RTCStatsMember remote_certificate_id; RTCStatsMember tls_version; RTCStatsMember dtls_cipher; + RTCStatsMember dtls_role; RTCStatsMember srtp_cipher; RTCStatsMember selected_candidate_pair_changes; + RTCStatsMember ice_role; + RTCStatsMember ice_local_username_fragment; + RTCStatsMember ice_state; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/stats_types.cc b/TMessagesProj/jni/voip/webrtc/api/stats_types.cc index 1090643f1c..61a0b8499d 100644 --- a/TMessagesProj/jni/voip/webrtc/api/stats_types.cc +++ b/TMessagesProj/jni/voip/webrtc/api/stats_types.cc @@ -13,8 +13,8 @@ #include #include "absl/algorithm/container.h" +#include "api/make_ref_counted.h" #include "rtc_base/checks.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/string_encode.h" // TODO(tommi): Could we have a static map of value name -> expected type @@ -648,8 +648,6 @@ const char* StatsReport::Value::display_name() const { return "googTrackId"; case kStatsValueNameTimingFrameInfo: return "googTimingFrameInfo"; - case kStatsValueNameTypingNoiseState: - return "googTypingNoiseState"; case kStatsValueNameWritable: return "googWritable"; case kStatsValueNameAudioDeviceUnderrunCounter: diff --git a/TMessagesProj/jni/voip/webrtc/api/stats_types.h b/TMessagesProj/jni/voip/webrtc/api/stats_types.h index b7cb8eff7d..d75da46439 100644 --- a/TMessagesProj/jni/voip/webrtc/api/stats_types.h +++ b/TMessagesProj/jni/voip/webrtc/api/stats_types.h @@ -22,7 +22,6 @@ #include "api/scoped_refptr.h" #include "api/sequence_checker.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" @@ -236,7 +235,6 @@ class RTC_EXPORT StatsReport { kStatsValueNameTrackId, kStatsValueNameTransmitBitrate, kStatsValueNameTransportType, - kStatsValueNameTypingNoiseState, kStatsValueNameWritable, kStatsValueNameAudioDeviceUnderrunCounter, kStatsValueNameLocalCandidateRelayProtocol, @@ -288,6 +286,9 @@ class RTC_EXPORT StatsReport { ~Value(); + Value(const Value&) = delete; + Value& operator=(const Value&) = delete; + // Support ref counting. Note that for performance reasons, we // don't use thread safe operations. Therefore, all operations // affecting the ref count (in practice, creation and copying of @@ -358,8 +359,6 @@ class RTC_EXPORT StatsReport { const char* static_string_; Id* id_; } value_; - - RTC_DISALLOW_COPY_AND_ASSIGN(Value); }; typedef rtc::scoped_refptr ValuePtr; @@ -369,6 +368,9 @@ class RTC_EXPORT StatsReport { explicit StatsReport(const Id& id); ~StatsReport(); + StatsReport(const StatsReport&) = delete; + StatsReport& operator=(const StatsReport&) = delete; + // Factory functions for various types of stats IDs. static Id NewBandwidthEstimationId(); static Id NewTypedId(StatsType type, const std::string& id); @@ -408,8 +410,6 @@ class RTC_EXPORT StatsReport { const Id id_; double timestamp_; // Time since 1970-01-01T00:00:00Z in milliseconds. Values values_; - - RTC_DISALLOW_COPY_AND_ASSIGN(StatsReport); }; // Typedef for an array of const StatsReport pointers. 
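The stats_types.h hunks above replace the RTC_DISALLOW_COPY_AND_ASSIGN macro with explicitly deleted members. A minimal sketch of the resulting idiom, with an illustrative class name:

class NonCopyable {
 public:
  NonCopyable() = default;

  // What the macro used to expand to, now spelled out in the class itself.
  NonCopyable(const NonCopyable&) = delete;
  NonCopyable& operator=(const NonCopyable&) = delete;
};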
diff --git a/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory.h b/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory.h index ccdd1ebec0..1d2dbd7ec2 100644 --- a/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory.h @@ -12,11 +12,13 @@ #include +#include "api/field_trials_view.h" #include "api/task_queue/task_queue_factory.h" namespace webrtc { -std::unique_ptr CreateDefaultTaskQueueFactory(); +std::unique_ptr CreateDefaultTaskQueueFactory( + const FieldTrialsView* field_trials = nullptr); } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_gcd.cc b/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_gcd.cc index 7e17b4846d..391f09b393 100644 --- a/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_gcd.cc +++ b/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_gcd.cc @@ -9,12 +9,14 @@ */ #include +#include "api/field_trials_view.h" #include "api/task_queue/task_queue_factory.h" #include "rtc_base/task_queue_gcd.h" namespace webrtc { -std::unique_ptr CreateDefaultTaskQueueFactory() { +std::unique_ptr CreateDefaultTaskQueueFactory( + const FieldTrialsView* field_trials) { return CreateTaskQueueGcdFactory(); } diff --git a/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_libevent.cc b/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_libevent.cc index f2fb418fd3..89079f51ca 100644 --- a/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_libevent.cc +++ b/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_libevent.cc @@ -9,12 +9,14 @@ */ #include +#include "api/field_trials_view.h" #include "api/task_queue/task_queue_factory.h" #include "rtc_base/task_queue_libevent.h" namespace webrtc { -std::unique_ptr CreateDefaultTaskQueueFactory() { +std::unique_ptr CreateDefaultTaskQueueFactory( + const FieldTrialsView* field_trials) { return CreateTaskQueueLibeventFactory(); } diff --git a/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_stdlib.cc b/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_stdlib.cc index ca7d720cbe..10cda7c5ec 100644 --- a/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_stdlib.cc +++ b/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_stdlib.cc @@ -9,12 +9,14 @@ */ #include +#include "api/field_trials_view.h" #include "api/task_queue/task_queue_factory.h" #include "rtc_base/task_queue_stdlib.h" namespace webrtc { -std::unique_ptr CreateDefaultTaskQueueFactory() { +std::unique_ptr CreateDefaultTaskQueueFactory( + const FieldTrialsView* field_trials) { return CreateTaskQueueStdlibFactory(); } diff --git a/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_win.cc b/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_win.cc index 493ea66ea5..e3adc07327 100644 --- a/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_win.cc +++ b/TMessagesProj/jni/voip/webrtc/api/task_queue/default_task_queue_factory_win.cc @@ -9,12 +9,14 @@ */ #include +#include "api/field_trials_view.h" #include "api/task_queue/task_queue_factory.h" #include "rtc_base/task_queue_win.h" namespace webrtc { -std::unique_ptr CreateDefaultTaskQueueFactory() { +std::unique_ptr CreateDefaultTaskQueueFactory( + const 
FieldTrialsView* field_trials) { return CreateTaskQueueWinFactory(); } diff --git a/TMessagesProj/jni/voip/webrtc/api/task_queue/pending_task_safety_flag.cc b/TMessagesProj/jni/voip/webrtc/api/task_queue/pending_task_safety_flag.cc new file mode 100644 index 0000000000..437ce0755d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/task_queue/pending_task_safety_flag.cc @@ -0,0 +1,57 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/task_queue/pending_task_safety_flag.h" + +namespace webrtc { + +// static +rtc::scoped_refptr PendingTaskSafetyFlag::CreateInternal( + bool alive) { + // Explicit new, to access private constructor. + return rtc::scoped_refptr( + new PendingTaskSafetyFlag(alive)); +} + +// static +rtc::scoped_refptr PendingTaskSafetyFlag::Create() { + return CreateInternal(true); +} + +rtc::scoped_refptr +PendingTaskSafetyFlag::CreateDetached() { + rtc::scoped_refptr safety_flag = CreateInternal(true); + safety_flag->main_sequence_.Detach(); + return safety_flag; +} + +rtc::scoped_refptr +PendingTaskSafetyFlag::CreateDetachedInactive() { + rtc::scoped_refptr safety_flag = CreateInternal(false); + safety_flag->main_sequence_.Detach(); + return safety_flag; +} + +void PendingTaskSafetyFlag::SetNotAlive() { + RTC_DCHECK_RUN_ON(&main_sequence_); + alive_ = false; +} + +void PendingTaskSafetyFlag::SetAlive() { + RTC_DCHECK_RUN_ON(&main_sequence_); + alive_ = true; +} + +bool PendingTaskSafetyFlag::alive() const { + RTC_DCHECK_RUN_ON(&main_sequence_); + return alive_; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.h b/TMessagesProj/jni/voip/webrtc/api/task_queue/pending_task_safety_flag.h similarity index 76% rename from TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.h rename to TMessagesProj/jni/voip/webrtc/api/task_queue/pending_task_safety_flag.h index 6446bfe55e..3b948ca8f1 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.h +++ b/TMessagesProj/jni/voip/webrtc/api/task_queue/pending_task_safety_flag.h @@ -8,9 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef RTC_BASE_TASK_UTILS_PENDING_TASK_SAFETY_FLAG_H_ -#define RTC_BASE_TASK_UTILS_PENDING_TASK_SAFETY_FLAG_H_ +#ifndef API_TASK_QUEUE_PENDING_TASK_SAFETY_FLAG_H_ +#define API_TASK_QUEUE_PENDING_TASK_SAFETY_FLAG_H_ +#include + +#include "absl/functional/any_invocable.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" @@ -35,25 +38,25 @@ namespace webrtc { // // class ExampleClass { // .... -// my_task_queue_->PostTask(ToQueuedTask( -// [safety = pending_task_safety_flag_, this]() { +// rtc::scoped_refptr flag = safety_flag_; +// my_task_queue_->PostTask( +// [flag = std::move(flag), this] { // // Now running on the main thread. -// if (!safety->alive()) +// if (!flag->alive()) // return; // MyMethod(); -// })); +// }); // .... 
// ~ExampleClass() { -// pending_task_safety_flag_->SetNotAlive(); +// safety_flag_->SetNotAlive(); // } -// scoped_refptr pending_task_safety_flag_ +// scoped_refptr safety_flag_ // = PendingTaskSafetyFlag::Create(); // } // -// ToQueuedTask has an overload that makes this check automatic: +// SafeTask makes this check automatic: // -// my_task_queue_->PostTask(ToQueuedTask(pending_task_safety_flag_, -// [this]() { MyMethod(); })); +// my_task_queue_->PostTask(SafeTask(safety_flag_, [this] { MyMethod(); })); // class PendingTaskSafetyFlag final : public rtc::RefCountedNonVirtual { @@ -93,6 +96,8 @@ class PendingTaskSafetyFlag final explicit PendingTaskSafetyFlag(bool alive) : alive_(alive) {} private: + static rtc::scoped_refptr CreateInternal(bool alive); + bool alive_ = true; RTC_NO_UNIQUE_ADDRESS SequenceChecker main_sequence_; }; @@ -101,13 +106,10 @@ class PendingTaskSafetyFlag final // It does automatic PTSF creation and signalling of destruction when the // ScopedTaskSafety instance goes out of scope. // -// ToQueuedTask has an overload that takes a ScopedTaskSafety too, so there -// is no need to explicitly call the "flag" method. -// // Example usage: // -// my_task_queue->PostTask(ToQueuedTask(scoped_task_safety, -// [this]() { +// my_task_queue->PostTask(SafeTask(scoped_task_safety.flag(), +// [this] { // // task goes here // } // @@ -117,11 +119,20 @@ class PendingTaskSafetyFlag final class ScopedTaskSafety final { public: ScopedTaskSafety() = default; + explicit ScopedTaskSafety(rtc::scoped_refptr flag) + : flag_(std::move(flag)) {} ~ScopedTaskSafety() { flag_->SetNotAlive(); } // Returns a new reference to the safety flag. rtc::scoped_refptr flag() const { return flag_; } + // Marks the current flag as not-alive and attaches to a new one. + void reset(rtc::scoped_refptr new_flag = + PendingTaskSafetyFlag::Create()) { + flag_->SetNotAlive(); + flag_ = std::move(new_flag); + } + private: rtc::scoped_refptr flag_ = PendingTaskSafetyFlag::Create(); @@ -142,6 +153,16 @@ class ScopedTaskSafetyDetached final { PendingTaskSafetyFlag::CreateDetached(); }; +inline absl::AnyInvocable SafeTask( + rtc::scoped_refptr flag, + absl::AnyInvocable task) { + return [flag = std::move(flag), task = std::move(task)]() mutable { + if (flag->alive()) { + std::move(task)(); + } + }; +} + } // namespace webrtc -#endif // RTC_BASE_TASK_UTILS_PENDING_TASK_SAFETY_FLAG_H_ +#endif // API_TASK_QUEUE_PENDING_TASK_SAFETY_FLAG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/task_queue/queued_task.h b/TMessagesProj/jni/voip/webrtc/api/task_queue/queued_task.h deleted file mode 100644 index 27a5eda5a5..0000000000 --- a/TMessagesProj/jni/voip/webrtc/api/task_queue/queued_task.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef API_TASK_QUEUE_QUEUED_TASK_H_ -#define API_TASK_QUEUE_QUEUED_TASK_H_ - -namespace webrtc { - -// Base interface for asynchronously executed tasks. -// The interface basically consists of a single function, Run(), that executes -// on the target queue. For more details see the Run() method and TaskQueue. 
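Pulling the pieces of this header together, a minimal sketch of the SafeTask/ScopedTaskSafety pattern. The Uploader class and queue wiring are illustrative, not from the patch, and the sketch assumes Uploader is created, used and destroyed on `queue`, the sequence its safety flag binds to:

#include "api/task_queue/pending_task_safety_flag.h"
#include "api/task_queue/task_queue_base.h"

// Illustrative: posts work to its own task queue and relies on the safety
// flag to drop the task if the object is destroyed before the task runs.
class Uploader {
 public:
  explicit Uploader(webrtc::TaskQueueBase* queue) : queue_(queue) {}

  void Start() {
    queue_->PostTask(webrtc::SafeTask(safety_.flag(), [this] { Upload(); }));
  }

 private:
  void Upload() { /* runs only while the flag is still alive */ }

  webrtc::TaskQueueBase* const queue_;
  webrtc::ScopedTaskSafety safety_;  // marks the flag not-alive on destruction
};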
-class QueuedTask { - public: - virtual ~QueuedTask() = default; - - // Main routine that will run when the task is executed on the desired queue. - // The task should return `true` to indicate that it should be deleted or - // `false` to indicate that the queue should consider ownership of the task - // having been transferred. Returning `false` can be useful if a task has - // re-posted itself to a different queue or is otherwise being re-used. - virtual bool Run() = 0; -}; - -} // namespace webrtc - -#endif // API_TASK_QUEUE_QUEUED_TASK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.cc b/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.cc index 7d3539a63d..ecdc7f7691 100644 --- a/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.cc +++ b/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.cc @@ -11,6 +11,8 @@ #include "absl/base/attributes.h" #include "absl/base/config.h" +#include "absl/functional/any_invocable.h" +#include "api/units/time_delta.h" #include "rtc_base/checks.h" #if defined(ABSL_HAVE_THREAD_LOCAL) diff --git a/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.h b/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.h index d8af6e67db..a2cff9c738 100644 --- a/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.h +++ b/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.h @@ -11,8 +11,10 @@ #define API_TASK_QUEUE_TASK_QUEUE_BASE_H_ #include +#include -#include "api/task_queue/queued_task.h" +#include "absl/functional/any_invocable.h" +#include "api/units/time_delta.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/thread_annotations.h" @@ -24,6 +26,16 @@ namespace webrtc { // known task queue, use IsCurrent(). class RTC_LOCKABLE RTC_EXPORT TaskQueueBase { public: + enum class DelayPrecision { + // This may include up to a 17 ms leeway in addition to OS timer precision. + // See PostDelayedTask() for more information. + kLow, + // This does not have the additional delay that kLow has, but it is still + // limited by OS timer precision. See PostDelayedHighPrecisionTask() for + // more information. + kHigh, + }; + // Starts destruction of the task queue. // On return ensures no task are running and no new tasks are able to start // on the task queue. @@ -37,24 +49,74 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueueBase { // was created on. virtual void Delete() = 0; - // Schedules a task to execute. Tasks are executed in FIFO order. - // If `task->Run()` returns true, task is deleted on the task queue - // before next QueuedTask starts executing. + // Schedules a `task` to execute. Tasks are executed in FIFO order. // When a TaskQueue is deleted, pending tasks will not be executed but they // will be deleted. The deletion of tasks may happen synchronously on the // TaskQueue or it may happen asynchronously after TaskQueue is deleted. // This may vary from one implementation to the next so assumptions about // lifetimes of pending tasks should not be made. // May be called on any thread or task queue, including this task queue. - virtual void PostTask(std::unique_ptr task) = 0; + virtual void PostTask(absl::AnyInvocable task) = 0; - // Schedules a task to execute a specified number of milliseconds from when - // the call is made. The precision should be considered as "best effort" - // and in some cases, such as on Windows when all high precision timers have - // been used up, can be off by as much as 15 millseconds. 
+ // Prefer PostDelayedTask() over PostDelayedHighPrecisionTask() whenever + // possible. + // + // Schedules a `task` to execute a specified `delay` from when the call is + // made, using "low" precision. All scheduling is affected by OS-specific + // leeway and current workloads which means that in terms of precision there + // are no hard guarantees, but in addition to the OS induced leeway, "low" + // precision adds up to a 17 ms additional leeway. The purpose of this leeway + // is to achieve more efficient CPU scheduling and reduce Idle Wake Up + // frequency. + // + // The task may execute with [-1, 17 + OS induced leeway) ms additional delay. + // + // Avoid making assumptions about the precision of the OS scheduler. On macOS, + // the OS induced leeway may be 10% of sleep interval. On Windows, 1 ms + // precision timers may be used but there are cases, such as when running on + // battery, when the timer precision can be as poor as 15 ms. + // + // "Low" precision is not implemented everywhere yet. Where not yet + // implemented, PostDelayedTask() has "high" precision. See + // https://crbug.com/webrtc/13583 for more information. + // // May be called on any thread or task queue, including this task queue. - virtual void PostDelayedTask(std::unique_ptr task, - uint32_t milliseconds) = 0; + virtual void PostDelayedTask(absl::AnyInvocable task, + TimeDelta delay) = 0; + + // Prefer PostDelayedTask() over PostDelayedHighPrecisionTask() whenever + // possible. + // + // Schedules a `task` to execute a specified `delay` from when the call is + // made, using "high" precision. All scheduling is affected by OS-specific + // leeway and current workloads which means that in terms of precision there + // are no hard guarantees. + // + // The task may execute with [-1, OS induced leeway] ms additional delay. + // + // Avoid making assumptions about the precision of the OS scheduler. On macOS, + // the OS induced leeway may be 10% of sleep interval. On Windows, 1 ms + // precision timers may be used but there are cases, such as when running on + // battery, when the timer precision can be as poor as 15 ms. + // + // May be called on any thread or task queue, including this task queue. + virtual void PostDelayedHighPrecisionTask(absl::AnyInvocable task, + TimeDelta delay) = 0; + + // As specified by `precision`, calls either PostDelayedTask() or + // PostDelayedHighPrecisionTask(). + void PostDelayedTaskWithPrecision(DelayPrecision precision, + absl::AnyInvocable task, + TimeDelta delay) { + switch (precision) { + case DelayPrecision::kLow: + PostDelayedTask(std::move(task), delay); + break; + case DelayPrecision::kHigh: + PostDelayedHighPrecisionTask(std::move(task), delay); + break; + } + } // Returns the task queue that is running the current thread. // Returns nullptr if this thread is not associated with any task queue. 
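A short sketch of the replacement posting API described above: AnyInvocable payloads, TimeDelta delays, and the precision selector. The queue pointer and delay values are illustrative:

#include "api/task_queue/task_queue_base.h"
#include "api/units/time_delta.h"

// Illustrative: the three ways a caller can schedule work after this change.
void PostExamples(webrtc::TaskQueueBase* queue) {
  queue->PostTask([] { /* runs in FIFO order, as soon as possible */ });

  // Low precision: may run up to ~17 ms (plus OS leeway) after the delay.
  queue->PostDelayedTask([] { /* deferred work */ },
                         webrtc::TimeDelta::Millis(200));

  // High precision: only the OS-induced leeway applies.
  queue->PostDelayedTaskWithPrecision(
      webrtc::TaskQueueBase::DelayPrecision::kHigh,
      [] { /* timing-sensitive work */ }, webrtc::TimeDelta::Millis(10));
}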
@@ -63,7 +125,7 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueueBase { bool IsCurrent() const { return Current() == this; } protected: - class CurrentTaskQueueSetter { + class RTC_EXPORT CurrentTaskQueueSetter { public: explicit CurrentTaskQueueSetter(TaskQueueBase* task_queue); CurrentTaskQueueSetter(const CurrentTaskQueueSetter&) = delete; diff --git a/TMessagesProj/jni/voip/webrtc/api/test/compile_all_headers.cc b/TMessagesProj/jni/voip/webrtc/api/test/compile_all_headers.cc index 9275eb0791..1fcf63e97b 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/compile_all_headers.cc +++ b/TMessagesProj/jni/voip/webrtc/api/test/compile_all_headers.cc @@ -27,13 +27,13 @@ // "api/test/videocodec_test_fixture.h" // "api/test/videocodec_test_stats.h" -#include "api/test/dummy_peer_connection.h" #include "api/test/fake_frame_decryptor.h" #include "api/test/fake_frame_encryptor.h" #include "api/test/mock_async_dns_resolver.h" #include "api/test/mock_audio_mixer.h" #include "api/test/mock_audio_sink.h" #include "api/test/mock_data_channel.h" +#include "api/test/mock_dtmf_sender.h" #include "api/test/mock_frame_decryptor.h" #include "api/test/mock_frame_encryptor.h" #include "api/test/mock_media_stream_interface.h" @@ -42,6 +42,7 @@ #include "api/test/mock_rtp_transceiver.h" #include "api/test/mock_rtpreceiver.h" #include "api/test/mock_rtpsender.h" +#include "api/test/mock_session_description_interface.h" #include "api/test/mock_transformable_video_frame.h" #include "api/test/mock_video_bitrate_allocator.h" #include "api/test/mock_video_bitrate_allocator_factory.h" diff --git a/TMessagesProj/jni/voip/webrtc/api/test/create_frame_generator.cc b/TMessagesProj/jni/voip/webrtc/api/test/create_frame_generator.cc index 7ed06473a1..5e6fb3228b 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/create_frame_generator.cc +++ b/TMessagesProj/jni/voip/webrtc/api/test/create_frame_generator.cc @@ -47,6 +47,23 @@ std::unique_ptr CreateFromYuvFileFrameGenerator( frame_repeat_count); } +std::unique_ptr CreateFromNV12FileFrameGenerator( + std::vector filenames, + size_t width, + size_t height, + int frame_repeat_count) { + RTC_DCHECK(!filenames.empty()); + std::vector files; + for (const std::string& filename : filenames) { + FILE* file = fopen(filename.c_str(), "rb"); + RTC_DCHECK(file != nullptr) << "Failed to open: '" << filename << "'\n"; + files.push_back(file); + } + + return std::make_unique(files, width, height, + frame_repeat_count); +} + std::unique_ptr CreateFromIvfFileFrameGenerator( std::string filename) { return std::make_unique(std::move(filename)); diff --git a/TMessagesProj/jni/voip/webrtc/api/test/create_frame_generator.h b/TMessagesProj/jni/voip/webrtc/api/test/create_frame_generator.h index cd4fcccd69..70be0c4e8e 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/create_frame_generator.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/create_frame_generator.h @@ -41,6 +41,15 @@ std::unique_ptr CreateFromYuvFileFrameGenerator( size_t height, int frame_repeat_count); +// Creates a frame generator that repeatedly plays a set of nv12 files. +// The frame_repeat_count determines how many times each frame is shown, +// with 1 = show each frame once, etc. +std::unique_ptr CreateFromNV12FileFrameGenerator( + std::vector filenames, + size_t width, + size_t height, + int frame_repeat_count = 1); + // Creates a frame generator that repeatedly plays an ivf file. 
std::unique_ptr CreateFromIvfFileFrameGenerator( std::string filename); diff --git a/TMessagesProj/jni/voip/webrtc/api/test/create_peerconnection_quality_test_fixture.cc b/TMessagesProj/jni/voip/webrtc/api/test/create_peerconnection_quality_test_fixture.cc index 2d9d0821fc..e156991ed4 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/create_peerconnection_quality_test_fixture.cc +++ b/TMessagesProj/jni/voip/webrtc/api/test/create_peerconnection_quality_test_fixture.cc @@ -13,6 +13,7 @@ #include #include +#include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/time_controller.h" #include "test/pc/e2e/peer_connection_quality_test.h" @@ -27,7 +28,8 @@ CreatePeerConnectionE2EQualityTestFixture( std::unique_ptr video_quality_analyzer) { return std::make_unique( std::move(test_case_name), time_controller, - std::move(audio_quality_analyzer), std::move(video_quality_analyzer)); + std::move(audio_quality_analyzer), std::move(video_quality_analyzer), + test::GetGlobalMetricsLogger()); } } // namespace webrtc_pc_e2e diff --git a/TMessagesProj/jni/voip/webrtc/api/test/create_time_controller.cc b/TMessagesProj/jni/voip/webrtc/api/test/create_time_controller.cc index f7faeaab42..d198f2b0fe 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/create_time_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/api/test/create_time_controller.cc @@ -37,23 +37,15 @@ std::unique_ptr CreateTimeControllerBasedCallFactory( explicit TimeControllerBasedCallFactory(TimeController* time_controller) : time_controller_(time_controller) {} Call* CreateCall(const Call::Config& config) override { - if (!module_thread_) { - module_thread_ = SharedModuleThread::Create( - time_controller_->CreateProcessThread("CallModules"), - [this]() { module_thread_ = nullptr; }); - } - RtpTransportConfig transportConfig = config.ExtractTransportConfig(); - return Call::Create(config, time_controller_->GetClock(), module_thread_, + return Call::Create(config, time_controller_->GetClock(), config.rtp_transport_controller_send_factory->Create( - transportConfig, time_controller_->GetClock(), - time_controller_->CreateProcessThread("Pacer"))); + transportConfig, time_controller_->GetClock())); } private: TimeController* time_controller_; - rtc::scoped_refptr module_thread_; }; return std::make_unique(time_controller); } diff --git a/TMessagesProj/jni/voip/webrtc/api/test/dummy_peer_connection.h b/TMessagesProj/jni/voip/webrtc/api/test/dummy_peer_connection.h deleted file mode 100644 index 80ae20c3c7..0000000000 --- a/TMessagesProj/jni/voip/webrtc/api/test/dummy_peer_connection.h +++ /dev/null @@ -1,251 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_TEST_DUMMY_PEER_CONNECTION_H_ -#define API_TEST_DUMMY_PEER_CONNECTION_H_ - -#include -#include -#include - -#include "api/peer_connection_interface.h" -#include "api/rtc_error.h" -#include "rtc_base/checks.h" -#include "rtc_base/ref_counted_object.h" - -namespace webrtc { - -// This class includes dummy implementations of all methods on the -// PeerconnectionInterface. Accessor/getter methods return empty or default -// values. State-changing methods with a return value return failure. 
Remaining -// methods (except Close())) will crash with FATAL if called. -class DummyPeerConnection : public PeerConnectionInterface { - rtc::scoped_refptr local_streams() override { - return nullptr; - } - rtc::scoped_refptr remote_streams() override { - return nullptr; - } - - bool AddStream(MediaStreamInterface* stream) override { return false; } - void RemoveStream(MediaStreamInterface* stream) override { - RTC_CHECK_NOTREACHED(); - } - - RTCErrorOr> AddTrack( - rtc::scoped_refptr track, - const std::vector& stream_ids) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); - } - - bool RemoveTrack(RtpSenderInterface* sender) override { return false; } - - RTCError RemoveTrackNew( - rtc::scoped_refptr sender) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); - } - - RTCErrorOr> AddTransceiver( - rtc::scoped_refptr track) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); - } - RTCErrorOr> AddTransceiver( - rtc::scoped_refptr track, - const RtpTransceiverInit& init) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); - } - - RTCErrorOr> AddTransceiver( - cricket::MediaType media_type) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); - } - RTCErrorOr> AddTransceiver( - cricket::MediaType media_type, - const RtpTransceiverInit& init) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); - } - - rtc::scoped_refptr CreateSender( - const std::string& kind, - const std::string& stream_id) override { - return nullptr; - } - - std::vector> GetSenders() - const override { - return {}; - } - - std::vector> GetReceivers() - const override { - return {}; - } - - std::vector> GetTransceivers() - const override { - return {}; - } - - bool GetStats(StatsObserver* observer, - MediaStreamTrackInterface* track, // Optional - StatsOutputLevel level) override { - return false; - } - - void GetStats(RTCStatsCollectorCallback* callback) override { - RTC_CHECK_NOTREACHED(); - } - void GetStats( - rtc::scoped_refptr selector, - rtc::scoped_refptr callback) override { - RTC_CHECK_NOTREACHED(); - } - void GetStats( - rtc::scoped_refptr selector, - rtc::scoped_refptr callback) override { - RTC_CHECK_NOTREACHED(); - } - void ClearStatsCache() override {} - - RTCErrorOr> CreateDataChannelOrError( - const std::string& label, - const DataChannelInit* config) override { - return RTCError(RTCErrorType::INTERNAL_ERROR, "Dummy function called"); - } - - const SessionDescriptionInterface* local_description() const override { - return nullptr; - } - const SessionDescriptionInterface* remote_description() const override { - return nullptr; - } - - const SessionDescriptionInterface* current_local_description() - const override { - return nullptr; - } - const SessionDescriptionInterface* current_remote_description() - const override { - return nullptr; - } - - const SessionDescriptionInterface* pending_local_description() - const override { - return nullptr; - } - const SessionDescriptionInterface* pending_remote_description() - const override { - return nullptr; - } - - void RestartIce() override { RTC_CHECK_NOTREACHED(); } - - // Create a new offer. - // The CreateSessionDescriptionObserver callback will be called when done. 
- void CreateOffer(CreateSessionDescriptionObserver* observer, - const RTCOfferAnswerOptions& options) override { - RTC_CHECK_NOTREACHED(); - } - - void CreateAnswer(CreateSessionDescriptionObserver* observer, - const RTCOfferAnswerOptions& options) override { - RTC_CHECK_NOTREACHED(); - } - - void SetLocalDescription(SetSessionDescriptionObserver* observer, - SessionDescriptionInterface* desc) override { - RTC_CHECK_NOTREACHED(); - } - void SetRemoteDescription(SetSessionDescriptionObserver* observer, - SessionDescriptionInterface* desc) override { - RTC_CHECK_NOTREACHED(); - } - void SetRemoteDescription( - std::unique_ptr desc, - rtc::scoped_refptr observer) - override { - RTC_CHECK_NOTREACHED(); - } - - PeerConnectionInterface::RTCConfiguration GetConfiguration() override { - return RTCConfiguration(); - } - RTCError SetConfiguration( - const PeerConnectionInterface::RTCConfiguration& config) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); - } - - bool AddIceCandidate(const IceCandidateInterface* candidate) override { - return false; - } - bool RemoveIceCandidates( - const std::vector& candidates) override { - return false; - } - - RTCError SetBitrate(const BitrateSettings& bitrate) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); - } - - void SetAudioPlayout(bool playout) override { RTC_CHECK_NOTREACHED(); } - void SetAudioRecording(bool recording) override { RTC_CHECK_NOTREACHED(); } - - rtc::scoped_refptr LookupDtlsTransportByMid( - const std::string& mid) override { - return nullptr; - } - rtc::scoped_refptr GetSctpTransport() const override { - return nullptr; - } - - SignalingState signaling_state() override { return SignalingState(); } - - IceConnectionState ice_connection_state() override { - return IceConnectionState(); - } - - IceConnectionState standardized_ice_connection_state() override { - return IceConnectionState(); - } - - PeerConnectionState peer_connection_state() override { - return PeerConnectionState(); - } - - IceGatheringState ice_gathering_state() override { - return IceGatheringState(); - } - - absl::optional can_trickle_ice_candidates() { return absl::nullopt; } - - bool StartRtcEventLog(std::unique_ptr output, - int64_t output_period_ms) override { - return false; - } - bool StartRtcEventLog(std::unique_ptr output) override { - return false; - } - - void StopRtcEventLog() { RTC_CHECK_NOTREACHED(); } - - void Close() override {} - - rtc::Thread* signaling_thread() const override { - return rtc::Thread::Current(); - } -}; - -static_assert( - !std::is_abstract>::value, - ""); - -} // namespace webrtc - -#endif // API_TEST_DUMMY_PEER_CONNECTION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/fake_frame_decryptor.h b/TMessagesProj/jni/voip/webrtc/api/test/fake_frame_decryptor.h index bfd0e6903b..783bc805c4 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/fake_frame_decryptor.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/fake_frame_decryptor.h @@ -19,7 +19,6 @@ #include "api/array_view.h" #include "api/crypto/frame_decryptor_interface.h" #include "api/media_types.h" -#include "rtc_base/ref_counted_object.h" namespace webrtc { @@ -27,8 +26,7 @@ namespace webrtc { // FrameDecryptorInterface. It is constructed with a simple single digit key and // a fixed postfix byte. This is just to validate that the core code works // as expected. 
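// With RefCountedObject removed from the inheritance chain below, test code that
// needs a ref-counted handle is expected to wrap the fake explicitly. A minimal
// sketch (assuming the rtc::make_ref_counted helper present in this WebRTC
// revision and the two-argument key/postfix constructor described above; the
// parameter names in the inline comments are approximations):
//
//   rtc::scoped_refptr<FrameDecryptorInterface> decryptor =
//       rtc::make_ref_counted<FakeFrameDecryptor>(/*key=*/0xAA,
//                                                 /*postfix_byte=*/255);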
-class FakeFrameDecryptor final - : public rtc::RefCountedObject { +class FakeFrameDecryptor : public FrameDecryptorInterface { public: // Provide a key (0,255) and some postfix byte (0,255) this should match the // byte you expect from the FakeFrameEncryptor. diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/chrome_perf_dashboard_metrics_exporter.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/chrome_perf_dashboard_metrics_exporter.cc new file mode 100644 index 0000000000..018d110b12 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/chrome_perf_dashboard_metrics_exporter.cc @@ -0,0 +1,146 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h" + +#include + +#include +#include +#include + +#include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/test/metrics/metric.h" +#include "test/testsupport/file_utils.h" +#include "test/testsupport/perf_test_histogram_writer.h" +#include "test/testsupport/perf_test_result_writer.h" + +namespace webrtc { +namespace test { +namespace { + +std::string ToChromePerfDashboardUnit(Unit unit) { + switch (unit) { + case Unit::kMilliseconds: + return "msBestFitFormat"; + case Unit::kPercent: + return "n%"; + case Unit::kBytes: + return "sizeInBytes"; + case Unit::kKilobitsPerSecond: + // Chrome Perf Dashboard doesn't have kpbs units, so we change the unit + // and value accordingly. + return "bytesPerSecond"; + case Unit::kHertz: + return "Hz"; + case Unit::kUnitless: + return "unitless"; + case Unit::kCount: + return "count"; + } +} + +double ToChromePerfDashboardValue(double value, Unit unit) { + switch (unit) { + case Unit::kKilobitsPerSecond: + // Chrome Perf Dashboard doesn't have kpbs units, so we change the unit + // and value accordingly. 
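      // For example, a 20 kbps sample becomes 20 * 1000 / 8 = 2500 bytes per
      // second after this conversion.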
+ return value * 1000 / 8; + default: + return value; + } +} + +ImproveDirection ToChromePerfDashboardImproveDirection( + ImprovementDirection direction) { + switch (direction) { + case ImprovementDirection::kBiggerIsBetter: + return ImproveDirection::kBiggerIsBetter; + case ImprovementDirection::kNeitherIsBetter: + return ImproveDirection::kNone; + case ImprovementDirection::kSmallerIsBetter: + return ImproveDirection::kSmallerIsBetter; + } +} + +bool WriteMetricsToFile(const std::string& path, const std::string& data) { + CreateDir(DirName(path)); + FILE* output = fopen(path.c_str(), "wb"); + if (output == NULL) { + printf("Failed to write to %s.\n", path.c_str()); + return false; + } + size_t written = fwrite(data.c_str(), sizeof(char), data.size(), output); + fclose(output); + + if (written != data.size()) { + size_t expected = data.size(); + printf("Wrote %zu, tried to write %zu\n", written, expected); + return false; + } + return true; +} + +bool IsEmpty(const Metric::Stats& stats) { + return !stats.mean.has_value() && !stats.stddev.has_value() && + !stats.min.has_value() && !stats.max.has_value(); +} + +} // namespace + +ChromePerfDashboardMetricsExporter::ChromePerfDashboardMetricsExporter( + absl::string_view export_file_path) + : export_file_path_(export_file_path) {} + +bool ChromePerfDashboardMetricsExporter::Export( + rtc::ArrayView metrics) { + std::unique_ptr writer = + absl::WrapUnique(CreateHistogramWriter()); + for (const Metric& metric : metrics) { + if (metric.time_series.samples.empty() && IsEmpty(metric.stats)) { + // If there were no data collected for the metric it is expected that 0 + // will be exported, so add 0 to the samples. + writer->LogResult( + metric.name, metric.test_case, + ToChromePerfDashboardValue(0, metric.unit), + ToChromePerfDashboardUnit(metric.unit), + /*important=*/false, + ToChromePerfDashboardImproveDirection(metric.improvement_direction)); + continue; + } + + if (metric.time_series.samples.empty()) { + writer->LogResultMeanAndError( + metric.name, metric.test_case, + ToChromePerfDashboardValue(*metric.stats.mean, metric.unit), + ToChromePerfDashboardValue(*metric.stats.stddev, metric.unit), + ToChromePerfDashboardUnit(metric.unit), + /*important=*/false, + ToChromePerfDashboardImproveDirection(metric.improvement_direction)); + continue; + } + + std::vector samples(metric.time_series.samples.size()); + for (size_t i = 0; i < metric.time_series.samples.size(); ++i) { + samples[i] = ToChromePerfDashboardValue( + metric.time_series.samples[i].value, metric.unit); + } + writer->LogResultList( + metric.name, metric.test_case, samples, + ToChromePerfDashboardUnit(metric.unit), + /*important=*/false, + ToChromePerfDashboardImproveDirection(metric.improvement_direction)); + } + return WriteMetricsToFile(export_file_path_, writer->Serialize()); +} + +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/chrome_perf_dashboard_metrics_exporter.h b/TMessagesProj/jni/voip/webrtc/api/test/metrics/chrome_perf_dashboard_metrics_exporter.h new file mode 100644 index 0000000000..dda17a08c6 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/chrome_perf_dashboard_metrics_exporter.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_METRICS_CHROME_PERF_DASHBOARD_METRICS_EXPORTER_H_ +#define API_TEST_METRICS_CHROME_PERF_DASHBOARD_METRICS_EXPORTER_H_ + +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_exporter.h" + +namespace webrtc { +namespace test { + +// Exports all collected metrics in the Chrome Perf Dashboard proto format. +class ChromePerfDashboardMetricsExporter : public MetricsExporter { + public: + // `export_file_path` - path where the proto file will be written. + explicit ChromePerfDashboardMetricsExporter( + absl::string_view export_file_path); + ~ChromePerfDashboardMetricsExporter() override = default; + + bool Export(rtc::ArrayView metrics) override; + + private: + const std::string export_file_path_; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_METRICS_CHROME_PERF_DASHBOARD_METRICS_EXPORTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/chrome_perf_dashboard_metrics_exporter_test.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/chrome_perf_dashboard_metrics_exporter_test.cc new file mode 100644 index 0000000000..5d3136f49a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/chrome_perf_dashboard_metrics_exporter_test.cc @@ -0,0 +1,248 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h" + +#include +#include +#include + +#include "api/test/metrics/metric.h" +#include "api/units/timestamp.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" +#include "third_party/catapult/tracing/tracing/value/histogram.h" + +namespace webrtc { +namespace test { +namespace { + +using ::testing::DoubleNear; +using ::testing::Eq; +using ::testing::Test; + +namespace proto = ::catapult::tracing::tracing::proto; + +std::map DefaultMetadata() { + return std::map{{"key", "value"}}; +} + +Metric::TimeSeries::Sample Sample(double value) { + return Metric::TimeSeries::Sample{.timestamp = Timestamp::Seconds(1), + .value = value, + .sample_metadata = DefaultMetadata()}; +} + +std::string ReadFileAsString(const std::string& filename) { + std::ifstream infile(filename, std::ios_base::binary); + auto buffer = std::vector(std::istreambuf_iterator(infile), + std::istreambuf_iterator()); + return std::string(buffer.begin(), buffer.end()); +} + +class ChromePerfDashboardMetricsExporterTest : public Test { + protected: + ~ChromePerfDashboardMetricsExporterTest() override = default; + + void SetUp() override { + temp_filename_ = webrtc::test::TempFilename( + webrtc::test::OutputPath(), + "chrome_perf_dashboard_metrics_exporter_test"); + } + + void TearDown() override { + ASSERT_TRUE(webrtc::test::RemoveFile(temp_filename_)); + } + + std::string temp_filename_; +}; + +TEST_F(ChromePerfDashboardMetricsExporterTest, ExportMetricFormatCorrect) { + Metric metric1{ + .name = "test_metric1", + .unit = Unit::kMilliseconds, + .improvement_direction = ImprovementDirection::kBiggerIsBetter, + .test_case = "test_case_name1", + .metric_metadata = DefaultMetadata(), + .time_series = + Metric::TimeSeries{.samples = std::vector{Sample(10), Sample(20)}}, + .stats = + Metric::Stats{.mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}}; + Metric metric2{ + .name = "test_metric2", + .unit = Unit::kKilobitsPerSecond, + .improvement_direction = ImprovementDirection::kSmallerIsBetter, + .test_case = "test_case_name2", + .metric_metadata = DefaultMetadata(), + .time_series = + Metric::TimeSeries{.samples = std::vector{Sample(20), Sample(40)}}, + .stats = Metric::Stats{ + .mean = 30.0, .stddev = 10.0, .min = 20.0, .max = 40.0}}; + + ChromePerfDashboardMetricsExporter exporter(temp_filename_); + + ASSERT_TRUE(exporter.Export(std::vector{metric1, metric2})); + proto::HistogramSet actual_histogram_set; + actual_histogram_set.ParseFromString(ReadFileAsString(temp_filename_)); + EXPECT_THAT(actual_histogram_set.histograms().size(), Eq(2)); + + // Validate output for `metric1` + EXPECT_THAT(actual_histogram_set.histograms(0).name(), Eq("test_metric1")); + EXPECT_THAT(actual_histogram_set.histograms(0).unit().unit(), + Eq(proto::Unit::MS_BEST_FIT_FORMAT)); + EXPECT_THAT(actual_histogram_set.histograms(0).unit().improvement_direction(), + Eq(proto::ImprovementDirection::BIGGER_IS_BETTER)); + EXPECT_THAT( + actual_histogram_set.histograms(0).diagnostics().diagnostic_map().size(), + Eq(1lu)); + EXPECT_THAT(actual_histogram_set.histograms(0) + .diagnostics() + .diagnostic_map() + .at("stories") + .generic_set() + .values(0), + Eq("\"test_case_name1\"")); + EXPECT_THAT(actual_histogram_set.histograms(0).sample_values().size(), Eq(2)); + EXPECT_THAT(actual_histogram_set.histograms(0).sample_values(0), Eq(10.0)); + EXPECT_THAT(actual_histogram_set.histograms(0).sample_values(1), Eq(20.0)); + 
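  // Worked check of the running stats asserted below, derived from this test's
  // own samples: metric1 = {10, 20} ms gives count = 2, sum = 30, mean = 15 and
  // variance = ((10 - 15)^2 + (20 - 15)^2) / (2 - 1) = 50, i.e. the histogram
  // reports the n-1 (sample) form of the variance.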
EXPECT_THAT(actual_histogram_set.histograms(0).running().count(), Eq(2)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().max(), Eq(20)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().meanlogs(), + DoubleNear(2.64916, 0.1)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().mean(), Eq(15)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().min(), Eq(10)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().sum(), Eq(30)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().variance(), Eq(50)); + + // Validate output for `metric2` + EXPECT_THAT(actual_histogram_set.histograms(1).name(), Eq("test_metric2")); + EXPECT_THAT(actual_histogram_set.histograms(1).unit().unit(), + Eq(proto::Unit::BYTES_PER_SECOND)); + EXPECT_THAT(actual_histogram_set.histograms(1).unit().improvement_direction(), + Eq(proto::ImprovementDirection::SMALLER_IS_BETTER)); + EXPECT_THAT( + actual_histogram_set.histograms(1).diagnostics().diagnostic_map().size(), + Eq(1lu)); + EXPECT_THAT(actual_histogram_set.histograms(1) + .diagnostics() + .diagnostic_map() + .at("stories") + .generic_set() + .values(0), + Eq("\"test_case_name2\"")); + EXPECT_THAT(actual_histogram_set.histograms(1).sample_values().size(), Eq(2)); + EXPECT_THAT(actual_histogram_set.histograms(1).sample_values(0), Eq(2500.0)); + EXPECT_THAT(actual_histogram_set.histograms(1).sample_values(1), Eq(5000.0)); + EXPECT_THAT(actual_histogram_set.histograms(1).running().count(), Eq(2)); + EXPECT_THAT(actual_histogram_set.histograms(1).running().max(), Eq(5000)); + EXPECT_THAT(actual_histogram_set.histograms(1).running().meanlogs(), + DoubleNear(8.17062, 0.1)); + EXPECT_THAT(actual_histogram_set.histograms(1).running().mean(), Eq(3750)); + EXPECT_THAT(actual_histogram_set.histograms(1).running().min(), Eq(2500)); + EXPECT_THAT(actual_histogram_set.histograms(1).running().sum(), Eq(7500)); + EXPECT_THAT(actual_histogram_set.histograms(1).running().variance(), + Eq(3125000)); +} + +TEST_F(ChromePerfDashboardMetricsExporterTest, + ExportEmptyMetricExportsZeroValue) { + Metric metric{.name = "test_metric", + .unit = Unit::kMilliseconds, + .improvement_direction = ImprovementDirection::kBiggerIsBetter, + .test_case = "test_case_name", + .metric_metadata = DefaultMetadata(), + .time_series = Metric::TimeSeries{.samples = {}}, + .stats = Metric::Stats{}}; + + ChromePerfDashboardMetricsExporter exporter(temp_filename_); + + ASSERT_TRUE(exporter.Export(std::vector{metric})); + proto::HistogramSet actual_histogram_set; + actual_histogram_set.ParseFromString(ReadFileAsString(temp_filename_)); + EXPECT_THAT(actual_histogram_set.histograms().size(), Eq(1)); + + // Validate values for `metric` + EXPECT_THAT(actual_histogram_set.histograms(0).sample_values().size(), Eq(1)); + EXPECT_THAT(actual_histogram_set.histograms(0).sample_values(0), Eq(0.0)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().count(), Eq(1)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().max(), + DoubleNear(0, 1e-6)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().meanlogs(), Eq(0)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().mean(), Eq(0)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().min(), Eq(0)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().sum(), Eq(0)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().variance(), Eq(0)); +} + +TEST_F(ChromePerfDashboardMetricsExporterTest, + ExportMetricWithOnlyStatsExportsMeanValues) { + Metric metric{.name = "test_metric", + 
.unit = Unit::kMilliseconds, + .improvement_direction = ImprovementDirection::kBiggerIsBetter, + .test_case = "test_case_name", + .metric_metadata = DefaultMetadata(), + .time_series = Metric::TimeSeries{.samples = {}}, + .stats = Metric::Stats{ + .mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}}; + + ChromePerfDashboardMetricsExporter exporter(temp_filename_); + + ASSERT_TRUE(exporter.Export(std::vector{metric})); + proto::HistogramSet actual_histogram_set; + actual_histogram_set.ParseFromString(ReadFileAsString(temp_filename_)); + EXPECT_THAT(actual_histogram_set.histograms().size(), Eq(1)); + + // Validate values for `metric` + EXPECT_THAT(actual_histogram_set.histograms(0).sample_values().size(), Eq(1)); + EXPECT_THAT(actual_histogram_set.histograms(0).sample_values(0), Eq(15.0)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().count(), Eq(1)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().max(), Eq(15)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().meanlogs(), + DoubleNear(2.70805, 0.1)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().mean(), Eq(15)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().min(), Eq(15)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().sum(), Eq(15)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().variance(), Eq(0)); +} + +TEST_F(ChromePerfDashboardMetricsExporterTest, + ExportMetricWithOnlyStatsConvertsMeanValuesWhenRequired) { + Metric metric{.name = "test_metric", + .unit = Unit::kKilobitsPerSecond, + .improvement_direction = ImprovementDirection::kBiggerIsBetter, + .test_case = "test_case_name", + .metric_metadata = DefaultMetadata(), + .time_series = Metric::TimeSeries{.samples = {}}, + .stats = Metric::Stats{ + .mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}}; + + ChromePerfDashboardMetricsExporter exporter(temp_filename_); + + ASSERT_TRUE(exporter.Export(std::vector{metric})); + proto::HistogramSet actual_histogram_set; + actual_histogram_set.ParseFromString(ReadFileAsString(temp_filename_)); + EXPECT_THAT(actual_histogram_set.histograms().size(), Eq(1)); + + // Validate values for `metric` + EXPECT_THAT(actual_histogram_set.histograms(0).sample_values().size(), Eq(1)); + EXPECT_THAT(actual_histogram_set.histograms(0).sample_values(0), Eq(1875.0)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().count(), Eq(1)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().max(), Eq(1875)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().meanlogs(), + DoubleNear(7.53636, 0.1)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().mean(), Eq(1875)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().min(), Eq(1875)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().sum(), Eq(1875)); + EXPECT_THAT(actual_histogram_set.histograms(0).running().variance(), Eq(0)); +} + +} // namespace +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/global_metrics_logger_and_exporter.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/global_metrics_logger_and_exporter.cc new file mode 100644 index 0000000000..9c3c8978f5 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/global_metrics_logger_and_exporter.cc @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/test/metrics/global_metrics_logger_and_exporter.h" + +#include +#include +#include + +#include "api/test/metrics/metrics_exporter.h" +#include "api/test/metrics/metrics_logger.h" +#include "api/test/metrics/metrics_logger_and_exporter.h" +#include "rtc_base/checks.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { +namespace test { + +DefaultMetricsLogger* GetGlobalMetricsLogger() { + static DefaultMetricsLogger* logger_ = + new DefaultMetricsLogger(Clock::GetRealTimeClock()); + return logger_; +} + +bool ExportPerfMetric(MetricsLogger& logger, + std::vector> exporters) { + std::vector metrics = logger.GetCollectedMetrics(); + bool success = true; + for (auto& exporter : exporters) { + bool export_result = exporter->Export(metrics); + success = success && export_result; + } + return success; +} + +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/global_metrics_logger_and_exporter.h b/TMessagesProj/jni/voip/webrtc/api/test/metrics/global_metrics_logger_and_exporter.h new file mode 100644 index 0000000000..42bdf93c12 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/global_metrics_logger_and_exporter.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_METRICS_GLOBAL_METRICS_LOGGER_AND_EXPORTER_H_ +#define API_TEST_METRICS_GLOBAL_METRICS_LOGGER_AND_EXPORTER_H_ + +#include +#include + +#include "api/test/metrics/metrics_exporter.h" +#include "api/test/metrics/metrics_logger_and_exporter.h" + +namespace webrtc { +namespace test { + +// Returns non-null global `MetricsLogger` to log metrics. +DefaultMetricsLogger* GetGlobalMetricsLogger(); + +bool ExportPerfMetric(MetricsLogger& logger, + std::vector> exporters); + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_METRICS_GLOBAL_METRICS_LOGGER_AND_EXPORTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/global_metrics_logger_and_exporter_test.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/global_metrics_logger_and_exporter_test.cc new file mode 100644 index 0000000000..567b3da9e3 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/global_metrics_logger_and_exporter_test.cc @@ -0,0 +1,131 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "api/test/metrics/global_metrics_logger_and_exporter.h" + +#include +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_exporter.h" +#include "api/test/metrics/metrics_logger.h" +#include "system_wrappers/include/clock.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace test { +namespace { + +using ::testing::Eq; +using ::testing::IsEmpty; + +std::map DefaultMetadata() { + return std::map{{"key", "value"}}; +} + +struct TestMetricsExporterFactory { + public: + std::unique_ptr CreateExporter() { + return std::make_unique(this, /*export_result=*/true); + } + + std::unique_ptr CreateFailureExporter() { + return std::make_unique(this, /*export_result=*/false); + } + + std::vector exported_metrics; + + private: + class TestMetricsExporter : public MetricsExporter { + public: + TestMetricsExporter(TestMetricsExporterFactory* factory, bool export_result) + : factory_(factory), export_result_(export_result) {} + ~TestMetricsExporter() override = default; + + bool Export(rtc::ArrayView metrics) override { + factory_->exported_metrics = + std::vector(metrics.begin(), metrics.end()); + return export_result_; + } + + TestMetricsExporterFactory* factory_; + bool export_result_; + }; +}; + +TEST(ExportPerfMetricTest, CollectedMetricsAreExporter) { + TestMetricsExporterFactory exporter_factory; + + DefaultMetricsLogger logger(Clock::GetRealTimeClock()); + logger.LogSingleValueMetric( + "metric_name", "test_case_name", + /*value=*/10, Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + std::map{{"key", "value"}}); + + std::vector> exporters; + exporters.push_back(exporter_factory.CreateExporter()); + ASSERT_TRUE(ExportPerfMetric(logger, std::move(exporters))); + + std::vector metrics = exporter_factory.exported_metrics; + ASSERT_THAT(metrics.size(), Eq(1lu)); + const Metric& metric = metrics[0]; + EXPECT_THAT(metric.name, Eq("metric_name")); + EXPECT_THAT(metric.test_case, Eq("test_case_name")); + EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds)); + EXPECT_THAT(metric.improvement_direction, + Eq(ImprovementDirection::kBiggerIsBetter)); + EXPECT_THAT(metric.metric_metadata, + Eq(std::map{{"key", "value"}})); + ASSERT_THAT(metric.time_series.samples.size(), Eq(1lu)); + EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0)); + EXPECT_THAT(metric.time_series.samples[0].sample_metadata, + Eq(std::map{})); + ASSERT_THAT(metric.stats.mean, absl::optional(10.0)); + ASSERT_THAT(metric.stats.stddev, absl::nullopt); + ASSERT_THAT(metric.stats.min, absl::optional(10.0)); + ASSERT_THAT(metric.stats.max, absl::optional(10.0)); +} + +TEST(ExportPerfMetricTest, OneFailedExporterDoesNotPreventExportToOthers) { + TestMetricsExporterFactory exporter_factory1; + TestMetricsExporterFactory exporter_factory2; + TestMetricsExporterFactory exporter_factory3; + + DefaultMetricsLogger logger(Clock::GetRealTimeClock()); + logger.LogSingleValueMetric("metric_name", "test_case_name", + /*value=*/10, Unit::kMilliseconds, + ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + + std::vector> exporters; + exporters.push_back(exporter_factory1.CreateExporter()); + exporters.push_back(exporter_factory2.CreateFailureExporter()); + exporters.push_back(exporter_factory3.CreateExporter()); + ASSERT_FALSE(ExportPerfMetric(logger, std::move(exporters))); + + std::vector metrics1 = exporter_factory1.exported_metrics; + std::vector metrics2 = exporter_factory2.exported_metrics; + 
std::vector metrics3 = exporter_factory3.exported_metrics; + ASSERT_THAT(metrics1.size(), Eq(1lu)) + << metrics1[0].name << "; " << metrics1[1].name; + EXPECT_THAT(metrics1[0].name, Eq("metric_name")); + ASSERT_THAT(metrics2.size(), Eq(1lu)); + EXPECT_THAT(metrics2[0].name, Eq("metric_name")); + ASSERT_THAT(metrics3.size(), Eq(1lu)); + EXPECT_THAT(metrics3[0].name, Eq("metric_name")); +} + +} // namespace +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/metric.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metric.cc new file mode 100644 index 0000000000..3c30f36f49 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metric.cc @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/test/metrics/metric.h" + +#include + +namespace webrtc { +namespace test { + +absl::string_view ToString(Unit unit) { + switch (unit) { + case Unit::kMilliseconds: + return "Milliseconds"; + case Unit::kPercent: + return "Percent"; + case Unit::kBytes: + return "Bytes"; + case Unit::kKilobitsPerSecond: + return "KilobitsPerSecond"; + case Unit::kHertz: + return "Hertz"; + case Unit::kUnitless: + return "Unitless"; + case Unit::kCount: + return "Count"; + } +} + +absl::string_view ToString(ImprovementDirection direction) { + switch (direction) { + case ImprovementDirection::kBiggerIsBetter: + return "BiggerIsBetter"; + case ImprovementDirection::kNeitherIsBetter: + return "NeitherIsBetter"; + case ImprovementDirection::kSmallerIsBetter: + return "SmallerIsBetter"; + } +} + +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/metric.h b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metric.h new file mode 100644 index 0000000000..17c1755f95 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metric.h @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_METRICS_METRIC_H_ +#define API_TEST_METRICS_METRIC_H_ + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/units/timestamp.h" + +namespace webrtc { +namespace test { + +enum class Unit { + kMilliseconds, + kPercent, + kBytes, + kKilobitsPerSecond, + kHertz, + // General unitless value. Can be used either for dimensionless quantities + // (ex ratio) or for units not presented in this enum and too specific to add + // to this enum. + kUnitless, + kCount +}; + +absl::string_view ToString(Unit unit); + +enum class ImprovementDirection { + kBiggerIsBetter, + kNeitherIsBetter, + kSmallerIsBetter +}; + +absl::string_view ToString(ImprovementDirection direction); + +struct Metric { + struct TimeSeries { + struct Sample { + // Timestamp in microseconds associated with a sample. 
For example, + // the timestamp when the sample was collected. + webrtc::Timestamp timestamp; + double value; + // Metadata associated with this particular sample. + std::map sample_metadata; + }; + + // All samples collected for this metric. It can be empty if the Metric + // object only contains `stats`. + std::vector samples; + }; + + // Contains metric's precomputed statistics based on the `time_series` or if + // `time_series` is omitted (has 0 samples) contains precomputed statistics + // provided by the metric's calculator. + struct Stats { + // Sample mean of the metric + // (https://en.wikipedia.org/wiki/Sample_mean_and_covariance). + absl::optional mean; + // Standard deviation (https://en.wikipedia.org/wiki/Standard_deviation). + // Is undefined if `time_series` contains only a single value. + absl::optional stddev; + absl::optional min; + absl::optional max; + }; + + // Metric name, for example PSNR, SSIM, decode_time, etc. + std::string name; + Unit unit; + ImprovementDirection improvement_direction; + // If the metric is generated by a test, this field can be used to specify + // this information. + std::string test_case; + // Metadata associated with the whole metric. + std::map metric_metadata; + // Contains all samples of the metric collected during test execution. + // It can be empty if the user only stores precomputed statistics into + // `stats`. + TimeSeries time_series; + Stats stats; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_METRICS_METRIC_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_accumulator.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_accumulator.cc new file mode 100644 index 0000000000..c34396be97 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_accumulator.cc @@ -0,0 +1,132 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "api/test/metrics/metrics_accumulator.h" + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/test/metrics/metric.h" +#include "api/units/timestamp.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { +namespace test { +namespace { + +Metric::Stats ToStats(const SamplesStatsCounter& values) { + if (values.IsEmpty()) { + return Metric::Stats(); + } + return Metric::Stats{.mean = values.GetAverage(), + .stddev = values.GetStandardDeviation(), + .min = values.GetMin(), + .max = values.GetMax()}; +} + +Metric SetTimeseries(const Metric& prototype, + const SamplesStatsCounter& counter) { + Metric output(prototype); + Metric::TimeSeries time_series; + for (const SamplesStatsCounter::StatsSample& sample : + counter.GetTimedSamples()) { + time_series.samples.push_back( + Metric::TimeSeries::Sample{.timestamp = sample.time, + .value = sample.value, + .sample_metadata = sample.metadata}); + } + output.time_series = std::move(time_series); + output.stats = ToStats(counter); + return output; +} + +} // namespace + +bool operator<(const MetricsAccumulator::MetricKey& a, + const MetricsAccumulator::MetricKey& b) { + if (a.test_case_name < b.test_case_name) { + return true; + } else if (a.test_case_name > b.test_case_name) { + return false; + } else { + return a.metric_name < b.metric_name; + } +} + +bool MetricsAccumulator::AddSample( + absl::string_view metric_name, + absl::string_view test_case_name, + double value, + Timestamp timestamp, + std::map point_metadata) { + MutexLock lock(&mutex_); + bool created; + MetricValue* metric_value = + GetOrCreateMetric(metric_name, test_case_name, &created); + metric_value->counter.AddSample( + SamplesStatsCounter::StatsSample{.value = value, + .time = timestamp, + .metadata = std::move(point_metadata)}); + return created; +} + +bool MetricsAccumulator::AddMetricMetadata( + absl::string_view metric_name, + absl::string_view test_case_name, + Unit unit, + ImprovementDirection improvement_direction, + std::map metric_metadata) { + MutexLock lock(&mutex_); + bool created; + MetricValue* metric_value = + GetOrCreateMetric(metric_name, test_case_name, &created); + metric_value->metric.unit = unit; + metric_value->metric.improvement_direction = improvement_direction; + metric_value->metric.metric_metadata = std::move(metric_metadata); + return created; +} + +std::vector MetricsAccumulator::GetCollectedMetrics() const { + MutexLock lock(&mutex_); + std::vector out; + out.reserve(metrics_.size()); + for (const auto& [unused_key, metric_value] : metrics_) { + out.push_back(SetTimeseries(metric_value.metric, metric_value.counter)); + } + return out; +} + +MetricsAccumulator::MetricValue* MetricsAccumulator::GetOrCreateMetric( + absl::string_view metric_name, + absl::string_view test_case_name, + bool* created) { + MetricKey key(metric_name, test_case_name); + auto it = metrics_.find(key); + if (it != metrics_.end()) { + *created = false; + return &it->second; + } + *created = true; + + Metric metric{ + .name = key.metric_name, + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kNeitherIsBetter, + .test_case = key.test_case_name, + }; + return &metrics_.emplace(key, MetricValue{.metric = std::move(metric)}) + .first->second; +} + +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_accumulator.h b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_accumulator.h 
new file mode 100644 index 0000000000..c75bd9429c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_accumulator.h @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_METRICS_METRICS_ACCUMULATOR_H_ +#define API_TEST_METRICS_METRICS_ACCUMULATOR_H_ + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/test/metrics/metric.h" +#include "api/units/timestamp.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { +namespace test { + +// Accumulates metrics' samples internally and provides API to get collected +// ones. +// +// This object is thread safe. +class MetricsAccumulator { + public: + MetricsAccumulator() = default; + + // Adds sample for the specified `metric_name` within specified + // `test_case_name`. If it is the first time when this combination of + // `metric_name` and `test_case_name` is used, creates a new Metric to collect + // samples, otherwise adds a sample to the previously created Metric. + // + // By default metric will use `Unit::kUnitless` and + // `ImprovementDirection::kNeitherIsBetter`. + // + // `point_metadata` - the metadata to be added to the single data point that + // this method adds to the Metric (it is not a metric global metadata). + // + // Returns true if a new metric was created and false otherwise. + bool AddSample(absl::string_view metric_name, + absl::string_view test_case_name, + double value, + Timestamp timestamp, + std::map point_metadata = {}); + + // Adds metadata to the metric specified by `metric_name` within specified + // `test_case_name`. If such a metric doesn't exist, creates a new one, + // otherwise overrides previously recorded values. + // + // Returns true if a new metric was created and false otherwise. + bool AddMetricMetadata( + absl::string_view metric_name, + absl::string_view test_case_name, + Unit unit, + ImprovementDirection improvement_direction, + std::map metric_metadata = {}); + + // Returns all metrics collected by this accumulator. No order guarantees + // provided. + std::vector GetCollectedMetrics() const; + + private: + struct MetricKey { + MetricKey(absl::string_view metric_name, absl::string_view test_case_name) + : metric_name(metric_name), test_case_name(test_case_name) {} + + std::string metric_name; + std::string test_case_name; + }; + friend bool operator<(const MetricKey& a, const MetricKey& b); + + struct MetricValue { + SamplesStatsCounter counter; + Metric metric; + }; + + // Gets existing metrics or creates a new one. If metric was created `created` + // will be set to true. 
+ MetricValue* GetOrCreateMetric(absl::string_view metric_name, + absl::string_view test_case_name, + bool* created) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + + mutable Mutex mutex_; + std::map metrics_ RTC_GUARDED_BY(mutex_); +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_METRICS_METRICS_ACCUMULATOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_accumulator_test.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_accumulator_test.cc new file mode 100644 index 0000000000..677f523339 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_accumulator_test.cc @@ -0,0 +1,315 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/test/metrics/metrics_accumulator.h" + +#include +#include + +#include "api/test/metrics/metric.h" +#include "api/units/timestamp.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace test { +namespace { + +using ::testing::Eq; +using ::testing::IsEmpty; +using ::testing::SizeIs; + +TEST(MetricsAccumulatorTest, AddSampleToTheNewMetricWillCreateOne) { + MetricsAccumulator accumulator; + ASSERT_TRUE(accumulator.AddSample( + "metric_name", "test_case_name", + /*value=*/10, Timestamp::Seconds(1), + /*point_metadata=*/std::map{{"key", "value"}})); + + std::vector metrics = accumulator.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(1)); + const Metric& metric = metrics[0]; + EXPECT_THAT(metric.name, Eq("metric_name")); + EXPECT_THAT(metric.test_case, Eq("test_case_name")); + EXPECT_THAT(metric.unit, Eq(Unit::kUnitless)); + EXPECT_THAT(metric.improvement_direction, + Eq(ImprovementDirection::kNeitherIsBetter)); + EXPECT_THAT(metric.metric_metadata, IsEmpty()); + ASSERT_THAT(metric.time_series.samples, SizeIs(1)); + EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0)); + EXPECT_THAT(metric.time_series.samples[0].timestamp, + Eq(Timestamp::Seconds(1))); + EXPECT_THAT(metric.time_series.samples[0].sample_metadata, + Eq(std::map{{"key", "value"}})); + ASSERT_THAT(metric.stats.mean, absl::optional(10.0)); + ASSERT_THAT(metric.stats.stddev, absl::optional(0.0)); + ASSERT_THAT(metric.stats.min, absl::optional(10.0)); + ASSERT_THAT(metric.stats.max, absl::optional(10.0)); +} + +TEST(MetricsAccumulatorTest, AddSamplesToExistingMetricWontCreateNewOne) { + MetricsAccumulator accumulator; + ASSERT_TRUE(accumulator.AddSample( + "metric_name", "test_case_name", + /*value=*/10, Timestamp::Seconds(1), + /*point_metadata=*/ + std::map{{"key1", "value1"}})); + ASSERT_FALSE(accumulator.AddSample( + "metric_name", "test_case_name", + /*value=*/20, Timestamp::Seconds(2), + /*point_metadata=*/ + std::map{{"key2", "value2"}})); + + std::vector metrics = accumulator.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(1)); + const Metric& metric = metrics[0]; + EXPECT_THAT(metric.name, Eq("metric_name")); + EXPECT_THAT(metric.test_case, Eq("test_case_name")); + EXPECT_THAT(metric.unit, Eq(Unit::kUnitless)); + EXPECT_THAT(metric.improvement_direction, + Eq(ImprovementDirection::kNeitherIsBetter)); + EXPECT_THAT(metric.metric_metadata, IsEmpty()); + ASSERT_THAT(metric.time_series.samples, SizeIs(2)); + 
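  // Worked check for the stats assertions at the end of this test: samples
  // {10, 20} give mean = (10 + 20) / 2 = 15, stddev = sqrt(((10 - 15)^2 +
  // (20 - 15)^2) / 2) = 5 (the population form), min = 10 and max = 20.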
EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0)); + EXPECT_THAT(metric.time_series.samples[0].timestamp, + Eq(Timestamp::Seconds(1))); + EXPECT_THAT(metric.time_series.samples[0].sample_metadata, + Eq(std::map{{"key1", "value1"}})); + EXPECT_THAT(metric.time_series.samples[1].value, Eq(20.0)); + EXPECT_THAT(metric.time_series.samples[1].timestamp, + Eq(Timestamp::Seconds(2))); + EXPECT_THAT(metric.time_series.samples[1].sample_metadata, + Eq(std::map{{"key2", "value2"}})); + ASSERT_THAT(metric.stats.mean, absl::optional(15.0)); + ASSERT_THAT(metric.stats.stddev, absl::optional(5.0)); + ASSERT_THAT(metric.stats.min, absl::optional(10.0)); + ASSERT_THAT(metric.stats.max, absl::optional(20.0)); +} + +TEST(MetricsAccumulatorTest, AddSampleToDifferentMetricsWillCreateBoth) { + MetricsAccumulator accumulator; + ASSERT_TRUE(accumulator.AddSample( + "metric_name1", "test_case_name1", + /*value=*/10, Timestamp::Seconds(1), + /*point_metadata=*/ + std::map{{"key1", "value1"}})); + ASSERT_TRUE(accumulator.AddSample( + "metric_name2", "test_case_name2", + /*value=*/20, Timestamp::Seconds(2), + /*point_metadata=*/ + std::map{{"key2", "value2"}})); + + std::vector metrics = accumulator.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(2)); + EXPECT_THAT(metrics[0].name, Eq("metric_name1")); + EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1")); + EXPECT_THAT(metrics[0].unit, Eq(Unit::kUnitless)); + EXPECT_THAT(metrics[0].improvement_direction, + Eq(ImprovementDirection::kNeitherIsBetter)); + EXPECT_THAT(metrics[0].metric_metadata, IsEmpty()); + ASSERT_THAT(metrics[0].time_series.samples, SizeIs(1)); + EXPECT_THAT(metrics[0].time_series.samples[0].value, Eq(10.0)); + EXPECT_THAT(metrics[0].time_series.samples[0].timestamp, + Eq(Timestamp::Seconds(1))); + EXPECT_THAT(metrics[0].time_series.samples[0].sample_metadata, + Eq(std::map{{"key1", "value1"}})); + ASSERT_THAT(metrics[0].stats.mean, absl::optional(10.0)); + ASSERT_THAT(metrics[0].stats.stddev, absl::optional(0.0)); + ASSERT_THAT(metrics[0].stats.min, absl::optional(10.0)); + ASSERT_THAT(metrics[0].stats.max, absl::optional(10.0)); + EXPECT_THAT(metrics[1].name, Eq("metric_name2")); + EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); + EXPECT_THAT(metrics[1].unit, Eq(Unit::kUnitless)); + EXPECT_THAT(metrics[1].improvement_direction, + Eq(ImprovementDirection::kNeitherIsBetter)); + EXPECT_THAT(metrics[1].metric_metadata, IsEmpty()); + ASSERT_THAT(metrics[1].time_series.samples, SizeIs(1)); + EXPECT_THAT(metrics[1].time_series.samples[0].value, Eq(20.0)); + EXPECT_THAT(metrics[1].time_series.samples[0].timestamp, + Eq(Timestamp::Seconds(2))); + EXPECT_THAT(metrics[1].time_series.samples[0].sample_metadata, + Eq(std::map{{"key2", "value2"}})); + ASSERT_THAT(metrics[1].stats.mean, absl::optional(20.0)); + ASSERT_THAT(metrics[1].stats.stddev, absl::optional(0.0)); + ASSERT_THAT(metrics[1].stats.min, absl::optional(20.0)); + ASSERT_THAT(metrics[1].stats.max, absl::optional(20.0)); +} + +TEST(MetricsAccumulatorTest, AddMetadataToTheNewMetricWillCreateOne) { + MetricsAccumulator accumulator; + ASSERT_TRUE(accumulator.AddMetricMetadata( + "metric_name", "test_case_name", Unit::kMilliseconds, + ImprovementDirection::kBiggerIsBetter, + /*metric_metadata=*/ + std::map{{"key", "value"}})); + + std::vector metrics = accumulator.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(1)); + const Metric& metric = metrics[0]; + EXPECT_THAT(metric.name, Eq("metric_name")); + EXPECT_THAT(metric.test_case, Eq("test_case_name")); + 
EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds)); + EXPECT_THAT(metric.improvement_direction, + Eq(ImprovementDirection::kBiggerIsBetter)); + EXPECT_THAT(metric.metric_metadata, + Eq(std::map{{"key", "value"}})); + ASSERT_THAT(metric.time_series.samples, IsEmpty()); + ASSERT_THAT(metric.stats.mean, absl::nullopt); + ASSERT_THAT(metric.stats.stddev, absl::nullopt); + ASSERT_THAT(metric.stats.min, absl::nullopt); + ASSERT_THAT(metric.stats.max, absl::nullopt); +} + +TEST(MetricsAccumulatorTest, + AddMetadataToTheExistingMetricWillOverwriteValues) { + MetricsAccumulator accumulator; + ASSERT_TRUE(accumulator.AddMetricMetadata( + "metric_name", "test_case_name", Unit::kMilliseconds, + ImprovementDirection::kBiggerIsBetter, + /*metric_metadata=*/ + std::map{{"key1", "value1"}})); + + ASSERT_FALSE(accumulator.AddMetricMetadata( + "metric_name", "test_case_name", Unit::kBytes, + ImprovementDirection::kSmallerIsBetter, + /*metric_metadata=*/ + std::map{{"key2", "value2"}})); + + std::vector metrics = accumulator.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(1)); + const Metric& metric = metrics[0]; + EXPECT_THAT(metric.name, Eq("metric_name")); + EXPECT_THAT(metric.test_case, Eq("test_case_name")); + EXPECT_THAT(metric.unit, Eq(Unit::kBytes)); + EXPECT_THAT(metric.improvement_direction, + Eq(ImprovementDirection::kSmallerIsBetter)); + EXPECT_THAT(metric.metric_metadata, + Eq(std::map{{"key2", "value2"}})); + ASSERT_THAT(metric.time_series.samples, IsEmpty()); + ASSERT_THAT(metric.stats.mean, absl::nullopt); + ASSERT_THAT(metric.stats.stddev, absl::nullopt); + ASSERT_THAT(metric.stats.min, absl::nullopt); + ASSERT_THAT(metric.stats.max, absl::nullopt); +} + +TEST(MetricsAccumulatorTest, AddMetadataToDifferentMetricsWillCreateBoth) { + MetricsAccumulator accumulator; + ASSERT_TRUE(accumulator.AddMetricMetadata( + "metric_name1", "test_case_name1", Unit::kMilliseconds, + ImprovementDirection::kBiggerIsBetter, + /*metric_metadata=*/ + std::map{{"key1", "value1"}})); + + ASSERT_TRUE(accumulator.AddMetricMetadata( + "metric_name2", "test_case_name2", Unit::kBytes, + ImprovementDirection::kSmallerIsBetter, + /*metric_metadata=*/ + std::map{{"key2", "value2"}})); + + std::vector metrics = accumulator.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(2)); + EXPECT_THAT(metrics[0].name, Eq("metric_name1")); + EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1")); + EXPECT_THAT(metrics[0].unit, Eq(Unit::kMilliseconds)); + EXPECT_THAT(metrics[0].improvement_direction, + Eq(ImprovementDirection::kBiggerIsBetter)); + EXPECT_THAT(metrics[0].metric_metadata, + Eq(std::map{{"key1", "value1"}})); + ASSERT_THAT(metrics[0].time_series.samples, IsEmpty()); + ASSERT_THAT(metrics[0].stats.mean, absl::nullopt); + ASSERT_THAT(metrics[0].stats.stddev, absl::nullopt); + ASSERT_THAT(metrics[0].stats.min, absl::nullopt); + ASSERT_THAT(metrics[0].stats.max, absl::nullopt); + EXPECT_THAT(metrics[1].name, Eq("metric_name2")); + EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); + EXPECT_THAT(metrics[1].unit, Eq(Unit::kBytes)); + EXPECT_THAT(metrics[1].improvement_direction, + Eq(ImprovementDirection::kSmallerIsBetter)); + EXPECT_THAT(metrics[1].metric_metadata, + Eq(std::map{{"key2", "value2"}})); + ASSERT_THAT(metrics[1].time_series.samples, IsEmpty()); + ASSERT_THAT(metrics[1].stats.mean, absl::nullopt); + ASSERT_THAT(metrics[1].stats.stddev, absl::nullopt); + ASSERT_THAT(metrics[1].stats.min, absl::nullopt); + ASSERT_THAT(metrics[1].stats.max, absl::nullopt); +} + +TEST(MetricsAccumulatorTest, 
AddMetadataAfterAddingSampleWontCreateNewMetric) { + MetricsAccumulator accumulator; + ASSERT_TRUE(accumulator.AddSample( + "metric_name", "test_case_name", + /*value=*/10, Timestamp::Seconds(1), + /*point_metadata=*/ + std::map{{"key_s", "value_s"}})); + ASSERT_FALSE(accumulator.AddMetricMetadata( + "metric_name", "test_case_name", Unit::kMilliseconds, + ImprovementDirection::kBiggerIsBetter, + /*metric_metadata=*/ + std::map{{"key_m", "value_m"}})); + + std::vector metrics = accumulator.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(1)); + const Metric& metric = metrics[0]; + EXPECT_THAT(metric.name, Eq("metric_name")); + EXPECT_THAT(metric.test_case, Eq("test_case_name")); + EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds)); + EXPECT_THAT(metric.improvement_direction, + Eq(ImprovementDirection::kBiggerIsBetter)); + EXPECT_THAT(metric.metric_metadata, + Eq(std::map{{"key_m", "value_m"}})); + ASSERT_THAT(metric.time_series.samples, SizeIs(1)); + EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0)); + EXPECT_THAT(metric.time_series.samples[0].timestamp, + Eq(Timestamp::Seconds(1))); + EXPECT_THAT(metric.time_series.samples[0].sample_metadata, + Eq(std::map{{"key_s", "value_s"}})); + ASSERT_THAT(metric.stats.mean, absl::optional(10.0)); + ASSERT_THAT(metric.stats.stddev, absl::optional(0.0)); + ASSERT_THAT(metric.stats.min, absl::optional(10.0)); + ASSERT_THAT(metric.stats.max, absl::optional(10.0)); +} + +TEST(MetricsAccumulatorTest, AddSampleAfterAddingMetadataWontCreateNewMetric) { + MetricsAccumulator accumulator; + ASSERT_TRUE(accumulator.AddMetricMetadata( + "metric_name", "test_case_name", Unit::kMilliseconds, + ImprovementDirection::kBiggerIsBetter, + /*metric_metadata=*/ + std::map{{"key_m", "value_m"}})); + ASSERT_FALSE(accumulator.AddSample( + "metric_name", "test_case_name", + /*value=*/10, Timestamp::Seconds(1), + /*point_metadata=*/ + std::map{{"key_s", "value_s"}})); + + std::vector metrics = accumulator.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(1)); + const Metric& metric = metrics[0]; + EXPECT_THAT(metric.name, Eq("metric_name")); + EXPECT_THAT(metric.test_case, Eq("test_case_name")); + EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds)); + EXPECT_THAT(metric.improvement_direction, + Eq(ImprovementDirection::kBiggerIsBetter)); + EXPECT_THAT(metric.metric_metadata, + Eq(std::map{{"key_m", "value_m"}})); + ASSERT_THAT(metric.time_series.samples, SizeIs(1)); + EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0)); + EXPECT_THAT(metric.time_series.samples[0].timestamp, + Eq(Timestamp::Seconds(1))); + EXPECT_THAT(metric.time_series.samples[0].sample_metadata, + Eq(std::map{{"key_s", "value_s"}})); + ASSERT_THAT(metric.stats.mean, absl::optional(10.0)); + ASSERT_THAT(metric.stats.stddev, absl::optional(0.0)); + ASSERT_THAT(metric.stats.min, absl::optional(10.0)); + ASSERT_THAT(metric.stats.max, absl::optional(10.0)); +} + +} // namespace +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_exporter.h b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_exporter.h new file mode 100644 index 0000000000..23954b6b1f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_exporter.h @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_METRICS_METRICS_EXPORTER_H_ +#define API_TEST_METRICS_METRICS_EXPORTER_H_ + +#include "api/array_view.h" +#include "api/test/metrics/metric.h" + +namespace webrtc { +namespace test { + +// Exports metrics in the requested format. +class MetricsExporter { + public: + virtual ~MetricsExporter() = default; + + // Exports specified metrics in a format that depends on the implementation. + // Returns true if export succeeded, false otherwise. + virtual bool Export(rtc::ArrayView metrics) = 0; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_METRICS_METRICS_EXPORTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger.cc new file mode 100644 index 0000000000..1e24400367 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger.cc @@ -0,0 +1,114 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/test/metrics/metrics_logger.h" + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/test/metrics/metric.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { +namespace test { +namespace { + +Metric::Stats ToStats(const SamplesStatsCounter& values) { + if (values.IsEmpty()) { + return Metric::Stats(); + } + return Metric::Stats{.mean = values.GetAverage(), + .stddev = values.GetStandardDeviation(), + .min = values.GetMin(), + .max = values.GetMax()}; +} + +} // namespace + +void DefaultMetricsLogger::LogSingleValueMetric( + absl::string_view name, + absl::string_view test_case_name, + double value, + Unit unit, + ImprovementDirection improvement_direction, + std::map metadata) { + MutexLock lock(&mutex_); + metrics_.push_back(Metric{ + .name = std::string(name), + .unit = unit, + .improvement_direction = improvement_direction, + .test_case = std::string(test_case_name), + .metric_metadata = std::move(metadata), + .time_series = + Metric::TimeSeries{.samples = std::vector{Metric::TimeSeries::Sample{ + .timestamp = Now(), .value = value}}}, + .stats = Metric::Stats{ + .mean = value, .stddev = absl::nullopt, .min = value, .max = value}}); +} + +void DefaultMetricsLogger::LogMetric( + absl::string_view name, + absl::string_view test_case_name, + const SamplesStatsCounter& values, + Unit unit, + ImprovementDirection improvement_direction, + std::map metadata) { + MutexLock lock(&mutex_); + Metric::TimeSeries time_series; + for (const SamplesStatsCounter::StatsSample& sample : + values.GetTimedSamples()) { + time_series.samples.push_back( + Metric::TimeSeries::Sample{.timestamp = sample.time, + .value = sample.value, + .sample_metadata = sample.metadata}); + } + + metrics_.push_back(Metric{.name = std::string(name), + .unit = unit, + .improvement_direction = improvement_direction, + .test_case = std::string(test_case_name), + .metric_metadata = std::move(metadata), + .time_series = 
std::move(time_series), + .stats = ToStats(values)}); +} + +void DefaultMetricsLogger::LogMetric( + absl::string_view name, + absl::string_view test_case_name, + const Metric::Stats& metric_stats, + Unit unit, + ImprovementDirection improvement_direction, + std::map metadata) { + MutexLock lock(&mutex_); + metrics_.push_back(Metric{.name = std::string(name), + .unit = unit, + .improvement_direction = improvement_direction, + .test_case = std::string(test_case_name), + .metric_metadata = std::move(metadata), + .time_series = Metric::TimeSeries{.samples = {}}, + .stats = std::move(metric_stats)}); +} + +std::vector DefaultMetricsLogger::GetCollectedMetrics() const { + std::vector out = metrics_accumulator_.GetCollectedMetrics(); + MutexLock lock(&mutex_); + out.insert(out.end(), metrics_.begin(), metrics_.end()); + return out; +} + +Timestamp DefaultMetricsLogger::Now() { + return clock_->CurrentTime(); +} + +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger.h b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger.h new file mode 100644 index 0000000000..66f9e55b95 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger.h @@ -0,0 +1,112 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_METRICS_METRICS_LOGGER_H_ +#define API_TEST_METRICS_METRICS_LOGGER_H_ + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_accumulator.h" +#include "rtc_base/synchronization/mutex.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { +namespace test { + +// Provides API to log and collect performance metrics. +class MetricsLogger { + public: + virtual ~MetricsLogger() = default; + + // Adds a metric with a single value. + // `metadata` - metric's level metadata to add. + virtual void LogSingleValueMetric( + absl::string_view name, + absl::string_view test_case_name, + double value, + Unit unit, + ImprovementDirection improvement_direction, + std::map metadata = {}) = 0; + + // Adds metrics with a time series created based on the provided `values`. + // `metadata` - metric's level metadata to add. + virtual void LogMetric(absl::string_view name, + absl::string_view test_case_name, + const SamplesStatsCounter& values, + Unit unit, + ImprovementDirection improvement_direction, + std::map metadata = {}) = 0; + + // Adds metric with a time series with only stats object and without actual + // collected values. + // `metadata` - metric's level metadata to add. + virtual void LogMetric(absl::string_view name, + absl::string_view test_case_name, + const Metric::Stats& metric_stats, + Unit unit, + ImprovementDirection improvement_direction, + std::map metadata = {}) = 0; + + // Returns all metrics collected by this logger. 
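A minimal usage sketch of the logger API declared here, not part of the vendored diff: it assumes the template arguments stripped from the text above are std::map<std::string, std::string> for metadata and std::vector<Metric> for the collected metrics, and it uses the DefaultMetricsLogger declared further below. Metric and test-case names are illustrative only.

// Sketch only: logs one value and reads back the collected metrics.
#include <cstddef>
#include <map>
#include <string>
#include <vector>

#include "api/test/metrics/metric.h"
#include "api/test/metrics/metrics_logger.h"
#include "system_wrappers/include/clock.h"

size_t LogOneMetric() {
  webrtc::test::DefaultMetricsLogger logger(webrtc::Clock::GetRealTimeClock());
  logger.LogSingleValueMetric(
      "decode_time", "example_test",  // hypothetical metric/test case names
      /*value=*/12.5, webrtc::test::Unit::kMilliseconds,
      webrtc::test::ImprovementDirection::kSmallerIsBetter,
      std::map<std::string, std::string>{{"key", "value"}});
  // Returns metrics logged here plus anything added via the MetricsAccumulator.
  std::vector<webrtc::test::Metric> collected = logger.GetCollectedMetrics();
  return collected.size();
}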
+ virtual std::vector GetCollectedMetrics() const = 0; +}; + +class DefaultMetricsLogger : public MetricsLogger { + public: + explicit DefaultMetricsLogger(webrtc::Clock* clock) : clock_(clock) {} + ~DefaultMetricsLogger() override = default; + + void LogSingleValueMetric( + absl::string_view name, + absl::string_view test_case_name, + double value, + Unit unit, + ImprovementDirection improvement_direction, + std::map metadata = {}) override; + + void LogMetric(absl::string_view name, + absl::string_view test_case_name, + const SamplesStatsCounter& values, + Unit unit, + ImprovementDirection improvement_direction, + std::map metadata = {}) override; + + void LogMetric(absl::string_view name, + absl::string_view test_case_name, + const Metric::Stats& metric_stats, + Unit unit, + ImprovementDirection improvement_direction, + std::map metadata = {}) override; + + // Returns all metrics collected by this logger and its `MetricsAccumulator`. + std::vector GetCollectedMetrics() const override; + + MetricsAccumulator* GetMetricsAccumulator() { return &metrics_accumulator_; } + + private: + webrtc::Timestamp Now(); + + webrtc::Clock* const clock_; + MetricsAccumulator metrics_accumulator_; + + mutable Mutex mutex_; + std::vector metrics_ RTC_GUARDED_BY(mutex_); +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_METRICS_METRICS_LOGGER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger_and_exporter.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger_and_exporter.cc new file mode 100644 index 0000000000..9f91eac334 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger_and_exporter.cc @@ -0,0 +1,129 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "api/test/metrics/metrics_logger_and_exporter.h" + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/test/metrics/metric.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { +namespace test { +namespace { + +Metric::Stats ToStats(const SamplesStatsCounter& values) { + if (values.IsEmpty()) { + return Metric::Stats(); + } + return Metric::Stats{.mean = values.GetAverage(), + .stddev = values.GetStandardDeviation(), + .min = values.GetMin(), + .max = values.GetMax()}; +} + +} // namespace + +MetricsLoggerAndExporter::~MetricsLoggerAndExporter() { + bool export_result = Export(); + if (crash_on_export_failure_) { + RTC_CHECK(export_result); + } else { + RTC_LOG(LS_ERROR) << "One of exporters failed to export collected metrics"; + } +} + +void MetricsLoggerAndExporter::LogSingleValueMetric( + absl::string_view name, + absl::string_view test_case_name, + double value, + Unit unit, + ImprovementDirection improvement_direction, + std::map metadata) { + MutexLock lock(&mutex_); + metrics_.push_back(Metric{ + .name = std::string(name), + .unit = unit, + .improvement_direction = improvement_direction, + .test_case = std::string(test_case_name), + .metric_metadata = std::move(metadata), + .time_series = + Metric::TimeSeries{.samples = std::vector{Metric::TimeSeries::Sample{ + .timestamp = Now(), .value = value}}}, + .stats = Metric::Stats{ + .mean = value, .stddev = absl::nullopt, .min = value, .max = value}}); +} + +void MetricsLoggerAndExporter::LogMetric( + absl::string_view name, + absl::string_view test_case_name, + const SamplesStatsCounter& values, + Unit unit, + ImprovementDirection improvement_direction, + std::map metadata) { + MutexLock lock(&mutex_); + Metric::TimeSeries time_series; + for (const SamplesStatsCounter::StatsSample& sample : + values.GetTimedSamples()) { + time_series.samples.push_back( + Metric::TimeSeries::Sample{.timestamp = sample.time, + .value = sample.value, + .sample_metadata = sample.metadata}); + } + + metrics_.push_back(Metric{.name = std::string(name), + .unit = unit, + .improvement_direction = improvement_direction, + .test_case = std::string(test_case_name), + .metric_metadata = std::move(metadata), + .time_series = std::move(time_series), + .stats = ToStats(values)}); +} + +void MetricsLoggerAndExporter::LogMetric( + absl::string_view name, + absl::string_view test_case_name, + const Metric::Stats& metric_stats, + Unit unit, + ImprovementDirection improvement_direction, + std::map metadata) { + MutexLock lock(&mutex_); + metrics_.push_back(Metric{.name = std::string(name), + .unit = unit, + .improvement_direction = improvement_direction, + .test_case = std::string(test_case_name), + .metric_metadata = std::move(metadata), + .time_series = Metric::TimeSeries{.samples = {}}, + .stats = std::move(metric_stats)}); +} + +Timestamp MetricsLoggerAndExporter::Now() { + return clock_->CurrentTime(); +} + +bool MetricsLoggerAndExporter::Export() { + MutexLock lock(&mutex_); + bool success = true; + for (auto& exporter : exporters_) { + bool export_result = exporter->Export(metrics_); + success = success && export_result; + } + return success; +} + +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger_and_exporter.h 
b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger_and_exporter.h new file mode 100644 index 0000000000..562aa6e264 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger_and_exporter.h @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_METRICS_METRICS_LOGGER_AND_EXPORTER_H_ +#define API_TEST_METRICS_METRICS_LOGGER_AND_EXPORTER_H_ + +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_exporter.h" +#include "api/test/metrics/metrics_logger.h" +#include "rtc_base/synchronization/mutex.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { +namespace test { + +// Combines metrics logging and exporting to provide simple API to automatically +// export metrics at the end of the scope. +class MetricsLoggerAndExporter : public MetricsLogger { + public: + // `crash_on_export_failure` - makes MetricsLoggerAndExporter to crash if + // any of exporters failed to export data. + MetricsLoggerAndExporter( + webrtc::Clock* clock, + std::vector> exporters, + bool crash_on_export_failure = true) + : clock_(clock), + crash_on_export_failure_(crash_on_export_failure), + exporters_(std::move(exporters)) {} + ~MetricsLoggerAndExporter() override; + + // Adds a metric with a single value. + // `metadata` - metric's level metadata to add. + void LogSingleValueMetric( + absl::string_view name, + absl::string_view test_case_name, + double value, + Unit unit, + ImprovementDirection improvement_direction, + std::map metadata = {}) override; + + // Adds metrics with a time series created based on the provided `values`. + // `metadata` - metric's level metadata to add. + void LogMetric(absl::string_view name, + absl::string_view test_case_name, + const SamplesStatsCounter& values, + Unit unit, + ImprovementDirection improvement_direction, + std::map metadata = {}) override; + + // Adds metric with a time series with only stats object and without actual + // collected values. + // `metadata` - metric's level metadata to add. + void LogMetric(absl::string_view name, + absl::string_view test_case_name, + const Metric::Stats& metric_stats, + Unit unit, + ImprovementDirection improvement_direction, + std::map metadata = {}) override; + + // Returns all metrics collected by this logger. 
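A minimal sketch of the scoped log-and-export pattern this class provides, not part of the vendored diff: exporters are assumed to be held as std::vector<std::unique_ptr<MetricsExporter>> (the template arguments were stripped in the text above), and the PrintResultProxyMetricsExporter used here is added later in this same commit. Export happens in the destructor, as the tests below exercise.

// Sketch only: all logged metrics are exported when `logger` goes out of scope.
#include <memory>
#include <utility>
#include <vector>

#include "api/test/metrics/metrics_exporter.h"
#include "api/test/metrics/metrics_logger_and_exporter.h"
#include "api/test/metrics/print_result_proxy_metrics_exporter.h"
#include "system_wrappers/include/clock.h"

void RunScopedExport() {
  std::vector<std::unique_ptr<webrtc::test::MetricsExporter>> exporters;
  exporters.push_back(
      std::make_unique<webrtc::test::PrintResultProxyMetricsExporter>());
  {
    webrtc::test::MetricsLoggerAndExporter logger(
        webrtc::Clock::GetRealTimeClock(), std::move(exporters),
        /*crash_on_export_failure=*/false);
    logger.LogSingleValueMetric(
        "setup_time", "scoped_example",  // hypothetical names
        /*value=*/42.0, webrtc::test::Unit::kMilliseconds,
        webrtc::test::ImprovementDirection::kSmallerIsBetter);
  }  // ~MetricsLoggerAndExporter() calls Export() on every registered exporter.
}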
+ std::vector GetCollectedMetrics() const override { + MutexLock lock(&mutex_); + return metrics_; + } + + private: + webrtc::Timestamp Now(); + bool Export(); + + webrtc::Clock* const clock_; + const bool crash_on_export_failure_; + + mutable Mutex mutex_; + std::vector metrics_ RTC_GUARDED_BY(mutex_); + std::vector> exporters_; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_METRICS_METRICS_LOGGER_AND_EXPORTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger_and_exporter_test.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger_and_exporter_test.cc new file mode 100644 index 0000000000..65b1d8f68d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger_and_exporter_test.cc @@ -0,0 +1,361 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/test/metrics/metrics_logger_and_exporter.h" + +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_exporter.h" +#include "system_wrappers/include/clock.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace test { +namespace { + +using ::testing::Eq; +using ::testing::IsEmpty; + +std::map DefaultMetadata() { + return std::map{{"key", "value"}}; +} + +struct TestMetricsExporterFactory { + public: + std::unique_ptr CreateExporter() { + return std::make_unique(this, /*export_result=*/true); + } + + std::unique_ptr CreateFailureExporter() { + return std::make_unique(this, /*export_result=*/false); + } + + std::vector exported_metrics; + + private: + class TestMetricsExporter : public MetricsExporter { + public: + TestMetricsExporter(TestMetricsExporterFactory* factory, bool export_result) + : factory_(factory), export_result_(export_result) {} + ~TestMetricsExporter() override = default; + + bool Export(rtc::ArrayView metrics) override { + factory_->exported_metrics = + std::vector(metrics.begin(), metrics.end()); + return export_result_; + } + + TestMetricsExporterFactory* factory_; + bool export_result_; + }; +}; + +TEST(MetricsLoggerAndExporterTest, LogSingleValueMetricRecordsMetric) { + TestMetricsExporterFactory exporter_factory; + { + std::vector> exporters; + exporters.push_back(exporter_factory.CreateExporter()); + MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), + std::move(exporters)); + logger.LogSingleValueMetric( + "metric_name", "test_case_name", + /*value=*/10, Unit::kMilliseconds, + ImprovementDirection::kBiggerIsBetter, + std::map{{"key", "value"}}); + } + + std::vector metrics = exporter_factory.exported_metrics; + ASSERT_THAT(metrics.size(), Eq(1lu)); + const Metric& metric = metrics[0]; + EXPECT_THAT(metric.name, Eq("metric_name")); + EXPECT_THAT(metric.test_case, Eq("test_case_name")); + EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds)); + EXPECT_THAT(metric.improvement_direction, + Eq(ImprovementDirection::kBiggerIsBetter)); + EXPECT_THAT(metric.metric_metadata, + Eq(std::map{{"key", "value"}})); + ASSERT_THAT(metric.time_series.samples.size(), Eq(1lu)); + 
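The TestMetricsExporter defined in this test file is essentially the smallest useful MetricsExporter. A standalone sketch of the same shape, not part of the vendored diff; the Export() parameter type is assumed to be rtc::ArrayView<const Metric>, since the template argument was stripped from the text above.

// Sketch only: copies every exported metric into a vector.
#include <vector>

#include "api/array_view.h"
#include "api/test/metrics/metric.h"
#include "api/test/metrics/metrics_exporter.h"

class VectorMetricsExporter : public webrtc::test::MetricsExporter {
 public:
  ~VectorMetricsExporter() override = default;

  bool Export(rtc::ArrayView<const webrtc::test::Metric> metrics) override {
    exported_.assign(metrics.begin(), metrics.end());
    return true;  // Report success; a failing exporter would return false.
  }

  const std::vector<webrtc::test::Metric>& exported() const {
    return exported_;
  }

 private:
  std::vector<webrtc::test::Metric> exported_;
};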
EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0)); + EXPECT_THAT(metric.time_series.samples[0].sample_metadata, + Eq(std::map{})); + ASSERT_THAT(metric.stats.mean, absl::optional(10.0)); + ASSERT_THAT(metric.stats.stddev, absl::nullopt); + ASSERT_THAT(metric.stats.min, absl::optional(10.0)); + ASSERT_THAT(metric.stats.max, absl::optional(10.0)); +} + +TEST(MetricsLoggerAndExporterTest, + LogMetricWithSamplesStatsCounterRecordsMetric) { + TestMetricsExporterFactory exporter_factory; + { + std::vector> exporters; + exporters.push_back(exporter_factory.CreateExporter()); + MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), + std::move(exporters)); + + SamplesStatsCounter values; + values.AddSample(SamplesStatsCounter::StatsSample{ + .value = 10, + .time = Clock::GetRealTimeClock()->CurrentTime(), + .metadata = + std::map{{"point_key1", "value1"}}}); + values.AddSample(SamplesStatsCounter::StatsSample{ + .value = 20, + .time = Clock::GetRealTimeClock()->CurrentTime(), + .metadata = + std::map{{"point_key2", "value2"}}}); + logger.LogMetric("metric_name", "test_case_name", values, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + std::map{{"key", "value"}}); + } + + std::vector metrics = exporter_factory.exported_metrics; + ASSERT_THAT(metrics.size(), Eq(1lu)); + const Metric& metric = metrics[0]; + EXPECT_THAT(metric.name, Eq("metric_name")); + EXPECT_THAT(metric.test_case, Eq("test_case_name")); + EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds)); + EXPECT_THAT(metric.improvement_direction, + Eq(ImprovementDirection::kBiggerIsBetter)); + EXPECT_THAT(metric.metric_metadata, + Eq(std::map{{"key", "value"}})); + ASSERT_THAT(metric.time_series.samples.size(), Eq(2lu)); + EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0)); + EXPECT_THAT(metric.time_series.samples[0].sample_metadata, + Eq(std::map{{"point_key1", "value1"}})); + EXPECT_THAT(metric.time_series.samples[1].value, Eq(20.0)); + EXPECT_THAT(metric.time_series.samples[1].sample_metadata, + Eq(std::map{{"point_key2", "value2"}})); + ASSERT_THAT(metric.stats.mean, absl::optional(15.0)); + ASSERT_THAT(metric.stats.stddev, absl::optional(5.0)); + ASSERT_THAT(metric.stats.min, absl::optional(10.0)); + ASSERT_THAT(metric.stats.max, absl::optional(20.0)); +} + +TEST(MetricsLoggerAndExporterTest, + LogMetricWithEmptySamplesStatsCounterRecordsEmptyMetric) { + TestMetricsExporterFactory exporter_factory; + { + std::vector> exporters; + exporters.push_back(exporter_factory.CreateExporter()); + MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), + std::move(exporters)); + SamplesStatsCounter values; + logger.LogMetric("metric_name", "test_case_name", values, Unit::kUnitless, + ImprovementDirection::kBiggerIsBetter, DefaultMetadata()); + } + + std::vector metrics = exporter_factory.exported_metrics; + ASSERT_THAT(metrics.size(), Eq(1lu)); + EXPECT_THAT(metrics[0].name, Eq("metric_name")); + EXPECT_THAT(metrics[0].test_case, Eq("test_case_name")); + EXPECT_THAT(metrics[0].time_series.samples, IsEmpty()); + ASSERT_THAT(metrics[0].stats.mean, Eq(absl::nullopt)); + ASSERT_THAT(metrics[0].stats.stddev, Eq(absl::nullopt)); + ASSERT_THAT(metrics[0].stats.min, Eq(absl::nullopt)); + ASSERT_THAT(metrics[0].stats.max, Eq(absl::nullopt)); +} + +TEST(MetricsLoggerAndExporterTest, LogMetricWithStatsRecordsMetric) { + TestMetricsExporterFactory exporter_factory; + { + std::vector> exporters; + exporters.push_back(exporter_factory.CreateExporter()); + MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), + 
std::move(exporters)); + Metric::Stats metric_stats{.mean = 15, .stddev = 5, .min = 10, .max = 20}; + logger.LogMetric("metric_name", "test_case_name", metric_stats, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + std::map{{"key", "value"}}); + } + + std::vector metrics = exporter_factory.exported_metrics; + ASSERT_THAT(metrics.size(), Eq(1lu)); + const Metric& metric = metrics[0]; + EXPECT_THAT(metric.name, Eq("metric_name")); + EXPECT_THAT(metric.test_case, Eq("test_case_name")); + EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds)); + EXPECT_THAT(metric.improvement_direction, + Eq(ImprovementDirection::kBiggerIsBetter)); + EXPECT_THAT(metric.metric_metadata, + Eq(std::map{{"key", "value"}})); + ASSERT_THAT(metric.time_series.samples.size(), Eq(0lu)); + ASSERT_THAT(metric.stats.mean, absl::optional(15.0)); + ASSERT_THAT(metric.stats.stddev, absl::optional(5.0)); + ASSERT_THAT(metric.stats.min, absl::optional(10.0)); + ASSERT_THAT(metric.stats.max, absl::optional(20.0)); +} + +TEST(MetricsLoggerAndExporterTest, LogSingleValueMetricRecordsMultipleMetrics) { + TestMetricsExporterFactory exporter_factory; + { + std::vector> exporters; + exporters.push_back(exporter_factory.CreateExporter()); + MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), + std::move(exporters)); + + logger.LogSingleValueMetric("metric_name1", "test_case_name1", + /*value=*/10, Unit::kMilliseconds, + ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + logger.LogSingleValueMetric("metric_name2", "test_case_name2", + /*value=*/10, Unit::kMilliseconds, + ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + } + + std::vector metrics = exporter_factory.exported_metrics; + ASSERT_THAT(metrics.size(), Eq(2lu)); + EXPECT_THAT(metrics[0].name, Eq("metric_name1")); + EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1")); + EXPECT_THAT(metrics[1].name, Eq("metric_name2")); + EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); +} + +TEST(MetricsLoggerAndExporterTest, + LogMetricWithSamplesStatsCounterRecordsMultipleMetrics) { + TestMetricsExporterFactory exporter_factory; + { + std::vector> exporters; + exporters.push_back(exporter_factory.CreateExporter()); + MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), + std::move(exporters)); + SamplesStatsCounter values; + values.AddSample(SamplesStatsCounter::StatsSample{ + .value = 10, + .time = Clock::GetRealTimeClock()->CurrentTime(), + .metadata = DefaultMetadata()}); + values.AddSample(SamplesStatsCounter::StatsSample{ + .value = 20, + .time = Clock::GetRealTimeClock()->CurrentTime(), + .metadata = DefaultMetadata()}); + + logger.LogMetric("metric_name1", "test_case_name1", values, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + logger.LogMetric("metric_name2", "test_case_name2", values, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + } + + std::vector metrics = exporter_factory.exported_metrics; + ASSERT_THAT(metrics.size(), Eq(2lu)); + EXPECT_THAT(metrics[0].name, Eq("metric_name1")); + EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1")); + EXPECT_THAT(metrics[1].name, Eq("metric_name2")); + EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); +} + +TEST(MetricsLoggerAndExporterTest, LogMetricWithStatsRecordsMultipleMetrics) { + TestMetricsExporterFactory exporter_factory; + { + std::vector> exporters; + exporters.push_back(exporter_factory.CreateExporter()); + MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), + 
std::move(exporters)); + Metric::Stats metric_stats{.mean = 15, .stddev = 5, .min = 10, .max = 20}; + + logger.LogMetric("metric_name1", "test_case_name1", metric_stats, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + logger.LogMetric("metric_name2", "test_case_name2", metric_stats, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + } + + std::vector metrics = exporter_factory.exported_metrics; + ASSERT_THAT(metrics.size(), Eq(2lu)); + EXPECT_THAT(metrics[0].name, Eq("metric_name1")); + EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1")); + EXPECT_THAT(metrics[1].name, Eq("metric_name2")); + EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); +} + +TEST(MetricsLoggerAndExporterTest, + LogMetricThroughtAllMethodsAccumulateAllMetrics) { + TestMetricsExporterFactory exporter_factory; + { + std::vector> exporters; + exporters.push_back(exporter_factory.CreateExporter()); + MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), + std::move(exporters)); + SamplesStatsCounter values; + values.AddSample(SamplesStatsCounter::StatsSample{ + .value = 10, + .time = Clock::GetRealTimeClock()->CurrentTime(), + .metadata = DefaultMetadata()}); + values.AddSample(SamplesStatsCounter::StatsSample{ + .value = 20, + .time = Clock::GetRealTimeClock()->CurrentTime(), + .metadata = DefaultMetadata()}); + Metric::Stats metric_stats{.mean = 15, .stddev = 5, .min = 10, .max = 20}; + + logger.LogSingleValueMetric("metric_name1", "test_case_name1", + /*value=*/10, Unit::kMilliseconds, + ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + logger.LogMetric("metric_name2", "test_case_name2", values, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + logger.LogMetric("metric_name3", "test_case_name3", metric_stats, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + } + + std::vector metrics = exporter_factory.exported_metrics; + ASSERT_THAT(metrics.size(), Eq(3lu)); + EXPECT_THAT(metrics[0].name, Eq("metric_name1")); + EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1")); + EXPECT_THAT(metrics[1].name, Eq("metric_name2")); + EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); + EXPECT_THAT(metrics[2].name, Eq("metric_name3")); + EXPECT_THAT(metrics[2].test_case, Eq("test_case_name3")); +} + +TEST(MetricsLoggerAndExporterTest, + OneFailedExporterDoesNotPreventExportToOthers) { + TestMetricsExporterFactory exporter_factory1; + TestMetricsExporterFactory exporter_factory2; + TestMetricsExporterFactory exporter_factory3; + { + std::vector> exporters; + exporters.push_back(exporter_factory1.CreateExporter()); + exporters.push_back(exporter_factory2.CreateFailureExporter()); + exporters.push_back(exporter_factory3.CreateExporter()); + MetricsLoggerAndExporter logger(Clock::GetRealTimeClock(), + std::move(exporters), + /*crash_on_export_failure=*/false); + + logger.LogSingleValueMetric("metric_name", "test_case_name", + /*value=*/10, Unit::kMilliseconds, + ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + } + + std::vector metrics1 = exporter_factory1.exported_metrics; + std::vector metrics2 = exporter_factory2.exported_metrics; + std::vector metrics3 = exporter_factory3.exported_metrics; + ASSERT_THAT(metrics1.size(), Eq(1lu)); + EXPECT_THAT(metrics1[0].name, Eq("metric_name")); + ASSERT_THAT(metrics2.size(), Eq(1lu)); + EXPECT_THAT(metrics2[0].name, Eq("metric_name")); + ASSERT_THAT(metrics3.size(), Eq(1lu)); + 
EXPECT_THAT(metrics3[0].name, Eq("metric_name")); +} + +} // namespace +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger_test.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger_test.cc new file mode 100644 index 0000000000..de4501ca36 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_logger_test.cc @@ -0,0 +1,326 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/test/metrics/metrics_logger.h" + +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/test/metrics/metric.h" +#include "system_wrappers/include/clock.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace test { +namespace { + +using ::testing::Eq; +using ::testing::IsEmpty; +using ::testing::SizeIs; + +std::map DefaultMetadata() { + return std::map{{"key", "value"}}; +} + +TEST(DefaultMetricsLoggerTest, LogSingleValueMetricRecordsMetric) { + DefaultMetricsLogger logger(Clock::GetRealTimeClock()); + logger.LogSingleValueMetric( + "metric_name", "test_case_name", + /*value=*/10, Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + std::map{{"key", "value"}}); + + std::vector metrics = logger.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(1)); + const Metric& metric = metrics[0]; + EXPECT_THAT(metric.name, Eq("metric_name")); + EXPECT_THAT(metric.test_case, Eq("test_case_name")); + EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds)); + EXPECT_THAT(metric.improvement_direction, + Eq(ImprovementDirection::kBiggerIsBetter)); + EXPECT_THAT(metric.metric_metadata, + Eq(std::map{{"key", "value"}})); + ASSERT_THAT(metric.time_series.samples, SizeIs(1)); + EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0)); + EXPECT_THAT(metric.time_series.samples[0].sample_metadata, + Eq(std::map{})); + ASSERT_THAT(metric.stats.mean, absl::optional(10.0)); + ASSERT_THAT(metric.stats.stddev, absl::nullopt); + ASSERT_THAT(metric.stats.min, absl::optional(10.0)); + ASSERT_THAT(metric.stats.max, absl::optional(10.0)); +} + +TEST(DefaultMetricsLoggerTest, LogMetricWithSamplesStatsCounterRecordsMetric) { + DefaultMetricsLogger logger(Clock::GetRealTimeClock()); + + SamplesStatsCounter values; + values.AddSample(SamplesStatsCounter::StatsSample{ + .value = 10, + .time = Clock::GetRealTimeClock()->CurrentTime(), + .metadata = + std::map{{"point_key1", "value1"}}}); + values.AddSample(SamplesStatsCounter::StatsSample{ + .value = 20, + .time = Clock::GetRealTimeClock()->CurrentTime(), + .metadata = + std::map{{"point_key2", "value2"}}}); + logger.LogMetric("metric_name", "test_case_name", values, Unit::kMilliseconds, + ImprovementDirection::kBiggerIsBetter, + std::map{{"key", "value"}}); + + std::vector metrics = logger.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(1)); + const Metric& metric = metrics[0]; + EXPECT_THAT(metric.name, Eq("metric_name")); + EXPECT_THAT(metric.test_case, Eq("test_case_name")); + EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds)); + EXPECT_THAT(metric.improvement_direction, + 
Eq(ImprovementDirection::kBiggerIsBetter)); + EXPECT_THAT(metric.metric_metadata, + Eq(std::map{{"key", "value"}})); + ASSERT_THAT(metric.time_series.samples, SizeIs(2)); + EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0)); + EXPECT_THAT(metric.time_series.samples[0].sample_metadata, + Eq(std::map{{"point_key1", "value1"}})); + EXPECT_THAT(metric.time_series.samples[1].value, Eq(20.0)); + EXPECT_THAT(metric.time_series.samples[1].sample_metadata, + Eq(std::map{{"point_key2", "value2"}})); + ASSERT_THAT(metric.stats.mean, absl::optional(15.0)); + ASSERT_THAT(metric.stats.stddev, absl::optional(5.0)); + ASSERT_THAT(metric.stats.min, absl::optional(10.0)); + ASSERT_THAT(metric.stats.max, absl::optional(20.0)); +} + +TEST(DefaultMetricsLoggerTest, + LogMetricWithEmptySamplesStatsCounterRecordsEmptyMetric) { + DefaultMetricsLogger logger(Clock::GetRealTimeClock()); + SamplesStatsCounter values; + logger.LogMetric("metric_name", "test_case_name", values, Unit::kUnitless, + ImprovementDirection::kBiggerIsBetter, DefaultMetadata()); + + std::vector metrics = logger.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(1)); + EXPECT_THAT(metrics[0].name, Eq("metric_name")); + EXPECT_THAT(metrics[0].test_case, Eq("test_case_name")); + EXPECT_THAT(metrics[0].time_series.samples, IsEmpty()); + ASSERT_THAT(metrics[0].stats.mean, Eq(absl::nullopt)); + ASSERT_THAT(metrics[0].stats.stddev, Eq(absl::nullopt)); + ASSERT_THAT(metrics[0].stats.min, Eq(absl::nullopt)); + ASSERT_THAT(metrics[0].stats.max, Eq(absl::nullopt)); +} + +TEST(DefaultMetricsLoggerTest, LogMetricWithStatsRecordsMetric) { + DefaultMetricsLogger logger(Clock::GetRealTimeClock()); + Metric::Stats metric_stats{.mean = 15, .stddev = 5, .min = 10, .max = 20}; + logger.LogMetric("metric_name", "test_case_name", metric_stats, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + std::map{{"key", "value"}}); + + std::vector metrics = logger.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(1)); + const Metric& metric = metrics[0]; + EXPECT_THAT(metric.name, Eq("metric_name")); + EXPECT_THAT(metric.test_case, Eq("test_case_name")); + EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds)); + EXPECT_THAT(metric.improvement_direction, + Eq(ImprovementDirection::kBiggerIsBetter)); + EXPECT_THAT(metric.metric_metadata, + Eq(std::map{{"key", "value"}})); + ASSERT_THAT(metric.time_series.samples, IsEmpty()); + ASSERT_THAT(metric.stats.mean, absl::optional(15.0)); + ASSERT_THAT(metric.stats.stddev, absl::optional(5.0)); + ASSERT_THAT(metric.stats.min, absl::optional(10.0)); + ASSERT_THAT(metric.stats.max, absl::optional(20.0)); +} + +TEST(DefaultMetricsLoggerTest, LogSingleValueMetricRecordsMultipleMetrics) { + DefaultMetricsLogger logger(Clock::GetRealTimeClock()); + + logger.LogSingleValueMetric("metric_name1", "test_case_name1", + /*value=*/10, Unit::kMilliseconds, + ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + logger.LogSingleValueMetric("metric_name2", "test_case_name2", + /*value=*/10, Unit::kMilliseconds, + ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + + std::vector metrics = logger.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(2)); + EXPECT_THAT(metrics[0].name, Eq("metric_name1")); + EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1")); + EXPECT_THAT(metrics[1].name, Eq("metric_name2")); + EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); +} + +TEST(DefaultMetricsLoggerTest, + LogMetricWithSamplesStatsCounterRecordsMultipleMetrics) { + DefaultMetricsLogger 
logger(Clock::GetRealTimeClock()); + SamplesStatsCounter values; + values.AddSample(SamplesStatsCounter::StatsSample{ + .value = 10, + .time = Clock::GetRealTimeClock()->CurrentTime(), + .metadata = DefaultMetadata()}); + values.AddSample(SamplesStatsCounter::StatsSample{ + .value = 20, + .time = Clock::GetRealTimeClock()->CurrentTime(), + .metadata = DefaultMetadata()}); + + logger.LogMetric("metric_name1", "test_case_name1", values, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + logger.LogMetric("metric_name2", "test_case_name2", values, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + + std::vector metrics = logger.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(2)); + EXPECT_THAT(metrics[0].name, Eq("metric_name1")); + EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1")); + EXPECT_THAT(metrics[1].name, Eq("metric_name2")); + EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); +} + +TEST(DefaultMetricsLoggerTest, LogMetricWithStatsRecordsMultipleMetrics) { + DefaultMetricsLogger logger(Clock::GetRealTimeClock()); + Metric::Stats metric_stats{.mean = 15, .stddev = 5, .min = 10, .max = 20}; + + logger.LogMetric("metric_name1", "test_case_name1", metric_stats, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + logger.LogMetric("metric_name2", "test_case_name2", metric_stats, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + + std::vector metrics = logger.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(2)); + EXPECT_THAT(metrics[0].name, Eq("metric_name1")); + EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1")); + EXPECT_THAT(metrics[1].name, Eq("metric_name2")); + EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); +} + +TEST(DefaultMetricsLoggerTest, + LogMetricThroughtAllMethodsAccumulateAllMetrics) { + DefaultMetricsLogger logger(Clock::GetRealTimeClock()); + SamplesStatsCounter values; + values.AddSample(SamplesStatsCounter::StatsSample{ + .value = 10, + .time = Clock::GetRealTimeClock()->CurrentTime(), + .metadata = DefaultMetadata()}); + values.AddSample(SamplesStatsCounter::StatsSample{ + .value = 20, + .time = Clock::GetRealTimeClock()->CurrentTime(), + .metadata = DefaultMetadata()}); + Metric::Stats metric_stats{.mean = 15, .stddev = 5, .min = 10, .max = 20}; + + logger.LogSingleValueMetric("metric_name1", "test_case_name1", + /*value=*/10, Unit::kMilliseconds, + ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + logger.LogMetric("metric_name2", "test_case_name2", values, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + logger.LogMetric("metric_name3", "test_case_name3", metric_stats, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + DefaultMetadata()); + + std::vector metrics = logger.GetCollectedMetrics(); + ASSERT_THAT(metrics.size(), Eq(3lu)); + EXPECT_THAT(metrics[0].name, Eq("metric_name1")); + EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1")); + EXPECT_THAT(metrics[1].name, Eq("metric_name2")); + EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); + EXPECT_THAT(metrics[2].name, Eq("metric_name3")); + EXPECT_THAT(metrics[2].test_case, Eq("test_case_name3")); +} + +TEST(DefaultMetricsLoggerTest, AccumulatedMetricsReturnedInCollectedMetrics) { + DefaultMetricsLogger logger(Clock::GetRealTimeClock()); + logger.GetMetricsAccumulator()->AddSample( + "metric_name", "test_case_name", + /*value=*/10, Timestamp::Seconds(1), + 
/*point_metadata=*/std::map{{"key", "value"}}); + + std::vector metrics = logger.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(1)); + const Metric& metric = metrics[0]; + EXPECT_THAT(metric.name, Eq("metric_name")); + EXPECT_THAT(metric.test_case, Eq("test_case_name")); + EXPECT_THAT(metric.unit, Eq(Unit::kUnitless)); + EXPECT_THAT(metric.improvement_direction, + Eq(ImprovementDirection::kNeitherIsBetter)); + EXPECT_THAT(metric.metric_metadata, IsEmpty()); + ASSERT_THAT(metric.time_series.samples, SizeIs(1)); + EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0)); + EXPECT_THAT(metric.time_series.samples[0].timestamp, + Eq(Timestamp::Seconds(1))); + EXPECT_THAT(metric.time_series.samples[0].sample_metadata, + Eq(std::map{{"key", "value"}})); + ASSERT_THAT(metric.stats.mean, absl::optional(10.0)); + ASSERT_THAT(metric.stats.stddev, absl::optional(0.0)); + ASSERT_THAT(metric.stats.min, absl::optional(10.0)); + ASSERT_THAT(metric.stats.max, absl::optional(10.0)); +} + +TEST(DefaultMetricsLoggerTest, + AccumulatedMetricsReturnedTogetherWithLoggedMetrics) { + DefaultMetricsLogger logger(Clock::GetRealTimeClock()); + logger.LogSingleValueMetric( + "metric_name1", "test_case_name1", + /*value=*/10, Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter, + std::map{{"key_m", "value_m"}}); + logger.GetMetricsAccumulator()->AddSample( + "metric_name2", "test_case_name2", + /*value=*/10, Timestamp::Seconds(1), + /*point_metadata=*/ + std::map{{"key_s", "value_s"}}); + + std::vector metrics = logger.GetCollectedMetrics(); + ASSERT_THAT(metrics, SizeIs(2)); + EXPECT_THAT(metrics[0].name, Eq("metric_name2")); + EXPECT_THAT(metrics[0].test_case, Eq("test_case_name2")); + EXPECT_THAT(metrics[0].unit, Eq(Unit::kUnitless)); + EXPECT_THAT(metrics[0].improvement_direction, + Eq(ImprovementDirection::kNeitherIsBetter)); + EXPECT_THAT(metrics[0].metric_metadata, IsEmpty()); + ASSERT_THAT(metrics[0].time_series.samples, SizeIs(1)); + EXPECT_THAT(metrics[0].time_series.samples[0].value, Eq(10.0)); + EXPECT_THAT(metrics[0].time_series.samples[0].timestamp, + Eq(Timestamp::Seconds(1))); + EXPECT_THAT(metrics[0].time_series.samples[0].sample_metadata, + Eq(std::map{{"key_s", "value_s"}})); + ASSERT_THAT(metrics[0].stats.mean, absl::optional(10.0)); + ASSERT_THAT(metrics[0].stats.stddev, absl::optional(0.0)); + ASSERT_THAT(metrics[0].stats.min, absl::optional(10.0)); + ASSERT_THAT(metrics[0].stats.max, absl::optional(10.0)); + EXPECT_THAT(metrics[1].name, Eq("metric_name1")); + EXPECT_THAT(metrics[1].test_case, Eq("test_case_name1")); + EXPECT_THAT(metrics[1].unit, Eq(Unit::kMilliseconds)); + EXPECT_THAT(metrics[1].improvement_direction, + Eq(ImprovementDirection::kBiggerIsBetter)); + EXPECT_THAT(metrics[1].metric_metadata, + Eq(std::map{{"key_m", "value_m"}})); + ASSERT_THAT(metrics[1].time_series.samples, SizeIs(1)); + EXPECT_THAT(metrics[1].time_series.samples[0].value, Eq(10.0)); + EXPECT_THAT(metrics[1].time_series.samples[0].sample_metadata, + Eq(std::map{})); + ASSERT_THAT(metrics[1].stats.mean, absl::optional(10.0)); + ASSERT_THAT(metrics[1].stats.stddev, absl::nullopt); + ASSERT_THAT(metrics[1].stats.min, absl::optional(10.0)); + ASSERT_THAT(metrics[1].stats.max, absl::optional(10.0)); +} + +} // namespace +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_set_proto_file_exporter.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_set_proto_file_exporter.cc new file mode 100644 index 0000000000..86e6f2e136 --- /dev/null +++ 
b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_set_proto_file_exporter.cc @@ -0,0 +1,157 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/test/metrics/metrics_set_proto_file_exporter.h" + +#include + +#include + +#include "api/test/metrics/metric.h" +#include "rtc_base/logging.h" +#include "test/testsupport/file_utils.h" + +#if WEBRTC_ENABLE_PROTOBUF +#include "api/test/metrics/proto/metric.pb.h" +#endif + +namespace webrtc { +namespace test { +namespace { + +#if WEBRTC_ENABLE_PROTOBUF +webrtc::test_metrics::Unit ToProtoUnit(Unit unit) { + switch (unit) { + case Unit::kMilliseconds: + return webrtc::test_metrics::Unit::MILLISECONDS; + case Unit::kPercent: + return webrtc::test_metrics::Unit::PERCENT; + case Unit::kBytes: + return webrtc::test_metrics::Unit::BYTES; + case Unit::kKilobitsPerSecond: + return webrtc::test_metrics::Unit::KILOBITS_PER_SECOND; + case Unit::kHertz: + return webrtc::test_metrics::Unit::HERTZ; + case Unit::kUnitless: + return webrtc::test_metrics::Unit::UNITLESS; + case Unit::kCount: + return webrtc::test_metrics::Unit::COUNT; + } +} + +webrtc::test_metrics::ImprovementDirection ToProtoImprovementDirection( + ImprovementDirection direction) { + switch (direction) { + case ImprovementDirection::kBiggerIsBetter: + return webrtc::test_metrics::ImprovementDirection::BIGGER_IS_BETTER; + case ImprovementDirection::kNeitherIsBetter: + return webrtc::test_metrics::ImprovementDirection::NEITHER_IS_BETTER; + case ImprovementDirection::kSmallerIsBetter: + return webrtc::test_metrics::ImprovementDirection::SMALLER_IS_BETTER; + } +} + +void SetTimeSeries( + const Metric::TimeSeries& time_series, + webrtc::test_metrics::Metric::TimeSeries* proto_time_series) { + for (const Metric::TimeSeries::Sample& sample : time_series.samples) { + webrtc::test_metrics::Metric::TimeSeries::Sample* proto_sample = + proto_time_series->add_samples(); + proto_sample->set_value(sample.value); + proto_sample->set_timestamp_us(sample.timestamp.us()); + for (const auto& [key, value] : sample.sample_metadata) { + proto_sample->mutable_sample_metadata()->insert({key, value}); + } + } +} + +void SetStats(const Metric::Stats& stats, + webrtc::test_metrics::Metric::Stats* proto_stats) { + if (stats.mean.has_value()) { + proto_stats->set_mean(*stats.mean); + } + if (stats.stddev.has_value()) { + proto_stats->set_stddev(*stats.stddev); + } + if (stats.min.has_value()) { + proto_stats->set_min(*stats.min); + } + if (stats.max.has_value()) { + proto_stats->set_max(*stats.max); + } +} + +bool WriteMetricsToFile(const std::string& path, + const webrtc::test_metrics::MetricsSet& metrics_set) { + std::string data; + bool ok = metrics_set.SerializeToString(&data); + if (!ok) { + RTC_LOG(LS_ERROR) << "Failed to serialize histogram set to string"; + return false; + } + + CreateDir(DirName(path)); + FILE* output = fopen(path.c_str(), "wb"); + if (output == NULL) { + RTC_LOG(LS_ERROR) << "Failed to write to " << path; + return false; + } + size_t written = fwrite(data.c_str(), sizeof(char), data.size(), output); + fclose(output); + + if (written != data.size()) { + size_t expected = data.size(); + RTC_LOG(LS_ERROR) << "Wrote " << 
written << ", tried to write " << expected; + return false; + } + return true; +} +#endif // WEBRTC_ENABLE_PROTOBUF + +} // namespace + +MetricsSetProtoFileExporter::Options::Options( + absl::string_view export_file_path) + : export_file_path(export_file_path) {} +MetricsSetProtoFileExporter::Options::Options( + absl::string_view export_file_path, + bool export_whole_time_series) + : export_file_path(export_file_path), + export_whole_time_series(export_whole_time_series) {} + +bool MetricsSetProtoFileExporter::Export(rtc::ArrayView metrics) { +#if WEBRTC_ENABLE_PROTOBUF + webrtc::test_metrics::MetricsSet metrics_set; + for (const Metric& metric : metrics) { + webrtc::test_metrics::Metric* metric_proto = metrics_set.add_metrics(); + metric_proto->set_name(metric.name); + metric_proto->set_unit(ToProtoUnit(metric.unit)); + metric_proto->set_improvement_direction( + ToProtoImprovementDirection(metric.improvement_direction)); + metric_proto->set_test_case(metric.test_case); + for (const auto& [key, value] : metric.metric_metadata) { + metric_proto->mutable_metric_metadata()->insert({key, value}); + } + + if (options_.export_whole_time_series) { + SetTimeSeries(metric.time_series, metric_proto->mutable_time_series()); + } + SetStats(metric.stats, metric_proto->mutable_stats()); + } + + return WriteMetricsToFile(options_.export_file_path, metrics_set); +#else + RTC_LOG(LS_ERROR) + << "Compile with protobuf support to properly use this class"; + return false; +#endif +} + +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_set_proto_file_exporter.h b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_set_proto_file_exporter.h new file mode 100644 index 0000000000..f996e9e7b0 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_set_proto_file_exporter.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_METRICS_METRICS_SET_PROTO_FILE_EXPORTER_H_ +#define API_TEST_METRICS_METRICS_SET_PROTO_FILE_EXPORTER_H_ + +#include + +#include "api/array_view.h" +#include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_exporter.h" + +namespace webrtc { +namespace test { + +// Exports all collected metrics to the proto file using +// `webrtc::test_metrics::MetricsSet` format. +class MetricsSetProtoFileExporter : public MetricsExporter { + public: + struct Options { + explicit Options(absl::string_view export_file_path); + Options(absl::string_view export_file_path, bool export_whole_time_series); + + // File to export proto. + std::string export_file_path; + // If true will write all time series values to the output proto file, + // otherwise will write stats only. 
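That flag is declared on the next line and defaults to true. A usage sketch for this exporter, not part of the vendored diff; the output path is hypothetical and the stripped template arguments are restored as assumed.

// Sketch only: writes a MetricsSet proto with stats but without time series.
#include <vector>

#include "api/test/metrics/metric.h"
#include "api/test/metrics/metrics_set_proto_file_exporter.h"

bool ExportToProtoFile(const std::vector<webrtc::test::Metric>& metrics) {
  webrtc::test::MetricsSetProtoFileExporter::Options options(
      "/tmp/metrics_set.pb", /*export_whole_time_series=*/false);
  webrtc::test::MetricsSetProtoFileExporter exporter(options);
  // Returns false (and logs an error) unless built with WEBRTC_ENABLE_PROTOBUF.
  return exporter.Export(metrics);
}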
+ bool export_whole_time_series = true; + }; + + explicit MetricsSetProtoFileExporter(const Options& options) + : options_(options) {} + + MetricsSetProtoFileExporter(const MetricsSetProtoFileExporter&) = delete; + MetricsSetProtoFileExporter& operator=(const MetricsSetProtoFileExporter&) = + delete; + + bool Export(rtc::ArrayView metrics) override; + + private: + const Options options_; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_METRICS_METRICS_SET_PROTO_FILE_EXPORTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_set_proto_file_exporter_test.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_set_proto_file_exporter_test.cc new file mode 100644 index 0000000000..eb4d483068 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/metrics_set_proto_file_exporter_test.cc @@ -0,0 +1,151 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/test/metrics/metrics_set_proto_file_exporter.h" + +#include +#include +#include +#include + +#include "api/test/metrics/metric.h" +#include "api/test/metrics/proto/metric.pb.h" +#include "api/units/timestamp.h" +#include "rtc_base/protobuf_utils.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { +namespace test { +namespace { + +using ::testing::Eq; +using ::testing::Test; + +namespace proto = ::webrtc::test_metrics; + +std::string ReadFileAsString(const std::string& filename) { + std::ifstream infile(filename, std::ios_base::binary); + auto buffer = std::vector(std::istreambuf_iterator(infile), + std::istreambuf_iterator()); + return std::string(buffer.begin(), buffer.end()); +} + +std::map DefaultMetadata() { + return std::map{{"key", "value"}}; +} + +Metric::TimeSeries::Sample Sample(double value) { + return Metric::TimeSeries::Sample{.timestamp = Timestamp::Seconds(1), + .value = value, + .sample_metadata = DefaultMetadata()}; +} + +void AssertSamplesEqual(const proto::Metric::TimeSeries::Sample& actual_sample, + const Metric::TimeSeries::Sample& expected_sample) { + EXPECT_THAT(actual_sample.value(), Eq(expected_sample.value)); + EXPECT_THAT(actual_sample.timestamp_us(), Eq(expected_sample.timestamp.us())); + EXPECT_THAT(actual_sample.sample_metadata().size(), + Eq(expected_sample.sample_metadata.size())); + for (const auto& [key, value] : expected_sample.sample_metadata) { + EXPECT_THAT(actual_sample.sample_metadata().at(key), Eq(value)); + } +} + +class MetricsSetProtoFileExporterTest : public Test { + protected: + ~MetricsSetProtoFileExporterTest() override = default; + + void SetUp() override { + temp_filename_ = webrtc::test::TempFilename( + webrtc::test::OutputPath(), "metrics_set_proto_file_exporter_test"); + } + + void TearDown() override { + ASSERT_TRUE(webrtc::test::RemoveFile(temp_filename_)); + } + + std::string temp_filename_; +}; + +TEST_F(MetricsSetProtoFileExporterTest, MetricsAreExportedCorrectly) { + MetricsSetProtoFileExporter::Options options(temp_filename_); + MetricsSetProtoFileExporter exporter(options); + + Metric metric1{ + .name = "test_metric1", + .unit = Unit::kMilliseconds, + .improvement_direction = 
ImprovementDirection::kBiggerIsBetter, + .test_case = "test_case_name1", + .metric_metadata = DefaultMetadata(), + .time_series = + Metric::TimeSeries{.samples = std::vector{Sample(10), Sample(20)}}, + .stats = + Metric::Stats{.mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}}; + Metric metric2{ + .name = "test_metric2", + .unit = Unit::kKilobitsPerSecond, + .improvement_direction = ImprovementDirection::kSmallerIsBetter, + .test_case = "test_case_name2", + .metric_metadata = DefaultMetadata(), + .time_series = + Metric::TimeSeries{.samples = std::vector{Sample(20), Sample(40)}}, + .stats = Metric::Stats{ + .mean = 30.0, .stddev = 10.0, .min = 20.0, .max = 40.0}}; + + ASSERT_TRUE(exporter.Export(std::vector{metric1, metric2})); + webrtc::test_metrics::MetricsSet actual_metrics_set; + actual_metrics_set.ParseFromString(ReadFileAsString(temp_filename_)); + EXPECT_THAT(actual_metrics_set.metrics().size(), Eq(2)); + + EXPECT_THAT(actual_metrics_set.metrics(0).name(), Eq("test_metric1")); + EXPECT_THAT(actual_metrics_set.metrics(0).test_case(), Eq("test_case_name1")); + EXPECT_THAT(actual_metrics_set.metrics(0).unit(), + Eq(proto::Unit::MILLISECONDS)); + EXPECT_THAT(actual_metrics_set.metrics(0).improvement_direction(), + Eq(proto::ImprovementDirection::BIGGER_IS_BETTER)); + EXPECT_THAT(actual_metrics_set.metrics(0).metric_metadata().size(), Eq(1lu)); + EXPECT_THAT(actual_metrics_set.metrics(0).metric_metadata().at("key"), + Eq("value")); + EXPECT_THAT(actual_metrics_set.metrics(0).time_series().samples().size(), + Eq(2)); + AssertSamplesEqual(actual_metrics_set.metrics(0).time_series().samples(0), + Sample(10.0)); + AssertSamplesEqual(actual_metrics_set.metrics(0).time_series().samples(1), + Sample(20.0)); + EXPECT_THAT(actual_metrics_set.metrics(0).stats().mean(), Eq(15.0)); + EXPECT_THAT(actual_metrics_set.metrics(0).stats().stddev(), Eq(5.0)); + EXPECT_THAT(actual_metrics_set.metrics(0).stats().min(), Eq(10.0)); + EXPECT_THAT(actual_metrics_set.metrics(0).stats().max(), Eq(20.0)); + + EXPECT_THAT(actual_metrics_set.metrics(1).name(), Eq("test_metric2")); + EXPECT_THAT(actual_metrics_set.metrics(1).test_case(), Eq("test_case_name2")); + EXPECT_THAT(actual_metrics_set.metrics(1).unit(), + Eq(proto::Unit::KILOBITS_PER_SECOND)); + EXPECT_THAT(actual_metrics_set.metrics(1).improvement_direction(), + Eq(proto::ImprovementDirection::SMALLER_IS_BETTER)); + EXPECT_THAT(actual_metrics_set.metrics(1).metric_metadata().size(), Eq(1lu)); + EXPECT_THAT(actual_metrics_set.metrics(1).metric_metadata().at("key"), + Eq("value")); + EXPECT_THAT(actual_metrics_set.metrics(1).time_series().samples().size(), + Eq(2)); + AssertSamplesEqual(actual_metrics_set.metrics(1).time_series().samples(0), + Sample(20.0)); + AssertSamplesEqual(actual_metrics_set.metrics(1).time_series().samples(1), + Sample(40.0)); + EXPECT_THAT(actual_metrics_set.metrics(1).stats().mean(), Eq(30.0)); + EXPECT_THAT(actual_metrics_set.metrics(1).stats().stddev(), Eq(10.0)); + EXPECT_THAT(actual_metrics_set.metrics(1).stats().min(), Eq(20.0)); + EXPECT_THAT(actual_metrics_set.metrics(1).stats().max(), Eq(40.0)); +} + +} // namespace +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/print_result_proxy_metrics_exporter.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/print_result_proxy_metrics_exporter.cc new file mode 100644 index 0000000000..1ce1e63892 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/print_result_proxy_metrics_exporter.cc @@ -0,0 +1,157 @@ +/* + * 
Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/test/metrics/print_result_proxy_metrics_exporter.h" + +#include +#include + +#include "api/array_view.h" +#include "api/test/metrics/metric.h" +#include "test/testsupport/perf_test.h" + +namespace webrtc { +namespace test { +namespace { + +std::string ToPrintResultUnit(Unit unit) { + switch (unit) { + case Unit::kMilliseconds: + return "msBestFitFormat"; + case Unit::kPercent: + return "n%"; + case Unit::kBytes: + return "sizeInBytes"; + case Unit::kKilobitsPerSecond: + // PrintResults prefer Chrome Perf Dashboard units, which doesn't have + // kpbs units, so we change the unit and value accordingly. + return "bytesPerSecond"; + case Unit::kHertz: + return "Hz"; + case Unit::kUnitless: + return "unitless"; + case Unit::kCount: + return "count"; + } +} + +double ToPrintResultValue(double value, Unit unit) { + switch (unit) { + case Unit::kKilobitsPerSecond: + // PrintResults prefer Chrome Perf Dashboard units, which doesn't have + // kpbs units, so we change the unit and value accordingly. + return value * 1000 / 8; + default: + return value; + } +} + +ImproveDirection ToPrintResultImproveDirection(ImprovementDirection direction) { + switch (direction) { + case ImprovementDirection::kBiggerIsBetter: + return ImproveDirection::kBiggerIsBetter; + case ImprovementDirection::kNeitherIsBetter: + return ImproveDirection::kNone; + case ImprovementDirection::kSmallerIsBetter: + return ImproveDirection::kSmallerIsBetter; + } +} + +bool IsEmpty(const Metric::Stats& stats) { + return !stats.mean.has_value() && !stats.stddev.has_value() && + !stats.min.has_value() && !stats.max.has_value(); +} + +bool NameEndsWithConnected(const std::string& name) { + static const std::string suffix = "_connected"; + return name.size() >= suffix.size() && + 0 == name.compare(name.size() - suffix.size(), suffix.size(), suffix); +} + +} // namespace + +bool PrintResultProxyMetricsExporter::Export( + rtc::ArrayView metrics) { + static const std::unordered_set per_call_metrics{ + "actual_encode_bitrate", + "encode_frame_rate", + "harmonic_framerate", + "max_skipped", + "min_psnr_dB", + "retransmission_bitrate", + "sent_packets_loss", + "transmission_bitrate", + "dropped_frames", + "frames_in_flight", + "rendered_frames", + "average_receive_rate", + "average_send_rate", + "bytes_discarded_no_receiver", + "bytes_received", + "bytes_sent", + "packets_discarded_no_receiver", + "packets_received", + "packets_sent", + "payload_bytes_received", + "payload_bytes_sent", + "cpu_usage"}; + + for (const Metric& metric : metrics) { + if (metric.time_series.samples.empty() && IsEmpty(metric.stats)) { + // If there were no data collected for the metric it is expected that 0 + // will be exported, so add 0 to the samples. 
+ PrintResult(metric.name, /*modifier=*/"", metric.test_case, + ToPrintResultValue(0, metric.unit), + ToPrintResultUnit(metric.unit), /*important=*/false, + ToPrintResultImproveDirection(metric.improvement_direction)); + continue; + } + + if (metric.time_series.samples.empty()) { + PrintResultMeanAndError( + metric.name, /*modifier=*/"", metric.test_case, + ToPrintResultValue(*metric.stats.mean, metric.unit), + ToPrintResultValue(*metric.stats.stddev, metric.unit), + ToPrintResultUnit(metric.unit), + /*important=*/false, + ToPrintResultImproveDirection(metric.improvement_direction)); + continue; + } + + if (metric.time_series.samples.size() == 1lu && + (per_call_metrics.count(metric.name) > 0 || + NameEndsWithConnected(metric.name))) { + // Increase backwards compatibility for 1 value use case. + PrintResult( + metric.name, /*modifier=*/"", metric.test_case, + ToPrintResultValue(metric.time_series.samples[0].value, metric.unit), + ToPrintResultUnit(metric.unit), /*important=*/false, + ToPrintResultImproveDirection(metric.improvement_direction)); + continue; + } + + SamplesStatsCounter counter; + for (size_t i = 0; i < metric.time_series.samples.size(); ++i) { + counter.AddSample(SamplesStatsCounter::StatsSample{ + .value = ToPrintResultValue(metric.time_series.samples[i].value, + metric.unit), + .time = metric.time_series.samples[i].timestamp, + .metadata = metric.time_series.samples[i].sample_metadata}); + } + + PrintResult(metric.name, /*modifier=*/"", metric.test_case, counter, + ToPrintResultUnit(metric.unit), + /*important=*/false, + ToPrintResultImproveDirection(metric.improvement_direction)); + } + return true; +} + +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/print_result_proxy_metrics_exporter.h b/TMessagesProj/jni/voip/webrtc/api/test/metrics/print_result_proxy_metrics_exporter.h new file mode 100644 index 0000000000..bad0594972 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/print_result_proxy_metrics_exporter.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_METRICS_PRINT_RESULT_PROXY_METRICS_EXPORTER_H_ +#define API_TEST_METRICS_PRINT_RESULT_PROXY_METRICS_EXPORTER_H_ + +#include "api/array_view.h" +#include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_exporter.h" + +namespace webrtc { +namespace test { + +// Proxies all exported metrics to the `webrtc::test::PrintResult` API. 
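A usage sketch for this proxy, not part of the vendored diff: each metric is printed as a perf-dashboard style line of the form "RESULT <name>: <test_case>= <value or {mean,stddev}> <unit>_<improvementDirection>", matching the expected strings in the tests further below. The ArrayView element type is assumed to be const Metric, since the template argument was stripped from the text above.

// Sketch only: prints metrics through the legacy PrintResult API.
#include <vector>

#include "api/test/metrics/metric.h"
#include "api/test/metrics/print_result_proxy_metrics_exporter.h"

bool PrintMetrics(const std::vector<webrtc::test::Metric>& metrics) {
  webrtc::test::PrintResultProxyMetricsExporter exporter;
  return exporter.Export(metrics);  // Always returns true.
}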
+class PrintResultProxyMetricsExporter : public MetricsExporter { + public: + ~PrintResultProxyMetricsExporter() override = default; + + bool Export(rtc::ArrayView metrics) override; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_METRICS_PRINT_RESULT_PROXY_METRICS_EXPORTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/print_result_proxy_metrics_exporter_test.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/print_result_proxy_metrics_exporter_test.cc new file mode 100644 index 0000000000..768c794b40 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/print_result_proxy_metrics_exporter_test.cc @@ -0,0 +1,177 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/test/metrics/print_result_proxy_metrics_exporter.h" + +#include +#include +#include + +#include "api/test/metrics/metric.h" +#include "api/units/timestamp.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace test { +namespace { + +using ::testing::TestWithParam; + +std::map DefaultMetadata() { + return std::map{{"key", "value"}}; +} + +Metric::TimeSeries::Sample Sample(double value) { + return Metric::TimeSeries::Sample{.timestamp = Timestamp::Seconds(1), + .value = value, + .sample_metadata = DefaultMetadata()}; +} + +TEST(PrintResultProxyMetricsExporterTest, + ExportMetricsWithTimeSeriesFormatCorrect) { + Metric metric1{ + .name = "test_metric1", + .unit = Unit::kMilliseconds, + .improvement_direction = ImprovementDirection::kBiggerIsBetter, + .test_case = "test_case_name1", + .metric_metadata = DefaultMetadata(), + .time_series = + Metric::TimeSeries{.samples = std::vector{Sample(10), Sample(20)}}, + .stats = + Metric::Stats{.mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}}; + Metric metric2{ + .name = "test_metric2", + .unit = Unit::kKilobitsPerSecond, + .improvement_direction = ImprovementDirection::kSmallerIsBetter, + .test_case = "test_case_name2", + .metric_metadata = DefaultMetadata(), + .time_series = + Metric::TimeSeries{.samples = std::vector{Sample(20), Sample(40)}}, + .stats = Metric::Stats{ + .mean = 30.0, .stddev = 10.0, .min = 20.0, .max = 40.0}}; + + testing::internal::CaptureStdout(); + PrintResultProxyMetricsExporter exporter; + + std::string expected = + "RESULT test_metric1: test_case_name1= {15,5} " + "msBestFitFormat_biggerIsBetter\n" + "RESULT test_metric2: test_case_name2= {3750,1250} " + "bytesPerSecond_smallerIsBetter\n"; + + EXPECT_TRUE(exporter.Export(std::vector{metric1, metric2})); + EXPECT_EQ(expected, testing::internal::GetCapturedStdout()); +} + +TEST(PrintResultProxyMetricsExporterTest, + ExportMetricsTimeSeriesOfSingleValueBackwardCompatibleFormat) { + // This should be printed as {mean, stddev} despite only being a single data + // point. 
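// (Per the exporter implementation above: a single-sample time series is only
// collapsed to a plain scalar when the metric name is in the per-call
// allowlist or ends in "_connected"; "available_send_bandwidth" matches
// neither, so it keeps the {mean,stddev} form, while "min_psnr_dB" and
// "alice_connected" below are printed as single values.)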
+ Metric metric1{ + .name = "available_send_bandwidth", + .unit = Unit::kKilobitsPerSecond, + .improvement_direction = ImprovementDirection::kBiggerIsBetter, + .test_case = "test_case/alice", + .metric_metadata = DefaultMetadata(), + .time_series = Metric::TimeSeries{.samples = std::vector{Sample(1000)}}, + .stats = Metric::Stats{ + .mean = 1000.0, .stddev = 0.0, .min = 1000.0, .max = 1000.0}}; + // This is a per-call metric that shouldn't have a stddev estimate. + Metric metric2{ + .name = "min_psnr_dB", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kBiggerIsBetter, + .test_case = "test_case/alice-video", + .metric_metadata = DefaultMetadata(), + .time_series = Metric::TimeSeries{.samples = std::vector{Sample(10)}}, + .stats = + Metric::Stats{.mean = 10.0, .stddev = 0.0, .min = 10.0, .max = 10.0}}; + // This is a per-call metric that shouldn't have a stddev estimate. + Metric metric3{ + .name = "alice_connected", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kBiggerIsBetter, + .test_case = "test_case", + .metric_metadata = DefaultMetadata(), + .time_series = Metric::TimeSeries{.samples = std::vector{Sample(1)}}, + .stats = + Metric::Stats{.mean = 1.0, .stddev = 0.0, .min = 1.0, .max = 1.0}}; + + testing::internal::CaptureStdout(); + PrintResultProxyMetricsExporter exporter; + + std::string expected = + "RESULT available_send_bandwidth: test_case/alice= {125000,0} " + "bytesPerSecond_biggerIsBetter\n" + "RESULT min_psnr_dB: test_case/alice-video= 10 " + "unitless_biggerIsBetter\n" + "RESULT alice_connected: test_case= 1 " + "unitless_biggerIsBetter\n"; + + EXPECT_TRUE(exporter.Export(std::vector{metric1, metric2, metric3})); + EXPECT_EQ(expected, testing::internal::GetCapturedStdout()); +} + +TEST(PrintResultProxyMetricsExporterTest, + ExportMetricsWithStatsOnlyFormatCorrect) { + Metric metric1{.name = "test_metric1", + .unit = Unit::kMilliseconds, + .improvement_direction = ImprovementDirection::kBiggerIsBetter, + .test_case = "test_case_name1", + .metric_metadata = DefaultMetadata(), + .time_series = Metric::TimeSeries{.samples = {}}, + .stats = Metric::Stats{ + .mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}}; + Metric metric2{ + .name = "test_metric2", + .unit = Unit::kKilobitsPerSecond, + .improvement_direction = ImprovementDirection::kSmallerIsBetter, + .test_case = "test_case_name2", + .metric_metadata = DefaultMetadata(), + .time_series = Metric::TimeSeries{.samples = {}}, + .stats = Metric::Stats{ + .mean = 30.0, .stddev = 10.0, .min = 20.0, .max = 40.0}}; + + testing::internal::CaptureStdout(); + PrintResultProxyMetricsExporter exporter; + + std::string expected = + "RESULT test_metric1: test_case_name1= {15,5} " + "msBestFitFormat_biggerIsBetter\n" + "RESULT test_metric2: test_case_name2= {3750,1250} " + "bytesPerSecond_smallerIsBetter\n"; + + EXPECT_TRUE(exporter.Export(std::vector{metric1, metric2})); + EXPECT_EQ(expected, testing::internal::GetCapturedStdout()); +} + +TEST(PrintResultProxyMetricsExporterTest, ExportEmptyMetricOnlyFormatCorrect) { + Metric metric{.name = "test_metric", + .unit = Unit::kMilliseconds, + .improvement_direction = ImprovementDirection::kBiggerIsBetter, + .test_case = "test_case_name", + .metric_metadata = DefaultMetadata(), + .time_series = Metric::TimeSeries{.samples = {}}, + .stats = Metric::Stats{}}; + + testing::internal::CaptureStdout(); + PrintResultProxyMetricsExporter exporter; + + std::string expected = + "RESULT test_metric: test_case_name= 0 " + 
"msBestFitFormat_biggerIsBetter\n"; + + EXPECT_TRUE(exporter.Export(std::vector{metric})); + EXPECT_EQ(expected, testing::internal::GetCapturedStdout()); +} + +} // namespace +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/stdout_metrics_exporter.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/stdout_metrics_exporter.cc new file mode 100644 index 0000000000..22243e73e8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/stdout_metrics_exporter.cc @@ -0,0 +1,101 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/test/metrics/stdout_metrics_exporter.h" + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/test/metrics/metric.h" +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { +namespace test { +namespace { + +// Returns positive integral part of the number. +int64_t IntegralPart(double value) { + return std::lround(std::floor(std::abs(value))); +} + +void AppendWithPrecision(double value, + int digits_after_comma, + rtc::StringBuilder& out) { + int64_t multiplier = std::lround(std::pow(10, digits_after_comma)); + int64_t integral_part = IntegralPart(value); + double decimal_part = std::abs(value) - integral_part; + + // If decimal part has leading zeros then when it will be multiplied on + // `multiplier`, leading zeros will be lost. To preserve them we add "1" + // so then leading digit will be greater than 0 and won't be removed. + // + // During conversion to the string leading digit has to be stripped. + // + // Also due to rounding it may happen that leading digit may be incremented, + // like with `digits_after_comma` 3 number 1.9995 will be rounded to 2. In + // such case this increment has to be propagated to the `integral_part`. + int64_t decimal_holder = std::lround((1 + decimal_part) * multiplier); + if (decimal_holder >= 2 * multiplier) { + // Rounding incremented added leading digit, so we need to transfer 1 to + // integral part. + integral_part++; + decimal_holder -= multiplier; + } + // Remove trailing zeros. + while (decimal_holder % 10 == 0) { + decimal_holder /= 10; + } + + // Print serialized number to output. + if (value < 0) { + out << "-"; + } + out << integral_part; + if (decimal_holder != 1) { + out << "." 
<< std::to_string(decimal_holder).substr(1, digits_after_comma); + } +} + +} // namespace + +StdoutMetricsExporter::StdoutMetricsExporter() : output_(stdout) {} + +bool StdoutMetricsExporter::Export(rtc::ArrayView metrics) { + for (const Metric& metric : metrics) { + PrintMetric(metric); + } + return true; +} + +void StdoutMetricsExporter::PrintMetric(const Metric& metric) { + rtc::StringBuilder value_stream; + value_stream << metric.test_case << " / " << metric.name << "= {mean="; + if (metric.stats.mean.has_value()) { + AppendWithPrecision(*metric.stats.mean, 8, value_stream); + } else { + value_stream << "-"; + } + value_stream << ", stddev="; + if (metric.stats.stddev.has_value()) { + AppendWithPrecision(*metric.stats.stddev, 8, value_stream); + } else { + value_stream << "-"; + } + value_stream << "} " << ToString(metric.unit) << " (" + << ToString(metric.improvement_direction) << ")"; + + fprintf(output_, "RESULT: %s\n", value_stream.str().c_str()); +} + +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/stdout_metrics_exporter.h b/TMessagesProj/jni/voip/webrtc/api/test/metrics/stdout_metrics_exporter.h new file mode 100644 index 0000000000..2c572cb2ea --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/stdout_metrics_exporter.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_METRICS_STDOUT_METRICS_EXPORTER_H_ +#define API_TEST_METRICS_STDOUT_METRICS_EXPORTER_H_ + +#include "api/array_view.h" +#include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_exporter.h" + +namespace webrtc { +namespace test { + +// Exports all collected metrics to stdout. +class StdoutMetricsExporter : public MetricsExporter { + public: + StdoutMetricsExporter(); + ~StdoutMetricsExporter() override = default; + + StdoutMetricsExporter(const StdoutMetricsExporter&) = delete; + StdoutMetricsExporter& operator=(const StdoutMetricsExporter&) = delete; + + bool Export(rtc::ArrayView metrics) override; + + private: + void PrintMetric(const Metric& metric); + + FILE* const output_; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_METRICS_STDOUT_METRICS_EXPORTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/metrics/stdout_metrics_exporter_test.cc b/TMessagesProj/jni/voip/webrtc/api/test/metrics/stdout_metrics_exporter_test.cc new file mode 100644 index 0000000000..91c06fac5b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/metrics/stdout_metrics_exporter_test.cc @@ -0,0 +1,211 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "api/test/metrics/stdout_metrics_exporter.h" + +#include +#include +#include + +#include "api/test/metrics/metric.h" +#include "api/units/timestamp.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace test { +namespace { + +using ::testing::TestWithParam; + +std::map DefaultMetadata() { + return std::map{{"key", "value"}}; +} + +Metric::TimeSeries::Sample Sample(double value) { + return Metric::TimeSeries::Sample{.timestamp = Timestamp::Seconds(1), + .value = value, + .sample_metadata = DefaultMetadata()}; +} + +Metric PsnrForTestFoo(double mean, double stddev) { + return Metric{.name = "psnr", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kBiggerIsBetter, + .test_case = "foo", + .time_series = Metric::TimeSeries{}, + .stats = Metric::Stats{.mean = mean, .stddev = stddev}}; +} + +TEST(StdoutMetricsExporterTest, ExportMetricFormatCorrect) { + Metric metric1{ + .name = "test_metric1", + .unit = Unit::kMilliseconds, + .improvement_direction = ImprovementDirection::kBiggerIsBetter, + .test_case = "test_case_name1", + .metric_metadata = DefaultMetadata(), + .time_series = + Metric::TimeSeries{.samples = std::vector{Sample(10), Sample(20)}}, + .stats = + Metric::Stats{.mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}}; + Metric metric2{ + .name = "test_metric2", + .unit = Unit::kKilobitsPerSecond, + .improvement_direction = ImprovementDirection::kSmallerIsBetter, + .test_case = "test_case_name2", + .metric_metadata = DefaultMetadata(), + .time_series = + Metric::TimeSeries{.samples = std::vector{Sample(20), Sample(40)}}, + .stats = Metric::Stats{ + .mean = 30.0, .stddev = 10.0, .min = 20.0, .max = 40.0}}; + + testing::internal::CaptureStdout(); + StdoutMetricsExporter exporter; + + std::string expected = + "RESULT: test_case_name1 / test_metric1= " + "{mean=15, stddev=5} Milliseconds (BiggerIsBetter)\n" + "RESULT: test_case_name2 / test_metric2= " + "{mean=30, stddev=10} KilobitsPerSecond (SmallerIsBetter)\n"; + + EXPECT_TRUE(exporter.Export(std::vector{metric1, metric2})); + EXPECT_EQ(expected, testing::internal::GetCapturedStdout()); +} + +TEST(StdoutMetricsExporterNumberFormatTest, PositiveNumberMaxPrecision) { + testing::internal::CaptureStdout(); + StdoutMetricsExporter exporter; + + Metric metric = PsnrForTestFoo(15.00000001, 0.00000001); + std::string expected = + "RESULT: foo / psnr= " + "{mean=15.00000001, stddev=0.00000001} Unitless (BiggerIsBetter)\n"; + EXPECT_TRUE(exporter.Export(std::vector{metric})); + EXPECT_EQ(expected, testing::internal::GetCapturedStdout()); +} + +TEST(StdoutMetricsExporterNumberFormatTest, + PositiveNumberTrailingZeroNotAdded) { + testing::internal::CaptureStdout(); + StdoutMetricsExporter exporter; + + Metric metric = PsnrForTestFoo(15.12345, 0.12); + std::string expected = + "RESULT: foo / psnr= " + "{mean=15.12345, stddev=0.12} Unitless (BiggerIsBetter)\n"; + EXPECT_TRUE(exporter.Export(std::vector{metric})); + EXPECT_EQ(expected, testing::internal::GetCapturedStdout()); +} + +TEST(StdoutMetricsExporterNumberFormatTest, + PositiveNumberTrailingZeroAreRemoved) { + testing::internal::CaptureStdout(); + StdoutMetricsExporter exporter; + + Metric metric = PsnrForTestFoo(15.123450000, 0.120000000); + std::string expected = + "RESULT: foo / psnr= " + "{mean=15.12345, stddev=0.12} Unitless (BiggerIsBetter)\n"; + EXPECT_TRUE(exporter.Export(std::vector{metric})); + EXPECT_EQ(expected, testing::internal::GetCapturedStdout()); +} + +TEST(StdoutMetricsExporterNumberFormatTest, + 
PositiveNumberRoundsUpOnPrecisionCorrectly) { + testing::internal::CaptureStdout(); + StdoutMetricsExporter exporter; + + Metric metric = PsnrForTestFoo(15.000000009, 0.999999999); + std::string expected = + "RESULT: foo / psnr= " + "{mean=15.00000001, stddev=1} Unitless (BiggerIsBetter)\n"; + EXPECT_TRUE(exporter.Export(std::vector{metric})); + EXPECT_EQ(expected, testing::internal::GetCapturedStdout()); +} + +TEST(StdoutMetricsExporterNumberFormatTest, + PositiveNumberRoundsDownOnPrecisionCorrectly) { + testing::internal::CaptureStdout(); + StdoutMetricsExporter exporter; + + Metric metric = PsnrForTestFoo(15.0000000049, 0.9999999949); + std::string expected = + "RESULT: foo / psnr= " + "{mean=15, stddev=0.99999999} Unitless (BiggerIsBetter)\n"; + EXPECT_TRUE(exporter.Export(std::vector{metric})); + EXPECT_EQ(expected, testing::internal::GetCapturedStdout()); +} + +TEST(StdoutMetricsExporterNumberFormatTest, NegativeNumberMaxPrecision) { + testing::internal::CaptureStdout(); + StdoutMetricsExporter exporter; + + Metric metric = PsnrForTestFoo(-15.00000001, -0.00000001); + std::string expected = + "RESULT: foo / psnr= " + "{mean=-15.00000001, stddev=-0.00000001} Unitless (BiggerIsBetter)\n"; + EXPECT_TRUE(exporter.Export(std::vector{metric})); + EXPECT_EQ(expected, testing::internal::GetCapturedStdout()); +} + +TEST(StdoutMetricsExporterNumberFormatTest, + NegativeNumberTrailingZeroNotAdded) { + testing::internal::CaptureStdout(); + StdoutMetricsExporter exporter; + + Metric metric = PsnrForTestFoo(-15.12345, -0.12); + std::string expected = + "RESULT: foo / psnr= " + "{mean=-15.12345, stddev=-0.12} Unitless (BiggerIsBetter)\n"; + EXPECT_TRUE(exporter.Export(std::vector{metric})); + EXPECT_EQ(expected, testing::internal::GetCapturedStdout()); +} + +TEST(StdoutMetricsExporterNumberFormatTest, + NegativeNumberTrailingZeroAreRemoved) { + testing::internal::CaptureStdout(); + StdoutMetricsExporter exporter; + + Metric metric = PsnrForTestFoo(-15.123450000, -0.120000000); + std::string expected = + "RESULT: foo / psnr= " + "{mean=-15.12345, stddev=-0.12} Unitless (BiggerIsBetter)\n"; + EXPECT_TRUE(exporter.Export(std::vector{metric})); + EXPECT_EQ(expected, testing::internal::GetCapturedStdout()); +} + +TEST(StdoutMetricsExporterNumberFormatTest, + NegativeNumberRoundsUpOnPrecisionCorrectly) { + testing::internal::CaptureStdout(); + StdoutMetricsExporter exporter; + + Metric metric = PsnrForTestFoo(-15.000000009, -0.999999999); + std::string expected = + "RESULT: foo / psnr= " + "{mean=-15.00000001, stddev=-1} Unitless (BiggerIsBetter)\n"; + EXPECT_TRUE(exporter.Export(std::vector{metric})); + EXPECT_EQ(expected, testing::internal::GetCapturedStdout()); +} + +TEST(StdoutMetricsExporterNumberFormatTest, + NegativeNumberRoundsDownOnPrecisionCorrectly) { + testing::internal::CaptureStdout(); + StdoutMetricsExporter exporter; + + Metric metric = PsnrForTestFoo(-15.0000000049, -0.9999999949); + std::string expected = + "RESULT: foo / psnr= " + "{mean=-15, stddev=-0.99999999} Unitless (BiggerIsBetter)\n"; + EXPECT_TRUE(exporter.Export(std::vector{metric})); + EXPECT_EQ(expected, testing::internal::GetCapturedStdout()); +} + +} // namespace +} // namespace test +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_async_dns_resolver.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_async_dns_resolver.h index e863cac6e6..81132c96a5 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_async_dns_resolver.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_async_dns_resolver.h 
@@ -24,8 +24,8 @@ class MockAsyncDnsResolverResult : public AsyncDnsResolverResult { MOCK_METHOD(bool, GetResolvedAddress, (int, rtc::SocketAddress*), - (const override)); - MOCK_METHOD(int, GetError, (), (const override)); + (const, override)); + MOCK_METHOD(int, GetError, (), (const, override)); }; class MockAsyncDnsResolver : public AsyncDnsResolverInterface { @@ -34,7 +34,11 @@ class MockAsyncDnsResolver : public AsyncDnsResolverInterface { Start, (const rtc::SocketAddress&, std::function), (override)); - MOCK_METHOD(AsyncDnsResolverResult&, result, (), (const override)); + MOCK_METHOD(void, + Start, + (const rtc::SocketAddress&, int family, std::function), + (override)); + MOCK_METHOD(AsyncDnsResolverResult&, result, (), (const, override)); }; class MockAsyncDnsResolverFactory : public AsyncDnsResolverFactoryInterface { @@ -43,6 +47,10 @@ class MockAsyncDnsResolverFactory : public AsyncDnsResolverFactoryInterface { CreateAndResolve, (const rtc::SocketAddress&, std::function), (override)); + MOCK_METHOD(std::unique_ptr, + CreateAndResolve, + (const rtc::SocketAddress&, int, std::function), + (override)); MOCK_METHOD(std::unique_ptr, Create, (), diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_data_channel.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_data_channel.h index 9346ffd638..40f7edb08a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_data_channel.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_data_channel.h @@ -22,7 +22,8 @@ class MockDataChannelInterface final : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { - return new MockDataChannelInterface(); + return rtc::scoped_refptr( + new MockDataChannelInterface()); } MOCK_METHOD(void, diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_dtmf_sender.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_dtmf_sender.h new file mode 100644 index 0000000000..9029195025 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_dtmf_sender.h @@ -0,0 +1,56 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_DTMF_SENDER_H_ +#define API_TEST_MOCK_DTMF_SENDER_H_ + +#include + +#include "api/dtmf_sender_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockDtmfSenderObserver : public DtmfSenderObserverInterface { + public: + MOCK_METHOD(void, + OnToneChange, + (const std::string&, const std::string&), + (override)); + MOCK_METHOD(void, OnToneChange, (const std::string&), (override)); +}; + +static_assert(!std::is_abstract_v, ""); + +class MockDtmfSender : public DtmfSenderInterface { + public: + static rtc::scoped_refptr Create() { + return rtc::make_ref_counted(); + } + + MOCK_METHOD(void, + RegisterObserver, + (DtmfSenderObserverInterface * observer), + (override)); + MOCK_METHOD(void, UnregisterObserver, (), (override)); + MOCK_METHOD(bool, CanInsertDtmf, (), (override)); + MOCK_METHOD(std::string, tones, (), (const override)); + MOCK_METHOD(int, duration, (), (const override)); + MOCK_METHOD(int, inter_tone_gap, (), (const override)); + + protected: + MockDtmfSender() = default; +}; + +static_assert(!std::is_abstract_v>, ""); + +} // namespace webrtc + +#endif // API_TEST_MOCK_DTMF_SENDER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_encoder_selector.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_encoder_selector.h new file mode 100644 index 0000000000..2e018d57ba --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_encoder_selector.h @@ -0,0 +1,42 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_ENCODER_SELECTOR_H_ +#define API_TEST_MOCK_ENCODER_SELECTOR_H_ + +#include "api/video_codecs/video_encoder_factory.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockEncoderSelector + : public VideoEncoderFactory::EncoderSelectorInterface { + public: + MOCK_METHOD(void, + OnCurrentEncoder, + (const SdpVideoFormat& format), + (override)); + + MOCK_METHOD(absl::optional, + OnAvailableBitrate, + (const DataRate& rate), + (override)); + + MOCK_METHOD(absl::optional, + OnResolutionChange, + (const RenderResolution& resolution), + (override)); + + MOCK_METHOD(absl::optional, OnEncoderBroken, (), (override)); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_ENCODER_SELECTOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_media_stream_interface.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_media_stream_interface.h index 29521e6e23..209962358d 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_media_stream_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_media_stream_interface.h @@ -22,7 +22,7 @@ class MockAudioSource final : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { - return new MockAudioSource(); + return rtc::scoped_refptr(new MockAudioSource()); } MOCK_METHOD(void, @@ -55,7 +55,7 @@ class MockAudioSource final class MockAudioTrack final : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { - return new MockAudioTrack(); + return rtc::scoped_refptr(new MockAudioTrack()); } MOCK_METHOD(void, @@ -67,7 +67,7 @@ class MockAudioTrack final : public rtc::RefCountedObject { (ObserverInterface * observer), (override)); MOCK_METHOD(std::string, kind, (), (const, override)); - MOCK_METHOD(std::string, id, (), (const override)); + MOCK_METHOD(std::string, id, (), (const, override)); MOCK_METHOD(bool, enabled, (), (const, override)); MOCK_METHOD(bool, set_enabled, (bool enable), (override)); MOCK_METHOD(TrackState, state, (), (const, override)); @@ -84,6 +84,52 @@ class MockAudioTrack final : public rtc::RefCountedObject { MockAudioTrack() = default; }; +class MockMediaStream : public MediaStreamInterface { + public: + MOCK_METHOD(std::string, id, (), (const override)); + MOCK_METHOD(AudioTrackVector, GetAudioTracks, (), (override)); + MOCK_METHOD(VideoTrackVector, GetVideoTracks, (), (override)); + MOCK_METHOD(rtc::scoped_refptr, + FindAudioTrack, + (const std::string& track_id), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + FindVideoTrack, + (const std::string& track_id), + (override)); + MOCK_METHOD(bool, + AddTrack, + (rtc::scoped_refptr track), + (override)); + MOCK_METHOD(bool, + AddTrack, + (rtc::scoped_refptr track), + (override)); + MOCK_METHOD(bool, + RemoveTrack, + (rtc::scoped_refptr track), + (override)); + MOCK_METHOD(bool, + RemoveTrack, + (rtc::scoped_refptr track), + (override)); + // Old AddTrack/RemoveTrack methods - slated for removal + MOCK_METHOD(bool, AddTrack, (AudioTrackInterface * track), (override)); + MOCK_METHOD(bool, AddTrack, (VideoTrackInterface * track), (override)); + MOCK_METHOD(bool, RemoveTrack, (AudioTrackInterface * track), (override)); + MOCK_METHOD(bool, RemoveTrack, (VideoTrackInterface * track), (override)); + MOCK_METHOD(void, + RegisterObserver, + (ObserverInterface * observer), + (override)); + MOCK_METHOD(void, + UnregisterObserver, + (ObserverInterface * observer), + (override)); +}; + +static_assert(!std::is_abstract_v>, ""); + } // namespace webrtc #endif // API_TEST_MOCK_MEDIA_STREAM_INTERFACE_H_ diff --git 
a/TMessagesProj/jni/voip/webrtc/api/test/mock_packet_socket_factory.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_packet_socket_factory.h new file mode 100644 index 0000000000..7e59556385 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_packet_socket_factory.h @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_PACKET_SOCKET_FACTORY_H_ +#define API_TEST_MOCK_PACKET_SOCKET_FACTORY_H_ + +#include +#include + +#include "api/packet_socket_factory.h" +#include "test/gmock.h" + +namespace rtc { +class MockPacketSocketFactory : public PacketSocketFactory { + public: + MOCK_METHOD(AsyncPacketSocket*, + CreateUdpSocket, + (const SocketAddress&, uint16_t, uint16_t), + (override)); + MOCK_METHOD(AsyncListenSocket*, + CreateServerTcpSocket, + (const SocketAddress&, uint16_t, uint16_t, int opts), + (override)); + MOCK_METHOD(AsyncPacketSocket*, + CreateClientTcpSocket, + (const SocketAddress& local_address, + const SocketAddress&, + const ProxyInfo&, + const std::string&, + const PacketSocketTcpOptions&), + (override)); + MOCK_METHOD(std::unique_ptr, + CreateAsyncDnsResolver, + (), + (override)); +}; + +static_assert(!std::is_abstract_v, ""); + +} // namespace rtc + +#endif // API_TEST_MOCK_PACKET_SOCKET_FACTORY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h index c2f2435fb8..6bab595b5a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h @@ -23,7 +23,8 @@ class MockPeerConnectionFactoryInterface final : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { - return new MockPeerConnectionFactoryInterface(); + return rtc::scoped_refptr( + new MockPeerConnectionFactoryInterface()); } MOCK_METHOD(void, SetOptions, (const Options&), (override)); @@ -47,11 +48,11 @@ class MockPeerConnectionFactoryInterface final MOCK_METHOD(RtpCapabilities, GetRtpSenderCapabilities, (cricket::MediaType), - (const override)); + (const, override)); MOCK_METHOD(RtpCapabilities, GetRtpReceiverCapabilities, (cricket::MediaType), - (const override)); + (const, override)); MOCK_METHOD(rtc::scoped_refptr, CreateLocalMediaStream, (const std::string&), diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_peerconnectioninterface.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_peerconnectioninterface.h index cd67d32a10..ccc6ce46b1 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_peerconnectioninterface.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_peerconnectioninterface.h @@ -25,11 +25,10 @@ namespace webrtc { -class MockPeerConnectionInterface - : public rtc::RefCountedObject { +class MockPeerConnectionInterface : public webrtc::PeerConnectionInterface { public: static rtc::scoped_refptr Create() { - return new MockPeerConnectionInterface(); + return rtc::make_ref_counted(); } // PeerConnectionInterface @@ -48,9 +47,14 @@ class MockPeerConnectionInterface (rtc::scoped_refptr, const std::vector&), (override)); - MOCK_METHOD(bool, 
RemoveTrack, (RtpSenderInterface*), (override)); + MOCK_METHOD(RTCErrorOr>, + AddTrack, + (rtc::scoped_refptr, + const std::vector&, + const std::vector&), + (override)); MOCK_METHOD(RTCError, - RemoveTrackNew, + RemoveTrackOrError, (rtc::scoped_refptr), (override)); MOCK_METHOD(RTCErrorOr>, @@ -77,15 +81,15 @@ class MockPeerConnectionInterface MOCK_METHOD(std::vector>, GetSenders, (), - (const override)); + (const, override)); MOCK_METHOD(std::vector>, GetReceivers, (), - (const override)); + (const, override)); MOCK_METHOD(std::vector>, GetTransceivers, (), - (const override)); + (const, override)); MOCK_METHOD(bool, GetStats, (StatsObserver*, MediaStreamTrackInterface*, StatsOutputLevel), @@ -105,7 +109,7 @@ class MockPeerConnectionInterface MOCK_METHOD(rtc::scoped_refptr, GetSctpTransport, (), - (const override)); + (const, override)); MOCK_METHOD(RTCErrorOr>, CreateDataChannelOrError, (const std::string&, const DataChannelInit*), @@ -113,27 +117,27 @@ class MockPeerConnectionInterface MOCK_METHOD(const SessionDescriptionInterface*, local_description, (), - (const override)); + (const, override)); MOCK_METHOD(const SessionDescriptionInterface*, remote_description, (), - (const override)); + (const, override)); MOCK_METHOD(const SessionDescriptionInterface*, current_local_description, (), - (const override)); + (const, override)); MOCK_METHOD(const SessionDescriptionInterface*, current_remote_description, (), - (const override)); + (const, override)); MOCK_METHOD(const SessionDescriptionInterface*, pending_local_description, (), - (const override)); + (const, override)); MOCK_METHOD(const SessionDescriptionInterface*, pending_remote_description, (), - (const override)); + (const, override)); MOCK_METHOD(void, RestartIce, (), (override)); MOCK_METHOD(void, CreateOffer, @@ -200,7 +204,9 @@ class MockPeerConnectionInterface MOCK_METHOD(void, Close, (), (override)); }; -static_assert(!std::is_abstract::value, ""); +static_assert( + !std::is_abstract_v>, + ""); } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_rtp_transceiver.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_rtp_transceiver.h index a0a08c4772..1d21bce5eb 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_rtp_transceiver.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_rtp_transceiver.h @@ -19,11 +19,12 @@ namespace webrtc { -class MockRtpTransceiver final - : public rtc::RefCountedObject { +class MockRtpTransceiver : public RtpTransceiverInterface { public: + MockRtpTransceiver() = default; + static rtc::scoped_refptr Create() { - return new MockRtpTransceiver(); + return rtc::make_ref_counted(); } MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); @@ -70,14 +71,15 @@ class MockRtpTransceiver final HeaderExtensionsToOffer, (), (const, override)); + MOCK_METHOD(std::vector, + HeaderExtensionsNegotiated, + (), + (const, override)); MOCK_METHOD(webrtc::RTCError, SetOfferedRtpHeaderExtensions, (rtc::ArrayView header_extensions_to_offer), (override)); - - private: - MockRtpTransceiver() = default; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpreceiver.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpreceiver.h index a0b79e0bed..4bcf064b2a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpreceiver.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpreceiver.h @@ -24,20 +24,20 @@ class MockRtpReceiver : public rtc::RefCountedObject { MOCK_METHOD(rtc::scoped_refptr, track, (), - (const override)); + (const, override)); 
MOCK_METHOD(std::vector>, streams, (), - (const override)); - MOCK_METHOD(cricket::MediaType, media_type, (), (const override)); - MOCK_METHOD(std::string, id, (), (const override)); - MOCK_METHOD(RtpParameters, GetParameters, (), (const override)); + (const, override)); + MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); + MOCK_METHOD(std::string, id, (), (const, override)); + MOCK_METHOD(RtpParameters, GetParameters, (), (const, override)); MOCK_METHOD(void, SetObserver, (RtpReceiverObserverInterface*), (override)); MOCK_METHOD(void, SetJitterBufferMinimumDelay, (absl::optional), (override)); - MOCK_METHOD(std::vector, GetSources, (), (const override)); + MOCK_METHOD(std::vector, GetSources, (), (const, override)); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpsender.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpsender.h index f12a6185a6..e2351f87fe 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpsender.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpsender.h @@ -11,6 +11,7 @@ #ifndef API_TEST_MOCK_RTPSENDER_H_ #define API_TEST_MOCK_RTPSENDER_H_ +#include #include #include @@ -19,29 +20,55 @@ namespace webrtc { -class MockRtpSender : public rtc::RefCountedObject { +class MockRtpSender : public RtpSenderInterface { public: + static rtc::scoped_refptr Create() { + return rtc::make_ref_counted(); + } + MOCK_METHOD(bool, SetTrack, (MediaStreamTrackInterface*), (override)); MOCK_METHOD(rtc::scoped_refptr, track, (), + (const, override)); + MOCK_METHOD(rtc::scoped_refptr, + dtls_transport, + (), (const override)); - MOCK_METHOD(uint32_t, ssrc, (), (const override)); - MOCK_METHOD(cricket::MediaType, media_type, (), (const override)); - MOCK_METHOD(std::string, id, (), (const override)); - MOCK_METHOD(std::vector, stream_ids, (), (const override)); + MOCK_METHOD(uint32_t, ssrc, (), (const, override)); + MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); + MOCK_METHOD(std::string, id, (), (const, override)); + MOCK_METHOD(std::vector, stream_ids, (), (const, override)); + MOCK_METHOD(void, SetStreams, (const std::vector&), (override)); MOCK_METHOD(std::vector, init_send_encodings, (), - (const override)); - MOCK_METHOD(RtpParameters, GetParameters, (), (const override)); + (const, override)); + MOCK_METHOD(RtpParameters, GetParameters, (), (const, override)); MOCK_METHOD(RTCError, SetParameters, (const RtpParameters&), (override)); MOCK_METHOD(rtc::scoped_refptr, GetDtmfSender, (), - (const override)); + (const, override)); + MOCK_METHOD(void, + SetFrameEncryptor, + (rtc::scoped_refptr), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + GetFrameEncryptor, + (), + (const, override)); + MOCK_METHOD(void, + SetEncoderToPacketizerFrameTransformer, + (rtc::scoped_refptr), + (override)); + MOCK_METHOD(void, + SetEncoderSelector, + (std::unique_ptr), + (override)); }; +static_assert(!std::is_abstract_v>, ""); } // namespace webrtc #endif // API_TEST_MOCK_RTPSENDER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_session_description_interface.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_session_description_interface.h new file mode 100644 index 0000000000..f0346ceb11 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_session_description_interface.h @@ -0,0 +1,56 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_SESSION_DESCRIPTION_INTERFACE_H_ +#define API_TEST_MOCK_SESSION_DESCRIPTION_INTERFACE_H_ + +#include +#include +#include +#include + +#include "api/jsep.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockSessionDescriptionInterface : public SessionDescriptionInterface { + public: + MOCK_METHOD(std::unique_ptr, + Clone, + (), + (const, override)); + MOCK_METHOD(cricket::SessionDescription*, description, (), (override)); + MOCK_METHOD(const cricket::SessionDescription*, + description, + (), + (const, override)); + MOCK_METHOD(std::string, session_id, (), (const, override)); + MOCK_METHOD(std::string, session_version, (), (const, override)); + MOCK_METHOD(SdpType, GetType, (), (const, override)); + MOCK_METHOD(std::string, type, (), (const, override)); + MOCK_METHOD(bool, AddCandidate, (const IceCandidateInterface*), (override)); + MOCK_METHOD(size_t, + RemoveCandidates, + (const std::vector&), + (override)); + MOCK_METHOD(size_t, number_of_mediasections, (), (const, override)); + MOCK_METHOD(const IceCandidateCollection*, + candidates, + (size_t), + (const, override)); + MOCK_METHOD(bool, ToString, (std::string*), (const, override)); +}; + +static_assert(!std::is_abstract_v); + +} // namespace webrtc + +#endif // API_TEST_MOCK_SESSION_DESCRIPTION_INTERFACE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_transformable_video_frame.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_transformable_video_frame.h index 36798b5d73..5cebcaba80 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_transformable_video_frame.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_transformable_video_frame.h @@ -21,9 +21,9 @@ namespace webrtc { class MockTransformableVideoFrame : public webrtc::TransformableVideoFrameInterface { public: - MOCK_METHOD(rtc::ArrayView, GetData, (), (const override)); + MOCK_METHOD(rtc::ArrayView, GetData, (), (const, override)); MOCK_METHOD(void, SetData, (rtc::ArrayView data), (override)); - MOCK_METHOD(uint32_t, GetTimestamp, (), (const override)); + MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override)); MOCK_METHOD(uint32_t, GetSsrc, (), (const, override)); MOCK_METHOD(bool, IsKeyFrame, (), (const, override)); MOCK_METHOD(std::vector, GetAdditionalData, (), (const, override)); diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_video_decoder.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_video_decoder.h index b6d53f8d8d..34f732ca4d 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_video_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_video_decoder.h @@ -11,6 +11,8 @@ #ifndef API_TEST_MOCK_VIDEO_DECODER_H_ #define API_TEST_MOCK_VIDEO_DECODER_H_ +#include + #include "api/video_codecs/video_decoder.h" #include "test/gmock.h" @@ -43,6 +45,8 @@ class MockVideoDecoder : public VideoDecoder { ON_CALL(*this, Configure).WillByDefault(testing::Return(true)); } + ~MockVideoDecoder() override { Destruct(); } + MOCK_METHOD(bool, Configure, (const Settings& settings), (override)); MOCK_METHOD(int32_t, Decode, @@ -55,6 +59,10 @@ class MockVideoDecoder : public VideoDecoder { (DecodedImageCallback * callback), (override)); MOCK_METHOD(int32_t, Release, (), (override)); + + // Special utility method that allows a test to monitor/verify when + // destruction of the decoder instance occurs. 
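// A usage sketch in comment form (test scaffolding assumed; only gmock's
// standard EXPECT_CALL is used):
//
//   auto decoder = std::make_unique<MockVideoDecoder>();
//   EXPECT_CALL(*decoder, Destruct());
//   decoder.reset();  // ~MockVideoDecoder() calls Destruct(), satisfying it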
+ MOCK_METHOD(void, Destruct, (), ()); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_video_decoder_factory.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_video_decoder_factory.h index 98a5d40eb6..6150d9f8b5 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_video_decoder_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_video_decoder_factory.h @@ -15,6 +15,7 @@ #include #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_decoder_factory.h" #include "test/gmock.h" diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_video_encoder_factory.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_video_encoder_factory.h index 79851096b7..02ee7aa15e 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_video_encoder_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_video_encoder_factory.h @@ -15,6 +15,7 @@ #include #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" #include "test/gmock.h" diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_video_track.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_video_track.h index 58a531bf42..705d13509b 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_video_track.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_video_track.h @@ -24,7 +24,7 @@ class MockVideoTrack final : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { - return new MockVideoTrack(); + return rtc::scoped_refptr(new MockVideoTrack()); } // NotifierInterface diff --git a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.cc b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.cc index 9c148a069b..236e2f0e17 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.cc @@ -7,13 +7,40 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ +#include "api/test/network_emulation_manager.h" + #include -#include "api/test/network_emulation_manager.h" #include "call/simulated_network.h" +#include "rtc_base/checks.h" namespace webrtc { +bool AbslParseFlag(absl::string_view text, TimeMode* mode, std::string* error) { + if (text == "realtime") { + *mode = TimeMode::kRealTime; + return true; + } + if (text == "simulated") { + *mode = TimeMode::kSimulated; + return true; + } + *error = + "Unknown value for TimeMode enum. 
Options are 'realtime' or 'simulated'"; + return false; +} + +std::string AbslUnparseFlag(TimeMode mode) { + switch (mode) { + case TimeMode::kRealTime: + return "realtime"; + case TimeMode::kSimulated: + return "simulated"; + } + RTC_CHECK_NOTREACHED(); + return "unknown"; +} + NetworkEmulationManager::SimulatedNetworkNode::Builder& NetworkEmulationManager::SimulatedNetworkNode::Builder::config( BuiltInNetworkBehaviorConfig config) { @@ -74,4 +101,22 @@ NetworkEmulationManager::SimulatedNetworkNode::Builder::Build( res.node = net->CreateEmulatedNode(std::move(behavior)); return res; } + +std::pair +NetworkEmulationManager::CreateEndpointPairWithTwoWayRoutes( + const BuiltInNetworkBehaviorConfig& config) { + auto* alice_node = CreateEmulatedNode(config); + auto* bob_node = CreateEmulatedNode(config); + + auto* alice_endpoint = CreateEndpoint(EmulatedEndpointConfig()); + auto* bob_endpoint = CreateEndpoint(EmulatedEndpointConfig()); + + CreateRoute(alice_endpoint, {alice_node}, bob_endpoint); + CreateRoute(bob_endpoint, {bob_node}, alice_endpoint); + + return { + CreateEmulatedNetworkManagerInterface({alice_endpoint}), + CreateEmulatedNetworkManagerInterface({bob_endpoint}), + }; +} } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.h b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.h index b5c68af5f3..427ad6d4d4 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.h @@ -14,6 +14,7 @@ #include #include #include +#include #include #include "api/array_view.h" @@ -148,6 +149,16 @@ class EmulatedNetworkManagerInterface { enum class TimeMode { kRealTime, kSimulated }; +// Called implicitly when parsing an ABSL_FLAG of type TimeMode. +// from the command line flag value `text`. +// Returns `true` and sets `*mode` on success; +// returns `false` and sets `*error` on failure. +bool AbslParseFlag(absl::string_view text, TimeMode* mode, std::string* error); + +// AbslUnparseFlag returns a textual flag value corresponding to the TimeMode +// `mode`. +std::string AbslUnparseFlag(TimeMode mode); + // Provides an API for creating and configuring emulated network layer. // All objects returned by this API are owned by NetworkEmulationManager itself // and will be deleted when manager will be deleted. @@ -187,6 +198,11 @@ class NetworkEmulationManager { // Returns a mode in which underlying time controller operates. virtual TimeMode time_mode() const = 0; + // Creates an emulated network node, which represents ideal network with + // unlimited capacity, no delay and no packet loss. + EmulatedNetworkNode* CreateUnconstrainedEmulatedNode() { + return CreateEmulatedNode(BuiltInNetworkBehaviorConfig()); + } // Creates an emulated network node, which represents single network in // the emulated network layer. Uses default implementation on network behavior // which can be configured with `config`. `random_seed` can be provided to @@ -322,6 +338,11 @@ class NetworkEmulationManager { // - GetPeerEndpoint() - the endpoint that is "connected to the internet". virtual EmulatedTURNServerInterface* CreateTURNServer( EmulatedTURNServerConfig config) = 0; + + // Create a pair of EmulatedNetworkManagerInterfaces connected to each other. 
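// A usage sketch in comment form (the config field shown is an assumption
// about BuiltInNetworkBehaviorConfig, not something this header prescribes):
//
//   BuiltInNetworkBehaviorConfig config;
//   config.queue_delay_ms = 100;  // symmetric delay in both directions
//   auto network_pair =
//       emulation_manager->CreateEndpointPairWithTwoWayRoutes(config);
//   EmulatedNetworkManagerInterface* alice_net = network_pair.first;
//   EmulatedNetworkManagerInterface* bob_net = network_pair.second;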
+ std::pair + CreateEndpointPairWithTwoWayRoutes( + const BuiltInNetworkBehaviorConfig& config); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/peerconnection_quality_test_fixture.cc b/TMessagesProj/jni/voip/webrtc/api/test/peerconnection_quality_test_fixture.cc new file mode 100644 index 0000000000..89ac366e22 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/peerconnection_quality_test_fixture.cc @@ -0,0 +1,257 @@ +/* + * Copyright 2022 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/test/peerconnection_quality_test_fixture.h" + +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/test/video/video_frame_writer.h" +#include "rtc_base/checks.h" +#include "rtc_base/strings/string_builder.h" +#include "test/pc/e2e/analyzer/video/video_dumping.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { +namespace webrtc_pc_e2e { +namespace { + +using VideoCodecConfig = ::webrtc::webrtc_pc_e2e:: + PeerConnectionE2EQualityTestFixture::VideoCodecConfig; +using VideoSubscription = ::webrtc::webrtc_pc_e2e:: + PeerConnectionE2EQualityTestFixture::VideoSubscription; +using VideoResolution = ::webrtc::webrtc_pc_e2e:: + PeerConnectionE2EQualityTestFixture::VideoResolution; + +std::string SpecToString( + PeerConnectionE2EQualityTestFixture::VideoResolution::VideoResolution::Spec + spec) { + switch (spec) { + case PeerConnectionE2EQualityTestFixture::VideoResolution::Spec::kNone: + return "None"; + case PeerConnectionE2EQualityTestFixture::VideoResolution::Spec:: + kMaxFromSender: + return "MaxFromSender"; + } +} + +void AppendResolution(const VideoResolution& resolution, + rtc::StringBuilder& builder) { + builder << "_" << resolution.width() << "x" << resolution.height() << "_" + << resolution.fps(); +} + +} // namespace + +PeerConnectionE2EQualityTestFixture::VideoResolution::VideoResolution( + size_t width, + size_t height, + int32_t fps) + : width_(width), height_(height), fps_(fps), spec_(Spec::kNone) {} +PeerConnectionE2EQualityTestFixture::VideoResolution::VideoResolution(Spec spec) + : width_(0), height_(0), fps_(0), spec_(spec) {} + +bool PeerConnectionE2EQualityTestFixture::VideoResolution::operator==( + const VideoResolution& other) const { + if (spec_ != Spec::kNone && spec_ == other.spec_) { + // If there is some particular spec set, then it doesn't matter what + // values we have in other fields. 
+ return true; + } + return width_ == other.width_ && height_ == other.height_ && + fps_ == other.fps_ && spec_ == other.spec_; +} + +std::string PeerConnectionE2EQualityTestFixture::VideoResolution::ToString() + const { + rtc::StringBuilder out; + out << "{ width=" << width_ << ", height=" << height_ << ", fps=" << fps_ + << ", spec=" << SpecToString(spec_) << " }"; + return out.Release(); +} + +bool PeerConnectionE2EQualityTestFixture::VideoSubscription::operator==( + const VideoSubscription& other) const { + return default_resolution_ == other.default_resolution_ && + peers_resolution_ == other.peers_resolution_; +} + +absl::optional +PeerConnectionE2EQualityTestFixture::VideoSubscription::GetMaxResolution( + rtc::ArrayView video_configs) { + std::vector resolutions; + for (const auto& video_config : video_configs) { + resolutions.push_back(video_config.GetResolution()); + } + return GetMaxResolution(resolutions); +} + +absl::optional +PeerConnectionE2EQualityTestFixture::VideoSubscription::GetMaxResolution( + rtc::ArrayView resolutions) { + if (resolutions.empty()) { + return absl::nullopt; + } + + VideoResolution max_resolution; + for (const VideoResolution& resolution : resolutions) { + if (max_resolution.width() < resolution.width()) { + max_resolution.set_width(resolution.width()); + } + if (max_resolution.height() < resolution.height()) { + max_resolution.set_height(resolution.height()); + } + if (max_resolution.fps() < resolution.fps()) { + max_resolution.set_fps(resolution.fps()); + } + } + return max_resolution; +} + +std::string PeerConnectionE2EQualityTestFixture::VideoSubscription::ToString() + const { + rtc::StringBuilder out; + out << "{ default_resolution_=["; + if (default_resolution_.has_value()) { + out << default_resolution_->ToString(); + } else { + out << "undefined"; + } + out << "], {"; + for (const auto& [peer_name, resolution] : peers_resolution_) { + out << "[" << peer_name << ": " << resolution.ToString() << "], "; + } + out << "} }"; + return out.Release(); +} + +PeerConnectionE2EQualityTestFixture::VideoDumpOptions::VideoDumpOptions( + absl::string_view output_directory, + int sampling_modulo, + bool export_frame_ids, + std::function( + absl::string_view file_name_prefix, + const VideoResolution& resolution)> video_frame_writer_factory) + : output_directory_(output_directory), + sampling_modulo_(sampling_modulo), + export_frame_ids_(export_frame_ids), + video_frame_writer_factory_(video_frame_writer_factory) { + RTC_CHECK_GT(sampling_modulo, 0); +} + +PeerConnectionE2EQualityTestFixture::VideoDumpOptions::VideoDumpOptions( + absl::string_view output_directory, + bool export_frame_ids) + : VideoDumpOptions(output_directory, + kDefaultSamplingModulo, + export_frame_ids) {} + +std::unique_ptr PeerConnectionE2EQualityTestFixture:: + VideoDumpOptions::CreateInputDumpVideoFrameWriter( + absl::string_view stream_label, + const VideoResolution& resolution) const { + std::unique_ptr writer = video_frame_writer_factory_( + GetInputDumpFileName(stream_label, resolution), resolution); + absl::optional frame_ids_file = + GetInputFrameIdsDumpFileName(stream_label, resolution); + if (frame_ids_file.has_value()) { + writer = CreateVideoFrameWithIdsWriter(std::move(writer), *frame_ids_file); + } + return writer; +} + +std::unique_ptr PeerConnectionE2EQualityTestFixture:: + VideoDumpOptions::CreateOutputDumpVideoFrameWriter( + absl::string_view stream_label, + absl::string_view receiver, + const VideoResolution& resolution) const { + std::unique_ptr writer = 
video_frame_writer_factory_( + GetOutputDumpFileName(stream_label, receiver, resolution), resolution); + absl::optional frame_ids_file = + GetOutputFrameIdsDumpFileName(stream_label, receiver, resolution); + if (frame_ids_file.has_value()) { + writer = CreateVideoFrameWithIdsWriter(std::move(writer), *frame_ids_file); + } + return writer; +} + +std::unique_ptr PeerConnectionE2EQualityTestFixture:: + VideoDumpOptions::Y4mVideoFrameWriterFactory( + absl::string_view file_name_prefix, + const VideoResolution& resolution) { + return std::make_unique( + std::string(file_name_prefix) + ".y4m", resolution.width(), + resolution.height(), resolution.fps()); +} + +std::string +PeerConnectionE2EQualityTestFixture::VideoDumpOptions::GetInputDumpFileName( + absl::string_view stream_label, + const VideoResolution& resolution) const { + rtc::StringBuilder file_name; + file_name << stream_label; + AppendResolution(resolution, file_name); + return test::JoinFilename(output_directory_, file_name.Release()); +} + +absl::optional PeerConnectionE2EQualityTestFixture:: + VideoDumpOptions::GetInputFrameIdsDumpFileName( + absl::string_view stream_label, + const VideoResolution& resolution) const { + if (!export_frame_ids_) { + return absl::nullopt; + } + return GetInputDumpFileName(stream_label, resolution) + ".frame_ids.txt"; +} + +std::string +PeerConnectionE2EQualityTestFixture::VideoDumpOptions::GetOutputDumpFileName( + absl::string_view stream_label, + absl::string_view receiver, + const VideoResolution& resolution) const { + rtc::StringBuilder file_name; + file_name << stream_label << "_" << receiver; + AppendResolution(resolution, file_name); + return test::JoinFilename(output_directory_, file_name.Release()); +} + +absl::optional PeerConnectionE2EQualityTestFixture:: + VideoDumpOptions::GetOutputFrameIdsDumpFileName( + absl::string_view stream_label, + absl::string_view receiver, + const VideoResolution& resolution) const { + if (!export_frame_ids_) { + return absl::nullopt; + } + return GetOutputDumpFileName(stream_label, receiver, resolution) + + ".frame_ids.txt"; +} + +std::string PeerConnectionE2EQualityTestFixture::VideoDumpOptions::ToString() + const { + rtc::StringBuilder out; + out << "{ output_directory_=" << output_directory_ + << ", sampling_modulo_=" << sampling_modulo_ + << ", export_frame_ids_=" << export_frame_ids_ << " }"; + return out.Release(); +} + +PeerConnectionE2EQualityTestFixture::VideoConfig::VideoConfig( + const VideoResolution& resolution) + : width(resolution.width()), + height(resolution.height()), + fps(resolution.fps()) { + RTC_CHECK(resolution.IsRegular()); +} + +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/peerconnection_quality_test_fixture.h b/TMessagesProj/jni/voip/webrtc/api/test/peerconnection_quality_test_fixture.h index 303671c12d..21b21b4dfc 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/peerconnection_quality_test_fixture.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/peerconnection_quality_test_fixture.h @@ -19,7 +19,9 @@ #include "absl/memory/memory.h" #include "absl/strings/string_view.h" #include "absl/types/optional.h" +#include "api/array_view.h" #include "api/async_resolver_factory.h" +#include "api/audio/audio_mixer.h" #include "api/call/call_factory_interface.h" #include "api/fec_controller.h" #include "api/function_view.h" @@ -34,6 +36,7 @@ #include "api/test/simulated_network.h" #include "api/test/stats_observer_interface.h" #include "api/test/track_id_stream_info_map.h" +#include 
"api/test/video/video_frame_writer.h" #include "api/test/video_quality_analyzer_interface.h" #include "api/transport/network_control.h" #include "api/units/time_delta.h" @@ -41,6 +44,7 @@ #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" #include "media/base/media_constants.h" +#include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/network.h" #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/ssl_certificate.h" @@ -124,7 +128,12 @@ class PeerConnectionE2EQualityTestFixture { std::vector slides_yuv_file_names; }; - // Config for Vp8 simulcast or Vp9 SVC testing. + // Config for Vp8 simulcast or non-standard Vp9 SVC testing. + // + // To configure standard SVC setting, use `scalability_mode` in the + // `encoding_params` array. + // This configures Vp9 SVC by requesting simulcast layers, the request is + // internally converted to a request for SVC layers. // // SVC support is limited: // During SVC testing there is no SFU, so framework will try to emulate SFU @@ -141,43 +150,186 @@ class PeerConnectionE2EQualityTestFixture { : simulcast_streams_count(simulcast_streams_count) { RTC_CHECK_GT(simulcast_streams_count, 1); } - VideoSimulcastConfig(int simulcast_streams_count, int target_spatial_index) - : simulcast_streams_count(simulcast_streams_count), - target_spatial_index(target_spatial_index) { - RTC_CHECK_GT(simulcast_streams_count, 1); - RTC_CHECK_GE(target_spatial_index, 0); - RTC_CHECK_LT(target_spatial_index, simulcast_streams_count); - } // Specified amount of simulcast streams/SVC layers, depending on which // encoder is used. int simulcast_streams_count; - // Specifies spatial index of the video stream to analyze. + }; + + // Configuration for the emulated Selective Forward Unit (SFU) + // + // The framework can optionally filter out frames that are decoded + // using an emulated SFU. + // When using simulcast or SVC, it's not always desirable to receive + // all frames. In a real world call, a SFU will only forward a subset + // of the frames. + // The emulated SFU is not able to change its configuration dynamically, + // if adaptation happens during the call, layers may be dropped and the + // analyzer won't receive the required data which will cause wrong results or + // test failures. + struct EmulatedSFUConfig { + EmulatedSFUConfig() {} + explicit EmulatedSFUConfig(int target_layer_index) + : target_layer_index(target_layer_index) { + RTC_CHECK_GE(target_layer_index, 0); + } + + EmulatedSFUConfig(absl::optional target_layer_index, + absl::optional target_temporal_index) + : target_layer_index(target_layer_index), + target_temporal_index(target_temporal_index) { + RTC_CHECK_GE(target_temporal_index.value_or(0), 0); + if (target_temporal_index) + RTC_CHECK_GE(*target_temporal_index, 0); + } + + // Specifies simulcast or spatial index of the video stream to analyze. // There are 2 cases: - // 1. simulcast encoder is used: - // in such case `target_spatial_index` will specify the index of + // 1. simulcast encoding is used: + // in such case `target_layer_index` will specify the index of // simulcast stream, that should be analyzed. Other streams will be // dropped. - // 2. SVC encoder is used: - // in such case `target_spatial_index` will specify the top interesting + // 2. SVC encoding is used: + // in such case `target_layer_index` will specify the top interesting // spatial layer and all layers below, including target one will be // processed. All layers above target one will be dropped. 
- // If not specified than whatever stream will be received will be analyzed. - // It requires Selective Forwarding Unit (SFU) to be configured in the - // network. - absl::optional target_spatial_index; - - // Encoding parameters per simulcast layer. If not empty, `encoding_params` - // size have to be equal to `simulcast_streams_count`. Will be used to set - // transceiver send encoding params for simulcast layers. Applicable only - // for codecs that support simulcast (ex. Vp8) and will be ignored - // otherwise. RtpEncodingParameters::rid may be changed by fixture - // implementation to ensure signaling correctness. - std::vector encoding_params; + // If not specified then all streams will be received and analyzed. + // When set, it instructs the framework to create an emulated Selective + // Forwarding Unit (SFU) that will propagate only the requested layers. + absl::optional target_layer_index; + // Specifies the index of the maximum temporal unit to keep. + // If not specified then all temporal layers will be received and analyzed. + // When set, it instructs the framework to create an emulated Selective + // Forwarding Unit (SFU) that will propagate only up to the requested layer. + absl::optional target_temporal_index; + }; + + class VideoResolution { + public: + // Determines special resolutions, which can't be expressed in terms of + // width, height and fps. + enum class Spec { + // No extra spec set. It describes a regular resolution described by + // width, height and fps. + kNone, + // Describes resolution which contains max value among all sender's + // video streams in each dimension (width, height, fps). + kMaxFromSender + }; + + VideoResolution(size_t width, size_t height, int32_t fps); + explicit VideoResolution(Spec spec = Spec::kNone); + + bool operator==(const VideoResolution& other) const; + bool operator!=(const VideoResolution& other) const { + return !(*this == other); + } + + size_t width() const { return width_; } + void set_width(size_t width) { width_ = width; } + size_t height() const { return height_; } + void set_height(size_t height) { height_ = height; } + int32_t fps() const { return fps_; } + void set_fps(int32_t fps) { fps_ = fps; } + + // Returns if it is a regular resolution or not. The resolution is regular + // if it's spec is `Spec::kNone`. + bool IsRegular() const { return spec_ == Spec::kNone; } + + std::string ToString() const; + + private: + size_t width_ = 0; + size_t height_ = 0; + int32_t fps_ = 0; + Spec spec_ = Spec::kNone; + }; + + class VideoDumpOptions { + public: + static constexpr int kDefaultSamplingModulo = 1; + + // output_directory - the output directory where stream will be dumped. The + // output files' names will be constructed as + // __. for output dumps + // and _. for input dumps. + // By default is "y4m". Resolution is in the format + // x_. + // sampling_modulo - the module for the video frames to be dumped. Modulo + // equals X means every Xth frame will be written to the dump file. The + // value must be greater than 0. (Default: 1) + // export_frame_ids - specifies if frame ids should be exported together + // with content of the stream. If true, an output file with the same name as + // video dump and suffix ".frame_ids.txt" will be created. It will contain + // the frame ids in the same order as original frames in the output + // file with stream content. File will contain one frame id per line. 
+ // (Default: false) + // `video_frame_writer_factory` - factory function to create a video frame + // writer for input and output video files. (Default: Y4M video writer + // factory). + explicit VideoDumpOptions( + absl::string_view output_directory, + int sampling_modulo = kDefaultSamplingModulo, + bool export_frame_ids = false, + std::function( + absl::string_view file_name_prefix, + const VideoResolution& resolution)> video_frame_writer_factory = + Y4mVideoFrameWriterFactory); + VideoDumpOptions(absl::string_view output_directory, bool export_frame_ids); + + VideoDumpOptions(const VideoDumpOptions&) = default; + VideoDumpOptions& operator=(const VideoDumpOptions&) = default; + VideoDumpOptions(VideoDumpOptions&&) = default; + VideoDumpOptions& operator=(VideoDumpOptions&&) = default; + + std::string output_directory() const { return output_directory_; } + int sampling_modulo() const { return sampling_modulo_; } + bool export_frame_ids() const { return export_frame_ids_; } + + std::unique_ptr CreateInputDumpVideoFrameWriter( + absl::string_view stream_label, + const VideoResolution& resolution) const; + + std::unique_ptr CreateOutputDumpVideoFrameWriter( + absl::string_view stream_label, + absl::string_view receiver, + const VideoResolution& resolution) const; + + std::string ToString() const; + + private: + static std::unique_ptr Y4mVideoFrameWriterFactory( + absl::string_view file_name_prefix, + const VideoResolution& resolution); + std::string GetInputDumpFileName(absl::string_view stream_label, + const VideoResolution& resolution) const; + // Returns file name for input frame ids dump if `export_frame_ids()` is + // true, absl::nullopt otherwise. + absl::optional GetInputFrameIdsDumpFileName( + absl::string_view stream_label, + const VideoResolution& resolution) const; + std::string GetOutputDumpFileName(absl::string_view stream_label, + absl::string_view receiver, + const VideoResolution& resolution) const; + // Returns file name for output frame ids dump if `export_frame_ids()` is + // true, absl::nullopt otherwise. + absl::optional GetOutputFrameIdsDumpFileName( + absl::string_view stream_label, + absl::string_view receiver, + const VideoResolution& resolution) const; + + std::string output_directory_; + int sampling_modulo_ = 1; + bool export_frame_ids_ = false; + std::function( + absl::string_view file_name_prefix, + const VideoResolution& resolution)> + video_frame_writer_factory_; }; // Contains properties of single video stream. struct VideoConfig { + explicit VideoConfig(const VideoResolution& resolution); VideoConfig(size_t width, size_t height, int32_t fps) : width(width), height(height), fps(fps) {} VideoConfig(std::string stream_label, @@ -190,10 +342,14 @@ class PeerConnectionE2EQualityTestFixture { stream_label(std::move(stream_label)) {} // Video stream width. - const size_t width; + size_t width; // Video stream height. - const size_t height; - const int32_t fps; + size_t height; + int32_t fps; + VideoResolution GetResolution() const { + return VideoResolution(width, height, fps); + } + // Have to be unique among all specified configs for all peers in the call. // Will be auto generated if omitted. absl::optional stream_label; @@ -209,67 +365,42 @@ class PeerConnectionE2EQualityTestFixture { // but only on non-lossy networks. See more in documentation to // VideoSimulcastConfig. absl::optional simulcast_config; + // Configuration for the emulated Selective Forward Unit (SFU). 
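// ---------------------------------------------------------------------------
// Illustrative sketch (editorial aside, not part of the patch): how the new
// EmulatedSFUConfig and VideoDumpOptions introduced above are wired into a
// VideoConfig. Only types and fields declared in this header are used; the
// dump directory, layer index and resolution are placeholder values.
#include "api/test/peerconnection_quality_test_fixture.h"

namespace {
using Fixture = webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture;

Fixture::VideoConfig MakeVideoConfigSketch() {
  Fixture::VideoConfig video(/*width=*/1280, /*height=*/720, /*fps=*/30);
  // Three simulcast streams, as before.
  video.simulcast_config = Fixture::VideoSimulcastConfig(3);
  // Replacement for the removed VideoSimulcastConfig::target_spatial_index:
  // the emulated SFU forwards simulcast/spatial layer 1 (and, for SVC, the
  // layers below it) and drops everything above it before analysis.
  video.emulated_sfu_config =
      Fixture::EmulatedSFUConfig(/*target_layer_index=*/1);
  // Dump what the sender captured, keeping every 10th frame plus frame ids.
  video.input_dump_options = Fixture::VideoDumpOptions(
      "/tmp/pc_e2e_dumps", /*sampling_modulo=*/10, /*export_frame_ids=*/true);
  return video;
}
}  // namespace
// ---------------------------------------------------------------------------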
+ absl::optional emulated_sfu_config; + // Encoding parameters for both singlecast and per simulcast layer. + // If singlecast is used, if not empty, a single value can be provided. + // If simulcast is used, if not empty, `encoding_params` size have to be + // equal to `simulcast_config.simulcast_streams_count`. Will be used to set + // transceiver send encoding params for each layer. + // RtpEncodingParameters::rid may be changed by fixture implementation to + // ensure signaling correctness. + std::vector encoding_params; // Count of temporal layers for video stream. This value will be set into // each RtpEncodingParameters of RtpParameters of corresponding // RtpSenderInterface for this video stream. absl::optional temporal_layers_count; - // Sets the maximum encode bitrate in bps. If this value is not set, the - // encoder will be capped at an internal maximum value around 2 Mbps - // depending on the resolution. This means that it will never be able to - // utilize a high bandwidth link. - absl::optional max_encode_bitrate_bps; - // Sets the minimum encode bitrate in bps. If this value is not set, the - // encoder will use an internal minimum value. Please note that if this - // value is set higher than the bandwidth of the link, the encoder will - // generate more data than the link can handle regardless of the bandwidth - // estimation. - absl::optional min_encode_bitrate_bps; - // If specified the input stream will be also copied to specified file. - // It is actually one of the test's output file, which contains copy of what - // was captured during the test for this video stream on sender side. - // It is useful when generator is used as input. - absl::optional input_dump_file_name; - // Used only if `input_dump_file_name` is set. Specifies the module for the - // video frames to be dumped. Modulo equals X means every Xth frame will be - // written to the dump file. The value must be greater than 0. - int input_dump_sampling_modulo = 1; - // If specified this file will be used as output on the receiver side for - // this stream. - // - // If multiple output streams will be produced by this stream (e.g. when the - // stream represented by this `VideoConfig` is received by more than one - // peer), output files will be appended with receiver names. If the second - // and other receivers will be added in the middle of the call after the - // first frame for this stream has been already written to the output file, - // then only dumps for newly added peers will be appended with receiver - // name, the dump for the first receiver will have name equal to the - // specified one. For example: - // * If we have peers A and B and A has `VideoConfig` V_a with - // V_a.output_dump_file_name = "/foo/a_output.yuv", then the stream - // related to V_a will be written into "/foo/a_output.yuv". - // * If we have peers A, B and C and A has `VideoConfig` V_a with - // V_a.output_dump_file_name = "/foo/a_output.yuv", then the stream - // related to V_a will be written for peer B into "/foo/a_output.yuv.B" - // and for peer C into "/foo/a_output.yuv.C" - // * If we have peers A and B and A has `VideoConfig` V_a with - // V_a.output_dump_file_name = "/foo/a_output.yuv", then if after B - // received the first frame related to V_a peer C joined the call, then - // the stream related to V_a will be written for peer B into - // "/foo/a_output.yuv" and for peer C into "/foo/a_output.yuv.C" - // - // The produced files contains what was rendered for this video stream on - // receiver side. 
- absl::optional output_dump_file_name; - // Used only if `output_dump_file_name` is set. Specifies the module for the - // video frames to be dumped. Modulo equals X means every Xth frame will be - // written to the dump file. The value must be greater than 0. - int output_dump_sampling_modulo = 1; + // If specified defines how input should be dumped. It is actually one of + // the test's output file, which contains copy of what was captured during + // the test for this video stream on sender side. It is useful when + // generator is used as input. + absl::optional input_dump_options; + // If specified defines how output should be dumped on the receiver side for + // this stream. The produced files contain what was rendered for this video + // stream on receiver side per each receiver. + absl::optional output_dump_options; + // If set to true uses fixed frame rate while dumping output video to the + // file. Requested `VideoSubscription::fps()` will be used as frame rate. + bool output_dump_use_fixed_framerate = false; // If true will display input and output video on the user's screen. bool show_on_screen = false; // If specified, determines a sync group to which this video stream belongs. // According to bugs.webrtc.org/4762 WebRTC supports synchronization only // for pair of single audio and single video stream. absl::optional sync_group; + // If specified, it will be set into RtpParameters of corresponding + // RtpSenderInterface for this video stream. + // Note that this setting takes precedence over `content_hint`. + absl::optional degradation_preference; }; // Contains properties for audio in the call. @@ -325,6 +456,75 @@ class PeerConnectionE2EQualityTestFixture { std::map required_params; }; + // Subscription to the remote video streams. It declares which remote stream + // peer should receive and in which resolution (width x height x fps). + class VideoSubscription { + public: + // Returns the resolution constructed as maximum from all resolution + // dimensions: width, height and fps. + static absl::optional GetMaxResolution( + rtc::ArrayView video_configs); + static absl::optional GetMaxResolution( + rtc::ArrayView resolutions); + + bool operator==(const VideoSubscription& other) const; + bool operator!=(const VideoSubscription& other) const { + return !(*this == other); + } + + // Subscribes receiver to all streams sent by the specified peer with + // specified resolution. It will override any resolution that was used in + // `SubscribeToAll` independently from methods call order. + VideoSubscription& SubscribeToPeer( + absl::string_view peer_name, + VideoResolution resolution = + VideoResolution(VideoResolution::Spec::kMaxFromSender)) { + peers_resolution_[std::string(peer_name)] = resolution; + return *this; + } + + // Subscribes receiver to the all sent streams with specified resolution. + // If any stream was subscribed to with `SubscribeTo` method that will + // override resolution passed to this function independently from methods + // call order. + VideoSubscription& SubscribeToAllPeers( + VideoResolution resolution = + VideoResolution(VideoResolution::Spec::kMaxFromSender)) { + default_resolution_ = resolution; + return *this; + } + + // Returns resolution for specific sender. If no specific resolution was + // set for this sender, then will return resolution used for all streams. + // If subscription doesn't subscribe to all streams, `absl::nullopt` will be + // returned. 
+ absl::optional GetResolutionForPeer( + absl::string_view peer_name) const { + auto it = peers_resolution_.find(std::string(peer_name)); + if (it == peers_resolution_.end()) { + return default_resolution_; + } + return it->second; + } + + // Returns a maybe empty list of senders for which peer explicitly + // subscribed to with specific resolution. + std::vector GetSubscribedPeers() const { + std::vector subscribed_streams; + subscribed_streams.reserve(peers_resolution_.size()); + for (const auto& entry : peers_resolution_) { + subscribed_streams.push_back(entry.first); + } + return subscribed_streams; + } + + std::string ToString() const; + + private: + absl::optional default_resolution_ = absl::nullopt; + std::map peers_resolution_; + }; + // This class is used to fully configure one peer inside the call. class PeerConfigurer { public: @@ -357,6 +557,14 @@ class PeerConnectionE2EQualityTestFixture { // Set a custom NetEqFactory to be used in the call. virtual PeerConfigurer* SetNetEqFactory( std::unique_ptr neteq_factory) = 0; + virtual PeerConfigurer* SetAudioProcessing( + rtc::scoped_refptr audio_processing) = 0; + virtual PeerConfigurer* SetAudioMixer( + rtc::scoped_refptr audio_mixer) = 0; + + // Forces the Peerconnection to use the network thread as the worker thread. + // Ie, worker thread and the network thread is the same thread. + virtual PeerConfigurer* SetUseNetworkThreadAsWorkerThread() = 0; // The parameters of the following 4 methods will be passed to the // PeerConnectionInterface implementation that will be created for this @@ -371,6 +579,11 @@ class PeerConnectionE2EQualityTestFixture { std::unique_ptr tls_cert_verifier) = 0; virtual PeerConfigurer* SetIceTransportFactory( std::unique_ptr factory) = 0; + // Flags to set on `cricket::PortAllocator`. These flags will be added + // to the default ones that are presented on the port allocator. + // For possible values check p2p/base/port_allocator.h. + virtual PeerConfigurer* SetPortAllocatorExtraFlags( + uint32_t extra_flags) = 0; // Add new video stream to the call that will be sent from this peer. // Default implementation of video frames generator will be used. @@ -385,6 +598,11 @@ class PeerConnectionE2EQualityTestFixture { virtual PeerConfigurer* AddVideoConfig( VideoConfig config, CapturingDeviceIndex capturing_device_index) = 0; + // Sets video subscription for the peer. By default subscription will + // include all streams with `VideoSubscription::kSameAsSendStream` + // resolution. To override this behavior use this method. + virtual PeerConfigurer* SetVideoSubscription( + VideoSubscription subscription) = 0; // Set the list of video codecs used by the peer during the test. These // codecs will be negotiated in SDP during offer/answer exchange. The order // of these codecs during negotiation will be the same as in `video_codecs`. @@ -396,6 +614,22 @@ class PeerConnectionE2EQualityTestFixture { // Set the audio stream for the call from this peer. If this method won't // be invoked, this peer will send no audio. virtual PeerConfigurer* SetAudioConfig(AudioConfig config) = 0; + + // Set if ULP FEC should be used or not. False by default. + virtual PeerConfigurer* SetUseUlpFEC(bool value) = 0; + // Set if Flex FEC should be used or not. False by default. + // Client also must enable `enable_flex_fec_support` in the `RunParams` to + // be able to use this feature. 
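// ---------------------------------------------------------------------------
// Illustrative sketch (editorial aside, not part of the patch): using the new
// VideoSubscription together with PeerConfigurer::SetVideoSubscription() and
// the per-peer SetUseFlexFEC() that replaces the old RunParams flag. The
// `alice` pointer is an assumed PeerConfigurer supplied by the test's peer
// setup; "bob" is a placeholder peer name.
#include "api/test/peerconnection_quality_test_fixture.h"

namespace {
using Fixture = webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture;

void ConfigureReceiverSketch(Fixture::PeerConfigurer* alice) {
  Fixture::VideoSubscription subscription;
  // Receive every remote stream at the sender's maximum resolution...
  subscription.SubscribeToAllPeers();
  // ...except the stream from "bob", which is capped at 640x360@15fps.
  subscription.SubscribeToPeer(
      "bob", Fixture::VideoResolution(640, 360, /*fps=*/15));
  alice->SetVideoSubscription(subscription)
      // Flex FEC additionally requires RunParams::enable_flex_fec_support.
      ->SetUseFlexFEC(true);
}
}  // namespace
// ---------------------------------------------------------------------------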
+ virtual PeerConfigurer* SetUseFlexFEC(bool value) = 0; + // Specifies how much video encoder target bitrate should be different than + // target bitrate, provided by WebRTC stack. Must be greater than 0. Can be + // used to emulate overshooting of video encoders. This multiplier will + // be applied for all video encoder on both sides for all layers. Bitrate + // estimated by WebRTC stack will be multiplied by this multiplier and then + // provided into VideoEncoder::SetRates(...). 1.0 by default. + virtual PeerConfigurer* SetVideoEncoderBitrateMultiplier( + double multiplier) = 0; + // If is set, an RTCEventLog will be saved in that location and it will be // available for further analysis. virtual PeerConfigurer* SetRtcEventLogPath(std::string path) = 0; @@ -404,6 +638,8 @@ class PeerConnectionE2EQualityTestFixture { virtual PeerConfigurer* SetAecDumpPath(std::string path) = 0; virtual PeerConfigurer* SetRTCConfiguration( PeerConnectionInterface::RTCConfiguration configuration) = 0; + virtual PeerConfigurer* SetRTCOfferAnswerOptions( + PeerConnectionInterface::RTCOfferAnswerOptions options) = 0; // Set bitrate parameters on PeerConnection. This constraints will be // applied to all summed RTP streams for this peer. virtual PeerConfigurer* SetBitrateSettings( @@ -427,15 +663,9 @@ class PeerConnectionE2EQualityTestFixture { // it will be shut downed. TimeDelta run_duration; - bool use_ulp_fec = false; - bool use_flex_fec = false; - // Specifies how much video encoder target bitrate should be different than - // target bitrate, provided by WebRTC stack. Must be greater then 0. Can be - // used to emulate overshooting of video encoders. This multiplier will - // be applied for all video encoder on both sides for all layers. Bitrate - // estimated by WebRTC stack will be multiplied on this multiplier and then - // provided into VideoEncoder::SetRates(...). - double video_encoder_bitrate_multiplier = 1.0; + // If set to true peers will be able to use Flex FEC, otherwise they won't + // be able to negotiate it even if it's enabled on per peer level. + bool enable_flex_fec_support = false; // If true will set conference mode in SDP media section for all video // tracks for all peers. bool use_conference_mode = false; diff --git a/TMessagesProj/jni/voip/webrtc/api/test/simulated_network.h b/TMessagesProj/jni/voip/webrtc/api/test/simulated_network.h index fcac51f4ea..fbf5c5ca29 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/simulated_network.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/simulated_network.h @@ -62,8 +62,6 @@ struct BuiltInNetworkBehaviorConfig { int avg_burst_loss_length = -1; // Additional bytes to add to packet size. int packet_overhead = 0; - // Enable CoDel active queue management. - bool codel_active_queue_management = false; }; class NetworkBehaviorInterface { diff --git a/TMessagesProj/jni/voip/webrtc/api/test/time_controller.h b/TMessagesProj/jni/voip/webrtc/api/test/time_controller.h index 17aa0db80f..121f65cea9 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/time_controller.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/time_controller.h @@ -17,7 +17,6 @@ #include "api/task_queue/task_queue_factory.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "modules/utility/include/process_thread.h" #include "rtc_base/synchronization/yield_policy.h" #include "rtc_base/thread.h" #include "system_wrappers/include/clock.h" @@ -41,9 +40,6 @@ class TimeController { // is destroyed. 
std::unique_ptr CreateTaskQueueFactory(); - // Creates a process thread. - virtual std::unique_ptr CreateProcessThread( - const char* thread_name) = 0; // Creates an rtc::Thread instance. If `socket_server` is nullptr, a default // noop socket server is created. // Returned thread is not null and started. diff --git a/TMessagesProj/jni/voip/webrtc/api/test/video/function_video_decoder_factory.h b/TMessagesProj/jni/voip/webrtc/api/test/video/function_video_decoder_factory.h index 86abdd0746..2145c71bff 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/video/function_video_decoder_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/video/function_video_decoder_factory.h @@ -17,6 +17,7 @@ #include #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_decoder_factory.h" #include "rtc_base/checks.h" diff --git a/TMessagesProj/jni/voip/webrtc/api/test/video/function_video_encoder_factory.h b/TMessagesProj/jni/voip/webrtc/api/test/video/function_video_encoder_factory.h index 9ae9719916..98ece2bc94 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/video/function_video_encoder_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/video/function_video_encoder_factory.h @@ -17,6 +17,7 @@ #include #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" #include "rtc_base/checks.h" diff --git a/TMessagesProj/jni/voip/webrtc/api/test/video/video_frame_writer.h b/TMessagesProj/jni/voip/webrtc/api/test/video/video_frame_writer.h new file mode 100644 index 0000000000..ac72534890 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/video/video_frame_writer.h @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_VIDEO_VIDEO_FRAME_WRITER_H_ +#define API_TEST_VIDEO_VIDEO_FRAME_WRITER_H_ + +#include "api/video/video_frame.h" + +namespace webrtc { +namespace test { + +class VideoFrameWriter { + public: + virtual ~VideoFrameWriter() = default; + + // Writes `VideoFrame` and returns true if operation was successful, false + // otherwise. + // + // Calling `WriteFrame` after `Close` is not allowed. + virtual bool WriteFrame(const VideoFrame& frame) = 0; + + // Closes writer and cleans up all resources. No invocations to `WriteFrame` + // are allowed after `Close` was invoked. + virtual void Close() = 0; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_VIDEO_VIDEO_FRAME_WRITER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/video_quality_analyzer_interface.h b/TMessagesProj/jni/voip/webrtc/api/test/video_quality_analyzer_interface.h index d27c9ea015..dc58b04967 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/video_quality_analyzer_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/video_quality_analyzer_interface.h @@ -101,7 +101,8 @@ class VideoQualityAnalyzerInterface virtual void OnFrameEncoded(absl::string_view peer_name, uint16_t frame_id, const EncodedImage& encoded_image, - const EncoderStats& stats) {} + const EncoderStats& stats, + bool discarded) {} // Will be called for each frame dropped by encoder. 
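// ---------------------------------------------------------------------------
// Illustrative sketch (editorial aside, not part of the patch): a minimal
// implementation of the VideoFrameWriter interface added above in
// api/test/video/video_frame_writer.h. The class name is hypothetical; it
// only counts frames, which is enough to show the WriteFrame()/Close()
// contract.
#include "api/test/video/video_frame_writer.h"
#include "api/video/video_frame.h"

namespace {
class CountingFrameWriter : public webrtc::test::VideoFrameWriter {
 public:
  bool WriteFrame(const webrtc::VideoFrame& frame) override {
    if (closed_) {
      return false;  // Writing after Close() is not allowed by the contract.
    }
    ++frames_written_;
    return true;
  }
  void Close() override { closed_ = true; }
  int frames_written() const { return frames_written_; }

 private:
  bool closed_ = false;
  int frames_written_ = 0;
};
}  // namespace
// ---------------------------------------------------------------------------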
// `peer_name` is name of the peer on which side frame drop was detected. virtual void OnFrameDropped(absl::string_view peer_name, @@ -133,7 +134,8 @@ class VideoQualityAnalyzerInterface // `peer_name` is name of the peer on which side error acquired. virtual void OnDecoderError(absl::string_view peer_name, uint16_t frame_id, - int32_t error_code) {} + int32_t error_code, + const DecoderStats& stats) {} // Will be called every time new stats reports are available for the // Peer Connection identified by `pc_label`. void OnStatsReports( @@ -142,6 +144,9 @@ class VideoQualityAnalyzerInterface // Will be called before test adds new participant in the middle of a call. virtual void RegisterParticipantInCall(absl::string_view peer_name) {} + // Will be called after test removed existing participant in the middle of the + // call. + virtual void UnregisterParticipantInCall(absl::string_view peer_name) {} // Tells analyzer that analysis complete and it should calculate final // statistics. @@ -153,12 +158,6 @@ class VideoQualityAnalyzerInterface virtual std::string GetStreamLabel(uint16_t frame_id) = 0; }; -namespace webrtc_pc_e2e { - -// Temporary alias to make downstream projects able to migrate. -using VideoQualityAnalyzerInterface = ::webrtc::VideoQualityAnalyzerInterface; - -} // namespace webrtc_pc_e2e } // namespace webrtc #endif // API_TEST_VIDEO_QUALITY_ANALYZER_INTERFACE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/video_quality_test_fixture.h b/TMessagesProj/jni/voip/webrtc/api/test/video_quality_test_fixture.h index 08ae12b816..0aa23b17bf 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/video_quality_test_fixture.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/video_quality_test_fixture.h @@ -24,8 +24,8 @@ #include "api/transport/network_control.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder_factory.h" -#include "api/video_codecs/video_encoder_config.h" #include "api/video_codecs/video_encoder_factory.h" +#include "video/config/video_encoder_config.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.cc b/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.cc index 0cf00da85b..f082b1e935 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.cc +++ b/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.cc @@ -86,6 +86,10 @@ std::map VideoCodecTestStats::VideoStatistics::ToMap() map["framerate_fps"] = std::to_string(framerate_fps); map["enc_speed_fps"] = std::to_string(enc_speed_fps); map["dec_speed_fps"] = std::to_string(dec_speed_fps); + map["avg_encode_latency_sec"] = std::to_string(avg_encode_latency_sec); + map["max_encode_latency_sec"] = std::to_string(max_encode_latency_sec); + map["avg_decode_latency_sec"] = std::to_string(avg_decode_latency_sec); + map["max_decode_latency_sec"] = std::to_string(max_decode_latency_sec); map["avg_delay_sec"] = std::to_string(avg_delay_sec); map["max_key_frame_delay_sec"] = std::to_string(max_key_frame_delay_sec); map["max_delta_frame_delay_sec"] = std::to_string(max_delta_frame_delay_sec); diff --git a/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.h b/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.h index 3f862338ee..a05985a665 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.h @@ -101,6 +101,11 @@ class VideoCodecTestStats { float enc_speed_fps = 0.0f; float dec_speed_fps = 0.0f; + float 
avg_encode_latency_sec = 0.0f; + float max_encode_latency_sec = 0.0f; + float avg_decode_latency_sec = 0.0f; + float max_decode_latency_sec = 0.0f; + float avg_delay_sec = 0.0f; float max_key_frame_delay_sec = 0.0f; float max_delta_frame_delay_sec = 0.0f; diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/field_trial_based_config.cc b/TMessagesProj/jni/voip/webrtc/api/transport/field_trial_based_config.cc index 4a3a179240..0cef30f054 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/field_trial_based_config.cc +++ b/TMessagesProj/jni/voip/webrtc/api/transport/field_trial_based_config.cc @@ -12,7 +12,7 @@ #include "system_wrappers/include/field_trial.h" namespace webrtc { -std::string FieldTrialBasedConfig::Lookup(absl::string_view key) const { +std::string FieldTrialBasedConfig::GetValue(absl::string_view key) const { return webrtc::field_trial::FindFullName(std::string(key)); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/field_trial_based_config.h b/TMessagesProj/jni/voip/webrtc/api/transport/field_trial_based_config.h index 0754570fde..d47140e579 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/field_trial_based_config.h +++ b/TMessagesProj/jni/voip/webrtc/api/transport/field_trial_based_config.h @@ -13,13 +13,13 @@ #include #include "absl/strings/string_view.h" -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_registry.h" namespace webrtc { // Implementation using the field trial API fo the key value lookup. -class FieldTrialBasedConfig : public WebRtcKeyValueConfig { - public: - std::string Lookup(absl::string_view key) const override; +class FieldTrialBasedConfig : public FieldTrialsRegistry { + private: + std::string GetValue(absl::string_view key) const override; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/network_control.h b/TMessagesProj/jni/voip/webrtc/api/transport/network_control.h index c2b005e713..862322443d 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/network_control.h +++ b/TMessagesProj/jni/voip/webrtc/api/transport/network_control.h @@ -15,9 +15,9 @@ #include #include "absl/base/attributes.h" +#include "api/field_trials_view.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/transport/network_types.h" -#include "api/transport/webrtc_key_value_config.h" namespace webrtc { @@ -46,7 +46,7 @@ struct NetworkControllerConfig { // Optional override of configuration of WebRTC internals. Using nullptr here // indicates that the field trial API will be used. - const WebRtcKeyValueConfig* key_value_config = nullptr; + const FieldTrialsView* key_value_config = nullptr; // Optional override of event log. 
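// ---------------------------------------------------------------------------
// Illustrative sketch (editorial aside, not part of the patch): with
// WebRtcKeyValueConfig gone, custom configs derive from FieldTrialsRegistry
// and override the private GetValue(), as FieldTrialBasedConfig above now
// does. The map-backed class below is hypothetical and assumes GetValue() is
// the only override point exposed by api/field_trials_registry.h.
#include <map>
#include <string>

#include "absl/strings/string_view.h"
#include "api/field_trials_registry.h"

namespace {
class MapBasedFieldTrials : public webrtc::FieldTrialsRegistry {
 public:
  void Set(absl::string_view key, absl::string_view value) {
    trials_[std::string(key)] = std::string(value);
  }

 private:
  // Callers keep using the inherited lookup; only the storage differs.
  std::string GetValue(absl::string_view key) const override {
    auto it = trials_.find(std::string(key));
    return it == trials_.end() ? std::string() : it->second;
  }

  std::map<std::string, std::string> trials_;
};
}  // namespace
// ---------------------------------------------------------------------------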
RtcEventLog* event_log = nullptr; }; @@ -132,7 +132,7 @@ class NetworkStateEstimator { class NetworkStateEstimatorFactory { public: virtual std::unique_ptr Create( - const WebRtcKeyValueConfig* key_value_config) = 0; + const FieldTrialsView* key_value_config) = 0; virtual ~NetworkStateEstimatorFactory() = default; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/network_types.cc b/TMessagesProj/jni/voip/webrtc/api/transport/network_types.cc index 7451940151..d6495ce490 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/network_types.cc +++ b/TMessagesProj/jni/voip/webrtc/api/transport/network_types.cc @@ -103,8 +103,4 @@ bool PacedPacketInfo::operator==(const PacedPacketInfo& rhs) const { probe_cluster_min_bytes == rhs.probe_cluster_min_bytes; } -ProcessInterval::ProcessInterval() = default; -ProcessInterval::ProcessInterval(const ProcessInterval&) = default; -ProcessInterval::~ProcessInterval() = default; - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/network_types.h b/TMessagesProj/jni/voip/webrtc/api/transport/network_types.h index 4e96b0f12e..29a7cf7705 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/network_types.h +++ b/TMessagesProj/jni/voip/webrtc/api/transport/network_types.h @@ -241,9 +241,6 @@ struct NetworkControlUpdate { // Process control struct ProcessInterval { - ProcessInterval(); - ProcessInterval(const ProcessInterval&); - ~ProcessInterval(); Timestamp at_time = Timestamp::PlusInfinity(); absl::optional pacer_queue; }; diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/rtp/rtp_source.h b/TMessagesProj/jni/voip/webrtc/api/transport/rtp/rtp_source.h index 8c543cac0c..e51dcd70b6 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/rtp/rtp_source.h +++ b/TMessagesProj/jni/voip/webrtc/api/transport/rtp/rtp_source.h @@ -15,6 +15,7 @@ #include "absl/types/optional.h" #include "api/rtp_headers.h" +#include "api/units/time_delta.h" #include "rtc_base/checks.h" namespace webrtc { @@ -28,24 +29,21 @@ class RtpSource { public: struct Extensions { absl::optional audio_level; + + // Fields from the Absolute Capture Time header extension: + // http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time absl::optional absolute_capture_time; + + // Clock offset between the local clock and the capturer's clock. + // Do not confuse with `AbsoluteCaptureTime::estimated_capture_clock_offset` + // which instead represents the clock offset between a remote sender and the + // capturer. The following holds: + // Capture's NTP Clock = Local NTP Clock + Local-Capture Clock Offset + absl::optional local_capture_clock_offset; }; RtpSource() = delete; - // TODO(bugs.webrtc.org/10739): Remove this constructor once all clients - // migrate to the version with absolute capture time. 
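// ---------------------------------------------------------------------------
// Illustrative sketch (editorial aside, not part of the patch): the relation
// documented above for the new local_capture_clock_offset extension, written
// out as code. It assumes the offset is exposed as an optional TimeDelta
// (consistent with the api/units/time_delta.h include added to rtp_source.h);
// `local_ntp_time` is an assumed input from the caller.
#include "absl/types/optional.h"
#include "api/transport/rtp/rtp_source.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"

namespace {
absl::optional<webrtc::Timestamp> CaptureNtpTimeSketch(
    const webrtc::RtpSource& source, webrtc::Timestamp local_ntp_time) {
  absl::optional<webrtc::TimeDelta> offset =
      source.local_capture_clock_offset();
  if (!offset.has_value()) {
    return absl::nullopt;
  }
  // Capture's NTP Clock = Local NTP Clock + Local-Capture Clock Offset.
  return local_ntp_time + *offset;
}
}  // namespace
// ---------------------------------------------------------------------------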
- RtpSource(int64_t timestamp_ms, - uint32_t source_id, - RtpSourceType source_type, - absl::optional audio_level, - uint32_t rtp_timestamp) - : RtpSource(timestamp_ms, - source_id, - source_type, - rtp_timestamp, - {audio_level, absl::nullopt}) {} - RtpSource(int64_t timestamp_ms, uint32_t source_id, RtpSourceType source_type, @@ -87,6 +85,10 @@ class RtpSource { return extensions_.absolute_capture_time; } + absl::optional local_capture_clock_offset() const { + return extensions_.local_capture_clock_offset; + } + bool operator==(const RtpSource& o) const { return timestamp_ms_ == o.timestamp_ms() && source_id_ == o.source_id() && source_type_ == o.source_type() && diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/stun.cc b/TMessagesProj/jni/voip/webrtc/api/transport/stun.cc index 87da0058d3..1098c6720e 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/stun.cc +++ b/TMessagesProj/jni/voip/webrtc/api/transport/stun.cc @@ -11,6 +11,7 @@ #include "api/transport/stun.h" #include + #include #include #include @@ -20,8 +21,10 @@ #include "rtc_base/byte_order.h" #include "rtc_base/checks.h" #include "rtc_base/crc32.h" +#include "rtc_base/helpers.h" #include "rtc_base/logging.h" #include "rtc_base/message_digest.h" +#include "system_wrappers/include/metrics.h" using rtc::ByteBufferReader; using rtc::ByteBufferWriter; @@ -34,11 +37,11 @@ const int k127Utf8CharactersLengthInBytes = 508; const int kMessageIntegrityAttributeLength = 20; const int kTheoreticalMaximumAttributeLength = 65535; -uint32_t ReduceTransactionId(const std::string& transaction_id) { +uint32_t ReduceTransactionId(absl::string_view transaction_id) { RTC_DCHECK(transaction_id.length() == cricket::kStunTransactionIdLength || - transaction_id.length() == - cricket::kStunLegacyTransactionIdLength); - ByteBufferReader reader(transaction_id.c_str(), transaction_id.length()); + transaction_id.length() == cricket::kStunLegacyTransactionIdLength) + << transaction_id.length(); + ByteBufferReader reader(transaction_id.data(), transaction_id.size()); uint32_t result = 0; uint32_t next; while (reader.ReadUInt32(&next)) { @@ -102,10 +105,15 @@ const int SERVER_NOT_REACHABLE_ERROR = 701; // StunMessage StunMessage::StunMessage() - : type_(0), - length_(0), - transaction_id_(EMPTY_TRANSACTION_ID), - stun_magic_cookie_(kStunMagicCookie) { + : StunMessage(STUN_INVALID_MESSAGE_TYPE, EMPTY_TRANSACTION_ID) {} + +StunMessage::StunMessage(uint16_t type) + : StunMessage(type, GenerateTransactionId()) {} + +StunMessage::StunMessage(uint16_t type, absl::string_view transaction_id) + : type_(type), + transaction_id_(transaction_id), + reduced_transaction_id_(ReduceTransactionId(transaction_id_)) { RTC_DCHECK(IsValidTransactionId(transaction_id_)); } @@ -118,15 +126,6 @@ bool StunMessage::IsLegacy() const { return false; } -bool StunMessage::SetTransactionID(const std::string& str) { - if (!IsValidTransactionId(str)) { - return false; - } - transaction_id_ = str; - reduced_transaction_id_ = ReduceTransactionId(transaction_id_); - return true; -} - static bool DesignatedExpertRange(int attr_type) { return (attr_type >= 0x4000 && attr_type <= 0x7FFF) || (attr_type >= 0xC000 && attr_type <= 0xFFFF); @@ -240,6 +239,8 @@ const StunUInt16ListAttribute* StunMessage::GetUnknownAttributes() const { StunMessage::IntegrityStatus StunMessage::ValidateMessageIntegrity( const std::string& password) { + RTC_DCHECK(integrity_ == IntegrityStatus::kNotSet) + << "Usage error: Verification should only be done once"; password_ = password; if 
(GetByteString(STUN_ATTR_MESSAGE_INTEGRITY)) { if (ValidateMessageIntegrityOfType( @@ -260,9 +261,101 @@ StunMessage::IntegrityStatus StunMessage::ValidateMessageIntegrity( } else { integrity_ = IntegrityStatus::kNoIntegrity; } + // Log the result of integrity checking. See crbug.com/1177125 for background. + // Convert args to integer for the benefit of the macros. + int bucket_count = static_cast(IntegrityStatus::kMaxValue) + 1; + int integrity = static_cast(integrity_); + if (IsStunRequestType(type_)) { + RTC_HISTOGRAM_ENUMERATION("WebRTC.Stun.Integrity.Request", integrity, + bucket_count); + } else if (IsStunSuccessResponseType(type_)) { + RTC_HISTOGRAM_ENUMERATION("WebRTC.Stun.Integrity.Response", integrity, + bucket_count); + } else if (IsStunIndicationType(type_)) { + RTC_HISTOGRAM_ENUMERATION("WebRTC.Stun.Integrity.Indication", integrity, + bucket_count); + } else { + RTC_DCHECK(IsStunErrorResponseType(type_)); + auto* error_attribute = GetErrorCode(); + if (!error_attribute) { + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.Stun.Integrity.ErrorResponse.NoErrorAttribute", integrity, + bucket_count); + } else { + switch (error_attribute->code()) { + case STUN_ERROR_TRY_ALTERNATE: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.Stun.Integrity.ErrorResponse.TryAlternate", integrity, + bucket_count); + break; + case STUN_ERROR_BAD_REQUEST: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.Stun.Integrity.ErrorResponse.BadRequest", integrity, + bucket_count); + break; + case STUN_ERROR_UNAUTHORIZED: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.Stun.Integrity.ErrorResponse.Unauthorized", integrity, + bucket_count); + break; + case STUN_ERROR_UNKNOWN_ATTRIBUTE: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.Stun.Integrity.ErrorResponse.UnknownAttribute", integrity, + bucket_count); + break; + case STUN_ERROR_STALE_NONCE: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.Stun.Integrity.ErrorResponse.StaleNonce", integrity, + bucket_count); + break; + case STUN_ERROR_SERVER_ERROR: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.Stun.Integrity.ErrorResponse.ServerError", integrity, + bucket_count); + break; + case STUN_ERROR_GLOBAL_FAILURE: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.Stun.Integrity.ErrorResponse.GlobalFailure", integrity, + bucket_count); + break; + default: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.Stun.Integrity.ErrorResponse.ErrorOther", integrity, + bucket_count); + break; + } + } + } return integrity_; } +StunMessage::IntegrityStatus StunMessage::RevalidateMessageIntegrity( + const std::string& password) { + RTC_LOG(LS_INFO) << "Message revalidation, old status was " + << static_cast(integrity_); + integrity_ = IntegrityStatus::kNotSet; + return ValidateMessageIntegrity(password); +} + +bool StunMessage::ValidateMessageIntegrityForTesting( + const char* data, + size_t size, + const std::string& password) { + return ValidateMessageIntegrityOfType(STUN_ATTR_MESSAGE_INTEGRITY, + kStunMessageIntegritySize, data, size, + password); +} + +bool StunMessage::ValidateMessageIntegrity32ForTesting( + const char* data, + size_t size, + const std::string& password) { + return ValidateMessageIntegrityOfType(STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32, + kStunMessageIntegrity32Size, data, size, + password); +} + +// Deprecated bool StunMessage::ValidateMessageIntegrity(const char* data, size_t size, const std::string& password) { @@ -271,6 +364,7 @@ bool StunMessage::ValidateMessageIntegrity(const char* data, password); } +// Deprecated bool StunMessage::ValidateMessageIntegrity32(const char* data, size_t size, const std::string& password) { @@ -364,22 +458,19 @@ 
bool StunMessage::ValidateMessageIntegrityOfType(int mi_attr_type, mi_attr_size) == 0; } -bool StunMessage::AddMessageIntegrity(const std::string& password) { +bool StunMessage::AddMessageIntegrity(absl::string_view password) { return AddMessageIntegrityOfType(STUN_ATTR_MESSAGE_INTEGRITY, - kStunMessageIntegritySize, password.c_str(), - password.size()); + kStunMessageIntegritySize, password); } bool StunMessage::AddMessageIntegrity32(absl::string_view password) { return AddMessageIntegrityOfType(STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32, - kStunMessageIntegrity32Size, password.data(), - password.length()); + kStunMessageIntegrity32Size, password); } bool StunMessage::AddMessageIntegrityOfType(int attr_type, size_t attr_size, - const char* key, - size_t keylen) { + absl::string_view key) { // Add the attribute with a dummy value. Since this is a known attribute, it // can't fail. RTC_DCHECK(attr_size <= kStunMessageIntegritySize); @@ -396,8 +487,9 @@ bool StunMessage::AddMessageIntegrityOfType(int attr_type, int msg_len_for_hmac = static_cast( buf.Length() - kStunAttributeHeaderSize - msg_integrity_attr->length()); char hmac[kStunMessageIntegritySize]; - size_t ret = rtc::ComputeHmac(rtc::DIGEST_SHA_1, key, keylen, buf.Data(), - msg_len_for_hmac, hmac, sizeof(hmac)); + size_t ret = + rtc::ComputeHmac(rtc::DIGEST_SHA_1, key.data(), key.size(), buf.Data(), + msg_len_for_hmac, hmac, sizeof(hmac)); RTC_DCHECK(ret == sizeof(hmac)); if (ret != sizeof(hmac)) { RTC_LOG(LS_ERROR) << "HMAC computation failed. Message-Integrity " @@ -407,7 +499,7 @@ bool StunMessage::AddMessageIntegrityOfType(int attr_type, // Insert correct HMAC into the attribute. msg_integrity_attr->CopyBytes(hmac, attr_size); - password_.assign(key, keylen); + password_ = std::string(key); integrity_ = IntegrityStatus::kIntegrityOk; return true; } @@ -442,6 +534,11 @@ bool StunMessage::ValidateFingerprint(const char* data, size_t size) { rtc::ComputeCrc32(data, size - fingerprint_attr_size)); } +// static +std::string StunMessage::GenerateTransactionId() { + return rtc::CreateRandomString(kStunTransactionIdLength); +} + bool StunMessage::IsStunMethod(rtc::ArrayView methods, const char* data, size_t size) { @@ -589,6 +686,12 @@ void StunMessage::SetStunMagicCookie(uint32_t val) { stun_magic_cookie_ = val; } +void StunMessage::SetTransactionIdForTesting(absl::string_view transaction_id) { + RTC_DCHECK(IsValidTransactionId(transaction_id)); + transaction_id_ = std::string(transaction_id); + reduced_transaction_id_ = ReduceTransactionId(transaction_id_); +} + StunAttributeValueType StunMessage::GetAttributeValueType(int type) const { switch (type) { case STUN_ATTR_MAPPED_ADDRESS: @@ -647,7 +750,7 @@ const StunAttribute* StunMessage::GetAttribute(int type) const { return NULL; } -bool StunMessage::IsValidTransactionId(const std::string& transaction_id) { +bool StunMessage::IsValidTransactionId(absl::string_view transaction_id) { return transaction_id.size() == kStunTransactionIdLength || transaction_id.size() == kStunLegacyTransactionIdLength; } @@ -997,9 +1100,9 @@ StunByteStringAttribute::StunByteStringAttribute(uint16_t type) : StunAttribute(type, 0), bytes_(NULL) {} StunByteStringAttribute::StunByteStringAttribute(uint16_t type, - const std::string& str) + absl::string_view str) : StunAttribute(type, 0), bytes_(NULL) { - CopyBytes(str.c_str(), str.size()); + CopyBytes(str); } StunByteStringAttribute::StunByteStringAttribute(uint16_t type, @@ -1020,8 +1123,10 @@ StunAttributeValueType StunByteStringAttribute::value_type() const { return 
STUN_VALUE_BYTE_STRING; } -void StunByteStringAttribute::CopyBytes(const char* bytes) { - CopyBytes(bytes, strlen(bytes)); +void StunByteStringAttribute::CopyBytes(absl::string_view bytes) { + char* new_bytes = new char[bytes.size()]; + memcpy(new_bytes, bytes.data(), bytes.size()); + SetBytes(new_bytes, bytes.size()); } void StunByteStringAttribute::CopyBytes(const void* bytes, size_t length) { diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/stun.h b/TMessagesProj/jni/voip/webrtc/api/transport/stun.h index 766b9ec368..c2c9ad4b9c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/stun.h +++ b/TMessagesProj/jni/voip/webrtc/api/transport/stun.h @@ -31,7 +31,8 @@ namespace cricket { // These are the types of STUN messages defined in RFC 5389. -enum StunMessageType { +enum StunMessageType : uint16_t { + STUN_INVALID_MESSAGE_TYPE = 0x0000, STUN_BINDING_REQUEST = 0x0001, STUN_BINDING_INDICATION = 0x0011, STUN_BINDING_RESPONSE = 0x0101, @@ -144,16 +145,28 @@ class StunXorAddressAttribute; // that attribute class. class StunMessage { public: + // Constructs a StunMessage with an invalid type and empty, legacy length + // (16 bytes, RFC3489) transaction id. StunMessage(); + + // Construct a `StunMessage` with a specific type and generate a new + // 12 byte transaction id (RFC5389). + explicit StunMessage(uint16_t type); + + StunMessage(uint16_t type, absl::string_view transaction_id); + virtual ~StunMessage(); // The verification status of the message. This is checked on parsing, // or set by AddMessageIntegrity. + // These values are persisted to logs. Entries should not be renumbered and + // numeric values should never be reused. enum class IntegrityStatus { - kNotSet, - kNoIntegrity, // Message-integrity attribute missing - kIntegrityOk, // Message-integrity checked OK - kIntegrityBad, // Message-integrity verification failed + kNotSet = 0, + kNoIntegrity = 1, // Message-integrity attribute missing + kIntegrityOk = 2, // Message-integrity checked OK + kIntegrityBad = 3, // Message-integrity verification failed + kMaxValue = kIntegrityBad, }; int type() const { return type_; } @@ -168,8 +181,13 @@ class StunMessage { // is determined by the lengths of the transaction ID. bool IsLegacy() const; - void SetType(int type) { type_ = static_cast(type); } - bool SetTransactionID(const std::string& str); + [[deprecated]] void SetType(int type) { type_ = static_cast(type); } + [[deprecated]] bool SetTransactionID(absl::string_view transaction_id) { + if (!IsValidTransactionId(transaction_id)) + return false; + SetTransactionIdForTesting(transaction_id); + return true; + } // Get a list of all of the attribute types in the "comprehension required" // range that were not recognized. @@ -202,6 +220,11 @@ class StunMessage { // This uses the buffered raw-format message stored by Read(). IntegrityStatus ValidateMessageIntegrity(const std::string& password); + // Revalidates the STUN message with (possibly) a new password. + // Indicates that calling logic needs review - probably previous call + // was checking with the wrong password. + IntegrityStatus RevalidateMessageIntegrity(const std::string& password); + // Returns the current integrity status of the message. IntegrityStatus integrity() const { return integrity_; } @@ -218,7 +241,7 @@ class StunMessage { } // Adds a MESSAGE-INTEGRITY attribute that is valid for the current message. 
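// ---------------------------------------------------------------------------
// Illustrative sketch (editorial aside, not part of the patch): with the new
// constructors, the message type and a fresh RFC 5389 transaction id are set
// at construction instead of via the now-deprecated SetType() /
// SetTransactionID(). The password is a placeholder; StunMessage::Write() is
// assumed from the existing, unchanged part of this API.
#include "api/transport/stun.h"
#include "rtc_base/byte_buffer.h"

namespace {
bool BuildBindingRequestSketch(rtc::ByteBufferWriter* out) {
  cricket::StunMessage request(cricket::STUN_BINDING_REQUEST);
  if (!request.AddMessageIntegrity("example-password") ||
      !request.AddFingerprint()) {
    return false;
  }
  return request.Write(out);
}
}  // namespace
// ---------------------------------------------------------------------------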
- bool AddMessageIntegrity(const std::string& password); + bool AddMessageIntegrity(absl::string_view password); // Adds a STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32 attribute that is valid for the // current message. @@ -233,6 +256,9 @@ class StunMessage { // Verifies that a given buffer is STUN by checking for a correct FINGERPRINT. static bool ValidateFingerprint(const char* data, size_t size); + // Generates a new 12 byte (RFC5389) transaction id. + static std::string GenerateTransactionId(); + // Adds a FINGERPRINT attribute that is valid for the current message. bool AddFingerprint(); @@ -249,7 +275,10 @@ class StunMessage { // Modify the stun magic cookie used for this STUN message. // This is used for testing. - void SetStunMagicCookie(uint32_t val); + [[deprecated]] void SetStunMagicCookie(uint32_t val); + + // Change the internal transaction id. Used only for testing. + void SetTransactionIdForTesting(absl::string_view transaction_id); // Contruct a copy of `this`. std::unique_ptr Clone() const; @@ -259,29 +288,27 @@ class StunMessage { bool EqualAttributes(const StunMessage* other, std::function attribute_type_mask) const; - // Expose raw-buffer ValidateMessageIntegrity function for testing. - static bool ValidateMessageIntegrityForTesting(const char* data, - size_t size, - const std::string& password) { - return ValidateMessageIntegrity(data, size, password); - } - // Expose raw-buffer ValidateMessageIntegrity function for testing. - static bool ValidateMessageIntegrity32ForTesting( - const char* data, - size_t size, - const std::string& password) { - return ValidateMessageIntegrity32(data, size, password); - } // Validates that a STUN message in byte buffer form // has a correct MESSAGE-INTEGRITY value. // These functions are not recommended and will be deprecated; use // ValidateMessageIntegrity(password) on the parsed form instead. - static bool ValidateMessageIntegrity(const char* data, - size_t size, - const std::string& password); - static bool ValidateMessageIntegrity32(const char* data, - size_t size, - const std::string& password); + [[deprecated("Use member function")]] static bool ValidateMessageIntegrity( + const char* data, + size_t size, + const std::string& password); + [[deprecated("Use member function")]] static bool ValidateMessageIntegrity32( + const char* data, + size_t size, + const std::string& password); + + // Expose raw-buffer ValidateMessageIntegrity function for testing. + static bool ValidateMessageIntegrityForTesting(const char* data, + size_t size, + const std::string& password); + // Expose raw-buffer ValidateMessageIntegrity function for testing. + static bool ValidateMessageIntegrity32ForTesting(const char* data, + size_t size, + const std::string& password); protected: // Verifies that the given attribute is allowed for this message. 
@@ -292,22 +319,21 @@ class StunMessage { private: StunAttribute* CreateAttribute(int type, size_t length) /* const*/; const StunAttribute* GetAttribute(int type) const; - static bool IsValidTransactionId(const std::string& transaction_id); + static bool IsValidTransactionId(absl::string_view transaction_id); bool AddMessageIntegrityOfType(int mi_attr_type, size_t mi_attr_size, - const char* key, - size_t keylen); + absl::string_view key); static bool ValidateMessageIntegrityOfType(int mi_attr_type, size_t mi_attr_size, const char* data, size_t size, const std::string& password); - uint16_t type_; - uint16_t length_; + uint16_t type_ = STUN_INVALID_MESSAGE_TYPE; + uint16_t length_ = 0; std::string transaction_id_; - uint32_t reduced_transaction_id_; - uint32_t stun_magic_cookie_; + uint32_t reduced_transaction_id_ = 0; + uint32_t stun_magic_cookie_ = kStunMagicCookie; // The original buffer for messages created by Read(). std::string buffer_; IntegrityStatus integrity_ = IntegrityStatus::kNotSet; @@ -486,7 +512,7 @@ class StunUInt64Attribute : public StunAttribute { class StunByteStringAttribute : public StunAttribute { public: explicit StunByteStringAttribute(uint16_t type); - StunByteStringAttribute(uint16_t type, const std::string& str); + StunByteStringAttribute(uint16_t type, absl::string_view str); StunByteStringAttribute(uint16_t type, const void* bytes, size_t length); StunByteStringAttribute(uint16_t type, uint16_t length); ~StunByteStringAttribute() override; @@ -494,10 +520,16 @@ class StunByteStringAttribute : public StunAttribute { StunAttributeValueType value_type() const override; const char* bytes() const { return bytes_; } - std::string GetString() const { return std::string(bytes_, length()); } + absl::string_view string_view() const { + return absl::string_view(bytes_, length()); + } + + [[deprecated]] std::string GetString() const { + return std::string(bytes_, length()); + } - void CopyBytes(const char* bytes); // uses strlen void CopyBytes(const void* bytes, size_t length); + void CopyBytes(absl::string_view bytes); uint8_t GetByte(size_t index) const; void SetByte(size_t index, uint8_t value); @@ -635,13 +667,16 @@ enum RelayAttributeType { // A "GTURN" STUN message. class RelayMessage : public StunMessage { + public: + using StunMessage::StunMessage; + protected: StunAttributeValueType GetAttributeValueType(int type) const override; StunMessage* CreateNew() const override; }; // Defined in TURN RFC 5766. -enum TurnMessageType { +enum TurnMessageType : uint16_t { STUN_ALLOCATE_REQUEST = 0x0003, STUN_ALLOCATE_RESPONSE = 0x0103, STUN_ALLOCATE_ERROR_RESPONSE = 0x0113, @@ -689,6 +724,9 @@ extern const char STUN_ERROR_REASON_ALLOCATION_MISMATCH[]; extern const char STUN_ERROR_REASON_WRONG_CREDENTIALS[]; extern const char STUN_ERROR_REASON_UNSUPPORTED_PROTOCOL[]; class TurnMessage : public StunMessage { + public: + using StunMessage::StunMessage; + protected: StunAttributeValueType GetAttributeValueType(int type) const override; StunMessage* CreateNew() const override; @@ -747,6 +785,9 @@ extern const char STUN_ERROR_REASON_ROLE_CONFLICT[]; // A RFC 5245 ICE STUN message. 
class IceMessage : public StunMessage { + public: + using StunMessage::StunMessage; + protected: StunAttributeValueType GetAttributeValueType(int type) const override; StunMessage* CreateNew() const override; diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/webrtc_key_value_config.h b/TMessagesProj/jni/voip/webrtc/api/transport/webrtc_key_value_config.h deleted file mode 100644 index 5666a82783..0000000000 --- a/TMessagesProj/jni/voip/webrtc/api/transport/webrtc_key_value_config.h +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef API_TRANSPORT_WEBRTC_KEY_VALUE_CONFIG_H_ -#define API_TRANSPORT_WEBRTC_KEY_VALUE_CONFIG_H_ - -#include - -#include "absl/strings/string_view.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// An interface that provides a key-value mapping for configuring internal -// details of WebRTC. Note that there's no guarantess that the meaning of a -// particular key value mapping will be preserved over time and no announcements -// will be made if they are changed. It's up to the library user to ensure that -// the behavior does not break. -class RTC_EXPORT WebRtcKeyValueConfig { - public: - virtual ~WebRtcKeyValueConfig() = default; - // The configured value for the given key. Defaults to an empty string. - virtual std::string Lookup(absl::string_view key) const = 0; -}; -} // namespace webrtc - -#endif // API_TRANSPORT_WEBRTC_KEY_VALUE_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/turn_customizer.h b/TMessagesProj/jni/voip/webrtc/api/turn_customizer.h index 50e406516e..8d569b36d2 100644 --- a/TMessagesProj/jni/voip/webrtc/api/turn_customizer.h +++ b/TMessagesProj/jni/voip/webrtc/api/turn_customizer.h @@ -13,9 +13,10 @@ #include +#include "api/transport/stun.h" + namespace cricket { class PortInterface; -class StunMessage; } // namespace cricket namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/api/uma_metrics.h b/TMessagesProj/jni/voip/webrtc/api/uma_metrics.h index a975b82aeb..9eb3bf7e1f 100644 --- a/TMessagesProj/jni/voip/webrtc/api/uma_metrics.h +++ b/TMessagesProj/jni/voip/webrtc/api/uma_metrics.h @@ -83,14 +83,6 @@ enum IceCandidatePairType { // occurrences of events, while "Name" has a value associated with it which is // used to form a histogram. -// These values are persisted to logs. Entries should not be renumbered and -// numeric values should never be reused. -enum KeyExchangeProtocolType { - kEnumCounterKeyProtocolDtls = 0, - kEnumCounterKeyProtocolSdes = 1, - kEnumCounterKeyProtocolMax -}; - // These values are persisted to logs. Entries should not be renumbered and // numeric values should never be reused. enum KeyExchangeProtocolMedia { @@ -112,36 +104,6 @@ enum SdpSemanticRequested { kSdpSemanticRequestMax }; -// These values are persisted to logs. Entries should not be renumbered and -// numeric values should never be reused. 
-enum SdpSemanticNegotiated { - kSdpSemanticNegotiatedNone = 0, - kSdpSemanticNegotiatedPlanB = 1, - kSdpSemanticNegotiatedUnifiedPlan = 2, - kSdpSemanticNegotiatedMixed = 3, - kSdpSemanticNegotiatedMax -}; - -// Metric which records the format of the received SDP for tracking how much the -// difference between Plan B and Unified Plan affect users. -// These values are persisted to logs. Entries should not be renumbered and -// numeric values should never be reused. -enum SdpFormatReceived { - // No audio or video tracks. This is worth special casing since it seems to be - // the most common scenario (data-channel only). - kSdpFormatReceivedNoTracks = 0, - // No more than one audio and one video track. Should be compatible with both - // Plan B and Unified Plan endpoints. - kSdpFormatReceivedSimple = 1, - // More than one audio track or more than one video track in the Plan B format - // (e.g., one audio media section with multiple streams). - kSdpFormatReceivedComplexPlanB = 2, - // More than one audio track or more than one video track in the Unified Plan - // format (e.g., two audio media sections). - kSdpFormatReceivedComplexUnifiedPlan = 3, - kSdpFormatReceivedMax -}; - // Metric for counting the outcome of adding an ICE candidate // These values are persisted to logs. Entries should not be renumbered and // numeric values should never be reused. diff --git a/TMessagesProj/jni/voip/webrtc/api/units/time_delta.h b/TMessagesProj/jni/voip/webrtc/api/units/time_delta.h index 6f1910379b..d5951005e3 100644 --- a/TMessagesProj/jni/voip/webrtc/api/units/time_delta.h +++ b/TMessagesProj/jni/voip/webrtc/api/units/time_delta.h @@ -32,6 +32,11 @@ namespace webrtc { // microseconds (us). class TimeDelta final : public rtc_units_impl::RelativeUnit { public: + template + static constexpr TimeDelta Minutes(T value) { + static_assert(std::is_arithmetic::value, ""); + return Seconds(value * 60); + } template static constexpr TimeDelta Seconds(T value) { static_assert(std::is_arithmetic::value, ""); diff --git a/TMessagesProj/jni/voip/webrtc/api/video/OWNERS b/TMessagesProj/jni/voip/webrtc/api/video/OWNERS index e4a16c360a..49b62f3780 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/api/video/OWNERS @@ -1,5 +1,5 @@ brandtr@webrtc.org magjed@webrtc.org -nisse@webrtc.org +philipel@webrtc.org per-file video_timing.h=ilnik@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.cc b/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.cc index 42d6b06b84..c5e2abbbb4 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.cc @@ -10,10 +10,24 @@ #include "api/video/encoded_frame.h" +#include "absl/types/optional.h" + namespace webrtc { +absl::optional EncodedFrame::ReceivedTimestamp() const { + return ReceivedTime() >= 0 + ? absl::make_optional(Timestamp::Millis(ReceivedTime())) + : absl::nullopt; +} + +absl::optional EncodedFrame::RenderTimestamp() const { + return RenderTimeMs() >= 0 + ? 
absl::make_optional(Timestamp::Millis(RenderTimeMs())) + : absl::nullopt; +} + bool EncodedFrame::delayed_by_retransmission() const { - return 0; + return false; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.h b/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.h index 3ef26caf6e..66aee227bb 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.h @@ -14,6 +14,8 @@ #include #include +#include "absl/types/optional.h" +#include "api/units/timestamp.h" #include "modules/video_coding/encoded_frame.h" namespace webrtc { @@ -30,10 +32,18 @@ class EncodedFrame : public webrtc::VCMEncodedFrame { virtual ~EncodedFrame() {} // When this frame was received. + // TODO(bugs.webrtc.org/13756): Use Timestamp instead of int. virtual int64_t ReceivedTime() const = 0; + // Returns a Timestamp from `ReceivedTime`, or nullopt if there is no receive + // time. + absl::optional ReceivedTimestamp() const; // When this frame should be rendered. + // TODO(bugs.webrtc.org/13756): Use Timestamp instead of int. virtual int64_t RenderTime() const = 0; + // Returns a Timestamp from `RenderTime`, or nullopt if there is no + // render time. + absl::optional RenderTimestamp() const; // This information is currently needed by the timing calculation class. // TODO(philipel): Remove this function when a new timing class has diff --git a/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.cc b/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.cc index fc77b9415b..ff61994dee 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.cc @@ -13,8 +13,6 @@ #include #include -#include "rtc_base/ref_counted_object.h" - namespace webrtc { EncodedImageBuffer::EncodedImageBuffer(size_t size) : size_(size) { diff --git a/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h b/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h index 987645b569..dae790c46c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h @@ -78,9 +78,8 @@ class RTC_EXPORT EncodedImage { EncodedImage& operator=(EncodedImage&&); EncodedImage& operator=(const EncodedImage&); - // TODO(nisse): Change style to timestamp(), set_timestamp(), for consistency - // with the VideoFrame class. - // Set frame timestamp (90kHz). + // TODO(bugs.webrtc.org/9378): Change style to timestamp(), set_timestamp(), + // for consistency with the VideoFrame class. Set frame timestamp (90kHz). void SetTimestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; } // Get frame timestamp (90kHz). @@ -97,6 +96,13 @@ class RTC_EXPORT EncodedImage { spatial_index_ = spatial_index; } + absl::optional TemporalIndex() const { return temporal_index_; } + void SetTemporalIndex(absl::optional temporal_index) { + RTC_DCHECK_GE(temporal_index_.value_or(0), 0); + RTC_DCHECK_LT(temporal_index_.value_or(0), kMaxTemporalStreams); + temporal_index_ = temporal_index; + } + // These methods can be used to set/get size of subframe with spatial index // `spatial_index` on encoded frames that consist of multiple spatial layers. absl::optional SpatialLayerFrameSize(int spatial_index) const; @@ -154,6 +160,16 @@ class RTC_EXPORT EncodedImage { return encoded_data_ ? encoded_data_->data() : nullptr; } + // Returns whether the encoded image can be considered to be of target + // quality. 
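Illustrative sketch (not part of the patch): this hunk gives EncodedFrame optional Timestamp-based accessors (ReceivedTimestamp()/RenderTimestamp()) and gives EncodedImage a temporal index plus the at-target-quality flag declared just below. A small sketch of how a caller might use them; the logging is a placeholder and any concrete EncodedFrame subclass works.

    #include "absl/types/optional.h"
    #include "api/units/timestamp.h"
    #include "api/video/encoded_frame.h"
    #include "api/video/encoded_image.h"
    #include "rtc_base/logging.h"

    // Prefer the optional Timestamp helpers over the raw int64 getters, which
    // signal "no value" with a negative number.
    void LogFrameTimes(const webrtc::EncodedFrame& frame) {
      if (absl::optional<webrtc::Timestamp> received = frame.ReceivedTimestamp())
        RTC_LOG(LS_INFO) << "received at " << received->ms() << " ms";
      if (absl::optional<webrtc::Timestamp> render = frame.RenderTimestamp())
        RTC_LOG(LS_INFO) << "render at " << render->ms() << " ms";
    }

    // Tag an encoded image with its temporal layer and whether the encoder
    // considered it to be at target quality.
    void TagImage(webrtc::EncodedImage& image, int temporal_idx, bool at_target) {
      image.SetTemporalIndex(temporal_idx);
      image.SetAtTargetQuality(at_target);
    }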
+ bool IsAtTargetQuality() const { return at_target_quality_; } + + // Sets that the encoded image can be considered to be of target quality to + // true or false. + void SetAtTargetQuality(bool at_target_quality) { + at_target_quality_ = at_target_quality; + } + uint32_t _encodedWidth = 0; uint32_t _encodedHeight = 0; // NTP time of the capture time in local timebase in milliseconds. @@ -189,6 +205,7 @@ class RTC_EXPORT EncodedImage { size_t size_ = 0; // Size of encoded frame data. uint32_t timestamp_rtp_ = 0; absl::optional spatial_index_; + absl::optional temporal_index_; std::map spatial_layer_frame_size_bytes_; absl::optional color_space_; // This field is meant for media quality testing purpose only. When enabled it @@ -200,6 +217,8 @@ class RTC_EXPORT EncodedImage { // https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-getcontributingsources RtpPacketInfos packet_infos_; bool retransmission_allowed_ = true; + // True if the encoded image can be considered to be of target quality. + bool at_target_quality_ = false; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/frame_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/frame_buffer.cc new file mode 100644 index 0000000000..4cdf2212a6 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video/frame_buffer.cc @@ -0,0 +1,280 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video/frame_buffer.h" + +#include + +#include "absl/algorithm/container.h" +#include "absl/container/inlined_vector.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/sequence_number_util.h" + +namespace webrtc { +namespace { +bool ValidReferences(const EncodedFrame& frame) { + // All references must point backwards, and duplicates are not allowed. + for (size_t i = 0; i < frame.num_references; ++i) { + if (frame.references[i] >= frame.Id()) + return false; + + for (size_t j = i + 1; j < frame.num_references; ++j) { + if (frame.references[i] == frame.references[j]) + return false; + } + } + + return true; +} + +// Since FrameBuffer::FrameInfo is private it can't be used in the function +// signature, hence the FrameIteratorT type. 
+template +rtc::ArrayView GetReferences(const FrameIteratorT& it) { + return {it->second.encoded_frame->references, + std::min(it->second.encoded_frame->num_references, + EncodedFrame::kMaxFrameReferences)}; +} + +template +int64_t GetFrameId(const FrameIteratorT& it) { + return it->first; +} + +template +uint32_t GetTimestamp(const FrameIteratorT& it) { + return it->second.encoded_frame->Timestamp(); +} + +template +bool IsLastFrameInTemporalUnit(const FrameIteratorT& it) { + return it->second.encoded_frame->is_last_spatial_layer; +} +} // namespace + +FrameBuffer::FrameBuffer(int max_size, + int max_decode_history, + const FieldTrialsView& field_trials) + : legacy_frame_id_jump_behavior_( + !field_trials.IsDisabled("WebRTC-LegacyFrameIdJumpBehavior")), + max_size_(max_size), + decoded_frame_history_(max_decode_history) {} + +bool FrameBuffer::InsertFrame(std::unique_ptr frame) { + if (!ValidReferences(*frame)) { + RTC_DLOG(LS_WARNING) << "Frame " << frame->Id() + << " has invalid references, dropping frame."; + return false; + } + + if (frame->Id() <= decoded_frame_history_.GetLastDecodedFrameId()) { + if (legacy_frame_id_jump_behavior_ && frame->is_keyframe() && + AheadOf(frame->Timestamp(), + *decoded_frame_history_.GetLastDecodedFrameTimestamp())) { + RTC_DLOG(LS_WARNING) + << "Keyframe " << frame->Id() + << " has newer timestamp but older picture id, clearing buffer."; + Clear(); + } else { + // Already decoded past this frame. + return false; + } + } + + if (frames_.size() == max_size_) { + if (frame->is_keyframe()) { + RTC_DLOG(LS_WARNING) << "Keyframe " << frame->Id() + << " inserted into full buffer, clearing buffer."; + Clear(); + } else { + // No space for this frame. + return false; + } + } + + const int64_t frame_id = frame->Id(); + auto insert_res = frames_.emplace(frame_id, FrameInfo{std::move(frame)}); + if (!insert_res.second) { + // Frame has already been inserted. 
+ return false; + } + + if (frames_.size() == max_size_) { + RTC_DLOG(LS_WARNING) << "Frame " << frame_id + << " inserted, buffer is now full."; + } + + PropagateContinuity(insert_res.first); + FindNextAndLastDecodableTemporalUnit(); + return true; +} + +absl::InlinedVector, 4> +FrameBuffer::ExtractNextDecodableTemporalUnit() { + absl::InlinedVector, 4> res; + if (!next_decodable_temporal_unit_) { + return res; + } + + auto end_it = std::next(next_decodable_temporal_unit_->last_frame); + for (auto it = next_decodable_temporal_unit_->first_frame; it != end_it; + ++it) { + decoded_frame_history_.InsertDecoded(GetFrameId(it), GetTimestamp(it)); + res.push_back(std::move(it->second.encoded_frame)); + } + + DropNextDecodableTemporalUnit(); + return res; +} + +void FrameBuffer::DropNextDecodableTemporalUnit() { + if (!next_decodable_temporal_unit_) { + return; + } + + auto end_it = std::next(next_decodable_temporal_unit_->last_frame); + num_dropped_frames_ += std::count_if( + frames_.begin(), end_it, + [](const auto& f) { return f.second.encoded_frame != nullptr; }); + + frames_.erase(frames_.begin(), end_it); + FindNextAndLastDecodableTemporalUnit(); +} + +absl::optional FrameBuffer::LastContinuousFrameId() const { + return last_continuous_frame_id_; +} + +absl::optional FrameBuffer::LastContinuousTemporalUnitFrameId() const { + return last_continuous_temporal_unit_frame_id_; +} + +absl::optional +FrameBuffer::DecodableTemporalUnitsInfo() const { + return decodable_temporal_units_info_; +} + +int FrameBuffer::GetTotalNumberOfContinuousTemporalUnits() const { + return num_continuous_temporal_units_; +} +int FrameBuffer::GetTotalNumberOfDroppedFrames() const { + return num_dropped_frames_; +} + +size_t FrameBuffer::CurrentSize() const { + return frames_.size(); +} + +bool FrameBuffer::IsContinuous(const FrameIterator& it) const { + for (int64_t reference : GetReferences(it)) { + if (decoded_frame_history_.WasDecoded(reference)) { + continue; + } + + auto reference_frame_it = frames_.find(reference); + if (reference_frame_it != frames_.end() && + reference_frame_it->second.continuous) { + continue; + } + + return false; + } + + return true; +} + +void FrameBuffer::PropagateContinuity(const FrameIterator& frame_it) { + for (auto it = frame_it; it != frames_.end(); ++it) { + if (!it->second.continuous) { + if (IsContinuous(it)) { + it->second.continuous = true; + if (last_continuous_frame_id_ < GetFrameId(it)) { + last_continuous_frame_id_ = GetFrameId(it); + } + if (IsLastFrameInTemporalUnit(it)) { + num_continuous_temporal_units_++; + if (last_continuous_temporal_unit_frame_id_ < GetFrameId(it)) { + last_continuous_temporal_unit_frame_id_ = GetFrameId(it); + } + } + } + } + } +} + +void FrameBuffer::FindNextAndLastDecodableTemporalUnit() { + next_decodable_temporal_unit_.reset(); + decodable_temporal_units_info_.reset(); + + if (!last_continuous_temporal_unit_frame_id_) { + return; + } + + FrameIterator first_frame_it = frames_.begin(); + FrameIterator last_frame_it = frames_.begin(); + absl::InlinedVector frames_in_temporal_unit; + uint32_t last_decodable_temporal_unit_timestamp; + for (auto frame_it = frames_.begin(); frame_it != frames_.end();) { + if (GetFrameId(frame_it) > *last_continuous_temporal_unit_frame_id_) { + break; + } + + if (GetTimestamp(frame_it) != GetTimestamp(first_frame_it)) { + frames_in_temporal_unit.clear(); + first_frame_it = frame_it; + } + + frames_in_temporal_unit.push_back(GetFrameId(frame_it)); + + last_frame_it = frame_it++; + + if 
(IsLastFrameInTemporalUnit(last_frame_it)) { + bool temporal_unit_decodable = true; + for (auto it = first_frame_it; it != frame_it && temporal_unit_decodable; + ++it) { + for (int64_t reference : GetReferences(it)) { + if (!decoded_frame_history_.WasDecoded(reference) && + !absl::c_linear_search(frames_in_temporal_unit, reference)) { + // A frame in the temporal unit has a non-decoded reference outside + // the temporal unit, so it's not yet ready to be decoded. + temporal_unit_decodable = false; + break; + } + } + } + + if (temporal_unit_decodable) { + if (!next_decodable_temporal_unit_) { + next_decodable_temporal_unit_ = {first_frame_it, last_frame_it}; + } + + last_decodable_temporal_unit_timestamp = GetTimestamp(first_frame_it); + } + } + } + + if (next_decodable_temporal_unit_) { + decodable_temporal_units_info_ = { + .next_rtp_timestamp = + GetTimestamp(next_decodable_temporal_unit_->first_frame), + .last_rtp_timestamp = last_decodable_temporal_unit_timestamp}; + } +} + +void FrameBuffer::Clear() { + frames_.clear(); + next_decodable_temporal_unit_.reset(); + decodable_temporal_units_info_.reset(); + last_continuous_frame_id_.reset(); + last_continuous_temporal_unit_frame_id_.reset(); + decoded_frame_history_.Clear(); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/frame_buffer.h b/TMessagesProj/jni/voip/webrtc/api/video/frame_buffer.h new file mode 100644 index 0000000000..94edf64d5a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video/frame_buffer.h @@ -0,0 +1,106 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_FRAME_BUFFER_H_ +#define API_VIDEO_FRAME_BUFFER_H_ + +#include +#include +#include + +#include "absl/container/inlined_vector.h" +#include "absl/types/optional.h" +#include "api/field_trials_view.h" +#include "api/video/encoded_frame.h" +#include "modules/video_coding/utility/decoded_frames_history.h" + +namespace webrtc { +// The high level idea of the FrameBuffer is to order frames received from the +// network into a decodable stream. Frames are order by frame ID, and grouped +// into temporal units by timestamp. A temporal unit is decodable after all +// referenced frames outside the unit has been decoded, and a temporal unit is +// continuous if all referenced frames are directly or indirectly decodable. +// The FrameBuffer is thread-unsafe. +class FrameBuffer { + public: + struct DecodabilityInfo { + uint32_t next_rtp_timestamp; + uint32_t last_rtp_timestamp; + }; + + // The `max_size` determines the maximum number of frames the buffer will + // store, and max_decode_history determines how far back (by frame ID) the + // buffer will store if a frame was decoded or not. + FrameBuffer(int max_size, + int max_decode_history, + // TODO(hta): remove field trials! + const FieldTrialsView& field_trials); + FrameBuffer(const FrameBuffer&) = delete; + FrameBuffer& operator=(const FrameBuffer&) = delete; + ~FrameBuffer() = default; + + // Inserted frames may only reference backwards, and must have no duplicate + // references. 
Frame insertion will fail if `frame` is a duplicate, has + // already been decoded, invalid, or if the buffer is full and the frame is + // not a keyframe. Returns true if the frame was successfully inserted. + bool InsertFrame(std::unique_ptr frame); + + // Mark all frames belonging to the next decodable temporal unit as decoded + // and returns them. + absl::InlinedVector, 4> + ExtractNextDecodableTemporalUnit(); + + // Drop all frames in the next decodable unit. + void DropNextDecodableTemporalUnit(); + + absl::optional LastContinuousFrameId() const; + absl::optional LastContinuousTemporalUnitFrameId() const; + absl::optional DecodableTemporalUnitsInfo() const; + + int GetTotalNumberOfContinuousTemporalUnits() const; + int GetTotalNumberOfDroppedFrames() const; + size_t CurrentSize() const; + + private: + struct FrameInfo { + std::unique_ptr encoded_frame; + bool continuous = false; + }; + + using FrameMap = std::map; + using FrameIterator = FrameMap::iterator; + + struct TemporalUnit { + // Both first and last are inclusive. + FrameIterator first_frame; + FrameIterator last_frame; + }; + + bool IsContinuous(const FrameIterator& it) const; + void PropagateContinuity(const FrameIterator& frame_it); + void FindNextAndLastDecodableTemporalUnit(); + void Clear(); + + const bool legacy_frame_id_jump_behavior_; + const size_t max_size_; + FrameMap frames_; + absl::optional next_decodable_temporal_unit_; + absl::optional decodable_temporal_units_info_; + absl::optional last_continuous_frame_id_; + absl::optional last_continuous_temporal_unit_frame_id_; + video_coding::DecodedFramesHistory decoded_frame_history_; + + int num_continuous_temporal_units_ = 0; + int num_dropped_frames_ = 0; +}; + +} // namespace webrtc + +#endif // API_VIDEO_FRAME_BUFFER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video/i010_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/i010_buffer.cc index b98e586562..32507febed 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/i010_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/i010_buffer.cc @@ -11,9 +11,9 @@ #include +#include "api/make_ref_counted.h" #include "api/video/i420_buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/ref_counted_object.h" #include "third_party/libyuv/include/libyuv/convert.h" #include "third_party/libyuv/include/libyuv/scale.h" diff --git a/TMessagesProj/jni/voip/webrtc/api/video/i210_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/i210_buffer.cc new file mode 100644 index 0000000000..d3cd68d366 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video/i210_buffer.cc @@ -0,0 +1,345 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/video/i210_buffer.h" + +#include + +#include "api/make_ref_counted.h" +#include "api/video/i420_buffer.h" +#include "api/video/i422_buffer.h" +#include "rtc_base/checks.h" +#include "third_party/libyuv/include/libyuv/convert.h" +#include "third_party/libyuv/include/libyuv/scale.h" + +// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD. 
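Illustrative sketch (not part of the patch): stepping back to the api/video/frame_buffer.{h,cc} files added above, the new FrameBuffer orders incoming EncodedFrames by frame ID and groups them into temporal units by RTP timestamp; it is explicitly thread-unsafe, so a caller drives it from a single sequence. A minimal sketch using only the declared API; constructing the buffer itself additionally needs a FieldTrialsView, e.g. FrameBuffer buffer(/*max_size=*/800, /*max_decode_history=*/4096, field_trials), where the sizes here are placeholders.

    #include <memory>
    #include <utility>

    #include "api/video/encoded_frame.h"
    #include "api/video/frame_buffer.h"

    // Insert one frame and, whenever a temporal unit becomes decodable, pass
    // all of its frames to `decode_one` in decode order.
    template <typename DecodeFn>
    void InsertAndMaybeDecode(webrtc::FrameBuffer& buffer,
                              std::unique_ptr<webrtc::EncodedFrame> frame,
                              DecodeFn&& decode_one) {
      if (!buffer.InsertFrame(std::move(frame)))
        return;  // Duplicate, already decoded, invalid, or buffer full.

      // ExtractNextDecodableTemporalUnit() also marks the frames as decoded in
      // the buffer's history, which may unblock further units.
      while (buffer.DecodableTemporalUnitsInfo()) {
        for (auto& decodable : buffer.ExtractNextDecodableTemporalUnit())
          decode_one(std::move(decodable));
      }
    }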
+static const int kBufferAlignment = 64; +static const int kBytesPerPixel = 2; + +namespace webrtc { + +namespace { + +int I210DataSize(int height, int stride_y, int stride_u, int stride_v) { + return kBytesPerPixel * + (stride_y * height + stride_u * height + stride_v * height); +} + +void webrtcRotatePlane90_16(const uint16_t* src, + int src_stride, + uint16_t* dst, + int dst_stride, + int width, + int height) { + for (int x = 0; x < width; x++) { + for (int y = 0; y < height; y++) { + int dest_x = height - y - 1; + int dest_y = x; + dst[dest_x + dst_stride * dest_y] = src[x + src_stride * y]; + } + } +} + +void webrtcRotatePlane180_16(const uint16_t* src, + int src_stride, + uint16_t* dst, + int dst_stride, + int width, + int height) { + for (int x = 0; x < width; x++) { + for (int y = 0; y < height; y++) { + int dest_x = width - x - 1; + int dest_y = height - y - 1; + dst[dest_x + dst_stride * dest_y] = src[x + src_stride * y]; + } + } +} + +void webrtcRotatePlane270_16(const uint16_t* src, + int src_stride, + uint16_t* dst, + int dst_stride, + int width, + int height) { + for (int x = 0; x < width; x++) { + for (int y = 0; y < height; y++) { + int dest_x = y; + int dest_y = width - x - 1; + dst[dest_x + dst_stride * dest_y] = src[x + src_stride * y]; + } + } +} + +// TODO(sergio.garcia.murillo@gmail.com): Remove as soon it is available in +// libyuv. Due to the rotate&scale required, this function may not be merged in +// to libyuv inmediatelly. +// https://bugs.chromium.org/p/libyuv/issues/detail?id=926 +// This method assumes continuous allocation of the y-plane, possibly clobbering +// any padding between pixel rows. +int webrtcI210Rotate(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint16_t* dst_y, + int dst_stride_y, + uint16_t* dst_u, + int dst_stride_u, + uint16_t* dst_v, + int dst_stride_v, + int width, + int height, + enum libyuv::RotationMode mode) { + int halfwidth = (width + 1) >> 1; + int halfheight = (height + 1) >> 1; + if (!src_y || !src_u || !src_v || width <= 0 || height == 0 || !dst_y || + !dst_u || !dst_v || dst_stride_y < 0) { + return -1; + } + // Negative height means invert the image. + if (height < 0) { + height = -height; + src_y = src_y + (height - 1) * src_stride_y; + src_u = src_u + (height - 1) * src_stride_u; + src_v = src_v + (height - 1) * src_stride_v; + src_stride_y = -src_stride_y; + src_stride_u = -src_stride_u; + src_stride_v = -src_stride_v; + } + + switch (mode) { + case libyuv::kRotate0: + // copy frame + libyuv::CopyPlane_16(src_y, src_stride_y, dst_y, dst_stride_y, width, + height); + libyuv::CopyPlane_16(src_u, src_stride_u, dst_u, dst_stride_u, halfwidth, + height); + libyuv::CopyPlane_16(src_v, src_stride_v, dst_v, dst_stride_v, halfwidth, + height); + return 0; + case libyuv::kRotate90: + // We need to rotate and rescale, we use plane Y as temporal storage. 
+ webrtcRotatePlane90_16(src_u, src_stride_u, dst_y, height, halfwidth, + height); + libyuv::ScalePlane_16(dst_y, height, height, halfwidth, dst_u, halfheight, + halfheight, width, libyuv::kFilterBilinear); + webrtcRotatePlane90_16(src_v, src_stride_v, dst_y, height, halfwidth, + height); + libyuv::ScalePlane_16(dst_y, height, height, halfwidth, dst_v, halfheight, + halfheight, width, libyuv::kFilterLinear); + webrtcRotatePlane90_16(src_y, src_stride_y, dst_y, dst_stride_y, width, + height); + return 0; + case libyuv::kRotate270: + // We need to rotate and rescale, we use plane Y as temporal storage. + webrtcRotatePlane270_16(src_u, src_stride_u, dst_y, height, halfwidth, + height); + libyuv::ScalePlane_16(dst_y, height, height, halfwidth, dst_u, halfheight, + halfheight, width, libyuv::kFilterBilinear); + webrtcRotatePlane270_16(src_v, src_stride_v, dst_y, height, halfwidth, + height); + libyuv::ScalePlane_16(dst_y, height, height, halfwidth, dst_v, halfheight, + halfheight, width, libyuv::kFilterLinear); + webrtcRotatePlane270_16(src_y, src_stride_y, dst_y, dst_stride_y, width, + height); + + return 0; + case libyuv::kRotate180: + webrtcRotatePlane180_16(src_y, src_stride_y, dst_y, dst_stride_y, width, + height); + webrtcRotatePlane180_16(src_u, src_stride_u, dst_u, dst_stride_u, + halfwidth, height); + webrtcRotatePlane180_16(src_v, src_stride_v, dst_v, dst_stride_v, + halfwidth, height); + return 0; + default: + break; + } + return -1; +} + +} // namespace + +I210Buffer::I210Buffer(int width, + int height, + int stride_y, + int stride_u, + int stride_v) + : width_(width), + height_(height), + stride_y_(stride_y), + stride_u_(stride_u), + stride_v_(stride_v), + data_(static_cast( + AlignedMalloc(I210DataSize(height, stride_y, stride_u, stride_v), + kBufferAlignment))) { + RTC_DCHECK_GT(width, 0); + RTC_DCHECK_GT(height, 0); + RTC_DCHECK_GE(stride_y, width); + RTC_DCHECK_GE(stride_u, (width + 1) / 2); + RTC_DCHECK_GE(stride_v, (width + 1) / 2); +} + +I210Buffer::~I210Buffer() {} + +// static +rtc::scoped_refptr I210Buffer::Create(int width, int height) { + return rtc::make_ref_counted(width, height, width, + (width + 1) / 2, (width + 1) / 2); +} + +// static +rtc::scoped_refptr I210Buffer::Copy( + const I210BufferInterface& source) { + const int width = source.width(); + const int height = source.height(); + rtc::scoped_refptr buffer = Create(width, height); + RTC_CHECK_EQ( + 0, libyuv::I210Copy( + source.DataY(), source.StrideY(), source.DataU(), source.StrideU(), + source.DataV(), source.StrideV(), buffer->MutableDataY(), + buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(), + buffer->MutableDataV(), buffer->StrideV(), width, height)); + return buffer; +} + +// static +rtc::scoped_refptr I210Buffer::Copy( + const I420BufferInterface& source) { + const int width = source.width(); + const int height = source.height(); + auto i422buffer = I422Buffer::Copy(source); + rtc::scoped_refptr buffer = Create(width, height); + RTC_CHECK_EQ(0, libyuv::I422ToI210(i422buffer->DataY(), i422buffer->StrideY(), + i422buffer->DataU(), i422buffer->StrideU(), + i422buffer->DataV(), i422buffer->StrideV(), + buffer->MutableDataY(), buffer->StrideY(), + buffer->MutableDataU(), buffer->StrideU(), + buffer->MutableDataV(), buffer->StrideV(), + width, height)); + return buffer; +} + +// static +rtc::scoped_refptr I210Buffer::Rotate( + const I210BufferInterface& src, + VideoRotation rotation) { + RTC_CHECK(src.DataY()); + RTC_CHECK(src.DataU()); + RTC_CHECK(src.DataV()); + + int rotated_width = 
src.width(); + int rotated_height = src.height(); + if (rotation == webrtc::kVideoRotation_90 || + rotation == webrtc::kVideoRotation_270) { + std::swap(rotated_width, rotated_height); + } + + rtc::scoped_refptr buffer = + I210Buffer::Create(rotated_width, rotated_height); + + RTC_CHECK_EQ(0, + webrtcI210Rotate( + src.DataY(), src.StrideY(), src.DataU(), src.StrideU(), + src.DataV(), src.StrideV(), buffer->MutableDataY(), + buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(), + buffer->MutableDataV(), buffer->StrideV(), src.width(), + src.height(), static_cast(rotation))); + + return buffer; +} + +rtc::scoped_refptr I210Buffer::ToI420() { + rtc::scoped_refptr i420_buffer = + I420Buffer::Create(width(), height()); + libyuv::I210ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), + i420_buffer->MutableDataY(), i420_buffer->StrideY(), + i420_buffer->MutableDataU(), i420_buffer->StrideU(), + i420_buffer->MutableDataV(), i420_buffer->StrideV(), + width(), height()); + return i420_buffer; +} + +int I210Buffer::width() const { + return width_; +} + +int I210Buffer::height() const { + return height_; +} + +const uint16_t* I210Buffer::DataY() const { + return data_.get(); +} +const uint16_t* I210Buffer::DataU() const { + return data_.get() + stride_y_ * height_; +} +const uint16_t* I210Buffer::DataV() const { + return data_.get() + stride_y_ * height_ + stride_u_ * height_; +} + +int I210Buffer::StrideY() const { + return stride_y_; +} +int I210Buffer::StrideU() const { + return stride_u_; +} +int I210Buffer::StrideV() const { + return stride_v_; +} + +uint16_t* I210Buffer::MutableDataY() { + return const_cast(DataY()); +} +uint16_t* I210Buffer::MutableDataU() { + return const_cast(DataU()); +} +uint16_t* I210Buffer::MutableDataV() { + return const_cast(DataV()); +} + +void I210Buffer::CropAndScaleFrom(const I210BufferInterface& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height) { + RTC_CHECK_LE(crop_width, src.width()); + RTC_CHECK_LE(crop_height, src.height()); + RTC_CHECK_LE(crop_width + offset_x, src.width()); + RTC_CHECK_LE(crop_height + offset_y, src.height()); + RTC_CHECK_GE(offset_x, 0); + RTC_CHECK_GE(offset_y, 0); + RTC_CHECK_GE(crop_width, 0); + RTC_CHECK_GE(crop_height, 0); + + // Make sure offset is even so that u/v plane becomes aligned. + const int uv_offset_x = offset_x / 2; + const int uv_offset_y = offset_y; + offset_x = uv_offset_x * 2; + + const uint16_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x; + const uint16_t* u_plane = + src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x; + const uint16_t* v_plane = + src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x; + int res = libyuv::I422Scale_16( + y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane, src.StrideV(), + crop_width, crop_height, MutableDataY(), StrideY(), MutableDataU(), + StrideU(), MutableDataV(), StrideV(), width(), height(), + libyuv::kFilterBox); + + RTC_DCHECK_EQ(res, 0); +} + +void I210Buffer::ScaleFrom(const I210BufferInterface& src) { + CropAndScaleFrom(src, 0, 0, src.width(), src.height()); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/i210_buffer.h b/TMessagesProj/jni/voip/webrtc/api/video/i210_buffer.h new file mode 100644 index 0000000000..e3b6452b95 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video/i210_buffer.h @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_I210_BUFFER_H_ +#define API_VIDEO_I210_BUFFER_H_ + +#include + +#include + +#include "api/scoped_refptr.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" +#include "rtc_base/memory/aligned_malloc.h" + +namespace webrtc { + +// Plain I210 (yuv 422 planar 10 bits) buffer in standard memory. +class I210Buffer : public I210BufferInterface { + public: + // Create a new buffer. + static rtc::scoped_refptr Create(int width, int height); + + // Create a new buffer and copy the pixel data. + static rtc::scoped_refptr Copy(const I210BufferInterface& buffer); + + // Convert and put I420 buffer into a new buffer. + static rtc::scoped_refptr Copy(const I420BufferInterface& buffer); + + // Return a rotated copy of `src`. + static rtc::scoped_refptr Rotate(const I210BufferInterface& src, + VideoRotation rotation); + + // VideoFrameBuffer implementation. + rtc::scoped_refptr ToI420() override; + + // PlanarYuv16BBuffer implementation. + int width() const override; + int height() const override; + const uint16_t* DataY() const override; + const uint16_t* DataU() const override; + const uint16_t* DataV() const override; + int StrideY() const override; + int StrideU() const override; + int StrideV() const override; + + uint16_t* MutableDataY(); + uint16_t* MutableDataU(); + uint16_t* MutableDataV(); + + // Scale the cropped area of `src` to the size of `this` buffer, and + // write the result into `this`. + void CropAndScaleFrom(const I210BufferInterface& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height); + + // Scale all of `src` to the size of `this` buffer, with no cropping. + void ScaleFrom(const I210BufferInterface& src); + + protected: + I210Buffer(int width, int height, int stride_y, int stride_u, int stride_v); + ~I210Buffer() override; + + private: + const int width_; + const int height_; + const int stride_y_; + const int stride_u_; + const int stride_v_; + const std::unique_ptr data_; +}; + +} // namespace webrtc + +#endif // API_VIDEO_I210_BUFFER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.cc index deecf1d71d..bf7fc06ee9 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.cc @@ -14,8 +14,8 @@ #include #include +#include "api/make_ref_counted.h" #include "rtc_base/checks.h" -#include "rtc_base/ref_counted_object.h" #include "third_party/libyuv/include/libyuv/convert.h" #include "third_party/libyuv/include/libyuv/planar_functions.h" #include "third_party/libyuv/include/libyuv/scale.h" diff --git a/TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.h b/TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.h index af52c64fb4..b337489657 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.h @@ -65,8 +65,8 @@ class RTC_EXPORT I420Buffer : public I420BufferInterface { // quirks in memory checkers // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and // ffmpeg (http://crbug.com/390941). - // TODO(nisse): Deprecated. 
Should be deleted if/when those issues - // are resolved in a better way. Or in the mean time, use SetBlack. + // TODO(https://crbug.com/390941): Deprecated. Should be deleted if/when those + // issues are resolved in a better way. Or in the mean time, use SetBlack. void InitializeData(); int width() const override; diff --git a/TMessagesProj/jni/voip/webrtc/api/video/i422_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/i422_buffer.cc new file mode 100644 index 0000000000..d30580d6fb --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video/i422_buffer.cc @@ -0,0 +1,234 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/video/i422_buffer.h" + +#include + +#include +#include + +#include "api/make_ref_counted.h" +#include "api/video/i420_buffer.h" +#include "rtc_base/checks.h" +#include "third_party/libyuv/include/libyuv/convert.h" +#include "third_party/libyuv/include/libyuv/planar_functions.h" +#include "third_party/libyuv/include/libyuv/scale.h" + +// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD. +static const int kBufferAlignment = 64; + +namespace webrtc { + +namespace { + +int I422DataSize(int height, int stride_y, int stride_u, int stride_v) { + return stride_y * height + stride_u * height + stride_v * height; +} +} // namespace + +I422Buffer::I422Buffer(int width, int height) + : I422Buffer(width, height, width, (width + 1) / 2, (width + 1) / 2) {} + +I422Buffer::I422Buffer(int width, + int height, + int stride_y, + int stride_u, + int stride_v) + : width_(width), + height_(height), + stride_y_(stride_y), + stride_u_(stride_u), + stride_v_(stride_v), + data_(static_cast( + AlignedMalloc(I422DataSize(height, stride_y, stride_u, stride_v), + kBufferAlignment))) { + RTC_DCHECK_GT(width, 0); + RTC_DCHECK_GT(height, 0); + RTC_DCHECK_GE(stride_y, width); + RTC_DCHECK_GE(stride_u, (width + 1) / 2); + RTC_DCHECK_GE(stride_v, (width + 1) / 2); +} + +I422Buffer::~I422Buffer() {} + +// static +rtc::scoped_refptr I422Buffer::Create(int width, int height) { + return rtc::make_ref_counted(width, height); +} + +// static +rtc::scoped_refptr I422Buffer::Create(int width, + int height, + int stride_y, + int stride_u, + int stride_v) { + return rtc::make_ref_counted(width, height, stride_y, stride_u, + stride_v); +} + +// static +rtc::scoped_refptr I422Buffer::Copy( + const I422BufferInterface& source) { + return Copy(source.width(), source.height(), source.DataY(), source.StrideY(), + source.DataU(), source.StrideU(), source.DataV(), + source.StrideV()); +} + +// static +rtc::scoped_refptr I422Buffer::Copy( + const I420BufferInterface& source) { + const int width = source.width(); + const int height = source.height(); + rtc::scoped_refptr buffer = Create(width, height); + RTC_CHECK_EQ( + 0, libyuv::I420ToI422( + source.DataY(), source.StrideY(), source.DataU(), source.StrideU(), + source.DataV(), source.StrideV(), buffer->MutableDataY(), + buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(), + buffer->MutableDataV(), buffer->StrideV(), width, height)); + return buffer; +} + +// static +rtc::scoped_refptr I422Buffer::Copy(int width, + int height, + const uint8_t* data_y, + int stride_y, + 
const uint8_t* data_u, + int stride_u, + const uint8_t* data_v, + int stride_v) { + // Note: May use different strides than the input data. + rtc::scoped_refptr buffer = Create(width, height); + RTC_CHECK_EQ(0, libyuv::I422Copy(data_y, stride_y, data_u, stride_u, data_v, + stride_v, buffer->MutableDataY(), + buffer->StrideY(), buffer->MutableDataU(), + buffer->StrideU(), buffer->MutableDataV(), + buffer->StrideV(), width, height)); + return buffer; +} + +// static +rtc::scoped_refptr I422Buffer::Rotate( + const I422BufferInterface& src, + VideoRotation rotation) { + RTC_CHECK(src.DataY()); + RTC_CHECK(src.DataU()); + RTC_CHECK(src.DataV()); + + int rotated_width = src.width(); + int rotated_height = src.height(); + if (rotation == webrtc::kVideoRotation_90 || + rotation == webrtc::kVideoRotation_270) { + std::swap(rotated_width, rotated_height); + } + + rtc::scoped_refptr buffer = + I422Buffer::Create(rotated_width, rotated_height); + + RTC_CHECK_EQ(0, + libyuv::I422Rotate( + src.DataY(), src.StrideY(), src.DataU(), src.StrideU(), + src.DataV(), src.StrideV(), buffer->MutableDataY(), + buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(), + buffer->MutableDataV(), buffer->StrideV(), src.width(), + src.height(), static_cast(rotation))); + + return buffer; +} + +rtc::scoped_refptr I422Buffer::ToI420() { + rtc::scoped_refptr i420_buffer = + I420Buffer::Create(width(), height()); + libyuv::I422ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), + i420_buffer->MutableDataY(), i420_buffer->StrideY(), + i420_buffer->MutableDataU(), i420_buffer->StrideU(), + i420_buffer->MutableDataV(), i420_buffer->StrideV(), + width(), height()); + return i420_buffer; +} + +void I422Buffer::InitializeData() { + memset(data_.get(), 0, + I422DataSize(height_, stride_y_, stride_u_, stride_v_)); +} + +int I422Buffer::width() const { + return width_; +} + +int I422Buffer::height() const { + return height_; +} + +const uint8_t* I422Buffer::DataY() const { + return data_.get(); +} +const uint8_t* I422Buffer::DataU() const { + return data_.get() + stride_y_ * height_; +} +const uint8_t* I422Buffer::DataV() const { + return data_.get() + stride_y_ * height_ + stride_u_ * height_; +} + +int I422Buffer::StrideY() const { + return stride_y_; +} +int I422Buffer::StrideU() const { + return stride_u_; +} +int I422Buffer::StrideV() const { + return stride_v_; +} + +uint8_t* I422Buffer::MutableDataY() { + return const_cast(DataY()); +} +uint8_t* I422Buffer::MutableDataU() { + return const_cast(DataU()); +} +uint8_t* I422Buffer::MutableDataV() { + return const_cast(DataV()); +} + +void I422Buffer::CropAndScaleFrom(const I422BufferInterface& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height) { + RTC_CHECK_LE(crop_width, src.width()); + RTC_CHECK_LE(crop_height, src.height()); + RTC_CHECK_LE(crop_width + offset_x, src.width()); + RTC_CHECK_LE(crop_height + offset_y, src.height()); + RTC_CHECK_GE(offset_x, 0); + RTC_CHECK_GE(offset_y, 0); + + // Make sure offset is even so that u/v plane becomes aligned. 
+ const int uv_offset_x = offset_x / 2; + const int uv_offset_y = offset_y; + offset_x = uv_offset_x * 2; + + const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x; + const uint8_t* u_plane = + src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x; + const uint8_t* v_plane = + src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x; + + int res = + //TODO no member named 'I422Scale' in namespace + libyuv::I420Scale(y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane, + src.StrideV(), crop_width, crop_height, MutableDataY(), + StrideY(), MutableDataU(), StrideU(), MutableDataV(), + StrideV(), width(), height(), libyuv::kFilterBox); + + RTC_DCHECK_EQ(res, 0); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/i422_buffer.h b/TMessagesProj/jni/voip/webrtc/api/video/i422_buffer.h new file mode 100644 index 0000000000..600b4ecea7 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video/i422_buffer.h @@ -0,0 +1,114 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_I422_BUFFER_H_ +#define API_VIDEO_I422_BUFFER_H_ + +#include + +#include + +#include "api/scoped_refptr.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" +#include "rtc_base/memory/aligned_malloc.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Plain I422 buffer in standard memory. +class RTC_EXPORT I422Buffer : public I422BufferInterface { + public: + static rtc::scoped_refptr Create(int width, int height); + static rtc::scoped_refptr Create(int width, + int height, + int stride_y, + int stride_u, + int stride_v); + + // Create a new buffer and copy the pixel data. + static rtc::scoped_refptr Copy(const I422BufferInterface& buffer); + /// Convert and put I420 buffer into a new buffer. + static rtc::scoped_refptr Copy(const I420BufferInterface& buffer); + + static rtc::scoped_refptr Copy(int width, + int height, + const uint8_t* data_y, + int stride_y, + const uint8_t* data_u, + int stride_u, + const uint8_t* data_v, + int stride_v); + + // Returns a rotated copy of `src`. + static rtc::scoped_refptr Rotate(const I422BufferInterface& src, + VideoRotation rotation); + + rtc::scoped_refptr ToI420() final; + const I420BufferInterface* GetI420() const final { return nullptr; } + + // Sets the buffer to all black. + static void SetBlack(I422Buffer* buffer); + + // Sets all three planes to all zeros. Used to work around for + // quirks in memory checkers + // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and + // ffmpeg (http://crbug.com/390941). + // TODO(https://crbug.com/390941): Deprecated. Should be deleted if/when those + // issues are resolved in a better way. Or in the mean time, use SetBlack. 
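Illustrative sketch (not part of the patch): the new I422Buffer above (like the I210Buffer and I444Buffer added alongside it) mirrors I420Buffer's shape, with static Create/Copy/Rotate factories plus ToI420() for consumers that only handle 4:2:0. A small sketch using only those factories; note the TODO in the .cc above, where CropAndScaleFrom() currently substitutes libyuv::I420Scale because I422Scale was not available in the bundled libyuv, so cropped scaling of the chroma planes is only an approximation for now.

    #include <cstdint>

    #include "api/scoped_refptr.h"
    #include "api/video/i422_buffer.h"
    #include "api/video/video_rotation.h"

    // Wrap raw 4:2:2 planes, rotate them 90 degrees, and convert to I420.
    rtc::scoped_refptr<webrtc::I420BufferInterface> RotateAndDownsample(
        int width, int height,
        const uint8_t* y, int stride_y,
        const uint8_t* u, int stride_u,
        const uint8_t* v, int stride_v) {
      rtc::scoped_refptr<webrtc::I422Buffer> i422 = webrtc::I422Buffer::Copy(
          width, height, y, stride_y, u, stride_u, v, stride_v);
      rtc::scoped_refptr<webrtc::I422Buffer> rotated =
          webrtc::I422Buffer::Rotate(*i422, webrtc::kVideoRotation_90);
      return rotated->ToI420();
    }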
+ void InitializeData(); + + int width() const override; + int height() const override; + const uint8_t* DataY() const override; + const uint8_t* DataU() const override; + const uint8_t* DataV() const override; + + int StrideY() const override; + int StrideU() const override; + int StrideV() const override; + + uint8_t* MutableDataY(); + uint8_t* MutableDataU(); + uint8_t* MutableDataV(); + + // Scale the cropped area of `src` to the size of `this` buffer, and + // write the result into `this`. + void CropAndScaleFrom(const I422BufferInterface& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height); + + // The common case of a center crop, when needed to adjust the + // aspect ratio without distorting the image. + void CropAndScaleFrom(const I422BufferInterface& src); + + // Scale all of `src` to the size of `this` buffer, with no cropping. + void ScaleFrom(const I422BufferInterface& src); + + protected: + I422Buffer(int width, int height); + I422Buffer(int width, int height, int stride_y, int stride_u, int stride_v); + + ~I422Buffer() override; + + private: + const int width_; + const int height_; + const int stride_y_; + const int stride_u_; + const int stride_v_; + const std::unique_ptr data_; +}; + +} // namespace webrtc + +#endif // API_VIDEO_I422_BUFFER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.cc new file mode 100644 index 0000000000..98e892308f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.cc @@ -0,0 +1,211 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/video/i444_buffer.h" + +#include + +#include +#include + +#include "api/make_ref_counted.h" +#include "api/video/i420_buffer.h" +#include "rtc_base/checks.h" +#include "third_party/libyuv/include/libyuv/convert.h" +#include "third_party/libyuv/include/libyuv/planar_functions.h" +#include "third_party/libyuv/include/libyuv/scale.h" + +// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD. 
+static const int kBufferAlignment = 64; + +namespace webrtc { + +namespace { + +int I444DataSize(int height, int stride_y, int stride_u, int stride_v) { + return stride_y * height + stride_u * height + stride_v * height; +} + +} // namespace + +I444Buffer::I444Buffer(int width, int height) + : I444Buffer(width, height, width, (width), (width)) {} + +I444Buffer::I444Buffer(int width, + int height, + int stride_y, + int stride_u, + int stride_v) + : width_(width), + height_(height), + stride_y_(stride_y), + stride_u_(stride_u), + stride_v_(stride_v), + data_(static_cast( + AlignedMalloc(I444DataSize(height, stride_y, stride_u, stride_v), + kBufferAlignment))) { + RTC_DCHECK_GT(width, 0); + RTC_DCHECK_GT(height, 0); + RTC_DCHECK_GE(stride_y, width); + RTC_DCHECK_GE(stride_u, (width)); + RTC_DCHECK_GE(stride_v, (width)); +} + +I444Buffer::~I444Buffer() {} + +// static +rtc::scoped_refptr I444Buffer::Create(int width, int height) { + return rtc::make_ref_counted(width, height); +} + +// static +rtc::scoped_refptr I444Buffer::Create(int width, + int height, + int stride_y, + int stride_u, + int stride_v) { + return rtc::make_ref_counted(width, height, stride_y, stride_u, + stride_v); +} + +// static +rtc::scoped_refptr I444Buffer::Copy( + const I444BufferInterface& source) { + return Copy(source.width(), source.height(), source.DataY(), source.StrideY(), + source.DataU(), source.StrideU(), source.DataV(), + source.StrideV()); +} + +// static +rtc::scoped_refptr I444Buffer::Copy(int width, + int height, + const uint8_t* data_y, + int stride_y, + const uint8_t* data_u, + int stride_u, + const uint8_t* data_v, + int stride_v) { + // Note: May use different strides than the input data. + rtc::scoped_refptr buffer = Create(width, height); + RTC_CHECK_EQ(0, libyuv::I444Copy(data_y, stride_y, data_u, stride_u, data_v, + stride_v, buffer->MutableDataY(), + buffer->StrideY(), buffer->MutableDataU(), + buffer->StrideU(), buffer->MutableDataV(), + buffer->StrideV(), width, height)); + return buffer; +} + +// static +rtc::scoped_refptr I444Buffer::Rotate( + const I444BufferInterface& src, + VideoRotation rotation) { + RTC_CHECK(src.DataY()); + RTC_CHECK(src.DataU()); + RTC_CHECK(src.DataV()); + + int rotated_width = src.width(); + int rotated_height = src.height(); + if (rotation == webrtc::kVideoRotation_90 || + rotation == webrtc::kVideoRotation_270) { + std::swap(rotated_width, rotated_height); + } + + rtc::scoped_refptr buffer = + I444Buffer::Create(rotated_width, rotated_height); + + RTC_CHECK_EQ(0, + libyuv::I444Rotate( + src.DataY(), src.StrideY(), src.DataU(), src.StrideU(), + src.DataV(), src.StrideV(), buffer->MutableDataY(), + buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(), + buffer->MutableDataV(), buffer->StrideV(), src.width(), + src.height(), static_cast(rotation))); + + return buffer; +} + +rtc::scoped_refptr I444Buffer::ToI420() { + rtc::scoped_refptr i420_buffer = + I420Buffer::Create(width(), height()); + libyuv::I444ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), + i420_buffer->MutableDataY(), i420_buffer->StrideY(), + i420_buffer->MutableDataU(), i420_buffer->StrideU(), + i420_buffer->MutableDataV(), i420_buffer->StrideV(), + width(), height()); + return i420_buffer; +} + +void I444Buffer::InitializeData() { + memset(data_.get(), 0, + I444DataSize(height_, stride_y_, stride_u_, stride_v_)); +} + +int I444Buffer::width() const { + return width_; +} + +int I444Buffer::height() const { + return height_; +} + +const uint8_t* I444Buffer::DataY() const { + 
return data_.get(); +} +const uint8_t* I444Buffer::DataU() const { + return data_.get() + stride_y_ * height_; +} +const uint8_t* I444Buffer::DataV() const { + return data_.get() + stride_y_ * height_ + stride_u_ * ((height_)); +} + +int I444Buffer::StrideY() const { + return stride_y_; +} +int I444Buffer::StrideU() const { + return stride_u_; +} +int I444Buffer::StrideV() const { + return stride_v_; +} + +uint8_t* I444Buffer::MutableDataY() { + return const_cast(DataY()); +} +uint8_t* I444Buffer::MutableDataU() { + return const_cast(DataU()); +} +uint8_t* I444Buffer::MutableDataV() { + return const_cast(DataV()); +} + +void I444Buffer::CropAndScaleFrom(const I444BufferInterface& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height) { + RTC_CHECK_LE(crop_width, src.width()); + RTC_CHECK_LE(crop_height, src.height()); + RTC_CHECK_LE(crop_width + offset_x, src.width()); + RTC_CHECK_LE(crop_height + offset_y, src.height()); + RTC_CHECK_GE(offset_x, 0); + RTC_CHECK_GE(offset_y, 0); + + const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x; + const uint8_t* u_plane = src.DataU() + src.StrideU() * offset_y + offset_x; + const uint8_t* v_plane = src.DataV() + src.StrideV() * offset_y + offset_x; + int res = + libyuv::I444Scale(y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane, + src.StrideV(), crop_width, crop_height, MutableDataY(), + StrideY(), MutableDataU(), StrideU(), MutableDataV(), + StrideV(), width(), height(), libyuv::kFilterBox); + + RTC_DCHECK_EQ(res, 0); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.h b/TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.h new file mode 100644 index 0000000000..f1e3f63114 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.h @@ -0,0 +1,104 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_I444_BUFFER_H_ +#define API_VIDEO_I444_BUFFER_H_ + +#include + +#include + +#include "api/scoped_refptr.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" +#include "rtc_base/memory/aligned_malloc.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Plain I444 buffer in standard memory. +// I444 represents an image with in YUV format withouth any chroma subsampling. +// https://en.wikipedia.org/wiki/Chroma_subsampling#4:4:4 +class RTC_EXPORT I444Buffer : public I444BufferInterface { + public: + static rtc::scoped_refptr Create(int width, int height); + static rtc::scoped_refptr Create(int width, + int height, + int stride_y, + int stride_u, + int stride_v); + + // Create a new buffer and copy the pixel data. + static rtc::scoped_refptr Copy(const I444BufferInterface& buffer); + + static rtc::scoped_refptr Copy(int width, + int height, + const uint8_t* data_y, + int stride_y, + const uint8_t* data_u, + int stride_u, + const uint8_t* data_v, + int stride_v); + + // Returns a rotated copy of |src|. 
+ static rtc::scoped_refptr Rotate(const I444BufferInterface& src, + VideoRotation rotation); + + rtc::scoped_refptr ToI420() final; + const I420BufferInterface* GetI420() const final { return nullptr; } + + // Sets all three planes to all zeros. Used to work around for + // quirks in memory checkers + // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and + // ffmpeg (http://crbug.com/390941). + // TODO(https://crbug.com/390941): Deprecated. Should be deleted if/when those + // issues are resolved in a better way. Or in the mean time, use SetBlack. + void InitializeData(); + + int width() const override; + int height() const override; + const uint8_t* DataY() const override; + const uint8_t* DataU() const override; + const uint8_t* DataV() const override; + + int StrideY() const override; + int StrideU() const override; + int StrideV() const override; + + uint8_t* MutableDataY(); + uint8_t* MutableDataU(); + uint8_t* MutableDataV(); + + // Scale the cropped area of |src| to the size of |this| buffer, and + // write the result into |this|. + void CropAndScaleFrom(const I444BufferInterface& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height); + + protected: + I444Buffer(int width, int height); + I444Buffer(int width, int height, int stride_y, int stride_u, int stride_v); + + ~I444Buffer() override; + + private: + const int width_; + const int height_; + const int stride_y_; + const int stride_u_; + const int stride_v_; + const std::unique_ptr data_; +}; + +} // namespace webrtc + +#endif // API_VIDEO_I444_BUFFER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.cc index 37d688b88b..ca9dcd8677 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.cc @@ -10,9 +10,9 @@ #include "api/video/nv12_buffer.h" +#include "api/make_ref_counted.h" #include "api/video/i420_buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/ref_counted_object.h" #include "third_party/libyuv/include/libyuv/convert.h" #include "third_party/libyuv/include/libyuv/scale.h" diff --git a/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.h b/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.h index 7baef2aeba..46a85f82e1 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.h @@ -52,8 +52,8 @@ class RTC_EXPORT NV12Buffer : public NV12BufferInterface { // quirks in memory checkers // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and // ffmpeg (http://crbug.com/390941). - // TODO(nisse): Deprecated. Should be deleted if/when those issues - // are resolved in a better way. Or in the mean time, use SetBlack. + // TODO(https://crbug.com/390941): Deprecated. Should be deleted if/when those + // issues are resolved in a better way. Or in the mean time, use SetBlack. 
void InitializeData(); // Scale the cropped area of `src` to the size of `this` buffer, and diff --git a/TMessagesProj/jni/voip/webrtc/api/video/recordable_encoded_frame.h b/TMessagesProj/jni/voip/webrtc/api/video/recordable_encoded_frame.h index b4ad83a344..47ea23f119 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/recordable_encoded_frame.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/recordable_encoded_frame.h @@ -17,7 +17,6 @@ #include "api/video/color_space.h" #include "api/video/encoded_image.h" #include "api/video/video_codec_type.h" -#include "rtc_base/ref_count.h" namespace webrtc { @@ -25,6 +24,7 @@ namespace webrtc { class RecordableEncodedFrame { public: // Encoded resolution in pixels + // TODO(bugs.webrtc.org/12114) : remove in favor of Resolution. struct EncodedResolution { bool empty() const { return width == 0 && height == 0; } diff --git a/TMessagesProj/jni/voip/webrtc/api/video/render_resolution.h b/TMessagesProj/jni/voip/webrtc/api/video/render_resolution.h index edcf8f8ee5..fcf4f122d6 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/render_resolution.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/render_resolution.h @@ -13,6 +13,7 @@ namespace webrtc { +// TODO(bugs.webrtc.org/12114) : remove in favor of Resolution. class RenderResolution { public: constexpr RenderResolution() = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/video/resolution.h b/TMessagesProj/jni/voip/webrtc/api/video/resolution.h new file mode 100644 index 0000000000..11ffef0b03 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video/resolution.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_RESOLUTION_H_ +#define API_VIDEO_RESOLUTION_H_ + +#include + +namespace webrtc { + +// A struct representing a video resolution in pixels. +struct Resolution { + int width = 0; + int height = 0; + + // Helper methods. 
+ int PixelCount() const { return width * height; } + std::pair ToPair() const { return std::make_pair(width, height); } +}; + +inline bool operator==(const Resolution& lhs, const Resolution& rhs) { + return lhs.width == rhs.width && lhs.height == rhs.height; +} + +inline bool operator!=(const Resolution& lhs, const Resolution& rhs) { + return !(lhs == rhs); +} + +} // namespace webrtc + +#endif // API_VIDEO_RESOLUTION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_frame.cc b/TMessagesProj/jni/voip/webrtc/api/video/video_frame.cc index d97e3aa82a..130820a886 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_frame.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_frame.cc @@ -164,8 +164,8 @@ VideoFrame::Builder::~Builder() = default; VideoFrame VideoFrame::Builder::build() { RTC_CHECK(video_frame_buffer_ != nullptr); return VideoFrame(id_, video_frame_buffer_, timestamp_us_, timestamp_rtp_, - ntp_time_ms_, rotation_, color_space_, update_rect_, - packet_infos_); + ntp_time_ms_, rotation_, color_space_, render_parameters_, + update_rect_, packet_infos_); } VideoFrame::Builder& VideoFrame::Builder::set_video_frame_buffer( @@ -260,6 +260,7 @@ VideoFrame::VideoFrame(uint16_t id, int64_t ntp_time_ms, VideoRotation rotation, const absl::optional& color_space, + const RenderParameters& render_parameters, const absl::optional& update_rect, RtpPacketInfos packet_infos) : id_(id), @@ -269,6 +270,7 @@ VideoFrame::VideoFrame(uint16_t id, timestamp_us_(timestamp_us), rotation_(rotation), color_space_(color_space), + render_parameters_(render_parameters), update_rect_(update_rect), packet_infos_(std::move(packet_infos)) { if (update_rect_) { diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_frame.h b/TMessagesProj/jni/voip/webrtc/api/video/video_frame.h index 512055d770..086aad820f 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_frame.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_frame.h @@ -29,6 +29,9 @@ namespace webrtc { class RTC_EXPORT VideoFrame { public: + // Value used to signal that `VideoFrame::id()` is not set. + static constexpr uint16_t kNotSetId = 0; + struct RTC_EXPORT UpdateRect { int offset_x; int offset_y; @@ -78,6 +81,21 @@ class RTC_EXPORT VideoFrame { Timestamp finish; }; + struct RTC_EXPORT RenderParameters { + bool use_low_latency_rendering = false; + absl::optional max_composition_delay_in_frames; + + bool operator==(const RenderParameters& other) const { + return other.use_low_latency_rendering == use_low_latency_rendering && + other.max_composition_delay_in_frames == + max_composition_delay_in_frames; + } + + bool operator!=(const RenderParameters& other) const { + return !(*this == other); + } + }; + // Preferred way of building VideoFrame objects. class RTC_EXPORT Builder { public: @@ -99,13 +117,14 @@ class RTC_EXPORT VideoFrame { Builder& set_packet_infos(RtpPacketInfos packet_infos); private: - uint16_t id_ = 0; + uint16_t id_ = kNotSetId; rtc::scoped_refptr video_frame_buffer_; int64_t timestamp_us_ = 0; uint32_t timestamp_rtp_ = 0; int64_t ntp_time_ms_ = 0; VideoRotation rotation_ = kVideoRotation_0; absl::optional color_space_; + RenderParameters render_parameters_; absl::optional update_rect_; RtpPacketInfos packet_infos_; }; @@ -134,12 +153,12 @@ class RTC_EXPORT VideoFrame { // Get frame size in pixels. uint32_t size() const; - // Get frame ID. Returns 0 if ID is not set. Not guaranteed to be transferred - // from the sender to the receiver, but preserved on the sender side. 
The id - // should be propagated between all frame modifications during its lifetime - // from capturing to sending as encoded image. It is intended to be unique - // over a time window of a few minutes for the peer connection to which the - // corresponding video stream belongs to. + // Get frame ID. Returns `kNotSetId` if ID is not set. Not guaranteed to be + // transferred from the sender to the receiver, but preserved on the sender + // side. The id should be propagated between all frame modifications during + // its lifetime from capturing to sending as encoded image. It is intended to + // be unique over a time window of a few minutes for the peer connection to + // which the corresponding video stream belongs to. uint16_t id() const { return id_; } void set_id(uint16_t id) { id_ = id; } @@ -147,20 +166,12 @@ class RTC_EXPORT VideoFrame { int64_t timestamp_us() const { return timestamp_us_; } void set_timestamp_us(int64_t timestamp_us) { timestamp_us_ = timestamp_us; } - // TODO(nisse): After the cricket::VideoFrame and webrtc::VideoFrame - // merge, timestamps other than timestamp_us will likely be - // deprecated. - // Set frame timestamp (90kHz). void set_timestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; } // Get frame timestamp (90kHz). uint32_t timestamp() const { return timestamp_rtp_; } - // For now, transport_frame_id and rtp timestamp are the same. - // TODO(nisse): Must be handled differently for QUIC. - uint32_t transport_frame_id() const { return timestamp(); } - // Set capture ntp time in milliseconds. void set_ntp_time_ms(int64_t ntp_time_ms) { ntp_time_ms_ = ntp_time_ms; } @@ -186,18 +197,20 @@ class RTC_EXPORT VideoFrame { color_space_ = color_space; } - // max_composition_delay_in_frames() is used in an experiment of a low-latency - // renderer algorithm see crbug.com/1138888. - absl::optional max_composition_delay_in_frames() const { - return max_composition_delay_in_frames_; + RenderParameters render_parameters() const { return render_parameters_; } + void set_render_parameters(const RenderParameters& render_parameters) { + render_parameters_ = render_parameters; } - void set_max_composition_delay_in_frames( - absl::optional max_composition_delay_in_frames) { - max_composition_delay_in_frames_ = max_composition_delay_in_frames; + + // Deprecated in favor of render_parameters, will be removed once Chromium is + // updated. max_composition_delay_in_frames() is used in an experiment of a + // low-latency renderer algorithm see crbug.com/1138888. + [[deprecated("Use render_parameters() instead.")]] absl::optional + max_composition_delay_in_frames() const { + return render_parameters_.max_composition_delay_in_frames; } // Get render time in milliseconds. - // TODO(nisse): Deprecated. Migrate all users to timestamp_us(). int64_t render_time_ms() const; // Return the underlying buffer. Never nullptr for a properly @@ -207,7 +220,6 @@ class RTC_EXPORT VideoFrame { void set_video_frame_buffer( const rtc::scoped_refptr& buffer); - // TODO(nisse): Deprecated. // Return true if the frame is stored in a texture. 
bool is_texture() const { return video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative; @@ -254,6 +266,7 @@ class RTC_EXPORT VideoFrame { int64_t ntp_time_ms, VideoRotation rotation, const absl::optional& color_space, + const RenderParameters& render_parameters, const absl::optional& update_rect, RtpPacketInfos packet_infos); @@ -265,7 +278,8 @@ class RTC_EXPORT VideoFrame { int64_t timestamp_us_; VideoRotation rotation_; absl::optional color_space_; - absl::optional max_composition_delay_in_frames_; + // Contains parameters that affect have the frame should be rendered. + RenderParameters render_parameters_; // Updated since the last frame area. If present it means that the bounding // box of all the changes is within the rectangular area and is close to it. // If absent, it means that there's no information about the change at all and diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc index 2b493dcc42..398e30b606 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc @@ -11,6 +11,8 @@ #include "api/video/video_frame_buffer.h" #include "api/video/i420_buffer.h" +#include "api/video/i422_buffer.h" +#include "api/video/i444_buffer.h" #include "api/video/nv12_buffer.h" #include "rtc_base/checks.h" @@ -46,11 +48,21 @@ const I444BufferInterface* VideoFrameBuffer::GetI444() const { return static_cast(this); } +const I422BufferInterface* VideoFrameBuffer::GetI422() const { + RTC_CHECK(type() == Type::kI422); + return static_cast(this); +} + const I010BufferInterface* VideoFrameBuffer::GetI010() const { RTC_CHECK(type() == Type::kI010); return static_cast(this); } +const I210BufferInterface* VideoFrameBuffer::GetI210() const { + RTC_CHECK(type() == Type::kI210); + return static_cast(this); +} + const NV12BufferInterface* VideoFrameBuffer::GetNV12() const { RTC_CHECK(type() == Type::kNV12); return static_cast(this); @@ -76,8 +88,12 @@ const char* VideoFrameBufferTypeToString(VideoFrameBuffer::Type type) { return "kI420A"; case VideoFrameBuffer::Type::kI444: return "kI444"; + case VideoFrameBuffer::Type::kI422: + return "kI422"; case VideoFrameBuffer::Type::kI010: return "kI010"; + case VideoFrameBuffer::Type::kI210: + return "kI210"; case VideoFrameBuffer::Type::kNV12: return "kNV12"; default: @@ -94,7 +110,7 @@ int I420BufferInterface::ChromaHeight() const { } rtc::scoped_refptr I420BufferInterface::ToI420() { - return this; + return rtc::scoped_refptr(this); } const I420BufferInterface* I420BufferInterface::GetI420() const { @@ -117,6 +133,44 @@ int I444BufferInterface::ChromaHeight() const { return height(); } +rtc::scoped_refptr I444BufferInterface::CropAndScale( + int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) { + rtc::scoped_refptr result = + I444Buffer::Create(scaled_width, scaled_height); + result->CropAndScaleFrom(*this, offset_x, offset_y, crop_width, crop_height); + return result; +} + +VideoFrameBuffer::Type I422BufferInterface::type() const { + return Type::kI422; +} + +int I422BufferInterface::ChromaWidth() const { + return (width() + 1) / 2; +} + +int I422BufferInterface::ChromaHeight() const { + return height(); +} + +rtc::scoped_refptr I422BufferInterface::CropAndScale( + int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) { + rtc::scoped_refptr result = + 
I422Buffer::Create(scaled_width, scaled_height); + result->CropAndScaleFrom(*this, offset_x, offset_y, crop_width, crop_height); + return result; +} + VideoFrameBuffer::Type I010BufferInterface::type() const { return Type::kI010; } @@ -129,6 +183,18 @@ int I010BufferInterface::ChromaHeight() const { return (height() + 1) / 2; } +VideoFrameBuffer::Type I210BufferInterface::type() const { + return Type::kI210; +} + +int I210BufferInterface::ChromaWidth() const { + return (width() + 1) / 2; +} + +int I210BufferInterface::ChromaHeight() const { + return height(); +} + VideoFrameBuffer::Type NV12BufferInterface::type() const { return Type::kNV12; } diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h index 7b0782f9c4..cf90ff22c1 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h @@ -22,8 +22,10 @@ namespace webrtc { class I420BufferInterface; class I420ABufferInterface; +class I422BufferInterface; class I444BufferInterface; class I010BufferInterface; +class I210BufferInterface; class NV12BufferInterface; // Base class for frame buffers of different types of pixel format and storage. @@ -52,8 +54,10 @@ class RTC_EXPORT VideoFrameBuffer : public rtc::RefCountInterface { kNative, kI420, kI420A, + kI422, kI444, kI010, + kI210, kNV12, }; @@ -104,8 +108,10 @@ class RTC_EXPORT VideoFrameBuffer : public rtc::RefCountInterface { // These functions should only be called if type() is of the correct type. // Calling with a different type will result in a crash. const I420ABufferInterface* GetI420A() const; + const I422BufferInterface* GetI422() const; const I444BufferInterface* GetI444() const; const I010BufferInterface* GetI010() const; + const I210BufferInterface* GetI210() const; const NV12BufferInterface* GetNV12() const; // From a kNative frame, returns a VideoFrameBuffer with a pixel format in @@ -140,7 +146,7 @@ class PlanarYuvBuffer : public VideoFrameBuffer { }; // This interface represents 8-bit color depth formats: Type::kI420, -// Type::kI420A and Type::kI444. +// Type::kI420A, Type::kI422 and Type::kI444. class PlanarYuv8Buffer : public PlanarYuvBuffer { public: // Returns pointer to the pixel data for a given plane. The memory is owned by @@ -177,6 +183,26 @@ class RTC_EXPORT I420ABufferInterface : public I420BufferInterface { ~I420ABufferInterface() override {} }; +// Represents Type::kI422, 4:2:2 planar with 8 bits per pixel. +class I422BufferInterface : public PlanarYuv8Buffer { + public: + Type type() const final; + + int ChromaWidth() const final; + int ChromaHeight() const final; + + rtc::scoped_refptr CropAndScale(int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) override; + + protected: + ~I422BufferInterface() override {} +}; + +// Represents Type::kI444, 4:4:4 planar with 8 bits per pixel. class I444BufferInterface : public PlanarYuv8Buffer { public: Type type() const final; @@ -184,11 +210,19 @@ class I444BufferInterface : public PlanarYuv8Buffer { int ChromaWidth() const final; int ChromaHeight() const final; + rtc::scoped_refptr CropAndScale(int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) override; + protected: ~I444BufferInterface() override {} }; -// This interface represents 8-bit to 16-bit color depth formats: Type::kI010. 
+// This interface represents 8-bit to 16-bit color depth formats: Type::kI010 or +// Type::kI210 . class PlanarYuv16BBuffer : public PlanarYuvBuffer { public: // Returns pointer to the pixel data for a given plane. The memory is owned by @@ -214,6 +248,19 @@ class I010BufferInterface : public PlanarYuv16BBuffer { ~I010BufferInterface() override {} }; +// Represents Type::kI210, allocates 16 bits per pixel and fills 10 least +// significant bits with color information. +class I210BufferInterface : public PlanarYuv16BBuffer { + public: + Type type() const override; + + int ChromaWidth() const final; + int ChromaHeight() const final; + + protected: + ~I210BufferInterface() override {} +}; + class BiplanarYuvBuffer : public VideoFrameBuffer { public: virtual int ChromaWidth() const = 0; diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_source_interface.h b/TMessagesProj/jni/voip/webrtc/api/video/video_source_interface.h index d66a235da0..38d0041718 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_source_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_source_interface.h @@ -80,6 +80,24 @@ struct RTC_EXPORT VideoSinkWants { // Note that the `resolutions` can change while frames are in flight and // should only be used as a hint when constructing the webrtc::VideoFrame. std::vector resolutions; + + // This is the resolution requested by the user using RtpEncodingParameters. + absl::optional requested_resolution; + + // `active` : is (any) of the layers/sink(s) active. + bool is_active = true; + + // This sub-struct contains information computed by VideoBroadcaster + // that aggregates several VideoSinkWants (and sends them to + // AdaptedVideoTrackSource). + struct Aggregates { + // `active_without_requested_resolution` is set by VideoBroadcaster + // when aggregating sink wants if there exists any sink (encoder) that is + // active but has not set the `requested_resolution`, i.e is relying on + // OnOutputFormatRequest to handle encode resolution. + bool any_active_without_requested_resolution = false; + }; + absl::optional aggregates; }; inline bool operator==(const VideoSinkWants::FrameSize& a, @@ -87,6 +105,11 @@ inline bool operator==(const VideoSinkWants::FrameSize& a, return a.width == b.width && a.height == b.height; } +inline bool operator!=(const VideoSinkWants::FrameSize& a, + const VideoSinkWants::FrameSize& b) { + return !(a == b); +} + template class VideoSourceInterface { public: @@ -97,6 +120,10 @@ class VideoSourceInterface { // RemoveSink must guarantee that at the time the method returns, // there is no current and no future calls to VideoSinkInterface::OnFrame. virtual void RemoveSink(VideoSinkInterface* sink) = 0; + + // Request underlying source to capture a new frame. + // TODO(crbug/1255737): make pure virtual once downstream projects adapt. 
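+  // (Typical use, e.g.: a newly added sink asks a change-driven source such as
+  // screen capture for an initial frame instead of waiting for the next
+  // change; the default implementation below is a no-op.)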
+ virtual void RequestRefreshFrame() {} }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder_create.cc b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder_create.cc index 8d70556b4d..e14c3bc851 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder_create.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder_create.cc @@ -20,10 +20,13 @@ std::unique_ptr CreateVideoStreamDecoder( VideoStreamDecoderInterface::Callbacks* callbacks, VideoDecoderFactory* decoder_factory, TaskQueueFactory* task_queue_factory, - std::map> decoder_settings) { - return std::make_unique(callbacks, decoder_factory, - task_queue_factory, - std::move(decoder_settings)); + std::map> decoder_settings, + // TODO(jonaso, webrtc:10335): Consider what to do with factories + // vs. field trials. + const FieldTrialsView* field_trials) { + return std::make_unique( + callbacks, decoder_factory, task_queue_factory, + std::move(decoder_settings), field_trials); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder_create.h b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder_create.h index 9c898ec610..974fd804ce 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder_create.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder_create.h @@ -15,6 +15,7 @@ #include #include +#include "api/field_trials_view.h" #include "api/task_queue/task_queue_factory.h" #include "api/video/video_stream_decoder.h" #include "api/video_codecs/sdp_video_format.h" @@ -28,7 +29,8 @@ std::unique_ptr CreateVideoStreamDecoder( VideoStreamDecoderInterface::Callbacks* callbacks, VideoDecoderFactory* decoder_factory, TaskQueueFactory* task_queue_factory, - std::map> decoder_settings); + std::map> decoder_settings, + const FieldTrialsView* field_trials = nullptr); } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h index 743524b352..3aee5b7050 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h @@ -23,19 +23,13 @@ class EncoderSwitchRequestCallback { public: virtual ~EncoderSwitchRequestCallback() {} - struct Config { - std::string codec_name; - absl::optional param; - absl::optional value; - }; - - // Requests that encoder fallback is performed. + // Requests switch to next negotiated encoder. virtual void RequestEncoderFallback() = 0; - // Requests that a switch to a specific encoder is performed. - virtual void RequestEncoderSwitch(const Config& conf) = 0; - - virtual void RequestEncoderSwitch(const SdpVideoFormat& format) = 0; + // Requests switch to a specific encoder. If the encoder is not available and + // `allow_default_fallback` is `true` the default fallback is invoked. 
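+  // For example:
+  //   RequestEncoderSwitch(SdpVideoFormat("VP8"),
+  //                        /*allow_default_fallback=*/true);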
+ virtual void RequestEncoderSwitch(const SdpVideoFormat& format, + bool allow_default_fallback) = 0; }; struct VideoStreamEncoderSettings { diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_timing.cc b/TMessagesProj/jni/voip/webrtc/api/video/video_timing.cc index df1bc4857a..0483c20e66 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_timing.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_timing.cc @@ -11,6 +11,7 @@ #include "api/video/video_timing.h" #include "api/array_view.h" +#include "api/units/time_delta.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/strings/string_builder.h" @@ -25,6 +26,14 @@ uint16_t VideoSendTiming::GetDeltaCappedMs(int64_t base_ms, int64_t time_ms) { return rtc::saturated_cast(time_ms - base_ms); } +uint16_t VideoSendTiming::GetDeltaCappedMs(TimeDelta delta) { + if (delta < TimeDelta::Zero()) { + RTC_DLOG(LS_ERROR) << "Delta " << delta.ms() + << "ms expected to be positive"; + } + return rtc::saturated_cast(delta.ms()); +} + TimingFrameInfo::TimingFrameInfo() : rtp_timestamp(0), capture_time_ms(-1), diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_timing.h b/TMessagesProj/jni/voip/webrtc/api/video/video_timing.h index dd8febb3db..698477a81a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_timing.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_timing.h @@ -16,6 +16,8 @@ #include #include +#include "api/units/time_delta.h" + namespace webrtc { // Video timing timestamps in ms counted from capture_time_ms of a frame. @@ -34,6 +36,7 @@ struct VideoSendTiming { // https://webrtc.org/experiments/rtp-hdrext/video-timing/ extension stores // 16-bit deltas of timestamps from packet capture time. static uint16_t GetDeltaCappedMs(int64_t base_ms, int64_t time_ms); + static uint16_t GetDeltaCappedMs(TimeDelta delta); uint16_t encode_start_delta_ms; uint16_t encode_finish_delta_ms; diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/av1_profile.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/av1_profile.cc new file mode 100644 index 0000000000..eefe166d80 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/av1_profile.cc @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video_codecs/av1_profile.h" + +#include +#include + +#include "rtc_base/string_to_number.h" + +namespace webrtc { + +// Parameter name in the format parameter map for AV1 video. 
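+// (That is, the "profile" key carried in the SDP "a=fmtp" parameters for AV1,
+// e.g. "profile=2".)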
+const char kAV1FmtpProfile[] = "profile";
+
+absl::string_view AV1ProfileToString(AV1Profile profile) {
+  switch (profile) {
+    case AV1Profile::kProfile0:
+      return "0";
+    case AV1Profile::kProfile1:
+      return "1";
+    case AV1Profile::kProfile2:
+      return "2";
+  }
+  return "0";
+}
+
+absl::optional<AV1Profile> StringToAV1Profile(absl::string_view str) {
+  const absl::optional<int> i = rtc::StringToNumber<int>(str);
+  if (!i.has_value())
+    return absl::nullopt;
+
+  switch (i.value()) {
+    case 0:
+      return AV1Profile::kProfile0;
+    case 1:
+      return AV1Profile::kProfile1;
+    case 2:
+      return AV1Profile::kProfile2;
+    default:
+      return absl::nullopt;
+  }
+}
+
+absl::optional<AV1Profile> ParseSdpForAV1Profile(
+    const SdpVideoFormat::Parameters& params) {
+  const auto profile_it = params.find(kAV1FmtpProfile);
+  if (profile_it == params.end())
+    return AV1Profile::kProfile0;
+  const std::string& profile_str = profile_it->second;
+  return StringToAV1Profile(profile_str);
+}
+
+bool AV1IsSameProfile(const SdpVideoFormat::Parameters& params1,
+                      const SdpVideoFormat::Parameters& params2) {
+  const absl::optional<AV1Profile> profile = ParseSdpForAV1Profile(params1);
+  const absl::optional<AV1Profile> other_profile =
+      ParseSdpForAV1Profile(params2);
+  return profile && other_profile && profile == other_profile;
+}
+
+}  // namespace webrtc
diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/av1_profile.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/av1_profile.h
new file mode 100644
index 0000000000..2254d5ecd3
--- /dev/null
+++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/av1_profile.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_AV1_PROFILE_H_
+#define API_VIDEO_CODECS_AV1_PROFILE_H_
+
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Profile information for AV1 video.
+extern RTC_EXPORT const char kAV1FmtpProfile[];
+
+// Profiles can be found at:
+// https://aomedia.org/av1/specification/annex-a/#profiles
+// The enum values match the number specified in the SDP.
+enum class AV1Profile {
+  kProfile0 = 0,
+  kProfile1 = 1,
+  kProfile2 = 2,
+};
+
+// Helper function which converts an AV1Profile to std::string. Returns "0" if
+// an unknown value is passed in.
+RTC_EXPORT absl::string_view AV1ProfileToString(AV1Profile profile);
+
+// Helper function which converts a std::string to AV1Profile. Returns null if
+// |profile| is not a valid profile string.
+absl::optional<AV1Profile> StringToAV1Profile(absl::string_view profile);
+
+// Parses an SDP key-value map of format parameters to retrieve an AV1 profile.
+// Returns an AV1Profile if one has been specified, `kProfile0` if no profile is
+// specified and an empty value if the profile key is present but contains an
+// invalid value.
+RTC_EXPORT absl::optional<AV1Profile> ParseSdpForAV1Profile(
+    const SdpVideoFormat::Parameters& params);
+
+// Returns true if the parameters have the same AV1 profile or neither contains
+// an AV1 profile, otherwise false.
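+// For example, two parameter maps that both omit "profile" (or both specify
+// "profile=2") compare equal, while "profile=1" vs "profile=2" do not.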
+bool AV1IsSameProfile(const SdpVideoFormat::Parameters& params1, + const SdpVideoFormat::Parameters& params2); + +} // namespace webrtc + +#endif // API_VIDEO_CODECS_AV1_PROFILE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/bitstream_parser.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/bitstream_parser.h index 0d8d014d62..86ce192e49 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/bitstream_parser.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/bitstream_parser.h @@ -10,9 +10,11 @@ #ifndef API_VIDEO_CODECS_BITSTREAM_PARSER_H_ #define API_VIDEO_CODECS_BITSTREAM_PARSER_H_ + #include #include +#include "absl/types/optional.h" #include "api/array_view.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.cc index fa47758189..02b43ba4f2 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.cc @@ -68,7 +68,8 @@ constexpr ProfilePattern kProfilePatterns[] = { {0x58, BitPattern("10xx0000"), H264Profile::kProfileBaseline}, {0x4D, BitPattern("0x0x0000"), H264Profile::kProfileMain}, {0x64, BitPattern("00000000"), H264Profile::kProfileHigh}, - {0x64, BitPattern("00001100"), H264Profile::kProfileConstrainedHigh}}; + {0x64, BitPattern("00001100"), H264Profile::kProfileConstrainedHigh}, + {0xF4, BitPattern("00000000"), H264Profile::kProfilePredictiveHigh444}}; struct LevelConstraint { const int max_macroblocks_per_second; @@ -228,6 +229,9 @@ absl::optional H264ProfileLevelIdToString( case H264Profile::kProfileHigh: profile_idc_iop_string = "6400"; break; + case H264Profile::kProfilePredictiveHigh444: + profile_idc_iop_string = "f400"; + break; // Unrecognized profile. default: return absl::nullopt; diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.h index 51d025cd7b..4b46ad329d 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.h @@ -25,6 +25,7 @@ enum class H264Profile { kProfileMain, kProfileConstrainedHigh, kProfileHigh, + kProfilePredictiveHigh444, }; // All values are equal to ten times the level number, except level 1b which is diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/scalability_mode.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/scalability_mode.cc new file mode 100644 index 0000000000..c449b4217e --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/scalability_mode.cc @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video_codecs/scalability_mode.h" + +#include "rtc_base/checks.h" + +namespace webrtc { + +absl::string_view ScalabilityModeToString(ScalabilityMode scalability_mode) { + switch (scalability_mode) { + case ScalabilityMode::kL1T1: + return "L1T1"; + case ScalabilityMode::kL1T2: + return "L1T2"; + case ScalabilityMode::kL1T3: + return "L1T3"; + case ScalabilityMode::kL2T1: + return "L2T1"; + case ScalabilityMode::kL2T1h: + return "L2T1h"; + case ScalabilityMode::kL2T1_KEY: + return "L2T1_KEY"; + case ScalabilityMode::kL2T2: + return "L2T2"; + case ScalabilityMode::kL2T2h: + return "L2T2h"; + case ScalabilityMode::kL2T2_KEY: + return "L2T2_KEY"; + case ScalabilityMode::kL2T2_KEY_SHIFT: + return "L2T2_KEY_SHIFT"; + case ScalabilityMode::kL2T3: + return "L2T3"; + case ScalabilityMode::kL2T3h: + return "L2T3h"; + case ScalabilityMode::kL2T3_KEY: + return "L2T3_KEY"; + case ScalabilityMode::kL3T1: + return "L3T1"; + case ScalabilityMode::kL3T1h: + return "L3T1h"; + case ScalabilityMode::kL3T1_KEY: + return "L3T1_KEY"; + case ScalabilityMode::kL3T2: + return "L3T2"; + case ScalabilityMode::kL3T2h: + return "L3T2h"; + case ScalabilityMode::kL3T2_KEY: + return "L3T2_KEY"; + case ScalabilityMode::kL3T3: + return "L3T3"; + case ScalabilityMode::kL3T3h: + return "L3T3h"; + case ScalabilityMode::kL3T3_KEY: + return "L3T3_KEY"; + case ScalabilityMode::kS2T1: + return "S2T1"; + case ScalabilityMode::kS2T1h: + return "S2T1h"; + case ScalabilityMode::kS2T2: + return "S2T2"; + case ScalabilityMode::kS2T2h: + return "S2T2h"; + case ScalabilityMode::kS2T3: + return "S2T3"; + case ScalabilityMode::kS2T3h: + return "S2T3h"; + case ScalabilityMode::kS3T1: + return "S3T1"; + case ScalabilityMode::kS3T1h: + return "S3T1h"; + case ScalabilityMode::kS3T2: + return "S3T2"; + case ScalabilityMode::kS3T2h: + return "S3T2h"; + case ScalabilityMode::kS3T3: + return "S3T3"; + case ScalabilityMode::kS3T3h: + return "S3T3h"; + } + RTC_CHECK_NOTREACHED(); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/scalability_mode.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/scalability_mode.h new file mode 100644 index 0000000000..b26f32eb22 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/scalability_mode.h @@ -0,0 +1,111 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_SCALABILITY_MODE_H_ +#define API_VIDEO_CODECS_SCALABILITY_MODE_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Supported scalability modes. Most applications should use the +// PeerConnection-level apis where scalability mode is represented as a string. +// This list of currently recognized modes is intended for the api boundary +// between webrtc and injected encoders. Any application usage outside of +// injected encoders is strongly discouraged. 
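+//
+// Informal reading of the names (see https://www.w3.org/TR/webrtc-svc/):
+// "LxTy" means x spatial layers with inter-layer dependencies and y temporal
+// layers, "SxTy" means x independent (simulcast-style) spatial layers, a
+// trailing "h" denotes a 1.5x instead of 2x resolution step between spatial
+// layers, and the "_KEY"/"_KEY_SHIFT" variants restrict inter-layer
+// prediction to key frames.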
+enum class ScalabilityMode : uint8_t { + kL1T1, + kL1T2, + kL1T3, + kL2T1, + kL2T1h, + kL2T1_KEY, + kL2T2, + kL2T2h, + kL2T2_KEY, + kL2T2_KEY_SHIFT, + kL2T3, + kL2T3h, + kL2T3_KEY, + kL3T1, + kL3T1h, + kL3T1_KEY, + kL3T2, + kL3T2h, + kL3T2_KEY, + kL3T3, + kL3T3h, + kL3T3_KEY, + kS2T1, + kS2T1h, + kS2T2, + kS2T2h, + kS2T3, + kS2T3h, + kS3T1, + kS3T1h, + kS3T2, + kS3T2h, + kS3T3, + kS3T3h, +}; + +inline constexpr ScalabilityMode kAllScalabilityModes[] = { + // clang-format off + ScalabilityMode::kL1T1, + ScalabilityMode::kL1T2, + ScalabilityMode::kL1T3, + ScalabilityMode::kL2T1, + ScalabilityMode::kL2T1h, + ScalabilityMode::kL2T1_KEY, + ScalabilityMode::kL2T2, + ScalabilityMode::kL2T2h, + ScalabilityMode::kL2T2_KEY, + ScalabilityMode::kL2T2_KEY_SHIFT, + ScalabilityMode::kL2T3, + ScalabilityMode::kL2T3h, + ScalabilityMode::kL2T3_KEY, + ScalabilityMode::kL3T1, + ScalabilityMode::kL3T1h, + ScalabilityMode::kL3T1_KEY, + ScalabilityMode::kL3T2, + ScalabilityMode::kL3T2h, + ScalabilityMode::kL3T2_KEY, + ScalabilityMode::kL3T3, + ScalabilityMode::kL3T3h, + ScalabilityMode::kL3T3_KEY, + ScalabilityMode::kS2T1, + ScalabilityMode::kS2T1h, + ScalabilityMode::kS2T2, + ScalabilityMode::kS2T2h, + ScalabilityMode::kS2T3, + ScalabilityMode::kS2T3h, + ScalabilityMode::kS3T1, + ScalabilityMode::kS3T1h, + ScalabilityMode::kS3T2, + ScalabilityMode::kS3T2h, + ScalabilityMode::kS3T3, + ScalabilityMode::kS3T3h, + // clang-format on +}; + +inline constexpr size_t kScalabilityModeCount = + sizeof(kAllScalabilityModes) / sizeof(ScalabilityMode); + +RTC_EXPORT +absl::string_view ScalabilityModeToString(ScalabilityMode scalability_mode); + +} // namespace webrtc + +#endif // API_VIDEO_CODECS_SCALABILITY_MODE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/scalability_mode_helper.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/scalability_mode_helper.cc new file mode 100644 index 0000000000..b4571632d9 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/scalability_mode_helper.cc @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video_codecs/scalability_mode_helper.h" + +#include "modules/video_coding/svc/scalability_mode_util.h" + +namespace webrtc { + +absl::optional ScalabilityModeStringToNumSpatialLayers( + absl::string_view scalability_mode_string) { + absl::optional scalability_mode = + ScalabilityModeFromString(scalability_mode_string); + if (!scalability_mode.has_value()) { + return absl::nullopt; + } + return ScalabilityModeToNumSpatialLayers(*scalability_mode); +} + +absl::optional ScalabilityModeStringToNumTemporalLayers( + absl::string_view scalability_mode_string) { + absl::optional scalability_mode = + ScalabilityModeFromString(scalability_mode_string); + if (!scalability_mode.has_value()) { + return absl::nullopt; + } + return ScalabilityModeToNumTemporalLayers(*scalability_mode); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/scalability_mode_helper.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/scalability_mode_helper.h new file mode 100644 index 0000000000..a8b060d079 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/scalability_mode_helper.h @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_SCALABILITY_MODE_HELPER_H_ +#define API_VIDEO_CODECS_SCALABILITY_MODE_HELPER_H_ + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" + +namespace webrtc { + +// Returns the number of spatial layers from the `scalability_mode_string` +// or nullopt if the given mode is unknown. +absl::optional ScalabilityModeStringToNumSpatialLayers( + absl::string_view scalability_mode_string); + +// Returns the number of temporal layers from the `scalability_mode_string` +// or nullopt if the given mode is unknown. 
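+// For example, "L2T3_KEY" maps to 3 and "S3T1" maps to 1.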
+absl::optional ScalabilityModeStringToNumTemporalLayers( + absl::string_view scalability_mode_string); + +} // namespace webrtc + +#endif // API_VIDEO_CODECS_SCALABILITY_MODE_HELPER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.cc index 689c337ced..cb7e98a682 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.cc @@ -11,10 +11,14 @@ #include "api/video_codecs/sdp_video_format.h" #include "absl/strings/match.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/video_codecs/av1_profile.h" #include "api/video_codecs/h264_profile_level_id.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/vp9_profile.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" namespace webrtc { @@ -55,6 +59,8 @@ bool IsSameCodecSpecific(const SdpVideoFormat& format1, format2.parameters); case kVideoCodecVP9: return VP9IsSameProfile(format1.parameters, format2.parameters); + case kVideoCodecAV1: + return AV1IsSameProfile(format1.parameters, format2.parameters); default: return true; } @@ -67,6 +73,15 @@ SdpVideoFormat::SdpVideoFormat(const std::string& name, const Parameters& parameters) : name(name), parameters(parameters) {} +SdpVideoFormat::SdpVideoFormat( + const std::string& name, + const Parameters& parameters, + const absl::InlinedVector& + scalability_modes) + : name(name), + parameters(parameters), + scalability_modes(scalability_modes) {} + SdpVideoFormat::SdpVideoFormat(const SdpVideoFormat&) = default; SdpVideoFormat::SdpVideoFormat(SdpVideoFormat&&) = default; SdpVideoFormat& SdpVideoFormat::operator=(const SdpVideoFormat&) = default; @@ -77,9 +92,24 @@ SdpVideoFormat::~SdpVideoFormat() = default; std::string SdpVideoFormat::ToString() const { rtc::StringBuilder builder; builder << "Codec name: " << name << ", parameters: {"; - for (const auto& kv : parameters) + for (const auto& kv : parameters) { builder << " " << kv.first << "=" << kv.second; + } + builder << " }"; + if (!scalability_modes.empty()) { + builder << ", scalability_modes: ["; + bool first = true; + for (const auto scalability_mode : scalability_modes) { + if (first) { + first = false; + } else { + builder << ", "; + } + builder << ScalabilityModeToString(scalability_mode); + } + builder << "]"; + } return builder.str(); } @@ -102,7 +132,40 @@ bool SdpVideoFormat::IsCodecInList( } bool operator==(const SdpVideoFormat& a, const SdpVideoFormat& b) { - return a.name == b.name && a.parameters == b.parameters; + return a.name == b.name && a.parameters == b.parameters && + a.scalability_modes == b.scalability_modes; +} + +absl::optional FuzzyMatchSdpVideoFormat( + rtc::ArrayView supported_formats, + const SdpVideoFormat& format) { + absl::optional res; + int best_parameter_match = 0; + for (const auto& supported_format : supported_formats) { + if (absl::EqualsIgnoreCase(supported_format.name, format.name)) { + int matching_parameters = 0; + for (const auto& kv : supported_format.parameters) { + auto it = format.parameters.find(kv.first); + if (it != format.parameters.end() && it->second == kv.second) { + matching_parameters += 1; + } + } + + if (!res || matching_parameters > best_parameter_match) { + res = supported_format; + best_parameter_match = matching_parameters; + } + } + } + + if (!res) { + RTC_LOG(LS_INFO) << "Failed to match 
SdpVideoFormat " << format.ToString(); + } else if (*res != format) { + RTC_LOG(LS_INFO) << "Matched SdpVideoFormat " << format.ToString() + << " with " << res->ToString(); + } + + return res; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.h index a1e23f4f9c..faaa66c241 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.h @@ -14,7 +14,10 @@ #include #include +#include "absl/container/inlined_vector.h" +#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/video_codecs/scalability_mode.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -26,6 +29,11 @@ struct RTC_EXPORT SdpVideoFormat { explicit SdpVideoFormat(const std::string& name); SdpVideoFormat(const std::string& name, const Parameters& parameters); + SdpVideoFormat( + const std::string& name, + const Parameters& parameters, + const absl::InlinedVector& + scalability_modes); SdpVideoFormat(const SdpVideoFormat&); SdpVideoFormat(SdpVideoFormat&&); SdpVideoFormat& operator=(const SdpVideoFormat&); @@ -51,8 +59,17 @@ struct RTC_EXPORT SdpVideoFormat { std::string name; Parameters parameters; + absl::InlinedVector scalability_modes; }; +// For not so good reasons sometimes additional parameters are added to an +// SdpVideoFormat, which makes instances that should compare equal to not match +// anymore. Until we stop misusing SdpVideoFormats provide this convenience +// function to perform fuzzy matching. +absl::optional FuzzyMatchSdpVideoFormat( + rtc::ArrayView supported_formats, + const SdpVideoFormat& format); + } // namespace webrtc #endif // API_VIDEO_CODECS_SDP_VIDEO_FORMAT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/simulcast_stream.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/simulcast_stream.cc new file mode 100644 index 0000000000..312429ef9f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/simulcast_stream.cc @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video_codecs/simulcast_stream.h" + +#include "rtc_base/checks.h" + +namespace webrtc { + +unsigned char SimulcastStream::GetNumberOfTemporalLayers() const { + return numberOfTemporalLayers; +} +void SimulcastStream::SetNumberOfTemporalLayers(unsigned char n) { + RTC_DCHECK_GE(n, 1); + RTC_DCHECK_LE(n, 3); + numberOfTemporalLayers = n; +} + +ScalabilityMode SimulcastStream::GetScalabilityMode() const { + RTC_CHECK_GE(numberOfTemporalLayers, 1); + RTC_CHECK_LE(numberOfTemporalLayers, 3); + static const ScalabilityMode scalability_modes[3] = { + ScalabilityMode::kL1T1, + ScalabilityMode::kL1T2, + ScalabilityMode::kL1T3, + }; + return scalability_modes[numberOfTemporalLayers - 1]; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/simulcast_stream.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/simulcast_stream.h new file mode 100644 index 0000000000..7c0dd5d786 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/simulcast_stream.h @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_SIMULCAST_STREAM_H_ +#define API_VIDEO_CODECS_SIMULCAST_STREAM_H_ + +#include "api/video_codecs/scalability_mode.h" + +namespace webrtc { + +// TODO(bugs.webrtc.org/6883): Unify with struct VideoStream, part of +// VideoEncoderConfig. +struct SimulcastStream { + // Temporary utility methods for transition from numberOfTemporalLayers + // setting to ScalabilityMode. + unsigned char GetNumberOfTemporalLayers() const; + ScalabilityMode GetScalabilityMode() const; + void SetNumberOfTemporalLayers(unsigned char n); + + int width = 0; + int height = 0; + float maxFramerate = 0; // fps. + unsigned char numberOfTemporalLayers = 1; + unsigned int maxBitrate = 0; // kilobits/sec. + unsigned int targetBitrate = 0; // kilobits/sec. + unsigned int minBitrate = 0; // kilobits/sec. + unsigned int qpMax = 0; // minimum quality + bool active = false; // encoded and sent. 
+}; + +} // namespace webrtc +#endif // API_VIDEO_CODECS_SIMULCAST_STREAM_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.cc index 5fbedd99a5..ea6c339eab 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.cc @@ -34,19 +34,15 @@ constexpr char kPayloadNameMultiplex[] = "Multiplex"; } // namespace bool VideoCodecVP8::operator==(const VideoCodecVP8& other) const { - return (complexity == other.complexity && - numberOfTemporalLayers == other.numberOfTemporalLayers && + return (numberOfTemporalLayers == other.numberOfTemporalLayers && denoisingOn == other.denoisingOn && automaticResizeOn == other.automaticResizeOn && - frameDroppingOn == other.frameDroppingOn && keyFrameInterval == other.keyFrameInterval); } bool VideoCodecVP9::operator==(const VideoCodecVP9& other) const { - return (complexity == other.complexity && - numberOfTemporalLayers == other.numberOfTemporalLayers && + return (numberOfTemporalLayers == other.numberOfTemporalLayers && denoisingOn == other.denoisingOn && - frameDroppingOn == other.frameDroppingOn && keyFrameInterval == other.keyFrameInterval && adaptiveQpMode == other.adaptiveQpMode && automaticResizeOn == other.automaticResizeOn && @@ -55,8 +51,7 @@ bool VideoCodecVP9::operator==(const VideoCodecVP9& other) const { } bool VideoCodecH264::operator==(const VideoCodecH264& other) const { - return (frameDroppingOn == other.frameDroppingOn && - keyFrameInterval == other.keyFrameInterval && + return (keyFrameInterval == other.keyFrameInterval && numberOfTemporalLayers == other.numberOfTemporalLayers); } @@ -88,7 +83,8 @@ VideoCodec::VideoCodec() expect_encode_from_texture(false), timing_frame_thresholds({0, 0}), legacy_conference_mode(false), - codec_specific_() {} + codec_specific_(), + complexity_(VideoCodecComplexity::kComplexityNormal) {} VideoCodecVP8* VideoCodec::VP8() { RTC_DCHECK_EQ(codecType, kVideoCodecVP8); @@ -120,7 +116,6 @@ const VideoCodecH264& VideoCodec::H264() const { return codec_specific_.H264; } -#ifndef DISABLE_H265 VideoCodecH265* VideoCodec::H265() { RTC_DCHECK_EQ(codecType, kVideoCodecH265); return &codec_specific_.H265; @@ -130,7 +125,6 @@ const VideoCodecH265& VideoCodec::H265() const { RTC_DCHECK_EQ(codecType, kVideoCodecH265); return codec_specific_.H265; } -#endif const char* CodecTypeToPayloadString(VideoCodecType type) { switch (type) { @@ -142,14 +136,11 @@ const char* CodecTypeToPayloadString(VideoCodecType type) { return kPayloadNameAv1; case kVideoCodecH264: return kPayloadNameH264; -#ifndef DISABLE_H265 case kVideoCodecH265: return kPayloadNameH265; -#endif case kVideoCodecMultiplex: return kPayloadNameMultiplex; case kVideoCodecGeneric: - default: return kPayloadNameGeneric; } RTC_CHECK_NOTREACHED(); @@ -165,13 +156,28 @@ VideoCodecType PayloadStringToCodecType(const std::string& name) { return kVideoCodecAV1; if (absl::EqualsIgnoreCase(name, kPayloadNameH264)) return kVideoCodecH264; - if (absl::EqualsIgnoreCase(name, kPayloadNameMultiplex)) - return kVideoCodecMultiplex; -#ifndef DISABLE_H265 if (absl::EqualsIgnoreCase(name, kPayloadNameH265)) return kVideoCodecH265; -#endif + if (absl::EqualsIgnoreCase(name, kPayloadNameMultiplex)) + return kVideoCodecMultiplex; return kVideoCodecGeneric; } +VideoCodecComplexity VideoCodec::GetVideoEncoderComplexity() const { + return complexity_; +} + +void VideoCodec::SetVideoEncoderComplexity( + VideoCodecComplexity 
complexity_setting) { + complexity_ = complexity_setting; +} + +bool VideoCodec::GetFrameDropEnabled() const { + return frame_drop_enabled_; +} + +void VideoCodec::SetFrameDropEnabled(bool enabled) { + frame_drop_enabled_ = enabled; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.h index e7f8650a2c..7103ce7580 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.h @@ -19,6 +19,8 @@ #include "absl/strings/string_view.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_codec_type.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/simulcast_stream.h" #include "api/video_codecs/spatial_layer.h" #include "rtc_base/system/rtc_export.h" @@ -29,6 +31,7 @@ namespace webrtc { // Video codec enum class VideoCodecComplexity { + kComplexityLow = -1, kComplexityNormal = 0, kComplexityHigh = 1, kComplexityHigher = 2, @@ -41,11 +44,14 @@ struct VideoCodecVP8 { bool operator!=(const VideoCodecVP8& other) const { return !(*this == other); } - VideoCodecComplexity complexity; + // Temporary utility method for transition deleting numberOfTemporalLayers + // setting (replaced by ScalabilityMode). + void SetNumberOfTemporalLayers(unsigned char n) { + numberOfTemporalLayers = n; + } unsigned char numberOfTemporalLayers; bool denoisingOn; bool automaticResizeOn; - bool frameDroppingOn; int keyFrameInterval; }; @@ -61,10 +67,13 @@ struct VideoCodecVP9 { bool operator!=(const VideoCodecVP9& other) const { return !(*this == other); } - VideoCodecComplexity complexity; + // Temporary utility method for transition deleting numberOfTemporalLayers + // setting (replaced by ScalabilityMode). + void SetNumberOfTemporalLayers(unsigned char n) { + numberOfTemporalLayers = n; + } unsigned char numberOfTemporalLayers; bool denoisingOn; - bool frameDroppingOn; int keyFrameInterval; bool adaptiveQpMode; bool automaticResizeOn; @@ -79,7 +88,11 @@ struct VideoCodecH264 { bool operator!=(const VideoCodecH264& other) const { return !(*this == other); } - bool frameDroppingOn; + // Temporary utility method for transition deleting numberOfTemporalLayers + // setting (replaced by ScalabilityMode). + void SetNumberOfTemporalLayers(unsigned char n) { + numberOfTemporalLayers = n; + } int keyFrameInterval; uint8_t numberOfTemporalLayers; }; @@ -123,11 +136,19 @@ class RTC_EXPORT VideoCodec { // Scalability mode as described in // https://www.w3.org/TR/webrtc-svc/#scalabilitymodes* - // or value 'NONE' to indicate no scalability. - absl::string_view ScalabilityMode() const { return scalability_mode_; } - void SetScalabilityMode(absl::string_view scalability_mode) { - scalability_mode_ = std::string(scalability_mode); + absl::optional GetScalabilityMode() const { + return scalability_mode_; } + void SetScalabilityMode(ScalabilityMode scalability_mode) { + scalability_mode_ = scalability_mode; + } + void UnsetScalabilityMode() { scalability_mode_ = absl::nullopt; } + + VideoCodecComplexity GetVideoEncoderComplexity() const; + void SetVideoEncoderComplexity(VideoCodecComplexity complexity_setting); + + bool GetFrameDropEnabled() const; + void SetFrameDropEnabled(bool enabled); // Public variables. TODO(hta): Make them private with accessors. 
VideoCodecType codecType; @@ -148,7 +169,7 @@ class RTC_EXPORT VideoCodec { unsigned int qpMax; unsigned char numberOfSimulcastStreams; - SpatialLayer simulcastStream[kMaxSimulcastStreams]; + SimulcastStream simulcastStream[kMaxSimulcastStreams]; SpatialLayer spatialLayers[kMaxSpatialLayers]; VideoCodecMode mode; @@ -183,16 +204,18 @@ class RTC_EXPORT VideoCodec { const VideoCodecVP9& VP9() const; VideoCodecH264* H264(); const VideoCodecH264& H264() const; -#ifndef DISABLE_H265 VideoCodecH265* H265(); const VideoCodecH265& H265() const; -#endif private: // TODO(hta): Consider replacing the union with a pointer type. // This will allow removing the VideoCodec* types from this file. VideoCodecUnion codec_specific_; - std::string scalability_mode_; + absl::optional scalability_mode_; + // 'complexity_' indicates the CPU capability of the client. It's used to + // determine encoder CPU complexity (e.g., cpu_used for VP8, VP9. and AV1). + VideoCodecComplexity complexity_; + bool frame_drop_enabled_ = false; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory_template.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory_template.h new file mode 100644 index 0000000000..703ae11664 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory_template.h @@ -0,0 +1,95 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_H_ +#define API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_H_ + +#include +#include + +#include "absl/algorithm/container.h" +#include "api/array_view.h" +#include "api/video_codecs/video_decoder.h" +#include "api/video_codecs/video_decoder_factory.h" + +namespace webrtc { +// The VideoDecoderFactoryTemplate supports decoder implementations given as +// template arguments. +// +// To include a decoder in the factory it requires two static members +// functions to be defined: +// +// // Returns the supported SdpVideoFormats this decoder can decode. +// static std::vector SupportedFormats(); +// +// // Creates a decoder instance for the given format. +// static std::unique_ptr +// CreateDecoder(const SdpVideoFormat& format); +// +// Note that the order of the template arguments matter as the factory will +// return the first decoder implementation supporting the given SdpVideoFormat. 
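
Illustrative usage (editorial, not part of the patch): the decoder adapters added later in this diff can be composed into a factory as sketched below. The alias `MyDecoderFactory` and the choice of "VP8" are made up for the example; only the adapter and factory names come from the patch.

#include <memory>

#include "api/video_codecs/video_decoder_factory_template.h"
#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"

// Order matters: the first adapter supporting a given SdpVideoFormat wins.
using MyDecoderFactory = webrtc::VideoDecoderFactoryTemplate<
    webrtc::LibvpxVp8DecoderTemplateAdapter,
    webrtc::LibvpxVp9DecoderTemplateAdapter,
    webrtc::OpenH264DecoderTemplateAdapter,
    webrtc::Dav1dDecoderTemplateAdapter>;

auto factory = std::make_unique<MyDecoderFactory>();
// Returns a VP8 decoder; returns nullptr if no adapter supports the format.
std::unique_ptr<webrtc::VideoDecoder> decoder =
    factory->CreateVideoDecoder(webrtc::SdpVideoFormat("VP8"));
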
+template +class VideoDecoderFactoryTemplate : public VideoDecoderFactory { + public: + std::vector GetSupportedFormats() const override { + return GetSupportedFormatsInternal(); + } + + std::unique_ptr CreateVideoDecoder( + const SdpVideoFormat& format) override { + return CreateVideoDecoderInternal(format); + } + + private: + bool IsFormatInList( + const SdpVideoFormat& format, + rtc::ArrayView supported_formats) const { + return absl::c_any_of( + supported_formats, [&](const SdpVideoFormat& supported_format) { + return supported_format.name == format.name && + supported_format.parameters == format.parameters; + }); + } + + template + std::vector GetSupportedFormatsInternal() const { + auto supported_formats = V::SupportedFormats(); + + if constexpr (sizeof...(Vs) > 0) { + // Supported formats may overlap between implementations, so duplicates + // should be filtered out. + for (const auto& other_format : GetSupportedFormatsInternal()) { + if (!IsFormatInList(other_format, supported_formats)) { + supported_formats.push_back(other_format); + } + } + } + + return supported_formats; + } + + template + std::unique_ptr CreateVideoDecoderInternal( + const SdpVideoFormat& format) { + if (IsFormatInList(format, V::SupportedFormats())) { + return V::CreateDecoder(format); + } + + if constexpr (sizeof...(Vs) > 0) { + return CreateVideoDecoderInternal(format); + } + + return nullptr; + } +}; + +} // namespace webrtc + +#endif // API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory_template_dav1d_adapter.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory_template_dav1d_adapter.h new file mode 100644 index 0000000000..6d80cadf83 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory_template_dav1d_adapter.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_DAV1D_ADAPTER_H_ +#define API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_DAV1D_ADAPTER_H_ + +#include +#include + +#include "api/video_codecs/sdp_video_format.h" +#include "modules/video_coding/codecs/av1/dav1d_decoder.h" + +namespace webrtc { +struct Dav1dDecoderTemplateAdapter { + static std::vector SupportedFormats() { + return {SdpVideoFormat("AV1")}; + } + + static std::unique_ptr CreateDecoder( + const SdpVideoFormat& format) { + return CreateDav1dDecoder(); + } +}; + +} // namespace webrtc + +#endif // API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_DAV1D_ADAPTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h new file mode 100644 index 0000000000..0c45a4b622 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_LIBVPX_VP8_ADAPTER_H_ +#define API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_LIBVPX_VP8_ADAPTER_H_ + +#include +#include + +#include "api/video_codecs/sdp_video_format.h" +#include "modules/video_coding/codecs/vp8/include/vp8.h" + +namespace webrtc { +struct LibvpxVp8DecoderTemplateAdapter { + static std::vector SupportedFormats() { + return {SdpVideoFormat("VP8")}; + } + + static std::unique_ptr CreateDecoder( + const SdpVideoFormat& format) { + return VP8Decoder::Create(); + } +}; +} // namespace webrtc + +#endif // API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_LIBVPX_VP8_ADAPTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h new file mode 100644 index 0000000000..e0ec0010be --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_LIBVPX_VP9_ADAPTER_H_ +#define API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_LIBVPX_VP9_ADAPTER_H_ + +#include +#include + +#include "modules/video_coding/codecs/vp9/include/vp9.h" + +namespace webrtc { +struct LibvpxVp9DecoderTemplateAdapter { + static std::vector SupportedFormats() { + return SupportedVP9DecoderCodecs(); + } + + static std::unique_ptr CreateDecoder( + const SdpVideoFormat& format) { + return VP9Decoder::Create(); + } +}; +} // namespace webrtc + +#endif // API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_LIBVPX_VP9_ADAPTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory_template_open_h264_adapter.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory_template_open_h264_adapter.h new file mode 100644 index 0000000000..2746bde132 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory_template_open_h264_adapter.h @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_OPEN_H264_ADAPTER_H_ +#define API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_OPEN_H264_ADAPTER_H_ + +#include +#include + +#include "modules/video_coding/codecs/h264/include/h264.h" + +namespace webrtc { +// TODO(bugs.webrtc.org/13573): When OpenH264 is no longer a conditional build +// target remove #ifdefs. 
+struct OpenH264DecoderTemplateAdapter { + static std::vector SupportedFormats() { +#if defined(WEBRTC_USE_H264) + + return SupportedH264DecoderCodecs(); +#else + return {}; +#endif + } + + static std::unique_ptr CreateDecoder( + const SdpVideoFormat& format) { +#if defined(WEBRTC_USE_H264) + + return H264Decoder::Create(); +#else + return nullptr; +#endif + } +}; +} // namespace webrtc + +#endif // API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_OPEN_H264_ADAPTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc index 182d9db668..4bb051da4c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc @@ -16,7 +16,6 @@ #include #include -#include "absl/base/macros.h" #include "api/video/encoded_image.h" #include "api/video_codecs/video_decoder.h" #include "modules/video_coding/include/video_error_codes.h" @@ -84,8 +83,9 @@ VideoDecoderSoftwareFallbackWrapper::VideoDecoderSoftwareFallbackWrapper( hw_decoder_(std::move(hw_decoder)), fallback_decoder_(std::move(sw_fallback_decoder)), fallback_implementation_name_( - std::string(fallback_decoder_->ImplementationName()) + - " (fallback from: " + hw_decoder_->ImplementationName() + ")"), + fallback_decoder_->GetDecoderInfo().implementation_name + + " (fallback from: " + + hw_decoder_->GetDecoderInfo().implementation_name + ")"), callback_(nullptr), hw_decoded_frames_since_last_fallback_(0), hw_consequtive_generic_errors_(0) {} @@ -215,7 +215,7 @@ int32_t VideoDecoderSoftwareFallbackWrapper::Decode( } // Fallback decoder initialized, fall-through. 
- ABSL_FALLTHROUGH_INTENDED; + [[fallthrough]]; } case DecoderType::kFallback: return fallback_decoder_->Decode(input_image, missing_frames, diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.cc index b1f3ec07f1..801b80fad3 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.cc @@ -26,7 +26,6 @@ VideoCodecVP8 VideoEncoder::GetDefaultVp8Settings() { vp8_settings.numberOfTemporalLayers = 1; vp8_settings.denoisingOn = true; vp8_settings.automaticResizeOn = false; - vp8_settings.frameDroppingOn = true; vp8_settings.keyFrameInterval = 3000; return vp8_settings; @@ -38,7 +37,6 @@ VideoCodecVP9 VideoEncoder::GetDefaultVp9Settings() { vp9_settings.numberOfTemporalLayers = 1; vp9_settings.denoisingOn = true; - vp9_settings.frameDroppingOn = true; vp9_settings.keyFrameInterval = 3000; vp9_settings.adaptiveQpMode = true; vp9_settings.automaticResizeOn = true; @@ -53,7 +51,6 @@ VideoCodecH264 VideoEncoder::GetDefaultH264Settings() { VideoCodecH264 h264_settings; memset(&h264_settings, 0, sizeof(h264_settings)); - h264_settings.frameDroppingOn = true; h264_settings.keyFrameInterval = 3000; h264_settings.numberOfTemporalLayers = 1; diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.h index 7ffb8af8e2..7752ed8475 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.h @@ -66,6 +66,10 @@ class RTC_EXPORT EncodedImageCallback { // kDroppedByMediaOptimizations - dropped by MediaOptimizations (for rate // limiting purposes). // kDroppedByEncoder - dropped by encoder's internal rate limiter. + // TODO(bugs.webrtc.org/10164): Delete this enum? It duplicates the more + // general VideoStreamEncoderObserver::DropReason. Also, + // kDroppedByMediaOptimizations is not produced by any encoder, but by + // VideoStreamEncoder. enum class DropReason : uint8_t { kDroppedByMediaOptimizations, kDroppedByEncoder @@ -96,11 +100,9 @@ class RTC_EXPORT VideoEncoder { struct KOff {}; public: - // TODO(nisse): Would be nicer if kOff were a constant ScalingSettings - // rather than a magic value. However, absl::optional is not trivially copy - // constructible, and hence a constant ScalingSettings needs a static - // initializer, which is strongly discouraged in Chrome. We can hopefully - // fix this when we switch to absl::optional or std::optional. + // TODO(bugs.webrtc.org/9078): Since absl::optional should be trivially copy + // constructible, this magic value can likely be replaced by a constexpr + // ScalingSettings value. 
static constexpr KOff kOff = {}; ScalingSettings(int low, int high); diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory.h index d7cea47909..d28a2a4035 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory.h @@ -17,6 +17,7 @@ #include "absl/types/optional.h" #include "api/units/data_rate.h" +#include "api/video/render_resolution.h" #include "api/video_codecs/sdp_video_format.h" namespace webrtc { @@ -33,7 +34,10 @@ class VideoEncoderFactory { }; // An injectable class that is continuously updated with encoding conditions - // and selects the best encoder given those conditions. + // and selects the best encoder given those conditions. An implementation is + // typically stateful to avoid toggling between different encoders, which is + // costly due to recreation of objects, a new codec will always start with a + // key-frame. class EncoderSelectorInterface { public: virtual ~EncoderSelectorInterface() {} @@ -47,6 +51,13 @@ class VideoEncoderFactory { virtual absl::optional OnAvailableBitrate( const DataRate& rate) = 0; + // Called every time the encoder input resolution change. Should return a + // non-empty if an encoder switch should be performed. + virtual absl::optional OnResolutionChange( + const RenderResolution& resolution) { + return absl::nullopt; + } + // Called if the currently used encoder reports itself as broken. Should // return a non-empty if an encoder switch should be performed. virtual absl::optional OnEncoderBroken() = 0; @@ -88,6 +99,22 @@ class VideoEncoderFactory { virtual std::unique_ptr CreateVideoEncoder( const SdpVideoFormat& format) = 0; + // This method creates a EncoderSelector to use for a VideoSendStream. + // (and hence should probably been called CreateEncoderSelector()). + // + // Note: This method is unsuitable if encoding several streams that + // are using same VideoEncoderFactory (either by several streams in one + // PeerConnection or streams with different PeerConnection but same + // PeerConnectionFactory). This is due to the fact that the method is not + // given any stream identifier, nor is the EncoderSelectorInterface given any + // stream identifiers, i.e one does not know which stream is being encoded + // with help of the selector. + // + // In such scenario, the `RtpSenderInterface::SetEncoderSelector` is + // recommended. + // + // TODO(bugs.webrtc.org:14122): Deprecate and remove in favor of + // `RtpSenderInterface::SetEncoderSelector`. virtual std::unique_ptr GetEncoderSelector() const { return nullptr; } diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory_template.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory_template.h new file mode 100644 index 0000000000..643096dbbb --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory_template.h @@ -0,0 +1,135 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_H_ +#define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_H_ + +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "api/array_view.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "modules/video_coding/svc/scalability_mode_util.h" + +namespace webrtc { +// The VideoEncoderFactoryTemplate supports encoders implementations given as +// template arguments. +// +// To include an encoder in the factory it requires three static members +// functions to be defined: +// +// // Returns the supported SdpVideoFormats this encoder can produce. +// static std::vector SupportedFormats(); +// +// // Creates an encoder instance for the given format. +// static std::unique_ptr +// CreateEncoder(const SdpVideoFormat& format); +// +// // Returns true if the encoder supports the given scalability mode. +// static bool +// IsScalabilityModeSupported(ScalabilityMode scalability_mode); +// +// Note that the order of the template arguments matter as the factory will +// query/return the first encoder implementation supporting the given +// SdpVideoFormat. +template +class VideoEncoderFactoryTemplate : public VideoEncoderFactory { + public: + std::vector GetSupportedFormats() const override { + return GetSupportedFormatsInternal(); + } + + std::unique_ptr CreateVideoEncoder( + const SdpVideoFormat& format) override { + return CreateVideoEncoderInternal(format); + } + + CodecSupport QueryCodecSupport( + const SdpVideoFormat& format, + absl::optional scalability_mode) const override { + return QueryCodecSupportInternal(format, scalability_mode); + } + + private: + bool IsFormatInList( + const SdpVideoFormat& format, + rtc::ArrayView supported_formats) const { + return absl::c_any_of( + supported_formats, [&](const SdpVideoFormat& supported_format) { + return supported_format.name == format.name && + supported_format.parameters == format.parameters; + }); + } + + template + bool IsScalabilityModeSupported( + const absl::optional& scalability_mode_string) const { + if (!scalability_mode_string.has_value()) { + return true; + } + absl::optional scalability_mode = + ScalabilityModeFromString(*scalability_mode_string); + return scalability_mode.has_value() && + V::IsScalabilityModeSupported(*scalability_mode); + } + + template + std::vector GetSupportedFormatsInternal() const { + auto supported_formats = V::SupportedFormats(); + + if constexpr (sizeof...(Vs) > 0) { + // Supported formats may overlap between implementations, so duplicates + // should be filtered out. 
+ for (const auto& other_format : GetSupportedFormatsInternal()) { + if (!IsFormatInList(other_format, supported_formats)) { + supported_formats.push_back(other_format); + } + } + } + + return supported_formats; + } + + template + std::unique_ptr CreateVideoEncoderInternal( + const SdpVideoFormat& format) { + if (IsFormatInList(format, V::SupportedFormats())) { + return V::CreateEncoder(format); + } + + if constexpr (sizeof...(Vs) > 0) { + return CreateVideoEncoderInternal(format); + } + + return nullptr; + } + + template + CodecSupport QueryCodecSupportInternal( + const SdpVideoFormat& format, + const absl::optional& scalability_mode) const { + if (IsFormatInList(format, V::SupportedFormats())) { + return {.is_supported = IsScalabilityModeSupported(scalability_mode)}; + } + + if constexpr (sizeof...(Vs) > 0) { + return QueryCodecSupportInternal(format, scalability_mode); + } + + return {.is_supported = false}; + } +}; + +} // namespace webrtc + +#endif // API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h new file mode 100644 index 0000000000..417df1e192 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBAOM_AV1_ADAPTER_H_ +#define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBAOM_AV1_ADAPTER_H_ + +#include +#include + +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/sdp_video_format.h" +#include "modules/video_coding/codecs/av1/av1_svc_config.h" +#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" + +namespace webrtc { +struct LibaomAv1EncoderTemplateAdapter { + static std::vector SupportedFormats() { + absl::InlinedVector + scalability_modes = LibaomAv1EncoderSupportedScalabilityModes(); + return { + SdpVideoFormat("AV1", SdpVideoFormat::Parameters(), scalability_modes)}; + } + + static std::unique_ptr CreateEncoder( + const SdpVideoFormat& format) { + return CreateLibaomAv1Encoder(); + } + + static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) { + return LibaomAv1EncoderSupportsScalabilityMode(scalability_mode); + } +}; + +} // namespace webrtc + +#endif // API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBAOM_AV1_ADAPTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h new file mode 100644 index 0000000000..0f0a9bacd5 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBVPX_VP8_ADAPTER_H_ +#define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBVPX_VP8_ADAPTER_H_ + +#include +#include + +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/sdp_video_format.h" +#include "modules/video_coding/codecs/vp8/include/vp8.h" +#include "modules/video_coding/codecs/vp8/vp8_scalability.h" + +namespace webrtc { +struct LibvpxVp8EncoderTemplateAdapter { + static std::vector SupportedFormats() { + absl::InlinedVector + scalability_modes; + for (const auto scalability_mode : kVP8SupportedScalabilityModes) { + scalability_modes.push_back(scalability_mode); + } + + return { + SdpVideoFormat("VP8", SdpVideoFormat::Parameters(), scalability_modes)}; + } + + static std::unique_ptr CreateEncoder( + const SdpVideoFormat& format) { + return VP8Encoder::Create(); + } + + static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) { + return VP8SupportsScalabilityMode(scalability_mode); + } +}; +} // namespace webrtc + +#endif // API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBVPX_VP8_ADAPTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h new file mode 100644 index 0000000000..c10fda4dc2 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBVPX_VP9_ADAPTER_H_ +#define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBVPX_VP9_ADAPTER_H_ + +#include +#include + +#include "modules/video_coding/codecs/vp9/include/vp9.h" + +namespace webrtc { +struct LibvpxVp9EncoderTemplateAdapter { + static std::vector SupportedFormats() { + return SupportedVP9Codecs(/*add_scalability_modes=*/true); + } + + static std::unique_ptr CreateEncoder( + const SdpVideoFormat& format) { + return VP9Encoder::Create(cricket::VideoCodec(format)); + } + + static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) { + return VP9Encoder::SupportsScalabilityMode(scalability_mode); + } +}; +} // namespace webrtc + +#endif // API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBVPX_VP9_ADAPTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory_template_open_h264_adapter.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory_template_open_h264_adapter.h new file mode 100644 index 0000000000..0830460cdb --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory_template_open_h264_adapter.h @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_OPEN_H264_ADAPTER_H_ +#define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_OPEN_H264_ADAPTER_H_ + +#include +#include + +#include "modules/video_coding/codecs/h264/include/h264.h" + +namespace webrtc { +// TODO(bugs.webrtc.org/13573): When OpenH264 is no longer a conditional build +// target remove #ifdefs. +struct OpenH264EncoderTemplateAdapter { + static std::vector SupportedFormats() { +#if defined(WEBRTC_USE_H264) + return SupportedH264Codecs(/*add_scalability_modes=*/true); +#else + return {}; +#endif + } + + static std::unique_ptr CreateEncoder( + const SdpVideoFormat& format) { +#if defined(WEBRTC_USE_H264) + return H264Encoder::Create(cricket::VideoCodec(format)); +#else + return nullptr; +#endif + } + + static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) { +#if defined(WEBRTC_USE_H264) + return H264Encoder::SupportsScalabilityMode(scalability_mode); +#else + return false; +#endif + } +}; +} // namespace webrtc + +#endif // API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_OPEN_H264_ADAPTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc index 72e08a704c..39c52a0081 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc @@ -155,7 +155,7 @@ class VideoEncoderSoftwareFallbackWrapper final : public VideoEncoder { RTC_LOG(LS_WARNING) << "Trying to access encoder in uninitialized fallback wrapper."; // Return main encoder to preserve previous behavior. - ABSL_FALLTHROUGH_INTENDED; + [[fallthrough]]; case EncoderState::kMainEncoderUsed: return encoder_.get(); case EncoderState::kFallbackDueToFailure: diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/vp9_profile.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/vp9_profile.cc index 5e2bd53a86..7e627cc080 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/vp9_profile.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/vp9_profile.cc @@ -28,6 +28,8 @@ std::string VP9ProfileToString(VP9Profile profile) { return "1"; case VP9Profile::kProfile2: return "2"; + case VP9Profile::kProfile3: + return "3"; } return "0"; } @@ -44,6 +46,8 @@ absl::optional StringToVP9Profile(const std::string& str) { return VP9Profile::kProfile1; case 2: return VP9Profile::kProfile2; + case 3: + return VP9Profile::kProfile3; default: return absl::nullopt; } diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/vp9_profile.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/vp9_profile.h index e632df437b..b570bc3bb6 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/vp9_profile.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/vp9_profile.h @@ -26,6 +26,7 @@ enum class VP9Profile { kProfile0, kProfile1, kProfile2, + kProfile3, }; // Helper functions to convert VP9Profile to std::string. 
Returns "0" by diff --git a/TMessagesProj/jni/voip/webrtc/api/video_track_source_proxy_factory.h b/TMessagesProj/jni/voip/webrtc/api/video_track_source_proxy_factory.h index 7b161f4443..eb6e96429a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_track_source_proxy_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_track_source_proxy_factory.h @@ -12,6 +12,7 @@ #define API_VIDEO_TRACK_SOURCE_PROXY_FACTORY_H_ #include "api/media_stream_interface.h" +#include "rtc_base/thread.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/api/webrtc_key_value_config.h b/TMessagesProj/jni/voip/webrtc/api/webrtc_key_value_config.h new file mode 100644 index 0000000000..e3cac59698 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/webrtc_key_value_config.h @@ -0,0 +1,17 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_WEBRTC_KEY_VALUE_CONFIG_H_ +#define API_WEBRTC_KEY_VALUE_CONFIG_H_ + +// TODO(bugs.webrtc.org/10335): Remove once all migrated to +// api/field_trials_view.h +#include "api/field_trials_view.h" + +#endif // API_WEBRTC_KEY_VALUE_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/wrapping_async_dns_resolver.h b/TMessagesProj/jni/voip/webrtc/api/wrapping_async_dns_resolver.h index 80da206e75..5155b0f528 100644 --- a/TMessagesProj/jni/voip/webrtc/api/wrapping_async_dns_resolver.h +++ b/TMessagesProj/jni/voip/webrtc/api/wrapping_async_dns_resolver.h @@ -13,6 +13,7 @@ #include #include +#include #include "absl/memory/memory.h" #include "api/async_dns_resolver.h" @@ -68,14 +69,18 @@ class RTC_EXPORT WrappingAsyncDnsResolver : public AsyncDnsResolverInterface, void Start(const rtc::SocketAddress& addr, std::function callback) override { RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK_EQ(State::kNotStarted, state_); - state_ = State::kStarted; - callback_ = callback; - wrapped_->SignalDone.connect(this, - &WrappingAsyncDnsResolver::OnResolveResult); + PrepareToResolve(std::move(callback)); wrapped_->Start(addr); } + void Start(const rtc::SocketAddress& addr, + int family, + std::function callback) override { + RTC_DCHECK_RUN_ON(&sequence_checker_); + PrepareToResolve(std::move(callback)); + wrapped_->Start(addr, family); + } + const AsyncDnsResolverResult& result() const override { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK_EQ(State::kResolved, state_); @@ -92,6 +97,15 @@ class RTC_EXPORT WrappingAsyncDnsResolver : public AsyncDnsResolverInterface, return wrapped_.get(); } + void PrepareToResolve(std::function callback) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK_EQ(State::kNotStarted, state_); + state_ = State::kStarted; + callback_ = std::move(callback); + wrapped_->SignalDone.connect(this, + &WrappingAsyncDnsResolver::OnResolveResult); + } + void OnResolveResult(rtc::AsyncResolverInterface* ref) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK(state_ == State::kStarted); diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.cc b/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.cc index 6f2444901b..168d214ecd 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.cc @@ 
-33,7 +33,7 @@ namespace webrtc { -std::string AudioReceiveStream::Config::Rtp::ToString() const { +std::string AudioReceiveStreamInterface::Config::Rtp::ToString() const { char ss_buf[1024]; rtc::SimpleStringBuilder ss(ss_buf); ss << "{remote_ssrc: " << remote_ssrc; @@ -52,7 +52,7 @@ std::string AudioReceiveStream::Config::Rtp::ToString() const { return ss.str(); } -std::string AudioReceiveStream::Config::ToString() const { +std::string AudioReceiveStreamInterface::Config::ToString() const { char ss_buf[1024]; rtc::SimpleStringBuilder ss(ss_buf); ss << "{rtp: " << rtp.ToString(); @@ -65,13 +65,12 @@ std::string AudioReceiveStream::Config::ToString() const { return ss.str(); } -namespace internal { namespace { std::unique_ptr CreateChannelReceive( Clock* clock, webrtc::AudioState* audio_state, NetEqFactory* neteq_factory, - const webrtc::AudioReceiveStream::Config& config, + const webrtc::AudioReceiveStreamInterface::Config& config, RtcEventLog* event_log) { RTC_DCHECK(audio_state); internal::AudioState* internal_audio_state = @@ -81,35 +80,34 @@ std::unique_ptr CreateChannelReceive( config.rtcp_send_transport, event_log, config.rtp.local_ssrc, config.rtp.remote_ssrc, config.jitter_buffer_max_packets, config.jitter_buffer_fast_accelerate, config.jitter_buffer_min_delay_ms, - config.jitter_buffer_enable_rtx_handling, config.enable_non_sender_rtt, - config.decoder_factory, config.codec_pair_id, - std::move(config.frame_decryptor), config.crypto_options, - std::move(config.frame_transformer)); + config.enable_non_sender_rtt, config.decoder_factory, + config.codec_pair_id, std::move(config.frame_decryptor), + config.crypto_options, std::move(config.frame_transformer)); } } // namespace -AudioReceiveStream::AudioReceiveStream( +AudioReceiveStreamImpl::AudioReceiveStreamImpl( Clock* clock, PacketRouter* packet_router, NetEqFactory* neteq_factory, - const webrtc::AudioReceiveStream::Config& config, + const webrtc::AudioReceiveStreamInterface::Config& config, const rtc::scoped_refptr& audio_state, webrtc::RtcEventLog* event_log) - : AudioReceiveStream(clock, - packet_router, - config, - audio_state, - event_log, - CreateChannelReceive(clock, - audio_state.get(), - neteq_factory, - config, - event_log)) {} - -AudioReceiveStream::AudioReceiveStream( + : AudioReceiveStreamImpl(clock, + packet_router, + config, + audio_state, + event_log, + CreateChannelReceive(clock, + audio_state.get(), + neteq_factory, + config, + event_log)) {} + +AudioReceiveStreamImpl::AudioReceiveStreamImpl( Clock* clock, PacketRouter* packet_router, - const webrtc::AudioReceiveStream::Config& config, + const webrtc::AudioReceiveStreamInterface::Config& config, const rtc::scoped_refptr& audio_state, webrtc::RtcEventLog* event_log, std::unique_ptr channel_receive) @@ -117,7 +115,7 @@ AudioReceiveStream::AudioReceiveStream( audio_state_(audio_state), source_tracker_(clock), channel_receive_(std::move(channel_receive)) { - RTC_LOG(LS_INFO) << "AudioReceiveStream: " << config.rtp.remote_ssrc; + RTC_LOG(LS_INFO) << "AudioReceiveStreamImpl: " << config.rtp.remote_ssrc; RTC_DCHECK(config.decoder_factory); RTC_DCHECK(config.rtcp_send_transport); RTC_DCHECK(audio_state_); @@ -144,34 +142,34 @@ AudioReceiveStream::AudioReceiveStream( // `channel_receive_` already. 
} -AudioReceiveStream::~AudioReceiveStream() { +AudioReceiveStreamImpl::~AudioReceiveStreamImpl() { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - RTC_LOG(LS_INFO) << "~AudioReceiveStream: " << config_.rtp.remote_ssrc; + RTC_LOG(LS_INFO) << "~AudioReceiveStreamImpl: " << remote_ssrc(); Stop(); channel_receive_->SetAssociatedSendChannel(nullptr); channel_receive_->ResetReceiverCongestionControlObjects(); } -void AudioReceiveStream::RegisterWithTransport( +void AudioReceiveStreamImpl::RegisterWithTransport( RtpStreamReceiverControllerInterface* receiver_controller) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); RTC_DCHECK(!rtp_stream_receiver_); rtp_stream_receiver_ = receiver_controller->CreateReceiver( - config_.rtp.remote_ssrc, channel_receive_.get()); + remote_ssrc(), channel_receive_.get()); } -void AudioReceiveStream::UnregisterFromTransport() { +void AudioReceiveStreamImpl::UnregisterFromTransport() { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); rtp_stream_receiver_.reset(); } -void AudioReceiveStream::ReconfigureForTesting( - const webrtc::AudioReceiveStream::Config& config) { +void AudioReceiveStreamImpl::ReconfigureForTesting( + const webrtc::AudioReceiveStreamInterface::Config& config) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); // SSRC can't be changed mid-stream. - RTC_DCHECK_EQ(config_.rtp.remote_ssrc, config.rtp.remote_ssrc); - RTC_DCHECK_EQ(config_.rtp.local_ssrc, config.rtp.local_ssrc); + RTC_DCHECK_EQ(remote_ssrc(), config.rtp.remote_ssrc); + RTC_DCHECK_EQ(local_ssrc(), config.rtp.local_ssrc); // Configuration parameters which cannot be changed. RTC_DCHECK_EQ(config_.rtcp_send_transport, config.rtcp_send_transport); @@ -191,7 +189,7 @@ void AudioReceiveStream::ReconfigureForTesting( config_ = config; } -void AudioReceiveStream::Start() { +void AudioReceiveStreamImpl::Start() { RTC_DCHECK_RUN_ON(&worker_thread_checker_); if (playing_) { return; @@ -201,7 +199,7 @@ void AudioReceiveStream::Start() { audio_state()->AddReceivingStream(this); } -void AudioReceiveStream::Stop() { +void AudioReceiveStreamImpl::Stop() { RTC_DCHECK_RUN_ON(&worker_thread_checker_); if (!playing_) { return; @@ -211,45 +209,55 @@ void AudioReceiveStream::Stop() { audio_state()->RemoveReceivingStream(this); } -bool AudioReceiveStream::IsRunning() const { +bool AudioReceiveStreamImpl::transport_cc() const { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + return config_.rtp.transport_cc; +} + +void AudioReceiveStreamImpl::SetTransportCc(bool transport_cc) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + config_.rtp.transport_cc = transport_cc; +} + +bool AudioReceiveStreamImpl::IsRunning() const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); return playing_; } -void AudioReceiveStream::SetDepacketizerToDecoderFrameTransformer( +void AudioReceiveStreamImpl::SetDepacketizerToDecoderFrameTransformer( rtc::scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); channel_receive_->SetDepacketizerToDecoderFrameTransformer( std::move(frame_transformer)); } -void AudioReceiveStream::SetDecoderMap( +void AudioReceiveStreamImpl::SetDecoderMap( std::map decoder_map) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.decoder_map = std::move(decoder_map); channel_receive_->SetReceiveCodecs(config_.decoder_map); } -void AudioReceiveStream::SetUseTransportCcAndNackHistory(bool use_transport_cc, - int history_ms) { +void AudioReceiveStreamImpl::SetNackHistory(int history_ms) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK_GE(history_ms, 0); - config_.rtp.transport_cc = 
use_transport_cc; - if (config_.rtp.nack.rtp_history_ms != history_ms) { - config_.rtp.nack.rtp_history_ms = history_ms; - // TODO(solenberg): Config NACK history window (which is a packet count), - // using the actual packet size for the configured codec. - channel_receive_->SetNACKStatus(history_ms != 0, history_ms / 20); - } + + if (config_.rtp.nack.rtp_history_ms == history_ms) + return; + + config_.rtp.nack.rtp_history_ms = history_ms; + // TODO(solenberg): Config NACK history window (which is a packet count), + // using the actual packet size for the configured codec. + channel_receive_->SetNACKStatus(history_ms != 0, history_ms / 20); } -void AudioReceiveStream::SetNonSenderRttMeasurement(bool enabled) { +void AudioReceiveStreamImpl::SetNonSenderRttMeasurement(bool enabled) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.enable_non_sender_rtt = enabled; channel_receive_->SetNonSenderRttMeasurement(enabled); } -void AudioReceiveStream::SetFrameDecryptor( +void AudioReceiveStreamImpl::SetFrameDecryptor( rtc::scoped_refptr frame_decryptor) { // TODO(bugs.webrtc.org/11993): This is called via WebRtcAudioReceiveStream, // expect to be called on the network thread. @@ -257,7 +265,7 @@ void AudioReceiveStream::SetFrameDecryptor( channel_receive_->SetFrameDecryptor(std::move(frame_decryptor)); } -void AudioReceiveStream::SetRtpExtensions( +void AudioReceiveStreamImpl::SetRtpExtensions( std::vector extensions) { // TODO(bugs.webrtc.org/11993): This is called via WebRtcAudioReceiveStream, // expect to be called on the network thread. @@ -265,11 +273,21 @@ void AudioReceiveStream::SetRtpExtensions( config_.rtp.extensions = std::move(extensions); } -webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats( +const std::vector& AudioReceiveStreamImpl::GetRtpExtensions() + const { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + return config_.rtp.extensions; +} + +RtpHeaderExtensionMap AudioReceiveStreamImpl::GetRtpExtensionMap() const { + return RtpHeaderExtensionMap(config_.rtp.extensions); +} + +webrtc::AudioReceiveStreamInterface::Stats AudioReceiveStreamImpl::GetStats( bool get_and_clear_legacy_stats) const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - webrtc::AudioReceiveStream::Stats stats; - stats.remote_ssrc = config_.rtp.remote_ssrc; + webrtc::AudioReceiveStreamInterface::Stats stats; + stats.remote_ssrc = remote_ssrc(); webrtc::CallReceiveStatistics call_stats = channel_receive_->GetRTCPStatistics(); @@ -321,6 +339,9 @@ webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats( stats.jitter_buffer_target_delay_seconds = static_cast(ns.jitterBufferTargetDelayMs) / static_cast(rtc::kNumMillisecsPerSec); + stats.jitter_buffer_minimum_delay_seconds = + static_cast(ns.jitterBufferMinimumDelayMs) / + static_cast(rtc::kNumMillisecsPerSec); stats.inserted_samples_for_deceleration = ns.insertedSamplesForDeceleration; stats.removed_samples_for_acceleration = ns.removedSamplesForAcceleration; stats.expand_rate = Q14ToFloat(ns.currentExpandRate); @@ -361,34 +382,34 @@ webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats( return stats; } -void AudioReceiveStream::SetSink(AudioSinkInterface* sink) { +void AudioReceiveStreamImpl::SetSink(AudioSinkInterface* sink) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); channel_receive_->SetSink(sink); } -void AudioReceiveStream::SetGain(float gain) { +void AudioReceiveStreamImpl::SetGain(float gain) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); channel_receive_->SetChannelOutputVolumeScaling(gain); } -bool 
AudioReceiveStream::SetBaseMinimumPlayoutDelayMs(int delay_ms) { +bool AudioReceiveStreamImpl::SetBaseMinimumPlayoutDelayMs(int delay_ms) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); return channel_receive_->SetBaseMinimumPlayoutDelayMs(delay_ms); } -int AudioReceiveStream::GetBaseMinimumPlayoutDelayMs() const { +int AudioReceiveStreamImpl::GetBaseMinimumPlayoutDelayMs() const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); return channel_receive_->GetBaseMinimumPlayoutDelayMs(); } -std::vector AudioReceiveStream::GetSources() const { +std::vector AudioReceiveStreamImpl::GetSources() const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); return source_tracker_.GetSources(); } -AudioMixer::Source::AudioFrameInfo AudioReceiveStream::GetAudioFrameWithInfo( - int sample_rate_hz, - AudioFrame* audio_frame) { +AudioMixer::Source::AudioFrameInfo +AudioReceiveStreamImpl::GetAudioFrameWithInfo(int sample_rate_hz, + AudioFrame* audio_frame) { AudioMixer::Source::AudioFrameInfo audio_frame_info = channel_receive_->GetAudioFrameWithInfo(sample_rate_hz, audio_frame); if (audio_frame_info != AudioMixer::Source::AudioFrameInfo::kError) { @@ -397,33 +418,33 @@ AudioMixer::Source::AudioFrameInfo AudioReceiveStream::GetAudioFrameWithInfo( return audio_frame_info; } -int AudioReceiveStream::Ssrc() const { - return config_.rtp.remote_ssrc; +int AudioReceiveStreamImpl::Ssrc() const { + return remote_ssrc(); } -int AudioReceiveStream::PreferredSampleRate() const { +int AudioReceiveStreamImpl::PreferredSampleRate() const { return channel_receive_->PreferredSampleRate(); } -uint32_t AudioReceiveStream::id() const { +uint32_t AudioReceiveStreamImpl::id() const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - return config_.rtp.remote_ssrc; + return remote_ssrc(); } -absl::optional AudioReceiveStream::GetInfo() const { +absl::optional AudioReceiveStreamImpl::GetInfo() const { // TODO(bugs.webrtc.org/11993): This is called via RtpStreamsSynchronizer, // expect to be called on the network thread. RTC_DCHECK_RUN_ON(&worker_thread_checker_); return channel_receive_->GetSyncInfo(); } -bool AudioReceiveStream::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, - int64_t* time_ms) const { +bool AudioReceiveStreamImpl::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, + int64_t* time_ms) const { // Called on video capture thread. return channel_receive_->GetPlayoutRtpTimestamp(rtp_timestamp, time_ms); } -void AudioReceiveStream::SetEstimatedPlayoutNtpTimestampMs( +void AudioReceiveStreamImpl::SetEstimatedPlayoutNtpTimestampMs( int64_t ntp_timestamp_ms, int64_t time_ms) { // Called on video capture thread. @@ -431,21 +452,22 @@ void AudioReceiveStream::SetEstimatedPlayoutNtpTimestampMs( time_ms); } -bool AudioReceiveStream::SetMinimumPlayoutDelay(int delay_ms) { +bool AudioReceiveStreamImpl::SetMinimumPlayoutDelay(int delay_ms) { // TODO(bugs.webrtc.org/11993): This is called via RtpStreamsSynchronizer, // expect to be called on the network thread. RTC_DCHECK_RUN_ON(&worker_thread_checker_); return channel_receive_->SetMinimumPlayoutDelay(delay_ms); } -void AudioReceiveStream::AssociateSendStream(AudioSendStream* send_stream) { +void AudioReceiveStreamImpl::AssociateSendStream( + internal::AudioSendStream* send_stream) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); channel_receive_->SetAssociatedSendChannel( send_stream ? 
send_stream->GetChannel() : nullptr); associated_send_stream_ = send_stream; } -void AudioReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) { +void AudioReceiveStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) { // TODO(solenberg): Tests call this function on a network thread, libjingle // calls on the worker thread. We should move towards always using a network // thread. Then this check can be enabled. @@ -453,39 +475,38 @@ void AudioReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) { channel_receive_->ReceivedRTCPPacket(packet, length); } -void AudioReceiveStream::SetSyncGroup(const std::string& sync_group) { +void AudioReceiveStreamImpl::SetSyncGroup(absl::string_view sync_group) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - config_.sync_group = sync_group; + config_.sync_group = std::string(sync_group); } -void AudioReceiveStream::SetLocalSsrc(uint32_t local_ssrc) { +void AudioReceiveStreamImpl::SetLocalSsrc(uint32_t local_ssrc) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); // TODO(tommi): Consider storing local_ssrc in one place. config_.rtp.local_ssrc = local_ssrc; channel_receive_->OnLocalSsrcChange(local_ssrc); } -uint32_t AudioReceiveStream::local_ssrc() const { +uint32_t AudioReceiveStreamImpl::local_ssrc() const { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); RTC_DCHECK_EQ(config_.rtp.local_ssrc, channel_receive_->GetLocalSsrc()); return config_.rtp.local_ssrc; } -const webrtc::AudioReceiveStream::Config& AudioReceiveStream::config() const { - RTC_DCHECK_RUN_ON(&worker_thread_checker_); - return config_; +const std::string& AudioReceiveStreamImpl::sync_group() const { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + return config_.sync_group; } -const AudioSendStream* AudioReceiveStream::GetAssociatedSendStreamForTesting() - const { +const AudioSendStream* +AudioReceiveStreamImpl::GetAssociatedSendStreamForTesting() const { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); return associated_send_stream_; } -internal::AudioState* AudioReceiveStream::audio_state() const { +internal::AudioState* AudioReceiveStreamImpl::audio_state() const { auto* audio_state = static_cast(audio_state_.get()); RTC_DCHECK(audio_state); return audio_state; } -} // namespace internal } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h b/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h index 444ec4586e..427077fd94 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h @@ -16,6 +16,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/audio/audio_mixer.h" #include "api/neteq/neteq_factory.h" #include "api/rtp_headers.h" @@ -29,9 +30,7 @@ namespace webrtc { class PacketRouter; -class ProcessThread; class RtcEventLog; -class RtpPacketReceived; class RtpStreamReceiverControllerInterface; class RtpStreamReceiverInterface; @@ -41,36 +40,38 @@ class ChannelReceiveInterface; namespace internal { class AudioSendStream; +} // namespace internal -class AudioReceiveStream final : public webrtc::AudioReceiveStream, - public AudioMixer::Source, - public Syncable { +class AudioReceiveStreamImpl final : public webrtc::AudioReceiveStreamInterface, + public AudioMixer::Source, + public Syncable { public: - AudioReceiveStream(Clock* clock, - PacketRouter* packet_router, - NetEqFactory* neteq_factory, - const webrtc::AudioReceiveStream::Config& config, - const rtc::scoped_refptr& audio_state, - webrtc::RtcEventLog* 
event_log); + AudioReceiveStreamImpl( + Clock* clock, + PacketRouter* packet_router, + NetEqFactory* neteq_factory, + const webrtc::AudioReceiveStreamInterface::Config& config, + const rtc::scoped_refptr& audio_state, + webrtc::RtcEventLog* event_log); // For unit tests, which need to supply a mock channel receive. - AudioReceiveStream( + AudioReceiveStreamImpl( Clock* clock, PacketRouter* packet_router, - const webrtc::AudioReceiveStream::Config& config, + const webrtc::AudioReceiveStreamInterface::Config& config, const rtc::scoped_refptr& audio_state, webrtc::RtcEventLog* event_log, std::unique_ptr channel_receive); - AudioReceiveStream() = delete; - AudioReceiveStream(const AudioReceiveStream&) = delete; - AudioReceiveStream& operator=(const AudioReceiveStream&) = delete; + AudioReceiveStreamImpl() = delete; + AudioReceiveStreamImpl(const AudioReceiveStreamImpl&) = delete; + AudioReceiveStreamImpl& operator=(const AudioReceiveStreamImpl&) = delete; // Destruction happens on the worker thread. Prior to destruction the caller // must ensure that a registration with the transport has been cleared. See // `RegisterWithTransport` for details. // TODO(tommi): As a further improvement to this, performing the full // destruction on the network thread could be made the default. - ~AudioReceiveStream() override; + ~AudioReceiveStreamImpl() override; // Called on the network thread to register/unregister with the network // transport. @@ -81,23 +82,25 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream, // network thread. void UnregisterFromTransport(); - // webrtc::AudioReceiveStream implementation. + // webrtc::AudioReceiveStreamInterface implementation. void Start() override; void Stop() override; - const RtpConfig& rtp_config() const override { return config_.rtp; } + bool transport_cc() const override; + void SetTransportCc(bool transport_cc) override; bool IsRunning() const override; void SetDepacketizerToDecoderFrameTransformer( rtc::scoped_refptr frame_transformer) override; void SetDecoderMap(std::map decoder_map) override; - void SetUseTransportCcAndNackHistory(bool use_transport_cc, - int history_ms) override; + void SetNackHistory(int history_ms) override; void SetNonSenderRttMeasurement(bool enabled) override; void SetFrameDecryptor(rtc::scoped_refptr frame_decryptor) override; void SetRtpExtensions(std::vector extensions) override; + const std::vector& GetRtpExtensions() const override; + RtpHeaderExtensionMap GetRtpExtensionMap() const override; - webrtc::AudioReceiveStream::Stats GetStats( + webrtc::AudioReceiveStreamInterface::Stats GetStats( bool get_and_clear_legacy_stats) const override; void SetSink(AudioSinkInterface* sink) override; void SetGain(float gain) override; @@ -120,29 +123,33 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream, int64_t time_ms) override; bool SetMinimumPlayoutDelay(int delay_ms) override; - void AssociateSendStream(AudioSendStream* send_stream); + void AssociateSendStream(internal::AudioSendStream* send_stream); void DeliverRtcp(const uint8_t* packet, size_t length); - void SetSyncGroup(const std::string& sync_group); + void SetSyncGroup(absl::string_view sync_group); void SetLocalSsrc(uint32_t local_ssrc); uint32_t local_ssrc() const; - uint32_t remote_ssrc() const { + uint32_t remote_ssrc() const override { // The remote_ssrc member variable of config_ will never change and can be // considered const. 
return config_.rtp.remote_ssrc; } - const webrtc::AudioReceiveStream::Config& config() const; + // Returns a reference to the currently set sync group of the stream. + // Must be called on the packet delivery thread. + const std::string& sync_group() const; + const AudioSendStream* GetAssociatedSendStreamForTesting() const; // TODO(tommi): Remove this method. - void ReconfigureForTesting(const webrtc::AudioReceiveStream::Config& config); + void ReconfigureForTesting( + const webrtc::AudioReceiveStreamInterface::Config& config); private: - AudioState* audio_state() const; + internal::AudioState* audio_state() const; RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_thread_checker_; // TODO(bugs.webrtc.org/11993): This checker conceptually represents @@ -153,7 +160,7 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream, // that belong to the network thread. Once the packets are fully delivered // on the network thread, this comment will be deleted. RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_sequence_checker_; - webrtc::AudioReceiveStream::Config config_; + webrtc::AudioReceiveStreamInterface::Config config_; rtc::scoped_refptr audio_state_; SourceTracker source_tracker_; const std::unique_ptr channel_receive_; @@ -165,7 +172,6 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream, std::unique_ptr rtp_stream_receiver_ RTC_GUARDED_BY(packet_sequence_checker_); }; -} // namespace internal } // namespace webrtc #endif // AUDIO_AUDIO_RECEIVE_STREAM_H_ diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.cc b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.cc index 08bb4e6bbf..097ffcb835 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.cc @@ -22,6 +22,7 @@ #include "api/crypto/frame_encryptor_interface.h" #include "api/function_view.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/task_queue/task_queue_base.h" #include "audio/audio_state.h" #include "audio/channel_send.h" #include "audio/conversion.h" @@ -35,11 +36,9 @@ #include "modules/audio_processing/include/audio_processing.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "rtc_base/checks.h" -#include "rtc_base/event.h" #include "rtc_base/logging.h" #include "rtc_base/strings/audio_format_to_string.h" -#include "rtc_base/task_queue.h" -#include "system_wrappers/include/field_trial.h" +#include "rtc_base/trace_event.h" namespace webrtc { namespace { @@ -75,6 +74,7 @@ void UpdateEventLogStreamConfig(RtcEventLog* event_log, event_log->Log(std::make_unique( std::move(rtclog_config))); } + } // namespace constexpr char AudioAllocationConfig::kKey[]; @@ -88,8 +88,9 @@ std::unique_ptr AudioAllocationConfig::Parser() { "rate_prio", &bitrate_priority); } -AudioAllocationConfig::AudioAllocationConfig() { - Parser()->Parse(field_trial::FindFullName(kKey)); +AudioAllocationConfig::AudioAllocationConfig( + const FieldTrialsView& field_trials) { + Parser()->Parse(field_trials.Lookup(kKey)); if (priority_bitrate_raw && !priority_bitrate.IsZero()) { RTC_LOG(LS_WARNING) << "'priority_bitrate' and '_raw' are mutually " "exclusive but both were configured."; @@ -106,28 +107,31 @@ AudioSendStream::AudioSendStream( BitrateAllocatorInterface* bitrate_allocator, RtcEventLog* event_log, RtcpRttStats* rtcp_rtt_stats, - const absl::optional& suspended_rtp_state) - : AudioSendStream(clock, - config, - audio_state, - task_queue_factory, - rtp_transport, - bitrate_allocator, - event_log, - 
suspended_rtp_state, - voe::CreateChannelSend( - clock, - task_queue_factory, - config.send_transport, - rtcp_rtt_stats, - event_log, - config.frame_encryptor, - config.crypto_options, - config.rtp.extmap_allow_mixed, - config.rtcp_report_interval_ms, - config.rtp.ssrc, - config.frame_transformer, - rtp_transport->transport_feedback_observer())) {} + const absl::optional& suspended_rtp_state, + const FieldTrialsView& field_trials) + : AudioSendStream( + clock, + config, + audio_state, + task_queue_factory, + rtp_transport, + bitrate_allocator, + event_log, + suspended_rtp_state, + voe::CreateChannelSend(clock, + task_queue_factory, + config.send_transport, + rtcp_rtt_stats, + event_log, + config.frame_encryptor.get(), + config.crypto_options, + config.rtp.extmap_allow_mixed, + config.rtcp_report_interval_ms, + config.rtp.ssrc, + config.frame_transformer, + rtp_transport->transport_feedback_observer(), + field_trials), + field_trials) {} AudioSendStream::AudioSendStream( Clock* clock, @@ -138,21 +142,24 @@ AudioSendStream::AudioSendStream( BitrateAllocatorInterface* bitrate_allocator, RtcEventLog* event_log, const absl::optional& suspended_rtp_state, - std::unique_ptr channel_send) + std::unique_ptr channel_send, + const FieldTrialsView& field_trials) : clock_(clock), + field_trials_(field_trials), rtp_transport_queue_(rtp_transport->GetWorkerQueue()), allocate_audio_without_feedback_( - field_trial::IsEnabled("WebRTC-Audio-ABWENoTWCC")), + field_trials_.IsEnabled("WebRTC-Audio-ABWENoTWCC")), enable_audio_alr_probing_( - !field_trial::IsDisabled("WebRTC-Audio-AlrProbing")), + !field_trials_.IsDisabled("WebRTC-Audio-AlrProbing")), send_side_bwe_with_overhead_( - !field_trial::IsDisabled("WebRTC-SendSideBwe-WithOverhead")), + !field_trials_.IsDisabled("WebRTC-SendSideBwe-WithOverhead")), + allocation_settings_(field_trials_), config_(Config(/*send_transport=*/nullptr)), audio_state_(audio_state), channel_send_(std::move(channel_send)), event_log_(event_log), use_legacy_overhead_calculation_( - field_trial::IsEnabled("WebRTC-Audio-LegacyOverhead")), + field_trials_.IsEnabled("WebRTC-Audio-LegacyOverhead")), bitrate_allocator_(bitrate_allocator), rtp_transport_(rtp_transport), rtp_rtcp_module_(channel_send_->GetRtpRtcp()), @@ -169,7 +176,6 @@ AudioSendStream::AudioSendStream( RTC_DCHECK_RUN_ON(&worker_thread_checker_); ConfigureStream(config, true); UpdateCachedTargetAudioBitrateConstraints(); - pacer_thread_checker_.Detach(); } AudioSendStream::~AudioSendStream() { @@ -177,11 +183,10 @@ AudioSendStream::~AudioSendStream() { RTC_LOG(LS_INFO) << "~AudioSendStream: " << config_.rtp.ssrc; RTC_DCHECK(!sending_); channel_send_->ResetSenderCongestionControlObjects(); + // Blocking call to synchronize state with worker queue to ensure that there // are no pending tasks left that keeps references to audio. 
- rtc::Event thread_sync_event; - rtp_transport_queue_->PostTask([&] { thread_sync_event.Set(); }); - thread_sync_event.Wait(rtc::Event::kForever); + rtp_transport_queue_->RunSynchronous([] {}); } const webrtc::AudioSendStream::Config& AudioSendStream::GetConfig() const { @@ -386,6 +391,7 @@ void AudioSendStream::Stop() { void AudioSendStream::SendAudioData(std::unique_ptr audio_frame) { RTC_CHECK_RUNS_SERIALIZED(&audio_capture_race_checker_); RTC_DCHECK_GT(audio_frame->sample_rate_hz_, 0); + TRACE_EVENT0("webrtc", "AudioSendStream::SendAudioData"); double duration = static_cast(audio_frame->samples_per_channel_) / audio_frame->sample_rate_hz_; { @@ -435,6 +441,7 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats( call_stats.header_and_padding_bytes_sent; stats.retransmitted_bytes_sent = call_stats.retransmitted_bytes_sent; stats.packets_sent = call_stats.packetsSent; + stats.total_packet_send_delay = call_stats.total_packet_send_delay; stats.retransmitted_packets_sent = call_stats.retransmitted_packets_sent; // RTT isn't known until a RTCP report is received. Until then, VoiceEngine // returns 0 to indicate an error value. @@ -469,7 +476,6 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats( stats.total_input_duration = audio_level_.TotalDuration(); } - stats.typing_noise_detected = audio_state()->typing_noise_detected(); stats.ana_statistics = channel_send_->GetANAStatistics(); AudioProcessing* ap = audio_state_->audio_processing(); @@ -640,7 +646,8 @@ bool AudioSendStream::SetupSendCodec(const Config& new_config) { AudioEncoderCopyRed::Config red_config; red_config.payload_type = *spec.red_payload_type; red_config.speech_encoder = std::move(encoder); - encoder = std::make_unique(std::move(red_config)); + encoder = std::make_unique(std::move(red_config), + field_trials_); } // Set currently known overhead (used in ANA, opus only). 
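// ----------------------------------------------------------------------------
// Editor's note (illustration only, not part of the patch): the hunks above
// replace two AudioSendStream idioms. Global field-trial lookups give way to an
// injected const FieldTrialsView&, and the hand-rolled "post a task, wait on an
// event" queue flush gives way to MaybeWorkerThread::RunSynchronous(). The
// sketch below contrasts the two flush idioms and the two trial lookups; the
// helper names (FlushOld, FlushNew, AllocateWithoutFeedback*) are hypothetical
// and exist only for this illustration, which assumes the WebRTC headers named
// in the includes are on the include path.

#include "api/field_trials_view.h"
#include "modules/utility/maybe_worker_thread.h"
#include "rtc_base/event.h"
#include "rtc_base/task_queue.h"
#include "system_wrappers/include/field_trial.h"

namespace {

// Legacy flush removed by this patch: block until all previously queued tasks
// on the rtp transport queue have run.
void FlushOld(rtc::TaskQueue* queue) {
  rtc::Event done;
  queue->PostTask([&done] { done.Set(); });  // runs after the pending tasks
  done.Wait(rtc::Event::kForever);           // the caller blocks here
}

// Replacement used by the patch: MaybeWorkerThread already provides the
// blocking round trip, so no event bookkeeping is needed.
void FlushNew(webrtc::MaybeWorkerThread* queue) {
  queue->RunSynchronous([] {});  // empty task, returns once it has executed
}

// Field-trial lookup, before and after: the global registry is replaced by a
// per-call FieldTrialsView passed down through the constructors.
bool AllocateWithoutFeedbackOld() {
  return webrtc::field_trial::IsEnabled("WebRTC-Audio-ABWENoTWCC");
}
bool AllocateWithoutFeedbackNew(const webrtc::FieldTrialsView& trials) {
  return trials.IsEnabled("WebRTC-Audio-ABWENoTWCC");
}

}  // namespace
// ----------------------------------------------------------------------------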
@@ -838,9 +845,9 @@ void AudioSendStream::ConfigureBitrateObserver() { if (allocation_settings_.priority_bitrate_raw) priority_bitrate = *allocation_settings_.priority_bitrate_raw; - rtp_transport_queue_->PostTask([this, constraints, priority_bitrate, - config_bitrate_priority = - config_.bitrate_priority] { + rtp_transport_queue_->RunOrPost([this, constraints, priority_bitrate, + config_bitrate_priority = + config_.bitrate_priority] { RTC_DCHECK_RUN_ON(rtp_transport_queue_); bitrate_allocator_->AddObserver( this, @@ -855,13 +862,10 @@ void AudioSendStream::ConfigureBitrateObserver() { void AudioSendStream::RemoveBitrateObserver() { registered_with_allocator_ = false; - rtc::Event thread_sync_event; - rtp_transport_queue_->PostTask([this, &thread_sync_event] { + rtp_transport_queue_->RunSynchronous([this] { RTC_DCHECK_RUN_ON(rtp_transport_queue_); bitrate_allocator_->RemoveObserver(this); - thread_sync_event.Set(); }); - thread_sync_event.Wait(rtc::Event::kForever); } absl::optional @@ -924,7 +928,7 @@ void AudioSendStream::UpdateCachedTargetAudioBitrateConstraints() { if (!new_constraints.has_value()) { return; } - rtp_transport_queue_->PostTask([this, new_constraints]() { + rtp_transport_queue_->RunOrPost([this, new_constraints]() { RTC_DCHECK_RUN_ON(rtp_transport_queue_); cached_constraints_ = new_constraints; }); diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.h b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.h index b40750891c..4962ccd7a3 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.h +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.h @@ -15,13 +15,17 @@ #include #include +#include "absl/functional/any_invocable.h" +#include "api/field_trials_view.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "audio/audio_level.h" #include "audio/channel_send.h" #include "call/audio_send_stream.h" #include "call/audio_state.h" #include "call/bitrate_allocator.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" +#include "modules/utility/maybe_worker_thread.h" #include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/race_checker.h" #include "rtc_base/synchronization/mutex.h" @@ -46,7 +50,7 @@ struct AudioAllocationConfig { absl::optional bitrate_priority; std::unique_ptr Parser(); - AudioAllocationConfig(); + explicit AudioAllocationConfig(const FieldTrialsView& field_trials); }; namespace internal { class AudioState; @@ -62,7 +66,8 @@ class AudioSendStream final : public webrtc::AudioSendStream, BitrateAllocatorInterface* bitrate_allocator, RtcEventLog* event_log, RtcpRttStats* rtcp_rtt_stats, - const absl::optional& suspended_rtp_state); + const absl::optional& suspended_rtp_state, + const FieldTrialsView& field_trials); // For unit tests, which need to supply a mock ChannelSend. 
AudioSendStream(Clock* clock, const webrtc::AudioSendStream::Config& config, @@ -72,7 +77,8 @@ class AudioSendStream final : public webrtc::AudioSendStream, BitrateAllocatorInterface* bitrate_allocator, RtcEventLog* event_log, const absl::optional& suspended_rtp_state, - std::unique_ptr channel_send); + std::unique_ptr channel_send, + const FieldTrialsView& field_trials); AudioSendStream() = delete; AudioSendStream(const AudioSendStream&) = delete; @@ -160,11 +166,11 @@ class AudioSendStream final : public webrtc::AudioSendStream, RTC_RUN_ON(worker_thread_checker_); Clock* clock_; + const FieldTrialsView& field_trials_; SequenceChecker worker_thread_checker_; - SequenceChecker pacer_thread_checker_; rtc::RaceChecker audio_capture_race_checker_; - rtc::TaskQueue* rtp_transport_queue_; + MaybeWorkerThread* rtp_transport_queue_; const bool allocate_audio_without_feedback_; const bool force_no_audio_feedback_ = allocate_audio_without_feedback_; diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream_tests.cc b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream_tests.cc index e3895039d8..2ec7229bfb 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream_tests.cc @@ -31,7 +31,7 @@ enum : int { // The first valid value is 1. class AudioSendTest : public SendTest { public: - AudioSendTest() : SendTest(CallTest::kDefaultTimeoutMs) {} + AudioSendTest() : SendTest(CallTest::kDefaultTimeout) {} size_t GetNumVideoStreams() const override { return 0; } size_t GetNumAudioStreams() const override { return 1; } @@ -61,9 +61,9 @@ TEST_F(AudioSendStreamCallTest, SupportsCName) { return SEND_PACKET; } - void ModifyAudioConfigs( - AudioSendStream::Config* send_config, - std::vector* receive_configs) override { + void ModifyAudioConfigs(AudioSendStream::Config* send_config, + std::vector* + receive_configs) override { send_config->rtp.c_name = kCName; } @@ -90,9 +90,9 @@ TEST_F(AudioSendStreamCallTest, NoExtensionsByDefault) { return SEND_PACKET; } - void ModifyAudioConfigs( - AudioSendStream::Config* send_config, - std::vector* receive_configs) override { + void ModifyAudioConfigs(AudioSendStream::Config* send_config, + std::vector* + receive_configs) override { send_config->rtp.extensions.clear(); } @@ -129,9 +129,9 @@ TEST_F(AudioSendStreamCallTest, SupportsAudioLevel) { return SEND_PACKET; } - void ModifyAudioConfigs( - AudioSendStream::Config* send_config, - std::vector* receive_configs) override { + void ModifyAudioConfigs(AudioSendStream::Config* send_config, + std::vector* + receive_configs) override { send_config->rtp.extensions.clear(); send_config->rtp.extensions.push_back( RtpExtension(RtpExtension::kAudioLevelUri, kAudioLevelExtensionId)); @@ -171,9 +171,9 @@ class TransportWideSequenceNumberObserver : public AudioSendTest { return SEND_PACKET; } - void ModifyAudioConfigs( - AudioSendStream::Config* send_config, - std::vector* receive_configs) override { + void ModifyAudioConfigs(AudioSendStream::Config* send_config, + std::vector* + receive_configs) override { send_config->rtp.extensions.clear(); send_config->rtp.extensions.push_back( RtpExtension(RtpExtension::kTransportSequenceNumberUri, @@ -223,9 +223,9 @@ TEST_F(AudioSendStreamCallTest, SendDtmf) { return SEND_PACKET; } - void OnAudioStreamsCreated( - AudioSendStream* send_stream, - const std::vector& receive_streams) override { + void OnAudioStreamsCreated(AudioSendStream* send_stream, + const std::vector& + receive_streams) override { // Need to 
start stream here, else DTMF events are dropped. send_stream->Start(); for (int event = kDtmfEventFirst; event <= kDtmfEventLast; ++event) { diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_state.cc b/TMessagesProj/jni/voip/webrtc/audio/audio_state.cc index 9e5b63b999..76ff152eea 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_state.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_state.cc @@ -15,20 +15,21 @@ #include #include +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" #include "audio/audio_receive_stream.h" #include "audio/audio_send_stream.h" #include "modules/audio_device/include/audio_device.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" -#include "rtc_base/thread.h" namespace webrtc { namespace internal { AudioState::AudioState(const AudioState::Config& config) : config_(config), - audio_transport_(config_.audio_mixer, + audio_transport_(config_.audio_mixer.get(), config_.audio_processing.get(), config_.async_audio_processing_factory.get()) { process_thread_checker_.Detach(); @@ -37,9 +38,10 @@ AudioState::AudioState(const AudioState::Config& config) } AudioState::~AudioState() { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&thread_checker_); RTC_DCHECK(receiving_streams_.empty()); RTC_DCHECK(sending_streams_.empty()); + RTC_DCHECK(!null_audio_poller_.Running()); } AudioProcessing* AudioState::audio_processing() { @@ -50,17 +52,13 @@ AudioTransport* AudioState::audio_transport() { return &audio_transport_; } -bool AudioState::typing_noise_detected() const { - RTC_DCHECK(thread_checker_.IsCurrent()); - return audio_transport_.typing_noise_detected(); -} - -void AudioState::AddReceivingStream(webrtc::AudioReceiveStream* stream) { - RTC_DCHECK(thread_checker_.IsCurrent()); +void AudioState::AddReceivingStream( + webrtc::AudioReceiveStreamInterface* stream) { + RTC_DCHECK_RUN_ON(&thread_checker_); RTC_DCHECK_EQ(0, receiving_streams_.count(stream)); receiving_streams_.insert(stream); if (!config_.audio_mixer->AddSource( - static_cast(stream))) { + static_cast(stream))) { RTC_DLOG(LS_ERROR) << "Failed to add source to mixer."; } @@ -78,12 +76,13 @@ void AudioState::AddReceivingStream(webrtc::AudioReceiveStream* stream) { } } -void AudioState::RemoveReceivingStream(webrtc::AudioReceiveStream* stream) { - RTC_DCHECK(thread_checker_.IsCurrent()); +void AudioState::RemoveReceivingStream( + webrtc::AudioReceiveStreamInterface* stream) { + RTC_DCHECK_RUN_ON(&thread_checker_); auto count = receiving_streams_.erase(stream); RTC_DCHECK_EQ(1, count); config_.audio_mixer->RemoveSource( - static_cast(stream)); + static_cast(stream)); UpdateNullAudioPollerState(); if (receiving_streams_.empty()) { config_.audio_device_module->StopPlayout(); @@ -93,7 +92,7 @@ void AudioState::RemoveReceivingStream(webrtc::AudioReceiveStream* stream) { void AudioState::AddSendingStream(webrtc::AudioSendStream* stream, int sample_rate_hz, size_t num_channels) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&thread_checker_); auto& properties = sending_streams_[stream]; properties.sample_rate_hz = sample_rate_hz; properties.num_channels = num_channels; @@ -113,7 +112,7 @@ void AudioState::AddSendingStream(webrtc::AudioSendStream* stream, } void AudioState::RemoveSendingStream(webrtc::AudioSendStream* stream) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&thread_checker_); auto count = sending_streams_.erase(stream); 
RTC_DCHECK_EQ(1, count); UpdateAudioTransportWithSendingStreams(); @@ -124,7 +123,7 @@ void AudioState::RemoveSendingStream(webrtc::AudioSendStream* stream) { void AudioState::SetPlayout(bool enabled) { RTC_LOG(LS_INFO) << "SetPlayout(" << enabled << ")"; - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&thread_checker_); if (playout_enabled_ != enabled) { playout_enabled_ = enabled; if (enabled) { @@ -141,7 +140,7 @@ void AudioState::SetPlayout(bool enabled) { void AudioState::SetRecording(bool enabled) { RTC_LOG(LS_INFO) << "SetRecording(" << enabled << ")"; - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&thread_checker_); if (recording_enabled_ != enabled) { recording_enabled_ = enabled; if (enabled) { @@ -177,10 +176,32 @@ void AudioState::UpdateNullAudioPollerState() { // Run NullAudioPoller when there are receiving streams and playout is // disabled. if (!receiving_streams_.empty() && !playout_enabled_) { - if (!null_audio_poller_) - null_audio_poller_ = std::make_unique(&audio_transport_); + if (!null_audio_poller_.Running()) { + AudioTransport* audio_transport = &audio_transport_; + null_audio_poller_ = RepeatingTaskHandle::Start( + TaskQueueBase::Current(), [audio_transport] { + static constexpr size_t kNumChannels = 1; + static constexpr uint32_t kSamplesPerSecond = 48'000; + // 10ms of samples + static constexpr size_t kNumSamples = kSamplesPerSecond / 100; + + // Buffer to hold the audio samples. + int16_t buffer[kNumSamples * kNumChannels]; + + // Output variables from `NeedMorePlayData`. + size_t n_samples; + int64_t elapsed_time_ms; + int64_t ntp_time_ms; + audio_transport->NeedMorePlayData( + kNumSamples, sizeof(int16_t), kNumChannels, kSamplesPerSecond, + buffer, n_samples, &elapsed_time_ms, &ntp_time_ms); + + // Reschedule the next poll iteration. 
+ return TimeDelta::Millis(10); + }); + } } else { - null_audio_poller_.reset(); + null_audio_poller_.Stop(); } } } // namespace internal diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_state.h b/TMessagesProj/jni/voip/webrtc/audio/audio_state.h index 55f35511bf..6c2b7aa453 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_state.h +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_state.h @@ -16,15 +16,16 @@ #include "api/sequence_checker.h" #include "audio/audio_transport_impl.h" -#include "audio/null_audio_poller.h" #include "call/audio_state.h" #include "rtc_base/containers/flat_set.h" #include "rtc_base/ref_count.h" +#include "rtc_base/task_utils/repeating_task.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { class AudioSendStream; -class AudioReceiveStream; +class AudioReceiveStreamInterface; namespace internal { @@ -51,10 +52,8 @@ class AudioState : public webrtc::AudioState { return config_.audio_device_module.get(); } - bool typing_noise_detected() const; - - void AddReceivingStream(webrtc::AudioReceiveStream* stream); - void RemoveReceivingStream(webrtc::AudioReceiveStream* stream); + void AddReceivingStream(webrtc::AudioReceiveStreamInterface* stream); + void RemoveReceivingStream(webrtc::AudioReceiveStreamInterface* stream); void AddSendingStream(webrtc::AudioSendStream* stream, int sample_rate_hz, @@ -63,7 +62,7 @@ class AudioState : public webrtc::AudioState { private: void UpdateAudioTransportWithSendingStreams(); - void UpdateNullAudioPollerState(); + void UpdateNullAudioPollerState() RTC_RUN_ON(&thread_checker_); SequenceChecker thread_checker_; SequenceChecker process_thread_checker_; @@ -78,9 +77,9 @@ class AudioState : public webrtc::AudioState { // Null audio poller is used to continue polling the audio streams if audio // playout is disabled so that audio processing still happens and the audio // stats are still updated. 
- std::unique_ptr null_audio_poller_; + RepeatingTaskHandle null_audio_poller_ RTC_GUARDED_BY(&thread_checker_); - webrtc::flat_set receiving_streams_; + webrtc::flat_set receiving_streams_; struct StreamProperties { int sample_rate_hz = 0; size_t num_channels = 0; diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.cc b/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.cc index 2a80ea893d..9f2823bcda 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.cc @@ -20,6 +20,7 @@ #include "modules/async_audio_processing/async_audio_processing.h" #include "modules/audio_processing/include/audio_frame_proxies.h" #include "rtc_base/checks.h" +#include "rtc_base/trace_event.h" namespace webrtc { @@ -70,6 +71,8 @@ int Resample(const AudioFrame& frame, const int destination_sample_rate, PushResampler* resampler, int16_t* destination) { + TRACE_EVENT2("webrtc", "Resample", "frame sample rate", frame.sample_rate_hz_, + "destination_sample_rate", destination_sample_rate); const int number_of_channels = static_cast(frame.num_channels_); const int target_number_of_samples_per_channel = destination_sample_rate / 100; @@ -102,6 +105,23 @@ AudioTransportImpl::AudioTransportImpl( AudioTransportImpl::~AudioTransportImpl() {} +int32_t AudioTransportImpl::RecordedDataIsAvailable( + const void* audio_data, + const size_t number_of_frames, + const size_t bytes_per_sample, + const size_t number_of_channels, + const uint32_t sample_rate, + const uint32_t audio_delay_milliseconds, + const int32_t clock_drift, + const uint32_t volume, + const bool key_pressed, + uint32_t& new_mic_volume) { // NOLINT: to avoid changing APIs + return RecordedDataIsAvailable( + audio_data, number_of_frames, bytes_per_sample, number_of_channels, + sample_rate, audio_delay_milliseconds, clock_drift, volume, key_pressed, + new_mic_volume, /* estimated_capture_time_ns */ 0); +} + // Not used in Chromium. Process captured audio and distribute to all sending // streams, and try to do this at the lowest possible sample rate. int32_t AudioTransportImpl::RecordedDataIsAvailable( @@ -114,7 +134,9 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( const int32_t /*clock_drift*/, const uint32_t /*volume*/, const bool key_pressed, - uint32_t& /*new_mic_volume*/) { // NOLINT: to avoid changing APIs + uint32_t& /*new_mic_volume*/, + const int64_t + estimated_capture_time_ns) { // NOLINT: to avoid changing APIs RTC_DCHECK(audio_data); RTC_DCHECK_GE(number_of_channels, 1); RTC_DCHECK_LE(number_of_channels, 2); @@ -144,25 +166,8 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( ProcessCaptureFrame(audio_delay_milliseconds, key_pressed, swap_stereo_channels, audio_processing_, audio_frame.get()); - - // Typing detection (utilizes the APM/VAD decision). We let the VAD determine - // if we're using this feature or not. - // TODO(solenberg): GetConfig() takes a lock. Work around that. - bool typing_detected = false; - if (audio_processing_ && - audio_processing_->GetConfig().voice_detection.enabled) { - if (audio_frame->vad_activity_ != AudioFrame::kVadUnknown) { - bool vad_active = audio_frame->vad_activity_ == AudioFrame::kVadActive; - typing_detected = typing_detection_.Process(key_pressed, vad_active); - } - } - - // Copy frame and push to each sending stream. The copy is required since an - // encoding task will be posted internally to each stream. 
- { - MutexLock lock(&capture_lock_); - typing_noise_detected_ = typing_detected; - } + audio_frame->set_absolute_capture_timestamp_ms(estimated_capture_time_ns / + 1000000); RTC_DCHECK_GT(audio_frame->samples_per_channel_, 0); if (async_audio_processing_) @@ -175,6 +180,7 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( void AudioTransportImpl::SendProcessedData( std::unique_ptr audio_frame) { + TRACE_EVENT0("webrtc", "AudioTransportImpl::SendProcessedData"); RTC_DCHECK_GT(audio_frame->samples_per_channel_, 0); MutexLock lock(&capture_lock_); if (audio_senders_.empty()) @@ -200,6 +206,7 @@ int32_t AudioTransportImpl::NeedMorePlayData(const size_t nSamples, size_t& nSamplesOut, int64_t* elapsed_time_ms, int64_t* ntp_time_ms) { + TRACE_EVENT0("webrtc", "AudioTransportImpl::SendProcessedData"); RTC_DCHECK_EQ(sizeof(int16_t) * nChannels, nBytesPerSample); RTC_DCHECK_GE(nChannels, 1); RTC_DCHECK_LE(nChannels, 2); @@ -237,6 +244,8 @@ void AudioTransportImpl::PullRenderData(int bits_per_sample, void* audio_data, int64_t* elapsed_time_ms, int64_t* ntp_time_ms) { + TRACE_EVENT2("webrtc", "AudioTransportImpl::PullRenderData", "sample_rate", + sample_rate, "number_of_frames", number_of_frames); RTC_DCHECK_EQ(bits_per_sample, 16); RTC_DCHECK_GE(number_of_channels, 1); RTC_DCHECK_GE(sample_rate, AudioProcessing::NativeRate::kSampleRate8kHz); @@ -270,8 +279,4 @@ void AudioTransportImpl::SetStereoChannelSwapping(bool enable) { swap_stereo_channels_ = enable; } -bool AudioTransportImpl::typing_noise_detected() const { - MutexLock lock(&capture_lock_); - return typing_noise_detected_; -} } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.h b/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.h index f3ca2fa848..ba067de99d 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.h +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.h @@ -20,7 +20,6 @@ #include "modules/async_audio_processing/async_audio_processing.h" #include "modules/audio_device/include/audio_device.h" #include "modules/audio_processing/include/audio_processing.h" -#include "modules/audio_processing/typing_detection.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -41,21 +40,34 @@ class AudioTransportImpl : public AudioTransport { ~AudioTransportImpl() override; + // TODO(bugs.webrtc.org/13620) Deprecate this function int32_t RecordedDataIsAvailable(const void* audioSamples, - const size_t nSamples, - const size_t nBytesPerSample, - const size_t nChannels, - const uint32_t samplesPerSec, - const uint32_t totalDelayMS, - const int32_t clockDrift, - const uint32_t currentMicLevel, - const bool keyPressed, + size_t nSamples, + size_t nBytesPerSample, + size_t nChannels, + uint32_t samplesPerSec, + uint32_t totalDelayMS, + int32_t clockDrift, + uint32_t currentMicLevel, + bool keyPressed, uint32_t& newMicLevel) override; - int32_t NeedMorePlayData(const size_t nSamples, - const size_t nBytesPerSample, - const size_t nChannels, - const uint32_t samplesPerSec, + int32_t RecordedDataIsAvailable(const void* audioSamples, + size_t nSamples, + size_t nBytesPerSample, + size_t nChannels, + uint32_t samplesPerSec, + uint32_t totalDelayMS, + int32_t clockDrift, + uint32_t currentMicLevel, + bool keyPressed, + uint32_t& newMicLevel, + int64_t estimated_capture_time_ns) override; + + int32_t NeedMorePlayData(size_t nSamples, + size_t nBytesPerSample, + size_t nChannels, + uint32_t samplesPerSec, void* audioSamples, size_t& 
nSamplesOut, int64_t* elapsed_time_ms, @@ -73,7 +85,6 @@ class AudioTransportImpl : public AudioTransport { int send_sample_rate_hz, size_t send_num_channels); void SetStereoChannelSwapping(bool enable); - bool typing_noise_detected() const; private: void SendProcessedData(std::unique_ptr audio_frame); @@ -90,10 +101,8 @@ class AudioTransportImpl : public AudioTransport { std::vector audio_senders_ RTC_GUARDED_BY(capture_lock_); int send_sample_rate_hz_ RTC_GUARDED_BY(capture_lock_) = 8000; size_t send_num_channels_ RTC_GUARDED_BY(capture_lock_) = 1; - bool typing_noise_detected_ RTC_GUARDED_BY(capture_lock_) = false; bool swap_stereo_channels_ RTC_GUARDED_BY(capture_lock_) = false; PushResampler capture_resampler_; - TypingDetection typing_detection_; // Render side. diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc b/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc index 6c16f435fa..363eebf0a1 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc @@ -21,7 +21,9 @@ #include "api/frame_transformer_interface.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" #include "audio/audio_level.h" #include "audio/channel_receive_frame_transformer_delegate.h" #include "audio/channel_send.h" @@ -39,19 +41,16 @@ #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" -#include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" -#include "rtc_base/format_macros.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_minmax.h" #include "rtc_base/race_checker.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/time_utils.h" +#include "rtc_base/trace_event.h" #include "system_wrappers/include/metrics.h" +#include "system_wrappers/include/ntp_time.h" namespace webrtc { namespace voe { @@ -96,7 +95,6 @@ class ChannelReceive : public ChannelReceiveInterface, size_t jitter_buffer_max_packets, bool jitter_buffer_fast_playout, int jitter_buffer_min_delay_ms, - bool jitter_buffer_enable_rtx_handling, bool enable_non_sender_rtt, rtc::scoped_refptr decoder_factory, absl::optional codec_pair_id, @@ -199,7 +197,6 @@ class ChannelReceive : public ChannelReceiveInterface, RTC_RUN_ON(worker_thread_checker_); int GetRtpTimestampRateHz() const; - int64_t GetRTT() const; void OnReceivedPayloadData(rtc::ArrayView payload, const RTPHeader& rtpHeader) @@ -320,13 +317,13 @@ void ChannelReceive::OnReceivedPayloadData( // packet as discarded. // If we have a source_tracker_, tell it that the frame has been - // "delivered". Normally, this happens in AudioReceiveStream when audio - // frames are pulled out, but when playout is muted, nothing is pulling - // frames. The downside of this approach is that frames delivered this way - // won't be delayed for playout, and therefore will be unsynchronized with - // (a) audio delay when playing and (b) any audio/video synchronization. But - // the alternative is that muting playout also stops the SourceTracker from - // updating RtpSource information. + // "delivered". 
Normally, this happens in AudioReceiveStreamInterface when + // audio frames are pulled out, but when playout is muted, nothing is + // pulling frames. The downside of this approach is that frames delivered + // this way won't be delayed for playout, and therefore will be + // unsynchronized with (a) audio delay when playing and (b) any audio/video + // synchronization. But the alternative is that muting playout also stops + // the SourceTracker from updating RtpSource information. if (source_tracker_) { RtpPacketInfos::vector_type packet_vector = { RtpPacketInfo(rtpHeader, clock_->CurrentTime())}; @@ -344,7 +341,8 @@ void ChannelReceive::OnReceivedPayloadData( } int64_t round_trip_time = 0; - rtp_rtcp_->RTT(remote_ssrc_, &round_trip_time, NULL, NULL, NULL); + rtp_rtcp_->RTT(remote_ssrc_, &round_trip_time, /*avg_rtt=*/nullptr, + /*min_rtt=*/nullptr, /*max_rtt=*/nullptr); std::vector nack_list = acm_receiver_.GetNackList(round_trip_time); if (!nack_list.empty()) { @@ -378,6 +376,8 @@ void ChannelReceive::InitFrameTransformerDelegate( AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( int sample_rate_hz, AudioFrame* audio_frame) { + TRACE_EVENT_BEGIN1("webrtc", "ChannelReceive::GetAudioFrameWithInfo", + "sample_rate_hz", sample_rate_hz); RTC_DCHECK_RUNS_SERIALIZED(&audio_thread_race_checker_); audio_frame->sample_rate_hz_ = sample_rate_hz; @@ -393,6 +393,9 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( // error so that the audio mixer module doesn't add it to the mix. As // a result, it won't be played out and the actions skipped here are // irrelevant. + + TRACE_EVENT_END1("webrtc", "ChannelReceive::GetAudioFrameWithInfo", "error", + 1); return AudioMixer::Source::AudioFrameInfo::kError; } @@ -443,7 +446,6 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( if (capture_start_rtp_time_stamp_ >= 0) { // audio_frame.timestamp_ should be valid from now on. - // Compute elapsed time. int64_t unwrap_timestamp = rtp_ts_wraparound_handler_->Unwrap(audio_frame->timestamp_); @@ -469,14 +471,19 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( // Fill in local capture clock offset in `audio_frame->packet_infos_`. 
RtpPacketInfos::vector_type packet_infos; for (auto& packet_info : audio_frame->packet_infos_) { - absl::optional local_capture_clock_offset; + absl::optional local_capture_clock_offset_q32x32; if (packet_info.absolute_capture_time().has_value()) { - local_capture_clock_offset = + local_capture_clock_offset_q32x32 = capture_clock_offset_updater_.AdjustEstimatedCaptureClockOffset( packet_info.absolute_capture_time() ->estimated_capture_clock_offset); } RtpPacketInfo new_packet_info(packet_info); + absl::optional local_capture_clock_offset; + if (local_capture_clock_offset_q32x32.has_value()) { + local_capture_clock_offset = TimeDelta::Millis( + UQ32x32ToInt64Ms(*local_capture_clock_offset_q32x32)); + } new_packet_info.set_local_capture_clock_offset(local_capture_clock_offset); packet_infos.push_back(std::move(new_packet_info)); } @@ -485,7 +492,7 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( ++audio_frame_interval_count_; if (audio_frame_interval_count_ >= kHistogramReportingInterval) { audio_frame_interval_count_ = 0; - worker_thread_->PostTask(ToQueuedTask(worker_safety_, [this]() { + worker_thread_->PostTask(SafeTask(worker_safety_.flag(), [this]() { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.TargetJitterBufferDelayMs", acm_receiver_.TargetDelayMs()); @@ -499,6 +506,8 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( })); } + TRACE_EVENT_END2("webrtc", "ChannelReceive::GetAudioFrameWithInfo", "gain", + output_gain, "muted", muted); return muted ? AudioMixer::Source::AudioFrameInfo::kMuted : AudioMixer::Source::AudioFrameInfo::kNormal; } @@ -525,7 +534,6 @@ ChannelReceive::ChannelReceive( size_t jitter_buffer_max_packets, bool jitter_buffer_fast_playout, int jitter_buffer_min_delay_ms, - bool jitter_buffer_enable_rtx_handling, bool enable_non_sender_rtt, rtc::scoped_refptr decoder_factory, absl::optional codec_pair_id, @@ -582,7 +590,6 @@ ChannelReceive::ChannelReceive( InitFrameTransformerDelegate(std::move(frame_transformer)); rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(configuration); - rtp_rtcp_->SetSendingMediaStatus(false); rtp_rtcp_->SetRemoteSSRC(remote_ssrc_); // Ensure that RTCP is enabled for the created channel. @@ -648,7 +655,8 @@ void ChannelReceive::OnRtpPacket(const RtpPacketReceived& packet) { const auto& it = payload_type_frequencies_.find(packet.PayloadType()); if (it == payload_type_frequencies_.end()) return; - // TODO(nisse): Set payload_type_frequency earlier, when packet is parsed. + // TODO(bugs.webrtc.org/7135): Set payload_type_frequency earlier, when packet + // is parsed. RtpPacketReceived packet_copy(packet); packet_copy.set_payload_type_frequency(it->second); @@ -731,7 +739,9 @@ void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) { // Deliver RTCP packet to RTP/RTCP module for parsing rtp_rtcp_->IncomingRtcpPacket(data, length); - int64_t rtt = GetRTT(); + int64_t rtt = 0; + rtp_rtcp_->RTT(remote_ssrc_, &rtt, /*avg_rtt=*/nullptr, /*min_rtt=*/nullptr, + /*max_rtt=*/nullptr); if (rtt == 0) { // Waiting for valid RTT. 
return; @@ -750,12 +760,13 @@ void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) { { MutexLock lock(&ts_stats_lock_); - ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); - absl::optional remote_to_local_clock_offset_ms = - ntp_estimator_.EstimateRemoteToLocalClockOffsetMs(); - if (remote_to_local_clock_offset_ms.has_value()) { + ntp_estimator_.UpdateRtcpTimestamp( + TimeDelta::Millis(rtt), NtpTime(ntp_secs, ntp_frac), rtp_timestamp); + absl::optional remote_to_local_clock_offset = + ntp_estimator_.EstimateRemoteToLocalClockOffset(); + if (remote_to_local_clock_offset.has_value()) { capture_clock_offset_updater_.SetRemoteToLocalClockOffset( - Int64MsToQ32x32(*remote_to_local_clock_offset_ms)); + *remote_to_local_clock_offset); } } } @@ -814,8 +825,6 @@ CallReceiveStatistics ChannelReceive::GetRTCPStatistics() const { stats.cumulativeLost = rtp_stats.packets_lost; stats.jitterSamples = rtp_stats.jitter; - stats.rttMs = GetRTT(); - // Data counters. if (statistician) { stats.payload_bytes_rcvd = rtp_stats.packet_counter.payload_bytes; @@ -847,14 +856,11 @@ CallReceiveStatistics ChannelReceive::GetRTCPStatistics() const { absl::optional rtcp_sr_stats = rtp_rtcp_->GetSenderReportStats(); if (rtcp_sr_stats.has_value()) { - // Number of seconds since 1900 January 1 00:00 GMT (see - // https://tools.ietf.org/html/rfc868). - constexpr int64_t kNtpJan1970Millisecs = - 2208988800 * rtc::kNumMillisecsPerSec; stats.last_sender_report_timestamp_ms = - rtcp_sr_stats->last_arrival_timestamp.ToMs() - kNtpJan1970Millisecs; + rtcp_sr_stats->last_arrival_timestamp.ToMs() - + rtc::kNtpJan1970Millisecs; stats.last_sender_report_remote_timestamp_ms = - rtcp_sr_stats->last_remote_timestamp.ToMs() - kNtpJan1970Millisecs; + rtcp_sr_stats->last_remote_timestamp.ToMs() - rtc::kNtpJan1970Millisecs; stats.sender_reports_packets_sent = rtcp_sr_stats->packets_sent; stats.sender_reports_bytes_sent = rtcp_sr_stats->bytes_sent; stats.sender_reports_reports_count = rtcp_sr_stats->reports_count; @@ -1043,8 +1049,8 @@ absl::optional ChannelReceive::GetSyncInfo() const { return info; } -// RTC_RUN_ON(worker_thread_checker_) void ChannelReceive::UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms) { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); // TODO(bugs.webrtc.org/11993): Expect to be called exclusively on the // network thread. Once that's done, we won't need video_sync_lock_. @@ -1091,29 +1097,6 @@ int ChannelReceive::GetRtpTimestampRateHz() const { : acm_receiver_.last_output_sample_rate_hz(); } -int64_t ChannelReceive::GetRTT() const { - RTC_DCHECK_RUN_ON(&network_thread_checker_); - std::vector report_blocks = - rtp_rtcp_->GetLatestReportBlockData(); - - if (report_blocks.empty()) { - // Try fall back on an RTT from an associated channel. - if (!associated_send_channel_) { - return 0; - } - return associated_send_channel_->GetRTT(); - } - - // TODO(nisse): This method computes RTT based on sender reports, even though - // a receive stream is not supposed to do that. 
- for (const ReportBlockData& data : report_blocks) { - if (data.report_block().sender_ssrc == remote_ssrc_) { - return data.last_rtt_ms(); - } - } - return 0; -} - } // namespace std::unique_ptr CreateChannelReceive( @@ -1127,7 +1110,6 @@ std::unique_ptr CreateChannelReceive( size_t jitter_buffer_max_packets, bool jitter_buffer_fast_playout, int jitter_buffer_min_delay_ms, - bool jitter_buffer_enable_rtx_handling, bool enable_non_sender_rtt, rtc::scoped_refptr decoder_factory, absl::optional codec_pair_id, @@ -1138,9 +1120,8 @@ std::unique_ptr CreateChannelReceive( clock, neteq_factory, audio_device_module, rtcp_send_transport, rtc_event_log, local_ssrc, remote_ssrc, jitter_buffer_max_packets, jitter_buffer_fast_playout, jitter_buffer_min_delay_ms, - jitter_buffer_enable_rtx_handling, enable_non_sender_rtt, decoder_factory, - codec_pair_id, std::move(frame_decryptor), crypto_options, - std::move(frame_transformer)); + enable_non_sender_rtt, decoder_factory, codec_pair_id, + std::move(frame_decryptor), crypto_options, std::move(frame_transformer)); } } // namespace voe diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_receive.h b/TMessagesProj/jni/voip/webrtc/audio/channel_receive.h index d811e87719..c3eca29006 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_receive.h +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_receive.h @@ -53,7 +53,6 @@ class RtpRtcp; struct CallReceiveStatistics { unsigned int cumulativeLost; unsigned int jitterSamples; - int64_t rttMs; int64_t payload_bytes_rcvd = 0; int64_t header_and_padding_bytes_rcvd = 0; int packetsReceived; @@ -83,7 +82,7 @@ namespace voe { class ChannelSendInterface; -// Interface class needed for AudioReceiveStream tests that use a +// Interface class needed for AudioReceiveStreamInterface tests that use a // MockChannelReceive. 
class ChannelReceiveInterface : public RtpPacketSinkInterface { @@ -182,7 +181,6 @@ std::unique_ptr CreateChannelReceive( size_t jitter_buffer_max_packets, bool jitter_buffer_fast_playout, int jitter_buffer_min_delay_ms, - bool jitter_buffer_enable_rtx_handling, bool enable_non_sender_rtt, rtc::scoped_refptr decoder_factory, absl::optional codec_pair_id, diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_receive_frame_transformer_delegate.cc b/TMessagesProj/jni/voip/webrtc/audio/channel_receive_frame_transformer_delegate.cc index c9e8a8b29d..e8ba6ded47 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_receive_frame_transformer_delegate.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_receive_frame_transformer_delegate.cc @@ -13,7 +13,6 @@ #include #include "rtc_base/buffer.h" -#include "rtc_base/task_utils/to_queued_task.h" namespace webrtc { namespace { @@ -38,6 +37,9 @@ class TransformableIncomingAudioFrame uint32_t GetSsrc() const override { return ssrc_; } uint32_t GetTimestamp() const override { return header_.timestamp; } const RTPHeader& GetHeader() const override { return header_; } + rtc::ArrayView GetContributingSources() const override { + return rtc::ArrayView(header_.arrOfCSRCs, header_.numCSRCs); + } Direction GetDirection() const override { return Direction::kReceiver; } private: @@ -79,11 +81,11 @@ void ChannelReceiveFrameTransformerDelegate::Transform( void ChannelReceiveFrameTransformerDelegate::OnTransformedFrame( std::unique_ptr frame) { - rtc::scoped_refptr delegate = this; - channel_receive_thread_->PostTask(ToQueuedTask( + rtc::scoped_refptr delegate(this); + channel_receive_thread_->PostTask( [delegate = std::move(delegate), frame = std::move(frame)]() mutable { delegate->ReceiveFrame(std::move(frame)); - })); + }); } void ChannelReceiveFrameTransformerDelegate::ReceiveFrame( diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_send.cc b/TMessagesProj/jni/voip/webrtc/audio/channel_send.cc index 2ad031e543..d2604061b8 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_send.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_send.cc @@ -31,11 +31,8 @@ #include "modules/audio_processing/rms_level.h" #include "modules/pacing/packet_router.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" -#include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" -#include "rtc_base/format_macros.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/race_checker.h" @@ -43,8 +40,8 @@ #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" #include "rtc_base/time_utils.h" +#include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" namespace webrtc { @@ -64,10 +61,6 @@ class ChannelSend : public ChannelSendInterface, // packets from the ACM public RtcpPacketTypeCounterObserver { public: - // TODO(nisse): Make OnUplinkPacketLossRate public, and delete friend - // declaration. 
- friend class VoERtcpObserver; - ChannelSend(Clock* clock, TaskQueueFactory* task_queue_factory, Transport* rtp_transport, @@ -79,7 +72,8 @@ class ChannelSend : public ChannelSendInterface, int rtcp_report_interval_ms, uint32_t ssrc, rtc::scoped_refptr frame_transformer, - TransportFeedbackObserver* feedback_observer); + TransportFeedbackObserver* feedback_observer, + const FieldTrialsView& field_trials); ~ChannelSend() override; @@ -156,6 +150,8 @@ class ChannelSend : public ChannelSendInterface, uint32_t ssrc, const RtcpPacketTypeCounter& packet_counter) override; + void OnUplinkPacketLossRate(float packet_loss_rate); + private: // From AudioPacketizationCallback in the ACM int32_t SendData(AudioFrameType frameType, @@ -165,7 +161,6 @@ class ChannelSend : public ChannelSendInterface, size_t payloadSize, int64_t absolute_capture_timestamp_ms) override; - void OnUplinkPacketLossRate(float packet_loss_rate); bool InputMute() const; int32_t SendRtpAudio(AudioFrameType frameType, @@ -238,15 +233,15 @@ class ChannelSend : public ChannelSendInterface, rtc::scoped_refptr frame_transformer_delegate_ RTC_GUARDED_BY(encoder_queue_); - // Defined last to ensure that there are no running tasks when the other - // members are destroyed. - rtc::TaskQueue encoder_queue_; - const bool fixing_timestamp_stall_; mutable Mutex rtcp_counter_mutex_; RtcpPacketTypeCounter rtcp_packet_type_counter_ RTC_GUARDED_BY(rtcp_counter_mutex_); + + // Defined last to ensure that there are no running tasks when the other + // members are destroyed. + rtc::TaskQueue encoder_queue_; }; const int kTelephoneEventAttenuationdB = 10; @@ -459,7 +454,8 @@ ChannelSend::ChannelSend( int rtcp_report_interval_ms, uint32_t ssrc, rtc::scoped_refptr frame_transformer, - TransportFeedbackObserver* feedback_observer) + TransportFeedbackObserver* feedback_observer, + const FieldTrialsView& field_trials) : ssrc_(ssrc), event_log_(rtc_event_log), _timeStamp(0), // This is just an offset, RTP module will add it's own @@ -474,11 +470,11 @@ ChannelSend::ChannelSend( new RateLimiter(clock, kMaxRetransmissionWindowMs)), frame_encryptor_(frame_encryptor), crypto_options_(crypto_options), + fixing_timestamp_stall_( + field_trials.IsDisabled("WebRTC-Audio-FixTimestampStall")), encoder_queue_(task_queue_factory->CreateTaskQueue( "AudioEncoder", - TaskQueueFactory::Priority::NORMAL)), - fixing_timestamp_stall_( - !field_trial::IsDisabled("WebRTC-Audio-FixTimestampStall")) { + TaskQueueFactory::Priority::NORMAL)) { audio_coding_.reset(AudioCodingModule::Create(AudioCodingModule::Config())); RtpRtcpInterface::Configuration configuration; @@ -783,6 +779,7 @@ CallSendStatistics ChannelSend::GetRTCPStatistics() const { stats.retransmitted_bytes_sent = rtp_stats.retransmitted.payload_bytes; stats.packetsSent = rtp_stats.transmitted.packets + rtx_stats.transmitted.packets; + stats.total_packet_send_delay = rtp_stats.transmitted.total_packet_delay; stats.retransmitted_packets_sent = rtp_stats.retransmitted.packets; stats.report_block_datas = rtp_rtcp_->GetLatestReportBlockData(); @@ -806,6 +803,8 @@ void ChannelSend::RtcpPacketTypesCounterUpdated( void ChannelSend::ProcessAndEncodeAudio( std::unique_ptr audio_frame) { + TRACE_EVENT0("webrtc", "ChannelSend::ProcessAndEncodeAudio"); + RTC_DCHECK_RUNS_SERIALIZED(&audio_thread_race_checker_); RTC_DCHECK_GT(audio_frame->samples_per_channel_, 0); RTC_DCHECK_LE(audio_frame->num_channels_, 8); @@ -949,12 +948,13 @@ std::unique_ptr CreateChannelSend( int rtcp_report_interval_ms, uint32_t ssrc, rtc::scoped_refptr 
frame_transformer, - TransportFeedbackObserver* feedback_observer) { + TransportFeedbackObserver* feedback_observer, + const FieldTrialsView& field_trials) { return std::make_unique( clock, task_queue_factory, rtp_transport, rtcp_rtt_stats, rtc_event_log, frame_encryptor, crypto_options, extmap_allow_mixed, rtcp_report_interval_ms, ssrc, std::move(frame_transformer), - feedback_observer); + feedback_observer, field_trials); } } // namespace voe diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_send.h b/TMessagesProj/jni/voip/webrtc/audio/channel_send.h index e100725460..cf9a273f70 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_send.h +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_send.h @@ -18,6 +18,7 @@ #include "api/audio/audio_frame.h" #include "api/audio_codecs/audio_encoder.h" #include "api/crypto/crypto_options.h" +#include "api/field_trials_view.h" #include "api/frame_transformer_interface.h" #include "api/function_view.h" #include "api/task_queue/task_queue_factory.h" @@ -38,6 +39,8 @@ struct CallSendStatistics { // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-retransmittedbytessent uint64_t retransmitted_bytes_sent; int packetsSent; + // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalpacketsenddelay + TimeDelta total_packet_send_delay = TimeDelta::Zero(); // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-retransmittedpacketssent uint64_t retransmitted_packets_sent; // A snapshot of Report Blocks with additional data of interest to statistics. @@ -135,7 +138,8 @@ std::unique_ptr CreateChannelSend( int rtcp_report_interval_ms, uint32_t ssrc, rtc::scoped_refptr frame_transformer, - TransportFeedbackObserver* feedback_observer); + TransportFeedbackObserver* feedback_observer, + const FieldTrialsView& field_trials); } // namespace voe } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_send_frame_transformer_delegate.cc b/TMessagesProj/jni/voip/webrtc/audio/channel_send_frame_transformer_delegate.cc index eee4cd0d96..29bb0b81d8 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_send_frame_transformer_delegate.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_send_frame_transformer_delegate.cc @@ -102,7 +102,7 @@ void ChannelSendFrameTransformerDelegate::OnTransformedFrame( MutexLock lock(&send_lock_); if (!send_frame_callback_) return; - rtc::scoped_refptr delegate = this; + rtc::scoped_refptr delegate(this); encoder_queue_->PostTask( [delegate = std::move(delegate), frame = std::move(frame)]() mutable { delegate->SendFrame(std::move(frame)); diff --git a/TMessagesProj/jni/voip/webrtc/audio/conversion.h b/TMessagesProj/jni/voip/webrtc/audio/conversion.h index 920aa3a434..dd71942f6a 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/conversion.h +++ b/TMessagesProj/jni/voip/webrtc/audio/conversion.h @@ -11,6 +11,9 @@ #ifndef AUDIO_CONVERSION_H_ #define AUDIO_CONVERSION_H_ +#include +#include + namespace webrtc { // Convert fixed point number with 8 bit fractional part, to floating point. diff --git a/TMessagesProj/jni/voip/webrtc/audio/null_audio_poller.cc b/TMessagesProj/jni/voip/webrtc/audio/null_audio_poller.cc deleted file mode 100644 index de2c5cabec..0000000000 --- a/TMessagesProj/jni/voip/webrtc/audio/null_audio_poller.cc +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "audio/null_audio_poller.h" - -#include - -#include "rtc_base/checks.h" -#include "rtc_base/location.h" -#include "rtc_base/thread.h" -#include "rtc_base/time_utils.h" - -namespace webrtc { -namespace internal { - -namespace { - -constexpr int64_t kPollDelayMs = 10; // WebRTC uses 10ms by default - -constexpr size_t kNumChannels = 1; -constexpr uint32_t kSamplesPerSecond = 48000; // 48kHz -constexpr size_t kNumSamples = kSamplesPerSecond / 100; // 10ms of samples - -} // namespace - -NullAudioPoller::NullAudioPoller(AudioTransport* audio_transport) - : audio_transport_(audio_transport), - reschedule_at_(rtc::TimeMillis() + kPollDelayMs) { - RTC_DCHECK(audio_transport); - OnMessage(nullptr); // Start the poll loop. -} - -NullAudioPoller::~NullAudioPoller() { - RTC_DCHECK(thread_checker_.IsCurrent()); - rtc::Thread::Current()->Clear(this); -} - -void NullAudioPoller::OnMessage(rtc::Message* msg) { - RTC_DCHECK(thread_checker_.IsCurrent()); - - // Buffer to hold the audio samples. - int16_t buffer[kNumSamples * kNumChannels]; - // Output variables from `NeedMorePlayData`. - size_t n_samples; - int64_t elapsed_time_ms; - int64_t ntp_time_ms; - audio_transport_->NeedMorePlayData(kNumSamples, sizeof(int16_t), kNumChannels, - kSamplesPerSecond, buffer, n_samples, - &elapsed_time_ms, &ntp_time_ms); - - // Reschedule the next poll iteration. If, for some reason, the given - // reschedule time has already passed, reschedule as soon as possible. - int64_t now = rtc::TimeMillis(); - if (reschedule_at_ < now) { - reschedule_at_ = now; - } - rtc::Thread::Current()->PostAt(RTC_FROM_HERE, reschedule_at_, this, 0); - - // Loop after next will be kPollDelayMs later. - reschedule_at_ += kPollDelayMs; -} - -} // namespace internal -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/null_audio_poller.h b/TMessagesProj/jni/voip/webrtc/audio/null_audio_poller.h deleted file mode 100644 index 47e67a91da..0000000000 --- a/TMessagesProj/jni/voip/webrtc/audio/null_audio_poller.h +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef AUDIO_NULL_AUDIO_POLLER_H_ -#define AUDIO_NULL_AUDIO_POLLER_H_ - -#include - -#include "api/sequence_checker.h" -#include "modules/audio_device/include/audio_device_defines.h" -#include "rtc_base/message_handler.h" - -namespace webrtc { -namespace internal { - -class NullAudioPoller final : public rtc::MessageHandler { - public: - explicit NullAudioPoller(AudioTransport* audio_transport); - ~NullAudioPoller() override; - - protected: - void OnMessage(rtc::Message* msg) override; - - private: - SequenceChecker thread_checker_; - AudioTransport* const audio_transport_; - int64_t reschedule_at_; -}; - -} // namespace internal -} // namespace webrtc - -#endif // AUDIO_NULL_AUDIO_POLLER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.cc b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.cc index 4650d195f2..a70e33ec38 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.cc @@ -17,7 +17,6 @@ #include "api/task_queue/task_queue_factory.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.cc b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.cc index 8aa552bb28..71026e84e0 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.cc @@ -226,7 +226,8 @@ void AudioIngress::ReceivedRTCPPacket( { MutexLock lock(&lock_); - ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); + ntp_estimator_.UpdateRtcpTimestamp( + TimeDelta::Millis(rtt), NtpTime(ntp_secs, ntp_frac), rtp_timestamp); } } diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h b/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h index 439393585c..6c3aec6fa2 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h @@ -53,9 +53,6 @@ class VoipCore : public VoipEngine, public VoipVolumeControl { public: // Construct VoipCore with provided arguments. - // ProcessThread implementation can be injected by `process_thread` - // (mainly for testing purpose) and when set to nullptr, default - // implementation will be used. VoipCore(rtc::scoped_refptr encoder_factory, rtc::scoped_refptr decoder_factory, std::unique_ptr task_queue_factory, diff --git a/TMessagesProj/jni/voip/webrtc/base/android/scoped_java_ref.cc b/TMessagesProj/jni/voip/webrtc/base/android/scoped_java_ref.cc index 7d31a75bc8..d0d71f3ea6 100644 --- a/TMessagesProj/jni/voip/webrtc/base/android/scoped_java_ref.cc +++ b/TMessagesProj/jni/voip/webrtc/base/android/scoped_java_ref.cc @@ -7,6 +7,8 @@ #include "base/android/jni_android.h" #include "base/logging.h" +#include + namespace base { namespace android { namespace { @@ -60,10 +62,14 @@ void JavaRef::SetNewGlobalRef(JNIEnv* env, jobject obj) { } else { DCHECK_EQ(env, AttachCurrentThread()); // Is |env| on correct thread. 
} - if (obj) + if (obj) { + DEBUG_REF("scoped_java_ref.cc"); obj = env->NewGlobalRef(obj); - if (obj_) + } + if (obj_) { + DEBUG_DELREF("scoped_java_ref.cc"); env->DeleteGlobalRef(obj_); + } obj_ = obj; } @@ -77,6 +83,7 @@ void JavaRef::ResetLocalRef(JNIEnv* env) { void JavaRef::ResetGlobalRef() { if (obj_) { + DEBUG_DELREF("webrtc ResetGlobalRef"); AttachCurrentThread()->DeleteGlobalRef(obj_); obj_ = nullptr; } diff --git a/TMessagesProj/jni/voip/webrtc/call/OWNERS b/TMessagesProj/jni/voip/webrtc/call/OWNERS index 48b403d95a..e275834bb4 100644 --- a/TMessagesProj/jni/voip/webrtc/call/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/call/OWNERS @@ -1,7 +1,8 @@ +sprang@webrtc.org +danilchap@webrtc.org +brandtr@webrtc.org +tommi@webrtc.org mflodman@webrtc.org stefan@webrtc.org -srte@webrtc.org -terelius@webrtc.org -sprang@webrtc.org per-file version.cc=webrtc-version-updater@webrtc-ci.iam.gserviceaccount.com diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/broadcast_resource_listener.cc b/TMessagesProj/jni/voip/webrtc/call/adaptation/broadcast_resource_listener.cc index 876d4c0bf6..505036db3d 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/broadcast_resource_listener.cc +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/broadcast_resource_listener.cc @@ -14,8 +14,9 @@ #include #include +#include "absl/strings/string_view.h" +#include "api/make_ref_counted.h" #include "rtc_base/checks.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -24,7 +25,7 @@ namespace webrtc { // a single ResourceListener. class BroadcastResourceListener::AdapterResource : public Resource { public: - explicit AdapterResource(std::string name) : name_(std::move(name)) {} + explicit AdapterResource(absl::string_view name) : name_(std::move(name)) {} ~AdapterResource() override { RTC_DCHECK(!listener_); } // The parent is letting us know we have a usage neasurement. @@ -32,7 +33,8 @@ class BroadcastResourceListener::AdapterResource : public Resource { MutexLock lock(&lock_); if (!listener_) return; - listener_->OnResourceUsageStateMeasured(this, usage_state); + listener_->OnResourceUsageStateMeasured(rtc::scoped_refptr(this), + usage_state); } // Resource implementation. 
diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/encoder_settings.h b/TMessagesProj/jni/voip/webrtc/call/adaptation/encoder_settings.h index ddb198a96e..30ce0a05bc 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/encoder_settings.h +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/encoder_settings.h @@ -14,7 +14,7 @@ #include "absl/types/optional.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/video_encoder_config.h" +#include "video/config/video_encoder_config.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc index d95cd75a9d..f4d1bf3538 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc @@ -15,26 +15,19 @@ #include #include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" #include "api/sequence_checker.h" #include "api/video/video_adaptation_counters.h" #include "call/adaptation/video_stream_adapter.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/task_utils/to_queued_task.h" namespace webrtc { ResourceAdaptationProcessor::ResourceListenerDelegate::ResourceListenerDelegate( ResourceAdaptationProcessor* processor) - : task_queue_(nullptr), processor_(processor) {} - -void ResourceAdaptationProcessor::ResourceListenerDelegate::SetTaskQueue( - TaskQueueBase* task_queue) { - RTC_DCHECK(!task_queue_); - RTC_DCHECK(task_queue); - task_queue_ = task_queue; - RTC_DCHECK_RUN_ON(task_queue_); + : task_queue_(TaskQueueBase::Current()), processor_(processor) { + RTC_DCHECK(task_queue_); } void ResourceAdaptationProcessor::ResourceListenerDelegate:: @@ -47,11 +40,11 @@ void ResourceAdaptationProcessor::ResourceListenerDelegate:: OnResourceUsageStateMeasured(rtc::scoped_refptr resource, ResourceUsageState usage_state) { if (!task_queue_->IsCurrent()) { - task_queue_->PostTask(ToQueuedTask( + task_queue_->PostTask( [this_ref = rtc::scoped_refptr(this), resource, usage_state] { this_ref->OnResourceUsageStateMeasured(resource, usage_state); - })); + }); return; } RTC_DCHECK_RUN_ON(task_queue_); @@ -65,19 +58,21 @@ ResourceAdaptationProcessor::MitigationResultAndLogMessage:: : result(MitigationResult::kAdaptationApplied), message() {} ResourceAdaptationProcessor::MitigationResultAndLogMessage:: - MitigationResultAndLogMessage(MitigationResult result, std::string message) - : result(result), message(std::move(message)) {} + MitigationResultAndLogMessage(MitigationResult result, + absl::string_view message) + : result(result), message(message) {} ResourceAdaptationProcessor::ResourceAdaptationProcessor( VideoStreamAdapter* stream_adapter) - : task_queue_(nullptr), + : task_queue_(TaskQueueBase::Current()), resource_listener_delegate_( rtc::make_ref_counted(this)), resources_(), stream_adapter_(stream_adapter), last_reported_source_restrictions_(), previous_mitigation_results_() { - RTC_DCHECK(stream_adapter_); + RTC_DCHECK(task_queue_); + stream_adapter_->AddRestrictionsListener(this); } ResourceAdaptationProcessor::~ResourceAdaptationProcessor() { @@ -89,16 +84,6 @@ ResourceAdaptationProcessor::~ResourceAdaptationProcessor() { resource_listener_delegate_->OnProcessorDestroyed(); } -void ResourceAdaptationProcessor::SetTaskQueue(TaskQueueBase* 
task_queue) { - RTC_DCHECK(!task_queue_); - RTC_DCHECK(task_queue); - task_queue_ = task_queue; - resource_listener_delegate_->SetTaskQueue(task_queue); - RTC_DCHECK_RUN_ON(task_queue_); - // Now that we have the queue we can attach as adaptation listener. - stream_adapter_->AddRestrictionsListener(this); -} - void ResourceAdaptationProcessor::AddResourceLimitationsListener( ResourceLimitationsListener* limitations_listener) { RTC_DCHECK_RUN_ON(task_queue_); @@ -128,7 +113,7 @@ void ResourceAdaptationProcessor::AddResource( << "Resource \"" << resource->Name() << "\" was already registered."; resources_.push_back(resource); } - resource->SetResourceListener(resource_listener_delegate_); + resource->SetResourceListener(resource_listener_delegate_.get()); RTC_LOG(LS_INFO) << "Registered resource \"" << resource->Name() << "\"."; } @@ -156,8 +141,8 @@ void ResourceAdaptationProcessor::RemoveResource( void ResourceAdaptationProcessor::RemoveLimitationsImposedByResource( rtc::scoped_refptr resource) { if (!task_queue_->IsCurrent()) { - task_queue_->PostTask(ToQueuedTask( - [this, resource]() { RemoveLimitationsImposedByResource(resource); })); + task_queue_->PostTask( + [this, resource]() { RemoveLimitationsImposedByResource(resource); }); return; } RTC_DCHECK_RUN_ON(task_queue_); diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.h b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.h index 3e273081f8..db3b4c2506 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.h +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.h @@ -17,6 +17,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/adaptation/resource.h" #include "api/rtp_parameters.h" @@ -24,12 +25,12 @@ #include "api/task_queue/task_queue_base.h" #include "api/video/video_adaptation_counters.h" #include "api/video/video_frame.h" -#include "api/video/video_stream_encoder_observer.h" #include "call/adaptation/resource_adaptation_processor_interface.h" #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_adapter.h" #include "call/adaptation/video_stream_input_state.h" #include "call/adaptation/video_stream_input_state_provider.h" +#include "video/video_stream_encoder_observer.h" namespace webrtc { @@ -58,8 +59,6 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface, VideoStreamAdapter* video_stream_adapter); ~ResourceAdaptationProcessor() override; - void SetTaskQueue(TaskQueueBase* task_queue) override; - // ResourceAdaptationProcessorInterface implementation. void AddResourceLimitationsListener( ResourceLimitationsListener* limitations_listener) override; @@ -90,7 +89,6 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface, public: explicit ResourceListenerDelegate(ResourceAdaptationProcessor* processor); - void SetTaskQueue(TaskQueueBase* task_queue); void OnProcessorDestroyed(); // ResourceListener implementation. 
@@ -111,7 +109,8 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface, struct MitigationResultAndLogMessage { MitigationResultAndLogMessage(); - MitigationResultAndLogMessage(MitigationResult result, std::string message); + MitigationResultAndLogMessage(MitigationResult result, + absl::string_view message); MitigationResult result; std::string message; }; diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor_interface.h b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor_interface.h index 8b1f94b73a..4729488150 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor_interface.h +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor_interface.h @@ -47,8 +47,6 @@ class ResourceAdaptationProcessorInterface { public: virtual ~ResourceAdaptationProcessorInterface(); - virtual void SetTaskQueue(TaskQueueBase* task_queue) = 0; - virtual void AddResourceLimitationsListener( ResourceLimitationsListener* limitations_listener) = 0; virtual void RemoveResourceLimitationsListener( diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_source_restrictions.cc b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_source_restrictions.cc index e9d6c26137..719bc53278 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_source_restrictions.cc +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_source_restrictions.cc @@ -10,6 +10,7 @@ #include "call/adaptation/video_source_restrictions.h" +#include #include #include "rtc_base/checks.h" @@ -79,6 +80,30 @@ void VideoSourceRestrictions::set_max_frame_rate( max_frame_rate_ = std::move(max_frame_rate); } +void VideoSourceRestrictions::UpdateMin(const VideoSourceRestrictions& other) { + if (max_pixels_per_frame_.has_value()) { + max_pixels_per_frame_ = std::min(*max_pixels_per_frame_, + other.max_pixels_per_frame().value_or( + std::numeric_limits::max())); + } else { + max_pixels_per_frame_ = other.max_pixels_per_frame(); + } + if (target_pixels_per_frame_.has_value()) { + target_pixels_per_frame_ = std::min( + *target_pixels_per_frame_, other.target_pixels_per_frame().value_or( + std::numeric_limits::max())); + } else { + target_pixels_per_frame_ = other.target_pixels_per_frame(); + } + if (max_frame_rate_.has_value()) { + max_frame_rate_ = std::min( + *max_frame_rate_, + other.max_frame_rate().value_or(std::numeric_limits::max())); + } else { + max_frame_rate_ = other.max_frame_rate(); + } +} + bool DidRestrictionsIncrease(VideoSourceRestrictions before, VideoSourceRestrictions after) { bool decreased_resolution = DidDecreaseResolution(before, after); diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_source_restrictions.h b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_source_restrictions.h index 004cc09055..be8520a385 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_source_restrictions.h +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_source_restrictions.h @@ -60,6 +60,9 @@ class VideoSourceRestrictions { absl::optional target_pixels_per_frame); void set_max_frame_rate(absl::optional max_frame_rate); + // Update `this` with min(`this`, `other`). + void UpdateMin(const VideoSourceRestrictions& other); + private: // These map to rtc::VideoSinkWants's `max_pixel_count` and // `target_pixel_count`. 
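
On the VideoSourceRestrictions::UpdateMin() helper added above: each restriction field is an optional where an unset value means "unrestricted", and merging two restriction sets keeps the tighter (smaller) value per field. A small sketch of that field-wise minimum is below, written with std::optional purely for illustration; the actual implementation uses absl::optional and a std::numeric_limits max() sentinel for "no limit".

#include <algorithm>
#include <optional>

// Field-wise minimum of two optional limits. An empty optional means
// "no restriction", so the other side's value (if any) is kept.
template <typename T>
std::optional<T> MinRestriction(const std::optional<T>& a,
                                const std::optional<T>& b) {
  if (a.has_value() && b.has_value()) {
    return std::min(*a, *b);
  }
  return a.has_value() ? a : b;
}

// Example: a max_frame_rate of 30 merged with "unrestricted" stays 30,
// while 30 merged with 25 tightens to 25:
//   MinRestriction<double>(30.0, std::nullopt)  -> 30.0
//   MinRestriction<double>(30.0, 25.0)          -> 25.0
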
diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc index c6560b316e..f30a4d7abb 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc @@ -23,7 +23,6 @@ #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_input_state.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -204,9 +203,11 @@ const VideoAdaptationCounters& Adaptation::counters() const { VideoStreamAdapter::VideoStreamAdapter( VideoStreamInputStateProvider* input_state_provider, - VideoStreamEncoderObserver* encoder_stats_observer) + VideoStreamEncoderObserver* encoder_stats_observer, + const FieldTrialsView& field_trials) : input_state_provider_(input_state_provider), encoder_stats_observer_(encoder_stats_observer), + balanced_settings_(field_trials), adaptation_validation_id_(0), degradation_preference_(DegradationPreference::DISABLED), awaiting_frame_size_change_(absl::nullopt) { @@ -375,7 +376,7 @@ VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::GetAdaptationUpStep( return increase_frame_rate; } // else, increase resolution. - ABSL_FALLTHROUGH_INTENDED; + [[fallthrough]]; } case DegradationPreference::MAINTAIN_FRAMERATE: { // Attempt to increase pixel count. @@ -459,7 +460,7 @@ VideoStreamAdapter::GetAdaptationDownStep( return decrease_frame_rate; } // else, decrease resolution. - ABSL_FALLTHROUGH_INTENDED; + [[fallthrough]]; } case DegradationPreference::MAINTAIN_FRAMERATE: { return DecreaseResolution(input_state, current_restrictions); diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.h b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.h index 7bf424a17e..5c174178e4 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.h @@ -18,9 +18,9 @@ #include "absl/types/optional.h" #include "absl/types/variant.h" #include "api/adaptation/resource.h" +#include "api/field_trials_view.h" #include "api/rtp_parameters.h" #include "api/video/video_adaptation_counters.h" -#include "api/video/video_stream_encoder_observer.h" #include "call/adaptation/adaptation_constraint.h" #include "call/adaptation/degradation_preference_provider.h" #include "call/adaptation/video_source_restrictions.h" @@ -30,6 +30,7 @@ #include "rtc_base/experiments/balanced_degradation_settings.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" +#include "video/video_stream_encoder_observer.h" namespace webrtc { @@ -123,7 +124,8 @@ class Adaptation final { class VideoStreamAdapter { public: VideoStreamAdapter(VideoStreamInputStateProvider* input_state_provider, - VideoStreamEncoderObserver* encoder_stats_observer); + VideoStreamEncoderObserver* encoder_stats_observer, + const FieldTrialsView& field_trials); ~VideoStreamAdapter(); VideoSourceRestrictions source_restrictions() const; diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state_provider.h b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state_provider.h index f4a3e0bfa0..81996e6eb9 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state_provider.h +++ 
b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state_provider.h @@ -11,10 +11,10 @@ #ifndef CALL_ADAPTATION_VIDEO_STREAM_INPUT_STATE_PROVIDER_H_ #define CALL_ADAPTATION_VIDEO_STREAM_INPUT_STATE_PROVIDER_H_ -#include "api/video/video_stream_encoder_observer.h" #include "call/adaptation/encoder_settings.h" #include "call/adaptation/video_stream_input_state.h" #include "rtc_base/synchronization/mutex.h" +#include "video/video_stream_encoder_observer.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.cc b/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.cc index c3c2ac77d0..0766eb6bbb 100644 --- a/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.cc @@ -12,13 +12,13 @@ namespace webrtc { -AudioReceiveStream::Stats::Stats() = default; -AudioReceiveStream::Stats::~Stats() = default; +AudioReceiveStreamInterface::Stats::Stats() = default; +AudioReceiveStreamInterface::Stats::~Stats() = default; -AudioReceiveStream::Config::Config() = default; -AudioReceiveStream::Config::~Config() = default; +AudioReceiveStreamInterface::Config::Config() = default; +AudioReceiveStreamInterface::Config::~Config() = default; -AudioReceiveStream::Config::Rtp::Rtp() = default; -AudioReceiveStream::Config::Rtp::~Rtp() = default; +AudioReceiveStreamInterface::Config::Rtp::Rtp() = default; +AudioReceiveStreamInterface::Config::Rtp::~Rtp() = default; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.h b/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.h index 17691f7021..5d3c38fb05 100644 --- a/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.h @@ -27,7 +27,7 @@ namespace webrtc { class AudioSinkInterface; -class AudioReceiveStream : public MediaReceiveStream { +class AudioReceiveStreamInterface : public MediaReceiveStreamInterface { public: struct Stats { Stats(); @@ -59,6 +59,7 @@ class AudioReceiveStream : public MediaReceiveStream { double jitter_buffer_delay_seconds = 0.0; uint64_t jitter_buffer_emitted_count = 0; double jitter_buffer_target_delay_seconds = 0.0; + double jitter_buffer_minimum_delay_seconds = 0.0; uint64_t inserted_samples_for_deceleration = 0; uint64_t removed_samples_for_acceleration = 0; // Stats below DO NOT correspond directly to anything in the WebRTC stats @@ -108,7 +109,7 @@ class AudioReceiveStream : public MediaReceiveStream { std::string ToString() const; // Receive-stream specific RTP settings. - struct Rtp : public RtpConfig { + struct Rtp : public ReceiveStreamRtpConfig { Rtp(); ~Rtp(); @@ -127,7 +128,6 @@ class AudioReceiveStream : public MediaReceiveStream { size_t jitter_buffer_max_packets = 200; bool jitter_buffer_fast_accelerate = false; int jitter_buffer_min_delay_ms = 0; - bool jitter_buffer_enable_rtx_handling = false; // Identifier for an A/V synchronization group. Empty string to disable. // TODO(pbos): Synchronize streams in a sync group, not just one video @@ -148,22 +148,21 @@ class AudioReceiveStream : public MediaReceiveStream { // decrypted in whatever way the caller choses. This is not required by // default. // TODO(tommi): Remove this member variable from the struct. It's not - // a part of the AudioReceiveStream state but rather a pass through + // a part of the AudioReceiveStreamInterface state but rather a pass through // variable. 
rtc::scoped_refptr frame_decryptor; // An optional frame transformer used by insertable streams to transform // encoded frames. // TODO(tommi): Remove this member variable from the struct. It's not - // a part of the AudioReceiveStream state but rather a pass through + // a part of the AudioReceiveStreamInterface state but rather a pass through // variable. rtc::scoped_refptr frame_transformer; }; // Methods that support reconfiguring the stream post initialization. virtual void SetDecoderMap(std::map decoder_map) = 0; - virtual void SetUseTransportCcAndNackHistory(bool use_transport_cc, - int history_ms) = 0; + virtual void SetNackHistory(int history_ms) = 0; virtual void SetNonSenderRttMeasurement(bool enabled) = 0; // Returns true if the stream has been started. @@ -194,9 +193,21 @@ class AudioReceiveStream : public MediaReceiveStream { // Returns current value of base minimum delay in milliseconds. virtual int GetBaseMinimumPlayoutDelayMs() const = 0; + // Synchronization source (stream identifier) to be received. + // This member will not change mid-stream and can be assumed to be const + // post initialization. + virtual uint32_t remote_ssrc() const = 0; + + // Access the currently set rtp extensions. Must be called on the packet + // delivery thread. + // TODO(tommi): This is currently only called from + // `WebRtcAudioReceiveStream::GetRtpParameters()`. See if we can remove it. + virtual const std::vector& GetRtpExtensions() const = 0; + protected: - virtual ~AudioReceiveStream() {} + virtual ~AudioReceiveStreamInterface() {} }; + } // namespace webrtc #endif // CALL_AUDIO_RECEIVE_STREAM_H_ diff --git a/TMessagesProj/jni/voip/webrtc/call/audio_send_stream.h b/TMessagesProj/jni/voip/webrtc/call/audio_send_stream.h index e38a47f871..15b439c593 100644 --- a/TMessagesProj/jni/voip/webrtc/call/audio_send_stream.h +++ b/TMessagesProj/jni/voip/webrtc/call/audio_send_stream.h @@ -46,6 +46,8 @@ class AudioSendStream : public AudioSender { // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-retransmittedbytessent uint64_t retransmitted_bytes_sent = 0; int32_t packets_sent = 0; + // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalpacketsenddelay + TimeDelta total_packet_send_delay = TimeDelta::Zero(); // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-retransmittedpacketssent uint64_t retransmitted_packets_sent = 0; int32_t packets_lost = -1; @@ -59,7 +61,6 @@ class AudioSendStream : public AudioSender { // https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamtrackstats-totalaudioenergy double total_input_energy = 0.0; double total_input_duration = 0.0; - bool typing_noise_detected = false; ANAStats ana_statistics; AudioProcessingStats apm_statistics; diff --git a/TMessagesProj/jni/voip/webrtc/call/bitrate_allocator.cc b/TMessagesProj/jni/voip/webrtc/call/bitrate_allocator.cc index 1693661ef5..2684a1650e 100644 --- a/TMessagesProj/jni/voip/webrtc/call/bitrate_allocator.cc +++ b/TMessagesProj/jni/voip/webrtc/call/bitrate_allocator.cc @@ -23,7 +23,6 @@ #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_minmax.h" #include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/call/bitrate_estimator_tests.cc b/TMessagesProj/jni/voip/webrtc/call/bitrate_estimator_tests.cc index 4634f6e147..5fb05e59d9 100644 --- a/TMessagesProj/jni/voip/webrtc/call/bitrate_estimator_tests.cc +++ 
b/TMessagesProj/jni/voip/webrtc/call/bitrate_estimator_tests.cc @@ -12,6 +12,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/test/create_frame_generator.h" #include "call/call.h" #include "call/fake_network_pipe.h" @@ -40,7 +41,7 @@ class LogObserver { ~LogObserver() { rtc::LogMessage::RemoveLogToStream(&callback_); } - void PushExpectedLogLine(const std::string& expected_log_line) { + void PushExpectedLogLine(absl::string_view expected_log_line) { callback_.PushExpectedLogLine(expected_log_line); } @@ -50,13 +51,17 @@ class LogObserver { class Callback : public rtc::LogSink { public: void OnLogMessage(const std::string& message) override { + OnLogMessage(absl::string_view(message)); + } + + void OnLogMessage(absl::string_view message) override { MutexLock lock(&mutex_); // Ignore log lines that are due to missing AST extensions, these are // logged when we switch back from AST to TOF until the wrapping bitrate // estimator gives up on using AST. - if (message.find("BitrateEstimator") != std::string::npos && - message.find("packet is missing") == std::string::npos) { - received_log_lines_.push_back(message); + if (message.find("BitrateEstimator") != absl::string_view::npos && + message.find("packet is missing") == absl::string_view::npos) { + received_log_lines_.push_back(std::string(message)); } int num_popped = 0; @@ -66,7 +71,7 @@ class LogObserver { received_log_lines_.pop_front(); expected_log_lines_.pop_front(); num_popped++; - EXPECT_TRUE(a.find(b) != std::string::npos) << a << " != " << b; + EXPECT_TRUE(a.find(b) != absl::string_view::npos) << a << " != " << b; } if (expected_log_lines_.empty()) { if (num_popped > 0) { @@ -76,11 +81,11 @@ class LogObserver { } } - bool Wait() { return done_.Wait(test::CallTest::kDefaultTimeoutMs); } + bool Wait() { return done_.Wait(test::CallTest::kDefaultTimeout); } - void PushExpectedLogLine(const std::string& expected_log_line) { + void PushExpectedLogLine(absl::string_view expected_log_line) { MutexLock lock(&mutex_); - expected_log_lines_.push_back(expected_log_line); + expected_log_lines_.emplace_back(expected_log_line); } private: @@ -105,7 +110,7 @@ class BitrateEstimatorTest : public test::CallTest { virtual ~BitrateEstimatorTest() { EXPECT_TRUE(streams_.empty()); } virtual void SetUp() { - SendTask(RTC_FROM_HERE, task_queue(), [this]() { + SendTask(task_queue(), [this]() { CreateCalls(); send_transport_.reset(new test::DirectTransport( @@ -136,7 +141,8 @@ class BitrateEstimatorTest : public test::CallTest { test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config); SetVideoEncoderConfig(video_encoder_config); - receive_config_ = VideoReceiveStream::Config(receive_transport_.get()); + receive_config_ = + VideoReceiveStreamInterface::Config(receive_transport_.get()); // receive_config_.decoders will be set by every stream separately. 
receive_config_.rtp.remote_ssrc = GetVideoSendConfig()->rtp.ssrcs[0]; receive_config_.rtp.local_ssrc = kReceiverLocalVideoSsrc; @@ -148,7 +154,7 @@ class BitrateEstimatorTest : public test::CallTest { } virtual void TearDown() { - SendTask(RTC_FROM_HERE, task_queue(), [this]() { + SendTask(task_queue(), [this]() { for (auto* stream : streams_) { stream->StopSending(); delete stream; @@ -190,7 +196,7 @@ class BitrateEstimatorTest : public test::CallTest { DegradationPreference::MAINTAIN_FRAMERATE); send_stream_->Start(); - VideoReceiveStream::Decoder decoder; + VideoReceiveStreamInterface::Decoder decoder; test_->receive_config_.decoder_factory = &decoder_factory_; decoder.payload_type = test_->GetVideoSendConfig()->rtp.payload_type; decoder.video_format = @@ -232,7 +238,7 @@ class BitrateEstimatorTest : public test::CallTest { BitrateEstimatorTest* test_; bool is_sending_receiving_; VideoSendStream* send_stream_; - VideoReceiveStream* video_receive_stream_; + VideoReceiveStreamInterface* video_receive_stream_; std::unique_ptr frame_generator_capturer_; test::FunctionVideoDecoderFactory decoder_factory_; @@ -241,7 +247,7 @@ class BitrateEstimatorTest : public test::CallTest { LogObserver receiver_log_; std::unique_ptr send_transport_; std::unique_ptr receive_transport_; - VideoReceiveStream::Config receive_config_; + VideoReceiveStreamInterface::Config receive_config_; std::vector streams_; }; @@ -251,7 +257,7 @@ static const char* kSingleStreamLog = "RemoteBitrateEstimatorSingleStream: Instantiating."; TEST_F(BitrateEstimatorTest, InstantiatesTOFPerDefaultForVideo) { - SendTask(RTC_FROM_HERE, task_queue(), [this]() { + SendTask(task_queue(), [this]() { GetVideoSendConfig()->rtp.extensions.push_back( RtpExtension(RtpExtension::kTimestampOffsetUri, kTOFExtensionId)); receiver_log_.PushExpectedLogLine(kSingleStreamLog); @@ -262,7 +268,7 @@ TEST_F(BitrateEstimatorTest, InstantiatesTOFPerDefaultForVideo) { } TEST_F(BitrateEstimatorTest, ImmediatelySwitchToASTForVideo) { - SendTask(RTC_FROM_HERE, task_queue(), [this]() { + SendTask(task_queue(), [this]() { GetVideoSendConfig()->rtp.extensions.push_back( RtpExtension(RtpExtension::kAbsSendTimeUri, kASTExtensionId)); receiver_log_.PushExpectedLogLine(kSingleStreamLog); @@ -275,7 +281,7 @@ TEST_F(BitrateEstimatorTest, ImmediatelySwitchToASTForVideo) { } TEST_F(BitrateEstimatorTest, SwitchesToASTForVideo) { - SendTask(RTC_FROM_HERE, task_queue(), [this]() { + SendTask(task_queue(), [this]() { GetVideoSendConfig()->rtp.extensions.push_back( RtpExtension(RtpExtension::kTimestampOffsetUri, kTOFExtensionId)); receiver_log_.PushExpectedLogLine(kSingleStreamLog); @@ -284,7 +290,7 @@ TEST_F(BitrateEstimatorTest, SwitchesToASTForVideo) { }); EXPECT_TRUE(receiver_log_.Wait()); - SendTask(RTC_FROM_HERE, task_queue(), [this]() { + SendTask(task_queue(), [this]() { GetVideoSendConfig()->rtp.extensions[0] = RtpExtension(RtpExtension::kAbsSendTimeUri, kASTExtensionId); receiver_log_.PushExpectedLogLine("Switching to absolute send time RBE."); @@ -296,7 +302,7 @@ TEST_F(BitrateEstimatorTest, SwitchesToASTForVideo) { // This test is flaky. See webrtc:5790. 
TEST_F(BitrateEstimatorTest, DISABLED_SwitchesToASTThenBackToTOFForVideo) { - SendTask(RTC_FROM_HERE, task_queue(), [this]() { + SendTask(task_queue(), [this]() { GetVideoSendConfig()->rtp.extensions.push_back( RtpExtension(RtpExtension::kTimestampOffsetUri, kTOFExtensionId)); receiver_log_.PushExpectedLogLine(kSingleStreamLog); @@ -306,7 +312,7 @@ TEST_F(BitrateEstimatorTest, DISABLED_SwitchesToASTThenBackToTOFForVideo) { }); EXPECT_TRUE(receiver_log_.Wait()); - SendTask(RTC_FROM_HERE, task_queue(), [this]() { + SendTask(task_queue(), [this]() { GetVideoSendConfig()->rtp.extensions[0] = RtpExtension(RtpExtension::kAbsSendTimeUri, kASTExtensionId); receiver_log_.PushExpectedLogLine(kAbsSendTimeLog); @@ -315,7 +321,7 @@ TEST_F(BitrateEstimatorTest, DISABLED_SwitchesToASTThenBackToTOFForVideo) { }); EXPECT_TRUE(receiver_log_.Wait()); - SendTask(RTC_FROM_HERE, task_queue(), [this]() { + SendTask(task_queue(), [this]() { GetVideoSendConfig()->rtp.extensions[0] = RtpExtension(RtpExtension::kTimestampOffsetUri, kTOFExtensionId); receiver_log_.PushExpectedLogLine(kAbsSendTimeLog); diff --git a/TMessagesProj/jni/voip/webrtc/call/call.cc b/TMessagesProj/jni/voip/webrtc/call/call.cc index b30b92f861..ae796cf6a0 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call.cc +++ b/TMessagesProj/jni/voip/webrtc/call/call.cc @@ -21,9 +21,11 @@ #include #include "absl/functional/bind_front.h" +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/transport/network_control.h" #include "audio/audio_receive_stream.h" #include "audio/audio_send_stream.h" @@ -48,21 +50,17 @@ #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_util.h" -#include "modules/utility/include/process_thread.h" #include "modules/video_coding/fec_controller_default.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" #include "system_wrappers/include/cpu_info.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" #include "video/call_stats2.h" #include "video/send_delay_stats.h" @@ -81,15 +79,14 @@ bool SendPeriodicFeedback(const std::vector& extensions) { return true; } -bool UseSendSideBwe(const ReceiveStream::RtpConfig& rtp) { - if (!rtp.transport_cc) - return false; - for (const auto& extension : rtp.extensions) { - if (extension.uri == RtpExtension::kTransportSequenceNumberUri || - extension.uri == RtpExtension::kTransportSequenceNumberV2Uri) - return true; - } - return false; +bool HasTransportSequenceNumber(const RtpHeaderExtensionMap& map) { + return map.IsRegistered(kRtpExtensionTransportSequenceNumber) || + map.IsRegistered(kRtpExtensionTransportSequenceNumber02); +} + +bool UseSendSideBwe(const ReceiveStreamInterface* stream) { + return stream->transport_cc() && + HasTransportSequenceNumber(stream->GetRtpExtensionMap()); } const int* FindKeyByValue(const std::map& m, int v) { @@ -101,7 +98,7 @@ const int* 
FindKeyByValue(const std::map& m, int v) { } std::unique_ptr CreateRtcLogStreamConfig( - const VideoReceiveStream::Config& config) { + const VideoReceiveStreamInterface::Config& config) { auto rtclog_config = std::make_unique(); rtclog_config->remote_ssrc = config.rtp.remote_ssrc; rtclog_config->local_ssrc = config.rtp.local_ssrc; @@ -136,7 +133,7 @@ std::unique_ptr CreateRtcLogStreamConfig( } std::unique_ptr CreateRtcLogStreamConfig( - const AudioReceiveStream::Config& config) { + const AudioReceiveStreamInterface::Config& config) { auto rtclog_config = std::make_unique(); rtclog_config->remote_ssrc = config.rtp.remote_ssrc; rtclog_config->local_ssrc = config.rtp.local_ssrc; @@ -205,10 +202,12 @@ class Call final : public webrtc::Call, Call(Clock* clock, const Call::Config& config, std::unique_ptr transport_send, - rtc::scoped_refptr module_process_thread, TaskQueueFactory* task_queue_factory); ~Call() override; + Call(const Call&) = delete; + Call& operator=(const Call&) = delete; + // Implements webrtc::Call. PacketReceiver* Receiver() override; @@ -216,10 +215,10 @@ class Call final : public webrtc::Call, const webrtc::AudioSendStream::Config& config) override; void DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) override; - webrtc::AudioReceiveStream* CreateAudioReceiveStream( - const webrtc::AudioReceiveStream::Config& config) override; + webrtc::AudioReceiveStreamInterface* CreateAudioReceiveStream( + const webrtc::AudioReceiveStreamInterface::Config& config) override; void DestroyAudioReceiveStream( - webrtc::AudioReceiveStream* receive_stream) override; + webrtc::AudioReceiveStreamInterface* receive_stream) override; webrtc::VideoSendStream* CreateVideoSendStream( webrtc::VideoSendStream::Config config, @@ -230,13 +229,13 @@ class Call final : public webrtc::Call, std::unique_ptr fec_controller) override; void DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) override; - webrtc::VideoReceiveStream* CreateVideoReceiveStream( - webrtc::VideoReceiveStream::Config configuration) override; + webrtc::VideoReceiveStreamInterface* CreateVideoReceiveStream( + webrtc::VideoReceiveStreamInterface::Config configuration) override; void DestroyVideoReceiveStream( - webrtc::VideoReceiveStream* receive_stream) override; + webrtc::VideoReceiveStreamInterface* receive_stream) override; FlexfecReceiveStream* CreateFlexfecReceiveStream( - const FlexfecReceiveStream::Config& config) override; + const FlexfecReceiveStream::Config config) override; void DestroyFlexfecReceiveStream( FlexfecReceiveStream* receive_stream) override; @@ -246,7 +245,7 @@ class Call final : public webrtc::Call, Stats GetStats() const override; - const WebRtcKeyValueConfig& trials() const override; + const FieldTrialsView& trials() const override; TaskQueueBase* network_thread() const override; TaskQueueBase* worker_thread() const override; @@ -264,11 +263,15 @@ class Call final : public webrtc::Call, void OnAudioTransportOverheadChanged( int transport_overhead_per_packet) override; - void OnLocalSsrcUpdated(webrtc::AudioReceiveStream& stream, + void OnLocalSsrcUpdated(webrtc::AudioReceiveStreamInterface& stream, + uint32_t local_ssrc) override; + void OnLocalSsrcUpdated(VideoReceiveStreamInterface& stream, + uint32_t local_ssrc) override; + void OnLocalSsrcUpdated(FlexfecReceiveStream& stream, uint32_t local_ssrc) override; - void OnUpdateSyncGroup(webrtc::AudioReceiveStream& stream, - const std::string& sync_group) override; + void OnUpdateSyncGroup(webrtc::AudioReceiveStreamInterface& stream, + 
absl::string_view sync_group) override; void OnSentPacket(const rtc::SentPacket& sent_packet) override; @@ -344,12 +347,21 @@ class Call final : public webrtc::Call, DeliveryStatus DeliverRtp(MediaType media_type, rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) RTC_RUN_ON(worker_thread_); - void ConfigureSync(const std::string& sync_group) RTC_RUN_ON(worker_thread_); + + AudioReceiveStreamImpl* FindAudioStreamForSyncGroup( + absl::string_view sync_group) RTC_RUN_ON(worker_thread_); + void ConfigureSync(absl::string_view sync_group) RTC_RUN_ON(worker_thread_); void NotifyBweOfReceivedPacket(const RtpPacketReceived& packet, - MediaType media_type) + MediaType media_type, + bool use_send_side_bwe) RTC_RUN_ON(worker_thread_); + bool IdentifyReceivedPacket(RtpPacketReceived& packet, + bool* use_send_side_bwe = nullptr); + bool RegisterReceiveStream(uint32_t ssrc, ReceiveStreamInterface* stream); + bool UnregisterReceiveStream(uint32_t ssrc); + void UpdateAggregateNetworkState(); // Ensure that necessary process threads are started, and any required @@ -360,15 +372,15 @@ class Call final : public webrtc::Call, TaskQueueFactory* const task_queue_factory_; TaskQueueBase* const worker_thread_; TaskQueueBase* const network_thread_; + const std::unique_ptr decode_sync_; RTC_NO_UNIQUE_ADDRESS SequenceChecker send_transport_sequence_checker_; const int num_cpu_cores_; - const rtc::scoped_refptr module_process_thread_; const std::unique_ptr call_stats_; const std::unique_ptr bitrate_allocator_; const Call::Config config_ RTC_GUARDED_BY(worker_thread_); // Maps to config_.trials, can be used from any thread via `trials()`. - const WebRtcKeyValueConfig& trials_; + const FieldTrialsView& trials_; NetworkState audio_network_state_ RTC_GUARDED_BY(worker_thread_); NetworkState video_network_state_ RTC_GUARDED_BY(worker_thread_); @@ -382,16 +394,13 @@ class Call final : public webrtc::Call, // Audio, Video, and FlexFEC receive streams are owned by the client that // creates them. // TODO(bugs.webrtc.org/11993): Move audio_receive_streams_, - // video_receive_streams_ and sync_stream_mapping_ over to the network thread. - std::set audio_receive_streams_ + // video_receive_streams_ over to the network thread. + std::set audio_receive_streams_ RTC_GUARDED_BY(worker_thread_); std::set video_receive_streams_ RTC_GUARDED_BY(worker_thread_); - std::map sync_stream_mapping_ - RTC_GUARDED_BY(worker_thread_); - - // TODO(nisse): Should eventually be injected at creation, - // with a single object in the bundled case. + // TODO(bugs.webrtc.org/7135, bugs.webrtc.org/9719): Should eventually be + // injected at creation, with a single object in the bundled case. RtpStreamReceiverController audio_receiver_controller_ RTC_GUARDED_BY(worker_thread_); RtpStreamReceiverController video_receiver_controller_ @@ -400,12 +409,16 @@ class Call final : public webrtc::Call, // This extra map is used for receive processing which is // independent of media type. + RTC_NO_UNIQUE_ADDRESS SequenceChecker receive_11993_checker_; + // TODO(bugs.webrtc.org/11993): Move receive_rtp_config_ over to the // network thread. - std::map receive_rtp_config_ - RTC_GUARDED_BY(worker_thread_); + std::map receive_rtp_config_ + RTC_GUARDED_BY(&receive_11993_checker_); // Audio and Video send streams are owned by the client that creates them. + // TODO(bugs.webrtc.org/11993): `audio_send_ssrcs_` and `video_send_ssrcs_` + // should be accessed on the network thread. 
std::map audio_send_ssrcs_ RTC_GUARDED_BY(worker_thread_); std::map video_send_ssrcs_ @@ -440,6 +453,7 @@ class Call final : public webrtc::Call, std::atomic configured_max_padding_bitrate_bps_{0}; ReceiveSideCongestionController receive_side_cc_; + RepeatingTaskHandle receive_side_cc_periodic_task_; const std::unique_ptr receive_time_calculator_; @@ -466,11 +480,11 @@ class Call final : public webrtc::Call, bool is_started_ RTC_GUARDED_BY(worker_thread_) = false; + // Sequence checker for outgoing network traffic. Could be the network thread. + // Could also be a pacer owned thread or TQ such as the TaskQueuePacedSender. RTC_NO_UNIQUE_ADDRESS SequenceChecker sent_packet_sequence_checker_; absl::optional last_sent_packet_ RTC_GUARDED_BY(sent_packet_sequence_checker_); - - RTC_DISALLOW_COPY_AND_ASSIGN(Call); }; } // namespace internal @@ -488,130 +502,19 @@ std::string Call::Stats::ToString(int64_t time_ms) const { } Call* Call::Create(const Call::Config& config) { - rtc::scoped_refptr call_thread = - SharedModuleThread::Create(ProcessThread::Create("ModuleProcessThread"), - nullptr); - return Create(config, Clock::GetRealTimeClock(), std::move(call_thread), - ProcessThread::Create("PacerThread")); -} - -Call* Call::Create(const Call::Config& config, - Clock* clock, - rtc::scoped_refptr call_thread, - std::unique_ptr pacer_thread) { - RTC_DCHECK(config.task_queue_factory); - - RtpTransportControllerSendFactory transport_controller_factory_; - - RtpTransportConfig transportConfig = config.ExtractTransportConfig(); - - return new internal::Call( - clock, config, - transport_controller_factory_.Create(transportConfig, clock, - std::move(pacer_thread)), - std::move(call_thread), config.task_queue_factory); + Clock* clock = Clock::GetRealTimeClock(); + return Create(config, clock, + RtpTransportControllerSendFactory().Create( + config.ExtractTransportConfig(), clock)); } Call* Call::Create(const Call::Config& config, Clock* clock, - rtc::scoped_refptr call_thread, std::unique_ptr transportControllerSend) { RTC_DCHECK(config.task_queue_factory); return new internal::Call(clock, config, std::move(transportControllerSend), - std::move(call_thread), config.task_queue_factory); -} - -class SharedModuleThread::Impl { - public: - Impl(std::unique_ptr process_thread, - std::function on_one_ref_remaining) - : module_thread_(std::move(process_thread)), - on_one_ref_remaining_(std::move(on_one_ref_remaining)) {} - - void EnsureStarted() { - RTC_DCHECK_RUN_ON(&sequence_checker_); - if (started_) - return; - started_ = true; - module_thread_->Start(); - } - - ProcessThread* process_thread() { - RTC_DCHECK_RUN_ON(&sequence_checker_); - return module_thread_.get(); - } - - void AddRef() const { - RTC_DCHECK_RUN_ON(&sequence_checker_); - ++ref_count_; - } - - rtc::RefCountReleaseStatus Release() const { - RTC_DCHECK_RUN_ON(&sequence_checker_); - --ref_count_; - - if (ref_count_ == 0) { - module_thread_->Stop(); - return rtc::RefCountReleaseStatus::kDroppedLastRef; - } - - if (ref_count_ == 1 && on_one_ref_remaining_) { - auto moved_fn = std::move(on_one_ref_remaining_); - // NOTE: after this function returns, chances are that `this` has been - // deleted - do not touch any member variables. - // If the owner of the last reference implements a lambda that releases - // that last reference inside of the callback (which is legal according - // to this implementation), we will recursively enter Release() above, - // call Stop() and release the last reference. 
- moved_fn(); - } - - return rtc::RefCountReleaseStatus::kOtherRefsRemained; - } - - private: - RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; - mutable int ref_count_ RTC_GUARDED_BY(sequence_checker_) = 0; - std::unique_ptr const module_thread_; - std::function const on_one_ref_remaining_; - bool started_ = false; -}; - -SharedModuleThread::SharedModuleThread( - std::unique_ptr process_thread, - std::function on_one_ref_remaining) - : impl_(std::make_unique(std::move(process_thread), - std::move(on_one_ref_remaining))) {} - -SharedModuleThread::~SharedModuleThread() = default; - -// static - -rtc::scoped_refptr SharedModuleThread::Create( - std::unique_ptr process_thread, - std::function on_one_ref_remaining) { - return new SharedModuleThread(std::move(process_thread), - std::move(on_one_ref_remaining)); -} - -void SharedModuleThread::EnsureStarted() { - impl_->EnsureStarted(); -} - -ProcessThread* SharedModuleThread::process_thread() { - return impl_->process_thread(); -} - -void SharedModuleThread::AddRef() const { - impl_->AddRef(); -} - -rtc::RefCountReleaseStatus SharedModuleThread::Release() const { - auto ret = impl_->Release(); - if (ret == rtc::RefCountReleaseStatus::kDroppedLastRef) - delete this; - return ret; + config.task_queue_factory); } // This method here to avoid subclasses has to implement this method. @@ -780,7 +683,6 @@ void Call::SendStats::SetMinAllocatableRate(BitrateAllocationLimits limits) { Call::Call(Clock* clock, const Call::Config& config, std::unique_ptr transport_send, - rtc::scoped_refptr module_process_thread, TaskQueueFactory* task_queue_factory) : clock_(clock), task_queue_factory_(task_queue_factory), @@ -789,8 +691,12 @@ Call::Call(Clock* clock, // must be made on `worker_thread_` (i.e. they're one and the same). network_thread_(config.network_task_queue_ ? config.network_task_queue_ : worker_thread_), + decode_sync_(config.metronome + ? 
std::make_unique(clock_, + config.metronome, + worker_thread_) + : nullptr), num_cpu_cores_(CpuInfo::DetectNumberOfCores()), - module_process_thread_(std::move(module_process_thread)), call_stats_(new CallStats(clock_, worker_thread_)), bitrate_allocator_(new BitrateAllocator(this)), config_(config), @@ -807,7 +713,8 @@ Call::Call(Clock* clock, absl::bind_front(&PacketRouter::SendRemb, transport_send->packet_router()), /*network_state_estimator=*/nullptr), - receive_time_calculator_(ReceiveTimeCalculator::CreateFromFieldTrial()), + receive_time_calculator_( + ReceiveTimeCalculator::CreateFromFieldTrial(*config.trials)), video_send_delay_stats_(new SendDelayStats(clock_)), start_of_call_(clock_->CurrentTime()), transport_send_ptr_(transport_send.get()), @@ -817,6 +724,7 @@ Call::Call(Clock* clock, RTC_DCHECK(network_thread_); RTC_DCHECK(worker_thread_->IsCurrent()); + receive_11993_checker_.Detach(); send_transport_sequence_checker_.Detach(); sent_packet_sequence_checker_.Detach(); @@ -826,10 +734,11 @@ Call::Call(Clock* clock, call_stats_->RegisterStatsObserver(&receive_side_cc_); - module_process_thread_->process_thread()->RegisterModule( - receive_side_cc_.GetRemoteBitrateEstimator(true), RTC_FROM_HERE); - module_process_thread_->process_thread()->RegisterModule(&receive_side_cc_, - RTC_FROM_HERE); + ReceiveSideCongestionController* receive_side_cc = &receive_side_cc_; + receive_side_cc_periodic_task_ = RepeatingTaskHandle::Start( + worker_thread_, + [receive_side_cc] { return receive_side_cc->MaybeProcess(); }, + TaskQueueBase::DelayPrecision::kLow, clock_); } Call::~Call() { @@ -841,9 +750,7 @@ Call::~Call() { RTC_CHECK(audio_receive_streams_.empty()); RTC_CHECK(video_receive_streams_.empty()); - module_process_thread_->process_thread()->DeRegisterModule( - receive_side_cc_.GetRemoteBitrateEstimator(true)); - module_process_thread_->process_thread()->DeRegisterModule(&receive_side_cc_); + receive_side_cc_periodic_task_.Stop(); call_stats_->DeregisterStatsObserver(&receive_side_cc_); send_stats_.SetFirstPacketTime(transport_send_->GetFirstPacketTime()); @@ -864,7 +771,6 @@ void Call::EnsureStarted() { // off being kicked off on request rather than in the ctor. transport_send_->RegisterTargetTransferRateObserver(this); - module_process_thread_->EnsureStarted(); transport_send_->EnsureStarted(); } @@ -897,14 +803,14 @@ webrtc::AudioSendStream* Call::CreateAudioSendStream( AudioSendStream* send_stream = new AudioSendStream( clock_, config, config_.audio_state, task_queue_factory_, transport_send_.get(), bitrate_allocator_.get(), event_log_, - call_stats_->AsRtcpRttStats(), suspended_rtp_state); + call_stats_->AsRtcpRttStats(), suspended_rtp_state, trials()); RTC_DCHECK(audio_send_ssrcs_.find(config.rtp.ssrc) == audio_send_ssrcs_.end()); audio_send_ssrcs_[config.rtp.ssrc] = send_stream; // TODO(bugs.webrtc.org/11993): call AssociateSendStream and // UpdateAggregateNetworkState asynchronously on the network thread. - for (AudioReceiveStream* stream : audio_receive_streams_) { + for (AudioReceiveStreamImpl* stream : audio_receive_streams_) { if (stream->local_ssrc() == config.rtp.ssrc) { stream->AssociateSendStream(send_stream); } @@ -932,7 +838,7 @@ void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { // TODO(bugs.webrtc.org/11993): call AssociateSendStream and // UpdateAggregateNetworkState asynchronously on the network thread. 
- for (AudioReceiveStream* stream : audio_receive_streams_) { + for (AudioReceiveStreamImpl* stream : audio_receive_streams_) { if (stream->local_ssrc() == ssrc) { stream->AssociateSendStream(nullptr); } @@ -943,15 +849,15 @@ void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { delete send_stream; } -webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream( - const webrtc::AudioReceiveStream::Config& config) { +webrtc::AudioReceiveStreamInterface* Call::CreateAudioReceiveStream( + const webrtc::AudioReceiveStreamInterface::Config& config) { TRACE_EVENT0("webrtc", "Call::CreateAudioReceiveStream"); RTC_DCHECK_RUN_ON(worker_thread_); EnsureStarted(); event_log_->Log(std::make_unique( CreateRtcLogStreamConfig(config))); - AudioReceiveStream* receive_stream = new AudioReceiveStream( + AudioReceiveStreamImpl* receive_stream = new AudioReceiveStreamImpl( clock_, transport_send_->packet_router(), config_.neteq_factory, config, config_.audio_state, event_log_); audio_receive_streams_.insert(receive_stream); @@ -964,7 +870,7 @@ webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream( // TODO(bugs.webrtc.org/11993): Update the below on the network thread. // We could possibly set up the audio_receiver_controller_ association up // as part of the async setup. - receive_rtp_config_.emplace(config.rtp.remote_ssrc, receive_stream); + RegisterReceiveStream(config.rtp.remote_ssrc, receive_stream); ConfigureSync(config.sync_group); @@ -978,12 +884,12 @@ webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream( } void Call::DestroyAudioReceiveStream( - webrtc::AudioReceiveStream* receive_stream) { + webrtc::AudioReceiveStreamInterface* receive_stream) { TRACE_EVENT0("webrtc", "Call::DestroyAudioReceiveStream"); RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(receive_stream != nullptr); - webrtc::internal::AudioReceiveStream* audio_receive_stream = - static_cast(receive_stream); + webrtc::AudioReceiveStreamImpl* audio_receive_stream = + static_cast(receive_stream); // TODO(bugs.webrtc.org/11993): Access the map, rtp config, call ConfigureSync // and UpdateAggregateNetworkState on the network thread. The call to @@ -991,18 +897,16 @@ void Call::DestroyAudioReceiveStream( audio_receive_stream->UnregisterFromTransport(); uint32_t ssrc = audio_receive_stream->remote_ssrc(); - const AudioReceiveStream::Config& config = audio_receive_stream->config(); - receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config.rtp)) - ->RemoveStream(ssrc); + receive_side_cc_.RemoveStream(ssrc); audio_receive_streams_.erase(audio_receive_stream); - const auto it = sync_stream_mapping_.find(config.sync_group); - if (it != sync_stream_mapping_.end() && it->second == audio_receive_stream) { - sync_stream_mapping_.erase(it); - ConfigureSync(config.sync_group); - } - receive_rtp_config_.erase(ssrc); + // After calling erase(), call ConfigureSync. This will clear associated + // video streams or associate them with a different audio stream if one exists + // for this sync_group. 
+ ConfigureSync(audio_receive_stream->sync_group()); + + UnregisterReceiveStream(ssrc); UpdateAggregateNetworkState(); // TODO(bugs.webrtc.org/11993): Consider if deleting `audio_receive_stream` @@ -1038,7 +942,8 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream( call_stats_->AsRtcpRttStats(), transport_send_.get(), bitrate_allocator_.get(), video_send_delay_stats_.get(), event_log_, std::move(config), std::move(encoder_config), suspended_video_send_ssrcs_, - suspended_video_payload_states_, std::move(fec_controller)); + suspended_video_payload_states_, std::move(fec_controller), + *config_.trials); for (uint32_t ssrc : ssrcs) { RTC_DCHECK(video_send_ssrcs_.find(ssrc) == video_send_ssrcs_.end()); @@ -1079,10 +984,6 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { VideoSendStream* send_stream_impl = static_cast(send_stream); - VideoSendStream::RtpStateMap rtp_states; - VideoSendStream::RtpPayloadStateMap rtp_payload_states; - send_stream_impl->StopPermanentlyAndGetRtpStates(&rtp_states, - &rtp_payload_states); auto it = video_send_ssrcs_.begin(); while (it != video_send_ssrcs_.end()) { @@ -1102,6 +1003,10 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { if (video_send_streams_.empty()) video_send_streams_empty_.store(true, std::memory_order_relaxed); + VideoSendStream::RtpStateMap rtp_states; + VideoSendStream::RtpPayloadStateMap rtp_payload_states; + send_stream_impl->StopPermanentlyAndGetRtpStates(&rtp_states, + &rtp_payload_states); for (const auto& kv : rtp_states) { suspended_video_send_ssrcs_[kv.first] = kv.second; } @@ -1115,8 +1020,8 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { delete send_stream_impl; } -webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream( - webrtc::VideoReceiveStream::Config configuration) { +webrtc::VideoReceiveStreamInterface* Call::CreateVideoReceiveStream( + webrtc::VideoReceiveStreamInterface::Config configuration) { TRACE_EVENT0("webrtc", "Call::CreateVideoReceiveStream"); RTC_DCHECK_RUN_ON(worker_thread_); @@ -1135,21 +1040,20 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream( VideoReceiveStream2* receive_stream = new VideoReceiveStream2( task_queue_factory_, this, num_cpu_cores_, transport_send_->packet_router(), std::move(configuration), - call_stats_.get(), clock_, new VCMTiming(clock_), - &nack_periodic_processor_); + call_stats_.get(), clock_, std::make_unique(clock_, trials()), + &nack_periodic_processor_, decode_sync_.get(), event_log_); // TODO(bugs.webrtc.org/11993): Set this up asynchronously on the network // thread. receive_stream->RegisterWithTransport(&video_receiver_controller_); - const webrtc::VideoReceiveStream::Config::Rtp& rtp = receive_stream->rtp(); - if (rtp.rtx_ssrc) { + if (receive_stream->rtx_ssrc()) { // We record identical config for the rtx stream as for the main // stream. Since the transport_send_cc negotiation is per payload // type, we may get an incorrect value for the rtx stream, but // that is unlikely to matter in practice. 
- receive_rtp_config_.emplace(rtp.rtx_ssrc, receive_stream); + RegisterReceiveStream(receive_stream->rtx_ssrc(), receive_stream); } - receive_rtp_config_.emplace(rtp.remote_ssrc, receive_stream); + RegisterReceiveStream(receive_stream->remote_ssrc(), receive_stream); video_receive_streams_.insert(receive_stream); ConfigureSync(receive_stream->sync_group()); @@ -1160,7 +1064,7 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream( } void Call::DestroyVideoReceiveStream( - webrtc::VideoReceiveStream* receive_stream) { + webrtc::VideoReceiveStreamInterface* receive_stream) { TRACE_EVENT0("webrtc", "Call::DestroyVideoReceiveStream"); RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(receive_stream != nullptr); @@ -1169,50 +1073,40 @@ void Call::DestroyVideoReceiveStream( // TODO(bugs.webrtc.org/11993): Unregister on the network thread. receive_stream_impl->UnregisterFromTransport(); - const webrtc::VideoReceiveStream::Config::Rtp& rtp = - receive_stream_impl->rtp(); - // Remove all ssrcs pointing to a receive stream. As RTX retransmits on a // separate SSRC there can be either one or two. - receive_rtp_config_.erase(rtp.remote_ssrc); - if (rtp.rtx_ssrc) { - receive_rtp_config_.erase(rtp.rtx_ssrc); + UnregisterReceiveStream(receive_stream_impl->remote_ssrc()); + + if (receive_stream_impl->rtx_ssrc()) { + UnregisterReceiveStream(receive_stream_impl->rtx_ssrc()); } video_receive_streams_.erase(receive_stream_impl); ConfigureSync(receive_stream_impl->sync_group()); - receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(rtp)) - ->RemoveStream(rtp.remote_ssrc); + receive_side_cc_.RemoveStream(receive_stream_impl->remote_ssrc()); UpdateAggregateNetworkState(); delete receive_stream_impl; } FlexfecReceiveStream* Call::CreateFlexfecReceiveStream( - const FlexfecReceiveStream::Config& config) { + const FlexfecReceiveStream::Config config) { TRACE_EVENT0("webrtc", "Call::CreateFlexfecReceiveStream"); RTC_DCHECK_RUN_ON(worker_thread_); - RecoveredPacketReceiver* recovered_packet_receiver = this; - - FlexfecReceiveStreamImpl* receive_stream; - // Unlike the video and audio receive streams, FlexfecReceiveStream implements // RtpPacketSinkInterface itself, and hence its constructor passes its `this` // pointer to video_receiver_controller_->CreateStream(). Calling the // constructor while on the worker thread ensures that we don't call // OnRtpPacket until the constructor is finished and the object is // in a valid state, since OnRtpPacket runs on the same thread. - receive_stream = new FlexfecReceiveStreamImpl( - clock_, config, recovered_packet_receiver, call_stats_->AsRtcpRttStats()); + FlexfecReceiveStreamImpl* receive_stream = new FlexfecReceiveStreamImpl( + clock_, std::move(config), this, call_stats_->AsRtcpRttStats()); // TODO(bugs.webrtc.org/11993): Set this up asynchronously on the network // thread. receive_stream->RegisterWithTransport(&video_receiver_controller_); - - RTC_DCHECK(receive_rtp_config_.find(config.rtp.remote_ssrc) == - receive_rtp_config_.end()); - receive_rtp_config_.emplace(config.rtp.remote_ssrc, receive_stream); + RegisterReceiveStream(receive_stream->remote_ssrc(), receive_stream); // TODO(brandtr): Store config in RtcEventLog here. @@ -1228,16 +1122,14 @@ void Call::DestroyFlexfecReceiveStream(FlexfecReceiveStream* receive_stream) { // TODO(bugs.webrtc.org/11993): Unregister on the network thread. 
receive_stream_impl->UnregisterFromTransport(); - RTC_DCHECK(receive_stream != nullptr); - const FlexfecReceiveStream::RtpConfig& rtp = receive_stream->rtp_config(); - receive_rtp_config_.erase(rtp.remote_ssrc); + auto ssrc = receive_stream_impl->remote_ssrc(); + UnregisterReceiveStream(ssrc); // Remove all SSRCs pointing to the FlexfecReceiveStreamImpl to be // destroyed. - receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(rtp)) - ->RemoveStream(rtp.remote_ssrc); + receive_side_cc_.RemoveStream(ssrc); - delete receive_stream; + delete receive_stream_impl; } void Call::AddAdaptationResource(rtc::scoped_refptr resource) { @@ -1266,11 +1158,7 @@ Call::Stats Call::GetStats() const { stats.rtt_ms = call_stats_->LastProcessedRtt(); // Fetch available send/receive bitrates. - std::vector ssrcs; - uint32_t recv_bandwidth = 0; - receive_side_cc_.GetRemoteBitrateEstimator(false)->LatestEstimate( - &ssrcs, &recv_bandwidth); - stats.recv_bandwidth_bps = recv_bandwidth; + stats.recv_bandwidth_bps = receive_side_cc_.LatestReceiveSideEstimate().bps(); stats.send_bandwidth_bps = last_bandwidth_bps_.load(std::memory_order_relaxed); stats.max_padding_bitrate_bps = @@ -1279,7 +1167,7 @@ Call::Stats Call::GetStats() const { return stats; } -const WebRtcKeyValueConfig& Call::trials() const { +const FieldTrialsView& Call::trials() const { return trials_; } @@ -1320,14 +1208,14 @@ void Call::SignalChannelNetworkState(MediaType media, NetworkState state) { } else { // TODO(bugs.webrtc.org/11993): Remove workaround when we no longer need to // post to the worker thread. - worker_thread_->PostTask(ToQueuedTask(task_safety_, std::move(closure))); + worker_thread_->PostTask(SafeTask(task_safety_.flag(), std::move(closure))); } } void Call::OnAudioTransportOverheadChanged(int transport_overhead_per_packet) { RTC_DCHECK_RUN_ON(network_thread_); worker_thread_->PostTask( - ToQueuedTask(task_safety_, [this, transport_overhead_per_packet]() { + SafeTask(task_safety_.flag(), [this, transport_overhead_per_packet]() { // TODO(bugs.webrtc.org/11993): Move this over to the network thread. 
RTC_DCHECK_RUN_ON(worker_thread_); for (auto& kv : audio_send_ssrcs_) { @@ -1365,11 +1253,11 @@ void Call::UpdateAggregateNetworkState() { transport_send_->OnNetworkAvailability(aggregate_network_up); } -void Call::OnLocalSsrcUpdated(webrtc::AudioReceiveStream& stream, +void Call::OnLocalSsrcUpdated(webrtc::AudioReceiveStreamInterface& stream, uint32_t local_ssrc) { RTC_DCHECK_RUN_ON(worker_thread_); - webrtc::internal::AudioReceiveStream& receive_stream = - static_cast(stream); + webrtc::AudioReceiveStreamImpl& receive_stream = + static_cast(stream); receive_stream.SetLocalSsrc(local_ssrc); auto it = audio_send_ssrcs_.find(local_ssrc); @@ -1377,11 +1265,23 @@ void Call::OnLocalSsrcUpdated(webrtc::AudioReceiveStream& stream, : nullptr); } -void Call::OnUpdateSyncGroup(webrtc::AudioReceiveStream& stream, - const std::string& sync_group) { +void Call::OnLocalSsrcUpdated(VideoReceiveStreamInterface& stream, + uint32_t local_ssrc) { + RTC_DCHECK_RUN_ON(worker_thread_); + static_cast(stream).SetLocalSsrc(local_ssrc); +} + +void Call::OnLocalSsrcUpdated(FlexfecReceiveStream& stream, + uint32_t local_ssrc) { RTC_DCHECK_RUN_ON(worker_thread_); - webrtc::internal::AudioReceiveStream& receive_stream = - static_cast(stream); + static_cast(stream).SetLocalSsrc(local_ssrc); +} + +void Call::OnUpdateSyncGroup(webrtc::AudioReceiveStreamInterface& stream, + absl::string_view sync_group) { + RTC_DCHECK_RUN_ON(worker_thread_); + webrtc::AudioReceiveStreamImpl& receive_stream = + static_cast(stream); receive_stream.SetSyncGroup(sync_group); ConfigureSync(sync_group); } @@ -1449,60 +1349,47 @@ void Call::OnAllocationLimitsChanged(BitrateAllocationLimits limits) { std::memory_order_relaxed); } -// RTC_RUN_ON(worker_thread_) -void Call::ConfigureSync(const std::string& sync_group) { - // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread. - // Set sync only if there was no previous one. - if (sync_group.empty()) - return; - - AudioReceiveStream* sync_audio_stream = nullptr; - // Find existing audio stream. - const auto it = sync_stream_mapping_.find(sync_group); - if (it != sync_stream_mapping_.end()) { - sync_audio_stream = it->second; - } else { - // No configured audio stream, see if we can find one. - for (AudioReceiveStream* stream : audio_receive_streams_) { - if (stream->config().sync_group == sync_group) { - if (sync_audio_stream != nullptr) { - RTC_LOG(LS_WARNING) - << "Attempting to sync more than one audio stream " - "within the same sync group. This is not " - "supported in the current implementation."; - break; - } - sync_audio_stream = stream; - } +AudioReceiveStreamImpl* Call::FindAudioStreamForSyncGroup( + absl::string_view sync_group) { + RTC_DCHECK_RUN_ON(worker_thread_); + RTC_DCHECK_RUN_ON(&receive_11993_checker_); + if (!sync_group.empty()) { + for (AudioReceiveStreamImpl* stream : audio_receive_streams_) { + if (stream->sync_group() == sync_group) + return stream; } } - if (sync_audio_stream) - sync_stream_mapping_[sync_group] = sync_audio_stream; + + return nullptr; +} + +void Call::ConfigureSync(absl::string_view sync_group) { + // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread. + RTC_DCHECK_RUN_ON(worker_thread_); + // `audio_stream` may be nullptr when clearing the audio stream for a group. 
+ AudioReceiveStreamImpl* audio_stream = + FindAudioStreamForSyncGroup(sync_group); + size_t num_synced_streams = 0; for (VideoReceiveStream2* video_stream : video_receive_streams_) { if (video_stream->sync_group() != sync_group) continue; ++num_synced_streams; - if (num_synced_streams > 1) { - // TODO(pbos): Support synchronizing more than one A/V pair. - // https://code.google.com/p/webrtc/issues/detail?id=4762 - RTC_LOG(LS_WARNING) - << "Attempting to sync more than one audio/video pair " - "within the same sync group. This is not supported in " - "the current implementation."; - } + // TODO(bugs.webrtc.org/4762): Support synchronizing more than one A/V pair. + // Attempting to sync more than one audio/video pair within the same sync + // group is not supported in the current implementation. // Only sync the first A/V pair within this sync group. if (num_synced_streams == 1) { // sync_audio_stream may be null and that's ok. - video_stream->SetSync(sync_audio_stream); + video_stream->SetSync(audio_stream); } else { video_stream->SetSync(nullptr); } } } -// RTC_RUN_ON(network_thread_) void Call::DeliverRtcp(MediaType media_type, rtc::CopyOnWriteBuffer packet) { + RTC_DCHECK_RUN_ON(network_thread_); TRACE_EVENT0("webrtc", "Call::DeliverRtcp"); // TODO(bugs.webrtc.org/11993): This DCHECK is here just to maintain the @@ -1520,7 +1407,7 @@ void Call::DeliverRtcp(MediaType media_type, rtc::CopyOnWriteBuffer packet) { // TODO(bugs.webrtc.org/11993): This should execute directly on the network // thread. worker_thread_->PostTask( - ToQueuedTask(task_safety_, [this, packet = std::move(packet)]() { + SafeTask(task_safety_.flag(), [this, packet = std::move(packet)]() { RTC_DCHECK_RUN_ON(worker_thread_); receive_stats_.AddReceivedRtcpBytes(static_cast(packet.size())); @@ -1530,7 +1417,7 @@ void Call::DeliverRtcp(MediaType media_type, rtc::CopyOnWriteBuffer packet) { rtcp_delivered = true; } - for (AudioReceiveStream* stream : audio_receive_streams_) { + for (AudioReceiveStreamImpl* stream : audio_receive_streams_) { stream->DeliverRtcp(packet.cdata(), packet.size()); rtcp_delivered = true; } @@ -1582,22 +1469,11 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type, RTC_DCHECK(media_type == MediaType::AUDIO || media_type == MediaType::VIDEO || is_keep_alive_packet); - auto it = receive_rtp_config_.find(parsed_packet.Ssrc()); - if (it == receive_rtp_config_.end()) { - RTC_LOG(LS_ERROR) << "receive_rtp_config_ lookup failed for ssrc " - << parsed_packet.Ssrc(); - // Destruction of the receive stream, including deregistering from the - // RtpDemuxer, is not protected by the `worker_thread_`. - // But deregistering in the `receive_rtp_config_` map is. So by not passing - // the packet on to demuxing in this case, we prevent incoming packets to be - // passed on via the demuxer to a receive stream which is being torned down. + bool use_send_side_bwe = false; + if (!IdentifyReceivedPacket(parsed_packet, &use_send_side_bwe)) return DELIVERY_UNKNOWN_SSRC; - } - parsed_packet.IdentifyExtensions( - RtpHeaderExtensionMap(it->second->rtp_config().extensions)); - - NotifyBweOfReceivedPacket(parsed_packet, media_type); + NotifyBweOfReceivedPacket(parsed_packet, media_type, use_send_side_bwe); // RateCounters expect input parameter as int, save it as int, // instead of converting each time it is passed to RateCounter::Add below. 
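
// ConfigureSync() above now finds at most one audio stream per sync group and
// attaches it only to the first video stream in that group, clearing sync on
// any additional video streams. A simplified model of that selection logic;
// the structs and the ConfigureSync() signature here are stand-ins for the
// real Call/stream types.
#include <string>
#include <vector>

struct AudioStream {
  std::string sync_group;
};

struct VideoStream {
  std::string sync_group;
  const AudioStream* synced_audio = nullptr;  // nullptr: no A/V sync.
};

void ConfigureSync(const std::string& sync_group,
                   const std::vector<AudioStream*>& audio_streams,
                   std::vector<VideoStream*>& video_streams) {
  // Find the audio stream for this group, if any (may stay nullptr).
  const AudioStream* audio = nullptr;
  if (!sync_group.empty()) {
    for (const AudioStream* a : audio_streams) {
      if (a->sync_group == sync_group) {
        audio = a;
        break;
      }
    }
  }
  // Only the first matching video stream is paired with the audio stream.
  size_t num_synced = 0;
  for (VideoStream* v : video_streams) {
    if (v->sync_group != sync_group) continue;
    ++num_synced;
    v->synced_audio = (num_synced == 1) ? audio : nullptr;
  }
}

int main() {
  AudioStream audio{"av_sync"};
  VideoStream v1{"av_sync"};
  VideoStream v2{"av_sync"};
  std::vector<AudioStream*> audio_streams = {&audio};
  std::vector<VideoStream*> video_streams = {&v1, &v2};
  ConfigureSync("av_sync", audio_streams, video_streams);
  // v1.synced_audio == &audio, v2.synced_audio == nullptr.
}
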
@@ -1648,33 +1524,18 @@ void Call::OnRecoveredPacket(const uint8_t* packet, size_t length) { parsed_packet.set_recovered(true); - auto it = receive_rtp_config_.find(parsed_packet.Ssrc()); - if (it == receive_rtp_config_.end()) { - RTC_LOG(LS_ERROR) << "receive_rtp_config_ lookup failed for ssrc " - << parsed_packet.Ssrc(); - // Destruction of the receive stream, including deregistering from the - // RtpDemuxer, is not protected by the `worker_thread_`. - // But deregistering in the `receive_rtp_config_` map is. - // So by not passing the packet on to demuxing in this case, we prevent - // incoming packets to be passed on via the demuxer to a receive stream - // which is being torn down. + if (!IdentifyReceivedPacket(parsed_packet)) return; - } - parsed_packet.IdentifyExtensions( - RtpHeaderExtensionMap(it->second->rtp_config().extensions)); // TODO(brandtr): Update here when we support protecting audio packets too. parsed_packet.set_payload_type_frequency(kVideoPayloadTypeFrequency); video_receiver_controller_.OnRtpPacket(parsed_packet); } -// RTC_RUN_ON(worker_thread_) void Call::NotifyBweOfReceivedPacket(const RtpPacketReceived& packet, - MediaType media_type) { - auto it = receive_rtp_config_.find(packet.Ssrc()); - bool use_send_side_bwe = (it != receive_rtp_config_.end()) && - UseSendSideBwe(it->second->rtp_config()); - + MediaType media_type, + bool use_send_side_bwe) { + RTC_DCHECK_RUN_ON(worker_thread_); RTPHeader header; packet.GetHeader(&header); @@ -1688,12 +1549,6 @@ void Call::NotifyBweOfReceivedPacket(const RtpPacketReceived& packet, if (!use_send_side_bwe && header.extension.hasTransportSequenceNumber) { // Inconsistent configuration of send side BWE. Do nothing. - // TODO(nisse): Without this check, we may produce RTCP feedback - // packets even when not negotiated. But it would be cleaner to - // move the check down to RTCPSender::SendFeedbackPacket, which - // would also help the PacketRouter to select an appropriate rtp - // module in the case that some, but not all, have RTCP feedback - // enabled. return; } // For audio, we only support send side BWE. 
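
// DeliverRtp() and OnRecoveredPacket() above both route the SSRC lookup
// through a single IdentifyReceivedPacket() helper (defined further down in
// this file): unknown SSRCs are dropped before demuxing, otherwise the
// stream's header-extension map is attached and the caller learns whether
// send-side BWE applies. A simplified model of that flow; the types and names
// below are assumptions, not the WebRTC classes.
#include <cstdint>
#include <map>
#include <optional>

struct StreamInfo {
  bool use_send_side_bwe = false;
  // The real entry also carries the stream's RTP header-extension map.
};

enum class Delivery { kOk, kUnknownSsrc };

class Demuxer {
 public:
  void AddStream(uint32_t ssrc, StreamInfo info) { streams_[ssrc] = info; }

  // Counterpart of IdentifyReceivedPacket(): resolves the SSRC or reports
  // failure so the packet can be dropped instead of being demuxed.
  std::optional<StreamInfo> Identify(uint32_t ssrc) const {
    auto it = streams_.find(ssrc);
    if (it == streams_.end()) return std::nullopt;
    return it->second;
  }

  Delivery DeliverRtp(uint32_t ssrc) {
    std::optional<StreamInfo> info = Identify(ssrc);
    if (!info) return Delivery::kUnknownSsrc;  // Stream may be torn down.
    // ... IdentifyExtensions(...) and BWE notification with
    //     info->use_send_side_bwe would happen here ...
    return Delivery::kOk;
  }

 private:
  std::map<uint32_t, StreamInfo> streams_;
};

int main() {
  Demuxer demuxer;
  demuxer.AddStream(/*ssrc=*/1111, {/*use_send_side_bwe=*/true});
  Delivery known = demuxer.DeliverRtp(1111);    // kOk
  Delivery unknown = demuxer.DeliverRtp(9999);  // kUnknownSsrc, packet dropped
  (void)known;
  (void)unknown;
}
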
@@ -1705,6 +1560,45 @@ void Call::NotifyBweOfReceivedPacket(const RtpPacketReceived& packet, } } +bool Call::IdentifyReceivedPacket(RtpPacketReceived& packet, + bool* use_send_side_bwe /*= nullptr*/) { + RTC_DCHECK_RUN_ON(&receive_11993_checker_); + auto it = receive_rtp_config_.find(packet.Ssrc()); + if (it == receive_rtp_config_.end()) { + RTC_DLOG(LS_WARNING) << "receive_rtp_config_ lookup failed for ssrc " + << packet.Ssrc(); + return false; + } + + packet.IdentifyExtensions(it->second->GetRtpExtensionMap()); + + if (use_send_side_bwe) { + *use_send_side_bwe = UseSendSideBwe(it->second); + } + + return true; +} + +bool Call::RegisterReceiveStream(uint32_t ssrc, + ReceiveStreamInterface* stream) { + RTC_DCHECK_RUN_ON(&receive_11993_checker_); + RTC_DCHECK(stream); + auto inserted = receive_rtp_config_.emplace(ssrc, stream); + if (!inserted.second) { + RTC_DLOG(LS_WARNING) << "ssrc already registered: " << ssrc; + } + return inserted.second; +} + +bool Call::UnregisterReceiveStream(uint32_t ssrc) { + RTC_DCHECK_RUN_ON(&receive_11993_checker_); + size_t erased = receive_rtp_config_.erase(ssrc); + if (!erased) { + RTC_DLOG(LS_WARNING) << "ssrc wasn't registered: " << ssrc; + } + return erased != 0u; +} + } // namespace internal } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/call.h b/TMessagesProj/jni/voip/webrtc/call/call.h index f6388c3c78..366978392e 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call.h +++ b/TMessagesProj/jni/voip/webrtc/call/call.h @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/adaptation/resource.h" #include "api/media_types.h" #include "api/task_queue/task_queue_base.h" @@ -26,7 +27,6 @@ #include "call/rtp_transport_controller_send_interface.h" #include "call/video_receive_stream.h" #include "call/video_send_stream.h" -#include "modules/utility/include/process_thread.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" @@ -34,40 +34,16 @@ namespace webrtc { -// A restricted way to share the module process thread across multiple instances -// of Call that are constructed on the same worker thread (which is what the -// peer connection factory guarantees). -// SharedModuleThread supports a callback that is issued when only one reference -// remains, which is used to indicate to the original owner that the thread may -// be discarded. -class SharedModuleThread : public rtc::RefCountInterface { - protected: - SharedModuleThread(std::unique_ptr process_thread, - std::function on_one_ref_remaining); - friend class rtc::scoped_refptr; - ~SharedModuleThread() override; - - public: - // Allows injection of an externally created process thread. - static rtc::scoped_refptr Create( - std::unique_ptr process_thread, - std::function on_one_ref_remaining); - - void EnsureStarted(); - - ProcessThread* process_thread(); - - private: - void AddRef() const override; - rtc::RefCountReleaseStatus Release() const override; - - class Impl; - mutable std::unique_ptr impl_; -}; +// A Call represents a two-way connection carrying zero or more outgoing +// and incoming media streams, transported over one or more RTP transports. // A Call instance can contain several send and/or receive streams. All streams // are assumed to have the same remote endpoint and will share bitrate estimates // etc. + +// When using the PeerConnection API, there is an one to one relationship +// between the PeerConnection and the Call. 
+ class Call { public: using Config = CallConfig; @@ -85,11 +61,6 @@ class Call { static Call* Create(const Call::Config& config); static Call* Create(const Call::Config& config, Clock* clock, - rtc::scoped_refptr call_thread, - std::unique_ptr pacer_thread); - static Call* Create(const Call::Config& config, - Clock* clock, - rtc::scoped_refptr call_thread, std::unique_ptr transportControllerSend); @@ -98,10 +69,10 @@ class Call { virtual void DestroyAudioSendStream(AudioSendStream* send_stream) = 0; - virtual AudioReceiveStream* CreateAudioReceiveStream( - const AudioReceiveStream::Config& config) = 0; + virtual AudioReceiveStreamInterface* CreateAudioReceiveStream( + const AudioReceiveStreamInterface::Config& config) = 0; virtual void DestroyAudioReceiveStream( - AudioReceiveStream* receive_stream) = 0; + AudioReceiveStreamInterface* receive_stream) = 0; virtual VideoSendStream* CreateVideoSendStream( VideoSendStream::Config config, @@ -112,16 +83,16 @@ class Call { std::unique_ptr fec_controller); virtual void DestroyVideoSendStream(VideoSendStream* send_stream) = 0; - virtual VideoReceiveStream* CreateVideoReceiveStream( - VideoReceiveStream::Config configuration) = 0; + virtual VideoReceiveStreamInterface* CreateVideoReceiveStream( + VideoReceiveStreamInterface::Config configuration) = 0; virtual void DestroyVideoReceiveStream( - VideoReceiveStream* receive_stream) = 0; + VideoReceiveStreamInterface* receive_stream) = 0; - // In order for a created VideoReceiveStream to be aware that it is + // In order for a created VideoReceiveStreamInterface to be aware that it is // protected by a FlexfecReceiveStream, the latter should be created before // the former. virtual FlexfecReceiveStream* CreateFlexfecReceiveStream( - const FlexfecReceiveStream::Config& config) = 0; + const FlexfecReceiveStream::Config config) = 0; virtual void DestroyFlexfecReceiveStream( FlexfecReceiveStream* receive_stream) = 0; @@ -157,18 +128,22 @@ class Call { // Called when a receive stream's local ssrc has changed and association with // send streams needs to be updated. 
- virtual void OnLocalSsrcUpdated(AudioReceiveStream& stream, + virtual void OnLocalSsrcUpdated(AudioReceiveStreamInterface& stream, + uint32_t local_ssrc) = 0; + virtual void OnLocalSsrcUpdated(VideoReceiveStreamInterface& stream, + uint32_t local_ssrc) = 0; + virtual void OnLocalSsrcUpdated(FlexfecReceiveStream& stream, uint32_t local_ssrc) = 0; - virtual void OnUpdateSyncGroup(AudioReceiveStream& stream, - const std::string& sync_group) = 0; + virtual void OnUpdateSyncGroup(AudioReceiveStreamInterface& stream, + absl::string_view sync_group) = 0; virtual void OnSentPacket(const rtc::SentPacket& sent_packet) = 0; virtual void SetClientBitratePreferences( const BitrateSettings& preferences) = 0; - virtual const WebRtcKeyValueConfig& trials() const = 0; + virtual const FieldTrialsView& trials() const = 0; virtual TaskQueueBase* network_thread() const = 0; virtual TaskQueueBase* worker_thread() const = 0; diff --git a/TMessagesProj/jni/voip/webrtc/call/call_config.h b/TMessagesProj/jni/voip/webrtc/call/call_config.h index f149790150..3072fa452f 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call_config.h +++ b/TMessagesProj/jni/voip/webrtc/call/call_config.h @@ -11,13 +11,14 @@ #define CALL_CALL_CONFIG_H_ #include "api/fec_controller.h" +#include "api/field_trials_view.h" +#include "api/metronome/metronome.h" #include "api/neteq/neteq_factory.h" #include "api/network_state_predictor.h" #include "api/rtc_error.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/bitrate_settings.h" #include "api/transport/network_control.h" -#include "api/transport/webrtc_key_value_config.h" #include "call/audio_state.h" #include "call/rtp_transport_config.h" #include "call/rtp_transport_controller_send_factory_interface.h" @@ -69,12 +70,14 @@ struct CallConfig { // Key-value mapping of internal configurations to apply, // e.g. field trials. - const WebRtcKeyValueConfig* trials = nullptr; + const FieldTrialsView* trials = nullptr; TaskQueueBase* const network_task_queue_ = nullptr; // RtpTransportControllerSend to use for this call. 
RtpTransportControllerSendFactoryInterface* rtp_transport_controller_send_factory = nullptr; + + Metronome* metronome = nullptr; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/call_factory.cc b/TMessagesProj/jni/voip/webrtc/call/call_factory.cc index aeb3cbdaa7..380e80ce12 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/call/call_factory.cc @@ -15,61 +15,68 @@ #include #include #include +#include +#include "absl/memory/memory.h" #include "absl/types/optional.h" #include "api/test/simulated_network.h" +#include "api/units/time_delta.h" #include "call/call.h" #include "call/degraded_call.h" #include "call/rtp_transport_config.h" #include "rtc_base/checks.h" -#include "system_wrappers/include/field_trial.h" +#include "rtc_base/experiments/field_trial_list.h" +#include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { namespace { -bool ParseConfigParam(std::string exp_name, int* field) { - std::string group = field_trial::FindFullName(exp_name); - if (group.empty()) - return false; +using TimeScopedNetworkConfig = DegradedCall::TimeScopedNetworkConfig; - return (sscanf(group.c_str(), "%d", field) == 1); -} - -absl::optional ParseDegradationConfig( +std::vector GetNetworkConfigs( + const FieldTrialsView& trials, bool send) { - std::string exp_prefix = "WebRTCFakeNetwork"; - if (send) { - exp_prefix += "Send"; - } else { - exp_prefix += "Receive"; - } - - webrtc::BuiltInNetworkBehaviorConfig config; - bool configured = false; - configured |= - ParseConfigParam(exp_prefix + "DelayMs", &config.queue_delay_ms); - configured |= ParseConfigParam(exp_prefix + "DelayStdDevMs", - &config.delay_standard_deviation_ms); - int queue_length = 0; - if (ParseConfigParam(exp_prefix + "QueueLength", &queue_length)) { - RTC_CHECK_GE(queue_length, 0); - config.queue_length_packets = queue_length; - configured = true; - } - configured |= - ParseConfigParam(exp_prefix + "CapacityKbps", &config.link_capacity_kbps); - configured |= - ParseConfigParam(exp_prefix + "LossPercent", &config.loss_percent); - int allow_reordering = 0; - if (ParseConfigParam(exp_prefix + "AllowReordering", &allow_reordering)) { - config.allow_reordering = true; - configured = true; - } - configured |= ParseConfigParam(exp_prefix + "AvgBurstLossLength", - &config.avg_burst_loss_length); - return configured - ? absl::optional(config) - : absl::nullopt; + FieldTrialStructList trials_list( + {FieldTrialStructMember("queue_length_packets", + [](TimeScopedNetworkConfig* p) { + // FieldTrialParser does not natively support + // size_t type, so use this ugly cast as + // workaround. 
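
// GetNetworkConfigs() above (its member list continues right after this
// sketch) uses WebRTC's FieldTrialStructList/ParseFieldTrial machinery to turn
// the "WebRTC-FakeNetworkSendConfig"/"WebRTC-FakeNetworkReceiveConfig" trial
// strings into TimeScopedNetworkConfig entries. The sketch below shows the
// underlying idea with hand-rolled parsing of a single "key:value,key:value"
// entry; the struct is a small subset and the parsing format is an assumption,
// not the real field-trial syntax.
#include <cstdint>
#include <sstream>
#include <string>

struct FakeNetworkConfig {
  int queue_delay_ms = 0;
  int link_capacity_kbps = 0;
  int loss_percent = 0;
  int64_t duration_ms = -1;  // -1: the config applies indefinitely.
};

// Unknown keys are ignored, so unset parameters keep their defaults, which is
// the behaviour a struct-list field-trial parser gives you as well.
FakeNetworkConfig ParseConfig(const std::string& trial_group) {
  FakeNetworkConfig config;
  std::stringstream ss(trial_group);
  std::string item;
  while (std::getline(ss, item, ',')) {
    const size_t colon = item.find(':');
    if (colon == std::string::npos) continue;
    const std::string key = item.substr(0, colon);
    const int value = std::stoi(item.substr(colon + 1));
    if (key == "queue_delay_ms") config.queue_delay_ms = value;
    else if (key == "link_capacity_kbps") config.link_capacity_kbps = value;
    else if (key == "loss_percent") config.loss_percent = value;
    else if (key == "duration") config.duration_ms = value;
  }
  return config;
}

int main() {
  FakeNetworkConfig config =
      ParseConfig("queue_delay_ms:50,link_capacity_kbps:300,loss_percent:1");
  (void)config;
}
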
+ return reinterpret_cast( + &p->queue_length_packets); + }), + FieldTrialStructMember( + "queue_delay_ms", + [](TimeScopedNetworkConfig* p) { return &p->queue_delay_ms; }), + FieldTrialStructMember("delay_standard_deviation_ms", + [](TimeScopedNetworkConfig* p) { + return &p->delay_standard_deviation_ms; + }), + FieldTrialStructMember( + "link_capacity_kbps", + [](TimeScopedNetworkConfig* p) { return &p->link_capacity_kbps; }), + FieldTrialStructMember( + "loss_percent", + [](TimeScopedNetworkConfig* p) { return &p->loss_percent; }), + FieldTrialStructMember( + "allow_reordering", + [](TimeScopedNetworkConfig* p) { return &p->allow_reordering; }), + FieldTrialStructMember("avg_burst_loss_length", + [](TimeScopedNetworkConfig* p) { + return &p->avg_burst_loss_length; + }), + FieldTrialStructMember( + "packet_overhead", + [](TimeScopedNetworkConfig* p) { return &p->packet_overhead; }), + FieldTrialStructMember( + "duration", + [](TimeScopedNetworkConfig* p) { return &p->duration; })}, + {}); + ParseFieldTrial({&trials_list}, + trials.Lookup(send ? "WebRTC-FakeNetworkSendConfig" + : "WebRTC-FakeNetworkReceiveConfig")); + return trials_list.Get(); } + } // namespace CallFactory::CallFactory() { @@ -78,38 +85,28 @@ CallFactory::CallFactory() { Call* CallFactory::CreateCall(const Call::Config& config) { RTC_DCHECK_RUN_ON(&call_thread_); - absl::optional send_degradation_config = - ParseDegradationConfig(true); - absl::optional - receive_degradation_config = ParseDegradationConfig(false); + RTC_DCHECK(config.trials); + + std::vector send_degradation_configs = + GetNetworkConfigs(*config.trials, /*send=*/true); + std::vector + receive_degradation_configs = + GetNetworkConfigs(*config.trials, /*send=*/false); RtpTransportConfig transportConfig = config.ExtractTransportConfig(); - if (send_degradation_config || receive_degradation_config) { - return new DegradedCall( - std::unique_ptr(Call::Create( - config, Clock::GetRealTimeClock(), - SharedModuleThread::Create( - ProcessThread::Create("ModuleProcessThread"), nullptr), - config.rtp_transport_controller_send_factory->Create( - transportConfig, Clock::GetRealTimeClock(), - ProcessThread::Create("PacerThread")))), - send_degradation_config, receive_degradation_config, - config.task_queue_factory); - } + Call* call = + Call::Create(config, Clock::GetRealTimeClock(), + config.rtp_transport_controller_send_factory->Create( + transportConfig, Clock::GetRealTimeClock())); - if (!module_thread_) { - module_thread_ = SharedModuleThread::Create( - ProcessThread::Create("SharedModThread"), [this]() { - RTC_DCHECK_RUN_ON(&call_thread_); - module_thread_ = nullptr; - }); + if (!send_degradation_configs.empty() || + !receive_degradation_configs.empty()) { + return new DegradedCall(absl::WrapUnique(call), send_degradation_configs, + receive_degradation_configs); } - return Call::Create(config, Clock::GetRealTimeClock(), module_thread_, - config.rtp_transport_controller_send_factory->Create( - transportConfig, Clock::GetRealTimeClock(), - ProcessThread::Create("PacerThread"))); + return call; } std::unique_ptr CreateCallFactory() { diff --git a/TMessagesProj/jni/voip/webrtc/call/call_factory.h b/TMessagesProj/jni/voip/webrtc/call/call_factory.h index 469bec39e1..9feed7bbb6 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call_factory.h +++ b/TMessagesProj/jni/voip/webrtc/call/call_factory.h @@ -29,8 +29,6 @@ class CallFactory : public CallFactoryInterface { Call* CreateCall(const CallConfig& config) override; RTC_NO_UNIQUE_ADDRESS SequenceChecker call_thread_; - 
rtc::scoped_refptr module_thread_ - RTC_GUARDED_BY(call_thread_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/call_perf_tests.cc b/TMessagesProj/jni/voip/webrtc/call/call_perf_tests.cc index f4a20b82fe..9379dce833 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call_perf_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/call/call_perf_tests.cc @@ -13,14 +13,18 @@ #include #include +#include "absl/strings/string_view.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/numerics/samples_stats_counter.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" +#include "api/test/metrics/global_metrics_logger_and_exporter.h" +#include "api/test/metrics/metric.h" #include "api/test/simulated_network.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "api/video/video_bitrate_allocation.h" #include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/video_encoder_config.h" #include "call/call.h" #include "call/fake_network_pipe.h" #include "call/simulated_network.h" @@ -33,7 +37,6 @@ #include "rtc_base/checks.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/metrics.h" @@ -48,17 +51,23 @@ #include "test/null_transport.h" #include "test/rtp_rtcp_observer.h" #include "test/testsupport/file_utils.h" -#include "test/testsupport/perf_test.h" #include "test/video_encoder_proxy_factory.h" +#include "video/config/video_encoder_config.h" #include "video/transport_adapter.h" using webrtc::test::DriftingClock; namespace webrtc { namespace { + +using ::webrtc::test::GetGlobalMetricsLogger; +using ::webrtc::test::ImprovementDirection; +using ::webrtc::test::Unit; + enum : int { // The first valid value is 1. 
kTransportSequenceNumberExtensionId = 1, }; + } // namespace class CallPerfTest : public test::CallTest { @@ -76,7 +85,7 @@ class CallPerfTest : public test::CallTest { float video_ntp_speed, float video_rtp_speed, float audio_rtp_speed, - const std::string& test_label); + absl::string_view test_label); void TestMinTransmitBitrate(bool pad_to_min_bitrate); @@ -91,7 +100,7 @@ class CallPerfTest : public test::CallTest { int start_bwe, int max_bwe); void TestEncodeFramerate(VideoEncoderFactory* encoder_factory, - const std::string& payload_name, + absl::string_view payload_name, const std::vector& max_framerates); }; @@ -104,22 +113,22 @@ class VideoRtcpAndSyncObserver : public test::RtpRtcpObserver, public: explicit VideoRtcpAndSyncObserver(TaskQueueBase* task_queue, Clock* clock, - const std::string& test_label) - : test::RtpRtcpObserver(CallPerfTest::kLongTimeoutMs), + absl::string_view test_label) + : test::RtpRtcpObserver(CallPerfTest::kLongTimeout), clock_(clock), test_label_(test_label), creation_time_ms_(clock_->TimeInMilliseconds()), task_queue_(task_queue) {} void OnFrame(const VideoFrame& video_frame) override { - task_queue_->PostTask(ToQueuedTask([this]() { CheckStats(); })); + task_queue_->PostTask([this]() { CheckStats(); }); } void CheckStats() { if (!receive_stream_) return; - VideoReceiveStream::Stats stats = receive_stream_->GetStats(); + VideoReceiveStreamInterface::Stats stats = receive_stream_->GetStats(); if (stats.sync_offset_ms == std::numeric_limits::max()) return; @@ -132,26 +141,28 @@ class VideoRtcpAndSyncObserver : public test::RtpRtcpObserver, if (std::abs(stats.sync_offset_ms) < kInSyncThresholdMs) { if (first_time_in_sync_ == -1) { first_time_in_sync_ = now_ms; - webrtc::test::PrintResult("sync_convergence_time", test_label_, - "synchronization", time_since_creation, "ms", - false); + GetGlobalMetricsLogger()->LogSingleValueMetric( + "sync_convergence_time" + test_label_, "synchronization", + time_since_creation, Unit::kMilliseconds, + ImprovementDirection::kSmallerIsBetter); } if (time_since_creation > kMinRunTimeMs) observation_complete_.Set(); } if (first_time_in_sync_ != -1) - sync_offset_ms_list_.push_back(stats.sync_offset_ms); + sync_offset_ms_list_.AddSample(stats.sync_offset_ms); } - void set_receive_stream(VideoReceiveStream* receive_stream) { + void set_receive_stream(VideoReceiveStreamInterface* receive_stream) { RTC_DCHECK_EQ(task_queue_, TaskQueueBase::Current()); // Note that receive_stream may be nullptr. 
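
// Several signatures in this test file (test labels, payload names, sync
// groups) switch from `const std::string&` to `absl::string_view`, which
// accepts string literals, std::string objects and substrings without forcing
// an allocation. A short illustration using std::string_view as a stand-in
// for absl::string_view.
#include <iostream>
#include <string>
#include <string_view>

// Read-only string parameter: callers that own a std::string pass a view of
// it, and callers that pass a literal avoid creating a temporary std::string.
void PrintLabel(std::string_view test_label) {
  std::cout << "label: " << test_label << "\n";
}

int main() {
  std::string owned = "_video_drift";
  PrintLabel(owned);           // Views the existing buffer.
  PrintLabel("_audio_drift");  // No temporary std::string is created.
  PrintLabel(std::string_view(owned).substr(1, 5));  // Cheap sub-view.
}
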
receive_stream_ = receive_stream; } void PrintResults() { - test::PrintResultList("stream_offset", test_label_, "synchronization", - sync_offset_ms_list_, "ms", false); + GetGlobalMetricsLogger()->LogMetric( + "stream_offset" + test_label_, "synchronization", sync_offset_ms_list_, + Unit::kMilliseconds, ImprovementDirection::kNeitherIsBetter); } private: @@ -159,8 +170,8 @@ class VideoRtcpAndSyncObserver : public test::RtpRtcpObserver, const std::string test_label_; const int64_t creation_time_ms_; int64_t first_time_in_sync_ = -1; - VideoReceiveStream* receive_stream_ = nullptr; - std::vector sync_offset_ms_list_; + VideoReceiveStreamInterface* receive_stream_ = nullptr; + SamplesStatsCounter sync_offset_ms_list_; TaskQueueBase* const task_queue_; }; @@ -169,7 +180,7 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, float video_ntp_speed, float video_rtp_speed, float audio_rtp_speed, - const std::string& test_label) { + absl::string_view test_label) { const char* kSyncGroup = "av_sync"; const uint32_t kAudioSendSsrc = 1234; const uint32_t kAudioRecvSsrc = 5678; @@ -189,10 +200,10 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, std::unique_ptr receive_transport; AudioSendStream* audio_send_stream; - AudioReceiveStream* audio_receive_stream; + AudioReceiveStreamInterface* audio_receive_stream; std::unique_ptr drifting_clock; - SendTask(RTC_FROM_HERE, task_queue(), [&]() { + SendTask(task_queue(), [&]() { metrics::Reset(); rtc::scoped_refptr fake_audio_device = TestAudioDeviceModule::Create( @@ -272,7 +283,7 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, video_receive_configs_[0].renderer = observer.get(); video_receive_configs_[0].sync_group = kSyncGroup; - AudioReceiveStream::Config audio_recv_config; + AudioReceiveStreamInterface::Config audio_recv_config; audio_recv_config.rtp.remote_ssrc = kAudioSendSsrc; audio_recv_config.rtp.local_ssrc = kAudioRecvSsrc; audio_recv_config.rtcp_send_transport = receive_transport.get(); @@ -306,7 +317,7 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, EXPECT_TRUE(observer->Wait()) << "Timed out while waiting for audio and video to be synchronized."; - SendTask(RTC_FROM_HERE, task_queue(), [&]() { + SendTask(task_queue(), [&]() { // Clear the pointer to the receive stream since it will now be deleted. 
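
// The perf observers above stop collecting raw std::vector<double> samples for
// test::PrintResultList and instead feed a SamplesStatsCounter to the global
// metrics logger. A small self-contained counter with the operations this file
// relies on (AddSample, IsEmpty, GetAverage); the implementation is a
// simplified stand-in, not WebRTC's SamplesStatsCounter.
#include <cstddef>
#include <iostream>

class SamplesCounter {
 public:
  void AddSample(double value) {
    sum_ += value;
    ++count_;
  }
  bool IsEmpty() const { return count_ == 0; }
  double GetAverage() const { return IsEmpty() ? 0.0 : sum_ / count_; }
  size_t NumSamples() const { return count_; }

 private:
  double sum_ = 0.0;
  size_t count_ = 0;
};

int main() {
  SamplesCounter sync_offset_ms;
  for (double value : {12.0, 8.0, 10.0}) sync_offset_ms.AddSample(value);
  if (!sync_offset_ms.IsEmpty()) {
    // Stand-in for GetGlobalMetricsLogger()->LogMetric(...).
    std::cout << "stream_offset: avg=" << sync_offset_ms.GetAverage()
              << " ms over " << sync_offset_ms.NumSamples() << " samples\n";
  }
}
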
observer->set_receive_stream(nullptr); @@ -342,7 +353,7 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, } task_queue()->PostTask( - ToQueuedTask([to_delete = observer.release()]() { delete to_delete; })); + [to_delete = observer.release()]() { delete to_delete; }); } TEST_F(CallPerfTest, Synchronization_PlaysOutAudioAndVideoWithoutClockDrift) { @@ -386,7 +397,7 @@ void CallPerfTest::TestCaptureNtpTime( int threshold_ms, int start_time_ms, int run_time_ms) - : EndToEndTest(kLongTimeoutMs), + : EndToEndTest(kLongTimeout), net_config_(net_config), clock_(Clock::GetRealTimeClock()), threshold_ms_(threshold_ms), @@ -450,7 +461,7 @@ void CallPerfTest::TestCaptureNtpTime( uint32_t real_capture_timestamp = iter->second; int time_offset_ms = real_capture_timestamp - estimated_capture_timestamp; time_offset_ms = time_offset_ms / 90; - time_offset_ms_list_.push_back(time_offset_ms); + time_offset_ms_list_.AddSample(time_offset_ms); EXPECT_TRUE(std::abs(time_offset_ms) < threshold_ms_); } @@ -484,7 +495,7 @@ void CallPerfTest::TestCaptureNtpTime( void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { (*receive_configs)[0].renderer = this; // Enable the receiver side rtt calculation. @@ -494,8 +505,9 @@ void CallPerfTest::TestCaptureNtpTime( void PerformTest() override { EXPECT_TRUE(Wait()) << "Timed out while waiting for estimated capture " "NTP time to be within bounds."; - test::PrintResultList("capture_ntp_time", "", "real - estimated", - time_offset_ms_list_, "ms", true); + GetGlobalMetricsLogger()->LogMetric( + "capture_ntp_time", "real - estimated", time_offset_ms_list_, + Unit::kMilliseconds, ImprovementDirection::kNeitherIsBetter); } Mutex mutex_; @@ -510,7 +522,7 @@ void CallPerfTest::TestCaptureNtpTime( uint32_t rtp_start_timestamp_; typedef std::map FrameCaptureTimeList; FrameCaptureTimeList capture_time_list_ RTC_GUARDED_BY(&mutex_); - std::vector time_offset_ms_list_; + SamplesStatsCounter time_offset_ms_list_; } test(net_config, threshold_ms, start_time_ms, run_time_ms); RunBaseTest(&test); @@ -551,7 +563,7 @@ TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) { class LoadObserver : public test::SendTest, public test::FrameGeneratorCapturer::SinkWantsObserver { public: - LoadObserver() : SendTest(kLongTimeoutMs), test_phase_(TestPhase::kInit) {} + LoadObserver() : SendTest(kLongTimeout), test_phase_(TestPhase::kInit) {} void OnFrameGeneratorCapturerCreated( test::FrameGeneratorCapturer* frame_generator_capturer) override { @@ -628,7 +640,7 @@ TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override {} void PerformTest() override { @@ -660,7 +672,7 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) { public: explicit BitrateObserver(bool using_min_transmit_bitrate, TaskQueueBase* task_queue) - : EndToEndTest(kLongTimeoutMs), + : EndToEndTest(kLongTimeout), send_stream_(nullptr), converged_(false), pad_to_min_bitrate_(using_min_transmit_bitrate), @@ -679,7 +691,7 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) { private: // TODO(holmer): Run this with a timer instead of once per packet. 
Action OnSendRtp(const uint8_t* packet, size_t length) override { - task_queue_->PostTask(ToQueuedTask(task_safety_flag_, [this]() { + task_queue_->PostTask(SafeTask(task_safety_flag_, [this]() { VideoSendStream::Stats stats = send_stream_->GetStats(); if (!stats.substreams.empty()) { @@ -695,15 +707,15 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) { observation_complete_.Set(); } if (converged_) - bitrate_kbps_list_.push_back(bitrate_kbps); + bitrate_kbps_list_.AddSample(bitrate_kbps); } })); return SEND_PACKET; } - void OnVideoStreamsCreated( - VideoSendStream* send_stream, - const std::vector& receive_streams) override { + void OnVideoStreamsCreated(VideoSendStream* send_stream, + const std::vector& + receive_streams) override { send_stream_ = send_stream; } @@ -711,7 +723,7 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { if (pad_to_min_bitrate_) { encoder_config->min_transmit_bitrate_bps = kMinTransmitBitrateBps; @@ -722,11 +734,12 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) { void PerformTest() override { EXPECT_TRUE(Wait()) << "Timeout while waiting for send-bitrate stats."; - test::PrintResultList( - "bitrate_stats_", - (pad_to_min_bitrate_ ? "min_transmit_bitrate" - : "without_min_transmit_bitrate"), - "bitrate_kbps", bitrate_kbps_list_, "kbps", false); + GetGlobalMetricsLogger()->LogMetric( + std::string("bitrate_stats_") + + (pad_to_min_bitrate_ ? "min_transmit_bitrate" + : "without_min_transmit_bitrate"), + "bitrate_kbps", bitrate_kbps_list_, Unit::kUnitless, + ImprovementDirection::kNeitherIsBetter); } VideoSendStream* send_stream_; @@ -735,7 +748,7 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) { const int min_acceptable_bitrate_; const int max_acceptable_bitrate_; int num_bitrate_observations_in_range_; - std::vector bitrate_kbps_list_; + SamplesStatsCounter bitrate_kbps_list_; TaskQueueBase* task_queue_; rtc::scoped_refptr task_safety_flag_; } test(pad_to_min_bitrate, task_queue()); @@ -766,8 +779,8 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { // We get lower bitrate than expected by this test if the following field // trial is enabled. 
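
// The test just below swaps test::ScopedFieldTrials for
// test::ScopedKeyValueConfig; both scope a field-trial override to the test
// body and undo it afterwards. A minimal RAII sketch of that idea over a plain
// map; `ScopedTrialOverride` and `TrialMap` are illustrative names, not the
// WebRTC test helpers.
#include <iostream>
#include <map>
#include <string>

using TrialMap = std::map<std::string, std::string>;

// Overrides one trial for the lifetime of a scope and restores the previous
// state on destruction. Treating an empty previous value as "key was absent"
// is a simplification.
class ScopedTrialOverride {
 public:
  ScopedTrialOverride(TrialMap& trials, std::string key, std::string value)
      : trials_(trials), key_(std::move(key)) {
    auto it = trials_.find(key_);
    if (it != trials_.end()) previous_ = it->second;
    trials_[key_] = std::move(value);
  }
  ~ScopedTrialOverride() {
    if (previous_.empty())
      trials_.erase(key_);
    else
      trials_[key_] = previous_;
  }

 private:
  TrialMap& trials_;
  std::string key_;
  std::string previous_;
};

int main() {
  TrialMap trials;
  {
    ScopedTrialOverride guard(trials, "WebRTC-SendSideBwe-WithOverhead",
                              "Disabled");
    std::cout << trials.at("WebRTC-SendSideBwe-WithOverhead") << "\n";
  }
  std::cout << (trials.count("WebRTC-SendSideBwe-WithOverhead") ? "still set"
                                                                : "restored")
            << "\n";
}
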
- test::ScopedFieldTrials field_trials( - "WebRTC-SendSideBwe-WithOverhead/Disabled/"); + test::ScopedKeyValueConfig field_trials( + field_trials_, "WebRTC-SendSideBwe-WithOverhead/Disabled/"); class VideoStreamFactory : public VideoEncoderConfig::VideoStreamFactoryInterface { @@ -776,11 +789,11 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { private: std::vector CreateEncoderStreams( - int width, - int height, - const VideoEncoderConfig& encoder_config) override { + int frame_width, + int frame_height, + const webrtc::VideoEncoderConfig& encoder_config) override { std::vector streams = - test::CreateVideoStreams(width, height, encoder_config); + test::CreateVideoStreams(frame_width, frame_height, encoder_config); streams[0].min_bitrate_bps = 50000; streams[0].target_bitrate_bps = streams[0].max_bitrate_bps = 2000000; return streams; @@ -790,7 +803,7 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { class BitrateObserver : public test::EndToEndTest, public test::FakeEncoder { public: explicit BitrateObserver(TaskQueueBase* task_queue) - : EndToEndTest(kDefaultTimeoutMs), + : EndToEndTest(kDefaultTimeout), FakeEncoder(Clock::GetRealTimeClock()), encoder_inits_(0), last_set_bitrate_kbps_(0), @@ -842,7 +855,7 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->encoder_settings.encoder_factory = &encoder_factory_; send_config->encoder_settings.bitrate_allocator_factory = @@ -854,9 +867,9 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { encoder_config_ = encoder_config->Copy(); } - void OnVideoStreamsCreated( - VideoSendStream* send_stream, - const std::vector& receive_streams) override { + void OnVideoStreamsCreated(VideoSendStream* send_stream, + const std::vector& + receive_streams) override { send_stream_ = send_stream; } @@ -866,10 +879,10 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { } void PerformTest() override { - ASSERT_TRUE(time_to_reconfigure_.Wait(kDefaultTimeoutMs)) + ASSERT_TRUE(time_to_reconfigure_.Wait(kDefaultTimeout)) << "Timed out before receiving an initial high bitrate."; frame_generator_->ChangeResolution(kDefaultWidth * 2, kDefaultHeight * 2); - SendTask(RTC_FROM_HERE, task_queue_, [&]() { + SendTask(task_queue_, [&]() { send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy()); }); EXPECT_TRUE(Wait()) @@ -986,7 +999,7 @@ void CallPerfTest::TestMinAudioVideoBitrate(int test_bitrate_from, int64_t avg_rtt = 0; for (int i = 0; i < kBitrateMeasurements; i++) { Call::Stats call_stats; - SendTask(RTC_FROM_HERE, task_queue_, [this, &call_stats]() { + SendTask(task_queue_, [this, &call_stats]() { call_stats = sender_call_->GetStats(); }); avg_rtt += call_stats.rtt_ms; @@ -1002,8 +1015,9 @@ void CallPerfTest::TestMinAudioVideoBitrate(int test_bitrate_from, } EXPECT_GT(last_passed_test_bitrate, -1) << "Minimum supported bitrate out of the test scope"; - webrtc::test::PrintResult("min_test_bitrate_", "", "min_bitrate", - last_passed_test_bitrate, "kbps", false); + GetGlobalMetricsLogger()->LogSingleValueMetric( + "min_test_bitrate_", "min_bitrate", last_passed_test_bitrate, + Unit::kUnitless, ImprovementDirection::kNeitherIsBetter); } void OnCallsCreated(Call* sender_call, Call* receiver_call) override { @@ -1020,9 +1034,9 @@ void CallPerfTest::TestMinAudioVideoBitrate(int 
test_bitrate_from, size_t GetNumAudioStreams() const override { return 1; } - void ModifyAudioConfigs( - AudioSendStream::Config* send_config, - std::vector* receive_configs) override { + void ModifyAudioConfigs(AudioSendStream::Config* send_config, + std::vector* + receive_configs) override { send_config->send_codec_spec->target_bitrate_bps = absl::optional(kOpusBitrateFbBps); } @@ -1055,7 +1069,7 @@ TEST_F(CallPerfTest, MAYBE_Min_Bitrate_VideoAndAudio) { } void CallPerfTest::TestEncodeFramerate(VideoEncoderFactory* encoder_factory, - const std::string& payload_name, + absl::string_view payload_name, const std::vector& max_framerates) { static constexpr double kAllowedFpsDiff = 1.5; static constexpr TimeDelta kMinGetStatsInterval = TimeDelta::Millis(400); @@ -1067,10 +1081,10 @@ void CallPerfTest::TestEncodeFramerate(VideoEncoderFactory* encoder_factory, public test::FrameGeneratorCapturer::SinkWantsObserver { public: FramerateObserver(VideoEncoderFactory* encoder_factory, - const std::string& payload_name, + absl::string_view payload_name, const std::vector& max_framerates, TaskQueueBase* task_queue) - : EndToEndTest(kDefaultTimeoutMs), + : EndToEndTest(kDefaultTimeout), clock_(Clock::GetRealTimeClock()), encoder_factory_(encoder_factory), payload_name_(payload_name), @@ -1093,9 +1107,9 @@ void CallPerfTest::TestEncodeFramerate(VideoEncoderFactory* encoder_factory, bitrate_config->start_bitrate_bps = kMaxBitrate.bps() / 2; } - void OnVideoStreamsCreated( - VideoSendStream* send_stream, - const std::vector& receive_streams) override { + void OnVideoStreamsCreated(VideoSendStream* send_stream, + const std::vector& + receive_streams) override { send_stream_ = send_stream; } @@ -1105,7 +1119,7 @@ void CallPerfTest::TestEncodeFramerate(VideoEncoderFactory* encoder_factory, void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->encoder_settings.encoder_factory = encoder_factory_; send_config->rtp.payload_name = payload_name_; @@ -1129,11 +1143,14 @@ void CallPerfTest::TestEncodeFramerate(VideoEncoderFactory* encoder_factory, input_fps = std::max(configured_framerate.second, input_fps); } for (const auto& encode_frame_rate_list : encode_frame_rate_lists_) { - const std::vector& values = encode_frame_rate_list.second; - test::PrintResultList("substream", "", "encode_frame_rate", values, - "fps", false); - double average_fps = - std::accumulate(values.begin(), values.end(), 0.0) / values.size(); + const SamplesStatsCounter& values = encode_frame_rate_list.second; + GetGlobalMetricsLogger()->LogMetric( + "substream_fps", "encode_frame_rate", values, Unit::kUnitless, + ImprovementDirection::kNeitherIsBetter); + if (values.IsEmpty()) { + continue; + } + double average_fps = values.GetAverage(); uint32_t ssrc = encode_frame_rate_list.first; double expected_fps = configured_framerates_.find(ssrc)->second; if (expected_fps != input_fps) @@ -1145,17 +1162,17 @@ void CallPerfTest::TestEncodeFramerate(VideoEncoderFactory* encoder_factory, const Timestamp now = clock_->CurrentTime(); if (now - last_getstats_time_ > kMinGetStatsInterval) { last_getstats_time_ = now; - task_queue_->PostTask(ToQueuedTask([this, now]() { + task_queue_->PostTask([this, now]() { VideoSendStream::Stats stats = send_stream_->GetStats(); for (const auto& stat : stats.substreams) { - encode_frame_rate_lists_[stat.first].push_back( + encode_frame_rate_lists_[stat.first].AddSample( 
stat.second.encode_frame_rate); } if (now - start_time_ > kMinRunTime) { VerifyStats(); observation_complete_.Set(); } - })); + }); } return SEND_PACKET; } @@ -1168,7 +1185,7 @@ void CallPerfTest::TestEncodeFramerate(VideoEncoderFactory* encoder_factory, const Timestamp start_time_; Timestamp last_getstats_time_; VideoSendStream* send_stream_; - std::map> encode_frame_rate_lists_; + std::map encode_frame_rate_lists_; std::map configured_framerates_; } test(encoder_factory, payload_name, max_framerates, task_queue()); diff --git a/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc b/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc index 5462085490..c59a63ba69 100644 --- a/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc +++ b/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc @@ -13,18 +13,20 @@ #include #include -#include "rtc_base/location.h" +#include "absl/strings/string_view.h" +#include "modules/rtp_rtcp/source/rtp_util.h" +#include "rtc_base/event.h" namespace webrtc { DegradedCall::FakeNetworkPipeOnTaskQueue::FakeNetworkPipeOnTaskQueue( - TaskQueueFactory* task_queue_factory, + TaskQueueBase* task_queue, + rtc::scoped_refptr call_alive, Clock* clock, std::unique_ptr network_behavior) : clock_(clock), - task_queue_(task_queue_factory->CreateTaskQueue( - "DegradedSendQueue", - TaskQueueFactory::Priority::NORMAL)), + task_queue_(task_queue), + call_alive_(std::move(call_alive)), pipe_(clock, std::move(network_behavior)) {} void DegradedCall::FakeNetworkPipeOnTaskQueue::SendRtp( @@ -61,21 +63,22 @@ bool DegradedCall::FakeNetworkPipeOnTaskQueue::Process() { return false; } - task_queue_.PostTask([this, time_to_next]() { - RTC_DCHECK_RUN_ON(&task_queue_); + task_queue_->PostTask(SafeTask(call_alive_, [this, time_to_next] { + RTC_DCHECK_RUN_ON(task_queue_); int64_t next_process_time = *time_to_next + clock_->TimeInMilliseconds(); if (!next_process_ms_ || next_process_time < *next_process_ms_) { next_process_ms_ = next_process_time; - task_queue_.PostDelayedTask( - [this]() { - RTC_DCHECK_RUN_ON(&task_queue_); - if (!Process()) { - next_process_ms_.reset(); - } - }, - *time_to_next); + task_queue_->PostDelayedHighPrecisionTask( + SafeTask(call_alive_, + [this] { + RTC_DCHECK_RUN_ON(task_queue_); + if (!Process()) { + next_process_ms_.reset(); + } + }), + TimeDelta::Millis(*time_to_next)); } - }); + })); return true; } @@ -125,37 +128,112 @@ bool DegradedCall::FakeNetworkPipeTransportAdapter::SendRtcp( return true; } +DegradedCall::ThreadedPacketReceiver::ThreadedPacketReceiver( + webrtc::TaskQueueBase* worker_thread, + webrtc::TaskQueueBase* network_thread, + rtc::scoped_refptr call_alive, + webrtc::PacketReceiver* receiver) + : worker_thread_(worker_thread), + network_thread_(network_thread), + call_alive_(std::move(call_alive)), + receiver_(receiver) {} + +DegradedCall::ThreadedPacketReceiver::~ThreadedPacketReceiver() = default; + +PacketReceiver::DeliveryStatus +DegradedCall::ThreadedPacketReceiver::DeliverPacket( + MediaType media_type, + rtc::CopyOnWriteBuffer packet, + int64_t packet_time_us) { + // `Call::DeliverPacket` expects RTCP packets to be delivered from the + // network thread and RTP packets to be delivered from the worker thread. + // Because `FakeNetworkPipe` queues packets, the thread used when this packet + // is delivered to `DegradedCall::DeliverPacket` may differ from the thread + // used when this packet is delivered to + // `ThreadedPacketReceiver::DeliverPacket`. To solve this problem, always + // make sure that packets are sent in the correct thread. 
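
// The comment above (and the DeliverPacket() implementation that follows)
// re-posts a queued packet so that RTCP always reaches the wrapped receiver on
// the network thread and RTP on the worker thread. A simplified
// standard-library model of that routing; `TaskLoop`, `Packet` and `Deliver`
// are stand-ins for TaskQueueBase, the RTP/RTCP packet types and
// ThreadedPacketReceiver.
#include <chrono>
#include <condition_variable>
#include <deque>
#include <functional>
#include <iostream>
#include <mutex>
#include <thread>

// A tiny serial task queue: tasks run one at a time on a dedicated thread.
class TaskLoop {
 public:
  TaskLoop() : thread_([this] { Run(); }) {}
  ~TaskLoop() {
    Post(nullptr);  // An empty task acts as the stop marker.
    thread_.join();
  }
  void Post(std::function<void()> task) {
    std::lock_guard<std::mutex> lock(mutex_);
    tasks_.push_back(std::move(task));
    cv_.notify_one();
  }
  bool IsCurrent() const {
    return std::this_thread::get_id() == thread_.get_id();
  }

 private:
  void Run() {
    while (true) {
      std::function<void()> task;
      {
        std::unique_lock<std::mutex> lock(mutex_);
        cv_.wait(lock, [this] { return !tasks_.empty(); });
        task = std::move(tasks_.front());
        tasks_.pop_front();
      }
      if (!task) return;
      task();
    }
  }

  std::mutex mutex_;
  std::condition_variable cv_;
  std::deque<std::function<void()>> tasks_;
  std::thread thread_;
};

struct Packet {
  bool is_rtcp = false;
};

// RTCP goes to the "network" loop, RTP to the "worker" loop; when called from
// the wrong thread the delivery is re-posted, mirroring the rule above.
void Deliver(TaskLoop& network, TaskLoop& worker, Packet packet) {
  TaskLoop& target = packet.is_rtcp ? network : worker;
  if (!target.IsCurrent()) {
    target.Post(
        [&network, &worker, packet] { Deliver(network, worker, packet); });
    return;
  }
  std::cout << (packet.is_rtcp ? "RTCP on network thread\n"
                               : "RTP on worker thread\n");
}

int main() {
  TaskLoop network;
  TaskLoop worker;
  Deliver(network, worker, Packet{/*is_rtcp=*/true});
  Deliver(network, worker, Packet{/*is_rtcp=*/false});
  std::this_thread::sleep_for(std::chrono::milliseconds(100));
}
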
+ if (IsRtcpPacket(packet)) { + if (!network_thread_->IsCurrent()) { + network_thread_->PostTask( + SafeTask(call_alive_, [receiver = receiver_, media_type, + packet = std::move(packet), packet_time_us]() { + receiver->DeliverPacket(media_type, std::move(packet), + packet_time_us); + })); + return DELIVERY_OK; + } + } else { + if (!worker_thread_->IsCurrent()) { + worker_thread_->PostTask([receiver = receiver_, media_type, + packet = std::move(packet), packet_time_us]() { + receiver->DeliverPacket(media_type, std::move(packet), packet_time_us); + }); + return DELIVERY_OK; + } + } + + return receiver_->DeliverPacket(media_type, std::move(packet), + packet_time_us); +} + DegradedCall::DegradedCall( std::unique_ptr call, - absl::optional send_config, - absl::optional receive_config, - TaskQueueFactory* task_queue_factory) + const std::vector& send_configs, + const std::vector& receive_configs) : clock_(Clock::GetRealTimeClock()), call_(std::move(call)), - task_queue_factory_(task_queue_factory), - send_config_(send_config), + call_alive_(PendingTaskSafetyFlag::CreateDetached()), + send_config_index_(0), + send_configs_(send_configs), send_simulated_network_(nullptr), - receive_config_(receive_config) { - if (receive_config_) { - auto network = std::make_unique(*receive_config_); + receive_config_index_(0), + receive_configs_(receive_configs) { + if (!receive_configs_.empty()) { + auto network = std::make_unique(receive_configs_[0]); receive_simulated_network_ = network.get(); receive_pipe_ = std::make_unique(clock_, std::move(network)); - receive_pipe_->SetReceiver(call_->Receiver()); + packet_receiver_ = std::make_unique( + call_->worker_thread(), call_->network_thread(), call_alive_, + call_->Receiver()); + receive_pipe_->SetReceiver(packet_receiver_.get()); + if (receive_configs_.size() > 1) { + call_->network_thread()->PostDelayedTask( + SafeTask(call_alive_, [this] { UpdateReceiveNetworkConfig(); }), + receive_configs_[0].duration); + } } - if (send_config_) { - auto network = std::make_unique(*send_config_); + if (!send_configs_.empty()) { + auto network = std::make_unique(send_configs_[0]); send_simulated_network_ = network.get(); send_pipe_ = std::make_unique( - task_queue_factory_, clock_, std::move(network)); + call_->network_thread(), call_alive_, clock_, std::move(network)); + if (send_configs_.size() > 1) { + call_->network_thread()->PostDelayedTask( + SafeTask(call_alive_, [this] { UpdateSendNetworkConfig(); }), + send_configs_[0].duration); + } } } -DegradedCall::~DegradedCall() = default; +DegradedCall::~DegradedCall() { + RTC_DCHECK_RUN_ON(call_->worker_thread()); + // Thread synchronization is required to call `SetNotAlive`. + // Otherwise, when the `DegradedCall` object is destroyed but + // `SetNotAlive` has not yet been called, + // another Closure guarded by `call_alive_` may be called. 
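
// The destructor body that follows posts a closure to the network thread that
// flips the safety flag and then blocks on an event, so no flag-guarded task
// can start once ~DegradedCall() has returned. A standard-library model of
// that handshake; the queue-as-vector setup is a simplification and the names
// are assumptions.
#include <atomic>
#include <functional>
#include <future>
#include <iostream>
#include <memory>
#include <thread>
#include <vector>

int main() {
  auto alive = std::make_shared<std::atomic<bool>>(true);

  // Tasks queued for the "network thread", executed strictly in order.
  std::vector<std::function<void()>> network_queue;

  // A closure guarded by the flag, like the SafeTask-wrapped lambdas above.
  network_queue.push_back([alive] {
    if (alive->load()) std::cout << "guarded task ran\n";
  });

  // Destructor-style shutdown: append a task that clears the flag and signals
  // completion, then block until it has run. Everything queued earlier still
  // runs; anything queued later observes the cleared flag and is dropped.
  std::promise<void> flag_cleared;
  network_queue.push_back([alive, &flag_cleared] {
    alive->store(false);
    flag_cleared.set_value();
  });

  std::thread network([&network_queue] {
    for (auto& task : network_queue) task();
  });
  flag_cleared.get_future().wait();  // Same role as event.Wait(kForever).
  std::cout << "safe to tear down\n";
  network.join();
}
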
+ rtc::Event event; + call_->network_thread()->PostTask( + [flag = std::move(call_alive_), &event]() mutable { + flag->SetNotAlive(); + event.Set(); + }); + event.Wait(rtc::Event::kForever); +} AudioSendStream* DegradedCall::CreateAudioSendStream( const AudioSendStream::Config& config) { - if (send_config_) { + if (!send_configs_.empty()) { auto transport_adapter = std::make_unique( send_pipe_.get(), call_.get(), clock_, config.send_transport); AudioSendStream::Config degrade_config = config; @@ -175,13 +253,13 @@ void DegradedCall::DestroyAudioSendStream(AudioSendStream* send_stream) { audio_send_transport_adapters_.erase(send_stream); } -AudioReceiveStream* DegradedCall::CreateAudioReceiveStream( - const AudioReceiveStream::Config& config) { +AudioReceiveStreamInterface* DegradedCall::CreateAudioReceiveStream( + const AudioReceiveStreamInterface::Config& config) { return call_->CreateAudioReceiveStream(config); } void DegradedCall::DestroyAudioReceiveStream( - AudioReceiveStream* receive_stream) { + AudioReceiveStreamInterface* receive_stream) { call_->DestroyAudioReceiveStream(receive_stream); } @@ -189,7 +267,7 @@ VideoSendStream* DegradedCall::CreateVideoSendStream( VideoSendStream::Config config, VideoEncoderConfig encoder_config) { std::unique_ptr transport_adapter; - if (send_config_) { + if (!send_configs_.empty()) { transport_adapter = std::make_unique( send_pipe_.get(), call_.get(), clock_, config.send_transport); config.send_transport = transport_adapter.get(); @@ -207,7 +285,7 @@ VideoSendStream* DegradedCall::CreateVideoSendStream( VideoEncoderConfig encoder_config, std::unique_ptr fec_controller) { std::unique_ptr transport_adapter; - if (send_config_) { + if (!send_configs_.empty()) { transport_adapter = std::make_unique( send_pipe_.get(), call_.get(), clock_, config.send_transport); config.send_transport = transport_adapter.get(); @@ -225,19 +303,19 @@ void DegradedCall::DestroyVideoSendStream(VideoSendStream* send_stream) { video_send_transport_adapters_.erase(send_stream); } -VideoReceiveStream* DegradedCall::CreateVideoReceiveStream( - VideoReceiveStream::Config configuration) { +VideoReceiveStreamInterface* DegradedCall::CreateVideoReceiveStream( + VideoReceiveStreamInterface::Config configuration) { return call_->CreateVideoReceiveStream(std::move(configuration)); } void DegradedCall::DestroyVideoReceiveStream( - VideoReceiveStream* receive_stream) { + VideoReceiveStreamInterface* receive_stream) { call_->DestroyVideoReceiveStream(receive_stream); } FlexfecReceiveStream* DegradedCall::CreateFlexfecReceiveStream( - const FlexfecReceiveStream::Config& config) { - return call_->CreateFlexfecReceiveStream(config); + const FlexfecReceiveStream::Config config) { + return call_->CreateFlexfecReceiveStream(std::move(config)); } void DegradedCall::DestroyFlexfecReceiveStream( @@ -251,7 +329,7 @@ void DegradedCall::AddAdaptationResource( } PacketReceiver* DegradedCall::Receiver() { - if (receive_config_) { + if (!receive_configs_.empty()) { return this; } return call_->Receiver(); @@ -266,7 +344,7 @@ Call::Stats DegradedCall::GetStats() const { return call_->GetStats(); } -const WebRtcKeyValueConfig& DegradedCall::trials() const { +const FieldTrialsView& DegradedCall::trials() const { return call_->trials(); } @@ -288,18 +366,28 @@ void DegradedCall::OnAudioTransportOverheadChanged( call_->OnAudioTransportOverheadChanged(transport_overhead_per_packet); } -void DegradedCall::OnLocalSsrcUpdated(AudioReceiveStream& stream, +void 
DegradedCall::OnLocalSsrcUpdated(AudioReceiveStreamInterface& stream, uint32_t local_ssrc) { call_->OnLocalSsrcUpdated(stream, local_ssrc); } -void DegradedCall::OnUpdateSyncGroup(AudioReceiveStream& stream, - const std::string& sync_group) { +void DegradedCall::OnLocalSsrcUpdated(VideoReceiveStreamInterface& stream, + uint32_t local_ssrc) { + call_->OnLocalSsrcUpdated(stream, local_ssrc); +} + +void DegradedCall::OnLocalSsrcUpdated(FlexfecReceiveStream& stream, + uint32_t local_ssrc) { + call_->OnLocalSsrcUpdated(stream, local_ssrc); +} + +void DegradedCall::OnUpdateSyncGroup(AudioReceiveStreamInterface& stream, + absl::string_view sync_group) { call_->OnUpdateSyncGroup(stream, sync_group); } void DegradedCall::OnSentPacket(const rtc::SentPacket& sent_packet) { - if (send_config_) { + if (!send_configs_.empty()) { // If we have a degraded send-transport, we have already notified call // about the supposed network send time. Discard the actual network send // time in order to properly fool the BWE. @@ -325,4 +413,26 @@ PacketReceiver::DeliveryStatus DegradedCall::DeliverPacket( receive_pipe_->Process(); return status; } + +void DegradedCall::SetClientBitratePreferences( + const webrtc::BitrateSettings& preferences) { + call_->SetClientBitratePreferences(preferences); +} + +void DegradedCall::UpdateSendNetworkConfig() { + send_config_index_ = (send_config_index_ + 1) % send_configs_.size(); + send_simulated_network_->SetConfig(send_configs_[send_config_index_]); + call_->network_thread()->PostDelayedTask( + SafeTask(call_alive_, [this] { UpdateSendNetworkConfig(); }), + send_configs_[send_config_index_].duration); +} + +void DegradedCall::UpdateReceiveNetworkConfig() { + receive_config_index_ = (receive_config_index_ + 1) % receive_configs_.size(); + receive_simulated_network_->SetConfig( + receive_configs_[receive_config_index_]); + call_->network_thread()->PostDelayedTask( + SafeTask(call_alive_, [this] { UpdateReceiveNetworkConfig(); }), + receive_configs_[receive_config_index_].duration); +} } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/degraded_call.h b/TMessagesProj/jni/voip/webrtc/call/degraded_call.h index 70dc126807..5906e557f1 100644 --- a/TMessagesProj/jni/voip/webrtc/call/degraded_call.h +++ b/TMessagesProj/jni/voip/webrtc/call/degraded_call.h @@ -17,14 +17,16 @@ #include #include #include +#include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/call/transport.h" #include "api/fec_controller.h" #include "api/media_types.h" #include "api/rtp_headers.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/test/simulated_network.h" -#include "api/video_codecs/video_encoder_config.h" #include "call/audio_receive_stream.h" #include "call/audio_send_stream.h" #include "call/call.h" @@ -35,20 +37,23 @@ #include "call/simulated_network.h" #include "call/video_receive_stream.h" #include "call/video_send_stream.h" -#include "modules/utility/include/process_thread.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/task_queue.h" #include "system_wrappers/include/clock.h" +#include "video/config/video_encoder_config.h" namespace webrtc { class DegradedCall : public Call, private PacketReceiver { public: + struct TimeScopedNetworkConfig : public BuiltInNetworkBehaviorConfig { + TimeDelta duration = TimeDelta::PlusInfinity(); + }; + explicit DegradedCall( std::unique_ptr call, - absl::optional send_config, - absl::optional receive_config, - TaskQueueFactory* 
task_queue_factory); + const std::vector& send_configs, + const std::vector& receive_configs); ~DegradedCall() override; // Implements Call. @@ -56,9 +61,10 @@ class DegradedCall : public Call, private PacketReceiver { const AudioSendStream::Config& config) override; void DestroyAudioSendStream(AudioSendStream* send_stream) override; - AudioReceiveStream* CreateAudioReceiveStream( - const AudioReceiveStream::Config& config) override; - void DestroyAudioReceiveStream(AudioReceiveStream* receive_stream) override; + AudioReceiveStreamInterface* CreateAudioReceiveStream( + const AudioReceiveStreamInterface::Config& config) override; + void DestroyAudioReceiveStream( + AudioReceiveStreamInterface* receive_stream) override; VideoSendStream* CreateVideoSendStream( VideoSendStream::Config config, @@ -69,12 +75,13 @@ class DegradedCall : public Call, private PacketReceiver { std::unique_ptr fec_controller) override; void DestroyVideoSendStream(VideoSendStream* send_stream) override; - VideoReceiveStream* CreateVideoReceiveStream( - VideoReceiveStream::Config configuration) override; - void DestroyVideoReceiveStream(VideoReceiveStream* receive_stream) override; + VideoReceiveStreamInterface* CreateVideoReceiveStream( + VideoReceiveStreamInterface::Config configuration) override; + void DestroyVideoReceiveStream( + VideoReceiveStreamInterface* receive_stream) override; FlexfecReceiveStream* CreateFlexfecReceiveStream( - const FlexfecReceiveStream::Config& config) override; + const FlexfecReceiveStream::Config config) override; void DestroyFlexfecReceiveStream( FlexfecReceiveStream* receive_stream) override; @@ -86,7 +93,7 @@ class DegradedCall : public Call, private PacketReceiver { Stats GetStats() const override; - const WebRtcKeyValueConfig& trials() const override; + const FieldTrialsView& trials() const override; TaskQueueBase* network_thread() const override; TaskQueueBase* worker_thread() const override; @@ -94,10 +101,14 @@ class DegradedCall : public Call, private PacketReceiver { void SignalChannelNetworkState(MediaType media, NetworkState state) override; void OnAudioTransportOverheadChanged( int transport_overhead_per_packet) override; - void OnLocalSsrcUpdated(AudioReceiveStream& stream, + void OnLocalSsrcUpdated(AudioReceiveStreamInterface& stream, + uint32_t local_ssrc) override; + void OnLocalSsrcUpdated(VideoReceiveStreamInterface& stream, + uint32_t local_ssrc) override; + void OnLocalSsrcUpdated(FlexfecReceiveStream& stream, uint32_t local_ssrc) override; - void OnUpdateSyncGroup(AudioReceiveStream& stream, - const std::string& sync_group) override; + void OnUpdateSyncGroup(AudioReceiveStreamInterface& stream, + absl::string_view sync_group) override; void OnSentPacket(const rtc::SentPacket& sent_packet) override; protected: @@ -110,7 +121,8 @@ class DegradedCall : public Call, private PacketReceiver { class FakeNetworkPipeOnTaskQueue { public: FakeNetworkPipeOnTaskQueue( - TaskQueueFactory* task_queue_factory, + TaskQueueBase* task_queue, + rtc::scoped_refptr call_alive, Clock* clock, std::unique_ptr network_behavior); @@ -129,11 +141,31 @@ class DegradedCall : public Call, private PacketReceiver { bool Process(); Clock* const clock_; - rtc::TaskQueue task_queue_; + TaskQueueBase* const task_queue_; + rtc::scoped_refptr call_alive_; FakeNetworkPipe pipe_; absl::optional next_process_ms_ RTC_GUARDED_BY(&task_queue_); }; + class ThreadedPacketReceiver : public PacketReceiver { + public: + ThreadedPacketReceiver(webrtc::TaskQueueBase* worker_thread, + webrtc::TaskQueueBase* 
network_thread, + rtc::scoped_refptr call_alive, + PacketReceiver* receiver); + ~ThreadedPacketReceiver() override; + + DeliveryStatus DeliverPacket(MediaType media_type, + rtc::CopyOnWriteBuffer packet, + int64_t packet_time_us) override; + + private: + webrtc::TaskQueueBase* const worker_thread_; + webrtc::TaskQueueBase* const network_thread_; + rtc::scoped_refptr call_alive_; + webrtc::PacketReceiver* const receiver_; + }; + // For audio/video send stream, a TransportAdapter instance is used to // intercept packets to be sent, and put them into a common FakeNetworkPipe // in such as way that they will eventually (unless dropped) be forwarded to @@ -158,14 +190,17 @@ class DegradedCall : public Call, private PacketReceiver { Transport* const real_transport_; }; - Clock* const clock_; - const std::unique_ptr call_; - TaskQueueFactory* const task_queue_factory_; - void SetClientBitratePreferences( - const webrtc::BitrateSettings& preferences) override {} + const webrtc::BitrateSettings& preferences) override; + void UpdateSendNetworkConfig(); + void UpdateReceiveNetworkConfig(); - const absl::optional send_config_; + Clock* const clock_; + const std::unique_ptr call_; + // For cancelling tasks on the network thread when DegradedCall is destroyed + rtc::scoped_refptr call_alive_; + size_t send_config_index_; + const std::vector send_configs_; SimulatedNetwork* send_simulated_network_; std::unique_ptr send_pipe_; std::map> @@ -173,9 +208,11 @@ class DegradedCall : public Call, private PacketReceiver { std::map> video_send_transport_adapters_; - const absl::optional receive_config_; + size_t receive_config_index_; + const std::vector receive_configs_; SimulatedNetwork* receive_simulated_network_; std::unique_ptr receive_pipe_; + std::unique_ptr packet_receiver_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.cc b/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.cc index 4b5579dfc8..8a03e0ce7a 100644 --- a/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.cc +++ b/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.cc @@ -18,7 +18,6 @@ #include #include "api/media_types.h" -#include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "system_wrappers/include/clock.h" diff --git a/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.h b/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.h index fadae337f5..be72e91637 100644 --- a/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.h +++ b/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.h @@ -23,7 +23,6 @@ #include "api/test/simulated_network.h" #include "call/call.h" #include "call/simulated_packet_receiver.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -109,6 +108,9 @@ class FakeNetworkPipe : public SimulatedPacketReceiverInterface { ~FakeNetworkPipe() override; + FakeNetworkPipe(const FakeNetworkPipe&) = delete; + FakeNetworkPipe& operator=(const FakeNetworkPipe&) = delete; + void SetClockOffset(int64_t offset_ms); // Must not be called in parallel with DeliverPacket or Process. 
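// Editor's sketch (not part of the patch): the degraded_call changes above
// replace the single optional send/receive config with vectors of
// DegradedCall::TimeScopedNetworkConfig, each entry carrying a duration.
// DegradedCall::UpdateSendNetworkConfig()/UpdateReceiveNetworkConfig() step
// through the vector round-robin and re-post themselves via
// PostDelayedTask(SafeTask(call_alive_, ...), duration). The stand-alone
// helper below is hypothetical and only illustrates that index arithmetic.
#include <cstddef>
#include <vector>

struct TimeScopedConfigSketch {
  int duration_ms = 0;  // stand-in for the TimeDelta duration field
};

// Advances `index` to the next config (wrapping so the sequence loops forever)
// and returns how long to wait before rotating again.
int RotateNetworkConfig(const std::vector<TimeScopedConfigSketch>& configs,
                        size_t& index) {
  index = (index + 1) % configs.size();
  return configs[index].duration_ms;
}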
@@ -228,8 +230,6 @@ class FakeNetworkPipe : public SimulatedPacketReceiverInterface { int64_t last_log_time_us_; std::map active_transports_ RTC_GUARDED_BY(config_lock_); - - RTC_DISALLOW_COPY_AND_ASSIGN(FakeNetworkPipe); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream.h b/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream.h index 72e544e7ec..4f6fe44afa 100644 --- a/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream.h @@ -25,17 +25,10 @@ namespace webrtc { class FlexfecReceiveStream : public RtpPacketSinkInterface, - public ReceiveStream { + public ReceiveStreamInterface { public: ~FlexfecReceiveStream() override = default; - struct Stats { - std::string ToString(int64_t time_ms) const; - - // TODO(brandtr): Add appropriate stats here. - int flexfec_bitrate_bps; - }; - struct Config { explicit Config(Transport* rtcp_send_transport); Config(const Config&); @@ -50,7 +43,7 @@ class FlexfecReceiveStream : public RtpPacketSinkInterface, // Payload type for FlexFEC. int payload_type = -1; - RtpConfig rtp; + ReceiveStreamRtpConfig rtp; // Vector containing a single element, corresponding to the SSRC of the // media stream being protected by this FlexFEC stream. The vector MUST have @@ -67,7 +60,15 @@ class FlexfecReceiveStream : public RtpPacketSinkInterface, Transport* rtcp_send_transport = nullptr; }; - virtual Stats GetStats() const = 0; + // TODO(tommi): FlexfecReceiveStream inherits from ReceiveStreamInterface, + // not VideoReceiveStreamInterface where there's also a SetRtcpMode method. + // Perhaps this should be in ReceiveStreamInterface and apply to audio streams + // as well (although there's no logic that would use it at present). + virtual void SetRtcpMode(RtcpMode mode) = 0; + + // Called to change the payload type after initialization. 
+ virtual void SetPayloadType(int payload_type) = 0; + virtual int payload_type() const = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream_impl.cc b/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream_impl.cc index eda5c7f05d..db8b7e7edb 100644 --- a/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream_impl.cc @@ -23,23 +23,13 @@ #include "modules/rtp_rtcp/include/flexfec_receiver.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" #include "system_wrappers/include/clock.h" namespace webrtc { -std::string FlexfecReceiveStream::Stats::ToString(int64_t time_ms) const { - char buf[1024]; - rtc::SimpleStringBuilder ss(buf); - ss << "FlexfecReceiveStream stats: " << time_ms - << ", {flexfec_bitrate_bps: " << flexfec_bitrate_bps << "}"; - return ss.str(); -} - std::string FlexfecReceiveStream::Config::ToString() const { char buf[1024]; rtc::SimpleStringBuilder ss(buf); @@ -138,28 +128,31 @@ std::unique_ptr CreateRtpRtcpModule( FlexfecReceiveStreamImpl::FlexfecReceiveStreamImpl( Clock* clock, - const Config& config, + Config config, RecoveredPacketReceiver* recovered_packet_receiver, RtcpRttStats* rtt_stats) - : config_(config), - receiver_(MaybeCreateFlexfecReceiver(clock, - config_, - recovered_packet_receiver)), + : extension_map_(std::move(config.rtp.extensions)), + remote_ssrc_(config.rtp.remote_ssrc), + transport_cc_(config.rtp.transport_cc), + payload_type_(config.payload_type), + receiver_( + MaybeCreateFlexfecReceiver(clock, config, recovered_packet_receiver)), rtp_receive_statistics_(ReceiveStatistics::Create(clock)), rtp_rtcp_(CreateRtpRtcpModule(clock, rtp_receive_statistics_.get(), - config_, + config, rtt_stats)) { - RTC_LOG(LS_INFO) << "FlexfecReceiveStreamImpl: " << config_.ToString(); + RTC_LOG(LS_INFO) << "FlexfecReceiveStreamImpl: " << config.ToString(); + RTC_DCHECK_GE(payload_type_, -1); packet_sequence_checker_.Detach(); // RTCP reporting. - rtp_rtcp_->SetRTCPStatus(config_.rtcp_mode); + rtp_rtcp_->SetRTCPStatus(config.rtcp_mode); } FlexfecReceiveStreamImpl::~FlexfecReceiveStreamImpl() { - RTC_LOG(LS_INFO) << "~FlexfecReceiveStreamImpl: " << config_.ToString(); + RTC_DLOG(LS_INFO) << "~FlexfecReceiveStreamImpl: ssrc: " << remote_ssrc_; } void FlexfecReceiveStreamImpl::RegisterWithTransport( @@ -175,7 +168,7 @@ void FlexfecReceiveStreamImpl::RegisterWithTransport( // here at all, we'd then delete the OnRtpPacket method and instead register // `receiver_` as the RtpPacketSinkInterface for this stream. rtp_stream_receiver_ = - receiver_controller->CreateReceiver(config_.rtp.remote_ssrc, this); + receiver_controller->CreateReceiver(remote_ssrc(), this); } void FlexfecReceiveStreamImpl::UnregisterFromTransport() { @@ -191,21 +184,39 @@ void FlexfecReceiveStreamImpl::OnRtpPacket(const RtpPacketReceived& packet) { receiver_->OnRtpPacket(packet); // Do not report media packets in the RTCP RRs generated by `rtp_rtcp_`. - if (packet.Ssrc() == config_.rtp.remote_ssrc) { + if (packet.Ssrc() == remote_ssrc()) { rtp_receive_statistics_->OnRtpPacket(packet); } } -// TODO(brandtr): Implement this member function when we have designed the -// stats for FlexFEC. 
-FlexfecReceiveStreamImpl::Stats FlexfecReceiveStreamImpl::GetStats() const { - return FlexfecReceiveStream::Stats(); +void FlexfecReceiveStreamImpl::SetPayloadType(int payload_type) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + RTC_DCHECK_GE(payload_type, -1); + payload_type_ = payload_type; +} + +int FlexfecReceiveStreamImpl::payload_type() const { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + return payload_type_; } void FlexfecReceiveStreamImpl::SetRtpExtensions( std::vector extensions) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - config_.rtp.extensions = std::move(extensions); + extension_map_.Reset(extensions); +} + +RtpHeaderExtensionMap FlexfecReceiveStreamImpl::GetRtpExtensionMap() const { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + return extension_map_; +} + +void FlexfecReceiveStreamImpl::SetLocalSsrc(uint32_t local_ssrc) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + if (local_ssrc == rtp_rtcp_->local_media_ssrc()) + return; + + rtp_rtcp_->SetLocalSsrc(local_ssrc); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream_impl.h b/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream_impl.h index c2407cd419..9cb383afee 100644 --- a/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream_impl.h +++ b/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream_impl.h @@ -34,7 +34,7 @@ class RtpStreamReceiverInterface; class FlexfecReceiveStreamImpl : public FlexfecReceiveStream { public: FlexfecReceiveStreamImpl(Clock* clock, - const Config& config, + Config config, RecoveredPacketReceiver* recovered_packet_receiver, RtcpRttStats* rtt_stats); // Destruction happens on the worker thread. Prior to destruction the caller @@ -56,17 +56,45 @@ class FlexfecReceiveStreamImpl : public FlexfecReceiveStream { // RtpPacketSinkInterface. void OnRtpPacket(const RtpPacketReceived& packet) override; - Stats GetStats() const override; + void SetPayloadType(int payload_type) override; + int payload_type() const override; - // ReceiveStream impl. + // ReceiveStreamInterface impl. void SetRtpExtensions(std::vector extensions) override; - const RtpConfig& rtp_config() const override { return config_.rtp; } + RtpHeaderExtensionMap GetRtpExtensionMap() const override; + + // Updates the `rtp_video_stream_receiver_`'s `local_ssrc` when the default + // sender has been created, changed or removed. + void SetLocalSsrc(uint32_t local_ssrc); + + uint32_t remote_ssrc() const { return remote_ssrc_; } + + bool transport_cc() const override { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + return transport_cc_; + } + + void SetTransportCc(bool transport_cc) override { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + transport_cc_ = transport_cc; + } + + void SetRtcpMode(RtcpMode mode) override { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + rtp_rtcp_->SetRTCPStatus(mode); + } private: RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_sequence_checker_; - // Config. Mostly const, header extensions may change. - Config config_ RTC_GUARDED_BY(packet_sequence_checker_); + RtpHeaderExtensionMap extension_map_; + + const uint32_t remote_ssrc_; + bool transport_cc_ RTC_GUARDED_BY(packet_sequence_checker_); + + // `payload_type_` is initially set to -1, indicating that FlexFec is + // disabled. + int payload_type_ RTC_GUARDED_BY(packet_sequence_checker_) = -1; // Erasure code interfacing. 
const std::unique_ptr receiver_; diff --git a/TMessagesProj/jni/voip/webrtc/call/rampup_tests.cc b/TMessagesProj/jni/voip/webrtc/call/rampup_tests.cc index 8e589b1f8a..dd4fe573df 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rampup_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rampup_tests.cc @@ -13,11 +13,14 @@ #include #include "absl/flags/flag.h" +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/rtc_event_log_output_file.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" +#include "api/test/metrics/global_metrics_logger_and_exporter.h" +#include "api/test/metrics/metric.h" #include "call/fake_network_pipe.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -26,9 +29,7 @@ #include "rtc_base/task_queue_for_test.h" #include "rtc_base/time_utils.h" #include "test/encoder_settings.h" -#include "test/field_trial.h" #include "test/gtest.h" -#include "test/testsupport/perf_test.h" ABSL_FLAG(std::string, ramp_dump_name, @@ -38,6 +39,10 @@ ABSL_FLAG(std::string, namespace webrtc { namespace { +using ::webrtc::test::GetGlobalMetricsLogger; +using ::webrtc::test::ImprovementDirection; +using ::webrtc::test::Unit; + constexpr TimeDelta kPollInterval = TimeDelta::Millis(20); static const int kExpectedHighVideoBitrateBps = 80000; static const int kExpectedHighAudioBitrateBps = 30000; @@ -52,6 +57,7 @@ std::vector GenerateSsrcs(size_t num_streams, uint32_t ssrc_offset) { ssrcs.push_back(static_cast(ssrc_offset + i)); return ssrcs; } + } // namespace RampUpTester::RampUpTester(size_t num_video_streams, @@ -59,12 +65,12 @@ RampUpTester::RampUpTester(size_t num_video_streams, size_t num_flexfec_streams, unsigned int start_bitrate_bps, int64_t min_run_time_ms, - const std::string& extension_type, + absl::string_view extension_type, bool rtx, bool red, bool report_perf_stats, TaskQueueBase* task_queue) - : EndToEndTest(test::CallTest::kLongTimeoutMs), + : EndToEndTest(test::CallTest::kLongTimeout), clock_(Clock::GetRealTimeClock()), num_video_streams_(num_video_streams), num_audio_streams_(num_audio_streams), @@ -103,7 +109,7 @@ void RampUpTester::ModifySenderBitrateConfig( void RampUpTester::OnVideoStreamsCreated( VideoSendStream* send_stream, - const std::vector& receive_streams) { + const std::vector& receive_streams) { send_stream_ = send_stream; } @@ -140,11 +146,11 @@ class RampUpTester::VideoStreamFactory private: std::vector CreateEncoderStreams( - int width, - int height, + int frame_width, + int frame_height, const VideoEncoderConfig& encoder_config) override { std::vector streams = - test::CreateVideoStreams(width, height, encoder_config); + test::CreateVideoStreams(frame_width, frame_height, encoder_config); if (encoder_config.number_of_streams == 1) { streams[0].target_bitrate_bps = streams[0].max_bitrate_bps = 2000000; } @@ -154,7 +160,7 @@ class RampUpTester::VideoStreamFactory void RampUpTester::ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) { send_config->suspend_below_min_bitrate = true; encoder_config->number_of_streams = num_video_streams_; @@ -214,7 +220,7 @@ void RampUpTester::ModifyVideoConfigs( } size_t i = 0; - for (VideoReceiveStream::Config& recv_config : *receive_configs) { + for (VideoReceiveStreamInterface::Config& recv_config : *receive_configs) { recv_config.rtp.transport_cc = transport_cc; 
recv_config.rtp.extensions = send_config->rtp.extensions; recv_config.decoders.reserve(1); @@ -256,7 +262,7 @@ void RampUpTester::ModifyVideoConfigs( void RampUpTester::ModifyAudioConfigs( AudioSendStream::Config* send_config, - std::vector* receive_configs) { + std::vector* receive_configs) { if (num_audio_streams_ == 0) return; @@ -278,7 +284,7 @@ void RampUpTester::ModifyAudioConfigs( extension_type_.c_str(), kTransportSequenceNumberExtensionId)); } - for (AudioReceiveStream::Config& recv_config : *receive_configs) { + for (AudioReceiveStreamInterface::Config& recv_config : *receive_configs) { recv_config.rtp.transport_cc = transport_cc; recv_config.rtp.extensions = send_config->rtp.extensions; recv_config.rtp.remote_ssrc = send_config->rtp.ssrc; @@ -329,13 +335,15 @@ void RampUpTester::PollStats() { } } -void RampUpTester::ReportResult(const std::string& measurement, - size_t value, - const std::string& units) const { - webrtc::test::PrintResult( - measurement, "", +void RampUpTester::ReportResult( + absl::string_view measurement, + size_t value, + Unit unit, + ImprovementDirection improvement_direction) const { + GetGlobalMetricsLogger()->LogSingleValueMetric( + measurement, ::testing::UnitTest::GetInstance()->current_test_info()->name(), value, - units, false); + unit, improvement_direction); } void RampUpTester::AccumulateStats(const VideoSendStream::StreamStats& stream, @@ -360,15 +368,14 @@ void RampUpTester::TriggerTestDone() { // Stop polling stats. // Corner case for field_trials=WebRTC-QuickPerfTest/Enabled/ - SendTask(RTC_FROM_HERE, task_queue_, [this] { pending_task_.Stop(); }); + SendTask(task_queue_, [this] { pending_task_.Stop(); }); // TODO(holmer): Add audio send stats here too when those APIs are available. if (!send_stream_) return; VideoSendStream::Stats send_stats; - SendTask(RTC_FROM_HERE, task_queue_, - [&] { send_stats = send_stream_->GetStats(); }); + SendTask(task_queue_, [&] { send_stats = send_stream_->GetStats(); }); send_stream_ = nullptr; // To avoid dereferencing a bad pointer. 
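// Editor's sketch (not part of the patch): the rampup_tests.cc hunks around
// this point migrate perf reporting from webrtc::test::PrintResult to the
// global metrics logger. A single value is reported roughly as below; the
// wrapper function and the metric/test names are placeholders, while the
// LogSingleValueMetric call shape, Unit and ImprovementDirection come from
// the diff itself.
#include <cstddef>

#include "api/test/metrics/global_metrics_logger_and_exporter.h"
#include "api/test/metrics/metric.h"

void ReportRampUpMediaSent(size_t media_sent_bytes) {
  using ::webrtc::test::GetGlobalMetricsLogger;
  using ::webrtc::test::ImprovementDirection;
  using ::webrtc::test::Unit;
  // Measurement name, test case name, value, unit and an explicit improvement
  // direction replace the old free-form "units" string of PrintResult().
  GetGlobalMetricsLogger()->LogSingleValueMetric(
      "ramp-up-media-sent", "ExampleTestCase", media_sent_bytes, Unit::kBytes,
      ImprovementDirection::kBiggerIsBetter);
}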
@@ -391,16 +398,21 @@ void RampUpTester::TriggerTestDone() { } if (report_perf_stats_) { - ReportResult("ramp-up-media-sent", media_sent, "bytes"); - ReportResult("ramp-up-padding-sent", padding_sent, "bytes"); - ReportResult("ramp-up-rtx-media-sent", rtx_media_sent, "bytes"); - ReportResult("ramp-up-rtx-padding-sent", rtx_padding_sent, "bytes"); + ReportResult("ramp-up-media-sent", media_sent, Unit::kBytes, + ImprovementDirection::kBiggerIsBetter); + ReportResult("ramp-up-padding-sent", padding_sent, Unit::kBytes, + ImprovementDirection::kSmallerIsBetter); + ReportResult("ramp-up-rtx-media-sent", rtx_media_sent, Unit::kBytes, + ImprovementDirection::kBiggerIsBetter); + ReportResult("ramp-up-rtx-padding-sent", rtx_padding_sent, Unit::kBytes, + ImprovementDirection::kSmallerIsBetter); if (ramp_up_finished_ms_ >= 0) { ReportResult("ramp-up-time", ramp_up_finished_ms_ - test_start_ms_, - "milliseconds"); + Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter); } ReportResult("ramp-up-average-network-latency", - send_transport_->GetAverageDelayMs(), "milliseconds"); + send_transport_->GetAverageDelayMs(), Unit::kMilliseconds, + ImprovementDirection::kSmallerIsBetter); } } @@ -414,7 +426,7 @@ RampUpDownUpTester::RampUpDownUpTester(size_t num_video_streams, size_t num_audio_streams, size_t num_flexfec_streams, unsigned int start_bitrate_bps, - const std::string& extension_type, + absl::string_view extension_type, bool rtx, bool red, const std::vector& loss_rates, @@ -525,9 +537,10 @@ void RampUpDownUpTester::EvolveTestState(int bitrate_bps, bool suspended) { EXPECT_FALSE(suspended); if (bitrate_bps >= GetExpectedHighBitrate()) { if (report_perf_stats_) { - webrtc::test::PrintResult("ramp_up_down_up", GetModifierString(), - "first_rampup", now - state_start_ms_, "ms", - false); + GetGlobalMetricsLogger()->LogSingleValueMetric( + "ramp_up_down_up" + GetModifierString(), "first_rampup", + now - state_start_ms_, Unit::kMilliseconds, + ImprovementDirection::kSmallerIsBetter); } // Apply loss during the transition between states if FEC is enabled. forward_transport_config_.loss_percent = loss_rates_[test_state_]; @@ -541,9 +554,10 @@ void RampUpDownUpTester::EvolveTestState(int bitrate_bps, bool suspended) { if (bitrate_bps < kLowBandwidthLimitBps + kLowBitrateMarginBps && suspended == check_suspend_state) { if (report_perf_stats_) { - webrtc::test::PrintResult("ramp_up_down_up", GetModifierString(), - "rampdown", now - state_start_ms_, "ms", - false); + GetGlobalMetricsLogger()->LogSingleValueMetric( + "ramp_up_down_up" + GetModifierString(), "rampdown", + now - state_start_ms_, Unit::kMilliseconds, + ImprovementDirection::kSmallerIsBetter); } // Apply loss during the transition between states if FEC is enabled. 
forward_transport_config_.loss_percent = loss_rates_[test_state_]; @@ -555,11 +569,14 @@ void RampUpDownUpTester::EvolveTestState(int bitrate_bps, bool suspended) { case kSecondRampup: if (bitrate_bps >= GetExpectedHighBitrate() && !suspended) { if (report_perf_stats_) { - webrtc::test::PrintResult("ramp_up_down_up", GetModifierString(), - "second_rampup", now - state_start_ms_, - "ms", false); + GetGlobalMetricsLogger()->LogSingleValueMetric( + "ramp_up_down_up" + GetModifierString(), "second_rampup", + now - state_start_ms_, Unit::kMilliseconds, + ImprovementDirection::kSmallerIsBetter); ReportResult("ramp-up-down-up-average-network-latency", - send_transport_->GetAverageDelayMs(), "milliseconds"); + send_transport_->GetAverageDelayMs(), + Unit::kMilliseconds, + ImprovementDirection::kSmallerIsBetter); } // Apply loss during the transition between states if FEC is enabled. forward_transport_config_.loss_percent = loss_rates_[test_state_]; @@ -719,4 +736,5 @@ TEST_F(RampUpTest, AudioTransportSequenceNumber) { false, task_queue()); RunBaseTest(&test); } + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/rampup_tests.h b/TMessagesProj/jni/voip/webrtc/call/rampup_tests.h index 075de6d888..de0022c994 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rampup_tests.h +++ b/TMessagesProj/jni/voip/webrtc/call/rampup_tests.h @@ -17,8 +17,10 @@ #include #include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/task_queue/task_queue_base.h" +#include "api/test/metrics/metric.h" #include "api/test/simulated_network.h" #include "call/call.h" #include "call/simulated_network.h" @@ -42,7 +44,7 @@ class RampUpTester : public test::EndToEndTest { size_t num_flexfec_streams, unsigned int start_bitrate_bps, int64_t min_run_time_ms, - const std::string& extension_type, + absl::string_view extension_type, bool rtx, bool red, bool report_perf_stats, @@ -64,9 +66,10 @@ class RampUpTester : public test::EndToEndTest { size_t* padding_sent, size_t* media_sent) const; - void ReportResult(const std::string& measurement, + void ReportResult(absl::string_view measurement, size_t value, - const std::string& units) const; + test::Unit unit, + test::ImprovementDirection improvement_direction) const; void TriggerTestDone(); Clock* const clock_; @@ -87,19 +90,19 @@ class RampUpTester : public test::EndToEndTest { class VideoStreamFactory; void ModifySenderBitrateConfig(BitrateConstraints* bitrate_config) override; - void OnVideoStreamsCreated( - VideoSendStream* send_stream, - const std::vector& receive_streams) override; + void OnVideoStreamsCreated(VideoSendStream* send_stream, + const std::vector& + receive_streams) override; std::unique_ptr CreateSendTransport( TaskQueueBase* task_queue, Call* sender_call) override; void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override; - void ModifyAudioConfigs( - AudioSendStream::Config* send_config, - std::vector* receive_configs) override; + void ModifyAudioConfigs(AudioSendStream::Config* send_config, + std::vector* + receive_configs) override; void ModifyFlexfecConfigs( std::vector* receive_configs) override; void OnCallsCreated(Call* sender_call, Call* receiver_call) override; @@ -126,7 +129,7 @@ class RampUpDownUpTester : public RampUpTester { size_t num_audio_streams, size_t num_flexfec_streams, unsigned int start_bitrate_bps, - const std::string& extension_type, + absl::string_view 
extension_type, bool rtx, bool red, const std::vector& loss_rates, @@ -163,5 +166,6 @@ class RampUpDownUpTester : public RampUpTester { int sent_bytes_; std::vector loss_rates_; }; + } // namespace webrtc #endif // CALL_RAMPUP_TESTS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/call/receive_stream.h b/TMessagesProj/jni/voip/webrtc/call/receive_stream.h index a6756fc5c1..eb04653000 100644 --- a/TMessagesProj/jni/voip/webrtc/call/receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/call/receive_stream.h @@ -18,15 +18,18 @@ #include "api/media_types.h" #include "api/scoped_refptr.h" #include "api/transport/rtp/rtp_source.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" namespace webrtc { -// Common base interface for MediaReceiveStream based classes and +// Common base interface for MediaReceiveStreamInterface based classes and // FlexfecReceiveStream. -class ReceiveStream { +class ReceiveStreamInterface { public: // Receive-stream specific RTP settings. - struct RtpConfig { + // TODO(tommi): This struct isn't needed at this level anymore. Move it closer + // to where it's used. + struct ReceiveStreamRtpConfig { // Synchronization source (stream identifier) to be received. // This member will not change mid-stream and can be assumed to be const // post initialization. @@ -54,19 +57,24 @@ class ReceiveStream { // Set/change the rtp header extensions. Must be called on the packet // delivery thread. virtual void SetRtpExtensions(std::vector extensions) = 0; + virtual RtpHeaderExtensionMap GetRtpExtensionMap() const = 0; - // Called on the packet delivery thread since some members of the config may - // change mid-stream (e.g. the local ssrc). All mutation must also happen on - // the packet delivery thread. Return value can be assumed to - // only be used in the calling context (on the stack basically). - virtual const RtpConfig& rtp_config() const = 0; + // Returns a bool for whether feedback for send side bandwidth estimation is + // enabled. See + // https://tools.ietf.org/html/draft-holmer-rmcat-transport-wide-cc-extensions + // for details. + // This value may change mid-stream and must be done on the same thread + // that the value is read on (i.e. packet delivery). + virtual bool transport_cc() const = 0; + + virtual void SetTransportCc(bool transport_cc) = 0; protected: - virtual ~ReceiveStream() {} + virtual ~ReceiveStreamInterface() {} }; // Either an audio or video receive stream. -class MediaReceiveStream : public ReceiveStream { +class MediaReceiveStreamInterface : public ReceiveStreamInterface { public: // Starts stream activity. // When a stream is active, it can receive, process and deliver packets. 
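// Editor's sketch (not part of the patch): with ReceiveStream renamed to
// ReceiveStreamInterface above, the old rtp_config() accessor is gone and
// callers go through per-field accessors instead. The helper below is
// hypothetical; transport_cc()/SetTransportCc() and the requirement that they
// be used on the packet delivery thread are taken from the interface comments
// in this diff.
#include "call/receive_stream.h"

// Must run on the packet delivery thread, like any other access to the
// transport_cc flag.
void EnsureTransportCcEnabled(webrtc::ReceiveStreamInterface& stream) {
  if (!stream.transport_cc()) {
    stream.SetTransportCc(true);
  }
}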
diff --git a/TMessagesProj/jni/voip/webrtc/call/receive_time_calculator.cc b/TMessagesProj/jni/voip/webrtc/call/receive_time_calculator.cc index 94d1fd18cc..417168b15d 100644 --- a/TMessagesProj/jni/voip/webrtc/call/receive_time_calculator.cc +++ b/TMessagesProj/jni/voip/webrtc/call/receive_time_calculator.cc @@ -16,22 +16,20 @@ #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/safe_minmax.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { -using ::webrtc::field_trial::IsEnabled; const char kBweReceiveTimeCorrection[] = "WebRTC-Bwe-ReceiveTimeFix"; } // namespace -ReceiveTimeCalculatorConfig::ReceiveTimeCalculatorConfig() +ReceiveTimeCalculatorConfig::ReceiveTimeCalculatorConfig( + const FieldTrialsView& field_trials) : max_packet_time_repair("maxrep", TimeDelta::Millis(2000)), stall_threshold("stall", TimeDelta::Millis(5)), tolerance("tol", TimeDelta::Millis(1)), max_stall("maxstall", TimeDelta::Seconds(5)) { - std::string trial_string = - field_trial::FindFullName(kBweReceiveTimeCorrection); + std::string trial_string = field_trials.Lookup(kBweReceiveTimeCorrection); ParseFieldTrial( {&max_packet_time_repair, &stall_threshold, &tolerance, &max_stall}, trial_string); @@ -40,14 +38,16 @@ ReceiveTimeCalculatorConfig::ReceiveTimeCalculatorConfig( const ReceiveTimeCalculatorConfig&) = default; ReceiveTimeCalculatorConfig::~ReceiveTimeCalculatorConfig() = default; -ReceiveTimeCalculator::ReceiveTimeCalculator() - : config_(ReceiveTimeCalculatorConfig()) {} +ReceiveTimeCalculator::ReceiveTimeCalculator( + const FieldTrialsView& field_trials) + : config_(field_trials) {} std::unique_ptr -ReceiveTimeCalculator::CreateFromFieldTrial() { - if (!IsEnabled(kBweReceiveTimeCorrection)) +ReceiveTimeCalculator::CreateFromFieldTrial( + const FieldTrialsView& field_trials) { + if (!field_trials.IsEnabled(kBweReceiveTimeCorrection)) return nullptr; - return std::make_unique(); + return std::make_unique(field_trials); } int64_t ReceiveTimeCalculator::ReconcileReceiveTimes(int64_t packet_time_us, diff --git a/TMessagesProj/jni/voip/webrtc/call/receive_time_calculator.h b/TMessagesProj/jni/voip/webrtc/call/receive_time_calculator.h index 0bd3a82afc..57ba331844 100644 --- a/TMessagesProj/jni/voip/webrtc/call/receive_time_calculator.h +++ b/TMessagesProj/jni/voip/webrtc/call/receive_time_calculator.h @@ -14,13 +14,14 @@ #include +#include "api/field_trials_view.h" #include "api/units/time_delta.h" #include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { struct ReceiveTimeCalculatorConfig { - ReceiveTimeCalculatorConfig(); + explicit ReceiveTimeCalculatorConfig(const FieldTrialsView& field_trials); ReceiveTimeCalculatorConfig(const ReceiveTimeCalculatorConfig&); ReceiveTimeCalculatorConfig& operator=(const ReceiveTimeCalculatorConfig&) = default; @@ -41,8 +42,9 @@ struct ReceiveTimeCalculatorConfig { // is received. 
class ReceiveTimeCalculator { public: - static std::unique_ptr CreateFromFieldTrial(); - ReceiveTimeCalculator(); + static std::unique_ptr CreateFromFieldTrial( + const FieldTrialsView& field_trials); + explicit ReceiveTimeCalculator(const FieldTrialsView& field_trials); int64_t ReconcileReceiveTimes(int64_t packet_time_us_, int64_t system_time_us_, int64_t safe_time_us_); diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_bitrate_configurator.h b/TMessagesProj/jni/voip/webrtc/call/rtp_bitrate_configurator.h index 7ad83f8b0b..5cb779a3b3 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_bitrate_configurator.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_bitrate_configurator.h @@ -14,7 +14,6 @@ #include "absl/types/optional.h" #include "api/transport/bitrate_settings.h" #include "api/units/data_rate.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -24,6 +23,10 @@ class RtpBitrateConfigurator { public: explicit RtpBitrateConfigurator(const BitrateConstraints& bitrate_config); ~RtpBitrateConfigurator(); + + RtpBitrateConfigurator(const RtpBitrateConfigurator&) = delete; + RtpBitrateConfigurator& operator=(const RtpBitrateConfigurator&) = delete; + BitrateConstraints GetConfig() const; // The greater min and smaller max set by this and SetClientBitratePreferences @@ -68,8 +71,6 @@ class RtpBitrateConfigurator { // Bandwidth cap applied for relayed calls. DataRate max_bitrate_over_relay_ = DataRate::PlusInfinity(); - - RTC_DISALLOW_COPY_AND_ASSIGN(RtpBitrateConfigurator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_config.h b/TMessagesProj/jni/voip/webrtc/call/rtp_config.h index c3b5b4a255..0cc9466a9f 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_config.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_config.h @@ -104,7 +104,7 @@ struct RtpConfig { // changing codec without recreating the VideoSendStream. Then these // fields must be removed, and association between payload type and codec // must move above the per-stream level. Ownership could be with - // RtpTransportControllerSend, with a reference from PayloadRouter, where + // RtpTransportControllerSend, with a reference from RtpVideoSender, where // the latter would be responsible for mapping the codec type of encoded // images to the right payload type. std::string payload_name; diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.cc index 28962fd2eb..0b74f2ac0a 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.cc @@ -10,6 +10,7 @@ #include "call/rtp_demuxer.h" +#include "absl/strings/string_view.h" #include "call/rtp_packet_sink_interface.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" @@ -39,15 +40,32 @@ size_t RemoveFromMapByValue(Map* map, const Value& value) { return EraseIf(*map, [&](const auto& elem) { return elem.second == value; }); } +// Temp fix: MID in SDP is allowed to be slightly longer than what's allowed +// in the RTP demuxer. Truncate if needed; this won't match, but it only +// makes sense in places that wouldn't use this for matching anyway. +// TODO(bugs.webrtc.org/12517): remove when length 16 is policed by parser. +std::string CheckMidLength(absl::string_view mid) { + std::string new_mid(mid); + if (new_mid.length() > BaseRtpStringExtension::kMaxValueSizeBytes) { + RTC_LOG(LS_WARNING) << "`mid` attribute too long. 
Truncating."; + new_mid.resize(BaseRtpStringExtension::kMaxValueSizeBytes); + } + return new_mid; +} + } // namespace +RtpDemuxerCriteria::RtpDemuxerCriteria( + absl::string_view mid, + absl::string_view rsid /*= absl::string_view()*/) + : mid_(CheckMidLength(mid)), rsid_(rsid) {} + RtpDemuxerCriteria::RtpDemuxerCriteria() = default; RtpDemuxerCriteria::~RtpDemuxerCriteria() = default; bool RtpDemuxerCriteria::operator==(const RtpDemuxerCriteria& other) const { - return this->mid == other.mid && this->rsid == other.rsid && - this->ssrcs == other.ssrcs && - this->payload_types == other.payload_types; + return mid_ == other.mid_ && rsid_ == other.rsid_ && ssrcs_ == other.ssrcs_ && + payload_types_ == other.payload_types_; } bool RtpDemuxerCriteria::operator!=(const RtpDemuxerCriteria& other) const { @@ -56,16 +74,16 @@ bool RtpDemuxerCriteria::operator!=(const RtpDemuxerCriteria& other) const { std::string RtpDemuxerCriteria::ToString() const { rtc::StringBuilder sb; - sb << "{mid: " << (mid.empty() ? "" : mid) - << ", rsid: " << (rsid.empty() ? "" : rsid) << ", ssrcs: ["; + sb << "{mid: " << (mid_.empty() ? "" : mid_) + << ", rsid: " << (rsid_.empty() ? "" : rsid_) << ", ssrcs: ["; - for (auto ssrc : ssrcs) { + for (auto ssrc : ssrcs_) { sb << ssrc << ", "; } sb << "], payload_types = ["; - for (auto pt : payload_types) { + for (auto pt : payload_types_) { sb << pt << ", "; } @@ -104,60 +122,60 @@ RtpDemuxer::~RtpDemuxer() { bool RtpDemuxer::AddSink(const RtpDemuxerCriteria& criteria, RtpPacketSinkInterface* sink) { - RTC_DCHECK(!criteria.payload_types.empty() || !criteria.ssrcs.empty() || - !criteria.mid.empty() || !criteria.rsid.empty()); - RTC_DCHECK(criteria.mid.empty() || IsLegalMidName(criteria.mid)); - RTC_DCHECK(criteria.rsid.empty() || IsLegalRsidName(criteria.rsid)); + RTC_DCHECK(!criteria.payload_types().empty() || !criteria.ssrcs().empty() || + !criteria.mid().empty() || !criteria.rsid().empty()); + RTC_DCHECK(criteria.mid().empty() || IsLegalMidName(criteria.mid())); + RTC_DCHECK(criteria.rsid().empty() || IsLegalRsidName(criteria.rsid())); RTC_DCHECK(sink); // We return false instead of DCHECKing for logical conflicts with the new // criteria because new sinks are created according to user-specified SDP and // we do not want to crash due to a data validation error. 
if (CriteriaWouldConflict(criteria)) { - RTC_LOG(LS_ERROR) << "Unable to add sink = " << sink - << " due conflicting criteria " << criteria.ToString(); + RTC_LOG(LS_ERROR) << "Unable to add sink=" << sink + << " due to conflicting criteria " << criteria.ToString(); return false; } - if (!criteria.mid.empty()) { - if (criteria.rsid.empty()) { - sink_by_mid_.emplace(criteria.mid, sink); + if (!criteria.mid().empty()) { + if (criteria.rsid().empty()) { + sink_by_mid_.emplace(criteria.mid(), sink); } else { - sink_by_mid_and_rsid_.emplace(std::make_pair(criteria.mid, criteria.rsid), - sink); + sink_by_mid_and_rsid_.emplace( + std::make_pair(criteria.mid(), criteria.rsid()), sink); } } else { - if (!criteria.rsid.empty()) { - sink_by_rsid_.emplace(criteria.rsid, sink); + if (!criteria.rsid().empty()) { + sink_by_rsid_.emplace(criteria.rsid(), sink); } } - for (uint32_t ssrc : criteria.ssrcs) { + for (uint32_t ssrc : criteria.ssrcs()) { sink_by_ssrc_.emplace(ssrc, sink); } - for (uint8_t payload_type : criteria.payload_types) { + for (uint8_t payload_type : criteria.payload_types()) { sinks_by_pt_.emplace(payload_type, sink); } RefreshKnownMids(); - RTC_LOG(LS_INFO) << "Added sink = " << sink << " for criteria " - << criteria.ToString(); + RTC_DLOG(LS_INFO) << "Added sink = " << sink << " for criteria " + << criteria.ToString(); return true; } bool RtpDemuxer::CriteriaWouldConflict( const RtpDemuxerCriteria& criteria) const { - if (!criteria.mid.empty()) { - if (criteria.rsid.empty()) { + if (!criteria.mid().empty()) { + if (criteria.rsid().empty()) { // If the MID is in the known_mids_ set, then there is already a sink // added for this MID directly, or there is a sink already added with a // MID, RSID pair for our MID and some RSID. // Adding this criteria would cause one of these rules to be shadowed, so // reject this new criteria. - if (known_mids_.find(criteria.mid) != known_mids_.end()) { + if (known_mids_.find(criteria.mid()) != known_mids_.end()) { RTC_LOG(LS_INFO) << criteria.ToString() << " would conflict with known mid"; return true; @@ -165,7 +183,7 @@ bool RtpDemuxer::CriteriaWouldConflict( } else { // If the exact rule already exists, then reject this duplicate. const auto sink_by_mid_and_rsid = sink_by_mid_and_rsid_.find( - std::make_pair(criteria.mid, criteria.rsid)); + std::make_pair(criteria.mid(), criteria.rsid())); if (sink_by_mid_and_rsid != sink_by_mid_and_rsid_.end()) { RTC_LOG(LS_INFO) << criteria.ToString() << " would conflict with existing sink = " @@ -176,7 +194,7 @@ bool RtpDemuxer::CriteriaWouldConflict( // If there is already a sink registered for the bare MID, then this // criteria will never receive any packets because they will just be // directed to that MID sink, so reject this new criteria. 
- const auto sink_by_mid = sink_by_mid_.find(criteria.mid); + const auto sink_by_mid = sink_by_mid_.find(criteria.mid()); if (sink_by_mid != sink_by_mid_.end()) { RTC_LOG(LS_INFO) << criteria.ToString() << " would conflict with existing sink = " @@ -186,7 +204,7 @@ bool RtpDemuxer::CriteriaWouldConflict( } } - for (uint32_t ssrc : criteria.ssrcs) { + for (uint32_t ssrc : criteria.ssrcs()) { const auto sink_by_ssrc = sink_by_ssrc_.find(ssrc); if (sink_by_ssrc != sink_by_ssrc_.end()) { RTC_LOG(LS_INFO) << criteria.ToString() @@ -217,14 +235,12 @@ void RtpDemuxer::RefreshKnownMids() { bool RtpDemuxer::AddSink(uint32_t ssrc, RtpPacketSinkInterface* sink) { RtpDemuxerCriteria criteria; - criteria.ssrcs.insert(ssrc); + criteria.ssrcs().insert(ssrc); return AddSink(criteria, sink); } -void RtpDemuxer::AddSink(const std::string& rsid, - RtpPacketSinkInterface* sink) { - RtpDemuxerCriteria criteria; - criteria.rsid = rsid; +void RtpDemuxer::AddSink(absl::string_view rsid, RtpPacketSinkInterface* sink) { + RtpDemuxerCriteria criteria(absl::string_view() /* mid */, rsid); AddSink(criteria, sink); } @@ -236,11 +252,7 @@ bool RtpDemuxer::RemoveSink(const RtpPacketSinkInterface* sink) { RemoveFromMapByValue(&sink_by_mid_and_rsid_, sink) + RemoveFromMapByValue(&sink_by_rsid_, sink); RefreshKnownMids(); - bool removed = num_removed > 0; - if (removed) { - RTC_LOG(LS_INFO) << "Removed sink = " << sink << " bindings"; - } - return removed; + return num_removed > 0; } bool RtpDemuxer::OnRtpPacket(const RtpPacketReceived& packet) { @@ -351,7 +363,7 @@ RtpPacketSinkInterface* RtpDemuxer::ResolveSink( return ResolveSinkByPayloadType(packet.PayloadType(), ssrc); } -RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByMid(const std::string& mid, +RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByMid(absl::string_view mid, uint32_t ssrc) { const auto it = sink_by_mid_.find(mid); if (it != sink_by_mid_.end()) { @@ -362,11 +374,11 @@ RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByMid(const std::string& mid, return nullptr; } -RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByMidRsid( - const std::string& mid, - const std::string& rsid, - uint32_t ssrc) { - const auto it = sink_by_mid_and_rsid_.find(std::make_pair(mid, rsid)); +RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByMidRsid(absl::string_view mid, + absl::string_view rsid, + uint32_t ssrc) { + const auto it = sink_by_mid_and_rsid_.find( + std::make_pair(std::string(mid), std::string(rsid))); if (it != sink_by_mid_and_rsid_.end()) { RtpPacketSinkInterface* sink = it->second; AddSsrcSinkBinding(ssrc, sink); @@ -375,7 +387,7 @@ RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByMidRsid( return nullptr; } -RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByRsid(const std::string& rsid, +RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByRsid(absl::string_view rsid, uint32_t ssrc) { const auto it = sink_by_rsid_.find(rsid); if (it != sink_by_rsid_.end()) { @@ -415,11 +427,11 @@ void RtpDemuxer::AddSsrcSinkBinding(uint32_t ssrc, auto it = result.first; bool inserted = result.second; if (inserted) { - RTC_LOG(LS_INFO) << "Added sink = " << sink - << " binding with SSRC=" << ssrc; + RTC_DLOG(LS_INFO) << "Added sink = " << sink + << " binding with SSRC=" << ssrc; } else if (it->second != sink) { - RTC_LOG(LS_INFO) << "Updated sink = " << sink - << " binding with SSRC=" << ssrc; + RTC_DLOG(LS_INFO) << "Updated sink = " << sink + << " binding with SSRC=" << ssrc; it->second = sink; } } diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.h 
b/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.h index fb65fce368..53eeb0b6b6 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.h @@ -16,6 +16,7 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/containers/flat_map.h" #include "rtc_base/containers/flat_set.h" @@ -26,7 +27,10 @@ class RtpPacketSinkInterface; // This struct describes the criteria that will be used to match packets to a // specific sink. -struct RtpDemuxerCriteria { +class RtpDemuxerCriteria { + public: + explicit RtpDemuxerCriteria(absl::string_view mid, + absl::string_view rsid = absl::string_view()); RtpDemuxerCriteria(); ~RtpDemuxerCriteria(); @@ -34,23 +38,37 @@ struct RtpDemuxerCriteria { bool operator!=(const RtpDemuxerCriteria& other) const; // If not the empty string, will match packets with this MID. - std::string mid; + const std::string& mid() const { return mid_; } + + // Return string representation of demux criteria to facilitate logging + std::string ToString() const; // If not the empty string, will match packets with this as their RTP stream // ID or repaired RTP stream ID. // Note that if both MID and RSID are specified, this will only match packets // that have both specified (either through RTP header extensions, SSRC // latching or RTCP). - std::string rsid; + const std::string& rsid() const { return rsid_; } - // Will match packets with any of these SSRCs. - flat_set ssrcs; + // The criteria will match packets with any of these SSRCs. + const flat_set& ssrcs() const { return ssrcs_; } - // Will match packets with any of these payload types. - flat_set payload_types; + // Writable accessor for directly modifying the list of ssrcs. + flat_set& ssrcs() { return ssrcs_; } - // Return string representation of demux criteria to facilitate logging - std::string ToString() const; + // The criteria will match packets with any of these payload types. + const flat_set& payload_types() const { return payload_types_; } + + // Writable accessor for directly modifying the list of payload types. + flat_set& payload_types() { return payload_types_; } + + private: + // Intentionally private member variables to encourage specifying them via the + // constructor and consider them to be const as much as possible. + const std::string mid_; + const std::string rsid_; + flat_set ssrcs_; + flat_set payload_types_; }; // This class represents the RTP demuxing, for a single RTP session (i.e., one @@ -127,7 +145,7 @@ class RtpDemuxer { // Registers a sink's association to an RSID. Only one sink may be associated // with a given RSID. Null pointer is not allowed. - void AddSink(const std::string& rsid, RtpPacketSinkInterface* sink); + void AddSink(absl::string_view rsid, RtpPacketSinkInterface* sink); // Removes a sink. Return value reports if anything was actually removed. // Null pointer is not allowed. @@ -149,12 +167,12 @@ class RtpDemuxer { RtpPacketSinkInterface* ResolveSink(const RtpPacketReceived& packet); // Used by the ResolveSink algorithm. 
- RtpPacketSinkInterface* ResolveSinkByMid(const std::string& mid, + RtpPacketSinkInterface* ResolveSinkByMid(absl::string_view mid, uint32_t ssrc); - RtpPacketSinkInterface* ResolveSinkByMidRsid(const std::string& mid, - const std::string& rsid, + RtpPacketSinkInterface* ResolveSinkByMidRsid(absl::string_view mid, + absl::string_view rsid, uint32_t ssrc); - RtpPacketSinkInterface* ResolveSinkByRsid(const std::string& rsid, + RtpPacketSinkInterface* ResolveSinkByRsid(absl::string_view rsid, uint32_t ssrc); RtpPacketSinkInterface* ResolveSinkByPayloadType(uint8_t payload_type, uint32_t ssrc); diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.cc index a712a005f4..48079dc61c 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.cc @@ -30,8 +30,10 @@ #include "rtc_base/time_utils.h" namespace webrtc { - namespace { + +constexpr int kMaxSimulatedSpatialLayers = 3; + void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info, absl::optional spatial_index, RTPVideoHeader* rtp) { @@ -131,15 +133,62 @@ void SetVideoTiming(const EncodedImage& image, VideoSendTiming* timing) { timing->network2_timestamp_delta_ms = 0; timing->flags = image.timing_.flags; } + +// Returns structure that aligns with simulated generic info. The templates +// allow to produce valid dependency descriptor for any stream where +// `num_spatial_layers` * `num_temporal_layers` <= 32 (limited by +// https://aomediacodec.github.io/av1-rtp-spec/#a82-syntax, see +// template_fdiffs()). The set of the templates is not tuned for any paricular +// structure thus dependency descriptor would use more bytes on the wire than +// with tuned templates. +FrameDependencyStructure MinimalisticStructure(int num_spatial_layers, + int num_temporal_layers) { + RTC_DCHECK_LE(num_spatial_layers, DependencyDescriptor::kMaxSpatialIds); + RTC_DCHECK_LE(num_temporal_layers, DependencyDescriptor::kMaxTemporalIds); + RTC_DCHECK_LE(num_spatial_layers * num_temporal_layers, 32); + FrameDependencyStructure structure; + structure.num_decode_targets = num_spatial_layers * num_temporal_layers; + structure.num_chains = num_spatial_layers; + structure.templates.reserve(num_spatial_layers * num_temporal_layers); + for (int sid = 0; sid < num_spatial_layers; ++sid) { + for (int tid = 0; tid < num_temporal_layers; ++tid) { + FrameDependencyTemplate a_template; + a_template.spatial_id = sid; + a_template.temporal_id = tid; + for (int s = 0; s < num_spatial_layers; ++s) { + for (int t = 0; t < num_temporal_layers; ++t) { + // Prefer kSwitch indication for frames that is part of the decode + // target because dependency descriptor information generated in this + // class use kSwitch indications more often that kRequired, increasing + // the chance of a good (or complete) template match. + a_template.decode_target_indications.push_back( + sid <= s && tid <= t ? DecodeTargetIndication::kSwitch + : DecodeTargetIndication::kNotPresent); + } + } + a_template.frame_diffs.push_back(tid == 0 ? 
num_spatial_layers * + num_temporal_layers + : num_spatial_layers); + a_template.chain_diffs.assign(structure.num_chains, 1); + structure.templates.push_back(a_template); + + structure.decode_target_protected_by_chain.push_back(sid); + } + } + return structure; +} } // namespace RtpPayloadParams::RtpPayloadParams(const uint32_t ssrc, const RtpPayloadState* state, - const WebRtcKeyValueConfig& trials) + const FieldTrialsView& trials) : ssrc_(ssrc), generic_picture_id_experiment_( absl::StartsWith(trials.Lookup("WebRTC-GenericPictureId"), - "Enabled")) { + "Enabled")), + simulate_generic_structure_(absl::StartsWith( + trials.Lookup("WebRTC-GenericCodecDependencyDescriptor"), + "Enabled")) { for (auto& spatial_layer : last_shared_frame_id_) spatial_layer.fill(-1); @@ -309,6 +358,70 @@ void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info, RTC_DCHECK_NOTREACHED() << "Unsupported codec."; } +absl::optional RtpPayloadParams::GenericStructure( + const CodecSpecificInfo* codec_specific_info) { + if (codec_specific_info == nullptr) { + return absl::nullopt; + } + // This helper shouldn't be used when template structure is specified + // explicetly. + RTC_DCHECK(!codec_specific_info->template_structure.has_value()); + switch (codec_specific_info->codecType) { + case VideoCodecType::kVideoCodecGeneric: + if (simulate_generic_structure_) { + return MinimalisticStructure(/*num_spatial_layers=*/1, + /*num_temporal_layer=*/1); + } + return absl::nullopt; + case VideoCodecType::kVideoCodecVP8: + return MinimalisticStructure(/*num_spatial_layers=*/1, + /*num_temporal_layer=*/kMaxTemporalStreams); + case VideoCodecType::kVideoCodecVP9: { + absl::optional structure = + MinimalisticStructure( + /*num_spatial_layers=*/kMaxSimulatedSpatialLayers, + /*num_temporal_layer=*/kMaxTemporalStreams); + const CodecSpecificInfoVP9& vp9 = codec_specific_info->codecSpecific.VP9; + if (vp9.ss_data_available && vp9.spatial_layer_resolution_present) { + RenderResolution first_valid; + RenderResolution last_valid; + for (size_t i = 0; i < vp9.num_spatial_layers; ++i) { + RenderResolution r(vp9.width[i], vp9.height[i]); + if (r.Valid()) { + if (!first_valid.Valid()) { + first_valid = r; + } + last_valid = r; + } + structure->resolutions.push_back(r); + } + if (!last_valid.Valid()) { + // No valid resolution found. Do not send resolutions. + structure->resolutions.clear(); + } else { + structure->resolutions.resize(kMaxSimulatedSpatialLayers, last_valid); + // VP9 encoder wrapper may disable first few spatial layers by + // setting invalid resolution (0,0). `structure->resolutions` + // doesn't support invalid resolution, so reset them to something + // valid. + for (RenderResolution& r : structure->resolutions) { + if (!r.Valid()) { + r = first_valid; + } + } + } + } + return structure; + } + case VideoCodecType::kVideoCodecAV1: + case VideoCodecType::kVideoCodecH264: + case VideoCodecType::kVideoCodecH265: + case VideoCodecType::kVideoCodecMultiplex: + return absl::nullopt; + } + RTC_DCHECK_NOTREACHED() << "Unsupported codec."; +} + void RtpPayloadParams::GenericToGeneric(int64_t shared_frame_id, bool is_keyframe, RTPVideoHeader* rtp_video_header) { @@ -409,6 +522,15 @@ void RtpPayloadParams::Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info, generic.spatial_index = spatial_index; generic.temporal_index = temporal_index; + // Generate decode target indications. 
+ RTC_DCHECK_LT(temporal_index, kMaxTemporalStreams); + generic.decode_target_indications.resize(kMaxTemporalStreams); + auto it = std::fill_n(generic.decode_target_indications.begin(), + temporal_index, DecodeTargetIndication::kNotPresent); + std::fill(it, generic.decode_target_indications.end(), + DecodeTargetIndication::kSwitch); + + // Frame dependencies. if (vp8_info.useExplicitDependencies) { SetDependenciesVp8New(vp8_info, shared_frame_id, is_keyframe, vp8_header.layerSync, &generic); @@ -417,42 +539,15 @@ void RtpPayloadParams::Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info, spatial_index, temporal_index, vp8_header.layerSync, &generic); } -} - -FrameDependencyStructure RtpPayloadParams::MinimalisticStructure( - int num_spatial_layers, - int num_temporal_layers) { - RTC_DCHECK_LE(num_spatial_layers * num_temporal_layers, 32); - FrameDependencyStructure structure; - structure.num_decode_targets = num_spatial_layers * num_temporal_layers; - structure.num_chains = num_spatial_layers; - structure.templates.reserve(num_spatial_layers * num_temporal_layers); - for (int sid = 0; sid < num_spatial_layers; ++sid) { - for (int tid = 0; tid < num_temporal_layers; ++tid) { - FrameDependencyTemplate a_template; - a_template.spatial_id = sid; - a_template.temporal_id = tid; - for (int s = 0; s < num_spatial_layers; ++s) { - for (int t = 0; t < num_temporal_layers; ++t) { - // Prefer kSwitch indication for frames that is part of the decode - // target because dependency descriptor information generated in this - // class use kSwitch indications more often that kRequired, increasing - // the chance of a good (or complete) template match. - a_template.decode_target_indications.push_back( - sid <= s && tid <= t ? DecodeTargetIndication::kSwitch - : DecodeTargetIndication::kNotPresent); - } - } - a_template.frame_diffs.push_back(tid == 0 ? num_spatial_layers * - num_temporal_layers - : num_spatial_layers); - a_template.chain_diffs.assign(structure.num_chains, 1); - structure.templates.push_back(a_template); - structure.decode_target_protected_by_chain.push_back(sid); - } + // Calculate chains. + generic.chain_diffs = { + (is_keyframe || chain_last_frame_id_[0] < 0) + ? 0 + : static_cast(shared_frame_id - chain_last_frame_id_[0])}; + if (temporal_index == 0) { + chain_last_frame_id_[0] = shared_frame_id; } - return structure; } void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, @@ -460,8 +555,15 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, RTPVideoHeader& rtp_video_header) { const auto& vp9_header = absl::get(rtp_video_header.video_type_header); - const int num_spatial_layers = vp9_header.num_spatial_layers; + const int num_spatial_layers = kMaxSimulatedSpatialLayers; + const int num_active_spatial_layers = vp9_header.num_spatial_layers; const int num_temporal_layers = kMaxTemporalStreams; + static_assert(num_spatial_layers <= + RtpGenericFrameDescriptor::kMaxSpatialLayers); + static_assert(num_temporal_layers <= + RtpGenericFrameDescriptor::kMaxTemporalLayers); + static_assert(num_spatial_layers <= DependencyDescriptor::kMaxSpatialIds); + static_assert(num_temporal_layers <= DependencyDescriptor::kMaxTemporalIds); int spatial_index = vp9_header.spatial_idx != kNoSpatialIdx ? 
vp9_header.spatial_idx : 0; @@ -470,7 +572,7 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, if (spatial_index >= num_spatial_layers || temporal_index >= num_temporal_layers || - num_spatial_layers > RtpGenericFrameDescriptor::kMaxSpatialLayers) { + num_active_spatial_layers > num_spatial_layers) { // Prefer to generate no generic layering than an inconsistent one. return; } @@ -534,6 +636,9 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, last_vp9_frame_id_[vp9_header.picture_id % kPictureDiffLimit][spatial_index] = shared_frame_id; + result.active_decode_targets = + ((uint32_t{1} << num_temporal_layers * num_active_spatial_layers) - 1); + // Calculate chains, asuming chain includes all frames with temporal_id = 0 if (!vp9_header.inter_pic_predicted && !vp9_header.inter_layer_predicted) { // Assume frames without dependencies also reset chains. @@ -541,8 +646,8 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, chain_last_frame_id_[sid] = -1; } } - result.chain_diffs.resize(num_spatial_layers); - for (int sid = 0; sid < num_spatial_layers; ++sid) { + result.chain_diffs.resize(num_spatial_layers, 0); + for (int sid = 0; sid < num_active_spatial_layers; ++sid) { if (chain_last_frame_id_[sid] == -1) { result.chain_diffs[sid] = 0; continue; diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.h b/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.h index 48b0ec2c61..5feee11ab0 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.h @@ -15,7 +15,7 @@ #include #include "absl/types/optional.h" -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_view.h" #include "api/video_codecs/video_encoder.h" #include "call/rtp_config.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" @@ -26,15 +26,13 @@ namespace webrtc { -class RtpRtcp; - // State for setting picture id and tl0 pic idx, for VP8 and VP9 // TODO(nisse): Make these properties not codec specific. class RtpPayloadParams final { public: - RtpPayloadParams(const uint32_t ssrc, + RtpPayloadParams(uint32_t ssrc, const RtpPayloadState* state, - const WebRtcKeyValueConfig& trials); + const FieldTrialsView& trials); RtpPayloadParams(const RtpPayloadParams& other); ~RtpPayloadParams(); @@ -42,16 +40,10 @@ class RtpPayloadParams final { const CodecSpecificInfo* codec_specific_info, int64_t shared_frame_id); - // Returns structure that aligns with simulated generic info. The templates - // allow to produce valid dependency descriptor for any stream where - // `num_spatial_layers` * `num_temporal_layers` <= 32 (limited by - // https://aomediacodec.github.io/av1-rtp-spec/#a82-syntax, see - // template_fdiffs()). The set of the templates is not tuned for any paricular - // structure thus dependency descriptor would use more bytes on the wire than - // with tuned templates. 
- static FrameDependencyStructure MinimalisticStructure( - int num_spatial_layers, - int num_temporal_layers); + // Returns structure that aligns with simulated generic info generated by + // `GetRtpVideoHeader` for the `codec_specific_info` + absl::optional GenericStructure( + const CodecSpecificInfo* codec_specific_info); uint32_t ssrc() const; @@ -136,6 +128,7 @@ class RtpPayloadParams final { RtpPayloadState state_; const bool generic_picture_id_experiment_; + const bool simulate_generic_structure_; }; } // namespace webrtc #endif // CALL_RTP_PAYLOAD_PARAMS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.cc index 7150b34bdb..8cefa2dffc 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.cc @@ -31,9 +31,7 @@ RtpStreamReceiverController::Receiver::Receiver( } RtpStreamReceiverController::Receiver::~Receiver() { - // Don't require return value > 0, since for RTX we currently may - // have multiple Receiver objects with the same sink. - // TODO(nisse): Consider adding a DCHECK when RtxReceiveStream is wired up. + // This may fail, if corresponding AddSink in the constructor failed. controller_->RemoveSink(sink_); } @@ -58,7 +56,7 @@ bool RtpStreamReceiverController::AddSink(uint32_t ssrc, return demuxer_.AddSink(ssrc, sink); } -size_t RtpStreamReceiverController::RemoveSink( +bool RtpStreamReceiverController::RemoveSink( const RtpPacketSinkInterface* sink) { RTC_DCHECK_RUN_ON(&demuxer_sequence_); return demuxer_.RemoveSink(sink); diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.h b/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.h index 284c9fa12f..46d04f73f8 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.h @@ -22,10 +22,8 @@ class RtpPacketReceived; // This class represents the RTP receive parsing and demuxing, for a // single RTP session. -// TODO(nisse): Add RTCP processing, we should aim to terminate RTCP -// and not leave any RTCP processing to individual receive streams. -// TODO(nisse): Extract per-packet processing, including parsing and -// demuxing, into a separate class. +// TODO(bugs.webrtc.org/7135): Add RTCP processing, we should aim to terminate +// RTCP and not leave any RTCP processing to individual receive streams. class RtpStreamReceiverController : public RtpStreamReceiverControllerInterface { public: @@ -37,11 +35,7 @@ class RtpStreamReceiverController uint32_t ssrc, RtpPacketSinkInterface* sink) override; - // Thread-safe wrappers for the corresponding RtpDemuxer methods. - bool AddSink(uint32_t ssrc, RtpPacketSinkInterface* sink) override; - size_t RemoveSink(const RtpPacketSinkInterface* sink) override; - - // TODO(nisse): Not yet responsible for parsing. + // TODO(bugs.webrtc.org/7135): Not yet responsible for parsing. bool OnRtpPacket(const RtpPacketReceived& packet); private: @@ -58,6 +52,10 @@ class RtpStreamReceiverController RtpPacketSinkInterface* const sink_; }; + // Thread-safe wrappers for the corresponding RtpDemuxer methods. + bool AddSink(uint32_t ssrc, RtpPacketSinkInterface* sink); + bool RemoveSink(const RtpPacketSinkInterface* sink); + // TODO(bugs.webrtc.org/11993): We expect construction and all methods to be // called on the same thread/tq. 
Currently this is the worker thread // (including OnRtpPacket) but a more natural fit would be the network thread. diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller_interface.h b/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller_interface.h index a5e5295c31..793d0bc145 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller_interface.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller_interface.h @@ -18,12 +18,11 @@ namespace webrtc { // An RtpStreamReceiver is responsible for the rtp-specific but // media-independent state needed for receiving an RTP stream. -// TODO(nisse): Currently, only owns the association between ssrc and -// the stream's RtpPacketSinkInterface. Ownership of corresponding -// objects from modules/rtp_rtcp/ should move to this class (or -// rather, the corresponding implementation class). We should add -// methods for getting rtp receive stats, and for sending RTCP -// messages related to the receive stream. +// TODO(bugs.webrtc.org/7135): Currently, only owns the association between ssrc +// and the stream's RtpPacketSinkInterface. Ownership of corresponding objects +// from modules/rtp_rtcp/ should move to this class (or rather, the +// corresponding implementation class). We should add methods for getting rtp +// receive stats, and for sending RTCP messages related to the receive stream. class RtpStreamReceiverInterface { public: virtual ~RtpStreamReceiverInterface() {} @@ -37,9 +36,6 @@ class RtpStreamReceiverControllerInterface { virtual std::unique_ptr CreateReceiver( uint32_t ssrc, RtpPacketSinkInterface* sink) = 0; - // For registering additional sinks, needed for FlexFEC. - virtual bool AddSink(uint32_t ssrc, RtpPacketSinkInterface* sink) = 0; - virtual size_t RemoveSink(const RtpPacketSinkInterface* sink) = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_config.h b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_config.h index 9aa9f14c16..f2030b3672 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_config.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_config.h @@ -13,12 +13,11 @@ #include +#include "api/field_trials_view.h" #include "api/network_state_predictor.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/transport/bitrate_settings.h" #include "api/transport/network_control.h" -#include "api/transport/webrtc_key_value_config.h" -#include "modules/utility/include/process_thread.h" #include "rtc_base/task_queue.h" namespace webrtc { @@ -44,7 +43,7 @@ struct RtpTransportConfig { // Key-value mapping of internal configurations to apply, // e.g. field trials. 
- const WebRtcKeyValueConfig* trials = nullptr; + const FieldTrialsView* trials = nullptr; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc index c9388e47aa..3ecec98b80 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc @@ -14,7 +14,9 @@ #include #include "absl/strings/match.h" +#include "absl/strings/string_view.h" #include "absl/types/optional.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/transport/goog_cc_factory.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" @@ -59,29 +61,25 @@ TargetRateConstraints ConvertConstraints(const BitrateConstraints& contraints, contraints.start_bitrate_bps, clock); } -bool IsEnabled(const WebRtcKeyValueConfig* trials, absl::string_view key) { - RTC_DCHECK(trials != nullptr); - return absl::StartsWith(trials->Lookup(key), "Enabled"); +bool IsEnabled(const FieldTrialsView& trials, absl::string_view key) { + return absl::StartsWith(trials.Lookup(key), "Enabled"); } -bool IsDisabled(const WebRtcKeyValueConfig* trials, absl::string_view key) { - RTC_DCHECK(trials != nullptr); - return absl::StartsWith(trials->Lookup(key), "Disabled"); +bool IsDisabled(const FieldTrialsView& trials, absl::string_view key) { + return absl::StartsWith(trials.Lookup(key), "Disabled"); } bool IsRelayed(const rtc::NetworkRoute& route) { return route.local.uses_turn() || route.remote.uses_turn(); } - } // namespace RtpTransportControllerSend::PacerSettings::PacerSettings( - const WebRtcKeyValueConfig* trials) - : tq_disabled("Disabled"), - holdback_window("holdback_window", PacingController::kMinSleepTime), - holdback_packets("holdback_packets", -1) { - ParseFieldTrial({&tq_disabled, &holdback_window, &holdback_packets}, - trials->Lookup("WebRTC-TaskQueuePacer")); + const FieldTrialsView& trials) + : holdback_window("holdback_window", TimeDelta::Millis(5)), + holdback_packets("holdback_packets", 3) { + ParseFieldTrial({&holdback_window, &holdback_packets}, + trials.Lookup("WebRTC-TaskQueuePacer")); } RtpTransportControllerSend::RtpTransportControllerSend( @@ -90,32 +88,20 @@ RtpTransportControllerSend::RtpTransportControllerSend( NetworkStatePredictorFactoryInterface* predictor_factory, NetworkControllerFactoryInterface* controller_factory, const BitrateConstraints& bitrate_config, - std::unique_ptr process_thread, TaskQueueFactory* task_queue_factory, - const WebRtcKeyValueConfig* trials) + const FieldTrialsView& trials) : clock_(clock), event_log_(event_log), + task_queue_factory_(task_queue_factory), bitrate_configurator_(bitrate_config), pacer_started_(false), - process_thread_(std::move(process_thread)), pacer_settings_(trials), - process_thread_pacer_(pacer_settings_.use_task_queue_pacer() - ? nullptr - : new PacedSender(clock, - &packet_router_, - event_log, - trials, - process_thread_.get())), - task_queue_pacer_( - pacer_settings_.use_task_queue_pacer() - ? 
new TaskQueuePacedSender(clock, - &packet_router_, - event_log, - trials, - task_queue_factory, - pacer_settings_.holdback_window.Get(), - pacer_settings_.holdback_packets.Get()) - : nullptr), + pacer_(clock, + &packet_router_, + trials, + task_queue_factory, + pacer_settings_.holdback_window.Get(), + pacer_settings_.holdback_packets.Get()), observer_(nullptr), controller_factory_override_(controller_factory), controller_factory_fallback_( @@ -131,28 +117,32 @@ RtpTransportControllerSend::RtpTransportControllerSend( relay_bandwidth_cap_("relay_cap", DataRate::PlusInfinity()), transport_overhead_bytes_per_packet_(0), network_available_(false), + congestion_window_size_(DataSize::PlusInfinity()), + is_congested_(false), retransmission_rate_limiter_(clock, kRetransmitWindowSizeMs), - task_queue_(task_queue_factory->CreateTaskQueue( - "rtp_send_controller", - TaskQueueFactory::Priority::NORMAL)) { + task_queue_(trials, "rtp_send_controller", task_queue_factory), + field_trials_(trials) { ParseFieldTrial({&relay_bandwidth_cap_}, - trials->Lookup("WebRTC-Bwe-NetworkRouteConstraints")); + trials.Lookup("WebRTC-Bwe-NetworkRouteConstraints")); initial_config_.constraints = ConvertConstraints(bitrate_config, clock_); initial_config_.event_log = event_log; - initial_config_.key_value_config = trials; + initial_config_.key_value_config = &trials; RTC_DCHECK(bitrate_config.start_bitrate_bps > 0); - pacer()->SetPacingRates( - DataRate::BitsPerSec(bitrate_config.start_bitrate_bps), DataRate::Zero()); - - if (absl::StartsWith(trials->Lookup("WebRTC-LazyPacerStart"), "Disabled")) { - EnsureStarted(); - } + pacer_.SetPacingRates(DataRate::BitsPerSec(bitrate_config.start_bitrate_bps), + DataRate::Zero()); } RtpTransportControllerSend::~RtpTransportControllerSend() { + RTC_DCHECK_RUN_ON(&main_thread_); RTC_DCHECK(video_rtp_senders_.empty()); - process_thread_->Stop(); + if (task_queue_.IsCurrent()) { + // If these repeated tasks run on a task queue owned by + // `task_queue_`, they are stopped when the task queue is deleted. + // Otherwise, stop them here. + pacer_queue_update_task_.Stop(); + controller_task_.Stop(); + } } RtpVideoSenderInterface* RtpTransportControllerSend::CreateRtpVideoSender( @@ -174,7 +164,8 @@ RtpVideoSenderInterface* RtpTransportControllerSend::CreateRtpVideoSender( // the parts of RtpTransportControllerSendInterface that are really used. 
this, event_log, &retransmission_rate_limiter_, std::move(fec_controller), frame_encryption_config.frame_encryptor, - frame_encryption_config.crypto_options, std::move(frame_transformer))); + frame_encryption_config.crypto_options, std::move(frame_transformer), + field_trials_, task_queue_factory_)); return video_rtp_senders_.back().get(); } @@ -202,21 +193,16 @@ void RtpTransportControllerSend::UpdateControlState() { observer_->OnTargetTransferRate(*update); } -RtpPacketPacer* RtpTransportControllerSend::pacer() { - if (pacer_settings_.use_task_queue_pacer()) { - return task_queue_pacer_.get(); - } - return process_thread_pacer_.get(); -} - -const RtpPacketPacer* RtpTransportControllerSend::pacer() const { - if (pacer_settings_.use_task_queue_pacer()) { - return task_queue_pacer_.get(); +void RtpTransportControllerSend::UpdateCongestedState() { + bool congested = transport_feedback_adapter_.GetOutstandingData() >= + congestion_window_size_; + if (congested != is_congested_) { + is_congested_ = congested; + pacer_.SetCongested(congested); } - return process_thread_pacer_.get(); } -rtc::TaskQueue* RtpTransportControllerSend::GetWorkerQueue() { +MaybeWorkerThread* RtpTransportControllerSend::GetWorkerQueue() { return &task_queue_; } @@ -235,10 +221,7 @@ RtpTransportControllerSend::transport_feedback_observer() { } RtpPacketSender* RtpTransportControllerSend::packet_sender() { - if (pacer_settings_.use_task_queue_pacer()) { - return task_queue_pacer_.get(); - } - return process_thread_pacer_.get(); + return &pacer_; } void RtpTransportControllerSend::SetAllocatedSendBitrateLimits( @@ -255,7 +238,7 @@ void RtpTransportControllerSend::SetPacingFactor(float pacing_factor) { UpdateStreamsConfig(); } void RtpTransportControllerSend::SetQueueTimeLimit(int limit_ms) { - pacer()->SetQueueTimeLimit(TimeDelta::Millis(limit_ms)); + pacer_.SetQueueTimeLimit(TimeDelta::Millis(limit_ms)); } StreamFeedbackProvider* RtpTransportControllerSend::GetStreamFeedbackProvider() { @@ -264,7 +247,7 @@ RtpTransportControllerSend::GetStreamFeedbackProvider() { void RtpTransportControllerSend::RegisterTargetTransferRateObserver( TargetTransferRateObserver* observer) { - task_queue_.PostTask([this, observer] { + task_queue_.RunOrPost([this, observer] { RTC_DCHECK_RUN_ON(&task_queue_); RTC_DCHECK(observer_ == nullptr); observer_ = observer; @@ -291,7 +274,7 @@ bool RtpTransportControllerSend::IsRelevantRouteChange( } void RtpTransportControllerSend::OnNetworkRouteChanged( - const std::string& transport_name, + absl::string_view transport_name, const rtc::NetworkRoute& network_route) { // Check if the network route is connected. @@ -305,8 +288,11 @@ void RtpTransportControllerSend::OnNetworkRouteChanged( ApplyOrLiftRelayCap(IsRelayed(network_route)); // Check whether the network route has changed on each transport. - auto result = - network_routes_.insert(std::make_pair(transport_name, network_route)); + auto result = network_routes_.insert( + // Explicit conversion of transport_name to std::string here is necessary + // to support some platforms that cannot yet deal with implicit + // conversion in these types of situations. 
+ std::make_pair(std::string(transport_name), network_route)); auto kv = result.first; bool inserted = result.second; if (inserted || !(kv->second == network_route)) { @@ -321,7 +307,7 @@ void RtpTransportControllerSend::OnNetworkRouteChanged( if (relay_constraint_update.has_value()) { UpdateBitrateConstraints(*relay_constraint_update); } - task_queue_.PostTask([this, network_route] { + task_queue_.RunOrPost([this, network_route] { RTC_DCHECK_RUN_ON(&task_queue_); transport_overhead_bytes_per_packet_ = network_route.packet_overhead; }); @@ -350,7 +336,7 @@ void RtpTransportControllerSend::OnNetworkRouteChanged( NetworkRouteChange msg; msg.at_time = Timestamp::Millis(clock_->TimeInMilliseconds()); msg.constraints = ConvertConstraints(bitrate_config, clock_); - task_queue_.PostTask([this, msg, network_route] { + task_queue_.RunOrPost([this, msg, network_route] { RTC_DCHECK_RUN_ON(&task_queue_); transport_overhead_bytes_per_packet_ = network_route.packet_overhead; if (reset_feedback_on_route_change_) { @@ -361,7 +347,8 @@ void RtpTransportControllerSend::OnNetworkRouteChanged( } else { UpdateInitialConstraints(msg.constraints); } - pacer()->UpdateOutstandingData(DataSize::Zero()); + is_congested_ = false; + pacer_.SetCongested(false); }); } } @@ -372,17 +359,18 @@ void RtpTransportControllerSend::OnNetworkAvailability(bool network_available) { NetworkAvailability msg; msg.at_time = Timestamp::Millis(clock_->TimeInMilliseconds()); msg.network_available = network_available; - task_queue_.PostTask([this, msg]() { + task_queue_.RunOrPost([this, msg]() { RTC_DCHECK_RUN_ON(&task_queue_); if (network_available_ == msg.network_available) return; network_available_ = msg.network_available; if (network_available_) { - pacer()->Resume(); + pacer_.Resume(); } else { - pacer()->Pause(); + pacer_.Pause(); } - pacer()->UpdateOutstandingData(DataSize::Zero()); + is_congested_ = false; + pacer_.SetCongested(false); if (controller_) { control_handler_->SetNetworkAvailability(network_available_); @@ -401,14 +389,14 @@ RtcpBandwidthObserver* RtpTransportControllerSend::GetBandwidthObserver() { return this; } int64_t RtpTransportControllerSend::GetPacerQueuingDelayMs() const { - return pacer()->OldestPacketWaitTime().ms(); + return pacer_.OldestPacketWaitTime().ms(); } absl::optional RtpTransportControllerSend::GetFirstPacketTime() const { - return pacer()->FirstSentPacketTime(); + return pacer_.FirstSentPacketTime(); } void RtpTransportControllerSend::EnablePeriodicAlrProbing(bool enable) { - task_queue_.PostTask([this, enable]() { + task_queue_.RunOrPost([this, enable]() { RTC_DCHECK_RUN_ON(&task_queue_); streams_config_.requests_alr_probing = enable; UpdateStreamsConfig(); @@ -416,26 +404,33 @@ void RtpTransportControllerSend::EnablePeriodicAlrProbing(bool enable) { } void RtpTransportControllerSend::OnSentPacket( const rtc::SentPacket& sent_packet) { - task_queue_.PostTask([this, sent_packet]() { - RTC_DCHECK_RUN_ON(&task_queue_); - absl::optional packet_msg = - transport_feedback_adapter_.ProcessSentPacket(sent_packet); - if (packet_msg) { - // Only update outstanding data in pacer if: - // 1. Packet feadback is used. - // 2. The packet has not yet received an acknowledgement. - // 3. It is not a retransmission of an earlier packet. - pacer()->UpdateOutstandingData( - transport_feedback_adapter_.GetOutstandingData()); - if (controller_) - PostUpdates(controller_->OnSentPacket(*packet_msg)); - } - }); + // Normally called on the network thread ! 
+ + // We can not use SafeTask here if we are using an owned task queue, because + // the safety flag will be destroyed when RtpTransportControllerSend is + // destroyed on the worker thread. But we must use SafeTask if we are using + // the worker thread, since the worker thread outlive + // RtpTransportControllerSend. + task_queue_.TaskQueueForPost()->PostTask( + task_queue_.MaybeSafeTask(safety_.flag(), [this, sent_packet]() { + RTC_DCHECK_RUN_ON(&task_queue_); + absl::optional packet_msg = + transport_feedback_adapter_.ProcessSentPacket(sent_packet); + if (packet_msg) { + // Only update outstanding data if: + // 1. Packet feedback is used. + // 2. The packet has not yet received an acknowledgement. + // 3. It is not a retransmission of an earlier packet. + UpdateCongestedState(); + if (controller_) + PostUpdates(controller_->OnSentPacket(*packet_msg)); + } + })); } void RtpTransportControllerSend::OnReceivedPacket( const ReceivedPacket& packet_msg) { - task_queue_.PostTask([this, packet_msg]() { + task_queue_.RunOrPost([this, packet_msg]() { RTC_DCHECK_RUN_ON(&task_queue_); if (controller_) PostUpdates(controller_->OnReceivedPacket(packet_msg)); @@ -445,7 +440,7 @@ void RtpTransportControllerSend::OnReceivedPacket( void RtpTransportControllerSend::UpdateBitrateConstraints( const BitrateConstraints& updated) { TargetRateConstraints msg = ConvertConstraints(updated, clock_); - task_queue_.PostTask([this, msg]() { + task_queue_.RunOrPost([this, msg]() { RTC_DCHECK_RUN_ON(&task_queue_); if (controller_) { PostUpdates(controller_->OnTargetRateConstraints(msg)); @@ -495,7 +490,7 @@ void RtpTransportControllerSend::OnTransportOverheadChanged( return; } - pacer()->SetTransportOverhead( + pacer_.SetTransportOverhead( DataSize::Bytes(transport_overhead_bytes_per_packet)); // TODO(holmer): Call AudioRtpSenders when they have been moved to @@ -508,21 +503,17 @@ void RtpTransportControllerSend::OnTransportOverheadChanged( void RtpTransportControllerSend::AccountForAudioPacketsInPacedSender( bool account_for_audio) { - pacer()->SetAccountForAudioPackets(account_for_audio); + pacer_.SetAccountForAudioPackets(account_for_audio); } void RtpTransportControllerSend::IncludeOverheadInPacedSender() { - pacer()->SetIncludeOverhead(); + pacer_.SetIncludeOverhead(); } void RtpTransportControllerSend::EnsureStarted() { if (!pacer_started_) { pacer_started_ = true; - if (pacer_settings_.use_task_queue_pacer()) { - task_queue_pacer_->EnsureStarted(); - } else { - process_thread_->Start(); - } + pacer_.EnsureStarted(); } } @@ -530,7 +521,7 @@ void RtpTransportControllerSend::OnReceivedEstimatedBitrate(uint32_t bitrate) { RemoteBitrateReport msg; msg.receive_time = Timestamp::Millis(clock_->TimeInMilliseconds()); msg.bandwidth = DataRate::BitsPerSec(bitrate); - task_queue_.PostTask([this, msg]() { + task_queue_.RunOrPost([this, msg]() { RTC_DCHECK_RUN_ON(&task_queue_); if (controller_) PostUpdates(controller_->OnRemoteBitrateReport(msg)); @@ -541,13 +532,9 @@ void RtpTransportControllerSend::OnReceivedRtcpReceiverReport( const ReportBlockList& report_blocks, int64_t rtt_ms, int64_t now_ms) { - task_queue_.PostTask([this, report_blocks, now_ms]() { + task_queue_.RunOrPost([this, report_blocks, now_ms, rtt_ms]() { RTC_DCHECK_RUN_ON(&task_queue_); OnReceivedRtcpReceiverReportBlocks(report_blocks, now_ms); - }); - - task_queue_.PostTask([this, now_ms, rtt_ms]() { - RTC_DCHECK_RUN_ON(&task_queue_); RoundTripTimeUpdate report; report.receive_time = Timestamp::Millis(now_ms); report.round_trip_time = 
TimeDelta::Millis(rtt_ms); @@ -559,11 +546,11 @@ void RtpTransportControllerSend::OnReceivedRtcpReceiverReport( void RtpTransportControllerSend::OnAddPacket( const RtpPacketSendInfo& packet_info) { - feedback_demuxer_.AddPacket(packet_info); - Timestamp creation_time = Timestamp::Millis(clock_->TimeInMilliseconds()); - task_queue_.PostTask([this, packet_info, creation_time]() { + + task_queue_.RunOrPost([this, packet_info, creation_time]() { RTC_DCHECK_RUN_ON(&task_queue_); + feedback_demuxer_.AddPacket(packet_info); transport_feedback_adapter_.AddPacket( packet_info, send_side_bwe_with_overhead_ ? transport_overhead_bytes_per_packet_ : 0, @@ -573,10 +560,10 @@ void RtpTransportControllerSend::OnAddPacket( void RtpTransportControllerSend::OnTransportFeedback( const rtcp::TransportFeedback& feedback) { - feedback_demuxer_.OnTransportFeedback(feedback); auto feedback_time = Timestamp::Millis(clock_->TimeInMilliseconds()); - task_queue_.PostTask([this, feedback, feedback_time]() { + task_queue_.RunOrPost([this, feedback, feedback_time]() { RTC_DCHECK_RUN_ON(&task_queue_); + feedback_demuxer_.OnTransportFeedback(feedback); absl::optional feedback_msg = transport_feedback_adapter_.ProcessTransportFeedback(feedback, feedback_time); @@ -584,10 +571,8 @@ void RtpTransportControllerSend::OnTransportFeedback( if (controller_) PostUpdates(controller_->OnTransportPacketsFeedback(*feedback_msg)); - // Only update outstanding data in pacer if any packet is first time - // acked. - pacer()->UpdateOutstandingData( - transport_feedback_adapter_.GetOutstandingData()); + // Only update outstanding data if any packet is first time acked. + UpdateCongestedState(); } }); } @@ -599,7 +584,7 @@ void RtpTransportControllerSend::OnRemoteNetworkEstimate( estimate.link_capacity_lower, estimate.link_capacity_upper)); } estimate.update_time = Timestamp::Millis(clock_->TimeInMilliseconds()); - task_queue_.PostTask([this, estimate] { + task_queue_.RunOrPost([this, estimate] { RTC_DCHECK_RUN_ON(&task_queue_); if (controller_) PostUpdates(controller_->OnNetworkStateEstimate(estimate)); @@ -641,11 +626,13 @@ void RtpTransportControllerSend::UpdateInitialConstraints( } void RtpTransportControllerSend::StartProcessPeriodicTasks() { + RTC_DCHECK_RUN_ON(&task_queue_); if (!pacer_queue_update_task_.Running()) { pacer_queue_update_task_ = RepeatingTaskHandle::DelayedStart( - task_queue_.Get(), kPacerQueueUpdateInterval, [this]() { + task_queue_.TaskQueueForDelayedTasks(), kPacerQueueUpdateInterval, + [this]() { RTC_DCHECK_RUN_ON(&task_queue_); - TimeDelta expected_queue_time = pacer()->ExpectedQueueTime(); + TimeDelta expected_queue_time = pacer_.ExpectedQueueTime(); control_handler_->SetPacerQueue(expected_queue_time); UpdateControlState(); return kPacerQueueUpdateInterval; @@ -654,7 +641,7 @@ void RtpTransportControllerSend::StartProcessPeriodicTasks() { controller_task_.Stop(); if (process_interval_.IsFinite()) { controller_task_ = RepeatingTaskHandle::DelayedStart( - task_queue_.Get(), process_interval_, [this]() { + task_queue_.TaskQueueForDelayedTasks(), process_interval_, [this]() { RTC_DCHECK_RUN_ON(&task_queue_); UpdateControllerWithTimeInterval(); return process_interval_; @@ -667,7 +654,7 @@ void RtpTransportControllerSend::UpdateControllerWithTimeInterval() { ProcessInterval msg; msg.at_time = Timestamp::Millis(clock_->TimeInMilliseconds()); if (add_pacing_to_cwin_) - msg.pacer_queue = pacer()->QueueSizeData(); + msg.pacer_queue = pacer_.QueueSizeData(); PostUpdates(controller_->OnProcessInterval(msg)); } @@ -679,14 
+666,15 @@ void RtpTransportControllerSend::UpdateStreamsConfig() { void RtpTransportControllerSend::PostUpdates(NetworkControlUpdate update) { if (update.congestion_window) { - pacer()->SetCongestionWindow(*update.congestion_window); + congestion_window_size_ = *update.congestion_window; + UpdateCongestedState(); } if (update.pacer_config) { - pacer()->SetPacingRates(update.pacer_config->data_rate(), - update.pacer_config->pad_rate()); + pacer_.SetPacingRates(update.pacer_config->data_rate(), + update.pacer_config->pad_rate()); } - for (const auto& probe : update.probe_cluster_configs) { - pacer()->CreateProbeCluster(probe.target_data_rate, probe.id); + if (!update.probe_cluster_configs.empty()) { + pacer_.CreateProbeClusters(std::move(update.probe_cluster_configs)); } if (update.target_rate) { control_handler_->SetTargetRate(*update.target_rate); diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h index 62af78ceb8..88f5b2bae4 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h @@ -17,8 +17,11 @@ #include #include +#include "absl/strings/string_view.h" #include "api/network_state_predictor.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" #include "api/transport/network_control.h" #include "api/units/data_rate.h" #include "call/rtp_bitrate_configurator.h" @@ -27,12 +30,10 @@ #include "modules/congestion_controller/rtp/control_handler.h" #include "modules/congestion_controller/rtp/transport_feedback_adapter.h" #include "modules/congestion_controller/rtp/transport_feedback_demuxer.h" -#include "modules/pacing/paced_sender.h" #include "modules/pacing/packet_router.h" #include "modules/pacing/rtp_packet_pacer.h" #include "modules/pacing/task_queue_paced_sender.h" -#include "modules/utility/include/process_thread.h" -#include "rtc_base/constructor_magic.h" +#include "modules/utility/maybe_worker_thread.h" #include "rtc_base/network_route.h" #include "rtc_base/race_checker.h" #include "rtc_base/task_queue.h" @@ -43,9 +44,6 @@ class Clock; class FrameEncryptorInterface; class RtcEventLog; -// TODO(nisse): When we get the underlying transports here, we should -// have one object implementing RtpTransportControllerSendInterface -// per transport, sharing the same congestion controller. class RtpTransportControllerSend final : public RtpTransportControllerSendInterface, public RtcpBandwidthObserver, @@ -58,11 +56,14 @@ class RtpTransportControllerSend final NetworkStatePredictorFactoryInterface* predictor_factory, NetworkControllerFactoryInterface* controller_factory, const BitrateConstraints& bitrate_config, - std::unique_ptr process_thread, TaskQueueFactory* task_queue_factory, - const WebRtcKeyValueConfig* trials); + const FieldTrialsView& trials); ~RtpTransportControllerSend() override; + RtpTransportControllerSend(const RtpTransportControllerSend&) = delete; + RtpTransportControllerSend& operator=(const RtpTransportControllerSend&) = + delete; + // TODO(tommi): Change to std::unique_ptr<>. 
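
The congestion handling added above (PostUpdates storing the window size, UpdateCongestedState comparing it against outstanding data) replaces the old pattern of pushing outstanding-data sizes into the pacer. A standalone sketch of that gating, with plain integers in place of DataSize and a callback in place of the pacer (names simplified, not the upstream class):

#include <cstdint>
#include <functional>
#include <utility>

// Minimal sketch of the congested-state gating: the pacer is only notified
// when the flag actually flips, instead of being fed outstanding-data sizes.
class CongestionGate {
 public:
  explicit CongestionGate(std::function<void(bool)> set_pacer_congested)
      : set_pacer_congested_(std::move(set_pacer_congested)) {}

  void OnCongestionWindow(int64_t window_bytes) {
    window_bytes_ = window_bytes;
    Update();
  }
  void OnOutstandingData(int64_t outstanding_bytes) {
    outstanding_bytes_ = outstanding_bytes;
    Update();
  }

 private:
  void Update() {
    const bool congested = outstanding_bytes_ >= window_bytes_;
    if (congested != is_congested_) {
      is_congested_ = congested;
      set_pacer_congested_(congested);
    }
  }

  std::function<void(bool)> set_pacer_congested_;
  int64_t window_bytes_ = INT64_MAX;  // "plus infinity" until an update arrives
  int64_t outstanding_bytes_ = 0;
  bool is_congested_ = false;
};
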
RtpVideoSenderInterface* CreateRtpVideoSender( const std::map& suspended_ssrcs, @@ -80,7 +81,7 @@ class RtpTransportControllerSend final RtpVideoSenderInterface* rtp_video_sender) override; // Implements RtpTransportControllerSendInterface - rtc::TaskQueue* GetWorkerQueue() override; + MaybeWorkerThread* GetWorkerQueue() override; PacketRouter* packet_router() override; NetworkStateEstimateObserver* network_state_estimate_observer() override; @@ -94,7 +95,7 @@ class RtpTransportControllerSend final StreamFeedbackProvider* GetStreamFeedbackProvider() override; void RegisterTargetTransferRateObserver( TargetTransferRateObserver* observer) override; - void OnNetworkRouteChanged(const std::string& transport_name, + void OnNetworkRouteChanged(absl::string_view transport_name, const rtc::NetworkRoute& network_route) override; void OnNetworkAvailability(bool network_available) override; RtcpBandwidthObserver* GetBandwidthObserver() override; @@ -129,11 +130,8 @@ class RtpTransportControllerSend final private: struct PacerSettings { - explicit PacerSettings(const WebRtcKeyValueConfig* trials); - - bool use_task_queue_pacer() const { return !tq_disabled.Get(); } + explicit PacerSettings(const FieldTrialsView& trials); - FieldTrialFlag tq_disabled; // Kill-switch not normally used. FieldTrialParameter holdback_window; FieldTrialParameter holdback_packets; }; @@ -155,11 +153,11 @@ class RtpTransportControllerSend final RTC_RUN_ON(task_queue_); void PostUpdates(NetworkControlUpdate update) RTC_RUN_ON(task_queue_); void UpdateControlState() RTC_RUN_ON(task_queue_); - RtpPacketPacer* pacer(); - const RtpPacketPacer* pacer() const; + void UpdateCongestedState() RTC_RUN_ON(task_queue_); Clock* const clock_; RtcEventLog* const event_log_; + TaskQueueFactory* const task_queue_factory_; SequenceChecker main_thread_; PacketRouter packet_router_; std::vector> video_rtp_senders_ @@ -167,10 +165,8 @@ class RtpTransportControllerSend final RtpBitrateConfigurator bitrate_configurator_; std::map network_routes_; bool pacer_started_; - const std::unique_ptr process_thread_; const PacerSettings pacer_settings_; - std::unique_ptr process_thread_pacer_; - std::unique_ptr task_queue_pacer_; + TaskQueuePacedSender pacer_; TargetTransferRateObserver* observer_ RTC_GUARDED_BY(task_queue_); TransportFeedbackDemuxer feedback_demuxer_; @@ -208,14 +204,16 @@ class RtpTransportControllerSend final RepeatingTaskHandle pacer_queue_update_task_ RTC_GUARDED_BY(task_queue_); RepeatingTaskHandle controller_task_ RTC_GUARDED_BY(task_queue_); + DataSize congestion_window_size_ RTC_GUARDED_BY(task_queue_); + bool is_congested_ RTC_GUARDED_BY(task_queue_); + // Protected by internal locks. RateLimiter retransmission_rate_limiter_; - // TODO(perkj): `task_queue_` is supposed to replace `process_thread_`. - // `task_queue_` is defined last to ensure all pending tasks are cancelled - // and deleted before any other members. 
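
The ScopedTaskSafety member introduced just below, together with the SafeTask discussion in OnSentPacket earlier in this patch, exists so that tasks posted to a thread that outlives this object become no-ops after destruction. A much simplified sketch of that flag pattern (not the WebRTC implementation):

#include <atomic>
#include <functional>
#include <memory>
#include <utility>

// Simplified pending-task safety flag: a posted closure becomes a no-op once
// the owner is gone. Assumes, as in the patch, that the flag is destroyed and
// the wrapped task runs on the same task queue.
class TaskSafetyFlag {
 public:
  ~TaskSafetyFlag() { alive_->store(false); }

  std::function<void()> Wrap(std::function<void()> task) const {
    // The closure shares ownership of the flag, so checking it is always valid.
    return [alive = alive_, task = std::move(task)]() {
      if (alive->load())
        task();
    };
  }

 private:
  std::shared_ptr<std::atomic<bool>> alive_ =
      std::make_shared<std::atomic<bool>>(true);
};

// Usage (hypothetical queue): queue.PostTask(safety.Wrap([this] { DoWork(); }));
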
- rtc::TaskQueue task_queue_; - RTC_DISALLOW_COPY_AND_ASSIGN(RtpTransportControllerSend); + ScopedTaskSafety safety_; + MaybeWorkerThread task_queue_; + + const FieldTrialsView& field_trials_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_factory.h b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_factory.h index a857ca7e6f..8cdae8cfbe 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_factory.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_factory.h @@ -23,12 +23,12 @@ class RtpTransportControllerSendFactory public: std::unique_ptr Create( const RtpTransportConfig& config, - Clock* clock, - std::unique_ptr process_thread) override { + Clock* clock) override { + RTC_CHECK(config.trials); return std::make_unique( clock, config.event_log, config.network_state_predictor_factory, config.network_controller_factory, config.bitrate_config, - std::move(process_thread), config.task_queue_factory, config.trials); + config.task_queue_factory, *config.trials); } virtual ~RtpTransportControllerSendFactory() {} diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_factory_interface.h b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_factory_interface.h index a0218532a1..0f4c36c221 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_factory_interface.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_factory_interface.h @@ -14,7 +14,6 @@ #include "call/rtp_transport_config.h" #include "call/rtp_transport_controller_send_interface.h" -#include "modules/utility/include/process_thread.h" namespace webrtc { // A factory used for dependency injection on the send side of the transport @@ -23,8 +22,7 @@ class RtpTransportControllerSendFactoryInterface { public: virtual std::unique_ptr Create( const RtpTransportConfig& config, - Clock* clock, - std::unique_ptr process_thread) = 0; + Clock* clock) = 0; virtual ~RtpTransportControllerSendFactoryInterface() {} }; diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_interface.h b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_interface.h index f68c4bf3dd..44df5aa736 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_interface.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_interface.h @@ -18,6 +18,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/crypto/crypto_options.h" #include "api/fec_controller.h" @@ -41,6 +42,7 @@ class TaskQueue; namespace webrtc { class FrameEncryptorInterface; +class MaybeWorkerThread; class TargetTransferRateObserver; class Transport; class PacketRouter; @@ -92,7 +94,9 @@ struct RtpSenderFrameEncryptionConfig { class RtpTransportControllerSendInterface { public: virtual ~RtpTransportControllerSendInterface() {} - virtual rtc::TaskQueue* GetWorkerQueue() = 0; + // TODO(webrtc:14502): Remove MaybeWorkerThread when experiment has been + // evaluated. 
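
GetWorkerQueue() now hands out a MaybeWorkerThread, and most PostTask calls in rtp_transport_controller_send.cc above became RunOrPost: run the closure synchronously when already on the owning sequence, otherwise queue it. A simplified standalone sketch of that pattern (the pending-task queue below is purely illustrative, not the WebRTC class):

#include <functional>
#include <mutex>
#include <thread>
#include <vector>

class MaybeWorkerQueue {
 public:
  bool IsCurrent() const { return std::this_thread::get_id() == owner_; }

  // Run synchronously when already on the owning thread, otherwise queue.
  void RunOrPost(std::function<void()> task) {
    if (IsCurrent()) {
      task();
    } else {
      std::lock_guard<std::mutex> lock(mutex_);
      pending_.push_back(std::move(task));
    }
  }

  // Drained by the owning thread (e.g. from its event loop).
  void RunPending() {
    std::vector<std::function<void()>> tasks;
    {
      std::lock_guard<std::mutex> lock(mutex_);
      tasks.swap(pending_);
    }
    for (auto& t : tasks) t();
  }

 private:
  const std::thread::id owner_ = std::this_thread::get_id();
  std::mutex mutex_;
  std::vector<std::function<void()>> pending_;
};
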
+ virtual MaybeWorkerThread* GetWorkerQueue() = 0; virtual PacketRouter* packet_router() = 0; virtual RtpVideoSenderInterface* CreateRtpVideoSender( @@ -127,7 +131,7 @@ class RtpTransportControllerSendInterface { virtual void RegisterTargetTransferRateObserver( TargetTransferRateObserver* observer) = 0; virtual void OnNetworkRouteChanged( - const std::string& transport_name, + absl::string_view transport_name, const rtc::NetworkRoute& network_route) = 0; virtual void OnNetworkAvailability(bool network_available) = 0; virtual RtcpBandwidthObserver* GetBandwidthObserver() = 0; diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc index b953705459..5d2d1f1288 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc @@ -17,7 +17,9 @@ #include "absl/algorithm/container.h" #include "absl/strings/match.h" +#include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/task_queue/task_queue_factory.h" #include "api/transport/field_trial_based_config.h" #include "api/video_codecs/video_codec.h" #include "call/rtp_transport_controller_send_interface.h" @@ -25,10 +27,9 @@ #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/rtp_rtcp/source/rtp_sender.h" -#include "modules/utility/include/process_thread.h" +#include "modules/utility/maybe_worker_thread.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/task_queue.h" #include "rtc_base/trace_event.h" @@ -56,9 +57,10 @@ static const size_t kPathMTU = 1500; using webrtc_internal_rtp_video_sender::RtpStreamSender; -bool PayloadTypeSupportsSkippingFecPackets(const std::string& payload_name, - const WebRtcKeyValueConfig& trials) { - const VideoCodecType codecType = PayloadStringToCodecType(payload_name); +bool PayloadTypeSupportsSkippingFecPackets(absl::string_view payload_name, + const FieldTrialsView& trials) { + const VideoCodecType codecType = + PayloadStringToCodecType(std::string(payload_name)); if (codecType == kVideoCodecVP8 || codecType == kVideoCodecVP9) { return true; } @@ -71,7 +73,7 @@ bool PayloadTypeSupportsSkippingFecPackets(const std::string& payload_name, bool ShouldDisableRedAndUlpfec(bool flexfec_enabled, const RtpConfig& rtp_config, - const WebRtcKeyValueConfig& trials) { + const FieldTrialsView& trials) { // Consistency of NACK and RED+ULPFEC parameters is checked in this function. const bool nack_enabled = rtp_config.nack.rtp_history_ms > 0; @@ -127,7 +129,7 @@ std::unique_ptr MaybeCreateFecGenerator( const RtpConfig& rtp, const std::map& suspended_ssrcs, int simulcast_index, - const WebRtcKeyValueConfig& trials) { + const FieldTrialsView& trials) { // If flexfec is configured that takes priority. 
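
MaybeCreateFecGenerator, continued just below, picks at most one FEC scheme per stream. A condensed sketch of that priority order using only the configuration fields visible in this hunk (struct and function names are illustrative):

#include <optional>

enum class FecType { kFlexfec, kUlpfec };

// Hypothetical condensation of the relevant RtpConfig fields.
struct FecConfig {
  int flexfec_payload_type = -1;  // >= 0 means FlexFEC is configured
  int ulpfec_payload_type = -1;
  int red_payload_type = -1;
};

// Roughly mirrors the priority order below: FlexFEC wins when configured,
// RED+ULPFEC only when both payload types are set, otherwise no FEC.
// (The additional ShouldDisableRedAndUlpfec checks are omitted here.)
std::optional<FecType> ChooseFec(const FecConfig& fec) {
  if (fec.flexfec_payload_type >= 0)
    return FecType::kFlexfec;
  if (fec.red_payload_type >= 0 && fec.ulpfec_payload_type >= 0)
    return FecType::kUlpfec;
  return std::nullopt;
}
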
if (rtp.flexfec.payload_type >= 0) { RTC_DCHECK_GE(rtp.flexfec.payload_type, 0); @@ -198,8 +200,10 @@ std::vector CreateRtpStreamSenders( FrameEncryptorInterface* frame_encryptor, const CryptoOptions& crypto_options, rtc::scoped_refptr frame_transformer, - const WebRtcKeyValueConfig& trials) { + const FieldTrialsView& trials, + TaskQueueFactory* task_queue_factory) { RTC_DCHECK_GT(rtp_config.ssrcs.size(), 0); + RTC_DCHECK(task_queue_factory); RtpRtcpInterface::Configuration configuration; configuration.clock = clock; @@ -237,6 +241,12 @@ std::vector CreateRtpStreamSenders( RTC_DCHECK(rtp_config.rtx.ssrcs.empty() || rtp_config.rtx.ssrcs.size() == rtp_config.ssrcs.size()); + + // Some streams could have been disabled, but the rids are still there. + // This will occur when simulcast has been disabled for a codec (e.g. VP9) + RTC_DCHECK(rtp_config.rids.empty() || + rtp_config.rids.size() >= rtp_config.ssrcs.size()); + for (size_t i = 0; i < rtp_config.ssrcs.size(); ++i) { RTPSenderVideo::Config video_config; configuration.local_media_ssrc = rtp_config.ssrcs[i]; @@ -250,6 +260,8 @@ std::vector CreateRtpStreamSenders( RTC_DCHECK_EQ(configuration.rtx_send_ssrc.has_value(), !rtp_config.rtx.ssrcs.empty()); + configuration.rid = (i < rtp_config.rids.size()) ? rtp_config.rids[i] : ""; + configuration.need_rtp_packet_infos = rtp_config.lntf.enabled; std::unique_ptr rtp_rtcp( @@ -282,7 +294,7 @@ std::vector CreateRtpStreamSenders( video_config.fec_overhead_bytes = fec_generator->MaxPacketOverhead(); } video_config.frame_transformer = frame_transformer; - video_config.send_transport_queue = transport->GetWorkerQueue()->Get(); + video_config.task_queue_factory = task_queue_factory; auto sender_video = std::make_unique(video_config); rtp_streams.emplace_back(std::move(rtp_rtcp), std::move(sender_video), std::move(fec_generator)); @@ -359,17 +371,17 @@ RtpVideoSender::RtpVideoSender( std::unique_ptr fec_controller, FrameEncryptorInterface* frame_encryptor, const CryptoOptions& crypto_options, - rtc::scoped_refptr frame_transformer) - : send_side_bwe_with_overhead_(!absl::StartsWith( + rtc::scoped_refptr frame_transformer, + const FieldTrialsView& field_trials, + TaskQueueFactory* task_queue_factory) + : field_trials_(field_trials), + send_side_bwe_with_overhead_(!absl::StartsWith( field_trials_.Lookup("WebRTC-SendSideBwe-WithOverhead"), "Disabled")), use_frame_rate_for_overhead_(absl::StartsWith( field_trials_.Lookup("WebRTC-Video-UseFrameRateForOverhead"), "Enabled")), has_packet_feedback_(TransportSeqNumExtensionConfigured(rtp_config)), - simulate_generic_structure_(absl::StartsWith( - field_trials_.Lookup("WebRTC-GenericCodecDependencyDescriptor"), - "Enabled")), active_(false), fec_controller_(std::move(fec_controller)), fec_allowed_(true), @@ -386,7 +398,8 @@ RtpVideoSender::RtpVideoSender( frame_encryptor, crypto_options, std::move(frame_transformer), - field_trials_)), + field_trials_, + task_queue_factory)), rtp_config_(rtp_config), codec_type_(GetVideoCodecType(rtp_config)), transport_(transport), @@ -394,6 +407,7 @@ RtpVideoSender::RtpVideoSender( encoder_target_rate_bps_(0), frame_counts_(rtp_config.ssrcs.size()), frame_count_observer_(observers.frame_count_observer) { + transport_checker_.Detach(); RTC_DCHECK_EQ(rtp_config_.ssrcs.size(), rtp_streams_.size()); if (send_side_bwe_with_overhead_ && has_packet_feedback_) transport_->IncludeOverheadInPacedSender(); @@ -421,7 +435,6 @@ RtpVideoSender::RtpVideoSender( } ConfigureSsrcs(suspended_ssrcs); - ConfigureRids(); if (!rtp_config_.mid.empty()) 
{ for (const RtpStreamSender& stream : rtp_streams_) { @@ -445,9 +458,6 @@ RtpVideoSender::RtpVideoSender( fec_controller_->SetProtectionMethod(fec_enabled, NackEnabled()); fec_controller_->SetProtectionCallback(this); - // Signal congestion controller this object is ready for OnPacket* callbacks. - transport_->GetStreamFeedbackProvider()->RegisterStreamFeedbackObserver( - rtp_config_.ssrcs, this); // Construction happens on the worker thread (see Call::CreateVideoSendStream) // but subseqeuent calls to the RTP state will happen on one of two threads: @@ -460,27 +470,44 @@ RtpVideoSender::RtpVideoSender( } RtpVideoSender::~RtpVideoSender() { + // TODO(bugs.webrtc.org/13517): Remove once RtpVideoSender gets deleted on the + // transport task queue. + transport_checker_.Detach(); + SetActiveModulesLocked( std::vector(rtp_streams_.size(), /*active=*/false)); - transport_->GetStreamFeedbackProvider()->DeRegisterStreamFeedbackObserver( - this); + + RTC_DCHECK(!registered_for_feedback_); } void RtpVideoSender::SetActive(bool active) { + RTC_DCHECK_RUN_ON(&transport_checker_); MutexLock lock(&mutex_); if (active_ == active) return; + const std::vector active_modules(rtp_streams_.size(), active); SetActiveModulesLocked(active_modules); + + auto* feedback_provider = transport_->GetStreamFeedbackProvider(); + if (active && !registered_for_feedback_) { + feedback_provider->RegisterStreamFeedbackObserver(rtp_config_.ssrcs, this); + registered_for_feedback_ = true; + } else if (!active && registered_for_feedback_) { + feedback_provider->DeRegisterStreamFeedbackObserver(this); + registered_for_feedback_ = false; + } } void RtpVideoSender::SetActiveModules(const std::vector active_modules) { + RTC_DCHECK_RUN_ON(&transport_checker_); MutexLock lock(&mutex_); return SetActiveModulesLocked(active_modules); } void RtpVideoSender::SetActiveModulesLocked( const std::vector active_modules) { + RTC_DCHECK_RUN_ON(&transport_checker_); RTC_DCHECK_EQ(rtp_streams_.size(), active_modules.size()); active_ = false; for (size_t i = 0; i < active_modules.size(); ++i) { @@ -489,7 +516,7 @@ void RtpVideoSender::SetActiveModulesLocked( } RtpRtcpInterface& rtp_module = *rtp_streams_[i].rtp_rtcp; - const bool was_active = rtp_module.SendingMedia(); + const bool was_active = rtp_module.Sending(); const bool should_be_active = active_modules[i]; // Sends a kRtcpByeCode when going from true to false. @@ -514,6 +541,7 @@ void RtpVideoSender::SetActiveModulesLocked( } bool RtpVideoSender::IsActive() { + RTC_DCHECK_RUN_ON(&transport_checker_); MutexLock lock(&mutex_); return IsActiveLocked(); } @@ -566,32 +594,21 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( } if (IsFirstFrameOfACodedVideoSequence(encoded_image, codec_specific_info)) { - // If encoder adapter produce FrameDependencyStructure, pass it so that - // dependency descriptor rtp header extension can be used. - // If not supported, disable using dependency descriptor by passing nullptr. + // In order to use the dependency descriptor RTP header extension: + // - Pass along any `FrameDependencyStructure` templates produced by the + // encoder adapter. + // - If none were produced the `RtpPayloadParams::*ToGeneric` for the + // particular codec have simulated a dependency structure, so provide a + // minimal set of templates. + // - Otherwise, don't pass along any templates at all which will disable + // the generation of a dependency descriptor. 
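
The comment above describes a three-way choice that the code following it implements. A condensed sketch of that decision, with placeholder types in place of CodecSpecificInfo and FrameDependencyStructure (not the upstream signatures):

#include <optional>

// Placeholder types; only the shape of the decision is shown.
struct Structure {};
struct CodecInfo {
  std::optional<Structure> template_structure;  // provided by the encoder
};

// Returns the structure to install on the RTP sender, or nullopt to leave the
// dependency descriptor extension disabled for this stream.
std::optional<Structure> ChooseStructure(const CodecInfo* codec_info,
                                         std::optional<Structure> simulated) {
  if (codec_info && codec_info->template_structure)
    return codec_info->template_structure;  // encoder-provided templates win
  if (simulated)
    return simulated;   // fall back to the simulated generic structure
  return std::nullopt;  // nothing usable: keep the extension off
}
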
RTPSenderVideo& sender_video = *rtp_streams_[stream_index].sender_video; if (codec_specific_info && codec_specific_info->template_structure) { sender_video.SetVideoStructure(&*codec_specific_info->template_structure); - } else if (codec_specific_info && - codec_specific_info->codecType == kVideoCodecVP9) { - const CodecSpecificInfoVP9& vp9 = codec_specific_info->codecSpecific.VP9; - - FrameDependencyStructure structure = - RtpPayloadParams::MinimalisticStructure(vp9.num_spatial_layers, - kMaxTemporalStreams); - if (vp9.ss_data_available && vp9.spatial_layer_resolution_present) { - for (size_t i = 0; i < vp9.num_spatial_layers; ++i) { - structure.resolutions.emplace_back(vp9.width[i], vp9.height[i]); - } - } - sender_video.SetVideoStructure(&structure); - } else if (simulate_generic_structure_ && codec_specific_info && - codec_specific_info->codecType == kVideoCodecGeneric) { - FrameDependencyStructure structure = - RtpPayloadParams::MinimalisticStructure( - /*num_spatial_layers=*/1, - /*num_temporal_layers=*/1); - sender_video.SetVideoStructure(&structure); + } else if (absl::optional structure = + params_[stream_index].GenericStructure( + codec_specific_info)) { + sender_video.SetVideoStructure(&*structure); } else { sender_video.SetVideoStructure(nullptr); } @@ -622,6 +639,7 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( void RtpVideoSender::OnBitrateAllocationUpdated( const VideoBitrateAllocation& bitrate) { + RTC_DCHECK_RUN_ON(&transport_checker_); MutexLock lock(&mutex_); if (IsActiveLocked()) { if (rtp_streams_.size() == 1) { @@ -656,6 +674,14 @@ void RtpVideoSender::OnVideoLayersAllocationUpdated( stream_allocation.rtp_stream_index = i; rtp_streams_[i].sender_video->SetVideoLayersAllocation( std::move(stream_allocation)); + // Only send video frames on the rtp module if the encoder is configured + // to send. This is to prevent stray frames to be sent after an encoder + // has been reconfigured. + rtp_streams_[i].rtp_rtcp->SetSendingMediaStatus( + absl::c_any_of(allocation.active_spatial_layers, + [&i](const VideoLayersAllocation::SpatialLayer layer) { + return layer.rtp_stream_index == static_cast(i); + })); } } } @@ -729,18 +755,6 @@ void RtpVideoSender::ConfigureSsrcs( } } -void RtpVideoSender::ConfigureRids() { - if (rtp_config_.rids.empty()) - return; - - // Some streams could have been disabled, but the rids are still there. - // This will occur when simulcast has been disabled for a codec (e.g. VP9) - RTC_DCHECK(rtp_config_.rids.size() >= rtp_streams_.size()); - for (size_t i = 0; i < rtp_streams_.size(); ++i) { - rtp_streams_[i].rtp_rtcp->SetRid(rtp_config_.rids[i]); - } -} - void RtpVideoSender::OnNetworkAvailability(bool network_available) { for (const RtpStreamSender& stream : rtp_streams_) { stream.rtp_rtcp->SetRTCPStatus(network_available ? 
rtp_config_.rtcp_mode diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h index 7e5de98763..9804bd8630 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h @@ -21,9 +21,11 @@ #include "api/call/transport.h" #include "api/fec_controller.h" #include "api/fec_controller_override.h" +#include "api/field_trials_view.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/sequence_checker.h" -#include "api/transport/field_trial_based_config.h" +#include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" #include "api/video_codecs/video_encoder.h" #include "call/rtp_config.h" #include "call/rtp_payload_params.h" @@ -35,7 +37,6 @@ #include "modules/rtp_rtcp/source/rtp_sender_video.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/rate_limiter.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -86,15 +87,20 @@ class RtpVideoSender : public RtpVideoSenderInterface, std::unique_ptr fec_controller, FrameEncryptorInterface* frame_encryptor, const CryptoOptions& crypto_options, // move inside RtpTransport - rtc::scoped_refptr frame_transformer); + rtc::scoped_refptr frame_transformer, + const FieldTrialsView& field_trials, + TaskQueueFactory* task_queue_factory); ~RtpVideoSender() override; + RtpVideoSender(const RtpVideoSender&) = delete; + RtpVideoSender& operator=(const RtpVideoSender&) = delete; + // RtpVideoSender will only route packets if being active, all packets will be // dropped otherwise. void SetActive(bool active) RTC_LOCKS_EXCLUDED(mutex_) override; // Sets the sending status of the rtp modules and appropriately sets the // payload router to active if any rtp modules are active. - void SetActiveModules(const std::vector active_modules) + void SetActiveModules(std::vector active_modules) RTC_LOCKS_EXCLUDED(mutex_) override; bool IsActive() RTC_LOCKS_EXCLUDED(mutex_) override; @@ -151,12 +157,11 @@ class RtpVideoSender : public RtpVideoSenderInterface, private: bool IsActiveLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - void SetActiveModulesLocked(const std::vector active_modules) + void SetActiveModulesLocked(std::vector active_modules) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void UpdateModuleSendingState() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void ConfigureProtection(); void ConfigureSsrcs(const std::map& suspended_ssrcs); - void ConfigureRids(); bool NackEnabled() const; uint32_t GetPacketizationOverheadRate() const; DataRate CalculateOverheadRate(DataRate data_rate, @@ -164,16 +169,20 @@ class RtpVideoSender : public RtpVideoSenderInterface, DataSize overhead_per_packet, Frequency framerate) const; - const FieldTrialBasedConfig field_trials_; + const FieldTrialsView& field_trials_; const bool send_side_bwe_with_overhead_; const bool use_frame_rate_for_overhead_; const bool has_packet_feedback_; - const bool simulate_generic_structure_; - // TODO(holmer): Remove mutex_ once RtpVideoSender runs on the + // Semantically equivalent to checking for `transport_->GetWorkerQueue()` + // but some tests need to be updated to call from the correct context. + RTC_NO_UNIQUE_ADDRESS SequenceChecker transport_checker_; + + // TODO(bugs.webrtc.org/13517): Remove mutex_ once RtpVideoSender runs on the // transport task queue. 
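
The registered_for_feedback_ flag declared a few lines below pairs with the SetActive change earlier in this file: feedback observers are registered only while the sender is active. A standalone sketch of that pattern (callbacks stand in for the StreamFeedbackProvider):

#include <functional>
#include <utility>

// Register-while-active: the flag keeps register/deregister calls balanced.
class FeedbackRegistration {
 public:
  FeedbackRegistration(std::function<void()> do_register,
                       std::function<void()> do_deregister)
      : register_(std::move(do_register)),
        deregister_(std::move(do_deregister)) {}

  void SetActive(bool active) {
    if (active && !registered_) {
      register_();
      registered_ = true;
    } else if (!active && registered_) {
      deregister_();
      registered_ = false;
    }
  }

 private:
  std::function<void()> register_;
  std::function<void()> deregister_;
  bool registered_ = false;
};
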
mutable Mutex mutex_; bool active_ RTC_GUARDED_BY(mutex_); + bool registered_for_feedback_ RTC_GUARDED_BY(transport_checker_) = false; const std::unique_ptr fec_controller_; bool fec_allowed_ RTC_GUARDED_BY(mutex_); @@ -205,8 +214,6 @@ class RtpVideoSender : public RtpVideoSenderInterface, // This map is set at construction time and never changed, but it's // non-trivial to make it properly const. std::map ssrc_to_rtp_module_; - - RTC_DISALLOW_COPY_AND_ASSIGN(RtpVideoSender); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender_interface.h b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender_interface.h index a0b4baccb4..acb68e3ae2 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender_interface.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender_interface.h @@ -36,7 +36,7 @@ class RtpVideoSenderInterface : public EncodedImageCallback, virtual void SetActive(bool active) = 0; // Sets the sending status of the rtp modules and appropriately sets the // RtpVideoSender to active if any rtp modules are active. - virtual void SetActiveModules(const std::vector active_modules) = 0; + virtual void SetActiveModules(std::vector active_modules) = 0; virtual bool IsActive() = 0; virtual void OnNetworkAvailability(bool network_available) = 0; diff --git a/TMessagesProj/jni/voip/webrtc/call/rtx_receive_stream.cc b/TMessagesProj/jni/voip/webrtc/call/rtx_receive_stream.cc index c0b138b416..6c5fa3f859 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtx_receive_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtx_receive_stream.cc @@ -32,6 +32,7 @@ RtxReceiveStream::RtxReceiveStream( associated_payload_types_(std::move(associated_payload_types)), media_ssrc_(media_ssrc), rtp_receive_statistics_(rtp_receive_statistics) { + packet_checker_.Detach(); if (associated_payload_types_.empty()) { RTC_LOG(LS_WARNING) << "RtxReceiveStream created with empty payload type mapping."; @@ -40,7 +41,14 @@ RtxReceiveStream::RtxReceiveStream( RtxReceiveStream::~RtxReceiveStream() = default; +void RtxReceiveStream::SetAssociatedPayloadTypes( + std::map associated_payload_types) { + RTC_DCHECK_RUN_ON(&packet_checker_); + associated_payload_types_ = std::move(associated_payload_types); +} + void RtxReceiveStream::OnRtpPacket(const RtpPacketReceived& rtx_packet) { + RTC_DCHECK_RUN_ON(&packet_checker_); if (rtp_receive_statistics_) { rtp_receive_statistics_->OnRtpPacket(rtx_packet); } @@ -52,9 +60,9 @@ void RtxReceiveStream::OnRtpPacket(const RtpPacketReceived& rtx_packet) { auto it = associated_payload_types_.find(rtx_packet.PayloadType()); if (it == associated_payload_types_.end()) { - RTC_LOG(LS_VERBOSE) << "Unknown payload type " - << static_cast(rtx_packet.PayloadType()) - << " on rtx ssrc " << rtx_packet.Ssrc(); + RTC_DLOG(LS_VERBOSE) << "Unknown payload type " + << static_cast(rtx_packet.PayloadType()) + << " on rtx ssrc " << rtx_packet.Ssrc(); return; } RtpPacketReceived media_packet; diff --git a/TMessagesProj/jni/voip/webrtc/call/rtx_receive_stream.h b/TMessagesProj/jni/voip/webrtc/call/rtx_receive_stream.h index a389fc2a57..79b03d306b 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtx_receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtx_receive_stream.h @@ -14,7 +14,9 @@ #include #include +#include "api/sequence_checker.h" #include "call/rtp_packet_sink_interface.h" +#include "rtc_base/system/no_unique_address.h" namespace webrtc { @@ -33,13 +35,19 @@ class RtxReceiveStream : public RtpPacketSinkInterface { // RtpStreamReceiverController. 
ReceiveStatistics* rtp_receive_statistics = nullptr); ~RtxReceiveStream() override; + + // Update payload types post construction. Must be called from the same + // calling context as `OnRtpPacket` is called on. + void SetAssociatedPayloadTypes(std::map associated_payload_types); + // RtpPacketSinkInterface. void OnRtpPacket(const RtpPacketReceived& packet) override; private: + RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_checker_; RtpPacketSinkInterface* const media_sink_; // Map from rtx payload type -> media payload type. - const std::map associated_payload_types_; + std::map associated_payload_types_ RTC_GUARDED_BY(&packet_checker_); // TODO(nisse): Ultimately, the media receive stream shouldn't care about the // ssrc, and we should delete this. const uint32_t media_ssrc_; diff --git a/TMessagesProj/jni/voip/webrtc/call/simulated_network.cc b/TMessagesProj/jni/voip/webrtc/call/simulated_network.cc index fc34fda914..f5d0501313 100644 --- a/TMessagesProj/jni/voip/webrtc/call/simulated_network.cc +++ b/TMessagesProj/jni/voip/webrtc/call/simulated_network.cc @@ -24,62 +24,6 @@ namespace { constexpr TimeDelta kDefaultProcessDelay = TimeDelta::Millis(5); } // namespace -CoDelSimulation::CoDelSimulation() = default; -CoDelSimulation::~CoDelSimulation() = default; - -bool CoDelSimulation::DropDequeuedPacket(Timestamp now, - Timestamp enqueing_time, - DataSize packet_size, - DataSize queue_size) { - constexpr TimeDelta kWindow = TimeDelta::Millis(100); - constexpr TimeDelta kDelayThreshold = TimeDelta::Millis(5); - constexpr TimeDelta kDropCountMemory = TimeDelta::Millis(1600); - constexpr DataSize kMaxPacketSize = DataSize::Bytes(1500); - - // Compensates for process interval in simulation; not part of standard CoDel. - TimeDelta queuing_time = now - enqueing_time - kDefaultProcessDelay; - - if (queue_size < kMaxPacketSize || queuing_time < kDelayThreshold) { - enter_drop_state_at_ = Timestamp::PlusInfinity(); - state_ = kNormal; - return false; - } - switch (state_) { - case kNormal: - enter_drop_state_at_ = now + kWindow; - state_ = kPending; - return false; - - case kPending: - if (now >= enter_drop_state_at_) { - state_ = kDropping; - // Starting the drop counter with the drops made during the most recent - // drop state period. 
- drop_count_ = drop_count_ - previous_drop_count_; - if (now >= last_drop_at_ + kDropCountMemory) - drop_count_ = 0; - previous_drop_count_ = drop_count_; - last_drop_at_ = now; - ++drop_count_; - return true; - } - return false; - - case kDropping: - TimeDelta drop_delay = kWindow / sqrt(static_cast(drop_count_)); - Timestamp next_drop_at = last_drop_at_ + drop_delay; - if (now >= next_drop_at) { - if (queue_size - packet_size < kMaxPacketSize) - state_ = kPending; - last_drop_at_ = next_drop_at; - ++drop_count_; - return true; - } - return false; - } - RTC_CHECK_NOTREACHED(); -} - SimulatedNetwork::SimulatedNetwork(Config config, uint64_t random_seed) : random_(random_seed), bursting_(false) { SetConfig(config); @@ -195,20 +139,6 @@ void SimulatedNetwork::UpdateCapacityQueue(ConfigState state, capacity_link_.pop(); time_us += time_until_front_exits_us; - if (state.config.codel_active_queue_management) { - while (!capacity_link_.empty() && - codel_controller_.DropDequeuedPacket( - Timestamp::Micros(time_us), - Timestamp::Micros(capacity_link_.front().packet.send_time_us), - DataSize::Bytes(capacity_link_.front().packet.size), - DataSize::Bytes(queue_size_bytes_))) { - PacketInfo dropped = capacity_link_.front(); - capacity_link_.pop(); - queue_size_bytes_ -= dropped.packet.size; - dropped.arrival_time_us = PacketDeliveryInfo::kNotReceived; - delay_link_.emplace_back(dropped); - } - } RTC_DCHECK(time_us >= packet.packet.send_time_us); packet.arrival_time_us = std::max(state.pause_transmission_until_us, time_us); diff --git a/TMessagesProj/jni/voip/webrtc/call/simulated_network.h b/TMessagesProj/jni/voip/webrtc/call/simulated_network.h index b781b4658f..d3092aefba 100644 --- a/TMessagesProj/jni/voip/webrtc/call/simulated_network.h +++ b/TMessagesProj/jni/voip/webrtc/call/simulated_network.h @@ -27,29 +27,6 @@ #include "rtc_base/thread_annotations.h" namespace webrtc { -// Implementation of the CoDel active queue management algorithm. Loosely based -// on CoDel pseudocode from ACMQueue. CoDel keeps queuing delays low by dropping -// packets when delay is high. For each packet ready for dequeue, call -// DropDequeuePacket with the packet parameters to update the CoDel state. -class CoDelSimulation { - public: - CoDelSimulation(); - ~CoDelSimulation(); - - // Returns true if packet should be dropped. - bool DropDequeuedPacket(Timestamp now, - Timestamp enqueing_time, - DataSize packet_size, - DataSize queue_size); - - private: - enum State { kNormal, kPending, kDropping }; - Timestamp enter_drop_state_at_ = Timestamp::PlusInfinity(); - Timestamp last_drop_at_ = Timestamp::MinusInfinity(); - int drop_count_ = 0; - int previous_drop_count_ = 0; - State state_ = State::kNormal; -}; // Class simulating a network link. This is a simple and naive solution just // faking capacity and adding an extra transport delay in addition to the @@ -101,7 +78,6 @@ class SimulatedNetwork : public SimulatedNetworkInterface { // `process_checker_` guards the data structures involved in delay and loss // processes, such as the packet queues. 
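
For reference, the CoDelSimulation class deleted above implemented the standard CoDel drop rule; a compact, simplified restatement follows (the drop-count memory and the process-delay compensation of the removed code are omitted):

#include <cmath>
#include <cstdint>

class CoDelSketch {
 public:
  // Returns true if the packet at the head of the queue should be dropped.
  bool ShouldDrop(int64_t now_ms, int64_t queuing_delay_ms,
                  int64_t queue_bytes, int64_t packet_bytes) {
    constexpr int64_t kWindowMs = 100;
    constexpr int64_t kDelayThresholdMs = 5;
    constexpr int64_t kMaxPacketBytes = 1500;

    // Small queues or low delay reset the controller to its normal state.
    if (queue_bytes < kMaxPacketBytes || queuing_delay_ms < kDelayThresholdMs) {
      state_ = State::kNormal;
      return false;
    }
    switch (state_) {
      case State::kNormal:
        enter_drop_at_ms_ = now_ms + kWindowMs;
        state_ = State::kPending;
        return false;
      case State::kPending:
        if (now_ms >= enter_drop_at_ms_) {
          state_ = State::kDropping;
          last_drop_at_ms_ = now_ms;
          ++drop_count_;
          return true;
        }
        return false;
      case State::kDropping: {
        // The drop interval shrinks with sqrt(drop_count), per CoDel.
        const int64_t next_drop_ms =
            last_drop_at_ms_ +
            static_cast<int64_t>(kWindowMs /
                                 std::sqrt(static_cast<double>(drop_count_)));
        if (now_ms >= next_drop_ms) {
          if (queue_bytes - packet_bytes < kMaxPacketBytes)
            state_ = State::kPending;
          last_drop_at_ms_ = next_drop_ms;
          ++drop_count_;
          return true;
        }
        return false;
      }
    }
    return false;
  }

 private:
  enum class State { kNormal, kPending, kDropping };
  State state_ = State::kNormal;
  int64_t enter_drop_at_ms_ = 0;
  int64_t last_drop_at_ms_ = 0;
  int drop_count_ = 0;
};
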
rtc::RaceChecker process_checker_; - CoDelSimulation codel_controller_ RTC_GUARDED_BY(process_checker_); std::queue capacity_link_ RTC_GUARDED_BY(process_checker_); Random random_; diff --git a/TMessagesProj/jni/voip/webrtc/call/syncable.h b/TMessagesProj/jni/voip/webrtc/call/syncable.h index 43b16a0720..6817be9c55 100644 --- a/TMessagesProj/jni/voip/webrtc/call/syncable.h +++ b/TMessagesProj/jni/voip/webrtc/call/syncable.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -// Syncable is used by RtpStreamsSynchronizer in VideoReceiveStream, and -// implemented by AudioReceiveStream. +// Syncable is used by RtpStreamsSynchronizer in VideoReceiveStreamInterface, +// and implemented by AudioReceiveStreamInterface. #ifndef CALL_SYNCABLE_H_ #define CALL_SYNCABLE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/call/version.cc b/TMessagesProj/jni/voip/webrtc/call/version.cc index f8147c9d16..87326c135c 100644 --- a/TMessagesProj/jni/voip/webrtc/call/version.cc +++ b/TMessagesProj/jni/voip/webrtc/call/version.cc @@ -13,7 +13,7 @@ namespace webrtc { // The timestamp is always in UTC. -const char* const kSourceTimestamp = "WebRTC source stamp 2021-12-09T04:05:04"; +const char* const kSourceTimestamp = "WebRTC source stamp 2022-10-24T04:02:03"; void LoadWebRTCVersionInRegister() { // Using volatile to instruct the compiler to not optimize `p` away even diff --git a/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.cc b/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.cc index d0518b6e0d..8cd4a952d4 100644 --- a/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.cc @@ -14,19 +14,20 @@ namespace webrtc { -VideoReceiveStream::Decoder::Decoder(SdpVideoFormat video_format, - int payload_type) +VideoReceiveStreamInterface::Decoder::Decoder(SdpVideoFormat video_format, + int payload_type) : video_format(std::move(video_format)), payload_type(payload_type) {} -VideoReceiveStream::Decoder::Decoder() : video_format("Unset") {} -VideoReceiveStream::Decoder::Decoder(const Decoder&) = default; -VideoReceiveStream::Decoder::~Decoder() = default; +VideoReceiveStreamInterface::Decoder::Decoder() : video_format("Unset") {} +VideoReceiveStreamInterface::Decoder::Decoder(const Decoder&) = default; +VideoReceiveStreamInterface::Decoder::~Decoder() = default; -bool VideoReceiveStream::Decoder::operator==(const Decoder& other) const { +bool VideoReceiveStreamInterface::Decoder::operator==( + const Decoder& other) const { return payload_type == other.payload_type && video_format == other.video_format; } -std::string VideoReceiveStream::Decoder::ToString() const { +std::string VideoReceiveStreamInterface::Decoder::ToString() const { char buf[1024]; rtc::SimpleStringBuilder ss(buf); ss << "{payload_type: " << payload_type; @@ -45,13 +46,15 @@ std::string VideoReceiveStream::Decoder::ToString() const { return ss.str(); } -VideoReceiveStream::Stats::Stats() = default; -VideoReceiveStream::Stats::~Stats() = default; +VideoReceiveStreamInterface::Stats::Stats() = default; +VideoReceiveStreamInterface::Stats::~Stats() = default; -std::string VideoReceiveStream::Stats::ToString(int64_t time_ms) const { +std::string VideoReceiveStreamInterface::Stats::ToString( + int64_t time_ms) const { char buf[2048]; rtc::SimpleStringBuilder ss(buf); - ss << "VideoReceiveStream stats: " << time_ms << ", {ssrc: " << ssrc << ", "; + ss << "VideoReceiveStreamInterface stats: " << time_ms << ", {ssrc: " << ssrc + << ", "; ss << 
"total_bps: " << total_bitrate_bps << ", "; ss << "width: " << width << ", "; ss << "height: " << height << ", "; @@ -80,18 +83,19 @@ std::string VideoReceiveStream::Stats::ToString(int64_t time_ms) const { return ss.str(); } -VideoReceiveStream::Config::Config(const Config&) = default; -VideoReceiveStream::Config::Config(Config&&) = default; -VideoReceiveStream::Config::Config(Transport* rtcp_send_transport, - VideoDecoderFactory* decoder_factory) +VideoReceiveStreamInterface::Config::Config(const Config&) = default; +VideoReceiveStreamInterface::Config::Config(Config&&) = default; +VideoReceiveStreamInterface::Config::Config( + Transport* rtcp_send_transport, + VideoDecoderFactory* decoder_factory) : decoder_factory(decoder_factory), rtcp_send_transport(rtcp_send_transport) {} -VideoReceiveStream::Config& VideoReceiveStream::Config::operator=(Config&&) = - default; -VideoReceiveStream::Config::Config::~Config() = default; +VideoReceiveStreamInterface::Config& +VideoReceiveStreamInterface::Config::operator=(Config&&) = default; +VideoReceiveStreamInterface::Config::Config::~Config() = default; -std::string VideoReceiveStream::Config::ToString() const { +std::string VideoReceiveStreamInterface::Config::ToString() const { char buf[4 * 1024]; rtc::SimpleStringBuilder ss(buf); ss << "{decoders: ["; @@ -106,17 +110,16 @@ std::string VideoReceiveStream::Config::ToString() const { ss << ", render_delay_ms: " << render_delay_ms; if (!sync_group.empty()) ss << ", sync_group: " << sync_group; - ss << ", target_delay_ms: " << target_delay_ms; ss << '}'; return ss.str(); } -VideoReceiveStream::Config::Rtp::Rtp() = default; -VideoReceiveStream::Config::Rtp::Rtp(const Rtp&) = default; -VideoReceiveStream::Config::Rtp::~Rtp() = default; +VideoReceiveStreamInterface::Config::Rtp::Rtp() = default; +VideoReceiveStreamInterface::Config::Rtp::Rtp(const Rtp&) = default; +VideoReceiveStreamInterface::Config::Rtp::~Rtp() = default; -std::string VideoReceiveStream::Config::Rtp::ToString() const { +std::string VideoReceiveStreamInterface::Config::Rtp::ToString() const { char buf[2 * 1024]; rtc::SimpleStringBuilder ss(buf); ss << "{remote_ssrc: " << remote_ssrc; diff --git a/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h b/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h index d39762834a..2e2742a814 100644 --- a/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h @@ -39,7 +39,7 @@ namespace webrtc { class RtpPacketSinkInterface; class VideoDecoderFactory; -class VideoReceiveStream : public MediaReceiveStream { +class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { public: // Class for handling moving in/out recording state. struct RecordingState { @@ -48,11 +48,11 @@ class VideoReceiveStream : public MediaReceiveStream { std::function callback) : callback(std::move(callback)) {} - // Callback stored from the VideoReceiveStream. The VideoReceiveStream - // client should not interpret the attribute. + // Callback stored from the VideoReceiveStreamInterface. The + // VideoReceiveStreamInterface client should not interpret the attribute. std::function callback; - // Memento of when a keyframe request was last sent. The VideoReceiveStream - // client should not interpret the attribute. + // Memento of when a keyframe request was last sent. The + // VideoReceiveStreamInterface client should not interpret the attribute. 
absl::optional last_keyframe_request_ms; }; @@ -87,6 +87,7 @@ class VideoReceiveStream : public MediaReceiveStream { // Decoder stats. std::string decoder_implementation_name = "unknown"; + absl::optional power_efficient_decoder; FrameCounts frame_counts; int decode_ms = 0; int max_decode_ms = 0; @@ -105,7 +106,12 @@ class VideoReceiveStream : public MediaReceiveStream { uint32_t frames_dropped = 0; uint32_t frames_decoded = 0; // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totaldecodetime - uint64_t total_decode_time_ms = 0; + TimeDelta total_decode_time = TimeDelta::Zero(); + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalprocessingdelay + TimeDelta total_processing_delay = TimeDelta::Zero(); + // TODO(bugs.webrtc.org/13986): standardize + TimeDelta total_assembly_time = TimeDelta::Zero(); + uint32_t frames_assembled_from_multiple_packets = 0; // Total inter frame delay in seconds. // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalinterframedelay double total_inter_frame_delay = 0; @@ -172,7 +178,7 @@ class VideoReceiveStream : public MediaReceiveStream { VideoDecoderFactory* decoder_factory = nullptr; // Receive-stream specific RTP settings. - struct Rtp : public RtpConfig { + struct Rtp : public ReceiveStreamRtpConfig { Rtp(); Rtp(const Rtp&); ~Rtp(); @@ -191,6 +197,10 @@ class VideoReceiveStream : public MediaReceiveStream { bool receiver_reference_time_report = false; } rtcp_xr; + // How to request keyframes from a remote sender. Applies only if lntf is + // disabled. + KeyFrameReqMethod keyframe_method = KeyFrameReqMethod::kPliRtcp; + // See LntfConfig for description. LntfConfig lntf; @@ -204,7 +214,7 @@ class VideoReceiveStream : public MediaReceiveStream { // Set if the stream is protected using FlexFEC. bool protected_by_flexfec = false; - // Optional callback sink to support additional packet handlsers such as + // Optional callback sink to support additional packet handlers such as // FlexFec. RtpPacketSinkInterface* packet_sink_ = nullptr; @@ -238,10 +248,6 @@ class VideoReceiveStream : public MediaReceiveStream { // to one of the audio streams. std::string sync_group; - // Target delay in milliseconds. A positive value indicates this stream is - // used for streaming instead of a real-time call. - int target_delay_ms = 0; - // An optional custom frame decryptor that allows the entire frame to be // decrypted in whatever way the caller choses. This is not required by // default. @@ -280,8 +286,34 @@ class VideoReceiveStream : public MediaReceiveStream { // Cause eventual generation of a key frame from the sender. virtual void GenerateKeyFrame() = 0; + virtual void SetRtcpMode(RtcpMode mode) = 0; + + // Sets or clears a flexfec RTP sink. This affects `rtp.packet_sink_` and + // `rtp.protected_by_flexfec` parts of the configuration. Must be called on + // the packet delivery thread. + // TODO(bugs.webrtc.org/11993): Packet delivery thread today means `worker + // thread` but will be `network thread`. + virtual void SetFlexFecProtection(RtpPacketSinkInterface* flexfec_sink) = 0; + + // Turns on/off loss notifications. Must be called on the packet delivery + // thread. + virtual void SetLossNotificationEnabled(bool enabled) = 0; + + // Modify `rtp.nack.rtp_history_ms` post construction. Setting this value + // to 0 disables nack. + // Must be called on the packet delivery thread. 
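  // Taken together with SetNackHistory declared next, these runtime setters
  // allow re-configuring a live receive stream without recreating it. An
  // illustrative usage sketch (payload type numbers are placeholders, not
  // values taken from this change):
  //   receive_stream->SetLossNotificationEnabled(false);
  //   receive_stream->SetNackHistory(TimeDelta::Millis(1000));
  //   receive_stream->SetProtectionPayloadTypes(/*red_payload_type=*/96,
  //                                             /*ulpfec_payload_type=*/97);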
+ virtual void SetNackHistory(TimeDelta history) = 0; + + virtual void SetProtectionPayloadTypes(int red_payload_type, + int ulpfec_payload_type) = 0; + + virtual void SetRtcpXr(Config::Rtp::RtcpXr rtcp_xr) = 0; + + virtual void SetAssociatedPayloadTypes( + std::map associated_payload_types) = 0; + protected: - virtual ~VideoReceiveStream() {} + virtual ~VideoReceiveStreamInterface() {} }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h b/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h index f899a7371b..4946253ca8 100644 --- a/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h +++ b/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h @@ -29,13 +29,13 @@ #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "api/video/video_stream_encoder_settings.h" -#include "api/video_codecs/video_encoder_config.h" #include "call/rtp_config.h" #include "common_video/frame_counts.h" #include "common_video/include/quality_limitation_reason.h" #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "video/config/video_encoder_config.h" namespace webrtc { @@ -77,9 +77,10 @@ class VideoSendStream { // TODO(holmer): Move bitrate_bps out to the webrtc::Call layer. int total_bitrate_bps = 0; int retransmit_bitrate_bps = 0; + // `avg_delay_ms` and `max_delay_ms` are only used in tests. Consider + // deleting. int avg_delay_ms = 0; int max_delay_ms = 0; - uint64_t total_packet_send_delay_ms = 0; StreamDataCounters rtp_stats; RtcpPacketTypeCounter rtcp_packet_type_counts; // A snapshot of the most recent Report Block with additional data of @@ -140,6 +141,7 @@ class VideoSendStream { webrtc::VideoContentType::UNSPECIFIED; uint32_t frames_sent = 0; uint32_t huge_frames_sent = 0; + absl::optional power_efficient_encoder; }; struct Config { @@ -190,6 +192,11 @@ class VideoSendStream { // default. rtc::scoped_refptr frame_encryptor; + // An optional encoder selector provided by the user. + // Overrides VideoEncoderFactory::GetEncoderSelector(). + // Owned by RtpSenderBase. + VideoEncoderFactory::EncoderSelectorInterface* encoder_selector = nullptr; + // Per PeerConnection cryptography options. CryptoOptions crypto_options; @@ -208,8 +215,7 @@ class VideoSendStream { // Note: This starts stream activity if it is inactive and one of the layers // is active. This stops stream activity if it is active and all layers are // inactive. - virtual void UpdateActiveSimulcastLayers( - const std::vector active_layers) = 0; + virtual void UpdateActiveSimulcastLayers(std::vector active_layers) = 0; // Starts stream activity. // When a stream is active, it can receive, process and deliver packets. @@ -247,6 +253,8 @@ class VideoSendStream { virtual Stats GetStats() = 0; + virtual void GenerateKeyFrame() = 0; + protected: virtual ~VideoSendStream() {} }; diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/audio_converter.h b/TMessagesProj/jni/voip/webrtc/common_audio/audio_converter.h index e12e601b24..4afbb6d0fd 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/audio_converter.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/audio_converter.h @@ -15,8 +15,6 @@ #include -#include "rtc_base/constructor_magic.h" - namespace webrtc { // Format conversion (remixing and resampling) for audio. 
Only simple remixing @@ -35,6 +33,9 @@ class AudioConverter { size_t dst_frames); virtual ~AudioConverter() {} + AudioConverter(const AudioConverter&) = delete; + AudioConverter& operator=(const AudioConverter&) = delete; + // Convert `src`, containing `src_size` samples, to `dst`, having a sample // capacity of `dst_capacity`. Both point to a series of buffers containing // the samples for each channel. The sizes must correspond to the format @@ -64,8 +65,6 @@ class AudioConverter { const size_t src_frames_; const size_t dst_channels_; const size_t dst_frames_; - - RTC_DISALLOW_COPY_AND_ASSIGN(AudioConverter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/push_resampler.cc b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/push_resampler.cc index d7aa8d7613..810d778993 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/push_resampler.cc +++ b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/push_resampler.cc @@ -20,42 +20,6 @@ #include "rtc_base/checks.h" namespace webrtc { -namespace { -// These checks were factored out into a non-templatized function -// due to problems with clang on Windows in debug builds. -// For some reason having the DCHECKs inline in the template code -// caused the compiler to generate code that threw off the linker. -// TODO(tommi): Re-enable when we've figured out what the problem is. -// http://crbug.com/615050 -void CheckValidInitParams(int src_sample_rate_hz, - int dst_sample_rate_hz, - size_t num_channels) { -// The below checks are temporarily disabled on WEBRTC_WIN due to problems -// with clang debug builds. -#if !defined(WEBRTC_WIN) && defined(__clang__) - RTC_DCHECK_GT(src_sample_rate_hz, 0); - RTC_DCHECK_GT(dst_sample_rate_hz, 0); - RTC_DCHECK_GT(num_channels, 0); -#endif -} - -void CheckExpectedBufferSizes(size_t src_length, - size_t dst_capacity, - size_t num_channels, - int src_sample_rate, - int dst_sample_rate) { -// The below checks are temporarily disabled on WEBRTC_WIN due to problems -// with clang debug builds. -// TODO(tommi): Re-enable when we've figured out what the problem is. -// http://crbug.com/615050 -#if !defined(WEBRTC_WIN) && defined(__clang__) - const size_t src_size_10ms = src_sample_rate * num_channels / 100; - const size_t dst_size_10ms = dst_sample_rate * num_channels / 100; - RTC_DCHECK_EQ(src_length, src_size_10ms); - RTC_DCHECK_GE(dst_capacity, dst_size_10ms); -#endif -} -} // namespace template PushResampler::PushResampler() @@ -68,7 +32,11 @@ template int PushResampler::InitializeIfNeeded(int src_sample_rate_hz, int dst_sample_rate_hz, size_t num_channels) { - CheckValidInitParams(src_sample_rate_hz, dst_sample_rate_hz, num_channels); + // These checks used to be factored out of this template function due to + // Windows debug build issues with clang. http://crbug.com/615050 + RTC_DCHECK_GT(src_sample_rate_hz, 0); + RTC_DCHECK_GT(dst_sample_rate_hz, 0); + RTC_DCHECK_GT(num_channels, 0); if (src_sample_rate_hz == src_sample_rate_hz_ && dst_sample_rate_hz == dst_sample_rate_hz_ && @@ -109,8 +77,12 @@ int PushResampler::Resample(const T* src, size_t src_length, T* dst, size_t dst_capacity) { - CheckExpectedBufferSizes(src_length, dst_capacity, num_channels_, - src_sample_rate_hz_, dst_sample_rate_hz_); + // These checks used to be factored out of this template function due to + // Windows debug build issues with clang. 
http://crbug.com/615050 + const size_t src_size_10ms = (src_sample_rate_hz_ / 100) * num_channels_; + const size_t dst_size_10ms = (dst_sample_rate_hz_ / 100) * num_channels_; + RTC_DCHECK_EQ(src_length, src_size_10ms); + RTC_DCHECK_GE(dst_capacity, dst_size_10ms); if (src_sample_rate_hz_ == dst_sample_rate_hz_) { // The old resampler provides this memcpy facility in the case of matching diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/push_sinc_resampler.h b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/push_sinc_resampler.h index 88792d427a..7946ef8f82 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/push_sinc_resampler.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/push_sinc_resampler.h @@ -17,7 +17,6 @@ #include #include "common_audio/resampler/sinc_resampler.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -33,6 +32,9 @@ class PushSincResampler : public SincResamplerCallback { PushSincResampler(size_t source_frames, size_t destination_frames); ~PushSincResampler() override; + PushSincResampler(const PushSincResampler&) = delete; + PushSincResampler& operator=(const PushSincResampler&) = delete; + // Perform the resampling. `source_frames` must always equal the // `source_frames` provided at construction. `destination_capacity` must be // at least as large as `destination_frames`. Returns the number of samples @@ -72,8 +74,6 @@ class PushSincResampler : public SincResamplerCallback { // Used to assert we are only requested for as much data as is available. size_t source_available_; - - RTC_DISALLOW_COPY_AND_ASSIGN(PushSincResampler); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.h b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.h index d071e96f4f..b89bba7ab4 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.h @@ -18,7 +18,6 @@ #include -#include "rtc_base/constructor_magic.h" #include "rtc_base/gtest_prod_util.h" #include "rtc_base/memory/aligned_malloc.h" #include "rtc_base/system/arch.h" @@ -64,6 +63,9 @@ class SincResampler { SincResamplerCallback* read_cb); virtual ~SincResampler(); + SincResampler(const SincResampler&) = delete; + SincResampler& operator=(const SincResampler&) = delete; + // Resample `frames` of data from `read_cb_` into `destination`. 
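  // Worked example for the 10 ms framing assumed by the size checks restored
  // in PushResampler::Resample above (values illustrative): at 48 kHz,
  // 48000 / 100 = 480 samples per channel per 10 ms, so a stereo call passes
  // 480 * 2 = 960 samples and the destination must hold at least as many at
  // the output rate.
  static_assert((48000 / 100) * 2 == 960, "10 ms stereo frame at 48 kHz");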
void Resample(size_t frames, float* destination); @@ -172,8 +174,6 @@ class SincResampler { float* const r2_; float* r3_; float* r4_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SincResampler); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h index 8534119e5c..ccd11bbd61 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h @@ -15,7 +15,6 @@ #define COMMON_AUDIO_RESAMPLER_SINUSOIDAL_LINEAR_CHIRP_SOURCE_H_ #include "common_audio/resampler/sinc_resampler.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -33,12 +32,16 @@ class SinusoidalLinearChirpSource : public SincResamplerCallback { ~SinusoidalLinearChirpSource() override {} + SinusoidalLinearChirpSource(const SinusoidalLinearChirpSource&) = delete; + SinusoidalLinearChirpSource& operator=(const SinusoidalLinearChirpSource&) = + delete; + void Run(size_t frames, float* destination) override; double Frequency(size_t position); private: - enum { kMinFrequency = 5 }; + static constexpr int kMinFrequency = 5; int sample_rate_; size_t total_samples_; @@ -46,8 +49,6 @@ class SinusoidalLinearChirpSource : public SincResamplerCallback { double k_; size_t current_index_; double delay_samples_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SinusoidalLinearChirpSource); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/include/spl_inl.h b/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/include/spl_inl.h index 656a3125bb..2b0995886a 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/include/spl_inl.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/include/spl_inl.h @@ -14,6 +14,8 @@ #ifndef COMMON_AUDIO_SIGNAL_PROCESSING_INCLUDE_SPL_INL_H_ #define COMMON_AUDIO_SIGNAL_PROCESSING_INCLUDE_SPL_INL_H_ +#include + #include "rtc_base/compile_assert_c.h" extern const int8_t kWebRtcSpl_CountLeadingZeros32_Table[64]; diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/include/spl_inl_armv7.h b/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/include/spl_inl_armv7.h index 930e91e2b3..6fc3e7c1b8 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/include/spl_inl_armv7.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/include/spl_inl_armv7.h @@ -15,6 +15,8 @@ #ifndef COMMON_AUDIO_SIGNAL_PROCESSING_INCLUDE_SPL_INL_ARMV7_H_ #define COMMON_AUDIO_SIGNAL_PROCESSING_INCLUDE_SPL_INL_ARMV7_H_ +#include + /* TODO(kma): Replace some assembly code with GCC intrinsics * (e.g. __builtin_clz). */ diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/vad/vad_core.h b/TMessagesProj/jni/voip/webrtc/common_audio/vad/vad_core.h index ee102de745..fbaf970065 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/vad/vad_core.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/vad/vad_core.h @@ -17,10 +17,19 @@ #include "common_audio/signal_processing/include/signal_processing_library.h" -enum { kNumChannels = 6 }; // Number of frequency bands (named channels). -enum { kNumGaussians = 2 }; // Number of Gaussians per channel in the GMM. -enum { kTableSize = kNumChannels * kNumGaussians }; -enum { kMinEnergy = 10 }; // Minimum energy required to trigger audio signal. 
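// The CONSTEXPR_INT macro introduced just below keeps this header usable from
// both C and C++ translation units. For illustration, under C++
//   CONSTEXPR_INT(kNumChannels = 6);
// expands to
//   constexpr int kNumChannels = 6;
// while under plain C it falls back to the anonymous-enum form being removed
// here:
//   enum { kNumChannels = 6 };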
+// TODO(https://bugs.webrtc.org/14476): When converted to C++, remove the macro. +#if defined(__cplusplus) +#define CONSTEXPR_INT(x) constexpr int x +#else +#define CONSTEXPR_INT(x) enum { x } +#endif + +CONSTEXPR_INT(kNumChannels = 6); // Number of frequency bands (named channels). +CONSTEXPR_INT( + kNumGaussians = 2); // Number of Gaussians per channel in the GMM. +CONSTEXPR_INT(kTableSize = kNumChannels * kNumGaussians); +CONSTEXPR_INT( + kMinEnergy = 10); // Minimum energy required to trigger audio signal. typedef struct VadInstT_ { int vad; diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/vad/webrtc_vad.c b/TMessagesProj/jni/voip/webrtc/common_audio/vad/webrtc_vad.c index 49e7682780..6dd14d8b55 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/vad/webrtc_vad.c +++ b/TMessagesProj/jni/voip/webrtc/common_audio/vad/webrtc_vad.c @@ -21,7 +21,7 @@ static const int kValidRates[] = { 8000, 16000, 32000, 48000 }; static const size_t kRatesSize = sizeof(kValidRates) / sizeof(*kValidRates); static const int kMaxFrameLengthMs = 30; -VadInst* WebRtcVad_Create() { +VadInst* WebRtcVad_Create(void) { VadInstT* self = (VadInstT*)malloc(sizeof(VadInstT)); self->init_flag = 0; diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/wav_file.cc b/TMessagesProj/jni/voip/webrtc/common_audio/wav_file.cc index e49126f139..127c9c0757 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/wav_file.cc +++ b/TMessagesProj/jni/voip/webrtc/common_audio/wav_file.cc @@ -65,7 +65,7 @@ constexpr size_t kMaxChunksize = 4096; } // namespace -WavReader::WavReader(const std::string& filename) +WavReader::WavReader(absl::string_view filename) : WavReader(FileWrapper::OpenReadOnly(filename)) {} WavReader::WavReader(FileWrapper file) : file_(std::move(file)) { @@ -178,7 +178,7 @@ void WavReader::Close() { file_.Close(); } -WavWriter::WavWriter(const std::string& filename, +WavWriter::WavWriter(absl::string_view filename, int sample_rate, size_t num_channels, SampleFormat sample_format) diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/wav_file.h b/TMessagesProj/jni/voip/webrtc/common_audio/wav_file.h index dda611be6a..72a4db79c2 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/wav_file.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/wav_file.h @@ -39,7 +39,7 @@ class WavFile { class WavWriter final : public WavFile { public: // Opens a new WAV file for writing. - WavWriter(const std::string& filename, + WavWriter(absl::string_view filename, int sample_rate, size_t num_channels, SampleFormat sample_format = SampleFormat::kInt16); @@ -77,7 +77,7 @@ class WavWriter final : public WavFile { class WavReader final : public WavFile { public: // Opens an existing WAV file for reading. - explicit WavReader(const std::string& filename); + explicit WavReader(absl::string_view filename); explicit WavReader(FileWrapper file); // Close the WAV file. diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/wav_header.cc b/TMessagesProj/jni/voip/webrtc/common_audio/wav_header.cc index 65d8be5b89..bca209a665 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/wav_header.cc +++ b/TMessagesProj/jni/voip/webrtc/common_audio/wav_header.cc @@ -80,8 +80,6 @@ const uint32_t kFmtIeeeFloatSubchunkSize = // read audio samples. #pragma pack(2) struct WavHeaderPcm { - WavHeaderPcm(const WavHeaderPcm&) = default; - WavHeaderPcm& operator=(const WavHeaderPcm&) = default; RiffHeader riff; FmtPcmSubchunk fmt; struct { @@ -95,8 +93,6 @@ static_assert(sizeof(WavHeaderPcm) == kPcmWavHeaderSize, // WAV implementation. 
#pragma pack(2) struct WavHeaderIeeeFloat { - WavHeaderIeeeFloat(const WavHeaderIeeeFloat&) = default; - WavHeaderIeeeFloat& operator=(const WavHeaderIeeeFloat&) = default; RiffHeader riff; FmtIeeeFloatSubchunk fmt; struct { diff --git a/TMessagesProj/jni/voip/webrtc/common_video/frame_counts.h b/TMessagesProj/jni/voip/webrtc/common_video/frame_counts.h index 663fda4a2f..505d3129ef 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/frame_counts.h +++ b/TMessagesProj/jni/voip/webrtc/common_video/frame_counts.h @@ -11,6 +11,8 @@ #ifndef COMMON_VIDEO_FRAME_COUNTS_H_ #define COMMON_VIDEO_FRAME_COUNTS_H_ +#include + namespace webrtc { struct FrameCounts { diff --git a/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_bitstream_parser.cc b/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_bitstream_parser.cc index 452e422f03..9d36504ad1 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_bitstream_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_bitstream_parser.cc @@ -16,7 +16,6 @@ #include "common_video/h265/h265_common.h" #include "common_video/h265/legacy_bit_buffer.h" -#include "rtc_base/bit_buffer.h" #include "rtc_base/logging.h" namespace { diff --git a/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer.h b/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer.h index 593464abe4..34a9bb5a37 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer.h @@ -12,11 +12,11 @@ #define COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_H_ #include + #include #include "api/scoped_refptr.h" #include "api/video/video_frame_buffer.h" -#include "rtc_base/ref_counted_object.h" namespace webrtc { @@ -31,6 +31,17 @@ rtc::scoped_refptr WrapI420Buffer( int v_stride, std::function no_longer_used); +rtc::scoped_refptr WrapI422Buffer( + int width, + int height, + const uint8_t* y_plane, + int y_stride, + const uint8_t* u_plane, + int u_stride, + const uint8_t* v_plane, + int v_stride, + std::function no_longer_used); + rtc::scoped_refptr WrapI444Buffer( int width, int height, @@ -78,6 +89,16 @@ rtc::scoped_refptr WrapI010Buffer( int v_stride, std::function no_longer_used); +rtc::scoped_refptr WrapI210Buffer( + int width, + int height, + const uint16_t* y_plane, + int y_stride, + const uint16_t* u_plane, + int u_stride, + const uint16_t* v_plane, + int v_stride, + std::function no_longer_used); } // namespace webrtc #endif // COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer_pool.h b/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer_pool.h index 539a6cc0f3..fd1bd164ec 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer_pool.h +++ b/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer_pool.h @@ -16,10 +16,13 @@ #include #include "api/scoped_refptr.h" +#include "api/video/i010_buffer.h" +#include "api/video/i210_buffer.h" #include "api/video/i420_buffer.h" +#include "api/video/i422_buffer.h" +#include "api/video/i444_buffer.h" #include "api/video/nv12_buffer.h" #include "rtc_base/race_checker.h" -#include "rtc_base/ref_counted_object.h" namespace webrtc { @@ -43,6 +46,10 @@ class VideoFrameBufferPool { // and there are less than `max_number_of_buffers` pending, a buffer is // created. Returns null otherwise. 
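  // Illustrative usage of the Create*Buffer factories declared below
  // (constructor arguments and sizes are assumptions, not taken from this
  // change):
  //   VideoFrameBufferPool pool(/*zero_initialize=*/false,
  //                             /*max_number_of_buffers=*/10);
  //   rtc::scoped_refptr<I422Buffer> buffer = pool.CreateI422Buffer(1280, 720);
  //   if (!buffer) {
  //     // Pool exhausted: all 10 outstanding buffers are still in use.
  //   }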
rtc::scoped_refptr CreateI420Buffer(int width, int height); + rtc::scoped_refptr CreateI422Buffer(int width, int height); + rtc::scoped_refptr CreateI444Buffer(int width, int height); + rtc::scoped_refptr CreateI010Buffer(int width, int height); + rtc::scoped_refptr CreateI210Buffer(int width, int height); rtc::scoped_refptr CreateNV12Buffer(int width, int height); // Changes the max amount of buffers in the pool to the new value. diff --git a/TMessagesProj/jni/voip/webrtc/common_video/libyuv/include/webrtc_libyuv.h b/TMessagesProj/jni/voip/webrtc/common_video/libyuv/include/webrtc_libyuv.h index 905219b6a6..08a035a8d7 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/libyuv/include/webrtc_libyuv.h +++ b/TMessagesProj/jni/voip/webrtc/common_video/libyuv/include/webrtc_libyuv.h @@ -39,6 +39,7 @@ enum class VideoType { kUYVY, kMJPEG, kBGRA, + kNV12, }; // This is the max PSNR value our algorithms can return. @@ -87,11 +88,24 @@ double I420SSE(const I420BufferInterface& ref_buffer, const I420BufferInterface& test_buffer); // Compute PSNR for an I420 frame (all planes). -// Returns the PSNR in decibel, to a maximum of kInfinitePSNR. +// Returns the PSNR in decibel, to a maximum of kPerfectPSNR. double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame); double I420PSNR(const I420BufferInterface& ref_buffer, const I420BufferInterface& test_buffer); +// Computes the weighted PSNR-YUV for an I420 buffer. +// +// For the definition and motivation, see +// J. Ohm, G. J. Sullivan, H. Schwarz, T. K. Tan and T. Wiegand, +// "Comparison of the Coding Efficiency of Video Coding Standards—Including +// High Efficiency Video Coding (HEVC)," in IEEE Transactions on Circuits and +// Systems for Video Technology, vol. 22, no. 12, pp. 1669-1684, Dec. 2012 +// doi: 10.1109/TCSVT.2012.2221192. +// +// Returns the PSNR-YUV in decibel, to a maximum of kPerfectPSNR. +double I420WeightedPSNR(const I420BufferInterface& ref_buffer, + const I420BufferInterface& test_buffer); + // Compute SSIM for an I420 frame (all planes). 
double I420SSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame); double I420SSIM(const I420BufferInterface& ref_buffer, diff --git a/TMessagesProj/jni/voip/webrtc/common_video/libyuv/webrtc_libyuv.cc b/TMessagesProj/jni/voip/webrtc/common_video/libyuv/webrtc_libyuv.cc index 2e10a60776..14e2d22612 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/libyuv/webrtc_libyuv.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/libyuv/webrtc_libyuv.cc @@ -26,7 +26,8 @@ size_t CalcBufferSize(VideoType type, int width, int height) { switch (type) { case VideoType::kI420: case VideoType::kIYUV: - case VideoType::kYV12: { + case VideoType::kYV12: + case VideoType::kNV12: { int half_width = (width + 1) >> 1; int half_height = (height + 1) >> 1; buffer_size = width * height + half_width * half_height * 2; @@ -105,6 +106,8 @@ int ConvertVideoType(VideoType video_type) { return libyuv::FOURCC_ARGB; case VideoType::kBGRA: return libyuv::FOURCC_BGRA; + case VideoType::kNV12: + return libyuv::FOURCC_NV12; } RTC_DCHECK_NOTREACHED(); return libyuv::FOURCC_ANY; @@ -255,6 +258,45 @@ double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame) { *test_frame->video_frame_buffer()->ToI420()); } +double I420WeightedPSNR(const I420BufferInterface& ref_buffer, + const I420BufferInterface& test_buffer) { + RTC_DCHECK_GE(ref_buffer.width(), test_buffer.width()); + RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height()); + if ((ref_buffer.width() != test_buffer.width()) || + (ref_buffer.height() != test_buffer.height())) { + rtc::scoped_refptr scaled_ref_buffer = + I420Buffer::Create(test_buffer.width(), test_buffer.height()); + scaled_ref_buffer->ScaleFrom(ref_buffer); + return I420WeightedPSNR(*scaled_ref_buffer, test_buffer); + } + + // Luma. + int width_y = test_buffer.width(); + int height_y = test_buffer.height(); + uint64_t sse_y = libyuv::ComputeSumSquareErrorPlane( + ref_buffer.DataY(), ref_buffer.StrideY(), test_buffer.DataY(), + test_buffer.StrideY(), width_y, height_y); + uint64_t num_samples_y = (uint64_t)width_y * (uint64_t)height_y; + double psnr_y = libyuv::SumSquareErrorToPsnr(sse_y, num_samples_y); + + // Chroma. + int width_uv = (width_y + 1) >> 1; + int height_uv = (height_y + 1) >> 1; + uint64_t sse_u = libyuv::ComputeSumSquareErrorPlane( + ref_buffer.DataU(), ref_buffer.StrideU(), test_buffer.DataU(), + test_buffer.StrideU(), width_uv, height_uv); + uint64_t num_samples_uv = (uint64_t)width_uv * (uint64_t)height_uv; + double psnr_u = libyuv::SumSquareErrorToPsnr(sse_u, num_samples_uv); + uint64_t sse_v = libyuv::ComputeSumSquareErrorPlane( + ref_buffer.DataV(), ref_buffer.StrideV(), test_buffer.DataV(), + test_buffer.StrideV(), width_uv, height_uv); + double psnr_v = libyuv::SumSquareErrorToPsnr(sse_v, num_samples_uv); + + // Weights from Ohm et. al 2012. + double psnr_yuv = (6.0 * psnr_y + psnr_u + psnr_v) / 8.0; + return (psnr_yuv > kPerfectPSNR) ? kPerfectPSNR : psnr_yuv; +} + // Compute SSIM for an I420A frame (all planes). Can upscale test frame. 
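// Worked example of the 6:1:1 weighting used by I420WeightedPSNR above
// (numbers are illustrative only): with psnr_y = 40 dB, psnr_u = 44 dB and
// psnr_v = 46 dB,
//   psnr_yuv = (6.0 * 40 + 44 + 46) / 8.0 = 330 / 8 = 41.25 dB,
// so luma dominates the combined score, per the Ohm et al. weighting cited in
// the header.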
double I420ASSIM(const I420ABufferInterface& ref_buffer, const I420ABufferInterface& test_buffer) { diff --git a/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer.cc b/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer.cc index 78a126419a..d57330c652 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer.cc @@ -9,9 +9,9 @@ */ #include "common_video/include/video_frame_buffer.h" +#include "api/make_ref_counted.h" #include "api/video/i420_buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/ref_counted_object.h" #include "third_party/libyuv/include/libyuv/convert.h" namespace webrtc { @@ -124,6 +124,22 @@ rtc::scoped_refptr I444BufferBase::ToI420() { return i420_buffer; } +class I422BufferBase : public I422BufferInterface { + public: + rtc::scoped_refptr ToI420() final; +}; + +rtc::scoped_refptr I422BufferBase::ToI420() { + rtc::scoped_refptr i420_buffer = + I420Buffer::Create(width(), height()); + libyuv::I422ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), + i420_buffer->MutableDataY(), i420_buffer->StrideY(), + i420_buffer->MutableDataU(), i420_buffer->StrideU(), + i420_buffer->MutableDataV(), i420_buffer->StrideV(), + width(), height()); + return i420_buffer; +} + // Template to implement a wrapped buffer for a PlanarYuv16BBuffer. template class WrappedYuv16BBuffer : public Base { @@ -195,6 +211,22 @@ rtc::scoped_refptr I010BufferBase::ToI420() { return i420_buffer; } +class I210BufferBase : public I210BufferInterface { + public: + rtc::scoped_refptr ToI420() final; +}; + +rtc::scoped_refptr I210BufferBase::ToI420() { + rtc::scoped_refptr i420_buffer = + I420Buffer::Create(width(), height()); + libyuv::I210ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), + i420_buffer->MutableDataY(), i420_buffer->StrideY(), + i420_buffer->MutableDataU(), i420_buffer->StrideU(), + i420_buffer->MutableDataV(), i420_buffer->StrideV(), + width(), height()); + return i420_buffer; +} + } // namespace rtc::scoped_refptr WrapI420Buffer( @@ -231,6 +263,22 @@ rtc::scoped_refptr WrapI420ABuffer( v_stride, a_plane, a_stride, no_longer_used)); } +rtc::scoped_refptr WrapI422Buffer( + int width, + int height, + const uint8_t* y_plane, + int y_stride, + const uint8_t* u_plane, + int u_stride, + const uint8_t* v_plane, + int v_stride, + std::function no_longer_used) { + return rtc::scoped_refptr( + rtc::make_ref_counted>( + width, height, y_plane, y_stride, u_plane, u_stride, v_plane, + v_stride, no_longer_used)); +} + rtc::scoped_refptr WrapI444Buffer( int width, int height, @@ -262,6 +310,9 @@ rtc::scoped_refptr WrapYuvBuffer( case VideoFrameBuffer::Type::kI420: return WrapI420Buffer(width, height, y_plane, y_stride, u_plane, u_stride, v_plane, v_stride, no_longer_used); + case VideoFrameBuffer::Type::kI422: + return WrapI422Buffer(width, height, y_plane, y_stride, u_plane, u_stride, + v_plane, v_stride, no_longer_used); case VideoFrameBuffer::Type::kI444: return WrapI444Buffer(width, height, y_plane, y_stride, u_plane, u_stride, v_plane, v_stride, no_longer_used); @@ -286,4 +337,20 @@ rtc::scoped_refptr WrapI010Buffer( v_stride, no_longer_used)); } +rtc::scoped_refptr WrapI210Buffer( + int width, + int height, + const uint16_t* y_plane, + int y_stride, + const uint16_t* u_plane, + int u_stride, + const uint16_t* v_plane, + int v_stride, + std::function no_longer_used) { + return rtc::scoped_refptr( + rtc::make_ref_counted>( + width, height, y_plane, y_stride, 
u_plane, u_stride, v_plane, + v_stride, no_longer_used)); +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer_pool.cc b/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer_pool.cc index 9c88f0b0df..7f695814f9 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer_pool.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer_pool.cc @@ -12,6 +12,7 @@ #include +#include "api/make_ref_counted.h" #include "rtc_base/checks.h" namespace webrtc { @@ -20,12 +21,29 @@ namespace { bool HasOneRef(const rtc::scoped_refptr& buffer) { // Cast to rtc::RefCountedObject is safe because this function is only called // on locally created VideoFrameBuffers, which are either - // `rtc::RefCountedObject` or `rtc::RefCountedObject`. + // `rtc::RefCountedObject`, `rtc::RefCountedObject` or + // `rtc::RefCountedObject`. switch (buffer->type()) { case VideoFrameBuffer::Type::kI420: { return static_cast*>(buffer.get()) ->HasOneRef(); } + case VideoFrameBuffer::Type::kI444: { + return static_cast*>(buffer.get()) + ->HasOneRef(); + } + case VideoFrameBuffer::Type::kI422: { + return static_cast*>(buffer.get()) + ->HasOneRef(); + } + case VideoFrameBuffer::Type::kI010: { + return static_cast*>(buffer.get()) + ->HasOneRef(); + } + case VideoFrameBuffer::Type::kI210: { + return static_cast*>(buffer.get()) + ->HasOneRef(); + } case VideoFrameBuffer::Type::kNV12: { return static_cast*>(buffer.get()) ->HasOneRef(); @@ -116,6 +134,68 @@ rtc::scoped_refptr VideoFrameBufferPool::CreateI420Buffer( return buffer; } +rtc::scoped_refptr VideoFrameBufferPool::CreateI444Buffer( + int width, + int height) { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); + + rtc::scoped_refptr existing_buffer = + GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI444); + if (existing_buffer) { + // Cast is safe because the only way kI444 buffer is created is + // in the same function below, where |RefCountedObject| + // is created. + rtc::RefCountedObject* raw_buffer = + static_cast*>(existing_buffer.get()); + // Creates a new scoped_refptr, which is also pointing to the same + // RefCountedObject as buffer, increasing ref count. + return rtc::scoped_refptr(raw_buffer); + } + + if (buffers_.size() >= max_number_of_buffers_) + return nullptr; + // Allocate new buffer. + rtc::scoped_refptr buffer = + rtc::make_ref_counted(width, height); + + if (zero_initialize_) + buffer->InitializeData(); + + buffers_.push_back(buffer); + return buffer; +} + +rtc::scoped_refptr VideoFrameBufferPool::CreateI422Buffer( + int width, + int height) { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); + + rtc::scoped_refptr existing_buffer = + GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI422); + if (existing_buffer) { + // Cast is safe because the only way kI422 buffer is created is + // in the same function below, where |RefCountedObject| + // is created. + rtc::RefCountedObject* raw_buffer = + static_cast*>(existing_buffer.get()); + // Creates a new scoped_refptr, which is also pointing to the same + // RefCountedObject as buffer, increasing ref count. + return rtc::scoped_refptr(raw_buffer); + } + + if (buffers_.size() >= max_number_of_buffers_) + return nullptr; + // Allocate new buffer. 
+ rtc::scoped_refptr buffer = + rtc::make_ref_counted(width, height); + + if (zero_initialize_) + buffer->InitializeData(); + + buffers_.push_back(buffer); + return buffer; +} + rtc::scoped_refptr VideoFrameBufferPool::CreateNV12Buffer( int width, int height) { @@ -147,6 +227,60 @@ rtc::scoped_refptr VideoFrameBufferPool::CreateNV12Buffer( return buffer; } +rtc::scoped_refptr VideoFrameBufferPool::CreateI010Buffer( + int width, + int height) { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); + + rtc::scoped_refptr existing_buffer = + GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI010); + if (existing_buffer) { + // Cast is safe because the only way kI010 buffer is created is + // in the same function below, where |RefCountedObject| + // is created. + rtc::RefCountedObject* raw_buffer = + static_cast*>(existing_buffer.get()); + // Creates a new scoped_refptr, which is also pointing to the same + // RefCountedObject as buffer, increasing ref count. + return rtc::scoped_refptr(raw_buffer); + } + + if (buffers_.size() >= max_number_of_buffers_) + return nullptr; + // Allocate new buffer. + rtc::scoped_refptr buffer = I010Buffer::Create(width, height); + + buffers_.push_back(buffer); + return buffer; +} + +rtc::scoped_refptr VideoFrameBufferPool::CreateI210Buffer( + int width, + int height) { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); + + rtc::scoped_refptr existing_buffer = + GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI210); + if (existing_buffer) { + // Cast is safe because the only way kI210 buffer is created is + // in the same function below, where |RefCountedObject| + // is created. + rtc::RefCountedObject* raw_buffer = + static_cast*>(existing_buffer.get()); + // Creates a new scoped_refptr, which is also pointing to the same + // RefCountedObject as buffer, increasing ref count. + return rtc::scoped_refptr(raw_buffer); + } + + if (buffers_.size() >= max_number_of_buffers_) + return nullptr; + // Allocate new buffer. + rtc::scoped_refptr buffer = I210Buffer::Create(width, height); + + buffers_.push_back(buffer); + return buffer; +} + rtc::scoped_refptr VideoFrameBufferPool::GetExistingBuffer( int width, int height, diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/bit_writer.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/bit_writer.h index 85340c380d..421e7c4370 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/bit_writer.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/bit_writer.h @@ -20,7 +20,6 @@ #include "absl/strings/string_view.h" #include "rtc_base/bit_buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -36,6 +35,9 @@ class BitWriter final { RTC_DCHECK_GT(byte_count, 0); } + BitWriter(const BitWriter&) = delete; + BitWriter& operator=(const BitWriter&) = delete; + void WriteBits(uint64_t val, size_t bit_count); void WriteBits(absl::string_view input); @@ -52,8 +54,6 @@ class BitWriter final { // to go anywhere near the limit, though, so this is good enough. 
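  // The explicitly deleted copy operations added above serve the same purpose
  // as the RTC_DISALLOW_COPY_AND_ASSIGN(BitWriter) line removed just below,
  // without pulling in rtc_base/constructor_magic.h:
  //   BitWriter(const BitWriter&) = delete;
  //   BitWriter& operator=(const BitWriter&) = delete;
  // Attempting to copy a BitWriter fails at compile time.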
size_t written_bits_; bool valid_; - - RTC_DISALLOW_COPY_AND_ASSIGN(BitWriter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.cc index a96d3a7dc2..c80424574c 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.cc @@ -16,12 +16,12 @@ #include #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" #include "logging/rtc_event_log/encoder/bit_writer.h" #include "logging/rtc_event_log/encoder/var_int.h" #include "rtc_base/bit_buffer.h" #include "rtc_base/bitstream_reader.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -187,6 +187,9 @@ class FixedLengthDeltaEncoder final { absl::optional base, const std::vector>& values); + FixedLengthDeltaEncoder(const FixedLengthDeltaEncoder&) = delete; + FixedLengthDeltaEncoder& operator=(const FixedLengthDeltaEncoder&) = delete; + private: // Calculate min/max values of unsigned/signed deltas, given the bit width // of all the values in the series. @@ -249,8 +252,6 @@ class FixedLengthDeltaEncoder final { // ctor has finished running when this is constructed, so that the lower // bound on the buffer size would be guaranteed correct. std::unique_ptr writer_; - - RTC_DISALLOW_COPY_AND_ASSIGN(FixedLengthDeltaEncoder); }; // TODO(eladalon): Reduce the number of passes. @@ -554,7 +555,7 @@ class FixedLengthDeltaDecoder final { // bitstream. Note that this does NOT imply that stream is valid, and will // be decoded successfully. It DOES imply that all other decoder classes // will fail to decode this input, though. - static bool IsSuitableDecoderFor(const std::string& input); + static bool IsSuitableDecoderFor(absl::string_view input); // Assuming that `input` is the result of fixed-size delta-encoding // that took place with the same value to `base` and over `num_of_deltas` @@ -562,10 +563,13 @@ class FixedLengthDeltaDecoder final { // If an error occurs (can happen if `input` is corrupt), an empty // vector will be returned. static std::vector> DecodeDeltas( - const std::string& input, + absl::string_view input, absl::optional base, size_t num_of_deltas); + FixedLengthDeltaDecoder(const FixedLengthDeltaDecoder&) = delete; + FixedLengthDeltaDecoder& operator=(const FixedLengthDeltaDecoder&) = delete; + private: // Reads the encoding header in `input` and returns a FixedLengthDeltaDecoder // with the corresponding configuration, that can be used to decode the @@ -576,7 +580,7 @@ class FixedLengthDeltaDecoder final { // the entire stream is free of error. Rather, only the encoding header is // examined and guaranteed. static std::unique_ptr Create( - const std::string& input, + absl::string_view input, absl::optional base, size_t num_of_deltas); @@ -619,11 +623,9 @@ class FixedLengthDeltaDecoder final { // The number of values to be known to be decoded. 
const size_t num_of_deltas_; - - RTC_DISALLOW_COPY_AND_ASSIGN(FixedLengthDeltaDecoder); }; -bool FixedLengthDeltaDecoder::IsSuitableDecoderFor(const std::string& input) { +bool FixedLengthDeltaDecoder::IsSuitableDecoderFor(absl::string_view input) { BitstreamReader reader(input); uint64_t encoding_type_bits = reader.ReadBits(kBitsInHeaderForEncodingType); if (!reader.Ok()) { @@ -638,7 +640,7 @@ bool FixedLengthDeltaDecoder::IsSuitableDecoderFor(const std::string& input) { } std::vector> FixedLengthDeltaDecoder::DecodeDeltas( - const std::string& input, + absl::string_view input, absl::optional base, size_t num_of_deltas) { auto decoder = FixedLengthDeltaDecoder::Create(input, base, num_of_deltas); @@ -650,7 +652,7 @@ std::vector> FixedLengthDeltaDecoder::DecodeDeltas( } std::unique_ptr FixedLengthDeltaDecoder::Create( - const std::string& input, + absl::string_view input, absl::optional base, size_t num_of_deltas) { BitstreamReader reader(input); @@ -803,7 +805,7 @@ std::string EncodeDeltas(absl::optional base, } std::vector> DecodeDeltas( - const std::string& input, + absl::string_view input, absl::optional base, size_t num_of_deltas) { RTC_DCHECK_GT(num_of_deltas, 0); // Allows empty vector to indicate error. diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.h index 614012acd3..779cdc6b2f 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.h @@ -17,6 +17,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" namespace webrtc { @@ -39,7 +40,7 @@ std::string EncodeDeltas(absl::optional base, // vector, which signals an error. // TODO(eladalon): Split into optional and non-optional variants (efficiency). 
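// Illustrative round trip through EncodeDeltas/DecodeDeltas (values made up):
//   std::vector<absl::optional<uint64_t>> values = {90, 95, absl::nullopt, 100};
//   std::string encoded = EncodeDeltas(/*base=*/80, values);
//   std::vector<absl::optional<uint64_t>> decoded =
//       DecodeDeltas(encoded, /*base=*/80, values.size());
// With `input` now an absl::string_view (see the declaration that follows),
// callers may pass either a std::string or a non-owning view without copying.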
std::vector> DecodeDeltas( - const std::string& input, + absl::string_view input, absl::optional base, size_t num_of_deltas); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc index ff72163a81..9bc770849c 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc @@ -32,6 +32,7 @@ #include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" #include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" #include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" +#include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" #include "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h" #include "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h" #include "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h" @@ -44,7 +45,6 @@ #include "modules/rtp_rtcp/source/rtcp_packet/app.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" -#include "modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h" #include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" #include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" #include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" @@ -332,6 +332,11 @@ std::string RtcEventLogEncoderLegacy::Encode(const RtcEvent& event) { return EncodeProbeResultSuccess(rtc_event); } + case RtcEvent::Type::RemoteEstimateEvent: { + auto& rtc_event = static_cast(event); + return EncodeRemoteEstimate(rtc_event); + } + case RtcEvent::Type::RtcpPacketIncoming: { auto& rtc_event = static_cast(event); return EncodeRtcpPacketIncoming(rtc_event); @@ -363,8 +368,13 @@ std::string RtcEventLogEncoderLegacy::Encode(const RtcEvent& event) { static_cast(event); return EncodeVideoSendStreamConfig(rtc_event); } + case RtcEvent::Type::BeginV3Log: + case RtcEvent::Type::EndV3Log: + // These special events are written as part of starting + // and stopping the log, and only as part of version 3 of the format. 
+ RTC_DCHECK_NOTREACHED(); + break; case RtcEvent::Type::RouteChangeEvent: - case RtcEvent::Type::RemoteEstimateEvent: case RtcEvent::Type::GenericPacketReceived: case RtcEvent::Type::GenericPacketSent: case RtcEvent::Type::GenericAckReceived: @@ -582,6 +592,23 @@ std::string RtcEventLogEncoderLegacy::EncodeProbeResultSuccess( return Serialize(&rtclog_event); } +std::string RtcEventLogEncoderLegacy::EncodeRemoteEstimate( + const RtcEventRemoteEstimate& event) { + rtclog::Event rtclog_event; + rtclog_event.set_timestamp_us(event.timestamp_us()); + rtclog_event.set_type(rtclog::Event::REMOTE_ESTIMATE); + + auto* remote_estimate = rtclog_event.mutable_remote_estimate(); + if (event.link_capacity_lower_.IsFinite()) + remote_estimate->set_link_capacity_lower_kbps( + event.link_capacity_lower_.kbps()); + if (event.link_capacity_upper_.IsFinite()) + remote_estimate->set_link_capacity_upper_kbps( + event.link_capacity_upper_.kbps()); + + return Serialize(&rtclog_event); +} + std::string RtcEventLogEncoderLegacy::EncodeRtcpPacketIncoming( const RtcEventRtcpPacketIncoming& event) { return EncodeRtcpPacket(event.timestamp_us(), event.packet(), true); @@ -707,15 +734,13 @@ std::string RtcEventLogEncoderLegacy::EncodeRtcpPacket( uint32_t block_size = next_block - block_begin; switch (header.type()) { case rtcp::Bye::kPacketType: - case rtcp::ExtendedJitterReport::kPacketType: case rtcp::ExtendedReports::kPacketType: case rtcp::Psfb::kPacketType: case rtcp::ReceiverReport::kPacketType: case rtcp::Rtpfb::kPacketType: case rtcp::SenderReport::kPacketType: - // We log sender reports, receiver reports, bye messages - // inter-arrival jitter, third-party loss reports, payload-specific - // feedback and extended reports. + // We log sender reports, receiver reports, bye messages, third-party + // loss reports, payload-specific feedback and extended reports. memcpy(buffer.data() + buffer_length, block_begin, block_size); buffer_length += block_size; break; diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h index 37296e797f..33c530789b 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h @@ -25,6 +25,7 @@ namespace rtclog { class Event; // Auto-generated from protobuf. 
} // namespace rtclog +class RtcEventAlrState; class RtcEventAudioNetworkAdaptation; class RtcEventAudioPlayout; class RtcEventAudioReceiveStreamConfig; @@ -38,13 +39,13 @@ class RtcEventLoggingStopped; class RtcEventProbeClusterCreated; class RtcEventProbeResultFailure; class RtcEventProbeResultSuccess; +class RtcEventRemoteEstimate; class RtcEventRtcpPacketIncoming; class RtcEventRtcpPacketOutgoing; class RtcEventRtpPacketIncoming; class RtcEventRtpPacketOutgoing; class RtcEventVideoReceiveStreamConfig; class RtcEventVideoSendStreamConfig; -class RtcEventAlrState; class RtpPacket; class RtcEventLogEncoderLegacy final : public RtcEventLogEncoder { @@ -81,6 +82,7 @@ class RtcEventLogEncoderLegacy final : public RtcEventLogEncoder { const RtcEventProbeClusterCreated& event); std::string EncodeProbeResultFailure(const RtcEventProbeResultFailure& event); std::string EncodeProbeResultSuccess(const RtcEventProbeResultSuccess&); + std::string EncodeRemoteEstimate(const RtcEventRemoteEstimate& event); std::string EncodeRtcpPacketIncoming(const RtcEventRtcpPacketIncoming& event); std::string EncodeRtcpPacketOutgoing(const RtcEventRtcpPacketOutgoing& event); std::string EncodeRtpPacketIncoming(const RtcEventRtpPacketIncoming& event); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc index 569f7eaa8f..4ececa284d 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc @@ -49,7 +49,6 @@ #include "modules/rtp_rtcp/source/rtcp_packet/app.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" -#include "modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h" #include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" #include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" #include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" @@ -308,15 +307,13 @@ size_t RemoveNonAllowlistedRtcpBlocks(const rtc::Buffer& packet, size_t block_size = next_block - block_begin; switch (header.type()) { case rtcp::Bye::kPacketType: - case rtcp::ExtendedJitterReport::kPacketType: case rtcp::ExtendedReports::kPacketType: case rtcp::Psfb::kPacketType: case rtcp::ReceiverReport::kPacketType: case rtcp::Rtpfb::kPacketType: case rtcp::SenderReport::kPacketType: - // We log sender reports, receiver reports, bye messages - // inter-arrival jitter, third-party loss reports, payload-specific - // feedback and extended reports. + // We log sender reports, receiver reports, bye messages, third-party + // loss reports, payload-specific feedback and extended reports. // TODO(terelius): As an optimization, don't copy anything if all blocks // in the packet are allowlisted types. memcpy(buffer + buffer_length, block_begin, block_size); @@ -882,6 +879,12 @@ std::string RtcEventLogEncoderNewFormat::EncodeBatch( frames_decoded[rtc_event->ssrc()].emplace_back(rtc_event); break; } + case RtcEvent::Type::BeginV3Log: + case RtcEvent::Type::EndV3Log: + // These special events are written as part of starting + // and stopping the log, and only as part of version 3 of the format. 
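          // BeginV3Log/EndV3Log are written by RtcEventLogEncoderV3
          // (EncodeLogStart/EncodeLogEnd, added further down in this change),
          // so hitting them in this encoder indicates a programming error.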
+ RTC_DCHECK_NOTREACHED(); + break; } } diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.cc new file mode 100644 index 0000000000..131aae1de8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.cc @@ -0,0 +1,164 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.h" + +#include +#include + +#include "absl/types/optional.h" +#include "logging/rtc_event_log/encoder/rtc_event_log_encoder_common.h" +#include "logging/rtc_event_log/encoder/var_int.h" +#include "logging/rtc_event_log/events/rtc_event_alr_state.h" +#include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" +#include "logging/rtc_event_log/events/rtc_event_audio_playout.h" +#include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_begin_log.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" +#include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" +#include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h" +#include "logging/rtc_event_log/events/rtc_event_end_log.h" +#include "logging/rtc_event_log/events/rtc_event_frame_decoded.h" +#include "logging/rtc_event_log/events/rtc_event_generic_ack_received.h" +#include "logging/rtc_event_log/events/rtc_event_generic_packet_received.h" +#include "logging/rtc_event_log/events/rtc_event_generic_packet_sent.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" +#include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" +#include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" +#include "logging/rtc_event_log/events/rtc_event_route_change.h" +#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h" +#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h" +#include "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h" +#include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" +#include "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +std::string RtcEventLogEncoderV3::EncodeLogStart(int64_t timestamp_us, + int64_t utc_time_us) { + std::unique_ptr begin_log = + std::make_unique(Timestamp::Micros(timestamp_us), + Timestamp::Micros(utc_time_us)); + std::vector batch; + batch.push_back(begin_log.get()); + + std::string encoded_event = RtcEventBeginLog::Encode(batch); + + return 
encoded_event; +} + +std::string RtcEventLogEncoderV3::EncodeLogEnd(int64_t timestamp_us) { + std::unique_ptr end_log = + std::make_unique(Timestamp::Micros(timestamp_us)); + std::vector batch; + batch.push_back(end_log.get()); + + std::string encoded_event = RtcEventEndLog::Encode(batch); + + return encoded_event; +} + +RtcEventLogEncoderV3::RtcEventLogEncoderV3() { + encoders_[RtcEvent::Type::AlrStateEvent] = RtcEventAlrState::Encode; + encoders_[RtcEvent::Type::AudioNetworkAdaptation] = + RtcEventAudioNetworkAdaptation::Encode; + encoders_[RtcEvent::Type::AudioPlayout] = RtcEventAudioPlayout::Encode; + encoders_[RtcEvent::Type::AudioReceiveStreamConfig] = + RtcEventAudioReceiveStreamConfig::Encode; + encoders_[RtcEvent::Type::AudioSendStreamConfig] = + RtcEventAudioSendStreamConfig::Encode; + encoders_[RtcEvent::Type::BweUpdateDelayBased] = + RtcEventBweUpdateDelayBased::Encode; + encoders_[RtcEvent::Type::BweUpdateLossBased] = + RtcEventBweUpdateLossBased::Encode; + encoders_[RtcEvent::Type::DtlsTransportState] = + RtcEventDtlsTransportState::Encode; + encoders_[RtcEvent::Type::DtlsWritableState] = + RtcEventDtlsWritableState::Encode; + encoders_[RtcEvent::Type::FrameDecoded] = RtcEventFrameDecoded::Encode; + encoders_[RtcEvent::Type::GenericAckReceived] = + RtcEventGenericAckReceived::Encode; + encoders_[RtcEvent::Type::GenericPacketReceived] = + RtcEventGenericPacketReceived::Encode; + encoders_[RtcEvent::Type::GenericPacketSent] = + RtcEventGenericPacketSent::Encode; + encoders_[RtcEvent::Type::IceCandidatePairConfig] = + RtcEventIceCandidatePairConfig::Encode; + encoders_[RtcEvent::Type::IceCandidatePairEvent] = + RtcEventIceCandidatePair::Encode; + encoders_[RtcEvent::Type::ProbeClusterCreated] = + RtcEventProbeClusterCreated::Encode; + encoders_[RtcEvent::Type::ProbeResultFailure] = + RtcEventProbeResultFailure::Encode; + encoders_[RtcEvent::Type::ProbeResultSuccess] = + RtcEventProbeResultSuccess::Encode; + encoders_[RtcEvent::Type::RemoteEstimateEvent] = + RtcEventRemoteEstimate::Encode; + encoders_[RtcEvent::Type::RouteChangeEvent] = RtcEventRouteChange::Encode; + encoders_[RtcEvent::Type::RtcpPacketIncoming] = + RtcEventRtcpPacketIncoming::Encode; + encoders_[RtcEvent::Type::RtcpPacketOutgoing] = + RtcEventRtcpPacketOutgoing::Encode; + encoders_[RtcEvent::Type::RtpPacketIncoming] = + RtcEventRtpPacketIncoming::Encode; + encoders_[RtcEvent::Type::RtpPacketOutgoing] = + RtcEventRtpPacketOutgoing::Encode; + encoders_[RtcEvent::Type::VideoReceiveStreamConfig] = + RtcEventVideoReceiveStreamConfig::Encode; + encoders_[RtcEvent::Type::VideoSendStreamConfig] = + RtcEventVideoSendStreamConfig::Encode; +} + +std::string RtcEventLogEncoderV3::EncodeBatch( + std::deque>::const_iterator begin, + std::deque>::const_iterator end) { + struct EventGroupKey { + // Events are grouped by event type. For compression efficiency, + // events can optionally have a secondary key, in most cases the + // SSRC. 
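The EventGroupKey comment above describes how the v3 encoder buckets events before encoding: one bucket per (event type, secondary key) pair, so each bucket can be delta-compressed as a single batch. A standalone sketch of that grouping, using illustrative Event/Type names rather than the real RtcEvent classes:

#include <cstdint>
#include <map>
#include <utility>
#include <vector>

enum class Type { kAudioPlayout, kAlrState };

struct Event {
  Type type;
  uint32_t ssrc;  // Secondary key; 0 when the event has no natural key.
};

using GroupKey = std::pair<Type, uint32_t>;

std::map<GroupKey, std::vector<const Event*>> GroupEvents(
    const std::vector<Event>& events) {
  std::map<GroupKey, std::vector<const Event*>> groups;
  for (const Event& e : events)
    groups[{e.type, e.ssrc}].push_back(&e);
  return groups;  // Each value would then be encoded as one batch.
}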
+ RtcEvent::Type type; + uint32_t secondary_group_key; + + bool operator<(EventGroupKey other) const { + return type < other.type || + (type == other.type && + secondary_group_key < other.secondary_group_key); + } + }; + + std::map> event_groups; + + for (auto it = begin; it != end; ++it) { + event_groups[{(*it)->GetType(), (*it)->GetGroupKey()}].push_back(it->get()); + } + + std::string encoded_output; + for (auto& kv : event_groups) { + auto it = encoders_.find(kv.first.type); + RTC_DCHECK(it != encoders_.end()); + if (it != encoders_.end()) { + auto& encoder = it->second; + // TODO(terelius): Use some "string builder" or preallocate? + encoded_output += encoder(kv.second); + } + } + + return encoded_output; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.h new file mode 100644 index 0000000000..cb796ec562 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.h @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef LOGGING_RTC_EVENT_LOG_ENCODER_RTC_EVENT_LOG_ENCODER_V3_H_ +#define LOGGING_RTC_EVENT_LOG_ENCODER_RTC_EVENT_LOG_ENCODER_V3_H_ + +#include +#include +#include +#include + +#include "api/array_view.h" +#include "logging/rtc_event_log/encoder/rtc_event_log_encoder.h" +#include "logging/rtc_event_log/events/rtc_event_definition.h" + +namespace webrtc { + +class RtcEventLogEncoderV3 final : public RtcEventLogEncoder { + public: + RtcEventLogEncoderV3(); + ~RtcEventLogEncoderV3() override = default; + + std::string EncodeBatch( + std::deque>::const_iterator begin, + std::deque>::const_iterator end) override; + + std::string EncodeLogStart(int64_t timestamp_us, + int64_t utc_time_us) override; + std::string EncodeLogEnd(int64_t timestamp_us) override; + + private: + std::map)>> + encoders_; +}; + +} // namespace webrtc + +#endif // LOGGING_RTC_EVENT_LOG_ENCODER_RTC_EVENT_LOG_ENCODER_V3_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h index 59d633ced7..666fae1c63 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h @@ -33,7 +33,7 @@ class FixedLengthEncodingParametersV3 final { static FixedLengthEncodingParametersV3 CalculateParameters( uint64_t base, - const rtc::ArrayView values, + rtc::ArrayView values, uint64_t value_bit_width, bool values_optional); static absl::optional ParseDeltaHeader( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/logged_rtp_rtcp.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/logged_rtp_rtcp.h new file mode 100644 index 0000000000..00689a0a16 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/logged_rtp_rtcp.h @@ -0,0 +1,260 @@ +/* + * Copyright 2022 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef LOGGING_RTC_EVENT_LOG_EVENTS_LOGGED_RTP_RTCP_H_ +#define LOGGING_RTC_EVENT_LOG_EVENTS_LOGGED_RTP_RTCP_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/rtp_headers.h" +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/source/rtcp_packet/bye.h" +#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" +#include "modules/rtp_rtcp/source/rtcp_packet/fir.h" +#include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" +#include "modules/rtp_rtcp/source/rtcp_packet/nack.h" +#include "modules/rtp_rtcp/source/rtcp_packet/pli.h" +#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/remb.h" +#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" + +namespace webrtc { + +struct LoggedRtpPacket { + LoggedRtpPacket(Timestamp timestamp, + RTPHeader header, + size_t header_length, + size_t total_length) + : timestamp(timestamp), + header(header), + header_length(header_length), + total_length(total_length) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp; + // TODO(terelius): This allocates space for 15 CSRCs even if none are used. + RTPHeader header; + size_t header_length; + size_t total_length; +}; + +struct LoggedRtpPacketIncoming { + LoggedRtpPacketIncoming(Timestamp timestamp, + RTPHeader header, + size_t header_length, + size_t total_length) + : rtp(timestamp, header, header_length, total_length) {} + int64_t log_time_us() const { return rtp.timestamp.us(); } + int64_t log_time_ms() const { return rtp.timestamp.ms(); } + Timestamp log_time() const { return rtp.timestamp; } + + LoggedRtpPacket rtp; +}; + +struct LoggedRtpPacketOutgoing { + LoggedRtpPacketOutgoing(Timestamp timestamp, + RTPHeader header, + size_t header_length, + size_t total_length) + : rtp(timestamp, header, header_length, total_length) {} + int64_t log_time_us() const { return rtp.timestamp.us(); } + int64_t log_time_ms() const { return rtp.timestamp.ms(); } + Timestamp log_time() const { return rtp.timestamp; } + + LoggedRtpPacket rtp; +}; + +struct LoggedRtcpPacket { + LoggedRtcpPacket(Timestamp timestamp, const std::vector& packet) + : timestamp(timestamp), raw_data(packet) {} + LoggedRtcpPacket(Timestamp timestamp, absl::string_view packet) + : timestamp(timestamp), raw_data(packet.size()) { + memcpy(raw_data.data(), packet.data(), packet.size()); + } + + LoggedRtcpPacket(const LoggedRtcpPacket& rhs) = default; + + ~LoggedRtcpPacket() = default; + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp; + std::vector raw_data; +}; + +struct LoggedRtcpPacketIncoming { + LoggedRtcpPacketIncoming(Timestamp timestamp, + const std::vector& packet) + : rtcp(timestamp, packet) {} + LoggedRtcpPacketIncoming(Timestamp timestamp, absl::string_view packet) + : rtcp(timestamp, packet) {} + + int64_t 
log_time_us() const { return rtcp.timestamp.us(); } + int64_t log_time_ms() const { return rtcp.timestamp.ms(); } + Timestamp log_time() const { return rtcp.timestamp; } + + LoggedRtcpPacket rtcp; +}; + +struct LoggedRtcpPacketOutgoing { + LoggedRtcpPacketOutgoing(Timestamp timestamp, + const std::vector& packet) + : rtcp(timestamp, packet) {} + LoggedRtcpPacketOutgoing(Timestamp timestamp, absl::string_view packet) + : rtcp(timestamp, packet) {} + + int64_t log_time_us() const { return rtcp.timestamp.us(); } + int64_t log_time_ms() const { return rtcp.timestamp.ms(); } + Timestamp log_time() const { return rtcp.timestamp; } + + LoggedRtcpPacket rtcp; +}; + +struct LoggedRtcpPacketReceiverReport { + LoggedRtcpPacketReceiverReport() = default; + LoggedRtcpPacketReceiverReport(Timestamp timestamp, + const rtcp::ReceiverReport& rr) + : timestamp(timestamp), rr(rr) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::ReceiverReport rr; +}; + +struct LoggedRtcpPacketSenderReport { + LoggedRtcpPacketSenderReport() = default; + LoggedRtcpPacketSenderReport(Timestamp timestamp, + const rtcp::SenderReport& sr) + : timestamp(timestamp), sr(sr) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::SenderReport sr; +}; + +struct LoggedRtcpPacketExtendedReports { + LoggedRtcpPacketExtendedReports() = default; + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::ExtendedReports xr; +}; + +struct LoggedRtcpPacketRemb { + LoggedRtcpPacketRemb() = default; + LoggedRtcpPacketRemb(Timestamp timestamp, const rtcp::Remb& remb) + : timestamp(timestamp), remb(remb) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::Remb remb; +}; + +struct LoggedRtcpPacketNack { + LoggedRtcpPacketNack() = default; + LoggedRtcpPacketNack(Timestamp timestamp, const rtcp::Nack& nack) + : timestamp(timestamp), nack(nack) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::Nack nack; +}; + +struct LoggedRtcpPacketFir { + LoggedRtcpPacketFir() = default; + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::Fir fir; +}; + +struct LoggedRtcpPacketPli { + LoggedRtcpPacketPli() = default; + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::Pli pli; +}; + +struct LoggedRtcpPacketTransportFeedback { + LoggedRtcpPacketTransportFeedback() + : transport_feedback(/*include_timestamps=*/true, /*include_lost*/ true) { + } + 
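All of the Logged* structs in this header expose the same log_time_us()/log_time_ms()/log_time() accessors, which lets analysis code order heterogeneous entries on a single timeline. A small usage sketch, with LoggedEntry standing in for any of the structs above:

#include <algorithm>
#include <cstdint>
#include <vector>

struct LoggedEntry {
  int64_t timestamp_us = 0;
  int64_t log_time_us() const { return timestamp_us; }
};

// Orders mixed log entries chronologically before analysis.
void SortByLogTime(std::vector<LoggedEntry>& entries) {
  std::sort(entries.begin(), entries.end(),
            [](const LoggedEntry& a, const LoggedEntry& b) {
              return a.log_time_us() < b.log_time_us();
            });
}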
LoggedRtcpPacketTransportFeedback( + Timestamp timestamp, + const rtcp::TransportFeedback& transport_feedback) + : timestamp(timestamp), transport_feedback(transport_feedback) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::TransportFeedback transport_feedback; +}; + +struct LoggedRtcpPacketLossNotification { + LoggedRtcpPacketLossNotification() = default; + LoggedRtcpPacketLossNotification( + Timestamp timestamp, + const rtcp::LossNotification& loss_notification) + : timestamp(timestamp), loss_notification(loss_notification) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::LossNotification loss_notification; +}; + +struct LoggedRtcpPacketBye { + LoggedRtcpPacketBye() = default; + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::Bye bye; +}; + +} // namespace webrtc + +#endif // LOGGING_RTC_EVENT_LOG_EVENTS_LOGGED_RTP_RTCP_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.cc index 3c307b9ca0..25941eb16b 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.cc @@ -13,6 +13,9 @@ #include "absl/memory/memory.h" namespace webrtc { +constexpr RtcEvent::Type RtcEventAlrState::kType; +constexpr RtcEventDefinition + RtcEventAlrState::definition_; RtcEventAlrState::RtcEventAlrState(bool in_alr) : in_alr_(in_alr) {} @@ -25,4 +28,11 @@ std::unique_ptr RtcEventAlrState::Copy() const { return absl::WrapUnique(new RtcEventAlrState(*this)); } +RtcEventLogParseStatus RtcEventAlrState::Parse( + absl::string_view s, + bool batched, + std::vector& output) { + return RtcEventAlrState::definition_.ParseBatch(s, batched, output); +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.h index 74d66015ef..9f595ecd90 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.h @@ -12,12 +12,32 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ALR_STATE_H_ #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_definition.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_field_extraction.h" namespace webrtc { +struct LoggedAlrStateEvent { + LoggedAlrStateEvent() = default; + LoggedAlrStateEvent(Timestamp timestamp, bool in_alr) + : timestamp(timestamp), in_alr(in_alr) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + 
Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + bool in_alr; +}; + class RtcEventAlrState final : public RtcEvent { public: static constexpr Type kType = Type::AlrStateEvent; @@ -32,22 +52,26 @@ class RtcEventAlrState final : public RtcEvent { bool in_alr() const { return in_alr_; } + static std::string Encode(rtc::ArrayView batch) { + return RtcEventAlrState::definition_.EncodeBatch(batch); + } + + static RtcEventLogParseStatus Parse(absl::string_view s, + bool batched, + std::vector& output); + private: RtcEventAlrState(const RtcEventAlrState& other); const bool in_alr_; -}; - -struct LoggedAlrStateEvent { - LoggedAlrStateEvent() = default; - LoggedAlrStateEvent(Timestamp timestamp, bool in_alr) - : timestamp(timestamp), in_alr(in_alr) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp timestamp = Timestamp::MinusInfinity(); - bool in_alr; + static constexpr RtcEventDefinition + definition_{{"AlrState", RtcEventAlrState::kType}, + {&RtcEventAlrState::in_alr_, + &LoggedAlrStateEvent::in_alr, + {"in_alr", /*id=*/1, FieldType::kFixed8, /*width=*/1}}}; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h index aeeb28e218..d4cae3abfa 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h @@ -12,13 +12,31 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_NETWORK_ADAPTATION_H_ #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" namespace webrtc { +struct LoggedAudioNetworkAdaptationEvent { + LoggedAudioNetworkAdaptationEvent() = default; + LoggedAudioNetworkAdaptationEvent(Timestamp timestamp, + const AudioEncoderRuntimeConfig& config) + : timestamp(timestamp), config(config) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + AudioEncoderRuntimeConfig config; +}; + struct AudioEncoderRuntimeConfig; class RtcEventAudioNetworkAdaptation final : public RtcEvent { @@ -36,25 +54,25 @@ class RtcEventAudioNetworkAdaptation final : public RtcEvent { const AudioEncoderRuntimeConfig& config() const { return *config_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventAudioNetworkAdaptation(const RtcEventAudioNetworkAdaptation& other); const std::unique_ptr config_; }; -struct LoggedAudioNetworkAdaptationEvent { - LoggedAudioNetworkAdaptationEvent() = default; - LoggedAudioNetworkAdaptationEvent(Timestamp timestamp, - const AudioEncoderRuntimeConfig& config) - : timestamp(timestamp), config(config) {} - - int64_t 
log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - AudioEncoderRuntimeConfig config; -}; - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_NETWORK_ADAPTATION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.cc index dae61c4df3..21a3f9266c 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.cc @@ -14,6 +14,11 @@ namespace webrtc { +constexpr RtcEventDefinition + RtcEventAudioPlayout::definition_; + RtcEventAudioPlayout::RtcEventAudioPlayout(uint32_t ssrc) : ssrc_(ssrc) {} RtcEventAudioPlayout::RtcEventAudioPlayout(const RtcEventAudioPlayout& other) diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.h index 00d07a65bf..196c3ca247 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.h @@ -13,13 +13,31 @@ #include +#include #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_definition.h" namespace webrtc { +struct LoggedAudioPlayoutEvent { + LoggedAudioPlayoutEvent() = default; + LoggedAudioPlayoutEvent(Timestamp timestamp, uint32_t ssrc) + : timestamp(timestamp), ssrc(ssrc) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + uint32_t ssrc; +}; + class RtcEventAudioPlayout final : public RtcEvent { public: static constexpr Type kType = Type::AudioPlayout; @@ -34,22 +52,35 @@ class RtcEventAudioPlayout final : public RtcEvent { uint32_t ssrc() const { return ssrc_; } + static std::string Encode(rtc::ArrayView batch) { + return RtcEventAudioPlayout::definition_.EncodeBatch(batch); + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::map>& output) { + std::vector temp_output; + auto status = RtcEventAudioPlayout::definition_.ParseBatch( + encoded_bytes, batched, temp_output); + for (const LoggedAudioPlayoutEvent& event : temp_output) { + output[event.ssrc].push_back(event); + } + return status; + } + private: RtcEventAudioPlayout(const RtcEventAudioPlayout& other); const uint32_t ssrc_; -}; - -struct LoggedAudioPlayoutEvent { - LoggedAudioPlayoutEvent() = default; - LoggedAudioPlayoutEvent(Timestamp timestamp, uint32_t ssrc) - : timestamp(timestamp), ssrc(ssrc) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp timestamp = Timestamp::MinusInfinity(); - uint32_t ssrc; + static constexpr RtcEventDefinition + definition_{{"AudioPlayout", RtcEventAudioPlayout::kType}, + {&RtcEventAudioPlayout::ssrc_, + &LoggedAudioPlayoutEvent::ssrc, + {"ssrc", /*id=*/1, FieldType::kFixed32, /*width=*/32}}}; }; } // namespace webrtc diff --git 
a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h index ccf76025e6..9863e235af 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h @@ -12,13 +12,30 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_RECEIVE_STREAM_CONFIG_H_ #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "logging/rtc_event_log/rtc_stream_config.h" namespace webrtc { +struct LoggedAudioRecvConfig { + LoggedAudioRecvConfig() = default; + LoggedAudioRecvConfig(Timestamp timestamp, const rtclog::StreamConfig config) + : timestamp(timestamp), config(config) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtclog::StreamConfig config; +}; + class RtcEventAudioReceiveStreamConfig final : public RtcEvent { public: static constexpr Type kType = Type::AudioReceiveStreamConfig; @@ -34,6 +51,19 @@ class RtcEventAudioReceiveStreamConfig final : public RtcEvent { const rtclog::StreamConfig& config() const { return *config_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventAudioReceiveStreamConfig( const RtcEventAudioReceiveStreamConfig& other); @@ -41,18 +71,6 @@ class RtcEventAudioReceiveStreamConfig final : public RtcEvent { const std::unique_ptr config_; }; -struct LoggedAudioRecvConfig { - LoggedAudioRecvConfig() = default; - LoggedAudioRecvConfig(Timestamp timestamp, const rtclog::StreamConfig config) - : timestamp(timestamp), config(config) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtclog::StreamConfig config; -}; - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_RECEIVE_STREAM_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h index 4e93871ae8..550723bcf0 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h @@ -12,12 +12,29 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_SEND_STREAM_CONFIG_H_ #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "logging/rtc_event_log/rtc_stream_config.h" namespace webrtc { +struct LoggedAudioSendConfig { + LoggedAudioSendConfig() = default; + LoggedAudioSendConfig(Timestamp timestamp, const rtclog::StreamConfig 
config) + : timestamp(timestamp), config(config) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtclog::StreamConfig config; +}; + class RtcEventAudioSendStreamConfig final : public RtcEvent { public: static constexpr Type kType = Type::AudioSendStreamConfig; @@ -33,23 +50,25 @@ class RtcEventAudioSendStreamConfig final : public RtcEvent { const rtclog::StreamConfig& config() const { return *config_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventAudioSendStreamConfig(const RtcEventAudioSendStreamConfig& other); const std::unique_ptr config_; }; -struct LoggedAudioSendConfig { - LoggedAudioSendConfig() = default; - LoggedAudioSendConfig(Timestamp timestamp, const rtclog::StreamConfig config) - : timestamp(timestamp), config(config) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtclog::StreamConfig config; -}; } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_SEND_STREAM_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_begin_log.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_begin_log.cc new file mode 100644 index 0000000000..49b9effa9e --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_begin_log.cc @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "logging/rtc_event_log/events/rtc_event_begin_log.h" + +#include "absl/strings/string_view.h" + +namespace webrtc { +constexpr RtcEvent::Type RtcEventBeginLog::kType; +constexpr EventParameters RtcEventBeginLog::event_params_; +constexpr FieldParameters RtcEventBeginLog::utc_start_time_params_; + +RtcEventBeginLog::RtcEventBeginLog(Timestamp timestamp, + Timestamp utc_start_time) + : RtcEvent(timestamp.us()), utc_start_time_ms_(utc_start_time.ms()) {} + +RtcEventBeginLog::RtcEventBeginLog(const RtcEventBeginLog& other) + : RtcEvent(other.timestamp_us_) {} + +RtcEventBeginLog::~RtcEventBeginLog() = default; + +std::string RtcEventBeginLog::Encode(rtc::ArrayView batch) { + EventEncoder encoder(event_params_, batch); + + encoder.EncodeField( + utc_start_time_params_, + ExtractRtcEventMember(batch, &RtcEventBeginLog::utc_start_time_ms_)); + + return encoder.AsString(); +} + +RtcEventLogParseStatus RtcEventBeginLog::Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + EventParser parser; + auto status = parser.Initialize(encoded_bytes, batched); + if (!status.ok()) + return status; + + rtc::ArrayView output_batch = + ExtendLoggedBatch(output, parser.NumEventsInBatch()); + + constexpr FieldParameters timestamp_params{ + "timestamp_ms", FieldParameters::kTimestampField, FieldType::kVarInt, 64}; + RtcEventLogParseStatusOr> result = + parser.ParseNumericField(timestamp_params); + if (!result.ok()) + return result.status(); + status = PopulateRtcEventTimestamp( + result.value(), &LoggedStartEvent::timestamp, output_batch); + if (!status.ok()) + return status; + + result = parser.ParseNumericField(utc_start_time_params_); + if (!result.ok()) + return result.status(); + status = PopulateRtcEventTimestamp( + result.value(), &LoggedStartEvent::utc_start_time, output_batch); + if (!status.ok()) + return status; + + return RtcEventLogParseStatus::Success(); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_begin_log.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_begin_log.h new file mode 100644 index 0000000000..f3b74c117e --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_begin_log.h @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_BEGIN_LOG_H_ +#define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_BEGIN_LOG_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/rtc_event_log/rtc_event.h" +#include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_field_extraction.h" + +namespace webrtc { + +struct LoggedStartEvent { + LoggedStartEvent() = default; + + explicit LoggedStartEvent(Timestamp timestamp) + : LoggedStartEvent(timestamp, timestamp) {} + + LoggedStartEvent(Timestamp timestamp, Timestamp utc_start_time) + : timestamp(timestamp), utc_start_time(utc_start_time) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp utc_time() const { return utc_start_time; } + + Timestamp timestamp = Timestamp::PlusInfinity(); + Timestamp utc_start_time = Timestamp::PlusInfinity(); +}; + +class RtcEventBeginLog final : public RtcEvent { + public: + static constexpr Type kType = Type::BeginV3Log; + + RtcEventBeginLog(Timestamp timestamp, Timestamp utc_start_time); + ~RtcEventBeginLog() override; + + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } + + static std::string Encode(rtc::ArrayView batch); + + static RtcEventLogParseStatus Parse(absl::string_view encoded_bytes, + bool batched, + std::vector& output); + + private: + RtcEventBeginLog(const RtcEventBeginLog& other); + + int64_t utc_start_time_ms_; + + static constexpr EventParameters event_params_{"BeginLog", + RtcEventBeginLog::kType}; + static constexpr FieldParameters utc_start_time_params_{ + "utc_start_time_ms", /*id=*/1, FieldType::kVarInt, /*width=*/64}; +}; + +} // namespace webrtc +#endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_BEGIN_LOG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc index f3f12192c4..0e98b2ff11 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc @@ -15,6 +15,12 @@ namespace webrtc { +constexpr RtcEventDefinition + RtcEventBweUpdateDelayBased::definition_; + RtcEventBweUpdateDelayBased::RtcEventBweUpdateDelayBased( int32_t bitrate_bps, BandwidthUsage detector_state) diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h index 522f98fd8d..796f119388 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h @@ -13,14 +13,76 @@ #include +#include #include +#include +#include +#include "absl/strings/string_view.h" #include "api/network_state_predictor.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_definition.h" namespace webrtc { +// Separate the event log encoding from the enum values. 
+// As long as the enum values are the same as the encodings, +// the two conversion functions can be compiled to (roughly) +// a range check each. +template <> +class RtcEventLogEnum { + static constexpr uint64_t kBwNormal = 0; + static constexpr uint64_t kBwUnderusing = 1; + static constexpr uint64_t kBwOverusing = 2; + + public: + static uint64_t Encode(BandwidthUsage x) { + switch (x) { + case BandwidthUsage::kBwNormal: + return kBwNormal; + case BandwidthUsage::kBwUnderusing: + return kBwUnderusing; + case BandwidthUsage::kBwOverusing: + return kBwOverusing; + case BandwidthUsage::kLast: + RTC_DCHECK_NOTREACHED(); + } + RTC_DCHECK_NOTREACHED(); + return std::numeric_limits::max(); + } + static RtcEventLogParseStatusOr Decode(uint64_t x) { + switch (x) { + case kBwNormal: + return BandwidthUsage::kBwNormal; + case kBwUnderusing: + return BandwidthUsage::kBwUnderusing; + case kBwOverusing: + return BandwidthUsage::kBwOverusing; + } + return RtcEventLogParseStatus::Error("Failed to decode BandwidthUsage enum", + __FILE__, __LINE__); + } +}; + +struct LoggedBweDelayBasedUpdate { + LoggedBweDelayBasedUpdate() = default; + LoggedBweDelayBasedUpdate(Timestamp timestamp, + int32_t bitrate_bps, + BandwidthUsage detector_state) + : timestamp(timestamp), + bitrate_bps(bitrate_bps), + detector_state(detector_state) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + int32_t bitrate_bps; + BandwidthUsage detector_state; +}; + class RtcEventBweUpdateDelayBased final : public RtcEvent { public: static constexpr Type kType = Type::BweUpdateDelayBased; @@ -37,28 +99,36 @@ class RtcEventBweUpdateDelayBased final : public RtcEvent { int32_t bitrate_bps() const { return bitrate_bps_; } BandwidthUsage detector_state() const { return detector_state_; } + static std::string Encode(rtc::ArrayView batch) { + return RtcEventBweUpdateDelayBased::definition_.EncodeBatch(batch); + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + return RtcEventBweUpdateDelayBased::definition_.ParseBatch(encoded_bytes, + batched, output); + } + private: RtcEventBweUpdateDelayBased(const RtcEventBweUpdateDelayBased& other); const int32_t bitrate_bps_; const BandwidthUsage detector_state_; -}; -struct LoggedBweDelayBasedUpdate { - LoggedBweDelayBasedUpdate() = default; - LoggedBweDelayBasedUpdate(Timestamp timestamp, - int32_t bitrate_bps, - BandwidthUsage detector_state) - : timestamp(timestamp), - bitrate_bps(bitrate_bps), - detector_state(detector_state) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int32_t bitrate_bps; - BandwidthUsage detector_state; + static constexpr RtcEventDefinition + definition_{ + {"BweDelayBased", RtcEventBweUpdateDelayBased::kType}, + {&RtcEventBweUpdateDelayBased::bitrate_bps_, + &LoggedBweDelayBasedUpdate::bitrate_bps, + {"bitrate_bps", /*id=*/1, FieldType::kVarInt, /*width=*/32}}, + {&RtcEventBweUpdateDelayBased::detector_state_, + &LoggedBweDelayBasedUpdate::detector_state, + {"detector_state", /*id=*/2, FieldType::kVarInt, /*width=*/64}}}; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h 
b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h index b031658ea2..fd41b316e0 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h @@ -14,12 +14,37 @@ #include #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { +struct LoggedBweLossBasedUpdate { + LoggedBweLossBasedUpdate() = default; + LoggedBweLossBasedUpdate(Timestamp timestamp, + int32_t bitrate_bps, + uint8_t fraction_lost, + int32_t expected_packets) + : timestamp(timestamp), + bitrate_bps(bitrate_bps), + fraction_lost(fraction_lost), + expected_packets(expected_packets) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + int32_t bitrate_bps; + uint8_t fraction_lost; + int32_t expected_packets; +}; + class RtcEventBweUpdateLossBased final : public RtcEvent { public: static constexpr Type kType = Type::BweUpdateLossBased; @@ -38,6 +63,19 @@ class RtcEventBweUpdateLossBased final : public RtcEvent { uint8_t fraction_loss() const { return fraction_loss_; } int32_t total_packets() const { return total_packets_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventBweUpdateLossBased(const RtcEventBweUpdateLossBased& other); @@ -46,26 +84,6 @@ class RtcEventBweUpdateLossBased final : public RtcEvent { const int32_t total_packets_; }; -struct LoggedBweLossBasedUpdate { - LoggedBweLossBasedUpdate() = default; - LoggedBweLossBasedUpdate(Timestamp timestamp, - int32_t bitrate_bps, - uint8_t fraction_lost, - int32_t expected_packets) - : timestamp(timestamp), - bitrate_bps(bitrate_bps), - fraction_lost(fraction_lost), - expected_packets(expected_packets) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int32_t bitrate_bps; - uint8_t fraction_lost; - int32_t expected_packets; -}; - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_BWE_UPDATE_LOSS_BASED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_definition.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_definition.h new file mode 100644 index 0000000000..8688c5fc7b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_definition.h @@ -0,0 +1,152 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DEFINITION_H_ +#define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DEFINITION_H_ + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_field_extraction.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +template +struct RtcEventFieldDefinition { + const T EventType::*event_member; + T LoggedType::*logged_member; + FieldParameters params; +}; + +// Base case +template +class RtcEventDefinitionImpl { + public: + void EncodeImpl(EventEncoder&, rtc::ArrayView) const {} + RtcEventLogParseStatus ParseImpl(EventParser&, + rtc::ArrayView) const { + return RtcEventLogParseStatus::Success(); + } +}; + +// Recursive case +template +class RtcEventDefinitionImpl { + public: + constexpr RtcEventDefinitionImpl( + RtcEventFieldDefinition field, + RtcEventFieldDefinition... rest) + : field_(field), rest_(rest...) {} + + void EncodeImpl(EventEncoder& encoder, + rtc::ArrayView batch) const { + auto values = ExtractRtcEventMember(batch, field_.event_member); + encoder.EncodeField(field_.params, values); + rest_.EncodeImpl(encoder, batch); + } + + RtcEventLogParseStatus ParseImpl( + EventParser& parser, + rtc::ArrayView output_batch) const { + RtcEventLogParseStatusOr> result = + parser.ParseNumericField(field_.params); + if (!result.ok()) + return result.status(); + auto status = PopulateRtcEventMember(result.value(), field_.logged_member, + output_batch); + if (!status.ok()) + return status; + + return rest_.ParseImpl(parser, output_batch); + } + + private: + RtcEventFieldDefinition field_; + RtcEventDefinitionImpl rest_; +}; + +// The RtcEventDefinition sets up a mapping between the fields +// in an RtcEvent and the corresponding fields in the parsed struct. +// For example, an RtcFoo class containing two fields; `uint32_t bar` +// and `bool baz` (a log timestamp is always implicitly added) +// might have a definition +// RtcEventDefinition( +// {"foo", RtcFoo::Type}, +// {&RtcFoo::bar_, &LoggedFoo::bar, {"bar", 1, FieldType::kVarInt, 32}}, +// {&RtcFoo::baz_, &LoggedFoo::baz, {"baz", 2, FieldType::kFixed8, 1}}, +// ); +// In addition to defining string names to aid debugging, +// this specifies that +// * RtcFoo::Type uniquely identifies an RtcFoo in the encoded stream +// * The `bar` field has ID 1, is encoded as a VarInt +// (when not delta compressed), and wraps around after 32 bits. +// * The `baz` field has ID 2, is encoded as an 8-bit field +// (when not delta compressed), and wraps around after 1 bit. +// Note that the numerical field and event IDs can't be changed since +// that would break compatibility with old logs. +// In most cases (including all cases where wrap around isn't +// expected), the wrap around should be equal to the bitwidth of +// the field. +template +class RtcEventDefinition { + public: + constexpr RtcEventDefinition( + EventParameters params, + RtcEventFieldDefinition... fields) + : params_(params), fields_(fields...) 
{} + + std::string EncodeBatch(rtc::ArrayView batch) const { + EventEncoder encoder(params_, batch); + fields_.EncodeImpl(encoder, batch); + return encoder.AsString(); + } + + RtcEventLogParseStatus ParseBatch(absl::string_view s, + bool batched, + std::vector& output) const { + EventParser parser; + auto status = parser.Initialize(s, batched); + if (!status.ok()) + return status; + + rtc::ArrayView output_batch = + ExtendLoggedBatch(output, parser.NumEventsInBatch()); + + constexpr FieldParameters timestamp_params{"timestamp_ms", + FieldParameters::kTimestampField, + FieldType::kVarInt, 64}; + RtcEventLogParseStatusOr> result = + parser.ParseNumericField(timestamp_params); + if (!result.ok()) + return result.status(); + status = PopulateRtcEventTimestamp(result.value(), &LoggedType::timestamp, + output_batch); + if (!status.ok()) + return status; + + return fields_.ParseImpl(parser, output_batch); + } + + private: + EventParameters params_; + RtcEventDefinitionImpl fields_; +}; + +} // namespace webrtc + +#endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DEFINITION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h index 9a3eecb3d3..b9af213256 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h @@ -12,13 +12,26 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DTLS_TRANSPORT_STATE_H_ #include +#include +#include +#include "absl/strings/string_view.h" #include "api/dtls_transport_interface.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { +struct LoggedDtlsTransportState { + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + DtlsTransportState dtls_transport_state; +}; + class RtcEventDtlsTransportState : public RtcEvent { public: static constexpr Type kType = Type::DtlsTransportState; @@ -35,20 +48,25 @@ class RtcEventDtlsTransportState : public RtcEvent { return dtls_transport_state_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventDtlsTransportState(const RtcEventDtlsTransportState& other); const DtlsTransportState dtls_transport_state_; }; -struct LoggedDtlsTransportState { - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - DtlsTransportState dtls_transport_state; -}; - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DTLS_TRANSPORT_STATE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h index c0cc5b87ef..c820f184d7 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h +++ 
b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h @@ -12,12 +12,28 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DTLS_WRITABLE_STATE_H_ #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { +struct LoggedDtlsWritableState { + LoggedDtlsWritableState() = default; + explicit LoggedDtlsWritableState(bool writable) : writable(writable) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + bool writable; +}; + class RtcEventDtlsWritableState : public RtcEvent { public: static constexpr Type kType = Type::DtlsWritableState; @@ -32,23 +48,25 @@ class RtcEventDtlsWritableState : public RtcEvent { bool writable() const { return writable_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventDtlsWritableState(const RtcEventDtlsWritableState& other); const bool writable_; }; -struct LoggedDtlsWritableState { - LoggedDtlsWritableState() = default; - explicit LoggedDtlsWritableState(bool writable) : writable(writable) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - bool writable; -}; - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DTLS_WRITABLE_STATE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_end_log.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_end_log.cc new file mode 100644 index 0000000000..52abf9e842 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_end_log.cc @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "logging/rtc_event_log/events/rtc_event_end_log.h" + +#include "absl/strings/string_view.h" + +namespace webrtc { +constexpr RtcEvent::Type RtcEventEndLog::kType; +constexpr EventParameters RtcEventEndLog::event_params_; + +RtcEventEndLog::RtcEventEndLog(Timestamp timestamp) + : RtcEvent(timestamp.us()) {} + +RtcEventEndLog::RtcEventEndLog(const RtcEventEndLog& other) + : RtcEvent(other.timestamp_us_) {} + +RtcEventEndLog::~RtcEventEndLog() = default; + +std::string RtcEventEndLog::Encode(rtc::ArrayView batch) { + EventEncoder encoder(event_params_, batch); + return encoder.AsString(); +} + +RtcEventLogParseStatus RtcEventEndLog::Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + EventParser parser; + auto status = parser.Initialize(encoded_bytes, batched); + if (!status.ok()) + return status; + + rtc::ArrayView output_batch = + ExtendLoggedBatch(output, parser.NumEventsInBatch()); + + constexpr FieldParameters timestamp_params{ + "timestamp_ms", FieldParameters::kTimestampField, FieldType::kVarInt, 64}; + RtcEventLogParseStatusOr> result = + parser.ParseNumericField(timestamp_params); + if (!result.ok()) + return result.status(); + status = PopulateRtcEventTimestamp(result.value(), + &LoggedStopEvent::timestamp, output_batch); + if (!status.ok()) + return status; + + return RtcEventLogParseStatus::Success(); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_end_log.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_end_log.h new file mode 100644 index 0000000000..79648bdb8d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_end_log.h @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_END_LOG_H_ +#define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_END_LOG_H_ + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/rtc_event_log/rtc_event.h" +#include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_field_extraction.h" + +namespace webrtc { + +struct LoggedStopEvent { + LoggedStopEvent() = default; + + explicit LoggedStopEvent(Timestamp timestamp) : timestamp(timestamp) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::PlusInfinity(); +}; + +class RtcEventEndLog final : public RtcEvent { + public: + static constexpr Type kType = Type::EndV3Log; + + explicit RtcEventEndLog(Timestamp timestamp); + ~RtcEventEndLog() override; + + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } + + static std::string Encode(rtc::ArrayView batch); + + static RtcEventLogParseStatus Parse(absl::string_view encoded_bytes, + bool batched, + std::vector& output); + + private: + RtcEventEndLog(const RtcEventEndLog& other); + + static constexpr EventParameters event_params_{"EndLog", + RtcEventEndLog::kType}; +}; + +} // namespace webrtc +#endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_END_LOG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding.h index 8376a8b8a5..33b77b80f5 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding.h @@ -93,5 +93,87 @@ std::string EncodeDeltasV3(FixedLengthEncodingParametersV3 params, uint64_t base, rtc::ArrayView values); +// Given a batch of RtcEvents and a member pointer, extract that +// member from each event in the batch. Signed integer members are +// encoded as unsigned, and the bitsize increased so the result can +// represented as a std::vector. +// This is intended to be used in conjuction with +// EventEncoder::EncodeField to encode a batch of events as follows: +// auto values = ExtractRtcEventMember(batch, RtcEventFoo::timestamp_ms); +// encoder.EncodeField(timestamp_params, values) +template ::value, bool> = true> +std::vector ExtractRtcEventMember( + rtc::ArrayView batch, + const T E::*member) { + std::vector values; + values.reserve(batch.size()); + for (const RtcEvent* event : batch) { + RTC_CHECK_EQ(event->GetType(), E::kType); + T value = static_cast(event)->*member; + values.push_back(EncodeAsUnsigned(value)); + } + return values; +} + +// Extract an optional field from a batch of RtcEvents. +// The function returns a vector of positions in addition to the vector of +// values. The vector `positions` has the same length as the batch where +// `positions[i] == true` iff the batch[i]->member has a value. +// The values vector only contains the values that exists, so it +// may be shorter than the batch. 
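[Editorial aside, not part of the patch] The ExtractRtcEventMember helpers added above are easiest to follow next to a complete Encode(). The sketch below shows the intended pattern for a hypothetical event type: RtcEventFoo, its counter_ field, the "Foo"/"counter" names and the Type::FakeEvent tag are invented for illustration, while EventEncoder, EventParameters, FieldParameters and FieldType are the encoding types this header is built around (RtcEventEndLog::Encode earlier in this diff follows the same shape).

#include <cstdint>
#include <string>

#include "api/array_view.h"
#include "api/rtc_event_log/rtc_event.h"
#include "api/units/timestamp.h"
#include "logging/rtc_event_log/events/rtc_event_field_encoding.h"

namespace webrtc {

// Hypothetical event type, used only to illustrate batch encoding.
class RtcEventFoo final : public RtcEvent {
 public:
  // Hypothetical enumerator; a real event adds its own value to RtcEvent::Type.
  static constexpr Type kType = Type::FakeEvent;

  RtcEventFoo(Timestamp timestamp, int64_t counter)
      : RtcEvent(timestamp.us()), counter_(counter) {}

  Type GetType() const override { return kType; }
  bool IsConfigEvent() const override { return false; }

  static std::string Encode(rtc::ArrayView<const RtcEvent*> batch) {
    // One FieldParameters per data field; the shared timestamp column is
    // handled by the EventEncoder constructor itself.
    constexpr FieldParameters counter_params{"counter", /*field_id=*/1,
                                             FieldType::kVarInt, /*width=*/64};
    EventEncoder encoder(event_params_, batch);
    // Pull counter_ out of every event in the batch and encode that column.
    encoder.EncodeField(counter_params,
                        ExtractRtcEventMember(batch, &RtcEventFoo::counter_));
    return encoder.AsString();
  }

  const int64_t counter_;

 private:
  static constexpr EventParameters event_params_{"Foo", kType};
};

}  // namespace webrtc

(The optional-field overload documented just above continues below.)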
+template ::value, bool> = true> +ValuesWithPositions ExtractRtcEventMember(rtc::ArrayView batch, + const absl::optional E::*member) { + ValuesWithPositions result; + result.position_mask.reserve(batch.size()); + result.values.reserve(batch.size()); + for (const RtcEvent* event : batch) { + RTC_CHECK_EQ(event->GetType(), E::kType); + absl::optional field = static_cast(event)->*member; + result.position_mask.push_back(field.has_value()); + if (field.has_value()) { + result.values.push_back(EncodeAsUnsigned(field.value())); + } + } + return result; +} + +// Extract an enum field from a batch of RtcEvents. +// Requires specializing RtcEventLogEnum for the enum type T. +template ::value, bool> = true> +std::vector ExtractRtcEventMember( + rtc::ArrayView batch, + const T E::*member) { + std::vector values; + values.reserve(batch.size()); + for (const RtcEvent* event : batch) { + RTC_CHECK_EQ(event->GetType(), E::kType); + T value = static_cast(event)->*member; + values.push_back(RtcEventLogEnum::Encode(value)); + } + return values; +} + +// Extract a string field from a batch of RtcEvents. +template +std::vector ExtractRtcEventMember( + rtc::ArrayView batch, + const std::string E::*member) { + std::vector values; + values.reserve(batch.size()); + for (const RtcEvent* event : batch) { + RTC_CHECK_EQ(event->GetType(), E::kType); + absl::string_view str = static_cast(event)->*member; + values.push_back(str); + } + return values; +} + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_FIELD_ENCODING_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding_parser.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding_parser.h index f1af5db44a..fc87faf611 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding_parser.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding_parser.h @@ -14,6 +14,7 @@ #include #include +#include "absl/strings/string_view.h" #include "logging/rtc_event_log/events/rtc_event_field_encoding.h" // TODO(terelius): Compared to a generic 'Status' class, this @@ -29,20 +30,26 @@ class RtcEventLogParseStatus { public: static RtcEventLogParseStatus Success() { return RtcEventLogParseStatus(); } - static RtcEventLogParseStatus Error(std::string error, - std::string file, + static RtcEventLogParseStatus Error(absl::string_view error, + absl::string_view file, int line) { return RtcEventLogParseStatus(error, file, line); } bool ok() const { return error_.empty(); } + ABSL_DEPRECATED("Use ok() instead") explicit operator bool() const { + return ok(); + } std::string message() const { return error_; } private: RtcEventLogParseStatus() : error_() {} - RtcEventLogParseStatus(std::string error, std::string file, int line) - : error_(error + " (" + file + ": " + std::to_string(line) + ")") {} + RtcEventLogParseStatus(absl::string_view error, + absl::string_view file, + int line) + : error_(std::string(error) + " (" + std::string(file) + ": " + + std::to_string(line) + ")") {} std::string error_; }; @@ -50,15 +57,17 @@ class RtcEventLogParseStatus { template class RtcEventLogParseStatusOr { public: - explicit RtcEventLogParseStatusOr(RtcEventLogParseStatus status) + RtcEventLogParseStatusOr(RtcEventLogParseStatus status) // NOLINT : status_(status), value_() {} - explicit RtcEventLogParseStatusOr(const T& value) + RtcEventLogParseStatusOr(const T& value) // NOLINT : status_(), value_(value) {} bool ok() const { return 
status_.ok(); } std::string message() const { return status_.message(); } + RtcEventLogParseStatus status() const { return status_; } + const T& value() const { RTC_DCHECK(ok()); return value_; @@ -69,15 +78,17 @@ class RtcEventLogParseStatusOr { return value_; } - static RtcEventLogParseStatusOr Error(std::string error, - std::string file, + static RtcEventLogParseStatusOr Error(absl::string_view error, + absl::string_view file, int line) { return RtcEventLogParseStatusOr(error, file, line); } private: RtcEventLogParseStatusOr() : status_() {} - RtcEventLogParseStatusOr(std::string error, std::string file, int line) + RtcEventLogParseStatusOr(absl::string_view error, + absl::string_view file, + int line) : status_(error, file, line), value_() {} RtcEventLogParseStatus status_; @@ -129,7 +140,7 @@ class EventParser { uint64_t ReadOptionalValuePositions(); void ReadDeltasAndPopulateValues(FixedLengthEncodingParametersV3 params, uint64_t num_deltas, - const uint64_t base); + uint64_t base); RtcEventLogParseStatus ParseNumericFieldInternal(uint64_t value_bit_width, FieldType field_type); RtcEventLogParseStatus ParseStringFieldInternal(); @@ -171,5 +182,110 @@ class EventParser { uint64_t last_field_id_ = FieldParameters::kTimestampField; }; +// Inverse of the ExtractRtcEventMember function used when parsing +// a log. Uses a vector of values to populate a specific field in a +// vector of structs. +template ::value, bool> = true> +ABSL_MUST_USE_RESULT RtcEventLogParseStatus +PopulateRtcEventMember(const rtc::ArrayView values, + T E::*member, + rtc::ArrayView output) { + size_t batch_size = values.size(); + RTC_CHECK_EQ(output.size(), batch_size); + for (size_t i = 0; i < batch_size; ++i) { + output[i].*member = DecodeFromUnsignedToType(values[i]); + } + return RtcEventLogParseStatus::Success(); +} + +// Same as above, but for optional fields. +template ::value, bool> = true> +ABSL_MUST_USE_RESULT RtcEventLogParseStatus +PopulateRtcEventMember(const rtc::ArrayView positions, + const rtc::ArrayView values, + absl::optional E::*member, + rtc::ArrayView output) { + size_t batch_size = positions.size(); + RTC_CHECK_EQ(output.size(), batch_size); + RTC_CHECK_LE(values.size(), batch_size); + auto value_it = values.begin(); + for (size_t i = 0; i < batch_size; ++i) { + if (positions[i]) { + RTC_CHECK(value_it != values.end()); + output[i].*member = DecodeFromUnsignedToType(value_it); + ++value_it; + } else { + output[i].*member = absl::nullopt; + } + } + RTC_CHECK(value_it == values.end()); + return RtcEventLogParseStatus::Success(); +} + +// Same as above, but for enum fields. +template ::value, bool> = true> +ABSL_MUST_USE_RESULT RtcEventLogParseStatus +PopulateRtcEventMember(const rtc::ArrayView values, + T E::*member, + rtc::ArrayView output) { + size_t batch_size = values.size(); + RTC_CHECK_EQ(output.size(), batch_size); + for (size_t i = 0; i < batch_size; ++i) { + auto result = RtcEventLogEnum::Decode(values[i]); + if (!result.ok()) { + return result.status(); + } + output[i].*member = result.value(); + } + return RtcEventLogParseStatus::Success(); +} + +// Same as above, but for string fields. 
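[Editorial aside, not part of the patch] Continuing the RtcEventFoo sketch from the earlier aside, this is what the matching Parse() typically looks like with the Populate* helpers declared above: grow the output vector with ExtendLoggedBatch, then fill one column per field, in the same order the fields were encoded. LoggedFoo and the "counter" field are hypothetical; EventParser, ParseNumericField, PopulateRtcEventTimestamp and PopulateRtcEventMember are used exactly as in RtcEventEndLog::Parse earlier in this diff.

// (Assumes the includes and the RtcEventFoo class from the earlier sketch,
// plus a matching `static RtcEventLogParseStatus Parse(...)` declaration on
// RtcEventFoo, as the real events in this diff declare.)

// Hypothetical logged struct that Parse() fills in.
struct LoggedFoo {
  Timestamp timestamp = Timestamp::MinusInfinity();
  int64_t counter = 0;
};

RtcEventLogParseStatus RtcEventFoo::Parse(absl::string_view encoded_bytes,
                                          bool batched,
                                          std::vector<LoggedFoo>& output) {
  EventParser parser;
  auto status = parser.Initialize(encoded_bytes, batched);
  if (!status.ok())
    return status;

  // Append one default-constructed LoggedFoo per event in the batch and get
  // a view of the new tail for the Populate* helpers to write into.
  rtc::ArrayView<LoggedFoo> output_batch =
      ExtendLoggedBatch(output, parser.NumEventsInBatch());

  constexpr FieldParameters timestamp_params{"timestamp_ms",
                                             FieldParameters::kTimestampField,
                                             FieldType::kVarInt, 64};
  auto result = parser.ParseNumericField(timestamp_params);
  if (!result.ok())
    return result.status();
  status = PopulateRtcEventTimestamp(result.value(), &LoggedFoo::timestamp,
                                     output_batch);
  if (!status.ok())
    return status;

  constexpr FieldParameters counter_params{"counter", /*field_id=*/1,
                                           FieldType::kVarInt, /*width=*/64};
  result = parser.ParseNumericField(counter_params);
  if (!result.ok())
    return result.status();
  return PopulateRtcEventMember(result.value(), &LoggedFoo::counter,
                                output_batch);
}

(The string-field Populate helper described just above continues below.)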
+template +ABSL_MUST_USE_RESULT RtcEventLogParseStatus +PopulateRtcEventMember(const rtc::ArrayView values, + std::string E::*member, + rtc::ArrayView output) { + size_t batch_size = values.size(); + RTC_CHECK_EQ(output.size(), batch_size); + for (size_t i = 0; i < batch_size; ++i) { + output[i].*member = values[i]; + } + return RtcEventLogParseStatus::Success(); +} + +// Same as above, but for Timestamp fields. +// N.B. Assumes that the encoded value uses millisecond precision. +template +ABSL_MUST_USE_RESULT RtcEventLogParseStatus +PopulateRtcEventTimestamp(const rtc::ArrayView& values, + Timestamp E::*timestamp, + rtc::ArrayView output) { + size_t batch_size = values.size(); + RTC_CHECK_EQ(batch_size, output.size()); + for (size_t i = 0; i < batch_size; ++i) { + output[i].*timestamp = + Timestamp::Millis(DecodeFromUnsignedToType(values[i])); + } + return RtcEventLogParseStatus::Success(); +} + +template +rtc::ArrayView ExtendLoggedBatch(std::vector& output, + size_t new_elements) { + size_t old_size = output.size(); + output.insert(output.end(), old_size + new_elements, E()); + rtc::ArrayView output_batch = output; + output_batch.subview(old_size); + RTC_DCHECK_EQ(output_batch.size(), new_elements); + return output_batch; +} + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_FIELD_ENCODING_PARSER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_extraction.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_extraction.h index 8cd020fe16..eb9d67f1c2 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_extraction.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_extraction.h @@ -17,6 +17,7 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" +#include "api/units/timestamp.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_common.h" #include "rtc_base/logging.h" @@ -55,29 +56,16 @@ T DecodeFromUnsignedToType(uint64_t value) { return static_cast(value); } -// Given a batch of RtcEvents and a member pointer, extract that -// member from each event in the batch. Signed integer members are -// encoded as unsigned, and the bitsize increased so the result can -// represented as a std::vector. -// This is intended to be used in conjuction with -// EventEncoder::EncodeField to encode a batch of events as follows: -// auto values = ExtractRtcEventMember(batch, RtcEventFoo::timestamp_ms); -// encoder.EncodeField(timestamp_params, values) -template ::value, bool> = true> -std::vector ExtractRtcEventMember( - rtc::ArrayView batch, - const T E::*member) { - std::vector values; - values.reserve(batch.size()); - for (const RtcEvent* event : batch) { - RTC_CHECK_EQ(event->GetType(), E::kType); - T value = static_cast(event)->*member; - values.push_back(EncodeAsUnsigned(value)); - } - return values; -} +// RtcEventLogEnum defines a mapping between an enum T +// and the event log encodings. 
To log a new enum type T, +// specialize RtcEventLogEnum and add static methods +// static uint64_t Encode(T x) {} +// static RtcEventLogParseStatusOr Decode(uint64_t x) {} +template +class RtcEventLogEnum { + static_assert(sizeof(T) != sizeof(T), + "Missing specialisation of RtcEventLogEnum for type"); +}; // Represents a vector> optional_values // as a bit-vector `position_mask` which identifies the positions @@ -91,96 +79,6 @@ struct ValuesWithPositions { std::vector values; }; -// Same as above but for optional fields. It returns a struct -// containing a vector of positions in addition to the vector of values. -// The vector `positions` has the same length as the batch where -// `positions[i] == true` iff the batch[i]->member has a value. -// The values vector only contains the values that exists, so it -// may be shorter than the batch. -template ::value, bool> = true> -ValuesWithPositions ExtractRtcEventMember(rtc::ArrayView batch, - const absl::optional E::*member) { - ValuesWithPositions result; - result.position_mask.reserve(batch.size()); - result.values.reserve(batch.size()); - for (const RtcEvent* event : batch) { - RTC_CHECK_EQ(event->GetType(), E::kType); - absl::optional field = static_cast(event)->*member; - result.position_mask.push_back(field.has_value()); - if (field.has_value()) { - result.values.push_back(EncodeAsUnsigned(field.value())); - } - } - return result; -} - -template -std::vector ExtractRtcEventMember( - rtc::ArrayView batch, - const std::string E::*member) { - std::vector values; - values.reserve(batch.size()); - for (const RtcEvent* event : batch) { - RTC_CHECK_EQ(event->GetType(), E::kType); - absl::string_view str = static_cast(event)->*member; - values.push_back(str); - } - return values; -} - -// Inverse of the ExtractRtcEventMember function used when parsing -// a log. Uses a vector of values to populate a specific field in a -// vector of structs. -template ::value, bool> = true> -void PopulateRtcEventMember(const rtc::ArrayView values, - T E::*member, - rtc::ArrayView output) { - size_t batch_size = values.size(); - RTC_CHECK_EQ(output.size(), batch_size); - for (size_t i = 0; i < batch_size; ++i) { - output[i].*member = DecodeFromUnsignedToType(values[i]); - } -} - -// Same as above, but for optional fields. 
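[Editorial aside, not part of the patch] The RtcEventLogEnum comment added above in rtc_event_field_extraction.h asks for a specialization whenever a new enum type is logged. A minimal sketch of such a specialization, for a hypothetical FooMode enum, could look as follows; it would live where both the enum and the parse-status types from rtc_event_field_encoding_parser.h are visible, and the non-explicit converting constructors added to RtcEventLogParseStatusOr elsewhere in this diff are what let the plain return statements below compile.

// Hypothetical enum carried by a logged event.
enum class FooMode : uint8_t { kIdle, kActive, kNumValues };

// Maps FooMode to and from the unsigned wire encoding. Unknown values make
// parsing fail instead of being cast blindly.
template <>
class RtcEventLogEnum<FooMode> {
 public:
  static uint64_t Encode(FooMode x) { return static_cast<uint64_t>(x); }
  static RtcEventLogParseStatusOr<FooMode> Decode(uint64_t x) {
    if (x < static_cast<uint64_t>(FooMode::kNumValues)) {
      return static_cast<FooMode>(x);
    }
    return RtcEventLogParseStatus::Error("Failed to decode FooMode", __FILE__,
                                         __LINE__);
  }
};

(The deleted pre-existing helpers from rtc_event_field_extraction.h continue below.)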
-template ::value, bool> = true> -void PopulateRtcEventMember(const rtc::ArrayView positions, - const rtc::ArrayView values, - absl::optional E::*member, - rtc::ArrayView output) { - size_t batch_size = positions.size(); - RTC_CHECK_EQ(output.size(), batch_size); - RTC_CHECK_LE(values.size(), batch_size); - auto value_it = values.begin(); - for (size_t i = 0; i < batch_size; ++i) { - if (positions[i]) { - RTC_CHECK(value_it != values.end()); - output[i].*member = DecodeFromUnsignedToType(value_it); - ++value_it; - } else { - output[i].*member = absl::nullopt; - } - } - RTC_CHECK(value_it == values.end()); -} - -template -void PopulateRtcEventMember(const rtc::ArrayView values, - std::string E::*member, - rtc::ArrayView output) { - size_t batch_size = values.size(); - RTC_CHECK_EQ(output.size(), batch_size); - for (size_t i = 0; i < batch_size; ++i) { - output[i].*member = values[i]; - } -} - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_FIELD_EXTRACTION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.h index 4a6bb90d02..91190faea9 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.h @@ -13,14 +13,33 @@ #include +#include #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" #include "api/video/video_codec_type.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { +struct LoggedFrameDecoded { + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + int64_t render_time_ms; + uint32_t ssrc; + int width; + int height; + VideoCodecType codec; + uint8_t qp; +}; + class RtcEventFrameDecoded final : public RtcEvent { public: static constexpr Type kType = Type::FrameDecoded; @@ -45,6 +64,19 @@ class RtcEventFrameDecoded final : public RtcEvent { VideoCodecType codec() const { return codec_; } uint8_t qp() const { return qp_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::map>& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventFrameDecoded(const RtcEventFrameDecoded& other); @@ -56,19 +88,6 @@ class RtcEventFrameDecoded final : public RtcEvent { const uint8_t qp_; }; -struct LoggedFrameDecoded { - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int64_t render_time_ms; - uint32_t ssrc; - int width; - int height; - VideoCodecType codec; - uint8_t qp; -}; - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_FRAME_DECODED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.h index 3cd8f5ccee..57fd7cd9a6 100644 --- 
a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.h @@ -12,14 +12,38 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_ACK_RECEIVED_H_ #include +#include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { +struct LoggedGenericAckReceived { + LoggedGenericAckReceived() = default; + LoggedGenericAckReceived(Timestamp timestamp, + int64_t packet_number, + int64_t acked_packet_number, + absl::optional receive_acked_packet_time_ms) + : timestamp(timestamp), + packet_number(packet_number), + acked_packet_number(acked_packet_number), + receive_acked_packet_time_ms(receive_acked_packet_time_ms) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + int64_t packet_number; + int64_t acked_packet_number; + absl::optional receive_acked_packet_time_ms; +}; + struct AckedPacket { // The packet number that was acked. int64_t packet_number; @@ -57,6 +81,19 @@ class RtcEventGenericAckReceived final : public RtcEvent { return receive_acked_packet_time_ms_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventGenericAckReceived(const RtcEventGenericAckReceived& packet); @@ -76,26 +113,6 @@ class RtcEventGenericAckReceived final : public RtcEvent { const absl::optional receive_acked_packet_time_ms_; }; -struct LoggedGenericAckReceived { - LoggedGenericAckReceived() = default; - LoggedGenericAckReceived(Timestamp timestamp, - int64_t packet_number, - int64_t acked_packet_number, - absl::optional receive_acked_packet_time_ms) - : timestamp(timestamp), - packet_number(packet_number), - acked_packet_number(acked_packet_number), - receive_acked_packet_time_ms(receive_acked_packet_time_ms) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int64_t packet_number; - int64_t acked_packet_number; - absl::optional receive_acked_packet_time_ms; -}; - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_ACK_RECEIVED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.h index 428e7b3806..a6006ca4d4 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.h @@ -12,12 +12,34 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_PACKET_RECEIVED_H_ #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc 
{ +struct LoggedGenericPacketReceived { + LoggedGenericPacketReceived() = default; + LoggedGenericPacketReceived(Timestamp timestamp, + int64_t packet_number, + int packet_length) + : timestamp(timestamp), + packet_number(packet_number), + packet_length(packet_length) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + int64_t packet_number; + int packet_length; +}; + class RtcEventGenericPacketReceived final : public RtcEvent { public: static constexpr Type kType = Type::GenericPacketReceived; @@ -37,6 +59,19 @@ class RtcEventGenericPacketReceived final : public RtcEvent { // including ICE/TURN/IP overheads. size_t packet_length() const { return packet_length_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventGenericPacketReceived(const RtcEventGenericPacketReceived& packet); @@ -44,23 +79,6 @@ class RtcEventGenericPacketReceived final : public RtcEvent { const size_t packet_length_; }; -struct LoggedGenericPacketReceived { - LoggedGenericPacketReceived() = default; - LoggedGenericPacketReceived(Timestamp timestamp, - int64_t packet_number, - int packet_length) - : timestamp(timestamp), - packet_number(packet_number), - packet_length(packet_length) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int64_t packet_number; - int packet_length; -}; - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_PACKET_RECEIVED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h index 6e626e63a1..903950a398 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h @@ -12,12 +12,43 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_PACKET_SENT_H_ #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { +struct LoggedGenericPacketSent { + LoggedGenericPacketSent() = default; + LoggedGenericPacketSent(Timestamp timestamp, + int64_t packet_number, + size_t overhead_length, + size_t payload_length, + size_t padding_length) + : timestamp(timestamp), + packet_number(packet_number), + overhead_length(overhead_length), + payload_length(payload_length), + padding_length(padding_length) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + size_t packet_length() const { + return payload_length + padding_length + overhead_length; + } + Timestamp timestamp = Timestamp::MinusInfinity(); + int64_t packet_number; + size_t overhead_length; + size_t payload_length; + size_t padding_length; +}; + class 
RtcEventGenericPacketSent final : public RtcEvent { public: static constexpr Type kType = Type::GenericPacketSent; @@ -52,6 +83,19 @@ class RtcEventGenericPacketSent final : public RtcEvent { size_t padding_length() const { return padding_length_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventGenericPacketSent(const RtcEventGenericPacketSent& packet); @@ -61,31 +105,6 @@ class RtcEventGenericPacketSent final : public RtcEvent { const size_t padding_length_; }; -struct LoggedGenericPacketSent { - LoggedGenericPacketSent() = default; - LoggedGenericPacketSent(Timestamp timestamp, - int64_t packet_number, - size_t overhead_length, - size_t payload_length, - size_t padding_length) - : timestamp(timestamp), - packet_number(packet_number), - overhead_length(overhead_length), - payload_length(payload_length), - padding_length(padding_length) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - size_t packet_length() const { - return payload_length + padding_length + overhead_length; - } - Timestamp timestamp = Timestamp::MinusInfinity(); - int64_t packet_number; - size_t overhead_length; - size_t payload_length; - size_t padding_length; -}; } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_PACKET_SENT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h index 1f4d825a99..bdacf15a59 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h @@ -14,9 +14,13 @@ #include #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { @@ -28,6 +32,27 @@ enum class IceCandidatePairEventType { kNumValues, }; +struct LoggedIceCandidatePairEvent { + LoggedIceCandidatePairEvent() = default; + LoggedIceCandidatePairEvent(Timestamp timestamp, + IceCandidatePairEventType type, + uint32_t candidate_pair_id, + uint32_t transaction_id) + : timestamp(timestamp), + type(type), + candidate_pair_id(candidate_pair_id), + transaction_id(transaction_id) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + IceCandidatePairEventType type; + uint32_t candidate_pair_id; + uint32_t transaction_id; +}; + class RtcEventIceCandidatePair final : public RtcEvent { public: static constexpr Type kType = Type::IceCandidatePairEvent; @@ -47,6 +72,19 @@ class RtcEventIceCandidatePair final : public RtcEvent { uint32_t candidate_pair_id() const { return candidate_pair_id_; } uint32_t transaction_id() const { return transaction_id_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool 
batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventIceCandidatePair(const RtcEventIceCandidatePair& other); @@ -55,26 +93,6 @@ class RtcEventIceCandidatePair final : public RtcEvent { const uint32_t transaction_id_; }; -struct LoggedIceCandidatePairEvent { - LoggedIceCandidatePairEvent() = default; - LoggedIceCandidatePairEvent(Timestamp timestamp, - IceCandidatePairEventType type, - uint32_t candidate_pair_id, - uint32_t transaction_id) - : timestamp(timestamp), - type(type), - candidate_pair_id(candidate_pair_id), - transaction_id(transaction_id) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - IceCandidatePairEventType type; - uint32_t candidate_pair_id; - uint32_t transaction_id; -}; - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ICE_CANDIDATE_PAIR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h index 465a799780..e72d999cff 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h @@ -14,9 +14,13 @@ #include #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { @@ -65,6 +69,23 @@ enum class IceCandidateNetworkType { kNumValues, }; +struct LoggedIceCandidatePairConfig { + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + IceCandidatePairConfigType type; + uint32_t candidate_pair_id; + IceCandidateType local_candidate_type; + IceCandidatePairProtocol local_relay_protocol; + IceCandidateNetworkType local_network_type; + IceCandidatePairAddressFamily local_address_family; + IceCandidateType remote_candidate_type; + IceCandidatePairAddressFamily remote_address_family; + IceCandidatePairProtocol candidate_pair_protocol; +}; + class IceCandidatePairDescription { public: IceCandidatePairDescription(); @@ -105,6 +126,19 @@ class RtcEventIceCandidatePairConfig final : public RtcEvent { return candidate_pair_desc_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventIceCandidatePairConfig(const RtcEventIceCandidatePairConfig& other); @@ -113,22 +147,6 @@ class RtcEventIceCandidatePairConfig final : public RtcEvent { const IceCandidatePairDescription candidate_pair_desc_; }; -struct LoggedIceCandidatePairConfig { - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - IceCandidatePairConfigType type; - uint32_t candidate_pair_id; - IceCandidateType local_candidate_type; - 
IceCandidatePairProtocol local_relay_protocol; - IceCandidateNetworkType local_network_type; - IceCandidatePairAddressFamily local_address_family; - IceCandidateType remote_candidate_type; - IceCandidatePairAddressFamily remote_address_family; - IceCandidatePairProtocol candidate_pair_protocol; -}; - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ICE_CANDIDATE_PAIR_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h index 974a0c9a5c..ae6810c39d 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h @@ -14,12 +14,40 @@ #include #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { +struct LoggedBweProbeClusterCreatedEvent { + LoggedBweProbeClusterCreatedEvent() = default; + LoggedBweProbeClusterCreatedEvent(Timestamp timestamp, + int32_t id, + int32_t bitrate_bps, + uint32_t min_packets, + uint32_t min_bytes) + : timestamp(timestamp), + id(id), + bitrate_bps(bitrate_bps), + min_packets(min_packets), + min_bytes(min_bytes) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + int32_t id; + int32_t bitrate_bps; + uint32_t min_packets; + uint32_t min_bytes; +}; + class RtcEventProbeClusterCreated final : public RtcEvent { public: static constexpr Type kType = Type::ProbeClusterCreated; @@ -40,6 +68,19 @@ class RtcEventProbeClusterCreated final : public RtcEvent { uint32_t min_probes() const { return min_probes_; } uint32_t min_bytes() const { return min_bytes_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventProbeClusterCreated(const RtcEventProbeClusterCreated& other); @@ -49,29 +90,6 @@ class RtcEventProbeClusterCreated final : public RtcEvent { const uint32_t min_bytes_; }; -struct LoggedBweProbeClusterCreatedEvent { - LoggedBweProbeClusterCreatedEvent() = default; - LoggedBweProbeClusterCreatedEvent(Timestamp timestamp, - int32_t id, - int32_t bitrate_bps, - uint32_t min_packets, - uint32_t min_bytes) - : timestamp(timestamp), - id(id), - bitrate_bps(bitrate_bps), - min_packets(min_packets), - min_bytes(min_bytes) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int32_t id; - int32_t bitrate_bps; - uint32_t min_packets; - uint32_t min_bytes; -}; - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_PROBE_CLUSTER_CREATED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.h index fa61b314b4..1aa6e75cb7 100644 --- 
a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.h @@ -14,9 +14,13 @@ #include #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { @@ -27,6 +31,22 @@ enum class ProbeFailureReason { kLast }; +struct LoggedBweProbeFailureEvent { + LoggedBweProbeFailureEvent() = default; + LoggedBweProbeFailureEvent(Timestamp timestamp, + int32_t id, + ProbeFailureReason failure_reason) + : timestamp(timestamp), id(id), failure_reason(failure_reason) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + int32_t id; + ProbeFailureReason failure_reason; +}; + class RtcEventProbeResultFailure final : public RtcEvent { public: static constexpr Type kType = Type::ProbeResultFailure; @@ -42,6 +62,19 @@ class RtcEventProbeResultFailure final : public RtcEvent { int32_t id() const { return id_; } ProbeFailureReason failure_reason() const { return failure_reason_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventProbeResultFailure(const RtcEventProbeResultFailure& other); @@ -49,21 +82,6 @@ class RtcEventProbeResultFailure final : public RtcEvent { const ProbeFailureReason failure_reason_; }; -struct LoggedBweProbeFailureEvent { - LoggedBweProbeFailureEvent() = default; - LoggedBweProbeFailureEvent(Timestamp timestamp, - int32_t id, - ProbeFailureReason failure_reason) - : timestamp(timestamp), id(id), failure_reason(failure_reason) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int32_t id; - ProbeFailureReason failure_reason; -}; - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_PROBE_RESULT_FAILURE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.h index d00cfa81d6..49d1abec5a 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.h @@ -14,12 +14,32 @@ #include #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { +struct LoggedBweProbeSuccessEvent { + LoggedBweProbeSuccessEvent() = default; + LoggedBweProbeSuccessEvent(Timestamp timestamp, + int32_t id, + int32_t bitrate_bps) + : timestamp(timestamp), id(id), bitrate_bps(bitrate_bps) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + 
Timestamp timestamp = Timestamp::MinusInfinity(); + int32_t id; + int32_t bitrate_bps; +}; + class RtcEventProbeResultSuccess final : public RtcEvent { public: static constexpr Type kType = Type::ProbeResultSuccess; @@ -35,6 +55,19 @@ class RtcEventProbeResultSuccess final : public RtcEvent { int32_t id() const { return id_; } int32_t bitrate_bps() const { return bitrate_bps_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventProbeResultSuccess(const RtcEventProbeResultSuccess& other); @@ -42,21 +75,6 @@ class RtcEventProbeResultSuccess final : public RtcEvent { const int32_t bitrate_bps_; }; -struct LoggedBweProbeSuccessEvent { - LoggedBweProbeSuccessEvent() = default; - LoggedBweProbeSuccessEvent(Timestamp timestamp, - int32_t id, - int32_t bitrate_bps) - : timestamp(timestamp), id(id), bitrate_bps(bitrate_bps) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int32_t id; - int32_t bitrate_bps; -}; - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_PROBE_RESULT_SUCCESS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_remote_estimate.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_remote_estimate.h index 956e05f682..4a39ecc597 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_remote_estimate.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_remote_estimate.h @@ -11,14 +11,30 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_REMOTE_ESTIMATE_H_ #include +#include +#include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/data_rate.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { +struct LoggedRemoteEstimateEvent { + LoggedRemoteEstimateEvent() = default; + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + absl::optional link_capacity_lower; + absl::optional link_capacity_upper; +}; + class RtcEventRemoteEstimate final : public RtcEvent { public: static constexpr Type kType = Type::RemoteEstimateEvent; @@ -31,19 +47,22 @@ class RtcEventRemoteEstimate final : public RtcEvent { Type GetType() const override { return kType; } bool IsConfigEvent() const override { return false; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + const DataRate link_capacity_lower_; const DataRate link_capacity_upper_; }; -struct LoggedRemoteEstimateEvent { - LoggedRemoteEstimateEvent() = default; - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = 
Timestamp::MinusInfinity(); - absl::optional link_capacity_lower; - absl::optional link_capacity_upper; -}; } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_REMOTE_ESTIMATE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.h index 4a4e9aef80..bc1461d7bb 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.h @@ -12,12 +12,30 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ROUTE_CHANGE_H_ #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { +struct LoggedRouteChangeEvent { + LoggedRouteChangeEvent() = default; + LoggedRouteChangeEvent(Timestamp timestamp, bool connected, uint32_t overhead) + : timestamp(timestamp), connected(connected), overhead(overhead) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + bool connected; + uint32_t overhead; +}; + class RtcEventRouteChange final : public RtcEvent { public: static constexpr Type kType = Type::RouteChangeEvent; @@ -33,6 +51,19 @@ class RtcEventRouteChange final : public RtcEvent { bool connected() const { return connected_; } uint32_t overhead() const { return overhead_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventRouteChange(const RtcEventRouteChange& other); @@ -40,18 +71,5 @@ class RtcEventRouteChange final : public RtcEvent { const uint32_t overhead_; }; -struct LoggedRouteChangeEvent { - LoggedRouteChangeEvent() = default; - LoggedRouteChangeEvent(Timestamp timestamp, bool connected, uint32_t overhead) - : timestamp(timestamp), connected(connected), overhead(overhead) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - bool connected; - uint32_t overhead; -}; - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ROUTE_CHANGE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h index 1cbac7712f..84fe398e08 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h @@ -14,9 +14,14 @@ #include #include +#include +#include +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" +#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "rtc_base/buffer.h" namespace webrtc { @@ -35,6 +40,19 @@ class RtcEventRtcpPacketIncoming final : public 
RtcEvent { const rtc::Buffer& packet() const { return packet_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventRtcpPacketIncoming(const RtcEventRtcpPacketIncoming& other); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h index 0ecccbeaae..687bd319b4 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h @@ -14,9 +14,14 @@ #include #include +#include +#include +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" +#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "rtc_base/buffer.h" namespace webrtc { @@ -35,6 +40,19 @@ class RtcEventRtcpPacketOutgoing final : public RtcEvent { const rtc::Buffer& packet() const { return packet_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventRtcpPacketOutgoing(const RtcEventRtcpPacketOutgoing& other); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h index ee48fa360b..926ddddff5 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h @@ -13,11 +13,17 @@ #include #include +#include #include +#include #include +#include +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" +#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "modules/rtp_rtcp/source/rtp_packet.h" namespace webrtc { @@ -59,6 +65,19 @@ class RtcEventRtpPacketIncoming final : public RtcEvent { size_t header_length() const { return packet_.headers_size(); } size_t padding_length() const { return packet_.padding_size(); } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::map>& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventRtpPacketIncoming(const RtcEventRtpPacketIncoming& other); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h index 626c094ca9..c7b7a09718 100644 --- 
a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h @@ -13,11 +13,17 @@ #include #include +#include #include +#include #include +#include +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" +#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "modules/rtp_rtcp/source/rtp_packet.h" namespace webrtc { @@ -61,6 +67,19 @@ class RtcEventRtpPacketOutgoing final : public RtcEvent { size_t padding_length() const { return packet_.padding_size(); } int probe_cluster_id() const { return probe_cluster_id_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::map>& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventRtpPacketOutgoing(const RtcEventRtpPacketOutgoing& other); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h index e7b9061872..0be56c2065 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h @@ -12,13 +12,30 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_VIDEO_RECEIVE_STREAM_CONFIG_H_ #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "logging/rtc_event_log/rtc_stream_config.h" namespace webrtc { +struct LoggedVideoRecvConfig { + LoggedVideoRecvConfig() = default; + LoggedVideoRecvConfig(Timestamp timestamp, const rtclog::StreamConfig config) + : timestamp(timestamp), config(config) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtclog::StreamConfig config; +}; + class RtcEventVideoReceiveStreamConfig final : public RtcEvent { public: static constexpr Type kType = Type::VideoReceiveStreamConfig; @@ -34,6 +51,19 @@ class RtcEventVideoReceiveStreamConfig final : public RtcEvent { const rtclog::StreamConfig& config() const { return *config_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventVideoReceiveStreamConfig( const RtcEventVideoReceiveStreamConfig& other); @@ -41,18 +71,6 @@ class RtcEventVideoReceiveStreamConfig final : public RtcEvent { const std::unique_ptr config_; }; -struct LoggedVideoRecvConfig { - LoggedVideoRecvConfig() = default; - LoggedVideoRecvConfig(Timestamp timestamp, const rtclog::StreamConfig config) - : timestamp(timestamp), config(config) {} - - int64_t log_time_us() const { return 
timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtclog::StreamConfig config; -}; - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_VIDEO_RECEIVE_STREAM_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h index e72e75e49d..f1717b19ea 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h @@ -12,13 +12,30 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_VIDEO_SEND_STREAM_CONFIG_H_ #include +#include +#include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "logging/rtc_event_log/rtc_stream_config.h" namespace webrtc { +struct LoggedVideoSendConfig { + LoggedVideoSendConfig() = default; + LoggedVideoSendConfig(Timestamp timestamp, const rtclog::StreamConfig config) + : timestamp(timestamp), config(config) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtclog::StreamConfig config; +}; + class RtcEventVideoSendStreamConfig final : public RtcEvent { public: static constexpr Type kType = Type::VideoSendStreamConfig; @@ -34,23 +51,25 @@ class RtcEventVideoSendStreamConfig final : public RtcEvent { const rtclog::StreamConfig& config() const { return *config_; } + static std::string Encode(rtc::ArrayView batch) { + // TODO(terelius): Implement + return ""; + } + + static RtcEventLogParseStatus Parse( + absl::string_view encoded_bytes, + bool batched, + std::vector& output) { + // TODO(terelius): Implement + return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); + } + private: RtcEventVideoSendStreamConfig(const RtcEventVideoSendStreamConfig& other); const std::unique_ptr config_; }; -struct LoggedVideoSendConfig { - LoggedVideoSendConfig() = default; - LoggedVideoSendConfig(Timestamp timestamp, const rtclog::StreamConfig config) - : timestamp(timestamp), config(config) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtclog::StreamConfig config; -}; } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_VIDEO_SEND_STREAM_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.cc index f663ec5abe..47db40c9f4 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.cc @@ -17,11 +17,17 @@ namespace webrtc { -std::unique_ptr FakeRtcEventLogFactory::CreateRtcEventLog( - RtcEventLog::EncodingType /*encoding_type*/) { +std::unique_ptr FakeRtcEventLogFactory::Create( + RtcEventLog::EncodingType /*encoding_type*/) const { auto fake_event_log = std::make_unique(); - last_log_created_ = fake_event_log.get(); + const_cast(this)->last_log_created_ = + 
fake_event_log.get(); return fake_event_log; } +std::unique_ptr FakeRtcEventLogFactory::CreateRtcEventLog( + RtcEventLog::EncodingType encoding_type) { + return Create(encoding_type); +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.h index 114c3e6323..c7ff33dee4 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.h @@ -23,6 +23,9 @@ class FakeRtcEventLogFactory : public RtcEventLogFactoryInterface { FakeRtcEventLogFactory() = default; ~FakeRtcEventLogFactory() override = default; + std::unique_ptr Create( + RtcEventLog::EncodingType encoding_type) const override; + std::unique_ptr CreateRtcEventLog( RtcEventLog::EncodingType encoding_type) override; diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.cc deleted file mode 100644 index 5ef3de11c0..0000000000 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.cc +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "logging/rtc_event_log/logged_events.h" - -namespace webrtc { - -LoggedPacketInfo::LoggedPacketInfo(const LoggedRtpPacket& rtp, - LoggedMediaType media_type, - bool rtx, - Timestamp capture_time) - : ssrc(rtp.header.ssrc), - stream_seq_no(rtp.header.sequenceNumber), - size(static_cast(rtp.total_length)), - payload_size(static_cast(rtp.total_length - - rtp.header.paddingLength - - rtp.header.headerLength)), - padding_size(static_cast(rtp.header.paddingLength)), - payload_type(rtp.header.payloadType), - media_type(media_type), - rtx(rtx), - marker_bit(rtp.header.markerBit), - has_transport_seq_no(rtp.header.extension.hasTransportSequenceNumber), - transport_seq_no(static_cast( - has_transport_seq_no ? rtp.header.extension.transportSequenceNumber - : 0)), - capture_time(capture_time), - log_packet_time(Timestamp::Micros(rtp.log_time_us())), - reported_send_time(rtp.header.extension.hasAbsoluteSendTime - ? 
rtp.header.extension.GetAbsoluteSendTimestamp() - : Timestamp::MinusInfinity()) {} - -LoggedPacketInfo::LoggedPacketInfo(const LoggedPacketInfo&) = default; - -LoggedPacketInfo::~LoggedPacketInfo() {} - -LoggedRtcpPacket::LoggedRtcpPacket(Timestamp timestamp, - const std::vector& packet) - : timestamp(timestamp), raw_data(packet) {} - -LoggedRtcpPacket::LoggedRtcpPacket(Timestamp timestamp, - const std::string& packet) - : timestamp(timestamp), raw_data(packet.size()) { - memcpy(raw_data.data(), packet.data(), packet.size()); -} - -LoggedRtcpPacket::LoggedRtcpPacket(const LoggedRtcpPacket& rhs) = default; - -LoggedRtcpPacket::~LoggedRtcpPacket() = default; - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h index 5bce658c30..d6b3cc607e 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h @@ -7,337 +7,12 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ + #ifndef LOGGING_RTC_EVENT_LOG_LOGGED_EVENTS_H_ #define LOGGING_RTC_EVENT_LOG_LOGGED_EVENTS_H_ -#include -#include - -#include "absl/types/optional.h" -#include "api/rtp_headers.h" -#include "api/units/time_delta.h" -#include "api/units/timestamp.h" -#include "modules/rtp_rtcp/source/rtcp_packet/bye.h" -#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" -#include "modules/rtp_rtcp/source/rtcp_packet/fir.h" -#include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" -#include "modules/rtp_rtcp/source/rtcp_packet/nack.h" -#include "modules/rtp_rtcp/source/rtcp_packet/pli.h" -#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" -#include "modules/rtp_rtcp/source/rtcp_packet/remb.h" -#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" -#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" - -namespace webrtc { - -// The different event types are deliberately POD. Analysis of large logs is -// already resource intensive. The code simplifications that would be possible -// possible by having a base class (containing e.g. the log time) are not -// considered to outweigh the added memory and runtime overhead incurred by -// adding a vptr. - -struct LoggedRtpPacket { - LoggedRtpPacket(Timestamp timestamp, - RTPHeader header, - size_t header_length, - size_t total_length) - : timestamp(timestamp), - header(header), - header_length(header_length), - total_length(total_length) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp; - // TODO(terelius): This allocates space for 15 CSRCs even if none are used. 
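// A minimal illustrative sketch (not part of the patch) of the trade-off the
// POD comment above describes: a plain logged-event struct is one machine
// word for the timestamp, while a polymorphic base would add a vptr to every
// logged event. Sizes assume a typical 64-bit ABI.
#include <cstdint>

struct PodLoggedEvent {                     // what the log uses: no vtable
  int64_t timestamp_us;                     // sizeof == 8
  int64_t log_time_us() const { return timestamp_us; }
};

struct VirtualLoggedEvent {                 // hypothetical alternative
  virtual ~VirtualLoggedEvent() = default;  // forces a vptr
  int64_t timestamp_us;                     // sizeof == 16 after vptr + padding
};
// With millions of packets per log, duplicating the small log_time_us()/
// log_time_ms() accessors in each struct is cheaper than paying a vptr plus
// virtual dispatch per event during analysis.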
- RTPHeader header; - size_t header_length; - size_t total_length; -}; - -struct LoggedRtpPacketIncoming { - LoggedRtpPacketIncoming(Timestamp timestamp, - RTPHeader header, - size_t header_length, - size_t total_length) - : rtp(timestamp, header, header_length, total_length) {} - int64_t log_time_us() const { return rtp.timestamp.us(); } - int64_t log_time_ms() const { return rtp.timestamp.ms(); } - - LoggedRtpPacket rtp; -}; - -struct LoggedRtpPacketOutgoing { - LoggedRtpPacketOutgoing(Timestamp timestamp, - RTPHeader header, - size_t header_length, - size_t total_length) - : rtp(timestamp, header, header_length, total_length) {} - int64_t log_time_us() const { return rtp.timestamp.us(); } - int64_t log_time_ms() const { return rtp.timestamp.ms(); } - - LoggedRtpPacket rtp; -}; - -struct LoggedRtcpPacket { - LoggedRtcpPacket(Timestamp timestamp, const std::vector& packet); - LoggedRtcpPacket(Timestamp timestamp, const std::string& packet); - LoggedRtcpPacket(const LoggedRtcpPacket&); - ~LoggedRtcpPacket(); - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp; - std::vector raw_data; -}; - -struct LoggedRtcpPacketIncoming { - LoggedRtcpPacketIncoming(Timestamp timestamp, - const std::vector& packet) - : rtcp(timestamp, packet) {} - LoggedRtcpPacketIncoming(Timestamp timestamp, const std::string& packet) - : rtcp(timestamp, packet) {} - - int64_t log_time_us() const { return rtcp.timestamp.us(); } - int64_t log_time_ms() const { return rtcp.timestamp.ms(); } - - LoggedRtcpPacket rtcp; -}; - -struct LoggedRtcpPacketOutgoing { - LoggedRtcpPacketOutgoing(Timestamp timestamp, - const std::vector& packet) - : rtcp(timestamp, packet) {} - LoggedRtcpPacketOutgoing(Timestamp timestamp, const std::string& packet) - : rtcp(timestamp, packet) {} - - int64_t log_time_us() const { return rtcp.timestamp.us(); } - int64_t log_time_ms() const { return rtcp.timestamp.ms(); } - - LoggedRtcpPacket rtcp; -}; - -struct LoggedRtcpPacketReceiverReport { - LoggedRtcpPacketReceiverReport() = default; - LoggedRtcpPacketReceiverReport(Timestamp timestamp, - const rtcp::ReceiverReport& rr) - : timestamp(timestamp), rr(rr) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::ReceiverReport rr; -}; - -struct LoggedRtcpPacketSenderReport { - LoggedRtcpPacketSenderReport() = default; - LoggedRtcpPacketSenderReport(Timestamp timestamp, - const rtcp::SenderReport& sr) - : timestamp(timestamp), sr(sr) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::SenderReport sr; -}; - -struct LoggedRtcpPacketExtendedReports { - LoggedRtcpPacketExtendedReports() = default; - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::ExtendedReports xr; -}; - -struct LoggedRtcpPacketRemb { - LoggedRtcpPacketRemb() = default; - LoggedRtcpPacketRemb(Timestamp timestamp, const rtcp::Remb& remb) - : timestamp(timestamp), remb(remb) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::Remb remb; -}; - -struct LoggedRtcpPacketNack { - LoggedRtcpPacketNack() = 
default; - LoggedRtcpPacketNack(Timestamp timestamp, const rtcp::Nack& nack) - : timestamp(timestamp), nack(nack) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::Nack nack; -}; - -struct LoggedRtcpPacketFir { - LoggedRtcpPacketFir() = default; - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::Fir fir; -}; - -struct LoggedRtcpPacketPli { - LoggedRtcpPacketPli() = default; - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::Pli pli; -}; - -struct LoggedRtcpPacketTransportFeedback { - LoggedRtcpPacketTransportFeedback() - : transport_feedback(/*include_timestamps=*/true, /*include_lost*/ true) { - } - LoggedRtcpPacketTransportFeedback( - Timestamp timestamp, - const rtcp::TransportFeedback& transport_feedback) - : timestamp(timestamp), transport_feedback(transport_feedback) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::TransportFeedback transport_feedback; -}; - -struct LoggedRtcpPacketLossNotification { - LoggedRtcpPacketLossNotification() = default; - LoggedRtcpPacketLossNotification( - Timestamp timestamp, - const rtcp::LossNotification& loss_notification) - : timestamp(timestamp), loss_notification(loss_notification) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::LossNotification loss_notification; -}; - -struct LoggedRtcpPacketBye { - LoggedRtcpPacketBye() = default; - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::Bye bye; -}; - -struct LoggedStartEvent { - explicit LoggedStartEvent(Timestamp timestamp) - : LoggedStartEvent(timestamp, timestamp) {} - - LoggedStartEvent(Timestamp timestamp, Timestamp utc_start_time) - : timestamp(timestamp), utc_start_time(utc_start_time) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp utc_time() const { return utc_start_time; } - - Timestamp timestamp; - Timestamp utc_start_time; -}; - -struct LoggedStopEvent { - explicit LoggedStopEvent(Timestamp timestamp) : timestamp(timestamp) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - - Timestamp timestamp; -}; - -struct InferredRouteChangeEvent { - int64_t log_time_ms() const { return log_time.ms(); } - int64_t log_time_us() const { return log_time.us(); } - uint32_t route_id; - Timestamp log_time = Timestamp::MinusInfinity(); - uint16_t send_overhead; - uint16_t return_overhead; -}; - -enum class LoggedMediaType : uint8_t { kUnknown, kAudio, kVideo }; - -struct LoggedPacketInfo { - LoggedPacketInfo(const LoggedRtpPacket& rtp, - LoggedMediaType media_type, - bool rtx, - Timestamp capture_time); - LoggedPacketInfo(const LoggedPacketInfo&); - ~LoggedPacketInfo(); - int64_t log_time_ms() const { return log_packet_time.ms(); } - int64_t log_time_us() const { return log_packet_time.us(); } 
- uint32_t ssrc; - uint16_t stream_seq_no; - uint16_t size; - uint16_t payload_size; - uint16_t padding_size; - uint16_t overhead = 0; - uint8_t payload_type; - LoggedMediaType media_type = LoggedMediaType::kUnknown; - bool rtx = false; - bool marker_bit = false; - bool has_transport_seq_no = false; - bool last_in_feedback = false; - uint16_t transport_seq_no = 0; - // The RTP header timestamp unwrapped and converted from tick count to seconds - // based timestamp. - Timestamp capture_time; - // The time the packet was logged. This is the receive time for incoming - // packets and send time for outgoing. - Timestamp log_packet_time; - // Send time as reported by abs-send-time extension, For outgoing packets this - // corresponds to log_packet_time, but might be measured using another clock. - Timestamp reported_send_time; - // The receive time that was reported in feedback. For incoming packets this - // corresponds to log_packet_time, but might be measured using another clock. - // PlusInfinity indicates that the packet was lost. - Timestamp reported_recv_time = Timestamp::MinusInfinity(); - // The time feedback message was logged. This is the feedback send time for - // incoming packets and feedback receive time for outgoing. - // PlusInfinity indicates that feedback was expected but not received. - Timestamp log_feedback_time = Timestamp::MinusInfinity(); - // The delay betweeen receiving an RTP packet and sending feedback for - // incoming packets. For outgoing packets we don't know the feedback send - // time, and this is instead calculated as the difference in reported receive - // time between this packet and the last packet in the same feedback message. - TimeDelta feedback_hold_duration = TimeDelta::MinusInfinity(); -}; - -enum class LoggedIceEventType { - kAdded, - kUpdated, - kDestroyed, - kSelected, - kCheckSent, - kCheckReceived, - kCheckResponseSent, - kCheckResponseReceived, -}; - -struct LoggedIceEvent { - uint32_t candidate_pair_id; - Timestamp log_time; - LoggedIceEventType event_type; -}; - +// TODO(terelius): Delete this forwarding header when downstream +// projects have been updated. +#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" -} // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_LOGGED_EVENTS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log2rtp_dump.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log2rtp_dump.cc index 93cd4652cc..a0514259aa 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log2rtp_dump.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log2rtp_dump.cc @@ -21,6 +21,7 @@ #include "absl/flags/parse.h" #include "absl/flags/usage.h" #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/array_view.h" #include "api/rtc_event_log/rtc_event_log.h" @@ -75,7 +76,7 @@ using MediaType = webrtc::ParsedRtcEventLog::MediaType; // The empty string must be validated as true, because it is the default value // of the command-line flag. In this case, no value is written to the output // variable. -absl::optional ParseSsrc(std::string str) { +absl::optional ParseSsrc(absl::string_view str) { // If the input string starts with 0x or 0X it indicates a hexadecimal number. 
uint32_t ssrc; auto read_mode = std::dec; @@ -84,7 +85,7 @@ absl::optional ParseSsrc(std::string str) { read_mode = std::hex; str = str.substr(2); } - std::stringstream ss(str); + std::stringstream ss(std::string{str}); ss >> read_mode >> ssrc; if (str.empty() || (!ss.fail() && ss.eof())) return ssrc; diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.cc index 4237b2a713..a48bbdeb8e 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.cc @@ -16,13 +16,13 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" -#include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -40,10 +40,10 @@ std::unique_ptr CreateEncoder( RtcEventLog::EncodingType type) { switch (type) { case RtcEventLog::EncodingType::Legacy: - RTC_LOG(LS_INFO) << "Creating legacy encoder for RTC event log."; + RTC_DLOG(LS_INFO) << "Creating legacy encoder for RTC event log."; return std::make_unique(); case RtcEventLog::EncodingType::NewFormat: - RTC_LOG(LS_INFO) << "Creating new format encoder for RTC event log."; + RTC_DLOG(LS_INFO) << "Creating new format encoder for RTC event log."; return std::make_unique(); default: RTC_LOG(LS_ERROR) << "Unknown RtcEventLog encoder type (" << int(type) @@ -92,8 +92,7 @@ bool RtcEventLogImpl::StartLogging(std::unique_ptr output, const int64_t timestamp_us = rtc::TimeMillis() * 1000; const int64_t utc_time_us = rtc::TimeUTCMillis() * 1000; - RTC_LOG(LS_INFO) << "Starting WebRTC event log. (Timestamp, UTC) = " - "(" + RTC_LOG(LS_INFO) << "Starting WebRTC event log. (Timestamp, UTC) = (" << timestamp_us << ", " << utc_time_us << ")."; RTC_DCHECK_RUN_ON(&logging_state_checker_); @@ -114,7 +113,7 @@ bool RtcEventLogImpl::StartLogging(std::unique_ptr output, } void RtcEventLogImpl::StopLogging() { - RTC_LOG(LS_INFO) << "Stopping WebRTC event log."; + RTC_DLOG(LS_INFO) << "Stopping WebRTC event log."; // TODO(danilchap): Do not block current thread waiting on the task queue. // It might work for now, for current callers, but disallows caller to share // threads with the `task_queue_`. 
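// A sketch of the hazard the TODO above describes, using std:: primitives as
// stand-ins for rtc::Event and the task queue (the names and types here are
// illustrative, not the WebRTC API): StopLogging posts a task and blocks until
// it has run, so a caller running on the task queue's own thread would wait
// for a task that can never execute.
#include <condition_variable>
#include <functional>
#include <mutex>

struct BlockingStopSketch {
  std::mutex mu;
  std::condition_variable cv;
  bool stopped = false;

  void StopLogging(const std::function<void(std::function<void()>)>& post_task) {
    post_task([this] {
      std::lock_guard<std::mutex> lock(mu);
      stopped = true;
      cv.notify_one();  // plays the role of output_stopped.Set()
    });
    std::unique_lock<std::mutex> lock(mu);
    cv.wait(lock, [this] { return stopped; });  // plays the role of Wait(kForever)
  }
};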
@@ -122,7 +121,7 @@ void RtcEventLogImpl::StopLogging() { StopLogging([&output_stopped]() { output_stopped.Set(); }); output_stopped.Wait(rtc::Event::kForever); - RTC_LOG(LS_INFO) << "WebRTC event log successfully stopped."; + RTC_DLOG(LS_INFO) << "WebRTC event log successfully stopped."; } void RtcEventLogImpl::StopLogging(std::function callback) { @@ -183,7 +182,8 @@ void RtcEventLogImpl::ScheduleOutput() { const int64_t time_since_output_ms = now_ms - last_output_ms_; const uint32_t delay = rtc::SafeClamp( *output_period_ms_ - time_since_output_ms, 0, *output_period_ms_); - task_queue_->PostDelayedTask(output_task, delay); + task_queue_->PostDelayedTask(std::move(output_task), + TimeDelta::Millis(delay)); } } @@ -232,8 +232,8 @@ void RtcEventLogImpl::LogEventsFromMemoryToOutput() { } void RtcEventLogImpl::WriteConfigsAndHistoryToOutput( - const std::string& encoded_configs, - const std::string& encoded_history) { + absl::string_view encoded_configs, + absl::string_view encoded_history) { // This function is used to merge the strings instead of calling the output // object twice with small strings. The function also avoids copying any // strings in the typical case where there are no config events. @@ -242,7 +242,11 @@ void RtcEventLogImpl::WriteConfigsAndHistoryToOutput( } else if (encoded_history.empty()) { WriteToOutput(encoded_configs); // Very unusual case. } else { - WriteToOutput(encoded_configs + encoded_history); + std::string s; + s.reserve(encoded_configs.size() + encoded_history.size()); + s.append(encoded_configs.data(), encoded_configs.size()); + s.append(encoded_history.data(), encoded_history.size()); + WriteToOutput(s); } } @@ -259,7 +263,7 @@ void RtcEventLogImpl::StopLoggingInternal() { StopOutput(); } -void RtcEventLogImpl::WriteToOutput(const std::string& output_string) { +void RtcEventLogImpl::WriteToOutput(absl::string_view output_string) { RTC_DCHECK(event_output_ && event_output_->IsActive()); if (!event_output_->Write(output_string)) { RTC_LOG(LS_ERROR) << "Failed to write RTC event to output."; diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.h index 61e90d139d..6c6417254e 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.h @@ -17,6 +17,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" @@ -54,10 +55,10 @@ class RtcEventLogImpl final : public RtcEventLog { void StopOutput() RTC_RUN_ON(task_queue_); - void WriteConfigsAndHistoryToOutput(const std::string& encoded_configs, - const std::string& encoded_history) + void WriteConfigsAndHistoryToOutput(absl::string_view encoded_configs, + absl::string_view encoded_history) RTC_RUN_ON(task_queue_); - void WriteToOutput(const std::string& output_string) RTC_RUN_ON(task_queue_); + void WriteToOutput(absl::string_view output_string) RTC_RUN_ON(task_queue_); void StopLoggingInternal() RTC_RUN_ON(task_queue_); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc index b4e1170444..7f4761146e 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc @@ 
-19,6 +19,7 @@ #include #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/network_state_predictor.h" #include "api/rtc_event_log/rtc_event_log.h" @@ -28,6 +29,7 @@ #include "logging/rtc_event_log/encoder/delta_encoding.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_common.h" #include "logging/rtc_event_log/encoder/var_int.h" +#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" #include "logging/rtc_event_log/rtc_event_processor.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" #include "modules/include/module_common_types_public.h" @@ -37,6 +39,7 @@ #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/sequence_number_util.h" @@ -53,6 +56,12 @@ return ParsedRtcEventLog::ParseStatus::Error(#X, __FILE__, __LINE__); \ } while (0) +#define RTC_PARSE_CHECK_OR_RETURN_MESSAGE(X, M) \ + do { \ + if (!(X)) \ + return ParsedRtcEventLog::ParseStatus::Error((M), __FILE__, __LINE__); \ + } while (0) + #define RTC_PARSE_CHECK_OR_RETURN_OP(OP, X, Y) \ do { \ if (!((X)OP(Y))) \ @@ -684,46 +693,40 @@ ParsedRtcEventLog::ParseStatus StoreRtcpBlocks( header.fmt() == rtcp::TransportFeedback::kFeedbackMessageType) { LoggedRtcpPacketTransportFeedback parsed_block; parsed_block.timestamp = timestamp; - if (parsed_block.transport_feedback.Parse(header)) - transport_feedback_list->push_back(std::move(parsed_block)); + RTC_PARSE_CHECK_OR_RETURN(parsed_block.transport_feedback.Parse(header)); + transport_feedback_list->push_back(std::move(parsed_block)); } else if (header.type() == rtcp::SenderReport::kPacketType) { LoggedRtcpPacketSenderReport parsed_block; parsed_block.timestamp = timestamp; - if (parsed_block.sr.Parse(header)) { - sr_list->push_back(std::move(parsed_block)); - } + RTC_PARSE_CHECK_OR_RETURN(parsed_block.sr.Parse(header)); + sr_list->push_back(std::move(parsed_block)); } else if (header.type() == rtcp::ReceiverReport::kPacketType) { LoggedRtcpPacketReceiverReport parsed_block; parsed_block.timestamp = timestamp; - if (parsed_block.rr.Parse(header)) { - rr_list->push_back(std::move(parsed_block)); - } + RTC_PARSE_CHECK_OR_RETURN(parsed_block.rr.Parse(header)); + rr_list->push_back(std::move(parsed_block)); } else if (header.type() == rtcp::ExtendedReports::kPacketType) { LoggedRtcpPacketExtendedReports parsed_block; parsed_block.timestamp = timestamp; - if (parsed_block.xr.Parse(header)) { - xr_list->push_back(std::move(parsed_block)); - } + RTC_PARSE_CHECK_OR_RETURN(parsed_block.xr.Parse(header)); + xr_list->push_back(std::move(parsed_block)); } else if (header.type() == rtcp::Fir::kPacketType && header.fmt() == rtcp::Fir::kFeedbackMessageType) { LoggedRtcpPacketFir parsed_block; parsed_block.timestamp = timestamp; - if (parsed_block.fir.Parse(header)) { - fir_list->push_back(std::move(parsed_block)); - } + RTC_PARSE_CHECK_OR_RETURN(parsed_block.fir.Parse(header)); + fir_list->push_back(std::move(parsed_block)); } else if (header.type() == rtcp::Pli::kPacketType && header.fmt() == rtcp::Pli::kFeedbackMessageType) { LoggedRtcpPacketPli parsed_block; parsed_block.timestamp = timestamp; - if (parsed_block.pli.Parse(header)) { - pli_list->push_back(std::move(parsed_block)); - } + RTC_PARSE_CHECK_OR_RETURN(parsed_block.pli.Parse(header)); 
+ pli_list->push_back(std::move(parsed_block)); } else if (header.type() == rtcp::Bye::kPacketType) { LoggedRtcpPacketBye parsed_block; parsed_block.timestamp = timestamp; - if (parsed_block.bye.Parse(header)) { - bye_list->push_back(std::move(parsed_block)); - } + RTC_PARSE_CHECK_OR_RETURN(parsed_block.bye.Parse(header)); + bye_list->push_back(std::move(parsed_block)); } else if (header.type() == rtcp::Psfb::kPacketType && header.fmt() == rtcp::Psfb::kAfbMessageType) { bool type_found = false; @@ -743,13 +746,13 @@ ParsedRtcEventLog::ParseStatus StoreRtcpBlocks( type_found = true; } } + // We ignore other application-layer feedback types. } else if (header.type() == rtcp::Nack::kPacketType && header.fmt() == rtcp::Nack::kFeedbackMessageType) { LoggedRtcpPacketNack parsed_block; parsed_block.timestamp = timestamp; - if (parsed_block.nack.Parse(header)) { - nack_list->push_back(std::move(parsed_block)); - } + RTC_PARSE_CHECK_OR_RETURN(parsed_block.nack.Parse(header)); + nack_list->push_back(std::move(parsed_block)); } } return ParsedRtcEventLog::ParseStatus::Success(); @@ -947,6 +950,35 @@ std::vector GetRuntimeRtpHeaderExtensionConfig( } // End of conversion functions. +LoggedPacketInfo::LoggedPacketInfo(const LoggedRtpPacket& rtp, + LoggedMediaType media_type, + bool rtx, + Timestamp capture_time) + : ssrc(rtp.header.ssrc), + stream_seq_no(rtp.header.sequenceNumber), + size(static_cast(rtp.total_length)), + payload_size(static_cast(rtp.total_length - + rtp.header.paddingLength - + rtp.header.headerLength)), + padding_size(static_cast(rtp.header.paddingLength)), + payload_type(rtp.header.payloadType), + media_type(media_type), + rtx(rtx), + marker_bit(rtp.header.markerBit), + has_transport_seq_no(rtp.header.extension.hasTransportSequenceNumber), + transport_seq_no(static_cast( + has_transport_seq_no ? rtp.header.extension.transportSequenceNumber + : 0)), + capture_time(capture_time), + log_packet_time(Timestamp::Micros(rtp.log_time_us())), + reported_send_time(rtp.header.extension.hasAbsoluteSendTime + ? 
rtp.header.extension.GetAbsoluteSendTimestamp() + : Timestamp::MinusInfinity()) {} + +LoggedPacketInfo::LoggedPacketInfo(const LoggedPacketInfo&) = default; + +LoggedPacketInfo::~LoggedPacketInfo() {} + ParsedRtcEventLog::~ParsedRtcEventLog() = default; ParsedRtcEventLog::LoggedRtpStreamIncoming::LoggedRtpStreamIncoming() = default; @@ -1076,8 +1108,8 @@ void ParsedRtcEventLog::Clear() { last_incoming_rtcp_packet_.clear(); - first_timestamp_ = std::numeric_limits::max(); - last_timestamp_ = std::numeric_limits::min(); + first_timestamp_ = Timestamp::PlusInfinity(); + last_timestamp_ = Timestamp::MinusInfinity(); first_log_segment_ = LogSegment(0, std::numeric_limits::max()); incoming_rtp_extensions_maps_.clear(); @@ -1085,7 +1117,7 @@ void ParsedRtcEventLog::Clear() { } ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseFile( - const std::string& filename) { + absl::string_view filename) { FileWrapper file = FileWrapper::OpenReadOnly(filename); if (!file.is_open()) { RTC_LOG(LS_WARNING) << "Could not open file " << filename @@ -1111,12 +1143,12 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseFile( } ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseString( - const std::string& s) { + absl::string_view s) { return ParseStream(s); } ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( - const std::string& s) { + absl::string_view s) { Clear(); ParseStatus status = ParseStreamInternal(s); @@ -1198,8 +1230,8 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( // stream configurations and starting/stopping the log. // TODO(terelius): Figure out if we actually need to find the first and last // timestamp in the parser. It seems like this could be done by the caller. - first_timestamp_ = std::numeric_limits::max(); - last_timestamp_ = std::numeric_limits::min(); + first_timestamp_ = Timestamp::PlusInfinity(); + last_timestamp_ = Timestamp::MinusInfinity(); StoreFirstAndLastTimestamp(alr_state_events()); StoreFirstAndLastTimestamp(route_change_events()); for (const auto& audio_stream : audio_playout_events()) { @@ -1238,7 +1270,8 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( // event, we could use the timestamp of the the last previous regular event. auto start_iter = start_log_events().begin(); auto stop_iter = stop_log_events().begin(); - int64_t start_us = first_timestamp(); + int64_t start_us = + first_timestamp().us_or(std::numeric_limits::max()); int64_t next_start_us = std::numeric_limits::max(); int64_t stop_us = std::numeric_limits::max(); if (start_iter != start_log_events().end()) { @@ -1252,15 +1285,14 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( } stop_us = std::min(stop_us, next_start_us); if (stop_us == std::numeric_limits::max() && - last_timestamp() != std::numeric_limits::min()) { - stop_us = last_timestamp(); + !last_timestamp().IsMinusInfinity()) { + stop_us = last_timestamp().us(); } RTC_PARSE_CHECK_OR_RETURN_LE(start_us, stop_us); first_log_segment_ = LogSegment(start_us, stop_us); - if (first_timestamp_ == std::numeric_limits::max() && - last_timestamp_ == std::numeric_limits::min()) { - first_timestamp_ = last_timestamp_ = 0; + if (first_timestamp_ > last_timestamp_) { + first_timestamp_ = last_timestamp_ = Timestamp::Zero(); } return status; @@ -1269,18 +1301,34 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStreamInternal( absl::string_view s) { constexpr uint64_t kMaxEventSize = 10000000; // Sanity check. 
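// A small sketch of why the Timestamp sentinels introduced above work as fold
// identities for first_timestamp_/last_timestamp_: std::min with PlusInfinity
// and std::max with MinusInfinity leave any finite value unchanged, and
// "first > last" afterwards means "no timestamped events were stored" (which
// the parser collapses to Timestamp::Zero()). The loop values are examples.
#include <algorithm>
#include "api/units/timestamp.h"

void FoldTimestampsSketch() {
  webrtc::Timestamp first = webrtc::Timestamp::PlusInfinity();
  webrtc::Timestamp last = webrtc::Timestamp::MinusInfinity();
  for (int64_t us : {3'000'000, 1'000'000, 2'000'000}) {
    webrtc::Timestamp t = webrtc::Timestamp::Micros(us);
    first = std::min(first, t);
    last = std::max(last, t);
  }
  // first == 1 s, last == 3 s. With an empty event list, first stays +inf and
  // last stays -inf, so first > last and both get reset to Timestamp::Zero().
}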
+ // Protobuf defines the message tag as + // (field_number << 3) | wire_type. In the legacy encoding, the field number + // is supposed to be 1 and the wire type for a length-delimited field is 2. + // In the new encoding we still expect the wire type to be 2, but the field + // number will be greater than 1. + constexpr uint64_t kExpectedV1Tag = (1 << 3) | 2; + bool success = false; + + // "Peek" at the first varint. + absl::string_view event_start = s; + uint64_t tag = 0; + std::tie(success, std::ignore) = DecodeVarInt(s, &tag); + if (!success) { + RTC_LOG(LS_WARNING) << "Failed to read varint from beginning of event log."; + RTC_PARSE_WARN_AND_RETURN_SUCCESS_IF(allow_incomplete_logs_, + kIncompleteLogError); + return ParseStatus::Error("Failed to read field tag varint", __FILE__, + __LINE__); + } + s = event_start; + + if (tag >> 1 == static_cast(RtcEvent::Type::BeginV3Log)) { + return ParseStreamInternalV3(s); + } while (!s.empty()) { - absl::string_view event_start = s; - bool success = false; - - // Read the next message tag. Protobuf defines the message tag as - // (field_number << 3) | wire_type. In the legacy encoding, the field number - // is supposed to be 1 and the wire type for a length-delimited field is 2. - // In the new encoding we still expect the wire type to be 2, but the field - // number will be greater than 1. - constexpr uint64_t kExpectedV1Tag = (1 << 3) | 2; - uint64_t tag = 0; + // If not, "reset" event_start and read the field tag for the next event. + event_start = s; std::tie(success, s) = DecodeVarInt(s, &tag); if (!success) { RTC_LOG(LS_WARNING) @@ -1290,6 +1338,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStreamInternal( return ParseStatus::Error("Failed to read field tag varint", __FILE__, __LINE__); } + constexpr uint64_t kWireTypeMask = 0x07; const uint64_t wire_type = tag & kWireTypeMask; if (wire_type != 2) { @@ -1359,12 +1408,161 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStreamInternal( return ParseStatus::Success(); } +ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStreamInternalV3( + absl::string_view s) { + constexpr uint64_t kMaxEventSize = 10000000; // Sanity check. + bool expect_begin_log_event = true; + bool success = false; + + while (!s.empty()) { + // Read event type. + uint64_t event_tag = 0; + std::tie(success, s) = DecodeVarInt(s, &event_tag); + RTC_PARSE_CHECK_OR_RETURN_MESSAGE(success, "Failed to read event type."); + bool batched = event_tag & 1; + uint64_t event_type = event_tag >> 1; + + // Read event size + uint64_t event_size_bytes = 0; + std::tie(success, s) = DecodeVarInt(s, &event_size_bytes); + RTC_PARSE_CHECK_OR_RETURN_MESSAGE(success, "Failed to read event size."); + if (event_size_bytes > kMaxEventSize || event_size_bytes > s.size()) { + RTC_LOG(LS_WARNING) << "Event size is too large."; + RTC_PARSE_CHECK_OR_RETURN_LE(event_size_bytes, kMaxEventSize); + RTC_PARSE_CHECK_OR_RETURN_LE(event_size_bytes, s.size()); + } + + // Read remaining event fields into a buffer. 
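// A sketch of the V3 event framing that ParseStreamInternalV3 above consumes.
// The varint helper is a simplified stand-in for the project's EncodeVarInt/
// DecodeVarInt, not the real implementation; the header layout (tag varint
// carrying the batched bit, then a size varint, then `size` bytes of fields)
// is taken from the parsing code in this hunk.
#include <cstdint>
#include <string>

std::string EncodeVarIntSketch(uint64_t v) {
  std::string out;
  do {
    uint8_t byte = v & 0x7f;
    v >>= 7;
    if (v) byte |= 0x80;  // continuation bit
    out.push_back(static_cast<char>(byte));
  } while (v);
  return out;
}

std::string EncodeV3EventHeaderSketch(uint64_t event_type, bool batched,
                                      uint64_t payload_size_bytes) {
  // event_tag = (event_type << 1) | batched; the parser recovers them with
  // event_tag >> 1 and event_tag & 1 respectively.
  return EncodeVarIntSketch((event_type << 1) | (batched ? 1 : 0)) +
         EncodeVarIntSketch(payload_size_bytes);
}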
+ absl::string_view event_fields = s.substr(0, event_size_bytes); + s = s.substr(event_size_bytes); + + if (expect_begin_log_event) { + RTC_PARSE_CHECK_OR_RETURN_EQ( + event_type, static_cast(RtcEvent::Type::BeginV3Log)); + expect_begin_log_event = false; + } + + switch (event_type) { + case static_cast(RtcEvent::Type::BeginV3Log): + RtcEventBeginLog::Parse(event_fields, batched, start_log_events_); + break; + case static_cast(RtcEvent::Type::EndV3Log): + RtcEventEndLog::Parse(event_fields, batched, stop_log_events_); + expect_begin_log_event = true; + break; + case static_cast(RtcEvent::Type::AlrStateEvent): + RtcEventAlrState::Parse(event_fields, batched, alr_state_events_); + break; + case static_cast(RtcEvent::Type::AudioPlayout): + RtcEventAudioPlayout::Parse(event_fields, batched, + audio_playout_events_); + break; + case static_cast(RtcEvent::Type::BweUpdateDelayBased): + RtcEventBweUpdateDelayBased::Parse(event_fields, batched, + bwe_delay_updates_); + break; + case static_cast(RtcEvent::Type::AudioNetworkAdaptation): + RtcEventAudioNetworkAdaptation::Parse(event_fields, batched, + audio_network_adaptation_events_); + break; + case static_cast(RtcEvent::Type::AudioReceiveStreamConfig): + RtcEventAudioReceiveStreamConfig::Parse(event_fields, batched, + audio_recv_configs_); + break; + case static_cast(RtcEvent::Type::AudioSendStreamConfig): + RtcEventAudioSendStreamConfig::Parse(event_fields, batched, + audio_send_configs_); + break; + case static_cast(RtcEvent::Type::BweUpdateLossBased): + RtcEventBweUpdateLossBased::Parse(event_fields, batched, + bwe_loss_updates_); + break; + case static_cast(RtcEvent::Type::DtlsTransportState): + RtcEventDtlsTransportState::Parse(event_fields, batched, + dtls_transport_states_); + break; + case static_cast(RtcEvent::Type::DtlsWritableState): + RtcEventDtlsWritableState::Parse(event_fields, batched, + dtls_writable_states_); + break; + case static_cast(RtcEvent::Type::FrameDecoded): + RtcEventFrameDecoded::Parse(event_fields, batched, decoded_frames_); + break; + case static_cast(RtcEvent::Type::GenericAckReceived): + RtcEventGenericAckReceived::Parse(event_fields, batched, + generic_acks_received_); + break; + case static_cast(RtcEvent::Type::GenericPacketReceived): + RtcEventGenericPacketReceived::Parse(event_fields, batched, + generic_packets_received_); + break; + case static_cast(RtcEvent::Type::GenericPacketSent): + RtcEventGenericPacketSent::Parse(event_fields, batched, + generic_packets_sent_); + break; + case static_cast(RtcEvent::Type::IceCandidatePairConfig): + RtcEventIceCandidatePairConfig::Parse(event_fields, batched, + ice_candidate_pair_configs_); + break; + case static_cast(RtcEvent::Type::IceCandidatePairEvent): + RtcEventIceCandidatePair::Parse(event_fields, batched, + ice_candidate_pair_events_); + break; + case static_cast(RtcEvent::Type::ProbeClusterCreated): + RtcEventProbeClusterCreated::Parse(event_fields, batched, + bwe_probe_cluster_created_events_); + break; + case static_cast(RtcEvent::Type::ProbeResultFailure): + RtcEventProbeResultFailure::Parse(event_fields, batched, + bwe_probe_failure_events_); + break; + case static_cast(RtcEvent::Type::ProbeResultSuccess): + RtcEventProbeResultSuccess::Parse(event_fields, batched, + bwe_probe_success_events_); + break; + case static_cast(RtcEvent::Type::RemoteEstimateEvent): + RtcEventRemoteEstimate::Parse(event_fields, batched, + remote_estimate_events_); + break; + case static_cast(RtcEvent::Type::RouteChangeEvent): + RtcEventRouteChange::Parse(event_fields, batched, 
route_change_events_); + break; + case static_cast(RtcEvent::Type::RtcpPacketIncoming): + RtcEventRtcpPacketIncoming::Parse(event_fields, batched, + incoming_rtcp_packets_); + break; + case static_cast(RtcEvent::Type::RtcpPacketOutgoing): + RtcEventRtcpPacketOutgoing::Parse(event_fields, batched, + outgoing_rtcp_packets_); + break; + case static_cast(RtcEvent::Type::RtpPacketIncoming): + RtcEventRtpPacketIncoming::Parse(event_fields, batched, + incoming_rtp_packets_map_); + break; + case static_cast(RtcEvent::Type::RtpPacketOutgoing): + RtcEventRtpPacketOutgoing::Parse(event_fields, batched, + outgoing_rtp_packets_map_); + break; + case static_cast(RtcEvent::Type::VideoReceiveStreamConfig): + RtcEventVideoReceiveStreamConfig::Parse(event_fields, batched, + video_recv_configs_); + break; + case static_cast(RtcEvent::Type::VideoSendStreamConfig): + RtcEventVideoSendStreamConfig::Parse(event_fields, batched, + video_send_configs_); + break; + } + } + + return ParseStatus::Success(); +} + template void ParsedRtcEventLog::StoreFirstAndLastTimestamp(const std::vector& v) { if (v.empty()) return; - first_timestamp_ = std::min(first_timestamp_, v.front().log_time_us()); - last_timestamp_ = std::max(last_timestamp_, v.back().log_time_us()); + first_timestamp_ = std::min(first_timestamp_, v.front().log_time()); + last_timestamp_ = std::max(last_timestamp_, v.back().log_time()); } ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreParsedLegacyEvent( @@ -1437,7 +1635,8 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreParsedLegacyEvent( // Use RtpPacketReceived instead of more generic RtpPacket because former // has a buildin convertion to RTPHeader. RtpPacketReceived rtp_header; - RTC_PARSE_CHECK_OR_RETURN(rtp_header.Parse(rtp_packet.header())); + RTC_PARSE_CHECK_OR_RETURN( + rtp_header.Parse(rtc::CopyOnWriteBuffer(rtp_packet.header()))); if (const RtpHeaderExtensionMap* extension_map = GetRtpHeaderExtensionMap( rtp_packet.incoming(), rtp_header.Ssrc())) { @@ -1573,6 +1772,12 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreParsedLegacyEvent( ice_candidate_pair_events_.push_back(status_or_value.value()); break; } + case rtclog::Event::REMOTE_ESTIMATE: { + auto status_or_value = GetRemoteEstimateEvent(event); + RTC_RETURN_IF_ERROR(status_or_value.status()); + remote_estimate_events_.push_back(status_or_value.value()); + break; + } case rtclog::Event::UNKNOWN_EVENT: { break; } @@ -1591,13 +1796,13 @@ const RtpHeaderExtensionMap* ParsedRtcEventLog::GetRtpHeaderExtensionMap( } if (parse_unconfigured_header_extensions_ == UnconfiguredHeaderExtensions::kAttemptWebrtcDefaultConfig) { - RTC_LOG(LS_WARNING) << "Using default header extension map for SSRC " - << ssrc; + RTC_DLOG(LS_WARNING) << "Using default header extension map for SSRC " + << ssrc; extensions_maps.insert(std::make_pair(ssrc, default_extension_map_)); return &default_extension_map_; } - RTC_LOG(LS_WARNING) << "Not parsing header extensions for SSRC " << ssrc - << ". No header extension map found."; + RTC_DLOG(LS_WARNING) << "Not parsing header extensions for SSRC " << ssrc + << ". 
No header extension map found."; return nullptr; } @@ -1947,7 +2152,7 @@ ParsedRtcEventLog::GetIceCandidatePairConfig( LoggedIceCandidatePairConfig res; const rtclog::IceCandidatePairConfig& config = rtc_event.ice_candidate_pair_config(); - RTC_CHECK(rtc_event.has_timestamp_us()); + RTC_PARSE_CHECK_OR_RETURN(rtc_event.has_timestamp_us()); res.timestamp = Timestamp::Micros(rtc_event.timestamp_us()); RTC_PARSE_CHECK_OR_RETURN(config.has_config_type()); res.type = GetRuntimeIceCandidatePairConfigType(config.config_type()); @@ -1986,7 +2191,7 @@ ParsedRtcEventLog::GetIceCandidatePairEvent( LoggedIceCandidatePairEvent res; const rtclog::IceCandidatePairEvent& event = rtc_event.ice_candidate_pair_event(); - RTC_CHECK(rtc_event.has_timestamp_us()); + RTC_PARSE_CHECK_OR_RETURN(rtc_event.has_timestamp_us()); res.timestamp = Timestamp::Micros(rtc_event.timestamp_us()); RTC_PARSE_CHECK_OR_RETURN(event.has_event_type()); res.type = GetRuntimeIceCandidatePairEventType(event.event_type()); @@ -1997,6 +2202,23 @@ ParsedRtcEventLog::GetIceCandidatePairEvent( return res; } +ParsedRtcEventLog::ParseStatusOr +ParsedRtcEventLog::GetRemoteEstimateEvent(const rtclog::Event& event) const { + RTC_PARSE_CHECK_OR_RETURN(event.has_type()); + RTC_PARSE_CHECK_OR_RETURN_EQ(event.type(), rtclog::Event::REMOTE_ESTIMATE); + LoggedRemoteEstimateEvent res; + const rtclog::RemoteEstimate& remote_estimate_event = event.remote_estimate(); + RTC_PARSE_CHECK_OR_RETURN(event.has_timestamp_us()); + res.timestamp = Timestamp::Micros(event.timestamp_us()); + if (remote_estimate_event.has_link_capacity_lower_kbps()) + res.link_capacity_lower = DataRate::KilobitsPerSec( + remote_estimate_event.link_capacity_lower_kbps()); + if (remote_estimate_event.has_link_capacity_upper_kbps()) + res.link_capacity_upper = DataRate::KilobitsPerSec( + remote_estimate_event.link_capacity_upper_kbps()); + return res; +} + // Returns the MediaType for registered SSRCs. Search from the end to use last // registered types first. ParsedRtcEventLog::MediaType ParsedRtcEventLog::GetMediaType( @@ -2084,12 +2306,12 @@ std::vector ParsedRtcEventLog::GetPacketInfos( seq_num_unwrapper = SequenceNumberUnwrapper(); indices.clear(); } - RTC_DCHECK(new_log_time >= last_log_time); + RTC_DCHECK_GE(new_log_time, last_log_time); last_log_time = new_log_time; }; auto rtp_handler = [&](const LoggedRtpPacket& rtp) { - advance_time(Timestamp::Millis(rtp.log_time_ms())); + advance_time(rtp.log_time()); MediaStreamInfo* stream = &streams[rtp.header.ssrc]; Timestamp capture_time = Timestamp::MinusInfinity(); if (!stream->rtx) { @@ -2128,23 +2350,22 @@ std::vector ParsedRtcEventLog::GetPacketInfos( }; Timestamp feedback_base_time = Timestamp::MinusInfinity(); - absl::optional last_feedback_base_time_us; + Timestamp last_feedback_base_time = Timestamp::MinusInfinity(); auto feedback_handler = [&](const LoggedRtcpPacketTransportFeedback& logged_rtcp) { - auto log_feedback_time = Timestamp::Millis(logged_rtcp.log_time_ms()); + auto log_feedback_time = logged_rtcp.log_time(); advance_time(log_feedback_time); const auto& feedback = logged_rtcp.transport_feedback; // Add timestamp deltas to a local time base selected on first packet // arrival. This won't be the true time base, but makes it easier to // manually inspect time stamps. 
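// A sketch of the local time-base bookkeeping done by feedback_handler above,
// with plain Timestamp/TimeDelta parameters standing in for the
// TransportFeedback accessors (GetBaseDelta()/BaseTime()) used in the hunk;
// the struct and member names are illustrative.
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"

struct FeedbackBaseTrackerSketch {
  webrtc::Timestamp feedback_base_time = webrtc::Timestamp::MinusInfinity();
  webrtc::Timestamp last_feedback_base_time = webrtc::Timestamp::MinusInfinity();

  void OnFeedback(webrtc::Timestamp log_feedback_time,
                  webrtc::TimeDelta base_delta,
                  webrtc::Timestamp reported_base_time) {
    if (!last_feedback_base_time.IsFinite()) {
      // First feedback message: anchor the local base at the log time rather
      // than the true remote clock; later messages only add reported deltas.
      feedback_base_time = log_feedback_time;
    } else {
      feedback_base_time += base_delta;
    }
    last_feedback_base_time = reported_base_time;
  }
};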
- if (!last_feedback_base_time_us) { + if (!last_feedback_base_time.IsFinite()) { feedback_base_time = log_feedback_time; } else { - feedback_base_time += TimeDelta::Micros( - feedback.GetBaseDeltaUs(*last_feedback_base_time_us)); + feedback_base_time += feedback.GetBaseDelta(last_feedback_base_time); } - last_feedback_base_time_us = feedback.GetBaseTimeUs(); + last_feedback_base_time = feedback.BaseTime(); std::vector packet_feedbacks; packet_feedbacks.reserve(feedback.GetAllPackets().size()); @@ -2166,7 +2387,7 @@ std::vector ParsedRtcEventLog::GetPacketInfos( continue; } if (packet.received()) { - receive_timestamp += TimeDelta::Micros(packet.delta_us()); + receive_timestamp += packet.delta(); if (sent->reported_recv_time.IsInfinite()) { sent->reported_recv_time = receive_timestamp; sent->log_feedback_time = log_feedback_time; @@ -2191,8 +2412,11 @@ std::vector ParsedRtcEventLog::GetPacketInfos( last->last_in_feedback = true; for (LoggedPacketInfo* fb : packet_feedbacks) { if (direction == PacketDirection::kOutgoingPacket) { - fb->feedback_hold_duration = - last->reported_recv_time - fb->reported_recv_time; + if (last->reported_recv_time.IsFinite() && + fb->reported_recv_time.IsFinite()) { + fb->feedback_hold_duration = + last->reported_recv_time - fb->reported_recv_time; + } } else { fb->feedback_hold_duration = log_feedback_time - fb->log_packet_time; diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h index d4c8409b61..ae2d4fe586 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h @@ -15,22 +15,25 @@ #include #include #include -#include // pair #include #include "absl/base/attributes.h" +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event_log.h" #include "call/video_receive_stream.h" #include "call/video_send_stream.h" +#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" #include "logging/rtc_event_log/events/rtc_event_alr_state.h" #include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" #include "logging/rtc_event_log/events/rtc_event_audio_playout.h" #include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h" #include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_begin_log.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" #include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" #include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h" +#include "logging/rtc_event_log/events/rtc_event_end_log.h" #include "logging/rtc_event_log/events/rtc_event_frame_decoded.h" #include "logging/rtc_event_log/events/rtc_event_generic_ack_received.h" #include "logging/rtc_event_log/events/rtc_event_generic_packet_received.h" @@ -42,9 +45,12 @@ #include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" #include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" #include "logging/rtc_event_log/events/rtc_event_route_change.h" +#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h" +#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h" +#include "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h" +#include 
"logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" #include "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h" #include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" -#include "logging/rtc_event_log/logged_events.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" #include "rtc_base/ignore_wundef.h" @@ -64,6 +70,80 @@ namespace webrtc { enum PacketDirection { kIncomingPacket = 0, kOutgoingPacket }; +enum class LoggedMediaType : uint8_t { kUnknown, kAudio, kVideo }; + +struct LoggedPacketInfo { + LoggedPacketInfo(const LoggedRtpPacket& rtp, + LoggedMediaType media_type, + bool rtx, + Timestamp capture_time); + LoggedPacketInfo(const LoggedPacketInfo&); + ~LoggedPacketInfo(); + int64_t log_time_ms() const { return log_packet_time.ms(); } + int64_t log_time_us() const { return log_packet_time.us(); } + uint32_t ssrc; + uint16_t stream_seq_no; + uint16_t size; + uint16_t payload_size; + uint16_t padding_size; + uint16_t overhead = 0; + uint8_t payload_type; + LoggedMediaType media_type = LoggedMediaType::kUnknown; + bool rtx = false; + bool marker_bit = false; + bool has_transport_seq_no = false; + bool last_in_feedback = false; + uint16_t transport_seq_no = 0; + // The RTP header timestamp unwrapped and converted from tick count to seconds + // based timestamp. + Timestamp capture_time; + // The time the packet was logged. This is the receive time for incoming + // packets and send time for outgoing. + Timestamp log_packet_time; + // Send time as reported by abs-send-time extension, For outgoing packets this + // corresponds to log_packet_time, but might be measured using another clock. + Timestamp reported_send_time; + // The receive time that was reported in feedback. For incoming packets this + // corresponds to log_packet_time, but might be measured using another clock. + // PlusInfinity indicates that the packet was lost. + Timestamp reported_recv_time = Timestamp::MinusInfinity(); + // The time feedback message was logged. This is the feedback send time for + // incoming packets and feedback receive time for outgoing. + // PlusInfinity indicates that feedback was expected but not received. + Timestamp log_feedback_time = Timestamp::MinusInfinity(); + // The delay betweeen receiving an RTP packet and sending feedback for + // incoming packets. For outgoing packets we don't know the feedback send + // time, and this is instead calculated as the difference in reported receive + // time between this packet and the last packet in the same feedback message. + TimeDelta feedback_hold_duration = TimeDelta::MinusInfinity(); +}; + +struct InferredRouteChangeEvent { + int64_t log_time_ms() const { return log_time.ms(); } + int64_t log_time_us() const { return log_time.us(); } + uint32_t route_id; + Timestamp log_time = Timestamp::MinusInfinity(); + uint16_t send_overhead; + uint16_t return_overhead; +}; + +enum class LoggedIceEventType { + kAdded, + kUpdated, + kDestroyed, + kSelected, + kCheckSent, + kCheckReceived, + kCheckResponseSent, + kCheckResponseReceived, +}; + +struct LoggedIceEvent { + uint32_t candidate_pair_id; + Timestamp log_time; + LoggedIceEventType event_type; +}; + // This class is used to process lists of LoggedRtpPacketIncoming // and LoggedRtpPacketOutgoing without duplicating the code. // TODO(terelius): Remove this class. Instead use e.g. 
a vector of pointers @@ -240,48 +320,11 @@ class ParsedRtcEventLog { kDontParse, kAttemptWebrtcDefaultConfig }; - class ParseStatus { - public: - static ParseStatus Success() { return ParseStatus(); } - static ParseStatus Error(std::string error, std::string file, int line) { - return ParseStatus(error, file, line); - } - bool ok() const { return error_.empty() && file_.empty() && line_ == 0; } - std::string message() const { - return error_ + " failed at " + file_ + " line " + std::to_string(line_); - } - - ABSL_DEPRECATED("Use ok() instead") operator bool() const { return ok(); } - - private: - ParseStatus() : error_(), file_(), line_(0) {} - ParseStatus(std::string error, std::string file, int line) - : error_(error), file_(file), line_(line) {} - std::string error_; - std::string file_; - int line_; - }; + using ParseStatus = RtcEventLogParseStatus; template - class ParseStatusOr { - public: - ParseStatusOr(const ParseStatus& error) // NOLINT - : status_(error), value_() {} - ParseStatusOr(const T& value) // NOLINT - : status_(ParseStatus::Success()), value_(value) {} - bool ok() const { return status_.ok(); } - const T& value() const& { - RTC_DCHECK(status_.ok()); - return value_; - } - std::string message() const { return status_.message(); } - const ParseStatus& status() const { return status_; } - - private: - ParseStatus status_; - T value_; - }; + using ParseStatusOr = RtcEventLogParseStatusOr; struct LoggedRtpStreamIncoming { LoggedRtpStreamIncoming(); @@ -337,13 +380,13 @@ class ParsedRtcEventLog { void Clear(); // Reads an RtcEventLog file and returns success if parsing was successful. - ParseStatus ParseFile(const std::string& file_name); + ParseStatus ParseFile(absl::string_view file_name); // Reads an RtcEventLog from a string and returns success if successful. - ParseStatus ParseString(const std::string& s); + ParseStatus ParseString(absl::string_view s); // Reads an RtcEventLog from an string and returns success if successful. - ParseStatus ParseStream(const std::string& s); + ParseStatus ParseStream(absl::string_view s); MediaType GetMediaType(uint32_t ssrc, PacketDirection direction) const; @@ -601,8 +644,8 @@ class ParsedRtcEventLog { return decoded_frames_; } - int64_t first_timestamp() const { return first_timestamp_; } - int64_t last_timestamp() const { return last_timestamp_; } + Timestamp first_timestamp() const { return first_timestamp_; } + Timestamp last_timestamp() const { return last_timestamp_; } const LogSegment& first_log_segment() const { return first_log_segment_; } @@ -620,6 +663,7 @@ class ParsedRtcEventLog { private: ABSL_MUST_USE_RESULT ParseStatus ParseStreamInternal(absl::string_view s); + ABSL_MUST_USE_RESULT ParseStatus ParseStreamInternalV3(absl::string_view s); ABSL_MUST_USE_RESULT ParseStatus StoreParsedLegacyEvent(const rtclog::Event& event); @@ -681,6 +725,9 @@ class ParsedRtcEventLog { ParsedRtcEventLog::ParseStatusOr GetIceCandidatePairEvent(const rtclog::Event& event) const; + ParsedRtcEventLog::ParseStatusOr + GetRemoteEstimateEvent(const rtclog::Event& event) const; + // Parsing functions for new format. 
ParseStatus StoreAlrStateEvent(const rtclog2::AlrState& proto); ParseStatus StoreAudioNetworkAdaptationEvent( @@ -846,8 +893,8 @@ class ParsedRtcEventLog { std::vector last_incoming_rtcp_packet_; - int64_t first_timestamp_; - int64_t last_timestamp_; + Timestamp first_timestamp_ = Timestamp::PlusInfinity(); + Timestamp last_timestamp_ = Timestamp::MinusInfinity(); LogSegment first_log_segment_ = LogSegment(0, std::numeric_limits::max()); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.cc index e77a67182b..2607028f60 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.cc @@ -21,11 +21,14 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/array_view.h" #include "api/network_state_predictor.h" #include "api/rtp_headers.h" #include "api/rtp_parameters.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" #include "modules/rtp_rtcp/include/rtp_cvo.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -79,7 +82,7 @@ void ShuffleInPlace(Random* prng, rtc::ArrayView array) { } absl::optional GetExtensionId(const std::vector& extensions, - const std::string& uri) { + absl::string_view uri) { for (const auto& extension : extensions) { if (extension.uri == uri) return extension.id; @@ -354,14 +357,14 @@ rtcp::Bye EventGenerator::NewBye() { rtcp::TransportFeedback EventGenerator::NewTransportFeedback() { rtcp::TransportFeedback transport_feedback; uint16_t base_seq_no = prng_.Rand(); - int64_t base_time_us = prng_.Rand(); - transport_feedback.SetBase(base_seq_no, base_time_us); - transport_feedback.AddReceivedPacket(base_seq_no, base_time_us); - int64_t time_us = base_time_us; + Timestamp base_time = Timestamp::Micros(prng_.Rand()); + transport_feedback.SetBase(base_seq_no, base_time); + transport_feedback.AddReceivedPacket(base_seq_no, base_time); + Timestamp time = base_time; for (uint16_t i = 1u; i < 10u; i++) { - time_us += prng_.Rand(0, 100000); + time += TimeDelta::Micros(prng_.Rand(0, 100'000)); if (prng_.Rand()) { - transport_feedback.AddReceivedPacket(base_seq_no + i, time_us); + transport_feedback.AddReceivedPacket(base_seq_no + i, time); } } return transport_feedback; @@ -1252,9 +1255,9 @@ void EventVerifier::VerifyLoggedTransportFeedback( logged_transport_feedback.transport_feedback.GetReceivedPackets()[i] .sequence_number()); EXPECT_EQ( - original_transport_feedback.GetReceivedPackets()[i].delta_us(), + original_transport_feedback.GetReceivedPackets()[i].delta(), logged_transport_feedback.transport_feedback.GetReceivedPackets()[i] - .delta_us()); + .delta()); } } diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_stream_config.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_stream_config.cc index d4d30d00bc..aa107c80bc 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_stream_config.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_stream_config.cc @@ -10,6 +10,8 @@ #include "logging/rtc_event_log/rtc_stream_config.h" +#include "absl/strings/string_view.h" + namespace webrtc { namespace rtclog { @@ -30,7 +32,7 @@ bool StreamConfig::operator!=(const StreamConfig& other) const { return !(*this 
== other); } -StreamConfig::Codec::Codec(const std::string& payload_name, +StreamConfig::Codec::Codec(absl::string_view payload_name, int payload_type, int rtx_payload_type) : payload_name(payload_name), diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_stream_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_stream_config.h index a81249aebf..d114332d34 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_stream_config.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_stream_config.h @@ -16,6 +16,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/rtp_headers.h" #include "api/rtp_parameters.h" @@ -41,7 +42,7 @@ struct StreamConfig { RtcpMode rtcp_mode = RtcpMode::kReducedSize; struct Codec { - Codec(const std::string& payload_name, + Codec(absl::string_view payload_name, int payload_type, int rtx_payload_type); diff --git a/TMessagesProj/jni/voip/webrtc/media/OWNERS b/TMessagesProj/jni/voip/webrtc/media/OWNERS index b8910326b9..5d8ec5aba6 100644 --- a/TMessagesProj/jni/voip/webrtc/media/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/media/OWNERS @@ -1,4 +1,4 @@ -nisse@webrtc.org +brandtr@webrtc.org ilnik@webrtc.org sprang@webrtc.org magjed@webrtc.org @@ -8,3 +8,6 @@ perkj@webrtc.org # Audio-related changes: peah@webrtc.org saza@webrtc.org + +# Datachannel-related changes: +orphis@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/media/base/adapted_video_track_source.cc b/TMessagesProj/jni/voip/webrtc/media/base/adapted_video_track_source.cc index f8f8f2dad1..816ada5f16 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/adapted_video_track_source.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/adapted_video_track_source.cc @@ -61,6 +61,10 @@ void AdaptedVideoTrackSource::OnFrame(const webrtc::VideoFrame& frame) { } } +void AdaptedVideoTrackSource::OnFrameDropped() { + broadcaster_.OnDiscardedFrame(); +} + void AdaptedVideoTrackSource::AddOrUpdateSink( rtc::VideoSinkInterface* sink, const rtc::VideoSinkWants& wants) { diff --git a/TMessagesProj/jni/voip/webrtc/media/base/adapted_video_track_source.h b/TMessagesProj/jni/voip/webrtc/media/base/adapted_video_track_source.h index 1386fbd9db..1c3e0b68d3 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/adapted_video_track_source.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/adapted_video_track_source.h @@ -45,6 +45,8 @@ class RTC_EXPORT AdaptedVideoTrackSource // plain memory frame, it is rotated. Subclasses producing native frames must // handle apply_rotation() themselves. void OnFrame(const webrtc::VideoFrame& frame); + // Indication from source that a frame was dropped. + void OnFrameDropped(); // Reports the appropriate frame size after adaptation. Returns true // if a frame is wanted. 
Returns false if there are no interested diff --git a/TMessagesProj/jni/voip/webrtc/media/base/codec.cc b/TMessagesProj/jni/voip/webrtc/media/base/codec.cc index 9b09f5e73b..e43d61cc1b 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/codec.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/codec.cc @@ -12,13 +12,13 @@ #include "absl/algorithm/container.h" #include "absl/strings/match.h" +#include "api/video_codecs/av1_profile.h" #include "api/video_codecs/h264_profile_level_id.h" #include "api/video_codecs/vp9_profile.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" -#include "system_wrappers/include/field_trial.h" namespace cricket { namespace { @@ -55,6 +55,8 @@ bool IsSameCodecSpecific(const std::string& name1, IsSameH264PacketizationMode(params1, params2); if (either_name_matches(kVp9CodecName)) return webrtc::VP9IsSameProfile(params1, params2); + if (either_name_matches(kAv1CodecName)) + return webrtc::AV1IsSameProfile(params1, params2); return true; } @@ -129,13 +131,14 @@ bool Codec::operator==(const Codec& c) const { feedback_params == c.feedback_params; } -bool Codec::Matches(const Codec& codec) const { +bool Codec::Matches(const Codec& codec, + const webrtc::FieldTrialsView* field_trials) const { // Match the codec id/name based on the typical static/dynamic name rules. // Matching is case-insensitive. // Legacy behaviour with killswitch. - if (webrtc::field_trial::IsDisabled( - "WebRTC-PayloadTypes-Lower-Dynamic-Range")) { + if (field_trials && + field_trials->IsDisabled("WebRTC-PayloadTypes-Lower-Dynamic-Range")) { const int kMaxStaticPayloadId = 95; return (id <= kMaxStaticPayloadId || codec.id <= kMaxStaticPayloadId) ? (id == codec.id) @@ -238,7 +241,8 @@ bool AudioCodec::operator==(const AudioCodec& c) const { return bitrate == c.bitrate && channels == c.channels && Codec::operator==(c); } -bool AudioCodec::Matches(const AudioCodec& codec) const { +bool AudioCodec::Matches(const AudioCodec& codec, + const webrtc::FieldTrialsView* field_trials) const { // If a nonzero clockrate is specified, it must match the actual clockrate. // If a nonzero bitrate is specified, it must match the actual bitrate, // unless the codec is VBR (0), where we just force the supplied value. @@ -248,7 +252,7 @@ bool AudioCodec::Matches(const AudioCodec& codec) const { // omitted if the number of channels is one." // Preference is ignored. // TODO(juberti): Treat a zero clockrate as 8000Hz, the RTP default clockrate. 
- return Codec::Matches(codec) && + return Codec::Matches(codec, field_trials) && ((codec.clockrate == 0 /*&& clockrate == 8000*/) || clockrate == codec.clockrate) && (codec.bitrate == 0 || bitrate <= 0 || bitrate == codec.bitrate) && @@ -303,6 +307,7 @@ VideoCodec::VideoCodec() : Codec() { VideoCodec::VideoCodec(const webrtc::SdpVideoFormat& c) : Codec(0 /* id */, c.name, kVideoCodecClockrate) { params = c.parameters; + scalability_modes = c.scalability_modes; } VideoCodec::VideoCodec(const VideoCodec& c) = default; @@ -324,8 +329,9 @@ bool VideoCodec::operator==(const VideoCodec& c) const { return Codec::operator==(c) && packetization == c.packetization; } -bool VideoCodec::Matches(const VideoCodec& other) const { - return Codec::Matches(other) && +bool VideoCodec::Matches(const VideoCodec& other, + const webrtc::FieldTrialsView* field_trials) const { + return Codec::Matches(other, field_trials) && IsSameCodecSpecific(name, params, other.name, other.params); } diff --git a/TMessagesProj/jni/voip/webrtc/media/base/codec.h b/TMessagesProj/jni/voip/webrtc/media/base/codec.h index cfc31aed1f..f0ed25123c 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/codec.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/codec.h @@ -16,7 +16,10 @@ #include #include +#include "absl/container/inlined_vector.h" +#include "absl/strings/string_view.h" #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/rtp_parameters.h" #include "api/video_codecs/sdp_video_format.h" #include "media/base/media_constants.h" @@ -29,9 +32,9 @@ typedef std::map CodecParameterMap; class FeedbackParam { public: FeedbackParam() = default; - FeedbackParam(const std::string& id, const std::string& param) + FeedbackParam(absl::string_view id, const std::string& param) : id_(id), param_(param) {} - explicit FeedbackParam(const std::string& id) + explicit FeedbackParam(absl::string_view id) : id_(id), param_(kParamValueEmpty) {} bool operator==(const FeedbackParam& other) const; @@ -75,7 +78,8 @@ struct RTC_EXPORT Codec { virtual ~Codec(); // Indicates if this codec is compatible with the specified codec. - bool Matches(const Codec& codec) const; + bool Matches(const Codec& codec, + const webrtc::FieldTrialsView* field_trials = nullptr) const; bool MatchesCapability(const webrtc::RtpCodecCapability& capability) const; // Find the parameter for `name` and write the value to `out`. @@ -132,7 +136,8 @@ struct AudioCodec : public Codec { ~AudioCodec() override = default; // Indicates if this codec is compatible with the specified codec. - bool Matches(const AudioCodec& codec) const; + bool Matches(const AudioCodec& codec, + const webrtc::FieldTrialsView* field_trials = nullptr) const; std::string ToString() const; @@ -148,6 +153,8 @@ struct AudioCodec : public Codec { struct RTC_EXPORT VideoCodec : public Codec { absl::optional packetization; + absl::InlinedVector + scalability_modes; // Creates a codec with the given parameters. VideoCodec(int id, const std::string& name); @@ -163,7 +170,8 @@ struct RTC_EXPORT VideoCodec : public Codec { // Indicates if this video codec is the same as the other video codec, e.g. if // they are both VP8 or VP9, or if they are both H264 with the same H264 // profile. H264 levels however are not compared. 
- bool Matches(const VideoCodec& codec) const; + bool Matches(const VideoCodec& codec, + const webrtc::FieldTrialsView* field_trials = nullptr) const; std::string ToString() const; diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.cc b/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.cc index aa8e2325b6..60f158eb2c 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.cc @@ -427,7 +427,8 @@ void FakeVideoMediaChannel::SetRecordableEncodedFrameCallback( void FakeVideoMediaChannel::ClearRecordableEncodedFrameCallback(uint32_t ssrc) { } -void FakeVideoMediaChannel::GenerateKeyFrame(uint32_t ssrc) {} +void FakeVideoMediaChannel::RequestRecvKeyFrame(uint32_t ssrc) {} +void FakeVideoMediaChannel::GenerateSendKeyFrame(uint32_t ssrc) {} FakeVoiceEngine::FakeVoiceEngine() : fail_create_channel_(false) { // Add a fake audio codec. Note that the name must not be "" as there are @@ -527,11 +528,11 @@ void FakeVideoEngine::UnregisterChannel(VideoMediaChannel* channel) { RTC_DCHECK(it != channels_.end()); channels_.erase(it); } -std::vector FakeVideoEngine::send_codecs() const { +std::vector FakeVideoEngine::send_codecs(bool use_rtx) const { return send_codecs_; } -std::vector FakeVideoEngine::recv_codecs() const { +std::vector FakeVideoEngine::recv_codecs(bool use_rtx) const { return recv_codecs_; } diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h b/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h index e4f7b6659f..b98a2da950 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h @@ -275,7 +275,7 @@ class RtpHelper : public Base { } void OnPacketSent(const rtc::SentPacket& sent_packet) override {} void OnReadyToSend(bool ready) override { ready_to_send_ = ready; } - void OnNetworkRouteChanged(const std::string& transport_name, + void OnNetworkRouteChanged(absl::string_view transport_name, const rtc::NetworkRoute& network_route) override { last_network_route_ = network_route; ++num_network_route_changes_; @@ -462,7 +462,8 @@ class FakeVideoMediaChannel : public RtpHelper { std::function callback) override; void ClearRecordableEncodedFrameCallback(uint32_t ssrc) override; - void GenerateKeyFrame(uint32_t ssrc) override; + void RequestRecvKeyFrame(uint32_t ssrc) override; + void GenerateSendKeyFrame(uint32_t ssrc) override; private: bool SetRecvCodecs(const std::vector& codecs); @@ -532,8 +533,14 @@ class FakeVideoEngine : public VideoEngineInterface { override; FakeVideoMediaChannel* GetChannel(size_t index); void UnregisterChannel(VideoMediaChannel* channel); - std::vector send_codecs() const override; - std::vector recv_codecs() const override; + std::vector send_codecs() const override { + return send_codecs(true); + } + std::vector recv_codecs() const override { + return recv_codecs(true); + } + std::vector send_codecs(bool include_rtx) const override; + std::vector recv_codecs(bool include_rtx) const override; void SetSendCodecs(const std::vector& codecs); void SetRecvCodecs(const std::vector& codecs); bool SetCapture(bool capture); diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_network_interface.h b/TMessagesProj/jni/voip/webrtc/media/base/fake_network_interface.h index 043e559f28..099b7cad38 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/fake_network_interface.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_network_interface.h @@ -13,8 
+13,11 @@ #include #include +#include #include +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" #include "media/base/media_channel.h" #include "media/base/rtp_utils.h" #include "modules/rtp_rtcp/source/rtp_util.h" @@ -22,15 +25,13 @@ #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/dscp.h" -#include "rtc_base/message_handler.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread.h" namespace cricket { // Fake NetworkInterface that sends/receives RTP/RTCP packets. -class FakeNetworkInterface : public MediaChannel::NetworkInterface, - public rtc::MessageHandlerAutoCleanup { +class FakeNetworkInterface : public MediaChannel::NetworkInterface { public: FakeNetworkInterface() : thread_(rtc::Thread::Current()), @@ -129,10 +130,10 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface, if (conf_) { for (size_t i = 0; i < conf_sent_ssrcs_.size(); ++i) { SetRtpSsrc(conf_sent_ssrcs_[i], *packet); - PostMessage(ST_RTP, *packet); + PostPacket(*packet); } } else { - PostMessage(ST_RTP, *packet); + PostPacket(*packet); } return true; } @@ -145,7 +146,8 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface, options_ = options; if (!conf_) { // don't worry about RTCP in conf mode for now - PostMessage(ST_RTCP, *packet); + RTC_LOG(LS_VERBOSE) << "Dropping RTCP packet, they are not handled by " + "MediaChannel anymore."; } return true; } @@ -161,22 +163,13 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface, return 0; } - void PostMessage(int id, const rtc::CopyOnWriteBuffer& packet) { - thread_->Post(RTC_FROM_HERE, this, id, rtc::WrapMessageData(packet)); - } - - virtual void OnMessage(rtc::Message* msg) { - rtc::TypedMessageData* msg_data = - static_cast*>(msg->pdata); - if (dest_) { - if (msg->message_id == ST_RTP) { - dest_->OnPacketReceived(msg_data->data(), rtc::TimeMicros()); - } else { - RTC_LOG(LS_VERBOSE) << "Dropping RTCP packet, they not handled by " - "MediaChannel anymore."; - } - } - delete msg_data; + void PostPacket(rtc::CopyOnWriteBuffer packet) { + thread_->PostTask( + SafeTask(safety_.flag(), [this, packet = std::move(packet)]() mutable { + if (dest_) { + dest_->OnPacketReceived(std::move(packet), rtc::TimeMicros()); + } + })); } private: @@ -204,7 +197,7 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface, } } - rtc::Thread* thread_; + webrtc::TaskQueueBase* thread_; MediaChannel* dest_; bool conf_; // The ssrcs used in sending out packets in conference mode. @@ -222,6 +215,7 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface, rtc::DiffServCodePoint dscp_; // Options of the most recently sent packet. rtc::PacketOptions options_; + webrtc::ScopedTaskSafety safety_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.cc b/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.cc index 4f42821762..21322419e1 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.cc @@ -21,7 +21,7 @@ void CompareHeaderExtensions(const char* packet1, size_t packet1_size, const char* packet2, size_t packet2_size, - const std::vector encrypted_headers, + const std::vector& encrypted_headers, bool expect_equal) { // Sanity check: packets must be large enough to contain the RTP header and // extensions header. 
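
The fake_network_interface.h hunk above drops the rtc::MessageHandlerAutoCleanup/OnMessage plumbing in favor of posting a lambda with webrtc::SafeTask guarded by a webrtc::ScopedTaskSafety member, so packets still queued when the object goes away are dropped instead of touching a dangling `this`. A minimal sketch of that same pattern in isolation; PacketForwarder, Post() and Deliver() are illustrative names, not part of the patch:

// Sketch of the ScopedTaskSafety + SafeTask pattern used in the hunk above.
// PacketForwarder and Deliver() are illustrative names, not from the patch.
#include <utility>

#include "api/task_queue/pending_task_safety_flag.h"
#include "api/task_queue/task_queue_base.h"
#include "rtc_base/copy_on_write_buffer.h"

class PacketForwarder {
 public:
  explicit PacketForwarder(webrtc::TaskQueueBase* network_thread)
      : network_thread_(network_thread) {}

  // Hands the packet to the network thread. If `this` is destroyed before the
  // task runs, the safety flag is marked not-alive and SafeTask turns the
  // queued closure into a no-op instead of dereferencing a dangling pointer.
  void Post(rtc::CopyOnWriteBuffer packet) {
    network_thread_->PostTask(webrtc::SafeTask(
        safety_.flag(), [this, packet = std::move(packet)]() mutable {
          Deliver(std::move(packet));
        }));
  }

 private:
  void Deliver(rtc::CopyOnWriteBuffer packet) { /* consume the packet */ }

  webrtc::TaskQueueBase* const network_thread_;
  // Declared last so it is destroyed first, invalidating pending tasks before
  // the rest of the object is torn down.
  webrtc::ScopedTaskSafety safety_;
};

Compared with the removed rtc::MessageHandler code there is no OnMessage() dispatch or message-id bookkeeping, and cancellation on destruction comes for free from the safety flag.
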
diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.h b/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.h index f2578151ed..8a176038cb 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.h @@ -295,7 +295,7 @@ void CompareHeaderExtensions(const char* packet1, size_t packet1_size, const char* packet2, size_t packet2_size, - const std::vector encrypted_headers, + const std::vector& encrypted_headers, bool expect_equal); #endif // MEDIA_BASE_FAKE_RTP_H_ diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_video_renderer.cc b/TMessagesProj/jni/voip/webrtc/media/base/fake_video_renderer.cc index b3ceb352f0..b235738d24 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/fake_video_renderer.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_video_renderer.cc @@ -11,31 +11,77 @@ #include "media/base/fake_video_renderer.h" namespace cricket { - -FakeVideoRenderer::FakeVideoRenderer() = default; - -void FakeVideoRenderer::OnFrame(const webrtc::VideoFrame& frame) { - webrtc::MutexLock lock(&mutex_); +namespace { +bool CheckFrameColorYuv(const webrtc::VideoFrame& frame) { // TODO(zhurunz) Check with VP8 team to see if we can remove this // tolerance on Y values. Some unit tests produce Y values close // to 16 rather than close to zero, for supposedly black frames. // Largest value observed is 34, e.g., running // PeerConnectionIntegrationTest.SendAndReceive16To9AspectRatio. - black_frame_ = CheckFrameColorYuv(0, 48, 128, 128, 128, 128, &frame); - // Treat unexpected frame size as error. + static constexpr uint8_t y_min = 0; + static constexpr uint8_t y_max = 48; + static constexpr uint8_t u_min = 128; + static constexpr uint8_t u_max = 128; + static constexpr uint8_t v_min = 128; + static constexpr uint8_t v_max = 128; + + if (!frame.video_frame_buffer()) { + return false; + } + rtc::scoped_refptr i420_buffer = + frame.video_frame_buffer()->ToI420(); + // Y + int y_width = frame.width(); + int y_height = frame.height(); + const uint8_t* y_plane = i420_buffer->DataY(); + const uint8_t* y_pos = y_plane; + int32_t y_pitch = i420_buffer->StrideY(); + for (int i = 0; i < y_height; ++i) { + for (int j = 0; j < y_width; ++j) { + uint8_t y_value = *(y_pos + j); + if (y_value < y_min || y_value > y_max) { + return false; + } + } + y_pos += y_pitch; + } + // U and V + int chroma_width = i420_buffer->ChromaWidth(); + int chroma_height = i420_buffer->ChromaHeight(); + const uint8_t* u_plane = i420_buffer->DataU(); + const uint8_t* v_plane = i420_buffer->DataV(); + const uint8_t* u_pos = u_plane; + const uint8_t* v_pos = v_plane; + int32_t u_pitch = i420_buffer->StrideU(); + int32_t v_pitch = i420_buffer->StrideV(); + for (int i = 0; i < chroma_height; ++i) { + for (int j = 0; j < chroma_width; ++j) { + uint8_t u_value = *(u_pos + j); + if (u_value < u_min || u_value > u_max) { + return false; + } + uint8_t v_value = *(v_pos + j); + if (v_value < v_min || v_value > v_max) { + return false; + } + } + u_pos += u_pitch; + v_pos += v_pitch; + } + return true; +} +} // namespace + +FakeVideoRenderer::FakeVideoRenderer() = default; + +void FakeVideoRenderer::OnFrame(const webrtc::VideoFrame& frame) { + webrtc::MutexLock lock(&mutex_); + black_frame_ = CheckFrameColorYuv(frame); ++num_rendered_frames_; width_ = frame.width(); height_ = frame.height(); rotation_ = frame.rotation(); timestamp_us_ = frame.timestamp_us(); - ntp_timestamp_ms_ = frame.ntp_time_ms(); - color_space_ = frame.color_space(); - packet_infos_ = 
frame.packet_infos(); - frame_rendered_event_.Set(); -} - -bool FakeVideoRenderer::WaitForRenderedFrame(int64_t timeout_ms) { - return frame_rendered_event_.Wait(timeout_ms); } } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_video_renderer.h b/TMessagesProj/jni/voip/webrtc/media/base/fake_video_renderer.h index 9f3c87c379..33d99a2668 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/fake_video_renderer.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_video_renderer.h @@ -18,7 +18,6 @@ #include "api/video/video_frame_buffer.h" #include "api/video/video_rotation.h" #include "api/video/video_sink_interface.h" -#include "rtc_base/event.h" #include "rtc_base/synchronization/mutex.h" namespace cricket { @@ -30,8 +29,6 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface { void OnFrame(const webrtc::VideoFrame& frame) override; - int errors() const { return errors_; } - int width() const { webrtc::MutexLock lock(&mutex_); return width_; @@ -61,89 +58,14 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface { return black_frame_; } - int64_t ntp_time_ms() const { - webrtc::MutexLock lock(&mutex_); - return ntp_timestamp_ms_; - } - - absl::optional color_space() const { - webrtc::MutexLock lock(&mutex_); - return color_space_; - } - - webrtc::RtpPacketInfos packet_infos() const { - webrtc::MutexLock lock(&mutex_); - return packet_infos_; - } - - bool WaitForRenderedFrame(int64_t timeout_ms); - private: - static bool CheckFrameColorYuv(uint8_t y_min, - uint8_t y_max, - uint8_t u_min, - uint8_t u_max, - uint8_t v_min, - uint8_t v_max, - const webrtc::VideoFrame* frame) { - if (!frame || !frame->video_frame_buffer()) { - return false; - } - rtc::scoped_refptr i420_buffer = - frame->video_frame_buffer()->ToI420(); - // Y - int y_width = frame->width(); - int y_height = frame->height(); - const uint8_t* y_plane = i420_buffer->DataY(); - const uint8_t* y_pos = y_plane; - int32_t y_pitch = i420_buffer->StrideY(); - for (int i = 0; i < y_height; ++i) { - for (int j = 0; j < y_width; ++j) { - uint8_t y_value = *(y_pos + j); - if (y_value < y_min || y_value > y_max) { - return false; - } - } - y_pos += y_pitch; - } - // U and V - int chroma_width = i420_buffer->ChromaWidth(); - int chroma_height = i420_buffer->ChromaHeight(); - const uint8_t* u_plane = i420_buffer->DataU(); - const uint8_t* v_plane = i420_buffer->DataV(); - const uint8_t* u_pos = u_plane; - const uint8_t* v_pos = v_plane; - int32_t u_pitch = i420_buffer->StrideU(); - int32_t v_pitch = i420_buffer->StrideV(); - for (int i = 0; i < chroma_height; ++i) { - for (int j = 0; j < chroma_width; ++j) { - uint8_t u_value = *(u_pos + j); - if (u_value < u_min || u_value > u_max) { - return false; - } - uint8_t v_value = *(v_pos + j); - if (v_value < v_min || v_value > v_max) { - return false; - } - } - u_pos += u_pitch; - v_pos += v_pitch; - } - return true; - } - - int errors_ = 0; int width_ = 0; int height_ = 0; webrtc::VideoRotation rotation_ = webrtc::kVideoRotation_0; int64_t timestamp_us_ = 0; int num_rendered_frames_ = 0; - int64_t ntp_timestamp_ms_ = 0; bool black_frame_ = false; mutable webrtc::Mutex mutex_; - rtc::Event frame_rendered_event_; - absl::optional color_space_; - webrtc::RtpPacketInfos packet_infos_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_channel.cc b/TMessagesProj/jni/voip/webrtc/media/base/media_channel.cc index 11953c2c5b..e01bfb1a82 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_channel.cc +++ 
b/TMessagesProj/jni/voip/webrtc/media/base/media_channel.cc @@ -11,29 +11,22 @@ #include "media/base/media_channel.h" #include "media/base/rtp_utils.h" -#include "rtc_base/task_utils/to_queued_task.h" namespace cricket { using webrtc::FrameDecryptorInterface; using webrtc::FrameEncryptorInterface; using webrtc::FrameTransformerInterface; using webrtc::PendingTaskSafetyFlag; +using webrtc::SafeTask; using webrtc::TaskQueueBase; -using webrtc::ToQueuedTask; using webrtc::VideoTrackInterface; VideoOptions::VideoOptions() : content_hint(VideoTrackInterface::ContentHint::kNone) {} VideoOptions::~VideoOptions() = default; -MediaChannel::MediaChannel(const MediaConfig& config, - TaskQueueBase* network_thread) - : enable_dscp_(config.enable_dscp), - network_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()), - network_thread_(network_thread) {} - -MediaChannel::MediaChannel(TaskQueueBase* network_thread) - : enable_dscp_(false), +MediaChannel::MediaChannel(TaskQueueBase* network_thread, bool enable_dscp) + : enable_dscp_(enable_dscp), network_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()), network_thread_(network_thread) {} @@ -95,6 +88,11 @@ bool MediaChannel::ExtmapAllowMixed() const { return extmap_allow_mixed_; } +bool MediaChannel::HasNetworkInterface() const { + RTC_DCHECK_RUN_ON(network_thread_); + return network_interface_ != nullptr; +} + void MediaChannel::SetEncoderToPacketizerFrameTransformer( uint32_t ssrc, rtc::scoped_refptr frame_transformer) {} @@ -127,7 +125,7 @@ void MediaChannel::SetPreferredDscp(rtc::DiffServCodePoint new_dscp) { // This is currently the common path as the derived channel classes // get called on the worker thread. There are still some tests though // that call directly on the network thread. - network_thread_->PostTask(ToQueuedTask( + network_thread_->PostTask(SafeTask( network_safety_, [this, new_dscp]() { SetPreferredDscp(new_dscp); })); return; } @@ -191,7 +189,7 @@ void MediaChannel::SendRtp(const uint8_t* data, if (network_thread_->IsCurrent()) { send(); } else { - network_thread_->PostTask(ToQueuedTask(network_safety_, std::move(send))); + network_thread_->PostTask(SafeTask(network_safety_, std::move(send))); } } @@ -208,7 +206,7 @@ void MediaChannel::SendRtcp(const uint8_t* data, size_t len) { if (network_thread_->IsCurrent()) { send(); } else { - network_thread_->PostTask(ToQueuedTask(network_safety_, std::move(send))); + network_thread_->PostTask(SafeTask(network_safety_, std::move(send))); } } diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h b/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h index b3ceb3930f..721bbd3948 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h @@ -26,6 +26,7 @@ #include "api/media_stream_interface.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/transport/data_channel_transport_interface.h" #include "api/transport/rtp/rtp_source.h" #include "api/units/time_delta.h" @@ -33,12 +34,11 @@ #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "api/video/video_timing.h" -#include "api/video_codecs/video_encoder_config.h" +#include "api/video_codecs/video_encoder_factory.h" #include "call/video_receive_stream.h" #include "common_video/include/quality_limitation_reason.h" #include "media/base/codec.h" #include "media/base/delayable.h" -#include "media/base/media_config.h" #include 
"media/base/media_constants.h" #include "media/base/stream_params.h" #include "modules/audio_processing/include/audio_processing_statistics.h" @@ -52,7 +52,7 @@ #include "rtc_base/socket.h" #include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "video/config/video_encoder_config.h" namespace rtc { class Timing; @@ -138,6 +138,8 @@ struct VideoOptions { // Force screencast to use a minimum bitrate. This flag comes from // the PeerConnection constraint 'googScreencastMinBitrate'. It is // copied to the encoder config by WebRtcVideoChannel. + // TODO(https://crbug.com/1315155): Remove the ability to set it in Chromium + // and delete this flag (it should default to 100 kbps). absl::optional screencast_min_bitrate_kbps; // Set by screencast sources. Implies selection of encoding settings // suitable for screencast. Most likely not the right way to do @@ -170,9 +172,8 @@ class MediaChannel { virtual ~NetworkInterface() {} }; - MediaChannel(const MediaConfig& config, - webrtc::TaskQueueBase* network_thread); - explicit MediaChannel(webrtc::TaskQueueBase* network_thread); + explicit MediaChannel(webrtc::TaskQueueBase* network_thread, + bool enable_dscp = false); virtual ~MediaChannel(); virtual cricket::MediaType media_type() const = 0; @@ -189,7 +190,7 @@ class MediaChannel { virtual void OnReadyToSend(bool ready) = 0; // Called when the network route used for sending packets changed. virtual void OnNetworkRouteChanged( - const std::string& transport_name, + absl::string_view transport_name, const rtc::NetworkRoute& network_route) = 0; // Creates a new outgoing media stream with SSRCs and CNAME as described // by sp. @@ -210,11 +211,14 @@ class MediaChannel { // Resets any cached StreamParams for an unsignaled RecvStream, and removes // any existing unsignaled streams. virtual void ResetUnsignaledRecvStream() = 0; - // Informs the media channel when the transport's demuxer criteria is updated. + // This is currently a workaround because of the demuxer state being managed + // across two separate threads. Once the state is consistently managed on + // the same thread (network), this workaround can be removed. + // These two notifications inform the media channel when the transport's + // demuxer criteria is being updated. // * OnDemuxerCriteriaUpdatePending() happens on the same thread that the // channel's streams are added and removed (worker thread). - // * OnDemuxerCriteriaUpdateComplete() happens on the thread where the demuxer - // lives (network thread). + // * OnDemuxerCriteriaUpdateComplete() happens on the same thread. // Because the demuxer is updated asynchronously, there is a window of time // where packets are arriving to the channel for streams that have already // been removed on the worker thread. It is important NOT to treat these as @@ -241,6 +245,13 @@ class MediaChannel { // Enable network condition based codec switching. virtual void SetVideoCodecSwitchingEnabled(bool enabled); + // note: The encoder_selector object must remain valid for the lifetime of the + // MediaChannel, unless replaced. + virtual void SetEncoderSelector( + uint32_t ssrc, + webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { + } + // Base method to send packet using NetworkInterface. 
bool SendPacket(rtc::CopyOnWriteBuffer* packet, const rtc::PacketOptions& options); @@ -259,6 +270,10 @@ class MediaChannel { void SetExtmapAllowMixed(bool extmap_allow_mixed); bool ExtmapAllowMixed() const; + // Returns `true` if a non-null NetworkInterface pointer is held. + // Must be called on the network thread. + bool HasNetworkInterface() const; + virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0; virtual webrtc::RTCError SetRtpSendParameters( uint32_t ssrc, @@ -376,7 +391,7 @@ struct MediaSenderInfo { // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-nackcount uint32_t nacks_rcvd = 0; // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-targetbitrate - double target_bitrate = 0.0; + absl::optional target_bitrate; int packets_lost = 0; float fraction_lost = 0.0f; int64_t rtt_ms = 0; @@ -389,6 +404,9 @@ struct MediaSenderInfo { // this list, the ReportBlockData::RTCPReportBlock::source_ssrc(), which is // the SSRC of the corresponding outbound RTP stream, is unique. std::vector report_block_datas; + absl::optional active; + // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalpacketsenddelay + webrtc::TimeDelta total_packet_send_delay = webrtc::TimeDelta::Zero(); }; struct MediaReceiverInfo { @@ -435,6 +453,16 @@ struct MediaReceiverInfo { // Jitter (network-related) latency (cumulative). // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbufferdelay double jitter_buffer_delay_seconds = 0.0; + // Target delay for the jitter buffer (cumulative). + // TODO(crbug.com/webrtc/14244): This metric is only implemented for + // audio, it should be implemented for video as well. + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbuffertargetdelay + absl::optional jitter_buffer_target_delay_seconds; + // Minimum obtainable delay for the jitter buffer (cumulative). + // TODO(crbug.com/webrtc/14244): This metric is only implemented for + // audio, it should be implemented for video as well. + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbufferminimumdelay + absl::optional jitter_buffer_minimum_delay_seconds; // Number of observations for cumulative jitter latency. 
// https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbufferemittedcount uint64_t jitter_buffer_emitted_count = 0; @@ -460,7 +488,6 @@ struct VoiceSenderInfo : public MediaSenderInfo { // https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamtrackstats-totalaudioenergy double total_input_energy = 0.0; double total_input_duration = 0.0; - bool typing_noise_detected = false; webrtc::ANAStats ana_statistics; webrtc::AudioProcessingStats apm_statistics; }; @@ -481,7 +508,6 @@ struct VoiceReceiverInfo : public MediaReceiverInfo { uint64_t concealed_samples = 0; uint64_t silent_concealed_samples = 0; uint64_t concealment_events = 0; - double jitter_buffer_target_delay_seconds = 0.0; uint64_t inserted_samples_for_deceleration = 0; uint64_t removed_samples_for_acceleration = 0; uint64_t fec_packets_received = 0; @@ -571,7 +597,6 @@ struct VideoSenderInfo : public MediaSenderInfo { uint64_t total_encode_time_ms = 0; // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalencodedbytestarget uint64_t total_encoded_bytes_target = 0; - uint64_t total_packet_send_delay_ms = 0; bool has_entered_low_resolution = false; absl::optional qp_sum; webrtc::VideoContentType content_type = webrtc::VideoContentType::UNSPECIFIED; @@ -580,6 +605,7 @@ struct VideoSenderInfo : public MediaSenderInfo { uint32_t huge_frames_sent = 0; uint32_t aggregated_huge_frames_sent = 0; absl::optional rid; + absl::optional power_efficient_encoder; }; struct VideoReceiverInfo : public MediaReceiverInfo { @@ -587,6 +613,7 @@ struct VideoReceiverInfo : public MediaReceiverInfo { ~VideoReceiverInfo(); std::vector ssrc_groups; std::string decoder_implementation_name; + absl::optional power_efficient_decoder; int packets_concealed = 0; int firs_sent = 0; int plis_sent = 0; @@ -606,7 +633,11 @@ struct VideoReceiverInfo : public MediaReceiverInfo { uint32_t frames_rendered = 0; absl::optional qp_sum; // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totaldecodetime - uint64_t total_decode_time_ms = 0; + webrtc::TimeDelta total_decode_time = webrtc::TimeDelta::Zero(); + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalprocessingdelay + webrtc::TimeDelta total_processing_delay = webrtc::TimeDelta::Zero(); + webrtc::TimeDelta total_assembly_time = webrtc::TimeDelta::Zero(); + uint32_t frames_assembled_from_multiple_packets = 0; double total_inter_frame_delay = 0; double total_squared_inter_frame_delay = 0; int64_t interframe_delay_max_ms = -1; @@ -774,11 +805,9 @@ struct AudioRecvParameters : RtpParameters {}; class VoiceMediaChannel : public MediaChannel, public Delayable { public: - explicit VoiceMediaChannel(webrtc::TaskQueueBase* network_thread) - : MediaChannel(network_thread) {} - VoiceMediaChannel(const MediaConfig& config, - webrtc::TaskQueueBase* network_thread) - : MediaChannel(config, network_thread) {} + VoiceMediaChannel(webrtc::TaskQueueBase* network_thread, + bool enable_dscp = false) + : MediaChannel(network_thread, enable_dscp) {} ~VoiceMediaChannel() override {} cricket::MediaType media_type() const override; @@ -846,11 +875,9 @@ struct VideoRecvParameters : RtpParameters {}; class VideoMediaChannel : public MediaChannel, public Delayable { public: - explicit VideoMediaChannel(webrtc::TaskQueueBase* network_thread) - : MediaChannel(network_thread) {} - VideoMediaChannel(const MediaConfig& config, - webrtc::TaskQueueBase* network_thread) - : MediaChannel(config, network_thread) {} + explicit VideoMediaChannel(webrtc::TaskQueueBase* network_thread, 
+ bool enable_dscp = false) + : MediaChannel(network_thread, enable_dscp) {} ~VideoMediaChannel() override {} cricket::MediaType media_type() const override; @@ -895,8 +922,11 @@ class VideoMediaChannel : public MediaChannel, public Delayable { std::function callback) = 0; // Clear recordable encoded frame callback for `ssrc` virtual void ClearRecordableEncodedFrameCallback(uint32_t ssrc) = 0; - // Cause generation of a keyframe for `ssrc` - virtual void GenerateKeyFrame(uint32_t ssrc) = 0; + // Request generation of a keyframe for `ssrc` on a receiving channel via + // RTCP feedback. + virtual void RequestRecvKeyFrame(uint32_t ssrc) = 0; + // Cause generation of a keyframe for `ssrc` on a sending channel. + virtual void GenerateSendKeyFrame(uint32_t ssrc) = 0; virtual std::vector GetSources(uint32_t ssrc) const = 0; }; diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_config.h b/TMessagesProj/jni/voip/webrtc/media/base/media_config.h index be314a8aa3..b383c9aa3d 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_config.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_config.h @@ -18,12 +18,16 @@ namespace cricket { struct MediaConfig { // Set DSCP value on packets. This flag comes from the // PeerConnection constraint 'googDscp'. - bool enable_dscp = false; + // TODO(https://crbug.com/1315574): Remove the ability to set it in Chromium + // and delete this flag. + bool enable_dscp = true; // Video-specific config. struct Video { // Enable WebRTC CPU Overuse Detection. This flag comes from the // PeerConnection constraint 'googCpuOveruseDetection'. + // TODO(https://crbug.com/1315569): Remove the ability to set it in Chromium + // and delete this flag. bool enable_cpu_adaptation = true; // Enable WebRTC suspension of video. No video frames will be sent @@ -31,6 +35,8 @@ struct MediaConfig { // flag comes from the PeerConnection constraint // 'googSuspendBelowMinBitrate', and WebRtcVideoChannel copies it // to VideoSendStream::Config::suspend_below_min_bitrate. + // TODO(https://crbug.com/1315564): Remove the ability to set it in Chromium + // and delete this flag. bool suspend_below_min_bitrate = false; // Enable buffering and playout timing smoothing of decoded frames. 
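
The media_channel.h change above replaces the single GenerateKeyFrame(uint32_t ssrc) with a receive-side RTCP keyframe request and a send-side encoder keyframe trigger. A hedged caller-side sketch of how the two halves are meant to be used; RecoverVideo() and the ssrc variable names are invented for illustration, only the two virtual methods come from the hunk:

// Illustrative only: RecoverVideo(), channel, remote_ssrc and local_ssrc are
// invented names; the two virtual methods are the ones declared in the hunk.
#include <cstdint>

#include "media/base/media_channel.h"

void RecoverVideo(cricket::VideoMediaChannel* channel,
                  uint32_t remote_ssrc,
                  uint32_t local_ssrc) {
  // Receive side: ask the remote sender for a keyframe via RTCP feedback,
  // e.g. when a recordable-encoded-frame callback needs a decodable frame.
  channel->RequestRecvKeyFrame(remote_ssrc);
  // Send side: tell the local encoder to emit a keyframe for our own
  // outgoing stream.
  channel->GenerateSendKeyFrame(local_ssrc);
}

Splitting the API makes the direction explicit at the call site, which the old single method left ambiguous.
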
diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_constants.cc b/TMessagesProj/jni/voip/webrtc/media/base/media_constants.cc index 6e996bb2aa..aaa7d04c73 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_constants.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_constants.cc @@ -124,6 +124,8 @@ const char kH265FmtpTierFlag[] = "tier-flag"; const char kH265FmtpLevelId[] = "level-id"; #endif +const char kVP9ProfileId[] = "profile-id"; + const int kDefaultVideoMaxFramerate = 60; const size_t kConferenceMaxNumSpatialLayers = 3; diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_constants.h b/TMessagesProj/jni/voip/webrtc/media/base/media_constants.h index 70f9f3f461..dee3eee3f0 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_constants.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_constants.h @@ -137,6 +137,8 @@ extern const char kH264FmtpSpsPpsIdrInKeyframe[]; extern const char kH264ProfileLevelConstrainedBaseline[]; extern const char kH264ProfileLevelConstrainedHigh[]; +extern const char kVP9ProfileId[]; + #ifndef DISABLE_H265 // RFC 7798 RTP Payload Format for H.265 video RTC_EXPORT extern const char kH265FmtpProfileSpace[]; diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_engine.cc b/TMessagesProj/jni/voip/webrtc/media/base/media_engine.cc index 21c3787382..0efbd71bf7 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_engine.cc @@ -65,8 +65,46 @@ std::vector GetDefaultEnabledRtpHeaderExtensions( return extensions; } +webrtc::RTCError CheckScalabilityModeValues( + const webrtc::RtpParameters& rtp_parameters, + rtc::ArrayView codecs) { + using webrtc::RTCErrorType; + + if (codecs.empty()) { + // This is an audio sender or an extra check in the stack where the codec + // list is not available and we can't check the scalability_mode values. 
+ return webrtc::RTCError::OK(); + } + + for (size_t i = 0; i < rtp_parameters.encodings.size(); ++i) { + if (rtp_parameters.encodings[i].scalability_mode) { + bool scalabilityModeFound = false; + for (const cricket::VideoCodec& codec : codecs) { + for (const auto& scalability_mode : codec.scalability_modes) { + if (ScalabilityModeToString(scalability_mode) == + *rtp_parameters.encodings[i].scalability_mode) { + scalabilityModeFound = true; + break; + } + } + if (scalabilityModeFound) + break; + } + + if (!scalabilityModeFound) { + LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, + "Attempted to set RtpParameters scalabilityMode " + "to an unsupported value for the current codecs."); + } + } + } + + return webrtc::RTCError::OK(); +} + webrtc::RTCError CheckRtpParametersValues( - const webrtc::RtpParameters& rtp_parameters) { + const webrtc::RtpParameters& rtp_parameters, + rtc::ArrayView codecs) { using webrtc::RTCErrorType; for (size_t i = 0; i < rtp_parameters.encodings.size(); ++i) { @@ -106,14 +144,29 @@ webrtc::RTCError CheckRtpParametersValues( "num_temporal_layers to an invalid number."); } } + + if (rtp_parameters.encodings[i].requested_resolution && + rtp_parameters.encodings[i].scale_resolution_down_by) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_RANGE, + "Attempted to set scale_resolution_down_by and " + "requested_resolution simultaniously."); + } } - return webrtc::RTCError::OK(); + return CheckScalabilityModeValues(rtp_parameters, codecs); } webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( const webrtc::RtpParameters& old_rtp_parameters, const webrtc::RtpParameters& rtp_parameters) { + return CheckRtpParametersInvalidModificationAndValues(old_rtp_parameters, + rtp_parameters, {}); +} + +webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( + const webrtc::RtpParameters& old_rtp_parameters, + const webrtc::RtpParameters& rtp_parameters, + rtc::ArrayView codecs) { using webrtc::RTCErrorType; if (rtp_parameters.encodings.size() != old_rtp_parameters.encodings.size()) { LOG_AND_RETURN_ERROR( @@ -148,11 +201,11 @@ webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( "Attempted to set RtpParameters with modified SSRC"); } - return CheckRtpParametersValues(rtp_parameters); + return CheckRtpParametersValues(rtp_parameters, codecs); } CompositeMediaEngine::CompositeMediaEngine( - std::unique_ptr trials, + std::unique_ptr trials, std::unique_ptr audio_engine, std::unique_ptr video_engine) : trials_(std::move(trials)), diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h b/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h index 6f47127f30..e533691751 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h @@ -18,12 +18,13 @@ #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder_factory.h" #include "api/crypto/crypto_options.h" +#include "api/field_trials_view.h" #include "api/rtp_parameters.h" -#include "api/transport/webrtc_key_value_config.h" #include "api/video/video_bitrate_allocator_factory.h" #include "call/audio_state.h" #include "media/base/codec.h" #include "media/base/media_channel.h" +#include "media/base/media_config.h" #include "media/base/video_common.h" #include "rtc_base/system/file_wrapper.h" @@ -36,9 +37,28 @@ class Call; namespace cricket { +// Checks that the scalability_mode value of each encoding is supported by at +// least one video codec of the list. 
If the list is empty, no check is done. +webrtc::RTCError CheckScalabilityModeValues( + const webrtc::RtpParameters& new_parameters, + rtc::ArrayView codecs); + +// Checks the parameters have valid and supported values, and checks parameters +// with CheckScalabilityModeValues(). webrtc::RTCError CheckRtpParametersValues( - const webrtc::RtpParameters& new_parameters); + const webrtc::RtpParameters& new_parameters, + rtc::ArrayView codecs); +// Checks that the immutable values have not changed in new_parameters and +// checks all parameters with CheckRtpParametersValues(). +webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( + const webrtc::RtpParameters& old_parameters, + const webrtc::RtpParameters& new_parameters, + rtc::ArrayView codecs); + +// Checks that the immutable values have not changed in new_parameters and +// checks parameters (except SVC) with CheckRtpParametersValues(). It should +// usually be paired with a call to CheckScalabilityModeValues(). webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( const webrtc::RtpParameters& old_parameters, const webrtc::RtpParameters& new_parameters); @@ -63,7 +83,9 @@ class VoiceEngineInterface : public RtpHeaderExtensionQueryInterface { public: VoiceEngineInterface() = default; virtual ~VoiceEngineInterface() = default; - RTC_DISALLOW_COPY_AND_ASSIGN(VoiceEngineInterface); + + VoiceEngineInterface(const VoiceEngineInterface&) = delete; + VoiceEngineInterface& operator=(const VoiceEngineInterface&) = delete; // Initialization // Starts the engine. @@ -97,7 +119,9 @@ class VideoEngineInterface : public RtpHeaderExtensionQueryInterface { public: VideoEngineInterface() = default; virtual ~VideoEngineInterface() = default; - RTC_DISALLOW_COPY_AND_ASSIGN(VideoEngineInterface); + + VideoEngineInterface(const VideoEngineInterface&) = delete; + VideoEngineInterface& operator=(const VideoEngineInterface&) = delete; // Creates a video media channel, paired with the specified voice channel. // Returns NULL on failure. @@ -109,8 +133,20 @@ class VideoEngineInterface : public RtpHeaderExtensionQueryInterface { webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) = 0; + // Retrieve list of supported codecs. virtual std::vector send_codecs() const = 0; virtual std::vector recv_codecs() const = 0; + // As above, but if include_rtx is false, don't include RTX codecs. + // TODO(bugs.webrtc.org/13931): Remove default implementation once + // upstream subclasses have converted. + virtual std::vector send_codecs(bool include_rtx) const { + RTC_DCHECK(include_rtx); + return send_codecs(); + } + virtual std::vector recv_codecs(bool include_rtx) const { + RTC_DCHECK(include_rtx); + return recv_codecs(); + } }; // MediaEngineInterface is an abstraction of a media engine which can be @@ -132,10 +168,10 @@ class MediaEngineInterface { // CompositeMediaEngine constructs a MediaEngine from separate // voice and video engine classes. -// Optionally owns a WebRtcKeyValueConfig trials map. +// Optionally owns a FieldTrialsView trials map. 
class CompositeMediaEngine : public MediaEngineInterface { public: - CompositeMediaEngine(std::unique_ptr trials, + CompositeMediaEngine(std::unique_ptr trials, std::unique_ptr audio_engine, std::unique_ptr video_engine); CompositeMediaEngine(std::unique_ptr audio_engine, @@ -151,7 +187,7 @@ class CompositeMediaEngine : public MediaEngineInterface { const VideoEngineInterface& video() const override; private: - const std::unique_ptr trials_; + const std::unique_ptr trials_; const std::unique_ptr voice_engine_; const std::unique_ptr video_engine_; }; diff --git a/TMessagesProj/jni/voip/webrtc/media/base/stream_params.h b/TMessagesProj/jni/voip/webrtc/media/base/stream_params.h index 1f46469cb5..c9c8a09592 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/stream_params.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/stream_params.h @@ -54,7 +54,6 @@ #include "absl/algorithm/container.h" #include "media/base/rid_description.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/unique_id_generator.h" namespace cricket { diff --git a/TMessagesProj/jni/voip/webrtc/media/base/test_utils.h b/TMessagesProj/jni/voip/webrtc/media/base/test_utils.h index 22bda4f12a..fb18485d32 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/test_utils.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/test_utils.h @@ -37,10 +37,12 @@ inline std::vector MakeVector(const T a[], size_t s) { // Checks whether `codecs` contains `codec`; checks using Codec::Matches(). template -bool ContainsMatchingCodec(const std::vector& codecs, const C& codec) { +bool ContainsMatchingCodec(const std::vector& codecs, + const C& codec, + const webrtc::FieldTrialsView* field_trials) { typename std::vector::const_iterator it; for (it = codecs.begin(); it != codecs.end(); ++it) { - if (it->Matches(codec)) { + if (it->Matches(codec, field_trials)) { return true; } } diff --git a/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.cc b/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.cc index 4785dfcfe2..149071d153 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.cc @@ -20,6 +20,7 @@ #include "media/base/video_common.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/field_trial.h" @@ -121,6 +122,15 @@ Fraction FindScale(int input_width, return best_scale; } + +absl::optional> Swap( + const absl::optional>& in) { + if (!in) { + return absl::nullopt; + } + return std::make_pair(in->second, in->first); +} + } // namespace namespace cricket { @@ -132,7 +142,7 @@ VideoAdapter::VideoAdapter(int source_resolution_alignment) adaption_changes_(0), previous_width_(0), previous_height_(0), - variable_start_scale_factor_(webrtc::field_trial::IsEnabled( + variable_start_scale_factor_(!webrtc::field_trial::IsDisabled( "WebRTC-Video-VariableStartScaleFactor")), source_resolution_alignment_(source_resolution_alignment), resolution_alignment_(source_resolution_alignment), @@ -146,8 +156,8 @@ VideoAdapter::~VideoAdapter() {} bool VideoAdapter::DropFrame(int64_t in_timestamp_ns) { int max_fps = max_framerate_request_; - if (max_fps_) - max_fps = std::min(max_fps, *max_fps_); + if (output_format_request_.max_fps) + max_fps = std::min(max_fps, *output_format_request_.max_fps); framerate_controller_.SetMaxFramerate(max_fps); return framerate_controller_.ShouldDropFrame(in_timestamp_ns); @@ -171,13 +181,15 @@ bool 
VideoAdapter::AdaptFrameResolution(int in_width, // orientation. absl::optional> target_aspect_ratio; if (in_width > in_height) { - target_aspect_ratio = target_landscape_aspect_ratio_; - if (max_landscape_pixel_count_) - max_pixel_count = std::min(max_pixel_count, *max_landscape_pixel_count_); + target_aspect_ratio = output_format_request_.target_landscape_aspect_ratio; + if (output_format_request_.max_landscape_pixel_count) + max_pixel_count = std::min( + max_pixel_count, *output_format_request_.max_landscape_pixel_count); } else { - target_aspect_ratio = target_portrait_aspect_ratio_; - if (max_portrait_pixel_count_) - max_pixel_count = std::min(max_pixel_count, *max_portrait_pixel_count_); + target_aspect_ratio = output_format_request_.target_portrait_aspect_ratio; + if (output_format_request_.max_portrait_pixel_count) + max_pixel_count = std::min( + max_pixel_count, *output_format_request_.max_portrait_pixel_count); } int target_pixel_count = @@ -195,7 +207,7 @@ bool VideoAdapter::AdaptFrameResolution(int in_width, << " Input: " << in_width << "x" << in_height << " timestamp: " << in_timestamp_ns << " Output fps: " << max_framerate_request_ << "/" - << max_fps_.value_or(-1) + << output_format_request_.max_fps.value_or(-1) << " alignment: " << resolution_alignment_; } @@ -249,7 +261,7 @@ bool VideoAdapter::AdaptFrameResolution(int in_width, << " Scale: " << scale.numerator << "/" << scale.denominator << " Output: " << *out_width << "x" << *out_height << " fps: " << max_framerate_request_ << "/" - << max_fps_.value_or(-1) + << output_format_request_.max_fps.value_or(-1) << " alignment: " << resolution_alignment_; } @@ -300,11 +312,27 @@ void VideoAdapter::OnOutputFormatRequest( const absl::optional& max_portrait_pixel_count, const absl::optional& max_fps) { webrtc::MutexLock lock(&mutex_); - target_landscape_aspect_ratio_ = target_landscape_aspect_ratio; - max_landscape_pixel_count_ = max_landscape_pixel_count; - target_portrait_aspect_ratio_ = target_portrait_aspect_ratio; - max_portrait_pixel_count_ = max_portrait_pixel_count; - max_fps_ = max_fps; + + OutputFormatRequest request = { + .target_landscape_aspect_ratio = target_landscape_aspect_ratio, + .max_landscape_pixel_count = max_landscape_pixel_count, + .target_portrait_aspect_ratio = target_portrait_aspect_ratio, + .max_portrait_pixel_count = max_portrait_pixel_count, + .max_fps = max_fps}; + + if (stashed_output_format_request_) { + // Save the output format request for later use in case the encoder making + // this call would become active, because currently all active encoders use + // requested_resolution instead. 
+ stashed_output_format_request_ = request; + RTC_LOG(LS_INFO) << "Stashing OnOutputFormatRequest: " + << stashed_output_format_request_->ToString(); + } else { + output_format_request_ = request; + RTC_LOG(LS_INFO) << "Setting output_format_request_: " + << output_format_request_.ToString(); + } + framerate_controller_.Reset(); } @@ -317,6 +345,60 @@ void VideoAdapter::OnSinkWants(const rtc::VideoSinkWants& sink_wants) { max_framerate_request_ = sink_wants.max_framerate_fps; resolution_alignment_ = cricket::LeastCommonMultiple( source_resolution_alignment_, sink_wants.resolution_alignment); + + if (!sink_wants.aggregates) { + RTC_LOG(LS_WARNING) + << "These should always be created by VideoBroadcaster!"; + return; + } + + // If requested_resolution is used, and there are no active encoders + // that are NOT using requested_resolution (aka newapi), then override + // calls to OnOutputFormatRequest and use values from requested_resolution + // instead (combined with qualityscaling based on pixel counts above). + if (webrtc::field_trial::IsDisabled( + "WebRTC-Video-RequestedResolutionOverrideOutputFormatRequest")) { + // kill-switch... + return; + } + + if (!sink_wants.requested_resolution) { + if (stashed_output_format_request_) { + // because current active_output_format_request is based on + // requested_resolution logic, while current encoder(s) doesn't want that, + // we have to restore the stashed request. + RTC_LOG(LS_INFO) << "Unstashing OnOutputFormatRequest: " + << stashed_output_format_request_->ToString(); + output_format_request_ = *stashed_output_format_request_; + stashed_output_format_request_.reset(); + } + return; + } + + if (sink_wants.aggregates->any_active_without_requested_resolution) { + return; + } + + if (!stashed_output_format_request_) { + // The active output format request is about to be rewritten by + // request_resolution. We need to save it for later use in case the encoder + // which doesn't use request_resolution logic become active in the future. + stashed_output_format_request_ = output_format_request_; + RTC_LOG(LS_INFO) << "Stashing OnOutputFormatRequest: " + << stashed_output_format_request_->ToString(); + } + + auto res = *sink_wants.requested_resolution; + auto pixel_count = res.width * res.height; + output_format_request_.target_landscape_aspect_ratio = + std::make_pair(res.width, res.height); + output_format_request_.max_landscape_pixel_count = pixel_count; + output_format_request_.target_portrait_aspect_ratio = + std::make_pair(res.height, res.width); + output_format_request_.max_portrait_pixel_count = pixel_count; + output_format_request_.max_fps = max_framerate_request_; + RTC_LOG(LS_INFO) << "Setting output_format_request_ based on sink_wants: " + << output_format_request_.ToString(); } int VideoAdapter::GetTargetPixels() const { @@ -326,10 +408,11 @@ int VideoAdapter::GetTargetPixels() const { float VideoAdapter::GetMaxFramerate() const { webrtc::MutexLock lock(&mutex_); - // Minimum of `max_fps_` and `max_framerate_request_` is used to throttle - // frame-rate. - int framerate = std::min(max_framerate_request_, - max_fps_.value_or(max_framerate_request_)); + // Minimum of `output_format_request_.max_fps` and `max_framerate_request_` is + // used to throttle frame-rate. 
+ int framerate = + std::min(max_framerate_request_, + output_format_request_.max_fps.value_or(max_framerate_request_)); if (framerate == std::numeric_limits::max()) { return std::numeric_limits::infinity(); } else { @@ -337,4 +420,49 @@ float VideoAdapter::GetMaxFramerate() const { } } +std::string VideoAdapter::OutputFormatRequest::ToString() const { + rtc::StringBuilder oss; + oss << "[ "; + if (target_landscape_aspect_ratio == Swap(target_portrait_aspect_ratio) && + max_landscape_pixel_count == max_portrait_pixel_count) { + if (target_landscape_aspect_ratio) { + oss << target_landscape_aspect_ratio->first << "x" + << target_landscape_aspect_ratio->second; + } else { + oss << "unset-resolution"; + } + if (max_landscape_pixel_count) { + oss << " max_pixel_count: " << *max_landscape_pixel_count; + } + } else { + oss << "[ landscape: "; + if (target_landscape_aspect_ratio) { + oss << target_landscape_aspect_ratio->first << "x" + << target_landscape_aspect_ratio->second; + } else { + oss << "unset"; + } + if (max_landscape_pixel_count) { + oss << " max_pixel_count: " << *max_landscape_pixel_count; + } + oss << " ] [ portrait: "; + if (target_portrait_aspect_ratio) { + oss << target_portrait_aspect_ratio->first << "x" + << target_portrait_aspect_ratio->second; + } + if (max_portrait_pixel_count) { + oss << " max_pixel_count: " << *max_portrait_pixel_count; + } + oss << " ]"; + } + oss << " max_fps: "; + if (max_fps) { + oss << *max_fps; + } else { + oss << "unset"; + } + oss << " ]"; + return oss.Release(); +} + } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.h b/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.h index 1bae10d419..b3e69c492b 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.h @@ -13,13 +13,13 @@ #include +#include #include #include "absl/types/optional.h" #include "api/video/video_source_interface.h" #include "common_video/framerate_controller.h" #include "media/base/video_common.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/thread_annotations.h" @@ -38,6 +38,9 @@ class RTC_EXPORT VideoAdapter { explicit VideoAdapter(int source_resolution_alignment); virtual ~VideoAdapter(); + VideoAdapter(const VideoAdapter&) = delete; + VideoAdapter& operator=(const VideoAdapter&) = delete; + // Return the adapted resolution and cropping parameters given the // input resolution. The input frame should first be cropped, then // scaled to the final output resolution. Returns true if the frame @@ -131,23 +134,37 @@ class RTC_EXPORT VideoAdapter { // Max number of pixels/fps requested via calls to OnOutputFormatRequest, // OnResolutionFramerateRequest respectively. // The adapted output format is the minimum of these. - absl::optional> target_landscape_aspect_ratio_ - RTC_GUARDED_BY(mutex_); - absl::optional max_landscape_pixel_count_ RTC_GUARDED_BY(mutex_); - absl::optional> target_portrait_aspect_ratio_ - RTC_GUARDED_BY(mutex_); - absl::optional max_portrait_pixel_count_ RTC_GUARDED_BY(mutex_); - absl::optional max_fps_ RTC_GUARDED_BY(mutex_); + struct OutputFormatRequest { + absl::optional> target_landscape_aspect_ratio; + absl::optional max_landscape_pixel_count; + absl::optional> target_portrait_aspect_ratio; + absl::optional max_portrait_pixel_count; + absl::optional max_fps; + + // For logging. 
+ std::string ToString() const; + }; + + OutputFormatRequest output_format_request_ RTC_GUARDED_BY(mutex_); int resolution_request_target_pixel_count_ RTC_GUARDED_BY(mutex_); int resolution_request_max_pixel_count_ RTC_GUARDED_BY(mutex_); int max_framerate_request_ RTC_GUARDED_BY(mutex_); + // Stashed OutputFormatRequest that is used to save value of + // OnOutputFormatRequest in case all active encoders are using + // requested_resolution. I.e when all active encoders are using + // requested_resolution, the call to OnOutputFormatRequest is ignored + // and the value from requested_resolution is used instead (to scale/crop + // frame). This allows for an application to only use + // RtpEncodingParameters::request_resolution and get the same behavior as if + // it had used VideoAdapter::OnOutputFormatRequest. + absl::optional stashed_output_format_request_ + RTC_GUARDED_BY(mutex_); + webrtc::FramerateController framerate_controller_ RTC_GUARDED_BY(mutex_); // The critical section to protect the above variables. mutable webrtc::Mutex mutex_; - - RTC_DISALLOW_COPY_AND_ASSIGN(VideoAdapter); }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/base/video_broadcaster.cc b/TMessagesProj/jni/voip/webrtc/media/base/video_broadcaster.cc index 1167d7fb32..43c17734e3 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/video_broadcaster.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/video_broadcaster.cc @@ -10,6 +10,7 @@ #include "media/base/video_broadcaster.h" +#include #include #include "absl/types/optional.h" @@ -123,7 +124,29 @@ void VideoBroadcaster::UpdateWants() { VideoSinkWants wants; wants.rotation_applied = false; wants.resolution_alignment = 1; + wants.aggregates.emplace(VideoSinkWants::Aggregates()); + wants.is_active = false; + + // TODO(webrtc:14451) : I think it makes sense to always + // "ignore" encoders that are not active. But that would + // probably require a controlled roll out with a field trials? + // To play it safe, only ignore inactive encoders is there is an + // active encoder using the new api (requested_resolution), + // this means that there is only a behavioural change when using new + // api. + bool ignore_inactive_encoders_old_api = false; for (auto& sink : sink_pairs()) { + if (sink.wants.is_active && sink.wants.requested_resolution.has_value()) { + ignore_inactive_encoders_old_api = true; + break; + } + } + + for (auto& sink : sink_pairs()) { + if (!sink.wants.is_active && + (sink.wants.requested_resolution || ignore_inactive_encoders_old_api)) { + continue; + } // wants.rotation_applied == ANY(sink.wants.rotation_applied) if (sink.wants.rotation_applied) { wants.rotation_applied = true; @@ -147,6 +170,25 @@ void VideoBroadcaster::UpdateWants() { } wants.resolution_alignment = cricket::LeastCommonMultiple( wants.resolution_alignment, sink.wants.resolution_alignment); + + // Pick MAX(requested_resolution) since the actual can be downscaled + // in encoder instead. 
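// Illustrative sketch (not taken from the patch; simplified Resolution type):
// VideoBroadcaster::UpdateWants combines per-sink requested_resolution values
// by taking the component-wise maximum, on the assumption that an encoder can
// always downscale from the largest request but never upscale.
#include <algorithm>
#include <optional>

struct Resolution {
  int width = 0;
  int height = 0;
};

void CombineRequestedResolution(std::optional<Resolution>& aggregate,
                                const std::optional<Resolution>& sink_request) {
  if (!sink_request)
    return;
  if (!aggregate) {
    aggregate = sink_request;
    return;
  }
  aggregate->width = std::max(aggregate->width, sink_request->width);
  aggregate->height = std::max(aggregate->height, sink_request->height);
}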
+ if (sink.wants.requested_resolution) { + if (!wants.requested_resolution) { + wants.requested_resolution = sink.wants.requested_resolution; + } else { + wants.requested_resolution->width = + std::max(wants.requested_resolution->width, + sink.wants.requested_resolution->width); + wants.requested_resolution->height = + std::max(wants.requested_resolution->height, + sink.wants.requested_resolution->height); + } + } else if (sink.wants.is_active) { + wants.aggregates->any_active_without_requested_resolution = true; + } + + wants.is_active |= sink.wants.is_active; } if (wants.target_pixel_count && diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.cc b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.cc index 7b9174d961..48a8b12092 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.cc @@ -13,11 +13,13 @@ #include #include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" #include "api/call/audio_sink.h" #include "modules/rtp_rtcp/source/rtp_util.h" #include "rtc_base/checks.h" #include "rtc_base/gunit.h" #include "rtc_base/thread.h" +#include "video/config/encoder_stream_factory.h" namespace cricket { @@ -73,16 +75,16 @@ webrtc::AudioSendStream::Stats FakeAudioSendStream::GetStats( FakeAudioReceiveStream::FakeAudioReceiveStream( int id, - const webrtc::AudioReceiveStream::Config& config) + const webrtc::AudioReceiveStreamInterface::Config& config) : id_(id), config_(config) {} -const webrtc::AudioReceiveStream::Config& FakeAudioReceiveStream::GetConfig() - const { +const webrtc::AudioReceiveStreamInterface::Config& +FakeAudioReceiveStream::GetConfig() const { return config_; } void FakeAudioReceiveStream::SetStats( - const webrtc::AudioReceiveStream::Stats& stats) { + const webrtc::AudioReceiveStreamInterface::Stats& stats) { stats_ = stats; } @@ -109,10 +111,7 @@ void FakeAudioReceiveStream::SetDecoderMap( config_.decoder_map = std::move(decoder_map); } -void FakeAudioReceiveStream::SetUseTransportCcAndNackHistory( - bool use_transport_cc, - int history_ms) { - config_.rtp.transport_cc = use_transport_cc; +void FakeAudioReceiveStream::SetNackHistory(int history_ms) { config_.rtp.nack.rtp_history_ms = history_ms; } @@ -130,7 +129,17 @@ void FakeAudioReceiveStream::SetRtpExtensions( config_.rtp.extensions = std::move(extensions); } -webrtc::AudioReceiveStream::Stats FakeAudioReceiveStream::GetStats( +const std::vector& +FakeAudioReceiveStream::GetRtpExtensions() const { + return config_.rtp.extensions; +} + +webrtc::RtpHeaderExtensionMap FakeAudioReceiveStream::GetRtpExtensionMap() + const { + return webrtc::RtpHeaderExtensionMap(config_.rtp.extensions); +} + +webrtc::AudioReceiveStreamInterface::Stats FakeAudioReceiveStream::GetStats( bool get_and_clear_legacy_stats) const { return stats_; } @@ -233,8 +242,24 @@ void FakeVideoSendStream::OnFrame(const webrtc::VideoFrame& frame) { if (!last_frame_ || frame.width() != last_frame_->width() || frame.height() != last_frame_->height() || frame.rotation() != last_frame_->rotation()) { - video_streams_ = encoder_config_.video_stream_factory->CreateEncoderStreams( - frame.width(), frame.height(), encoder_config_); + if (encoder_config_.video_stream_factory) { + // Note: only tests set their own EncoderStreamFactory... 
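// Illustrative sketch (not taken from the patch; hypothetical names): the fake
// send stream now only honours a stream factory that a test injected
// explicitly; otherwise it falls back to the production default factory so
// the generated stream layout matches the real pipeline. The pattern, reduced
// to its essentials:
#include <vector>

struct StreamLayout {};  // stand-in for webrtc::VideoStream

class StreamFactory {
 public:
  virtual ~StreamFactory() = default;
  virtual std::vector<StreamLayout> Create(int width, int height) = 0;
};

std::vector<StreamLayout> CreateStreams(StreamFactory* injected_by_test,
                                        StreamFactory& production_default,
                                        int width, int height) {
  // Prefer the test-provided factory; otherwise use the default one.
  StreamFactory& factory =
      injected_by_test ? *injected_by_test : production_default;
  return factory.Create(width, height);
}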
+ video_streams_ = + encoder_config_.video_stream_factory->CreateEncoderStreams( + frame.width(), frame.height(), encoder_config_); + } else { + webrtc::VideoEncoder::EncoderInfo encoder_info; + rtc::scoped_refptr< + webrtc::VideoEncoderConfig::VideoStreamFactoryInterface> + factory = rtc::make_ref_counted( + encoder_config_.video_format.name, encoder_config_.max_qp, + encoder_config_.content_type == + webrtc::VideoEncoderConfig::ContentType::kScreen, + encoder_config_.legacy_conference_mode, encoder_info); + + video_streams_ = factory->CreateEncoderStreams( + frame.width(), frame.height(), encoder_config_); + } } last_frame_ = frame; } @@ -257,9 +282,23 @@ void FakeVideoSendStream::ReconfigureVideoEncoder( } else { width = height = 0; } - video_streams_ = - config.video_stream_factory->CreateEncoderStreams(width, height, config); - if (config.encoder_specific_settings != NULL) { + if (config.video_stream_factory) { + // Note: only tests set their own EncoderStreamFactory... + video_streams_ = config.video_stream_factory->CreateEncoderStreams( + width, height, config); + } else { + webrtc::VideoEncoder::EncoderInfo encoder_info; + rtc::scoped_refptr + factory = rtc::make_ref_counted( + config.video_format.name, config.max_qp, + config.content_type == + webrtc::VideoEncoderConfig::ContentType::kScreen, + config.legacy_conference_mode, encoder_info); + + video_streams_ = factory->CreateEncoderStreams(width, height, config); + } + + if (config.encoder_specific_settings != nullptr) { const unsigned char num_temporal_layers = static_cast( video_streams_.back().num_temporal_layers.value_or(1)); if (config_.rtp.payload_name == "VP8") { @@ -277,8 +316,6 @@ void FakeVideoSendStream::ReconfigureVideoEncoder( num_temporal_layers; } } else if (config_.rtp.payload_name == "H264") { - config.encoder_specific_settings->FillVideoCodecH264( - &codec_specific_settings_.h264); codec_specific_settings_.h264.numberOfTemporalLayers = num_temporal_layers; } else { @@ -286,7 +323,7 @@ void FakeVideoSendStream::ReconfigureVideoEncoder( << config_.rtp.payload_name; } } - codec_settings_set_ = config.encoder_specific_settings != NULL; + codec_settings_set_ = config.encoder_specific_settings != nullptr; encoder_config_ = std::move(config); ++num_encoder_reconfigurations_; } @@ -355,11 +392,11 @@ void FakeVideoSendStream::InjectVideoSinkWants( } FakeVideoReceiveStream::FakeVideoReceiveStream( - webrtc::VideoReceiveStream::Config config) + webrtc::VideoReceiveStreamInterface::Config config) : config_(std::move(config)), receiving_(false) {} -const webrtc::VideoReceiveStream::Config& FakeVideoReceiveStream::GetConfig() - const { +const webrtc::VideoReceiveStreamInterface::Config& +FakeVideoReceiveStream::GetConfig() const { return config_; } @@ -371,7 +408,8 @@ void FakeVideoReceiveStream::InjectFrame(const webrtc::VideoFrame& frame) { config_.renderer->OnFrame(frame); } -webrtc::VideoReceiveStream::Stats FakeVideoReceiveStream::GetStats() const { +webrtc::VideoReceiveStreamInterface::Stats FakeVideoReceiveStream::GetStats() + const { return stats_; } @@ -380,6 +418,11 @@ void FakeVideoReceiveStream::SetRtpExtensions( config_.rtp.extensions = std::move(extensions); } +webrtc::RtpHeaderExtensionMap FakeVideoReceiveStream::GetRtpExtensionMap() + const { + return webrtc::RtpHeaderExtensionMap(config_.rtp.extensions); +} + void FakeVideoReceiveStream::Start() { receiving_ = true; } @@ -389,44 +432,46 @@ void FakeVideoReceiveStream::Stop() { } void FakeVideoReceiveStream::SetStats( - const webrtc::VideoReceiveStream::Stats& 
stats) { + const webrtc::VideoReceiveStreamInterface::Stats& stats) { stats_ = stats; } FakeFlexfecReceiveStream::FakeFlexfecReceiveStream( - const webrtc::FlexfecReceiveStream::Config& config) - : config_(config) {} + const webrtc::FlexfecReceiveStream::Config config) + : config_(std::move(config)) {} void FakeFlexfecReceiveStream::SetRtpExtensions( std::vector extensions) { config_.rtp.extensions = std::move(extensions); } +webrtc::RtpHeaderExtensionMap FakeFlexfecReceiveStream::GetRtpExtensionMap() + const { + return webrtc::RtpHeaderExtensionMap(config_.rtp.extensions); +} + const webrtc::FlexfecReceiveStream::Config& FakeFlexfecReceiveStream::GetConfig() const { return config_; } -// TODO(brandtr): Implement when the stats have been designed. -webrtc::FlexfecReceiveStream::Stats FakeFlexfecReceiveStream::GetStats() const { - return webrtc::FlexfecReceiveStream::Stats(); -} - void FakeFlexfecReceiveStream::OnRtpPacket(const webrtc::RtpPacketReceived&) { RTC_DCHECK_NOTREACHED() << "Not implemented."; } -FakeCall::FakeCall() - : FakeCall(rtc::Thread::Current(), rtc::Thread::Current()) {} +FakeCall::FakeCall(webrtc::test::ScopedKeyValueConfig* field_trials) + : FakeCall(rtc::Thread::Current(), rtc::Thread::Current(), field_trials) {} FakeCall::FakeCall(webrtc::TaskQueueBase* worker_thread, - webrtc::TaskQueueBase* network_thread) + webrtc::TaskQueueBase* network_thread, + webrtc::test::ScopedKeyValueConfig* field_trials) : network_thread_(network_thread), worker_thread_(worker_thread), audio_network_state_(webrtc::kNetworkUp), video_network_state_(webrtc::kNetworkUp), num_created_send_streams_(0), - num_created_receive_streams_(0) {} + num_created_receive_streams_(0), + trials_(field_trials ? field_trials : &fallback_trials_) {} FakeCall::~FakeCall() { EXPECT_EQ(0u, video_send_streams_.size()); @@ -521,8 +566,8 @@ void FakeCall::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { } } -webrtc::AudioReceiveStream* FakeCall::CreateAudioReceiveStream( - const webrtc::AudioReceiveStream::Config& config) { +webrtc::AudioReceiveStreamInterface* FakeCall::CreateAudioReceiveStream( + const webrtc::AudioReceiveStreamInterface::Config& config) { audio_receive_streams_.push_back( new FakeAudioReceiveStream(next_stream_id_++, config)); ++num_created_receive_streams_; @@ -530,7 +575,7 @@ webrtc::AudioReceiveStream* FakeCall::CreateAudioReceiveStream( } void FakeCall::DestroyAudioReceiveStream( - webrtc::AudioReceiveStream* receive_stream) { + webrtc::AudioReceiveStreamInterface* receive_stream) { auto it = absl::c_find(audio_receive_streams_, static_cast(receive_stream)); if (it == audio_receive_streams_.end()) { @@ -562,8 +607,8 @@ void FakeCall::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { } } -webrtc::VideoReceiveStream* FakeCall::CreateVideoReceiveStream( - webrtc::VideoReceiveStream::Config config) { +webrtc::VideoReceiveStreamInterface* FakeCall::CreateVideoReceiveStream( + webrtc::VideoReceiveStreamInterface::Config config) { video_receive_streams_.push_back( new FakeVideoReceiveStream(std::move(config))); ++num_created_receive_streams_; @@ -571,7 +616,7 @@ webrtc::VideoReceiveStream* FakeCall::CreateVideoReceiveStream( } void FakeCall::DestroyVideoReceiveStream( - webrtc::VideoReceiveStream* receive_stream) { + webrtc::VideoReceiveStreamInterface* receive_stream) { auto it = absl::c_find(video_receive_streams_, static_cast(receive_stream)); if (it == video_receive_streams_.end()) { @@ -583,8 +628,9 @@ void FakeCall::DestroyVideoReceiveStream( } 
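// Illustrative sketch (not taken from the patch): several of the fake-stream
// constructors above now take their Config as a by-value "sink" parameter and
// move it into the member, instead of copying from a const reference. The
// usual shape of that idiom:
#include <string>
#include <utility>

struct Config {
  std::string payload_name;
};

class Stream {
 public:
  explicit Stream(Config config) : config_(std::move(config)) {}

 private:
  Config config_;
};
// A caller that no longer needs its Config can hand it over without a copy:
//   Config c; c.payload_name = "flexfec-03";
//   Stream stream(std::move(c));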
webrtc::FlexfecReceiveStream* FakeCall::CreateFlexfecReceiveStream( - const webrtc::FlexfecReceiveStream::Config& config) { - FakeFlexfecReceiveStream* fake_stream = new FakeFlexfecReceiveStream(config); + const webrtc::FlexfecReceiveStream::Config config) { + FakeFlexfecReceiveStream* fake_stream = + new FakeFlexfecReceiveStream(std::move(config)); flexfec_receive_streams_.push_back(fake_stream); ++num_created_receive_streams_; return fake_stream; @@ -686,14 +732,26 @@ void FakeCall::SignalChannelNetworkState(webrtc::MediaType media, void FakeCall::OnAudioTransportOverheadChanged( int transport_overhead_per_packet) {} -void FakeCall::OnLocalSsrcUpdated(webrtc::AudioReceiveStream& stream, +void FakeCall::OnLocalSsrcUpdated(webrtc::AudioReceiveStreamInterface& stream, uint32_t local_ssrc) { auto& fake_stream = static_cast(stream); fake_stream.SetLocalSsrc(local_ssrc); } -void FakeCall::OnUpdateSyncGroup(webrtc::AudioReceiveStream& stream, - const std::string& sync_group) { +void FakeCall::OnLocalSsrcUpdated(webrtc::VideoReceiveStreamInterface& stream, + uint32_t local_ssrc) { + auto& fake_stream = static_cast(stream); + fake_stream.SetLocalSsrc(local_ssrc); +} + +void FakeCall::OnLocalSsrcUpdated(webrtc::FlexfecReceiveStream& stream, + uint32_t local_ssrc) { + auto& fake_stream = static_cast(stream); + fake_stream.SetLocalSsrc(local_ssrc); +} + +void FakeCall::OnUpdateSyncGroup(webrtc::AudioReceiveStreamInterface& stream, + absl::string_view sync_group) { auto& fake_stream = static_cast(stream); fake_stream.SetSyncGroup(sync_group); } diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h index e732379cbd..65ee0d5b17 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h @@ -13,9 +13,9 @@ // // webrtc::Call // webrtc::AudioSendStream -// webrtc::AudioReceiveStream +// webrtc::AudioReceiveStreamInterface // webrtc::VideoSendStream -// webrtc::VideoReceiveStream +// webrtc::VideoReceiveStreamInterface #ifndef MEDIA_ENGINE_FAKE_WEBRTC_CALL_H_ #define MEDIA_ENGINE_FAKE_WEBRTC_CALL_H_ @@ -23,8 +23,10 @@ #include #include #include +#include #include +#include "absl/strings/string_view.h" #include "api/transport/field_trial_based_config.h" #include "api/video/video_frame.h" #include "call/audio_receive_stream.h" @@ -36,6 +38,7 @@ #include "call/video_send_stream.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/buffer.h" +#include "test/scoped_key_value_config.h" namespace cricket { class FakeAudioSendStream final : public webrtc::AudioSendStream { @@ -81,15 +84,16 @@ class FakeAudioSendStream final : public webrtc::AudioSendStream { bool muted_ = false; }; -class FakeAudioReceiveStream final : public webrtc::AudioReceiveStream { +class FakeAudioReceiveStream final + : public webrtc::AudioReceiveStreamInterface { public: explicit FakeAudioReceiveStream( int id, - const webrtc::AudioReceiveStream::Config& config); + const webrtc::AudioReceiveStreamInterface::Config& config); int id() const { return id_; } - const webrtc::AudioReceiveStream::Config& GetConfig() const; - void SetStats(const webrtc::AudioReceiveStream::Stats& stats); + const webrtc::AudioReceiveStreamInterface::Config& GetConfig() const; + void SetStats(const webrtc::AudioReceiveStreamInterface::Stats& stats); int received_packets() const { return received_packets_; } bool VerifyLastPacket(const uint8_t* data, size_t length) const; const 
webrtc::AudioSinkInterface* sink() const { return sink_; } @@ -104,14 +108,15 @@ class FakeAudioReceiveStream final : public webrtc::AudioReceiveStream { config_.rtp.local_ssrc = local_ssrc; } - void SetSyncGroup(const std::string& sync_group) { - config_.sync_group = sync_group; + void SetSyncGroup(absl::string_view sync_group) { + config_.sync_group = std::string(sync_group); } - private: - const webrtc::ReceiveStream::RtpConfig& rtp_config() const override { - return config_.rtp; + bool transport_cc() const override { return config_.rtp.transport_cc; } + void SetTransportCc(bool transport_cc) override { + config_.rtp.transport_cc = transport_cc; } + uint32_t remote_ssrc() const override { return config_.rtp.remote_ssrc; } void Start() override { started_ = true; } void Stop() override { started_ = false; } bool IsRunning() const override { return started_; } @@ -120,14 +125,15 @@ class FakeAudioReceiveStream final : public webrtc::AudioReceiveStream { override; void SetDecoderMap( std::map decoder_map) override; - void SetUseTransportCcAndNackHistory(bool use_transport_cc, - int history_ms) override; + void SetNackHistory(int history_ms) override; void SetNonSenderRttMeasurement(bool enabled) override; void SetFrameDecryptor(rtc::scoped_refptr frame_decryptor) override; void SetRtpExtensions(std::vector extensions) override; + const std::vector& GetRtpExtensions() const override; + webrtc::RtpHeaderExtensionMap GetRtpExtensionMap() const override; - webrtc::AudioReceiveStream::Stats GetStats( + webrtc::AudioReceiveStreamInterface::Stats GetStats( bool get_and_clear_legacy_stats) const override; void SetSink(webrtc::AudioSinkInterface* sink) override; void SetGain(float gain) override; @@ -142,9 +148,10 @@ class FakeAudioReceiveStream final : public webrtc::AudioReceiveStream { return std::vector(); } + private: int id_ = -1; - webrtc::AudioReceiveStream::Config config_; - webrtc::AudioReceiveStream::Stats stats_; + webrtc::AudioReceiveStreamInterface::Config config_; + webrtc::AudioReceiveStreamInterface::Stats stats_; int received_packets_ = 0; webrtc::AudioSinkInterface* sink_ = nullptr; float gain_ = 1.0f; @@ -187,14 +194,14 @@ class FakeVideoSendStream final rtc::VideoSourceInterface* source() const { return source_; } + void GenerateKeyFrame() override {} private: // rtc::VideoSinkInterface implementation. void OnFrame(const webrtc::VideoFrame& frame) override; // webrtc::VideoSendStream implementation. 
- void UpdateActiveSimulcastLayers( - const std::vector active_layers) override; + void UpdateActiveSimulcastLayers(std::vector active_layers) override; void Start() override; void Stop() override; bool started() override { return IsSending(); } @@ -229,17 +236,19 @@ class FakeVideoSendStream final int num_encoder_reconfigurations_ = 0; }; -class FakeVideoReceiveStream final : public webrtc::VideoReceiveStream { +class FakeVideoReceiveStream final + : public webrtc::VideoReceiveStreamInterface { public: - explicit FakeVideoReceiveStream(webrtc::VideoReceiveStream::Config config); + explicit FakeVideoReceiveStream( + webrtc::VideoReceiveStreamInterface::Config config); - const webrtc::VideoReceiveStream::Config& GetConfig() const; + const webrtc::VideoReceiveStreamInterface::Config& GetConfig() const; bool IsReceiving() const; void InjectFrame(const webrtc::VideoFrame& frame); - void SetStats(const webrtc::VideoReceiveStream::Stats& stats); + void SetStats(const webrtc::VideoReceiveStreamInterface::Stats& stats); std::vector GetSources() const override { return std::vector(); @@ -249,6 +258,10 @@ class FakeVideoReceiveStream final : public webrtc::VideoReceiveStream { return base_mininum_playout_delay_ms_; } + void SetLocalSsrc(uint32_t local_ssrc) { + config_.rtp.local_ssrc = local_ssrc; + } + void SetFrameDecryptor(rtc::scoped_refptr frame_decryptor) override {} @@ -262,18 +275,49 @@ class FakeVideoReceiveStream final : public webrtc::VideoReceiveStream { } void GenerateKeyFrame() override {} - private: - // webrtc::VideoReceiveStream implementation. + // webrtc::VideoReceiveStreamInterface implementation. void SetRtpExtensions(std::vector extensions) override; + webrtc::RtpHeaderExtensionMap GetRtpExtensionMap() const override; + bool transport_cc() const override { return config_.rtp.transport_cc; } + void SetTransportCc(bool transport_cc) override { + config_.rtp.transport_cc = transport_cc; + } + void SetRtcpMode(webrtc::RtcpMode mode) override { + config_.rtp.rtcp_mode = mode; + } + + void SetFlexFecProtection(webrtc::RtpPacketSinkInterface* sink) override { + config_.rtp.packet_sink_ = sink; + config_.rtp.protected_by_flexfec = (sink != nullptr); + } + + void SetLossNotificationEnabled(bool enabled) override { + config_.rtp.lntf.enabled = enabled; + } + + void SetNackHistory(webrtc::TimeDelta history) override { + config_.rtp.nack.rtp_history_ms = history.ms(); + } + + void SetProtectionPayloadTypes(int red_payload_type, + int ulpfec_payload_type) override { + config_.rtp.red_payload_type = red_payload_type; + config_.rtp.ulpfec_payload_type = ulpfec_payload_type; + } + + void SetRtcpXr(Config::Rtp::RtcpXr rtcp_xr) override { + config_.rtp.rtcp_xr = rtcp_xr; + } - const webrtc::ReceiveStream::RtpConfig& rtp_config() const override { - return config_.rtp; + void SetAssociatedPayloadTypes(std::map associated_payload_types) { + config_.rtp.rtx_associated_payload_types = + std::move(associated_payload_types); } void Start() override; void Stop() override; - webrtc::VideoReceiveStream::Stats GetStats() const override; + webrtc::VideoReceiveStreamInterface::Stats GetStats() const override; bool SetBaseMinimumPlayoutDelayMs(int delay_ms) override { base_mininum_playout_delay_ms_ = delay_ms; @@ -284,9 +328,10 @@ class FakeVideoReceiveStream final : public webrtc::VideoReceiveStream { return base_mininum_playout_delay_ms_; } - webrtc::VideoReceiveStream::Config config_; + private: + webrtc::VideoReceiveStreamInterface::Config config_; bool receiving_; - webrtc::VideoReceiveStream::Stats 
stats_; + webrtc::VideoReceiveStreamInterface::Stats stats_; int base_mininum_playout_delay_ms_ = 0; }; @@ -294,19 +339,30 @@ class FakeVideoReceiveStream final : public webrtc::VideoReceiveStream { class FakeFlexfecReceiveStream final : public webrtc::FlexfecReceiveStream { public: explicit FakeFlexfecReceiveStream( - const webrtc::FlexfecReceiveStream::Config& config); + const webrtc::FlexfecReceiveStream::Config config); + + void SetLocalSsrc(uint32_t local_ssrc) { + config_.rtp.local_ssrc = local_ssrc; + } void SetRtpExtensions(std::vector extensions) override; + webrtc::RtpHeaderExtensionMap GetRtpExtensionMap() const override; + bool transport_cc() const override { return config_.rtp.transport_cc; } + void SetTransportCc(bool transport_cc) override { + config_.rtp.transport_cc = transport_cc; + } + void SetRtcpMode(webrtc::RtcpMode mode) override { config_.rtcp_mode = mode; } - const webrtc::ReceiveStream::RtpConfig& rtp_config() const override { - return config_.rtp; + int payload_type() const override { return config_.payload_type; } + void SetPayloadType(int payload_type) override { + config_.payload_type = payload_type; } const webrtc::FlexfecReceiveStream::Config& GetConfig() const; - private: - webrtc::FlexfecReceiveStream::Stats GetStats() const override; + uint32_t remote_ssrc() const { return config_.rtp.remote_ssrc; } + private: void OnRtpPacket(const webrtc::RtpPacketReceived& packet) override; webrtc::FlexfecReceiveStream::Config config_; @@ -314,9 +370,10 @@ class FakeFlexfecReceiveStream final : public webrtc::FlexfecReceiveStream { class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { public: - FakeCall(); + explicit FakeCall(webrtc::test::ScopedKeyValueConfig* field_trials = nullptr); FakeCall(webrtc::TaskQueueBase* worker_thread, - webrtc::TaskQueueBase* network_thread); + webrtc::TaskQueueBase* network_thread, + webrtc::test::ScopedKeyValueConfig* field_trials = nullptr); ~FakeCall() override; webrtc::MockRtpTransportControllerSend* GetMockTransportControllerSend() { @@ -354,28 +411,35 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { void SetClientBitratePreferences( const webrtc::BitrateSettings& preferences) override {} + void SetFieldTrial(const std::string& field_trial_string) { + trials_overrides_ = std::make_unique( + *trials_, field_trial_string); + } + + const webrtc::FieldTrialsView& trials() const override { return *trials_; } + private: webrtc::AudioSendStream* CreateAudioSendStream( const webrtc::AudioSendStream::Config& config) override; void DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) override; - webrtc::AudioReceiveStream* CreateAudioReceiveStream( - const webrtc::AudioReceiveStream::Config& config) override; + webrtc::AudioReceiveStreamInterface* CreateAudioReceiveStream( + const webrtc::AudioReceiveStreamInterface::Config& config) override; void DestroyAudioReceiveStream( - webrtc::AudioReceiveStream* receive_stream) override; + webrtc::AudioReceiveStreamInterface* receive_stream) override; webrtc::VideoSendStream* CreateVideoSendStream( webrtc::VideoSendStream::Config config, webrtc::VideoEncoderConfig encoder_config) override; void DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) override; - webrtc::VideoReceiveStream* CreateVideoReceiveStream( - webrtc::VideoReceiveStream::Config config) override; + webrtc::VideoReceiveStreamInterface* CreateVideoReceiveStream( + webrtc::VideoReceiveStreamInterface::Config config) override; void DestroyVideoReceiveStream( - 
webrtc::VideoReceiveStream* receive_stream) override; + webrtc::VideoReceiveStreamInterface* receive_stream) override; webrtc::FlexfecReceiveStream* CreateFlexfecReceiveStream( - const webrtc::FlexfecReceiveStream::Config& config) override; + const webrtc::FlexfecReceiveStream::Config config) override; void DestroyFlexfecReceiveStream( webrtc::FlexfecReceiveStream* receive_stream) override; @@ -395,10 +459,6 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { webrtc::Call::Stats GetStats() const override; - const webrtc::WebRtcKeyValueConfig& trials() const override { - return trials_; - } - webrtc::TaskQueueBase* network_thread() const override; webrtc::TaskQueueBase* worker_thread() const override; @@ -406,10 +466,14 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { webrtc::NetworkState state) override; void OnAudioTransportOverheadChanged( int transport_overhead_per_packet) override; - void OnLocalSsrcUpdated(webrtc::AudioReceiveStream& stream, + void OnLocalSsrcUpdated(webrtc::AudioReceiveStreamInterface& stream, + uint32_t local_ssrc) override; + void OnLocalSsrcUpdated(webrtc::VideoReceiveStreamInterface& stream, uint32_t local_ssrc) override; - void OnUpdateSyncGroup(webrtc::AudioReceiveStream& stream, - const std::string& sync_group) override; + void OnLocalSsrcUpdated(webrtc::FlexfecReceiveStream& stream, + uint32_t local_ssrc) override; + void OnUpdateSyncGroup(webrtc::AudioReceiveStreamInterface& stream, + absl::string_view sync_group) override; void OnSentPacket(const rtc::SentPacket& sent_packet) override; webrtc::TaskQueueBase* const network_thread_; @@ -433,7 +497,16 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { int num_created_send_streams_; int num_created_receive_streams_; - webrtc::FieldTrialBasedConfig trials_; + + // The field trials that are in use, either supplied by caller + // or pointer to &fallback_trials_. + webrtc::test::ScopedKeyValueConfig* trials_; + + // fallback_trials_ is used if caller does not provide any field trials. + webrtc::test::ScopedKeyValueConfig fallback_trials_; + + // An extra field trial that can be set using SetFieldTrial. 
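// Illustrative sketch (not taken from the patch; TrialConfig is a simplified
// stand-in for webrtc::test::ScopedKeyValueConfig): FakeCall keeps a pointer
// to whichever field-trial config it should consult -- the caller-supplied one
// or its own fallback -- and SetFieldTrial() stacks a scoped override on top
// of that base.
#include <memory>
#include <string>

class TrialConfig {
 public:
  TrialConfig() = default;
  // An override layer: values in `extra` win, everything else defers to parent.
  TrialConfig(const TrialConfig& /*parent*/, const std::string& /*extra*/) {}
  virtual ~TrialConfig() = default;
};

class FakeCallSketch {
 public:
  explicit FakeCallSketch(TrialConfig* provided = nullptr)
      : trials_(provided ? provided : &fallback_trials_) {}

  void SetFieldTrial(const std::string& field_trial_string) {
    override_ = std::make_unique<TrialConfig>(*trials_, field_trial_string);
  }

 private:
  TrialConfig* trials_;          // points at the caller config or the fallback
  TrialConfig fallback_trials_;  // used when nothing was provided
  std::unique_ptr<TrialConfig> override_;
};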
+ std::unique_ptr trials_overrides_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_video_engine.cc b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_video_engine.cc index 7383c26399..4a4ef2335e 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_video_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_video_engine.cc @@ -24,7 +24,8 @@ namespace cricket { namespace { -static const int kEventTimeoutMs = 10000; +static constexpr webrtc::TimeDelta kEventTimeout = + webrtc::TimeDelta::Seconds(10); } // namespace @@ -171,7 +172,7 @@ webrtc::VideoEncoder::EncoderInfo FakeWebRtcVideoEncoder::GetEncoderInfo() } bool FakeWebRtcVideoEncoder::WaitForInitEncode() { - return init_encode_event_.Wait(kEventTimeoutMs); + return init_encode_event_.Wait(kEventTimeout); } webrtc::VideoCodec FakeWebRtcVideoEncoder::GetCodecSettings() { @@ -228,12 +229,13 @@ FakeWebRtcVideoEncoderFactory::CreateVideoEncoder( bool FakeWebRtcVideoEncoderFactory::WaitForCreatedVideoEncoders( int num_encoders) { int64_t start_offset_ms = rtc::TimeMillis(); - int64_t wait_time = kEventTimeoutMs; + int64_t wait_time = kEventTimeout.ms(); do { if (GetNumCreatedEncoders() >= num_encoders) return true; - wait_time = kEventTimeoutMs - (rtc::TimeMillis() - start_offset_ms); - } while (wait_time > 0 && created_video_encoder_event_.Wait(wait_time)); + wait_time = kEventTimeout.ms() - (rtc::TimeMillis() - start_offset_ms); + } while (wait_time > 0 && created_video_encoder_event_.Wait( + webrtc::TimeDelta::Millis(wait_time))); return false; } diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/internal_decoder_factory.cc b/TMessagesProj/jni/voip/webrtc/media/engine/internal_decoder_factory.cc index faac91e70f..24d54aa83b 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/internal_decoder_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/internal_decoder_factory.cc @@ -11,11 +11,11 @@ #include "media/engine/internal_decoder_factory.h" #include "absl/strings/match.h" +#include "api/video_codecs/av1_profile.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_codec.h" #include "media/base/codec.h" #include "media/base/media_constants.h" -#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h" #include "modules/video_coding/codecs/h264/include/h264.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" @@ -24,7 +24,7 @@ #include "system_wrappers/include/field_trial.h" #if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY) -#include "modules/video_coding/codecs/av1/dav1d_decoder.h" +#include "modules/video_coding/codecs/av1/dav1d_decoder.h" // nogncheck #endif namespace webrtc { @@ -47,12 +47,14 @@ std::vector InternalDecoderFactory::GetSupportedFormats() formats.push_back(SdpVideoFormat(cricket::kVp8CodecName)); for (const SdpVideoFormat& format : SupportedVP9DecoderCodecs()) formats.push_back(format); - for (const SdpVideoFormat& h264_format : SupportedH264Codecs()) + for (const SdpVideoFormat& h264_format : SupportedH264DecoderCodecs()) formats.push_back(h264_format); - if (kIsLibaomAv1DecoderSupported || - (kDav1dIsIncluded && field_trial::IsEnabled(kDav1dFieldTrial))) { + if (kDav1dIsIncluded && !field_trial::IsDisabled(kDav1dFieldTrial)) { formats.push_back(SdpVideoFormat(cricket::kAv1CodecName)); + formats.push_back(SdpVideoFormat( + cricket::kAv1CodecName, + {{kAV1FmtpProfile, AV1ProfileToString(AV1Profile::kProfile1).data()}})); } 
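// Illustrative sketch (not taken from the patch; simplified lookup): the dav1d
// decoder is now advertised by default and the field trial only acts as a
// kill switch, i.e. the check flipped from "only if explicitly Enabled" to
// "unless explicitly Disabled".
#include <string>

bool StartsWith(const std::string& s, const std::string& prefix) {
  return s.compare(0, prefix.size(), prefix) == 0;
}

// Before: opt-in -- AV1 was only listed when the trial string said Enabled.
bool ListAv1OptIn(const std::string& trial_value) {
  return StartsWith(trial_value, "Enabled");
}

// After: kill switch -- AV1 is listed unless the trial string says Disabled.
bool ListAv1KillSwitch(const std::string& trial_value) {
  return !StartsWith(trial_value, "Disabled");
}
// An empty/unset trial now keeps AV1 in GetSupportedFormats(), where it was
// previously omitted.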
return formats; @@ -92,15 +94,10 @@ std::unique_ptr InternalDecoderFactory::CreateVideoDecoder( return H264Decoder::Create(); if (absl::EqualsIgnoreCase(format.name, cricket::kAv1CodecName) && - kDav1dIsIncluded && field_trial::IsEnabled(kDav1dFieldTrial)) { + kDav1dIsIncluded && !field_trial::IsDisabled(kDav1dFieldTrial)) { return CreateDav1dDecoder(); } - if (absl::EqualsIgnoreCase(format.name, cricket::kAv1CodecName) && - kIsLibaomAv1DecoderSupported) { - return CreateLibaomAv1Decoder(); - } - RTC_DCHECK_NOTREACHED(); return nullptr; } diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/internal_encoder_factory.cc b/TMessagesProj/jni/voip/webrtc/media/engine/internal_encoder_factory.cc index bb550662f9..7b5fc24e0a 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/internal_encoder_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/internal_encoder_factory.cc @@ -10,85 +10,57 @@ #include "media/engine/internal_encoder_factory.h" +#include #include +#include #include "absl/strings/match.h" -#include "api/video_codecs/sdp_video_format.h" -#include "media/base/codec.h" -#include "media/base/media_constants.h" -#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" -#include "modules/video_coding/codecs/h264/include/h264.h" -#include "modules/video_coding/codecs/vp8/include/vp8.h" -#include "modules/video_coding/codecs/vp9/include/vp9.h" -#include "rtc_base/logging.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "api/video_codecs/video_encoder_factory_template.h" +#if defined(RTC_USE_LIBAOM_AV1_ENCODER) +#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h" // nogncheck +#endif +#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h" +#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" +#if defined(WEBRTC_USE_H264) +#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" // nogncheck +#endif namespace webrtc { +namespace { -std::vector InternalEncoderFactory::SupportedFormats() { - std::vector supported_codecs; - supported_codecs.push_back(SdpVideoFormat(cricket::kVp8CodecName)); - for (const webrtc::SdpVideoFormat& format : webrtc::SupportedVP9Codecs()) - supported_codecs.push_back(format); - for (const webrtc::SdpVideoFormat& format : webrtc::SupportedH264Codecs()) - supported_codecs.push_back(format); - if (kIsLibaomAv1EncoderSupported) - supported_codecs.push_back(SdpVideoFormat(cricket::kAv1CodecName)); - return supported_codecs; -} +using Factory = + VideoEncoderFactoryTemplate; +} // namespace std::vector InternalEncoderFactory::GetSupportedFormats() const { - return SupportedFormats(); + return Factory().GetSupportedFormats(); } std::unique_ptr InternalEncoderFactory::CreateVideoEncoder( const SdpVideoFormat& format) { - if (absl::EqualsIgnoreCase(format.name, cricket::kVp8CodecName)) - return VP8Encoder::Create(); - if (absl::EqualsIgnoreCase(format.name, cricket::kVp9CodecName)) - return VP9Encoder::Create(cricket::VideoCodec(format)); - if (absl::EqualsIgnoreCase(format.name, cricket::kH264CodecName)) - return H264Encoder::Create(cricket::VideoCodec(format)); - if (kIsLibaomAv1EncoderSupported && - absl::EqualsIgnoreCase(format.name, cricket::kAv1CodecName)) - return CreateLibaomAv1Encoder(); - RTC_LOG(LS_ERROR) << "Trying to created encoder of unsupported format " - << format.name; - return nullptr; + auto original_format = + FuzzyMatchSdpVideoFormat(Factory().GetSupportedFormats(), format); + return original_format ? 
Factory().CreateVideoEncoder(*original_format) + : nullptr; } VideoEncoderFactory::CodecSupport InternalEncoderFactory::QueryCodecSupport( const SdpVideoFormat& format, absl::optional scalability_mode) const { - // Query for supported formats and check if the specified format is supported. - // Begin with filtering out unsupported scalability modes. - if (scalability_mode) { - bool scalability_mode_supported = false; - if (absl::EqualsIgnoreCase(format.name, cricket::kVp8CodecName)) { - scalability_mode_supported = - VP8Encoder::SupportsScalabilityMode(*scalability_mode); - } else if (absl::EqualsIgnoreCase(format.name, cricket::kVp9CodecName)) { - scalability_mode_supported = - VP9Encoder::SupportsScalabilityMode(*scalability_mode); - } else if (absl::EqualsIgnoreCase(format.name, cricket::kH264CodecName)) { - scalability_mode_supported = - H264Encoder::SupportsScalabilityMode(*scalability_mode); - } else if (kIsLibaomAv1EncoderSupported && - absl::EqualsIgnoreCase(format.name, cricket::kAv1CodecName)) { - scalability_mode_supported = - LibaomAv1EncoderSupportsScalabilityMode(*scalability_mode); - } - - static constexpr VideoEncoderFactory::CodecSupport kUnsupported = { - /*is_supported=*/false, /*is_power_efficient=*/false}; - if (!scalability_mode_supported) { - return kUnsupported; - } - } - - CodecSupport codec_support; - codec_support.is_supported = format.IsCodecInList(GetSupportedFormats()); - return codec_support; + auto original_format = + FuzzyMatchSdpVideoFormat(Factory().GetSupportedFormats(), format); + return original_format + ? Factory().QueryCodecSupport(*original_format, scalability_mode) + : VideoEncoderFactory::CodecSupport{.is_supported = false}; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/internal_encoder_factory.h b/TMessagesProj/jni/voip/webrtc/media/engine/internal_encoder_factory.h index e12810cd30..25480d088f 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/internal_encoder_factory.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/internal_encoder_factory.h @@ -15,17 +15,12 @@ #include #include -#include "absl/types/optional.h" -#include "api/video_codecs/sdp_video_format.h" -#include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { - class RTC_EXPORT InternalEncoderFactory : public VideoEncoderFactory { public: - static std::vector SupportedFormats(); std::vector GetSupportedFormats() const override; CodecSupport QueryCodecSupport( const SdpVideoFormat& format, diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/multiplex_codec_factory.cc b/TMessagesProj/jni/voip/webrtc/media/engine/multiplex_codec_factory.cc index fb296811db..660c3594bc 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/multiplex_codec_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/multiplex_codec_factory.cc @@ -82,16 +82,17 @@ MultiplexDecoderFactory::MultiplexDecoderFactory( std::vector MultiplexDecoderFactory::GetSupportedFormats() const { std::vector formats = factory_->GetSupportedFormats(); + std::vector augmented_formats = formats; for (const auto& format : formats) { if (absl::EqualsIgnoreCase(format.name, kMultiplexAssociatedCodecName)) { SdpVideoFormat multiplex_format = format; multiplex_format.parameters[cricket::kCodecParamAssociatedCodecName] = format.name; multiplex_format.name = cricket::kMultiplexCodecName; - formats.push_back(multiplex_format); + augmented_formats.push_back(multiplex_format); } } - return formats; + 
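// Illustrative sketch (not taken from the patch; reduced to plain strings):
// the MultiplexDecoderFactory change above stops appending to the same vector
// that a range-for loop is iterating -- push_back can reallocate and
// invalidate the loop's iterators -- and appends to a copy instead.
#include <string>
#include <vector>

std::vector<std::string> AugmentFormats(
    const std::vector<std::string>& formats) {
  std::vector<std::string> augmented = formats;  // copy to append to
  for (const std::string& name : formats) {      // iterate the original only
    if (name == "VP9")                           // placeholder match rule
      augmented.push_back("multiplex(" + name + ")");
  }
  return augmented;
}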
return augmented_formats; } std::unique_ptr MultiplexDecoderFactory::CreateVideoDecoder( diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/null_webrtc_video_engine.h b/TMessagesProj/jni/voip/webrtc/media/engine/null_webrtc_video_engine.h index a914af954b..ede0d1b52b 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/null_webrtc_video_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/null_webrtc_video_engine.h @@ -30,6 +30,13 @@ class VideoMediaChannel; // CompositeMediaEngine. class NullWebRtcVideoEngine : public VideoEngineInterface { public: + std::vector send_codecs(bool) const override { + return std::vector(); + } + + std::vector recv_codecs(bool) const override { + return std::vector(); + } std::vector send_codecs() const override { return std::vector(); } diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc index 9143361e82..e7f6205ab6 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc @@ -30,7 +30,6 @@ #include "media/base/video_common.h" #include "modules/video_coding/include/video_error_codes.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/logging.h" @@ -108,14 +107,14 @@ int VerifyCodec(const webrtc::VideoCodec* inst) { return WEBRTC_VIDEO_CODEC_OK; } -bool StreamQualityCompare(const webrtc::SpatialLayer& a, - const webrtc::SpatialLayer& b) { +bool StreamQualityCompare(const webrtc::SimulcastStream& a, + const webrtc::SimulcastStream& b) { return std::tie(a.height, a.width, a.maxBitrate, a.maxFramerate) < std::tie(b.height, b.width, b.maxBitrate, b.maxFramerate); } void GetLowestAndHighestQualityStreamIndixes( - rtc::ArrayView streams, + rtc::ArrayView streams, int* lowest_quality_stream_idx, int* highest_quality_stream_idx) { const auto lowest_highest_quality_streams = @@ -294,7 +293,7 @@ int SimulcastEncoderAdapter::Release() { // It's legal to move the encoder to another queue now. encoder_queue_.Detach(); - rtc::AtomicOps::ReleaseStore(&inited_, 0); + inited_.store(0); return WEBRTC_VIDEO_CODEC_OK; } @@ -328,8 +327,8 @@ int SimulcastEncoderAdapter::InitEncode( int highest_quality_stream_idx = 0; if (!is_legacy_singlecast) { GetLowestAndHighestQualityStreamIndixes( - rtc::ArrayView(codec_.simulcastStream, - total_streams_count_), + rtc::ArrayView(codec_.simulcastStream, + total_streams_count_), &lowest_quality_stream_idx, &highest_quality_stream_idx); } @@ -368,7 +367,7 @@ int SimulcastEncoderAdapter::InitEncode( bypass_mode_ = true; DestroyStoredEncoders(); - rtc::AtomicOps::ReleaseStore(&inited_, 1); + inited_.store(1); return WEBRTC_VIDEO_CODEC_OK; } @@ -424,7 +423,7 @@ int SimulcastEncoderAdapter::InitEncode( // To save memory, don't store encoders that we don't use. 
DestroyStoredEncoders(); - rtc::AtomicOps::ReleaseStore(&inited_, 1); + inited_.store(1); return WEBRTC_VIDEO_CODEC_OK; } @@ -678,7 +677,7 @@ void SimulcastEncoderAdapter::OnDroppedFrame(size_t stream_idx) { } bool SimulcastEncoderAdapter::Initialized() const { - return rtc::AtomicOps::AcquireLoad(&inited_) == 1; + return inited_.load() == 1; } void SimulcastEncoderAdapter::DestroyStoredEncoders() { @@ -762,7 +761,7 @@ webrtc::VideoCodec SimulcastEncoderAdapter::MakeStreamCodec( bool is_lowest_quality_stream, bool is_highest_quality_stream) { webrtc::VideoCodec codec_params = codec; - const SpatialLayer& stream_params = codec.simulcastStream[stream_idx]; + const SimulcastStream& stream_params = codec.simulcastStream[stream_idx]; codec_params.numberOfSimulcastStreams = 0; codec_params.width = stream_params.width; @@ -772,6 +771,7 @@ webrtc::VideoCodec SimulcastEncoderAdapter::MakeStreamCodec( codec_params.maxFramerate = stream_params.maxFramerate; codec_params.qpMax = stream_params.qpMax; codec_params.active = stream_params.active; + codec_params.SetScalabilityMode(stream_params.GetScalabilityMode()); // Settings that are based on stream/resolution. if (is_lowest_quality_stream) { // Settings for lowest spatial resolutions. @@ -791,8 +791,8 @@ webrtc::VideoCodec SimulcastEncoderAdapter::MakeStreamCodec( // kComplexityHigher, which maps to cpu_used = -4. int pixels_per_frame = codec_params.width * codec_params.height; if (pixels_per_frame < 352 * 288) { - codec_params.VP8()->complexity = - webrtc::VideoCodecComplexity::kComplexityHigher; + codec_params.SetVideoEncoderComplexity( + webrtc::VideoCodecComplexity::kComplexityHigher); } // Turn off denoising for all streams but the highest resolution. codec_params.VP8()->denoisingOn = false; @@ -824,7 +824,9 @@ void SimulcastEncoderAdapter::OverrideFromFieldTrial( info->apply_alignment_to_all_simulcast_layers || encoder_info_override_.apply_alignment_to_all_simulcast_layers(); } - if (!encoder_info_override_.resolution_bitrate_limits().empty()) { + // Override resolution bitrate limits unless they're set already. + if (info->resolution_bitrate_limits.empty() && + !encoder_info_override_.resolution_bitrate_limits().empty()) { info->resolution_bitrate_limits = encoder_info_override_.resolution_bitrate_limits(); } diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.h b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.h index e6b6badbe5..ef8205e91a 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.h @@ -12,6 +12,7 @@ #ifndef MEDIA_ENGINE_SIMULCAST_ENCODER_ADAPTER_H_ #define MEDIA_ENGINE_SIMULCAST_ENCODER_ADAPTER_H_ +#include #include #include #include @@ -27,7 +28,6 @@ #include "api/video_codecs/video_encoder_factory.h" #include "common_video/framerate_controller.h" #include "modules/video_coding/include/video_codec_interface.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/experiments/encoder_info_settings.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/system/rtc_export.h" @@ -167,7 +167,7 @@ class RTC_EXPORT SimulcastEncoderAdapter : public VideoEncoder { void OverrideFromFieldTrial(VideoEncoder::EncoderInfo* info) const; - volatile int inited_; // Accessed atomically. 
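// Illustrative sketch (not taken from the patch): the adapter's init flag
// moves from a volatile int guarded by rtc::AtomicOps to std::atomic<int>;
// the default (sequentially consistent) load/store is at least as strong as
// the old AcquireLoad/ReleaseStore pair. A minimal equivalent:
#include <atomic>

class InitFlag {
 public:
  void Set() { inited_.store(1); }    // was rtc::AtomicOps::ReleaseStore(&inited_, 1)
  void Clear() { inited_.store(0); }  // was rtc::AtomicOps::ReleaseStore(&inited_, 0)
  bool IsSet() const { return inited_.load() == 1; }  // was AcquireLoad

 private:
  std::atomic<int> inited_{0};  // was: volatile int inited_;
};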
+ std::atomic inited_; VideoEncoderFactory* const primary_encoder_factory_; VideoEncoderFactory* const fallback_encoder_factory_; const SdpVideoFormat video_format_; diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/unhandled_packets_buffer.cc b/TMessagesProj/jni/voip/webrtc/media/engine/unhandled_packets_buffer.cc index cb6f0ec335..563712bdf3 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/unhandled_packets_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/unhandled_packets_buffer.cc @@ -46,7 +46,6 @@ void UnhandledPacketsBuffer::BackfillPackets( start = insert_pos_; } - size_t count = 0; std::vector remaining; remaining.reserve(kMaxStashedPackets); for (size_t i = 0; i < buffer_.size(); ++i) { @@ -56,7 +55,6 @@ void UnhandledPacketsBuffer::BackfillPackets( // scheme. const uint32_t ssrc = buffer_[pos].ssrc; if (absl::c_linear_search(ssrcs, ssrc)) { - ++count; consumer(ssrc, buffer_[pos].packet_time_us, buffer_[pos].packet); } else { remaining.push_back(buffer_[pos]); diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.cc b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.cc index f083b9c9ca..514e228780 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.cc @@ -10,13 +10,19 @@ #include "media/engine/webrtc_media_engine.h" +#include #include #include +#include #include #include "absl/algorithm/container.h" #include "absl/strings/match.h" +#include "api/transport/field_trial_based_config.h" +#include "media/base/media_constants.h" #include "media/engine/webrtc_voice_engine.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" #ifdef HAVE_WEBRTC_VIDEO #include "media/engine/webrtc_video_engine.h" @@ -30,12 +36,12 @@ std::unique_ptr CreateMediaEngine( MediaEngineDependencies dependencies) { // TODO(sprang): Make populating `dependencies.trials` mandatory and remove // these fallbacks. - std::unique_ptr fallback_trials( + std::unique_ptr fallback_trials( dependencies.trials ? nullptr : new webrtc::FieldTrialBasedConfig()); - const webrtc::WebRtcKeyValueConfig& trials = + const webrtc::FieldTrialsView& trials = dependencies.trials ? *dependencies.trials : *fallback_trials; auto audio_engine = std::make_unique( - dependencies.task_queue_factory, std::move(dependencies.adm), + dependencies.task_queue_factory, dependencies.adm.get(), std::move(dependencies.audio_encoder_factory), std::move(dependencies.audio_decoder_factory), std::move(dependencies.audio_mixer), @@ -137,7 +143,7 @@ std::vector FilterRtpExtensions( const std::vector& extensions, bool (*supported)(absl::string_view), bool filter_redundant_extensions, - const webrtc::WebRtcKeyValueConfig& trials) { + const webrtc::FieldTrialsView& trials) { // Don't check against old parameters; this should have been done earlier. 
RTC_DCHECK(ValidateRtpExtensions(extensions, {})); RTC_DCHECK(supported); diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.h b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.h index ff977609b2..e65824bd83 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.h @@ -12,18 +12,19 @@ #define MEDIA_ENGINE_WEBRTC_MEDIA_ENGINE_H_ #include -#include #include +#include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/audio/audio_frame_processor.h" #include "api/audio/audio_mixer.h" #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder_factory.h" +#include "api/field_trials_view.h" #include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/bitrate_settings.h" -#include "api/transport/field_trial_based_config.h" -#include "api/transport/webrtc_key_value_config.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" #include "media/base/codec.h" @@ -53,7 +54,7 @@ struct MediaEngineDependencies { std::unique_ptr video_encoder_factory; std::unique_ptr video_decoder_factory; - const webrtc::WebRtcKeyValueConfig* trials = nullptr; + const webrtc::FieldTrialsView* trials = nullptr; }; // CreateMediaEngine may be called on any thread, though the engine is @@ -76,7 +77,7 @@ std::vector FilterRtpExtensions( const std::vector& extensions, bool (*supported)(absl::string_view), bool filter_redundant_extensions, - const webrtc::WebRtcKeyValueConfig& trials); + const webrtc::FieldTrialsView& trials); webrtc::BitrateConstraints GetBitrateConfigForCodec(const Codec& codec); diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc index f9de3ffb45..483c23a50b 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc @@ -20,7 +20,6 @@ #include "absl/algorithm/container.h" #include "absl/strings/match.h" #include "api/media_stream_interface.h" -#include "api/units/data_rate.h" #include "api/video/video_codec_constants.h" #include "api/video/video_codec_type.h" #include "api/video_codecs/sdp_video_format.h" @@ -28,15 +27,14 @@ #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" #include "call/call.h" -#include "media/engine/simulcast.h" #include "media/engine/webrtc_media_engine.h" #include "media/engine/webrtc_voice_engine.h" #include "modules/rtp_rtcp/source/rtp_util.h" +#include "modules/video_coding/codecs/vp9/svc_config.h" +#include "modules/video_coding/svc/scalability_mode_util.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/field_trial_units.h" -#include "rtc_base/experiments/min_video_bitrate_experiment.h" -#include "rtc_base/experiments/normalize_simulcast_size_experiment.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/strings/string_builder.h" @@ -50,25 +48,12 @@ namespace { using ::webrtc::ParseRtpPayloadType; using ::webrtc::ParseRtpSsrc; -const int kMinLayerSize = 16; constexpr int64_t kUnsignaledSsrcCooldownMs = rtc::kNumMillisecsPerSec / 2; // TODO(bugs.webrtc.org/13166): Remove AV1X when backwards compatibility is not // needed. 
constexpr char kAv1xCodecName[] = "AV1X"; -int ScaleDownResolution(int resolution, - double scale_down_by, - int min_resolution) { - // Resolution is never scalied down to smaller than min_resolution. - // If the input resolution is already smaller than min_resolution, - // no scaling should be done at all. - if (resolution <= min_resolution) - return resolution; - return std::max(static_cast(resolution / scale_down_by + 0.5), - min_resolution); -} - const char* StreamTypeToString( webrtc::VideoSendStream::StreamStats::StreamType type) { switch (type) { @@ -82,32 +67,16 @@ const char* StreamTypeToString( return nullptr; } -bool IsEnabled(const webrtc::WebRtcKeyValueConfig& trials, - absl::string_view name) { +bool IsEnabled(const webrtc::FieldTrialsView& trials, absl::string_view name) { return absl::StartsWith(trials.Lookup(name), "Enabled"); } -bool IsDisabled(const webrtc::WebRtcKeyValueConfig& trials, - absl::string_view name) { +bool IsDisabled(const webrtc::FieldTrialsView& trials, absl::string_view name) { return absl::StartsWith(trials.Lookup(name), "Disabled"); } -bool PowerOfTwo(int value) { - return (value > 0) && ((value & (value - 1)) == 0); -} - -bool IsScaleFactorsPowerOfTwo(const webrtc::VideoEncoderConfig& config) { - for (const auto& layer : config.simulcast_layers) { - double scale = std::max(layer.scale_resolution_down_by, 1.0); - if (std::round(scale) != scale || !PowerOfTwo(scale)) { - return false; - } - } - return true; -} - void AddDefaultFeedbackParams(VideoCodec* codec, - const webrtc::WebRtcKeyValueConfig& trials) { + const webrtc::FieldTrialsView& trials) { // Don't add any feedback params for RED and ULPFEC. if (codec->name == kRedCodecName || codec->name == kUlpfecCodecName) return; @@ -134,10 +103,25 @@ bool IsCodecValidForLowerRange(const VideoCodec& codec) { absl::EqualsIgnoreCase(codec.name, kAv1xCodecName)) { return true; } else if (absl::EqualsIgnoreCase(codec.name, kH264CodecName)) { - std::string profileLevelId; - // H264 with YUV444. - if (codec.GetParam(kH264FmtpProfileLevelId, &profileLevelId)) { - return absl::StartsWithIgnoreCase(profileLevelId, "f400"); + std::string profile_level_id; + std::string packetization_mode; + + if (codec.GetParam(kH264FmtpProfileLevelId, &profile_level_id)) { + if (absl::StartsWithIgnoreCase(profile_level_id, "4d00")) { + if (codec.GetParam(kH264FmtpPacketizationMode, &packetization_mode)) { + return packetization_mode == "0"; + } + } + // H264 with YUV444. + return absl::StartsWithIgnoreCase(profile_level_id, "f400"); + } + } else if (absl::EqualsIgnoreCase(codec.name, kVp9CodecName)) { + std::string profile_id; + + if (codec.GetParam(kVP9ProfileId, &profile_id)) { + if (profile_id.compare("1") == 0 || profile_id.compare("3") == 0) { + return true; + } } } return false; @@ -157,7 +141,8 @@ template std::vector GetPayloadTypesAndDefaultCodecs( const T* factory, bool is_decoder_factory, - const webrtc::WebRtcKeyValueConfig& trials) { + bool include_rtx, + const webrtc::FieldTrialsView& trials) { if (!factory) { return {}; } @@ -228,34 +213,31 @@ std::vector GetPayloadTypesAndDefaultCodecs( output_codecs.push_back(codec); // Add associated RTX codec for non-FEC codecs. - if (!isFecCodec) { - // Check if we ran out of payload types. - if (payload_type_lower > kLastDynamicPayloadTypeLowerRange) { - // TODO(https://bugs.chromium.org/p/webrtc/issues/detail?id=12248): - // return an error. 
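// Illustrative sketch (not taken from the patch; range start values are
// assumptions based on the log message above): how the RTX payload-type
// assignment walks the two dynamic ranges. Codecs flagged as safe for the
// lower range, or codecs arriving once the upper range is exhausted, get IDs
// from [35, 63]; everything else uses [96, 127]; allocation stops when both
// ranges run dry.
#include <optional>

struct PayloadTypeAllocator {
  int lower = 35;  // assumed first value of the lower dynamic range
  int upper = 96;  // assumed first value of the upper dynamic range

  // Returns the next payload type, or nullopt when both ranges are exhausted.
  std::optional<int> Next(bool prefers_lower_range) {
    if (upper >= 127)
      prefers_lower_range = true;  // fall back once [96, 127] is used up
    if (prefers_lower_range) {
      if (lower > 63)
        return std::nullopt;  // out of dynamic payload types entirely
      return lower++;
    }
    return upper++;
  }
};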
- RTC_LOG(LS_ERROR) << "Out of dynamic payload types [35,63] after " - "fallback from [96, 127], skipping the rest."; - RTC_DCHECK_EQ(payload_type_upper, kLastDynamicPayloadTypeUpperRange); - break; - } - if (IsCodecValidForLowerRange(codec) || - payload_type_upper >= kLastDynamicPayloadTypeUpperRange) { - output_codecs.push_back( - VideoCodec::CreateRtxCodec(payload_type_lower++, codec.id)); - } else { - output_codecs.push_back( - VideoCodec::CreateRtxCodec(payload_type_upper++, codec.id)); + if (include_rtx) { + if (!isFecCodec) { + // Check if we ran out of payload types. + if (payload_type_lower > kLastDynamicPayloadTypeLowerRange) { + // TODO(https://bugs.chromium.org/p/webrtc/issues/detail?id=12248): + // return an error. + RTC_LOG(LS_ERROR) << "Out of dynamic payload types [35,63] after " + "fallback from [96, 127], skipping the rest."; + RTC_DCHECK_EQ(payload_type_upper, kLastDynamicPayloadTypeUpperRange); + break; + } + if (IsCodecValidForLowerRange(codec) || + payload_type_upper >= kLastDynamicPayloadTypeUpperRange) { + output_codecs.push_back( + VideoCodec::CreateRtxCodec(payload_type_lower++, codec.id)); + } else { + output_codecs.push_back( + VideoCodec::CreateRtxCodec(payload_type_upper++, codec.id)); + } } } } return output_codecs; } -bool IsTemporalLayersSupported(const std::string& codec_name) { - return absl::EqualsIgnoreCase(codec_name, kVp8CodecName) || - absl::EqualsIgnoreCase(codec_name, kVp9CodecName); -} - static std::string CodecVectorToString(const std::vector& codecs) { rtc::StringBuilder out; out << "{"; @@ -324,7 +306,7 @@ static bool ValidateStreamParams(const StreamParams& sp) { // Returns true if the given codec is disallowed from doing simulcast. bool IsCodecDisabledForSimulcast(const std::string& codec_name, - const webrtc::WebRtcKeyValueConfig& trials) { + const webrtc::FieldTrialsView& trials) { if (absl::EqualsIgnoreCase(codec_name, kVp9CodecName) || absl::EqualsIgnoreCase(codec_name, kAv1CodecName)) { return true; @@ -337,26 +319,6 @@ bool IsCodecDisabledForSimulcast(const std::string& codec_name, return false; } -// The selected thresholds for QVGA and VGA corresponded to a QP around 10. -// The change in QP declined above the selected bitrates. -static int GetMaxDefaultVideoBitrateKbps(int width, - int height, - bool is_screenshare) { - int max_bitrate; - if (width * height <= 320 * 240) { - max_bitrate = 600; - } else if (width * height <= 640 * 480) { - max_bitrate = 1700; - } else if (width * height <= 960 * 540) { - max_bitrate = 2000; - } else { - max_bitrate = 2500; - } - if (is_screenshare) - max_bitrate = std::max(max_bitrate, 1200); - return max_bitrate; -} - // Returns its smallest positive argument. If neither argument is positive, // returns an arbitrary nonpositive value. int MinPositive(int a, int b) { @@ -375,17 +337,6 @@ bool IsLayerActive(const webrtc::RtpEncodingParameters& layer) { (!layer.max_framerate || *layer.max_framerate > 0); } -size_t FindRequiredActiveLayers( - const webrtc::VideoEncoderConfig& encoder_config) { - // Need enough layers so that at least the first active one is present. 
- for (size_t i = 0; i < encoder_config.number_of_streams; ++i) { - if (encoder_config.simulcast_layers[i].active) { - return i + 1; - } - } - return 0; -} - int NumActiveStreams(const webrtc::RtpParameters& rtp_parameters) { int res = 0; for (size_t i = 0; i < rtp_parameters.encodings.size(); ++i) { @@ -396,6 +347,21 @@ int NumActiveStreams(const webrtc::RtpParameters& rtp_parameters) { return res; } +absl::optional NumSpatialLayersFromEncoding( + const webrtc::RtpParameters& rtp_parameters, + size_t idx) { + if (idx >= rtp_parameters.encodings.size()) + return absl::nullopt; + + absl::optional scalability_mode = + webrtc::ScalabilityModeFromString( + rtp_parameters.encodings[idx].scalability_mode.value_or("")); + return scalability_mode + ? absl::optional( + ScalabilityModeToNumSpatialLayers(*scalability_mode)) + : absl::nullopt; +} + std::map MergeInfoAboutOutboundRtpSubstreams( const std::map& @@ -479,7 +445,6 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( (parameters_.config.rtp.ssrcs.size() == 1 || NumActiveStreams(rtp_parameters_) == 1); - bool frame_dropping = !is_screencast; bool denoising; bool codec_default_denoising = false; if (is_screencast) { @@ -491,11 +456,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( } if (absl::EqualsIgnoreCase(codec.name, kH264CodecName)) { - webrtc::VideoCodecH264 h264_settings = - webrtc::VideoEncoder::GetDefaultH264Settings(); - h264_settings.frameDroppingOn = frame_dropping; - return rtc::make_ref_counted< - webrtc::VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings); + return nullptr; } if (absl::EqualsIgnoreCase(codec.name, kVp8CodecName)) { webrtc::VideoCodecVP8 vp8_settings = @@ -503,7 +464,6 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( vp8_settings.automaticResizeOn = automatic_resize; // VP8 denoising is enabled by default. vp8_settings.denoisingOn = codec_default_denoising ? true : denoising; - vp8_settings.frameDroppingOn = frame_dropping; return rtc::make_ref_counted< webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); } @@ -521,19 +481,25 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( // VP9 denoising is disabled by default. vp9_settings.denoisingOn = codec_default_denoising ? true : denoising; - vp9_settings.automaticResizeOn = automatic_resize; - // Ensure frame dropping is always enabled. - RTC_DCHECK(vp9_settings.frameDroppingOn); + // Disable automatic resize if more than one spatial layer is requested. 
+ bool vp9_automatic_resize = automatic_resize; + absl::optional num_spatial_layers = + NumSpatialLayersFromEncoding(rtp_parameters_, /*idx=*/0); + if (num_spatial_layers && *num_spatial_layers > 1) { + vp9_automatic_resize = false; + } + vp9_settings.automaticResizeOn = vp9_automatic_resize; if (!is_screencast) { - webrtc::FieldTrialFlag interlayer_pred_experiment_enabled = - webrtc::FieldTrialFlag("Enabled"); + webrtc::FieldTrialFlag interlayer_pred_experiment_enabled("Enabled"); webrtc::FieldTrialEnum inter_layer_pred_mode( "inter_layer_pred_mode", webrtc::InterLayerPredMode::kOnKeyPic, {{"off", webrtc::InterLayerPredMode::kOff}, {"on", webrtc::InterLayerPredMode::kOn}, {"onkeypic", webrtc::InterLayerPredMode::kOnKeyPic}}); + webrtc::FieldTrialFlag force_flexible_mode("FlexibleMode"); webrtc::ParseFieldTrial( - {&interlayer_pred_experiment_enabled, &inter_layer_pred_mode}, + {&interlayer_pred_experiment_enabled, &inter_layer_pred_mode, + &force_flexible_mode}, call_->trials().Lookup("WebRTC-Vp9InterLayerPred")); if (interlayer_pred_experiment_enabled) { vp9_settings.interLayerPred = inter_layer_pred_mode; @@ -541,6 +507,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( // Limit inter-layer prediction to key pictures by default. vp9_settings.interLayerPred = webrtc::InterLayerPredMode::kOnKeyPic; } + vp9_settings.flexibleMode = force_flexible_mode.Get(); } else { // Multiple spatial layers vp9 screenshare needs flexible mode. vp9_settings.flexibleMode = vp9_settings.numberOfSpatialLayers > 1; @@ -557,7 +524,8 @@ DefaultUnsignalledSsrcHandler::DefaultUnsignalledSsrcHandler() UnsignalledSsrcHandler::Action DefaultUnsignalledSsrcHandler::OnUnsignalledSsrc( WebRtcVideoChannel* channel, - uint32_t ssrc) { + uint32_t ssrc, + absl::optional rtx_ssrc) { absl::optional default_recv_ssrc = channel->GetDefaultReceiveStreamSsrc(); @@ -569,7 +537,9 @@ UnsignalledSsrcHandler::Action DefaultUnsignalledSsrcHandler::OnUnsignalledSsrc( StreamParams sp = channel->unsignaled_stream_params(); sp.ssrcs.push_back(ssrc); - + if (rtx_ssrc) { + sp.AddFidSsrc(ssrc, *rtx_ssrc); + } RTC_LOG(LS_INFO) << "Creating default receive stream for SSRC=" << ssrc << "."; if (!channel->AddRecvStream(sp, /*default_stream=*/true)) { @@ -606,7 +576,7 @@ void DefaultUnsignalledSsrcHandler::SetDefaultSink( WebRtcVideoEngine::WebRtcVideoEngine( std::unique_ptr video_encoder_factory, std::unique_ptr video_decoder_factory, - const webrtc::WebRtcKeyValueConfig& trials) + const webrtc::FieldTrialsView& trials) : decoder_factory_(std::move(video_decoder_factory)), encoder_factory_(std::move(video_encoder_factory)), trials_(trials) { @@ -628,14 +598,16 @@ VideoMediaChannel* WebRtcVideoEngine::CreateMediaChannel( encoder_factory_.get(), decoder_factory_.get(), video_bitrate_allocator_factory); } -std::vector WebRtcVideoEngine::send_codecs() const { +std::vector WebRtcVideoEngine::send_codecs(bool include_rtx) const { return GetPayloadTypesAndDefaultCodecs(encoder_factory_.get(), - /*is_decoder_factory=*/false, trials_); + /*is_decoder_factory=*/false, + include_rtx, trials_); } -std::vector WebRtcVideoEngine::recv_codecs() const { +std::vector WebRtcVideoEngine::recv_codecs(bool include_rtx) const { return GetPayloadTypesAndDefaultCodecs(decoder_factory_.get(), - /*is_decoder_factory=*/true, trials_); + /*is_decoder_factory=*/true, + include_rtx, trials_); } std::vector @@ -687,7 +659,7 @@ WebRtcVideoChannel::WebRtcVideoChannel( webrtc::VideoEncoderFactory* encoder_factory, webrtc::VideoDecoderFactory* 
decoder_factory, webrtc::VideoBitrateAllocatorFactory* bitrate_allocator_factory) - : VideoMediaChannel(config, call->network_thread()), + : VideoMediaChannel(call->network_thread(), config.enable_dscp), worker_thread_(call->worker_thread()), call_(call), unsignalled_ssrc_handler_(&default_unsignalled_ssrc_handler_), @@ -712,7 +684,8 @@ WebRtcVideoChannel::WebRtcVideoChannel( rtcp_receiver_report_ssrc_ = kDefaultRtcpReceiverReportSsrc; sending_ = false; recv_codecs_ = MapCodecs(GetPayloadTypesAndDefaultCodecs( - decoder_factory_, /*is_decoder_factory=*/true, call_->trials())); + decoder_factory_, /*is_decoder_factory=*/true, + /*include_rtx=*/true, call_->trials())); recv_flexfec_payload_type_ = recv_codecs_.empty() ? 0 : recv_codecs_.front().flexfec_payload_type; } @@ -882,7 +855,7 @@ bool WebRtcVideoChannel::SetSendParameters(const VideoSendParameters& params) { void WebRtcVideoChannel::RequestEncoderFallback() { if (!worker_thread_->IsCurrent()) { worker_thread_->PostTask( - ToQueuedTask(task_safety_, [this] { RequestEncoderFallback(); })); + SafeTask(task_safety_.flag(), [this] { RequestEncoderFallback(); })); return; } @@ -900,56 +873,13 @@ void WebRtcVideoChannel::RequestEncoderFallback() { } void WebRtcVideoChannel::RequestEncoderSwitch( - const EncoderSwitchRequestCallback::Config& conf) { - if (!worker_thread_->IsCurrent()) { - worker_thread_->PostTask(ToQueuedTask( - task_safety_, [this, conf] { RequestEncoderSwitch(conf); })); - return; - } - - RTC_DCHECK_RUN_ON(&thread_checker_); - - if (!allow_codec_switching_) { - RTC_LOG(LS_INFO) << "Encoder switch requested but codec switching has" - " not been enabled yet."; - requested_encoder_switch_ = conf; - return; - } - - for (const VideoCodecSettings& codec_setting : negotiated_codecs_) { - if (codec_setting.codec.name == conf.codec_name) { - if (conf.param) { - auto it = codec_setting.codec.params.find(*conf.param); - if (it == codec_setting.codec.params.end()) - continue; - - if (conf.value && it->second != *conf.value) - continue; - } - - if (send_codec_ == codec_setting) { - // Already using this codec, no switch required. - return; - } - - ChangedSendParameters params; - params.send_codec = codec_setting; - ApplyChangedParams(params); - return; - } - } - - RTC_LOG(LS_WARNING) << "Requested encoder with codec_name:" << conf.codec_name - << ", param:" << conf.param.value_or("none") - << " and value:" << conf.value.value_or("none") - << "not found. No switch performed."; -} - -void WebRtcVideoChannel::RequestEncoderSwitch( - const webrtc::SdpVideoFormat& format) { + const webrtc::SdpVideoFormat& format, + bool allow_default_fallback) { if (!worker_thread_->IsCurrent()) { - worker_thread_->PostTask(ToQueuedTask( - task_safety_, [this, format] { RequestEncoderSwitch(format); })); + worker_thread_->PostTask( + SafeTask(task_safety_.flag(), [this, format, allow_default_fallback] { + RequestEncoderSwitch(format, allow_default_fallback); + })); return; } @@ -975,8 +905,13 @@ void WebRtcVideoChannel::RequestEncoderSwitch( } } - RTC_LOG(LS_WARNING) << "Encoder switch failed: SdpVideoFormat " - << format.ToString() << " not negotiated."; + RTC_LOG(LS_WARNING) << "Failed to switch encoder to: " << format.ToString() + << ". 
Is default fallback allowed: " + << allow_default_fallback; + + if (allow_default_fallback) { + RequestEncoderFallback(); + } } bool WebRtcVideoChannel::ApplyChangedParams( @@ -1070,8 +1005,16 @@ webrtc::RtpParameters WebRtcVideoChannel::GetRtpSendParameters( // Need to add the common list of codecs to the send stream-specific // RTP parameters. for (const VideoCodec& codec : send_params_.codecs) { - rtp_params.codecs.push_back(codec.ToCodecParameters()); + if (send_codec_ && send_codec_->codec.id == codec.id) { + // Put the current send codec to the front of the codecs list. + RTC_DCHECK_EQ(codec.name, send_codec_->codec.name); + rtp_params.codecs.insert(rtp_params.codecs.begin(), + codec.ToCodecParameters()); + } else { + rtp_params.codecs.push_back(codec.ToCodecParameters()); + } } + return rtp_params; } @@ -1150,9 +1093,9 @@ webrtc::RtpParameters WebRtcVideoChannel::GetDefaultRtpReceiveParameters() RTC_DCHECK_RUN_ON(&thread_checker_); webrtc::RtpParameters rtp_params; if (!default_unsignalled_ssrc_handler_.GetDefaultSink()) { - RTC_LOG(LS_WARNING) << "Attempting to get RTP parameters for the default, " - "unsignaled video receive stream, but not yet " - "configured to receive such a stream."; + // Getting parameters on a default, unsignaled video receive stream but + // because we've not configured to receive such a stream, `encodings` is + // empty. return rtp_params; } rtp_params.encodings.emplace_back(); @@ -1187,7 +1130,7 @@ bool WebRtcVideoChannel::GetChangedRecvParameters( const std::vector local_supported_codecs = GetPayloadTypesAndDefaultCodecs(decoder_factory_, /*is_decoder_factory=*/true, - call_->trials()); + /*include_rtx=*/true, call_->trials()); for (const VideoCodecSettings& mapped_codec : mapped_codecs) { if (!FindMatchingCodec(local_supported_codecs, mapped_codec.codec)) { RTC_LOG(LS_ERROR) @@ -1266,6 +1209,38 @@ std::string WebRtcVideoChannel::CodecSettingsVectorToString( return out.Release(); } +void WebRtcVideoChannel::ExtractCodecInformation( + rtc::ArrayView recv_codecs, + std::map& rtx_associated_payload_types, + std::set& raw_payload_types, + std::vector& decoders) { + RTC_DCHECK(!recv_codecs.empty()); + RTC_DCHECK(rtx_associated_payload_types.empty()); + RTC_DCHECK(raw_payload_types.empty()); + RTC_DCHECK(decoders.empty()); + + for (const VideoCodecSettings& recv_codec : recv_codecs) { + decoders.emplace_back( + webrtc::SdpVideoFormat(recv_codec.codec.name, recv_codec.codec.params), + recv_codec.codec.id); + rtx_associated_payload_types.emplace(recv_codec.rtx_payload_type, + recv_codec.codec.id); + if (recv_codec.codec.packetization == kPacketizationParamRaw) { + raw_payload_types.insert(recv_codec.codec.id); + } + } +} + +void WebRtcVideoChannel::SetReceiverReportSsrc(uint32_t ssrc) { + RTC_DCHECK_RUN_ON(&thread_checker_); + if (ssrc == rtcp_receiver_report_ssrc_) + return; + + rtcp_receiver_report_ssrc_ = ssrc; + for (auto& [unused, receive_stream] : receive_streams_) + receive_stream->SetLocalSsrc(ssrc); +} + bool WebRtcVideoChannel::GetSendCodec(VideoCodec* codec) { RTC_DCHECK_RUN_ON(&thread_checker_); if (!send_codec_) { @@ -1378,13 +1353,9 @@ bool WebRtcVideoChannel::AddSendStream(const StreamParams& sp) { send_streams_[ssrc] = stream; if (rtcp_receiver_report_ssrc_ == kDefaultRtcpReceiverReportSsrc) { - rtcp_receiver_report_ssrc_ = ssrc; - RTC_LOG(LS_INFO) - << "SetLocalSsrc on all the receive streams because we added " - "a send stream."; - for (auto& kv : receive_streams_) - kv.second->SetLocalSsrc(ssrc); + SetReceiverReportSsrc(ssrc); } + if 
(sending_) { stream->SetSend(true); } @@ -1411,15 +1382,8 @@ bool WebRtcVideoChannel::RemoveSendStream(uint32_t ssrc) { // Switch receiver report SSRCs, the one in use is no longer valid. if (rtcp_receiver_report_ssrc_ == ssrc) { - rtcp_receiver_report_ssrc_ = send_streams_.empty() - ? kDefaultRtcpReceiverReportSsrc - : send_streams_.begin()->first; - RTC_LOG(LS_INFO) << "SetLocalSsrc on all the receive streams because the " - "previous local SSRC was removed."; - - for (auto& kv : receive_streams_) { - kv.second->SetLocalSsrc(rtcp_receiver_report_ssrc_); - } + SetReceiverReportSsrc(send_streams_.empty() ? kDefaultRtcpReceiverReportSsrc + : send_streams_.begin()->first); } delete removed_stream; @@ -1475,7 +1439,7 @@ bool WebRtcVideoChannel::AddRecvStream(const StreamParams& sp, for (uint32_t used_ssrc : sp.ssrcs) receive_ssrcs_.insert(used_ssrc); - webrtc::VideoReceiveStream::Config config(this, decoder_factory_); + webrtc::VideoReceiveStreamInterface::Config config(this, decoder_factory_); webrtc::FlexfecReceiveStream::Config flexfec_config(this); ConfigureReceiverRtp(&config, &flexfec_config, sp); @@ -1497,7 +1461,7 @@ bool WebRtcVideoChannel::AddRecvStream(const StreamParams& sp, } void WebRtcVideoChannel::ConfigureReceiverRtp( - webrtc::VideoReceiveStream::Config* config, + webrtc::VideoReceiveStreamInterface::Config* config, webrtc::FlexfecReceiveStream::Config* flexfec_config, const StreamParams& sp) const { uint32_t ssrc = sp.first_ssrc(); @@ -1596,11 +1560,8 @@ void WebRtcVideoChannel::OnDemuxerCriteriaUpdatePending() { } void WebRtcVideoChannel::OnDemuxerCriteriaUpdateComplete() { - RTC_DCHECK_RUN_ON(&network_thread_checker_); - worker_thread_->PostTask(ToQueuedTask(task_safety_, [this] { - RTC_DCHECK_RUN_ON(&thread_checker_); - ++demuxer_criteria_completed_id_; - })); + RTC_DCHECK_RUN_ON(&thread_checker_); + ++demuxer_criteria_completed_id_; } bool WebRtcVideoChannel::SetSink( @@ -1720,7 +1681,7 @@ void WebRtcVideoChannel::OnPacketReceived(rtc::CopyOnWriteBuffer packet, // to a common implementation and provide a callback on the worker thread // for the exception case (DELIVERY_UNKNOWN_SSRC) and how retry is attempted. worker_thread_->PostTask( - ToQueuedTask(task_safety_, [this, packet, packet_time_us] { + SafeTask(task_safety_.flag(), [this, packet, packet_time_us] { RTC_DCHECK_RUN_ON(&thread_checker_); const webrtc::PacketReceiver::DeliveryStatus delivery_result = call_->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO, packet, @@ -1734,6 +1695,7 @@ void WebRtcVideoChannel::OnPacketReceived(rtc::CopyOnWriteBuffer packet, break; } + absl::optional rtx_ssrc; uint32_t ssrc = ParseRtpSsrc(packet); if (unknown_ssrc_packet_buffer_) { @@ -1753,11 +1715,26 @@ void WebRtcVideoChannel::OnPacketReceived(rtc::CopyOnWriteBuffer packet, // know what stream it associates with, and we shouldn't ever create an // implicit channel for these. for (auto& codec : recv_codecs_) { - if (payload_type == codec.rtx_payload_type || - payload_type == codec.ulpfec.red_rtx_payload_type || + if (payload_type == codec.ulpfec.red_rtx_payload_type || payload_type == codec.ulpfec.ulpfec_payload_type) { return; } + if (payload_type == codec.rtx_payload_type) { + // As we don't support receiving simulcast there can only be one RTX + // stream, which will be associated with unsignaled media stream. + // It is not possible to update the ssrcs of a receive stream, so we + // recreate it instead if found.
+ auto default_ssrc = GetDefaultReceiveStreamSsrc(); + if (!default_ssrc) { + return; + } + rtx_ssrc = ssrc; + ssrc = *default_ssrc; + // Allow recreating the receive stream even if the RTX packet is + // received just after the media packet. + last_unsignalled_ssrc_creation_time_ms_.reset(); + break; + } } if (payload_type == recv_flexfec_payload_type_) { return; @@ -1787,7 +1764,8 @@ void WebRtcVideoChannel::OnPacketReceived(rtc::CopyOnWriteBuffer packet, } } // Let the unsignalled ssrc handler decide whether to drop or deliver. - switch (unsignalled_ssrc_handler_->OnUnsignalledSsrc(this, ssrc)) { + switch (unsignalled_ssrc_handler_->OnUnsignalledSsrc(this, ssrc, + rtx_ssrc)) { case UnsignalledSsrcHandler::kDropPacket: return; case UnsignalledSsrcHandler::kDeliverPacket: @@ -1870,11 +1848,12 @@ void WebRtcVideoChannel::OnReadyToSend(bool ready) { } void WebRtcVideoChannel::OnNetworkRouteChanged( - const std::string& transport_name, + absl::string_view transport_name, const rtc::NetworkRoute& network_route) { RTC_DCHECK_RUN_ON(&network_thread_checker_); - worker_thread_->PostTask(ToQueuedTask( - task_safety_, [this, name = transport_name, route = network_route] { + worker_thread_->PostTask(SafeTask( + task_safety_.flag(), + [this, name = std::string(transport_name), route = network_route] { RTC_DCHECK_RUN_ON(&thread_checker_); webrtc::RtpTransportControllerSendInterface* transport = call_->GetTransportControllerSend(); @@ -1887,23 +1866,8 @@ void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) { RTC_DCHECK_RUN_ON(&network_thread_checker_); MediaChannel::SetInterface(iface); // Set the RTP recv/send buffer to a bigger size. - - // The group should be a positive integer with an explicit size, in - // which case that is used as UDP recevie buffer size. All other values shall - // result in the default value being used. - const std::string group_name_recv_buf_size = - call_->trials().Lookup("WebRTC-IncreasedReceivebuffers"); - int recv_buffer_size = kVideoRtpRecvBufferSize; - if (!group_name_recv_buf_size.empty() && - (sscanf(group_name_recv_buf_size.c_str(), "%d", &recv_buffer_size) != 1 || - recv_buffer_size <= 0)) { - RTC_LOG(LS_WARNING) << "Invalid receive buffer size: " - << group_name_recv_buf_size; - recv_buffer_size = kVideoRtpRecvBufferSize; - } - MediaChannel::SetOption(NetworkInterface::ST_RTP, rtc::Socket::OPT_RCVBUF, - recv_buffer_size); + kVideoRtpRecvBufferSize); // Speculative change to increase the outbound socket buffer size. 
// In b/15152257, we are seeing a significant number of packets discarded @@ -1946,16 +1910,23 @@ void WebRtcVideoChannel::SetFrameEncryptor( } } +void WebRtcVideoChannel::SetEncoderSelector( + uint32_t ssrc, + webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { + RTC_DCHECK_RUN_ON(&thread_checker_); + auto matching_stream = send_streams_.find(ssrc); + if (matching_stream != send_streams_.end()) { + matching_stream->second->SetEncoderSelector(encoder_selector); + } else { + RTC_LOG(LS_ERROR) << "No stream found to attach encoder selector"; + } +} + void WebRtcVideoChannel::SetVideoCodecSwitchingEnabled(bool enabled) { RTC_DCHECK_RUN_ON(&thread_checker_); allow_codec_switching_ = enabled; if (allow_codec_switching_) { RTC_LOG(LS_INFO) << "Encoder switching enabled."; - if (requested_encoder_switch_) { - RTC_LOG(LS_INFO) << "Executing cached video encoder switch request."; - RequestEncoderSwitch(*requested_encoder_switch_); - requested_encoder_switch_.reset(); - } } } @@ -2257,7 +2228,7 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::SetCodec( parameters_.codec_settings = codec_settings; - // TODO(nisse): Avoid recreation, it should be enough to call + // TODO(bugs.webrtc.org/8830): Avoid recreation, it should be enough to call // ReconfigureEncoder. RTC_LOG(LS_INFO) << "RecreateWebRtcStream (send) because of SetCodec."; RecreateWebRtcStream(); @@ -2314,9 +2285,14 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::SetSendParameters( webrtc::RTCError WebRtcVideoChannel::WebRtcVideoSendStream::SetRtpParameters( const webrtc::RtpParameters& new_parameters) { RTC_DCHECK_RUN_ON(&thread_checker_); + // This is checked higher in the stack (RtpSender), so this is only checking + // for users accessing the private APIs or tests, not specification + // conformance. 
+ // TODO(orphis): Migrate tests to later make this a DCHECK only webrtc::RTCError error = CheckRtpParametersInvalidModificationAndValues( rtp_parameters_, new_parameters); if (!error.ok()) { + // Error is propagated to the callback at a higher level return error; } @@ -2331,7 +2307,9 @@ webrtc::RTCError WebRtcVideoChannel::WebRtcVideoSendStream::SetRtpParameters( (new_parameters.encodings[i].scale_resolution_down_by != rtp_parameters_.encodings[i].scale_resolution_down_by) || (new_parameters.encodings[i].num_temporal_layers != - rtp_parameters_.encodings[i].num_temporal_layers)) { + rtp_parameters_.encodings[i].num_temporal_layers) || + (new_parameters.encodings[i].requested_resolution != + rtp_parameters_.encodings[i].requested_resolution)) { new_param = true; break; } @@ -2397,6 +2375,18 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::SetFrameEncryptor( } } +void WebRtcVideoChannel::WebRtcVideoSendStream::SetEncoderSelector( + webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { + RTC_DCHECK_RUN_ON(&thread_checker_); + parameters_.config.encoder_selector = encoder_selector; + if (stream_) { + RTC_LOG(LS_INFO) + << "RecreateWebRtcStream (send) because of SetEncoderSelector, ssrc=" + << parameters_.config.rtp.ssrcs[0]; + RecreateWebRtcStream(); + } +} + void WebRtcVideoChannel::WebRtcVideoSendStream::UpdateSendState() { RTC_DCHECK_RUN_ON(&thread_checker_); if (sending_) { @@ -2509,7 +2499,8 @@ WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig( encoder_config.simulcast_layers[i].active = rtp_parameters_.encodings[i].active; encoder_config.simulcast_layers[i].scalability_mode = - rtp_parameters_.encodings[i].scalability_mode; + webrtc::ScalabilityModeFromString( + rtp_parameters_.encodings[i].scalability_mode.value_or("")); if (rtp_parameters_.encodings[i].min_bitrate_bps) { encoder_config.simulcast_layers[i].min_bitrate_bps = *rtp_parameters_.encodings[i].min_bitrate_bps; @@ -2530,6 +2521,8 @@ WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig( encoder_config.simulcast_layers[i].num_temporal_layers = *rtp_parameters_.encodings[i].num_temporal_layers; } + encoder_config.simulcast_layers[i].requested_resolution = + rtp_parameters_.encodings[i].requested_resolution; } encoder_config.legacy_conference_mode = parameters_.conference_mode; @@ -2539,11 +2532,12 @@ WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig( (parameters_.config.rtp.ssrcs.size() == 1 || NumActiveStreams(rtp_parameters_) == 1); + // Ensure frame dropping is always enabled. 
+ encoder_config.frame_drop_enabled = true; + int max_qp = kDefaultQpMax; codec.GetParam(kCodecParamMaxQuantization, &max_qp); - encoder_config.video_stream_factory = - rtc::make_ref_counted( - codec.name, max_qp, is_screencast, parameters_.conference_mode); + encoder_config.max_qp = max_qp; return encoder_config; } @@ -2621,6 +2615,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( common_info.quality_limitation_resolution_changes = stats.quality_limitation_resolution_changes; common_info.encoder_implementation_name = stats.encoder_implementation_name; + common_info.target_bitrate = stats.target_media_bitrate_bps; common_info.ssrc_groups = ssrc_groups_; common_info.frames = stats.frames; common_info.framerate_input = stats.input_frame_rate; @@ -2630,12 +2625,21 @@ WebRtcVideoChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( common_info.content_type = stats.content_type; common_info.aggregated_framerate_sent = stats.encode_frame_rate; common_info.aggregated_huge_frames_sent = stats.huge_frames_sent; + common_info.power_efficient_encoder = stats.power_efficient_encoder; // If we don't have any substreams, get the remaining metrics from `stats`. // Otherwise, these values are obtained from `sub_stream` below. if (stats.substreams.empty()) { for (uint32_t ssrc : parameters_.config.rtp.ssrcs) { common_info.add_ssrc(ssrc); + auto encoding_it = std::find_if( + rtp_parameters_.encodings.begin(), rtp_parameters_.encodings.end(), + [&ssrc](const webrtc::RtpEncodingParameters& parameters) { + return parameters.ssrc && parameters.ssrc == ssrc; + }); + if (encoding_it != rtp_parameters_.encodings.end()) { + common_info.active = encoding_it->active; + } } common_info.framerate_sent = stats.encode_frame_rate; common_info.frames_encoded = stats.frames_encoded; @@ -2653,6 +2657,15 @@ WebRtcVideoChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( auto info = common_info; info.add_ssrc(pair.first); info.rid = parameters_.config.rtp.GetRidForSsrc(pair.first); + // Search the associated encoding by SSRC. 
+ auto encoding_it = std::find_if( + rtp_parameters_.encodings.begin(), rtp_parameters_.encodings.end(), + [&pair](const webrtc::RtpEncodingParameters& parameters) { + return parameters.ssrc && pair.first == *parameters.ssrc; + }); + if (encoding_it != rtp_parameters_.encodings.end()) { + info.active = encoding_it->active; + } auto stream_stats = pair.second; RTC_DCHECK_EQ(stream_stats.type, webrtc::VideoSendStream::StreamStats::StreamType::kMedia); @@ -2661,7 +2674,8 @@ WebRtcVideoChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( stream_stats.rtp_stats.transmitted.header_bytes + stream_stats.rtp_stats.transmitted.padding_bytes; info.packets_sent = stream_stats.rtp_stats.transmitted.packets; - info.total_packet_send_delay_ms += stream_stats.total_packet_send_delay_ms; + info.total_packet_send_delay += + stream_stats.rtp_stats.transmitted.total_packet_delay; info.send_frame_width = stream_stats.width; info.send_frame_height = stream_stats.height; info.key_frames_encoded = stream_stats.frame_counts.key_frames; @@ -2715,7 +2729,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::GetAggregatedVideoSenderInfo( info.header_and_padding_bytes_sent += infos[i].header_and_padding_bytes_sent; info.packets_sent += infos[i].packets_sent; - info.total_packet_send_delay_ms += infos[i].total_packet_send_delay_ms; + info.total_packet_send_delay += infos[i].total_packet_send_delay; info.retransmitted_bytes_sent += infos[i].retransmitted_bytes_sent; info.retransmitted_packets_sent += infos[i].retransmitted_packets_sent; info.packets_lost += infos[i].packets_lost; @@ -2803,19 +2817,32 @@ void WebRtcVideoChannel::WebRtcVideoSendStream::RecreateWebRtcStream() { parameters_.encoder_config.encoder_specific_settings = NULL; + // Calls stream_->UpdateActiveSimulcastLayers() to start the VideoSendStream + // if necessary conditions are met. + UpdateSendState(); + + // Attach the source after starting the send stream to prevent frames from + // being injected into a not-yet initialized video stream encoder. if (source_) { stream_->SetSource(source_, GetDegradationPreference()); } +} - // Call stream_->Start() if necessary conditions are met. - UpdateSendState(); +void WebRtcVideoChannel::WebRtcVideoSendStream::GenerateKeyFrame() { + RTC_DCHECK_RUN_ON(&thread_checker_); + if (stream_ != NULL) { + stream_->GenerateKeyFrame(); + } else { + RTC_LOG(LS_WARNING) + << "Absent send stream; ignoring request to generate keyframe."; + } } WebRtcVideoChannel::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream( WebRtcVideoChannel* channel, webrtc::Call* call, const StreamParams& sp, - webrtc::VideoReceiveStream::Config config, + webrtc::VideoReceiveStreamInterface::Config config, bool default_stream, const std::vector& recv_codecs, const webrtc::FlexfecReceiveStream::Config& flexfec_config) @@ -2831,10 +2858,33 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream( first_frame_timestamp_(-1), estimated_remote_start_ntp_time_ms_(0) { RTC_DCHECK(config_.decoder_factory); + RTC_DCHECK(config_.decoders.empty()) + << "Decoder info is supplied via `recv_codecs`"; + + ExtractCodecInformation(recv_codecs, config_.rtp.rtx_associated_payload_types, + config_.rtp.raw_payload_types, config_.decoders); + const VideoCodecSettings& codec = recv_codecs.front(); + config_.rtp.ulpfec_payload_type = codec.ulpfec.ulpfec_payload_type; + config_.rtp.red_payload_type = codec.ulpfec.red_payload_type; + config_.rtp.lntf.enabled = HasLntf(codec.codec); + config_.rtp.nack.rtp_history_ms = HasNack(codec.codec) ?
kNackHistoryMs : 0; + if (codec.rtx_time != -1 && config_.rtp.nack.rtp_history_ms != 0) { + config_.rtp.nack.rtp_history_ms = codec.rtx_time; + } + + config_.rtp.rtcp_xr.receiver_reference_time_report = HasRrtr(codec.codec); + + if (codec.ulpfec.red_rtx_payload_type != -1) { + config_.rtp + .rtx_associated_payload_types[codec.ulpfec.red_rtx_payload_type] = + codec.ulpfec.red_payload_type; + } + config_.renderer = this; - ConfigureCodecs(recv_codecs); flexfec_config_.payload_type = flexfec_config.payload_type; - RecreateWebRtcVideoStream(); + + CreateReceiveStream(); + StartReceiveStream(); } WebRtcVideoChannel::WebRtcVideoReceiveStream::~WebRtcVideoReceiveStream() { @@ -2843,6 +2893,17 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::~WebRtcVideoReceiveStream() { call_->DestroyFlexfecReceiveStream(flexfec_stream_); } +webrtc::VideoReceiveStreamInterface& +WebRtcVideoChannel::WebRtcVideoReceiveStream::stream() { + RTC_DCHECK(stream_); + return *stream_; +} + +webrtc::FlexfecReceiveStream* +WebRtcVideoChannel::WebRtcVideoReceiveStream::flexfec_stream() { + return flexfec_stream_; +} + const std::vector& WebRtcVideoChannel::WebRtcVideoReceiveStream::GetSsrcs() const { return stream_params_.ssrcs; @@ -2872,60 +2933,54 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::GetRtpParameters() const { return rtp_parameters; } -bool WebRtcVideoChannel::WebRtcVideoReceiveStream::ConfigureCodecs( +bool WebRtcVideoChannel::WebRtcVideoReceiveStream::ReconfigureCodecs( const std::vector& recv_codecs) { + RTC_DCHECK(stream_); RTC_DCHECK(!recv_codecs.empty()); std::map rtx_associated_payload_types; std::set raw_payload_types; - std::vector decoders; - for (const auto& recv_codec : recv_codecs) { - decoders.emplace_back( - webrtc::SdpVideoFormat(recv_codec.codec.name, recv_codec.codec.params), - recv_codec.codec.id); - rtx_associated_payload_types.insert( - {recv_codec.rtx_payload_type, recv_codec.codec.id}); - if (recv_codec.codec.packetization == kPacketizationParamRaw) { - raw_payload_types.insert(recv_codec.codec.id); - } - } - - bool recreate_needed = (stream_ == nullptr); + std::vector decoders; + ExtractCodecInformation(recv_codecs, rtx_associated_payload_types, + raw_payload_types, decoders); const auto& codec = recv_codecs.front(); - if (config_.rtp.ulpfec_payload_type != codec.ulpfec.ulpfec_payload_type) { - config_.rtp.ulpfec_payload_type = codec.ulpfec.ulpfec_payload_type; - recreate_needed = true; - } - if (config_.rtp.red_payload_type != codec.ulpfec.red_payload_type) { + if (config_.rtp.red_payload_type != codec.ulpfec.red_payload_type || + config_.rtp.ulpfec_payload_type != codec.ulpfec.ulpfec_payload_type) { + config_.rtp.ulpfec_payload_type = codec.ulpfec.ulpfec_payload_type; config_.rtp.red_payload_type = codec.ulpfec.red_payload_type; - recreate_needed = true; + stream_->SetProtectionPayloadTypes(config_.rtp.red_payload_type, + config_.rtp.ulpfec_payload_type); } const bool has_lntf = HasLntf(codec.codec); if (config_.rtp.lntf.enabled != has_lntf) { config_.rtp.lntf.enabled = has_lntf; - recreate_needed = true; + stream_->SetLossNotificationEnabled(has_lntf); } + int new_history_ms = config_.rtp.nack.rtp_history_ms; const int rtp_history_ms = HasNack(codec.codec) ? kNackHistoryMs : 0; if (rtp_history_ms != config_.rtp.nack.rtp_history_ms) { - config_.rtp.nack.rtp_history_ms = rtp_history_ms; - recreate_needed = true; + new_history_ms = rtp_history_ms; } // The rtx-time parameter can be used to override the hardcoded default for // the NACK buffer length. 
- if (codec.rtx_time != -1 && config_.rtp.nack.rtp_history_ms != 0) { - config_.rtp.nack.rtp_history_ms = codec.rtx_time; - recreate_needed = true; + if (codec.rtx_time != -1 && new_history_ms != 0) { + new_history_ms = codec.rtx_time; + } + + if (config_.rtp.nack.rtp_history_ms != new_history_ms) { + config_.rtp.nack.rtp_history_ms = new_history_ms; + stream_->SetNackHistory(webrtc::TimeDelta::Millis(new_history_ms)); } const bool has_rtr = HasRrtr(codec.codec); if (has_rtr != config_.rtp.rtcp_xr.receiver_reference_time_report) { config_.rtp.rtcp_xr.receiver_reference_time_report = has_rtr; - recreate_needed = true; + stream_->SetRtcpXr(config_.rtp.rtcp_xr); } if (codec.ulpfec.red_rtx_payload_type != -1) { @@ -2935,10 +2990,12 @@ bool WebRtcVideoChannel::WebRtcVideoReceiveStream::ConfigureCodecs( if (config_.rtp.rtx_associated_payload_types != rtx_associated_payload_types) { + stream_->SetAssociatedPayloadTypes(rtx_associated_payload_types); rtx_associated_payload_types.swap(config_.rtp.rtx_associated_payload_types); - recreate_needed = true; } + bool recreate_needed = false; + if (raw_payload_types != config_.rtp.raw_payload_types) { raw_payload_types.swap(config_.rtp.raw_payload_types); recreate_needed = true; @@ -2952,108 +3009,118 @@ bool WebRtcVideoChannel::WebRtcVideoReceiveStream::ConfigureCodecs( return recreate_needed; } -void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetLocalSsrc( - uint32_t local_ssrc) { - // TODO(pbos): Consider turning this sanity check into a RTC_DCHECK. You - // should not be able to create a sender with the same SSRC as a receiver, but - // right now this can't be done due to unittests depending on receiving what - // they are sending from the same MediaChannel. - if (local_ssrc == config_.rtp.local_ssrc) { - RTC_DLOG(LS_INFO) << "Ignoring call to SetLocalSsrc because parameters are " - "unchanged; local_ssrc=" - << local_ssrc; - return; - } - - config_.rtp.local_ssrc = local_ssrc; - flexfec_config_.rtp.local_ssrc = local_ssrc; - RTC_LOG(LS_INFO) - << "RecreateWebRtcVideoStream (recv) because of SetLocalSsrc; local_ssrc=" - << local_ssrc; - RecreateWebRtcVideoStream(); -} - void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetFeedbackParameters( bool lntf_enabled, bool nack_enabled, bool transport_cc_enabled, webrtc::RtcpMode rtcp_mode, int rtx_time) { - int nack_history_ms = - nack_enabled ? rtx_time != -1 ? rtx_time : kNackHistoryMs : 0; - if (config_.rtp.lntf.enabled == lntf_enabled && - config_.rtp.nack.rtp_history_ms == nack_history_ms && - config_.rtp.transport_cc == transport_cc_enabled && - config_.rtp.rtcp_mode == rtcp_mode) { - RTC_LOG(LS_INFO) - << "Ignoring call to SetFeedbackParameters because parameters are " - "unchanged; lntf=" - << lntf_enabled << ", nack=" << nack_enabled - << ", transport_cc=" << transport_cc_enabled - << ", rtx_time=" << rtx_time; - return; + RTC_DCHECK(stream_); + + if (config_.rtp.rtcp_mode != rtcp_mode) { + config_.rtp.rtcp_mode = rtcp_mode; + stream_->SetRtcpMode(rtcp_mode); + + flexfec_config_.rtcp_mode = rtcp_mode; + if (flexfec_stream_) { + flexfec_stream_->SetRtcpMode(rtcp_mode); + } + } + + if (config_.rtp.transport_cc != transport_cc_enabled) { + config_.rtp.transport_cc = transport_cc_enabled; + stream_->SetTransportCc(transport_cc_enabled); + // TODO(brandtr): We should be spec-compliant and set `transport_cc` here + // based on the rtcp-fb for the FlexFEC codec, not the media codec. 
+ flexfec_config_.rtp.transport_cc = transport_cc_enabled; + if (flexfec_stream_) { + flexfec_stream_->SetTransportCc(transport_cc_enabled); + } } + config_.rtp.lntf.enabled = lntf_enabled; + stream_->SetLossNotificationEnabled(lntf_enabled); + + int nack_history_ms = + nack_enabled ? rtx_time != -1 ? rtx_time : kNackHistoryMs : 0; config_.rtp.nack.rtp_history_ms = nack_history_ms; - config_.rtp.transport_cc = transport_cc_enabled; - config_.rtp.rtcp_mode = rtcp_mode; - // TODO(brandtr): We should be spec-compliant and set `transport_cc` here - // based on the rtcp-fb for the FlexFEC codec, not the media codec. - flexfec_config_.rtp.transport_cc = config_.rtp.transport_cc; - flexfec_config_.rtcp_mode = config_.rtp.rtcp_mode; - RTC_LOG(LS_INFO) << "RecreateWebRtcVideoStream (recv) because of " - "SetFeedbackParameters; nack=" - << nack_enabled << ", transport_cc=" << transport_cc_enabled; - RecreateWebRtcVideoStream(); + stream_->SetNackHistory(webrtc::TimeDelta::Millis(nack_history_ms)); +} + +void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetFlexFecPayload( + int payload_type) { + // TODO(bugs.webrtc.org/11993, tommi): See if it is better to always have a + // flexfec stream object around and instead of recreating the video stream, + // reconfigure the flexfec object from within the rtp callback (soon to be on + // the network thread). + if (flexfec_stream_) { + if (flexfec_stream_->payload_type() == payload_type) { + RTC_DCHECK_EQ(flexfec_config_.payload_type, payload_type); + return; + } + + flexfec_config_.payload_type = payload_type; + flexfec_stream_->SetPayloadType(payload_type); + + if (payload_type == -1) { + stream_->SetFlexFecProtection(nullptr); + call_->DestroyFlexfecReceiveStream(flexfec_stream_); + flexfec_stream_ = nullptr; + } + } else if (payload_type != -1) { + flexfec_config_.payload_type = payload_type; + if (flexfec_config_.IsCompleteAndEnabled()) { + flexfec_stream_ = call_->CreateFlexfecReceiveStream(flexfec_config_); + stream_->SetFlexFecProtection(flexfec_stream_); + } + } else { + // Noop. No flexfec stream exists and "new" payload_type == -1. + RTC_DCHECK(!flexfec_config_.IsCompleteAndEnabled()); + flexfec_config_.payload_type = payload_type; + } } void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetRecvParameters( const ChangedRecvParameters& params) { + RTC_DCHECK(stream_); bool video_needs_recreation = false; if (params.codec_settings) { - video_needs_recreation = ConfigureCodecs(*params.codec_settings); + video_needs_recreation = ReconfigureCodecs(*params.codec_settings); } if (params.rtp_header_extensions) { if (config_.rtp.extensions != *params.rtp_header_extensions) { config_.rtp.extensions = *params.rtp_header_extensions; - if (stream_) { - stream_->SetRtpExtensions(config_.rtp.extensions); - } else { - video_needs_recreation = true; - } + stream_->SetRtpExtensions(config_.rtp.extensions); } if (flexfec_config_.rtp.extensions != *params.rtp_header_extensions) { flexfec_config_.rtp.extensions = *params.rtp_header_extensions; if (flexfec_stream_) { flexfec_stream_->SetRtpExtensions(flexfec_config_.rtp.extensions); - } else if (flexfec_config_.IsCompleteAndEnabled()) { - video_needs_recreation = true; } } } - if (params.flexfec_payload_type) { - flexfec_config_.payload_type = *params.flexfec_payload_type; - // TODO(tommi): See if it is better to always have a flexfec stream object - // configured and instead of recreating the video stream, reconfigure the - // flexfec object from within the rtp callback (soon to be on the network - // thread). 
- if (flexfec_stream_ || flexfec_config_.IsCompleteAndEnabled()) - video_needs_recreation = true; - } + + if (params.flexfec_payload_type) + SetFlexFecPayload(*params.flexfec_payload_type); + if (video_needs_recreation) { - RecreateWebRtcVideoStream(); + RecreateReceiveStream(); + } else { + RTC_DLOG_F(LS_INFO) << "No receive stream recreate needed."; } } -void WebRtcVideoChannel::WebRtcVideoReceiveStream::RecreateWebRtcVideoStream() { +void WebRtcVideoChannel::WebRtcVideoReceiveStream::RecreateReceiveStream() { + RTC_DCHECK(stream_); absl::optional base_minimum_playout_delay_ms; - absl::optional recording_state; + absl::optional + recording_state; if (stream_) { base_minimum_playout_delay_ms = stream_->GetBaseMinimumPlayoutDelayMs(); recording_state = stream_->SetAndGetRecordingState( - webrtc::VideoReceiveStream::RecordingState(), + webrtc::VideoReceiveStreamInterface::RecordingState(), /*generate_key_frame=*/false); call_->DestroyVideoReceiveStream(stream_); stream_ = nullptr; @@ -3064,14 +3131,8 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::RecreateWebRtcVideoStream() { flexfec_stream_ = nullptr; } - if (flexfec_config_.IsCompleteAndEnabled()) { - flexfec_stream_ = call_->CreateFlexfecReceiveStream(flexfec_config_); - } + CreateReceiveStream(); - webrtc::VideoReceiveStream::Config config = config_.Copy(); - config.rtp.protected_by_flexfec = (flexfec_stream_ != nullptr); - config.rtp.packet_sink_ = flexfec_stream_; - stream_ = call_->CreateVideoReceiveStream(std::move(config)); if (base_minimum_playout_delay_ms) { stream_->SetBaseMinimumPlayoutDelayMs( base_minimum_playout_delay_ms.value()); @@ -3081,8 +3142,24 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::RecreateWebRtcVideoStream() { /*generate_key_frame=*/false); } - stream_->Start(); + StartReceiveStream(); +} + +void WebRtcVideoChannel::WebRtcVideoReceiveStream::CreateReceiveStream() { + RTC_DCHECK(!stream_); + RTC_DCHECK(!flexfec_stream_); + if (flexfec_config_.IsCompleteAndEnabled()) { + flexfec_stream_ = call_->CreateFlexfecReceiveStream(flexfec_config_); + } + + webrtc::VideoReceiveStreamInterface::Config config = config_.Copy(); + config.rtp.protected_by_flexfec = (flexfec_stream_ != nullptr); + config.rtp.packet_sink_ = flexfec_stream_; + stream_ = call_->CreateVideoReceiveStream(std::move(config)); +} +void WebRtcVideoChannel::WebRtcVideoReceiveStream::StartReceiveStream() { + stream_->Start(); if (IsEnabled(call_->trials(), "WebRTC-Video-BufferPacketsWithUnknownSsrc")) { channel_->BackfillBufferedPackets(stream_params_.ssrcs); } @@ -3100,7 +3177,8 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::OnFrame( estimated_remote_start_ntp_time_ms_ = frame.ntp_time_ms() - elapsed_time_ms; if (sink_ == NULL) { - RTC_LOG(LS_WARNING) << "VideoReceiveStream not connected to a VideoSink."; + RTC_LOG(LS_WARNING) + << "VideoReceiveStreamInterface not connected to a VideoSink."; return; } @@ -3139,27 +3217,22 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetSink( sink_ = sink; } -std::string -WebRtcVideoChannel::WebRtcVideoReceiveStream::GetCodecNameFromPayloadType( - int payload_type) { - for (const webrtc::VideoReceiveStream::Decoder& decoder : config_.decoders) { - if (decoder.payload_type == payload_type) { - return decoder.video_format.name; - } - } - return ""; -} - VideoReceiverInfo WebRtcVideoChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo( bool log_stats) { VideoReceiverInfo info; info.ssrc_groups = stream_params_.ssrc_groups; info.add_ssrc(config_.rtp.remote_ssrc); - 
webrtc::VideoReceiveStream::Stats stats = stream_->GetStats(); + webrtc::VideoReceiveStreamInterface::Stats stats = stream_->GetStats(); info.decoder_implementation_name = stats.decoder_implementation_name; + info.power_efficient_decoder = stats.power_efficient_decoder; if (stats.current_payload_type != -1) { info.codec_payload_type = stats.current_payload_type; + auto decoder_it = absl::c_find_if(config_.decoders, [&](const auto& d) { + return d.payload_type == stats.current_payload_type; + }); + if (decoder_it != config_.decoders.end()) + info.codec_name = decoder_it->video_format.name; } info.payload_bytes_rcvd = stats.rtp_stats.packet_counter.payload_bytes; info.header_and_padding_bytes_rcvd = @@ -3196,7 +3269,11 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo( info.key_frames_decoded = stats.frame_counts.key_frames; info.frames_rendered = stats.frames_rendered; info.qp_sum = stats.qp_sum; - info.total_decode_time_ms = stats.total_decode_time_ms; + info.total_decode_time = stats.total_decode_time; + info.total_processing_delay = stats.total_processing_delay; + info.total_assembly_time = stats.total_assembly_time; + info.frames_assembled_from_multiple_packets = + stats.frames_assembled_from_multiple_packets; info.last_packet_received_timestamp_ms = stats.rtp_stats.last_packet_received_timestamp_ms; info.estimated_playout_ntp_timestamp_ms = @@ -3215,8 +3292,6 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo( info.content_type = stats.content_type; - info.codec_name = GetCodecNameFromPayloadType(stats.current_payload_type); - info.firs_sent = stats.rtcp_packet_type_counts.fir_packets; info.plis_sent = stats.rtcp_packet_type_counts.pli_packets; info.nacks_sent = stats.rtcp_packet_type_counts.nack_packets; @@ -3235,7 +3310,8 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream:: std::function callback) { if (stream_) { stream_->SetAndGetRecordingState( - webrtc::VideoReceiveStream::RecordingState(std::move(callback)), + webrtc::VideoReceiveStreamInterface::RecordingState( + std::move(callback)), /*generate_key_frame=*/true); } else { RTC_LOG(LS_ERROR) << "Absent receive stream; ignoring setting encoded " @@ -3247,7 +3323,7 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream:: ClearRecordableEncodedFrameCallback() { if (stream_) { stream_->SetAndGetRecordingState( - webrtc::VideoReceiveStream::RecordingState(), + webrtc::VideoReceiveStreamInterface::RecordingState(), /*generate_key_frame=*/false); } else { RTC_LOG(LS_ERROR) << "Absent receive stream; ignoring clearing encoded " @@ -3273,6 +3349,13 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream:: stream_->SetDepacketizerToDecoderFrameTransformer(frame_transformer); } +void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetLocalSsrc(uint32_t ssrc) { + config_.rtp.local_ssrc = ssrc; + call_->OnLocalSsrcUpdated(stream(), ssrc); + if (flexfec_stream_) + call_->OnLocalSsrcUpdated(*flexfec_stream_, ssrc); +} + WebRtcVideoChannel::VideoCodecSettings::VideoCodecSettings() : flexfec_payload_type(-1), rtx_payload_type(-1), rtx_time(-1) {} @@ -3479,11 +3562,11 @@ void WebRtcVideoChannel::ClearRecordableEncodedFrameCallback(uint32_t ssrc) { } } -void WebRtcVideoChannel::GenerateKeyFrame(uint32_t ssrc) { +void WebRtcVideoChannel::RequestRecvKeyFrame(uint32_t ssrc) { RTC_DCHECK_RUN_ON(&thread_checker_); WebRtcVideoReceiveStream* stream = FindReceiveStream(ssrc); if (stream) { - stream->GenerateKeyFrame(); + return stream->GenerateKeyFrame(); } else { RTC_LOG(LS_ERROR) << "Absent receive stream; ignoring key 
frame generation for ssrc " @@ -3491,6 +3574,18 @@ void WebRtcVideoChannel::GenerateKeyFrame(uint32_t ssrc) { } } +void WebRtcVideoChannel::GenerateSendKeyFrame(uint32_t ssrc) { + RTC_DCHECK_RUN_ON(&thread_checker_); + auto it = send_streams_.find(ssrc); + if (it != send_streams_.end()) { + it->second->GenerateKeyFrame(); + } else { + RTC_LOG(LS_ERROR) + << "Absent send stream; ignoring key frame generation for ssrc " + << ssrc; + } +} + void WebRtcVideoChannel::SetEncoderToPacketizerFrameTransformer( uint32_t ssrc, rtc::scoped_refptr frame_transformer) { @@ -3521,275 +3616,4 @@ void WebRtcVideoChannel::SetDepacketizerToDecoderFrameTransformer( } } -// TODO(bugs.webrtc.org/8785): Consider removing max_qp as member of -// EncoderStreamFactory and instead set this value individually for each stream -// in the VideoEncoderConfig.simulcast_layers. -EncoderStreamFactory::EncoderStreamFactory( - std::string codec_name, - int max_qp, - bool is_screenshare, - bool conference_mode, - const webrtc::WebRtcKeyValueConfig* trials) - - : codec_name_(codec_name), - max_qp_(max_qp), - is_screenshare_(is_screenshare), - conference_mode_(conference_mode), - trials_(trials ? *trials : fallback_trials_) {} - -std::vector EncoderStreamFactory::CreateEncoderStreams( - int width, - int height, - const webrtc::VideoEncoderConfig& encoder_config) { - RTC_DCHECK_GT(encoder_config.number_of_streams, 0); - RTC_DCHECK_GE(encoder_config.simulcast_layers.size(), - encoder_config.number_of_streams); - - const absl::optional experimental_min_bitrate = - GetExperimentalMinVideoBitrate(encoder_config.codec_type); - - if (encoder_config.number_of_streams > 1 || - ((absl::EqualsIgnoreCase(codec_name_, kVp8CodecName) || - absl::EqualsIgnoreCase(codec_name_, kH264CodecName)) && - is_screenshare_ && conference_mode_)) { - return CreateSimulcastOrConferenceModeScreenshareStreams( - width, height, encoder_config, experimental_min_bitrate); - } - - return CreateDefaultVideoStreams(width, height, encoder_config, - experimental_min_bitrate); -} - -std::vector -EncoderStreamFactory::CreateDefaultVideoStreams( - int width, - int height, - const webrtc::VideoEncoderConfig& encoder_config, - const absl::optional& experimental_min_bitrate) const { - std::vector layers; - - // For unset max bitrates set default bitrate for non-simulcast. - int max_bitrate_bps = - (encoder_config.max_bitrate_bps > 0) - ? encoder_config.max_bitrate_bps - : GetMaxDefaultVideoBitrateKbps(width, height, is_screenshare_) * - 1000; - - int min_bitrate_bps = - experimental_min_bitrate - ? rtc::saturated_cast(experimental_min_bitrate->bps()) - : webrtc::kDefaultMinVideoBitrateBps; - if (encoder_config.simulcast_layers[0].min_bitrate_bps > 0) { - // Use set min bitrate. - min_bitrate_bps = encoder_config.simulcast_layers[0].min_bitrate_bps; - // If only min bitrate is configured, make sure max is above min. - if (encoder_config.max_bitrate_bps <= 0) - max_bitrate_bps = std::max(min_bitrate_bps, max_bitrate_bps); - } - int max_framerate = (encoder_config.simulcast_layers[0].max_framerate > 0) - ? encoder_config.simulcast_layers[0].max_framerate - : kDefaultVideoMaxFramerate; - - webrtc::VideoStream layer; - layer.width = width; - layer.height = height; - layer.max_framerate = max_framerate; - - if (encoder_config.simulcast_layers[0].scale_resolution_down_by > 1.) 
{ - layer.width = ScaleDownResolution( - layer.width, - encoder_config.simulcast_layers[0].scale_resolution_down_by, - kMinLayerSize); - layer.height = ScaleDownResolution( - layer.height, - encoder_config.simulcast_layers[0].scale_resolution_down_by, - kMinLayerSize); - } - - // In the case that the application sets a max bitrate that's lower than the - // min bitrate, we adjust it down (see bugs.webrtc.org/9141). - layer.min_bitrate_bps = std::min(min_bitrate_bps, max_bitrate_bps); - if (encoder_config.simulcast_layers[0].target_bitrate_bps <= 0) { - layer.target_bitrate_bps = max_bitrate_bps; - } else { - layer.target_bitrate_bps = - encoder_config.simulcast_layers[0].target_bitrate_bps; - } - layer.max_bitrate_bps = max_bitrate_bps; - layer.max_qp = max_qp_; - layer.bitrate_priority = encoder_config.bitrate_priority; - - if (absl::EqualsIgnoreCase(codec_name_, kVp9CodecName)) { - RTC_DCHECK(encoder_config.encoder_specific_settings); - // Use VP9 SVC layering from codec settings which might be initialized - // though field trial in ConfigureVideoEncoderSettings. - webrtc::VideoCodecVP9 vp9_settings; - encoder_config.encoder_specific_settings->FillVideoCodecVp9(&vp9_settings); - layer.num_temporal_layers = vp9_settings.numberOfTemporalLayers; - } - - if (IsTemporalLayersSupported(codec_name_)) { - // Use configured number of temporal layers if set. - if (encoder_config.simulcast_layers[0].num_temporal_layers) { - layer.num_temporal_layers = - *encoder_config.simulcast_layers[0].num_temporal_layers; - } - } - layer.scalability_mode = encoder_config.simulcast_layers[0].scalability_mode; - layers.push_back(layer); - return layers; -} - -std::vector -EncoderStreamFactory::CreateSimulcastOrConferenceModeScreenshareStreams( - int width, - int height, - const webrtc::VideoEncoderConfig& encoder_config, - const absl::optional& experimental_min_bitrate) const { - std::vector layers; - - const bool temporal_layers_supported = - absl::EqualsIgnoreCase(codec_name_, kVp8CodecName) || - absl::EqualsIgnoreCase(codec_name_, kH264CodecName); - // Use legacy simulcast screenshare if conference mode is explicitly enabled - // or use the regular simulcast configuration path which is generic. - layers = GetSimulcastConfig(FindRequiredActiveLayers(encoder_config), - encoder_config.number_of_streams, width, height, - encoder_config.bitrate_priority, max_qp_, - is_screenshare_ && conference_mode_, - temporal_layers_supported, trials_); - // Allow an experiment to override the minimum bitrate for the lowest - // spatial layer. The experiment's configuration has the lowest priority. - if (experimental_min_bitrate) { - layers[0].min_bitrate_bps = - rtc::saturated_cast(experimental_min_bitrate->bps()); - } - // Update the active simulcast layers and configured bitrates. - bool is_highest_layer_max_bitrate_configured = false; - const bool has_scale_resolution_down_by = absl::c_any_of( - encoder_config.simulcast_layers, [](const webrtc::VideoStream& layer) { - return layer.scale_resolution_down_by != -1.; - }); - - bool default_scale_factors_used = true; - if (has_scale_resolution_down_by) { - default_scale_factors_used = IsScaleFactorsPowerOfTwo(encoder_config); - } - const bool norm_size_configured = - webrtc::NormalizeSimulcastSizeExperiment::GetBase2Exponent().has_value(); - const int normalized_width = - (default_scale_factors_used || norm_size_configured) && - (width >= kMinLayerSize) - ? 
NormalizeSimulcastSize(width, encoder_config.number_of_streams) - : width; - const int normalized_height = - (default_scale_factors_used || norm_size_configured) && - (height >= kMinLayerSize) - ? NormalizeSimulcastSize(height, encoder_config.number_of_streams) - : height; - for (size_t i = 0; i < layers.size(); ++i) { - layers[i].active = encoder_config.simulcast_layers[i].active; - layers[i].scalability_mode = - encoder_config.simulcast_layers[i].scalability_mode; - // Update with configured num temporal layers if supported by codec. - if (encoder_config.simulcast_layers[i].num_temporal_layers && - IsTemporalLayersSupported(codec_name_)) { - layers[i].num_temporal_layers = - *encoder_config.simulcast_layers[i].num_temporal_layers; - } - if (encoder_config.simulcast_layers[i].max_framerate > 0) { - layers[i].max_framerate = - encoder_config.simulcast_layers[i].max_framerate; - } - if (has_scale_resolution_down_by) { - const double scale_resolution_down_by = std::max( - encoder_config.simulcast_layers[i].scale_resolution_down_by, 1.0); - layers[i].width = ScaleDownResolution( - normalized_width, scale_resolution_down_by, kMinLayerSize); - layers[i].height = ScaleDownResolution( - normalized_height, scale_resolution_down_by, kMinLayerSize); - } - // Update simulcast bitrates with configured min and max bitrate. - if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0) { - layers[i].min_bitrate_bps = - encoder_config.simulcast_layers[i].min_bitrate_bps; - } - if (encoder_config.simulcast_layers[i].max_bitrate_bps > 0) { - layers[i].max_bitrate_bps = - encoder_config.simulcast_layers[i].max_bitrate_bps; - } - if (encoder_config.simulcast_layers[i].target_bitrate_bps > 0) { - layers[i].target_bitrate_bps = - encoder_config.simulcast_layers[i].target_bitrate_bps; - } - if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0 && - encoder_config.simulcast_layers[i].max_bitrate_bps > 0) { - // Min and max bitrate are configured. - // Set target to 3/4 of the max bitrate (or to max if below min). - if (encoder_config.simulcast_layers[i].target_bitrate_bps <= 0) - layers[i].target_bitrate_bps = layers[i].max_bitrate_bps * 3 / 4; - if (layers[i].target_bitrate_bps < layers[i].min_bitrate_bps) - layers[i].target_bitrate_bps = layers[i].max_bitrate_bps; - } else if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0) { - // Only min bitrate is configured, make sure target/max are above min. - layers[i].target_bitrate_bps = - std::max(layers[i].target_bitrate_bps, layers[i].min_bitrate_bps); - layers[i].max_bitrate_bps = - std::max(layers[i].max_bitrate_bps, layers[i].min_bitrate_bps); - } else if (encoder_config.simulcast_layers[i].max_bitrate_bps > 0) { - // Only max bitrate is configured, make sure min/target are below max. - // Keep target bitrate if it is set explicitly in encoding config. - // Otherwise set target bitrate to 3/4 of the max bitrate - // or the one calculated from GetSimulcastConfig() which is larger. 
- layers[i].min_bitrate_bps = - std::min(layers[i].min_bitrate_bps, layers[i].max_bitrate_bps); - if (encoder_config.simulcast_layers[i].target_bitrate_bps <= 0) { - layers[i].target_bitrate_bps = std::max( - layers[i].target_bitrate_bps, layers[i].max_bitrate_bps * 3 / 4); - } - layers[i].target_bitrate_bps = std::max( - std::min(layers[i].target_bitrate_bps, layers[i].max_bitrate_bps), - layers[i].min_bitrate_bps); - } - if (i == layers.size() - 1) { - is_highest_layer_max_bitrate_configured = - encoder_config.simulcast_layers[i].max_bitrate_bps > 0; - } - } - if (!is_screenshare_ && !is_highest_layer_max_bitrate_configured && - encoder_config.max_bitrate_bps > 0) { - // No application-configured maximum for the largest layer. - // If there is bitrate leftover, give it to the largest layer. - BoostMaxSimulcastLayer( - webrtc::DataRate::BitsPerSec(encoder_config.max_bitrate_bps), &layers); - } - - // Sort the layers by max_bitrate_bps, they might not always be from - // smallest to biggest - std::vector index(layers.size()); - std::iota(index.begin(), index.end(), 0); - std::stable_sort(index.begin(), index.end(), [&layers](size_t a, size_t b) { - return layers[a].max_bitrate_bps < layers[b].max_bitrate_bps; - }); - - if (!layers[index[0]].active) { - // Adjust min bitrate of the first active layer to allow it to go as low as - // the lowest (now inactive) layer could. - // Otherwise, if e.g. a single HD stream is active, it would have 600kbps - // min bitrate, which would always be allocated to the stream. - // This would lead to congested network, dropped frames and overall bad - // experience. - - const int min_configured_bitrate = layers[index[0]].min_bitrate_bps; - for (size_t i = 0; i < layers.size(); ++i) { - if (layers[index[i]].active) { - layers[index[i]].min_bitrate_bps = min_configured_bitrate; - break; - } - } - } - - return layers; -} - } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h index 90d824a55b..a0150a8589 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h @@ -20,6 +20,7 @@ #include "absl/types/optional.h" #include "api/call/transport.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/transport/field_trial_based_config.h" #include "api/video/video_bitrate_allocator_factory.h" #include "api/video/video_frame.h" @@ -34,7 +35,7 @@ #include "media/engine/unhandled_packets_buffer.h" #include "rtc_base/network_route.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -66,7 +67,8 @@ class UnsignalledSsrcHandler { kDeliverPacket, }; virtual Action OnUnsignalledSsrc(WebRtcVideoChannel* channel, - uint32_t ssrc) = 0; + uint32_t ssrc, + absl::optional rtx_ssrc) = 0; virtual ~UnsignalledSsrcHandler() = default; }; @@ -74,7 +76,9 @@ class UnsignalledSsrcHandler { class DefaultUnsignalledSsrcHandler : public UnsignalledSsrcHandler { public: DefaultUnsignalledSsrcHandler(); - Action OnUnsignalledSsrc(WebRtcVideoChannel* channel, uint32_t ssrc) override; + Action OnUnsignalledSsrc(WebRtcVideoChannel* channel, + uint32_t ssrc, + absl::optional rtx_ssrc) override; rtc::VideoSinkInterface* GetDefaultSink() const; void SetDefaultSink(WebRtcVideoChannel* 
channel, @@ -94,7 +98,7 @@ class WebRtcVideoEngine : public VideoEngineInterface { WebRtcVideoEngine( std::unique_ptr video_encoder_factory, std::unique_ptr video_decoder_factory, - const webrtc::WebRtcKeyValueConfig& trials); + const webrtc::FieldTrialsView& trials); ~WebRtcVideoEngine() override; @@ -106,8 +110,14 @@ class WebRtcVideoEngine : public VideoEngineInterface { webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) override; - std::vector send_codecs() const override; - std::vector recv_codecs() const override; + std::vector send_codecs() const override { + return send_codecs(true); + } + std::vector recv_codecs() const override { + return recv_codecs(true); + } + std::vector send_codecs(bool include_rtx) const override; + std::vector recv_codecs(bool include_rtx) const override; std::vector GetRtpHeaderExtensions() const override; @@ -116,7 +126,7 @@ class WebRtcVideoEngine : public VideoEngineInterface { const std::unique_ptr encoder_factory_; const std::unique_ptr bitrate_allocator_factory_; - const webrtc::WebRtcKeyValueConfig& trials_; + const webrtc::FieldTrialsView& trials_; }; class WebRtcVideoChannel : public VideoMediaChannel, @@ -167,7 +177,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, int64_t packet_time_us) override; void OnPacketSent(const rtc::SentPacket& sent_packet) override; void OnReadyToSend(bool ready) override; - void OnNetworkRouteChanged(const std::string& transport_name, + void OnNetworkRouteChanged(absl::string_view transport_name, const rtc::NetworkRoute& network_route) override; void SetInterface(NetworkInterface* iface) override; @@ -185,6 +195,12 @@ class WebRtcVideoChannel : public VideoMediaChannel, rtc::scoped_refptr frame_encryptor) override; + // note: The encoder_selector object must remain valid for the lifetime of the + // MediaChannel, unless replaced. + void SetEncoderSelector(uint32_t ssrc, + webrtc::VideoEncoderFactory::EncoderSelectorInterface* + encoder_selector) override; + void SetVideoCodecSwitchingEnabled(bool enabled) override; bool SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) override; @@ -224,18 +240,16 @@ class WebRtcVideoChannel : public VideoMediaChannel, // Implements webrtc::EncoderSwitchRequestCallback. void RequestEncoderFallback() override; - - // TODO(bugs.webrtc.org/11341) : Remove this version of RequestEncoderSwitch. - void RequestEncoderSwitch( - const EncoderSwitchRequestCallback::Config& conf) override; - void RequestEncoderSwitch(const webrtc::SdpVideoFormat& format) override; + void RequestEncoderSwitch(const webrtc::SdpVideoFormat& format, + bool allow_default_fallback) override; void SetRecordableEncodedFrameCallback( uint32_t ssrc, std::function callback) override; void ClearRecordableEncodedFrameCallback(uint32_t ssrc) override; - void GenerateKeyFrame(uint32_t ssrc) override; + void RequestRecvKeyFrame(uint32_t ssrc) override; + void GenerateSendKeyFrame(uint32_t ssrc) override; void SetEncoderToPacketizerFrameTransformer( uint32_t ssrc, @@ -249,8 +263,8 @@ class WebRtcVideoChannel : public VideoMediaChannel, private: class WebRtcVideoReceiveStream; - // Finds VideoReceiveStream corresponding to ssrc. Aware of unsignalled ssrc - // handling. + // Finds VideoReceiveStreamInterface corresponding to ssrc. Aware of + // unsignalled ssrc handling. 
WebRtcVideoReceiveStream* FindReceiveStream(uint32_t ssrc) RTC_EXCLUSIVE_LOCKS_REQUIRED(thread_checker_); @@ -292,7 +306,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, absl::optional> rtp_header_extensions; // Keep track of the FlexFEC payload type separately from `codec_settings`. // This allows us to recreate the FlexfecReceiveStream separately from the - // VideoReceiveStream when the FlexFEC payload type is changed. + // VideoReceiveStreamInterface when the FlexFEC payload type is changed. absl::optional flexfec_payload_type; }; @@ -305,7 +319,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, RTC_EXCLUSIVE_LOCKS_REQUIRED(thread_checker_); void ConfigureReceiverRtp( - webrtc::VideoReceiveStream::Config* config, + webrtc::VideoReceiveStreamInterface::Config* config, webrtc::FlexfecReceiveStream::Config* flexfec_config, const StreamParams& sp) const RTC_EXCLUSIVE_LOCKS_REQUIRED(thread_checker_); @@ -319,6 +333,19 @@ class WebRtcVideoChannel : public VideoMediaChannel, static std::string CodecSettingsVectorToString( const std::vector& codecs); + // Populates `rtx_associated_payload_types`, `raw_payload_types` and + // `decoders` based on codec settings provided by `recv_codecs`. + // `recv_codecs` must be non-empty and all other parameters must be empty. + static void ExtractCodecInformation( + rtc::ArrayView recv_codecs, + std::map& rtx_associated_payload_types, + std::set& raw_payload_types, + std::vector& decoders); + + // Called when the local ssrc changes. Sets `rtcp_receiver_report_ssrc_` and + // updates the receive streams. + void SetReceiverReportSsrc(uint32_t ssrc) RTC_RUN_ON(&thread_checker_); + // Wrapper for the sender part. class WebRtcVideoSendStream { public: @@ -344,6 +371,12 @@ class WebRtcVideoChannel : public VideoMediaChannel, bool SetVideoSend(const VideoOptions* options, rtc::VideoSourceInterface* source); + // note: The encoder_selector object must remain valid for the lifetime of + // the MediaChannel, unless replaced. + void SetEncoderSelector( + webrtc::VideoEncoderFactory::EncoderSelectorInterface* + encoder_selector); + void SetSend(bool send); const std::vector& GetSsrcs() const; @@ -358,6 +391,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, void SetEncoderToPacketizerFrameTransformer( rtc::scoped_refptr frame_transformer); + void GenerateKeyFrame(); private: // Parameters needed to reconstruct the underlying stream. @@ -396,7 +430,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, webrtc::DegradationPreference GetDegradationPreference() const RTC_EXCLUSIVE_LOCKS_REQUIRED(&thread_checker_); - webrtc::SequenceChecker thread_checker_; + RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker thread_checker_; webrtc::TaskQueueBase* const worker_thread_; const std::vector ssrcs_ RTC_GUARDED_BY(&thread_checker_); const std::vector ssrc_groups_ RTC_GUARDED_BY(&thread_checker_); @@ -427,7 +461,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, }; // Wrapper for the receiver part, contains configs etc. that are needed to - // reconstruct the underlying VideoReceiveStream. + // reconstruct the underlying VideoReceiveStreamInterface. 
class WebRtcVideoReceiveStream : public rtc::VideoSinkInterface { public: @@ -435,12 +469,16 @@ class WebRtcVideoChannel : public VideoMediaChannel, WebRtcVideoChannel* channel, webrtc::Call* call, const StreamParams& sp, - webrtc::VideoReceiveStream::Config config, + webrtc::VideoReceiveStreamInterface::Config config, bool default_stream, const std::vector& recv_codecs, const webrtc::FlexfecReceiveStream::Config& flexfec_config); ~WebRtcVideoReceiveStream(); + webrtc::VideoReceiveStreamInterface& stream(); + // Return value may be nullptr. + webrtc::FlexfecReceiveStream* flexfec_stream(); + const std::vector& GetSsrcs() const; std::vector GetSources(); @@ -448,7 +486,6 @@ class WebRtcVideoChannel : public VideoMediaChannel, // Does not return codecs, they are filled by the owning WebRtcVideoChannel. webrtc::RtpParameters GetRtpParameters() const; - void SetLocalSsrc(uint32_t local_ssrc); // TODO(deadbeef): Move these feedback parameters into the recv parameters. void SetFeedbackParameters(bool lntf_enabled, bool nack_enabled, @@ -480,15 +517,22 @@ class WebRtcVideoChannel : public VideoMediaChannel, rtc::scoped_refptr frame_transformer); + void SetLocalSsrc(uint32_t local_ssrc); + private: - void RecreateWebRtcVideoStream(); + // Attempts to reconfigure an already existing `flexfec_stream_`, create + // one if the configuration is now complete or remove a flexfec stream + // when disabled. + void SetFlexFecPayload(int payload_type); + + void RecreateReceiveStream(); + void CreateReceiveStream(); + void StartReceiveStream(); // Applies a new receive codecs configration to `config_`. Returns true // if the internal stream needs to be reconstructed, or false if no changes // were applied. - bool ConfigureCodecs(const std::vector& recv_codecs); - - std::string GetCodecNameFromPayloadType(int payload_type); + bool ReconfigureCodecs(const std::vector& recv_codecs); WebRtcVideoChannel* const channel_; webrtc::Call* const call_; @@ -497,9 +541,9 @@ class WebRtcVideoChannel : public VideoMediaChannel, // Both `stream_` and `flexfec_stream_` are managed by `this`. They are // destroyed by calling call_->DestroyVideoReceiveStream and // call_->DestroyFlexfecReceiveStream, respectively. - webrtc::VideoReceiveStream* stream_; + webrtc::VideoReceiveStreamInterface* stream_; const bool default_stream_; - webrtc::VideoReceiveStream::Config config_; + webrtc::VideoReceiveStreamInterface::Config config_; webrtc::FlexfecReceiveStream::Config flexfec_config_; webrtc::FlexfecReceiveStream* flexfec_stream_; @@ -550,8 +594,8 @@ class WebRtcVideoChannel : public VideoMediaChannel, webrtc::TaskQueueBase* const worker_thread_; webrtc::ScopedTaskSafety task_safety_; - webrtc::SequenceChecker network_thread_checker_; - webrtc::SequenceChecker thread_checker_; + RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker network_thread_checker_; + RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker thread_checker_; uint32_t rtcp_receiver_report_ssrc_ RTC_GUARDED_BY(thread_checker_); bool sending_ RTC_GUARDED_BY(thread_checker_); @@ -638,57 +682,10 @@ class WebRtcVideoChannel : public VideoMediaChannel, std::unique_ptr unknown_ssrc_packet_buffer_ RTC_GUARDED_BY(thread_checker_); + // TODO(bugs.webrtc.org/11341): Remove this and relevant PC API. Presence + // of multiple negotiated codecs allows generic encoder fallback on failures. + // Presence of EncoderSelector allows switching to specific encoders. 
bool allow_codec_switching_ = false; - absl::optional - requested_encoder_switch_; -}; - -class EncoderStreamFactory - : public webrtc::VideoEncoderConfig::VideoStreamFactoryInterface { - public: - EncoderStreamFactory(std::string codec_name, - int max_qp, - bool is_screenshare, - bool conference_mode) - : EncoderStreamFactory(codec_name, - max_qp, - is_screenshare, - conference_mode, - nullptr) {} - - EncoderStreamFactory(std::string codec_name, - int max_qp, - bool is_screenshare, - bool conference_mode, - const webrtc::WebRtcKeyValueConfig* trials); - - private: - std::vector CreateEncoderStreams( - int width, - int height, - const webrtc::VideoEncoderConfig& encoder_config) override; - - std::vector CreateDefaultVideoStreams( - int width, - int height, - const webrtc::VideoEncoderConfig& encoder_config, - const absl::optional& experimental_min_bitrate) const; - - std::vector - CreateSimulcastOrConferenceModeScreenshareStreams( - int width, - int height, - const webrtc::VideoEncoderConfig& encoder_config, - const absl::optional& experimental_min_bitrate) const; - - const std::string codec_name_; - const int max_qp_; - const bool is_screenshare_; - // Allows a screenshare specific configuration, which enables temporal - // layering and various settings. - const bool conference_mode_; - const webrtc::FieldTrialBasedConfig fallback_trials_; - const webrtc::WebRtcKeyValueConfig& trials_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc index 736b4263d9..4eb3f8422a 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc @@ -23,7 +23,8 @@ #include "api/audio/audio_frame_processor.h" #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/call/audio_sink.h" -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_view.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "media/base/audio_source.h" #include "media/base/media_constants.h" #include "media/base/stream_params.h" @@ -48,8 +49,6 @@ #include "rtc_base/strings/audio_format_to_string.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/strings/string_format.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/third_party/base64/base64.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/metrics.h" @@ -126,9 +125,10 @@ bool IsCodec(const AudioCodec& codec, const char* ref_name) { bool FindCodec(const std::vector& codecs, const AudioCodec& codec, - AudioCodec* found_codec) { + AudioCodec* found_codec, + const webrtc::FieldTrialsView* field_trials) { for (const AudioCodec& c : codecs) { - if (c.Matches(codec)) { + if (c.Matches(codec, field_trials)) { if (found_codec != NULL) { *found_codec = c; } @@ -206,16 +206,10 @@ absl::optional ComputeSendBitrate(int max_send_bitrate_bps, } } -bool IsEnabled(const webrtc::WebRtcKeyValueConfig& config, - absl::string_view trial) { +bool IsEnabled(const webrtc::FieldTrialsView& config, absl::string_view trial) { return absl::StartsWith(config.Lookup(trial), "Enabled"); } -bool IsDisabled(const webrtc::WebRtcKeyValueConfig& config, - absl::string_view trial) { - return absl::StartsWith(config.Lookup(trial), "Disabled"); -} - struct AdaptivePtimeConfig { bool enabled = false; webrtc::DataRate min_payload_bitrate = 
webrtc::DataRate::KilobitsPerSec(16); @@ -234,7 +228,7 @@ struct AdaptivePtimeConfig { "use_slow_adaptation", &use_slow_adaptation); } - explicit AdaptivePtimeConfig(const webrtc::WebRtcKeyValueConfig& trials) { + explicit AdaptivePtimeConfig(const webrtc::FieldTrialsView& trials) { Parser()->Parse(trials.Lookup("WebRTC-Audio-AdaptivePtime")); #if WEBRTC_ENABLE_PROTOBUF webrtc::audio_network_adaptor::config::ControllerManager config; @@ -251,7 +245,7 @@ struct AdaptivePtimeConfig { // TODO(tommi): Constructing a receive stream could be made simpler. // Move some of this boiler plate code into the config structs themselves. -webrtc::AudioReceiveStream::Config BuildReceiveStreamConfig( +webrtc::AudioReceiveStreamInterface::Config BuildReceiveStreamConfig( uint32_t remote_ssrc, uint32_t local_ssrc, bool use_transport_cc, @@ -266,11 +260,10 @@ webrtc::AudioReceiveStream::Config BuildReceiveStreamConfig( size_t jitter_buffer_max_packets, bool jitter_buffer_fast_accelerate, int jitter_buffer_min_delay_ms, - bool jitter_buffer_enable_rtx_handling, rtc::scoped_refptr frame_decryptor, const webrtc::CryptoOptions& crypto_options, rtc::scoped_refptr frame_transformer) { - webrtc::AudioReceiveStream::Config config; + webrtc::AudioReceiveStreamInterface::Config config; config.rtp.remote_ssrc = remote_ssrc; config.rtp.local_ssrc = local_ssrc; config.rtp.transport_cc = use_transport_cc; @@ -287,7 +280,6 @@ webrtc::AudioReceiveStream::Config BuildReceiveStreamConfig( config.jitter_buffer_max_packets = jitter_buffer_max_packets; config.jitter_buffer_fast_accelerate = jitter_buffer_fast_accelerate; config.jitter_buffer_min_delay_ms = jitter_buffer_min_delay_ms; - config.jitter_buffer_enable_rtx_handling = jitter_buffer_enable_rtx_handling; config.frame_decryptor = std::move(frame_decryptor); config.crypto_options = crypto_options; config.frame_transformer = std::move(frame_transformer); @@ -304,7 +296,7 @@ WebRtcVoiceEngine::WebRtcVoiceEngine( rtc::scoped_refptr audio_mixer, rtc::scoped_refptr audio_processing, webrtc::AudioFrameProcessor* audio_frame_processor, - const webrtc::WebRtcKeyValueConfig& trials) + const webrtc::FieldTrialsView& trials) : task_queue_factory_(task_queue_factory), adm_(adm), encoder_factory_(encoder_factory), @@ -312,8 +304,6 @@ WebRtcVoiceEngine::WebRtcVoiceEngine( audio_mixer_(audio_mixer), apm_(audio_processing), audio_frame_processor_(audio_frame_processor), - audio_red_for_opus_enabled_( - !IsDisabled(trials, "WebRTC-Audio-Red-For-Opus")), minimized_remsampling_on_mobile_trial_enabled_( IsEnabled(trials, "WebRTC-Audio-MinimizeResamplingOnMobile")) { // This may be called from any thread, so detach thread checkers. @@ -344,6 +334,7 @@ void WebRtcVoiceEngine::Init() { RTC_LOG(LS_INFO) << "WebRtcVoiceEngine::Init"; // TaskQueue expects to be created/destroyed on the same thread. + RTC_DCHECK(!low_priority_worker_queue_); low_priority_worker_queue_.reset( new rtc::TaskQueue(task_queue_factory_->CreateTaskQueue( "rtc-low-prio", webrtc::TaskQueueFactory::Priority::LOW))); @@ -399,22 +390,15 @@ void WebRtcVoiceEngine::Init() { #if defined(WEBRTC_IOS) // On iOS, VPIO provides built-in NS. 
options.noise_suppression = false; - options.typing_detection = false; #else options.noise_suppression = true; - options.typing_detection = true; #endif - options.experimental_ns = false; options.highpass_filter = true; options.stereo_swapping = false; options.audio_jitter_buffer_max_packets = 200; options.audio_jitter_buffer_fast_accelerate = false; options.audio_jitter_buffer_min_delay_ms = 0; - options.audio_jitter_buffer_enable_rtx_handling = false; - options.experimental_agc = false; - options.residual_echo_detector = true; - bool error = ApplyOptions(options); - RTC_DCHECK(error); + ApplyOptions(options); } initialized_ = true; } @@ -435,7 +419,7 @@ VoiceMediaChannel* WebRtcVoiceEngine::CreateMediaChannel( call); } -bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { +void WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_LOG(LS_INFO) << "WebRtcVoiceEngine::ApplyOptions: " << options_in.ToString(); @@ -462,20 +446,11 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { use_mobile_software_aec = true; #endif -// Override noise suppression options for Android. -#if defined(WEBRTC_ANDROID) - options.typing_detection = false; - options.experimental_ns = false; -#endif - // Set and adjust gain control options. #if defined(WEBRTC_IOS) // On iOS, VPIO provides built-in AGC. options.auto_gain_control = false; - options.experimental_agc = false; RTC_LOG(LS_INFO) << "Always disable AGC on iOS. Use built-in instead."; -#elif defined(WEBRTC_ANDROID) - options.experimental_agc = false; #endif #if defined(WEBRTC_IOS) || defined(WEBRTC_ANDROID) @@ -546,55 +521,29 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { } if (options.stereo_swapping) { - RTC_LOG(LS_INFO) << "Stereo swapping enabled? " << *options.stereo_swapping; audio_state()->SetStereoChannelSwapping(*options.stereo_swapping); } if (options.audio_jitter_buffer_max_packets) { - RTC_LOG(LS_INFO) << "NetEq capacity is " - << *options.audio_jitter_buffer_max_packets; audio_jitter_buffer_max_packets_ = std::max(20, *options.audio_jitter_buffer_max_packets); } if (options.audio_jitter_buffer_fast_accelerate) { - RTC_LOG(LS_INFO) << "NetEq fast mode? " - << *options.audio_jitter_buffer_fast_accelerate; audio_jitter_buffer_fast_accelerate_ = *options.audio_jitter_buffer_fast_accelerate; } if (options.audio_jitter_buffer_min_delay_ms) { - RTC_LOG(LS_INFO) << "NetEq minimum delay is " - << *options.audio_jitter_buffer_min_delay_ms; audio_jitter_buffer_min_delay_ms_ = *options.audio_jitter_buffer_min_delay_ms; } - if (options.audio_jitter_buffer_enable_rtx_handling) { - RTC_LOG(LS_INFO) << "NetEq handle reordered packets? " - << *options.audio_jitter_buffer_enable_rtx_handling; - audio_jitter_buffer_enable_rtx_handling_ = - *options.audio_jitter_buffer_enable_rtx_handling; - } webrtc::AudioProcessing* ap = apm(); if (!ap) { - RTC_LOG(LS_INFO) - << "No audio processing module present. No software-provided effects " - "(AEC, NS, AGC, ...) 
are activated"; - return true; - } - - if (options.experimental_ns) { - experimental_ns_ = options.experimental_ns; + return; } webrtc::AudioProcessing::Config apm_config = ap->GetConfig(); -#if !(defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)) - if (experimental_ns_.has_value()) { - apm_config.transient_suppression.enabled = experimental_ns_.value(); - } -#endif - if (options.echo_cancellation) { apm_config.echo_canceller.enabled = *options.echo_cancellation; apm_config.echo_canceller.mobile_mode = use_mobile_software_aec; @@ -611,41 +560,19 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { apm_config.gain_controller1.kAdaptiveAnalog; #endif } - if (options.tx_agc_target_dbov) { - apm_config.gain_controller1.target_level_dbfs = *options.tx_agc_target_dbov; - } - if (options.tx_agc_digital_compression_gain) { - apm_config.gain_controller1.compression_gain_db = - *options.tx_agc_digital_compression_gain; - } - if (options.tx_agc_limiter) { - apm_config.gain_controller1.enable_limiter = *options.tx_agc_limiter; - } if (options.highpass_filter) { apm_config.high_pass_filter.enabled = *options.highpass_filter; } - if (options.residual_echo_detector) { - apm_config.residual_echo_detector.enabled = *options.residual_echo_detector; - } - if (options.noise_suppression) { const bool enabled = *options.noise_suppression; apm_config.noise_suppression.enabled = enabled; apm_config.noise_suppression.level = webrtc::AudioProcessing::Config::NoiseSuppression::Level::kHigh; - RTC_LOG(LS_INFO) << "NS set to " << enabled; - } - - if (options.typing_detection) { - RTC_LOG(LS_INFO) << "Typing detection is enabled? " - << *options.typing_detection; - apm_config.voice_detection.enabled = *options.typing_detection; } ap->ApplyConfig(apm_config); - return true; } const std::vector& WebRtcVoiceEngine::send_codecs() const { @@ -770,7 +697,7 @@ std::vector WebRtcVoiceEngine::CollectCodecs( out.push_back(codec); - if (codec.name == kOpusCodecName && audio_red_for_opus_enabled_) { + if (codec.name == kOpusCodecName) { std::string redFmtp = rtc::ToString(codec.id) + "/" + rtc::ToString(codec.id); map_format({kRedCodecName, 48000, 2, {{"", redFmtp}}}, &out); @@ -997,6 +924,8 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream size_t number_of_channels, size_t number_of_frames, absl::optional absolute_capture_timestamp_ms) override { + TRACE_EVENT_BEGIN2("webrtc", "WebRtcAudioSendStream::OnData", "sample_rate", + sample_rate, "number_of_frames", number_of_frames); RTC_DCHECK_EQ(16, bits_per_sample); RTC_CHECK_RUNS_SERIALIZED(&audio_capture_race_checker_); RTC_DCHECK(stream_); @@ -1012,6 +941,8 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream *absolute_capture_timestamp_ms); } stream_->SendAudioData(std::move(audio_frame)); + TRACE_EVENT_END1("webrtc", "WebRtcAudioSendStream::OnData", + "number_of_channels", number_of_channels); } // Callback from the `source_` when it is going away. 
In case Start() has @@ -1215,7 +1146,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { public: - WebRtcAudioReceiveStream(webrtc::AudioReceiveStream::Config config, + WebRtcAudioReceiveStream(webrtc::AudioReceiveStreamInterface::Config config, webrtc::Call* call) : call_(call), stream_(call_->CreateAudioReceiveStream(config)) { RTC_DCHECK(call); @@ -1231,7 +1162,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { call_->DestroyAudioReceiveStream(stream_); } - webrtc::AudioReceiveStream& stream() { + webrtc::AudioReceiveStreamInterface& stream() { RTC_DCHECK(stream_); return *stream_; } @@ -1244,8 +1175,8 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { void SetUseTransportCc(bool use_transport_cc, bool use_nack) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - stream_->SetUseTransportCcAndNackHistory(use_transport_cc, - use_nack ? kNackRtpHistoryMs : 0); + stream_->SetTransportCc(use_transport_cc); + stream_->SetNackHistory(use_nack ? kNackRtpHistoryMs : 0); } void SetNonSenderRttMeasurement(bool enabled) { @@ -1264,7 +1195,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { stream_->SetDecoderMap(decoder_map); } - webrtc::AudioReceiveStream::Stats GetStats( + webrtc::AudioReceiveStreamInterface::Stats GetStats( bool get_and_clear_legacy_stats) const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); return stream_->GetStats(get_and_clear_legacy_stats); @@ -1298,8 +1229,8 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { return true; RTC_LOG(LS_ERROR) << "Failed to SetBaseMinimumPlayoutDelayMs" - " on AudioReceiveStream on SSRC=" - << stream_->rtp_config().remote_ssrc + " on AudioReceiveStreamInterface on SSRC=" + << stream_->remote_ssrc() << " with delay_ms=" << delay_ms; return false; } @@ -1317,9 +1248,8 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { webrtc::RtpParameters GetRtpParameters() const { webrtc::RtpParameters rtp_parameters; rtp_parameters.encodings.emplace_back(); - const auto& config = stream_->rtp_config(); - rtp_parameters.encodings[0].ssrc = config.remote_ssrc; - rtp_parameters.header_extensions = config.extensions; + rtp_parameters.encodings[0].ssrc = stream_->remote_ssrc(); + rtp_parameters.header_extensions = stream_->GetRtpExtensions(); return rtp_parameters; } @@ -1332,7 +1262,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { private: webrtc::SequenceChecker worker_thread_checker_; webrtc::Call* call_ = nullptr; - webrtc::AudioReceiveStream* const stream_ = nullptr; + webrtc::AudioReceiveStreamInterface* const stream_ = nullptr; std::unique_ptr raw_audio_sink_ RTC_GUARDED_BY(worker_thread_checker_); }; @@ -1343,14 +1273,12 @@ WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel( const AudioOptions& options, const webrtc::CryptoOptions& crypto_options, webrtc::Call* call) - : VoiceMediaChannel(config, call->network_thread()), + : VoiceMediaChannel(call->network_thread(), config.enable_dscp), worker_thread_(call->worker_thread()), engine_(engine), call_(call), audio_config_(config.audio), - crypto_options_(crypto_options), - audio_red_for_opus_enabled_( - !IsDisabled(call->trials(), "WebRTC-Audio-Red-For-Opus")) { + crypto_options_(crypto_options) { network_thread_checker_.Detach(); RTC_LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel"; RTC_DCHECK(call); @@ -1546,9 +1474,9 @@ webrtc::RtpParameters WebRtcVoiceMediaChannel::GetDefaultRtpReceiveParameters() RTC_DCHECK_RUN_ON(worker_thread_); webrtc::RtpParameters rtp_params; 
if (!default_sink_) { - RTC_LOG(LS_WARNING) << "Attempting to get RTP parameters for the default, " - "unsignaled audio receive stream, but not yet " - "configured to receive such a stream."; + // Getting parameters on a default, unsignaled audio receive stream but + // because we've not configured to receive such a stream, `encodings` is + // empty. return rtp_params; } rtp_params.encodings.emplace_back(); @@ -1567,11 +1495,7 @@ bool WebRtcVoiceMediaChannel::SetOptions(const AudioOptions& options) { // on top. This means there is no way to "clear" options such that // they go back to the engine default. options_.SetAll(options); - if (!engine()->ApplyOptions(options_)) { - RTC_LOG(LS_WARNING) - << "Failed to apply engine options during channel SetOptions."; - return false; - } + engine()->ApplyOptions(options_); absl::optional audio_network_adaptor_config = GetAudioNetworkAdaptorConfig(options_); @@ -1603,7 +1527,7 @@ bool WebRtcVoiceMediaChannel::SetRecvCodecs( // Log a warning if a codec's payload type is changing. This used to be // treated as an error. It's abnormal, but not really illegal. AudioCodec old_codec; - if (FindCodec(recv_codecs_, codec, &old_codec) && + if (FindCodec(recv_codecs_, codec, &old_codec, &call_->trials()) && old_codec.id != codec.id) { RTC_LOG(LS_WARNING) << codec.name << " mapped to a second payload type (" << codec.id << ", was already mapped to " @@ -1611,7 +1535,7 @@ bool WebRtcVoiceMediaChannel::SetRecvCodecs( } auto format = AudioCodecToSdpAudioFormat(codec); if (!IsCodec(codec, kCnCodecName) && !IsCodec(codec, kDtmfCodecName) && - (!audio_red_for_opus_enabled_ || !IsCodec(codec, kRedCodecName)) && + !IsCodec(codec, kRedCodecName) && !engine()->decoder_factory_->IsSupportedDecoder(format)) { RTC_LOG(LS_ERROR) << "Unsupported codec: " << rtc::ToString(format); return false; @@ -1678,8 +1602,8 @@ bool CheckRedParameters( RTC_LOG(LS_WARNING) << "audio/RED missing fmtp parameters."; return false; } - std::vector redundant_payloads; - rtc::split(red_parameters->second, '/', &redundant_payloads); + std::vector redundant_payloads = + rtc::split(red_parameters->second, '/'); // 32 is chosen as a maximum upper bound for consistency with the // red payload splitter. if (redundant_payloads.size() < 2 || redundant_payloads.size() > 32) { @@ -1797,19 +1721,17 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs( } } - if (audio_red_for_opus_enabled_) { - // Loop through the codecs to find the RED codec that matches opus - // with respect to clockrate and number of channels. - size_t red_codec_position = 0; - for (const AudioCodec& red_codec : codecs) { - if (red_codec_position < send_codec_position && - IsCodec(red_codec, kRedCodecName) && - CheckRedParameters(red_codec, *send_codec_spec)) { - send_codec_spec->red_payload_type = red_codec.id; - break; - } - red_codec_position++; + // Loop through the codecs to find the RED codec that matches opus + // with respect to clockrate and number of channels. 
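// [Editorial sketch, not part of the patch.] The loop that follows picks the
// RED codec whose fmtp passes CheckRedParameters(), patched above to require a
// '/'-separated list of 2 to 32 redundant payload types (for example the
// "<id>/<id>" string built for Opus earlier in this file). A standalone
// illustration of that shape check, in plain C++ rather than the rtc:: helpers:
#include <sstream>
#include <string>
#include <vector>

bool LooksLikeValidRedFmtp(const std::string& fmtp) {
  std::vector<std::string> payloads;
  std::stringstream ss(fmtp);
  std::string entry;
  while (std::getline(ss, entry, '/'))
    payloads.push_back(entry);
  // Same bounds as the patched code: at least 2 and at most 32 redundant
  // payload types (32 matches the red payload splitter's upper bound).
  return payloads.size() >= 2 && payloads.size() <= 32;
}
// e.g. LooksLikeValidRedFmtp("111/111") is true, LooksLikeValidRedFmtp("111") is false.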
+ size_t red_codec_position = 0; + for (const AudioCodec& red_codec : codecs) { + if (red_codec_position < send_codec_position && + IsCodec(red_codec, kRedCodecName) && + CheckRedParameters(red_codec, *send_codec_spec)) { + send_codec_spec->red_payload_type = red_codec.id; + break; } + red_codec_position++; } if (send_codec_spec_ != send_codec_spec) { @@ -1833,7 +1755,6 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs( "streams."; recv_transport_cc_enabled_ = send_codec_spec_->transport_cc_enabled; recv_nack_enabled_ = send_codec_spec_->nack_enabled; - enable_non_sender_rtt_ = send_codec_spec_->enable_non_sender_rtt; for (auto& kv : recv_streams_) { kv.second->SetUseTransportCc(recv_transport_cc_enabled_, recv_nack_enabled_); @@ -1873,13 +1794,15 @@ void WebRtcVoiceMediaChannel::SetSend(bool send) { return; } - // Apply channel specific options, and initialize the ADM for recording (this - // may take time on some platforms, e.g. Android). + // Apply channel specific options. if (send) { engine()->ApplyOptions(options_); - // InitRecording() may return an error if the ADM is already recording. - if (!engine()->adm()->RecordingIsInitialized() && + // Initialize the ADM for recording (this may take time on some platforms, + // e.g. Android). + if (options_.init_recording_on_send.value_or(true) && + // InitRecording() may return an error if the ADM is already recording. + !engine()->adm()->RecordingIsInitialized() && !engine()->adm()->Recording()) { if (engine()->adm()->InitRecording() != 0) { RTC_LOG(LS_WARNING) << "Failed to initialize recording"; @@ -2017,9 +1940,8 @@ bool WebRtcVoiceMediaChannel::AddRecvStream(const StreamParams& sp) { recv_rtp_extensions_, this, engine()->decoder_factory_, decoder_map_, codec_pair_id_, engine()->audio_jitter_buffer_max_packets_, engine()->audio_jitter_buffer_fast_accelerate_, - engine()->audio_jitter_buffer_min_delay_ms_, - engine()->audio_jitter_buffer_enable_rtx_handling_, - unsignaled_frame_decryptor_, crypto_options_, nullptr); + engine()->audio_jitter_buffer_min_delay_ms_, unsignaled_frame_decryptor_, + crypto_options_, unsignaled_frame_transformer_); recv_streams_.insert(std::make_pair( ssrc, new WebRtcAudioReceiveStream(std::move(config), call_))); @@ -2221,8 +2143,8 @@ void WebRtcVoiceMediaChannel::OnPacketReceived(rtc::CopyOnWriteBuffer packet, // consistency it would be good to move the interaction with call_->Receiver() // to a common implementation and provide a callback on the worker thread // for the exception case (DELIVERY_UNKNOWN_SSRC) and how retry is attempted. 
- worker_thread_->PostTask(ToQueuedTask(task_safety_, [this, packet, - packet_time_us] { + worker_thread_->PostTask(SafeTask(task_safety_.flag(), [this, packet, + packet_time_us] { RTC_DCHECK_RUN_ON(worker_thread_); webrtc::PacketReceiver::DeliveryStatus delivery_result = @@ -2295,14 +2217,15 @@ void WebRtcVoiceMediaChannel::OnPacketSent(const rtc::SentPacket& sent_packet) { } void WebRtcVoiceMediaChannel::OnNetworkRouteChanged( - const std::string& transport_name, + absl::string_view transport_name, const rtc::NetworkRoute& network_route) { RTC_DCHECK_RUN_ON(&network_thread_checker_); call_->OnAudioTransportOverheadChanged(network_route.packet_overhead); - worker_thread_->PostTask(ToQueuedTask( - task_safety_, [this, name = transport_name, route = network_route] { + worker_thread_->PostTask(SafeTask( + task_safety_.flag(), + [this, name = std::string(transport_name), route = network_route] { RTC_DCHECK_RUN_ON(worker_thread_); call_->GetTransportControllerSend()->OnNetworkRouteChanged(name, route); })); @@ -2371,6 +2294,7 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info, sinfo.header_and_padding_bytes_sent = stats.header_and_padding_bytes_sent; sinfo.retransmitted_bytes_sent = stats.retransmitted_bytes_sent; sinfo.packets_sent = stats.packets_sent; + sinfo.total_packet_send_delay = stats.total_packet_send_delay; sinfo.retransmitted_packets_sent = stats.retransmitted_packets_sent; sinfo.packets_lost = stats.packets_lost; sinfo.fraction_lost = stats.fraction_lost; @@ -2383,10 +2307,15 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info, sinfo.audio_level = stats.audio_level; sinfo.total_input_energy = stats.total_input_energy; sinfo.total_input_duration = stats.total_input_duration; - sinfo.typing_noise_detected = (send_ ? stats.typing_noise_detected : false); sinfo.ana_statistics = stats.ana_statistics; sinfo.apm_statistics = stats.apm_statistics; sinfo.report_block_datas = std::move(stats.report_block_datas); + + auto encodings = stream.second->rtp_parameters().encodings; + if (!encodings.empty()) { + sinfo.active = encodings[0].active; + } + info->senders.push_back(sinfo); } @@ -2410,7 +2339,7 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info, continue; } } - webrtc::AudioReceiveStream::Stats stats = + webrtc::AudioReceiveStreamInterface::Stats stats = stream.second->GetStats(get_and_clear_legacy_stats); VoiceReceiverInfo rinfo; rinfo.add_ssrc(stats.remote_ssrc); @@ -2438,6 +2367,8 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info, rinfo.jitter_buffer_emitted_count = stats.jitter_buffer_emitted_count; rinfo.jitter_buffer_target_delay_seconds = stats.jitter_buffer_target_delay_seconds; + rinfo.jitter_buffer_minimum_delay_seconds = + stats.jitter_buffer_minimum_delay_seconds; rinfo.inserted_samples_for_deceleration = stats.inserted_samples_for_deceleration; rinfo.removed_samples_for_acceleration = @@ -2557,6 +2488,13 @@ void WebRtcVoiceMediaChannel::SetDepacketizerToDecoderFrameTransformer( uint32_t ssrc, rtc::scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(worker_thread_); + if (ssrc == 0) { + // If the receiver is unsignaled, save the frame transformer and set it when + // the stream is associated with an ssrc. 
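// [Editorial sketch, not part of the patch.] The ssrc == 0 branch that follows
// only caches the transformer in unsignaled_frame_transformer_; the
// AddRecvStream() hunk earlier in this file then feeds that cached value into
// each newly created receive stream's config. A minimal standalone picture of
// this cache-then-apply pattern (all names below are illustrative only):
#include <cstdint>
#include <memory>
#include <utility>

struct FrameTransformer {};  // stand-in for the real transformer type

class UnsignaledCache {
 public:
  void Set(uint32_t ssrc, std::shared_ptr<FrameTransformer> t) {
    if (ssrc == 0) {  // unsignaled receiver: remember the value for later
      cached_ = std::move(t);
      return;
    }
    // ... otherwise it would be applied directly to the stream owning `ssrc` ...
  }
  // Called once a stream is created for a previously unsignaled ssrc.
  std::shared_ptr<FrameTransformer> ForNewStream() const { return cached_; }

 private:
  std::shared_ptr<FrameTransformer> cached_;
};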
+ unsignaled_frame_transformer_ = std::move(frame_transformer); + return; + } + auto matching_stream = recv_streams_.find(ssrc); if (matching_stream == recv_streams_.end()) { RTC_LOG(LS_INFO) << "Attempting to set frame transformer for SSRC:" << ssrc diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h index a8eb61d318..0a501bea0a 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h @@ -17,11 +17,12 @@ #include #include "api/audio_codecs/audio_encoder_factory.h" +#include "api/field_trials_view.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/rtp/rtp_source.h" -#include "api/transport/webrtc_key_value_config.h" #include "call/audio_state.h" #include "call/call.h" #include "media/base/media_engine.h" @@ -30,7 +31,6 @@ #include "rtc_base/buffer.h" #include "rtc_base/network_route.h" #include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" namespace webrtc { class AudioFrameProcessor; @@ -55,7 +55,7 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { rtc::scoped_refptr audio_mixer, rtc::scoped_refptr audio_processing, webrtc::AudioFrameProcessor* audio_frame_processor, - const webrtc::WebRtcKeyValueConfig& trials); + const webrtc::FieldTrialsView& trials); WebRtcVoiceEngine() = delete; WebRtcVoiceEngine(const WebRtcVoiceEngine&) = delete; @@ -91,7 +91,7 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { // Every option that is "set" will be applied. Every option not "set" will be // ignored. This allows us to selectively turn on and off different options // easily at any time. - bool ApplyOptions(const AudioOptions& options); + void ApplyOptions(const AudioOptions& options); int CreateVoEChannel(); @@ -124,18 +124,11 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { bool is_dumping_aec_ = false; bool initialized_ = false; - // Cache experimental_ns and apply in case they are missing in the audio - // options. - absl::optional experimental_ns_; // Jitter buffer settings for new streams. size_t audio_jitter_buffer_max_packets_ = 200; bool audio_jitter_buffer_fast_accelerate_ = false; int audio_jitter_buffer_min_delay_ms_ = 0; - bool audio_jitter_buffer_enable_rtx_handling_ = false; - // If this field is enabled, we will negotiate and use RFC 2198 - // redundancy for opus audio. - const bool audio_red_for_opus_enabled_; const bool minimized_remsampling_on_mobile_trial_enabled_; }; @@ -209,7 +202,7 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel, void OnPacketReceived(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) override; void OnPacketSent(const rtc::SentPacket& sent_packet) override; - void OnNetworkRouteChanged(const std::string& transport_name, + void OnNetworkRouteChanged(absl::string_view transport_name, const rtc::NetworkRoute& network_route) override; void OnReadyToSend(bool ready) override; bool GetStats(VoiceMediaInfo* info, bool get_and_clear_legacy_stats) override; @@ -328,8 +321,8 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel, // Unsignaled streams have an option to have a frame decryptor set on them. 
rtc::scoped_refptr unsignaled_frame_decryptor_; - - const bool audio_red_for_opus_enabled_; + rtc::scoped_refptr + unsignaled_frame_transformer_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.cc b/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.cc index 4a5166863b..062360d251 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.cc @@ -10,6 +10,7 @@ #include "media/sctp/dcsctp_transport.h" +#include #include #include #include @@ -115,10 +116,21 @@ bool IsEmptyPPID(dcsctp::PPID ppid) { DcSctpTransport::DcSctpTransport(rtc::Thread* network_thread, rtc::PacketTransportInternal* transport, Clock* clock) + : DcSctpTransport(network_thread, + transport, + clock, + std::make_unique()) {} + +DcSctpTransport::DcSctpTransport( + rtc::Thread* network_thread, + rtc::PacketTransportInternal* transport, + Clock* clock, + std::unique_ptr socket_factory) : network_thread_(network_thread), transport_(transport), clock_(clock), random_(clock_->TimeInMicroseconds()), + socket_factory_(std::move(socket_factory)), task_queue_timeout_factory_( *network_thread, [this]() { return TimeMillis(); }, @@ -126,7 +138,7 @@ DcSctpTransport::DcSctpTransport(rtc::Thread* network_thread, socket_->HandleTimeout(timeout_id); }) { RTC_DCHECK_RUN_ON(network_thread_); - static int instance_count = 0; + static std::atomic instance_count = 0; rtc::StringBuilder sb; sb << debug_name_ << instance_count++; debug_name_ = sb.Release(); @@ -139,6 +151,19 @@ DcSctpTransport::~DcSctpTransport() { } } +void DcSctpTransport::SetOnConnectedCallback(std::function callback) { + RTC_DCHECK_RUN_ON(network_thread_); + on_connected_callback_ = std::move(callback); +} + +void DcSctpTransport::SetDataChannelSink(DataChannelSink* sink) { + RTC_DCHECK_RUN_ON(network_thread_); + data_channel_sink_ = sink; + if (data_channel_sink_ && ready_to_send_data_) { + data_channel_sink_->OnReadyToSend(); + } +} + void DcSctpTransport::SetDtlsTransport( rtc::PacketTransportInternal* transport) { RTC_DCHECK_RUN_ON(network_thread_); @@ -153,10 +178,9 @@ bool DcSctpTransport::Start(int local_sctp_port, int max_message_size) { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(max_message_size > 0); - - RTC_LOG(LS_INFO) << debug_name_ << "->Start(local=" << local_sctp_port - << ", remote=" << remote_sctp_port - << ", max_message_size=" << max_message_size << ")"; + RTC_DLOG(LS_INFO) << debug_name_ << "->Start(local=" << local_sctp_port + << ", remote=" << remote_sctp_port + << ", max_message_size=" << max_message_size << ")"; if (!socket_) { dcsctp::DcSctpOptions options; @@ -174,9 +198,8 @@ bool DcSctpTransport::Start(int local_sctp_port, std::make_unique(debug_name_); } - dcsctp::DcSctpSocketFactory factory; - socket_ = - factory.Create(debug_name_, *this, std::move(packet_observer), options); + socket_ = socket_factory_->Create(debug_name_, *this, + std::move(packet_observer), options); } else { if (local_sctp_port != socket_->options().local_port || remote_sctp_port != socket_->options().remote_port) { @@ -195,23 +218,41 @@ bool DcSctpTransport::Start(int local_sctp_port, } bool DcSctpTransport::OpenStream(int sid) { - RTC_LOG(LS_INFO) << debug_name_ << "->OpenStream(" << sid << ")."; - if (!socket_) { - RTC_LOG(LS_ERROR) << debug_name_ << "->OpenStream(sid=" << sid - << "): Transport is not started."; - return false; - } + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DLOG(LS_INFO) << debug_name_ << "->OpenStream(" << sid << 
")."; + + StreamState stream_state; + stream_states_.insert_or_assign(dcsctp::StreamID(static_cast(sid)), + stream_state); return true; } bool DcSctpTransport::ResetStream(int sid) { - RTC_LOG(LS_INFO) << debug_name_ << "->ResetStream(" << sid << ")."; + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DLOG(LS_INFO) << debug_name_ << "->ResetStream(" << sid << ")."; if (!socket_) { - RTC_LOG(LS_ERROR) << debug_name_ << "->OpenStream(sid=" << sid + RTC_LOG(LS_ERROR) << debug_name_ << "->ResetStream(sid=" << sid << "): Transport is not started."; return false; } + dcsctp::StreamID streams[1] = {dcsctp::StreamID(static_cast(sid))}; + + auto it = stream_states_.find(streams[0]); + if (it == stream_states_.end()) { + RTC_LOG(LS_ERROR) << debug_name_ << "->ResetStream(sid=" << sid + << "): Stream is not open."; + return false; + } + + StreamState& stream_state = it->second; + if (stream_state.closure_initiated || stream_state.incoming_reset_done || + stream_state.outgoing_reset_done) { + // The closing procedure was already initiated by the remote, don't do + // anything. + return false; + } + stream_state.closure_initiated = true; socket_->ResetStreams(streams); return true; } @@ -221,10 +262,9 @@ bool DcSctpTransport::SendData(int sid, const rtc::CopyOnWriteBuffer& payload, cricket::SendDataResult* result) { RTC_DCHECK_RUN_ON(network_thread_); - - RTC_LOG(LS_VERBOSE) << debug_name_ << "->SendData(sid=" << sid - << ", type=" << static_cast(params.type) - << ", length=" << payload.size() << ")."; + RTC_DLOG(LS_VERBOSE) << debug_name_ << "->SendData(sid=" << sid + << ", type=" << static_cast(params.type) + << ", length=" << payload.size() << ")."; if (!socket_) { RTC_LOG(LS_ERROR) << debug_name_ @@ -233,6 +273,30 @@ bool DcSctpTransport::SendData(int sid, return false; } + // It is possible for a message to be sent from the signaling thread at the + // same time a data-channel is closing, but before the signaling thread is + // aware of it. So we need to keep track of currently active data channels and + // skip sending messages for the ones that are not open or closing. + // The sending errors are not impacting the data channel API contract as + // it is allowed to discard queued messages when the channel is closing. 
+ auto stream_state = + stream_states_.find(dcsctp::StreamID(static_cast(sid))); + if (stream_state == stream_states_.end()) { + RTC_LOG(LS_VERBOSE) << "Skipping message on non-open stream with sid: " + << sid; + *result = cricket::SDR_ERROR; + return false; + } + + if (stream_state->second.closure_initiated || + stream_state->second.incoming_reset_done || + stream_state->second.outgoing_reset_done) { + RTC_LOG(LS_VERBOSE) << "Skipping message on closing stream with sid: " + << sid; + *result = cricket::SDR_ERROR; + return false; + } + auto max_message_size = socket_->options().max_message_size; if (max_message_size > 0 && payload.size() > max_message_size) { RTC_LOG(LS_WARNING) << debug_name_ @@ -287,6 +351,7 @@ bool DcSctpTransport::SendData(int sid, << "->SendData(...): send() failed with error " << dcsctp::ToString(error) << "."; *result = cricket::SDR_ERROR; + break; } return *result == cricket::SDR_SUCCESS; @@ -359,8 +424,9 @@ SendPacketStatus DcSctpTransport::SendPacketWithStatus( return SendPacketStatus::kSuccess; } -std::unique_ptr DcSctpTransport::CreateTimeout() { - return task_queue_timeout_factory_.CreateTimeout(); +std::unique_ptr DcSctpTransport::CreateTimeout( + webrtc::TaskQueueBase::DelayPrecision precision) { + return task_queue_timeout_factory_.CreateTimeout(precision); } dcsctp::TimeMs DcSctpTransport::TimeMillis() { @@ -372,18 +438,21 @@ uint32_t DcSctpTransport::GetRandomInt(uint32_t low, uint32_t high) { } void DcSctpTransport::OnTotalBufferedAmountLow() { + RTC_DCHECK_RUN_ON(network_thread_); if (!ready_to_send_data_) { ready_to_send_data_ = true; - SignalReadyToSendData(); + if (data_channel_sink_) { + data_channel_sink_->OnReadyToSend(); + } } } void DcSctpTransport::OnMessageReceived(dcsctp::DcSctpMessage message) { RTC_DCHECK_RUN_ON(network_thread_); - RTC_LOG(LS_VERBOSE) << debug_name_ << "->OnMessageReceived(sid=" - << message.stream_id().value() - << ", ppid=" << message.ppid().value() - << ", length=" << message.payload().size() << ")."; + RTC_DLOG(LS_VERBOSE) << debug_name_ << "->OnMessageReceived(sid=" + << message.stream_id().value() + << ", ppid=" << message.ppid().value() + << ", length=" << message.payload().size() << ")."; cricket::ReceiveDataParams receive_data_params; receive_data_params.sid = message.stream_id().value(); auto type = ToDataMessageType(message.ppid()); @@ -401,7 +470,10 @@ void DcSctpTransport::OnMessageReceived(dcsctp::DcSctpMessage message) { receive_buffer_.AppendData(message.payload().data(), message.payload().size()); - SignalDataReceived(receive_data_params, receive_buffer_); + if (data_channel_sink_) { + data_channel_sink_->OnDataReceived( + receive_data_params.sid, receive_data_params.type, receive_buffer_); + } } void DcSctpTransport::OnError(dcsctp::ErrorKind error, @@ -422,6 +494,7 @@ void DcSctpTransport::OnError(dcsctp::ErrorKind error, void DcSctpTransport::OnAborted(dcsctp::ErrorKind error, absl::string_view message) { + RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(LS_ERROR) << debug_name_ << "->OnAborted(error=" << dcsctp::ToString(error) << ", message=" << message << ")."; @@ -433,23 +506,30 @@ void DcSctpTransport::OnAborted(dcsctp::ErrorKind error, if (code.has_value()) { rtc_error.set_sctp_cause_code(static_cast(*code)); } - SignalClosedAbruptly(rtc_error); + if (data_channel_sink_) { + data_channel_sink_->OnTransportClosed(rtc_error); + } } void DcSctpTransport::OnConnected() { - RTC_LOG(LS_INFO) << debug_name_ << "->OnConnected()."; + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DLOG(LS_INFO) << debug_name_ << 
"->OnConnected()."; ready_to_send_data_ = true; - SignalReadyToSendData(); - SignalAssociationChangeCommunicationUp(); + if (data_channel_sink_) { + data_channel_sink_->OnReadyToSend(); + } + if (on_connected_callback_) { + on_connected_callback_(); + } } void DcSctpTransport::OnClosed() { - RTC_LOG(LS_INFO) << debug_name_ << "->OnClosed()."; + RTC_DLOG(LS_INFO) << debug_name_ << "->OnClosed()."; ready_to_send_data_ = false; } void DcSctpTransport::OnConnectionRestarted() { - RTC_LOG(LS_INFO) << debug_name_ << "->OnConnectionRestarted()."; + RTC_DLOG(LS_INFO) << debug_name_ << "->OnConnectionRestarted()."; } void DcSctpTransport::OnStreamsResetFailed( @@ -466,22 +546,66 @@ void DcSctpTransport::OnStreamsResetFailed( void DcSctpTransport::OnStreamsResetPerformed( rtc::ArrayView outgoing_streams) { + RTC_DCHECK_RUN_ON(network_thread_); for (auto& stream_id : outgoing_streams) { RTC_LOG(LS_INFO) << debug_name_ << "->OnStreamsResetPerformed(...): Outgoing stream reset" << ", sid=" << stream_id.value(); - SignalClosingProcedureComplete(stream_id.value()); + + auto it = stream_states_.find(stream_id); + if (it == stream_states_.end()) { + // Ignoring an outgoing stream reset for a closed stream + return; + } + + StreamState& stream_state = it->second; + stream_state.outgoing_reset_done = true; + + if (stream_state.incoming_reset_done) { + // When the close was not initiated locally, we can signal the end of the + // data channel close procedure when the remote ACKs the reset. + if (data_channel_sink_) { + data_channel_sink_->OnChannelClosed(stream_id.value()); + } + stream_states_.erase(stream_id); + } } } void DcSctpTransport::OnIncomingStreamsReset( rtc::ArrayView incoming_streams) { + RTC_DCHECK_RUN_ON(network_thread_); for (auto& stream_id : incoming_streams) { RTC_LOG(LS_INFO) << debug_name_ << "->OnIncomingStreamsReset(...): Incoming stream reset" << ", sid=" << stream_id.value(); - SignalClosingProcedureStartedRemotely(stream_id.value()); - SignalClosingProcedureComplete(stream_id.value()); + + auto it = stream_states_.find(stream_id); + if (it == stream_states_.end()) + return; + + StreamState& stream_state = it->second; + stream_state.incoming_reset_done = true; + + if (!stream_state.closure_initiated) { + // When receiving an incoming stream reset event for a non local close + // procedure, the transport needs to reset the stream in the other + // direction too. + dcsctp::StreamID streams[1] = {stream_id}; + socket_->ResetStreams(streams); + if (data_channel_sink_) { + data_channel_sink_->OnChannelClosing(stream_id.value()); + } + } + + if (stream_state.outgoing_reset_done) { + // The close procedure that was initiated locally is complete when we + // receive and incoming reset event. + if (data_channel_sink_) { + data_channel_sink_->OnChannelClosed(stream_id.value()); + } + stream_states_.erase(stream_id); + } } } @@ -511,11 +635,9 @@ void DcSctpTransport::OnTransportWritableState( rtc::PacketTransportInternal* transport) { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK_EQ(transport_, transport); - - RTC_LOG(LS_VERBOSE) << debug_name_ - << "->OnTransportWritableState(), writable=" - << transport->writable(); - + RTC_DLOG(LS_VERBOSE) << debug_name_ + << "->OnTransportWritableState(), writable=" + << transport->writable(); MaybeConnectSocket(); } @@ -525,6 +647,7 @@ void DcSctpTransport::OnTransportReadPacket( size_t length, const int64_t& /* packet_time_us */, int flags) { + RTC_DCHECK_RUN_ON(network_thread_); if (flags) { // We are only interested in SCTP packets. 
return; @@ -540,8 +663,11 @@ void DcSctpTransport::OnTransportReadPacket( void DcSctpTransport::OnTransportClosed( rtc::PacketTransportInternal* transport) { - RTC_LOG(LS_VERBOSE) << debug_name_ << "->OnTransportClosed()."; - SignalClosedAbruptly({}); + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DLOG(LS_VERBOSE) << debug_name_ << "->OnTransportClosed()."; + if (data_channel_sink_) { + data_channel_sink_->OnTransportClosed({}); + } } void DcSctpTransport::MaybeConnectSocket() { diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.h b/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.h index c8c5199396..f86ac5a23a 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.h @@ -17,16 +17,20 @@ #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/array_view.h" +#include "api/task_queue/task_queue_base.h" #include "media/sctp/sctp_transport_internal.h" #include "net/dcsctp/public/dcsctp_options.h" #include "net/dcsctp/public/dcsctp_socket.h" +#include "net/dcsctp/public/dcsctp_socket_factory.h" #include "net/dcsctp/public/types.h" #include "net/dcsctp/timer/task_queue_timeout.h" #include "p2p/base/packet_transport_internal.h" +#include "rtc_base/containers/flat_map.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/random.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -38,9 +42,15 @@ class DcSctpTransport : public cricket::SctpTransportInternal, DcSctpTransport(rtc::Thread* network_thread, rtc::PacketTransportInternal* transport, Clock* clock); + DcSctpTransport(rtc::Thread* network_thread, + rtc::PacketTransportInternal* transport, + Clock* clock, + std::unique_ptr socket_factory); ~DcSctpTransport() override; // cricket::SctpTransportInternal + void SetOnConnectedCallback(std::function callback) override; + void SetDataChannelSink(DataChannelSink* sink) override; void SetDtlsTransport(rtc::PacketTransportInternal* transport) override; bool Start(int local_sctp_port, int remote_sctp_port, @@ -61,7 +71,8 @@ class DcSctpTransport : public cricket::SctpTransportInternal, // dcsctp::DcSctpSocketCallbacks dcsctp::SendPacketStatus SendPacketWithStatus( rtc::ArrayView data) override; - std::unique_ptr CreateTimeout() override; + std::unique_ptr CreateTimeout( + webrtc::TaskQueueBase::DelayPrecision precision) override; dcsctp::TimeMs TimeMillis() override; uint32_t GetRandomInt(uint32_t low, uint32_t high) override; void OnTotalBufferedAmountLow() override; @@ -97,12 +108,33 @@ class DcSctpTransport : public cricket::SctpTransportInternal, Clock* clock_; Random random_; + std::unique_ptr socket_factory_; dcsctp::TaskQueueTimeoutFactory task_queue_timeout_factory_; std::unique_ptr socket_; std::string debug_name_ = "DcSctpTransport"; rtc::CopyOnWriteBuffer receive_buffer_; + // Used to keep track of the state of data channels. + // Reset needs to happen both ways before signaling the transport + // is closed. + struct StreamState { + // True when the local connection has initiated the reset. + // If a connection receives a reset for a stream that isn't + // already being reset locally, it needs to fire the signal + // SignalClosingProcedureStartedRemotely. 
+ bool closure_initiated = false; + // True when the local connection received OnIncomingStreamsReset + bool incoming_reset_done = false; + // True when the local connection received OnStreamsResetPerformed + bool outgoing_reset_done = false; + }; + + // Map of all currently open or closing data channels + flat_map stream_states_ + RTC_GUARDED_BY(network_thread_); bool ready_to_send_data_ = false; + std::function on_connected_callback_ RTC_GUARDED_BY(network_thread_); + DataChannelSink* data_channel_sink_ RTC_GUARDED_BY(network_thread_) = nullptr; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.cc b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.cc index 5097d423d9..457bc5f889 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.cc @@ -13,24 +13,15 @@ #include "rtc_base/system/unused.h" #ifdef WEBRTC_HAVE_DCSCTP -#include "media/sctp/dcsctp_transport.h" // nogncheck -#include "system_wrappers/include/clock.h" // nogncheck -#include "system_wrappers/include/field_trial.h" // nogncheck -#endif - -#ifdef WEBRTC_HAVE_USRSCTP -#include "media/sctp/usrsctp_transport.h" // nogncheck +#include "media/sctp/dcsctp_transport.h" // nogncheck +#include "system_wrappers/include/clock.h" // nogncheck #endif namespace cricket { SctpTransportFactory::SctpTransportFactory(rtc::Thread* network_thread) - : network_thread_(network_thread), use_dcsctp_("Enabled", false) { + : network_thread_(network_thread) { RTC_UNUSED(network_thread_); -#ifdef WEBRTC_HAVE_DCSCTP - webrtc::ParseFieldTrial({&use_dcsctp_}, webrtc::field_trial::FindFullName( - "WebRTC-DataChannel-Dcsctp")); -#endif } std::unique_ptr @@ -38,16 +29,8 @@ SctpTransportFactory::CreateSctpTransport( rtc::PacketTransportInternal* transport) { std::unique_ptr result; #ifdef WEBRTC_HAVE_DCSCTP - if (use_dcsctp_.Get()) { - result = std::unique_ptr(new webrtc::DcSctpTransport( - network_thread_, transport, webrtc::Clock::GetRealTimeClock())); - } -#endif -#ifdef WEBRTC_HAVE_USRSCTP - if (!result) { - result = std::unique_ptr( - new UsrsctpTransport(network_thread_, transport)); - } + result = std::unique_ptr(new webrtc::DcSctpTransport( + network_thread_, transport, webrtc::Clock::GetRealTimeClock())); #endif return result; } diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.h b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.h index ed7c2163d7..4fff214129 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.h +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.h @@ -15,7 +15,6 @@ #include "api/transport/sctp_transport_factory_interface.h" #include "media/sctp/sctp_transport_internal.h" -#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/thread.h" namespace cricket { @@ -29,7 +28,6 @@ class SctpTransportFactory : public webrtc::SctpTransportFactoryInterface { private: rtc::Thread* network_thread_; - webrtc::FieldTrialFlag use_dcsctp_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_internal.h b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_internal.h index 93a59b9dc7..38da554911 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_internal.h +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_internal.h @@ -19,13 +19,13 @@ #include #include "api/transport/data_channel_transport_interface.h" -#include 
"rtc_base/copy_on_write_buffer.h" -#include "rtc_base/thread.h" // For SendDataParams/ReceiveDataParams. // TODO(deadbeef): Use something else for SCTP. It's confusing that we use an // SSRC field for SID. #include "media/base/media_channel.h" #include "p2p/base/packet_transport_internal.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/thread.h" namespace cricket { @@ -77,6 +77,9 @@ class SctpTransportInternal { public: virtual ~SctpTransportInternal() {} + virtual void SetOnConnectedCallback(std::function callback) = 0; + virtual void SetDataChannelSink(webrtc::DataChannelSink* sink) = 0; + // Changes what underlying DTLS transport is uses. Used when switching which // bundled transport the SctpTransport uses. virtual void SetDtlsTransport(rtc::PacketTransportInternal* transport) = 0; @@ -140,24 +143,6 @@ class SctpTransportInternal { // Returns the current negotiated max # of inbound streams. virtual absl::optional max_inbound_streams() const = 0; - sigslot::signal0<> SignalReadyToSendData; - sigslot::signal0<> SignalAssociationChangeCommunicationUp; - // ReceiveDataParams includes SID, seq num, timestamp, etc. CopyOnWriteBuffer - // contains message payload. - sigslot::signal2 - SignalDataReceived; - // Parameter is SID; fired when we receive an incoming stream reset on an - // open stream, indicating that the other side started the closing procedure. - // After resetting the outgoing stream, SignalClosingProcedureComplete will - // fire too. - sigslot::signal1 SignalClosingProcedureStartedRemotely; - // Parameter is SID; fired when closing procedure is complete (both incoming - // and outgoing streams reset). - sigslot::signal1 SignalClosingProcedureComplete; - // Fired when the underlying DTLS transport has closed due to an error - // or an incoming DTLS disconnect or SCTP transport errors. - sigslot::signal1 SignalClosedAbruptly; - // Helper for debugging. virtual void set_debug_name_for_testing(const char* debug_name) = 0; }; diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.cc b/TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.cc deleted file mode 100644 index 4babf110a2..0000000000 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.cc +++ /dev/null @@ -1,1575 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include -namespace { -// Some ERRNO values get re-#defined to WSA* equivalents in some talk/ -// headers. We save the original ones in an enum. -enum PreservedErrno { - SCTP_EINPROGRESS = EINPROGRESS, - SCTP_EWOULDBLOCK = EWOULDBLOCK -}; - -// Successful return value from usrsctp callbacks. Is not actually used by -// usrsctp, but all example programs for usrsctp use 1 as their return value. 
-constexpr int kSctpSuccessReturn = 1; -constexpr int kSctpErrorReturn = 0; - -} // namespace - -#include -#include -#include - -#include -#include -#include - -#include "absl/algorithm/container.h" -#include "absl/base/attributes.h" -#include "absl/types/optional.h" -#include "api/sequence_checker.h" -#include "media/base/codec.h" -#include "media/base/media_channel.h" -#include "media/base/media_constants.h" -#include "media/base/stream_params.h" -#include "media/sctp/usrsctp_transport.h" -#include "p2p/base/dtls_transport_internal.h" // For PF_NORMAL -#include "rtc_base/arraysize.h" -#include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/helpers.h" -#include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/string_utils.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_utils/to_queued_task.h" -#include "rtc_base/thread_annotations.h" -#include "rtc_base/trace_event.h" - -namespace cricket { -namespace { - -// The biggest SCTP packet. Starting from a 'safe' wire MTU value of 1280, -// take off 85 bytes for DTLS/TURN/TCP/IP and ciphertext overhead. -// -// Additionally, it's possible that TURN adds an additional 4 bytes of overhead -// after a channel has been established, so we subtract an additional 4 bytes. -// -// 1280 IPV6 MTU -// -40 IPV6 header -// -8 UDP -// -24 GCM Cipher -// -13 DTLS record header -// -4 TURN ChannelData -// = 1191 bytes. -static constexpr size_t kSctpMtu = 1191; - -// Set the initial value of the static SCTP Data Engines reference count. -ABSL_CONST_INIT int g_usrsctp_usage_count = 0; -ABSL_CONST_INIT bool g_usrsctp_initialized_ = false; -ABSL_CONST_INIT webrtc::GlobalMutex g_usrsctp_lock_(absl::kConstInit); -ABSL_CONST_INIT char kZero[] = {'\0'}; - -// DataMessageType is used for the SCTP "Payload Protocol Identifier", as -// defined in http://tools.ietf.org/html/rfc4960#section-14.4 -// -// For the list of IANA approved values see: -// https://tools.ietf.org/html/rfc8831 Sec. 8 -// http://www.iana.org/assignments/sctp-parameters/sctp-parameters.xml -// The value is not used by SCTP itself. It indicates the protocol running -// on top of SCTP. -enum { - PPID_NONE = 0, // No protocol is specified. - PPID_CONTROL = 50, - PPID_TEXT_LAST = 51, - PPID_BINARY_PARTIAL = 52, // Deprecated - PPID_BINARY_LAST = 53, - PPID_TEXT_PARTIAL = 54, // Deprecated - PPID_TEXT_EMPTY = 56, - PPID_BINARY_EMPTY = 57, -}; - -// Should only be modified by UsrSctpWrapper. -ABSL_CONST_INIT cricket::UsrsctpTransportMap* g_transport_map_ = nullptr; - -// Helper that will call C's free automatically. -// TODO(b/181900299): Figure out why unique_ptr with a custom deleter is causing -// issues in a certain build environment. -class AutoFreedPointer { - public: - explicit AutoFreedPointer(void* ptr) : ptr_(ptr) {} - AutoFreedPointer(AutoFreedPointer&& o) : ptr_(o.ptr_) { o.ptr_ = nullptr; } - ~AutoFreedPointer() { free(ptr_); } - - void* get() const { return ptr_; } - - private: - void* ptr_; -}; - -// Helper for logging SCTP messages. -#if defined(__GNUC__) -__attribute__((__format__(__printf__, 1, 2))) -#endif -void DebugSctpPrintf(const char* format, ...) { -#if RTC_DCHECK_IS_ON - char s[255]; - va_list ap; - va_start(ap, format); - vsnprintf(s, sizeof(s), format, ap); - RTC_LOG(LS_INFO) << "SCTP: " << s; - va_end(ap); -#endif -} - -// Get the PPID to use for the terminating fragment of this type. 
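The kSctpMtu budget derived in the comment above is plain arithmetic; restated as a compile-time check (numbers copied from that comment, nothing new):

// 1280 (IPv6 MTU) - 40 (IPv6) - 8 (UDP) - 24 (GCM) - 13 (DTLS record)
// - 4 (TURN ChannelData) = 1191, the value used for kSctpMtu.
static_assert(1280 - 40 - 8 - 24 - 13 - 4 == 1191,
              "kSctpMtu no longer matches the documented overhead budget");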
-uint32_t GetPpid(webrtc::DataMessageType type, size_t size) { - switch (type) { - case webrtc::DataMessageType::kControl: - return PPID_CONTROL; - case webrtc::DataMessageType::kBinary: - return size > 0 ? PPID_BINARY_LAST : PPID_BINARY_EMPTY; - case webrtc::DataMessageType::kText: - return size > 0 ? PPID_TEXT_LAST : PPID_TEXT_EMPTY; - } -} - -bool GetDataMediaType(uint32_t ppid, webrtc::DataMessageType* dest) { - RTC_DCHECK(dest != NULL); - switch (ppid) { - case PPID_BINARY_PARTIAL: - case PPID_BINARY_LAST: - case PPID_BINARY_EMPTY: - *dest = webrtc::DataMessageType::kBinary; - return true; - - case PPID_TEXT_PARTIAL: - case PPID_TEXT_LAST: - case PPID_TEXT_EMPTY: - *dest = webrtc::DataMessageType::kText; - return true; - - case PPID_CONTROL: - *dest = webrtc::DataMessageType::kControl; - return true; - } - return false; -} - -bool IsEmptyPPID(uint32_t ppid) { - return ppid == PPID_BINARY_EMPTY || ppid == PPID_TEXT_EMPTY; -} - -// Log the packet in text2pcap format, if log level is at LS_VERBOSE. -// -// In order to turn these logs into a pcap file you can use, first filter the -// "SCTP_PACKET" log lines: -// -// cat chrome_debug.log | grep SCTP_PACKET > filtered.log -// -// Then run through text2pcap: -// -// text2pcap -n -l 248 -D -t '%H:%M:%S.' filtered.log filtered.pcapng -// -// Command flag information: -// -n: Outputs to a pcapng file, can specify inbound/outbound packets. -// -l: Specifies the link layer header type. 248 means SCTP. See: -// http://www.tcpdump.org/linktypes.html -// -D: Text before packet specifies if it is inbound or outbound. -// -t: Time format. -// -// Why do all this? Because SCTP goes over DTLS, which is encrypted. So just -// getting a normal packet capture won't help you, unless you have the DTLS -// keying material. -void VerboseLogPacket(const void* data, size_t length, int direction) { - if (RTC_LOG_CHECK_LEVEL(LS_VERBOSE) && length > 0) { - char* dump_buf; - // Some downstream project uses an older version of usrsctp that expects - // a non-const "void*" as first parameter when dumping the packet, so we - // need to cast the const away here to avoid a compiler error. - if ((dump_buf = usrsctp_dumppacket(const_cast(data), length, - direction)) != NULL) { - RTC_LOG(LS_VERBOSE) << dump_buf; - usrsctp_freedumpbuffer(dump_buf); - } - } -} - -// Creates the sctp_sendv_spa struct used for setting flags in the -// sctp_sendv() call. -sctp_sendv_spa CreateSctpSendParams(int sid, - const webrtc::SendDataParams& params, - size_t size) { - struct sctp_sendv_spa spa = {0}; - spa.sendv_flags |= SCTP_SEND_SNDINFO_VALID; - spa.sendv_sndinfo.snd_sid = sid; - spa.sendv_sndinfo.snd_ppid = rtc::HostToNetwork32(GetPpid(params.type, size)); - // Explicitly marking the EOR flag turns the usrsctp_sendv call below into a - // non atomic operation. This means that the sctp lib might only accept the - // message partially. This is done in order to improve throughput, so that we - // don't have to wait for an empty buffer to send the max message length, for - // example. 
- spa.sendv_sndinfo.snd_flags |= SCTP_EOR; - - if (!params.ordered) { - spa.sendv_sndinfo.snd_flags |= SCTP_UNORDERED; - } - if (params.max_rtx_count.has_value()) { - RTC_DCHECK(*params.max_rtx_count >= 0 && - *params.max_rtx_count <= std::numeric_limits::max()); - spa.sendv_flags |= SCTP_SEND_PRINFO_VALID; - spa.sendv_prinfo.pr_policy = SCTP_PR_SCTP_RTX; - spa.sendv_prinfo.pr_value = *params.max_rtx_count; - } - if (params.max_rtx_ms.has_value()) { - RTC_DCHECK(*params.max_rtx_ms >= 0 && - *params.max_rtx_ms <= std::numeric_limits::max()); - spa.sendv_flags |= SCTP_SEND_PRINFO_VALID; - spa.sendv_prinfo.pr_policy = SCTP_PR_SCTP_TTL; - spa.sendv_prinfo.pr_value = *params.max_rtx_ms; - } - return spa; -} - -std::string SctpErrorCauseCodeToString(SctpErrorCauseCode code) { - switch (code) { - case SctpErrorCauseCode::kInvalidStreamIdentifier: - return "Invalid Stream Identifier"; - case SctpErrorCauseCode::kMissingMandatoryParameter: - return "Missing Mandatory Parameter"; - case SctpErrorCauseCode::kStaleCookieError: - return "Stale Cookie Error"; - case SctpErrorCauseCode::kOutOfResource: - return "Out of Resource"; - case SctpErrorCauseCode::kUnresolvableAddress: - return "Unresolvable Address"; - case SctpErrorCauseCode::kUnrecognizedChunkType: - return "Unrecognized Chunk Type"; - case SctpErrorCauseCode::kInvalidMandatoryParameter: - return "Invalid Mandatory Parameter"; - case SctpErrorCauseCode::kUnrecognizedParameters: - return "Unrecognized Parameters"; - case SctpErrorCauseCode::kNoUserData: - return "No User Data"; - case SctpErrorCauseCode::kCookieReceivedWhileShuttingDown: - return "Cookie Received Whilte Shutting Down"; - case SctpErrorCauseCode::kRestartWithNewAddresses: - return "Restart With New Addresses"; - case SctpErrorCauseCode::kUserInitiatedAbort: - return "User Initiated Abort"; - case SctpErrorCauseCode::kProtocolViolation: - return "Protocol Violation"; - } - return "Unknown error"; -} -} // namespace - -// Maps SCTP transport ID to UsrsctpTransport object, necessary in send -// threshold callback and outgoing packet callback. It also provides a facility -// to safely post a task to an UsrsctpTransport's network thread from another -// thread. -class UsrsctpTransportMap { - public: - UsrsctpTransportMap() = default; - - // Assigns a new unused ID to the following transport. - uintptr_t Register(cricket::UsrsctpTransport* transport) { - webrtc::MutexLock lock(&lock_); - // usrsctp_connect fails with a value of 0... - if (next_id_ == 0) { - ++next_id_; - } - // In case we've wrapped around and need to find an empty spot from a - // removed transport. Assumes we'll never be full. - while (map_.find(next_id_) != map_.end()) { - ++next_id_; - if (next_id_ == 0) { - ++next_id_; - } - } - map_[next_id_] = transport; - return next_id_++; - } - - // Returns true if found. - bool Deregister(uintptr_t id) { - webrtc::MutexLock lock(&lock_); - return map_.erase(id) > 0; - } - - // Posts `action` to the network thread of the transport identified by `id` - // and returns true if found, all while holding a lock to protect against the - // transport being simultaneously deleted/deregistered, or returns false if - // not found. 
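The map described above reduces to a small, lock-protected id-to-object registry: usrsctp is handed an opaque integer instead of a pointer, and the integer is resolved back under a lock when the library calls in from an arbitrary thread. A generic sketch of that pattern, with std::mutex and std::unordered_map standing in for webrtc::Mutex and the class name hypothetical:

#include <cstdint>
#include <mutex>
#include <unordered_map>

template <typename T>
class IdRegistry {
 public:
  // Assigns a new, non-zero, currently unused id to `object`.
  uintptr_t Register(T* object) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (next_id_ == 0) ++next_id_;  // 0 is rejected by usrsctp_connect().
    while (map_.count(next_id_)) {
      ++next_id_;
      if (next_id_ == 0) ++next_id_;
    }
    map_[next_id_] = object;
    return next_id_++;
  }
  // Returns true if the id was registered.
  bool Deregister(uintptr_t id) {
    std::lock_guard<std::mutex> lock(mutex_);
    return map_.erase(id) > 0;
  }
  // Runs `action` on the object while holding the lock, so it cannot be
  // deregistered concurrently. The real code posts to the object's network
  // thread instead of calling inline.
  template <typename F>
  bool WithObject(uintptr_t id, F action) {
    std::lock_guard<std::mutex> lock(mutex_);
    auto it = map_.find(id);
    if (it == map_.end()) return false;
    action(it->second);
    return true;
  }

 private:
  std::mutex mutex_;
  uintptr_t next_id_ = 0;
  std::unordered_map<uintptr_t, T*> map_;
};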
- template - bool PostToTransportThread(uintptr_t id, F action) const { - webrtc::MutexLock lock(&lock_); - UsrsctpTransport* transport = RetrieveWhileHoldingLock(id); - if (!transport) { - return false; - } - transport->network_thread_->PostTask(ToQueuedTask( - transport->task_safety_, - [transport, action{std::move(action)}]() { action(transport); })); - return true; - } - - private: - UsrsctpTransport* RetrieveWhileHoldingLock(uintptr_t id) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_) { - auto it = map_.find(id); - if (it == map_.end()) { - return nullptr; - } - return it->second; - } - - mutable webrtc::Mutex lock_; - - uintptr_t next_id_ RTC_GUARDED_BY(lock_) = 0; - std::unordered_map map_ RTC_GUARDED_BY(lock_); -}; - -// Handles global init/deinit, and mapping from usrsctp callbacks to -// UsrsctpTransport calls. -class UsrsctpTransport::UsrSctpWrapper { - public: - static void InitializeUsrSctp() { - RTC_LOG(LS_INFO) << __FUNCTION__; - // UninitializeUsrSctp tries to call usrsctp_finish in a loop for three - // seconds; if that failed and we were left in a still-initialized state, we - // don't want to call usrsctp_init again as that will result in undefined - // behavior. - if (g_usrsctp_initialized_) { - RTC_LOG(LS_WARNING) << "Not reinitializing usrsctp since last attempt at " - "usrsctp_finish failed."; - } else { - // First argument is udp_encapsulation_port, which is not releveant for - // our AF_CONN use of sctp. - usrsctp_init(0, &UsrSctpWrapper::OnSctpOutboundPacket, &DebugSctpPrintf); - g_usrsctp_initialized_ = true; - } - - // To turn on/off detailed SCTP debugging. You will also need to have the - // SCTP_DEBUG cpp defines flag, which can be turned on in media/BUILD.gn. - // usrsctp_sysctl_set_sctp_debug_on(SCTP_DEBUG_ALL); - - // TODO(ldixon): Consider turning this on/off. - usrsctp_sysctl_set_sctp_ecn_enable(0); - - // WebRTC doesn't use these features, so disable them to reduce the - // potential attack surface. - usrsctp_sysctl_set_sctp_asconf_enable(0); - usrsctp_sysctl_set_sctp_auth_enable(0); - - // This is harmless, but we should find out when the library default - // changes. - int send_size = usrsctp_sysctl_get_sctp_sendspace(); - if (send_size != kSctpSendBufferSize) { - RTC_LOG(LS_ERROR) << "Got different send size than expected: " - << send_size; - } - - // TODO(ldixon): Consider turning this on/off. - // This is not needed right now (we don't do dynamic address changes): - // If SCTP Auto-ASCONF is enabled, the peer is informed automatically - // when a new address is added or removed. This feature is enabled by - // default. - // usrsctp_sysctl_set_sctp_auto_asconf(0); - - // TODO(ldixon): Consider turning this on/off. - // Add a blackhole sysctl. Setting it to 1 results in no ABORTs - // being sent in response to INITs, setting it to 2 results - // in no ABORTs being sent for received OOTB packets. - // This is similar to the TCP sysctl. - // - // See: http://lakerest.net/pipermail/sctp-coders/2012-January/009438.html - // See: http://svnweb.freebsd.org/base?view=revision&revision=229805 - // usrsctp_sysctl_set_sctp_blackhole(2); - - // Set the number of default outgoing streams. This is the number we'll - // send in the SCTP INIT message. - usrsctp_sysctl_set_sctp_nr_outgoing_streams_default(kMaxSctpStreams); - - g_transport_map_ = new UsrsctpTransportMap(); - } - - static void UninitializeUsrSctp() { - RTC_LOG(LS_INFO) << __FUNCTION__; - // usrsctp_finish() may fail if it's called too soon after the transports - // are - // closed. 
Wait and try again until it succeeds for up to 3 seconds. - for (size_t i = 0; i < 300; ++i) { - if (usrsctp_finish() == 0) { - g_usrsctp_initialized_ = false; - delete g_transport_map_; - g_transport_map_ = nullptr; - return; - } - - rtc::Thread::SleepMs(10); - } - delete g_transport_map_; - g_transport_map_ = nullptr; - RTC_LOG(LS_ERROR) << "Failed to shutdown usrsctp."; - } - - static void IncrementUsrSctpUsageCount() { - webrtc::GlobalMutexLock lock(&g_usrsctp_lock_); - if (!g_usrsctp_usage_count) { - InitializeUsrSctp(); - } - ++g_usrsctp_usage_count; - } - - static void DecrementUsrSctpUsageCount() { - webrtc::GlobalMutexLock lock(&g_usrsctp_lock_); - --g_usrsctp_usage_count; - if (!g_usrsctp_usage_count) { - UninitializeUsrSctp(); - } - } - - // This is the callback usrsctp uses when there's data to send on the network - // that has been wrapped appropriatly for the SCTP protocol. - static int OnSctpOutboundPacket(void* addr, - void* data, - size_t length, - uint8_t tos, - uint8_t set_df) { - if (!g_transport_map_) { - RTC_LOG(LS_ERROR) - << "OnSctpOutboundPacket called after usrsctp uninitialized?"; - return EINVAL; - } - RTC_LOG(LS_VERBOSE) << "global OnSctpOutboundPacket():" - "addr: " - << addr << "; length: " << length - << "; tos: " << rtc::ToHex(tos) - << "; set_df: " << rtc::ToHex(set_df); - - VerboseLogPacket(data, length, SCTP_DUMP_OUTBOUND); - - // Note: We have to copy the data; the caller will delete it. - rtc::CopyOnWriteBuffer buf(reinterpret_cast(data), length); - - // PostsToTransportThread protects against the transport being - // simultaneously deregistered/deleted, since this callback may come from - // the SCTP timer thread and thus race with the network thread. - bool found = g_transport_map_->PostToTransportThread( - reinterpret_cast(addr), [buf](UsrsctpTransport* transport) { - transport->OnPacketFromSctpToNetwork(buf); - }); - if (!found) { - RTC_LOG(LS_ERROR) - << "OnSctpOutboundPacket: Failed to get transport for socket ID " - << addr << "; possibly was already destroyed."; - return EINVAL; - } - - return 0; - } - - // This is the callback called from usrsctp when data has been received, after - // a packet has been interpreted and parsed by usrsctp and found to contain - // payload data. It is called by a usrsctp thread. It is assumed this function - // will free the memory used by 'data'. - static int OnSctpInboundPacket(struct socket* sock, - union sctp_sockstore addr, - void* data, - size_t length, - struct sctp_rcvinfo rcv, - int flags, - void* ulp_info) { - AutoFreedPointer owned_data(data); - - if (!g_transport_map_) { - RTC_LOG(LS_ERROR) - << "OnSctpInboundPacket called after usrsctp uninitialized?"; - return kSctpErrorReturn; - } - - uintptr_t id = reinterpret_cast(ulp_info); - - // PostsToTransportThread protects against the transport being - // simultaneously deregistered/deleted, since this callback may come from - // the SCTP timer thread and thus race with the network thread. 
- bool found = g_transport_map_->PostToTransportThread( - id, [owned_data{std::move(owned_data)}, length, rcv, - flags](UsrsctpTransport* transport) { - transport->OnDataOrNotificationFromSctp(owned_data.get(), length, rcv, - flags); - }); - if (!found) { - RTC_LOG(LS_ERROR) - << "OnSctpInboundPacket: Failed to get transport for socket ID " << id - << "; possibly was already destroyed."; - return kSctpErrorReturn; - } - return kSctpSuccessReturn; - } - - static int SendThresholdCallback(struct socket* sock, - uint32_t sb_free, - void* ulp_info) { - // Fired on our I/O thread. UsrsctpTransport::OnPacketReceived() gets - // a packet containing acknowledgments, which goes into usrsctp_conninput, - // and then back here. - if (!g_transport_map_) { - RTC_LOG(LS_ERROR) - << "SendThresholdCallback called after usrsctp uninitialized?"; - return 0; - } - - uintptr_t id = reinterpret_cast(ulp_info); - - bool found = g_transport_map_->PostToTransportThread( - id, [](UsrsctpTransport* transport) { - transport->OnSendThresholdCallback(); - }); - if (!found) { - RTC_LOG(LS_ERROR) - << "SendThresholdCallback: Failed to get transport for socket ID " - << id << "; possibly was already destroyed."; - } - return 0; - } -}; - -UsrsctpTransport::UsrsctpTransport(rtc::Thread* network_thread, - rtc::PacketTransportInternal* transport) - : network_thread_(network_thread), - transport_(transport), - was_ever_writable_(transport ? transport->writable() : false) { - RTC_DCHECK(network_thread_); - RTC_DCHECK_RUN_ON(network_thread_); - ConnectTransportSignals(); -} - -UsrsctpTransport::~UsrsctpTransport() { - RTC_DCHECK_RUN_ON(network_thread_); - // Close abruptly; no reset procedure. - CloseSctpSocket(); - // It's not strictly necessary to reset these fields to nullptr, - // but having these fields set to nullptr is a clear indication that - // object was destructed. There was a bug in usrsctp when it - // invoked OnSctpOutboundPacket callback for destructed UsrsctpTransport, - // which caused obscure SIGSEGV on access to these fields, - // having this fields set to nullptr will make it easier to understand - // that UsrsctpTransport was destructed and "use-after-free" bug happen. - // SIGSEGV error triggered on dereference these pointers will also - // be easier to understand due to 0x0 address. All of this assumes - // that ASAN is not enabled to detect "use-after-free", which is - // currently default configuration. - network_thread_ = nullptr; - transport_ = nullptr; -} - -void UsrsctpTransport::SetDtlsTransport( - rtc::PacketTransportInternal* transport) { - RTC_DCHECK_RUN_ON(network_thread_); - DisconnectTransportSignals(); - transport_ = transport; - ConnectTransportSignals(); - if (!was_ever_writable_ && transport && transport->writable()) { - was_ever_writable_ = true; - // New transport is writable, now we can start the SCTP connection if Start - // was called already. 
- if (started_) { - RTC_DCHECK(!sock_); - Connect(); - } - } -} - -bool UsrsctpTransport::Start(int local_sctp_port, - int remote_sctp_port, - int max_message_size) { - RTC_DCHECK_RUN_ON(network_thread_); - if (local_sctp_port == -1) { - local_sctp_port = kSctpDefaultPort; - } - if (remote_sctp_port == -1) { - remote_sctp_port = kSctpDefaultPort; - } - if (max_message_size > kSctpSendBufferSize) { - RTC_LOG(LS_ERROR) << "Max message size of " << max_message_size - << " is larger than send bufffer size " - << kSctpSendBufferSize; - return false; - } - if (max_message_size < 1) { - RTC_LOG(LS_ERROR) << "Max message size of " << max_message_size - << " is too small"; - return false; - } - // We allow changing max_message_size with a second Start() call, - // but not changing the port numbers. - max_message_size_ = max_message_size; - if (started_) { - if (local_sctp_port != local_port_ || remote_sctp_port != remote_port_) { - RTC_LOG(LS_ERROR) - << "Can't change SCTP port after SCTP association formed."; - return false; - } - return true; - } - local_port_ = local_sctp_port; - remote_port_ = remote_sctp_port; - started_ = true; - RTC_DCHECK(!sock_); - // Only try to connect if the DTLS transport has been writable before - // (indicating that the DTLS handshake is complete). - if (was_ever_writable_) { - return Connect(); - } - return true; -} - -bool UsrsctpTransport::OpenStream(int sid) { - RTC_DCHECK_RUN_ON(network_thread_); - if (sid > kMaxSctpSid) { - RTC_LOG(LS_WARNING) << debug_name_ - << "->OpenStream(...): " - "Not adding data stream " - "with sid=" - << sid << " because sid is too high."; - return false; - } - auto it = stream_status_by_sid_.find(sid); - if (it == stream_status_by_sid_.end()) { - stream_status_by_sid_[sid] = StreamStatus(); - return true; - } - if (it->second.is_open()) { - RTC_LOG(LS_WARNING) << debug_name_ - << "->OpenStream(...): " - "Not adding data stream " - "with sid=" - << sid << " because stream is already open."; - return false; - } else { - RTC_LOG(LS_WARNING) << debug_name_ - << "->OpenStream(...): " - "Not adding data stream " - " with sid=" - << sid << " because stream is still closing."; - return false; - } -} - -bool UsrsctpTransport::ResetStream(int sid) { - RTC_DCHECK_RUN_ON(network_thread_); - - auto it = stream_status_by_sid_.find(sid); - if (it == stream_status_by_sid_.end() || !it->second.is_open()) { - RTC_LOG(LS_WARNING) << debug_name_ << "->ResetStream(" << sid - << "): stream not open."; - return false; - } - - RTC_LOG(LS_VERBOSE) << debug_name_ << "->ResetStream(" << sid - << "): " - "Queuing RE-CONFIG chunk."; - it->second.closure_initiated = true; - - // Signal our stream-reset logic that it should try to send now, if it can. - SendQueuedStreamResets(); - - // The stream will actually get removed when we get the acknowledgment. - return true; -} - -bool UsrsctpTransport::SendData(int sid, - const webrtc::SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload, - SendDataResult* result) { - RTC_DCHECK_RUN_ON(network_thread_); - - if (partial_outgoing_message_.has_value()) { - if (result) { - *result = SDR_BLOCK; - } - // Ready to send should get set only when SendData() call gets blocked. - ready_to_send_data_ = false; - return false; - } - - // Do not queue data to send on a closing stream. 
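Seen from the caller, the contract of UsrsctpTransport::SendData() around this hunk is: a true return means the payload was fully accepted, or partially accepted and buffered by the transport, and must not be resent; a false return with SDR_BLOCK means flow control, so the caller keeps the payload and retries once the transport reports ready-to-send again. A caller-side sketch of the pre-dcsctp API, where TrySend and `pending` are hypothetical names, not code from the tree:

#include <vector>

#include "media/sctp/usrsctp_transport.h"
#include "rtc_base/copy_on_write_buffer.h"

bool TrySend(cricket::UsrsctpTransport* transport,
             int sid,
             const webrtc::SendDataParams& params,
             const rtc::CopyOnWriteBuffer& payload,
             std::vector<rtc::CopyOnWriteBuffer>* pending) {
  cricket::SendDataResult result = cricket::SDR_SUCCESS;
  if (transport->SendData(sid, params, payload, &result)) {
    // Fully accepted, or partially accepted and buffered by the transport;
    // this payload must not be handed to SendData() again.
    return true;
  }
  if (result == cricket::SDR_BLOCK) {
    // Flow-controlled: keep the payload and retry after the transport fires
    // SignalReadyToSendData (driven by OnSendThresholdCallback below).
    pending->push_back(payload);
  }
  // Otherwise SDR_ERROR: unknown or closing sid, or oversized message.
  return false;
}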
- auto it = stream_status_by_sid_.find(sid); - if (it == stream_status_by_sid_.end() || !it->second.is_open()) { - RTC_LOG(LS_WARNING) - << debug_name_ - << "->SendData(...): " - "Not sending data because sid is unknown or closing: " - << sid; - if (result) { - *result = SDR_ERROR; - } - return false; - } - - size_t payload_size = payload.size(); - OutgoingMessage message(payload, sid, params); - SendDataResult send_message_result = SendMessageInternal(&message); - if (result) { - *result = send_message_result; - } - if (payload_size == message.size()) { - // Nothing was sent. - return false; - } - // If any data is sent, we accept the message. In the case that data was - // partially accepted by the sctp library, the remaining is buffered. This - // ensures the client does not resend the message. - RTC_DCHECK_LT(message.size(), payload_size); - if (message.size() > 0) { - RTC_DCHECK(!partial_outgoing_message_.has_value()); - RTC_DLOG(LS_VERBOSE) << "Partially sent message. Buffering the remaining" - << message.size() << "/" << payload_size << " bytes."; - - partial_outgoing_message_.emplace(message); - } - return true; -} - -SendDataResult UsrsctpTransport::SendMessageInternal(OutgoingMessage* message) { - RTC_DCHECK_RUN_ON(network_thread_); - if (!sock_) { - RTC_LOG(LS_WARNING) << debug_name_ - << "->SendMessageInternal(...): " - "Not sending packet with sid=" - << message->sid() << " len=" << message->size() - << " before Start()."; - return SDR_ERROR; - } - if (message->send_params().type != webrtc::DataMessageType::kControl) { - auto it = stream_status_by_sid_.find(message->sid()); - if (it == stream_status_by_sid_.end()) { - RTC_LOG(LS_WARNING) << debug_name_ - << "->SendMessageInternal(...): " - "Not sending data because sid is unknown: " - << message->sid(); - return SDR_ERROR; - } - } - if (message->size() > static_cast(max_message_size_)) { - RTC_LOG(LS_ERROR) << "Attempting to send message of size " - << message->size() << " which is larger than limit " - << max_message_size_; - return SDR_ERROR; - } - - // Send data using SCTP. - sctp_sendv_spa spa = CreateSctpSendParams( - message->sid(), message->send_params(), message->size()); - const void* data = message->data(); - size_t data_length = message->size(); - if (message->size() == 0) { - // Empty messages are replaced by a single NUL byte on the wire as SCTP - // doesn't support empty messages. - // The PPID carries the information that the payload needs to be ignored. - data = kZero; - data_length = 1; - } - // Note: this send call is not atomic because the EOR bit is set. This means - // that usrsctp can partially accept this message and it is our duty to buffer - // the rest. - ssize_t send_res = usrsctp_sendv(sock_, data, data_length, NULL, 0, &spa, - rtc::checked_cast(sizeof(spa)), - SCTP_SENDV_SPA, 0); - if (send_res < 0) { - if (errno == SCTP_EWOULDBLOCK) { - ready_to_send_data_ = false; - RTC_LOG(LS_VERBOSE) << debug_name_ - << "->SendMessageInternal(...): EWOULDBLOCK returned"; - return SDR_BLOCK; - } - - RTC_LOG_ERRNO(LS_ERROR) << "ERROR:" << debug_name_ - << "->SendMessageInternal(...): " - " usrsctp_sendv: "; - return SDR_ERROR; - } - - size_t amount_sent = static_cast(send_res); - RTC_DCHECK_LE(amount_sent, data_length); - if (message->size() != 0) - message->Advance(amount_sent); - // Only way out now is success. 
- return SDR_SUCCESS; -} - -bool UsrsctpTransport::ReadyToSendData() { - RTC_DCHECK_RUN_ON(network_thread_); - return ready_to_send_data_; -} - -void UsrsctpTransport::ConnectTransportSignals() { - RTC_DCHECK_RUN_ON(network_thread_); - if (!transport_) { - return; - } - transport_->SignalWritableState.connect(this, - &UsrsctpTransport::OnWritableState); - transport_->SignalReadPacket.connect(this, &UsrsctpTransport::OnPacketRead); - transport_->SignalClosed.connect(this, &UsrsctpTransport::OnClosed); -} - -void UsrsctpTransport::DisconnectTransportSignals() { - RTC_DCHECK_RUN_ON(network_thread_); - if (!transport_) { - return; - } - transport_->SignalWritableState.disconnect(this); - transport_->SignalReadPacket.disconnect(this); - transport_->SignalClosed.disconnect(this); -} - -bool UsrsctpTransport::Connect() { - RTC_DCHECK_RUN_ON(network_thread_); - RTC_LOG(LS_VERBOSE) << debug_name_ << "->Connect()."; - - // If we already have a socket connection (which shouldn't ever happen), just - // return. - RTC_DCHECK(!sock_); - if (sock_) { - RTC_LOG(LS_ERROR) << debug_name_ - << "->Connect(): Ignored as socket " - "is already established."; - return true; - } - - // If no socket (it was closed) try to start it again. This can happen when - // the socket we are connecting to closes, does an sctp shutdown handshake, - // or behaves unexpectedly causing us to perform a CloseSctpSocket. - if (!OpenSctpSocket()) { - return false; - } - - // Note: conversion from int to uint16_t happens on assignment. - sockaddr_conn local_sconn = GetSctpSockAddr(local_port_); - if (usrsctp_bind(sock_, reinterpret_cast(&local_sconn), - sizeof(local_sconn)) < 0) { - RTC_LOG_ERRNO(LS_ERROR) - << debug_name_ << "->Connect(): " << ("Failed usrsctp_bind"); - CloseSctpSocket(); - return false; - } - - // Note: conversion from int to uint16_t happens on assignment. - sockaddr_conn remote_sconn = GetSctpSockAddr(remote_port_); - int connect_result = usrsctp_connect( - sock_, reinterpret_cast(&remote_sconn), sizeof(remote_sconn)); - if (connect_result < 0 && errno != SCTP_EINPROGRESS) { - RTC_LOG_ERRNO(LS_ERROR) << debug_name_ - << "->Connect(): " - "Failed usrsctp_connect. got errno=" - << errno << ", but wanted " << SCTP_EINPROGRESS; - CloseSctpSocket(); - return false; - } - // Set the MTU and disable MTU discovery. - // We can only do this after usrsctp_connect or it has no effect. - sctp_paddrparams params = {}; - memcpy(¶ms.spp_address, &remote_sconn, sizeof(remote_sconn)); - params.spp_flags = SPP_PMTUD_DISABLE; - // The MTU value provided specifies the space available for chunks in the - // packet, so we subtract the SCTP header size. - params.spp_pathmtu = kSctpMtu - sizeof(struct sctp_common_header); - if (usrsctp_setsockopt(sock_, IPPROTO_SCTP, SCTP_PEER_ADDR_PARAMS, ¶ms, - sizeof(params))) { - RTC_LOG_ERRNO(LS_ERROR) << debug_name_ - << "->Connect(): " - "Failed to set SCTP_PEER_ADDR_PARAMS."; - } - // Since this is a fresh SCTP association, we'll always start out with empty - // queues, so "ReadyToSendData" should be true. - SetReadyToSendData(); - return true; -} - -bool UsrsctpTransport::OpenSctpSocket() { - RTC_DCHECK_RUN_ON(network_thread_); - if (sock_) { - RTC_LOG(LS_WARNING) << debug_name_ - << "->OpenSctpSocket(): " - "Ignoring attempt to re-create existing socket."; - return false; - } - - UsrSctpWrapper::IncrementUsrSctpUsageCount(); - - // If kSctpSendBufferSize isn't reflective of reality, we log an error, but we - // still have to do something reasonable here. 
Look up what the buffer's real - // size is and set our threshold to something reasonable. - // TODO(bugs.webrtc.org/11824): That was previously set to 50%, not 25%, but - // it was reduced to a recent usrsctp regression. Can return to 50% when the - // root cause is fixed. - static const int kSendThreshold = usrsctp_sysctl_get_sctp_sendspace() / 4; - - sock_ = usrsctp_socket( - AF_CONN, SOCK_STREAM, IPPROTO_SCTP, &UsrSctpWrapper::OnSctpInboundPacket, - &UsrSctpWrapper::SendThresholdCallback, kSendThreshold, nullptr); - if (!sock_) { - RTC_LOG_ERRNO(LS_ERROR) << debug_name_ - << "->OpenSctpSocket(): " - "Failed to create SCTP socket."; - UsrSctpWrapper::DecrementUsrSctpUsageCount(); - return false; - } - - if (!ConfigureSctpSocket()) { - usrsctp_close(sock_); - sock_ = nullptr; - UsrSctpWrapper::DecrementUsrSctpUsageCount(); - return false; - } - id_ = g_transport_map_->Register(this); - usrsctp_set_ulpinfo(sock_, reinterpret_cast(id_)); - // Register our id as an address for usrsctp. This is used by SCTP to - // direct the packets received (by the created socket) to this class. - usrsctp_register_address(reinterpret_cast(id_)); - return true; -} - -bool UsrsctpTransport::ConfigureSctpSocket() { - RTC_DCHECK_RUN_ON(network_thread_); - RTC_DCHECK(sock_); - // Make the socket non-blocking. Connect, close, shutdown etc will not block - // the thread waiting for the socket operation to complete. - if (usrsctp_set_non_blocking(sock_, 1) < 0) { - RTC_LOG_ERRNO(LS_ERROR) << debug_name_ - << "->ConfigureSctpSocket(): " - "Failed to set SCTP to non blocking."; - return false; - } - - // This ensures that the usrsctp close call deletes the association. This - // prevents usrsctp from calling OnSctpOutboundPacket with references to - // this class as the address. - linger linger_opt; - linger_opt.l_onoff = 1; - linger_opt.l_linger = 0; - if (usrsctp_setsockopt(sock_, SOL_SOCKET, SO_LINGER, &linger_opt, - sizeof(linger_opt))) { - RTC_LOG_ERRNO(LS_ERROR) << debug_name_ - << "->ConfigureSctpSocket(): " - "Failed to set SO_LINGER."; - return false; - } - - // Enable stream ID resets. - struct sctp_assoc_value stream_rst; - stream_rst.assoc_id = SCTP_ALL_ASSOC; - stream_rst.assoc_value = 1; - if (usrsctp_setsockopt(sock_, IPPROTO_SCTP, SCTP_ENABLE_STREAM_RESET, - &stream_rst, sizeof(stream_rst))) { - RTC_LOG_ERRNO(LS_ERROR) << debug_name_ - << "->ConfigureSctpSocket(): " - "Failed to set SCTP_ENABLE_STREAM_RESET."; - return false; - } - - // Nagle. - uint32_t nodelay = 1; - if (usrsctp_setsockopt(sock_, IPPROTO_SCTP, SCTP_NODELAY, &nodelay, - sizeof(nodelay))) { - RTC_LOG_ERRNO(LS_ERROR) << debug_name_ - << "->ConfigureSctpSocket(): " - "Failed to set SCTP_NODELAY."; - return false; - } - - // Explicit EOR. - uint32_t eor = 1; - if (usrsctp_setsockopt(sock_, IPPROTO_SCTP, SCTP_EXPLICIT_EOR, &eor, - sizeof(eor))) { - RTC_LOG_ERRNO(LS_ERROR) << debug_name_ - << "->ConfigureSctpSocket(): " - "Failed to set SCTP_EXPLICIT_EOR."; - return false; - } - - // Subscribe to SCTP event notifications. 
- // TODO(crbug.com/1137936): Subscribe to SCTP_SEND_FAILED_EVENT once deadlock - // is fixed upstream, or we switch to the upcall API: - // https://github.com/sctplab/usrsctp/issues/537 - int event_types[] = {SCTP_ASSOC_CHANGE, SCTP_PEER_ADDR_CHANGE, - SCTP_SENDER_DRY_EVENT, SCTP_STREAM_RESET_EVENT}; - struct sctp_event event = {0}; - event.se_assoc_id = SCTP_ALL_ASSOC; - event.se_on = 1; - for (size_t i = 0; i < arraysize(event_types); i++) { - event.se_type = event_types[i]; - if (usrsctp_setsockopt(sock_, IPPROTO_SCTP, SCTP_EVENT, &event, - sizeof(event)) < 0) { - RTC_LOG_ERRNO(LS_ERROR) << debug_name_ - << "->ConfigureSctpSocket(): " - "Failed to set SCTP_EVENT type: " - << event.se_type; - return false; - } - } - return true; -} - -void UsrsctpTransport::CloseSctpSocket() { - RTC_DCHECK_RUN_ON(network_thread_); - if (sock_) { - // We assume that SO_LINGER option is set to close the association when - // close is called. This means that any pending packets in usrsctp will be - // discarded instead of being sent. - usrsctp_close(sock_); - sock_ = nullptr; - usrsctp_deregister_address(reinterpret_cast(id_)); - RTC_CHECK(g_transport_map_->Deregister(id_)); - UsrSctpWrapper::DecrementUsrSctpUsageCount(); - ready_to_send_data_ = false; - } -} - -bool UsrsctpTransport::SendQueuedStreamResets() { - RTC_DCHECK_RUN_ON(network_thread_); - - auto needs_reset = - [this](const std::map::value_type& stream) { - // Ignore streams with partial outgoing messages as they are required to - // be fully sent by the WebRTC spec - // https://w3c.github.io/webrtc-pc/#closing-procedure - return stream.second.need_outgoing_reset() && - (!partial_outgoing_message_.has_value() || - partial_outgoing_message_.value().sid() != - static_cast(stream.first)); - }; - // Figure out how many streams need to be reset. We need to do this so we can - // allocate the right amount of memory for the sctp_reset_streams structure. - size_t num_streams = absl::c_count_if(stream_status_by_sid_, needs_reset); - if (num_streams == 0) { - // Nothing to reset. - return true; - } - - RTC_LOG(LS_VERBOSE) << "SendQueuedStreamResets[" << debug_name_ - << "]: Resetting " << num_streams << " outgoing streams."; - - const size_t num_bytes = - sizeof(struct sctp_reset_streams) + (num_streams * sizeof(uint16_t)); - std::vector reset_stream_buf(num_bytes, 0); - struct sctp_reset_streams* resetp = - reinterpret_cast(&reset_stream_buf[0]); - resetp->srs_assoc_id = SCTP_ALL_ASSOC; - resetp->srs_flags = SCTP_STREAM_RESET_OUTGOING; - resetp->srs_number_streams = rtc::checked_cast(num_streams); - int result_idx = 0; - - for (const auto& stream : stream_status_by_sid_) { - if (needs_reset(stream)) { - resetp->srs_stream_list[result_idx++] = stream.first; - } - } - - int ret = - usrsctp_setsockopt(sock_, IPPROTO_SCTP, SCTP_RESET_STREAMS, resetp, - rtc::checked_cast(reset_stream_buf.size())); - if (ret < 0) { - // Note that usrsctp only lets us have one reset in progress at a time - // (even though multiple streams can be reset at once). If this happens, - // SendQueuedStreamResets will end up called after the current in-progress - // reset finishes, in OnStreamResetEvent. - RTC_LOG_ERRNO(LS_WARNING) << debug_name_ - << "->SendQueuedStreamResets(): " - "Failed to send a stream reset for " - << num_streams << " streams"; - return false; - } - - // Since the usrsctp call completed successfully, update our stream status - // map to note that we started the outgoing reset. 
- for (auto it = stream_status_by_sid_.begin(); - it != stream_status_by_sid_.end(); ++it) { - if (it->second.need_outgoing_reset()) { - it->second.outgoing_reset_initiated = true; - } - } - return true; -} - -void UsrsctpTransport::SetReadyToSendData() { - RTC_DCHECK_RUN_ON(network_thread_); - if (!ready_to_send_data_) { - ready_to_send_data_ = true; - SignalReadyToSendData(); - } -} - -bool UsrsctpTransport::SendBufferedMessage() { - RTC_DCHECK_RUN_ON(network_thread_); - RTC_DCHECK(partial_outgoing_message_.has_value()); - RTC_DLOG(LS_VERBOSE) << "Sending partially buffered message of size " - << partial_outgoing_message_->size() << "."; - - SendMessageInternal(&partial_outgoing_message_.value()); - if (partial_outgoing_message_->size() > 0) { - // Still need to finish sending the message. - return false; - } - RTC_DCHECK_EQ(0u, partial_outgoing_message_->size()); - - int sid = partial_outgoing_message_->sid(); - partial_outgoing_message_.reset(); - - // Send the queued stream reset if it was pending for this stream. - auto it = stream_status_by_sid_.find(sid); - if (it->second.need_outgoing_reset()) { - SendQueuedStreamResets(); - } - - return true; -} - -void UsrsctpTransport::OnWritableState( - rtc::PacketTransportInternal* transport) { - RTC_DCHECK_RUN_ON(network_thread_); - RTC_DCHECK_EQ(transport_, transport); - if (!was_ever_writable_ && transport->writable()) { - was_ever_writable_ = true; - if (started_) { - Connect(); - } - } -} - -// Called by network interface when a packet has been received. -void UsrsctpTransport::OnPacketRead(rtc::PacketTransportInternal* transport, - const char* data, - size_t len, - const int64_t& /* packet_time_us */, - int flags) { - RTC_DCHECK_RUN_ON(network_thread_); - RTC_DCHECK_EQ(transport_, transport); - TRACE_EVENT0("webrtc", "UsrsctpTransport::OnPacketRead"); - - if (flags & PF_SRTP_BYPASS) { - // We are only interested in SCTP packets. - return; - } - - RTC_LOG(LS_VERBOSE) << debug_name_ - << "->OnPacketRead(...): " - " length=" - << len << ", started: " << started_; - // Only give receiving packets to usrsctp after if connected. This enables two - // peers to each make a connect call, but for them not to receive an INIT - // packet before they have called connect; least the last receiver of the INIT - // packet will have called connect, and a connection will be established. - if (sock_) { - // Pass received packet to SCTP stack. Once processed by usrsctp, the data - // will be will be given to the global OnSctpInboundPacket callback and - // posted to the transport thread. - VerboseLogPacket(data, len, SCTP_DUMP_INBOUND); - usrsctp_conninput(reinterpret_cast(id_), data, len, 0); - } else { - // TODO(ldixon): Consider caching the packet for very slightly better - // reliability. - } -} - -void UsrsctpTransport::OnClosed(rtc::PacketTransportInternal* transport) { - webrtc::RTCError error = - webrtc::RTCError(webrtc::RTCErrorType::OPERATION_ERROR_WITH_DATA, - "Transport channel closed"); - error.set_error_detail(webrtc::RTCErrorDetailType::SCTP_FAILURE); - SignalClosedAbruptly(error); -} - -void UsrsctpTransport::OnSendThresholdCallback() { - RTC_DCHECK_RUN_ON(network_thread_); - if (partial_outgoing_message_.has_value()) { - if (!SendBufferedMessage()) { - // Did not finish sending the buffered message. 
- return; - } - } - SetReadyToSendData(); -} - -sockaddr_conn UsrsctpTransport::GetSctpSockAddr(int port) { - sockaddr_conn sconn = {0}; - sconn.sconn_family = AF_CONN; -#ifdef HAVE_SCONN_LEN - sconn.sconn_len = sizeof(sockaddr_conn); -#endif - // Note: conversion from int to uint16_t happens here. - sconn.sconn_port = rtc::HostToNetwork16(port); - sconn.sconn_addr = reinterpret_cast(id_); - return sconn; -} - -void UsrsctpTransport::OnPacketFromSctpToNetwork( - const rtc::CopyOnWriteBuffer& buffer) { - RTC_DCHECK_RUN_ON(network_thread_); - if (buffer.size() > (kSctpMtu)) { - RTC_LOG(LS_ERROR) << debug_name_ - << "->OnPacketFromSctpToNetwork(...): " - "SCTP seems to have made a packet that is bigger " - "than its official MTU: " - << buffer.size() << " vs max of " << kSctpMtu; - } - TRACE_EVENT0("webrtc", "UsrsctpTransport::OnPacketFromSctpToNetwork"); - - // Don't create noise by trying to send a packet when the DTLS transport isn't - // even writable. - if (!transport_ || !transport_->writable()) { - return; - } - - // Bon voyage. - transport_->SendPacket(buffer.data(), buffer.size(), - rtc::PacketOptions(), PF_NORMAL); -} - -void UsrsctpTransport::InjectDataOrNotificationFromSctpForTesting( - const void* data, - size_t length, - struct sctp_rcvinfo rcv, - int flags) { - OnDataOrNotificationFromSctp(data, length, rcv, flags); -} - -void UsrsctpTransport::OnDataOrNotificationFromSctp(const void* data, - size_t length, - struct sctp_rcvinfo rcv, - int flags) { - RTC_DCHECK_RUN_ON(network_thread_); - // If data is NULL, the SCTP association has been closed. - if (!data) { - RTC_LOG(LS_INFO) << debug_name_ - << "->OnDataOrNotificationFromSctp(...): " - "No data; association closed."; - return; - } - - // Handle notifications early. - // Note: Notifications are never split into chunks, so they can and should - // be handled early and entirely separate from the reassembly - // process. - if (flags & MSG_NOTIFICATION) { - RTC_LOG(LS_VERBOSE) - << debug_name_ - << "->OnDataOrNotificationFromSctp(...): SCTP notification" - << " length=" << length; - - rtc::CopyOnWriteBuffer notification(reinterpret_cast(data), - length); - OnNotificationFromSctp(notification); - return; - } - - // Log data chunk - const uint32_t ppid = rtc::NetworkToHost32(rcv.rcv_ppid); - RTC_LOG(LS_VERBOSE) << debug_name_ - << "->OnDataOrNotificationFromSctp(...): SCTP data chunk" - << " length=" << length << ", sid=" << rcv.rcv_sid - << ", ppid=" << ppid << ", ssn=" << rcv.rcv_ssn - << ", cum-tsn=" << rcv.rcv_cumtsn - << ", eor=" << ((flags & MSG_EOR) ? "y" : "n"); - - // Validate payload protocol identifier - webrtc::DataMessageType type; - if (!GetDataMediaType(ppid, &type)) { - // Unexpected PPID, dropping - RTC_LOG(LS_ERROR) << "Received an unknown PPID " << ppid - << " on an SCTP packet. Dropping."; - return; - } - - // Expect only continuation messages belonging to the same SID. The SCTP - // stack is expected to ensure this as long as the User Message - // Interleaving extension (RFC 8260) is not explicitly enabled, so this - // merely acts as a safeguard. - if ((partial_incoming_message_.size() != 0) && - (rcv.rcv_sid != partial_params_.sid)) { - RTC_LOG(LS_ERROR) << "Received a new SID without EOR in the previous" - << " SCTP packet. Discarding the previous packet."; - partial_incoming_message_.Clear(); - } - - // Copy metadata of interest - ReceiveDataParams params; - params.type = type; - params.sid = rcv.rcv_sid; - // Note that the SSN is identical for each chunk of the same message. 
- // Furthermore, it is increased per stream and not on the whole - // association. - params.seq_num = rcv.rcv_ssn; - - // Append the chunk's data to the message buffer unless we have a chunk with a - // PPID marking an empty message. - // See: https://tools.ietf.org/html/rfc8831#section-6.6 - if (!IsEmptyPPID(ppid)) - partial_incoming_message_.AppendData(reinterpret_cast(data), - length); - partial_params_ = params; - partial_flags_ = flags; - - // If the message is not yet complete... - if (!(flags & MSG_EOR)) { - if (partial_incoming_message_.size() < kSctpSendBufferSize) { - // We still have space in the buffer. Continue buffering chunks until - // the message is complete before handing it out. - return; - } else { - // The sender is exceeding the maximum message size that we announced. - // Spit out a warning but still hand out the partial message. Note that - // this behaviour is undesirable, see the discussion in issue 7774. - // - // TODO(lgrahl): Once sufficient time has passed and all supported - // browser versions obey the announced maximum message size, we should - // abort the SCTP association instead to prevent message integrity - // violation. - RTC_LOG(LS_ERROR) << "Handing out partial SCTP message."; - } - } - - // Dispatch the complete message and reset the message buffer. - OnDataFromSctpToTransport(params, partial_incoming_message_); - partial_incoming_message_.Clear(); -} - -void UsrsctpTransport::OnDataFromSctpToTransport( - const ReceiveDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) { - RTC_DCHECK_RUN_ON(network_thread_); - RTC_LOG(LS_VERBOSE) << debug_name_ - << "->OnDataFromSctpToTransport(...): " - "Posting with length: " - << buffer.size() << " on stream " << params.sid; - // Reports all received messages to upper layers, no matter whether the sid - // is known. - SignalDataReceived(params, buffer); -} - -void UsrsctpTransport::OnNotificationFromSctp( - const rtc::CopyOnWriteBuffer& buffer) { - RTC_DCHECK_RUN_ON(network_thread_); - if (buffer.size() < sizeof(sctp_notification::sn_header)) { - RTC_LOG(LS_ERROR) << "SCTP notification is shorter than header size: " - << buffer.size(); - return; - } - - const sctp_notification& notification = - reinterpret_cast(*buffer.data()); - if (buffer.size() != notification.sn_header.sn_length) { - RTC_LOG(LS_ERROR) << "SCTP notification length (" << buffer.size() - << ") does not match sn_length field (" - << notification.sn_header.sn_length << ")."; - return; - } - - // TODO(ldixon): handle notifications appropriately. - switch (notification.sn_header.sn_type) { - case SCTP_ASSOC_CHANGE: - RTC_LOG(LS_VERBOSE) << "SCTP_ASSOC_CHANGE"; - if (buffer.size() < sizeof(notification.sn_assoc_change)) { - RTC_LOG(LS_ERROR) - << "SCTP_ASSOC_CHANGE notification has less than required length: " - << buffer.size(); - return; - } - OnNotificationAssocChange(notification.sn_assoc_change); - break; - case SCTP_REMOTE_ERROR: - RTC_LOG(LS_INFO) << "SCTP_REMOTE_ERROR"; - break; - case SCTP_SHUTDOWN_EVENT: - RTC_LOG(LS_INFO) << "SCTP_SHUTDOWN_EVENT"; - break; - case SCTP_ADAPTATION_INDICATION: - RTC_LOG(LS_INFO) << "SCTP_ADAPTATION_INDICATION"; - break; - case SCTP_PARTIAL_DELIVERY_EVENT: - RTC_LOG(LS_INFO) << "SCTP_PARTIAL_DELIVERY_EVENT"; - break; - case SCTP_AUTHENTICATION_EVENT: - RTC_LOG(LS_INFO) << "SCTP_AUTHENTICATION_EVENT"; - break; - case SCTP_SENDER_DRY_EVENT: - RTC_LOG(LS_VERBOSE) << "SCTP_SENDER_DRY_EVENT"; - SetReadyToSendData(); - break; - // TODO(ldixon): Unblock after congestion. 
- case SCTP_NOTIFICATIONS_STOPPED_EVENT: - RTC_LOG(LS_INFO) << "SCTP_NOTIFICATIONS_STOPPED_EVENT"; - break; - case SCTP_SEND_FAILED_EVENT: { - if (buffer.size() < sizeof(notification.sn_send_failed_event)) { - RTC_LOG(LS_ERROR) << "SCTP_SEND_FAILED_EVENT notification has less " - "than required length: " - << buffer.size(); - return; - } - const struct sctp_send_failed_event& ssfe = - notification.sn_send_failed_event; - RTC_LOG(LS_WARNING) << "SCTP_SEND_FAILED_EVENT: message with" - " PPID = " - << rtc::NetworkToHost32(ssfe.ssfe_info.snd_ppid) - << " SID = " << ssfe.ssfe_info.snd_sid - << " flags = " << rtc::ToHex(ssfe.ssfe_info.snd_flags) - << " failed to sent due to error = " - << rtc::ToHex(ssfe.ssfe_error); - break; - } - case SCTP_STREAM_RESET_EVENT: - if (buffer.size() < sizeof(notification.sn_strreset_event)) { - RTC_LOG(LS_ERROR) << "SCTP_STREAM_RESET_EVENT notification has less " - "than required length: " - << buffer.size(); - return; - } - OnStreamResetEvent(¬ification.sn_strreset_event); - break; - case SCTP_ASSOC_RESET_EVENT: - RTC_LOG(LS_INFO) << "SCTP_ASSOC_RESET_EVENT"; - break; - case SCTP_STREAM_CHANGE_EVENT: - RTC_LOG(LS_INFO) << "SCTP_STREAM_CHANGE_EVENT"; - // An acknowledgment we get after our stream resets have gone through, - // if they've failed. We log the message, but don't react -- we don't - // keep around the last-transmitted set of SSIDs we wanted to close for - // error recovery. It doesn't seem likely to occur, and if so, likely - // harmless within the lifetime of a single SCTP association. - break; - case SCTP_PEER_ADDR_CHANGE: - RTC_LOG(LS_INFO) << "SCTP_PEER_ADDR_CHANGE"; - break; - default: - RTC_LOG(LS_WARNING) << "Unknown SCTP event: " - << notification.sn_header.sn_type; - break; - } -} - -void UsrsctpTransport::OnNotificationAssocChange( - const sctp_assoc_change& change) { - RTC_DCHECK_RUN_ON(network_thread_); - switch (change.sac_state) { - case SCTP_COMM_UP: - RTC_LOG(LS_VERBOSE) << "Association change SCTP_COMM_UP, stream # is " - << change.sac_outbound_streams << " outbound, " - << change.sac_inbound_streams << " inbound."; - max_outbound_streams_ = change.sac_outbound_streams; - max_inbound_streams_ = change.sac_inbound_streams; - SignalAssociationChangeCommunicationUp(); - // In case someone tried to close a stream before communication - // came up, send any queued resets. - SendQueuedStreamResets(); - break; - case SCTP_COMM_LOST: { - RTC_LOG(LS_INFO) << "Association change SCTP_COMM_LOST"; - webrtc::RTCError error = webrtc::RTCError( - webrtc::RTCErrorType::OPERATION_ERROR_WITH_DATA, - SctpErrorCauseCodeToString( - static_cast(change.sac_error))); - error.set_error_detail(webrtc::RTCErrorDetailType::SCTP_FAILURE); - error.set_sctp_cause_code(change.sac_error); - SignalClosedAbruptly(error); - break; - } - case SCTP_RESTART: - RTC_LOG(LS_INFO) << "Association change SCTP_RESTART"; - break; - case SCTP_SHUTDOWN_COMP: - RTC_LOG(LS_INFO) << "Association change SCTP_SHUTDOWN_COMP"; - break; - case SCTP_CANT_STR_ASSOC: - RTC_LOG(LS_INFO) << "Association change SCTP_CANT_STR_ASSOC"; - break; - default: - RTC_LOG(LS_INFO) << "Association change UNKNOWN"; - break; - } -} - -void UsrsctpTransport::OnStreamResetEvent( - const struct sctp_stream_reset_event* evt) { - RTC_DCHECK_RUN_ON(network_thread_); - - // This callback indicates that a reset is complete for incoming and/or - // outgoing streams. The reset may have been initiated by us or the remote - // side. 
- const int num_sids = (evt->strreset_length - sizeof(*evt)) / - sizeof(evt->strreset_stream_list[0]); - - if (evt->strreset_flags & SCTP_STREAM_RESET_FAILED) { - // OK, just try sending any previously sent stream resets again. The stream - // IDs sent over when the RESET_FIALED flag is set seem to be garbage - // values. Ignore them. - for (std::map::value_type& stream : - stream_status_by_sid_) { - stream.second.outgoing_reset_initiated = false; - } - SendQueuedStreamResets(); - // TODO(deadbeef): If this happens, the entire SCTP association is in quite - // crippled state. The SCTP session should be dismantled, and the WebRTC - // connectivity errored because is clear that the distant party is not - // playing ball: malforms the transported data. - return; - } - - // Loop over the received events and properly update the StreamStatus map. - for (int i = 0; i < num_sids; i++) { - const uint32_t sid = evt->strreset_stream_list[i]; - auto it = stream_status_by_sid_.find(sid); - if (it == stream_status_by_sid_.end()) { - // This stream is unknown. Sometimes this can be from a - // RESET_FAILED-related retransmit. - RTC_LOG(LS_VERBOSE) << "SCTP_STREAM_RESET_EVENT(" << debug_name_ - << "): Unknown sid " << sid; - continue; - } - StreamStatus& status = it->second; - - if (evt->strreset_flags & SCTP_STREAM_RESET_INCOMING_SSN) { - RTC_LOG(LS_VERBOSE) << "SCTP_STREAM_RESET_INCOMING_SSN(" << debug_name_ - << "): sid " << sid; - status.incoming_reset_complete = true; - // If we receive an incoming stream reset and we haven't started the - // closing procedure ourselves, this means the remote side started the - // closing procedure; fire a signal so that the relevant data channel - // can change to "closing" (we still need to reset the outgoing stream - // before it changes to "closed"). - if (!status.closure_initiated) { - SignalClosingProcedureStartedRemotely(sid); - } - } - if (evt->strreset_flags & SCTP_STREAM_RESET_OUTGOING_SSN) { - RTC_LOG(LS_VERBOSE) << "SCTP_STREAM_RESET_OUTGOING_SSN(" << debug_name_ - << "): sid " << sid; - status.outgoing_reset_complete = true; - } - - // If this reset completes the closing procedure, remove the stream from - // our map so we can consider it closed, and fire a signal such that the - // relevant DataChannel will change its state to "closed" and its ID can be - // re-used. - if (status.reset_complete()) { - stream_status_by_sid_.erase(it); - SignalClosingProcedureComplete(sid); - } - } - - // Always try to send any queued resets because this call indicates that the - // last outgoing or incoming reset has made some progress. - SendQueuedStreamResets(); -} - -} // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.h b/TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.h deleted file mode 100644 index 06988fd156..0000000000 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.h +++ /dev/null @@ -1,296 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MEDIA_SCTP_USRSCTP_TRANSPORT_H_ -#define MEDIA_SCTP_USRSCTP_TRANSPORT_H_ - -#include - -#include -#include -#include -#include -#include -#include - -#include "absl/types/optional.h" -#include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" -#include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread.h" -// For SendDataParams/ReceiveDataParams. -#include "media/base/media_channel.h" -#include "media/sctp/sctp_transport_internal.h" - -// Defined by "usrsctplib/usrsctp.h" -struct sockaddr_conn; -struct sctp_assoc_change; -struct sctp_rcvinfo; -struct sctp_stream_reset_event; -struct sctp_sendv_spa; - -// Defined by -struct socket; -namespace cricket { - -// Holds data to be passed on to a transport. -struct SctpInboundPacket; - -// From transport calls, data flows like this: -// [network thread (although it can in princple be another thread)] -// 1. SctpTransport::SendData(data) -// 2. usrsctp_sendv(data) -// [network thread returns; sctp thread then calls the following] -// 3. OnSctpOutboundPacket(wrapped_data) -// [sctp thread returns having async invoked on the network thread] -// 4. SctpTransport::OnPacketFromSctpToNetwork(wrapped_data) -// 5. DtlsTransport::SendPacket(wrapped_data) -// 6. ... across network ... a packet is sent back ... -// 7. SctpTransport::OnPacketReceived(wrapped_data) -// 8. usrsctp_conninput(wrapped_data) -// [network thread returns; sctp thread then calls the following] -// 9. OnSctpInboundData(data) -// 10. SctpTransport::OnDataFromSctpToTransport(data) -// [sctp thread returns having async invoked on the network thread] -// 11. SctpTransport::OnDataFromSctpToTransport(data) -// 12. SctpTransport::SignalDataReceived(data) -// [from the same thread, methods registered/connected to -// SctpTransport are called with the recieved data] -class UsrsctpTransport : public SctpTransportInternal, - public sigslot::has_slots<> { - public: - // `network_thread` is where packets will be processed and callbacks from - // this transport will be posted, and is the only thread on which public - // methods can be called. - // `transport` is not required (can be null). - UsrsctpTransport(rtc::Thread* network_thread, - rtc::PacketTransportInternal* transport); - ~UsrsctpTransport() override; - - // SctpTransportInternal overrides (see sctptransportinternal.h for comments). - void SetDtlsTransport(rtc::PacketTransportInternal* transport) override; - bool Start(int local_port, int remote_port, int max_message_size) override; - bool OpenStream(int sid) override; - bool ResetStream(int sid) override; - bool SendData(int sid, - const webrtc::SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload, - SendDataResult* result = nullptr) override; - bool ReadyToSendData() override; - int max_message_size() const override { return max_message_size_; } - absl::optional max_outbound_streams() const override { - return max_outbound_streams_; - } - absl::optional max_inbound_streams() const override { - return max_inbound_streams_; - } - void set_debug_name_for_testing(const char* debug_name) override { - debug_name_ = debug_name; - } - void InjectDataOrNotificationFromSctpForTesting(const void* data, - size_t length, - struct sctp_rcvinfo rcv, - int flags); - - // Exposed to allow Post call from c-callbacks. - // TODO(deadbeef): Remove this or at least make it return a const pointer. 
- rtc::Thread* network_thread() const { return network_thread_; } - - private: - // A message to be sent by the sctp library. This class is used to track the - // progress of writing a single message to the sctp library in the presence of - // partial writes. In this case, the Advance() function is provided in order - // to advance over what has already been accepted by the sctp library and - // avoid copying the remaining partial message buffer. - class OutgoingMessage { - public: - OutgoingMessage(const rtc::CopyOnWriteBuffer& buffer, - int sid, - const webrtc::SendDataParams& send_params) - : buffer_(buffer), sid_(sid), send_params_(send_params) {} - - // Advances the buffer by the incremented amount. Must not advance further - // than the current data size. - void Advance(size_t increment) { - RTC_DCHECK_LE(increment + offset_, buffer_.size()); - offset_ += increment; - } - - size_t size() const { return buffer_.size() - offset_; } - - const void* data() const { return buffer_.data() + offset_; } - - int sid() const { return sid_; } - webrtc::SendDataParams send_params() const { return send_params_; } - - private: - const rtc::CopyOnWriteBuffer buffer_; - int sid_; - const webrtc::SendDataParams send_params_; - size_t offset_ = 0; - }; - - void ConnectTransportSignals(); - void DisconnectTransportSignals(); - - // Creates the socket and connects. - bool Connect(); - - // Returns false when opening the socket failed. - bool OpenSctpSocket(); - // Helpet method to set socket options. - bool ConfigureSctpSocket(); - // Sets |sock_ |to nullptr. - void CloseSctpSocket(); - - // Sends a SCTP_RESET_STREAM for all streams in closing_ssids_. - bool SendQueuedStreamResets(); - - // Sets the "ready to send" flag and fires signal if needed. - void SetReadyToSendData(); - - // Sends the outgoing buffered message that was only partially accepted by the - // sctp lib because it did not have enough space. Returns true if the entire - // buffered message was accepted by the sctp lib. - bool SendBufferedMessage(); - - // Tries to send the `payload` on the usrsctp lib. The message will be - // advanced by the amount that was sent. - SendDataResult SendMessageInternal(OutgoingMessage* message); - - // Callbacks from DTLS transport. - void OnWritableState(rtc::PacketTransportInternal* transport); - virtual void OnPacketRead(rtc::PacketTransportInternal* transport, - const char* data, - size_t len, - const int64_t& packet_time_us, - int flags); - void OnClosed(rtc::PacketTransportInternal* transport); - - // Methods related to usrsctp callbacks. - void OnSendThresholdCallback(); - sockaddr_conn GetSctpSockAddr(int port); - - // Called using `invoker_` to send packet on the network. - void OnPacketFromSctpToNetwork(const rtc::CopyOnWriteBuffer& buffer); - - // Called on the network thread. - // Flags are standard socket API flags (RFC 6458). - void OnDataOrNotificationFromSctp(const void* data, - size_t length, - struct sctp_rcvinfo rcv, - int flags); - // Called using `invoker_` to decide what to do with the data. - void OnDataFromSctpToTransport(const ReceiveDataParams& params, - const rtc::CopyOnWriteBuffer& buffer); - // Called using `invoker_` to decide what to do with the notification. 
- void OnNotificationFromSctp(const rtc::CopyOnWriteBuffer& buffer); - void OnNotificationAssocChange(const sctp_assoc_change& change); - - void OnStreamResetEvent(const struct sctp_stream_reset_event* evt); - - // Responsible for marshalling incoming data to the transports listeners, and - // outgoing data to the network interface. - rtc::Thread* network_thread_; - // Helps pass inbound/outbound packets asynchronously to the network thread. - webrtc::ScopedTaskSafety task_safety_; - // Underlying DTLS transport. - rtc::PacketTransportInternal* transport_ = nullptr; - - // Track the data received from usrsctp between callbacks until the EOR bit - // arrives. - rtc::CopyOnWriteBuffer partial_incoming_message_; - ReceiveDataParams partial_params_; - int partial_flags_; - // A message that was attempted to be sent, but was only partially accepted by - // usrsctp lib with usrsctp_sendv() because it cannot buffer the full message. - // This occurs because we explicitly set the EOR bit when sending, so - // usrsctp_sendv() is not atomic. - absl::optional partial_outgoing_message_; - - bool was_ever_writable_ = false; - int local_port_ = kSctpDefaultPort; - int remote_port_ = kSctpDefaultPort; - int max_message_size_ = kSctpSendBufferSize; - struct socket* sock_ = nullptr; // The socket created by usrsctp_socket(...). - - // Has Start been called? Don't create SCTP socket until it has. - bool started_ = false; - // Are we ready to queue data (SCTP socket created, and not blocked due to - // congestion control)? Different than `transport_`'s "ready to send". - bool ready_to_send_data_ = false; - - // Used to keep track of the status of each stream (or rather, each pair of - // incoming/outgoing streams with matching IDs). It's specifically used to - // keep track of the status of resets, but more information could be put here - // later. - // - // See datachannel.h for a summary of the closing procedure. - struct StreamStatus { - // Closure initiated by application via ResetStream? Note that - // this may be true while outgoing_reset_initiated is false if the outgoing - // reset needed to be queued. - bool closure_initiated = false; - // Whether we've initiated the outgoing stream reset via - // SCTP_RESET_STREAMS. - bool outgoing_reset_initiated = false; - // Whether usrsctp has indicated that the incoming/outgoing streams have - // been reset. It's expected that the peer will reset its outgoing stream - // (our incoming stream) after receiving the reset for our outgoing stream, - // though older versions of chromium won't do this. See crbug.com/559394 - // for context. - bool outgoing_reset_complete = false; - bool incoming_reset_complete = false; - - // Some helper methods to improve code readability. - bool is_open() const { - return !closure_initiated && !incoming_reset_complete && - !outgoing_reset_complete; - } - // We need to send an outgoing reset if the application has closed the data - // channel, or if we received a reset of the incoming stream from the - // remote endpoint, indicating the data channel was closed remotely. - bool need_outgoing_reset() const { - return (incoming_reset_complete || closure_initiated) && - !outgoing_reset_initiated; - } - bool reset_complete() const { - return outgoing_reset_complete && incoming_reset_complete; - } - }; - - // Entries should only be removed from this map if `reset_complete` is - // true. - std::map stream_status_by_sid_; - - // A static human-readable name for debugging messages. 
- const char* debug_name_ = "UsrsctpTransport"; - // Hides usrsctp interactions from this header file. - class UsrSctpWrapper; - // Number of channels negotiated. Not set before negotiation completes. - absl::optional max_outbound_streams_; - absl::optional max_inbound_streams_; - - // Used for associating this transport with the underlying sctp socket in - // various callbacks. - uintptr_t id_ = 0; - - friend class UsrsctpTransportMap; - - RTC_DISALLOW_COPY_AND_ASSIGN(UsrsctpTransport); -}; - -class UsrsctpTransportMap; - -} // namespace cricket - -#endif // MEDIA_SCTP_USRSCTP_TRANSPORT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/acm_receiver.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/acm_receiver.cc index aa9816956e..b078af1d2d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/acm_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/acm_receiver.cc @@ -287,6 +287,8 @@ void AcmReceiver::GetNetworkStatistics( acm_stat->jitterBufferDelayMs = neteq_lifetime_stat.jitter_buffer_delay_ms; acm_stat->jitterBufferTargetDelayMs = neteq_lifetime_stat.jitter_buffer_target_delay_ms; + acm_stat->jitterBufferMinimumDelayMs = + neteq_lifetime_stat.jitter_buffer_minimum_delay_ms; acm_stat->jitterBufferEmittedCount = neteq_lifetime_stat.jitter_buffer_emitted_count; acm_stat->delayedPacketOutageSamples = @@ -302,13 +304,12 @@ void AcmReceiver::GetNetworkStatistics( neteq_lifetime_stat.removed_samples_for_acceleration; acm_stat->fecPacketsReceived = neteq_lifetime_stat.fec_packets_received; acm_stat->fecPacketsDiscarded = neteq_lifetime_stat.fec_packets_discarded; + acm_stat->packetsDiscarded = neteq_lifetime_stat.packets_discarded; NetEqOperationsAndState neteq_operations_and_state = neteq_->GetOperationsAndState(); acm_stat->packetBufferFlushes = neteq_operations_and_state.packet_buffer_flushes; - acm_stat->packetsDiscarded = - neteq_operations_and_state.discarded_primary_packets; } int AcmReceiver::EnableNack(size_t max_nack_list_size) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/audio_coding_module.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/audio_coding_module.cc index e2081e20dc..4367ab08fa 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/audio_coding_module.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/acm2/audio_coding_module.cc @@ -14,6 +14,7 @@ #include #include "absl/strings/match.h" +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "modules/audio_coding/acm2/acm_receiver.h" #include "modules/audio_coding/acm2/acm_remixing.h" @@ -112,7 +113,7 @@ class AudioCodingModuleImpl final : public AudioCodingModule { // the value has changed since the last time (and always for the first call). class ChangeLogger { public: - explicit ChangeLogger(const std::string& histogram_name) + explicit ChangeLogger(absl::string_view histogram_name) : histogram_name_(histogram_name) {} // Logs the new value if it is different from the last logged value, or if // this is the first call. 
@@ -135,7 +136,7 @@ class AudioCodingModuleImpl final : public AudioCodingModule { int InitializeReceiverSafe() RTC_EXCLUSIVE_LOCKS_REQUIRED(acm_mutex_); - bool HaveValidEncoder(const char* caller_name) const + bool HaveValidEncoder(absl::string_view caller_name) const RTC_EXCLUSIVE_LOCKS_REQUIRED(acm_mutex_); // Preprocessing of input audio, including resampling and down-mixing if @@ -589,7 +590,8 @@ int AudioCodingModuleImpl::GetNetworkStatistics(NetworkStatistics* statistics) { return 0; } -bool AudioCodingModuleImpl::HaveValidEncoder(const char* caller_name) const { +bool AudioCodingModuleImpl::HaveValidEncoder( + absl::string_view caller_name) const { if (!encoder_stack_) { RTC_LOG(LS_ERROR) << caller_name << " failed: No send codec is registered."; return false; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.h index 1c91fa19a8..664e76bda5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.h @@ -21,7 +21,6 @@ #include "modules/audio_coding/audio_network_adaptor/debug_dump_writer.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -44,6 +43,9 @@ class AudioNetworkAdaptorImpl final : public AudioNetworkAdaptor { ~AudioNetworkAdaptorImpl() override; + AudioNetworkAdaptorImpl(const AudioNetworkAdaptorImpl&) = delete; + AudioNetworkAdaptorImpl& operator=(const AudioNetworkAdaptorImpl&) = delete; + void SetUplinkBandwidth(int uplink_bandwidth_bps) override; void SetUplinkPacketLossFraction(float uplink_packet_loss_fraction) override; @@ -80,8 +82,6 @@ class AudioNetworkAdaptorImpl final : public AudioNetworkAdaptor { absl::optional prev_config_; ANAStats stats_; - - RTC_DISALLOW_COPY_AND_ASSIGN(AudioNetworkAdaptorImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller.h index 41bfbd1c32..c1032146cc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller.h @@ -16,7 +16,6 @@ #include "absl/types/optional.h" #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { namespace audio_network_adaptor { @@ -39,6 +38,9 @@ class BitrateController final : public Controller { ~BitrateController() override; + BitrateController(const BitrateController&) = delete; + BitrateController& operator=(const BitrateController&) = delete; + void UpdateNetworkMetrics(const NetworkMetrics& network_metrics) override; void MakeDecision(AudioEncoderRuntimeConfig* config) override; @@ -49,7 +51,6 @@ class BitrateController final : public Controller { int frame_length_ms_; absl::optional target_audio_bitrate_bps_; absl::optional overhead_bytes_per_packet_; - RTC_DISALLOW_COPY_AND_ASSIGN(BitrateController); }; } // 
namespace audio_network_adaptor diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/channel_controller.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/channel_controller.h index f211f40f17..3cd4bb7dec 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/channel_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/channel_controller.h @@ -16,7 +16,6 @@ #include "absl/types/optional.h" #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -41,6 +40,9 @@ class ChannelController final : public Controller { ~ChannelController() override; + ChannelController(const ChannelController&) = delete; + ChannelController& operator=(const ChannelController&) = delete; + void UpdateNetworkMetrics(const NetworkMetrics& network_metrics) override; void MakeDecision(AudioEncoderRuntimeConfig* config) override; @@ -49,7 +51,6 @@ class ChannelController final : public Controller { const Config config_; size_t channels_to_encode_; absl::optional uplink_bandwidth_bps_; - RTC_DISALLOW_COPY_AND_ASSIGN(ChannelController); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.cc index 87759c37ea..42dd8a8786 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.cc @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "modules/audio_coding/audio_network_adaptor/bitrate_controller.h" #include "modules/audio_coding/audio_network_adaptor/channel_controller.h" #include "modules/audio_coding/audio_network_adaptor/debug_dump_writer.h" @@ -219,7 +220,7 @@ ControllerManagerImpl::Config::Config(int min_reordering_time_ms, ControllerManagerImpl::Config::~Config() = default; std::unique_ptr ControllerManagerImpl::Create( - const std::string& config_string, + absl::string_view config_string, size_t num_encoder_channels, rtc::ArrayView encoder_frame_lengths_ms, int min_encoder_bitrate_bps, @@ -235,7 +236,7 @@ std::unique_ptr ControllerManagerImpl::Create( } std::unique_ptr ControllerManagerImpl::Create( - const std::string& config_string, + absl::string_view config_string, size_t num_encoder_channels, rtc::ArrayView encoder_frame_lengths_ms, int min_encoder_bitrate_bps, @@ -247,7 +248,8 @@ std::unique_ptr ControllerManagerImpl::Create( DebugDumpWriter* debug_dump_writer) { #if WEBRTC_ENABLE_PROTOBUF audio_network_adaptor::config::ControllerManager controller_manager_config; - RTC_CHECK(controller_manager_config.ParseFromString(config_string)); + RTC_CHECK( + controller_manager_config.ParseFromString(std::string(config_string))); if (debug_dump_writer) debug_dump_writer->DumpControllerManagerConfig(controller_manager_config, rtc::TimeMillis()); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.h index c168ebc6ce..47e8e0f5a0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.h +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.h @@ -16,8 +16,8 @@ #include #include +#include "absl/strings/string_view.h" #include "modules/audio_coding/audio_network_adaptor/controller.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -47,7 +47,7 @@ class ControllerManagerImpl final : public ControllerManager { }; static std::unique_ptr Create( - const std::string& config_string, + absl::string_view config_string, size_t num_encoder_channels, rtc::ArrayView encoder_frame_lengths_ms, int min_encoder_bitrate_bps, @@ -58,7 +58,7 @@ class ControllerManagerImpl final : public ControllerManager { bool initial_dtx_enabled); static std::unique_ptr Create( - const std::string& config_string, + absl::string_view config_string, size_t num_encoder_channels, rtc::ArrayView encoder_frame_lengths_ms, int min_encoder_bitrate_bps, @@ -80,6 +80,9 @@ class ControllerManagerImpl final : public ControllerManager { ~ControllerManagerImpl() override; + ControllerManagerImpl(const ControllerManagerImpl&) = delete; + ControllerManagerImpl& operator=(const ControllerManagerImpl&) = delete; + // Sort controllers based on their significance. std::vector GetSortedControllers( const Controller::NetworkMetrics& metrics) override; @@ -114,8 +117,6 @@ class ControllerManagerImpl final : public ControllerManager { // `scoring_points_` saves the scoring points of various // controllers. std::map controller_scoring_points_; - - RTC_DISALLOW_COPY_AND_ASSIGN(ControllerManagerImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h index 367f659542..8fdf2f7728 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h @@ -15,7 +15,6 @@ #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/ignore_wundef.h" #include "rtc_base/system/file_wrapper.h" #if WEBRTC_ENABLE_PROTOBUF diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/dtx_controller.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/dtx_controller.h index 83fdf3ddd7..b8a8e476e4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/dtx_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/dtx_controller.h @@ -14,7 +14,6 @@ #include "absl/types/optional.h" #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -35,6 +34,9 @@ class DtxController final : public Controller { ~DtxController() override; + DtxController(const DtxController&) = delete; + DtxController& operator=(const DtxController&) = delete; + void UpdateNetworkMetrics(const NetworkMetrics& network_metrics) override; void MakeDecision(AudioEncoderRuntimeConfig* config) override; @@ -43,7 +45,6 @@ class DtxController final : public Controller { const Config config_; bool dtx_enabled_; absl::optional uplink_bandwidth_bps_; - RTC_DISALLOW_COPY_AND_ASSIGN(DtxController); }; } // 
namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/event_log_writer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/event_log_writer.h index c5e57e63e6..a147311fc7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/event_log_writer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/event_log_writer.h @@ -12,7 +12,6 @@ #define MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_EVENT_LOG_WRITER_H_ #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { class RtcEventLog; @@ -24,6 +23,10 @@ class EventLogWriter final { float min_bitrate_change_fraction, float min_packet_loss_change_fraction); ~EventLogWriter(); + + EventLogWriter(const EventLogWriter&) = delete; + EventLogWriter& operator=(const EventLogWriter&) = delete; + void MaybeLogEncoderConfig(const AudioEncoderRuntimeConfig& config); private: @@ -34,7 +37,6 @@ class EventLogWriter final { const float min_bitrate_change_fraction_; const float min_packet_loss_change_fraction_; AudioEncoderRuntimeConfig last_logged_config_; - RTC_DISALLOW_COPY_AND_ASSIGN(EventLogWriter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h index 85d235ed26..0c57ad1d1e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h @@ -18,7 +18,6 @@ #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" #include "modules/audio_coding/audio_network_adaptor/util/threshold_curve.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -53,6 +52,9 @@ class FecControllerPlrBased final : public Controller { ~FecControllerPlrBased() override; + FecControllerPlrBased(const FecControllerPlrBased&) = delete; + FecControllerPlrBased& operator=(const FecControllerPlrBased&) = delete; + void UpdateNetworkMetrics(const NetworkMetrics& network_metrics) override; void MakeDecision(AudioEncoderRuntimeConfig* config) override; @@ -65,8 +67,6 @@ class FecControllerPlrBased final : public Controller { bool fec_enabled_; absl::optional uplink_bandwidth_bps_; const std::unique_ptr packet_loss_smoother_; - - RTC_DISALLOW_COPY_AND_ASSIGN(FecControllerPlrBased); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller.h index 74a787e1c1..04693f8db7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller.h @@ -19,7 +19,6 @@ #include "absl/types/optional.h" #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -62,6 +61,9 @@ class FrameLengthController final : public Controller { ~FrameLengthController() 
override; + FrameLengthController(const FrameLengthController&) = delete; + FrameLengthController& operator=(const FrameLengthController&) = delete; + void UpdateNetworkMetrics(const NetworkMetrics& network_metrics) override; void MakeDecision(AudioEncoderRuntimeConfig* config) override; @@ -84,8 +86,6 @@ class FrameLengthController final : public Controller { // True if the previous frame length decision was an increase, otherwise // false. bool prev_decision_increase_ = false; - - RTC_DISALLOW_COPY_AND_ASSIGN(FrameLengthController); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc index d580a0509b..46ac671b30 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc @@ -40,8 +40,14 @@ int AudioDecoderPcmU::DecodeInternal(const uint8_t* encoded, int16_t* decoded, SpeechType* speech_type) { RTC_DCHECK_EQ(SampleRateHz(), sample_rate_hz); + // Adjust the encoded length down to ensure the same number of samples in each + // channel. + const size_t encoded_len_adjusted = + PacketDuration(encoded, encoded_len) * + Channels(); // 1 byte per sample per channel int16_t temp_type = 1; // Default is speech. - size_t ret = WebRtcG711_DecodeU(encoded, encoded_len, decoded, &temp_type); + size_t ret = + WebRtcG711_DecodeU(encoded, encoded_len_adjusted, decoded, &temp_type); *speech_type = ConvertSpeechType(temp_type); return static_cast(ret); } @@ -75,8 +81,14 @@ int AudioDecoderPcmA::DecodeInternal(const uint8_t* encoded, int16_t* decoded, SpeechType* speech_type) { RTC_DCHECK_EQ(SampleRateHz(), sample_rate_hz); + // Adjust the encoded length down to ensure the same number of samples in each + // channel. + const size_t encoded_len_adjusted = + PacketDuration(encoded, encoded_len) * + Channels(); // 1 byte per sample per channel int16_t temp_type = 1; // Default is speech. 
- size_t ret = WebRtcG711_DecodeA(encoded, encoded_len, decoded, &temp_type); + size_t ret = + WebRtcG711_DecodeA(encoded, encoded_len_adjusted, decoded, &temp_type); *speech_type = ConvertSpeechType(temp_type); return static_cast(ret); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h index 618591876d..3fa42cba30 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h @@ -19,7 +19,6 @@ #include "api/audio_codecs/audio_decoder.h" #include "rtc_base/buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -28,6 +27,10 @@ class AudioDecoderPcmU final : public AudioDecoder { explicit AudioDecoderPcmU(size_t num_channels) : num_channels_(num_channels) { RTC_DCHECK_GE(num_channels, 1); } + + AudioDecoderPcmU(const AudioDecoderPcmU&) = delete; + AudioDecoderPcmU& operator=(const AudioDecoderPcmU&) = delete; + void Reset() override; std::vector ParsePayload(rtc::Buffer&& payload, uint32_t timestamp) override; @@ -44,7 +47,6 @@ class AudioDecoderPcmU final : public AudioDecoder { private: const size_t num_channels_; - RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmU); }; class AudioDecoderPcmA final : public AudioDecoder { @@ -52,6 +54,10 @@ class AudioDecoderPcmA final : public AudioDecoder { explicit AudioDecoderPcmA(size_t num_channels) : num_channels_(num_channels) { RTC_DCHECK_GE(num_channels, 1); } + + AudioDecoderPcmA(const AudioDecoderPcmA&) = delete; + AudioDecoderPcmA& operator=(const AudioDecoderPcmA&) = delete; + void Reset() override; std::vector ParsePayload(rtc::Buffer&& payload, uint32_t timestamp) override; @@ -68,7 +74,6 @@ class AudioDecoderPcmA final : public AudioDecoder { private: const size_t num_channels_; - RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmA); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h index c4413f50a4..d50be4b457 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h @@ -17,7 +17,6 @@ #include "absl/types/optional.h" #include "api/audio_codecs/audio_encoder.h" #include "api/units/time_delta.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -83,6 +82,9 @@ class AudioEncoderPcmA final : public AudioEncoderPcm { explicit AudioEncoderPcmA(const Config& config) : AudioEncoderPcm(config, kSampleRateHz) {} + AudioEncoderPcmA(const AudioEncoderPcmA&) = delete; + AudioEncoderPcmA& operator=(const AudioEncoderPcmA&) = delete; + protected: size_t EncodeCall(const int16_t* audio, size_t input_len, @@ -94,7 +96,6 @@ class AudioEncoderPcmA final : public AudioEncoderPcm { private: static const int kSampleRateHz = 8000; - RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcmA); }; class AudioEncoderPcmU final : public AudioEncoderPcm { @@ -106,6 +107,9 @@ class AudioEncoderPcmU final : public AudioEncoderPcm { explicit AudioEncoderPcmU(const Config& config) : AudioEncoderPcm(config, kSampleRateHz) {} + AudioEncoderPcmU(const AudioEncoderPcmU&) = delete; + AudioEncoderPcmU& operator=(const AudioEncoderPcmU&) = delete; + protected: size_t EncodeCall(const int16_t* audio, size_t 
input_len, @@ -117,7 +121,6 @@ class AudioEncoderPcmU final : public AudioEncoderPcm { private: static const int kSampleRateHz = 8000; - RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcmU); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/g711_interface.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/g711_interface.h index 83f9d378ed..c92e6cc1c8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/g711_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/g711_interface.h @@ -11,6 +11,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_G711_G711_INTERFACE_H_ #define MODULES_AUDIO_CODING_CODECS_G711_G711_INTERFACE_H_ +#include #include // Comfort noise constants diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.cc index f02ca7f896..1ecc9bc3d1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.cc @@ -89,16 +89,22 @@ int AudioDecoderG722StereoImpl::DecodeInternal(const uint8_t* encoded, int16_t* decoded, SpeechType* speech_type) { RTC_DCHECK_EQ(SampleRateHz(), sample_rate_hz); + // Adjust the encoded length down to ensure the same number of samples in each + // channel. + const size_t encoded_len_adjusted = PacketDuration(encoded, encoded_len) * + Channels() / + 2; // 1/2 byte per sample per channel int16_t temp_type = 1; // Default is speech. // De-interleave the bit-stream into two separate payloads. - uint8_t* encoded_deinterleaved = new uint8_t[encoded_len]; - SplitStereoPacket(encoded, encoded_len, encoded_deinterleaved); + uint8_t* encoded_deinterleaved = new uint8_t[encoded_len_adjusted]; + SplitStereoPacket(encoded, encoded_len_adjusted, encoded_deinterleaved); // Decode left and right. - size_t decoded_len = WebRtcG722_Decode(dec_state_left_, encoded_deinterleaved, - encoded_len / 2, decoded, &temp_type); + size_t decoded_len = + WebRtcG722_Decode(dec_state_left_, encoded_deinterleaved, + encoded_len_adjusted / 2, decoded, &temp_type); size_t ret = WebRtcG722_Decode( - dec_state_right_, &encoded_deinterleaved[encoded_len / 2], - encoded_len / 2, &decoded[decoded_len], &temp_type); + dec_state_right_, &encoded_deinterleaved[encoded_len_adjusted / 2], + encoded_len_adjusted / 2, &decoded[decoded_len], &temp_type); if (ret == decoded_len) { ret += decoded_len; // Return total number of samples. // Interleave output. @@ -114,6 +120,14 @@ int AudioDecoderG722StereoImpl::DecodeInternal(const uint8_t* encoded, return static_cast(ret); } +int AudioDecoderG722StereoImpl::PacketDuration(const uint8_t* encoded, + size_t encoded_len) const { + // 1/2 encoded byte per sample per channel. Make sure the length represents + // an equal number of bytes per channel. Otherwise, we cannot de-interleave + // the encoded data later. 
+ return static_cast(2 * (encoded_len / Channels())); +} + int AudioDecoderG722StereoImpl::SampleRateHz() const { return 16000; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h index eeca13975f..5872fad5de 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h @@ -12,7 +12,6 @@ #define MODULES_AUDIO_CODING_CODECS_G722_AUDIO_DECODER_G722_H_ #include "api/audio_codecs/audio_decoder.h" -#include "rtc_base/constructor_magic.h" typedef struct WebRtcG722DecInst G722DecInst; @@ -22,6 +21,10 @@ class AudioDecoderG722Impl final : public AudioDecoder { public: AudioDecoderG722Impl(); ~AudioDecoderG722Impl() override; + + AudioDecoderG722Impl(const AudioDecoderG722Impl&) = delete; + AudioDecoderG722Impl& operator=(const AudioDecoderG722Impl&) = delete; + bool HasDecodePlc() const override; void Reset() override; std::vector ParsePayload(rtc::Buffer&& payload, @@ -39,17 +42,22 @@ class AudioDecoderG722Impl final : public AudioDecoder { private: G722DecInst* dec_state_; - RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderG722Impl); }; class AudioDecoderG722StereoImpl final : public AudioDecoder { public: AudioDecoderG722StereoImpl(); ~AudioDecoderG722StereoImpl() override; + + AudioDecoderG722StereoImpl(const AudioDecoderG722StereoImpl&) = delete; + AudioDecoderG722StereoImpl& operator=(const AudioDecoderG722StereoImpl&) = + delete; + void Reset() override; std::vector ParsePayload(rtc::Buffer&& payload, uint32_t timestamp) override; int SampleRateHz() const override; + int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override; size_t Channels() const override; protected: @@ -71,7 +79,6 @@ class AudioDecoderG722StereoImpl final : public AudioDecoder { G722DecInst* dec_state_left_; G722DecInst* dec_state_right_; - RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderG722StereoImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h index c836503f2b..a932aa8b7d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h @@ -20,7 +20,6 @@ #include "api/units/time_delta.h" #include "modules/audio_coding/codecs/g722/g722_interface.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -29,6 +28,9 @@ class AudioEncoderG722Impl final : public AudioEncoder { AudioEncoderG722Impl(const AudioEncoderG722Config& config, int payload_type); ~AudioEncoderG722Impl() override; + AudioEncoderG722Impl(const AudioEncoderG722Impl&) = delete; + AudioEncoderG722Impl& operator=(const AudioEncoderG722Impl&) = delete; + int SampleRateHz() const override; size_t NumChannels() const override; int RtpTimestampRateHz() const override; @@ -63,7 +65,6 @@ class AudioEncoderG722Impl final : public AudioEncoder { uint32_t first_timestamp_in_buffer_; const std::unique_ptr encoders_; rtc::Buffer interleave_buffer_; - RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderG722Impl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/g722_interface.h 
b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/g722_interface.h index 85c1cd02a0..353de4504f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/g722_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/g722_interface.h @@ -11,6 +11,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_G722_G722_INTERFACE_H_ #define MODULES_AUDIO_CODING_CODECS_G722_G722_INTERFACE_H_ +#include #include /* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h index c2d62ed2d1..46ba755148 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h @@ -18,7 +18,6 @@ #include "api/audio_codecs/audio_decoder.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" typedef struct iLBC_decinst_t_ IlbcDecoderInstance; @@ -28,6 +27,10 @@ class AudioDecoderIlbcImpl final : public AudioDecoder { public: AudioDecoderIlbcImpl(); ~AudioDecoderIlbcImpl() override; + + AudioDecoderIlbcImpl(const AudioDecoderIlbcImpl&) = delete; + AudioDecoderIlbcImpl& operator=(const AudioDecoderIlbcImpl&) = delete; + bool HasDecodePlc() const override; size_t DecodePlc(size_t num_frames, int16_t* decoded) override; void Reset() override; @@ -45,7 +48,6 @@ class AudioDecoderIlbcImpl final : public AudioDecoder { private: IlbcDecoderInstance* dec_state_; - RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderIlbcImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h index 05a900e3c4..c8dfa2ca6d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h @@ -21,7 +21,6 @@ #include "api/audio_codecs/ilbc/audio_encoder_ilbc_config.h" #include "api/units/time_delta.h" #include "modules/audio_coding/codecs/ilbc/ilbc.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -30,6 +29,9 @@ class AudioEncoderIlbcImpl final : public AudioEncoder { AudioEncoderIlbcImpl(const AudioEncoderIlbcConfig& config, int payload_type); ~AudioEncoderIlbcImpl() override; + AudioEncoderIlbcImpl(const AudioEncoderIlbcImpl&) = delete; + AudioEncoderIlbcImpl& operator=(const AudioEncoderIlbcImpl&) = delete; + int SampleRateHz() const override; size_t NumChannels() const override; size_t Num10MsFramesInNextPacket() const override; @@ -53,7 +55,6 @@ class AudioEncoderIlbcImpl final : public AudioEncoder { uint32_t first_timestamp_in_buffer_; int16_t input_buffer_[kMaxSamplesPerPacket]; IlbcEncoderInstance* encoder_; - RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderIlbcImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h index 23a302018f..aae708f295 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h @@ -16,7 +16,6 @@ #include "absl/types/optional.h" #include "api/audio_codecs/audio_decoder.h" #include "api/scoped_refptr.h" -#include "rtc_base/constructor_magic.h" namespace 
webrtc { @@ -30,6 +29,9 @@ class AudioDecoderIsacT final : public AudioDecoder { explicit AudioDecoderIsacT(const Config& config); virtual ~AudioDecoderIsacT() override; + AudioDecoderIsacT(const AudioDecoderIsacT&) = delete; + AudioDecoderIsacT& operator=(const AudioDecoderIsacT&) = delete; + bool HasDecodePlc() const override; size_t DecodePlc(size_t num_frames, int16_t* decoded) override; void Reset() override; @@ -45,8 +47,6 @@ class AudioDecoderIsacT final : public AudioDecoder { private: typename T::instance_type* isac_state_; int sample_rate_hz_; - - RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderIsacT); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t_impl.h index 2e43fd317f..9aa498866b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t_impl.h @@ -11,6 +11,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_DECODER_ISAC_T_IMPL_H_ #define MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_DECODER_ISAC_T_IMPL_H_ +#include "modules/audio_coding/codecs/isac/audio_decoder_isac_t.h" #include "rtc_base/checks.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h index 8bde0e34ad..c382ea076e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h @@ -18,7 +18,6 @@ #include "api/audio_codecs/audio_encoder.h" #include "api/scoped_refptr.h" #include "api/units/time_delta.h" -#include "rtc_base/constructor_magic.h" #include "system_wrappers/include/field_trial.h" namespace webrtc { @@ -44,6 +43,9 @@ class AudioEncoderIsacT final : public AudioEncoder { explicit AudioEncoderIsacT(const Config& config); ~AudioEncoderIsacT() override; + AudioEncoderIsacT(const AudioEncoderIsacT&) = delete; + AudioEncoderIsacT& operator=(const AudioEncoderIsacT&) = delete; + int SampleRateHz() const override; size_t NumChannels() const override; size_t Num10MsFramesInNextPacket() const override; @@ -99,8 +101,6 @@ class AudioEncoderIsacT final : public AudioEncoder { // Start out with a reasonable default that we can use until we receive a real // value. 
DataSize overhead_per_packet_ = DataSize::Bytes(28); - - RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderIsacT); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h index fa84515204..1bd27cf80d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h @@ -11,6 +11,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_IMPL_H_ #define MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_IMPL_H_ +#include "modules/audio_coding/codecs/isac/audio_encoder_isac_t.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_minmax.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/include/isacfix.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/include/isacfix.h index 87956a6997..dcc7b0991d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/include/isacfix.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/include/isacfix.h @@ -394,7 +394,7 @@ int16_t WebRtcIsacfix_FreeInternal(ISACFIX_MainStruct* ISAC_main_inst); /**************************************************************************** * WebRtcIsacfix_GetNewBitStream(...) * - * This function returns encoded data, with the recieved bwe-index in the + * This function returns encoded data, with the received bwe-index in the * stream. It should always return a complete packet, i.e. only called once * even for 60 msec frames * diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h index cc4ed555cf..d112bfe7f2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h @@ -38,7 +38,7 @@ int WebRtcIsacfix_EncLogisticMulti2(Bitstr_enc* streamData, int16_t* dataQ7, const uint16_t* env, - const int16_t lenData); + int16_t lenData); /**************************************************************************** * WebRtcIsacfix_EncTerminate(...) @@ -73,7 +73,7 @@ int16_t WebRtcIsacfix_EncTerminate(Bitstr_enc* streamData); int WebRtcIsacfix_DecLogisticMulti2(int16_t* data, Bitstr_dec* streamData, const int32_t* env, - const int16_t lenData); + int16_t lenData); /**************************************************************************** * WebRtcIsacfix_EncHistMulti(...) @@ -92,7 +92,7 @@ int WebRtcIsacfix_DecLogisticMulti2(int16_t* data, int WebRtcIsacfix_EncHistMulti(Bitstr_enc* streamData, const int16_t* data, const uint16_t* const* cdf, - const int16_t lenData); + int16_t lenData); /**************************************************************************** * WebRtcIsacfix_DecHistBisectMulti(...) @@ -118,7 +118,7 @@ int16_t WebRtcIsacfix_DecHistBisectMulti(int16_t* data, Bitstr_dec* streamData, const uint16_t* const* cdf, const uint16_t* cdfSize, - const int16_t lenData); + int16_t lenData); /**************************************************************************** * WebRtcIsacfix_DecHistOneStepMulti(...) 
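// Editor's note (illustrative sketch, not part of the patch): the hunks in
// arith_routins.h above, and in the bandwidth_estimator.h, entropy_coding.h and
// filterbank_internal.h hunks that follow, drop top-level `const` from by-value
// parameters in declarations. This changes nothing for callers: top-level const
// on a value parameter is not part of the function type, so the two
// declarations below (hypothetical names) refer to the same function. The const
// only matters inside a definition, where it keeps the parameter read-only.
#include <cstdint>

int SumQ0(const int16_t* data, int len);        // new header style
int SumQ0(const int16_t* data, const int len);  // redeclares the same function

int SumQ0(const int16_t* data, const int len) {
  int acc = 0;
  for (int i = 0; i < len; ++i) {
    acc += data[i];  // `len` stays read-only within this definition
  }
  return acc;
}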
@@ -144,6 +144,6 @@ int16_t WebRtcIsacfix_DecHistOneStepMulti(int16_t* data, Bitstr_dec* streamData, const uint16_t* const* cdf, const uint16_t* initIndex, - const int16_t lenData); + int16_t lenData); #endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ARITH_ROUTINS_H_ */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h index ebb74d6c49..f106746f14 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h @@ -56,17 +56,17 @@ int32_t WebRtcIsacfix_InitBandwidthEstimator(BwEstimatorstr* bwest_str); */ int32_t WebRtcIsacfix_UpdateUplinkBwImpl(BwEstimatorstr* bwest_str, - const uint16_t rtp_number, - const int16_t frameSize, - const uint32_t send_ts, - const uint32_t arr_ts, - const size_t pksize, - const uint16_t Index); + uint16_t rtp_number, + int16_t frameSize, + uint32_t send_ts, + uint32_t arr_ts, + size_t pksize, + uint16_t Index); /* Update receiving estimates. Used when we only receive BWE index, no iSAC data * packet. */ int16_t WebRtcIsacfix_UpdateUplinkBwRec(BwEstimatorstr* bwest_str, - const int16_t Index); + int16_t Index); /**************************************************************************** * WebRtcIsacfix_GetDownlinkBwIndexImpl(...) @@ -100,19 +100,19 @@ int16_t WebRtcIsacfix_GetUplinkMaxDelay(const BwEstimatorstr* bwest_str); */ uint16_t WebRtcIsacfix_GetMinBytes( RateModel* State, - int16_t StreamSize, /* bytes in bitstream */ - const int16_t FrameLen, /* ms per frame */ - const int16_t BottleNeck, /* bottle neck rate; excl headers (bps) */ - const int16_t DelayBuildUp); /* max delay from bottle neck buffering (ms) */ + int16_t StreamSize, /* bytes in bitstream */ + int16_t FrameLen, /* ms per frame */ + int16_t BottleNeck, /* bottle neck rate; excl headers (bps) */ + int16_t DelayBuildUp); /* max delay from bottle neck buffering (ms) */ /* * update long-term average bitrate and amount of data in buffer */ void WebRtcIsacfix_UpdateRateModel( RateModel* State, - int16_t StreamSize, /* bytes in bitstream */ - const int16_t FrameSamples, /* samples per frame */ - const int16_t BottleNeck); /* bottle neck rate; excl headers (bps) */ + int16_t StreamSize, /* bytes in bitstream */ + int16_t FrameSamples, /* samples per frame */ + int16_t BottleNeck); /* bottle neck rate; excl headers (bps) */ void WebRtcIsacfix_InitRateModel(RateModel* State); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h index b4251cee1e..ae11394f7c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h @@ -101,19 +101,19 @@ void WebRtcIsacfix_TranscodeLpcCoef(int32_t* tmpcoeffs_gQ6, int16_t* index_gQQ); typedef void (*MatrixProduct1)(const int16_t matrix0[], const int32_t matrix1[], int32_t matrix_product[], - const int matrix1_index_factor1, - const int matrix0_index_factor1, - const int matrix1_index_init_case, - const int matrix1_index_step, - const int matrix0_index_step, - const int inner_loop_count, - const int mid_loop_count, - const int shift); + int matrix1_index_factor1, + int 
matrix0_index_factor1, + int matrix1_index_init_case, + int matrix1_index_step, + int matrix0_index_step, + int inner_loop_count, + int mid_loop_count, + int shift); typedef void (*MatrixProduct2)(const int16_t matrix0[], const int32_t matrix1[], int32_t matrix_product[], - const int matrix0_index_factor, - const int matrix0_index_step); + int matrix0_index_factor, + int matrix0_index_step); extern MatrixProduct1 WebRtcIsacfix_MatrixProduct1; extern MatrixProduct2 WebRtcIsacfix_MatrixProduct2; @@ -121,57 +121,57 @@ extern MatrixProduct2 WebRtcIsacfix_MatrixProduct2; void WebRtcIsacfix_MatrixProduct1C(const int16_t matrix0[], const int32_t matrix1[], int32_t matrix_product[], - const int matrix1_index_factor1, - const int matrix0_index_factor1, - const int matrix1_index_init_case, - const int matrix1_index_step, - const int matrix0_index_step, - const int inner_loop_count, - const int mid_loop_count, - const int shift); + int matrix1_index_factor1, + int matrix0_index_factor1, + int matrix1_index_init_case, + int matrix1_index_step, + int matrix0_index_step, + int inner_loop_count, + int mid_loop_count, + int shift); void WebRtcIsacfix_MatrixProduct2C(const int16_t matrix0[], const int32_t matrix1[], int32_t matrix_product[], - const int matrix0_index_factor, - const int matrix0_index_step); + int matrix0_index_factor, + int matrix0_index_step); #if defined(WEBRTC_HAS_NEON) void WebRtcIsacfix_MatrixProduct1Neon(const int16_t matrix0[], const int32_t matrix1[], int32_t matrix_product[], - const int matrix1_index_factor1, - const int matrix0_index_factor1, - const int matrix1_index_init_case, - const int matrix1_index_step, - const int matrix0_index_step, - const int inner_loop_count, - const int mid_loop_count, - const int shift); + int matrix1_index_factor1, + int matrix0_index_factor1, + int matrix1_index_init_case, + int matrix1_index_step, + int matrix0_index_step, + int inner_loop_count, + int mid_loop_count, + int shift); void WebRtcIsacfix_MatrixProduct2Neon(const int16_t matrix0[], const int32_t matrix1[], int32_t matrix_product[], - const int matrix0_index_factor, - const int matrix0_index_step); + int matrix0_index_factor, + int matrix0_index_step); #endif #if defined(MIPS32_LE) void WebRtcIsacfix_MatrixProduct1MIPS(const int16_t matrix0[], const int32_t matrix1[], int32_t matrix_product[], - const int matrix1_index_factor1, - const int matrix0_index_factor1, - const int matrix1_index_init_case, - const int matrix1_index_step, - const int matrix0_index_step, - const int inner_loop_count, - const int mid_loop_count, - const int shift); + int matrix1_index_factor1, + int matrix0_index_factor1, + int matrix1_index_init_case, + int matrix1_index_step, + int matrix0_index_step, + int inner_loop_count, + int mid_loop_count, + int shift); void WebRtcIsacfix_MatrixProduct2MIPS(const int16_t matrix0[], const int32_t matrix1[], int32_t matrix_product[], - const int matrix0_index_factor, - const int matrix0_index_step); + int matrix0_index_factor, + int matrix0_index_step); #endif #endif // MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ENTROPY_CODING_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h index 6b99914b64..f741e6f677 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h @@ 
-46,7 +46,7 @@ typedef void (*AllpassFilter2FixDec16)( int16_t* data_ch2, // Input and output in channel 2, in Q0 const int16_t* factor_ch1, // Scaling factor for channel 1, in Q15 const int16_t* factor_ch2, // Scaling factor for channel 2, in Q15 - const int length, // Length of the data buffers + int length, // Length of the data buffers int32_t* filter_state_ch1, // Filter state for channel 1, in Q16 int32_t* filter_state_ch2); // Filter state for channel 2, in Q16 extern AllpassFilter2FixDec16 WebRtcIsacfix_AllpassFilter2FixDec16; @@ -55,7 +55,7 @@ void WebRtcIsacfix_AllpassFilter2FixDec16C(int16_t* data_ch1, int16_t* data_ch2, const int16_t* factor_ch1, const int16_t* factor_ch2, - const int length, + int length, int32_t* filter_state_ch1, int32_t* filter_state_ch2); @@ -64,7 +64,7 @@ void WebRtcIsacfix_AllpassFilter2FixDec16Neon(int16_t* data_ch1, int16_t* data_ch2, const int16_t* factor_ch1, const int16_t* factor_ch2, - const int length, + int length, int32_t* filter_state_ch1, int32_t* filter_state_ch2); #endif @@ -74,7 +74,7 @@ void WebRtcIsacfix_AllpassFilter2FixDec16MIPS(int16_t* data_ch1, int16_t* data_ch2, const int16_t* factor_ch1, const int16_t* factor_ch2, - const int length, + int length, int32_t* filter_state_ch1, int32_t* filter_state_ch2); #endif diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c index 9a66591de1..a7d44e883d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c @@ -381,7 +381,7 @@ int WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst, /**************************************************************************** * WebRtcIsacfix_GetNewBitStream(...) * - * This function returns encoded data, with the recieved bwe-index in the + * This function returns encoded data, with the received bwe-index in the * stream. It should always return a complete packet, i.e. only called once * even for 60 msec frames * diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h index 6965822952..50e1b12459 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h @@ -20,6 +20,8 @@ #include +#include "modules/audio_coding/codecs/isac/fix/source/settings.h" + /* indices of KLT coefficients used */ extern const uint16_t WebRtcIsacfix_kSelIndGain[12]; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/include/isac.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/include/isac.h index f45bbb3897..3b05a8bcda 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/include/isac.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/include/isac.h @@ -453,7 +453,7 @@ int16_t WebRtcIsac_SetEncSampRate(ISACStruct* ISAC_main_inst, /****************************************************************************** * WebRtcIsac_GetNewBitStream(...) * - * This function returns encoded data, with the recieved bwe-index in the + * This function returns encoded data, with the received bwe-index in the * stream. 
If the rate is set to a value less than bottleneck of codec * the new bistream will be re-encoded with the given target rate. * It should always return a complete packet, i.e. only called once diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.h index 6e7ea1da5e..3f9f6de7bb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.h @@ -24,9 +24,9 @@ int WebRtcIsac_EncLogisticMulti2( Bitstr* streamdata, /* in-/output struct containing bitstream */ int16_t* dataQ7, /* input: data vector */ const uint16_t* - env, /* input: side info vector defining the width of the pdf */ - const int N, /* input: data vector length */ - const int16_t isSWB12kHz); /* if the codec is working in 12kHz bandwidth */ + env, /* input: side info vector defining the width of the pdf */ + int N, /* input: data vector length */ + int16_t isSWB12kHz); /* if the codec is working in 12kHz bandwidth */ /* returns the number of bytes in the stream */ int WebRtcIsac_EncTerminate( @@ -38,15 +38,15 @@ int WebRtcIsac_DecLogisticMulti2( Bitstr* streamdata, /* in-/output struct containing bitstream */ const uint16_t* env, /* input: side info vector defining the width of the pdf */ - const int16_t* dither, /* input: dither vector */ - const int N, /* input: data vector length */ - const int16_t isSWB12kHz); /* if the codec is working in 12kHz bandwidth */ + const int16_t* dither, /* input: dither vector */ + int N, /* input: data vector length */ + int16_t isSWB12kHz); /* if the codec is working in 12kHz bandwidth */ void WebRtcIsac_EncHistMulti( Bitstr* streamdata, /* in-/output struct containing bitstream */ const int* data, /* input: data vector */ const uint16_t* const* cdf, /* input: array of cdf arrays */ - const int N); /* input: data vector length */ + int N); /* input: data vector length */ int WebRtcIsac_DecHistBisectMulti( int* data, /* output: data vector */ @@ -54,7 +54,7 @@ int WebRtcIsac_DecHistBisectMulti( const uint16_t* const* cdf, /* input: array of cdf arrays */ const uint16_t* cdf_size, /* input: array of cdf table sizes+1 (power of two: 2^k) */ - const int N); /* input: data vector length */ + int N); /* input: data vector length */ int WebRtcIsac_DecHistOneStepMulti( int* data, /* output: data vector */ @@ -62,6 +62,6 @@ int WebRtcIsac_DecHistOneStepMulti( const uint16_t* const* cdf, /* input: array of cdf arrays */ const uint16_t* init_index, /* input: vector of initial cdf table search entries */ - const int N); /* input: data vector length */ + int N); /* input: data vector length */ #endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ARITH_ROUTINES_H_ */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h index 221e65ff3f..5f4550a3a5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h @@ -92,11 +92,11 @@ int32_t WebRtcIsac_InitBandwidthEstimator( * estimated by other side */ /* returns 0 if everything went fine, -1 otherwise */ int16_t WebRtcIsac_UpdateBandwidthEstimator(BwEstimatorstr* bwest_str, - const 
uint16_t rtp_number, - const int32_t frame_length, - const uint32_t send_ts, - const uint32_t arr_ts, - const size_t pksize); + uint16_t rtp_number, + int32_t frame_length, + uint32_t send_ts, + uint32_t arr_ts, + size_t pksize); /* Update receiving estimates. Used when we only receive BWE index, no iSAC data * packet. */ @@ -131,10 +131,10 @@ int32_t WebRtcIsac_GetUplinkMaxDelay(const BwEstimatorstr* bwest_str); */ int WebRtcIsac_GetMinBytes( RateModel* State, - int StreamSize, /* bytes in bitstream */ - const int FrameLen, /* ms per frame */ - const double BottleNeck, /* bottle neck rate; excl headers (bps) */ - const double DelayBuildUp, /* max delay from bottleneck buffering (ms) */ + int StreamSize, /* bytes in bitstream */ + int FrameLen, /* ms per frame */ + double BottleNeck, /* bottle neck rate; excl headers (bps) */ + double DelayBuildUp, /* max delay from bottleneck buffering (ms) */ enum ISACBandwidth bandwidth /*,int16_t frequentLargePackets*/); @@ -143,9 +143,9 @@ int WebRtcIsac_GetMinBytes( */ void WebRtcIsac_UpdateRateModel( RateModel* State, - int StreamSize, /* bytes in bitstream */ - const int FrameSamples, /* samples per frame */ - const double BottleNeck); /* bottle neck rate; excl headers (bps) */ + int StreamSize, /* bytes in bitstream */ + int FrameSamples, /* samples per frame */ + double BottleNeck); /* bottle neck rate; excl headers (bps) */ void WebRtcIsac_InitRateModel(RateModel* State); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/filter_functions.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/filter_functions.h index 48a9b7426b..a747a7f549 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/filter_functions.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/filter_functions.h @@ -11,6 +11,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_FILTER_FUNCTIONS_H_ #define MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_FILTER_FUNCTIONS_H_ +#include + #include "modules/audio_coding/codecs/isac/main/source/structs.h" void WebRtcIsac_AutoCorr(double* r, const double* x, size_t N, size_t order); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c index 73f132c228..456f447d9a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c @@ -678,7 +678,7 @@ int WebRtcIsac_Encode(ISACStruct* ISAC_main_inst, /****************************************************************************** * WebRtcIsac_GetNewBitStream(...) * - * This function returns encoded data, with the recieved bwe-index in the + * This function returns encoded data, with the received bwe-index in the * stream. If the rate is set to a value less than bottleneck of codec * the new bistream will be re-encoded with the given target rate. * It should always return a complete packet, i.e. 
only called once diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_coder_opus_common.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_coder_opus_common.cc index fca87e218a..03c02186d0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_coder_opus_common.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_coder_opus_common.cc @@ -10,11 +10,13 @@ #include "modules/audio_coding/codecs/opus/audio_coder_opus_common.h" +#include "absl/strings/string_view.h" + namespace webrtc { absl::optional GetFormatParameter(const SdpAudioFormat& format, - const std::string& param) { - auto it = format.parameters.find(param); + absl::string_view param) { + auto it = format.parameters.find(std::string(param)); if (it == format.parameters.end()) return absl::nullopt; @@ -25,7 +27,7 @@ absl::optional GetFormatParameter(const SdpAudioFormat& format, template <> absl::optional> GetFormatParameter( const SdpAudioFormat& format, - const std::string& param) { + absl::string_view param) { std::vector result; const std::string comma_separated_list = GetFormatParameter(format, param).value_or(""); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_coder_opus_common.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_coder_opus_common.h index cad914e557..5ebb51b577 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_coder_opus_common.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_coder_opus_common.h @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" @@ -23,18 +24,18 @@ namespace webrtc { absl::optional GetFormatParameter(const SdpAudioFormat& format, - const std::string& param); + absl::string_view param); template absl::optional GetFormatParameter(const SdpAudioFormat& format, - const std::string& param) { + absl::string_view param) { return rtc::StringToNumber(GetFormatParameter(format, param).value_or("")); } template <> absl::optional> GetFormatParameter( const SdpAudioFormat& format, - const std::string& param); + absl::string_view param); class OpusFrame : public AudioDecoder::EncodedAudioFrame { public: diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.h index efc3f0dda8..2ff47a8a53 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.h @@ -21,7 +21,6 @@ #include "api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h" #include "modules/audio_coding/codecs/opus/opus_interface.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -32,6 +31,11 @@ class AudioDecoderMultiChannelOpusImpl final : public AudioDecoder { ~AudioDecoderMultiChannelOpusImpl() override; + AudioDecoderMultiChannelOpusImpl(const AudioDecoderMultiChannelOpusImpl&) = + delete; + AudioDecoderMultiChannelOpusImpl& operator=( + const AudioDecoderMultiChannelOpusImpl&) = delete; + std::vector ParsePayload(rtc::Buffer&& payload, uint32_t timestamp) override; void Reset() override; @@ -63,7 +67,6 @@ class 
AudioDecoderMultiChannelOpusImpl final : public AudioDecoder { OpusDecInst* dec_state_; const AudioDecoderMultiChannelOpusConfig config_; - RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderMultiChannelOpusImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h index c79272284d..e8fd0440bc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h @@ -19,7 +19,6 @@ #include "api/audio_codecs/audio_decoder.h" #include "modules/audio_coding/codecs/opus/opus_interface.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -29,6 +28,9 @@ class AudioDecoderOpusImpl final : public AudioDecoder { int sample_rate_hz = 48000); ~AudioDecoderOpusImpl() override; + AudioDecoderOpusImpl(const AudioDecoderOpusImpl&) = delete; + AudioDecoderOpusImpl& operator=(const AudioDecoderOpusImpl&) = delete; + std::vector ParsePayload(rtc::Buffer&& payload, uint32_t timestamp) override; void Reset() override; @@ -55,7 +57,6 @@ class AudioDecoderOpusImpl final : public AudioDecoder { OpusDecInst* dec_state_; const size_t channels_; const int sample_rate_hz_; - RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderOpusImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.h index eadb4a6eb9..8a7210515c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.h @@ -21,7 +21,6 @@ #include "api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.h" #include "api/units/time_delta.h" #include "modules/audio_coding/codecs/opus/opus_interface.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -34,6 +33,11 @@ class AudioEncoderMultiChannelOpusImpl final : public AudioEncoder { int payload_type); ~AudioEncoderMultiChannelOpusImpl() override; + AudioEncoderMultiChannelOpusImpl(const AudioEncoderMultiChannelOpusImpl&) = + delete; + AudioEncoderMultiChannelOpusImpl& operator=( + const AudioEncoderMultiChannelOpusImpl&) = delete; + // Static interface for use by BuiltinAudioEncoderFactory. 
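The Opus decoder/encoder headers in the hunks above all apply the same mechanical change: the RTC_DISALLOW_COPY_AND_ASSIGN macro (and the rtc_base/constructor_magic.h include) is dropped in favour of explicitly deleted copy operations in the public section. A minimal sketch of the pattern; the class name here is a placeholder, not part of WebRTC:

#include <cstdio>

class PayloadSink {
 public:
  PayloadSink() = default;
  ~PayloadSink() = default;

  // Replaces the old private RTC_DISALLOW_COPY_AND_ASSIGN(PayloadSink): copying
  // is rejected at compile time, and the intent is visible in the public
  // interface instead of being hidden behind a macro.
  PayloadSink(const PayloadSink&) = delete;
  PayloadSink& operator=(const PayloadSink&) = delete;
};

int main() {
  PayloadSink sink;
  // PayloadSink copy = sink;   // would not compile, as intended
  (void)sink;
  std::printf("copy construction and assignment are deleted\n");
  return 0;
}

Deleted functions also give clearer compiler diagnostics than the macro's undeclared private members, which is presumably why upstream WebRTC made this sweep across the codec and NetEq headers.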
static constexpr const char* GetPayloadName() { return "multiopus"; } static absl::optional QueryAudioEncoder( @@ -81,7 +85,6 @@ class AudioEncoderMultiChannelOpusImpl final : public AudioEncoder { int next_frame_length_ms_; friend struct AudioEncoderMultiChannelOpus; - RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderMultiChannelOpusImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc index fe6d85263a..dcd2ce0344 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc @@ -17,6 +17,7 @@ #include #include "absl/strings/match.h" +#include "absl/strings/string_view.h" #include "modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.h" #include "modules/audio_coding/audio_network_adaptor/controller_manager.h" #include "modules/audio_coding/codecs/opus/audio_coder_opus_common.h" @@ -349,7 +350,7 @@ AudioEncoderOpusImpl::AudioEncoderOpusImpl(const AudioEncoderOpusConfig& config, : AudioEncoderOpusImpl( config, payload_type, - [this](const std::string& config_string, RtcEventLog* event_log) { + [this](absl::string_view config_string, RtcEventLog* event_log) { return DefaultAudioNetworkAdaptorCreator(config_string, event_log); }, // We choose 5sec as initial time constant due to empirical data. @@ -777,7 +778,7 @@ void AudioEncoderOpusImpl::ApplyAudioNetworkAdaptor() { std::unique_ptr AudioEncoderOpusImpl::DefaultAudioNetworkAdaptorCreator( - const std::string& config_string, + absl::string_view config_string, RtcEventLog* event_log) const { AudioNetworkAdaptorImpl::Config config; config.event_log = event_log; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h index c7ee4f4523..a0c42af121 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h @@ -16,6 +16,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" @@ -23,7 +24,6 @@ #include "common_audio/smoothing_filter.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" #include "modules/audio_coding/codecs/opus/opus_interface.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -46,7 +46,7 @@ class AudioEncoderOpusImpl final : public AudioEncoder { OpusEncInst* inst); using AudioNetworkAdaptorCreator = - std::function(const std::string&, + std::function(absl::string_view, RtcEventLog*)>; AudioEncoderOpusImpl(const AudioEncoderOpusConfig& config, int payload_type); @@ -61,6 +61,9 @@ class AudioEncoderOpusImpl final : public AudioEncoder { AudioEncoderOpusImpl(int payload_type, const SdpAudioFormat& format); ~AudioEncoderOpusImpl() override; + AudioEncoderOpusImpl(const AudioEncoderOpusImpl&) = delete; + AudioEncoderOpusImpl& operator=(const AudioEncoderOpusImpl&) = delete; + int SampleRateHz() const override; size_t NumChannels() const override; int RtpTimestampRateHz() const override; @@ -144,7 +147,7 @@ class AudioEncoderOpusImpl final : public AudioEncoder { void ApplyAudioNetworkAdaptor(); std::unique_ptr 
DefaultAudioNetworkAdaptorCreator( - const std::string& config_string, + absl::string_view config_string, RtcEventLog* event_log) const; void MaybeUpdateUplinkBandwidth(); @@ -175,7 +178,6 @@ class AudioEncoderOpusImpl final : public AudioEncoder { int consecutive_dtx_frames_; friend struct AudioEncoderOpus; - RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderOpusImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc index 1dd2ff289e..7761efe8b3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc @@ -42,7 +42,12 @@ int AudioDecoderPcm16B::DecodeInternal(const uint8_t* encoded, int16_t* decoded, SpeechType* speech_type) { RTC_DCHECK_EQ(sample_rate_hz_, sample_rate_hz); - size_t ret = WebRtcPcm16b_Decode(encoded, encoded_len, decoded); + // Adjust the encoded length down to ensure the same number of samples in each + // channel. + const size_t encoded_len_adjusted = + PacketDuration(encoded, encoded_len) * 2 * + Channels(); // 2 bytes per sample per channel + size_t ret = WebRtcPcm16b_Decode(encoded, encoded_len_adjusted, decoded); *speech_type = ConvertSpeechType(1); return static_cast(ret); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h index f08c4a6298..6f50161d3f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h @@ -18,13 +18,16 @@ #include "api/audio_codecs/audio_decoder.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { class AudioDecoderPcm16B final : public AudioDecoder { public: AudioDecoderPcm16B(int sample_rate_hz, size_t num_channels); + + AudioDecoderPcm16B(const AudioDecoderPcm16B&) = delete; + AudioDecoderPcm16B& operator=(const AudioDecoderPcm16B&) = delete; + void Reset() override; std::vector ParsePayload(rtc::Buffer&& payload, uint32_t timestamp) override; @@ -42,7 +45,6 @@ class AudioDecoderPcm16B final : public AudioDecoder { private: const int sample_rate_hz_; const size_t num_channels_; - RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcm16B); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h index 71c757250a..c363b40b3f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h @@ -12,7 +12,6 @@ #define MODULES_AUDIO_CODING_CODECS_PCM16B_AUDIO_ENCODER_PCM16B_H_ #include "modules/audio_coding/codecs/g711/audio_encoder_pcm.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -29,6 +28,9 @@ class AudioEncoderPcm16B final : public AudioEncoderPcm { explicit AudioEncoderPcm16B(const Config& config) : AudioEncoderPcm(config, config.sample_rate_hz) {} + AudioEncoderPcm16B(const AudioEncoderPcm16B&) = delete; + AudioEncoderPcm16B& operator=(const AudioEncoderPcm16B&) = delete; + protected: size_t EncodeCall(const int16_t* audio, size_t input_len, 
@@ -37,9 +39,6 @@ class AudioEncoderPcm16B final : public AudioEncoderPcm { size_t BytesPerSample() const override; AudioEncoder::CodecType GetCodecType() const override; - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcm16B); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc index 9643c7b1a5..724bba52d6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc @@ -15,10 +15,10 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/byte_order.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { static constexpr const int kRedMaxPacketSize = @@ -40,9 +40,9 @@ AudioEncoderCopyRed::Config::Config() = default; AudioEncoderCopyRed::Config::Config(Config&&) = default; AudioEncoderCopyRed::Config::~Config() = default; -size_t GetMaxRedundancyFromFieldTrial() { +size_t GetMaxRedundancyFromFieldTrial(const FieldTrialsView& field_trials) { const std::string red_trial = - webrtc::field_trial::FindFullName("WebRTC-Audio-Red-For-Opus"); + field_trials.Lookup("WebRTC-Audio-Red-For-Opus"); size_t redundancy = 0; if (sscanf(red_trial.c_str(), "Enabled-%zu", &redundancy) != 1 || redundancy > 9) { @@ -51,14 +51,16 @@ size_t GetMaxRedundancyFromFieldTrial() { return redundancy; } -AudioEncoderCopyRed::AudioEncoderCopyRed(Config&& config) +AudioEncoderCopyRed::AudioEncoderCopyRed(Config&& config, + const FieldTrialsView& field_trials) : speech_encoder_(std::move(config.speech_encoder)), primary_encoded_(0, kAudioMaxRtpPacketLen), max_packet_length_(kAudioMaxRtpPacketLen), red_payload_type_(config.payload_type) { RTC_CHECK(speech_encoder_) << "Speech encoder not provided."; - auto number_of_redundant_encodings = GetMaxRedundancyFromFieldTrial(); + auto number_of_redundant_encodings = + GetMaxRedundancyFromFieldTrial(field_trials); for (size_t i = 0; i < number_of_redundant_encodings; i++) { std::pair redundant; redundant.second.EnsureCapacity(kAudioMaxRtpPacketLen); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h index d5b1bf6868..359b5eaa17 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h @@ -21,9 +21,9 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio_codecs/audio_encoder.h" +#include "api/field_trials_view.h" #include "api/units/time_delta.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -43,10 +43,13 @@ class AudioEncoderCopyRed final : public AudioEncoder { std::unique_ptr speech_encoder; }; - explicit AudioEncoderCopyRed(Config&& config); + AudioEncoderCopyRed(Config&& config, const FieldTrialsView& field_trials); ~AudioEncoderCopyRed() override; + AudioEncoderCopyRed(const AudioEncoderCopyRed&) = delete; + AudioEncoderCopyRed& operator=(const AudioEncoderCopyRed&) = delete; + int SampleRateHz() const override; size_t NumChannels() const override; int RtpTimestampRateHz() const override; @@ -92,8 +95,6 @@ class AudioEncoderCopyRed final : 
public AudioEncoder { size_t max_packet_length_; int red_payload_type_; std::list> redundant_encodings_; - - RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderCopyRed); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module.h index 003d966fbd..8b518fb979 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module.h @@ -190,7 +190,7 @@ class AudioCodingModule { // 0 if payload is successfully pushed in. // virtual int32_t IncomingPacket(const uint8_t* incoming_payload, - const size_t payload_len_bytes, + size_t payload_len_bytes, const RTPHeader& rtp_header) = 0; /////////////////////////////////////////////////////////////////////////// diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h index e5598e3c45..9d2fcfe22e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h @@ -87,6 +87,8 @@ struct NetworkStatistics { uint64_t silentConcealedSamples; uint64_t concealmentEvents; uint64_t jitterBufferDelayMs; + uint64_t jitterBufferTargetDelayMs; + uint64_t jitterBufferMinimumDelayMs; uint64_t jitterBufferEmittedCount; uint64_t insertedSamplesForDeceleration; uint64_t removedSamplesForAcceleration; @@ -95,8 +97,6 @@ struct NetworkStatistics { // Stats below correspond to similarly-named fields in the WebRTC stats spec. // https://w3c.github.io/webrtc-stats/#dom-rtcreceivedrtpstreamstats uint64_t packetsDiscarded; - // Non standard stats propagated to spec complaint GetStats API. - uint64_t jitterBufferTargetDelayMs; // Stats below DO NOT correspond directly to anything in the WebRTC stats // fraction (of original stream) of synthesized audio inserted through // expansion (in Q14) diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/accelerate.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/accelerate.h index e03f609ffb..01fe874d54 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/accelerate.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/accelerate.h @@ -15,7 +15,6 @@ #include #include "modules/audio_coding/neteq/time_stretch.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -33,6 +32,9 @@ class Accelerate : public TimeStretch { const BackgroundNoise& background_noise) : TimeStretch(sample_rate_hz, num_channels, background_noise) {} + Accelerate(const Accelerate&) = delete; + Accelerate& operator=(const Accelerate&) = delete; + // This method performs the actual Accelerate operation. The samples are // read from `input`, of length `input_length` elements, and are written to // `output`. 
The number of samples removed through time-stretching is @@ -62,9 +64,6 @@ class Accelerate : public TimeStretch { bool active_speech, bool fast_mode, AudioMultiVector* output) const override; - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(Accelerate); }; struct AccelerateFactory { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc index 220d5a17d7..14ae94649b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc @@ -69,6 +69,9 @@ void AudioMultiVector::CopyTo(AudioMultiVector* copy_to) const { void AudioMultiVector::PushBackInterleaved( rtc::ArrayView append_this) { RTC_DCHECK_EQ(append_this.size() % num_channels_, 0); + if (append_this.empty()) { + return; + } if (num_channels_ == 1) { // Special case to avoid extra allocation and data shuffling. channels_[0]->PushBack(append_this.data(), append_this.size()); @@ -78,11 +81,8 @@ void AudioMultiVector::PushBackInterleaved( int16_t* temp_array = new int16_t[length_per_channel]; // Temporary storage. for (size_t channel = 0; channel < num_channels_; ++channel) { // Copy elements to `temp_array`. - // Set `source_ptr` to first element of this channel. - const int16_t* source_ptr = &append_this[channel]; for (size_t i = 0; i < length_per_channel; ++i) { - temp_array[i] = *source_ptr; - source_ptr += num_channels_; // Jump to next element of this channel. + temp_array[i] = append_this[channel + i * num_channels_]; } channels_[channel]->PushBack(temp_array, length_per_channel); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_multi_vector.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_multi_vector.h index 10179d7f07..715ec6dfc7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_multi_vector.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_multi_vector.h @@ -18,7 +18,6 @@ #include "api/array_view.h" #include "modules/audio_coding/neteq/audio_vector.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -34,6 +33,9 @@ class AudioMultiVector { virtual ~AudioMultiVector(); + AudioMultiVector(const AudioMultiVector&) = delete; + AudioMultiVector& operator=(const AudioMultiVector&) = delete; + // Deletes all values and make the vector empty. virtual void Clear(); @@ -130,9 +132,6 @@ class AudioMultiVector { protected: std::vector channels_; size_t num_channels_; - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(AudioMultiVector); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_vector.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_vector.h index c722b56965..d68f3ec6be 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_vector.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_vector.h @@ -17,7 +17,6 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -31,6 +30,9 @@ class AudioVector { virtual ~AudioVector(); + AudioVector(const AudioVector&) = delete; + AudioVector& operator=(const AudioVector&) = delete; + // Deletes all values and make the vector empty. virtual void Clear(); @@ -164,8 +166,6 @@ class AudioVector { // The index of the sample after the last sample in `array_`. 
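The AudioMultiVector::PushBackInterleaved hunk above adds an early return for empty input and replaces the pointer-stepping copy loop with direct indexing into the interleaved buffer. A self-contained sketch of the same de-interleaving pattern, using plain std::vector storage instead of the real AudioVector class:

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

// De-interleaves `interleaved` (frame-major: ch0, ch1, ..., ch0, ch1, ...) into
// one vector per channel. Sample i of channel c is read from
// interleaved[c + i * num_channels], the same indexing used in the hunk above.
void PushBackInterleaved(const std::vector<int16_t>& interleaved,
                         size_t num_channels,
                         std::vector<std::vector<int16_t>>& channels) {
  if (interleaved.empty() || num_channels == 0 ||
      interleaved.size() % num_channels != 0) {
    return;  // Nothing to append, or malformed input.
  }
  const size_t length_per_channel = interleaved.size() / num_channels;
  for (size_t c = 0; c < num_channels; ++c) {
    for (size_t i = 0; i < length_per_channel; ++i) {
      channels[c].push_back(interleaved[c + i * num_channels]);
    }
  }
}

int main() {
  std::vector<std::vector<int16_t>> channels(2);
  PushBackInterleaved({1, 100, 2, 200, 3, 300}, 2, channels);
  std::printf("left: %d %d %d  right: %d %d %d\n", channels[0][0],
              channels[0][1], channels[0][2], channels[1][0], channels[1][1],
              channels[1][2]);
  return 0;
}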
size_t end_index_; - - RTC_DISALLOW_COPY_AND_ASSIGN(AudioVector); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/background_noise.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/background_noise.h index 005b3766fc..8e6d5890a0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/background_noise.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/background_noise.h @@ -16,7 +16,6 @@ #include #include "api/array_view.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -34,6 +33,9 @@ class BackgroundNoise { explicit BackgroundNoise(size_t num_channels); virtual ~BackgroundNoise(); + BackgroundNoise(const BackgroundNoise&) = delete; + BackgroundNoise& operator=(const BackgroundNoise&) = delete; + void Reset(); // Updates the parameter estimates based on the signal currently in the @@ -130,8 +132,6 @@ class BackgroundNoise { size_t num_channels_; std::unique_ptr channel_parameters_; bool initialized_; - - RTC_DISALLOW_COPY_AND_ASSIGN(BackgroundNoise); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc index 0ccc7bb53d..2c42d0d13f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc @@ -45,7 +45,8 @@ void BufferLevelFilter::Update(size_t buffer_size_samples, } void BufferLevelFilter::SetFilteredBufferLevel(int buffer_size_samples) { - filtered_current_level_ = buffer_size_samples * 256; + filtered_current_level_ = + rtc::saturated_cast(int64_t{buffer_size_samples} * 256); } void BufferLevelFilter::SetTargetBufferLevel(int target_buffer_level_ms) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h index 94a37150e4..ced36da9c2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h @@ -14,14 +14,16 @@ #include #include -#include "rtc_base/constructor_magic.h" - namespace webrtc { class BufferLevelFilter { public: BufferLevelFilter(); virtual ~BufferLevelFilter() {} + + BufferLevelFilter(const BufferLevelFilter&) = delete; + BufferLevelFilter& operator=(const BufferLevelFilter&) = delete; + virtual void Reset(); // Updates the filter. Current buffer size is `buffer_size_samples`. @@ -46,8 +48,6 @@ class BufferLevelFilter { private: int level_factor_; // Filter factor for the buffer level filter in Q8. int filtered_current_level_; // Filtered current buffer level in Q8. - - RTC_DISALLOW_COPY_AND_ASSIGN(BufferLevelFilter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/comfort_noise.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/comfort_noise.h index 6419d397d3..31fcee31d0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/comfort_noise.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/comfort_noise.h @@ -13,8 +13,6 @@ #include -#include "rtc_base/constructor_magic.h" - namespace webrtc { // Forward declarations. 
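The BufferLevelFilter::SetFilteredBufferLevel change above performs the Q8 scaling in 64-bit arithmetic and saturates the result, so a very large buffer_size_samples can no longer overflow the int member. A sketch of the same idea with a hand-rolled clamp; the real code uses rtc::saturated_cast from rtc_base/numerics/safe_conversions.h:

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <limits>

// Clamp an int64_t into the int range, mirroring a saturating cast.
int SaturateToInt(int64_t v) {
  const int64_t lo = std::numeric_limits<int>::min();
  const int64_t hi = std::numeric_limits<int>::max();
  return static_cast<int>(std::clamp(v, lo, hi));
}

// The buffer level is kept in Q8 fixed point, i.e. samples * 256. Widening to
// int64_t before multiplying avoids signed overflow for huge sample counts.
int FilteredLevelQ8(int buffer_size_samples) {
  return SaturateToInt(int64_t{buffer_size_samples} * 256);
}

int main() {
  std::printf("%d\n", FilteredLevelQ8(480));         // 122880
  std::printf("%d\n", FilteredLevelQ8(2000000000));  // saturates at INT_MAX
  return 0;
}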
@@ -42,6 +40,9 @@ class ComfortNoise { decoder_database_(decoder_database), sync_buffer_(sync_buffer) {} + ComfortNoise(const ComfortNoise&) = delete; + ComfortNoise& operator=(const ComfortNoise&) = delete; + // Resets the state. Should be called before each new comfort noise period. void Reset(); @@ -65,7 +66,6 @@ class ComfortNoise { DecoderDatabase* decoder_database_; SyncBuffer* sync_buffer_; int internal_error_code_; - RTC_DISALLOW_COPY_AND_ASSIGN(ComfortNoise); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.cc index 30463fcc49..558774dcb6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.cc @@ -12,12 +12,18 @@ #include +#include +#include #include #include "absl/types/optional.h" +#include "api/neteq/neteq.h" +#include "api/neteq/neteq_controller.h" +#include "modules/audio_coding/neteq/packet_arrival_history.h" #include "modules/audio_coding/neteq/packet_buffer.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "system_wrappers/include/field_trial.h" @@ -27,8 +33,11 @@ namespace webrtc { namespace { constexpr int kPostponeDecodingLevel = 50; -constexpr int kDefaultTargetLevelWindowMs = 100; -constexpr int kDecelerationTargetLevelOffsetMs = 85; +constexpr int kTargetLevelWindowMs = 100; +constexpr int kMaxWaitForPacketTicks = 10; +// The granularity of delay adjustments (accelerate/preemptive expand) is 15ms, +// but round up since the clock has a granularity of 10ms. 
+constexpr int kDelayAdjustmentGranularityMs = 20; std::unique_ptr CreateDelayManager( const NetEqController::Config& neteq_config) { @@ -39,8 +48,42 @@ std::unique_ptr CreateDelayManager( return std::make_unique(config, neteq_config.tick_timer); } +bool IsTimestretch(NetEq::Mode mode) { + return mode == NetEq::Mode::kAccelerateSuccess || + mode == NetEq::Mode::kAccelerateLowEnergy || + mode == NetEq::Mode::kPreemptiveExpandSuccess || + mode == NetEq::Mode::kPreemptiveExpandLowEnergy; +} + +bool IsCng(NetEq::Mode mode) { + return mode == NetEq::Mode::kRfc3389Cng || + mode == NetEq::Mode::kCodecInternalCng; +} + +bool IsExpand(NetEq::Mode mode) { + return mode == NetEq::Mode::kExpand || mode == NetEq::Mode::kCodecPlc; +} + } // namespace +DecisionLogic::Config::Config() { + StructParametersParser::Create( + "enable_stable_playout_delay", &enable_stable_playout_delay, // + "reinit_after_expands", &reinit_after_expands, // + "packet_history_size_ms", &packet_history_size_ms, // + "deceleration_target_level_offset_ms", + &deceleration_target_level_offset_ms) + ->Parse(webrtc::field_trial::FindFullName( + "WebRTC-Audio-NetEqDecisionLogicConfig")); + RTC_LOG(LS_INFO) << "NetEq decision logic config:" + << " enable_stable_playout_delay=" + << enable_stable_playout_delay + << " reinit_after_expands=" << reinit_after_expands + << " packet_history_size_ms=" << packet_history_size_ms + << " deceleration_target_level_offset_ms=" + << deceleration_target_level_offset_ms; +} + DecisionLogic::DecisionLogic(NetEqController::Config config) : DecisionLogic(config, CreateDelayManager(config), @@ -52,60 +95,34 @@ DecisionLogic::DecisionLogic( std::unique_ptr buffer_level_filter) : delay_manager_(std::move(delay_manager)), buffer_level_filter_(std::move(buffer_level_filter)), + packet_arrival_history_(config_.packet_history_size_ms), tick_timer_(config.tick_timer), disallow_time_stretching_(!config.allow_time_stretching), timescale_countdown_( - tick_timer_->GetNewCountdown(kMinTimescaleInterval + 1)), - estimate_dtx_delay_("estimate_dtx_delay", true), - time_stretch_cn_("time_stretch_cn", true), - target_level_window_ms_("target_level_window", - kDefaultTargetLevelWindowMs, - 0, - absl::nullopt) { - const std::string field_trial_name = - field_trial::FindFullName("WebRTC-Audio-NetEqDecisionLogicSettings"); - ParseFieldTrial( - {&estimate_dtx_delay_, &time_stretch_cn_, &target_level_window_ms_}, - field_trial_name); - RTC_LOG(LS_INFO) << "NetEq decision logic settings:" - " estimate_dtx_delay=" - << estimate_dtx_delay_ - << " time_stretch_cn=" << time_stretch_cn_ - << " target_level_window_ms=" << target_level_window_ms_; -} + tick_timer_->GetNewCountdown(kMinTimescaleInterval + 1)) {} DecisionLogic::~DecisionLogic() = default; -void DecisionLogic::Reset() { - cng_state_ = kCngOff; - noise_fast_forward_ = 0; - packet_length_samples_ = 0; - sample_memory_ = 0; - prev_time_scale_ = false; - last_pack_cng_or_dtmf_ = true; - timescale_countdown_.reset(); - num_consecutive_expands_ = 0; - time_stretched_cn_samples_ = 0; -} - void DecisionLogic::SoftReset() { packet_length_samples_ = 0; sample_memory_ = 0; prev_time_scale_ = false; - last_pack_cng_or_dtmf_ = true; timescale_countdown_ = tick_timer_->GetNewCountdown(kMinTimescaleInterval + 1); time_stretched_cn_samples_ = 0; delay_manager_->Reset(); buffer_level_filter_->Reset(); + packet_arrival_history_.Reset(); + last_playout_delay_ms_ = 0; } void DecisionLogic::SetSampleRate(int fs_hz, size_t output_size_samples) { // TODO(hlundin): Change to an enumerator and 
skip assert. RTC_DCHECK(fs_hz == 8000 || fs_hz == 16000 || fs_hz == 32000 || fs_hz == 48000); - sample_rate_ = fs_hz; + sample_rate_khz_ = fs_hz / 1000; output_size_samples_ = output_size_samples; + packet_arrival_history_.set_sample_rate(fs_hz); } NetEq::Operation DecisionLogic::GetDecision(const NetEqStatus& status, @@ -119,23 +136,22 @@ NetEq::Operation DecisionLogic::GetDecision(const NetEqStatus& status, cng_state_ = kCngInternalOn; } - size_t cur_size_samples = estimate_dtx_delay_ - ? status.packet_buffer_info.span_samples - : status.packet_buffer_info.num_samples; - prev_time_scale_ = - prev_time_scale_ && - (status.last_mode == NetEq::Mode::kAccelerateSuccess || - status.last_mode == NetEq::Mode::kAccelerateLowEnergy || - status.last_mode == NetEq::Mode::kPreemptiveExpandSuccess || - status.last_mode == NetEq::Mode::kPreemptiveExpandLowEnergy); + if (IsExpand(status.last_mode)) { + ++num_consecutive_expands_; + } else { + num_consecutive_expands_ = 0; + } + + if (!IsExpand(status.last_mode) && !IsCng(status.last_mode)) { + last_playout_delay_ms_ = GetPlayoutDelayMs(status); + } - // Do not update buffer history if currently playing CNG since it will bias - // the filtered buffer level. - if (status.last_mode != NetEq::Mode::kRfc3389Cng && - status.last_mode != NetEq::Mode::kCodecInternalCng && - !(status.next_packet && status.next_packet->is_dtx && - !estimate_dtx_delay_)) { - FilterBufferLevel(cur_size_samples); + prev_time_scale_ = prev_time_scale_ && IsTimestretch(status.last_mode); + if (prev_time_scale_) { + timescale_countdown_ = tick_timer_->GetNewCountdown(kMinTimescaleInterval); + } + if (!IsCng(status.last_mode)) { + FilterBufferLevel(status.packet_buffer_info.span_samples); } // Guard for errors, to avoid getting stuck in error mode. @@ -149,19 +165,17 @@ NetEq::Operation DecisionLogic::GetDecision(const NetEqStatus& status, } if (status.next_packet && status.next_packet->is_cng) { - return CngOperation(status.last_mode, status.target_timestamp, - status.next_packet->timestamp, - status.generated_noise_samples); + return CngOperation(status); } // Handle the case with no packet at all available (except maybe DTMF). if (!status.next_packet) { - return NoPacket(status.play_dtmf); + return NoPacket(status); } // If the expand period was very long, reset NetEQ since it is likely that the // sender was restarted. - if (num_consecutive_expands_ > kReinitAfterExpands) { + if (num_consecutive_expands_ > config_.reinit_after_expands) { *reset_decoder = true; return NetEq::Operation::kNormal; } @@ -173,45 +187,55 @@ NetEq::Operation DecisionLogic::GetDecision(const NetEqStatus& status, // if the mute factor is low enough (otherwise the expansion was short enough // to not be noticable). // Note that the MuteFactor is in Q14, so a value of 16384 corresponds to 1. - const size_t current_span = - estimate_dtx_delay_ ? 
status.packet_buffer_info.span_samples - : status.packet_buffer_info.span_samples_no_dtx; - const int target_level_samples = - delay_manager_->TargetDelayMs() * sample_rate_ / 1000; - if ((status.last_mode == NetEq::Mode::kExpand || - status.last_mode == NetEq::Mode::kCodecPlc) && + const int target_level_samples = TargetLevelMs() * sample_rate_khz_; + if (!config_.enable_stable_playout_delay && IsExpand(status.last_mode) && status.expand_mutefactor < 16384 / 2 && - current_span < static_cast(target_level_samples * - kPostponeDecodingLevel / 100) && + status.packet_buffer_info.span_samples < + static_cast(target_level_samples * kPostponeDecodingLevel / + 100) && !status.packet_buffer_info.dtx_or_cng) { return NetEq::Operation::kExpand; } - const uint32_t five_seconds_samples = static_cast(5 * sample_rate_); + const uint32_t five_seconds_samples = + static_cast(5000 * sample_rate_khz_); // Check if the required packet is available. if (status.target_timestamp == status.next_packet->timestamp) { - return ExpectedPacketAvailable(status.last_mode, status.play_dtmf); - } else if (!PacketBuffer::IsObsoleteTimestamp(status.next_packet->timestamp, - status.target_timestamp, - five_seconds_samples)) { - return FuturePacketAvailable( - status.last_packet_samples, status.last_mode, status.target_timestamp, - status.next_packet->timestamp, status.play_dtmf, - status.generated_noise_samples, status.packet_buffer_info.span_samples, - status.packet_buffer_info.num_packets); - } else { - // This implies that available_timestamp < target_timestamp, which can - // happen when a new stream or codec is received. Signal for a reset. - return NetEq::Operation::kUndefined; + return ExpectedPacketAvailable(status); + } + if (!PacketBuffer::IsObsoleteTimestamp(status.next_packet->timestamp, + status.target_timestamp, + five_seconds_samples)) { + return FuturePacketAvailable(status); } + // This implies that available_timestamp < target_timestamp, which can + // happen when a new stream or codec is received. Signal for a reset. 
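The restructured dispatch above (expected packet, future packet, or kUndefined to force a reset) relies on unsigned 32-bit RTP timestamp arithmetic, as does the signed difference formed in CngOperation further down. A small illustrative sketch of that wraparound-safe comparison, valid when the two timestamps are within half the 32-bit range of each other; this is not the exact PacketBuffer::IsObsoleteTimestamp implementation, only the underlying idiom:

#include <cstdint>
#include <cstdio>

// Signed "a - b" on wrapping 32-bit RTP timestamps: unsigned subtraction wraps
// modulo 2^32, and the reinterpretation as int32_t recovers the signed delta
// as long as the true distance is below 2^31 ticks.
int32_t TimestampDiff(uint32_t a, uint32_t b) {
  return static_cast<int32_t>(a - b);
}

// Illustrative obsolescence test: `timestamp` counts as obsolete when it lies
// more than `horizon_samples` behind `reference`.
bool IsObsolete(uint32_t timestamp, uint32_t reference,
                uint32_t horizon_samples) {
  return TimestampDiff(reference, timestamp) >
         static_cast<int32_t>(horizon_samples);
}

int main() {
  // Works across the 2^32 wrap: 100 is 200 ticks ahead of 0xFFFFFF9C.
  std::printf("%d\n", TimestampDiff(100u, 0xFFFFFF9Cu));       // 200
  std::printf("%d\n", IsObsolete(0xFFFFFF9Cu, 100u, 48000u));  // 0
  std::printf("%d\n", IsObsolete(0u, 480000u, 240000u));       // 1
  return 0;
}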
+ return NetEq::Operation::kUndefined; } -void DecisionLogic::ExpandDecision(NetEq::Operation operation) { - if (operation == NetEq::Operation::kExpand) { - num_consecutive_expands_++; - } else { - num_consecutive_expands_ = 0; +void DecisionLogic::NotifyMutedState() { + ++num_consecutive_expands_; +} + +int DecisionLogic::TargetLevelMs() const { + int target_delay_ms = delay_manager_->TargetDelayMs(); + if (!config_.enable_stable_playout_delay) { + target_delay_ms = + std::max(target_delay_ms, + static_cast(packet_length_samples_ / sample_rate_khz_)); } + return target_delay_ms; +} + +int DecisionLogic::UnlimitedTargetLevelMs() const { + return delay_manager_->UnlimitedTargetLevelMs(); +} + +int DecisionLogic::GetFilteredBufferLevel() const { + if (config_.enable_stable_playout_delay) { + return last_playout_delay_ms_ * sample_rate_khz_; + } + return buffer_level_filter_->filtered_current_level(); } absl::optional DecisionLogic::PacketArrived( @@ -219,11 +243,7 @@ absl::optional DecisionLogic::PacketArrived( bool should_update_stats, const PacketArrivedInfo& info) { buffer_flush_ = buffer_flush_ || info.buffer_flush; - if (info.is_cng_or_dtmf) { - last_pack_cng_or_dtmf_ = true; - return absl::nullopt; - } - if (!should_update_stats) { + if (!should_update_stats || info.is_cng_or_dtmf) { return absl::nullopt; } if (info.packet_length_samples > 0 && fs_hz > 0 && @@ -231,19 +251,26 @@ absl::optional DecisionLogic::PacketArrived( packet_length_samples_ = info.packet_length_samples; delay_manager_->SetPacketAudioLength(packet_length_samples_ * 1000 / fs_hz); } - auto relative_delay = delay_manager_->Update( - info.main_timestamp, fs_hz, /*reset=*/last_pack_cng_or_dtmf_); - last_pack_cng_or_dtmf_ = false; - return relative_delay; + int64_t time_now_ms = tick_timer_->ticks() * tick_timer_->ms_per_tick(); + packet_arrival_history_.Insert(info.main_timestamp, time_now_ms); + if (packet_arrival_history_.size() < 2) { + // No meaningful delay estimate unless at least 2 packets have arrived. + return absl::nullopt; + } + int arrival_delay_ms = + packet_arrival_history_.GetDelayMs(info.main_timestamp, time_now_ms); + bool reordered = + !packet_arrival_history_.IsNewestRtpTimestamp(info.main_timestamp); + delay_manager_->Update(arrival_delay_ms, reordered); + return arrival_delay_ms; } void DecisionLogic::FilterBufferLevel(size_t buffer_size_samples) { - buffer_level_filter_->SetTargetBufferLevel(delay_manager_->TargetDelayMs()); + buffer_level_filter_->SetTargetBufferLevel(TargetLevelMs()); int time_stretched_samples = time_stretched_cn_samples_; if (prev_time_scale_) { time_stretched_samples += sample_memory_; - timescale_countdown_ = tick_timer_->GetNewCountdown(kMinTimescaleInterval); } if (buffer_flush_) { @@ -256,16 +283,14 @@ void DecisionLogic::FilterBufferLevel(size_t buffer_size_samples) { time_stretched_cn_samples_ = 0; } -NetEq::Operation DecisionLogic::CngOperation(NetEq::Mode prev_mode, - uint32_t target_timestamp, - uint32_t available_timestamp, - size_t generated_noise_samples) { +NetEq::Operation DecisionLogic::CngOperation( + NetEqController::NetEqStatus status) { // Signed difference between target and available timestamp. 
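PacketArrived above now feeds a PacketArrivalHistory (modules/audio_coding/neteq/packet_arrival_history.h) with RTP-timestamp/arrival-time pairs and reports an arrival delay plus a reordering flag to the delay manager, replacing the old reset-on-CNG bookkeeping. A deliberately simplified, self-contained sketch of how such a delay can be derived from the history; it assumes a fixed sample rate, uses the earliest/fastest packet as the timing reference, and ignores timestamp wraparound and window pruning, all of which the real class handles:

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <map>

// Minimal arrival-delay estimate: a packet's delay is how much later it
// arrived than it "should" have, measured against the packet that arrived with
// the smallest (arrival time - media time) offset seen so far.
class ArrivalDelaySketch {
 public:
  explicit ArrivalDelaySketch(int sample_rate_hz)
      : sample_rate_khz_(sample_rate_hz / 1000) {}

  void Insert(uint32_t rtp_timestamp, int64_t arrival_time_ms) {
    history_[rtp_timestamp] = arrival_time_ms;
    const int64_t offset_ms =
        arrival_time_ms - rtp_timestamp / sample_rate_khz_;
    if (!have_reference_ || offset_ms < min_offset_ms_) {
      min_offset_ms_ = offset_ms;
      have_reference_ = true;
    }
  }

  // Elapsed wall-clock time minus elapsed media time for `rtp_timestamp`,
  // relative to the fastest packet observed.
  int GetDelayMs(uint32_t rtp_timestamp, int64_t now_ms) const {
    if (!have_reference_) return 0;
    const int64_t expected_ms =
        min_offset_ms_ + rtp_timestamp / sample_rate_khz_;
    return static_cast<int>(now_ms - expected_ms);
  }

  bool IsNewestRtpTimestamp(uint32_t rtp_timestamp) const {
    return !history_.empty() && history_.rbegin()->first == rtp_timestamp;
  }

  size_t size() const { return history_.size(); }

 private:
  const int sample_rate_khz_;
  std::map<uint32_t, int64_t> history_;  // rtp timestamp -> arrival time (ms)
  bool have_reference_ = false;
  int64_t min_offset_ms_ = 0;
};

int main() {
  ArrivalDelaySketch history(48000);            // 48 samples per millisecond
  history.Insert(0, /*arrival_time_ms=*/1000);  // reference packet, "on time"
  history.Insert(960, 1035);                    // 20 ms of media, arrived 35 ms later
  std::printf("delay: %d ms, reordered: %d\n",
              history.GetDelayMs(960, /*now_ms=*/1035),
              !history.IsNewestRtpTimestamp(960));  // delay: 15 ms, reordered: 0
  return 0;
}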
int32_t timestamp_diff = static_cast( - static_cast(generated_noise_samples + target_timestamp) - - available_timestamp); - int optimal_level_samp = - delay_manager_->TargetDelayMs() * sample_rate_ / 1000; + static_cast(status.generated_noise_samples + + status.target_timestamp) - + status.next_packet->timestamp); + int optimal_level_samp = TargetLevelMs() * sample_rate_khz_; const int64_t excess_waiting_time_samp = -static_cast(timestamp_diff) - optimal_level_samp; @@ -279,7 +304,7 @@ NetEq::Operation DecisionLogic::CngOperation(NetEq::Mode prev_mode, rtc::saturated_cast(timestamp_diff + excess_waiting_time_samp); } - if (timestamp_diff < 0 && prev_mode == NetEq::Mode::kRfc3389Cng) { + if (timestamp_diff < 0 && status.last_mode == NetEq::Mode::kRfc3389Cng) { // Not time to play this packet yet. Wait another round before using this // packet. Keep on playing CNG from previous CNG parameters. return NetEq::Operation::kRfc3389CngNoPacket; @@ -290,14 +315,14 @@ NetEq::Operation DecisionLogic::CngOperation(NetEq::Mode prev_mode, } } -NetEq::Operation DecisionLogic::NoPacket(bool play_dtmf) { +NetEq::Operation DecisionLogic::NoPacket(NetEqController::NetEqStatus status) { if (cng_state_ == kCngRfc3389On) { // Keep on playing comfort noise. return NetEq::Operation::kRfc3389CngNoPacket; } else if (cng_state_ == kCngInternalOn) { // Keep on playing codec internal comfort noise. return NetEq::Operation::kCodecInternalCng; - } else if (play_dtmf) { + } else if (status.play_dtmf) { return NetEq::Operation::kDtmf; } else { // Nothing to play, do expand. @@ -305,54 +330,55 @@ NetEq::Operation DecisionLogic::NoPacket(bool play_dtmf) { } } -NetEq::Operation DecisionLogic::ExpectedPacketAvailable(NetEq::Mode prev_mode, - bool play_dtmf) { - if (!disallow_time_stretching_ && prev_mode != NetEq::Mode::kExpand && - !play_dtmf) { - const int samples_per_ms = sample_rate_ / 1000; - const int target_level_samples = - delay_manager_->TargetDelayMs() * samples_per_ms; - const int low_limit = - std::max(target_level_samples * 3 / 4, - target_level_samples - - kDecelerationTargetLevelOffsetMs * samples_per_ms); - // `higher_limit` is equal to `target_level`, but should at - // least be 20 ms higher than `lower_limit`. 
- const int high_limit = - std::max(target_level_samples, low_limit + 20 * samples_per_ms); - - const int buffer_level_samples = - buffer_level_filter_->filtered_current_level(); - if (buffer_level_samples >= high_limit << 2) - return NetEq::Operation::kFastAccelerate; - if (TimescaleAllowed()) { - if (buffer_level_samples >= high_limit) - return NetEq::Operation::kAccelerate; - if (buffer_level_samples < low_limit) - return NetEq::Operation::kPreemptiveExpand; +NetEq::Operation DecisionLogic::ExpectedPacketAvailable( + NetEqController::NetEqStatus status) { + if (!disallow_time_stretching_ && status.last_mode != NetEq::Mode::kExpand && + !status.play_dtmf) { + if (config_.enable_stable_playout_delay) { + const int playout_delay_ms = GetPlayoutDelayMs(status); + if (playout_delay_ms >= HighThreshold() << 2) { + return NetEq::Operation::kFastAccelerate; + } + if (TimescaleAllowed()) { + if (playout_delay_ms >= HighThreshold()) { + return NetEq::Operation::kAccelerate; + } + if (playout_delay_ms < LowThreshold()) { + return NetEq::Operation::kPreemptiveExpand; + } + } + } else { + const int target_level_samples = TargetLevelMs() * sample_rate_khz_; + const int low_limit = std::max( + target_level_samples * 3 / 4, + target_level_samples - + config_.deceleration_target_level_offset_ms * sample_rate_khz_); + const int high_limit = std::max( + target_level_samples, + low_limit + kDelayAdjustmentGranularityMs * sample_rate_khz_); + + const int buffer_level_samples = + buffer_level_filter_->filtered_current_level(); + if (buffer_level_samples >= high_limit << 2) + return NetEq::Operation::kFastAccelerate; + if (TimescaleAllowed()) { + if (buffer_level_samples >= high_limit) + return NetEq::Operation::kAccelerate; + if (buffer_level_samples < low_limit) + return NetEq::Operation::kPreemptiveExpand; + } } } return NetEq::Operation::kNormal; } NetEq::Operation DecisionLogic::FuturePacketAvailable( - size_t decoder_frame_length, - NetEq::Mode prev_mode, - uint32_t target_timestamp, - uint32_t available_timestamp, - bool play_dtmf, - size_t generated_noise_samples, - size_t span_samples_in_packet_buffer, - size_t num_packets_in_packet_buffer) { + NetEqController::NetEqStatus status) { // Required packet is not available, but a future packet is. // Check if we should continue with an ongoing expand because the new packet // is too far into the future. - uint32_t timestamp_leap = available_timestamp - target_timestamp; - if ((prev_mode == NetEq::Mode::kExpand || - prev_mode == NetEq::Mode::kCodecPlc) && - !ReinitAfterExpands(timestamp_leap) && !MaxWaitForPacket() && - PacketTooEarly(timestamp_leap) && UnderTargetLevel()) { - if (play_dtmf) { + if (IsExpand(status.last_mode) && ShouldContinueExpand(status)) { + if (status.play_dtmf) { // Still have DTMF to play, so do not do expand. return NetEq::Operation::kDtmf; } else { @@ -361,61 +387,38 @@ NetEq::Operation DecisionLogic::FuturePacketAvailable( } } - if (prev_mode == NetEq::Mode::kCodecPlc) { + if (status.last_mode == NetEq::Mode::kCodecPlc) { return NetEq::Operation::kNormal; } // If previous was comfort noise, then no merge is needed. - if (prev_mode == NetEq::Mode::kRfc3389Cng || - prev_mode == NetEq::Mode::kCodecInternalCng) { - size_t cur_size_samples = - estimate_dtx_delay_ - ? span_samples_in_packet_buffer - : num_packets_in_packet_buffer * decoder_frame_length; - // Target level is in number of packets in Q8. 
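The non-stable-playout-delay branch of ExpectedPacketAvailable above keeps the classic buffer-level rules: low_limit is the larger of three quarters of the target and target minus the deceleration offset, high_limit is the larger of the target and low_limit plus the adjustment granularity, and fast accelerate triggers at four times the high limit (high_limit << 2 in the code). A sketch of that selection expressed in milliseconds, using the defaults visible in this file (85 ms offset, 20 ms granularity) and omitting the TimescaleAllowed() rate limiting; names are local to the sketch:

#include <algorithm>
#include <cstdio>

enum class Op { kNormal, kAccelerate, kFastAccelerate, kPreemptiveExpand };

// Mirrors the threshold arithmetic above, with levels in ms instead of samples.
Op ChooseTimeStretch(int buffer_level_ms, int target_level_ms,
                     int offset_ms = 85, int granularity_ms = 20) {
  const int low_limit =
      std::max(target_level_ms * 3 / 4, target_level_ms - offset_ms);
  const int high_limit = std::max(target_level_ms, low_limit + granularity_ms);
  if (buffer_level_ms >= high_limit * 4) return Op::kFastAccelerate;
  if (buffer_level_ms >= high_limit) return Op::kAccelerate;
  if (buffer_level_ms < low_limit) return Op::kPreemptiveExpand;
  return Op::kNormal;
}

int main() {
  // A target of 80 ms gives low_limit = 60 ms and high_limit = 80 ms.
  std::printf("%d %d %d %d\n",
              static_cast<int>(ChooseTimeStretch(70, 80)),   // 0: kNormal
              static_cast<int>(ChooseTimeStretch(90, 80)),   // 1: kAccelerate
              static_cast<int>(ChooseTimeStretch(400, 80)),  // 2: kFastAccelerate
              static_cast<int>(ChooseTimeStretch(40, 80)));  // 3: kPreemptiveExpand
  return 0;
}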
- const size_t target_level_samples = - delay_manager_->TargetDelayMs() * sample_rate_ / 1000; + if (IsCng(status.last_mode)) { + uint32_t timestamp_leap = + status.next_packet->timestamp - status.target_timestamp; const bool generated_enough_noise = - static_cast(generated_noise_samples + target_timestamp) >= - available_timestamp; - - if (time_stretch_cn_) { - const size_t target_threshold_samples = - target_level_window_ms_ / 2 * (sample_rate_ / 1000); - const bool above_target_window = - cur_size_samples > target_level_samples + target_threshold_samples; - const bool below_target_window = - target_level_samples > target_threshold_samples && - cur_size_samples < target_level_samples - target_threshold_samples; - // Keep the delay same as before CNG, but make sure that it is within the - // target window. - if ((generated_enough_noise && !below_target_window) || - above_target_window) { - time_stretched_cn_samples_ = timestamp_leap - generated_noise_samples; - return NetEq::Operation::kNormal; - } - } else { - // Keep the same delay as before the CNG, but make sure that the number of - // samples in buffer is no higher than 4 times the optimal level. - if (generated_enough_noise || - cur_size_samples > target_level_samples * 4) { - // Time to play this new packet. - return NetEq::Operation::kNormal; - } + status.generated_noise_samples >= timestamp_leap; + + int playout_delay_ms = GetNextPacketDelayMs(status); + const bool above_target_delay = playout_delay_ms > HighThresholdCng(); + const bool below_target_delay = playout_delay_ms < LowThresholdCng(); + // Keep the delay same as before CNG, but make sure that it is within the + // target window. + if ((generated_enough_noise && !below_target_delay) || above_target_delay) { + time_stretched_cn_samples_ = + timestamp_leap - status.generated_noise_samples; + return NetEq::Operation::kNormal; } - // Too early to play this new packet; keep on playing comfort noise. - if (prev_mode == NetEq::Mode::kRfc3389Cng) { + if (status.last_mode == NetEq::Mode::kRfc3389Cng) { return NetEq::Operation::kRfc3389CngNoPacket; } - // prevPlayMode == kModeCodecInternalCng. return NetEq::Operation::kCodecInternalCng; } // Do not merge unless we have done an expand before. - if (prev_mode == NetEq::Mode::kExpand) { + if (status.last_mode == NetEq::Mode::kExpand) { return NetEq::Operation::kMerge; - } else if (play_dtmf) { + } else if (status.play_dtmf) { // Play DTMF instead of expand. 
return NetEq::Operation::kDtmf; } else { @@ -425,12 +428,12 @@ NetEq::Operation DecisionLogic::FuturePacketAvailable( bool DecisionLogic::UnderTargetLevel() const { return buffer_level_filter_->filtered_current_level() < - delay_manager_->TargetDelayMs() * sample_rate_ / 1000; + TargetLevelMs() * sample_rate_khz_; } bool DecisionLogic::ReinitAfterExpands(uint32_t timestamp_leap) const { - return timestamp_leap >= - static_cast(output_size_samples_ * kReinitAfterExpands); + return timestamp_leap >= static_cast(output_size_samples_ * + config_.reinit_after_expands); } bool DecisionLogic::PacketTooEarly(uint32_t timestamp_leap) const { @@ -439,7 +442,67 @@ bool DecisionLogic::PacketTooEarly(uint32_t timestamp_leap) const { } bool DecisionLogic::MaxWaitForPacket() const { - return num_consecutive_expands_ >= kMaxWaitForPacket; + return num_consecutive_expands_ >= kMaxWaitForPacketTicks; +} + +bool DecisionLogic::ShouldContinueExpand( + NetEqController::NetEqStatus status) const { + uint32_t timestamp_leap = + status.next_packet->timestamp - status.target_timestamp; + if (config_.enable_stable_playout_delay) { + return GetNextPacketDelayMs(status) < HighThreshold() && + PacketTooEarly(timestamp_leap); + } + return !ReinitAfterExpands(timestamp_leap) && !MaxWaitForPacket() && + PacketTooEarly(timestamp_leap) && UnderTargetLevel(); +} + +int DecisionLogic::GetNextPacketDelayMs( + NetEqController::NetEqStatus status) const { + if (config_.enable_stable_playout_delay) { + return packet_arrival_history_.GetDelayMs( + status.next_packet->timestamp, + tick_timer_->ticks() * tick_timer_->ms_per_tick()); + } + return status.packet_buffer_info.span_samples / sample_rate_khz_; +} + +int DecisionLogic::GetPlayoutDelayMs( + NetEqController::NetEqStatus status) const { + uint32_t playout_timestamp = + status.target_timestamp - status.sync_buffer_samples; + return packet_arrival_history_.GetDelayMs( + playout_timestamp, tick_timer_->ticks() * tick_timer_->ms_per_tick()); +} + +int DecisionLogic::LowThreshold() const { + int target_delay_ms = TargetLevelMs(); + return std::max( + target_delay_ms * 3 / 4, + target_delay_ms - config_.deceleration_target_level_offset_ms); +} + +int DecisionLogic::HighThreshold() const { + if (config_.enable_stable_playout_delay) { + return std::max(TargetLevelMs(), packet_arrival_history_.GetMaxDelayMs()) + + kDelayAdjustmentGranularityMs; + } + return std::max(TargetLevelMs(), + LowThreshold() + kDelayAdjustmentGranularityMs); +} + +int DecisionLogic::LowThresholdCng() const { + if (config_.enable_stable_playout_delay) { + return LowThreshold(); + } + return std::max(0, TargetLevelMs() - kTargetLevelWindowMs / 2); +} + +int DecisionLogic::HighThresholdCng() const { + if (config_.enable_stable_playout_delay) { + return HighThreshold(); + } + return TargetLevelMs() + kTargetLevelWindowMs / 2; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h index 693f6169e4..2e55322f8f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h @@ -18,7 +18,7 @@ #include "api/neteq/tick_timer.h" #include "modules/audio_coding/neteq/buffer_level_filter.h" #include "modules/audio_coding/neteq/delay_manager.h" -#include "rtc_base/constructor_magic.h" +#include "modules/audio_coding/neteq/packet_arrival_history.h" #include 
"rtc_base/experiments/field_trial_parser.h" namespace webrtc { @@ -26,10 +26,6 @@ namespace webrtc { // This is the class for the decision tree implementation. class DecisionLogic : public NetEqController { public: - static const int kReinitAfterExpands = 100; - static const int kMaxWaitForPacket = 10; - - // Constructor. DecisionLogic(NetEqController::Config config); DecisionLogic(NetEqController::Config config, std::unique_ptr delay_manager, @@ -37,8 +33,11 @@ class DecisionLogic : public NetEqController { ~DecisionLogic() override; - // Resets object to a clean state. - void Reset() override; + DecisionLogic(const DecisionLogic&) = delete; + DecisionLogic& operator=(const DecisionLogic&) = delete; + + // Not used. + void Reset() override {} // Resets parts of the state. Typically done when switching codecs. void SoftReset() override; @@ -66,16 +65,14 @@ class DecisionLogic : public NetEqController { // Resets the `cng_state_` to kCngOff. void SetCngOff() override { cng_state_ = kCngOff; } - // Reports back to DecisionLogic whether the decision to do expand remains or - // not. Note that this is necessary, since an expand decision can be changed - // to kNormal in NetEqImpl::GetDecision if there is still enough data in the - // sync buffer. - void ExpandDecision(NetEq::Operation operation) override; + void ExpandDecision(NetEq::Operation operation) override {} // Adds `value` to `sample_memory_`. void AddSampleMemory(int32_t value) override { sample_memory_ += value; } - int TargetLevelMs() const override { return delay_manager_->TargetDelayMs(); } + int TargetLevelMs() const override; + + int UnlimitedTargetLevelMs() const override; absl::optional PacketArrived(int fs_hz, bool should_update_stats, @@ -83,7 +80,7 @@ class DecisionLogic : public NetEqController { void RegisterEmptyPacket() override {} - void NotifyMutedState() override {} + void NotifyMutedState() override; bool SetMaximumDelay(int delay_ms) override { return delay_manager_->SetMaximumDelay(delay_ms); @@ -99,9 +96,7 @@ class DecisionLogic : public NetEqController { } bool PeakFound() const override { return false; } - int GetFilteredBufferLevel() const override { - return buffer_level_filter_->filtered_current_level(); - } + int GetFilteredBufferLevel() const override; // Accessors and mutators. void set_sample_memory(int32_t value) override { sample_memory_ = value; } @@ -126,30 +121,20 @@ class DecisionLogic : public NetEqController { // Returns the operation given that the next available packet is a comfort // noise payload (RFC 3389 only, not codec-internal). - virtual NetEq::Operation CngOperation(NetEq::Mode prev_mode, - uint32_t target_timestamp, - uint32_t available_timestamp, - size_t generated_noise_samples); + virtual NetEq::Operation CngOperation(NetEqController::NetEqStatus status); // Returns the operation given that no packets are available (except maybe // a DTMF event, flagged by setting `play_dtmf` true). - virtual NetEq::Operation NoPacket(bool play_dtmf); + virtual NetEq::Operation NoPacket(NetEqController::NetEqStatus status); // Returns the operation to do given that the expected packet is available. - virtual NetEq::Operation ExpectedPacketAvailable(NetEq::Mode prev_mode, - bool play_dtmf); + virtual NetEq::Operation ExpectedPacketAvailable( + NetEqController::NetEqStatus status); // Returns the operation to do given that the expected packet is not // available, but a packet further into the future is at hand. 
virtual NetEq::Operation FuturePacketAvailable( - size_t decoder_frame_length, - NetEq::Mode prev_mode, - uint32_t target_timestamp, - uint32_t available_timestamp, - bool play_dtmf, - size_t generated_noise_samples, - size_t span_samples_in_packet_buffer, - size_t num_packets_in_packet_buffer); + NetEqController::NetEqStatus status); // Checks if enough time has elapsed since the last successful timescale // operation was done (i.e., accelerate or preemptive expand). @@ -169,13 +154,34 @@ class DecisionLogic : public NetEqController { // conveyed in `timestamp_leap`. bool PacketTooEarly(uint32_t timestamp_leap) const; - // Checks if num_consecutive_expands_ >= kMaxWaitForPacket. bool MaxWaitForPacket() const; + bool ShouldContinueExpand(NetEqController::NetEqStatus status) const; + + int GetNextPacketDelayMs(NetEqController::NetEqStatus status) const; + int GetPlayoutDelayMs(NetEqController::NetEqStatus status) const; + + int LowThreshold() const; + int HighThreshold() const; + int LowThresholdCng() const; + int HighThresholdCng() const; + + // Runtime configurable options through field trial + // WebRTC-Audio-NetEqDecisionLogicConfig. + struct Config { + Config(); + + bool enable_stable_playout_delay = false; + int reinit_after_expands = 100; + int deceleration_target_level_offset_ms = 85; + int packet_history_size_ms = 2000; + }; + Config config_; std::unique_ptr delay_manager_; std::unique_ptr buffer_level_filter_; + PacketArrivalHistory packet_arrival_history_; const TickTimer* tick_timer_; - int sample_rate_; + int sample_rate_khz_; size_t output_size_samples_; CngState cng_state_ = kCngOff; // Remember if comfort noise is interrupted by // other event (e.g., DTMF). @@ -187,13 +193,8 @@ class DecisionLogic : public NetEqController { std::unique_ptr timescale_countdown_; int num_consecutive_expands_ = 0; int time_stretched_cn_samples_ = 0; - bool last_pack_cng_or_dtmf_ = true; bool buffer_flush_ = false; - FieldTrialParameter estimate_dtx_delay_; - FieldTrialParameter time_stretch_cn_; - FieldTrialConstrained target_level_window_ms_; - - RTC_DISALLOW_COPY_AND_ASSIGN(DecisionLogic); + int last_playout_delay_ms_ = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decoder_database.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decoder_database.cc index e9176f41f5..3447ced1da 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decoder_database.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decoder_database.cc @@ -18,6 +18,7 @@ #include #include "absl/strings/match.h" +#include "absl/strings/string_view.h" #include "api/audio_codecs/audio_decoder.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -39,7 +40,7 @@ DecoderDatabase::DecoderInfo::DecoderInfo( const SdpAudioFormat& audio_format, absl::optional codec_pair_id, AudioDecoderFactory* factory, - const std::string& codec_name) + absl::string_view codec_name) : name_(codec_name), audio_format_(audio_format), codec_pair_id_(codec_pair_id), @@ -71,14 +72,10 @@ AudioDecoder* DecoderDatabase::DecoderInfo::GetDecoder() const { return decoder_.get(); } -bool DecoderDatabase::DecoderInfo::IsType(const char* name) const { +bool DecoderDatabase::DecoderInfo::IsType(absl::string_view name) const { return absl::EqualsIgnoreCase(audio_format_.name, name); } -bool DecoderDatabase::DecoderInfo::IsType(const std::string& name) const { - return IsType(name.c_str()); -} - absl::optional DecoderDatabase::DecoderInfo::CngDecoder::Create(const 
SdpAudioFormat& format) { if (absl::EqualsIgnoreCase(format.name, "CN")) { @@ -113,12 +110,6 @@ int DecoderDatabase::Size() const { return static_cast(decoders_.size()); } -void DecoderDatabase::Reset() { - decoders_.clear(); - active_decoder_type_ = -1; - active_cng_decoder_type_ = -1; -} - std::vector DecoderDatabase::SetCodecs( const std::map& codecs) { // First collect all payload types that we'll remove or reassign, then remove @@ -263,16 +254,6 @@ AudioDecoder* DecoderDatabase::GetDecoder(uint8_t rtp_payload_type) const { return info ? info->GetDecoder() : nullptr; } -bool DecoderDatabase::IsType(uint8_t rtp_payload_type, const char* name) const { - const DecoderInfo* info = GetDecoderInfo(rtp_payload_type); - return info && info->IsType(name); -} - -bool DecoderDatabase::IsType(uint8_t rtp_payload_type, - const std::string& name) const { - return IsType(rtp_payload_type, name.c_str()); -} - bool DecoderDatabase::IsComfortNoise(uint8_t rtp_payload_type) const { const DecoderInfo* info = GetDecoderInfo(rtp_payload_type); return info && info->IsComfortNoise(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decoder_database.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decoder_database.h index a63a9cff18..8cf2019135 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decoder_database.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decoder_database.h @@ -15,12 +15,12 @@ #include #include +#include "absl/strings/string_view.h" #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_format.h" #include "api/scoped_refptr.h" #include "modules/audio_coding/codecs/cng/webrtc_cng.h" #include "modules/audio_coding/neteq/packet.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -42,7 +42,7 @@ class DecoderDatabase { DecoderInfo(const SdpAudioFormat& audio_format, absl::optional codec_pair_id, AudioDecoderFactory* factory, - const std::string& codec_name); + absl::string_view codec_name); explicit DecoderInfo(const SdpAudioFormat& audio_format, absl::optional codec_pair_id, AudioDecoderFactory* factory = nullptr); @@ -81,9 +81,7 @@ class DecoderDatabase { bool IsRed() const { return subtype_ == Subtype::kRed; } // Returns true if the decoder's format is named `name`. - bool IsType(const char* name) const; - // Returns true if the decoder's format is named `name`. - bool IsType(const std::string& name) const; + bool IsType(absl::string_view name) const; const std::string& get_name() const { return name_; } @@ -122,17 +120,15 @@ class DecoderDatabase { virtual ~DecoderDatabase(); + DecoderDatabase(const DecoderDatabase&) = delete; + DecoderDatabase& operator=(const DecoderDatabase&) = delete; + // Returns true if the database is empty. virtual bool Empty() const; // Returns the number of decoders registered in the database. virtual int Size() const; - // Resets the database, erasing all registered payload types, and deleting - // any AudioDecoder objects that were not externally created and inserted - // using InsertExternal(). - virtual void Reset(); - // Replaces the existing set of decoders with the given set. Returns the // payload types that were reassigned or removed while doing so. virtual std::vector SetCodecs( @@ -180,12 +176,6 @@ class DecoderDatabase { // object does not exist for that decoder, the object is created. AudioDecoder* GetDecoder(uint8_t rtp_payload_type) const; - // Returns if `rtp_payload_type` is registered with a format named `name`. 
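// --- Illustration only (not part of the diff): why the const char* and
// std::string overloads of IsType() collapse into a single absl::string_view
// overload. string_view binds to both caller types, and absl::EqualsIgnoreCase
// already performs the case-insensitive comparison used above.
#include <string>
#include "absl/strings/match.h"
#include "absl/strings/string_view.h"

bool IsTypeSketch(absl::string_view registered_name, absl::string_view name) {
  return absl::EqualsIgnoreCase(registered_name, name);
}

void IsTypeSketchDemo() {
  std::string from_string = "opus";
  bool a = IsTypeSketch("OPUS", from_string);  // std::string caller.
  bool b = IsTypeSketch("OPUS", "opus");       // const char* caller.
  (void)a;
  (void)b;
}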
- bool IsType(uint8_t rtp_payload_type, const char* name) const; - - // Returns if `rtp_payload_type` is registered with a format named `name`. - bool IsType(uint8_t rtp_payload_type, const std::string& name) const; - // Returns true if `rtp_payload_type` is registered as comfort noise. bool IsComfortNoise(uint8_t rtp_payload_type) const; @@ -208,8 +198,6 @@ class DecoderDatabase { mutable std::unique_ptr active_cng_decoder_; rtc::scoped_refptr decoder_factory_; const absl::optional codec_pair_id_; - - RTC_DISALLOW_COPY_AND_ASSIGN(DecoderDatabase); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.cc index 9f6b269b03..bf3a0f18a1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.cc @@ -51,7 +51,6 @@ DelayManager::Config::Config() { "forget_factor", &forget_factor, // "start_forget_weight", &start_forget_weight, // "resample_interval_ms", &resample_interval_ms, // - "max_history_ms", &max_history_ms, // "use_reorder_optimizer", &use_reorder_optimizer, // "reorder_forget_factor", &reorder_forget_factor, // "ms_per_loss_percent", &ms_per_loss_percent) @@ -66,7 +65,6 @@ void DelayManager::Config::Log() { << " start_forget_weight=" << start_forget_weight.value_or(0) << " resample_interval_ms=" << resample_interval_ms.value_or(0) - << " max_history_ms=" << max_history_ms << " use_reorder_optimizer=" << use_reorder_optimizer << " reorder_forget_factor=" << reorder_forget_factor << " ms_per_loss_percent=" << ms_per_loss_percent; @@ -80,7 +78,6 @@ DelayManager::DelayManager(const Config& config, const TickTimer* tick_timer) config.start_forget_weight, config.resample_interval_ms), reorder_optimizer_(MaybeCreateReorderOptimizer(config)), - relative_arrival_delay_tracker_(tick_timer, config.max_history_ms), base_minimum_delay_ms_(config.base_minimum_delay_ms), effective_minimum_delay_ms_(config.base_minimum_delay_ms), minimum_delay_ms_(0), @@ -93,46 +90,29 @@ DelayManager::DelayManager(const Config& config, const TickTimer* tick_timer) DelayManager::~DelayManager() {} -absl::optional DelayManager::Update(uint32_t timestamp, - int sample_rate_hz, - bool reset) { - if (reset) { - relative_arrival_delay_tracker_.Reset(); - } - absl::optional relative_delay = - relative_arrival_delay_tracker_.Update(timestamp, sample_rate_hz); - if (!relative_delay) { - return absl::nullopt; - } - - bool reordered = - relative_arrival_delay_tracker_.newest_timestamp() != timestamp; +void DelayManager::Update(int arrival_delay_ms, bool reordered) { if (!reorder_optimizer_ || !reordered) { - underrun_optimizer_.Update(*relative_delay); + underrun_optimizer_.Update(arrival_delay_ms); } target_level_ms_ = underrun_optimizer_.GetOptimalDelayMs().value_or(kStartDelayMs); if (reorder_optimizer_) { - reorder_optimizer_->Update(*relative_delay, reordered, target_level_ms_); + reorder_optimizer_->Update(arrival_delay_ms, reordered, target_level_ms_); target_level_ms_ = std::max( target_level_ms_, reorder_optimizer_->GetOptimalDelayMs().value_or(0)); } + unlimited_target_level_ms_ = target_level_ms_; target_level_ms_ = std::max(target_level_ms_, effective_minimum_delay_ms_); if (maximum_delay_ms_ > 0) { target_level_ms_ = std::min(target_level_ms_, maximum_delay_ms_); } if (packet_len_ms_ > 0) { - // Target level should be at least one packet. 
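// --- A minimal standalone sketch (illustration only, not part of the diff) of
// the clamping order in the new DelayManager::Update(arrival_delay_ms,
// reordered). The optimizer outputs and start delay are passed in as plain
// parameters here; the real code obtains them from UnderrunOptimizer,
// ReorderOptimizer and kStartDelayMs, and only feeds the underrun optimizer
// when the packet was not reordered.
#include <algorithm>
#include "absl/types/optional.h"

struct TargetLevelsSketch {
  int unlimited_ms;  // What the new UnlimitedTargetLevelMs() reports.
  int limited_ms;    // What TargetDelayMs() reports after min/max clamping.
};

TargetLevelsSketch ComputeTargetSketch(absl::optional<int> underrun_optimal_ms,
                                       absl::optional<int> reorder_optimal_ms,
                                       int start_delay_ms,
                                       int effective_minimum_delay_ms,
                                       int maximum_delay_ms,
                                       int packet_len_ms,
                                       int max_packets_in_buffer) {
  int target = underrun_optimal_ms.value_or(start_delay_ms);
  if (reorder_optimal_ms) {
    target = std::max(target, reorder_optimal_ms.value_or(0));
  }
  TargetLevelsSketch out;
  out.unlimited_ms = target;  // Captured before any min/max limiting.
  target = std::max(target, effective_minimum_delay_ms);
  if (maximum_delay_ms > 0) {
    target = std::min(target, maximum_delay_ms);
  }
  if (packet_len_ms > 0) {
    // Limit to 75% of the maximum buffer size, as in the patched code.
    target = std::min(target, 3 * max_packets_in_buffer * packet_len_ms / 4);
  }
  out.limited_ms = target;
  return out;
}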
- target_level_ms_ = std::max(target_level_ms_, packet_len_ms_); // Limit to 75% of maximum buffer size. target_level_ms_ = std::min( target_level_ms_, 3 * max_packets_in_buffer_ * packet_len_ms_ / 4); } - - return relative_delay; } - int DelayManager::SetPacketAudioLength(int length_ms) { if (length_ms <= 0) { RTC_LOG_F(LS_ERROR) << "length_ms = " << length_ms; @@ -145,7 +125,6 @@ int DelayManager::SetPacketAudioLength(int length_ms) { void DelayManager::Reset() { packet_len_ms_ = 0; underrun_optimizer_.Reset(); - relative_arrival_delay_tracker_.Reset(); target_level_ms_ = kStartDelayMs; if (reorder_optimizer_) { reorder_optimizer_->Reset(); @@ -156,6 +135,10 @@ int DelayManager::TargetDelayMs() const { return target_level_ms_; } +int DelayManager::UnlimitedTargetLevelMs() const { + return unlimited_target_level_ms_; +} + bool DelayManager::IsValidMinimumDelay(int delay_ms) const { return 0 <= delay_ms && delay_ms <= MinimumDelayUpperBound(); } @@ -178,8 +161,7 @@ bool DelayManager::SetMinimumDelay(int delay_ms) { bool DelayManager::SetMaximumDelay(int delay_ms) { // If `delay_ms` is zero then it unsets the maximum delay and target level is // unconstrained by maximum delay. - if (delay_ms != 0 && - (delay_ms < minimum_delay_ms_ || delay_ms < packet_len_ms_)) { + if (delay_ms != 0 && delay_ms < minimum_delay_ms_) { // Maximum delay shouldn't be less than minimum delay or less than a packet. return false; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.h index 410aa94b61..a333681535 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.h @@ -19,10 +19,8 @@ #include "absl/types/optional.h" #include "api/neteq/tick_timer.h" #include "modules/audio_coding/neteq/histogram.h" -#include "modules/audio_coding/neteq/relative_arrival_delay_tracker.h" #include "modules/audio_coding/neteq/reorder_optimizer.h" #include "modules/audio_coding/neteq/underrun_optimizer.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -37,7 +35,6 @@ class DelayManager { double forget_factor = 0.983; absl::optional start_forget_weight = 2; absl::optional resample_interval_ms = 500; - int max_history_ms = 2000; bool use_reorder_optimizer = true; double reorder_forget_factor = 0.9993; @@ -52,21 +49,27 @@ class DelayManager { virtual ~DelayManager(); - // Updates the delay manager with a new incoming packet, with `timestamp` from - // the RTP header. This updates the statistics and a new target buffer level - // is calculated. Returns the relative delay if it can be calculated. If - // `reset` is true, restarts the relative arrival delay calculation from this - // packet. - virtual absl::optional Update(uint32_t timestamp, - int sample_rate_hz, - bool reset = false); + DelayManager(const DelayManager&) = delete; + DelayManager& operator=(const DelayManager&) = delete; + + // Updates the delay manager that a new packet arrived with delay + // `arrival_delay_ms`. This updates the statistics and a new target buffer + // level is calculated. The `reordered` flag indicates if the packet was + // reordered. + virtual void Update(int arrival_delay_ms, bool reordered); // Resets all state. virtual void Reset(); - // Gets the target buffer level in milliseconds. + // Gets the target buffer level in milliseconds. 
If a minimum or maximum delay + // has been set, the target delay reported here also respects the configured + // min/max delay. virtual int TargetDelayMs() const; + // Reports the target delay that would be used if no minimum/maximum delay + // would be set. + virtual int UnlimitedTargetLevelMs() const; + // Notifies the DelayManager of how much audio data is carried in each packet. virtual int SetPacketAudioLength(int length_ms); @@ -103,7 +106,6 @@ class DelayManager { const int max_packets_in_buffer_; UnderrunOptimizer underrun_optimizer_; std::unique_ptr reorder_optimizer_; - RelativeArrivalDelayTracker relative_arrival_delay_tracker_; int base_minimum_delay_ms_; int effective_minimum_delay_ms_; // Used as lower bound for target delay. @@ -111,9 +113,8 @@ class DelayManager { int maximum_delay_ms_; // Externally set maximum allowed delay. int packet_len_ms_ = 0; - int target_level_ms_; // Currently preferred buffer level. - - RTC_DISALLOW_COPY_AND_ASSIGN(DelayManager); + int target_level_ms_ = 0; // Currently preferred buffer level. + int unlimited_target_level_ms_ = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dsp_helper.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dsp_helper.h index 7bdeba6ec0..4aead7df18 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dsp_helper.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dsp_helper.h @@ -16,7 +16,6 @@ #include "modules/audio_coding/neteq/audio_multi_vector.h" #include "modules/audio_coding/neteq/audio_vector.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -150,11 +149,12 @@ class DspHelper { bool compensate_delay, int16_t* output); + DspHelper(const DspHelper&) = delete; + DspHelper& operator=(const DspHelper&) = delete; + private: // Table of constants used in method DspHelper::ParabolicFit(). static const int16_t kParabolaCoefficients[17][3]; - - RTC_DISALLOW_COPY_AND_ASSIGN(DspHelper); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_buffer.h index 9209cae864..62b751525c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_buffer.h @@ -16,8 +16,6 @@ #include -#include "rtc_base/constructor_magic.h" - namespace webrtc { struct DtmfEvent { @@ -50,6 +48,9 @@ class DtmfBuffer { virtual ~DtmfBuffer(); + DtmfBuffer(const DtmfBuffer&) = delete; + DtmfBuffer& operator=(const DtmfBuffer&) = delete; + // Flushes the buffer. 
virtual void Flush(); @@ -97,8 +98,6 @@ class DtmfBuffer { static bool CompareEvents(const DtmfEvent& a, const DtmfEvent& b); DtmfList buffer_; - - RTC_DISALLOW_COPY_AND_ASSIGN(DtmfBuffer); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h index 968bc7f8c7..35114f4f49 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h @@ -15,7 +15,6 @@ #include #include "modules/audio_coding/neteq/audio_multi_vector.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -29,6 +28,10 @@ class DtmfToneGenerator { DtmfToneGenerator(); virtual ~DtmfToneGenerator() {} + + DtmfToneGenerator(const DtmfToneGenerator&) = delete; + DtmfToneGenerator& operator=(const DtmfToneGenerator&) = delete; + virtual int Init(int fs, int event, int attenuation); virtual void Reset(); virtual int Generate(size_t num_samples, AudioMultiVector* output); @@ -48,8 +51,6 @@ class DtmfToneGenerator { int amplitude_; // Amplitude for this event. int16_t sample_history1_[2]; // Last 2 samples for the 1st oscillator. int16_t sample_history2_[2]; // Last 2 samples for the 2nd oscillator. - - RTC_DISALLOW_COPY_AND_ASSIGN(DtmfToneGenerator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand.h index 2d22b11289..2e64583ec2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand.h @@ -15,7 +15,6 @@ #include #include "modules/audio_coding/neteq/audio_vector.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -41,6 +40,9 @@ class Expand { virtual ~Expand(); + Expand(const Expand&) = delete; + Expand& operator=(const Expand&) = delete; + // Resets the object. 
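// --- Illustration only (not part of the diff): the pattern applied throughout
// this patch, where RTC_DISALLOW_COPY_AND_ASSIGN(X) in a private section is
// replaced by explicitly deleted copy operations declared in the class.
class NonCopyableSketch {
 public:
  NonCopyableSketch() = default;

  NonCopyableSketch(const NonCopyableSketch&) = delete;
  NonCopyableSketch& operator=(const NonCopyableSketch&) = delete;
};
// NonCopyableSketch a;
// NonCopyableSketch b = a;  // Fails to compile: deleted copy constructor.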
virtual void Reset(); @@ -134,8 +136,6 @@ class Expand { bool stop_muting_; size_t expand_duration_samples_; std::unique_ptr channel_parameters_; - - RTC_DISALLOW_COPY_AND_ASSIGN(Expand); }; struct ExpandFactory { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand_uma_logger.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand_uma_logger.cc index 5db6d21306..a91358b489 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand_uma_logger.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand_uma_logger.cc @@ -9,6 +9,7 @@ #include "modules/audio_coding/neteq/expand_uma_logger.h" +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" #include "system_wrappers/include/metrics.h" @@ -22,7 +23,7 @@ std::unique_ptr GetNewCountdown( } } // namespace -ExpandUmaLogger::ExpandUmaLogger(std::string uma_name, +ExpandUmaLogger::ExpandUmaLogger(absl::string_view uma_name, int logging_period_s, const TickTimer* tick_timer) : uma_name_(uma_name), diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand_uma_logger.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand_uma_logger.h index 246aaffd4f..cc5c20a886 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand_uma_logger.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand_uma_logger.h @@ -15,9 +15,9 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/neteq/tick_timer.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -30,12 +30,15 @@ namespace webrtc { // object that outlives the one constructed. class ExpandUmaLogger { public: - ExpandUmaLogger(std::string uma_name, + ExpandUmaLogger(absl::string_view uma_name, int logging_period_s, const TickTimer* tick_timer); ~ExpandUmaLogger(); + ExpandUmaLogger(const ExpandUmaLogger&) = delete; + ExpandUmaLogger& operator=(const ExpandUmaLogger&) = delete; + // In this call, value should be an incremental sample counter. The sample // rate must be strictly positive. void UpdateSampleCounter(uint64_t value, int sample_rate_hz); @@ -48,8 +51,6 @@ class ExpandUmaLogger { absl::optional last_logged_value_; uint64_t last_value_ = 0; int sample_rate_hz_ = 0; - - RTC_DISALLOW_COPY_AND_ASSIGN(ExpandUmaLogger); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/merge.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/merge.h index 13aa31df8e..2f27106bfe 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/merge.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/merge.h @@ -12,7 +12,6 @@ #define MODULES_AUDIO_CODING_NETEQ_MERGE_H_ #include "modules/audio_coding/neteq/audio_multi_vector.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -36,6 +35,9 @@ class Merge { SyncBuffer* sync_buffer); virtual ~Merge(); + Merge(const Merge&) = delete; + Merge& operator=(const Merge&) = delete; + // The main method to produce the audio data. The decoded data is supplied in // `input`, having `input_length` samples in total for all channels // (interleaved). The result is written to `output`. 
The number of channels @@ -93,8 +95,6 @@ class Merge { int16_t input_downsampled_[kInputDownsampLength]; AudioMultiVector expanded_; std::vector temp_data_; - - RTC_DISALLOW_COPY_AND_ASSIGN(Merge); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h index b8dc031fa4..2394120e99 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h @@ -27,7 +27,6 @@ class MockDecoderDatabase : public DecoderDatabase { MOCK_METHOD(void, Die, ()); MOCK_METHOD(bool, Empty, (), (const, override)); MOCK_METHOD(int, Size, (), (const, override)); - MOCK_METHOD(void, Reset, (), (override)); MOCK_METHOD(int, RegisterPayload, (int rtp_payload_type, const SdpAudioFormat& audio_format), diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/nack_tracker.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/nack_tracker.cc index 35afb736c8..04cc5b52e8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/nack_tracker.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/nack_tracker.cc @@ -225,8 +225,12 @@ int64_t NackTracker::TimeToPlay(uint32_t timestamp) const { std::vector NackTracker::GetNackList(int64_t round_trip_time_ms) { RTC_DCHECK_GE(round_trip_time_ms, 0); std::vector sequence_numbers; - if (config_.require_valid_rtt && round_trip_time_ms == 0) { - return sequence_numbers; + if (round_trip_time_ms == 0) { + if (config_.require_valid_rtt) { + return sequence_numbers; + } else { + round_trip_time_ms = config_.default_rtt_ms; + } } if (packet_loss_rate_ > static_cast(config_.max_loss_rate * (1 << 30))) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/nack_tracker.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/nack_tracker.h index 0cc95b0882..14ba2166d1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/nack_tracker.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/nack_tracker.h @@ -111,6 +111,8 @@ class NackTracker { bool never_nack_multiple_times = false; // Only nack if the RTT is valid. bool require_valid_rtt = false; + // Default RTT to use unless `require_valid_rtt` is set. + int default_rtt_ms = 100; // Do not nack if the loss rate is above this value. 
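// --- A minimal standalone sketch (illustration only, not part of the diff) of
// the new RTT fallback in NackTracker::GetNackList(): a zero round-trip time
// either disables NACK (require_valid_rtt) or falls back to the new
// default_rtt_ms config value, which defaults to 100 ms.
#include <cstdint>
#include <vector>

std::vector<uint16_t> GetNackListSketch(int64_t round_trip_time_ms,
                                        bool require_valid_rtt,
                                        int default_rtt_ms) {
  std::vector<uint16_t> sequence_numbers;
  if (round_trip_time_ms == 0) {
    if (require_valid_rtt) {
      return sequence_numbers;  // No NACKs without a valid RTT estimate.
    }
    round_trip_time_ms = default_rtt_ms;  // New fallback path.
  }
  // ... the real implementation goes on to build the list using
  // round_trip_time_ms and the configured loss-rate limit.
  return sequence_numbers;
}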
double max_loss_rate = 1.0; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.cc index 30886c3ace..6a6367d045 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.cc @@ -476,11 +476,6 @@ std::vector NetEqImpl::GetNackList(int64_t round_trip_time_ms) const { return nack_->GetNackList(round_trip_time_ms); } -std::vector NetEqImpl::LastDecodedTimestamps() const { - MutexLock lock(&mutex_); - return last_decoded_timestamps_; -} - int NetEqImpl::SyncBufferSizeMs() const { MutexLock lock(&mutex_); return rtc::dchecked_cast(sync_buffer_->FutureLength() / @@ -779,7 +774,6 @@ int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame, Operation operation; bool play_dtmf; *muted = false; - last_decoded_timestamps_.clear(); last_decoded_packet_infos_.clear(); tick_timer_->Increment(); stats_->IncreaseCounter(output_size_samples_, fs_hz_); @@ -1455,6 +1449,7 @@ int NetEqImpl::DecodeCng(AudioDecoder* decoder, return kDecodedTooMuch; } } + stats_->GeneratedNoiseSamples(*decoded_length); return 0; } @@ -1463,7 +1458,6 @@ int NetEqImpl::DecodeLoop(PacketList* packet_list, AudioDecoder* decoder, int* decoded_length, AudioDecoder::SpeechType* speech_type) { - RTC_DCHECK(last_decoded_timestamps_.empty()); RTC_DCHECK(last_decoded_packet_infos_.empty()); // Do decoding. @@ -1483,7 +1477,6 @@ int NetEqImpl::DecodeLoop(PacketList* packet_list, auto opt_result = packet_list->front().frame->Decode( rtc::ArrayView(&decoded_buffer_[*decoded_length], decoded_buffer_length_ - *decoded_length)); - last_decoded_timestamps_.push_back(packet_list->front().timestamp); last_decoded_packet_infos_.push_back( std::move(packet_list->front().packet_info)); packet_list->pop_front(); @@ -2019,7 +2012,8 @@ int NetEqImpl::ExtractPackets(size_t required_samples, RTC_DCHECK(controller_); stats_->JitterBufferDelay(packet_duration, waiting_time_ms, - controller_->TargetLevelMs()); + controller_->TargetLevelMs(), + controller_->UnlimitedTargetLevelMs()); packet_list->push_back(std::move(*packet)); // Store packet in list. packet = absl::nullopt; // Ensure it's never used after the move. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.h index 2522e31a39..6120eab5b6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.h @@ -29,7 +29,6 @@ #include "modules/audio_coding/neteq/packet.h" #include "modules/audio_coding/neteq/random_vector.h" #include "modules/audio_coding/neteq/statistics_calculator.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -124,6 +123,9 @@ class NetEqImpl : public webrtc::NetEq { ~NetEqImpl() override; + NetEqImpl(const NetEqImpl&) = delete; + NetEqImpl& operator=(const NetEqImpl&) = delete; + // Inserts a new packet into NetEq. Returns 0 on success, -1 on failure. int InsertPacket(const RTPHeader& rtp_header, rtc::ArrayView payload) override; @@ -192,8 +194,6 @@ class NetEqImpl : public webrtc::NetEq { std::vector GetNackList(int64_t round_trip_time_ms) const override; - std::vector LastDecodedTimestamps() const override; - int SyncBufferSizeMs() const override; // This accessor method is only intended for testing purposes. 
@@ -393,15 +393,11 @@ class NetEqImpl : public webrtc::NetEq { AudioFrame::kVadPassive; std::unique_ptr generated_noise_stopwatch_ RTC_GUARDED_BY(mutex_); - std::vector last_decoded_timestamps_ RTC_GUARDED_BY(mutex_); std::vector last_decoded_packet_infos_ RTC_GUARDED_BY(mutex_); ExpandUmaLogger expand_uma_logger_ RTC_GUARDED_BY(mutex_); ExpandUmaLogger speech_expand_uma_logger_ RTC_GUARDED_BY(mutex_); bool no_time_stretching_ RTC_GUARDED_BY(mutex_); // Only used for test. rtc::BufferT concealment_audio_ RTC_GUARDED_BY(mutex_); - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(NetEqImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.cc index 6ffae0975f..461ee7fa4a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.cc @@ -159,7 +159,7 @@ int Normal::Process(const int16_t* input, if (cng_decoder) { // Generate long enough for 48kHz. - if (!cng_decoder->Generate(cng_output, 0)) { + if (!cng_decoder->Generate(cng_output, false)) { // Error returned; set return vector to all zeros. memset(cng_output, 0, sizeof(cng_output)); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.h index 3607208f11..772293b605 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.h @@ -17,7 +17,6 @@ #include "api/neteq/neteq.h" #include "modules/audio_coding/neteq/statistics_calculator.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/numerics/safe_conversions.h" namespace webrtc { @@ -49,6 +48,9 @@ class Normal { virtual ~Normal() {} + Normal(const Normal&) = delete; + Normal& operator=(const Normal&) = delete; + // Performs the "Normal" operation. The decoder data is supplied in `input`, // having `length` samples in total for all channels (interleaved). The // result is written to `output`. The number of channels allocated in @@ -68,8 +70,6 @@ class Normal { const size_t samples_per_ms_; const int16_t default_win_slope_Q14_; StatisticsCalculator* const statistics_; - - RTC_DISALLOW_COPY_AND_ASSIGN(Normal); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_arrival_history.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_arrival_history.cc new file mode 100644 index 0000000000..7196a6e393 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_arrival_history.cc @@ -0,0 +1,107 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_coding/neteq/packet_arrival_history.h" + +#include + +#include "api/neteq/tick_timer.h" +#include "modules/include/module_common_types_public.h" + +namespace webrtc { + +PacketArrivalHistory::PacketArrivalHistory(int window_size_ms) + : window_size_ms_(window_size_ms) {} + +void PacketArrivalHistory::Insert(uint32_t rtp_timestamp, + int64_t arrival_time_ms) { + RTC_DCHECK(sample_rate_khz_ > 0); + int64_t unwrapped_rtp_timestamp = timestamp_unwrapper_.Unwrap(rtp_timestamp); + if (!newest_rtp_timestamp_ || + unwrapped_rtp_timestamp > *newest_rtp_timestamp_) { + newest_rtp_timestamp_ = unwrapped_rtp_timestamp; + } + history_.emplace_back(unwrapped_rtp_timestamp / sample_rate_khz_, + arrival_time_ms); + MaybeUpdateCachedArrivals(history_.back()); + while (history_.front().rtp_timestamp_ms + window_size_ms_ < + unwrapped_rtp_timestamp / sample_rate_khz_) { + if (&history_.front() == min_packet_arrival_) { + min_packet_arrival_ = nullptr; + } + if (&history_.front() == max_packet_arrival_) { + max_packet_arrival_ = nullptr; + } + history_.pop_front(); + } + if (!min_packet_arrival_ || !max_packet_arrival_) { + for (const PacketArrival& packet : history_) { + MaybeUpdateCachedArrivals(packet); + } + } +} + +void PacketArrivalHistory::MaybeUpdateCachedArrivals( + const PacketArrival& packet_arrival) { + if (!min_packet_arrival_ || packet_arrival <= *min_packet_arrival_) { + min_packet_arrival_ = &packet_arrival; + } + if (!max_packet_arrival_ || packet_arrival >= *max_packet_arrival_) { + max_packet_arrival_ = &packet_arrival; + } +} + +void PacketArrivalHistory::Reset() { + history_.clear(); + min_packet_arrival_ = nullptr; + max_packet_arrival_ = nullptr; + timestamp_unwrapper_ = TimestampUnwrapper(); + newest_rtp_timestamp_ = absl::nullopt; +} + +int PacketArrivalHistory::GetDelayMs(uint32_t rtp_timestamp, + int64_t time_ms) const { + RTC_DCHECK(sample_rate_khz_ > 0); + int64_t unwrapped_rtp_timestamp_ms = + timestamp_unwrapper_.UnwrapWithoutUpdate(rtp_timestamp) / + sample_rate_khz_; + PacketArrival packet(unwrapped_rtp_timestamp_ms, time_ms); + return GetPacketArrivalDelayMs(packet); +} + +int PacketArrivalHistory::GetMaxDelayMs() const { + if (!max_packet_arrival_) { + return 0; + } + return GetPacketArrivalDelayMs(*max_packet_arrival_); +} + +bool PacketArrivalHistory::IsNewestRtpTimestamp(uint32_t rtp_timestamp) const { + if (!newest_rtp_timestamp_) { + return false; + } + int64_t unwrapped_rtp_timestamp = + timestamp_unwrapper_.UnwrapWithoutUpdate(rtp_timestamp); + return unwrapped_rtp_timestamp == *newest_rtp_timestamp_; +} + +int PacketArrivalHistory::GetPacketArrivalDelayMs( + const PacketArrival& packet_arrival) const { + if (!min_packet_arrival_) { + return 0; + } + return std::max(static_cast(packet_arrival.arrival_time_ms - + min_packet_arrival_->arrival_time_ms - + (packet_arrival.rtp_timestamp_ms - + min_packet_arrival_->rtp_timestamp_ms)), + 0); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_arrival_history.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_arrival_history.h new file mode 100644 index 0000000000..79fc9176bc --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_arrival_history.h @@ -0,0 +1,82 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_CODING_NETEQ_PACKET_ARRIVAL_HISTORY_H_ +#define MODULES_AUDIO_CODING_NETEQ_PACKET_ARRIVAL_HISTORY_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/neteq/tick_timer.h" +#include "modules/include/module_common_types_public.h" + +namespace webrtc { + +// Stores timing information about previously received packets. +// The history has a fixed window size beyond which old data is automatically +// pruned. +class PacketArrivalHistory { + public: + explicit PacketArrivalHistory(int window_size_ms); + + // Insert packet with `rtp_timestamp` and `arrival_time_ms` into the history. + void Insert(uint32_t rtp_timestamp, int64_t arrival_time_ms); + + // The delay for `rtp_timestamp` at `time_ms` is calculated as + // `(time_ms - p.arrival_time_ms) - (rtp_timestamp - p.rtp_timestamp)` + // where `p` is chosen as the packet arrival in the history that maximizes the + // delay. + int GetDelayMs(uint32_t rtp_timestamp, int64_t time_ms) const; + + // Get the maximum packet arrival delay observed in the history. + int GetMaxDelayMs() const; + + bool IsNewestRtpTimestamp(uint32_t rtp_timestamp) const; + + void Reset(); + + void set_sample_rate(int sample_rate) { + sample_rate_khz_ = sample_rate / 1000; + } + + size_t size() const { return history_.size(); } + + private: + struct PacketArrival { + PacketArrival(int64_t rtp_timestamp_ms, int64_t arrival_time_ms) + : rtp_timestamp_ms(rtp_timestamp_ms), + arrival_time_ms(arrival_time_ms) {} + int64_t rtp_timestamp_ms; + int64_t arrival_time_ms; + bool operator<=(const PacketArrival& other) const { + return arrival_time_ms - rtp_timestamp_ms <= + other.arrival_time_ms - other.rtp_timestamp_ms; + } + bool operator>=(const PacketArrival& other) const { + return arrival_time_ms - rtp_timestamp_ms >= + other.arrival_time_ms - other.rtp_timestamp_ms; + } + }; + std::deque history_; + int GetPacketArrivalDelayMs(const PacketArrival& packet_arrival) const; + // Updates `min_packet_arrival_` and `max_packet_arrival_`. + void MaybeUpdateCachedArrivals(const PacketArrival& packet); + const PacketArrival* min_packet_arrival_ = nullptr; + const PacketArrival* max_packet_arrival_ = nullptr; + const int window_size_ms_; + TimestampUnwrapper timestamp_unwrapper_; + absl::optional newest_rtp_timestamp_; + int sample_rate_khz_ = 0; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_CODING_NETEQ_PACKET_ARRIVAL_HISTORY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.h index 20a053323a..c6fb47ffbf 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.h @@ -15,7 +15,6 @@ #include "modules/audio_coding/neteq/decoder_database.h" #include "modules/audio_coding/neteq/packet.h" #include "modules/include/module_common_types_public.h" // IsNewerTimestamp -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -51,6 +50,9 @@ class PacketBuffer { // Deletes all packets in the buffer before destroying the buffer. virtual ~PacketBuffer(); + PacketBuffer(const PacketBuffer&) = delete; + PacketBuffer& operator=(const PacketBuffer&) = delete; + // Flushes the buffer and deletes all packets in it. 
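// --- A minimal standalone sketch (illustration only, not part of the diff) of
// the delay definition used by the new PacketArrivalHistory: a packet's delay
// is measured against the history entry with the smallest
// (arrival_time_ms - rtp_timestamp_ms), clamped at zero.
#include <algorithm>
#include <cstdint>
#include <cstdio>

struct PacketArrivalSketch {
  int64_t rtp_timestamp_ms;  // RTP timestamp already converted to ms.
  int64_t arrival_time_ms;
};

int DelayMsSketch(const PacketArrivalSketch& packet,
                  const PacketArrivalSketch& reference) {
  int64_t diff_ms = packet.arrival_time_ms - reference.arrival_time_ms -
                    (packet.rtp_timestamp_ms - reference.rtp_timestamp_ms);
  return std::max(static_cast<int>(diff_ms), 0);
}

int main() {
  PacketArrivalSketch reference{/*rtp_timestamp_ms=*/0, /*arrival_time_ms=*/10};
  PacketArrivalSketch late{/*rtp_timestamp_ms=*/40, /*arrival_time_ms=*/65};
  // 65 - 10 - (40 - 0) = 15 ms of delay relative to the reference packet.
  std::printf("%d\n", DelayMsSketch(late, reference));
  return 0;
}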
virtual void Flush(StatisticsCalculator* stats); @@ -173,7 +175,6 @@ class PacketBuffer { size_t max_number_of_packets_; PacketList buffer_; const TickTimer* tick_timer_; - RTC_DISALLOW_COPY_AND_ASSIGN(PacketBuffer); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/post_decode_vad.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/post_decode_vad.h index 3134d5f3a9..3bd91b9edb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/post_decode_vad.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/post_decode_vad.h @@ -16,7 +16,6 @@ #include "api/audio_codecs/audio_decoder.h" #include "common_audio/vad/include/webrtc_vad.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -31,6 +30,9 @@ class PostDecodeVad { virtual ~PostDecodeVad(); + PostDecodeVad(const PostDecodeVad&) = delete; + PostDecodeVad& operator=(const PostDecodeVad&) = delete; + // Enables post-decode VAD. void Enable(); @@ -63,8 +65,6 @@ class PostDecodeVad { bool active_speech_; int sid_interval_counter_; ::VadInst* vad_instance_; - - RTC_DISALLOW_COPY_AND_ASSIGN(PostDecodeVad); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/preemptive_expand.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/preemptive_expand.h index 708ebfd1bd..6338b993fd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/preemptive_expand.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/preemptive_expand.h @@ -15,7 +15,6 @@ #include #include "modules/audio_coding/neteq/time_stretch.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -36,6 +35,9 @@ class PreemptiveExpand : public TimeStretch { old_data_length_per_channel_(0), overlap_samples_(overlap_samples) {} + PreemptiveExpand(const PreemptiveExpand&) = delete; + PreemptiveExpand& operator=(const PreemptiveExpand&) = delete; + // This method performs the actual PreemptiveExpand operation. The samples are // read from `input`, of length `input_length` elements, and are written to // `output`. The number of samples added through time-stretching is @@ -67,8 +69,6 @@ class PreemptiveExpand : public TimeStretch { private: size_t old_data_length_per_channel_; size_t overlap_samples_; - - RTC_DISALLOW_COPY_AND_ASSIGN(PreemptiveExpand); }; struct PreemptiveExpandFactory { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/random_vector.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/random_vector.h index 1d3760055b..4a782f1116 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/random_vector.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/random_vector.h @@ -14,8 +14,6 @@ #include #include -#include "rtc_base/constructor_magic.h" - namespace webrtc { // This class generates pseudo-random samples. 
@@ -26,6 +24,9 @@ class RandomVector { RandomVector() : seed_(777), seed_increment_(1) {} + RandomVector(const RandomVector&) = delete; + RandomVector& operator=(const RandomVector&) = delete; + void Reset(); void Generate(size_t length, int16_t* output); @@ -39,8 +40,6 @@ class RandomVector { private: uint32_t seed_; int16_t seed_increment_; - - RTC_DISALLOW_COPY_AND_ASSIGN(RandomVector); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.cc index 7438f25301..cec9f2f8a0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.cc @@ -136,9 +136,9 @@ bool RedPayloadSplitter::SplitRed(PacketList* packet_list) { /*ssrc=*/red_packet.packet_info.ssrc(), /*csrcs=*/std::vector(), /*rtp_timestamp=*/new_packet.timestamp, - red_packet.packet_info.audio_level(), - /*absolute_capture_time=*/absl::nullopt, /*receive_time=*/red_packet.packet_info.receive_time()); + new_packet.packet_info.set_audio_level( + red_packet.packet_info.audio_level()); new_packets.push_front(std::move(new_packet)); payload_ptr += payload_length; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.h index 55660913d5..2f48e4b7d4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.h @@ -12,7 +12,6 @@ #define MODULES_AUDIO_CODING_NETEQ_RED_PAYLOAD_SPLITTER_H_ #include "modules/audio_coding/neteq/packet.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -30,6 +29,9 @@ class RedPayloadSplitter { virtual ~RedPayloadSplitter() {} + RedPayloadSplitter(const RedPayloadSplitter&) = delete; + RedPayloadSplitter& operator=(const RedPayloadSplitter&) = delete; + // Splits each packet in `packet_list` into its separate RED payloads. Each // RED payload is packetized into a Packet. The original elements in // `packet_list` are properly deleted, and replaced by the new packets. @@ -43,9 +45,6 @@ class RedPayloadSplitter { // is accepted. Any packet with another payload type is discarded. virtual void CheckRedPayloads(PacketList* packet_list, const DecoderDatabase& decoder_database); - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(RedPayloadSplitter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/relative_arrival_delay_tracker.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/relative_arrival_delay_tracker.cc deleted file mode 100644 index b50ac80bab..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/relative_arrival_delay_tracker.cc +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_coding/neteq/relative_arrival_delay_tracker.h" - -#include - -#include "modules/include/module_common_types_public.h" - -namespace webrtc { - -absl::optional RelativeArrivalDelayTracker::Update(uint32_t timestamp, - int sample_rate_hz) { - if (sample_rate_hz <= 0) { - return absl::nullopt; - } - if (!last_timestamp_) { - // Restart relative delay esimation from this packet. - delay_history_.clear(); - packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch(); - newest_timestamp_ = timestamp; - last_timestamp_ = timestamp; - return absl::nullopt; - } - - const int expected_iat_ms = - 1000ll * static_cast(timestamp - *last_timestamp_) / - sample_rate_hz; - const int iat_ms = packet_iat_stopwatch_->ElapsedMs(); - const int iat_delay_ms = iat_ms - expected_iat_ms; - UpdateDelayHistory(iat_delay_ms, timestamp, sample_rate_hz); - int relative_delay = CalculateRelativePacketArrivalDelay(); - - packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch(); - last_timestamp_ = timestamp; - if (IsNewerTimestamp(timestamp, *newest_timestamp_)) { - newest_timestamp_ = timestamp; - } - - return relative_delay; -} - -void RelativeArrivalDelayTracker::Reset() { - delay_history_.clear(); - packet_iat_stopwatch_.reset(); - newest_timestamp_ = absl::nullopt; - last_timestamp_ = absl::nullopt; -} - -void RelativeArrivalDelayTracker::UpdateDelayHistory(int iat_delay_ms, - uint32_t timestamp, - int sample_rate_hz) { - PacketDelay delay; - delay.iat_delay_ms = iat_delay_ms; - delay.timestamp = timestamp; - delay_history_.push_back(delay); - while (static_cast(timestamp - delay_history_.front().timestamp) > - max_history_ms_ * sample_rate_hz / 1000) { - delay_history_.pop_front(); - } -} - -int RelativeArrivalDelayTracker::CalculateRelativePacketArrivalDelay() const { - // This effectively calculates arrival delay of a packet relative to the - // packet preceding the history window. If the arrival delay ever becomes - // smaller than zero, it means the reference packet is invalid, and we - // move the reference. - int relative_delay = 0; - for (const PacketDelay& delay : delay_history_) { - relative_delay += delay.iat_delay_ms; - relative_delay = std::max(relative_delay, 0); - } - return relative_delay; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/relative_arrival_delay_tracker.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/relative_arrival_delay_tracker.h deleted file mode 100644 index fed56be2b6..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/relative_arrival_delay_tracker.h +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_CODING_NETEQ_RELATIVE_ARRIVAL_DELAY_TRACKER_H_ -#define MODULES_AUDIO_CODING_NETEQ_RELATIVE_ARRIVAL_DELAY_TRACKER_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/neteq/tick_timer.h" - -namespace webrtc { - -class RelativeArrivalDelayTracker { - public: - RelativeArrivalDelayTracker(const TickTimer* tick_timer, int max_history_ms) - : tick_timer_(tick_timer), max_history_ms_(max_history_ms) {} - - absl::optional Update(uint32_t timestamp, int sample_rate_hz); - - void Reset(); - - absl::optional newest_timestamp() const { - return newest_timestamp_; - } - - private: - // Updates `delay_history_`. - void UpdateDelayHistory(int iat_delay_ms, - uint32_t timestamp, - int sample_rate_hz); - - // Calculate relative packet arrival delay from `delay_history_`. - int CalculateRelativePacketArrivalDelay() const; - - const TickTimer* tick_timer_; - const int max_history_ms_; - - struct PacketDelay { - int iat_delay_ms; - uint32_t timestamp; - }; - std::deque delay_history_; - - absl::optional newest_timestamp_; - absl::optional last_timestamp_; - - std::unique_ptr - packet_iat_stopwatch_; // Time elapsed since last packet. -}; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_NETEQ_RELATIVE_ARRIVAL_DELAY_TRACKER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.cc index 8e281302b7..52d3fa90f1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.cc @@ -14,6 +14,7 @@ #include +#include "absl/strings/string_view.h" #include "modules/audio_coding/neteq/delay_manager.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" @@ -38,7 +39,7 @@ constexpr int kInterruptionLenMs = 150; const size_t StatisticsCalculator::kLenWaitingTimes; StatisticsCalculator::PeriodicUmaLogger::PeriodicUmaLogger( - const std::string& uma_name, + absl::string_view uma_name, int report_interval_ms, int max_value) : uma_name_(uma_name), @@ -64,7 +65,7 @@ void StatisticsCalculator::PeriodicUmaLogger::LogToUma(int value) const { } StatisticsCalculator::PeriodicUmaCount::PeriodicUmaCount( - const std::string& uma_name, + absl::string_view uma_name, int report_interval_ms, int max_value) : PeriodicUmaLogger(uma_name, report_interval_ms, max_value) {} @@ -87,7 +88,7 @@ void StatisticsCalculator::PeriodicUmaCount::Reset() { } StatisticsCalculator::PeriodicUmaAverage::PeriodicUmaAverage( - const std::string& uma_name, + absl::string_view uma_name, int report_interval_ms, int max_value) : PeriodicUmaLogger(uma_name, report_interval_ms, max_value) {} @@ -230,8 +231,12 @@ void StatisticsCalculator::AcceleratedSamples(size_t num_samples) { lifetime_stats_.removed_samples_for_acceleration += num_samples; } +void StatisticsCalculator::GeneratedNoiseSamples(size_t num_samples) { + lifetime_stats_.generated_noise_samples += num_samples; +} + void StatisticsCalculator::PacketsDiscarded(size_t num_packets) { - operations_and_state_.discarded_primary_packets += num_packets; + lifetime_stats_.packets_discarded += num_packets; } void StatisticsCalculator::SecondaryPacketsDiscarded(size_t num_packets) { @@ -257,12 +262,16 @@ void StatisticsCalculator::IncreaseCounter(size_t num_samples, int fs_hz) { lifetime_stats_.total_samples_received += num_samples; } -void StatisticsCalculator::JitterBufferDelay(size_t 
num_samples, - uint64_t waiting_time_ms, - uint64_t target_delay_ms) { +void StatisticsCalculator::JitterBufferDelay( + size_t num_samples, + uint64_t waiting_time_ms, + uint64_t target_delay_ms, + uint64_t unlimited_target_delay_ms) { lifetime_stats_.jitter_buffer_delay_ms += waiting_time_ms * num_samples; lifetime_stats_.jitter_buffer_target_delay_ms += target_delay_ms * num_samples; + lifetime_stats_.jitter_buffer_minimum_delay_ms += + unlimited_target_delay_ms * num_samples; lifetime_stats_.jitter_buffer_emitted_count += num_samples; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.h index 5c3fb75d1b..33a22d02dd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.h @@ -14,8 +14,8 @@ #include #include +#include "absl/strings/string_view.h" #include "api/neteq/neteq.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -28,6 +28,9 @@ class StatisticsCalculator { virtual ~StatisticsCalculator(); + StatisticsCalculator(const StatisticsCalculator&) = delete; + StatisticsCalculator& operator=(const StatisticsCalculator&) = delete; + // Resets most of the counters. void Reset(); @@ -62,6 +65,9 @@ class StatisticsCalculator { // Reports that `num_samples` samples were removed through accelerate. void AcceleratedSamples(size_t num_samples); + // Reports that `num_samples` comfort noise samples were generated. + void GeneratedNoiseSamples(size_t num_samples); + // Reports that `num_packets` packets were discarded. virtual void PacketsDiscarded(size_t num_packets); @@ -79,7 +85,8 @@ class StatisticsCalculator { // Update jitter buffer delay counter. void JitterBufferDelay(size_t num_samples, uint64_t waiting_time_ms, - uint64_t target_delay_ms); + uint64_t target_delay_ms, + uint64_t unlimited_target_delay_ms); // Stores new packet waiting time in waiting time statistics. 
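// --- A minimal standalone sketch (illustration only, not part of the diff) of
// the extended JitterBufferDelay() accumulation: each emitted sample now also
// contributes the "unlimited" target delay (the target before min/max
// clamping) to a separate lifetime counter.
#include <cstddef>
#include <cstdint>

struct LifetimeStatsSketch {
  uint64_t jitter_buffer_delay_ms = 0;
  uint64_t jitter_buffer_target_delay_ms = 0;
  uint64_t jitter_buffer_minimum_delay_ms = 0;
  uint64_t jitter_buffer_emitted_count = 0;
};

void JitterBufferDelaySketch(LifetimeStatsSketch& stats,
                             size_t num_samples,
                             uint64_t waiting_time_ms,
                             uint64_t target_delay_ms,
                             uint64_t unlimited_target_delay_ms) {
  stats.jitter_buffer_delay_ms += waiting_time_ms * num_samples;
  stats.jitter_buffer_target_delay_ms += target_delay_ms * num_samples;
  stats.jitter_buffer_minimum_delay_ms +=
      unlimited_target_delay_ms * num_samples;
  stats.jitter_buffer_emitted_count += num_samples;
}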
void StoreWaitingTime(int waiting_time_ms); @@ -120,7 +127,7 @@ class StatisticsCalculator { class PeriodicUmaLogger { public: - PeriodicUmaLogger(const std::string& uma_name, + PeriodicUmaLogger(absl::string_view uma_name, int report_interval_ms, int max_value); virtual ~PeriodicUmaLogger(); @@ -139,7 +146,7 @@ class StatisticsCalculator { class PeriodicUmaCount final : public PeriodicUmaLogger { public: - PeriodicUmaCount(const std::string& uma_name, + PeriodicUmaCount(absl::string_view uma_name, int report_interval_ms, int max_value); ~PeriodicUmaCount() override; @@ -155,7 +162,7 @@ class StatisticsCalculator { class PeriodicUmaAverage final : public PeriodicUmaLogger { public: - PeriodicUmaAverage(const std::string& uma_name, + PeriodicUmaAverage(absl::string_view uma_name, int report_interval_ms, int max_value); ~PeriodicUmaAverage() override; @@ -197,8 +204,6 @@ class StatisticsCalculator { PeriodicUmaAverage excess_buffer_delay_; PeriodicUmaCount buffer_full_counter_; bool decoded_output_played_ = false; - - RTC_DISALLOW_COPY_AND_ASSIGN(StatisticsCalculator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/sync_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/sync_buffer.h index 7d24730cb3..cf56c432e3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/sync_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/sync_buffer.h @@ -20,7 +20,6 @@ #include "modules/audio_coding/neteq/audio_multi_vector.h" #include "modules/audio_coding/neteq/audio_vector.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -32,6 +31,9 @@ class SyncBuffer : public AudioMultiVector { end_timestamp_(0), dtmf_index_(0) {} + SyncBuffer(const SyncBuffer&) = delete; + SyncBuffer& operator=(const SyncBuffer&) = delete; + // Returns the number of samples yet to play out from the buffer. size_t FutureLength() const; @@ -102,8 +104,6 @@ class SyncBuffer : public AudioMultiVector { size_t next_index_; uint32_t end_timestamp_; // The timestamp of the last sample in the buffer. size_t dtmf_index_; // Index to the first non-DTMF sample in the buffer. - - RTC_DISALLOW_COPY_AND_ASSIGN(SyncBuffer); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/time_stretch.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/time_stretch.h index 998d080714..f0ddaebeca 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/time_stretch.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/time_stretch.h @@ -14,7 +14,6 @@ #include // memset, size_t #include "modules/audio_coding/neteq/audio_multi_vector.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -49,6 +48,9 @@ class TimeStretch { virtual ~TimeStretch() {} + TimeStretch(const TimeStretch&) = delete; + TimeStretch& operator=(const TimeStretch&) = delete; + // This method performs the processing common to both Accelerate and // PreemptiveExpand. 
ReturnCodes Process(const int16_t* input, @@ -105,8 +107,6 @@ class TimeStretch { int32_t vec2_energy, size_t peak_index, int scaling) const; - - RTC_DISALLOW_COPY_AND_ASSIGN(TimeStretch); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/timestamp_scaler.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/timestamp_scaler.h index 4d578fc433..f42ce7207a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/timestamp_scaler.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/timestamp_scaler.h @@ -12,7 +12,6 @@ #define MODULES_AUDIO_CODING_NETEQ_TIMESTAMP_SCALER_H_ #include "modules/audio_coding/neteq/packet.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -34,6 +33,9 @@ class TimestampScaler { virtual ~TimestampScaler() {} + TimestampScaler(const TimestampScaler&) = delete; + TimestampScaler& operator=(const TimestampScaler&) = delete; + // Start over. virtual void Reset(); @@ -59,8 +61,6 @@ class TimestampScaler { uint32_t external_ref_; uint32_t internal_ref_; const DecoderDatabase& decoder_database_; - - RTC_DISALLOW_COPY_AND_ASSIGN(TimestampScaler); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_player.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_player.cc index 5257b2ba1b..81e5bf5427 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_player.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_player.cc @@ -13,6 +13,7 @@ #include #include "api/array_view.h" +#include "api/task_queue/task_queue_base.h" #include "modules/audio_device/android/audio_manager.h" #include "modules/audio_device/fine_audio_buffer.h" #include "rtc_base/checks.h" @@ -20,12 +21,8 @@ namespace webrtc { -enum AudioDeviceMessageType : uint32_t { - kMessageOutputStreamDisconnected, -}; - AAudioPlayer::AAudioPlayer(AudioManager* audio_manager) - : main_thread_(rtc::Thread::Current()), + : main_thread_(TaskQueueBase::Current()), aaudio_(audio_manager, AAUDIO_DIRECTION_OUTPUT, this) { RTC_LOG(LS_INFO) << "ctor"; thread_checker_aaudio_.Detach(); @@ -147,7 +144,7 @@ void AAudioPlayer::OnErrorCallback(aaudio_result_t error) { // from the callback, use another thread instead". A message is therefore // sent to the main thread to do the restart operation. 
RTC_DCHECK(main_thread_); - main_thread_->Post(RTC_FROM_HERE, this, kMessageOutputStreamDisconnected); + main_thread_->PostTask([this] { HandleStreamDisconnected(); }); } } @@ -204,15 +201,6 @@ aaudio_data_callback_result_t AAudioPlayer::OnDataCallback(void* audio_data, return AAUDIO_CALLBACK_RESULT_CONTINUE; } -void AAudioPlayer::OnMessage(rtc::Message* msg) { - RTC_DCHECK_RUN_ON(&main_thread_checker_); - switch (msg->message_id) { - case kMessageOutputStreamDisconnected: - HandleStreamDisconnected(); - break; - } -} - void AAudioPlayer::HandleStreamDisconnected() { RTC_DCHECK_RUN_ON(&main_thread_checker_); RTC_DLOG(LS_INFO) << "HandleStreamDisconnected"; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_player.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_player.h index 4bf3ee3bc0..ea5d578092 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_player.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_player.h @@ -16,10 +16,9 @@ #include #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "modules/audio_device/android/aaudio_wrapper.h" #include "modules/audio_device/include/audio_device_defines.h" -#include "rtc_base/message_handler.h" -#include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -48,8 +47,7 @@ class AudioManager; // where the internal AAudio buffer can be increased when needed. It will // reduce the risk of underruns (~glitches) at the expense of an increased // latency. -class AAudioPlayer final : public AAudioObserverInterface, - public rtc::MessageHandler { +class AAudioPlayer final : public AAudioObserverInterface { public: explicit AAudioPlayer(AudioManager* audio_manager); ~AAudioPlayer(); @@ -85,10 +83,6 @@ class AAudioPlayer final : public AAudioObserverInterface, // Called on a real-time thread owned by AAudio. void OnErrorCallback(aaudio_result_t error) override; - // rtc::MessageHandler used for restart messages from the error-callback - // thread to the main (creating) thread. - void OnMessage(rtc::Message* msg) override; - private: // Closes the existing stream and starts a new stream. void HandleStreamDisconnected(); @@ -102,8 +96,8 @@ class AAudioPlayer final : public AAudioObserverInterface, // object. SequenceChecker thread_checker_aaudio_; - // The thread on which this object is created on. - rtc::Thread* main_thread_; + // The task queue on which this object is created on. + TaskQueueBase* main_thread_; // Wraps all AAudio resources. Contains an output stream using the default // output audio device. 
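Above, AAudioPlayer stops inheriting rtc::MessageHandler: instead of posting a numeric message id and switching on it in OnMessage(), the error callback now posts a lambda to the task queue the object was created on (TaskQueueBase::Current() at construction, PostTask() later). A simplified, self-contained sketch of that pattern, using a hand-rolled single-thread queue as a stand-in for the WebRTC task-queue API:

#include <chrono>
#include <condition_variable>
#include <functional>
#include <iostream>
#include <mutex>
#include <queue>
#include <thread>

// Stand-in for a task queue owned by the "main" thread.
class SimpleTaskQueue {
 public:
  SimpleTaskQueue() : worker_([this] { Run(); }) {}
  ~SimpleTaskQueue() {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      done_ = true;
    }
    cv_.notify_one();
    worker_.join();
  }

  void PostTask(std::function<void()> task) {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      tasks_.push(std::move(task));
    }
    cv_.notify_one();
  }

 private:
  void Run() {
    for (;;) {
      std::function<void()> task;
      {
        std::unique_lock<std::mutex> lock(mutex_);
        cv_.wait(lock, [this] { return done_ || !tasks_.empty(); });
        if (tasks_.empty()) return;  // Only reached once done_ is set.
        task = std::move(tasks_.front());
        tasks_.pop();
      }
      task();  // Runs on the queue's own thread.
    }
  }

  std::mutex mutex_;
  std::condition_variable cv_;
  std::queue<std::function<void()>> tasks_;
  bool done_ = false;
  std::thread worker_;
};

// The error callback (which may fire on a real-time audio thread) does not
// restart the stream itself; it posts a lambda back to the owning queue.
class Player {
 public:
  explicit Player(SimpleTaskQueue* main_queue) : main_queue_(main_queue) {}

  void OnErrorCallback() {
    main_queue_->PostTask([this] { HandleStreamDisconnected(); });
  }

 private:
  void HandleStreamDisconnected() {
    std::cout << "restarting stream on the owning task queue\n";
  }

  SimpleTaskQueue* const main_queue_;
};

int main() {
  SimpleTaskQueue queue;
  Player player(&queue);
  player.OnErrorCallback();  // Could be called from any thread.
  // Give the queue time to run the posted task before tear-down.
  std::this_thread::sleep_for(std::chrono::milliseconds(50));
  return 0;
}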
Can be accessed on both the main thread and the diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.cc index 4757cf8cf0..21e5dd8a74 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.cc @@ -13,6 +13,7 @@ #include #include "api/array_view.h" +#include "api/task_queue/task_queue_base.h" #include "modules/audio_device/android/audio_manager.h" #include "modules/audio_device/fine_audio_buffer.h" #include "rtc_base/checks.h" @@ -21,12 +22,8 @@ namespace webrtc { -enum AudioDeviceMessageType : uint32_t { - kMessageInputStreamDisconnected, -}; - AAudioRecorder::AAudioRecorder(AudioManager* audio_manager) - : main_thread_(rtc::Thread::Current()), + : main_thread_(TaskQueueBase::Current()), aaudio_(audio_manager, AAUDIO_DIRECTION_INPUT, this) { RTC_LOG(LS_INFO) << "ctor"; thread_checker_aaudio_.Detach(); @@ -142,7 +139,7 @@ void AAudioRecorder::OnErrorCallback(aaudio_result_t error) { // from the callback, use another thread instead". A message is therefore // sent to the main thread to do the restart operation. RTC_DCHECK(main_thread_); - main_thread_->Post(RTC_FROM_HERE, this, kMessageInputStreamDisconnected); + main_thread_->PostTask([this] { HandleStreamDisconnected(); }); } } @@ -190,18 +187,6 @@ aaudio_data_callback_result_t AAudioRecorder::OnDataCallback( return AAUDIO_CALLBACK_RESULT_CONTINUE; } -void AAudioRecorder::OnMessage(rtc::Message* msg) { - RTC_DCHECK_RUN_ON(&thread_checker_); - switch (msg->message_id) { - case kMessageInputStreamDisconnected: - HandleStreamDisconnected(); - break; - default: - RTC_LOG(LS_ERROR) << "Invalid message id: " << msg->message_id; - break; - } -} - void AAudioRecorder::HandleStreamDisconnected() { RTC_DCHECK_RUN_ON(&thread_checker_); RTC_LOG(LS_INFO) << "HandleStreamDisconnected"; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.h index d0ad6be43d..6df7eed076 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.h @@ -16,10 +16,9 @@ #include #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "modules/audio_device/android/aaudio_wrapper.h" #include "modules/audio_device/include/audio_device_defines.h" -#include "rtc_base/message_handler.h" -#include "rtc_base/thread.h" namespace webrtc { @@ -41,8 +40,7 @@ class AudioManager; // // TODO(henrika): add comments about device changes and adaptive buffer // management. -class AAudioRecorder : public AAudioObserverInterface, - public rtc::MessageHandler { +class AAudioRecorder : public AAudioObserverInterface { public: explicit AAudioRecorder(AudioManager* audio_manager); ~AAudioRecorder(); @@ -79,9 +77,6 @@ class AAudioRecorder : public AAudioObserverInterface, // Called on a real-time thread owned by AAudio. void OnErrorCallback(aaudio_result_t error) override; - // rtc::MessageHandler used for restart messages. - void OnMessage(rtc::Message* msg) override; - private: // Closes the existing stream and starts a new stream. 
void HandleStreamDisconnected(); @@ -96,7 +91,7 @@ class AAudioRecorder : public AAudioObserverInterface, SequenceChecker thread_checker_aaudio_; // The thread on which this object is created on. - rtc::Thread* main_thread_; + TaskQueueBase* main_thread_; // Wraps all AAudio resources. Contains an input stream using the default // input audio device. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_merged_screen_record_jni.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_merged_screen_record_jni.cc index b71b50867a..abef1b7cfd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_merged_screen_record_jni.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_merged_screen_record_jni.cc @@ -16,7 +16,6 @@ #include "modules/audio_device/android/audio_common.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/format_macros.h" #include "rtc_base/logging.h" #include "rtc_base/platform_thread.h" #include "rtc_base/time_utils.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_record_jni.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_record_jni.cc index 57eb3135d5..c221e82761 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_record_jni.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_record_jni.cc @@ -16,7 +16,6 @@ #include "modules/audio_device/android/audio_common.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/format_macros.h" #include "rtc_base/logging.h" #include "rtc_base/platform_thread.h" #include "rtc_base/time_utils.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_screen_record_jni.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_screen_record_jni.cc index 14376994ad..cbaecd0944 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_screen_record_jni.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_screen_record_jni.cc @@ -16,7 +16,6 @@ #include "modules/audio_device/android/audio_common.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/format_macros.h" #include "rtc_base/logging.h" #include "rtc_base/platform_thread.h" #include "rtc_base/time_utils.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_track_jni.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_track_jni.cc index 178ccadfdb..2b78653c24 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_track_jni.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_track_jni.cc @@ -15,7 +15,6 @@ #include "modules/audio_device/android/audio_manager.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/format_macros.h" #include "rtc_base/logging.h" #include "rtc_base/platform_thread.h" #include "system_wrappers/include/field_trial.h" @@ -177,7 +176,7 @@ int32_t AudioTrackJni::StartPlayout() { int32_t AudioTrackJni::StopPlayout() { RTC_LOG(LS_INFO) << "StopPlayout"; RTC_DCHECK(thread_checker_.IsCurrent()); - if (!initialized_ || !playing_) { + if (!initialized_ || !playing_ || j_audio_track_ == nullptr) { return 0; } if (!j_audio_track_->StopPlayout()) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/ensure_initialized.cc 
b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/ensure_initialized.cc index 37086cc9eb..59e9c8f7a6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/ensure_initialized.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/ensure_initialized.cc @@ -10,19 +10,13 @@ #include "modules/audio_device/android/ensure_initialized.h" +#include #include +#include -#include "rtc_base/ignore_wundef.h" - -// Note: this dependency is dangerous since it reaches into Chromium's base. -// There's a risk of e.g. macro clashes. This file may only be used in tests. -RTC_PUSH_IGNORING_WUNDEF() -#include "base/android/jni_android.h" -RTC_POP_IGNORING_WUNDEF() -#include "modules/audio_device/android/audio_record_jni.h" -#include "modules/audio_device/android/audio_track_jni.h" #include "modules/utility/include/jvm_android.h" #include "rtc_base/checks.h" +#include "sdk/android/src/jni/jvm.h" namespace webrtc { namespace audiodevicemodule { @@ -30,8 +24,9 @@ namespace audiodevicemodule { static pthread_once_t g_initialize_once = PTHREAD_ONCE_INIT; void EnsureInitializedOnce() { - RTC_CHECK(::base::android::IsVMInitialized()); - JNIEnv* jni = ::base::android::AttachCurrentThread(); + RTC_CHECK(::webrtc::jni::GetJVM() != nullptr); + + JNIEnv* jni = ::webrtc::jni::AttachCurrentThreadIfNeeded(); JavaVM* jvm = NULL; RTC_CHECK_EQ(0, jni->GetJavaVM(&jvm)); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_player.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_player.cc index b5851f7582..f2b3a37194 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_player.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_player.cc @@ -20,7 +20,6 @@ #include "modules/audio_device/fine_audio_buffer.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/format_macros.h" #include "rtc_base/platform_thread.h" #include "rtc_base/time_utils.h" @@ -193,7 +192,7 @@ void OpenSLESPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { ALOGD("SetPlayoutSampleRate(%d)", sample_rate_hz); audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz); const size_t channels = audio_parameters_.channels(); - ALOGD("SetPlayoutChannels(%" RTC_PRIuS ")", channels); + ALOGD("SetPlayoutChannels(%zu)", channels); audio_device_buffer_->SetPlayoutChannels(channels); RTC_CHECK(audio_device_buffer_); AllocateDataBuffers(); @@ -214,7 +213,7 @@ void OpenSLESPlayer::AllocateDataBuffers() { // which reduces jitter. 
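Several of the Android audio files above drop rtc_base/format_macros.h and replace the RTC_PRIuS macro with the standard C99/C++11 %zu length modifier for size_t. A tiny standalone illustration:

#include <cstddef>
#include <cstdio>

int main() {
  std::size_t buffer_size_in_samples = 480;
  // %zu is the portable printf conversion for size_t, so no project-specific
  // format macro is needed.
  std::printf("native buffer size: %zu\n", buffer_size_in_samples);
  return 0;
}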
const size_t buffer_size_in_samples = audio_parameters_.frames_per_buffer() * audio_parameters_.channels(); - ALOGD("native buffer size: %" RTC_PRIuS, buffer_size_in_samples); + ALOGD("native buffer size: %zu", buffer_size_in_samples); ALOGD("native buffer size in ms: %.2f", audio_parameters_.GetBufferSizeInMilliseconds()); fine_audio_buffer_ = std::make_unique(audio_device_buffer_); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_recorder.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_recorder.cc index 8becd202cc..4e0c26dbf0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_recorder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_recorder.cc @@ -20,7 +20,6 @@ #include "modules/audio_device/fine_audio_buffer.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/format_macros.h" #include "rtc_base/platform_thread.h" #include "rtc_base/time_utils.h" @@ -178,7 +177,7 @@ void OpenSLESRecorder::AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) { // Ensure that the audio device buffer is informed about the number of // channels preferred by the OS on the recording side. const size_t channels = audio_parameters_.channels(); - ALOGD("SetRecordingChannels(%" RTC_PRIuS ")", channels); + ALOGD("SetRecordingChannels(%zu)", channels); audio_device_buffer_->SetRecordingChannels(channels); // Allocated memory for internal data buffers given existing audio parameters. AllocateDataBuffers(); @@ -334,12 +333,10 @@ void OpenSLESRecorder::AllocateDataBuffers() { // Create a modified audio buffer class which allows us to deliver any number // of samples (and not only multiple of 10ms) to match the native audio unit // buffer size. 
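The OpenSLES hunks above log the native buffer size both in samples and in milliseconds. The arithmetic behind those two quantities is frames x channels for the sample count and frames / sample rate for the duration; a small sketch with made-up values (the variable names are illustrative, not the patch's AudioParameters API):

#include <cstdio>

int main() {
  // Illustrative values: 48 kHz stereo with 192 frames per native buffer.
  const int sample_rate_hz = 48000;
  const int channels = 2;
  const int frames_per_buffer = 192;

  // One frame holds one sample per channel.
  const int buffer_size_in_samples = frames_per_buffer * channels;

  // Duration of one native buffer in milliseconds.
  const double buffer_size_ms =
      1000.0 * frames_per_buffer / static_cast<double>(sample_rate_hz);

  std::printf("native buffer size: %d samples (%.2f ms)\n",
              buffer_size_in_samples, buffer_size_ms);
  return 0;
}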
- ALOGD("frames per native buffer: %" RTC_PRIuS, - audio_parameters_.frames_per_buffer()); - ALOGD("frames per 10ms buffer: %" RTC_PRIuS, + ALOGD("frames per native buffer: %zu", audio_parameters_.frames_per_buffer()); + ALOGD("frames per 10ms buffer: %zu", audio_parameters_.frames_per_10ms_buffer()); - ALOGD("bytes per native buffer: %" RTC_PRIuS, - audio_parameters_.GetBytesPerBuffer()); + ALOGD("bytes per native buffer: %zu", audio_parameters_.GetBytesPerBuffer()); ALOGD("native sample rate: %d", audio_parameters_.sample_rate()); RTC_DCHECK(audio_device_buffer_); fine_audio_buffer_ = std::make_unique(audio_device_buffer_); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.cc index d393a88770..6232a93d8f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.cc @@ -20,6 +20,7 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" +#include "rtc_base/trace_event.h" #include "system_wrappers/include/metrics.h" namespace webrtc { @@ -54,6 +55,7 @@ AudioDeviceBuffer::AudioDeviceBuffer(TaskQueueFactory* task_queue_factory) typing_status_(false), play_delay_ms_(0), rec_delay_ms_(0), + capture_timestamp_ns_(0), num_stat_reports_(0), last_timer_task_time_(0), rec_stat_count_(0), @@ -229,6 +231,12 @@ void AudioDeviceBuffer::SetVQEData(int play_delay_ms, int rec_delay_ms) { int32_t AudioDeviceBuffer::SetRecordedBuffer(const void* audio_buffer, size_t samples_per_channel) { + return SetRecordedBuffer(audio_buffer, samples_per_channel, 0); +} + +int32_t AudioDeviceBuffer::SetRecordedBuffer(const void* audio_buffer, + size_t samples_per_channel, + int64_t capture_timestamp_ns) { // Copy the complete input buffer to the local buffer. const size_t old_size = rec_buffer_.size(); rec_buffer_.SetData(static_cast(audio_buffer), @@ -239,6 +247,17 @@ int32_t AudioDeviceBuffer::SetRecordedBuffer(const void* audio_buffer, RTC_LOG(LS_INFO) << "Size of recording buffer: " << rec_buffer_.size(); } + // If the timestamp is less then or equal to zero, it's not valid and are + // ignored. If we do antimestamp alignment on them they might accidentally + // become greater then zero, and will be handled as if they were a correct + // timestamp. + capture_timestamp_ns_ = + (capture_timestamp_ns > 0) + ? rtc::kNumNanosecsPerMicrosec * + timestamp_aligner_.TranslateTimestamp( + capture_timestamp_ns_ / rtc::kNumNanosecsPerMicrosec, + rtc::TimeMicros()) + : capture_timestamp_ns; // Derive a new level value twice per second and check if it is non-zero. int16_t max_abs = 0; RTC_DCHECK_LT(rec_stat_count_, 50); @@ -271,7 +290,7 @@ int32_t AudioDeviceBuffer::DeliverRecordedData() { int32_t res = audio_transport_cb_->RecordedDataIsAvailable( rec_buffer_.data(), frames, bytes_per_frame, rec_channels_, rec_sample_rate_, total_delay_ms, 0, 0, typing_status_, - new_mic_level_dummy); + new_mic_level_dummy, capture_timestamp_ns_); if (res == -1) { RTC_LOG(LS_ERROR) << "RecordedDataIsAvailable() failed"; } @@ -279,6 +298,9 @@ int32_t AudioDeviceBuffer::DeliverRecordedData() { } int32_t AudioDeviceBuffer::RequestPlayoutData(size_t samples_per_channel) { + TRACE_EVENT1("webrtc", "AudioDeviceBuffer::RequestPlayoutData", + "samples_per_channel", samples_per_channel); + // The consumer can change the requested size on the fly and we therefore // resize the buffer accordingly. 
Also takes place at the first call to this // method. @@ -460,7 +482,7 @@ void AudioDeviceBuffer::LogStats(LogState state) { // Keep posting new (delayed) tasks until state is changed to kLogStop. task_queue_.PostDelayedTask( [this] { AudioDeviceBuffer::LogStats(AudioDeviceBuffer::LOG_ACTIVE); }, - time_to_wait_ms); + TimeDelta::Millis(time_to_wait_ms)); } void AudioDeviceBuffer::ResetRecStats() { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.h index a0b7953194..9a6a88a1be 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.h @@ -23,6 +23,7 @@ #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" +#include "rtc_base/timestamp_aligner.h" namespace webrtc { @@ -97,8 +98,13 @@ class AudioDeviceBuffer { size_t RecordingChannels() const; size_t PlayoutChannels() const; + // TODO(bugs.webrtc.org/13621) Deprecate this function virtual int32_t SetRecordedBuffer(const void* audio_buffer, size_t samples_per_channel); + + virtual int32_t SetRecordedBuffer(const void* audio_buffer, + size_t samples_per_channel, + int64_t capture_timestamp_ns); virtual void SetVQEData(int play_delay_ms, int rec_delay_ms); virtual int32_t DeliverRecordedData(); uint32_t NewMicLevel() const; @@ -187,6 +193,9 @@ class AudioDeviceBuffer { int play_delay_ms_; int rec_delay_ms_; + // Capture timestamp. + int64_t capture_timestamp_ns_; + // Counts number of times LogStats() has been called. size_t num_stat_reports_ RTC_GUARDED_BY(task_queue_); @@ -219,6 +228,10 @@ class AudioDeviceBuffer { // being printed in the LogStats() task. bool log_stats_ RTC_GUARDED_BY(task_queue_); + // Used for converting capture timestaps (received from AudioRecordThread + // via AudioRecordJni::DataIsRecorded) to RTC clock. + rtc::TimestampAligner timestamp_aligner_; + // Should *never* be defined in production builds. Only used for testing. // When defined, the output signal will be replaced by a sinus tone at 440Hz. #ifdef AUDIO_DEVICE_PLAYS_SINUS_TONE diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_data_observer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_data_observer.cc index f655c5a78b..3775e7ce6d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_data_observer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_data_observer.cc @@ -10,9 +10,9 @@ #include "modules/audio_device/include/audio_device_data_observer.h" +#include "api/make_ref_counted.h" #include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/checks.h" -#include "rtc_base/ref_counted_object.h" namespace webrtc { @@ -45,17 +45,34 @@ class ADMWrapper : public AudioDeviceModule, public AudioTransport { // Make sure we have a valid ADM before returning it to user. bool IsValid() { return is_valid_; } - // AudioTransport methods overrides. 
int32_t RecordedDataIsAvailable(const void* audioSamples, - const size_t nSamples, - const size_t nBytesPerSample, - const size_t nChannels, - const uint32_t samples_per_sec, - const uint32_t total_delay_ms, - const int32_t clockDrift, - const uint32_t currentMicLevel, - const bool keyPressed, + size_t nSamples, + size_t nBytesPerSample, + size_t nChannels, + uint32_t samples_per_sec, + uint32_t total_delay_ms, + int32_t clockDrift, + uint32_t currentMicLevel, + bool keyPressed, uint32_t& newMicLevel) override { + return RecordedDataIsAvailable(audioSamples, nSamples, nBytesPerSample, + nChannels, samples_per_sec, total_delay_ms, + clockDrift, currentMicLevel, keyPressed, + newMicLevel, /*capture_timestamp_ns*/ 0); + } + + // AudioTransport methods overrides. + int32_t RecordedDataIsAvailable(const void* audioSamples, + size_t nSamples, + size_t nBytesPerSample, + size_t nChannels, + uint32_t samples_per_sec, + uint32_t total_delay_ms, + int32_t clockDrift, + uint32_t currentMicLevel, + bool keyPressed, + uint32_t& newMicLevel, + int64_t capture_timestamp_ns) override { int32_t res = 0; // Capture PCM data of locally captured audio. if (observer_) { @@ -67,7 +84,8 @@ class ADMWrapper : public AudioDeviceModule, public AudioTransport { if (audio_transport_) { res = audio_transport_->RecordedDataIsAvailable( audioSamples, nSamples, nBytesPerSample, nChannels, samples_per_sec, - total_delay_ms, clockDrift, currentMicLevel, keyPressed, newMicLevel); + total_delay_ms, clockDrift, currentMicLevel, keyPressed, newMicLevel, + capture_timestamp_ns); } return res; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_impl.cc index e65e567e52..c3f5274ea5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_impl.cc @@ -12,12 +12,12 @@ #include +#include "api/make_ref_counted.h" #include "api/scoped_refptr.h" #include "modules/audio_device/audio_device_config.h" // IWYU pragma: keep #include "modules/audio_device/audio_device_generic.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" #include "system_wrappers/include/metrics.h" #if defined(_WIN32) diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_name.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_name.cc index 5246c768ff..5318496768 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_name.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_name.cc @@ -10,14 +10,15 @@ #include "modules/audio_device/audio_device_name.h" -#include +#include "absl/strings/string_view.h" namespace webrtc { const char AudioDeviceName::kDefaultDeviceId[] = "default"; -AudioDeviceName::AudioDeviceName(std::string device_name, std::string unique_id) - : device_name(std::move(device_name)), unique_id(std::move(unique_id)) {} +AudioDeviceName::AudioDeviceName(absl::string_view device_name, + absl::string_view unique_id) + : device_name(device_name), unique_id(unique_id) {} bool AudioDeviceName::IsValid() { return !device_name.empty() && !unique_id.empty(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_name.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_name.h index baabd781a7..db37852e9a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_name.h +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_name.h @@ -14,6 +14,8 @@ #include #include +#include "absl/strings/string_view.h" + namespace webrtc { struct AudioDeviceName { @@ -25,7 +27,7 @@ struct AudioDeviceName { static const char kDefaultDeviceId[]; AudioDeviceName() = default; - AudioDeviceName(std::string device_name, std::string unique_id); + AudioDeviceName(absl::string_view device_name, absl::string_view unique_id); ~AudioDeviceName() = default; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.cc index e345a16c44..8c10ae4186 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.cc @@ -12,6 +12,7 @@ #include +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/platform_thread.h" @@ -29,8 +30,8 @@ const size_t kPlayoutBufferSize = const size_t kRecordingBufferSize = kRecordingFixedSampleRate / 100 * kRecordingNumChannels * 2; -FileAudioDevice::FileAudioDevice(const char* inputFilename, - const char* outputFilename) +FileAudioDevice::FileAudioDevice(absl::string_view inputFilename, + absl::string_view outputFilename) : _ptrAudioBuffer(NULL), _recordingBuffer(NULL), _playoutBuffer(NULL), @@ -206,7 +207,7 @@ int32_t FileAudioDevice::StartPlayout() { // PLAYOUT if (!_outputFilename.empty()) { - _outputFile = FileWrapper::OpenWriteOnly(_outputFilename.c_str()); + _outputFile = FileWrapper::OpenWriteOnly(_outputFilename); if (!_outputFile.is_open()) { RTC_LOG(LS_ERROR) << "Failed to open playout file: " << _outputFilename; _playing = false; @@ -266,7 +267,7 @@ int32_t FileAudioDevice::StartRecording() { } if (!_inputFilename.empty()) { - _inputFile = FileWrapper::OpenReadOnly(_inputFilename.c_str()); + _inputFile = FileWrapper::OpenReadOnly(_inputFilename); if (!_inputFile.is_open()) { RTC_LOG(LS_ERROR) << "Failed to open audio input file: " << _inputFilename; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.h index 4d6858f992..27979933f2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.h @@ -16,6 +16,7 @@ #include #include +#include "absl/strings/string_view.h" #include "modules/audio_device/audio_device_generic.h" #include "rtc_base/platform_thread.h" #include "rtc_base/synchronization/mutex.h" @@ -34,7 +35,8 @@ class FileAudioDevice : public AudioDeviceGeneric { // The input file should be a readable 48k stereo raw file, and the output // file should point to a writable location. The output format will also be // 48k stereo raw audio. 
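AudioDeviceName and FileAudioDevice above switch their string parameters from const char* / std::string to absl::string_view, which binds to literals, std::string and raw buffers alike while the members keep owning their own copies. A standalone sketch of the same signature style, using std::string_view (C++17) as a stand-in for absl::string_view:

#include <iostream>
#include <string>
#include <string_view>

struct DeviceName {
  DeviceName() = default;
  // string_view accepts any string-like argument; the std::string members
  // still make their own copies.
  DeviceName(std::string_view device_name, std::string_view unique_id)
      : device_name(device_name), unique_id(unique_id) {}

  bool IsValid() const { return !device_name.empty() && !unique_id.empty(); }

  std::string device_name;
  std::string unique_id;
};

int main() {
  const std::string id = "usb-mic-0";
  DeviceName name("USB Microphone", id);
  std::cout << name.device_name << " / " << name.unique_id
            << " valid=" << name.IsValid() << "\n";
  return 0;
}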
- FileAudioDevice(const char* inputFilename, const char* outputFilename); + FileAudioDevice(absl::string_view inputFilename, + absl::string_view outputFilename); virtual ~FileAudioDevice(); // Retrieve the currently utilized audio layer diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc index 0f56e0641a..8c41111478 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc @@ -14,8 +14,10 @@ #include +#include "absl/strings/string_view.h" #include "modules/audio_device/dummy/file_audio_device.h" #include "rtc_base/logging.h" +#include "rtc_base/string_utils.h" namespace webrtc { @@ -38,15 +40,15 @@ FileAudioDevice* FileAudioDeviceFactory::CreateFileAudioDevice() { } void FileAudioDeviceFactory::SetFilenamesToUse( - const char* inputAudioFilename, - const char* outputAudioFilename) { + absl::string_view inputAudioFilename, + absl::string_view outputAudioFilename) { #ifdef WEBRTC_DUMMY_FILE_DEVICES - RTC_DCHECK_LT(strlen(inputAudioFilename), MAX_FILENAME_LEN); - RTC_DCHECK_LT(strlen(outputAudioFilename), MAX_FILENAME_LEN); + RTC_DCHECK_LT(inputAudioFilename.size(), MAX_FILENAME_LEN); + RTC_DCHECK_LT(outputAudioFilename.size(), MAX_FILENAME_LEN); // Copy the strings since we don't know the lifetime of the input pointers. - strncpy(_inputAudioFilename, inputAudioFilename, MAX_FILENAME_LEN); - strncpy(_outputAudioFilename, outputAudioFilename, MAX_FILENAME_LEN); + rtc::strcpyn(_inputAudioFilename, MAX_FILENAME_LEN, inputAudioFilename); + rtc::strcpyn(_outputAudioFilename, MAX_FILENAME_LEN, outputAudioFilename); _isConfigured = true; #else // Sanity: must be compiled with the right define to run this. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device_factory.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device_factory.h index 72f4ab2b38..18f9388f21 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device_factory.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device_factory.h @@ -13,6 +13,8 @@ #include +#include "absl/strings/string_view.h" + namespace webrtc { class FileAudioDevice; @@ -27,8 +29,8 @@ class FileAudioDeviceFactory { // The input file must be a readable 48k stereo raw file. The output // file must be writable. The strings will be copied. 
- static void SetFilenamesToUse(const char* inputAudioFilename, - const char* outputAudioFilename); + static void SetFilenamesToUse(absl::string_view inputAudioFilename, + absl::string_view outputAudioFilename); private: enum : uint32_t { MAX_FILENAME_LEN = 512 }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_data_observer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_data_observer.h index b59cafcb5d..36dc45f19e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_data_observer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_data_observer.h @@ -26,16 +26,16 @@ namespace webrtc { class AudioDeviceDataObserver { public: virtual void OnCaptureData(const void* audio_samples, - const size_t num_samples, - const size_t bytes_per_sample, - const size_t num_channels, - const uint32_t samples_per_sec) = 0; + size_t num_samples, + size_t bytes_per_sample, + size_t num_channels, + uint32_t samples_per_sec) = 0; virtual void OnRenderData(const void* audio_samples, - const size_t num_samples, - const size_t bytes_per_sample, - const size_t num_channels, - const uint32_t samples_per_sec) = 0; + size_t num_samples, + size_t bytes_per_sample, + size_t num_channels, + uint32_t samples_per_sec) = 0; AudioDeviceDataObserver() = default; virtual ~AudioDeviceDataObserver() = default; @@ -56,14 +56,14 @@ rtc::scoped_refptr CreateAudioDeviceWithDataObserver( // Creates an ADM instance with AudioDeviceDataObserver registered. rtc::scoped_refptr CreateAudioDeviceWithDataObserver( - const AudioDeviceModule::AudioLayer audio_layer, + AudioDeviceModule::AudioLayer audio_layer, TaskQueueFactory* task_queue_factory, std::unique_ptr observer); // Creates an ADM instance with AudioDeviceDataObserver registered. 
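The factory hunks above swap strncpy (which does not guarantee NUL-termination when the source fills the buffer) for rtc::strcpyn, and check the string_view's size() instead of calling strlen(). A standalone sketch of a bounded copy with guaranteed termination; the helper name is illustrative, not the rtc:: implementation:

#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdio>
#include <string_view>

// Copies at most buffer_size - 1 characters and always NUL-terminates,
// unlike strncpy, which leaves the buffer unterminated when src is too long.
void CopyBounded(char* buffer, std::size_t buffer_size, std::string_view src) {
  if (buffer_size == 0) return;
  const std::size_t n = std::min(src.size(), buffer_size - 1);
  std::copy_n(src.data(), n, buffer);
  buffer[n] = '\0';
}

int main() {
  char filename[16];
  CopyBounded(filename, sizeof(filename), "capture_input_48k_stereo.raw");
  std::printf("%s\n", filename);  // Truncated but still a valid C string.
  assert(filename[sizeof(filename) - 1] == '\0');
  return 0;
}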
ABSL_DEPRECATED("") rtc::scoped_refptr CreateAudioDeviceWithDataObserver( - const AudioDeviceModule::AudioLayer audio_layer, + AudioDeviceModule::AudioLayer audio_layer, TaskQueueFactory* task_queue_factory, AudioDeviceDataObserver* observer); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_defines.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_defines.h index 01129a47a9..89d33f8538 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_defines.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_defines.h @@ -33,22 +33,43 @@ static const int kAdmMaxPlayoutBufferSizeMs = 250; class AudioTransport { public: + // TODO(bugs.webrtc.org/13620) Deprecate this function virtual int32_t RecordedDataIsAvailable(const void* audioSamples, - const size_t nSamples, - const size_t nBytesPerSample, - const size_t nChannels, - const uint32_t samplesPerSec, - const uint32_t totalDelayMS, - const int32_t clockDrift, - const uint32_t currentMicLevel, - const bool keyPressed, + size_t nSamples, + size_t nBytesPerSample, + size_t nChannels, + uint32_t samplesPerSec, + uint32_t totalDelayMS, + int32_t clockDrift, + uint32_t currentMicLevel, + bool keyPressed, uint32_t& newMicLevel) = 0; // NOLINT + virtual int32_t RecordedDataIsAvailable( + const void* audioSamples, + size_t nSamples, + size_t nBytesPerSample, + size_t nChannels, + uint32_t samplesPerSec, + uint32_t totalDelayMS, + int32_t clockDrift, + uint32_t currentMicLevel, + bool keyPressed, + uint32_t& newMicLevel, + int64_t estimatedCaptureTimeNS) { // NOLINT + // TODO(webrtc:13620) Make the default behaver of the new API to behave as + // the old API. This can be pure virtual if all uses of the old API is + // removed. + return RecordedDataIsAvailable( + audioSamples, nSamples, nBytesPerSample, nChannels, samplesPerSec, + totalDelayMS, clockDrift, currentMicLevel, keyPressed, newMicLevel); + } + // Implementation has to setup safe values for all specified out parameters. - virtual int32_t NeedMorePlayData(const size_t nSamples, - const size_t nBytesPerSample, - const size_t nChannels, - const uint32_t samplesPerSec, + virtual int32_t NeedMorePlayData(size_t nSamples, + size_t nBytesPerSample, + size_t nChannels, + uint32_t samplesPerSec, void* audioSamples, size_t& nSamplesOut, // NOLINT int64_t* elapsed_time_ms, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/fake_audio_device.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/fake_audio_device.h index 9949627a73..2322ce0263 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/fake_audio_device.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/fake_audio_device.h @@ -19,8 +19,9 @@ namespace webrtc { class FakeAudioDeviceModule : public webrtc_impl::AudioDeviceModuleDefault { public: - // TODO(nisse): Fix all users of this class to managed references using - // scoped_refptr. Current code doesn't always use refcounting for this class. + // TODO(bugs.webrtc.org/12701): Fix all users of this class to managed + // references using scoped_refptr. Current code doesn't always use refcounting + // for this class. 
void AddRef() const override {} rtc::RefCountReleaseStatus Release() const override { return rtc::RefCountReleaseStatus::kDroppedLastRef; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_device.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_device.h index 8483aa3da8..73fbdd547d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_device.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_device.h @@ -13,8 +13,8 @@ #include +#include "api/make_ref_counted.h" #include "modules/audio_device/include/audio_device.h" -#include "rtc_base/ref_counted_object.h" #include "test/gmock.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_transport.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_transport.h index 8f71a2d71f..e1be5f422f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_transport.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_transport.h @@ -25,23 +25,38 @@ class MockAudioTransport : public AudioTransport { MOCK_METHOD(int32_t, RecordedDataIsAvailable, (const void* audioSamples, - const size_t nSamples, - const size_t nBytesPerSample, - const size_t nChannels, - const uint32_t samplesPerSec, - const uint32_t totalDelayMS, - const int32_t clockDrift, - const uint32_t currentMicLevel, - const bool keyPressed, + size_t nSamples, + size_t nBytesPerSample, + size_t nChannels, + uint32_t samplesPerSec, + uint32_t totalDelayMS, + int32_t clockDrift, + uint32_t currentMicLevel, + bool keyPressed, uint32_t& newMicLevel), (override)); + MOCK_METHOD(int32_t, + RecordedDataIsAvailable, + (const void* audioSamples, + size_t nSamples, + size_t nBytesPerSample, + size_t nChannels, + uint32_t samplesPerSec, + uint32_t totalDelayMS, + int32_t clockDrift, + uint32_t currentMicLevel, + bool keyPressed, + uint32_t& newMicLevel, + int64_t estimated_capture_time_ns), + (override)); + MOCK_METHOD(int32_t, NeedMorePlayData, - (const size_t nSamples, - const size_t nBytesPerSample, - const size_t nChannels, - const uint32_t samplesPerSec, + (size_t nSamples, + size_t nBytesPerSample, + size_t nChannels, + uint32_t samplesPerSec, void* audioSamples, size_t& nSamplesOut, int64_t* elapsed_time_ms, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/test_audio_device.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/test_audio_device.cc index d8ab22f29d..2189646eff 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/test_audio_device.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/test_audio_device.cc @@ -18,7 +18,9 @@ #include #include +#include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/make_ref_counted.h" #include "common_audio/wav_file.h" #include "modules/audio_device/include/audio_device_default.h" #include "rtc_base/buffer.h" @@ -28,7 +30,6 @@ #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/platform_thread.h" #include "rtc_base/random.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" #include "rtc_base/task_utils/repeating_task.h" @@ -141,16 +142,9 @@ class TestAudioDeviceModuleImpl return capturing_; } - // Blocks until the Renderer refuses to receive data. - // Returns false if `timeout_ms` passes before that happens. 
- bool WaitForPlayoutEnd(int timeout_ms = rtc::Event::kForever) override { - return done_rendering_.Wait(timeout_ms); - } - - // Blocks until the Recorder stops producing data. - // Returns false if `timeout_ms` passes before that happens. - bool WaitForRecordingEnd(int timeout_ms = rtc::Event::kForever) override { - return done_capturing_.Wait(timeout_ms); + // Blocks forever until the Recorder stops producing data. + void WaitForRecordingEnd() override { + done_capturing_.Wait(rtc::Event::kForever); } private: @@ -266,7 +260,7 @@ class PulsedNoiseCapturerImpl final class WavFileReader final : public TestAudioDeviceModule::Capturer { public: - WavFileReader(std::string filename, + WavFileReader(absl::string_view filename, int sampling_frequency_in_hz, int num_channels, bool repeat) @@ -320,7 +314,7 @@ class WavFileReader final : public TestAudioDeviceModule::Capturer { class WavFileWriter final : public TestAudioDeviceModule::Renderer { public: - WavFileWriter(std::string filename, + WavFileWriter(absl::string_view filename, int sampling_frequency_in_hz, int num_channels) : WavFileWriter(std::make_unique(filename, @@ -353,7 +347,7 @@ class WavFileWriter final : public TestAudioDeviceModule::Renderer { class BoundedWavFileWriter : public TestAudioDeviceModule::Renderer { public: - BoundedWavFileWriter(std::string filename, + BoundedWavFileWriter(absl::string_view filename, int sampling_frequency_in_hz, int num_channels) : sampling_frequency_in_hz_(sampling_frequency_in_hz), @@ -467,7 +461,7 @@ TestAudioDeviceModule::CreateDiscardRenderer(int sampling_frequency_in_hz, } std::unique_ptr -TestAudioDeviceModule::CreateWavFileReader(std::string filename, +TestAudioDeviceModule::CreateWavFileReader(absl::string_view filename, int sampling_frequency_in_hz, int num_channels) { return std::make_unique(filename, sampling_frequency_in_hz, @@ -475,7 +469,8 @@ TestAudioDeviceModule::CreateWavFileReader(std::string filename, } std::unique_ptr -TestAudioDeviceModule::CreateWavFileReader(std::string filename, bool repeat) { +TestAudioDeviceModule::CreateWavFileReader(absl::string_view filename, + bool repeat) { WavReader reader(filename); int sampling_frequency_in_hz = reader.sample_rate(); int num_channels = rtc::checked_cast(reader.num_channels()); @@ -484,7 +479,7 @@ TestAudioDeviceModule::CreateWavFileReader(std::string filename, bool repeat) { } std::unique_ptr -TestAudioDeviceModule::CreateWavFileWriter(std::string filename, +TestAudioDeviceModule::CreateWavFileWriter(absl::string_view filename, int sampling_frequency_in_hz, int num_channels) { return std::make_unique(filename, sampling_frequency_in_hz, @@ -492,7 +487,7 @@ TestAudioDeviceModule::CreateWavFileWriter(std::string filename, } std::unique_ptr -TestAudioDeviceModule::CreateBoundedWavFileWriter(std::string filename, +TestAudioDeviceModule::CreateBoundedWavFileWriter(absl::string_view filename, int sampling_frequency_in_hz, int num_channels) { return std::make_unique( diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/test_audio_device.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/test_audio_device.h index fd006a3558..8413479291 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/test_audio_device.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/test_audio_device.h @@ -16,13 +16,13 @@ #include #include +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/scoped_refptr.h" #include "api/task_queue/task_queue_factory.h" #include 
"modules/audio_device/include/audio_device.h" #include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/buffer.h" -#include "rtc_base/event.h" namespace webrtc { @@ -103,7 +103,7 @@ class TestAudioDeviceModule : public AudioDeviceModule { // Returns a Capturer instance that gets its data from a file. The sample rate // and channels will be checked against the Wav file. static std::unique_ptr CreateWavFileReader( - std::string filename, + absl::string_view filename, int sampling_frequency_in_hz, int num_channels = 1); @@ -111,12 +111,13 @@ class TestAudioDeviceModule : public AudioDeviceModule { // Automatically detects sample rate and num of channels. // `repeat` - if true, the file will be replayed from the start when we reach // the end of file. - static std::unique_ptr CreateWavFileReader(std::string filename, - bool repeat = false); + static std::unique_ptr CreateWavFileReader( + absl::string_view filename, + bool repeat = false); // Returns a Renderer instance that writes its data to a file. static std::unique_ptr CreateWavFileWriter( - std::string filename, + absl::string_view filename, int sampling_frequency_in_hz, int num_channels = 1); @@ -124,7 +125,7 @@ class TestAudioDeviceModule : public AudioDeviceModule { // off silence at the beginning (not necessarily perfect silence, see // kAmplitudeThreshold) and at the end (only actual 0 samples in this case). static std::unique_ptr CreateBoundedWavFileWriter( - std::string filename, + absl::string_view filename, int sampling_frequency_in_hz, int num_channels = 1); @@ -139,12 +140,8 @@ class TestAudioDeviceModule : public AudioDeviceModule { bool Playing() const override = 0; bool Recording() const override = 0; - // Blocks until the Renderer refuses to receive data. - // Returns false if `timeout_ms` passes before that happens. - virtual bool WaitForPlayoutEnd(int timeout_ms = rtc::Event::kForever) = 0; - // Blocks until the Recorder stops producing data. - // Returns false if `timeout_ms` passes before that happens. - virtual bool WaitForRecordingEnd(int timeout_ms = rtc::Event::kForever) = 0; + // Blocks forever until the Recorder stops producing data. 
+ virtual void WaitForRecordingEnd() = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h index 1f4a231640..23e21d3ce9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h @@ -131,11 +131,11 @@ class AudioDeviceLinuxALSA : public AudioDeviceGeneric { int32_t InitPlayoutLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); int32_t InitSpeakerLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); int32_t InitMicrophoneLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - int32_t GetDevicesInfo(const int32_t function, - const bool playback, - const int32_t enumDeviceNo = 0, + int32_t GetDevicesInfo(int32_t function, + bool playback, + int32_t enumDeviceNo = 0, char* enumDeviceName = NULL, - const int32_t ednLen = 0) const; + int32_t ednLen = 0) const; int32_t ErrorRecovery(int32_t error, snd_pcm_t* deviceHandle); bool KeyPressed() const; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc index 4876c0fb91..90cd58c497 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc @@ -1059,7 +1059,7 @@ int32_t AudioDeviceLinuxPulse::StartRecording() { // The audio thread will signal when recording has started. _timeEventRec.Set(); - if (!_recStartEvent.Wait(10000)) { + if (!_recStartEvent.Wait(TimeDelta::Seconds(10))) { { MutexLock lock(&mutex_); _startRec = false; @@ -1174,7 +1174,7 @@ int32_t AudioDeviceLinuxPulse::StartPlayout() { // The audio thread will signal when playout has started. 
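The PulseAudio hunk above (and the matching ones that follow) changes rtc::Event::Wait() from taking a raw millisecond count to taking a TimeDelta such as TimeDelta::Seconds(10), so the unit becomes part of the type. The standard-library analogue is waiting on a condition variable with std::chrono durations; a small sketch:

#include <chrono>
#include <condition_variable>
#include <iostream>
#include <mutex>
#include <thread>

int main() {
  std::mutex mutex;
  std::condition_variable cv;
  bool started = false;

  // The "audio thread" signals once recording has started.
  std::thread audio_thread([&] {
    std::this_thread::sleep_for(std::chrono::milliseconds(100));
    {
      std::lock_guard<std::mutex> lock(mutex);
      started = true;
    }
    cv.notify_one();
  });

  // The caller waits with an explicit, typed timeout instead of a bare
  // integer of milliseconds, mirroring the TimeDelta::Seconds(10) change.
  std::unique_lock<std::mutex> lock(mutex);
  const bool ok =
      cv.wait_for(lock, std::chrono::seconds(10), [&] { return started; });
  std::cout << (ok ? "recording started\n" : "timed out\n");

  lock.unlock();
  audio_thread.join();
  return ok ? 0 : 1;
}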
_timeEventPlay.Set(); - if (!_playStartEvent.Wait(10000)) { + if (!_playStartEvent.Wait(TimeDelta::Seconds(10))) { { MutexLock lock(&mutex_); _startPlay = false; @@ -1977,7 +1977,7 @@ int32_t AudioDeviceLinuxPulse::ProcessRecordedData(int8_t* bufferData, } bool AudioDeviceLinuxPulse::PlayThreadProcess() { - if (!_timeEventPlay.Wait(1000)) { + if (!_timeEventPlay.Wait(TimeDelta::Seconds(1))) { return true; } @@ -2149,7 +2149,7 @@ bool AudioDeviceLinuxPulse::PlayThreadProcess() { } bool AudioDeviceLinuxPulse::RecThreadProcess() { - if (!_timeEventRec.Wait(1000)) { + if (!_timeEventRec.Wait(TimeDelta::Seconds(1))) { return true; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc index def4866d5a..751edafd8b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc @@ -10,6 +10,7 @@ #include "modules/audio_device/linux/latebindingsymboltable_linux.h" +#include "absl/strings/string_view.h" #include "rtc_base/logging.h" #ifdef WEBRTC_LINUX @@ -32,9 +33,9 @@ inline static const char* GetDllError() { #endif } -DllHandle InternalLoadDll(const char dll_name[]) { +DllHandle InternalLoadDll(absl::string_view dll_name) { #ifdef WEBRTC_LINUX - DllHandle handle = dlopen(dll_name, RTLD_NOW); + DllHandle handle = dlopen(std::string(dll_name).c_str(), RTLD_NOW); #else #error Not implemented #endif @@ -64,10 +65,10 @@ void InternalUnloadDll(DllHandle handle) { } static bool LoadSymbol(DllHandle handle, - const char* symbol_name, + absl::string_view symbol_name, void** symbol) { #ifdef WEBRTC_LINUX - *symbol = dlsym(handle, symbol_name); + *symbol = dlsym(handle, std::string(symbol_name).c_str()); char* err = dlerror(); if (err) { RTC_LOG(LS_ERROR) << "Error loading symbol " << symbol_name << " : " << err; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h index 6cfb659749..00f3c5a449 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h @@ -14,8 +14,8 @@ #include // for NULL #include +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" // This file provides macros for creating "symbol table" classes to simplify the // dynamic loading of symbols from DLLs. Currently the implementation only @@ -34,7 +34,7 @@ const DllHandle kInvalidDllHandle = NULL; #endif // These are helpers for use only by the class below. -DllHandle InternalLoadDll(const char dll_name[]); +DllHandle InternalLoadDll(absl::string_view); void InternalUnloadDll(DllHandle handle); @@ -55,6 +55,9 @@ class LateBindingSymbolTable { ~LateBindingSymbolTable() { Unload(); } + LateBindingSymbolTable(const LateBindingSymbolTable&) = delete; + LateBindingSymbolTable& operator=(LateBindingSymbolTable&) = delete; + static int NumSymbols() { return SYMBOL_TABLE_SIZE; } // We do not use this, but we offer it for theoretical convenience. 
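InternalLoadDll() and LoadSymbol() above now take absl::string_view but still call the C APIs dlopen() and dlsym(), which require NUL-terminated strings, so the view is materialized into a temporary std::string first. A standalone sketch of that conversion using std::string_view and POSIX dlopen/dlsym (the library name in main() is only an example):

#include <dlfcn.h>

#include <cstdio>
#include <string>
#include <string_view>

// A string_view is not guaranteed to be NUL-terminated, so build a temporary
// std::string before handing the name to a C API such as dlopen().
void* LoadLibrary(std::string_view dll_name) {
  void* handle = dlopen(std::string(dll_name).c_str(), RTLD_NOW);
  if (!handle) {
    std::fprintf(stderr, "dlopen failed: %s\n", dlerror());
  }
  return handle;
}

void* LoadSymbol(void* handle, std::string_view symbol_name) {
  return dlsym(handle, std::string(symbol_name).c_str());
}

int main() {
  // libm is used purely as an example of a library that is usually present.
  if (void* handle = LoadLibrary("libm.so.6")) {
    void* cos_symbol = LoadSymbol(handle, "cos");
    std::printf("cos symbol at %p\n", cos_symbol);
    dlclose(handle);
  }
  return 0;
}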
@@ -109,8 +112,6 @@ class LateBindingSymbolTable { DllHandle handle_; bool undefined_symbols_; void* symbols_[SYMBOL_TABLE_SIZE]; - - RTC_DISALLOW_COPY_AND_ASSIGN(LateBindingSymbolTable); }; // This macro must be invoked in a header to declare a symbol table class. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.cc index 8cebc38779..0c203a1d9f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.cc @@ -21,7 +21,7 @@ #include "modules/audio_mixer/default_output_rate_calculator.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" +#include "rtc_base/trace_event.h" namespace webrtc { @@ -157,6 +157,7 @@ rtc::scoped_refptr AudioMixerImpl::Create( void AudioMixerImpl::Mix(size_t number_of_channels, AudioFrame* audio_frame_for_mixing) { + TRACE_EVENT0("webrtc", "AudioMixerImpl::Mix"); RTC_DCHECK(number_of_channels >= 1); MutexLock lock(&mutex_); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h index 737fcbdc43..76b1131777 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h @@ -22,7 +22,6 @@ #include "api/scoped_refptr.h" #include "modules/audio_mixer/frame_combiner.h" #include "modules/audio_mixer/output_rate_calculator.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/race_checker.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -48,6 +47,9 @@ class AudioMixerImpl : public AudioMixer { ~AudioMixerImpl() override; + AudioMixerImpl(const AudioMixerImpl&) = delete; + AudioMixerImpl& operator=(const AudioMixerImpl&) = delete; + // AudioMixer functions bool AddSource(Source* audio_source) override; void RemoveSource(Source* audio_source) override; @@ -92,8 +94,6 @@ class AudioMixerImpl : public AudioMixer { // Component that handles actual adding of audio frames. FrameCombiner frame_combiner_; - - RTC_DISALLOW_COPY_AND_ASSIGN(AudioMixerImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter.h index 7597709460..34c06f4367 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/adaptive_fir_filter.h @@ -16,6 +16,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/aec3_fft.h" @@ -141,7 +142,7 @@ class AdaptiveFirFilter { // Returns the maximum number of partitions for the filter. 
size_t max_filter_size_partitions() const { return max_size_partitions_; } - void DumpFilter(const char* name_frequency_domain) { + void DumpFilter(absl::string_view name_frequency_domain) { for (size_t p = 0; p < max_size_partitions_; ++p) { data_dumper_->DumpRaw(name_frequency_domain, H_[p][0].re); data_dumper_->DumpRaw(name_frequency_domain, H_[p][0].im); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.h index 3bfff967a0..32b564f14b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.h @@ -85,10 +85,10 @@ constexpr size_t GetRenderDelayBufferSize(size_t down_sampling_factor, Aec3Optimization DetectOptimization(); // Computes the log2 of the input in a fast an approximate manner. -float FastApproxLog2f(const float in); +float FastApproxLog2f(float in); // Returns dB from a power quantity expressed in log2. -float Log2TodB(const float in_log2); +float Log2TodB(float in_log2); static_assert(1 << kBlockSizeLog2 == kBlockSize, "Proper number of shifts for blocksize"); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_fft.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_fft.h index 6f7fbe4d0e..c68de53963 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_fft.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_fft.h @@ -18,7 +18,6 @@ #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/fft_data.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -30,6 +29,9 @@ class Aec3Fft { Aec3Fft(); + Aec3Fft(const Aec3Fft&) = delete; + Aec3Fft& operator=(const Aec3Fft&) = delete; + // Computes the FFT. Note that both the input and output are modified. 
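Log2TodB() above is documented as returning dB from a power quantity expressed in log2. That conversion follows from 10·log10(x) = 10·log10(2)·log2(x) ≈ 3.0103·log2(x); a one-line sketch of the identity (not necessarily the file's exact implementation):

#include <cmath>
#include <cstdio>

// dB of a power quantity whose base-2 logarithm is already known:
// 10 * log10(x) = 10 * log10(2) * log2(x).
float Log2ToDb(float in_log2) {
  constexpr float kTenLog10Of2 = 3.0102999566f;  // 10 * log10(2)
  return kTenLog10Of2 * in_log2;
}

int main() {
  // A power ratio of 1024 (= 2^10) is 10 * log10(1024) ~= 30.1 dB.
  std::printf("%.4f dB\n", Log2ToDb(std::log2(1024.0f)));
  return 0;
}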
void Fft(std::array* x, FftData* X) const { RTC_DCHECK(x); @@ -66,8 +68,6 @@ class Aec3Fft { private: const OouraFft ooura_fft_; - - RTC_DISALLOW_COPY_AND_ASSIGN(Aec3Fft); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.cc index b09acfd892..81fd91fab9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.cc @@ -20,7 +20,6 @@ #include "api/array_view.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "system_wrappers/include/field_trial.h" @@ -97,7 +96,7 @@ void ComputeAvgRenderReverb( } // namespace -int AecState::instance_count_ = 0; +std::atomic AecState::instance_count_(0); void AecState::GetResidualEchoScaling( rtc::ArrayView residual_scaling) const { @@ -115,8 +114,7 @@ void AecState::GetResidualEchoScaling( AecState::AecState(const EchoCanceller3Config& config, size_t num_capture_channels) - : data_dumper_( - new ApmDataDumper(rtc::AtomicOps::Increment(&instance_count_))), + : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), config_(config), num_capture_channels_(num_capture_channels), deactivate_initial_state_reset_at_echo_path_change_( @@ -206,15 +204,16 @@ void AecState::Update( strong_not_saturated_render_blocks_); } - const std::vector>& aligned_render_block = - render_buffer.Block(-delay_state_.MinDirectPathFilterDelay())[0]; + const Block& aligned_render_block = + render_buffer.GetBlock(-delay_state_.MinDirectPathFilterDelay()); // Update render counters. 
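AecState above replaces an rtc::AtomicOps increment on a plain int with a std::atomic counter, using fetch_add(1) + 1 to keep the old "value after increment" semantics for numbering data-dumper instances. A standalone sketch:

#include <atomic>
#include <cstdio>

class Instance {
 public:
  // fetch_add returns the value *before* the increment, so +1 reproduces the
  // "value after increment" that the old atomic-increment helper returned.
  Instance() : id_(instance_count_.fetch_add(1) + 1) {}

  int id() const { return id_; }

 private:
  static std::atomic<int> instance_count_;
  const int id_;
};

std::atomic<int> Instance::instance_count_(0);

int main() {
  Instance a, b, c;
  std::printf("%d %d %d\n", a.id(), b.id(), c.id());  // Prints: 1 2 3
  return 0;
}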
bool active_render = false; - for (size_t ch = 0; ch < aligned_render_block.size(); ++ch) { - const float render_energy = std::inner_product( - aligned_render_block[ch].begin(), aligned_render_block[ch].end(), - aligned_render_block[ch].begin(), 0.f); + for (int ch = 0; ch < aligned_render_block.NumChannels(); ++ch) { + const float render_energy = + std::inner_product(aligned_render_block.begin(/*block=*/0, ch), + aligned_render_block.end(/*block=*/0, ch), + aligned_render_block.begin(/*block=*/0, ch), 0.f); if (render_energy > (config_.render_levels.active_render_limit * config_.render_levels.active_render_limit) * kFftLengthBy2) { @@ -446,7 +445,7 @@ void AecState::FilteringQualityAnalyzer::Update( } void AecState::SaturationDetector::Update( - rtc::ArrayView> x, + const Block& x, bool saturated_capture, bool usable_linear_estimate, rtc::ArrayView subtractor_output, @@ -466,8 +465,9 @@ void AecState::SaturationDetector::Update( } } else { float max_sample = 0.f; - for (auto& channel : x) { - for (float sample : channel) { + for (int ch = 0; ch < x.NumChannels(); ++ch) { + rtc::ArrayView x_ch = x.View(/*band=*/0, ch); + for (float sample : x_ch) { max_sample = std::max(max_sample, fabsf(sample)); } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.h index 5994465688..a39325c8b8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.h @@ -14,6 +14,7 @@ #include #include +#include #include #include @@ -154,7 +155,7 @@ class AecState { } private: - static int instance_count_; + static std::atomic instance_count_; std::unique_ptr data_dumper_; const EchoCanceller3Config config_; const size_t num_capture_channels_; @@ -272,7 +273,7 @@ class AecState { bool SaturatedEcho() const { return saturated_echo_; } // Updates the detection decision based on new data. - void Update(rtc::ArrayView> x, + void Update(const Block& x, bool saturated_capture, bool usable_linear_estimate, rtc::ArrayView subtractor_output, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/alignment_mixer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/alignment_mixer.cc index 87488d2674..7f076dea8e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/alignment_mixer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/alignment_mixer.cc @@ -63,9 +63,10 @@ AlignmentMixer::AlignmentMixer(size_t num_channels, } } -void AlignmentMixer::ProduceOutput(rtc::ArrayView> x, +void AlignmentMixer::ProduceOutput(const Block& x, rtc::ArrayView y) { - RTC_DCHECK_EQ(x.size(), num_channels_); + RTC_DCHECK_EQ(x.NumChannels(), num_channels_); + if (selection_variant_ == MixingVariant::kDownmix) { Downmix(x, y); return; @@ -73,18 +74,20 @@ void AlignmentMixer::ProduceOutput(rtc::ArrayView> x, int ch = selection_variant_ == MixingVariant::kFixed ? 
0 : SelectChannel(x); - RTC_DCHECK_GE(x.size(), ch); - std::copy(x[ch].begin(), x[ch].end(), y.begin()); + RTC_DCHECK_GT(x.NumChannels(), ch); + std::copy(x.begin(/*band=*/0, ch), x.end(/*band=*/0, ch), y.begin()); } -void AlignmentMixer::Downmix(rtc::ArrayView> x, +void AlignmentMixer::Downmix(const Block& x, rtc::ArrayView y) const { - RTC_DCHECK_EQ(x.size(), num_channels_); + RTC_DCHECK_EQ(x.NumChannels(), num_channels_); RTC_DCHECK_GE(num_channels_, 2); - std::copy(x[0].begin(), x[0].end(), y.begin()); + std::memcpy(&y[0], x.View(/*band=*/0, /*channel=*/0).data(), + kBlockSize * sizeof(y[0])); for (size_t ch = 1; ch < num_channels_; ++ch) { + const auto x_ch = x.View(/*band=*/0, ch); for (size_t i = 0; i < kBlockSize; ++i) { - y[i] += x[ch][i]; + y[i] += x_ch[i]; } } @@ -93,8 +96,8 @@ void AlignmentMixer::Downmix(rtc::ArrayView> x, } } -int AlignmentMixer::SelectChannel(rtc::ArrayView> x) { - RTC_DCHECK_EQ(x.size(), num_channels_); +int AlignmentMixer::SelectChannel(const Block& x) { + RTC_DCHECK_EQ(x.NumChannels(), num_channels_); RTC_DCHECK_GE(num_channels_, 2); RTC_DCHECK_EQ(cumulative_energies_.size(), num_channels_); @@ -112,10 +115,10 @@ int AlignmentMixer::SelectChannel(rtc::ArrayView> x) { ++block_counter_; for (int ch = 0; ch < num_ch_to_analyze; ++ch) { - RTC_DCHECK_EQ(x[ch].size(), kBlockSize); float x2_sum = 0.f; + rtc::ArrayView x_ch = x.View(/*band=*/0, ch); for (size_t i = 0; i < kBlockSize; ++i) { - x2_sum += x[ch][i] * x[ch][i]; + x2_sum += x_ch[i] * x_ch[i]; } if (ch < 2 && x2_sum > excitation_energy_threshold_) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/alignment_mixer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/alignment_mixer.h index 682aec9124..b3ed04755c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/alignment_mixer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/alignment_mixer.h @@ -16,6 +16,7 @@ #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" #include "modules/audio_processing/aec3/aec3_common.h" +#include "modules/audio_processing/aec3/block.h" namespace webrtc { @@ -33,8 +34,7 @@ class AlignmentMixer { float excitation_limit, bool prefer_first_two_channels); - void ProduceOutput(rtc::ArrayView> x, - rtc::ArrayView y); + void ProduceOutput(const Block& x, rtc::ArrayView y); enum class MixingVariant { kDownmix, kAdaptive, kFixed }; @@ -49,9 +49,8 @@ class AlignmentMixer { int selected_channel_ = 0; size_t block_counter_ = 0; - void Downmix(const rtc::ArrayView> x, - rtc::ArrayView y) const; - int SelectChannel(rtc::ArrayView> x); + void Downmix(const Block& x, rtc::ArrayView y) const; + int SelectChannel(const Block& x); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block.h new file mode 100644 index 0000000000..c1fc70722d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block.h @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_AUDIO_PROCESSING_AEC3_BLOCK_H_ +#define MODULES_AUDIO_PROCESSING_AEC3_BLOCK_H_ + +#include +#include + +#include "api/array_view.h" +#include "modules/audio_processing/aec3/aec3_common.h" + +namespace webrtc { + +// Contains one or more channels of 4 milliseconds of audio data. +// The audio is split in one or more frequency bands, each with a sampling +// rate of 16 kHz. +class Block { + public: + Block(int num_bands, int num_channels, float default_value = 0.0f) + : num_bands_(num_bands), + num_channels_(num_channels), + data_(num_bands * num_channels * kBlockSize, default_value) {} + + // Returns the number of bands. + int NumBands() const { return num_bands_; } + + // Returns the number of channels. + int NumChannels() const { return num_channels_; } + + // Modifies the number of channels and sets all samples to zero. + void SetNumChannels(int num_channels) { + num_channels_ = num_channels; + data_.resize(num_bands_ * num_channels_ * kBlockSize); + std::fill(data_.begin(), data_.end(), 0.0f); + } + + // Iterators for accessing the data. + auto begin(int band, int channel) { + return data_.begin() + GetIndex(band, channel); + } + + auto begin(int band, int channel) const { + return data_.begin() + GetIndex(band, channel); + } + + auto end(int band, int channel) { return begin(band, channel) + kBlockSize; } + + auto end(int band, int channel) const { + return begin(band, channel) + kBlockSize; + } + + // Access data via ArrayView. + rtc::ArrayView View(int band, int channel) { + return rtc::ArrayView(&data_[GetIndex(band, channel)], + kBlockSize); + } + + rtc::ArrayView View(int band, int channel) const { + return rtc::ArrayView( + &data_[GetIndex(band, channel)], kBlockSize); + } + + // Lets two Blocks swap audio data. + void Swap(Block& b) { + std::swap(num_bands_, b.num_bands_); + std::swap(num_channels_, b.num_channels_); + data_.swap(b.data_); + } + + private: + // Returns the index of the first sample of the requested |band| and + // |channel|. 
+ int GetIndex(int band, int channel) const { + return (band * num_channels_ + channel) * kBlockSize; + } + + int num_bands_; + int num_channels_; + std::vector data_; +}; + +} // namespace webrtc +#endif // MODULES_AUDIO_PROCESSING_AEC3_BLOCK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_buffer.cc index 77ce3deaf8..289c3f0d10 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_buffer.cc @@ -14,25 +14,9 @@ namespace webrtc { -BlockBuffer::BlockBuffer(size_t size, - size_t num_bands, - size_t num_channels, - size_t frame_length) +BlockBuffer::BlockBuffer(size_t size, size_t num_bands, size_t num_channels) : size(static_cast(size)), - buffer(size, - std::vector>>( - num_bands, - std::vector>( - num_channels, - std::vector(frame_length, 0.f)))) { - for (auto& block : buffer) { - for (auto& band : block) { - for (auto& channel : band) { - std::fill(channel.begin(), channel.end(), 0.f); - } - } - } -} + buffer(size, Block(num_bands, num_channels)) {} BlockBuffer::~BlockBuffer() = default; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_buffer.h index b28d659a14..3489d51646 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_buffer.h @@ -15,6 +15,7 @@ #include +#include "modules/audio_processing/aec3/block.h" #include "rtc_base/checks.h" namespace webrtc { @@ -22,10 +23,7 @@ namespace webrtc { // Struct for bundling a circular buffer of two dimensional vector objects // together with the read and write indices. struct BlockBuffer { - BlockBuffer(size_t size, - size_t num_bands, - size_t num_channels, - size_t frame_length); + BlockBuffer(size_t size, size_t num_bands, size_t num_channels); ~BlockBuffer(); int IncIndex(int index) const { @@ -52,7 +50,7 @@ struct BlockBuffer { void DecReadIndex() { read = DecIndex(read); } const int size; - std::vector>>> buffer; + std::vector buffer; int write = 0; int read = 0; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_framer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_framer.cc index 8241ce64f2..4243ddeba0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_framer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_framer.cc @@ -34,35 +34,32 @@ BlockFramer::~BlockFramer() = default; // samples for InsertBlockAndExtractSubFrame to produce a frame. In order to // achieve this, the InsertBlockAndExtractSubFrame and InsertBlock methods need // to be called in the correct order. 
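Aside: the BlockFramer hunks below iterate audio through the flat Block type introduced in block.h above instead of through nested std::vector channels. A minimal sketch (not part of the patch) of the Block accessors they rely on, with hypothetical helper names; assumes kBlockSize == 64 from aec3_common.h.

// Sketch only: hypothetical helpers exercising the Block accessors used by
// the BlockFramer code below. Assumes kBlockSize == 64 (aec3_common.h).
#include <algorithm>
#include <numeric>

#include "api/array_view.h"
#include "modules/audio_processing/aec3/block.h"

float Band0ChannelEnergy(const webrtc::Block& block, int channel) {
  // View() exposes one band/channel pair as a kBlockSize-sample ArrayView
  // into the single flat buffer, replacing the old block[band][channel]
  // nested-vector indexing.
  rtc::ArrayView<const float> samples = block.View(/*band=*/0, channel);
  return std::inner_product(samples.begin(), samples.end(), samples.begin(),
                            0.f);
}

void CopyBand0Channel(const webrtc::Block& src, int channel,
                      webrtc::Block* dst) {
  // begin()/end() give iterators over the same kBlockSize-sample range; this
  // is the access pattern BlockFramer::InsertBlock() uses below when it
  // appends a block to its internal buffer.
  std::copy(src.begin(/*band=*/0, channel), src.end(/*band=*/0, channel),
            dst->begin(/*band=*/0, channel));
}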
-void BlockFramer::InsertBlock( - const std::vector>>& block) { - RTC_DCHECK_EQ(num_bands_, block.size()); +void BlockFramer::InsertBlock(const Block& block) { + RTC_DCHECK_EQ(num_bands_, block.NumBands()); + RTC_DCHECK_EQ(num_channels_, block.NumChannels()); for (size_t band = 0; band < num_bands_; ++band) { - RTC_DCHECK_EQ(num_channels_, block[band].size()); for (size_t channel = 0; channel < num_channels_; ++channel) { - RTC_DCHECK_EQ(kBlockSize, block[band][channel].size()); RTC_DCHECK_EQ(0, buffer_[band][channel].size()); buffer_[band][channel].insert(buffer_[band][channel].begin(), - block[band][channel].begin(), - block[band][channel].end()); + block.begin(band, channel), + block.end(band, channel)); } } } void BlockFramer::InsertBlockAndExtractSubFrame( - const std::vector>>& block, + const Block& block, std::vector>>* sub_frame) { RTC_DCHECK(sub_frame); - RTC_DCHECK_EQ(num_bands_, block.size()); + RTC_DCHECK_EQ(num_bands_, block.NumBands()); + RTC_DCHECK_EQ(num_channels_, block.NumChannels()); RTC_DCHECK_EQ(num_bands_, sub_frame->size()); for (size_t band = 0; band < num_bands_; ++band) { - RTC_DCHECK_EQ(num_channels_, block[band].size()); RTC_DCHECK_EQ(num_channels_, (*sub_frame)[0].size()); for (size_t channel = 0; channel < num_channels_; ++channel) { RTC_DCHECK_LE(kSubFrameLength, buffer_[band][channel].size() + kBlockSize); - RTC_DCHECK_EQ(kBlockSize, block[band][channel].size()); RTC_DCHECK_GE(kBlockSize, buffer_[band][channel].size()); RTC_DCHECK_EQ(kSubFrameLength, (*sub_frame)[band][channel].size()); @@ -71,14 +68,14 @@ void BlockFramer::InsertBlockAndExtractSubFrame( std::copy(buffer_[band][channel].begin(), buffer_[band][channel].end(), (*sub_frame)[band][channel].begin()); std::copy( - block[band][channel].begin(), - block[band][channel].begin() + samples_to_frame, + block.begin(band, channel), + block.begin(band, channel) + samples_to_frame, (*sub_frame)[band][channel].begin() + buffer_[band][channel].size()); buffer_[band][channel].clear(); buffer_[band][channel].insert( buffer_[band][channel].begin(), - block[band][channel].begin() + samples_to_frame, - block[band][channel].end()); + block.begin(band, channel) + samples_to_frame, + block.end(band, channel)); } } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_framer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_framer.h index 1d378660c3..e2cdd5a17c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_framer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_framer.h @@ -15,6 +15,7 @@ #include "api/array_view.h" #include "modules/audio_processing/aec3/aec3_common.h" +#include "modules/audio_processing/aec3/block.h" namespace webrtc { @@ -32,10 +33,10 @@ class BlockFramer { BlockFramer& operator=(const BlockFramer&) = delete; // Adds a 64 sample block into the data that will form the next output frame. - void InsertBlock(const std::vector>>& block); + void InsertBlock(const Block& block); // Adds a 64 sample block and extracts an 80 sample subframe. 
void InsertBlockAndExtractSubFrame( - const std::vector>>& block, + const Block& block, std::vector>>* sub_frame); private: diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.cc index 2ee32b82dc..63e3d9cc7c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.cc @@ -11,6 +11,7 @@ #include +#include #include #include #include @@ -26,7 +27,6 @@ #include "modules/audio_processing/aec3/render_delay_buffer.h" #include "modules/audio_processing/aec3/render_delay_controller.h" #include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -49,14 +49,12 @@ class BlockProcessorImpl final : public BlockProcessor { ~BlockProcessorImpl() override; - void ProcessCapture( - bool echo_path_gain_change, - bool capture_signal_saturation, - std::vector>>* linear_output, - std::vector>>* capture_block) override; + void ProcessCapture(bool echo_path_gain_change, + bool capture_signal_saturation, + Block* linear_output, + Block* capture_block) override; - void BufferRender( - const std::vector>>& block) override; + void BufferRender(const Block& block) override; void UpdateEchoLeakageStatus(bool leakage_detected) override; @@ -66,7 +64,7 @@ class BlockProcessorImpl final : public BlockProcessor { void SetCaptureOutputUsage(bool capture_output_used) override; private: - static int instance_count_; + static std::atomic instance_count_; std::unique_ptr data_dumper_; const EchoCanceller3Config config_; bool capture_properly_started_ = false; @@ -81,7 +79,7 @@ class BlockProcessorImpl final : public BlockProcessor { absl::optional estimated_delay_; }; -int BlockProcessorImpl::instance_count_ = 0; +std::atomic BlockProcessorImpl::instance_count_(0); BlockProcessorImpl::BlockProcessorImpl( const EchoCanceller3Config& config, @@ -91,8 +89,7 @@ BlockProcessorImpl::BlockProcessorImpl( std::unique_ptr render_buffer, std::unique_ptr delay_controller, std::unique_ptr echo_remover) - : data_dumper_( - new ApmDataDumper(rtc::AtomicOps::Increment(&instance_count_))), + : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), config_(config), sample_rate_hz_(sample_rate_hz), render_buffer_(std::move(render_buffer)), @@ -104,21 +101,20 @@ BlockProcessorImpl::BlockProcessorImpl( BlockProcessorImpl::~BlockProcessorImpl() = default; -void BlockProcessorImpl::ProcessCapture( - bool echo_path_gain_change, - bool capture_signal_saturation, - std::vector>>* linear_output, - std::vector>>* capture_block) { +void BlockProcessorImpl::ProcessCapture(bool echo_path_gain_change, + bool capture_signal_saturation, + Block* linear_output, + Block* capture_block) { RTC_DCHECK(capture_block); - RTC_DCHECK_EQ(NumBandsForRate(sample_rate_hz_), capture_block->size()); - RTC_DCHECK_EQ(kBlockSize, (*capture_block)[0][0].size()); + RTC_DCHECK_EQ(NumBandsForRate(sample_rate_hz_), capture_block->NumBands()); capture_call_counter_++; data_dumper_->DumpRaw("aec3_processblock_call_order", static_cast(BlockProcessorApiCall::kCapture)); - data_dumper_->DumpWav("aec3_processblock_capture_input", kBlockSize, - &(*capture_block)[0][0][0], 16000, 1); + data_dumper_->DumpWav("aec3_processblock_capture_input", + capture_block->View(/*band=*/0, /*channel=*/0), 16000, + 1); if (render_properly_started_) { if 
(!capture_properly_started_) { @@ -159,8 +155,9 @@ void BlockProcessorImpl::ProcessCapture( delay_controller_->Reset(false); } - data_dumper_->DumpWav("aec3_processblock_capture_input2", kBlockSize, - &(*capture_block)[0][0][0], 16000, 1); + data_dumper_->DumpWav("aec3_processblock_capture_input2", + capture_block->View(/*band=*/0, /*channel=*/0), 16000, + 1); bool has_delay_estimator = !config_.delay.use_external_delay_estimator; if (has_delay_estimator) { @@ -169,7 +166,7 @@ void BlockProcessorImpl::ProcessCapture( // alignment. estimated_delay_ = delay_controller_->GetDelay( render_buffer_->GetDownsampledRenderBuffer(), render_buffer_->Delay(), - (*capture_block)[0]); + *capture_block); if (estimated_delay_) { bool delay_change = @@ -202,16 +199,12 @@ void BlockProcessorImpl::ProcessCapture( metrics_.UpdateCapture(false); } -void BlockProcessorImpl::BufferRender( - const std::vector>>& block) { - RTC_DCHECK_EQ(NumBandsForRate(sample_rate_hz_), block.size()); - RTC_DCHECK_EQ(kBlockSize, block[0][0].size()); +void BlockProcessorImpl::BufferRender(const Block& block) { + RTC_DCHECK_EQ(NumBandsForRate(sample_rate_hz_), block.NumBands()); data_dumper_->DumpRaw("aec3_processblock_call_order", static_cast(BlockProcessorApiCall::kRender)); - data_dumper_->DumpWav("aec3_processblock_render_input", kBlockSize, - &block[0][0][0], 16000, 1); - data_dumper_->DumpWav("aec3_processblock_render_input2", kBlockSize, - &block[0][0][0], 16000, 1); + data_dumper_->DumpWav("aec3_processblock_render_input", + block.View(/*band=*/0, /*channel=*/0), 16000, 1); render_event_ = render_buffer_->Insert(block); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.h index 41ce016dc0..01a83ae5f7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.h @@ -18,6 +18,7 @@ #include "api/audio/echo_canceller3_config.h" #include "api/audio/echo_control.h" +#include "modules/audio_processing/aec3/block.h" #include "modules/audio_processing/aec3/echo_remover.h" #include "modules/audio_processing/aec3/render_delay_buffer.h" #include "modules/audio_processing/aec3/render_delay_controller.h" @@ -56,15 +57,13 @@ class BlockProcessor { virtual void SetAudioBufferDelay(int delay_ms) = 0; // Processes a block of capture data. - virtual void ProcessCapture( - bool echo_path_gain_change, - bool capture_signal_saturation, - std::vector>>* linear_output, - std::vector>>* capture_block) = 0; + virtual void ProcessCapture(bool echo_path_gain_change, + bool capture_signal_saturation, + Block* linear_output, + Block* capture_block) = 0; // Buffers a block of render data supplied by a FrameBlocker object. - virtual void BufferRender( - const std::vector>>& render_block) = 0; + virtual void BufferRender(const Block& render_block) = 0; // Reports whether echo leakage has been detected in the echo canceller // output. 
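Aside: AecState earlier and BlockProcessorImpl in the hunks just shown replace the rtc::AtomicOps-based instance counter with std::atomic<int>. A minimal sketch (not part of the patch) of that pattern, using a hypothetical class name; fetch_add() returns the value before the increment, so the "+ 1" preserves the 1-based numbering handed to ApmDataDumper.

#include <atomic>

class InstanceCounted {
 public:
  // Each instance gets a unique, 1-based id even when constructors run
  // concurrently on different threads.
  InstanceCounted() : instance_id_(instance_count_.fetch_add(1) + 1) {}
  int instance_id() const { return instance_id_; }

 private:
  static std::atomic<int> instance_count_;
  const int instance_id_;
};

std::atomic<int> InstanceCounted::instance_count_(0);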
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor_metrics.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor_metrics.h index 4ba053683b..a70d0dac5b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor_metrics.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor_metrics.h @@ -11,8 +11,6 @@ #ifndef MODULES_AUDIO_PROCESSING_AEC3_BLOCK_PROCESSOR_METRICS_H_ #define MODULES_AUDIO_PROCESSING_AEC3_BLOCK_PROCESSOR_METRICS_H_ -#include "rtc_base/constructor_magic.h" - namespace webrtc { // Handles the reporting of metrics for the block_processor. @@ -20,6 +18,9 @@ class BlockProcessorMetrics { public: BlockProcessorMetrics() = default; + BlockProcessorMetrics(const BlockProcessorMetrics&) = delete; + BlockProcessorMetrics& operator=(const BlockProcessorMetrics&) = delete; + // Updates the metric with new capture data. void UpdateCapture(bool underrun); @@ -38,8 +39,6 @@ class BlockProcessorMetrics { int render_buffer_underruns_ = 0; int render_buffer_overruns_ = 0; int buffer_render_calls_ = 0; - - RTC_DISALLOW_COPY_AND_ASSIGN(BlockProcessorMetrics); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/comfort_noise_generator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/comfort_noise_generator.h index 16eaf3550f..2785b765c5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/comfort_noise_generator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/comfort_noise_generator.h @@ -19,7 +19,6 @@ #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/aec_state.h" #include "modules/audio_processing/aec3/fft_data.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/system/arch.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/config_selector.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/config_selector.cc new file mode 100644 index 0000000000..c55344da79 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/config_selector.cc @@ -0,0 +1,71 @@ + +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/aec3/config_selector.h" + +#include "rtc_base/checks.h" + +namespace webrtc { +namespace { + +// Validates that the mono and the multichannel configs have compatible fields. 
+bool CompatibleConfigs(const EchoCanceller3Config& mono_config, + const EchoCanceller3Config& multichannel_config) { + if (mono_config.delay.fixed_capture_delay_samples != + multichannel_config.delay.fixed_capture_delay_samples) { + return false; + } + if (mono_config.filter.export_linear_aec_output != + multichannel_config.filter.export_linear_aec_output) { + return false; + } + if (mono_config.filter.high_pass_filter_echo_reference != + multichannel_config.filter.high_pass_filter_echo_reference) { + return false; + } + if (mono_config.multi_channel.detect_stereo_content != + multichannel_config.multi_channel.detect_stereo_content) { + return false; + } + if (mono_config.multi_channel.stereo_detection_timeout_threshold_seconds != + multichannel_config.multi_channel + .stereo_detection_timeout_threshold_seconds) { + return false; + } + return true; +} + +} // namespace + +ConfigSelector::ConfigSelector( + const EchoCanceller3Config& config, + const absl::optional& multichannel_config, + int num_render_input_channels) + : config_(config), multichannel_config_(multichannel_config) { + if (multichannel_config_.has_value()) { + RTC_DCHECK(CompatibleConfigs(config_, *multichannel_config_)); + } + + Update(!config_.multi_channel.detect_stereo_content && + num_render_input_channels > 1); + + RTC_DCHECK(active_config_); +} + +void ConfigSelector::Update(bool multichannel_content) { + if (multichannel_content && multichannel_config_.has_value()) { + active_config_ = &(*multichannel_config_); + } else { + active_config_ = &config_; + } +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/config_selector.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/config_selector.h new file mode 100644 index 0000000000..3b3f94e5ac --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/config_selector.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_AEC3_CONFIG_SELECTOR_H_ +#define MODULES_AUDIO_PROCESSING_AEC3_CONFIG_SELECTOR_H_ + +#include "absl/types/optional.h" +#include "api/audio/echo_canceller3_config.h" + +namespace webrtc { + +// Selects the config to use. +class ConfigSelector { + public: + ConfigSelector( + const EchoCanceller3Config& config, + const absl::optional& multichannel_config, + int num_render_input_channels); + + // Updates the config selection based on the detection of multichannel + // content. 
+ void Update(bool multichannel_content); + + const EchoCanceller3Config& active_config() const { return *active_config_; } + + private: + const EchoCanceller3Config config_; + const absl::optional multichannel_config_; + const EchoCanceller3Config* active_config_ = nullptr; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_AEC3_CONFIG_SELECTOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/decimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/decimator.h index 3ccd292f08..dbff3d9fff 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/decimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/decimator.h @@ -17,7 +17,6 @@ #include "api/array_view.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/utility/cascaded_biquad_filter.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -26,6 +25,9 @@ class Decimator { public: explicit Decimator(size_t down_sampling_factor); + Decimator(const Decimator&) = delete; + Decimator& operator=(const Decimator&) = delete; + // Downsamples the signal. void Decimate(rtc::ArrayView in, rtc::ArrayView out); @@ -33,8 +35,6 @@ class Decimator { const size_t down_sampling_factor_; CascadedBiQuadFilter anti_aliasing_filter_; CascadedBiQuadFilter noise_reduction_filter_; - - RTC_DISALLOW_COPY_AND_ASSIGN(Decimator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/delay_estimate.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/delay_estimate.h index ea5dd27153..7838a0c255 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/delay_estimate.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/delay_estimate.h @@ -11,6 +11,8 @@ #ifndef MODULES_AUDIO_PROCESSING_AEC3_DELAY_ESTIMATE_H_ #define MODULES_AUDIO_PROCESSING_AEC3_DELAY_ESTIMATE_H_ +#include + namespace webrtc { // Stores delay_estimates. 
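Aside: config_selector.cc/.h above introduce ConfigSelector, which EchoCanceller3 drives from Initialize() in the echo_canceller3.cc hunks further down. A minimal usage sketch (not part of the patch), with an illustrative field tweak and a hypothetical function name:

#include "absl/types/optional.h"
#include "api/audio/echo_canceller3_config.h"
#include "modules/audio_processing/aec3/config_selector.h"

void ConfigSelectorSketch(bool proper_stereo_detected) {
  webrtc::EchoCanceller3Config mono_config;
  webrtc::EchoCanceller3Config multichannel_config = mono_config;
  // Only tune fields that CompatibleConfigs() above does not compare.
  multichannel_config.filter.coarse.length_blocks = 11;

  // With stereo render input and stereo-content detection enabled, the
  // selector starts from the mono config until proper stereo is detected.
  webrtc::ConfigSelector selector(mono_config, multichannel_config,
                                  /*num_render_input_channels=*/2);

  // EchoCanceller3::Initialize() calls Update() whenever the multichannel
  // content detector changes state; active_config() then refers to either
  // the mono or the multichannel variant and is forwarded to
  // BlockProcessor::Create().
  selector.Update(/*multichannel_content=*/proper_stereo_detected);
  const auto delay_samples =
      selector.active_config().delay.fixed_capture_delay_samples;
  (void)delay_samples;
}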
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_audibility.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_audibility.cc index 6ae414e3cf..142a33d5e0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_audibility.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_audibility.cc @@ -88,7 +88,7 @@ void EchoAudibility::UpdateRenderNoiseEstimator( bool EchoAudibility::IsRenderTooLow(const BlockBuffer& block_buffer) { const int num_render_channels = - static_cast(block_buffer.buffer[0][0].size()); + static_cast(block_buffer.buffer[0].NumChannels()); bool too_low = false; const int render_block_write_current = block_buffer.write; if (render_block_write_current == render_block_write_prev_) { @@ -98,7 +98,8 @@ bool EchoAudibility::IsRenderTooLow(const BlockBuffer& block_buffer) { idx = block_buffer.IncIndex(idx)) { float max_abs_over_channels = 0.f; for (int ch = 0; ch < num_render_channels; ++ch) { - auto block = block_buffer.buffer[idx][0][ch]; + rtc::ArrayView block = + block_buffer.buffer[idx].View(/*band=*/0, /*channel=*/ch); auto r = std::minmax_element(block.cbegin(), block.cend()); float max_abs_channel = std::max(std::fabs(*r.first), std::fabs(*r.second)); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_audibility.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_audibility.h index 1ffc017b7d..b9d6f87d2a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_audibility.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_audibility.h @@ -19,7 +19,6 @@ #include "modules/audio_processing/aec3/render_buffer.h" #include "modules/audio_processing/aec3/spectrum_buffer.h" #include "modules/audio_processing/aec3/stationarity_estimator.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc index 58fb6a49a3..e8e2175994 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc @@ -12,10 +12,10 @@ #include #include +#include "absl/strings/string_view.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/high_pass_filter.h" #include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" #include "system_wrappers/include/field_trial.h" @@ -38,7 +38,7 @@ bool DetectSaturation(rtc::ArrayView y) { // Retrieves a value from a field trial if it is available. If no value is // present, the default value is returned. If the retrieved value is beyond the // specified limits, the default value is returned instead. 
-void RetrieveFieldTrialValue(const char* trial_name, +void RetrieveFieldTrialValue(absl::string_view trial_name, float min, float max, float* value_to_update) { @@ -58,7 +58,7 @@ void RetrieveFieldTrialValue(const char* trial_name, } } -void RetrieveFieldTrialValue(const char* trial_name, +void RetrieveFieldTrialValue(absl::string_view trial_name, int min, int max, int* value_to_update) { @@ -96,18 +96,50 @@ void FillSubFrameView( } void FillSubFrameView( + bool proper_downmix_needed, std::vector>>* frame, size_t sub_frame_index, std::vector>>* sub_frame_view) { RTC_DCHECK_GE(1, sub_frame_index); RTC_DCHECK_EQ(frame->size(), sub_frame_view->size()); - RTC_DCHECK_EQ((*frame)[0].size(), (*sub_frame_view)[0].size()); - for (size_t band = 0; band < frame->size(); ++band) { - for (size_t channel = 0; channel < (*frame)[band].size(); ++channel) { - (*sub_frame_view)[band][channel] = rtc::ArrayView( - &(*frame)[band][channel][sub_frame_index * kSubFrameLength], + const size_t frame_num_channels = (*frame)[0].size(); + const size_t sub_frame_num_channels = (*sub_frame_view)[0].size(); + if (frame_num_channels > sub_frame_num_channels) { + RTC_DCHECK_EQ(sub_frame_num_channels, 1u); + if (proper_downmix_needed) { + // When a proper downmix is needed (which is the case when proper stereo + // is present in the echo reference signal but the echo canceller does the + // processing in mono) downmix the echo reference by averaging the channel + // content (otherwise downmixing is done by selecting channel 0). + for (size_t band = 0; band < frame->size(); ++band) { + for (size_t ch = 1; ch < frame_num_channels; ++ch) { + for (size_t k = 0; k < kSubFrameLength; ++k) { + (*frame)[band][/*channel=*/0] + [sub_frame_index * kSubFrameLength + k] += + (*frame)[band][ch][sub_frame_index * kSubFrameLength + k]; + } + } + const float one_by_num_channels = 1.0f / frame_num_channels; + for (size_t k = 0; k < kSubFrameLength; ++k) { + (*frame)[band][/*channel=*/0][sub_frame_index * kSubFrameLength + + k] *= one_by_num_channels; + } + } + } + for (size_t band = 0; band < frame->size(); ++band) { + (*sub_frame_view)[band][/*channel=*/0] = rtc::ArrayView( + &(*frame)[band][/*channel=*/0][sub_frame_index * kSubFrameLength], kSubFrameLength); } + } else { + RTC_DCHECK_EQ(frame_num_channels, sub_frame_num_channels); + for (size_t band = 0; band < frame->size(); ++band) { + for (size_t channel = 0; channel < (*frame)[band].size(); ++channel) { + (*sub_frame_view)[band][channel] = rtc::ArrayView( + &(*frame)[band][channel][sub_frame_index * kSubFrameLength], + kSubFrameLength); + } + } } } @@ -115,16 +147,17 @@ void ProcessCaptureFrameContent( AudioBuffer* linear_output, AudioBuffer* capture, bool level_change, + bool aec_reference_is_downmixed_stereo, bool saturated_microphone_signal, size_t sub_frame_index, FrameBlocker* capture_blocker, BlockFramer* linear_output_framer, BlockFramer* output_framer, BlockProcessor* block_processor, - std::vector>>* linear_output_block, + Block* linear_output_block, std::vector>>* linear_output_sub_frame_view, - std::vector>>* capture_block, + Block* capture_block, std::vector>>* capture_sub_frame_view) { FillSubFrameView(capture, sub_frame_index, capture_sub_frame_view); @@ -138,8 +171,10 @@ void ProcessCaptureFrameContent( capture_blocker->InsertSubFrameAndExtractBlock(*capture_sub_frame_view, capture_block); - block_processor->ProcessCapture(level_change, saturated_microphone_signal, - linear_output_block, capture_block); + block_processor->ProcessCapture( + 
/*echo_path_gain_change=*/level_change || + aec_reference_is_downmixed_stereo, + saturated_microphone_signal, linear_output_block, capture_block); output_framer->InsertBlockAndExtractSubFrame(*capture_block, capture_sub_frame_view); @@ -150,22 +185,24 @@ void ProcessCaptureFrameContent( } } -void ProcessRemainingCaptureFrameContent( - bool level_change, - bool saturated_microphone_signal, - FrameBlocker* capture_blocker, - BlockFramer* linear_output_framer, - BlockFramer* output_framer, - BlockProcessor* block_processor, - std::vector>>* linear_output_block, - std::vector>>* block) { +void ProcessRemainingCaptureFrameContent(bool level_change, + bool aec_reference_is_downmixed_stereo, + bool saturated_microphone_signal, + FrameBlocker* capture_blocker, + BlockFramer* linear_output_framer, + BlockFramer* output_framer, + BlockProcessor* block_processor, + Block* linear_output_block, + Block* block) { if (!capture_blocker->IsBlockAvailable()) { return; } capture_blocker->ExtractBlock(block); - block_processor->ProcessCapture(level_change, saturated_microphone_signal, - linear_output_block, block); + block_processor->ProcessCapture( + /*echo_path_gain_change=*/level_change || + aec_reference_is_downmixed_stereo, + saturated_microphone_signal, linear_output_block, block); output_framer->InsertBlock(*block); if (linear_output_framer) { @@ -175,21 +212,22 @@ void ProcessRemainingCaptureFrameContent( } void BufferRenderFrameContent( + bool proper_downmix_needed, std::vector>>* render_frame, size_t sub_frame_index, FrameBlocker* render_blocker, BlockProcessor* block_processor, - std::vector>>* block, + Block* block, std::vector>>* sub_frame_view) { - FillSubFrameView(render_frame, sub_frame_index, sub_frame_view); + FillSubFrameView(proper_downmix_needed, render_frame, sub_frame_index, + sub_frame_view); render_blocker->InsertSubFrameAndExtractBlock(*sub_frame_view, block); block_processor->BufferRender(*block); } -void BufferRemainingRenderFrameContent( - FrameBlocker* render_blocker, - BlockProcessor* block_processor, - std::vector>>* block) { +void BufferRemainingRenderFrameContent(FrameBlocker* render_blocker, + BlockProcessor* block_processor, + Block* block) { if (!render_blocker->IsBlockAvailable()) { return; } @@ -221,6 +259,10 @@ void CopyBufferIntoFrame(const AudioBuffer& buffer, EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config) { EchoCanceller3Config adjusted_cfg = config; + if (field_trial::IsEnabled("WebRTC-Aec3StereoContentDetectionKillSwitch")) { + adjusted_cfg.multi_channel.detect_stereo_content = false; + } + if (field_trial::IsEnabled("WebRTC-Aec3AntiHowlingMinimizationKillSwitch")) { adjusted_cfg.suppressor.high_bands_suppression .anti_howling_activation_threshold = 25.f; @@ -290,6 +332,10 @@ EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config) { adjusted_cfg.ep_strength.use_conservative_tail_frequency_response = true; } + if (field_trial::IsDisabled("WebRTC-Aec3ConservativeTailFreqResponse")) { + adjusted_cfg.ep_strength.use_conservative_tail_frequency_response = false; + } + if (field_trial::IsEnabled("WebRTC-Aec3ShortHeadroomKillSwitch")) { // Two blocks headroom. 
adjusted_cfg.delay.delay_headroom_samples = kBlockSize * 2; @@ -332,6 +378,14 @@ EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config) { false; } + if (field_trial::IsEnabled("WebRTC-Aec3DelayEstimatorDetectPreEcho")) { + adjusted_cfg.delay.detect_pre_echo = true; + } + + if (field_trial::IsDisabled("WebRTC-Aec3DelayEstimatorDetectPreEcho")) { + adjusted_cfg.delay.detect_pre_echo = false; + } + if (field_trial::IsEnabled("WebRTC-Aec3SensitiveDominantNearendActivation")) { adjusted_cfg.suppressor.dominant_nearend_detection.enr_threshold = 0.5f; } else if (field_trial::IsEnabled( @@ -661,103 +715,117 @@ void EchoCanceller3::RenderWriter::Insert(const AudioBuffer& input) { static_cast(render_transfer_queue_->Insert(&render_queue_input_frame_)); } -int EchoCanceller3::instance_count_ = 0; - -EchoCanceller3::EchoCanceller3(const EchoCanceller3Config& config, - int sample_rate_hz, - size_t num_render_channels, - size_t num_capture_channels) - : EchoCanceller3(AdjustConfig(config), - sample_rate_hz, - num_render_channels, - num_capture_channels, - std::unique_ptr( - BlockProcessor::Create(AdjustConfig(config), - sample_rate_hz, - num_render_channels, - num_capture_channels))) {} -EchoCanceller3::EchoCanceller3(const EchoCanceller3Config& config, - int sample_rate_hz, - size_t num_render_channels, - size_t num_capture_channels, - std::unique_ptr block_processor) - : data_dumper_( - new ApmDataDumper(rtc::AtomicOps::Increment(&instance_count_))), - config_(config), +std::atomic EchoCanceller3::instance_count_(0); + +EchoCanceller3::EchoCanceller3( + const EchoCanceller3Config& config, + const absl::optional& multichannel_config, + int sample_rate_hz, + size_t num_render_channels, + size_t num_capture_channels) + : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), + config_(AdjustConfig(config)), sample_rate_hz_(sample_rate_hz), num_bands_(NumBandsForRate(sample_rate_hz_)), - num_render_channels_(num_render_channels), + num_render_input_channels_(num_render_channels), num_capture_channels_(num_capture_channels), + config_selector_(AdjustConfig(config), + multichannel_config, + num_render_input_channels_), + multichannel_content_detector_( + config_selector_.active_config().multi_channel.detect_stereo_content, + num_render_input_channels_, + config_selector_.active_config() + .multi_channel.stereo_detection_threshold, + config_selector_.active_config() + .multi_channel.stereo_detection_timeout_threshold_seconds, + config_selector_.active_config() + .multi_channel.stereo_detection_hysteresis_seconds), output_framer_(num_bands_, num_capture_channels_), capture_blocker_(num_bands_, num_capture_channels_), - render_blocker_(num_bands_, num_render_channels_), render_transfer_queue_( kRenderTransferQueueSizeFrames, std::vector>>( num_bands_, std::vector>( - num_render_channels_, + num_render_input_channels_, std::vector(AudioBuffer::kSplitBandSize, 0.f))), Aec3RenderQueueItemVerifier(num_bands_, - num_render_channels_, + num_render_input_channels_, AudioBuffer::kSplitBandSize)), - block_processor_(std::move(block_processor)), render_queue_output_frame_( num_bands_, std::vector>( - num_render_channels_, + num_render_input_channels_, std::vector(AudioBuffer::kSplitBandSize, 0.f))), - render_block_( - num_bands_, - std::vector>(num_render_channels_, - std::vector(kBlockSize, 0.f))), - capture_block_( - num_bands_, - std::vector>(num_capture_channels_, - std::vector(kBlockSize, 0.f))), - render_sub_frame_view_( - num_bands_, - std::vector>(num_render_channels_)), + 
render_block_(num_bands_, num_render_input_channels_), + capture_block_(num_bands_, num_capture_channels_), capture_sub_frame_view_( num_bands_, std::vector>(num_capture_channels_)) { RTC_DCHECK(ValidFullBandRate(sample_rate_hz_)); - if (config_.delay.fixed_capture_delay_samples > 0) { + if (config_selector_.active_config().delay.fixed_capture_delay_samples > 0) { block_delay_buffer_.reset(new BlockDelayBuffer( num_capture_channels_, num_bands_, AudioBuffer::kSplitBandSize, config_.delay.fixed_capture_delay_samples)); } - render_writer_.reset(new RenderWriter(data_dumper_.get(), config_, - &render_transfer_queue_, num_bands_, - num_render_channels_)); + render_writer_.reset(new RenderWriter( + data_dumper_.get(), config_selector_.active_config(), + &render_transfer_queue_, num_bands_, num_render_input_channels_)); RTC_DCHECK_EQ(num_bands_, std::max(sample_rate_hz_, 16000) / 16000); RTC_DCHECK_GE(kMaxNumBands, num_bands_); - if (config_.filter.export_linear_aec_output) { - linear_output_framer_.reset(new BlockFramer(1, num_capture_channels_)); + if (config_selector_.active_config().filter.export_linear_aec_output) { + linear_output_framer_.reset( + new BlockFramer(/*num_bands=*/1, num_capture_channels_)); linear_output_block_ = - std::make_unique>>>( - 1, std::vector>( - num_capture_channels_, std::vector(kBlockSize, 0.f))); + std::make_unique(/*num_bands=*/1, num_capture_channels_), linear_output_sub_frame_view_ = std::vector>>( 1, std::vector>(num_capture_channels_)); } + Initialize(); + RTC_LOG(LS_INFO) << "AEC3 created with sample rate: " << sample_rate_hz_ - << " Hz, num render channels: " << num_render_channels_ + << " Hz, num render channels: " << num_render_input_channels_ << ", num capture channels: " << num_capture_channels_; } EchoCanceller3::~EchoCanceller3() = default; +void EchoCanceller3::Initialize() { + RTC_DCHECK_RUNS_SERIALIZED(&capture_race_checker_); + + num_render_channels_to_aec_ = + multichannel_content_detector_.IsProperMultiChannelContentDetected() + ? num_render_input_channels_ + : 1; + + config_selector_.Update( + multichannel_content_detector_.IsProperMultiChannelContentDetected()); + + render_block_.SetNumChannels(num_render_channels_to_aec_); + + render_blocker_.reset( + new FrameBlocker(num_bands_, num_render_channels_to_aec_)); + + block_processor_.reset(BlockProcessor::Create( + config_selector_.active_config(), sample_rate_hz_, + num_render_channels_to_aec_, num_capture_channels_)); + + render_sub_frame_view_ = std::vector>>( + num_bands_, + std::vector>(num_render_channels_to_aec_)); +} + void EchoCanceller3::AnalyzeRender(const AudioBuffer& render) { RTC_DCHECK_RUNS_SERIALIZED(&render_race_checker_); - RTC_DCHECK_EQ(render.num_channels(), num_render_channels_); + RTC_DCHECK_EQ(render.num_channels(), num_render_input_channels_); data_dumper_->DumpRaw("aec3_call_order", static_cast(EchoCanceller3ApiCall::kRender)); @@ -805,7 +873,7 @@ void EchoCanceller3::ProcessCapture(AudioBuffer* capture, api_call_metrics_.ReportCaptureCall(); // Optionally delay the capture signal. 
- if (config_.delay.fixed_capture_delay_samples > 0) { + if (config_selector_.active_config().delay.fixed_capture_delay_samples > 0) { RTC_DCHECK(block_delay_buffer_); block_delay_buffer_->DelaySignal(capture); } @@ -817,22 +885,26 @@ void EchoCanceller3::ProcessCapture(AudioBuffer* capture, EmptyRenderQueue(); - ProcessCaptureFrameContent(linear_output, capture, level_change, - saturated_microphone_signal_, 0, &capture_blocker_, - linear_output_framer_.get(), &output_framer_, - block_processor_.get(), linear_output_block_.get(), - &linear_output_sub_frame_view_, &capture_block_, - &capture_sub_frame_view_); + ProcessCaptureFrameContent( + linear_output, capture, level_change, + multichannel_content_detector_.IsTemporaryMultiChannelContentDetected(), + saturated_microphone_signal_, 0, &capture_blocker_, + linear_output_framer_.get(), &output_framer_, block_processor_.get(), + linear_output_block_.get(), &linear_output_sub_frame_view_, + &capture_block_, &capture_sub_frame_view_); - ProcessCaptureFrameContent(linear_output, capture, level_change, - saturated_microphone_signal_, 1, &capture_blocker_, - linear_output_framer_.get(), &output_framer_, - block_processor_.get(), linear_output_block_.get(), - &linear_output_sub_frame_view_, &capture_block_, - &capture_sub_frame_view_); + ProcessCaptureFrameContent( + linear_output, capture, level_change, + multichannel_content_detector_.IsTemporaryMultiChannelContentDetected(), + saturated_microphone_signal_, 1, &capture_blocker_, + linear_output_framer_.get(), &output_framer_, block_processor_.get(), + linear_output_block_.get(), &linear_output_sub_frame_view_, + &capture_block_, &capture_sub_frame_view_); ProcessRemainingCaptureFrameContent( - level_change, saturated_microphone_signal_, &capture_blocker_, + level_change, + multichannel_content_detector_.IsTemporaryMultiChannelContentDetected(), + saturated_microphone_signal_, &capture_blocker_, linear_output_framer_.get(), &output_framer_, block_processor_.get(), linear_output_block_.get(), &capture_block_); @@ -861,25 +933,28 @@ bool EchoCanceller3::ActiveProcessing() const { return true; } -EchoCanceller3Config EchoCanceller3::CreateDefaultConfig( - size_t num_render_channels, - size_t num_capture_channels) { +EchoCanceller3Config EchoCanceller3::CreateDefaultMultichannelConfig() { EchoCanceller3Config cfg; - if (num_render_channels > 1) { - // Use shorter and more rapidly adapting coarse filter to compensate for - // thge increased number of total filter parameters to adapt. - cfg.filter.coarse.length_blocks = 11; - cfg.filter.coarse.rate = 0.95f; - cfg.filter.coarse_initial.length_blocks = 11; - cfg.filter.coarse_initial.rate = 0.95f; - - // Use more concervative suppressor behavior for non-nearend speech. - cfg.suppressor.normal_tuning.max_dec_factor_lf = 0.35f; - cfg.suppressor.normal_tuning.max_inc_factor = 1.5f; - } + // Use shorter and more rapidly adapting coarse filter to compensate for + // thge increased number of total filter parameters to adapt. + cfg.filter.coarse.length_blocks = 11; + cfg.filter.coarse.rate = 0.95f; + cfg.filter.coarse_initial.length_blocks = 11; + cfg.filter.coarse_initial.rate = 0.95f; + + // Use more concervative suppressor behavior for non-nearend speech. 
+ cfg.suppressor.normal_tuning.max_dec_factor_lf = 0.35f; + cfg.suppressor.normal_tuning.max_inc_factor = 1.5f; return cfg; } +void EchoCanceller3::SetBlockProcessorForTesting( + std::unique_ptr block_processor) { + RTC_DCHECK_RUNS_SERIALIZED(&capture_race_checker_); + RTC_DCHECK(block_processor); + block_processor_ = std::move(block_processor); +} + void EchoCanceller3::EmptyRenderQueue() { RTC_DCHECK_RUNS_SERIALIZED(&capture_race_checker_); bool frame_to_buffer = @@ -888,16 +963,27 @@ void EchoCanceller3::EmptyRenderQueue() { // Report render call in the metrics. api_call_metrics_.ReportRenderCall(); - BufferRenderFrameContent(&render_queue_output_frame_, 0, &render_blocker_, - block_processor_.get(), &render_block_, - &render_sub_frame_view_); - - BufferRenderFrameContent(&render_queue_output_frame_, 1, &render_blocker_, - block_processor_.get(), &render_block_, - &render_sub_frame_view_); + if (multichannel_content_detector_.UpdateDetection( + render_queue_output_frame_)) { + // Reinitialize the AEC when proper stereo is detected. + Initialize(); + } - BufferRemainingRenderFrameContent(&render_blocker_, block_processor_.get(), - &render_block_); + // Buffer frame content. + BufferRenderFrameContent( + /*proper_downmix_needed=*/multichannel_content_detector_ + .IsTemporaryMultiChannelContentDetected(), + &render_queue_output_frame_, 0, render_blocker_.get(), + block_processor_.get(), &render_block_, &render_sub_frame_view_); + + BufferRenderFrameContent( + /*proper_downmix_needed=*/multichannel_content_detector_ + .IsTemporaryMultiChannelContentDetected(), + &render_queue_output_frame_, 1, render_blocker_.get(), + block_processor_.get(), &render_block_, &render_sub_frame_view_); + + BufferRemainingRenderFrameContent(render_blocker_.get(), + block_processor_.get(), &render_block_); frame_to_buffer = render_transfer_queue_.Remove(&render_queue_output_frame_); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.h index a4aab4987f..7bf8e51a4b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.h @@ -13,9 +13,11 @@ #include +#include #include #include +#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" #include "api/audio/echo_control.h" @@ -23,7 +25,9 @@ #include "modules/audio_processing/aec3/block_delay_buffer.h" #include "modules/audio_processing/aec3/block_framer.h" #include "modules/audio_processing/aec3/block_processor.h" +#include "modules/audio_processing/aec3/config_selector.h" #include "modules/audio_processing/aec3/frame_blocker.h" +#include "modules/audio_processing/aec3/multi_channel_content_detector.h" #include "modules/audio_processing/audio_buffer.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" @@ -84,18 +88,15 @@ class Aec3RenderQueueItemVerifier { // AnalyzeRender call which can be called concurrently with the other methods. class EchoCanceller3 : public EchoControl { public: - // Normal c-tor to use. - EchoCanceller3(const EchoCanceller3Config& config, - int sample_rate_hz, - size_t num_render_channels, - size_t num_capture_channels); - // Testing c-tor that is used only for testing purposes. 
- EchoCanceller3(const EchoCanceller3Config& config, - int sample_rate_hz, - size_t num_render_channels, - size_t num_capture_channels, - std::unique_ptr block_processor); + EchoCanceller3( + const EchoCanceller3Config& config, + const absl::optional& multichannel_config, + int sample_rate_hz, + size_t num_render_channels, + size_t num_capture_channels); + ~EchoCanceller3() override; + EchoCanceller3(const EchoCanceller3&) = delete; EchoCanceller3& operator=(const EchoCanceller3&) = delete; @@ -135,14 +136,39 @@ class EchoCanceller3 : public EchoControl { block_processor_->UpdateEchoLeakageStatus(leakage_detected); } - // Produces a default configuration that is suitable for a certain combination - // of render and capture channels. - static EchoCanceller3Config CreateDefaultConfig(size_t num_render_channels, - size_t num_capture_channels); + // Produces a default configuration for multichannel. + static EchoCanceller3Config CreateDefaultMultichannelConfig(); private: + friend class EchoCanceller3Tester; + FRIEND_TEST_ALL_PREFIXES(EchoCanceller3, DetectionOfProperStereo); + FRIEND_TEST_ALL_PREFIXES(EchoCanceller3, + DetectionOfProperStereoUsingThreshold); + FRIEND_TEST_ALL_PREFIXES(EchoCanceller3, + DetectionOfProperStereoUsingHysteresis); + FRIEND_TEST_ALL_PREFIXES(EchoCanceller3, + StereoContentDetectionForMonoSignals); + class RenderWriter; + // (Re-)Initializes the selected subset of the EchoCanceller3 fields, at + // creation as well as during reconfiguration. + void Initialize(); + + // Only for testing. Replaces the internal block processor. + void SetBlockProcessorForTesting( + std::unique_ptr block_processor); + + // Only for testing. Returns whether stereo processing is active. + bool StereoRenderProcessingActiveForTesting() const { + return multichannel_content_detector_.IsProperMultiChannelContentDetected(); + } + + // Only for testing. + const EchoCanceller3Config& GetActiveConfigForTesting() const { + return config_selector_.active_config(); + } + // Empties the render SwapQueue. void EmptyRenderQueue(); @@ -160,18 +186,22 @@ class EchoCanceller3 : public EchoControl { RTC_GUARDED_BY(render_race_checker_); // State that may be accessed by the capture thread. 
- static int instance_count_; + static std::atomic instance_count_; std::unique_ptr data_dumper_; const EchoCanceller3Config config_; const int sample_rate_hz_; const int num_bands_; - const size_t num_render_channels_; + const size_t num_render_input_channels_; + size_t num_render_channels_to_aec_; const size_t num_capture_channels_; + ConfigSelector config_selector_; + MultiChannelContentDetector multichannel_content_detector_; std::unique_ptr linear_output_framer_ RTC_GUARDED_BY(capture_race_checker_); BlockFramer output_framer_ RTC_GUARDED_BY(capture_race_checker_); FrameBlocker capture_blocker_ RTC_GUARDED_BY(capture_race_checker_); - FrameBlocker render_blocker_ RTC_GUARDED_BY(capture_race_checker_); + std::unique_ptr render_blocker_ + RTC_GUARDED_BY(capture_race_checker_); SwapQueue>>, Aec3RenderQueueItemVerifier> render_transfer_queue_; @@ -181,12 +211,10 @@ class EchoCanceller3 : public EchoControl { RTC_GUARDED_BY(capture_race_checker_); bool saturated_microphone_signal_ RTC_GUARDED_BY(capture_race_checker_) = false; - std::vector>> render_block_ - RTC_GUARDED_BY(capture_race_checker_); - std::unique_ptr>>> - linear_output_block_ RTC_GUARDED_BY(capture_race_checker_); - std::vector>> capture_block_ + Block render_block_ RTC_GUARDED_BY(capture_race_checker_); + std::unique_ptr linear_output_block_ RTC_GUARDED_BY(capture_race_checker_); + Block capture_block_ RTC_GUARDED_BY(capture_race_checker_); std::vector>> render_sub_frame_view_ RTC_GUARDED_BY(capture_race_checker_); std::vector>> linear_output_sub_frame_view_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.cc index 8a78834143..fc83ca2f89 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.cc @@ -43,10 +43,11 @@ EchoPathDelayEstimator::EchoPathDelayEstimator( : config.render_levels.poor_excitation_render_limit, config.delay.delay_estimate_smoothing, config.delay.delay_estimate_smoothing_delay_found, - config.delay.delay_candidate_detection_threshold), + config.delay.delay_candidate_detection_threshold, + config.delay.detect_pre_echo), matched_filter_lag_aggregator_(data_dumper_, matched_filter_.GetMaxFilterLag(), - config.delay.delay_selection_thresholds) { + config.delay) { RTC_DCHECK(data_dumper); RTC_DCHECK(down_sampling_factor_ > 0); } @@ -59,9 +60,7 @@ void EchoPathDelayEstimator::Reset(bool reset_delay_confidence) { absl::optional EchoPathDelayEstimator::EstimateDelay( const DownsampledRenderBuffer& render_buffer, - const std::vector>& capture) { - RTC_DCHECK_EQ(kBlockSize, capture[0].size()); - + const Block& capture) { std::array downsampled_capture_data; rtc::ArrayView downsampled_capture(downsampled_capture_data.data(), sub_block_size_); @@ -77,13 +76,14 @@ absl::optional EchoPathDelayEstimator::EstimateDelay( absl::optional aggregated_matched_filter_lag = matched_filter_lag_aggregator_.Aggregate( - matched_filter_.GetLagEstimates()); + matched_filter_.GetBestLagEstimate()); // Run clockdrift detection. 
if (aggregated_matched_filter_lag && (*aggregated_matched_filter_lag).quality == DelayEstimate::Quality::kRefined) - clockdrift_detector_.Update((*aggregated_matched_filter_lag).delay); + clockdrift_detector_.Update( + matched_filter_lag_aggregator_.GetDelayAtHighestPeak()); // TODO(peah): Move this logging outside of this class once EchoCanceller3 // development is done. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.h index 6c8c21282e..b24d0a29ec 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.h @@ -16,12 +16,12 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "modules/audio_processing/aec3/alignment_mixer.h" +#include "modules/audio_processing/aec3/block.h" #include "modules/audio_processing/aec3/clockdrift_detector.h" #include "modules/audio_processing/aec3/decimator.h" #include "modules/audio_processing/aec3/delay_estimate.h" #include "modules/audio_processing/aec3/matched_filter.h" #include "modules/audio_processing/aec3/matched_filter_lag_aggregator.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -37,6 +37,9 @@ class EchoPathDelayEstimator { size_t num_capture_channels); ~EchoPathDelayEstimator(); + EchoPathDelayEstimator(const EchoPathDelayEstimator&) = delete; + EchoPathDelayEstimator& operator=(const EchoPathDelayEstimator&) = delete; + // Resets the estimation. If the delay confidence is reset, the reset behavior // is as if the call is restarted. void Reset(bool reset_delay_confidence); @@ -44,7 +47,7 @@ class EchoPathDelayEstimator { // Produce a delay estimate if such is avaliable. absl::optional EstimateDelay( const DownsampledRenderBuffer& render_buffer, - const std::vector>& capture); + const Block& capture); // Log delay estimator properties. void LogDelayEstimationProperties(int sample_rate_hz, size_t shift) const { @@ -71,8 +74,6 @@ class EchoPathDelayEstimator { // Internal reset method with more granularity. void Reset(bool reset_lag_aggregator, bool reset_delay_confidence); - - RTC_DISALLOW_COPY_AND_ASSIGN(EchoPathDelayEstimator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.cc index 2bfaa951d8..673d88af03 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.cc @@ -14,6 +14,7 @@ #include #include +#include #include #include @@ -33,7 +34,6 @@ #include "modules/audio_processing/aec3/suppression_filter.h" #include "modules/audio_processing/aec3/suppression_gain.h" #include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -118,13 +118,12 @@ class EchoRemoverImpl final : public EchoRemover { // Removes the echo from a block of samples from the capture signal. The // supplied render signal is assumed to be pre-aligned with the capture // signal. 
- void ProcessCapture( - EchoPathVariability echo_path_variability, - bool capture_signal_saturation, - const absl::optional& external_delay, - RenderBuffer* render_buffer, - std::vector>>* linear_output, - std::vector>>* capture) override; + void ProcessCapture(EchoPathVariability echo_path_variability, + bool capture_signal_saturation, + const absl::optional& external_delay, + RenderBuffer* render_buffer, + Block* linear_output, + Block* capture) override; // Updates the status on whether echo leakage is detected in the output of the // echo remover. @@ -143,7 +142,7 @@ class EchoRemoverImpl final : public EchoRemover { void FormLinearFilterOutput(const SubtractorOutput& subtractor_output, rtc::ArrayView output); - static int instance_count_; + static std::atomic instance_count_; const EchoCanceller3Config config_; const Aec3Fft fft_; std::unique_ptr data_dumper_; @@ -181,7 +180,7 @@ class EchoRemoverImpl final : public EchoRemover { std::vector subtractor_output_heap_; }; -int EchoRemoverImpl::instance_count_ = 0; +std::atomic EchoRemoverImpl::instance_count_(0); EchoRemoverImpl::EchoRemoverImpl(const EchoCanceller3Config& config, int sample_rate_hz, @@ -189,8 +188,7 @@ EchoRemoverImpl::EchoRemoverImpl(const EchoCanceller3Config& config, size_t num_capture_channels) : config_(config), fft_(), - data_dumper_( - new ApmDataDumper(rtc::AtomicOps::Increment(&instance_count_))), + data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), optimization_(DetectOptimization()), sample_rate_hz_(sample_rate_hz), num_render_channels_(num_render_channels), @@ -243,20 +241,17 @@ void EchoRemoverImpl::ProcessCapture( bool capture_signal_saturation, const absl::optional& external_delay, RenderBuffer* render_buffer, - std::vector>>* linear_output, - std::vector>>* capture) { + Block* linear_output, + Block* capture) { ++block_counter_; - const std::vector>>& x = - render_buffer->Block(0); - std::vector>>* y = capture; + const Block& x = render_buffer->GetBlock(0); + Block* y = capture; RTC_DCHECK(render_buffer); RTC_DCHECK(y); - RTC_DCHECK_EQ(x.size(), NumBandsForRate(sample_rate_hz_)); - RTC_DCHECK_EQ(y->size(), NumBandsForRate(sample_rate_hz_)); - RTC_DCHECK_EQ(x[0].size(), num_render_channels_); - RTC_DCHECK_EQ((*y)[0].size(), num_capture_channels_); - RTC_DCHECK_EQ(x[0][0].size(), kBlockSize); - RTC_DCHECK_EQ((*y)[0][0].size(), kBlockSize); + RTC_DCHECK_EQ(x.NumBands(), NumBandsForRate(sample_rate_hz_)); + RTC_DCHECK_EQ(y->NumBands(), NumBandsForRate(sample_rate_hz_)); + RTC_DCHECK_EQ(x.NumChannels(), num_render_channels_); + RTC_DCHECK_EQ(y->NumChannels(), num_capture_channels_); // Stack allocated data to use when the number of channels is low. 
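The instance_count_ change above swaps rtc::AtomicOps::Increment for a plain std::atomic<int>. A minimal sketch of the pattern, assuming the old helper returned the already incremented value, which fetch_add(1) + 1 reproduces since fetch_add returns the value before the increment:

#include <atomic>
#include <cstdio>

class InstanceCounted {
 public:
  // Each instance takes a unique, 1-based id for its data dumper.
  InstanceCounted() : id_(instance_count_.fetch_add(1) + 1) {}
  int id() const { return id_; }

 private:
  static std::atomic<int> instance_count_;
  const int id_;
};

std::atomic<int> InstanceCounted::instance_count_(0);

int main() {
  InstanceCounted a, b;
  std::printf("%d %d\n", a.id(), b.id());  // Prints "1 2".
}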
std::array, kMaxNumChannelsOnStack> e_stack; @@ -321,12 +316,14 @@ void EchoRemoverImpl::ProcessCapture( subtractor_output_heap_.data(), num_capture_channels_); } - data_dumper_->DumpWav("aec3_echo_remover_capture_input", kBlockSize, - &(*y)[0][0][0], 16000, 1); - data_dumper_->DumpWav("aec3_echo_remover_render_input", kBlockSize, - &x[0][0][0], 16000, 1); - data_dumper_->DumpRaw("aec3_echo_remover_capture_input", (*y)[0][0]); - data_dumper_->DumpRaw("aec3_echo_remover_render_input", x[0][0]); + data_dumper_->DumpWav("aec3_echo_remover_capture_input", + y->View(/*band=*/0, /*channel=*/0), 16000, 1); + data_dumper_->DumpWav("aec3_echo_remover_render_input", + x.View(/*band=*/0, /*channel=*/0), 16000, 1); + data_dumper_->DumpRaw("aec3_echo_remover_capture_input", + y->View(/*band=*/0, /*channel=*/0)); + data_dumper_->DumpRaw("aec3_echo_remover_render_input", + x.View(/*band=*/0, /*channel=*/0)); aec_state_.UpdateCaptureSaturation(capture_signal_saturation); @@ -369,13 +366,13 @@ void EchoRemoverImpl::ProcessCapture( } // Perform linear echo cancellation. - subtractor_.Process(*render_buffer, (*y)[0], render_signal_analyzer_, - aec_state_, subtractor_output); + subtractor_.Process(*render_buffer, *y, render_signal_analyzer_, aec_state_, + subtractor_output); // Compute spectra. for (size_t ch = 0; ch < num_capture_channels_; ++ch) { FormLinearFilterOutput(subtractor_output[ch], e[ch]); - WindowedPaddedFft(fft_, (*y)[0][ch], y_old_[ch], &Y[ch]); + WindowedPaddedFft(fft_, y->View(/*band=*/0, ch), y_old_[ch], &Y[ch]); WindowedPaddedFft(fft_, e[ch], e_old_[ch], &E[ch]); LinearEchoPower(E[ch], Y[ch], &S2_linear[ch]); Y[ch].Spectrum(optimization_, Y2[ch]); @@ -384,11 +381,11 @@ void EchoRemoverImpl::ProcessCapture( // Optionally return the linear filter output. if (linear_output) { - RTC_DCHECK_GE(1, linear_output->size()); - RTC_DCHECK_EQ(num_capture_channels_, linear_output[0].size()); + RTC_DCHECK_GE(1, linear_output->NumBands()); + RTC_DCHECK_EQ(num_capture_channels_, linear_output->NumChannels()); for (size_t ch = 0; ch < num_capture_channels_; ++ch) { - RTC_DCHECK_EQ(kBlockSize, (*linear_output)[0][ch].size()); - std::copy(e[ch].begin(), e[ch].end(), (*linear_output)[0][ch].begin()); + std::copy(e[ch].begin(), e[ch].end(), + linear_output->begin(/*band=*/0, ch)); } } @@ -400,8 +397,8 @@ void EchoRemoverImpl::ProcessCapture( // Choose the linear output. const auto& Y_fft = aec_state_.UseLinearFilterOutput() ? E : Y; - data_dumper_->DumpWav("aec3_output_linear", kBlockSize, &(*y)[0][0][0], 16000, - 1); + data_dumper_->DumpWav("aec3_output_linear", + y->View(/*band=*/0, /*channel=*/0), 16000, 1); data_dumper_->DumpWav("aec3_output_linear2", kBlockSize, &e[0][0], 16000, 1); // Estimate the comfort noise. @@ -455,13 +452,12 @@ void EchoRemoverImpl::ProcessCapture( // Debug outputs for the purpose of development and analysis. data_dumper_->DumpWav("aec3_echo_estimate", kBlockSize, &subtractor_output[0].s_refined[0], 16000, 1); - data_dumper_->DumpRaw("aec3_output", (*y)[0][0]); + data_dumper_->DumpRaw("aec3_output", y->View(/*band=*/0, /*channel=*/0)); data_dumper_->DumpRaw("aec3_narrow_render", render_signal_analyzer_.NarrowPeakBand() ? 
1 : 0); data_dumper_->DumpRaw("aec3_N2", cng_.NoiseSpectrum()[0]); data_dumper_->DumpRaw("aec3_suppressor_gain", G); - data_dumper_->DumpWav("aec3_output", - rtc::ArrayView(&(*y)[0][0][0], kBlockSize), + data_dumper_->DumpWav("aec3_output", y->View(/*band=*/0, /*channel=*/0), 16000, 1); data_dumper_->DumpRaw("aec3_using_subtractor_output[0]", aec_state_.UseLinearFilterOutput() ? 1 : 0); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.h index 486a9a72f4..f2f4f5e64d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.h @@ -16,6 +16,7 @@ #include "absl/types/optional.h" #include "api/audio/echo_canceller3_config.h" #include "api/audio/echo_control.h" +#include "modules/audio_processing/aec3/block.h" #include "modules/audio_processing/aec3/delay_estimate.h" #include "modules/audio_processing/aec3/echo_path_variability.h" #include "modules/audio_processing/aec3/render_buffer.h" @@ -42,8 +43,8 @@ class EchoRemover { bool capture_signal_saturation, const absl::optional& external_delay, RenderBuffer* render_buffer, - std::vector>>* linear_output, - std::vector>>* capture) = 0; + Block* linear_output, + Block* capture) = 0; // Updates the status on whether echo leakage is detected in the output of the // echo remover. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.h index c3d8e20da1..aec8084d78 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.h @@ -15,7 +15,6 @@ #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/aec_state.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -34,6 +33,9 @@ class EchoRemoverMetrics { EchoRemoverMetrics(); + EchoRemoverMetrics(const EchoRemoverMetrics&) = delete; + EchoRemoverMetrics& operator=(const EchoRemoverMetrics&) = delete; + // Updates the metric with new data. void Update( const AecState& aec_state, @@ -52,8 +54,6 @@ class EchoRemoverMetrics { DbMetric erle_time_domain_; bool saturated_capture_ = false; bool metrics_reported_ = false; - - RTC_DISALLOW_COPY_AND_ASSIGN(EchoRemoverMetrics); }; namespace aec3 { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erl_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erl_estimator.h index 89bf6ace36..639a52c561 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erl_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erl_estimator.h @@ -18,7 +18,6 @@ #include "api/array_view.h" #include "modules/audio_processing/aec3/aec3_common.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -28,6 +27,9 @@ class ErlEstimator { explicit ErlEstimator(size_t startup_phase_length_blocks_); ~ErlEstimator(); + ErlEstimator(const ErlEstimator&) = delete; + ErlEstimator& operator=(const ErlEstimator&) = delete; + // Resets the ERL estimation. 
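Several headers in this patch drop rtc_base/constructor_magic.h and spell out deleted copy operations instead. A minimal sketch of the idiom (the class name is illustrative):

// Deleting the copy constructor and copy assignment in the public section
// states the intent directly and gives clearer diagnostics than the old
// RTC_DISALLOW_COPY_AND_ASSIGN macro placed at the end of the class.
class NonCopyable {
 public:
  NonCopyable() = default;
  ~NonCopyable() = default;

  NonCopyable(const NonCopyable&) = delete;
  NonCopyable& operator=(const NonCopyable&) = delete;
};

// NonCopyable a;
// NonCopyable b = a;  // Does not compile: the copy constructor is deleted.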
void Reset(); @@ -49,7 +51,6 @@ class ErlEstimator { float erl_time_domain_; int hold_counter_time_domain_; size_t blocks_since_reset_ = 0; - RTC_DISALLOW_COPY_AND_ASSIGN(ErlEstimator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.cc index be954d3a18..d8fd3aa275 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.cc @@ -19,7 +19,6 @@ #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/render_buffer.h" #include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" namespace webrtc { @@ -45,12 +44,11 @@ size_t FindPeakIndex(rtc::ArrayView filter_time_domain, } // namespace -int FilterAnalyzer::instance_count_ = 0; +std::atomic FilterAnalyzer::instance_count_(0); FilterAnalyzer::FilterAnalyzer(const EchoCanceller3Config& config, size_t num_capture_channels) - : data_dumper_( - new ApmDataDumper(rtc::AtomicOps::Increment(&instance_count_))), + : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), bounded_erl_(config.ep_strength.bounded_erl), default_gain_(config.ep_strength.default_gain), h_highpass_(num_capture_channels, @@ -131,7 +129,7 @@ void FilterAnalyzer::AnalyzeRegion( st_ch.consistent_estimate = st_ch.consistent_filter_detector.Detect( h_highpass_[ch], region_, - render_buffer.Block(-filter_delays_blocks_[ch])[0], st_ch.peak_index, + render_buffer.GetBlock(-filter_delays_blocks_[ch]), st_ch.peak_index, filter_delays_blocks_[ch]); } } @@ -170,11 +168,16 @@ void FilterAnalyzer::PreProcessFilters( std::fill(h_highpass_[ch].begin() + region_.start_sample_, h_highpass_[ch].begin() + region_.end_sample_ + 1, 0.f); + float* h_highpass_ch = h_highpass_[ch].data(); + const float* filters_time_domain_ch = filters_time_domain[ch].data(); + const size_t region_end = region_.end_sample_; for (size_t k = std::max(h.size() - 1, region_.start_sample_); - k <= region_.end_sample_; ++k) { + k <= region_end; ++k) { + float tmp = h_highpass_ch[k]; for (size_t j = 0; j < h.size(); ++j) { - h_highpass_[ch][k] += filters_time_domain[ch][k - j] * h[j]; + tmp += filters_time_domain_ch[k - j] * h[j]; } + h_highpass_ch[k] = tmp; } } } @@ -219,7 +222,7 @@ void FilterAnalyzer::ConsistentFilterDetector::Reset() { bool FilterAnalyzer::ConsistentFilterDetector::Detect( rtc::ArrayView filter_to_analyze, const FilterRegion& region, - rtc::ArrayView> x_block, + const Block& x_block, size_t peak_index, int delay_blocks) { if (region.start_sample_ == 0) { @@ -230,19 +233,23 @@ bool FilterAnalyzer::ConsistentFilterDetector::Detect( peak_index > filter_to_analyze.size() - 129 ? 
0 : peak_index + 128; } + float filter_floor_accum = filter_floor_accum_; + float filter_secondary_peak = filter_secondary_peak_; for (size_t k = region.start_sample_; k < std::min(region.end_sample_ + 1, filter_floor_low_limit_); ++k) { float abs_h = fabsf(filter_to_analyze[k]); - filter_floor_accum_ += abs_h; - filter_secondary_peak_ = std::max(filter_secondary_peak_, abs_h); + filter_floor_accum += abs_h; + filter_secondary_peak = std::max(filter_secondary_peak, abs_h); } for (size_t k = std::max(filter_floor_high_limit_, region.start_sample_); k <= region.end_sample_; ++k) { float abs_h = fabsf(filter_to_analyze[k]); - filter_floor_accum_ += abs_h; - filter_secondary_peak_ = std::max(filter_secondary_peak_, abs_h); + filter_floor_accum += abs_h; + filter_secondary_peak = std::max(filter_secondary_peak, abs_h); } + filter_floor_accum_ = filter_floor_accum; + filter_secondary_peak_ = filter_secondary_peak; if (region.end_sample_ == filter_to_analyze.size() - 1) { float filter_floor = filter_floor_accum_ / @@ -256,7 +263,9 @@ bool FilterAnalyzer::ConsistentFilterDetector::Detect( if (significant_peak_) { bool active_render_block = false; - for (auto& x_channel : x_block) { + for (int ch = 0; ch < x_block.NumChannels(); ++ch) { + rtc::ArrayView x_channel = + x_block.View(/*band=*/0, ch); const float x_energy = std::inner_product( x_channel.begin(), x_channel.end(), x_channel.begin(), 0.f); if (x_energy > active_render_threshold_) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.h index b0b7070119..9aec8b14d7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.h @@ -14,13 +14,14 @@ #include #include +#include #include #include #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" #include "modules/audio_processing/aec3/aec3_common.h" -#include "rtc_base/constructor_magic.h" +#include "modules/audio_processing/aec3/block.h" namespace webrtc { @@ -94,7 +95,7 @@ class FilterAnalyzer { void Reset(); bool Detect(rtc::ArrayView filter_to_analyze, const FilterRegion& region, - rtc::ArrayView> x_block, + const Block& x_block, size_t peak_index, int delay_blocks); @@ -129,7 +130,7 @@ class FilterAnalyzer { ConsistentFilterDetector consistent_filter_detector; }; - static int instance_count_; + static std::atomic instance_count_; std::unique_ptr data_dumper_; const bool bounded_erl_; const float default_gain_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/frame_blocker.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/frame_blocker.cc index 63aaf098c5..3039dcf7f1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/frame_blocker.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/frame_blocker.cc @@ -33,26 +33,22 @@ FrameBlocker::~FrameBlocker() = default; void FrameBlocker::InsertSubFrameAndExtractBlock( const std::vector>>& sub_frame, - std::vector>>* block) { + Block* block) { RTC_DCHECK(block); - RTC_DCHECK_EQ(num_bands_, block->size()); + RTC_DCHECK_EQ(num_bands_, block->NumBands()); RTC_DCHECK_EQ(num_bands_, sub_frame.size()); for (size_t band = 0; band < num_bands_; ++band) { - RTC_DCHECK_EQ(num_channels_, (*block)[band].size()); + RTC_DCHECK_EQ(num_channels_, block->NumChannels()); RTC_DCHECK_EQ(num_channels_, sub_frame[band].size()); for (size_t channel = 
0; channel < num_channels_; ++channel) { RTC_DCHECK_GE(kBlockSize - 16, buffer_[band][channel].size()); - RTC_DCHECK_EQ(kBlockSize, (*block)[band][channel].size()); RTC_DCHECK_EQ(kSubFrameLength, sub_frame[band][channel].size()); const int samples_to_block = kBlockSize - buffer_[band][channel].size(); - (*block)[band][channel].clear(); - (*block)[band][channel].insert((*block)[band][channel].begin(), - buffer_[band][channel].begin(), - buffer_[band][channel].end()); - (*block)[band][channel].insert( - (*block)[band][channel].begin() + buffer_[band][channel].size(), - sub_frame[band][channel].begin(), - sub_frame[band][channel].begin() + samples_to_block); + std::copy(buffer_[band][channel].begin(), buffer_[band][channel].end(), + block->begin(band, channel)); + std::copy(sub_frame[band][channel].begin(), + sub_frame[band][channel].begin() + samples_to_block, + block->begin(band, channel) + kBlockSize - samples_to_block); buffer_[band][channel].clear(); buffer_[band][channel].insert( buffer_[band][channel].begin(), @@ -66,20 +62,16 @@ bool FrameBlocker::IsBlockAvailable() const { return kBlockSize == buffer_[0][0].size(); } -void FrameBlocker::ExtractBlock( - std::vector>>* block) { +void FrameBlocker::ExtractBlock(Block* block) { RTC_DCHECK(block); - RTC_DCHECK_EQ(num_bands_, block->size()); + RTC_DCHECK_EQ(num_bands_, block->NumBands()); + RTC_DCHECK_EQ(num_channels_, block->NumChannels()); RTC_DCHECK(IsBlockAvailable()); for (size_t band = 0; band < num_bands_; ++band) { - RTC_DCHECK_EQ(num_channels_, (*block)[band].size()); for (size_t channel = 0; channel < num_channels_; ++channel) { RTC_DCHECK_EQ(kBlockSize, buffer_[band][channel].size()); - RTC_DCHECK_EQ(kBlockSize, (*block)[band][channel].size()); - (*block)[band][channel].clear(); - (*block)[band][channel].insert((*block)[band][channel].begin(), - buffer_[band][channel].begin(), - buffer_[band][channel].end()); + std::copy(buffer_[band][channel].begin(), buffer_[band][channel].end(), + block->begin(band, channel)); buffer_[band][channel].clear(); } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/frame_blocker.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/frame_blocker.h index ebd6f776f1..623c812157 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/frame_blocker.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/frame_blocker.h @@ -17,6 +17,7 @@ #include "api/array_view.h" #include "modules/audio_processing/aec3/aec3_common.h" +#include "modules/audio_processing/aec3/block.h" namespace webrtc { @@ -33,12 +34,12 @@ class FrameBlocker { // extracts one 64 sample multiband block. void InsertSubFrameAndExtractBlock( const std::vector>>& sub_frame, - std::vector>>* block); + Block* block); // Reports whether a multiband block of 64 samples is available for // extraction. bool IsBlockAvailable() const; // Extracts a multiband block of 64 samples. 
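The FrameBlocker rewrite above replaces clear()/insert() on nested vectors with std::copy into Block::begin(band, channel). A single-band, single-channel model of the same bookkeeping follows; the 80-sample sub-frame length is an assumption (kSubFrameLength is referenced but not defined in this patch).

#include <algorithm>
#include <array>
#include <cassert>
#include <vector>

constexpr size_t kBlockSize = 64;
constexpr size_t kSubFrameLength = 80;  // Assumed AEC3 sub-frame size.

// Leftover samples from the previous call plus the head of the new sub-frame
// fill one 64-sample block; the tail of the sub-frame becomes the new
// leftover. The leftover grows by 16 samples per call, so after every fourth
// sub-frame it holds a full block, which the real FrameBlocker hands out via
// IsBlockAvailable()/ExtractBlock() before the next insert.
void InsertSubFrameAndExtractBlock(
    const std::array<float, kSubFrameLength>& sub_frame,
    std::vector<float>& leftover,
    std::array<float, kBlockSize>& block) {
  assert(leftover.size() <= kBlockSize - 16);  // Mirrors the RTC_DCHECK above.
  const size_t samples_to_block = kBlockSize - leftover.size();
  std::copy(leftover.begin(), leftover.end(), block.begin());
  std::copy(sub_frame.begin(), sub_frame.begin() + samples_to_block,
            block.begin() + leftover.size());
  leftover.assign(sub_frame.begin() + samples_to_block, sub_frame.end());
}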
- void ExtractBlock(std::vector>>* block); + void ExtractBlock(Block* block); private: const size_t num_bands_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.h index 2b720a4de4..7a082176d6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.h @@ -67,7 +67,7 @@ class FullBandErleEstimator { // Updates the estimator with a new point, returns true // if the instantaneous ERLE was updated due to having enough // points for performing the estimate. - bool Update(const float Y2_sum, const float E2_sum); + bool Update(float Y2_sum, float E2_sum); // Resets the instantaneous ERLE estimator to its initial state. void Reset(); // Resets the members related with an instantaneous estimate. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.cc index 7d988f22e9..c8ad6212fd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.cc @@ -24,16 +24,147 @@ #include #include +#include "absl/types/optional.h" +#include "api/array_view.h" #include "modules/audio_processing/aec3/downsampled_render_buffer.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +namespace { + +// Subsample rate used for computing the accumulated error. +// The implementation of some core functions depends on this constant being +// equal to 4. 
+constexpr int kAccumulatedErrorSubSampleRate = 4; + +void UpdateAccumulatedError( + const rtc::ArrayView instantaneous_accumulated_error, + const rtc::ArrayView accumulated_error, + float one_over_error_sum_anchor) { + for (size_t k = 0; k < instantaneous_accumulated_error.size(); ++k) { + float error_norm = + instantaneous_accumulated_error[k] * one_over_error_sum_anchor; + if (error_norm < accumulated_error[k]) { + accumulated_error[k] = error_norm; + } else { + accumulated_error[k] += 0.01f * (error_norm - accumulated_error[k]); + } + } +} + +size_t ComputePreEchoLag(const rtc::ArrayView accumulated_error, + size_t lag, + size_t alignment_shift_winner) { + size_t pre_echo_lag_estimate = lag - alignment_shift_winner; + size_t maximum_pre_echo_lag = + std::min(pre_echo_lag_estimate / kAccumulatedErrorSubSampleRate, + accumulated_error.size()); + for (size_t k = 1; k < maximum_pre_echo_lag; ++k) { + if (accumulated_error[k] < 0.5f * accumulated_error[k - 1] && + accumulated_error[k] < 0.5f) { + pre_echo_lag_estimate = (k + 1) * kAccumulatedErrorSubSampleRate - 1; + break; + } + } + return pre_echo_lag_estimate + alignment_shift_winner; +} + +} // namespace + namespace webrtc { namespace aec3 { #if defined(WEBRTC_HAS_NEON) +inline float SumAllElements(float32x4_t elements) { + float32x2_t sum = vpadd_f32(vget_low_f32(elements), vget_high_f32(elements)); + sum = vpadd_f32(sum, sum); + return vget_lane_f32(sum, 0); +} + +void MatchedFilterCoreWithAccumulatedError_NEON( + size_t x_start_index, + float x2_sum_threshold, + float smoothing, + rtc::ArrayView x, + rtc::ArrayView y, + rtc::ArrayView h, + bool* filters_updated, + float* error_sum, + rtc::ArrayView accumulated_error, + rtc::ArrayView scratch_memory) { + const int h_size = static_cast(h.size()); + const int x_size = static_cast(x.size()); + RTC_DCHECK_EQ(0, h_size % 4); + std::fill(accumulated_error.begin(), accumulated_error.end(), 0.0f); + // Process for all samples in the sub-block. + for (size_t i = 0; i < y.size(); ++i) { + // Apply the matched filter as filter * x, and compute x * x. + RTC_DCHECK_GT(x_size, x_start_index); + // Compute loop chunk sizes until, and after, the wraparound of the circular + // buffer for x. + const int chunk1 = + std::min(h_size, static_cast(x_size - x_start_index)); + if (chunk1 != h_size) { + const int chunk2 = h_size - chunk1; + std::copy(x.begin() + x_start_index, x.end(), scratch_memory.begin()); + std::copy(x.begin(), x.begin() + chunk2, scratch_memory.begin() + chunk1); + } + const float* x_p = + chunk1 != h_size ? scratch_memory.data() : &x[x_start_index]; + const float* h_p = &h[0]; + float* accumulated_error_p = &accumulated_error[0]; + // Initialize values for the accumulation. + float32x4_t x2_sum_128 = vdupq_n_f32(0); + float x2_sum = 0.f; + float s = 0; + // Perform 128 bit vector operations. + const int limit_by_4 = h_size >> 2; + for (int k = limit_by_4; k > 0; + --k, h_p += 4, x_p += 4, accumulated_error_p++) { + // Load the data into 128 bit vectors. + const float32x4_t x_k = vld1q_f32(x_p); + const float32x4_t h_k = vld1q_f32(h_p); + // Compute and accumulate x * x. + x2_sum_128 = vmlaq_f32(x2_sum_128, x_k, x_k); + // Compute x * h + float32x4_t hk_xk_128 = vmulq_f32(h_k, x_k); + s += SumAllElements(hk_xk_128); + const float e = s - y[i]; + accumulated_error_p[0] += e * e; + } + // Combine the accumulated vector and scalar values. + x2_sum += SumAllElements(x2_sum_128); + // Compute the matched filter error. 
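The pre-echo search introduced here can be read as a simple rule: walk the accumulated matched-filter error (normalized and subsampled by 4) and stop at the first tap that is both below half of its predecessor and below 0.5 in absolute terms. A self-contained restatement of ComputePreEchoLag() with a worked example (the function name and the toy numbers are only for illustration):

#include <algorithm>
#include <cstddef>
#include <cstdio>
#include <vector>

constexpr int kSubSampleRate = 4;  // kAccumulatedErrorSubSampleRate above.

size_t PreEchoLag(const std::vector<float>& accumulated_error,
                  size_t lag,
                  size_t alignment_shift) {
  size_t estimate = lag - alignment_shift;
  const size_t max_k =
      std::min(estimate / kSubSampleRate, accumulated_error.size());
  for (size_t k = 1; k < max_k; ++k) {
    if (accumulated_error[k] < 0.5f * accumulated_error[k - 1] &&
        accumulated_error[k] < 0.5f) {
      estimate = (k + 1) * kSubSampleRate - 1;
      break;
    }
  }
  return estimate + alignment_shift;
}

int main() {
  // The error stays near 1 until tap 3, where it collapses: the pre-echo is
  // placed at sample (3 + 1) * 4 - 1 = 15 before the alignment shift.
  std::vector<float> error = {1.0f, 0.95f, 0.9f, 0.2f, 0.1f, 0.1f};
  std::printf("%zu\n", PreEchoLag(error, /*lag=*/40, /*alignment_shift=*/8));
  // Prints 23, i.e. 15 + 8.
}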
+ float e = y[i] - s; + const bool saturation = y[i] >= 32000.f || y[i] <= -32000.f; + (*error_sum) += e * e; + // Update the matched filter estimate in an NLMS manner. + if (x2_sum > x2_sum_threshold && !saturation) { + RTC_DCHECK_LT(0.f, x2_sum); + const float alpha = smoothing * e / x2_sum; + const float32x4_t alpha_128 = vmovq_n_f32(alpha); + // filter = filter + smoothing * (y - filter * x) * x / x * x. + float* h_p = &h[0]; + x_p = chunk1 != h_size ? scratch_memory.data() : &x[x_start_index]; + // Perform 128 bit vector operations. + const int limit_by_4 = h_size >> 2; + for (int k = limit_by_4; k > 0; --k, h_p += 4, x_p += 4) { + // Load the data into 128 bit vectors. + float32x4_t h_k = vld1q_f32(h_p); + const float32x4_t x_k = vld1q_f32(x_p); + // Compute h = h + alpha * x. + h_k = vmlaq_f32(h_k, alpha_128, x_k); + // Store the result. + vst1q_f32(h_p, h_k); + } + *filters_updated = true; + } + x_start_index = x_start_index > 0 ? x_start_index - 1 : x_size - 1; + } +} + void MatchedFilterCore_NEON(size_t x_start_index, float x2_sum_threshold, float smoothing, @@ -41,11 +172,20 @@ void MatchedFilterCore_NEON(size_t x_start_index, rtc::ArrayView y, rtc::ArrayView h, bool* filters_updated, - float* error_sum) { + float* error_sum, + bool compute_accumulated_error, + rtc::ArrayView accumulated_error, + rtc::ArrayView scratch_memory) { const int h_size = static_cast(h.size()); const int x_size = static_cast(x.size()); RTC_DCHECK_EQ(0, h_size % 4); + if (compute_accumulated_error) { + return MatchedFilterCoreWithAccumulatedError_NEON( + x_start_index, x2_sum_threshold, smoothing, x, y, h, filters_updated, + error_sum, accumulated_error, scratch_memory); + } + // Process for all samples in the sub-block. for (size_t i = 0; i < y.size(); ++i) { // Apply the matched filter as filter * x, and compute x * x. @@ -90,10 +230,8 @@ void MatchedFilterCore_NEON(size_t x_start_index, } // Combine the accumulated vector and scalar values. - float* v = reinterpret_cast(&x2_sum_128); - x2_sum += v[0] + v[1] + v[2] + v[3]; - v = reinterpret_cast(&s_128); - s += v[0] + v[1] + v[2] + v[3]; + s += SumAllElements(s_128); + x2_sum += SumAllElements(x2_sum_128); // Compute the matched filter error. float e = y[i] - s; @@ -144,6 +282,103 @@ void MatchedFilterCore_NEON(size_t x_start_index, #if defined(WEBRTC_ARCH_X86_FAMILY) +void MatchedFilterCore_AccumulatedError_SSE2( + size_t x_start_index, + float x2_sum_threshold, + float smoothing, + rtc::ArrayView x, + rtc::ArrayView y, + rtc::ArrayView h, + bool* filters_updated, + float* error_sum, + rtc::ArrayView accumulated_error, + rtc::ArrayView scratch_memory) { + const int h_size = static_cast(h.size()); + const int x_size = static_cast(x.size()); + RTC_DCHECK_EQ(0, h_size % 8); + std::fill(accumulated_error.begin(), accumulated_error.end(), 0.0f); + // Process for all samples in the sub-block. + for (size_t i = 0; i < y.size(); ++i) { + // Apply the matched filter as filter * x, and compute x * x. + RTC_DCHECK_GT(x_size, x_start_index); + const int chunk1 = + std::min(h_size, static_cast(x_size - x_start_index)); + if (chunk1 != h_size) { + const int chunk2 = h_size - chunk1; + std::copy(x.begin() + x_start_index, x.end(), scratch_memory.begin()); + std::copy(x.begin(), x.begin() + chunk2, scratch_memory.begin() + chunk1); + } + const float* x_p = + chunk1 != h_size ? 
scratch_memory.data() : &x[x_start_index]; + const float* h_p = &h[0]; + float* a_p = &accumulated_error[0]; + __m128 s_inst_128; + __m128 s_inst_128_4; + __m128 x2_sum_128 = _mm_set1_ps(0); + __m128 x2_sum_128_4 = _mm_set1_ps(0); + __m128 e_128; + float* const s_p = reinterpret_cast(&s_inst_128); + float* const s_4_p = reinterpret_cast(&s_inst_128_4); + float* const e_p = reinterpret_cast(&e_128); + float x2_sum = 0.0f; + float s_acum = 0; + // Perform 128 bit vector operations. + const int limit_by_8 = h_size >> 3; + for (int k = limit_by_8; k > 0; --k, h_p += 8, x_p += 8, a_p += 2) { + // Load the data into 128 bit vectors. + const __m128 x_k = _mm_loadu_ps(x_p); + const __m128 h_k = _mm_loadu_ps(h_p); + const __m128 x_k_4 = _mm_loadu_ps(x_p + 4); + const __m128 h_k_4 = _mm_loadu_ps(h_p + 4); + const __m128 xx = _mm_mul_ps(x_k, x_k); + const __m128 xx_4 = _mm_mul_ps(x_k_4, x_k_4); + // Compute and accumulate x * x and h * x. + x2_sum_128 = _mm_add_ps(x2_sum_128, xx); + x2_sum_128_4 = _mm_add_ps(x2_sum_128_4, xx_4); + s_inst_128 = _mm_mul_ps(h_k, x_k); + s_inst_128_4 = _mm_mul_ps(h_k_4, x_k_4); + s_acum += s_p[0] + s_p[1] + s_p[2] + s_p[3]; + e_p[0] = s_acum - y[i]; + s_acum += s_4_p[0] + s_4_p[1] + s_4_p[2] + s_4_p[3]; + e_p[1] = s_acum - y[i]; + a_p[0] += e_p[0] * e_p[0]; + a_p[1] += e_p[1] * e_p[1]; + } + // Combine the accumulated vector and scalar values. + x2_sum_128 = _mm_add_ps(x2_sum_128, x2_sum_128_4); + float* v = reinterpret_cast(&x2_sum_128); + x2_sum += v[0] + v[1] + v[2] + v[3]; + // Compute the matched filter error. + float e = y[i] - s_acum; + const bool saturation = y[i] >= 32000.f || y[i] <= -32000.f; + (*error_sum) += e * e; + // Update the matched filter estimate in an NLMS manner. + if (x2_sum > x2_sum_threshold && !saturation) { + RTC_DCHECK_LT(0.f, x2_sum); + const float alpha = smoothing * e / x2_sum; + const __m128 alpha_128 = _mm_set1_ps(alpha); + // filter = filter + smoothing * (y - filter * x) * x / x * x. + float* h_p = &h[0]; + const float* x_p = + chunk1 != h_size ? scratch_memory.data() : &x[x_start_index]; + // Perform 128 bit vector operations. + const int limit_by_4 = h_size >> 2; + for (int k = limit_by_4; k > 0; --k, h_p += 4, x_p += 4) { + // Load the data into 128 bit vectors. + __m128 h_k = _mm_loadu_ps(h_p); + const __m128 x_k = _mm_loadu_ps(x_p); + // Compute h = h + alpha * x. + const __m128 alpha_x = _mm_mul_ps(alpha_128, x_k); + h_k = _mm_add_ps(h_k, alpha_x); + // Store the result. + _mm_storeu_ps(h_p, h_k); + } + *filters_updated = true; + } + x_start_index = x_start_index > 0 ? x_start_index - 1 : x_size - 1; + } +} + void MatchedFilterCore_SSE2(size_t x_start_index, float x2_sum_threshold, float smoothing, @@ -151,77 +386,83 @@ void MatchedFilterCore_SSE2(size_t x_start_index, rtc::ArrayView y, rtc::ArrayView h, bool* filters_updated, - float* error_sum) { + float* error_sum, + bool compute_accumulated_error, + rtc::ArrayView accumulated_error, + rtc::ArrayView scratch_memory) { + if (compute_accumulated_error) { + return MatchedFilterCore_AccumulatedError_SSE2( + x_start_index, x2_sum_threshold, smoothing, x, y, h, filters_updated, + error_sum, accumulated_error, scratch_memory); + } const int h_size = static_cast(h.size()); const int x_size = static_cast(x.size()); RTC_DCHECK_EQ(0, h_size % 4); - // Process for all samples in the sub-block. for (size_t i = 0; i < y.size(); ++i) { // Apply the matched filter as filter * x, and compute x * x. 
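The scratch_memory handling above linearizes the circular render history before the vectorized inner loop runs. A scalar restatement of that step (the function name is illustrative):

#include <algorithm>
#include <cassert>
#include <cstddef>
#include <vector>

// When the h_size-long window starting at x_start_index wraps around the
// circular buffer x, copy its two segments into contiguous scratch storage so
// the SIMD loop can use plain sequential loads; otherwise read x in place.
const float* LinearizeWindow(const std::vector<float>& x,
                             size_t x_start_index,
                             size_t h_size,
                             std::vector<float>& scratch) {
  assert(x_start_index < x.size());
  const size_t chunk1 = std::min(h_size, x.size() - x_start_index);
  if (chunk1 == h_size) {
    return x.data() + x_start_index;  // No wraparound.
  }
  scratch.resize(h_size);
  std::copy(x.begin() + x_start_index, x.end(), scratch.begin());
  std::copy(x.begin(), x.begin() + (h_size - chunk1), scratch.begin() + chunk1);
  return scratch.data();
}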
- RTC_DCHECK_GT(x_size, x_start_index); const float* x_p = &x[x_start_index]; const float* h_p = &h[0]; - // Initialize values for the accumulation. __m128 s_128 = _mm_set1_ps(0); + __m128 s_128_4 = _mm_set1_ps(0); __m128 x2_sum_128 = _mm_set1_ps(0); + __m128 x2_sum_128_4 = _mm_set1_ps(0); float x2_sum = 0.f; float s = 0; - // Compute loop chunk sizes until, and after, the wraparound of the circular // buffer for x. const int chunk1 = std::min(h_size, static_cast(x_size - x_start_index)); - // Perform the loop in two chunks. const int chunk2 = h_size - chunk1; for (int limit : {chunk1, chunk2}) { // Perform 128 bit vector operations. - const int limit_by_4 = limit >> 2; - for (int k = limit_by_4; k > 0; --k, h_p += 4, x_p += 4) { + const int limit_by_8 = limit >> 3; + for (int k = limit_by_8; k > 0; --k, h_p += 8, x_p += 8) { // Load the data into 128 bit vectors. const __m128 x_k = _mm_loadu_ps(x_p); const __m128 h_k = _mm_loadu_ps(h_p); + const __m128 x_k_4 = _mm_loadu_ps(x_p + 4); + const __m128 h_k_4 = _mm_loadu_ps(h_p + 4); const __m128 xx = _mm_mul_ps(x_k, x_k); + const __m128 xx_4 = _mm_mul_ps(x_k_4, x_k_4); // Compute and accumulate x * x and h * x. x2_sum_128 = _mm_add_ps(x2_sum_128, xx); + x2_sum_128_4 = _mm_add_ps(x2_sum_128_4, xx_4); const __m128 hx = _mm_mul_ps(h_k, x_k); + const __m128 hx_4 = _mm_mul_ps(h_k_4, x_k_4); s_128 = _mm_add_ps(s_128, hx); + s_128_4 = _mm_add_ps(s_128_4, hx_4); } - // Perform non-vector operations for any remaining items. - for (int k = limit - limit_by_4 * 4; k > 0; --k, ++h_p, ++x_p) { + for (int k = limit - limit_by_8 * 8; k > 0; --k, ++h_p, ++x_p) { const float x_k = *x_p; x2_sum += x_k * x_k; s += *h_p * x_k; } - x_p = &x[0]; } - // Combine the accumulated vector and scalar values. + x2_sum_128 = _mm_add_ps(x2_sum_128, x2_sum_128_4); float* v = reinterpret_cast(&x2_sum_128); x2_sum += v[0] + v[1] + v[2] + v[3]; + s_128 = _mm_add_ps(s_128, s_128_4); v = reinterpret_cast(&s_128); s += v[0] + v[1] + v[2] + v[3]; - // Compute the matched filter error. float e = y[i] - s; const bool saturation = y[i] >= 32000.f || y[i] <= -32000.f; (*error_sum) += e * e; - // Update the matched filter estimate in an NLMS manner. if (x2_sum > x2_sum_threshold && !saturation) { RTC_DCHECK_LT(0.f, x2_sum); const float alpha = smoothing * e / x2_sum; const __m128 alpha_128 = _mm_set1_ps(alpha); - // filter = filter + smoothing * (y - filter * x) * x / x * x. float* h_p = &h[0]; x_p = &x[x_start_index]; - // Perform the loop in two chunks. for (int limit : {chunk1, chunk2}) { // Perform 128 bit vector operations. @@ -234,22 +475,17 @@ void MatchedFilterCore_SSE2(size_t x_start_index, // Compute h = h + alpha * x. const __m128 alpha_x = _mm_mul_ps(alpha_128, x_k); h_k = _mm_add_ps(h_k, alpha_x); - // Store the result. _mm_storeu_ps(h_p, h_k); } - // Perform non-vector operations for any remaining items. for (int k = limit - limit_by_4 * 4; k > 0; --k, ++h_p, ++x_p) { *h_p += alpha * *x_p; } - x_p = &x[0]; } - *filters_updated = true; } - x_start_index = x_start_index > 0 ? x_start_index - 1 : x_size - 1; } } @@ -262,17 +498,35 @@ void MatchedFilterCore(size_t x_start_index, rtc::ArrayView y, rtc::ArrayView h, bool* filters_updated, - float* error_sum) { + float* error_sum, + bool compute_accumulated_error, + rtc::ArrayView accumulated_error) { + if (compute_accumulated_error) { + std::fill(accumulated_error.begin(), accumulated_error.end(), 0.0f); + } + // Process for all samples in the sub-block. 
for (size_t i = 0; i < y.size(); ++i) { // Apply the matched filter as filter * x, and compute x * x. float x2_sum = 0.f; float s = 0; size_t x_index = x_start_index; - for (size_t k = 0; k < h.size(); ++k) { - x2_sum += x[x_index] * x[x_index]; - s += h[k] * x[x_index]; - x_index = x_index < (x.size() - 1) ? x_index + 1 : 0; + if (compute_accumulated_error) { + for (size_t k = 0; k < h.size(); ++k) { + x2_sum += x[x_index] * x[x_index]; + s += h[k] * x[x_index]; + x_index = x_index < (x.size() - 1) ? x_index + 1 : 0; + if ((k + 1 & 0b11) == 0) { + int idx = k >> 2; + accumulated_error[idx] += (y[i] - s) * (y[i] - s); + } + } + } else { + for (size_t k = 0; k < h.size(); ++k) { + x2_sum += x[x_index] * x[x_index]; + s += h[k] * x[x_index]; + x_index = x_index < (x.size() - 1) ? x_index + 1 : 0; + } } // Compute the matched filter error. @@ -298,6 +552,41 @@ void MatchedFilterCore(size_t x_start_index, } } +size_t MaxSquarePeakIndex(rtc::ArrayView h) { + if (h.size() < 2) { + return 0; + } + float max_element1 = h[0] * h[0]; + float max_element2 = h[1] * h[1]; + size_t lag_estimate1 = 0; + size_t lag_estimate2 = 1; + const size_t last_index = h.size() - 1; + // Keeping track of even & odd max elements separately typically allows the + // compiler to produce more efficient code. + for (size_t k = 2; k < last_index; k += 2) { + float element1 = h[k] * h[k]; + float element2 = h[k + 1] * h[k + 1]; + if (element1 > max_element1) { + max_element1 = element1; + lag_estimate1 = k; + } + if (element2 > max_element2) { + max_element2 = element2; + lag_estimate2 = k + 1; + } + } + if (max_element2 > max_element1) { + max_element1 = max_element2; + lag_estimate1 = lag_estimate2; + } + // In case of odd h size, we have not yet checked the last element. + float last_element = h[last_index] * h[last_index]; + if (last_element > max_element1) { + return last_index; + } + return lag_estimate1; +} + } // namespace aec3 MatchedFilter::MatchedFilter(ApmDataDumper* data_dumper, @@ -309,7 +598,8 @@ MatchedFilter::MatchedFilter(ApmDataDumper* data_dumper, float excitation_limit, float smoothing_fast, float smoothing_slow, - float matching_filter_threshold) + float matching_filter_threshold, + bool detect_pre_echo) : data_dumper_(data_dumper), optimization_(optimization), sub_block_size_(sub_block_size), @@ -317,16 +607,31 @@ MatchedFilter::MatchedFilter(ApmDataDumper* data_dumper, filters_( num_matched_filters, std::vector(window_size_sub_blocks * sub_block_size_, 0.f)), - lag_estimates_(num_matched_filters), filters_offsets_(num_matched_filters, 0), excitation_limit_(excitation_limit), smoothing_fast_(smoothing_fast), smoothing_slow_(smoothing_slow), - matching_filter_threshold_(matching_filter_threshold) { + matching_filter_threshold_(matching_filter_threshold), + detect_pre_echo_(detect_pre_echo) { RTC_DCHECK(data_dumper); RTC_DCHECK_LT(0, window_size_sub_blocks); RTC_DCHECK((kBlockSize % sub_block_size) == 0); RTC_DCHECK((sub_block_size % 4) == 0); + static_assert(kAccumulatedErrorSubSampleRate == 4); + if (detect_pre_echo_) { + accumulated_error_ = std::vector>( + num_matched_filters, + std::vector(window_size_sub_blocks * sub_block_size_ / + kAccumulatedErrorSubSampleRate, + 1.0f)); + + instantaneous_accumulated_error_ = + std::vector(window_size_sub_blocks * sub_block_size_ / + kAccumulatedErrorSubSampleRate, + 0.0f); + scratch_memory_ = + std::vector(window_size_sub_blocks * sub_block_size_); + } } MatchedFilter::~MatchedFilter() = default; @@ -336,9 +641,12 @@ void MatchedFilter::Reset() { 
std::fill(f.begin(), f.end(), 0.f); } - for (auto& l : lag_estimates_) { - l = MatchedFilter::LagEstimate(); + for (auto& e : accumulated_error_) { + std::fill(e.begin(), e.end(), 1.0f); } + + winner_lag_ = absl::nullopt; + reported_lag_estimate_ = absl::nullopt; } void MatchedFilter::Update(const DownsampledRenderBuffer& render_buffer, @@ -353,11 +661,25 @@ void MatchedFilter::Update(const DownsampledRenderBuffer& render_buffer, const float x2_sum_threshold = filters_[0].size() * excitation_limit_ * excitation_limit_; + // Compute anchor for the matched filter error. + float error_sum_anchor = 0.0f; + for (size_t k = 0; k < y.size(); ++k) { + error_sum_anchor += y[k] * y[k]; + } + // Apply all matched filters. + float winner_error_sum = error_sum_anchor; + winner_lag_ = absl::nullopt; + reported_lag_estimate_ = absl::nullopt; size_t alignment_shift = 0; - for (size_t n = 0; n < filters_.size(); ++n) { + absl::optional previous_lag_estimate; + const int num_filters = static_cast(filters_.size()); + int winner_index = -1; + for (int n = 0; n < num_filters; ++n) { float error_sum = 0.f; bool filters_updated = false; + const bool compute_pre_echo = + detect_pre_echo_ && n == last_detected_best_lag_filter_; size_t x_start_index = (render_buffer.read + alignment_shift + sub_block_size_ - 1) % @@ -366,82 +688,73 @@ void MatchedFilter::Update(const DownsampledRenderBuffer& render_buffer, switch (optimization_) { #if defined(WEBRTC_ARCH_X86_FAMILY) case Aec3Optimization::kSse2: - aec3::MatchedFilterCore_SSE2(x_start_index, x2_sum_threshold, smoothing, - render_buffer.buffer, y, filters_[n], - &filters_updated, &error_sum); + aec3::MatchedFilterCore_SSE2( + x_start_index, x2_sum_threshold, smoothing, render_buffer.buffer, y, + filters_[n], &filters_updated, &error_sum, compute_pre_echo, + instantaneous_accumulated_error_, scratch_memory_); break; #endif #if defined(WEBRTC_HAS_NEON) case Aec3Optimization::kNeon: - aec3::MatchedFilterCore_NEON(x_start_index, x2_sum_threshold, smoothing, - render_buffer.buffer, y, filters_[n], - &filters_updated, &error_sum); + aec3::MatchedFilterCore_NEON( + x_start_index, x2_sum_threshold, smoothing, render_buffer.buffer, y, + filters_[n], &filters_updated, &error_sum, compute_pre_echo, + instantaneous_accumulated_error_, scratch_memory_); break; #endif default: aec3::MatchedFilterCore(x_start_index, x2_sum_threshold, smoothing, render_buffer.buffer, y, filters_[n], - &filters_updated, &error_sum); + &filters_updated, &error_sum, compute_pre_echo, + instantaneous_accumulated_error_); } - // Compute anchor for the matched filter error. - const float error_sum_anchor = - std::inner_product(y.begin(), y.end(), y.begin(), 0.f); - // Estimate the lag in the matched filter as the distance to the portion in // the filter that contributes the most to the matched filter output. This // is detected as the peak of the matched filter. - const size_t lag_estimate = std::distance( - filters_[n].begin(), - std::max_element( - filters_[n].begin(), filters_[n].end(), - [](float a, float b) -> bool { return a * a < b * b; })); - - // Update the lag estimates for the matched filter. 
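MaxSquarePeakIndex() above tracks even and odd indices in separate running maxima purely as a speed optimization. A naive reference that computes the same result, up to tie-breaking, may make the intent clearer:

#include <cstddef>
#include <vector>

// Index of the filter tap with the largest squared magnitude.
size_t MaxSquarePeakIndexReference(const std::vector<float>& h) {
  size_t peak = 0;
  for (size_t k = 1; k < h.size(); ++k) {
    if (h[k] * h[k] > h[peak] * h[peak]) {
      peak = k;
    }
  }
  return peak;
}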
- lag_estimates_[n] = LagEstimate( - error_sum_anchor - error_sum, - (lag_estimate > 2 && lag_estimate < (filters_[n].size() - 10) && - error_sum < matching_filter_threshold_ * error_sum_anchor), - lag_estimate + alignment_shift, filters_updated); - - RTC_DCHECK_GE(10, filters_.size()); - switch (n) { - case 0: - data_dumper_->DumpRaw("aec3_correlator_0_h", filters_[0]); - break; - case 1: - data_dumper_->DumpRaw("aec3_correlator_1_h", filters_[1]); - break; - case 2: - data_dumper_->DumpRaw("aec3_correlator_2_h", filters_[2]); - break; - case 3: - data_dumper_->DumpRaw("aec3_correlator_3_h", filters_[3]); - break; - case 4: - data_dumper_->DumpRaw("aec3_correlator_4_h", filters_[4]); - break; - case 5: - data_dumper_->DumpRaw("aec3_correlator_5_h", filters_[5]); - break; - case 6: - data_dumper_->DumpRaw("aec3_correlator_6_h", filters_[6]); - break; - case 7: - data_dumper_->DumpRaw("aec3_correlator_7_h", filters_[7]); - break; - case 8: - data_dumper_->DumpRaw("aec3_correlator_8_h", filters_[8]); - break; - case 9: - data_dumper_->DumpRaw("aec3_correlator_9_h", filters_[9]); - break; - default: - RTC_DCHECK_NOTREACHED(); + const size_t lag_estimate = aec3::MaxSquarePeakIndex(filters_[n]); + const bool reliable = + lag_estimate > 2 && lag_estimate < (filters_[n].size() - 10) && + error_sum < matching_filter_threshold_ * error_sum_anchor; + + // Find the best estimate + const size_t lag = lag_estimate + alignment_shift; + if (filters_updated && reliable && error_sum < winner_error_sum) { + winner_error_sum = error_sum; + winner_index = n; + // In case that 2 matched filters return the same winner candidate + // (overlap region), the one with the smaller index is chosen in order + // to search for pre-echoes. + if (previous_lag_estimate && previous_lag_estimate == lag) { + winner_lag_ = previous_lag_estimate; + winner_index = n - 1; + } else { + winner_lag_ = lag; + } } - + previous_lag_estimate = lag; alignment_shift += filter_intra_lag_shift_; } + + if (winner_index != -1) { + RTC_DCHECK(winner_lag_.has_value()); + reported_lag_estimate_ = + LagEstimate(winner_lag_.value(), /*pre_echo_lag=*/winner_lag_.value()); + if (detect_pre_echo_ && last_detected_best_lag_filter_ == winner_index) { + if (error_sum_anchor > 30.0f * 30.0f * y.size()) { + UpdateAccumulatedError(instantaneous_accumulated_error_, + accumulated_error_[winner_index], + 1.0f / error_sum_anchor); + } + reported_lag_estimate_->pre_echo_lag = ComputePreEchoLag( + accumulated_error_[winner_index], winner_lag_.value(), + winner_index * filter_intra_lag_shift_ /*alignment_shift_winner*/); + } + last_detected_best_lag_filter_ = winner_index; + } + if (ApmDataDumper::IsAvailable()) { + Dump(); + } } void MatchedFilter::LogFilterProperties(int sample_rate_hz, @@ -462,4 +775,27 @@ void MatchedFilter::LogFilterProperties(int sample_rate_hz, } } +void MatchedFilter::Dump() { + for (size_t n = 0; n < filters_.size(); ++n) { + const size_t lag_estimate = aec3::MaxSquarePeakIndex(filters_[n]); + std::string dumper_filter = "aec3_correlator_" + std::to_string(n) + "_h"; + data_dumper_->DumpRaw(dumper_filter.c_str(), filters_[n]); + std::string dumper_lag = "aec3_correlator_lag_" + std::to_string(n); + data_dumper_->DumpRaw(dumper_lag.c_str(), + lag_estimate + n * filter_intra_lag_shift_); + if (detect_pre_echo_) { + std::string dumper_error = + "aec3_correlator_error_" + std::to_string(n) + "_h"; + data_dumper_->DumpRaw(dumper_error.c_str(), accumulated_error_[n]); + + size_t pre_echo_lag = ComputePreEchoLag( + accumulated_error_[n], 
lag_estimate + n * filter_intra_lag_shift_, + n * filter_intra_lag_shift_); + std::string dumper_pre_lag = + "aec3_correlator_pre_echo_lag_" + std::to_string(n); + data_dumper_->DumpRaw(dumper_pre_lag.c_str(), pre_echo_lag); + } + } +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h index c6410ab4ee..760d5e39fd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h @@ -15,6 +15,7 @@ #include +#include "absl/types/optional.h" #include "api/array_view.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "rtc_base/system/arch.h" @@ -36,7 +37,10 @@ void MatchedFilterCore_NEON(size_t x_start_index, rtc::ArrayView y, rtc::ArrayView h, bool* filters_updated, - float* error_sum); + float* error_sum, + bool compute_accumulation_error, + rtc::ArrayView accumulated_error, + rtc::ArrayView scratch_memory); #endif @@ -50,7 +54,10 @@ void MatchedFilterCore_SSE2(size_t x_start_index, rtc::ArrayView y, rtc::ArrayView h, bool* filters_updated, - float* error_sum); + float* error_sum, + bool compute_accumulated_error, + rtc::ArrayView accumulated_error, + rtc::ArrayView scratch_memory); // Filter core for the matched filter that is optimized for AVX2. void MatchedFilterCore_AVX2(size_t x_start_index, @@ -60,7 +67,10 @@ void MatchedFilterCore_AVX2(size_t x_start_index, rtc::ArrayView y, rtc::ArrayView h, bool* filters_updated, - float* error_sum); + float* error_sum, + bool compute_accumulated_error, + rtc::ArrayView accumulated_error, + rtc::ArrayView scratch_memory); #endif @@ -72,7 +82,12 @@ void MatchedFilterCore(size_t x_start_index, rtc::ArrayView y, rtc::ArrayView h, bool* filters_updated, - float* error_sum); + float* error_sum, + bool compute_accumulation_error, + rtc::ArrayView accumulated_error); + +// Find largest peak of squared values in array. +size_t MaxSquarePeakIndex(rtc::ArrayView h); } // namespace aec3 @@ -84,13 +99,10 @@ class MatchedFilter { // shift. struct LagEstimate { LagEstimate() = default; - LagEstimate(float accuracy, bool reliable, size_t lag, bool updated) - : accuracy(accuracy), reliable(reliable), lag(lag), updated(updated) {} - - float accuracy = 0.f; - bool reliable = false; + LagEstimate(size_t lag, size_t pre_echo_lag) + : lag(lag), pre_echo_lag(pre_echo_lag) {} size_t lag = 0; - bool updated = false; + size_t pre_echo_lag = 0; }; MatchedFilter(ApmDataDumper* data_dumper, @@ -102,7 +114,8 @@ class MatchedFilter { float excitation_limit, float smoothing_fast, float smoothing_slow, - float matching_filter_threshold); + float matching_filter_threshold, + bool detect_pre_echo); MatchedFilter() = delete; MatchedFilter(const MatchedFilter&) = delete; @@ -119,8 +132,8 @@ class MatchedFilter { void Reset(); // Returns the current lag estimates. - rtc::ArrayView GetLagEstimates() const { - return lag_estimates_; + absl::optional GetBestLagEstimate() const { + return reported_lag_estimate_; } // Returns the maximum filter lag. 
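Update() above now reduces the per-filter lag estimates to a single winner and exposes it through GetBestLagEstimate(). A condensed model of that selection (it drops the filters_updated flag and the equal-lag tie-break of the real code, and uses std::optional where the patch uses absl::optional):

#include <cstddef>
#include <optional>
#include <vector>

struct Candidate {
  size_t lag;       // Peak lag of one matched filter, incl. alignment shift.
  float error_sum;  // Residual error of that filter over the sub-block.
  bool reliable;    // Peak position and error passed the reliability checks.
};

// The reliable filter with the smallest error below the y-energy anchor wins;
// if none qualifies, no lag is reported for this block.
std::optional<size_t> SelectWinnerLag(const std::vector<Candidate>& filters,
                                      float error_sum_anchor) {
  std::optional<size_t> winner;
  float winner_error = error_sum_anchor;
  for (const Candidate& c : filters) {
    if (c.reliable && c.error_sum < winner_error) {
      winner_error = c.error_sum;
      winner = c.lag;
    }
  }
  return winner;
}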
@@ -134,17 +147,25 @@ class MatchedFilter { size_t downsampling_factor) const; private: + void Dump(); + ApmDataDumper* const data_dumper_; const Aec3Optimization optimization_; const size_t sub_block_size_; const size_t filter_intra_lag_shift_; std::vector> filters_; - std::vector lag_estimates_; + std::vector> accumulated_error_; + std::vector instantaneous_accumulated_error_; + std::vector scratch_memory_; + absl::optional reported_lag_estimate_; + absl::optional winner_lag_; + int last_detected_best_lag_filter_ = -1; std::vector filters_offsets_; const float excitation_limit_; const float smoothing_fast_; const float smoothing_slow_; const float matching_filter_threshold_; + const bool detect_pre_echo_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_avx2.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_avx2.cc index ed32102aa4..8c2ffcbd1e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_avx2.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_avx2.cc @@ -8,15 +8,134 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/audio_processing/aec3/matched_filter.h" - #include +#include "modules/audio_processing/aec3/matched_filter.h" #include "rtc_base/checks.h" namespace webrtc { namespace aec3 { +// Let ha denote the horizontal of a, and hb the horizontal sum of b +// returns [ha, hb, ha, hb] +inline __m128 hsum_ab(__m256 a, __m256 b) { + __m256 s_256 = _mm256_hadd_ps(a, b); + const __m256i mask = _mm256_set_epi32(7, 6, 3, 2, 5, 4, 1, 0); + s_256 = _mm256_permutevar8x32_ps(s_256, mask); + __m128 s = _mm_hadd_ps(_mm256_extractf128_ps(s_256, 0), + _mm256_extractf128_ps(s_256, 1)); + s = _mm_hadd_ps(s, s); + return s; +} + +void MatchedFilterCore_AccumulatedError_AVX2( + size_t x_start_index, + float x2_sum_threshold, + float smoothing, + rtc::ArrayView x, + rtc::ArrayView y, + rtc::ArrayView h, + bool* filters_updated, + float* error_sum, + rtc::ArrayView accumulated_error, + rtc::ArrayView scratch_memory) { + const int h_size = static_cast(h.size()); + const int x_size = static_cast(x.size()); + RTC_DCHECK_EQ(0, h_size % 16); + std::fill(accumulated_error.begin(), accumulated_error.end(), 0.0f); + + // Process for all samples in the sub-block. + for (size_t i = 0; i < y.size(); ++i) { + // Apply the matched filter as filter * x, and compute x * x. + RTC_DCHECK_GT(x_size, x_start_index); + const int chunk1 = + std::min(h_size, static_cast(x_size - x_start_index)); + if (chunk1 != h_size) { + const int chunk2 = h_size - chunk1; + std::copy(x.begin() + x_start_index, x.end(), scratch_memory.begin()); + std::copy(x.begin(), x.begin() + chunk2, scratch_memory.begin() + chunk1); + } + const float* x_p = + chunk1 != h_size ? scratch_memory.data() : &x[x_start_index]; + const float* h_p = &h[0]; + float* a_p = &accumulated_error[0]; + __m256 s_inst_hadd_256; + __m256 s_inst_256; + __m256 s_inst_256_8; + __m256 x2_sum_256 = _mm256_set1_ps(0); + __m256 x2_sum_256_8 = _mm256_set1_ps(0); + __m128 e_128; + float x2_sum = 0.0f; + float s_acum = 0; + const int limit_by_16 = h_size >> 4; + for (int k = limit_by_16; k > 0; --k, h_p += 16, x_p += 16, a_p += 4) { + // Load the data into 256 bit vectors. + __m256 x_k = _mm256_loadu_ps(x_p); + __m256 h_k = _mm256_loadu_ps(h_p); + __m256 x_k_8 = _mm256_loadu_ps(x_p + 8); + __m256 h_k_8 = _mm256_loadu_ps(h_p + 8); + // Compute and accumulate x * x and h * x. 
+ x2_sum_256 = _mm256_fmadd_ps(x_k, x_k, x2_sum_256); + x2_sum_256_8 = _mm256_fmadd_ps(x_k_8, x_k_8, x2_sum_256_8); + s_inst_256 = _mm256_mul_ps(h_k, x_k); + s_inst_256_8 = _mm256_mul_ps(h_k_8, x_k_8); + s_inst_hadd_256 = _mm256_hadd_ps(s_inst_256, s_inst_256_8); + s_inst_hadd_256 = _mm256_hadd_ps(s_inst_hadd_256, s_inst_hadd_256); + s_acum += s_inst_hadd_256[0]; + e_128[0] = s_acum - y[i]; + s_acum += s_inst_hadd_256[4]; + e_128[1] = s_acum - y[i]; + s_acum += s_inst_hadd_256[1]; + e_128[2] = s_acum - y[i]; + s_acum += s_inst_hadd_256[5]; + e_128[3] = s_acum - y[i]; + + __m128 accumulated_error = _mm_load_ps(a_p); + accumulated_error = _mm_fmadd_ps(e_128, e_128, accumulated_error); + _mm_storeu_ps(a_p, accumulated_error); + } + // Sum components together. + x2_sum_256 = _mm256_add_ps(x2_sum_256, x2_sum_256_8); + __m128 x2_sum_128 = _mm_add_ps(_mm256_extractf128_ps(x2_sum_256, 0), + _mm256_extractf128_ps(x2_sum_256, 1)); + // Combine the accumulated vector and scalar values. + float* v = reinterpret_cast(&x2_sum_128); + x2_sum += v[0] + v[1] + v[2] + v[3]; + + // Compute the matched filter error. + float e = y[i] - s_acum; + const bool saturation = y[i] >= 32000.f || y[i] <= -32000.f; + (*error_sum) += e * e; + + // Update the matched filter estimate in an NLMS manner. + if (x2_sum > x2_sum_threshold && !saturation) { + RTC_DCHECK_LT(0.f, x2_sum); + const float alpha = smoothing * e / x2_sum; + const __m256 alpha_256 = _mm256_set1_ps(alpha); + + // filter = filter + smoothing * (y - filter * x) * x / x * x. + float* h_p = &h[0]; + const float* x_p = + chunk1 != h_size ? scratch_memory.data() : &x[x_start_index]; + // Perform 256 bit vector operations. + const int limit_by_8 = h_size >> 3; + for (int k = limit_by_8; k > 0; --k, h_p += 8, x_p += 8) { + // Load the data into 256 bit vectors. + __m256 h_k = _mm256_loadu_ps(h_p); + __m256 x_k = _mm256_loadu_ps(x_p); + // Compute h = h + alpha * x. + h_k = _mm256_fmadd_ps(x_k, alpha_256, h_k); + + // Store the result. + _mm256_storeu_ps(h_p, h_k); + } + *filters_updated = true; + } + + x_start_index = x_start_index > 0 ? x_start_index - 1 : x_size - 1; + } +} + void MatchedFilterCore_AVX2(size_t x_start_index, float x2_sum_threshold, float smoothing, @@ -24,7 +143,15 @@ void MatchedFilterCore_AVX2(size_t x_start_index, rtc::ArrayView y, rtc::ArrayView h, bool* filters_updated, - float* error_sum) { + float* error_sum, + bool compute_accumulated_error, + rtc::ArrayView accumulated_error, + rtc::ArrayView scratch_memory) { + if (compute_accumulated_error) { + return MatchedFilterCore_AccumulatedError_AVX2( + x_start_index, x2_sum_threshold, smoothing, x, y, h, filters_updated, + error_sum, accumulated_error, scratch_memory); + } const int h_size = static_cast(h.size()); const int x_size = static_cast(x.size()); RTC_DCHECK_EQ(0, h_size % 8); @@ -39,7 +166,9 @@ void MatchedFilterCore_AVX2(size_t x_start_index, // Initialize values for the accumulation. __m256 s_256 = _mm256_set1_ps(0); + __m256 s_256_8 = _mm256_set1_ps(0); __m256 x2_sum_256 = _mm256_set1_ps(0); + __m256 x2_sum_256_8 = _mm256_set1_ps(0); float x2_sum = 0.f; float s = 0; @@ -52,18 +181,22 @@ void MatchedFilterCore_AVX2(size_t x_start_index, const int chunk2 = h_size - chunk1; for (int limit : {chunk1, chunk2}) { // Perform 256 bit vector operations. - const int limit_by_8 = limit >> 3; - for (int k = limit_by_8; k > 0; --k, h_p += 8, x_p += 8) { + const int limit_by_16 = limit >> 4; + for (int k = limit_by_16; k > 0; --k, h_p += 16, x_p += 16) { // Load the data into 256 bit vectors. 
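The widened SSE2/AVX2 loops above keep two independent accumulators and combine them only after the loop. A scalar analogue of that unrolling, which typically shortens the dependency chain on the accumulate and lets the CPU overlap iterations:

#include <cassert>
#include <cstddef>
#include <vector>

float DotProductTwoAccumulators(const std::vector<float>& a,
                                const std::vector<float>& b) {
  assert(a.size() == b.size());
  float sum0 = 0.f;
  float sum1 = 0.f;
  size_t k = 0;
  for (; k + 1 < a.size(); k += 2) {
    sum0 += a[k] * b[k];          // Even-indexed partial sum.
    sum1 += a[k + 1] * b[k + 1];  // Odd-indexed partial sum.
  }
  if (k < a.size()) {
    sum0 += a[k] * b[k];  // Odd-length tail.
  }
  return sum0 + sum1;  // Combine once at the end, as the SIMD code does.
}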
__m256 x_k = _mm256_loadu_ps(x_p); __m256 h_k = _mm256_loadu_ps(h_p); + __m256 x_k_8 = _mm256_loadu_ps(x_p + 8); + __m256 h_k_8 = _mm256_loadu_ps(h_p + 8); // Compute and accumulate x * x and h * x. x2_sum_256 = _mm256_fmadd_ps(x_k, x_k, x2_sum_256); + x2_sum_256_8 = _mm256_fmadd_ps(x_k_8, x_k_8, x2_sum_256_8); s_256 = _mm256_fmadd_ps(h_k, x_k, s_256); + s_256_8 = _mm256_fmadd_ps(h_k_8, x_k_8, s_256_8); } // Perform non-vector operations for any remaining items. - for (int k = limit - limit_by_8 * 8; k > 0; --k, ++h_p, ++x_p) { + for (int k = limit - limit_by_16 * 16; k > 0; --k, ++h_p, ++x_p) { const float x_k = *x_p; x2_sum += x_k * x_k; s += *h_p * x_k; @@ -73,15 +206,11 @@ void MatchedFilterCore_AVX2(size_t x_start_index, } // Sum components together. - __m128 x2_sum_128 = _mm_add_ps(_mm256_extractf128_ps(x2_sum_256, 0), - _mm256_extractf128_ps(x2_sum_256, 1)); - __m128 s_128 = _mm_add_ps(_mm256_extractf128_ps(s_256, 0), - _mm256_extractf128_ps(s_256, 1)); - // Combine the accumulated vector and scalar values. - float* v = reinterpret_cast(&x2_sum_128); - x2_sum += v[0] + v[1] + v[2] + v[3]; - v = reinterpret_cast(&s_128); - s += v[0] + v[1] + v[2] + v[3]; + x2_sum_256 = _mm256_add_ps(x2_sum_256, x2_sum_256_8); + s_256 = _mm256_add_ps(s_256, s_256_8); + __m128 sum = hsum_ab(x2_sum_256, s_256); + x2_sum += sum[0]; + s += sum[1]; // Compute the matched filter error. float e = y[i] - s; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.cc index 603a864b34..bea7868a91 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.cc @@ -14,84 +14,153 @@ #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_minmax.h" namespace webrtc { +namespace { +constexpr int kPreEchoHistogramDataNotUpdated = -1; + +int GetDownSamplingBlockSizeLog2(int down_sampling_factor) { + int down_sampling_factor_log2 = 0; + down_sampling_factor >>= 1; + while (down_sampling_factor > 0) { + down_sampling_factor_log2++; + down_sampling_factor >>= 1; + } + return static_cast(kBlockSizeLog2) > down_sampling_factor_log2 + ? 
static_cast(kBlockSizeLog2) - down_sampling_factor_log2 + : 0; +} +} // namespace MatchedFilterLagAggregator::MatchedFilterLagAggregator( ApmDataDumper* data_dumper, size_t max_filter_lag, - const EchoCanceller3Config::Delay::DelaySelectionThresholds& thresholds) + const EchoCanceller3Config::Delay& delay_config) : data_dumper_(data_dumper), - histogram_(max_filter_lag + 1, 0), - thresholds_(thresholds) { + thresholds_(delay_config.delay_selection_thresholds), + headroom_(static_cast(delay_config.delay_headroom_samples / + delay_config.down_sampling_factor)), + highest_peak_aggregator_(max_filter_lag) { + if (delay_config.detect_pre_echo) { + pre_echo_lag_aggregator_ = std::make_unique( + max_filter_lag, delay_config.down_sampling_factor); + } RTC_DCHECK(data_dumper); RTC_DCHECK_LE(thresholds_.initial, thresholds_.converged); - histogram_data_.fill(0); } MatchedFilterLagAggregator::~MatchedFilterLagAggregator() = default; void MatchedFilterLagAggregator::Reset(bool hard_reset) { - std::fill(histogram_.begin(), histogram_.end(), 0); - histogram_data_.fill(0); - histogram_data_index_ = 0; + highest_peak_aggregator_.Reset(); + if (pre_echo_lag_aggregator_ != nullptr) { + pre_echo_lag_aggregator_->Reset(); + } if (hard_reset) { significant_candidate_found_ = false; } } absl::optional MatchedFilterLagAggregator::Aggregate( - rtc::ArrayView lag_estimates) { - // Choose the strongest lag estimate as the best one. - float best_accuracy = 0.f; - int best_lag_estimate_index = -1; - for (size_t k = 0; k < lag_estimates.size(); ++k) { - if (lag_estimates[k].updated && lag_estimates[k].reliable) { - if (lag_estimates[k].accuracy > best_accuracy) { - best_accuracy = lag_estimates[k].accuracy; - best_lag_estimate_index = static_cast(k); - } + const absl::optional& lag_estimate) { + if (lag_estimate && pre_echo_lag_aggregator_) { + pre_echo_lag_aggregator_->Dump(data_dumper_); + pre_echo_lag_aggregator_->Aggregate( + std::max(0, static_cast(lag_estimate->pre_echo_lag) - headroom_)); + } + + if (lag_estimate) { + highest_peak_aggregator_.Aggregate( + std::max(0, static_cast(lag_estimate->lag) - headroom_)); + rtc::ArrayView histogram = highest_peak_aggregator_.histogram(); + int candidate = highest_peak_aggregator_.candidate(); + significant_candidate_found_ = significant_candidate_found_ || + histogram[candidate] > thresholds_.converged; + if (histogram[candidate] > thresholds_.converged || + (histogram[candidate] > thresholds_.initial && + !significant_candidate_found_)) { + DelayEstimate::Quality quality = significant_candidate_found_ + ? DelayEstimate::Quality::kRefined + : DelayEstimate::Quality::kCoarse; + int reported_delay = pre_echo_lag_aggregator_ != nullptr + ? pre_echo_lag_aggregator_->pre_echo_candidate() + : candidate; + return DelayEstimate(quality, reported_delay); } } - // TODO(peah): Remove this logging once all development is done. 
- data_dumper_->DumpRaw("aec3_echo_path_delay_estimator_best_index", - best_lag_estimate_index); - data_dumper_->DumpRaw("aec3_echo_path_delay_estimator_histogram", histogram_); + return absl::nullopt; +} - if (best_lag_estimate_index != -1) { - RTC_DCHECK_GT(histogram_.size(), histogram_data_[histogram_data_index_]); - RTC_DCHECK_LE(0, histogram_data_[histogram_data_index_]); - --histogram_[histogram_data_[histogram_data_index_]]; +MatchedFilterLagAggregator::HighestPeakAggregator::HighestPeakAggregator( + size_t max_filter_lag) + : histogram_(max_filter_lag + 1, 0) { + histogram_data_.fill(0); +} - histogram_data_[histogram_data_index_] = - lag_estimates[best_lag_estimate_index].lag; +void MatchedFilterLagAggregator::HighestPeakAggregator::Reset() { + std::fill(histogram_.begin(), histogram_.end(), 0); + histogram_data_.fill(0); + histogram_data_index_ = 0; +} - RTC_DCHECK_GT(histogram_.size(), histogram_data_[histogram_data_index_]); - RTC_DCHECK_LE(0, histogram_data_[histogram_data_index_]); - ++histogram_[histogram_data_[histogram_data_index_]]; +void MatchedFilterLagAggregator::HighestPeakAggregator::Aggregate(int lag) { + RTC_DCHECK_GT(histogram_.size(), histogram_data_[histogram_data_index_]); + RTC_DCHECK_LE(0, histogram_data_[histogram_data_index_]); + --histogram_[histogram_data_[histogram_data_index_]]; + histogram_data_[histogram_data_index_] = lag; + RTC_DCHECK_GT(histogram_.size(), histogram_data_[histogram_data_index_]); + RTC_DCHECK_LE(0, histogram_data_[histogram_data_index_]); + ++histogram_[histogram_data_[histogram_data_index_]]; + histogram_data_index_ = (histogram_data_index_ + 1) % histogram_data_.size(); + candidate_ = + std::distance(histogram_.begin(), + std::max_element(histogram_.begin(), histogram_.end())); +} - histogram_data_index_ = - (histogram_data_index_ + 1) % histogram_data_.size(); +MatchedFilterLagAggregator::PreEchoLagAggregator::PreEchoLagAggregator( + size_t max_filter_lag, + size_t down_sampling_factor) + : block_size_log2_(GetDownSamplingBlockSizeLog2(down_sampling_factor)), + histogram_( + ((max_filter_lag + 1) * down_sampling_factor) >> kBlockSizeLog2, + 0) { + Reset(); +} - const int candidate = - std::distance(histogram_.begin(), - std::max_element(histogram_.begin(), histogram_.end())); +void MatchedFilterLagAggregator::PreEchoLagAggregator::Reset() { + std::fill(histogram_.begin(), histogram_.end(), 0); + histogram_data_.fill(kPreEchoHistogramDataNotUpdated); + histogram_data_index_ = 0; + pre_echo_candidate_ = 0; +} - significant_candidate_found_ = - significant_candidate_found_ || - histogram_[candidate] > thresholds_.converged; - if (histogram_[candidate] > thresholds_.converged || - (histogram_[candidate] > thresholds_.initial && - !significant_candidate_found_)) { - DelayEstimate::Quality quality = significant_candidate_found_ - ? DelayEstimate::Quality::kRefined - : DelayEstimate::Quality::kCoarse; - return DelayEstimate(quality, candidate); - } +void MatchedFilterLagAggregator::PreEchoLagAggregator::Aggregate( + int pre_echo_lag) { + int pre_echo_block_size = pre_echo_lag >> block_size_log2_; + RTC_DCHECK(pre_echo_block_size >= 0 && + pre_echo_block_size < static_cast(histogram_.size())); + pre_echo_block_size = + rtc::SafeClamp(pre_echo_block_size, 0, histogram_.size() - 1); + // Remove the oldest point from the `histogram_`, it ignores the initial + // points where no updates have been done to the `histogram_data_` array. 
+ if (histogram_data_[histogram_data_index_] != + kPreEchoHistogramDataNotUpdated) { + --histogram_[histogram_data_[histogram_data_index_]]; } + histogram_data_[histogram_data_index_] = pre_echo_block_size; + ++histogram_[histogram_data_[histogram_data_index_]]; + histogram_data_index_ = (histogram_data_index_ + 1) % histogram_data_.size(); + int pre_echo_candidate_block_size = + std::distance(histogram_.begin(), + std::max_element(histogram_.begin(), histogram_.end())); + pre_echo_candidate_ = (pre_echo_candidate_block_size << block_size_log2_); +} - return absl::nullopt; +void MatchedFilterLagAggregator::PreEchoLagAggregator::Dump( + ApmDataDumper* const data_dumper) { + data_dumper->DumpRaw("aec3_pre_echo_delay_candidate", pre_echo_candidate_); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.h index 612bd5d942..c0598bf226 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.h @@ -26,10 +26,9 @@ class ApmDataDumper; // reliable combined lag estimate. class MatchedFilterLagAggregator { public: - MatchedFilterLagAggregator( - ApmDataDumper* data_dumper, - size_t max_filter_lag, - const EchoCanceller3Config::Delay::DelaySelectionThresholds& thresholds); + MatchedFilterLagAggregator(ApmDataDumper* data_dumper, + size_t max_filter_lag, + const EchoCanceller3Config::Delay& delay_config); MatchedFilterLagAggregator() = delete; MatchedFilterLagAggregator(const MatchedFilterLagAggregator&) = delete; @@ -43,18 +42,55 @@ class MatchedFilterLagAggregator { // Aggregates the provided lag estimates. absl::optional Aggregate( - rtc::ArrayView lag_estimates); + const absl::optional& lag_estimate); // Returns whether a reliable delay estimate has been found. bool ReliableDelayFound() const { return significant_candidate_found_; } + // Returns the delay candidate that is computed by looking at the highest peak + // on the matched filters. 
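// [Editor's illustrative sketch - not part of the patch.] Both aggregators
// implemented above follow the same sliding-window histogram pattern: remember
// the last N lag values in a ring buffer, keep a histogram of them, and report
// the histogram peak as the candidate. Stripped of the pre-echo and headroom
// details (class name and window size are hypothetical; needs <algorithm>,
// <array>, <iterator>, <vector>):
//
//   class SlidingHistogramAggregator {
//    public:
//     explicit SlidingHistogramAggregator(int max_value)
//         : histogram_(max_value + 1, 0) {
//       recent_.fill(-1);
//     }
//     void Aggregate(int value) {
//       if (recent_[index_] != -1) {
//         --histogram_[recent_[index_]];  // Drop the oldest entry.
//       }
//       recent_[index_] = value;
//       ++histogram_[value];
//       index_ = (index_ + 1) % recent_.size();
//       candidate_ = std::distance(
//           histogram_.begin(),
//           std::max_element(histogram_.begin(), histogram_.end()));
//     }
//     int candidate() const { return candidate_; }
//
//    private:
//     std::vector<int> histogram_;
//     std::array<int, 250> recent_;  // Window size chosen for illustration.
//     size_t index_ = 0;
//     int candidate_ = 0;
//   };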
+ int GetDelayAtHighestPeak() const { + return highest_peak_aggregator_.candidate(); + } + private: + class PreEchoLagAggregator { + public: + PreEchoLagAggregator(size_t max_filter_lag, size_t down_sampling_factor); + void Reset(); + void Aggregate(int pre_echo_lag); + int pre_echo_candidate() const { return pre_echo_candidate_; } + void Dump(ApmDataDumper* const data_dumper); + + private: + const int block_size_log2_; + std::array histogram_data_; + std::vector histogram_; + int histogram_data_index_ = 0; + int pre_echo_candidate_ = 0; + }; + + class HighestPeakAggregator { + public: + explicit HighestPeakAggregator(size_t max_filter_lag); + void Reset(); + void Aggregate(int lag); + int candidate() const { return candidate_; } + rtc::ArrayView histogram() const { return histogram_; } + + private: + std::vector histogram_; + std::array histogram_data_; + int histogram_data_index_ = 0; + int candidate_ = -1; + }; + ApmDataDumper* const data_dumper_; - std::vector histogram_; - std::array histogram_data_; - int histogram_data_index_ = 0; bool significant_candidate_found_ = false; const EchoCanceller3Config::Delay::DelaySelectionThresholds thresholds_; + const int headroom_; + HighestPeakAggregator highest_peak_aggregator_; + std::unique_ptr pre_echo_lag_aggregator_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_block_processor.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_block_processor.h index aa612257ea..c9ae38c4aa 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_block_processor.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_block_processor.h @@ -28,13 +28,10 @@ class MockBlockProcessor : public BlockProcessor { ProcessCapture, (bool level_change, bool saturated_microphone_signal, - std::vector>>* linear_output, - std::vector>>* capture_block), - (override)); - MOCK_METHOD(void, - BufferRender, - (const std::vector>>& block), + Block* linear_output, + Block* capture_block), (override)); + MOCK_METHOD(void, BufferRender, (const Block& block), (override)); MOCK_METHOD(void, UpdateEchoLeakageStatus, (bool leakage_detected), diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_echo_remover.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_echo_remover.h index 60c5bf433e..31f075ef0a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_echo_remover.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_echo_remover.h @@ -33,8 +33,8 @@ class MockEchoRemover : public EchoRemover { bool capture_signal_saturation, const absl::optional& delay_estimate, RenderBuffer* render_buffer, - std::vector>>* linear_output, - std::vector>>* capture), + Block* linear_output, + Block* capture), (override)); MOCK_METHOD(void, UpdateEchoLeakageStatus, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_buffer.cc index d7099b03e3..d4ad09b4bc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_buffer.cc @@ -17,8 +17,7 @@ MockRenderDelayBuffer::MockRenderDelayBuffer(int sample_rate_hz, size_t num_channels) : block_buffer_(GetRenderDelayBufferSize(4, 4, 12), 
NumBandsForRate(sample_rate_hz), - num_channels, - kBlockSize), + num_channels), spectrum_buffer_(block_buffer_.buffer.size(), num_channels), fft_buffer_(block_buffer_.buffer.size(), num_channels), render_buffer_(&block_buffer_, &spectrum_buffer_, &fft_buffer_), diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_buffer.h index 9d7b8f4e86..c17fd62caa 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_buffer.h @@ -30,7 +30,7 @@ class MockRenderDelayBuffer : public RenderDelayBuffer { MOCK_METHOD(void, Reset, (), (override)); MOCK_METHOD(RenderDelayBuffer::BufferingEvent, Insert, - (const std::vector>>& block), + (const Block& block), (override)); MOCK_METHOD(void, HandleSkippedCaptureProcessing, (), (override)); MOCK_METHOD(RenderDelayBuffer::BufferingEvent, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_controller.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_controller.h index 67d8baefe6..14d499dd28 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_controller.h @@ -31,7 +31,7 @@ class MockRenderDelayController : public RenderDelayController { GetDelay, (const DownsampledRenderBuffer& render_buffer, size_t render_delay_buffer_delay, - const std::vector>& capture), + const Block& capture), (override)); MOCK_METHOD(bool, HasClockdrift, (), (const, override)); }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/multi_channel_content_detector.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/multi_channel_content_detector.cc new file mode 100644 index 0000000000..98068964d9 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/multi_channel_content_detector.cc @@ -0,0 +1,148 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/aec3/multi_channel_content_detector.h" + +#include + +#include "rtc_base/checks.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { + +namespace { + +constexpr int kNumFramesPerSecond = 100; + +// Compares the left and right channels in the render `frame` to determine +// whether the signal is a proper stereo signal. To allow for differences +// introduced by hardware drivers, a threshold `detection_threshold` is used for +// the detection. 
+bool HasStereoContent(const std::vector<std::vector<std::vector<float>>>& frame,
+                      float detection_threshold) {
+  if (frame[0].size() < 2) {
+    return false;
+  }
+
+  for (size_t band = 0; band < frame.size(); ++band) {
+    for (size_t k = 0; k < frame[band][0].size(); ++k) {
+      if (std::fabs(frame[band][0][k] - frame[band][1][k]) >
+          detection_threshold) {
+        return true;
+      }
+    }
+  }
+  return false;
+}
+
+// In order to avoid logging metrics for very short lifetimes that are unlikely
+// to reflect real calls and that may dilute the "real" data, logging is limited
+// to lifetimes of at least 5 seconds.
+constexpr int kMinNumberOfFramesRequiredToLogMetrics = 500;
+
+// Continuous metrics are logged every 10 seconds.
+constexpr int kFramesPer10Seconds = 1000;
+
+}  // namespace
+
+MultiChannelContentDetector::MetricsLogger::MetricsLogger() {}
+
+MultiChannelContentDetector::MetricsLogger::~MetricsLogger() {
+  if (frame_counter_ < kMinNumberOfFramesRequiredToLogMetrics)
+    return;
+
+  RTC_HISTOGRAM_BOOLEAN(
+      "WebRTC.Audio.EchoCanceller.PersistentMultichannelContentEverDetected",
+      any_multichannel_content_detected_ ? 1 : 0);
+}
+
+void MultiChannelContentDetector::MetricsLogger::Update(
+    bool persistent_multichannel_content_detected) {
+  ++frame_counter_;
+  if (persistent_multichannel_content_detected) {
+    any_multichannel_content_detected_ = true;
+    ++persistent_multichannel_frame_counter_;
+  }
+
+  if (frame_counter_ < kMinNumberOfFramesRequiredToLogMetrics)
+    return;
+  if (frame_counter_ % kFramesPer10Seconds != 0)
+    return;
+  const bool mostly_multichannel_last_10_seconds =
+      (persistent_multichannel_frame_counter_ >= kFramesPer10Seconds / 2);
+  RTC_HISTOGRAM_BOOLEAN(
+      "WebRTC.Audio.EchoCanceller.ProcessingPersistentMultichannelContent",
+      mostly_multichannel_last_10_seconds ? 1 : 0);
+
+  persistent_multichannel_frame_counter_ = 0;
+}
+
+MultiChannelContentDetector::MultiChannelContentDetector(
+    bool detect_stereo_content,
+    int num_render_input_channels,
+    float detection_threshold,
+    int stereo_detection_timeout_threshold_seconds,
+    float stereo_detection_hysteresis_seconds)
+    : detect_stereo_content_(detect_stereo_content),
+      detection_threshold_(detection_threshold),
+      detection_timeout_threshold_frames_(
+          stereo_detection_timeout_threshold_seconds > 0
+              ? absl::make_optional(stereo_detection_timeout_threshold_seconds *
+                                    kNumFramesPerSecond)
+              : absl::nullopt),
+      stereo_detection_hysteresis_frames_(static_cast<int>(
+          stereo_detection_hysteresis_seconds * kNumFramesPerSecond)),
+      metrics_logger_((detect_stereo_content && num_render_input_channels > 1)
+                          ? std::make_unique<MetricsLogger>()
+                          : nullptr),
+      persistent_multichannel_content_detected_(
+          !detect_stereo_content && num_render_input_channels > 1) {}
+
+bool MultiChannelContentDetector::UpdateDetection(
+    const std::vector<std::vector<std::vector<float>>>& frame) {
+  if (!detect_stereo_content_) {
+    RTC_DCHECK_EQ(frame[0].size() > 1,
+                  persistent_multichannel_content_detected_);
+    return false;
+  }
+
+  const bool previous_persistent_multichannel_content_detected =
+      persistent_multichannel_content_detected_;
+  const bool stereo_detected_in_frame =
+      HasStereoContent(frame, detection_threshold_);
+
+  consecutive_frames_with_stereo_ =
+      stereo_detected_in_frame ? consecutive_frames_with_stereo_ + 1 : 0;
+  frames_since_stereo_detected_last_ =
+      stereo_detected_in_frame ? 0 : frames_since_stereo_detected_last_ + 1;
+
+  // Detect persistent multichannel content.
+  if (consecutive_frames_with_stereo_ > stereo_detection_hysteresis_frames_) {
+    persistent_multichannel_content_detected_ = true;
+  }
+  if (detection_timeout_threshold_frames_.has_value() &&
+      frames_since_stereo_detected_last_ >=
+          *detection_timeout_threshold_frames_) {
+    persistent_multichannel_content_detected_ = false;
+  }
+
+  // Detect temporary multichannel content.
+  temporary_multichannel_content_detected_ =
+      persistent_multichannel_content_detected_ ? false
+                                                : stereo_detected_in_frame;
+
+  if (metrics_logger_)
+    metrics_logger_->Update(persistent_multichannel_content_detected_);
+
+  return previous_persistent_multichannel_content_detected !=
+         persistent_multichannel_content_detected_;
+}
+
+}  // namespace webrtc
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/multi_channel_content_detector.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/multi_channel_content_detector.h
new file mode 100644
index 0000000000..be8717f3af
--- /dev/null
+++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/multi_channel_content_detector.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_AUDIO_PROCESSING_AEC3_MULTI_CHANNEL_CONTENT_DETECTOR_H_
+#define MODULES_AUDIO_PROCESSING_AEC3_MULTI_CHANNEL_CONTENT_DETECTOR_H_
+
+#include
+
+#include
+#include
+
+#include "absl/types/optional.h"
+
+namespace webrtc {
+
+// Analyzes audio content to determine whether the contained audio is proper
+// multichannel, or only upmixed mono. To allow for differences introduced by
+// hardware drivers, a threshold `detection_threshold` is used for the
+// detection.
+// Logs metrics continuously and upon destruction.
+class MultiChannelContentDetector {
+ public:
+  // If |stereo_detection_timeout_threshold_seconds| <= 0, no timeout is
+  // applied: Once multichannel is detected, the detector remains in that state
+  // for its lifetime.
+  MultiChannelContentDetector(bool detect_stereo_content,
+                              int num_render_input_channels,
+                              float detection_threshold,
+                              int stereo_detection_timeout_threshold_seconds,
+                              float stereo_detection_hysteresis_seconds);
+
+  // Compares the left and right channels in the render `frame` to determine
+  // whether the signal is a proper multichannel signal. Returns a bool
+  // indicating whether a change in the proper multichannel content was
+  // detected.
+  bool UpdateDetection(
+      const std::vector<std::vector<std::vector<float>>>& frame);
+
+  bool IsProperMultiChannelContentDetected() const {
+    return persistent_multichannel_content_detected_;
+  }
+
+  bool IsTemporaryMultiChannelContentDetected() const {
+    return temporary_multichannel_content_detected_;
+  }
+
+ private:
+  // Tracks and logs metrics for the amount of multichannel content detected.
+  class MetricsLogger {
+   public:
+    MetricsLogger();
+
+    // The destructor logs call summary statistics.
+    ~MetricsLogger();
+
+    // Updates and logs metrics.
+    void Update(bool persistent_multichannel_content_detected);
+
+   private:
+    int frame_counter_ = 0;
+
+    // Counts the number of frames of persistent multichannel audio observed
+    // during the current metrics collection interval.
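// [Editor's illustrative sketch - not part of the patch.] The
// persistent/temporary classification performed by UpdateDetection() above
// reduces to two frame counters, summarized here with shortened member names
// (the thresholds are per-frame counts derived from the configured seconds):
//
//   void Classify(bool stereo_in_frame) {
//     consecutive_stereo_ = stereo_in_frame ? consecutive_stereo_ + 1 : 0;
//     frames_since_stereo_ = stereo_in_frame ? 0 : frames_since_stereo_ + 1;
//     if (consecutive_stereo_ > hysteresis_frames_)
//       persistent_ = true;  // Enough consecutive stereo frames seen.
//     if (timeout_frames_ && frames_since_stereo_ >= *timeout_frames_)
//       persistent_ = false;  // No stereo seen for the timeout period.
//     temporary_ = !persistent_ && stereo_in_frame;
//   }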
+ int persistent_multichannel_frame_counter_ = 0; + + // Indicates whether persistent multichannel content has ever been detected. + bool any_multichannel_content_detected_ = false; + }; + + const bool detect_stereo_content_; + const float detection_threshold_; + const absl::optional detection_timeout_threshold_frames_; + const int stereo_detection_hysteresis_frames_; + + // Collects and reports metrics on the amount of multichannel content + // detected. Only created if |num_render_input_channels| > 1 and + // |detect_stereo_content_| is true. + const std::unique_ptr metrics_logger_; + + bool persistent_multichannel_content_detected_; + bool temporary_multichannel_content_detected_ = false; + int64_t frames_since_stereo_detected_last_ = 0; + int64_t consecutive_frames_with_stereo_ = 0; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_AEC3_MULTI_CHANNEL_CONTENT_DETECTOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.cc index db5203d542..8e391d6fa6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.cc @@ -20,7 +20,6 @@ #include "modules/audio_processing/aec3/render_signal_analyzer.h" #include "modules/audio_processing/aec3/subtractor_output.h" #include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" namespace webrtc { @@ -31,13 +30,12 @@ constexpr int kPoorExcitationCounterInitial = 1000; } // namespace -int RefinedFilterUpdateGain::instance_count_ = 0; +std::atomic RefinedFilterUpdateGain::instance_count_(0); RefinedFilterUpdateGain::RefinedFilterUpdateGain( const EchoCanceller3Config::Filter::RefinedConfiguration& config, size_t config_change_duration_blocks) - : data_dumper_( - new ApmDataDumper(rtc::AtomicOps::Increment(&instance_count_))), + : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), config_change_duration_blocks_( static_cast(config_change_duration_blocks)), poor_excitation_counter_(kPoorExcitationCounterInitial) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.h index ae4fe84df5..1a68ebc296 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.h @@ -14,6 +14,7 @@ #include #include +#include #include #include "api/array_view.h" @@ -69,7 +70,7 @@ class RefinedFilterUpdateGain { } private: - static int instance_count_; + static std::atomic instance_count_; std::unique_ptr data_dumper_; const int config_change_duration_blocks_; float one_by_config_change_duration_blocks_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_buffer.h index b8be6f517a..8adc996087 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_buffer.h @@ -40,8 +40,7 @@ class RenderBuffer { ~RenderBuffer(); // Get a block. 
- const std::vector>>& Block( - int buffer_offset_blocks) const { + const Block& GetBlock(int buffer_offset_blocks) const { int position = block_buffer_->OffsetIndex(block_buffer_->read, buffer_offset_blocks); return block_buffer_->buffer[position]; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_buffer.cc index 7bebc6fd47..ec5d35507e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_buffer.cc @@ -13,6 +13,7 @@ #include #include +#include #include #include #include @@ -32,7 +33,6 @@ #include "modules/audio_processing/aec3/render_buffer.h" #include "modules/audio_processing/aec3/spectrum_buffer.h" #include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "system_wrappers/include/field_trial.h" @@ -54,8 +54,7 @@ class RenderDelayBufferImpl final : public RenderDelayBuffer { ~RenderDelayBufferImpl() override; void Reset() override; - BufferingEvent Insert( - const std::vector>>& block) override; + BufferingEvent Insert(const Block& block) override; BufferingEvent PrepareCaptureProcessing() override; void HandleSkippedCaptureProcessing() override; bool AlignFromDelay(size_t delay) override; @@ -75,7 +74,7 @@ class RenderDelayBufferImpl final : public RenderDelayBuffer { bool HasReceivedBufferDelay() override; private: - static int instance_count_; + static std::atomic instance_count_; std::unique_ptr data_dumper_; const Aec3Optimization optimization_; const EchoCanceller3Config config_; @@ -110,8 +109,7 @@ class RenderDelayBufferImpl final : public RenderDelayBuffer { int MapDelayToTotalDelay(size_t delay) const; int ComputeDelay() const; void ApplyTotalDelay(int delay); - void InsertBlock(const std::vector>>& block, - int previous_write); + void InsertBlock(const Block& block, int previous_write); bool DetectActiveRender(rtc::ArrayView x) const; bool DetectExcessRenderBlocks(); void IncrementWriteIndices(); @@ -121,13 +119,12 @@ class RenderDelayBufferImpl final : public RenderDelayBuffer { bool RenderUnderrun(); }; -int RenderDelayBufferImpl::instance_count_ = 0; +std::atomic RenderDelayBufferImpl::instance_count_ = 0; RenderDelayBufferImpl::RenderDelayBufferImpl(const EchoCanceller3Config& config, int sample_rate_hz, size_t num_render_channels) - : data_dumper_( - new ApmDataDumper(rtc::AtomicOps::Increment(&instance_count_))), + : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), optimization_(DetectOptimization()), config_(config), update_capture_call_counter_on_skipped_blocks_( @@ -145,8 +142,7 @@ RenderDelayBufferImpl::RenderDelayBufferImpl(const EchoCanceller3Config& config, config.delay.num_filters, config.filter.refined.length_blocks), NumBandsForRate(sample_rate_hz), - num_render_channels, - kBlockSize), + num_render_channels), spectra_(blocks_.buffer.size(), num_render_channels), ffts_(blocks_.buffer.size(), num_render_channels), delay_(config_.delay.default_delay), @@ -161,7 +157,7 @@ RenderDelayBufferImpl::RenderDelayBufferImpl(const EchoCanceller3Config& config, RTC_DCHECK_EQ(blocks_.buffer.size(), ffts_.buffer.size()); RTC_DCHECK_EQ(spectra_.buffer.size(), ffts_.buffer.size()); for (size_t i = 0; i < blocks_.buffer.size(); ++i) { - RTC_DCHECK_EQ(blocks_.buffer[i][0].size(), ffts_.buffer[i].size()); + 
RTC_DCHECK_EQ(blocks_.buffer[i].NumChannels(), ffts_.buffer[i].size()); RTC_DCHECK_EQ(spectra_.buffer[i].size(), ffts_.buffer[i].size()); } @@ -211,7 +207,7 @@ void RenderDelayBufferImpl::Reset() { // Inserts a new block into the render buffers. RenderDelayBuffer::BufferingEvent RenderDelayBufferImpl::Insert( - const std::vector>>& block) { + const Block& block) { ++render_call_counter_; if (delay_) { if (!last_call_was_render_) { @@ -239,7 +235,8 @@ RenderDelayBuffer::BufferingEvent RenderDelayBufferImpl::Insert( // Detect and update render activity. if (!render_activity_) { - render_activity_counter_ += DetectActiveRender(block[0][0]) ? 1 : 0; + render_activity_counter_ += + DetectActiveRender(block.View(/*band=*/0, /*channel=*/0)) ? 1 : 0; render_activity_ = render_activity_counter_ >= 20; } @@ -394,46 +391,45 @@ void RenderDelayBufferImpl::AlignFromExternalDelay() { } // Inserts a block into the render buffers. -void RenderDelayBufferImpl::InsertBlock( - const std::vector>>& block, - int previous_write) { +void RenderDelayBufferImpl::InsertBlock(const Block& block, + int previous_write) { auto& b = blocks_; auto& lr = low_rate_; auto& ds = render_ds_; auto& f = ffts_; auto& s = spectra_; - const size_t num_bands = b.buffer[b.write].size(); - const size_t num_render_channels = b.buffer[b.write][0].size(); - RTC_DCHECK_EQ(block.size(), b.buffer[b.write].size()); + const size_t num_bands = b.buffer[b.write].NumBands(); + const size_t num_render_channels = b.buffer[b.write].NumChannels(); + RTC_DCHECK_EQ(block.NumBands(), num_bands); + RTC_DCHECK_EQ(block.NumChannels(), num_render_channels); for (size_t band = 0; band < num_bands; ++band) { - RTC_DCHECK_EQ(block[band].size(), num_render_channels); - RTC_DCHECK_EQ(b.buffer[b.write][band].size(), num_render_channels); for (size_t ch = 0; ch < num_render_channels; ++ch) { - RTC_DCHECK_EQ(block[band][ch].size(), b.buffer[b.write][band][ch].size()); - std::copy(block[band][ch].begin(), block[band][ch].end(), - b.buffer[b.write][band][ch].begin()); + std::copy(block.begin(band, ch), block.end(band, ch), + b.buffer[b.write].begin(band, ch)); } } if (render_linear_amplitude_gain_ != 1.f) { for (size_t band = 0; band < num_bands; ++band) { for (size_t ch = 0; ch < num_render_channels; ++ch) { - for (size_t k = 0; k < 64; ++k) { - b.buffer[b.write][band][ch][k] *= render_linear_amplitude_gain_; + rtc::ArrayView b_view = + b.buffer[b.write].View(band, ch); + for (float& sample : b_view) { + sample *= render_linear_amplitude_gain_; } } } } std::array downmixed_render; - render_mixer_.ProduceOutput(b.buffer[b.write][0], downmixed_render); + render_mixer_.ProduceOutput(b.buffer[b.write], downmixed_render); render_decimator_.Decimate(downmixed_render, ds); data_dumper_->DumpWav("aec3_render_decimator_output", ds.size(), ds.data(), 16000 / down_sampling_factor_, 1); std::copy(ds.rbegin(), ds.rend(), lr.buffer.begin() + lr.write); - for (size_t channel = 0; channel < b.buffer[b.write][0].size(); ++channel) { - fft_.PaddedFft(b.buffer[b.write][0][channel], - b.buffer[previous_write][0][channel], + for (int channel = 0; channel < b.buffer[b.write].NumChannels(); ++channel) { + fft_.PaddedFft(b.buffer[b.write].View(/*band=*/0, channel), + b.buffer[previous_write].View(/*band=*/0, channel), &f.buffer[f.write][channel]); f.buffer[f.write][channel].Spectrum(optimization_, s.buffer[s.write][channel]); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_buffer.h 
b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_buffer.h index 79ffc4d8c9..6dc1aefb85 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_buffer.h @@ -16,6 +16,7 @@ #include #include "api/audio/echo_canceller3_config.h" +#include "modules/audio_processing/aec3/block.h" #include "modules/audio_processing/aec3/downsampled_render_buffer.h" #include "modules/audio_processing/aec3/render_buffer.h" @@ -41,8 +42,7 @@ class RenderDelayBuffer { virtual void Reset() = 0; // Inserts a block into the buffer. - virtual BufferingEvent Insert( - const std::vector>>& block) = 0; + virtual BufferingEvent Insert(const Block& block) = 0; // Updates the buffers one step based on the specified buffer delay. Returns // an enum indicating whether there was a special event that occurred. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller.cc index 3677085d81..465e77fb7c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller.cc @@ -12,6 +12,7 @@ #include #include +#include #include #include "absl/types/optional.h" @@ -23,7 +24,6 @@ #include "modules/audio_processing/aec3/echo_path_delay_estimator.h" #include "modules/audio_processing/aec3/render_delay_controller_metrics.h" #include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" namespace webrtc { @@ -47,14 +47,13 @@ class RenderDelayControllerImpl final : public RenderDelayController { absl::optional GetDelay( const DownsampledRenderBuffer& render_buffer, size_t render_delay_buffer_delay, - const std::vector>& capture) override; + const Block& capture) override; bool HasClockdrift() const override; private: - static int instance_count_; + static std::atomic instance_count_; std::unique_ptr data_dumper_; const int hysteresis_limit_blocks_; - const int delay_headroom_samples_; absl::optional delay_; EchoPathDelayEstimator delay_estimator_; RenderDelayControllerMetrics metrics_; @@ -67,15 +66,9 @@ class RenderDelayControllerImpl final : public RenderDelayController { DelayEstimate ComputeBufferDelay( const absl::optional& current_delay, int hysteresis_limit_blocks, - int delay_headroom_samples, DelayEstimate estimated_delay) { - // Subtract delay headroom. - const int delay_with_headroom_samples = std::max( - static_cast(estimated_delay.delay) - delay_headroom_samples, 0); - // Compute the buffer delay increase required to achieve the desired latency. - size_t new_delay_blocks = delay_with_headroom_samples >> kBlockSizeLog2; - + size_t new_delay_blocks = estimated_delay.delay >> kBlockSizeLog2; // Add hysteresis. 
if (current_delay) { size_t current_delay_blocks = current_delay->delay; @@ -84,23 +77,20 @@ DelayEstimate ComputeBufferDelay( new_delay_blocks = current_delay_blocks; } } - DelayEstimate new_delay = estimated_delay; new_delay.delay = new_delay_blocks; return new_delay; } -int RenderDelayControllerImpl::instance_count_ = 0; +std::atomic RenderDelayControllerImpl::instance_count_(0); RenderDelayControllerImpl::RenderDelayControllerImpl( const EchoCanceller3Config& config, int sample_rate_hz, size_t num_capture_channels) - : data_dumper_( - new ApmDataDumper(rtc::AtomicOps::Increment(&instance_count_))), + : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), hysteresis_limit_blocks_( static_cast(config.delay.hysteresis_limit_blocks)), - delay_headroom_samples_(config.delay.delay_headroom_samples), delay_estimator_(data_dumper_.get(), config, num_capture_channels), last_delay_estimate_quality_(DelayEstimate::Quality::kCoarse) { RTC_DCHECK(ValidFullBandRate(sample_rate_hz)); @@ -124,8 +114,7 @@ void RenderDelayControllerImpl::LogRenderCall() {} absl::optional RenderDelayControllerImpl::GetDelay( const DownsampledRenderBuffer& render_buffer, size_t render_delay_buffer_delay, - const std::vector>& capture) { - RTC_DCHECK_EQ(kBlockSize, capture[0].size()); + const Block& capture) { ++capture_call_counter_; auto delay_samples = delay_estimator_.EstimateDelay(render_buffer, capture); @@ -161,15 +150,16 @@ absl::optional RenderDelayControllerImpl::GetDelay( const bool use_hysteresis = last_delay_estimate_quality_ == DelayEstimate::Quality::kRefined && delay_samples_->quality == DelayEstimate::Quality::kRefined; - delay_ = ComputeBufferDelay(delay_, - use_hysteresis ? hysteresis_limit_blocks_ : 0, - delay_headroom_samples_, *delay_samples_); + delay_ = ComputeBufferDelay( + delay_, use_hysteresis ? hysteresis_limit_blocks_ : 0, *delay_samples_); last_delay_estimate_quality_ = delay_samples_->quality; } - metrics_.Update(delay_samples_ ? absl::optional(delay_samples_->delay) - : absl::nullopt, - delay_ ? delay_->delay : 0, 0, delay_estimator_.Clockdrift()); + metrics_.Update( + delay_samples_ ? absl::optional(delay_samples_->delay) + : absl::nullopt, + delay_ ? absl::optional(delay_->delay) : absl::nullopt, + delay_estimator_.Clockdrift()); data_dumper_->DumpRaw("aec3_render_delay_controller_delay", delay_samples ? delay_samples->delay : 0); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller.h index c45ab1f089..4a18a11e36 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller.h @@ -14,6 +14,7 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" +#include "modules/audio_processing/aec3/block.h" #include "modules/audio_processing/aec3/delay_estimate.h" #include "modules/audio_processing/aec3/downsampled_render_buffer.h" #include "modules/audio_processing/aec3/render_delay_buffer.h" @@ -40,7 +41,7 @@ class RenderDelayController { virtual absl::optional GetDelay( const DownsampledRenderBuffer& render_buffer, size_t render_delay_buffer_delay, - const std::vector>& capture) = 0; + const Block& capture) = 0; // Returns true if clockdrift has been detected. 
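// [Editor's illustrative sketch - not part of the patch.] The hysteresis step
// in ComputeBufferDelay() above keeps the previously selected delay whenever
// the new block estimate moved up by no more than `hysteresis_limit_blocks`,
// so the render buffer is not re-aligned on every small fluctuation:
//
//   size_t ApplyHysteresis(size_t current_blocks,
//                          size_t new_blocks,
//                          int hysteresis_limit_blocks) {
//     if (new_blocks > current_blocks &&
//         new_blocks <= current_blocks + hysteresis_limit_blocks) {
//       return current_blocks;  // Small upward change: keep the current delay.
//     }
//     return new_blocks;
//   }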
virtual bool HasClockdrift() const = 0; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller_metrics.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller_metrics.cc index 582e033482..1e0a0f443e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller_metrics.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller_metrics.cc @@ -37,16 +37,13 @@ enum class DelayChangesCategory { kNumCategories }; -constexpr int kMaxSkewShiftCount = 20; - } // namespace RenderDelayControllerMetrics::RenderDelayControllerMetrics() = default; void RenderDelayControllerMetrics::Update( absl::optional delay_samples, - size_t buffer_delay_blocks, - absl::optional skew_shift_blocks, + absl::optional buffer_delay_blocks, ClockdriftDetector::Level clockdrift) { ++call_counter_; @@ -54,6 +51,8 @@ void RenderDelayControllerMetrics::Update( size_t delay_blocks; if (delay_samples) { ++reliable_delay_estimate_counter_; + // Add an offset by 1 (metric is halved before reporting) to reserve 0 for + // absent delay. delay_blocks = (*delay_samples) / kBlockSize + 2; } else { delay_blocks = 0; @@ -64,21 +63,21 @@ void RenderDelayControllerMetrics::Update( delay_blocks_ = delay_blocks; } - if (skew_shift_blocks) { - skew_shift_count_ = std::min(kMaxSkewShiftCount, skew_shift_count_); - } } else if (++initial_call_counter_ == 5 * kNumBlocksPerSecond) { initial_update = false; } if (call_counter_ == kMetricsReportingIntervalBlocks) { int value_to_report = static_cast(delay_blocks_); + // Divide by 2 to compress metric range. value_to_report = std::min(124, value_to_report >> 1); RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.EchoCanceller.EchoPathDelay", value_to_report, 0, 124, 125); - value_to_report = static_cast(buffer_delay_blocks + 2); - value_to_report = std::min(124, value_to_report >> 1); + // Divide by 2 to compress metric range. + // Offset by 1 to reserve 0 for absent delay. + value_to_report = buffer_delay_blocks ? 
(*buffer_delay_blocks + 2) >> 1 : 0; + value_to_report = std::min(124, value_to_report); RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.EchoCanceller.BufferDelay", value_to_report, 0, 124, 125); @@ -120,20 +119,8 @@ void RenderDelayControllerMetrics::Update( "WebRTC.Audio.EchoCanceller.Clockdrift", static_cast(clockdrift), static_cast(ClockdriftDetector::Level::kNumCategories)); - metrics_reported_ = true; call_counter_ = 0; ResetMetrics(); - } else { - metrics_reported_ = false; - } - - if (!initial_update && ++skew_report_timer_ == 60 * kNumBlocksPerSecond) { - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.EchoCanceller.MaxSkewShiftCount", - skew_shift_count_, 0, kMaxSkewShiftCount, - kMaxSkewShiftCount + 1); - - skew_shift_count_ = 0; - skew_report_timer_ = 0; } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller_metrics.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller_metrics.h index 8c527a142e..b81833b43f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller_metrics.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller_metrics.h @@ -15,7 +15,6 @@ #include "absl/types/optional.h" #include "modules/audio_processing/aec3/clockdrift_detector.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -24,15 +23,15 @@ class RenderDelayControllerMetrics { public: RenderDelayControllerMetrics(); + RenderDelayControllerMetrics(const RenderDelayControllerMetrics&) = delete; + RenderDelayControllerMetrics& operator=(const RenderDelayControllerMetrics&) = + delete; + // Updates the metric with new data. void Update(absl::optional delay_samples, - size_t buffer_delay_blocks, - absl::optional skew_shift_blocks, + absl::optional buffer_delay_blocks, ClockdriftDetector::Level clockdrift); - // Returns true if the metrics have just been reported, otherwise false. - bool MetricsReported() { return metrics_reported_; } - private: // Resets the metrics. void ResetMetrics(); @@ -41,13 +40,8 @@ class RenderDelayControllerMetrics { int reliable_delay_estimate_counter_ = 0; int delay_change_counter_ = 0; int call_counter_ = 0; - int skew_report_timer_ = 0; int initial_call_counter_ = 0; - bool metrics_reported_ = false; bool initial_update = true; - int skew_shift_count_ = 0; - - RTC_DISALLOW_COPY_AND_ASSIGN(RenderDelayControllerMetrics); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_signal_analyzer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_signal_analyzer.cc index f570aac3a0..bfbeb0ec2e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_signal_analyzer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_signal_analyzer.cc @@ -66,10 +66,9 @@ void IdentifyStrongNarrowBandComponent(const RenderBuffer& render_buffer, *narrow_peak_band = absl::nullopt; } - const std::vector>>& x_latest = - render_buffer.Block(0); + const Block& x_latest = render_buffer.GetBlock(0); float max_peak_level = 0.f; - for (size_t channel = 0; channel < x_latest[0].size(); ++channel) { + for (int channel = 0; channel < x_latest.NumChannels(); ++channel) { rtc::ArrayView X2_latest = render_buffer.Spectrum(0)[channel]; @@ -90,13 +89,14 @@ void IdentifyStrongNarrowBandComponent(const RenderBuffer& render_buffer, } // Assess the render signal strength. 
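// [Editor's illustrative sketch - not part of the patch.] The buffer-delay
// metric reported earlier above is encoded on a compressed scale: 0 is
// reserved for "no delay available", and present values are offset by 2,
// halved, and capped at 124. As a stand-alone helper (needs <algorithm> and
// absl/types/optional.h):
//
//   int EncodeBufferDelayMetric(absl::optional<size_t> buffer_delay_blocks) {
//     if (!buffer_delay_blocks) {
//       return 0;  // 0 is reserved for an absent delay.
//     }
//     const int value = static_cast<int>(*buffer_delay_blocks + 2) >> 1;
//     return std::min(124, value);
//   }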
- auto result0 = std::minmax_element(x_latest[0][channel].begin(), - x_latest[0][channel].end()); + auto result0 = std::minmax_element(x_latest.begin(/*band=*/0, channel), + x_latest.end(/*band=*/0, channel)); float max_abs = std::max(fabs(*result0.first), fabs(*result0.second)); - if (x_latest.size() > 1) { - const auto result1 = std::minmax_element(x_latest[1][channel].begin(), - x_latest[1][channel].end()); + if (x_latest.NumBands() > 1) { + const auto result1 = + std::minmax_element(x_latest.begin(/*band=*/1, channel), + x_latest.end(/*band=*/1, channel)); max_abs = std::max(max_abs, static_cast(std::max( fabs(*result1.first), fabs(*result1.second)))); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_signal_analyzer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_signal_analyzer.h index c7a3d8b7a0..2e4aaa4ba7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_signal_analyzer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_signal_analyzer.h @@ -20,7 +20,6 @@ #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/render_buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -30,6 +29,9 @@ class RenderSignalAnalyzer { explicit RenderSignalAnalyzer(const EchoCanceller3Config& config); ~RenderSignalAnalyzer(); + RenderSignalAnalyzer(const RenderSignalAnalyzer&) = delete; + RenderSignalAnalyzer& operator=(const RenderSignalAnalyzer&) = delete; + // Updates the render signal analysis with the most recent render signal. void Update(const RenderBuffer& render_buffer, const absl::optional& delay_partitions); @@ -53,8 +55,6 @@ class RenderSignalAnalyzer { std::array narrow_band_counters_; absl::optional narrow_peak_band_; size_t narrow_peak_counter_; - - RTC_DISALLOW_COPY_AND_ASSIGN(RenderSignalAnalyzer); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/stationarity_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/stationarity_estimator.cc index 01628f3e8a..4d364041b3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/stationarity_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/stationarity_estimator.cc @@ -17,7 +17,6 @@ #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/spectrum_buffer.h" #include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/atomic_ops.h" namespace webrtc { @@ -29,8 +28,7 @@ constexpr int kNBlocksInitialPhase = kNumBlocksPerSecond * 2.; } // namespace StationarityEstimator::StationarityEstimator() - : data_dumper_( - new ApmDataDumper(rtc::AtomicOps::Increment(&instance_count_))) { + : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)) { Reset(); } @@ -153,7 +151,7 @@ void StationarityEstimator::SmoothStationaryPerFreq() { stationarity_flags_ = all_ahead_stationary_smooth; } -int StationarityEstimator::instance_count_ = 0; +std::atomic StationarityEstimator::instance_count_(0); StationarityEstimator::NoiseSpectrum::NoiseSpectrum() { Reset(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/stationarity_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/stationarity_estimator.h index 6f7ad4060f..8bcd3b789e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/stationarity_estimator.h +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/stationarity_estimator.h @@ -14,6 +14,7 @@ #include #include +#include #include #include "api/array_view.h" @@ -110,7 +111,7 @@ class StationarityEstimator { size_t block_counter_; }; - static int instance_count_; + static std::atomic instance_count_; std::unique_ptr data_dumper_; NoiseSpectrum noise_; std::array hangovers_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.cc index 2eae686752..aa36bb272a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.cc @@ -176,11 +176,11 @@ void Subtractor::ExitInitialState() { } void Subtractor::Process(const RenderBuffer& render_buffer, - const std::vector>& capture, + const Block& capture, const RenderSignalAnalyzer& render_signal_analyzer, const AecState& aec_state, rtc::ArrayView outputs) { - RTC_DCHECK_EQ(num_capture_channels_, capture.size()); + RTC_DCHECK_EQ(num_capture_channels_, capture.NumChannels()); // Compute the render powers. const bool same_filter_sizes = refined_filters_[0]->SizePartitions() == @@ -204,9 +204,8 @@ void Subtractor::Process(const RenderBuffer& render_buffer, // Process all capture channels for (size_t ch = 0; ch < num_capture_channels_; ++ch) { - RTC_DCHECK_EQ(kBlockSize, capture[ch].size()); SubtractorOutput& output = outputs[ch]; - rtc::ArrayView y = capture[ch]; + rtc::ArrayView y = capture.View(/*band=*/0, ch); FftData& E_refined = output.E_refined; FftData E_coarse; std::array& e_refined = output.e_refined; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.h index 767e4aad46..86159a3442 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.h @@ -23,6 +23,7 @@ #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/aec3_fft.h" #include "modules/audio_processing/aec3/aec_state.h" +#include "modules/audio_processing/aec3/block.h" #include "modules/audio_processing/aec3/coarse_filter_update_gain.h" #include "modules/audio_processing/aec3/echo_path_variability.h" #include "modules/audio_processing/aec3/refined_filter_update_gain.h" @@ -48,7 +49,7 @@ class Subtractor { // Performs the echo subtraction. void Process(const RenderBuffer& render_buffer, - const std::vector>& capture, + const Block& capture, const RenderSignalAnalyzer& render_signal_analyzer, const AecState& aec_state, rtc::ArrayView outputs); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_filter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_filter.cc index 1312fa892b..83ded425d5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_filter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_filter.cc @@ -86,9 +86,9 @@ void SuppressionFilter::ApplyGain( const std::array& suppression_gain, float high_bands_gain, rtc::ArrayView E_lowest_band, - std::vector>>* e) { + Block* e) { RTC_DCHECK(e); - RTC_DCHECK_EQ(e->size(), NumBandsForRate(sample_rate_hz_)); + RTC_DCHECK_EQ(e->NumBands(), NumBandsForRate(sample_rate_hz_)); // Comfort noise gain is sqrt(1-g^2), where g is the suppression gain. 
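// [Editor's note - not part of the patch.] The sqrt(1 - g^2) choice mentioned
// in the comment above keeps the noise floor roughly constant after
// suppression: the signal is scaled by g and comfort noise at the noise-floor
// level is scaled by sqrt(1 - g^2), so the floor power becomes
// g^2 + (1 - g^2) = 1 of its original value and suppression does not leave
// audible spectral holes. A per-bin sketch of the gain computation
// (needs <algorithm>, <array>, <cmath>):
//
//   std::array<float, kFftLengthBy2Plus1> noise_gain;
//   for (size_t k = 0; k < noise_gain.size(); ++k) {
//     const float g = suppression_gain[k];
//     noise_gain[k] = std::sqrt(std::max(0.f, 1.f - g * g));
//   }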
std::array noise_gain; @@ -121,7 +121,7 @@ void SuppressionFilter::ApplyGain( constexpr float kIfftNormalization = 2.f / kFftLength; fft_.Ifft(E, &e_extended); - float* e0 = (*e)[0][ch].data(); + auto e0 = e->View(/*band=*/0, ch); float* e0_old = e_output_old_[0][ch].data(); // Window and add the first half of e_extended with the second half of @@ -138,20 +138,20 @@ void SuppressionFilter::ApplyGain( std::begin(e_output_old_[0][ch])); // Apply suppression gain to upper bands. - for (size_t b = 1; b < e->size(); ++b) { - float* e_band = (*e)[b][ch].data(); + for (int b = 1; b < e->NumBands(); ++b) { + auto e_band = e->View(b, ch); for (size_t i = 0; i < kFftLengthBy2; ++i) { e_band[i] *= high_bands_gain; } } // Add comfort noise to band 1. - if (e->size() > 1) { + if (e->NumBands() > 1) { E.Assign(comfort_noise_high_band[ch]); std::array time_domain_high_band_noise; fft_.Ifft(E, &time_domain_high_band_noise); - float* e1 = (*e)[1][ch].data(); + auto e1 = e->View(/*band=*/1, ch); const float gain = high_bands_noise_scaling * kIfftNormalization; for (size_t i = 0; i < kFftLengthBy2; ++i) { e1[i] += time_domain_high_band_noise[i] * gain; @@ -159,8 +159,8 @@ void SuppressionFilter::ApplyGain( } // Delay upper bands to match the delay of the filter bank. - for (size_t b = 1; b < e->size(); ++b) { - float* e_band = (*e)[b][ch].data(); + for (int b = 1; b < e->NumBands(); ++b) { + auto e_band = e->View(b, ch); float* e_band_old = e_output_old_[b][ch].data(); for (size_t i = 0; i < kFftLengthBy2; ++i) { std::swap(e_band[i], e_band_old[i]); @@ -168,8 +168,8 @@ void SuppressionFilter::ApplyGain( } // Clamp output of all bands. - for (size_t b = 0; b < e->size(); ++b) { - float* e_band = (*e)[b][ch].data(); + for (int b = 0; b < e->NumBands(); ++b) { + auto e_band = e->View(b, ch); for (size_t i = 0; i < kFftLengthBy2; ++i) { e_band[i] = rtc::SafeClamp(e_band[i], -32768.f, 32767.f); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_filter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_filter.h index dcf2292c7f..c18b2334bf 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_filter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_filter.h @@ -16,8 +16,8 @@ #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/aec3_fft.h" +#include "modules/audio_processing/aec3/block.h" #include "modules/audio_processing/aec3/fft_data.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -27,12 +27,16 @@ class SuppressionFilter { int sample_rate_hz, size_t num_capture_channels_); ~SuppressionFilter(); + + SuppressionFilter(const SuppressionFilter&) = delete; + SuppressionFilter& operator=(const SuppressionFilter&) = delete; + void ApplyGain(rtc::ArrayView comfort_noise, rtc::ArrayView comfort_noise_high_bands, const std::array& suppression_gain, float high_bands_gain, rtc::ArrayView E_lowest_band, - std::vector>>* e); + Block* e); private: const Aec3Optimization optimization_; @@ -40,7 +44,6 @@ class SuppressionFilter { const size_t num_capture_channels_; const Aec3Fft fft_; std::vector>> e_output_old_; - RTC_DISALLOW_COPY_AND_ASSIGN(SuppressionFilter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.cc index 14366f1aec..037dabaabe 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.cc @@ -21,7 +21,6 @@ #include "modules/audio_processing/aec3/subband_nearend_detector.h" #include "modules/audio_processing/aec3/vector_math.h" #include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "system_wrappers/include/field_trial.h" @@ -102,7 +101,7 @@ void WeightEchoForAudibility(const EchoCanceller3Config& config, } // namespace -int SuppressionGain::instance_count_ = 0; +std::atomic SuppressionGain::instance_count_(0); float SuppressionGain::UpperBandsGain( rtc::ArrayView> echo_spectrum, @@ -110,13 +109,13 @@ float SuppressionGain::UpperBandsGain( comfort_noise_spectrum, const absl::optional& narrow_peak_band, bool saturated_echo, - const std::vector>>& render, + const Block& render, const std::array& low_band_gain) const { - RTC_DCHECK_LT(0, render.size()); - if (render.size() == 1) { + RTC_DCHECK_LT(0, render.NumBands()); + if (render.NumBands() == 1) { return 1.f; } - const size_t num_render_channels = render[0].size(); + const int num_render_channels = render.NumChannels(); if (narrow_peak_band && (*narrow_peak_band > static_cast(kFftLengthBy2Plus1 - 10))) { @@ -135,16 +134,17 @@ float SuppressionGain::UpperBandsGain( // Compute the upper and lower band energies. const auto sum_of_squares = [](float a, float b) { return a + b * b; }; float low_band_energy = 0.f; - for (size_t ch = 0; ch < num_render_channels; ++ch) { - const float channel_energy = std::accumulate( - render[0][0].begin(), render[0][0].end(), 0.f, sum_of_squares); + for (int ch = 0; ch < num_render_channels; ++ch) { + const float channel_energy = + std::accumulate(render.begin(/*band=*/0, ch), + render.end(/*band=*/0, ch), 0.0f, sum_of_squares); low_band_energy = std::max(low_band_energy, channel_energy); } float high_band_energy = 0.f; - for (size_t k = 1; k < render.size(); ++k) { - for (size_t ch = 0; ch < num_render_channels; ++ch) { + for (int k = 1; k < render.NumBands(); ++k) { + for (int ch = 0; ch < num_render_channels; ++ch) { const float energy = std::accumulate( - render[k][ch].begin(), render[k][ch].end(), 0.f, sum_of_squares); + render.begin(k, ch), render.end(k, ch), 0.f, sum_of_squares); high_band_energy = std::max(high_band_energy, energy); } } @@ -325,8 +325,7 @@ SuppressionGain::SuppressionGain(const EchoCanceller3Config& config, Aec3Optimization optimization, int sample_rate_hz, size_t num_capture_channels) - : data_dumper_( - new ApmDataDumper(rtc::AtomicOps::Increment(&instance_count_))), + : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), optimization_(optimization), config_(config), num_capture_channels_(num_capture_channels), @@ -372,7 +371,7 @@ void SuppressionGain::GetGain( comfort_noise_spectrum, const RenderSignalAnalyzer& render_signal_analyzer, const AecState& aec_state, - const std::vector>>& render, + const Block& render, bool clock_drift, float* high_bands_gain, std::array* low_band_gain) { @@ -417,20 +416,17 @@ void SuppressionGain::SetInitialState(bool state) { // Detects when the render signal can be considered to have low power and // consist of stationary noise. 
-bool SuppressionGain::LowNoiseRenderDetector::Detect( - const std::vector>>& render) { +bool SuppressionGain::LowNoiseRenderDetector::Detect(const Block& render) { float x2_sum = 0.f; float x2_max = 0.f; - for (const auto& x_ch : render[0]) { - for (const auto& x_k : x_ch) { + for (int ch = 0; ch < render.NumChannels(); ++ch) { + for (float x_k : render.View(/*band=*/0, ch)) { const float x2 = x_k * x_k; x2_sum += x2; x2_max = std::max(x2_max, x2); } } - const size_t num_render_channels = render[0].size(); - x2_sum = x2_sum / num_render_channels; - ; + x2_sum = x2_sum / render.NumChannels(); constexpr float kThreshold = 50.f * 50.f * 64.f; const bool low_noise_render = diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h index 7c4a1c9f7d..c19ddd7e30 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h @@ -12,6 +12,7 @@ #define MODULES_AUDIO_PROCESSING_AEC3_SUPPRESSION_GAIN_H_ #include +#include #include #include @@ -25,7 +26,6 @@ #include "modules/audio_processing/aec3/nearend_detector.h" #include "modules/audio_processing/aec3/render_signal_analyzer.h" #include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -36,6 +36,10 @@ class SuppressionGain { int sample_rate_hz, size_t num_capture_channels); ~SuppressionGain(); + + SuppressionGain(const SuppressionGain&) = delete; + SuppressionGain& operator=(const SuppressionGain&) = delete; + void GetGain( rtc::ArrayView> nearend_spectrum, @@ -48,7 +52,7 @@ class SuppressionGain { comfort_noise_spectrum, const RenderSignalAnalyzer& render_signal_analyzer, const AecState& aec_state, - const std::vector>>& render, + const Block& render, bool clock_drift, float* high_bands_gain, std::array* low_band_gain); @@ -68,7 +72,7 @@ class SuppressionGain { comfort_noise_spectrum, const absl::optional& narrow_peak_band, bool saturated_echo, - const std::vector>>& render, + const Block& render, const std::array& low_band_gain) const; void GainToNoAudibleEcho(const std::array& nearend, @@ -97,7 +101,7 @@ class SuppressionGain { class LowNoiseRenderDetector { public: - bool Detect(const std::vector>>& render); + bool Detect(const Block& render); private: float average_power_ = 32768.f * 32768.f; @@ -115,7 +119,7 @@ class SuppressionGain { std::array emr_transparent_; }; - static int instance_count_; + static std::atomic instance_count_; std::unique_ptr data_dumper_; const Aec3Optimization optimization_; const EchoCanceller3Config config_; @@ -134,8 +138,6 @@ class SuppressionGain { // echo spectrum. 
const bool use_unbounded_echo_spectrum_; std::unique_ptr dominant_nearend_detector_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SuppressionGain); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_factory.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_factory.h index c902a58a34..20718c3d7f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_factory.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_factory.h @@ -12,8 +12,8 @@ #define MODULES_AUDIO_PROCESSING_AEC_DUMP_AEC_DUMP_FACTORY_H_ #include -#include +#include "absl/strings/string_view.h" #include "modules/audio_processing/include/aec_dump.h" #include "rtc_base/system/file_wrapper.h" #include "rtc_base/system/rtc_export.h" @@ -27,7 +27,7 @@ namespace webrtc { class RTC_EXPORT AecDumpFactory { public: // The `worker_queue` may not be null and must outlive the created - // AecDump instance. |max_log_size_bytes == -1| means the log size + // AecDump instance. `max_log_size_bytes == -1` means the log size // will be unlimited. `handle` may not be null. The AecDump takes // responsibility for `handle` and closes it in the destructor. A // non-null return value indicates that the file has been @@ -35,7 +35,7 @@ class RTC_EXPORT AecDumpFactory { static std::unique_ptr Create(webrtc::FileWrapper file, int64_t max_log_size_bytes, rtc::TaskQueue* worker_queue); - static std::unique_ptr Create(std::string file_name, + static std::unique_ptr Create(absl::string_view file_name, int64_t max_log_size_bytes, rtc::TaskQueue* worker_queue); static std::unique_ptr Create(FILE* handle, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.cc index 160583e7c3..94c24048e0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.cc @@ -13,9 +13,11 @@ #include #include +#include "absl/strings/string_view.h" #include "modules/audio_processing/aec_dump/aec_dump_factory.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" +#include "rtc_base/task_queue.h" namespace webrtc { @@ -60,8 +62,7 @@ AecDumpImpl::AecDumpImpl(FileWrapper debug_file, rtc::TaskQueue* worker_queue) : debug_file_(std::move(debug_file)), num_bytes_left_for_log_(max_log_size_bytes), - worker_queue_(worker_queue), - capture_stream_info_(CreateWriteToFileTask()) {} + worker_queue_(worker_queue) {} AecDumpImpl::~AecDumpImpl() { // Block until all tasks have finished running. 
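// Editorial sketch (not the WebRTC Block class itself) of the container shape
// that the suppression_filter/suppression_gain hunks above migrate to: one
// flat float buffer indexed by (band, channel), with NumBands()/NumChannels()
// and per-slice begin()/end()/View() replacing nested std::vector access.
// The block-length constant below is an illustrative assumption.
#include <cassert>
#include <cstddef>
#include <vector>

class BlockSketch {
 public:
  static constexpr std::size_t kSamplesPerBlock = 64;  // assumed slice length

  BlockSketch(int num_bands, int num_channels)
      : num_bands_(num_bands),
        num_channels_(num_channels),
        data_(static_cast<std::size_t>(num_bands) * num_channels *
                  kSamplesPerBlock,
              0.0f) {}

  int NumBands() const { return num_bands_; }
  int NumChannels() const { return num_channels_; }

  // Contiguous view over one (band, channel) slice; the real class hands out
  // an rtc::ArrayView, a raw pointer pair is enough for the sketch.
  float* begin(int band, int channel) {
    assert(band >= 0 && band < num_bands_);
    assert(channel >= 0 && channel < num_channels_);
    return data_.data() +
           (static_cast<std::size_t>(band) * num_channels_ + channel) *
               kSamplesPerBlock;
  }
  float* end(int band, int channel) {
    return begin(band, channel) + kSamplesPerBlock;
  }
  float* View(int band, int channel) { return begin(band, channel); }

 private:
  const int num_bands_;
  const int num_channels_;
  std::vector<float> data_;  // flat storage, no nested heap allocations
};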
@@ -74,8 +75,7 @@ AecDumpImpl::~AecDumpImpl() { void AecDumpImpl::WriteInitMessage(const ProcessingConfig& api_format, int64_t time_now_ms) { - auto task = CreateWriteToFileTask(); - auto* event = task->GetEvent(); + auto event = std::make_unique(); event->set_type(audioproc::Event::INIT); audioproc::Init* msg = event->mutable_init(); @@ -96,7 +96,7 @@ void AecDumpImpl::WriteInitMessage(const ProcessingConfig& api_format, api_format.reverse_output_stream().num_channels()); msg->set_timestamp_ms(time_now_ms); - worker_queue_->PostTask(std::move(task)); + PostWriteToFileTask(std::move(event)); } void AecDumpImpl::AddCaptureStreamInput( @@ -126,31 +126,24 @@ void AecDumpImpl::AddAudioProcessingState(const AudioProcessingState& state) { } void AecDumpImpl::WriteCaptureStreamMessage() { - auto task = capture_stream_info_.GetTask(); - RTC_DCHECK(task); - worker_queue_->PostTask(std::move(task)); - capture_stream_info_.SetTask(CreateWriteToFileTask()); + PostWriteToFileTask(capture_stream_info_.FetchEvent()); } void AecDumpImpl::WriteRenderStreamMessage(const int16_t* const data, int num_channels, int samples_per_channel) { - auto task = CreateWriteToFileTask(); - auto* event = task->GetEvent(); - + auto event = std::make_unique(); event->set_type(audioproc::Event::REVERSE_STREAM); audioproc::ReverseStream* msg = event->mutable_reverse_stream(); const size_t data_size = sizeof(int16_t) * samples_per_channel * num_channels; msg->set_data(data, data_size); - worker_queue_->PostTask(std::move(task)); + PostWriteToFileTask(std::move(event)); } void AecDumpImpl::WriteRenderStreamMessage( const AudioFrameView& src) { - auto task = CreateWriteToFileTask(); - auto* event = task->GetEvent(); - + auto event = std::make_unique(); event->set_type(audioproc::Event::REVERSE_STREAM); audioproc::ReverseStream* msg = event->mutable_reverse_stream(); @@ -160,23 +153,21 @@ void AecDumpImpl::WriteRenderStreamMessage( msg->add_channel(channel_view.begin(), sizeof(float) * channel_view.size()); } - worker_queue_->PostTask(std::move(task)); + PostWriteToFileTask(std::move(event)); } void AecDumpImpl::WriteConfig(const InternalAPMConfig& config) { RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - auto task = CreateWriteToFileTask(); - auto* event = task->GetEvent(); + auto event = std::make_unique(); event->set_type(audioproc::Event::CONFIG); CopyFromConfigToEvent(config, event->mutable_config()); - worker_queue_->PostTask(std::move(task)); + PostWriteToFileTask(std::move(event)); } void AecDumpImpl::WriteRuntimeSetting( const AudioProcessing::RuntimeSetting& runtime_setting) { RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - auto task = CreateWriteToFileTask(); - auto* event = task->GetEvent(); + auto event = std::make_unique(); event->set_type(audioproc::Event::RUNTIME_SETTING); audioproc::RuntimeSetting* setting = event->mutable_runtime_setting(); switch (runtime_setting.type()) { @@ -233,12 +224,34 @@ void AecDumpImpl::WriteRuntimeSetting( RTC_DCHECK_NOTREACHED(); break; } - worker_queue_->PostTask(std::move(task)); + PostWriteToFileTask(std::move(event)); } -std::unique_ptr AecDumpImpl::CreateWriteToFileTask() { - return std::make_unique(&debug_file_, - &num_bytes_left_for_log_); +void AecDumpImpl::PostWriteToFileTask(std::unique_ptr event) { + RTC_DCHECK(event); + worker_queue_->PostTask([event = std::move(event), this] { + std::string event_string = event->SerializeAsString(); + const size_t event_byte_size = event_string.size(); + + if (num_bytes_left_for_log_ >= 0) { + const int64_t next_message_size = sizeof(int32_t) 
+ event_byte_size; + if (num_bytes_left_for_log_ < next_message_size) { + // Ensure that no further events are written, even if they're smaller + // than the current event. + num_bytes_left_for_log_ = 0; + return; + } + num_bytes_left_for_log_ -= next_message_size; + } + + // Write message preceded by its size. + if (!debug_file_.Write(&event_byte_size, sizeof(int32_t))) { + RTC_DCHECK_NOTREACHED(); + } + if (!debug_file_.Write(event_string.data(), event_string.size())) { + RTC_DCHECK_NOTREACHED(); + } + }); } std::unique_ptr AecDumpFactory::Create(webrtc::FileWrapper file, @@ -252,11 +265,11 @@ std::unique_ptr AecDumpFactory::Create(webrtc::FileWrapper file, worker_queue); } -std::unique_ptr AecDumpFactory::Create(std::string file_name, +std::unique_ptr AecDumpFactory::Create(absl::string_view file_name, int64_t max_log_size_bytes, rtc::TaskQueue* worker_queue) { - return Create(FileWrapper::OpenWriteOnly(file_name.c_str()), - max_log_size_bytes, worker_queue); + return Create(FileWrapper::OpenWriteOnly(file_name), max_log_size_bytes, + worker_queue); } std::unique_ptr AecDumpFactory::Create(FILE* handle, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.h index a9d3830365..fac3712b7a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.h @@ -16,7 +16,6 @@ #include #include "modules/audio_processing/aec_dump/capture_stream_info.h" -#include "modules/audio_processing/aec_dump/write_to_file_task.h" #include "modules/audio_processing/include/aec_dump.h" #include "rtc_base/ignore_wundef.h" #include "rtc_base/race_checker.h" @@ -33,21 +32,19 @@ RTC_PUSH_IGNORING_WUNDEF() #endif RTC_POP_IGNORING_WUNDEF() -namespace rtc { -class TaskQueue; -} // namespace rtc - namespace webrtc { // Task-queue based implementation of AecDump. It is thread safe by // relying on locks in TaskQueue. class AecDumpImpl : public AecDump { public: - // Does member variables initialization shared across all c-tors. + // `max_log_size_bytes` - maximum number of bytes to write to the debug file, + // `max_log_size_bytes == -1` means the log size will be unlimited. 
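// Editorial sketch of the pattern PostWriteToFileTask() adopts above: the
// serialized event is moved into a lambda handed to the worker queue, and the
// byte budget is decremented (or zeroed) inside the task. FakeEvent and
// InlineQueue are stand-ins for audioproc::Event and rtc::TaskQueue.
#include <cstdint>
#include <cstdio>
#include <memory>
#include <string>
#include <utility>

struct FakeEvent {
  std::string payload;  // stands in for the serialized protobuf message
};

struct InlineQueue {
  // Accepts move-only callables; runs them inline to keep the sketch short.
  template <typename Closure>
  void PostTask(Closure&& closure) {
    std::forward<Closure>(closure)();
  }
};

void PostWriteSketch(InlineQueue& worker_queue,
                     std::unique_ptr<FakeEvent> event,
                     int64_t& num_bytes_left_for_log) {
  worker_queue.PostTask(
      [event = std::move(event), &num_bytes_left_for_log] {
        const int64_t next_message_size =
            sizeof(int32_t) + static_cast<int64_t>(event->payload.size());
        if (num_bytes_left_for_log >= 0) {
          if (num_bytes_left_for_log < next_message_size) {
            num_bytes_left_for_log = 0;  // stop logging once the budget is hit
            return;
          }
          num_bytes_left_for_log -= next_message_size;
        }
        // The real task writes the length prefix followed by the payload.
        std::printf("would write %zu payload bytes\n", event->payload.size());
      });
}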
AecDumpImpl(FileWrapper debug_file, int64_t max_log_size_bytes, rtc::TaskQueue* worker_queue); - + AecDumpImpl(const AecDumpImpl&) = delete; + AecDumpImpl& operator=(const AecDumpImpl&) = delete; ~AecDumpImpl() override; void WriteInitMessage(const ProcessingConfig& api_format, @@ -75,7 +72,7 @@ class AecDumpImpl : public AecDump { const AudioProcessing::RuntimeSetting& runtime_setting) override; private: - std::unique_ptr CreateWriteToFileTask(); + void PostWriteToFileTask(std::unique_ptr event); FileWrapper debug_file_; int64_t num_bytes_left_for_log_ = 0; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/capture_stream_info.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/capture_stream_info.cc index efdda2570f..7d82a39729 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/capture_stream_info.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/capture_stream_info.cc @@ -11,17 +11,9 @@ #include "modules/audio_processing/aec_dump/capture_stream_info.h" namespace webrtc { -CaptureStreamInfo::CaptureStreamInfo(std::unique_ptr task) - : task_(std::move(task)) { - RTC_DCHECK(task_); - task_->GetEvent()->set_type(audioproc::Event::STREAM); -} - -CaptureStreamInfo::~CaptureStreamInfo() = default; void CaptureStreamInfo::AddInput(const AudioFrameView& src) { - RTC_DCHECK(task_); - auto* stream = task_->GetEvent()->mutable_stream(); + auto* stream = event_->mutable_stream(); for (int i = 0; i < src.num_channels(); ++i) { const auto& channel_view = src.channel(i); @@ -31,8 +23,7 @@ void CaptureStreamInfo::AddInput(const AudioFrameView& src) { } void CaptureStreamInfo::AddOutput(const AudioFrameView& src) { - RTC_DCHECK(task_); - auto* stream = task_->GetEvent()->mutable_stream(); + auto* stream = event_->mutable_stream(); for (int i = 0; i < src.num_channels(); ++i) { const auto& channel_view = src.channel(i); @@ -44,8 +35,7 @@ void CaptureStreamInfo::AddOutput(const AudioFrameView& src) { void CaptureStreamInfo::AddInput(const int16_t* const data, int num_channels, int samples_per_channel) { - RTC_DCHECK(task_); - auto* stream = task_->GetEvent()->mutable_stream(); + auto* stream = event_->mutable_stream(); const size_t data_size = sizeof(int16_t) * samples_per_channel * num_channels; stream->set_input_data(data, data_size); } @@ -53,19 +43,19 @@ void CaptureStreamInfo::AddInput(const int16_t* const data, void CaptureStreamInfo::AddOutput(const int16_t* const data, int num_channels, int samples_per_channel) { - RTC_DCHECK(task_); - auto* stream = task_->GetEvent()->mutable_stream(); + auto* stream = event_->mutable_stream(); const size_t data_size = sizeof(int16_t) * samples_per_channel * num_channels; stream->set_output_data(data, data_size); } void CaptureStreamInfo::AddAudioProcessingState( const AecDump::AudioProcessingState& state) { - RTC_DCHECK(task_); - auto* stream = task_->GetEvent()->mutable_stream(); + auto* stream = event_->mutable_stream(); stream->set_delay(state.delay); stream->set_drift(state.drift); - stream->set_level(state.level); + if (state.applied_input_volume.has_value()) { + stream->set_applied_input_volume(*state.applied_input_volume); + } stream->set_keypress(state.keypress); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/capture_stream_info.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/capture_stream_info.h index 26b0e2e0f7..0819bbcb23 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/capture_stream_info.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/capture_stream_info.h @@ -13,13 +13,9 @@ #include #include -#include -#include "modules/audio_processing/aec_dump/write_to_file_task.h" #include "modules/audio_processing/include/aec_dump.h" -#include "rtc_base/checks.h" #include "rtc_base/ignore_wundef.h" -#include "rtc_base/logging.h" // Files generated at build-time by the protobuf compiler. RTC_PUSH_IGNORING_WUNDEF() @@ -34,8 +30,11 @@ namespace webrtc { class CaptureStreamInfo { public: - explicit CaptureStreamInfo(std::unique_ptr task); - ~CaptureStreamInfo(); + CaptureStreamInfo() { CreateNewEvent(); } + CaptureStreamInfo(const CaptureStreamInfo&) = delete; + CaptureStreamInfo& operator=(const CaptureStreamInfo&) = delete; + ~CaptureStreamInfo() = default; + void AddInput(const AudioFrameView& src); void AddOutput(const AudioFrameView& src); @@ -48,20 +47,18 @@ class CaptureStreamInfo { void AddAudioProcessingState(const AecDump::AudioProcessingState& state); - std::unique_ptr GetTask() { - RTC_DCHECK(task_); - return std::move(task_); - } - - void SetTask(std::unique_ptr task) { - RTC_DCHECK(!task_); - RTC_DCHECK(task); - task_ = std::move(task); - task_->GetEvent()->set_type(audioproc::Event::STREAM); + std::unique_ptr FetchEvent() { + std::unique_ptr result = std::move(event_); + CreateNewEvent(); + return result; } private: - std::unique_ptr task_; + void CreateNewEvent() { + event_ = std::make_unique(); + event_->set_type(audioproc::Event::STREAM); + } + std::unique_ptr event_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/null_aec_dump_factory.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/null_aec_dump_factory.cc index 126adeb4eb..9bd9745069 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/null_aec_dump_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/null_aec_dump_factory.cc @@ -8,6 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include "absl/strings/string_view.h" #include "modules/audio_processing/aec_dump/aec_dump_factory.h" #include "modules/audio_processing/include/aec_dump.h" @@ -19,7 +20,7 @@ std::unique_ptr AecDumpFactory::Create(webrtc::FileWrapper file, return nullptr; } -std::unique_ptr AecDumpFactory::Create(std::string file_name, +std::unique_ptr AecDumpFactory::Create(absl::string_view file_name, int64_t max_log_size_bytes, rtc::TaskQueue* worker_queue) { return nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/write_to_file_task.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/write_to_file_task.cc deleted file mode 100644 index cd083d295b..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/write_to_file_task.cc +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_processing/aec_dump/write_to_file_task.h" - -#include - -namespace webrtc { - -WriteToFileTask::WriteToFileTask(webrtc::FileWrapper* debug_file, - int64_t* num_bytes_left_for_log) - : debug_file_(debug_file), - num_bytes_left_for_log_(num_bytes_left_for_log) {} - -WriteToFileTask::~WriteToFileTask() = default; - -audioproc::Event* WriteToFileTask::GetEvent() { - return &event_; -} - -bool WriteToFileTask::IsRoomForNextEvent(size_t event_byte_size) const { - int64_t next_message_size = event_byte_size + sizeof(int32_t); - return (*num_bytes_left_for_log_ < 0) || - (*num_bytes_left_for_log_ >= next_message_size); -} - -void WriteToFileTask::UpdateBytesLeft(size_t event_byte_size) { - RTC_DCHECK(IsRoomForNextEvent(event_byte_size)); - if (*num_bytes_left_for_log_ >= 0) { - *num_bytes_left_for_log_ -= (sizeof(int32_t) + event_byte_size); - } -} - -bool WriteToFileTask::Run() { - std::string event_string; - event_.SerializeToString(&event_string); - - const size_t event_byte_size = event_.ByteSizeLong(); - - if (!IsRoomForNextEvent(event_byte_size)) { - // Ensure that no further events are written, even if they're smaller than - // the current event. - *num_bytes_left_for_log_ = 0; - return true; - } - - UpdateBytesLeft(event_byte_size); - - // Write message preceded by its size. - if (!debug_file_->Write(&event_byte_size, sizeof(int32_t))) { - RTC_DCHECK_NOTREACHED(); - } - if (!debug_file_->Write(event_string.data(), event_string.length())) { - RTC_DCHECK_NOTREACHED(); - } - return true; // Delete task from queue at once. -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/write_to_file_task.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/write_to_file_task.h deleted file mode 100644 index 770121ed02..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/write_to_file_task.h +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_AEC_DUMP_WRITE_TO_FILE_TASK_H_ -#define MODULES_AUDIO_PROCESSING_AEC_DUMP_WRITE_TO_FILE_TASK_H_ - -#include -#include -#include - -#include "api/task_queue/queued_task.h" -#include "rtc_base/checks.h" -#include "rtc_base/event.h" -#include "rtc_base/ignore_wundef.h" -#include "rtc_base/system/file_wrapper.h" - -// Files generated at build-time by the protobuf compiler. 
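// Editorial sketch of the "fetch and re-arm" pattern that replaces the deleted
// WriteToFileTask above: CaptureStreamInfo now owns the pending event, hands
// it out via FetchEvent() and immediately starts a fresh one, so the caller
// can post the previous event to the worker queue. Event is a stand-in for
// audioproc::Event.
#include <memory>
#include <string>
#include <utility>

struct Event {
  std::string stream_data;  // accumulated capture-stream payload
};

class CaptureStreamInfoSketch {
 public:
  CaptureStreamInfoSketch() { CreateNewEvent(); }

  void AddInput(const std::string& data) { event_->stream_data += data; }

  std::unique_ptr<Event> FetchEvent() {
    std::unique_ptr<Event> result = std::move(event_);
    CreateNewEvent();  // re-arm so the next frame accumulates into a new event
    return result;
  }

 private:
  void CreateNewEvent() { event_ = std::make_unique<Event>(); }
  std::unique_ptr<Event> event_;
};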
-RTC_PUSH_IGNORING_WUNDEF() -#ifdef WEBRTC_ANDROID_PLATFORM_BUILD -#include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h" -#else -#include "modules/audio_processing/debug.pb.h" -#endif -RTC_POP_IGNORING_WUNDEF() - -namespace webrtc { - -class WriteToFileTask : public QueuedTask { - public: - WriteToFileTask(webrtc::FileWrapper* debug_file, - int64_t* num_bytes_left_for_log); - ~WriteToFileTask() override; - - audioproc::Event* GetEvent(); - - private: - bool IsRoomForNextEvent(size_t event_byte_size) const; - - void UpdateBytesLeft(size_t event_byte_size); - - bool Run() override; - - webrtc::FileWrapper* const debug_file_; - audioproc::Event event_; - int64_t* const num_bytes_left_for_log_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_AEC_DUMP_WRITE_TO_FILE_TASK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aecm/aecm_core.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aecm/aecm_core.h index d6d0d8dafc..3de49315c4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aecm/aecm_core.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aecm/aecm_core.h @@ -248,7 +248,7 @@ int WebRtcAecm_ProcessBlock(AecmCore* aecm, // void WebRtcAecm_BufferFarFrame(AecmCore* const aecm, const int16_t* const farend, - const int farLen); + int farLen); //////////////////////////////////////////////////////////////////////////////// // WebRtcAecm_FetchFarFrame() @@ -263,8 +263,8 @@ void WebRtcAecm_BufferFarFrame(AecmCore* const aecm, // void WebRtcAecm_FetchFarFrame(AecmCore* const aecm, int16_t* const farend, - const int farLen, - const int knownDelay); + int farLen, + int knownDelay); // All the functions below are intended to be private @@ -339,8 +339,8 @@ int16_t WebRtcAecm_CalcSuppressionGain(AecmCore* const aecm); // void WebRtcAecm_CalcEnergies(AecmCore* aecm, const uint16_t* far_spectrum, - const int16_t far_q, - const uint32_t nearEner, + int16_t far_q, + uint32_t nearEner, int32_t* echoEst); /////////////////////////////////////////////////////////////////////////////// @@ -374,9 +374,9 @@ int16_t WebRtcAecm_CalcStepSize(AecmCore* const aecm); // void WebRtcAecm_UpdateChannel(AecmCore* aecm, const uint16_t* far_spectrum, - const int16_t far_q, + int16_t far_q, const uint16_t* const dfa, - const int16_t mu, + int16_t mu, int32_t* echoEst); extern const int16_t WebRtcAecm_kCosTable[]; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.cc index e36d32c878..a018ff9f93 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.cc @@ -21,9 +21,11 @@ namespace webrtc { namespace { -const int kDefaultLevelDbfs = -18; -const int kNumAnalysisFrames = 100; -const double kActivityThreshold = 0.3; +constexpr int kDefaultLevelDbfs = -18; +constexpr int kNumAnalysisFrames = 100; +constexpr double kActivityThreshold = 0.3; +constexpr int kNum10msFramesInOneSecond = 100; +constexpr int kMaxSampleRateHz = 384000; } // namespace @@ -35,8 +37,10 @@ Agc::Agc() Agc::~Agc() = default; -void Agc::Process(const int16_t* audio, size_t length, int sample_rate_hz) { - vad_.ProcessChunk(audio, length, sample_rate_hz); +void Agc::Process(rtc::ArrayView audio) { + const int sample_rate_hz = audio.size() * kNum10msFramesInOneSecond; + RTC_DCHECK_LE(sample_rate_hz, kMaxSampleRateHz); + vad_.ProcessChunk(audio.data(), audio.size(), sample_rate_hz); const 
std::vector& rms = vad_.chunkwise_rms(); const std::vector& probabilities = vad_.chunkwise_voice_probabilities(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.h index 2693d94880..da42808225 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.h @@ -13,6 +13,7 @@ #include +#include "api/array_view.h" #include "modules/audio_processing/vad/voice_activity_detector.h" namespace webrtc { @@ -26,7 +27,7 @@ class Agc { // `audio` must be mono; in a multi-channel stream, provide the first (usually // left) channel. - virtual void Process(const int16_t* audio, size_t length, int sample_rate_hz); + virtual void Process(rtc::ArrayView audio); // Retrieves the difference between the target RMS level and the current // signal RMS level in dB. Returns true if an update is available and false diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.cc index b2b8a51acd..acff3e8d35 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.cc @@ -13,11 +13,11 @@ #include #include +#include "api/array_view.h" #include "common_audio/include/audio_util.h" #include "modules/audio_processing/agc/gain_control.h" -#include "modules/audio_processing/agc/gain_map_internal.h" +#include "modules/audio_processing/agc2/gain_map_internal.h" #include "modules/audio_processing/include/audio_frame_view.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_minmax.h" @@ -49,48 +49,44 @@ constexpr int kMaxResidualGainChange = 15; // restrictions from clipping events. constexpr int kSurplusCompressionGain = 6; -// History size for the clipping predictor evaluator (unit: number of 10 ms -// frames). -constexpr int kClippingPredictorEvaluatorHistorySize = 32; - -using ClippingPredictorConfig = AudioProcessing::Config::GainController1:: - AnalogGainController::ClippingPredictor; - -// Returns whether a fall-back solution to choose the maximum level should be -// chosen. -bool UseMaxAnalogChannelLevel() { - return field_trial::IsEnabled("WebRTC-UseMaxAnalogAgcChannelLevel"); -} - -// Returns kMinMicLevel if no field trial exists or if it has been disabled. -// Returns a value between 0 and 255 depending on the field-trial string. -// Example: 'WebRTC-Audio-AgcMinMicLevelExperiment/Enabled-80' => returns 80. -int GetMinMicLevel() { - RTC_LOG(LS_INFO) << "[agc] GetMinMicLevel"; +// Target speech level (dBFs) and speech probability threshold used to compute +// the RMS error override in `GetSpeechLevelErrorDb()`. These are only used for +// computing the error override and they are not passed to `agc_`. +// TODO(webrtc:7494): Move these to a config and pass in the ctor. +constexpr float kOverrideTargetSpeechLevelDbfs = -18.0f; +constexpr float kOverrideSpeechProbabilitySilenceThreshold = 0.5f; +// The minimum number of frames between `UpdateGain()` calls. +// TODO(webrtc:7494): Move this to a config and pass in the ctor with +// kOverrideWaitFrames = 100. Default value zero needed for the unit tests. 
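// Editorial sketch of the convention behind the new ArrayView-based
// Agc::Process() signature in the agc.cc/agc.h hunks above: the caller passes
// exactly 10 ms of mono audio, so the sample rate falls out of the frame
// length and no longer needs to be a separate parameter.
#include <cassert>
#include <cstddef>

int SampleRateFrom10msFrame(std::size_t samples_per_channel) {
  constexpr int kNum10msFramesInOneSecond = 100;
  constexpr int kMaxSampleRateHz = 384000;
  const int sample_rate_hz =
      static_cast<int>(samples_per_channel) * kNum10msFramesInOneSecond;
  assert(sample_rate_hz <= kMaxSampleRateHz);
  (void)kMaxSampleRateHz;  // avoid an unused warning when NDEBUG is defined
  return sample_rate_hz;   // e.g. 480 samples -> 48000 Hz
}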
+constexpr int kOverrideWaitFrames = 0; + +using AnalogAgcConfig = + AudioProcessing::Config::GainController1::AnalogGainController; + +// If the "WebRTC-Audio-2ndAgcMinMicLevelExperiment" field trial is specified, +// parses it and returns a value between 0 and 255 depending on the field-trial +// string. Returns an unspecified value if the field trial is not specified, if +// disabled or if it cannot be parsed. Example: +// 'WebRTC-Audio-2ndAgcMinMicLevelExperiment/Enabled-80' => returns 80. +absl::optional GetMinMicLevelOverride() { constexpr char kMinMicLevelFieldTrial[] = - "WebRTC-Audio-AgcMinMicLevelExperiment"; + "WebRTC-Audio-2ndAgcMinMicLevelExperiment"; if (!webrtc::field_trial::IsEnabled(kMinMicLevelFieldTrial)) { - RTC_LOG(LS_INFO) << "[agc] Using default min mic level: " << kMinMicLevel; - return kMinMicLevel; + return absl::nullopt; } const auto field_trial_string = webrtc::field_trial::FindFullName(kMinMicLevelFieldTrial); int min_mic_level = -1; sscanf(field_trial_string.c_str(), "Enabled-%d", &min_mic_level); if (min_mic_level >= 0 && min_mic_level <= 255) { - RTC_LOG(LS_INFO) << "[agc] Experimental min mic level: " << min_mic_level; return min_mic_level; } else { RTC_LOG(LS_WARNING) << "[agc] Invalid parameter for " << kMinMicLevelFieldTrial << ", ignored."; - return kMinMicLevel; + return absl::nullopt; } } -int ClampLevel(int mic_level, int min_mic_level) { - return rtc::SafeClamp(mic_level, min_mic_level, kMaxMicLevel); -} - int LevelFromGainError(int gain_error, int level, int min_mic_level) { RTC_DCHECK_GE(level, 0); RTC_DCHECK_LE(level, kMaxMicLevel); @@ -124,7 +120,7 @@ float ComputeClippedRatio(const float* const* audio, int num_clipped_in_ch = 0; for (size_t i = 0; i < samples_per_channel; ++i) { RTC_DCHECK(audio[ch]); - if (audio[ch][i] >= 32767.f || audio[ch][i] <= -32768.f) { + if (audio[ch][i] >= 32767.0f || audio[ch][i] <= -32768.0f) { ++num_clipped_in_ch; } } @@ -133,40 +129,6 @@ float ComputeClippedRatio(const float* const* audio, return static_cast(num_clipped) / (samples_per_channel); } -void LogClippingPredictorMetrics(const ClippingPredictorEvaluator& evaluator) { - absl::optional metrics = - ComputeClippingPredictionMetrics(evaluator.counters()); - if (metrics.has_value()) { - RTC_LOG(LS_INFO) << "Clipping predictor metrics: P " << metrics->precision - << " R " << metrics->recall << " F1 score " - << metrics->f1_score; - RTC_DCHECK_GE(metrics->f1_score, 0.0f); - RTC_DCHECK_LE(metrics->f1_score, 1.0f); - RTC_DCHECK_GE(metrics->precision, 0.0f); - RTC_DCHECK_LE(metrics->precision, 1.0f); - RTC_DCHECK_GE(metrics->recall, 0.0f); - RTC_DCHECK_LE(metrics->recall, 1.0f); - RTC_HISTOGRAM_COUNTS_LINEAR( - /*name=*/"WebRTC.Audio.Agc.ClippingPredictor.F1Score", - /*sample=*/std::round(metrics->f1_score * 100.0f), - /*min=*/0, - /*max=*/100, - /*bucket_count=*/50); - RTC_HISTOGRAM_COUNTS_LINEAR( - /*name=*/"WebRTC.Audio.Agc.ClippingPredictor.Precision", - /*sample=*/std::round(metrics->precision * 100.0f), - /*min=*/0, - /*max=*/100, - /*bucket_count=*/50); - RTC_HISTOGRAM_COUNTS_LINEAR( - /*name=*/"WebRTC.Audio.Agc.ClippingPredictor.Recall", - /*sample=*/std::round(metrics->recall * 100.0f), - /*min=*/0, - /*max=*/100, - /*bucket_count=*/50); - } -} - void LogClippingMetrics(int clipping_rate) { RTC_LOG(LS_INFO) << "Input clipping rate: " << clipping_rate << "%"; RTC_HISTOGRAM_COUNTS_LINEAR(/*name=*/"WebRTC.Audio.Agc.InputClippingRate", @@ -174,10 +136,30 @@ void LogClippingMetrics(int clipping_rate) { /*bucket_count=*/50); } +// Computes the speech level error 
in dB. `speech_level_dbfs` is required to be +// in the range [-90.0f, 30.0f] and `speech_probability` in the range +// [0.0f, 1.0f]. +int GetSpeechLevelErrorDb(float speech_level_dbfs, float speech_probability) { + constexpr float kMinSpeechLevelDbfs = -90.0f; + constexpr float kMaxSpeechLevelDbfs = 30.0f; + RTC_DCHECK_GE(speech_level_dbfs, kMinSpeechLevelDbfs); + RTC_DCHECK_LE(speech_level_dbfs, kMaxSpeechLevelDbfs); + RTC_DCHECK_GE(speech_probability, 0.0f); + RTC_DCHECK_LE(speech_probability, 1.0f); + + if (speech_probability < kOverrideSpeechProbabilitySilenceThreshold) { + return 0; + } + + const float speech_level = rtc::SafeClamp( + speech_level_dbfs, kMinSpeechLevelDbfs, kMaxSpeechLevelDbfs); + + return std::round(kOverrideTargetSpeechLevelDbfs - speech_level); +} + } // namespace MonoAgc::MonoAgc(ApmDataDumper* data_dumper, - int startup_min_level, int clipped_level_min, bool disable_digital_adaptive, int min_mic_level) @@ -189,7 +171,6 @@ MonoAgc::MonoAgc(ApmDataDumper* data_dumper, target_compression_(kDefaultCompressionGain), compression_(target_compression_), compression_accumulator_(compression_), - startup_min_level_(ClampLevel(startup_min_level, min_mic_level_)), clipped_level_min_(clipped_level_min) {} MonoAgc::~MonoAgc() = default; @@ -202,11 +183,12 @@ void MonoAgc::Initialize() { compression_accumulator_ = compression_; capture_output_used_ = true; check_volume_on_next_process_ = true; + frames_since_update_gain_ = 0; + is_first_frame_ = true; } -void MonoAgc::Process(const int16_t* audio, - size_t samples_per_channel, - int sample_rate_hz) { +void MonoAgc::Process(rtc::ArrayView audio, + absl::optional rms_error_override) { new_compression_to_set_ = absl::nullopt; if (check_volume_on_next_process_) { @@ -216,15 +198,39 @@ void MonoAgc::Process(const int16_t* audio, CheckVolumeAndReset(); } - agc_->Process(audio, samples_per_channel, sample_rate_hz); + agc_->Process(audio); + + // Always check if `agc_` has a new error available. If yes, `agc_` gets + // reset. + // TODO(webrtc:7494) Replace the `agc_` call `GetRmsErrorDb()` with `Reset()` + // if an error override is used. + int rms_error = 0; + bool update_gain = agc_->GetRmsErrorDb(&rms_error); + if (rms_error_override.has_value()) { + if (is_first_frame_ || frames_since_update_gain_ < kOverrideWaitFrames) { + update_gain = false; + } else { + rms_error = *rms_error_override; + update_gain = true; + } + } + + if (update_gain) { + UpdateGain(rms_error); + } - UpdateGain(); if (!disable_digital_adaptive_) { UpdateCompressor(); } + + is_first_frame_ = false; + if (frames_since_update_gain_ < kOverrideWaitFrames) { + ++frames_since_update_gain_; + } } void MonoAgc::HandleClipping(int clipped_level_step) { + RTC_DCHECK_GT(clipped_level_step, 0); // Always decrease the maximum level, even if the current level is below // threshold. SetMaxLevel(std::max(clipped_level_min_, max_level_ - clipped_level_step)); @@ -239,11 +245,13 @@ void MonoAgc::HandleClipping(int clipped_level_step) { SetLevel(std::max(clipped_level_min_, level_ - clipped_level_step)); // Reset the AGCs for all channels since the level has changed. 
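// Editorial worked example for the GetSpeechLevelErrorDb() helper added
// earlier in this hunk, under its constants (target -18 dBFS, speech
// probability threshold 0.5): a confidently detected talker measured at
// -30 dBFS yields an error of +12 dB, i.e. "raise the gain/volume".
#include <algorithm>
#include <cmath>

int SpeechLevelErrorDbSketch(float speech_level_dbfs,
                             float speech_probability) {
  constexpr float kTargetSpeechLevelDbfs = -18.0f;
  constexpr float kSilenceThreshold = 0.5f;
  if (speech_probability < kSilenceThreshold) {
    return 0;  // treat the frame as silence: no level error to correct
  }
  const float clamped_level =
      std::min(30.0f, std::max(-90.0f, speech_level_dbfs));
  return static_cast<int>(std::lround(kTargetSpeechLevelDbfs - clamped_level));
}
// SpeechLevelErrorDbSketch(-30.0f, 0.9f) == 12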
agc_->Reset(); + frames_since_update_gain_ = 0; + is_first_frame_ = false; } } void MonoAgc::SetLevel(int new_level) { - int voe_level = stream_analog_level_; + int voe_level = recommended_input_volume_; if (voe_level == 0) { RTC_DLOG(LS_INFO) << "[agc] VolumeCallbacks returned level=0, taking no action."; @@ -255,6 +263,10 @@ void MonoAgc::SetLevel(int new_level) { return; } + // Detect manual input volume adjustments by checking if the current level + // `voe_level` is outside of the `[level_ - kLevelQuantizationSlack, level_ + + // kLevelQuantizationSlack]` range where `level_` is the last input volume + // known by this gain controller. if (voe_level > level_ + kLevelQuantizationSlack || voe_level < level_ - kLevelQuantizationSlack) { RTC_DLOG(LS_INFO) << "[agc] Mic volume was manually adjusted. Updating " @@ -269,7 +281,8 @@ void MonoAgc::SetLevel(int new_level) { // was manually adjusted. The compressor will still provide some of the // desired gain change. agc_->Reset(); - + frames_since_update_gain_ = 0; + is_first_frame_ = false; return; } @@ -278,7 +291,7 @@ void MonoAgc::SetLevel(int new_level) { return; } - stream_analog_level_ = new_level; + recommended_input_volume_ = new_level; RTC_DLOG(LS_INFO) << "[agc] voe_level=" << voe_level << ", level_=" << level_ << ", new_level=" << new_level; level_ = new_level; @@ -311,7 +324,7 @@ void MonoAgc::HandleCaptureOutputUsedChange(bool capture_output_used) { } int MonoAgc::CheckVolumeAndReset() { - int level = stream_analog_level_; + int level = recommended_input_volume_; // Reasons for taking action at startup: // 1) A person starting a call is expected to be heard. // 2) Independent of interpretation of `level` == 0 we should raise it so the @@ -328,31 +341,33 @@ int MonoAgc::CheckVolumeAndReset() { } RTC_DLOG(LS_INFO) << "[agc] Initial GetMicVolume()=" << level; - int minLevel = startup_ ? startup_min_level_ : min_mic_level_; - if (level < minLevel) { - level = minLevel; + if (level < min_mic_level_) { + level = min_mic_level_; RTC_DLOG(LS_INFO) << "[agc] Initial volume too low, raising to " << level; - stream_analog_level_ = level; + recommended_input_volume_ = level; } agc_->Reset(); level_ = level; startup_ = false; + frames_since_update_gain_ = 0; + is_first_frame_ = true; return 0; } -// Requests the RMS error from AGC and distributes the required gain change -// between the digital compression stage and volume slider. We use the -// compressor first, providing a slack region around the current slider -// position to reduce movement. +// Distributes the required gain change between the digital compression stage +// and volume slider. We use the compressor first, providing a slack region +// around the current slider position to reduce movement. // // If the slider needs to be moved, we check first if the user has adjusted // it, in which case we take no action and cache the updated level. -void MonoAgc::UpdateGain() { - int rms_error = 0; - if (!agc_->GetRmsErrorDb(&rms_error)) { - // No error update ready. - return; - } +void MonoAgc::UpdateGain(int rms_error_db) { + int rms_error = rms_error_db; + + // Always reset the counter regardless of whether the gain is changed + // or not. This matches with the bahvior of `agc_` where the histogram is + // reset every time an RMS error is successfully read. + frames_since_update_gain_ = 0; + // The compressor will always add at least kMinCompressionGain. In effect, // this adjusts our target gain upward by the same amount and rms_error // needs to reflect that. 
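// Editorial sketch of the instance-counter pattern adopted in the next hunks
// (and in the suppression_gain.cc hunk earlier): a std::atomic<int> replaces
// rtc::AtomicOps, and fetch_add(1) + 1 reproduces the old
// "increment, then use the new value" semantics for the data-dumper id.
#include <atomic>

class InstanceCounted {
 public:
  InstanceCounted() : instance_id_(instance_counter_.fetch_add(1) + 1) {}
  int instance_id() const { return instance_id_; }

 private:
  static std::atomic<int> instance_counter_;
  const int instance_id_;
};

std::atomic<int> InstanceCounted::instance_counter_(0);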
@@ -441,76 +456,60 @@ void MonoAgc::UpdateCompressor() { } } -int AgcManagerDirect::instance_counter_ = 0; +std::atomic AgcManagerDirect::instance_counter_(0); AgcManagerDirect::AgcManagerDirect( - Agc* agc, - int startup_min_level, - int clipped_level_min, - int sample_rate_hz, - int clipped_level_step, - float clipped_ratio_threshold, - int clipped_wait_frames, - const ClippingPredictorConfig& clipping_config) - : AgcManagerDirect(/*num_capture_channels*/ 1, - startup_min_level, - clipped_level_min, - /*disable_digital_adaptive*/ false, - sample_rate_hz, - clipped_level_step, - clipped_ratio_threshold, - clipped_wait_frames, - clipping_config) { + const AudioProcessing::Config::GainController1::AnalogGainController& + analog_config, + Agc* agc) + : AgcManagerDirect(/*num_capture_channels=*/1, analog_config) { RTC_DCHECK(channel_agcs_[0]); RTC_DCHECK(agc); channel_agcs_[0]->set_agc(agc); } -AgcManagerDirect::AgcManagerDirect( - int num_capture_channels, - int startup_min_level, - int clipped_level_min, - bool disable_digital_adaptive, - int sample_rate_hz, - int clipped_level_step, - float clipped_ratio_threshold, - int clipped_wait_frames, - const ClippingPredictorConfig& clipping_config) - : data_dumper_( - new ApmDataDumper(rtc::AtomicOps::Increment(&instance_counter_))), - use_min_channel_level_(!UseMaxAnalogChannelLevel()), - sample_rate_hz_(sample_rate_hz), +AgcManagerDirect::AgcManagerDirect(int num_capture_channels, + const AnalogAgcConfig& analog_config) + : analog_controller_enabled_(analog_config.enabled), + min_mic_level_override_(GetMinMicLevelOverride()), + data_dumper_(new ApmDataDumper(instance_counter_.fetch_add(1) + 1)), num_capture_channels_(num_capture_channels), - disable_digital_adaptive_(disable_digital_adaptive), - frames_since_clipped_(clipped_wait_frames), + disable_digital_adaptive_(!analog_config.enable_digital_adaptive), + frames_since_clipped_(analog_config.clipped_wait_frames), capture_output_used_(true), - clipped_level_step_(clipped_level_step), - clipped_ratio_threshold_(clipped_ratio_threshold), - clipped_wait_frames_(clipped_wait_frames), + clipped_level_step_(analog_config.clipped_level_step), + clipped_ratio_threshold_(analog_config.clipped_ratio_threshold), + clipped_wait_frames_(analog_config.clipped_wait_frames), channel_agcs_(num_capture_channels), new_compressions_to_set_(num_capture_channels), clipping_predictor_( - CreateClippingPredictor(num_capture_channels, clipping_config)), - use_clipping_predictor_step_(!!clipping_predictor_ && - clipping_config.use_predicted_step), - clipping_predictor_evaluator_(kClippingPredictorEvaluatorHistorySize), - clipping_predictor_log_counter_(0), + CreateClippingPredictor(num_capture_channels, + analog_config.clipping_predictor)), + use_clipping_predictor_step_( + !!clipping_predictor_ && + analog_config.clipping_predictor.use_predicted_step), clipping_rate_log_(0.0f), clipping_rate_log_counter_(0) { - const int min_mic_level = GetMinMicLevel(); + RTC_LOG(LS_INFO) << "[agc] analog controller enabled: " + << (analog_controller_enabled_ ? "yes" : "no"); + const int min_mic_level = min_mic_level_override_.value_or(kMinMicLevel); + RTC_LOG(LS_INFO) << "[agc] Min mic level: " << min_mic_level + << " (overridden: " + << (min_mic_level_override_.has_value() ? "yes" : "no") + << ")"; for (size_t ch = 0; ch < channel_agcs_.size(); ++ch) { ApmDataDumper* data_dumper_ch = ch == 0 ? 
data_dumper_.get() : nullptr; channel_agcs_[ch] = std::make_unique( - data_dumper_ch, startup_min_level, clipped_level_min, + data_dumper_ch, analog_config.clipped_level_min, disable_digital_adaptive_, min_mic_level); } RTC_DCHECK(!channel_agcs_.empty()); - RTC_DCHECK_GT(clipped_level_step, 0); - RTC_DCHECK_LE(clipped_level_step, 255); - RTC_DCHECK_GT(clipped_ratio_threshold, 0.f); - RTC_DCHECK_LT(clipped_ratio_threshold, 1.f); - RTC_DCHECK_GT(clipped_wait_frames, 0); + RTC_DCHECK_GT(clipped_level_step_, 0); + RTC_DCHECK_LE(clipped_level_step_, 255); + RTC_DCHECK_GT(clipped_ratio_threshold_, 0.0f); + RTC_DCHECK_LT(clipped_ratio_threshold_, 1.0f); + RTC_DCHECK_GT(clipped_wait_frames_, 0); channel_agcs_[0]->ActivateLogging(); } @@ -525,41 +524,35 @@ void AgcManagerDirect::Initialize() { capture_output_used_ = true; AggregateChannelLevels(); - clipping_predictor_evaluator_.Reset(); - clipping_predictor_log_counter_ = 0; clipping_rate_log_ = 0.0f; clipping_rate_log_counter_ = 0; } void AgcManagerDirect::SetupDigitalGainControl( - GainControl* gain_control) const { - RTC_DCHECK(gain_control); - if (gain_control->set_mode(GainControl::kFixedDigital) != 0) { + GainControl& gain_control) const { + if (gain_control.set_mode(GainControl::kFixedDigital) != 0) { RTC_LOG(LS_ERROR) << "set_mode(GainControl::kFixedDigital) failed."; } const int target_level_dbfs = disable_digital_adaptive_ ? 0 : 2; - if (gain_control->set_target_level_dbfs(target_level_dbfs) != 0) { + if (gain_control.set_target_level_dbfs(target_level_dbfs) != 0) { RTC_LOG(LS_ERROR) << "set_target_level_dbfs() failed."; } const int compression_gain_db = disable_digital_adaptive_ ? 0 : kDefaultCompressionGain; - if (gain_control->set_compression_gain_db(compression_gain_db) != 0) { + if (gain_control.set_compression_gain_db(compression_gain_db) != 0) { RTC_LOG(LS_ERROR) << "set_compression_gain_db() failed."; } const bool enable_limiter = !disable_digital_adaptive_; - if (gain_control->enable_limiter(enable_limiter) != 0) { + if (gain_control.enable_limiter(enable_limiter) != 0) { RTC_LOG(LS_ERROR) << "enable_limiter() failed."; } } -void AgcManagerDirect::AnalyzePreProcess(const AudioBuffer* audio) { +void AgcManagerDirect::AnalyzePreProcess(const AudioBuffer& audio_buffer) { + const float* const* audio = audio_buffer.channels_const(); + size_t samples_per_channel = audio_buffer.num_frames(); RTC_DCHECK(audio); - AnalyzePreProcess(audio->channels_const(), audio->num_frames()); -} -void AgcManagerDirect::AnalyzePreProcess(const float* const* audio, - size_t samples_per_channel) { - RTC_DCHECK(audio); AggregateChannelLevels(); if (!capture_output_used_) { return; @@ -602,28 +595,13 @@ void AgcManagerDirect::AnalyzePreProcess(const float* const* audio, if (!!clipping_predictor_) { for (int channel = 0; channel < num_capture_channels_; ++channel) { const auto step = clipping_predictor_->EstimateClippedLevelStep( - channel, stream_analog_level_, clipped_level_step_, + channel, recommended_input_volume_, clipped_level_step_, channel_agcs_[channel]->min_mic_level(), kMaxMicLevel); if (step.has_value()) { predicted_step = std::max(predicted_step, step.value()); clipping_predicted = true; } } - // Clipping prediction evaluation. 
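// Editorial sketch of the per-channel clipping test that feeds the
// clipping_detected flag handled around this hunk (see ComputeClippedRatio()
// above): a sample counts as clipped when it sits at the int16 rails in
// float-S16 scale, and the ratio is clipped samples over the frame length.
#include <cstddef>

float ClippedRatioForChannelSketch(const float* channel,
                                   std::size_t samples_per_channel) {
  std::size_t num_clipped = 0;
  for (std::size_t i = 0; i < samples_per_channel; ++i) {
    if (channel[i] >= 32767.0f || channel[i] <= -32768.0f) {
      ++num_clipped;
    }
  }
  return static_cast<float>(num_clipped) /
         static_cast<float>(samples_per_channel);
}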
- absl::optional prediction_interval = - clipping_predictor_evaluator_.Observe(clipping_detected, - clipping_predicted); - if (prediction_interval.has_value()) { - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.Agc.ClippingPredictor.PredictionInterval", - prediction_interval.value(), /*min=*/0, - /*max=*/49, /*bucket_count=*/50); - } - clipping_predictor_log_counter_++; - if (clipping_predictor_log_counter_ == kNumFramesIn30Seconds) { - LogClippingPredictorMetrics(clipping_predictor_evaluator_); - clipping_predictor_log_counter_ = 0; - } } if (clipping_detected) { RTC_DLOG(LS_INFO) << "[agc] Clipping detected. clipped_ratio=" @@ -645,34 +623,38 @@ void AgcManagerDirect::AnalyzePreProcess(const float* const* audio, frames_since_clipped_ = 0; if (!!clipping_predictor_) { clipping_predictor_->Reset(); - clipping_predictor_evaluator_.RemoveExpectations(); } } AggregateChannelLevels(); } -void AgcManagerDirect::Process(const AudioBuffer* audio) { +void AgcManagerDirect::Process(const AudioBuffer& audio_buffer) { + Process(audio_buffer, /*speech_probability=*/absl::nullopt, + /*speech_level_dbfs=*/absl::nullopt); +} + +void AgcManagerDirect::Process(const AudioBuffer& audio_buffer, + absl::optional speech_probability, + absl::optional speech_level_dbfs) { AggregateChannelLevels(); if (!capture_output_used_) { return; } + const size_t num_frames_per_band = audio_buffer.num_frames_per_band(); + absl::optional rms_error_override = absl::nullopt; + if (speech_probability.has_value() && speech_level_dbfs.has_value()) { + rms_error_override = + GetSpeechLevelErrorDb(*speech_level_dbfs, *speech_probability); + } for (size_t ch = 0; ch < channel_agcs_.size(); ++ch) { - int16_t* audio_use = nullptr; std::array audio_data; - int num_frames_per_band; - if (audio) { - FloatS16ToS16(audio->split_bands_const_f(ch)[0], - audio->num_frames_per_band(), audio_data.data()); - audio_use = audio_data.data(); - num_frames_per_band = audio->num_frames_per_band(); - } else { - // Only used for testing. - // TODO(peah): Change unittests to only allow on non-null audio input. 
- num_frames_per_band = 320; - } - channel_agcs_[ch]->Process(audio_use, num_frames_per_band, sample_rate_hz_); + int16_t* audio_use = audio_data.data(); + FloatS16ToS16(audio_buffer.split_bands_const_f(ch)[0], num_frames_per_band, + audio_use); + channel_agcs_[ch]->Process({audio_use, num_frames_per_band}, + rms_error_override); new_compressions_to_set_[ch] = channel_agcs_[ch]->new_compression(); } @@ -700,6 +682,10 @@ float AgcManagerDirect::voice_probability() const { } void AgcManagerDirect::set_stream_analog_level(int level) { + if (!analog_controller_enabled_) { + recommended_input_volume_ = level; + } + for (size_t ch = 0; ch < channel_agcs_.size(); ++ch) { channel_agcs_[ch]->set_stream_analog_level(level); } @@ -708,25 +694,25 @@ void AgcManagerDirect::set_stream_analog_level(int level) { } void AgcManagerDirect::AggregateChannelLevels() { - stream_analog_level_ = channel_agcs_[0]->stream_analog_level(); + int new_recommended_input_volume = + channel_agcs_[0]->recommended_analog_level(); channel_controlling_gain_ = 0; - if (use_min_channel_level_) { - for (size_t ch = 1; ch < channel_agcs_.size(); ++ch) { - int level = channel_agcs_[ch]->stream_analog_level(); - if (level < stream_analog_level_) { - stream_analog_level_ = level; - channel_controlling_gain_ = static_cast(ch); - } - } - } else { - for (size_t ch = 1; ch < channel_agcs_.size(); ++ch) { - int level = channel_agcs_[ch]->stream_analog_level(); - if (level > stream_analog_level_) { - stream_analog_level_ = level; - channel_controlling_gain_ = static_cast(ch); - } + for (size_t ch = 1; ch < channel_agcs_.size(); ++ch) { + int level = channel_agcs_[ch]->recommended_analog_level(); + if (level < new_recommended_input_volume) { + new_recommended_input_volume = level; + channel_controlling_gain_ = static_cast(ch); } } + + if (min_mic_level_override_.has_value() && new_recommended_input_volume > 0) { + new_recommended_input_volume = + std::max(new_recommended_input_volume, *min_mic_level_override_); + } + + if (analog_controller_enabled_) { + recommended_input_volume_ = new_recommended_input_volume; + } } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.h index a452ee1c43..d1314c66bd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.h @@ -11,13 +11,15 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC_AGC_MANAGER_DIRECT_H_ #define MODULES_AUDIO_PROCESSING_AGC_AGC_MANAGER_DIRECT_H_ +#include #include #include "absl/types/optional.h" +#include "api/array_view.h" #include "modules/audio_processing/agc/agc.h" -#include "modules/audio_processing/agc/clipping_predictor.h" -#include "modules/audio_processing/agc/clipping_predictor_evaluator.h" +#include "modules/audio_processing/agc2/clipping_predictor.h" #include "modules/audio_processing/audio_buffer.h" +#include "modules/audio_processing/include/audio_processing.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/gtest_prod_util.h" @@ -26,117 +28,146 @@ namespace webrtc { class MonoAgc; class GainControl; -// Direct interface to use AGC to set volume and compression values. -// AudioProcessing uses this interface directly to integrate the callback-less -// AGC. -// +// Adaptive Gain Controller (AGC) that controls the input volume and a digital +// gain. 
The input volume controller recommends what volume to use, handles
+// volume changes and clipping. In particular, it handles changes triggered by
+// the user (e.g., volume set to zero by a HW mute button). The digital
+// controller chooses and applies the digital compression gain.
 // This class is not thread-safe.
+// TODO(bugs.webrtc.org/7494): Use applied/recommended input volume naming
+// convention.
 class AgcManagerDirect final {
  public:
-  // AgcManagerDirect will configure GainControl internally. The user is
-  // responsible for processing the audio using it after the call to Process.
-  // The operating range of startup_min_level is [12, 255] and any input value
-  // outside that range will be clamped. `clipped_level_step` is the amount
-  // the microphone level is lowered with every clipping event, limited to
-  // (0, 255]. `clipped_ratio_threshold` is the proportion of clipped
-  // samples required to declare a clipping event, limited to (0.f, 1.f).
-  // `clipped_wait_frames` is the time in frames to wait after a clipping event
-  // before checking again, limited to values higher than 0.
+  // Ctor. `num_capture_channels` specifies the number of channels for the audio
+  // passed to `AnalyzePreProcess()` and `Process()`. Clamps
+  // `analog_config.startup_min_level` in the [12, 255] range.
   AgcManagerDirect(
       int num_capture_channels,
-      int startup_min_level,
-      int clipped_level_min,
-      bool disable_digital_adaptive,
-      int sample_rate_hz,
-      int clipped_level_step,
-      float clipped_ratio_threshold,
-      int clipped_wait_frames,
-      const AudioProcessing::Config::GainController1::AnalogGainController::
-          ClippingPredictor& clipping_config);
+      const AudioProcessing::Config::GainController1::AnalogGainController&
+          analog_config);
   ~AgcManagerDirect();
   AgcManagerDirect(const AgcManagerDirect&) = delete;
   AgcManagerDirect& operator=(const AgcManagerDirect&) = delete;
   void Initialize();
-  void SetupDigitalGainControl(GainControl* gain_control) const;
-  void AnalyzePreProcess(const AudioBuffer* audio);
-  void Process(const AudioBuffer* audio);
+  // Configures `gain_control` to work as a fixed digital controller so that the
+  // adaptive part is only handled by this gain controller. Must be called if
+  // `gain_control` is also used to avoid the side-effects of running two AGCs.
+  void SetupDigitalGainControl(GainControl& gain_control) const;
+
+  // Sets the applied input volume.
+  void set_stream_analog_level(int level);
+
+  // TODO(bugs.webrtc.org/7494): Add argument for the applied input volume and
+  // remove `set_stream_analog_level()`.
+  // Analyzes `audio` before `Process()` is called so that the analysis can be
+  // performed before external digital processing operations take place (e.g.,
+  // echo cancellation). The analysis consists of input clipping detection and
+  // prediction (if enabled). Must be called after `set_stream_analog_level()`.
+  void AnalyzePreProcess(const AudioBuffer& audio_buffer);
+
+  // Processes `audio_buffer`. Chooses a digital compression gain and the new
+  // input volume to recommend. Must be called after `AnalyzePreProcess()`. If
+  // `speech_probability` (range [0.0f, 1.0f]) and `speech_level_dbfs` (range
+  // [-90.f, 30.0f]) are given, uses them to override the estimated RMS error.
+  // TODO(webrtc:7494): This signature is needed for testing purposes, unify
+  // the signatures when the clean-up is done.
+  void Process(const AudioBuffer& audio_buffer,
+               absl::optional<float> speech_probability,
+               absl::optional<float> speech_level_dbfs);
+
+  // Processes `audio_buffer`. Chooses a digital compression gain and the new
+  // input volume to recommend. Must be called after `AnalyzePreProcess()`.
+  void Process(const AudioBuffer& audio_buffer);
+
+  // TODO(bugs.webrtc.org/7494): Return recommended input volume and remove
+  // `recommended_analog_level()`.
+  // Returns the recommended input volume. If the input volume controller is
+  // disabled, returns the input volume set via the latest
+  // `set_stream_analog_level()` call. Must be called after
+  // `AnalyzePreProcess()` and `Process()`.
+  int recommended_analog_level() const { return recommended_input_volume_; }
   // Call when the capture stream output has been flagged to be used/not-used.
   // If unused, the manager disregards all incoming audio.
   void HandleCaptureOutputUsedChange(bool capture_output_used);
+  float voice_probability() const;
-  int stream_analog_level() const { return stream_analog_level_; }
-  void set_stream_analog_level(int level);
   int num_channels() const { return num_capture_channels_; }
-  int sample_rate_hz() const { return sample_rate_hz_; }
-  // If available, returns a new compression gain for the digital gain control.
+  // If available, returns the latest digital compression gain that has been
+  // chosen.
   absl::optional<int> GetDigitalComressionGain();
   // Returns true if clipping prediction is enabled.
   bool clipping_predictor_enabled() const { return !!clipping_predictor_; }
-  // Returns true if clipping prediction is used to adjust the analog gain.
+  // Returns true if clipping prediction is used to adjust the input volume.
   bool use_clipping_predictor_step() const {
     return use_clipping_predictor_step_;
   }
  private:
-  friend class AgcManagerDirectTest;
+  friend class AgcManagerDirectTestHelper;
-  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest,
-                           DisableDigitalDisablesDigital);
-  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest,
-                           AgcMinMicLevelExperiment);
-  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest,
+  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectTest, DisableDigitalDisablesDigital);
+  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectTest,
+                           AgcMinMicLevelExperimentDefault);
+  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectTest,
                            AgcMinMicLevelExperimentDisabled);
-  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest,
+  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectTest,
                            AgcMinMicLevelExperimentOutOfRangeAbove);
-  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest,
+  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectTest,
                            AgcMinMicLevelExperimentOutOfRangeBelow);
-  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest,
+  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectTest,
                            AgcMinMicLevelExperimentEnabled50);
-  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest,
+  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectTest,
                            AgcMinMicLevelExperimentEnabledAboveStartupLevel);
-  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest,
+  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectParametrizedTest,
                            ClippingParametersVerified);
-  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest,
+  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectParametrizedTest,
                            DisableClippingPredictorDoesNotLowerVolume);
-  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest,
+  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectParametrizedTest,
                            UsedClippingPredictionsProduceLowerAnalogLevels);
-  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest,
+  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectParametrizedTest,
                            UnusedClippingPredictionsProduceEqualAnalogLevels);
+  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectParametrizedTest,
+                           EmptyRmsErrorOverrideHasNoEffect);
+  FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectParametrizedTest,
+                           NonEmptyRmsErrorOverrideHasEffect);
-  // Dependency injection for testing. Don't delete `agc` as the memory is owned
-  // by the manager.
+  // Ctor that creates a single-channel AGC by injecting `agc`.
+  // `agc` will be owned by this class; hence, do not delete it.
   AgcManagerDirect(
-      Agc* agc,
-      int startup_min_level,
-      int clipped_level_min,
-      int sample_rate_hz,
-      int clipped_level_step,
-      float clipped_ratio_threshold,
-      int clipped_wait_frames,
-      const AudioProcessing::Config::GainController1::AnalogGainController::
-          ClippingPredictor& clipping_config);
-
-  void AnalyzePreProcess(const float* const* audio, size_t samples_per_channel);
+      const AudioProcessing::Config::GainController1::AnalogGainController&
+          analog_config,
+      Agc* agc);
   void AggregateChannelLevels();
+  const bool analog_controller_enabled_;
+
+  const absl::optional<int> min_mic_level_override_;
   std::unique_ptr<ApmDataDumper> data_dumper_;
-  static int instance_counter_;
-  const bool use_min_channel_level_;
-  const int sample_rate_hz_;
+  static std::atomic<int> instance_counter_;
   const int num_capture_channels_;
   const bool disable_digital_adaptive_;
   int frames_since_clipped_;
-  int stream_analog_level_ = 0;
+
+  // TODO(bugs.webrtc.org/7494): Create a separate member for the applied input
+  // volume.
+  // TODO(bugs.webrtc.org/7494): Once
+  // `AudioProcessingImpl::recommended_stream_analog_level()` becomes a trivial
+  // getter, leave uninitialized.
+  // Recommended input volume. After `set_stream_analog_level()` is called it
+  // holds the observed input volume. Possibly updated by `AnalyzePreProcess()`
+  // and `Process()`; after these calls, holds the recommended input volume.
+  int recommended_input_volume_ = 0;
+
   bool capture_output_used_;
   int channel_controlling_gain_ = 0;
@@ -149,16 +180,15 @@ class AgcManagerDirect final {
   const std::unique_ptr<ClippingPredictor> clipping_predictor_;
   const bool use_clipping_predictor_step_;
-  ClippingPredictorEvaluator clipping_predictor_evaluator_;
-  int clipping_predictor_log_counter_;
   float clipping_rate_log_;
   int clipping_rate_log_counter_;
 };
+// TODO(bugs.webrtc.org/7494): Use applied/recommended input volume naming
+// convention.
 class MonoAgc {
  public:
   MonoAgc(ApmDataDumper* data_dumper,
-          int startup_min_level,
          int clipped_level_min,
          bool disable_digital_adaptive,
          int min_mic_level);
@@ -169,14 +199,25 @@ class MonoAgc {
   void Initialize();
   void HandleCaptureOutputUsedChange(bool capture_output_used);
+  // Sets the current input volume.
+  void set_stream_analog_level(int level) { recommended_input_volume_ = level; }
+
+  // Lowers the recommended input volume in response to clipping based on the
+  // suggested reduction `clipped_level_step`. Must be called after
+  // `set_stream_analog_level()`.
   void HandleClipping(int clipped_level_step);
-  void Process(const int16_t* audio,
-               size_t samples_per_channel,
-               int sample_rate_hz);
+  // Analyzes `audio`, requests the RMS error from AGC, updates the recommended
+  // input volume based on the estimated speech level and, if enabled, updates
+  // the (digital) compression gain to be applied by `agc_`. Must be called
+  // after `HandleClipping()`. If `rms_error_override` has a value, RMS error
+  // from AGC is overridden by it.
+  void Process(rtc::ArrayView<const int16_t> audio,
+               absl::optional<int> rms_error_override);
+
+  // Returns the recommended input volume. Must be called after `Process()`.
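For reference, the per-frame call order that the comments above prescribe (set_stream_analog_level(), then AnalyzePreProcess(), then Process(), then recommended_analog_level()) can be summarized in a short usage sketch. This is illustrative only, not part of the patch: it assumes a build inside the WebRTC tree so that the include paths below resolve, and it leaves AudioBuffer construction to the caller.

// Illustrative sketch only; assumed include paths, compiled inside the
// WebRTC source tree.
#include "modules/audio_processing/agc/agc_manager_direct.h"
#include "modules/audio_processing/audio_buffer.h"

namespace example {

// Runs one capture frame through the refactored controller, following the
// documented order: report the applied volume, analyze for clipping before
// external digital processing, process, then read the recommendation.
int RecommendVolumeForFrame(webrtc::AgcManagerDirect& manager,
                            const webrtc::AudioBuffer& capture,
                            int applied_mic_volume) {
  // 1. Report the input volume the platform actually applied for this frame.
  manager.set_stream_analog_level(applied_mic_volume);
  // 2. Clipping detection/prediction runs before e.g. echo cancellation.
  manager.AnalyzePreProcess(capture);
  // 3. Choose the digital compression gain and the new volume to recommend
  //    (no RMS-error override in this sketch).
  manager.Process(capture);
  // 4. The volume the caller should apply to the capture device.
  return manager.recommended_analog_level();
}

}  // namespace example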
+ int recommended_analog_level() const { return recommended_input_volume_; } - void set_stream_analog_level(int level) { stream_analog_level_ = level; } - int stream_analog_level() const { return stream_analog_level_; } float voice_probability() const { return agc_->voice_probability(); } void ActivateLogging() { log_to_histograms_ = true; } absl::optional new_compression() const { @@ -186,20 +227,19 @@ class MonoAgc { // Only used for testing. void set_agc(Agc* agc) { agc_.reset(agc); } int min_mic_level() const { return min_mic_level_; } - int startup_min_level() const { return startup_min_level_; } private: - // Sets a new microphone level, after first checking that it hasn't been - // updated by the user, in which case no action is taken. + // Sets a new input volume, after first checking that it hasn't been updated + // by the user, in which case no action is taken. void SetLevel(int new_level); - // Set the maximum level the AGC is allowed to apply. Also updates the - // maximum compression gain to compensate. The level must be at least + // Set the maximum input volume the AGC is allowed to apply. Also updates the + // maximum compression gain to compensate. The volume must be at least // `kClippedLevelMin`. void SetMaxLevel(int level); int CheckVolumeAndReset(); - void UpdateGain(); + void UpdateGain(int rms_error_db); void UpdateCompressor(); const int min_mic_level_; @@ -214,12 +254,24 @@ class MonoAgc { bool capture_output_used_ = true; bool check_volume_on_next_process_ = true; bool startup_ = true; - int startup_min_level_; int calls_since_last_gain_log_ = 0; - int stream_analog_level_ = 0; + + // TODO(bugs.webrtc.org/7494): Create a separate member for the applied + // input volume. + // Recommended input volume. After `set_stream_analog_level()` is + // called, it holds the observed applied input volume. Possibly updated by + // `HandleClipping()` and `Process()`; after these calls, holds the + // recommended input volume. + int recommended_input_volume_ = 0; + absl::optional new_compression_to_set_; bool log_to_histograms_ = false; const int clipped_level_min_; + + // Frames since the last `UpdateGain()` call. + int frames_since_update_gain_ = 0; + // Set to true for the first frame after startup and reset, otherwise false. + bool is_first_frame_ = true; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/analog_gain_stats_reporter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/analog_gain_stats_reporter.cc deleted file mode 100644 index 0d8753a7c8..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/analog_gain_stats_reporter.cc +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_processing/agc/analog_gain_stats_reporter.h" - -#include - -#include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_minmax.h" -#include "system_wrappers/include/metrics.h" - -namespace webrtc { -namespace { - -constexpr int kFramesIn60Seconds = 6000; -constexpr int kMinGain = 0; -constexpr int kMaxGain = 255; -constexpr int kMaxUpdate = kMaxGain - kMinGain; - -float ComputeAverageUpdate(int sum_updates, int num_updates) { - RTC_DCHECK_GE(sum_updates, 0); - RTC_DCHECK_LE(sum_updates, kMaxUpdate * kFramesIn60Seconds); - RTC_DCHECK_GE(num_updates, 0); - RTC_DCHECK_LE(num_updates, kFramesIn60Seconds); - if (num_updates == 0) { - return 0.0f; - } - return std::round(static_cast(sum_updates) / - static_cast(num_updates)); -} -} // namespace - -AnalogGainStatsReporter::AnalogGainStatsReporter() = default; - -AnalogGainStatsReporter::~AnalogGainStatsReporter() = default; - -void AnalogGainStatsReporter::UpdateStatistics(int analog_mic_level) { - RTC_DCHECK_GE(analog_mic_level, kMinGain); - RTC_DCHECK_LE(analog_mic_level, kMaxGain); - if (previous_analog_mic_level_.has_value() && - analog_mic_level != previous_analog_mic_level_.value()) { - const int level_change = - analog_mic_level - previous_analog_mic_level_.value(); - if (level_change < 0) { - ++level_update_stats_.num_decreases; - level_update_stats_.sum_decreases -= level_change; - } else { - ++level_update_stats_.num_increases; - level_update_stats_.sum_increases += level_change; - } - } - // Periodically log analog gain change metrics. - if (++log_level_update_stats_counter_ >= kFramesIn60Seconds) { - LogLevelUpdateStats(); - level_update_stats_ = {}; - log_level_update_stats_counter_ = 0; - } - previous_analog_mic_level_ = analog_mic_level; -} - -void AnalogGainStatsReporter::LogLevelUpdateStats() const { - const float average_decrease = ComputeAverageUpdate( - level_update_stats_.sum_decreases, level_update_stats_.num_decreases); - const float average_increase = ComputeAverageUpdate( - level_update_stats_.sum_increases, level_update_stats_.num_increases); - const int num_updates = - level_update_stats_.num_decreases + level_update_stats_.num_increases; - const float average_update = ComputeAverageUpdate( - level_update_stats_.sum_decreases + level_update_stats_.sum_increases, - num_updates); - RTC_DLOG(LS_INFO) << "Analog gain update rate: " - << "num_updates=" << num_updates - << ", num_decreases=" << level_update_stats_.num_decreases - << ", num_increases=" << level_update_stats_.num_increases; - RTC_DLOG(LS_INFO) << "Analog gain update average: " - << "average_update=" << average_update - << ", average_decrease=" << average_decrease - << ", average_increase=" << average_increase; - RTC_HISTOGRAM_COUNTS_LINEAR( - /*name=*/"WebRTC.Audio.ApmAnalogGainDecreaseRate", - /*sample=*/level_update_stats_.num_decreases, - /*min=*/1, - /*max=*/kFramesIn60Seconds, - /*bucket_count=*/50); - if (level_update_stats_.num_decreases > 0) { - RTC_HISTOGRAM_COUNTS_LINEAR( - /*name=*/"WebRTC.Audio.ApmAnalogGainDecreaseAverage", - /*sample=*/average_decrease, - /*min=*/1, - /*max=*/kMaxUpdate, - /*bucket_count=*/50); - } - RTC_HISTOGRAM_COUNTS_LINEAR( - /*name=*/"WebRTC.Audio.ApmAnalogGainIncreaseRate", - /*sample=*/level_update_stats_.num_increases, - /*min=*/1, - /*max=*/kFramesIn60Seconds, - /*bucket_count=*/50); - if (level_update_stats_.num_increases > 0) { - RTC_HISTOGRAM_COUNTS_LINEAR( - /*name=*/"WebRTC.Audio.ApmAnalogGainIncreaseAverage", - /*sample=*/average_increase, - /*min=*/1, - /*max=*/kMaxUpdate, - 
/*bucket_count=*/50); - } - RTC_HISTOGRAM_COUNTS_LINEAR( - /*name=*/"WebRTC.Audio.ApmAnalogGainUpdateRate", - /*sample=*/num_updates, - /*min=*/1, - /*max=*/kFramesIn60Seconds, - /*bucket_count=*/50); - if (num_updates > 0) { - RTC_HISTOGRAM_COUNTS_LINEAR( - /*name=*/"WebRTC.Audio.ApmAnalogGainUpdateAverage", - /*sample=*/average_update, - /*min=*/1, - /*max=*/kMaxUpdate, - /*bucket_count=*/50); - } -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/analog_gain_stats_reporter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/analog_gain_stats_reporter.h deleted file mode 100644 index c9442e8a43..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/analog_gain_stats_reporter.h +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_AGC_ANALOG_GAIN_STATS_REPORTER_H_ -#define MODULES_AUDIO_PROCESSING_AGC_ANALOG_GAIN_STATS_REPORTER_H_ - -#include "absl/types/optional.h" -#include "rtc_base/gtest_prod_util.h" - -namespace webrtc { - -// Analog gain statistics calculator. Computes aggregate stats based on the -// framewise mic levels processed in `UpdateStatistics()`. Periodically logs the -// statistics into a histogram. -class AnalogGainStatsReporter { - public: - AnalogGainStatsReporter(); - AnalogGainStatsReporter(const AnalogGainStatsReporter&) = delete; - AnalogGainStatsReporter operator=(const AnalogGainStatsReporter&) = delete; - ~AnalogGainStatsReporter(); - - // Updates the stats based on the `analog_mic_level`. Periodically logs the - // stats into a histogram. - void UpdateStatistics(int analog_mic_level); - - private: - FRIEND_TEST_ALL_PREFIXES(AnalogGainStatsReporterTest, - CheckLevelUpdateStatsForEmptyStats); - FRIEND_TEST_ALL_PREFIXES(AnalogGainStatsReporterTest, - CheckLevelUpdateStatsAfterNoGainChange); - FRIEND_TEST_ALL_PREFIXES(AnalogGainStatsReporterTest, - CheckLevelUpdateStatsAfterGainIncrease); - FRIEND_TEST_ALL_PREFIXES(AnalogGainStatsReporterTest, - CheckLevelUpdateStatsAfterGainDecrease); - FRIEND_TEST_ALL_PREFIXES(AnalogGainStatsReporterTest, - CheckLevelUpdateStatsAfterReset); - - // Stores analog gain update stats to enable calculation of update rate and - // average update separately for gain increases and decreases. - struct LevelUpdateStats { - int num_decreases = 0; - int num_increases = 0; - int sum_decreases = 0; - int sum_increases = 0; - } level_update_stats_; - - // Returns a copy of the stored statistics. Use only for testing. - const LevelUpdateStats level_update_stats() const { - return level_update_stats_; - } - - // Computes aggregate stat and logs them into a histogram. 
- void LogLevelUpdateStats() const; - - int log_level_update_stats_counter_ = 0; - absl::optional previous_analog_mic_level_ = absl::nullopt; -}; -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_AGC_ANALOG_GAIN_STATS_REPORTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor_evaluator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor_evaluator.cc deleted file mode 100644 index ed7198d119..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor_evaluator.cc +++ /dev/null @@ -1,214 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/agc/clipping_predictor_evaluator.h" - -#include - -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { -namespace { - -// Returns the index of the oldest item in the ring buffer for a non-empty -// ring buffer with give `size`, `tail` index and `capacity`. -int OldestExpectedDetectionIndex(int size, int tail, int capacity) { - RTC_DCHECK_GT(size, 0); - return tail - size + (tail < size ? capacity : 0); -} - -} // namespace - -ClippingPredictorEvaluator::ClippingPredictorEvaluator(int history_size) - : history_size_(history_size), - ring_buffer_capacity_(history_size + 1), - ring_buffer_(ring_buffer_capacity_) { - RTC_DCHECK_GT(history_size_, 0); - Reset(); - counters_.true_positives = 0; - counters_.true_negatives = 0; - counters_.false_positives = 0; - counters_.false_negatives = 0; -} - -ClippingPredictorEvaluator::~ClippingPredictorEvaluator() = default; - -absl::optional ClippingPredictorEvaluator::Observe( - bool clipping_detected, - bool clipping_predicted) { - RTC_DCHECK_GE(ring_buffer_size_, 0); - RTC_DCHECK_LE(ring_buffer_size_, ring_buffer_capacity_); - RTC_DCHECK_GE(ring_buffer_tail_, 0); - RTC_DCHECK_LT(ring_buffer_tail_, ring_buffer_capacity_); - - DecreaseTimesToLive(); - // Clipping is expected if there are expected detections regardless of - // whether all the expected detections have been previously matched - i.e., - // `ExpectedDetection::detected` is true. - const bool clipping_expected = ring_buffer_size_ > 0; - - absl::optional prediction_interval; - if (clipping_expected && clipping_detected) { - prediction_interval = FindEarliestPredictionInterval(); - // Add a true positive for each unexpired expected detection. - const int num_modified_items = MarkExpectedDetectionAsDetected(); - counters_.true_positives += num_modified_items; - RTC_DCHECK(prediction_interval.has_value() || num_modified_items == 0); - RTC_DCHECK(!prediction_interval.has_value() || num_modified_items > 0); - } else if (clipping_expected && !clipping_detected) { - // Add a false positive if there is one expected detection that has expired - // and that has never been matched before. Note that there is at most one - // unmatched expired detection. 
- if (HasExpiredUnmatchedExpectedDetection()) { - counters_.false_positives++; - } - } else if (!clipping_expected && clipping_detected) { - counters_.false_negatives++; - } else { - RTC_DCHECK(!clipping_expected && !clipping_detected); - counters_.true_negatives++; - } - - if (clipping_predicted) { - // TODO(bugs.webrtc.org/12874): Use designated initializers one fixed. - Push(/*expected_detection=*/{/*ttl=*/history_size_, /*detected=*/false}); - } - - return prediction_interval; -} - -void ClippingPredictorEvaluator::RemoveExpectations() { - // Empty the ring buffer of expected detections. - ring_buffer_tail_ = 0; - ring_buffer_size_ = 0; -} - -void ClippingPredictorEvaluator::Reset() { - counters_.true_positives = 0; - counters_.true_negatives = 0; - counters_.false_positives = 0; - counters_.false_negatives = 0; - RemoveExpectations(); -} - -// Cost: O(1). -void ClippingPredictorEvaluator::Push(ExpectedDetection value) { - ring_buffer_[ring_buffer_tail_] = value; - ring_buffer_tail_++; - if (ring_buffer_tail_ == ring_buffer_capacity_) { - ring_buffer_tail_ = 0; - } - ring_buffer_size_ = std::min(ring_buffer_capacity_, ring_buffer_size_ + 1); -} - -// Cost: O(N). -void ClippingPredictorEvaluator::DecreaseTimesToLive() { - bool expired_found = false; - for (int i = ring_buffer_tail_ - ring_buffer_size_; i < ring_buffer_tail_; - ++i) { - int index = i >= 0 ? i : ring_buffer_capacity_ + i; - RTC_DCHECK_GE(index, 0); - RTC_DCHECK_LT(index, ring_buffer_.size()); - RTC_DCHECK_GE(ring_buffer_[index].ttl, 0); - if (ring_buffer_[index].ttl == 0) { - RTC_DCHECK(!expired_found) - << "There must be at most one expired item in the ring buffer."; - expired_found = true; - RTC_DCHECK_EQ(index, OldestExpectedDetectionIndex(ring_buffer_size_, - ring_buffer_tail_, - ring_buffer_capacity_)) - << "The expired item must be the oldest in the ring buffer."; - } - ring_buffer_[index].ttl--; - } - if (expired_found) { - ring_buffer_size_--; - } -} - -// Cost: O(N). -absl::optional ClippingPredictorEvaluator::FindEarliestPredictionInterval() - const { - absl::optional prediction_interval; - for (int i = ring_buffer_tail_ - ring_buffer_size_; i < ring_buffer_tail_; - ++i) { - int index = i >= 0 ? i : ring_buffer_capacity_ + i; - RTC_DCHECK_GE(index, 0); - RTC_DCHECK_LT(index, ring_buffer_.size()); - if (!ring_buffer_[index].detected) { - prediction_interval = std::max(prediction_interval.value_or(0), - history_size_ - ring_buffer_[index].ttl); - } - } - return prediction_interval; -} - -// Cost: O(N). -int ClippingPredictorEvaluator::MarkExpectedDetectionAsDetected() { - int num_modified_items = 0; - for (int i = ring_buffer_tail_ - ring_buffer_size_; i < ring_buffer_tail_; - ++i) { - int index = i >= 0 ? i : ring_buffer_capacity_ + i; - RTC_DCHECK_GE(index, 0); - RTC_DCHECK_LT(index, ring_buffer_.size()); - if (!ring_buffer_[index].detected) { - num_modified_items++; - } - ring_buffer_[index].detected = true; - } - return num_modified_items; -} - -// Cost: O(1). -bool ClippingPredictorEvaluator::HasExpiredUnmatchedExpectedDetection() const { - if (ring_buffer_size_ == 0) { - return false; - } - // If an expired item, that is `ttl` equal to 0, exists, it must be the - // oldest. 
- const int oldest_index = OldestExpectedDetectionIndex( - ring_buffer_size_, ring_buffer_tail_, ring_buffer_capacity_); - RTC_DCHECK_GE(oldest_index, 0); - RTC_DCHECK_LT(oldest_index, ring_buffer_.size()); - return ring_buffer_[oldest_index].ttl == 0 && - !ring_buffer_[oldest_index].detected; -} - -absl::optional ComputeClippingPredictionMetrics( - const ClippingPredictionCounters& counters) { - RTC_DCHECK_GE(counters.true_positives, 0); - RTC_DCHECK_GE(counters.true_negatives, 0); - RTC_DCHECK_GE(counters.false_positives, 0); - RTC_DCHECK_GE(counters.false_negatives, 0); - if (counters.true_positives == 0) { - // Both precision and recall are zero in this case and hence the F1 score - // is undefined. - return absl::nullopt; - } - int precision_denominator = - counters.true_positives + counters.false_positives; - int recall_denominator = counters.true_positives + counters.false_negatives; - if (precision_denominator == 0 || recall_denominator == 0) { - // Both precision and recall must be defined. - return absl::nullopt; - } - ClippingPredictionMetrics metrics; - float true_positives = counters.true_positives; - metrics.precision = true_positives / precision_denominator; - metrics.recall = true_positives / recall_denominator; - float f1_score_denominator = metrics.precision + metrics.recall; - RTC_DCHECK_GT(f1_score_denominator, 0.0f); - metrics.f1_score = - 2 * metrics.precision * metrics.recall / f1_score_denominator; - return metrics; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor_evaluator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor_evaluator.h deleted file mode 100644 index 348f753493..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor_evaluator.h +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_EVALUATOR_H_ -#define MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_EVALUATOR_H_ - -#include - -#include "absl/types/optional.h" - -namespace webrtc { - -// Clipping prediction counters. -struct ClippingPredictionCounters { - int true_positives; // TP. - int true_negatives; // TN. - int false_positives; // FP. - int false_negatives; // FN. -}; - -// Counts true/false positives/negatives while observing sequences of flag pairs -// that indicate whether clipping has been detected and/or if clipping is -// predicted. When a true positive is found measures the time interval between -// prediction and detection events. -// After a prediction is observed and for a period equal to -// `history_size` calls to `Observe()`, one or more detections are expected. If -// the expectation is met, a true positive is added and the time interval -// between the earliest prediction and the detection is recorded; otherwise, -// when the deadline is reached, a false positive is added. Note that one -// detection matches all the expected detections that have not expired - i.e., -// one detection counts as multiple true positives. 
-// If a detection is observed, but no prediction has been observed over the past -// `history_size` calls to `Observe()`, then a false negative is added; -// otherwise, a true negative is added. -class ClippingPredictorEvaluator { - public: - // Ctor. `history_size` indicates how long to wait for a call to `Observe()` - // having `clipping_detected` set to true from the time clipping is predicted. - explicit ClippingPredictorEvaluator(int history_size); - ClippingPredictorEvaluator(const ClippingPredictorEvaluator&) = delete; - ClippingPredictorEvaluator& operator=(const ClippingPredictorEvaluator&) = - delete; - ~ClippingPredictorEvaluator(); - - // Observes whether clipping has been detected and/or if clipping is - // predicted. When predicted one or more detections are expected in the next - // `history_size_` calls of `Observe()`. When true positives are found returns - // the prediction interval between the earliest prediction and the detection. - absl::optional Observe(bool clipping_detected, bool clipping_predicted); - - // Removes any expectation recently set after a call to `Observe()` having - // `clipping_predicted` set to true. Counters won't be reset. - void RemoveExpectations(); - - // Resets counters and removes any expectation (see `RemoveExpectations()`). - void Reset(); - - ClippingPredictionCounters counters() const { return counters_; } - - private: - const int history_size_; - - // State of a detection expected to be observed after a prediction. - struct ExpectedDetection { - // Time to live (TTL); remaining number of `Observe()` calls to match a call - // having `clipping_detected` set to true. - int ttl; - // True if an `Observe()` call having `clipping_detected` set to true has - // been observed. - bool detected; - }; - // Ring buffer of expected detections. - const int ring_buffer_capacity_; - std::vector ring_buffer_; - int ring_buffer_tail_; - int ring_buffer_size_; - - // Pushes `expected_detection` into `expected_matches_ring_buffer_`. - void Push(ExpectedDetection expected_detection); - // Decreased the TTLs in `expected_matches_ring_buffer_` and removes expired - // items. - void DecreaseTimesToLive(); - // Returns the prediction interval for the earliest unexpired expected - // detection if any. - absl::optional FindEarliestPredictionInterval() const; - // Marks all the items in `expected_matches_ring_buffer_` as `detected` and - // returns the number of updated items. - int MarkExpectedDetectionAsDetected(); - // Returns true if `expected_matches_ring_buffer_` has an item having `ttl` - // equal to 0 (expired) and `detected` equal to false (unmatched). - bool HasExpiredUnmatchedExpectedDetection() const; - - // Counters. - ClippingPredictionCounters counters_; -}; - -// Clipping prediction metrics derived from the clipping prediction counters. -struct ClippingPredictionMetrics { - // Precision (P) is defined as TP / (TP + FP). - float precision; - // Recall (R) is defined as TP / (TP + FN). - float recall; - // The F1 score is defined as 2 * P * R / (P + R). - float f1_score; -}; - -// Derives clipping prediction metrics from the true/false positives/negatives -// `counters`. Returns an unspecified value if one or more metrics are not -// defined. 
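The deleted ComputeClippingPredictionMetrics() documented just above reduces to standard precision/recall/F1 arithmetic over the true/false positive/negative counters. A minimal standalone sketch of that computation, using std::optional and a plain counters struct as stand-ins for the WebRTC types:

#include <optional>

struct Counters {
  int true_positives = 0;
  int true_negatives = 0;
  int false_positives = 0;
  int false_negatives = 0;
};

struct Metrics {
  float precision;  // TP / (TP + FP).
  float recall;     // TP / (TP + FN).
  float f1_score;   // 2 * P * R / (P + R).
};

// Returns no value when any metric is undefined, mirroring the behavior
// documented for `ComputeClippingPredictionMetrics()` above.
std::optional<Metrics> ComputeMetrics(const Counters& c) {
  if (c.true_positives == 0) {
    // Precision and recall are both zero, so the F1 score is undefined.
    return std::nullopt;
  }
  const int precision_den = c.true_positives + c.false_positives;
  const int recall_den = c.true_positives + c.false_negatives;
  if (precision_den == 0 || recall_den == 0) {
    return std::nullopt;
  }
  Metrics m;
  const float tp = static_cast<float>(c.true_positives);
  m.precision = tp / precision_den;
  m.recall = tp / recall_den;
  m.f1_score = 2.0f * m.precision * m.recall / (m.precision + m.recall);
  return m;
}

For example, counters {TP=8, FP=2, FN=2} yield precision 0.8, recall 0.8, and an F1 score of 0.8.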
-absl::optional ComputeClippingPredictionMetrics( - const ClippingPredictionCounters& counters); - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_EVALUATOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/legacy/gain_control.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/legacy/gain_control.h index abb8e63228..6010a988fa 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/legacy/gain_control.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/legacy/gain_control.h @@ -11,6 +11,9 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC_LEGACY_GAIN_CONTROL_H_ #define MODULES_AUDIO_PROCESSING_AGC_LEGACY_GAIN_CONTROL_H_ +#include +#include + namespace webrtc { enum { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/mock_agc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/mock_agc.h index 0ef41c6e52..3080e1563c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/mock_agc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/mock_agc.h @@ -11,6 +11,7 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC_MOCK_AGC_H_ #define MODULES_AUDIO_PROCESSING_AGC_MOCK_AGC_H_ +#include "api/array_view.h" #include "modules/audio_processing/agc/agc.h" #include "test/gmock.h" @@ -19,10 +20,7 @@ namespace webrtc { class MockAgc : public Agc { public: virtual ~MockAgc() {} - MOCK_METHOD(void, - Process, - (const int16_t* audio, size_t length, int sample_rate_hz), - (override)); + MOCK_METHOD(void, Process, (rtc::ArrayView audio), (override)); MOCK_METHOD(bool, GetRmsErrorDb, (int* error), (override)); MOCK_METHOD(void, Reset, (), (override)); MOCK_METHOD(int, set_target_level_dbfs, (int level), (override)); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc index 381e454868..c396ee044a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc @@ -13,7 +13,6 @@ #include #include "common_audio/include/audio_util.h" -#include "modules/audio_processing/agc2/vad_wrapper.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -105,4 +104,11 @@ void AdaptiveDigitalGainController::HandleInputGainChange() { saturation_protector_->Reset(); } +absl::optional +AdaptiveDigitalGainController::GetSpeechLevelDbfsIfConfident() const { + return speech_level_estimator_.IsConfident() + ? 
absl::optional(speech_level_estimator_.level_dbfs()) + : absl::nullopt; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.h index 75ea44591e..78c508836b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller.h @@ -13,10 +13,11 @@ #include +#include "absl/types/optional.h" #include "modules/audio_processing/agc2/adaptive_digital_gain_applier.h" -#include "modules/audio_processing/agc2/adaptive_mode_level_estimator.h" #include "modules/audio_processing/agc2/noise_level_estimator.h" #include "modules/audio_processing/agc2/saturation_protector.h" +#include "modules/audio_processing/agc2/speech_level_estimator.h" #include "modules/audio_processing/include/audio_frame_view.h" #include "modules/audio_processing/include/audio_processing.h" @@ -50,8 +51,12 @@ class AdaptiveDigitalGainController { // Handles a gain change applied to the input signal (e.g., analog gain). void HandleInputGainChange(); + // Returns the most recent speech level (dBFs) if the estimator is confident. + // Otherwise returns absl::nullopt. + absl::optional GetSpeechLevelDbfsIfConfident() const; + private: - AdaptiveModeLevelEstimator speech_level_estimator_; + SpeechLevelEstimator speech_level_estimator_; AdaptiveDigitalGainApplier gain_controller_; ApmDataDumper* const apm_data_dumper_; std::unique_ptr noise_level_estimator_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc deleted file mode 100644 index fe021fec05..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/agc2/adaptive_mode_level_estimator.h" - -#include "modules/audio_processing/agc2/agc2_common.h" -#include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_minmax.h" - -namespace webrtc { -namespace { - -float ClampLevelEstimateDbfs(float level_estimate_dbfs) { - return rtc::SafeClamp(level_estimate_dbfs, -90.0f, 30.0f); -} - -// Returns the initial speech level estimate needed to apply the initial gain. 
-float GetInitialSpeechLevelEstimateDbfs( - const AudioProcessing::Config::GainController2::AdaptiveDigital& config) { - return ClampLevelEstimateDbfs(-kSaturationProtectorInitialHeadroomDb - - config.initial_gain_db - config.headroom_db); -} - -} // namespace - -bool AdaptiveModeLevelEstimator::LevelEstimatorState::operator==( - const AdaptiveModeLevelEstimator::LevelEstimatorState& b) const { - return time_to_confidence_ms == b.time_to_confidence_ms && - level_dbfs.numerator == b.level_dbfs.numerator && - level_dbfs.denominator == b.level_dbfs.denominator; -} - -float AdaptiveModeLevelEstimator::LevelEstimatorState::Ratio::GetRatio() const { - RTC_DCHECK_NE(denominator, 0.f); - return numerator / denominator; -} - -AdaptiveModeLevelEstimator::AdaptiveModeLevelEstimator( - ApmDataDumper* apm_data_dumper, - const AudioProcessing::Config::GainController2::AdaptiveDigital& config) - : apm_data_dumper_(apm_data_dumper), - initial_speech_level_dbfs_(GetInitialSpeechLevelEstimateDbfs(config)), - adjacent_speech_frames_threshold_( - config.adjacent_speech_frames_threshold), - level_dbfs_(initial_speech_level_dbfs_) { - RTC_DCHECK(apm_data_dumper_); - RTC_DCHECK_GE(adjacent_speech_frames_threshold_, 1); - Reset(); -} - -void AdaptiveModeLevelEstimator::Update(float rms_dbfs, - float peak_dbfs, - float speech_probability) { - RTC_DCHECK_GT(rms_dbfs, -150.0f); - RTC_DCHECK_LT(rms_dbfs, 50.0f); - RTC_DCHECK_GT(peak_dbfs, -150.0f); - RTC_DCHECK_LT(peak_dbfs, 50.0f); - RTC_DCHECK_GE(speech_probability, 0.0f); - RTC_DCHECK_LE(speech_probability, 1.0f); - if (speech_probability < kVadConfidenceThreshold) { - // Not a speech frame. - if (adjacent_speech_frames_threshold_ > 1) { - // When two or more adjacent speech frames are required in order to update - // the state, we need to decide whether to discard or confirm the updates - // based on the speech sequence length. - if (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_) { - // First non-speech frame after a long enough sequence of speech frames. - // Update the reliable state. - reliable_state_ = preliminary_state_; - } else if (num_adjacent_speech_frames_ > 0) { - // First non-speech frame after a too short sequence of speech frames. - // Reset to the last reliable state. - preliminary_state_ = reliable_state_; - } - } - num_adjacent_speech_frames_ = 0; - } else { - // Speech frame observed. - num_adjacent_speech_frames_++; - - // Update preliminary level estimate. - RTC_DCHECK_GE(preliminary_state_.time_to_confidence_ms, 0); - const bool buffer_is_full = preliminary_state_.time_to_confidence_ms == 0; - if (!buffer_is_full) { - preliminary_state_.time_to_confidence_ms -= kFrameDurationMs; - } - // Weighted average of levels with speech probability as weight. - RTC_DCHECK_GT(speech_probability, 0.0f); - const float leak_factor = buffer_is_full ? kLevelEstimatorLeakFactor : 1.0f; - preliminary_state_.level_dbfs.numerator = - preliminary_state_.level_dbfs.numerator * leak_factor + - rms_dbfs * speech_probability; - preliminary_state_.level_dbfs.denominator = - preliminary_state_.level_dbfs.denominator * leak_factor + - speech_probability; - - const float level_dbfs = preliminary_state_.level_dbfs.GetRatio(); - - if (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_) { - // `preliminary_state_` is now reliable. Update the last level estimation. 
- level_dbfs_ = ClampLevelEstimateDbfs(level_dbfs); - } - } - DumpDebugData(); -} - -bool AdaptiveModeLevelEstimator::IsConfident() const { - if (adjacent_speech_frames_threshold_ == 1) { - // Ignore `reliable_state_` when a single frame is enough to update the - // level estimate (because it is not used). - return preliminary_state_.time_to_confidence_ms == 0; - } - // Once confident, it remains confident. - RTC_DCHECK(reliable_state_.time_to_confidence_ms != 0 || - preliminary_state_.time_to_confidence_ms == 0); - // During the first long enough speech sequence, `reliable_state_` must be - // ignored since `preliminary_state_` is used. - return reliable_state_.time_to_confidence_ms == 0 || - (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_ && - preliminary_state_.time_to_confidence_ms == 0); -} - -void AdaptiveModeLevelEstimator::Reset() { - ResetLevelEstimatorState(preliminary_state_); - ResetLevelEstimatorState(reliable_state_); - level_dbfs_ = initial_speech_level_dbfs_; - num_adjacent_speech_frames_ = 0; -} - -void AdaptiveModeLevelEstimator::ResetLevelEstimatorState( - LevelEstimatorState& state) const { - state.time_to_confidence_ms = kLevelEstimatorTimeToConfidenceMs; - state.level_dbfs.numerator = initial_speech_level_dbfs_; - state.level_dbfs.denominator = 1.0f; -} - -void AdaptiveModeLevelEstimator::DumpDebugData() const { - apm_data_dumper_->DumpRaw( - "agc2_adaptive_level_estimator_num_adjacent_speech_frames", - num_adjacent_speech_frames_); - apm_data_dumper_->DumpRaw( - "agc2_adaptive_level_estimator_preliminary_level_estimate_num", - preliminary_state_.level_dbfs.numerator); - apm_data_dumper_->DumpRaw( - "agc2_adaptive_level_estimator_preliminary_level_estimate_den", - preliminary_state_.level_dbfs.denominator); - apm_data_dumper_->DumpRaw( - "agc2_adaptive_level_estimator_preliminary_time_to_confidence_ms", - preliminary_state_.time_to_confidence_ms); - apm_data_dumper_->DumpRaw( - "agc2_adaptive_level_estimator_reliable_time_to_confidence_ms", - reliable_state_.time_to_confidence_ms); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.h deleted file mode 100644 index 989c8c3572..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.h +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_MODE_LEVEL_ESTIMATOR_H_ -#define MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_MODE_LEVEL_ESTIMATOR_H_ - -#include - -#include - -#include "modules/audio_processing/agc2/agc2_common.h" -#include "modules/audio_processing/agc2/vad_wrapper.h" -#include "modules/audio_processing/include/audio_processing.h" - -namespace webrtc { -class ApmDataDumper; - -// Level estimator for the digital adaptive gain controller. 
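The update rule implemented by the deleted estimator above is essentially a leaky, speech-probability-weighted average of the frame RMS level, committed only after enough adjacent speech frames and clamped to [-90, 30] dBFS. A condensed standalone sketch follows; the class name and numeric constants are illustrative stand-ins, and the preliminary/reliable state pair kept by the real code is dropped for brevity.

#include <algorithm>

// Condensed sketch of the speech-level update rule above. Constants are
// assumed stand-ins for kVadConfidenceThreshold / kLevelEstimatorLeakFactor.
class SpeechLevelSketch {
 public:
  explicit SpeechLevelSketch(int adjacent_speech_frames_threshold)
      : threshold_(adjacent_speech_frames_threshold) {}

  void Update(float rms_dbfs, float speech_probability) {
    if (speech_probability < 0.4f) {  // Assumed VAD confidence threshold.
      num_adjacent_speech_frames_ = 0;
      return;
    }
    ++num_adjacent_speech_frames_;
    // Leak the accumulated numerator/denominator so old frames fade out;
    // the real code only starts leaking once its confidence window is full.
    constexpr float kLeak = 0.995f;  // Assumed leak factor.
    numerator_ = numerator_ * kLeak + rms_dbfs * speech_probability;
    denominator_ = denominator_ * kLeak + speech_probability;
    if (num_adjacent_speech_frames_ >= threshold_) {
      // Commit the estimate only for long enough speech sequences.
      level_dbfs_ = std::clamp(numerator_ / denominator_, -90.0f, 30.0f);
    }
  }

  float level_dbfs() const { return level_dbfs_; }

 private:
  const int threshold_;
  int num_adjacent_speech_frames_ = 0;
  float numerator_ = -50.0f;   // Seeds the ratio at the initial estimate.
  float denominator_ = 1.0f;
  float level_dbfs_ = -50.0f;  // Assumed initial speech level estimate.
};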
-class AdaptiveModeLevelEstimator { - public: - AdaptiveModeLevelEstimator( - ApmDataDumper* apm_data_dumper, - const AudioProcessing::Config::GainController2::AdaptiveDigital& config); - AdaptiveModeLevelEstimator(const AdaptiveModeLevelEstimator&) = delete; - AdaptiveModeLevelEstimator& operator=(const AdaptiveModeLevelEstimator&) = - delete; - - // Updates the level estimation. - void Update(float rms_dbfs, float peak_dbfs, float speech_probability); - // Returns the estimated speech plus noise level. - float level_dbfs() const { return level_dbfs_; } - // Returns true if the estimator is confident on its current estimate. - bool IsConfident() const; - - void Reset(); - - private: - // Part of the level estimator state used for check-pointing and restore ops. - struct LevelEstimatorState { - bool operator==(const LevelEstimatorState& s) const; - inline bool operator!=(const LevelEstimatorState& s) const { - return !(*this == s); - } - // TODO(bugs.webrtc.org/7494): Remove `time_to_confidence_ms` if redundant. - int time_to_confidence_ms; - struct Ratio { - float numerator; - float denominator; - float GetRatio() const; - } level_dbfs; - }; - static_assert(std::is_trivially_copyable::value, ""); - - void ResetLevelEstimatorState(LevelEstimatorState& state) const; - - void DumpDebugData() const; - - ApmDataDumper* const apm_data_dumper_; - - const float initial_speech_level_dbfs_; - const int adjacent_speech_frames_threshold_; - LevelEstimatorState preliminary_state_; - LevelEstimatorState reliable_state_; - float level_dbfs_; - int num_adjacent_speech_frames_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_MODE_LEVEL_ESTIMATOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/biquad_filter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/biquad_filter.cc index 453125fde7..c1b80d7320 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/biquad_filter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/biquad_filter.cc @@ -31,17 +31,30 @@ void BiQuadFilter::Reset() { void BiQuadFilter::Process(rtc::ArrayView x, rtc::ArrayView y) { RTC_DCHECK_EQ(x.size(), y.size()); - for (size_t k = 0; k < x.size(); ++k) { + const float config_a0 = config_.a[0]; + const float config_a1 = config_.a[1]; + const float config_b0 = config_.b[0]; + const float config_b1 = config_.b[1]; + const float config_b2 = config_.b[2]; + float state_a0 = state_.a[0]; + float state_a1 = state_.a[1]; + float state_b0 = state_.b[0]; + float state_b1 = state_.b[1]; + for (size_t k = 0, x_size = x.size(); k < x_size; ++k) { // Use a temporary variable for `x[k]` to allow in-place processing. 
const float tmp = x[k]; - y[k] = config_.b[0] * tmp + config_.b[1] * state_.b[0] + - config_.b[2] * state_.b[1] - config_.a[0] * state_.a[0] - - config_.a[1] * state_.a[1]; - state_.b[1] = state_.b[0]; - state_.b[0] = tmp; - state_.a[1] = state_.a[0]; - state_.a[0] = y[k]; + float y_k = config_b0 * tmp + config_b1 * state_b0 + config_b2 * state_b1 - + config_a0 * state_a0 - config_a1 * state_a1; + state_b1 = state_b0; + state_b0 = tmp; + state_a1 = state_a0; + state_a0 = y_k; + y[k] = y_k; } + state_.a[0] = state_a0; + state_.a[1] = state_a1; + state_.b[0] = state_b0; + state_.b[1] = state_b1; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/clipping_predictor.cc similarity index 98% rename from TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor.cc rename to TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/clipping_predictor.cc index 58b3a2769c..2bf5fb2e32 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/clipping_predictor.cc @@ -8,14 +8,14 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/audio_processing/agc/clipping_predictor.h" +#include "modules/audio_processing/agc2/clipping_predictor.h" #include #include #include "common_audio/include/audio_util.h" -#include "modules/audio_processing/agc/clipping_predictor_level_buffer.h" -#include "modules/audio_processing/agc/gain_map_internal.h" +#include "modules/audio_processing/agc2/clipping_predictor_level_buffer.h" +#include "modules/audio_processing/agc2/gain_map_internal.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_minmax.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/clipping_predictor.h similarity index 92% rename from TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor.h rename to TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/clipping_predictor.h index ee2b6ef1e7..14612508c0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/clipping_predictor.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_H_ -#define MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_H_ +#ifndef MODULES_AUDIO_PROCESSING_AGC2_CLIPPING_PREDICTOR_H_ +#define MODULES_AUDIO_PROCESSING_AGC2_CLIPPING_PREDICTOR_H_ #include #include @@ -46,7 +46,6 @@ class ClippingPredictor { int default_step, int min_mic_level, int max_mic_level) const = 0; - }; // Creates a ClippingPredictor based on the provided `config`. 
When enabled, @@ -60,4 +59,4 @@ std::unique_ptr CreateClippingPredictor( } // namespace webrtc -#endif // MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_H_ +#endif // MODULES_AUDIO_PROCESSING_AGC2_CLIPPING_PREDICTOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor_level_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/clipping_predictor_level_buffer.cc similarity index 96% rename from TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor_level_buffer.cc rename to TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/clipping_predictor_level_buffer.cc index bc33cda040..fe4cf2a154 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor_level_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/clipping_predictor_level_buffer.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/audio_processing/agc/clipping_predictor_level_buffer.h" +#include "modules/audio_processing/agc2/clipping_predictor_level_buffer.h" #include #include diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor_level_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/clipping_predictor_level_buffer.h similarity index 91% rename from TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor_level_buffer.h rename to TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/clipping_predictor_level_buffer.h index f3e8368194..c9032773a6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/clipping_predictor_level_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/clipping_predictor_level_buffer.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_LEVEL_BUFFER_H_ -#define MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_LEVEL_BUFFER_H_ +#ifndef MODULES_AUDIO_PROCESSING_AGC2_CLIPPING_PREDICTOR_LEVEL_BUFFER_H_ +#define MODULES_AUDIO_PROCESSING_AGC2_CLIPPING_PREDICTOR_LEVEL_BUFFER_H_ #include #include @@ -68,4 +68,4 @@ class ClippingPredictorLevelBuffer { } // namespace webrtc -#endif // MODULES_AUDIO_PROCESSING_AGC_CLIPPING_PREDICTOR_LEVEL_BUFFER_H_ +#endif // MODULES_AUDIO_PROCESSING_AGC2_CLIPPING_PREDICTOR_LEVEL_BUFFER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.h index d96aedaf9e..d26b55950c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.h @@ -16,7 +16,6 @@ #include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/include/audio_frame_view.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -34,6 +33,10 @@ class FixedDigitalLevelEstimator { FixedDigitalLevelEstimator(int sample_rate_hz, ApmDataDumper* apm_data_dumper); + FixedDigitalLevelEstimator(const FixedDigitalLevelEstimator&) = delete; + FixedDigitalLevelEstimator& operator=(const FixedDigitalLevelEstimator&) = + delete; + // The input is assumed to be in FloatS16 format. Scaled input will // produce similarly scaled output. A frame of with kFrameDurationMs // ms of audio produces a level estimates in the same scale. 
The @@ -57,8 +60,6 @@ class FixedDigitalLevelEstimator { float filter_state_level_; int samples_in_frame_; int samples_in_sub_frame_; - - RTC_DISALLOW_COPY_AND_ASSIGN(FixedDigitalLevelEstimator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/gain_map_internal.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/gain_map_internal.h similarity index 91% rename from TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/gain_map_internal.h rename to TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/gain_map_internal.h index 547f0f312e..75e421899f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/gain_map_internal.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/gain_map_internal.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_AUDIO_PROCESSING_AGC_GAIN_MAP_INTERNAL_H_ -#define MODULES_AUDIO_PROCESSING_AGC_GAIN_MAP_INTERNAL_H_ +#ifndef MODULES_AUDIO_PROCESSING_AGC2_GAIN_MAP_INTERNAL_H_ +#define MODULES_AUDIO_PROCESSING_AGC2_GAIN_MAP_INTERNAL_H_ namespace webrtc { @@ -37,4 +37,4 @@ static const int kGainMap[kGainMapSize] = { } // namespace webrtc -#endif // MODULES_AUDIO_PROCESSING_AGC_GAIN_MAP_INTERNAL_H_ +#endif // MODULES_AUDIO_PROCESSING_AGC2_GAIN_MAP_INTERNAL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/input_volume_stats_reporter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/input_volume_stats_reporter.cc new file mode 100644 index 0000000000..cf6149eb49 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/input_volume_stats_reporter.cc @@ -0,0 +1,151 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/agc2/input_volume_stats_reporter.h" + +#include + +#include "absl/strings/string_view.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_minmax.h" +#include "rtc_base/strings/string_builder.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { +namespace { + +using InputVolumeType = InputVolumeStatsReporter::InputVolumeType; + +constexpr int kFramesIn60Seconds = 6000; +constexpr int kMinInputVolume = 0; +constexpr int kMaxInputVolume = 255; +constexpr int kMaxUpdate = kMaxInputVolume - kMinInputVolume; + +int ComputeAverageUpdate(int sum_updates, int num_updates) { + RTC_DCHECK_GE(sum_updates, 0); + RTC_DCHECK_LE(sum_updates, kMaxUpdate * kFramesIn60Seconds); + RTC_DCHECK_GE(num_updates, 0); + RTC_DCHECK_LE(num_updates, kFramesIn60Seconds); + if (num_updates == 0) { + return 0; + } + return std::round(static_cast(sum_updates) / + static_cast(num_updates)); +} + +constexpr absl::string_view MetricNamePrefix( + InputVolumeType input_volume_type) { + switch (input_volume_type) { + case InputVolumeType::kApplied: + return "WebRTC.Audio.Apm.AppliedInputVolume."; + case InputVolumeType::kRecommended: + return "WebRTC.Audio.Apm.RecommendedInputVolume."; + } +} + +metrics::Histogram* CreateRateHistogram(InputVolumeType input_volume_type, + absl::string_view name) { + char buffer[64]; + rtc::SimpleStringBuilder builder(buffer); + builder << MetricNamePrefix(input_volume_type) << name; + return metrics::HistogramFactoryGetCountsLinear(/*name=*/builder.str(), + /*min=*/1, + /*max=*/kFramesIn60Seconds, + /*bucket_count=*/50); +} + +metrics::Histogram* CreateAverageHistogram(InputVolumeType input_volume_type, + absl::string_view name) { + char buffer[64]; + rtc::SimpleStringBuilder builder(buffer); + builder << MetricNamePrefix(input_volume_type) << name; + return metrics::HistogramFactoryGetCountsLinear(/*name=*/builder.str(), + /*min=*/1, + /*max=*/kMaxUpdate, + /*bucket_count=*/50); +} + +} // namespace + +InputVolumeStatsReporter::InputVolumeStatsReporter(InputVolumeType type) + : histograms_( + {.decrease_rate = CreateRateHistogram(type, "DecreaseRate"), + .decrease_average = CreateAverageHistogram(type, "DecreaseAverage"), + .increase_rate = CreateRateHistogram(type, "IncreaseRate"), + .increase_average = CreateAverageHistogram(type, "IncreaseAverage"), + .update_rate = CreateRateHistogram(type, "UpdateRate"), + .update_average = CreateAverageHistogram(type, "UpdateAverage")}), + cannot_log_stats_(!histograms_.AllPointersSet()) { + if (cannot_log_stats_) { + RTC_LOG(LS_WARNING) << "Will not log any `" << MetricNamePrefix(type) + << "*` histogram stats."; + } +} + +InputVolumeStatsReporter::~InputVolumeStatsReporter() = default; + +void InputVolumeStatsReporter::UpdateStatistics(int input_volume) { + if (cannot_log_stats_) { + // Since the stats cannot be logged, do not bother updating them. + return; + } + + RTC_DCHECK_GE(input_volume, kMinInputVolume); + RTC_DCHECK_LE(input_volume, kMaxInputVolume); + if (previous_input_volume_.has_value() && + input_volume != previous_input_volume_.value()) { + const int volume_change = input_volume - previous_input_volume_.value(); + if (volume_change < 0) { + ++volume_update_stats_.num_decreases; + volume_update_stats_.sum_decreases -= volume_change; + } else { + ++volume_update_stats_.num_increases; + volume_update_stats_.sum_increases += volume_change; + } + } + // Periodically log input volume change metrics. 
+ if (++log_volume_update_stats_counter_ >= kFramesIn60Seconds) { + LogVolumeUpdateStats(); + volume_update_stats_ = {}; + log_volume_update_stats_counter_ = 0; + } + previous_input_volume_ = input_volume; +} + +void InputVolumeStatsReporter::LogVolumeUpdateStats() const { + // Decrease rate and average. + metrics::HistogramAdd(histograms_.decrease_rate, + volume_update_stats_.num_decreases); + if (volume_update_stats_.num_decreases > 0) { + int average_decrease = ComputeAverageUpdate( + volume_update_stats_.sum_decreases, volume_update_stats_.num_decreases); + metrics::HistogramAdd(histograms_.decrease_average, average_decrease); + } + // Increase rate and average. + metrics::HistogramAdd(histograms_.increase_rate, + volume_update_stats_.num_increases); + if (volume_update_stats_.num_increases > 0) { + int average_increase = ComputeAverageUpdate( + volume_update_stats_.sum_increases, volume_update_stats_.num_increases); + metrics::HistogramAdd(histograms_.increase_average, average_increase); + } + // Update rate and average. + int num_updates = + volume_update_stats_.num_decreases + volume_update_stats_.num_increases; + metrics::HistogramAdd(histograms_.update_rate, num_updates); + if (num_updates > 0) { + int average_update = ComputeAverageUpdate( + volume_update_stats_.sum_decreases + volume_update_stats_.sum_increases, + num_updates); + metrics::HistogramAdd(histograms_.update_average, average_update); + } +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/input_volume_stats_reporter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/input_volume_stats_reporter.h new file mode 100644 index 0000000000..4df5a85a0c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/input_volume_stats_reporter.h @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_AGC2_INPUT_VOLUME_STATS_REPORTER_H_ +#define MODULES_AUDIO_PROCESSING_AGC2_INPUT_VOLUME_STATS_REPORTER_H_ + +#include "absl/types/optional.h" +#include "rtc_base/gtest_prod_util.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { + +// Input volume statistics calculator. Computes aggregate stats based on the +// framewise input volume observed by `UpdateStatistics()`. Periodically logs +// the statistics into a histogram. +class InputVolumeStatsReporter { + public: + enum class InputVolumeType { + kApplied = 0, + kRecommended = 1, + }; + + explicit InputVolumeStatsReporter(InputVolumeType input_volume_type); + InputVolumeStatsReporter(const InputVolumeStatsReporter&) = delete; + InputVolumeStatsReporter operator=(const InputVolumeStatsReporter&) = delete; + ~InputVolumeStatsReporter(); + + // Updates the stats based on `input_volume`. Periodically logs the stats into + // a histogram. 
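The aggregation done by UpdateStatistics() and LogVolumeUpdateStats() above is straightforward to reproduce in isolation: volume deltas are split into increases and decreases, and the counters are flushed roughly once per minute (6000 frames at 10 ms each). The sketch below keeps the same counters and flush cadence but reports through a plain callback instead of the WebRTC metrics histograms, an assumption made to keep the example self-contained.

#include <cmath>
#include <functional>
#include <optional>
#include <utility>

// Standalone sketch of the input-volume aggregation above.
class VolumeStatsSketch {
 public:
  struct Stats {
    int num_decreases = 0;
    int num_increases = 0;
    int sum_decreases = 0;  // Sum of absolute decrease steps.
    int sum_increases = 0;  // Sum of absolute increase steps.
  };

  explicit VolumeStatsSketch(std::function<void(const Stats&)> on_flush)
      : on_flush_(std::move(on_flush)) {}

  void Update(int input_volume) {
    if (previous_volume_ && input_volume != *previous_volume_) {
      const int change = input_volume - *previous_volume_;
      if (change < 0) {
        ++stats_.num_decreases;
        stats_.sum_decreases -= change;
      } else {
        ++stats_.num_increases;
        stats_.sum_increases += change;
      }
    }
    // Flush and reset the counters about once per minute.
    if (++frames_since_flush_ >= kFramesIn60Seconds) {
      on_flush_(stats_);
      stats_ = {};
      frames_since_flush_ = 0;
    }
    previous_volume_ = input_volume;
  }

  // Average update size, rounded as in `ComputeAverageUpdate()` above.
  static int AverageUpdate(int sum, int num) {
    return num == 0 ? 0
                    : static_cast<int>(
                          std::round(static_cast<double>(sum) / num));
  }

 private:
  static constexpr int kFramesIn60Seconds = 6000;
  std::function<void(const Stats&)> on_flush_;
  Stats stats_;
  std::optional<int> previous_volume_;
  int frames_since_flush_ = 0;
};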
+ void UpdateStatistics(int input_volume); + + private: + FRIEND_TEST_ALL_PREFIXES(InputVolumeStatsReporterTest, + CheckVolumeUpdateStatsForEmptyStats); + FRIEND_TEST_ALL_PREFIXES(InputVolumeStatsReporterTest, + CheckVolumeUpdateStatsAfterNoVolumeChange); + FRIEND_TEST_ALL_PREFIXES(InputVolumeStatsReporterTest, + CheckVolumeUpdateStatsAfterVolumeIncrease); + FRIEND_TEST_ALL_PREFIXES(InputVolumeStatsReporterTest, + CheckVolumeUpdateStatsAfterVolumeDecrease); + FRIEND_TEST_ALL_PREFIXES(InputVolumeStatsReporterTest, + CheckVolumeUpdateStatsAfterReset); + + // Stores input volume update stats to enable calculation of update rate and + // average update separately for volume increases and decreases. + struct VolumeUpdateStats { + int num_decreases = 0; + int num_increases = 0; + int sum_decreases = 0; + int sum_increases = 0; + } volume_update_stats_; + + // Returns a copy of the stored statistics. Use only for testing. + VolumeUpdateStats volume_update_stats() const { return volume_update_stats_; } + + // Computes aggregate stat and logs them into a histogram. + void LogVolumeUpdateStats() const; + + // Histograms. + struct Histograms { + metrics::Histogram* const decrease_rate; + metrics::Histogram* const decrease_average; + metrics::Histogram* const increase_rate; + metrics::Histogram* const increase_average; + metrics::Histogram* const update_rate; + metrics::Histogram* const update_average; + bool AllPointersSet() const { + return !!decrease_rate && !!decrease_average && !!increase_rate && + !!increase_average && !!update_rate && !!update_average; + } + } histograms_; + + // True if the stats cannot be logged. + const bool cannot_log_stats_; + + int log_volume_update_stats_counter_ = 0; + absl::optional previous_input_volume_ = absl::nullopt; +}; +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_AGC2_INPUT_VOLUME_STATS_REPORTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.cc index b522ec372c..bb6e038514 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.cc @@ -13,9 +13,11 @@ #include #include +#include "absl/strings/string_view.h" #include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" +#include "rtc_base/strings/string_builder.h" namespace webrtc { @@ -30,15 +32,21 @@ constexpr std::array InterpolatedGainCurve::InterpolatedGainCurve( ApmDataDumper* apm_data_dumper, - const std::string& histogram_name_prefix) - : region_logger_("WebRTC.Audio." + histogram_name_prefix + - ".FixedDigitalGainCurveRegion.Identity", - "WebRTC.Audio." + histogram_name_prefix + - ".FixedDigitalGainCurveRegion.Knee", - "WebRTC.Audio." + histogram_name_prefix + - ".FixedDigitalGainCurveRegion.Limiter", - "WebRTC.Audio." 
+ histogram_name_prefix + - ".FixedDigitalGainCurveRegion.Saturation"), + absl::string_view histogram_name_prefix) + : region_logger_( + (rtc::StringBuilder("WebRTC.Audio.") + << histogram_name_prefix << ".FixedDigitalGainCurveRegion.Identity") + .str(), + (rtc::StringBuilder("WebRTC.Audio.") + << histogram_name_prefix << ".FixedDigitalGainCurveRegion.Knee") + .str(), + (rtc::StringBuilder("WebRTC.Audio.") + << histogram_name_prefix << ".FixedDigitalGainCurveRegion.Limiter") + .str(), + (rtc::StringBuilder("WebRTC.Audio.") + << histogram_name_prefix + << ".FixedDigitalGainCurveRegion.Saturation") + .str()), apm_data_dumper_(apm_data_dumper) {} InterpolatedGainCurve::~InterpolatedGainCurve() { @@ -57,10 +65,10 @@ InterpolatedGainCurve::~InterpolatedGainCurve() { } InterpolatedGainCurve::RegionLogger::RegionLogger( - const std::string& identity_histogram_name, - const std::string& knee_histogram_name, - const std::string& limiter_histogram_name, - const std::string& saturation_histogram_name) + absl::string_view identity_histogram_name, + absl::string_view knee_histogram_name, + absl::string_view limiter_histogram_name, + absl::string_view saturation_histogram_name) : identity_histogram( metrics::HistogramFactoryGetCounts(identity_histogram_name, 1, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.h index af993204ce..8dd3e48f21 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.h @@ -12,10 +12,9 @@ #define MODULES_AUDIO_PROCESSING_AGC2_INTERPOLATED_GAIN_CURVE_H_ #include -#include +#include "absl/strings/string_view.h" #include "modules/audio_processing/agc2/agc2_common.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/gtest_prod_util.h" #include "system_wrappers/include/metrics.h" @@ -61,9 +60,12 @@ class InterpolatedGainCurve { }; InterpolatedGainCurve(ApmDataDumper* apm_data_dumper, - const std::string& histogram_name_prefix); + absl::string_view histogram_name_prefix); ~InterpolatedGainCurve(); + InterpolatedGainCurve(const InterpolatedGainCurve&) = delete; + InterpolatedGainCurve& operator=(const InterpolatedGainCurve&) = delete; + Stats get_stats() const { return stats_; } // Given a non-negative input level (linear scale), a scalar factor to apply @@ -84,10 +86,10 @@ class InterpolatedGainCurve { metrics::Histogram* limiter_histogram; metrics::Histogram* saturation_histogram; - RegionLogger(const std::string& identity_histogram_name, - const std::string& knee_histogram_name, - const std::string& limiter_histogram_name, - const std::string& saturation_histogram_name); + RegionLogger(absl::string_view identity_histogram_name, + absl::string_view knee_histogram_name, + absl::string_view limiter_histogram_name, + absl::string_view saturation_histogram_name); ~RegionLogger(); @@ -143,8 +145,6 @@ class InterpolatedGainCurve { // Stats. 
mutable Stats stats_; - - RTC_DISALLOW_COPY_AND_ASSIGN(InterpolatedGainCurve); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.cc index 57580924dc..7a1e2202be 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.cc @@ -14,6 +14,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/logging/apm_data_dumper.h" @@ -95,7 +96,7 @@ void CheckLimiterSampleRate(int sample_rate_hz) { Limiter::Limiter(int sample_rate_hz, ApmDataDumper* apm_data_dumper, - const std::string& histogram_name) + absl::string_view histogram_name) : interp_gain_curve_(apm_data_dumper, histogram_name), level_estimator_(sample_rate_hz, apm_data_dumper), apm_data_dumper_(apm_data_dumper) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.h index f8894a308d..d4d556349c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.h @@ -11,13 +11,12 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC2_LIMITER_H_ #define MODULES_AUDIO_PROCESSING_AGC2_LIMITER_H_ -#include #include +#include "absl/strings/string_view.h" #include "modules/audio_processing/agc2/fixed_digital_level_estimator.h" #include "modules/audio_processing/agc2/interpolated_gain_curve.h" #include "modules/audio_processing/include/audio_frame_view.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { class ApmDataDumper; @@ -26,7 +25,7 @@ class Limiter { public: Limiter(int sample_rate_hz, ApmDataDumper* apm_data_dumper, - const std::string& histogram_name_prefix); + absl::string_view histogram_name_prefix); Limiter(const Limiter& limiter) = delete; Limiter& operator=(const Limiter& limiter) = delete; ~Limiter(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.cc index b8ca9c3669..857a9f2706 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.cc @@ -13,9 +13,11 @@ #include #include #include +#include #include #include +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_compare.h" #include "test/gtest.h" @@ -31,8 +33,8 @@ template class FloatFileReader : public FileReader { public: static_assert(std::is_arithmetic::value, ""); - FloatFileReader(const std::string& filename) - : is_(filename, std::ios::binary | std::ios::ate), + explicit FloatFileReader(absl::string_view filename) + : is_(std::string(filename), std::ios::binary | std::ios::ate), size_(is_.tellg() / sizeof(T)) { RTC_CHECK(is_); SeekBeginning(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.h index e366e1837e..e64b7b7ecd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.h @@ -16,6 +16,7 @@ #include 
#include +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "modules/audio_processing/agc2/rnn_vad/common.h" #include "rtc_base/checks.h" @@ -109,8 +110,8 @@ class PitchTestData { // Writer for binary files. class FileWriter { public: - explicit FileWriter(const std::string& file_path) - : os_(file_path, std::ios::binary) {} + explicit FileWriter(absl::string_view file_path) + : os_(std::string(file_path), std::ios::binary) {} FileWriter(const FileWriter&) = delete; FileWriter& operator=(const FileWriter&) = delete; ~FileWriter() = default; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/speech_level_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/speech_level_estimator.cc new file mode 100644 index 0000000000..8e234f7d7f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/speech_level_estimator.cc @@ -0,0 +1,164 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/agc2/speech_level_estimator.h" + +#include "modules/audio_processing/agc2/agc2_common.h" +#include "modules/audio_processing/logging/apm_data_dumper.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_minmax.h" + +namespace webrtc { +namespace { + +float ClampLevelEstimateDbfs(float level_estimate_dbfs) { + return rtc::SafeClamp(level_estimate_dbfs, -90.0f, 30.0f); +} + +// Returns the initial speech level estimate needed to apply the initial gain. +float GetInitialSpeechLevelEstimateDbfs( + const AudioProcessing::Config::GainController2::AdaptiveDigital& config) { + return ClampLevelEstimateDbfs(-kSaturationProtectorInitialHeadroomDb - + config.initial_gain_db - config.headroom_db); +} + +} // namespace + +bool SpeechLevelEstimator::LevelEstimatorState::operator==( + const SpeechLevelEstimator::LevelEstimatorState& b) const { + return time_to_confidence_ms == b.time_to_confidence_ms && + level_dbfs.numerator == b.level_dbfs.numerator && + level_dbfs.denominator == b.level_dbfs.denominator; +} + +float SpeechLevelEstimator::LevelEstimatorState::Ratio::GetRatio() const { + RTC_DCHECK_NE(denominator, 0.f); + return numerator / denominator; +} + +SpeechLevelEstimator::SpeechLevelEstimator( + ApmDataDumper* apm_data_dumper, + const AudioProcessing::Config::GainController2::AdaptiveDigital& config) + : apm_data_dumper_(apm_data_dumper), + initial_speech_level_dbfs_(GetInitialSpeechLevelEstimateDbfs(config)), + adjacent_speech_frames_threshold_( + config.adjacent_speech_frames_threshold), + level_dbfs_(initial_speech_level_dbfs_) { + RTC_DCHECK(apm_data_dumper_); + RTC_DCHECK_GE(adjacent_speech_frames_threshold_, 1); + Reset(); +} + +void SpeechLevelEstimator::Update(float rms_dbfs, + float peak_dbfs, + float speech_probability) { + RTC_DCHECK_GT(rms_dbfs, -150.0f); + RTC_DCHECK_LT(rms_dbfs, 50.0f); + RTC_DCHECK_GT(peak_dbfs, -150.0f); + RTC_DCHECK_LT(peak_dbfs, 50.0f); + RTC_DCHECK_GE(speech_probability, 0.0f); + RTC_DCHECK_LE(speech_probability, 1.0f); + if (speech_probability < kVadConfidenceThreshold) { + // Not a speech frame. 
+ if (adjacent_speech_frames_threshold_ > 1) { + // When two or more adjacent speech frames are required in order to update + // the state, we need to decide whether to discard or confirm the updates + // based on the speech sequence length. + if (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_) { + // First non-speech frame after a long enough sequence of speech frames. + // Update the reliable state. + reliable_state_ = preliminary_state_; + } else if (num_adjacent_speech_frames_ > 0) { + // First non-speech frame after a too short sequence of speech frames. + // Reset to the last reliable state. + preliminary_state_ = reliable_state_; + } + } + num_adjacent_speech_frames_ = 0; + } else { + // Speech frame observed. + num_adjacent_speech_frames_++; + + // Update preliminary level estimate. + RTC_DCHECK_GE(preliminary_state_.time_to_confidence_ms, 0); + const bool buffer_is_full = preliminary_state_.time_to_confidence_ms == 0; + if (!buffer_is_full) { + preliminary_state_.time_to_confidence_ms -= kFrameDurationMs; + } + // Weighted average of levels with speech probability as weight. + RTC_DCHECK_GT(speech_probability, 0.0f); + const float leak_factor = buffer_is_full ? kLevelEstimatorLeakFactor : 1.0f; + preliminary_state_.level_dbfs.numerator = + preliminary_state_.level_dbfs.numerator * leak_factor + + rms_dbfs * speech_probability; + preliminary_state_.level_dbfs.denominator = + preliminary_state_.level_dbfs.denominator * leak_factor + + speech_probability; + + const float level_dbfs = preliminary_state_.level_dbfs.GetRatio(); + + if (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_) { + // `preliminary_state_` is now reliable. Update the last level estimation. + level_dbfs_ = ClampLevelEstimateDbfs(level_dbfs); + } + } + DumpDebugData(); +} + +bool SpeechLevelEstimator::IsConfident() const { + if (adjacent_speech_frames_threshold_ == 1) { + // Ignore `reliable_state_` when a single frame is enough to update the + // level estimate (because it is not used). + return preliminary_state_.time_to_confidence_ms == 0; + } + // Once confident, it remains confident. + RTC_DCHECK(reliable_state_.time_to_confidence_ms != 0 || + preliminary_state_.time_to_confidence_ms == 0); + // During the first long enough speech sequence, `reliable_state_` must be + // ignored since `preliminary_state_` is used. 
+ return reliable_state_.time_to_confidence_ms == 0 || + (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_ && + preliminary_state_.time_to_confidence_ms == 0); +} + +void SpeechLevelEstimator::Reset() { + ResetLevelEstimatorState(preliminary_state_); + ResetLevelEstimatorState(reliable_state_); + level_dbfs_ = initial_speech_level_dbfs_; + num_adjacent_speech_frames_ = 0; +} + +void SpeechLevelEstimator::ResetLevelEstimatorState( + LevelEstimatorState& state) const { + state.time_to_confidence_ms = kLevelEstimatorTimeToConfidenceMs; + state.level_dbfs.numerator = initial_speech_level_dbfs_; + state.level_dbfs.denominator = 1.0f; +} + +void SpeechLevelEstimator::DumpDebugData() const { + apm_data_dumper_->DumpRaw( + "agc2_adaptive_level_estimator_num_adjacent_speech_frames", + num_adjacent_speech_frames_); + apm_data_dumper_->DumpRaw( + "agc2_adaptive_level_estimator_preliminary_level_estimate_num", + preliminary_state_.level_dbfs.numerator); + apm_data_dumper_->DumpRaw( + "agc2_adaptive_level_estimator_preliminary_level_estimate_den", + preliminary_state_.level_dbfs.denominator); + apm_data_dumper_->DumpRaw( + "agc2_adaptive_level_estimator_preliminary_time_to_confidence_ms", + preliminary_state_.time_to_confidence_ms); + apm_data_dumper_->DumpRaw( + "agc2_adaptive_level_estimator_reliable_time_to_confidence_ms", + reliable_state_.time_to_confidence_ms); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/speech_level_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/speech_level_estimator.h new file mode 100644 index 0000000000..25e949119c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/speech_level_estimator.h @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_AGC2_SPEECH_LEVEL_ESTIMATOR_H_ +#define MODULES_AUDIO_PROCESSING_AGC2_SPEECH_LEVEL_ESTIMATOR_H_ + +#include + +#include + +#include "modules/audio_processing/agc2/agc2_common.h" +#include "modules/audio_processing/include/audio_processing.h" + +namespace webrtc { +class ApmDataDumper; + +// Active speech level estimator based on the analysis of the following +// framewise properties: RMS level (dBFS), peak level (dBFS), speech +// probability. +class SpeechLevelEstimator { + public: + SpeechLevelEstimator( + ApmDataDumper* apm_data_dumper, + const AudioProcessing::Config::GainController2::AdaptiveDigital& config); + SpeechLevelEstimator(const SpeechLevelEstimator&) = delete; + SpeechLevelEstimator& operator=(const SpeechLevelEstimator&) = delete; + + // Updates the level estimation. + void Update(float rms_dbfs, float peak_dbfs, float speech_probability); + // Returns the estimated speech plus noise level. + float level_dbfs() const { return level_dbfs_; } + // Returns true if the estimator is confident on its current estimate. + bool IsConfident() const; + + void Reset(); + + private: + // Part of the level estimator state used for check-pointing and restore ops. 
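A sketch of how the estimator's public interface above could be driven, assuming default adaptive-digital settings and a plain data dumper; the function name and the per-frame values are made up for illustration.

#include "modules/audio_processing/agc2/speech_level_estimator.h"
#include "modules/audio_processing/include/audio_processing.h"
#include "modules/audio_processing/logging/apm_data_dumper.h"

namespace webrtc {

void EstimateSpeechLevelSketch() {
  ApmDataDumper data_dumper(/*instance_index=*/0);
  AudioProcessing::Config::GainController2::AdaptiveDigital config;
  SpeechLevelEstimator estimator(&data_dumper, config);
  // One 10 ms frame worth of (made up) measurements.
  estimator.Update(/*rms_dbfs=*/-28.0f, /*peak_dbfs=*/-20.0f,
                   /*speech_probability=*/0.95f);
  if (estimator.IsConfident()) {
    const float speech_level_dbfs = estimator.level_dbfs();
    (void)speech_level_dbfs;
  }
}

}  // namespace webrtc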
+ struct LevelEstimatorState { + bool operator==(const LevelEstimatorState& s) const; + inline bool operator!=(const LevelEstimatorState& s) const { + return !(*this == s); + } + // TODO(bugs.webrtc.org/7494): Remove `time_to_confidence_ms` if redundant. + int time_to_confidence_ms; + struct Ratio { + float numerator; + float denominator; + float GetRatio() const; + } level_dbfs; + }; + static_assert(std::is_trivially_copyable::value, ""); + + void ResetLevelEstimatorState(LevelEstimatorState& state) const; + + void DumpDebugData() const; + + ApmDataDumper* const apm_data_dumper_; + + const float initial_speech_level_dbfs_; + const int adjacent_speech_frames_threshold_; + LevelEstimatorState preliminary_state_; + LevelEstimatorState reliable_state_; + float level_dbfs_; + int num_adjacent_speech_frames_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_AGC2_SPEECH_LEVEL_ESTIMATOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/speech_probability_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/speech_probability_buffer.cc new file mode 100644 index 0000000000..7746f6c000 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/speech_probability_buffer.cc @@ -0,0 +1,105 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/agc2/speech_probability_buffer.h" + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { +namespace { + +constexpr float kActivityThreshold = 0.9f; +constexpr int kNumAnalysisFrames = 100; +// We use 12 in AGC2 adaptive digital, but with a slightly different logic. +constexpr int kTransientWidthThreshold = 7; + +} // namespace + +SpeechProbabilityBuffer::SpeechProbabilityBuffer( + float low_probability_threshold) + : low_probability_threshold_(low_probability_threshold), + probabilities_(kNumAnalysisFrames) { + RTC_DCHECK_GE(low_probability_threshold, 0.0f); + RTC_DCHECK_LE(low_probability_threshold, 1.0f); + RTC_DCHECK(!probabilities_.empty()); +} + +void SpeechProbabilityBuffer::Update(float probability) { + // Remove the oldest entry if the circular buffer is full. + if (buffer_is_full_) { + const float oldest_probability = probabilities_[buffer_index_]; + sum_probabilities_ -= oldest_probability; + } + + // Check for transients. + if (probability <= low_probability_threshold_) { + // Set a probability lower than the threshold to zero. + probability = 0.0f; + + // Check if this has been a transient. + if (num_high_probability_observations_ <= kTransientWidthThreshold) { + RemoveTransient(); + } + num_high_probability_observations_ = 0; + } else if (num_high_probability_observations_ <= kTransientWidthThreshold) { + ++num_high_probability_observations_; + } + + // Update the circular buffer and the current sum. + probabilities_[buffer_index_] = probability; + sum_probabilities_ += probability; + + // Increment the buffer index and check for wrap-around. 
+ if (++buffer_index_ >= kNumAnalysisFrames) { + buffer_index_ = 0; + buffer_is_full_ = true; + } +} + +void SpeechProbabilityBuffer::RemoveTransient() { + // Don't expect to be here if high-activity region is longer than + // `kTransientWidthThreshold` or there has not been any transient. + RTC_DCHECK_LE(num_high_probability_observations_, kTransientWidthThreshold); + + // Replace previously added probabilities with zero. + int index = + (buffer_index_ > 0) ? (buffer_index_ - 1) : (kNumAnalysisFrames - 1); + + while (num_high_probability_observations_-- > 0) { + sum_probabilities_ -= probabilities_[index]; + probabilities_[index] = 0.0f; + + // Update the circular buffer index. + index = (index > 0) ? (index - 1) : (kNumAnalysisFrames - 1); + } +} + +bool SpeechProbabilityBuffer::IsActiveSegment() const { + if (!buffer_is_full_) { + return false; + } + if (sum_probabilities_ < kActivityThreshold * kNumAnalysisFrames) { + return false; + } + return true; +} + +void SpeechProbabilityBuffer::Reset() { + sum_probabilities_ = 0.0f; + + // Empty the circular buffer. + buffer_index_ = 0; + buffer_is_full_ = false; + num_high_probability_observations_ = 0; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/speech_probability_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/speech_probability_buffer.h new file mode 100644 index 0000000000..3056a3eeab --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/speech_probability_buffer.h @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_AGC2_SPEECH_PROBABILITY_BUFFER_H_ +#define MODULES_AUDIO_PROCESSING_AGC2_SPEECH_PROBABILITY_BUFFER_H_ + +#include + +#include "rtc_base/gtest_prod_util.h" + +namespace webrtc { + +// This class implements a circular buffer that stores speech probabilities +// for a speech segment and estimates speech activity for that segment. +class SpeechProbabilityBuffer { + public: + // Ctor. The value of `low_probability_threshold` is required to be on the + // range [0.0f, 1.0f]. + explicit SpeechProbabilityBuffer(float low_probability_threshold); + ~SpeechProbabilityBuffer() {} + SpeechProbabilityBuffer(const SpeechProbabilityBuffer&) = delete; + SpeechProbabilityBuffer& operator=(const SpeechProbabilityBuffer&) = delete; + + // Adds `probability` in the buffer and computes an updatds sum of the buffer + // probabilities. Value of `probability` is required to be on the range + // [0.0f, 1.0f]. + void Update(float probability); + + // Resets the histogram, forgets the past. + void Reset(); + + // Returns true if the segment is active (a long enough segment with an + // average speech probability above `low_probability_threshold`). + bool IsActiveSegment() const; + + private: + void RemoveTransient(); + + // Use only for testing. 
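A short sketch of the intended behavior of this buffer: with the 100-frame analysis window and the 0.9 activity threshold defined in the .cc above, a sustained run of high probabilities marks the segment as active. The 0.2f low-probability threshold and the function name are illustrative assumptions.

#include "modules/audio_processing/agc2/speech_probability_buffer.h"

namespace webrtc {

void DetectActiveSegmentSketch() {
  SpeechProbabilityBuffer buffer(/*low_probability_threshold=*/0.2f);
  // Fill the whole analysis window (kNumAnalysisFrames = 100) with high
  // probabilities; the running sum 95.0 exceeds 0.9 * 100, so the segment is
  // reported as active.
  for (int frame = 0; frame < 100; ++frame) {
    buffer.Update(/*probability=*/0.95f);
  }
  const bool active = buffer.IsActiveSegment();  // true for this input.
  (void)active;
  buffer.Reset();  // Forget the past before analyzing the next segment.
}

}  // namespace webrtc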
+  float GetSumProbabilities() const { return sum_probabilities_; }
+
+  FRIEND_TEST_ALL_PREFIXES(SpeechProbabilityBufferTest,
+                           CheckSumAfterInitialization);
+  FRIEND_TEST_ALL_PREFIXES(SpeechProbabilityBufferTest, CheckSumAfterUpdate);
+  FRIEND_TEST_ALL_PREFIXES(SpeechProbabilityBufferTest, CheckSumAfterReset);
+  FRIEND_TEST_ALL_PREFIXES(SpeechProbabilityBufferTest,
+                           CheckSumAfterTransientNotRemoved);
+  FRIEND_TEST_ALL_PREFIXES(SpeechProbabilityBufferTest,
+                           CheckSumAfterTransientRemoved);
+
+  const float low_probability_threshold_;
+
+  // Sum of probabilities stored in `probabilities_`. Must be updated if
+  // `probabilities_` is updated.
+  float sum_probabilities_ = 0.0f;
+
+  // Circular buffer for probabilities.
+  std::vector<float> probabilities_;
+
+  // Current index of the circular buffer, where the newest data will be written
+  // to, therefore, pointing to the oldest data if buffer is full.
+  int buffer_index_ = 0;
+
+  // Indicates if the buffer is full and adding a new value removes the oldest
+  // value.
+  int buffer_is_full_ = false;
+
+  int num_high_probability_observations_ = 0;
+};
+
+}  // namespace webrtc
+
+#endif  // MODULES_AUDIO_PROCESSING_AGC2_SPEECH_PROBABILITY_BUFFER_H_
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_buffer.cc
index ff6636df87..3dbe1fe072 100644
--- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_buffer.cc
+++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_buffer.cc
@@ -45,22 +45,11 @@ AudioBuffer::AudioBuffer(size_t input_rate,
                          size_t buffer_num_channels,
                          size_t output_rate,
                          size_t output_num_channels)
-    : AudioBuffer(static_cast<size_t>(input_rate) / 100,
-                  input_num_channels,
-                  static_cast<size_t>(buffer_rate) / 100,
-                  buffer_num_channels,
-                  static_cast<size_t>(output_rate) / 100) {}
-
-AudioBuffer::AudioBuffer(size_t input_num_frames,
-                         size_t input_num_channels,
-                         size_t buffer_num_frames,
-                         size_t buffer_num_channels,
-                         size_t output_num_frames)
-    : input_num_frames_(input_num_frames),
+    : input_num_frames_(static_cast<size_t>(input_rate) / 100),
       input_num_channels_(input_num_channels),
-      buffer_num_frames_(buffer_num_frames),
+      buffer_num_frames_(static_cast<size_t>(buffer_rate) / 100),
       buffer_num_channels_(buffer_num_channels),
-      output_num_frames_(output_num_frames),
+      output_num_frames_(static_cast<size_t>(output_rate) / 100),
       output_num_channels_(0),
       num_channels_(buffer_num_channels),
       num_bands_(NumBandsFromFramesPerChannel(buffer_num_frames_)),
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_buffer.h
index ab0af4493c..d866b8bce5 100644
--- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_buffer.h
+++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_buffer.h
@@ -40,12 +40,6 @@ class AudioBuffer {
              size_t output_rate,
              size_t output_num_channels);
 
-  // The constructor below will be deprecated.
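The consolidated AudioBuffer constructor above derives each per-channel frame count directly as rate / 100 (one 10 ms frame). A small worked sketch with assumed rates:

#include <cstddef>

// Frame counts implied by the rate / 100 expressions above, assuming a
// 48 kHz input stream and a 32 kHz processing (buffer) rate.
constexpr size_t kInputRateHz = 48000;
constexpr size_t kBufferRateHz = 32000;
constexpr size_t kInputFramesPer10Ms = kInputRateHz / 100;    // 480 samples.
constexpr size_t kBufferFramesPer10Ms = kBufferRateHz / 100;  // 320 samples.
static_assert(kInputFramesPer10Ms == 480, "");
static_assert(kBufferFramesPer10Ms == 320, "");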
- AudioBuffer(size_t input_num_frames, - size_t input_num_channels, - size_t buffer_num_frames, - size_t buffer_num_channels, - size_t output_num_frames); virtual ~AudioBuffer(); AudioBuffer(const AudioBuffer&) = delete; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_builder_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_builder_impl.cc index 79c6fbf524..a246448c26 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_builder_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_builder_impl.cc @@ -8,12 +8,11 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/audio_processing/include/audio_processing.h" - #include +#include "api/make_ref_counted.h" #include "modules/audio_processing/audio_processing_impl.h" -#include "rtc_base/ref_counted_object.h" +#include "modules/audio_processing/include/audio_processing.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc index 8a3ddf5ba6..a0415e2bc3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc @@ -17,6 +17,8 @@ #include #include +#include "absl/strings/match.h" +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/audio_frame.h" @@ -24,15 +26,11 @@ #include "common_audio/include/audio_util.h" #include "modules/audio_processing/aec_dump/aec_dump_factory.h" #include "modules/audio_processing/audio_buffer.h" -#include "modules/audio_processing/common.h" #include "modules/audio_processing/include/audio_frame_view.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "modules/audio_processing/optionally_built_submodule_creators.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/denormal_disabler.h" @@ -51,20 +49,6 @@ namespace webrtc { namespace { -static bool LayoutHasKeyboard(AudioProcessing::ChannelLayout layout) { - switch (layout) { - case AudioProcessing::kMono: - case AudioProcessing::kStereo: - return false; - case AudioProcessing::kMonoAndKeyboard: - case AudioProcessing::kStereoAndKeyboard: - return true; - } - - RTC_DCHECK_NOTREACHED(); - return false; -} - bool SampleRateSupportsMultiBand(int sample_rate_hz) { return sample_rate_hz == AudioProcessing::kSampleRate32kHz || sample_rate_hz == AudioProcessing::kSampleRate48kHz; @@ -83,6 +67,29 @@ bool UseSetupSpecificDefaultAec3Congfig() { "WebRTC-Aec3SetupSpecificDefaultConfigDefaultsKillSwitch"); } +// If the "WebRTC-Audio-TransientSuppressorVadMode" field trial is unspecified, +// returns `TransientSuppressor::VadMode::kDefault`, otherwise parses the field +// trial and returns the specified mode: +// - WebRTC-Audio-TransientSuppressorVadMode/Enabled-Default returns `kDefault`; +// - WebRTC-Audio-TransientSuppressorVadMode/Enabled-RnnVad returns `kRnnVad`; +// - WebRTC-Audio-TransientSuppressorVadMode/Enabled-NoVad returns `kNoVad`. 
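For reference, a sketch of how the RNN VAD variant of this field trial could be selected; it assumes the embedder initializes field trials through webrtc::field_trial::InitFieldTrialsFromString(), and the helper name is illustrative.

#include "system_wrappers/include/field_trial.h"

// The trial string must outlive its use; a static literal is the simplest way.
static const char kAssumedFieldTrials[] =
    "WebRTC-Audio-TransientSuppressorVadMode/Enabled-RnnVad/";

void EnableRnnVadTransientSuppressionSketch() {
  webrtc::field_trial::InitFieldTrialsFromString(kAssumedFieldTrials);
  // GetTransientSuppressorVadMode() below now sees "Enabled-RnnVad" and
  // returns TransientSuppressor::VadMode::kRnnVad.
}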
+TransientSuppressor::VadMode GetTransientSuppressorVadMode() { + constexpr char kFieldTrial[] = "WebRTC-Audio-TransientSuppressorVadMode"; + std::string full_name = webrtc::field_trial::FindFullName(kFieldTrial); + if (full_name.empty() || absl::EndsWith(full_name, "-Default")) { + return TransientSuppressor::VadMode::kDefault; + } + if (absl::EndsWith(full_name, "-RnnVad")) { + return TransientSuppressor::VadMode::kRnnVad; + } + if (absl::EndsWith(full_name, "-NoVad")) { + return TransientSuppressor::VadMode::kNoVad; + } + // Fallback to default. + RTC_LOG(LS_WARNING) << "Invalid parameter for " << kFieldTrial; + return TransientSuppressor::VadMode::kDefault; +} + // Identify the native processing rate that best handles a sample rate. int SuitableProcessRate(int minimum_rate, int max_splitting_rate, @@ -129,6 +136,15 @@ static const size_t kMaxAllowedValuesOfSamplesPerFrame = 480; // reverse and forward call numbers. static const size_t kMaxNumFramesToBuffer = 100; +void PackRenderAudioBufferForEchoDetector(const AudioBuffer& audio, + std::vector& packed_buffer) { + packed_buffer.clear(); + packed_buffer.insert(packed_buffer.end(), audio.channels_const()[0], + audio.channels_const()[0] + audio.num_frames()); +} + +constexpr int kUnspecifiedDataDumpInputVolume = -100; + } // namespace // Throughout webrtc, it's assumed that success is represented by zero. @@ -145,38 +161,35 @@ AudioProcessingImpl::SubmoduleStates::SubmoduleStates( bool AudioProcessingImpl::SubmoduleStates::Update( bool high_pass_filter_enabled, bool mobile_echo_controller_enabled, - bool residual_echo_detector_enabled, bool noise_suppressor_enabled, bool adaptive_gain_controller_enabled, bool gain_controller2_enabled, + bool voice_activity_detector_enabled, bool gain_adjustment_enabled, bool echo_controller_enabled, - bool voice_detector_enabled, bool transient_suppressor_enabled) { bool changed = false; changed |= (high_pass_filter_enabled != high_pass_filter_enabled_); changed |= (mobile_echo_controller_enabled != mobile_echo_controller_enabled_); - changed |= - (residual_echo_detector_enabled != residual_echo_detector_enabled_); changed |= (noise_suppressor_enabled != noise_suppressor_enabled_); changed |= (adaptive_gain_controller_enabled != adaptive_gain_controller_enabled_); changed |= (gain_controller2_enabled != gain_controller2_enabled_); + changed |= + (voice_activity_detector_enabled != voice_activity_detector_enabled_); changed |= (gain_adjustment_enabled != gain_adjustment_enabled_); changed |= (echo_controller_enabled != echo_controller_enabled_); - changed |= (voice_detector_enabled != voice_detector_enabled_); changed |= (transient_suppressor_enabled != transient_suppressor_enabled_); if (changed) { high_pass_filter_enabled_ = high_pass_filter_enabled; mobile_echo_controller_enabled_ = mobile_echo_controller_enabled; - residual_echo_detector_enabled_ = residual_echo_detector_enabled; noise_suppressor_enabled_ = noise_suppressor_enabled; adaptive_gain_controller_enabled_ = adaptive_gain_controller_enabled; gain_controller2_enabled_ = gain_controller2_enabled; + voice_activity_detector_enabled_ = voice_activity_detector_enabled; gain_adjustment_enabled_ = gain_adjustment_enabled; echo_controller_enabled_ = echo_controller_enabled; - voice_detector_enabled_ = voice_detector_enabled; transient_suppressor_enabled_ = transient_suppressor_enabled; } @@ -187,7 +200,7 @@ bool AudioProcessingImpl::SubmoduleStates::Update( bool AudioProcessingImpl::SubmoduleStates::CaptureMultiBandSubModulesActive() const { - return 
CaptureMultiBandProcessingPresent() || voice_detector_enabled_;
+  return CaptureMultiBandProcessingPresent();
 }
 
 bool AudioProcessingImpl::SubmoduleStates::CaptureMultiBandProcessingPresent()
@@ -242,7 +255,7 @@ AudioProcessingImpl::AudioProcessingImpl()
                           /*echo_detector=*/nullptr,
                           /*capture_analyzer=*/nullptr) {}
 
-int AudioProcessingImpl::instance_count_ = 0;
+std::atomic<int> AudioProcessingImpl::instance_count_(0);
 
 AudioProcessingImpl::AudioProcessingImpl(
     const AudioProcessing::Config& config,
@@ -251,12 +264,12 @@ AudioProcessingImpl::AudioProcessingImpl(
     std::unique_ptr<EchoControlFactory> echo_control_factory,
     rtc::scoped_refptr<EchoDetector> echo_detector,
     std::unique_ptr<CustomAudioAnalyzer> capture_analyzer)
-    : data_dumper_(
-          new ApmDataDumper(rtc::AtomicOps::Increment(&instance_count_))),
+    : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)),
       use_setup_specific_default_aec3_config_(
           UseSetupSpecificDefaultAec3Congfig()),
       use_denormal_disabler_(
          !field_trial::IsEnabled("WebRTC-ApmDenormalDisablerKillSwitch")),
+      transient_suppressor_vad_mode_(GetTransientSuppressorVadMode()),
       capture_runtime_settings_(RuntimeSettingQueueSize()),
       render_runtime_settings_(RuntimeSettingQueueSize()),
       capture_runtime_settings_enqueuer_(&capture_runtime_settings_),
@@ -278,7 +291,11 @@ AudioProcessingImpl::AudioProcessingImpl(
           MinimizeProcessingForUnusedOutput(),
           field_trial::IsEnabled("WebRTC-TransientSuppressorForcedOff")),
       capture_(),
-      capture_nonlocked_() {
+      capture_nonlocked_(),
+      applied_input_volume_stats_reporter_(
+          InputVolumeStatsReporter::InputVolumeType::kApplied),
+      recommended_input_volume_stats_reporter_(
+          InputVolumeStatsReporter::InputVolumeType::kRecommended) {
   RTC_LOG(LS_INFO) << "Injected APM submodules:"
                       "\nEcho control factory: "
                    << !!echo_control_factory_
@@ -296,11 +313,6 @@ AudioProcessingImpl::AudioProcessingImpl(
   capture_nonlocked_.echo_controller_enabled =
       static_cast<bool>(echo_control_factory_);
 
-  // If no echo detector is injected, use the ResidualEchoDetector.
-  if (!submodules_.echo_detector) {
-    submodules_.echo_detector = rtc::make_ref_counted<ResidualEchoDetector>();
-  }
-
   Initialize();
 }
 
@@ -314,26 +326,6 @@ int AudioProcessingImpl::Initialize() {
   return kNoError;
 }
 
-int AudioProcessingImpl::Initialize(int capture_input_sample_rate_hz,
-                                    int capture_output_sample_rate_hz,
-                                    int render_input_sample_rate_hz,
-                                    ChannelLayout capture_input_layout,
-                                    ChannelLayout capture_output_layout,
-                                    ChannelLayout render_input_layout) {
-  const ProcessingConfig processing_config = {
-      {{capture_input_sample_rate_hz, ChannelsFromLayout(capture_input_layout),
-        LayoutHasKeyboard(capture_input_layout)},
-       {capture_output_sample_rate_hz,
-        ChannelsFromLayout(capture_output_layout),
-        LayoutHasKeyboard(capture_output_layout)},
-       {render_input_sample_rate_hz, ChannelsFromLayout(render_input_layout),
-        LayoutHasKeyboard(render_input_layout)},
-       {render_input_sample_rate_hz, ChannelsFromLayout(render_input_layout),
-        LayoutHasKeyboard(render_input_layout)}}};
-
-  return Initialize(processing_config);
-}
-
 int AudioProcessingImpl::Initialize(const ProcessingConfig& processing_config) {
   // Run in a single-threaded manner during initialization.
MutexLock lock_render(&mutex_render_); @@ -409,10 +401,10 @@ void AudioProcessingImpl::InitializeLocked() { InitializeGainController1(); InitializeTransientSuppressor(); InitializeHighPassFilter(true); - InitializeVoiceDetector(); InitializeResidualEchoDetector(); InitializeEchoController(); InitializeGainController2(/*config_has_changed=*/true); + InitializeVoiceActivityDetector(/*config_has_changed=*/true); InitializeNoiseSuppressor(); InitializeAnalyzer(); InitializePostProcessor(); @@ -544,9 +536,6 @@ void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { const bool agc2_config_changed = config_.gain_controller2 != config.gain_controller2; - const bool voice_detection_config_changed = - config_.voice_detection.enabled != config.voice_detection.enabled; - const bool ns_config_changed = config_.noise_suppression.enabled != config.noise_suppression.enabled || config_.noise_suppression.level != config.noise_suppression.level; @@ -590,15 +579,12 @@ void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { } InitializeGainController2(agc2_config_changed); + InitializeVoiceActivityDetector(agc2_config_changed); if (pre_amplifier_config_changed || gain_adjustment_config_changed) { InitializeCaptureLevelsAdjuster(); } - if (voice_detection_config_changed) { - InitializeVoiceDetector(); - } - // Reinitialization must happen after all submodule configuration to avoid // additional reinitializations on the next capture / render processing call. if (pipeline_config_changed) { @@ -746,18 +732,20 @@ int AudioProcessingImpl::MaybeInitializeCapture( } if (processing_config.input_stream() != input_config) { - processing_config.input_stream() = input_config; reinitialization_required = true; } if (processing_config.output_stream() != output_config) { - processing_config.output_stream() = output_config; reinitialization_required = true; } if (reinitialization_required) { MutexLock lock_render(&mutex_render_); MutexLock lock_capture(&mutex_capture_); + // Reread the API format since the render format may have changed. + processing_config = formats_.api_format; + processing_config.input_stream() = input_config; + processing_config.output_stream() = output_config; RETURN_ON_ERR(InitializeLocked(processing_config)); } return kNoError; @@ -781,7 +769,6 @@ int AudioProcessingImpl::ProcessStream(const float* const* src, RecordUnprocessedCaptureStream(src); } - capture_.keyboard_info.Extract(src, formats_.api_format.input_stream()); capture_.capture_audio->CopyFrom(src, formats_.api_format.input_stream()); if (capture_.capture_fullband_audio) { capture_.capture_fullband_audio->CopyFrom( @@ -969,16 +956,18 @@ void AudioProcessingImpl::QueueBandedRenderAudio(AudioBuffer* audio) { } void AudioProcessingImpl::QueueNonbandedRenderAudio(AudioBuffer* audio) { - ResidualEchoDetector::PackRenderAudioBuffer(audio, &red_render_queue_buffer_); - - // Insert the samples into the queue. - if (!red_render_signal_queue_->Insert(&red_render_queue_buffer_)) { - // The data queue is full and needs to be emptied. - EmptyQueuedRenderAudio(); + if (submodules_.echo_detector) { + PackRenderAudioBufferForEchoDetector(*audio, red_render_queue_buffer_); + RTC_DCHECK(red_render_signal_queue_); + // Insert the samples into the queue. + if (!red_render_signal_queue_->Insert(&red_render_queue_buffer_)) { + // The data queue is full and needs to be emptied. + EmptyQueuedRenderAudio(); - // Retry the insert (should always work). 
-      bool result = red_render_signal_queue_->Insert(&red_render_queue_buffer_);
-      RTC_DCHECK(result);
+      // Retry the insert (should always work).
+      bool result = red_render_signal_queue_->Insert(&red_render_queue_buffer_);
+      RTC_DCHECK(result);
+    }
   }
 }
 
@@ -1011,23 +1000,26 @@ void AudioProcessingImpl::AllocateRenderQueue() {
     agc_render_signal_queue_->Clear();
   }
 
-  if (red_render_queue_element_max_size_ <
-      new_red_render_queue_element_max_size) {
-    red_render_queue_element_max_size_ = new_red_render_queue_element_max_size;
+  if (submodules_.echo_detector) {
+    if (red_render_queue_element_max_size_ <
+        new_red_render_queue_element_max_size) {
+      red_render_queue_element_max_size_ =
+          new_red_render_queue_element_max_size;
 
-    std::vector<float> template_queue_element(
-        red_render_queue_element_max_size_);
+      std::vector<float> template_queue_element(
+          red_render_queue_element_max_size_);
 
-    red_render_signal_queue_.reset(
-        new SwapQueue<std::vector<float>, RenderQueueItemVerifier<float>>(
-            kMaxNumFramesToBuffer, template_queue_element,
-            RenderQueueItemVerifier<float>(
-                red_render_queue_element_max_size_)));
+      red_render_signal_queue_.reset(
+          new SwapQueue<std::vector<float>, RenderQueueItemVerifier<float>>(
+              kMaxNumFramesToBuffer, template_queue_element,
+              RenderQueueItemVerifier<float>(
+                  red_render_queue_element_max_size_)));
 
-    red_render_queue_buffer_.resize(red_render_queue_element_max_size_);
-    red_capture_queue_buffer_.resize(red_render_queue_element_max_size_);
-  } else {
-    red_render_signal_queue_->Clear();
+      red_render_queue_buffer_.resize(red_render_queue_element_max_size_);
+      red_capture_queue_buffer_.resize(red_render_queue_element_max_size_);
+    } else {
+      red_render_signal_queue_->Clear();
+    }
   }
 }
 
@@ -1051,9 +1043,10 @@ void AudioProcessingImpl::EmptyQueuedRenderAudioLocked() {
     }
   }
 
-  while (red_render_signal_queue_->Remove(&red_capture_queue_buffer_)) {
-    RTC_DCHECK(submodules_.echo_detector);
-    submodules_.echo_detector->AnalyzeRenderAudio(red_capture_queue_buffer_);
+  if (submodules_.echo_detector) {
+    while (red_render_signal_queue_->Remove(&red_capture_queue_buffer_)) {
+      submodules_.echo_detector->AnalyzeRenderAudio(red_capture_queue_buffer_);
+    }
   }
 }
 
@@ -1088,7 +1081,6 @@ int AudioProcessingImpl::ProcessStream(const int16_t* const src,
   if (aec_dump_) {
     RecordProcessedCaptureStream(dest, output_config);
   }
-
   return kNoError;
 }
 
@@ -1103,6 +1095,10 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() {
   RTC_DCHECK_LE(
       !!submodules_.echo_controller + !!submodules_.echo_control_mobile, 1);
 
+  data_dumper_->DumpRaw(
+      "applied_input_volume",
+      capture_.applied_input_volume.value_or(kUnspecifiedDataDumpInputVolume));
+
   AudioBuffer* capture_buffer = capture_.capture_audio.get();  // For brevity.
   AudioBuffer* linear_aec_buffer = capture_.linear_aec_output.get();
 
@@ -1114,18 +1110,13 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() {
   }
 
   if (submodules_.capture_levels_adjuster) {
-    // If the analog mic gain emulation is active, get the emulated analog mic
-    // gain and pass it to the analog gain control functionality.
if (config_.capture_level_adjustment.analog_mic_gain_emulation.enabled) { - int level = submodules_.capture_levels_adjuster->GetAnalogMicGainLevel(); - if (submodules_.agc_manager) { - submodules_.agc_manager->set_stream_analog_level(level); - } else if (submodules_.gain_control) { - int error = submodules_.gain_control->set_stream_analog_level(level); - RTC_DCHECK_EQ(kNoError, error); - } + // When the input volume is emulated, retrieve the volume applied to the + // input audio and notify that to APM so that the volume is passed to the + // active AGC. + set_stream_analog_level_locked( + submodules_.capture_levels_adjuster->GetAnalogMicGainLevel()); } - submodules_.capture_levels_adjuster->ApplyPreLevelAdjustment( *capture_buffer); } @@ -1143,16 +1134,15 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { levels.peak, 1, RmsLevel::kMinLevelDb, 64); } - // Detect an analog gain change. - int analog_mic_level = recommended_stream_analog_level_locked(); - const bool analog_mic_level_changed = - capture_.prev_analog_mic_level != analog_mic_level && - capture_.prev_analog_mic_level != -1; - capture_.prev_analog_mic_level = analog_mic_level; - analog_gain_stats_reporter_.UpdateStatistics(analog_mic_level); + if (capture_.applied_input_volume.has_value()) { + applied_input_volume_stats_reporter_.UpdateStatistics( + *capture_.applied_input_volume); + } if (submodules_.echo_controller) { - capture_.echo_path_gain_change = analog_mic_level_changed; + // Determine if the echo path gain has changed by checking all the gains + // applied before AEC. + capture_.echo_path_gain_change = capture_.applied_input_volume_changed; // Detect and flag any change in the capture level adjustment pre-gain. if (submodules_.capture_levels_adjuster) { @@ -1161,7 +1151,7 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { capture_.echo_path_gain_change = capture_.echo_path_gain_change || (capture_.prev_pre_adjustment_gain != pre_adjustment_gain && - capture_.prev_pre_adjustment_gain >= 0.f); + capture_.prev_pre_adjustment_gain >= 0.0f); capture_.prev_pre_adjustment_gain = pre_adjustment_gain; } @@ -1176,7 +1166,7 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { } if (submodules_.agc_manager) { - submodules_.agc_manager->AnalyzePreProcess(capture_buffer); + submodules_.agc_manager->AnalyzePreProcess(*capture_buffer); } if (submodule_states_.CaptureMultiBandSubModulesActive() && @@ -1248,15 +1238,8 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { } } - if (config_.voice_detection.enabled) { - capture_.stats.voice_detected = - submodules_.voice_detector->ProcessCaptureAudio(capture_buffer); - } else { - capture_.stats.voice_detected = absl::nullopt; - } - if (submodules_.agc_manager) { - submodules_.agc_manager->Process(capture_buffer); + submodules_.agc_manager->Process(*capture_buffer); absl::optional new_digital_gain = submodules_.agc_manager->GetDigitalComressionGain(); @@ -1289,29 +1272,48 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { capture_buffer = capture_.capture_fullband_audio.get(); } - if (config_.residual_echo_detector.enabled) { - RTC_DCHECK(submodules_.echo_detector); + if (submodules_.echo_detector) { submodules_.echo_detector->AnalyzeCaptureAudio( rtc::ArrayView(capture_buffer->channels()[0], capture_buffer->num_frames())); } - // TODO(aluebs): Investigate if the transient suppression placement should - // be before or after the AGC. 
+  absl::optional<float> voice_probability;
+  if (!!submodules_.voice_activity_detector) {
+    voice_probability = submodules_.voice_activity_detector->Analyze(
+        AudioFrameView<const float>(capture_buffer->channels(),
+                                    capture_buffer->num_channels(),
+                                    capture_buffer->num_frames()));
+  }
+
   if (submodules_.transient_suppressor) {
-    float voice_probability =
-        submodules_.agc_manager.get()
-            ? submodules_.agc_manager->voice_probability()
-            : 1.f;
-
-    submodules_.transient_suppressor->Suppress(
-        capture_buffer->channels()[0], capture_buffer->num_frames(),
-        capture_buffer->num_channels(),
-        capture_buffer->split_bands_const(0)[kBand0To8kHz],
-        capture_buffer->num_frames_per_band(),
-        capture_.keyboard_info.keyboard_data,
-        capture_.keyboard_info.num_keyboard_frames, voice_probability,
-        capture_.key_pressed);
+    float transient_suppressor_voice_probability = 1.0f;
+    switch (transient_suppressor_vad_mode_) {
+      case TransientSuppressor::VadMode::kDefault:
+        if (submodules_.agc_manager) {
+          transient_suppressor_voice_probability =
+              submodules_.agc_manager->voice_probability();
+        }
+        break;
+      case TransientSuppressor::VadMode::kRnnVad:
+        RTC_DCHECK(voice_probability.has_value());
+        transient_suppressor_voice_probability = *voice_probability;
+        break;
+      case TransientSuppressor::VadMode::kNoVad:
+        // The transient suppressor will ignore `voice_probability`.
+        break;
+    }
+    float delayed_voice_probability =
+        submodules_.transient_suppressor->Suppress(
+            capture_buffer->channels()[0], capture_buffer->num_frames(),
+            capture_buffer->num_channels(),
+            capture_buffer->split_bands_const(0)[kBand0To8kHz],
+            capture_buffer->num_frames_per_band(),
+            /*reference_data=*/nullptr, /*reference_length=*/0,
+            transient_suppressor_voice_probability, capture_.key_pressed);
+    if (voice_probability.has_value()) {
+      *voice_probability = delayed_voice_probability;
+    }
   }
 
   // Experimental APM sub-module that analyzes `capture_buffer`.
@@ -1320,9 +1322,9 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() {
   }
 
   if (submodules_.gain_controller2) {
-    submodules_.gain_controller2->NotifyAnalogLevel(
-        recommended_stream_analog_level_locked());
-    submodules_.gain_controller2->Process(capture_buffer);
+    submodules_.gain_controller2->Process(
+        voice_probability, capture_.applied_input_volume_changed,
+        capture_buffer);
   }
 
   if (submodules_.capture_post_processor) {
@@ -1341,15 +1343,8 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() {
                                 levels.peak, 1, RmsLevel::kMinLevelDb, 64);
   }
 
-  if (submodules_.agc_manager) {
-    int level = recommended_stream_analog_level_locked();
-    data_dumper_->DumpRaw("experimental_gain_control_stream_analog_level", 1,
-                          &level);
-  }
-
   // Compute echo-detector stats.
-  if (config_.residual_echo_detector.enabled) {
-    RTC_DCHECK(submodules_.echo_detector);
+  if (submodules_.echo_detector) {
     auto ed_metrics = submodules_.echo_detector->GetMetrics();
     capture_.stats.residual_echo_likelihood = ed_metrics.echo_likelihood;
     capture_.stats.residual_echo_likelihood_recent_max =
@@ -1369,20 +1364,23 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() {
   // Pass stats for reporting.
stats_reporter_.UpdateStatistics(capture_.stats); + UpdateRecommendedInputVolumeLocked(); + if (capture_.recommended_input_volume.has_value()) { + recommended_input_volume_stats_reporter_.UpdateStatistics( + *capture_.recommended_input_volume); + } + if (submodules_.capture_levels_adjuster) { submodules_.capture_levels_adjuster->ApplyPostLevelAdjustment( *capture_buffer); - // If the analog mic gain emulation is active, retrieve the level from the - // analog gain control and set it to mic gain emulator. if (config_.capture_level_adjustment.analog_mic_gain_emulation.enabled) { - if (submodules_.agc_manager) { - submodules_.capture_levels_adjuster->SetAnalogMicGainLevel( - submodules_.agc_manager->stream_analog_level()); - } else if (submodules_.gain_control) { - submodules_.capture_levels_adjuster->SetAnalogMicGainLevel( - submodules_.gain_control->stream_analog_level()); - } + // If the input volume emulation is used, retrieve the recommended input + // volume and set that to emulate the input volume on the next processed + // audio frame. + RTC_DCHECK(capture_.recommended_input_volume.has_value()); + submodules_.capture_levels_adjuster->SetAnalogMicGainLevel( + *capture_.recommended_input_volume); } } @@ -1401,6 +1399,11 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { capture_.capture_output_used_last_frame = capture_.capture_output_used; capture_.was_stream_delay_set = false; + + data_dumper_->DumpRaw("recommended_input_volume", + capture_.recommended_input_volume.value_or( + kUnspecifiedDataDumpInputVolume)); + return kNoError; } @@ -1607,19 +1610,28 @@ void AudioProcessingImpl::set_stream_key_pressed(bool key_pressed) { } void AudioProcessingImpl::set_stream_analog_level(int level) { + // Check that input volume emulation is disabled since, when enabled, there is + // no externally applied input volume to notify to APM. + RTC_DCHECK( + !submodules_.capture_levels_adjuster || + !config_.capture_level_adjustment.analog_mic_gain_emulation.enabled); + MutexLock lock_capture(&mutex_capture_); + set_stream_analog_level_locked(level); +} - if (config_.capture_level_adjustment.analog_mic_gain_emulation.enabled) { - // If the analog mic gain is emulated internally, simply cache the level for - // later reporting back as the recommended stream analog level to use. - capture_.cached_stream_analog_level_ = level; - return; - } +void AudioProcessingImpl::set_stream_analog_level_locked(int level) { + capture_.applied_input_volume_changed = + capture_.applied_input_volume.has_value() && + *capture_.applied_input_volume != level; + capture_.applied_input_volume = level; + + // Invalidate any previously recommended input volume which will be updated by + // `ProcessStream()`. + capture_.recommended_input_volume = absl::nullopt; if (submodules_.agc_manager) { submodules_.agc_manager->set_stream_analog_level(level); - data_dumper_->DumpRaw("experimental_gain_control_set_stream_analog_level", - 1, &level); return; } @@ -1628,34 +1640,47 @@ void AudioProcessingImpl::set_stream_analog_level(int level) { RTC_DCHECK_EQ(kNoError, error); return; } - - // If no analog mic gain control functionality is in place, cache the level - // for later reporting back as the recommended stream analog level to use. 
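A sketch of the per-frame call order expected from the embedder with the applied/recommended input volume split introduced here; the helper name, the frame buffer and the surrounding setup are assumptions, not part of this patch.

#include <cstdint>

#include "modules/audio_processing/include/audio_processing.h"

// Returns the input volume to apply to the capture device for the next frame.
int ProcessCaptureFrameSketch(webrtc::AudioProcessing& apm,
                              const webrtc::StreamConfig& config,
                              int16_t* frame,
                              int applied_mic_volume) {
  apm.set_stream_analog_level(applied_mic_volume);  // Volume currently in use.
  apm.ProcessStream(frame, config, config, frame);
  // Refreshed by ProcessStream(); falls back to the applied volume (or 255)
  // when no recommendation is available.
  return apm.recommended_stream_analog_level();
}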
- capture_.cached_stream_analog_level_ = level; } int AudioProcessingImpl::recommended_stream_analog_level() const { MutexLock lock_capture(&mutex_capture_); - return recommended_stream_analog_level_locked(); -} - -int AudioProcessingImpl::recommended_stream_analog_level_locked() const { - if (config_.capture_level_adjustment.analog_mic_gain_emulation.enabled) { - return capture_.cached_stream_analog_level_; + if (!capture_.applied_input_volume.has_value()) { + RTC_LOG(LS_ERROR) << "set_stream_analog_level has not been called"; + } + // Input volume to recommend when `set_stream_analog_level()` is not called. + constexpr int kFallBackInputVolume = 255; + // When APM has no input volume to recommend, return the latest applied input + // volume that has been observed in order to possibly produce no input volume + // change. If no applied input volume has been observed, return a fall-back + // value. + return capture_.recommended_input_volume.value_or( + capture_.applied_input_volume.value_or(kFallBackInputVolume)); +} + +void AudioProcessingImpl::UpdateRecommendedInputVolumeLocked() { + if (!capture_.applied_input_volume.has_value()) { + // When `set_stream_analog_level()` is not called, no input level can be + // recommended. + capture_.recommended_input_volume = absl::nullopt; + return; } if (submodules_.agc_manager) { - return submodules_.agc_manager->stream_analog_level(); + capture_.recommended_input_volume = + submodules_.agc_manager->recommended_analog_level(); + return; } if (submodules_.gain_control) { - return submodules_.gain_control->stream_analog_level(); + capture_.recommended_input_volume = + submodules_.gain_control->stream_analog_level(); + return; } - return capture_.cached_stream_analog_level_; + capture_.recommended_input_volume = capture_.applied_input_volume; } -bool AudioProcessingImpl::CreateAndAttachAecDump(const std::string& file_name, +bool AudioProcessingImpl::CreateAndAttachAecDump(absl::string_view file_name, int64_t max_log_size_bytes, rtc::TaskQueue* worker_queue) { std::unique_ptr aec_dump = @@ -1714,11 +1739,11 @@ AudioProcessing::Config AudioProcessingImpl::GetConfig() const { bool AudioProcessingImpl::UpdateActiveSubmoduleStates() { return submodule_states_.Update( config_.high_pass_filter.enabled, !!submodules_.echo_control_mobile, - config_.residual_echo_detector.enabled, !!submodules_.noise_suppressor, - !!submodules_.gain_control, !!submodules_.gain_controller2, + !!submodules_.noise_suppressor, !!submodules_.gain_control, + !!submodules_.gain_controller2, !!submodules_.voice_activity_detector, config_.pre_amplifier.enabled || config_.capture_level_adjustment.enabled, capture_nonlocked_.echo_controller_enabled, - config_.voice_detection.enabled, !!submodules_.transient_suppressor); + !!submodules_.transient_suppressor); } void AudioProcessingImpl::InitializeTransientSuppressor() { @@ -1726,16 +1751,18 @@ void AudioProcessingImpl::InitializeTransientSuppressor() { !constants_.transient_suppressor_forced_off) { // Attempt to create a transient suppressor, if one is not already created. 
if (!submodules_.transient_suppressor) { - submodules_.transient_suppressor = - CreateTransientSuppressor(submodule_creation_overrides_); - } - if (submodules_.transient_suppressor) { - submodules_.transient_suppressor->Initialize( + submodules_.transient_suppressor = CreateTransientSuppressor( + submodule_creation_overrides_, transient_suppressor_vad_mode_, proc_fullband_sample_rate_hz(), capture_nonlocked_.split_rate, num_proc_channels()); + if (!submodules_.transient_suppressor) { + RTC_LOG(LS_WARNING) + << "No transient suppressor created (probably disabled)"; + } } else { - RTC_LOG(LS_WARNING) - << "No transient suppressor created (probably disabled)"; + submodules_.transient_suppressor->Initialize( + proc_fullband_sample_rate_hz(), capture_nonlocked_.split_rate, + num_proc_channels()); } } else { submodules_.transient_suppressor.reset(); @@ -1768,14 +1795,6 @@ void AudioProcessingImpl::InitializeHighPassFilter(bool forced_reset) { } } -void AudioProcessingImpl::InitializeVoiceDetector() { - if (config_.voice_detection.enabled) { - submodules_.voice_detector = std::make_unique( - proc_split_sample_rate_hz(), VoiceDetection::kVeryLowLikelihood); - } else { - submodules_.voice_detector.reset(); - } -} void AudioProcessingImpl::InitializeEchoController() { bool use_echo_controller = echo_control_factory_ || @@ -1788,14 +1807,14 @@ void AudioProcessingImpl::InitializeEchoController() { proc_sample_rate_hz(), num_reverse_channels(), num_proc_channels()); RTC_DCHECK(submodules_.echo_controller); } else { - EchoCanceller3Config config = - use_setup_specific_default_aec3_config_ - ? EchoCanceller3::CreateDefaultConfig(num_reverse_channels(), - num_proc_channels()) - : EchoCanceller3Config(); + EchoCanceller3Config config; + absl::optional multichannel_config; + if (use_setup_specific_default_aec3_config_) { + multichannel_config = EchoCanceller3::CreateDefaultMultichannelConfig(); + } submodules_.echo_controller = std::make_unique( - config, proc_sample_rate_hz(), num_reverse_channels(), - num_proc_channels()); + config, multichannel_config, proc_sample_rate_hz(), + num_reverse_channels(), num_proc_channels()); } // Setup the storage for returning the linear AEC output. 
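The set_stream_analog_level()/recommended_stream_analog_level() hunks above replace the cached cached_stream_analog_level_ with an applied/recommended input volume pair, but the calling convention stays the same: report the volume that was actually applied before ProcessStream(), then read back the recommendation afterwards. A minimal caller-side sketch under those assumptions; ReadMicVolume(), SetMicVolume() and Process10msFrame() are hypothetical stand-ins for the platform audio HAL and the regular processing call, not WebRTC API:

#include "modules/audio_processing/include/audio_processing.h"

// Hypothetical HAL helpers (assumptions, not WebRTC API): read/write the
// device input volume in the [0, 255] range expected by APM.
int ReadMicVolume();
void SetMicVolume(int volume);
// Stand-in for the regular apm.ProcessStream(...) call on a ~10 ms frame.
void Process10msFrame(webrtc::AudioProcessing& apm);

void CaptureOneFrame(webrtc::AudioProcessing& apm) {
  const int applied = ReadMicVolume();
  // Must be called before ProcessStream(); per the hunks above it must be
  // skipped entirely when analog mic gain emulation is enabled (the patch
  // DCHECKs against that case).
  apm.set_stream_analog_level(applied);
  Process10msFrame(apm);
  // Recommended volume for the next frame; applying it is the caller's job.
  // Without a prior set_stream_analog_level() call, the patched
  // implementation logs an error and falls back to the last applied volume
  // or, failing that, 255.
  const int recommended = apm.recommended_stream_analog_level();
  if (recommended != applied) {
    SetMicVolume(recommended);
  }
}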
@@ -1862,6 +1881,10 @@ void AudioProcessingImpl::InitializeGainController1() { return; } + RTC_HISTOGRAM_BOOLEAN( + "WebRTC.Audio.GainController.Analog.Enabled", + config_.gain_controller1.analog_gain_controller.enabled); + if (!submodules_.gain_control) { submodules_.gain_control.reset(new GainControlImpl()); } @@ -1893,32 +1916,20 @@ void AudioProcessingImpl::InitializeGainController1() { if (!submodules_.agc_manager.get() || submodules_.agc_manager->num_channels() != - static_cast(num_proc_channels()) || - submodules_.agc_manager->sample_rate_hz() != - capture_nonlocked_.split_rate) { + static_cast(num_proc_channels())) { int stream_analog_level = -1; const bool re_creation = !!submodules_.agc_manager; if (re_creation) { - stream_analog_level = submodules_.agc_manager->stream_analog_level(); + stream_analog_level = submodules_.agc_manager->recommended_analog_level(); } submodules_.agc_manager.reset(new AgcManagerDirect( - num_proc_channels(), - config_.gain_controller1.analog_gain_controller.startup_min_volume, - config_.gain_controller1.analog_gain_controller.clipped_level_min, - !config_.gain_controller1.analog_gain_controller - .enable_digital_adaptive, - capture_nonlocked_.split_rate, - config_.gain_controller1.analog_gain_controller.clipped_level_step, - config_.gain_controller1.analog_gain_controller.clipped_ratio_threshold, - config_.gain_controller1.analog_gain_controller.clipped_wait_frames, - config_.gain_controller1.analog_gain_controller.clipping_predictor)); + num_proc_channels(), config_.gain_controller1.analog_gain_controller)); if (re_creation) { submodules_.agc_manager->set_stream_analog_level(stream_analog_level); } } submodules_.agc_manager->Initialize(); - submodules_.agc_manager->SetupDigitalGainControl( - submodules_.gain_control.get()); + submodules_.agc_manager->SetupDigitalGainControl(*submodules_.gain_control); submodules_.agc_manager->HandleCaptureOutputUsedChange( capture_.capture_output_used); } @@ -1932,9 +1943,35 @@ void AudioProcessingImpl::InitializeGainController2(bool config_has_changed) { return; } if (!submodules_.gain_controller2 || config_has_changed) { + const bool use_internal_vad = + transient_suppressor_vad_mode_ != TransientSuppressor::VadMode::kRnnVad; submodules_.gain_controller2 = std::make_unique( config_.gain_controller2, proc_fullband_sample_rate_hz(), - num_input_channels()); + num_input_channels(), use_internal_vad); + } +} + +void AudioProcessingImpl::InitializeVoiceActivityDetector( + bool config_has_changed) { + if (!config_has_changed) { + return; + } + const bool use_vad = + transient_suppressor_vad_mode_ == TransientSuppressor::VadMode::kRnnVad && + config_.gain_controller2.enabled && + config_.gain_controller2.adaptive_digital.enabled; + if (!use_vad) { + submodules_.voice_activity_detector.reset(); + return; + } + if (!submodules_.voice_activity_detector || config_has_changed) { + RTC_DCHECK(!!submodules_.gain_controller2); + // TODO(bugs.webrtc.org/13663): Cache CPU features in APM and use here. 
+ submodules_.voice_activity_detector = + std::make_unique( + config_.gain_controller2.adaptive_digital.vad_reset_period_ms, + submodules_.gain_controller2->GetCpuFeatures(), + proc_fullband_sample_rate_hz()); } } @@ -1991,10 +2028,11 @@ void AudioProcessingImpl::InitializeCaptureLevelsAdjuster() { } void AudioProcessingImpl::InitializeResidualEchoDetector() { - RTC_DCHECK(submodules_.echo_detector); - submodules_.echo_detector->Initialize( - proc_fullband_sample_rate_hz(), 1, - formats_.render_processing_format.sample_rate_hz(), 1); + if (submodules_.echo_detector) { + submodules_.echo_detector->Initialize( + proc_fullband_sample_rate_hz(), 1, + formats_.render_processing_format.sample_rate_hz(), 1); + } } void AudioProcessingImpl::InitializeAnalyzer() { @@ -2027,10 +2065,6 @@ void AudioProcessingImpl::WriteAecDumpConfigMessage(bool forced) { std::string experiments_description = ""; // TODO(peah): Add semicolon-separated concatenations of experiment // descriptions for other submodules. - if (config_.gain_controller1.analog_gain_controller.clipped_level_min != - kClippedLevelMin) { - experiments_description += "AgcClippingLevelExperiment;"; - } if (!!submodules_.capture_post_processor) { experiments_description += "CapturePostProcessor;"; } @@ -2139,7 +2173,7 @@ void AudioProcessingImpl::RecordAudioProcessingState() { AecDump::AudioProcessingState audio_proc_state; audio_proc_state.delay = capture_nonlocked_.stream_delay_ms; audio_proc_state.drift = 0; - audio_proc_state.level = recommended_stream_analog_level_locked(); + audio_proc_state.applied_input_volume = capture_.applied_input_volume; audio_proc_state.keypress = capture_.key_pressed; aec_dump_->AddAudioProcessingState(audio_proc_state); } @@ -2152,24 +2186,13 @@ AudioProcessingImpl::ApmCaptureState::ApmCaptureState() capture_processing_format(kSampleRate16kHz), split_rate(kSampleRate16kHz), echo_path_gain_change(false), - prev_analog_mic_level(-1), - prev_pre_adjustment_gain(-1.f), + prev_pre_adjustment_gain(-1.0f), playout_volume(-1), - prev_playout_volume(-1) {} + prev_playout_volume(-1), + applied_input_volume_changed(false) {} AudioProcessingImpl::ApmCaptureState::~ApmCaptureState() = default; -void AudioProcessingImpl::ApmCaptureState::KeyboardInfo::Extract( - const float* const* data, - const StreamConfig& stream_config) { - if (stream_config.has_keyboard()) { - keyboard_data = data[stream_config.num_channels()]; - } else { - keyboard_data = NULL; - } - num_keyboard_frames = stream_config.num_frames(); -} - AudioProcessingImpl::ApmRenderState::ApmRenderState() = default; AudioProcessingImpl::ApmRenderState::~ApmRenderState() = default; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h index 22cdaddb2f..5daea9088a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h @@ -13,16 +13,20 @@ #include +#include #include #include #include #include +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" #include "api/function_view.h" #include "modules/audio_processing/aec3/echo_canceller3.h" #include "modules/audio_processing/agc/agc_manager_direct.h" -#include "modules/audio_processing/agc/analog_gain_stats_reporter.h" #include "modules/audio_processing/agc/gain_control.h" +#include "modules/audio_processing/agc2/input_volume_stats_reporter.h" 
#include "modules/audio_processing/audio_buffer.h" #include "modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.h" #include "modules/audio_processing/echo_control_mobile_impl.h" @@ -36,10 +40,8 @@ #include "modules/audio_processing/ns/noise_suppressor.h" #include "modules/audio_processing/optionally_built_submodule_creators.h" #include "modules/audio_processing/render_queue_item_verifier.h" -#include "modules/audio_processing/residual_echo_detector.h" #include "modules/audio_processing/rms_level.h" #include "modules/audio_processing/transient/transient_suppressor.h" -#include "modules/audio_processing/voice_detection.h" #include "rtc_base/gtest_prod_util.h" #include "rtc_base/ignore_wundef.h" #include "rtc_base/swap_queue.h" @@ -51,6 +53,10 @@ namespace webrtc { class ApmDataDumper; class AudioConverter; +constexpr int RuntimeSettingQueueSize() { + return 100; +} + class AudioProcessingImpl : public AudioProcessing { public: // Methods forcing APM to run in a single-threaded manner. @@ -64,15 +70,9 @@ class AudioProcessingImpl : public AudioProcessing { std::unique_ptr capture_analyzer); ~AudioProcessingImpl() override; int Initialize() override; - int Initialize(int capture_input_sample_rate_hz, - int capture_output_sample_rate_hz, - int render_sample_rate_hz, - ChannelLayout capture_input_layout, - ChannelLayout capture_output_layout, - ChannelLayout render_input_layout) override; int Initialize(const ProcessingConfig& processing_config) override; void ApplyConfig(const AudioProcessing::Config& config) override; - bool CreateAndAttachAecDump(const std::string& file_name, + bool CreateAndAttachAecDump(absl::string_view file_name, int64_t max_log_size_bytes, rtc::TaskQueue* worker_queue) override; bool CreateAndAttachAecDump(FILE* handle, @@ -161,7 +161,9 @@ class AudioProcessingImpl : public AudioProcessing { FRIEND_TEST_ALL_PREFIXES(ApmWithSubmodulesExcludedTest, BitexactWithDisabledModules); - int recommended_stream_analog_level_locked() const + void set_stream_analog_level_locked(int level) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); + void UpdateRecommendedInputVolumeLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); void OverrideSubmoduleCreationForTesting( @@ -182,12 +184,14 @@ class AudioProcessingImpl : public AudioProcessing { SwapQueue& runtime_settings_; }; - std::unique_ptr data_dumper_; - static int instance_count_; + const std::unique_ptr data_dumper_; + static std::atomic instance_count_; const bool use_setup_specific_default_aec3_config_; const bool use_denormal_disabler_; + const TransientSuppressor::VadMode transient_suppressor_vad_mode_; + SwapQueue capture_runtime_settings_; SwapQueue render_runtime_settings_; @@ -195,7 +199,7 @@ class AudioProcessingImpl : public AudioProcessing { RuntimeSettingEnqueuer render_runtime_settings_enqueuer_; // EchoControl factory. - std::unique_ptr echo_control_factory_; + const std::unique_ptr echo_control_factory_; class SubmoduleStates { public: @@ -205,13 +209,12 @@ class AudioProcessingImpl : public AudioProcessing { // Updates the submodule state and returns true if it has changed. 
bool Update(bool high_pass_filter_enabled, bool mobile_echo_controller_enabled, - bool residual_echo_detector_enabled, bool noise_suppressor_enabled, bool adaptive_gain_controller_enabled, bool gain_controller2_enabled, + bool voice_activity_detector_enabled, bool gain_adjustment_enabled, bool echo_controller_enabled, - bool voice_detector_enabled, bool transient_suppressor_enabled); bool CaptureMultiBandSubModulesActive() const; bool CaptureMultiBandProcessingPresent() const; @@ -229,13 +232,12 @@ class AudioProcessingImpl : public AudioProcessing { const bool capture_analyzer_enabled_ = false; bool high_pass_filter_enabled_ = false; bool mobile_echo_controller_enabled_ = false; - bool residual_echo_detector_enabled_ = false; bool noise_suppressor_enabled_ = false; bool adaptive_gain_controller_enabled_ = false; + bool voice_activity_detector_enabled_ = false; bool gain_controller2_enabled_ = false; bool gain_adjustment_enabled_ = false; bool echo_controller_enabled_ = false; - bool voice_detector_enabled_ = false; bool transient_suppressor_enabled_ = false; bool first_update_ = true; }; @@ -271,7 +273,6 @@ class AudioProcessingImpl : public AudioProcessing { // already acquired. void InitializeHighPassFilter(bool forced_reset) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); - void InitializeVoiceDetector() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); void InitializeGainController1() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); void InitializeTransientSuppressor() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); @@ -279,6 +280,11 @@ class AudioProcessingImpl : public AudioProcessing { // and `config_has_changed` is true, recreates the sub-module. void InitializeGainController2(bool config_has_changed) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); + // Initializes the `VoiceActivityDetectorWrapper` sub-module. If the + // sub-module is enabled and `config_has_changed` is true, recreates the + // sub-module. + void InitializeVoiceActivityDetector(bool config_has_changed) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); void InitializeNoiseSuppressor() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); void InitializeCaptureLevelsAdjuster() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); @@ -392,19 +398,19 @@ class AudioProcessingImpl : public AudioProcessing { render_pre_processor(std::move(render_pre_processor)), capture_analyzer(std::move(capture_analyzer)) {} // Accessed internally from capture or during initialization. + const rtc::scoped_refptr echo_detector; + const std::unique_ptr capture_post_processor; + const std::unique_ptr render_pre_processor; + const std::unique_ptr capture_analyzer; std::unique_ptr agc_manager; std::unique_ptr gain_control; std::unique_ptr gain_controller2; + std::unique_ptr voice_activity_detector; std::unique_ptr high_pass_filter; - rtc::scoped_refptr echo_detector; std::unique_ptr echo_controller; std::unique_ptr echo_control_mobile; std::unique_ptr noise_suppressor; std::unique_ptr transient_suppressor; - std::unique_ptr capture_post_processor; - std::unique_ptr render_pre_processor; - std::unique_ptr capture_analyzer; - std::unique_ptr voice_detector; std::unique_ptr capture_levels_adjuster; } submodules_; @@ -416,10 +422,10 @@ class AudioProcessingImpl : public AudioProcessing { struct ApmFormatState { ApmFormatState() : // Format of processing streams at input/output call sites. 
- api_format({{{kSampleRate16kHz, 1, false}, - {kSampleRate16kHz, 1, false}, - {kSampleRate16kHz, 1, false}, - {kSampleRate16kHz, 1, false}}}), + api_format({{{kSampleRate16kHz, 1}, + {kSampleRate16kHz, 1}, + {kSampleRate16kHz, 1}, + {kSampleRate16kHz, 1}}}), render_processing_format(kSampleRate16kHz, 1) {} ProcessingConfig api_format; StreamConfig render_processing_format; @@ -461,17 +467,18 @@ class AudioProcessingImpl : public AudioProcessing { StreamConfig capture_processing_format; int split_rate; bool echo_path_gain_change; - int prev_analog_mic_level; float prev_pre_adjustment_gain; int playout_volume; int prev_playout_volume; AudioProcessingStats stats; - struct KeyboardInfo { - void Extract(const float* const* data, const StreamConfig& stream_config); - size_t num_keyboard_frames = 0; - const float* keyboard_data = nullptr; - } keyboard_info; - int cached_stream_analog_level_ = 0; + // Input volume applied on the audio input device when the audio is + // acquired. Unspecified when unknown. + absl::optional applied_input_volume; + bool applied_input_volume_changed; + // Recommended input volume to apply on the audio input device the next time + // that audio is acquired. Unspecified when no input volume can be + // recommended. + absl::optional recommended_input_volume; } capture_ RTC_GUARDED_BY(mutex_capture_); struct ApmCaptureNonLockedState { @@ -532,7 +539,9 @@ class AudioProcessingImpl : public AudioProcessing { RmsLevel capture_output_rms_ RTC_GUARDED_BY(mutex_capture_); int capture_rms_interval_counter_ RTC_GUARDED_BY(mutex_capture_) = 0; - AnalogGainStatsReporter analog_gain_stats_reporter_ + InputVolumeStatsReporter applied_input_volume_stats_reporter_ + RTC_GUARDED_BY(mutex_capture_); + InputVolumeStatsReporter recommended_input_volume_stats_reporter_ RTC_GUARDED_BY(mutex_capture_); // Lock protection not needed. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/common.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/common.h deleted file mode 100644 index e14279ec10..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/common.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_PROCESSING_COMMON_H_ -#define MODULES_AUDIO_PROCESSING_COMMON_H_ - -#include "modules/audio_processing/include/audio_processing.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -constexpr int RuntimeSettingQueueSize() { - return 100; -} - -static inline size_t ChannelsFromLayout(AudioProcessing::ChannelLayout layout) { - switch (layout) { - case AudioProcessing::kMono: - case AudioProcessing::kMonoAndKeyboard: - return 1; - case AudioProcessing::kStereo: - case AudioProcessing::kStereoAndKeyboard: - return 2; - } - RTC_DCHECK_NOTREACHED(); - return 0; -} - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_COMMON_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/echo_control_mobile_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/echo_control_mobile_impl.cc index 667d6bfecb..fa5cb8ffec 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/echo_control_mobile_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/echo_control_mobile_impl.cc @@ -18,7 +18,6 @@ #include "modules/audio_processing/audio_buffer.h" #include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -85,6 +84,9 @@ class EchoControlMobileImpl::Canceller { WebRtcAecm_Free(state_); } + Canceller(const Canceller&) = delete; + Canceller& operator=(const Canceller&) = delete; + void* state() { RTC_DCHECK(state_); return state_; @@ -98,7 +100,6 @@ class EchoControlMobileImpl::Canceller { private: void* state_; - RTC_DISALLOW_COPY_AND_ASSIGN(Canceller); }; EchoControlMobileImpl::EchoControlMobileImpl() diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.cc index 466e4b0eb4..ecc286e0c0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.cc @@ -18,7 +18,6 @@ #include "modules/audio_processing/audio_buffer.h" #include "modules/audio_processing/include/audio_frame_view.h" #include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" @@ -29,7 +28,6 @@ namespace { using Agc2Config = AudioProcessing::Config::GainController2; -constexpr int kUnspecifiedAnalogLevel = -1; constexpr int kLogLimiterStatsPeriodMs = 30'000; constexpr int kFrameLengthMs = 10; constexpr int kLogLimiterStatsPeriodNumFrames = @@ -65,13 +63,14 @@ std::unique_ptr CreateAdaptiveDigitalController( } // namespace -int GainController2::instance_count_ = 0; +std::atomic GainController2::instance_count_(0); GainController2::GainController2(const Agc2Config& config, int sample_rate_hz, - int num_channels) + int num_channels, + bool use_internal_vad) : cpu_features_(GetAllowedCpuFeatures()), - data_dumper_(rtc::AtomicOps::Increment(&instance_count_)), + data_dumper_(instance_count_.fetch_add(1) + 1), fixed_gain_applier_( /*hard_clip_samples=*/false, /*initial_gain_factor=*/DbToRatio(config.fixed_digital.gain_db)), @@ -81,12 +80,11 @@ GainController2::GainController2(const Agc2Config& config, num_channels, &data_dumper_)), limiter_(sample_rate_hz, &data_dumper_, /*histogram_name_prefix=*/"Agc2"), - calls_since_last_limiter_log_(0), - analog_level_(kUnspecifiedAnalogLevel) { + calls_since_last_limiter_log_(0) { 
RTC_DCHECK(Validate(config)); data_dumper_.InitiateNewSetOfRecordings(); const bool use_vad = config.adaptive_digital.enabled; - if (use_vad) { + if (use_vad && use_internal_vad) { // TODO(bugs.webrtc.org/7494): Move `vad_reset_period_ms` from adaptive // digital to gain controller 2 config. vad_ = std::make_unique( @@ -97,24 +95,6 @@ GainController2::GainController2(const Agc2Config& config, GainController2::~GainController2() = default; -void GainController2::Initialize(int sample_rate_hz, int num_channels) { - RTC_DCHECK(sample_rate_hz == AudioProcessing::kSampleRate8kHz || - sample_rate_hz == AudioProcessing::kSampleRate16kHz || - sample_rate_hz == AudioProcessing::kSampleRate32kHz || - sample_rate_hz == AudioProcessing::kSampleRate48kHz); - // TODO(bugs.webrtc.org/7494): Initialize `fixed_gain_applier_`. - limiter_.SetSampleRate(sample_rate_hz); - if (vad_) { - vad_->Initialize(sample_rate_hz); - } - if (adaptive_digital_controller_) { - adaptive_digital_controller_->Initialize(sample_rate_hz, num_channels); - } - data_dumper_.InitiateNewSetOfRecordings(); - calls_since_last_limiter_log_ = 0; - analog_level_ = kUnspecifiedAnalogLevel; -} - void GainController2::SetFixedGainDb(float gain_db) { const float gain_factor = DbToRatio(gain_db); if (fixed_gain_applier_.GetGainFactor() != gain_factor) { @@ -125,13 +105,24 @@ void GainController2::SetFixedGainDb(float gain_db) { fixed_gain_applier_.SetGainFactor(gain_factor); } -void GainController2::Process(AudioBuffer* audio) { - data_dumper_.DumpRaw("agc2_notified_analog_level", analog_level_); +void GainController2::Process(absl::optional speech_probability, + bool input_volume_changed, + AudioBuffer* audio) { + data_dumper_.DumpRaw("agc2_applied_input_volume_changed", + input_volume_changed); + if (input_volume_changed && !!adaptive_digital_controller_) { + adaptive_digital_controller_->HandleInputGainChange(); + } + AudioFrameView float_frame(audio->channels(), audio->num_channels(), audio->num_frames()); - absl::optional speech_probability; if (vad_) { speech_probability = vad_->Analyze(float_frame); + } else if (speech_probability.has_value()) { + RTC_DCHECK_GE(speech_probability.value(), 0.0f); + RTC_DCHECK_LE(speech_probability.value(), 1.0f); + } + if (speech_probability.has_value()) { data_dumper_.DumpRaw("agc2_speech_probability", speech_probability.value()); } fixed_gain_applier_.ApplyGain(float_frame); @@ -154,13 +145,6 @@ void GainController2::Process(AudioBuffer* audio) { } } -void GainController2::NotifyAnalogLevel(int level) { - if (analog_level_ != level && adaptive_digital_controller_) { - adaptive_digital_controller_->HandleInputGainChange(); - } - analog_level_ = level; -} - bool GainController2::Validate( const AudioProcessing::Config::GainController2& config) { const auto& fixed = config.fixed_digital; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.h index 8c82d745b5..843917a802 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.h @@ -11,6 +11,7 @@ #ifndef MODULES_AUDIO_PROCESSING_GAIN_CONTROLLER2_H_ #define MODULES_AUDIO_PROCESSING_GAIN_CONTROLLER2_H_ +#include #include #include @@ -30,29 +31,35 @@ class AudioBuffer; // microphone gain and/or applying digital gain. class GainController2 { public: + // Ctor. 
If `use_internal_vad` is true, an internal voice activity + // detector is used for digital adaptive gain. GainController2(const AudioProcessing::Config::GainController2& config, int sample_rate_hz, - int num_channels); + int num_channels, + bool use_internal_vad); GainController2(const GainController2&) = delete; GainController2& operator=(const GainController2&) = delete; ~GainController2(); - // Detects and handles changes of sample rate and/or number of channels. - void Initialize(int sample_rate_hz, int num_channels); - // Sets the fixed digital gain. void SetFixedGainDb(float gain_db); // Applies fixed and adaptive digital gains to `audio` and runs a limiter. - void Process(AudioBuffer* audio); - - // Handles analog level changes. - void NotifyAnalogLevel(int level); + // If the internal VAD is used, `speech_probability` is ignored. Otherwise + // `speech_probability` is used for digital adaptive gain if it's available + // (limited to values [0.0, 1.0]). Handles input volume changes; if the caller + // cannot determine whether an input volume change occurred, set + // `input_volume_changed` to false. + void Process(absl::optional speech_probability, + bool input_volume_changed, + AudioBuffer* audio); static bool Validate(const AudioProcessing::Config::GainController2& config); + AvailableCpuFeatures GetCpuFeatures() const { return cpu_features_; } + private: - static int instance_count_; + static std::atomic instance_count_; const AvailableCpuFeatures cpu_features_; ApmDataDumper data_dumper_; GainApplier fixed_gain_applier_; @@ -60,7 +67,6 @@ class GainController2 { std::unique_ptr adaptive_digital_controller_; Limiter limiter_; int calls_since_last_limiter_log_; - int analog_level_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/aec_dump.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/aec_dump.cc index 67809d0dcb..8f788cb802 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/aec_dump.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/aec_dump.cc @@ -17,7 +17,7 @@ InternalAPMConfig::InternalAPMConfig(InternalAPMConfig&&) = default; InternalAPMConfig& InternalAPMConfig::operator=(const InternalAPMConfig&) = default; -bool InternalAPMConfig::operator==(const InternalAPMConfig& other) { +bool InternalAPMConfig::operator==(const InternalAPMConfig& other) const { return aec_enabled == other.aec_enabled && aec_delay_agnostic_enabled == other.aec_delay_agnostic_enabled && aec_drift_compensation_enabled == diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/aec_dump.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/aec_dump.h index a7769d9973..6f2eb64f3a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/aec_dump.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/aec_dump.h @@ -16,6 +16,7 @@ #include #include "absl/base/attributes.h" +#include "absl/types/optional.h" #include "modules/audio_processing/include/audio_frame_view.h" #include "modules/audio_processing/include/audio_processing.h" @@ -31,7 +32,7 @@ struct InternalAPMConfig { InternalAPMConfig& operator=(const InternalAPMConfig&); InternalAPMConfig& operator=(InternalAPMConfig&&) = delete; - bool operator==(const InternalAPMConfig& other); + bool operator==(const InternalAPMConfig& other) const; bool aec_enabled = false; bool aec_delay_agnostic_enabled = false; @@ -67,7 +68,7 @@ class AecDump { struct AudioProcessingState { int 
delay; int drift; - int level; + absl::optional applied_input_volume; bool keypress; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_frame_proxies.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_frame_proxies.cc index b960e72e86..7cc4fb75e4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_frame_proxies.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_frame_proxies.cc @@ -20,10 +20,8 @@ int ProcessAudioFrame(AudioProcessing* ap, AudioFrame* frame) { return AudioProcessing::Error::kNullPointerError; } - StreamConfig input_config(frame->sample_rate_hz_, frame->num_channels_, - /*has_keyboard=*/false); - StreamConfig output_config(frame->sample_rate_hz_, frame->num_channels_, - /*has_keyboard=*/false); + StreamConfig input_config(frame->sample_rate_hz_, frame->num_channels_); + StreamConfig output_config(frame->sample_rate_hz_, frame->num_channels_); RTC_DCHECK_EQ(frame->samples_per_channel(), input_config.num_frames()); int result = ap->ProcessStream(frame->data(), input_config, output_config, @@ -57,10 +55,8 @@ int ProcessReverseAudioFrame(AudioProcessing* ap, AudioFrame* frame) { return AudioProcessing::Error::kBadNumberChannelsError; } - StreamConfig input_config(frame->sample_rate_hz_, frame->num_channels_, - /*has_keyboard=*/false); - StreamConfig output_config(frame->sample_rate_hz_, frame->num_channels_, - /*has_keyboard=*/false); + StreamConfig input_config(frame->sample_rate_hz_, frame->num_channels_); + StreamConfig output_config(frame->sample_rate_hz_, frame->num_channels_); int result = ap->ProcessReverseStream(frame->data(), input_config, output_config, frame->mutable_data()); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc index 0fd18fd956..86edaee087 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc @@ -145,7 +145,6 @@ std::string AudioProcessing::Config::ToString() const { << NoiseSuppressionLevelToString(noise_suppression.level) << " }, transient_suppression: { enabled: " << transient_suppression.enabled - << " }, voice_detection: { enabled: " << voice_detection.enabled << " }, gain_controller1: { enabled: " << gain_controller1.enabled << ", mode: " << GainController1ModeToString(gain_controller1.mode) << ", target_level_dbfs: " << gain_controller1.target_level_dbfs @@ -205,8 +204,7 @@ std::string AudioProcessing::Config::ToString() const { << gain_controller2.adaptive_digital.max_gain_change_db_per_second << ", max_output_noise_level_dbfs: " << gain_controller2.adaptive_digital.max_output_noise_level_dbfs - << "}}, residual_echo_detector: { enabled: " - << residual_echo_detector.enabled << " }}"; + << "}}"; return builder.str(); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h index b3ef3af9bd..03e88acdec 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h @@ -23,6 +23,7 @@ #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" @@ 
-30,7 +31,6 @@ #include "api/scoped_refptr.h" #include "modules/audio_processing/include/audio_processing_statistics.h" #include "rtc_base/arraysize.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/ref_count.h" #include "rtc_base/system/file_wrapper.h" #include "rtc_base/system/rtc_export.h" @@ -51,19 +51,6 @@ class EchoDetector; class CustomAudioAnalyzer; class CustomProcessing; -// Use to enable experimental gain control (AGC). At startup the experimental -// AGC moves the microphone volume up to `startup_min_volume` if the current -// microphone volume is set too low. The value is clamped to its operating range -// [12, 255]. Here, 255 maps to 100%. -// -// Must be provided through AudioProcessingBuilder().Create(config). -#if defined(WEBRTC_CHROMIUM_BUILD) -static constexpr int kAgcStartupMinVolume = 85; -#else -static constexpr int kAgcStartupMinVolume = 0; -#endif // defined(WEBRTC_CHROMIUM_BUILD) -static constexpr int kClippedLevelMin = 70; - // The Audio Processing Module (APM) provides a collection of voice processing // components designed for real-time communications software. // @@ -93,9 +80,9 @@ static constexpr int kClippedLevelMin = 70; // 2. Parameter getters are never called concurrently with the corresponding // setter. // -// APM accepts only linear PCM audio data in chunks of 10 ms. The int16 -// interfaces use interleaved data, while the float interfaces use deinterleaved -// data. +// APM accepts only linear PCM audio data in chunks of ~10 ms (see +// AudioProcessing::GetFrameSize() for details). The int16 interfaces use +// interleaved data, while the float interfaces use deinterleaved data. // // Usage example, omitting error checking: // AudioProcessing* apm = AudioProcessingBuilder().Create(); @@ -114,8 +101,6 @@ static constexpr int kClippedLevelMin = 70; // // config.high_pass_filter.enabled = true; // -// config.voice_detection.enabled = true; -// // apm->ApplyConfig(config) // // apm->noise_reduction()->set_level(kHighSuppression); @@ -161,7 +146,6 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // submodule resets, affecting the audio quality. Use the RuntimeSetting // construct for runtime configuration. struct RTC_EXPORT Config { - // Sets the properties of the audio processing pipeline. struct RTC_EXPORT Pipeline { // Maximum allowed processing rate used internally. May only be set to @@ -234,11 +218,6 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { bool enabled = false; } transient_suppression; - // Enables reporting of `voice_detected` in webrtc::AudioProcessingStats. - struct VoiceDetection { - bool enabled = false; - } voice_detection; - // Enables automatic gain control (AGC) functionality. // The automatic gain control (AGC) component brings the signal to an // appropriate range. This is done by applying a digital gain directly and, @@ -295,10 +274,12 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // Enables the analog gain controller functionality. struct AnalogGainController { bool enabled = true; - int startup_min_volume = kAgcStartupMinVolume; + // TODO(bugs.webrtc.org/7494): Deprecated. Stop using and remove. + int startup_min_volume = 0; // Lowest analog microphone level that will be applied in response to // clipping. - int clipped_level_min = kClippedLevelMin; + int clipped_level_min = 70; + // If true, an adaptive digital gain is applied. bool enable_digital_adaptive = true; // Amount the microphone level is lowered with every clipping event. 
// Limited to (0, 255]. @@ -378,24 +359,9 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { } adaptive_digital; } gain_controller2; - struct ResidualEchoDetector { - bool enabled = true; - } residual_echo_detector; - std::string ToString() const; }; - // TODO(mgraczyk): Remove once all methods that use ChannelLayout are gone. - enum ChannelLayout { - kMono, - // Left, right. - kStereo, - // Mono, keyboard, and mic. - kMonoAndKeyboard, - // Left, right, keyboard, and mic. - kStereoAndKeyboard - }; - // Specifies the properties of a setting to be passed to AudioProcessing at // runtime. class RuntimeSetting { @@ -530,16 +496,6 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // number of channels as the input. virtual int Initialize(const ProcessingConfig& processing_config) = 0; - // Initialize with unpacked parameters. See Initialize() above for details. - // - // TODO(mgraczyk): Remove once clients are updated to use the new interface. - virtual int Initialize(int capture_input_sample_rate_hz, - int capture_output_sample_rate_hz, - int render_sample_rate_hz, - ChannelLayout capture_input_layout, - ChannelLayout capture_output_layout, - ChannelLayout render_input_layout) = 0; - // TODO(peah): This method is a temporary solution used to take control // over the parameters in the audio processing module and is likely to change. virtual void ApplyConfig(const Config& config) = 0; @@ -567,7 +523,7 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // enqueueing was successfull. virtual bool PostRuntimeSetting(RuntimeSetting setting) = 0; - // Accepts and produces a 10 ms frame interleaved 16 bit integer audio as + // Accepts and produces a ~10 ms frame of interleaved 16 bit integer audio as // specified in `input_config` and `output_config`. `src` and `dest` may use // the same memory, if desired. virtual int ProcessStream(const int16_t* const src, @@ -587,7 +543,7 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { const StreamConfig& output_config, float* const* dest) = 0; - // Accepts and produces a 10 ms frame of interleaved 16 bit integer audio for + // Accepts and produces a ~10 ms frame of interleaved 16 bit integer audio for // the reverse direction audio stream as specified in `input_config` and // `output_config`. `src` and `dest` may use the same memory, if desired. virtual int ProcessReverseStream(const int16_t* const src, @@ -608,10 +564,10 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { virtual int AnalyzeReverseStream(const float* const* data, const StreamConfig& reverse_config) = 0; - // Returns the most recently produced 10 ms of the linear AEC output at a rate - // of 16 kHz. If there is more than one capture channel, a mono representation - // of the input is returned. Returns true/false to indicate whether an output - // returned. + // Returns the most recently produced ~10 ms of the linear AEC output at a + // rate of 16 kHz. If there is more than one capture channel, a mono + // representation of the input is returned. Returns true/false to indicate + // whether an output returned. virtual bool GetLinearAecOutput( rtc::ArrayView> linear_output) const = 0; @@ -620,9 +576,10 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // HAL. Must be within the range [0, 255]. 
virtual void set_stream_analog_level(int level) = 0; - // When an analog mode is set, this should be called after ProcessStream() - // to obtain the recommended new analog level for the audio HAL. It is the - // user's responsibility to apply this level. + // When an analog mode is set, this should be called after + // `set_stream_analog_level()` and `ProcessStream()` to obtain the recommended + // new analog level for the audio HAL. It is the user's responsibility to + // apply this level. virtual int recommended_stream_analog_level() const = 0; // This must be called if and only if echo processing is enabled. @@ -654,7 +611,7 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // return value of true indicates that the file has been // sucessfully opened, while a value of false indicates that // opening the file failed. - virtual bool CreateAndAttachAecDump(const std::string& file_name, + virtual bool CreateAndAttachAecDump(absl::string_view file_name, int64_t max_log_size_bytes, rtc::TaskQueue* worker_queue) = 0; virtual bool CreateAndAttachAecDump(FILE* handle, @@ -728,7 +685,29 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { static constexpr int kMaxNativeSampleRateHz = kNativeSampleRatesHz[kNumNativeSampleRates - 1]; + // APM processes audio in chunks of about 10 ms. See GetFrameSize() for + // details. static constexpr int kChunkSizeMs = 10; + + // Returns floor(sample_rate_hz/100): the number of samples per channel used + // as input and output to the audio processing module in calls to + // ProcessStream, ProcessReverseStream, AnalyzeReverseStream, and + // GetLinearAecOutput. + // + // This is exactly 10 ms for sample rates divisible by 100. For example: + // - 48000 Hz (480 samples per channel), + // - 44100 Hz (441 samples per channel), + // - 16000 Hz (160 samples per channel). + // + // Sample rates not divisible by 100 are received/produced in frames of + // approximately 10 ms. For example: + // - 22050 Hz (220 samples per channel, or ~9.98 ms per frame), + // - 11025 Hz (110 samples per channel, or ~9.98 ms per frame). + // These nondivisible sample rates yield lower audio quality compared to + // multiples of 100. Internal resampling to 10 ms frames causes a simulated + // clock drift effect which impacts the performance of (for example) echo + // cancellation. + static int GetFrameSize(int sample_rate_hz) { return sample_rate_hz / 100; } }; class RTC_EXPORT AudioProcessingBuilder { @@ -797,23 +776,10 @@ class RTC_EXPORT AudioProcessingBuilder { class StreamConfig { public: // sample_rate_hz: The sampling rate of the stream. - // - // num_channels: The number of audio channels in the stream, excluding the - // keyboard channel if it is present. When passing a - // StreamConfig with an array of arrays T*[N], - // - // N == {num_channels + 1 if has_keyboard - // {num_channels if !has_keyboard - // - // has_keyboard: True if the stream has a keyboard channel. When has_keyboard - // is true, the last channel in any corresponding list of - // channels is the keyboard channel. - StreamConfig(int sample_rate_hz = 0, - size_t num_channels = 0, - bool has_keyboard = false) + // num_channels: The number of audio channels in the stream. 
+ StreamConfig(int sample_rate_hz = 0, size_t num_channels = 0) : sample_rate_hz_(sample_rate_hz), num_channels_(num_channels), - has_keyboard_(has_keyboard), num_frames_(calculate_frames(sample_rate_hz)) {} void set_sample_rate_hz(int value) { @@ -821,35 +787,29 @@ class StreamConfig { num_frames_ = calculate_frames(value); } void set_num_channels(size_t value) { num_channels_ = value; } - void set_has_keyboard(bool value) { has_keyboard_ = value; } int sample_rate_hz() const { return sample_rate_hz_; } - // The number of channels in the stream, not including the keyboard channel if - // present. + // The number of channels in the stream. size_t num_channels() const { return num_channels_; } - bool has_keyboard() const { return has_keyboard_; } size_t num_frames() const { return num_frames_; } size_t num_samples() const { return num_channels_ * num_frames_; } bool operator==(const StreamConfig& other) const { return sample_rate_hz_ == other.sample_rate_hz_ && - num_channels_ == other.num_channels_ && - has_keyboard_ == other.has_keyboard_; + num_channels_ == other.num_channels_; } bool operator!=(const StreamConfig& other) const { return !(*this == other); } private: static size_t calculate_frames(int sample_rate_hz) { - return static_cast(AudioProcessing::kChunkSizeMs * sample_rate_hz / - 1000); + return static_cast(AudioProcessing::GetFrameSize(sample_rate_hz)); } int sample_rate_hz_; size_t num_channels_; - bool has_keyboard_; size_t num_frames_; }; @@ -939,17 +899,13 @@ class EchoDetector : public rtc::RefCountInterface { int render_sample_rate_hz, int num_render_channels) = 0; - // Analysis (not changing) of the render signal. + // Analysis (not changing) of the first channel of the render signal. virtual void AnalyzeRenderAudio(rtc::ArrayView render_audio) = 0; // Analysis (not changing) of the capture signal. virtual void AnalyzeCaptureAudio( rtc::ArrayView capture_audio) = 0; - // Pack an AudioBuffer into a vector. - static void PackRenderAudioBuffer(AudioBuffer* audio, - std::vector* packed_buffer); - struct Metrics { absl::optional echo_likelihood; absl::optional echo_likelihood_recent_max; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing_statistics.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing_statistics.h index a31dafe49c..3b43319951 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing_statistics.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing_statistics.h @@ -24,6 +24,8 @@ struct RTC_EXPORT AudioProcessingStats { AudioProcessingStats(const AudioProcessingStats& other); ~AudioProcessingStats(); + // Deprecated. + // TODO(bugs.webrtc.org/11226): Remove. // True if voice is detected in the last capture frame, after processing. // It is conservative in flagging audio as speech, with low likelihood of // incorrectly flagging a frame as voice. 
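The audio_processing.h changes above drop the keyboard channel from StreamConfig and document the ~10 ms framing through the new AudioProcessing::GetFrameSize() helper. A minimal sketch of the updated int16 interface, assuming an already-created and configured APM instance; the 48 kHz stereo values are illustrative only:

#include <cstddef>
#include <cstdint>
#include <vector>

#include "modules/audio_processing/include/audio_processing.h"

// Processes one interleaved int16 frame in place.
void ProcessOneFrame(webrtc::AudioProcessing& apm) {
  constexpr int kRate = 48000;
  constexpr size_t kChannels = 2;
  const webrtc::StreamConfig config(kRate, kChannels);  // no has_keyboard flag
  // floor(48000 / 100) = 480 samples per channel, i.e. exactly 10 ms here.
  const int samples_per_channel = webrtc::AudioProcessing::GetFrameSize(kRate);
  std::vector<int16_t> frame(static_cast<size_t>(samples_per_channel) * kChannels, 0);
  apm.ProcessStream(frame.data(), config, config, frame.data());
}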
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h
index 46c5f0efbe..2ea1a865c3 100644
--- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h
+++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h
@@ -13,6 +13,7 @@
 #include <memory>
 
+#include "absl/strings/string_view.h"
 #include "modules/audio_processing/include/aec_dump.h"
 #include "modules/audio_processing/include/audio_processing.h"
 #include "modules/audio_processing/include/audio_processing_statistics.h"
@@ -67,6 +68,27 @@ class MockEchoControl : public EchoControl {
   MOCK_METHOD(bool, ActiveProcessing, (), (const, override));
 };
 
+class MockEchoDetector : public EchoDetector {
+ public:
+  virtual ~MockEchoDetector() {}
+  MOCK_METHOD(void,
+              Initialize,
+              (int capture_sample_rate_hz,
+               int num_capture_channels,
+               int render_sample_rate_hz,
+               int num_render_channels),
+              (override));
+  MOCK_METHOD(void,
+              AnalyzeRenderAudio,
+              (rtc::ArrayView<const float> render_audio),
+              (override));
+  MOCK_METHOD(void,
+              AnalyzeCaptureAudio,
+              (rtc::ArrayView<const float> capture_audio),
+              (override));
+  MOCK_METHOD(Metrics, GetMetrics, (), (const, override));
+};
+
 class MockAudioProcessing : public AudioProcessing {
  public:
   MockAudioProcessing() {}
@@ -74,15 +96,6 @@ class MockAudioProcessing : public AudioProcessing {
   virtual ~MockAudioProcessing() {}
 
   MOCK_METHOD(int, Initialize, (), (override));
-  MOCK_METHOD(int,
-              Initialize,
-              (int capture_input_sample_rate_hz,
-               int capture_output_sample_rate_hz,
-               int render_sample_rate_hz,
-               ChannelLayout capture_input_layout,
-               ChannelLayout capture_output_layout,
-               ChannelLayout render_input_layout),
-              (override));
   MOCK_METHOD(int,
               Initialize,
               (const ProcessingConfig& processing_config),
@@ -140,7 +153,7 @@ class MockAudioProcessing : public AudioProcessing {
   MOCK_METHOD(int, recommended_stream_analog_level, (), (const, override));
   MOCK_METHOD(bool,
               CreateAndAttachAecDump,
-              (const std::string& file_name,
+              (absl::string_view file_name,
               int64_t max_log_size_bytes,
               rtc::TaskQueue* worker_queue),
              (override));
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.cc
index 445248b0bf..65d2167d37 100644
--- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.cc
+++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.cc
@@ -10,6 +10,7 @@
 #include "modules/audio_processing/logging/apm_data_dumper.h"
 
+#include "absl/strings/string_view.h"
 #include "rtc_base/strings/string_builder.h"
 
 // Check to verify that the define is properly set.
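The new MockEchoDetector above mirrors the EchoDetector interface for use in tests. A hypothetical gmock sketch of how it might be exercised; the ref-counting wrapper is needed because EchoDetector derives from rtc::RefCountInterface, and the builder wiring mentioned in the comment is an assumption rather than something this patch adds:

#include "api/scoped_refptr.h"
#include "modules/audio_processing/include/audio_processing.h"
#include "modules/audio_processing/include/mock_audio_processing.h"
#include "rtc_base/ref_counted_object.h"
#include "test/gmock.h"
#include "test/gtest.h"

TEST(MockEchoDetectorSketch, ReturnsInjectedMetrics) {
  // The mock is wrapped in RefCountedObject so it can live in a scoped_refptr.
  rtc::scoped_refptr<webrtc::test::MockEchoDetector> detector(
      new rtc::RefCountedObject<webrtc::test::MockEchoDetector>());
  webrtc::EchoDetector::Metrics metrics;
  metrics.echo_likelihood = 0.25;
  EXPECT_CALL(*detector, GetMetrics()).WillRepeatedly(testing::Return(metrics));
  // In a full test the detector would be injected before Create(), for example
  // via AudioProcessingBuilder().SetEchoDetector(detector); here the mock is
  // only exercised directly.
  EXPECT_EQ(0.25, *detector->GetMetrics().echo_likelihood);
}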
@@ -29,17 +30,16 @@ constexpr char kPathDelimiter = '\\'; constexpr char kPathDelimiter = '/'; #endif -std::string FormFileName(const char* output_dir, - const char* name, +std::string FormFileName(absl::string_view output_dir, + absl::string_view name, int instance_index, int reinit_index, - const std::string& suffix) { + absl::string_view suffix) { char buf[1024]; rtc::SimpleStringBuilder ss(buf); - const size_t output_dir_size = strlen(output_dir); - if (output_dir_size > 0) { + if (!output_dir.empty()) { ss << output_dir; - if (output_dir[output_dir_size - 1] != kPathDelimiter) { + if (output_dir.back() != kPathDelimiter) { ss << kPathDelimiter; } } @@ -64,7 +64,7 @@ bool ApmDataDumper::recording_activated_ = false; absl::optional ApmDataDumper::dump_set_to_use_; char ApmDataDumper::output_dir_[] = ""; -FILE* ApmDataDumper::GetRawFile(const char* name) { +FILE* ApmDataDumper::GetRawFile(absl::string_view name) { std::string filename = FormFileName(output_dir_, name, instance_index_, recording_set_index_, ".dat"); auto& f = raw_files_[filename]; @@ -75,7 +75,7 @@ FILE* ApmDataDumper::GetRawFile(const char* name) { return f.get(); } -WavWriter* ApmDataDumper::GetWavFile(const char* name, +WavWriter* ApmDataDumper::GetWavFile(absl::string_view name, int sample_rate_hz, int num_channels, WavFile::SampleFormat format) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.h index 9c2ac3be5d..4ab6baad83 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.h @@ -13,19 +13,20 @@ #include #include -#include -#include #if WEBRTC_APM_DEBUG_DUMP == 1 #include +#include #include #endif +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/array_view.h" #if WEBRTC_APM_DEBUG_DUMP == 1 #include "common_audio/wav_file.h" #include "rtc_base/checks.h" +#include "rtc_base/string_utils.h" #endif // Check to verify that the define is properly set. @@ -87,10 +88,10 @@ class ApmDataDumper { } // Set an optional output directory. - static void SetOutputDirectory(const std::string& output_dir) { + static void SetOutputDirectory(absl::string_view output_dir) { #if WEBRTC_APM_DEBUG_DUMP == 1 RTC_CHECK_LT(output_dir.size(), kOutputDirMaxLength); - strncpy(output_dir_, output_dir.c_str(), output_dir.size()); + rtc::strcpyn(output_dir_, output_dir.size(), output_dir); #endif } @@ -104,7 +105,9 @@ class ApmDataDumper { // Methods for performing dumping of data of various types into // various formats. 
- void DumpRaw(const char* name, double v, int dump_set = kDefaultDumpSet) { + void DumpRaw(absl::string_view name, + double v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -116,7 +119,7 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, + void DumpRaw(absl::string_view name, size_t v_length, const double* v, int dump_set = kDefaultDumpSet) { @@ -131,7 +134,7 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, + void DumpRaw(absl::string_view name, rtc::ArrayView v, int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 @@ -144,7 +147,9 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, float v, int dump_set = kDefaultDumpSet) { + void DumpRaw(absl::string_view name, + float v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -156,7 +161,7 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, + void DumpRaw(absl::string_view name, size_t v_length, const float* v, int dump_set = kDefaultDumpSet) { @@ -171,7 +176,7 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, + void DumpRaw(absl::string_view name, rtc::ArrayView v, int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 @@ -184,7 +189,7 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, bool v, int dump_set = kDefaultDumpSet) { + void DumpRaw(absl::string_view name, bool v, int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -195,7 +200,7 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, + void DumpRaw(absl::string_view name, size_t v_length, const bool* v, int dump_set = kDefaultDumpSet) { @@ -213,7 +218,7 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, + void DumpRaw(absl::string_view name, rtc::ArrayView v, int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 @@ -226,7 +231,9 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, int16_t v, int dump_set = kDefaultDumpSet) { + void DumpRaw(absl::string_view name, + int16_t v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -238,7 +245,7 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, + void DumpRaw(absl::string_view name, size_t v_length, const int16_t* v, int dump_set = kDefaultDumpSet) { @@ -253,7 +260,7 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, + void DumpRaw(absl::string_view name, rtc::ArrayView v, int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 @@ -266,7 +273,9 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, int32_t v, int dump_set = kDefaultDumpSet) { + void DumpRaw(absl::string_view name, + int32_t v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -278,7 +287,7 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, + void DumpRaw(absl::string_view name, size_t v_length, const int32_t* v, int dump_set = kDefaultDumpSet) { @@ -293,7 +302,9 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, size_t v, int dump_set = kDefaultDumpSet) { + void DumpRaw(absl::string_view name, + size_t v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ 
!= dump_set) return; @@ -305,7 +316,7 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, + void DumpRaw(absl::string_view name, size_t v_length, const size_t* v, int dump_set = kDefaultDumpSet) { @@ -320,7 +331,7 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, + void DumpRaw(absl::string_view name, rtc::ArrayView v, int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 @@ -333,7 +344,7 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, + void DumpRaw(absl::string_view name, rtc::ArrayView v, int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 @@ -344,7 +355,7 @@ class ApmDataDumper { #endif } - void DumpWav(const char* name, + void DumpWav(absl::string_view name, size_t v_length, const float* v, int sample_rate_hz, @@ -362,7 +373,7 @@ class ApmDataDumper { #endif } - void DumpWav(const char* name, + void DumpWav(absl::string_view name, rtc::ArrayView v, int sample_rate_hz, int num_channels, @@ -389,8 +400,8 @@ class ApmDataDumper { raw_files_; std::unordered_map> wav_files_; - FILE* GetRawFile(const char* name); - WavWriter* GetWavFile(const char* name, + FILE* GetRawFile(absl::string_view name); + WavWriter* GetWavFile(absl::string_view name, int sample_rate_hz, int num_channels, WavFile::SampleFormat format); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/optionally_built_submodule_creators.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/optionally_built_submodule_creators.cc index 62a1632566..cea5c837dc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/optionally_built_submodule_creators.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/optionally_built_submodule_creators.cc @@ -17,14 +17,19 @@ namespace webrtc { std::unique_ptr CreateTransientSuppressor( - const ApmSubmoduleCreationOverrides& overrides) { + const ApmSubmoduleCreationOverrides& overrides, + TransientSuppressor::VadMode vad_mode, + int sample_rate_hz, + int detection_rate_hz, + int num_channels) { #ifdef WEBRTC_EXCLUDE_TRANSIENT_SUPPRESSOR return nullptr; #else if (overrides.transient_suppression) { return nullptr; } - return std::make_unique(); + return std::make_unique( + vad_mode, sample_rate_hz, detection_rate_hz, num_channels); #endif } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/optionally_built_submodule_creators.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/optionally_built_submodule_creators.h index 7de337b277..1be2743986 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/optionally_built_submodule_creators.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/optionally_built_submodule_creators.h @@ -31,7 +31,11 @@ struct ApmSubmoduleCreationOverrides { // * WEBRTC_EXCLUDE_TRANSIENT_SUPPRESSOR is defined // * The corresponding override in `overrides` is enabled. 
std::unique_ptr CreateTransientSuppressor( - const ApmSubmoduleCreationOverrides& overrides); + const ApmSubmoduleCreationOverrides& overrides, + TransientSuppressor::VadMode vad_mode, + int sample_rate_hz, + int detection_rate_hz, + int num_channels); } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/residual_echo_detector.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/residual_echo_detector.cc index 618888361f..2a564fc233 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/residual_echo_detector.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/residual_echo_detector.cc @@ -14,9 +14,7 @@ #include #include "absl/types/optional.h" -#include "modules/audio_processing/audio_buffer.h" #include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "system_wrappers/include/metrics.h" @@ -42,11 +40,10 @@ constexpr size_t kAggregationBufferSize = 10 * 100; namespace webrtc { -int ResidualEchoDetector::instance_count_ = 0; +std::atomic ResidualEchoDetector::instance_count_(0); ResidualEchoDetector::ResidualEchoDetector() - : data_dumper_( - new ApmDataDumper(rtc::AtomicOps::Increment(&instance_count_))), + : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), render_buffer_(kRenderBufferSize), render_power_(kLookbackFrames), render_power_mean_(kLookbackFrames), @@ -199,13 +196,6 @@ void ResidualEchoDetector::Initialize(int /*capture_sample_rate_hz*/, reliability_ = 0.f; } -void EchoDetector::PackRenderAudioBuffer(AudioBuffer* audio, - std::vector* packed_buffer) { - packed_buffer->clear(); - packed_buffer->insert(packed_buffer->end(), audio->channels()[0], - audio->channels()[0] + audio->num_frames()); -} - EchoDetector::Metrics ResidualEchoDetector::GetMetrics() const { EchoDetector::Metrics metrics; metrics.echo_likelihood = echo_likelihood_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/residual_echo_detector.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/residual_echo_detector.h index 44252af655..ac554b17c4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/residual_echo_detector.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/residual_echo_detector.h @@ -11,6 +11,7 @@ #ifndef MODULES_AUDIO_PROCESSING_RESIDUAL_ECHO_DETECTOR_H_ #define MODULES_AUDIO_PROCESSING_RESIDUAL_ECHO_DETECTOR_H_ +#include #include #include "api/array_view.h" @@ -49,7 +50,7 @@ class ResidualEchoDetector : public EchoDetector { EchoDetector::Metrics GetMetrics() const override; private: - static int instance_count_; + static std::atomic instance_count_; std::unique_ptr data_dumper_; // Keep track if the `Process` function has been previously called. bool first_process_call_ = true; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/rms_level.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/rms_level.cc index 6992a15194..b0a45cb403 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/rms_level.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/rms_level.cc @@ -101,8 +101,18 @@ void RmsLevel::AnalyzeMuted(size_t length) { } int RmsLevel::Average() { - int rms = (sample_count_ == 0) ? RmsLevel::kMinLevelDb - : ComputeRms(sum_square_ / sample_count_); + const bool have_samples = (sample_count_ != 0); + int rms = have_samples ? 
ComputeRms(sum_square_ / sample_count_) + : RmsLevel::kMinLevelDb; + + // To ensure that kMinLevelDb represents digital silence (muted audio + // sources) we'll check here if the sum_square is actually 0. If it's not + // we'll bump up the return value to `kInaudibleButNotMuted`. + // https://datatracker.ietf.org/doc/html/rfc6464 + if (have_samples && rms == RmsLevel::kMinLevelDb && sum_square_ != 0.0f) { + rms = kInaudibleButNotMuted; + } + Reset(); return rms; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/rms_level.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/rms_level.h index 4955d1b308..fbece19ecd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/rms_level.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/rms_level.h @@ -34,7 +34,7 @@ class RmsLevel { int peak; }; - enum : int { kMinLevelDb = 127 }; + enum : int { kMinLevelDb = 127, kInaudibleButNotMuted = 126 }; RmsLevel(); ~RmsLevel(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/three_band_filter_bank.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/three_band_filter_bank.cc index fc665efcc1..bd1c50477a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/three_band_filter_bank.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/three_band_filter_bank.cc @@ -211,8 +211,9 @@ void ThreeBandFilterBank::Analysis( // Band and modulate the output. for (int band = 0; band < ThreeBandFilterBank::kNumBands; ++band) { + float* out_band = out[band].data(); for (int n = 0; n < kSplitBandSize; ++n) { - out[band][n] += dct_modulation[band] * out_subsampled[n]; + out_band[n] += dct_modulation[band] * out_subsampled[n]; } } } @@ -254,8 +255,9 @@ void ThreeBandFilterBank::Synthesis( std::fill(in_subsampled.begin(), in_subsampled.end(), 0.f); for (int band = 0; band < ThreeBandFilterBank::kNumBands; ++band) { RTC_DCHECK_EQ(in[band].size(), kSplitBandSize); + const float* in_band = in[band].data(); for (int n = 0; n < kSplitBandSize; ++n) { - in_subsampled[n] += dct_modulation[band] * in[band][n]; + in_subsampled[n] += dct_modulation[band] * in_band[n]; } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/transient_suppressor.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/transient_suppressor.h index 982ddbd0ec..ecb3c3baab 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/transient_suppressor.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/transient_suppressor.h @@ -11,9 +11,7 @@ #ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_H_ #define MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_H_ -#include -#include -#include +#include namespace webrtc { @@ -21,11 +19,26 @@ namespace webrtc { // restoration algorithm that attenuates unexpected spikes in the spectrum. class TransientSuppressor { public: + // Type of VAD used by the caller to compute the `voice_probability` argument + // `Suppress()`. + enum class VadMode { + // By default, `TransientSuppressor` assumes that `voice_probability` is + // computed by `AgcManagerDirect`. + kDefault = 0, + // Use this mode when `TransientSuppressor` must assume that + // `voice_probability` is computed by the RNN VAD. + kRnnVad, + // Use this mode to let `TransientSuppressor::Suppressor()` ignore + // `voice_probability` and behave as if voice information is unavailable + // (regardless of the passed value). 
+ kNoVad, + }; + virtual ~TransientSuppressor() {} - virtual int Initialize(int sample_rate_hz, - int detector_rate_hz, - int num_channels) = 0; + virtual void Initialize(int sample_rate_hz, + int detector_rate_hz, + int num_channels) = 0; // Processes a `data` chunk, and returns it with keystrokes suppressed from // it. The float format is assumed to be int16 ranged. If there are more than @@ -43,16 +56,18 @@ class TransientSuppressor { // of audio. If voice information is not available, `voice_probability` must // always be set to 1. // `key_pressed` determines if a key was pressed on this audio chunk. - // Returns 0 on success and -1 otherwise. - virtual int Suppress(float* data, - size_t data_length, - int num_channels, - const float* detection_data, - size_t detection_length, - const float* reference_data, - size_t reference_length, - float voice_probability, - bool key_pressed) = 0; + // Returns a delayed version of `voice_probability` according to the + // algorithmic delay introduced by this method. In this way, the modified + // `data` and the returned voice probability will be temporally aligned. + virtual float Suppress(float* data, + size_t data_length, + int num_channels, + const float* detection_data, + size_t detection_length, + const float* reference_data, + size_t reference_length, + float voice_probability, + bool key_pressed) = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/transient_suppressor_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/transient_suppressor_impl.cc index f8161f6428..90428464e3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/transient_suppressor_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/transient_suppressor_impl.cc @@ -18,6 +18,7 @@ #include #include #include +#include #include "common_audio/include/audio_util.h" #include "common_audio/signal_processing/include/signal_processing_library.h" @@ -32,7 +33,6 @@ namespace webrtc { static const float kMeanIIRCoefficient = 0.5f; -static const float kVoiceThreshold = 0.02f; // TODO(aluebs): Check if these values work also for 48kHz. 
static const size_t kMinVoiceBin = 3; @@ -44,10 +44,27 @@ float ComplexMagnitude(float a, float b) { return std::abs(a) + std::abs(b); } +std::string GetVadModeLabel(TransientSuppressor::VadMode vad_mode) { + switch (vad_mode) { + case TransientSuppressor::VadMode::kDefault: + return "default"; + case TransientSuppressor::VadMode::kRnnVad: + return "RNN VAD"; + case TransientSuppressor::VadMode::kNoVad: + return "no VAD"; + } +} + } // namespace -TransientSuppressorImpl::TransientSuppressorImpl() - : data_length_(0), +TransientSuppressorImpl::TransientSuppressorImpl(VadMode vad_mode, + int sample_rate_hz, + int detector_rate_hz, + int num_channels) + : vad_mode_(vad_mode), + voice_probability_delay_unit_(/*delay_num_samples=*/0, sample_rate_hz), + analyzed_audio_is_silent_(false), + data_length_(0), detection_length_(0), analysis_length_(0), buffer_delay_(0), @@ -62,13 +79,26 @@ TransientSuppressorImpl::TransientSuppressorImpl() use_hard_restoration_(false), chunks_since_voice_change_(0), seed_(182), - using_reference_(false) {} + using_reference_(false) { + RTC_LOG(LS_INFO) << "VAD mode: " << GetVadModeLabel(vad_mode_); + Initialize(sample_rate_hz, detector_rate_hz, num_channels); +} TransientSuppressorImpl::~TransientSuppressorImpl() {} -int TransientSuppressorImpl::Initialize(int sample_rate_hz, - int detection_rate_hz, - int num_channels) { +void TransientSuppressorImpl::Initialize(int sample_rate_hz, + int detection_rate_hz, + int num_channels) { + RTC_DCHECK(sample_rate_hz == ts::kSampleRate8kHz || + sample_rate_hz == ts::kSampleRate16kHz || + sample_rate_hz == ts::kSampleRate32kHz || + sample_rate_hz == ts::kSampleRate48kHz); + RTC_DCHECK(detection_rate_hz == ts::kSampleRate8kHz || + detection_rate_hz == ts::kSampleRate16kHz || + detection_rate_hz == ts::kSampleRate32kHz || + detection_rate_hz == ts::kSampleRate48kHz); + RTC_DCHECK_GT(num_channels, 0); + switch (sample_rate_hz) { case ts::kSampleRate8kHz: analysis_length_ = 128u; @@ -87,26 +117,18 @@ int TransientSuppressorImpl::Initialize(int sample_rate_hz, window_ = kBlocks480w1024; break; default: - return -1; - } - if (detection_rate_hz != ts::kSampleRate8kHz && - detection_rate_hz != ts::kSampleRate16kHz && - detection_rate_hz != ts::kSampleRate32kHz && - detection_rate_hz != ts::kSampleRate48kHz) { - return -1; - } - if (num_channels <= 0) { - return -1; + RTC_DCHECK_NOTREACHED(); + return; } detector_.reset(new TransientDetector(detection_rate_hz)); data_length_ = sample_rate_hz * ts::kChunkSizeMs / 1000; - if (data_length_ > analysis_length_) { - RTC_DCHECK_NOTREACHED(); - return -1; - } + RTC_DCHECK_LE(data_length_, analysis_length_); buffer_delay_ = analysis_length_ - data_length_; + voice_probability_delay_unit_.Initialize(/*delay_num_samples=*/buffer_delay_, + sample_rate_hz); + complex_analysis_length_ = analysis_length_ / 2 + 1; RTC_DCHECK_GE(complex_analysis_length_, kMaxVoiceBin); num_channels_ = num_channels; @@ -155,28 +177,28 @@ int TransientSuppressorImpl::Initialize(int sample_rate_hz, chunks_since_voice_change_ = 0; seed_ = 182; using_reference_ = false; - return 0; } -int TransientSuppressorImpl::Suppress(float* data, - size_t data_length, - int num_channels, - const float* detection_data, - size_t detection_length, - const float* reference_data, - size_t reference_length, - float voice_probability, - bool key_pressed) { +float TransientSuppressorImpl::Suppress(float* data, + size_t data_length, + int num_channels, + const float* detection_data, + size_t detection_length, + const float* reference_data, + 
size_t reference_length, + float voice_probability, + bool key_pressed) { if (!data || data_length != data_length_ || num_channels != num_channels_ || detection_length != detection_length_ || voice_probability < 0 || voice_probability > 1) { - return -1; + // The audio is not modified, so the voice probability is returned as is + // (delay not applied). + return voice_probability; } UpdateKeypress(key_pressed); UpdateBuffers(data); - int result = 0; if (detection_enabled_) { UpdateRestoration(voice_probability); @@ -189,7 +211,9 @@ int TransientSuppressorImpl::Suppress(float* data, float detector_result = detector_->Detect(detection_data, detection_length, reference_data, reference_length); if (detector_result < 0) { - return -1; + // The audio is not modified, so the voice probability is returned as is + // (delay not applied). + return voice_probability; } using_reference_ = detector_->using_reference(); @@ -219,7 +243,9 @@ int TransientSuppressorImpl::Suppress(float* data, : &in_buffer_[i * analysis_length_], data_length_ * sizeof(*data)); } - return result; + + // The audio has been modified, return the delayed voice probability. + return voice_probability_delay_unit_.Delay(voice_probability); } // This should only be called when detection is enabled. UpdateBuffers() must @@ -304,16 +330,34 @@ void TransientSuppressorImpl::UpdateKeypress(bool key_pressed) { } void TransientSuppressorImpl::UpdateRestoration(float voice_probability) { - const int kHardRestorationOffsetDelay = 3; - const int kHardRestorationOnsetDelay = 80; - - bool not_voiced = voice_probability < kVoiceThreshold; + bool not_voiced; + switch (vad_mode_) { + case TransientSuppressor::VadMode::kDefault: { + constexpr float kVoiceThreshold = 0.02f; + not_voiced = voice_probability < kVoiceThreshold; + break; + } + case TransientSuppressor::VadMode::kRnnVad: { + constexpr float kVoiceThreshold = 0.7f; + not_voiced = voice_probability < kVoiceThreshold; + break; + } + case TransientSuppressor::VadMode::kNoVad: + // Always assume that voice is detected. + not_voiced = false; + break; + } if (not_voiced == use_hard_restoration_) { chunks_since_voice_change_ = 0; } else { ++chunks_since_voice_change_; + // Number of 10 ms frames to wait to transition to and from hard + // restoration. + constexpr int kHardRestorationOffsetDelay = 3; + constexpr int kHardRestorationOnsetDelay = 80; + if ((use_hard_restoration_ && chunks_since_voice_change_ > kHardRestorationOffsetDelay) || (!use_hard_restoration_ && diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/transient_suppressor_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/transient_suppressor_impl.h index fa8186eed9..4005a16b0a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/transient_suppressor_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/transient_suppressor_impl.h @@ -17,6 +17,7 @@ #include #include "modules/audio_processing/transient/transient_suppressor.h" +#include "modules/audio_processing/transient/voice_probability_delay_unit.h" #include "rtc_base/gtest_prod_util.h" namespace webrtc { @@ -27,42 +28,28 @@ class TransientDetector; // restoration algorithm that attenuates unexpected spikes in the spectrum. 
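Note the changed contract in the hunk above: on the early-exit paths Suppress() hands back the input voice probability untouched, while on success it returns the probability delayed to line up with the modified audio. A minimal usage sketch, not part of the patch, assuming the detection rate equals the capture rate so one chunk length fits both; as the original comment describes, multi-channel data is concatenated channel after channel.

#include <cstddef>

#include "modules/audio_processing/transient/transient_suppressor.h"

// Runs one 10 ms chunk through the suppressor and forwards the re-aligned
// voice probability instead of the raw VAD output.
float RunSuppressor(webrtc::TransientSuppressor& ts,
                    float* chunk,         // num_channels blocks of
                    size_t chunk_length,  // `chunk_length` samples each.
                    int num_channels,
                    float vad_probability,
                    bool key_pressed) {
  const float delayed_vad = ts.Suppress(
      chunk, chunk_length, num_channels,
      /*detection_data=*/nullptr,         // Fall back to `chunk` itself.
      /*detection_length=*/chunk_length,  // Valid if the rates match (assumed).
      /*reference_data=*/nullptr, /*reference_length=*/0,
      vad_probability, key_pressed);
  // Temporally aligned with the (possibly modified) `chunk`; the voicing
  // threshold applied internally depends on the VadMode chosen at
  // construction (see UpdateRestoration() below).
  return delayed_vad;
}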
class TransientSuppressorImpl : public TransientSuppressor { public: - TransientSuppressorImpl(); + TransientSuppressorImpl(VadMode vad_mode, + int sample_rate_hz, + int detector_rate_hz, + int num_channels); ~TransientSuppressorImpl() override; - int Initialize(int sample_rate_hz, - int detector_rate_hz, - int num_channels) override; - - // Processes a `data` chunk, and returns it with keystrokes suppressed from - // it. The float format is assumed to be int16 ranged. If there are more than - // one channel, the chunks are concatenated one after the other in `data`. - // `data_length` must be equal to `data_length_`. - // `num_channels` must be equal to `num_channels_`. - // A sub-band, ideally the higher, can be used as `detection_data`. If it is - // NULL, `data` is used for the detection too. The `detection_data` is always - // assumed mono. - // If a reference signal (e.g. keyboard microphone) is available, it can be - // passed in as `reference_data`. It is assumed mono and must have the same - // length as `data`. NULL is accepted if unavailable. - // This suppressor performs better if voice information is available. - // `voice_probability` is the probability of voice being present in this chunk - // of audio. If voice information is not available, `voice_probability` must - // always be set to 1. - // `key_pressed` determines if a key was pressed on this audio chunk. - // Returns 0 on success and -1 otherwise. - int Suppress(float* data, - size_t data_length, - int num_channels, - const float* detection_data, - size_t detection_length, - const float* reference_data, - size_t reference_length, - float voice_probability, - bool key_pressed) override; + void Initialize(int sample_rate_hz, + int detector_rate_hz, + int num_channels) override; + + float Suppress(float* data, + size_t data_length, + int num_channels, + const float* detection_data, + size_t detection_length, + const float* reference_data, + size_t reference_length, + float voice_probability, + bool key_pressed) override; private: - FRIEND_TEST_ALL_PREFIXES(TransientSuppressorImplTest, + FRIEND_TEST_ALL_PREFIXES(TransientSuppressorVadModeParametrization, TypingDetectionLogicWorksAsExpectedForMono); void Suppress(float* in_ptr, float* spectral_mean, float* out_ptr); @@ -74,8 +61,13 @@ class TransientSuppressorImpl : public TransientSuppressor { void HardRestoration(float* spectral_mean); void SoftRestoration(float* spectral_mean); + const VadMode vad_mode_; + VoiceProbabilityDelayUnit voice_probability_delay_unit_; + std::unique_ptr detector_; + bool analyzed_audio_is_silent_; + size_t data_length_; size_t detection_length_; size_t analysis_length_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/voice_probability_delay_unit.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/voice_probability_delay_unit.cc new file mode 100644 index 0000000000..27b2b42b38 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/voice_probability_delay_unit.cc @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/transient/voice_probability_delay_unit.h" + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +VoiceProbabilityDelayUnit::VoiceProbabilityDelayUnit(int delay_num_samples, + int sample_rate_hz) { + Initialize(delay_num_samples, sample_rate_hz); +} + +void VoiceProbabilityDelayUnit::Initialize(int delay_num_samples, + int sample_rate_hz) { + RTC_DCHECK_GE(delay_num_samples, 0); + RTC_DCHECK_LE(delay_num_samples, sample_rate_hz / 50) + << "The implementation does not support delays greater than 20 ms."; + int frame_size = rtc::CheckedDivExact(sample_rate_hz, 100); // 10 ms. + if (delay_num_samples <= frame_size) { + weights_[0] = 0.0f; + weights_[1] = static_cast(delay_num_samples) / frame_size; + weights_[2] = + static_cast(frame_size - delay_num_samples) / frame_size; + } else { + delay_num_samples -= frame_size; + weights_[0] = static_cast(delay_num_samples) / frame_size; + weights_[1] = + static_cast(frame_size - delay_num_samples) / frame_size; + weights_[2] = 0.0f; + } + + // Resets the delay unit. + last_probabilities_.fill(0.0f); +} + +float VoiceProbabilityDelayUnit::Delay(float voice_probability) { + float weighted_probability = weights_[0] * last_probabilities_[0] + + weights_[1] * last_probabilities_[1] + + weights_[2] * voice_probability; + last_probabilities_[0] = last_probabilities_[1]; + last_probabilities_[1] = voice_probability; + return weighted_probability; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/voice_probability_delay_unit.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/voice_probability_delay_unit.h new file mode 100644 index 0000000000..05961663e3 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/transient/voice_probability_delay_unit.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_VOICE_PROBABILITY_DELAY_UNIT_H_ +#define MODULES_AUDIO_PROCESSING_TRANSIENT_VOICE_PROBABILITY_DELAY_UNIT_H_ + +#include + +namespace webrtc { + +// Iteratively produces a sequence of delayed voice probability values given a +// fixed delay between 0 and 20 ms and given a sequence of voice probability +// values observed every 10 ms. Supports fractional delays, that are delays +// which are not a multiple integer of 10 ms. Applies interpolation with +// fractional delays; otherwise, returns a previously observed value according +// to the given fixed delay. +class VoiceProbabilityDelayUnit { + public: + // Ctor. `delay_num_samples` is the delay in number of samples and it must be + // non-negative and less than 20 ms. + VoiceProbabilityDelayUnit(int delay_num_samples, int sample_rate_hz); + + // Handles delay and sample rate changes and resets the delay unit. + void Initialize(int delay_num_samples, int sample_rate_hz); + + // Observes `voice_probability` and returns a delayed voice probability. 
+ float Delay(float voice_probability); + + private: + std::array weights_; + std::array last_probabilities_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_VOICE_PROBABILITY_DELAY_UNIT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/typing_detection.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/typing_detection.cc deleted file mode 100644 index e725b264ee..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/typing_detection.cc +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/typing_detection.h" - -namespace webrtc { - -TypingDetection::TypingDetection() - : time_active_(0), - time_since_last_typing_(0), - penalty_counter_(0), - counter_since_last_detection_update_(0), - detection_to_report_(false), - new_detection_to_report_(false), - time_window_(10), - cost_per_typing_(100), - reporting_threshold_(300), - penalty_decay_(1), - type_event_delay_(2), - report_detection_update_period_(1) {} - -TypingDetection::~TypingDetection() {} - -bool TypingDetection::Process(bool key_pressed, bool vad_activity) { - if (vad_activity) - time_active_++; - else - time_active_ = 0; - - // Keep track if time since last typing event - if (key_pressed) - time_since_last_typing_ = 0; - else - ++time_since_last_typing_; - - if (time_since_last_typing_ < type_event_delay_ && vad_activity && - time_active_ < time_window_) { - penalty_counter_ += cost_per_typing_; - if (penalty_counter_ > reporting_threshold_) - new_detection_to_report_ = true; - } - - if (penalty_counter_ > 0) - penalty_counter_ -= penalty_decay_; - - if (++counter_since_last_detection_update_ == - report_detection_update_period_) { - detection_to_report_ = new_detection_to_report_; - new_detection_to_report_ = false; - counter_since_last_detection_update_ = 0; - } - - return detection_to_report_; -} - -int TypingDetection::TimeSinceLastDetectionInSeconds() { - // Round to whole seconds. - return (time_since_last_typing_ + 50) / 100; -} - -void TypingDetection::SetParameters(int time_window, - int cost_per_typing, - int reporting_threshold, - int penalty_decay, - int type_event_delay, - int report_detection_update_period) { - if (time_window) - time_window_ = time_window; - - if (cost_per_typing) - cost_per_typing_ = cost_per_typing; - - if (reporting_threshold) - reporting_threshold_ = reporting_threshold; - - if (penalty_decay) - penalty_decay_ = penalty_decay; - - if (type_event_delay) - type_event_delay_ = type_event_delay; - - if (report_detection_update_period) - report_detection_update_period_ = report_detection_update_period; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/typing_detection.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/typing_detection.h deleted file mode 100644 index 9d96583b98..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/typing_detection.h +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. 
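A small worked example of the interpolation above, not part of the patch: at 48 kHz a 5 ms delay is exactly half of the 480-sample 10 ms frame, so Initialize() produces weights {0.0, 0.5, 0.5} and the output is the average of the previous and the current observation.

#include "modules/audio_processing/transient/voice_probability_delay_unit.h"

void VoiceProbabilityDelayExample() {
  // 240 samples = 5 ms at 48 kHz, i.e. half a 10 ms frame.
  webrtc::VoiceProbabilityDelayUnit delay_unit(/*delay_num_samples=*/240,
                                               /*sample_rate_hz=*/48000);
  float d0 = delay_unit.Delay(1.0f);  // 0.5 * 0.0 + 0.5 * 1.0 = 0.5
  float d1 = delay_unit.Delay(1.0f);  // 0.5 * 1.0 + 0.5 * 1.0 = 1.0
  (void)d0;
  (void)d1;
}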
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_ -#define MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_ - -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -class RTC_EXPORT TypingDetection { - public: - TypingDetection(); - virtual ~TypingDetection(); - - // Run the detection algortihm. Shall be called every 10 ms. Returns true if - // typing is detected, or false if not, based on the update period as set with - // SetParameters(). See `report_detection_update_period_` description below. - bool Process(bool key_pressed, bool vad_activity); - - // Gets the time in seconds since the last detection. - int TimeSinceLastDetectionInSeconds(); - - // Sets the algorithm parameters. A parameter value of 0 leaves it unchanged. - // See the correspondning member variables below for descriptions. - void SetParameters(int time_window, - int cost_per_typing, - int reporting_threshold, - int penalty_decay, - int type_event_delay, - int report_detection_update_period); - - private: - int time_active_; - int time_since_last_typing_; - int penalty_counter_; - - // Counter since last time the detection status reported by Process() was - // updated. See also `report_detection_update_period_`. - int counter_since_last_detection_update_; - - // The detection status to report. Updated every - // `report_detection_update_period_` call to Process(). - bool detection_to_report_; - - // What `detection_to_report_` should be set to next time it is updated. - bool new_detection_to_report_; - - // Settable threshold values. - - // Number of 10 ms slots accepted to count as a hit. - int time_window_; - - // Penalty added for a typing + activity coincide. - int cost_per_typing_; - - // Threshold for `penalty_counter_`. - int reporting_threshold_; - - // How much we reduce `penalty_counter_` every 10 ms. - int penalty_decay_; - - // How old typing events we allow. - int type_event_delay_; - - // Settable update period. - - // Number of 10 ms slots between each update of the detection status returned - // by Process(). This inertia added to the algorithm is usually desirable and - // provided so that consumers of the class don't have to implement that - // themselves if they don't wish. - // If set to 1, each call to Process() will return the detection status for - // that 10 ms slot. - // If set to N (where N > 1), the detection status returned from Process() - // will remain the same until Process() has been called N times. Then, if none - // of the last N calls to Process() has detected typing for each respective - // 10 ms slot, Process() will return false. If at least one of the last N - // calls has detected typing, Process() will return true. And that returned - // status will then remain the same until the next N calls have been done. 
- int report_detection_update_period_; -}; - -} // namespace webrtc - -#endif // #ifndef MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/utility/cascaded_biquad_filter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/utility/cascaded_biquad_filter.cc index 08b9464387..0d236ce0be 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/utility/cascaded_biquad_filter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/utility/cascaded_biquad_filter.cc @@ -99,19 +99,28 @@ void CascadedBiQuadFilter::ApplyBiQuad(rtc::ArrayView x, rtc::ArrayView y, CascadedBiQuadFilter::BiQuad* biquad) { RTC_DCHECK_EQ(x.size(), y.size()); - const auto* c_b = biquad->coefficients.b; - const auto* c_a = biquad->coefficients.a; - auto* m_x = biquad->x; - auto* m_y = biquad->y; + const float c_a_0 = biquad->coefficients.a[0]; + const float c_a_1 = biquad->coefficients.a[1]; + const float c_b_0 = biquad->coefficients.b[0]; + const float c_b_1 = biquad->coefficients.b[1]; + const float c_b_2 = biquad->coefficients.b[2]; + float m_x_0 = biquad->x[0]; + float m_x_1 = biquad->x[1]; + float m_y_0 = biquad->y[0]; + float m_y_1 = biquad->y[1]; for (size_t k = 0; k < x.size(); ++k) { const float tmp = x[k]; - y[k] = c_b[0] * tmp + c_b[1] * m_x[0] + c_b[2] * m_x[1] - c_a[0] * m_y[0] - - c_a[1] * m_y[1]; - m_x[1] = m_x[0]; - m_x[0] = tmp; - m_y[1] = m_y[0]; - m_y[0] = y[k]; + y[k] = c_b_0 * tmp + c_b_1 * m_x_0 + c_b_2 * m_x_1 - c_a_0 * m_y_0 - + c_a_1 * m_y_1; + m_x_1 = m_x_0; + m_x_0 = tmp; + m_y_1 = m_y_0; + m_y_0 = y[k]; } + biquad->x[0] = m_x_0; + biquad->x[1] = m_x_1; + biquad->y[0] = m_y_0; + biquad->y[1] = m_y_1; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.cc index 521a8a0997..3b1409cc0b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.cc @@ -21,8 +21,8 @@ namespace webrtc { // Only bit `kBandFirst` through bit `kBandLast` are processed and // `kBandFirst` - `kBandLast` must be < 32. -enum { kBandFirst = 12 }; -enum { kBandLast = 43 }; +constexpr int kBandFirst = 12; +constexpr int kBandLast = 43; static __inline uint32_t SetBit(uint32_t in, int pos) { uint32_t mask = (1 << pos); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/vad_audio_proc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/vad_audio_proc.h index 4a71ce3800..cbdd707129 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/vad_audio_proc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/vad_audio_proc.h @@ -35,7 +35,7 @@ class VadAudioProc { size_t length, AudioFeatures* audio_features); - static const size_t kDftSize = 512; + static constexpr size_t kDftSize = 512; private: void PitchAnalysis(double* pitch_gains, double* pitch_lags_hz, size_t length); @@ -51,28 +51,24 @@ class VadAudioProc { // For every 30 ms we compute 3 spectral peak there for 3 LPC analysis. // LPC is computed over 15 ms of windowed audio. For every 10 ms sub-frame // we need 5 ms of past signal to create the input of LPC analysis. 
- enum : size_t { - kNumPastSignalSamples = static_cast(kSampleRateHz / 200) - }; + static constexpr size_t kNumPastSignalSamples = + static_cast(kSampleRateHz / 200); // TODO(turajs): maybe defining this at a higher level (maybe enum) so that // all the code recognize it as "no-error." - enum : int { kNoError = 0 }; - - enum : size_t { kNum10msSubframes = 3 }; - enum : size_t { - kNumSubframeSamples = static_cast(kSampleRateHz / 100) - }; - enum : size_t { - // Samples in 30 ms @ given sampling rate. - kNumSamplesToProcess = kNum10msSubframes * kNumSubframeSamples - }; - enum : size_t { - kBufferLength = kNumPastSignalSamples + kNumSamplesToProcess - }; - enum : size_t { kIpLength = kDftSize >> 1 }; - enum : size_t { kWLength = kDftSize >> 1 }; - enum : size_t { kLpcOrder = 16 }; + static constexpr int kNoError = 0; + + static constexpr size_t kNum10msSubframes = 3; + static constexpr size_t kNumSubframeSamples = + static_cast(kSampleRateHz / 100); + // Samples in 30 ms @ given sampling rate. + static constexpr size_t kNumSamplesToProcess = + size_t{kNum10msSubframes} * kNumSubframeSamples; + static constexpr size_t kBufferLength = + size_t{kNumPastSignalSamples} + kNumSamplesToProcess; + static constexpr size_t kIpLength = kDftSize >> 1; + static constexpr size_t kWLength = kDftSize >> 1; + static constexpr size_t kLpcOrder = 16; size_t ip_[kIpLength]; float w_fft_[kWLength]; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/vad_audio_proc_internal.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/vad_audio_proc_internal.h index 915524f474..93589affe8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/vad_audio_proc_internal.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/vad_audio_proc_internal.h @@ -11,6 +11,8 @@ #ifndef MODULES_AUDIO_PROCESSING_VAD_VAD_AUDIO_PROC_INTERNAL_H_ #define MODULES_AUDIO_PROCESSING_VAD_VAD_AUDIO_PROC_INTERNAL_H_ +#include + namespace webrtc { // These values should match MATLAB counterparts for unit-tests to pass. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.cc index ce4d46b9ae..02023d6a72 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.cc @@ -38,6 +38,7 @@ void VoiceActivityDetector::ProcessChunk(const int16_t* audio, size_t length, int sample_rate_hz) { RTC_DCHECK_EQ(length, sample_rate_hz / 100); + // TODO(bugs.webrtc.org/7494): Remove resampling and force 16 kHz audio. // Resample to the required rate. const int16_t* resampled_ptr = audio; if (sample_rate_hz != kSampleRateHz) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.h index a19883d51c..92b9a8c208 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.h @@ -33,6 +33,8 @@ class VoiceActivityDetector { ~VoiceActivityDetector(); // Processes each audio chunk and estimates the voice probability. + // TODO(bugs.webrtc.org/7494): Switch to rtc::ArrayView and remove + // `sample_rate_hz`. 
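As the DCHECK in ProcessChunk() above spells out, the detector consumes exactly 10 ms of mono audio per call (length == sample_rate_hz / 100) and, per the TODO, still resamples internally when the input is not at the VAD's native rate (the TODO suggests 16 kHz). A minimal sketch, not part of the patch; it assumes the detector's plain default constructor, which is not shown in this hunk.

#include <array>
#include <cstdint>

#include "modules/audio_processing/vad/voice_activity_detector.h"

void VadChunkExample() {
  webrtc::VoiceActivityDetector vad;  // Assumed default-constructible.
  // Exactly 10 ms of mono audio: length must equal sample_rate_hz / 100.
  std::array<int16_t, 480> chunk{};   // 10 ms at 48 kHz, silence here.
  vad.ProcessChunk(chunk.data(), chunk.size(), /*sample_rate_hz=*/48000);
}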
void ProcessChunk(const int16_t* audio, size_t length, int sample_rate_hz); // Returns a vector of voice probabilities for each chunk. It can be empty for diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/voice_detection.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/voice_detection.cc deleted file mode 100644 index 1a633e2286..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/voice_detection.cc +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/voice_detection.h" - -#include "common_audio/vad/include/webrtc_vad.h" -#include "modules/audio_processing/audio_buffer.h" -#include "rtc_base/checks.h" - -namespace webrtc { -class VoiceDetection::Vad { - public: - Vad() { - state_ = WebRtcVad_Create(); - RTC_CHECK(state_); - int error = WebRtcVad_Init(state_); - RTC_DCHECK_EQ(0, error); - } - ~Vad() { WebRtcVad_Free(state_); } - - Vad(Vad&) = delete; - Vad& operator=(Vad&) = delete; - - VadInst* state() { return state_; } - - private: - VadInst* state_ = nullptr; -}; - -VoiceDetection::VoiceDetection(int sample_rate_hz, Likelihood likelihood) - : sample_rate_hz_(sample_rate_hz), - frame_size_samples_(static_cast(sample_rate_hz_ / 100)), - likelihood_(likelihood), - vad_(new Vad()) { - int mode = 2; - switch (likelihood) { - case VoiceDetection::kVeryLowLikelihood: - mode = 3; - break; - case VoiceDetection::kLowLikelihood: - mode = 2; - break; - case VoiceDetection::kModerateLikelihood: - mode = 1; - break; - case VoiceDetection::kHighLikelihood: - mode = 0; - break; - default: - RTC_DCHECK_NOTREACHED(); - break; - } - int error = WebRtcVad_set_mode(vad_->state(), mode); - RTC_DCHECK_EQ(0, error); -} - -VoiceDetection::~VoiceDetection() {} - -bool VoiceDetection::ProcessCaptureAudio(AudioBuffer* audio) { - RTC_DCHECK_GE(AudioBuffer::kMaxSplitFrameLength, - audio->num_frames_per_band()); - std::array mixed_low_pass_data; - rtc::ArrayView mixed_low_pass(mixed_low_pass_data.data(), - audio->num_frames_per_band()); - if (audio->num_channels() == 1) { - FloatS16ToS16(audio->split_bands_const(0)[kBand0To8kHz], - audio->num_frames_per_band(), mixed_low_pass_data.data()); - } else { - const int num_channels = static_cast(audio->num_channels()); - for (size_t i = 0; i < audio->num_frames_per_band(); ++i) { - int32_t value = - FloatS16ToS16(audio->split_channels_const(kBand0To8kHz)[0][i]); - for (int j = 1; j < num_channels; ++j) { - value += FloatS16ToS16(audio->split_channels_const(kBand0To8kHz)[j][i]); - } - mixed_low_pass_data[i] = value / num_channels; - } - } - - int vad_ret = WebRtcVad_Process(vad_->state(), sample_rate_hz_, - mixed_low_pass.data(), frame_size_samples_); - RTC_DCHECK(vad_ret == 0 || vad_ret == 1); - return vad_ret == 0 ? 
false : true; -} -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/voice_detection.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/voice_detection.h deleted file mode 100644 index 79d44e647c..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/voice_detection.h +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_VOICE_DETECTION_H_ -#define MODULES_AUDIO_PROCESSING_VOICE_DETECTION_H_ - -#include - -#include - -#include "modules/audio_processing/include/audio_processing.h" - -namespace webrtc { - -class AudioBuffer; - -// The voice activity detection (VAD) component analyzes the stream to -// determine if voice is present. -class VoiceDetection { - public: - // Specifies the likelihood that a frame will be declared to contain voice. - // A higher value makes it more likely that speech will not be clipped, at - // the expense of more noise being detected as voice. - enum Likelihood { - kVeryLowLikelihood, - kLowLikelihood, - kModerateLikelihood, - kHighLikelihood - }; - - VoiceDetection(int sample_rate_hz, Likelihood likelihood); - ~VoiceDetection(); - - VoiceDetection(VoiceDetection&) = delete; - VoiceDetection& operator=(VoiceDetection&) = delete; - - // Returns true if voice is detected in the current frame. - bool ProcessCaptureAudio(AudioBuffer* audio); - - Likelihood likelihood() const { return likelihood_; } - - private: - class Vad; - - int sample_rate_hz_; - size_t frame_size_samples_; - Likelihood likelihood_; - std::unique_ptr vad_; -}; -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_VOICE_DETECTION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/OWNERS b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/OWNERS index 3304c672cb..9a836bad06 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/OWNERS @@ -1,7 +1,7 @@ -srte@webrtc.org +danilchap@webrtc.org +linderborg@webrtc.org stefan@webrtc.org terelius@webrtc.org -crodbro@webrtc.org -philipel@webrtc.org mflodman@webrtc.org yinwa@webrtc.org +perkj@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.cc index f3c992f571..08b42a8168 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.cc @@ -22,7 +22,7 @@ namespace webrtc { AcknowledgedBitrateEstimator::AcknowledgedBitrateEstimator( - const WebRtcKeyValueConfig* key_value_config) + const FieldTrialsView* key_value_config) : AcknowledgedBitrateEstimator( key_value_config, std::make_unique(key_value_config)) {} @@ -30,7 +30,7 @@ AcknowledgedBitrateEstimator::AcknowledgedBitrateEstimator( AcknowledgedBitrateEstimator::~AcknowledgedBitrateEstimator() {} AcknowledgedBitrateEstimator::AcknowledgedBitrateEstimator( - const 
WebRtcKeyValueConfig* key_value_config, + const FieldTrialsView* key_value_config, std::unique_ptr bitrate_estimator) : in_alr_(false), bitrate_estimator_(std::move(bitrate_estimator)) {} diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h index 97dd965fa4..d10846ab3a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h @@ -15,8 +15,8 @@ #include #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/transport/network_types.h" -#include "api/transport/webrtc_key_value_config.h" #include "api/units/data_rate.h" #include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h" #include "modules/congestion_controller/goog_cc/bitrate_estimator.h" @@ -27,11 +27,11 @@ class AcknowledgedBitrateEstimator : public AcknowledgedBitrateEstimatorInterface { public: AcknowledgedBitrateEstimator( - const WebRtcKeyValueConfig* key_value_config, + const FieldTrialsView* key_value_config, std::unique_ptr bitrate_estimator); explicit AcknowledgedBitrateEstimator( - const WebRtcKeyValueConfig* key_value_config); + const FieldTrialsView* key_value_config); ~AcknowledgedBitrateEstimator() override; void IncomingPacketFeedbackVector( diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc index d5b1a13fcc..c043353a7a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc @@ -12,6 +12,7 @@ #include +#include "api/units/time_delta.h" #include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h" #include "modules/congestion_controller/goog_cc/robust_throughput_estimator.h" #include "rtc_base/logging.h" @@ -21,25 +22,39 @@ namespace webrtc { constexpr char RobustThroughputEstimatorSettings::kKey[]; RobustThroughputEstimatorSettings::RobustThroughputEstimatorSettings( - const WebRtcKeyValueConfig* key_value_config) { + const FieldTrialsView* key_value_config) { Parser()->Parse( key_value_config->Lookup(RobustThroughputEstimatorSettings::kKey)); - if (min_packets < 10 || kMaxPackets < min_packets) { - RTC_LOG(LS_WARNING) << "Window size must be between 10 and " << kMaxPackets - << " packets"; - min_packets = 20; + if (window_packets < 10 || 1000 < window_packets) { + RTC_LOG(LS_WARNING) << "Window size must be between 10 and 1000 packets"; + window_packets = 20; } - if (initial_packets < 10 || kMaxPackets < initial_packets) { - RTC_LOG(LS_WARNING) << "Initial size must be between 10 and " << kMaxPackets - << " packets"; - initial_packets = 20; + if (max_window_packets < 10 || 1000 < max_window_packets) { + RTC_LOG(LS_WARNING) + << "Max window size must be between 10 and 1000 packets"; + max_window_packets = 500; + } + max_window_packets = std::max(max_window_packets, window_packets); + + if (required_packets < 10 || 1000 < required_packets) { + RTC_LOG(LS_WARNING) << "Required number of initial packets must be between " + "10 and 1000 packets"; + 
required_packets = 10; } - initial_packets = std::min(initial_packets, min_packets); - if (window_duration < TimeDelta::Millis(100) || - TimeDelta::Millis(2000) < window_duration) { - RTC_LOG(LS_WARNING) << "Window duration must be between 100 and 2000 ms"; - window_duration = TimeDelta::Millis(500); + required_packets = std::min(required_packets, window_packets); + + if (min_window_duration < TimeDelta::Millis(100) || + TimeDelta::Millis(3000) < min_window_duration) { + RTC_LOG(LS_WARNING) << "Window duration must be between 100 and 3000 ms"; + min_window_duration = TimeDelta::Millis(750); + } + if (max_window_duration < TimeDelta::Seconds(1) || + TimeDelta::Seconds(15) < max_window_duration) { + RTC_LOG(LS_WARNING) << "Max window duration must be between 1 and 15 s"; + max_window_duration = TimeDelta::Seconds(5); } + min_window_duration = std::min(min_window_duration, max_window_duration); + if (unacked_weight < 0.0 || 1.0 < unacked_weight) { RTC_LOG(LS_WARNING) << "Weight for prior unacked size must be between 0 and 1."; @@ -49,14 +64,14 @@ RobustThroughputEstimatorSettings::RobustThroughputEstimatorSettings( std::unique_ptr RobustThroughputEstimatorSettings::Parser() { - return StructParametersParser::Create("enabled", &enabled, // - "reduce_bias", &reduce_bias, // - "assume_shared_link", // - &assume_shared_link, // - "min_packets", &min_packets, // - "window_duration", &window_duration, // - "initial_packets", &initial_packets, // - "unacked_weight", &unacked_weight); + return StructParametersParser::Create( + "enabled", &enabled, // + "window_packets", &window_packets, // + "max_window_packets", &max_window_packets, // + "window_duration", &min_window_duration, // + "max_window_duration", &max_window_duration, // + "required_packets", &required_packets, // + "unacked_weight", &unacked_weight); } AcknowledgedBitrateEstimatorInterface:: @@ -64,7 +79,7 @@ AcknowledgedBitrateEstimatorInterface:: std::unique_ptr AcknowledgedBitrateEstimatorInterface::Create( - const WebRtcKeyValueConfig* key_value_config) { + const FieldTrialsView* key_value_config) { RobustThroughputEstimatorSettings simplified_estimator_settings( key_value_config); if (simplified_estimator_settings.enabled) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h index b6cee43125..515af1efc9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h @@ -11,51 +11,56 @@ #ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_ACKNOWLEDGED_BITRATE_ESTIMATOR_INTERFACE_H_ #define MODULES_CONGESTION_CONTROLLER_GOOG_CC_ACKNOWLEDGED_BITRATE_ESTIMATOR_INTERFACE_H_ +#include + #include #include #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/transport/network_types.h" -#include "api/transport/webrtc_key_value_config.h" #include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "rtc_base/experiments/struct_parameters_parser.h" namespace webrtc { struct RobustThroughputEstimatorSettings { static constexpr char kKey[] = "WebRTC-Bwe-RobustThroughputEstimatorSettings"; - static constexpr size_t kMaxPackets = 500; RobustThroughputEstimatorSettings() = delete; explicit RobustThroughputEstimatorSettings( 
- const WebRtcKeyValueConfig* key_value_config); + const FieldTrialsView* key_value_config); bool enabled = false; // Set to true to use RobustThroughputEstimator. - // The estimator handles delay spikes by removing the largest receive time - // gap, but this introduces some bias that may lead to overestimation when - // there isn't any delay spike. If `reduce_bias` is true, we instead replace - // the largest receive time gap by the second largest. This reduces the bias - // at the cost of not completely removing the genuine delay spikes. - bool reduce_bias = true; - - // If `assume_shared_link` is false, we ignore the size of the first packet - // when computing the receive rate. Otherwise, we remove half of the first - // and last packet's sizes. - bool assume_shared_link = false; - - // The estimator window keeps at least `min_packets` packets and up to - // kMaxPackets received during the last `window_duration`. - unsigned min_packets = 20; - TimeDelta window_duration = TimeDelta::Millis(500); - - // The estimator window requires at least `initial_packets` packets received - // over at least `initial_duration`. - unsigned initial_packets = 20; - + // The estimator keeps the smallest window containing at least + // `window_packets` and at least the packets received during the last + // `min_window_duration` milliseconds. + // (This means that it may store more than `window_packets` at high bitrates, + // and a longer duration than `min_window_duration` at low bitrates.) + // However, if will never store more than kMaxPackets (for performance + // reasons), and never longer than max_window_duration (to avoid very old + // packets influencing the estimate for example when sending is paused). + unsigned window_packets = 20; + unsigned max_window_packets = 500; + TimeDelta min_window_duration = TimeDelta::Seconds(1); + TimeDelta max_window_duration = TimeDelta::Seconds(5); + + // The estimator window requires at least `required_packets` packets + // to produce an estimate. + unsigned required_packets = 10; + + // If audio packets aren't included in allocation (i.e. the + // estimated available bandwidth is divided only among the video + // streams), then `unacked_weight` should be set to 0. // If audio packets are included in allocation, but not in bandwidth - // estimation and the sent audio packets get double counted, - // then it might be useful to reduce the weight to 0.5. + // estimation (i.e. they don't have transport-wide sequence numbers, + // but we nevertheless divide the estimated available bandwidth among + // both audio and video streams), then `unacked_weight` should be set to 1. + // If all packets have transport-wide sequence numbers, then the value + // of `unacked_weight` doesn't matter. 
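For orientation, the renamed keys wired up in Parser() above correspond to a field-trial string along the following lines; the concrete values, the ms/s duration suffixes, and the trailing-slash trial syntax are illustrative assumptions rather than something taken from this patch. Note that the "window_duration" key now binds to min_window_duration.

// Keys match StructParametersParser::Create() in the .cc hunk above.
constexpr char kExampleRobustThroughputTrial[] =
    "WebRTC-Bwe-RobustThroughputEstimatorSettings/"
    "enabled:true,window_packets:30,max_window_packets:500,"
    "window_duration:750ms,max_window_duration:5s,"
    "required_packets:10,unacked_weight:1.0/";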
double unacked_weight = 1.0; std::unique_ptr Parser(); @@ -64,7 +69,7 @@ struct RobustThroughputEstimatorSettings { class AcknowledgedBitrateEstimatorInterface { public: static std::unique_ptr Create( - const WebRtcKeyValueConfig* key_value_config); + const FieldTrialsView* key_value_config); virtual ~AcknowledgedBitrateEstimatorInterface(); virtual void IncomingPacketFeedbackVector( diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/alr_detector.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/alr_detector.cc index 6a62954c36..f1e649b7cd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/alr_detector.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/alr_detector.cc @@ -24,8 +24,7 @@ namespace webrtc { namespace { -AlrDetectorConfig GetConfigFromTrials( - const WebRtcKeyValueConfig* key_value_config) { +AlrDetectorConfig GetConfigFromTrials(const FieldTrialsView* key_value_config) { RTC_CHECK(AlrExperimentSettings::MaxOneFieldTrialEnabled(*key_value_config)); absl::optional experiment_settings = AlrExperimentSettings::CreateFromFieldTrial( @@ -61,10 +60,10 @@ std::unique_ptr AlrDetectorConfig::Parser() { AlrDetector::AlrDetector(AlrDetectorConfig config, RtcEventLog* event_log) : conf_(config), alr_budget_(0, true), event_log_(event_log) {} -AlrDetector::AlrDetector(const WebRtcKeyValueConfig* key_value_config) +AlrDetector::AlrDetector(const FieldTrialsView* key_value_config) : AlrDetector(GetConfigFromTrials(key_value_config), nullptr) {} -AlrDetector::AlrDetector(const WebRtcKeyValueConfig* key_value_config, +AlrDetector::AlrDetector(const FieldTrialsView* key_value_config, RtcEventLog* event_log) : AlrDetector(GetConfigFromTrials(key_value_config), event_log) {} AlrDetector::~AlrDetector() {} diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/alr_detector.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/alr_detector.h index ee3fe92845..5e7a3e1075 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/alr_detector.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/alr_detector.h @@ -13,10 +13,11 @@ #include #include + #include #include "absl/types/optional.h" -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_view.h" #include "modules/pacing/interval_budget.h" #include "rtc_base/experiments/alr_experiment.h" #include "rtc_base/experiments/struct_parameters_parser.h" @@ -46,9 +47,8 @@ struct AlrDetectorConfig { class AlrDetector { public: AlrDetector(AlrDetectorConfig config, RtcEventLog* event_log); - explicit AlrDetector(const WebRtcKeyValueConfig* key_value_config); - AlrDetector(const WebRtcKeyValueConfig* key_value_config, - RtcEventLog* event_log); + explicit AlrDetector(const FieldTrialsView* key_value_config); + AlrDetector(const FieldTrialsView* key_value_config, RtcEventLog* event_log); ~AlrDetector(); void OnBytesSent(size_t bytes_sent, int64_t send_time_ms); diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/bitrate_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/bitrate_estimator.cc index 09b214a798..9c68e48886 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/bitrate_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/bitrate_estimator.cc @@ -32,7 +32,7 @@ const char kBweThroughputWindowConfig[] = 
"WebRTC-BweThroughputWindowConfig"; } // namespace -BitrateEstimator::BitrateEstimator(const WebRtcKeyValueConfig* key_value_config) +BitrateEstimator::BitrateEstimator(const FieldTrialsView* key_value_config) : sum_(0), initial_window_ms_("initial_window_ms", kInitialRateWindowMs, diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/bitrate_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/bitrate_estimator.h index 34114f017c..a6f985800e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/bitrate_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/bitrate_estimator.h @@ -14,7 +14,7 @@ #include #include "absl/types/optional.h" -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_view.h" #include "api/units/data_rate.h" #include "api/units/timestamp.h" #include "rtc_base/experiments/field_trial_parser.h" @@ -28,7 +28,7 @@ namespace webrtc { // unrelated to congestion. class BitrateEstimator { public: - explicit BitrateEstimator(const WebRtcKeyValueConfig* key_value_config); + explicit BitrateEstimator(const FieldTrialsView* key_value_config); virtual ~BitrateEstimator(); virtual void Update(Timestamp at_time, DataSize amount, bool in_alr); diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc index ec642823df..2f188f30ca 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc @@ -23,7 +23,7 @@ namespace webrtc { CongestionWindowPushbackController::CongestionWindowPushbackController( - const WebRtcKeyValueConfig* key_value_config) + const FieldTrialsView* key_value_config) : add_pacing_( absl::StartsWith(key_value_config->Lookup( "WebRTC-AddPacingToCongestionWindowPushback"), diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h index 7a49a83d5b..ea9ed97c3d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h @@ -15,7 +15,7 @@ #include #include "absl/types/optional.h" -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_view.h" #include "api/units/data_size.h" namespace webrtc { @@ -28,7 +28,7 @@ namespace webrtc { class CongestionWindowPushbackController { public: explicit CongestionWindowPushbackController( - const WebRtcKeyValueConfig* key_value_config); + const FieldTrialsView* key_value_config); void UpdateOutstandingData(int64_t outstanding_bytes); void UpdatePacingQueue(int64_t pacing_bytes); uint32_t UpdateTargetBitrate(uint32_t bitrate_bps); diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.cc index 4c5bdb67a0..07ac599148 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.cc +++ 
b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.cc @@ -32,21 +32,8 @@ namespace webrtc { namespace { constexpr TimeDelta kStreamTimeOut = TimeDelta::Seconds(2); - -// Used with field trial "WebRTC-Bwe-NewInterArrivalDelta/Enabled/ constexpr TimeDelta kSendTimeGroupLength = TimeDelta::Millis(5); -// Used unless field trial "WebRTC-Bwe-NewInterArrivalDelta/Enabled/" -constexpr int kTimestampGroupLengthMs = 5; -constexpr int kAbsSendTimeFraction = 18; -constexpr int kAbsSendTimeInterArrivalUpshift = 8; -constexpr int kInterArrivalShift = - kAbsSendTimeFraction + kAbsSendTimeInterArrivalUpshift; -constexpr int kTimestampGroupTicks = - (kTimestampGroupLengthMs << kInterArrivalShift) / 1000; -constexpr double kTimestampToMs = - 1000.0 / static_cast(1 << kInterArrivalShift); - // This ssrc is used to fulfill the current API but will be removed // after the API has been changed. constexpr uint32_t kFixedSsrc = 0; @@ -55,7 +42,7 @@ constexpr uint32_t kFixedSsrc = 0; constexpr char BweSeparateAudioPacketsSettings::kKey[]; BweSeparateAudioPacketsSettings::BweSeparateAudioPacketsSettings( - const WebRtcKeyValueConfig* key_value_config) { + const FieldTrialsView* key_value_config) { Parser()->Parse( key_value_config->Lookup(BweSeparateAudioPacketsSettings::kKey)); } @@ -72,10 +59,9 @@ DelayBasedBwe::Result::Result() : updated(false), probe(false), target_bitrate(DataRate::Zero()), - recovered_from_overuse(false), - backoff_in_alr(false) {} + recovered_from_overuse(false) {} -DelayBasedBwe::DelayBasedBwe(const WebRtcKeyValueConfig* key_value_config, +DelayBasedBwe::DelayBasedBwe(const FieldTrialsView* key_value_config, RtcEventLog* event_log, NetworkStatePredictor* network_state_predictor) : event_log_(event_log), @@ -93,18 +79,10 @@ DelayBasedBwe::DelayBasedBwe(const WebRtcKeyValueConfig* key_value_config, uma_recorded_(false), rate_control_(key_value_config, /*send_side=*/true), prev_bitrate_(DataRate::Zero()), - has_once_detected_overuse_(false), - prev_state_(BandwidthUsage::kBwNormal), - use_new_inter_arrival_delta_(!absl::StartsWith( - key_value_config->Lookup("WebRTC-Bwe-NewInterArrivalDelta"), - "Disabled")), - alr_limited_backoff_enabled_(absl::StartsWith( - key_value_config->Lookup("WebRTC-Bwe-AlrLimitedBackoff"), - "Enabled")) { + prev_state_(BandwidthUsage::kBwNormal) { RTC_LOG(LS_INFO) << "Initialized DelayBasedBwe with separate audio overuse detection" - << separate_audio_.Parser()->Encode() << " and alr limited backoff " - << (alr_limited_backoff_enabled_ ? "enabled" : "disabled"); + << separate_audio_.Parser()->Encode(); } DelayBasedBwe::~DelayBasedBwe() {} @@ -162,17 +140,11 @@ void DelayBasedBwe::IncomingPacketFeedback(const PacketResult& packet_feedback, // Reset if the stream has timed out. 
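// Editor's sketch, not part of the patch: across these files WebRtcKeyValueConfig
// is renamed to FieldTrialsView, but the lookup pattern used in the constructor
// hunk above (and in the IsEnabled()/IsNotDisabled() helpers later in this patch)
// is unchanged: fetch the trial string by key and match an "Enabled"/"Disabled"
// prefix. The helper name below is illustrative.
#include "absl/strings/match.h"
#include "api/field_trials_view.h"

namespace {
bool TrialEnabled(const webrtc::FieldTrialsView& trials, absl::string_view key) {
  // Trial values are plain strings such as "Enabled" or "Enabled-1,2"; a prefix
  // match is all that is needed to gate a code path.
  return absl::StartsWith(trials.Lookup(key), "Enabled");
}
}  // namespace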
if (last_seen_packet_.IsInfinite() || at_time - last_seen_packet_ > kStreamTimeOut) { - if (use_new_inter_arrival_delta_) { - video_inter_arrival_delta_ = - std::make_unique(kSendTimeGroupLength); - audio_inter_arrival_delta_ = - std::make_unique(kSendTimeGroupLength); - } else { - video_inter_arrival_ = std::make_unique( - kTimestampGroupTicks, kTimestampToMs, true); - audio_inter_arrival_ = std::make_unique( - kTimestampGroupTicks, kTimestampToMs, true); - } + video_inter_arrival_delta_ = + std::make_unique(kSendTimeGroupLength); + audio_inter_arrival_delta_ = + std::make_unique(kSendTimeGroupLength); + video_delay_detector_.reset( new TrendlineEstimator(key_value_config_, network_state_predictor_)); audio_delay_detector_.reset( @@ -203,57 +175,23 @@ void DelayBasedBwe::IncomingPacketFeedback(const PacketResult& packet_feedback, } DataSize packet_size = packet_feedback.sent_packet.size; - if (use_new_inter_arrival_delta_) { - TimeDelta send_delta = TimeDelta::Zero(); - TimeDelta recv_delta = TimeDelta::Zero(); - int size_delta = 0; - - InterArrivalDelta* inter_arrival_for_packet = - (separate_audio_.enabled && packet_feedback.sent_packet.audio) - ? video_inter_arrival_delta_.get() - : audio_inter_arrival_delta_.get(); - bool calculated_deltas = inter_arrival_for_packet->ComputeDeltas( - packet_feedback.sent_packet.send_time, packet_feedback.receive_time, - at_time, packet_size.bytes(), &send_delta, &recv_delta, &size_delta); - - delay_detector_for_packet->Update( - recv_delta.ms(), send_delta.ms(), - packet_feedback.sent_packet.send_time.ms(), - packet_feedback.receive_time.ms(), packet_size.bytes(), - calculated_deltas); - } else { - InterArrival* inter_arrival_for_packet = - (separate_audio_.enabled && packet_feedback.sent_packet.audio) - ? video_inter_arrival_.get() - : audio_inter_arrival_.get(); - - uint32_t send_time_24bits = - static_cast( - ((static_cast(packet_feedback.sent_packet.send_time.ms()) - << kAbsSendTimeFraction) + - 500) / - 1000) & - 0x00FFFFFF; - // Shift up send time to use the full 32 bits that inter_arrival works with, - // so wrapping works properly. - uint32_t timestamp = send_time_24bits << kAbsSendTimeInterArrivalUpshift; - - uint32_t timestamp_delta = 0; - int64_t recv_delta_ms = 0; - int size_delta = 0; - - bool calculated_deltas = inter_arrival_for_packet->ComputeDeltas( - timestamp, packet_feedback.receive_time.ms(), at_time.ms(), - packet_size.bytes(), ×tamp_delta, &recv_delta_ms, &size_delta); - double send_delta_ms = - (1000.0 * timestamp_delta) / (1 << kInterArrivalShift); - - delay_detector_for_packet->Update( - recv_delta_ms, send_delta_ms, - packet_feedback.sent_packet.send_time.ms(), - packet_feedback.receive_time.ms(), packet_size.bytes(), - calculated_deltas); - } + TimeDelta send_delta = TimeDelta::Zero(); + TimeDelta recv_delta = TimeDelta::Zero(); + int size_delta = 0; + + InterArrivalDelta* inter_arrival_for_packet = + (separate_audio_.enabled && packet_feedback.sent_packet.audio) + ? 
audio_inter_arrival_delta_.get() + : video_inter_arrival_delta_.get(); + bool calculated_deltas = inter_arrival_for_packet->ComputeDeltas( + packet_feedback.sent_packet.send_time, packet_feedback.receive_time, + at_time, packet_size.bytes(), &send_delta, &recv_delta, &size_delta); + + delay_detector_for_packet->Update(recv_delta.ms(), + send_delta.ms(), + packet_feedback.sent_packet.send_time.ms(), + packet_feedback.receive_time.ms(), + packet_size.bytes(), calculated_deltas); } DataRate DelayBasedBwe::TriggerOveruse(Timestamp at_time, @@ -273,14 +211,8 @@ DelayBasedBwe::Result DelayBasedBwe::MaybeUpdateEstimate( // Currently overusing the bandwidth. if (active_delay_detector_->State() == BandwidthUsage::kBwOverusing) { - if (has_once_detected_overuse_ && in_alr && alr_limited_backoff_enabled_) { - if (rate_control_.TimeToReduceFurther(at_time, prev_bitrate_)) { - result.updated = - UpdateEstimate(at_time, prev_bitrate_, &result.target_bitrate); - result.backoff_in_alr = true; - } - } else if (acked_bitrate && - rate_control_.TimeToReduceFurther(at_time, *acked_bitrate)) { + if (acked_bitrate && + rate_control_.TimeToReduceFurther(at_time, *acked_bitrate)) { result.updated = UpdateEstimate(at_time, acked_bitrate, &result.target_bitrate); } else if (!acked_bitrate && rate_control_.ValidEstimate() && @@ -294,13 +226,12 @@ DelayBasedBwe::Result DelayBasedBwe::MaybeUpdateEstimate( result.probe = false; result.target_bitrate = rate_control_.LatestEstimate(); } - has_once_detected_overuse_ = true; } else { if (probe_bitrate) { result.probe = true; result.updated = true; - result.target_bitrate = *probe_bitrate; rate_control_.SetEstimate(*probe_bitrate, at_time); + result.target_bitrate = rate_control_.LatestEstimate(); } else { result.updated = UpdateEstimate(at_time, acked_bitrate, &result.target_bitrate); @@ -322,6 +253,8 @@ DelayBasedBwe::Result DelayBasedBwe::MaybeUpdateEstimate( prev_bitrate_ = bitrate; prev_state_ = detector_state; } + + result.delay_detector_state = detector_state; return result; } @@ -369,8 +302,4 @@ TimeDelta DelayBasedBwe::GetExpectedBwePeriod() const { return rate_control_.GetExpectedBandwidthPeriod(); } -void DelayBasedBwe::SetAlrLimitedBackoffExperiment(bool enabled) { - alr_limited_backoff_enabled_ = enabled; -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h index 85ce6eaa82..21dff35735 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h @@ -18,9 +18,9 @@ #include #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/network_state_predictor.h" #include "api/transport/network_types.h" -#include "api/transport/webrtc_key_value_config.h" #include "modules/congestion_controller/goog_cc/delay_increase_detector_interface.h" #include "modules/congestion_controller/goog_cc/inter_arrival_delta.h" #include "modules/congestion_controller/goog_cc/probe_bitrate_estimator.h" @@ -37,7 +37,7 @@ struct BweSeparateAudioPacketsSettings { BweSeparateAudioPacketsSettings() = default; explicit BweSeparateAudioPacketsSettings( - const WebRtcKeyValueConfig* key_value_config); + const FieldTrialsView* key_value_config); bool enabled = false; int packet_threshold = 10; @@ -55,10 +55,10 @@ class DelayBasedBwe { bool probe; DataRate target_bitrate = DataRate::Zero(); 
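  // Editor's note, not part of the patch: `backoff_in_alr` is dropped and the
  // Result struct below instead reports the delay detector's BandwidthUsage
  // verdict. GoogCcNetworkController::OnTransportPacketsFeedback (later in this
  // patch) forwards it via UpdateLossBasedEstimator() so the loss-based
  // estimator can take the trendline state into account.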
bool recovered_from_overuse; - bool backoff_in_alr; + BandwidthUsage delay_detector_state; }; - explicit DelayBasedBwe(const WebRtcKeyValueConfig* key_value_config, + explicit DelayBasedBwe(const FieldTrialsView* key_value_config, RtcEventLog* event_log, NetworkStatePredictor* network_state_predictor); @@ -79,7 +79,6 @@ class DelayBasedBwe { void SetStartBitrate(DataRate start_bitrate); void SetMinBitrate(DataRate min_bitrate); TimeDelta GetExpectedBwePeriod() const; - void SetAlrLimitedBackoffExperiment(bool enabled); DataRate TriggerOveruse(Timestamp at_time, absl::optional link_capacity); DataRate last_estimate() const { return prev_bitrate_; } @@ -103,7 +102,7 @@ class DelayBasedBwe { rtc::RaceChecker network_race_; RtcEventLog* const event_log_; - const WebRtcKeyValueConfig* const key_value_config_; + const FieldTrialsView* const key_value_config_; // Alternatively, run two separate overuse detectors for audio and video, // and fall back to the audio one if we haven't seen a video packet in a @@ -125,10 +124,7 @@ class DelayBasedBwe { bool uma_recorded_; AimdRateControl rate_control_; DataRate prev_bitrate_; - bool has_once_detected_overuse_; BandwidthUsage prev_state_; - const bool use_new_inter_arrival_delta_; - bool alr_limited_backoff_enabled_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h index eaadb0d124..fc12cff7d5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h @@ -13,7 +13,6 @@ #include #include "api/network_state_predictor.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -22,6 +21,11 @@ class DelayIncreaseDetectorInterface { DelayIncreaseDetectorInterface() {} virtual ~DelayIncreaseDetectorInterface() {} + DelayIncreaseDetectorInterface(const DelayIncreaseDetectorInterface&) = + delete; + DelayIncreaseDetectorInterface& operator=( + const DelayIncreaseDetectorInterface&) = delete; + // Update the detector with a new sample. The deltas should represent deltas // between timestamp groups as defined by the InterArrival class. 
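// Editor's sketch, not part of the patch, expanding on the comment above: the
// deltas come from InterArrivalDelta (see DelayBasedBwe::IncomingPacketFeedback
// earlier in this patch), which closes 5 ms send-time groups and reports the
// per-group send/receive/size deltas that are then fed to the detector. Names
// below are illustrative.
#include "api/transport/network_types.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "modules/congestion_controller/goog_cc/delay_increase_detector_interface.h"
#include "modules/congestion_controller/goog_cc/inter_arrival_delta.h"

void FeedDetector(webrtc::InterArrivalDelta& inter_arrival,
                  webrtc::DelayIncreaseDetectorInterface& detector,
                  const webrtc::PacketResult& packet,
                  webrtc::Timestamp at_time) {
  webrtc::TimeDelta send_delta = webrtc::TimeDelta::Zero();
  webrtc::TimeDelta recv_delta = webrtc::TimeDelta::Zero();
  int size_delta = 0;
  // Deltas are only valid once a full timestamp group has been closed.
  const bool calculated = inter_arrival.ComputeDeltas(
      packet.sent_packet.send_time, packet.receive_time, at_time,
      packet.sent_packet.size.bytes(), &send_delta, &recv_delta, &size_delta);
  detector.Update(recv_delta.ms(), send_delta.ms(),
                  packet.sent_packet.send_time.ms(), packet.receive_time.ms(),
                  packet.sent_packet.size.bytes(), calculated);
}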
virtual void Update(double recv_delta_ms, @@ -32,8 +36,6 @@ class DelayIncreaseDetectorInterface { bool calculated_deltas) = 0; virtual BandwidthUsage State() const = 0; - - RTC_DISALLOW_COPY_AND_ASSIGN(DelayIncreaseDetectorInterface); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc index 2344f45a65..fcb77faacd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc @@ -22,7 +22,9 @@ #include #include "absl/strings/match.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" #include "modules/congestion_controller/goog_cc/alr_detector.h" #include "modules/congestion_controller/goog_cc/probe_controller.h" @@ -50,20 +52,11 @@ constexpr float kDefaultPaceMultiplier = 2.5f; // below the current throughput estimate to drain the network queues. constexpr double kProbeDropThroughputFraction = 0.85; -int64_t GetBpsOrDefault(const absl::optional& rate, - int64_t fallback_bps) { - if (rate && rate->IsFinite()) { - return rate->bps(); - } else { - return fallback_bps; - } -} - -bool IsEnabled(const WebRtcKeyValueConfig* config, absl::string_view key) { +bool IsEnabled(const FieldTrialsView* config, absl::string_view key) { return absl::StartsWith(config->Lookup(key), "Enabled"); } -bool IsNotDisabled(const WebRtcKeyValueConfig* config, absl::string_view key) { +bool IsNotDisabled(const FieldTrialsView* config, absl::string_view key) { return !absl::StartsWith(config->Lookup(key), "Disabled"); } } // namespace @@ -88,6 +81,11 @@ GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config, RateControlSettings::ParseFromKeyValueConfig(key_value_config_)), loss_based_stable_rate_( IsEnabled(key_value_config_, "WebRTC-Bwe-LossBasedStableRate")), + pace_at_max_of_bwe_and_lower_link_capacity_( + IsEnabled(key_value_config_, + "WebRTC-Bwe-PaceAtMaxOfBweAndLowerLinkCapacity")), + pace_at_loss_based_bwe_when_loss_( + IsEnabled(key_value_config_, "WebRTC-Bwe-PaceAtLossBaseBweWhenLoss")), probe_controller_( new ProbeController(key_value_config_, config.event_log)), congestion_window_pushback_controller_( @@ -126,7 +124,7 @@ GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config, {&safe_reset_on_route_change_, &safe_reset_acknowledged_rate_}, key_value_config_->Lookup("WebRTC-Bwe-SafeResetOnRouteChange")); if (delay_based_bwe_) - delay_based_bwe_->SetMinBitrate(congestion_controller::GetMinBitrate()); + delay_based_bwe_->SetMinBitrate(kCongestionControllerMinBitrate); } GoogCcNetworkController::~GoogCcNetworkController() {} @@ -167,7 +165,7 @@ NetworkControlUpdate GoogCcNetworkController::OnNetworkRouteChange( delay_based_bwe_.reset(new DelayBasedBwe(key_value_config_, event_log_, network_state_predictor_.get())); bandwidth_estimation_->OnRouteChange(); - probe_controller_->Reset(msg.at_time.ms()); + probe_controller_->Reset(msg.at_time); NetworkControlUpdate update; update.probe_cluster_configs = ResetConstraints(msg.constraints); MaybeTriggerOnNetworkChanged(&update, msg.at_time); @@ -190,7 +188,7 @@ NetworkControlUpdate GoogCcNetworkController::OnProcessInterval( 
initial_config_->stream_based_config.max_total_allocated_bitrate; if (total_bitrate) { auto probes = probe_controller_->OnMaxTotalAllocatedBitrate( - total_bitrate->bps(), msg.at_time.ms()); + *total_bitrate, msg.at_time); update.probe_cluster_configs.insert(update.probe_cluster_configs.end(), probes.begin(), probes.end()); @@ -207,7 +205,7 @@ NetworkControlUpdate GoogCcNetworkController::OnProcessInterval( alr_detector_->GetApplicationLimitedRegionStartTime(); probe_controller_->SetAlrStartTimeMs(start_time_ms); - auto probes = probe_controller_->Process(msg.at_time.ms()); + auto probes = probe_controller_->Process(msg.at_time); update.probe_cluster_configs.insert(update.probe_cluster_configs.end(), probes.begin(), probes.end()); @@ -293,9 +291,9 @@ NetworkControlUpdate GoogCcNetworkController::OnStreamsConfig( if (rate_control_settings_.TriggerProbeOnMaxAllocatedBitrateChange()) { update.probe_cluster_configs = probe_controller_->OnMaxTotalAllocatedBitrate( - msg.max_total_allocated_bitrate->bps(), msg.at_time.ms()); + *msg.max_total_allocated_bitrate, msg.at_time); } else { - probe_controller_->SetMaxBitrate(msg.max_total_allocated_bitrate->bps()); + probe_controller_->SetMaxBitrate(*msg.max_total_allocated_bitrate); } max_total_allocated_bitrate_ = *msg.max_total_allocated_bitrate; } @@ -337,8 +335,7 @@ void GoogCcNetworkController::ClampConstraints() { // TODO(holmer): We should make sure the default bitrates are set to 10 kbps, // and that we don't try to set the min bitrate to 0 from any applications. // The congestion controller should allow a min bitrate of 0. - min_data_rate_ = - std::max(min_target_rate_, congestion_controller::GetMinBitrate()); + min_data_rate_ = std::max(min_target_rate_, kCongestionControllerMinBitrate); if (use_min_allocatable_as_lower_bound_) { min_data_rate_ = std::max(min_data_rate_, min_total_allocated_bitrate_); } @@ -368,8 +365,8 @@ std::vector GoogCcNetworkController::ResetConstraints( delay_based_bwe_->SetMinBitrate(min_data_rate_); return probe_controller_->SetBitrates( - min_data_rate_.bps(), GetBpsOrDefault(starting_rate_, -1), - max_data_rate_.bps_or(-1), new_constraints.at_time.ms()); + min_data_rate_, starting_rate_.value_or(DataRate::Zero()), max_data_rate_, + new_constraints.at_time); } NetworkControlUpdate GoogCcNetworkController::OnTransportLossReport( @@ -426,7 +423,7 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( for (const auto& feedback : feedbacks) { TimeDelta feedback_rtt = report.feedback_time - feedback.sent_packet.send_time; - TimeDelta min_pending_time = feedback.receive_time - max_recv_time; + TimeDelta min_pending_time = max_recv_time - feedback.receive_time; TimeDelta propagation_rtt = feedback_rtt - min_pending_time; max_feedback_rtt = std::max(max_feedback_rtt, feedback_rtt); min_propagation_rtt = std::min(min_propagation_rtt, propagation_rtt); @@ -443,8 +440,9 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( } if (packet_feedback_only_) { if (!feedback_max_rtts_.empty()) { - int64_t sum_rtt_ms = std::accumulate(feedback_max_rtts_.begin(), - feedback_max_rtts_.end(), 0); + int64_t sum_rtt_ms = + std::accumulate(feedback_max_rtts_.begin(), feedback_max_rtts_.end(), + static_cast(0)); int64_t mean_rtt_ms = sum_rtt_ms / feedback_max_rtts_.size(); if (delay_based_bwe_) delay_based_bwe_->OnRttUpdate(TimeDelta::Millis(mean_rtt_ms)); @@ -491,7 +489,6 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( auto acknowledged_bitrate = 
acknowledged_bitrate_estimator_->bitrate(); bandwidth_estimation_->SetAcknowledgedRate(acknowledged_bitrate, report.feedback_time); - bandwidth_estimation_->IncomingPacketFeedbackVector(report); for (const auto& feedback : report.SortedByReceiveTime()) { if (feedback.sent_packet.pacing_info.probe_cluster_id != PacedPacketInfo::kNotAProbe) { @@ -509,6 +506,7 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( prev_estimate->last_feed_time)) { event_log_->Log(std::make_unique( estimate_->link_capacity_lower, estimate_->link_capacity_upper)); + probe_controller_->SetNetworkStateEstimate(*estimate_); } } absl::optional probe_bitrate = @@ -535,7 +533,6 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( NetworkControlUpdate update; bool recovered_from_overuse = false; - bool backoff_in_alr = false; DelayBasedBwe::Result result; result = delay_based_bwe_->IncomingPacketFeedbackVector( @@ -551,20 +548,19 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( // call UpdateDelayBasedEstimate after SetSendBitrate. bandwidth_estimation_->UpdateDelayBasedEstimate(report.feedback_time, result.target_bitrate); + } + bandwidth_estimation_->UpdateLossBasedEstimator(report, + result.delay_detector_state); + if (result.updated) { // Update the estimate in the ProbeController, in case we want to probe. MaybeTriggerOnNetworkChanged(&update, report.feedback_time); } + recovered_from_overuse = result.recovered_from_overuse; - backoff_in_alr = result.backoff_in_alr; if (recovered_from_overuse) { probe_controller_->SetAlrStartTimeMs(alr_start_time); - auto probes = probe_controller_->RequestProbe(report.feedback_time.ms()); - update.probe_cluster_configs.insert(update.probe_cluster_configs.end(), - probes.begin(), probes.end()); - } else if (backoff_in_alr) { - // If we just backed off during ALR, request a new probe. 
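    // Editor's note, not part of the patch: the ALR back-off branch removed
    // below goes away together with Result::backoff_in_alr, so a probe is now
    // requested only after recovering from overuse, and
    // ProbeController::RequestProbe takes a Timestamp instead of a millisecond
    // count (see the '+' line that follows).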
- auto probes = probe_controller_->RequestProbe(report.feedback_time.ms()); + auto probes = probe_controller_->RequestProbe(report.feedback_time); update.probe_cluster_configs.insert(update.probe_cluster_configs.end(), probes.begin(), probes.end()); } @@ -618,6 +614,10 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( uint8_t fraction_loss = bandwidth_estimation_->fraction_loss(); TimeDelta round_trip_time = bandwidth_estimation_->round_trip_time(); DataRate loss_based_target_rate = bandwidth_estimation_->target_rate(); + bool bwe_limited_due_to_packet_loss = + loss_based_target_rate.IsFinite() && + bandwidth_estimation_->delay_based_limit().IsFinite() && + loss_based_target_rate < bandwidth_estimation_->delay_based_limit(); DataRate pushback_target_rate = loss_based_target_rate; BWE_TEST_LOGGING_PLOT(1, "fraction_loss_%", at_time.ms(), @@ -680,11 +680,10 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( update->target_rate = target_rate_msg; auto probes = probe_controller_->SetEstimatedBitrate( - loss_based_target_rate.bps(), at_time.ms()); + loss_based_target_rate, bwe_limited_due_to_packet_loss, at_time); update->probe_cluster_configs.insert(update->probe_cluster_configs.end(), probes.begin(), probes.end()); update->pacer_config = GetPacingRates(at_time); - RTC_LOG(LS_VERBOSE) << "bwe " << at_time.ms() << " pushback_target_bps=" << last_pushback_target_rate_.bps() << " estimate_bps=" << loss_based_target_rate.bps(); @@ -694,9 +693,20 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( PacerConfig GoogCcNetworkController::GetPacingRates(Timestamp at_time) const { // Pacing rate is based on target rate before congestion window pushback, // because we don't want to build queues in the pacer when pushback occurs. 
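// Editor's sketch, not part of the patch, of the pacing-rate selection added in
// the hunk below: when one of the new pacing field trials applies and a
// network-state estimate is available, the pacer can be driven at the lower
// link-capacity bound, so pacing is not limited below what the network is
// estimated to sustain; otherwise the previous max(min allocation, loss-based
// target) * factor rule applies. Function and parameter names are illustrative.
#include <algorithm>
#include "api/units/data_rate.h"

webrtc::DataRate ChoosePacingRate(bool use_link_capacity_floor,
                                  webrtc::DataRate min_allocated,
                                  webrtc::DataRate loss_based_target,
                                  webrtc::DataRate link_capacity_lower,
                                  double pacing_factor) {
  if (use_link_capacity_floor) {
    // Mirrors the field-trial-gated branch: take the largest of the three.
    return std::max({min_allocated, link_capacity_lower, loss_based_target}) *
           pacing_factor;
  }
  // Previous behaviour: loss-based target (or the minimum allocation,
  // whichever is larger) scaled by the pacing factor.
  return std::max(min_allocated, loss_based_target) * pacing_factor;
}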
- DataRate pacing_rate = - std::max(min_total_allocated_bitrate_, last_loss_based_target_rate_) * - pacing_factor_; + DataRate pacing_rate = DataRate::Zero(); + if ((pace_at_max_of_bwe_and_lower_link_capacity_ || + (pace_at_loss_based_bwe_when_loss_ && + last_loss_based_target_rate_ >= delay_based_bwe_->last_estimate())) && + estimate_) { + pacing_rate = + std::max({min_total_allocated_bitrate_, estimate_->link_capacity_lower, + last_loss_based_target_rate_}) * + pacing_factor_; + } else { + pacing_rate = + std::max(min_total_allocated_bitrate_, last_loss_based_target_rate_) * + pacing_factor_; + } DataRate padding_rate = std::min(max_padding_rate_, last_pushback_target_rate_); PacerConfig msg; diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h index 6dd70c8969..884b572740 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h @@ -18,12 +18,12 @@ #include #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/network_state_predictor.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/transport/field_trial_based_config.h" #include "api/transport/network_control.h" #include "api/transport/network_types.h" -#include "api/transport/webrtc_key_value_config.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/timestamp.h" @@ -84,7 +84,7 @@ class GoogCcNetworkController : public NetworkControllerInterface { PacerConfig GetPacingRates(Timestamp at_time) const; const FieldTrialBasedConfig trial_based_config_; - const WebRtcKeyValueConfig* const key_value_config_; + const FieldTrialsView* const key_value_config_; RtcEventLog* const event_log_; const bool packet_feedback_only_; FieldTrialFlag safe_reset_on_route_change_; @@ -94,6 +94,8 @@ class GoogCcNetworkController : public NetworkControllerInterface { const bool limit_probes_lower_than_throughput_estimate_; const RateControlSettings rate_control_settings_; const bool loss_based_stable_rate_; + const bool pace_at_max_of_bwe_and_lower_link_capacity_; + const bool pace_at_loss_based_bwe_when_loss_; const std::unique_ptr probe_controller_; const std::unique_ptr diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.cc index ecd5742272..2d50d08e6a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.cc @@ -71,9 +71,9 @@ bool InterArrivalDelta::ComputeDeltas(Timestamp send_time, ++num_consecutive_reordered_packets_; if (num_consecutive_reordered_packets_ >= kReorderedResetThreshold) { RTC_LOG(LS_WARNING) - << "Packets between send burst arrived out of order, resetting." 
- << " arrival_time_delta" << arrival_time_delta->ms() - << " send time delta " << send_time_delta->ms(); + << "Packets between send burst arrived out of order, resetting:" + << " arrival_time_delta_ms=" << arrival_time_delta->ms() + << ", send_time_delta_ms=" << send_time_delta->ms(); Reset(); } return false; diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.h index 0617e34cdd..4046590eeb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.h @@ -42,7 +42,7 @@ class InterArrivalDelta { // `arrival_time` is the time at which the packet arrived. // `packet_size` is the size of the packet. // `timestamp_delta` (output) is the computed send time delta. - // `arrival_time_delta_ms` (output) is the computed arrival-time delta. + // `arrival_time_delta` (output) is the computed arrival-time delta. // `packet_size_delta` (output) is the computed size delta. bool ComputeDeltas(Timestamp send_time, Timestamp arrival_time, diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc index 33974dc900..7524c84d92 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc @@ -75,7 +75,7 @@ double ExponentialUpdate(TimeDelta window, TimeDelta interval) { return 1.0f - exp(interval / window * -1.0); } -bool IsEnabled(const webrtc::WebRtcKeyValueConfig& key_value_config, +bool IsEnabled(const webrtc::FieldTrialsView& key_value_config, absl::string_view name) { return absl::StartsWith(key_value_config.Lookup(name), "Enabled"); } @@ -83,7 +83,7 @@ bool IsEnabled(const webrtc::WebRtcKeyValueConfig& key_value_config, } // namespace LossBasedControlConfig::LossBasedControlConfig( - const WebRtcKeyValueConfig* key_value_config) + const FieldTrialsView* key_value_config) : enabled(IsEnabled(*key_value_config, kBweLossBasedControl)), min_increase_factor("min_incr", 1.02), max_increase_factor("max_incr", 1.08), @@ -118,7 +118,7 @@ LossBasedControlConfig::LossBasedControlConfig(const LossBasedControlConfig&) = LossBasedControlConfig::~LossBasedControlConfig() = default; LossBasedBandwidthEstimation::LossBasedBandwidthEstimation( - const WebRtcKeyValueConfig* key_value_config) + const FieldTrialsView* key_value_config) : config_(key_value_config), average_loss_(0), average_loss_max_(0), diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h index 20ff092e6f..9f69caba89 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h @@ -13,8 +13,8 @@ #include +#include "api/field_trials_view.h" #include "api/transport/network_types.h" -#include "api/transport/webrtc_key_value_config.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -23,7 +23,7 
@@ namespace webrtc { struct LossBasedControlConfig { - explicit LossBasedControlConfig(const WebRtcKeyValueConfig* key_value_config); + explicit LossBasedControlConfig(const FieldTrialsView* key_value_config); LossBasedControlConfig(const LossBasedControlConfig&); LossBasedControlConfig& operator=(const LossBasedControlConfig&) = default; ~LossBasedControlConfig(); @@ -52,7 +52,7 @@ struct LossBasedControlConfig { class LossBasedBandwidthEstimation { public: explicit LossBasedBandwidthEstimation( - const WebRtcKeyValueConfig* key_value_config); + const FieldTrialsView* key_value_config); // Returns the new estimate. DataRate Update(Timestamp at_time, DataRate min_bitrate, diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc index 44041143bf..e671bd2599 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc @@ -12,6 +12,7 @@ #include #include +#include #include #include #include @@ -21,12 +22,14 @@ #include "absl/algorithm/container.h" #include "absl/types/optional.h" #include "api/array_view.h" +#include "api/field_trials_view.h" +#include "api/network_state_predictor.h" #include "api/transport/network_types.h" -#include "api/transport/webrtc_key_value_config.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "rtc_base/experiments/field_trial_list.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" @@ -89,25 +92,18 @@ double GetLossProbability(double inherent_loss, << ToString(loss_limited_bandwidth); } - // We approximate the loss model - // loss_probability = inherent_loss + (1 - inherent_loss) * - // max(0, sending_rate - bandwidth) / sending_rate - // by - // loss_probability = inherent_loss + - // max(0, sending_rate - bandwidth) / sending_rate - // as it allows for simpler calculations and makes little difference in - // practice. double loss_probability = inherent_loss; if (IsValid(sending_rate) && IsValid(loss_limited_bandwidth) && (sending_rate > loss_limited_bandwidth)) { - loss_probability += (sending_rate - loss_limited_bandwidth) / sending_rate; + loss_probability += (1 - inherent_loss) * + (sending_rate - loss_limited_bandwidth) / sending_rate; } return std::min(std::max(loss_probability, 1.0e-6), 1.0 - 1.0e-6); } } // namespace -LossBasedBweV2::LossBasedBweV2(const WebRtcKeyValueConfig* key_value_config) +LossBasedBweV2::LossBasedBweV2(const FieldTrialsView* key_value_config) : config_(CreateConfig(key_value_config)) { if (!config_.has_value()) { RTC_LOG(LS_VERBOSE) << "The configuration does not specify that the " @@ -138,7 +134,8 @@ bool LossBasedBweV2::IsReady() const { num_observations_ > 0; } -DataRate LossBasedBweV2::GetBandwidthEstimate() const { +DataRate LossBasedBweV2::GetBandwidthEstimate( + DataRate delay_based_limit) const { if (!IsReady()) { if (!IsEnabled()) { RTC_LOG(LS_WARNING) @@ -153,11 +150,17 @@ DataRate LossBasedBweV2::GetBandwidthEstimate() const { "statistics before it can be used."; } } - return DataRate::PlusInfinity(); + return IsValid(delay_based_limit) ? 
delay_based_limit + : DataRate::PlusInfinity(); } - return std::min(current_estimate_.loss_limited_bandwidth, - GetInstantUpperBound()); + if (delay_based_limit.IsFinite()) { + return std::min({current_estimate_.loss_limited_bandwidth, + GetInstantUpperBound(), delay_based_limit}); + } else { + return std::min(current_estimate_.loss_limited_bandwidth, + GetInstantUpperBound()); + } } void LossBasedBweV2::SetAcknowledgedBitrate(DataRate acknowledged_bitrate) { @@ -178,9 +181,19 @@ void LossBasedBweV2::SetBandwidthEstimate(DataRate bandwidth_estimate) { } } +void LossBasedBweV2::SetMinBitrate(DataRate min_bitrate) { + if (IsValid(min_bitrate)) { + min_bitrate_ = min_bitrate; + } else { + RTC_LOG(LS_WARNING) << "The min bitrate must be finite: " + << ToString(min_bitrate); + } +} + void LossBasedBweV2::UpdateBandwidthEstimate( rtc::ArrayView packet_results, - DataRate delay_based_estimate) { + DataRate delay_based_estimate, + BandwidthUsage delay_detector_state) { if (!IsEnabled()) { RTC_LOG(LS_WARNING) << "The estimator must be enabled before it can be used."; @@ -192,7 +205,7 @@ void LossBasedBweV2::UpdateBandwidthEstimate( return; } - if (!PushBackObservation(packet_results)) { + if (!PushBackObservation(packet_results, delay_detector_state)) { return; } @@ -217,13 +230,51 @@ void LossBasedBweV2::UpdateBandwidthEstimate( current_estimate_.loss_limited_bandwidth) { last_time_estimate_reduced_ = last_send_time_most_recent_observation_; } + + // Do not increase the estimate if the average loss is greater than current + // inherent loss. + if (GetAverageReportedLossRatio() > best_candidate.inherent_loss && + config_->not_increase_if_inherent_loss_less_than_average_loss && + current_estimate_.loss_limited_bandwidth < + best_candidate.loss_limited_bandwidth) { + best_candidate.loss_limited_bandwidth = + current_estimate_.loss_limited_bandwidth; + } + + // Bound the estimate increase if: + // 1. The estimate is limited due to loss, and + // 2. The estimate has been increased for less than `delayed_increase_window` + // ago, and + // 3. The best candidate is greater than bandwidth_limit_in_current_window. + if (limited_due_to_loss_candidate_ && + recovering_after_loss_timestamp_.IsFinite() && + recovering_after_loss_timestamp_ + config_->delayed_increase_window > + last_send_time_most_recent_observation_ && + best_candidate.loss_limited_bandwidth > + bandwidth_limit_in_current_window_) { + best_candidate.loss_limited_bandwidth = bandwidth_limit_in_current_window_; + } + limited_due_to_loss_candidate_ = + delay_based_estimate.IsFinite() && + best_candidate.loss_limited_bandwidth < delay_based_estimate; + + if (limited_due_to_loss_candidate_ && + (recovering_after_loss_timestamp_.IsInfinite() || + recovering_after_loss_timestamp_ + config_->delayed_increase_window < + last_send_time_most_recent_observation_)) { + bandwidth_limit_in_current_window_ = std::max( + kCongestionControllerMinBitrate, + best_candidate.loss_limited_bandwidth * config_->max_increase_factor); + recovering_after_loss_timestamp_ = last_send_time_most_recent_observation_; + } + current_estimate_ = best_candidate; } // Returns a `LossBasedBweV2::Config` iff the `key_value_config` specifies a // configuration for the `LossBasedBweV2` which is explicitly enabled. 
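// Editor's sketch, not part of the patch: the configuration built below is
// parsed from the "WebRTC-Bwe-LossBasedBweV2" field trial via
// key_value_config->Lookup(). A minimal, hypothetical example of the key:value
// format consumed by ParseFieldTrial(); the values are made up for
// illustration, not recommended settings.
#include "rtc_base/experiments/field_trial_parser.h"

void ParseExampleLossBweTrial() {
  webrtc::FieldTrialParameter<bool> enabled("Enabled", false);
  webrtc::FieldTrialParameter<double> rampup_factor("BwRampupUpperBoundFactor",
                                                    1.1);
  // In production the string comes from the field trial registry; here it is
  // inlined to show the comma-separated key:value syntax.
  webrtc::ParseFieldTrial({&enabled, &rampup_factor},
                          "Enabled:true,BwRampupUpperBoundFactor:1.2");
}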
absl::optional LossBasedBweV2::CreateConfig( - const WebRtcKeyValueConfig* key_value_config) { + const FieldTrialsView* key_value_config) { FieldTrialParameter enabled("Enabled", false); FieldTrialParameter bandwidth_rampup_upper_bound_factor( "BwRampupUpperBoundFactor", 1.1); @@ -239,6 +290,10 @@ absl::optional LossBasedBweV2::CreateConfig( "HigherLogBwBiasFactor", 0.001); FieldTrialParameter inherent_loss_lower_bound( "InherentLossLowerBound", 1.0e-3); + FieldTrialParameter loss_threshold_of_high_bandwidth_preference( + "LossThresholdOfHighBandwidthPreference", 0.99); + FieldTrialParameter bandwidth_preference_smoothing_factor( + "BandwidthPreferenceSmoothingFactor", 0.002); FieldTrialParameter inherent_loss_upper_bound_bandwidth_balance( "InherentLossUpperBoundBwBalance", DataRate::KilobitsPerSec(15.0)); FieldTrialParameter inherent_loss_upper_bound_offset( @@ -264,7 +319,26 @@ absl::optional LossBasedBweV2::CreateConfig( "InstantUpperBoundLossOffset", 0.05); FieldTrialParameter temporal_weight_factor("TemporalWeightFactor", 0.99); - + FieldTrialParameter bandwidth_backoff_lower_bound_factor( + "BwBackoffLowerBoundFactor", 1.0); + FieldTrialParameter trendline_integration_enabled( + "TrendlineIntegrationEnabled", false); + FieldTrialParameter trendline_observations_window_size( + "TrendlineObservationsWindowSize", 20); + FieldTrialParameter max_increase_factor("MaxIncreaseFactor", 1000.0); + FieldTrialParameter delayed_increase_window( + "DelayedIncreaseWindow", TimeDelta::Millis(300)); + FieldTrialParameter use_acked_bitrate_only_when_overusing( + "UseAckedBitrateOnlyWhenOverusing", false); + FieldTrialParameter + not_increase_if_inherent_loss_less_than_average_loss( + "NotIncreaseIfInherentLossLessThanAverageLoss", false); + FieldTrialParameter high_loss_rate_threshold("HighLossRateThreshold", + 1.0); + FieldTrialParameter bandwidth_cap_at_high_loss_rate( + "BandwidthCapAtHighLossRate", DataRate::KilobitsPerSec(500.0)); + FieldTrialParameter slope_of_bwe_high_loss_func( + "SlopeOfBweHighLossFunc", 1000); if (key_value_config) { ParseFieldTrial({&enabled, &bandwidth_rampup_upper_bound_factor, @@ -274,6 +348,8 @@ absl::optional LossBasedBweV2::CreateConfig( &higher_bandwidth_bias_factor, &higher_log_bandwidth_bias_factor, &inherent_loss_lower_bound, + &loss_threshold_of_high_bandwidth_preference, + &bandwidth_preference_smoothing_factor, &inherent_loss_upper_bound_bandwidth_balance, &inherent_loss_upper_bound_offset, &initial_inherent_loss_estimate, @@ -287,7 +363,17 @@ absl::optional LossBasedBweV2::CreateConfig( &instant_upper_bound_temporal_weight_factor, &instant_upper_bound_bandwidth_balance, &instant_upper_bound_loss_offset, - &temporal_weight_factor}, + &temporal_weight_factor, + &bandwidth_backoff_lower_bound_factor, + &trendline_integration_enabled, + &trendline_observations_window_size, + &max_increase_factor, + &delayed_increase_window, + &use_acked_bitrate_only_when_overusing, + ¬_increase_if_inherent_loss_less_than_average_loss, + &high_loss_rate_threshold, + &bandwidth_cap_at_high_loss_rate, + &slope_of_bwe_high_loss_func}, key_value_config->Lookup("WebRTC-Bwe-LossBasedBweV2")); } @@ -306,6 +392,10 @@ absl::optional LossBasedBweV2::CreateConfig( config->higher_log_bandwidth_bias_factor = higher_log_bandwidth_bias_factor.Get(); config->inherent_loss_lower_bound = inherent_loss_lower_bound.Get(); + config->loss_threshold_of_high_bandwidth_preference = + loss_threshold_of_high_bandwidth_preference.Get(); + config->bandwidth_preference_smoothing_factor = + 
bandwidth_preference_smoothing_factor.Get(); config->inherent_loss_upper_bound_bandwidth_balance = inherent_loss_upper_bound_bandwidth_balance.Get(); config->inherent_loss_upper_bound_offset = @@ -328,6 +418,21 @@ absl::optional LossBasedBweV2::CreateConfig( config->instant_upper_bound_loss_offset = instant_upper_bound_loss_offset.Get(); config->temporal_weight_factor = temporal_weight_factor.Get(); + config->bandwidth_backoff_lower_bound_factor = + bandwidth_backoff_lower_bound_factor.Get(); + config->trendline_integration_enabled = trendline_integration_enabled.Get(); + config->trendline_observations_window_size = + trendline_observations_window_size.Get(); + config->max_increase_factor = max_increase_factor.Get(); + config->delayed_increase_window = delayed_increase_window.Get(); + config->use_acked_bitrate_only_when_overusing = + use_acked_bitrate_only_when_overusing.Get(); + config->not_increase_if_inherent_loss_less_than_average_loss = + not_increase_if_inherent_loss_less_than_average_loss.Get(); + config->high_loss_rate_threshold = high_loss_rate_threshold.Get(); + config->bandwidth_cap_at_high_loss_rate = + bandwidth_cap_at_high_loss_rate.Get(); + config->slope_of_bwe_high_loss_func = slope_of_bwe_high_loss_func.Get(); return config; } @@ -390,6 +495,20 @@ bool LossBasedBweV2::IsConfigValid() const { << config_->inherent_loss_lower_bound; valid = false; } + if (config_->loss_threshold_of_high_bandwidth_preference < 0.0 || + config_->loss_threshold_of_high_bandwidth_preference >= 1.0) { + RTC_LOG(LS_WARNING) + << "The loss threshold of high bandwidth preference must be in [0, 1): " + << config_->loss_threshold_of_high_bandwidth_preference; + valid = false; + } + if (config_->bandwidth_preference_smoothing_factor <= 0.0 || + config_->bandwidth_preference_smoothing_factor > 1.0) { + RTC_LOG(LS_WARNING) + << "The bandwidth preference smoothing factor must be in (0, 1]: " + << config_->bandwidth_preference_smoothing_factor; + valid = false; + } if (config_->inherent_loss_upper_bound_bandwidth_balance <= DataRate::Zero()) { RTC_LOG(LS_WARNING) @@ -470,7 +589,34 @@ bool LossBasedBweV2::IsConfigValid() const { << config_->temporal_weight_factor; valid = false; } - + if (config_->bandwidth_backoff_lower_bound_factor > 1.0) { + RTC_LOG(LS_WARNING) + << "The bandwidth backoff lower bound factor must not be greater than " + "1: " + << config_->bandwidth_backoff_lower_bound_factor; + valid = false; + } + if (config_->trendline_observations_window_size < 1) { + RTC_LOG(LS_WARNING) << "The trendline window size must be at least 1: " + << config_->trendline_observations_window_size; + valid = false; + } + if (config_->max_increase_factor <= 0.0) { + RTC_LOG(LS_WARNING) << "The maximum increase factor must be positive: " + << config_->max_increase_factor; + valid = false; + } + if (config_->delayed_increase_window <= TimeDelta::Zero()) { + RTC_LOG(LS_WARNING) << "The delayed increase window must be positive: " + << config_->delayed_increase_window.ms(); + valid = false; + } + if (config_->high_loss_rate_threshold <= 0.0 || + config_->high_loss_rate_threshold > 1.0) { + RTC_LOG(LS_WARNING) << "The high loss rate threshold must be in (0, 1]: " + << config_->high_loss_rate_threshold; + valid = false; + } return valid; } @@ -479,8 +625,8 @@ double LossBasedBweV2::GetAverageReportedLossRatio() const { return 0.0; } - int num_packets = 0; - int num_lost_packets = 0; + double num_packets = 0; + double num_lost_packets = 0; for (const Observation& observation : observations_) { if 
(!observation.IsInitialized()) { continue; @@ -493,15 +639,35 @@ double LossBasedBweV2::GetAverageReportedLossRatio() const { num_lost_packets += instant_temporal_weight * observation.num_lost_packets; } - return static_cast(num_lost_packets) / num_packets; + return num_lost_packets / num_packets; } -DataRate LossBasedBweV2::GetCandidateBandwidthUpperBound() const { +DataRate LossBasedBweV2::GetCandidateBandwidthUpperBound( + DataRate delay_based_estimate) const { + DataRate candidate_bandwidth_upper_bound = DataRate::PlusInfinity(); + if (limited_due_to_loss_candidate_) { + candidate_bandwidth_upper_bound = bandwidth_limit_in_current_window_; + } + + if (config_->trendline_integration_enabled) { + candidate_bandwidth_upper_bound = + std::min(GetInstantUpperBound(), candidate_bandwidth_upper_bound); + if (IsValid(delay_based_estimate)) { + candidate_bandwidth_upper_bound = + std::min(delay_based_estimate, candidate_bandwidth_upper_bound); + } + } + if (!acknowledged_bitrate_.has_value()) - return DataRate::PlusInfinity(); + return candidate_bandwidth_upper_bound; - DataRate candidate_bandwidth_upper_bound = - config_->bandwidth_rampup_upper_bound_factor * (*acknowledged_bitrate_); + candidate_bandwidth_upper_bound = + IsValid(candidate_bandwidth_upper_bound) + ? std::min(candidate_bandwidth_upper_bound, + config_->bandwidth_rampup_upper_bound_factor * + (*acknowledged_bitrate_)) + : config_->bandwidth_rampup_upper_bound_factor * + (*acknowledged_bitrate_); if (config_->rampup_acceleration_max_factor > 0.0) { const TimeDelta time_since_bandwidth_reduced = std::min( @@ -521,31 +687,45 @@ DataRate LossBasedBweV2::GetCandidateBandwidthUpperBound() const { std::vector LossBasedBweV2::GetCandidates( DataRate delay_based_estimate) const { std::vector bandwidths; + bool can_increase_bitrate = TrendlineEsimateAllowBitrateIncrease(); for (double candidate_factor : config_->candidate_factors) { + if (!can_increase_bitrate && candidate_factor > 1.0) { + continue; + } bandwidths.push_back(candidate_factor * current_estimate_.loss_limited_bandwidth); } if (acknowledged_bitrate_.has_value() && - config_->append_acknowledged_rate_candidate) { - bandwidths.push_back(*acknowledged_bitrate_); + config_->append_acknowledged_rate_candidate && + TrendlineEsimateAllowEmergencyBackoff()) { + bandwidths.push_back(*acknowledged_bitrate_ * + config_->bandwidth_backoff_lower_bound_factor); } if (IsValid(delay_based_estimate) && config_->append_delay_based_estimate_candidate) { - bandwidths.push_back(delay_based_estimate); + if (can_increase_bitrate && + delay_based_estimate > current_estimate_.loss_limited_bandwidth) { + bandwidths.push_back(delay_based_estimate); + } } const DataRate candidate_bandwidth_upper_bound = - GetCandidateBandwidthUpperBound(); + GetCandidateBandwidthUpperBound(delay_based_estimate); std::vector candidates; candidates.resize(bandwidths.size()); for (size_t i = 0; i < bandwidths.size(); ++i) { ChannelParameters candidate = current_estimate_; - candidate.loss_limited_bandwidth = std::min( - bandwidths[i], std::max(current_estimate_.loss_limited_bandwidth, - candidate_bandwidth_upper_bound)); + if (config_->trendline_integration_enabled) { + candidate.loss_limited_bandwidth = + std::min(bandwidths[i], candidate_bandwidth_upper_bound); + } else { + candidate.loss_limited_bandwidth = std::min( + bandwidths[i], std::max(current_estimate_.loss_limited_bandwidth, + candidate_bandwidth_upper_bound)); + } candidate.inherent_loss = GetFeasibleInherentLoss(candidate); candidates[i] = candidate; } @@ 
-608,10 +788,23 @@ double LossBasedBweV2::GetInherentLossUpperBound(DataRate bandwidth) const { return std::min(inherent_loss_upper_bound, 1.0); } +double LossBasedBweV2::AdjustBiasFactor(double loss_rate, + double bias_factor) const { + return bias_factor * + (config_->loss_threshold_of_high_bandwidth_preference - loss_rate) / + (config_->bandwidth_preference_smoothing_factor + + std::abs(config_->loss_threshold_of_high_bandwidth_preference - + loss_rate)); +} + double LossBasedBweV2::GetHighBandwidthBias(DataRate bandwidth) const { if (IsValid(bandwidth)) { - return config_->higher_bandwidth_bias_factor * bandwidth.kbps() + - config_->higher_log_bandwidth_bias_factor * + const double average_reported_loss_ratio = GetAverageReportedLossRatio(); + return AdjustBiasFactor(average_reported_loss_ratio, + config_->higher_bandwidth_bias_factor) * + bandwidth.kbps() + + AdjustBiasFactor(average_reported_loss_ratio, + config_->higher_log_bandwidth_bias_factor) * std::log(1.0 + bandwidth.kbps()); } return 0.0; @@ -677,7 +870,16 @@ void LossBasedBweV2::CalculateInstantUpperBound() { instant_limit = config_->instant_upper_bound_bandwidth_balance / (average_reported_loss_ratio - config_->instant_upper_bound_loss_offset); + if (average_reported_loss_ratio > config_->high_loss_rate_threshold) { + instant_limit = std::min( + instant_limit, DataRate::KilobitsPerSec(std::max( + static_cast(min_bitrate_.kbps()), + config_->bandwidth_cap_at_high_loss_rate.kbps() - + config_->slope_of_bwe_high_loss_func * + average_reported_loss_ratio))); + } } + cached_instant_upper_bound_ = instant_limit; } @@ -704,8 +906,47 @@ void LossBasedBweV2::NewtonsMethodUpdate( } } +bool LossBasedBweV2::TrendlineEsimateAllowBitrateIncrease() const { + if (!config_->trendline_integration_enabled) { + return true; + } + + for (const auto& detector_state : delay_detector_states_) { + if (detector_state == BandwidthUsage::kBwOverusing || + detector_state == BandwidthUsage::kBwUnderusing) { + return false; + } + } + return true; +} + +bool LossBasedBweV2::TrendlineEsimateAllowEmergencyBackoff() const { + if (!config_->trendline_integration_enabled) { + return true; + } + + if (!config_->use_acked_bitrate_only_when_overusing) { + return true; + } + + for (const auto& detector_state : delay_detector_states_) { + if (detector_state == BandwidthUsage::kBwOverusing) { + return true; + } + } + + return false; +} + bool LossBasedBweV2::PushBackObservation( - rtc::ArrayView packet_results) { + rtc::ArrayView packet_results, + BandwidthUsage delay_detector_state) { + delay_detector_states_.push_front(delay_detector_state); + if (static_cast(delay_detector_states_.size()) > + config_->trendline_observations_window_size) { + delay_detector_states_.pop_back(); + } + if (packet_results.empty()) { return false; } @@ -727,9 +968,11 @@ bool LossBasedBweV2::PushBackObservation( const Timestamp last_send_time = packet_results_summary.last_send_time; const TimeDelta observation_duration = last_send_time - last_send_time_most_recent_observation_; - // Too small to be meaningful. 
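// Editor's sketch, not part of the patch: the UpdateBandwidthEstimate() and
// GetCandidateBandwidthUpperBound() hunks above bound how quickly the
// loss-limited estimate may recover. While the candidate is loss-limited
// (below the delay-based estimate) and still inside `delayed_increase_window`
// after recovery started, it is capped at the bandwidth limit recorded for the
// current window. A condensed, illustrative version:
#include <algorithm>
#include "api/units/data_rate.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"

webrtc::DataRate CapDelayedIncrease(webrtc::DataRate candidate,
                                    webrtc::DataRate limit_in_window,
                                    webrtc::Timestamp last_observation,
                                    webrtc::Timestamp recovery_start,
                                    webrtc::TimeDelta delayed_increase_window) {
  const bool inside_window =
      recovery_start.IsFinite() &&
      recovery_start + delayed_increase_window > last_observation;
  return inside_window ? std::min(candidate, limit_in_window) : candidate;
}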
- if (observation_duration < config_->observation_duration_lower_bound) { + if (observation_duration <= TimeDelta::Zero() || + (observation_duration < config_->observation_duration_lower_bound && + (delay_detector_state != BandwidthUsage::kBwOverusing || + !config_->trendline_integration_enabled))) { return false; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h index f764892b55..88cae01418 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h @@ -12,12 +12,14 @@ #define MODULES_CONGESTION_CONTROLLER_GOOG_CC_LOSS_BASED_BWE_V2_H_ #include +#include #include #include "absl/types/optional.h" #include "api/array_view.h" +#include "api/field_trials_view.h" +#include "api/network_state_predictor.h" #include "api/transport/network_types.h" -#include "api/transport/webrtc_key_value_config.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" @@ -29,7 +31,7 @@ class LossBasedBweV2 { public: // Creates a disabled `LossBasedBweV2` if the // `key_value_config` is not valid. - explicit LossBasedBweV2(const WebRtcKeyValueConfig* key_value_config); + explicit LossBasedBweV2(const FieldTrialsView* key_value_config); LossBasedBweV2(const LossBasedBweV2&) = delete; LossBasedBweV2& operator=(const LossBasedBweV2&) = delete; @@ -42,14 +44,15 @@ class LossBasedBweV2 { bool IsReady() const; // Returns `DataRate::PlusInfinity` if no BWE can be calculated. - DataRate GetBandwidthEstimate() const; + DataRate GetBandwidthEstimate(DataRate delay_based_limit) const; void SetAcknowledgedBitrate(DataRate acknowledged_bitrate); void SetBandwidthEstimate(DataRate bandwidth_estimate); - + void SetMinBitrate(DataRate min_bitrate); void UpdateBandwidthEstimate( rtc::ArrayView packet_results, - DataRate delay_based_estimate); + DataRate delay_based_estimate, + BandwidthUsage delay_detector_state); private: struct ChannelParameters { @@ -65,6 +68,8 @@ class LossBasedBweV2 { double higher_bandwidth_bias_factor = 0.0; double higher_log_bandwidth_bias_factor = 0.0; double inherent_loss_lower_bound = 0.0; + double loss_threshold_of_high_bandwidth_preference = 0.0; + double bandwidth_preference_smoothing_factor = 0.0; DataRate inherent_loss_upper_bound_bandwidth_balance = DataRate::MinusInfinity(); double inherent_loss_upper_bound_offset = 0.0; @@ -80,6 +85,16 @@ class LossBasedBweV2 { DataRate instant_upper_bound_bandwidth_balance = DataRate::MinusInfinity(); double instant_upper_bound_loss_offset = 0.0; double temporal_weight_factor = 0.0; + double bandwidth_backoff_lower_bound_factor = 0.0; + bool trendline_integration_enabled = false; + int trendline_observations_window_size = 0; + double max_increase_factor = 0.0; + TimeDelta delayed_increase_window = TimeDelta::Zero(); + bool use_acked_bitrate_only_when_overusing = false; + bool not_increase_if_inherent_loss_less_than_average_loss = false; + double high_loss_rate_threshold = 1.0; + DataRate bandwidth_cap_at_high_loss_rate = DataRate::MinusInfinity(); + double slope_of_bwe_high_loss_func = 1000.0; }; struct Derivatives { @@ -104,18 +119,19 @@ class LossBasedBweV2 { }; static absl::optional CreateConfig( - const WebRtcKeyValueConfig* key_value_config); + const FieldTrialsView* key_value_config); bool IsConfigValid() const; // Returns `0.0` if not 
enough loss statistics have been received. double GetAverageReportedLossRatio() const; std::vector GetCandidates( DataRate delay_based_estimate) const; - DataRate GetCandidateBandwidthUpperBound() const; + DataRate GetCandidateBandwidthUpperBound(DataRate delay_based_estimate) const; Derivatives GetDerivatives(const ChannelParameters& channel_parameters) const; double GetFeasibleInherentLoss( const ChannelParameters& channel_parameters) const; double GetInherentLossUpperBound(DataRate bandwidth) const; + double AdjustBiasFactor(double loss_rate, double bias_factor) const; double GetHighBandwidthBias(DataRate bandwidth) const; double GetObjective(const ChannelParameters& channel_parameters) const; DataRate GetSendingRate(DataRate instantaneous_sending_rate) const; @@ -125,8 +141,20 @@ class LossBasedBweV2 { void CalculateTemporalWeights(); void NewtonsMethodUpdate(ChannelParameters& channel_parameters) const; + // Returns false if there exists a kBwOverusing or kBwUnderusing in the + // window. + bool TrendlineEsimateAllowBitrateIncrease() const; + + // Returns true if there exists an overusing state in the window. + bool TrendlineEsimateAllowEmergencyBackoff() const; + // Returns false if no observation was created. - bool PushBackObservation(rtc::ArrayView packet_results); + bool PushBackObservation(rtc::ArrayView packet_results, + BandwidthUsage delay_detector_state); + void UpdateTrendlineEstimator( + const std::vector& packet_feedbacks, + Timestamp at_time); + void UpdateDelayDetector(BandwidthUsage delay_detector_state); absl::optional acknowledged_bitrate_; absl::optional config_; @@ -139,6 +167,11 @@ class LossBasedBweV2 { absl::optional cached_instant_upper_bound_; std::vector instant_upper_bound_temporal_weights_; std::vector temporal_weights_; + std::deque delay_detector_states_; + Timestamp recovering_after_loss_timestamp_ = Timestamp::MinusInfinity(); + DataRate bandwidth_limit_in_current_window_ = DataRate::PlusInfinity(); + bool limited_due_to_loss_candidate_ = false; + DataRate min_bitrate_ = DataRate::KilobitsPerSec(1); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.cc index cb75456fde..501f14b874 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.cc @@ -16,52 +16,43 @@ #include #include "absl/strings/match.h" +#include "absl/types/optional.h" #include "api/units/data_rate.h" +#include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" #include "system_wrappers/include/metrics.h" namespace webrtc { namespace { -// The minimum number probing packets used. -constexpr int kMinProbePacketsSent = 5; - -// The minimum probing duration in ms. -constexpr int kMinProbeDurationMs = 15; - // Maximum waiting time from the time of initiating probing to getting // the measured results back. -constexpr int64_t kMaxWaitingTimeForProbingResultMs = 1000; - -// Value of `min_bitrate_to_probe_further_bps_` that indicates -// further probing is disabled. 
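// Editor's sketch, not part of the patch: the probe controller hunks below
// replace raw int64_t bitrates (bps) and times (ms) with the unit types
// DataRate, TimeDelta, Timestamp and DataSize. The arithmetic is unchanged but
// becomes type-checked; e.g. rate * duration yields a size, as used in
// MaybeLogProbeClusterCreated() below. The values reuse constants from the
// patch purely for illustration.
#include "api/units/data_rate.h"
#include "api/units/data_size.h"
#include "api/units/time_delta.h"

void ProbeUnitsExample() {
  const webrtc::DataRate max_probing = webrtc::DataRate::KilobitsPerSec(5000);
  const webrtc::TimeDelta probe_duration = webrtc::TimeDelta::Millis(15);
  // DataRate * TimeDelta -> DataSize, replacing the old bps * ms / 8000 math.
  const webrtc::DataSize min_probe_size = max_probing * probe_duration;
  (void)min_probe_size;  // 9375 bytes for 5000 kbps over 15 ms.
}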
-constexpr int kExponentialProbingDisabled = 0; +constexpr TimeDelta kMaxWaitingTimeForProbingResult = TimeDelta::Seconds(1); // Default probing bitrate limit. Applied only when the application didn't // specify max bitrate. -constexpr int64_t kDefaultMaxProbingBitrateBps = 5000000; +constexpr DataRate kDefaultMaxProbingBitrate = DataRate::KilobitsPerSec(5000); // If the bitrate drops to a factor `kBitrateDropThreshold` or lower // and we recover within `kBitrateDropTimeoutMs`, then we'll send // a probe at a fraction `kProbeFractionAfterDrop` of the original bitrate. constexpr double kBitrateDropThreshold = 0.66; -constexpr int kBitrateDropTimeoutMs = 5000; +constexpr TimeDelta kBitrateDropTimeout = TimeDelta::Seconds(5); constexpr double kProbeFractionAfterDrop = 0.85; // Timeout for probing after leaving ALR. If the bitrate drops significantly, // (as determined by the delay based estimator) and we leave ALR, then we will // send a probe if we recover within `kLeftAlrTimeoutMs` ms. -constexpr int kAlrEndedTimeoutMs = 3000; +constexpr TimeDelta kAlrEndedTimeout = TimeDelta::Seconds(3); // The expected uncertainty of probe result (as a fraction of the target probe // This is a limit on how often probing can be done when there is a BW // drop detected in ALR. -constexpr int64_t kMinTimeBetweenAlrProbesMs = 5000; +constexpr TimeDelta kMinTimeBetweenAlrProbes = TimeDelta::Seconds(5); // bitrate). Used to avoid probing if the probe bitrate is close to our current // estimate. @@ -71,9 +62,6 @@ constexpr double kProbeUncertainty = 0.05; constexpr char kBweRapidRecoveryExperiment[] = "WebRTC-BweRapidRecoveryExperiment"; -// Never probe higher than configured by OnMaxTotalAllocatedBitrate(). -constexpr char kCappedProbingFieldTrialName[] = "WebRTC-BweCappedProbing"; - void MaybeLogProbeClusterCreated(RtcEventLog* event_log, const ProbeClusterConfig& probe) { RTC_DCHECK(event_log); @@ -81,32 +69,53 @@ void MaybeLogProbeClusterCreated(RtcEventLog* event_log, return; } - size_t min_bytes = static_cast(probe.target_data_rate.bps() * - probe.target_duration.ms() / 8000); + DataSize min_data_size = probe.target_data_rate * probe.target_duration; event_log->Log(std::make_unique( probe.id, probe.target_data_rate.bps(), probe.target_probe_count, - min_bytes)); + min_data_size.bytes())); } } // namespace ProbeControllerConfig::ProbeControllerConfig( - const WebRtcKeyValueConfig* key_value_config) + const FieldTrialsView* key_value_config) : first_exponential_probe_scale("p1", 3.0), second_exponential_probe_scale("p2", 6.0), further_exponential_probe_scale("step_size", 2), further_probe_threshold("further_probe_threshold", 0.7), alr_probing_interval("alr_interval", TimeDelta::Seconds(5)), alr_probe_scale("alr_scale", 2), + network_state_estimate_probing_interval("network_state_interval", + TimeDelta::PlusInfinity()), + network_state_estimate_fast_rampup_rate("network_state_fast_rampup_rate", + 0), + network_state_estimate_drop_down_rate("network_state_drop_down_rate", 0), + network_state_probe_scale("network_state_scale", 1.0), + network_state_probe_duration("network_state_probe_duration", + TimeDelta::Millis(15)), + first_allocation_probe_scale("alloc_p1", 1), second_allocation_probe_scale("alloc_p2", 2), allocation_allow_further_probing("alloc_probe_further", false), - allocation_probe_max("alloc_probe_max", DataRate::PlusInfinity()) { + allocation_probe_max("alloc_probe_max", DataRate::PlusInfinity()), + min_probe_packets_sent("min_probe_packets_sent", 5), + min_probe_duration("min_probe_duration", 
TimeDelta::Millis(15)), + limit_probe_target_rate_to_loss_bwe("limit_probe_target_rate_to_loss_bwe", + false), + skip_if_estimate_larger_than_fraction_of_max( + "skip_if_est_larger_than_fraction_of_max", + 0.0) { ParseFieldTrial( {&first_exponential_probe_scale, &second_exponential_probe_scale, &further_exponential_probe_scale, &further_probe_threshold, &alr_probing_interval, &alr_probe_scale, &first_allocation_probe_scale, - &second_allocation_probe_scale, &allocation_allow_further_probing}, + &second_allocation_probe_scale, &allocation_allow_further_probing, + &min_probe_duration, &network_state_estimate_probing_interval, + &network_state_estimate_fast_rampup_rate, + &network_state_estimate_drop_down_rate, &network_state_probe_scale, + &network_state_probe_duration, &min_probe_packets_sent, + &limit_probe_target_rate_to_loss_bwe, + &skip_if_estimate_larger_than_fraction_of_max}, key_value_config->Lookup("WebRTC-Bwe-ProbingConfiguration")); // Specialized keys overriding subsets of WebRTC-Bwe-ProbingConfiguration @@ -121,49 +130,49 @@ ProbeControllerConfig::ProbeControllerConfig( {&first_allocation_probe_scale, &second_allocation_probe_scale, &allocation_allow_further_probing, &allocation_probe_max}, key_value_config->Lookup("WebRTC-Bwe-AllocationProbing")); + ParseFieldTrial({&min_probe_packets_sent, &min_probe_duration}, + key_value_config->Lookup("WebRTC-Bwe-ProbingBehavior")); } ProbeControllerConfig::ProbeControllerConfig(const ProbeControllerConfig&) = default; ProbeControllerConfig::~ProbeControllerConfig() = default; -ProbeController::ProbeController(const WebRtcKeyValueConfig* key_value_config, +ProbeController::ProbeController(const FieldTrialsView* key_value_config, RtcEventLog* event_log) : enable_periodic_alr_probing_(false), in_rapid_recovery_experiment_(absl::StartsWith( key_value_config->Lookup(kBweRapidRecoveryExperiment), "Enabled")), - limit_probes_with_allocateable_rate_(!absl::StartsWith( - key_value_config->Lookup(kCappedProbingFieldTrialName), - "Disabled")), event_log_(event_log), config_(ProbeControllerConfig(key_value_config)) { - Reset(0); + Reset(Timestamp::Zero()); } ProbeController::~ProbeController() {} std::vector ProbeController::SetBitrates( - int64_t min_bitrate_bps, - int64_t start_bitrate_bps, - int64_t max_bitrate_bps, - int64_t at_time_ms) { - if (start_bitrate_bps > 0) { - start_bitrate_bps_ = start_bitrate_bps; - estimated_bitrate_bps_ = start_bitrate_bps; - } else if (start_bitrate_bps_ == 0) { - start_bitrate_bps_ = min_bitrate_bps; + DataRate min_bitrate, + DataRate start_bitrate, + DataRate max_bitrate, + Timestamp at_time) { + if (start_bitrate > DataRate::Zero()) { + start_bitrate_ = start_bitrate; + estimated_bitrate_ = start_bitrate; + } else if (start_bitrate_.IsZero()) { + start_bitrate_ = min_bitrate; } // The reason we use the variable `old_max_bitrate_pbs` is because we - // need to set `max_bitrate_bps_` before we call InitiateProbing. - int64_t old_max_bitrate_bps = max_bitrate_bps_; - max_bitrate_bps_ = max_bitrate_bps; + // need to set `max_bitrate_` before we call InitiateProbing. + DataRate old_max_bitrate = max_bitrate_; + max_bitrate_ = + max_bitrate.IsFinite() ? 
max_bitrate : kDefaultMaxProbingBitrate; switch (state_) { case State::kInit: if (network_available_) - return InitiateExponentialProbing(at_time_ms); + return InitiateExponentialProbing(at_time); break; case State::kWaitingForProbingResult: @@ -172,21 +181,20 @@ std::vector ProbeController::SetBitrates( case State::kProbingComplete: // If the new max bitrate is higher than both the old max bitrate and the // estimate then initiate probing. - if (estimated_bitrate_bps_ != 0 && - old_max_bitrate_bps < max_bitrate_bps_ && - estimated_bitrate_bps_ < max_bitrate_bps_) { + if (!estimated_bitrate_.IsZero() && old_max_bitrate < max_bitrate_ && + estimated_bitrate_ < max_bitrate_) { // The assumption is that if we jump more than 20% in the bandwidth // estimate or if the bandwidth estimate is within 90% of the new // max bitrate then the probing attempt was successful. mid_call_probing_succcess_threshold_ = - std::min(estimated_bitrate_bps_ * 1.2, max_bitrate_bps_ * 0.9); + std::min(estimated_bitrate_ * 1.2, max_bitrate_ * 0.9); mid_call_probing_waiting_for_result_ = true; - mid_call_probing_bitrate_bps_ = max_bitrate_bps_; + mid_call_probing_bitrate_ = max_bitrate_; RTC_HISTOGRAM_COUNTS_10000("WebRTC.BWE.MidCallProbing.Initiated", - max_bitrate_bps_ / 1000); + max_bitrate_.kbps()); - return InitiateProbing(at_time_ms, {max_bitrate_bps_}, false); + return InitiateProbing(at_time, {max_bitrate_}, false); } break; } @@ -194,38 +202,36 @@ std::vector ProbeController::SetBitrates( } std::vector ProbeController::OnMaxTotalAllocatedBitrate( - int64_t max_total_allocated_bitrate, - int64_t at_time_ms) { - const bool in_alr = alr_start_time_ms_.has_value(); + DataRate max_total_allocated_bitrate, + Timestamp at_time) { + const bool in_alr = alr_start_time_.has_value(); const bool allow_allocation_probe = in_alr; if (state_ == State::kProbingComplete && max_total_allocated_bitrate != max_total_allocated_bitrate_ && - estimated_bitrate_bps_ != 0 && - (max_bitrate_bps_ <= 0 || estimated_bitrate_bps_ < max_bitrate_bps_) && - estimated_bitrate_bps_ < max_total_allocated_bitrate && + estimated_bitrate_ < max_bitrate_ && + estimated_bitrate_ < max_total_allocated_bitrate && allow_allocation_probe) { max_total_allocated_bitrate_ = max_total_allocated_bitrate; if (!config_.first_allocation_probe_scale) return std::vector(); - DataRate first_probe_rate = - DataRate::BitsPerSec(max_total_allocated_bitrate) * - config_.first_allocation_probe_scale.Value(); + DataRate first_probe_rate = max_total_allocated_bitrate * + config_.first_allocation_probe_scale.Value(); DataRate probe_cap = config_.allocation_probe_max.Get(); first_probe_rate = std::min(first_probe_rate, probe_cap); - std::vector probes = {first_probe_rate.bps()}; + std::vector probes = {first_probe_rate}; if (config_.second_allocation_probe_scale) { DataRate second_probe_rate = - DataRate::BitsPerSec(max_total_allocated_bitrate) * + max_total_allocated_bitrate * config_.second_allocation_probe_scale.Value(); second_probe_rate = std::min(second_probe_rate, probe_cap); if (second_probe_rate > first_probe_rate) - probes.push_back(second_probe_rate.bps()); + probes.push_back(second_probe_rate); } - return InitiateProbing(at_time_ms, probes, - config_.allocation_allow_further_probing); + return InitiateProbing(at_time, probes, + config_.allocation_allow_further_probing.Get()); } max_total_allocated_bitrate_ = max_total_allocated_bitrate; return std::vector(); @@ -237,66 +243,69 @@ std::vector ProbeController::OnNetworkAvailability( if (!network_available_ && 
state_ == State::kWaitingForProbingResult) { state_ = State::kProbingComplete; - min_bitrate_to_probe_further_bps_ = kExponentialProbingDisabled; + min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); } - if (network_available_ && state_ == State::kInit && start_bitrate_bps_ > 0) - return InitiateExponentialProbing(msg.at_time.ms()); + if (network_available_ && state_ == State::kInit && !start_bitrate_.IsZero()) + return InitiateExponentialProbing(msg.at_time); return std::vector(); } std::vector ProbeController::InitiateExponentialProbing( - int64_t at_time_ms) { + Timestamp at_time) { RTC_DCHECK(network_available_); RTC_DCHECK(state_ == State::kInit); - RTC_DCHECK_GT(start_bitrate_bps_, 0); + RTC_DCHECK_GT(start_bitrate_, DataRate::Zero()); // When probing at 1.8 Mbps ( 6x 300), this represents a threshold of // 1.2 Mbps to continue probing. - std::vector probes = {static_cast( - config_.first_exponential_probe_scale * start_bitrate_bps_)}; - if (config_.second_exponential_probe_scale) { + std::vector probes = {config_.first_exponential_probe_scale * + start_bitrate_}; + if (config_.second_exponential_probe_scale && + config_.second_exponential_probe_scale.GetOptional().value() > 0) { probes.push_back(config_.second_exponential_probe_scale.Value() * - start_bitrate_bps_); + start_bitrate_); } - return InitiateProbing(at_time_ms, probes, true); + return InitiateProbing(at_time, probes, true); } std::vector ProbeController::SetEstimatedBitrate( - int64_t bitrate_bps, - int64_t at_time_ms) { + DataRate bitrate, + bool bwe_limited_due_to_packet_loss, + Timestamp at_time) { + if (bwe_limited_due_to_packet_loss != bwe_limited_due_to_packet_loss_ && + config_.limit_probe_target_rate_to_loss_bwe) { + state_ = State::kProbingComplete; + } + bwe_limited_due_to_packet_loss_ = bwe_limited_due_to_packet_loss; + if (bitrate < kBitrateDropThreshold * estimated_bitrate_) { + time_of_last_large_drop_ = at_time; + bitrate_before_last_large_drop_ = estimated_bitrate_; + } + estimated_bitrate_ = bitrate; + if (mid_call_probing_waiting_for_result_ && - bitrate_bps >= mid_call_probing_succcess_threshold_) { + bitrate >= mid_call_probing_succcess_threshold_) { RTC_HISTOGRAM_COUNTS_10000("WebRTC.BWE.MidCallProbing.Success", - mid_call_probing_bitrate_bps_ / 1000); + mid_call_probing_bitrate_.kbps()); RTC_HISTOGRAM_COUNTS_10000("WebRTC.BWE.MidCallProbing.ProbedKbps", - bitrate_bps / 1000); + bitrate.kbps()); mid_call_probing_waiting_for_result_ = false; } std::vector pending_probes; if (state_ == State::kWaitingForProbingResult) { // Continue probing if probing results indicate channel has greater // capacity. 
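With kExponentialProbingDisabled gone, min_bitrate_to_probe_further_ uses DataRate::PlusInfinity() as its "probing disabled" value, so the comparison that follows covers both the disabled and the enabled case. A stand-alone sketch of that decision with plain doubles in bps (the scale value 2 is the step_size default from this file; the helper name is made up for illustration):

    #include <optional>

    // Illustrative stand-in for the "probe further?" check in
    // ProbeController::SetEstimatedBitrate. Rates are plain doubles in bps here;
    // the real code uses webrtc::DataRate, with PlusInfinity() meaning disabled.
    std::optional<double> NextProbeTargetBps(double measured_bps,
                                             double min_bitrate_to_probe_further_bps,
                                             double further_exponential_probe_scale) {
      // An infinite threshold compares greater than any finite estimate, so a
      // single comparison also handles the "further probing disabled" case.
      if (measured_bps > min_bitrate_to_probe_further_bps) {
        return further_exponential_probe_scale * measured_bps;  // "step_size" * estimate
      }
      return std::nullopt;
    }
    // Example: NextProbeTargetBps(1.5e6, 1.26e6, 2.0) suggests a 3 Mbps probe;
    // with an infinite threshold it suggests nothing.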
- RTC_LOG(LS_INFO) << "Measured bitrate: " << bitrate_bps + RTC_LOG(LS_INFO) << "Measured bitrate: " << bitrate << " Minimum to probe further: " - << min_bitrate_to_probe_further_bps_; + << min_bitrate_to_probe_further_; - if (min_bitrate_to_probe_further_bps_ != kExponentialProbingDisabled && - bitrate_bps > min_bitrate_to_probe_further_bps_) { + if (bitrate > min_bitrate_to_probe_further_) { pending_probes = InitiateProbing( - at_time_ms, - {static_cast(config_.further_exponential_probe_scale * - bitrate_bps)}, - true); + at_time, {config_.further_exponential_probe_scale * bitrate}, true); } } - if (bitrate_bps < kBitrateDropThreshold * estimated_bitrate_bps_) { - time_of_last_large_drop_ms_ = at_time_ms; - bitrate_before_last_large_drop_bps_ = estimated_bitrate_bps_; - } - - estimated_bitrate_bps_ = bitrate_bps; return pending_probes; } @@ -306,144 +315,217 @@ void ProbeController::EnablePeriodicAlrProbing(bool enable) { void ProbeController::SetAlrStartTimeMs( absl::optional alr_start_time_ms) { - alr_start_time_ms_ = alr_start_time_ms; + if (alr_start_time_ms) { + alr_start_time_ = Timestamp::Millis(*alr_start_time_ms); + } else { + alr_start_time_ = absl::nullopt; + } } void ProbeController::SetAlrEndedTimeMs(int64_t alr_end_time_ms) { - alr_end_time_ms_.emplace(alr_end_time_ms); + alr_end_time_.emplace(Timestamp::Millis(alr_end_time_ms)); } std::vector ProbeController::RequestProbe( - int64_t at_time_ms) { + Timestamp at_time) { // Called once we have returned to normal state after a large drop in // estimated bandwidth. The current response is to initiate a single probe // session (if not already probing) at the previous bitrate. // // If the probe session fails, the assumption is that this drop was a // real one from a competing flow or a network change. - bool in_alr = alr_start_time_ms_.has_value(); + bool in_alr = alr_start_time_.has_value(); bool alr_ended_recently = - (alr_end_time_ms_.has_value() && - at_time_ms - alr_end_time_ms_.value() < kAlrEndedTimeoutMs); + (alr_end_time_.has_value() && + at_time - alr_end_time_.value() < kAlrEndedTimeout); if (in_alr || alr_ended_recently || in_rapid_recovery_experiment_) { if (state_ == State::kProbingComplete) { - uint32_t suggested_probe_bps = - kProbeFractionAfterDrop * bitrate_before_last_large_drop_bps_; - uint32_t min_expected_probe_result_bps = - (1 - kProbeUncertainty) * suggested_probe_bps; - int64_t time_since_drop_ms = at_time_ms - time_of_last_large_drop_ms_; - int64_t time_since_probe_ms = at_time_ms - last_bwe_drop_probing_time_ms_; - if (min_expected_probe_result_bps > estimated_bitrate_bps_ && - time_since_drop_ms < kBitrateDropTimeoutMs && - time_since_probe_ms > kMinTimeBetweenAlrProbesMs) { + DataRate suggested_probe = + kProbeFractionAfterDrop * bitrate_before_last_large_drop_; + DataRate min_expected_probe_result = + (1 - kProbeUncertainty) * suggested_probe; + TimeDelta time_since_drop = at_time - time_of_last_large_drop_; + TimeDelta time_since_probe = at_time - last_bwe_drop_probing_time_; + if (min_expected_probe_result > estimated_bitrate_ && + time_since_drop < kBitrateDropTimeout && + time_since_probe > kMinTimeBetweenAlrProbes) { RTC_LOG(LS_INFO) << "Detected big bandwidth drop, start probing."; // Track how often we probe in response to bandwidth drop in ALR. 
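The drop-recovery probe above is gated by three conditions that are now expressed in TimeDelta/DataRate arithmetic. A small numeric sketch using the constants defined earlier in this file (probe at 0.85 of the pre-drop rate, 5% result uncertainty, 5 s drop timeout, at least 5 s between drop probes), with plain doubles and seconds for brevity:

    #include <cstdio>

    // Constants as defined near the top of probe_controller.cc.
    constexpr double kProbeFractionAfterDrop = 0.85;
    constexpr double kProbeUncertainty = 0.05;
    constexpr double kBitrateDropTimeoutSec = 5.0;
    constexpr double kMinTimeBetweenAlrProbesSec = 5.0;

    // Sketch of the decision inside ProbeController::RequestProbe (bps, seconds).
    bool ShouldProbeAfterDrop(double bitrate_before_drop_bps,
                              double estimated_bps,
                              double time_since_drop_sec,
                              double time_since_probe_sec) {
      double suggested_probe = kProbeFractionAfterDrop * bitrate_before_drop_bps;
      double min_expected_result = (1 - kProbeUncertainty) * suggested_probe;
      return min_expected_result > estimated_bps &&
             time_since_drop_sec < kBitrateDropTimeoutSec &&
             time_since_probe_sec > kMinTimeBetweenAlrProbesSec;
    }

    int main() {
      // 2 Mbps before the drop, 1 Mbps now, drop happened 2 s ago, last drop
      // probe 10 s ago: 0.95 * 0.85 * 2 Mbps = 1.615 Mbps > 1 Mbps, so probe.
      std::printf("%d\n", ShouldProbeAfterDrop(2e6, 1e6, 2.0, 10.0));
      return 0;
    }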
RTC_HISTOGRAM_COUNTS_10000( "WebRTC.BWE.BweDropProbingIntervalInS", - (at_time_ms - last_bwe_drop_probing_time_ms_) / 1000); - last_bwe_drop_probing_time_ms_ = at_time_ms; - return InitiateProbing(at_time_ms, {suggested_probe_bps}, false); + (at_time - last_bwe_drop_probing_time_).seconds()); + last_bwe_drop_probing_time_ = at_time; + return InitiateProbing(at_time, {suggested_probe}, false); } } } return std::vector(); } -void ProbeController::SetMaxBitrate(int64_t max_bitrate_bps) { - max_bitrate_bps_ = max_bitrate_bps; +void ProbeController::SetMaxBitrate(DataRate max_bitrate) { + max_bitrate_ = max_bitrate; } -void ProbeController::Reset(int64_t at_time_ms) { +void ProbeController::SetNetworkStateEstimate( + webrtc::NetworkStateEstimate estimate) { + if (config_.network_state_estimate_fast_rampup_rate > 0 && + estimated_bitrate_ < estimate.link_capacity_upper && + (!network_estimate_ || + estimate.link_capacity_upper >= + config_.network_state_estimate_fast_rampup_rate * + network_estimate_->link_capacity_upper)) { + send_probe_on_next_process_interval_ = true; + } + if (config_.network_state_estimate_drop_down_rate > 0 && network_estimate_ && + !estimate.link_capacity_upper.IsZero() && + (estimated_bitrate_ > estimate.link_capacity_upper || + bwe_limited_due_to_packet_loss_) && + estimate.link_capacity_upper <= + config_.network_state_estimate_drop_down_rate * + network_estimate_->link_capacity_upper) { + send_probe_on_next_process_interval_ = true; + } + + network_estimate_ = estimate; +} + +void ProbeController::Reset(Timestamp at_time) { network_available_ = true; + bwe_limited_due_to_packet_loss_ = false; state_ = State::kInit; - min_bitrate_to_probe_further_bps_ = kExponentialProbingDisabled; - time_last_probing_initiated_ms_ = 0; - estimated_bitrate_bps_ = 0; - start_bitrate_bps_ = 0; - max_bitrate_bps_ = 0; - int64_t now_ms = at_time_ms; - last_bwe_drop_probing_time_ms_ = now_ms; - alr_end_time_ms_.reset(); + min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); + time_last_probing_initiated_ = Timestamp::Zero(); + estimated_bitrate_ = DataRate::Zero(); + network_estimate_ = absl::nullopt; + start_bitrate_ = DataRate::Zero(); + max_bitrate_ = kDefaultMaxProbingBitrate; + Timestamp now = at_time; + last_bwe_drop_probing_time_ = now; + alr_end_time_.reset(); mid_call_probing_waiting_for_result_ = false; - time_of_last_large_drop_ms_ = now_ms; - bitrate_before_last_large_drop_bps_ = 0; - max_total_allocated_bitrate_ = 0; + time_of_last_large_drop_ = now; + bitrate_before_last_large_drop_ = DataRate::Zero(); + max_total_allocated_bitrate_ = DataRate::Zero(); + send_probe_on_next_process_interval_ = false; } -std::vector ProbeController::Process(int64_t at_time_ms) { - if (at_time_ms - time_last_probing_initiated_ms_ > - kMaxWaitingTimeForProbingResultMs) { +bool ProbeController::TimeForAlrProbe(Timestamp at_time) const { + if (enable_periodic_alr_probing_ && alr_start_time_) { + Timestamp next_probe_time = + std::max(*alr_start_time_, time_last_probing_initiated_) + + config_.alr_probing_interval; + return at_time >= next_probe_time; + } + return false; +} + +bool ProbeController::TimeForNetworkStateProbe(Timestamp at_time) const { + if (config_.network_state_estimate_probing_interval->IsFinite() && + network_estimate_ && network_estimate_->link_capacity_upper.IsFinite() && + estimated_bitrate_ < network_estimate_->link_capacity_upper) { + Timestamp next_probe_time = time_last_probing_initiated_ + + config_.network_state_estimate_probing_interval; + return at_time >= 
next_probe_time; + } + return false; +} + +std::vector ProbeController::Process(Timestamp at_time) { + if (at_time - time_last_probing_initiated_ > + kMaxWaitingTimeForProbingResult) { mid_call_probing_waiting_for_result_ = false; if (state_ == State::kWaitingForProbingResult) { RTC_LOG(LS_INFO) << "kWaitingForProbingResult: timeout"; state_ = State::kProbingComplete; - min_bitrate_to_probe_further_bps_ = kExponentialProbingDisabled; + min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); } } - - if (enable_periodic_alr_probing_ && state_ == State::kProbingComplete) { - // Probe bandwidth periodically when in ALR state. - if (alr_start_time_ms_ && estimated_bitrate_bps_ > 0) { - int64_t next_probe_time_ms = - std::max(*alr_start_time_ms_, time_last_probing_initiated_ms_) + - config_.alr_probing_interval->ms(); - if (at_time_ms >= next_probe_time_ms) { - return InitiateProbing(at_time_ms, - {static_cast(estimated_bitrate_bps_ * - config_.alr_probe_scale)}, - true); - } - } + if (estimated_bitrate_.IsZero() || state_ != State::kProbingComplete) { + return {}; + } + if (send_probe_on_next_process_interval_ || TimeForAlrProbe(at_time) || + TimeForNetworkStateProbe(at_time)) { + return InitiateProbing( + at_time, {estimated_bitrate_ * config_.alr_probe_scale}, true); } return std::vector(); } std::vector ProbeController::InitiateProbing( - int64_t now_ms, - std::vector bitrates_to_probe, + Timestamp now, + std::vector bitrates_to_probe, bool probe_further) { - int64_t max_probe_bitrate_bps = - max_bitrate_bps_ > 0 ? max_bitrate_bps_ : kDefaultMaxProbingBitrateBps; - if (limit_probes_with_allocateable_rate_ && - max_total_allocated_bitrate_ > 0) { + if (config_.skip_if_estimate_larger_than_fraction_of_max > 0) { + DataRate network_estimate = network_estimate_ + ? network_estimate_->link_capacity_upper + : DataRate::PlusInfinity(); + if (std::min(network_estimate, estimated_bitrate_) > + config_.skip_if_estimate_larger_than_fraction_of_max * max_bitrate_) { + return {}; + } + } + + DataRate max_probe_bitrate = max_bitrate_; + if (bwe_limited_due_to_packet_loss_ && + config_.limit_probe_target_rate_to_loss_bwe) { + max_probe_bitrate = std::min(estimated_bitrate_, max_bitrate_); + } + if (config_.network_state_estimate_probing_interval->IsFinite() && + network_estimate_ && network_estimate_->link_capacity_upper.IsFinite()) { + if (network_estimate_->link_capacity_upper.IsZero()) { + RTC_LOG(LS_INFO) << "Not sending probe, Network state estimate is zero"; + return {}; + } + max_probe_bitrate = + std::min(max_probe_bitrate, network_estimate_->link_capacity_upper * + config_.network_state_probe_scale); + } + if (max_total_allocated_bitrate_ > DataRate::Zero()) { // If a max allocated bitrate has been configured, allow probing up to 2x // that rate. This allows some overhead to account for bursty streams, // which otherwise would have to ramp up when the overshoot is already in // progress. // It also avoids minor quality reduction caused by probes often being // received at slightly less than the target probe bitrate. 
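InitiateProbing now stacks several upper bounds on each requested probe rate (and, earlier in the function, skips probing entirely when min(estimate, network state estimate) already exceeds skip_if_est_larger_than_fraction_of_max times the configured max). A condensed sketch of the capping chain, with plain doubles and boolean flags standing in for the optionals and field-trial parameters used above:

    #include <algorithm>

    // Condensed model of the upper-bound chain in ProbeController::InitiateProbing.
    // Rates are bps; in the real code hitting a cap also clears probe_further.
    double CapProbeTargetBps(double candidate_bps,
                             double max_bitrate_bps,
                             bool loss_limited,  // limit_probe_target_rate_to_loss_bwe case
                             double estimated_bps,
                             bool has_network_estimate,
                             double link_capacity_upper_bps,
                             double network_state_probe_scale,
                             double max_total_allocated_bps) {
      double max_probe = max_bitrate_bps;
      if (loss_limited)
        max_probe = std::min(estimated_bps, max_bitrate_bps);
      if (has_network_estimate)
        max_probe = std::min(max_probe,
                             link_capacity_upper_bps * network_state_probe_scale);
      if (max_total_allocated_bps > 0)
        max_probe = std::min(max_probe, 2 * max_total_allocated_bps);  // 2x absorbs bursts
      return std::min(candidate_bps, max_probe);
    }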
- max_probe_bitrate_bps = - std::min(max_probe_bitrate_bps, max_total_allocated_bitrate_ * 2); + max_probe_bitrate = + std::min(max_probe_bitrate, max_total_allocated_bitrate_ * 2); } + send_probe_on_next_process_interval_ = false; + std::vector pending_probes; - for (int64_t bitrate : bitrates_to_probe) { - RTC_DCHECK_GT(bitrate, 0); + for (DataRate bitrate : bitrates_to_probe) { + RTC_DCHECK(!bitrate.IsZero()); - if (bitrate > max_probe_bitrate_bps) { - bitrate = max_probe_bitrate_bps; + if (bitrate > max_probe_bitrate) { + bitrate = max_probe_bitrate; probe_further = false; } ProbeClusterConfig config; - config.at_time = Timestamp::Millis(now_ms); - config.target_data_rate = - DataRate::BitsPerSec(rtc::dchecked_cast(bitrate)); - config.target_duration = TimeDelta::Millis(kMinProbeDurationMs); - config.target_probe_count = kMinProbePacketsSent; + config.at_time = now; + config.target_data_rate = bitrate; + if (network_estimate_ && + config_.network_state_estimate_probing_interval->IsFinite()) { + config.target_duration = config_.network_state_probe_duration; + } else { + config.target_duration = config_.min_probe_duration; + } + + config.target_probe_count = config_.min_probe_packets_sent; config.id = next_probe_cluster_id_; next_probe_cluster_id_++; MaybeLogProbeClusterCreated(event_log_, config); pending_probes.push_back(config); } - time_last_probing_initiated_ms_ = now_ms; + time_last_probing_initiated_ = now; if (probe_further) { state_ = State::kWaitingForProbingResult; - min_bitrate_to_probe_further_bps_ = + min_bitrate_to_probe_further_ = (*(bitrates_to_probe.end() - 1)) * config_.further_probe_threshold; } else { state_ = State::kProbingComplete; - min_bitrate_to_probe_further_bps_ = kExponentialProbingDisabled; + min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); } return pending_probes; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.h index 7f24ff98c8..e1ee08fc99 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.h @@ -18,23 +18,24 @@ #include "absl/base/attributes.h" #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/transport/network_control.h" -#include "api/transport/webrtc_key_value_config.h" +#include "api/transport/network_types.h" #include "api/units/data_rate.h" -#include "rtc_base/constructor_magic.h" +#include "api/units/timestamp.h" #include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { struct ProbeControllerConfig { - explicit ProbeControllerConfig(const WebRtcKeyValueConfig* key_value_config); + explicit ProbeControllerConfig(const FieldTrialsView* key_value_config); ProbeControllerConfig(const ProbeControllerConfig&); ProbeControllerConfig& operator=(const ProbeControllerConfig&) = default; ~ProbeControllerConfig(); // These parameters configure the initial probes. First we send one or two - // probes of sizes p1 * start_bitrate_bps_ and p2 * start_bitrate_bps_. + // probes of sizes p1 * start_bitrate_ and p2 * start_bitrate_. // Then whenever we get a bitrate estimate of at least further_probe_threshold // times the size of the last sent probe we'll send another one of size // step_size times the new estimate. 
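The header comment above maps directly onto the numbers already quoted in InitiateExponentialProbing: with the defaults p1 = 3, p2 = 6, further_probe_threshold = 0.7 and step_size = 2, a 300 kbps start bitrate yields initial probes at 900 and 1800 kbps, and an estimate of at least 0.7 * 1800 = 1260 kbps triggers a follow-up probe at twice the new estimate. A compact sketch of that schedule (kbps, defaults hard-coded for readability; not the class itself):

    #include <vector>

    // Sketch of the initial exponential probe schedule described in the comment
    // above, using the default field-trial values p1 = 3, p2 = 6,
    // further_probe_threshold = 0.7.
    struct ExponentialProbePlan {
      std::vector<double> initial_probes_kbps;    // p1 * start, p2 * start
      double min_estimate_to_probe_further_kbps;  // threshold * last probe
    };

    ExponentialProbePlan PlanInitialProbes(double start_kbps) {
      ExponentialProbePlan plan;
      plan.initial_probes_kbps = {3.0 * start_kbps, 6.0 * start_kbps};
      plan.min_estimate_to_probe_further_kbps = 0.7 * plan.initial_probes_kbps.back();
      return plan;
    }
    // PlanInitialProbes(300) -> probes at {900, 1800} kbps; keep probing once the
    // estimate reaches 1260 kbps, with the next target at step_size (2) * estimate.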
@@ -47,11 +48,35 @@ struct ProbeControllerConfig { FieldTrialParameter alr_probing_interval; FieldTrialParameter alr_probe_scale; + // Configures how often we send probes if NetworkStateEstimate is available. + FieldTrialParameter network_state_estimate_probing_interval; + // If the network state estimate increase more than this rate, a probe is sent + // the next process interval. + FieldTrialParameter network_state_estimate_fast_rampup_rate; + // If the network state estimate decreases more than this rate, a probe is + // sent the next process interval. + FieldTrialParameter network_state_estimate_drop_down_rate; + FieldTrialParameter network_state_probe_scale; + // Overrides min_probe_duration if network_state_estimate_probing_interval + // is set and a network state estimate is known. + FieldTrialParameter network_state_probe_duration; + // Configures the probes emitted by changed to the allocated bitrate. FieldTrialOptional first_allocation_probe_scale; FieldTrialOptional second_allocation_probe_scale; FieldTrialFlag allocation_allow_further_probing; FieldTrialParameter allocation_probe_max; + + // The minimum number probing packets used. + FieldTrialParameter min_probe_packets_sent; + // The minimum probing duration. + FieldTrialParameter min_probe_duration; + // Max limit the target rate of a probe to current estimate if BWE is loss + // limited. + FieldTrialParameter limit_probe_target_rate_to_loss_bwe; + // Dont send a probe if min(estimate, network state estimate) is larger than + // this fraction of the set max bitrate. + FieldTrialParameter skip_if_estimate_larger_than_fraction_of_max; }; // This class controls initiation of probing to estimate initial channel @@ -59,28 +84,32 @@ struct ProbeControllerConfig { // bitrate is adjusted by an application. class ProbeController { public: - explicit ProbeController(const WebRtcKeyValueConfig* key_value_config, + explicit ProbeController(const FieldTrialsView* key_value_config, RtcEventLog* event_log); ~ProbeController(); + ProbeController(const ProbeController&) = delete; + ProbeController& operator=(const ProbeController&) = delete; + ABSL_MUST_USE_RESULT std::vector SetBitrates( - int64_t min_bitrate_bps, - int64_t start_bitrate_bps, - int64_t max_bitrate_bps, - int64_t at_time_ms); + DataRate min_bitrate, + DataRate start_bitrate, + DataRate max_bitrate, + Timestamp at_time); // The total bitrate, as opposed to the max bitrate, is the sum of the // configured bitrates for all active streams. ABSL_MUST_USE_RESULT std::vector - OnMaxTotalAllocatedBitrate(int64_t max_total_allocated_bitrate, - int64_t at_time_ms); + OnMaxTotalAllocatedBitrate(DataRate max_total_allocated_bitrate, + Timestamp at_time); ABSL_MUST_USE_RESULT std::vector OnNetworkAvailability( NetworkAvailability msg); ABSL_MUST_USE_RESULT std::vector SetEstimatedBitrate( - int64_t bitrate_bps, - int64_t at_time_ms); + DataRate bitrate, + bool bwe_limited_due_to_packet_loss, + Timestamp at_time); void EnablePeriodicAlrProbing(bool enable); @@ -88,17 +117,18 @@ class ProbeController { void SetAlrEndedTimeMs(int64_t alr_end_time); ABSL_MUST_USE_RESULT std::vector RequestProbe( - int64_t at_time_ms); + Timestamp at_time); // Sets a new maximum probing bitrate, without generating a new probe cluster. 
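With the interface above fully unit-typed, a caller drives the controller with Timestamp/DataRate values and consumes the returned ProbeClusterConfig vector. A hypothetical usage sketch; the FieldTrialBasedConfig/RtcEventLogNull construction and the literal bitrates are illustrative assumptions, only the method signatures come from this patch:

    #include <vector>

    #include "api/rtc_event_log/rtc_event_log.h"         // RtcEventLogNull (assumed available here)
    #include "api/transport/field_trial_based_config.h"  // FieldTrialBasedConfig
    #include "api/units/data_rate.h"
    #include "api/units/timestamp.h"
    #include "modules/congestion_controller/goog_cc/probe_controller.h"

    // Hypothetical driver showing the new DataRate/Timestamp based calls.
    std::vector<webrtc::ProbeClusterConfig> RunOnce(webrtc::Timestamp now) {
      using webrtc::DataRate;
      webrtc::FieldTrialBasedConfig trials;
      webrtc::RtcEventLogNull event_log;
      webrtc::ProbeController controller(&trials, &event_log);

      auto probes = controller.SetBitrates(
          /*min_bitrate=*/DataRate::KilobitsPerSec(30),
          /*start_bitrate=*/DataRate::KilobitsPerSec(300),
          /*max_bitrate=*/DataRate::KilobitsPerSec(5000), now);

      // The extra bool reports whether BWE is currently loss limited, which is
      // new in this patch and can force the probing state back to complete.
      auto more = controller.SetEstimatedBitrate(
          DataRate::KilobitsPerSec(900),
          /*bwe_limited_due_to_packet_loss=*/false, now);
      probes.insert(probes.end(), more.begin(), more.end());

      // Periodic processing (ALR and network-state probes) also takes a Timestamp.
      auto periodic = controller.Process(now);
      probes.insert(probes.end(), periodic.begin(), periodic.end());
      return probes;
    }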
- void SetMaxBitrate(int64_t max_bitrate_bps); + void SetMaxBitrate(DataRate max_bitrate); + void SetNetworkStateEstimate(webrtc::NetworkStateEstimate estimate); // Resets the ProbeController to a state equivalent to as if it was just // created EXCEPT for `enable_periodic_alr_probing_`. - void Reset(int64_t at_time_ms); + void Reset(Timestamp at_time); ABSL_MUST_USE_RESULT std::vector Process( - int64_t at_time_ms); + Timestamp at_time); private: enum class State { @@ -111,40 +141,42 @@ class ProbeController { }; ABSL_MUST_USE_RESULT std::vector - InitiateExponentialProbing(int64_t at_time_ms); + InitiateExponentialProbing(Timestamp at_time); ABSL_MUST_USE_RESULT std::vector InitiateProbing( - int64_t now_ms, - std::vector bitrates_to_probe, + Timestamp now, + std::vector bitrates_to_probe, bool probe_further); + bool TimeForAlrProbe(Timestamp at_time) const; + bool TimeForNetworkStateProbe(Timestamp at_time) const; bool network_available_; + bool bwe_limited_due_to_packet_loss_; State state_; - int64_t min_bitrate_to_probe_further_bps_; - int64_t time_last_probing_initiated_ms_; - int64_t estimated_bitrate_bps_; - int64_t start_bitrate_bps_; - int64_t max_bitrate_bps_; - int64_t last_bwe_drop_probing_time_ms_; - absl::optional alr_start_time_ms_; - absl::optional alr_end_time_ms_; + DataRate min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); + Timestamp time_last_probing_initiated_ = Timestamp::MinusInfinity(); + DataRate estimated_bitrate_ = DataRate::Zero(); + bool send_probe_on_next_process_interval_; + absl::optional network_estimate_; + DataRate start_bitrate_ = DataRate::Zero(); + DataRate max_bitrate_ = DataRate::PlusInfinity(); + Timestamp last_bwe_drop_probing_time_ = Timestamp::Zero(); + absl::optional alr_start_time_; + absl::optional alr_end_time_; bool enable_periodic_alr_probing_; - int64_t time_of_last_large_drop_ms_; - int64_t bitrate_before_last_large_drop_bps_; - int64_t max_total_allocated_bitrate_; + Timestamp time_of_last_large_drop_ = Timestamp::MinusInfinity(); + DataRate bitrate_before_last_large_drop_ = DataRate::Zero(); + DataRate max_total_allocated_bitrate_ = DataRate::Zero(); const bool in_rapid_recovery_experiment_; - const bool limit_probes_with_allocateable_rate_; // For WebRTC.BWE.MidCallProbing.* metric. 
bool mid_call_probing_waiting_for_result_; - int64_t mid_call_probing_bitrate_bps_; - int64_t mid_call_probing_succcess_threshold_; + DataRate mid_call_probing_bitrate_ = DataRate::Zero(); + DataRate mid_call_probing_succcess_threshold_ = DataRate::Zero(); RtcEventLog* event_log_; int32_t next_probe_cluster_id_ = 1; ProbeControllerConfig config_; - - RTC_DISALLOW_COPY_AND_ASSIGN(ProbeController); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc index 1169e9f6bb..792a93d41e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc @@ -15,24 +15,55 @@ #include #include +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "rtc_base/checks.h" namespace webrtc { RobustThroughputEstimator::RobustThroughputEstimator( const RobustThroughputEstimatorSettings& settings) - : settings_(settings) { + : settings_(settings), + latest_discarded_send_time_(Timestamp::MinusInfinity()) { RTC_DCHECK(settings.enabled); } RobustThroughputEstimator::~RobustThroughputEstimator() {} +bool RobustThroughputEstimator::FirstPacketOutsideWindow() { + if (window_.empty()) + return false; + if (window_.size() > settings_.max_window_packets) + return true; + TimeDelta current_window_duration = + window_.back().receive_time - window_.front().receive_time; + if (current_window_duration > settings_.max_window_duration) + return true; + if (window_.size() > settings_.window_packets && + current_window_duration > settings_.min_window_duration) { + return true; + } + return false; +} + void RobustThroughputEstimator::IncomingPacketFeedbackVector( const std::vector& packet_feedback_vector) { RTC_DCHECK(std::is_sorted(packet_feedback_vector.begin(), packet_feedback_vector.end(), PacketResult::ReceiveTimeOrder())); for (const auto& packet : packet_feedback_vector) { + // Ignore packets without valid send or receive times. + // (This should not happen in production since lost packets are filtered + // out before passing the feedback vector to the throughput estimator. + // However, explicitly handling this case makes the estimator more robust + // and avoids a hard-to-detect bad state.) + if (packet.receive_time.IsInfinite() || + packet.sent_packet.send_time.IsInfinite()) { + continue; + } + // Insert the new packet. window_.push_back(packet); window_.back().sent_packet.prior_unacked_data = @@ -45,24 +76,24 @@ void RobustThroughputEstimator::IncomingPacketFeedbackVector( i > 0 && window_[i].receive_time < window_[i - 1].receive_time; i--) { std::swap(window_[i], window_[i - 1]); } - // Remove old packets. - while (window_.size() > settings_.kMaxPackets || - (window_.size() > settings_.min_packets && - packet.receive_time - window_.front().receive_time > - settings_.window_duration)) { - window_.pop_front(); - } + } + + // Remove old packets. 
+ while (FirstPacketOutsideWindow()) { + latest_discarded_send_time_ = std::max( + latest_discarded_send_time_, window_.front().sent_packet.send_time); + window_.pop_front(); } } absl::optional RobustThroughputEstimator::bitrate() const { - if (window_.size() < settings_.initial_packets) + if (window_.empty() || window_.size() < settings_.required_packets) return absl::nullopt; - TimeDelta largest_recv_gap(TimeDelta::Millis(0)); - TimeDelta second_largest_recv_gap(TimeDelta::Millis(0)); + TimeDelta largest_recv_gap(TimeDelta::Zero()); + TimeDelta second_largest_recv_gap(TimeDelta::Zero()); for (size_t i = 1; i < window_.size(); i++) { - // Find receive time gaps + // Find receive time gaps. TimeDelta gap = window_[i].receive_time - window_[i - 1].receive_time; if (gap > largest_recv_gap) { second_largest_recv_gap = largest_recv_gap; @@ -72,63 +103,86 @@ absl::optional RobustThroughputEstimator::bitrate() const { } } - Timestamp min_send_time = window_[0].sent_packet.send_time; - Timestamp max_send_time = window_[0].sent_packet.send_time; - Timestamp min_recv_time = window_[0].receive_time; - Timestamp max_recv_time = window_[0].receive_time; - DataSize data_size = DataSize::Bytes(0); + Timestamp first_send_time = Timestamp::PlusInfinity(); + Timestamp last_send_time = Timestamp::MinusInfinity(); + Timestamp first_recv_time = Timestamp::PlusInfinity(); + Timestamp last_recv_time = Timestamp::MinusInfinity(); + DataSize recv_size = DataSize::Bytes(0); + DataSize send_size = DataSize::Bytes(0); + DataSize first_recv_size = DataSize::Bytes(0); + DataSize last_send_size = DataSize::Bytes(0); + size_t num_sent_packets_in_window = 0; for (const auto& packet : window_) { - min_send_time = std::min(min_send_time, packet.sent_packet.send_time); - max_send_time = std::max(max_send_time, packet.sent_packet.send_time); - min_recv_time = std::min(min_recv_time, packet.receive_time); - max_recv_time = std::max(max_recv_time, packet.receive_time); - data_size += packet.sent_packet.size; - data_size += packet.sent_packet.prior_unacked_data; + if (packet.receive_time < first_recv_time) { + first_recv_time = packet.receive_time; + first_recv_size = + packet.sent_packet.size + packet.sent_packet.prior_unacked_data; + } + last_recv_time = std::max(last_recv_time, packet.receive_time); + recv_size += packet.sent_packet.size; + recv_size += packet.sent_packet.prior_unacked_data; + + if (packet.sent_packet.send_time < latest_discarded_send_time_) { + // If we have dropped packets from the window that were sent after + // this packet, then this packet was reordered. Ignore it from + // the send rate computation (since the send time may be very far + // in the past, leading to underestimation of the send rate.) + // However, ignoring packets creates a risk that we end up without + // any packets left to compute a send rate. + continue; + } + if (packet.sent_packet.send_time > last_send_time) { + last_send_time = packet.sent_packet.send_time; + last_send_size = + packet.sent_packet.size + packet.sent_packet.prior_unacked_data; + } + first_send_time = std::min(first_send_time, packet.sent_packet.send_time); + + send_size += packet.sent_packet.size; + send_size += packet.sent_packet.prior_unacked_data; + ++num_sent_packets_in_window; } // Suppose a packet of size S is sent every T milliseconds. // A window of N packets would contain N*S bytes, but the time difference // between the first and the last packet would only be (N-1)*T. Thus, we - // need to remove one packet. 
- DataSize recv_size = data_size; - DataSize send_size = data_size; - if (settings_.assume_shared_link) { - // Depending on how the bottleneck queue is implemented, a large packet - // may delay sending of sebsequent packets, so the delay between packets - // i and i+1 depends on the size of both packets. In this case we minimize - // the maximum error by removing half of both the first and last packet - // size. - DataSize first_last_average_size = - (window_.front().sent_packet.size + - window_.front().sent_packet.prior_unacked_data + - window_.back().sent_packet.size + - window_.back().sent_packet.prior_unacked_data) / - 2; - recv_size -= first_last_average_size; - send_size -= first_last_average_size; - } else { - // In the simpler case where the delay between packets i and i+1 only - // depends on the size of packet i+1, the first packet doesn't give us - // any information. Analogously, we assume that the start send time - // for the last packet doesn't depend on the size of the packet. - recv_size -= (window_.front().sent_packet.size + - window_.front().sent_packet.prior_unacked_data); - send_size -= (window_.back().sent_packet.size + - window_.back().sent_packet.prior_unacked_data); - } + // need to remove the size of one packet to get the correct rate of S/T. + // Which packet to remove (if the packets have varying sizes), + // depends on the network model. + // Suppose that 2 packets with sizes s1 and s2, are received at times t1 + // and t2, respectively. If the packets were transmitted back to back over + // a bottleneck with rate capacity r, then we'd expect t2 = t1 + r * s2. + // Thus, r = (t2-t1) / s2, so the size of the first packet doesn't affect + // the difference between t1 and t2. + // Analoguously, if the first packet is sent at time t1 and the sender + // paces the packets at rate r, then the second packet can be sent at time + // t2 = t1 + r * s1. Thus, the send rate estimate r = (t2-t1) / s1 doesn't + // depend on the size of the last packet. + recv_size -= first_recv_size; + send_size -= last_send_size; - // Remove the largest gap by replacing it by the second largest gap - // or the average gap. - TimeDelta send_duration = max_send_time - min_send_time; - TimeDelta recv_duration = (max_recv_time - min_recv_time) - largest_recv_gap; - if (settings_.reduce_bias) { - recv_duration += second_largest_recv_gap; - } else { - recv_duration += recv_duration / (window_.size() - 2); + // Remove the largest gap by replacing it by the second largest gap. + // This is to ensure that spurious "delay spikes" (i.e. when the + // network stops transmitting packets for a short period, followed + // by a burst of delayed packets), don't cause the estimate to drop. + // This could cause an overestimation, which we guard against by + // never returning an estimate above the send rate. + RTC_DCHECK(first_recv_time.IsFinite()); + RTC_DCHECK(last_recv_time.IsFinite()); + TimeDelta recv_duration = (last_recv_time - first_recv_time) - + largest_recv_gap + second_largest_recv_gap; + recv_duration = std::max(recv_duration, TimeDelta::Millis(1)); + + if (num_sent_packets_in_window < settings_.required_packets) { + // Too few send times to calculate a reliable send rate. 
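The comment block above is the reasoning behind which packet size gets subtracted from each sum: the first received packet for the receive rate and the last sent packet for the send rate, with the largest receive gap swapped for the second largest to ignore delay spikes. A stand-alone numeric sketch of the final computation (plain doubles; the real code works in DataSize/TimeDelta and returns a DataRate):

    #include <algorithm>

    // Sketch of the rate math at the end of RobustThroughputEstimator::bitrate().
    // Sizes in bytes, durations in seconds, result in bps.
    double EstimateBitrateBps(double recv_bytes_minus_first, double recv_duration_s,
                              double send_bytes_minus_last, double send_duration_s) {
      // Mirror the TimeDelta::Millis(1) floor that guards tiny windows.
      recv_duration_s = std::max(recv_duration_s, 0.001);
      send_duration_s = std::max(send_duration_s, 0.001);
      double recv_rate = 8.0 * recv_bytes_minus_first / recv_duration_s;
      double send_rate = 8.0 * send_bytes_minus_last / send_duration_s;
      // Never report more than was demonstrably sent; this bounds the
      // overestimation that dropping the largest receive gap could cause.
      return std::min(send_rate, recv_rate);
    }
    // Example: 20 packets of 1200 bytes paced over 95 ms and received over 90 ms
    // (19 * 1200 bytes counted on each side) gives recv ~2.03 Mbps and
    // send ~1.92 Mbps, so the estimate is ~1.92 Mbps.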
+ return recv_size / recv_duration; } + RTC_DCHECK(first_send_time.IsFinite()); + RTC_DCHECK(last_send_time.IsFinite()); + TimeDelta send_duration = last_send_time - first_send_time; send_duration = std::max(send_duration, TimeDelta::Millis(1)); - recv_duration = std::max(recv_duration, TimeDelta::Millis(1)); + return std::min(send_size / send_duration, recv_size / recv_duration); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/robust_throughput_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/robust_throughput_estimator.h index de48a9b599..9d89856496 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/robust_throughput_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/robust_throughput_estimator.h @@ -12,13 +12,12 @@ #define MODULES_CONGESTION_CONTROLLER_GOOG_CC_ROBUST_THROUGHPUT_ESTIMATOR_H_ #include -#include #include #include "absl/types/optional.h" #include "api/transport/network_types.h" -#include "api/transport/webrtc_key_value_config.h" #include "api/units/data_rate.h" +#include "api/units/timestamp.h" #include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h" namespace webrtc { @@ -39,8 +38,11 @@ class RobustThroughputEstimator : public AcknowledgedBitrateEstimatorInterface { void SetAlrEndedTime(Timestamp /*alr_ended_time*/) override {} private: + bool FirstPacketOutsideWindow(); + const RobustThroughputEstimatorSettings settings_; std::deque window_; + Timestamp latest_discarded_send_time_ = Timestamp::MinusInfinity(); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc index 5bb145cf20..12a7d4191e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc @@ -17,9 +17,10 @@ #include #include "absl/strings/match.h" +#include "api/field_trials_view.h" +#include "api/network_state_predictor.h" #include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" -#include "api/transport/webrtc_key_value_config.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" @@ -157,7 +158,7 @@ DataRate LinkCapacityTracker::estimate() const { return DataRate::BitsPerSec(capacity_estimate_bps_); } -RttBasedBackoff::RttBasedBackoff(const WebRtcKeyValueConfig* key_value_config) +RttBasedBackoff::RttBasedBackoff(const FieldTrialsView* key_value_config) : disabled_("Disabled"), configured_limit_("limit", TimeDelta::Seconds(3)), drop_fraction_("fraction", 0.8), @@ -196,15 +197,14 @@ TimeDelta RttBasedBackoff::CorrectedRtt(Timestamp at_time) const { RttBasedBackoff::~RttBasedBackoff() = default; SendSideBandwidthEstimation::SendSideBandwidthEstimation( - const WebRtcKeyValueConfig* key_value_config, + const FieldTrialsView* key_value_config, RtcEventLog* event_log) : rtt_backoff_(key_value_config), lost_packets_since_last_loss_update_(0), expected_packets_since_last_loss_update_(0), current_target_(DataRate::Zero()), last_logged_target_(DataRate::Zero()), - min_bitrate_configured_( - DataRate::BitsPerSec(congestion_controller::GetMinBitrateBps())), + 
min_bitrate_configured_(kCongestionControllerMinBitrate), max_bitrate_configured_(kDefaultMaxBitrate), last_low_bitrate_log_(Timestamp::MinusInfinity()), has_decreased_since_last_fraction_loss_(false), @@ -244,6 +244,9 @@ SendSideBandwidthEstimation::SendSideBandwidthEstimation( } ParseFieldTrial({&disable_receiver_limit_caps_only_}, key_value_config->Lookup("WebRTC-Bwe-ReceiverLimitCapsOnly")); + if (LossBasedBandwidthEstimatorV2Enabled()) { + loss_based_bandwidth_estimator_v2_.SetMinBitrate(min_bitrate_configured_); + } } SendSideBandwidthEstimation::~SendSideBandwidthEstimation() {} @@ -252,8 +255,7 @@ void SendSideBandwidthEstimation::OnRouteChange() { lost_packets_since_last_loss_update_ = 0; expected_packets_since_last_loss_update_ = 0; current_target_ = DataRate::Zero(); - min_bitrate_configured_ = - DataRate::BitsPerSec(congestion_controller::GetMinBitrateBps()); + min_bitrate_configured_ = kCongestionControllerMinBitrate; max_bitrate_configured_ = kDefaultMaxBitrate; last_low_bitrate_log_ = Timestamp::MinusInfinity(); has_decreased_since_last_fraction_loss_ = false; @@ -299,7 +301,7 @@ void SendSideBandwidthEstimation::SetSendBitrate(DataRate bitrate, void SendSideBandwidthEstimation::SetMinMaxBitrate(DataRate min_bitrate, DataRate max_bitrate) { min_bitrate_configured_ = - std::max(min_bitrate, congestion_controller::GetMinBitrate()); + std::max(min_bitrate, kCongestionControllerMinBitrate); if (max_bitrate > DataRate::Zero() && max_bitrate.IsFinite()) { max_bitrate_configured_ = std::max(min_bitrate_configured_, max_bitrate); } else { @@ -318,6 +320,10 @@ DataRate SendSideBandwidthEstimation::target_rate() const { return std::max(min_bitrate_configured_, target); } +DataRate SendSideBandwidthEstimation::delay_based_limit() const { + return delay_based_limit_; +} + DataRate SendSideBandwidthEstimation::GetEstimatedLinkCapacity() const { return link_capacity_.estimate(); } @@ -356,15 +362,17 @@ void SendSideBandwidthEstimation::SetAcknowledgedRate( } } -void SendSideBandwidthEstimation::IncomingPacketFeedbackVector( - const TransportPacketsFeedback& report) { +void SendSideBandwidthEstimation::UpdateLossBasedEstimator( + const TransportPacketsFeedback& report, + BandwidthUsage delay_detector_state) { if (LossBasedBandwidthEstimatorV1Enabled()) { loss_based_bandwidth_estimator_v1_.UpdateLossStatistics( report.packet_feedbacks, report.feedback_time); } if (LossBasedBandwidthEstimatorV2Enabled()) { loss_based_bandwidth_estimator_v2_.UpdateBandwidthEstimate( - report.packet_feedbacks, delay_based_limit_); + report.packet_feedbacks, delay_based_limit_, delay_detector_state); + UpdateEstimate(report.feedback_time); } } @@ -389,8 +397,10 @@ void SendSideBandwidthEstimation::UpdatePacketsLost(int64_t packets_lost, } has_decreased_since_last_fraction_loss_ = false; - int64_t lost_q8 = (lost_packets_since_last_loss_update_ + packets_lost) - << 8; + int64_t lost_q8 = + std::max(lost_packets_since_last_loss_update_ + packets_lost, + 0) + << 8; last_fraction_loss_ = std::min(lost_q8 / expected, 255); // Reset accumulators. 
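last_fraction_loss_ is kept in Q8, so 0-255 maps to 0-100% loss, and the change above clamps the accumulated count at zero before shifting so that negative packets_lost reports (duplicate or reordering corrections) can no longer underflow the fraction. A small worked sketch:

    #include <algorithm>
    #include <cstdint>

    // Sketch of the Q8 loss-fraction update in
    // SendSideBandwidthEstimation::UpdatePacketsLost.
    uint8_t FractionLossQ8(int64_t accumulated_lost, int64_t packets_lost,
                           int64_t expected) {
      // Clamp at zero first (the fix above), then convert to Q8.
      int64_t lost_q8 = std::max<int64_t>(accumulated_lost + packets_lost, 0) << 8;
      return static_cast<uint8_t>(std::min<int64_t>(lost_q8 / expected, 255));
    }
    // FractionLossQ8(0, 5, 100) == 12   (5 * 256 / 100, about 4.7% in Q8 steps)
    // FractionLossQ8(0, -3, 100) == 0   (negative reports no longer wrap around)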
@@ -399,6 +409,7 @@ void SendSideBandwidthEstimation::UpdatePacketsLost(int64_t packets_lost, last_loss_packet_report_ = at_time; UpdateEstimate(at_time); } + UpdateUmaStatsPacketsLost(at_time, packets_lost); } @@ -509,8 +520,8 @@ void SendSideBandwidthEstimation::UpdateEstimate(Timestamp at_time) { if (LossBasedBandwidthEstimatorV2ReadyForUse()) { DataRate new_bitrate = - loss_based_bandwidth_estimator_v2_.GetBandwidthEstimate(); - new_bitrate = std::min(new_bitrate, delay_based_limit_); + loss_based_bandwidth_estimator_v2_.GetBandwidthEstimate( + delay_based_limit_); UpdateTargetBitrate(new_bitrate, at_time); return; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h index f31f30f70d..4b60689302 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h @@ -20,8 +20,9 @@ #include #include "absl/types/optional.h" +#include "api/field_trials_view.h" +#include "api/network_state_predictor.h" #include "api/transport/network_types.h" -#include "api/transport/webrtc_key_value_config.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -56,7 +57,7 @@ class LinkCapacityTracker { class RttBasedBackoff { public: - explicit RttBasedBackoff(const WebRtcKeyValueConfig* key_value_config); + explicit RttBasedBackoff(const FieldTrialsView* key_value_config); ~RttBasedBackoff(); void UpdatePropagationRtt(Timestamp at_time, TimeDelta propagation_rtt); TimeDelta CorrectedRtt(Timestamp at_time) const; @@ -77,13 +78,14 @@ class RttBasedBackoff { class SendSideBandwidthEstimation { public: SendSideBandwidthEstimation() = delete; - SendSideBandwidthEstimation(const WebRtcKeyValueConfig* key_value_config, + SendSideBandwidthEstimation(const FieldTrialsView* key_value_config, RtcEventLog* event_log); ~SendSideBandwidthEstimation(); void OnRouteChange(); DataRate target_rate() const; + DataRate delay_based_limit() const; uint8_t fraction_loss() const { return last_fraction_loss_; } TimeDelta round_trip_time() const { return last_round_trip_time_; } @@ -116,7 +118,8 @@ class SendSideBandwidthEstimation { int GetMinBitrate() const; void SetAcknowledgedRate(absl::optional acknowledged_rate, Timestamp at_time); - void IncomingPacketFeedbackVector(const TransportPacketsFeedback& report); + void UpdateLossBasedEstimator(const TransportPacketsFeedback& report, + BandwidthUsage delay_detector_state); private: friend class GoogCcStatePrinter; diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc index 6dadf8b9c4..6a8849ed6d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc @@ -13,6 +13,7 @@ #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "modules/congestion_controller/goog_cc/alr_detector.h" #include "modules/congestion_controller/goog_cc/delay_based_bwe.h" @@ -44,8 +45,8 @@ void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { template class TypedFieldLogger : public FieldLogger { public: - 
TypedFieldLogger(std::string name, F&& getter) - : name_(std::move(name)), getter_(std::forward(getter)) {} + TypedFieldLogger(absl::string_view name, F&& getter) + : name_(name), getter_(std::forward(getter)) {} const std::string& name() const override { return name_; } void WriteValue(RtcEventLogOutput* out) override { WriteTypedValue(out, getter_()); @@ -57,8 +58,8 @@ class TypedFieldLogger : public FieldLogger { }; template -FieldLogger* Log(std::string name, F&& getter) { - return new TypedFieldLogger(std::move(name), std::forward(getter)); +FieldLogger* Log(absl::string_view name, F&& getter) { + return new TypedFieldLogger(name, std::forward(getter)); } } // namespace diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.h index 3eee7814cf..16fa657e71 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.h @@ -61,7 +61,7 @@ class GoogCcDebugFactory : public GoogCcNetworkControllerFactory { std::unique_ptr Create( NetworkControllerConfig config) override; - void PrintState(const Timestamp at_time); + void PrintState(Timestamp at_time); void AttachWriter(std::unique_ptr log_writer); diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.cc index 7fdf66c518..88182d4f80 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.cc @@ -34,8 +34,7 @@ constexpr double kDefaultTrendlineThresholdGain = 4.0; const char kBweWindowSizeInPacketsExperiment[] = "WebRTC-BweWindowSizeInPackets"; -size_t ReadTrendlineFilterWindowSize( - const WebRtcKeyValueConfig* key_value_config) { +size_t ReadTrendlineFilterWindowSize(const FieldTrialsView* key_value_config) { std::string experiment_string = key_value_config->Lookup(kBweWindowSizeInPacketsExperiment); size_t window_size; @@ -115,7 +114,7 @@ constexpr int kDeltaCounterMax = 1000; constexpr char TrendlineEstimatorSettings::kKey[]; TrendlineEstimatorSettings::TrendlineEstimatorSettings( - const WebRtcKeyValueConfig* key_value_config) { + const FieldTrialsView* key_value_config) { if (absl::StartsWith( key_value_config->Lookup(kBweWindowSizeInPacketsExperiment), "Enabled")) { @@ -160,7 +159,7 @@ std::unique_ptr TrendlineEstimatorSettings::Parser() { } TrendlineEstimator::TrendlineEstimator( - const WebRtcKeyValueConfig* key_value_config, + const FieldTrialsView* key_value_config, NetworkStatePredictor* network_state_predictor) : settings_(key_value_config), smoothing_coef_(kDefaultTrendlineSmoothingCoeff), diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.h index 75b971d187..ffda25df74 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.h @@ -17,10 +17,9 @@ #include #include +#include "api/field_trials_view.h" #include "api/network_state_predictor.h" -#include "api/transport/webrtc_key_value_config.h" #include 
"modules/congestion_controller/goog_cc/delay_increase_detector_interface.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/struct_parameters_parser.h" namespace webrtc { @@ -30,8 +29,7 @@ struct TrendlineEstimatorSettings { static constexpr unsigned kDefaultTrendlineWindowSize = 20; TrendlineEstimatorSettings() = delete; - explicit TrendlineEstimatorSettings( - const WebRtcKeyValueConfig* key_value_config); + explicit TrendlineEstimatorSettings(const FieldTrialsView* key_value_config); // Sort the packets in the window. Should be redundant, // but then almost no cost. @@ -52,11 +50,14 @@ struct TrendlineEstimatorSettings { class TrendlineEstimator : public DelayIncreaseDetectorInterface { public: - TrendlineEstimator(const WebRtcKeyValueConfig* key_value_config, + TrendlineEstimator(const FieldTrialsView* key_value_config, NetworkStatePredictor* network_state_predictor); ~TrendlineEstimator() override; + TrendlineEstimator(const TrendlineEstimator&) = delete; + TrendlineEstimator& operator=(const TrendlineEstimator&) = delete; + // Update the estimator with a new sample. The deltas should represent deltas // between timestamp groups as defined by the InterArrival class. void Update(double recv_delta_ms, @@ -118,8 +119,6 @@ class TrendlineEstimator : public DelayIncreaseDetectorInterface { BandwidthUsage hypothesis_; BandwidthUsage hypothesis_predicted_; NetworkStatePredictor* network_state_predictor_; - - RTC_DISALLOW_COPY_AND_ASSIGN(TrendlineEstimator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/include/receive_side_congestion_controller.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/include/receive_side_congestion_controller.h index fdef7f95c8..96ee8a6e3d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/include/receive_side_congestion_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/include/receive_side_congestion_controller.h @@ -17,23 +17,22 @@ #include "api/transport/field_trial_based_config.h" #include "api/transport/network_control.h" #include "api/units/data_rate.h" +#include "api/units/time_delta.h" #include "modules/congestion_controller/remb_throttler.h" -#include "modules/include/module.h" #include "modules/pacing/packet_router.h" #include "modules/remote_bitrate_estimator/remote_estimator_proxy.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { class RemoteBitrateEstimator; -class RemoteBitrateObserver; // This class represents the congestion control state for receive // streams. For send side bandwidth estimation, this is simply // relaying for each received RTP packet back to the sender. While for // receive side bandwidth estimation, we do the estimation locally and // send our results back to the sender. -class ReceiveSideCongestionController : public CallStatsObserver, - public Module { +class ReceiveSideCongestionController : public CallStatsObserver { public: ReceiveSideCongestionController( Clock* clock, @@ -48,10 +47,6 @@ class ReceiveSideCongestionController : public CallStatsObserver, const RTPHeader& header); void SetSendPeriodicFeedback(bool send_periodic_feedback); - // TODO(nisse): Delete these methods, design a more specific interface. - virtual RemoteBitrateEstimator* GetRemoteBitrateEstimator(bool send_side_bwe); - virtual const RemoteBitrateEstimator* GetRemoteBitrateEstimator( - bool send_side_bwe) const; // Implements CallStatsObserver. 
void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override; @@ -63,56 +58,34 @@ class ReceiveSideCongestionController : public CallStatsObserver, // `bitrate` using RTCP REMB. void SetMaxDesiredReceiveBitrate(DataRate bitrate); - // Implements Module. - int64_t TimeUntilNextProcess() override; - void Process() override; + void SetTransportOverhead(DataSize overhead_per_packet); - private: - class WrappingBitrateEstimator : public RemoteBitrateEstimator { - public: - WrappingBitrateEstimator(RemoteBitrateObserver* observer, Clock* clock); - - WrappingBitrateEstimator() = delete; - WrappingBitrateEstimator(const WrappingBitrateEstimator&) = delete; - WrappingBitrateEstimator& operator=(const WrappingBitrateEstimator&) = - delete; - - ~WrappingBitrateEstimator() override; - - void IncomingPacket(int64_t arrival_time_ms, - size_t payload_size, - const RTPHeader& header) override; - - void Process() override; + // Returns latest receive side bandwidth estimation. + // Returns zero if receive side bandwidth estimation is unavailable. + DataRate LatestReceiveSideEstimate() const; - int64_t TimeUntilNextProcess() override; + // Removes stream from receive side bandwidth estimation. + // Noop if receive side bwe is not used or stream doesn't participate in it. + void RemoveStream(uint32_t ssrc); - void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override; + // Runs periodic tasks if it is time to run them, returns time until next + // call to `MaybeProcess` should be non idle. + TimeDelta MaybeProcess(); - void RemoveStream(unsigned int ssrc) override; - - bool LatestEstimate(std::vector* ssrcs, - unsigned int* bitrate_bps) const override; - - void SetMinBitrate(int min_bitrate_bps) override; - - private: - void PickEstimatorFromHeader(const RTPHeader& header) - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - void PickEstimator() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - RemoteBitrateObserver* observer_; - Clock* const clock_; - mutable Mutex mutex_; - std::unique_ptr rbe_; - bool using_absolute_send_time_; - uint32_t packets_since_absolute_send_time_; - int min_bitrate_bps_; - }; + private: + void PickEstimatorFromHeader(const RTPHeader& header) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + void PickEstimator() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + Clock& clock_; const FieldTrialBasedConfig field_trial_config_; RembThrottler remb_throttler_; - WrappingBitrateEstimator remote_bitrate_estimator_; RemoteEstimatorProxy remote_estimator_proxy_; + + mutable Mutex mutex_; + std::unique_ptr rbe_ RTC_GUARDED_BY(mutex_); + bool using_absolute_send_time_ RTC_GUARDED_BY(mutex_); + uint32_t packets_since_absolute_send_time_ RTC_GUARDED_BY(mutex_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/receive_side_congestion_controller.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/receive_side_congestion_controller.cc index 61a126fbe3..4f238835e4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/receive_side_congestion_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/receive_side_congestion_controller.cc @@ -23,67 +23,24 @@ namespace { static const uint32_t kTimeOffsetSwitchThreshold = 30; } // namespace -ReceiveSideCongestionController::WrappingBitrateEstimator:: - WrappingBitrateEstimator(RemoteBitrateObserver* observer, Clock* clock) - : observer_(observer), - clock_(clock), - rbe_(new RemoteBitrateEstimatorSingleStream(observer_, clock_)), - using_absolute_send_time_(false), - 
packets_since_absolute_send_time_(0), - min_bitrate_bps_(congestion_controller::GetMinBitrateBps()) {} - -ReceiveSideCongestionController::WrappingBitrateEstimator:: - ~WrappingBitrateEstimator() = default; - -void ReceiveSideCongestionController::WrappingBitrateEstimator::IncomingPacket( - int64_t arrival_time_ms, - size_t payload_size, - const RTPHeader& header) { - MutexLock lock(&mutex_); - PickEstimatorFromHeader(header); - rbe_->IncomingPacket(arrival_time_ms, payload_size, header); -} - -void ReceiveSideCongestionController::WrappingBitrateEstimator::Process() { - MutexLock lock(&mutex_); - rbe_->Process(); -} - -int64_t ReceiveSideCongestionController::WrappingBitrateEstimator:: - TimeUntilNextProcess() { - MutexLock lock(&mutex_); - return rbe_->TimeUntilNextProcess(); -} - -void ReceiveSideCongestionController::WrappingBitrateEstimator::OnRttUpdate( - int64_t avg_rtt_ms, - int64_t max_rtt_ms) { +void ReceiveSideCongestionController::OnRttUpdate(int64_t avg_rtt_ms, + int64_t max_rtt_ms) { MutexLock lock(&mutex_); rbe_->OnRttUpdate(avg_rtt_ms, max_rtt_ms); } -void ReceiveSideCongestionController::WrappingBitrateEstimator::RemoveStream( - unsigned int ssrc) { +void ReceiveSideCongestionController::RemoveStream(uint32_t ssrc) { MutexLock lock(&mutex_); rbe_->RemoveStream(ssrc); } -bool ReceiveSideCongestionController::WrappingBitrateEstimator::LatestEstimate( - std::vector* ssrcs, - unsigned int* bitrate_bps) const { - MutexLock lock(&mutex_); - return rbe_->LatestEstimate(ssrcs, bitrate_bps); -} - -void ReceiveSideCongestionController::WrappingBitrateEstimator::SetMinBitrate( - int min_bitrate_bps) { +DataRate ReceiveSideCongestionController::LatestReceiveSideEstimate() const { MutexLock lock(&mutex_); - rbe_->SetMinBitrate(min_bitrate_bps); - min_bitrate_bps_ = min_bitrate_bps; + return rbe_->LatestEstimate(); } -void ReceiveSideCongestionController::WrappingBitrateEstimator:: - PickEstimatorFromHeader(const RTPHeader& header) { +void ReceiveSideCongestionController::PickEstimatorFromHeader( + const RTPHeader& header) { if (header.extension.hasAbsoluteSendTime) { // If we see AST in header, switch RBE strategy immediately. if (!using_absolute_send_time_) { @@ -109,14 +66,14 @@ void ReceiveSideCongestionController::WrappingBitrateEstimator:: } // Instantiate RBE for Time Offset or Absolute Send Time extensions. 
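The estimator-selection logic that moves from WrappingBitrateEstimator into ReceiveSideCongestionController keeps the same policy: switch to the absolute-send-time estimator as soon as that extension appears in a header, and fall back only after a long run of packets without it (kTimeOffsetSwitchThreshold = 30 in the context above). A standalone sketch of that policy with plain types, not the patched code itself:

#include <cstdint>
#include <iostream>

// Illustrative stand-ins for the two estimator strategies.
enum class Estimator { kSingleStream, kAbsSendTime };

constexpr uint32_t kTimeOffsetSwitchThreshold = 30;

struct EstimatorPicker {
  Estimator current = Estimator::kSingleStream;
  uint32_t packets_since_abs_send_time = 0;

  // Mirrors the shape of PickEstimatorFromHeader(): prefer abs-send-time
  // immediately when the extension shows up, fall back only after a long
  // enough run of packets without it.
  void OnPacket(bool has_absolute_send_time) {
    if (has_absolute_send_time) {
      packets_since_abs_send_time = 0;
      current = Estimator::kAbsSendTime;
    } else if (current == Estimator::kAbsSendTime &&
               ++packets_since_abs_send_time >= kTimeOffsetSwitchThreshold) {
      current = Estimator::kSingleStream;
      packets_since_abs_send_time = 0;
    }
  }
};

int main() {
  EstimatorPicker picker;
  picker.OnPacket(/*has_absolute_send_time=*/true);     // switches immediately
  for (int i = 0; i < 30; ++i) picker.OnPacket(false);  // falls back after 30
  std::cout << (picker.current == Estimator::kSingleStream) << "\n";  // 1
}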
-void ReceiveSideCongestionController::WrappingBitrateEstimator:: - PickEstimator() { +void ReceiveSideCongestionController::PickEstimator() { if (using_absolute_send_time_) { - rbe_.reset(new RemoteBitrateEstimatorAbsSendTime(observer_, clock_)); + rbe_ = std::make_unique<RemoteBitrateEstimatorAbsSendTime>(&remb_throttler_, + &clock_); } else { - rbe_.reset(new RemoteBitrateEstimatorSingleStream(observer_, clock_)); + rbe_ = std::make_unique<RemoteBitrateEstimatorSingleStream>( + &remb_throttler_, &clock_); } - rbe_->SetMinBitrate(min_bitrate_bps_); } ReceiveSideCongestionController::ReceiveSideCongestionController( @@ -124,12 +81,14 @@ ReceiveSideCongestionController::ReceiveSideCongestionController( RemoteEstimatorProxy::TransportFeedbackSender feedback_sender, RembThrottler::RembSender remb_sender, NetworkStateEstimator* network_state_estimator) - : remb_throttler_(std::move(remb_sender), clock), - remote_bitrate_estimator_(&remb_throttler_, clock), - remote_estimator_proxy_(clock, - std::move(feedback_sender), + : clock_(*clock), + remb_throttler_(std::move(remb_sender), clock), + remote_estimator_proxy_(std::move(feedback_sender), &field_trial_config_, - network_state_estimator) {} + network_state_estimator), + rbe_(new RemoteBitrateEstimatorSingleStream(&remb_throttler_, clock)), + using_absolute_send_time_(false), + packets_since_absolute_send_time_(0) {} void ReceiveSideCongestionController::OnReceivedPacket( int64_t arrival_time_ms, @@ -138,8 +97,9 @@ void ReceiveSideCongestionController::OnReceivedPacket( remote_estimator_proxy_.IncomingPacket(arrival_time_ms, payload_size, header); if (!header.extension.hasTransportSequenceNumber) { // Receive-side BWE. - remote_bitrate_estimator_.IncomingPacket(arrival_time_ms, payload_size, - header); + MutexLock lock(&mutex_); + PickEstimatorFromHeader(header); + rbe_->IncomingPacket(arrival_time_ms, payload_size, header); } } @@ -148,40 +108,18 @@ void ReceiveSideCongestionController::SetSendPeriodicFeedback( remote_estimator_proxy_.SetSendPeriodicFeedback(send_periodic_feedback); } -RemoteBitrateEstimator* -ReceiveSideCongestionController::GetRemoteBitrateEstimator(bool send_side_bwe) { - if (send_side_bwe) { - return &remote_estimator_proxy_; - } else { - return &remote_bitrate_estimator_; - } -} - -const RemoteBitrateEstimator* -ReceiveSideCongestionController::GetRemoteBitrateEstimator( - bool send_side_bwe) const { - if (send_side_bwe) { - return &remote_estimator_proxy_; - } else { - return &remote_bitrate_estimator_; - } -} - -void ReceiveSideCongestionController::OnRttUpdate(int64_t avg_rtt_ms, - int64_t max_rtt_ms) { - remote_bitrate_estimator_.OnRttUpdate(avg_rtt_ms, max_rtt_ms); -} - void ReceiveSideCongestionController::OnBitrateChanged(int bitrate_bps) { remote_estimator_proxy_.OnBitrateChanged(bitrate_bps); } -int64_t ReceiveSideCongestionController::TimeUntilNextProcess() { - return remote_bitrate_estimator_.TimeUntilNextProcess(); -} - -void ReceiveSideCongestionController::Process() { - remote_bitrate_estimator_.Process(); +TimeDelta ReceiveSideCongestionController::MaybeProcess() { + Timestamp now = clock_.CurrentTime(); + mutex_.Lock(); + TimeDelta time_until_rbe = rbe_->Process(); + mutex_.Unlock(); + TimeDelta time_until_rep = remote_estimator_proxy_.Process(now); + TimeDelta time_until = std::min(time_until_rbe, time_until_rep); + return std::max(time_until, TimeDelta::Zero()); } void ReceiveSideCongestionController::SetMaxDesiredReceiveBitrate( @@ -189,4 +127,9 @@ void ReceiveSideCongestionController::SetMaxDesiredReceiveBitrate( remb_throttler_.SetMaxDesiredReceiveBitrate(bitrate); }
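With the Module interface gone, MaybeProcess() above returns how long the caller should wait before polling again, never negative and taken as the minimum over the two internal estimators. A minimal sketch of a polling driver under that contract; the controller type and scheduling loop here are illustrative stand-ins, not the call site used by the patch:

#include <algorithm>
#include <chrono>
#include <thread>

// Illustrative stand-in for an object exposing the MaybeProcess() shape.
struct FakeController {
  std::chrono::milliseconds MaybeProcess() {
    // Run periodic work here, then report when to come back.
    return std::chrono::milliseconds(25);
  }
};

int main() {
  FakeController controller;
  for (int i = 0; i < 4; ++i) {
    auto wait = controller.MaybeProcess();
    // The real method clamps its result to zero, so a driver like this
    // never has to sleep a negative duration.
    std::this_thread::sleep_for(std::max(wait, std::chrono::milliseconds(0)));
  }
  return 0;
}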
+void ReceiveSideCongestionController::SetTransportOverhead( + DataSize overhead_per_packet) { + remote_estimator_proxy_.SetTransportOverhead(overhead_per_packet); +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/remb_throttler.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/remb_throttler.h index 2f610c1df9..85292cbc09 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/remb_throttler.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/remb_throttler.h @@ -16,7 +16,7 @@ #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "modules/remote_bitrate_estimator/remote_estimator_proxy.h" +#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "rtc_base/synchronization/mutex.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.cc index ba77aacab7..ffa373aeba 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.cc @@ -14,6 +14,7 @@ #include #include "api/units/data_rate.h" +#include "modules/pacing/pacing_controller.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/safe_minmax.h" @@ -62,7 +63,8 @@ absl::optional CongestionControlHandler::GetUpdate() { if (!network_available_) { pause_encoding = true; } else if (!disable_pacer_emergency_stop_ && - pacer_expected_queue_ms_ > PacedSender::kMaxQueueLengthMs) { + pacer_expected_queue_ms_ > + PacingController::kMaxExpectedQueueLength.ms()) { pause_encoding = true; } if (pause_encoding) diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.h index 1da6463219..d8e7263a02 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.h @@ -18,8 +18,6 @@ #include "api/transport/network_types.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" -#include "modules/pacing/paced_sender.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/system/no_unique_address.h" namespace webrtc { @@ -33,6 +31,9 @@ class CongestionControlHandler { CongestionControlHandler(); ~CongestionControlHandler(); + CongestionControlHandler(const CongestionControlHandler&) = delete; + CongestionControlHandler& operator=(const CongestionControlHandler&) = delete; + void SetTargetRate(TargetTransferRate new_target_rate); void SetNetworkAvailability(bool network_available); void SetPacerQueue(TimeDelta expected_queue_time); @@ -48,7 +49,6 @@ class CongestionControlHandler { int64_t pacer_expected_queue_ms_ = 0; RTC_NO_UNIQUE_ADDRESS SequenceChecker sequenced_checker_; - RTC_DISALLOW_COPY_AND_ASSIGN(CongestionControlHandler); }; } // namespace webrtc #endif // MODULES_CONGESTION_CONTROLLER_RTP_CONTROL_HANDLER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter.cc index 87691bf263..d4cc915fd1 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter.cc @@ -22,7 +22,6 @@ #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { @@ -206,7 +205,7 @@ TransportFeedbackAdapter::ProcessTransportFeedbackInner( current_offset_ += delta; } } - last_timestamp_ = feedback.GetBaseTime(); + last_timestamp_ = feedback.BaseTime(); std::vector packet_result_vector; packet_result_vector.reserve(feedback.GetPacketStatusCount()); diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter.h index deb7925d77..f9f939db9c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter.h @@ -18,6 +18,7 @@ #include "api/sequence_checker.h" #include "api/transport/network_types.h" +#include "api/units/timestamp.h" #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/network/sent_packet.h" @@ -91,7 +92,7 @@ class TransportFeedbackAdapter { InFlightBytesTracker in_flight_; Timestamp current_offset_ = Timestamp::MinusInfinity(); - TimeDelta last_timestamp_ = TimeDelta::MinusInfinity(); + Timestamp last_timestamp_ = Timestamp::MinusInfinity(); rtc::NetworkRoute network_route_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.cc index 6ab3ad80fa..50987b2302 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.cc @@ -15,10 +15,17 @@ namespace webrtc { namespace { static const size_t kMaxPacketsInHistory = 5000; } + +TransportFeedbackDemuxer::TransportFeedbackDemuxer() { + // In case the construction thread is different from where the registration + // and callbacks occur, detach from the construction thread. 
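The TransportFeedbackDemuxer change that follows trades its two mutexes for a SequenceChecker: registration, AddPacket and OnTransportFeedback must all run on one sequence, and the constructor detaches the checker so that sequence may differ from the constructing thread. A standalone sketch of the invariant being asserted, using plain std::thread bookkeeping rather than the rtc::SequenceChecker API (and, unlike the real checker, not itself thread-safe):

#include <cassert>
#include <thread>

// Minimal stand-in for the "all calls on one sequence" contract that
// RTC_DCHECK_RUN_ON(&observer_checker_) enforces in the hunks below.
class SingleSequenceChecker {
 public:
  // Like observer_checker_.Detach() in the constructor: forget the current
  // thread so the first call after detaching re-binds the checker.
  void Detach() { attached_ = false; }

  bool CalledOnValidSequence() {
    if (!attached_) {
      attached_ = true;
      id_ = std::this_thread::get_id();
    }
    return id_ == std::this_thread::get_id();
  }

 private:
  bool attached_ = false;
  std::thread::id id_;
};

int main() {
  SingleSequenceChecker checker;
  checker.Detach();                         // construction thread may differ
  assert(checker.CalledOnValidSequence());  // first use binds the sequence
  assert(checker.CalledOnValidSequence());  // later uses must match it
  return 0;
}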
+ observer_checker_.Detach(); +} + void TransportFeedbackDemuxer::RegisterStreamFeedbackObserver( std::vector ssrcs, StreamFeedbackObserver* observer) { - MutexLock lock(&observers_lock_); + RTC_DCHECK_RUN_ON(&observer_checker_); RTC_DCHECK(observer); RTC_DCHECK(absl::c_find_if(observers_, [=](const auto& pair) { return pair.second == observer; @@ -28,7 +35,7 @@ void TransportFeedbackDemuxer::RegisterStreamFeedbackObserver( void TransportFeedbackDemuxer::DeRegisterStreamFeedbackObserver( StreamFeedbackObserver* observer) { - MutexLock lock(&observers_lock_); + RTC_DCHECK_RUN_ON(&observer_checker_); RTC_DCHECK(observer); const auto it = absl::c_find_if( observers_, [=](const auto& pair) { return pair.second == observer; }); @@ -37,7 +44,7 @@ void TransportFeedbackDemuxer::DeRegisterStreamFeedbackObserver( } void TransportFeedbackDemuxer::AddPacket(const RtpPacketSendInfo& packet_info) { - MutexLock lock(&lock_); + RTC_DCHECK_RUN_ON(&observer_checker_); StreamFeedbackObserver::StreamPacketInfo info; info.ssrc = packet_info.media_ssrc; @@ -55,24 +62,22 @@ void TransportFeedbackDemuxer::AddPacket(const RtpPacketSendInfo& packet_info) { void TransportFeedbackDemuxer::OnTransportFeedback( const rtcp::TransportFeedback& feedback) { + RTC_DCHECK_RUN_ON(&observer_checker_); + std::vector stream_feedbacks; - { - MutexLock lock(&lock_); - for (const auto& packet : feedback.GetAllPackets()) { - int64_t seq_num = - seq_num_unwrapper_.UnwrapWithoutUpdate(packet.sequence_number()); - auto it = history_.find(seq_num); - if (it != history_.end()) { - auto packet_info = it->second; - packet_info.received = packet.received(); - stream_feedbacks.push_back(packet_info); - if (packet.received()) - history_.erase(it); - } + for (const auto& packet : feedback.GetAllPackets()) { + int64_t seq_num = + seq_num_unwrapper_.UnwrapWithoutUpdate(packet.sequence_number()); + auto it = history_.find(seq_num); + if (it != history_.end()) { + auto packet_info = it->second; + packet_info.received = packet.received(); + stream_feedbacks.push_back(std::move(packet_info)); + if (packet.received()) + history_.erase(it); } } - MutexLock lock(&observers_lock_); for (auto& observer : observers_) { std::vector selected_feedback; for (const auto& packet_info : stream_feedbacks) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.h index 634a37ea1a..7f4f5750d2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.h @@ -14,14 +14,26 @@ #include #include +#include "api/sequence_checker.h" #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/no_unique_address.h" namespace webrtc { -class TransportFeedbackDemuxer : public StreamFeedbackProvider { +// Implementation of StreamFeedbackProvider that provides a way for +// implementations of StreamFeedbackObserver to register for feedback callbacks +// for a given set of SSRCs. +// Registration methods need to be called from the same execution context +// (thread or task queue) and callbacks to +// StreamFeedbackObserver::OnPacketFeedbackVector will be made in that same +// context. +// TODO(tommi): This appears to be the only implementation of this interface. 
+// Do we need the interface? +class TransportFeedbackDemuxer final : public StreamFeedbackProvider { public: + TransportFeedbackDemuxer(); + // Implements StreamFeedbackProvider interface void RegisterStreamFeedbackObserver( std::vector ssrcs, @@ -32,17 +44,16 @@ class TransportFeedbackDemuxer : public StreamFeedbackProvider { void OnTransportFeedback(const rtcp::TransportFeedback& feedback); private: - Mutex lock_; - SequenceNumberUnwrapper seq_num_unwrapper_ RTC_GUARDED_BY(&lock_); + RTC_NO_UNIQUE_ADDRESS SequenceChecker observer_checker_; + SequenceNumberUnwrapper seq_num_unwrapper_ RTC_GUARDED_BY(&observer_checker_); std::map history_ - RTC_GUARDED_BY(&lock_); + RTC_GUARDED_BY(&observer_checker_); // Maps a set of ssrcs to corresponding observer. Vectors are used rather than // set/map to ensure that the processing order is consistent independently of // the randomized ssrcs. - Mutex observers_lock_; std::vector, StreamFeedbackObserver*>> - observers_ RTC_GUARDED_BY(&observers_lock_); + observers_ RTC_GUARDED_BY(&observer_checker_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/include/module.h b/TMessagesProj/jni/voip/webrtc/modules/include/module.h deleted file mode 100644 index 3046390e70..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/include/module.h +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_INCLUDE_MODULE_H_ -#define MODULES_INCLUDE_MODULE_H_ - -#include - -namespace webrtc { - -class ProcessThread; - -class Module { - public: - // Returns the number of milliseconds until the module wants a worker - // thread to call Process. - // This method is called on the same worker thread as Process will - // be called on. - // TODO(tommi): Almost all implementations of this function, need to know - // the current tick count. Consider passing it as an argument. It could - // also improve the accuracy of when the next callback occurs since the - // thread that calls Process() will also have it's tick count reference - // which might not match with what the implementations use. - virtual int64_t TimeUntilNextProcess() = 0; - - // Process any pending tasks such as timeouts. - // Called on a worker thread. - virtual void Process() = 0; - - // This method is called when the module is attached to a *running* process - // thread or detached from one. In the case of detaching, `process_thread` - // will be nullptr. - // - // This method will be called in the following cases: - // - // * Non-null process_thread: - // * ProcessThread::RegisterModule() is called while the thread is running. - // * ProcessThread::Start() is called and RegisterModule has previously - // been called. The thread will be started immediately after notifying - // all modules. - // - // * Null process_thread: - // * ProcessThread::DeRegisterModule() is called while the thread is - // running. - // * ProcessThread::Stop() was called and the thread has been stopped. - // - // NOTE: This method is not called from the worker thread itself, but from - // the thread that registers/deregisters the module or calls Start/Stop. 
- virtual void ProcessThreadAttached(ProcessThread* process_thread) {} - - protected: - virtual ~Module() {} -}; -} // namespace webrtc - -#endif // MODULES_INCLUDE_MODULE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/OWNERS b/TMessagesProj/jni/voip/webrtc/modules/pacing/OWNERS index 0a77688b1e..7266156891 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/OWNERS @@ -1,6 +1,5 @@ stefan@webrtc.org mflodman@webrtc.org -asapersson@webrtc.org philipel@webrtc.org srte@webrtc.org sprang@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/bitrate_prober.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/bitrate_prober.cc index ed4b7760c7..e01c3ae5a9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/bitrate_prober.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/bitrate_prober.cc @@ -23,30 +23,17 @@ namespace webrtc { namespace { -// The min probe packet size is scaled with the bitrate we're probing at. -// This defines the max min probe packet size, meaning that on high bitrates -// we have a min probe packet size of 200 bytes. -constexpr DataSize kMinProbePacketSize = DataSize::Bytes(200); - constexpr TimeDelta kProbeClusterTimeout = TimeDelta::Seconds(5); } // namespace BitrateProberConfig::BitrateProberConfig( - const WebRtcKeyValueConfig* key_value_config) - : min_probe_packets_sent("min_probe_packets_sent", 5), - min_probe_delta("min_probe_delta", TimeDelta::Millis(1)), - min_probe_duration("min_probe_duration", TimeDelta::Millis(15)), + const FieldTrialsView* key_value_config) + : min_probe_delta("min_probe_delta", TimeDelta::Millis(2)), max_probe_delay("max_probe_delay", TimeDelta::Millis(10)), - abort_delayed_probes("abort_delayed_probes", true) { - ParseFieldTrial( - {&min_probe_packets_sent, &min_probe_delta, &min_probe_duration, - &max_probe_delay, &abort_delayed_probes}, - key_value_config->Lookup("WebRTC-Bwe-ProbingConfiguration")); - ParseFieldTrial( - {&min_probe_packets_sent, &min_probe_delta, &min_probe_duration, - &max_probe_delay, &abort_delayed_probes}, - key_value_config->Lookup("WebRTC-Bwe-ProbingBehavior")); + min_packet_size("min_packet_size", DataSize::Bytes(200)) { + ParseFieldTrial({&min_probe_delta, &max_probe_delay, &min_packet_size}, + key_value_config->Lookup("WebRTC-Bwe-ProbingBehavior")); } BitrateProber::~BitrateProber() { @@ -56,7 +43,7 @@ BitrateProber::~BitrateProber() { total_failed_probe_count_); } -BitrateProber::BitrateProber(const WebRtcKeyValueConfig& field_trials) +BitrateProber::BitrateProber(const FieldTrialsView& field_trials) : probing_state_(ProbingState::kDisabled), next_probe_time_(Timestamp::PlusInfinity()), total_probe_count_(0), @@ -80,41 +67,46 @@ void BitrateProber::SetEnabled(bool enable) { void BitrateProber::OnIncomingPacket(DataSize packet_size) { // Don't initialize probing unless we have something large enough to start // probing. + // Note that the pacer can send several packets at once when sending a probe, + // and thus, packets can be smaller than needed for a probe. if (probing_state_ == ProbingState::kInactive && !clusters_.empty() && - packet_size >= std::min(RecommendedMinProbeSize(), kMinProbePacketSize)) { + packet_size >= + std::min(RecommendedMinProbeSize(), config_.min_packet_size.Get())) { // Send next probe right away. 
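The new trigger condition in OnIncomingPacket() above compares the packet against min(RecommendedMinProbeSize(), min_packet_size), where the recommended size scales with the probe rate (send rate times min_probe_delta after this change) and min_packet_size caps it at 200 bytes by default. A worked recalculation with the defaults shown in this hunk (min_probe_delta = 2 ms, min_packet_size = 200 bytes), plain arithmetic rather than the BitrateProber code:

#include <algorithm>
#include <cstdint>
#include <iostream>

// Recommended minimum probe size in bytes: probe rate times min_probe_delta.
int64_t RecommendedMinProbeSizeBytes(int64_t probe_rate_bps,
                                     int64_t min_probe_delta_ms) {
  return probe_rate_bps * min_probe_delta_ms / 8 / 1000;
}

int main() {
  constexpr int64_t kMinPacketSizeBytes = 200;  // config_.min_packet_size default
  for (int64_t rate_bps : {400'000, 1'000'000, 5'000'000}) {
    int64_t recommended = RecommendedMinProbeSizeBytes(rate_bps, /*min_probe_delta_ms=*/2);
    int64_t trigger = std::min(recommended, kMinPacketSizeBytes);
    std::cout << rate_bps / 1000 << " kbps -> recommended " << recommended
              << " B, probing starts on packets >= " << trigger << " B\n";
  }
  // 400 kbps -> 100 B threshold (scales with the probe rate)
  // 1 Mbps   -> 200 B threshold (capped by min_packet_size)
  // 5 Mbps   -> 200 B threshold (capped by min_packet_size)
}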
next_probe_time_ = Timestamp::MinusInfinity(); probing_state_ = ProbingState::kActive; } } -void BitrateProber::CreateProbeCluster(DataRate bitrate, - Timestamp now, - int cluster_id) { +void BitrateProber::CreateProbeCluster( + const ProbeClusterConfig& cluster_config) { RTC_DCHECK(probing_state_ != ProbingState::kDisabled); - RTC_DCHECK_GT(bitrate, DataRate::Zero()); total_probe_count_++; while (!clusters_.empty() && - now - clusters_.front().created_at > kProbeClusterTimeout) { + cluster_config.at_time - clusters_.front().requested_at > + kProbeClusterTimeout) { clusters_.pop(); total_failed_probe_count_++; } ProbeCluster cluster; - cluster.created_at = now; - cluster.pace_info.probe_cluster_min_probes = config_.min_probe_packets_sent; + cluster.requested_at = cluster_config.at_time; + cluster.pace_info.probe_cluster_min_probes = + cluster_config.target_probe_count; cluster.pace_info.probe_cluster_min_bytes = - (bitrate * config_.min_probe_duration.Get()).bytes(); + (cluster_config.target_data_rate * cluster_config.target_duration) + .bytes(); RTC_DCHECK_GE(cluster.pace_info.probe_cluster_min_bytes, 0); - cluster.pace_info.send_bitrate_bps = bitrate.bps(); - cluster.pace_info.probe_cluster_id = cluster_id; + cluster.pace_info.send_bitrate_bps = cluster_config.target_data_rate.bps(); + cluster.pace_info.probe_cluster_id = cluster_config.id; clusters_.push(cluster); RTC_LOG(LS_INFO) << "Probe cluster (bitrate:min bytes:min packets): (" << cluster.pace_info.send_bitrate_bps << ":" << cluster.pace_info.probe_cluster_min_bytes << ":" << cluster.pace_info.probe_cluster_min_probes << ")"; + // If we are already probing, continue to do so. Otherwise set it to // kInactive and wait for OnIncomingPacket to start the probing. if (probing_state_ != ProbingState::kActive) @@ -127,16 +119,6 @@ Timestamp BitrateProber::NextProbeTime(Timestamp now) const { return Timestamp::PlusInfinity(); } - // Legacy behavior, just warn about late probe and return as if not probing. - if (!config_.abort_delayed_probes && next_probe_time_.IsFinite() && - now - next_probe_time_ > config_.max_probe_delay.Get()) { - RTC_DLOG(LS_WARNING) << "Probe delay too high" - " (next_ms:" - << next_probe_time_.ms() << ", now_ms: " << now.ms() - << ")"; - return Timestamp::PlusInfinity(); - } - return next_probe_time_; } @@ -145,7 +127,7 @@ absl::optional BitrateProber::CurrentCluster(Timestamp now) { return absl::nullopt; } - if (config_.abort_delayed_probes && next_probe_time_.IsFinite() && + if (next_probe_time_.IsFinite() && now - next_probe_time_ > config_.max_probe_delay.Get()) { RTC_DLOG(LS_WARNING) << "Probe delay too high" " (next_ms:" @@ -163,16 +145,13 @@ absl::optional BitrateProber::CurrentCluster(Timestamp now) { return info; } -// Probe size is recommended based on the probe bitrate required. We choose -// a minimum of twice `kMinProbeDeltaMs` interval to allow scheduling to be -// feasible. 
DataSize BitrateProber::RecommendedMinProbeSize() const { if (clusters_.empty()) { return DataSize::Zero(); } DataRate send_rate = DataRate::BitsPerSec(clusters_.front().pace_info.send_bitrate_bps); - return 2 * send_rate * config_.min_probe_delta; + return send_rate * config_.min_probe_delta; } void BitrateProber::ProbeSent(Timestamp now, DataSize size) { @@ -215,6 +194,7 @@ Timestamp BitrateProber::CalculateNextProbeTime( DataSize sent_bytes = DataSize::Bytes(cluster.sent_bytes); DataRate send_bitrate = DataRate::BitsPerSec(cluster.pace_info.send_bitrate_bps); + TimeDelta delta = sent_bytes / send_bitrate; return cluster.started_at + delta; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/bitrate_prober.h b/TMessagesProj/jni/voip/webrtc/modules/pacing/bitrate_prober.h index 3ac431cee3..d2f1394262 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/bitrate_prober.h +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/bitrate_prober.h @@ -24,29 +24,27 @@ namespace webrtc { class RtcEventLog; struct BitrateProberConfig { - explicit BitrateProberConfig(const WebRtcKeyValueConfig* key_value_config); + explicit BitrateProberConfig(const FieldTrialsView* key_value_config); BitrateProberConfig(const BitrateProberConfig&) = default; BitrateProberConfig& operator=(const BitrateProberConfig&) = default; ~BitrateProberConfig() = default; - // The minimum number probing packets used. - FieldTrialParameter min_probe_packets_sent; // A minimum interval between probes to allow scheduling to be feasible. FieldTrialParameter min_probe_delta; - // The minimum probing duration. - FieldTrialParameter min_probe_duration; // Maximum amount of time each probe can be delayed. FieldTrialParameter max_probe_delay; - // If NextProbeTime() is called with a delay higher than specified by - // `max_probe_delay`, abort it. - FieldTrialParameter abort_delayed_probes; + // This is used to start sending a probe after a large enough packet. + // The min packet size is scaled with the bitrate we're probing at. + // This defines the max min packet size, meaning that on high bitrates + // a packet of at least this size is needed to trigger sending a probe. + FieldTrialParameter min_packet_size; }; // Note that this class isn't thread-safe by itself and therefore relies // on being protected by the caller. class BitrateProber { public: - explicit BitrateProber(const WebRtcKeyValueConfig& field_trials); + explicit BitrateProber(const FieldTrialsView& field_trials); ~BitrateProber(); void SetEnabled(bool enable); @@ -61,10 +59,8 @@ class BitrateProber { // with. void OnIncomingPacket(DataSize packet_size); - // Create a cluster used to probe for `bitrate_bps` with `num_probes` number - // of probes. - void CreateProbeCluster(DataRate bitrate, Timestamp now, int cluster_id); - + // Create a cluster used to probe. + void CreateProbeCluster(const ProbeClusterConfig& cluster_config); // Returns the time at which the next probe should be sent to get accurate // probing. If probing is not desired at this time, Timestamp::PlusInfinity() // will be returned. @@ -75,7 +71,8 @@ class BitrateProber { absl::optional CurrentCluster(Timestamp now); // Returns the minimum number of bytes that the prober recommends for - // the next probe, or zero if not probing. + // the next probe, or zero if not probing. A probe can consist of multiple + // packets that are sent back to back. DataSize RecommendedMinProbeSize() const; // Called to report to the prober that a probe has been sent. 
In case of @@ -104,7 +101,7 @@ class BitrateProber { int sent_probes = 0; int sent_bytes = 0; - Timestamp created_at = Timestamp::MinusInfinity(); + Timestamp requested_at = Timestamp::MinusInfinity(); Timestamp started_at = Timestamp::MinusInfinity(); int retries = 0; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.cc deleted file mode 100644 index acc492db92..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.cc +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/pacing/paced_sender.h" - -#include -#include -#include - -#include "absl/memory/memory.h" -#include "absl/strings/match.h" -#include "api/rtc_event_log/rtc_event_log.h" -#include "modules/utility/include/process_thread.h" -#include "rtc_base/checks.h" -#include "rtc_base/location.h" -#include "rtc_base/logging.h" -#include "rtc_base/time_utils.h" -#include "rtc_base/trace_event.h" -#include "system_wrappers/include/clock.h" - -namespace webrtc { -const int64_t PacedSender::kMaxQueueLengthMs = 2000; -const float PacedSender::kDefaultPaceMultiplier = 2.5f; - -PacedSender::PacedSender(Clock* clock, - PacketRouter* packet_router, - RtcEventLog* event_log, - const WebRtcKeyValueConfig* field_trials, - ProcessThread* process_thread) - : process_mode_( - (field_trials != nullptr && - absl::StartsWith(field_trials->Lookup("WebRTC-Pacer-DynamicProcess"), - "Enabled")) - ? PacingController::ProcessMode::kDynamic - : PacingController::ProcessMode::kPeriodic), - pacing_controller_(clock, - packet_router, - event_log, - field_trials, - process_mode_), - clock_(clock), - process_thread_(process_thread) { - if (process_thread_) - process_thread_->RegisterModule(&module_proxy_, RTC_FROM_HERE); -} - -PacedSender::~PacedSender() { - if (process_thread_) { - process_thread_->DeRegisterModule(&module_proxy_); - } -} - -void PacedSender::CreateProbeCluster(DataRate bitrate, int cluster_id) { - MutexLock lock(&mutex_); - return pacing_controller_.CreateProbeCluster(bitrate, cluster_id); -} - -void PacedSender::Pause() { - { - MutexLock lock(&mutex_); - pacing_controller_.Pause(); - } - - // Tell the process thread to call our TimeUntilNextProcess() method to get - // a new (longer) estimate for when to call Process(). - if (process_thread_) { - process_thread_->WakeUp(&module_proxy_); - } -} - -void PacedSender::Resume() { - { - MutexLock lock(&mutex_); - pacing_controller_.Resume(); - } - - // Tell the process thread to call our TimeUntilNextProcess() method to - // refresh the estimate for when to call Process(). 
- if (process_thread_) { - process_thread_->WakeUp(&module_proxy_); - } -} - -void PacedSender::SetCongestionWindow(DataSize congestion_window_size) { - { - MutexLock lock(&mutex_); - pacing_controller_.SetCongestionWindow(congestion_window_size); - } - MaybeWakupProcessThread(); -} - -void PacedSender::UpdateOutstandingData(DataSize outstanding_data) { - { - MutexLock lock(&mutex_); - pacing_controller_.UpdateOutstandingData(outstanding_data); - } - MaybeWakupProcessThread(); -} - -void PacedSender::SetPacingRates(DataRate pacing_rate, DataRate padding_rate) { - { - MutexLock lock(&mutex_); - pacing_controller_.SetPacingRates(pacing_rate, padding_rate); - } - MaybeWakupProcessThread(); -} - -void PacedSender::EnqueuePackets( - std::vector> packets) { - { - TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("webrtc"), - "PacedSender::EnqueuePackets"); - MutexLock lock(&mutex_); - for (auto& packet : packets) { - TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"), - "PacedSender::EnqueuePackets::Loop", "sequence_number", - packet->SequenceNumber(), "rtp_timestamp", - packet->Timestamp()); - - RTC_DCHECK_GE(packet->capture_time_ms(), 0); - pacing_controller_.EnqueuePacket(std::move(packet)); - } - } - MaybeWakupProcessThread(); -} - -void PacedSender::SetAccountForAudioPackets(bool account_for_audio) { - MutexLock lock(&mutex_); - pacing_controller_.SetAccountForAudioPackets(account_for_audio); -} - -void PacedSender::SetIncludeOverhead() { - MutexLock lock(&mutex_); - pacing_controller_.SetIncludeOverhead(); -} - -void PacedSender::SetTransportOverhead(DataSize overhead_per_packet) { - MutexLock lock(&mutex_); - pacing_controller_.SetTransportOverhead(overhead_per_packet); -} - -TimeDelta PacedSender::ExpectedQueueTime() const { - MutexLock lock(&mutex_); - return pacing_controller_.ExpectedQueueTime(); -} - -DataSize PacedSender::QueueSizeData() const { - MutexLock lock(&mutex_); - return pacing_controller_.QueueSizeData(); -} - -absl::optional PacedSender::FirstSentPacketTime() const { - MutexLock lock(&mutex_); - return pacing_controller_.FirstSentPacketTime(); -} - -TimeDelta PacedSender::OldestPacketWaitTime() const { - MutexLock lock(&mutex_); - Timestamp oldest_packet = pacing_controller_.OldestPacketEnqueueTime(); - if (oldest_packet.IsInfinite()) - return TimeDelta::Zero(); - - // (webrtc:9716): The clock is not always monotonic. - Timestamp current = clock_->CurrentTime(); - if (current < oldest_packet) - return TimeDelta::Zero(); - return current - oldest_packet; -} - -int64_t PacedSender::TimeUntilNextProcess() { - MutexLock lock(&mutex_); - - Timestamp next_send_time = pacing_controller_.NextSendTime(); - TimeDelta sleep_time = - std::max(TimeDelta::Zero(), next_send_time - clock_->CurrentTime()); - if (process_mode_ == PacingController::ProcessMode::kDynamic) { - return std::max(sleep_time, PacingController::kMinSleepTime).ms(); - } - return sleep_time.ms(); -} - -void PacedSender::Process() { - MutexLock lock(&mutex_); - pacing_controller_.ProcessPackets(); -} - -void PacedSender::ProcessThreadAttached(ProcessThread* process_thread) { - RTC_LOG(LS_INFO) << "ProcessThreadAttached 0x" << process_thread; - RTC_DCHECK(!process_thread || process_thread == process_thread_); -} - -void PacedSender::MaybeWakupProcessThread() { - // Tell the process thread to call our TimeUntilNextProcess() method to get - // a new time for when to call Process(). 
- if (process_thread_ && - process_mode_ == PacingController::ProcessMode::kDynamic) { - process_thread_->WakeUp(&module_proxy_); - } -} - -void PacedSender::SetQueueTimeLimit(TimeDelta limit) { - { - MutexLock lock(&mutex_); - pacing_controller_.SetQueueTimeLimit(limit); - } - MaybeWakupProcessThread(); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.h b/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.h deleted file mode 100644 index fe29bc567b..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.h +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_PACING_PACED_SENDER_H_ -#define MODULES_PACING_PACED_SENDER_H_ - -#include -#include - -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/function_view.h" -#include "api/transport/field_trial_based_config.h" -#include "api/transport/network_types.h" -#include "api/transport/webrtc_key_value_config.h" -#include "modules/include/module.h" -#include "modules/pacing/bitrate_prober.h" -#include "modules/pacing/interval_budget.h" -#include "modules/pacing/pacing_controller.h" -#include "modules/pacing/packet_router.h" -#include "modules/pacing/rtp_packet_pacer.h" -#include "modules/rtp_rtcp/include/rtp_packet_sender.h" -#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "modules/utility/include/process_thread.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_annotations.h" - -namespace webrtc { -class Clock; -class RtcEventLog; - -// TODO(bugs.webrtc.org/10937): Remove the inheritance from Module after -// updating dependencies. -class PacedSender : public Module, - public RtpPacketPacer, - public RtpPacketSender { - public: - // Expected max pacer delay in ms. If ExpectedQueueTime() is higher than - // this value, the packet producers should wait (eg drop frames rather than - // encoding them). Bitrate sent may temporarily exceed target set by - // UpdateBitrate() so that this limit will be upheld. - static const int64_t kMaxQueueLengthMs; - // Pacing-rate relative to our target send rate. - // Multiplicative factor that is applied to the target bitrate to calculate - // the number of bytes that can be transmitted per interval. - // Increasing this factor will result in lower delays in cases of bitrate - // overshoots from the encoder. - static const float kDefaultPaceMultiplier; - - // TODO(bugs.webrtc.org/10937): Make the `process_thread` argument be non - // optional once all callers have been updated. - PacedSender(Clock* clock, - PacketRouter* packet_router, - RtcEventLog* event_log, - const WebRtcKeyValueConfig* field_trials = nullptr, - ProcessThread* process_thread = nullptr); - - ~PacedSender() override; - - // Methods implementing RtpPacketSender. - - // Adds the packet to the queue and calls PacketRouter::SendPacket() when - // it's time to send. - void EnqueuePackets( - std::vector> packet) override; - - // Methods implementing RtpPacketPacer: - - void CreateProbeCluster(DataRate bitrate, int cluster_id) override; - - // Temporarily pause all sending. 
- void Pause() override; - - // Resume sending packets. - void Resume() override; - - void SetCongestionWindow(DataSize congestion_window_size) override; - void UpdateOutstandingData(DataSize outstanding_data) override; - - // Sets the pacing rates. Must be called once before packets can be sent. - void SetPacingRates(DataRate pacing_rate, DataRate padding_rate) override; - - // Currently audio traffic is not accounted by pacer and passed through. - // With the introduction of audio BWE audio traffic will be accounted for - // the pacer budget calculation. The audio traffic still will be injected - // at high priority. - void SetAccountForAudioPackets(bool account_for_audio) override; - - void SetIncludeOverhead() override; - void SetTransportOverhead(DataSize overhead_per_packet) override; - - // Returns the time since the oldest queued packet was enqueued. - TimeDelta OldestPacketWaitTime() const override; - - DataSize QueueSizeData() const override; - - // Returns the time when the first packet was sent; - absl::optional FirstSentPacketTime() const override; - - // Returns the number of milliseconds it will take to send the current - // packets in the queue, given the current size and bitrate, ignoring prio. - TimeDelta ExpectedQueueTime() const override; - - void SetQueueTimeLimit(TimeDelta limit) override; - - // Below are methods specific to this implementation, such as things related - // to module processing thread specifics or methods exposed for test. - - private: - // Methods implementing Module. - // TODO(bugs.webrtc.org/10937): Remove the inheritance from Module once all - // use of it has been cleared up. - - // Returns the number of milliseconds until the module want a worker thread - // to call Process. - int64_t TimeUntilNextProcess() override; - - // TODO(bugs.webrtc.org/10937): Make this private (and non virtual) once - // dependencies have been updated to not call this via the PacedSender - // interface. - public: - // Process any pending packets in the queue(s). - void Process() override; - - private: - // Called when the prober is associated with a process thread. - void ProcessThreadAttached(ProcessThread* process_thread) override; - - // In dynamic process mode, refreshes the next process time. - void MaybeWakupProcessThread(); - - // Private implementation of Module to not expose those implementation details - // publicly and control when the class is registered/deregistered. 
- class ModuleProxy : public Module { - public: - explicit ModuleProxy(PacedSender* delegate) : delegate_(delegate) {} - - private: - int64_t TimeUntilNextProcess() override { - return delegate_->TimeUntilNextProcess(); - } - void Process() override { return delegate_->Process(); } - void ProcessThreadAttached(ProcessThread* process_thread) override { - return delegate_->ProcessThreadAttached(process_thread); - } - - PacedSender* const delegate_; - } module_proxy_{this}; - - mutable Mutex mutex_; - const PacingController::ProcessMode process_mode_; - PacingController pacing_controller_ RTC_GUARDED_BY(mutex_); - - Clock* const clock_; - ProcessThread* const process_thread_; -}; -} // namespace webrtc -#endif // MODULES_PACING_PACED_SENDER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc index 548540a208..a926e32545 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc @@ -18,7 +18,6 @@ #include "absl/strings/match.h" #include "modules/pacing/bitrate_prober.h" #include "modules/pacing/interval_budget.h" -#include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" @@ -34,107 +33,59 @@ constexpr TimeDelta kCongestedPacketInterval = TimeDelta::Millis(500); // The maximum debt level, in terms of time, capped when sending packets. constexpr TimeDelta kMaxDebtInTime = TimeDelta::Millis(500); constexpr TimeDelta kMaxElapsedTime = TimeDelta::Seconds(2); +constexpr TimeDelta kTargetPaddingDuration = TimeDelta::Millis(5); -// Upper cap on process interval, in case process has not been called in a long -// time. Applies only to periodic mode. -constexpr TimeDelta kMaxProcessingInterval = TimeDelta::Millis(30); - -// Allow probes to be processed slightly ahead of inteded send time. Currently -// set to 1ms as this is intended to allow times be rounded down to the nearest -// millisecond. -constexpr TimeDelta kMaxEarlyProbeProcessing = TimeDelta::Millis(1); - -constexpr int kFirstPriority = 0; - -bool IsDisabled(const WebRtcKeyValueConfig& field_trials, - absl::string_view key) { +bool IsDisabled(const FieldTrialsView& field_trials, absl::string_view key) { return absl::StartsWith(field_trials.Lookup(key), "Disabled"); } -bool IsEnabled(const WebRtcKeyValueConfig& field_trials, - absl::string_view key) { +bool IsEnabled(const FieldTrialsView& field_trials, absl::string_view key) { return absl::StartsWith(field_trials.Lookup(key), "Enabled"); } -TimeDelta GetDynamicPaddingTarget(const WebRtcKeyValueConfig& field_trials) { - FieldTrialParameter padding_target("timedelta", - TimeDelta::Millis(5)); - ParseFieldTrial({&padding_target}, - field_trials.Lookup("WebRTC-Pacer-DynamicPaddingTarget")); - return padding_target.Get(); -} - -int GetPriorityForType(RtpPacketMediaType type) { - // Lower number takes priority over higher. - switch (type) { - case RtpPacketMediaType::kAudio: - // Audio is always prioritized over other packet types. - return kFirstPriority + 1; - case RtpPacketMediaType::kRetransmission: - // Send retransmissions before new media. - return kFirstPriority + 2; - case RtpPacketMediaType::kVideo: - case RtpPacketMediaType::kForwardErrorCorrection: - // Video has "normal" priority, in the old speak. - // Send redundancy concurrently to video. 
If it is delayed it might have a - // lower chance of being useful. - return kFirstPriority + 3; - case RtpPacketMediaType::kPadding: - // Packets that are in themselves likely useless, only sent to keep the - // BWE high. - return kFirstPriority + 4; - } - RTC_CHECK_NOTREACHED(); -} - } // namespace const TimeDelta PacingController::kMaxExpectedQueueLength = TimeDelta::Millis(2000); -const float PacingController::kDefaultPaceMultiplier = 2.5f; const TimeDelta PacingController::kPausedProcessInterval = kCongestedPacketInterval; const TimeDelta PacingController::kMinSleepTime = TimeDelta::Millis(1); +const TimeDelta PacingController::kMaxEarlyProbeProcessing = + TimeDelta::Millis(1); PacingController::PacingController(Clock* clock, PacketSender* packet_sender, - RtcEventLog* event_log, - const WebRtcKeyValueConfig* field_trials, - ProcessMode mode) - : mode_(mode), - clock_(clock), + const FieldTrialsView& field_trials) + : clock_(clock), packet_sender_(packet_sender), - fallback_field_trials_( - !field_trials ? std::make_unique() : nullptr), - field_trials_(field_trials ? field_trials : fallback_field_trials_.get()), + field_trials_(field_trials), drain_large_queues_( - !IsDisabled(*field_trials_, "WebRTC-Pacer-DrainQueue")), + !IsDisabled(field_trials_, "WebRTC-Pacer-DrainQueue")), send_padding_if_silent_( - IsEnabled(*field_trials_, "WebRTC-Pacer-PadInSilence")), - pace_audio_(IsEnabled(*field_trials_, "WebRTC-Pacer-BlockAudio")), + IsEnabled(field_trials_, "WebRTC-Pacer-PadInSilence")), + pace_audio_(IsEnabled(field_trials_, "WebRTC-Pacer-BlockAudio")), ignore_transport_overhead_( - IsEnabled(*field_trials_, "WebRTC-Pacer-IgnoreTransportOverhead")), - padding_target_duration_(GetDynamicPaddingTarget(*field_trials_)), + IsEnabled(field_trials_, "WebRTC-Pacer-IgnoreTransportOverhead")), + fast_retransmissions_( + IsEnabled(field_trials_, "WebRTC-Pacer-FastRetransmissions")), min_packet_limit_(kDefaultMinPacketLimit), transport_overhead_per_packet_(DataSize::Zero()), + send_burst_interval_(TimeDelta::Zero()), last_timestamp_(clock_->CurrentTime()), paused_(false), - media_budget_(0), - padding_budget_(0), media_debt_(DataSize::Zero()), padding_debt_(DataSize::Zero()), - media_rate_(DataRate::Zero()), + pacing_rate_(DataRate::Zero()), + adjusted_media_rate_(DataRate::Zero()), padding_rate_(DataRate::Zero()), - prober_(*field_trials_), + prober_(field_trials_), probing_send_failure_(false), - pacing_bitrate_(DataRate::Zero()), last_process_time_(clock->CurrentTime()), last_send_time_(last_process_time_), - packet_queue_(last_process_time_, field_trials_), - packet_counter_(0), - congestion_window_size_(DataSize::PlusInfinity()), - outstanding_data_(DataSize::Zero()), - queue_time_limit(kMaxExpectedQueueLength), + seen_first_packet_(false), + packet_queue_(/*creation_time=*/last_process_time_), + congested_(false), + queue_time_limit_(kMaxExpectedQueueLength), account_for_audio_(false), include_overhead_(false) { if (!drain_large_queues_) { @@ -143,7 +94,7 @@ PacingController::PacingController(Clock* clock, } FieldTrialParameter min_packet_limit_ms("", min_packet_limit_.ms()); ParseFieldTrial({&min_packet_limit_ms}, - field_trials_->Lookup("WebRTC-Pacer-MinPacketLimitMs")); + field_trials_.Lookup("WebRTC-Pacer-MinPacketLimitMs")); min_packet_limit_ = TimeDelta::Millis(min_packet_limit_ms.Get()); UpdateBudgetWithElapsedTime(min_packet_limit_); } @@ -151,7 +102,18 @@ PacingController::PacingController(Clock* clock, PacingController::~PacingController() = default; void 
PacingController::CreateProbeCluster(DataRate bitrate, int cluster_id) { - prober_.CreateProbeCluster(bitrate, CurrentTime(), cluster_id); + prober_.CreateProbeCluster({.at_time = CurrentTime(), + .target_data_rate = bitrate, + .target_duration = TimeDelta::Millis(15), + .target_probe_count = 5, + .id = cluster_id}); +} + +void PacingController::CreateProbeClusters( + rtc::ArrayView probe_cluster_configs) { + for (const ProbeClusterConfig probe_cluster_config : probe_cluster_configs) { + prober_.CreateProbeCluster(probe_cluster_config); + } } void PacingController::Pause() { @@ -172,29 +134,11 @@ bool PacingController::IsPaused() const { return paused_; } -void PacingController::SetCongestionWindow(DataSize congestion_window_size) { - const bool was_congested = Congested(); - congestion_window_size_ = congestion_window_size; - if (was_congested && !Congested()) { - TimeDelta elapsed_time = UpdateTimeAndGetElapsed(CurrentTime()); - UpdateBudgetWithElapsedTime(elapsed_time); +void PacingController::SetCongested(bool congested) { + if (congested_ && !congested) { + UpdateBudgetWithElapsedTime(UpdateTimeAndGetElapsed(CurrentTime())); } -} - -void PacingController::UpdateOutstandingData(DataSize outstanding_data) { - const bool was_congested = Congested(); - outstanding_data_ = outstanding_data; - if (was_congested && !Congested()) { - TimeDelta elapsed_time = UpdateTimeAndGetElapsed(CurrentTime()); - UpdateBudgetWithElapsedTime(elapsed_time); - } -} - -bool PacingController::Congested() const { - if (congestion_window_size_.IsFinite()) { - return outstanding_data_ >= congestion_window_size_; - } - return false; + congested_ = congested; } bool PacingController::IsProbing() const { @@ -215,31 +159,62 @@ Timestamp PacingController::CurrentTime() const { } void PacingController::SetProbingEnabled(bool enabled) { - RTC_CHECK_EQ(0, packet_counter_); + RTC_CHECK(!seen_first_packet_); prober_.SetEnabled(enabled); } void PacingController::SetPacingRates(DataRate pacing_rate, DataRate padding_rate) { - RTC_DCHECK_GT(pacing_rate, DataRate::Zero()); - media_rate_ = pacing_rate; + static constexpr DataRate kMaxRate = DataRate::KilobitsPerSec(100'000); + RTC_CHECK_GT(pacing_rate, DataRate::Zero()); + RTC_CHECK_GE(padding_rate, DataRate::Zero()); + if (padding_rate > pacing_rate) { + RTC_LOG(LS_WARNING) << "Padding rate " << padding_rate.kbps() + << "kbps is higher than the pacing rate " + << pacing_rate.kbps() << "kbps, capping."; + padding_rate = pacing_rate; + } + + if (pacing_rate > kMaxRate || padding_rate > kMaxRate) { + RTC_LOG(LS_WARNING) << "Very high pacing rates ( > " << kMaxRate.kbps() + << " kbps) configured: pacing = " << pacing_rate.kbps() + << " kbps, padding = " << padding_rate.kbps() + << " kbps."; + } + pacing_rate_ = pacing_rate; padding_rate_ = padding_rate; - pacing_bitrate_ = pacing_rate; - padding_budget_.set_target_rate_kbps(padding_rate.kbps()); + MaybeUpdateMediaRateDueToLongQueue(CurrentTime()); - RTC_LOG(LS_VERBOSE) << "bwe:pacer_updated pacing_kbps=" - << pacing_bitrate_.kbps() + RTC_LOG(LS_VERBOSE) << "bwe:pacer_updated pacing_kbps=" << pacing_rate_.kbps() << " padding_budget_kbps=" << padding_rate.kbps(); } void PacingController::EnqueuePacket(std::unique_ptr packet) { - RTC_DCHECK(pacing_bitrate_ > DataRate::Zero()) + RTC_DCHECK(pacing_rate_ > DataRate::Zero()) << "SetPacingRate must be called before InsertPacket."; RTC_CHECK(packet->packet_type()); - // Get priority first and store in temporary, to avoid chance of object being - // moved before GetPriorityForType() being 
called. - const int priority = GetPriorityForType(*packet->packet_type()); - EnqueuePacketInternal(std::move(packet), priority); + + prober_.OnIncomingPacket(DataSize::Bytes(packet->payload_size())); + + const Timestamp now = CurrentTime(); + if (packet_queue_.Empty()) { + // If queue is empty, we need to "fast-forward" the last process time, + // so that we don't use passed time as budget for sending the first new + // packet. + Timestamp target_process_time = now; + Timestamp next_send_time = NextSendTime(); + if (next_send_time.IsFinite()) { + // There was already a valid planned send time, such as a keep-alive. + // Use that as last process time only if it's prior to now. + target_process_time = std::min(now, next_send_time); + } + UpdateBudgetWithElapsedTime(UpdateTimeAndGetElapsed(target_process_time)); + } + packet_queue_.Push(now, std::move(packet)); + seen_first_packet_ = true; + + // Queue length has increased, check if we need to change the pacing rate. + MaybeUpdateMediaRateDueToLongQueue(now); } void PacingController::SetAccountForAudioPackets(bool account_for_audio) { @@ -248,29 +223,39 @@ void PacingController::SetAccountForAudioPackets(bool account_for_audio) { void PacingController::SetIncludeOverhead() { include_overhead_ = true; - packet_queue_.SetIncludeOverhead(); } void PacingController::SetTransportOverhead(DataSize overhead_per_packet) { if (ignore_transport_overhead_) return; transport_overhead_per_packet_ = overhead_per_packet; - packet_queue_.SetTransportOverhead(overhead_per_packet); +} + +void PacingController::SetSendBurstInterval(TimeDelta burst_interval) { + send_burst_interval_ = burst_interval; } TimeDelta PacingController::ExpectedQueueTime() const { - RTC_DCHECK_GT(pacing_bitrate_, DataRate::Zero()); - return TimeDelta::Millis( - (QueueSizeData().bytes() * 8 * rtc::kNumMillisecsPerSec) / - pacing_bitrate_.bps()); + RTC_DCHECK_GT(adjusted_media_rate_, DataRate::Zero()); + return QueueSizeData() / adjusted_media_rate_; } size_t PacingController::QueueSizePackets() const { - return packet_queue_.SizeInPackets(); + return rtc::checked_cast(packet_queue_.SizeInPackets()); +} + +const std::array& +PacingController::SizeInPacketsPerRtpPacketMediaType() const { + return packet_queue_.SizeInPacketsPerRtpPacketMediaType(); } DataSize PacingController::QueueSizeData() const { - return packet_queue_.Size(); + DataSize size = packet_queue_.SizeInPayloadBytes(); + if (include_overhead_) { + size += static_cast(packet_queue_.SizeInPackets()) * + transport_overhead_per_packet_; + } + return size; } DataSize PacingController::CurrentBufferLevel() const { @@ -285,39 +270,12 @@ Timestamp PacingController::OldestPacketEnqueueTime() const { return packet_queue_.OldestEnqueueTime(); } -void PacingController::EnqueuePacketInternal( - std::unique_ptr packet, - int priority) { - prober_.OnIncomingPacket(DataSize::Bytes(packet->payload_size())); - - Timestamp now = CurrentTime(); - - if (mode_ == ProcessMode::kDynamic && packet_queue_.Empty()) { - // If queue is empty, we need to "fast-forward" the last process time, - // so that we don't use passed time as budget for sending the first new - // packet. - Timestamp target_process_time = now; - Timestamp next_send_time = NextSendTime(); - if (next_send_time.IsFinite()) { - // There was already a valid planned send time, such as a keep-alive. - // Use that as last process time only if it's prior to now. 
- target_process_time = std::min(now, next_send_time); - } - - TimeDelta elapsed_time = UpdateTimeAndGetElapsed(target_process_time); - UpdateBudgetWithElapsedTime(elapsed_time); - last_process_time_ = target_process_time; - } - packet_queue_.Push(priority, now, packet_counter_++, std::move(packet)); -} - TimeDelta PacingController::UpdateTimeAndGetElapsed(Timestamp now) { // If no previous processing, or last process was "in the future" because of // early probe processing, then there is no elapsed time to add budget for. if (last_process_time_.IsMinusInfinity() || now < last_process_time_) { return TimeDelta::Zero(); } - RTC_DCHECK_GE(now, last_process_time_); TimeDelta elapsed_time = now - last_process_time_; last_process_time_ = now; if (elapsed_time > kMaxElapsedTime) { @@ -330,12 +288,10 @@ TimeDelta PacingController::UpdateTimeAndGetElapsed(Timestamp now) { } bool PacingController::ShouldSendKeepalive(Timestamp now) const { - if (send_padding_if_silent_ || paused_ || Congested() || - packet_counter_ == 0) { + if (send_padding_if_silent_ || paused_ || congested_ || !seen_first_packet_) { // We send a padding packet every 500 ms to ensure we won't get stuck in // congested state due to no feedback being received. - TimeDelta elapsed_since_last_send = now - last_send_time_; - if (elapsed_since_last_send >= kCongestedPacketInterval) { + if (now - last_send_time_ >= kCongestedPacketInterval) { return true; } } @@ -344,108 +300,77 @@ bool PacingController::ShouldSendKeepalive(Timestamp now) const { Timestamp PacingController::NextSendTime() const { const Timestamp now = CurrentTime(); + Timestamp next_send_time = Timestamp::PlusInfinity(); if (paused_) { return last_send_time_ + kPausedProcessInterval; } // If probing is active, that always takes priority. - if (prober_.is_probing()) { + if (prober_.is_probing() && !probing_send_failure_) { Timestamp probe_time = prober_.NextProbeTime(now); - // `probe_time` == PlusInfinity indicates no probe scheduled. - if (probe_time != Timestamp::PlusInfinity() && !probing_send_failure_) { - return probe_time; + if (!probe_time.IsPlusInfinity()) { + return probe_time.IsMinusInfinity() ? now : probe_time; } } - if (mode_ == ProcessMode::kPeriodic) { - // In periodic non-probing mode, we just have a fixed interval. - return last_process_time_ + min_packet_limit_; + // If queue contains a packet which should not be paced, its target send time + // is the time at which it was enqueued. + Timestamp unpaced_send_time = NextUnpacedSendTime(); + if (unpaced_send_time.IsFinite()) { + return unpaced_send_time; } - // In dynamic mode, figure out when the next packet should be sent, - // given the current conditions. - - if (!pace_audio_) { - // Not pacing audio, if leading packet is audio its target send - // time is the time at which it was enqueued. - absl::optional audio_enqueue_time = - packet_queue_.LeadingAudioPacketEnqueueTime(); - if (audio_enqueue_time.has_value()) { - return *audio_enqueue_time; - } - } - - if (Congested() || packet_counter_ == 0) { + if (congested_ || !seen_first_packet_) { // We need to at least send keep-alive packets with some interval. return last_send_time_ + kCongestedPacketInterval; } - // Check how long until we can send the next media packet. - if (media_rate_ > DataRate::Zero() && !packet_queue_.Empty()) { - return std::min(last_send_time_ + kPausedProcessInterval, - last_process_time_ + media_debt_ / media_rate_); - } - - // If we _don't_ have pending packets, check how long until we have - // bandwidth for padding packets. 
Both media and padding debts must - // have been drained to do this. - if (padding_rate_ > DataRate::Zero() && packet_queue_.Empty()) { - TimeDelta drain_time = - std::max(media_debt_ / media_rate_, padding_debt_ / padding_rate_); - return std::min(last_send_time_ + kPausedProcessInterval, - last_process_time_ + drain_time); + if (adjusted_media_rate_ > DataRate::Zero() && !packet_queue_.Empty()) { + // If packets are allowed to be sent in a burst, the + // debt is allowed to grow up to one packet more than what can be sent + // during 'send_burst_period_'. + TimeDelta drain_time = media_debt_ / adjusted_media_rate_; + next_send_time = + last_process_time_ + + ((send_burst_interval_ > drain_time) ? TimeDelta::Zero() : drain_time); + } else if (padding_rate_ > DataRate::Zero() && packet_queue_.Empty()) { + // If we _don't_ have pending packets, check how long until we have + // bandwidth for padding packets. Both media and padding debts must + // have been drained to do this. + RTC_DCHECK_GT(adjusted_media_rate_, DataRate::Zero()); + TimeDelta drain_time = std::max(media_debt_ / adjusted_media_rate_, + padding_debt_ / padding_rate_); + + if (drain_time.IsZero() && + (!media_debt_.IsZero() || !padding_debt_.IsZero())) { + // We have a non-zero debt, but drain time is smaller than tick size of + // TimeDelta, round it up to the smallest possible non-zero delta. + drain_time = TimeDelta::Micros(1); + } + next_send_time = last_process_time_ + drain_time; + } else { + // Nothing to do. + next_send_time = last_process_time_ + kPausedProcessInterval; } if (send_padding_if_silent_) { - return last_send_time_ + kPausedProcessInterval; + next_send_time = + std::min(next_send_time, last_send_time_ + kPausedProcessInterval); } - return last_process_time_ + kPausedProcessInterval; + + return next_send_time; } void PacingController::ProcessPackets() { - Timestamp now = CurrentTime(); + const Timestamp now = CurrentTime(); Timestamp target_send_time = now; - if (mode_ == ProcessMode::kDynamic) { - target_send_time = NextSendTime(); - TimeDelta early_execute_margin = - prober_.is_probing() ? kMaxEarlyProbeProcessing : TimeDelta::Zero(); - if (target_send_time.IsMinusInfinity()) { - target_send_time = now; - } else if (now < target_send_time - early_execute_margin) { - // We are too early, but if queue is empty still allow draining some debt. - // Probing is allowed to be sent up to kMinSleepTime early. - TimeDelta elapsed_time = UpdateTimeAndGetElapsed(now); - UpdateBudgetWithElapsedTime(elapsed_time); - return; - } - - if (target_send_time < last_process_time_) { - // After the last process call, at time X, the target send time - // shifted to be earlier than X. This should normally not happen - // but we want to make sure rounding errors or erratic behavior - // of NextSendTime() does not cause issue. In particular, if the - // buffer reduction of - // rate * (target_send_time - previous_process_time) - // in the main loop doesn't clean up the existing debt we may not - // be able to send again. We don't want to check this reordering - // there as it is the normal exit condtion when the buffer is - // exhausted and there are packets in the queue. 
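A note on the arithmetic: the reworked NextSendTime() above derives the next wake-up from the current media debt instead of a fixed process interval. The debt drains at adjusted_media_rate_, and if a send burst interval is configured the packet may go out immediately as long as the debt would drain within that interval. A minimal stand-alone sketch of that calculation, using plain doubles in place of webrtc::TimeDelta/DataRate/DataSize (the function name and units are illustrative, not part of the patch):

// Illustrative only: seconds, bytes and bytes-per-second as doubles.
double NextMediaSendOffsetSeconds(double media_debt_bytes,
                                  double adjusted_media_rate_bps,
                                  double send_burst_interval_s) {
  // Time needed to drain the outstanding media debt at the adjusted rate.
  const double drain_time_s = media_debt_bytes / adjusted_media_rate_bps;
  // When bursting is allowed and the debt fits within one burst interval,
  // the next packet may be sent right away (offset zero from the last
  // process time); otherwise wait until the debt has drained.
  return (send_burst_interval_s > drain_time_s) ? 0.0 : drain_time_s;
}

In the patch itself this offset is added to last_process_time_ and, when send_padding_if_silent_ is set, capped against last_send_time_ + kPausedProcessInterval.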
- UpdateBudgetWithElapsedTime(last_process_time_ - target_send_time); - target_send_time = last_process_time_; - } - } - - Timestamp previous_process_time = last_process_time_; - TimeDelta elapsed_time = UpdateTimeAndGetElapsed(now); if (ShouldSendKeepalive(now)) { + DataSize keepalive_data_sent = DataSize::Zero(); // We can not send padding unless a normal packet has first been sent. If // we do, timestamps get messed up. - if (packet_counter_ == 0) { - last_send_time_ = now; - } else { - DataSize keepalive_data_sent = DataSize::Zero(); + if (seen_first_packet_) { std::vector> keepalive_packets = packet_sender_->GeneratePadding(DataSize::Bytes(1)); for (auto& packet : keepalive_packets) { @@ -456,47 +381,31 @@ void PacingController::ProcessPackets() { EnqueuePacket(std::move(packet)); } } - OnPaddingSent(keepalive_data_sent); } + OnPacketSent(RtpPacketMediaType::kPadding, keepalive_data_sent, now); } if (paused_) { return; } - if (elapsed_time > TimeDelta::Zero()) { - DataRate target_rate = pacing_bitrate_; - DataSize queue_size_data = packet_queue_.Size(); - if (queue_size_data > DataSize::Zero()) { - // Assuming equal size packets and input/output rate, the average packet - // has avg_time_left_ms left to get queue_size_bytes out of the queue, if - // time constraint shall be met. Determine bitrate needed for that. - packet_queue_.UpdateQueueTime(now); - if (drain_large_queues_) { - TimeDelta avg_time_left = - std::max(TimeDelta::Millis(1), - queue_time_limit - packet_queue_.AverageQueueTime()); - DataRate min_rate_needed = queue_size_data / avg_time_left; - if (min_rate_needed > target_rate) { - target_rate = min_rate_needed; - RTC_LOG(LS_VERBOSE) << "bwe:large_pacing_queue pacing_rate_kbps=" - << target_rate.kbps(); - } - } - } + TimeDelta early_execute_margin = + prober_.is_probing() ? kMaxEarlyProbeProcessing : TimeDelta::Zero(); - if (mode_ == ProcessMode::kPeriodic) { - // In periodic processing mode, the IntevalBudget allows positive budget - // up to (process interval duration) * (target rate), so we only need to - // update it once before the packet sending loop. - media_budget_.set_target_rate_kbps(target_rate.kbps()); - UpdateBudgetWithElapsedTime(elapsed_time); - } else { - media_rate_ = target_rate; - } + target_send_time = NextSendTime(); + if (now + early_execute_margin < target_send_time) { + // We are too early, but if queue is empty still allow draining some debt. + // Probing is allowed to be sent up to kMinSleepTime early. + UpdateBudgetWithElapsedTime(UpdateTimeAndGetElapsed(now)); + return; + } + + TimeDelta elapsed_time = UpdateTimeAndGetElapsed(target_send_time); + + if (elapsed_time > TimeDelta::Zero()) { + UpdateBudgetWithElapsedTime(elapsed_time); } - bool first_packet_in_probe = false; PacedPacketInfo pacing_info; DataSize recommended_probe_size = DataSize::Zero(); bool is_probing = prober_.is_probing(); @@ -505,7 +414,6 @@ void PacingController::ProcessPackets() { // use actual send time rather than target. 
pacing_info = prober_.CurrentCluster(now).value_or(PacedPacketInfo()); if (pacing_info.probe_cluster_id != PacedPacketInfo::kNotAProbe) { - first_packet_in_probe = pacing_info.probe_cluster_bytes_sent == 0; recommended_probe_size = prober_.RecommendedMinProbeSize(); RTC_DCHECK_GT(recommended_probe_size, DataSize::Zero()); } else { @@ -515,101 +423,94 @@ void PacingController::ProcessPackets() { } DataSize data_sent = DataSize::Zero(); - - // The paused state is checked in the loop since it leaves the critical - // section allowing the paused state to be changed from other code. - while (!paused_) { - if (first_packet_in_probe) { - // If first packet in probe, insert a small padding packet so we have a - // more reliable start window for the rate estimation. - auto padding = packet_sender_->GeneratePadding(DataSize::Bytes(1)); - // If no RTP modules sending media are registered, we may not get a - // padding packet back. - if (!padding.empty()) { - // Insert with high priority so larger media packets don't preempt it. - EnqueuePacketInternal(std::move(padding[0]), kFirstPriority); - // We should never get more than one padding packets with a requested - // size of 1 byte. - RTC_DCHECK_EQ(padding.size(), 1u); - } - first_packet_in_probe = false; - } - - if (mode_ == ProcessMode::kDynamic && - previous_process_time < target_send_time) { - // Reduce buffer levels with amount corresponding to time between last - // process and target send time for the next packet. - // If the process call is late, that may be the time between the optimal - // send times for two packets we should already have sent. - UpdateBudgetWithElapsedTime(target_send_time - previous_process_time); - previous_process_time = target_send_time; - } - - // Fetch the next packet, so long as queue is not empty or budget is not + // Circuit breaker, making sure main loop isn't forever. + static constexpr int kMaxIterations = 1 << 16; + int iteration = 0; + int packets_sent = 0; + int padding_packets_generated = 0; + for (; iteration < kMaxIterations; ++iteration) { + // Fetch packet, so long as queue is not empty or budget is not // exhausted. std::unique_ptr rtp_packet = GetPendingPacket(pacing_info, target_send_time, now); - if (rtp_packet == nullptr) { // No packet available to send, check if we should send padding. DataSize padding_to_add = PaddingToAdd(recommended_probe_size, data_sent); if (padding_to_add > DataSize::Zero()) { std::vector> padding_packets = packet_sender_->GeneratePadding(padding_to_add); - if (padding_packets.empty()) { - // No padding packets were generated, quite send loop. - break; + if (!padding_packets.empty()) { + padding_packets_generated += padding_packets.size(); + for (auto& packet : padding_packets) { + EnqueuePacket(std::move(packet)); + } + // Continue loop to send the padding that was just added. + continue; + } else { + // Can't generate padding, still update padding budget for next send + // time. + UpdatePaddingBudgetWithSentData(padding_to_add); } - for (auto& packet : padding_packets) { - EnqueuePacket(std::move(packet)); - } - // Continue loop to send the padding that was just added. - continue; } - // Can't fetch new packet and no padding to send, exit send loop. 
break; - } - - RTC_DCHECK(rtp_packet); - RTC_DCHECK(rtp_packet->packet_type().has_value()); - const RtpPacketMediaType packet_type = *rtp_packet->packet_type(); - DataSize packet_size = DataSize::Bytes(rtp_packet->payload_size() + - rtp_packet->padding_size()); - - if (include_overhead_) { - packet_size += DataSize::Bytes(rtp_packet->headers_size()) + - transport_overhead_per_packet_; - } + } else { + RTC_DCHECK(rtp_packet); + RTC_DCHECK(rtp_packet->packet_type().has_value()); + const RtpPacketMediaType packet_type = *rtp_packet->packet_type(); + DataSize packet_size = DataSize::Bytes(rtp_packet->payload_size() + + rtp_packet->padding_size()); + + if (include_overhead_) { + packet_size += DataSize::Bytes(rtp_packet->headers_size()) + + transport_overhead_per_packet_; + } - packet_sender_->SendPacket(std::move(rtp_packet), pacing_info); - for (auto& packet : packet_sender_->FetchFec()) { - EnqueuePacket(std::move(packet)); - } - data_sent += packet_size; + packet_sender_->SendPacket(std::move(rtp_packet), pacing_info); + for (auto& packet : packet_sender_->FetchFec()) { + EnqueuePacket(std::move(packet)); + } + data_sent += packet_size; + ++packets_sent; - // Send done, update send/process time to the target send time. - OnPacketSent(packet_type, packet_size, target_send_time); + // Send done, update send time. + OnPacketSent(packet_type, packet_size, now); - // If we are currently probing, we need to stop the send loop when we have - // reached the send target. - if (is_probing && data_sent >= recommended_probe_size) { - break; - } + if (is_probing) { + pacing_info.probe_cluster_bytes_sent += packet_size.bytes(); + // If we are currently probing, we need to stop the send loop when we + // have reached the send target. + if (data_sent >= recommended_probe_size) { + break; + } + } - if (mode_ == ProcessMode::kDynamic) { // Update target send time in case that are more packets that we are late // in processing. - Timestamp next_send_time = NextSendTime(); - if (next_send_time.IsMinusInfinity()) { + target_send_time = NextSendTime(); + if (target_send_time > now) { + // Exit loop if not probing. + if (!is_probing) { + break; + } target_send_time = now; - } else { - target_send_time = std::min(now, next_send_time); } + UpdateBudgetWithElapsedTime(UpdateTimeAndGetElapsed(target_send_time)); } } - last_process_time_ = std::max(last_process_time_, previous_process_time); + if (iteration >= kMaxIterations) { + // Circuit break activated. Log warning, adjust send time and return. + // TODO(sprang): Consider completely clearing state. + RTC_LOG(LS_ERROR) << "PacingController exceeded max iterations in " + "send-loop: packets sent = " + << packets_sent << ", padding packets generated = " + << padding_packets_generated + << ", bytes sent = " << data_sent.bytes(); + last_send_time_ = now; + last_process_time_ = now; + return; + } if (is_probing) { probing_send_failure_ = data_sent == DataSize::Zero(); @@ -617,6 +518,11 @@ void PacingController::ProcessPackets() { prober_.ProbeSent(CurrentTime(), data_sent); } } + + // Queue length has probably decreased, check if pacing rate needs to updated. + // Poll the time again, since we might have enqueued new fec/padding packets + // with a later timestamp than `now`. 
+ MaybeUpdateMediaRateDueToLongQueue(CurrentTime()); } DataSize PacingController::PaddingToAdd(DataSize recommended_probe_size, @@ -626,14 +532,14 @@ DataSize PacingController::PaddingToAdd(DataSize recommended_probe_size, return DataSize::Zero(); } - if (Congested()) { + if (congested_) { // Don't add padding if congested, even if requested for probing. return DataSize::Zero(); } - if (packet_counter_ == 0) { - // We can not send padding unless a normal packet has first been sent. If we - // do, timestamps get messed up. + if (!seen_first_packet_) { + // We can not send padding unless a normal packet has first been sent. If + // we do, timestamps get messed up. return DataSize::Zero(); } @@ -644,11 +550,8 @@ DataSize PacingController::PaddingToAdd(DataSize recommended_probe_size, return DataSize::Zero(); } - if (mode_ == ProcessMode::kPeriodic) { - return DataSize::Bytes(padding_budget_.bytes_remaining()); - } else if (padding_rate_ > DataRate::Zero() && - padding_debt_ == DataSize::Zero()) { - return padding_target_duration_ * padding_rate_; + if (padding_rate_ > DataRate::Zero() && padding_debt_ == DataSize::Zero()) { + return kTargetPaddingDuration * padding_rate_; } return DataSize::Zero(); } @@ -657,38 +560,44 @@ std::unique_ptr PacingController::GetPendingPacket( const PacedPacketInfo& pacing_info, Timestamp target_send_time, Timestamp now) { + const bool is_probe = + pacing_info.probe_cluster_id != PacedPacketInfo::kNotAProbe; + // If first packet in probe, insert a small padding packet so we have a + // more reliable start window for the rate estimation. + if (is_probe && pacing_info.probe_cluster_bytes_sent == 0) { + auto padding = packet_sender_->GeneratePadding(DataSize::Bytes(1)); + // If no RTP modules sending media are registered, we may not get a + // padding packet back. + if (!padding.empty()) { + // We should never get more than one padding packets with a requested + // size of 1 byte. + RTC_DCHECK_EQ(padding.size(), 1u); + return std::move(padding[0]); + } + } + if (packet_queue_.Empty()) { return nullptr; } // First, check if there is any reason _not_ to send the next queued packet. - - // Unpaced audio packets and probes are exempted from send checks. - bool unpaced_audio_packet = - !pace_audio_ && packet_queue_.LeadingAudioPacketEnqueueTime().has_value(); - bool is_probe = pacing_info.probe_cluster_id != PacedPacketInfo::kNotAProbe; - if (!unpaced_audio_packet && !is_probe) { - if (Congested()) { + // Unpaced packets and probes are exempted from send checks. + if (NextUnpacedSendTime().IsInfinite() && !is_probe) { + if (congested_) { // Don't send anything if congested. return nullptr; } - if (mode_ == ProcessMode::kPeriodic) { - if (media_budget_.bytes_remaining() <= 0) { - // Not enough budget. + if (now <= target_send_time && send_burst_interval_.IsZero()) { + // We allow sending slightly early if we think that we would actually + // had been able to, had we been right on time - i.e. the current debt + // is not more than would be reduced to zero at the target sent time. + // If we allow packets to be sent in a burst, packet are allowed to be + // sent early. + TimeDelta flush_time = media_debt_ / adjusted_media_rate_; + if (now + flush_time > target_send_time) { return nullptr; } - } else { - // Dynamic processing mode. - if (now <= target_send_time) { - // We allow sending slightly early if we think that we would actually - // had been able to, had we been right on time - i.e. 
the current debt - // is not more than would be reduced to zero at the target sent time. - TimeDelta flush_time = media_debt_ / media_rate_; - if (now + flush_time > target_send_time) { - return nullptr; - } - } } } @@ -698,53 +607,79 @@ std::unique_ptr PacingController::GetPendingPacket( void PacingController::OnPacketSent(RtpPacketMediaType packet_type, DataSize packet_size, Timestamp send_time) { - if (!first_sent_packet_time_) { + if (!first_sent_packet_time_ && packet_type != RtpPacketMediaType::kPadding) { first_sent_packet_time_ = send_time; } + bool audio_packet = packet_type == RtpPacketMediaType::kAudio; - if (!audio_packet || account_for_audio_) { - // Update media bytes sent. + if ((!audio_packet || account_for_audio_) && packet_size > DataSize::Zero()) { UpdateBudgetWithSentData(packet_size); } - last_send_time_ = send_time; - last_process_time_ = send_time; -} -void PacingController::OnPaddingSent(DataSize data_sent) { - if (data_sent > DataSize::Zero()) { - UpdateBudgetWithSentData(data_sent); - } - Timestamp now = CurrentTime(); - last_send_time_ = now; - last_process_time_ = now; + last_send_time_ = send_time; } void PacingController::UpdateBudgetWithElapsedTime(TimeDelta delta) { - if (mode_ == ProcessMode::kPeriodic) { - delta = std::min(kMaxProcessingInterval, delta); - media_budget_.IncreaseBudget(delta.ms()); - padding_budget_.IncreaseBudget(delta.ms()); - } else { - media_debt_ -= std::min(media_debt_, media_rate_ * delta); - padding_debt_ -= std::min(padding_debt_, padding_rate_ * delta); - } + media_debt_ -= std::min(media_debt_, adjusted_media_rate_ * delta); + padding_debt_ -= std::min(padding_debt_, padding_rate_ * delta); } void PacingController::UpdateBudgetWithSentData(DataSize size) { - outstanding_data_ += size; - if (mode_ == ProcessMode::kPeriodic) { - media_budget_.UseBudget(size.bytes()); - padding_budget_.UseBudget(size.bytes()); - } else { - media_debt_ += size; - media_debt_ = std::min(media_debt_, media_rate_ * kMaxDebtInTime); - padding_debt_ += size; - padding_debt_ = std::min(padding_debt_, padding_rate_ * kMaxDebtInTime); - } + media_debt_ += size; + media_debt_ = std::min(media_debt_, adjusted_media_rate_ * kMaxDebtInTime); + UpdatePaddingBudgetWithSentData(size); +} + +void PacingController::UpdatePaddingBudgetWithSentData(DataSize size) { + padding_debt_ += size; + padding_debt_ = std::min(padding_debt_, padding_rate_ * kMaxDebtInTime); } void PacingController::SetQueueTimeLimit(TimeDelta limit) { - queue_time_limit = limit; + queue_time_limit_ = limit; +} + +void PacingController::MaybeUpdateMediaRateDueToLongQueue(Timestamp now) { + adjusted_media_rate_ = pacing_rate_; + if (!drain_large_queues_) { + return; + } + + DataSize queue_size_data = QueueSizeData(); + if (queue_size_data > DataSize::Zero()) { + // Assuming equal size packets and input/output rate, the average packet + // has avg_time_left_ms left to get queue_size_bytes out of the queue, if + // time constraint shall be met. Determine bitrate needed for that. 
+ packet_queue_.UpdateAverageQueueTime(now); + TimeDelta avg_time_left = + std::max(TimeDelta::Millis(1), + queue_time_limit_ - packet_queue_.AverageQueueTime()); + DataRate min_rate_needed = queue_size_data / avg_time_left; + if (min_rate_needed > pacing_rate_) { + adjusted_media_rate_ = min_rate_needed; + RTC_LOG(LS_VERBOSE) << "bwe:large_pacing_queue pacing_rate_kbps=" + << pacing_rate_.kbps(); + } + } +} + +Timestamp PacingController::NextUnpacedSendTime() const { + if (!pace_audio_) { + Timestamp leading_audio_send_time = + packet_queue_.LeadingPacketEnqueueTime(RtpPacketMediaType::kAudio); + if (leading_audio_send_time.IsFinite()) { + return leading_audio_send_time; + } + } + if (fast_retransmissions_) { + Timestamp leading_retransmission_send_time = + packet_queue_.LeadingPacketEnqueueTime( + RtpPacketMediaType::kRetransmission); + if (leading_retransmission_send_time.IsFinite()) { + return leading_retransmission_send_time; + } + } + return Timestamp::MinusInfinity(); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.h b/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.h index 5d6d26b917..94d2402de6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.h @@ -14,19 +14,19 @@ #include #include +#include #include #include #include #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/function_view.h" -#include "api/rtc_event_log/rtc_event_log.h" #include "api/transport/field_trial_based_config.h" #include "api/transport/network_types.h" -#include "api/transport/webrtc_key_value_config.h" #include "modules/pacing/bitrate_prober.h" #include "modules/pacing/interval_budget.h" -#include "modules/pacing/round_robin_packet_queue.h" +#include "modules/pacing/prioritized_packet_queue.h" #include "modules/pacing/rtp_packet_pacer.h" #include "modules/rtp_rtcp/include/rtp_packet_sender.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -43,13 +43,6 @@ namespace webrtc { // externally, via the PacingController::PacketSender interface. class PacingController { public: - // Periodic mode uses the IntervalBudget class for tracking bitrate - // budgets, and expected ProcessPackets() to be called a fixed rate, - // e.g. every 5ms as implemented by PacedSender. - // Dynamic mode allows for arbitrary time delta between calls to - // ProcessPackets. - enum class ProcessMode { kPeriodic, kDynamic }; - class PacketSender { public: virtual ~PacketSender() = default; @@ -59,6 +52,15 @@ class PacingController { virtual std::vector> FetchFec() = 0; virtual std::vector> GeneratePadding( DataSize size) = 0; + + // TODO(bugs.webrtc.org/11340): Make pure virtual once downstream projects + // have been updated. + virtual void OnAbortedRetransmissions( + uint32_t ssrc, + rtc::ArrayView sequence_numbers) {} + virtual absl::optional GetRtxSsrcForMedia(uint32_t ssrc) const { + return absl::nullopt; + } }; // Expected max pacer delay. If ExpectedQueueTime() is higher than @@ -66,12 +68,6 @@ class PacingController { // encoding them). Bitrate sent may temporarily exceed target set by // UpdateBitrate() so that this limit will be upheld. static const TimeDelta kMaxExpectedQueueLength; - // Pacing-rate relative to our target send rate. - // Multiplicative factor that is applied to the target bitrate to calculate - // the number of bytes that can be transmitted per interval. 
- // Increasing this factor will result in lower delays in cases of bitrate - // overshoots from the encoder. - static const float kDefaultPaceMultiplier; // If no media or paused, wake up at least every `kPausedProcessIntervalMs` in // order to send a keep-alive packet so we don't get stuck in a bad state due // to lack of feedback. @@ -79,11 +75,14 @@ class PacingController { static const TimeDelta kMinSleepTime; + // Allow probes to be processed slightly ahead of inteded send time. Currently + // set to 1ms as this is intended to allow times be rounded down to the + // nearest millisecond. + static const TimeDelta kMaxEarlyProbeProcessing; + PacingController(Clock* clock, PacketSender* packet_sender, - RtcEventLog* event_log, - const WebRtcKeyValueConfig* field_trials, - ProcessMode mode); + const FieldTrialsView& field_trials); ~PacingController(); @@ -91,18 +90,20 @@ class PacingController { // it's time to send. void EnqueuePacket(std::unique_ptr packet); + // ABSL_DEPRECATED("Use CreateProbeClusters instead") void CreateProbeCluster(DataRate bitrate, int cluster_id); + void CreateProbeClusters( + rtc::ArrayView probe_cluster_configs); void Pause(); // Temporarily pause all sending. void Resume(); // Resume sending packets. bool IsPaused() const; - void SetCongestionWindow(DataSize congestion_window_size); - void UpdateOutstandingData(DataSize outstanding_data); + void SetCongested(bool congested); // Sets the pacing rates. Must be called once before packets can be sent. void SetPacingRates(DataRate pacing_rate, DataRate padding_rate); - DataRate pacing_rate() const { return pacing_bitrate_; } + DataRate pacing_rate() const { return adjusted_media_rate_; } // Currently audio traffic is not accounted by pacer and passed through. // With the introduction of audio BWE audio traffic will be accounted for @@ -112,12 +113,20 @@ class PacingController { void SetIncludeOverhead(); void SetTransportOverhead(DataSize overhead_per_packet); + // The pacer is allowed to send enqued packets in bursts and can build up a + // packet "debt" that correspond to approximately the send rate during + // 'burst_interval'. + void SetSendBurstInterval(TimeDelta burst_interval); // Returns the time when the oldest packet was queued. Timestamp OldestPacketEnqueueTime() const; // Number of packets in the pacer queue. size_t QueueSizePackets() const; + // Number of packets in the pacer queue per media type (RtpPacketMediaType + // values are used as lookup index). + const std::array& SizeInPacketsPerRtpPacketMediaType() + const; // Totals size of packets in the pacer queue. DataSize QueueSizeData() const; @@ -145,19 +154,16 @@ class PacingController { // is available. void ProcessPackets(); - bool Congested() const; - bool IsProbing() const; private: - void EnqueuePacketInternal(std::unique_ptr packet, - int priority); TimeDelta UpdateTimeAndGetElapsed(Timestamp now); bool ShouldSendKeepalive(Timestamp now) const; // Updates the number of bytes that can be sent for the next time interval. 
void UpdateBudgetWithElapsedTime(TimeDelta delta); void UpdateBudgetWithSentData(DataSize size); + void UpdatePaddingBudgetWithSentData(DataSize size); DataSize PaddingToAdd(DataSize recommended_probe_size, DataSize data_sent) const; @@ -169,67 +175,61 @@ class PacingController { void OnPacketSent(RtpPacketMediaType packet_type, DataSize packet_size, Timestamp send_time); - void OnPaddingSent(DataSize padding_sent); + void MaybeUpdateMediaRateDueToLongQueue(Timestamp now); Timestamp CurrentTime() const; - const ProcessMode mode_; + // Helper methods for packet that may not be paced. Returns a finite Timestamp + // if a packet type is configured to not be paced and the packet queue has at + // least one packet of that type. Otherwise returns + // Timestamp::MinusInfinity(). + Timestamp NextUnpacedSendTime() const; + Clock* const clock_; PacketSender* const packet_sender_; - const std::unique_ptr fallback_field_trials_; - const WebRtcKeyValueConfig* field_trials_; + const FieldTrialsView& field_trials_; const bool drain_large_queues_; const bool send_padding_if_silent_; const bool pace_audio_; const bool ignore_transport_overhead_; - // In dynamic mode, indicates the target size when requesting padding, - // expressed as a duration in order to adjust for varying padding rate. - const TimeDelta padding_target_duration_; + const bool fast_retransmissions_; TimeDelta min_packet_limit_; - DataSize transport_overhead_per_packet_; + TimeDelta send_burst_interval_; // TODO(webrtc:9716): Remove this when we are certain clocks are monotonic. // The last millisecond timestamp returned by `clock_`. mutable Timestamp last_timestamp_; bool paused_; - // In dynamic mode, `media_budget_` and `padding_budget_` will be used to - // track when packets can be sent. - // In periodic mode, `media_debt_` and `padding_debt_` will be used together - // with the target rates. - - // This is the media budget, keeping track of how many bits of media - // we can pace out during the current interval. - IntervalBudget media_budget_; - // This is the padding budget, keeping track of how many bits of padding we're - // allowed to send out during the current interval. This budget will be - // utilized when there's no media to send. - IntervalBudget padding_budget_; - + // Amount of outstanding data for media and padding. DataSize media_debt_; DataSize padding_debt_; - DataRate media_rate_; + + // The target pacing rate, signaled via SetPacingRates(). + DataRate pacing_rate_; + // The media send rate, which might adjusted from pacing_rate_, e.g. if the + // pacing queue is growing too long. + DataRate adjusted_media_rate_; + // The padding target rate. We aim to fill up to this rate with padding what + // is not already used by media. 
DataRate padding_rate_; BitrateProber prober_; bool probing_send_failure_; - DataRate pacing_bitrate_; - Timestamp last_process_time_; Timestamp last_send_time_; absl::optional first_sent_packet_time_; + bool seen_first_packet_; - RoundRobinPacketQueue packet_queue_; - uint64_t packet_counter_; + PrioritizedPacketQueue packet_queue_; - DataSize congestion_window_size_; - DataSize outstanding_data_; + bool congested_; - TimeDelta queue_time_limit; + TimeDelta queue_time_limit_; bool account_for_audio_; bool include_overhead_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.cc index fcc7ee3449..b28d9776dc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.cc @@ -23,6 +23,7 @@ #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/system/unused.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -85,10 +86,10 @@ void PacketRouter::AddSendRtpModuleToMap(RtpRtcpInterface* rtp_module, } void PacketRouter::RemoveSendRtpModuleFromMap(uint32_t ssrc) { - auto kv = send_modules_map_.find(ssrc); - RTC_DCHECK(kv != send_modules_map_.end()); - send_modules_list_.remove(kv->second); - send_modules_map_.erase(kv); + auto it = send_modules_map_.find(ssrc); + RTC_DCHECK(it != send_modules_map_.end()); + send_modules_list_.remove(it->second); + send_modules_map_.erase(it); } void PacketRouter::RemoveSendRtpModule(RtpRtcpInterface* rtp_module) { @@ -142,13 +143,16 @@ void PacketRouter::SendPacket(std::unique_ptr packet, MutexLock lock(&modules_mutex_); // With the new pacer code path, transport sequence numbers are only set here, // on the pacer thread. Therefore we don't need atomics/synchronization. - if (packet->HasExtension()) { - packet->SetExtension((++transport_seq_) & 0xFFFF); + bool assign_transport_sequence_number = + packet->HasExtension(); + if (assign_transport_sequence_number) { + packet->SetExtension((transport_seq_ + 1) & + 0xFFFF); } uint32_t ssrc = packet->Ssrc(); - auto kv = send_modules_map_.find(ssrc); - if (kv == send_modules_map_.end()) { + auto it = send_modules_map_.find(ssrc); + if (it == send_modules_map_.end()) { RTC_LOG(LS_WARNING) << "Failed to send packet, matching RTP module not found " "or transport error. SSRC = " @@ -156,12 +160,18 @@ void PacketRouter::SendPacket(std::unique_ptr packet, return; } - RtpRtcpInterface* rtp_module = kv->second; + RtpRtcpInterface* rtp_module = it->second; if (!rtp_module->TrySendPacket(packet.get(), cluster_info)) { RTC_LOG(LS_WARNING) << "Failed to send packet, rejected by RTP module."; return; } + // Sending succeeded. + + if (assign_transport_sequence_number) { + ++transport_seq_; + } + if (rtp_module->SupportsRtxPayloadPadding()) { // This is now the last module to send media, and has the desired // properties needed for payload based padding. Cache it for later use. 
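The SendPacket() change above defers the transport sequence number increment until the RTP module has actually accepted the packet, so a rejected or failed send no longer consumes a number. A self-contained sketch of that ordering, with a toy Module type standing in for the real RtpRtcpInterface (names here are illustrative, not the WebRTC API):

#include <cstdint>

// Toy stand-in for an RTP module; always accepts the packet here.
struct Module {
  bool TrySend(uint16_t /*transport_wide_seq*/) { return true; }
};

class SeqAssigner {
 public:
  // Mirrors the ordering in the patch: tag the packet with the *next*
  // 16-bit sequence number, but only commit the increment after the
  // module reports a successful send.
  bool Send(Module& module) {
    const uint16_t candidate =
        static_cast<uint16_t>((transport_seq_ + 1) & 0xFFFF);
    if (!module.TrySend(candidate)) {
      return false;  // Rejected: transport_seq_ stays untouched.
    }
    ++transport_seq_;  // Sending succeeded, commit the number.
    return true;
  }

 private:
  uint64_t transport_seq_ = 0;
};

With this ordering, the value reported by CurrentTransportSequenceNumber() tracks the last packet actually handed off to an RTP module, which is the point of the "Sending succeeded" block in the hunk above.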
@@ -214,18 +224,38 @@ std::vector> PacketRouter::GeneratePadding( } } -#if RTC_TRACE_EVENTS_ENABLED for (auto& packet : padding_packets) { + RTC_UNUSED(packet); TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"), "PacketRouter::GeneratePadding::Loop", "sequence_number", packet->SequenceNumber(), "rtp_timestamp", packet->Timestamp()); } -#endif return padding_packets; } +void PacketRouter::OnAbortedRetransmissions( + uint32_t ssrc, + rtc::ArrayView sequence_numbers) { + MutexLock lock(&modules_mutex_); + auto it = send_modules_map_.find(ssrc); + if (it != send_modules_map_.end()) { + it->second->OnAbortedRetransmissions(sequence_numbers); + } +} + +absl::optional PacketRouter::GetRtxSsrcForMedia(uint32_t ssrc) const { + MutexLock lock(&modules_mutex_); + auto it = send_modules_map_.find(ssrc); + if (it != send_modules_map_.end() && it->second->SSRC() == ssrc) { + // A module is registered with the given SSRC, and that SSRC is the main + // media SSRC for that RTP module. + return it->second->RtxSsrc(); + } + return absl::nullopt; +} + uint16_t PacketRouter::CurrentTransportSequenceNumber() const { MutexLock lock(&modules_mutex_); return transport_seq_ & 0xFFFF; diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.h b/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.h index 9958a50b6e..68b82c6bd4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.h +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.h @@ -25,7 +25,6 @@ #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -44,6 +43,9 @@ class PacketRouter : public PacingController::PacketSender { explicit PacketRouter(uint16_t start_transport_seq); ~PacketRouter() override; + PacketRouter(const PacketRouter&) = delete; + PacketRouter& operator=(const PacketRouter&) = delete; + void AddSendRtpModule(RtpRtcpInterface* rtp_module, bool remb_candidate); void RemoveSendRtpModule(RtpRtcpInterface* rtp_module); @@ -56,6 +58,10 @@ class PacketRouter : public PacingController::PacketSender { std::vector> FetchFec() override; std::vector> GeneratePadding( DataSize size) override; + void OnAbortedRetransmissions( + uint32_t ssrc, + rtc::ArrayView sequence_numbers) override; + absl::optional GetRtxSsrcForMedia(uint32_t ssrc) const override; uint16_t CurrentTransportSequenceNumber() const; @@ -107,8 +113,6 @@ class PacketRouter : public PacingController::PacketSender { // process thread is gone. std::vector> pending_fec_packets_ RTC_GUARDED_BY(modules_mutex_); - - RTC_DISALLOW_COPY_AND_ASSIGN(PacketRouter); }; } // namespace webrtc #endif // MODULES_PACING_PACKET_ROUTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/prioritized_packet_queue.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/prioritized_packet_queue.cc new file mode 100644 index 0000000000..b3874a2324 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/prioritized_packet_queue.cc @@ -0,0 +1,279 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/pacing/prioritized_packet_queue.h" + +#include + +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace { + +constexpr int kAudioPrioLevel = 0; + +int GetPriorityForType(RtpPacketMediaType type) { + // Lower number takes priority over higher. + switch (type) { + case RtpPacketMediaType::kAudio: + // Audio is always prioritized over other packet types. + return kAudioPrioLevel; + case RtpPacketMediaType::kRetransmission: + // Send retransmissions before new media. + return kAudioPrioLevel + 1; + case RtpPacketMediaType::kVideo: + case RtpPacketMediaType::kForwardErrorCorrection: + // Video has "normal" priority, in the old speak. + // Send redundancy concurrently to video. If it is delayed it might have a + // lower chance of being useful. + return kAudioPrioLevel + 2; + case RtpPacketMediaType::kPadding: + // Packets that are in themselves likely useless, only sent to keep the + // BWE high. + return kAudioPrioLevel + 3; + } + RTC_CHECK_NOTREACHED(); +} + +} // namespace + +DataSize PrioritizedPacketQueue::QueuedPacket::PacketSize() const { + return DataSize::Bytes(packet->payload_size() + packet->padding_size()); +} + +PrioritizedPacketQueue::StreamQueue::StreamQueue(Timestamp creation_time) + : last_enqueue_time_(creation_time) {} + +bool PrioritizedPacketQueue::StreamQueue::EnqueuePacket(QueuedPacket packet, + int priority_level) { + bool first_packet_at_level = packets_[priority_level].empty(); + packets_[priority_level].push_back(std::move(packet)); + return first_packet_at_level; +} + +PrioritizedPacketQueue::QueuedPacket +PrioritizedPacketQueue::StreamQueue::DequePacket(int priority_level) { + RTC_DCHECK(!packets_[priority_level].empty()); + QueuedPacket packet = std::move(packets_[priority_level].front()); + packets_[priority_level].pop_front(); + return packet; +} + +bool PrioritizedPacketQueue::StreamQueue::HasPacketsAtPrio( + int priority_level) const { + return !packets_[priority_level].empty(); +} + +bool PrioritizedPacketQueue::StreamQueue::IsEmpty() const { + for (const std::deque& queue : packets_) { + if (!queue.empty()) { + return false; + } + } + return true; +} + +Timestamp PrioritizedPacketQueue::StreamQueue::LeadingPacketEnqueueTime( + int priority_level) const { + RTC_DCHECK(!packets_[priority_level].empty()); + return packets_[priority_level].begin()->enqueue_time; +} + +Timestamp PrioritizedPacketQueue::StreamQueue::LastEnqueueTime() const { + return last_enqueue_time_; +} + +PrioritizedPacketQueue::PrioritizedPacketQueue(Timestamp creation_time) + : queue_time_sum_(TimeDelta::Zero()), + pause_time_sum_(TimeDelta::Zero()), + size_packets_(0), + size_packets_per_media_type_({}), + size_payload_(DataSize::Zero()), + last_update_time_(creation_time), + paused_(false), + last_culling_time_(creation_time), + top_active_prio_level_(-1) {} + +void PrioritizedPacketQueue::Push(Timestamp enqueue_time, + std::unique_ptr packet) { + StreamQueue* stream_queue; + auto [it, inserted] = streams_.emplace(packet->Ssrc(), nullptr); + if (inserted) { + it->second = std::make_unique(enqueue_time); + } + stream_queue = it->second.get(); + + auto enqueue_time_iterator = + enqueue_times_.insert(enqueue_times_.end(), enqueue_time); + RTC_DCHECK(packet->packet_type().has_value()); + RtpPacketMediaType packet_type = packet->packet_type().value(); + int prio_level = GetPriorityForType(packet_type); + 
RTC_DCHECK_GE(prio_level, 0); + RTC_DCHECK_LT(prio_level, kNumPriorityLevels); + QueuedPacket queued_packed = {.packet = std::move(packet), + .enqueue_time = enqueue_time, + .enqueue_time_iterator = enqueue_time_iterator}; + // In order to figure out how much time a packet has spent in the queue + // while not in a paused state, we subtract the total amount of time the + // queue has been paused so far, and when the packet is popped we subtract + // the total amount of time the queue has been paused at that moment. This + // way we subtract the total amount of time the packet has spent in the + // queue while in a paused state. + UpdateAverageQueueTime(enqueue_time); + queued_packed.enqueue_time -= pause_time_sum_; + ++size_packets_; + ++size_packets_per_media_type_[static_cast(packet_type)]; + size_payload_ += queued_packed.PacketSize(); + + if (stream_queue->EnqueuePacket(std::move(queued_packed), prio_level)) { + // Number packets at `prio_level` for this steam is now non-zero. + streams_by_prio_[prio_level].push_back(stream_queue); + } + if (top_active_prio_level_ < 0 || prio_level < top_active_prio_level_) { + top_active_prio_level_ = prio_level; + } + + static constexpr TimeDelta kTimeout = TimeDelta::Millis(500); + if (enqueue_time - last_culling_time_ > kTimeout) { + for (auto it = streams_.begin(); it != streams_.end();) { + if (it->second->IsEmpty() && + it->second->LastEnqueueTime() + kTimeout < enqueue_time) { + streams_.erase(it++); + } else { + ++it; + } + } + last_culling_time_ = enqueue_time; + } +} + +std::unique_ptr PrioritizedPacketQueue::Pop() { + if (size_packets_ == 0) { + return nullptr; + } + + RTC_DCHECK_GE(top_active_prio_level_, 0); + StreamQueue& stream_queue = *streams_by_prio_[top_active_prio_level_].front(); + QueuedPacket packet = stream_queue.DequePacket(top_active_prio_level_); + --size_packets_; + RTC_DCHECK(packet.packet->packet_type().has_value()); + RtpPacketMediaType packet_type = packet.packet->packet_type().value(); + --size_packets_per_media_type_[static_cast(packet_type)]; + RTC_DCHECK_GE(size_packets_per_media_type_[static_cast(packet_type)], + 0); + size_payload_ -= packet.PacketSize(); + + // Calculate the total amount of time spent by this packet in the queue + // while in a non-paused state. Note that the `pause_time_sum_ms_` was + // subtracted from `packet.enqueue_time_ms` when the packet was pushed, and + // by subtracting it now we effectively remove the time spent in in the + // queue while in a paused state. + TimeDelta time_in_non_paused_state = + last_update_time_ - packet.enqueue_time - pause_time_sum_; + queue_time_sum_ -= time_in_non_paused_state; + + // Set the time spent in the send queue, which is the per-packet equivalent of + // totalPacketSendDelay. The notion of being paused is an implementation + // detail that we do not want to expose, so it makes sense to report the + // metric excluding the pause time. This also avoids spikes in the metric. + // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalpacketsenddelay + packet.packet->set_time_in_send_queue(time_in_non_paused_state); + + RTC_DCHECK(size_packets_ > 0 || queue_time_sum_ == TimeDelta::Zero()); + + RTC_CHECK(packet.enqueue_time_iterator != enqueue_times_.end()); + enqueue_times_.erase(packet.enqueue_time_iterator); + + // Remove StreamQueue from head of fifo-queue for this prio level, and + // and add it to the end if it still has packets. 
+ streams_by_prio_[top_active_prio_level_].pop_front(); + if (stream_queue.HasPacketsAtPrio(top_active_prio_level_)) { + streams_by_prio_[top_active_prio_level_].push_back(&stream_queue); + } else if (streams_by_prio_[top_active_prio_level_].empty()) { + // No stream queues have packets at this prio level, find top priority + // that is not empty. + if (size_packets_ == 0) { + top_active_prio_level_ = -1; + } else { + for (int i = 0; i < kNumPriorityLevels; ++i) { + if (!streams_by_prio_[i].empty()) { + top_active_prio_level_ = i; + break; + } + } + } + } + + return std::move(packet.packet); +} + +int PrioritizedPacketQueue::SizeInPackets() const { + return size_packets_; +} + +DataSize PrioritizedPacketQueue::SizeInPayloadBytes() const { + return size_payload_; +} + +bool PrioritizedPacketQueue::Empty() const { + return size_packets_ == 0; +} + +const std::array& +PrioritizedPacketQueue::SizeInPacketsPerRtpPacketMediaType() const { + return size_packets_per_media_type_; +} + +Timestamp PrioritizedPacketQueue::LeadingPacketEnqueueTime( + RtpPacketMediaType type) const { + const int priority_level = GetPriorityForType(type); + if (streams_by_prio_[priority_level].empty()) { + return Timestamp::MinusInfinity(); + } + return streams_by_prio_[priority_level].front()->LeadingPacketEnqueueTime( + priority_level); +} + +Timestamp PrioritizedPacketQueue::OldestEnqueueTime() const { + return enqueue_times_.empty() ? Timestamp::MinusInfinity() + : enqueue_times_.front(); +} + +TimeDelta PrioritizedPacketQueue::AverageQueueTime() const { + if (size_packets_ == 0) { + return TimeDelta::Zero(); + } + return queue_time_sum_ / size_packets_; +} + +void PrioritizedPacketQueue::UpdateAverageQueueTime(Timestamp now) { + RTC_CHECK_GE(now, last_update_time_); + if (now == last_update_time_) { + return; + } + + TimeDelta delta = now - last_update_time_; + + if (paused_) { + pause_time_sum_ += delta; + } else { + queue_time_sum_ += delta * size_packets_; + } + + last_update_time_ = now; +} + +void PrioritizedPacketQueue::SetPauseState(bool paused, Timestamp now) { + UpdateAverageQueueTime(now); + paused_ = paused; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/prioritized_packet_queue.h b/TMessagesProj/jni/voip/webrtc/modules/pacing/prioritized_packet_queue.h new file mode 100644 index 0000000000..3b5748f12a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/prioritized_packet_queue.h @@ -0,0 +1,157 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_PACING_PRIORITIZED_PACKET_QUEUE_H_ +#define MODULES_PACING_PRIORITIZED_PACKET_QUEUE_H_ + +#include + +#include +#include +#include +#include + +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" + +namespace webrtc { + +class PrioritizedPacketQueue { + public: + explicit PrioritizedPacketQueue(Timestamp creation_time); + PrioritizedPacketQueue(const PrioritizedPacketQueue&) = delete; + PrioritizedPacketQueue& operator=(const PrioritizedPacketQueue&) = delete; + + // Add a packet to the queue. 
The enqueue time is used for queue time stats + // and to report the leading packet enqueue time per packet type. + void Push(Timestamp enqueue_time, std::unique_ptr packet); + + // Remove the next packet from the queue. Packets a prioritized first + // according to packet type, in the following order: + // - audio, retransmissions, video / fec, padding + // For each packet type, we use one FIFO-queue per SSRC and emit from + // those queues in a round-robin fashion. + std::unique_ptr Pop(); + + // Number of packets in the queue. + int SizeInPackets() const; + + // Sum of all payload bytes in the queue, where the payload is calculated + // as `packet->payload_size() + packet->padding_size()`. + DataSize SizeInPayloadBytes() const; + + // Convenience method for `SizeInPackets() == 0`. + bool Empty() const; + + // Total packets in the queue per media type (RtpPacketMediaType values are + // used as lookup index). + const std::array& SizeInPacketsPerRtpPacketMediaType() + const; + + // The enqueue time of the next packet this queue will return via the Pop() + // method, for the given packet type. If queue has no packets, of that type, + // returns Timestamp::MinusInfinity(). + Timestamp LeadingPacketEnqueueTime(RtpPacketMediaType type) const; + + // Enqueue time of the oldest packet in the queue, + // Timestamp::MinusInfinity() if queue is empty. + Timestamp OldestEnqueueTime() const; + + // Average queue time for the packets currently in the queue. + // The queuing time is calculated from Push() to the last UpdateQueueTime() + // call - with any time spent in a paused state subtracted. + // Returns TimeDelta::Zero() for an empty queue. + TimeDelta AverageQueueTime() const; + + // Called during packet processing or when pause stats changes. Since the + // AverageQueueTime() method does not look at the wall time, this method + // needs to be called before querying queue time. + void UpdateAverageQueueTime(Timestamp now); + + // Set the pause state, while `paused` is true queuing time is not counted. + void SetPauseState(bool paused, Timestamp now); + + private: + static constexpr int kNumPriorityLevels = 4; + + class QueuedPacket { + public: + DataSize PacketSize() const; + + std::unique_ptr packet; + Timestamp enqueue_time; + std::list::iterator enqueue_time_iterator; + }; + + // Class containing packets for an RTP stream. + // For each priority level, packets are simply stored in a fifo queue. + class StreamQueue { + public: + explicit StreamQueue(Timestamp creation_time); + StreamQueue(StreamQueue&&) = default; + StreamQueue& operator=(StreamQueue&&) = default; + + StreamQueue(const StreamQueue&) = delete; + StreamQueue& operator=(const StreamQueue&) = delete; + + // Enqueue packet at the given priority level. Returns true if the packet + // count for that priority level went from zero to non-zero. + bool EnqueuePacket(QueuedPacket packet, int priority_level); + + QueuedPacket DequePacket(int priority_level); + + bool HasPacketsAtPrio(int priority_level) const; + bool IsEmpty() const; + Timestamp LeadingPacketEnqueueTime(int priority_level) const; + Timestamp LastEnqueueTime() const; + + private: + std::deque packets_[kNumPriorityLevels]; + Timestamp last_enqueue_time_; + }; + + // Cumulative sum, over all packets, of time spent in the queue. + TimeDelta queue_time_sum_; + // Cumulative sum of time the queue has spent in a paused state. + TimeDelta pause_time_sum_; + // Total number of packets stored in this queue. 
+ int size_packets_; + // Total number of packets stored in this queue per RtpPacketMediaType. + std::array size_packets_per_media_type_; + // Sum of payload sizes for all packts stored in this queue. + DataSize size_payload_; + // The last time queue/pause time sums were updated. + Timestamp last_update_time_; + bool paused_; + + // Last time `streams_` was culled for inactive streams. + Timestamp last_culling_time_; + + // Map from SSRC to packet queues for the associated RTP stream. + std::unordered_map> streams_; + + // For each priority level, a queue of StreamQueues which have at least one + // packet pending for that prio level. + std::deque streams_by_prio_[kNumPriorityLevels]; + + // The first index into `stream_by_prio_` that is non-empty. + int top_active_prio_level_; + + // Ordered list of enqueue times. Additions are always increasing and added to + // the end. QueuedPacket instances have a iterators into this list for fast + // removal. + std::list enqueue_times_; +}; + +} // namespace webrtc + +#endif // MODULES_PACING_PRIORITIZED_PACKET_QUEUE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/round_robin_packet_queue.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/round_robin_packet_queue.cc deleted file mode 100644 index ef37e5256b..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/round_robin_packet_queue.cc +++ /dev/null @@ -1,406 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/pacing/round_robin_packet_queue.h" - -#include -#include -#include - -#include "absl/strings/match.h" -#include "rtc_base/checks.h" - -namespace webrtc { -namespace { -static constexpr DataSize kMaxLeadingSize = DataSize::Bytes(1400); -} - -RoundRobinPacketQueue::QueuedPacket::QueuedPacket(const QueuedPacket& rhs) = - default; -RoundRobinPacketQueue::QueuedPacket::~QueuedPacket() = default; - -RoundRobinPacketQueue::QueuedPacket::QueuedPacket( - int priority, - Timestamp enqueue_time, - uint64_t enqueue_order, - std::multiset::iterator enqueue_time_it, - std::unique_ptr packet) - : priority_(priority), - enqueue_time_(enqueue_time), - enqueue_order_(enqueue_order), - is_retransmission_(packet->packet_type() == - RtpPacketMediaType::kRetransmission), - enqueue_time_it_(enqueue_time_it), - owned_packet_(packet.release()) {} - -bool RoundRobinPacketQueue::QueuedPacket::operator<( - const RoundRobinPacketQueue::QueuedPacket& other) const { - if (priority_ != other.priority_) - return priority_ > other.priority_; - if (is_retransmission_ != other.is_retransmission_) - return other.is_retransmission_; - - return enqueue_order_ > other.enqueue_order_; -} - -int RoundRobinPacketQueue::QueuedPacket::Priority() const { - return priority_; -} - -RtpPacketMediaType RoundRobinPacketQueue::QueuedPacket::Type() const { - return *owned_packet_->packet_type(); -} - -uint32_t RoundRobinPacketQueue::QueuedPacket::Ssrc() const { - return owned_packet_->Ssrc(); -} - -Timestamp RoundRobinPacketQueue::QueuedPacket::EnqueueTime() const { - return enqueue_time_; -} - -bool RoundRobinPacketQueue::QueuedPacket::IsRetransmission() const { - return Type() == RtpPacketMediaType::kRetransmission; -} - -uint64_t RoundRobinPacketQueue::QueuedPacket::EnqueueOrder() const { - return enqueue_order_; -} - -RtpPacketToSend* RoundRobinPacketQueue::QueuedPacket::RtpPacket() const { - return owned_packet_; -} - -void RoundRobinPacketQueue::QueuedPacket::UpdateEnqueueTimeIterator( - std::multiset::iterator it) { - enqueue_time_it_ = it; -} - -std::multiset::iterator -RoundRobinPacketQueue::QueuedPacket::EnqueueTimeIterator() const { - return enqueue_time_it_; -} - -void RoundRobinPacketQueue::QueuedPacket::SubtractPauseTime( - TimeDelta pause_time_sum) { - enqueue_time_ -= pause_time_sum; -} - -RoundRobinPacketQueue::PriorityPacketQueue::const_iterator -RoundRobinPacketQueue::PriorityPacketQueue::begin() const { - return c.begin(); -} - -RoundRobinPacketQueue::PriorityPacketQueue::const_iterator -RoundRobinPacketQueue::PriorityPacketQueue::end() const { - return c.end(); -} - -RoundRobinPacketQueue::Stream::Stream() : size(DataSize::Zero()), ssrc(0) {} -RoundRobinPacketQueue::Stream::Stream(const Stream& stream) = default; -RoundRobinPacketQueue::Stream::~Stream() = default; - -bool IsEnabled(const WebRtcKeyValueConfig* field_trials, const char* name) { - if (!field_trials) { - return false; - } - return absl::StartsWith(field_trials->Lookup(name), "Enabled"); -} - -RoundRobinPacketQueue::RoundRobinPacketQueue( - Timestamp start_time, - const WebRtcKeyValueConfig* field_trials) - : transport_overhead_per_packet_(DataSize::Zero()), - time_last_updated_(start_time), - paused_(false), - size_packets_(0), - size_(DataSize::Zero()), - max_size_(kMaxLeadingSize), - queue_time_sum_(TimeDelta::Zero()), - pause_time_sum_(TimeDelta::Zero()), - include_overhead_(false) {} - -RoundRobinPacketQueue::~RoundRobinPacketQueue() { - // Make sure to release any packets owned by raw pointer in QueuedPacket. 
- while (!Empty()) { - Pop(); - } -} - -void RoundRobinPacketQueue::Push(int priority, - Timestamp enqueue_time, - uint64_t enqueue_order, - std::unique_ptr packet) { - RTC_DCHECK(packet->packet_type().has_value()); - if (size_packets_ == 0) { - // Single packet fast-path. - single_packet_queue_.emplace( - QueuedPacket(priority, enqueue_time, enqueue_order, - enqueue_times_.end(), std::move(packet))); - UpdateQueueTime(enqueue_time); - single_packet_queue_->SubtractPauseTime(pause_time_sum_); - size_packets_ = 1; - size_ += PacketSize(*single_packet_queue_); - } else { - MaybePromoteSinglePacketToNormalQueue(); - Push(QueuedPacket(priority, enqueue_time, enqueue_order, - enqueue_times_.insert(enqueue_time), std::move(packet))); - } -} - -std::unique_ptr RoundRobinPacketQueue::Pop() { - if (single_packet_queue_.has_value()) { - RTC_DCHECK(stream_priorities_.empty()); - std::unique_ptr rtp_packet( - single_packet_queue_->RtpPacket()); - single_packet_queue_.reset(); - queue_time_sum_ = TimeDelta::Zero(); - size_packets_ = 0; - size_ = DataSize::Zero(); - return rtp_packet; - } - - RTC_DCHECK(!Empty()); - Stream* stream = GetHighestPriorityStream(); - const QueuedPacket& queued_packet = stream->packet_queue.top(); - - stream_priorities_.erase(stream->priority_it); - - // Calculate the total amount of time spent by this packet in the queue - // while in a non-paused state. Note that the `pause_time_sum_ms_` was - // subtracted from `packet.enqueue_time_ms` when the packet was pushed, and - // by subtracting it now we effectively remove the time spent in in the - // queue while in a paused state. - TimeDelta time_in_non_paused_state = - time_last_updated_ - queued_packet.EnqueueTime() - pause_time_sum_; - queue_time_sum_ -= time_in_non_paused_state; - - RTC_CHECK(queued_packet.EnqueueTimeIterator() != enqueue_times_.end()); - enqueue_times_.erase(queued_packet.EnqueueTimeIterator()); - - // Update `bytes` of this stream. The general idea is that the stream that - // has sent the least amount of bytes should have the highest priority. - // The problem with that is if streams send with different rates, in which - // case a "budget" will be built up for the stream sending at the lower - // rate. To avoid building a too large budget we limit `bytes` to be within - // kMaxLeading bytes of the stream that has sent the most amount of bytes. - DataSize packet_size = PacketSize(queued_packet); - stream->size = - std::max(stream->size + packet_size, max_size_ - kMaxLeadingSize); - max_size_ = std::max(max_size_, stream->size); - - size_ -= packet_size; - size_packets_ -= 1; - RTC_CHECK(size_packets_ > 0 || queue_time_sum_ == TimeDelta::Zero()); - - std::unique_ptr rtp_packet(queued_packet.RtpPacket()); - stream->packet_queue.pop(); - - // If there are packets left to be sent, schedule the stream again. 
- RTC_CHECK(!IsSsrcScheduled(stream->ssrc)); - if (stream->packet_queue.empty()) { - stream->priority_it = stream_priorities_.end(); - } else { - int priority = stream->packet_queue.top().Priority(); - stream->priority_it = stream_priorities_.emplace( - StreamPrioKey(priority, stream->size), stream->ssrc); - } - - return rtp_packet; -} - -bool RoundRobinPacketQueue::Empty() const { - if (size_packets_ == 0) { - RTC_DCHECK(!single_packet_queue_.has_value() && stream_priorities_.empty()); - return true; - } - RTC_DCHECK(single_packet_queue_.has_value() || !stream_priorities_.empty()); - return false; -} - -size_t RoundRobinPacketQueue::SizeInPackets() const { - return size_packets_; -} - -DataSize RoundRobinPacketQueue::Size() const { - return size_; -} - -absl::optional RoundRobinPacketQueue::LeadingAudioPacketEnqueueTime() - const { - if (single_packet_queue_.has_value()) { - if (single_packet_queue_->Type() == RtpPacketMediaType::kAudio) { - return single_packet_queue_->EnqueueTime(); - } - return absl::nullopt; - } - - if (stream_priorities_.empty()) { - return absl::nullopt; - } - uint32_t ssrc = stream_priorities_.begin()->second; - - const auto& top_packet = streams_.find(ssrc)->second.packet_queue.top(); - if (top_packet.Type() == RtpPacketMediaType::kAudio) { - return top_packet.EnqueueTime(); - } - return absl::nullopt; -} - -Timestamp RoundRobinPacketQueue::OldestEnqueueTime() const { - if (single_packet_queue_.has_value()) { - return single_packet_queue_->EnqueueTime(); - } - - if (Empty()) - return Timestamp::MinusInfinity(); - RTC_CHECK(!enqueue_times_.empty()); - return *enqueue_times_.begin(); -} - -void RoundRobinPacketQueue::UpdateQueueTime(Timestamp now) { - RTC_CHECK_GE(now, time_last_updated_); - if (now == time_last_updated_) - return; - - TimeDelta delta = now - time_last_updated_; - - if (paused_) { - pause_time_sum_ += delta; - } else { - queue_time_sum_ += TimeDelta::Micros(delta.us() * size_packets_); - } - - time_last_updated_ = now; -} - -void RoundRobinPacketQueue::SetPauseState(bool paused, Timestamp now) { - if (paused_ == paused) - return; - UpdateQueueTime(now); - paused_ = paused; -} - -void RoundRobinPacketQueue::SetIncludeOverhead() { - MaybePromoteSinglePacketToNormalQueue(); - include_overhead_ = true; - // We need to update the size to reflect overhead for existing packets. - for (const auto& stream : streams_) { - for (const QueuedPacket& packet : stream.second.packet_queue) { - size_ += DataSize::Bytes(packet.RtpPacket()->headers_size()) + - transport_overhead_per_packet_; - } - } -} - -void RoundRobinPacketQueue::SetTransportOverhead(DataSize overhead_per_packet) { - MaybePromoteSinglePacketToNormalQueue(); - if (include_overhead_) { - DataSize previous_overhead = transport_overhead_per_packet_; - // We need to update the size to reflect overhead for existing packets. 
- for (const auto& stream : streams_) { - int packets = stream.second.packet_queue.size(); - size_ -= packets * previous_overhead; - size_ += packets * overhead_per_packet; - } - } - transport_overhead_per_packet_ = overhead_per_packet; -} - -TimeDelta RoundRobinPacketQueue::AverageQueueTime() const { - if (Empty()) - return TimeDelta::Zero(); - return queue_time_sum_ / size_packets_; -} - -void RoundRobinPacketQueue::Push(QueuedPacket packet) { - auto stream_info_it = streams_.find(packet.Ssrc()); - if (stream_info_it == streams_.end()) { - stream_info_it = streams_.emplace(packet.Ssrc(), Stream()).first; - stream_info_it->second.priority_it = stream_priorities_.end(); - stream_info_it->second.ssrc = packet.Ssrc(); - } - - Stream* stream = &stream_info_it->second; - - if (stream->priority_it == stream_priorities_.end()) { - // If the SSRC is not currently scheduled, add it to `stream_priorities_`. - RTC_CHECK(!IsSsrcScheduled(stream->ssrc)); - stream->priority_it = stream_priorities_.emplace( - StreamPrioKey(packet.Priority(), stream->size), packet.Ssrc()); - } else if (packet.Priority() < stream->priority_it->first.priority) { - // If the priority of this SSRC increased, remove the outdated StreamPrioKey - // and insert a new one with the new priority. Note that `priority_` uses - // lower ordinal for higher priority. - stream_priorities_.erase(stream->priority_it); - stream->priority_it = stream_priorities_.emplace( - StreamPrioKey(packet.Priority(), stream->size), packet.Ssrc()); - } - RTC_CHECK(stream->priority_it != stream_priorities_.end()); - - if (packet.EnqueueTimeIterator() == enqueue_times_.end()) { - // Promotion from single-packet queue. Just add to enqueue times. - packet.UpdateEnqueueTimeIterator( - enqueue_times_.insert(packet.EnqueueTime())); - } else { - // In order to figure out how much time a packet has spent in the queue - // while not in a paused state, we subtract the total amount of time the - // queue has been paused so far, and when the packet is popped we subtract - // the total amount of time the queue has been paused at that moment. This - // way we subtract the total amount of time the packet has spent in the - // queue while in a paused state. 
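// Illustrative worked example (not part of the original change): a packet
// pushed at t = 10 ms while pause_time_sum_ == 3 ms gets an adjusted enqueue
// time of 7 ms. If it is popped at t = 20 ms when pause_time_sum_ == 5 ms,
// Pop() computes 20 - 7 - 5 = 8 ms, i.e. the raw 10 ms spent in the queue
// minus the 2 ms the queue was paused while this packet waited.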
- UpdateQueueTime(packet.EnqueueTime()); - packet.SubtractPauseTime(pause_time_sum_); - - size_packets_ += 1; - size_ += PacketSize(packet); - } - - stream->packet_queue.push(packet); -} - -DataSize RoundRobinPacketQueue::PacketSize(const QueuedPacket& packet) const { - DataSize packet_size = DataSize::Bytes(packet.RtpPacket()->payload_size() + - packet.RtpPacket()->padding_size()); - if (include_overhead_) { - packet_size += DataSize::Bytes(packet.RtpPacket()->headers_size()) + - transport_overhead_per_packet_; - } - return packet_size; -} - -void RoundRobinPacketQueue::MaybePromoteSinglePacketToNormalQueue() { - if (single_packet_queue_.has_value()) { - Push(*single_packet_queue_); - single_packet_queue_.reset(); - } -} - -RoundRobinPacketQueue::Stream* -RoundRobinPacketQueue::GetHighestPriorityStream() { - RTC_CHECK(!stream_priorities_.empty()); - uint32_t ssrc = stream_priorities_.begin()->second; - - auto stream_info_it = streams_.find(ssrc); - RTC_CHECK(stream_info_it != streams_.end()); - RTC_CHECK(stream_info_it->second.priority_it == stream_priorities_.begin()); - RTC_CHECK(!stream_info_it->second.packet_queue.empty()); - return &stream_info_it->second; -} - -bool RoundRobinPacketQueue::IsSsrcScheduled(uint32_t ssrc) const { - for (const auto& scheduled_stream : stream_priorities_) { - if (scheduled_stream.second == ssrc) - return true; - } - return false; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/round_robin_packet_queue.h b/TMessagesProj/jni/voip/webrtc/modules/pacing/round_robin_packet_queue.h deleted file mode 100644 index dd35b90d93..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/round_robin_packet_queue.h +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_PACING_ROUND_ROBIN_PACKET_QUEUE_H_ -#define MODULES_PACING_ROUND_ROBIN_PACKET_QUEUE_H_ - -#include -#include - -#include -#include -#include -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/transport/webrtc_key_value_config.h" -#include "api/units/data_size.h" -#include "api/units/time_delta.h" -#include "api/units/timestamp.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "system_wrappers/include/clock.h" - -namespace webrtc { - -class RoundRobinPacketQueue { - public: - RoundRobinPacketQueue(Timestamp start_time, - const WebRtcKeyValueConfig* field_trials); - ~RoundRobinPacketQueue(); - - void Push(int priority, - Timestamp enqueue_time, - uint64_t enqueue_order, - std::unique_ptr packet); - std::unique_ptr Pop(); - - bool Empty() const; - size_t SizeInPackets() const; - DataSize Size() const; - // If the next packet, that would be returned by Pop() if called - // now, is an audio packet this method returns the enqueue time - // of that packet. If queue is empty or top packet is not audio, - // returns nullopt. 
- absl::optional LeadingAudioPacketEnqueueTime() const; - - Timestamp OldestEnqueueTime() const; - TimeDelta AverageQueueTime() const; - void UpdateQueueTime(Timestamp now); - void SetPauseState(bool paused, Timestamp now); - void SetIncludeOverhead(); - void SetTransportOverhead(DataSize overhead_per_packet); - - private: - struct QueuedPacket { - public: - QueuedPacket(int priority, - Timestamp enqueue_time, - uint64_t enqueue_order, - std::multiset::iterator enqueue_time_it, - std::unique_ptr packet); - QueuedPacket(const QueuedPacket& rhs); - ~QueuedPacket(); - - bool operator<(const QueuedPacket& other) const; - - int Priority() const; - RtpPacketMediaType Type() const; - uint32_t Ssrc() const; - Timestamp EnqueueTime() const; - bool IsRetransmission() const; - uint64_t EnqueueOrder() const; - RtpPacketToSend* RtpPacket() const; - - std::multiset::iterator EnqueueTimeIterator() const; - void UpdateEnqueueTimeIterator(std::multiset::iterator it); - void SubtractPauseTime(TimeDelta pause_time_sum); - - private: - int priority_; - Timestamp enqueue_time_; // Absolute time of pacer queue entry. - uint64_t enqueue_order_; - bool is_retransmission_; // Cached for performance. - std::multiset::iterator enqueue_time_it_; - // Raw pointer since priority_queue doesn't allow for moving - // out of the container. - RtpPacketToSend* owned_packet_; - }; - - class PriorityPacketQueue : public std::priority_queue { - public: - using const_iterator = container_type::const_iterator; - const_iterator begin() const; - const_iterator end() const; - }; - - struct StreamPrioKey { - StreamPrioKey(int priority, DataSize size) - : priority(priority), size(size) {} - - bool operator<(const StreamPrioKey& other) const { - if (priority != other.priority) - return priority < other.priority; - return size < other.size; - } - - const int priority; - const DataSize size; - }; - - struct Stream { - Stream(); - Stream(const Stream&); - - virtual ~Stream(); - - DataSize size; - uint32_t ssrc; - - PriorityPacketQueue packet_queue; - - // Whenever a packet is inserted for this stream we check if `priority_it` - // points to an element in `stream_priorities_`, and if it does it means - // this stream has already been scheduled, and if the scheduled priority is - // lower than the priority of the incoming packet we reschedule this stream - // with the higher priority. - std::multimap::iterator priority_it; - }; - - void Push(QueuedPacket packet); - - DataSize PacketSize(const QueuedPacket& packet) const; - void MaybePromoteSinglePacketToNormalQueue(); - - Stream* GetHighestPriorityStream(); - - // Just used to verify correctness. - bool IsSsrcScheduled(uint32_t ssrc) const; - - DataSize transport_overhead_per_packet_; - - Timestamp time_last_updated_; - - bool paused_; - size_t size_packets_; - DataSize size_; - DataSize max_size_; - TimeDelta queue_time_sum_; - TimeDelta pause_time_sum_; - - // A map of streams used to prioritize from which stream to send next. We use - // a multimap instead of a priority_queue since the priority of a stream can - // change as a new packet is inserted, and a multimap allows us to remove and - // then reinsert a StreamPrioKey if the priority has increased. - std::multimap stream_priorities_; - - // A map of SSRCs to Streams. - std::unordered_map streams_; - - // The enqueue time of every packet currently in the queue. Used to figure out - // the age of the oldest packet in the queue. 
- std::multiset enqueue_times_; - - absl::optional single_packet_queue_; - - bool include_overhead_; -}; -} // namespace webrtc - -#endif // MODULES_PACING_ROUND_ROBIN_PACKET_QUEUE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/rtp_packet_pacer.h b/TMessagesProj/jni/voip/webrtc/modules/pacing/rtp_packet_pacer.h index 3dc2b27612..e2cf806385 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/rtp_packet_pacer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/rtp_packet_pacer.h @@ -13,6 +13,8 @@ #include +#include + #include "absl/types/optional.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" @@ -26,7 +28,8 @@ class RtpPacketPacer { public: virtual ~RtpPacketPacer() = default; - virtual void CreateProbeCluster(DataRate bitrate, int cluster_id) = 0; + virtual void CreateProbeClusters( + std::vector probe_cluster_configs) = 0; // Temporarily pause all sending. virtual void Pause() = 0; @@ -34,8 +37,7 @@ class RtpPacketPacer { // Resume sending packets. virtual void Resume() = 0; - virtual void SetCongestionWindow(DataSize congestion_window_size) = 0; - virtual void UpdateOutstandingData(DataSize outstanding_data) = 0; + virtual void SetCongested(bool congested) = 0; // Sets the pacing rates. Must be called once before packets can be sent. virtual void SetPacingRates(DataRate pacing_rate, DataRate padding_rate) = 0; diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc index 16c2de58c6..a42220b834 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc @@ -12,112 +12,138 @@ #include #include + #include "absl/memory/memory.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/transport/network_types.h" #include "rtc_base/checks.h" -#include "rtc_base/event.h" -#include "rtc_base/logging.h" -#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/experiments/field_trial_units.h" +#include "rtc_base/system/unused.h" #include "rtc_base/trace_event.h" namespace webrtc { +namespace { + +constexpr const char* kBurstyPacerFieldTrial = "WebRTC-BurstyPacer"; + +constexpr const char* kSlackedTaskQueuePacedSenderFieldTrial = + "WebRTC-SlackedTaskQueuePacedSender"; + +} // namespace + +const int TaskQueuePacedSender::kNoPacketHoldback = -1; + +TaskQueuePacedSender::BurstyPacerFlags::BurstyPacerFlags( + const FieldTrialsView& field_trials) + : burst("burst") { + ParseFieldTrial({&burst}, field_trials.Lookup(kBurstyPacerFieldTrial)); +} + +TaskQueuePacedSender::SlackedPacerFlags::SlackedPacerFlags( + const FieldTrialsView& field_trials) + : allow_low_precision("Enabled"), + max_low_precision_expected_queue_time("max_queue_time"), + send_burst_interval("send_burst_interval") { + ParseFieldTrial({&allow_low_precision, &max_low_precision_expected_queue_time, + &send_burst_interval}, + field_trials.Lookup(kSlackedTaskQueuePacedSenderFieldTrial)); +} + TaskQueuePacedSender::TaskQueuePacedSender( Clock* clock, PacingController::PacketSender* packet_sender, - RtcEventLog* event_log, - const WebRtcKeyValueConfig* field_trials, + const FieldTrialsView& field_trials, TaskQueueFactory* task_queue_factory, TimeDelta max_hold_back_window, int max_hold_back_window_in_packets) : clock_(clock), - max_hold_back_window_(max_hold_back_window), - 
max_hold_back_window_in_packets_(max_hold_back_window_in_packets), - pacing_controller_(clock, - packet_sender, - event_log, - field_trials, - PacingController::ProcessMode::kDynamic), + bursty_pacer_flags_(field_trials), + slacked_pacer_flags_(field_trials), + max_hold_back_window_(slacked_pacer_flags_.allow_low_precision + ? PacingController::kMinSleepTime + : max_hold_back_window), + max_hold_back_window_in_packets_(slacked_pacer_flags_.allow_low_precision + ? 0 + : max_hold_back_window_in_packets), + pacing_controller_(clock, packet_sender, field_trials), next_process_time_(Timestamp::MinusInfinity()), is_started_(false), is_shutdown_(false), packet_size_(/*alpha=*/0.95), - task_queue_(task_queue_factory->CreateTaskQueue( - "TaskQueuePacedSender", - TaskQueueFactory::Priority::NORMAL)) { - packet_size_.Apply(1, 0); + include_overhead_(false), + task_queue_(field_trials, "TaskQueuePacedSender", task_queue_factory) { + RTC_DCHECK_GE(max_hold_back_window_, PacingController::kMinSleepTime); + // There are multiple field trials that can affect burst. If multiple bursts + // are specified we pick the largest of the values. + absl::optional burst = bursty_pacer_flags_.burst.GetOptional(); + if (slacked_pacer_flags_.allow_low_precision && + slacked_pacer_flags_.send_burst_interval) { + TimeDelta slacked_burst = slacked_pacer_flags_.send_burst_interval.Value(); + if (!burst.has_value() || burst.value() < slacked_burst) { + burst = slacked_burst; + } + } + if (burst.has_value()) { + pacing_controller_.SetSendBurstInterval(burst.value()); + } } TaskQueuePacedSender::~TaskQueuePacedSender() { // Post an immediate task to mark the queue as shutting down. // The rtc::TaskQueue destructor will wait for pending tasks to // complete before continuing. - task_queue_.PostTask([&]() { + task_queue_.RunOrPost([&]() { RTC_DCHECK_RUN_ON(&task_queue_); is_shutdown_ = true; }); } void TaskQueuePacedSender::EnsureStarted() { - task_queue_.PostTask([this]() { + task_queue_.RunOrPost([this]() { RTC_DCHECK_RUN_ON(&task_queue_); is_started_ = true; MaybeProcessPackets(Timestamp::MinusInfinity()); }); } -void TaskQueuePacedSender::CreateProbeCluster(DataRate bitrate, - int cluster_id) { - task_queue_.PostTask([this, bitrate, cluster_id]() { - RTC_DCHECK_RUN_ON(&task_queue_); - pacing_controller_.CreateProbeCluster(bitrate, cluster_id); - MaybeProcessPackets(Timestamp::MinusInfinity()); - }); +void TaskQueuePacedSender::CreateProbeClusters( + std::vector probe_cluster_configs) { + task_queue_.RunOrPost( + [this, probe_cluster_configs = std::move(probe_cluster_configs)]() { + RTC_DCHECK_RUN_ON(&task_queue_); + pacing_controller_.CreateProbeClusters(probe_cluster_configs); + MaybeProcessPackets(Timestamp::MinusInfinity()); + }); } void TaskQueuePacedSender::Pause() { - task_queue_.PostTask([this]() { + task_queue_.RunOrPost([this]() { RTC_DCHECK_RUN_ON(&task_queue_); pacing_controller_.Pause(); }); } void TaskQueuePacedSender::Resume() { - task_queue_.PostTask([this]() { + task_queue_.RunOrPost([this]() { RTC_DCHECK_RUN_ON(&task_queue_); pacing_controller_.Resume(); MaybeProcessPackets(Timestamp::MinusInfinity()); }); } -void TaskQueuePacedSender::SetCongestionWindow( - DataSize congestion_window_size) { - task_queue_.PostTask([this, congestion_window_size]() { - RTC_DCHECK_RUN_ON(&task_queue_); - pacing_controller_.SetCongestionWindow(congestion_window_size); - MaybeProcessPackets(Timestamp::MinusInfinity()); - }); -} - -void TaskQueuePacedSender::UpdateOutstandingData(DataSize outstanding_data) { - if 
(task_queue_.IsCurrent()) { - RTC_DCHECK_RUN_ON(&task_queue_); - // Fast path since this can be called once per sent packet while on the - // task queue. - pacing_controller_.UpdateOutstandingData(outstanding_data); - MaybeProcessPackets(Timestamp::MinusInfinity()); - return; - } - - task_queue_.PostTask([this, outstanding_data]() { +void TaskQueuePacedSender::SetCongested(bool congested) { + task_queue_.RunOrPost([this, congested]() { RTC_DCHECK_RUN_ON(&task_queue_); - pacing_controller_.UpdateOutstandingData(outstanding_data); + pacing_controller_.SetCongested(congested); MaybeProcessPackets(Timestamp::MinusInfinity()); }); } void TaskQueuePacedSender::SetPacingRates(DataRate pacing_rate, DataRate padding_rate) { - task_queue_.PostTask([this, pacing_rate, padding_rate]() { + task_queue_.RunOrPost([this, pacing_rate, padding_rate]() { RTC_DCHECK_RUN_ON(&task_queue_); pacing_controller_.SetPacingRates(pacing_rate, padding_rate); MaybeProcessPackets(Timestamp::MinusInfinity()); @@ -126,30 +152,31 @@ void TaskQueuePacedSender::SetPacingRates(DataRate pacing_rate, void TaskQueuePacedSender::EnqueuePackets( std::vector> packets) { -#if RTC_TRACE_EVENTS_ENABLED - TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("webrtc"), - "TaskQueuePacedSender::EnqueuePackets"); - for (auto& packet : packets) { - TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"), - "TaskQueuePacedSender::EnqueuePackets::Loop", - "sequence_number", packet->SequenceNumber(), "rtp_timestamp", - packet->Timestamp()); - } -#endif - - task_queue_.PostTask([this, packets_ = std::move(packets)]() mutable { - RTC_DCHECK_RUN_ON(&task_queue_); - for (auto& packet : packets_) { - packet_size_.Apply(1, packet->size()); - RTC_DCHECK_GE(packet->capture_time_ms(), 0); - pacing_controller_.EnqueuePacket(std::move(packet)); - } - MaybeProcessPackets(Timestamp::MinusInfinity()); - }); + task_queue_.TaskQueueForPost()->PostTask(task_queue_.MaybeSafeTask( + safety_.flag(), [this, packets = std::move(packets)]() mutable { + RTC_DCHECK_RUN_ON(&task_queue_); + TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("webrtc"), + "TaskQueuePacedSender::EnqueuePackets"); + for (auto& packet : packets) { + TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"), + "TaskQueuePacedSender::EnqueuePackets::Loop", + "sequence_number", packet->SequenceNumber(), + "rtp_timestamp", packet->Timestamp()); + + size_t packet_size = packet->payload_size() + packet->padding_size(); + if (include_overhead_) { + packet_size += packet->headers_size(); + } + packet_size_.Apply(1, packet_size); + RTC_DCHECK_GE(packet->capture_time(), Timestamp::Zero()); + pacing_controller_.EnqueuePacket(std::move(packet)); + } + MaybeProcessPackets(Timestamp::MinusInfinity()); + })); } void TaskQueuePacedSender::SetAccountForAudioPackets(bool account_for_audio) { - task_queue_.PostTask([this, account_for_audio]() { + task_queue_.RunOrPost([this, account_for_audio]() { RTC_DCHECK_RUN_ON(&task_queue_); pacing_controller_.SetAccountForAudioPackets(account_for_audio); MaybeProcessPackets(Timestamp::MinusInfinity()); @@ -157,15 +184,16 @@ void TaskQueuePacedSender::SetAccountForAudioPackets(bool account_for_audio) { } void TaskQueuePacedSender::SetIncludeOverhead() { - task_queue_.PostTask([this]() { + task_queue_.RunOrPost([this]() { RTC_DCHECK_RUN_ON(&task_queue_); + include_overhead_ = true; pacing_controller_.SetIncludeOverhead(); MaybeProcessPackets(Timestamp::MinusInfinity()); }); } void TaskQueuePacedSender::SetTransportOverhead(DataSize overhead_per_packet) { - task_queue_.PostTask([this, overhead_per_packet]() { + 
task_queue_.RunOrPost([this, overhead_per_packet]() { RTC_DCHECK_RUN_ON(&task_queue_); pacing_controller_.SetTransportOverhead(overhead_per_packet); MaybeProcessPackets(Timestamp::MinusInfinity()); @@ -173,7 +201,7 @@ void TaskQueuePacedSender::SetTransportOverhead(DataSize overhead_per_packet) { } void TaskQueuePacedSender::SetQueueTimeLimit(TimeDelta limit) { - task_queue_.PostTask([this, limit]() { + task_queue_.RunOrPost([this, limit]() { RTC_DCHECK_RUN_ON(&task_queue_); pacing_controller_.SetQueueTimeLimit(limit); MaybeProcessPackets(Timestamp::MinusInfinity()); @@ -194,13 +222,16 @@ absl::optional TaskQueuePacedSender::FirstSentPacketTime() const { TimeDelta TaskQueuePacedSender::OldestPacketWaitTime() const { Timestamp oldest_packet = GetStats().oldest_packet_enqueue_time; - if (oldest_packet.IsInfinite()) + if (oldest_packet.IsInfinite()) { return TimeDelta::Zero(); + } // (webrtc:9716): The clock is not always monotonic. Timestamp current = clock_->CurrentTime(); - if (current < oldest_packet) + if (current < oldest_packet) { return TimeDelta::Zero(); + } + return current - oldest_packet; } @@ -213,70 +244,100 @@ void TaskQueuePacedSender::MaybeProcessPackets( Timestamp scheduled_process_time) { RTC_DCHECK_RUN_ON(&task_queue_); + TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("webrtc"), + "TaskQueuePacedSender::MaybeProcessPackets"); + if (is_shutdown_ || !is_started_) { return; } - // Normally, run ProcessPackets() only if this is the scheduled task. - // If it is not but it is already time to process and there either is - // no scheduled task or the schedule has shifted forward in time, run - // anyway and clear any schedule. - Timestamp next_process_time = pacing_controller_.NextSendTime(); + Timestamp next_send_time = pacing_controller_.NextSendTime(); + RTC_DCHECK(next_send_time.IsFinite()); const Timestamp now = clock_->CurrentTime(); - const bool is_scheduled_call = next_process_time_ == scheduled_process_time; - if (is_scheduled_call) { - // Indicate no pending scheduled call. - next_process_time_ = Timestamp::MinusInfinity(); - } - if (is_scheduled_call || - (now >= next_process_time && (next_process_time_.IsInfinite() || - next_process_time < next_process_time_))) { + TimeDelta early_execute_margin = + pacing_controller_.IsProbing() + ? PacingController::kMaxEarlyProbeProcessing + : TimeDelta::Zero(); + + // Process packets and update stats. + while (next_send_time <= now + early_execute_margin) { pacing_controller_.ProcessPackets(); - next_process_time = pacing_controller_.NextSendTime(); + next_send_time = pacing_controller_.NextSendTime(); + RTC_DCHECK(next_send_time.IsFinite()); + + // Probing state could change. Get margin after process packets. + early_execute_margin = pacing_controller_.IsProbing() + ? PacingController::kMaxEarlyProbeProcessing + : TimeDelta::Zero(); } + UpdateStats(); - TimeDelta hold_back_window = max_hold_back_window_; - DataRate pacing_rate = pacing_controller_.pacing_rate(); - DataSize avg_packet_size = DataSize::Bytes(packet_size_.filtered()); - if (max_hold_back_window_in_packets_ > 0 && !pacing_rate.IsZero() && - !avg_packet_size.IsZero()) { - TimeDelta avg_packet_send_time = avg_packet_size / pacing_rate; - hold_back_window = - std::min(hold_back_window, - avg_packet_send_time * max_hold_back_window_in_packets_); + // Ignore retired scheduled task, otherwise reset `next_process_time_`. 
+ if (scheduled_process_time.IsFinite()) { + if (scheduled_process_time != next_process_time_) { + return; + } + next_process_time_ = Timestamp::MinusInfinity(); } - absl::optional time_to_next_process; - if (pacing_controller_.IsProbing() && - next_process_time != next_process_time_) { - // If we're probing and there isn't already a wakeup scheduled for the next - // process time, always post a task and just round sleep time down to - // nearest millisecond. - if (next_process_time.IsMinusInfinity()) { - time_to_next_process = TimeDelta::Zero(); - } else { - time_to_next_process = - std::max(TimeDelta::Zero(), - (next_process_time - now).RoundDownTo(TimeDelta::Millis(1))); + // Do not hold back in probing. + TimeDelta hold_back_window = TimeDelta::Zero(); + if (!pacing_controller_.IsProbing()) { + hold_back_window = max_hold_back_window_; + DataRate pacing_rate = pacing_controller_.pacing_rate(); + if (max_hold_back_window_in_packets_ != kNoPacketHoldback && + !pacing_rate.IsZero() && + packet_size_.filtered() != rtc::ExpFilter::kValueUndefined) { + TimeDelta avg_packet_send_time = + DataSize::Bytes(packet_size_.filtered()) / pacing_rate; + hold_back_window = + std::min(hold_back_window, + avg_packet_send_time * max_hold_back_window_in_packets_); } - } else if (next_process_time_.IsMinusInfinity() || - next_process_time <= next_process_time_ - hold_back_window) { - // Schedule a new task since there is none currently scheduled - // (`next_process_time_` is infinite), or the new process time is at least - // one holdback window earlier than whatever is currently scheduled. - time_to_next_process = std::max(next_process_time - now, hold_back_window); } - if (time_to_next_process) { - // Set a new scheduled process time and post a delayed task. - next_process_time_ = next_process_time; + // Calculate next process time. + TimeDelta time_to_next_process = + std::max(hold_back_window, next_send_time - now - early_execute_margin); + next_send_time = now + time_to_next_process; + + // If no in flight task or in flight task is later than `next_send_time`, + // schedule a new one. Previous in flight task will be retired. + if (next_process_time_.IsMinusInfinity() || + next_process_time_ > next_send_time) { + // Prefer low precision if allowed and not probing. + TaskQueueBase::DelayPrecision precision = + slacked_pacer_flags_.allow_low_precision && + !pacing_controller_.IsProbing() + ? TaskQueueBase::DelayPrecision::kLow + : TaskQueueBase::DelayPrecision::kHigh; + // Check for cases where we need high precision. 
+ if (precision == TaskQueueBase::DelayPrecision::kLow) { + auto& packets_per_type = + pacing_controller_.SizeInPacketsPerRtpPacketMediaType(); + bool audio_or_retransmission_packets_in_queue = + packets_per_type[static_cast(RtpPacketMediaType::kAudio)] > + 0 || + packets_per_type[static_cast( + RtpPacketMediaType::kRetransmission)] > 0; + bool queue_time_too_large = + slacked_pacer_flags_.max_low_precision_expected_queue_time && + pacing_controller_.ExpectedQueueTime() >= + slacked_pacer_flags_.max_low_precision_expected_queue_time + .Value(); + if (audio_or_retransmission_packets_in_queue || queue_time_too_large) { + precision = TaskQueueBase::DelayPrecision::kHigh; + } + } - task_queue_.PostDelayedTask( - [this, next_process_time]() { MaybeProcessPackets(next_process_time); }, - time_to_next_process->ms()); + task_queue_.TaskQueueForDelayedTasks()->PostDelayedTaskWithPrecision( + precision, + task_queue_.MaybeSafeTask( + safety_.flag(), + [this, next_send_time]() { MaybeProcessPackets(next_send_time); }), + time_to_next_process.RoundUpTo(TimeDelta::Millis(1))); + next_process_time_ = next_send_time; } - - UpdateStats(); } void TaskQueuePacedSender::UpdateStats() { diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.h b/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.h index cb7ca4def8..18be6acef0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.h +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.h @@ -14,45 +14,41 @@ #include #include -#include #include -#include #include #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/sequence_checker.h" #include "api/task_queue/task_queue_factory.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "modules/include/module.h" #include "modules/pacing/pacing_controller.h" #include "modules/pacing/rtp_packet_pacer.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/utility/maybe_worker_thread.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/exp_filter.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" namespace webrtc { class Clock; -class RtcEventLog; class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { public: + static const int kNoPacketHoldback; + // The `hold_back_window` parameter sets a lower bound on time to sleep if // there is currently a pacer queue and packets can't immediately be // processed. Increasing this reduces thread wakeups at the expense of higher // latency. - // TODO(bugs.webrtc.org/10809): Remove default values. - TaskQueuePacedSender( - Clock* clock, - PacingController::PacketSender* packet_sender, - RtcEventLog* event_log, - const WebRtcKeyValueConfig* field_trials, - TaskQueueFactory* task_queue_factory, - TimeDelta max_hold_back_window = PacingController::kMinSleepTime, - int max_hold_back_window_in_packets = -1); + TaskQueuePacedSender(Clock* clock, + PacingController::PacketSender* packet_sender, + const FieldTrialsView& field_trials, + TaskQueueFactory* task_queue_factory, + TimeDelta max_hold_back_window, + int max_hold_back_window_in_packets); ~TaskQueuePacedSender() override; @@ -68,7 +64,8 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { // Methods implementing RtpPacketPacer. 
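// With this change, callers build a vector of ProbeClusterConfig instead of
// passing a single (bitrate, cluster_id) pair. A minimal sketch of a call site
// is shown below; the field names are recalled from
// api/transport/network_types.h and should be treated as an assumption rather
// than a definitive API:
//
//   std::vector<ProbeClusterConfig> probes;
//   ProbeClusterConfig config;
//   config.at_time = clock->CurrentTime();
//   config.target_data_rate = DataRate::KilobitsPerSec(1000);
//   config.target_duration = TimeDelta::Millis(15);
//   config.target_probe_count = 5;
//   config.id = 1;
//   probes.push_back(config);
//   pacer.CreateProbeClusters(std::move(probes));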
- void CreateProbeCluster(DataRate bitrate, int cluster_id) override; + void CreateProbeClusters( + std::vector probe_cluster_configs) override; // Temporarily pause all sending. void Pause() override; @@ -76,8 +73,7 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { // Resume sending packets. void Resume() override; - void SetCongestionWindow(DataSize congestion_window_size) override; - void UpdateOutstandingData(DataSize outstanding_data) override; + void SetCongested(bool congested) override; // Sets the pacing rates. Must be called once before packets can be sent. void SetPacingRates(DataRate pacing_rate, DataRate padding_rate) override; @@ -134,6 +130,37 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { Stats GetStats() const; Clock* const clock_; + struct BurstyPacerFlags { + // Parses `kBurstyPacerFieldTrial`. Example: + // --force-fieldtrials=WebRTC-BurstyPacer/burst:20ms/ + explicit BurstyPacerFlags(const FieldTrialsView& field_trials); + // If set, the pacer is allowed to build up a packet "debt" that correspond + // to approximately the send rate during the specified interval. + FieldTrialOptional burst; + }; + const BurstyPacerFlags bursty_pacer_flags_; + struct SlackedPacerFlags { + // Parses `kSlackedTaskQueuePacedSenderFieldTrial`. Example: + // --force-fieldtrials=WebRTC-SlackedTaskQueuePacedSender/Enabled,max_queue_time:75ms/ + explicit SlackedPacerFlags(const FieldTrialsView& field_trials); + // When "Enabled", delayed tasks invoking MaybeProcessPackets() are + // scheduled using low precision instead of high precision, resulting in + // less idle wake ups and packets being sent in bursts if the `task_queue_` + // implementation supports slack. When probing, high precision is used + // regardless to ensure good bandwidth estimation. + FieldTrialFlag allow_low_precision; + // Controlled via the "max_queue_time" experiment argument. If set, uses + // high precision scheduling of MaybeProcessPackets() whenever the expected + // queue time is greater than or equal to this value. + FieldTrialOptional max_low_precision_expected_queue_time; + // Controlled via "send_burst_interval" experiment argument. If set, the + // pacer is allowed to build up a packet "debt" that correspond to + // approximately the send rate during the specified interval. + FieldTrialOptional send_burst_interval; + }; + const SlackedPacerFlags slacked_pacer_flags_; + // The holdback window prevents too frequent delayed MaybeProcessPackets() + // calls. These are only applicable if `allow_low_precision` is false. const TimeDelta max_hold_back_window_; const int max_hold_back_window_in_packets_; @@ -157,11 +184,15 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { // Filtered size of enqueued packets, in bytes. rtc::ExpFilter packet_size_ RTC_GUARDED_BY(task_queue_); + bool include_overhead_ RTC_GUARDED_BY(task_queue_); + // TODO(webrtc:14502): Remove stats_mutex_ when pacer runs on the worker + // thread. 
mutable Mutex stats_mutex_; Stats current_stats_ RTC_GUARDED_BY(stats_mutex_); - rtc::TaskQueue task_queue_; + ScopedTaskSafety safety_; + MaybeWorkerThread task_queue_; }; } // namespace webrtc #endif // MODULES_PACING_TASK_QUEUE_PACED_SENDER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/OWNERS b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/OWNERS index 9b97144ac8..993e2fd77b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/OWNERS @@ -1,6 +1,5 @@ +danilchap@webrtc.org stefan@webrtc.org terelius@webrtc.org -asapersson@webrtc.org mflodman@webrtc.org -philipel@webrtc.org -srte@webrtc.org +perkj@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc index a3da2b5f2d..b625a745df 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc @@ -35,17 +35,15 @@ constexpr double kDefaultBackoffFactor = 0.85; constexpr char kBweBackOffFactorExperiment[] = "WebRTC-BweBackOffFactor"; -bool IsEnabled(const WebRtcKeyValueConfig& field_trials, - absl::string_view key) { +bool IsEnabled(const FieldTrialsView& field_trials, absl::string_view key) { return absl::StartsWith(field_trials.Lookup(key), "Enabled"); } -bool IsNotDisabled(const WebRtcKeyValueConfig& field_trials, - absl::string_view key) { +bool IsNotDisabled(const FieldTrialsView& field_trials, absl::string_view key) { return !absl::StartsWith(field_trials.Lookup(key), "Disabled"); } -double ReadBackoffFactor(const WebRtcKeyValueConfig& key_value_config) { +double ReadBackoffFactor(const FieldTrialsView& key_value_config) { std::string experiment_string = key_value_config.Lookup(kBweBackOffFactorExperiment); double backoff_factor; @@ -67,12 +65,12 @@ double ReadBackoffFactor(const WebRtcKeyValueConfig& key_value_config) { } // namespace -AimdRateControl::AimdRateControl(const WebRtcKeyValueConfig* key_value_config) +AimdRateControl::AimdRateControl(const FieldTrialsView* key_value_config) : AimdRateControl(key_value_config, /* send_side =*/false) {} -AimdRateControl::AimdRateControl(const WebRtcKeyValueConfig* key_value_config, +AimdRateControl::AimdRateControl(const FieldTrialsView* key_value_config, bool send_side) - : min_configured_bitrate_(congestion_controller::GetMinBitrate()), + : min_configured_bitrate_(kCongestionControllerMinBitrate), max_configured_bitrate_(DataRate::KilobitsPerSec(30000)), current_bitrate_(max_configured_bitrate_), latest_estimated_throughput_(current_bitrate_), @@ -95,11 +93,13 @@ AimdRateControl::AimdRateControl(const WebRtcKeyValueConfig* key_value_config, estimate_bounded_backoff_( IsNotDisabled(*key_value_config, "WebRTC-Bwe-EstimateBoundedBackoff")), - estimate_bounded_increase_( - IsNotDisabled(*key_value_config, - "WebRTC-Bwe-EstimateBoundedIncrease")), initial_backoff_interval_("initial_backoff_interval"), link_capacity_fix_("link_capacity_fix") { + ParseFieldTrial( + {&disable_estimate_bounded_increase_, &estimate_bounded_increase_ratio_, + &ignore_throughput_limit_if_network_estimate_, + &ignore_network_estimate_decrease_, &increase_to_network_estimate_}, + key_value_config->Lookup("WebRTC-Bwe-EstimateBoundedIncrease")); // E.g // WebRTC-BweAimdRateControlConfig/initial_backoff_interval:100ms/ 
ParseFieldTrial({&initial_backoff_interval_, &link_capacity_fix_}, @@ -272,29 +272,39 @@ void AimdRateControl::ChangeBitrate(const RateControlInput& input, ChangeState(input, at_time); - // We limit the new bitrate based on the troughput to avoid unlimited bitrate - // increases. We allow a bit more lag at very low rates to not too easily get - // stuck if the encoder produces uneven outputs. - const DataRate troughput_based_limit = - 1.5 * estimated_throughput + DataRate::KilobitsPerSec(10); - switch (rate_control_state_) { case RateControlState::kRcHold: break; - case RateControlState::kRcIncrease: + case RateControlState::kRcIncrease: { if (estimated_throughput > link_capacity_.UpperBound()) link_capacity_.Reset(); - // Do not increase the delay based estimate in alr since the estimator - // will not be able to get transport feedback necessary to detect if - // the new estimate is correct. - // If we have previously increased above the limit (for instance due to - // probing), we don't allow further changes. - if (current_bitrate_ < troughput_based_limit && - !(send_side_ && in_alr_ && no_bitrate_increase_in_alr_)) { + // We limit the new bitrate based on the troughput to avoid unlimited + // bitrate increases. We allow a bit more lag at very low rates to not too + // easily get stuck if the encoder produces uneven outputs. + DataRate increase_limit = + 1.5 * estimated_throughput + DataRate::KilobitsPerSec(10); + if (ignore_throughput_limit_if_network_estimate_ && network_estimate_ && + network_estimate_->link_capacity_upper.IsFinite()) { + // If we have a Network estimate, we do allow the estimate to increase. + increase_limit = network_estimate_->link_capacity_upper * + estimate_bounded_increase_ratio_.Get(); + } else if (send_side_ && in_alr_ && no_bitrate_increase_in_alr_) { + // Do not increase the delay based estimate in alr since the estimator + // will not be able to get transport feedback necessary to detect if + // the new estimate is correct. + // If we have previously increased above the limit (for instance due to + // probing), we don't allow further changes. + increase_limit = current_bitrate_; + } + + if (current_bitrate_ < increase_limit) { DataRate increased_bitrate = DataRate::MinusInfinity(); - if (link_capacity_.has_estimate()) { + if (increase_to_network_estimate_ && network_estimate_ && + network_estimate_->link_capacity_upper.IsFinite()) { + increased_bitrate = increase_limit; + } else if (link_capacity_.has_estimate()) { // The link_capacity estimate is reset if the measured throughput // is too far from the estimate. We can therefore assume that our // target rate is reasonably close to link capacity and use additive @@ -309,11 +319,11 @@ void AimdRateControl::ChangeBitrate(const RateControlInput& input, at_time, time_last_bitrate_change_, current_bitrate_); increased_bitrate = current_bitrate_ + multiplicative_increase; } - new_bitrate = std::min(increased_bitrate, troughput_based_limit); + new_bitrate = std::min(increased_bitrate, increase_limit); } - time_last_bitrate_change_ = at_time; break; + } case RateControlState::kRcDecrease: { DataRate decreased_bitrate = DataRate::PlusInfinity(); @@ -329,11 +339,6 @@ void AimdRateControl::ChangeBitrate(const RateControlInput& input, decreased_bitrate = beta_ * link_capacity_.estimate(); } } - if (estimate_bounded_backoff_ && network_estimate_) { - decreased_bitrate = std::max( - decreased_bitrate, network_estimate_->link_capacity_lower * beta_); - } - // Avoid increasing the rate when over-using. 
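// Illustrative worked example (not part of the original change): with the
// default backoff factor beta_ = 0.85 and an acknowledged throughput of
// 1000 kbps, the decrease computed above targets roughly 0.85 * 1000 = 850 kbps
// (or 0.85 times the link capacity estimate when that estimate is still valid).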
if (decreased_bitrate < current_bitrate_) { new_bitrate = decreased_bitrate; @@ -368,9 +373,21 @@ void AimdRateControl::ChangeBitrate(const RateControlInput& input, } DataRate AimdRateControl::ClampBitrate(DataRate new_bitrate) const { - if (estimate_bounded_increase_ && network_estimate_) { - DataRate upper_bound = network_estimate_->link_capacity_upper; - new_bitrate = std::min(new_bitrate, upper_bound); + if (!disable_estimate_bounded_increase_ && network_estimate_ && + network_estimate_->link_capacity_upper.IsFinite()) { + DataRate upper_bound = network_estimate_->link_capacity_upper * + estimate_bounded_increase_ratio_.Get(); + if (ignore_network_estimate_decrease_) { + upper_bound = std::max(upper_bound, current_bitrate_); + } + new_bitrate = std::min(upper_bound, new_bitrate); + } + if (estimate_bounded_backoff_ && network_estimate_ && + network_estimate_->link_capacity_lower.IsFinite() && + new_bitrate < current_bitrate_) { + new_bitrate = std::min( + current_bitrate_, + std::max(new_bitrate, network_estimate_->link_capacity_lower * beta_)); } new_bitrate = std::max(new_bitrate, min_configured_bitrate_); return new_bitrate; diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h index 3e0d541b60..6c770cdc45 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h @@ -14,8 +14,8 @@ #include #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/transport/network_types.h" -#include "api/transport/webrtc_key_value_config.h" #include "api/units/data_rate.h" #include "api/units/timestamp.h" #include "modules/congestion_controller/goog_cc/link_capacity_estimator.h" @@ -30,8 +30,8 @@ namespace webrtc { // multiplicatively. class AimdRateControl { public: - explicit AimdRateControl(const WebRtcKeyValueConfig* key_value_config); - AimdRateControl(const WebRtcKeyValueConfig* key_value_config, bool send_side); + explicit AimdRateControl(const FieldTrialsView* key_value_config); + AimdRateControl(const FieldTrialsView* key_value_config, bool send_side); ~AimdRateControl(); // Returns true if the target bitrate has been initialized. This happens @@ -107,9 +107,16 @@ class AimdRateControl { // Use estimated link capacity lower bound if it is higher than the // acknowledged rate when backing off due to overuse. const bool estimate_bounded_backoff_; - // Use estimated link capacity upper bound as upper limit for increasing - // bitrate over the acknowledged rate. - const bool estimate_bounded_increase_; + // If false, uses estimated link capacity upper bound * + // `estimate_bounded_increase_ratio_` as upper limit for the estimate. 
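// An illustrative trial string (an assumption, not taken from the original
// change) that keeps the bounded increase enabled and caps it at 0.9 of the
// estimated upper link capacity could look like
//   WebRTC-Bwe-EstimateBoundedIncrease/ratio:0.9,ignore_acked:true/
// using the parameter keys ("ratio", "ignore_acked", "immediate_incr",
// "ignore_decr") declared below.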
+ FieldTrialFlag disable_estimate_bounded_increase_{"Disabled"}; + FieldTrialParameter estimate_bounded_increase_ratio_{"ratio", 1.0}; + FieldTrialParameter ignore_throughput_limit_if_network_estimate_{ + "ignore_acked", false}; + FieldTrialParameter increase_to_network_estimate_{"immediate_incr", + false}; + FieldTrialParameter ignore_network_estimate_decrease_{"ignore_decr", + false}; absl::optional last_decrease_; FieldTrialOptional initial_backoff_interval_; FieldTrialFlag link_capacity_fix_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/bwe_defines.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/bwe_defines.cc index 6afbe133e2..db92f46717 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/bwe_defines.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/bwe_defines.cc @@ -10,24 +10,10 @@ #include "modules/remote_bitrate_estimator/include/bwe_defines.h" -#include "system_wrappers/include/field_trial.h" - namespace webrtc { const char kBweTypeHistogram[] = "WebRTC.BWE.Types"; -namespace congestion_controller { -int GetMinBitrateBps() { - constexpr int kMinBitrateBps = 5000; - return kMinBitrateBps; -} - -DataRate GetMinBitrate() { - return DataRate::BitsPerSec(GetMinBitrateBps()); -} - -} // namespace congestion_controller - RateControlInput::RateControlInput( BandwidthUsage bw_state, const absl::optional& estimated_throughput) diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h index b3ca1846f4..d3dd96be75 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h @@ -19,10 +19,7 @@ namespace webrtc { -namespace congestion_controller { -int GetMinBitrateBps(); -DataRate GetMinBitrate(); -} // namespace congestion_controller +constexpr DataRate kCongestionControllerMinBitrate = DataRate::BitsPerSec(5000); static const int64_t kBitrateWindowMs = 1000; diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h index dcc08f49cc..0d4e15e9e1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h @@ -17,7 +17,8 @@ #include #include -#include "modules/include/module.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" #include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" @@ -38,7 +39,7 @@ class RemoteBitrateObserver { virtual ~RemoteBitrateObserver() {} }; -class RemoteBitrateEstimator : public CallStatsObserver, public Module { +class RemoteBitrateEstimator : public CallStatsObserver { public: ~RemoteBitrateEstimator() override {} @@ -54,13 +55,10 @@ class RemoteBitrateEstimator : public CallStatsObserver, public Module { // Removes all data for `ssrc`. virtual void RemoveStream(uint32_t ssrc) = 0; - // Returns true if a valid estimate exists and sets `bitrate_bps` to the - // estimated payload bitrate in bits per second. 
`ssrcs` is the list of ssrcs - // currently being received and of which the bitrate estimate is based upon. - virtual bool LatestEstimate(std::vector* ssrcs, - uint32_t* bitrate_bps) const = 0; + // Returns latest estimate or DataRate::Zero() if estimation is unavailable. + virtual DataRate LatestEstimate() const = 0; - virtual void SetMinBitrate(int min_bitrate_bps) = 0; + virtual TimeDelta Process() = 0; protected: static const int64_t kProcessIntervalMs = 500; diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc index 710b3b21d3..672822bbcd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc @@ -33,7 +33,7 @@ const double kOverUsingTimeThreshold = 10; const int kMaxNumDeltas = 60; bool AdaptiveThresholdExperimentIsDisabled( - const WebRtcKeyValueConfig& key_value_config) { + const FieldTrialsView& key_value_config) { std::string experiment_string = key_value_config.Lookup(kAdaptiveThresholdExperiment); const size_t kMinExperimentLength = kDisabledPrefixLength; @@ -44,7 +44,7 @@ bool AdaptiveThresholdExperimentIsDisabled( // Gets thresholds from the experiment name following the format // "WebRTC-AdaptiveBweThreshold/Enabled-0.5,0.002/". -bool ReadExperimentConstants(const WebRtcKeyValueConfig& key_value_config, +bool ReadExperimentConstants(const FieldTrialsView& key_value_config, double* k_up, double* k_down) { std::string experiment_string = @@ -57,7 +57,7 @@ bool ReadExperimentConstants(const WebRtcKeyValueConfig& key_value_config, "%lf,%lf", k_up, k_down) == 2; } -OveruseDetector::OveruseDetector(const WebRtcKeyValueConfig* key_value_config) +OveruseDetector::OveruseDetector(const FieldTrialsView* key_value_config) // Experiment is on by default, but can be disabled with finch by setting // the field trial string to "WebRTC-AdaptiveBweThreshold/Disabled/". 
: in_experiment_(!AdaptiveThresholdExperimentIsDisabled(*key_value_config)), @@ -147,7 +147,7 @@ void OveruseDetector::UpdateThreshold(double modified_offset, int64_t now_ms) { } void OveruseDetector::InitializeExperiment( - const WebRtcKeyValueConfig& key_value_config) { + const FieldTrialsView& key_value_config) { RTC_DCHECK(in_experiment_); double k_up = 0.0; double k_down = 0.0; diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.h index d1c6aa8d30..dfaea9187a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.h @@ -12,20 +12,22 @@ #include +#include "api/field_trials_view.h" #include "api/network_state_predictor.h" -#include "api/transport/webrtc_key_value_config.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { bool AdaptiveThresholdExperimentIsDisabled( - const WebRtcKeyValueConfig& key_value_config); + const FieldTrialsView& key_value_config); class OveruseDetector { public: - explicit OveruseDetector(const WebRtcKeyValueConfig* key_value_config); + explicit OveruseDetector(const FieldTrialsView* key_value_config); virtual ~OveruseDetector(); + OveruseDetector(const OveruseDetector&) = delete; + OveruseDetector& operator=(const OveruseDetector&) = delete; + // Update the detection state based on the estimated inter-arrival time delta // offset. `timestamp_delta` is the delta between the last timestamp which the // estimated offset is based on and the last timestamp on which the last @@ -42,7 +44,7 @@ class OveruseDetector { private: void UpdateThreshold(double modified_offset, int64_t now_ms); - void InitializeExperiment(const WebRtcKeyValueConfig& key_value_config); + void InitializeExperiment(const FieldTrialsView& key_value_config); bool in_experiment_; double k_up_; @@ -54,8 +56,6 @@ class OveruseDetector { double time_over_using_; int overuse_counter_; BandwidthUsage hypothesis_; - - RTC_DISALLOW_COPY_AND_ASSIGN(OveruseDetector); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.h index d023b36d89..c021f00da7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.h @@ -15,7 +15,6 @@ #include #include "api/network_state_predictor.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -39,6 +38,9 @@ class OveruseEstimator { explicit OveruseEstimator(const OverUseDetectorOptions& options); ~OveruseEstimator(); + OveruseEstimator(const OveruseEstimator&) = delete; + OveruseEstimator& operator=(const OveruseEstimator&) = delete; + // Update the estimator with a new sample. The deltas should represent deltas // between timestamp groups as defined by the InterArrival class. 
// `current_hypothesis` should be the hypothesis of the over-use detector at @@ -75,8 +77,6 @@ class OveruseEstimator { double avg_noise_; double var_noise_; std::deque ts_delta_hist_; - - RTC_DISALLOW_COPY_AND_ASSIGN(OveruseEstimator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.cc index 72696f6c80..16d400e227 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.cc @@ -10,114 +10,184 @@ #include "modules/remote_bitrate_estimator/packet_arrival_map.h" #include +#include -#include "rtc_base/numerics/safe_minmax.h" +#include "api/units/timestamp.h" +#include "rtc_base/checks.h" namespace webrtc { -constexpr size_t PacketArrivalTimeMap::kMaxNumberOfPackets; - void PacketArrivalTimeMap::AddPacket(int64_t sequence_number, - int64_t arrival_time_ms) { - if (!has_seen_packet_) { + Timestamp arrival_time) { + RTC_DCHECK_GE(arrival_time, Timestamp::Zero()); + if (!has_seen_packet()) { // First packet. - has_seen_packet_ = true; + Reallocate(kMinCapacity); begin_sequence_number_ = sequence_number; - arrival_times.push_back(arrival_time_ms); + end_sequence_number_ = sequence_number + 1; + arrival_times_[Index(sequence_number)] = arrival_time; return; } - int64_t pos = sequence_number - begin_sequence_number_; - if (pos >= 0 && pos < static_cast(arrival_times.size())) { + if (sequence_number >= begin_sequence_number() && + sequence_number < end_sequence_number()) { // The packet is within the buffer - no need to expand it. - arrival_times[pos] = arrival_time_ms; + arrival_times_[Index(sequence_number)] = arrival_time; return; } - if (pos < 0) { + if (sequence_number < begin_sequence_number()) { // The packet goes before the current buffer. Expand to add packet, but only // if it fits within kMaxNumberOfPackets. - size_t missing_packets = -pos; - if (missing_packets + arrival_times.size() > kMaxNumberOfPackets) { + int64_t new_size = end_sequence_number() - sequence_number; + if (new_size > kMaxNumberOfPackets) { // Don't expand the buffer further, as that would remove newly received // packets. return; } + AdjustToSize(new_size); - arrival_times.insert(arrival_times.begin(), missing_packets, 0); - arrival_times[0] = arrival_time_ms; + arrival_times_[Index(sequence_number)] = arrival_time; + SetNotReceived(sequence_number + 1, begin_sequence_number_); begin_sequence_number_ = sequence_number; return; } // The packet goes after the buffer. + RTC_DCHECK_GE(sequence_number, end_sequence_number_); + int64_t new_end_sequence_number = sequence_number + 1; - if (static_cast(pos) >= kMaxNumberOfPackets) { - // The buffer grows too large - old packets have to be removed. - size_t packets_to_remove = pos - kMaxNumberOfPackets + 1; - if (packets_to_remove >= arrival_times.size()) { - arrival_times.clear(); - begin_sequence_number_ = sequence_number; - pos = 0; - } else { - // Also trim the buffer to remove leading non-received packets, to - // ensure that the buffer only spans received packets. 
- while (packets_to_remove < arrival_times.size() && - arrival_times[packets_to_remove] == 0) { - ++packets_to_remove; - } - - arrival_times.erase(arrival_times.begin(), - arrival_times.begin() + packets_to_remove); - begin_sequence_number_ += packets_to_remove; - pos -= packets_to_remove; - RTC_DCHECK_GE(pos, 0); - } + if (new_end_sequence_number >= end_sequence_number_ + kMaxNumberOfPackets) { + // All old packets have to be removed. + begin_sequence_number_ = sequence_number; + end_sequence_number_ = new_end_sequence_number; + arrival_times_[Index(sequence_number)] = arrival_time; + return; } + if (begin_sequence_number_ < new_end_sequence_number - kMaxNumberOfPackets) { + // Remove oldest entries + begin_sequence_number_ = new_end_sequence_number - kMaxNumberOfPackets; + RTC_DCHECK_GT(end_sequence_number_, begin_sequence_number_); + // Also trim the buffer to remove leading non-received packets, to + // ensure that the buffer only spans received packets. + TrimLeadingNotReceivedEntries(); + } + + AdjustToSize(new_end_sequence_number - begin_sequence_number_); + // Packets can be received out-of-order. If this isn't the next expected // packet, add enough placeholders to fill the gap. - size_t missing_gap_packets = pos - arrival_times.size(); - if (missing_gap_packets > 0) { - arrival_times.insert(arrival_times.end(), missing_gap_packets, 0); + SetNotReceived(end_sequence_number_, sequence_number); + end_sequence_number_ = new_end_sequence_number; + arrival_times_[Index(sequence_number)] = arrival_time; +} + +void PacketArrivalTimeMap::TrimLeadingNotReceivedEntries() { + const int begin_index = Index(begin_sequence_number_); + const Timestamp* const begin_it = arrival_times_.get() + begin_index; + const Timestamp* const end_it = arrival_times_.get() + capacity(); + + for (const Timestamp* it = begin_it; it != end_it; ++it) { + if (*it >= Timestamp::Zero()) { + begin_sequence_number_ += (it - begin_it); + return; + } + } + // Reached end of the arrival_times_ and all entries represent not received + // packets. Remove them. + begin_sequence_number_ += (capacity() - begin_index); + // Continue removing entries at the beginning of the circular buffer. + for (const Timestamp* it = arrival_times_.get(); it != begin_it; ++it) { + if (*it >= Timestamp::Zero()) { + begin_sequence_number_ += (it - arrival_times_.get()); + return; + } } - RTC_DCHECK_EQ(arrival_times.size(), pos); - arrival_times.push_back(arrival_time_ms); - RTC_DCHECK_LE(arrival_times.size(), kMaxNumberOfPackets); + + RTC_DCHECK_NOTREACHED() << "There should be at least one non-empty entry"; } -void PacketArrivalTimeMap::RemoveOldPackets(int64_t sequence_number, - int64_t arrival_time_limit) { - while (!arrival_times.empty() && begin_sequence_number_ < sequence_number && - arrival_times.front() <= arrival_time_limit) { - arrival_times.pop_front(); - ++begin_sequence_number_; +void PacketArrivalTimeMap::SetNotReceived( + int64_t begin_sequence_number_inclusive, + int64_t end_sequence_number_exclusive) { + static constexpr Timestamp value = Timestamp::MinusInfinity(); + + int begin_index = Index(begin_sequence_number_inclusive); + int end_index = Index(end_sequence_number_exclusive); + + if (begin_index <= end_index) { + // Entries to clear are in single block: + // [......{-----}....] 
+ std::fill(arrival_times_.get() + begin_index, + arrival_times_.get() + end_index, value); + } else { + // Entries to clear span across arrival_times_ border: + // [--}..........{---] + std::fill(arrival_times_.get() + begin_index, + arrival_times_.get() + capacity(), value); + std::fill(arrival_times_.get(), arrival_times_.get() + end_index, value); } } -bool PacketArrivalTimeMap::has_received(int64_t sequence_number) const { - int64_t pos = sequence_number - begin_sequence_number_; - if (pos >= 0 && pos < static_cast(arrival_times.size()) && - arrival_times[pos] != 0) { - return true; +void PacketArrivalTimeMap::RemoveOldPackets(int64_t sequence_number, + Timestamp arrival_time_limit) { + int64_t check_to = std::min(sequence_number, end_sequence_number_); + while (begin_sequence_number_ < check_to && + arrival_times_[Index(begin_sequence_number_)] <= arrival_time_limit) { + ++begin_sequence_number_; } - return false; + AdjustToSize(end_sequence_number_ - begin_sequence_number_); } void PacketArrivalTimeMap::EraseTo(int64_t sequence_number) { - if (sequence_number > begin_sequence_number_) { - size_t count = - std::min(static_cast(sequence_number - begin_sequence_number_), - arrival_times.size()); + if (sequence_number < begin_sequence_number_) { + return; + } + if (sequence_number >= end_sequence_number_) { + // Erase all. + begin_sequence_number_ = end_sequence_number_; + return; + } + // Remove some. + begin_sequence_number_ = sequence_number; + RTC_DCHECK(has_received(begin_sequence_number_)); + AdjustToSize(end_sequence_number_ - begin_sequence_number_); +} - arrival_times.erase(arrival_times.begin(), arrival_times.begin() + count); - begin_sequence_number_ += count; +void PacketArrivalTimeMap::AdjustToSize(int new_size) { + if (new_size > capacity()) { + int new_capacity = capacity(); + while (new_capacity < new_size) + new_capacity *= 2; + Reallocate(new_capacity); } + if (capacity() > std::max(kMinCapacity, 4 * new_size)) { + int new_capacity = capacity(); + while (new_capacity > 2 * std::max(new_size, kMinCapacity)) { + new_capacity /= 2; + } + Reallocate(new_capacity); + } + RTC_DCHECK_LE(new_size, capacity()); } -int64_t PacketArrivalTimeMap::clamp(int64_t sequence_number) const { - return rtc::SafeClamp(sequence_number, begin_sequence_number(), - end_sequence_number()); +void PacketArrivalTimeMap::Reallocate(int new_capacity) { + int new_capacity_minus_1 = new_capacity - 1; + // Check capacity is a power of 2. + RTC_DCHECK_EQ(new_capacity & new_capacity_minus_1, 0); + // Create uninitialized memory. + // All valid entries should be set by `AddPacket` before use. 
+ void* raw = operator new[](new_capacity * sizeof(Timestamp)); + Timestamp* new_buffer = static_cast(raw); + + for (int64_t sequence_number = begin_sequence_number_; + sequence_number < end_sequence_number_; ++sequence_number) { + new_buffer[sequence_number & new_capacity_minus_1] = + arrival_times_[sequence_number & capacity_minus_1_]; + } + arrival_times_.reset(new_buffer); + capacity_minus_1_ = new_capacity_minus_1; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.h index 10659e0f65..e7086d0de4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.h @@ -10,10 +10,12 @@ #ifndef MODULES_REMOTE_BITRATE_ESTIMATOR_PACKET_ARRIVAL_MAP_H_ #define MODULES_REMOTE_BITRATE_ESTIMATOR_PACKET_ARRIVAL_MAP_H_ +#include #include #include -#include +#include +#include "api/units/timestamp.h" #include "rtc_base/checks.h" namespace webrtc { @@ -29,12 +31,25 @@ namespace webrtc { // packets out-of-order. class PacketArrivalTimeMap { public: + struct PacketArrivalTime { + Timestamp arrival_time; + int64_t sequence_number; + }; // Impossible to request feedback older than what can be represented by 15 // bits. - static constexpr size_t kMaxNumberOfPackets = (1 << 15); + static constexpr int kMaxNumberOfPackets = (1 << 15); + + PacketArrivalTimeMap() = default; + PacketArrivalTimeMap(const PacketArrivalTimeMap&) = delete; + PacketArrivalTimeMap& operator=(const PacketArrivalTimeMap&) = delete; + ~PacketArrivalTimeMap() = default; // Indicates if the packet with `sequence_number` has already been received. - bool has_received(int64_t sequence_number) const; + bool has_received(int64_t sequence_number) const { + return sequence_number >= begin_sequence_number() && + sequence_number < end_sequence_number() && + arrival_times_[Index(sequence_number)] >= Timestamp::Zero(); + } // Returns the sequence number of the first entry in the map, i.e. the // sequence number that a `begin()` iterator would represent. @@ -42,45 +57,88 @@ class PacketArrivalTimeMap { // Returns the sequence number of the element just after the map, i.e. the // sequence number that an `end()` iterator would represent. - int64_t end_sequence_number() const { - return begin_sequence_number_ + arrival_times.size(); - } + int64_t end_sequence_number() const { return end_sequence_number_; } // Returns an element by `sequence_number`, which must be valid, i.e. // between [begin_sequence_number, end_sequence_number). - int64_t get(int64_t sequence_number) { - int64_t pos = sequence_number - begin_sequence_number_; - RTC_DCHECK(pos >= 0 && pos < static_cast(arrival_times.size())); - return arrival_times[pos]; + Timestamp get(int64_t sequence_number) { + RTC_DCHECK_GE(sequence_number, begin_sequence_number()); + RTC_DCHECK_LT(sequence_number, end_sequence_number()); + return arrival_times_[Index(sequence_number)]; + } + + // Returns timestamp and sequence number of the received packet with sequence + // number equal or larger than `sequence_number`. `sequence_number` must be in + // range [begin_sequence_number, end_sequence_number). 
+  PacketArrivalTime FindNextAtOrAfter(int64_t sequence_number) const {
+    RTC_DCHECK_GE(sequence_number, begin_sequence_number());
+    RTC_DCHECK_LT(sequence_number, end_sequence_number());
+    while (true) {
+      Timestamp t = arrival_times_[Index(sequence_number)];
+      if (t >= Timestamp::Zero()) {
+        return {.arrival_time = t, .sequence_number = sequence_number};
+      }
+      ++sequence_number;
+    }
+  }
 
   // Clamps `sequence_number` between [begin_sequence_number,
   // end_sequence_number].
-  int64_t clamp(int64_t sequence_number) const;
+  int64_t clamp(int64_t sequence_number) const {
+    return std::clamp(sequence_number, begin_sequence_number(),
+                      end_sequence_number());
+  }
 
   // Erases all elements from the beginning of the map until `sequence_number`.
   void EraseTo(int64_t sequence_number);
 
   // Records the fact that a packet with `sequence_number` arrived at
   // `arrival_time_ms`.
-  void AddPacket(int64_t sequence_number, int64_t arrival_time_ms);
+  void AddPacket(int64_t sequence_number, Timestamp arrival_time);
 
   // Removes packets from the beginning of the map as long as they are received
   // before `sequence_number` and with an age older than `arrival_time_limit`
-  void RemoveOldPackets(int64_t sequence_number, int64_t arrival_time_limit);
+  void RemoveOldPackets(int64_t sequence_number, Timestamp arrival_time_limit);
 
  private:
-  // Deque representing unwrapped sequence number -> time, where the index +
-  // `begin_sequence_number_` represents the packet's sequence number.
-  std::deque<int64_t> arrival_times;
+  static constexpr int kMinCapacity = 128;
+
+  // Returns index in the `arrival_times_` for value for `sequence_number`.
+  int Index(int64_t sequence_number) const {
+    // Note that sequence_number might be negative, thus taking '%' requires
+    // extra handling and can be slow. Because capacity is a power of two, it
+    // is much faster to use '&' operator.
+    return sequence_number & capacity_minus_1_;
+  }
 
-  // The unwrapped sequence number for the first element in
-  // `arrival_times`.
-  int64_t begin_sequence_number_ = 0;
+  void SetNotReceived(int64_t begin_sequence_number_inclusive,
+                      int64_t end_sequence_number_exclusive);
+
+  void TrimLeadingNotReceivedEntries();
+
+  // Adjust capacity to match new_size, may reduce capacity.
+  // On return guarantees capacity >= new_size.
+  void AdjustToSize(int new_size);
+  void Reallocate(int new_capacity);
+
+  int capacity() const { return capacity_minus_1_ + 1; }
+  bool has_seen_packet() const { return arrival_times_ != nullptr; }
 
-  // Indicates if this map has had any packet added to it. The first packet
-  // decides the initial sequence number.
-  bool has_seen_packet_ = false;
+  // Circular buffer. Packet with sequence number `sequence_number`
+  // is stored in the slot `sequence_number % capacity_`
+  std::unique_ptr<Timestamp[]> arrival_times_ = nullptr;
+
+  // Allocated size of the `arrival_times_`
+  // capacity_ is a power of 2 in range [kMinCapacity, kMaxNumberOfPackets]
+  // `capacity - 1` is used much more often than `capacity`, thus that value is
+  // stored.
+  int capacity_minus_1_ = -1;
+
+  // The unwrapped sequence number for valid range of sequence numbers.
+ // arrival_times_ entries only valid for sequence numbers in range + // `begin_sequence_number_ <= sequence_number < end_sequence_number_` + int64_t begin_sequence_number_ = 0; + int64_t end_sequence_number_ = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc index ae960ab960..e9fb1b99f6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc @@ -278,13 +278,7 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo( TimeoutStreams(now); RTC_DCHECK(inter_arrival_); RTC_DCHECK(estimator_); - // TODO(danilchap): Replace 5 lines below with insert_or_assign when that - // c++17 function is available. - auto inserted = ssrcs_.insert(std::make_pair(ssrc, now)); - if (!inserted.second) { - // Already inserted, update. - inserted.first->second = now; - } + ssrcs_.insert_or_assign(ssrc, now); // For now only try to detect probes while we don't have a valid estimate. // We currently assume that only packets larger than 200 bytes are paced by @@ -359,11 +353,8 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo( } } -void RemoteBitrateEstimatorAbsSendTime::Process() {} - -int64_t RemoteBitrateEstimatorAbsSendTime::TimeUntilNextProcess() { - const int64_t kDisabledModuleTime = 1000; - return kDisabledModuleTime; +TimeDelta RemoteBitrateEstimatorAbsSendTime::Process() { + return TimeDelta::PlusInfinity(); } void RemoteBitrateEstimatorAbsSendTime::TimeoutStreams(Timestamp now) { @@ -396,32 +387,13 @@ void RemoteBitrateEstimatorAbsSendTime::RemoveStream(uint32_t ssrc) { ssrcs_.erase(ssrc); } -bool RemoteBitrateEstimatorAbsSendTime::LatestEstimate( - std::vector* ssrcs, - uint32_t* bitrate_bps) const { - // Currently accessed from both the process thread (see - // ModuleRtpRtcpImpl::Process()) and the configuration thread (see - // Call::GetStats()). Should in the future only be accessed from a single - // thread. - RTC_DCHECK(ssrcs); - RTC_DCHECK(bitrate_bps); +DataRate RemoteBitrateEstimatorAbsSendTime::LatestEstimate() const { + // Currently accessed only from the worker thread (see Call::GetStats()). MutexLock lock(&mutex_); - if (!remote_rate_.ValidEstimate()) { - return false; + if (!remote_rate_.ValidEstimate() || ssrcs_.empty()) { + return DataRate::Zero(); } - *ssrcs = Keys(ssrcs_); - if (ssrcs_.empty()) { - *bitrate_bps = 0; - } else { - *bitrate_bps = remote_rate_.LatestEstimate().bps(); - } - return true; + return remote_rate_.LatestEstimate(); } -void RemoteBitrateEstimatorAbsSendTime::SetMinBitrate(int min_bitrate_bps) { - // Called from both the configuration thread and the network thread. Shouldn't - // be called from the network thread in the future. 
- MutexLock lock(&mutex_); - remote_rate_.SetMinBitrate(DataRate::BitsPerSec(min_bitrate_bps)); -} } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h index 4117382577..fd33c84b04 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h @@ -55,17 +55,10 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator { void IncomingPacket(int64_t arrival_time_ms, size_t payload_size, const RTPHeader& header) override; - // This class relies on Process() being called periodically (at least once - // every other second) for streams to be timed out properly. Therefore it - // shouldn't be detached from the ProcessThread except if it's about to be - // deleted. - void Process() override; - int64_t TimeUntilNextProcess() override; + TimeDelta Process() override; void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override; void RemoveStream(uint32_t ssrc) override; - bool LatestEstimate(std::vector* ssrcs, - uint32_t* bitrate_bps) const override; - void SetMinBitrate(int min_bitrate_bps) override; + DataRate LatestEstimate() const override; private: struct Probe { diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc index c1c56977cc..6f442e5e2c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc @@ -44,7 +44,7 @@ struct RemoteBitrateEstimatorSingleStream::Detector { explicit Detector(int64_t last_packet_time_ms, const OverUseDetectorOptions& options, bool enable_burst_grouping, - const WebRtcKeyValueConfig* key_value_config) + const FieldTrialsView* key_value_config) : last_packet_time_ms(last_packet_time_ms), inter_arrival(90 * kTimestampGroupLengthMs, kTimestampToMs, @@ -155,22 +155,17 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket( } } -void RemoteBitrateEstimatorSingleStream::Process() { - { - MutexLock lock(&mutex_); - UpdateEstimate(clock_->TimeInMilliseconds()); +TimeDelta RemoteBitrateEstimatorSingleStream::Process() { + MutexLock lock(&mutex_); + int64_t now_ms = clock_->TimeInMilliseconds(); + int64_t next_process_time_ms = last_process_time_ + process_interval_ms_; + if (last_process_time_ == -1 || now_ms >= next_process_time_ms) { + UpdateEstimate(now_ms); + last_process_time_ = now_ms; + return TimeDelta::Millis(process_interval_ms_); } - last_process_time_ = clock_->TimeInMilliseconds(); -} -int64_t RemoteBitrateEstimatorSingleStream::TimeUntilNextProcess() { - if (last_process_time_ < 0) { - return 0; - } - MutexLock lock_(&mutex_); - RTC_DCHECK_GT(process_interval_ms_, 0); - return last_process_time_ + process_interval_ms_ - - clock_->TimeInMilliseconds(); + return TimeDelta::Millis(next_process_time_ms - now_ms); } void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t now_ms) { @@ -229,20 +224,12 @@ void RemoteBitrateEstimatorSingleStream::RemoveStream(unsigned int ssrc) { } } -bool RemoteBitrateEstimatorSingleStream::LatestEstimate( - 
std::vector* ssrcs, - uint32_t* bitrate_bps) const { +DataRate RemoteBitrateEstimatorSingleStream::LatestEstimate() const { MutexLock lock(&mutex_); - RTC_DCHECK(bitrate_bps); - if (!remote_rate_->ValidEstimate()) { - return false; + if (!remote_rate_->ValidEstimate() || overuse_detectors_.empty()) { + return DataRate::Zero(); } - GetSsrcs(ssrcs); - if (ssrcs->empty()) - *bitrate_bps = 0; - else - *bitrate_bps = remote_rate_->LatestEstimate().bps(); - return true; + return remote_rate_->LatestEstimate(); } void RemoteBitrateEstimatorSingleStream::GetSsrcs( @@ -262,9 +249,4 @@ AimdRateControl* RemoteBitrateEstimatorSingleStream::GetRemoteRate() { return remote_rate_.get(); } -void RemoteBitrateEstimatorSingleStream::SetMinBitrate(int min_bitrate_bps) { - MutexLock lock(&mutex_); - remote_rate_->SetMinBitrate(DataRate::BitsPerSec(min_bitrate_bps)); -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h index d490485e7a..d62f922e02 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h @@ -19,6 +19,9 @@ #include #include "api/transport/field_trial_based_config.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/remote_bitrate_estimator/aimd_rate_control.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "rtc_base/rate_statistics.h" @@ -46,13 +49,10 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator { void IncomingPacket(int64_t arrival_time_ms, size_t payload_size, const RTPHeader& header) override; - void Process() override; - int64_t TimeUntilNextProcess() override; + TimeDelta Process() override; void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override; void RemoveStream(uint32_t ssrc) override; - bool LatestEstimate(std::vector* ssrcs, - uint32_t* bitrate_bps) const override; - void SetMinBitrate(int min_bitrate_bps) override; + DataRate LatestEstimate() const override; private: struct Detector; diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc index 710736876e..dd9fbbc944 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc @@ -15,6 +15,8 @@ #include #include +#include "api/units/data_size.h" +#include "modules/rtp_rtcp/source/rtcp_packet/remote_estimate.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -22,28 +24,43 @@ #include "system_wrappers/include/clock.h" namespace webrtc { - +namespace { // The maximum allowed value for a timestamp in milliseconds. This is lower // than the numerical limit since we often convert to microseconds. 
static constexpr int64_t kMaxTimeMs = std::numeric_limits::max() / 1000; +TimeDelta GetAbsoluteSendTimeDelta(uint32_t new_sendtime, + uint32_t previous_sendtime) { + static constexpr uint32_t kWrapAroundPeriod = 0x0100'0000; + RTC_DCHECK_LT(new_sendtime, kWrapAroundPeriod); + RTC_DCHECK_LT(previous_sendtime, kWrapAroundPeriod); + uint32_t delta = (new_sendtime - previous_sendtime) % kWrapAroundPeriod; + if (delta >= kWrapAroundPeriod / 2) { + // absolute send time wraps around, thus treat deltas larger than half of + // the wrap around period as negative. Ignore reordering of packets and + // treat them as they have approximately the same send time. + return TimeDelta::Zero(); + } + return TimeDelta::Micros(int64_t{delta} * 1'000'000 / (1 << 18)); +} +} // namespace + RemoteEstimatorProxy::RemoteEstimatorProxy( - Clock* clock, TransportFeedbackSender feedback_sender, - const WebRtcKeyValueConfig* key_value_config, + const FieldTrialsView* key_value_config, NetworkStateEstimator* network_state_estimator) - : clock_(clock), - feedback_sender_(std::move(feedback_sender)), + : feedback_sender_(std::move(feedback_sender)), send_config_(key_value_config), - last_process_time_ms_(-1), + last_process_time_(Timestamp::MinusInfinity()), network_state_estimator_(network_state_estimator), media_ssrc_(0), feedback_packet_count_(0), - send_interval_ms_(send_config_.default_interval->ms()), + packet_overhead_(DataSize::Zero()), + send_interval_(send_config_.default_interval.Get()), send_periodic_feedback_(true), previous_abs_send_time_(0), - abs_send_timestamp_(clock->CurrentTime()) { + abs_send_timestamp_(Timestamp::Zero()) { RTC_LOG(LS_INFO) << "Maximum interval between transport feedback RTCP messages (ms): " << send_config_.max_interval->ms(); @@ -52,33 +69,47 @@ RemoteEstimatorProxy::RemoteEstimatorProxy( RemoteEstimatorProxy::~RemoteEstimatorProxy() {} void RemoteEstimatorProxy::MaybeCullOldPackets(int64_t sequence_number, - int64_t arrival_time_ms) { - if (periodic_window_start_seq_.has_value()) { - if (*periodic_window_start_seq_ >= - packet_arrival_times_.end_sequence_number()) { - // Start new feedback packet, cull old packets. - packet_arrival_times_.RemoveOldPackets( - sequence_number, arrival_time_ms - send_config_.back_window->ms()); - } + Timestamp arrival_time) { + if (periodic_window_start_seq_ >= + packet_arrival_times_.end_sequence_number() && + arrival_time - Timestamp::Zero() >= send_config_.back_window.Get()) { + // Start new feedback packet, cull old packets. 
+ packet_arrival_times_.RemoveOldPackets( + sequence_number, arrival_time - send_config_.back_window.Get()); } } void RemoteEstimatorProxy::IncomingPacket(int64_t arrival_time_ms, size_t payload_size, const RTPHeader& header) { - if (arrival_time_ms < 0 || arrival_time_ms > kMaxTimeMs) { + if (arrival_time_ms < 0 || arrival_time_ms >= kMaxTimeMs) { RTC_LOG(LS_WARNING) << "Arrival time out of bounds: " << arrival_time_ms; return; } + Packet packet = {.arrival_time = Timestamp::Millis(arrival_time_ms), + .size = DataSize::Bytes(header.headerLength + payload_size), + .ssrc = header.ssrc}; + if (header.extension.hasTransportSequenceNumber) { + packet.transport_sequence_number = header.extension.transportSequenceNumber; + } + if (header.extension.hasAbsoluteSendTime) { + packet.absolute_send_time_24bits = header.extension.absoluteSendTime; + } + packet.feedback_request = header.extension.feedback_request; + + IncomingPacket(packet); +} + +void RemoteEstimatorProxy::IncomingPacket(Packet packet) { MutexLock lock(&lock_); - media_ssrc_ = header.ssrc; + media_ssrc_ = packet.ssrc; int64_t seq = 0; - if (header.extension.hasTransportSequenceNumber) { - seq = unwrapper_.Unwrap(header.extension.transportSequenceNumber); + if (packet.transport_sequence_number.has_value()) { + seq = unwrapper_.Unwrap(*packet.transport_sequence_number); if (send_periodic_feedback_) { - MaybeCullOldPackets(seq, arrival_time_ms); + MaybeCullOldPackets(seq, packet.arrival_time); if (!periodic_window_start_seq_ || seq < *periodic_window_start_seq_) { periodic_window_start_seq_ = seq; @@ -90,7 +121,7 @@ void RemoteEstimatorProxy::IncomingPacket(int64_t arrival_time_ms, return; } - packet_arrival_times_.AddPacket(seq, arrival_time_ms); + packet_arrival_times_.AddPacket(seq, packet.arrival_time); // Limit the range of sequence numbers to send feedback for. if (!periodic_window_start_seq_.has_value() || @@ -100,56 +131,38 @@ void RemoteEstimatorProxy::IncomingPacket(int64_t arrival_time_ms, packet_arrival_times_.begin_sequence_number(); } - if (header.extension.feedback_request) { + if (packet.feedback_request) { // Send feedback packet immediately. - SendFeedbackOnRequest(seq, header.extension.feedback_request.value()); + SendFeedbackOnRequest(seq, *packet.feedback_request); } } - if (network_state_estimator_ && header.extension.hasAbsoluteSendTime) { + if (network_state_estimator_ && packet.absolute_send_time_24bits) { PacketResult packet_result; - packet_result.receive_time = Timestamp::Millis(arrival_time_ms); - // Ignore reordering of packets and assume they have approximately the - // same send time. - abs_send_timestamp_ += std::max( - header.extension.GetAbsoluteSendTimeDelta(previous_abs_send_time_), - TimeDelta::Millis(0)); - previous_abs_send_time_ = header.extension.absoluteSendTime; + packet_result.receive_time = packet.arrival_time; + abs_send_timestamp_ += GetAbsoluteSendTimeDelta( + *packet.absolute_send_time_24bits, previous_abs_send_time_); + previous_abs_send_time_ = *packet.absolute_send_time_24bits; packet_result.sent_packet.send_time = abs_send_timestamp_; - // TODO(webrtc:10742): Take IP header and transport overhead into account. 
- packet_result.sent_packet.size = - DataSize::Bytes(header.headerLength + payload_size); + packet_result.sent_packet.size = packet.size + packet_overhead_; packet_result.sent_packet.sequence_number = seq; network_state_estimator_->OnReceivedPacket(packet_result); } } -bool RemoteEstimatorProxy::LatestEstimate(std::vector* ssrcs, - unsigned int* bitrate_bps) const { - return false; -} - -int64_t RemoteEstimatorProxy::TimeUntilNextProcess() { +TimeDelta RemoteEstimatorProxy::Process(Timestamp now) { MutexLock lock(&lock_); if (!send_periodic_feedback_) { - // Wait a day until next process. - return 24 * 60 * 60 * 1000; - } else if (last_process_time_ms_ != -1) { - int64_t now = clock_->TimeInMilliseconds(); - if (now - last_process_time_ms_ < send_interval_ms_) - return last_process_time_ms_ + send_interval_ms_ - now; + return TimeDelta::PlusInfinity(); } - return 0; -} - -void RemoteEstimatorProxy::Process() { - MutexLock lock(&lock_); - if (!send_periodic_feedback_) { - return; + Timestamp next_process_time = last_process_time_ + send_interval_; + if (now >= next_process_time) { + last_process_time_ = now; + SendPeriodicFeedbacks(); + return send_interval_; } - last_process_time_ms_ = clock_->TimeInMilliseconds(); - SendPeriodicFeedbacks(); + return next_process_time - now; } void RemoteEstimatorProxy::OnBitrateChanged(int bitrate_bps) { @@ -158,18 +171,23 @@ void RemoteEstimatorProxy::OnBitrateChanged(int bitrate_bps) { // TwccReport size at 50ms interval is 24 byte. // TwccReport size at 250ms interval is 36 byte. // AverageTwccReport = (TwccReport(50ms) + TwccReport(250ms)) / 2 - constexpr int kTwccReportSize = 20 + 8 + 10 + 30; - const double kMinTwccRate = - kTwccReportSize * 8.0 * 1000.0 / send_config_.max_interval->ms(); - const double kMaxTwccRate = - kTwccReportSize * 8.0 * 1000.0 / send_config_.min_interval->ms(); + constexpr DataSize kTwccReportSize = DataSize::Bytes(20 + 8 + 10 + 30); + const DataRate kMinTwccRate = + kTwccReportSize / send_config_.max_interval.Get(); // Let TWCC reports occupy 5% of total bandwidth. + DataRate twcc_bitrate = + DataRate::BitsPerSec(send_config_.bandwidth_fraction * bitrate_bps); + + // Check upper send_interval bound by checking bitrate to avoid overflow when + // dividing by small bitrate, in particular avoid dividing by zero bitrate. + TimeDelta send_interval = twcc_bitrate <= kMinTwccRate + ? send_config_.max_interval.Get() + : std::max(kTwccReportSize / twcc_bitrate, + send_config_.min_interval.Get()); + MutexLock lock(&lock_); - send_interval_ms_ = static_cast( - 0.5 + kTwccReportSize * 8.0 * 1000.0 / - rtc::SafeClamp(send_config_.bandwidth_fraction * bitrate_bps, - kMinTwccRate, kMaxTwccRate)); + send_interval_ = send_interval; } void RemoteEstimatorProxy::SetSendPeriodicFeedback( @@ -178,6 +196,11 @@ void RemoteEstimatorProxy::SetSendPeriodicFeedback( send_periodic_feedback_ = send_periodic_feedback; } +void RemoteEstimatorProxy::SetTransportOverhead(DataSize overhead_per_packet) { + MutexLock lock(&lock_); + packet_overhead_ = overhead_per_packet; +} + void RemoteEstimatorProxy::SendPeriodicFeedbacks() { // `periodic_window_start_seq_` is the first sequence number to include in // the current feedback packet. 
Some older may still be in the map, in case @@ -268,29 +291,28 @@ RemoteEstimatorProxy::MaybeBuildFeedbackPacket( int64_t next_sequence_number = begin_sequence_number_inclusive; for (int64_t seq = start_seq; seq < end_seq; ++seq) { - int64_t arrival_time_ms = packet_arrival_times_.get(seq); - if (arrival_time_ms == 0) { - // Packet not received. - continue; + PacketArrivalTimeMap::PacketArrivalTime packet = + packet_arrival_times_.FindNextAtOrAfter(seq); + seq = packet.sequence_number; + if (seq >= end_seq) { + break; } if (feedback_packet == nullptr) { feedback_packet = std::make_unique(include_timestamps); - // TODO(sprang): Measure receive times in microseconds and remove the - // conversions below. feedback_packet->SetMediaSsrc(media_ssrc_); // Base sequence number is the expected first sequence number. This is // known, but we might not have actually received it, so the base time // shall be the time of the first received packet in the feedback. feedback_packet->SetBase( static_cast(begin_sequence_number_inclusive & 0xFFFF), - arrival_time_ms * 1000); + packet.arrival_time); feedback_packet->SetFeedbackSequenceNumber(feedback_packet_count_++); } if (!feedback_packet->AddReceivedPacket(static_cast(seq & 0xFFFF), - arrival_time_ms * 1000)) { + packet.arrival_time)) { // Could not add timestamp, feedback packet might be full. Return and // try again with a fresh packet. break; diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h index 4e9b2b5631..509ad0ba02 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h @@ -16,48 +16,57 @@ #include #include +#include "absl/types/optional.h" +#include "api/field_trials_view.h" +#include "api/rtp_headers.h" #include "api/transport/network_control.h" -#include "api/transport/webrtc_key_value_config.h" -#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/remote_bitrate_estimator/packet_arrival_map.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/sequence_number_util.h" #include "rtc_base/synchronization/mutex.h" namespace webrtc { -class Clock; -namespace rtcp { -class TransportFeedback; -} - // Class used when send-side BWE is enabled: This proxy is instantiated on the // receive side. It buffers a number of receive timestamps and then sends // transport feedback messages back too the send side. -class RemoteEstimatorProxy : public RemoteBitrateEstimator { +class RemoteEstimatorProxy { public: // Used for sending transport feedback messages when send side // BWE is used. 
   using TransportFeedbackSender = std::function<void(
       std::vector<std::unique_ptr<rtcp::RtcpPacket>> packets)>;
-  RemoteEstimatorProxy(Clock* clock,
-                       TransportFeedbackSender feedback_sender,
-                       const WebRtcKeyValueConfig* key_value_config,
+  RemoteEstimatorProxy(TransportFeedbackSender feedback_sender,
+                       const FieldTrialsView* key_value_config,
                        NetworkStateEstimator* network_state_estimator);
-  ~RemoteEstimatorProxy() override;
+  ~RemoteEstimatorProxy();
+
+  struct Packet {
+    Timestamp arrival_time;
+    DataSize size;
+    uint32_t ssrc;
+    absl::optional<uint32_t> absolute_send_time_24bits;
+    absl::optional<uint16_t> transport_sequence_number;
+    absl::optional<FeedbackRequest> feedback_request;
+  };
+  void IncomingPacket(Packet packet);
 
   void IncomingPacket(int64_t arrival_time_ms,
                       size_t payload_size,
-                      const RTPHeader& header) override;
-  void RemoveStream(uint32_t ssrc) override {}
-  bool LatestEstimate(std::vector<unsigned int>* ssrcs,
-                      unsigned int* bitrate_bps) const override;
-  void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override {}
-  void SetMinBitrate(int min_bitrate_bps) override {}
-  int64_t TimeUntilNextProcess() override;
-  void Process() override;
+                      const RTPHeader& header);
+
+  // Sends periodic feedback if it is time to send it.
+  // Returns time until next call to Process should be made.
+  TimeDelta Process(Timestamp now);
+
   void OnBitrateChanged(int bitrate);
   void SetSendPeriodicFeedback(bool send_periodic_feedback);
+  void SetTransportOverhead(DataSize overhead_per_packet);
 
  private:
   struct TransportWideFeedbackConfig {
@@ -68,7 +77,7 @@ class RemoteEstimatorProxy : public RemoteBitrateEstimator {
                                                    TimeDelta::Millis(100)};
     FieldTrialParameter<double> bandwidth_fraction{"frac", 0.05};
     explicit TransportWideFeedbackConfig(
-        const WebRtcKeyValueConfig* key_value_config) {
+        const FieldTrialsView* key_value_config) {
       ParseFieldTrial({&back_window, &min_interval, &max_interval,
                        &default_interval, &bandwidth_fraction},
                       key_value_config->Lookup(
@@ -76,7 +85,7 @@
     }
   };
 
-  void MaybeCullOldPackets(int64_t sequence_number, int64_t arrival_time_ms)
+  void MaybeCullOldPackets(int64_t sequence_number, Timestamp arrival_time)
       RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_);
   void SendPeriodicFeedbacks() RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_);
   void SendFeedbackOnRequest(int64_t sequence_number,
@@ -101,10 +110,9 @@
                                int64_t end_sequence_number_exclusive,
                                bool is_periodic_update)
       RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_);
-  Clock* const clock_;
   const TransportFeedbackSender feedback_sender_;
   const TransportWideFeedbackConfig send_config_;
-  int64_t last_process_time_ms_;
+  Timestamp last_process_time_;
 
   Mutex lock_;
   // `network_state_estimator_` may be null.
@@ -113,6 +121,7 @@
   uint32_t media_ssrc_ RTC_GUARDED_BY(&lock_);
   uint8_t feedback_packet_count_ RTC_GUARDED_BY(&lock_);
   SeqNumUnwrapper<uint16_t> unwrapper_ RTC_GUARDED_BY(&lock_);
+  DataSize packet_overhead_ RTC_GUARDED_BY(&lock_);
 
   // The next sequence number that should be the start sequence number during
   // periodic reporting. Will be absl::nullopt before the first seen packet.
@@ -121,7 +130,7 @@
   // Packet arrival times, by sequence number.
   PacketArrivalTimeMap packet_arrival_times_ RTC_GUARDED_BY(&lock_);
 
-  int64_t send_interval_ms_ RTC_GUARDED_BY(&lock_);
+  TimeDelta send_interval_ RTC_GUARDED_BY(&lock_);
   bool send_periodic_feedback_ RTC_GUARDED_BY(&lock_);
 
   // Unwraps absolute send times.
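The OnBitrateChanged() rewrite earlier in remote_estimator_proxy.cc expresses the feedback-interval rule in DataRate/TimeDelta units: the interval is the average report size divided by the share of the bitrate that feedback may use, clamped to the configured bounds, and the low-bitrate case short-circuits to the maximum interval so nothing is divided by a zero or near-zero rate. A standalone sketch of that rule in plain C++ (not the WebRTC API; the 5% fraction comes from the diff, while the 50 ms and 250 ms bounds are assumed defaults taken from the report-size comment):

#include <algorithm>
#include <cstdio>
#include <initializer_list>

int main() {
  const double kReportBytes = 20 + 8 + 10 + 30;  // average TWCC report size
  const double kFraction = 0.05;                 // feedback may use 5% of bitrate
  const double kMinIntervalS = 0.050;            // assumed default bounds
  const double kMaxIntervalS = 0.250;
  const double kMinTwccBps = kReportBytes * 8 / kMaxIntervalS;

  for (double bitrate_bps : {0.0, 100e3, 1e6, 10e6}) {
    double twcc_bps = kFraction * bitrate_bps;
    // Low (or zero) bitrate: fall back to the maximum interval instead of
    // dividing by a tiny rate.
    double interval_s =
        twcc_bps <= kMinTwccBps
            ? kMaxIntervalS
            : std::max(kReportBytes * 8 / twcc_bps, kMinIntervalS);
    std::printf("bitrate %9.0f bps -> feedback every %5.1f ms\n", bitrate_bps,
                interval_s * 1000);
  }
  return 0;
}

With these numbers, 100 kbit/s yields roughly a 173 ms interval, while 1 Mbit/s and above are clamped to the 50 ms minimum.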
diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.cc index f99576f59a..c8f6faa127 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.cc @@ -12,13 +12,13 @@ #if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE +#include #include #include #include #include "rtc_base/checks.h" -#include "rtc_base/format_macros.h" #include "rtc_base/platform_thread.h" #include "rtc_base/strings/string_builder.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h index e85565596d..49e1e716b2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h @@ -128,7 +128,6 @@ #include #include -#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #define BWE_TEST_LOGGING_GLOBAL_CONTEXT(name) \ @@ -341,6 +340,10 @@ class Logging { Logging(); ~Logging(); + + Logging(const Logging&) = delete; + Logging& operator=(const Logging&) = delete; + void PushState(const std::string& append_to_tag, int64_t timestamp_ms, bool enabled); @@ -348,8 +351,6 @@ class Logging { Mutex mutex_; ThreadMap thread_map_; - - RTC_DISALLOW_COPY_AND_ASSIGN(Logging); }; } // namespace bwe } // namespace testing diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc index 98f502a42e..e8dc59f740 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc @@ -16,7 +16,6 @@ #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet.h" -#include "rtc_base/format_macros.h" #include "rtc_base/strings/string_builder.h" #include "test/rtp_file_reader.h" @@ -52,10 +51,10 @@ int main(int argc, char* argv[]) { ss << static_cast(packet.time_ms) * 1000000; fprintf(stdout, "%s\n", ss.str().c_str()); } else { - fprintf(stdout, "%u %u %d %u %u %d %u %" RTC_PRIuS " %" RTC_PRIuS "\n", - header.SequenceNumber(), header.Timestamp(), toffset, - abs_send_time, packet.time_ms, header.Marker(), header.Ssrc(), - packet.length, packet.original_length); + fprintf(stdout, "%u %u %d %u %u %d %u %zu %zu\n", header.SequenceNumber(), + header.Timestamp(), toffset, abs_send_time, packet.time_ms, + header.Marker(), header.Ssrc(), packet.length, + packet.original_length); } ++packet_counter; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/flexfec_receiver.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/flexfec_receiver.h index b0caea68ff..d34ba74620 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/flexfec_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/flexfec_receiver.h @@ -17,9 +17,9 @@ #include "api/sequence_checker.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/include/ulpfec_receiver.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" 
#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/ulpfec_receiver.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/flexfec_sender.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/flexfec_sender.h index 737593e04c..f0acfe6c3d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/flexfec_sender.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/flexfec_sender.h @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/rtp_parameters.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" @@ -39,7 +40,7 @@ class FlexfecSender : public VideoFecGenerator { FlexfecSender(int payload_type, uint32_t ssrc, uint32_t protected_media_ssrc, - const std::string& mid, + absl::string_view mid, const std::vector& rtp_header_extensions, rtc::ArrayView extension_sizes, const RtpState* rtp_state, diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/receive_statistics.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/receive_statistics.h index f973b7cf4f..827fd3a7a8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/receive_statistics.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/receive_statistics.h @@ -43,8 +43,9 @@ class StreamStatistician { // Returns average over the stream life time. virtual absl::optional GetFractionLostInPercent() const = 0; - // TODO(nisse): Delete, migrate users to the above the GetStats method. - // Gets received stream data counters (includes reset counter values). + // TODO(bugs.webrtc.org/10679): Delete, migrate users to the above GetStats + // method (and extend RtpReceiveStats if needed). + // Gets receive stream data counters. virtual StreamDataCounters GetReceiveStreamDataCounters() const = 0; virtual uint32_t BitrateReceived() const = 0; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h index 5734a50e14..01d0c85f94 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h @@ -14,8 +14,9 @@ #include #include "absl/types/optional.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/numerics/moving_median_filter.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/numerics/moving_percentile_filter.h" #include "system_wrappers/include/rtp_to_ntp_estimator.h" namespace webrtc { @@ -29,30 +30,43 @@ class Clock; class RemoteNtpTimeEstimator { public: explicit RemoteNtpTimeEstimator(Clock* clock); + RemoteNtpTimeEstimator(const RemoteNtpTimeEstimator&) = delete; + RemoteNtpTimeEstimator& operator=(const RemoteNtpTimeEstimator&) = delete; + ~RemoteNtpTimeEstimator() = default; - ~RemoteNtpTimeEstimator(); - - // Updates the estimator with round trip time `rtt`, NTP seconds `ntp_secs`, - // NTP fraction `ntp_frac` and RTP timestamp `rtp_timestamp`. - bool UpdateRtcpTimestamp(int64_t rtt, - uint32_t ntp_secs, - uint32_t ntp_frac, + // Updates the estimator with round trip time `rtt` and + // new NTP time <-> RTP timestamp mapping from an RTCP sender report. 
+  bool UpdateRtcpTimestamp(TimeDelta rtt,
+                           NtpTime sender_send_time,
                            uint32_t rtp_timestamp);
 
   // Estimates the NTP timestamp in local timebase from `rtp_timestamp`.
   // Returns the NTP timestamp in ms when success. -1 if failed.
-  int64_t Estimate(uint32_t rtp_timestamp);
+  int64_t Estimate(uint32_t rtp_timestamp) {
+    NtpTime ntp_time = EstimateNtp(rtp_timestamp);
+    if (!ntp_time.Valid()) {
+      return -1;
+    }
+    return ntp_time.ToMs();
+  }
+
+  // Estimates the NTP timestamp in local timebase from `rtp_timestamp`.
+  // Returns invalid NtpTime (i.e. NtpTime(0)) on failure.
+  NtpTime EstimateNtp(uint32_t rtp_timestamp);
 
-  // Estimates the offset, in milliseconds, between the remote clock and the
+  // Estimates the offset between the remote clock and the
   // local one. This is equal to local NTP clock - remote NTP clock.
-  absl::optional<int64_t> EstimateRemoteToLocalClockOffsetMs();
+  // The offset is returned in ntp time resolution, i.e. 1/2^32 sec ~= 0.2 ns.
+  // Returns nullopt on failure.
+  absl::optional<int64_t> EstimateRemoteToLocalClockOffset();
 
  private:
   Clock* clock_;
+  // Offset is measured with the same precision as NtpTime: in 1/2^32 seconds ~=
+  // 0.2 ns.
   MovingMedianFilter<int64_t> ntp_clocks_offset_estimator_;
   RtpToNtpEstimator rtp_to_ntp_;
-  int64_t last_timing_log_ms_;
-  RTC_DISALLOW_COPY_AND_ASSIGN(RemoteNtpTimeEstimator);
+  Timestamp last_timing_log_ = Timestamp::MinusInfinity();
 };
 
 }  // namespace webrtc
diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtcp_statistics.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtcp_statistics.h
index de70c14943..6d6246d8a8 100644
--- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtcp_statistics.h
+++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtcp_statistics.h
@@ -20,8 +20,7 @@ namespace webrtc {
 // Statistics for RTCP packet types.
 struct RtcpPacketTypeCounter {
   RtcpPacketTypeCounter()
-      : first_packet_time_ms(-1),
-        nack_packets(0),
+      : nack_packets(0),
         fir_packets(0),
         pli_packets(0),
         nack_requests(0),
@@ -33,12 +32,6 @@
     pli_packets += other.pli_packets;
     nack_requests += other.nack_requests;
     unique_nack_requests += other.unique_nack_requests;
-    if (other.first_packet_time_ms != -1 &&
-        (other.first_packet_time_ms < first_packet_time_ms ||
-         first_packet_time_ms == -1)) {
-      // Use oldest time.
-      first_packet_time_ms = other.first_packet_time_ms;
-    }
   }
 
   void Subtract(const RtcpPacketTypeCounter& other) {
@@ -47,16 +40,6 @@
     pli_packets -= other.pli_packets;
     nack_requests -= other.nack_requests;
     unique_nack_requests -= other.unique_nack_requests;
-    if (other.first_packet_time_ms != -1 &&
-        (other.first_packet_time_ms > first_packet_time_ms ||
-         first_packet_time_ms == -1)) {
-      // Use youngest time.
-      first_packet_time_ms = other.first_packet_time_ms;
-    }
-  }
-
-  int64_t TimeSinceFirstPacketInMs(int64_t now_ms) const {
-    return (first_packet_time_ms == -1) ? -1 : (now_ms - first_packet_time_ms);
   }
 
   int UniqueNackRequestsInPercent() const {
@@ -67,7 +50,6 @@
                      0.5f);
   }
 
-  int64_t first_packet_time_ms;  // Time when first packet is sent/received.
   uint32_t nack_packets;   // Number of RTCP NACK packets.
   uint32_t fir_packets;    // Number of RTCP FIR packets.
   uint32_t pli_packets;    // Number of RTCP PLI packets.
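For the RemoteNtpTimeEstimator change above: the estimated clock offset is now kept in NtpTime's Q32.32 fixed-point form, where the low 32 bits count fractions of a second, so one unit is 1/2^32 s, roughly 0.23 ns. A minimal standalone conversion sketch in plain C++ (not the WebRTC NtpTime API; the helper names are illustrative only):

#include <cstdint>
#include <cstdio>

// One Q32.32 unit is 1/2^32 of a second.
int64_t MillisToQ32x32(double ms) {
  return static_cast<int64_t>(ms / 1000.0 * (int64_t{1} << 32));
}

double Q32x32ToMillis(int64_t ntp_units) {
  return static_cast<double>(ntp_units) / (int64_t{1} << 32) * 1000.0;
}

int main() {
  std::printf("1 NTP unit = %.3g ns\n", 1e9 / (int64_t{1} << 32));
  // Example: the local clock trails the remote clock by 12.5 ms, so
  // local - remote is negative.
  int64_t offset = MillisToQ32x32(-12.5);
  std::printf("offset = %lld units = %.3f ms\n",
              static_cast<long long>(offset), Q32x32ToMillis(offset));
  return 0;
}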
diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h index 727fc6e649..c71d7f0c3d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h @@ -14,13 +14,12 @@ #include #include "absl/base/attributes.h" -#include "modules/include/module.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" namespace webrtc { // DEPRECATED. Do not use. -class RtpRtcp : public Module, public RtpRtcpInterface { +class RtpRtcp : public RtpRtcpInterface { public: // Instantiates a deprecated version of the RtpRtcp module. static std::unique_ptr ABSL_DEPRECATED("") @@ -36,6 +35,9 @@ class RtpRtcp : public Module, public RtpRtcpInterface { void SendPictureLossIndication() { SendRTCP(kRtcpPli); } // using FIR, https://tools.ietf.org/html/rfc5104#section-4.3.1.2 void SendFullIntraRequest() { SendRTCP(kRtcpFir); } + + // Process any pending tasks such as timeouts. + virtual void Process() = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.cc index 78e730f3c9..e4aec93696 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.cc @@ -10,7 +10,6 @@ #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include #include #include @@ -18,6 +17,7 @@ #include "absl/algorithm/container.h" #include "api/array_view.h" #include "modules/rtp_rtcp/source/rtp_packet.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" namespace webrtc { @@ -53,6 +53,12 @@ RtpPacketCounter::RtpPacketCounter(const RtpPacket& packet) padding_bytes(packet.padding_size()), packets(1) {} +RtpPacketCounter::RtpPacketCounter(const RtpPacketToSend& packet_to_send) + : RtpPacketCounter(static_cast(packet_to_send)) { + total_packet_delay = + packet_to_send.time_in_send_queue().value_or(TimeDelta::Zero()); +} + void RtpPacketCounter::AddPacket(const RtpPacket& packet) { ++packets; header_bytes += packet.headers_size(); @@ -60,4 +66,10 @@ void RtpPacketCounter::AddPacket(const RtpPacket& packet) { payload_bytes += packet.payload_size(); } +void RtpPacketCounter::AddPacket(const RtpPacketToSend& packet_to_send) { + AddPacket(static_cast(packet_to_send)); + total_packet_delay += + packet_to_send.time_in_send_queue().value_or(TimeDelta::Zero()); +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h index 45cb659b5f..1797b3ebc1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h @@ -25,6 +25,7 @@ #include "api/audio_codecs/audio_format.h" #include "api/rtp_headers.h" #include "api/transport/network_types.h" +#include "api/units/time_delta.h" #include "modules/rtp_rtcp/source/rtcp_packet/remote_estimate.h" #include "system_wrappers/include/clock.h" @@ -103,6 +104,12 @@ enum RTCPPacketType : uint32_t { kRtcpXrTargetBitrate = 0x200000 }; +enum class KeyFrameReqMethod : uint8_t { + kNone, // Don't request keyframes. + kPliRtcp, // Request keyframes through Picture Loss Indication. + kFirRtcp // Request keyframes through Full Intra-frame Request. 
+}; + enum RtxMode { kRtxOff = 0x0, kRtxRetransmitted = 0x1, // Only send retransmissions over RTX. @@ -305,12 +312,14 @@ struct RtpPacketCounter { : header_bytes(0), payload_bytes(0), padding_bytes(0), packets(0) {} explicit RtpPacketCounter(const RtpPacket& packet); + explicit RtpPacketCounter(const RtpPacketToSend& packet_to_send); void Add(const RtpPacketCounter& other) { header_bytes += other.header_bytes; payload_bytes += other.payload_bytes; padding_bytes += other.padding_bytes; packets += other.packets; + total_packet_delay += other.total_packet_delay; } void Subtract(const RtpPacketCounter& other) { @@ -322,16 +331,20 @@ struct RtpPacketCounter { padding_bytes -= other.padding_bytes; RTC_DCHECK_GE(packets, other.packets); packets -= other.packets; + RTC_DCHECK_GE(total_packet_delay, other.total_packet_delay); + total_packet_delay -= other.total_packet_delay; } bool operator==(const RtpPacketCounter& other) const { return header_bytes == other.header_bytes && payload_bytes == other.payload_bytes && - padding_bytes == other.padding_bytes && packets == other.packets; + padding_bytes == other.padding_bytes && packets == other.packets && + total_packet_delay == other.total_packet_delay; } // Not inlined, since use of RtpPacket would result in circular includes. void AddPacket(const RtpPacket& packet); + void AddPacket(const RtpPacketToSend& packet_to_send); size_t TotalBytes() const { return header_bytes + payload_bytes + padding_bytes; @@ -341,6 +354,9 @@ struct RtpPacketCounter { size_t payload_bytes; // Payload bytes, excluding RTP headers and padding. size_t padding_bytes; // Number of padding bytes. uint32_t packets; // Number of packets. + // The total delay of all `packets`. For RtpPacketToSend packets, this is + // `time_in_send_queue()`. For receive packets, this is zero. + webrtc::TimeDelta total_packet_delay = webrtc::TimeDelta::Zero(); }; // Data usage statistics for a (rtp) stream. @@ -436,7 +452,10 @@ struct RtpReceiveStats { // RTCReceivedRtpStreamStats dictionary, see // https://w3c.github.io/webrtc-stats/#receivedrtpstats-dict* int32_t packets_lost = 0; + // Interarrival jitter in samples. uint32_t jitter = 0; + // Interarrival jitter in time. + webrtc::TimeDelta interarrival_jitter = webrtc::TimeDelta::Zero(); // Timestamp and counters exposed in RTCInboundRtpStreamStats, see // https://w3c.github.io/webrtc-stats/#inboundrtpstats-dict* @@ -460,7 +479,6 @@ class SendSideDelayObserver { virtual ~SendSideDelayObserver() {} virtual void SendSideDelayUpdated(int avg_delay_ms, int max_delay_ms, - uint64_t total_delay_ms, uint32_t ssrc) = 0; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/ulpfec_receiver.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/ulpfec_receiver.h deleted file mode 100644 index 6cbae52c99..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/ulpfec_receiver.h +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_RTP_RTCP_INCLUDE_ULPFEC_RECEIVER_H_ -#define MODULES_RTP_RTCP_INCLUDE_ULPFEC_RECEIVER_H_ - -#include - -#include "api/array_view.h" -#include "api/rtp_parameters.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/rtp_packet_received.h" - -namespace webrtc { - -struct FecPacketCounter { - FecPacketCounter() = default; - size_t num_packets = 0; // Number of received packets. - size_t num_bytes = 0; - size_t num_fec_packets = 0; // Number of received FEC packets. - size_t num_recovered_packets = - 0; // Number of recovered media packets using FEC. - int64_t first_packet_time_ms = -1; // Time when first packet is received. -}; - -class UlpfecReceiver { - public: - static std::unique_ptr Create( - uint32_t ssrc, - RecoveredPacketReceiver* callback, - rtc::ArrayView extensions); - - virtual ~UlpfecReceiver() {} - - // Takes a RED packet, strips the RED header, and adds the resulting - // "virtual" RTP packet(s) into the internal buffer. - // - // TODO(brandtr): Set `ulpfec_payload_type` during constructor call, - // rather than as a parameter here. - virtual bool AddReceivedRedPacket(const RtpPacketReceived& rtp_packet, - uint8_t ulpfec_payload_type) = 0; - - // Sends the received packets to the FEC and returns all packets - // (both original media and recovered) through the callback. - virtual int32_t ProcessReceivedFec() = 0; - - // Returns a counter describing the added and recovered packets. - virtual FecPacketCounter GetPacketCounter() const = 0; - - virtual void SetRtpExtensions( - rtc::ArrayView extensions) = 0; -}; -} // namespace webrtc -#endif // MODULES_RTP_RTCP_INCLUDE_ULPFEC_RECEIVER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h index e1ea45ed1f..5c93e0bfed 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h @@ -17,9 +17,9 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/video/video_bitrate_allocation.h" -#include "modules/include/module.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" @@ -66,8 +66,7 @@ class MockRtpRtcpInterface : public RtpRtcpInterface { MOCK_METHOD(RtpState, GetRtpState, (), (const, override)); MOCK_METHOD(RtpState, GetRtxState, (), (const, override)); MOCK_METHOD(uint32_t, SSRC, (), (const, override)); - MOCK_METHOD(void, SetRid, (const std::string& rid), (override)); - MOCK_METHOD(void, SetMid, (const std::string& mid), (override)); + MOCK_METHOD(void, SetMid, (absl::string_view mid), (override)); MOCK_METHOD(void, SetCsrcs, (const std::vector& csrcs), (override)); MOCK_METHOD(void, SetRtxSendStatus, (int modes), (override)); MOCK_METHOD(int, RtxSendStatus, (), (const, override)); @@ -98,6 +97,10 @@ class MockRtpRtcpInterface : public RtpRtcpInterface { FetchFecPackets, (), (override)); + MOCK_METHOD(void, + OnAbortedRetransmissions, + (rtc::ArrayView), + (override)); MOCK_METHOD(void, OnPacketsAcknowledged, (rtc::ArrayView), @@ -114,10 +117,7 @@ class MockRtpRtcpInterface : public RtpRtcpInterface { MOCK_METHOD(void, OnPacketSendingThreadSwitched, (), (override)); MOCK_METHOD(RtcpMode, RTCP, (), (const, override)); MOCK_METHOD(void, SetRTCPStatus, (RtcpMode method), (override)); - MOCK_METHOD(int32_t, - 
SetCNAME, - (const char cname[RTCP_CNAME_SIZE]), - (override)); + MOCK_METHOD(int32_t, SetCNAME, (absl::string_view cname), (override)); MOCK_METHOD(int32_t, RemoteNTP, (uint32_t * received_ntp_secs, diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc index c5c06840d2..3687669b2f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc @@ -16,6 +16,7 @@ #include "absl/strings/match.h" #include "api/transport/field_trial_based_config.h" +#include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" #include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" #include "rtc_base/logging.h" @@ -27,8 +28,7 @@ constexpr int kSendSideDelayWindowMs = 1000; constexpr int kBitrateStatisticsWindowMs = 1000; constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13; -bool IsDisabled(absl::string_view name, - const WebRtcKeyValueConfig* field_trials) { +bool IsDisabled(absl::string_view name, const FieldTrialsView* field_trials) { FieldTrialBasedConfig default_trials; auto& trials = field_trials ? *field_trials : default_trials; return absl::StartsWith(trials.Lookup(name), "Disabled"); @@ -90,7 +90,6 @@ DEPRECATED_RtpSenderEgress::DEPRECATED_RtpSenderEgress( timestamp_offset_(0), max_delay_it_(send_delays_.end()), sum_delays_ms_(0), - total_packet_send_delay_ms_(0), send_rates_(kNumMediaTypes, {kBitrateStatisticsWindowMs, RateStatistics::kBpsScale}), rtp_sequence_number_map_(need_rtp_packet_infos_ @@ -106,7 +105,8 @@ void DEPRECATED_RtpSenderEgress::SendPacket( const uint32_t packet_ssrc = packet->Ssrc(); RTC_DCHECK(packet->packet_type().has_value()); RTC_DCHECK(HasCorrectSsrc(*packet)); - int64_t now_ms = clock_->TimeInMilliseconds(); + Timestamp now = clock_->CurrentTime(); + int64_t now_ms = now.ms(); if (is_audio_) { #if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE @@ -156,20 +156,19 @@ void DEPRECATED_RtpSenderEgress::SendPacket( // In case of VideoTimingExtension, since it's present not in every packet, // data after rtp header may be corrupted if these packets are protected by // the FEC. - int64_t diff_ms = now_ms - packet->capture_time_ms(); + int64_t diff_ms = now_ms - packet->capture_time().ms(); if (packet->HasExtension()) { packet->SetExtension(kTimestampTicksPerMs * diff_ms); } if (packet->HasExtension()) { - packet->SetExtension( - AbsoluteSendTime::MsTo24Bits(now_ms)); + packet->SetExtension(AbsoluteSendTime::To24Bits(now)); } if (packet->HasExtension()) { if (populate_network2_timestamp_) { - packet->set_network2_time_ms(now_ms); + packet->set_network2_time(now); } else { - packet->set_pacer_exit_time_ms(now_ms); + packet->set_pacer_exit_time(now); } } @@ -190,8 +189,8 @@ void DEPRECATED_RtpSenderEgress::SendPacket( if (packet->packet_type() != RtpPacketMediaType::kPadding && packet->packet_type() != RtpPacketMediaType::kRetransmission) { - UpdateDelayStatistics(packet->capture_time_ms(), now_ms, packet_ssrc); - UpdateOnSendPacket(options.packet_id, packet->capture_time_ms(), + UpdateDelayStatistics(packet->capture_time().ms(), now_ms, packet_ssrc); + UpdateOnSendPacket(options.packet_id, packet->capture_time().ms(), packet_ssrc); } @@ -201,7 +200,7 @@ void DEPRECATED_RtpSenderEgress::SendPacket( // actual sending fails. 
if (is_media && packet->allow_retransmission()) { packet_history_->PutRtpPacket(std::make_unique(*packet), - now_ms); + now); } else if (packet->retransmitted_sequence_number()) { packet_history_->MarkPacketAsSent(*packet->retransmitted_sequence_number()); } @@ -341,7 +340,6 @@ void DEPRECATED_RtpSenderEgress::UpdateDelayStatistics(int64_t capture_time_ms, int avg_delay_ms = 0; int max_delay_ms = 0; - uint64_t total_packet_send_delay_ms = 0; { MutexLock lock(&lock_); // Compute the max and average of the recent capture-to-send delays. @@ -392,8 +390,6 @@ void DEPRECATED_RtpSenderEgress::UpdateDelayStatistics(int64_t capture_time_ms, max_delay_it_ = it; } sum_delays_ms_ += new_send_delay; - total_packet_send_delay_ms_ += new_send_delay; - total_packet_send_delay_ms = total_packet_send_delay_ms_; size_t num_delays = send_delays_.size(); RTC_DCHECK(max_delay_it_ != send_delays_.end()); @@ -405,8 +401,8 @@ void DEPRECATED_RtpSenderEgress::UpdateDelayStatistics(int64_t capture_time_ms, avg_delay_ms = rtc::dchecked_cast((sum_delays_ms_ + num_delays / 2) / num_delays); } - send_side_delay_observer_->SendSideDelayUpdated( - avg_delay_ms, max_delay_ms, total_packet_send_delay_ms, ssrc); + send_side_delay_observer_->SendSideDelayUpdated(avg_delay_ms, max_delay_ms, + ssrc); } void DEPRECATED_RtpSenderEgress::RecomputeMaxSendDelay() { diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h index fc440d14f2..fd5dfddc02 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h @@ -133,7 +133,6 @@ class DEPRECATED_RtpSenderEgress { SendDelayMap::const_iterator max_delay_it_ RTC_GUARDED_BY(lock_); // The sum of delays over a kSendSideDelayWindowMs sliding window. int64_t sum_delays_ms_ RTC_GUARDED_BY(lock_); - uint64_t total_packet_send_delay_ms_ RTC_GUARDED_BY(lock_); StreamDataCounters rtp_stats_ RTC_GUARDED_BY(lock_); StreamDataCounters rtx_rtp_stats_ RTC_GUARDED_BY(lock_); // One element per value in RtpPacketMediaType, with index matching value. diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_receiver.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_receiver.cc index e01b9205cf..b5198b53a6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_receiver.cc @@ -159,18 +159,28 @@ void FlexfecReceiver::ProcessReceivedPacket( // Set this flag first, since OnRecoveredPacket may end up here // again, with the same packet. recovered_packet->returned = true; - RTC_CHECK_GT(recovered_packet->pkt->data.size(), 0); + RTC_CHECK_GE(recovered_packet->pkt->data.size(), kRtpHeaderSize); recovered_packet_receiver_->OnRecoveredPacket( recovered_packet->pkt->data.cdata(), recovered_packet->pkt->data.size()); - // Periodically log the incoming packets. + uint32_t media_ssrc = + ForwardErrorCorrection::ParseSsrc(recovered_packet->pkt->data.data()); + uint16_t media_seq_num = ForwardErrorCorrection::ParseSequenceNumber( + recovered_packet->pkt->data.data()); + // Periodically log the incoming packets at LS_INFO. 
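// Illustrative sketch, not part of the patch: the ParseSsrc()/ParseSequenceNumber()
// helpers used above read fixed-offset fields of the raw RTP header
// (RFC 3550, section 5.1) — sequence number at bytes 2..3, SSRC at bytes 8..11,
// both big-endian. Function names below are assumptions, not WebRTC APIs.
#include <cstdint>

uint16_t ReadRtpSequenceNumber(const uint8_t* rtp_header) {
  return static_cast<uint16_t>((rtp_header[2] << 8) | rtp_header[3]);
}

uint32_t ReadRtpSsrc(const uint8_t* rtp_header) {
  return (static_cast<uint32_t>(rtp_header[8]) << 24) |
         (static_cast<uint32_t>(rtp_header[9]) << 16) |
         (static_cast<uint32_t>(rtp_header[10]) << 8) |
         static_cast<uint32_t>(rtp_header[11]);
}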
int64_t now_ms = clock_->TimeInMilliseconds(); - if (now_ms - last_recovered_packet_ms_ > kPacketLogIntervalMs) { - uint32_t media_ssrc = - ForwardErrorCorrection::ParseSsrc(recovered_packet->pkt->data.data()); - RTC_LOG(LS_VERBOSE) << "Recovered media packet with SSRC: " << media_ssrc - << " from FlexFEC stream with SSRC: " << ssrc_ << "."; - last_recovered_packet_ms_ = now_ms; + bool should_log_periodically = + now_ms - last_recovered_packet_ms_ > kPacketLogIntervalMs; + if (RTC_LOG_CHECK_LEVEL(LS_VERBOSE) || should_log_periodically) { + rtc::LoggingSeverity level = + should_log_periodically ? rtc::LS_INFO : rtc::LS_VERBOSE; + RTC_LOG_V(level) << "Recovered media packet with SSRC: " << media_ssrc + << " seq " << media_seq_num << " recovered length " + << recovered_packet->pkt->data.size() + << " from FlexFEC stream with SSRC: " << ssrc_; + if (should_log_periodically) { + last_recovered_packet_ms_ = now_ms; + } } } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_sender.cc index 071829f1c0..292fe4a8dd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_sender.cc @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" @@ -71,7 +72,7 @@ FlexfecSender::FlexfecSender( int payload_type, uint32_t ssrc, uint32_t protected_media_ssrc, - const std::string& mid, + absl::string_view mid, const std::vector& rtp_header_extensions, rtc::ArrayView extension_sizes, const RtpState* rtp_state, @@ -142,7 +143,7 @@ std::vector> FlexfecSender::GetFecPackets() { clock_->TimeInMilliseconds())); // Set "capture time" so that the TransmissionOffset header extension // can be set by the RTPSender. - fec_packet_to_send->set_capture_time_ms(clock_->TimeInMilliseconds()); + fec_packet_to_send->set_capture_time(clock_->CurrentTime()); fec_packet_to_send->SetSsrc(ssrc_); // Reserve extensions, if registered. These will be set by the RTPSender. fec_packet_to_send->ReserveExtension(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc index 989fb3d58a..903d3e7d45 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc @@ -218,13 +218,11 @@ void ForwardErrorCorrection::GenerateFecPayloads( ParseSequenceNumber((*media_packets_it)->data.data()); while (media_packets_it != media_packets.end()) { Packet* const media_packet = media_packets_it->get(); - const uint8_t* media_packet_data = media_packet->data.cdata(); // Should `media_packet` be protected by `fec_packet`? if (packet_masks_[pkt_mask_idx] & (1 << (7 - media_pkt_idx))) { size_t media_payload_length = media_packet->data.size() - kRtpHeaderSize; - bool first_protected_packet = (fec_packet->data.size() == 0); size_t fec_packet_length = fec_header_size + media_payload_length; if (fec_packet_length > fec_packet->data.size()) { // Recall that XORing with zero (which the FEC packets are prefilled @@ -232,26 +230,9 @@ void ForwardErrorCorrection::GenerateFecPayloads( // still correct even though we expand the packet length here. 
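// Illustrative sketch, not part of the patch: why the dedicated "first protected
// packet" copy branch can be removed just below. FEC buffers start zero-filled,
// and x ^ 0 == x, so XOR-ing the first media packet into an all-zero buffer
// produces exactly the copy the old branch made by hand.
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

void XorInto(std::vector<uint8_t>& fec, const uint8_t* media, size_t len) {
  if (fec.size() < len) fec.resize(len, 0);  // New bytes start at zero.
  for (size_t i = 0; i < len; ++i) fec[i] ^= media[i];
}

void XorIdentityDemo() {
  const uint8_t media[] = {0x80, 0x60, 0x12, 0x34};
  std::vector<uint8_t> fec;  // Empty, i.e. all zero.
  XorInto(fec, media, sizeof(media));
  assert(std::equal(fec.begin(), fec.end(), media));  // Same as a plain copy.
}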
fec_packet->data.SetSize(fec_packet_length); } - if (first_protected_packet) { - uint8_t* data = fec_packet->data.MutableData(); - // Write P, X, CC, M, and PT recovery fields. - // Note that bits 0, 1, and 16 are overwritten in FinalizeFecHeaders. - memcpy(&data[0], &media_packet_data[0], 2); - // Write length recovery field. (This is a temporary location for - // ULPFEC.) - ByteWriter::WriteBigEndian(&data[2], media_payload_length); - // Write timestamp recovery field. - memcpy(&data[4], &media_packet_data[4], 4); - // Write payload. - if (media_payload_length > 0) { - memcpy(&data[fec_header_size], &media_packet_data[kRtpHeaderSize], - media_payload_length); - } - } else { - XorHeaders(*media_packet, fec_packet); - XorPayloads(*media_packet, media_payload_length, fec_header_size, - fec_packet); - } + XorHeaders(*media_packet, fec_packet); + XorPayloads(*media_packet, media_payload_length, fec_header_size, + fec_packet); } media_packets_it++; if (media_packets_it != media_packets.end()) { @@ -651,10 +632,10 @@ bool ForwardErrorCorrection::RecoverPacket(const ReceivedFecPacket& fec_packet, // This is the packet we're recovering. recovered_packet->seq_num = protected_packet->seq_num; } else { - XorHeaders(*protected_packet->pkt, recovered_packet->pkt); + XorHeaders(*protected_packet->pkt, recovered_packet->pkt.get()); XorPayloads(*protected_packet->pkt, protected_packet->pkt->data.size() - kRtpHeaderSize, - kRtpHeaderSize, recovered_packet->pkt); + kRtpHeaderSize, recovered_packet->pkt.get()); } } if (!FinishPacketRecovery(fec_packet, recovered_packet)) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.h index d07bb8e422..0ebe1c5091 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.h @@ -75,9 +75,6 @@ class ForwardErrorCorrection { }; // Used for the input to DecodeFec(). - // - // TODO(nisse): Delete class, instead passing `is_fec` and `pkt` as separate - // arguments. class ReceivedPacket : public SortablePacket { public: ReceivedPacket(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.cc index 037542ddbd..55edd768a8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.cc @@ -99,7 +99,7 @@ void PacketSequencer::UpdateLastPacketState(const RtpPacketToSend& packet) { // Save timestamps to generate timestamp field and extensions for the padding. 
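// Illustrative sketch, not part of the patch: what the saved timestamps are for.
// Padding sent later over RTX reuses the last media packet's stamps, advanced by
// the wall-clock time elapsed since then; kTimestampTicksPerMs is 90 for the
// usual 90 kHz video clock. Names and the free-function shape are assumptions.
#include <cstdint>

struct PaddingTimestamps {
  uint32_t rtp_timestamp;
  int64_t capture_time_ms;
};

PaddingTimestamps AdvanceForRtxPadding(uint32_t last_rtp_timestamp,
                                       int64_t last_capture_time_ms,
                                       int64_t elapsed_ms,
                                       int timestamp_ticks_per_ms = 90) {
  return {last_rtp_timestamp +
              static_cast<uint32_t>(elapsed_ms * timestamp_ticks_per_ms),
          last_capture_time_ms + elapsed_ms};
}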
last_rtp_timestamp_ = packet.Timestamp(); last_timestamp_time_ms_ = clock_->TimeInMilliseconds(); - last_capture_time_ms_ = packet.capture_time_ms(); + last_capture_time_ms_ = packet.capture_time().ms(); } void PacketSequencer::PopulatePaddingFields(RtpPacketToSend& packet) { @@ -107,7 +107,7 @@ void PacketSequencer::PopulatePaddingFields(RtpPacketToSend& packet) { RTC_DCHECK(CanSendPaddingOnMediaSsrc()); packet.SetTimestamp(last_rtp_timestamp_); - packet.set_capture_time_ms(last_capture_time_ms_); + packet.set_capture_time(Timestamp::Millis(last_capture_time_ms_)); packet.SetPayloadType(last_payload_type_); return; } @@ -119,7 +119,7 @@ void PacketSequencer::PopulatePaddingFields(RtpPacketToSend& packet) { } packet.SetTimestamp(last_rtp_timestamp_); - packet.set_capture_time_ms(last_capture_time_ms_); + packet.set_capture_time(Timestamp::Millis(last_capture_time_ms_)); // Only change the timestamp of padding packets sent over RTX. // Padding only packets over RTP has to be sent as part of a media @@ -129,9 +129,10 @@ void PacketSequencer::PopulatePaddingFields(RtpPacketToSend& packet) { packet.SetTimestamp(packet.Timestamp() + (now_ms - last_timestamp_time_ms_) * kTimestampTicksPerMs); - if (packet.capture_time_ms() > 0) { - packet.set_capture_time_ms(packet.capture_time_ms() + - (now_ms - last_timestamp_time_ms_)); + if (packet.capture_time() > Timestamp::Zero()) { + packet.set_capture_time( + packet.capture_time() + + TimeDelta::Millis(now_ms - last_timestamp_time_ms_)); } } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc index b16f122ee1..deae14e8c5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc @@ -16,34 +16,31 @@ #include #include +#include "api/units/time_delta.h" #include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/time_util.h" #include "rtc_base/logging.h" +#include "rtc_base/time_utils.h" #include "system_wrappers/include/clock.h" namespace webrtc { namespace { constexpr int64_t kStatisticsTimeoutMs = 8000; constexpr int64_t kStatisticsProcessIntervalMs = 1000; - -// Number of seconds since 1900 January 1 00:00 GMT (see -// https://tools.ietf.org/html/rfc868). 
-constexpr int64_t kNtpJan1970Millisecs = 2'208'988'800'000; } // namespace StreamStatistician::~StreamStatistician() {} -StreamStatisticianImpl::StreamStatisticianImpl(uint32_t ssrc, - Clock* clock, +StreamStatisticianImpl::StreamStatisticianImpl(uint32_t ssrc, Clock* clock, int max_reordering_threshold) : ssrc_(ssrc), clock_(clock), delta_internal_unix_epoch_ms_(clock_->CurrentNtpInMilliseconds() - clock_->TimeInMilliseconds() - - kNtpJan1970Millisecs), + rtc::kNtpJan1970Millisecs), incoming_bitrate_(kStatisticsProcessIntervalMs, RateStatistics::kBpsScale), max_reordering_threshold_(max_reordering_threshold), @@ -57,7 +54,8 @@ StreamStatisticianImpl::StreamStatisticianImpl(uint32_t ssrc, received_seq_first_(-1), received_seq_max_(-1), last_report_cumulative_loss_(0), - last_report_seq_max_(-1) {} + last_report_seq_max_(-1), + last_payload_type_frequency_(0) {} StreamStatisticianImpl::~StreamStatisticianImpl() = default; @@ -157,6 +155,8 @@ void StreamStatisticianImpl::UpdateJitter(const RtpPacketReceived& packet, time_diff_samples = std::abs(time_diff_samples); + ReviseFrequencyAndJitter(packet.payload_type_frequency()); + // lib_jingle sometimes deliver crazy jumps in TS for the same stream. // If this happens, don't update jitter value. Use 5 secs video frequency // as the threshold. @@ -167,6 +167,38 @@ void StreamStatisticianImpl::UpdateJitter(const RtpPacketReceived& packet, } } +void StreamStatisticianImpl::ReviseFrequencyAndJitter( + int payload_type_frequency) { + if (payload_type_frequency == last_payload_type_frequency_) { + return; + } + + if (payload_type_frequency != 0) { + if (last_payload_type_frequency_ != 0) { + // Value in "jitter_q4_" variable is a number of samples. + // I.e. jitter = timestamp (ms) * frequency (kHz). + // Since the frequency has changed we have to update the number of samples + // accordingly. The new value should rely on a new frequency. + + // If we don't do such procedure we end up with the number of samples that + // cannot be converted into milliseconds correctly + // (i.e. jitter_ms = jitter_q4_ >> 4 / (payload_type_frequency / 1000)). + // In such case, the number of samples has a "mix". + + // Doing so we pretend that everything prior and including the current + // packet were computed on packet's frequency. + jitter_q4_ = static_cast(static_cast(jitter_q4_) * + payload_type_frequency / + last_payload_type_frequency_); + } + // If last_payload_type_frequency_ is not present, the jitter_q4_ + // variable has its initial value. + + // Keep last_payload_type_frequency_ up to date and non-zero (set). + last_payload_type_frequency_ = payload_type_frequency; + } +} + void StreamStatisticianImpl::SetMaxReorderingThreshold( int max_reordering_threshold) { max_reordering_threshold_ = max_reordering_threshold; @@ -179,9 +211,14 @@ void StreamStatisticianImpl::EnableRetransmitDetection(bool enable) { RtpReceiveStats StreamStatisticianImpl::GetStats() const { RtpReceiveStats stats; stats.packets_lost = cumulative_loss_; - // TODO(nisse): Can we return a float instead? // Note: internal jitter value is in Q4 and needs to be scaled by 1/16. stats.jitter = jitter_q4_ >> 4; + if (last_payload_type_frequency_ > 0) { + // Divide value in fractional seconds by frequency to get jitter in + // fractional seconds. 
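// Illustrative sketch, not part of the patch: the unit handling behind jitter_q4_.
// The value is RFC 3550 interarrival jitter measured in RTP clock ticks and kept
// in Q4 fixed point, so converting it to time needs the payload clock rate, and a
// clock-rate change mid-stream means rescaling the stored tick count (as
// ReviseFrequencyAndJitter() does above).
#include <cstdint>

uint32_t RescaleJitterQ4(uint32_t jitter_q4, int old_hz, int new_hz) {
  // Express the same amount of time in ticks of the new clock rate.
  return static_cast<uint32_t>(
      static_cast<uint64_t>(jitter_q4) * new_hz / old_hz);
}

double JitterQ4ToMs(uint32_t jitter_q4, int clock_rate_hz) {
  // Drop the Q4 fraction (>> 4), then ticks -> milliseconds.
  return (jitter_q4 >> 4) * 1000.0 / clock_rate_hz;
}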
+ stats.interarrival_jitter = + webrtc::TimeDelta::Seconds(stats.jitter) / last_payload_type_frequency_; + } if (receive_counters_.last_packet_received_timestamp_ms.has_value()) { stats.last_packet_received_timestamp_ms = *receive_counters_.last_packet_received_timestamp_ms + diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h index 1a70fe4ad7..4aac20a74b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h @@ -65,6 +65,7 @@ class StreamStatisticianImpl : public StreamStatisticianImplInterface { bool IsRetransmitOfOldPacket(const RtpPacketReceived& packet, int64_t now_ms) const; void UpdateJitter(const RtpPacketReceived& packet, int64_t receive_time_ms); + void ReviseFrequencyAndJitter(int payload_type_frequency); // Updates StreamStatistician for out of order packets. // Returns true if packet considered to be out of order. bool UpdateOutOfOrder(const RtpPacketReceived& packet, @@ -108,6 +109,9 @@ class StreamStatisticianImpl : public StreamStatisticianImplInterface { // Counter values when we sent the last report. int32_t last_report_cumulative_loss_; int64_t last_report_seq_max_; + + // The sample frequency of the last received packet. + int last_payload_type_frequency_; }; // Thread-safe implementation of StreamStatisticianImplInterface. diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc index 723064eeba..6f90cd175c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc @@ -22,70 +22,83 @@ namespace webrtc { namespace { constexpr int kMinimumNumberOfSamples = 2; -constexpr int kTimingLogIntervalMs = 10000; +constexpr TimeDelta kTimingLogInterval = TimeDelta::Seconds(10); constexpr int kClocksOffsetSmoothingWindow = 100; +// Subtracts two NtpTime values keeping maximum precision. +int64_t Subtract(NtpTime minuend, NtpTime subtrahend) { + uint64_t a = static_cast(minuend); + uint64_t b = static_cast(subtrahend); + return a >= b ? static_cast(a - b) : -static_cast(b - a); +} + +NtpTime Add(NtpTime lhs, int64_t rhs) { + uint64_t result = static_cast(lhs); + if (rhs >= 0) { + result += static_cast(rhs); + } else { + result -= static_cast(-rhs); + } + return NtpTime(result); +} + } // namespace // TODO(wu): Refactor this class so that it can be shared with // vie_sync_module.cc. RemoteNtpTimeEstimator::RemoteNtpTimeEstimator(Clock* clock) : clock_(clock), - ntp_clocks_offset_estimator_(kClocksOffsetSmoothingWindow), - last_timing_log_ms_(-1) {} - -RemoteNtpTimeEstimator::~RemoteNtpTimeEstimator() {} + ntp_clocks_offset_estimator_(kClocksOffsetSmoothingWindow) {} -bool RemoteNtpTimeEstimator::UpdateRtcpTimestamp(int64_t rtt, - uint32_t ntp_secs, - uint32_t ntp_frac, +bool RemoteNtpTimeEstimator::UpdateRtcpTimestamp(TimeDelta rtt, + NtpTime sender_send_time, uint32_t rtp_timestamp) { - bool new_rtcp_sr = false; - if (!rtp_to_ntp_.UpdateMeasurements(ntp_secs, ntp_frac, rtp_timestamp, - &new_rtcp_sr)) { - return false; - } - if (!new_rtcp_sr) { - // No new RTCP SR since last time this function was called. 
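// Illustrative sketch, not part of the patch: NtpTime is Q32.32 fixed point
// (seconds since 1900 in the high 32 bits, 1/2^32 s in the low 32 bits). The
// Subtract()/Add() helpers above keep the offset estimate in that resolution,
// and the estimate itself assumes a symmetric path:
// offset = receiver_arrival - sender_send - rtt / 2. Names are assumptions.
#include <cstdint>

// Signed difference of two Q32.32 NTP values, in 2^-32 second units.
int64_t NtpDiff(uint64_t minuend, uint64_t subtrahend) {
  return minuend >= subtrahend ? static_cast<int64_t>(minuend - subtrahend)
                               : -static_cast<int64_t>(subtrahend - minuend);
}

int64_t EstimateRemoteToLocalOffsetQ32(uint64_t receiver_arrival_ntp,
                                       uint64_t sender_send_ntp,
                                       int64_t rtt_in_ntp_units) {
  // Assume half of the round trip was spent on the sender -> receiver leg.
  return NtpDiff(receiver_arrival_ntp, sender_send_ntp) - rtt_in_ntp_units / 2;
}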
- return true; + switch (rtp_to_ntp_.UpdateMeasurements(sender_send_time, rtp_timestamp)) { + case RtpToNtpEstimator::kInvalidMeasurement: + return false; + case RtpToNtpEstimator::kSameMeasurement: + // No new RTCP SR since last time this function was called. + return true; + case RtpToNtpEstimator::kNewMeasurement: + break; } + // Assume connection is symmetric and thus time to deliver the packet is half + // the round trip time. + int64_t deliver_time_ntp = ToNtpUnits(rtt) / 2; + // Update extrapolator with the new arrival time. - // The extrapolator assumes the ntp time. - int64_t receiver_arrival_time_ms = clock_->CurrentNtpInMilliseconds(); - int64_t sender_send_time_ms = NtpTime(ntp_secs, ntp_frac).ToMs(); - int64_t sender_arrival_time_ms = sender_send_time_ms + rtt / 2; + NtpTime receiver_arrival_time = clock_->CurrentNtpTime(); int64_t remote_to_local_clocks_offset = - receiver_arrival_time_ms - sender_arrival_time_ms; + Subtract(receiver_arrival_time, sender_send_time) - deliver_time_ntp; ntp_clocks_offset_estimator_.Insert(remote_to_local_clocks_offset); return true; } -int64_t RemoteNtpTimeEstimator::Estimate(uint32_t rtp_timestamp) { - int64_t sender_capture_ntp_ms = 0; - if (!rtp_to_ntp_.Estimate(rtp_timestamp, &sender_capture_ntp_ms)) { - return -1; +NtpTime RemoteNtpTimeEstimator::EstimateNtp(uint32_t rtp_timestamp) { + NtpTime sender_capture = rtp_to_ntp_.Estimate(rtp_timestamp); + if (!sender_capture.Valid()) { + return sender_capture; } int64_t remote_to_local_clocks_offset = ntp_clocks_offset_estimator_.GetFilteredValue(); - int64_t receiver_capture_ntp_ms = - sender_capture_ntp_ms + remote_to_local_clocks_offset; + NtpTime receiver_capture = Add(sender_capture, remote_to_local_clocks_offset); - int64_t now_ms = clock_->TimeInMilliseconds(); - if (now_ms - last_timing_log_ms_ > kTimingLogIntervalMs) { + Timestamp now = clock_->CurrentTime(); + if (now - last_timing_log_ > kTimingLogInterval) { RTC_LOG(LS_INFO) << "RTP timestamp: " << rtp_timestamp - << " in NTP clock: " << sender_capture_ntp_ms + << " in NTP clock: " << sender_capture.ToMs() << " estimated time in receiver NTP clock: " - << receiver_capture_ntp_ms; - last_timing_log_ms_ = now_ms; + << receiver_capture.ToMs(); + last_timing_log_ = now; } - return receiver_capture_ntp_ms; + return receiver_capture; } absl::optional -RemoteNtpTimeEstimator::EstimateRemoteToLocalClockOffsetMs() { +RemoteNtpTimeEstimator::EstimateRemoteToLocalClockOffset() { if (ntp_clocks_offset_estimator_.GetNumberOfSamplesStored() < kMinimumNumberOfSamples) { return absl::nullopt; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.cc index c839b72a87..d5734c6dd5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.cc @@ -91,7 +91,9 @@ bool App::Create(uint8_t* packet, ByteWriter::WriteBigEndian(&packet[*index + 0], sender_ssrc()); ByteWriter::WriteBigEndian(&packet[*index + 4], name_); - memcpy(&packet[*index + 8], data_.data(), data_.size()); + if (!data_.empty()) { + memcpy(&packet[*index + 8], data_.data(), data_.size()); + } *index += (8 + data_.size()); RTC_DCHECK_EQ(index_end, *index); return true; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.cc index cefa341b1e..a6471772b1 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.cc @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" #include "rtc_base/checks.h" @@ -125,9 +126,9 @@ bool Bye::SetCsrcs(std::vector csrcs) { return true; } -void Bye::SetReason(std::string reason) { +void Bye::SetReason(absl::string_view reason) { RTC_DCHECK_LE(reason.size(), 0xffu); - reason_ = std::move(reason); + reason_ = std::string(reason); } size_t Bye::BlockLength() const { diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h index c96a7578fe..d31205793a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" namespace webrtc { @@ -32,7 +33,7 @@ class Bye : public RtcpPacket { bool Parse(const CommonHeader& packet); bool SetCsrcs(std::vector csrcs); - void SetReason(std::string reason); + void SetReason(absl::string_view reason); const std::vector& csrcs() const { return csrcs_; } const std::string& reason() const { return reason_; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h index 8bee600692..d98dbd088d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h @@ -16,7 +16,6 @@ #include #include "modules/rtp_rtcp/source/rtcp_packet.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { namespace rtcp { @@ -26,6 +25,9 @@ class CompoundPacket : public RtcpPacket { CompoundPacket(); ~CompoundPacket() override; + CompoundPacket(const CompoundPacket&) = delete; + CompoundPacket& operator=(const CompoundPacket&) = delete; + void Append(std::unique_ptr packet); // Size of this packet in bytes (i.e. total size of nested packets). 
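// Illustrative sketch, not part of the patch: the idiom this hunk migrates to.
// Explicitly deleted copy operations in the public section replace the legacy
// RTC_DISALLOW_COPY_AND_ASSIGN macro (removed just below), which is why the
// "rtc_base/constructor_magic.h" include is no longer needed.
class NonCopyable {
 public:
  NonCopyable() = default;
  NonCopyable(const NonCopyable&) = delete;             // Roughly what the macro expanded to.
  NonCopyable& operator=(const NonCopyable&) = delete;  // Ditto.
};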
@@ -38,9 +40,6 @@ class CompoundPacket : public RtcpPacket { protected: std::vector> appended_packets_; - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(CompoundPacket); }; } // namespace rtcp diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h index 6fe2099fd9..ad91dfdcc6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h @@ -24,11 +24,21 @@ struct ReceiveTimeInfo { ReceiveTimeInfo() : ssrc(0), last_rr(0), delay_since_last_rr(0) {} ReceiveTimeInfo(uint32_t ssrc, uint32_t last_rr, uint32_t delay) : ssrc(ssrc), last_rr(last_rr), delay_since_last_rr(delay) {} + uint32_t ssrc; uint32_t last_rr; uint32_t delay_since_last_rr; }; +inline bool operator==(const ReceiveTimeInfo& lhs, const ReceiveTimeInfo& rhs) { + return lhs.ssrc == rhs.ssrc && lhs.last_rr == rhs.last_rr && + lhs.delay_since_last_rr == rhs.delay_since_last_rr; +} + +inline bool operator!=(const ReceiveTimeInfo& lhs, const ReceiveTimeInfo& rhs) { + return !(lhs == rhs); +} + // DLRR Report Block: Delay since the Last Receiver Report (RFC 3611). class Dlrr { public: diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.cc deleted file mode 100644 index 5e7dadd1f4..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.cc +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h" - -#include -#include - -#include "modules/rtp_rtcp/source/byte_io.h" -#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { -namespace rtcp { -constexpr uint8_t ExtendedJitterReport::kPacketType; -// Transmission Time Offsets in RTP Streams (RFC 5450). -// -// 0 1 2 3 -// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// hdr |V=2|P| RC | PT=IJ=195 | length | -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// | inter-arrival jitter | -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// . . -// . . -// . . -// | inter-arrival jitter | -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// -// If present, this RTCP packet must be placed after a receiver report -// (inside a compound RTCP packet), and MUST have the same value for RC -// (reception report count) as the receiver report. 
- -ExtendedJitterReport::ExtendedJitterReport() = default; - -ExtendedJitterReport::~ExtendedJitterReport() = default; - -bool ExtendedJitterReport::Parse(const CommonHeader& packet) { - RTC_DCHECK_EQ(packet.type(), kPacketType); - - const uint8_t number_of_jitters = packet.count(); - - if (packet.payload_size_bytes() < number_of_jitters * kJitterSizeBytes) { - RTC_LOG(LS_WARNING) << "Packet is too small to contain all the jitter."; - return false; - } - - inter_arrival_jitters_.resize(number_of_jitters); - for (size_t index = 0; index < number_of_jitters; ++index) { - inter_arrival_jitters_[index] = ByteReader::ReadBigEndian( - &packet.payload()[index * kJitterSizeBytes]); - } - - return true; -} - -bool ExtendedJitterReport::SetJitterValues(std::vector values) { - if (values.size() > kMaxNumberOfJitterValues) { - RTC_LOG(LS_WARNING) << "Too many inter-arrival jitter items."; - return false; - } - inter_arrival_jitters_ = std::move(values); - return true; -} - -size_t ExtendedJitterReport::BlockLength() const { - return kHeaderLength + kJitterSizeBytes * inter_arrival_jitters_.size(); -} - -bool ExtendedJitterReport::Create(uint8_t* packet, - size_t* index, - size_t max_length, - PacketReadyCallback callback) const { - while (*index + BlockLength() > max_length) { - if (!OnBufferFull(packet, index, callback)) - return false; - } - const size_t index_end = *index + BlockLength(); - size_t length = inter_arrival_jitters_.size(); - CreateHeader(length, kPacketType, length, packet, index); - - for (uint32_t jitter : inter_arrival_jitters_) { - ByteWriter::WriteBigEndian(packet + *index, jitter); - *index += kJitterSizeBytes; - } - // Sanity check. - RTC_DCHECK_EQ(index_end, *index); - return true; -} - -} // namespace rtcp -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h deleted file mode 100644 index c28b9d9dbd..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_EXTENDED_JITTER_REPORT_H_ -#define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_EXTENDED_JITTER_REPORT_H_ - -#include - -#include "modules/rtp_rtcp/source/rtcp_packet.h" - -namespace webrtc { -namespace rtcp { -class CommonHeader; - -class ExtendedJitterReport : public RtcpPacket { - public: - static constexpr uint8_t kPacketType = 195; - static constexpr size_t kMaxNumberOfJitterValues = 0x1f; - - ExtendedJitterReport(); - ~ExtendedJitterReport() override; - - // Parse assumes header is already parsed and validated. 
- bool Parse(const CommonHeader& packet); - - bool SetJitterValues(std::vector jitter_values); - - const std::vector& jitter_values() { - return inter_arrival_jitters_; - } - - size_t BlockLength() const override; - - bool Create(uint8_t* packet, - size_t* index, - size_t max_length, - PacketReadyCallback callback) const override; - - private: - static constexpr size_t kJitterSizeBytes = 4; - - std::vector inter_arrival_jitters_; -}; - -} // namespace rtcp -} // namespace webrtc -#endif // MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_EXTENDED_JITTER_REPORT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/loss_notification.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/loss_notification.h index b23008c528..0f70cf75c3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/loss_notification.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/loss_notification.h @@ -42,8 +42,8 @@ class LossNotification : public Psfb { // Set all of the values transmitted by the loss notification message. // If the values may not be represented by a loss notification message, // false is returned, and no change is made to the object; this happens - // when `last_recieved` is ahead of `last_decoded` by more than 0x7fff. - // This is because `last_recieved` is represented on the wire as a delta, + // when `last_received` is ahead of `last_decoded` by more than 0x7fff. + // This is because `last_received` is represented on the wire as a delta, // and only 15 bits are available for that delta. ABSL_MUST_USE_RESULT bool Set(uint16_t last_decoded, diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h index 8eb4ce62ad..827bd74399 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h @@ -46,6 +46,14 @@ class Rrtr { NtpTime ntp_; }; +inline bool operator==(const Rrtr& rrtr1, const Rrtr& rrtr2) { + return rrtr1.ntp() == rrtr2.ntp(); +} + +inline bool operator!=(const Rrtr& rrtr1, const Rrtr& rrtr2) { + return !(rrtr1 == rrtr2); +} + } // namespace rtcp } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RRTR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.cc index b2d0ff923b..f244ec5f37 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.cc @@ -14,6 +14,7 @@ #include +#include "absl/strings/string_view.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" #include "rtc_base/checks.h" @@ -145,7 +146,7 @@ bool Sdes::Parse(const CommonHeader& packet) { return true; } -bool Sdes::AddCName(uint32_t ssrc, std::string cname) { +bool Sdes::AddCName(uint32_t ssrc, absl::string_view cname) { RTC_DCHECK_LE(cname.length(), 0xffu); if (chunks_.size() >= kMaxNumberOfChunks) { RTC_LOG(LS_WARNING) << "Max SDES chunks reached."; @@ -153,7 +154,7 @@ bool Sdes::AddCName(uint32_t ssrc, std::string cname) { } Chunk chunk; chunk.ssrc = ssrc; - chunk.cname = std::move(cname); + chunk.cname = std::string(cname); chunks_.push_back(chunk); block_length_ += ChunkSize(chunk); return true; diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.h index 70e9385b9b..36b63ba29f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.h @@ -14,6 +14,7 @@ #include #include +#include "absl/strings/string_view.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" namespace webrtc { @@ -35,7 +36,7 @@ class Sdes : public RtcpPacket { // Parse assumes header is already parsed and validated. bool Parse(const CommonHeader& packet); - bool AddCName(uint32_t ssrc, std::string cname); + bool AddCName(uint32_t ssrc, absl::string_view cname); const std::vector& chunks() const { return chunks_; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc index 50102c21a3..bb1578fd8c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc @@ -42,9 +42,8 @@ constexpr size_t kMaxSizeBytes = (1 << 16) * 4; // * 8 bytes FeedbackPacket header. // * 2 bytes for one chunk. constexpr size_t kMinPayloadSizeBytes = 8 + 8 + 2; -constexpr int kBaseScaleFactor = - TransportFeedback::kDeltaScaleFactor * (1 << 8); -constexpr int64_t kTimeWrapPeriodUs = (1ll << 24) * kBaseScaleFactor; +constexpr TimeDelta kBaseTimeTick = TransportFeedback::kDeltaTick * (1 << 8); +constexpr TimeDelta kTimeWrapPeriod = kBaseTimeTick * (1 << 24); // Message format // @@ -117,6 +116,15 @@ void TransportFeedback::LastChunk::Add(DeltaSize delta_size) { has_large_delta_ = has_large_delta_ || delta_size == kLarge; } +void TransportFeedback::LastChunk::AddMissingPackets(size_t num_missing) { + RTC_DCHECK_EQ(size_, 0); + RTC_DCHECK(all_same_); + RTC_DCHECK(!has_large_delta_); + RTC_DCHECK_LT(num_missing, kMaxRunLengthCapacity); + absl::c_fill(delta_sizes_, DeltaSize(0)); + size_ = num_missing; +} + uint16_t TransportFeedback::LastChunk::Emit() { RTC_DCHECK(!CanAdd(0) || !CanAdd(1) || !CanAdd(2)); if (all_same_) { @@ -161,7 +169,8 @@ void TransportFeedback::LastChunk::AppendTo( if (all_same_) { deltas->insert(deltas->end(), size_, delta_sizes_[0]); } else { - deltas->insert(deltas->end(), delta_sizes_, delta_sizes_ + size_); + deltas->insert(deltas->end(), delta_sizes_.begin(), + delta_sizes_.begin() + size_); } } @@ -273,7 +282,7 @@ TransportFeedback::TransportFeedback(bool include_timestamps, bool include_lost) base_time_ticks_(0), feedback_seq_(0), include_timestamps_(include_timestamps), - last_timestamp_us_(0), + last_timestamp_(Timestamp::Zero()), size_bytes_(kTransportFeedbackHeaderSizeBytes) {} TransportFeedback::TransportFeedback(const TransportFeedback&) = default; @@ -285,7 +294,7 @@ TransportFeedback::TransportFeedback(TransportFeedback&& other) base_time_ticks_(other.base_time_ticks_), feedback_seq_(other.feedback_seq_), include_timestamps_(other.include_timestamps_), - last_timestamp_us_(other.last_timestamp_us_), + last_timestamp_(other.last_timestamp_), received_packets_(std::move(other.received_packets_)), all_packets_(std::move(other.all_packets_)), encoded_chunks_(std::move(other.encoded_chunks_)), @@ -297,12 +306,12 @@ TransportFeedback::TransportFeedback(TransportFeedback&& other) TransportFeedback::~TransportFeedback() {} void 
TransportFeedback::SetBase(uint16_t base_sequence, - int64_t ref_timestamp_us) { + Timestamp ref_timestamp) { RTC_DCHECK_EQ(num_seq_no_, 0); - RTC_DCHECK_GE(ref_timestamp_us, 0); base_seq_no_ = base_sequence; - base_time_ticks_ = (ref_timestamp_us % kTimeWrapPeriodUs) / kBaseScaleFactor; - last_timestamp_us_ = GetBaseTimeUs(); + base_time_ticks_ = + (ref_timestamp.us() % kTimeWrapPeriod.us()) / kBaseTimeTick.us(); + last_timestamp_ = BaseTime(); } void TransportFeedback::SetFeedbackSequenceNumber(uint8_t feedback_sequence) { @@ -310,19 +319,25 @@ void TransportFeedback::SetFeedbackSequenceNumber(uint8_t feedback_sequence) { } bool TransportFeedback::AddReceivedPacket(uint16_t sequence_number, - int64_t timestamp_us) { + Timestamp timestamp) { // Set delta to zero if timestamps are not included, this will simplify the // encoding process. int16_t delta = 0; if (include_timestamps_) { // Convert to ticks and round. + if (last_timestamp_ > timestamp) { + timestamp += (last_timestamp_ - timestamp).RoundUpTo(kTimeWrapPeriod); + } + RTC_DCHECK_GE(timestamp, last_timestamp_); int64_t delta_full = - (timestamp_us - last_timestamp_us_) % kTimeWrapPeriodUs; - if (delta_full > kTimeWrapPeriodUs / 2) - delta_full -= kTimeWrapPeriodUs; - delta_full += - delta_full < 0 ? -(kDeltaScaleFactor / 2) : kDeltaScaleFactor / 2; - delta_full /= kDeltaScaleFactor; + (timestamp - last_timestamp_).us() % kTimeWrapPeriod.us(); + if (delta_full > kTimeWrapPeriod.us() / 2) { + delta_full -= kTimeWrapPeriod.us(); + delta_full -= kDeltaTick.us() / 2; + } else { + delta_full += kDeltaTick.us() / 2; + } + delta_full /= kDeltaTick.us(); delta = static_cast(delta_full); // If larger than 16bit signed, we can't represent it - need new fb packet. @@ -337,11 +352,13 @@ bool TransportFeedback::AddReceivedPacket(uint16_t sequence_number, uint16_t last_seq_no = next_seq_no - 1; if (!IsNewerSequenceNumber(sequence_number, last_seq_no)) return false; - for (; next_seq_no != sequence_number; ++next_seq_no) { - if (!AddDeltaSize(0)) - return false; - if (include_lost_) + uint16_t num_missing_packets = sequence_number - next_seq_no; + if (!AddMissingPackets(num_missing_packets)) + return false; + if (include_lost_) { + for (; next_seq_no != sequence_number; ++next_seq_no) { all_packets_.emplace_back(next_seq_no); + } } } @@ -352,7 +369,7 @@ bool TransportFeedback::AddReceivedPacket(uint16_t sequence_number, received_packets_.emplace_back(sequence_number, delta); if (include_lost_) all_packets_.emplace_back(sequence_number, delta); - last_timestamp_us_ += delta * kDeltaScaleFactor; + last_timestamp_ += delta * kDeltaTick; if (include_timestamps_) { size_bytes_ += delta_size; } @@ -374,30 +391,25 @@ uint16_t TransportFeedback::GetBaseSequence() const { return base_seq_no_; } -int64_t TransportFeedback::GetBaseTimeUs() const { - return static_cast(base_time_ticks_) * kBaseScaleFactor; -} - -TimeDelta TransportFeedback::GetBaseTime() const { - return TimeDelta::Micros(GetBaseTimeUs()); +Timestamp TransportFeedback::BaseTime() const { + // Add an extra kTimeWrapPeriod to allow add received packets arrived earlier + // than the first added packet (and thus allow to record negative deltas) + // even when base_time_ticks_ == 0. + return Timestamp::Zero() + kTimeWrapPeriod + + int64_t{base_time_ticks_} * kBaseTimeTick; } -int64_t TransportFeedback::GetBaseDeltaUs(int64_t prev_timestamp_us) const { - int64_t delta = GetBaseTimeUs() - prev_timestamp_us; - - // Detect and compensate for wrap-arounds in base time. 
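// Illustrative sketch, not part of the patch: the wrap compensation performed
// here. The base time rides in a 24-bit field of 64 ms ticks, so it wraps
// roughly every 12.4 days; the unwrapped delta is whichever of
// {d, d - wrap, d + wrap} has the smallest magnitude.
#include <cstdint>
#include <cstdlib>

int64_t UnwrapBaseDeltaUs(int64_t raw_delta_us) {
  constexpr int64_t kWrapPeriodUs =
      (int64_t{1} << 24) * 64'000;  // 24 bits of 64 ms ticks.
  if (std::abs(raw_delta_us - kWrapPeriodUs) < std::abs(raw_delta_us))
    return raw_delta_us - kWrapPeriodUs;  // Wrapped backwards.
  if (std::abs(raw_delta_us + kWrapPeriodUs) < std::abs(raw_delta_us))
    return raw_delta_us + kWrapPeriodUs;  // Wrapped forwards.
  return raw_delta_us;
}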
- if (std::abs(delta - kTimeWrapPeriodUs) < std::abs(delta)) { - delta -= kTimeWrapPeriodUs; // Wrap backwards. - } else if (std::abs(delta + kTimeWrapPeriodUs) < std::abs(delta)) { - delta += kTimeWrapPeriodUs; // Wrap forwards. +TimeDelta TransportFeedback::GetBaseDelta(Timestamp prev_timestamp) const { + TimeDelta delta = BaseTime() - prev_timestamp; + // Compensate for wrap around. + if ((delta - kTimeWrapPeriod).Abs() < delta.Abs()) { + delta -= kTimeWrapPeriod; // Wrap backwards. + } else if ((delta + kTimeWrapPeriod).Abs() < delta.Abs()) { + delta += kTimeWrapPeriod; // Wrap forwards. } return delta; } -TimeDelta TransportFeedback::GetBaseDelta(TimeDelta prev_timestamp) const { - return TimeDelta::Micros(GetBaseDeltaUs(prev_timestamp.us())); -} - // De-serialize packet. bool TransportFeedback::Parse(const CommonHeader& packet) { RTC_DCHECK_EQ(packet.type(), kPacketType); @@ -417,7 +429,7 @@ bool TransportFeedback::Parse(const CommonHeader& packet) { base_seq_no_ = ByteReader::ReadBigEndian(&payload[8]); uint16_t status_count = ByteReader::ReadBigEndian(&payload[10]); - base_time_ticks_ = ByteReader::ReadBigEndian(&payload[12]); + base_time_ticks_ = ByteReader::ReadBigEndian(&payload[12]); feedback_seq_ = payload[15]; Clear(); size_t index = 16; @@ -465,7 +477,7 @@ bool TransportFeedback::Parse(const CommonHeader& packet) { received_packets_.emplace_back(seq_no, delta); if (include_lost_) all_packets_.emplace_back(seq_no, delta); - last_timestamp_us_ += delta * kDeltaScaleFactor; + last_timestamp_ += delta * kDeltaTick; index += delta_size; break; } @@ -474,7 +486,7 @@ bool TransportFeedback::Parse(const CommonHeader& packet) { received_packets_.emplace_back(seq_no, delta); if (include_lost_) all_packets_.emplace_back(seq_no, delta); - last_timestamp_us_ += delta * kDeltaScaleFactor; + last_timestamp_ += delta * kDeltaTick; index += delta_size; break; } @@ -544,7 +556,7 @@ bool TransportFeedback::IsConsistent() const { << num_seq_no_; return false; } - int64_t timestamp_us = GetBaseTimeUs(); + Timestamp timestamp = BaseTime(); auto packet_it = received_packets_.begin(); uint16_t seq_no = base_seq_no_; for (DeltaSize delta_size : delta_sizes) { @@ -566,7 +578,7 @@ bool TransportFeedback::IsConsistent() const { << " doesn't fit into one byte"; return false; } - timestamp_us += packet_it->delta_us(); + timestamp += packet_it->delta(); ++packet_it; } if (include_timestamps_) { @@ -579,9 +591,10 @@ bool TransportFeedback::IsConsistent() const { << packet_it->sequence_number(); return false; } - if (timestamp_us != last_timestamp_us_) { - RTC_LOG(LS_ERROR) << "Last timestamp mismatch. Calculated: " << timestamp_us - << ". Saved: " << last_timestamp_us_; + if (timestamp != last_timestamp_) { + RTC_LOG(LS_ERROR) << "Last timestamp mismatch. Calculated: " + << ToLogString(timestamp) + << ". 
Saved: " << ToLogString(last_timestamp_); return false; } if (size_bytes_ != packet_size) { @@ -627,7 +640,7 @@ bool TransportFeedback::Create(uint8_t* packet, ByteWriter::WriteBigEndian(&packet[*position], num_seq_no_); *position += 2; - ByteWriter::WriteBigEndian(&packet[*position], base_time_ticks_); + ByteWriter::WriteBigEndian(&packet[*position], base_time_ticks_); *position += 3; packet[(*position)++] = feedback_seq_; @@ -666,7 +679,7 @@ bool TransportFeedback::Create(uint8_t* packet, void TransportFeedback::Clear() { num_seq_no_ = 0; - last_timestamp_us_ = GetBaseTimeUs(); + last_timestamp_ = BaseTime(); received_packets_.clear(); all_packets_.clear(); encoded_chunks_.clear(); @@ -697,5 +710,38 @@ bool TransportFeedback::AddDeltaSize(DeltaSize delta_size) { return true; } +bool TransportFeedback::AddMissingPackets(size_t num_missing_packets) { + size_t new_num_seq_no = num_seq_no_ + num_missing_packets; + if (new_num_seq_no > kMaxReportedPackets) { + return false; + } + + if (!last_chunk_.Empty()) { + while (num_missing_packets > 0 && last_chunk_.CanAdd(0)) { + last_chunk_.Add(0); + --num_missing_packets; + } + if (num_missing_packets == 0) { + num_seq_no_ = new_num_seq_no; + return true; + } + encoded_chunks_.push_back(last_chunk_.Emit()); + } + RTC_DCHECK(last_chunk_.Empty()); + size_t full_chunks = num_missing_packets / LastChunk::kMaxRunLengthCapacity; + size_t partial_chunk = num_missing_packets % LastChunk::kMaxRunLengthCapacity; + size_t num_chunks = full_chunks + (partial_chunk > 0 ? 1 : 0); + if (size_bytes_ + kChunkSizeBytes * num_chunks > kMaxSizeBytes) { + num_seq_no_ = (new_num_seq_no - num_missing_packets); + return false; + } + size_bytes_ += kChunkSizeBytes * num_chunks; + // T = 0, S = 0, run length = kMaxRunLengthCapacity, see EncodeRunLength(). + encoded_chunks_.insert(encoded_chunks_.end(), full_chunks, + LastChunk::kMaxRunLengthCapacity); + last_chunk_.AddMissingPackets(partial_chunk); + num_seq_no_ = new_num_seq_no; + return true; +} } // namespace rtcp } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h index e30d338154..c580632337 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h @@ -11,10 +11,13 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_TRANSPORT_FEEDBACK_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_TRANSPORT_FEEDBACK_H_ +#include #include #include +#include "absl/base/attributes.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" namespace webrtc { @@ -36,8 +39,7 @@ class TransportFeedback : public Rtpfb { uint16_t sequence_number() const { return sequence_number_; } int16_t delta_ticks() const { return delta_ticks_; } - int32_t delta_us() const { return delta_ticks_ * kDeltaScaleFactor; } - TimeDelta delta() const { return TimeDelta::Micros(delta_us()); } + TimeDelta delta() const { return delta_ticks_ * kDeltaTick; } bool received() const { return received_; } private: @@ -48,7 +50,7 @@ class TransportFeedback : public Rtpfb { // TODO(sprang): IANA reg? static constexpr uint8_t kFeedbackMessageType = 15; // Convert to multiples of 0.25ms. 
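// Illustrative sketch, not part of the patch: how a receive-time delta is
// quantized to the 250 us wire ticks, with round-to-nearest, and rejected when
// it no longer fits the signed 16-bit "large delta" field (which forces a new
// feedback packet). Names are assumptions.
#include <cstdint>
#include <optional>

std::optional<int16_t> DeltaUsToTicks(int64_t delta_us) {
  constexpr int64_t kDeltaTickUs = 250;
  // Round half away from zero.
  delta_us += delta_us < 0 ? -kDeltaTickUs / 2 : kDeltaTickUs / 2;
  const int64_t ticks = delta_us / kDeltaTickUs;
  if (ticks != static_cast<int16_t>(ticks))
    return std::nullopt;  // Out of range; caller needs a fresh feedback packet.
  return static_cast<int16_t>(ticks);
}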
- static constexpr int kDeltaScaleFactor = 250; + static constexpr TimeDelta kDeltaTick = TimeDelta::Micros(250); // Maximum number of packets (including missing) TransportFeedback can report. static constexpr size_t kMaxReportedPackets = 0xffff; @@ -63,11 +65,12 @@ class TransportFeedback : public Rtpfb { ~TransportFeedback() override; - void SetBase(uint16_t base_sequence, // Seq# of first packet in this msg. - int64_t ref_timestamp_us); // Reference timestamp for this msg. + void SetBase(uint16_t base_sequence, // Seq# of first packet in this msg. + Timestamp ref_timestamp); // Reference timestamp for this msg. + void SetFeedbackSequenceNumber(uint8_t feedback_sequence); // NOTE: This method requires increasing sequence numbers (excepting wraps). - bool AddReceivedPacket(uint16_t sequence_number, int64_t timestamp_us); + bool AddReceivedPacket(uint16_t sequence_number, Timestamp timestamp); const std::vector& GetReceivedPackets() const; const std::vector& GetAllPackets() const; @@ -76,13 +79,11 @@ class TransportFeedback : public Rtpfb { // Returns number of packets (including missing) this feedback describes. size_t GetPacketStatusCount() const { return num_seq_no_; } - // Get the reference time in microseconds, including any precision loss. - int64_t GetBaseTimeUs() const; - TimeDelta GetBaseTime() const; + // Get the reference time including any precision loss. + Timestamp BaseTime() const; - // Get the unwrapped delta between current base time and `prev_timestamp_us`. - int64_t GetBaseDeltaUs(int64_t prev_timestamp_us) const; - TimeDelta GetBaseDelta(TimeDelta prev_timestamp) const; + // Get the unwrapped delta between current base time and `prev_timestamp`. + TimeDelta GetBaseDelta(Timestamp prev_timestamp) const; // Does the feedback packet contain timestamp information? bool IncludeTimestamps() const { return include_timestamps_; } @@ -110,6 +111,7 @@ class TransportFeedback : public Rtpfb { class LastChunk { public: using DeltaSize = TransportFeedback::DeltaSize; + static constexpr size_t kMaxRunLengthCapacity = 0x1fff; LastChunk(); @@ -120,6 +122,8 @@ class TransportFeedback : public Rtpfb { bool CanAdd(DeltaSize delta_size) const; // Add `delta_size`, assumes `CanAdd(delta_size)`, void Add(DeltaSize delta_size); + // Equivalent to calling Add(0) `num_missing` times. Assumes `Empty()`. + void AddMissingPackets(size_t num_missing); // Encode chunk as large as possible removing encoded delta sizes. // Assume CanAdd() == false for some valid delta_size. @@ -133,7 +137,6 @@ class TransportFeedback : public Rtpfb { void AppendTo(std::vector* deltas) const; private: - static constexpr size_t kMaxRunLengthCapacity = 0x1fff; static constexpr size_t kMaxOneBitCapacity = 14; static constexpr size_t kMaxTwoBitCapacity = 7; static constexpr size_t kMaxVectorCapacity = kMaxOneBitCapacity; @@ -148,7 +151,7 @@ class TransportFeedback : public Rtpfb { uint16_t EncodeRunLength() const; void DecodeRunLength(uint16_t chunk, size_t max_size); - DeltaSize delta_sizes_[kMaxVectorCapacity]; + std::array delta_sizes_; size_t size_; bool all_same_; bool has_large_delta_; @@ -158,15 +161,17 @@ class TransportFeedback : public Rtpfb { void Clear(); bool AddDeltaSize(DeltaSize delta_size); + // Adds `num_missing_packets` deltas of size 0. 
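// Illustrative sketch, not part of the patch: the run length chunk layout that
// AddMissingPackets() relies on (draft-holmer-rmcat-transport-wide-cc): top bit
// 0, a 2-bit packet status symbol, a 13-bit run length — hence the 0x1fff
// (kMaxRunLengthCapacity) limit per chunk. A run of "not received" uses symbol
// 0, so a full chunk encodes as the bare value 0x1fff.
#include <cassert>
#include <cstdint>

uint16_t EncodeRunLengthChunk(uint8_t status_symbol, uint16_t run_length) {
  assert(status_symbol <= 3);
  assert(run_length <= 0x1fff);
  return static_cast<uint16_t>((status_symbol << 13) | run_length);
}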
+ bool AddMissingPackets(size_t num_missing_packets); const bool include_lost_; uint16_t base_seq_no_; uint16_t num_seq_no_; - int32_t base_time_ticks_; + uint32_t base_time_ticks_; uint8_t feedback_seq_; bool include_timestamps_; - int64_t last_timestamp_us_; + Timestamp last_timestamp_; std::vector received_packets_; std::vector all_packets_; // All but last encoded packet chunks. diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc index 47843be810..0a24481762 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc @@ -143,7 +143,6 @@ RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, : clock_(config.clock), receiver_only_(config.receiver_only), rtp_rtcp_(owner), - main_ssrc_(config.local_media_ssrc), registered_ssrcs_(false, config), rtcp_bandwidth_observer_(config.bandwidth_callback), rtcp_intra_frame_observer_(config.intra_frame_callback), @@ -177,7 +176,6 @@ RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, : clock_(config.clock), receiver_only_(config.receiver_only), rtp_rtcp_(owner), - main_ssrc_(config.local_media_ssrc), registered_ssrcs_(true, config), rtcp_bandwidth_observer_(config.bandwidth_callback), rtcp_intra_frame_observer_(config.intra_frame_callback), @@ -468,9 +466,6 @@ bool RTCPReceiver::ParseCompoundPacket(rtc::ArrayView packet, break; } - if (packet_type_counter_.first_packet_time_ms == -1) - packet_type_counter_.first_packet_time_ms = clock_->TimeInMilliseconds(); - switch (rtcp_block.type()) { case rtcp::SenderReport::kPacketType: HandleSenderReport(rtcp_block, packet_information); @@ -552,7 +547,7 @@ bool RTCPReceiver::ParseCompoundPacket(rtc::ArrayView packet, if (packet_type_counter_observer_) { packet_type_counter_observer_->RtcpPacketTypesCounterUpdated( - main_ssrc_, packet_type_counter_); + local_media_ssrc(), packet_type_counter_); } if (num_skipped_packets_ > 0) { @@ -662,21 +657,18 @@ void RTCPReceiver::HandleReportBlock(const ReportBlock& report_block, rtcp_report_block.delay_since_last_sender_report = report_block.delay_since_last_sr(); rtcp_report_block.last_sender_report_timestamp = report_block.last_sr(); - report_block_data->SetReportBlock(rtcp_report_block, rtc::TimeUTCMicros()); + // Number of seconds since 1900 January 1 00:00 GMT (see + // https://tools.ietf.org/html/rfc868). + report_block_data->SetReportBlock( + rtcp_report_block, + (clock_->CurrentNtpInMilliseconds() - rtc::kNtpJan1970Millisecs) * + rtc::kNumMicrosecsPerMillisec); - int64_t rtt_ms = 0; uint32_t send_time_ntp = report_block.last_sr(); // RFC3550, section 6.4.1, LSR field discription states: // If no SR has been received yet, the field is set to zero. // Receiver rtp_rtcp module is not expected to calculate rtt using // Sender Reports even if it accidentally can. - - // TODO(nisse): Use this way to determine the RTT only when `receiver_only_` - // is false. However, that currently breaks the tests of the - // googCaptureStartNtpTimeMs stat for audio receive streams. To fix, either - // delete all dependencies on RTT measurements for audio receive streams, or - // ensure that audio receive streams that need RTT and stats that depend on it - // are configured with an associated audio send stream. if (send_time_ntp != 0) { uint32_t delay_ntp = report_block.delay_since_last_sr(); // Local NTP time. 
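// Illustrative sketch, not part of the patch: the RFC 3550 round-trip
// calculation appearing in the next hunk. LSR and DLSR are "compact NTP" (the
// middle 32 bits of an NTP timestamp), so one unit is 1/65536 s, and unsigned
// arithmetic absorbs wrap-around.
#include <cstdint>

uint32_t CompactNtpRtt(uint32_t receive_time_compact_ntp,
                       uint32_t delay_since_last_sr,  // DLSR
                       uint32_t last_sr_timestamp) {  // LSR
  return receive_time_compact_ntp - delay_since_last_sr - last_sr_timestamp;
}

int64_t CompactNtpToMs(uint32_t compact_ntp) {
  // 1 unit = 1/65536 s; round to the nearest millisecond.
  return (static_cast<int64_t>(compact_ntp) * 1000 + (1 << 15)) >> 16;
}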
@@ -686,13 +678,13 @@ void RTCPReceiver::HandleReportBlock(const ReportBlock& report_block, // RTT in 1/(2^16) seconds. uint32_t rtt_ntp = receive_time_ntp - delay_ntp - send_time_ntp; // Convert to 1/1000 seconds (milliseconds). - rtt_ms = CompactNtpRttToMs(rtt_ntp); - report_block_data->AddRoundTripTimeSample(rtt_ms); - if (report_block.source_ssrc() == main_ssrc_) { - rtts_[remote_ssrc].AddRtt(TimeDelta::Millis(rtt_ms)); + TimeDelta rtt = CompactNtpRttToTimeDelta(rtt_ntp); + report_block_data->AddRoundTripTimeSample(rtt.ms()); + if (report_block.source_ssrc() == local_media_ssrc()) { + rtts_[remote_ssrc].AddRtt(rtt); } - packet_information->rtt_ms = rtt_ms; + packet_information->rtt_ms = rtt.ms(); } packet_information->report_blocks.push_back( @@ -780,7 +772,7 @@ std::vector RTCPReceiver::BoundingSet(bool* tmmbr_owner) { if (!tmmbr_info) return std::vector(); - *tmmbr_owner = TMMBRHelp::IsOwner(tmmbr_info->tmmbn, main_ssrc_); + *tmmbr_owner = TMMBRHelp::IsOwner(tmmbr_info->tmmbn, local_media_ssrc()); return tmmbr_info->tmmbn; } @@ -807,7 +799,7 @@ void RTCPReceiver::HandleNack(const CommonHeader& rtcp_block, return; } - if (receiver_only_ || main_ssrc_ != nack.media_ssrc()) // Not to us. + if (receiver_only_ || local_media_ssrc() != nack.media_ssrc()) // Not to us. return; packet_information->nack_sequence_numbers.insert( @@ -936,9 +928,10 @@ void RTCPReceiver::HandleXrDlrrReportBlock(uint32_t sender_ssrc, uint32_t now_ntp = CompactNtp(clock_->CurrentNtpTime()); uint32_t rtt_ntp = now_ntp - delay_ntp - send_time_ntp; - xr_rr_rtt_ms_ = CompactNtpRttToMs(rtt_ntp); + TimeDelta rtt = CompactNtpRttToTimeDelta(rtt_ntp); + xr_rr_rtt_ms_ = rtt.ms(); - non_sender_rtts_[sender_ssrc].Update(TimeDelta::Millis(xr_rr_rtt_ms_)); + non_sender_rtts_[sender_ssrc].Update(rtt); } void RTCPReceiver::HandleXrTargetBitrate( @@ -973,7 +966,7 @@ void RTCPReceiver::HandlePli(const CommonHeader& rtcp_block, return; } - if (main_ssrc_ == pli.media_ssrc()) { + if (local_media_ssrc() == pli.media_ssrc()) { ++packet_type_counter_.pli_packets; // Received a signal that we need to send a new key frame. packet_information->packet_type_flags |= kRtcpPli; @@ -996,7 +989,7 @@ void RTCPReceiver::HandleTmmbr(const CommonHeader& rtcp_block, } for (const rtcp::TmmbItem& request : tmmbr.requests()) { - if (main_ssrc_ != request.ssrc() || request.bitrate_bps() == 0) + if (local_media_ssrc() != request.ssrc() || request.bitrate_bps() == 0) continue; TmmbrInformation* tmmbr_info = FindOrCreateTmmbrInfo(tmmbr.sender_ssrc()); @@ -1078,7 +1071,7 @@ void RTCPReceiver::HandleFir(const CommonHeader& rtcp_block, const int64_t now_ms = clock_->TimeInMilliseconds(); for (const rtcp::Fir::Request& fir_request : fir.requests()) { // Is it our sender that is requested to generate a new keyframe. 
- if (main_ssrc_ != fir_request.ssrc) + if (local_media_ssrc() != fir_request.ssrc) continue; ++packet_type_counter_.fir_packets; @@ -1170,7 +1163,8 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket( RTC_LOG(LS_VERBOSE) << "Incoming FIR from SSRC " << packet_information.remote_ssrc; } - rtcp_intra_frame_observer_->OnReceivedIntraFrameRequest(main_ssrc_); + rtcp_intra_frame_observer_->OnReceivedIntraFrameRequest( + local_media_ssrc()); } } if (rtcp_loss_notification_observer_ && @@ -1178,7 +1172,7 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket( rtcp::LossNotification* loss_notification = packet_information.loss_notification.get(); RTC_DCHECK(loss_notification); - if (loss_notification->media_ssrc() == main_ssrc_) { + if (loss_notification->media_ssrc() == local_media_ssrc()) { rtcp_loss_notification_observer_->OnReceivedLossNotification( loss_notification->media_ssrc(), loss_notification->last_decoded(), loss_notification->last_received(), @@ -1210,7 +1204,7 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket( (packet_information.packet_type_flags & kRtcpTransportFeedback)) { uint32_t media_source_ssrc = packet_information.transport_feedback->media_ssrc(); - if (media_source_ssrc == main_ssrc_ || + if (media_source_ssrc == local_media_ssrc() || registered_ssrcs_.contains(media_source_ssrc)) { transport_feedback_observer_->OnTransportFeedback( *packet_information.transport_feedback); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h index f45b783701..cdf4cbadf8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h @@ -376,7 +376,6 @@ class RTCPReceiver final { Clock* const clock_; const bool receiver_only_; ModuleRtpRtcp* const rtp_rtcp_; - const uint32_t main_ssrc_; // The set of registered local SSRCs. 
RegisteredSsrcs registered_ssrcs_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc index a07e5aa641..7983371097 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc @@ -16,6 +16,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/rtp_headers.h" @@ -354,13 +355,10 @@ void RTCPSender::SetRemoteSSRC(uint32_t ssrc) { remote_ssrc_ = ssrc; } -int32_t RTCPSender::SetCNAME(const char* c_name) { - if (!c_name) - return -1; - - RTC_DCHECK_LT(strlen(c_name), RTCP_CNAME_SIZE); +int32_t RTCPSender::SetCNAME(absl::string_view c_name) { + RTC_DCHECK_LT(c_name.size(), RTCP_CNAME_SIZE); MutexLock lock(&mutex_rtcp_sender_); - cname_ = c_name; + cname_ = std::string(c_name); return 0; } @@ -484,7 +482,9 @@ void RTCPSender::BuildRR(const RtcpContext& ctx, PacketSender& sender) { rtcp::ReceiverReport report; report.SetSenderSsrc(ssrc_); report.SetReportBlocks(CreateReportBlocks(ctx.feedback_state_)); - sender.AppendPacket(report); + if (method_ == RtcpMode::kCompound || !report.report_blocks().empty()) { + sender.AppendPacket(report); + } } void RTCPSender::BuildPLI(const RtcpContext& ctx, PacketSender& sender) { @@ -713,9 +713,6 @@ absl::optional RTCPSender::ComputeCompoundRTCPPacket( } } - if (packet_type_counter_.first_packet_time_ms == -1) - packet_type_counter_.first_packet_time_ms = clock_->TimeInMilliseconds(); - // We need to send our NTP even if we haven't received any reports. RtcpContext context(feedback_state, nack_size, nack_list, clock_->CurrentTime()); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.h index 00b58b400f..8f51e7947d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.h @@ -17,6 +17,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/call/transport.h" #include "api/units/time_delta.h" @@ -137,7 +138,8 @@ class RTCPSender final { void SetRemoteSSRC(uint32_t ssrc) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - int32_t SetCNAME(const char* cName) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); + int32_t SetCNAME(absl::string_view cName) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); bool TimeToSendRTCPReport(bool sendKeyframeBeforeRTP = false) const RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver.cc index 41fa5e6206..f265bd5825 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver.cc @@ -14,12 +14,12 @@ #include #include +#include "absl/cleanup/cleanup.h" #include "api/units/timestamp.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" -#include "rtc_base/task_utils/to_queued_task.h" -#include "rtc_base/time_utils.h" +#include "system_wrappers/include/clock.h" namespace webrtc { @@ -34,21 +34,19 @@ RtcpTransceiver::~RtcpTransceiver() { if (!rtcp_transceiver_) return; auto rtcp_transceiver = 
std::move(rtcp_transceiver_); - task_queue_->PostTask( - ToQueuedTask([rtcp_transceiver = std::move(rtcp_transceiver)] { - rtcp_transceiver->StopPeriodicTask(); - })); + task_queue_->PostTask([rtcp_transceiver = std::move(rtcp_transceiver)] { + rtcp_transceiver->StopPeriodicTask(); + }); RTC_DCHECK(!rtcp_transceiver_); } -void RtcpTransceiver::Stop(std::function on_destroyed) { +void RtcpTransceiver::Stop(absl::AnyInvocable on_destroyed) { RTC_DCHECK(rtcp_transceiver_); auto rtcp_transceiver = std::move(rtcp_transceiver_); - task_queue_->PostTask(ToQueuedTask( - [rtcp_transceiver = std::move(rtcp_transceiver)] { - rtcp_transceiver->StopPeriodicTask(); - }, - std::move(on_destroyed))); + absl::Cleanup cleanup = std::move(on_destroyed); + task_queue_->PostTask( + [rtcp_transceiver = std::move(rtcp_transceiver), + cleanup = std::move(cleanup)] { rtcp_transceiver->StopPeriodicTask(); }); RTC_DCHECK(!rtcp_transceiver_); } @@ -57,28 +55,28 @@ void RtcpTransceiver::AddMediaReceiverRtcpObserver( MediaReceiverRtcpObserver* observer) { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - task_queue_->PostTask(ToQueuedTask([ptr, remote_ssrc, observer] { + task_queue_->PostTask([ptr, remote_ssrc, observer] { ptr->AddMediaReceiverRtcpObserver(remote_ssrc, observer); - })); + }); } void RtcpTransceiver::RemoveMediaReceiverRtcpObserver( uint32_t remote_ssrc, MediaReceiverRtcpObserver* observer, - std::function on_removed) { + absl::AnyInvocable on_removed) { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - auto remove = [ptr, remote_ssrc, observer] { - ptr->RemoveMediaReceiverRtcpObserver(remote_ssrc, observer); - }; - task_queue_->PostTask(ToQueuedTask(std::move(remove), std::move(on_removed))); + absl::Cleanup cleanup = std::move(on_removed); + task_queue_->PostTask( + [ptr, remote_ssrc, observer, cleanup = std::move(cleanup)] { + ptr->RemoveMediaReceiverRtcpObserver(remote_ssrc, observer); + }); } void RtcpTransceiver::SetReadyToSend(bool ready) { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - task_queue_->PostTask( - ToQueuedTask([ptr, ready] { ptr->SetReadyToSend(ready); })); + task_queue_->PostTask([ptr, ready] { ptr->SetReadyToSend(ready); }); } void RtcpTransceiver::ReceivePacket(rtc::CopyOnWriteBuffer packet) { @@ -86,29 +84,28 @@ void RtcpTransceiver::ReceivePacket(rtc::CopyOnWriteBuffer packet) { RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); Timestamp now = clock_->CurrentTime(); task_queue_->PostTask( - ToQueuedTask([ptr, packet, now] { ptr->ReceivePacket(packet, now); })); + [ptr, packet, now] { ptr->ReceivePacket(packet, now); }); } void RtcpTransceiver::SendCompoundPacket() { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - task_queue_->PostTask(ToQueuedTask([ptr] { ptr->SendCompoundPacket(); })); + task_queue_->PostTask([ptr] { ptr->SendCompoundPacket(); }); } void RtcpTransceiver::SetRemb(int64_t bitrate_bps, std::vector ssrcs) { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - task_queue_->PostTask( - ToQueuedTask([ptr, bitrate_bps, ssrcs = std::move(ssrcs)]() mutable { - ptr->SetRemb(bitrate_bps, std::move(ssrcs)); - })); + task_queue_->PostTask([ptr, bitrate_bps, ssrcs = std::move(ssrcs)]() mutable { + ptr->SetRemb(bitrate_bps, std::move(ssrcs)); + }); } void RtcpTransceiver::UnsetRemb() { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - 
task_queue_->PostTask(ToQueuedTask([ptr] { ptr->UnsetRemb(); })); + task_queue_->PostTask([ptr] { ptr->UnsetRemb(); }); } void RtcpTransceiver::SendCombinedRtcpPacket( @@ -116,26 +113,25 @@ void RtcpTransceiver::SendCombinedRtcpPacket( RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); task_queue_->PostTask( - ToQueuedTask([ptr, rtcp_packets = std::move(rtcp_packets)]() mutable { + [ptr, rtcp_packets = std::move(rtcp_packets)]() mutable { ptr->SendCombinedRtcpPacket(std::move(rtcp_packets)); - })); + }); } void RtcpTransceiver::SendNack(uint32_t ssrc, std::vector sequence_numbers) { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - task_queue_->PostTask(ToQueuedTask( + task_queue_->PostTask( [ptr, ssrc, sequence_numbers = std::move(sequence_numbers)]() mutable { ptr->SendNack(ssrc, std::move(sequence_numbers)); - })); + }); } void RtcpTransceiver::SendPictureLossIndication(uint32_t ssrc) { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - task_queue_->PostTask( - ToQueuedTask([ptr, ssrc] { ptr->SendPictureLossIndication(ssrc); })); + task_queue_->PostTask([ptr, ssrc] { ptr->SendPictureLossIndication(ssrc); }); } void RtcpTransceiver::SendFullIntraRequest(std::vector ssrcs) { @@ -146,10 +142,9 @@ void RtcpTransceiver::SendFullIntraRequest(std::vector ssrcs, bool new_request) { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - task_queue_->PostTask( - ToQueuedTask([ptr, ssrcs = std::move(ssrcs), new_request] { - ptr->SendFullIntraRequest(ssrcs, new_request); - })); + task_queue_->PostTask([ptr, ssrcs = std::move(ssrcs), new_request] { + ptr->SendFullIntraRequest(ssrcs, new_request); + }); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver.h index 20fda94a85..22fcc73337 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver.h @@ -11,11 +11,11 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_TRANSCEIVER_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_TRANSCEIVER_H_ -#include #include #include #include +#include "absl/functional/any_invocable.h" #include "api/task_queue/task_queue_base.h" #include "modules/rtp_rtcp/source/rtcp_transceiver_config.h" #include "modules/rtp_rtcp/source/rtcp_transceiver_impl.h" @@ -44,7 +44,7 @@ class RtcpTransceiver : public RtcpFeedbackSenderInterface { // Note that interfaces provided in constructor or registered with AddObserver // still might be used by the transceiver on the task queue // until `on_destroyed` runs. - void Stop(std::function on_destroyed); + void Stop(absl::AnyInvocable on_destroyed); // Registers observer to be notified about incoming rtcp packets. // Calls to observer will be done on the `config.task_queue`. @@ -52,9 +52,10 @@ class RtcpTransceiver : public RtcpFeedbackSenderInterface { MediaReceiverRtcpObserver* observer); // Deregisters the observer. Might return before observer is deregistered. // Runs `on_removed` when observer is deregistered. - void RemoveMediaReceiverRtcpObserver(uint32_t remote_ssrc, - MediaReceiverRtcpObserver* observer, - std::function on_removed); + void RemoveMediaReceiverRtcpObserver( + uint32_t remote_ssrc, + MediaReceiverRtcpObserver* observer, + absl::AnyInvocable on_removed); // Enables/disables sending rtcp packets eventually. 
// Packets may be sent after the SetReadyToSend(false) returns, but no new diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.cc index 02c0fef9f9..7acaa0f600 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.cc @@ -23,13 +23,11 @@ RtcpTransceiverConfig& RtcpTransceiverConfig::operator=( RtcpTransceiverConfig::~RtcpTransceiverConfig() = default; bool RtcpTransceiverConfig::Validate() const { - if (feedback_ssrc == 0) + if (feedback_ssrc == 0) { RTC_LOG(LS_WARNING) << debug_id << "Ssrc 0 may be treated by some implementation as invalid."; - if (cname.empty()) - RTC_LOG(LS_WARNING) << debug_id << "missing cname for ssrc " - << feedback_ssrc; + } if (cname.size() > 255) { RTC_LOG(LS_ERROR) << debug_id << "cname can be maximum 255 characters."; return false; @@ -44,17 +42,21 @@ bool RtcpTransceiverConfig::Validate() const { << " more than " << IP_PACKET_SIZE << " is unsupported."; return false; } + if (clock == nullptr) { + RTC_LOG(LS_ERROR) << debug_id << "clock must be set"; + return false; + } if (!outgoing_transport) { RTC_LOG(LS_ERROR) << debug_id << "outgoing transport must be set"; return false; } - if (initial_report_delay_ms < 0) { - RTC_LOG(LS_ERROR) << debug_id << "delay " << initial_report_delay_ms + if (initial_report_delay < TimeDelta::Zero()) { + RTC_LOG(LS_ERROR) << debug_id << "delay " << initial_report_delay.ms() << "ms before first report shouldn't be negative."; return false; } - if (report_period_ms <= 0) { - RTC_LOG(LS_ERROR) << debug_id << "period " << report_period_ms + if (report_period <= TimeDelta::Zero()) { + RTC_LOG(LS_ERROR) << debug_id << "period " << report_period.ms() << "ms between reports should be positive."; return false; } @@ -67,16 +69,11 @@ bool RtcpTransceiverConfig::Validate() const { RTC_LOG(LS_ERROR) << debug_id << "unsupported rtcp mode"; return false; } - if (non_sender_rtt_measurement && !network_link_observer) + if (non_sender_rtt_measurement && !network_link_observer) { RTC_LOG(LS_WARNING) << debug_id << "Enabled special feature to calculate rtt, but no " "rtt observer is provided."; - // TODO(danilchap): Remove or update the warning when RtcpTransceiver supports - // send-only sessions. - if (receive_statistics == nullptr) - RTC_LOG(LS_WARNING) - << debug_id - << "receive statistic should be set to generate rtcp report blocks."; + } return true; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h index 73b933d0a8..3122ad5c36 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h @@ -64,6 +64,48 @@ class MediaReceiverRtcpObserver { const VideoBitrateAllocation& allocation) {} }; +// Handles RTCP related messages for a single RTP stream (i.e. single SSRC) +class RtpStreamRtcpHandler { + public: + virtual ~RtpStreamRtcpHandler() = default; + + // Statistic about sent RTP packets to propagate to RTCP sender report. 
+ class RtpStats { + public: + RtpStats() = default; + RtpStats(const RtpStats&) = default; + RtpStats& operator=(const RtpStats&) = default; + ~RtpStats() = default; + + size_t num_sent_packets() const { return num_sent_packets_; } + size_t num_sent_bytes() const { return num_sent_bytes_; } + Timestamp last_capture_time() const { return last_capture_time_; } + uint32_t last_rtp_timestamp() const { return last_rtp_timestamp_; } + int last_clock_rate() const { return last_clock_rate_; } + + void set_num_sent_packets(size_t v) { num_sent_packets_ = v; } + void set_num_sent_bytes(size_t v) { num_sent_bytes_ = v; } + void set_last_capture_time(Timestamp v) { last_capture_time_ = v; } + void set_last_rtp_timestamp(uint32_t v) { last_rtp_timestamp_ = v; } + void set_last_clock_rate(int v) { last_clock_rate_ = v; } + + private: + size_t num_sent_packets_ = 0; + size_t num_sent_bytes_ = 0; + Timestamp last_capture_time_ = Timestamp::Zero(); + uint32_t last_rtp_timestamp_ = 0; + int last_clock_rate_ = 90'000; + }; + virtual RtpStats SentStats() = 0; + + virtual void OnNack(uint32_t sender_ssrc, + rtc::ArrayView sequence_numbers) {} + virtual void OnFir(uint32_t sender_ssrc) {} + virtual void OnPli(uint32_t sender_ssrc) {} + virtual void OnReportBlock(uint32_t sender_ssrc, + const rtcp::ReportBlock& report_block) {} +}; + struct RtcpTransceiverConfig { RtcpTransceiverConfig(); RtcpTransceiverConfig(const RtcpTransceiverConfig&); @@ -114,10 +156,10 @@ struct RtcpTransceiverConfig { // Initial state if `outgoing_transport` ready to accept packets. bool initial_ready_to_send = true; // Delay before 1st periodic compound packet. - int initial_report_delay_ms = 500; + TimeDelta initial_report_delay = TimeDelta::Millis(500); // Period between periodic compound packets. - int report_period_ms = 1000; + TimeDelta report_period = TimeDelta::Seconds(1); // // Flags for features and experiments. @@ -127,6 +169,15 @@ struct RtcpTransceiverConfig { // https://tools.ietf.org/html/rfc3611#section-4.4 and #section-4.5 bool non_sender_rtt_measurement = false; + // Reply to incoming RRTR messages so that remote endpoint may estimate RTT as + // non-sender as described in https://tools.ietf.org/html/rfc3611#section-4.4 + // and #section-4.5 + bool reply_to_non_sender_rtt_measurement = true; + + // Reply to incoming RRTR messages multiple times, one per sender SSRC, to + // support clients that calculate and process RTT per sender SSRC. + bool reply_to_non_sender_rtt_mesaurments_on_all_ssrcs = true; + // Allows a REMB message to be sent immediately when SetRemb is called without // having to wait for the next compount message to be sent. 
bool send_remb_on_change = false; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc index c7e11981ed..bb4f96b970 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc @@ -10,6 +10,7 @@ #include "modules/rtp_rtcp/source/rtcp_transceiver_impl.h" +#include #include #include "absl/algorithm/container.h" @@ -31,10 +32,11 @@ #include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" #include "modules/rtp_rtcp/source/time_util.h" #include "rtc_base/checks.h" +#include "rtc_base/containers/flat_map.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/divide_round.h" #include "rtc_base/task_utils/repeating_task.h" -#include "rtc_base/task_utils/to_queued_task.h" -#include "rtc_base/time_utils.h" +#include "system_wrappers/include/clock.h" namespace webrtc { namespace { @@ -52,6 +54,14 @@ struct RtcpTransceiverImpl::RemoteSenderState { std::vector observers; }; +struct RtcpTransceiverImpl::LocalSenderState { + uint32_t ssrc; + size_t last_num_sent_bytes = 0; + // Sequence number of the last FIR message per sender SSRC. + flat_map last_fir; + RtpStreamRtcpHandler* handler = nullptr; +}; + // Helper to put several RTCP packets into lower layer datagram composing // Compound or Reduced-Size RTCP packet, as defined by RFC 5506 section 2. // TODO(danilchap): When in compound mode and packets are so many that several @@ -92,7 +102,7 @@ RtcpTransceiverImpl::RtcpTransceiverImpl(const RtcpTransceiverConfig& config) : config_(config), ready_to_send_(config.initial_ready_to_send) { RTC_CHECK(config_.Validate()); if (ready_to_send_ && config_.schedule_periodic_compound_packets) { - SchedulePeriodicCompoundPackets(config_.initial_report_delay_ms); + SchedulePeriodicCompoundPackets(config_.initial_report_delay); } } @@ -101,6 +111,11 @@ RtcpTransceiverImpl::~RtcpTransceiverImpl() = default; void RtcpTransceiverImpl::AddMediaReceiverRtcpObserver( uint32_t remote_ssrc, MediaReceiverRtcpObserver* observer) { + if (config_.receive_statistics == nullptr && remote_senders_.empty()) { + RTC_LOG(LS_WARNING) << config_.debug_id + << "receive statistic is not set. 
RTCP report blocks " + "will not be generated."; + } auto& stored = remote_senders_[remote_ssrc].observers; RTC_DCHECK(!absl::c_linear_search(stored, observer)); stored.push_back(observer); @@ -119,13 +134,39 @@ void RtcpTransceiverImpl::RemoveMediaReceiverRtcpObserver( stored.erase(it); } +bool RtcpTransceiverImpl::AddMediaSender(uint32_t local_ssrc, + RtpStreamRtcpHandler* handler) { + RTC_DCHECK(handler != nullptr); + LocalSenderState state; + state.ssrc = local_ssrc; + state.handler = handler; + local_senders_.push_back(state); + auto it = std::prev(local_senders_.end()); + auto [unused, inserted] = local_senders_by_ssrc_.emplace(local_ssrc, it); + if (!inserted) { + local_senders_.pop_back(); + return false; + } + return true; +} + +bool RtcpTransceiverImpl::RemoveMediaSender(uint32_t local_ssrc) { + auto index_it = local_senders_by_ssrc_.find(local_ssrc); + if (index_it == local_senders_by_ssrc_.end()) { + return false; + } + local_senders_.erase(index_it->second); + local_senders_by_ssrc_.erase(index_it); + return true; +} + void RtcpTransceiverImpl::SetReadyToSend(bool ready) { if (config_.schedule_periodic_compound_packets) { if (ready_to_send_ && !ready) periodic_task_handle_.Stop(); if (!ready_to_send_ && ready) // Restart periodic sending. - SchedulePeriodicCompoundPackets(config_.report_period_ms / 2); + SchedulePeriodicCompoundPackets(config_.report_period / 2); } ready_to_send_ = ready; } @@ -279,6 +320,7 @@ void RtcpTransceiverImpl::HandleSenderReport( remote_senders_[sender_report.sender_ssrc()]; remote_sender.last_received_sender_report = {{now, sender_report.ntp()}}; const auto& received_report_blocks = sender_report.report_blocks(); + CallbackOnReportBlocks(sender_report.sender_ssrc(), received_report_blocks); report_blocks.insert(report_blocks.end(), received_report_blocks.begin(), received_report_blocks.end()); @@ -295,32 +337,116 @@ void RtcpTransceiverImpl::HandleReceiverReport( return; } const auto& received_report_blocks = receiver_report.report_blocks(); + CallbackOnReportBlocks(receiver_report.sender_ssrc(), received_report_blocks); report_blocks.insert(report_blocks.end(), received_report_blocks.begin(), received_report_blocks.end()); } +void RtcpTransceiverImpl::CallbackOnReportBlocks( + uint32_t sender_ssrc, + rtc::ArrayView report_blocks) { + if (local_senders_.empty()) { + return; + } + for (const rtcp::ReportBlock& block : report_blocks) { + auto sender_it = local_senders_by_ssrc_.find(block.source_ssrc()); + if (sender_it != local_senders_by_ssrc_.end()) { + sender_it->second->handler->OnReportBlock(sender_ssrc, block); + } + } +} + void RtcpTransceiverImpl::HandlePayloadSpecificFeedback( const rtcp::CommonHeader& rtcp_packet_header, Timestamp now) { - // Remb is the only payload specific message handled right now. 
- if (rtcp_packet_header.fmt() != rtcp::Psfb::kAfbMessageType || - config_.network_link_observer == nullptr) { + switch (rtcp_packet_header.fmt()) { + case rtcp::Fir::kFeedbackMessageType: + HandleFir(rtcp_packet_header); + break; + case rtcp::Pli::kFeedbackMessageType: + HandlePli(rtcp_packet_header); + break; + case rtcp::Psfb::kAfbMessageType: + HandleRemb(rtcp_packet_header, now); + break; + } +} + +void RtcpTransceiverImpl::HandleFir( + const rtcp::CommonHeader& rtcp_packet_header) { + rtcp::Fir fir; + if (local_senders_.empty() || !fir.Parse(rtcp_packet_header)) { + return; + } + for (const rtcp::Fir::Request& r : fir.requests()) { + auto it = local_senders_by_ssrc_.find(r.ssrc); + if (it == local_senders_by_ssrc_.end()) { + continue; + } + auto [fir_it, is_new] = + it->second->last_fir.emplace(fir.sender_ssrc(), r.seq_nr); + if (is_new || fir_it->second != r.seq_nr) { + it->second->handler->OnFir(fir.sender_ssrc()); + fir_it->second = r.seq_nr; + } + } +} + +void RtcpTransceiverImpl::HandlePli( + const rtcp::CommonHeader& rtcp_packet_header) { + rtcp::Pli pli; + if (local_senders_.empty() || !pli.Parse(rtcp_packet_header)) { return; } + auto it = local_senders_by_ssrc_.find(pli.media_ssrc()); + if (it != local_senders_by_ssrc_.end()) { + it->second->handler->OnPli(pli.sender_ssrc()); + } +} + +void RtcpTransceiverImpl::HandleRemb( + const rtcp::CommonHeader& rtcp_packet_header, + Timestamp now) { rtcp::Remb remb; - if (remb.Parse(rtcp_packet_header)) { - config_.network_link_observer->OnReceiverEstimatedMaxBitrate( - now, DataRate::BitsPerSec(remb.bitrate_bps())); + if (config_.network_link_observer == nullptr || + !remb.Parse(rtcp_packet_header)) { + return; } + config_.network_link_observer->OnReceiverEstimatedMaxBitrate( + now, DataRate::BitsPerSec(remb.bitrate_bps())); } void RtcpTransceiverImpl::HandleRtpFeedback( const rtcp::CommonHeader& rtcp_packet_header, Timestamp now) { - // Transport feedback is the only message handled right now. 
- if (rtcp_packet_header.fmt() != - rtcp::TransportFeedback::kFeedbackMessageType || - config_.network_link_observer == nullptr) { + switch (rtcp_packet_header.fmt()) { + case rtcp::Nack::kFeedbackMessageType: + HandleNack(rtcp_packet_header); + break; + case rtcp::TransportFeedback::kFeedbackMessageType: + HandleTransportFeedback(rtcp_packet_header, now); + break; + } +} + +void RtcpTransceiverImpl::HandleNack( + const rtcp::CommonHeader& rtcp_packet_header) { + rtcp::Nack nack; + if (local_senders_.empty() || !nack.Parse(rtcp_packet_header)) { + return; + } + auto it = local_senders_by_ssrc_.find(nack.media_ssrc()); + if (it != local_senders_by_ssrc_.end()) { + it->second->handler->OnNack(nack.sender_ssrc(), nack.packet_ids()); + } +} + +void RtcpTransceiverImpl::HandleTransportFeedback( + const rtcp::CommonHeader& rtcp_packet_header, + Timestamp now) { + RTC_DCHECK_EQ(rtcp_packet_header.fmt(), + rtcp::TransportFeedback::kFeedbackMessageType); + if (config_.network_link_observer == nullptr) { return; } rtcp::TransportFeedback feedback; @@ -336,6 +462,14 @@ void RtcpTransceiverImpl::HandleExtendedReports( if (!extended_reports.Parse(rtcp_packet_header)) return; + if (config_.reply_to_non_sender_rtt_measurement && extended_reports.rrtr()) { + RrtrTimes& rrtr = received_rrtrs_[extended_reports.sender_ssrc()]; + rrtr.received_remote_mid_ntp_time = + CompactNtp(extended_reports.rrtr()->ntp()); + rrtr.local_receive_mid_ntp_time = + CompactNtp(config_.clock->ConvertTimestampToNtpTime(now)); + } + if (extended_reports.dlrr()) HandleDlrr(extended_reports.dlrr(), now); @@ -358,8 +492,8 @@ void RtcpTransceiverImpl::HandleDlrr(const rtcp::Dlrr& dlrr, Timestamp now) { if (rti.ssrc != config_.feedback_ssrc) continue; uint32_t rtt_ntp = receive_time_ntp - rti.delay_since_last_rr - rti.last_rr; - int64_t rtt_ms = CompactNtpRttToMs(rtt_ntp); - config_.network_link_observer->OnRttUpdate(now, TimeDelta::Millis(rtt_ms)); + TimeDelta rtt = CompactNtpRttToTimeDelta(rtt_ntp); + config_.network_link_observer->OnRttUpdate(now, rtt); } } @@ -384,7 +518,7 @@ void RtcpTransceiverImpl::ProcessReportBlocks( uint32_t rtt_ntp = receive_time_ntp - report_block.delay_since_last_sr() - report_block.last_sr(); - rtt_sum += TimeDelta::Millis(CompactNtpRttToMs(rtt_ntp)); + rtt_sum += CompactNtpRttToTimeDelta(rtt_ntp); ++num_rtts; } // For backward compatibility, do not report rtt based on report blocks to the @@ -428,54 +562,223 @@ void RtcpTransceiverImpl::ReschedulePeriodicCompoundPackets() { return; periodic_task_handle_.Stop(); RTC_DCHECK(ready_to_send_); - SchedulePeriodicCompoundPackets(config_.report_period_ms); + SchedulePeriodicCompoundPackets(config_.report_period); } -void RtcpTransceiverImpl::SchedulePeriodicCompoundPackets(int64_t delay_ms) { +void RtcpTransceiverImpl::SchedulePeriodicCompoundPackets(TimeDelta delay) { periodic_task_handle_ = RepeatingTaskHandle::DelayedStart( - config_.task_queue, TimeDelta::Millis(delay_ms), [this] { + config_.task_queue, delay, + [this] { RTC_DCHECK(config_.schedule_periodic_compound_packets); RTC_DCHECK(ready_to_send_); SendPeriodicCompoundPacket(); - return TimeDelta::Millis(config_.report_period_ms); - }); + return config_.report_period; + }, + TaskQueueBase::DelayPrecision::kLow, config_.clock); } -void RtcpTransceiverImpl::CreateCompoundPacket(PacketSender* sender) { - RTC_DCHECK(sender->IsEmpty()); - const uint32_t sender_ssrc = config_.feedback_ssrc; - Timestamp now = config_.clock->CurrentTime(); - rtcp::ReceiverReport receiver_report; - 
receiver_report.SetSenderSsrc(sender_ssrc); - receiver_report.SetReportBlocks(CreateReportBlocks(now)); - if (config_.rtcp_mode == RtcpMode::kCompound || - !receiver_report.report_blocks().empty()) { - sender->AppendPacket(receiver_report); +std::vector RtcpTransceiverImpl::FillReports( + Timestamp now, + ReservedBytes reserved, + PacketSender& rtcp_sender) { + // Sender/receiver reports should be first in the RTCP packet. + RTC_DCHECK(rtcp_sender.IsEmpty()); + + size_t available_bytes = config_.max_packet_size; + if (reserved.per_packet > available_bytes) { + // Because reserved.per_packet is unsigned, substracting would underflow and + // will not produce desired result. + available_bytes = 0; + } else { + available_bytes -= reserved.per_packet; } - if (!config_.cname.empty() && !sender->IsEmpty()) { - rtcp::Sdes sdes; - bool added = sdes.AddCName(config_.feedback_ssrc, config_.cname); - RTC_DCHECK(added) << "Failed to add cname " << config_.cname - << " to rtcp sdes packet."; - sender->AppendPacket(sdes); + const size_t sender_report_size_bytes = 28 + reserved.per_sender; + const size_t full_sender_report_size_bytes = + sender_report_size_bytes + + rtcp::SenderReport::kMaxNumberOfReportBlocks * rtcp::ReportBlock::kLength; + size_t max_full_sender_reports = + available_bytes / full_sender_report_size_bytes; + size_t max_report_blocks = + max_full_sender_reports * rtcp::SenderReport::kMaxNumberOfReportBlocks; + size_t available_bytes_for_last_sender_report = + available_bytes - max_full_sender_reports * full_sender_report_size_bytes; + if (available_bytes_for_last_sender_report >= sender_report_size_bytes) { + max_report_blocks += + (available_bytes_for_last_sender_report - sender_report_size_bytes) / + rtcp::ReportBlock::kLength; } - if (remb_) { - remb_->SetSenderSsrc(sender_ssrc); - sender->AppendPacket(*remb_); + + std::vector report_blocks = + CreateReportBlocks(now, max_report_blocks); + // Previous calculation of max number of sender report made space for max + // number of report blocks per sender report, but if number of report blocks + // is low, more sender reports may fit in. + size_t max_sender_reports = + (available_bytes - report_blocks.size() * rtcp::ReportBlock::kLength) / + sender_report_size_bytes; + + auto last_handled_sender_it = local_senders_.end(); + auto report_block_it = report_blocks.begin(); + std::vector sender_ssrcs; + for (auto it = local_senders_.begin(); + it != local_senders_.end() && sender_ssrcs.size() < max_sender_reports; + ++it) { + LocalSenderState& rtp_sender = *it; + RtpStreamRtcpHandler::RtpStats stats = rtp_sender.handler->SentStats(); + + if (stats.num_sent_bytes() < rtp_sender.last_num_sent_bytes) { + RTC_LOG(LS_ERROR) << "Inconsistent SR for SSRC " << rtp_sender.ssrc + << ". Number of total sent bytes decreased."; + rtp_sender.last_num_sent_bytes = 0; + } + if (stats.num_sent_bytes() == rtp_sender.last_num_sent_bytes) { + // Skip because no RTP packet was send for this SSRC since last report. 
+ continue; + } + rtp_sender.last_num_sent_bytes = stats.num_sent_bytes(); + + last_handled_sender_it = it; + rtcp::SenderReport sender_report; + sender_report.SetSenderSsrc(rtp_sender.ssrc); + sender_report.SetPacketCount(stats.num_sent_packets()); + sender_report.SetOctetCount(stats.num_sent_bytes()); + sender_report.SetNtp(config_.clock->ConvertTimestampToNtpTime(now)); + RTC_DCHECK_GE(now, stats.last_capture_time()); + sender_report.SetRtpTimestamp( + stats.last_rtp_timestamp() + + ((now - stats.last_capture_time()) * stats.last_clock_rate()) + .seconds()); + if (report_block_it != report_blocks.end()) { + size_t num_blocks = + std::min(rtcp::SenderReport::kMaxNumberOfReportBlocks, + report_blocks.end() - report_block_it); + std::vector sub_blocks(report_block_it, + report_block_it + num_blocks); + sender_report.SetReportBlocks(std::move(sub_blocks)); + report_block_it += num_blocks; + } + rtcp_sender.AppendPacket(sender_report); + sender_ssrcs.push_back(rtp_sender.ssrc); + } + if (last_handled_sender_it != local_senders_.end()) { + // Rotate `local_senders_` so that the 1st unhandled sender become first in + // the list, and thus will be first to generate rtcp sender report for on + // the next call to `FillReports`. + local_senders_.splice(local_senders_.end(), local_senders_, + local_senders_.begin(), + std::next(last_handled_sender_it)); + } + + // Calculcate number of receiver reports to attach remaining report blocks to. + size_t num_receiver_reports = + DivideRoundUp(report_blocks.end() - report_block_it, + rtcp::ReceiverReport::kMaxNumberOfReportBlocks); + + // In compound mode each RTCP packet has to start with a sender or receiver + // report. + if (config_.rtcp_mode == RtcpMode::kCompound && sender_ssrcs.empty() && + num_receiver_reports == 0) { + num_receiver_reports = 1; + } + + uint32_t sender_ssrc = + sender_ssrcs.empty() ? config_.feedback_ssrc : sender_ssrcs.front(); + for (size_t i = 0; i < num_receiver_reports; ++i) { + rtcp::ReceiverReport receiver_report; + receiver_report.SetSenderSsrc(sender_ssrc); + size_t num_blocks = + std::min(rtcp::ReceiverReport::kMaxNumberOfReportBlocks, + report_blocks.end() - report_block_it); + std::vector sub_blocks(report_block_it, + report_block_it + num_blocks); + receiver_report.SetReportBlocks(std::move(sub_blocks)); + report_block_it += num_blocks; + rtcp_sender.AppendPacket(receiver_report); + } + // All report blocks should be attached at this point. 
+ RTC_DCHECK_EQ(report_blocks.end() - report_block_it, 0); + return sender_ssrcs; +} + +void RtcpTransceiverImpl::CreateCompoundPacket(Timestamp now, + size_t reserved_bytes, + PacketSender& sender) { + RTC_DCHECK(sender.IsEmpty()); + ReservedBytes reserved = {.per_packet = reserved_bytes}; + absl::optional sdes; + if (!config_.cname.empty()) { + sdes.emplace(); + bool added = sdes->AddCName(config_.feedback_ssrc, config_.cname); + RTC_DCHECK(added) << "Failed to add CNAME " << config_.cname + << " to RTCP SDES packet."; + reserved.per_packet += sdes->BlockLength(); + } + if (remb_.has_value()) { + reserved.per_packet += remb_->BlockLength(); + } + absl::optional xr_with_dlrr; + if (!received_rrtrs_.empty()) { + RTC_DCHECK(config_.reply_to_non_sender_rtt_measurement); + xr_with_dlrr.emplace(); + uint32_t now_ntp = + CompactNtp(config_.clock->ConvertTimestampToNtpTime(now)); + for (const auto& [ssrc, rrtr_info] : received_rrtrs_) { + rtcp::ReceiveTimeInfo reply; + reply.ssrc = ssrc; + reply.last_rr = rrtr_info.received_remote_mid_ntp_time; + reply.delay_since_last_rr = + now_ntp - rrtr_info.local_receive_mid_ntp_time; + xr_with_dlrr->AddDlrrItem(reply); + } + if (config_.reply_to_non_sender_rtt_mesaurments_on_all_ssrcs) { + reserved.per_sender += xr_with_dlrr->BlockLength(); + } else { + reserved.per_packet += xr_with_dlrr->BlockLength(); + } } - // TODO(bugs.webrtc.org/8239): Do not send rrtr if this packet starts with - // SenderReport instead of ReceiverReport - // when RtcpTransceiver supports rtp senders. if (config_.non_sender_rtt_measurement) { - rtcp::ExtendedReports xr; + // It looks like bytes for ExtendedReport header are reserved twice, but in + // practice the same RtcpTransceiver won't both produce RRTR (i.e. it is a + // receiver-only) and reply to RRTR (i.e. remote participant is a receiver + // only). If that happen, then `reserved_bytes` would be slightly larger + // than it should, which is not an issue. + + // 4 bytes for common RTCP header + 4 bytes for the ExtenedReports header. + reserved.per_packet += (4 + 4 + rtcp::Rrtr::kLength); + } + + std::vector sender_ssrcs = FillReports(now, reserved, sender); + bool has_sender_report = !sender_ssrcs.empty(); + uint32_t sender_ssrc = + has_sender_report ? 
sender_ssrcs.front() : config_.feedback_ssrc; + if (sdes.has_value() && !sender.IsEmpty()) { + sender.AppendPacket(*sdes); + } + if (remb_.has_value()) { + remb_->SetSenderSsrc(sender_ssrc); + sender.AppendPacket(*remb_); + } + if (!has_sender_report && config_.non_sender_rtt_measurement) { + rtcp::ExtendedReports xr_with_rrtr; + xr_with_rrtr.SetSenderSsrc(config_.feedback_ssrc); rtcp::Rrtr rrtr; rrtr.SetNtp(config_.clock->ConvertTimestampToNtpTime(now)); - xr.SetRrtr(rrtr); - - xr.SetSenderSsrc(sender_ssrc); - sender->AppendPacket(xr); + xr_with_rrtr.SetRrtr(rrtr); + sender.AppendPacket(xr_with_rrtr); + } + if (xr_with_dlrr.has_value()) { + rtc::ArrayView ssrcs(&sender_ssrc, 1); + if (config_.reply_to_non_sender_rtt_mesaurments_on_all_ssrcs && + !sender_ssrcs.empty()) { + ssrcs = sender_ssrcs; + } + RTC_DCHECK(!ssrcs.empty()); + for (uint32_t ssrc : ssrcs) { + xr_with_dlrr->SetSenderSsrc(ssrc); + sender.AppendPacket(*xr_with_dlrr); + } } } @@ -483,8 +786,9 @@ void RtcpTransceiverImpl::SendPeriodicCompoundPacket() { auto send_packet = [this](rtc::ArrayView packet) { config_.outgoing_transport->SendRtcp(packet.data(), packet.size()); }; + Timestamp now = config_.clock->CurrentTime(); PacketSender sender(send_packet, config_.max_packet_size); - CreateCompoundPacket(&sender); + CreateCompoundPacket(now, /*reserved_bytes=*/0, sender); sender.Send(); } @@ -510,8 +814,11 @@ void RtcpTransceiverImpl::SendImmediateFeedback( PacketSender sender(send_packet, config_.max_packet_size); // Compound mode requires every sent rtcp packet to be compound, i.e. start // with a sender or receiver report. - if (config_.rtcp_mode == RtcpMode::kCompound) - CreateCompoundPacket(&sender); + if (config_.rtcp_mode == RtcpMode::kCompound) { + Timestamp now = config_.clock->CurrentTime(); + CreateCompoundPacket(now, /*reserved_bytes=*/rtcp_packet.BlockLength(), + sender); + } sender.AppendPacket(rtcp_packet); sender.Send(); @@ -522,14 +829,12 @@ void RtcpTransceiverImpl::SendImmediateFeedback( } std::vector RtcpTransceiverImpl::CreateReportBlocks( - Timestamp now) { + Timestamp now, + size_t num_max_blocks) { if (!config_.receive_statistics) return {}; - // TODO(danilchap): Support sending more than - // `ReceiverReport::kMaxNumberOfReportBlocks` per compound rtcp packet. 
std::vector report_blocks = - config_.receive_statistics->RtcpReportBlocks( - rtcp::ReceiverReport::kMaxNumberOfReportBlocks); + config_.receive_statistics->RtcpReportBlocks(num_max_blocks); uint32_t last_sr = 0; uint32_t last_delay = 0; for (rtcp::ReportBlock& report_block : report_blocks) { @@ -541,8 +846,8 @@ std::vector RtcpTransceiverImpl::CreateReportBlocks( const SenderReportTimes& last_sender_report = *it->second.last_received_sender_report; last_sr = CompactNtp(last_sender_report.remote_sent_time); - last_delay = SaturatedUsToCompactNtp( - now.us() - last_sender_report.local_received_time.us()); + last_delay = + SaturatedToCompactNtp(now - last_sender_report.local_received_time); report_block.SetLastSr(last_sr); report_block.SetDelayLastSr(last_delay); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.h index b03db7d786..8a3333d45c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.h @@ -11,7 +11,7 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_TRANSCEIVER_IMPL_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_TRANSCEIVER_IMPL_H_ -#include +#include #include #include #include @@ -48,6 +48,11 @@ class RtcpTransceiverImpl { void RemoveMediaReceiverRtcpObserver(uint32_t remote_ssrc, MediaReceiverRtcpObserver* observer); + // Returns false on failure, e.g. when there is already an handler for the + // `local_ssrc`. + bool AddMediaSender(uint32_t local_ssrc, RtpStreamRtcpHandler* handler); + bool RemoveMediaSender(uint32_t local_ssrc); + void SetReadyToSend(bool ready); void ReceivePacket(rtc::ArrayView packet, Timestamp now); @@ -76,6 +81,14 @@ class RtcpTransceiverImpl { private: class PacketSender; struct RemoteSenderState; + struct LocalSenderState; + struct RrtrTimes { + // Received remote NTP timestamp in compact representation. + uint32_t received_remote_mid_ntp_time; + + // Local NTP time when the report was received in compact representation. + uint32_t local_receive_mid_ntp_time; + }; void HandleReceivedPacket(const rtcp::CommonHeader& rtcp_packet_header, Timestamp now, @@ -87,11 +100,20 @@ class RtcpTransceiverImpl { std::vector& report_blocks); void HandleReceiverReport(const rtcp::CommonHeader& rtcp_packet_header, std::vector& report_blocks); + void CallbackOnReportBlocks( + uint32_t sender_ssrc, + rtc::ArrayView report_blocks); void HandlePayloadSpecificFeedback( const rtcp::CommonHeader& rtcp_packet_header, Timestamp now); void HandleRtpFeedback(const rtcp::CommonHeader& rtcp_packet_header, Timestamp now); + void HandleFir(const rtcp::CommonHeader& rtcp_packet_header); + void HandlePli(const rtcp::CommonHeader& rtcp_packet_header); + void HandleRemb(const rtcp::CommonHeader& rtcp_packet_header, Timestamp now); + void HandleNack(const rtcp::CommonHeader& rtcp_packet_header); + void HandleTransportFeedback(const rtcp::CommonHeader& rtcp_packet_header, + Timestamp now); void HandleExtendedReports(const rtcp::CommonHeader& rtcp_packet_header, Timestamp now); // Extended Reports blocks handlers. @@ -103,15 +125,31 @@ class RtcpTransceiverImpl { rtc::ArrayView report_blocks); void ReschedulePeriodicCompoundPackets(); - void SchedulePeriodicCompoundPackets(int64_t delay_ms); + void SchedulePeriodicCompoundPackets(TimeDelta delay); + // Appends RTCP sender and receiver reports to the `sender`. 
+ // Both sender and receiver reports may have attached report blocks. + // Uses up to `config_.max_packet_size - reserved_bytes.per_packet` + // Returns list of sender ssrc in sender reports. + struct ReservedBytes { + size_t per_packet = 0; + size_t per_sender = 0; + }; + std::vector FillReports(Timestamp now, + ReservedBytes reserved_bytes, + PacketSender& rtcp_sender); + // Creates compound RTCP packet, as defined in // https://tools.ietf.org/html/rfc5506#section-2 - void CreateCompoundPacket(PacketSender* sender); + void CreateCompoundPacket(Timestamp now, + size_t reserved_bytes, + PacketSender& rtcp_sender); + // Sends RTCP packets. void SendPeriodicCompoundPacket(); void SendImmediateFeedback(const rtcp::RtcpPacket& rtcp_packet); - // Generate Report Blocks to be send in Sender or Receiver Report. - std::vector CreateReportBlocks(Timestamp now); + // Generate Report Blocks to be send in Sender or Receiver Reports. + std::vector CreateReportBlocks(Timestamp now, + size_t num_max_blocks); const RtcpTransceiverConfig config_; @@ -120,6 +158,10 @@ class RtcpTransceiverImpl { // TODO(danilchap): Remove entries from remote_senders_ that are no longer // needed. flat_map remote_senders_; + std::list local_senders_; + flat_map::iterator> + local_senders_by_ssrc_; + flat_map received_rrtrs_; RepeatingTaskHandle periodic_task_handle_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.cc index 86f48582a7..cc8d1bff34 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.cc @@ -37,12 +37,6 @@ static const size_t kNalHeaderSize = 1; static const size_t kFuAHeaderSize = 2; static const size_t kLengthFieldSize = 2; -// Bit masks for FU (A and B) indicators. -enum NalDefs : uint8_t { kFBit = 0x80, kNriMask = 0x60, kTypeMask = 0x1F }; - -// Bit masks for FU (A and B) headers. -enum FuDefs : uint8_t { kSBit = 0x80, kEBit = 0x40, kRBit = 0x20 }; - } // namespace RtpPacketizerH264::RtpPacketizerH264(rtc::ArrayView payload, @@ -267,7 +261,8 @@ void RtpPacketizerH264::NextAggregatePacket(RtpPacketToSend* rtp_packet) { PacketUnit* packet = &packets_.front(); RTC_CHECK(packet->first_fragment); // STAP-A NALU header. - buffer[0] = (packet->header & (kFBit | kNriMask)) | H264::NaluType::kStapA; + buffer[0] = + (packet->header & (kH264FBit | kH264NriMask)) | H264::NaluType::kStapA; size_t index = kNalHeaderSize; bool is_last_fragment = packet->last_fragment; while (packet->aggregated) { @@ -296,13 +291,13 @@ void RtpPacketizerH264::NextFragmentPacket(RtpPacketToSend* rtp_packet) { // We do not send original NALU header, so it will be replaced by the // FU indicator header of the first packet. uint8_t fu_indicator = - (packet->header & (kFBit | kNriMask)) | H264::NaluType::kFuA; + (packet->header & (kH264FBit | kH264NriMask)) | H264::NaluType::kFuA; uint8_t fu_header = 0; // S | E | R | 5 bit type. - fu_header |= (packet->first_fragment ? kSBit : 0); - fu_header |= (packet->last_fragment ? kEBit : 0); - uint8_t type = packet->header & kTypeMask; + fu_header |= (packet->first_fragment ? kH264SBit : 0); + fu_header |= (packet->last_fragment ? 
kH264EBit : 0); + uint8_t type = packet->header & kH264TypeMask; fu_header |= type; rtc::ArrayView fragment = packet->source_fragment; uint8_t* buffer = diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h index f658594243..f95c3b6c6b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h @@ -23,10 +23,19 @@ #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { +// Bit masks for NAL (F, NRI, Type) indicators. +constexpr uint8_t kH264FBit = 0x80; +constexpr uint8_t kH264NriMask = 0x60; +constexpr uint8_t kH264TypeMask = 0x1F; + +// Bit masks for FU (A and B) headers. +constexpr uint8_t kH264SBit = 0x80; +constexpr uint8_t kH264EBit = 0x40; +constexpr uint8_t kH264RBit = 0x20; + class RtpPacketizerH264 : public RtpPacketizer { public: // Initialize with payload from encoder. @@ -37,6 +46,9 @@ class RtpPacketizerH264 : public RtpPacketizer { ~RtpPacketizerH264() override; + RtpPacketizerH264(const RtpPacketizerH264&) = delete; + RtpPacketizerH264& operator=(const RtpPacketizerH264&) = delete; + size_t NumPackets() const override; // Get the next payload with H264 payload header. @@ -82,8 +94,6 @@ class RtpPacketizerH264 : public RtpPacketizer { size_t num_packets_left_; std::deque> input_fragments_; std::queue packets_; - - RTC_DISALLOW_COPY_AND_ASSIGN(RtpPacketizerH264); }; } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_H264_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h index 5acd691163..fd44bd1980 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h @@ -16,7 +16,6 @@ #include "api/array_view.h" #include "modules/rtp_rtcp/source/rtp_format.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -48,6 +47,9 @@ class RtpPacketizerGeneric : public RtpPacketizer { ~RtpPacketizerGeneric() override; + RtpPacketizerGeneric(const RtpPacketizerGeneric&) = delete; + RtpPacketizerGeneric& operator=(const RtpPacketizerGeneric&) = delete; + size_t NumPackets() const override; // Get the next payload. 
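// Illustrative sketch, not part of the change above or below: the packetizer
// headers in these hunks all make the same mechanical substitution, replacing
// the removed RTC_DISALLOW_COPY_AND_ASSIGN(...) macro (and its
// rtc_base/constructor_magic.h include) with explicitly deleted special member
// functions. The `Widget` class below is a made-up example of the resulting
// idiom, not a WebRTC type.
class Widget {
 public:
  Widget() = default;

  // Non-copyable: spelled out directly rather than via the removed macro.
  Widget(const Widget&) = delete;
  Widget& operator=(const Widget&) = delete;
};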
@@ -64,8 +66,6 @@ class RtpPacketizerGeneric : public RtpPacketizer { rtc::ArrayView remaining_payload_; std::vector payload_sizes_; std::vector::const_iterator current_packet_; - - RTC_DISALLOW_COPY_AND_ASSIGN(RtpPacketizerGeneric); }; } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VIDEO_GENERIC_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h index 21009280e4..d1f569a946 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h @@ -35,7 +35,6 @@ #include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -50,6 +49,9 @@ class RtpPacketizerVp8 : public RtpPacketizer { ~RtpPacketizerVp8() override; + RtpPacketizerVp8(const RtpPacketizerVp8&) = delete; + RtpPacketizerVp8& operator=(const RtpPacketizerVp8&) = delete; + size_t NumPackets() const override; // Get the next payload with VP8 payload header. @@ -66,8 +68,6 @@ class RtpPacketizerVp8 : public RtpPacketizer { rtc::ArrayView remaining_payload_; std::vector payload_sizes_; std::vector::const_iterator current_packet_; - - RTC_DISALLOW_COPY_AND_ASSIGN(RtpPacketizerVp8); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h index 916d6577f1..3ecaa476da 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h @@ -21,7 +21,6 @@ #include "modules/rtp_rtcp/source/rtp_format_vp8.h" #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -29,6 +28,10 @@ class RtpFormatVp8TestHelper { public: RtpFormatVp8TestHelper(const RTPVideoHeaderVP8* hdr, size_t payload_len); ~RtpFormatVp8TestHelper(); + + RtpFormatVp8TestHelper(const RtpFormatVp8TestHelper&) = delete; + RtpFormatVp8TestHelper& operator=(const RtpFormatVp8TestHelper&) = delete; + void GetAllPacketsAndCheck(RtpPacketizerVp8* packetizer, rtc::ArrayView expected_sizes); @@ -46,8 +49,6 @@ class RtpFormatVp8TestHelper { const RTPVideoHeaderVP8* const hdr_info_; rtc::Buffer payload_; - - RTC_DISALLOW_COPY_AND_ASSIGN(RtpFormatVp8TestHelper); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h index 02458aea6a..3cf4dd56e5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h @@ -30,7 +30,6 @@ #include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -43,6 +42,9 @@ class RtpPacketizerVp9 : public RtpPacketizer { ~RtpPacketizerVp9() override; + RtpPacketizerVp9(const RtpPacketizerVp9&) = delete; + RtpPacketizerVp9& operator=(const RtpPacketizerVp9&) = delete; + size_t NumPackets() const override; // Gets the next payload with 
VP9 payload header. @@ -64,8 +66,6 @@ class RtpPacketizerVp9 : public RtpPacketizer { rtc::ArrayView remaining_payload_; std::vector payload_sizes_; std::vector::const_iterator current_packet_; - - RTC_DISALLOW_COPY_AND_ASSIGN(RtpPacketizerVp9); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc index 32edd63ef7..81961c69aa 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc @@ -16,6 +16,7 @@ #include #include +#include "absl/strings/string_view.h" #include "modules/rtp_rtcp/include/rtp_cvo.h" #include "modules/rtp_rtcp/source/byte_io.h" // TODO(bug:9855) Move kNoSpatialIdx from vp9_globals.h to common_constants @@ -800,7 +801,7 @@ bool BaseRtpStringExtension::Parse(rtc::ArrayView data, } bool BaseRtpStringExtension::Write(rtc::ArrayView data, - const std::string& str) { + absl::string_view str) { if (str.size() > kMaxValueSizeBytes) { return false; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h index ffbdafda4f..d80e0da4f8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h @@ -20,6 +20,7 @@ #include "api/array_view.h" #include "api/rtp_headers.h" #include "api/rtp_parameters.h" +#include "api/units/timestamp.h" #include "api/video/color_space.h" #include "api/video/video_content_type.h" #include "api/video/video_rotation.h" @@ -41,8 +42,12 @@ class AbsoluteSendTime { static size_t ValueSize(uint32_t time_24bits) { return kValueSizeBytes; } static bool Write(rtc::ArrayView data, uint32_t time_24bits); - static constexpr uint32_t MsTo24Bits(int64_t time_ms) { - return static_cast(((time_ms << 18) + 500) / 1000) & 0x00FFFFFF; + static constexpr uint32_t To24Bits(Timestamp time) { + int64_t time_us = time.us() % (int64_t{1 << 6} * 1'000'000); + int64_t time6x18 = (time_us << 18) / 1'000'000; + RTC_DCHECK_GE(time6x18, 0); + RTC_DCHECK_LT(time6x18, 1 << 24); + return static_cast(time6x18); } }; @@ -297,8 +302,8 @@ class BaseRtpStringExtension { static constexpr uint8_t kMaxValueSizeBytes = 16; static bool Parse(rtc::ArrayView data, std::string* str); - static size_t ValueSize(const std::string& str) { return str.size(); } - static bool Write(rtc::ArrayView data, const std::string& str); + static size_t ValueSize(absl::string_view str) { return str.size(); } + static bool Write(rtc::ArrayView data, absl::string_view str); }; class RtpStreamId : public BaseRtpStringExtension { diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.cc index 8523637feb..e26cec5760 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.cc @@ -672,8 +672,12 @@ bool RtpPacket::RemoveExtension(ExtensionType type) { } // Copy payload data to new packet. 
- memcpy(new_packet.AllocatePayload(payload_size()), payload().data(), - payload_size()); + if (payload_size() > 0) { + memcpy(new_packet.AllocatePayload(payload_size()), payload().data(), + payload_size()); + } else { + new_packet.SetPayloadSize(0); + } // Allocate padding -- must be last! new_packet.SetPadding(padding_size()); @@ -685,7 +689,7 @@ bool RtpPacket::RemoveExtension(ExtensionType type) { std::string RtpPacket::ToString() const { rtc::StringBuilder result; - result << "{payload_type=" << payload_type_ << "marker=" << marker_ + result << "{payload_type=" << payload_type_ << ", marker=" << marker_ << ", sequence_number=" << sequence_number_ << ", padding_size=" << padding_size_ << ", timestamp=" << timestamp_ << ", ssrc=" << ssrc_ << ", payload_offset=" << payload_offset_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc index fe5ccc708e..c8d400a985 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc @@ -23,26 +23,13 @@ namespace webrtc { -constexpr size_t RtpPacketHistory::kMaxCapacity; -constexpr size_t RtpPacketHistory::kMaxPaddingHistory; -constexpr int64_t RtpPacketHistory::kMinPacketDurationMs; -constexpr int RtpPacketHistory::kMinPacketDurationRtt; -constexpr int RtpPacketHistory::kPacketCullingDelayFactor; - -RtpPacketHistory::PacketState::PacketState() = default; -RtpPacketHistory::PacketState::PacketState(const PacketState&) = default; -RtpPacketHistory::PacketState::~PacketState() = default; - RtpPacketHistory::StoredPacket::StoredPacket( std::unique_ptr packet, - absl::optional send_time_ms, + Timestamp send_time, uint64_t insert_order) - : send_time_ms_(send_time_ms), - packet_(std::move(packet)), - // No send time indicates packet is not sent immediately, but instead will - // be put in the pacer queue and later retrieved via - // GetPacketAndSetSendTime(). - pending_transmission_(!send_time_ms.has_value()), + : packet_(std::move(packet)), + pending_transmission_(false), + send_time_(send_time), insert_order_(insert_order), times_retransmitted_(0) {} @@ -85,7 +72,7 @@ RtpPacketHistory::RtpPacketHistory(Clock* clock, bool enable_padding_prio) enable_padding_prio_(enable_padding_prio), number_to_store_(0), mode_(StorageMode::kDisabled), - rtt_ms_(-1), + rtt_(TimeDelta::MinusInfinity()), packets_inserted_(0) {} RtpPacketHistory::~RtpPacketHistory() {} @@ -107,29 +94,28 @@ RtpPacketHistory::StorageMode RtpPacketHistory::GetStorageMode() const { return mode_; } -void RtpPacketHistory::SetRtt(int64_t rtt_ms) { +void RtpPacketHistory::SetRtt(TimeDelta rtt) { MutexLock lock(&lock_); - RTC_DCHECK_GE(rtt_ms, 0); - rtt_ms_ = rtt_ms; + RTC_DCHECK_GE(rtt, TimeDelta::Zero()); + rtt_ = rtt; // If storage is not disabled, packets will be removed after a timeout // that depends on the RTT. Changing the RTT may thus cause some packets // become "old" and subject to removal. if (mode_ != StorageMode::kDisabled) { - CullOldPackets(clock_->TimeInMilliseconds()); + CullOldPackets(); } } void RtpPacketHistory::PutRtpPacket(std::unique_ptr packet, - absl::optional send_time_ms) { + Timestamp send_time) { RTC_DCHECK(packet); MutexLock lock(&lock_); - int64_t now_ms = clock_->TimeInMilliseconds(); if (mode_ == StorageMode::kDisabled) { return; } RTC_DCHECK(packet->allow_retransmission()); - CullOldPackets(now_ms); + CullOldPackets(); // Store packet. 
const uint16_t rtp_seq_no = packet->SequenceNumber(); @@ -145,11 +131,11 @@ void RtpPacketHistory::PutRtpPacket(std::unique_ptr packet, // Packet to be inserted ahead of first packet, expand front. for (; packet_index < 0; ++packet_index) { - packet_history_.emplace_front(nullptr, absl::nullopt, 0); + packet_history_.emplace_front(); } // Packet to be inserted behind last packet, expand back. while (static_cast(packet_history_.size()) <= packet_index) { - packet_history_.emplace_back(nullptr, absl::nullopt, 0); + packet_history_.emplace_back(); } RTC_DCHECK_GE(packet_index, 0); @@ -157,7 +143,7 @@ void RtpPacketHistory::PutRtpPacket(std::unique_ptr packet, RTC_DCHECK(packet_history_[packet_index].packet_ == nullptr); packet_history_[packet_index] = - StoredPacket(std::move(packet), send_time_ms, packets_inserted_++); + StoredPacket(std::move(packet), send_time, packets_inserted_++); if (enable_padding_prio_) { if (padding_priority_.size() >= kMaxPaddingHistory - 1) { @@ -168,36 +154,6 @@ void RtpPacketHistory::PutRtpPacket(std::unique_ptr packet, } } -std::unique_ptr RtpPacketHistory::GetPacketAndSetSendTime( - uint16_t sequence_number) { - MutexLock lock(&lock_); - if (mode_ == StorageMode::kDisabled) { - return nullptr; - } - - StoredPacket* packet = GetStoredPacket(sequence_number); - if (packet == nullptr) { - return nullptr; - } - - int64_t now_ms = clock_->TimeInMilliseconds(); - if (!VerifyRtt(*packet, now_ms)) { - return nullptr; - } - - if (packet->send_time_ms_) { - packet->IncrementTimesRetransmitted( - enable_padding_prio_ ? &padding_priority_ : nullptr); - } - - // Update send-time and mark as no long in pacer queue. - packet->send_time_ms_ = now_ms; - packet->pending_transmission_ = false; - - // Return copy of packet instance since it may need to be retransmitted. - return std::make_unique(*packet->packet_); -} - std::unique_ptr RtpPacketHistory::GetPacketAndMarkAsPending( uint16_t sequence_number) { return GetPacketAndMarkAsPending( @@ -225,7 +181,7 @@ std::unique_ptr RtpPacketHistory::GetPacketAndMarkAsPending( return nullptr; } - if (!VerifyRtt(*packet, clock_->TimeInMilliseconds())) { + if (!VerifyRtt(*packet)) { // Packet already resent within too short a time window, ignore. return nullptr; } @@ -251,51 +207,45 @@ void RtpPacketHistory::MarkPacketAsSent(uint16_t sequence_number) { return; } - RTC_DCHECK(packet->send_time_ms_); - // Update send-time, mark as no longer in pacer queue, and increment // transmission count. - packet->send_time_ms_ = clock_->TimeInMilliseconds(); + packet->set_send_time(clock_->CurrentTime()); packet->pending_transmission_ = false; packet->IncrementTimesRetransmitted(enable_padding_prio_ ? 
&padding_priority_ : nullptr); } -absl::optional RtpPacketHistory::GetPacketState( - uint16_t sequence_number) const { +bool RtpPacketHistory::GetPacketState(uint16_t sequence_number) const { MutexLock lock(&lock_); if (mode_ == StorageMode::kDisabled) { - return absl::nullopt; + return false; } int packet_index = GetPacketIndex(sequence_number); if (packet_index < 0 || static_cast(packet_index) >= packet_history_.size()) { - return absl::nullopt; + return false; } const StoredPacket& packet = packet_history_[packet_index]; if (packet.packet_ == nullptr) { - return absl::nullopt; + return false; } - if (!VerifyRtt(packet, clock_->TimeInMilliseconds())) { - return absl::nullopt; + if (!VerifyRtt(packet)) { + return false; } - return StoredPacketToPacketState(packet); + return true; } -bool RtpPacketHistory::VerifyRtt(const RtpPacketHistory::StoredPacket& packet, - int64_t now_ms) const { - if (packet.send_time_ms_) { - // Send-time already set, this check must be for a retransmission. - if (packet.times_retransmitted() > 0 && - now_ms < *packet.send_time_ms_ + rtt_ms_) { - // This packet has already been retransmitted once, and the time since - // that even is lower than on RTT. Ignore request as this packet is - // likely already in the network pipe. - return false; - } +bool RtpPacketHistory::VerifyRtt( + const RtpPacketHistory::StoredPacket& packet) const { + if (packet.times_retransmitted() > 0 && + clock_->CurrentTime() - packet.send_time() < rtt_) { + // This packet has already been retransmitted once, and the time since + // that even is lower than on RTT. Ignore request as this packet is + // likely already in the network pipe. + return false; } return true; @@ -348,7 +298,7 @@ std::unique_ptr RtpPacketHistory::GetPayloadPaddingPacket( return nullptr; } - best_packet->send_time_ms_ = clock_->TimeInMilliseconds(); + best_packet->set_send_time(clock_->CurrentTime()); best_packet->IncrementTimesRetransmitted( enable_padding_prio_ ? &padding_priority_ : nullptr); @@ -368,21 +318,6 @@ void RtpPacketHistory::CullAcknowledgedPackets( } } -bool RtpPacketHistory::SetPendingTransmission(uint16_t sequence_number) { - MutexLock lock(&lock_); - if (mode_ == StorageMode::kDisabled) { - return false; - } - - StoredPacket* packet = GetStoredPacket(sequence_number); - if (packet == nullptr) { - return false; - } - - packet->pending_transmission_ = true; - return true; -} - void RtpPacketHistory::Clear() { MutexLock lock(&lock_); Reset(); @@ -393,9 +328,12 @@ void RtpPacketHistory::Reset() { padding_priority_.clear(); } -void RtpPacketHistory::CullOldPackets(int64_t now_ms) { - int64_t packet_duration_ms = - std::max(kMinPacketDurationRtt * rtt_ms_, kMinPacketDurationMs); +void RtpPacketHistory::CullOldPackets() { + Timestamp now = clock_->CurrentTime(); + TimeDelta packet_duration = + rtt_.IsFinite() + ? std::max(kMinPacketDurationRtt * rtt_, kMinPacketDuration) + : kMinPacketDuration; while (!packet_history_.empty()) { if (packet_history_.size() >= kMaxCapacity) { // We have reached the absolute max capacity, remove one packet @@ -410,15 +348,15 @@ void RtpPacketHistory::CullOldPackets(int64_t now_ms) { return; } - if (*stored_packet.send_time_ms_ + packet_duration_ms > now_ms) { + if (stored_packet.send_time() + packet_duration > now) { // Don't cull packets too early to avoid failed retransmission requests. 
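// [Editor's note] Standalone model of the simplified VerifyRtt() above, with
// plain microsecond counts instead of Timestamp/TimeDelta. A packet that has
// already been retransmitted is held back until one RTT has elapsed since its
// last send; because rtt_ now starts at TimeDelta::MinusInfinity(), the
// comparison is vacuously false until an RTT estimate arrives, so
// retransmission is not blocked in that case.
#include <cstdint>
#include <limits>

constexpr int64_t kRttUnknown = std::numeric_limits<int64_t>::min();  // stands in for MinusInfinity()

bool MayRetransmit(int times_retransmitted, int64_t send_time_us,
                   int64_t now_us, int64_t rtt_us /* kRttUnknown if not yet reported */) {
  if (times_retransmitted > 0 && now_us - send_time_us < rtt_us) {
    return false;  // likely still in the network pipe from the previous retransmission
  }
  return true;
}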
return; } if (packet_history_.size() >= number_to_store_ || - *stored_packet.send_time_ms_ + - (packet_duration_ms * kPacketCullingDelayFactor) <= - now_ms) { + stored_packet.send_time() + + (packet_duration * kPacketCullingDelayFactor) <= + now) { // Too many packets in history, or this packet has timed out. Remove it // and continue. RemovePacket(0); @@ -487,17 +425,4 @@ RtpPacketHistory::StoredPacket* RtpPacketHistory::GetStoredPacket( return &packet_history_[index]; } -RtpPacketHistory::PacketState RtpPacketHistory::StoredPacketToPacketState( - const RtpPacketHistory::StoredPacket& stored_packet) { - RtpPacketHistory::PacketState state; - state.rtp_sequence_number = stored_packet.packet_->SequenceNumber(); - state.send_time_ms = stored_packet.send_time_ms_; - state.capture_time_ms = stored_packet.packet_->capture_time_ms(); - state.ssrc = stored_packet.packet_->Ssrc(); - state.packet_size = stored_packet.packet_->size(); - state.times_retransmitted = stored_packet.times_retransmitted(); - state.pending_transmission = stored_packet.pending_transmission_; - return state; -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h index f87ad4d550..7475a35be3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h @@ -15,9 +15,12 @@ #include #include #include +#include #include #include "api/function_view.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -35,28 +38,12 @@ class RtpPacketHistory { // packets as they time out or as signaled as received. }; - // Snapshot indicating the state of a packet in the history. - struct PacketState { - PacketState(); - PacketState(const PacketState&); - ~PacketState(); - - uint16_t rtp_sequence_number = 0; - absl::optional send_time_ms; - int64_t capture_time_ms = 0; - uint32_t ssrc = 0; - size_t packet_size = 0; - // Number of times RE-transmitted, ie not including the first transmission. - size_t times_retransmitted = 0; - bool pending_transmission = false; - }; - // Maximum number of packets we ever allow in the history. static constexpr size_t kMaxCapacity = 9600; // Maximum number of entries in prioritized queue of padding packets. static constexpr size_t kMaxPaddingHistory = 63; - // Don't remove packets within max(1000ms, 3x RTT). - static constexpr int64_t kMinPacketDurationMs = 1000; + // Don't remove packets within max(1 second, 3x RTT). + static constexpr TimeDelta kMinPacketDuration = TimeDelta::Seconds(1); static constexpr int kMinPacketDurationRtt = 3; // With kStoreAndCull, always remove packets after 3x max(1000ms, 3x rtt). static constexpr int kPacketCullingDelayFactor = 3; @@ -76,17 +63,10 @@ class RtpPacketHistory { // Set RTT, used to avoid premature retransmission and to prevent over-writing // a packet in the history before we are reasonably sure it has been received. - void SetRtt(int64_t rtt_ms); + void SetRtt(TimeDelta rtt); - // If `send_time` is set, packet was sent without using pacer, so state will - // be set accordingly. void PutRtpPacket(std::unique_ptr packet, - absl::optional send_time_ms); - - // Gets stored RTP packet corresponding to the input |sequence number|. 
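// [Editor's note] Standalone model of the culling deadlines computed in
// CullOldPackets() above (plain microseconds instead of TimeDelta/Timestamp).
// A packet is always kept for at least max(1 s, 3 * RTT); after that it may be
// dropped if the history is over its configured size, and it is always dropped
// after three times that minimum duration.
#include <algorithm>
#include <cstdint>

constexpr int64_t kMinPacketDurationUs = 1'000'000;  // 1 second
constexpr int kMinPacketDurationRtt = 3;
constexpr int kPacketCullingDelayFactor = 3;

int64_t PacketDurationUs(int64_t rtt_us, bool rtt_known) {
  return rtt_known
             ? std::max(kMinPacketDurationRtt * rtt_us, kMinPacketDurationUs)
             : kMinPacketDurationUs;
}

bool ShouldCull(int64_t send_time_us, int64_t now_us, int64_t duration_us,
                bool history_over_size) {
  if (send_time_us + duration_us > now_us) {
    return false;  // too young to cull
  }
  return history_over_size ||
         send_time_us + duration_us * kPacketCullingDelayFactor <= now_us;
}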
- // Returns nullptr if packet is not found or was (re)sent too recently. - std::unique_ptr GetPacketAndSetSendTime( - uint16_t sequence_number); + Timestamp send_time); // Gets stored RTP packet corresponding to the input |sequence number|. // Returns nullptr if packet is not found or was (re)sent too recently. @@ -109,9 +89,9 @@ class RtpPacketHistory { // counter. Marks the packet as no longer being in the pacer queue. void MarkPacketAsSent(uint16_t sequence_number); - // Similar to GetPacketAndSetSendTime(), but only returns a snapshot of the - // current state for packet, and never updates internal state. - absl::optional GetPacketState(uint16_t sequence_number) const; + // Returns true if history contains packet with `sequence_number` and it can + // be retransmitted. + bool GetPacketState(uint16_t sequence_number) const; // Get the packet (if any) from the history, that is deemed most likely to // the remote side. This is calculated from heuristics such as packet age @@ -130,11 +110,6 @@ class RtpPacketHistory { // Cull packets that have been acknowledged as received by the remote end. void CullAcknowledgedPackets(rtc::ArrayView sequence_numbers); - // Mark packet as queued for transmission. This will prevent premature - // removal or duplicate retransmissions in the pacer queue. - // Returns true if status was set, false if packet was not found. - bool SetPendingTransmission(uint16_t sequence_number); - // Remove all pending packets from the history, but keep storage mode and // capacity. void Clear(); @@ -146,8 +121,9 @@ class RtpPacketHistory { class StoredPacket { public: + StoredPacket() = default; StoredPacket(std::unique_ptr packet, - absl::optional send_time_ms, + Timestamp send_time, uint64_t insert_order); StoredPacket(StoredPacket&&); StoredPacket& operator=(StoredPacket&&); @@ -158,7 +134,8 @@ class RtpPacketHistory { void IncrementTimesRetransmitted(PacketPrioritySet* priority_set); // The time of last transmission, including retransmissions. - absl::optional send_time_ms_; + Timestamp send_time() const { return send_time_; } + void set_send_time(Timestamp value) { send_time_ = value; } // The actual packet. std::unique_ptr packet_; @@ -167,6 +144,8 @@ class RtpPacketHistory { bool pending_transmission_; private: + Timestamp send_time_ = Timestamp::Zero(); + // Unique number per StoredPacket, incremented by one for each added // packet. Used to sort on insert order. uint64_t insert_order_; @@ -178,12 +157,11 @@ class RtpPacketHistory { bool operator()(StoredPacket* lhs, StoredPacket* rhs) const; }; - // Helper method used by GetPacketAndSetSendTime() and GetPacketState() to - // check if packet has too recently been sent. - bool VerifyRtt(const StoredPacket& packet, int64_t now_ms) const + // Helper method to check if packet has too recently been sent. + bool VerifyRtt(const StoredPacket& packet) const RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); void Reset() RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); - void CullOldPackets(int64_t now_ms) RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); + void CullOldPackets() RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); // Removes the packet from the history, and context/mapping that has been // stored. Returns the RTP packet instance contained within the StoredPacket. 
std::unique_ptr RemovePacket(int packet_index) @@ -192,15 +170,13 @@ class RtpPacketHistory { RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); StoredPacket* GetStoredPacket(uint16_t sequence_number) RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); - static PacketState StoredPacketToPacketState( - const StoredPacket& stored_packet); Clock* const clock_; const bool enable_padding_prio_; mutable Mutex lock_; size_t number_to_store_ RTC_GUARDED_BY(lock_); StorageMode mode_ RTC_GUARDED_BY(lock_); - int64_t rtt_ms_ RTC_GUARDED_BY(lock_); + TimeDelta rtt_ RTC_GUARDED_BY(lock_); // Queue of stored packets, ordered by sequence number, with older packets in // the front and new packets being added to the back. Note that there may be diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h index 431d3f52be..f290a643a4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h @@ -14,7 +14,6 @@ #include -#include "absl/base/attributes.h" #include "api/array_view.h" #include "api/ref_counted_base.h" #include "api/rtp_headers.h" @@ -49,15 +48,6 @@ class RtpPacketReceived : public RtpPacket { webrtc::Timestamp arrival_time() const { return arrival_time_; } void set_arrival_time(webrtc::Timestamp time) { arrival_time_ = time; } - ABSL_DEPRECATED("Use arrival_time() instead") - int64_t arrival_time_ms() const { - return arrival_time_.IsMinusInfinity() ? -1 : arrival_time_.ms(); - } - ABSL_DEPRECATED("Use set_arrival_time() instead") - void set_arrival_time_ms(int64_t time) { - arrival_time_ = webrtc::Timestamp::Millis(time); - } - // Flag if packet was recovered via RTX or FEC. bool recovered() const { return recovered_; } void set_recovered(bool value) { recovered_ = value; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h index 12341ef6cf..438ca354ed 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h @@ -19,6 +19,8 @@ #include "api/array_view.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/video_timing.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" @@ -44,9 +46,8 @@ class RtpPacketToSend : public RtpPacket { ~RtpPacketToSend(); // Time in local time base as close as it can to frame capture time. 
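// [Editor's note] Most hunks in rtp_packet_history.* and rtp_packet_to_send.h
// replace raw int64_t millisecond values (send_time_ms, rtt_ms,
// capture_time_ms) with the strongly typed webrtc::Timestamp and
// webrtc::TimeDelta units. A standalone std::chrono analogy of what the change
// buys: mixing a point in time with a duration, or adding two points in time,
// no longer compiles, and unit conversions become explicit.
#include <chrono>

using Clock = std::chrono::steady_clock;

Clock::duration TimeInSendQueue(Clock::time_point enqueue_time,
                                Clock::time_point now) {
  return now - enqueue_time;  // a duration (cf. TimeDelta), not a time_point
  // return now + enqueue_time;  // would not compile: two points in time
}

int main() {
  const Clock::time_point enqueued = Clock::now();
  const auto queued_ms = std::chrono::duration_cast<std::chrono::milliseconds>(
      TimeInSendQueue(enqueued, Clock::now()));
  (void)queued_ms;
}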
- int64_t capture_time_ms() const { return capture_time_ms_; } - - void set_capture_time_ms(int64_t time) { capture_time_ms_ = time; } + webrtc::Timestamp capture_time() const { return capture_time_; } + void set_capture_time(webrtc::Timestamp time) { capture_time_ = time; } void set_packet_type(RtpPacketMediaType type) { packet_type_ = type; } absl::optional packet_type() const { @@ -77,27 +78,27 @@ class RtpPacketToSend : public RtpPacket { additional_data_ = std::move(data); } - void set_packetization_finish_time_ms(int64_t time) { + void set_packetization_finish_time(webrtc::Timestamp time) { SetExtension( - VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), + VideoSendTiming::GetDeltaCappedMs(time - capture_time_), VideoTimingExtension::kPacketizationFinishDeltaOffset); } - void set_pacer_exit_time_ms(int64_t time) { + void set_pacer_exit_time(webrtc::Timestamp time) { SetExtension( - VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), + VideoSendTiming::GetDeltaCappedMs(time - capture_time_), VideoTimingExtension::kPacerExitDeltaOffset); } - void set_network_time_ms(int64_t time) { + void set_network_time(webrtc::Timestamp time) { SetExtension( - VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), + VideoSendTiming::GetDeltaCappedMs(time - capture_time_), VideoTimingExtension::kNetworkTimestampDeltaOffset); } - void set_network2_time_ms(int64_t time) { + void set_network2_time(webrtc::Timestamp time) { SetExtension( - VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), + VideoSendTiming::GetDeltaCappedMs(time - capture_time_), VideoTimingExtension::kNetwork2TimestampDeltaOffset); } @@ -120,8 +121,17 @@ class RtpPacketToSend : public RtpPacket { void set_is_red(bool is_red) { is_red_ = is_red; } bool is_red() const { return is_red_; } + // The amount of time spent in the send queue, used for totalPacketSendDelay. + // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalpacketsenddelay + void set_time_in_send_queue(TimeDelta time_in_send_queue) { + time_in_send_queue_ = time_in_send_queue; + } + absl::optional time_in_send_queue() const { + return time_in_send_queue_; + } + private: - int64_t capture_time_ms_ = 0; + webrtc::Timestamp capture_time_ = webrtc::Timestamp::Zero(); absl::optional packet_type_; bool allow_retransmission_ = false; absl::optional retransmitted_sequence_number_; @@ -130,6 +140,7 @@ class RtpPacketToSend : public RtpPacket { bool is_key_frame_ = false; bool fec_protect_packet_ = false; bool is_red_ = false; + absl::optional time_in_send_queue_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.cc index 9cca9837ea..c866c608ad 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.cc @@ -383,7 +383,9 @@ bool RtpPacketizerAv1::NextPacket(RtpPacketToSend* packet) { int payload_offset = std::max(0, obu_offset - (ObuHasExtension(obu.header) ? 2 : 1)); size_t payload_size = obu.payload.size() - payload_offset; - memcpy(write_at, obu.payload.data() + payload_offset, payload_size); + if (!obu.payload.empty() && payload_size > 0) { + memcpy(write_at, obu.payload.data() + payload_offset, payload_size); + } write_at += payload_size; // All obus are stored from the beginning, except, may be, the first one. 
obu_offset = 0; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc index 367785846b..a3662f19d9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/transport/field_trial_based_config.h" #include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" #include "modules/rtp_rtcp/source/rtcp_sender.h" @@ -35,7 +36,6 @@ namespace webrtc { namespace { -const int64_t kRtpRtcpMaxIdleTimeProcessMs = 5; const int64_t kRtpRtcpRttProcessTimeMs = 1000; const int64_t kRtpRtcpBitrateProcessTimeMs = 10; const int64_t kDefaultExpectedRetransmissionTimeMs = 125; @@ -70,12 +70,9 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration) clock_(configuration.clock), last_bitrate_process_time_(clock_->TimeInMilliseconds()), last_rtt_process_time_(clock_->TimeInMilliseconds()), - next_process_time_(clock_->TimeInMilliseconds() + - kRtpRtcpMaxIdleTimeProcessMs), packet_overhead_(28), // IPV4 UDP. nack_last_time_sent_full_ms_(0), nack_last_seq_number_sent_(0), - remote_bitrate_(configuration.remote_bitrate_estimator), rtt_stats_(configuration.rtt_stats), rtt_ms_(0) { if (!configuration.receiver_only) { @@ -94,29 +91,14 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration) ModuleRtpRtcpImpl::~ModuleRtpRtcpImpl() = default; -// Returns the number of milliseconds until the module want a worker thread -// to call Process. -int64_t ModuleRtpRtcpImpl::TimeUntilNextProcess() { - return std::max(0, - next_process_time_ - clock_->TimeInMilliseconds()); -} - // Process any pending tasks such as timeouts (non time critical events). void ModuleRtpRtcpImpl::Process() { const int64_t now = clock_->TimeInMilliseconds(); - // TODO(bugs.webrtc.org/11581): Figure out why we need to call Process() 200 - // times a second. - next_process_time_ = now + kRtpRtcpMaxIdleTimeProcessMs; if (rtp_sender_) { if (now >= last_bitrate_process_time_ + kRtpRtcpBitrateProcessTimeMs) { rtp_sender_->packet_sender.ProcessBitrateAndNotifyObservers(); last_bitrate_process_time_ = now; - // TODO(bugs.webrtc.org/11581): Is this a bug? At the top of the function, - // next_process_time_ is incremented by 5ms, here we effectively do a - // std::min() of (now + 5ms, now + 10ms). Seems like this is a no-op? - next_process_time_ = - std::min(next_process_time_, now + kRtpRtcpBitrateProcessTimeMs); } } @@ -158,17 +140,6 @@ void ModuleRtpRtcpImpl::Process() { RTC_LOG_F(LS_WARNING) << "Timeout: No increase in RTCP RR extended " "highest sequence number."; } - - if (remote_bitrate_ && rtcp_sender_.TMMBR()) { - unsigned int target_bitrate = 0; - std::vector ssrcs; - if (remote_bitrate_->LatestEstimate(&ssrcs, &target_bitrate)) { - if (!ssrcs.empty()) { - target_bitrate = target_bitrate / ssrcs.size(); - } - rtcp_sender_.SetTargetBitrate(target_bitrate); - } - } } else { // Report rtt from receiver. if (process_rtt) { @@ -182,11 +153,6 @@ void ModuleRtpRtcpImpl::Process() { // Get processed rtt. if (process_rtt) { last_rtt_process_time_ = now; - // TODO(bugs.webrtc.org/11581): Is this a bug? At the top of the function, - // next_process_time_ is incremented by 5ms, here we effectively do a - // std::min() of (now + 5ms, now + 1000ms). Seems like this is a no-op? 
- next_process_time_ = std::min( - next_process_time_, last_rtt_process_time_ + kRtpRtcpRttProcessTimeMs); if (rtt_stats_) { // Make sure we have a valid RTT before setting. int64_t last_rtt = rtt_stats_->LastProcessedRtt(); @@ -291,13 +257,7 @@ RtpState ModuleRtpRtcpImpl::GetRtxState() const { return state; } -void ModuleRtpRtcpImpl::SetRid(const std::string& rid) { - if (rtp_sender_) { - rtp_sender_->packet_generator.SetRid(rid); - } -} - -void ModuleRtpRtcpImpl::SetMid(const std::string& mid) { +void ModuleRtpRtcpImpl::SetMid(absl::string_view mid) { if (rtp_sender_) { rtp_sender_->packet_generator.SetMid(mid); } @@ -348,9 +308,6 @@ RTCPSender::FeedbackState ModuleRtpRtcpImpl::GetFeedbackState() { return state; } -// TODO(nisse): This method shouldn't be called for a receive-only -// stream. Delete rtp_sender_ check as soon as all applications are -// updated. int32_t ModuleRtpRtcpImpl::SetSendingStatus(const bool sending) { if (rtcp_sender_.Sending() != sending) { // Sends RTCP BYE when going from true to false @@ -363,15 +320,8 @@ bool ModuleRtpRtcpImpl::Sending() const { return rtcp_sender_.Sending(); } -// TODO(nisse): This method shouldn't be called for a receive-only -// stream. Delete rtp_sender_ check as soon as all applications are -// updated. void ModuleRtpRtcpImpl::SetSendingMediaStatus(const bool sending) { - if (rtp_sender_) { - rtp_sender_->packet_generator.SetSendingMediaStatus(sending); - } else { - RTC_DCHECK(!sending); - } + rtp_sender_->packet_generator.SetSendingMediaStatus(sending); } bool ModuleRtpRtcpImpl::SendingMedia() const { @@ -450,6 +400,12 @@ ModuleRtpRtcpImpl::FetchFecPackets() { return {}; } +void ModuleRtpRtcpImpl::OnAbortedRetransmissions( + rtc::ArrayView sequence_numbers) { + RTC_DCHECK_NOTREACHED() + << "Stream flushing not supported with legacy rtp modules."; +} + void ModuleRtpRtcpImpl::OnPacketsAcknowledged( rtc::ArrayView sequence_numbers) { RTC_DCHECK(rtp_sender_); @@ -517,7 +473,7 @@ void ModuleRtpRtcpImpl::SetRTCPStatus(const RtcpMode method) { rtcp_sender_.SetRTCPStatus(method); } -int32_t ModuleRtpRtcpImpl::SetCNAME(const char* c_name) { +int32_t ModuleRtpRtcpImpl::SetCNAME(absl::string_view c_name) { return rtcp_sender_.SetCNAME(c_name); } @@ -787,7 +743,7 @@ void ModuleRtpRtcpImpl::set_rtt_ms(int64_t rtt_ms) { rtt_ms_ = rtt_ms; } if (rtp_sender_) { - rtp_sender_->packet_history.SetRtt(rtt_ms); + rtp_sender_->packet_history.SetRtt(TimeDelta::Millis(rtt_ms)); } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h index 499573cab4..0f4f00453d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h @@ -19,11 +19,11 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/rtp_headers.h" #include "api/video/video_bitrate_allocation.h" #include "modules/include/module_fec_types.h" -#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" // RTCPPacketType #include "modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h" @@ -50,10 +50,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { const RtpRtcpInterface::Configuration& configuration); ~ModuleRtpRtcpImpl() override; - // Returns the number of milliseconds until the module want 
a worker thread to - // call Process. - int64_t TimeUntilNextProcess() override; - // Process any pending tasks such as timeouts. void Process() override; @@ -101,9 +97,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { uint32_t SSRC() const override { return rtcp_sender_.SSRC(); } - void SetRid(const std::string& rid) override; - - void SetMid(const std::string& mid) override; + void SetMid(absl::string_view mid) override; void SetCsrcs(const std::vector& csrcs) override; @@ -145,6 +139,9 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { std::vector> FetchFecPackets() override; + void OnAbortedRetransmissions( + rtc::ArrayView sequence_numbers) override; + void OnPacketsAcknowledged( rtc::ArrayView sequence_numbers) override; @@ -167,7 +164,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { void SetRTCPStatus(RtcpMode method) override; // Set RTCP CName. - int32_t SetCNAME(const char* c_name) override; + int32_t SetCNAME(absl::string_view c_name) override; // Get remote NTP. int32_t RemoteNTP(uint32_t* received_ntp_secs, @@ -310,15 +307,12 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { int64_t last_bitrate_process_time_; int64_t last_rtt_process_time_; - int64_t next_process_time_; uint16_t packet_overhead_; // Send side int64_t nack_last_time_sent_full_ms_; uint16_t nack_last_seq_number_sent_; - RemoteBitrateEstimator* const remote_bitrate_; - RtcpRttStats* const rtt_stats_; // The processed RTT from RtcpRttStats. diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc index 6f39c5ce53..4329a423cb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/sequence_checker.h" #include "api/transport/field_trial_based_config.h" @@ -28,7 +29,6 @@ #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/ntp_time.h" @@ -51,13 +51,6 @@ RTCPSender::Configuration AddRtcpSendEvaluationCallback( return config; } -int DelayMillisForDuration(TimeDelta duration) { - // TimeDelta::ms() rounds downwards sometimes which leads to too little time - // slept. Account for this, unless `duration` is exactly representable in - // millisecs. - return (duration.us() + rtc::kNumMillisecsPerSec - 1) / - rtc::kNumMicrosecsPerMillisec; -} } // namespace ModuleRtpRtcpImpl2::RtpSenderContext::RtpSenderContext( @@ -86,7 +79,6 @@ ModuleRtpRtcpImpl2::ModuleRtpRtcpImpl2(const Configuration& configuration) packet_overhead_(28), // IPV4 UDP. 
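// [Editor's note] SetMid() and SetCNAME() in these files switch from
// const std::string& / const char* to absl::string_view. A standalone sketch
// using std::string_view (the standard-library equivalent) to show why call
// sites need no changes: both string literals and std::string convert
// implicitly, and the callee copies only when it actually stores the value.
#include <string>
#include <string_view>

class Sender {
 public:
  void SetMid(std::string_view mid) { mid_ = std::string(mid); }  // one copy, at storage time

 private:
  std::string mid_;
};

int main() {
  Sender sender;
  sender.SetMid("mid0");               // string literal
  sender.SetMid(std::string("mid1"));  // std::string
}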
nack_last_time_sent_full_ms_(0), nack_last_seq_number_sent_(0), - remote_bitrate_(configuration.remote_bitrate_estimator), rtt_stats_(configuration.rtt_stats), rtt_ms_(0) { RTC_DCHECK(worker_queue_); @@ -227,13 +219,7 @@ uint32_t ModuleRtpRtcpImpl2::local_media_ssrc() const { return rtcp_receiver_.local_media_ssrc(); } -void ModuleRtpRtcpImpl2::SetRid(const std::string& rid) { - if (rtp_sender_) { - rtp_sender_->packet_generator.SetRid(rid); - } -} - -void ModuleRtpRtcpImpl2::SetMid(const std::string& mid) { +void ModuleRtpRtcpImpl2::SetMid(absl::string_view mid) { if (rtp_sender_) { rtp_sender_->packet_generator.SetMid(mid); } @@ -288,9 +274,6 @@ RTCPSender::FeedbackState ModuleRtpRtcpImpl2::GetFeedbackState() { return state; } -// TODO(nisse): This method shouldn't be called for a receive-only -// stream. Delete rtp_sender_ check as soon as all applications are -// updated. int32_t ModuleRtpRtcpImpl2::SetSendingStatus(const bool sending) { if (rtcp_sender_.Sending() != sending) { // Sends RTCP BYE when going from true to false @@ -303,15 +286,8 @@ bool ModuleRtpRtcpImpl2::Sending() const { return rtcp_sender_.Sending(); } -// TODO(nisse): This method shouldn't be called for a receive-only -// stream. Delete rtp_sender_ check as soon as all applications are -// updated. void ModuleRtpRtcpImpl2::SetSendingMediaStatus(const bool sending) { - if (rtp_sender_) { - rtp_sender_->packet_generator.SetSendingMediaStatus(sending); - } else { - RTC_DCHECK(!sending); - } + rtp_sender_->packet_generator.SetSendingMediaStatus(sending); } bool ModuleRtpRtcpImpl2::SendingMedia() const { @@ -392,6 +368,13 @@ ModuleRtpRtcpImpl2::FetchFecPackets() { return rtp_sender_->packet_sender.FetchFecPackets(); } +void ModuleRtpRtcpImpl2::OnAbortedRetransmissions( + rtc::ArrayView sequence_numbers) { + RTC_DCHECK(rtp_sender_); + RTC_DCHECK_RUN_ON(&rtp_sender_->sequencing_checker); + rtp_sender_->packet_sender.OnAbortedRetransmissions(sequence_numbers); +} + void ModuleRtpRtcpImpl2::OnPacketsAcknowledged( rtc::ArrayView sequence_numbers) { RTC_DCHECK(rtp_sender_); @@ -463,7 +446,7 @@ void ModuleRtpRtcpImpl2::SetRTCPStatus(const RtcpMode method) { rtcp_sender_.SetRTCPStatus(method); } -int32_t ModuleRtpRtcpImpl2::SetCNAME(const char* c_name) { +int32_t ModuleRtpRtcpImpl2::SetCNAME(absl::string_view c_name) { return rtcp_sender_.SetCNAME(c_name); } @@ -745,7 +728,7 @@ void ModuleRtpRtcpImpl2::set_rtt_ms(int64_t rtt_ms) { rtt_ms_ = rtt_ms; } if (rtp_sender_) { - rtp_sender_->packet_history.SetRtt(rtt_ms); + rtp_sender_->packet_history.SetRtt(TimeDelta::Millis(rtt_ms)); } } @@ -781,27 +764,31 @@ void ModuleRtpRtcpImpl2::PeriodicUpdate() { } } -// RTC_RUN_ON(worker_queue_); void ModuleRtpRtcpImpl2::MaybeSendRtcp() { + RTC_DCHECK_RUN_ON(worker_queue_); if (rtcp_sender_.TimeToSendRTCPReport()) rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpReport); } // TODO(bugs.webrtc.org/12889): Consider removing this function when the issue // is resolved. -// RTC_RUN_ON(worker_queue_); void ModuleRtpRtcpImpl2::MaybeSendRtcpAtOrAfterTimestamp( Timestamp execution_time) { + RTC_DCHECK_RUN_ON(worker_queue_); Timestamp now = clock_->CurrentTime(); if (now >= execution_time) { MaybeSendRtcp(); return; } - RTC_DLOG(LS_WARNING) - << "BUGBUG: Task queue scheduled delayed call too early."; + TimeDelta delta = execution_time - now; + // TaskQueue may run task 1ms earlier, so don't print warning if in this case. 
+ if (delta > TimeDelta::Millis(1)) { + RTC_DLOG(LS_WARNING) << "BUGBUG: Task queue scheduled delayed call " + << delta << " too early."; + } - ScheduleMaybeSendRtcpAtOrAfterTimestamp(execution_time, execution_time - now); + ScheduleMaybeSendRtcpAtOrAfterTimestamp(execution_time, delta); } void ModuleRtpRtcpImpl2::ScheduleRtcpSendEvaluation(TimeDelta duration) { @@ -811,7 +798,7 @@ void ModuleRtpRtcpImpl2::ScheduleRtcpSendEvaluation(TimeDelta duration) { // than the worker queue on which it's created on implies that external // synchronization is present and removes this activity before destruction. if (duration.IsZero()) { - worker_queue_->PostTask(ToQueuedTask(task_safety_, [this] { + worker_queue_->PostTask(SafeTask(task_safety_.flag(), [this] { RTC_DCHECK_RUN_ON(worker_queue_); MaybeSendRtcp(); })); @@ -829,12 +816,12 @@ void ModuleRtpRtcpImpl2::ScheduleMaybeSendRtcpAtOrAfterTimestamp( // See note in ScheduleRtcpSendEvaluation about why `worker_queue_` can be // accessed. worker_queue_->PostDelayedTask( - ToQueuedTask(task_safety_, - [this, execution_time] { - RTC_DCHECK_RUN_ON(worker_queue_); - MaybeSendRtcpAtOrAfterTimestamp(execution_time); - }), - DelayMillisForDuration(duration)); + SafeTask(task_safety_.flag(), + [this, execution_time] { + RTC_DCHECK_RUN_ON(worker_queue_); + MaybeSendRtcpAtOrAfterTimestamp(execution_time); + }), + duration.RoundUpTo(TimeDelta::Millis(1))); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h index 535907f2cb..34b6b8342d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h @@ -19,14 +19,15 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/rtp_headers.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" #include "api/video/video_bitrate_allocation.h" #include "modules/include/module_fec_types.h" -#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" // RTCPPacketType #include "modules/rtp_rtcp/source/packet_sequencer.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" @@ -39,9 +40,7 @@ #include "rtc_base/gtest_prod_util.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/task_utils/repeating_task.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -113,9 +112,7 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, // RtpRtcpInterface::Configuration::local_media_ssrc. 
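// [Editor's note] The rescheduling above now rounds the remaining duration up
// to whole milliseconds with duration.RoundUpTo(TimeDelta::Millis(1)) instead
// of the removed DelayMillisForDuration() helper; both are a ceiling division,
// so the delayed task is never scheduled earlier than requested. A standalone
// check of that rounding in plain integers:
#include <cassert>
#include <cstdint>

int64_t CeilToMillis(int64_t duration_us) {
  return (duration_us + 999) / 1000;  // same value the removed helper computed
}

int main() {
  assert(CeilToMillis(2'000) == 2);  // exactly 2 ms stays 2 ms
  assert(CeilToMillis(2'001) == 3);  // 2.001 ms rounds up, never down
  assert(CeilToMillis(1) == 1);
  return 0;
}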
uint32_t local_media_ssrc() const; - void SetRid(const std::string& rid) override; - - void SetMid(const std::string& mid) override; + void SetMid(absl::string_view mid) override; void SetCsrcs(const std::vector& csrcs) override; @@ -157,6 +154,9 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, std::vector> FetchFecPackets() override; + void OnAbortedRetransmissions( + rtc::ArrayView sequence_numbers) override; + void OnPacketsAcknowledged( rtc::ArrayView sequence_numbers) override; @@ -179,7 +179,7 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, void SetRTCPStatus(RtcpMode method) override; // Set RTCP CName. - int32_t SetCNAME(const char* c_name) override; + int32_t SetCNAME(absl::string_view c_name) override; // Get remote NTP. int32_t RemoteNTP(uint32_t* received_ntp_secs, @@ -323,8 +323,6 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, int64_t nack_last_time_sent_full_ms_; uint16_t nack_last_seq_number_sent_; - RemoteBitrateEstimator* const remote_bitrate_; - RtcpRttStats* const rtt_stats_; RepeatingTaskHandle rtt_update_task_ RTC_GUARDED_BY(worker_queue_); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h index f3cb8d2c86..8b1d11aa45 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h @@ -15,10 +15,11 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/frame_transformer_interface.h" #include "api/scoped_refptr.h" -#include "api/transport/webrtc_key_value_config.h" #include "api/video/video_bitrate_allocation.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/report_block_data.h" @@ -27,7 +28,6 @@ #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" #include "modules/rtp_rtcp/source/video_fec_generator.h" -#include "rtc_base/constructor_magic.h" #include "system_wrappers/include/ntp_time.h" namespace webrtc { @@ -35,7 +35,6 @@ namespace webrtc { // Forward declarations. class FrameEncryptorInterface; class RateLimiter; -class RemoteBitrateEstimator; class RtcEventLog; class RTPSender; class Transport; @@ -47,6 +46,9 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { Configuration() = default; Configuration(Configuration&& rhs) = default; + Configuration(const Configuration&) = delete; + Configuration& operator=(const Configuration&) = delete; + // True for a audio version of the RTP/RTCP module object false will create // a video version. bool audio = false; @@ -84,10 +86,6 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { RtcpCnameCallback* rtcp_cname_callback = nullptr; ReportBlockDataObserver* report_block_data_observer = nullptr; - // Estimates the bandwidth available for a set of streams from the same - // client. - RemoteBitrateEstimator* remote_bitrate_estimator = nullptr; - // Spread any bursts of packets into smaller bursts to minimize packet loss. RtpPacketSender* paced_sender = nullptr; @@ -126,7 +124,7 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // If set, field trials are read from `field_trials`, otherwise // defaults to webrtc::FieldTrialBasedConfig. 
- const WebRtcKeyValueConfig* field_trials = nullptr; + const FieldTrialsView* field_trials = nullptr; // SSRCs for media and retransmission, respectively. // FlexFec SSRC is fetched from `flexfec_sender`. @@ -146,8 +144,11 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // https://tools.ietf.org/html/rfc3611#section-4.4 and #section-4.5 bool non_sender_rtt_measurement = false; - private: - RTC_DISALLOW_COPY_AND_ASSIGN(Configuration); + // If non-empty, sets the value for sending in the RID (and Repaired) RTP + // header extension. RIDs are used to identify an RTP stream if SSRCs are + // not negotiated. If the RID and Repaired RID extensions are not + // registered, the RID will not be sent. + std::string rid; }; // Stats for RTCP sender reports (SR) for a specific SSRC. @@ -252,16 +253,10 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // Returns SSRC. virtual uint32_t SSRC() const = 0; - // Sets the value for sending in the RID (and Repaired) RTP header extension. - // RIDs are used to identify an RTP stream if SSRCs are not negotiated. - // If the RID and Repaired RID extensions are not registered, the RID will - // not be sent. - virtual void SetRid(const std::string& rid) = 0; - // Sets the value for sending in the MID RTP header extension. // The MID RTP header extension should be registered for this to do anything. // Once set, this value can not be changed or removed. - virtual void SetMid(const std::string& mid) = 0; + virtual void SetMid(absl::string_view mid) = 0; // Sets CSRC. // `csrcs` - vector of CSRCs @@ -336,6 +331,9 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // returned from the FEC generator. virtual std::vector> FetchFecPackets() = 0; + virtual void OnAbortedRetransmissions( + rtc::ArrayView sequence_numbers) = 0; + virtual void OnPacketsAcknowledged( rtc::ArrayView sequence_numbers) = 0; @@ -374,7 +372,7 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // Sets RTCP CName (i.e unique identifier). // Returns -1 on failure else 0. - virtual int32_t SetCNAME(const char* cname) = 0; + virtual int32_t SetCNAME(absl::string_view cname) = 0; // Returns remote NTP. // Returns -1 on failure else 0. diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc index feda738d06..f1f7544b92 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc @@ -17,6 +17,7 @@ #include #include "absl/strings/match.h" +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/rtc_event_log/rtc_event_log.h" #include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" @@ -45,6 +46,10 @@ constexpr size_t kRtpHeaderLength = 12; // Min size needed to get payload padding from packet history. constexpr int kMinPayloadPaddingBytes = 50; +// Determines how much larger a payload padding packet may be, compared to the +// requested padding size. +constexpr double kMaxPaddingSizeFactor = 3.0; + template constexpr RtpExtensionSize CreateExtensionSize() { return {Extension::kId, Extension::kValueSizeBytes}; @@ -138,29 +143,8 @@ bool HasBweExtension(const RtpHeaderExtensionMap& extensions_map) { extensions_map.IsRegistered(kRtpExtensionTransmissionTimeOffset); } -double GetMaxPaddingSizeFactor(const WebRtcKeyValueConfig* field_trials) { - // Too low factor means RTX payload padding is rarely used and ineffective. 
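// [Editor's note] Configuration is now move-only (its copy operations are
// deleted) and carries the RID as a plain data member instead of a SetRid()
// call after construction, which is what lets RTPSender keep rid_ as a const
// member. A standalone model of that pattern; the field names mirror the ones
// in this diff, everything else is illustrative:
#include <string>
#include <utility>

struct Config {
  Config() = default;
  Config(Config&&) = default;
  Config(const Config&) = delete;
  Config& operator=(const Config&) = delete;

  std::string rid;     // set once, before the sender is created
  bool audio = false;
};

class Sender {
 public:
  explicit Sender(Config config) : rid_(std::move(config.rid)) {}

 private:
  const std::string rid_;  // immutable for the sender's lifetime
};

int main() {
  Config config;
  config.rid = "hi";  // illustrative RID value
  Sender sender(std::move(config));
}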
- // Too high means we risk interrupting regular media packets. - // In practice, 3x seems to yield reasonable results. - constexpr double kDefaultFactor = 3.0; - if (!field_trials) { - return kDefaultFactor; - } - - FieldTrialOptional factor("factor", kDefaultFactor); - ParseFieldTrial({&factor}, field_trials->Lookup("WebRTC-LimitPaddingSize")); - RTC_CHECK_GE(factor.Value(), 0.0); - return factor.Value(); -} - } // namespace -RTPSender::RTPSender(const RtpRtcpInterface::Configuration& config, - RtpPacketHistory* packet_history, - RtpPacketSender* packet_sender, - PacketSequencer*) - : RTPSender(config, packet_history, packet_sender) {} - RTPSender::RTPSender(const RtpRtcpInterface::Configuration& config, RtpPacketHistory* packet_history, RtpPacketSender* packet_sender) @@ -171,13 +155,13 @@ RTPSender::RTPSender(const RtpRtcpInterface::Configuration& config, rtx_ssrc_(config.rtx_send_ssrc), flexfec_ssrc_(config.fec_generator ? config.fec_generator->FecSsrc() : absl::nullopt), - max_padding_size_factor_(GetMaxPaddingSizeFactor(config.field_trials)), packet_history_(packet_history), paced_sender_(packet_sender), sending_media_(true), // Default to sending media. max_packet_size_(IP_PACKET_SIZE - 28), // Default is IP-v4/UDP. rtp_header_extension_map_(config.extmap_allow_mixed), // RTP variables + rid_(config.rid), always_send_mid_and_rid_(config.always_send_mid_and_rid), ssrc_has_acked_(false), rtx_ssrc_has_acked_(false), @@ -185,12 +169,14 @@ RTPSender::RTPSender(const RtpRtcpInterface::Configuration& config, rtx_(kRtxOff), supports_bwe_extension_(false), retransmission_rate_limiter_(config.retransmission_rate_limiter) { - UpdateHeaderSizes(); // This random initialization is not intended to be cryptographic strong. timestamp_offset_ = random_.Rand(); RTC_DCHECK(paced_sender_); RTC_DCHECK(packet_history_); + RTC_DCHECK_LE(rid_.size(), RtpStreamId::kMaxValueSizeBytes); + + UpdateHeaderSizes(); } RTPSender::~RTPSender() { @@ -258,6 +244,12 @@ size_t RTPSender::MaxRtpPacketSize() const { void RTPSender::SetRtxStatus(int mode) { MutexLock lock(&send_mutex_); + if (mode != kRtxOff && + (!rtx_ssrc_.has_value() || rtx_payload_type_map_.empty())) { + RTC_LOG(LS_ERROR) + << "Failed to enable RTX without RTX SSRC or payload types."; + return; + } rtx_ = mode; } @@ -280,16 +272,7 @@ void RTPSender::SetRtxPayloadType(int payload_type, } int32_t RTPSender::ReSendPacket(uint16_t packet_id) { - // Try to find packet in RTP packet history. Also verify RTT here, so that we - // don't retransmit too often. - absl::optional stored_packet = - packet_history_->GetPacketState(packet_id); - if (!stored_packet || stored_packet->pending_transmission) { - // Packet not found or already queued for retransmission, ignore. - return 0; - } - - const int32_t packet_size = static_cast(stored_packet->packet_size); + int32_t packet_size = 0; const bool rtx = (RtxStatus() & kRtxRetransmitted) > 0; std::unique_ptr packet = @@ -298,6 +281,7 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id) { // Check if we're overusing retransmission bitrate. // TODO(sprang): Add histograms for nack success or failure // reasons. + packet_size = stored_packet.size(); std::unique_ptr retransmit_packet; if (retransmission_rate_limiter_ && !retransmission_rate_limiter_->TryUseRate(packet_size)) { @@ -315,7 +299,14 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id) { } return retransmit_packet; }); + if (packet_size == 0) { + // Packet not found or already queued for retransmission, ignore. 
+ RTC_DCHECK(!packet); + return 0; + } if (!packet) { + // Packet was found, but lambda helper above chose not to create + // `retransmit_packet` out of it. return -1; } packet->set_packet_type(RtpPacketMediaType::kRetransmission); @@ -349,7 +340,7 @@ void RTPSender::OnReceivedAckOnRtxSsrc( void RTPSender::OnReceivedNack( const std::vector& nack_sequence_numbers, int64_t avg_rtt) { - packet_history_->SetRtt(5 + avg_rtt); + packet_history_->SetRtt(TimeDelta::Millis(5 + avg_rtt)); for (uint16_t seq_no : nack_sequence_numbers) { const int32_t bytes_sent = ReSendPacket(seq_no); if (bytes_sent < 0) { @@ -389,11 +380,10 @@ std::vector> RTPSender::GeneratePadding( packet_history_->GetPayloadPaddingPacket( [&](const RtpPacketToSend& packet) -> std::unique_ptr { - // Limit overshoot, generate <= `max_padding_size_factor_` * - // target_size_bytes. + // Limit overshoot, generate <= `kMaxPaddingSizeFactor` * + // `target_size_bytes`. const size_t max_overshoot_bytes = static_cast( - ((max_padding_size_factor_ - 1.0) * target_size_bytes) + - 0.5); + ((kMaxPaddingSizeFactor - 1.0) * target_size_bytes) + 0.5); if (packet.payload_size() + kRtxHeaderSize > max_overshoot_bytes + bytes_left) { return nullptr; @@ -453,6 +443,7 @@ std::vector> RTPSender::GeneratePadding( } RTC_DCHECK(rtx_ssrc_); + RTC_DCHECK(!rtx_payload_type_map_.empty()); padding_packet->SetSsrc(*rtx_ssrc_); padding_packet->SetPayloadType(rtx_payload_type_map_.begin()->second); } @@ -477,13 +468,11 @@ std::vector> RTPSender::GeneratePadding( bool RTPSender::SendToNetwork(std::unique_ptr packet) { RTC_DCHECK(packet); - int64_t now_ms = clock_->TimeInMilliseconds(); - auto packet_type = packet->packet_type(); RTC_CHECK(packet_type) << "Packet type must be set before sending."; - if (packet->capture_time_ms() <= 0) { - packet->set_capture_time_ms(now_ms); + if (packet->capture_time() <= Timestamp::Zero()) { + packet->set_capture_time(clock_->CurrentTime()); } std::vector> packets; @@ -496,13 +485,13 @@ bool RTPSender::SendToNetwork(std::unique_ptr packet) { void RTPSender::EnqueuePackets( std::vector> packets) { RTC_DCHECK(!packets.empty()); - int64_t now_ms = clock_->TimeInMilliseconds(); + Timestamp now = clock_->CurrentTime(); for (auto& packet : packets) { RTC_DCHECK(packet); RTC_CHECK(packet->packet_type().has_value()) << "Packet type must be set before sending."; - if (packet->capture_time_ms() <= 0) { - packet->set_capture_time_ms(now_ms); + if (packet->capture_time() <= Timestamp::Zero()) { + packet->set_capture_time(now); } } @@ -585,19 +574,11 @@ uint32_t RTPSender::TimestampOffset() const { return timestamp_offset_; } -void RTPSender::SetRid(const std::string& rid) { - // RID is used in simulcast scenario when multiple layers share the same mid. - MutexLock lock(&send_mutex_); - RTC_DCHECK_LE(rid.length(), RtpStreamId::kMaxValueSizeBytes); - rid_ = rid; - UpdateHeaderSizes(); -} - -void RTPSender::SetMid(const std::string& mid) { +void RTPSender::SetMid(absl::string_view mid) { // This is configured via the API. MutexLock lock(&send_mutex_); RTC_DCHECK_LE(mid.length(), RtpMid::kMaxValueSizeBytes); - mid_ = mid; + mid_ = std::string(mid); UpdateHeaderSizes(); } @@ -713,13 +694,15 @@ std::unique_ptr RTPSender::BuildRtxPacket( // Add original payload data. auto payload = packet.payload(); - memcpy(rtx_payload + kRtxHeaderSize, payload.data(), payload.size()); + if (!payload.empty()) { + memcpy(rtx_payload + kRtxHeaderSize, payload.data(), payload.size()); + } // Add original additional data. 
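// [Editor's note] The padding path above now uses the fixed
// kMaxPaddingSizeFactor (3.0) rather than the removed
// "WebRTC-LimitPaddingSize" field trial. Standalone model of the acceptance
// test: a history packet is reused as RTX payload padding only if it keeps the
// generated padding within roughly three times the requested amount
// (kRtxHeaderSize models the 2-byte original-sequence-number field of RTX).
#include <cstddef>

constexpr double kMaxPaddingSizeFactor = 3.0;
constexpr std::size_t kRtxHeaderSize = 2;

bool AcceptAsPayloadPadding(std::size_t payload_size,
                            std::size_t target_size_bytes,
                            std::size_t bytes_left) {
  const std::size_t max_overshoot_bytes = static_cast<std::size_t>(
      ((kMaxPaddingSizeFactor - 1.0) * target_size_bytes) + 0.5);
  return payload_size + kRtxHeaderSize <= max_overshoot_bytes + bytes_left;
}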
rtx_packet->set_additional_data(packet.additional_data()); // Copy capture time so e.g. TransmissionOffset is correctly set. - rtx_packet->set_capture_time_ms(packet.capture_time_ms()); + rtx_packet->set_capture_time(packet.capture_time()); return rtx_packet; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.h index d892970542..55dee7f219 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.h @@ -21,12 +21,11 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "api/call/transport.h" -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_view.h" #include "modules/rtp_rtcp/include/flexfec_sender.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_packet_sender.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/packet_sequencer.h" #include "modules/rtp_rtcp/source/rtp_packet_history.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" @@ -47,14 +46,6 @@ class RTPSender { RTPSender(const RtpRtcpInterface::Configuration& config, RtpPacketHistory* packet_history, RtpPacketSender* packet_sender); - - ABSL_DEPRECATED("bugs.webrtc.org/11340") - RTPSender(const RtpRtcpInterface::Configuration& config, - RtpPacketHistory* packet_history, - RtpPacketSender* packet_sender, - PacketSequencer* packet_sequencer); - - RTPSender() = delete; RTPSender(const RTPSender&) = delete; RTPSender& operator=(const RTPSender&) = delete; @@ -67,9 +58,7 @@ class RTPSender { uint32_t TimestampOffset() const RTC_LOCKS_EXCLUDED(send_mutex_); void SetTimestampOffset(uint32_t timestamp) RTC_LOCKS_EXCLUDED(send_mutex_); - void SetRid(const std::string& rid) RTC_LOCKS_EXCLUDED(send_mutex_); - - void SetMid(const std::string& mid) RTC_LOCKS_EXCLUDED(send_mutex_); + void SetMid(absl::string_view mid) RTC_LOCKS_EXCLUDED(send_mutex_); uint16_t SequenceNumber() const RTC_LOCKS_EXCLUDED(send_mutex_); void SetSequenceNumber(uint16_t seq) RTC_LOCKS_EXCLUDED(send_mutex_); @@ -186,9 +175,6 @@ class RTPSender { const uint32_t ssrc_; const absl::optional rtx_ssrc_; const absl::optional flexfec_ssrc_; - // Limits GeneratePadding() outcome to <= - // `max_padding_size_factor_` * `target_size_bytes` - const double max_padding_size_factor_; RtpPacketHistory* const packet_history_; RtpPacketSender* const paced_sender_; @@ -205,7 +191,7 @@ class RTPSender { // RTP variables uint32_t timestamp_offset_ RTC_GUARDED_BY(send_mutex_); // RID value to send in the RID or RepairedRID header extension. - std::string rid_ RTC_GUARDED_BY(send_mutex_); + const std::string rid_; // MID value to send in the MID header extension. std::string mid_ RTC_GUARDED_BY(send_mutex_); // Should we send MID/RID even when ACKed? (see below). 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc index 207d1ca045..244f644bd1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc @@ -35,9 +35,7 @@ namespace webrtc { namespace { - -#if RTC_TRACE_EVENTS_ENABLED -const char* FrameTypeToString(AudioFrameType frame_type) { +[[maybe_unused]] const char* FrameTypeToString(AudioFrameType frame_type) { switch (frame_type) { case AudioFrameType::kEmptyFrame: return "empty"; @@ -48,7 +46,6 @@ const char* FrameTypeToString(AudioFrameType frame_type) { } RTC_CHECK_NOTREACHED(); } -#endif constexpr char kIncludeCaptureClockOffset[] = "WebRTC-IncludeCaptureClockOffset"; @@ -166,10 +163,8 @@ bool RTPSenderAudio::SendAudio(AudioFrameType frame_type, const uint8_t* payload_data, size_t payload_size, int64_t absolute_capture_timestamp_ms) { -#if RTC_TRACE_EVENTS_ENABLED TRACE_EVENT_ASYNC_STEP1("webrtc", "Audio", rtp_timestamp, "Send", "type", FrameTypeToString(frame_type)); - #endif // From RFC 4733: // A source has wide latitude as to how often it sends event updates. A @@ -272,7 +267,7 @@ bool RTPSenderAudio::SendAudio(AudioFrameType frame_type, packet->SetMarker(MarkerBit(frame_type, payload_type)); packet->SetPayloadType(payload_type); packet->SetTimestamp(rtp_timestamp); - packet->set_capture_time_ms(clock_->TimeInMilliseconds()); + packet->set_capture_time(clock_->CurrentTime()); // Update audio level extension, if included. packet->SetExtension( frame_type == AudioFrameType::kAudioFrameSpeech, audio_level_dbov); @@ -370,7 +365,7 @@ bool RTPSenderAudio::SendTelephoneEventPacket(bool ended, packet->SetMarker(marker_bit); packet->SetSsrc(rtp_sender_->SSRC()); packet->SetTimestamp(dtmf_timestamp); - packet->set_capture_time_ms(clock_->TimeInMilliseconds()); + packet->set_capture_time(clock_->CurrentTime()); // Create DTMF data. uint8_t* dtmfbuffer = packet->AllocatePayload(kDtmfSize); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h index 6d61facc9a..d40fee6386 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h @@ -62,7 +62,7 @@ class RTPSenderAudio { // Store the audio level in dBov for // header-extension-for-audio-level-indication. - // Valid range is [0,100]. Actual value is negative. + // Valid range is [0,127]. Actual value is negative. int32_t SetAudioLevel(uint8_t level_dbov); // Send a DTMF tone using RFC 2833 (4733) @@ -105,7 +105,7 @@ class RTPSenderAudio { // Audio level indication. 
// (https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/) - uint8_t audio_level_dbov_ RTC_GUARDED_BY(send_audio_mutex_) = 0; + uint8_t audio_level_dbov_ RTC_GUARDED_BY(send_audio_mutex_) = 127; OneTimeEvent first_packet_sent_; absl::optional encoder_rtp_timestamp_frequency_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc index eb55378083..e81ea8da19 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc @@ -19,7 +19,6 @@ #include "api/transport/field_trial_based_config.h" #include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" #include "rtc_base/logging.h" -#include "rtc_base/task_utils/to_queued_task.h" namespace webrtc { namespace { @@ -30,7 +29,7 @@ constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13; constexpr TimeDelta kUpdateInterval = TimeDelta::Millis(kBitrateStatisticsWindowMs); -bool IsTrialSetTo(const WebRtcKeyValueConfig* field_trials, +bool IsTrialSetTo(const FieldTrialsView* field_trials, absl::string_view name, absl::string_view value) { FieldTrialBasedConfig default_trials; @@ -105,7 +104,6 @@ RtpSenderEgress::RtpSenderEgress(const RtpRtcpInterface::Configuration& config, timestamp_offset_(0), max_delay_it_(send_delays_.end()), sum_delays_ms_(0), - total_packet_send_delay_ms_(0), send_rates_(kNumMediaTypes, {kBitrateStatisticsWindowMs, RateStatistics::kBpsScale}), rtp_sequence_number_map_(need_rtp_packet_infos_ @@ -155,19 +153,19 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet, } const uint32_t packet_ssrc = packet->Ssrc(); - const int64_t now_ms = clock_->TimeInMilliseconds(); + const Timestamp now = clock_->CurrentTime(); #if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE worker_queue_->PostTask( - ToQueuedTask(task_safety_, [this, now_ms, packet_ssrc]() { - BweTestLoggingPlot(now_ms, packet_ssrc); + SafeTask(task_safety_.flag(), [this, now, packet_ssrc]() { + BweTestLoggingPlot(now.ms(), packet_ssrc); })); #endif if (need_rtp_packet_infos_ && packet->packet_type() == RtpPacketToSend::Type::kVideo) { - worker_queue_->PostTask(ToQueuedTask( - task_safety_, + worker_queue_->PostTask(SafeTask( + task_safety_.flag(), [this, packet_timestamp = packet->Timestamp(), is_first_packet_of_frame = packet->is_first_packet_of_frame(), is_last_packet_of_frame = packet->Marker(), @@ -225,20 +223,19 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet, // In case of VideoTimingExtension, since it's present not in every packet, // data after rtp header may be corrupted if these packets are protected by // the FEC. 
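// [Editor's note] The audio-level header extension carries the level as -dBov
// in a 7-bit field, so the valid range is 0..127 and 127 is the quietest
// representable level. That is why the comment above is corrected from [0,100]
// to [0,127] and the default audio_level_dbov_ becomes 127 (silence) rather
// than 0 (full scale). A standalone clamp following that convention:
#include <algorithm>
#include <cstdint>

uint8_t ToAudioLevelDbov(int level_dbov) {  // 0 = 0 dBov, 127 = -127 dBov
  return static_cast<uint8_t>(std::clamp(level_dbov, 0, 127));
}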
- int64_t diff_ms = now_ms - packet->capture_time_ms(); + TimeDelta diff = now - packet->capture_time(); if (packet->HasExtension()) { - packet->SetExtension(kTimestampTicksPerMs * diff_ms); + packet->SetExtension(kTimestampTicksPerMs * diff.ms()); } if (packet->HasExtension()) { - packet->SetExtension( - AbsoluteSendTime::MsTo24Bits(now_ms)); + packet->SetExtension(AbsoluteSendTime::To24Bits(now)); } if (packet->HasExtension()) { if (populate_network2_timestamp_) { - packet->set_network2_time_ms(now_ms); + packet->set_network2_time(now); } else { - packet->set_pacer_exit_time_ms(now_ms); + packet->set_pacer_exit_time(now); } } @@ -265,8 +262,8 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet, if (packet->packet_type() != RtpPacketMediaType::kPadding && packet->packet_type() != RtpPacketMediaType::kRetransmission) { - UpdateDelayStatistics(packet->capture_time_ms(), now_ms, packet_ssrc); - UpdateOnSendPacket(options.packet_id, packet->capture_time_ms(), + UpdateDelayStatistics(packet->capture_time().ms(), now.ms(), packet_ssrc); + UpdateOnSendPacket(options.packet_id, packet->capture_time().ms(), packet_ssrc); } @@ -276,7 +273,7 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet, // actual sending fails. if (is_media && packet->allow_retransmission()) { packet_history_->PutRtpPacket(std::make_unique(*packet), - now_ms); + now); } else if (packet->retransmitted_sequence_number()) { packet_history_->MarkPacketAsSent(*packet->retransmitted_sequence_number()); } @@ -295,10 +292,10 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet, RtpPacketCounter counter(*packet); size_t size = packet->size(); worker_queue_->PostTask( - ToQueuedTask(task_safety_, [this, now_ms, packet_ssrc, packet_type, - counter = std::move(counter), size]() { + SafeTask(task_safety_.flag(), [this, now, packet_ssrc, packet_type, + counter = std::move(counter), size]() { RTC_DCHECK_RUN_ON(worker_queue_); - UpdateRtpStats(now_ms, packet_ssrc, packet_type, std::move(counter), + UpdateRtpStats(now.ms(), packet_ssrc, packet_type, std::move(counter), size); })); } @@ -392,6 +389,17 @@ RtpSenderEgress::FetchFecPackets() { return {}; } +void RtpSenderEgress::OnAbortedRetransmissions( + rtc::ArrayView sequence_numbers) { + RTC_DCHECK_RUN_ON(&pacer_checker_); + // Mark aborted retransmissions as sent, rather than leaving them in + // a 'pending' state - otherwise they can not be requested again and + // will not be cleared until the history has reached its max size. + for (uint16_t seq_no : sequence_numbers) { + packet_history_->MarkPacketAsSent(seq_no); + } +} + bool RtpSenderEgress::HasCorrectSsrc(const RtpPacketToSend& packet) const { switch (*packet.packet_type()) { case RtpPacketMediaType::kAudio: @@ -458,7 +466,6 @@ void RtpSenderEgress::UpdateDelayStatistics(int64_t capture_time_ms, int avg_delay_ms = 0; int max_delay_ms = 0; - uint64_t total_packet_send_delay_ms = 0; { MutexLock lock(&lock_); // Compute the max and average of the recent capture-to-send delays. 
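The same hunk also switches the absolute send time extension from `AbsoluteSendTime::MsTo24Bits(now_ms)` to `AbsoluteSendTime::To24Bits(now)`. The extension encodes the send time as a 24-bit fixed-point value with 6 bits of seconds and 18 bits of fraction, so it wraps every 64 seconds. A self-contained sketch of that conversion using round-to-nearest integer arithmetic (the exact rounding in the WebRTC helper may differ in detail):

    #include <cstdint>
    #include <cstdio>

    // 24-bit absolute send time: 6.18 fixed-point seconds, wraps every 64 s.
    uint32_t AbsSendTime24FromMicros(int64_t time_us) {
      // (time_us / 1e6) * 2^18, computed in integers with round-to-nearest.
      return static_cast<uint32_t>(((time_us << 18) + 500000) / 1000000) &
             0x00FFFFFF;
    }

    int main() {
      // 1.5 s maps to 1.5 * 2^18 = 393216.
      std::printf("%u\n", AbsSendTime24FromMicros(1500000));
      return 0;
    }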
@@ -509,8 +516,6 @@ void RtpSenderEgress::UpdateDelayStatistics(int64_t capture_time_ms, max_delay_it_ = it; } sum_delays_ms_ += new_send_delay; - total_packet_send_delay_ms_ += new_send_delay; - total_packet_send_delay_ms = total_packet_send_delay_ms_; size_t num_delays = send_delays_.size(); RTC_DCHECK(max_delay_it_ != send_delays_.end()); @@ -522,8 +527,8 @@ void RtpSenderEgress::UpdateDelayStatistics(int64_t capture_time_ms, avg_delay_ms = rtc::dchecked_cast((sum_delays_ms_ + num_delays / 2) / num_delays); } - send_side_delay_observer_->SendSideDelayUpdated( - avg_delay_ms, max_delay_ms, total_packet_send_delay_ms, ssrc); + send_side_delay_observer_->SendSideDelayUpdated(avg_delay_ms, max_delay_ms, + ssrc); } void RtpSenderEgress::RecomputeMaxSendDelay() { diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.h index 1fd7128966..c46f6aeb40 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.h @@ -20,6 +20,7 @@ #include "api/call/transport.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "api/units/data_rate.h" #include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" @@ -32,7 +33,6 @@ #include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" @@ -91,6 +91,10 @@ class RtpSenderEgress { const FecProtectionParams& key_params); std::vector> FetchFecPackets(); + // Clears pending status for these sequence numbers in the packet history. + void OnAbortedRetransmissions( + rtc::ArrayView sequence_numbers); + private: // Maps capture time in milliseconds to send-side delay in milliseconds. // Send-side delay is the difference between transmission time and capture @@ -161,7 +165,6 @@ class RtpSenderEgress { SendDelayMap::const_iterator max_delay_it_ RTC_GUARDED_BY(lock_); // The sum of delays over a kSendSideDelayWindowMs sliding window. int64_t sum_delays_ms_ RTC_GUARDED_BY(lock_); - uint64_t total_packet_send_delay_ms_ RTC_GUARDED_BY(lock_); StreamDataCounters rtp_stats_ RTC_GUARDED_BY(lock_); StreamDataCounters rtx_rtp_stats_ RTC_GUARDED_BY(lock_); // One element per value in RtpPacketMediaType, with index matching value. 
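UpdateDelayStatistics() above keeps a window of capture-to-send delays and reports their maximum and their average with the `(sum_delays_ms_ + num_delays / 2) / num_delays` rounding seen in the hunk; the change only drops the cumulative `total_packet_send_delay_ms_` counter from that path. A simplified, self-contained sketch of the same windowed bookkeeping (one sample per entry, without the per-capture-time duplicate counting the real map does):

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <deque>
    #include <utility>

    // Windowed capture-to-send delay statistics, simplified from the egress code.
    class SendDelayStats {
     public:
      explicit SendDelayStats(int64_t window_ms) : window_ms_(window_ms) {}

      // Adds one sample and returns {avg_ms, max_ms} over the current window.
      std::pair<int64_t, int64_t> AddDelay(int64_t send_time_ms, int64_t delay_ms) {
        samples_.push_back({send_time_ms, delay_ms});
        sum_ += delay_ms;
        // Evict samples that fell out of the window.
        while (!samples_.empty() &&
               samples_.front().send_time_ms <= send_time_ms - window_ms_) {
          sum_ -= samples_.front().delay_ms;
          samples_.pop_front();
        }
        int64_t max_delay = 0;
        for (const Sample& s : samples_) max_delay = std::max(max_delay, s.delay_ms);
        // Round-to-nearest average, mirroring the arithmetic in the hunk above.
        int64_t n = static_cast<int64_t>(samples_.size());
        return {(sum_ + n / 2) / n, max_delay};
      }

     private:
      struct Sample { int64_t send_time_ms; int64_t delay_ms; };
      const int64_t window_ms_;
      std::deque<Sample> samples_;
      int64_t sum_ = 0;
    };

    int main() {
      SendDelayStats stats(/*window_ms=*/1000);
      stats.AddDelay(/*send_time_ms=*/10, /*delay_ms=*/5);
      auto [avg, max] = stats.AddDelay(/*send_time_ms=*/20, /*delay_ms=*/10);
      std::printf("avg=%lld max=%lld\n", (long long)avg, (long long)max);  // avg=8 max=10
      return 0;
    }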
diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc index ca8030f74a..810214926f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc @@ -97,8 +97,7 @@ bool IsBaseLayer(const RTPVideoHeader& video_header) { return true; } -#if RTC_TRACE_EVENTS_ENABLED -const char* FrameTypeToString(VideoFrameType frame_type) { +[[maybe_unused]] const char* FrameTypeToString(VideoFrameType frame_type) { switch (frame_type) { case VideoFrameType::kEmptyFrame: return "empty"; @@ -111,14 +110,13 @@ const char* FrameTypeToString(VideoFrameType frame_type) { return ""; } } -#endif bool IsNoopDelay(const VideoPlayoutDelay& delay) { return delay.min_ms == -1 && delay.max_ms == -1; } absl::optional LoadVideoPlayoutDelayOverride( - const WebRtcKeyValueConfig* key_value_config) { + const FieldTrialsView* key_value_config) { RTC_DCHECK(key_value_config); FieldTrialOptional playout_delay_min_ms("min_ms", absl::nullopt); FieldTrialOptional playout_delay_max_ms("max_ms", absl::nullopt); @@ -173,7 +171,7 @@ RTPSenderVideo::RTPSenderVideo(const Config& config) this, config.frame_transformer, rtp_sender_->SSRC(), - config.send_transport_queue) + config.task_queue_factory) : nullptr), include_capture_clock_offset_(!absl::StartsWith( config.field_trials->Lookup(kIncludeCaptureClockOffset), @@ -477,10 +475,8 @@ bool RTPSenderVideo::SendVideo( rtc::ArrayView payload, RTPVideoHeader video_header, absl::optional expected_retransmission_time_ms) { -#if RTC_TRACE_EVENTS_ENABLED TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", capture_time_ms, "Send", "type", FrameTypeToString(video_header.frame_type)); -#endif RTC_CHECK_RUNS_SERIALIZED(&send_checker_); if (video_header.frame_type == VideoFrameType::kEmptyFrame) @@ -534,7 +530,7 @@ bool RTPSenderVideo::SendVideo( RTC_DCHECK_LE(packet_capacity, single_packet->capacity()); single_packet->SetPayloadType(payload_type); single_packet->SetTimestamp(rtp_timestamp); - single_packet->set_capture_time_ms(capture_time_ms); + single_packet->set_capture_time(Timestamp::Millis(capture_time_ms)); // Construct the absolute capture time extension if not provided. if (!video_header.absolute_capture_time.has_value()) { @@ -695,7 +691,7 @@ bool RTPSenderVideo::SendVideo( // Put packetization finish timestamp into extension. 
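Both rtp_sender_audio.cc and rtp_sender_video.cc drop the `#if RTC_TRACE_EVENTS_ENABLED` guard around `FrameTypeToString()` and mark the helper `[[maybe_unused]]` instead, so it keeps compiling without warnings even when the trace macro discards its arguments. A tiny illustration of the pattern (the `TRACING` macro and `FrameKindToString` helper are invented for this sketch):

    #include <cstdio>

    // When TRACING is off the macro drops its argument, so FrameKindToString()
    // would otherwise trigger an unused-function warning; [[maybe_unused]]
    // keeps it compiled without a preprocessor guard around the definition.
    #if defined(TRACING)
    #define TRACE_NAME(x) std::puts(x)
    #else
    #define TRACE_NAME(x) do {} while (0)
    #endif

    namespace {
    [[maybe_unused]] const char* FrameKindToString(int kind) {
      return kind == 0 ? "empty" : kind == 1 ? "audio" : "video";
    }
    }  // namespace

    int main() {
      TRACE_NAME(FrameKindToString(1));  // no-op unless TRACING is defined
      return 0;
    }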
if (packet->HasExtension()) { - packet->set_packetization_finish_time_ms(clock_->TimeInMilliseconds()); + packet->set_packetization_finish_time(clock_->CurrentTime()); } packet->set_fec_protect_packet(use_fec); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h index 5164969489..ecff8d42de 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h @@ -22,6 +22,7 @@ #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" #include "api/transport/rtp/dependency_descriptor.h" #include "api/video/video_codec_type.h" #include "api/video/video_frame_type.h" @@ -79,9 +80,9 @@ class RTPSenderVideo { bool require_frame_encryption = false; bool enable_retransmit_all_layers = false; absl::optional red_payload_type; - const WebRtcKeyValueConfig* field_trials = nullptr; + const FieldTrialsView* field_trials = nullptr; rtc::scoped_refptr frame_transformer; - TaskQueueBase* send_transport_queue = nullptr; + TaskQueueFactory* task_queue_factory = nullptr; }; explicit RTPSenderVideo(const Config& config); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc index 377f6c4fbf..3d6931fe23 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc @@ -13,10 +13,11 @@ #include #include -#include "absl/memory/memory.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_factory.h" #include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" #include "modules/rtp_rtcp/source/rtp_sender_video.h" -#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/checks.h" namespace webrtc { namespace { @@ -98,11 +99,13 @@ RTPSenderVideoFrameTransformerDelegate::RTPSenderVideoFrameTransformerDelegate( RTPSenderVideo* sender, rtc::scoped_refptr frame_transformer, uint32_t ssrc, - TaskQueueBase* send_transport_queue) + TaskQueueFactory* task_queue_factory) : sender_(sender), frame_transformer_(std::move(frame_transformer)), ssrc_(ssrc), - send_transport_queue_(send_transport_queue) {} + task_queue_factory_(task_queue_factory) { + RTC_DCHECK(task_queue_factory_); +} void RTPSenderVideoFrameTransformerDelegate::Init() { frame_transformer_->RegisterTransformedFrameSinkCallback( @@ -116,14 +119,29 @@ bool RTPSenderVideoFrameTransformerDelegate::TransformFrame( const EncodedImage& encoded_image, RTPVideoHeader video_header, absl::optional expected_retransmission_time_ms) { + TaskQueueBase* current = TaskQueueBase::Current(); if (!encoder_queue_) { // Save the current task queue to post the transformed frame for sending // once it is transformed. When there is no current task queue, i.e. // encoding is done on an external thread (for example in the case of - // hardware encoders), use the send transport queue instead. - TaskQueueBase* current = TaskQueueBase::Current(); - encoder_queue_ = current ? current : send_transport_queue_; + // hardware encoders), create a new task queue. 
+ if (current) { + encoder_queue_ = current; + } else { + owned_encoder_queue_ = task_queue_factory_->CreateTaskQueue( + "video_frame_transformer", TaskQueueFactory::Priority::NORMAL); + encoder_queue_ = owned_encoder_queue_.get(); + } } + // DCHECK that the current queue does not change, or if does then it was due + // to a hardware encoder fallback and thus there is an owned queue. + RTC_DCHECK(!current || current == encoder_queue_ || owned_encoder_queue_) + << "Current thread must either be an external thread (nullptr) or be the " + "same as the previous encoder queue. The current thread is " + << (current ? "non-null" : "nullptr") << " and the encoder thread is " + << (current == encoder_queue_ ? "the same queue." + : "not the same queue."); + frame_transformer_->Transform(std::make_unique( encoded_image, video_header, payload_type, codec_type, rtp_timestamp, expected_retransmission_time_ms, ssrc_)); @@ -139,16 +157,17 @@ void RTPSenderVideoFrameTransformerDelegate::OnTransformedFrame( // arrives. if (!sender_ || !encoder_queue_) return; - rtc::scoped_refptr delegate = this; - encoder_queue_->PostTask(ToQueuedTask( + rtc::scoped_refptr delegate(this); + encoder_queue_->PostTask( [delegate = std::move(delegate), frame = std::move(frame)]() mutable { + RTC_DCHECK_RUN_ON(delegate->encoder_queue_); delegate->SendVideo(std::move(frame)); - })); + }); } void RTPSenderVideoFrameTransformerDelegate::SendVideo( std::unique_ptr transformed_frame) const { - RTC_CHECK(encoder_queue_->IsCurrent()); + RTC_DCHECK_RUN_ON(encoder_queue_); RTC_CHECK_EQ(transformed_frame->GetDirection(), TransformableFrameInterface::Direction::kSender); MutexLock lock(&sender_lock_); @@ -161,8 +180,7 @@ void RTPSenderVideoFrameTransformerDelegate::SendVideo( transformed_video_frame->GetCodecType(), transformed_video_frame->GetTimestamp(), transformed_video_frame->GetCaptureTimeMs(), - transformed_video_frame->GetData(), - transformed_video_frame->GetHeader(), + transformed_video_frame->GetData(), transformed_video_frame->GetHeader(), transformed_video_frame->GetExpectedRetransmissionTimeMs()); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h index 10d0241455..65d6d3f6cd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h @@ -15,7 +15,9 @@ #include "api/frame_transformer_interface.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" #include "api/video/video_layers_allocation.h" #include "rtc_base/synchronization/mutex.h" @@ -32,7 +34,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { RTPSenderVideo* sender, rtc::scoped_refptr frame_transformer, uint32_t ssrc, - TaskQueueBase* send_transport_queue); + TaskQueueFactory* send_transport_queue); void Init(); @@ -50,7 +52,8 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { std::unique_ptr frame) override; // Delegates the call to RTPSendVideo::SendVideo on the `encoder_queue_`. 
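The frame transformer delegate above now takes a `TaskQueueFactory` and, when `TaskQueueBase::Current()` is null (frames delivered from an external or hardware-encoder thread), lazily creates a queue it owns. A standalone sketch of that fallback pattern; the `TaskQueue`, `InlineQueue` and `QueueFactory` types below are deliberately toy stand-ins, not the WebRTC task-queue API:

    #include <cstdio>
    #include <functional>
    #include <memory>

    // Toy stand-ins for illustration only; the real TaskQueueBase and
    // TaskQueueFactory live in api/task_queue/.
    struct TaskQueue {
      virtual ~TaskQueue() = default;
      virtual void PostTask(std::function<void()> task) = 0;
    };

    struct InlineQueue : TaskQueue {
      void PostTask(std::function<void()> task) override { task(); }
    };

    struct QueueFactory {
      std::unique_ptr<TaskQueue> Create(const char* name) {
        std::printf("creating owned queue '%s'\n", name);
        return std::make_unique<InlineQueue>();
      }
    };

    class FrameDelegate {
     public:
      explicit FrameDelegate(QueueFactory* factory) : factory_(factory) {}

      void OnFrame(TaskQueue* current) {
        if (!queue_) {
          if (current) {
            queue_ = current;  // reuse the encoder's own queue
          } else {
            // Hardware-encoder fallback: no current queue, so create and own one.
            owned_queue_ = factory_->Create("video_frame_transformer");
            queue_ = owned_queue_.get();
          }
        }
        queue_->PostTask([] { std::puts("send transformed frame"); });
      }

     private:
      QueueFactory* const factory_;
      TaskQueue* queue_ = nullptr;              // whichever queue is in use
      std::unique_ptr<TaskQueue> owned_queue_;  // only set in the fallback case
    };

    int main() {
      QueueFactory factory;
      FrameDelegate delegate(&factory);
      delegate.OnFrame(/*current=*/nullptr);  // external thread -> owned queue
      return 0;
    }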
- void SendVideo(std::unique_ptr frame) const; + void SendVideo(std::unique_ptr frame) const + RTC_RUN_ON(encoder_queue_); // Delegates the call to RTPSendVideo::SetVideoStructureAfterTransformation // under `sender_lock_`. @@ -76,7 +79,10 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { rtc::scoped_refptr frame_transformer_; const uint32_t ssrc_; TaskQueueBase* encoder_queue_ = nullptr; - TaskQueueBase* send_transport_queue_; + TaskQueueFactory* task_queue_factory_; + // Used when the encoded frames arrives without a current task queue. This can + // happen if a hardware encoder was used. + std::unique_ptr owned_encoder_queue_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/source_tracker.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/source_tracker.cc index f9aa003fb0..7a5cbac77d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/source_tracker.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/source_tracker.cc @@ -13,6 +13,8 @@ #include #include +#include "rtc_base/trace_event.h" + namespace webrtc { constexpr int64_t SourceTracker::kTimeoutMs; @@ -24,10 +26,12 @@ void SourceTracker::OnFrameDelivered(const RtpPacketInfos& packet_infos) { return; } + TRACE_EVENT0("webrtc", "SourceTracker::OnFrameDelivered"); + int64_t now_ms = clock_->TimeInMilliseconds(); MutexLock lock_scope(&lock_); - for (const auto& packet_info : packet_infos) { + for (const RtpPacketInfo& packet_info : packet_infos) { for (uint32_t csrc : packet_info.csrcs()) { SourceKey key(RtpSourceType::CSRC, csrc); SourceEntry& entry = UpdateEntry(key); @@ -35,6 +39,8 @@ void SourceTracker::OnFrameDelivered(const RtpPacketInfos& packet_infos) { entry.timestamp_ms = now_ms; entry.audio_level = packet_info.audio_level(); entry.absolute_capture_time = packet_info.absolute_capture_time(); + entry.local_capture_clock_offset = + packet_info.local_capture_clock_offset(); entry.rtp_timestamp = packet_info.rtp_timestamp(); } @@ -44,6 +50,7 @@ void SourceTracker::OnFrameDelivered(const RtpPacketInfos& packet_infos) { entry.timestamp_ms = now_ms; entry.audio_level = packet_info.audio_level(); entry.absolute_capture_time = packet_info.absolute_capture_time(); + entry.local_capture_clock_offset = packet_info.local_capture_clock_offset(); entry.rtp_timestamp = packet_info.rtp_timestamp(); } @@ -64,7 +71,10 @@ std::vector SourceTracker::GetSources() const { sources.emplace_back( entry.timestamp_ms, key.source, key.source_type, entry.rtp_timestamp, - RtpSource::Extensions{entry.audio_level, entry.absolute_capture_time}); + RtpSource::Extensions{ + .audio_level = entry.audio_level, + .absolute_capture_time = entry.absolute_capture_time, + .local_capture_clock_offset = entry.local_capture_clock_offset}); } return sources; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/source_tracker.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/source_tracker.h index 3f3ef8cf73..f9e8354d44 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/source_tracker.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/source_tracker.h @@ -20,6 +20,7 @@ #include "absl/types/optional.h" #include "api/rtp_packet_infos.h" #include "api/transport/rtp/rtp_source.h" +#include "api/units/time_delta.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/clock.h" @@ -95,6 +96,13 @@ class SourceTracker { // 
https://webrtc.org/experiments/rtp-hdrext/abs-capture-time/ absl::optional absolute_capture_time; + // Clock offset between the local clock and the capturer's clock. + // Do not confuse with `AbsoluteCaptureTime::estimated_capture_clock_offset` + // which instead represents the clock offset between a remote sender and the + // capturer. The following holds: + // Capture's NTP Clock = Local NTP Clock + Local-Capture Clock Offset + absl::optional local_capture_clock_offset; + // RTP timestamp of the most recent packet used to assemble the frame // associated with `timestamp_ms`. uint32_t rtp_timestamp; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/time_util.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/time_util.cc index fe0cfea11f..44ca07dabe 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/time_util.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/time_util.cc @@ -18,35 +18,37 @@ namespace webrtc { -uint32_t SaturatedUsToCompactNtp(int64_t us) { +uint32_t SaturatedToCompactNtp(TimeDelta delta) { constexpr uint32_t kMaxCompactNtp = 0xFFFFFFFF; constexpr int kCompactNtpInSecond = 0x10000; - if (us <= 0) + if (delta <= TimeDelta::Zero()) return 0; - if (us >= kMaxCompactNtp * rtc::kNumMicrosecsPerSec / kCompactNtpInSecond) + if (delta.us() >= + kMaxCompactNtp * rtc::kNumMicrosecsPerSec / kCompactNtpInSecond) return kMaxCompactNtp; // To convert to compact ntp need to divide by 1e6 to get seconds, // then multiply by 0x10000 to get the final result. // To avoid float operations, multiplication and division swapped. - return DivideRoundToNearest(us * kCompactNtpInSecond, + return DivideRoundToNearest(delta.us() * kCompactNtpInSecond, rtc::kNumMicrosecsPerSec); } -int64_t CompactNtpRttToMs(uint32_t compact_ntp_interval) { - // Interval to convert expected to be positive, e.g. rtt or delay. +TimeDelta CompactNtpRttToTimeDelta(uint32_t compact_ntp_interval) { + static constexpr TimeDelta kMinRtt = TimeDelta::Millis(1); + // Interval to convert expected to be positive, e.g. RTT or delay. // Because interval can be derived from non-monotonic ntp clock, // it might become negative that is indistinguishable from very large values. - // Since very large rtt/delay are less likely than non-monotonic ntp clock, - // those values consider to be negative and convert to minimum value of 1ms. + // Since very large RTT/delay is less likely than non-monotonic ntp clock, + // such value is considered negative and converted to minimum value of 1ms. if (compact_ntp_interval > 0x80000000) - return 1; + return kMinRtt; // Convert to 64bit value to avoid multiplication overflow. int64_t value = static_cast(compact_ntp_interval); - // To convert to milliseconds need to divide by 2^16 to get seconds, - // then multiply by 1000 to get milliseconds. To avoid float operations, - // multiplication and division swapped. - int64_t ms = DivideRoundToNearest(value * 1000, 1 << 16); - // Rtt value 0 considered too good to be true and increases to 1. - return std::max(ms, 1); + // To convert to TimeDelta need to divide by 2^16 to get seconds, + // then multiply by 1'000'000 to get microseconds. To avoid float operations, + // multiplication and division are swapped. + int64_t us = DivideRoundToNearest(value * rtc::kNumMicrosecsPerSec, 1 << 16); + // Small RTT value is considered too good to be true and increased to 1ms. 
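The time_util changes above move the public API from raw microseconds/milliseconds to `TimeDelta`, but the encoding is unchanged: compact NTP stores an interval in units of 1/2^16 s (16 bits of seconds, 16 bits of fraction), and values above 0x80000000 are treated as negative and clamped to 1 ms. A self-contained sketch of the round trip, using the same round-to-nearest integer arithmetic as the hunks above:

    #include <cstdint>
    #include <cstdio>

    constexpr int64_t kMicrosPerSecond = 1000000;

    int64_t DivideRoundToNearest(int64_t x, int64_t y) { return (x + y / 2) / y; }

    // Interval in microseconds -> compact NTP (1/65536 s units), saturating.
    uint32_t SaturatedToCompactNtp(int64_t delta_us) {
      constexpr int64_t kMaxCompactNtp = 0xFFFFFFFF;
      if (delta_us <= 0) return 0;
      if (delta_us >= kMaxCompactNtp * kMicrosPerSecond / 65536) return 0xFFFFFFFF;
      return static_cast<uint32_t>(
          DivideRoundToNearest(delta_us * 65536, kMicrosPerSecond));
    }

    // Compact NTP interval -> microseconds, clamped to >= 1 ms like the RTT path.
    int64_t CompactNtpToMicros(uint32_t compact_ntp) {
      if (compact_ntp > 0x80000000) return 1000;  // "negative" interval -> 1 ms
      int64_t us = DivideRoundToNearest(
          static_cast<int64_t>(compact_ntp) * kMicrosPerSecond, 1 << 16);
      return us < 1000 ? 1000 : us;
    }

    int main() {
      uint32_t compact = SaturatedToCompactNtp(250000);  // 0.25 s
      std::printf("compact=%u (0.25 * 2^16 = 16384)\n", compact);
      std::printf("back=%lld us\n", (long long)CompactNtpToMicros(compact));
      return 0;
    }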
+ return std::max(TimeDelta::Micros(us), kMinRtt); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/time_util.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/time_util.h index c883e5ca38..9ff444b12e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/time_util.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/time_util.h @@ -13,6 +13,8 @@ #include +#include "api/units/time_delta.h" +#include "rtc_base/numerics/safe_conversions.h" #include "system_wrappers/include/ntp_time.h" namespace webrtc { @@ -29,14 +31,26 @@ inline uint32_t CompactNtp(NtpTime ntp) { return (ntp.seconds() << 16) | (ntp.fractions() >> 16); } -// Converts interval in microseconds to compact ntp (1/2^16 seconds) resolution. +// Converts interval to compact ntp (1/2^16 seconds) resolution. // Negative values converted to 0, Overlarge values converted to max uint32_t. -uint32_t SaturatedUsToCompactNtp(int64_t us); +uint32_t SaturatedToCompactNtp(TimeDelta delta); + +// Convert interval to the NTP time resolution (1/2^32 seconds ~= 0.2 ns). +// For deltas with absolute value larger than 35 minutes result is unspecified. +inline constexpr int64_t ToNtpUnits(TimeDelta delta) { + // For better precision `delta` is taken with best TimeDelta precision (us), + // then multiplaction and conversion to seconds are swapped to avoid float + // arithmetic. + // 2^31 us ~= 35.8 minutes. + return (rtc::saturated_cast(delta.us()) * (int64_t{1} << 32)) / + 1'000'000; +} -// Converts interval between compact ntp timestamps to milliseconds. +// Converts interval from compact ntp (1/2^16 seconds) resolution to TimeDelta. // This interval can be up to ~9.1 hours (2^15 seconds). -// Values close to 2^16 seconds consider negative and result in minimum rtt = 1. -int64_t CompactNtpRttToMs(uint32_t compact_ntp_interval); +// Values close to 2^16 seconds are considered negative and are converted to +// minimum value of 1ms. +TimeDelta CompactNtpRttToTimeDelta(uint32_t compact_ntp_interval); } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_TIME_UTIL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver.cc new file mode 100644 index 0000000000..4090d99e8d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver.cc @@ -0,0 +1,249 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/rtp_rtcp/source/ulpfec_receiver.h" + +#include +#include + +#include "api/scoped_refptr.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "rtc_base/logging.h" +#include "rtc_base/time_utils.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { + +UlpfecReceiver::UlpfecReceiver(uint32_t ssrc, + int ulpfec_payload_type, + RecoveredPacketReceiver* callback, + rtc::ArrayView extensions, + Clock* clock) + : ssrc_(ssrc), + ulpfec_payload_type_(ulpfec_payload_type), + clock_(clock), + extensions_(extensions), + recovered_packet_callback_(callback), + fec_(ForwardErrorCorrection::CreateUlpfec(ssrc_)) { + // TODO(tommi, brandtr): Once considerations for red have been split + // away from this implementation, we can require the ulpfec payload type + // to always be valid and use uint8 for storage (as is done elsewhere). + RTC_DCHECK_GE(ulpfec_payload_type_, -1); +} + +UlpfecReceiver::~UlpfecReceiver() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + + if (packet_counter_.first_packet_time != Timestamp::MinusInfinity()) { + const Timestamp now = clock_->CurrentTime(); + TimeDelta elapsed = (now - packet_counter_.first_packet_time); + if (elapsed.seconds() >= metrics::kMinRunTimeInSeconds) { + if (packet_counter_.num_packets > 0) { + RTC_HISTOGRAM_PERCENTAGE( + "WebRTC.Video.ReceivedFecPacketsInPercent", + static_cast(packet_counter_.num_fec_packets * 100 / + packet_counter_.num_packets)); + } + if (packet_counter_.num_fec_packets > 0) { + RTC_HISTOGRAM_PERCENTAGE( + "WebRTC.Video.RecoveredMediaPacketsInPercentOfFec", + static_cast(packet_counter_.num_recovered_packets * 100 / + packet_counter_.num_fec_packets)); + } + if (ulpfec_payload_type_ != -1) { + RTC_HISTOGRAM_COUNTS_10000( + "WebRTC.Video.FecBitrateReceivedInKbps", + static_cast(packet_counter_.num_bytes * 8 / elapsed.seconds() / + 1000)); + } + } + } + + received_packets_.clear(); + fec_->ResetState(&recovered_packets_); +} + +FecPacketCounter UlpfecReceiver::GetPacketCounter() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return packet_counter_; +} + +void UlpfecReceiver::SetRtpExtensions( + rtc::ArrayView extensions) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + extensions_.Reset(extensions); +} + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// |F| block PT | timestamp offset | block length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// +// +// RFC 2198 RTP Payload for Redundant Audio Data September 1997 +// +// The bits in the header are specified as follows: +// +// F: 1 bit First bit in header indicates whether another header block +// follows. If 1 further header blocks follow, if 0 this is the +// last header block. +// If 0 there is only 1 byte RED header +// +// block PT: 7 bits RTP payload type for this block. +// +// timestamp offset: 14 bits Unsigned offset of timestamp of this block +// relative to timestamp given in RTP header. The use of an unsigned +// offset implies that redundant data must be sent after the primary +// data, and is hence a time to be subtracted from the current +// timestamp to determine the timestamp of the data for which this +// block is the redundancy. +// +// block length: 10 bits Length in bytes of the corresponding data +// block excluding header. 
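AddReceivedRedPacket() below only accepts the single-block form of the RED header documented above: one byte with the F bit clear and the block payload type in the low 7 bits. A minimal standalone sketch of that header check (the function and struct names are local to the sketch):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <optional>

    struct RedHeader {
      int block_payload_type;  // low 7 bits of the first payload byte
      bool is_fec;             // true when the block carries ULPFEC
    };

    // Returns nullopt for empty payloads or for the multi-block form (F bit set),
    // which the receiver above rejects because WebRTC never generates it for FEC.
    std::optional<RedHeader> ParseSingleBlockRed(const uint8_t* payload,
                                                 size_t size,
                                                 int ulpfec_payload_type) {
      if (size == 0) return std::nullopt;          // corrupt/truncated
      if (payload[0] & 0x80) return std::nullopt;  // F bit: further header blocks
      RedHeader header;
      header.block_payload_type = payload[0] & 0x7f;
      header.is_fec = header.block_payload_type == ulpfec_payload_type;
      return header;
    }

    int main() {
      const uint8_t payload[] = {0x74, 0xde, 0xad};  // F=0, block PT=116
      auto header = ParseSingleBlockRed(payload, sizeof(payload), /*ulpfec=*/116);
      if (header) {
        std::printf("pt=%d is_fec=%d\n", header->block_payload_type, header->is_fec);
      }
      return 0;
    }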
+ +bool UlpfecReceiver::AddReceivedRedPacket(const RtpPacketReceived& rtp_packet) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + // TODO(bugs.webrtc.org/11993): We get here via Call::DeliverRtp, so should be + // moved to the network thread. + + if (rtp_packet.Ssrc() != ssrc_) { + RTC_LOG(LS_WARNING) + << "Received RED packet with different SSRC than expected; dropping."; + return false; + } + if (rtp_packet.size() > IP_PACKET_SIZE) { + RTC_LOG(LS_WARNING) << "Received RED packet with length exceeds maximum IP " + "packet size; dropping."; + return false; + } + + static constexpr uint8_t kRedHeaderLength = 1; + + if (rtp_packet.payload_size() == 0) { + RTC_LOG(LS_WARNING) << "Corrupt/truncated FEC packet."; + return false; + } + + // Remove RED header of incoming packet and store as a virtual RTP packet. + auto received_packet = + std::make_unique(); + received_packet->pkt = new ForwardErrorCorrection::Packet(); + + // Get payload type from RED header and sequence number from RTP header. + uint8_t payload_type = rtp_packet.payload()[0] & 0x7f; + received_packet->is_fec = payload_type == ulpfec_payload_type_; + received_packet->is_recovered = rtp_packet.recovered(); + received_packet->ssrc = rtp_packet.Ssrc(); + received_packet->seq_num = rtp_packet.SequenceNumber(); + + if (rtp_packet.payload()[0] & 0x80) { + // f bit set in RED header, i.e. there are more than one RED header blocks. + // WebRTC never generates multiple blocks in a RED packet for FEC. + RTC_LOG(LS_WARNING) << "More than 1 block in RED packet is not supported."; + return false; + } + + ++packet_counter_.num_packets; + packet_counter_.num_bytes += rtp_packet.size(); + if (packet_counter_.first_packet_time == Timestamp::MinusInfinity()) { + packet_counter_.first_packet_time = clock_->CurrentTime(); + } + + if (received_packet->is_fec) { + ++packet_counter_.num_fec_packets; + // everything behind the RED header + received_packet->pkt->data = + rtp_packet.Buffer().Slice(rtp_packet.headers_size() + kRedHeaderLength, + rtp_packet.payload_size() - kRedHeaderLength); + } else { + received_packet->pkt->data.EnsureCapacity(rtp_packet.size() - + kRedHeaderLength); + // Copy RTP header. + received_packet->pkt->data.SetData(rtp_packet.data(), + rtp_packet.headers_size()); + // Set payload type. + uint8_t& payload_type_byte = received_packet->pkt->data.MutableData()[1]; + payload_type_byte &= 0x80; // Reset RED payload type. + payload_type_byte += payload_type; // Set media payload type. + // Copy payload and padding data, after the RED header. + received_packet->pkt->data.AppendData( + rtp_packet.data() + rtp_packet.headers_size() + kRedHeaderLength, + rtp_packet.size() - rtp_packet.headers_size() - kRedHeaderLength); + } + + if (received_packet->pkt->data.size() > 0) { + received_packets_.push_back(std::move(received_packet)); + } + return true; +} + +void UlpfecReceiver::ProcessReceivedFec() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + + // If we iterate over `received_packets_` and it contains a packet that cause + // us to recurse back to this function (for example a RED packet encapsulating + // a RED packet), then we will recurse forever. To avoid this we swap + // `received_packets_` with an empty vector so that the next recursive call + // wont iterate over the same packet again. This also solves the problem of + // not modifying the vector we are currently iterating over (packets are added + // in AddReceivedRedPacket). 
+ std::vector> + received_packets; + received_packets.swap(received_packets_); + + for (const auto& received_packet : received_packets) { + // Send received media packet to VCM. + if (!received_packet->is_fec) { + ForwardErrorCorrection::Packet* packet = received_packet->pkt.get(); + recovered_packet_callback_->OnRecoveredPacket(packet->data.data(), + packet->data.size()); + // Create a packet with the buffer to modify it. + RtpPacketReceived rtp_packet; + const uint8_t* const original_data = packet->data.cdata(); + if (!rtp_packet.Parse(packet->data)) { + RTC_LOG(LS_WARNING) << "Corrupted media packet"; + } else { + rtp_packet.IdentifyExtensions(extensions_); + // Reset buffer reference, so zeroing would work on a buffer with a + // single reference. + packet->data = rtc::CopyOnWriteBuffer(0); + rtp_packet.ZeroMutableExtensions(); + packet->data = rtp_packet.Buffer(); + // Ensure that zeroing of extensions was done in place. + RTC_DCHECK_EQ(packet->data.cdata(), original_data); + } + } + if (!received_packet->is_recovered) { + // Do not pass recovered packets to FEC. Recovered packet might have + // different set of the RTP header extensions and thus different byte + // representation than the original packet, That will corrupt + // FEC calculation. + fec_->DecodeFec(*received_packet, &recovered_packets_); + } + } + + // Send any recovered media packets to VCM. + for (const auto& recovered_packet : recovered_packets_) { + if (recovered_packet->returned) { + // Already sent to the VCM and the jitter buffer. + continue; + } + ForwardErrorCorrection::Packet* packet = recovered_packet->pkt.get(); + ++packet_counter_.num_recovered_packets; + // Set this flag first; in case the recovered packet carries a RED + // header, OnRecoveredPacket will recurse back here. + recovered_packet->returned = true; + recovered_packet_callback_->OnRecoveredPacket(packet->data.data(), + packet->data.size()); + } +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver.h new file mode 100644 index 0000000000..b8ac8d8c30 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver.h @@ -0,0 +1,82 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_ULPFEC_RECEIVER_H_ +#define MODULES_RTP_RTCP_SOURCE_ULPFEC_RECEIVER_H_ + +#include +#include + +#include +#include + +#include "api/sequence_checker.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/forward_error_correction.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "rtc_base/system/no_unique_address.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { + +struct FecPacketCounter { + FecPacketCounter() = default; + size_t num_packets = 0; // Number of received packets. + size_t num_bytes = 0; + size_t num_fec_packets = 0; // Number of received FEC packets. + size_t num_recovered_packets = + 0; // Number of recovered media packets using FEC. 
+ // Time when first packet is received. + Timestamp first_packet_time = Timestamp::MinusInfinity(); +}; + +class UlpfecReceiver { + public: + UlpfecReceiver(uint32_t ssrc, + int ulpfec_payload_type, + RecoveredPacketReceiver* callback, + rtc::ArrayView extensions, + Clock* clock); + ~UlpfecReceiver(); + + int ulpfec_payload_type() const { return ulpfec_payload_type_; } + + bool AddReceivedRedPacket(const RtpPacketReceived& rtp_packet); + + void ProcessReceivedFec(); + + FecPacketCounter GetPacketCounter() const; + + void SetRtpExtensions(rtc::ArrayView extensions); + + private: + const uint32_t ssrc_; + const int ulpfec_payload_type_; + Clock* const clock_; + RtpHeaderExtensionMap extensions_ RTC_GUARDED_BY(&sequence_checker_); + + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; + RecoveredPacketReceiver* const recovered_packet_callback_; + const std::unique_ptr fec_; + // TODO(nisse): The AddReceivedRedPacket method adds one or two packets to + // this list at a time, after which it is emptied by ProcessReceivedFec. It + // will make things simpler to merge AddReceivedRedPacket and + // ProcessReceivedFec into a single method, and we can then delete this list. + std::vector> + received_packets_ RTC_GUARDED_BY(&sequence_checker_); + ForwardErrorCorrection::RecoveredPacketList recovered_packets_ + RTC_GUARDED_BY(&sequence_checker_); + FecPacketCounter packet_counter_ RTC_GUARDED_BY(&sequence_checker_); +}; + +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_ULPFEC_RECEIVER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc deleted file mode 100644 index 159e21f9d2..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc +++ /dev/null @@ -1,226 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/rtp_rtcp/source/ulpfec_receiver_impl.h" - -#include -#include - -#include "api/scoped_refptr.h" -#include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "rtc_base/logging.h" -#include "rtc_base/time_utils.h" - -namespace webrtc { - -std::unique_ptr UlpfecReceiver::Create( - uint32_t ssrc, - RecoveredPacketReceiver* callback, - rtc::ArrayView extensions) { - return std::make_unique(ssrc, callback, extensions); -} - -UlpfecReceiverImpl::UlpfecReceiverImpl( - uint32_t ssrc, - RecoveredPacketReceiver* callback, - rtc::ArrayView extensions) - : ssrc_(ssrc), - extensions_(extensions), - recovered_packet_callback_(callback), - fec_(ForwardErrorCorrection::CreateUlpfec(ssrc_)) {} - -UlpfecReceiverImpl::~UlpfecReceiverImpl() { - RTC_DCHECK_RUN_ON(&sequence_checker_); - received_packets_.clear(); - fec_->ResetState(&recovered_packets_); -} - -FecPacketCounter UlpfecReceiverImpl::GetPacketCounter() const { - RTC_DCHECK_RUN_ON(&sequence_checker_); - return packet_counter_; -} - -void UlpfecReceiverImpl::SetRtpExtensions( - rtc::ArrayView extensions) { - RTC_DCHECK_RUN_ON(&sequence_checker_); - extensions_.Reset(extensions); -} - -// 0 1 2 3 -// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// |F| block PT | timestamp offset | block length | -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// -// -// RFC 2198 RTP Payload for Redundant Audio Data September 1997 -// -// The bits in the header are specified as follows: -// -// F: 1 bit First bit in header indicates whether another header block -// follows. If 1 further header blocks follow, if 0 this is the -// last header block. -// If 0 there is only 1 byte RED header -// -// block PT: 7 bits RTP payload type for this block. -// -// timestamp offset: 14 bits Unsigned offset of timestamp of this block -// relative to timestamp given in RTP header. The use of an unsigned -// offset implies that redundant data must be sent after the primary -// data, and is hence a time to be subtracted from the current -// timestamp to determine the timestamp of the data for which this -// block is the redundancy. -// -// block length: 10 bits Length in bytes of the corresponding data -// block excluding header. - -bool UlpfecReceiverImpl::AddReceivedRedPacket( - const RtpPacketReceived& rtp_packet, - uint8_t ulpfec_payload_type) { - RTC_DCHECK_RUN_ON(&sequence_checker_); - // TODO(bugs.webrtc.org/11993): We get here via Call::DeliverRtp, so should be - // moved to the network thread. - - if (rtp_packet.Ssrc() != ssrc_) { - RTC_LOG(LS_WARNING) - << "Received RED packet with different SSRC than expected; dropping."; - return false; - } - if (rtp_packet.size() > IP_PACKET_SIZE) { - RTC_LOG(LS_WARNING) << "Received RED packet with length exceeds maximum IP " - "packet size; dropping."; - return false; - } - - static constexpr uint8_t kRedHeaderLength = 1; - - if (rtp_packet.payload_size() == 0) { - RTC_LOG(LS_WARNING) << "Corrupt/truncated FEC packet."; - return false; - } - - // Remove RED header of incoming packet and store as a virtual RTP packet. - auto received_packet = - std::make_unique(); - received_packet->pkt = new ForwardErrorCorrection::Packet(); - - // Get payload type from RED header and sequence number from RTP header. 
- uint8_t payload_type = rtp_packet.payload()[0] & 0x7f; - received_packet->is_fec = payload_type == ulpfec_payload_type; - received_packet->is_recovered = rtp_packet.recovered(); - received_packet->ssrc = rtp_packet.Ssrc(); - received_packet->seq_num = rtp_packet.SequenceNumber(); - - if (rtp_packet.payload()[0] & 0x80) { - // f bit set in RED header, i.e. there are more than one RED header blocks. - // WebRTC never generates multiple blocks in a RED packet for FEC. - RTC_LOG(LS_WARNING) << "More than 1 block in RED packet is not supported."; - return false; - } - - ++packet_counter_.num_packets; - packet_counter_.num_bytes += rtp_packet.size(); - if (packet_counter_.first_packet_time_ms == -1) { - packet_counter_.first_packet_time_ms = rtc::TimeMillis(); - } - - if (received_packet->is_fec) { - ++packet_counter_.num_fec_packets; - // everything behind the RED header - received_packet->pkt->data = - rtp_packet.Buffer().Slice(rtp_packet.headers_size() + kRedHeaderLength, - rtp_packet.payload_size() - kRedHeaderLength); - } else { - received_packet->pkt->data.EnsureCapacity(rtp_packet.size() - - kRedHeaderLength); - // Copy RTP header. - received_packet->pkt->data.SetData(rtp_packet.data(), - rtp_packet.headers_size()); - // Set payload type. - uint8_t& payload_type_byte = received_packet->pkt->data.MutableData()[1]; - payload_type_byte &= 0x80; // Reset RED payload type. - payload_type_byte += payload_type; // Set media payload type. - // Copy payload and padding data, after the RED header. - received_packet->pkt->data.AppendData( - rtp_packet.data() + rtp_packet.headers_size() + kRedHeaderLength, - rtp_packet.size() - rtp_packet.headers_size() - kRedHeaderLength); - } - - if (received_packet->pkt->data.size() > 0) { - received_packets_.push_back(std::move(received_packet)); - } - return true; -} - -// TODO(nisse): Drop always-zero return value. -int32_t UlpfecReceiverImpl::ProcessReceivedFec() { - RTC_DCHECK_RUN_ON(&sequence_checker_); - - // If we iterate over `received_packets_` and it contains a packet that cause - // us to recurse back to this function (for example a RED packet encapsulating - // a RED packet), then we will recurse forever. To avoid this we swap - // `received_packets_` with an empty vector so that the next recursive call - // wont iterate over the same packet again. This also solves the problem of - // not modifying the vector we are currently iterating over (packets are added - // in AddReceivedRedPacket). - std::vector> - received_packets; - received_packets.swap(received_packets_); - - for (const auto& received_packet : received_packets) { - // Send received media packet to VCM. - if (!received_packet->is_fec) { - ForwardErrorCorrection::Packet* packet = received_packet->pkt; - recovered_packet_callback_->OnRecoveredPacket(packet->data.data(), - packet->data.size()); - // Create a packet with the buffer to modify it. - RtpPacketReceived rtp_packet; - const uint8_t* const original_data = packet->data.cdata(); - if (!rtp_packet.Parse(packet->data)) { - RTC_LOG(LS_WARNING) << "Corrupted media packet"; - } else { - rtp_packet.IdentifyExtensions(extensions_); - // Reset buffer reference, so zeroing would work on a buffer with a - // single reference. - packet->data = rtc::CopyOnWriteBuffer(0); - rtp_packet.ZeroMutableExtensions(); - packet->data = rtp_packet.Buffer(); - // Ensure that zeroing of extensions was done in place. - RTC_DCHECK_EQ(packet->data.cdata(), original_data); - } - } - if (!received_packet->is_recovered) { - // Do not pass recovered packets to FEC. 
Recovered packet might have - // different set of the RTP header extensions and thus different byte - // representation than the original packet, That will corrupt - // FEC calculation. - fec_->DecodeFec(*received_packet, &recovered_packets_); - } - } - - // Send any recovered media packets to VCM. - for (const auto& recovered_packet : recovered_packets_) { - if (recovered_packet->returned) { - // Already sent to the VCM and the jitter buffer. - continue; - } - ForwardErrorCorrection::Packet* packet = recovered_packet->pkt; - ++packet_counter_.num_recovered_packets; - // Set this flag first; in case the recovered packet carries a RED - // header, OnRecoveredPacket will recurse back here. - recovered_packet->returned = true; - recovered_packet_callback_->OnRecoveredPacket(packet->data.data(), - packet->data.size()); - } - - return 0; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver_impl.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver_impl.h deleted file mode 100644 index 92e51530b8..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver_impl.h +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_RTP_RTCP_SOURCE_ULPFEC_RECEIVER_IMPL_H_ -#define MODULES_RTP_RTCP_SOURCE_ULPFEC_RECEIVER_IMPL_H_ - -#include -#include - -#include -#include - -#include "api/sequence_checker.h" -#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/include/ulpfec_receiver.h" -#include "modules/rtp_rtcp/source/forward_error_correction.h" -#include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "rtc_base/system/no_unique_address.h" - -namespace webrtc { - -class UlpfecReceiverImpl : public UlpfecReceiver { - public: - explicit UlpfecReceiverImpl(uint32_t ssrc, - RecoveredPacketReceiver* callback, - rtc::ArrayView extensions); - ~UlpfecReceiverImpl() override; - - bool AddReceivedRedPacket(const RtpPacketReceived& rtp_packet, - uint8_t ulpfec_payload_type) override; - - int32_t ProcessReceivedFec() override; - - FecPacketCounter GetPacketCounter() const override; - - void SetRtpExtensions(rtc::ArrayView extensions) override; - - private: - const uint32_t ssrc_; - RtpHeaderExtensionMap extensions_ RTC_GUARDED_BY(&sequence_checker_); - - RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; - RecoveredPacketReceiver* const recovered_packet_callback_; - const std::unique_ptr fec_; - // TODO(nisse): The AddReceivedRedPacket method adds one or two packets to - // this list at a time, after which it is emptied by ProcessReceivedFec. It - // will make things simpler to merge AddReceivedRedPacket and - // ProcessReceivedFec into a single method, and we can then delete this list. 
- std::vector> - received_packets_ RTC_GUARDED_BY(&sequence_checker_); - ForwardErrorCorrection::RecoveredPacketList recovered_packets_ - RTC_GUARDED_BY(&sequence_checker_); - FecPacketCounter packet_counter_ RTC_GUARDED_BY(&sequence_checker_); -}; - -} // namespace webrtc - -#endif // MODULES_RTP_RTCP_SOURCE_ULPFEC_RECEIVER_IMPL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc index e87be031a8..9978e5f5fc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc @@ -15,7 +15,6 @@ #include #include -#include "absl/base/macros.h" #include "absl/types/optional.h" #include "absl/types/variant.h" #include "common_video/h264/h264_common.h" @@ -23,6 +22,7 @@ #include "common_video/h264/sps_parser.h" #include "common_video/h264/sps_vui_rewriter.h" #include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/rtp_format_h264.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" @@ -36,12 +36,6 @@ constexpr size_t kFuAHeaderSize = 2; constexpr size_t kLengthFieldSize = 2; constexpr size_t kStapAHeaderSize = kNalHeaderSize + kLengthFieldSize; -// Bit masks for FU (A and B) indicators. -enum NalDefs : uint8_t { kFBit = 0x80, kNriMask = 0x60, kTypeMask = 0x1F }; - -// Bit masks for FU (A and B) headers. -enum FuDefs : uint8_t { kSBit = 0x80, kEBit = 0x40, kRBit = 0x20 }; - // TODO(pbos): Avoid parsing this here as well as inside the jitter buffer. bool ParseStapAStartOffsets(const uint8_t* nalu_ptr, size_t length_remaining, @@ -82,7 +76,7 @@ absl::optional ProcessStapAOrSingleNalu( const uint8_t* nalu_start = payload_data + kNalHeaderSize; const size_t nalu_length = rtp_payload.size() - kNalHeaderSize; - uint8_t nal_type = payload_data[0] & kTypeMask; + uint8_t nal_type = payload_data[0] & kH264TypeMask; std::vector nalu_start_offsets; if (nal_type == H264::NaluType::kStapA) { // Skip the StapA header (StapA NAL type + length). 
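The video_rtp_depacketizer_h264.cc hunks swap the local `kFBit`/`kNriMask`/`kTypeMask`/`kSBit` enums for shared `kH264*` constants from rtp_format_h264.h; the bit layout itself is unchanged: a NAL header byte is one forbidden bit, two NRI bits and a 5-bit type, and the FU header adds S/E/R bits. A small standalone sketch of pulling those fields apart (constants are redeclared locally for illustration):

    #include <cstdint>
    #include <cstdio>

    // H.264-over-RTP bit masks (RFC 6184 names), local to this sketch.
    constexpr uint8_t kFBit = 0x80;      // forbidden_zero_bit
    constexpr uint8_t kNriMask = 0x60;   // nal_ref_idc
    constexpr uint8_t kTypeMask = 0x1F;  // nal_unit_type
    constexpr uint8_t kSBit = 0x80;      // FU header: start of fragmented NAL unit
    constexpr uint8_t kEBit = 0x40;      // FU header: end of fragmented NAL unit

    int main() {
      const uint8_t fu_indicator = 0x7C;  // F=0, NRI=3, type=28 (FU-A)
      const uint8_t fu_header = 0x85;     // S=1, E=0, original type=5 (IDR)
      std::printf("indicator: type=%d nri=%d f=%d\n", fu_indicator & kTypeMask,
                  (fu_indicator & kNriMask) >> 5, (fu_indicator & kFBit) != 0);
      std::printf("fragment: start=%d end=%d original type=%d\n",
                  (fu_header & kSBit) != 0, (fu_header & kEBit) != 0,
                  fu_header & kTypeMask);
      return 0;
    }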
@@ -97,7 +91,7 @@ absl::optional ProcessStapAOrSingleNalu( } h264_header.packetization_type = kH264StapA; - nal_type = payload_data[kStapAHeaderSize] & kTypeMask; + nal_type = payload_data[kStapAHeaderSize] & kH264TypeMask; } else { h264_header.packetization_type = kH264SingleNalu; nalu_start_offsets.push_back(0); @@ -118,7 +112,7 @@ absl::optional ProcessStapAOrSingleNalu( } NaluInfo nalu; - nalu.type = payload_data[start_offset] & kTypeMask; + nalu.type = payload_data[start_offset] & kH264TypeMask; nalu.sps_id = -1; nalu.pps_id = -1; start_offset += H264::kNaluTypeSize; @@ -197,7 +191,7 @@ absl::optional ProcessStapAOrSingleNalu( case H264::NaluType::kIdr: parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameKey; - ABSL_FALLTHROUGH_INTENDED; + [[fallthrough]]; case H264::NaluType::kSlice: { absl::optional pps_id = PpsParser::ParsePpsIdFromSlice( &payload_data[start_offset], end_offset - start_offset); @@ -242,9 +236,9 @@ absl::optional ParseFuaNalu( } absl::optional parsed_payload( absl::in_place); - uint8_t fnri = rtp_payload.cdata()[0] & (kFBit | kNriMask); - uint8_t original_nal_type = rtp_payload.cdata()[1] & kTypeMask; - bool first_fragment = (rtp_payload.cdata()[1] & kSBit) > 0; + uint8_t fnri = rtp_payload.cdata()[0] & (kH264FBit | kH264NriMask); + uint8_t original_nal_type = rtp_payload.cdata()[1] & kH264TypeMask; + bool first_fragment = (rtp_payload.cdata()[1] & kH264SBit) > 0; NaluInfo nalu; nalu.type = original_nal_type; nalu.sps_id = -1; @@ -301,7 +295,7 @@ VideoRtpDepacketizerH264::Parse(rtc::CopyOnWriteBuffer rtp_payload) { return absl::nullopt; } - uint8_t nal_type = rtp_payload.cdata()[0] & kTypeMask; + uint8_t nal_type = rtp_payload.cdata()[0] & kH264TypeMask; if (nal_type == H264::NaluType::kFuA) { // Fragmented NAL units (FU-A). diff --git a/TMessagesProj/jni/voip/webrtc/modules/third_party/g722/g722_enc_dec.h b/TMessagesProj/jni/voip/webrtc/modules/third_party/g722/g722_enc_dec.h index 898fa279cc..47b0936d82 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/third_party/g722/g722_enc_dec.h +++ b/TMessagesProj/jni/voip/webrtc/modules/third_party/g722/g722_enc_dec.h @@ -30,6 +30,7 @@ #ifndef MODULES_THIRD_PARTY_G722_G722_H_ #define MODULES_THIRD_PARTY_G722_G722_H_ +#include #include /*! \page g722_page G.722 encoding and decoding diff --git a/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/README.chromium b/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/README.chromium index 7a15b2e1fc..722dd94345 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/README.chromium +++ b/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/README.chromium @@ -1,8 +1,8 @@ Name: Portaudio library for mac Short Name: portaudio -URL: https://app.assembla.com/spaces/portaudio/git/source/master/src/common -Version: 0 -Date: 2018-02-01 +URL: https://github.com/PortAudio/portaudio/tree/master/src/common +Version: 9d8563100d841300f1689b186d131347ad43a0f6 +Date: 2022-04-12 License: Custom license License File: LICENSE Security Critical: yes @@ -10,3 +10,5 @@ Security Critical: yes Description: Part of portaudio library to operate with memory barriers and ring buffer. +Local changes: + - Minor formatting to make 'git cl format' happy. 
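The pa_memorybarrier.h change below prefers C11 <stdatomic.h> fences on macOS, since (per the comment in the hunk) OSMemoryBarrier() was deprecated in macOS 10.12. The same three barrier strengths map directly onto std::atomic_thread_fence in C++; a minimal sketch of that mapping (illustrative only, the real code keeps the PaUtil_* macro names):

    #include <atomic>

    // C++ equivalents of the three PortAudio barrier macros in the hunk below.
    // In the ring buffer, the writer issues a release (or full) fence before it
    // publishes the write index, and the reader issues an acquire fence after it
    // reads that index and before it touches the data.
    inline void FullMemoryBarrier() {
      std::atomic_thread_fence(std::memory_order_seq_cst);
    }
    inline void ReadMemoryBarrier() {
      std::atomic_thread_fence(std::memory_order_acquire);
    }
    inline void WriteMemoryBarrier() {
      std::atomic_thread_fence(std::memory_order_release);
    }

    int main() {
      WriteMemoryBarrier();
      ReadMemoryBarrier();
      FullMemoryBarrier();
      return 0;
    }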
diff --git a/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/pa_memorybarrier.h b/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/pa_memorybarrier.h index c1040d1027..f8c1852f4e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/pa_memorybarrier.h +++ b/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/pa_memorybarrier.h @@ -64,13 +64,23 @@ #define MODULES_THIRD_PARTY_PORTAUDIO_PA_MEMORYBARRIER_H_ #if defined(__APPLE__) +/* Support for the atomic library was added in C11. + */ +#if (__STDC_VERSION__ < 201112L) || defined(__STDC_NO_ATOMICS__) #include /* Here are the memory barrier functions. Mac OS X only provides full memory barriers, so the three types of barriers are the same, - however, these barriers are superior to compiler-based ones. */ + however, these barriers are superior to compiler-based ones. + These were deprecated in MacOS 10.12. */ #define PaUtil_FullMemoryBarrier() OSMemoryBarrier() #define PaUtil_ReadMemoryBarrier() OSMemoryBarrier() #define PaUtil_WriteMemoryBarrier() OSMemoryBarrier() +#else +#include +#define PaUtil_FullMemoryBarrier() atomic_thread_fence(memory_order_seq_cst) +#define PaUtil_ReadMemoryBarrier() atomic_thread_fence(memory_order_acquire) +#define PaUtil_WriteMemoryBarrier() atomic_thread_fence(memory_order_release) +#endif #elif defined(__GNUC__) /* GCC >= 4.1 has built-in intrinsics. We'll use those */ #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1) @@ -98,7 +108,7 @@ #define PaUtil_ReadMemoryBarrier() #define PaUtil_WriteMemoryBarrier() #else -# error Memory barriers are not defined on this system. You can still compile by defining ALLOW_SMP_DANGERS, but SMP safety will not be guaranteed. +# error Memory barriers are not defined on this system. You can still compile by defining ALLOW_SMP_DANGERS, but SMP safety will not be guaranteed. #endif #endif #elif (_MSC_VER >= 1400) && !defined(_WIN32_WCE) @@ -106,6 +116,8 @@ #pragma intrinsic(_ReadWriteBarrier) #pragma intrinsic(_ReadBarrier) #pragma intrinsic(_WriteBarrier) +/* note that MSVC intrinsics _ReadWriteBarrier(), _ReadBarrier(), + * _WriteBarrier() are just compiler barriers *not* memory barriers */ #define PaUtil_FullMemoryBarrier() _ReadWriteBarrier() #define PaUtil_ReadMemoryBarrier() _ReadBarrier() #define PaUtil_WriteMemoryBarrier() _WriteBarrier() @@ -125,7 +137,7 @@ #define PaUtil_ReadMemoryBarrier() #define PaUtil_WriteMemoryBarrier() #else -# error Memory barriers are not defined on this system. You can still compile by defining ALLOW_SMP_DANGERS, but SMP safety will not be guaranteed. +# error Memory barriers are not defined on this system. You can still compile by defining ALLOW_SMP_DANGERS, but SMP safety will not be guaranteed. #endif #endif diff --git a/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/pa_ringbuffer.c b/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/pa_ringbuffer.c index fc1053c5ff..b978d54f19 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/pa_ringbuffer.c +++ b/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/pa_ringbuffer.c @@ -1,5 +1,5 @@ /* - * $Id: pa_ringbuffer.c 1421 2009-11-18 16:09:05Z bjornroche $ + * $Id$ * Portable Audio I/O Library * Ring Buffer utility. 
* @@ -7,7 +7,7 @@ * modified for SMP safety on Mac OS X by Bjorn Roche * modified for SMP safety on Linux by Leland Lucius * also, allowed for const where possible - * modified for multiple-byte-sized data elements by Sven Fischer + * modified for multiple-byte-sized data elements by Sven Fischer * * Note that this is safe only for a single-thread reader and a * single-thread writer. @@ -37,13 +37,13 @@ */ /* - * The text above constitutes the entire PortAudio license; however, + * The text above constitutes the entire PortAudio license; however, * the PortAudio community also makes the following non-binding requests: * * Any person wishing to distribute modifications to the Software is * requested to send the modifications to the original developer so that - * they can be incorporated into the canonical version. It is also - * requested that these non-binding requests be included along with the + * they can be incorporated into the canonical version. It is also + * requested that these non-binding requests be included along with the * license above. */ @@ -52,21 +52,19 @@ @ingroup common_src */ -#include #include #include +#include +#include "pa_ringbuffer.h" #include -#include "modules/third_party/portaudio/pa_memorybarrier.h" -#include "modules/third_party/portaudio/pa_ringbuffer.h" +#include "pa_memorybarrier.h" /*************************************************************************** * Initialize FIFO. * elementCount must be power of 2, returns -1 if not. */ -PaRingBufferSize PaUtil_InitializeRingBuffer(PaUtilRingBuffer* rbuf, - PaRingBufferSize elementSizeBytes, - PaRingBufferSize elementCount, - void* dataPtr) { +ring_buffer_size_t PaUtil_InitializeRingBuffer( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementSizeBytes, ring_buffer_size_t elementCount, void *dataPtr ) +{ if( ((elementCount-1) & elementCount) != 0) return -1; /* Not Power of two. */ rbuf->bufferSize = elementCount; rbuf->buffer = (char *)dataPtr; @@ -79,21 +77,19 @@ PaRingBufferSize PaUtil_InitializeRingBuffer(PaUtilRingBuffer* rbuf, /*************************************************************************** ** Return number of elements available for reading. */ -PaRingBufferSize PaUtil_GetRingBufferReadAvailable( PaUtilRingBuffer *rbuf ) +ring_buffer_size_t PaUtil_GetRingBufferReadAvailable( const PaUtilRingBuffer *rbuf ) { - PaUtil_ReadMemoryBarrier(); return ( (rbuf->writeIndex - rbuf->readIndex) & rbuf->bigMask ); } /*************************************************************************** ** Return number of elements available for writing. */ -PaRingBufferSize PaUtil_GetRingBufferWriteAvailable( PaUtilRingBuffer *rbuf ) +ring_buffer_size_t PaUtil_GetRingBufferWriteAvailable( const PaUtilRingBuffer *rbuf ) { - /* Since we are calling PaUtil_GetRingBufferReadAvailable, we don't need an aditional MB */ return ( rbuf->bufferSize - PaUtil_GetRingBufferReadAvailable(rbuf)); } /*************************************************************************** -** Clear buffer. Should only be called when buffer is NOT being read. */ +** Clear buffer. Should only be called when buffer is NOT being read or written. */ void PaUtil_FlushRingBuffer( PaUtilRingBuffer *rbuf ) { rbuf->writeIndex = rbuf->readIndex = 0; @@ -105,21 +101,19 @@ void PaUtil_FlushRingBuffer( PaUtilRingBuffer *rbuf ) ** If non-contiguous, size2 will be the size of second region. ** Returns room available to be written or elementCount, whichever is smaller. 
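pa_ringbuffer requires a power-of-two elementCount so that indices can be wrapped with masks: bigMask = 2*N-1 lets the free-running read/write indices span a double-length range (so a full buffer is distinguishable from an empty one), while smallMask = N-1 maps an index to a buffer slot. A single-threaded, self-contained sketch of just that index arithmetic (no memory barriers):

    #include <cstdint>
    #include <cstdio>

    // Index bookkeeping of a power-of-two ring buffer, as in pa_ringbuffer.
    struct RingIndices {
      uint32_t element_count;  // must be a power of two
      uint32_t big_mask;       // 2*N - 1: wraps the free-running indices
      uint32_t small_mask;     // N - 1: maps an index to a buffer slot
      uint32_t read_index = 0;
      uint32_t write_index = 0;

      explicit RingIndices(uint32_t n)
          : element_count(n), big_mask(2 * n - 1), small_mask(n - 1) {}

      uint32_t ReadAvailable() const { return (write_index - read_index) & big_mask; }
      uint32_t WriteAvailable() const { return element_count - ReadAvailable(); }
      void AdvanceWrite(uint32_t n) { write_index = (write_index + n) & big_mask; }
      void AdvanceRead(uint32_t n) { read_index = (read_index + n) & big_mask; }
      uint32_t Slot(uint32_t index) const { return index & small_mask; }
    };

    int main() {
      RingIndices ring(/*n=*/8);
      ring.AdvanceWrite(8);  // fill the buffer completely
      std::printf("read avail=%u write avail=%u\n", ring.ReadAvailable(),
                  ring.WriteAvailable());  // 8 and 0: full is not confused with empty
      ring.AdvanceRead(3);
      std::printf("next read slot=%u\n", ring.Slot(ring.read_index));  // 3
      return 0;
    }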
*/ -PaRingBufferSize PaUtil_GetRingBufferWriteRegions(PaUtilRingBuffer* rbuf, - PaRingBufferSize elementCount, - void** dataPtr1, - PaRingBufferSize* sizePtr1, - void** dataPtr2, - PaRingBufferSize* sizePtr2) { - PaRingBufferSize index; - PaRingBufferSize available = PaUtil_GetRingBufferWriteAvailable( rbuf ); +ring_buffer_size_t PaUtil_GetRingBufferWriteRegions( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount, + void **dataPtr1, ring_buffer_size_t *sizePtr1, + void **dataPtr2, ring_buffer_size_t *sizePtr2 ) +{ + ring_buffer_size_t index; + ring_buffer_size_t available = PaUtil_GetRingBufferWriteAvailable( rbuf ); if( elementCount > available ) elementCount = available; /* Check to see if write is not contiguous. */ index = rbuf->writeIndex & rbuf->smallMask; if( (index + elementCount) > rbuf->bufferSize ) { /* Write data in two blocks that wrap the buffer. */ - PaRingBufferSize firstHalf = rbuf->bufferSize - index; + ring_buffer_size_t firstHalf = rbuf->bufferSize - index; *dataPtr1 = &rbuf->buffer[index*rbuf->elementSizeBytes]; *sizePtr1 = firstHalf; *dataPtr2 = &rbuf->buffer[0]; @@ -132,16 +126,21 @@ PaRingBufferSize PaUtil_GetRingBufferWriteRegions(PaUtilRingBuffer* rbuf, *dataPtr2 = NULL; *sizePtr2 = 0; } + + if( available ) + PaUtil_FullMemoryBarrier(); /* (write-after-read) => full barrier */ + return elementCount; } /*************************************************************************** */ -PaRingBufferSize PaUtil_AdvanceRingBufferWriteIndex( - PaUtilRingBuffer* rbuf, - PaRingBufferSize elementCount) { - /* we need to ensure that previous writes are seen before we update the write index */ +ring_buffer_size_t PaUtil_AdvanceRingBufferWriteIndex( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount ) +{ + /* ensure that previous writes are seen before we update the write index + (write after write) + */ PaUtil_WriteMemoryBarrier(); return rbuf->writeIndex = (rbuf->writeIndex + elementCount) & rbuf->bigMask; } @@ -150,23 +149,21 @@ PaRingBufferSize PaUtil_AdvanceRingBufferWriteIndex( ** Get address of region(s) from which we can read data. ** If the region is contiguous, size2 will be zero. ** If non-contiguous, size2 will be the size of second region. -** Returns room available to be written or elementCount, whichever is smaller. +** Returns room available to be read or elementCount, whichever is smaller. */ -PaRingBufferSize PaUtil_GetRingBufferReadRegions(PaUtilRingBuffer* rbuf, - PaRingBufferSize elementCount, - void** dataPtr1, - PaRingBufferSize* sizePtr1, - void** dataPtr2, - PaRingBufferSize* sizePtr2) { - PaRingBufferSize index; - PaRingBufferSize available = PaUtil_GetRingBufferReadAvailable( rbuf ); +ring_buffer_size_t PaUtil_GetRingBufferReadRegions( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount, + void **dataPtr1, ring_buffer_size_t *sizePtr1, + void **dataPtr2, ring_buffer_size_t *sizePtr2 ) +{ + ring_buffer_size_t index; + ring_buffer_size_t available = PaUtil_GetRingBufferReadAvailable( rbuf ); /* doesn't use memory barrier */ if( elementCount > available ) elementCount = available; /* Check to see if read is not contiguous. */ index = rbuf->readIndex & rbuf->smallMask; if( (index + elementCount) > rbuf->bufferSize ) { /* Write data in two blocks that wrap the buffer. 
*/ - PaRingBufferSize firstHalf = rbuf->bufferSize - index; + ring_buffer_size_t firstHalf = rbuf->bufferSize - index; *dataPtr1 = &rbuf->buffer[index*rbuf->elementSizeBytes]; *sizePtr1 = firstHalf; *dataPtr2 = &rbuf->buffer[0]; @@ -179,24 +176,28 @@ PaRingBufferSize PaUtil_GetRingBufferReadRegions(PaUtilRingBuffer* rbuf, *dataPtr2 = NULL; *sizePtr2 = 0; } + + if( available ) + PaUtil_ReadMemoryBarrier(); /* (read-after-read) => read barrier */ + return elementCount; } /*************************************************************************** */ -PaRingBufferSize PaUtil_AdvanceRingBufferReadIndex( - PaUtilRingBuffer* rbuf, - PaRingBufferSize elementCount) { - /* we need to ensure that previous writes are always seen before updating the index. */ - PaUtil_WriteMemoryBarrier(); +ring_buffer_size_t PaUtil_AdvanceRingBufferReadIndex( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount ) +{ + /* ensure that previous reads (copies out of the ring buffer) are always completed before updating (writing) the read index. + (write-after-read) => full barrier + */ + PaUtil_FullMemoryBarrier(); return rbuf->readIndex = (rbuf->readIndex + elementCount) & rbuf->bigMask; } /*************************************************************************** ** Return elements written. */ -PaRingBufferSize PaUtil_WriteRingBuffer(PaUtilRingBuffer* rbuf, - const void* data, - PaRingBufferSize elementCount) { - PaRingBufferSize size1, size2, numWritten; +ring_buffer_size_t PaUtil_WriteRingBuffer( PaUtilRingBuffer *rbuf, const void *data, ring_buffer_size_t elementCount ) +{ + ring_buffer_size_t size1, size2, numWritten; void *data1, *data2; numWritten = PaUtil_GetRingBufferWriteRegions( rbuf, elementCount, &data1, &size1, &data2, &size2 ); if( size2 > 0 ) @@ -216,10 +217,9 @@ PaRingBufferSize PaUtil_WriteRingBuffer(PaUtilRingBuffer* rbuf, /*************************************************************************** ** Return elements read. */ -PaRingBufferSize PaUtil_ReadRingBuffer(PaUtilRingBuffer* rbuf, - void* data, - PaRingBufferSize elementCount) { - PaRingBufferSize size1, size2, numRead; +ring_buffer_size_t PaUtil_ReadRingBuffer( PaUtilRingBuffer *rbuf, void *data, ring_buffer_size_t elementCount ) +{ + ring_buffer_size_t size1, size2, numRead; void *data1, *data2; numRead = PaUtil_GetRingBufferReadRegions( rbuf, elementCount, &data1, &size1, &data2, &size2 ); if( size2 > 0 ) diff --git a/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/pa_ringbuffer.h b/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/pa_ringbuffer.h index de5722cb67..aa9d0aa772 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/pa_ringbuffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/third_party/portaudio/pa_ringbuffer.h @@ -1,7 +1,7 @@ #ifndef MODULES_THIRD_PARTY_PORTAUDIO_PA_RINGBUFFER_H_ #define MODULES_THIRD_PARTY_PORTAUDIO_PA_RINGBUFFER_H_ /* - * $Id: pa_ringbuffer.h 1421 2009-11-18 16:09:05Z bjornroche $ + * $Id$ * Portable Audio I/O Library * Ring Buffer utility. * @@ -65,19 +65,23 @@ The memory area used to store the buffer elements must be allocated by the client prior to calling PaUtil_InitializeRingBuffer() and must outlive the use of the ring buffer. + + @note The ring buffer functions are not normally exposed in the PortAudio + libraries. If you want to call them then you will need to add pa_ringbuffer.c + to your application source code. 
*/ #if defined(__APPLE__) #include -typedef int32_t PaRingBufferSize; +typedef int32_t ring_buffer_size_t; #elif defined(__GNUC__) -typedef long PaRingBufferSize; +typedef long ring_buffer_size_t; #elif (_MSC_VER >= 1400) -typedef long PaRingBufferSize; +typedef long ring_buffer_size_t; #elif defined(_MSC_VER) || defined(__BORLANDC__) -typedef long PaRingBufferSize; +typedef long ring_buffer_size_t; #else -typedef long PaRingBufferSize; +typedef long ring_buffer_size_t; #endif #ifdef __cplusplus @@ -85,38 +89,43 @@ extern "C" { #endif /* __cplusplus */ typedef struct PaUtilRingBuffer { - PaRingBufferSize bufferSize; /**< Number of elements in FIFO. Power of 2. Set - by PaUtil_InitRingBuffer. */ - PaRingBufferSize writeIndex; /**< Index of next writable element. Set by - PaUtil_AdvanceRingBufferWriteIndex. */ - PaRingBufferSize readIndex; /**< Index of next readable element. Set by - PaUtil_AdvanceRingBufferReadIndex. */ - PaRingBufferSize bigMask; /**< Used for wrapping indices with extra bit to - distinguish full/empty. */ - PaRingBufferSize smallMask; /**< Used for fitting indices to buffer. */ - PaRingBufferSize elementSizeBytes; /**< Number of bytes per element. */ + ring_buffer_size_t bufferSize; /**< Number of elements in FIFO. Power of 2. + Set by PaUtil_InitRingBuffer. */ + volatile ring_buffer_size_t + writeIndex; /**< Index of next writable element. Set by + PaUtil_AdvanceRingBufferWriteIndex. */ + volatile ring_buffer_size_t + readIndex; /**< Index of next readable element. Set by + PaUtil_AdvanceRingBufferReadIndex. */ + ring_buffer_size_t bigMask; /**< Used for wrapping indices with extra bit to + distinguish full/empty. */ + ring_buffer_size_t smallMask; /**< Used for fitting indices to buffer. */ + ring_buffer_size_t elementSizeBytes; /**< Number of bytes per element. */ char* buffer; /**< Pointer to the buffer containing the actual data. */ } PaUtilRingBuffer; -/** Initialize Ring Buffer. +/** Initialize Ring Buffer to empty state ready to have elements written to it. @param rbuf The ring buffer. @param elementSizeBytes The size of a single data element in bytes. - @param elementCount The number of elements in the buffer (must be power of 2). + @param elementCount The number of elements in the buffer (must be a power of + 2). @param dataPtr A pointer to a previously allocated area where the data will be maintained. It must be elementCount*elementSizeBytes long. @return -1 if elementCount is not a power of 2, otherwise 0. */ -PaRingBufferSize PaUtil_InitializeRingBuffer(PaUtilRingBuffer* rbuf, - PaRingBufferSize elementSizeBytes, - PaRingBufferSize elementCount, - void* dataPtr); +ring_buffer_size_t PaUtil_InitializeRingBuffer( + PaUtilRingBuffer* rbuf, + ring_buffer_size_t elementSizeBytes, + ring_buffer_size_t elementCount, + void* dataPtr); -/** Clear buffer. Should only be called when buffer is NOT being read. +/** Reset buffer to empty. Should only be called when buffer is NOT being read + or written. @param rbuf The ring buffer. */ @@ -128,7 +137,8 @@ void PaUtil_FlushRingBuffer(PaUtilRingBuffer* rbuf); @return The number of elements available for writing. */ -PaRingBufferSize PaUtil_GetRingBufferWriteAvailable(PaUtilRingBuffer* rbuf); +ring_buffer_size_t PaUtil_GetRingBufferWriteAvailable( + const PaUtilRingBuffer* rbuf); /** Retrieve the number of elements available in the ring buffer for reading. @@ -136,7 +146,8 @@ PaRingBufferSize PaUtil_GetRingBufferWriteAvailable(PaUtilRingBuffer* rbuf); @return The number of elements available for reading. 
*/ -PaRingBufferSize PaUtil_GetRingBufferReadAvailable(PaUtilRingBuffer* rbuf); +ring_buffer_size_t PaUtil_GetRingBufferReadAvailable( + const PaUtilRingBuffer* rbuf); /** Write data to the ring buffer. @@ -148,9 +159,9 @@ PaRingBufferSize PaUtil_GetRingBufferReadAvailable(PaUtilRingBuffer* rbuf); @return The number of elements written. */ -PaRingBufferSize PaUtil_WriteRingBuffer(PaUtilRingBuffer* rbuf, - const void* data, - PaRingBufferSize elementCount); +ring_buffer_size_t PaUtil_WriteRingBuffer(PaUtilRingBuffer* rbuf, + const void* data, + ring_buffer_size_t elementCount); /** Read data from the ring buffer. @@ -162,9 +173,9 @@ PaRingBufferSize PaUtil_WriteRingBuffer(PaUtilRingBuffer* rbuf, @return The number of elements read. */ -PaRingBufferSize PaUtil_ReadRingBuffer(PaUtilRingBuffer* rbuf, - void* data, - PaRingBufferSize elementCount); +ring_buffer_size_t PaUtil_ReadRingBuffer(PaUtilRingBuffer* rbuf, + void* data, + ring_buffer_size_t elementCount); /** Get address of region(s) to which we can write data. @@ -186,12 +197,13 @@ PaRingBufferSize PaUtil_ReadRingBuffer(PaUtilRingBuffer* rbuf, @return The room available to be written or elementCount, whichever is smaller. */ -PaRingBufferSize PaUtil_GetRingBufferWriteRegions(PaUtilRingBuffer* rbuf, - PaRingBufferSize elementCount, - void** dataPtr1, - PaRingBufferSize* sizePtr1, - void** dataPtr2, - PaRingBufferSize* sizePtr2); +ring_buffer_size_t PaUtil_GetRingBufferWriteRegions( + PaUtilRingBuffer* rbuf, + ring_buffer_size_t elementCount, + void** dataPtr1, + ring_buffer_size_t* sizePtr1, + void** dataPtr2, + ring_buffer_size_t* sizePtr2); /** Advance the write index to the next location to be written. @@ -201,11 +213,11 @@ PaRingBufferSize PaUtil_GetRingBufferWriteRegions(PaUtilRingBuffer* rbuf, @return The new position. */ -PaRingBufferSize PaUtil_AdvanceRingBufferWriteIndex( +ring_buffer_size_t PaUtil_AdvanceRingBufferWriteIndex( PaUtilRingBuffer* rbuf, - PaRingBufferSize elementCount); + ring_buffer_size_t elementCount); -/** Get address of region(s) from which we can write data. +/** Get address of region(s) from which we can read data. @param rbuf The ring buffer. @@ -225,12 +237,13 @@ PaRingBufferSize PaUtil_AdvanceRingBufferWriteIndex( @return The number of elements available for reading. */ -PaRingBufferSize PaUtil_GetRingBufferReadRegions(PaUtilRingBuffer* rbuf, - PaRingBufferSize elementCount, - void** dataPtr1, - PaRingBufferSize* sizePtr1, - void** dataPtr2, - PaRingBufferSize* sizePtr2); +ring_buffer_size_t PaUtil_GetRingBufferReadRegions( + PaUtilRingBuffer* rbuf, + ring_buffer_size_t elementCount, + void** dataPtr1, + ring_buffer_size_t* sizePtr1, + void** dataPtr2, + ring_buffer_size_t* sizePtr2); /** Advance the read index to the next location to be read. @@ -240,9 +253,9 @@ PaRingBufferSize PaUtil_GetRingBufferReadRegions(PaUtilRingBuffer* rbuf, @return The new position. */ -PaRingBufferSize PaUtil_AdvanceRingBufferReadIndex( +ring_buffer_size_t PaUtil_AdvanceRingBufferReadIndex( PaUtilRingBuffer* rbuf, - PaRingBufferSize elementCount); + ring_buffer_size_t elementCount); #ifdef __cplusplus } diff --git a/TMessagesProj/jni/voip/webrtc/modules/utility/include/mock/mock_process_thread.h b/TMessagesProj/jni/voip/webrtc/modules/utility/include/mock/mock_process_thread.h deleted file mode 100644 index e356bca99f..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/utility/include/mock/mock_process_thread.h +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. 
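With the header now in sync with the implementation, the contract is unchanged from upstream PortAudio: exactly one reader thread and one writer thread may use the buffer concurrently, the element count must be a power of two, and the caller owns the storage. A minimal usage sketch, not part of the patch (element type and sizes are arbitrary):

    // Sketch: single-producer / single-consumer use of the ring buffer API above.
    #include <cstdint>
    #include "pa_ringbuffer.h"

    constexpr ring_buffer_size_t kElements = 1024;  // must be a power of two
    static int16_t storage[kElements];              // client-allocated, outlives the ring buffer
    static PaUtilRingBuffer rb;

    bool Setup() {
      // Returns -1 if kElements is not a power of two, 0 on success.
      return PaUtil_InitializeRingBuffer(&rb, sizeof(int16_t), kElements, storage) == 0;
    }

    // Writer thread: may store fewer than n elements if the buffer is nearly full.
    ring_buffer_size_t Produce(const int16_t* samples, ring_buffer_size_t n) {
      return PaUtil_WriteRingBuffer(&rb, samples, n);
    }

    // Reader thread: drain whatever is currently available, up to max.
    ring_buffer_size_t Consume(int16_t* out, ring_buffer_size_t max) {
      ring_buffer_size_t available = PaUtil_GetRingBufferReadAvailable(&rb);
      return PaUtil_ReadRingBuffer(&rb, out, available < max ? available : max);
    }

PaUtil_FlushRingBuffer() resets both indices without any barrier, which is why the updated comment insists it only be called while neither side is active.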
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_UTILITY_INCLUDE_MOCK_MOCK_PROCESS_THREAD_H_ -#define MODULES_UTILITY_INCLUDE_MOCK_MOCK_PROCESS_THREAD_H_ - -#include - -#include "modules/utility/include/process_thread.h" -#include "rtc_base/location.h" -#include "test/gmock.h" - -namespace webrtc { - -class MockProcessThread : public ProcessThread { - public: - MOCK_METHOD(void, Start, (), (override)); - MOCK_METHOD(void, Stop, (), (override)); - MOCK_METHOD(void, Delete, (), (override)); - MOCK_METHOD(void, WakeUp, (Module*), (override)); - MOCK_METHOD(void, PostTask, (std::unique_ptr), (override)); - MOCK_METHOD(void, - PostDelayedTask, - (std::unique_ptr, uint32_t), - (override)); - MOCK_METHOD(void, - RegisterModule, - (Module*, const rtc::Location&), - (override)); - MOCK_METHOD(void, DeRegisterModule, (Module*), (override)); -}; - -} // namespace webrtc -#endif // MODULES_UTILITY_INCLUDE_MOCK_MOCK_PROCESS_THREAD_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/utility/include/process_thread.h b/TMessagesProj/jni/voip/webrtc/modules/utility/include/process_thread.h deleted file mode 100644 index 7786dacf94..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/utility/include/process_thread.h +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_UTILITY_INCLUDE_PROCESS_THREAD_H_ -#define MODULES_UTILITY_INCLUDE_PROCESS_THREAD_H_ - -#include - -#include "api/task_queue/queued_task.h" -#include "api/task_queue/task_queue_base.h" - -namespace rtc { -class Location; -} - -namespace webrtc { -class Module; - -// TODO(tommi): ProcessThread probably doesn't need to be a virtual -// interface. There exists one override besides ProcessThreadImpl, -// MockProcessThread, but when looking at how it is used, it seems -// a nullptr might suffice (or simply an actual ProcessThread instance). -class ProcessThread : public TaskQueueBase { - public: - ~ProcessThread() override; - - static std::unique_ptr Create(const char* thread_name); - - // Starts the worker thread. Must be called from the construction thread. - virtual void Start() = 0; - - // Stops the worker thread. Must be called from the construction thread. - virtual void Stop() = 0; - - // Wakes the thread up to give a module a chance to do processing right - // away. This causes the worker thread to wake up and requery the specified - // module for when it should be called back. (Typically the module should - // return 0 from TimeUntilNextProcess on the worker thread at that point). - // Can be called on any thread. - virtual void WakeUp(Module* module) = 0; - - // Adds a module that will start to receive callbacks on the worker thread. - // Can be called from any thread. - virtual void RegisterModule(Module* module, const rtc::Location& from) = 0; - - // Removes a previously registered module. - // Can be called from any thread. 
- virtual void DeRegisterModule(Module* module) = 0; -}; - -} // namespace webrtc - -#endif // MODULES_UTILITY_INCLUDE_PROCESS_THREAD_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/utility/maybe_worker_thread.cc b/TMessagesProj/jni/voip/webrtc/modules/utility/maybe_worker_thread.cc new file mode 100644 index 0000000000..abb52d4691 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/utility/maybe_worker_thread.cc @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/utility/maybe_worker_thread.h" + +#include + +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "rtc_base/checks.h" +#include "rtc_base/event.h" +#include "rtc_base/logging.h" +#include "rtc_base/task_queue.h" + +namespace webrtc { + +MaybeWorkerThread::MaybeWorkerThread(const FieldTrialsView& field_trials, + absl::string_view task_queue_name, + TaskQueueFactory* factory) + : owned_task_queue_( + field_trials.IsEnabled("WebRTC-SendPacketsOnWorkerThread") + ? nullptr + : factory->CreateTaskQueue(task_queue_name, + rtc::TaskQueue::Priority::NORMAL)), + worker_thread_(TaskQueueBase::Current()) { + RTC_DCHECK(worker_thread_); + RTC_LOG(LS_INFO) << "WebRTC-SendPacketsOnWorkerThread" + << (owned_task_queue_ ? " Disabled" : " Enabled"); +} + +MaybeWorkerThread::~MaybeWorkerThread() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + + if (owned_task_queue_) { + // owned_task_queue_ must be a valid pointer when the task queue is + // destroyed since there may be tasks that use this object that run when the + // task queue is deleted. + owned_task_queue_->Delete(); + owned_task_queue_.release(); + } +} + +void MaybeWorkerThread::RunSynchronous(absl::AnyInvocable task) { + if (owned_task_queue_) { + rtc::Event thread_sync_event; + auto closure = [&thread_sync_event, task = std::move(task)]() mutable { + std::move(task)(); + thread_sync_event.Set(); + }; + owned_task_queue_->PostTask(std::move(closure)); + thread_sync_event.Wait(rtc::Event::kForever); + } else { + RTC_DCHECK_RUN_ON(&sequence_checker_); + std::move(task)(); + } +} + +void MaybeWorkerThread::RunOrPost(absl::AnyInvocable task) { + if (owned_task_queue_) { + owned_task_queue_->PostTask(std::move(task)); + } else { + RTC_DCHECK_RUN_ON(&sequence_checker_); + std::move(task)(); + } +} + +TaskQueueBase* MaybeWorkerThread::TaskQueueForDelayedTasks() const { + RTC_DCHECK(IsCurrent()); + return owned_task_queue_ ? owned_task_queue_.get() : worker_thread_; +} + +TaskQueueBase* MaybeWorkerThread::TaskQueueForPost() const { + return owned_task_queue_ ? 
owned_task_queue_.get() : worker_thread_; +} + +bool MaybeWorkerThread::IsCurrent() const { + if (owned_task_queue_) { + return owned_task_queue_->IsCurrent(); + } + return worker_thread_->IsCurrent(); +} + +absl::AnyInvocable MaybeWorkerThread::MaybeSafeTask( + rtc::scoped_refptr flag, + absl::AnyInvocable task) { + if (owned_task_queue_) { + return task; + } else { + return SafeTask(std::move(flag), std::move(task)); + } +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/utility/maybe_worker_thread.h b/TMessagesProj/jni/voip/webrtc/modules/utility/maybe_worker_thread.h new file mode 100644 index 0000000000..a93a173bf6 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/utility/maybe_worker_thread.h @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_UTILITY_MAYBE_WORKER_THREAD_H_ +#define MODULES_UTILITY_MAYBE_WORKER_THREAD_H_ + +#include + +#include "absl/strings/string_view.h" +#include "api/field_trials_view.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +// Helper class used by experiment to replace usage of the +// RTP worker task queue owned by RtpTransportControllerSend, and the pacer task +// queue owned by TaskQueuePacedSender with the one and only worker thread. +// Tasks will run on the target sequence which is either the worker thread or +// one of these task queues depending on the field trial +// "WebRTC-SendPacketsOnWorkerThread". +// This class is assumed to be created on the worker thread and the worker +// thread is assumed to outlive an instance of this class. +// +// Experiment can be tracked in +// https://bugs.chromium.org/p/webrtc/issues/detail?id=14502 +// +// After experiment evaluation, this class should be deleted. +// Calls to RunOrPost and RunSynchronous should be removed and the task should +// be invoked immediately. +// Instead of MaybeSafeTask a SafeTask should be used when posting tasks. +class RTC_LOCKABLE MaybeWorkerThread { + public: + MaybeWorkerThread(const FieldTrialsView& field_trials, + absl::string_view task_queue_name, + TaskQueueFactory* factory); + ~MaybeWorkerThread(); + + // Runs `task` immediately on the worker thread if in experiment, otherwise + // post the task on the task queue. + void RunOrPost(absl::AnyInvocable task); + // Runs `task` immediately on the worker thread if in experiment, otherwise + // post the task on the task queue and use an even to wait for completion. + void RunSynchronous(absl::AnyInvocable task); + + // Used for posting delayed or repeated tasks on the worker thread or task + // queue depending on the field trial. DCHECKs that this method is called on + // the target sequence. + TaskQueueBase* TaskQueueForDelayedTasks() const; + + // Used when a task has to be posted from one sequence to the target + // sequence. A task should only be posted if a sequence hop is needed. 
+ TaskQueueBase* TaskQueueForPost() const; + + // Workaround to use a SafeTask only if the target sequence is the worker + // thread. This is used when a SafeTask can not be used because the object + // that posted the task is not destroyed on the target sequence. Instead, the + // caller has to guarantee that this MaybeWorkerThread is destroyed first + // since that guarantee that the posted task is deleted or run before the + // owning class. + absl::AnyInvocable MaybeSafeTask( + rtc::scoped_refptr flag, + absl::AnyInvocable task); + + // To implement macro RTC_DCHECK_RUN_ON. + // Implementation delegate to the actual used sequence. + bool IsCurrent() const; + + private: + SequenceChecker sequence_checker_; + std::unique_ptr owned_task_queue_; + TaskQueueBase* const worker_thread_; +}; + +} // namespace webrtc + +#endif // MODULES_UTILITY_MAYBE_WORKER_THREAD_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/utility/source/helpers_android.cc b/TMessagesProj/jni/voip/webrtc/modules/utility/source/helpers_android.cc index 9cfee8a2af..0e04817bcb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/utility/source/helpers_android.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/utility/source/helpers_android.cc @@ -17,6 +17,7 @@ #include "rtc_base/checks.h" #include "rtc_base/platform_thread.h" +#include "tgnet/FileLog.h" #define TAG "HelpersAndroid" #define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__) @@ -76,6 +77,7 @@ jclass FindClass(JNIEnv* jni, const char* name) { } jobject NewGlobalRef(JNIEnv* jni, jobject o) { + DEBUG_REF("webrtc new global ref"); jobject ret = jni->NewGlobalRef(o); CHECK_EXCEPTION(jni) << "Error during NewGlobalRef"; RTC_CHECK(ret); @@ -83,6 +85,7 @@ jobject NewGlobalRef(JNIEnv* jni, jobject o) { } void DeleteGlobalRef(JNIEnv* jni, jobject o) { + DEBUG_DELREF("webrtc remove global ref"); jni->DeleteGlobalRef(o); CHECK_EXCEPTION(jni) << "Error during DeleteGlobalRef"; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/utility/source/jvm_android.cc b/TMessagesProj/jni/voip/webrtc/modules/utility/source/jvm_android.cc index ee9930bcaa..69bfdc9d12 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/utility/source/jvm_android.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/utility/source/jvm_android.cc @@ -17,6 +17,7 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/platform_thread.h" +#include "tgnet/FileLog.h" namespace webrtc { @@ -44,6 +45,7 @@ void LoadClasses(JNIEnv* jni) { RTC_LOG(LS_INFO) << "name: " << c.name; CHECK_EXCEPTION(jni) << "Error during FindClass: " << c.name; RTC_CHECK(localRef) << c.name; + DEBUG_REF("webrtc 4 globalref"); jclass globalRef = reinterpret_cast(jni->NewGlobalRef(localRef)); CHECK_EXCEPTION(jni) << "Error during NewGlobalRef: " << c.name; RTC_CHECK(globalRef) << c.name; @@ -53,6 +55,7 @@ void LoadClasses(JNIEnv* jni) { void FreeClassReferences(JNIEnv* jni) { for (auto& c : loaded_classes) { + DEBUG_DELREF("FreeClassReferences"); jni->DeleteGlobalRef(c.clazz); c.clazz = nullptr; } @@ -94,11 +97,13 @@ JvmThreadConnector::~JvmThreadConnector() { // GlobalRef implementation. 
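The comments in maybe_worker_thread.h spell out the migration pattern: callers keep their existing task-posting shape, and MaybeWorkerThread decides at construction time, via the "WebRTC-SendPacketsOnWorkerThread" field trial, whether a task runs inline on the worker thread or is posted to an owned task queue. A hedged sketch of a caller follows; the owning class, queue name and safety-flag plumbing are invented for illustration.

    // Sketch only: a hypothetical owner driving MaybeWorkerThread.
    #include "api/task_queue/pending_task_safety_flag.h"
    #include "modules/utility/maybe_worker_thread.h"

    class PacketSenderSketch {
     public:
      PacketSenderSketch(const webrtc::FieldTrialsView& field_trials,
                         webrtc::TaskQueueFactory* factory)
          : queue_(field_trials, "sketch_send_queue", factory) {}

      // RunOrPost hides whether the task runs inline or hops to the task queue.
      void SendPacket() {
        queue_.RunOrPost([this] { /* touch send state on the target sequence */ });
      }

      // Called from some other thread: an explicit hop, guarded against the
      // owner being destroyed before the task runs.
      void ScheduleFromElsewhere() {
        queue_.TaskQueueForPost()->PostTask(
            queue_.MaybeSafeTask(safety_.flag(),
                                 [this] { /* runs on the target sequence */ }));
      }

     private:
      webrtc::MaybeWorkerThread queue_;
      webrtc::ScopedTaskSafety safety_;  // assumed helper supplying the PendingTaskSafetyFlag
    };

Once the experiment is evaluated the class is meant to disappear: RunOrPost collapses to a direct call and MaybeSafeTask to a plain SafeTask, as the header comment already notes.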
GlobalRef::GlobalRef(JNIEnv* jni, jobject object) : jni_(jni), j_object_(NewGlobalRef(jni, object)) { + DEBUG_REF("webrtc jvm globalref"); RTC_LOG(LS_INFO) << "GlobalRef::ctor"; } GlobalRef::~GlobalRef() { RTC_LOG(LS_INFO) << "GlobalRef::dtor"; + DEBUG_DELREF("webrtc jvm globalref"); DeleteGlobalRef(jni_, j_object_); } @@ -136,7 +141,7 @@ NativeRegistration::NativeRegistration(JNIEnv* jni, jclass clazz) NativeRegistration::~NativeRegistration() { RTC_LOG(LS_INFO) << "NativeRegistration::dtor"; - jni_->UnregisterNatives(j_class_); + //jni_->UnregisterNatives(j_class_); CHECK_EXCEPTION(jni_) << "Error during UnregisterNatives"; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/utility/source/process_thread_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/utility/source/process_thread_impl.cc deleted file mode 100644 index 73fc23400b..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/utility/source/process_thread_impl.cc +++ /dev/null @@ -1,298 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/utility/source/process_thread_impl.h" - -#include - -#include "modules/include/module.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/time_utils.h" -#include "rtc_base/trace_event.h" - -namespace webrtc { -namespace { - -// We use this constant internally to signal that a module has requested -// a callback right away. When this is set, no call to TimeUntilNextProcess -// should be made, but Process() should be called directly. -const int64_t kCallProcessImmediately = -1; - -int64_t GetNextCallbackTime(Module* module, int64_t time_now) { - int64_t interval = module->TimeUntilNextProcess(); - if (interval < 0) { - // Falling behind, we should call the callback now. - return time_now; - } - return time_now + interval; -} -} // namespace - -ProcessThread::~ProcessThread() {} - -// static -std::unique_ptr ProcessThread::Create(const char* thread_name) { - return std::unique_ptr(new ProcessThreadImpl(thread_name)); -} - -ProcessThreadImpl::ProcessThreadImpl(const char* thread_name) - : stop_(false), thread_name_(thread_name) {} - -ProcessThreadImpl::~ProcessThreadImpl() { - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!stop_); - - while (!delayed_tasks_.empty()) { - delete delayed_tasks_.top().task; - delayed_tasks_.pop(); - } - - while (!queue_.empty()) { - delete queue_.front(); - queue_.pop(); - } -} - -void ProcessThreadImpl::Delete() { - RTC_LOG(LS_WARNING) << "Process thread " << thread_name_ - << " is destroyed as a TaskQueue."; - Stop(); - delete this; -} - -// Doesn't need locking, because the contending thread isn't running. 
-void ProcessThreadImpl::Start() RTC_NO_THREAD_SAFETY_ANALYSIS { - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(thread_.empty()); - if (!thread_.empty()) - return; - - RTC_DCHECK(!stop_); - - for (ModuleCallback& m : modules_) - m.module->ProcessThreadAttached(this); - - thread_ = rtc::PlatformThread::SpawnJoinable( - [this] { - CurrentTaskQueueSetter set_current(this); - while (Process()) { - } - }, - thread_name_); -} - -void ProcessThreadImpl::Stop() { - RTC_DCHECK(thread_checker_.IsCurrent()); - if (thread_.empty()) - return; - - { - // Need to take lock, for synchronization with `thread_`. - MutexLock lock(&mutex_); - stop_ = true; - } - - wake_up_.Set(); - thread_.Finalize(); - - StopNoLocks(); -} - -// No locking needed, since this is called after the contending thread is -// stopped. -void ProcessThreadImpl::StopNoLocks() RTC_NO_THREAD_SAFETY_ANALYSIS { - RTC_DCHECK(thread_.empty()); - stop_ = false; - - for (ModuleCallback& m : modules_) - m.module->ProcessThreadAttached(nullptr); -} - -void ProcessThreadImpl::WakeUp(Module* module) { - // Allowed to be called on any thread. - auto holds_mutex = [this] { - if (!IsCurrent()) { - return false; - } - RTC_DCHECK_RUN_ON(this); - return holds_mutex_; - }; - if (holds_mutex()) { - // Avoid locking if called on the ProcessThread, via a module's Process), - WakeUpNoLocks(module); - } else { - MutexLock lock(&mutex_); - WakeUpInternal(module); - } - wake_up_.Set(); -} - -// Must be called only indirectly from Process, which already holds the lock. -void ProcessThreadImpl::WakeUpNoLocks(Module* module) - RTC_NO_THREAD_SAFETY_ANALYSIS { - RTC_DCHECK_RUN_ON(this); - WakeUpInternal(module); -} - -void ProcessThreadImpl::WakeUpInternal(Module* module) { - for (ModuleCallback& m : modules_) { - if (m.module == module) - m.next_callback = kCallProcessImmediately; - } -} - -void ProcessThreadImpl::PostTask(std::unique_ptr task) { - // Allowed to be called on any thread, except from a module's Process method. - if (IsCurrent()) { - RTC_DCHECK_RUN_ON(this); - RTC_DCHECK(!holds_mutex_) << "Calling ProcessThread::PostTask from " - "Module::Process is not supported"; - } - { - MutexLock lock(&mutex_); - queue_.push(task.release()); - } - wake_up_.Set(); -} - -void ProcessThreadImpl::PostDelayedTask(std::unique_ptr task, - uint32_t milliseconds) { - int64_t run_at_ms = rtc::TimeMillis() + milliseconds; - bool recalculate_wakeup_time; - { - MutexLock lock(&mutex_); - recalculate_wakeup_time = - delayed_tasks_.empty() || run_at_ms < delayed_tasks_.top().run_at_ms; - delayed_tasks_.emplace(run_at_ms, std::move(task)); - } - if (recalculate_wakeup_time) { - wake_up_.Set(); - } -} - -void ProcessThreadImpl::RegisterModule(Module* module, - const rtc::Location& from) { - TRACE_EVENT0("webrtc", "ProcessThreadImpl::RegisterModule"); - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(module) << from.ToString(); - -#if RTC_DCHECK_IS_ON - { - // Catch programmer error. - MutexLock lock(&mutex_); - for (const ModuleCallback& mc : modules_) { - RTC_DCHECK(mc.module != module) - << "Already registered here: " << mc.location.ToString() - << "\n" - "Now attempting from here: " - << from.ToString(); - } - } -#endif - - // Now that we know the module isn't in the list, we'll call out to notify - // the module that it's attached to the worker thread. We don't hold - // the lock while we make this call. 
- if (!thread_.empty()) - module->ProcessThreadAttached(this); - - { - MutexLock lock(&mutex_); - modules_.push_back(ModuleCallback(module, from)); - } - - // Wake the thread calling ProcessThreadImpl::Process() to update the - // waiting time. The waiting time for the just registered module may be - // shorter than all other registered modules. - wake_up_.Set(); -} - -void ProcessThreadImpl::DeRegisterModule(Module* module) { - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(module); - - { - MutexLock lock(&mutex_); - modules_.remove_if( - [&module](const ModuleCallback& m) { return m.module == module; }); - } - - // Notify the module that it's been detached. - module->ProcessThreadAttached(nullptr); -} - -bool ProcessThreadImpl::Process() { - TRACE_EVENT1("webrtc", "ProcessThreadImpl", "name", thread_name_); - int64_t now = rtc::TimeMillis(); - int64_t next_checkpoint = now + (1000 * 60); - RTC_DCHECK_RUN_ON(this); - { - MutexLock lock(&mutex_); - if (stop_) - return false; - for (ModuleCallback& m : modules_) { - // TODO(tommi): Would be good to measure the time TimeUntilNextProcess - // takes and dcheck if it takes too long (e.g. >=10ms). Ideally this - // operation should not require taking a lock, so querying all modules - // should run in a matter of nanoseconds. - if (m.next_callback == 0) - m.next_callback = GetNextCallbackTime(m.module, now); - - // Set to true for the duration of the calls to modules' Process(). - holds_mutex_ = true; - if (m.next_callback <= now || - m.next_callback == kCallProcessImmediately) { - { - TRACE_EVENT2("webrtc", "ModuleProcess", "function", - m.location.function_name(), "file", - m.location.file_name()); - m.module->Process(); - } - // Use a new 'now' reference to calculate when the next callback - // should occur. We'll continue to use 'now' above for the baseline - // of calculating how long we should wait, to reduce variance. - int64_t new_now = rtc::TimeMillis(); - m.next_callback = GetNextCallbackTime(m.module, new_now); - } - holds_mutex_ = false; - - if (m.next_callback < next_checkpoint) - next_checkpoint = m.next_callback; - } - - while (!delayed_tasks_.empty() && delayed_tasks_.top().run_at_ms <= now) { - queue_.push(delayed_tasks_.top().task); - delayed_tasks_.pop(); - } - - if (!delayed_tasks_.empty()) { - next_checkpoint = - std::min(next_checkpoint, delayed_tasks_.top().run_at_ms); - } - - while (!queue_.empty()) { - QueuedTask* task = queue_.front(); - queue_.pop(); - mutex_.Unlock(); - if (task->Run()) { - delete task; - } - mutex_.Lock(); - } - } - - int64_t time_to_wait = next_checkpoint - rtc::TimeMillis(); - if (time_to_wait > 0) - wake_up_.Wait(static_cast(time_to_wait)); - - return true; -} -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/utility/source/process_thread_impl.h b/TMessagesProj/jni/voip/webrtc/modules/utility/source/process_thread_impl.h deleted file mode 100644 index e9a26eb96f..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/utility/source/process_thread_impl.h +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_ -#define MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_ - -#include - -#include -#include -#include - -#include "api/sequence_checker.h" -#include "api/task_queue/queued_task.h" -#include "modules/include/module.h" -#include "modules/utility/include/process_thread.h" -#include "rtc_base/event.h" -#include "rtc_base/location.h" -#include "rtc_base/platform_thread.h" - -namespace webrtc { - -class ProcessThreadImpl : public ProcessThread { - public: - explicit ProcessThreadImpl(const char* thread_name); - ~ProcessThreadImpl() override; - - void Start() override; - void Stop() override; - - void WakeUp(Module* module) override; - void PostTask(std::unique_ptr task) override; - void PostDelayedTask(std::unique_ptr task, - uint32_t milliseconds) override; - - void RegisterModule(Module* module, const rtc::Location& from) override; - void DeRegisterModule(Module* module) override; - - protected: - bool Process(); - - private: - struct ModuleCallback { - ModuleCallback() = delete; - ModuleCallback(ModuleCallback&& cb) = default; - ModuleCallback(const ModuleCallback& cb) = default; - ModuleCallback(Module* module, const rtc::Location& location) - : module(module), location(location) {} - bool operator==(const ModuleCallback& cb) const { - return cb.module == module; - } - - Module* const module; - int64_t next_callback = 0; // Absolute timestamp. - const rtc::Location location; - - private: - ModuleCallback& operator=(ModuleCallback&); - }; - struct DelayedTask { - DelayedTask(int64_t run_at_ms, std::unique_ptr task) - : run_at_ms(run_at_ms), task(task.release()) {} - friend bool operator<(const DelayedTask& lhs, const DelayedTask& rhs) { - // Earliest DelayedTask should be at the top of the priority queue. - return lhs.run_at_ms > rhs.run_at_ms; - } - - int64_t run_at_ms; - // DelayedTask owns the `task`, but some delayed tasks must be removed from - // the std::priority_queue, but mustn't be deleted. std::priority_queue does - // not give non-const access to the values, so storing unique_ptr would - // delete the task as soon as it is remove from the priority queue. - // Thus lifetime of the `task` is managed manually. - QueuedTask* task; - }; - typedef std::list ModuleList; - - void Delete() override; - // The part of Stop processing that doesn't need any locking. - void StopNoLocks(); - void WakeUpNoLocks(Module* module); - void WakeUpInternal(Module* module) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - - // Members protected by this mutex are accessed on the constructor thread and - // on the spawned process thread, and locking is needed only while the process - // thread is running. - Mutex mutex_; - - SequenceChecker thread_checker_; - rtc::Event wake_up_; - rtc::PlatformThread thread_; - - ModuleList modules_ RTC_GUARDED_BY(mutex_); - // Set to true when calling Process, to allow reentrant calls to WakeUp. - bool holds_mutex_ RTC_GUARDED_BY(this) = false; - std::queue queue_; - std::priority_queue delayed_tasks_ RTC_GUARDED_BY(mutex_); - // The `stop_` flag is modified only by the construction thread, protected by - // `thread_checker_`. It is read also by the spawned `thread_`. The latter - // thread must take `mutex_` before access, and for thread safety, the - // constructor thread needs to take `mutex_` when it modifies `stop_` and - // `thread_` is running. Annotations like RTC_GUARDED_BY doesn't support this - // usage pattern. 
- bool stop_ RTC_GUARDED_BY(mutex_); - const char* thread_name_; -}; - -} // namespace webrtc - -#endif // MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/OWNERS b/TMessagesProj/jni/voip/webrtc/modules/video_capture/OWNERS index d728122343..364d66d36f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/OWNERS @@ -1,3 +1,4 @@ +ilnik@webrtc.org mflodman@webrtc.org perkj@webrtc.org tkchin@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.h b/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.h index 4b47389609..546265049c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.h @@ -29,7 +29,7 @@ class DeviceInfoImpl : public VideoCaptureModule::DeviceInfo { ~DeviceInfoImpl(void) override; int32_t NumberOfCapabilities(const char* deviceUniqueIdUTF8) override; int32_t GetCapability(const char* deviceUniqueIdUTF8, - const uint32_t deviceCapabilityNumber, + uint32_t deviceCapabilityNumber, VideoCaptureCapability& capability) override; int32_t GetBestMatchedCapability(const char* deviceUniqueIdUTF8, diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_linux.cc index cde3b86d5c..ccbbeae3ab 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_linux.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_linux.cc @@ -8,8 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/video_capture/linux/device_info_linux.h" - #include #include #include @@ -22,6 +20,7 @@ #include +#include "modules/video_capture/linux/device_info_v4l2.h" #include "modules/video_capture/video_capture.h" #include "modules/video_capture/video_capture_defines.h" #include "modules/video_capture/video_capture_impl.h" @@ -30,265 +29,7 @@ namespace webrtc { namespace videocapturemodule { VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() { - return new videocapturemodule::DeviceInfoLinux(); -} - -DeviceInfoLinux::DeviceInfoLinux() : DeviceInfoImpl() {} - -int32_t DeviceInfoLinux::Init() { - return 0; -} - -DeviceInfoLinux::~DeviceInfoLinux() {} - -uint32_t DeviceInfoLinux::NumberOfDevices() { - uint32_t count = 0; - char device[20]; - int fd = -1; - struct v4l2_capability cap; - - /* detect /dev/video [0-63]VideoCaptureModule entries */ - for (int n = 0; n < 64; n++) { - sprintf(device, "/dev/video%d", n); - if ((fd = open(device, O_RDONLY)) != -1) { - // query device capabilities and make sure this is a video capture device - if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || - !(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) { - close(fd); - continue; - } - - close(fd); - count++; - } - } - - return count; -} - -int32_t DeviceInfoLinux::GetDeviceName(uint32_t deviceNumber, - char* deviceNameUTF8, - uint32_t deviceNameLength, - char* deviceUniqueIdUTF8, - uint32_t deviceUniqueIdUTF8Length, - char* /*productUniqueIdUTF8*/, - uint32_t /*productUniqueIdUTF8Length*/) { - // Travel through /dev/video [0-63] - uint32_t count = 0; - char device[20]; - int fd = -1; - bool found = false; - struct v4l2_capability cap; - for (int n = 0; n < 64; n++) { - sprintf(device, "/dev/video%d", n); - if ((fd = open(device, O_RDONLY)) != -1) { - // 
query device capabilities and make sure this is a video capture device - if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || - !(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) { - close(fd); - continue; - } - if (count == deviceNumber) { - // Found the device - found = true; - break; - } else { - close(fd); - count++; - } - } - } - - if (!found) - return -1; - - // query device capabilities - if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) { - RTC_LOG(LS_INFO) << "error in querying the device capability for device " - << device << ". errno = " << errno; - close(fd); - return -1; - } - - close(fd); - - char cameraName[64]; - memset(deviceNameUTF8, 0, deviceNameLength); - memcpy(cameraName, cap.card, sizeof(cap.card)); - - if (deviceNameLength > strlen(cameraName)) { - memcpy(deviceNameUTF8, cameraName, strlen(cameraName)); - } else { - RTC_LOG(LS_INFO) << "buffer passed is too small"; - return -1; - } - - if (cap.bus_info[0] != 0) // may not available in all drivers - { - // copy device id - if (deviceUniqueIdUTF8Length > strlen((const char*)cap.bus_info)) { - memset(deviceUniqueIdUTF8, 0, deviceUniqueIdUTF8Length); - memcpy(deviceUniqueIdUTF8, cap.bus_info, - strlen((const char*)cap.bus_info)); - } else { - RTC_LOG(LS_INFO) << "buffer passed is too small"; - return -1; - } - } - - return 0; -} - -int32_t DeviceInfoLinux::CreateCapabilityMap(const char* deviceUniqueIdUTF8) { - int fd; - char device[32]; - bool found = false; - - const int32_t deviceUniqueIdUTF8Length = - (int32_t)strlen((char*)deviceUniqueIdUTF8); - if (deviceUniqueIdUTF8Length >= kVideoCaptureUniqueNameLength) { - RTC_LOG(LS_INFO) << "Device name too long"; - return -1; - } - RTC_LOG(LS_INFO) << "CreateCapabilityMap called for device " - << deviceUniqueIdUTF8; - - /* detect /dev/video [0-63] entries */ - for (int n = 0; n < 64; ++n) { - sprintf(device, "/dev/video%d", n); - fd = open(device, O_RDONLY); - if (fd == -1) - continue; - - // query device capabilities - struct v4l2_capability cap; - if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) { - // skip devices without video capture capability - if (!(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) { - continue; - } - - if (cap.bus_info[0] != 0) { - if (strncmp((const char*)cap.bus_info, (const char*)deviceUniqueIdUTF8, - strlen((const char*)deviceUniqueIdUTF8)) == - 0) // match with device id - { - found = true; - break; // fd matches with device unique id supplied - } - } else // match for device name - { - if (IsDeviceNameMatches((const char*)cap.card, - (const char*)deviceUniqueIdUTF8)) { - found = true; - break; - } - } - } - close(fd); // close since this is not the matching device - } - - if (!found) { - RTC_LOG(LS_INFO) << "no matching device found"; - return -1; - } - - // now fd will point to the matching device - // reset old capability list. 
- _captureCapabilities.clear(); - - int size = FillCapabilities(fd); - close(fd); - - // Store the new used device name - _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length; - _lastUsedDeviceName = - (char*)realloc(_lastUsedDeviceName, _lastUsedDeviceNameLength + 1); - memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, - _lastUsedDeviceNameLength + 1); - - RTC_LOG(LS_INFO) << "CreateCapabilityMap " << _captureCapabilities.size(); - - return size; -} - -int32_t DeviceInfoLinux::DisplayCaptureSettingsDialogBox( - const char* /*deviceUniqueIdUTF8*/, - const char* /*dialogTitleUTF8*/, - void* /*parentWindow*/, - uint32_t /*positionX*/, - uint32_t /*positionY*/) { - return -1; -} - -bool DeviceInfoLinux::IsDeviceNameMatches(const char* name, - const char* deviceUniqueIdUTF8) { - if (strncmp(deviceUniqueIdUTF8, name, strlen(name)) == 0) - return true; - return false; + return new videocapturemodule::DeviceInfoV4l2(); } - -int32_t DeviceInfoLinux::FillCapabilities(int fd) { - // set image format - struct v4l2_format video_fmt; - memset(&video_fmt, 0, sizeof(struct v4l2_format)); - - video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - video_fmt.fmt.pix.sizeimage = 0; - - int totalFmts = 4; - unsigned int videoFormats[] = {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_YUV420, - V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_UYVY}; - - int sizes = 13; - unsigned int size[][2] = {{128, 96}, {160, 120}, {176, 144}, {320, 240}, - {352, 288}, {640, 480}, {704, 576}, {800, 600}, - {960, 720}, {1280, 720}, {1024, 768}, {1440, 1080}, - {1920, 1080}}; - - for (int fmts = 0; fmts < totalFmts; fmts++) { - for (int i = 0; i < sizes; i++) { - video_fmt.fmt.pix.pixelformat = videoFormats[fmts]; - video_fmt.fmt.pix.width = size[i][0]; - video_fmt.fmt.pix.height = size[i][1]; - - if (ioctl(fd, VIDIOC_TRY_FMT, &video_fmt) >= 0) { - if ((video_fmt.fmt.pix.width == size[i][0]) && - (video_fmt.fmt.pix.height == size[i][1])) { - VideoCaptureCapability cap; - cap.width = video_fmt.fmt.pix.width; - cap.height = video_fmt.fmt.pix.height; - if (videoFormats[fmts] == V4L2_PIX_FMT_YUYV) { - cap.videoType = VideoType::kYUY2; - } else if (videoFormats[fmts] == V4L2_PIX_FMT_YUV420) { - cap.videoType = VideoType::kI420; - } else if (videoFormats[fmts] == V4L2_PIX_FMT_MJPEG) { - cap.videoType = VideoType::kMJPEG; - } else if (videoFormats[fmts] == V4L2_PIX_FMT_UYVY) { - cap.videoType = VideoType::kUYVY; - } - - // get fps of current camera mode - // V4l2 does not have a stable method of knowing so we just guess. - if (cap.width >= 800 && cap.videoType != VideoType::kMJPEG) { - cap.maxFPS = 15; - } else { - cap.maxFPS = 30; - } - - _captureCapabilities.push_back(cap); - RTC_LOG(LS_VERBOSE) << "Camera capability, width:" << cap.width - << " height:" << cap.height - << " type:" << static_cast(cap.videoType) - << " fps:" << cap.maxFPS; - } - } - } - } - - RTC_LOG(LS_INFO) << "CreateCapabilityMap " << _captureCapabilities.size(); - return _captureCapabilities.size(); -} - } // namespace videocapturemodule } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_linux.h b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_linux.h deleted file mode 100644 index 304ae71230..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_linux.h +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_DEVICE_INFO_LINUX_H_ -#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_DEVICE_INFO_LINUX_H_ - -#include - -#include "modules/video_capture/device_info_impl.h" - -namespace webrtc { -namespace videocapturemodule { -class DeviceInfoLinux : public DeviceInfoImpl { - public: - DeviceInfoLinux(); - ~DeviceInfoLinux() override; - uint32_t NumberOfDevices() override; - int32_t GetDeviceName(uint32_t deviceNumber, - char* deviceNameUTF8, - uint32_t deviceNameLength, - char* deviceUniqueIdUTF8, - uint32_t deviceUniqueIdUTF8Length, - char* productUniqueIdUTF8 = 0, - uint32_t productUniqueIdUTF8Length = 0) override; - /* - * Fills the membervariable _captureCapabilities with capabilites for the - * given device name. - */ - int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override - RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock); - int32_t DisplayCaptureSettingsDialogBox(const char* /*deviceUniqueIdUTF8*/, - const char* /*dialogTitleUTF8*/, - void* /*parentWindow*/, - uint32_t /*positionX*/, - uint32_t /*positionY*/) override; - int32_t FillCapabilities(int fd) RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock); - int32_t Init() override; - - private: - bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8); -}; -} // namespace videocapturemodule -} // namespace webrtc -#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_DEVICE_INFO_LINUX_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_v4l2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_v4l2.cc new file mode 100644 index 0000000000..c1062d4078 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_v4l2.cc @@ -0,0 +1,286 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_capture/linux/device_info_v4l2.h" + +#include +#include +#include +#include +#include +#include +#include +// v4l includes +#include + +#include + +#include "modules/video_capture/video_capture.h" +#include "modules/video_capture/video_capture_defines.h" +#include "modules/video_capture/video_capture_impl.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace videocapturemodule { +DeviceInfoV4l2::DeviceInfoV4l2() : DeviceInfoImpl() {} + +int32_t DeviceInfoV4l2::Init() { + return 0; +} + +DeviceInfoV4l2::~DeviceInfoV4l2() {} + +uint32_t DeviceInfoV4l2::NumberOfDevices() { + uint32_t count = 0; + char device[20]; + int fd = -1; + struct v4l2_capability cap; + + /* detect /dev/video [0-63]VideoCaptureModule entries */ + for (int n = 0; n < 64; n++) { + snprintf(device, sizeof(device), "/dev/video%d", n); + if ((fd = open(device, O_RDONLY)) != -1) { + // query device capabilities and make sure this is a video capture device + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || + !(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) { + close(fd); + continue; + } + + close(fd); + count++; + } + } + + return count; +} + +int32_t DeviceInfoV4l2::GetDeviceName(uint32_t deviceNumber, + char* deviceNameUTF8, + uint32_t deviceNameLength, + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* /*productUniqueIdUTF8*/, + uint32_t /*productUniqueIdUTF8Length*/) { + // Travel through /dev/video [0-63] + uint32_t count = 0; + char device[20]; + int fd = -1; + bool found = false; + struct v4l2_capability cap; + for (int n = 0; n < 64; n++) { + snprintf(device, sizeof(device), "/dev/video%d", n); + if ((fd = open(device, O_RDONLY)) != -1) { + // query device capabilities and make sure this is a video capture device + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || + !(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) { + close(fd); + continue; + } + if (count == deviceNumber) { + // Found the device + found = true; + break; + } else { + close(fd); + count++; + } + } + } + + if (!found) + return -1; + + // query device capabilities + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) { + RTC_LOG(LS_INFO) << "error in querying the device capability for device " + << device << ". 
errno = " << errno; + close(fd); + return -1; + } + + close(fd); + + char cameraName[64]; + memset(deviceNameUTF8, 0, deviceNameLength); + memcpy(cameraName, cap.card, sizeof(cap.card)); + + if (deviceNameLength > strlen(cameraName)) { + memcpy(deviceNameUTF8, cameraName, strlen(cameraName)); + } else { + RTC_LOG(LS_INFO) << "buffer passed is too small"; + return -1; + } + + if (cap.bus_info[0] != 0) { // may not available in all drivers + // copy device id + size_t len = strlen(reinterpret_cast(cap.bus_info)); + if (deviceUniqueIdUTF8Length > len) { + memset(deviceUniqueIdUTF8, 0, deviceUniqueIdUTF8Length); + memcpy(deviceUniqueIdUTF8, cap.bus_info, len); + } else { + RTC_LOG(LS_INFO) << "buffer passed is too small"; + return -1; + } + } + + return 0; +} + +int32_t DeviceInfoV4l2::CreateCapabilityMap(const char* deviceUniqueIdUTF8) { + int fd; + char device[32]; + bool found = false; + + const int32_t deviceUniqueIdUTF8Length = strlen(deviceUniqueIdUTF8); + if (deviceUniqueIdUTF8Length >= kVideoCaptureUniqueNameLength) { + RTC_LOG(LS_INFO) << "Device name too long"; + return -1; + } + RTC_LOG(LS_INFO) << "CreateCapabilityMap called for device " + << deviceUniqueIdUTF8; + + /* detect /dev/video [0-63] entries */ + for (int n = 0; n < 64; ++n) { + snprintf(device, sizeof(device), "/dev/video%d", n); + fd = open(device, O_RDONLY); + if (fd == -1) + continue; + + // query device capabilities + struct v4l2_capability cap; + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) { + // skip devices without video capture capability + if (!(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) { + continue; + } + + if (cap.bus_info[0] != 0) { + if (strncmp(reinterpret_cast(cap.bus_info), + deviceUniqueIdUTF8, + strlen(deviceUniqueIdUTF8)) == 0) { // match with device id + found = true; + break; // fd matches with device unique id supplied + } + } else { // match for device name + if (IsDeviceNameMatches(reinterpret_cast(cap.card), + deviceUniqueIdUTF8)) { + found = true; + break; + } + } + } + close(fd); // close since this is not the matching device + } + + if (!found) { + RTC_LOG(LS_INFO) << "no matching device found"; + return -1; + } + + // now fd will point to the matching device + // reset old capability list. 
+ _captureCapabilities.clear(); + + int size = FillCapabilities(fd); + close(fd); + + // Store the new used device name + _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length; + _lastUsedDeviceName = reinterpret_cast( + realloc(_lastUsedDeviceName, _lastUsedDeviceNameLength + 1)); + memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, + _lastUsedDeviceNameLength + 1); + + RTC_LOG(LS_INFO) << "CreateCapabilityMap " << _captureCapabilities.size(); + + return size; +} + +int32_t DeviceInfoV4l2::DisplayCaptureSettingsDialogBox( + const char* /*deviceUniqueIdUTF8*/, + const char* /*dialogTitleUTF8*/, + void* /*parentWindow*/, + uint32_t /*positionX*/, + uint32_t /*positionY*/) { + return -1; +} + +bool DeviceInfoV4l2::IsDeviceNameMatches(const char* name, + const char* deviceUniqueIdUTF8) { + if (strncmp(deviceUniqueIdUTF8, name, strlen(name)) == 0) + return true; + return false; +} + +int32_t DeviceInfoV4l2::FillCapabilities(int fd) { + // set image format + struct v4l2_format video_fmt; + memset(&video_fmt, 0, sizeof(struct v4l2_format)); + + video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + video_fmt.fmt.pix.sizeimage = 0; + + int totalFmts = 4; + unsigned int videoFormats[] = {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_YUV420, + V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_UYVY}; + + int sizes = 13; + unsigned int size[][2] = {{128, 96}, {160, 120}, {176, 144}, {320, 240}, + {352, 288}, {640, 480}, {704, 576}, {800, 600}, + {960, 720}, {1280, 720}, {1024, 768}, {1440, 1080}, + {1920, 1080}}; + + for (int fmts = 0; fmts < totalFmts; fmts++) { + for (int i = 0; i < sizes; i++) { + video_fmt.fmt.pix.pixelformat = videoFormats[fmts]; + video_fmt.fmt.pix.width = size[i][0]; + video_fmt.fmt.pix.height = size[i][1]; + + if (ioctl(fd, VIDIOC_TRY_FMT, &video_fmt) >= 0) { + if ((video_fmt.fmt.pix.width == size[i][0]) && + (video_fmt.fmt.pix.height == size[i][1])) { + VideoCaptureCapability cap; + cap.width = video_fmt.fmt.pix.width; + cap.height = video_fmt.fmt.pix.height; + if (videoFormats[fmts] == V4L2_PIX_FMT_YUYV) { + cap.videoType = VideoType::kYUY2; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_YUV420) { + cap.videoType = VideoType::kI420; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_MJPEG) { + cap.videoType = VideoType::kMJPEG; + } else if (videoFormats[fmts] == V4L2_PIX_FMT_UYVY) { + cap.videoType = VideoType::kUYVY; + } + + // get fps of current camera mode + // V4l2 does not have a stable method of knowing so we just guess. + if (cap.width >= 800 && cap.videoType != VideoType::kMJPEG) { + cap.maxFPS = 15; + } else { + cap.maxFPS = 30; + } + + _captureCapabilities.push_back(cap); + RTC_LOG(LS_VERBOSE) << "Camera capability, width:" << cap.width + << " height:" << cap.height + << " type:" << static_cast(cap.videoType) + << " fps:" << cap.maxFPS; + } + } + } + } + + RTC_LOG(LS_INFO) << "CreateCapabilityMap " << _captureCapabilities.size(); + return _captureCapabilities.size(); +} + +} // namespace videocapturemodule +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_v4l2.h b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_v4l2.h new file mode 100644 index 0000000000..fb95a6020d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_v4l2.h @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_V4L2_H_ +#define MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_V4L2_H_ + +#include + +#include "modules/video_capture/device_info_impl.h" + +namespace webrtc { +namespace videocapturemodule { +class DeviceInfoV4l2 : public DeviceInfoImpl { + public: + DeviceInfoV4l2(); + ~DeviceInfoV4l2() override; + uint32_t NumberOfDevices() override; + int32_t GetDeviceName(uint32_t deviceNumber, + char* deviceNameUTF8, + uint32_t deviceNameLength, + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8 = 0, + uint32_t productUniqueIdUTF8Length = 0) override; + /* + * Fills the membervariable _captureCapabilities with capabilites for the + * given device name. + */ + int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override + RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock); + int32_t DisplayCaptureSettingsDialogBox(const char* /*deviceUniqueIdUTF8*/, + const char* /*dialogTitleUTF8*/, + void* /*parentWindow*/, + uint32_t /*positionX*/, + uint32_t /*positionY*/) override; + int32_t FillCapabilities(int fd) RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock); + int32_t Init() override; + + private: + bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8); +}; +} // namespace videocapturemodule +} // namespace webrtc +#endif // MODULES_VIDEO_CAPTURE_LINUX_DEVICE_INFO_V4L2_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_linux.cc index 10f9713ec3..b6c4017927 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_linux.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_linux.cc @@ -8,8 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/video_capture/linux/video_capture_linux.h" - #include #include #include @@ -26,9 +24,9 @@ #include "api/scoped_refptr.h" #include "media/base/video_common.h" +#include "modules/video_capture/linux/video_capture_v4l2.h" #include "modules/video_capture/video_capture.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" namespace webrtc { namespace videocapturemodule { @@ -41,393 +39,5 @@ rtc::scoped_refptr VideoCaptureImpl::Create( return implementation; } - -VideoCaptureModuleV4L2::VideoCaptureModuleV4L2() - : VideoCaptureImpl(), - _deviceId(-1), - _deviceFd(-1), - _buffersAllocatedByDevice(-1), - _currentWidth(-1), - _currentHeight(-1), - _currentFrameRate(-1), - _captureStarted(false), - _captureVideoType(VideoType::kI420), - _pool(NULL) {} - -int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8) { - int len = strlen((const char*)deviceUniqueIdUTF8); - _deviceUniqueId = new (std::nothrow) char[len + 1]; - if (_deviceUniqueId) { - memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1); - } - - int fd; - char device[32]; - bool found = false; - - /* detect /dev/video [0-63] entries */ - int n; - for (n = 0; n < 64; n++) { - sprintf(device, "/dev/video%d", n); - if ((fd = open(device, O_RDONLY)) != -1) { - // query device capabilities - struct v4l2_capability cap; - if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) { - if (cap.bus_info[0] != 0) { - if (strncmp((const char*)cap.bus_info, - (const char*)deviceUniqueIdUTF8, - strlen((const char*)deviceUniqueIdUTF8)) == - 0) // match with device id - { - close(fd); - found = true; - break; // fd matches with device unique id supplied - } - } - } - close(fd); // close since this is not the matching device - } - } - if (!found) { - RTC_LOG(LS_INFO) << "no matching device found"; - return -1; - } - _deviceId = n; // store the device id - return 0; -} - -VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() { - StopCapture(); - if (_deviceFd != -1) - close(_deviceFd); -} - -int32_t VideoCaptureModuleV4L2::StartCapture( - const VideoCaptureCapability& capability) { - if (_captureStarted) { - if (capability.width == _currentWidth && - capability.height == _currentHeight && - _captureVideoType == capability.videoType) { - return 0; - } else { - StopCapture(); - } - } - - MutexLock lock(&capture_lock_); - // first open /dev/video device - char device[20]; - sprintf(device, "/dev/video%d", (int)_deviceId); - - if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) { - RTC_LOG(LS_INFO) << "error in opening " << device << " errono = " << errno; - return -1; - } - - // Supported video formats in preferred order. - // If the requested resolution is larger than VGA, we prefer MJPEG. Go for - // I420 otherwise. - const int nFormats = 5; - unsigned int fmts[nFormats]; - if (capability.width > 640 || capability.height > 480) { - fmts[0] = V4L2_PIX_FMT_MJPEG; - fmts[1] = V4L2_PIX_FMT_YUV420; - fmts[2] = V4L2_PIX_FMT_YUYV; - fmts[3] = V4L2_PIX_FMT_UYVY; - fmts[4] = V4L2_PIX_FMT_JPEG; - } else { - fmts[0] = V4L2_PIX_FMT_YUV420; - fmts[1] = V4L2_PIX_FMT_YUYV; - fmts[2] = V4L2_PIX_FMT_UYVY; - fmts[3] = V4L2_PIX_FMT_MJPEG; - fmts[4] = V4L2_PIX_FMT_JPEG; - } - - // Enumerate image formats. 
- struct v4l2_fmtdesc fmt; - int fmtsIdx = nFormats; - memset(&fmt, 0, sizeof(fmt)); - fmt.index = 0; - fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - RTC_LOG(LS_INFO) << "Video Capture enumerats supported image formats:"; - while (ioctl(_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) { - RTC_LOG(LS_INFO) << " { pixelformat = " - << cricket::GetFourccName(fmt.pixelformat) - << ", description = '" << fmt.description << "' }"; - // Match the preferred order. - for (int i = 0; i < nFormats; i++) { - if (fmt.pixelformat == fmts[i] && i < fmtsIdx) - fmtsIdx = i; - } - // Keep enumerating. - fmt.index++; - } - - if (fmtsIdx == nFormats) { - RTC_LOG(LS_INFO) << "no supporting video formats found"; - return -1; - } else { - RTC_LOG(LS_INFO) << "We prefer format " - << cricket::GetFourccName(fmts[fmtsIdx]); - } - - struct v4l2_format video_fmt; - memset(&video_fmt, 0, sizeof(struct v4l2_format)); - video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - video_fmt.fmt.pix.sizeimage = 0; - video_fmt.fmt.pix.width = capability.width; - video_fmt.fmt.pix.height = capability.height; - video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx]; - - if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) - _captureVideoType = VideoType::kYUY2; - else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420) - _captureVideoType = VideoType::kI420; - else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY) - _captureVideoType = VideoType::kUYVY; - else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG || - video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG) - _captureVideoType = VideoType::kMJPEG; - - // set format and frame size now - if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) { - RTC_LOG(LS_INFO) << "error in VIDIOC_S_FMT, errno = " << errno; - return -1; - } - - // initialize current width and height - _currentWidth = video_fmt.fmt.pix.width; - _currentHeight = video_fmt.fmt.pix.height; - - // Trying to set frame rate, before check driver capability. - bool driver_framerate_support = true; - struct v4l2_streamparm streamparms; - memset(&streamparms, 0, sizeof(streamparms)); - streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) { - RTC_LOG(LS_INFO) << "error in VIDIOC_G_PARM errno = " << errno; - driver_framerate_support = false; - // continue - } else { - // check the capability flag is set to V4L2_CAP_TIMEPERFRAME. - if (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { - // driver supports the feature. Set required framerate. - memset(&streamparms, 0, sizeof(streamparms)); - streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - streamparms.parm.capture.timeperframe.numerator = 1; - streamparms.parm.capture.timeperframe.denominator = capability.maxFPS; - if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) { - RTC_LOG(LS_INFO) << "Failed to set the framerate. errno=" << errno; - driver_framerate_support = false; - } else { - _currentFrameRate = capability.maxFPS; - } - } - } - // If driver doesn't support framerate control, need to hardcode. - // Hardcoding the value based on the frame size. 
- if (!driver_framerate_support) { - if (_currentWidth >= 800 && _captureVideoType != VideoType::kMJPEG) { - _currentFrameRate = 15; - } else { - _currentFrameRate = 30; - } - } - - if (!AllocateVideoBuffers()) { - RTC_LOG(LS_INFO) << "failed to allocate video capture buffers"; - return -1; - } - - // start capture thread; - if (_captureThread.empty()) { - quit_ = false; - _captureThread = rtc::PlatformThread::SpawnJoinable( - [this] { - while (CaptureProcess()) { - } - }, - "CaptureThread", - rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kHigh)); - } - - // Needed to start UVC camera - from the uvcview application - enum v4l2_buf_type type; - type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1) { - RTC_LOG(LS_INFO) << "Failed to turn on stream"; - return -1; - } - - _captureStarted = true; - return 0; -} - -int32_t VideoCaptureModuleV4L2::StopCapture() { - if (!_captureThread.empty()) { - { - MutexLock lock(&capture_lock_); - quit_ = true; - } - // Make sure the capture thread stops using the mutex. - _captureThread.Finalize(); - } - - MutexLock lock(&capture_lock_); - if (_captureStarted) { - _captureStarted = false; - - DeAllocateVideoBuffers(); - close(_deviceFd); - _deviceFd = -1; - } - - return 0; -} - -// critical section protected by the caller - -bool VideoCaptureModuleV4L2::AllocateVideoBuffers() { - struct v4l2_requestbuffers rbuffer; - memset(&rbuffer, 0, sizeof(v4l2_requestbuffers)); - - rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - rbuffer.memory = V4L2_MEMORY_MMAP; - rbuffer.count = kNoOfV4L2Bufffers; - - if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0) { - RTC_LOG(LS_INFO) << "Could not get buffers from device. errno = " << errno; - return false; - } - - if (rbuffer.count > kNoOfV4L2Bufffers) - rbuffer.count = kNoOfV4L2Bufffers; - - _buffersAllocatedByDevice = rbuffer.count; - - // Map the buffers - _pool = new Buffer[rbuffer.count]; - - for (unsigned int i = 0; i < rbuffer.count; i++) { - struct v4l2_buffer buffer; - memset(&buffer, 0, sizeof(v4l2_buffer)); - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - buffer.index = i; - - if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0) { - return false; - } - - _pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, - MAP_SHARED, _deviceFd, buffer.m.offset); - - if (MAP_FAILED == _pool[i].start) { - for (unsigned int j = 0; j < i; j++) - munmap(_pool[j].start, _pool[j].length); - return false; - } - - _pool[i].length = buffer.length; - - if (ioctl(_deviceFd, VIDIOC_QBUF, &buffer) < 0) { - return false; - } - } - return true; -} - -bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers() { - // unmap buffers - for (int i = 0; i < _buffersAllocatedByDevice; i++) - munmap(_pool[i].start, _pool[i].length); - - delete[] _pool; - - // turn off stream - enum v4l2_buf_type type; - type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0) { - RTC_LOG(LS_INFO) << "VIDIOC_STREAMOFF error. errno: " << errno; - } - - return true; -} - -bool VideoCaptureModuleV4L2::CaptureStarted() { - return _captureStarted; -} - -bool VideoCaptureModuleV4L2::CaptureProcess() { - int retVal = 0; - fd_set rSet; - struct timeval timeout; - - FD_ZERO(&rSet); - FD_SET(_deviceFd, &rSet); - timeout.tv_sec = 1; - timeout.tv_usec = 0; - - // _deviceFd written only in StartCapture, when this thread isn't running. 
- retVal = select(_deviceFd + 1, &rSet, NULL, NULL, &timeout); - if (retVal < 0 && errno != EINTR) // continue if interrupted - { - // select failed - return false; - } else if (retVal == 0) { - // select timed out - return true; - } else if (!FD_ISSET(_deviceFd, &rSet)) { - // not event on camera handle - return true; - } - - { - MutexLock lock(&capture_lock_); - - if (quit_) { - return false; - } - - if (_captureStarted) { - struct v4l2_buffer buf; - memset(&buf, 0, sizeof(struct v4l2_buffer)); - buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buf.memory = V4L2_MEMORY_MMAP; - // dequeue a buffer - repeat until dequeued properly! - while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0) { - if (errno != EINTR) { - RTC_LOG(LS_INFO) << "could not sync on a buffer on device " - << strerror(errno); - return true; - } - } - VideoCaptureCapability frameInfo; - frameInfo.width = _currentWidth; - frameInfo.height = _currentHeight; - frameInfo.videoType = _captureVideoType; - - // convert to to I420 if needed - IncomingFrame((unsigned char*)_pool[buf.index].start, buf.bytesused, - frameInfo); - // enqueue the buffer again - if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1) { - RTC_LOG(LS_INFO) << "Failed to enqueue capture buffer"; - } - } - } - usleep(0); - return true; -} - -int32_t VideoCaptureModuleV4L2::CaptureSettings( - VideoCaptureCapability& settings) { - settings.width = _currentWidth; - settings.height = _currentHeight; - settings.maxFPS = _currentFrameRate; - settings.videoType = _captureVideoType; - - return 0; -} } // namespace videocapturemodule } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_linux.h b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_linux.h deleted file mode 100644 index fa06d72b8d..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_linux.h +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_ -#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_ - -#include -#include - -#include - -#include "modules/video_capture/video_capture_defines.h" -#include "modules/video_capture/video_capture_impl.h" -#include "rtc_base/platform_thread.h" -#include "rtc_base/synchronization/mutex.h" - -namespace webrtc { -namespace videocapturemodule { -class VideoCaptureModuleV4L2 : public VideoCaptureImpl { - public: - VideoCaptureModuleV4L2(); - ~VideoCaptureModuleV4L2() override; - int32_t Init(const char* deviceUniqueId); - int32_t StartCapture(const VideoCaptureCapability& capability) override; - int32_t StopCapture() override; - bool CaptureStarted() override; - int32_t CaptureSettings(VideoCaptureCapability& settings) override; - - private: - enum { kNoOfV4L2Bufffers = 4 }; - - static void CaptureThread(void*); - bool CaptureProcess(); - bool AllocateVideoBuffers(); - bool DeAllocateVideoBuffers(); - - rtc::PlatformThread _captureThread; - Mutex capture_lock_; - bool quit_ RTC_GUARDED_BY(capture_lock_); - int32_t _deviceId; - int32_t _deviceFd; - - int32_t _buffersAllocatedByDevice; - int32_t _currentWidth; - int32_t _currentHeight; - int32_t _currentFrameRate; - bool _captureStarted; - VideoType _captureVideoType; - struct Buffer { - void* start; - size_t length; - }; - Buffer* _pool; -}; -} // namespace videocapturemodule -} // namespace webrtc - -#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_v4l2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_v4l2.cc new file mode 100644 index 0000000000..2655fbefaa --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_v4l2.cc @@ -0,0 +1,421 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_capture/linux/video_capture_v4l2.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "api/scoped_refptr.h" +#include "media/base/video_common.h" +#include "modules/video_capture/video_capture.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace videocapturemodule { +VideoCaptureModuleV4L2::VideoCaptureModuleV4L2() + : VideoCaptureImpl(), + _deviceId(-1), + _deviceFd(-1), + _buffersAllocatedByDevice(-1), + _currentWidth(-1), + _currentHeight(-1), + _currentFrameRate(-1), + _captureStarted(false), + _captureVideoType(VideoType::kI420), + _pool(NULL) {} + +int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8) { + int len = strlen((const char*)deviceUniqueIdUTF8); + _deviceUniqueId = new (std::nothrow) char[len + 1]; + if (_deviceUniqueId) { + memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1); + } + + int fd; + char device[32]; + bool found = false; + + /* detect /dev/video [0-63] entries */ + int n; + for (n = 0; n < 64; n++) { + snprintf(device, sizeof(device), "/dev/video%d", n); + if ((fd = open(device, O_RDONLY)) != -1) { + // query device capabilities + struct v4l2_capability cap; + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) { + if (cap.bus_info[0] != 0) { + if (strncmp((const char*)cap.bus_info, + (const char*)deviceUniqueIdUTF8, + strlen((const char*)deviceUniqueIdUTF8)) == + 0) { // match with device id + close(fd); + found = true; + break; // fd matches with device unique id supplied + } + } + } + close(fd); // close since this is not the matching device + } + } + if (!found) { + RTC_LOG(LS_INFO) << "no matching device found"; + return -1; + } + _deviceId = n; // store the device id + return 0; +} + +VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() { + StopCapture(); + if (_deviceFd != -1) + close(_deviceFd); +} + +int32_t VideoCaptureModuleV4L2::StartCapture( + const VideoCaptureCapability& capability) { + if (_captureStarted) { + if (capability.width == _currentWidth && + capability.height == _currentHeight && + _captureVideoType == capability.videoType) { + return 0; + } else { + StopCapture(); + } + } + + MutexLock lock(&capture_lock_); + // first open /dev/video device + char device[20]; + snprintf(device, sizeof(device), "/dev/video%d", _deviceId); + + if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) { + RTC_LOG(LS_INFO) << "error in opening " << device << " errono = " << errno; + return -1; + } + + // Supported video formats in preferred order. + // If the requested resolution is larger than VGA, we prefer MJPEG. Go for + // I420 otherwise. + const int nFormats = 5; + unsigned int fmts[nFormats]; + if (capability.width > 640 || capability.height > 480) { + fmts[0] = V4L2_PIX_FMT_MJPEG; + fmts[1] = V4L2_PIX_FMT_YUV420; + fmts[2] = V4L2_PIX_FMT_YUYV; + fmts[3] = V4L2_PIX_FMT_UYVY; + fmts[4] = V4L2_PIX_FMT_JPEG; + } else { + fmts[0] = V4L2_PIX_FMT_YUV420; + fmts[1] = V4L2_PIX_FMT_YUYV; + fmts[2] = V4L2_PIX_FMT_UYVY; + fmts[3] = V4L2_PIX_FMT_MJPEG; + fmts[4] = V4L2_PIX_FMT_JPEG; + } + + // Enumerate image formats. 
+ struct v4l2_fmtdesc fmt; + int fmtsIdx = nFormats; + memset(&fmt, 0, sizeof(fmt)); + fmt.index = 0; + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + RTC_LOG(LS_INFO) << "Video Capture enumerats supported image formats:"; + while (ioctl(_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) { + RTC_LOG(LS_INFO) << " { pixelformat = " + << cricket::GetFourccName(fmt.pixelformat) + << ", description = '" << fmt.description << "' }"; + // Match the preferred order. + for (int i = 0; i < nFormats; i++) { + if (fmt.pixelformat == fmts[i] && i < fmtsIdx) + fmtsIdx = i; + } + // Keep enumerating. + fmt.index++; + } + + if (fmtsIdx == nFormats) { + RTC_LOG(LS_INFO) << "no supporting video formats found"; + return -1; + } else { + RTC_LOG(LS_INFO) << "We prefer format " + << cricket::GetFourccName(fmts[fmtsIdx]); + } + + struct v4l2_format video_fmt; + memset(&video_fmt, 0, sizeof(struct v4l2_format)); + video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + video_fmt.fmt.pix.sizeimage = 0; + video_fmt.fmt.pix.width = capability.width; + video_fmt.fmt.pix.height = capability.height; + video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx]; + + if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) + _captureVideoType = VideoType::kYUY2; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420) + _captureVideoType = VideoType::kI420; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY) + _captureVideoType = VideoType::kUYVY; + else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG || + video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG) + _captureVideoType = VideoType::kMJPEG; + + // set format and frame size now + if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) { + RTC_LOG(LS_INFO) << "error in VIDIOC_S_FMT, errno = " << errno; + return -1; + } + + // initialize current width and height + _currentWidth = video_fmt.fmt.pix.width; + _currentHeight = video_fmt.fmt.pix.height; + + // Trying to set frame rate, before check driver capability. + bool driver_framerate_support = true; + struct v4l2_streamparm streamparms; + memset(&streamparms, 0, sizeof(streamparms)); + streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) { + RTC_LOG(LS_INFO) << "error in VIDIOC_G_PARM errno = " << errno; + driver_framerate_support = false; + // continue + } else { + // check the capability flag is set to V4L2_CAP_TIMEPERFRAME. + if (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { + // driver supports the feature. Set required framerate. + memset(&streamparms, 0, sizeof(streamparms)); + streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + streamparms.parm.capture.timeperframe.numerator = 1; + streamparms.parm.capture.timeperframe.denominator = capability.maxFPS; + if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) { + RTC_LOG(LS_INFO) << "Failed to set the framerate. errno=" << errno; + driver_framerate_support = false; + } else { + _currentFrameRate = capability.maxFPS; + } + } + } + // If driver doesn't support framerate control, need to hardcode. + // Hardcoding the value based on the frame size. 
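// Same heuristic as DeviceInfoV4l2::FillCapabilities(): assume 15 fps for raw (non-MJPEG)
// modes at widths of 800 pixels or more, where bus bandwidth is typically the limiting
// factor, and 30 fps otherwise.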
+ if (!driver_framerate_support) { + if (_currentWidth >= 800 && _captureVideoType != VideoType::kMJPEG) { + _currentFrameRate = 15; + } else { + _currentFrameRate = 30; + } + } + + if (!AllocateVideoBuffers()) { + RTC_LOG(LS_INFO) << "failed to allocate video capture buffers"; + return -1; + } + + // start capture thread; + if (_captureThread.empty()) { + quit_ = false; + _captureThread = rtc::PlatformThread::SpawnJoinable( + [this] { + while (CaptureProcess()) { + } + }, + "CaptureThread", + rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kHigh)); + } + + // Needed to start UVC camera - from the uvcview application + enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1) { + RTC_LOG(LS_INFO) << "Failed to turn on stream"; + return -1; + } + + _captureStarted = true; + return 0; +} + +int32_t VideoCaptureModuleV4L2::StopCapture() { + if (!_captureThread.empty()) { + { + MutexLock lock(&capture_lock_); + quit_ = true; + } + // Make sure the capture thread stops using the mutex. + _captureThread.Finalize(); + } + + MutexLock lock(&capture_lock_); + if (_captureStarted) { + _captureStarted = false; + + DeAllocateVideoBuffers(); + close(_deviceFd); + _deviceFd = -1; + } + + return 0; +} + +// critical section protected by the caller + +bool VideoCaptureModuleV4L2::AllocateVideoBuffers() { + struct v4l2_requestbuffers rbuffer; + memset(&rbuffer, 0, sizeof(v4l2_requestbuffers)); + + rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + rbuffer.memory = V4L2_MEMORY_MMAP; + rbuffer.count = kNoOfV4L2Bufffers; + + if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0) { + RTC_LOG(LS_INFO) << "Could not get buffers from device. errno = " << errno; + return false; + } + + if (rbuffer.count > kNoOfV4L2Bufffers) + rbuffer.count = kNoOfV4L2Bufffers; + + _buffersAllocatedByDevice = rbuffer.count; + + // Map the buffers + _pool = new Buffer[rbuffer.count]; + + for (unsigned int i = 0; i < rbuffer.count; i++) { + struct v4l2_buffer buffer; + memset(&buffer, 0, sizeof(v4l2_buffer)); + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + buffer.index = i; + + if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0) { + return false; + } + + _pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, + MAP_SHARED, _deviceFd, buffer.m.offset); + + if (MAP_FAILED == _pool[i].start) { + for (unsigned int j = 0; j < i; j++) + munmap(_pool[j].start, _pool[j].length); + return false; + } + + _pool[i].length = buffer.length; + + if (ioctl(_deviceFd, VIDIOC_QBUF, &buffer) < 0) { + return false; + } + } + return true; +} + +bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers() { + // unmap buffers + for (int i = 0; i < _buffersAllocatedByDevice; i++) + munmap(_pool[i].start, _pool[i].length); + + delete[] _pool; + + // turn off stream + enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0) { + RTC_LOG(LS_INFO) << "VIDIOC_STREAMOFF error. errno: " << errno; + } + + return true; +} + +bool VideoCaptureModuleV4L2::CaptureStarted() { + return _captureStarted; +} + +bool VideoCaptureModuleV4L2::CaptureProcess() { + int retVal = 0; + fd_set rSet; + struct timeval timeout; + + FD_ZERO(&rSet); + FD_SET(_deviceFd, &rSet); + timeout.tv_sec = 1; + timeout.tv_usec = 0; + + // _deviceFd written only in StartCapture, when this thread isn't running. 
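// select() waits at most one second for the fd to become readable, and quit_ is re-checked
// under capture_lock_ right after it returns, so a StopCapture() request is normally
// noticed within about one timeout interval even when no frames arrive.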
+ retVal = select(_deviceFd + 1, &rSet, NULL, NULL, &timeout); + + { + MutexLock lock(&capture_lock_); + + if (quit_) { + return false; + } + + if (retVal < 0 && errno != EINTR) { // continue if interrupted + // select failed + return false; + } else if (retVal == 0) { + // select timed out + return true; + } else if (!FD_ISSET(_deviceFd, &rSet)) { + // not event on camera handle + return true; + } + + if (_captureStarted) { + struct v4l2_buffer buf; + memset(&buf, 0, sizeof(struct v4l2_buffer)); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + // dequeue a buffer - repeat until dequeued properly! + while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0) { + if (errno != EINTR) { + RTC_LOG(LS_INFO) << "could not sync on a buffer on device " + << strerror(errno); + return true; + } + } + VideoCaptureCapability frameInfo; + frameInfo.width = _currentWidth; + frameInfo.height = _currentHeight; + frameInfo.videoType = _captureVideoType; + + // convert to to I420 if needed + IncomingFrame(reinterpret_cast(_pool[buf.index].start), + buf.bytesused, frameInfo); + // enqueue the buffer again + if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1) { + RTC_LOG(LS_INFO) << "Failed to enqueue capture buffer"; + } + } + } + usleep(0); + return true; +} + +int32_t VideoCaptureModuleV4L2::CaptureSettings( + VideoCaptureCapability& settings) { + settings.width = _currentWidth; + settings.height = _currentHeight; + settings.maxFPS = _currentFrameRate; + settings.videoType = _captureVideoType; + + return 0; +} +} // namespace videocapturemodule +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_v4l2.h b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_v4l2.h new file mode 100644 index 0000000000..65e89e2daa --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_v4l2.h @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_V4L2_H_ +#define MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_V4L2_H_ + +#include +#include + +#include + +#include "modules/video_capture/video_capture_defines.h" +#include "modules/video_capture/video_capture_impl.h" +#include "rtc_base/platform_thread.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { +namespace videocapturemodule { +class VideoCaptureModuleV4L2 : public VideoCaptureImpl { + public: + VideoCaptureModuleV4L2(); + ~VideoCaptureModuleV4L2() override; + int32_t Init(const char* deviceUniqueId); + int32_t StartCapture(const VideoCaptureCapability& capability) override; + int32_t StopCapture() override; + bool CaptureStarted() override; + int32_t CaptureSettings(VideoCaptureCapability& settings) override; + + private: + enum { kNoOfV4L2Bufffers = 4 }; + + static void CaptureThread(void*); + bool CaptureProcess(); + bool AllocateVideoBuffers(); + bool DeAllocateVideoBuffers(); + + rtc::PlatformThread _captureThread; + Mutex capture_lock_; + bool quit_ RTC_GUARDED_BY(capture_lock_); + int32_t _deviceId; + int32_t _deviceFd; + + int32_t _buffersAllocatedByDevice; + int32_t _currentWidth; + int32_t _currentHeight; + int32_t _currentFrameRate; + bool _captureStarted; + VideoType _captureVideoType; + struct Buffer { + void* start; + size_t length; + }; + Buffer* _pool; +}; +} // namespace videocapturemodule +} // namespace webrtc + +#endif // MODULES_VIDEO_CAPTURE_LINUX_VIDEO_CAPTURE_V4L2_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture.h b/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture.h index 0f60092d72..3bbe217cba 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture.h @@ -44,7 +44,7 @@ class VideoCaptureModule : public rtc::RefCountInterface { // Gets the capabilities of the named device. virtual int32_t GetCapability(const char* deviceUniqueIdUTF8, - const uint32_t deviceCapabilityNumber, + uint32_t deviceCapabilityNumber, VideoCaptureCapability& capability) = 0; // Gets clockwise angle the captured frames should be rotated in order diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture_factory_null.cc b/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture_factory_null.cc new file mode 100644 index 0000000000..7808d19851 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture_factory_null.cc @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_capture/video_capture_impl.h" + +namespace webrtc { +namespace videocapturemodule { + +// static +VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() { + return nullptr; +} + +rtc::scoped_refptr VideoCaptureImpl::Create( + const char* device_id) { + return nullptr; +} + +} // namespace videocapturemodule +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture_impl.cc index 6619d15924..234c2e131e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture_impl.cc @@ -18,7 +18,6 @@ #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/video_capture/video_capture_config.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include "third_party/libyuv/include/libyuv.h" @@ -153,8 +152,6 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame, // Setting absolute height (in case it was negative). // In Windows, the image starts bottom left, instead of top left. // Setting a negative source height, inverts the image (within LibYuv). - - // TODO(nisse): Use a pool? rtc::scoped_refptr buffer = I420Buffer::Create( target_width, target_height, stride_y, stride_uv, stride_uv); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.cc index 2f1026b4df..43dcf96ab7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.cc @@ -15,28 +15,70 @@ #include #include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/svc/scalability_mode_util.h" #include "modules/video_coding/svc/scalable_video_controller.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" namespace webrtc { +namespace { +absl::optional BuildScalabilityMode(int num_temporal_layers, + int num_spatial_layers) { + char name[20]; + rtc::SimpleStringBuilder ss(name); + ss << "L" << num_spatial_layers << "T" << num_temporal_layers; + if (num_spatial_layers > 1) { + ss << "_KEY"; + } + + return ScalabilityModeFromString(name); +} +} // namespace -bool SetAv1SvcConfig(VideoCodec& video_codec) { +absl::InlinedVector +LibaomAv1EncoderSupportedScalabilityModes() { + absl::InlinedVector scalability_modes; + for (ScalabilityMode scalability_mode : kAllScalabilityModes) { + if (ScalabilityStructureConfig(scalability_mode) != absl::nullopt) { + scalability_modes.push_back(scalability_mode); + } + } + return scalability_modes; +} + +bool LibaomAv1EncoderSupportsScalabilityMode(ScalabilityMode scalability_mode) { + // For libaom AV1, the scalability mode is supported if we can create the + // scalability structure. 
+ return ScalabilityStructureConfig(scalability_mode) != absl::nullopt; +} + +bool SetAv1SvcConfig(VideoCodec& video_codec, + int num_temporal_layers, + int num_spatial_layers) { RTC_DCHECK_EQ(video_codec.codecType, kVideoCodecAV1); - absl::string_view scalability_mode = video_codec.ScalabilityMode(); - if (scalability_mode.empty()) { - RTC_LOG(LS_WARNING) << "Scalability mode is not set, using 'NONE'."; - scalability_mode = "NONE"; + absl::optional scalability_mode = + video_codec.GetScalabilityMode(); + if (!scalability_mode.has_value()) { + scalability_mode = + BuildScalabilityMode(num_temporal_layers, num_spatial_layers); + if (!scalability_mode) { + RTC_LOG(LS_WARNING) << "Scalability mode is not set, using 'L1T1'."; + scalability_mode = ScalabilityMode::kL1T1; + } } std::unique_ptr structure = - CreateScalabilityStructure(scalability_mode); + CreateScalabilityStructure(*scalability_mode); if (structure == nullptr) { - RTC_LOG(LS_WARNING) << "Failed to create structure " << scalability_mode; + RTC_LOG(LS_WARNING) << "Failed to create structure " + << static_cast(*scalability_mode); return false; } + video_codec.SetScalabilityMode(*scalability_mode); + ScalableVideoController::StreamLayersConfig info = structure->StreamConfig(); for (int sl_idx = 0; sl_idx < info.num_spatial_layers; ++sl_idx) { SpatialLayer& spatial_layer = video_codec.spatialLayers[sl_idx]; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.h index 15d94e03a9..05b886b9f4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.h @@ -10,12 +10,22 @@ #ifndef MODULES_VIDEO_CODING_CODECS_AV1_AV1_SVC_CONFIG_H_ #define MODULES_VIDEO_CODING_CODECS_AV1_AV1_SVC_CONFIG_H_ +#include + +#include "absl/container/inlined_vector.h" #include "api/video_codecs/video_codec.h" namespace webrtc { +absl::InlinedVector +LibaomAv1EncoderSupportedScalabilityModes(); + +bool LibaomAv1EncoderSupportsScalabilityMode(ScalabilityMode scalability_mode); + // Fills `video_codec.spatialLayers` using other members. 
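// The two added parameters are only a fallback: when `video_codec` carries no explicit
// scalability mode they are turned into an "L<spatial>T<temporal>" name ("_KEY" appended
// for more than one spatial layer), as done by BuildScalabilityMode() in the .cc file.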
-bool SetAv1SvcConfig(VideoCodec& video_codec); +bool SetAv1SvcConfig(VideoCodec& video_codec, + int num_temporal_layers, + int num_spatial_layers); } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/dav1d_decoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/dav1d_decoder.cc index a5e4784839..a2cd6d868c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/dav1d_decoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/dav1d_decoder.cc @@ -14,12 +14,13 @@ #include "api/scoped_refptr.h" #include "api/video/encoded_image.h" -#include "api/video/i420_buffer.h" -#include "common_video/include/video_frame_buffer_pool.h" +#include "api/video/video_frame_buffer.h" +#include "common_video/include/video_frame_buffer.h" #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/logging.h" #include "third_party/dav1d/libdav1d/include/dav1d/dav1d.h" #include "third_party/libyuv/include/libyuv/convert.h" +#include "third_party/libyuv/include/libyuv/planar_functions.h" namespace webrtc { namespace { @@ -43,7 +44,6 @@ class Dav1dDecoder : public VideoDecoder { const char* ImplementationName() const override; private: - VideoFrameBufferPool buffer_pool_; Dav1dContext* context_ = nullptr; DecodedImageCallback* decode_complete_callback_ = nullptr; }; @@ -58,11 +58,13 @@ class ScopedDav1dData { Dav1dData data_ = {}; }; -class ScopedDav1dPicture { +class ScopedDav1dPicture + : public rtc::RefCountedNonVirtual { public: ~ScopedDav1dPicture() { dav1d_picture_unref(&picture_); } Dav1dPicture& Picture() { return picture_; } + using rtc::RefCountedNonVirtual::HasOneRef; private: Dav1dPicture picture_ = {}; @@ -73,8 +75,7 @@ constexpr char kDav1dName[] = "dav1d"; // Calling `dav1d_data_wrap` requires a `free_callback` to be registered. void NullFreeCallback(const uint8_t* buffer, void* opaque) {} -Dav1dDecoder::Dav1dDecoder() - : buffer_pool_(/*zero_initialize=*/false, /*max_number_of_buffers=*/150) {} +Dav1dDecoder::Dav1dDecoder() = default; Dav1dDecoder::~Dav1dDecoder() { Release(); @@ -103,7 +104,6 @@ int32_t Dav1dDecoder::Release() { if (context_ != nullptr) { return WEBRTC_VIDEO_CODEC_MEMORY; } - buffer_pool_.Release(); return WEBRTC_VIDEO_CODEC_OK; } @@ -138,8 +138,9 @@ int32_t Dav1dDecoder::Decode(const EncodedImage& encoded_image, return WEBRTC_VIDEO_CODEC_ERROR; } - ScopedDav1dPicture scoped_dav1d_picture; - Dav1dPicture& dav1d_picture = scoped_dav1d_picture.Picture(); + rtc::scoped_refptr scoped_dav1d_picture( + new ScopedDav1dPicture{}); + Dav1dPicture& dav1d_picture = scoped_dav1d_picture->Picture(); if (int get_picture_res = dav1d_get_picture(context_, &dav1d_picture)) { RTC_LOG(LS_WARNING) << "Dav1dDecoder::Decode getting picture failed with error code " @@ -147,36 +148,43 @@ int32_t Dav1dDecoder::Decode(const EncodedImage& encoded_image, return WEBRTC_VIDEO_CODEC_ERROR; } - // Only accept I420 pixel format and 8 bit depth. - if (dav1d_picture.p.layout != DAV1D_PIXEL_LAYOUT_I420 || - dav1d_picture.p.bpc != 8) { + if (dav1d_picture.p.bpc != 8) { + // Only accept 8 bit depth. 
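// 10- and 12-bit pictures are rejected here rather than down-converted; only 8-bit
// I420/I444 output is wrapped and handed to the decode callback further down.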
+ RTC_LOG(LS_ERROR) << "Dav1dDecoder::Decode unhandled bit depth: " + << dav1d_picture.p.bpc; return WEBRTC_VIDEO_CODEC_ERROR; } - rtc::scoped_refptr buffer = - buffer_pool_.CreateI420Buffer(dav1d_picture.p.w, dav1d_picture.p.h); - if (!buffer.get()) { - RTC_LOG(LS_WARNING) - << "Dav1dDecoder::Decode failed to get frame from the buffer pool."; + rtc::scoped_refptr wrapped_buffer; + if (dav1d_picture.p.layout == DAV1D_PIXEL_LAYOUT_I420) { + wrapped_buffer = WrapI420Buffer( + dav1d_picture.p.w, dav1d_picture.p.h, + static_cast(dav1d_picture.data[0]), dav1d_picture.stride[0], + static_cast(dav1d_picture.data[1]), dav1d_picture.stride[1], + static_cast(dav1d_picture.data[2]), dav1d_picture.stride[1], + // To keep |scoped_dav1d_picture.Picture()| alive + [scoped_dav1d_picture] {}); + } else if (dav1d_picture.p.layout == DAV1D_PIXEL_LAYOUT_I444) { + wrapped_buffer = WrapI444Buffer( + dav1d_picture.p.w, dav1d_picture.p.h, + static_cast(dav1d_picture.data[0]), dav1d_picture.stride[0], + static_cast(dav1d_picture.data[1]), dav1d_picture.stride[1], + static_cast(dav1d_picture.data[2]), dav1d_picture.stride[1], + // To keep |scoped_dav1d_picture.Picture()| alive + [scoped_dav1d_picture] {}); + } else { + // Only accept I420 or I444 pixel format. + RTC_LOG(LS_ERROR) << "Dav1dDecoder::Decode unhandled pixel layout: " + << dav1d_picture.p.layout; return WEBRTC_VIDEO_CODEC_ERROR; } - uint8_t* y_data = static_cast(dav1d_picture.data[0]); - uint8_t* u_data = static_cast(dav1d_picture.data[1]); - uint8_t* v_data = static_cast(dav1d_picture.data[2]); - int y_stride = dav1d_picture.stride[0]; - int uv_stride = dav1d_picture.stride[1]; - libyuv::I420Copy(y_data, y_stride, // - u_data, uv_stride, // - v_data, uv_stride, // - buffer->MutableDataY(), buffer->StrideY(), // - buffer->MutableDataU(), buffer->StrideU(), // - buffer->MutableDataV(), buffer->StrideV(), // - dav1d_picture.p.w, // - dav1d_picture.p.h); // + if (!wrapped_buffer.get()) { + return WEBRTC_VIDEO_CODEC_ERROR; + } VideoFrame decoded_frame = VideoFrame::Builder() - .set_video_frame_buffer(buffer) + .set_video_frame_buffer(wrapped_buffer) .set_timestamp_rtp(encoded_image.Timestamp()) .set_ntp_time_ms(encoded_image.ntp_time_ms_) .set_color_space(encoded_image.ColorSpace()) diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc deleted file mode 100644 index 2405e2c52c..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h" - -#include - -#include - -#include "absl/types/optional.h" -#include "api/scoped_refptr.h" -#include "api/video/encoded_image.h" -#include "api/video/i420_buffer.h" -#include "api/video_codecs/video_decoder.h" -#include "common_video/include/video_frame_buffer_pool.h" -#include "modules/video_coding/include/video_error_codes.h" -#include "rtc_base/logging.h" -#include "third_party/libaom/source/libaom/aom/aom_decoder.h" -#include "third_party/libaom/source/libaom/aom/aomdx.h" -#include "third_party/libyuv/include/libyuv/convert.h" - -namespace webrtc { -namespace { - -constexpr int kConfigLowBitDepth = 1; // 8-bits per luma/chroma sample. -constexpr int kDecFlags = 0; // 0 signals no post processing. - -class LibaomAv1Decoder final : public VideoDecoder { - public: - LibaomAv1Decoder(); - LibaomAv1Decoder(const LibaomAv1Decoder&) = delete; - LibaomAv1Decoder& operator=(const LibaomAv1Decoder&) = delete; - ~LibaomAv1Decoder(); - - // Implements VideoDecoder. - bool Configure(const Settings& settings) override; - - // Decode an encoded video frame. - int32_t Decode(const EncodedImage& encoded_image, - bool missing_frames, - int64_t render_time_ms) override; - - int32_t RegisterDecodeCompleteCallback( - DecodedImageCallback* callback) override; - - int32_t Release() override; - - DecoderInfo GetDecoderInfo() const override; - const char* ImplementationName() const override; - - private: - aom_codec_ctx_t context_; - bool inited_; - // Pool of memory buffers to store decoded image data for application access. - VideoFrameBufferPool buffer_pool_; - DecodedImageCallback* decode_complete_callback_; -}; - -LibaomAv1Decoder::LibaomAv1Decoder() - : context_(), // Force value initialization instead of default one. - inited_(false), - buffer_pool_(false, /*max_number_of_buffers=*/150), - decode_complete_callback_(nullptr) {} - -LibaomAv1Decoder::~LibaomAv1Decoder() { - Release(); -} - -bool LibaomAv1Decoder::Configure(const Settings& settings) { - aom_codec_dec_cfg_t config = {}; - config.threads = static_cast(settings.number_of_cores()); - config.allow_lowbitdepth = kConfigLowBitDepth; - - aom_codec_err_t ret = - aom_codec_dec_init(&context_, aom_codec_av1_dx(), &config, kDecFlags); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Decoder::Configure returned " << ret - << " on aom_codec_dec_init."; - return false; - } - inited_ = true; - return true; -} - -int32_t LibaomAv1Decoder::Decode(const EncodedImage& encoded_image, - bool missing_frames, - int64_t /*render_time_ms*/) { - if (!inited_) { - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - if (decode_complete_callback_ == nullptr) { - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - - // Decode one video frame. - aom_codec_err_t ret = - aom_codec_decode(&context_, encoded_image.data(), encoded_image.size(), - /*user_priv=*/nullptr); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Decoder::Decode returned " << ret - << " on aom_codec_decode."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - - // Get decoded frame data. - int corrupted_frame = 0; - aom_codec_iter_t iter = nullptr; - while (aom_image_t* decoded_image = aom_codec_get_frame(&context_, &iter)) { - if (aom_codec_control(&context_, AOMD_GET_FRAME_CORRUPTED, - &corrupted_frame)) { - RTC_LOG(LS_WARNING) << "LibaomAv1Decoder::Decode " - "AOM_GET_FRAME_CORRUPTED."; - } - // Check that decoded image format is I420 and has 8-bit depth. 
- if (decoded_image->fmt != AOM_IMG_FMT_I420) { - RTC_LOG(LS_WARNING) << "LibaomAv1Decoder::Decode invalid image format"; - return WEBRTC_VIDEO_CODEC_ERROR; - } - - // Return decoded frame data. - int qp; - ret = aom_codec_control(&context_, AOMD_GET_LAST_QUANTIZER, &qp); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Decoder::Decode returned " << ret - << " on control AOME_GET_LAST_QUANTIZER."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - - // Allocate memory for decoded frame. - rtc::scoped_refptr buffer = - buffer_pool_.CreateI420Buffer(decoded_image->d_w, decoded_image->d_h); - if (!buffer.get()) { - // Pool has too many pending frames. - RTC_LOG(LS_WARNING) << "LibaomAv1Decoder::Decode returned due to lack of" - " space in decoded frame buffer pool."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - - // Copy decoded_image to decoded_frame. - libyuv::I420Copy( - decoded_image->planes[AOM_PLANE_Y], decoded_image->stride[AOM_PLANE_Y], - decoded_image->planes[AOM_PLANE_U], decoded_image->stride[AOM_PLANE_U], - decoded_image->planes[AOM_PLANE_V], decoded_image->stride[AOM_PLANE_V], - buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataU(), - buffer->StrideU(), buffer->MutableDataV(), buffer->StrideV(), - decoded_image->d_w, decoded_image->d_h); - VideoFrame decoded_frame = VideoFrame::Builder() - .set_video_frame_buffer(buffer) - .set_timestamp_rtp(encoded_image.Timestamp()) - .set_ntp_time_ms(encoded_image.ntp_time_ms_) - .set_color_space(encoded_image.ColorSpace()) - .build(); - - decode_complete_callback_->Decoded(decoded_frame, absl::nullopt, - absl::nullopt); - } - return WEBRTC_VIDEO_CODEC_OK; -} - -int32_t LibaomAv1Decoder::RegisterDecodeCompleteCallback( - DecodedImageCallback* decode_complete_callback) { - decode_complete_callback_ = decode_complete_callback; - return WEBRTC_VIDEO_CODEC_OK; -} - -int32_t LibaomAv1Decoder::Release() { - if (aom_codec_destroy(&context_) != AOM_CODEC_OK) { - return WEBRTC_VIDEO_CODEC_MEMORY; - } - buffer_pool_.Release(); - inited_ = false; - return WEBRTC_VIDEO_CODEC_OK; -} - -VideoDecoder::DecoderInfo LibaomAv1Decoder::GetDecoderInfo() const { - DecoderInfo info; - info.implementation_name = "libaom"; - info.is_hardware_accelerated = false; - return info; -} - -const char* LibaomAv1Decoder::ImplementationName() const { - return "libaom"; -} - -} // namespace - -const bool kIsLibaomAv1DecoderSupported = true; - -std::unique_ptr CreateLibaomAv1Decoder() { - return std::make_unique(); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.h deleted file mode 100644 index 9b01285c73..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.h +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_LIBAOM_AV1_DECODER_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_LIBAOM_AV1_DECODER_H_ - -#include - -#include "absl/base/attributes.h" -#include "api/video_codecs/video_decoder.h" - -namespace webrtc { - -ABSL_CONST_INIT extern const bool kIsLibaomAv1DecoderSupported; - -std::unique_ptr CreateLibaomAv1Decoder(); - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_AV1_LIBAOM_AV1_DECODER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder_absent.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder_absent.cc deleted file mode 100644 index b97b68b33f..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder_absent.cc +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h" - -#include - -#include "api/video_codecs/video_decoder.h" - -namespace webrtc { - -const bool kIsLibaomAv1DecoderSupported = false; - -std::unique_ptr CreateLibaomAv1Decoder() { - return nullptr; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc index 967ec7d45a..807513bc7b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc @@ -36,34 +36,26 @@ #include "third_party/libaom/source/libaom/aom/aom_encoder.h" #include "third_party/libaom/source/libaom/aom/aomcx.h" +#define SET_ENCODER_PARAM_OR_RETURN_ERROR(param_id, param_value) \ + do { \ + if (!SetEncoderControlParameters(param_id, param_value)) { \ + return WEBRTC_VIDEO_CODEC_ERROR; \ + } \ + } while (0) + namespace webrtc { namespace { // Encoder configuration parameters constexpr int kQpMin = 10; constexpr int kUsageProfile = AOM_USAGE_REALTIME; -constexpr int kMinQindex = 145; // Min qindex threshold for QP scaling. -constexpr int kMaxQindex = 205; // Max qindex threshold for QP scaling. +constexpr int kMinQindex = 145; // Min qindex threshold for QP scaling. +constexpr int kMaxQindex = 205; // Max qindex threshold for QP scaling. constexpr int kBitDepth = 8; constexpr int kLagInFrames = 0; // No look ahead. constexpr int kRtpTicksPerSecond = 90000; constexpr float kMinimumFrameRate = 1.0; -// Only positive speeds, range for real-time coding currently is: 6 - 8. -// Lower means slower/better quality, higher means fastest/lower quality. -int GetCpuSpeed(int width, int height, int number_of_cores) { - // For smaller resolutions, use lower speed setting (get some coding gain at - // the cost of increased encoding complexity). 
- if (number_of_cores > 4 && width * height < 320 * 180) - return 6; - else if (width * height >= 1280 * 720) - return 9; - else if (width * height >= 640 * 360) - return 8; - else - return 7; -} - aom_superblock_size_t GetSuperblockSize(int width, int height, int threads) { int resolution = width * height; if (threads >= 4 && resolution >= 960 * 540 && resolution < 1920 * 1080) @@ -74,7 +66,8 @@ aom_superblock_size_t GetSuperblockSize(int width, int height, int threads) { class LibaomAv1Encoder final : public VideoEncoder { public: - LibaomAv1Encoder(); + explicit LibaomAv1Encoder( + const absl::optional& aux_config); ~LibaomAv1Encoder(); int InitEncode(const VideoCodec* codec_settings, @@ -93,6 +86,12 @@ class LibaomAv1Encoder final : public VideoEncoder { EncoderInfo GetEncoderInfo() const override; private: + template + bool SetEncoderControlParameters(int param_id, P param_value); + + // Get value to be used for encoder cpu_speed setting + int GetCpuSpeed(int width, int height); + // Determine number of encoder threads to use. int NumberOfThreads(int width, int height, int number_of_cores); @@ -105,12 +104,15 @@ class LibaomAv1Encoder final : public VideoEncoder { // Configures the encoder which buffers next frame updates and can reference. void SetSvcRefFrameConfig( const ScalableVideoController::LayerFrameConfig& layer_frame); + // If pixel format doesn't match, then reallocate. + void MaybeRewrapImgWithFormat(const aom_img_fmt_t fmt); std::unique_ptr svc_controller_; bool inited_; bool rates_configured_; absl::optional svc_params_; VideoCodec encoder_settings_; + absl::optional aux_config_; aom_image_t* frame_for_encode_; aom_codec_ctx_t ctx_; aom_codec_enc_cfg_t cfg_; @@ -142,9 +144,11 @@ int32_t VerifyCodecSettings(const VideoCodec& codec_settings) { return WEBRTC_VIDEO_CODEC_OK; } -LibaomAv1Encoder::LibaomAv1Encoder() +LibaomAv1Encoder::LibaomAv1Encoder( + const absl::optional& aux_config) : inited_(false), rates_configured_(false), + aux_config_(aux_config), frame_for_encode_(nullptr), encoded_image_callback_(nullptr) {} @@ -179,15 +183,16 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, RTC_LOG(LS_WARNING) << "Simulcast is not implemented by LibaomAv1Encoder."; return result; } - absl::string_view scalability_mode = encoder_settings_.ScalabilityMode(); - if (scalability_mode.empty()) { - RTC_LOG(LS_WARNING) << "Scalability mode is not set, using 'NONE'."; - scalability_mode = "NONE"; + absl::optional scalability_mode = + encoder_settings_.GetScalabilityMode(); + if (!scalability_mode.has_value()) { + RTC_LOG(LS_WARNING) << "Scalability mode is not set, using 'L1T1'."; + scalability_mode = ScalabilityMode::kL1T1; } - svc_controller_ = CreateScalabilityStructure(scalability_mode); + svc_controller_ = CreateScalabilityStructure(*scalability_mode); if (svc_controller_ == nullptr) { RTC_LOG(LS_WARNING) << "Failed to set scalability mode " - << scalability_mode; + << static_cast(*scalability_mode); return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } @@ -228,11 +233,10 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, cfg_.g_pass = AOM_RC_ONE_PASS; // One-pass rate control cfg_.g_lag_in_frames = kLagInFrames; // No look ahead when lag equals 0. - // Creating a wrapper to the image - setting image data to nullptr. Actual - // pointer will be set in encode. Setting align to 1, as it is meaningless - // (actual memory is not allocated). 
- frame_for_encode_ = - aom_img_alloc(nullptr, AOM_IMG_FMT_I420, cfg_.g_w, cfg_.g_h, 1); + if (frame_for_encode_ != nullptr) { + aom_img_free(frame_for_encode_); + frame_for_encode_ = nullptr; + } // Flag options: AOM_CODEC_USE_PSNR and AOM_CODEC_USE_HIGHBITDEPTH aom_codec_flags_t flags = 0; @@ -247,176 +251,125 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, inited_ = true; // Set control parameters - ret = aom_codec_control( - &ctx_, AOME_SET_CPUUSED, - GetCpuSpeed(cfg_.g_w, cfg_.g_h, settings.number_of_cores)); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_CPUUSED."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_CDEF, 1); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_ENABLE_CDEF."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_TPL_MODEL, 0); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_ENABLE_TPL_MODEL."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - ret = aom_codec_control(&ctx_, AV1E_SET_DELTAQ_MODE, 0); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_DELTAQ_MODE."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_ORDER_HINT, 0); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_ENABLE_ORDER_HINT."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - ret = aom_codec_control(&ctx_, AV1E_SET_AQ_MODE, 3); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_AQ_MODE."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - ret = aom_codec_control(&ctx_, AOME_SET_MAX_INTRA_BITRATE_PCT, 300); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_MAX_INTRA_BITRATE_PCT."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - ret = aom_codec_control(&ctx_, AV1E_SET_COEFF_COST_UPD_FREQ, 3); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_COEFF_COST_UPD_FREQ."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - ret = aom_codec_control(&ctx_, AV1E_SET_MODE_COST_UPD_FREQ, 3); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_MODE_COST_UPD_FREQ."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - ret = aom_codec_control(&ctx_, AV1E_SET_MV_COST_UPD_FREQ, 3); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_MV_COST_UPD_FREQ."; - return WEBRTC_VIDEO_CODEC_ERROR; + SET_ENCODER_PARAM_OR_RETURN_ERROR(AOME_SET_CPUUSED, + GetCpuSpeed(cfg_.g_w, cfg_.g_h)); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_CDEF, 1); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_TPL_MODEL, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_DELTAQ_MODE, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_ORDER_HINT, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_AQ_MODE, 3); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AOME_SET_MAX_INTRA_BITRATE_PCT, 300); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_COEFF_COST_UPD_FREQ, 3); + 
SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_MODE_COST_UPD_FREQ, 3); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_MV_COST_UPD_FREQ, 3); + + if (codec_settings->mode == VideoCodecMode::kScreensharing) { + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_TUNE_CONTENT, + AOM_CONTENT_SCREEN); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_PALETTE, 1); + } else { + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_PALETTE, 0); } if (cfg_.g_threads == 4 && cfg_.g_w == 640 && (cfg_.g_h == 360 || cfg_.g_h == 480)) { - ret = aom_codec_control(&ctx_, AV1E_SET_TILE_ROWS, - static_cast(log2(cfg_.g_threads))); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_TILE_ROWS."; - return WEBRTC_VIDEO_CODEC_ERROR; - } + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_TILE_ROWS, + static_cast(log2(cfg_.g_threads))); } else { - ret = aom_codec_control(&ctx_, AV1E_SET_TILE_COLUMNS, - static_cast(log2(cfg_.g_threads))); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_TILE_COLUMNS."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - } - - ret = aom_codec_control(&ctx_, AV1E_SET_ROW_MT, 1); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_ROW_MT."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - - ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_OBMC, 0); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_ENABLE_OBMC."; - return WEBRTC_VIDEO_CODEC_ERROR; - } + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_TILE_COLUMNS, + static_cast(log2(cfg_.g_threads))); + } + + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ROW_MT, 1); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_OBMC, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_NOISE_SENSITIVITY, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_WARPED_MOTION, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_GLOBAL_MOTION, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_REF_FRAME_MVS, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR( + AV1E_SET_SUPERBLOCK_SIZE, + GetSuperblockSize(cfg_.g_w, cfg_.g_h, cfg_.g_threads)); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_CFL_INTRA, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_SMOOTH_INTRA, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_ANGLE_DELTA, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_FILTER_INTRA, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_INTRA_DEFAULT_TX_ONLY, 1); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_DISABLE_TRELLIS_QUANT, 1); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_DIST_WTD_COMP, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_DIFF_WTD_COMP, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_DUAL_FILTER, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_INTERINTRA_COMP, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_INTERINTRA_WEDGE, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_INTRA_EDGE_FILTER, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_INTRABC, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_MASKED_COMP, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_PAETH_INTRA, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_QM, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_RECT_PARTITIONS, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_RESTORATION, 0); + 
SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_SMOOTH_INTERINTRA, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_TX64, 0); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_MAX_REFERENCE_FRAMES, 3); - ret = aom_codec_control(&ctx_, AV1E_SET_NOISE_SENSITIVITY, 0); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_NOISE_SENSITIVITY."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - - ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_WARPED_MOTION, 0); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_ENABLE_WARPED_MOTION."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - - ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_GLOBAL_MOTION, 0); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_ENABLE_GLOBAL_MOTION."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - - ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_REF_FRAME_MVS, 0); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_ENABLE_REF_FRAME_MVS."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - - ret = - aom_codec_control(&ctx_, AV1E_SET_SUPERBLOCK_SIZE, - GetSuperblockSize(cfg_.g_w, cfg_.g_h, cfg_.g_threads)); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_SUPERBLOCK_SIZE."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - - ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_CFL_INTRA, 0); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_ENABLE_CFL_INTRA."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - - ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_SMOOTH_INTRA, 0); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_ENABLE_SMOOTH_INTRA."; - return WEBRTC_VIDEO_CODEC_ERROR; - } + return WEBRTC_VIDEO_CODEC_OK; +} - ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_ANGLE_DELTA, 0); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_ENABLE_ANGLE_DELTA."; - return WEBRTC_VIDEO_CODEC_ERROR; +template +bool LibaomAv1Encoder::SetEncoderControlParameters(int param_id, + P param_value) { + aom_codec_err_t error_code = aom_codec_control(&ctx_, param_id, param_value); + if (error_code != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) + << "LibaomAv1Encoder::SetEncoderControlParameters returned " + << error_code << " on id: " << param_id << "."; } + return error_code == AOM_CODEC_OK; +} - ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_FILTER_INTRA, 0); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AV1E_SET_ENABLE_FILTER_INTRA."; - return WEBRTC_VIDEO_CODEC_ERROR; - } +// Only positive speeds, range for real-time coding currently is: 6 - 8. +// Lower means slower/better quality, higher means fastest/lower quality. 
+int LibaomAv1Encoder::GetCpuSpeed(int width, int height) { + if (aux_config_) { + if (auto it = aux_config_->max_pixel_count_to_cpu_speed.lower_bound(width * + height); + it != aux_config_->max_pixel_count_to_cpu_speed.end()) { + return it->second; + } - ret = aom_codec_control(&ctx_, AV1E_SET_INTRA_DEFAULT_TX_ONLY, 1); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) - << "LibaomAv1Encoder::EncodeInit returned " << ret - << " on control AOM_CTRL_AV1E_SET_INTRA_DEFAULT_TX_ONLY."; - return WEBRTC_VIDEO_CODEC_ERROR; + return 10; + } else { + // For smaller resolutions, use lower speed setting (get some coding gain at + // the cost of increased encoding complexity). + switch (encoder_settings_.GetVideoEncoderComplexity()) { + case VideoCodecComplexity::kComplexityHigh: + if (width * height <= 320 * 180) + return 8; + else if (width * height <= 640 * 360) + return 9; + else + return 10; + case VideoCodecComplexity::kComplexityHigher: + if (width * height <= 320 * 180) + return 7; + else if (width * height <= 640 * 360) + return 8; + else if (width * height <= 1280 * 720) + return 9; + else + return 10; + case VideoCodecComplexity::kComplexityMax: + if (width * height <= 320 * 180) + return 6; + else if (width * height <= 640 * 360) + return 7; + else if (width * height <= 1280 * 720) + return 8; + else + return 9; + default: + return 10; + } } - - return WEBRTC_VIDEO_CODEC_OK; } int LibaomAv1Encoder::NumberOfThreads(int width, @@ -491,12 +444,7 @@ void LibaomAv1Encoder::SetSvcLayerId( aom_svc_layer_id_t layer_id = {}; layer_id.spatial_layer_id = layer_frame.SpatialId(); layer_id.temporal_layer_id = layer_frame.TemporalId(); - aom_codec_err_t ret = - aom_codec_control(&ctx_, AV1E_SET_SVC_LAYER_ID, &layer_id); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret - << " on control AV1E_SET_SVC_LAYER_ID."; - } + SetEncoderControlParameters(AV1E_SET_SVC_LAYER_ID, &layer_id); } void LibaomAv1Encoder::SetSvcRefFrameConfig( @@ -526,12 +474,8 @@ void LibaomAv1Encoder::SetSvcRefFrameConfig( ref_frame_config.refresh[buffer.id] = 1; } } - aom_codec_err_t ret = aom_codec_control(&ctx_, AV1E_SET_SVC_REF_FRAME_CONFIG, - &ref_frame_config); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret - << " on control AV1_SET_SVC_REF_FRAME_CONFIG."; - } + + SetEncoderControlParameters(AV1E_SET_SVC_REF_FRAME_CONFIG, &ref_frame_config); } int32_t LibaomAv1Encoder::RegisterEncodeCompleteCallback( @@ -555,6 +499,21 @@ int32_t LibaomAv1Encoder::Release() { return WEBRTC_VIDEO_CODEC_OK; } +void LibaomAv1Encoder::MaybeRewrapImgWithFormat(const aom_img_fmt_t fmt) { + if (!frame_for_encode_) { + frame_for_encode_ = + aom_img_wrap(nullptr, fmt, cfg_.g_w, cfg_.g_h, 1, nullptr); + + } else if (frame_for_encode_->fmt != fmt) { + RTC_LOG(LS_INFO) << "Switching AV1 encoder pixel format to " + << (fmt == AOM_IMG_FMT_NV12 ? "NV12" : "I420"); + aom_img_free(frame_for_encode_); + frame_for_encode_ = + aom_img_wrap(nullptr, fmt, cfg_.g_w, cfg_.g_h, 1, nullptr); + } + // else no-op since the image is already in the right format. 
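GetCpuSpeed() above consults aux_config_->max_pixel_count_to_cpu_speed with std::map::lower_bound: the first bucket whose key (a maximum pixel count) is at least width * height wins, and a miss falls back to the fastest, lowest-quality speed. A standalone sketch of that lookup, with illustrative thresholds that are not the WebRTC defaults:

#include <cstdio>
#include <map>

int CpuSpeedFor(int width, int height,
                const std::map<int, int>& max_pixel_count_to_cpu_speed) {
  const int pixels = width * height;
  // First entry whose "up to this many pixels" key covers the frame.
  auto it = max_pixel_count_to_cpu_speed.lower_bound(pixels);
  // No bucket is large enough: use the fastest / lowest-quality setting.
  return it != max_pixel_count_to_cpu_speed.end() ? it->second : 10;
}

int main() {
  const std::map<int, int> speeds = {{320 * 180, 7}, {640 * 360, 8},
                                     {1280 * 720, 9}};
  std::printf("%d\n", CpuSpeedFor(320, 180, speeds));    // 7
  std::printf("%d\n", CpuSpeedFor(640, 360, speeds));    // 8
  std::printf("%d\n", CpuSpeedFor(1920, 1080, speeds));  // 10 (fallback)
  return 0;
}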
+} + int32_t LibaomAv1Encoder::Encode( const VideoFrame& frame, const std::vector* frame_types) { @@ -574,38 +533,73 @@ int32_t LibaomAv1Encoder::Encode( return WEBRTC_VIDEO_CODEC_ERROR; } + rtc::scoped_refptr buffer = frame.video_frame_buffer(); + absl::InlinedVector + supported_formats = {VideoFrameBuffer::Type::kI420, + VideoFrameBuffer::Type::kNV12}; + rtc::scoped_refptr mapped_buffer; + if (buffer->type() != VideoFrameBuffer::Type::kNative) { + // `buffer` is already mapped. + mapped_buffer = buffer; + } else { + // Attempt to map to one of the supported formats. + mapped_buffer = buffer->GetMappedFrameBuffer(supported_formats); + } + // Convert input frame to I420, if needed. - VideoFrame prepped_input_frame = frame; - if (prepped_input_frame.video_frame_buffer()->type() != - VideoFrameBuffer::Type::kI420 && - prepped_input_frame.video_frame_buffer()->type() != - VideoFrameBuffer::Type::kI420A) { - rtc::scoped_refptr converted_buffer( - prepped_input_frame.video_frame_buffer()->ToI420()); + if (!mapped_buffer || + (absl::c_find(supported_formats, mapped_buffer->type()) == + supported_formats.end() && + mapped_buffer->type() != VideoFrameBuffer::Type::kI420A)) { + rtc::scoped_refptr converted_buffer(buffer->ToI420()); if (!converted_buffer) { RTC_LOG(LS_ERROR) << "Failed to convert " << VideoFrameBufferTypeToString( - prepped_input_frame.video_frame_buffer()->type()) + frame.video_frame_buffer()->type()) << " image to I420. Can't encode frame."; return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE; } RTC_CHECK(converted_buffer->type() == VideoFrameBuffer::Type::kI420 || converted_buffer->type() == VideoFrameBuffer::Type::kI420A); - prepped_input_frame = VideoFrame(converted_buffer, frame.timestamp(), - frame.render_time_ms(), frame.rotation()); - } - - // Set frame_for_encode_ data pointers and strides. - auto i420_buffer = prepped_input_frame.video_frame_buffer()->GetI420(); - frame_for_encode_->planes[AOM_PLANE_Y] = - const_cast(i420_buffer->DataY()); - frame_for_encode_->planes[AOM_PLANE_U] = - const_cast(i420_buffer->DataU()); - frame_for_encode_->planes[AOM_PLANE_V] = - const_cast(i420_buffer->DataV()); - frame_for_encode_->stride[AOM_PLANE_Y] = i420_buffer->StrideY(); - frame_for_encode_->stride[AOM_PLANE_U] = i420_buffer->StrideU(); - frame_for_encode_->stride[AOM_PLANE_V] = i420_buffer->StrideV(); + + mapped_buffer = converted_buffer; + } + + switch (mapped_buffer->type()) { + case VideoFrameBuffer::Type::kI420: + case VideoFrameBuffer::Type::kI420A: { + // Set frame_for_encode_ data pointers and strides. 
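The Encode() hunk above first tries to hand the encoder a buffer that is already in a supported layout (I420 or NV12, possibly by mapping a native buffer) and only converts to I420 as a last resort. A standalone sketch of that "map if possible, convert otherwise" decision, using a hypothetical PixelFormat enum instead of the WebRTC VideoFrameBuffer types:

#include <algorithm>
#include <array>
#include <cstdio>
#include <optional>

enum class PixelFormat { kNative, kI420, kNV12, kRGBA };

constexpr std::array<PixelFormat, 2> kSupported = {PixelFormat::kI420,
                                                   PixelFormat::kNV12};

// Pretend "zero-copy mapping": only some native buffers can expose NV12.
std::optional<PixelFormat> TryMapNative(bool native_supports_nv12) {
  if (native_supports_nv12) return PixelFormat::kNV12;
  return std::nullopt;
}

PixelFormat PrepareForEncode(PixelFormat input, bool native_supports_nv12) {
  std::optional<PixelFormat> mapped =
      input == PixelFormat::kNative ? TryMapNative(native_supports_nv12)
                                    : std::optional<PixelFormat>(input);
  const bool usable =
      mapped.has_value() &&
      std::find(kSupported.begin(), kSupported.end(), *mapped) !=
          kSupported.end();
  // Mapping failed or gave an unsupported layout: fall back to a full
  // conversion, mirroring the ToI420() path above.
  return usable ? *mapped : PixelFormat::kI420;
}

int main() {
  std::printf("%d\n", static_cast<int>(PrepareForEncode(PixelFormat::kNV12, false)));   // NV12, zero copy
  std::printf("%d\n", static_cast<int>(PrepareForEncode(PixelFormat::kRGBA, false)));   // converted to I420
  std::printf("%d\n", static_cast<int>(PrepareForEncode(PixelFormat::kNative, true)));  // mapped to NV12
  return 0;
}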
+ MaybeRewrapImgWithFormat(AOM_IMG_FMT_I420); + auto i420_buffer = mapped_buffer->GetI420(); + RTC_DCHECK(i420_buffer); + frame_for_encode_->planes[AOM_PLANE_Y] = + const_cast(i420_buffer->DataY()); + frame_for_encode_->planes[AOM_PLANE_U] = + const_cast(i420_buffer->DataU()); + frame_for_encode_->planes[AOM_PLANE_V] = + const_cast(i420_buffer->DataV()); + frame_for_encode_->stride[AOM_PLANE_Y] = i420_buffer->StrideY(); + frame_for_encode_->stride[AOM_PLANE_U] = i420_buffer->StrideU(); + frame_for_encode_->stride[AOM_PLANE_V] = i420_buffer->StrideV(); + break; + } + case VideoFrameBuffer::Type::kNV12: { + MaybeRewrapImgWithFormat(AOM_IMG_FMT_NV12); + const NV12BufferInterface* nv12_buffer = mapped_buffer->GetNV12(); + RTC_DCHECK(nv12_buffer); + frame_for_encode_->planes[AOM_PLANE_Y] = + const_cast(nv12_buffer->DataY()); + frame_for_encode_->planes[AOM_PLANE_U] = + const_cast(nv12_buffer->DataUV()); + frame_for_encode_->planes[AOM_PLANE_V] = nullptr; + frame_for_encode_->stride[AOM_PLANE_Y] = nv12_buffer->StrideY(); + frame_for_encode_->stride[AOM_PLANE_U] = nv12_buffer->StrideUV(); + frame_for_encode_->stride[AOM_PLANE_V] = 0; + break; + } + default: + return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE; + } const uint32_t duration = kRtpTicksPerSecond / static_cast(encoder_settings_.maxFramerate); @@ -638,14 +632,8 @@ int32_t LibaomAv1Encoder::Encode( SetSvcLayerId(*layer_frame); SetSvcRefFrameConfig(*layer_frame); - aom_codec_err_t ret = - aom_codec_control(&ctx_, AV1E_SET_ERROR_RESILIENT_MODE, - layer_frame->TemporalId() > 0 ? 1 : 0); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret - << " on control AV1E_SET_ERROR_RESILIENT_MODE."; - return WEBRTC_VIDEO_CODEC_ERROR; - } + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ERROR_RESILIENT_MODE, + layer_frame->TemporalId() > 0 ? 1 : 0); } // Encode a frame. 
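The switch above wires frame_for_encode_ differently per layout: I420 provides three separate planes, while NV12 provides a Y plane plus one interleaved UV plane, so the third plane pointer stays null and its stride is 0. A standalone sketch of the same wiring with plain structs; PlanarImage and the buffer types are illustrative, not the aom_image_t or WebRTC API.

#include <cstdint>
#include <vector>

struct PlanarImage {
  const uint8_t* planes[3] = {nullptr, nullptr, nullptr};
  int strides[3] = {0, 0, 0};
};

struct I420Buffer {
  std::vector<uint8_t> y, u, v;
  int stride_y, stride_u, stride_v;
};

struct NV12Buffer {
  std::vector<uint8_t> y, uv;  // UV is interleaved: U0 V0 U1 V1 ...
  int stride_y, stride_uv;
};

PlanarImage Wire(const I420Buffer& b) {
  PlanarImage img;
  img.planes[0] = b.y.data();  img.strides[0] = b.stride_y;
  img.planes[1] = b.u.data();  img.strides[1] = b.stride_u;
  img.planes[2] = b.v.data();  img.strides[2] = b.stride_v;
  return img;
}

PlanarImage Wire(const NV12Buffer& b) {
  PlanarImage img;
  img.planes[0] = b.y.data();   img.strides[0] = b.stride_y;
  img.planes[1] = b.uv.data();  img.strides[1] = b.stride_uv;
  // No separate V plane exists in NV12.
  img.planes[2] = nullptr;      img.strides[2] = 0;
  return img;
}

int main() {
  I420Buffer i420{std::vector<uint8_t>(16), std::vector<uint8_t>(4),
                  std::vector<uint8_t>(4), 4, 2, 2};
  NV12Buffer nv12{std::vector<uint8_t>(16), std::vector<uint8_t>(8), 4, 4};
  return Wire(i420).planes[2] != nullptr && Wire(nv12).planes[2] == nullptr ? 0
                                                                            : 1;
}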
@@ -696,19 +684,17 @@ int32_t LibaomAv1Encoder::Encode( encoded_image._encodedWidth = cfg_.g_w * n / d; encoded_image._encodedHeight = cfg_.g_h * n / d; encoded_image.SetSpatialIndex(layer_frame->SpatialId()); + encoded_image.SetTemporalIndex(layer_frame->TemporalId()); } else { encoded_image._encodedWidth = cfg_.g_w; encoded_image._encodedHeight = cfg_.g_h; } encoded_image.timing_.flags = VideoSendTiming::kInvalid; + int qp = -1; - ret = aom_codec_control(&ctx_, AOME_GET_LAST_QUANTIZER, &qp); - if (ret != AOM_CODEC_OK) { - RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret - << " on control AOME_GET_LAST_QUANTIZER."; - return WEBRTC_VIDEO_CODEC_ERROR; - } + SET_ENCODER_PARAM_OR_RETURN_ERROR(AOME_GET_LAST_QUANTIZER, &qp); encoded_image.qp_ = qp; + encoded_image.SetColorSpace(frame.color_space()); ++data_pkt_count; } @@ -788,7 +774,7 @@ void LibaomAv1Encoder::SetRates(const RateControlParameters& parameters) { accumulated_bitrate_bps / 1000; } } - aom_codec_control(&ctx_, AV1E_SET_SVC_PARAMS, &*svc_params_); + SetEncoderControlParameters(AV1E_SET_SVC_PARAMS, &*svc_params_); } rates_configured_ = true; @@ -805,7 +791,8 @@ VideoEncoder::EncoderInfo LibaomAv1Encoder::GetEncoderInfo() const { info.has_trusted_rate_controller = true; info.is_hardware_accelerated = false; info.scaling_settings = VideoEncoder::ScalingSettings(kMinQindex, kMaxQindex); - info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420}; + info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420, + VideoFrameBuffer::Type::kNV12}; if (SvcEnabled()) { for (int sid = 0; sid < svc_params_->number_spatial_layers; ++sid) { info.fps_allocation[sid].resize(svc_params_->number_temporal_layers); @@ -820,17 +807,13 @@ VideoEncoder::EncoderInfo LibaomAv1Encoder::GetEncoderInfo() const { } // namespace -const bool kIsLibaomAv1EncoderSupported = true; - std::unique_ptr CreateLibaomAv1Encoder() { - return std::make_unique(); + return std::make_unique(absl::nullopt); } -bool LibaomAv1EncoderSupportsScalabilityMode( - absl::string_view scalability_mode) { - // For AV1, the scalability mode is supported if we can create the scalability - // structure. - return ScalabilityStructureConfig(scalability_mode) != absl::nullopt; +std::unique_ptr CreateLibaomAv1Encoder( + const LibaomAv1EncoderAuxConfig& aux_config) { + return std::make_unique(aux_config); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h index 0d81c9c17e..2fd1d5a754 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h @@ -10,19 +10,21 @@ #ifndef MODULES_VIDEO_CODING_CODECS_AV1_LIBAOM_AV1_ENCODER_H_ #define MODULES_VIDEO_CODING_CODECS_AV1_LIBAOM_AV1_ENCODER_H_ +#include #include -#include "absl/base/attributes.h" #include "absl/strings/string_view.h" #include "api/video_codecs/video_encoder.h" namespace webrtc { - -ABSL_CONST_INIT extern const bool kIsLibaomAv1EncoderSupported; +struct LibaomAv1EncoderAuxConfig { + // A map of max pixel count --> cpu speed. 
+ std::map max_pixel_count_to_cpu_speed; +}; std::unique_ptr CreateLibaomAv1Encoder(); -bool LibaomAv1EncoderSupportsScalabilityMode( - absl::string_view scalability_mode); +std::unique_ptr CreateLibaomAv1Encoder( + const LibaomAv1EncoderAuxConfig& aux_config); } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_absent.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_absent.cc deleted file mode 100644 index fff1dd9ed8..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_absent.cc +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" - -#include - -#include "api/video_codecs/video_encoder.h" - -namespace webrtc { - -const bool kIsLibaomAv1EncoderSupported = false; - -std::unique_ptr CreateLibaomAv1Encoder() { - return nullptr; -} - -bool LibaomAv1EncoderSupportsScalabilityMode( - absl::string_view scalability_mode) { - return false; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264.cc index 8324b7c74e..23580d7a4a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264.cc @@ -14,6 +14,7 @@ #include #include +#include "absl/container/inlined_vector.h" #include "absl/types/optional.h" #include "api/video_codecs/sdp_video_format.h" #include "media/base/media_constants.h" @@ -44,21 +45,30 @@ bool IsH264CodecSupported() { #endif } -constexpr absl::string_view kSupportedScalabilityModes[] = {"L1T2", "L1T3"}; +constexpr ScalabilityMode kSupportedScalabilityModes[] = { + ScalabilityMode::kL1T1, ScalabilityMode::kL1T2, ScalabilityMode::kL1T3}; } // namespace SdpVideoFormat CreateH264Format(H264Profile profile, H264Level level, - const std::string& packetization_mode) { + const std::string& packetization_mode, + bool add_scalability_modes) { const absl::optional profile_string = H264ProfileLevelIdToString(H264ProfileLevelId(profile, level)); RTC_CHECK(profile_string); + absl::InlinedVector scalability_modes; + if (add_scalability_modes) { + for (const auto scalability_mode : kSupportedScalabilityModes) { + scalability_modes.push_back(scalability_mode); + } + } return SdpVideoFormat( cricket::kH264CodecName, {{cricket::kH264FmtpProfileLevelId, *profile_string}, {cricket::kH264FmtpLevelAsymmetryAllowed, "1"}, - {cricket::kH264FmtpPacketizationMode, packetization_mode}}); + {cricket::kH264FmtpPacketizationMode, packetization_mode}}, + scalability_modes); } void DisableRtcUseH264() { @@ -67,7 +77,7 @@ void DisableRtcUseH264() { #endif } -std::vector SupportedH264Codecs() { +std::vector SupportedH264Codecs(bool add_scalability_modes) { TRACE_EVENT0("webrtc", __func__); if (!IsH264CodecSupported()) return std::vector(); @@ -81,17 +91,34 @@ std::vector SupportedH264Codecs() { // We support both packetization modes 0 (mandatory) and 1 (optional, // preferred). 
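CreateH264Format() above gains an add_scalability_modes flag: when it is set, the supported ScalabilityMode values are copied into the SdpVideoFormat so they can be advertised in SDP. A standalone sketch of the shape of that change, with placeholder Mode and Format types rather than the WebRTC ones:

#include <cstdio>
#include <map>
#include <string>
#include <vector>

enum class Mode { kL1T1, kL1T2, kL1T3 };

constexpr Mode kSupportedModes[] = {Mode::kL1T1, Mode::kL1T2, Mode::kL1T3};

struct Format {
  std::string name;
  std::map<std::string, std::string> parameters;
  std::vector<Mode> scalability_modes;
};

Format CreateFormat(const std::string& profile_level_id,
                    const std::string& packetization_mode,
                    bool add_scalability_modes) {
  std::vector<Mode> modes;
  if (add_scalability_modes) {
    // Only advertise the modes the encoder can actually produce.
    for (Mode m : kSupportedModes) modes.push_back(m);
  }
  return {"H264",
          {{"profile-level-id", profile_level_id},
           {"packetization-mode", packetization_mode}},
          modes};
}

int main() {
  Format plain = CreateFormat("42e01f", "1", /*add_scalability_modes=*/false);
  Format rich = CreateFormat("42e01f", "1", /*add_scalability_modes=*/true);
  std::printf("%zu %zu\n", plain.scalability_modes.size(),
              rich.scalability_modes.size());  // 0 3
  return 0;
}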
return {CreateH264Format(H264Profile::kProfileBaseline, H264Level::kLevel3_1, - "1"), + "1", add_scalability_modes), CreateH264Format(H264Profile::kProfileBaseline, H264Level::kLevel3_1, - "0"), + "0", add_scalability_modes), CreateH264Format(H264Profile::kProfileConstrainedBaseline, - H264Level::kLevel3_1, "1"), + H264Level::kLevel3_1, "1", add_scalability_modes), CreateH264Format(H264Profile::kProfileConstrainedBaseline, - H264Level::kLevel3_1, "0"), - CreateH264Format(H264Profile::kProfileMain, - H264Level::kLevel3_1, "1"), - CreateH264Format(H264Profile::kProfileMain, - H264Level::kLevel3_1, "0")}; + H264Level::kLevel3_1, "0", add_scalability_modes), + CreateH264Format(H264Profile::kProfileMain, H264Level::kLevel3_1, "1", + add_scalability_modes), + CreateH264Format(H264Profile::kProfileMain, H264Level::kLevel3_1, "0", + add_scalability_modes)}; +} + +std::vector SupportedH264DecoderCodecs() { + TRACE_EVENT0("webrtc", __func__); + if (!IsH264CodecSupported()) + return std::vector(); + + std::vector supportedCodecs = SupportedH264Codecs(); + + // OpenH264 doesn't yet support High Predictive 4:4:4 encoding but it does + // support decoding. + supportedCodecs.push_back(CreateH264Format( + H264Profile::kProfilePredictiveHigh444, H264Level::kLevel3_1, "1")); + supportedCodecs.push_back(CreateH264Format( + H264Profile::kProfilePredictiveHigh444, H264Level::kLevel3_1, "0")); + + return supportedCodecs; } std::unique_ptr H264Encoder::Create( @@ -111,7 +138,7 @@ bool H264Encoder::IsSupported() { return IsH264CodecSupported(); } -bool H264Encoder::SupportsScalabilityMode(absl::string_view scalability_mode) { +bool H264Encoder::SupportsScalabilityMode(ScalabilityMode scalability_mode) { for (const auto& entry : kSupportedScalabilityModes) { if (entry == scalability_mode) { return true; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc index a08ee84cff..76d04e4188 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc @@ -33,16 +33,16 @@ extern "C" { #include "modules/video_coding/codecs/h264/h264_color_space.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" -#include "third_party/libyuv/include/libyuv/convert.h" namespace webrtc { namespace { -const AVPixelFormat kPixelFormatDefault = AV_PIX_FMT_YUV420P; -const AVPixelFormat kPixelFormatFullRange = AV_PIX_FMT_YUVJ420P; +constexpr std::array kPixelFormatsSupported = { + AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P, + AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P, + AV_PIX_FMT_YUV420P10LE, AV_PIX_FMT_YUV422P10LE}; const size_t kYPlaneIndex = 0; const size_t kUPlaneIndex = 1; const size_t kVPlaneIndex = 2; @@ -76,9 +76,11 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context, // Necessary capability to be allowed to provide our own buffers. RTC_DCHECK(context->codec->capabilities | AV_CODEC_CAP_DR1); - // Limited or full range YUV420 is expected. 
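kPixelFormatsSupported above replaces the old two-format RTC_CHECK with a table the decoder scans (the find_if just below), and the same membership-test pattern backs SupportsScalabilityMode(). A standalone sketch of that check with std::array and std::find, using a placeholder enum instead of FFmpeg's AVPixelFormat and listing only a few sample values:

#include <algorithm>
#include <array>
#include <cstdio>

enum class PixFmt { kYUV420P, kYUV422P, kYUV444P, kYUV420P10LE, kNV12 };

constexpr std::array<PixFmt, 4> kSupported = {
    PixFmt::kYUV420P, PixFmt::kYUV422P, PixFmt::kYUV444P,
    PixFmt::kYUV420P10LE};

bool IsSupported(PixFmt fmt) {
  return std::find(kSupported.begin(), kSupported.end(), fmt) !=
         kSupported.end();
}

int main() {
  std::printf("%d %d\n", IsSupported(PixFmt::kYUV444P),  // 1
              IsSupported(PixFmt::kNV12));               // 0
  return 0;
}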
- RTC_CHECK(context->pix_fmt == kPixelFormatDefault || - context->pix_fmt == kPixelFormatFullRange); + auto pixelFormatSupported = std::find_if( + kPixelFormatsSupported.begin(), kPixelFormatsSupported.end(), + [context](AVPixelFormat format) { return context->pix_fmt == format; }); + + RTC_CHECK(pixelFormatSupported != kPixelFormatsSupported.end()); // `av_frame->width` and `av_frame->height` are set by FFmpeg. These are the // actual image's dimensions and may be different from `context->width` and @@ -110,30 +112,108 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context, // FFmpeg expects the initial allocation to be zero-initialized according to // http://crbug.com/390941. Our pool is set up to zero-initialize new buffers. - // TODO(nisse): Delete that feature from the video pool, instead add - // an explicit call to InitializeData here. - rtc::scoped_refptr frame_buffer = - decoder->ffmpeg_buffer_pool_.CreateI420Buffer(width, height); + // TODO(https://crbug.com/390941): Delete that feature from the video pool, + // instead add an explicit call to InitializeData here. + rtc::scoped_refptr frame_buffer; + rtc::scoped_refptr i444_buffer; + rtc::scoped_refptr i420_buffer; + rtc::scoped_refptr i422_buffer; + rtc::scoped_refptr i010_buffer; + rtc::scoped_refptr i210_buffer; + int bytes_per_pixel = 1; + switch (context->pix_fmt) { + case AV_PIX_FMT_YUV420P: + case AV_PIX_FMT_YUVJ420P: + i420_buffer = + decoder->ffmpeg_buffer_pool_.CreateI420Buffer(width, height); + // Set `av_frame` members as required by FFmpeg. + av_frame->data[kYPlaneIndex] = i420_buffer->MutableDataY(); + av_frame->linesize[kYPlaneIndex] = i420_buffer->StrideY(); + av_frame->data[kUPlaneIndex] = i420_buffer->MutableDataU(); + av_frame->linesize[kUPlaneIndex] = i420_buffer->StrideU(); + av_frame->data[kVPlaneIndex] = i420_buffer->MutableDataV(); + av_frame->linesize[kVPlaneIndex] = i420_buffer->StrideV(); + RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data); + frame_buffer = i420_buffer; + break; + case AV_PIX_FMT_YUV444P: + case AV_PIX_FMT_YUVJ444P: + i444_buffer = + decoder->ffmpeg_buffer_pool_.CreateI444Buffer(width, height); + // Set `av_frame` members as required by FFmpeg. + av_frame->data[kYPlaneIndex] = i444_buffer->MutableDataY(); + av_frame->linesize[kYPlaneIndex] = i444_buffer->StrideY(); + av_frame->data[kUPlaneIndex] = i444_buffer->MutableDataU(); + av_frame->linesize[kUPlaneIndex] = i444_buffer->StrideU(); + av_frame->data[kVPlaneIndex] = i444_buffer->MutableDataV(); + av_frame->linesize[kVPlaneIndex] = i444_buffer->StrideV(); + frame_buffer = i444_buffer; + break; + case AV_PIX_FMT_YUV422P: + case AV_PIX_FMT_YUVJ422P: + i422_buffer = + decoder->ffmpeg_buffer_pool_.CreateI422Buffer(width, height); + // Set `av_frame` members as required by FFmpeg. + av_frame->data[kYPlaneIndex] = i422_buffer->MutableDataY(); + av_frame->linesize[kYPlaneIndex] = i422_buffer->StrideY(); + av_frame->data[kUPlaneIndex] = i422_buffer->MutableDataU(); + av_frame->linesize[kUPlaneIndex] = i422_buffer->StrideU(); + av_frame->data[kVPlaneIndex] = i422_buffer->MutableDataV(); + av_frame->linesize[kVPlaneIndex] = i422_buffer->StrideV(); + frame_buffer = i422_buffer; + break; + case AV_PIX_FMT_YUV420P10LE: + i010_buffer = + decoder->ffmpeg_buffer_pool_.CreateI010Buffer(width, height); + // Set `av_frame` members as required by FFmpeg. 
+ av_frame->data[kYPlaneIndex] = + reinterpret_cast(i010_buffer->MutableDataY()); + av_frame->linesize[kYPlaneIndex] = i010_buffer->StrideY() * 2; + av_frame->data[kUPlaneIndex] = + reinterpret_cast(i010_buffer->MutableDataU()); + av_frame->linesize[kUPlaneIndex] = i010_buffer->StrideU() * 2; + av_frame->data[kVPlaneIndex] = + reinterpret_cast(i010_buffer->MutableDataV()); + av_frame->linesize[kVPlaneIndex] = i010_buffer->StrideV() * 2; + frame_buffer = i010_buffer; + bytes_per_pixel = 2; + break; + case AV_PIX_FMT_YUV422P10LE: + i210_buffer = + decoder->ffmpeg_buffer_pool_.CreateI210Buffer(width, height); + // Set `av_frame` members as required by FFmpeg. + av_frame->data[kYPlaneIndex] = + reinterpret_cast(i210_buffer->MutableDataY()); + av_frame->linesize[kYPlaneIndex] = i210_buffer->StrideY() * 2; + av_frame->data[kUPlaneIndex] = + reinterpret_cast(i210_buffer->MutableDataU()); + av_frame->linesize[kUPlaneIndex] = i210_buffer->StrideU() * 2; + av_frame->data[kVPlaneIndex] = + reinterpret_cast(i210_buffer->MutableDataV()); + av_frame->linesize[kVPlaneIndex] = i210_buffer->StrideV() * 2; + frame_buffer = i210_buffer; + bytes_per_pixel = 2; + break; + default: + RTC_LOG(LS_ERROR) << "Unsupported buffer type " << context->pix_fmt + << ". Check supported supported pixel formats!"; + decoder->ReportError(); + return -1; + } - int y_size = width * height; - int uv_size = frame_buffer->ChromaWidth() * frame_buffer->ChromaHeight(); + int y_size = width * height * bytes_per_pixel; + int uv_size = frame_buffer->ChromaWidth() * frame_buffer->ChromaHeight() * + bytes_per_pixel; // DCHECK that we have a continuous buffer as is required. - RTC_DCHECK_EQ(frame_buffer->DataU(), frame_buffer->DataY() + y_size); - RTC_DCHECK_EQ(frame_buffer->DataV(), frame_buffer->DataU() + uv_size); + RTC_DCHECK_EQ(av_frame->data[kUPlaneIndex], + av_frame->data[kYPlaneIndex] + y_size); + RTC_DCHECK_EQ(av_frame->data[kVPlaneIndex], + av_frame->data[kUPlaneIndex] + uv_size); int total_size = y_size + 2 * uv_size; av_frame->format = context->pix_fmt; av_frame->reordered_opaque = context->reordered_opaque; - // Set `av_frame` members as required by FFmpeg. - av_frame->data[kYPlaneIndex] = frame_buffer->MutableDataY(); - av_frame->linesize[kYPlaneIndex] = frame_buffer->StrideY(); - av_frame->data[kUPlaneIndex] = frame_buffer->MutableDataU(); - av_frame->linesize[kUPlaneIndex] = frame_buffer->StrideU(); - av_frame->data[kVPlaneIndex] = frame_buffer->MutableDataV(); - av_frame->linesize[kVPlaneIndex] = frame_buffer->StrideV(); - RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data); - // Create a VideoFrame object, to keep a reference to the buffer. // TODO(nisse): The VideoFrame's timestamp and rotation info is not used. // Refactor to do not use a VideoFrame object at all. @@ -163,10 +243,7 @@ H264DecoderImpl::H264DecoderImpl() : ffmpeg_buffer_pool_(true), decoded_image_callback_(nullptr), has_reported_init_(false), - has_reported_error_(false), - preferred_output_format_(field_trial::IsEnabled("WebRTC-NV12Decode") - ? 
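The I010/I210 cases above expose 16-bit sample buffers to FFmpeg, which deals in byte pointers and byte strides: each plane pointer is reinterpret_cast to uint8_t*, the per-sample stride is doubled, and plane sizes scale with bytes_per_pixel. A standalone sketch of that sample-versus-byte arithmetic, with no FFmpeg or WebRTC types:

#include <cassert>
#include <cstdint>
#include <vector>

// A 10-bit plane stored in 16-bit samples.
struct Plane16 {
  std::vector<uint16_t> samples;
  int width;   // in samples
  int height;
  int stride;  // in samples
};

struct BytePlaneView {
  uint8_t* data;
  int linesize;  // in bytes, as FFmpeg expects
};

BytePlaneView AsBytes(Plane16& p) {
  // Each 16-bit sample occupies two bytes, so the byte stride is stride * 2.
  return {reinterpret_cast<uint8_t*>(p.samples.data()), p.stride * 2};
}

int main() {
  const int width = 320, height = 180, bytes_per_pixel = 2;
  Plane16 y{std::vector<uint16_t>(static_cast<size_t>(width) * height),
            width, height, /*stride=*/width};
  BytePlaneView view = AsBytes(y);
  // Going back from a byte linesize to a sample stride divides by 2 again,
  // mirroring the WrapI010Buffer(..., linesize / 2, ...) calls further down.
  assert(view.linesize / 2 == y.stride);
  // The plane size in bytes scales with bytes_per_pixel, as in the
  // contiguity DCHECKs above.
  const int y_size_bytes = width * height * bytes_per_pixel;
  assert(y_size_bytes ==
         static_cast<int>(y.samples.size() * sizeof(uint16_t)));
  (void)view;
  (void)y_size_bytes;
  return 0;
}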
VideoFrameBuffer::Type::kNV12 - : VideoFrameBuffer::Type::kI420) {} + has_reported_error_(false) {} H264DecoderImpl::~H264DecoderImpl() { Release(); @@ -197,7 +274,6 @@ bool H264DecoderImpl::Configure(const Settings& settings) { av_context_->coded_width = resolution.Width(); av_context_->coded_height = resolution.Height(); } - av_context_->pix_fmt = kPixelFormatDefault; av_context_->extradata = nullptr; av_context_->extradata_size = 0; @@ -232,8 +308,7 @@ bool H264DecoderImpl::Configure(const Settings& settings) { av_frame_.reset(av_frame_alloc()); if (absl::optional buffer_pool_size = settings.buffer_pool_size()) { - if (!ffmpeg_buffer_pool_.Resize(*buffer_pool_size) || - !output_buffer_pool_.Resize(*buffer_pool_size)) { + if (!ffmpeg_buffer_pool_.Resize(*buffer_pool_size)) { return false; } } @@ -317,48 +392,182 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image, RTC_DCHECK(input_frame); rtc::scoped_refptr frame_buffer = input_frame->video_frame_buffer(); - const webrtc::I420BufferInterface* i420_buffer = frame_buffer->GetI420(); + + // Instantiate Planar YUV buffer according to video frame buffer type + const webrtc::PlanarYuvBuffer* planar_yuv_buffer = nullptr; + const webrtc::PlanarYuv8Buffer* planar_yuv8_buffer = nullptr; + const webrtc::PlanarYuv16BBuffer* planar_yuv16_buffer = nullptr; + VideoFrameBuffer::Type video_frame_buffer_type = frame_buffer->type(); + switch (video_frame_buffer_type) { + case VideoFrameBuffer::Type::kI420: + planar_yuv_buffer = frame_buffer->GetI420(); + planar_yuv8_buffer = + reinterpret_cast(planar_yuv_buffer); + break; + case VideoFrameBuffer::Type::kI444: + planar_yuv_buffer = frame_buffer->GetI444(); + planar_yuv8_buffer = + reinterpret_cast(planar_yuv_buffer); + break; + case VideoFrameBuffer::Type::kI422: + planar_yuv_buffer = frame_buffer->GetI422(); + planar_yuv8_buffer = + reinterpret_cast(planar_yuv_buffer); + break; + case VideoFrameBuffer::Type::kI010: + planar_yuv_buffer = frame_buffer->GetI010(); + planar_yuv16_buffer = reinterpret_cast( + planar_yuv_buffer); + break; + case VideoFrameBuffer::Type::kI210: + planar_yuv_buffer = frame_buffer->GetI210(); + planar_yuv16_buffer = reinterpret_cast( + planar_yuv_buffer); + break; + default: + // If this code is changed to allow other video frame buffer type, + // make sure that the code below which wraps I420/I422/I444 buffer and + // code which converts to NV12 is changed + // to work with new video frame buffer type + + RTC_LOG(LS_ERROR) << "frame_buffer type: " + << static_cast(video_frame_buffer_type) + << " is not supported!"; + ReportError(); + return WEBRTC_VIDEO_CODEC_ERROR; + } // When needed, FFmpeg applies cropping by moving plane pointers and adjusting // frame width/height. Ensure that cropped buffers lie within the allocated // memory. 
- RTC_DCHECK_LE(av_frame_->width, i420_buffer->width()); - RTC_DCHECK_LE(av_frame_->height, i420_buffer->height()); - RTC_DCHECK_GE(av_frame_->data[kYPlaneIndex], i420_buffer->DataY()); - RTC_DCHECK_LE( - av_frame_->data[kYPlaneIndex] + - av_frame_->linesize[kYPlaneIndex] * av_frame_->height, - i420_buffer->DataY() + i420_buffer->StrideY() * i420_buffer->height()); - RTC_DCHECK_GE(av_frame_->data[kUPlaneIndex], i420_buffer->DataU()); - RTC_DCHECK_LE(av_frame_->data[kUPlaneIndex] + - av_frame_->linesize[kUPlaneIndex] * av_frame_->height / 2, - i420_buffer->DataU() + - i420_buffer->StrideU() * i420_buffer->height() / 2); - RTC_DCHECK_GE(av_frame_->data[kVPlaneIndex], i420_buffer->DataV()); - RTC_DCHECK_LE(av_frame_->data[kVPlaneIndex] + - av_frame_->linesize[kVPlaneIndex] * av_frame_->height / 2, - i420_buffer->DataV() + - i420_buffer->StrideV() * i420_buffer->height() / 2); - - rtc::scoped_refptr cropped_buffer = WrapI420Buffer( - av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex], - av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex], - av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex], - av_frame_->linesize[kVPlaneIndex], - // To keep reference alive. - [frame_buffer] {}); - - if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { - const I420BufferInterface* cropped_i420 = cropped_buffer->GetI420(); - auto nv12_buffer = output_buffer_pool_.CreateNV12Buffer( - cropped_i420->width(), cropped_i420->height()); - libyuv::I420ToNV12(cropped_i420->DataY(), cropped_i420->StrideY(), - cropped_i420->DataU(), cropped_i420->StrideU(), - cropped_i420->DataV(), cropped_i420->StrideV(), - nv12_buffer->MutableDataY(), nv12_buffer->StrideY(), - nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(), - i420_buffer->width(), i420_buffer->height()); - cropped_buffer = nv12_buffer; + RTC_DCHECK_LE(av_frame_->width, planar_yuv_buffer->width()); + RTC_DCHECK_LE(av_frame_->height, planar_yuv_buffer->height()); + switch (video_frame_buffer_type) { + case VideoFrameBuffer::Type::kI420: + case VideoFrameBuffer::Type::kI444: + case VideoFrameBuffer::Type::kI422: { + RTC_DCHECK_GE(av_frame_->data[kYPlaneIndex], planar_yuv8_buffer->DataY()); + RTC_DCHECK_LE( + av_frame_->data[kYPlaneIndex] + + av_frame_->linesize[kYPlaneIndex] * av_frame_->height, + planar_yuv8_buffer->DataY() + + planar_yuv8_buffer->StrideY() * planar_yuv8_buffer->height()); + RTC_DCHECK_GE(av_frame_->data[kUPlaneIndex], planar_yuv8_buffer->DataU()); + RTC_DCHECK_LE( + av_frame_->data[kUPlaneIndex] + + av_frame_->linesize[kUPlaneIndex] * + planar_yuv8_buffer->ChromaHeight(), + planar_yuv8_buffer->DataU() + planar_yuv8_buffer->StrideU() * + planar_yuv8_buffer->ChromaHeight()); + RTC_DCHECK_GE(av_frame_->data[kVPlaneIndex], planar_yuv8_buffer->DataV()); + RTC_DCHECK_LE( + av_frame_->data[kVPlaneIndex] + + av_frame_->linesize[kVPlaneIndex] * + planar_yuv8_buffer->ChromaHeight(), + planar_yuv8_buffer->DataV() + planar_yuv8_buffer->StrideV() * + planar_yuv8_buffer->ChromaHeight()); + break; + } + case VideoFrameBuffer::Type::kI010: + case VideoFrameBuffer::Type::kI210: { + RTC_DCHECK_GE( + av_frame_->data[kYPlaneIndex], + reinterpret_cast(planar_yuv16_buffer->DataY())); + RTC_DCHECK_LE( + av_frame_->data[kYPlaneIndex] + + av_frame_->linesize[kYPlaneIndex] * av_frame_->height, + reinterpret_cast(planar_yuv16_buffer->DataY()) + + planar_yuv16_buffer->StrideY() * 2 * + planar_yuv16_buffer->height()); + RTC_DCHECK_GE( + av_frame_->data[kUPlaneIndex], + reinterpret_cast(planar_yuv16_buffer->DataU())); + 
RTC_DCHECK_LE( + av_frame_->data[kUPlaneIndex] + + av_frame_->linesize[kUPlaneIndex] * + planar_yuv16_buffer->ChromaHeight(), + reinterpret_cast(planar_yuv16_buffer->DataU()) + + planar_yuv16_buffer->StrideU() * 2 * + planar_yuv16_buffer->ChromaHeight()); + RTC_DCHECK_GE( + av_frame_->data[kVPlaneIndex], + reinterpret_cast(planar_yuv16_buffer->DataV())); + RTC_DCHECK_LE( + av_frame_->data[kVPlaneIndex] + + av_frame_->linesize[kVPlaneIndex] * + planar_yuv16_buffer->ChromaHeight(), + reinterpret_cast(planar_yuv16_buffer->DataV()) + + planar_yuv16_buffer->StrideV() * 2 * + planar_yuv16_buffer->ChromaHeight()); + break; + } + default: + RTC_LOG(LS_ERROR) << "frame_buffer type: " + << static_cast(video_frame_buffer_type) + << " is not supported!"; + ReportError(); + return WEBRTC_VIDEO_CODEC_ERROR; + } + + rtc::scoped_refptr cropped_buffer; + switch (video_frame_buffer_type) { + case VideoFrameBuffer::Type::kI420: + cropped_buffer = WrapI420Buffer( + av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex], + av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex], + av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex], + av_frame_->linesize[kVPlaneIndex], + // To keep reference alive. + [frame_buffer] {}); + break; + case VideoFrameBuffer::Type::kI444: + cropped_buffer = WrapI444Buffer( + av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex], + av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex], + av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex], + av_frame_->linesize[kVPlaneIndex], + // To keep reference alive. + [frame_buffer] {}); + break; + case VideoFrameBuffer::Type::kI422: + cropped_buffer = WrapI422Buffer( + av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex], + av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex], + av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex], + av_frame_->linesize[kVPlaneIndex], + // To keep reference alive. + [frame_buffer] {}); + break; + case VideoFrameBuffer::Type::kI010: + cropped_buffer = WrapI010Buffer( + av_frame_->width, av_frame_->height, + reinterpret_cast(av_frame_->data[kYPlaneIndex]), + av_frame_->linesize[kYPlaneIndex] / 2, + reinterpret_cast(av_frame_->data[kUPlaneIndex]), + av_frame_->linesize[kUPlaneIndex] / 2, + reinterpret_cast(av_frame_->data[kVPlaneIndex]), + av_frame_->linesize[kVPlaneIndex] / 2, + // To keep reference alive. + [frame_buffer] {}); + break; + case VideoFrameBuffer::Type::kI210: + cropped_buffer = WrapI210Buffer( + av_frame_->width, av_frame_->height, + reinterpret_cast(av_frame_->data[kYPlaneIndex]), + av_frame_->linesize[kYPlaneIndex] / 2, + reinterpret_cast(av_frame_->data[kUPlaneIndex]), + av_frame_->linesize[kUPlaneIndex] / 2, + reinterpret_cast(av_frame_->data[kVPlaneIndex]), + av_frame_->linesize[kVPlaneIndex] / 2, + // To keep reference alive. + [frame_buffer] {}); + break; + default: + RTC_LOG(LS_ERROR) << "frame_buffer type: " + << static_cast(video_frame_buffer_type) + << " is not supported!"; + ReportError(); + return WEBRTC_VIDEO_CODEC_ERROR; } // Pass on color space from input frame if explicitly specified. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.h index ea83ab45ca..818d22be92 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.h @@ -87,8 +87,6 @@ class H264DecoderImpl : public H264Decoder { // Used by ffmpeg via `AVGetBuffer2()` to allocate I420 images. VideoFrameBufferPool ffmpeg_buffer_pool_; - // Used to allocate NV12 images if NV12 output is preferred. - VideoFrameBufferPool output_buffer_pool_; std::unique_ptr av_context_; std::unique_ptr av_frame_; @@ -98,9 +96,6 @@ class H264DecoderImpl : public H264Decoder { bool has_reported_error_; webrtc::H264BitstreamParser h264_bitstream_parser_; - - // Decoder should produce this format if possible. - const VideoFrameBuffer::Type preferred_output_format_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc index 13dcba84df..fc3fd195fb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc @@ -22,6 +22,7 @@ #include "absl/strings/match.h" #include "common_video/libyuv/include/webrtc_libyuv.h" +#include "modules/video_coding/svc/create_scalability_structure.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "modules/video_coding/utility/simulcast_utility.h" #include "rtc_base/checks.h" @@ -110,7 +111,6 @@ static void RtpFragmentize(EncodedImage* encoded_image, SFrameBSInfo* info) { required_capacity += layerInfo.pNalLengthInByte[nal]; } } - // TODO(nisse): Use a cache or buffer pool to avoid allocation? 
auto buffer = EncodedImageBuffer::Create(required_capacity); encoded_image->SetEncodedData(buffer); @@ -158,6 +158,7 @@ H264EncoderImpl::H264EncoderImpl(const cricket::VideoCodec& codec) encoders_.reserve(kMaxSimulcastStreams); configurations_.reserve(kMaxSimulcastStreams); tl0sync_limit_.reserve(kMaxSimulcastStreams); + svc_controllers_.reserve(kMaxSimulcastStreams); } H264EncoderImpl::~H264EncoderImpl() { @@ -197,6 +198,7 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst, encoded_images_.resize(number_of_streams); encoders_.resize(number_of_streams); pictures_.resize(number_of_streams); + svc_controllers_.resize(number_of_streams); configurations_.resize(number_of_streams); tl0sync_limit_.resize(number_of_streams); @@ -239,7 +241,7 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst, configurations_[i].width = codec_.simulcastStream[idx].width; configurations_[i].height = codec_.simulcastStream[idx].height; configurations_[i].max_frame_rate = static_cast(codec_.maxFramerate); - configurations_[i].frame_dropping_on = codec_.H264()->frameDroppingOn; + configurations_[i].frame_dropping_on = codec_.GetFrameDropEnabled(); configurations_[i].key_frame_interval = codec_.H264()->keyFrameInterval; configurations_[i].num_temporal_layers = std::max(codec_.H264()->numberOfTemporalLayers, @@ -282,6 +284,32 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst, encoded_images_[i].set_size(0); tl0sync_limit_[i] = configurations_[i].num_temporal_layers; + absl::optional scalability_mode; + switch (configurations_[i].num_temporal_layers) { + case 0: + break; + case 1: + scalability_mode = ScalabilityMode::kL1T1; + break; + case 2: + scalability_mode = ScalabilityMode::kL1T2; + break; + case 3: + scalability_mode = ScalabilityMode::kL1T3; + break; + default: + RTC_DCHECK_NOTREACHED(); + } + if (scalability_mode.has_value()) { + svc_controllers_[i] = + CreateScalabilityStructure(scalability_mode.value()); + if (svc_controllers_[i] == nullptr) { + RTC_LOG(LS_ERROR) << "Failed to create scalability structure"; + Release(); + ReportError(); + return WEBRTC_VIDEO_CODEC_ERROR; + } + } } SimulcastRateAllocator init_allocator(codec_); @@ -306,6 +334,7 @@ int32_t H264EncoderImpl::Release() { encoded_images_.clear(); pictures_.clear(); tl0sync_limit_.clear(); + svc_controllers_.clear(); return WEBRTC_VIDEO_CODEC_OK; } @@ -468,6 +497,12 @@ int32_t H264EncoderImpl::Encode( SFrameBSInfo info; memset(&info, 0, sizeof(SFrameBSInfo)); + std::vector layer_frames; + if (svc_controllers_[i]) { + layer_frames = svc_controllers_[i]->NextFrameConfig(send_key_frame); + RTC_CHECK_EQ(layer_frames.size(), 1); + } + // Encode! 
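InitEncode() above maps the configured number of temporal layers to a ScalabilityMode (1 to L1T1, 2 to L1T2, 3 to L1T3) before asking CreateScalabilityStructure for an SVC controller. A standalone sketch of that mapping with std::optional and a placeholder enum:

#include <cstdio>
#include <optional>

enum class Mode { kL1T1, kL1T2, kL1T3 };

std::optional<Mode> ModeForTemporalLayers(int num_temporal_layers) {
  switch (num_temporal_layers) {
    case 0:
      return std::nullopt;  // no SVC controller needed
    case 1:
      return Mode::kL1T1;
    case 2:
      return Mode::kL1T2;
    case 3:
      return Mode::kL1T3;
    default:
      return std::nullopt;  // the real code treats this as unreachable
  }
}

int main() {
  std::printf("%d\n", ModeForTemporalLayers(2).has_value() &&
                          *ModeForTemporalLayers(2) == Mode::kL1T2);  // 1
  std::printf("%d\n", ModeForTemporalLayers(0).has_value());          // 0
  return 0;
}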
int enc_ret = encoders_[i]->EncodeFrame(&pictures_[i], &info); if (enc_ret != 0) { @@ -481,6 +516,7 @@ int32_t H264EncoderImpl::Encode( encoded_images_[i]._encodedWidth = configurations_[i].width; encoded_images_[i]._encodedHeight = configurations_[i].height; encoded_images_[i].SetTimestamp(input_frame.timestamp()); + encoded_images_[i].SetColorSpace(input_frame.color_space()); encoded_images_[i]._frameType = ConvertToVideoFrameType(info.eFrameType); encoded_images_[i].SetSpatialIndex(configurations_[i].simulcast_idx); @@ -510,6 +546,15 @@ int32_t H264EncoderImpl::Encode( codec_specific.codecSpecific.H264.temporal_idx = tid; codec_specific.codecSpecific.H264.base_layer_sync = tid > 0 && tid < tl0sync_limit_[i]; + if (svc_controllers_[i]) { + if (layer_frames[0].TemporalId() != tid) { + RTC_LOG(LS_WARNING) + << "Encoder produced a frame for layer S" << (i + 1) << "T" + << tid + 1 << " that wasn't requested."; + continue; + } + encoded_images_[i].SetTemporalIndex(tid); + } if (codec_specific.codecSpecific.H264.base_layer_sync) { tl0sync_limit_[i] = tid; } @@ -517,6 +562,14 @@ int32_t H264EncoderImpl::Encode( tl0sync_limit_[i] = configurations_[i].num_temporal_layers; } } + if (svc_controllers_[i]) { + codec_specific.generic_frame_info = + svc_controllers_[i]->OnEncodeDone(layer_frames[0]); + if (send_key_frame && codec_specific.generic_frame_info.has_value()) { + codec_specific.template_structure = + svc_controllers_[i]->DependencyStructure(); + } + } encoded_image_callback_->OnEncodedImage(encoded_images_[i], &codec_specific); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h index 7a0f4b71ef..6673e3ddd7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h @@ -24,6 +24,7 @@ #include "api/video_codecs/video_encoder.h" #include "common_video/h264/h264_bitstream_parser.h" #include "modules/video_coding/codecs/h264/include/h264.h" +#include "modules/video_coding/svc/scalable_video_controller.h" #include "modules/video_coding/utility/quality_scaler.h" #include "third_party/openh264/src/codec/api/svc/codec_app_def.h" @@ -93,6 +94,7 @@ class H264EncoderImpl : public H264Encoder { std::vector> downscaled_buffers_; std::vector configurations_; std::vector encoded_images_; + std::vector> svc_controllers_; VideoCodec codec_; H264PacketizationMode packetization_mode_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/include/h264.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/include/h264.h index 8d1eebc79b..2635b53842 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/include/h264.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/include/h264.h @@ -16,8 +16,8 @@ #include #include -#include "absl/strings/string_view.h" #include "api/video_codecs/h264_profile_level_id.h" +#include "api/video_codecs/scalability_mode.h" #include "media/base/codec.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/system/rtc_export.h" @@ -30,7 +30,8 @@ struct SdpVideoFormat; RTC_EXPORT SdpVideoFormat CreateH264Format(H264Profile profile, H264Level level, - const std::string& packetization_mode); + const std::string& packetization_mode, + bool add_scalability_modes = false); // Set to disable the H.264 encoder/decoder implementations that 
are provided if // `rtc_use_h264` build flag is true (if false, this function does nothing). @@ -38,16 +39,22 @@ CreateH264Format(H264Profile profile, // and is not thread-safe. RTC_EXPORT void DisableRtcUseH264(); -// Returns a vector with all supported internal H264 profiles that we can +// Returns a vector with all supported internal H264 encode profiles that we can // negotiate in SDP, in order of preference. -std::vector SupportedH264Codecs(); +std::vector SupportedH264Codecs( + bool add_scalability_modes = false); + +// Returns a vector with all supported internal H264 decode profiles that we can +// negotiate in SDP, in order of preference. This will be available for receive +// only connections. +std::vector SupportedH264DecoderCodecs(); class RTC_EXPORT H264Encoder : public VideoEncoder { public: static std::unique_ptr Create(const cricket::VideoCodec& codec); // If H.264 is supported (any implementation). static bool IsSupported(); - static bool SupportsScalabilityMode(absl::string_view scalability_mode); + static bool SupportsScalabilityMode(ScalabilityMode scalability_mode); ~H264Encoder() override {} }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc index 6bc306dda8..0f05d1a89c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc @@ -116,7 +116,7 @@ MultiplexImageComponentHeader UnpackFrameHeader(const uint8_t* buffer) { ByteReader::ReadBigEndian(buffer + offset); offset += sizeof(uint32_t); - // TODO(nisse): This makes the wire format depend on the numeric values of the + // This makes the wire format depend on the numeric values of the // VideoCodecType and VideoFrameType enum constants. frame_header.codec_type = static_cast( ByteReader::ReadBigEndian(buffer + offset)); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/include/vp8.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/include/vp8.h index 22f8de623d..2fc647874f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/include/vp8.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/include/vp8.h @@ -14,8 +14,6 @@ #include #include -#include "absl/base/attributes.h" -#include "absl/strings/string_view.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/vp8_frame_buffer_controller.h" #include "modules/video_coding/include/video_codec_interface.h" @@ -40,12 +38,6 @@ class VP8Encoder { static std::unique_ptr Create(); static std::unique_ptr Create(Settings settings); - static bool SupportsScalabilityMode(absl::string_view scalability_mode); - - ABSL_DEPRECATED("") - static std::unique_ptr Create( - std::unique_ptr - frame_buffer_controller_factory); }; class VP8Decoder { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc index e47a0263a9..f2cce59255 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc @@ -128,10 +128,7 @@ LibvpxVp8Decoder::LibvpxVp8Decoder() key_frame_required_(true), deblock_params_(use_postproc_ ? 
GetPostProcParamsFromFieldTrialGroup() : absl::nullopt), - qp_smoother_(use_postproc_ ? new QpSmoother() : nullptr), - preferred_output_format_(field_trial::IsEnabled("WebRTC-NV12Decode") - ? VideoFrameBuffer::Type::kNV12 - : VideoFrameBuffer::Type::kI420) {} + qp_smoother_(use_postproc_ ? new QpSmoother() : nullptr) {} LibvpxVp8Decoder::~LibvpxVp8Decoder() { inited_ = true; // in order to do the actual release @@ -188,7 +185,7 @@ int LibvpxVp8Decoder::Decode(const EncodedImage& input_image, return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } -// Post process configurations. + // Post process configurations. if (use_postproc_) { vp8_postproc_cfg_t ppcfg; // MFQE enabled to reduce key frame popping. @@ -321,35 +318,17 @@ int LibvpxVp8Decoder::ReturnFrame( // Allocate memory for decoded image. rtc::scoped_refptr buffer; - if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { - // Convert instead of making a copy. - // Note: libvpx doesn't support creating NV12 image directly. - // Due to the bitstream structure such a change would just hide the - // conversion operation inside the decode call. - rtc::scoped_refptr nv12_buffer = - buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h); - buffer = nv12_buffer; - if (nv12_buffer.get()) { - libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], - img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], - img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], - nv12_buffer->MutableDataY(), nv12_buffer->StrideY(), - nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(), - img->d_w, img->d_h); - } - } else { - rtc::scoped_refptr i420_buffer = - buffer_pool_.CreateI420Buffer(img->d_w, img->d_h); - buffer = i420_buffer; - if (i420_buffer.get()) { - libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], - img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], - img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], - i420_buffer->MutableDataY(), i420_buffer->StrideY(), - i420_buffer->MutableDataU(), i420_buffer->StrideU(), - i420_buffer->MutableDataV(), i420_buffer->StrideV(), - img->d_w, img->d_h); - } + rtc::scoped_refptr i420_buffer = + buffer_pool_.CreateI420Buffer(img->d_w, img->d_h); + buffer = i420_buffer; + if (i420_buffer.get()) { + libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], + img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], + img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], + i420_buffer->MutableDataY(), i420_buffer->StrideY(), + i420_buffer->MutableDataU(), i420_buffer->StrideU(), + i420_buffer->MutableDataV(), i420_buffer->StrideV(), + img->d_w, img->d_h); } if (!buffer.get()) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h index ce283918c5..6031886d2d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h @@ -67,9 +67,6 @@ class LibvpxVp8Decoder : public VideoDecoder { bool key_frame_required_; const absl::optional deblock_params_; const std::unique_ptr qp_smoother_; - - // Decoder should produce this format if possible. 
- const VideoFrameBuffer::Type preferred_output_format_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc index 32e18edc4c..a24539814a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc @@ -29,7 +29,9 @@ #include "api/video_codecs/vp8_temporal_layers_factory.h" #include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" +#include "modules/video_coding/codecs/vp8/vp8_scalability.h" #include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/svc/scalability_mode_util.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "modules/video_coding/utility/simulcast_utility.h" #include "rtc_base/checks.h" @@ -49,8 +51,6 @@ constexpr char kVP8IosMaxNumberOfThreadFieldTrial[] = constexpr char kVP8IosMaxNumberOfThreadFieldTrialParameter[] = "max_thread"; #endif -constexpr absl::string_view kSupportedScalabilityModes[] = {"L1T2", "L1T3"}; - constexpr char kVp8ForcePartitionResilience[] = "WebRTC-VP8-ForcePartitionResilience"; @@ -222,25 +222,6 @@ std::unique_ptr VP8Encoder::Create( std::move(settings)); } -std::unique_ptr VP8Encoder::Create( - std::unique_ptr - frame_buffer_controller_factory) { - VP8Encoder::Settings settings; - settings.frame_buffer_controller_factory = - std::move(frame_buffer_controller_factory); - return std::make_unique(LibvpxInterface::Create(), - std::move(settings)); -} - -bool VP8Encoder::SupportsScalabilityMode(absl::string_view scalability_mode) { - for (const auto& entry : kSupportedScalabilityModes) { - if (entry == scalability_mode) { - return true; - } - } - return false; -} - vpx_enc_frame_flags_t LibvpxVp8Encoder::EncodeFlags( const Vp8FrameConfig& references) { RTC_DCHECK(!references.drop_frame); @@ -455,6 +436,13 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } + if (absl::optional scalability_mode = + inst->GetScalabilityMode(); + scalability_mode.has_value() && + !VP8SupportsScalabilityMode(*scalability_mode)) { + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + num_active_streams_ = 0; for (int i = 0; i < inst->numberOfSimulcastStreams; ++i) { if (inst->simulcastStream[i].active) { @@ -591,7 +579,7 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, } // Allow the user to set the complexity for the base stream. - switch (inst->VP8().complexity) { + switch (inst->GetVideoEncoderComplexity()) { case VideoCodecComplexity::kComplexityHigh: cpu_speed_[0] = -5; break; @@ -856,16 +844,19 @@ uint32_t LibvpxVp8Encoder::MaxIntraTarget(uint32_t optimalBuffersize) { } uint32_t LibvpxVp8Encoder::FrameDropThreshold(size_t spatial_idx) const { - bool enable_frame_dropping = codec_.VP8().frameDroppingOn; + if (!codec_.GetFrameDropEnabled()) { + return 0; + } + // If temporal layers are used, they get to override the frame dropping // setting, as eg. ScreenshareLayers does not work as intended with frame // dropping on and DefaultTemporalLayers will have performance issues with // frame dropping off. 
RTC_DCHECK(frame_buffer_controller_); RTC_DCHECK_LT(spatial_idx, frame_buffer_controller_->StreamCount()); - enable_frame_dropping = - frame_buffer_controller_->SupportsEncoderFrameDropping(spatial_idx); - return enable_frame_dropping ? 30 : 0; + return frame_buffer_controller_->SupportsEncoderFrameDropping(spatial_idx) + ? 30 + : 0; } size_t LibvpxVp8Encoder::SteadyStateSize(int sid, int tid) { @@ -1134,8 +1125,6 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image, } } - // TODO(nisse): Introduce some buffer cache or buffer pool, to reduce - // allocations and/or copy operations. auto buffer = EncodedImageBuffer::Create(encoded_size); iter = NULL; @@ -1165,10 +1154,15 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image, encoded_images_[encoder_idx].SetSpatialIndex(stream_idx); PopulateCodecSpecific(&codec_specific, *pkt, stream_idx, encoder_idx, input_image.timestamp()); + if (codec_specific.codecSpecific.VP8.temporalIdx != kNoTemporalIdx) { + encoded_images_[encoder_idx].SetTemporalIndex( + codec_specific.codecSpecific.VP8.temporalIdx); + } break; } } encoded_images_[encoder_idx].SetTimestamp(input_image.timestamp()); + encoded_images_[encoder_idx].SetColorSpace(input_image.color_space()); encoded_images_[encoder_idx].SetRetransmissionAllowed( retransmission_allowed); @@ -1362,7 +1356,7 @@ LibvpxVp8Encoder::PrepareBuffers(rtc::scoped_refptr buffer) { // Prepare `raw_images_` from `mapped_buffer` and, if simulcast, scaled // versions of `buffer`. std::vector> prepared_buffers; - SetRawImagePlanes(&raw_images_[0], mapped_buffer); + SetRawImagePlanes(&raw_images_[0], mapped_buffer.get()); prepared_buffers.push_back(mapped_buffer); for (size_t i = 1; i < encoders_.size(); ++i) { // Native buffers should implement optimized scaling and is the preferred @@ -1405,7 +1399,7 @@ LibvpxVp8Encoder::PrepareBuffers(rtc::scoped_refptr buffer) { << VideoFrameBufferTypeToString(mapped_buffer->type()); return {}; } - SetRawImagePlanes(&raw_images_[i], scaled_buffer); + SetRawImagePlanes(&raw_images_[i], scaled_buffer.get()); prepared_buffers.push_back(scaled_buffer); } return prepared_buffers; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h index dba59576a3..bbbf252745 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h @@ -25,6 +25,7 @@ #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/utility/framerate_controller_deprecated.h" +#include "modules/video_coding/utility/vp8_constants.h" #include "rtc_base/experiments/cpu_speed_experiment.h" #include "rtc_base/experiments/encoder_info_settings.h" #include "rtc_base/experiments/rate_control_settings.h" @@ -138,7 +139,7 @@ class LibvpxVp8Encoder : public VideoEncoder { // Framerate is limited to this value in steady state. float framerate_limit = 5.0; // This qp or below is considered a steady state. - int steady_state_qp = 15; + int steady_state_qp = kVp8SteadyStateQpThreshold; // Frames of at least this percentage below ideal for configured bitrate are // considered in a steady state. 
int steady_state_undershoot_percentage = 30; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/vp8_scalability.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/vp8_scalability.cc new file mode 100644 index 0000000000..9c7495ddf7 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/vp8_scalability.cc @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/codecs/vp8/vp8_scalability.h" + +namespace webrtc { + +bool VP8SupportsScalabilityMode(ScalabilityMode scalability_mode) { + for (const auto& entry : kVP8SupportedScalabilityModes) { + if (entry == scalability_mode) { + return true; + } + } + return false; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/vp8_scalability.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/vp8_scalability.h new file mode 100644 index 0000000000..923f159118 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/vp8_scalability.h @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_CODECS_VP8_VP8_SCALABILITY_H_ +#define MODULES_VIDEO_CODING_CODECS_VP8_VP8_SCALABILITY_H_ + +#include "api/video_codecs/scalability_mode.h" + +namespace webrtc { + +inline constexpr ScalabilityMode kVP8SupportedScalabilityModes[] = { + ScalabilityMode::kL1T1, ScalabilityMode::kL1T2, ScalabilityMode::kL1T3}; +bool VP8SupportsScalabilityMode(ScalabilityMode scalability_mode); + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_CODECS_VP8_VP8_SCALABILITY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/include/vp9.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/include/vp9.h index 829680a806..79d403ded3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/include/vp9.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/include/vp9.h @@ -15,7 +15,7 @@ #include #include -#include "absl/strings/string_view.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" #include "media/base/codec.h" #include "modules/video_coding/include/video_codec_interface.h" @@ -24,7 +24,8 @@ namespace webrtc { // Returns a vector with all supported internal VP9 profiles that we can // negotiate in SDP, in order of preference. -std::vector SupportedVP9Codecs(); +std::vector SupportedVP9Codecs( + bool add_scalability_modes = false); // Returns a vector with all supported internal VP9 decode profiles in order of // preference. These will be availble for receive-only connections. 
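The enum-based scalability checks introduced above replace the old free-form strings ("L1T2", "L1T3", ...). As a rough illustration of how a caller probes support through the new API (not part of the patch; the helper function name is made up):

// Illustrative sketch: querying scalability support with the ScalabilityMode enum.
#include "api/video_codecs/scalability_mode.h"
#include "modules/video_coding/codecs/vp8/vp8_scalability.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"

bool CanOfferL1T3() {  // hypothetical helper, not in this patch
  // VP8 only advertises the single-spatial-layer modes in kVP8SupportedScalabilityModes.
  const bool vp8_ok =
      webrtc::VP8SupportsScalabilityMode(webrtc::ScalabilityMode::kL1T3);
  // VP9 support is derived from whether a scalability structure can be built for the mode.
  const bool vp9_ok =
      webrtc::VP9Encoder::SupportsScalabilityMode(webrtc::ScalabilityMode::kL1T3);
  return vp8_ok && vp9_ok;
}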
@@ -37,7 +38,7 @@ class VP9Encoder : public VideoEncoder { static std::unique_ptr Create(); // Parses VP9 Profile from `codec` and returns the appropriate implementation. static std::unique_ptr Create(const cricket::VideoCodec& codec); - static bool SupportsScalabilityMode(absl::string_view scalability_mode); + static bool SupportsScalabilityMode(ScalabilityMode scalability_mode); ~VP9Encoder() override {} }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/include/vp9_globals.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/include/vp9_globals.h index 87dafe4cdf..f67215ec77 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/include/vp9_globals.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/include/vp9_globals.h @@ -37,7 +37,6 @@ enum TemporalStructureMode { kTemporalStructureMode1, // 1 temporal layer structure - i.e., IPPP... kTemporalStructureMode2, // 2 temporal layers 01... kTemporalStructureMode3, // 3 temporal layers 0212... - kTemporalStructureMode4 // 3 temporal layers 02120212... }; struct GofInfoVP9 { @@ -46,14 +45,14 @@ struct GofInfoVP9 { case kTemporalStructureMode1: num_frames_in_gof = 1; temporal_idx[0] = 0; - temporal_up_switch[0] = false; + temporal_up_switch[0] = true; num_ref_pics[0] = 1; pid_diff[0][0] = 1; break; case kTemporalStructureMode2: num_frames_in_gof = 2; temporal_idx[0] = 0; - temporal_up_switch[0] = false; + temporal_up_switch[0] = true; num_ref_pics[0] = 1; pid_diff[0][0] = 2; @@ -65,7 +64,7 @@ struct GofInfoVP9 { case kTemporalStructureMode3: num_frames_in_gof = 4; temporal_idx[0] = 0; - temporal_up_switch[0] = false; + temporal_up_switch[0] = true; num_ref_pics[0] = 1; pid_diff[0][0] = 4; @@ -84,52 +83,6 @@ struct GofInfoVP9 { num_ref_pics[3] = 1; pid_diff[3][0] = 1; break; - case kTemporalStructureMode4: - num_frames_in_gof = 8; - temporal_idx[0] = 0; - temporal_up_switch[0] = false; - num_ref_pics[0] = 1; - pid_diff[0][0] = 4; - - temporal_idx[1] = 2; - temporal_up_switch[1] = true; - num_ref_pics[1] = 1; - pid_diff[1][0] = 1; - - temporal_idx[2] = 1; - temporal_up_switch[2] = true; - num_ref_pics[2] = 1; - pid_diff[2][0] = 2; - - temporal_idx[3] = 2; - temporal_up_switch[3] = false; - num_ref_pics[3] = 2; - pid_diff[3][0] = 1; - pid_diff[3][1] = 2; - - temporal_idx[4] = 0; - temporal_up_switch[4] = false; - num_ref_pics[4] = 1; - pid_diff[4][0] = 4; - - temporal_idx[5] = 2; - temporal_up_switch[5] = false; - num_ref_pics[5] = 2; - pid_diff[5][0] = 1; - pid_diff[5][1] = 2; - - temporal_idx[6] = 1; - temporal_up_switch[6] = false; - num_ref_pics[6] = 2; - pid_diff[6][0] = 2; - pid_diff[6][1] = 4; - - temporal_idx[7] = 2; - temporal_up_switch[7] = false; - num_ref_pics[7] = 2; - pid_diff[7][0] = 1; - pid_diff[7][1] = 2; - break; default: RTC_DCHECK_NOTREACHED(); } @@ -195,7 +148,10 @@ struct RTPVideoHeaderVP9 { uint8_t temporal_idx; // Temporal layer index, or kNoTemporalIdx. uint8_t spatial_idx; // Spatial layer index, or kNoSpatialIdx. bool temporal_up_switch; // True if upswitch to higher frame rate is possible - // starting from this frame. + // meaning subsequent higher temporal layer pictures + // will not depend on any picture before the current + // picture (in coding order) with temporal layer ID + // greater than `temporal_idx` of this frame. bool inter_layer_predicted; // Frame is dependent on directly lower spatial // layer frame. 
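The GofInfoVP9 tables above drop the unused kTemporalStructureMode4 entry, and temporal-layer-0 frames are now flagged as up-switch points, matching the expanded temporal_up_switch definition added to RTPVideoHeaderVP9. A minimal sketch of how the remaining group-of-frames tables are read (the printing is illustrative only):

// Sketch: inspecting the 3-temporal-layer GOF table (temporal ids 0 2 1 2).
#include <cstdio>
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"

void DumpMode3Gof() {
  webrtc::GofInfoVP9 gof;
  gof.SetGofInfoVP9(webrtc::kTemporalStructureMode3);
  for (size_t i = 0; i < gof.num_frames_in_gof; ++i) {
    std::printf("frame %zu: tid=%d up_switch=%d refs=%d pid_diff0=%d\n", i,
                static_cast<int>(gof.temporal_idx[i]),
                gof.temporal_up_switch[i] ? 1 : 0,
                static_cast<int>(gof.num_ref_pics[i]),
                static_cast<int>(gof.pid_diff[i][0]));
  }
}

With this patch the base-layer entry now reports up_switch=1, consistent with the clarified definition: no later higher-temporal-layer picture in the group depends on anything before the base-layer frame.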
diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc index 2faacb83ac..3c3a9898de 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc @@ -95,16 +95,10 @@ ColorSpace ExtractVP9ColorSpace(vpx_color_space_t space_t, } // namespace LibvpxVp9Decoder::LibvpxVp9Decoder() - : LibvpxVp9Decoder(FieldTrialBasedConfig()) {} -LibvpxVp9Decoder::LibvpxVp9Decoder(const WebRtcKeyValueConfig& trials) : decode_complete_callback_(nullptr), inited_(false), decoder_(nullptr), - key_frame_required_(true), - preferred_output_format_( - absl::StartsWith(trials.Lookup("WebRTC-NV12Decode"), "Enabled") - ? VideoFrameBuffer::Type::kNV12 - : VideoFrameBuffer::Type::kI420) {} + key_frame_required_(true) {} LibvpxVp9Decoder::~LibvpxVp9Decoder() { inited_ = true; // in order to do the actual release @@ -177,8 +171,7 @@ bool LibvpxVp9Decoder::Configure(const Settings& settings) { // Always start with a complete key frame. key_frame_required_ = true; if (absl::optional buffer_pool_size = settings.buffer_pool_size()) { - if (!libvpx_buffer_pool_.Resize(*buffer_pool_size) || - !output_buffer_pool_.Resize(*buffer_pool_size)) { + if (!libvpx_buffer_pool_.Resize(*buffer_pool_size)) { return false; } } @@ -275,61 +268,47 @@ int LibvpxVp9Decoder::ReturnFrame( // This buffer contains all of `img`'s image data, a reference counted // Vp9FrameBuffer. (libvpx is done with the buffers after a few // vpx_codec_decode calls or vpx_codec_destroy). - rtc::scoped_refptr img_buffer = - static_cast(img->fb_priv); + rtc::scoped_refptr img_buffer( + static_cast(img->fb_priv)); // The buffer can be used directly by the VideoFrame (without copy) by // using a Wrapped*Buffer. rtc::scoped_refptr img_wrapped_buffer; - switch (img->bit_depth) { - case 8: - if (img->fmt == VPX_IMG_FMT_I420) { - if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { - rtc::scoped_refptr nv12_buffer = - output_buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h); - if (!nv12_buffer.get()) { - // Buffer pool is full. - return WEBRTC_VIDEO_CODEC_NO_OUTPUT; - } - img_wrapped_buffer = nv12_buffer; - libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], - img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], - img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], - nv12_buffer->MutableDataY(), - nv12_buffer->StrideY(), - nv12_buffer->MutableDataUV(), - nv12_buffer->StrideUV(), img->d_w, img->d_h); - // No holding onto img_buffer as it's no longer needed and can be - // reused. - } else { - img_wrapped_buffer = WrapI420Buffer( - img->d_w, img->d_h, img->planes[VPX_PLANE_Y], - img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], - img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], - img->stride[VPX_PLANE_V], - // WrappedI420Buffer's mechanism for allowing the release of its - // frame buffer is through a callback function. This is where we - // should release `img_buffer`. - [img_buffer] {}); - } - } else if (img->fmt == VPX_IMG_FMT_I444) { - img_wrapped_buffer = WrapI444Buffer( - img->d_w, img->d_h, img->planes[VPX_PLANE_Y], - img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], - img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], - img->stride[VPX_PLANE_V], - // WrappedI444Buffer's mechanism for allowing the release of its - // frame buffer is through a callback function. 
This is where we - // should release `img_buffer`. - [img_buffer] {}); - } else { - RTC_LOG(LS_ERROR) - << "Unsupported pixel format produced by the decoder: " - << static_cast(img->fmt); - return WEBRTC_VIDEO_CODEC_NO_OUTPUT; - } + switch (img->fmt) { + case VPX_IMG_FMT_I420: + img_wrapped_buffer = WrapI420Buffer( + img->d_w, img->d_h, img->planes[VPX_PLANE_Y], + img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], + img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], + img->stride[VPX_PLANE_V], + // WrappedI420Buffer's mechanism for allowing the release of its + // frame buffer is through a callback function. This is where we + // should release `img_buffer`. + [img_buffer] {}); + break; + case VPX_IMG_FMT_I422: + img_wrapped_buffer = WrapI422Buffer( + img->d_w, img->d_h, img->planes[VPX_PLANE_Y], + img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], + img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], + img->stride[VPX_PLANE_V], + // WrappedI444Buffer's mechanism for allowing the release of its + // frame buffer is through a callback function. This is where we + // should release `img_buffer`. + [img_buffer] {}); break; - case 10: + case VPX_IMG_FMT_I444: + img_wrapped_buffer = WrapI444Buffer( + img->d_w, img->d_h, img->planes[VPX_PLANE_Y], + img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], + img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], + img->stride[VPX_PLANE_V], + // WrappedI444Buffer's mechanism for allowing the release of its + // frame buffer is through a callback function. This is where we + // should release `img_buffer`. + [img_buffer] {}); + break; + case VPX_IMG_FMT_I42016: img_wrapped_buffer = WrapI010Buffer( img->d_w, img->d_h, reinterpret_cast(img->planes[VPX_PLANE_Y]), @@ -339,9 +318,19 @@ int LibvpxVp9Decoder::ReturnFrame( reinterpret_cast(img->planes[VPX_PLANE_V]), img->stride[VPX_PLANE_V] / 2, [img_buffer] {}); break; + case VPX_IMG_FMT_I42216: + img_wrapped_buffer = WrapI210Buffer( + img->d_w, img->d_h, + reinterpret_cast(img->planes[VPX_PLANE_Y]), + img->stride[VPX_PLANE_Y] / 2, + reinterpret_cast(img->planes[VPX_PLANE_U]), + img->stride[VPX_PLANE_U] / 2, + reinterpret_cast(img->planes[VPX_PLANE_V]), + img->stride[VPX_PLANE_V] / 2, [img_buffer] {}); + break; default: - RTC_LOG(LS_ERROR) << "Unsupported bit depth produced by the decoder: " - << img->bit_depth; + RTC_LOG(LS_ERROR) << "Unsupported pixel format produced by the decoder: " + << static_cast(img->fmt); return WEBRTC_VIDEO_CODEC_NO_OUTPUT; } @@ -384,7 +373,6 @@ int LibvpxVp9Decoder::Release() { // still referenced externally are deleted once fully released, not returning // to the pool. 
libvpx_buffer_pool_.ClearPool(); - output_buffer_pool_.Release(); inited_ = false; return ret_val; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h index aaa21936c3..cec0c657c0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h @@ -14,9 +14,7 @@ #ifdef RTC_ENABLE_VP9 -#include "api/transport/webrtc_key_value_config.h" #include "api/video_codecs/video_decoder.h" -#include "common_video/include/video_frame_buffer_pool.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h" #include @@ -26,8 +24,6 @@ namespace webrtc { class LibvpxVp9Decoder : public VP9Decoder { public: LibvpxVp9Decoder(); - explicit LibvpxVp9Decoder(const WebRtcKeyValueConfig& trials); - virtual ~LibvpxVp9Decoder(); bool Configure(const Settings& settings) override; @@ -51,16 +47,11 @@ class LibvpxVp9Decoder : public VP9Decoder { // Memory pool used to share buffers between libvpx and webrtc. Vp9FrameBufferPool libvpx_buffer_pool_; - // Buffer pool used to allocate additionally needed NV12 buffers. - VideoFrameBufferPool output_buffer_pool_; DecodedImageCallback* decode_complete_callback_; bool inited_; vpx_codec_ctx_t* decoder_; bool key_frame_required_; Settings current_settings_; - - // Decoder should produce this format if possible. - const VideoFrameBuffer::Type preferred_output_format_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc index 6744cf67c0..5e9d3da9f1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc @@ -27,6 +27,7 @@ #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/svc/scalability_mode_util.h" #include "modules/video_coding/svc/scalable_video_controller.h" #include "modules/video_coding/svc/scalable_video_controller_no_layering.h" #include "modules/video_coding/svc/svc_rate_allocator.h" @@ -142,7 +143,14 @@ std::unique_ptr CreateVp9ScalabilityStructure( } } - auto scalability_structure_controller = CreateScalabilityStructure(name); + absl::optional scalability_mode = + ScalabilityModeFromString(name); + if (!scalability_mode.has_value()) { + RTC_LOG(LS_WARNING) << "Invalid scalability mode " << name; + return nullptr; + } + auto scalability_structure_controller = + CreateScalabilityStructure(*scalability_mode); if (scalability_structure_controller == nullptr) { RTC_LOG(LS_WARNING) << "Unsupported scalability structure " << name; } else { @@ -184,6 +192,17 @@ vpx_svc_ref_frame_config_t Vp9References( return ref_config; } +bool AllowDenoising() { + // Do not enable the denoiser on ARM since optimization is pending. + // Denoiser is on by default on other platforms. 
+#if !defined(WEBRTC_ARCH_ARM) && !defined(WEBRTC_ARCH_ARM64) && \ + !defined(ANDROID) + return true; +#else + return false; +#endif +} + } // namespace void LibvpxVp9Encoder::EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt, @@ -194,7 +213,7 @@ void LibvpxVp9Encoder::EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt, LibvpxVp9Encoder::LibvpxVp9Encoder(const cricket::VideoCodec& codec, std::unique_ptr interface, - const WebRtcKeyValueConfig& trials) + const FieldTrialsView& trials) : libvpx_(std::move(interface)), encoded_image_(), encoded_complete_callback_(nullptr), @@ -386,6 +405,15 @@ bool LibvpxVp9Encoder::SetSvcRates( } } + if (seen_active_layer && performance_flags_.use_per_layer_speed) { + bool denoiser_on = + AllowDenoising() && codec_.VP9()->denoisingOn && + performance_flags_by_spatial_index_[num_active_spatial_layers_ - 1] + .allow_denoising; + libvpx_->codec_control(encoder_, VP9E_SET_NOISE_SENSITIVITY, + denoiser_on ? 1 : 0); + } + if (higher_layers_enabled && !force_key_frame_) { // Prohibit drop of all layers for the next frame, so newly enabled // layer would have a valid spatial reference. @@ -438,7 +466,7 @@ void LibvpxVp9Encoder::EnableSpatialLayer(int sid) { } for (int tid = 0; tid < num_temporal_layers_; ++tid) { config_->layer_target_bitrate[sid * num_temporal_layers_ + tid] = - current_bitrate_allocation_.GetBitrate(sid, tid) / 1000; + current_bitrate_allocation_.GetTemporalLayerSum(sid, tid) / 1000; } config_->ss_target_bitrate[sid] = current_bitrate_allocation_.GetSpatialLayerSum(sid) / 1000; @@ -471,7 +499,7 @@ void LibvpxVp9Encoder::SetActiveSpatialLayers() { void LibvpxVp9Encoder::SetRates(const RateControlParameters& parameters) { if (!inited_) { - RTC_LOG(LS_WARNING) << "SetRates() calll while uninitialzied."; + RTC_LOG(LS_WARNING) << "SetRates() called while uninitialized."; return; } if (encoder_->err) { @@ -542,14 +570,32 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, force_key_frame_ = true; pics_since_key_ = 0; - num_spatial_layers_ = inst->VP9().numberOfSpatialLayers; - RTC_DCHECK_GT(num_spatial_layers_, 0); - num_temporal_layers_ = inst->VP9().numberOfTemporalLayers; - if (num_temporal_layers_ == 0) { - num_temporal_layers_ = 1; + absl::optional scalability_mode = inst->GetScalabilityMode(); + if (scalability_mode.has_value()) { + // Use settings from `ScalabilityMode` identifier. 
+ RTC_LOG(LS_INFO) << "Create scalability structure " + << ScalabilityModeToString(*scalability_mode); + svc_controller_ = CreateScalabilityStructure(*scalability_mode); + if (!svc_controller_) { + RTC_LOG(LS_WARNING) << "Failed to create scalability structure."; + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + ScalableVideoController::StreamLayersConfig info = + svc_controller_->StreamConfig(); + num_spatial_layers_ = info.num_spatial_layers; + num_temporal_layers_ = info.num_temporal_layers; + inter_layer_pred_ = ScalabilityModeToInterLayerPredMode(*scalability_mode); + } else { + num_spatial_layers_ = inst->VP9().numberOfSpatialLayers; + RTC_DCHECK_GT(num_spatial_layers_, 0); + num_temporal_layers_ = inst->VP9().numberOfTemporalLayers; + if (num_temporal_layers_ == 0) { + num_temporal_layers_ = 1; + } + inter_layer_pred_ = inst->VP9().interLayerPred; + svc_controller_ = CreateVp9ScalabilityStructure(*inst); } - svc_controller_ = CreateVp9ScalabilityStructure(*inst); framerate_controller_ = std::vector( num_spatial_layers_, FramerateControllerDeprecated(codec_.maxFramerate)); @@ -582,6 +628,10 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, config_->g_profile = 2; config_->g_input_bit_depth = 10; break; + case VP9Profile::kProfile3: + // Encoding of profile 3 is not implemented. + RTC_DCHECK_NOTREACHED(); + break; } // Creating a wrapper to the image - setting image data to nullptr. Actual @@ -601,7 +651,7 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, config_->g_lag_in_frames = 0; // 0- no frame lagging config_->g_threads = 1; // Rate control settings. - config_->rc_dropframe_thresh = inst->VP9().frameDroppingOn ? 30 : 0; + config_->rc_dropframe_thresh = inst->GetFrameDropEnabled() ? 30 : 0; config_->rc_end_usage = VPX_CBR; config_->g_pass = VPX_RC_ONE_PASS; config_->rc_min_quantizer = @@ -633,8 +683,6 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, is_flexible_mode_ = inst->VP9().flexibleMode; - inter_layer_pred_ = inst->VP9().interLayerPred; - if (num_spatial_layers_ > 1 && codec_.mode == VideoCodecMode::kScreensharing && !is_flexible_mode_) { RTC_LOG(LS_ERROR) << "Flexible mode is required for screenshare with " @@ -690,7 +738,7 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } } - ref_buf_.clear(); + ref_buf_ = {}; return InitAndSetControlSettings(inst); } @@ -775,6 +823,10 @@ int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { } } + UpdatePerformanceFlags(); + RTC_DCHECK_EQ(performance_flags_by_spatial_index_.size(), + static_cast(num_spatial_layers_)); + SvcRateAllocator init_allocator(codec_); current_bitrate_allocation_ = init_allocator.Allocate(VideoBitrateAllocationParameters( @@ -791,9 +843,6 @@ int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } - UpdatePerformanceFlags(); - RTC_DCHECK_EQ(performance_flags_by_spatial_index_.size(), - static_cast(num_spatial_layers_)); if (performance_flags_.use_per_layer_speed) { for (int si = 0; si < num_spatial_layers_; ++si) { svc_params_.speed_per_layer[si] = @@ -801,6 +850,12 @@ int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { svc_params_.loopfilter_ctrl[si] = performance_flags_by_spatial_index_[si].deblock_mode; } + bool denoiser_on = + AllowDenoising() && inst->VP9().denoisingOn && + performance_flags_by_spatial_index_[num_spatial_layers_ - 1] + .allow_denoising; + libvpx_->codec_control(encoder_, VP9E_SET_NOISE_SENSITIVITY, + 
denoiser_on ? 1 : 0); } libvpx_->codec_control(encoder_, VP8E_SET_MAX_INTRA_BITRATE_PCT, @@ -888,13 +943,10 @@ int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { // Turn on row-based multithreading. libvpx_->codec_control(encoder_, VP9E_SET_ROW_MT, 1); -#if !defined(WEBRTC_ARCH_ARM) && !defined(WEBRTC_ARCH_ARM64) && \ - !defined(ANDROID) - // Do not enable the denoiser on ARM since optimization is pending. - // Denoiser is on by default on other platforms. - libvpx_->codec_control(encoder_, VP9E_SET_NOISE_SENSITIVITY, - inst->VP9().denoisingOn ? 1 : 0); -#endif + if (AllowDenoising() && !performance_flags_.use_per_layer_speed) { + libvpx_->codec_control(encoder_, VP9E_SET_NOISE_SENSITIVITY, + inst->VP9().denoisingOn ? 1 : 0); + } if (codec_.mode == VideoCodecMode::kScreensharing) { // Adjust internal parameters to screen content. @@ -952,6 +1004,9 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, if (layer_frames_.empty()) { return WEBRTC_VIDEO_CODEC_ERROR; } + if (layer_frames_.front().IsKeyframe()) { + force_key_frame_ = true; + } } vpx_svc_layer_id_t layer_id = {0}; @@ -959,7 +1014,7 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, const size_t gof_idx = (pics_since_key_ + 1) % gof_.num_frames_in_gof; layer_id.temporal_layer_id = gof_.temporal_idx[gof_idx]; - if (VideoCodecMode::kScreensharing == codec_.mode) { + if (codec_.mode == VideoCodecMode::kScreensharing) { const uint32_t frame_timestamp_ms = 1000 * input_image.timestamp() / kVideoPayloadTypeFrequency; @@ -1146,6 +1201,10 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, raw_->stride[VPX_PLANE_V] = i010_buffer->StrideV() * 2; break; } + case VP9Profile::kProfile3: { + RTC_DCHECK_NOTREACHED(); + break; + } } vpx_enc_frame_flags_t flags = 0; @@ -1212,8 +1271,8 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, absl::optional* spatial_idx, - const vpx_codec_cx_pkt& pkt, - uint32_t timestamp) { + absl::optional* temporal_idx, + const vpx_codec_cx_pkt& pkt) { RTC_CHECK(codec_specific != nullptr); codec_specific->codecType = kVideoCodecVP9; CodecSpecificInfoVP9* vp9_info = &(codec_specific->codecSpecific.VP9); @@ -1238,8 +1297,10 @@ bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, if (num_temporal_layers_ == 1) { RTC_CHECK_EQ(layer_id.temporal_layer_id, 0); vp9_info->temporal_idx = kNoTemporalIdx; + *temporal_idx = absl::nullopt; } else { vp9_info->temporal_idx = layer_id.temporal_layer_id; + *temporal_idx = layer_id.temporal_layer_id; } if (num_active_spatial_layers_ == 1) { RTC_CHECK_EQ(layer_id.spatial_layer_id, 0); @@ -1248,9 +1309,6 @@ bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, *spatial_idx = layer_id.spatial_layer_id; } - // TODO(asapersson): this info has to be obtained from the encoder. 
- vp9_info->temporal_up_switch = false; - const bool is_key_pic = (pics_since_key_ == 0); const bool is_inter_layer_pred_allowed = (inter_layer_pred_ == InterLayerPredMode::kOn || @@ -1283,6 +1341,20 @@ bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, vp9_info); if (vp9_info->flexible_mode) { vp9_info->gof_idx = kNoGofIdx; + if (!svc_controller_) { + if (num_temporal_layers_ == 1) { + vp9_info->temporal_up_switch = true; + } else { + // In flexible mode with > 1 temporal layer but no SVC controller we + // can't techincally determine if a frame is an upswitch point, use + // gof-based data as proxy for now. + // TODO(sprang): Remove once SVC controller is the only choice. + vp9_info->gof_idx = + static_cast(pics_since_key_ % gof_.num_frames_in_gof); + vp9_info->temporal_up_switch = + gof_.temporal_up_switch[vp9_info->gof_idx]; + } + } } else { vp9_info->gof_idx = static_cast(pics_since_key_ % gof_.num_frames_in_gof); @@ -1353,6 +1425,23 @@ bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, svc_params_.scaling_factor_den[sid]); } } + if (is_flexible_mode_) { + // Populate data for legacy temporal-upswitch state. + // We can switch up to a higher temporal layer only if all temporal layers + // higher than this (within the current spatial layer) are switch points. + vp9_info->temporal_up_switch = true; + for (int i = layer_id.temporal_layer_id + 1; i < num_temporal_layers_; + ++i) { + // Assumes decode targets are always ordered first by spatial then by + // temporal id. + size_t dti_index = + (layer_id.spatial_layer_id * num_temporal_layers_) + i; + vp9_info->temporal_up_switch &= + (codec_specific->generic_frame_info + ->decode_target_indications[dti_index] == + DecodeTargetIndication::kSwitch); + } + } } return true; } @@ -1373,63 +1462,56 @@ void LibvpxVp9Encoder::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, vpx_svc_ref_frame_config_t enc_layer_conf = {{0}}; libvpx_->codec_control(encoder_, VP9E_GET_SVC_REF_FRAME_CONFIG, &enc_layer_conf); - int ref_buf_flags = 0; + char ref_buf_flags[] = "00000000"; + // There should be one character per buffer + 1 termination '\0'. 
+ static_assert(sizeof(ref_buf_flags) == kNumVp9Buffers + 1); if (enc_layer_conf.reference_last[layer_id.spatial_layer_id]) { const size_t fb_idx = enc_layer_conf.lst_fb_idx[layer_id.spatial_layer_id]; - RTC_DCHECK(ref_buf_.find(fb_idx) != ref_buf_.end()); + RTC_DCHECK_LT(fb_idx, ref_buf_.size()); if (std::find(ref_buf_list.begin(), ref_buf_list.end(), - ref_buf_.at(fb_idx)) == ref_buf_list.end()) { - ref_buf_list.push_back(ref_buf_.at(fb_idx)); - ref_buf_flags |= 1 << fb_idx; + ref_buf_[fb_idx]) == ref_buf_list.end()) { + ref_buf_list.push_back(ref_buf_[fb_idx]); + ref_buf_flags[fb_idx] = '1'; } } if (enc_layer_conf.reference_alt_ref[layer_id.spatial_layer_id]) { const size_t fb_idx = enc_layer_conf.alt_fb_idx[layer_id.spatial_layer_id]; - RTC_DCHECK(ref_buf_.find(fb_idx) != ref_buf_.end()); + RTC_DCHECK_LT(fb_idx, ref_buf_.size()); if (std::find(ref_buf_list.begin(), ref_buf_list.end(), - ref_buf_.at(fb_idx)) == ref_buf_list.end()) { - ref_buf_list.push_back(ref_buf_.at(fb_idx)); - ref_buf_flags |= 1 << fb_idx; + ref_buf_[fb_idx]) == ref_buf_list.end()) { + ref_buf_list.push_back(ref_buf_[fb_idx]); + ref_buf_flags[fb_idx] = '1'; } } if (enc_layer_conf.reference_golden[layer_id.spatial_layer_id]) { const size_t fb_idx = enc_layer_conf.gld_fb_idx[layer_id.spatial_layer_id]; - RTC_DCHECK(ref_buf_.find(fb_idx) != ref_buf_.end()); + RTC_DCHECK_LT(fb_idx, ref_buf_.size()); if (std::find(ref_buf_list.begin(), ref_buf_list.end(), - ref_buf_.at(fb_idx)) == ref_buf_list.end()) { - ref_buf_list.push_back(ref_buf_.at(fb_idx)); - ref_buf_flags |= 1 << fb_idx; + ref_buf_[fb_idx]) == ref_buf_list.end()) { + ref_buf_list.push_back(ref_buf_[fb_idx]); + ref_buf_flags[fb_idx] = '1'; } } RTC_LOG(LS_VERBOSE) << "Frame " << pic_num << " sl " << layer_id.spatial_layer_id << " tl " << layer_id.temporal_layer_id << " refered buffers " - << (ref_buf_flags & (1 << 0) ? 1 : 0) - << (ref_buf_flags & (1 << 1) ? 1 : 0) - << (ref_buf_flags & (1 << 2) ? 1 : 0) - << (ref_buf_flags & (1 << 3) ? 1 : 0) - << (ref_buf_flags & (1 << 4) ? 1 : 0) - << (ref_buf_flags & (1 << 5) ? 1 : 0) - << (ref_buf_flags & (1 << 6) ? 1 : 0) - << (ref_buf_flags & (1 << 7) ? 1 : 0); + << ref_buf_flags; } else if (!is_key_frame) { RTC_DCHECK_EQ(num_spatial_layers_, 1); RTC_DCHECK_EQ(num_temporal_layers_, 1); // In non-SVC mode encoder doesn't provide reference list. Assume each frame // refers previous one, which is stored in buffer 0. 
- ref_buf_list.push_back(ref_buf_.at(0)); + ref_buf_list.push_back(ref_buf_[0]); } - size_t max_ref_temporal_layer_id = 0; - std::vector ref_pid_list; vp9_info->num_ref_pics = 0; @@ -1461,9 +1543,6 @@ void LibvpxVp9Encoder::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, vp9_info->p_diff[vp9_info->num_ref_pics] = static_cast(p_diff); ++vp9_info->num_ref_pics; - - max_ref_temporal_layer_id = - std::max(max_ref_temporal_layer_id, ref_buf.temporal_layer_id); } else { RTC_DCHECK(inter_layer_predicted); // RTP spec only allows to use previous spatial layer for inter-layer @@ -1471,10 +1550,6 @@ void LibvpxVp9Encoder::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, RTC_DCHECK_EQ(ref_buf.spatial_layer_id + 1, layer_id.spatial_layer_id); } } - - vp9_info->temporal_up_switch = - (max_ref_temporal_layer_id < - static_cast(layer_id.temporal_layer_id)); } void LibvpxVp9Encoder::UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, @@ -1482,8 +1557,9 @@ void LibvpxVp9Encoder::UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, vpx_svc_layer_id_t layer_id = {0}; libvpx_->codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id); - RefFrameBuffer frame_buf(pic_num, layer_id.spatial_layer_id, - layer_id.temporal_layer_id); + RefFrameBuffer frame_buf = {.pic_num = pic_num, + .spatial_layer_id = layer_id.spatial_layer_id, + .temporal_layer_id = layer_id.temporal_layer_id}; if (is_svc_) { vpx_svc_ref_frame_config_t enc_layer_conf = {{0}}; @@ -1492,7 +1568,7 @@ void LibvpxVp9Encoder::UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, const int update_buffer_slot = enc_layer_conf.update_buffer_slot[layer_id.spatial_layer_id]; - for (size_t i = 0; i < kNumVp9Buffers; ++i) { + for (size_t i = 0; i < ref_buf_.size(); ++i) { if (update_buffer_slot & (1 << i)) { ref_buf_[i] = frame_buf; } @@ -1520,7 +1596,7 @@ void LibvpxVp9Encoder::UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, vpx_svc_ref_frame_config_t LibvpxVp9Encoder::SetReferences( bool is_key_pic, - size_t first_active_spatial_layer_id) { + int first_active_spatial_layer_id) { // kRefBufIdx, kUpdBufIdx need to be updated to support longer GOFs. RTC_DCHECK_LE(gof_.num_frames_in_gof, 4); @@ -1542,7 +1618,7 @@ vpx_svc_ref_frame_config_t LibvpxVp9Encoder::SetReferences( // for temporal references plus 1 buffer for spatial reference. 7 buffers // in total. - for (size_t sl_idx = first_active_spatial_layer_id; + for (int sl_idx = first_active_spatial_layer_id; sl_idx < num_active_spatial_layers_; ++sl_idx) { const size_t curr_pic_num = is_key_pic ? 0 : pics_since_key_ + 1; const size_t gof_idx = curr_pic_num % gof_.num_frames_in_gof; @@ -1629,13 +1705,25 @@ void LibvpxVp9Encoder::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { DeliverBufferedFrame(end_of_picture); } - // TODO(nisse): Introduce some buffer cache or buffer pool, to reduce - // allocations and/or copy operations. encoded_image_.SetEncodedData(EncodedImageBuffer::Create( static_cast(pkt->data.frame.buf), pkt->data.frame.sz)); + codec_specific_ = {}; + absl::optional spatial_index; + absl::optional temporal_index; + if (!PopulateCodecSpecific(&codec_specific_, &spatial_index, &temporal_index, + *pkt)) { + // Drop the frame. + encoded_image_.set_size(0); + return; + } + encoded_image_.SetSpatialIndex(spatial_index); + encoded_image_.SetTemporalIndex(temporal_index); + const bool is_key_frame = - (pkt->data.frame.flags & VPX_FRAME_IS_KEY) ? true : false; + ((pkt->data.frame.flags & VPX_FRAME_IS_KEY) ? 
true : false) && + !codec_specific_.codecSpecific.VP9.inter_layer_predicted; + // Ensure encoder issued key frame on request. RTC_DCHECK(is_key_frame || !force_key_frame_); @@ -1646,20 +1734,11 @@ void LibvpxVp9Encoder::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { force_key_frame_ = false; } - codec_specific_ = {}; - absl::optional spatial_index; - if (!PopulateCodecSpecific(&codec_specific_, &spatial_index, *pkt, - input_image_->timestamp())) { - // Drop the frame. - encoded_image_.set_size(0); - return; - } - encoded_image_.SetSpatialIndex(spatial_index); - UpdateReferenceBuffers(*pkt, pics_since_key_); TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_.size()); encoded_image_.SetTimestamp(input_image_->timestamp()); + encoded_image_.SetColorSpace(input_image_->color_space()); encoded_image_._encodedHeight = pkt->data.frame.height[layer_id.spatial_layer_id]; encoded_image_._encodedWidth = @@ -1792,8 +1871,7 @@ size_t LibvpxVp9Encoder::SteadyStateSize(int sid, int tid) { // static LibvpxVp9Encoder::VariableFramerateExperiment -LibvpxVp9Encoder::ParseVariableFramerateConfig( - const WebRtcKeyValueConfig& trials) { +LibvpxVp9Encoder::ParseVariableFramerateConfig(const FieldTrialsView& trials) { FieldTrialFlag enabled = FieldTrialFlag("Enabled"); FieldTrialParameter framerate_limit("min_fps", 5.0); FieldTrialParameter qp("min_qp", 32); @@ -1815,7 +1893,7 @@ LibvpxVp9Encoder::ParseVariableFramerateConfig( // static LibvpxVp9Encoder::QualityScalerExperiment -LibvpxVp9Encoder::ParseQualityScalerConfig(const WebRtcKeyValueConfig& trials) { +LibvpxVp9Encoder::ParseQualityScalerConfig(const FieldTrialsView& trials) { FieldTrialFlag disabled = FieldTrialFlag("Disabled"); FieldTrialParameter low_qp("low_qp", kLowVp9QpThreshold); FieldTrialParameter high_qp("hihg_qp", kHighVp9QpThreshold); @@ -1832,14 +1910,30 @@ LibvpxVp9Encoder::ParseQualityScalerConfig(const WebRtcKeyValueConfig& trials) { } void LibvpxVp9Encoder::UpdatePerformanceFlags() { + flat_map params_by_resolution; + if (codec_.GetVideoEncoderComplexity() == + VideoCodecComplexity::kComplexityLow) { + // For low tier devices, always use speed 9. Only disable upper + // layer deblocking below QCIF. 
+ params_by_resolution[0] = {.base_layer_speed = 9, + .high_layer_speed = 9, + .deblock_mode = 1, + .allow_denoising = true}; + params_by_resolution[352 * 288] = {.base_layer_speed = 9, + .high_layer_speed = 9, + .deblock_mode = 0, + .allow_denoising = true}; + } else { + params_by_resolution = performance_flags_.settings_by_resolution; + } + const auto find_speed = [&](int min_pixel_count) { - RTC_DCHECK(!performance_flags_.settings_by_resolution.empty()); - auto it = - performance_flags_.settings_by_resolution.upper_bound(min_pixel_count); + RTC_DCHECK(!params_by_resolution.empty()); + auto it = params_by_resolution.upper_bound(min_pixel_count); return std::prev(it)->second; }; - performance_flags_by_spatial_index_.clear(); + if (is_svc_) { for (int si = 0; si < num_spatial_layers_; ++si) { performance_flags_by_spatial_index_.push_back(find_speed( @@ -1854,7 +1948,7 @@ void LibvpxVp9Encoder::UpdatePerformanceFlags() { // static LibvpxVp9Encoder::PerformanceFlags LibvpxVp9Encoder::ParsePerformanceFlagsFromTrials( - const WebRtcKeyValueConfig& trials) { + const FieldTrialsView& trials) { struct Params : public PerformanceFlags::ParameterSet { int min_pixel_count = 0; }; @@ -1867,7 +1961,9 @@ LibvpxVp9Encoder::ParsePerformanceFlagsFromTrials( FieldTrialStructMember("base_layer_speed", [](Params* p) { return &p->base_layer_speed; }), FieldTrialStructMember("deblock_mode", - [](Params* p) { return &p->deblock_mode; })}, + [](Params* p) { return &p->deblock_mode; }), + FieldTrialStructMember("denoiser", + [](Params* p) { return &p->allow_denoising; })}, {}); FieldTrialFlag per_layer_speed("use_per_layer_speed"); @@ -1908,18 +2004,38 @@ LibvpxVp9Encoder::GetDefaultPerformanceFlags() { flags.use_per_layer_speed = true; #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) // Speed 8 on all layers for all resolutions. - flags.settings_by_resolution[0] = {8, 8, 0}; + flags.settings_by_resolution[0] = {.base_layer_speed = 8, + .high_layer_speed = 8, + .deblock_mode = 0, + .allow_denoising = true}; #else + // For smaller resolutions, use lower speed setting for the temporal base // layer (get some coding gain at the cost of increased encoding complexity). // Set encoder Speed 5 for TL0, encoder Speed 8 for upper temporal layers, and // disable deblocking for upper-most temporal layers. - flags.settings_by_resolution[0] = {5, 8, 1}; + flags.settings_by_resolution[0] = {.base_layer_speed = 5, + .high_layer_speed = 8, + .deblock_mode = 1, + .allow_denoising = true}; // Use speed 7 for QCIF and above. // Set encoder Speed 7 for TL0, encoder Speed 8 for upper temporal layers, and // enable deblocking for all temporal layers. - flags.settings_by_resolution[352 * 288] = {7, 8, 0}; + flags.settings_by_resolution[352 * 288] = {.base_layer_speed = 7, + .high_layer_speed = 8, + .deblock_mode = 0, + .allow_denoising = true}; + + // For very high resolution (1080p and up), turn the speed all the way up + // since this is very CPU intensive. Also disable denoising to save CPU, at + // these resolutions denoising appear less effective and hopefully you also + // have a less noisy video source at this point. 
+ flags.settings_by_resolution[1920 * 1080] = {.base_layer_speed = 9, + .high_layer_speed = 9, + .deblock_mode = 0, + .allow_denoising = false}; + #endif return flags; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h index 29118a7c3d..4612805755 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h @@ -14,12 +14,12 @@ #ifdef RTC_ENABLE_VP9 -#include +#include #include #include #include "api/fec_controller_override.h" -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_view.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/vp9_profile.h" #include "common_video/include/video_frame_buffer_pool.h" @@ -28,6 +28,7 @@ #include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h" #include "modules/video_coding/svc/scalable_video_controller.h" #include "modules/video_coding/utility/framerate_controller_deprecated.h" +#include "rtc_base/containers/flat_map.h" #include "rtc_base/experiments/encoder_info_settings.h" #include @@ -37,7 +38,7 @@ class LibvpxVp9Encoder : public VP9Encoder { public: LibvpxVp9Encoder(const cricket::VideoCodec& codec, std::unique_ptr interface, - const WebRtcKeyValueConfig& trials); + const FieldTrialsView& trials); ~LibvpxVp9Encoder() override; @@ -67,17 +68,15 @@ class LibvpxVp9Encoder : public VP9Encoder { bool PopulateCodecSpecific(CodecSpecificInfo* codec_specific, absl::optional* spatial_idx, - const vpx_codec_cx_pkt& pkt, - uint32_t timestamp); + absl::optional* temporal_idx, + const vpx_codec_cx_pkt& pkt); void FillReferenceIndices(const vpx_codec_cx_pkt& pkt, - const size_t pic_num, - const bool inter_layer_predicted, + size_t pic_num, + bool inter_layer_predicted, CodecSpecificInfoVP9* vp9_info); - void UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, - const size_t pic_num); - vpx_svc_ref_frame_config_t SetReferences( - bool is_key_pic, - size_t first_active_spatial_layer_id); + void UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, size_t pic_num); + vpx_svc_ref_frame_config_t SetReferences(bool is_key_pic, + int first_active_spatial_layer_id); bool ExplicitlyConfiguredSpatialLayers() const; bool SetSvcRates(const VideoBitrateAllocation& bitrate_allocation); @@ -108,7 +107,7 @@ class LibvpxVp9Encoder : public VP9Encoder { size_t SteadyStateSize(int sid, int tid); - void MaybeRewrapRawWithFormat(const vpx_img_fmt fmt); + void MaybeRewrapRawWithFormat(vpx_img_fmt fmt); // Prepares `raw_` to reference image data of `buffer`, or of mapped or scaled // versions of `buffer`. Returns the buffer that got referenced as a result, // allowing the caller to keep a reference to it until after encoding has @@ -157,24 +156,16 @@ class LibvpxVp9Encoder : public VP9Encoder { // Used for flexible mode. 
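A worked example of the settings_by_resolution lookup used above (keys and speeds taken from the desktop defaults in this hunk; the stand-in std::map is only for illustration, the real code uses flat_map):

// find_speed picks the entry whose minimum pixel count is closest below the input size.
#include <iterator>
#include <map>

int BaseLayerSpeedFor(int pixel_count) {
  // Key = minimum pixel count, value = base-layer speed (desktop defaults above: 5, 7, 9).
  std::map<int, int> base_speed_by_min_pixels = {
      {0, 5}, {352 * 288, 7}, {1920 * 1080, 9}};
  auto it = base_speed_by_min_pixels.upper_bound(pixel_count);
  return std::prev(it)->second;
}

For a 1280x720 stream (921 600 pixels) upper_bound lands on the 1920x1080 entry and std::prev steps back to the 352x288 settings (speed 7), while 1080p and larger inputs get the new speed-9, denoiser-off configuration.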
bool is_flexible_mode_; struct RefFrameBuffer { - RefFrameBuffer(size_t pic_num, - size_t spatial_layer_id, - size_t temporal_layer_id) - : pic_num(pic_num), - spatial_layer_id(spatial_layer_id), - temporal_layer_id(temporal_layer_id) {} - RefFrameBuffer() {} - bool operator==(const RefFrameBuffer& o) { return pic_num == o.pic_num && spatial_layer_id == o.spatial_layer_id && temporal_layer_id == o.temporal_layer_id; } size_t pic_num = 0; - size_t spatial_layer_id = 0; - size_t temporal_layer_id = 0; + int spatial_layer_id = 0; + int temporal_layer_id = 0; }; - std::map ref_buf_; + std::array ref_buf_; std::vector layer_frames_; // Variable frame-rate related fields and methods. @@ -192,7 +183,7 @@ class LibvpxVp9Encoder : public VP9Encoder { int frames_before_steady_state; } variable_framerate_experiment_; static VariableFramerateExperiment ParseVariableFramerateConfig( - const WebRtcKeyValueConfig& trials); + const FieldTrialsView& trials); FramerateControllerDeprecated variable_framerate_controller_; const struct QualityScalerExperiment { @@ -201,7 +192,7 @@ class LibvpxVp9Encoder : public VP9Encoder { bool enabled; } quality_scaler_experiment_; static QualityScalerExperiment ParseQualityScalerConfig( - const WebRtcKeyValueConfig& trials); + const FieldTrialsView& trials); const bool external_ref_ctrl_; // Flags that can affect speed vs quality tradeoff, and are configureable per @@ -222,11 +213,12 @@ class LibvpxVp9Encoder : public VP9Encoder { // 1 = disable deblock for top-most TL // 2 = disable deblock for all TLs int deblock_mode = 0; + bool allow_denoising = true; }; // Map from min pixel count to settings for that resolution and above. // E.g. if you want some settings A if below wvga (640x360) and some other // setting B at wvga and above, you'd use map {{0, A}, {230400, B}}. - std::map settings_by_resolution; + flat_map settings_by_resolution; }; // Performance flags, ordered by `min_pixel_count`. const PerformanceFlags performance_flags_; @@ -236,7 +228,7 @@ class LibvpxVp9Encoder : public VP9Encoder { performance_flags_by_spatial_index_; void UpdatePerformanceFlags(); static PerformanceFlags ParsePerformanceFlagsFromTrials( - const WebRtcKeyValueConfig& trials); + const FieldTrialsView& trials); static PerformanceFlags GetDefaultPerformanceFlags(); int num_steady_state_frames_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_config.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_config.cc index 92818eb4f9..3a32a43622 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_config.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_config.cc @@ -12,9 +12,13 @@ #include #include +#include #include +#include "media/base/video_common.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/svc/scalability_mode_util.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -29,6 +33,19 @@ const size_t kMinScreenSharingLayerBitrateKbps[] = {30, 200, 500}; const size_t kTargetScreenSharingLayerBitrateKbps[] = {150, 350, 950}; const size_t kMaxScreenSharingLayerBitrateKbps[] = {250, 500, 950}; +// Gets limited number of layers for given resolution. +size_t GetLimitedNumSpatialLayers(size_t width, size_t height) { + const bool is_landscape = width >= height; + const size_t min_width = is_landscape ? 
kMinVp9SpatialLayerLongSideLength + : kMinVp9SpatialLayerShortSideLength; + const size_t min_height = is_landscape ? kMinVp9SpatialLayerShortSideLength + : kMinVp9SpatialLayerLongSideLength; + const size_t num_layers_fit_horz = static_cast( + std::floor(1 + std::max(0.0f, std::log2(1.0f * width / min_width)))); + const size_t num_layers_fit_vert = static_cast( + std::floor(1 + std::max(0.0f, std::log2(1.0f * height / min_height)))); + return std::min(num_layers_fit_horz, num_layers_fit_vert); +} } // namespace std::vector ConfigureSvcScreenSharing(size_t input_width, @@ -59,27 +76,19 @@ std::vector ConfigureSvcScreenSharing(size_t input_width, return spatial_layers; } -std::vector ConfigureSvcNormalVideo(size_t input_width, - size_t input_height, - float max_framerate_fps, - size_t first_active_layer, - size_t num_spatial_layers, - size_t num_temporal_layers) { +std::vector ConfigureSvcNormalVideo( + size_t input_width, + size_t input_height, + float max_framerate_fps, + size_t first_active_layer, + size_t num_spatial_layers, + size_t num_temporal_layers, + absl::optional config) { RTC_DCHECK_LT(first_active_layer, num_spatial_layers); - std::vector spatial_layers; // Limit number of layers for given resolution. - const bool is_landscape = input_width >= input_height; - const size_t min_width = is_landscape ? kMinVp9SpatialLayerLongSideLength - : kMinVp9SpatialLayerShortSideLength; - const size_t min_height = is_landscape ? kMinVp9SpatialLayerShortSideLength - : kMinVp9SpatialLayerLongSideLength; - const size_t num_layers_fit_horz = static_cast(std::floor( - 1 + std::max(0.0f, std::log2(1.0f * input_width / min_width)))); - const size_t num_layers_fit_vert = static_cast(std::floor( - 1 + std::max(0.0f, std::log2(1.0f * input_height / min_height)))); - const size_t limited_num_spatial_layers = - std::min(num_layers_fit_horz, num_layers_fit_vert); + size_t limited_num_spatial_layers = + GetLimitedNumSpatialLayers(input_width, input_height); if (limited_num_spatial_layers < num_spatial_layers) { RTC_LOG(LS_WARNING) << "Reducing number of spatial layers from " << num_spatial_layers << " to " @@ -87,14 +96,23 @@ std::vector ConfigureSvcNormalVideo(size_t input_width, << " due to low input resolution."; num_spatial_layers = limited_num_spatial_layers; } + // First active layer must be configured. num_spatial_layers = std::max(num_spatial_layers, first_active_layer + 1); // Ensure top layer is even enough. 
int required_divisiblity = 1 << (num_spatial_layers - first_active_layer - 1); + if (config) { + required_divisiblity = 1; + for (size_t sl_idx = 0; sl_idx < num_spatial_layers; ++sl_idx) { + required_divisiblity = cricket::LeastCommonMultiple( + required_divisiblity, config->scaling_factor_den[sl_idx]); + } + } input_width = input_width - input_width % required_divisiblity; input_height = input_height - input_height % required_divisiblity; + std::vector spatial_layers; for (size_t sl_idx = first_active_layer; sl_idx < num_spatial_layers; ++sl_idx) { SpatialLayer spatial_layer = {0}; @@ -104,6 +122,13 @@ std::vector ConfigureSvcNormalVideo(size_t input_width, spatial_layer.numberOfTemporalLayers = num_temporal_layers; spatial_layer.active = true; + if (config) { + spatial_layer.width = input_width * config->scaling_factor_num[sl_idx] / + config->scaling_factor_den[sl_idx]; + spatial_layer.height = input_height * config->scaling_factor_num[sl_idx] / + config->scaling_factor_den[sl_idx]; + } + // minBitrate and maxBitrate formulas were derived from // subjective-quality data to determing bit rates below which video // quality is unacceptable and above which additional bits do not provide @@ -124,7 +149,7 @@ std::vector ConfigureSvcNormalVideo(size_t input_width, spatial_layers.push_back(spatial_layer); } - // A workaround for sitiation when single HD layer is left with minBitrate + // A workaround for situation when single HD layer is left with minBitrate // about 500kbps. This would mean that there will always be at least 500kbps // allocated to video regardless of how low is the actual BWE. // Also, boost maxBitrate for the first layer to account for lost ability to @@ -140,13 +165,63 @@ std::vector ConfigureSvcNormalVideo(size_t input_width, return spatial_layers; } -std::vector GetSvcConfig(size_t input_width, - size_t input_height, - float max_framerate_fps, - size_t first_active_layer, - size_t num_spatial_layers, - size_t num_temporal_layers, - bool is_screen_sharing) { +// Uses scalability mode to configure spatial layers. +std::vector GetVp9SvcConfig(VideoCodec& codec) { + RTC_DCHECK_EQ(codec.codecType, kVideoCodecVP9); + + absl::optional scalability_mode = codec.GetScalabilityMode(); + RTC_DCHECK(scalability_mode.has_value()); + + // Limit number of spatial layers for given resolution. + int limited_num_spatial_layers = + GetLimitedNumSpatialLayers(codec.width, codec.height); + if (limited_num_spatial_layers < + ScalabilityModeToNumSpatialLayers(*scalability_mode)) { + ScalabilityMode limited_scalability_mode = + LimitNumSpatialLayers(*scalability_mode, limited_num_spatial_layers); + RTC_LOG(LS_WARNING) + << "Reducing number of spatial layers due to low input resolution: " + << ScalabilityModeToString(*scalability_mode) << " to " + << ScalabilityModeToString(limited_scalability_mode); + scalability_mode = limited_scalability_mode; + codec.SetScalabilityMode(limited_scalability_mode); + } + + absl::optional info = + ScalabilityStructureConfig(*scalability_mode); + if (!info.has_value()) { + RTC_LOG(LS_WARNING) << "Failed to create structure " + << ScalabilityModeToString(*scalability_mode); + return {}; + } + + // TODO(bugs.webrtc.org/11607): Add support for screensharing. + std::vector spatial_layers = + GetSvcConfig(codec.width, codec.height, codec.maxFramerate, + /*first_active_layer=*/0, info->num_spatial_layers, + info->num_temporal_layers, /*is_screen_sharing=*/false, + codec.GetScalabilityMode() ? 
info : absl::nullopt); + RTC_DCHECK(!spatial_layers.empty()); + + // Use codec bitrate limits if spatial layering is not requested. + if (info->num_spatial_layers == 1) { + spatial_layers.back().minBitrate = codec.minBitrate; + spatial_layers.back().targetBitrate = codec.maxBitrate; + spatial_layers.back().maxBitrate = codec.maxBitrate; + } + + return spatial_layers; +} + +std::vector GetSvcConfig( + size_t input_width, + size_t input_height, + float max_framerate_fps, + size_t first_active_layer, + size_t num_spatial_layers, + size_t num_temporal_layers, + bool is_screen_sharing, + absl::optional config) { RTC_DCHECK_GT(input_width, 0); RTC_DCHECK_GT(input_height, 0); RTC_DCHECK_GT(num_spatial_layers, 0); @@ -158,7 +233,7 @@ std::vector GetSvcConfig(size_t input_width, } else { return ConfigureSvcNormalVideo(input_width, input_height, max_framerate_fps, first_active_layer, num_spatial_layers, - num_temporal_layers); + num_temporal_layers, config); } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_config.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_config.h index f6b562e189..adeaf0f161 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_config.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/svc_config.h @@ -15,16 +15,24 @@ #include #include "api/video_codecs/spatial_layer.h" +#include "api/video_codecs/video_codec.h" +#include "modules/video_coding/svc/scalable_video_controller.h" namespace webrtc { -std::vector GetSvcConfig(size_t input_width, - size_t input_height, - float max_framerate_fps, - size_t first_active_layer, - size_t num_spatial_layers, - size_t num_temporal_layers, - bool is_screen_sharing); +// Uses scalability mode to configure spatial layers. +std::vector GetVp9SvcConfig(VideoCodec& video_codec); + +std::vector GetSvcConfig( + size_t input_width, + size_t input_height, + float max_framerate_fps, + size_t first_active_layer, + size_t num_spatial_layers, + size_t num_temporal_layers, + bool is_screen_sharing, + absl::optional config = + absl::nullopt); } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc index 3a07dcd889..886a3e2e31 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc @@ -12,26 +12,22 @@ #include +#include "absl/container/inlined_vector.h" #include "api/transport/field_trial_based_config.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/vp9_profile.h" #include "modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h" #include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h" +#include "modules/video_coding/svc/create_scalability_structure.h" #include "rtc_base/checks.h" #include #include #include namespace webrtc { -namespace { -constexpr absl::string_view kSupportedScalabilityModes[] = { - "L1T2", "L1T3", "L2T1", "L2T2", "L2T3", "L3T1", - "L3T2", "L3T3", "L1T2h", "L1T3h", "L2T1h", "L2T2h", - "L2T3h", "L3T1h", "L3T2h", "L3T3h", "L2T2_KEY", "L2T3_KEY", - "L3T1_KEY", "L3T2_KEY", "L3T3_KEY"}; -} // namespace -std::vector SupportedVP9Codecs() { +std::vector SupportedVP9Codecs(bool add_scalability_modes) { #ifdef RTC_ENABLE_VP9 // Profile 2 might not be available on some platforms until // https://bugs.chromium.org/p/webm/issues/detail?id=1544 is solved. 
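// Illustrative usage sketch, not part of this change; it assumes the declarations
// from svc_config.h and video_codec.h are in scope, and the resolution and
// ScalabilityMode::kL3T3 below are example values only.

std::vector<SpatialLayer> ExampleConfigureVp9Svc() {
  VideoCodec codec;
  codec.codecType = kVideoCodecVP9;
  codec.width = 1280;
  codec.height = 720;
  codec.maxFramerate = 30;
  codec.SetScalabilityMode(ScalabilityMode::kL3T3);
  // 720p is typically large enough for three spatial layers; a much smaller input
  // would make GetVp9SvcConfig() limit the mode (towards L1T3) and write the
  // reduced mode back into `codec` before returning the per-layer configuration.
  return GetVp9SvcConfig(codec);
}

// SupportedVP9Codecs(), continued below, attaches the buildable scalability modes
// to the advertised SDP formats; whether profile 2 is offered also depends on the
// high-bit-depth capability probe that follows.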
@@ -41,13 +37,23 @@ std::vector SupportedVP9Codecs() { (vpx_codec_get_caps(vpx_codec_vp9_dx()) & VPX_CODEC_CAP_HIGHBITDEPTH) != 0; + absl::InlinedVector scalability_modes; + if (add_scalability_modes) { + for (const auto scalability_mode : kAllScalabilityModes) { + if (ScalabilityStructureConfig(scalability_mode).has_value()) { + scalability_modes.push_back(scalability_mode); + } + } + } std::vector supported_formats{SdpVideoFormat( cricket::kVp9CodecName, - {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}; + {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}}, + scalability_modes)}; if (vpx_supports_high_bit_depth) { supported_formats.push_back(SdpVideoFormat( cricket::kVp9CodecName, - {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}})); + {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}}, + scalability_modes)); } return supported_formats; @@ -59,12 +65,15 @@ std::vector SupportedVP9Codecs() { std::vector SupportedVP9DecoderCodecs() { #ifdef RTC_ENABLE_VP9 std::vector supported_formats = SupportedVP9Codecs(); - // The WebRTC internal decoder supports VP9 profile 1. However, there's - // currently no way of sending VP9 profile 1 using the internal encoder. + // The WebRTC internal decoder supports VP9 profile 1 and 3. However, there's + // currently no way of sending VP9 profile 1 or 3 using the internal encoder. // It would require extended support for I444, I422, and I440 buffers. supported_formats.push_back(SdpVideoFormat( cricket::kVp9CodecName, {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile1)}})); + supported_formats.push_back(SdpVideoFormat( + cricket::kVp9CodecName, + {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile3)}})); return supported_formats; #else return std::vector(); @@ -93,13 +102,8 @@ std::unique_ptr VP9Encoder::Create( #endif } -bool VP9Encoder::SupportsScalabilityMode(absl::string_view scalability_mode) { - for (const auto& entry : kSupportedScalabilityModes) { - if (entry == scalability_mode) { - return true; - } - } - return false; +bool VP9Encoder::SupportsScalabilityMode(ScalabilityMode scalability_mode) { + return ScalabilityStructureConfig(scalability_mode).has_value(); } std::unique_ptr VP9Decoder::Create() { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.cc index 01120dc669..3410edc624 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.cc @@ -10,45 +10,56 @@ #include "modules/video_coding/decoder_database.h" +#include +#include + #include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { -bool VCMDecoderDataBase::DeregisterExternalDecoder(uint8_t payload_type) { +VCMDecoderDatabase::VCMDecoderDatabase() { + decoder_sequence_checker_.Detach(); +} + +void VCMDecoderDatabase::DeregisterExternalDecoder(uint8_t payload_type) { + RTC_DCHECK_RUN_ON(&decoder_sequence_checker_); auto it = decoders_.find(payload_type); if (it == decoders_.end()) { - // Not found. - return false; + return; } + // We can't use payload_type to check if the decoder is currently in use, // because payload type may be out of date (e.g. before we decode the first // frame after RegisterReceiveCodec). 
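// Illustrative sketch, not part of this change: the renamed VCMDecoderDatabase now
// owns external decoders, so callers pass a std::unique_ptr instead of a raw
// pointer, and both calls below must run on the decoder sequence. The payload type
// and CreatePlatformVp9Decoder() are hypothetical.

void ExampleRegisterExternalDecoder(VCMDecoderDatabase& db) {
  std::unique_ptr<VideoDecoder> decoder = CreatePlatformVp9Decoder();
  db.RegisterExternalDecoder(/*payload_type=*/98, std::move(decoder));
  // Deregistration is now void and simply a no-op for unknown payload types,
  // instead of returning false.
  db.DeregisterExternalDecoder(/*payload_type=*/98);
}

// The IsSameDecoder() comparison just below is still what decides whether the
// decoder being removed is the one currently in use, since the payload type alone
// can be stale.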
- if (current_decoder_ && current_decoder_->IsSameDecoder(it->second)) { + if (current_decoder_ && current_decoder_->IsSameDecoder(it->second.get())) { // Release it if it was registered and in use. current_decoder_ = absl::nullopt; } decoders_.erase(it); - return true; } // Add the external decoder object to the list of external decoders. // Won't be registered as a receive codec until RegisterReceiveCodec is called. -void VCMDecoderDataBase::RegisterExternalDecoder( +void VCMDecoderDatabase::RegisterExternalDecoder( uint8_t payload_type, - VideoDecoder* external_decoder) { + std::unique_ptr external_decoder) { + RTC_DCHECK_RUN_ON(&decoder_sequence_checker_); // If payload value already exists, erase old and insert new. DeregisterExternalDecoder(payload_type); - decoders_[payload_type] = external_decoder; + if (external_decoder) { + decoders_.emplace( + std::make_pair(payload_type, std::move(external_decoder))); + } } -bool VCMDecoderDataBase::IsExternalDecoderRegistered( +bool VCMDecoderDatabase::IsExternalDecoderRegistered( uint8_t payload_type) const { - return payload_type == current_payload_type_ || - decoders_.find(payload_type) != decoders_.end(); + RTC_DCHECK_RUN_ON(&decoder_sequence_checker_); + return decoders_.find(payload_type) != decoders_.end(); } -void VCMDecoderDataBase::RegisterReceiveCodec( +void VCMDecoderDatabase::RegisterReceiveCodec( uint8_t payload_type, const VideoDecoder::Settings& settings) { // If payload value already exists, erase old and insert new. @@ -58,7 +69,7 @@ void VCMDecoderDataBase::RegisterReceiveCodec( decoder_settings_[payload_type] = settings; } -bool VCMDecoderDataBase::DeregisterReceiveCodec(uint8_t payload_type) { +bool VCMDecoderDatabase::DeregisterReceiveCodec(uint8_t payload_type) { if (decoder_settings_.erase(payload_type) == 0) { return false; } @@ -69,9 +80,15 @@ bool VCMDecoderDataBase::DeregisterReceiveCodec(uint8_t payload_type) { return true; } -VCMGenericDecoder* VCMDecoderDataBase::GetDecoder( +void VCMDecoderDatabase::DeregisterReceiveCodecs() { + current_payload_type_ = absl::nullopt; + decoder_settings_.clear(); +} + +VCMGenericDecoder* VCMDecoderDatabase::GetDecoder( const VCMEncodedFrame& frame, VCMDecodedFrameCallback* decoded_frame_callback) { + RTC_DCHECK_RUN_ON(&decoder_sequence_checker_); RTC_DCHECK(decoded_frame_callback->UserReceiveCallback()); uint8_t payload_type = frame.PayloadType(); if (payload_type == current_payload_type_ || payload_type == 0) { @@ -100,10 +117,10 @@ VCMGenericDecoder* VCMDecoderDataBase::GetDecoder( return &*current_decoder_; } -void VCMDecoderDataBase::CreateAndInitDecoder(const VCMEncodedFrame& frame) { +void VCMDecoderDatabase::CreateAndInitDecoder(const VCMEncodedFrame& frame) { uint8_t payload_type = frame.PayloadType(); - RTC_LOG(LS_INFO) << "Initializing decoder with payload type '" - << int{payload_type} << "'."; + RTC_DLOG(LS_INFO) << "Initializing decoder with payload type '" + << int{payload_type} << "'."; auto decoder_item = decoder_settings_.find(payload_type); if (decoder_item == decoder_settings_.end()) { RTC_LOG(LS_ERROR) << "Can't find a decoder associated with payload type: " @@ -115,7 +132,7 @@ void VCMDecoderDataBase::CreateAndInitDecoder(const VCMEncodedFrame& frame) { RTC_LOG(LS_ERROR) << "No decoder of this type exists."; return; } - current_decoder_.emplace(external_dec_item->second); + current_decoder_.emplace(external_dec_item->second.get()); // Copy over input resolutions to prevent codec reinitialization due to // the first frame being of a different resolution than 
the database values. diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.h index 98deb1801f..98f4335621 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.h @@ -14,29 +14,34 @@ #include #include +#include #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "api/video_codecs/video_decoder.h" #include "modules/video_coding/encoded_frame.h" #include "modules/video_coding/generic_decoder.h" namespace webrtc { -class VCMDecoderDataBase { +class VCMDecoderDatabase { public: - VCMDecoderDataBase() = default; - VCMDecoderDataBase(const VCMDecoderDataBase&) = delete; - VCMDecoderDataBase& operator=(const VCMDecoderDataBase&) = delete; - ~VCMDecoderDataBase() = default; + VCMDecoderDatabase(); + VCMDecoderDatabase(const VCMDecoderDatabase&) = delete; + VCMDecoderDatabase& operator=(const VCMDecoderDatabase&) = delete; + ~VCMDecoderDatabase() = default; - bool DeregisterExternalDecoder(uint8_t payload_type); + // Returns a pointer to the previously registered decoder or nullptr if none + // was registered for the `payload_type`. + void DeregisterExternalDecoder(uint8_t payload_type); void RegisterExternalDecoder(uint8_t payload_type, - VideoDecoder* external_decoder); + std::unique_ptr external_decoder); bool IsExternalDecoderRegistered(uint8_t payload_type) const; void RegisterReceiveCodec(uint8_t payload_type, const VideoDecoder::Settings& settings); bool DeregisterReceiveCodec(uint8_t payload_type); + void DeregisterReceiveCodecs(); // Returns a decoder specified by frame.PayloadType. The decoded frame // callback of the decoder is set to `decoded_frame_callback`. If no such @@ -48,14 +53,19 @@ class VCMDecoderDataBase { VCMDecodedFrameCallback* decoded_frame_callback); private: - void CreateAndInitDecoder(const VCMEncodedFrame& frame); + void CreateAndInitDecoder(const VCMEncodedFrame& frame) + RTC_RUN_ON(decoder_sequence_checker_); + + SequenceChecker decoder_sequence_checker_; absl::optional current_payload_type_; - absl::optional current_decoder_; + absl::optional current_decoder_ + RTC_GUARDED_BY(decoder_sequence_checker_); // Initialization paramaters for decoders keyed by payload type. std::map decoder_settings_; // Decoders keyed by payload type. - std::map decoders_; + std::map> decoders_ + RTC_GUARDED_BY(decoder_sequence_checker_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/deprecated/nack_module.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/deprecated/nack_module.cc deleted file mode 100644 index 334eb821a2..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/deprecated/nack_module.cc +++ /dev/null @@ -1,352 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/video_coding/deprecated/nack_module.h" - -#include -#include - -#include "api/units/timestamp.h" -#include "modules/utility/include/process_thread.h" -#include "rtc_base/checks.h" -#include "rtc_base/experiments/field_trial_parser.h" -#include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" - -namespace webrtc { - -namespace { -const int kMaxPacketAge = 10000; -const int kMaxNackPackets = 1000; -const int kDefaultRttMs = 100; -const int kMaxNackRetries = 10; -const int kProcessFrequency = 50; -const int kProcessIntervalMs = 1000 / kProcessFrequency; -const int kMaxReorderedPackets = 128; -const int kNumReorderingBuckets = 10; -const int kDefaultSendNackDelayMs = 0; - -int64_t GetSendNackDelay() { - int64_t delay_ms = strtol( - webrtc::field_trial::FindFullName("WebRTC-SendNackDelayMs").c_str(), - nullptr, 10); - if (delay_ms > 0 && delay_ms <= 20) { - RTC_LOG(LS_INFO) << "SendNackDelay is set to " << delay_ms; - return delay_ms; - } - return kDefaultSendNackDelayMs; -} -} // namespace - -DEPRECATED_NackModule::NackInfo::NackInfo() - : seq_num(0), send_at_seq_num(0), sent_at_time(-1), retries(0) {} - -DEPRECATED_NackModule::NackInfo::NackInfo(uint16_t seq_num, - uint16_t send_at_seq_num, - int64_t created_at_time) - : seq_num(seq_num), - send_at_seq_num(send_at_seq_num), - created_at_time(created_at_time), - sent_at_time(-1), - retries(0) {} - -DEPRECATED_NackModule::BackoffSettings::BackoffSettings(TimeDelta min_retry, - TimeDelta max_rtt, - double base) - : min_retry_interval(min_retry), max_rtt(max_rtt), base(base) {} - -absl::optional -DEPRECATED_NackModule::BackoffSettings::ParseFromFieldTrials() { - // Matches magic number in RTPSender::OnReceivedNack(). - const TimeDelta kDefaultMinRetryInterval = TimeDelta::Millis(5); - // Upper bound on link-delay considered for exponential backoff. - // Selected so that cumulative delay with 1.25 base and 10 retries ends up - // below 3s, since above that there will be a FIR generated instead. - const TimeDelta kDefaultMaxRtt = TimeDelta::Millis(160); - // Default base for exponential backoff, adds 25% RTT delay for each retry. 
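// Worked example using the defaults defined in this deleted file: once more than
// one NACK has been sent for a packet, the resend delay grows as
// min(rtt, max_rtt) * base^(retries - 1). With rtt = 100 ms and base = 1.25 that
// is about 125 ms after two retries and about 100 * 1.25^4 ≈ 244 ms after five,
// i.e. each retry adds roughly 25% of the RTT, which is where the 1.25 default
// just below comes from.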
- const double kDefaultBase = 1.25; - - FieldTrialParameter enabled("enabled", false); - FieldTrialParameter min_retry("min_retry", - kDefaultMinRetryInterval); - FieldTrialParameter max_rtt("max_rtt", kDefaultMaxRtt); - FieldTrialParameter base("base", kDefaultBase); - ParseFieldTrial({&enabled, &min_retry, &max_rtt, &base}, - field_trial::FindFullName("WebRTC-ExponentialNackBackoff")); - - if (enabled) { - return DEPRECATED_NackModule::BackoffSettings(min_retry.Get(), - max_rtt.Get(), base.Get()); - } - return absl::nullopt; -} - -DEPRECATED_NackModule::DEPRECATED_NackModule( - Clock* clock, - NackSender* nack_sender, - KeyFrameRequestSender* keyframe_request_sender) - : clock_(clock), - nack_sender_(nack_sender), - keyframe_request_sender_(keyframe_request_sender), - reordering_histogram_(kNumReorderingBuckets, kMaxReorderedPackets), - initialized_(false), - rtt_ms_(kDefaultRttMs), - newest_seq_num_(0), - next_process_time_ms_(-1), - send_nack_delay_ms_(GetSendNackDelay()), - backoff_settings_(BackoffSettings::ParseFromFieldTrials()) { - RTC_DCHECK(clock_); - RTC_DCHECK(nack_sender_); - RTC_DCHECK(keyframe_request_sender_); -} - -int DEPRECATED_NackModule::OnReceivedPacket(uint16_t seq_num, - bool is_keyframe) { - return OnReceivedPacket(seq_num, is_keyframe, false); -} - -int DEPRECATED_NackModule::OnReceivedPacket(uint16_t seq_num, - bool is_keyframe, - bool is_recovered) { - MutexLock lock(&mutex_); - // TODO(philipel): When the packet includes information whether it is - // retransmitted or not, use that value instead. For - // now set it to true, which will cause the reordering - // statistics to never be updated. - bool is_retransmitted = true; - - if (!initialized_) { - newest_seq_num_ = seq_num; - if (is_keyframe) - keyframe_list_.insert(seq_num); - initialized_ = true; - return 0; - } - - // Since the `newest_seq_num_` is a packet we have actually received we know - // that packet has never been Nacked. - if (seq_num == newest_seq_num_) - return 0; - - if (AheadOf(newest_seq_num_, seq_num)) { - // An out of order packet has been received. - auto nack_list_it = nack_list_.find(seq_num); - int nacks_sent_for_packet = 0; - if (nack_list_it != nack_list_.end()) { - nacks_sent_for_packet = nack_list_it->second.retries; - nack_list_.erase(nack_list_it); - } - if (!is_retransmitted) - UpdateReorderingStatistics(seq_num); - return nacks_sent_for_packet; - } - - // Keep track of new keyframes. - if (is_keyframe) - keyframe_list_.insert(seq_num); - - // And remove old ones so we don't accumulate keyframes. - auto it = keyframe_list_.lower_bound(seq_num - kMaxPacketAge); - if (it != keyframe_list_.begin()) - keyframe_list_.erase(keyframe_list_.begin(), it); - - if (is_recovered) { - recovered_list_.insert(seq_num); - - // Remove old ones so we don't accumulate recovered packets. - auto it = recovered_list_.lower_bound(seq_num - kMaxPacketAge); - if (it != recovered_list_.begin()) - recovered_list_.erase(recovered_list_.begin(), it); - - // Do not send nack for packets recovered by FEC or RTX. - return 0; - } - - AddPacketsToNack(newest_seq_num_ + 1, seq_num); - newest_seq_num_ = seq_num; - - // Are there any nacks that are waiting for this seq_num. - std::vector nack_batch = GetNackBatch(kSeqNumOnly); - if (!nack_batch.empty()) { - // This batch of NACKs is triggered externally; the initiator can - // batch them with other feedback messages. 
- nack_sender_->SendNack(nack_batch, /*buffering_allowed=*/true); - } - - return 0; -} - -void DEPRECATED_NackModule::ClearUpTo(uint16_t seq_num) { - MutexLock lock(&mutex_); - nack_list_.erase(nack_list_.begin(), nack_list_.lower_bound(seq_num)); - keyframe_list_.erase(keyframe_list_.begin(), - keyframe_list_.lower_bound(seq_num)); - recovered_list_.erase(recovered_list_.begin(), - recovered_list_.lower_bound(seq_num)); -} - -void DEPRECATED_NackModule::UpdateRtt(int64_t rtt_ms) { - MutexLock lock(&mutex_); - rtt_ms_ = rtt_ms; -} - -void DEPRECATED_NackModule::Clear() { - MutexLock lock(&mutex_); - nack_list_.clear(); - keyframe_list_.clear(); - recovered_list_.clear(); -} - -int64_t DEPRECATED_NackModule::TimeUntilNextProcess() { - return std::max(next_process_time_ms_ - clock_->TimeInMilliseconds(), - 0); -} - -void DEPRECATED_NackModule::Process() { - if (nack_sender_) { - std::vector nack_batch; - { - MutexLock lock(&mutex_); - nack_batch = GetNackBatch(kTimeOnly); - } - - if (!nack_batch.empty()) { - // This batch of NACKs is triggered externally; there is no external - // initiator who can batch them with other feedback messages. - nack_sender_->SendNack(nack_batch, /*buffering_allowed=*/false); - } - } - - // Update the next_process_time_ms_ in intervals to achieve - // the targeted frequency over time. Also add multiple intervals - // in case of a skip in time as to not make uneccessary - // calls to Process in order to catch up. - int64_t now_ms = clock_->TimeInMilliseconds(); - if (next_process_time_ms_ == -1) { - next_process_time_ms_ = now_ms + kProcessIntervalMs; - } else { - next_process_time_ms_ = next_process_time_ms_ + kProcessIntervalMs + - (now_ms - next_process_time_ms_) / - kProcessIntervalMs * kProcessIntervalMs; - } -} - -bool DEPRECATED_NackModule::RemovePacketsUntilKeyFrame() { - while (!keyframe_list_.empty()) { - auto it = nack_list_.lower_bound(*keyframe_list_.begin()); - - if (it != nack_list_.begin()) { - // We have found a keyframe that actually is newer than at least one - // packet in the nack list. - nack_list_.erase(nack_list_.begin(), it); - return true; - } - - // If this keyframe is so old it does not remove any packets from the list, - // remove it from the list of keyframes and try the next keyframe. - keyframe_list_.erase(keyframe_list_.begin()); - } - return false; -} - -void DEPRECATED_NackModule::AddPacketsToNack(uint16_t seq_num_start, - uint16_t seq_num_end) { - // Remove old packets. - auto it = nack_list_.lower_bound(seq_num_end - kMaxPacketAge); - nack_list_.erase(nack_list_.begin(), it); - - // If the nack list is too large, remove packets from the nack list until - // the latest first packet of a keyframe. If the list is still too large, - // clear it and request a keyframe. 
- uint16_t num_new_nacks = ForwardDiff(seq_num_start, seq_num_end); - if (nack_list_.size() + num_new_nacks > kMaxNackPackets) { - while (RemovePacketsUntilKeyFrame() && - nack_list_.size() + num_new_nacks > kMaxNackPackets) { - } - - if (nack_list_.size() + num_new_nacks > kMaxNackPackets) { - nack_list_.clear(); - RTC_LOG(LS_WARNING) << "NACK list full, clearing NACK" - " list and requesting keyframe."; - keyframe_request_sender_->RequestKeyFrame(); - return; - } - } - - for (uint16_t seq_num = seq_num_start; seq_num != seq_num_end; ++seq_num) { - // Do not send nack for packets that are already recovered by FEC or RTX - if (recovered_list_.find(seq_num) != recovered_list_.end()) - continue; - NackInfo nack_info(seq_num, seq_num + WaitNumberOfPackets(0.5), - clock_->TimeInMilliseconds()); - RTC_DCHECK(nack_list_.find(seq_num) == nack_list_.end()); - nack_list_[seq_num] = nack_info; - } -} - -std::vector DEPRECATED_NackModule::GetNackBatch( - NackFilterOptions options) { - bool consider_seq_num = options != kTimeOnly; - bool consider_timestamp = options != kSeqNumOnly; - Timestamp now = clock_->CurrentTime(); - std::vector nack_batch; - auto it = nack_list_.begin(); - while (it != nack_list_.end()) { - TimeDelta resend_delay = TimeDelta::Millis(rtt_ms_); - if (backoff_settings_) { - resend_delay = - std::max(resend_delay, backoff_settings_->min_retry_interval); - if (it->second.retries > 1) { - TimeDelta exponential_backoff = - std::min(TimeDelta::Millis(rtt_ms_), backoff_settings_->max_rtt) * - std::pow(backoff_settings_->base, it->second.retries - 1); - resend_delay = std::max(resend_delay, exponential_backoff); - } - } - - bool delay_timed_out = - now.ms() - it->second.created_at_time >= send_nack_delay_ms_; - bool nack_on_rtt_passed = - now.ms() - it->second.sent_at_time >= resend_delay.ms(); - bool nack_on_seq_num_passed = - it->second.sent_at_time == -1 && - AheadOrAt(newest_seq_num_, it->second.send_at_seq_num); - if (delay_timed_out && ((consider_seq_num && nack_on_seq_num_passed) || - (consider_timestamp && nack_on_rtt_passed))) { - nack_batch.emplace_back(it->second.seq_num); - ++it->second.retries; - it->second.sent_at_time = now.ms(); - if (it->second.retries >= kMaxNackRetries) { - RTC_LOG(LS_WARNING) << "Sequence number " << it->second.seq_num - << " removed from NACK list due to max retries."; - it = nack_list_.erase(it); - } else { - ++it; - } - continue; - } - ++it; - } - return nack_batch; -} - -void DEPRECATED_NackModule::UpdateReorderingStatistics(uint16_t seq_num) { - RTC_DCHECK(AheadOf(newest_seq_num_, seq_num)); - uint16_t diff = ReverseDiff(newest_seq_num_, seq_num); - reordering_histogram_.Add(diff); -} - -int DEPRECATED_NackModule::WaitNumberOfPackets(float probability) const { - if (reordering_histogram_.NumValues() == 0) - return 0; - return reordering_histogram_.InverseCdf(probability); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/deprecated/nack_module.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/deprecated/nack_module.h deleted file mode 100644 index ec1a6889bc..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/deprecated/nack_module.h +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_CODING_DEPRECATED_NACK_MODULE_H_ -#define MODULES_VIDEO_CODING_DEPRECATED_NACK_MODULE_H_ - -#include - -#include -#include -#include - -#include "absl/base/attributes.h" -#include "api/units/time_delta.h" -#include "modules/include/module.h" -#include "modules/include/module_common_types.h" -#include "modules/video_coding/histogram.h" -#include "rtc_base/numerics/sequence_number_util.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_annotations.h" -#include "system_wrappers/include/clock.h" - -namespace webrtc { - -class DEPRECATED_NackModule : public Module { - public: - DEPRECATED_NackModule(Clock* clock, - NackSender* nack_sender, - KeyFrameRequestSender* keyframe_request_sender); - - int OnReceivedPacket(uint16_t seq_num, bool is_keyframe); - int OnReceivedPacket(uint16_t seq_num, bool is_keyframe, bool is_recovered); - - void ClearUpTo(uint16_t seq_num); - void UpdateRtt(int64_t rtt_ms); - void Clear(); - - // Module implementation - int64_t TimeUntilNextProcess() override; - void Process() override; - - private: - // Which fields to consider when deciding which packet to nack in - // GetNackBatch. - enum NackFilterOptions { kSeqNumOnly, kTimeOnly, kSeqNumAndTime }; - - // This class holds the sequence number of the packet that is in the nack list - // as well as the meta data about when it should be nacked and how many times - // we have tried to nack this packet. - struct NackInfo { - NackInfo(); - NackInfo(uint16_t seq_num, - uint16_t send_at_seq_num, - int64_t created_at_time); - - uint16_t seq_num; - uint16_t send_at_seq_num; - int64_t created_at_time; - int64_t sent_at_time; - int retries; - }; - - struct BackoffSettings { - BackoffSettings(TimeDelta min_retry, TimeDelta max_rtt, double base); - static absl::optional ParseFromFieldTrials(); - - // Min time between nacks. - const TimeDelta min_retry_interval; - // Upper bound on link-delay considered for exponential backoff. - const TimeDelta max_rtt; - // Base for the exponential backoff. - const double base; - }; - - void AddPacketsToNack(uint16_t seq_num_start, uint16_t seq_num_end) - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - - // Removes packets from the nack list until the next keyframe. Returns true - // if packets were removed. - bool RemovePacketsUntilKeyFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - std::vector GetNackBatch(NackFilterOptions options) - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - - // Update the reordering distribution. - void UpdateReorderingStatistics(uint16_t seq_num) - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - - // Returns how many packets we have to wait in order to receive the packet - // with probability `probabilty` or higher. - int WaitNumberOfPackets(float probability) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - - Mutex mutex_; - Clock* const clock_; - NackSender* const nack_sender_; - KeyFrameRequestSender* const keyframe_request_sender_; - - // TODO(philipel): Some of the variables below are consistently used on a - // known thread (e.g. see `initialized_`). Those probably do not need - // synchronized access. 
- std::map> nack_list_ - RTC_GUARDED_BY(mutex_); - std::set> keyframe_list_ - RTC_GUARDED_BY(mutex_); - std::set> recovered_list_ - RTC_GUARDED_BY(mutex_); - video_coding::Histogram reordering_histogram_ RTC_GUARDED_BY(mutex_); - bool initialized_ RTC_GUARDED_BY(mutex_); - int64_t rtt_ms_ RTC_GUARDED_BY(mutex_); - uint16_t newest_seq_num_ RTC_GUARDED_BY(mutex_); - - // Only touched on the process thread. - int64_t next_process_time_ms_; - - // Adds a delay before send nack on packet received. - const int64_t send_nack_delay_ms_; - - const absl::optional backoff_settings_; -}; - -using NackModule ABSL_DEPRECATED("") = DEPRECATED_NackModule; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_DEPRECATED_NACK_MODULE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/event_wrapper.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/event_wrapper.cc index e6a4752401..748c92f637 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/event_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/event_wrapper.cc @@ -23,8 +23,10 @@ class EventWrapperImpl : public EventWrapper { return true; } + // TODO(bugs.webrtc.org/14366): Migrate to TimeDelta. EventTypeWrapper Wait(int max_time_ms) override { - return event_.Wait(max_time_ms) ? kEventSignaled : kEventTimeout; + return event_.Wait(TimeDelta::Millis(max_time_ms)) ? kEventSignaled + : kEventTimeout; } private: diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/event_wrapper.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/event_wrapper.h index c1f160c7f7..c5e5661282 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/event_wrapper.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/event_wrapper.h @@ -38,6 +38,7 @@ class EventWrapper { // Depending on timing. // // `max_time_ms` is the maximum time to wait in milliseconds. + // TODO(bugs.webrtc.org/14366): Migrate to TimeDelta. 
virtual EventTypeWrapper Wait(int max_time_ms) = 0; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/fec_controller_default.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/fec_controller_default.h index 6b9e8eb8e5..a97dea011b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/fec_controller_default.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/fec_controller_default.h @@ -19,7 +19,6 @@ #include "api/fec_controller.h" #include "modules/video_coding/media_opt_util.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" @@ -32,6 +31,10 @@ class FecControllerDefault : public FecController { VCMProtectionCallback* protection_callback); explicit FecControllerDefault(Clock* clock); ~FecControllerDefault() override; + + FecControllerDefault(const FecControllerDefault&) = delete; + FecControllerDefault& operator=(const FecControllerDefault&) = delete; + void SetProtectionCallback( VCMProtectionCallback* protection_callback) override; void SetProtectionMethod(bool enable_fec, bool enable_nack) override; @@ -44,9 +47,8 @@ class FecControllerDefault : public FecController { uint8_t fraction_lost, std::vector loss_mask_vector, int64_t round_trip_time_ms) override; - void UpdateWithEncodedData( - const size_t encoded_image_length, - const VideoFrameType encoded_image_frametype) override; + void UpdateWithEncodedData(size_t encoded_image_length, + VideoFrameType encoded_image_frametype) override; bool UseLossVectorMask() override; float GetProtectionOverheadRateThreshold(); @@ -58,7 +60,7 @@ class FecControllerDefault : public FecController { std::unique_ptr loss_prot_logic_ RTC_GUARDED_BY(mutex_); size_t max_payload_size_ RTC_GUARDED_BY(mutex_); - RTC_DISALLOW_COPY_AND_ASSIGN(FecControllerDefault); + const float overhead_threshold_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc index ba46327a7b..b289663eec 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc @@ -18,18 +18,21 @@ #include #include +#include "absl/container/inlined_vector.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" #include "api/video/encoded_image.h" #include "api/video/video_timing.h" +#include "modules/video_coding/frame_helpers.h" #include "modules/video_coding/include/video_coding_defines.h" -#include "modules/video_coding/jitter_estimator.h" -#include "modules/video_coding/timing.h" +#include "modules/video_coding/timing/jitter_estimator.h" +#include "modules/video_coding/timing/timing.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/rtt_mult_experiment.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/sequence_number_util.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace video_coding { @@ -54,23 +57,21 @@ constexpr int64_t kLogNonDecodedIntervalMs = 5000; FrameBuffer::FrameBuffer(Clock* clock, VCMTiming* timing, - VCMReceiveStatisticsCallback* stats_callback) + const FieldTrialsView& field_trials) : decoded_frames_history_(kMaxFramesHistory), clock_(clock), callback_queue_(nullptr), - jitter_estimator_(clock), + jitter_estimator_(clock, field_trials), timing_(timing), - 
inter_frame_delay_(clock_->TimeInMilliseconds()), stopped_(false), protection_mode_(kProtectionNack), - stats_callback_(stats_callback), last_log_non_decoded_ms_(-kLogNonDecodedIntervalMs), rtt_mult_settings_(RttMultExperiment::GetRttMultValue()), zero_playout_delay_max_decode_queue_size_( "max_decode_queue_size", kZeroPlayoutDelayDefaultMaxDecodeQueueSize) { ParseFieldTrial({&zero_playout_delay_max_decode_queue_size_}, - field_trial::FindFullName("WebRTC-ZeroPlayoutDelay")); + field_trials.Lookup("WebRTC-ZeroPlayoutDelay")); callback_checker_.Detach(); } @@ -80,7 +81,7 @@ FrameBuffer::~FrameBuffer() { void FrameBuffer::NextFrame(int64_t max_wait_time_ms, bool keyframe_required, - rtc::TaskQueue* callback_queue, + TaskQueueBase* callback_queue, NextFrameCallback handler) { RTC_DCHECK_RUN_ON(&callback_checker_); RTC_DCHECK(callback_queue->IsCurrent()); @@ -102,9 +103,10 @@ void FrameBuffer::NextFrame(int64_t max_wait_time_ms, void FrameBuffer::StartWaitForNextFrameOnQueue() { RTC_DCHECK(callback_queue_); RTC_DCHECK(!callback_task_.Running()); - int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds()); + int64_t wait_ms = FindNextFrame(clock_->CurrentTime()); callback_task_ = RepeatingTaskHandle::DelayedStart( - callback_queue_->Get(), TimeDelta::Millis(wait_ms), [this] { + callback_queue_, TimeDelta::Millis(wait_ms), + [this] { RTC_DCHECK_RUN_ON(&callback_checker_); // If this task has not been cancelled, we did not get any new frames // while waiting. Continue with frame delivery. @@ -115,13 +117,12 @@ void FrameBuffer::StartWaitForNextFrameOnQueue() { if (!frames_to_decode_.empty()) { // We have frames, deliver! frame = GetNextFrame(); - timing_->SetLastDecodeScheduledTimestamp( - clock_->TimeInMilliseconds()); + timing_->SetLastDecodeScheduledTimestamp(clock_->CurrentTime()); } else if (clock_->TimeInMilliseconds() < latest_return_time_ms_) { // If there's no frames to decode and there is still time left, it // means that the frame buffer was cleared between creation and // execution of this task. Continue waiting for the remaining time. - int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds()); + int64_t wait_ms = FindNextFrame(clock_->CurrentTime()); return TimeDelta::Millis(wait_ms); } frame_handler = std::move(frame_handler_); @@ -130,11 +131,12 @@ void FrameBuffer::StartWaitForNextFrameOnQueue() { // Deliver frame, if any. Otherwise signal timeout. frame_handler(std::move(frame)); return TimeDelta::Zero(); // Ignored. - }); + }, + TaskQueueBase::DelayPrecision::kHigh); } -int64_t FrameBuffer::FindNextFrame(int64_t now_ms) { - int64_t wait_ms = latest_return_time_ms_ - now_ms; +int64_t FrameBuffer::FindNextFrame(Timestamp now) { + int64_t wait_ms = latest_return_time_ms_ - now.ms(); frames_to_decode_.clear(); // `last_continuous_frame_` may be empty below, but nullopt is smaller @@ -213,14 +215,16 @@ int64_t FrameBuffer::FindNextFrame(int64_t now_ms) { frames_to_decode_ = std::move(current_superframe); - if (frame->RenderTime() == -1) { - frame->SetRenderTime(timing_->RenderTimeMs(frame->Timestamp(), now_ms)); + absl::optional render_time = frame->RenderTimestamp(); + if (!render_time) { + render_time = timing_->RenderTime(frame->Timestamp(), now); + frame->SetRenderTime(render_time->ms()); } bool too_many_frames_queued = frames_.size() > zero_playout_delay_max_decode_queue_size_ ? 
true : false; - wait_ms = timing_->MaxWaitingTime(frame->RenderTime(), now_ms, - too_many_frames_queued); + wait_ms = + timing_->MaxWaitingTime(*render_time, now, too_many_frames_queued).ms(); // This will cause the frame buffer to prefer high framerate rather // than high resolution in the case of the decoder not decoding fast @@ -232,85 +236,75 @@ int64_t FrameBuffer::FindNextFrame(int64_t now_ms) { break; } - wait_ms = std::min(wait_ms, latest_return_time_ms_ - now_ms); + wait_ms = std::min(wait_ms, latest_return_time_ms_ - now.ms()); wait_ms = std::max(wait_ms, 0); return wait_ms; } std::unique_ptr FrameBuffer::GetNextFrame() { RTC_DCHECK_RUN_ON(&callback_checker_); - int64_t now_ms = clock_->TimeInMilliseconds(); + Timestamp now = clock_->CurrentTime(); // TODO(ilnik): remove `frames_out` use frames_to_decode_ directly. std::vector> frames_out; RTC_DCHECK(!frames_to_decode_.empty()); bool superframe_delayed_by_retransmission = false; - size_t superframe_size = 0; + DataSize superframe_size = DataSize::Zero(); const EncodedFrame& first_frame = *frames_to_decode_[0]->second.frame; - int64_t render_time_ms = first_frame.RenderTime(); + absl::optional render_time = first_frame.RenderTimestamp(); int64_t receive_time_ms = first_frame.ReceivedTime(); // Gracefully handle bad RTP timestamps and render time issues. - if (HasBadRenderTiming(first_frame, now_ms)) { + if (!render_time || FrameHasBadRenderTiming(*render_time, now) || + TargetVideoDelayIsTooLarge(timing_->TargetVideoDelay())) { + RTC_LOG(LS_WARNING) << "Resetting jitter estimator and timing module due " + "to bad render timing for rtp_timestamp=" + << first_frame.Timestamp(); jitter_estimator_.Reset(); timing_->Reset(); - render_time_ms = timing_->RenderTimeMs(first_frame.Timestamp(), now_ms); + render_time = timing_->RenderTime(first_frame.Timestamp(), now); } for (FrameMap::iterator& frame_it : frames_to_decode_) { RTC_DCHECK(frame_it != frames_.end()); std::unique_ptr frame = std::move(frame_it->second.frame); - frame->SetRenderTime(render_time_ms); + frame->SetRenderTime(render_time->ms()); superframe_delayed_by_retransmission |= frame->delayed_by_retransmission(); receive_time_ms = std::max(receive_time_ms, frame->ReceivedTime()); - superframe_size += frame->size(); + superframe_size += DataSize::Bytes(frame->size()); PropagateDecodability(frame_it->second); decoded_frames_history_.InsertDecoded(frame_it->first, frame->Timestamp()); - // Remove decoded frame and all undecoded frames before it. - if (stats_callback_) { - unsigned int dropped_frames = - std::count_if(frames_.begin(), frame_it, - [](const std::pair& frame) { - return frame.second.frame != nullptr; - }); - if (dropped_frames > 0) { - stats_callback_->OnDroppedFrames(dropped_frames); - } - } - frames_.erase(frames_.begin(), ++frame_it); frames_out.emplace_back(std::move(frame)); } if (!superframe_delayed_by_retransmission) { - int64_t frame_delay; + auto frame_delay = inter_frame_delay_.CalculateDelay( + first_frame.Timestamp(), Timestamp::Millis(receive_time_ms)); - if (inter_frame_delay_.CalculateDelay(first_frame.Timestamp(), &frame_delay, - receive_time_ms)) { - jitter_estimator_.UpdateEstimate(frame_delay, superframe_size); + if (frame_delay) { + jitter_estimator_.UpdateEstimate(*frame_delay, superframe_size); } float rtt_mult = protection_mode_ == kProtectionNackFEC ? 
0.0 : 1.0; - absl::optional rtt_mult_add_cap_ms = absl::nullopt; + absl::optional rtt_mult_add_cap_ms = absl::nullopt; if (rtt_mult_settings_.has_value()) { rtt_mult = rtt_mult_settings_->rtt_mult_setting; - rtt_mult_add_cap_ms = rtt_mult_settings_->rtt_mult_add_cap_ms; + rtt_mult_add_cap_ms = + TimeDelta::Millis(rtt_mult_settings_->rtt_mult_add_cap_ms); } timing_->SetJitterDelay( jitter_estimator_.GetJitterEstimate(rtt_mult, rtt_mult_add_cap_ms)); - timing_->UpdateCurrentDelay(render_time_ms, now_ms); + timing_->UpdateCurrentDelay(*render_time, now); } else { if (RttMultExperiment::RttMultEnabled()) jitter_estimator_.FrameNacked(); } - UpdateJitterDelay(); - UpdateTimingFrameInfo(); - if (frames_out.size() == 1) { return std::move(frames_out[0]); } else { @@ -318,35 +312,6 @@ std::unique_ptr FrameBuffer::GetNextFrame() { } } -bool FrameBuffer::HasBadRenderTiming(const EncodedFrame& frame, - int64_t now_ms) { - // Assume that render timing errors are due to changes in the video stream. - int64_t render_time_ms = frame.RenderTimeMs(); - // Zero render time means render immediately. - if (render_time_ms == 0) { - return false; - } - if (render_time_ms < 0) { - return true; - } - const int64_t kMaxVideoDelayMs = 10000; - if (std::abs(render_time_ms - now_ms) > kMaxVideoDelayMs) { - int frame_delay = static_cast(std::abs(render_time_ms - now_ms)); - RTC_LOG(LS_WARNING) - << "A frame about to be decoded is out of the configured " - "delay bounds (" - << frame_delay << " > " << kMaxVideoDelayMs - << "). Resetting the video jitter buffer."; - return true; - } - if (static_cast(timing_->TargetVideoDelay()) > kMaxVideoDelayMs) { - RTC_LOG(LS_WARNING) << "The video target delay has grown larger than " - << kMaxVideoDelayMs << " ms."; - return true; - } - return false; -} - void FrameBuffer::SetProtectionMode(VCMVideoProtection mode) { TRACE_EVENT0("webrtc", "FrameBuffer::SetProtectionMode"); MutexLock lock(&mutex_); @@ -375,7 +340,7 @@ int FrameBuffer::Size() { void FrameBuffer::UpdateRtt(int64_t rtt_ms) { MutexLock lock(&mutex_); - jitter_estimator_.UpdateRtt(rtt_ms); + jitter_estimator_.UpdateRtt(TimeDelta::Millis(rtt_ms)); } bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const { @@ -470,16 +435,13 @@ int64_t FrameBuffer::InsertFrame(std::unique_ptr frame) { if (!UpdateFrameInfoWithIncomingFrame(*frame, info)) return last_continuous_frame_id; - if (!frame->delayed_by_retransmission()) - timing_->IncomingTimestamp(frame->Timestamp(), frame->ReceivedTime()); + // If ReceiveTime is negative then it is not a valid timestamp. + if (!frame->delayed_by_retransmission() && frame->ReceivedTime() >= 0) + timing_->IncomingTimestamp(frame->Timestamp(), + Timestamp::Millis(frame->ReceivedTime())); // It can happen that a frame will be reported as fully received even if a // lower spatial layer frame is missing. 
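// Illustrative sketch, not part of this change: the jitter path above now uses
// strongly typed units end to end. Assuming the api/units headers are in scope,
// an update has roughly this shape (the old bool-return-plus-out-parameter
// contract is replaced by absl::optional, and sizes travel as DataSize):

void ExampleUpdateJitter(InterFrameDelay& inter_frame_delay,
                         JitterEstimator& jitter_estimator,
                         uint32_t rtp_timestamp,
                         int64_t receive_time_ms,
                         size_t frame_bytes) {
  absl::optional<TimeDelta> frame_delay = inter_frame_delay.CalculateDelay(
      rtp_timestamp, Timestamp::Millis(receive_time_ms));
  if (frame_delay) {
    jitter_estimator.UpdateEstimate(*frame_delay, DataSize::Bytes(frame_bytes));
  }
}

// Back in InsertFrame(): the receive-stats callback below is removed along with
// the rest of the stats plumbing.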
- if (stats_callback_ && frame->is_last_spatial_layer) { - stats_callback_->OnCompleteFrame(frame->is_keyframe(), frame->size(), - frame->contentType()); - } - info->second.frame = std::move(frame); if (info->second.num_missing_continuous == 0) { @@ -610,45 +572,8 @@ bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame, return true; } -void FrameBuffer::UpdateJitterDelay() { - TRACE_EVENT0("webrtc", "FrameBuffer::UpdateJitterDelay"); - if (!stats_callback_) - return; - - int max_decode_ms; - int current_delay_ms; - int target_delay_ms; - int jitter_buffer_ms; - int min_playout_delay_ms; - int render_delay_ms; - if (timing_->GetTimings(&max_decode_ms, ¤t_delay_ms, &target_delay_ms, - &jitter_buffer_ms, &min_playout_delay_ms, - &render_delay_ms)) { - stats_callback_->OnFrameBufferTimingsUpdated( - max_decode_ms, current_delay_ms, target_delay_ms, jitter_buffer_ms, - min_playout_delay_ms, render_delay_ms); - } -} - -void FrameBuffer::UpdateTimingFrameInfo() { - TRACE_EVENT0("webrtc", "FrameBuffer::UpdateTimingFrameInfo"); - absl::optional info = timing_->GetTimingFrameInfo(); - if (info && stats_callback_) - stats_callback_->OnTimingFrameInfoUpdated(*info); -} - void FrameBuffer::ClearFramesAndHistory() { TRACE_EVENT0("webrtc", "FrameBuffer::ClearFramesAndHistory"); - if (stats_callback_) { - unsigned int dropped_frames = - std::count_if(frames_.begin(), frames_.end(), - [](const std::pair& frame) { - return frame.second.frame != nullptr; - }); - if (dropped_frames > 0) { - stats_callback_->OnDroppedFrames(dropped_frames); - } - } frames_.clear(); last_continuous_frame_.reset(); frames_to_decode_.clear(); @@ -660,39 +585,11 @@ void FrameBuffer::ClearFramesAndHistory() { std::unique_ptr FrameBuffer::CombineAndDeleteFrames( std::vector> frames) const { RTC_DCHECK(!frames.empty()); - size_t total_length = 0; - for (const auto& frame : frames) { - total_length += frame->size(); - } - const EncodedFrame& last_frame = *frames.back(); - std::unique_ptr first_frame = std::move(frames[0]); - auto encoded_image_buffer = EncodedImageBuffer::Create(total_length); - uint8_t* buffer = encoded_image_buffer->data(); - first_frame->SetSpatialLayerFrameSize(first_frame->SpatialIndex().value_or(0), - first_frame->size()); - memcpy(buffer, first_frame->data(), first_frame->size()); - buffer += first_frame->size(); - - // Spatial index of combined frame is set equal to spatial index of its top - // spatial layer. - first_frame->SetSpatialIndex(last_frame.SpatialIndex().value_or(0)); - - first_frame->video_timing_mutable()->network2_timestamp_ms = - last_frame.video_timing().network2_timestamp_ms; - first_frame->video_timing_mutable()->receive_finish_ms = - last_frame.video_timing().receive_finish_ms; - - // Append all remaining frames to the first one. - for (size_t i = 1; i < frames.size(); ++i) { - // Let |next_frame| fall out of scope so it is deleted after copying. 
- std::unique_ptr next_frame = std::move(frames[i]); - first_frame->SetSpatialLayerFrameSize( - next_frame->SpatialIndex().value_or(0), next_frame->size()); - memcpy(buffer, next_frame->data(), next_frame->size()); - buffer += next_frame->size(); + absl::InlinedVector, 4> inlined; + for (auto& frame : frames) { + inlined.push_back(std::move(frame)); } - first_frame->SetEncodedData(encoded_image_buffer); - return first_frame; + return webrtc::CombineAndDeleteFrames(std::move(inlined)); } FrameBuffer::FrameInfo::FrameInfo() = default; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h index 411c69cefd..1383c40ae3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h @@ -18,11 +18,13 @@ #include #include "absl/container/inlined_vector.h" +#include "api/field_trials_view.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "api/video/encoded_frame.h" #include "modules/video_coding/include/video_coding_defines.h" -#include "modules/video_coding/inter_frame_delay.h" -#include "modules/video_coding/jitter_estimator.h" +#include "modules/video_coding/timing/inter_frame_delay.h" +#include "modules/video_coding/timing/jitter_estimator.h" #include "modules/video_coding/utility/decoded_frames_history.h" #include "rtc_base/event.h" #include "rtc_base/experiments/field_trial_parser.h" @@ -30,7 +32,6 @@ #include "rtc_base/numerics/sequence_number_util.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_queue.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" @@ -38,7 +39,7 @@ namespace webrtc { class Clock; class VCMReceiveStatisticsCallback; -class VCMJitterEstimator; +class JitterEstimator; class VCMTiming; namespace video_coding { @@ -47,7 +48,7 @@ class FrameBuffer { public: FrameBuffer(Clock* clock, VCMTiming* timing, - VCMReceiveStatisticsCallback* stats_callback); + const FieldTrialsView& field_trials); FrameBuffer() = delete; FrameBuffer(const FrameBuffer&) = delete; @@ -64,7 +65,7 @@ class FrameBuffer { // or with nullptr if no frame is ready for decoding after `max_wait_time_ms`. void NextFrame(int64_t max_wait_time_ms, bool keyframe_required, - rtc::TaskQueue* callback_queue, + TaskQueueBase* callback_queue, NextFrameCallback handler); // Tells the FrameBuffer which protection mode that is in use. Affects @@ -118,7 +119,7 @@ class FrameBuffer { // Check that the references of `frame` are valid. 
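// Illustrative construction sketch, not part of this change. FieldTrialBasedConfig
// is used as one concrete FieldTrialsView; real callers may inject another
// implementation. NextFrame() must be invoked on the task queue that will receive
// the callback, which is why TaskQueueBase::Current() is passed here.

void ExampleDriveFrameBuffer(Clock* clock, VCMTiming* timing) {
  FieldTrialBasedConfig field_trials;
  video_coding::FrameBuffer buffer(clock, timing, field_trials);
  buffer.NextFrame(/*max_wait_time_ms=*/200, /*keyframe_required=*/false,
                   TaskQueueBase::Current(),
                   [](std::unique_ptr<EncodedFrame> frame) {
                     // nullptr means the wait timed out with nothing decodable.
                   });
}

// Reference validity is still checked for every inserted frame, as declared next.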
bool ValidReferences(const EncodedFrame& frame) const; - int64_t FindNextFrame(int64_t now_ms) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + int64_t FindNextFrame(Timestamp now) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); std::unique_ptr GetNextFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); @@ -141,15 +142,8 @@ class FrameBuffer { FrameMap::iterator info) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - void UpdateJitterDelay() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - - void UpdateTimingFrameInfo() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - void ClearFramesAndHistory() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - bool HasBadRenderTiming(const EncodedFrame& frame, int64_t now_ms) - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - // The cleaner solution would be to have the NextFrame function return a // vector of frames, but until the decoding pipeline can support decoding // multiple frames at the same time we combine all frames to one frame and @@ -167,20 +161,19 @@ class FrameBuffer { Mutex mutex_; Clock* const clock_; - rtc::TaskQueue* callback_queue_ RTC_GUARDED_BY(mutex_); + TaskQueueBase* callback_queue_ RTC_GUARDED_BY(mutex_); RepeatingTaskHandle callback_task_ RTC_GUARDED_BY(mutex_); NextFrameCallback frame_handler_ RTC_GUARDED_BY(mutex_); int64_t latest_return_time_ms_ RTC_GUARDED_BY(mutex_); bool keyframe_required_ RTC_GUARDED_BY(mutex_); - VCMJitterEstimator jitter_estimator_ RTC_GUARDED_BY(mutex_); + JitterEstimator jitter_estimator_ RTC_GUARDED_BY(mutex_); VCMTiming* const timing_ RTC_GUARDED_BY(mutex_); - VCMInterFrameDelay inter_frame_delay_ RTC_GUARDED_BY(mutex_); + InterFrameDelay inter_frame_delay_ RTC_GUARDED_BY(mutex_); absl::optional last_continuous_frame_ RTC_GUARDED_BY(mutex_); std::vector frames_to_decode_ RTC_GUARDED_BY(mutex_); bool stopped_ RTC_GUARDED_BY(mutex_); VCMVideoProtection protection_mode_ RTC_GUARDED_BY(mutex_); - VCMReceiveStatisticsCallback* const stats_callback_; int64_t last_log_non_decoded_ms_ RTC_GUARDED_BY(mutex_); // rtt_mult experiment settings. diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer3.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer3.cc deleted file mode 100644 index 32de3683c7..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer3.cc +++ /dev/null @@ -1,277 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_coding/frame_buffer3.h" - -#include -#include -#include -#include - -#include "absl/algorithm/container.h" -#include "absl/container/inlined_vector.h" -#include "rtc_base/logging.h" -#include "rtc_base/numerics/sequence_number_util.h" -#include "system_wrappers/include/field_trial.h" - -namespace webrtc { -namespace { -bool ValidReferences(const EncodedFrame& frame) { - // All references must point backwards, and duplicates are not allowed. 
- for (size_t i = 0; i < frame.num_references; ++i) { - if (frame.references[i] >= frame.Id()) - return false; - - for (size_t j = i + 1; j < frame.num_references; ++j) { - if (frame.references[i] == frame.references[j]) - return false; - } - } - - return true; -} - -// Since FrameBuffer::FrameInfo is private it can't be used in the function -// signature, hence the FrameIteratorT type. -template -rtc::ArrayView GetReferences(const FrameIteratorT& it) { - return {it->second.encoded_frame->references, - std::min(it->second.encoded_frame->num_references, - EncodedFrame::kMaxFrameReferences)}; -} - -template -int64_t GetFrameId(const FrameIteratorT& it) { - return it->first; -} - -template -int64_t GetTimestamp(const FrameIteratorT& it) { - return it->second.encoded_frame->Timestamp(); -} - -template -bool IsLastFrameInTemporalUnit(const FrameIteratorT& it) { - return it->second.encoded_frame->is_last_spatial_layer; -} -} // namespace - -FrameBuffer::FrameBuffer(int max_size, int max_decode_history) - : legacy_frame_id_jump_behavior_( - field_trial::IsEnabled("WebRTC-LegacyFrameIdJumpBehavior")), - max_size_(max_size), - decoded_frame_history_(max_decode_history) {} - -void FrameBuffer::InsertFrame(std::unique_ptr frame) { - if (!ValidReferences(*frame)) { - RTC_DLOG(LS_WARNING) << "Frame " << frame->Id() - << " has invalid references, dropping frame."; - return; - } - - if (frame->Id() <= decoded_frame_history_.GetLastDecodedFrameId()) { - if (legacy_frame_id_jump_behavior_ && frame->is_keyframe() && - AheadOf(frame->Timestamp(), - *decoded_frame_history_.GetLastDecodedFrameTimestamp())) { - RTC_DLOG(LS_WARNING) - << "Keyframe " << frame->Id() - << " has newer timestamp but older picture id, clearing buffer."; - Clear(); - } else { - // Already decoded past this frame. - return; - } - } - - if (frames_.size() == max_size_) { - if (frame->is_keyframe()) { - RTC_DLOG(LS_WARNING) << "Keyframe " << frame->Id() - << " inserted into full buffer, clearing buffer."; - Clear(); - } else { - // No space for this frame. - return; - } - } - - const int64_t frame_id = frame->Id(); - auto insert_res = frames_.emplace(frame_id, FrameInfo{std::move(frame)}); - if (!insert_res.second) { - // Frame has already been inserted. 
- return; - } - - if (frames_.size() == max_size_) { - RTC_DLOG(LS_WARNING) << "Frame " << frame_id - << " inserted, buffer is now full."; - } - - PropagateContinuity(insert_res.first); - FindNextAndLastDecodableTemporalUnit(); -} - -absl::InlinedVector, 4> -FrameBuffer::ExtractNextDecodableTemporalUnit() { - absl::InlinedVector, 4> res; - if (!next_decodable_temporal_unit_) { - return res; - } - - auto end_it = std::next(next_decodable_temporal_unit_->last_frame); - for (auto it = next_decodable_temporal_unit_->first_frame; it != end_it; - ++it) { - decoded_frame_history_.InsertDecoded(GetFrameId(it), GetTimestamp(it)); - res.push_back(std::move(it->second.encoded_frame)); - } - - DropNextDecodableTemporalUnit(); - return res; -} - -void FrameBuffer::DropNextDecodableTemporalUnit() { - if (!next_decodable_temporal_unit_) { - return; - } - - auto end_it = std::next(next_decodable_temporal_unit_->last_frame); - num_dropped_frames_ += std::count_if( - frames_.begin(), end_it, - [](const auto& f) { return f.second.encoded_frame != nullptr; }); - - frames_.erase(frames_.begin(), end_it); - FindNextAndLastDecodableTemporalUnit(); -} - -absl::optional FrameBuffer::LastContinuousFrameId() const { - return last_continuous_frame_id_; -} - -absl::optional FrameBuffer::LastContinuousTemporalUnitFrameId() const { - return last_continuous_temporal_unit_frame_id_; -} - -absl::optional FrameBuffer::NextDecodableTemporalUnitRtpTimestamp() - const { - if (!next_decodable_temporal_unit_) { - return absl::nullopt; - } - return GetTimestamp(next_decodable_temporal_unit_->first_frame); -} - -absl::optional FrameBuffer::LastDecodableTemporalUnitRtpTimestamp() - const { - return last_decodable_temporal_unit_timestamp_; -} - -int FrameBuffer::GetTotalNumberOfContinuousTemporalUnits() const { - return num_continuous_temporal_units_; -} -int FrameBuffer::GetTotalNumberOfDroppedFrames() const { - return num_dropped_frames_; -} - -bool FrameBuffer::IsContinuous(const FrameIterator& it) const { - for (int64_t reference : GetReferences(it)) { - if (decoded_frame_history_.WasDecoded(reference)) { - continue; - } - - auto reference_frame_it = frames_.find(reference); - if (reference_frame_it != frames_.end() && - reference_frame_it->second.continuous) { - continue; - } - - return false; - } - - return true; -} - -void FrameBuffer::PropagateContinuity(const FrameIterator& frame_it) { - for (auto it = frame_it; it != frames_.end(); ++it) { - if (!it->second.continuous) { - if (IsContinuous(it)) { - it->second.continuous = true; - if (last_continuous_frame_id_ < GetFrameId(it)) { - last_continuous_frame_id_ = GetFrameId(it); - } - if (IsLastFrameInTemporalUnit(it)) { - num_continuous_temporal_units_++; - if (last_continuous_temporal_unit_frame_id_ < GetFrameId(it)) { - last_continuous_temporal_unit_frame_id_ = GetFrameId(it); - } - } - } - } - } -} - -void FrameBuffer::FindNextAndLastDecodableTemporalUnit() { - next_decodable_temporal_unit_.reset(); - last_decodable_temporal_unit_timestamp_.reset(); - - if (!last_continuous_temporal_unit_frame_id_) { - return; - } - - FrameIterator first_frame_it = frames_.begin(); - FrameIterator last_frame_it = frames_.begin(); - absl::InlinedVector frames_in_temporal_unit; - for (auto frame_it = frames_.begin(); frame_it != frames_.end();) { - if (GetFrameId(frame_it) > *last_continuous_temporal_unit_frame_id_) { - break; - } - - if (GetTimestamp(frame_it) != GetTimestamp(first_frame_it)) { - frames_in_temporal_unit.clear(); - first_frame_it = frame_it; - } - - 
frames_in_temporal_unit.push_back(GetFrameId(frame_it)); - - last_frame_it = frame_it++; - - if (IsLastFrameInTemporalUnit(last_frame_it)) { - bool temporal_unit_decodable = true; - for (auto it = first_frame_it; it != frame_it && temporal_unit_decodable; - ++it) { - for (int64_t reference : GetReferences(it)) { - if (!decoded_frame_history_.WasDecoded(reference) && - !absl::c_linear_search(frames_in_temporal_unit, reference)) { - // A frame in the temporal unit has a non-decoded reference outside - // the temporal unit, so it's not yet ready to be decoded. - temporal_unit_decodable = false; - break; - } - } - } - - if (temporal_unit_decodable) { - if (!next_decodable_temporal_unit_) { - next_decodable_temporal_unit_ = {first_frame_it, last_frame_it}; - } - - last_decodable_temporal_unit_timestamp_ = GetTimestamp(first_frame_it); - } - } - } -} - -void FrameBuffer::Clear() { - frames_.clear(); - next_decodable_temporal_unit_.reset(); - last_decodable_temporal_unit_timestamp_.reset(); - last_continuous_frame_id_.reset(); - last_continuous_temporal_unit_frame_id_.reset(); - decoded_frame_history_.Clear(); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer3.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer3.h deleted file mode 100644 index 796c51db72..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer3.h +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_CODING_FRAME_BUFFER3_H_ -#define MODULES_VIDEO_CODING_FRAME_BUFFER3_H_ - -#include -#include -#include - -#include "absl/container/inlined_vector.h" -#include "absl/types/optional.h" -#include "api/units/timestamp.h" -#include "api/video/encoded_frame.h" -#include "modules/video_coding/utility/decoded_frames_history.h" - -namespace webrtc { -// The high level idea of the FrameBuffer is to order frames received from the -// network into a decodable stream. Frames are order by frame ID, and grouped -// into temporal units by timestamp. A temporal unit is decodable after all -// referenced frames outside the unit has been decoded, and a temporal unit is -// continuous if all referenced frames are directly or indirectly decodable. -// The FrameBuffer is thread-unsafe. -class FrameBuffer { - public: - // The `max_size` determines the maxmimum number of frames the buffer will - // store, and max_decode_history determines how far back (by frame ID) the - // buffer will store if a frame was decoded or not. - FrameBuffer(int max_size, int max_decode_history); - FrameBuffer(const FrameBuffer&) = delete; - FrameBuffer& operator=(const FrameBuffer&) = delete; - ~FrameBuffer() = default; - - // Inserted frames may only reference backwards, and must have no duplicate - // references. - void InsertFrame(std::unique_ptr frame); - - // Mark all frames belonging to the next decodable temporal unit as decoded - // and returns them. - absl::InlinedVector, 4> - ExtractNextDecodableTemporalUnit(); - - // Drop all frames in the next decodable unit. 
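The class comment in the removed header describes the intended call pattern: insert frames as they arrive and pull out whole temporal units once they become decodable. A rough usage sketch against that interface (FrameBuffer and EncodedFrame are the types declared above, so this is illustrative rather than a standalone program; error handling and threading are omitted):

#include <memory>
#include <utility>

void OnFrameReceived(webrtc::FrameBuffer& buffer,
                     std::unique_ptr<webrtc::EncodedFrame> frame) {
  buffer.InsertFrame(std::move(frame));
  // A temporal unit (all spatial layers sharing one RTP timestamp) comes out
  // only once every reference outside the unit has been decoded.
  auto unit = buffer.ExtractNextDecodableTemporalUnit();
  for (std::unique_ptr<webrtc::EncodedFrame>& layer : unit) {
    // Hand `layer` (or the layers combined into one frame) to the decoder.
    (void)layer;
  }
}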
- void DropNextDecodableTemporalUnit(); - - absl::optional LastContinuousFrameId() const; - absl::optional LastContinuousTemporalUnitFrameId() const; - absl::optional NextDecodableTemporalUnitRtpTimestamp() const; - absl::optional LastDecodableTemporalUnitRtpTimestamp() const; - - int GetTotalNumberOfContinuousTemporalUnits() const; - int GetTotalNumberOfDroppedFrames() const; - - private: - struct FrameInfo { - std::unique_ptr encoded_frame; - bool continuous = false; - }; - - using FrameMap = std::map; - using FrameIterator = FrameMap::iterator; - - struct TemporalUnit { - // Both first and last are inclusive. - FrameIterator first_frame; - FrameIterator last_frame; - }; - - bool IsContinuous(const FrameIterator& it) const; - void PropagateContinuity(const FrameIterator& frame_it); - void FindNextAndLastDecodableTemporalUnit(); - void Clear(); - - const bool legacy_frame_id_jump_behavior_; - const size_t max_size_; - FrameMap frames_; - absl::optional next_decodable_temporal_unit_; - absl::optional last_decodable_temporal_unit_timestamp_; - absl::optional last_continuous_frame_id_; - absl::optional last_continuous_temporal_unit_frame_id_; - video_coding::DecodedFramesHistory decoded_frame_history_; - - int num_continuous_temporal_units_ = 0; - int num_dropped_frames_ = 0; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_FRAME_BUFFER3_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_helpers.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_helpers.cc new file mode 100644 index 0000000000..e25eac8a18 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_helpers.cc @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/frame_helpers.h" + +#include + +#include "rtc_base/logging.h" + +namespace webrtc { + +namespace { +constexpr TimeDelta kMaxVideoDelay = TimeDelta::Millis(10000); +} + +bool FrameHasBadRenderTiming(Timestamp render_time, Timestamp now) { + // Zero render time means render immediately. + if (render_time.IsZero()) { + return false; + } + if (render_time < Timestamp::Zero()) { + return true; + } + TimeDelta frame_delay = render_time - now; + if (frame_delay.Abs() > kMaxVideoDelay) { + RTC_LOG(LS_WARNING) << "Frame has bad render timing because it is out of " + "the delay bounds (frame_delay_ms=" + << frame_delay.ms() + << ", kMaxVideoDelay_ms=" << kMaxVideoDelay.ms() << ")"; + return true; + } + return false; +} + +bool TargetVideoDelayIsTooLarge(TimeDelta target_video_delay) { + if (target_video_delay > kMaxVideoDelay) { + RTC_LOG(LS_WARNING) + << "Target video delay is too large. 
(target_video_delay_ms=" + << target_video_delay.ms() + << ", kMaxVideoDelay_ms=" << kMaxVideoDelay.ms() << ")"; + return true; + } + return false; +} + +std::unique_ptr CombineAndDeleteFrames( + absl::InlinedVector, 4> frames) { + RTC_DCHECK(!frames.empty()); + + if (frames.size() == 1) { + return std::move(frames[0]); + } + + size_t total_length = 0; + for (const auto& frame : frames) { + total_length += frame->size(); + } + const EncodedFrame& last_frame = *frames.back(); + std::unique_ptr first_frame = std::move(frames[0]); + auto encoded_image_buffer = EncodedImageBuffer::Create(total_length); + uint8_t* buffer = encoded_image_buffer->data(); + first_frame->SetSpatialLayerFrameSize(first_frame->SpatialIndex().value_or(0), + first_frame->size()); + memcpy(buffer, first_frame->data(), first_frame->size()); + buffer += first_frame->size(); + + // Spatial index of combined frame is set equal to spatial index of its top + // spatial layer. + first_frame->SetSpatialIndex(last_frame.SpatialIndex().value_or(0)); + + first_frame->video_timing_mutable()->network2_timestamp_ms = + last_frame.video_timing().network2_timestamp_ms; + first_frame->video_timing_mutable()->receive_finish_ms = + last_frame.video_timing().receive_finish_ms; + + // Append all remaining frames to the first one. + for (size_t i = 1; i < frames.size(); ++i) { + // Let |next_frame| fall out of scope so it is deleted after copying. + std::unique_ptr next_frame = std::move(frames[i]); + first_frame->SetSpatialLayerFrameSize( + next_frame->SpatialIndex().value_or(0), next_frame->size()); + memcpy(buffer, next_frame->data(), next_frame->size()); + buffer += next_frame->size(); + } + first_frame->SetEncodedData(encoded_image_buffer); + return first_frame; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_helpers.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_helpers.h new file mode 100644 index 0000000000..56ee593678 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_helpers.h @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
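The render-timing check added in frame_helpers.cc above reduces to three cases: a zero render time means "render immediately" and is fine, a negative render time is bad, and anything more than kMaxVideoDelay (10 s) away from now in either direction is bad. A simplified, self-contained restatement using plain milliseconds instead of webrtc::Timestamp (names are illustrative, not from the diff):

#include <cstdint>
#include <cstdlib>

// Simplified sketch of FrameHasBadRenderTiming().
bool RenderTimeLooksBad(int64_t render_time_ms, int64_t now_ms) {
  constexpr int64_t kMaxVideoDelayMs = 10000;
  if (render_time_ms == 0) return false;  // Render immediately.
  if (render_time_ms < 0) return true;    // Nonsensical render time.
  return std::llabs(render_time_ms - now_ms) > kMaxVideoDelayMs;
}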
+ */ + +#ifndef MODULES_VIDEO_CODING_FRAME_HELPERS_H_ +#define MODULES_VIDEO_CODING_FRAME_HELPERS_H_ + +#include + +#include "absl/container/inlined_vector.h" +#include "api/video/encoded_frame.h" + +namespace webrtc { + +bool FrameHasBadRenderTiming(Timestamp render_time, Timestamp now); + +bool TargetVideoDelayIsTooLarge(TimeDelta target_video_delay); + +std::unique_ptr CombineAndDeleteFrames( + absl::InlinedVector, 4> frames); + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_FRAME_HELPERS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.cc index fe59d5c7f1..b660e02b72 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.cc @@ -14,36 +14,35 @@ #include #include +#include +#include +#include "absl/algorithm/container.h" +#include "absl/types/optional.h" #include "api/video/video_timing.h" +#include "api/video_codecs/video_decoder.h" +#include "modules/include/module_common_types_public.h" #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/thread.h" -#include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { -VCMDecodedFrameCallback::VCMDecodedFrameCallback(VCMTiming* timing, - Clock* clock) - : _clock(clock), - _timing(timing), - _timestampMap(kDecoderFrameMemoryLength), - _extra_decode_time("t", absl::nullopt), - low_latency_renderer_enabled_("enabled", true), - low_latency_renderer_include_predecode_buffer_("include_predecode_buffer", - true) { +namespace { + +constexpr size_t kDecoderFrameMemoryLength = 10; + +} + +VCMDecodedFrameCallback::VCMDecodedFrameCallback( + VCMTiming* timing, + Clock* clock, + const FieldTrialsView& field_trials) + : _clock(clock), _timing(timing) { ntp_offset_ = _clock->CurrentNtpInMilliseconds() - _clock->TimeInMilliseconds(); - - ParseFieldTrial({&_extra_decode_time}, - field_trial::FindFullName("WebRTC-SlowDownDecoder")); - ParseFieldTrial({&low_latency_renderer_enabled_, - &low_latency_renderer_include_predecode_buffer_}, - field_trial::FindFullName("WebRTC-LowLatencyRenderer")); } VCMDecodedFrameCallback::~VCMDecodedFrameCallback() {} @@ -78,144 +77,148 @@ int32_t VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, return WEBRTC_VIDEO_CODEC_OK; } +std::pair, size_t> +VCMDecodedFrameCallback::FindFrameInfo(uint32_t rtp_timestamp) { + absl::optional frame_info; + + auto it = absl::c_find_if(frame_infos_, [rtp_timestamp](const auto& entry) { + return entry.rtp_timestamp == rtp_timestamp || + IsNewerTimestamp(entry.rtp_timestamp, rtp_timestamp); + }); + size_t dropped_frames = std::distance(frame_infos_.begin(), it); + + if (it != frame_infos_.end() && it->rtp_timestamp == rtp_timestamp) { + // Frame was found and should also be removed from the queue. + frame_info = std::move(*it); + ++it; + } + + frame_infos_.erase(frame_infos_.begin(), it); + return std::make_pair(std::move(frame_info), dropped_frames); +} + void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, absl::optional decode_time_ms, absl::optional qp) { - // Wait some extra time to simulate a slow decoder. 
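FindFrameInfo() above replaces the old VCMTimestampMap: it walks a deque ordered by arrival, discards entries older than the requested RTP timestamp, and reports how many were skipped so they can be counted as dropped. A standalone sketch of that lookup with a simplified wrap-aware comparison (the real code uses IsNewerTimestamp() from module_common_types_public.h; everything below is illustrative):

#include <cstddef>
#include <cstdint>
#include <deque>
#include <utility>

// Wrap-aware "t1 is newer than t2" for 32-bit RTP timestamps.
bool IsNewer(uint32_t t1, uint32_t t2) {
  return t1 != t2 && static_cast<uint32_t>(t1 - t2) < 0x80000000u;
}

struct PendingFrame { uint32_t rtp_timestamp; /* ...frame metadata... */ };

// Returns (found, dropped_count). Everything up to and including the match is
// removed, mirroring the erase() in FindFrameInfo(); older, unmatched entries
// are counted as dropped.
std::pair<bool, size_t> PopFrame(std::deque<PendingFrame>& pending,
                                 uint32_t rtp_timestamp) {
  size_t dropped = 0;
  bool found = false;
  while (!pending.empty() &&
         !IsNewer(pending.front().rtp_timestamp, rtp_timestamp)) {
    found = pending.front().rtp_timestamp == rtp_timestamp;
    pending.pop_front();
    if (found) break;
    ++dropped;
  }
  return {found, dropped};
}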
- if (_extra_decode_time) { - rtc::Thread::SleepMs(_extra_decode_time->ms()); - } - RTC_DCHECK(_receiveCallback) << "Callback must not be null at this point"; TRACE_EVENT_INSTANT1("webrtc", "VCMDecodedFrameCallback::Decoded", "timestamp", decodedImage.timestamp()); // TODO(holmer): We should improve this so that we can handle multiple // callbacks from one call to Decode(). - absl::optional frameInfo; + absl::optional frame_info; int timestamp_map_size = 0; int dropped_frames = 0; { MutexLock lock(&lock_); - int initial_timestamp_map_size = _timestampMap.Size(); - frameInfo = _timestampMap.Pop(decodedImage.timestamp()); - timestamp_map_size = _timestampMap.Size(); - // _timestampMap.Pop() erases all frame upto the specified timestamp and - // return the frame info for this timestamp if it exists. Thus, the - // difference in the _timestampMap size before and after Pop() will show - // internally dropped frames. - dropped_frames = - initial_timestamp_map_size - timestamp_map_size - (frameInfo ? 1 : 0); + std::tie(frame_info, dropped_frames) = + FindFrameInfo(decodedImage.timestamp()); + timestamp_map_size = frame_infos_.size(); } - if (dropped_frames > 0) { _receiveCallback->OnDroppedFrames(dropped_frames); } - if (!frameInfo) { + if (!frame_info) { RTC_LOG(LS_WARNING) << "Too many frames backed up in the decoder, dropping " "frame with timestamp " << decodedImage.timestamp(); return; } - decodedImage.set_ntp_time_ms(frameInfo->ntp_time_ms); - decodedImage.set_packet_infos(frameInfo->packet_infos); - decodedImage.set_rotation(frameInfo->rotation); - - if (low_latency_renderer_enabled_) { - absl::optional max_composition_delay_in_frames = - _timing->MaxCompositionDelayInFrames(); - if (max_composition_delay_in_frames) { - // Subtract frames that are in flight. - if (low_latency_renderer_include_predecode_buffer_) { - *max_composition_delay_in_frames -= timestamp_map_size; - *max_composition_delay_in_frames = - std::max(0, *max_composition_delay_in_frames); - } - decodedImage.set_max_composition_delay_in_frames( - max_composition_delay_in_frames); - } + decodedImage.set_ntp_time_ms(frame_info->ntp_time_ms); + decodedImage.set_packet_infos(frame_info->packet_infos); + decodedImage.set_rotation(frame_info->rotation); + VideoFrame::RenderParameters render_parameters = _timing->RenderParameters(); + if (render_parameters.max_composition_delay_in_frames) { + // Subtract frames that are in flight. + render_parameters.max_composition_delay_in_frames = + std::max(0, *render_parameters.max_composition_delay_in_frames - + timestamp_map_size); } + decodedImage.set_render_parameters(render_parameters); - RTC_DCHECK(frameInfo->decodeStart); + RTC_DCHECK(frame_info->decode_start); const Timestamp now = _clock->CurrentTime(); const TimeDelta decode_time = decode_time_ms ? TimeDelta::Millis(*decode_time_ms) - : now - *frameInfo->decodeStart; - _timing->StopDecodeTimer(decode_time.ms(), now.ms()); + : now - *frame_info->decode_start; + _timing->StopDecodeTimer(decode_time, now); decodedImage.set_processing_time( - {*frameInfo->decodeStart, *frameInfo->decodeStart + decode_time}); + {*frame_info->decode_start, *frame_info->decode_start + decode_time}); // Report timing information. TimingFrameInfo timing_frame_info; - if (frameInfo->timing.flags != VideoSendTiming::kInvalid) { + if (frame_info->timing.flags != VideoSendTiming::kInvalid) { int64_t capture_time_ms = decodedImage.ntp_time_ms() - ntp_offset_; // Convert remote timestamps to local time from ntp timestamps. 
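When a maximum composition delay is set (the low-latency path above), the new code subtracts the frames still waiting in the decoder from that budget and clamps at zero, instead of consulting the removed field-trial flags. A one-function sketch of that adjustment (names hypothetical):

#include <algorithm>

// Frames already queued ahead of the decoder eat into the composition-delay
// budget that the renderer is allowed to use.
int RemainingCompositionDelayFrames(int max_composition_delay_in_frames,
                                    int frames_waiting_in_decoder) {
  return std::max(0, max_composition_delay_in_frames -
                         frames_waiting_in_decoder);
}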
- frameInfo->timing.encode_start_ms -= ntp_offset_; - frameInfo->timing.encode_finish_ms -= ntp_offset_; - frameInfo->timing.packetization_finish_ms -= ntp_offset_; - frameInfo->timing.pacer_exit_ms -= ntp_offset_; - frameInfo->timing.network_timestamp_ms -= ntp_offset_; - frameInfo->timing.network2_timestamp_ms -= ntp_offset_; + frame_info->timing.encode_start_ms -= ntp_offset_; + frame_info->timing.encode_finish_ms -= ntp_offset_; + frame_info->timing.packetization_finish_ms -= ntp_offset_; + frame_info->timing.pacer_exit_ms -= ntp_offset_; + frame_info->timing.network_timestamp_ms -= ntp_offset_; + frame_info->timing.network2_timestamp_ms -= ntp_offset_; int64_t sender_delta_ms = 0; if (decodedImage.ntp_time_ms() < 0) { // Sender clock is not estimated yet. Make sure that sender times are all // negative to indicate that. Yet they still should be relatively correct. sender_delta_ms = - std::max({capture_time_ms, frameInfo->timing.encode_start_ms, - frameInfo->timing.encode_finish_ms, - frameInfo->timing.packetization_finish_ms, - frameInfo->timing.pacer_exit_ms, - frameInfo->timing.network_timestamp_ms, - frameInfo->timing.network2_timestamp_ms}) + + std::max({capture_time_ms, frame_info->timing.encode_start_ms, + frame_info->timing.encode_finish_ms, + frame_info->timing.packetization_finish_ms, + frame_info->timing.pacer_exit_ms, + frame_info->timing.network_timestamp_ms, + frame_info->timing.network2_timestamp_ms}) + 1; } timing_frame_info.capture_time_ms = capture_time_ms - sender_delta_ms; timing_frame_info.encode_start_ms = - frameInfo->timing.encode_start_ms - sender_delta_ms; + frame_info->timing.encode_start_ms - sender_delta_ms; timing_frame_info.encode_finish_ms = - frameInfo->timing.encode_finish_ms - sender_delta_ms; + frame_info->timing.encode_finish_ms - sender_delta_ms; timing_frame_info.packetization_finish_ms = - frameInfo->timing.packetization_finish_ms - sender_delta_ms; + frame_info->timing.packetization_finish_ms - sender_delta_ms; timing_frame_info.pacer_exit_ms = - frameInfo->timing.pacer_exit_ms - sender_delta_ms; + frame_info->timing.pacer_exit_ms - sender_delta_ms; timing_frame_info.network_timestamp_ms = - frameInfo->timing.network_timestamp_ms - sender_delta_ms; + frame_info->timing.network_timestamp_ms - sender_delta_ms; timing_frame_info.network2_timestamp_ms = - frameInfo->timing.network2_timestamp_ms - sender_delta_ms; + frame_info->timing.network2_timestamp_ms - sender_delta_ms; } - timing_frame_info.flags = frameInfo->timing.flags; - timing_frame_info.decode_start_ms = frameInfo->decodeStart->ms(); + timing_frame_info.flags = frame_info->timing.flags; + timing_frame_info.decode_start_ms = frame_info->decode_start->ms(); timing_frame_info.decode_finish_ms = now.ms(); - timing_frame_info.render_time_ms = frameInfo->renderTimeMs; + timing_frame_info.render_time_ms = + frame_info->render_time ? 
frame_info->render_time->ms() : -1; timing_frame_info.rtp_timestamp = decodedImage.timestamp(); - timing_frame_info.receive_start_ms = frameInfo->timing.receive_start_ms; - timing_frame_info.receive_finish_ms = frameInfo->timing.receive_finish_ms; + timing_frame_info.receive_start_ms = frame_info->timing.receive_start_ms; + timing_frame_info.receive_finish_ms = frame_info->timing.receive_finish_ms; _timing->SetTimingFrameInfo(timing_frame_info); - decodedImage.set_timestamp_us(frameInfo->renderTimeMs * - rtc::kNumMicrosecsPerMillisec); - _receiveCallback->FrameToRender(decodedImage, qp, decode_time.ms(), - frameInfo->content_type); + decodedImage.set_timestamp_us( + frame_info->render_time ? frame_info->render_time->us() : -1); + _receiveCallback->FrameToRender(decodedImage, qp, decode_time, + frame_info->content_type); } -void VCMDecodedFrameCallback::OnDecoderImplementationName( - const char* implementation_name) { - _receiveCallback->OnDecoderImplementationName(implementation_name); +void VCMDecodedFrameCallback::OnDecoderInfoChanged( + const VideoDecoder::DecoderInfo& decoder_info) { + _receiveCallback->OnDecoderInfoChanged(decoder_info); } -void VCMDecodedFrameCallback::Map(uint32_t timestamp, - const VCMFrameInformation& frameInfo) { +void VCMDecodedFrameCallback::Map(FrameInfo frameInfo) { int dropped_frames = 0; { MutexLock lock(&lock_); - int initial_size = _timestampMap.Size(); - _timestampMap.Add(timestamp, frameInfo); + int initial_size = frame_infos_.size(); + if (initial_size == kDecoderFrameMemoryLength) { + frame_infos_.pop_front(); + dropped_frames = 1; + } + frame_infos_.push_back(std::move(frameInfo)); // If no frame is dropped, the new size should be `initial_size` + 1 - dropped_frames = (initial_size + 1) - _timestampMap.Size(); } if (dropped_frames > 0) { _receiveCallback->OnDroppedFrames(dropped_frames); @@ -226,8 +229,8 @@ void VCMDecodedFrameCallback::ClearTimestampMap() { int dropped_frames = 0; { MutexLock lock(&lock_); - dropped_frames = _timestampMap.Size(); - _timestampMap.Clear(); + dropped_frames = frame_infos_.size(); + frame_infos_.clear(); } if (dropped_frames > 0) { _receiveCallback->OnDroppedFrames(dropped_frames); @@ -252,8 +255,7 @@ bool VCMGenericDecoder::Configure(const VideoDecoder::Settings& settings) { decoder_info_ = decoder_->GetDecoderInfo(); RTC_LOG(LS_INFO) << "Decoder implementation: " << decoder_info_.ToString(); if (_callback) { - _callback->OnDecoderImplementationName( - decoder_info_.implementation_name.c_str()); + _callback->OnDecoderInfoChanged(decoder_info_); } return ok; } @@ -261,9 +263,13 @@ bool VCMGenericDecoder::Configure(const VideoDecoder::Settings& settings) { int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, Timestamp now) { TRACE_EVENT1("webrtc", "VCMGenericDecoder::Decode", "timestamp", frame.Timestamp()); - VCMFrameInformation frame_info; - frame_info.decodeStart = now; - frame_info.renderTimeMs = frame.RenderTimeMs(); + FrameInfo frame_info; + frame_info.rtp_timestamp = frame.Timestamp(); + frame_info.decode_start = now; + frame_info.render_time = + frame.RenderTimeMs() >= 0 + ? 
absl::make_optional(Timestamp::Millis(frame.RenderTimeMs())) + : absl::nullopt; frame_info.rotation = frame.rotation(); frame_info.timing = frame.video_timing(); frame_info.ntp_time_ms = frame.EncodedImage().ntp_time_ms_; @@ -278,7 +284,7 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, Timestamp now) { } else { frame_info.content_type = _last_keyframe_content_type; } - _callback->Map(frame.Timestamp(), frame_info); + _callback->Map(std::move(frame_info)); int32_t ret = decoder_->Decode(frame.EncodedImage(), frame.MissingFrame(), frame.RenderTimeMs()); @@ -287,10 +293,10 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, Timestamp now) { RTC_LOG(LS_INFO) << "Changed decoder implementation to: " << decoder_info.ToString(); decoder_info_ = decoder_info; - _callback->OnDecoderImplementationName( - decoder_info.implementation_name.empty() - ? "unknown" - : decoder_info.implementation_name.c_str()); + if (decoder_info.implementation_name.empty()) { + decoder_info.implementation_name = "unknown"; + } + _callback->OnDecoderInfoChanged(std::move(decoder_info)); } if (ret < WEBRTC_VIDEO_CODEC_OK) { RTC_LOG(LS_WARNING) << "Failed to decode frame with timestamp " @@ -308,8 +314,7 @@ int32_t VCMGenericDecoder::RegisterDecodeCompleteCallback( _callback = callback; int32_t ret = decoder_->RegisterDecodeCompleteCallback(callback); if (callback && !decoder_info_.implementation_name.empty()) { - callback->OnDecoderImplementationName( - decoder_info_.implementation_name.c_str()); + callback->OnDecoderInfoChanged(decoder_info_); } return ret; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.h index 31d8460194..7dc6d34c01 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.h @@ -11,27 +11,48 @@ #ifndef MODULES_VIDEO_CODING_GENERIC_DECODER_H_ #define MODULES_VIDEO_CODING_GENERIC_DECODER_H_ +#include +#include #include +#include +#include "api/field_trials_view.h" #include "api/sequence_checker.h" -#include "api/units/time_delta.h" #include "api/video_codecs/video_decoder.h" #include "modules/video_coding/encoded_frame.h" -#include "modules/video_coding/include/video_codec_interface.h" -#include "modules/video_coding/timestamp_map.h" -#include "modules/video_coding/timing.h" -#include "rtc_base/experiments/field_trial_parser.h" +#include "modules/video_coding/timing/timing.h" #include "rtc_base/synchronization/mutex.h" namespace webrtc { class VCMReceiveCallback; -enum { kDecoderFrameMemoryLength = 10 }; +struct FrameInfo { + FrameInfo() = default; + FrameInfo(const FrameInfo&) = delete; + FrameInfo& operator=(const FrameInfo&) = delete; + FrameInfo(FrameInfo&&) = default; + FrameInfo& operator=(FrameInfo&&) = default; + + uint32_t rtp_timestamp; + // This is likely not optional, but some inputs seem to sometimes be negative. + // TODO(bugs.webrtc.org/13756): See if this can be replaced with Timestamp + // once all inputs to this field use Timestamp instead of an integer. + absl::optional render_time; + absl::optional decode_start; + VideoRotation rotation; + VideoContentType content_type; + EncodedImage::Timing timing; + int64_t ntp_time_ms; + RtpPacketInfos packet_infos; + // ColorSpace is not stored here, as it might be modified by decoders. 
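FrameInfo instances are now held in a plain std::deque bounded at kDecoderFrameMemoryLength (10): when a new frame arrives while the deque is full, the oldest pending entry is evicted and reported as dropped, which is what the rewritten Map() above does. A compact sketch of that policy (simplified, illustrative names):

#include <cstddef>
#include <deque>
#include <utility>

template <typename FrameInfoT>
size_t PushBounded(std::deque<FrameInfoT>& pending, FrameInfoT info,
                   size_t max_pending = 10 /* kDecoderFrameMemoryLength */) {
  size_t dropped = 0;
  if (pending.size() == max_pending) {
    pending.pop_front();  // Give up on the oldest undecoded frame.
    dropped = 1;
  }
  pending.push_back(std::move(info));
  return dropped;  // Caller reports this via OnDroppedFrames().
}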
+}; class VCMDecodedFrameCallback : public DecodedImageCallback { public: - VCMDecodedFrameCallback(VCMTiming* timing, Clock* clock); + VCMDecodedFrameCallback(VCMTiming* timing, + Clock* clock, + const FieldTrialsView& field_trials); ~VCMDecodedFrameCallback() override; void SetUserReceiveCallback(VCMReceiveCallback* receiveCallback); VCMReceiveCallback* UserReceiveCallback(); @@ -42,14 +63,16 @@ class VCMDecodedFrameCallback : public DecodedImageCallback { absl::optional decode_time_ms, absl::optional qp) override; - void OnDecoderImplementationName(const char* implementation_name); + void OnDecoderInfoChanged(const VideoDecoder::DecoderInfo& decoder_info); - void Map(uint32_t timestamp, const VCMFrameInformation& frameInfo); + void Map(FrameInfo frameInfo); void ClearTimestampMap(); private: + std::pair, size_t> FindFrameInfo( + uint32_t rtp_timestamp) RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); + SequenceChecker construction_thread_; - // Protect `_timestampMap`. Clock* const _clock; // This callback must be set before the decoder thread starts running // and must only be unset when external threads (e.g decoder thread) @@ -59,20 +82,8 @@ class VCMDecodedFrameCallback : public DecodedImageCallback { VCMReceiveCallback* _receiveCallback = nullptr; VCMTiming* _timing; Mutex lock_; - VCMTimestampMap _timestampMap RTC_GUARDED_BY(lock_); + std::deque frame_infos_ RTC_GUARDED_BY(lock_); int64_t ntp_offset_; - // Set by the field trial WebRTC-SlowDownDecoder to simulate a slow decoder. - FieldTrialOptional _extra_decode_time; - - // Set by the field trial WebRTC-LowLatencyRenderer. The parameter `enabled` - // determines if the low-latency renderer algorithm should be used for the - // case min playout delay=0 and max playout delay>0. - FieldTrialParameter low_latency_renderer_enabled_; - // Set by the field trial WebRTC-LowLatencyRenderer. The parameter - // `include_predecode_buffer` determines if the predecode buffer should be - // taken into account when calculating maximum number of frames in composition - // queue. 
- FieldTrialParameter low_latency_renderer_include_predecode_buffer_; }; class VCMGenericDecoder { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/h264_sprop_parameter_sets.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/h264_sprop_parameter_sets.h index dbf27ef034..8a32f31cc0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/h264_sprop_parameter_sets.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/h264_sprop_parameter_sets.h @@ -15,13 +15,15 @@ #include #include -#include "rtc_base/constructor_magic.h" - namespace webrtc { class H264SpropParameterSets { public: H264SpropParameterSets() {} + + H264SpropParameterSets(const H264SpropParameterSets&) = delete; + H264SpropParameterSets& operator=(const H264SpropParameterSets&) = delete; + bool DecodeSprop(const std::string& sprop); const std::vector& sps_nalu() { return sps_; } const std::vector& pps_nalu() { return pps_; } @@ -29,7 +31,6 @@ class H264SpropParameterSets { private: std::vector sps_; std::vector pps_; - RTC_DISALLOW_COPY_AND_ASSIGN(H264SpropParameterSets); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_codec_initializer.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_codec_initializer.h index e979f9c867..270c4dbcd1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_codec_initializer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_codec_initializer.h @@ -15,7 +15,7 @@ #include #include -#include "api/video_codecs/video_encoder_config.h" +#include "video/config/video_encoder_config.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_coding.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_coding.h index 77b3eac236..ee9326d9fc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_coding.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_coding.h @@ -11,9 +11,9 @@ #ifndef MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_H_ #define MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_H_ +#include "api/field_trials_view.h" #include "api/video/video_frame.h" #include "api/video_codecs/video_decoder.h" -#include "modules/include/module.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/include/video_coding_defines.h" @@ -25,10 +25,14 @@ class VideoDecoder; class VideoEncoder; struct CodecSpecificInfo; -class VideoCodingModule : public Module { +class VideoCodingModule { public: // DEPRECATED. - static VideoCodingModule* Create(Clock* clock); + static VideoCodingModule* Create( + Clock* clock, + const FieldTrialsView* field_trials = nullptr); + + virtual ~VideoCodingModule() = default; /* * Receiver @@ -136,6 +140,9 @@ class VideoCodingModule : public Module { virtual void SetNackSettings(size_t max_nack_list_size, int max_packet_age_to_nack, int max_incomplete_time_ms) = 0; + + // Runs delayed tasks. Expected to be called periodically. 
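Because VideoCodingModule no longer derives from Module, the comment added below notes that callers must drive its delayed work themselves by calling Process() periodically. A rough sketch of such a driver using a plain thread — real code would use a WebRTC task queue, and the 10 ms period and all names here are assumptions for illustration only:

#include <atomic>
#include <chrono>
#include <thread>

// Illustrative only: periodically pump a VCM-style object's Process() method.
template <typename Vcm>
class VcmProcessDriver {
 public:
  explicit VcmProcessDriver(Vcm* vcm)
      : vcm_(vcm), running_(true), thread_([this] { Loop(); }) {}
  ~VcmProcessDriver() {
    running_ = false;
    thread_.join();
  }

 private:
  void Loop() {
    while (running_) {
      vcm_->Process();  // Runs NACK/keyframe-request timers and similar work.
      std::this_thread::sleep_for(std::chrono::milliseconds(10));
    }
  }
  Vcm* vcm_;
  std::atomic<bool> running_;
  std::thread thread_;
};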
+ virtual void Process() = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_coding_defines.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_coding_defines.h index 2f1d8c82bd..8f70e0298d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_coding_defines.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_coding_defines.h @@ -18,6 +18,7 @@ #include "api/video/video_content_type.h" #include "api/video/video_frame.h" #include "api/video/video_timing.h" +#include "api/video_codecs/video_decoder.h" namespace webrtc { @@ -51,14 +52,15 @@ class VCMReceiveCallback { public: virtual int32_t FrameToRender(VideoFrame& videoFrame, // NOLINT absl::optional qp, - int32_t decode_time_ms, + TimeDelta decode_time, VideoContentType content_type) = 0; virtual void OnDroppedFrames(uint32_t frames_dropped); // Called when the current receive codec changes. virtual void OnIncomingPayloadType(int payload_type); - virtual void OnDecoderImplementationName(const char* implementation_name); + virtual void OnDecoderInfoChanged( + const VideoDecoder::DecoderInfo& decoder_info); protected: virtual ~VCMReceiveCallback() {} diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_error_codes.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_error_codes.h index 4ae0ca127d..17146ce205 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_error_codes.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_error_codes.h @@ -15,15 +15,16 @@ // Define return values +#define WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT 5 #define WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME 4 #define WEBRTC_VIDEO_CODEC_NO_OUTPUT 1 #define WEBRTC_VIDEO_CODEC_OK 0 #define WEBRTC_VIDEO_CODEC_ERROR -1 #define WEBRTC_VIDEO_CODEC_MEMORY -3 #define WEBRTC_VIDEO_CODEC_ERR_PARAMETER -4 +#define WEBRTC_VIDEO_CODEC_TIMEOUT -6 #define WEBRTC_VIDEO_CODEC_UNINITIALIZED -7 #define WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE -13 -#define WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT -14 #define WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED -15 #define WEBRTC_VIDEO_CODEC_ENCODER_FAILURE -16 diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/inter_frame_delay.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/inter_frame_delay.cc deleted file mode 100644 index d0c21aa771..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/inter_frame_delay.cc +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_coding/inter_frame_delay.h" - -namespace webrtc { - -VCMInterFrameDelay::VCMInterFrameDelay(int64_t currentWallClock) { - Reset(currentWallClock); -} - -// Resets the delay estimate. -void VCMInterFrameDelay::Reset(int64_t currentWallClock) { - _zeroWallClock = currentWallClock; - _wrapArounds = 0; - _prevWallClock = 0; - _prevTimestamp = 0; - _dTS = 0; -} - -// Calculates the delay of a frame with the given timestamp. -// This method is called when the frame is complete. 
-bool VCMInterFrameDelay::CalculateDelay(uint32_t timestamp, - int64_t* delay, - int64_t currentWallClock) { - if (_prevWallClock == 0) { - // First set of data, initialization, wait for next frame. - _prevWallClock = currentWallClock; - _prevTimestamp = timestamp; - *delay = 0; - return true; - } - - int32_t prevWrapArounds = _wrapArounds; - CheckForWrapArounds(timestamp); - - // This will be -1 for backward wrap arounds and +1 for forward wrap arounds. - int32_t wrapAroundsSincePrev = _wrapArounds - prevWrapArounds; - - // Account for reordering in jitter variance estimate in the future? - // Note that this also captures incomplete frames which are grabbed for - // decoding after a later frame has been complete, i.e. real packet losses. - if ((wrapAroundsSincePrev == 0 && timestamp < _prevTimestamp) || - wrapAroundsSincePrev < 0) { - *delay = 0; - return false; - } - - // Compute the compensated timestamp difference and convert it to ms and round - // it to closest integer. - _dTS = static_cast( - (timestamp + wrapAroundsSincePrev * (static_cast(1) << 32) - - _prevTimestamp) / - 90.0 + - 0.5); - - // frameDelay is the difference of dT and dTS -- i.e. the difference of the - // wall clock time difference and the timestamp difference between two - // following frames. - *delay = static_cast(currentWallClock - _prevWallClock - _dTS); - - _prevTimestamp = timestamp; - _prevWallClock = currentWallClock; - - return true; -} - -// Investigates if the timestamp clock has overflowed since the last timestamp -// and keeps track of the number of wrap arounds since reset. -void VCMInterFrameDelay::CheckForWrapArounds(uint32_t timestamp) { - if (timestamp < _prevTimestamp) { - // This difference will probably be less than -2^31 if we have had a wrap - // around (e.g. timestamp = 1, _prevTimestamp = 2^32 - 1). Since it is cast - // to a int32_t, it should be positive. - if (static_cast(timestamp - _prevTimestamp) > 0) { - // Forward wrap around. - _wrapArounds++; - } - // This difference will probably be less than -2^31 if we have had a - // backward wrap around. Since it is cast to a int32_t, it should be - // positive. - } else if (static_cast(_prevTimestamp - timestamp) > 0) { - // Backward wrap around. - _wrapArounds--; - } -} -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/inter_frame_delay.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/inter_frame_delay.h deleted file mode 100644 index f121c61498..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/inter_frame_delay.h +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_ -#define MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_ - -#include - -namespace webrtc { - -class VCMInterFrameDelay { - public: - explicit VCMInterFrameDelay(int64_t currentWallClock); - - // Resets the estimate. Zeros are given as parameters. - void Reset(int64_t currentWallClock); - - // Calculates the delay of a frame with the given timestamp. - // This method is called when the frame is complete. - // - // Input: - // - timestamp : RTP timestamp of a received frame. 
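The deleted CalculateDelay() converts the RTP timestamp delta (90 kHz clock for video) into milliseconds and subtracts it from the wall-clock delta; a positive result means the frame arrived later than its timestamp spacing predicts, and that sample feeds the jitter estimator. A self-contained numeric sketch of that computation with simplified single-step wrap handling (not the class above, and reordering is not filtered out here):

#include <cmath>
#include <cstdint>

// delay = (actual wall-clock gap) - (gap implied by the 90 kHz timestamps).
int64_t InterFrameDelayMs(uint32_t prev_rtp, int64_t prev_wall_ms,
                          uint32_t curr_rtp, int64_t curr_wall_ms) {
  int64_t rtp_diff = static_cast<int64_t>(curr_rtp) - prev_rtp;
  if (rtp_diff < -(int64_t{1} << 31)) rtp_diff += int64_t{1} << 32;  // wrap
  int64_t expected_ms = std::llround(rtp_diff / 90.0);
  return (curr_wall_ms - prev_wall_ms) - expected_ms;
}

// Example: 3000 RTP ticks => 33 ms expected spacing; a 40 ms wall-clock gap
// therefore yields a +7 ms inter-frame delay sample.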
- // - *delay : Pointer to memory where the result should be - // stored. - // - currentWallClock : The current time in milliseconds. - // Should be -1 for normal operation, only used - // for testing. - // Return value : true if OK, false when reordered timestamps. - bool CalculateDelay(uint32_t timestamp, - int64_t* delay, - int64_t currentWallClock); - - private: - // Controls if the RTP timestamp counter has had a wrap around between the - // current and the previously received frame. - // - // Input: - // - timestamp : RTP timestamp of the current frame. - void CheckForWrapArounds(uint32_t timestamp); - - int64_t _zeroWallClock; // Local timestamp of the first video packet received - int32_t _wrapArounds; // Number of wrapArounds detected - // The previous timestamp passed to the delay estimate - uint32_t _prevTimestamp; - // The previous wall clock timestamp used by the delay estimate - int64_t _prevWallClock; - // Wrap-around compensated difference between incoming timestamps - int64_t _dTS; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.cc index 4c6015e027..39553c9f3f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.cc @@ -9,18 +9,18 @@ */ #include "modules/video_coding/jitter_buffer.h" - #include #include #include +#include "api/units/timestamp.h" #include "modules/video_coding/frame_buffer.h" #include "modules/video_coding/include/video_coding.h" -#include "modules/video_coding/inter_frame_delay.h" #include "modules/video_coding/internal_defines.h" #include "modules/video_coding/jitter_buffer_common.h" -#include "modules/video_coding/jitter_estimator.h" #include "modules/video_coding/packet.h" +#include "modules/video_coding/timing/inter_frame_delay.h" +#include "modules/video_coding/timing/jitter_estimator.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "system_wrappers/include/clock.h" @@ -109,7 +109,8 @@ void FrameList::Reset(UnorderedFrameList* free_frames) { } VCMJitterBuffer::VCMJitterBuffer(Clock* clock, - std::unique_ptr event) + std::unique_ptr event, + const FieldTrialsView& field_trials) : clock_(clock), running_(false), frame_event_(std::move(event)), @@ -122,8 +123,7 @@ VCMJitterBuffer::VCMJitterBuffer(Clock* clock, num_consecutive_old_packets_(0), num_packets_(0), num_duplicated_packets_(0), - jitter_estimate_(clock), - inter_frame_delay_(clock_->TimeInMilliseconds()), + jitter_estimate_(clock, field_trials), missing_sequence_numbers_(SequenceNumberLessThan()), latest_received_sequence_number_(0), max_nack_list_size_(0), @@ -192,7 +192,7 @@ void VCMJitterBuffer::Flush() { num_consecutive_old_packets_ = 0; // Also reset the jitter and delay estimates jitter_estimate_.Reset(); - inter_frame_delay_.Reset(clock_->TimeInMilliseconds()); + inter_frame_delay_.Reset(); waiting_for_completion_.frame_size = 0; waiting_for_completion_.timestamp = 0; waiting_for_completion_.latest_packet_time = -1; @@ -392,13 +392,13 @@ VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet, if (error != kNoError) return error; - int64_t now_ms = clock_->TimeInMilliseconds(); + Timestamp now = clock_->CurrentTime(); // We are keeping track of the first and latest seq numbers, and // the number of wraps to be able to calculate how many packets we expect. 
if (first_packet_since_reset_) { // Now it's time to start estimating jitter // reset the delay estimate. - inter_frame_delay_.Reset(now_ms); + inter_frame_delay_.Reset(); } // Empty packets may bias the jitter estimate (lacking size component), @@ -408,9 +408,9 @@ VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet, // This can get bad if we have a lot of duplicate packets, // we will then count some packet multiple times. waiting_for_completion_.frame_size += packet.sizeBytes; - waiting_for_completion_.latest_packet_time = now_ms; + waiting_for_completion_.latest_packet_time = now.ms(); } else if (waiting_for_completion_.latest_packet_time >= 0 && - waiting_for_completion_.latest_packet_time + 2000 <= now_ms) { + waiting_for_completion_.latest_packet_time + 2000 <= now.ms()) { // A packet should never be more than two seconds late UpdateJitterEstimate(waiting_for_completion_, true); waiting_for_completion_.latest_packet_time = -1; @@ -425,7 +425,7 @@ VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet, frame_data.rtt_ms = kDefaultRtt; frame_data.rolling_average_packets_per_frame = average_packets_per_frame_; VCMFrameBufferEnum buffer_state = - frame->InsertPacket(packet, now_ms, frame_data); + frame->InsertPacket(packet, now.ms(), frame_data); if (buffer_state > 0) { if (first_packet_since_reset_) { @@ -572,7 +572,7 @@ void VCMJitterBuffer::FindAndInsertContinuousFramesWithState( uint32_t VCMJitterBuffer::EstimatedJitterMs() { MutexLock lock(&mutex_); const double rtt_mult = 1.0f; - return jitter_estimate_.GetJitterEstimate(rtt_mult, absl::nullopt); + return jitter_estimate_.GetJitterEstimate(rtt_mult, absl::nullopt).ms(); } void VCMJitterBuffer::SetNackSettings(size_t max_nack_list_size, @@ -869,17 +869,18 @@ void VCMJitterBuffer::UpdateJitterEstimate(const VCMFrameBuffer& frame, void VCMJitterBuffer::UpdateJitterEstimate(int64_t latest_packet_time_ms, uint32_t timestamp, unsigned int frame_size, - bool incomplete_frame) { + bool /*incomplete_frame*/) { if (latest_packet_time_ms == -1) { return; } - int64_t frame_delay; - bool not_reordered = inter_frame_delay_.CalculateDelay( - timestamp, &frame_delay, latest_packet_time_ms); + auto frame_delay = inter_frame_delay_.CalculateDelay( + timestamp, Timestamp::Millis(latest_packet_time_ms)); + + bool not_reordered = frame_delay.has_value(); // Filter out frames which have been reordered in time by the network if (not_reordered) { // Update the jitter estimate with the new samples - jitter_estimate_.UpdateEstimate(frame_delay, frame_size, incomplete_frame); + jitter_estimate_.UpdateEstimate(*frame_delay, DataSize::Bytes(frame_size)); } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.h index 137a687ded..7ca8953428 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.h @@ -17,17 +17,16 @@ #include #include +#include "api/field_trials_view.h" #include "modules/include/module_common_types.h" #include "modules/include/module_common_types_public.h" -#include "modules/utility/include/process_thread.h" #include "modules/video_coding/decoding_state.h" #include "modules/video_coding/event_wrapper.h" #include "modules/video_coding/include/video_coding.h" #include "modules/video_coding/include/video_coding_defines.h" -#include "modules/video_coding/inter_frame_delay.h" #include 
"modules/video_coding/jitter_buffer_common.h" -#include "modules/video_coding/jitter_estimator.h" -#include "rtc_base/constructor_magic.h" +#include "modules/video_coding/timing/inter_frame_delay.h" +#include "modules/video_coding/timing/jitter_estimator.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -71,10 +70,15 @@ class FrameList class VCMJitterBuffer { public: - VCMJitterBuffer(Clock* clock, std::unique_ptr event); + VCMJitterBuffer(Clock* clock, + std::unique_ptr event, + const FieldTrialsView& field_trials); ~VCMJitterBuffer(); + VCMJitterBuffer(const VCMJitterBuffer&) = delete; + VCMJitterBuffer& operator=(const VCMJitterBuffer&) = delete; + // Initializes and starts jitter buffer. void Start(); @@ -248,9 +252,9 @@ class VCMJitterBuffer { // Jitter estimation. // Filter for estimating jitter. - VCMJitterEstimator jitter_estimate_; + JitterEstimator jitter_estimate_; // Calculates network delays used for jitter calculations. - VCMInterFrameDelay inter_frame_delay_; + InterFrameDelay inter_frame_delay_; VCMJitterSample waiting_for_completion_; // Holds the internal NACK list (the missing sequence numbers). @@ -265,8 +269,6 @@ class VCMJitterBuffer { // average_packets_per_frame converges fast if we have fewer than this many // frames. int frame_counter_; - - RTC_DISALLOW_COPY_AND_ASSIGN(VCMJitterBuffer); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_estimator.cc deleted file mode 100644 index 87848aec06..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_estimator.cc +++ /dev/null @@ -1,436 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_coding/jitter_estimator.h" - -#include -#include - -#include -#include - -#include "absl/types/optional.h" -#include "modules/video_coding/internal_defines.h" -#include "modules/video_coding/rtt_filter.h" -#include "rtc_base/experiments/jitter_upper_bound_experiment.h" -#include "rtc_base/numerics/safe_conversions.h" -#include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" - -namespace webrtc { -namespace { -static constexpr uint32_t kStartupDelaySamples = 30; -static constexpr int64_t kFsAccuStartupSamples = 5; -static constexpr double kMaxFramerateEstimate = 200.0; -static constexpr int64_t kNackCountTimeoutMs = 60000; -static constexpr double kDefaultMaxTimestampDeviationInSigmas = 3.5; -} // namespace - -VCMJitterEstimator::VCMJitterEstimator(Clock* clock) - : _phi(0.97), - _psi(0.9999), - _alphaCountMax(400), - _thetaLow(0.000001), - _nackLimit(3), - _numStdDevDelayOutlier(15), - _numStdDevFrameSizeOutlier(3), - _noiseStdDevs(2.33), // ~Less than 1% chance - // (look up in normal distribution table)... - _noiseStdDevOffset(30.0), // ...of getting 30 ms freezes - _rttFilter(), - fps_counter_(30), // TODO(sprang): Use an estimator with limit based on - // time, rather than number of samples. 
- time_deviation_upper_bound_( - JitterUpperBoundExperiment::GetUpperBoundSigmas().value_or( - kDefaultMaxTimestampDeviationInSigmas)), - enable_reduced_delay_( - !field_trial::IsEnabled("WebRTC-ReducedJitterDelayKillSwitch")), - clock_(clock) { - Reset(); -} - -VCMJitterEstimator::~VCMJitterEstimator() {} - -VCMJitterEstimator& VCMJitterEstimator::operator=( - const VCMJitterEstimator& rhs) { - if (this != &rhs) { - memcpy(_thetaCov, rhs._thetaCov, sizeof(_thetaCov)); - memcpy(_Qcov, rhs._Qcov, sizeof(_Qcov)); - - _avgFrameSize = rhs._avgFrameSize; - _varFrameSize = rhs._varFrameSize; - _maxFrameSize = rhs._maxFrameSize; - _fsSum = rhs._fsSum; - _fsCount = rhs._fsCount; - _lastUpdateT = rhs._lastUpdateT; - _prevEstimate = rhs._prevEstimate; - _prevFrameSize = rhs._prevFrameSize; - _avgNoise = rhs._avgNoise; - _alphaCount = rhs._alphaCount; - _filterJitterEstimate = rhs._filterJitterEstimate; - _startupCount = rhs._startupCount; - _latestNackTimestamp = rhs._latestNackTimestamp; - _nackCount = rhs._nackCount; - _rttFilter = rhs._rttFilter; - clock_ = rhs.clock_; - } - return *this; -} - -// Resets the JitterEstimate. -void VCMJitterEstimator::Reset() { - _theta[0] = 1 / (512e3 / 8); - _theta[1] = 0; - _varNoise = 4.0; - - _thetaCov[0][0] = 1e-4; - _thetaCov[1][1] = 1e2; - _thetaCov[0][1] = _thetaCov[1][0] = 0; - _Qcov[0][0] = 2.5e-10; - _Qcov[1][1] = 1e-10; - _Qcov[0][1] = _Qcov[1][0] = 0; - _avgFrameSize = 500; - _maxFrameSize = 500; - _varFrameSize = 100; - _lastUpdateT = -1; - _prevEstimate = -1.0; - _prevFrameSize = 0; - _avgNoise = 0.0; - _alphaCount = 1; - _filterJitterEstimate = 0.0; - _latestNackTimestamp = 0; - _nackCount = 0; - _latestNackTimestamp = 0; - _fsSum = 0; - _fsCount = 0; - _startupCount = 0; - _rttFilter.Reset(); - fps_counter_.Reset(); -} - -// Updates the estimates with the new measurements. -void VCMJitterEstimator::UpdateEstimate(int64_t frameDelayMS, - uint32_t frameSizeBytes, - bool incompleteFrame /* = false */) { - if (frameSizeBytes == 0) { - return; - } - int deltaFS = frameSizeBytes - _prevFrameSize; - if (_fsCount < kFsAccuStartupSamples) { - _fsSum += frameSizeBytes; - _fsCount++; - } else if (_fsCount == kFsAccuStartupSamples) { - // Give the frame size filter. - _avgFrameSize = static_cast(_fsSum) / static_cast(_fsCount); - _fsCount++; - } - if (!incompleteFrame || frameSizeBytes > _avgFrameSize) { - double avgFrameSize = _phi * _avgFrameSize + (1 - _phi) * frameSizeBytes; - if (frameSizeBytes < _avgFrameSize + 2 * sqrt(_varFrameSize)) { - // Only update the average frame size if this sample wasn't a key frame. - _avgFrameSize = avgFrameSize; - } - // Update the variance anyway since we want to capture cases where we only - // get key frames. - _varFrameSize = VCM_MAX( - _phi * _varFrameSize + (1 - _phi) * (frameSizeBytes - avgFrameSize) * - (frameSizeBytes - avgFrameSize), - 1.0); - } - - // Update max frameSize estimate. - _maxFrameSize = - VCM_MAX(_psi * _maxFrameSize, static_cast(frameSizeBytes)); - - if (_prevFrameSize == 0) { - _prevFrameSize = frameSizeBytes; - return; - } - _prevFrameSize = frameSizeBytes; - - // Cap frameDelayMS based on the current time deviation noise. - int64_t max_time_deviation_ms = - static_cast(time_deviation_upper_bound_ * sqrt(_varNoise) + 0.5); - frameDelayMS = std::max(std::min(frameDelayMS, max_time_deviation_ms), - -max_time_deviation_ms); - - // Only update the Kalman filter if the sample is not considered an extreme - // outlier. 
Even if it is an extreme outlier from a delay point of view, if - // the frame size also is large the deviation is probably due to an incorrect - // line slope. - double deviation = DeviationFromExpectedDelay(frameDelayMS, deltaFS); - - if (fabs(deviation) < _numStdDevDelayOutlier * sqrt(_varNoise) || - frameSizeBytes > - _avgFrameSize + _numStdDevFrameSizeOutlier * sqrt(_varFrameSize)) { - // Update the variance of the deviation from the line given by the Kalman - // filter. - EstimateRandomJitter(deviation, incompleteFrame); - // Prevent updating with frames which have been congested by a large frame, - // and therefore arrives almost at the same time as that frame. - // This can occur when we receive a large frame (key frame) which has been - // delayed. The next frame is of normal size (delta frame), and thus deltaFS - // will be << 0. This removes all frame samples which arrives after a key - // frame. - if ((!incompleteFrame || deviation >= 0.0) && - static_cast(deltaFS) > -0.25 * _maxFrameSize) { - // Update the Kalman filter with the new data - KalmanEstimateChannel(frameDelayMS, deltaFS); - } - } else { - int nStdDev = - (deviation >= 0) ? _numStdDevDelayOutlier : -_numStdDevDelayOutlier; - EstimateRandomJitter(nStdDev * sqrt(_varNoise), incompleteFrame); - } - // Post process the total estimated jitter - if (_startupCount >= kStartupDelaySamples) { - PostProcessEstimate(); - } else { - _startupCount++; - } -} - -// Updates the nack/packet ratio. -void VCMJitterEstimator::FrameNacked() { - if (_nackCount < _nackLimit) { - _nackCount++; - } - _latestNackTimestamp = clock_->TimeInMicroseconds(); -} - -// Updates Kalman estimate of the channel. -// The caller is expected to sanity check the inputs. -void VCMJitterEstimator::KalmanEstimateChannel(int64_t frameDelayMS, - int32_t deltaFSBytes) { - double Mh[2]; - double hMh_sigma; - double kalmanGain[2]; - double measureRes; - double t00, t01; - - // Kalman filtering - - // Prediction - // M = M + Q - _thetaCov[0][0] += _Qcov[0][0]; - _thetaCov[0][1] += _Qcov[0][1]; - _thetaCov[1][0] += _Qcov[1][0]; - _thetaCov[1][1] += _Qcov[1][1]; - - // Kalman gain - // K = M*h'/(sigma2n + h*M*h') = M*h'/(1 + h*M*h') - // h = [dFS 1] - // Mh = M*h' - // hMh_sigma = h*M*h' + R - Mh[0] = _thetaCov[0][0] * deltaFSBytes + _thetaCov[0][1]; - Mh[1] = _thetaCov[1][0] * deltaFSBytes + _thetaCov[1][1]; - // sigma weights measurements with a small deltaFS as noisy and - // measurements with large deltaFS as good - if (_maxFrameSize < 1.0) { - return; - } - double sigma = (300.0 * exp(-fabs(static_cast(deltaFSBytes)) / - (1e0 * _maxFrameSize)) + - 1) * - sqrt(_varNoise); - if (sigma < 1.0) { - sigma = 1.0; - } - hMh_sigma = deltaFSBytes * Mh[0] + Mh[1] + sigma; - if ((hMh_sigma < 1e-9 && hMh_sigma >= 0) || - (hMh_sigma > -1e-9 && hMh_sigma <= 0)) { - RTC_DCHECK_NOTREACHED(); - return; - } - kalmanGain[0] = Mh[0] / hMh_sigma; - kalmanGain[1] = Mh[1] / hMh_sigma; - - // Correction - // theta = theta + K*(dT - h*theta) - measureRes = frameDelayMS - (deltaFSBytes * _theta[0] + _theta[1]); - _theta[0] += kalmanGain[0] * measureRes; - _theta[1] += kalmanGain[1] * measureRes; - - if (_theta[0] < _thetaLow) { - _theta[0] = _thetaLow; - } - - // M = (I - K*h)*M - t00 = _thetaCov[0][0]; - t01 = _thetaCov[0][1]; - _thetaCov[0][0] = (1 - kalmanGain[0] * deltaFSBytes) * t00 - - kalmanGain[0] * _thetaCov[1][0]; - _thetaCov[0][1] = (1 - kalmanGain[0] * deltaFSBytes) * t01 - - kalmanGain[0] * _thetaCov[1][1]; - _thetaCov[1][0] = _thetaCov[1][0] * (1 - kalmanGain[1]) - - 
kalmanGain[1] * deltaFSBytes * t00; - _thetaCov[1][1] = _thetaCov[1][1] * (1 - kalmanGain[1]) - - kalmanGain[1] * deltaFSBytes * t01; - - // Covariance matrix, must be positive semi-definite. - RTC_DCHECK(_thetaCov[0][0] + _thetaCov[1][1] >= 0 && - _thetaCov[0][0] * _thetaCov[1][1] - - _thetaCov[0][1] * _thetaCov[1][0] >= - 0 && - _thetaCov[0][0] >= 0); -} - -// Calculate difference in delay between a sample and the expected delay -// estimated by the Kalman filter -double VCMJitterEstimator::DeviationFromExpectedDelay( - int64_t frameDelayMS, - int32_t deltaFSBytes) const { - return frameDelayMS - (_theta[0] * deltaFSBytes + _theta[1]); -} - -// Estimates the random jitter by calculating the variance of the sample -// distance from the line given by theta. -void VCMJitterEstimator::EstimateRandomJitter(double d_dT, - bool incompleteFrame) { - uint64_t now = clock_->TimeInMicroseconds(); - if (_lastUpdateT != -1) { - fps_counter_.AddSample(now - _lastUpdateT); - } - _lastUpdateT = now; - - if (_alphaCount == 0) { - RTC_DCHECK_NOTREACHED(); - return; - } - double alpha = - static_cast(_alphaCount - 1) / static_cast(_alphaCount); - _alphaCount++; - if (_alphaCount > _alphaCountMax) - _alphaCount = _alphaCountMax; - - // In order to avoid a low frame rate stream to react slower to changes, - // scale the alpha weight relative a 30 fps stream. - double fps = GetFrameRate(); - if (fps > 0.0) { - double rate_scale = 30.0 / fps; - // At startup, there can be a lot of noise in the fps estimate. - // Interpolate rate_scale linearly, from 1.0 at sample #1, to 30.0 / fps - // at sample #kStartupDelaySamples. - if (_alphaCount < kStartupDelaySamples) { - rate_scale = - (_alphaCount * rate_scale + (kStartupDelaySamples - _alphaCount)) / - kStartupDelaySamples; - } - alpha = pow(alpha, rate_scale); - } - - double avgNoise = alpha * _avgNoise + (1 - alpha) * d_dT; - double varNoise = - alpha * _varNoise + (1 - alpha) * (d_dT - _avgNoise) * (d_dT - _avgNoise); - if (!incompleteFrame || varNoise > _varNoise) { - _avgNoise = avgNoise; - _varNoise = varNoise; - } - if (_varNoise < 1.0) { - // The variance should never be zero, since we might get stuck and consider - // all samples as outliers. - _varNoise = 1.0; - } -} - -double VCMJitterEstimator::NoiseThreshold() const { - double noiseThreshold = _noiseStdDevs * sqrt(_varNoise) - _noiseStdDevOffset; - if (noiseThreshold < 1.0) { - noiseThreshold = 1.0; - } - return noiseThreshold; -} - -// Calculates the current jitter estimate from the filtered estimates. -double VCMJitterEstimator::CalculateEstimate() { - double ret = _theta[0] * (_maxFrameSize - _avgFrameSize) + NoiseThreshold(); - - // A very low estimate (or negative) is neglected. - if (ret < 1.0) { - if (_prevEstimate <= 0.01) { - ret = 1.0; - } else { - ret = _prevEstimate; - } - } - if (ret > 10000.0) { // Sanity - ret = 10000.0; - } - _prevEstimate = ret; - return ret; -} - -void VCMJitterEstimator::PostProcessEstimate() { - _filterJitterEstimate = CalculateEstimate(); -} - -void VCMJitterEstimator::UpdateRtt(int64_t rttMs) { - _rttFilter.Update(rttMs); -} - -// Returns the current filtered estimate if available, -// otherwise tries to calculate an estimate. 
-int VCMJitterEstimator::GetJitterEstimate( - double rttMultiplier, - absl::optional rttMultAddCapMs) { - double jitterMS = CalculateEstimate() + OPERATING_SYSTEM_JITTER; - uint64_t now = clock_->TimeInMicroseconds(); - - if (now - _latestNackTimestamp > kNackCountTimeoutMs * 1000) - _nackCount = 0; - - if (_filterJitterEstimate > jitterMS) - jitterMS = _filterJitterEstimate; - if (_nackCount >= _nackLimit) { - if (rttMultAddCapMs.has_value()) { - jitterMS += - std::min(_rttFilter.RttMs() * rttMultiplier, rttMultAddCapMs.value()); - } else { - jitterMS += _rttFilter.RttMs() * rttMultiplier; - } - } - - if (enable_reduced_delay_) { - static const double kJitterScaleLowThreshold = 5.0; - static const double kJitterScaleHighThreshold = 10.0; - double fps = GetFrameRate(); - // Ignore jitter for very low fps streams. - if (fps < kJitterScaleLowThreshold) { - if (fps == 0.0) { - return rtc::checked_cast(std::max(0.0, jitterMS) + 0.5); - } - return 0; - } - - // Semi-low frame rate; scale by factor linearly interpolated from 0.0 at - // kJitterScaleLowThreshold to 1.0 at kJitterScaleHighThreshold. - if (fps < kJitterScaleHighThreshold) { - jitterMS = - (1.0 / (kJitterScaleHighThreshold - kJitterScaleLowThreshold)) * - (fps - kJitterScaleLowThreshold) * jitterMS; - } - } - - return rtc::checked_cast(std::max(0.0, jitterMS) + 0.5); -} - -double VCMJitterEstimator::GetFrameRate() const { - if (fps_counter_.ComputeMean() <= 0.0) - return 0; - - double fps = 1000000.0 / fps_counter_.ComputeMean(); - // Sanity check. - RTC_DCHECK_GE(fps, 0.0); - if (fps > kMaxFramerateEstimate) { - fps = kMaxFramerateEstimate; - } - return fps; -} -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_estimator.h deleted file mode 100644 index 1d69b95769..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_estimator.h +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_ -#define MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_ - -#include "modules/video_coding/rtt_filter.h" -#include "rtc_base/rolling_accumulator.h" - -namespace webrtc { - -class Clock; - -class VCMJitterEstimator { - public: - explicit VCMJitterEstimator(Clock* clock); - virtual ~VCMJitterEstimator(); - VCMJitterEstimator& operator=(const VCMJitterEstimator& rhs); - - // Resets the estimate to the initial state. - void Reset(); - - // Updates the jitter estimate with the new data. - // - // Input: - // - frameDelay : Delay-delta calculated by UTILDelayEstimate in - // milliseconds. - // - frameSize : Frame size of the current frame. - // - incompleteFrame : Flags if the frame is used to update the - // estimate before it was complete. - // Default is false. - void UpdateEstimate(int64_t frameDelayMS, - uint32_t frameSizeBytes, - bool incompleteFrame = false); - - // Returns the current jitter estimate in milliseconds and adds an RTT - // dependent term in cases of retransmission. - // Input: - // - rttMultiplier : RTT param multiplier (when applicable). 
- // - // Return value : Jitter estimate in milliseconds. - virtual int GetJitterEstimate(double rttMultiplier, - absl::optional rttMultAddCapMs); - - // Updates the nack counter. - void FrameNacked(); - - // Updates the RTT filter. - // - // Input: - // - rttMs : RTT in ms. - void UpdateRtt(int64_t rttMs); - - // A constant describing the delay from the jitter buffer to the delay on the - // receiving side which is not accounted for by the jitter buffer nor the - // decoding delay estimate. - static const uint32_t OPERATING_SYSTEM_JITTER = 10; - - protected: - // These are protected for better testing possibilities. - double _theta[2]; // Estimated line parameters (slope, offset) - double _varNoise; // Variance of the time-deviation from the line - - private: - // Updates the Kalman filter for the line describing the frame size dependent - // jitter. - // - // Input: - // - frameDelayMS : Delay-delta calculated by UTILDelayEstimate in - // milliseconds. - // - deltaFSBytes : Frame size delta, i.e. frame size at time T - // : minus frame size at time T-1. - void KalmanEstimateChannel(int64_t frameDelayMS, int32_t deltaFSBytes); - - // Updates the random jitter estimate, i.e. the variance of the time - // deviations from the line given by the Kalman filter. - // - // Input: - // - d_dT : The deviation from the kalman estimate. - // - incompleteFrame : True if the frame used to update the - // estimate with was incomplete. - void EstimateRandomJitter(double d_dT, bool incompleteFrame); - - double NoiseThreshold() const; - - // Calculates the current jitter estimate. - // - // Return value : The current jitter estimate in milliseconds. - double CalculateEstimate(); - - // Post process the calculated estimate. - void PostProcessEstimate(); - - // Calculates the difference in delay between a sample and the expected delay - // estimated by the Kalman filter. - // - // Input: - // - frameDelayMS : Delay-delta calculated by UTILDelayEstimate in - // milliseconds. - // - deltaFS : Frame size delta, i.e. frame size at time - // T minus frame size at time T-1. - // - // Return value : The difference in milliseconds. - double DeviationFromExpectedDelay(int64_t frameDelayMS, - int32_t deltaFSBytes) const; - - double GetFrameRate() const; - - // Constants, filter parameters. 
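The estimate that the deleted VCMJitterEstimator reports is essentially the Kalman slope applied to the spread between the largest and the average frame size, plus a noise threshold derived from the measured delay variance. A small sketch of that arithmetic follows; the parameter values used in main() (2.33 standard deviations, 30 ms offset) are assumed defaults, not values taken from this patch.

#include <algorithm>
#include <cmath>
#include <cstdio>

// jitter = slope * (max_frame_size - avg_frame_size) + noise_threshold,
// noise_threshold = noise_std_devs * sqrt(var_noise) - noise_offset (floored at 1).
double JitterEstimateMs(double slope_ms_per_byte, double max_frame_size,
                        double avg_frame_size, double var_noise,
                        double noise_std_devs, double noise_offset,
                        double prev_estimate_ms) {
  const double noise_threshold =
      std::max(1.0, noise_std_devs * std::sqrt(var_noise) - noise_offset);
  double estimate =
      slope_ms_per_byte * (max_frame_size - avg_frame_size) + noise_threshold;
  if (estimate < 1.0) {
    // Very small or negative estimates fall back to the previous value.
    estimate = prev_estimate_ms > 0.01 ? prev_estimate_ms : 1.0;
  }
  return std::min(estimate, 10000.0);  // sanity cap, as in the removed code
}

int main() {
  // Example: 0.5 ms per extra KB, key frames 4x the average size, noisy channel.
  std::printf("%.1f ms\n",
              JitterEstimateMs(0.0005, 40000.0, 10000.0, 400.0, 2.33, 30.0, 0.0));
}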
- const double _phi; - const double _psi; - const uint32_t _alphaCountMax; - const double _thetaLow; - const uint32_t _nackLimit; - const int32_t _numStdDevDelayOutlier; - const int32_t _numStdDevFrameSizeOutlier; - const double _noiseStdDevs; - const double _noiseStdDevOffset; - - double _thetaCov[2][2]; // Estimate covariance - double _Qcov[2][2]; // Process noise covariance - double _avgFrameSize; // Average frame size - double _varFrameSize; // Frame size variance - double _maxFrameSize; // Largest frame size received (descending - // with a factor _psi) - uint32_t _fsSum; - uint32_t _fsCount; - - int64_t _lastUpdateT; - double _prevEstimate; // The previously returned jitter estimate - uint32_t _prevFrameSize; // Frame size of the previous frame - double _avgNoise; // Average of the random jitter - uint32_t _alphaCount; - double _filterJitterEstimate; // The filtered sum of jitter estimates - - uint32_t _startupCount; - - int64_t - _latestNackTimestamp; // Timestamp in ms when the latest nack was seen - uint32_t _nackCount; // Keeps track of the number of nacks received, - // but never goes above _nackLimit - VCMRttFilter _rttFilter; - - rtc::RollingAccumulator fps_counter_; - const double time_deviation_upper_bound_; - const bool enable_reduced_delay_; - Clock* clock_; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_estimator_tests.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_estimator_tests.cc deleted file mode 100644 index 14baae7e81..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_estimator_tests.cc +++ /dev/null @@ -1,163 +0,0 @@ -/* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include -#include - -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "modules/video_coding/jitter_estimator.h" -#include "rtc_base/experiments/jitter_upper_bound_experiment.h" -#include "rtc_base/numerics/histogram_percentile_counter.h" -#include "rtc_base/strings/string_builder.h" -#include "rtc_base/time_utils.h" -#include "system_wrappers/include/clock.h" -#include "test/field_trial.h" -#include "test/gtest.h" - -namespace webrtc { - -class TestVCMJitterEstimator : public ::testing::Test { - protected: - TestVCMJitterEstimator() : fake_clock_(0) {} - - virtual void SetUp() { - estimator_ = std::make_unique(&fake_clock_); - } - - void AdvanceClock(int64_t microseconds) { - fake_clock_.AdvanceTimeMicroseconds(microseconds); - } - - SimulatedClock fake_clock_; - std::unique_ptr estimator_; -}; - -// Generates some simple test data in the form of a sawtooth wave. -class ValueGenerator { - public: - explicit ValueGenerator(int32_t amplitude) - : amplitude_(amplitude), counter_(0) {} - virtual ~ValueGenerator() {} - - int64_t Delay() const { return ((counter_ % 11) - 5) * amplitude_; } - - uint32_t FrameSize() const { return 1000 + Delay(); } - - void Advance() { ++counter_; } - - private: - const int32_t amplitude_; - int64_t counter_; -}; - -// 5 fps, disable jitter delay altogether. 
-TEST_F(TestVCMJitterEstimator, TestLowRate) { - ValueGenerator gen(10); - uint64_t time_delta_us = rtc::kNumMicrosecsPerSec / 5; - for (int i = 0; i < 60; ++i) { - estimator_->UpdateEstimate(gen.Delay(), gen.FrameSize()); - AdvanceClock(time_delta_us); - if (i > 2) - EXPECT_EQ(estimator_->GetJitterEstimate(0, absl::nullopt), 0); - gen.Advance(); - } -} - -TEST_F(TestVCMJitterEstimator, TestLowRateDisabled) { - test::ScopedFieldTrials field_trials( - "WebRTC-ReducedJitterDelayKillSwitch/Enabled/"); - SetUp(); - - ValueGenerator gen(10); - uint64_t time_delta_us = rtc::kNumMicrosecsPerSec / 5; - for (int i = 0; i < 60; ++i) { - estimator_->UpdateEstimate(gen.Delay(), gen.FrameSize()); - AdvanceClock(time_delta_us); - if (i > 2) - EXPECT_GT(estimator_->GetJitterEstimate(0, absl::nullopt), 0); - gen.Advance(); - } -} - -TEST_F(TestVCMJitterEstimator, TestUpperBound) { - struct TestContext { - TestContext() - : upper_bound(0.0), - rtt_mult(0), - rtt_mult_add_cap_ms(absl::nullopt), - percentiles(1000) {} - double upper_bound; - double rtt_mult; - absl::optional rtt_mult_add_cap_ms; - rtc::HistogramPercentileCounter percentiles; - }; - std::vector test_cases(4); - - // Large upper bound, rtt_mult = 0, and nullopt for rtt_mult addition cap. - test_cases[0].upper_bound = 100.0; - test_cases[0].rtt_mult = 0; - test_cases[0].rtt_mult_add_cap_ms = absl::nullopt; - // Small upper bound, rtt_mult = 0, and nullopt for rtt_mult addition cap. - test_cases[1].upper_bound = 3.5; - test_cases[1].rtt_mult = 0; - test_cases[1].rtt_mult_add_cap_ms = absl::nullopt; - // Large upper bound, rtt_mult = 1, and large rtt_mult addition cap value. - test_cases[2].upper_bound = 1000.0; - test_cases[2].rtt_mult = 1.0; - test_cases[2].rtt_mult_add_cap_ms = 200.0; - // Large upper bound, rtt_mult = 1, and small rtt_mult addition cap value. - test_cases[3].upper_bound = 1000.0; - test_cases[3].rtt_mult = 1.0; - test_cases[3].rtt_mult_add_cap_ms = 10.0; - - // Test jitter buffer upper_bound and rtt_mult addition cap sizes. - for (TestContext& context : test_cases) { - // Set up field trial and reset jitter estimator. - char string_buf[64]; - rtc::SimpleStringBuilder ssb(string_buf); - ssb << JitterUpperBoundExperiment::kJitterUpperBoundExperimentName - << "/Enabled-" << context.upper_bound << "/"; - test::ScopedFieldTrials field_trials(ssb.str()); - SetUp(); - - ValueGenerator gen(50); - uint64_t time_delta_us = rtc::kNumMicrosecsPerSec / 30; - constexpr int64_t kRttMs = 250; - for (int i = 0; i < 100; ++i) { - estimator_->UpdateEstimate(gen.Delay(), gen.FrameSize()); - AdvanceClock(time_delta_us); - estimator_->FrameNacked(); // To test rtt_mult. - estimator_->UpdateRtt(kRttMs); // To test rtt_mult. - context.percentiles.Add( - static_cast(estimator_->GetJitterEstimate( - context.rtt_mult, context.rtt_mult_add_cap_ms))); - gen.Advance(); - } - } - - // Median should be similar after three seconds. Allow 5% error margin. - uint32_t median_unbound = *test_cases[0].percentiles.GetPercentile(0.5); - uint32_t median_bounded = *test_cases[1].percentiles.GetPercentile(0.5); - EXPECT_NEAR(median_unbound, median_bounded, (median_unbound * 5) / 100); - - // Max should be lower for the bounded case. - uint32_t max_unbound = *test_cases[0].percentiles.GetPercentile(1.0); - uint32_t max_bounded = *test_cases[1].percentiles.GetPercentile(1.0); - EXPECT_GT(max_unbound, static_cast(max_bounded * 1.25)); - - // With rtt_mult = 1, max should be lower with small rtt_mult add cap value. 
- max_unbound = *test_cases[2].percentiles.GetPercentile(1.0); - max_bounded = *test_cases[3].percentiles.GetPercentile(1.0); - EXPECT_GT(max_unbound, static_cast(max_bounded * 1.25)); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/media_opt_util.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/media_opt_util.h index 1d98ea6ca4..a74d1af6cb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/media_opt_util.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/media_opt_util.h @@ -26,10 +26,10 @@ namespace media_optimization { // Number of time periods used for (max) window filter for packet loss // TODO(marpan): set reasonable window size for filtered packet loss, // adjustment should be based on logged/real data of loss stats/correlation. -enum { kLossPrHistorySize = 10 }; +constexpr int kLossPrHistorySize = 10; // 1000 ms, total filter length is (kLossPrHistorySize * 1000) ms -enum { kLossPrShortFilterWinMs = 1000 }; +constexpr int kLossPrShortFilterWinMs = 1000; // The type of filter used on the received packet loss reports. enum FilterPacketLossMode { @@ -41,11 +41,11 @@ enum FilterPacketLossMode { // Thresholds for hybrid NACK/FEC // common to media optimization and the jitter buffer. -const int64_t kLowRttNackMs = 20; +constexpr int64_t kLowRttNackMs = 20; // If the RTT is higher than this an extra RTT wont be added to to the jitter // buffer delay. -const int kMaxRttDelayThreshold = 500; +constexpr int kMaxRttDelayThreshold = 500; struct VCMProtectionParameters { VCMProtectionParameters(); @@ -175,15 +175,15 @@ class VCMFecMethod : public VCMProtectionMethod { int BitsPerFrame(const VCMProtectionParameters* parameters); protected: - enum { kUpperLimitFramesFec = 6 }; + static constexpr int kUpperLimitFramesFec = 6; // Thresholds values for the bytes/frame and round trip time, below which we // may turn off FEC, depending on `_numLayers` and `_maxFramesFec`. // Max bytes/frame for VGA, corresponds to ~140k at 25fps. - enum { kMaxBytesPerFrameForFec = 700 }; + static constexpr int kMaxBytesPerFrameForFec = 700; // Max bytes/frame for CIF and lower: corresponds to ~80k at 25fps. - enum { kMaxBytesPerFrameForFecLow = 400 }; + static constexpr int kMaxBytesPerFrameForFecLow = 400; // Max bytes/frame for frame size larger than VGA, ~200k at 25fps. 
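The media_opt_util.h hunk above swaps anonymous-enum constants for constexpr integers. A short illustration of the practical difference follows, with hypothetical names; the old form gives each constant its own unnamed enum type, while the new form is a plain int.

#include <cstdio>

struct OldStyle {
  enum { kMaxBytesPerFrame = 700 };  // unnamed enum: the constant has its own enum type
};

struct NewStyle {
  static constexpr int kMaxBytesPerFrame = 700;  // a real int, typed and constexpr
};

int main() {
  // Both work in constant expressions, but the constexpr form has a real
  // integer type, so auto deduction, overload resolution and printf-style
  // formatting behave predictably.
  auto old_value = OldStyle::kMaxBytesPerFrame;  // deduces the unnamed enum type
  auto new_value = NewStyle::kMaxBytesPerFrame;  // deduces int
  std::printf("%d %d\n", static_cast<int>(old_value), new_value);
}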
- enum { kMaxBytesPerFrameForFecHigh = 1000 }; + static constexpr int kMaxBytesPerFrameForFecHigh = 1000; const RateControlSettings rate_control_settings_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/nack_requester.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/nack_requester.cc index dac85fc189..4e74032d01 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/nack_requester.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/nack_requester.cc @@ -19,29 +19,26 @@ #include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" -#include "rtc_base/task_queue.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { -const int kMaxPacketAge = 10000; -const int kMaxNackPackets = 1000; -const int kDefaultRttMs = 100; -const int kMaxNackRetries = 10; -const int kMaxReorderedPackets = 128; -const int kNumReorderingBuckets = 10; -const int kDefaultSendNackDelayMs = 0; - -int64_t GetSendNackDelay() { +constexpr int kMaxPacketAge = 10'000; +constexpr int kMaxNackPackets = 1000; +constexpr TimeDelta kDefaultRtt = TimeDelta::Millis(100); +constexpr int kMaxNackRetries = 10; +constexpr int kMaxReorderedPackets = 128; +constexpr int kNumReorderingBuckets = 10; +constexpr TimeDelta kDefaultSendNackDelay = TimeDelta::Zero(); + +TimeDelta GetSendNackDelay(const FieldTrialsView& field_trials) { int64_t delay_ms = strtol( - webrtc::field_trial::FindFullName("WebRTC-SendNackDelayMs").c_str(), - nullptr, 10); + field_trials.Lookup("WebRTC-SendNackDelayMs").c_str(), nullptr, 10); if (delay_ms > 0 && delay_ms <= 20) { RTC_LOG(LS_INFO) << "SendNackDelay is set to " << delay_ms; - return delay_ms; + return TimeDelta::Millis(delay_ms); } - return kDefaultSendNackDelayMs; + return kDefaultSendNackDelay; } } // namespace @@ -74,8 +71,8 @@ void NackPeriodicProcessor::UnregisterNackModule(NackRequesterBase* module) { repeating_task_.Stop(); } -// RTC_RUN_ON(sequence_) void NackPeriodicProcessor::ProcessNackModules() { + RTC_DCHECK_RUN_ON(&sequence_); for (NackRequesterBase* module : modules_) module->ProcessNacks(); } @@ -93,63 +90,36 @@ ScopedNackPeriodicProcessorRegistration:: } NackRequester::NackInfo::NackInfo() - : seq_num(0), send_at_seq_num(0), sent_at_time(-1), retries(0) {} + : seq_num(0), + send_at_seq_num(0), + created_at_time(Timestamp::MinusInfinity()), + sent_at_time(Timestamp::MinusInfinity()), + retries(0) {} NackRequester::NackInfo::NackInfo(uint16_t seq_num, uint16_t send_at_seq_num, - int64_t created_at_time) + Timestamp created_at_time) : seq_num(seq_num), send_at_seq_num(send_at_seq_num), created_at_time(created_at_time), - sent_at_time(-1), + sent_at_time(Timestamp::MinusInfinity()), retries(0) {} -NackRequester::BackoffSettings::BackoffSettings(TimeDelta min_retry, - TimeDelta max_rtt, - double base) - : min_retry_interval(min_retry), max_rtt(max_rtt), base(base) {} - -absl::optional -NackRequester::BackoffSettings::ParseFromFieldTrials() { - // Matches magic number in RTPSender::OnReceivedNack(). - const TimeDelta kDefaultMinRetryInterval = TimeDelta::Millis(5); - // Upper bound on link-delay considered for exponential backoff. - // Selected so that cumulative delay with 1.25 base and 10 retries ends up - // below 3s, since above that there will be a FIR generated instead. - const TimeDelta kDefaultMaxRtt = TimeDelta::Millis(160); - // Default base for exponential backoff, adds 25% RTT delay for each retry. 
- const double kDefaultBase = 1.25; - - FieldTrialParameter enabled("enabled", false); - FieldTrialParameter min_retry("min_retry", - kDefaultMinRetryInterval); - FieldTrialParameter max_rtt("max_rtt", kDefaultMaxRtt); - FieldTrialParameter base("base", kDefaultBase); - ParseFieldTrial({&enabled, &min_retry, &max_rtt, &base}, - field_trial::FindFullName("WebRTC-ExponentialNackBackoff")); - - if (enabled) { - return NackRequester::BackoffSettings(min_retry.Get(), max_rtt.Get(), - base.Get()); - } - return absl::nullopt; -} - NackRequester::NackRequester(TaskQueueBase* current_queue, NackPeriodicProcessor* periodic_processor, Clock* clock, NackSender* nack_sender, - KeyFrameRequestSender* keyframe_request_sender) + KeyFrameRequestSender* keyframe_request_sender, + const FieldTrialsView& field_trials) : worker_thread_(current_queue), clock_(clock), nack_sender_(nack_sender), keyframe_request_sender_(keyframe_request_sender), reordering_histogram_(kNumReorderingBuckets, kMaxReorderedPackets), initialized_(false), - rtt_ms_(kDefaultRttMs), + rtt_(kDefaultRtt), newest_seq_num_(0), - send_nack_delay_ms_(GetSendNackDelay()), - backoff_settings_(BackoffSettings::ParseFromFieldTrials()), + send_nack_delay_(GetSendNackDelay(field_trials)), processor_registration_(this, periodic_processor) { RTC_DCHECK(clock_); RTC_DCHECK(nack_sender_); @@ -250,7 +220,7 @@ int NackRequester::OnReceivedPacket(uint16_t seq_num, void NackRequester::ClearUpTo(uint16_t seq_num) { // Called via RtpVideoStreamReceiver2::FrameContinuous on the network thread. - worker_thread_->PostTask(ToQueuedTask(task_safety_, [seq_num, this]() { + worker_thread_->PostTask(SafeTask(task_safety_.flag(), [seq_num, this]() { RTC_DCHECK_RUN_ON(worker_thread_); nack_list_.erase(nack_list_.begin(), nack_list_.lower_bound(seq_num)); keyframe_list_.erase(keyframe_list_.begin(), @@ -262,7 +232,7 @@ void NackRequester::ClearUpTo(uint16_t seq_num) { void NackRequester::UpdateRtt(int64_t rtt_ms) { RTC_DCHECK_RUN_ON(worker_thread_); - rtt_ms_ = rtt_ms; + rtt_ = TimeDelta::Millis(rtt_ms); } bool NackRequester::RemovePacketsUntilKeyFrame() { @@ -314,7 +284,7 @@ void NackRequester::AddPacketsToNack(uint16_t seq_num_start, if (recovered_list_.find(seq_num) != recovered_list_.end()) continue; NackInfo nack_info(seq_num, seq_num + WaitNumberOfPackets(0.5), - clock_->TimeInMilliseconds()); + clock_->CurrentTime()); RTC_DCHECK(nack_list_.find(seq_num) == nack_list_.end()); nack_list_[seq_num] = nack_info; } @@ -329,30 +299,16 @@ std::vector NackRequester::GetNackBatch(NackFilterOptions options) { std::vector nack_batch; auto it = nack_list_.begin(); while (it != nack_list_.end()) { - TimeDelta resend_delay = TimeDelta::Millis(rtt_ms_); - if (backoff_settings_) { - resend_delay = - std::max(resend_delay, backoff_settings_->min_retry_interval); - if (it->second.retries > 1) { - TimeDelta exponential_backoff = - std::min(TimeDelta::Millis(rtt_ms_), backoff_settings_->max_rtt) * - std::pow(backoff_settings_->base, it->second.retries - 1); - resend_delay = std::max(resend_delay, exponential_backoff); - } - } - - bool delay_timed_out = - now.ms() - it->second.created_at_time >= send_nack_delay_ms_; - bool nack_on_rtt_passed = - now.ms() - it->second.sent_at_time >= resend_delay.ms(); + bool delay_timed_out = now - it->second.created_at_time >= send_nack_delay_; + bool nack_on_rtt_passed = now - it->second.sent_at_time >= rtt_; bool nack_on_seq_num_passed = - it->second.sent_at_time == -1 && + it->second.sent_at_time.IsInfinite() && AheadOrAt(newest_seq_num_, 
it->second.send_at_seq_num); if (delay_timed_out && ((consider_seq_num && nack_on_seq_num_passed) || (consider_timestamp && nack_on_rtt_passed))) { nack_batch.emplace_back(it->second.seq_num); ++it->second.retries; - it->second.sent_at_time = now.ms(); + it->second.sent_at_time = now; if (it->second.retries >= kMaxNackRetries) { RTC_LOG(LS_WARNING) << "Sequence number " << it->second.seq_num << " removed from NACK list due to max retries."; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/nack_requester.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/nack_requester.h index 46d904b7a4..c860787dcf 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/nack_requester.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/nack_requester.h @@ -17,13 +17,15 @@ #include #include +#include "api/field_trials_view.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/include/module_common_types.h" #include "modules/video_coding/histogram.h" #include "rtc_base/numerics/sequence_number_util.h" -#include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" @@ -70,7 +72,8 @@ class NackRequester final : public NackRequesterBase { NackPeriodicProcessor* periodic_processor, Clock* clock, NackSender* nack_sender, - KeyFrameRequestSender* keyframe_request_sender); + KeyFrameRequestSender* keyframe_request_sender, + const FieldTrialsView& field_trials); ~NackRequester(); void ProcessNacks() override; @@ -93,27 +96,15 @@ class NackRequester final : public NackRequesterBase { NackInfo(); NackInfo(uint16_t seq_num, uint16_t send_at_seq_num, - int64_t created_at_time); + Timestamp created_at_time); uint16_t seq_num; uint16_t send_at_seq_num; - int64_t created_at_time; - int64_t sent_at_time; + Timestamp created_at_time; + Timestamp sent_at_time; int retries; }; - struct BackoffSettings { - BackoffSettings(TimeDelta min_retry, TimeDelta max_rtt, double base); - static absl::optional ParseFromFieldTrials(); - - // Min time between nacks. - const TimeDelta min_retry_interval; - // Upper bound on link-delay considered for exponential backoff. - const TimeDelta max_rtt; - // Base for the exponential backoff. - const double base; - }; - void AddPacketsToNack(uint16_t seq_num_start, uint16_t seq_num_end) RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); @@ -149,13 +140,11 @@ class NackRequester final : public NackRequesterBase { RTC_GUARDED_BY(worker_thread_); video_coding::Histogram reordering_histogram_ RTC_GUARDED_BY(worker_thread_); bool initialized_ RTC_GUARDED_BY(worker_thread_); - int64_t rtt_ms_ RTC_GUARDED_BY(worker_thread_); + TimeDelta rtt_ RTC_GUARDED_BY(worker_thread_); uint16_t newest_seq_num_ RTC_GUARDED_BY(worker_thread_); // Adds a delay before send nack on packet received. 
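Summarizing the GetNackBatch() logic above: a NACK is only considered once the optional send delay has elapsed, and is then sent either because enough newer packets have arrived (sequence-number mode) or because at least one RTT has passed since the previous attempt (timestamp mode), with never-sent entries treated as if the RTT had already passed. The sketch below restates that predicate with hypothetical names, std::optional standing in for the minus-infinity sentinel, and a simplified wrap-around comparison.

#include <chrono>
#include <cstdint>
#include <optional>

using Clock = std::chrono::steady_clock;

struct PendingNack {
  uint16_t seq_num = 0;
  uint16_t send_at_seq_num = 0;              // wait until roughly this seq num arrives
  Clock::time_point created_at;
  std::optional<Clock::time_point> sent_at;  // empty until the first NACK is sent
  int retries = 0;
};

// Simplified wrap-around "ahead or at" for 16-bit sequence numbers.
bool AheadOrAt(uint16_t a, uint16_t b) {
  return static_cast<uint16_t>(a - b) < 0x8000;
}

bool ShouldSendNack(const PendingNack& n, Clock::time_point now,
                    Clock::duration send_delay, Clock::duration rtt,
                    bool consider_seq_num, bool consider_timestamp,
                    uint16_t newest_seq_num) {
  const bool delay_timed_out = now - n.created_at >= send_delay;
  // Never-sent entries count as "RTT passed" (sent_at starts at -infinity above).
  const bool rtt_passed = !n.sent_at || now - *n.sent_at >= rtt;
  const bool seq_num_passed =
      !n.sent_at && AheadOrAt(newest_seq_num, n.send_at_seq_num);
  return delay_timed_out && ((consider_seq_num && seq_num_passed) ||
                             (consider_timestamp && rtt_passed));
}

int main() {
  PendingNack n;
  n.seq_num = 100;
  n.send_at_seq_num = 102;
  n.created_at = Clock::now();
  // Not sent yet and packet 103 has already arrived: eligible in seq-num mode.
  const bool send = ShouldSendNack(n, Clock::now(), std::chrono::milliseconds(0),
                                   std::chrono::milliseconds(100),
                                   /*consider_seq_num=*/true,
                                   /*consider_timestamp=*/false,
                                   /*newest_seq_num=*/103);
  return send ? 0 : 1;
}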
- const int64_t send_nack_delay_ms_; - - const absl::optional backoff_settings_; + const TimeDelta send_nack_delay_; ScopedNackPeriodicProcessorRegistration processor_registration_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet.cc index 9b843d3e22..08d7a8c36a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet.cc @@ -63,7 +63,6 @@ VCMPacket::VCMPacket(const uint8_t* ptr, completeNALU = kNaluIncomplete; } - // TODO(nisse): Delete? // Playout decisions are made entirely based on first packet in a frame. if (!is_first_packet_in_frame()) { video_header.playout_delay = {-1, -1}; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.cc index 6ade2e652b..58c877ceaa 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.cc @@ -113,6 +113,10 @@ PacketBuffer::InsertResult PacketBuffer::InsertPacket( UpdateMissingPackets(seq_num); + received_padding_.erase( + received_padding_.begin(), + received_padding_.lower_bound(seq_num - (buffer_.size() / 4))); + result.packets = FindFrames(seq_num); return result; } @@ -146,11 +150,11 @@ void PacketBuffer::ClearTo(uint16_t seq_num) { first_seq_num_ = seq_num; is_cleared_to_first_seq_num_ = true; - auto clear_to_it = missing_packets_.upper_bound(seq_num); - if (clear_to_it != missing_packets_.begin()) { - --clear_to_it; - missing_packets_.erase(missing_packets_.begin(), clear_to_it); - } + missing_packets_.erase(missing_packets_.begin(), + missing_packets_.lower_bound(seq_num)); + + received_padding_.erase(received_padding_.begin(), + received_padding_.lower_bound(seq_num)); } void PacketBuffer::Clear() { @@ -160,6 +164,7 @@ void PacketBuffer::Clear() { PacketBuffer::InsertResult PacketBuffer::InsertPadding(uint16_t seq_num) { PacketBuffer::InsertResult result; UpdateMissingPackets(seq_num); + received_padding_.insert(seq_num); result.packets = FindFrames(static_cast(seq_num + 1)); return result; } @@ -168,6 +173,10 @@ void PacketBuffer::ForceSpsPpsIdrIsH264Keyframe() { sps_pps_idr_is_h264_keyframe_ = true; } +void PacketBuffer::ResetSpsPpsIdrIsH264Keyframe() { + sps_pps_idr_is_h264_keyframe_ = false; +} + void PacketBuffer::ClearInternal() { for (auto& entry : buffer_) { entry = nullptr; @@ -177,6 +186,7 @@ void PacketBuffer::ClearInternal() { is_cleared_to_first_seq_num_ = false; newest_inserted_seq_num_.reset(); missing_packets_.clear(); + received_padding_.clear(); } bool PacketBuffer::ExpandBufferSize() { @@ -225,7 +235,18 @@ bool PacketBuffer::PotentialNewFrame(uint16_t seq_num) const { std::vector> PacketBuffer::FindFrames( uint16_t seq_num) { std::vector> found_frames; - for (size_t i = 0; i < buffer_.size() && PotentialNewFrame(seq_num); ++i) { + auto start = seq_num; + + for (size_t i = 0; i < buffer_.size(); ++i) { + if (received_padding_.find(seq_num) != received_padding_.end()) { + seq_num += 1; + continue; + } + + if (!PotentialNewFrame(seq_num)) { + break; + } + size_t index = seq_num % buffer_.size(); buffer_[index]->continuous = true; @@ -246,7 +267,7 @@ std::vector> PacketBuffer::FindFrames( bool has_h264_pps = false; bool has_h264_idr = false; bool is_h264_keyframe = false; - + bool is_h265 = false; #ifndef DISABLE_H265 is_h265 = buffer_[start_index]->codec() == kVideoCodecH265; @@ 
-258,11 +279,17 @@ std::vector> PacketBuffer::FindFrames( int idr_width = -1; int idr_height = -1; + bool full_frame_found = false; while (true) { ++tested_packets; - if (!is_h264 && !is_h265 && buffer_[start_index]->is_first_packet_in_frame()) - break; + if (!is_h264 && !is_h265) { + if (buffer_[start_index] == nullptr || + buffer_[start_index]->is_first_packet_in_frame()) { + full_frame_found = buffer_[start_index] != nullptr; + break; + } + } if (is_h264) { const auto* h264_header = absl::get_if( @@ -416,22 +443,26 @@ std::vector> PacketBuffer::FindFrames( } } #endif - const uint16_t end_seq_num = seq_num + 1; - // Use uint16_t type to handle sequence number wrap around case. - uint16_t num_packets = end_seq_num - start_seq_num; - found_frames.reserve(found_frames.size() + num_packets); - for (uint16_t i = start_seq_num; i != end_seq_num; ++i) { - std::unique_ptr& packet = buffer_[i % buffer_.size()]; - RTC_DCHECK(packet); - RTC_DCHECK_EQ(i, packet->seq_num); - // Ensure frame boundary flags are properly set. - packet->video_header.is_first_packet_in_frame = (i == start_seq_num); - packet->video_header.is_last_packet_in_frame = (i == seq_num); - found_frames.push_back(std::move(packet)); - } + if (is_h264 || is_h265 || full_frame_found) { + const uint16_t end_seq_num = seq_num + 1; + // Use uint16_t type to handle sequence number wrap around case. + uint16_t num_packets = end_seq_num - start_seq_num; + found_frames.reserve(found_frames.size() + num_packets); + for (uint16_t i = start_seq_num; i != end_seq_num; ++i) { + std::unique_ptr& packet = buffer_[i % buffer_.size()]; + RTC_DCHECK(packet); + RTC_DCHECK_EQ(i, packet->seq_num); + // Ensure frame boundary flags are properly set. + packet->video_header.is_first_packet_in_frame = (i == start_seq_num); + packet->video_header.is_last_packet_in_frame = (i == seq_num); + found_frames.push_back(std::move(packet)); + } - missing_packets_.erase(missing_packets_.begin(), - missing_packets_.upper_bound(seq_num)); + missing_packets_.erase(missing_packets_.begin(), + missing_packets_.upper_bound(seq_num)); + received_padding_.erase(received_padding_.lower_bound(start), + received_padding_.upper_bound(seq_num)); + } } ++seq_num; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.h index 51528a6cbb..53e08c95a1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.h @@ -82,6 +82,7 @@ class PacketBuffer { void Clear(); void ForceSpsPpsIdrIsH264Keyframe(); + void ResetSpsPpsIdrIsH264Keyframe(); private: void ClearInternal(); @@ -117,6 +118,8 @@ class PacketBuffer { absl::optional newest_inserted_seq_num_; std::set> missing_packets_; + std::set> received_padding_; + // Indicates if we should require SPS, PPS, and IDR for a particular // RTP timestamp to treat the corresponding frame as a keyframe. 
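The packet_buffer changes above keep padding-only sequence numbers in a separate set so the frame-assembly scan can treat them as received without expecting payload. A reduced sketch of such a scan follows, with hypothetical types; the real code additionally handles the ring-buffer indexing, codec-specific frame boundaries, and the set cleanup shown in the hunks.

#include <cstdint>
#include <map>
#include <optional>
#include <set>
#include <vector>

struct Packet {
  bool first_in_frame = false;
  bool last_in_frame = false;
};

struct Frame { uint16_t first_seq = 0, last_seq = 0; };

// Walk forward from `seq` and emit frames; padding sequence numbers are skipped
// but do not break continuity, mirroring the received_padding_ set above.
std::vector<Frame> FindFrames(uint16_t seq,
                              const std::map<uint16_t, Packet>& packets,
                              const std::set<uint16_t>& padding) {
  std::vector<Frame> frames;
  std::optional<uint16_t> frame_start;
  for (int i = 0; i < 2048; ++i, ++seq) {  // bounded scan instead of the ring size
    if (padding.count(seq)) continue;      // padding: continuous, no payload
    auto it = packets.find(seq);
    if (it == packets.end()) break;        // gap: stop and wait for retransmission
    if (it->second.first_in_frame) frame_start = seq;
    if (frame_start && it->second.last_in_frame) {
      frames.push_back({*frame_start, seq});
      frame_start.reset();
    }
  }
  return frames;
}

int main() {
  // Two single-packet frames separated by a padding-only packet at seq 11.
  std::map<uint16_t, Packet> packets = {{10, {true, true}}, {12, {true, true}}};
  std::set<uint16_t> padding = {11};
  return FindFrames(10, packets, padding).size() == 2 ? 0 : 1;
}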
bool sps_pps_idr_is_h264_keyframe_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/receiver.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/receiver.cc index e156a1c28d..3f954ec9bf 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/receiver.cc @@ -30,18 +30,22 @@ namespace webrtc { enum { kMaxReceiverDelayMs = 10000 }; -VCMReceiver::VCMReceiver(VCMTiming* timing, Clock* clock) +VCMReceiver::VCMReceiver(VCMTiming* timing, + Clock* clock, + const FieldTrialsView& field_trials) : VCMReceiver::VCMReceiver(timing, clock, absl::WrapUnique(EventWrapper::Create()), - absl::WrapUnique(EventWrapper::Create())) {} + absl::WrapUnique(EventWrapper::Create()), + field_trials) {} VCMReceiver::VCMReceiver(VCMTiming* timing, Clock* clock, std::unique_ptr receiver_event, - std::unique_ptr jitter_buffer_event) + std::unique_ptr jitter_buffer_event, + const FieldTrialsView& field_trials) : clock_(clock), - jitter_buffer_(clock_, std::move(jitter_buffer_event)), + jitter_buffer_(clock_, std::move(jitter_buffer_event), field_trials), timing_(timing), render_wait_event_(std::move(receiver_event)), max_video_delay_ms_(kMaxVideoDelayMs) { @@ -69,7 +73,7 @@ int32_t VCMReceiver::InsertPacket(const VCMPacket& packet) { // We don't want to include timestamps which have suffered from // retransmission here, since we compensate with extra retransmission // delay within the jitter estimate. - timing_->IncomingTimestamp(packet.timestamp, clock_->TimeInMilliseconds()); + timing_->IncomingTimestamp(packet.timestamp, clock_->CurrentTime()); } return VCM_OK; } @@ -94,16 +98,18 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms, } if (min_playout_delay_ms >= 0) - timing_->set_min_playout_delay(min_playout_delay_ms); + timing_->set_min_playout_delay(TimeDelta::Millis(min_playout_delay_ms)); if (max_playout_delay_ms >= 0) - timing_->set_max_playout_delay(max_playout_delay_ms); + timing_->set_max_playout_delay(TimeDelta::Millis(max_playout_delay_ms)); // We have a frame - Set timing and render timestamp. - timing_->SetJitterDelay(jitter_buffer_.EstimatedJitterMs()); - const int64_t now_ms = clock_->TimeInMilliseconds(); + timing_->SetJitterDelay( + TimeDelta::Millis(jitter_buffer_.EstimatedJitterMs())); + const Timestamp now = clock_->CurrentTime(); + const int64_t now_ms = now.ms(); timing_->UpdateCurrentDelay(frame_timestamp); - render_time_ms = timing_->RenderTimeMs(frame_timestamp, now_ms); + render_time_ms = timing_->RenderTime(frame_timestamp, now).ms(); // Check render timing. bool timing_error = false; // Assume that render timing errors are due to changes in the video stream. @@ -117,7 +123,7 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms, << frame_delay << " > " << max_video_delay_ms_ << "). 
Resetting the video jitter buffer."; timing_error = true; - } else if (static_cast(timing_->TargetVideoDelay()) > + } else if (static_cast(timing_->TargetVideoDelay().ms()) > max_video_delay_ms_) { RTC_LOG(LS_WARNING) << "The video target delay has grown larger than " << max_video_delay_ms_ @@ -140,8 +146,11 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms, uint16_t new_max_wait_time = static_cast(VCM_MAX(available_wait_time, 0)); uint32_t wait_time_ms = rtc::saturated_cast( - timing_->MaxWaitingTime(render_time_ms, clock_->TimeInMilliseconds(), - /*too_many_frames_queued=*/false)); + timing_ + ->MaxWaitingTime(Timestamp::Millis(render_time_ms), + clock_->CurrentTime(), + /*too_many_frames_queued=*/false) + .ms()); if (new_max_wait_time < wait_time_ms) { // We're not allowed to wait until the frame is supposed to be rendered, // waiting as long as we're allowed to avoid busy looping, and then return diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/receiver.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/receiver.h index 8f6b041a5a..069f8c55c7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/receiver.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/receiver.h @@ -14,12 +14,13 @@ #include #include +#include "api/field_trials_view.h" #include "modules/video_coding/event_wrapper.h" #include "modules/video_coding/include/video_coding.h" #include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/jitter_buffer.h" #include "modules/video_coding/packet.h" -#include "modules/video_coding/timing.h" +#include "modules/video_coding/timing/timing.h" namespace webrtc { @@ -28,7 +29,9 @@ class VCMEncodedFrame; class VCMReceiver { public: - VCMReceiver(VCMTiming* timing, Clock* clock); + VCMReceiver(VCMTiming* timing, + Clock* clock, + const FieldTrialsView& field_trials); // Using this constructor, you can specify a different event implemetation for // the jitter buffer. Useful for unit tests when you want to simulate incoming @@ -37,7 +40,8 @@ class VCMReceiver { VCMReceiver(VCMTiming* timing, Clock* clock, std::unique_ptr receiver_event, - std::unique_ptr jitter_buffer_event); + std::unique_ptr jitter_buffer_event, + const FieldTrialsView& field_trials); ~VCMReceiver(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_generic_ref_finder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_generic_ref_finder.cc index 87fff9c26f..fd5b8afda1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_generic_ref_finder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_generic_ref_finder.cc @@ -23,6 +23,8 @@ RtpFrameReferenceFinder::ReturnVector RtpGenericFrameRefFinder::ManageFrame( // them here. 
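The receiver and timing changes above are part of a wider move from raw int64_t milliseconds to the Timestamp/TimeDelta unit types, which turns unit mix-ups into compile errors instead of runtime bugs. The sketch below uses small hypothetical stand-in types, not the actual WebRTC classes, to show the idea.

#include <cstdint>

// Strongly typed stand-ins for webrtc::TimeDelta / webrtc::Timestamp.
struct TimeDelta {
  int64_t us = 0;
  static TimeDelta Millis(int64_t ms) { return {ms * 1000}; }
  int64_t ms() const { return us / 1000; }
};

struct Timestamp {
  int64_t us = 0;
  TimeDelta operator-(Timestamp other) const { return {us - other.us}; }
  Timestamp operator+(TimeDelta d) const { return {us + d.us}; }
};

bool RenderDeadlinePassed(Timestamp now, Timestamp render_time) {
  // "now - render_time" can only ever be a TimeDelta; accidentally comparing
  // microseconds against milliseconds no longer compiles.
  return (now - render_time).ms() >= 0;
}

int main() {
  Timestamp now{10'000'000};                       // 10 s, in microseconds
  Timestamp render = now + TimeDelta::Millis(50);  // due in 50 ms
  return RenderDeadlinePassed(now, render) ? 1 : 0;
}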
frame->SetId(descriptor.frame_id); frame->SetSpatialIndex(descriptor.spatial_index); + if (descriptor.temporal_index != kNoTemporalIdx) + frame->SetTemporalIndex(descriptor.temporal_index); RtpFrameReferenceFinder::ReturnVector res; if (EncodedFrame::kMaxFrameReferences < descriptor.dependencies.size()) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.cc index 4381cf0952..59b027e2ce 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.cc @@ -116,7 +116,7 @@ void RtpSeqNumOnlyRefFinder::RetryStashedFrames( case kHandOff: complete_frame = true; res.push_back(std::move(*frame_it)); - ABSL_FALLTHROUGH_INTENDED; + [[fallthrough]]; case kDrop: frame_it = stashed_frames_.erase(frame_it); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.cc index 66cddacd15..185756ce51 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.cc @@ -18,14 +18,24 @@ namespace webrtc { RtpFrameReferenceFinder::ReturnVector RtpVp8RefFinder::ManageFrame( std::unique_ptr frame) { - FrameDecision decision = ManageFrameInternal(frame.get()); + const RTPVideoHeaderVP8& codec_header = absl::get( + frame->GetRtpVideoHeader().video_type_header); + + if (codec_header.temporalIdx != kNoTemporalIdx) + frame->SetTemporalIndex(codec_header.temporalIdx); + + int64_t unwrapped_tl0 = tl0_unwrapper_.Unwrap(codec_header.tl0PicIdx & 0xFF); + FrameDecision decision = + ManageFrameInternal(frame.get(), codec_header, unwrapped_tl0); RtpFrameReferenceFinder::ReturnVector res; switch (decision) { case kStash: - if (stashed_frames_.size() > kMaxStashedFrames) + if (stashed_frames_.size() > kMaxStashedFrames) { stashed_frames_.pop_back(); - stashed_frames_.push_front(std::move(frame)); + } + stashed_frames_.push_front( + {.unwrapped_tl0 = unwrapped_tl0, .frame = std::move(frame)}); return res; case kHandOff: res.push_back(std::move(frame)); @@ -39,11 +49,9 @@ RtpFrameReferenceFinder::ReturnVector RtpVp8RefFinder::ManageFrame( } RtpVp8RefFinder::FrameDecision RtpVp8RefFinder::ManageFrameInternal( - RtpFrameObject* frame) { - const RTPVideoHeader& video_header = frame->GetRtpVideoHeader(); - const RTPVideoHeaderVP8& codec_header = - absl::get(video_header.video_type_header); - + RtpFrameObject* frame, + const RTPVideoHeaderVP8& codec_header, + int64_t unwrapped_tl0) { // Protect against corrupted packets with arbitrary large temporal idx. if (codec_header.temporalIdx >= kMaxTemporalLayers) return kDrop; @@ -73,8 +81,6 @@ RtpVp8RefFinder::FrameDecision RtpVp8RefFinder::ManageFrameInternal( } while (last_picture_id_ != frame->Id()); } - int64_t unwrapped_tl0 = tl0_unwrapper_.Unwrap(codec_header.tl0PicIdx & 0xFF); - // Clean up info for base layers that are too old. 
int64_t old_tl0_pic_idx = unwrapped_tl0 - kMaxLayerInfo; auto clean_layer_info_to = layer_info_.lower_bound(old_tl0_pic_idx); @@ -207,20 +213,22 @@ void RtpVp8RefFinder::RetryStashedFrames( bool complete_frame = false; do { complete_frame = false; - for (auto frame_it = stashed_frames_.begin(); - frame_it != stashed_frames_.end();) { - FrameDecision decision = ManageFrameInternal(frame_it->get()); + for (auto it = stashed_frames_.begin(); it != stashed_frames_.end();) { + const RTPVideoHeaderVP8& codec_header = absl::get( + it->frame->GetRtpVideoHeader().video_type_header); + FrameDecision decision = + ManageFrameInternal(it->frame.get(), codec_header, it->unwrapped_tl0); switch (decision) { case kStash: - ++frame_it; + ++it; break; case kHandOff: complete_frame = true; - res.push_back(std::move(*frame_it)); - ABSL_FALLTHROUGH_INTENDED; + res.push_back(std::move(it->frame)); + [[fallthrough]]; case kDrop: - frame_it = stashed_frames_.erase(frame_it); + it = stashed_frames_.erase(it); } } } while (complete_frame); @@ -235,7 +243,7 @@ void RtpVp8RefFinder::UnwrapPictureIds(RtpFrameObject* frame) { void RtpVp8RefFinder::ClearTo(uint16_t seq_num) { auto it = stashed_frames_.begin(); while (it != stashed_frames_.end()) { - if (AheadOf(seq_num, (*it)->first_seq_num())) { + if (AheadOf(seq_num, it->frame->first_seq_num())) { it = stashed_frames_.erase(it); } else { ++it; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.h index 0a6cd7e10d..1ae45cdba3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.h @@ -38,9 +38,16 @@ class RtpVp8RefFinder { static constexpr int kMaxStashedFrames = 100; static constexpr int kMaxTemporalLayers = 5; + struct UnwrappedTl0Frame { + int64_t unwrapped_tl0; + std::unique_ptr frame; + }; + enum FrameDecision { kStash, kHandOff, kDrop }; - FrameDecision ManageFrameInternal(RtpFrameObject* frame); + FrameDecision ManageFrameInternal(RtpFrameObject* frame, + const RTPVideoHeaderVP8& codec_header, + int64_t unwrapped_tl0); void RetryStashedFrames(RtpFrameReferenceFinder::ReturnVector& res); void UpdateLayerInfoVp8(RtpFrameObject* frame, int64_t unwrapped_tl0, @@ -58,7 +65,7 @@ class RtpVp8RefFinder { // Frames that have been fully received but didn't have all the information // needed to determine their references. - std::deque> stashed_frames_; + std::deque stashed_frames_; // Holds the information about the last completed frame for a given temporal // layer given an unwrapped Tl0 picture index. 
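Both ref-finder refactors (the VP8 one above and the VP9 one below) now stash frames together with the TL0 index that was unwrapped when the frame first arrived, so retries reuse that value instead of feeding the old 8-bit TL0PICIDX back into the unwrapper after it has moved on. A reduced sketch of the pattern, with hypothetical names:

#include <cstdint>
#include <deque>
#include <memory>

// Minimal 8-bit unwrapper: extends a wrapping counter to a monotone-ish int64.
class Tl0Unwrapper {
 public:
  int64_t Unwrap(uint8_t tl0) {
    if (last_ < 0) {
      last_ = tl0;
    } else {
      const uint8_t delta =
          static_cast<uint8_t>(tl0 - static_cast<uint8_t>(last_));
      last_ += static_cast<int8_t>(delta);  // signed distance, handles wrap
    }
    return last_;
  }

 private:
  int64_t last_ = -1;
};

struct Frame {
  uint8_t tl0_pic_idx = 0;
  // picture id, references, etc. omitted
};

struct StashedFrame {
  int64_t unwrapped_tl0;         // computed once, when the frame arrived
  std::unique_ptr<Frame> frame;
};

int main() {
  Tl0Unwrapper unwrapper;
  std::deque<StashedFrame> stash;
  auto frame = std::make_unique<Frame>();
  frame->tl0_pic_idx = 7;
  // Stash with the value unwrapped now; a later retry reuses it unchanged.
  stash.push_front({unwrapper.Unwrap(frame->tl0_pic_idx), std::move(frame)});
  return stash.front().unwrapped_tl0 == 7 ? 0 : 1;
}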
diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.cc index cab5792785..175ed3464b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.cc @@ -16,17 +16,46 @@ #include "rtc_base/logging.h" namespace webrtc { - RtpFrameReferenceFinder::ReturnVector RtpVp9RefFinder::ManageFrame( std::unique_ptr frame) { - FrameDecision decision = ManageFrameInternal(frame.get()); + const RTPVideoHeaderVP9& codec_header = absl::get( + frame->GetRtpVideoHeader().video_type_header); + + if (codec_header.temporal_idx != kNoTemporalIdx) + frame->SetTemporalIndex(codec_header.temporal_idx); + frame->SetSpatialIndex(codec_header.spatial_idx); + frame->SetId(codec_header.picture_id & (kFrameIdLength - 1)); + + FrameDecision decision; + if (codec_header.temporal_idx >= kMaxTemporalLayers || + codec_header.spatial_idx >= kMaxSpatialLayers) { + decision = kDrop; + } else if (codec_header.flexible_mode) { + decision = ManageFrameFlexible(frame.get(), codec_header); + } else { + if (codec_header.tl0_pic_idx == kNoTl0PicIdx) { + RTC_LOG(LS_WARNING) << "TL0PICIDX is expected to be present in " + "non-flexible mode."; + decision = kDrop; + } else { + int64_t unwrapped_tl0 = + tl0_unwrapper_.Unwrap(codec_header.tl0_pic_idx & 0xFF); + decision = ManageFrameGof(frame.get(), codec_header, unwrapped_tl0); + + if (decision == kStash) { + if (stashed_frames_.size() > kMaxStashedFrames) { + stashed_frames_.pop_back(); + } + + stashed_frames_.push_front( + {.unwrapped_tl0 = unwrapped_tl0, .frame = std::move(frame)}); + } + } + } RtpFrameReferenceFinder::ReturnVector res; switch (decision) { case kStash: - if (stashed_frames_.size() > kMaxStashedFrames) - stashed_frames_.pop_back(); - stashed_frames_.push_front(std::move(frame)); return res; case kHandOff: res.push_back(std::move(frame)); @@ -39,46 +68,28 @@ RtpFrameReferenceFinder::ReturnVector RtpVp9RefFinder::ManageFrame( return res; } -RtpVp9RefFinder::FrameDecision RtpVp9RefFinder::ManageFrameInternal( - RtpFrameObject* frame) { - const RTPVideoHeader& video_header = frame->GetRtpVideoHeader(); - const RTPVideoHeaderVP9& codec_header = - absl::get(video_header.video_type_header); - - // Protect against corrupted packets with arbitrary large temporal idx. 
- if (codec_header.temporal_idx >= kMaxTemporalLayers || - codec_header.spatial_idx >= kMaxSpatialLayers) +RtpVp9RefFinder::FrameDecision RtpVp9RefFinder::ManageFrameFlexible( + RtpFrameObject* frame, + const RTPVideoHeaderVP9& codec_header) { + if (codec_header.num_ref_pics > EncodedFrame::kMaxFrameReferences) { return kDrop; - - frame->SetSpatialIndex(codec_header.spatial_idx); - frame->SetId(codec_header.picture_id & (kFrameIdLength - 1)); - - if (last_picture_id_ == -1) - last_picture_id_ = frame->Id(); - - if (codec_header.flexible_mode) { - if (codec_header.num_ref_pics > EncodedFrame::kMaxFrameReferences) { - return kDrop; - } - frame->num_references = codec_header.num_ref_pics; - for (size_t i = 0; i < frame->num_references; ++i) { - frame->references[i] = - Subtract(frame->Id(), codec_header.pid_diff[i]); - } - - FlattenFrameIdAndRefs(frame, codec_header.inter_layer_predicted); - return kHandOff; } - if (codec_header.tl0_pic_idx == kNoTl0PicIdx) { - RTC_LOG(LS_WARNING) << "TL0PICIDX is expected to be present in " - "non-flexible mode."; - return kDrop; + frame->num_references = codec_header.num_ref_pics; + for (size_t i = 0; i < frame->num_references; ++i) { + frame->references[i] = + Subtract(frame->Id(), codec_header.pid_diff[i]); } + FlattenFrameIdAndRefs(frame, codec_header.inter_layer_predicted); + return kHandOff; +} + +RtpVp9RefFinder::FrameDecision RtpVp9RefFinder::ManageFrameGof( + RtpFrameObject* frame, + const RTPVideoHeaderVP9& codec_header, + int64_t unwrapped_tl0) { GofInfo* info; - int64_t unwrapped_tl0 = - tl0_unwrapper_.Unwrap(codec_header.tl0_pic_idx & 0xFF); if (codec_header.ss_data_available) { if (codec_header.temporal_idx != 0) { RTC_LOG(LS_WARNING) << "Received scalability structure on a non base " @@ -303,20 +314,23 @@ void RtpVp9RefFinder::RetryStashedFrames( bool complete_frame = false; do { complete_frame = false; - for (auto frame_it = stashed_frames_.begin(); - frame_it != stashed_frames_.end();) { - FrameDecision decision = ManageFrameInternal(frame_it->get()); + for (auto it = stashed_frames_.begin(); it != stashed_frames_.end();) { + const RTPVideoHeaderVP9& codec_header = absl::get( + it->frame->GetRtpVideoHeader().video_type_header); + RTC_DCHECK(!codec_header.flexible_mode); + FrameDecision decision = + ManageFrameGof(it->frame.get(), codec_header, it->unwrapped_tl0); switch (decision) { case kStash: - ++frame_it; + ++it; break; case kHandOff: complete_frame = true; - res.push_back(std::move(*frame_it)); - ABSL_FALLTHROUGH_INTENDED; + res.push_back(std::move(it->frame)); + [[fallthrough]]; case kDrop: - frame_it = stashed_frames_.erase(frame_it); + it = stashed_frames_.erase(it); } } } while (complete_frame); @@ -342,7 +356,7 @@ void RtpVp9RefFinder::FlattenFrameIdAndRefs(RtpFrameObject* frame, void RtpVp9RefFinder::ClearTo(uint16_t seq_num) { auto it = stashed_frames_.begin(); while (it != stashed_frames_.end()) { - if (AheadOf(seq_num, (*it)->first_seq_num())) { + if (AheadOf(seq_num, it->frame->first_seq_num())) { it = stashed_frames_.erase(it); } else { ++it; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.h index 436cb1c84a..2971f686b1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.h @@ -48,7 +48,16 @@ class RtpVp9RefFinder { uint16_t last_picture_id; }; - FrameDecision ManageFrameInternal(RtpFrameObject* frame); + struct 
UnwrappedTl0Frame { + int64_t unwrapped_tl0; + std::unique_ptr frame; + }; + + FrameDecision ManageFrameFlexible(RtpFrameObject* frame, + const RTPVideoHeaderVP9& vp9_header); + FrameDecision ManageFrameGof(RtpFrameObject* frame, + const RTPVideoHeaderVP9& vp9_header, + int64_t unwrapped_tl0); void RetryStashedFrames(RtpFrameReferenceFinder::ReturnVector& res); bool MissingRequiredFrameVp9(uint16_t picture_id, const GofInfo& info); @@ -60,13 +69,9 @@ class RtpVp9RefFinder { void FlattenFrameIdAndRefs(RtpFrameObject* frame, bool inter_layer_predicted); - // Save the last picture id in order to detect when there is a gap in frames - // that have not yet been fully received. - int last_picture_id_ = -1; - // Frames that have been fully received but didn't have all the information // needed to determine their references. - std::deque> stashed_frames_; + std::deque stashed_frames_; // Where the current scalability structure is in the // `scalability_structures_` array. diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtt_filter.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtt_filter.cc deleted file mode 100644 index 75813a46ad..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtt_filter.cc +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_coding/rtt_filter.h" - -#include -#include -#include - -#include "modules/video_coding/internal_defines.h" - -namespace webrtc { - -VCMRttFilter::VCMRttFilter() - : _filtFactMax(35), - _jumpStdDevs(2.5), - _driftStdDevs(3.5), - _detectThreshold(kMaxDriftJumpCount) { - Reset(); -} - -VCMRttFilter& VCMRttFilter::operator=(const VCMRttFilter& rhs) { - if (this != &rhs) { - _gotNonZeroUpdate = rhs._gotNonZeroUpdate; - _avgRtt = rhs._avgRtt; - _varRtt = rhs._varRtt; - _maxRtt = rhs._maxRtt; - _filtFactCount = rhs._filtFactCount; - _jumpCount = rhs._jumpCount; - _driftCount = rhs._driftCount; - memcpy(_jumpBuf, rhs._jumpBuf, sizeof(_jumpBuf)); - memcpy(_driftBuf, rhs._driftBuf, sizeof(_driftBuf)); - } - return *this; -} - -void VCMRttFilter::Reset() { - _gotNonZeroUpdate = false; - _avgRtt = 0; - _varRtt = 0; - _maxRtt = 0; - _filtFactCount = 1; - _jumpCount = 0; - _driftCount = 0; - memset(_jumpBuf, 0, sizeof(_jumpBuf)); - memset(_driftBuf, 0, sizeof(_driftBuf)); -} - -void VCMRttFilter::Update(int64_t rttMs) { - if (!_gotNonZeroUpdate) { - if (rttMs == 0) { - return; - } - _gotNonZeroUpdate = true; - } - - // Sanity check - if (rttMs > 3000) { - rttMs = 3000; - } - - double filtFactor = 0; - if (_filtFactCount > 1) { - filtFactor = static_cast(_filtFactCount - 1) / _filtFactCount; - } - _filtFactCount++; - if (_filtFactCount > _filtFactMax) { - // This prevents filtFactor from going above - // (_filtFactMax - 1) / _filtFactMax, - // e.g., _filtFactMax = 50 => filtFactor = 49/50 = 0.98 - _filtFactCount = _filtFactMax; - } - double oldAvg = _avgRtt; - double oldVar = _varRtt; - _avgRtt = filtFactor * _avgRtt + (1 - filtFactor) * rttMs; - _varRtt = filtFactor * _varRtt + - (1 - filtFactor) * (rttMs - _avgRtt) * (rttMs - _avgRtt); - _maxRtt = VCM_MAX(rttMs, _maxRtt); - if (!JumpDetection(rttMs) || 
!DriftDetection(rttMs)) { - // In some cases we don't want to update the statistics - _avgRtt = oldAvg; - _varRtt = oldVar; - } -} - -bool VCMRttFilter::JumpDetection(int64_t rttMs) { - double diffFromAvg = _avgRtt - rttMs; - if (fabs(diffFromAvg) > _jumpStdDevs * sqrt(_varRtt)) { - int diffSign = (diffFromAvg >= 0) ? 1 : -1; - int jumpCountSign = (_jumpCount >= 0) ? 1 : -1; - if (diffSign != jumpCountSign) { - // Since the signs differ the samples currently - // in the buffer is useless as they represent a - // jump in a different direction. - _jumpCount = 0; - } - if (abs(_jumpCount) < kMaxDriftJumpCount) { - // Update the buffer used for the short time - // statistics. - // The sign of the diff is used for updating the counter since - // we want to use the same buffer for keeping track of when - // the RTT jumps down and up. - _jumpBuf[abs(_jumpCount)] = rttMs; - _jumpCount += diffSign; - } - if (abs(_jumpCount) >= _detectThreshold) { - // Detected an RTT jump - ShortRttFilter(_jumpBuf, abs(_jumpCount)); - _filtFactCount = _detectThreshold + 1; - _jumpCount = 0; - } else { - return false; - } - } else { - _jumpCount = 0; - } - return true; -} - -bool VCMRttFilter::DriftDetection(int64_t rttMs) { - if (_maxRtt - _avgRtt > _driftStdDevs * sqrt(_varRtt)) { - if (_driftCount < kMaxDriftJumpCount) { - // Update the buffer used for the short time - // statistics. - _driftBuf[_driftCount] = rttMs; - _driftCount++; - } - if (_driftCount >= _detectThreshold) { - // Detected an RTT drift - ShortRttFilter(_driftBuf, _driftCount); - _filtFactCount = _detectThreshold + 1; - _driftCount = 0; - } - } else { - _driftCount = 0; - } - return true; -} - -void VCMRttFilter::ShortRttFilter(int64_t* buf, uint32_t length) { - if (length == 0) { - return; - } - _maxRtt = 0; - _avgRtt = 0; - for (uint32_t i = 0; i < length; i++) { - if (buf[i] > _maxRtt) { - _maxRtt = buf[i]; - } - _avgRtt += buf[i]; - } - _avgRtt = _avgRtt / static_cast(length); -} - -int64_t VCMRttFilter::RttMs() const { - return static_cast(_maxRtt + 0.5); -} -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtt_filter.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtt_filter.h deleted file mode 100644 index 073fabb85b..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtt_filter.h +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_CODING_RTT_FILTER_H_ -#define MODULES_VIDEO_CODING_RTT_FILTER_H_ - -#include - -namespace webrtc { - -class VCMRttFilter { - public: - VCMRttFilter(); - - VCMRttFilter& operator=(const VCMRttFilter& rhs); - - // Resets the filter. - void Reset(); - // Updates the filter with a new sample. - void Update(int64_t rttMs); - // A getter function for the current RTT level in ms. - int64_t RttMs() const; - - private: - // The size of the drift and jump memory buffers - // and thus also the detection threshold for these - // detectors in number of samples. - enum { kMaxDriftJumpCount = 5 }; - // Detects RTT jumps by comparing the difference between - // samples and average to the standard deviation. 
- // Returns true if the long time statistics should be updated - // and false otherwise - bool JumpDetection(int64_t rttMs); - // Detects RTT drifts by comparing the difference between - // max and average to the standard deviation. - // Returns true if the long time statistics should be updated - // and false otherwise - bool DriftDetection(int64_t rttMs); - // Computes the short time average and maximum of the vector buf. - void ShortRttFilter(int64_t* buf, uint32_t length); - - bool _gotNonZeroUpdate; - double _avgRtt; - double _varRtt; - int64_t _maxRtt; - uint32_t _filtFactCount; - const uint32_t _filtFactMax; - const double _jumpStdDevs; - const double _driftStdDevs; - int32_t _jumpCount; - int32_t _driftCount; - const int32_t _detectThreshold; - int64_t _jumpBuf[kMaxDriftJumpCount]; - int64_t _driftBuf[kMaxDriftJumpCount]; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_RTT_FILTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/session_info.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/session_info.cc index 51f9376883..a13b9dc407 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/session_info.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/session_info.cc @@ -293,6 +293,9 @@ size_t VCMSessionInfo::Insert(const uint8_t* buffer, size_t length, bool insert_start_code, uint8_t* frame_buffer) { + if (!buffer || !frame_buffer) { + return 0; + } if (insert_start_code) { const unsigned char startCode[] = {0, 0, 0, 1}; memcpy(frame_buffer, startCode, kH264StartCodeLengthBytes); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/session_info.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/session_info.h index 876bf86525..c764884f9b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/session_info.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/session_info.h @@ -49,7 +49,6 @@ class VCMSessionInfo { // Returns the number of bytes deleted from the session. size_t MakeDecodable(); - // TODO(nisse): Used by tests only. size_t SessionLength() const; int NumPackets() const; bool HaveFirstPacket() const; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.cc index 8e5c06fca9..fbcd27b139 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.cc @@ -11,7 +11,7 @@ #include -#include "absl/strings/string_view.h" +#include "api/video_codecs/scalability_mode.h" #include "modules/video_coding/svc/scalability_structure_full_svc.h" #include "modules/video_coding/svc/scalability_structure_key_svc.h" #include "modules/video_coding/svc/scalability_structure_l2t2_key_shift.h" @@ -24,7 +24,7 @@ namespace webrtc { namespace { struct NamedStructureFactory { - absl::string_view name; + ScalabilityMode name; // Use function pointer to make NamedStructureFactory trivally destructable. 
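The create_scalability_structure.cc hunk above keys the factory table by the ScalabilityMode enum instead of a string name, so callers pass a checked enum value rather than a free-form string. A reduced sketch of the same table pattern with hypothetical names:

#include <memory>

enum class ScalabilityMode { kL1T1, kL1T2, kL2T1 };

struct ScalableVideoController {
  virtual ~ScalableVideoController() = default;
};
struct L1T1 : ScalableVideoController {};
struct L1T2 : ScalableVideoController {};
struct L2T1 : ScalableVideoController {};

template <typename T>
std::unique_ptr<ScalableVideoController> Create() {
  return std::make_unique<T>();
}

struct NamedStructureFactory {
  ScalabilityMode name;  // enum key instead of a string
  std::unique_ptr<ScalableVideoController> (*factory)();
};

constexpr NamedStructureFactory kFactories[] = {
    {ScalabilityMode::kL1T1, Create<L1T1>},
    {ScalabilityMode::kL1T2, Create<L1T2>},
    {ScalabilityMode::kL2T1, Create<L2T1>},
};

std::unique_ptr<ScalableVideoController> CreateStructure(ScalabilityMode mode) {
  for (const auto& entry : kFactories)
    if (entry.name == mode) return entry.factory();
  return nullptr;  // unknown / unsupported mode
}

int main() {
  return CreateStructure(ScalabilityMode::kL1T2) != nullptr ? 0 : 1;
}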
std::unique_ptr (*factory)(); ScalableVideoController::StreamLayersConfig config; @@ -45,7 +45,7 @@ std::unique_ptr CreateH() { return std::make_unique(factor); } -constexpr ScalableVideoController::StreamLayersConfig kConfigNone = { +constexpr ScalableVideoController::StreamLayersConfig kConfigL1T1 = { /*num_spatial_layers=*/1, /*num_temporal_layers=*/1, /*uses_reference_scaling=*/false}; @@ -78,6 +78,13 @@ constexpr ScalableVideoController::StreamLayersConfig kConfigL2T2 = { {1, 1}, {2, 1}}; +constexpr ScalableVideoController::StreamLayersConfig kConfigL2T2h = { + /*num_spatial_layers=*/2, + /*num_temporal_layers=*/2, + /*uses_reference_scaling=*/true, + {2, 1}, + {3, 1}}; + constexpr ScalableVideoController::StreamLayersConfig kConfigL2T3 = { /*num_spatial_layers=*/2, /*num_temporal_layers=*/3, @@ -85,6 +92,13 @@ constexpr ScalableVideoController::StreamLayersConfig kConfigL2T3 = { {1, 1}, {2, 1}}; +constexpr ScalableVideoController::StreamLayersConfig kConfigL2T3h = { + /*num_spatial_layers=*/2, + /*num_temporal_layers=*/3, + /*uses_reference_scaling=*/true, + {2, 1}, + {3, 1}}; + constexpr ScalableVideoController::StreamLayersConfig kConfigL3T1 = { /*num_spatial_layers=*/3, /*num_temporal_layers=*/1, @@ -92,6 +106,27 @@ constexpr ScalableVideoController::StreamLayersConfig kConfigL3T1 = { {1, 1, 1}, {4, 2, 1}}; +constexpr ScalableVideoController::StreamLayersConfig kConfigL3T1h = { + /*num_spatial_layers=*/3, + /*num_temporal_layers=*/1, + /*uses_reference_scaling=*/true, + {4, 2, 1}, + {9, 3, 1}}; + +constexpr ScalableVideoController::StreamLayersConfig kConfigL3T2 = { + /*num_spatial_layers=*/3, + /*num_temporal_layers=*/2, + /*uses_reference_scaling=*/true, + {1, 1, 1}, + {4, 2, 1}}; + +constexpr ScalableVideoController::StreamLayersConfig kConfigL3T2h = { + /*num_spatial_layers=*/3, + /*num_temporal_layers=*/2, + /*uses_reference_scaling=*/true, + {4, 2, 1}, + {9, 3, 1}}; + constexpr ScalableVideoController::StreamLayersConfig kConfigL3T3 = { /*num_spatial_layers=*/3, /*num_temporal_layers=*/3, @@ -99,6 +134,13 @@ constexpr ScalableVideoController::StreamLayersConfig kConfigL3T3 = { {1, 1, 1}, {4, 2, 1}}; +constexpr ScalableVideoController::StreamLayersConfig kConfigL3T3h = { + /*num_spatial_layers=*/3, + /*num_temporal_layers=*/3, + /*uses_reference_scaling=*/true, + {4, 2, 1}, + {9, 3, 1}}; + constexpr ScalableVideoController::StreamLayersConfig kConfigS2T1 = { /*num_spatial_layers=*/2, /*num_temporal_layers=*/1, @@ -106,6 +148,69 @@ constexpr ScalableVideoController::StreamLayersConfig kConfigS2T1 = { {1, 1}, {2, 1}}; +constexpr ScalableVideoController::StreamLayersConfig kConfigS2T1h = { + /*num_spatial_layers=*/2, + /*num_temporal_layers=*/1, + /*uses_reference_scaling=*/false, + {2, 1}, + {3, 1}}; + +constexpr ScalableVideoController::StreamLayersConfig kConfigS2T2 = { + /*num_spatial_layers=*/2, + /*num_temporal_layers=*/2, + /*uses_reference_scaling=*/false, + {1, 1}, + {2, 1}}; + +constexpr ScalableVideoController::StreamLayersConfig kConfigS2T2h = { + /*num_spatial_layers=*/2, + /*num_temporal_layers=*/2, + /*uses_reference_scaling=*/false, + {2, 1}, + {3, 1}}; + +constexpr ScalableVideoController::StreamLayersConfig kConfigS2T3 = { + /*num_spatial_layers=*/2, + /*num_temporal_layers=*/3, + /*uses_reference_scaling=*/false, + {1, 1}, + {2, 1}}; + +constexpr ScalableVideoController::StreamLayersConfig kConfigS2T3h = { + /*num_spatial_layers=*/2, + /*num_temporal_layers=*/3, + /*uses_reference_scaling=*/false, + {2, 1}, + {3, 1}}; + +constexpr 
ScalableVideoController::StreamLayersConfig kConfigS3T1 = { + /*num_spatial_layers=*/3, + /*num_temporal_layers=*/1, + /*uses_reference_scaling=*/false, + {1, 1, 1}, + {4, 2, 1}}; + +constexpr ScalableVideoController::StreamLayersConfig kConfigS3T1h = { + /*num_spatial_layers=*/3, + /*num_temporal_layers=*/1, + /*uses_reference_scaling=*/false, + {4, 2, 1}, + {9, 3, 1}}; + +constexpr ScalableVideoController::StreamLayersConfig kConfigS3T2 = { + /*num_spatial_layers=*/3, + /*num_temporal_layers=*/2, + /*uses_reference_scaling=*/false, + {1, 1, 1}, + {4, 2, 1}}; + +constexpr ScalableVideoController::StreamLayersConfig kConfigS3T2h = { + /*num_spatial_layers=*/3, + /*num_temporal_layers=*/2, + /*uses_reference_scaling=*/false, + {4, 2, 1}, + {9, 3, 1}}; + constexpr ScalableVideoController::StreamLayersConfig kConfigS3T3 = { /*num_spatial_layers=*/3, /*num_temporal_layers=*/3, @@ -113,29 +218,62 @@ constexpr ScalableVideoController::StreamLayersConfig kConfigS3T3 = { {1, 1, 1}, {4, 2, 1}}; +constexpr ScalableVideoController::StreamLayersConfig kConfigS3T3h = { + /*num_spatial_layers=*/3, + /*num_temporal_layers=*/3, + /*uses_reference_scaling=*/false, + {4, 2, 1}, + {9, 3, 1}}; + constexpr NamedStructureFactory kFactories[] = { - {"NONE", Create, kConfigNone}, - {"L1T2", Create, kConfigL1T2}, - {"L1T3", Create, kConfigL1T3}, - {"L2T1", Create, kConfigL2T1}, - {"L2T1h", CreateH, kConfigL2T1h}, - {"L2T1_KEY", Create, kConfigL2T1}, - {"L2T2", Create, kConfigL2T2}, - {"L2T2_KEY", Create, kConfigL2T2}, - {"L2T2_KEY_SHIFT", Create, kConfigL2T2}, - {"L2T3_KEY", Create, kConfigL2T3}, - {"L3T1", Create, kConfigL3T1}, - {"L3T3", Create, kConfigL3T3}, - {"L3T3_KEY", Create, kConfigL3T3}, - {"S2T1", Create, kConfigS2T1}, - {"S3T3", Create, kConfigS3T3}, + {ScalabilityMode::kL1T1, Create, + kConfigL1T1}, + {ScalabilityMode::kL1T2, Create, kConfigL1T2}, + {ScalabilityMode::kL1T3, Create, kConfigL1T3}, + {ScalabilityMode::kL2T1, Create, kConfigL2T1}, + {ScalabilityMode::kL2T1h, CreateH, kConfigL2T1h}, + {ScalabilityMode::kL2T1_KEY, Create, + kConfigL2T1}, + {ScalabilityMode::kL2T2, Create, kConfigL2T2}, + {ScalabilityMode::kL2T2h, CreateH, kConfigL2T2h}, + {ScalabilityMode::kL2T2_KEY, Create, + kConfigL2T2}, + {ScalabilityMode::kL2T2_KEY_SHIFT, Create, + kConfigL2T2}, + {ScalabilityMode::kL2T3, Create, kConfigL2T3}, + {ScalabilityMode::kL2T3h, CreateH, kConfigL2T3h}, + {ScalabilityMode::kL2T3_KEY, Create, + kConfigL2T3}, + {ScalabilityMode::kL3T1, Create, kConfigL3T1}, + {ScalabilityMode::kL3T1h, CreateH, kConfigL3T1h}, + {ScalabilityMode::kL3T1_KEY, Create, + kConfigL3T1}, + {ScalabilityMode::kL3T2, Create, kConfigL3T2}, + {ScalabilityMode::kL3T2h, CreateH, kConfigL3T2h}, + {ScalabilityMode::kL3T2_KEY, Create, + kConfigL3T2}, + {ScalabilityMode::kL3T3, Create, kConfigL3T3}, + {ScalabilityMode::kL3T3h, CreateH, kConfigL3T3h}, + {ScalabilityMode::kL3T3_KEY, Create, + kConfigL3T3}, + {ScalabilityMode::kS2T1, Create, kConfigS2T1}, + {ScalabilityMode::kS2T1h, CreateH, kConfigS2T1h}, + {ScalabilityMode::kS2T2, Create, kConfigS2T2}, + {ScalabilityMode::kS2T2h, CreateH, kConfigS2T2h}, + {ScalabilityMode::kS2T3, Create, kConfigS2T3}, + {ScalabilityMode::kS2T3h, CreateH, kConfigS2T3h}, + {ScalabilityMode::kS3T1, Create, kConfigS3T1}, + {ScalabilityMode::kS3T1h, CreateH, kConfigS3T1h}, + {ScalabilityMode::kS3T2, Create, kConfigS3T2}, + {ScalabilityMode::kS3T2h, CreateH, kConfigS3T2h}, + {ScalabilityMode::kS3T3, Create, kConfigS3T3}, + {ScalabilityMode::kS3T3h, CreateH, kConfigS3T3h}, }; } // namespace std::unique_ptr 
CreateScalabilityStructure( - absl::string_view name) { - RTC_DCHECK(!name.empty()); + ScalabilityMode name) { for (const auto& entry : kFactories) { if (entry.name == name) { return entry.factory(); @@ -145,8 +283,7 @@ std::unique_ptr CreateScalabilityStructure( } absl::optional -ScalabilityStructureConfig(absl::string_view name) { - RTC_DCHECK(!name.empty()); +ScalabilityStructureConfig(ScalabilityMode name) { for (const auto& entry : kFactories) { if (entry.name == name) { return entry.config; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.h index fde034433e..3b67443693 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.h @@ -13,8 +13,8 @@ #include #include -#include "absl/strings/string_view.h" #include "absl/types/optional.h" +#include "api/video_codecs/scalability_mode.h" #include "modules/video_coding/svc/scalable_video_controller.h" namespace webrtc { @@ -23,12 +23,12 @@ namespace webrtc { // https://w3c.github.io/webrtc-svc/#scalabilitymodes* // Returns nullptr for unknown name. std::unique_ptr CreateScalabilityStructure( - absl::string_view name); + ScalabilityMode name); -// Returns descrption of the scalability structure identified by 'name', +// Returns description of the scalability structure identified by 'name', // Return nullopt for unknown name. absl::optional -ScalabilityStructureConfig(absl::string_view name); +ScalabilityStructureConfig(ScalabilityMode name); } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_mode_util.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_mode_util.cc new file mode 100644 index 0000000000..35d66df203 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_mode_util.cc @@ -0,0 +1,390 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
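With CreateScalabilityStructure() and ScalabilityStructureConfig() now keyed by the ScalabilityMode enum, a caller can inspect a structure's layout without going through strings. A hypothetical call-site sketch (UsesReferenceScaling is not part of this diff), assuming it is built inside the WebRTC tree:

    #include "api/video_codecs/scalability_mode.h"
    #include "modules/video_coding/svc/create_scalability_structure.h"

    // Queries the registered layer layout for `mode` without instantiating a
    // controller; nullopt means no entry exists in kFactories for that mode.
    bool UsesReferenceScaling(webrtc::ScalabilityMode mode) {
      absl::optional<webrtc::ScalableVideoController::StreamLayersConfig> config =
          webrtc::ScalabilityStructureConfig(mode);
      return config.has_value() && config->uses_reference_scaling;
    }

For example, kL3T3h reports true (full SVC with a 3:2 step between spatial layers), while the simulcast variant kS3T3h reports false.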
+ */ + +#include "modules/video_coding/svc/scalability_mode_util.h" + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/video_codecs/scalability_mode.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +absl::optional ScalabilityModeFromString( + absl::string_view mode_string) { + if (mode_string == "L1T1") + return ScalabilityMode::kL1T1; + if (mode_string == "L1T2") + return ScalabilityMode::kL1T2; + if (mode_string == "L1T3") + return ScalabilityMode::kL1T3; + + if (mode_string == "L2T1") + return ScalabilityMode::kL2T1; + if (mode_string == "L2T1h") + return ScalabilityMode::kL2T1h; + if (mode_string == "L2T1_KEY") + return ScalabilityMode::kL2T1_KEY; + + if (mode_string == "L2T2") + return ScalabilityMode::kL2T2; + if (mode_string == "L2T2h") + return ScalabilityMode::kL2T2h; + if (mode_string == "L2T2_KEY") + return ScalabilityMode::kL2T2_KEY; + if (mode_string == "L2T2_KEY_SHIFT") + return ScalabilityMode::kL2T2_KEY_SHIFT; + if (mode_string == "L2T3") + return ScalabilityMode::kL2T3; + if (mode_string == "L2T3h") + return ScalabilityMode::kL2T3h; + if (mode_string == "L2T3_KEY") + return ScalabilityMode::kL2T3_KEY; + + if (mode_string == "L3T1") + return ScalabilityMode::kL3T1; + if (mode_string == "L3T1h") + return ScalabilityMode::kL3T1h; + if (mode_string == "L3T1_KEY") + return ScalabilityMode::kL3T1_KEY; + + if (mode_string == "L3T2") + return ScalabilityMode::kL3T2; + if (mode_string == "L3T2h") + return ScalabilityMode::kL3T2h; + if (mode_string == "L3T2_KEY") + return ScalabilityMode::kL3T2_KEY; + + if (mode_string == "L3T3") + return ScalabilityMode::kL3T3; + if (mode_string == "L3T3h") + return ScalabilityMode::kL3T3h; + if (mode_string == "L3T3_KEY") + return ScalabilityMode::kL3T3_KEY; + + if (mode_string == "S2T1") + return ScalabilityMode::kS2T1; + if (mode_string == "S2T1h") + return ScalabilityMode::kS2T1h; + if (mode_string == "S2T2") + return ScalabilityMode::kS2T2; + if (mode_string == "S2T2h") + return ScalabilityMode::kS2T2h; + if (mode_string == "S2T3") + return ScalabilityMode::kS2T3; + if (mode_string == "S2T3h") + return ScalabilityMode::kS2T3h; + if (mode_string == "S3T1") + return ScalabilityMode::kS3T1; + if (mode_string == "S3T1h") + return ScalabilityMode::kS3T1h; + if (mode_string == "S3T2") + return ScalabilityMode::kS3T2; + if (mode_string == "S3T2h") + return ScalabilityMode::kS3T2h; + if (mode_string == "S3T3") + return ScalabilityMode::kS3T3; + if (mode_string == "S3T3h") + return ScalabilityMode::kS3T3h; + + return absl::nullopt; +} + +InterLayerPredMode ScalabilityModeToInterLayerPredMode( + ScalabilityMode scalability_mode) { + switch (scalability_mode) { + case ScalabilityMode::kL1T1: + case ScalabilityMode::kL1T2: + case ScalabilityMode::kL1T3: + case ScalabilityMode::kL2T1: + case ScalabilityMode::kL2T1h: + return InterLayerPredMode::kOn; + case ScalabilityMode::kL2T1_KEY: + return InterLayerPredMode::kOnKeyPic; + case ScalabilityMode::kL2T2: + case ScalabilityMode::kL2T2h: + return InterLayerPredMode::kOn; + case ScalabilityMode::kL2T2_KEY: + case ScalabilityMode::kL2T2_KEY_SHIFT: + return InterLayerPredMode::kOnKeyPic; + case ScalabilityMode::kL2T3: + case ScalabilityMode::kL2T3h: + return InterLayerPredMode::kOn; + case ScalabilityMode::kL2T3_KEY: + return InterLayerPredMode::kOnKeyPic; + case ScalabilityMode::kL3T1: + case ScalabilityMode::kL3T1h: + return InterLayerPredMode::kOn; + case ScalabilityMode::kL3T1_KEY: + return InterLayerPredMode::kOnKeyPic; + case ScalabilityMode::kL3T2: + case 
ScalabilityMode::kL3T2h: + return InterLayerPredMode::kOn; + case ScalabilityMode::kL3T2_KEY: + return InterLayerPredMode::kOnKeyPic; + case ScalabilityMode::kL3T3: + case ScalabilityMode::kL3T3h: + return InterLayerPredMode::kOn; + case ScalabilityMode::kL3T3_KEY: + return InterLayerPredMode::kOnKeyPic; + case ScalabilityMode::kS2T1: + case ScalabilityMode::kS2T1h: + case ScalabilityMode::kS2T2: + case ScalabilityMode::kS2T2h: + case ScalabilityMode::kS2T3: + case ScalabilityMode::kS2T3h: + case ScalabilityMode::kS3T1: + case ScalabilityMode::kS3T1h: + case ScalabilityMode::kS3T2: + case ScalabilityMode::kS3T2h: + case ScalabilityMode::kS3T3: + case ScalabilityMode::kS3T3h: + return InterLayerPredMode::kOff; + } + RTC_CHECK_NOTREACHED(); +} + +int ScalabilityModeToNumSpatialLayers(ScalabilityMode scalability_mode) { + switch (scalability_mode) { + case ScalabilityMode::kL1T1: + case ScalabilityMode::kL1T2: + case ScalabilityMode::kL1T3: + return 1; + case ScalabilityMode::kL2T1: + case ScalabilityMode::kL2T1h: + case ScalabilityMode::kL2T1_KEY: + case ScalabilityMode::kL2T2: + case ScalabilityMode::kL2T2h: + case ScalabilityMode::kL2T2_KEY: + case ScalabilityMode::kL2T2_KEY_SHIFT: + case ScalabilityMode::kL2T3: + case ScalabilityMode::kL2T3h: + case ScalabilityMode::kL2T3_KEY: + return 2; + case ScalabilityMode::kL3T1: + case ScalabilityMode::kL3T1h: + case ScalabilityMode::kL3T1_KEY: + case ScalabilityMode::kL3T2: + case ScalabilityMode::kL3T2h: + case ScalabilityMode::kL3T2_KEY: + case ScalabilityMode::kL3T3: + case ScalabilityMode::kL3T3h: + case ScalabilityMode::kL3T3_KEY: + return 3; + case ScalabilityMode::kS2T1: + case ScalabilityMode::kS2T1h: + case ScalabilityMode::kS2T2: + case ScalabilityMode::kS2T2h: + case ScalabilityMode::kS2T3: + case ScalabilityMode::kS2T3h: + return 2; + case ScalabilityMode::kS3T1: + case ScalabilityMode::kS3T1h: + case ScalabilityMode::kS3T2: + case ScalabilityMode::kS3T2h: + case ScalabilityMode::kS3T3: + case ScalabilityMode::kS3T3h: + return 3; + } + RTC_CHECK_NOTREACHED(); +} + +int ScalabilityModeToNumTemporalLayers(ScalabilityMode scalability_mode) { + switch (scalability_mode) { + case ScalabilityMode::kL1T1: + return 1; + case ScalabilityMode::kL1T2: + return 2; + case ScalabilityMode::kL1T3: + return 3; + case ScalabilityMode::kL2T1: + case ScalabilityMode::kL2T1h: + case ScalabilityMode::kL2T1_KEY: + return 1; + case ScalabilityMode::kL2T2: + case ScalabilityMode::kL2T2h: + case ScalabilityMode::kL2T2_KEY: + case ScalabilityMode::kL2T2_KEY_SHIFT: + return 2; + case ScalabilityMode::kL2T3: + case ScalabilityMode::kL2T3h: + case ScalabilityMode::kL2T3_KEY: + return 3; + case ScalabilityMode::kL3T1: + case ScalabilityMode::kL3T1h: + case ScalabilityMode::kL3T1_KEY: + return 1; + case ScalabilityMode::kL3T2: + case ScalabilityMode::kL3T2h: + case ScalabilityMode::kL3T2_KEY: + return 2; + case ScalabilityMode::kL3T3: + case ScalabilityMode::kL3T3h: + case ScalabilityMode::kL3T3_KEY: + return 3; + case ScalabilityMode::kS2T1: + case ScalabilityMode::kS2T1h: + case ScalabilityMode::kS3T1: + case ScalabilityMode::kS3T1h: + return 1; + case ScalabilityMode::kS2T2: + case ScalabilityMode::kS2T2h: + case ScalabilityMode::kS3T2: + case ScalabilityMode::kS3T2h: + return 2; + case ScalabilityMode::kS2T3: + case ScalabilityMode::kS2T3h: + case ScalabilityMode::kS3T3: + case ScalabilityMode::kS3T3h: + return 3; + } + RTC_CHECK_NOTREACHED(); +} + +absl::optional ScalabilityModeToResolutionRatio( + ScalabilityMode scalability_mode) { + switch (scalability_mode) 
{ + case ScalabilityMode::kL1T1: + case ScalabilityMode::kL1T2: + case ScalabilityMode::kL1T3: + return absl::nullopt; + case ScalabilityMode::kL2T1: + case ScalabilityMode::kL2T1_KEY: + case ScalabilityMode::kL2T2: + case ScalabilityMode::kL2T2_KEY: + case ScalabilityMode::kL2T2_KEY_SHIFT: + case ScalabilityMode::kL2T3: + case ScalabilityMode::kL2T3_KEY: + case ScalabilityMode::kL3T1: + case ScalabilityMode::kL3T1_KEY: + case ScalabilityMode::kL3T2: + case ScalabilityMode::kL3T2_KEY: + case ScalabilityMode::kL3T3: + case ScalabilityMode::kL3T3_KEY: + case ScalabilityMode::kS2T1: + case ScalabilityMode::kS2T2: + case ScalabilityMode::kS2T3: + case ScalabilityMode::kS3T1: + case ScalabilityMode::kS3T2: + case ScalabilityMode::kS3T3: + return ScalabilityModeResolutionRatio::kTwoToOne; + case ScalabilityMode::kL2T1h: + case ScalabilityMode::kL2T2h: + case ScalabilityMode::kL2T3h: + case ScalabilityMode::kL3T1h: + case ScalabilityMode::kL3T2h: + case ScalabilityMode::kL3T3h: + case ScalabilityMode::kS2T1h: + case ScalabilityMode::kS2T2h: + case ScalabilityMode::kS2T3h: + case ScalabilityMode::kS3T1h: + case ScalabilityMode::kS3T2h: + case ScalabilityMode::kS3T3h: + return ScalabilityModeResolutionRatio::kThreeToTwo; + } + RTC_CHECK_NOTREACHED(); +} + +ScalabilityMode LimitNumSpatialLayers(ScalabilityMode scalability_mode, + int max_spatial_layers) { + int num_spatial_layers = ScalabilityModeToNumSpatialLayers(scalability_mode); + if (max_spatial_layers >= num_spatial_layers) { + return scalability_mode; + } + + switch (scalability_mode) { + case ScalabilityMode::kL1T1: + return ScalabilityMode::kL1T1; + case ScalabilityMode::kL1T2: + return ScalabilityMode::kL1T2; + case ScalabilityMode::kL1T3: + return ScalabilityMode::kL1T3; + case ScalabilityMode::kL2T1: + return ScalabilityMode::kL1T1; + case ScalabilityMode::kL2T1h: + return ScalabilityMode::kL1T1; + case ScalabilityMode::kL2T1_KEY: + return ScalabilityMode::kL1T1; + case ScalabilityMode::kL2T2: + return ScalabilityMode::kL1T2; + case ScalabilityMode::kL2T2h: + return ScalabilityMode::kL1T2; + case ScalabilityMode::kL2T2_KEY: + return ScalabilityMode::kL1T2; + case ScalabilityMode::kL2T2_KEY_SHIFT: + return ScalabilityMode::kL1T2; + case ScalabilityMode::kL2T3: + return ScalabilityMode::kL1T3; + case ScalabilityMode::kL2T3h: + return ScalabilityMode::kL1T3; + case ScalabilityMode::kL2T3_KEY: + return ScalabilityMode::kL1T3; + case ScalabilityMode::kL3T1: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T1 + : ScalabilityMode::kL1T1; + case ScalabilityMode::kL3T1h: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T1h + : ScalabilityMode::kL1T1; + case ScalabilityMode::kL3T1_KEY: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T1_KEY + : ScalabilityMode::kL1T1; + case ScalabilityMode::kL3T2: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T2 + : ScalabilityMode::kL1T2; + case ScalabilityMode::kL3T2h: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T2h + : ScalabilityMode::kL1T2; + case ScalabilityMode::kL3T2_KEY: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T2_KEY + : ScalabilityMode::kL1T2; + case ScalabilityMode::kL3T3: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T3 + : ScalabilityMode::kL1T3; + case ScalabilityMode::kL3T3h: + return max_spatial_layers == 2 ? ScalabilityMode::kL2T3h + : ScalabilityMode::kL1T3; + case ScalabilityMode::kL3T3_KEY: + return max_spatial_layers == 2 ? 
ScalabilityMode::kL2T3_KEY + : ScalabilityMode::kL1T3; + case ScalabilityMode::kS2T1: + return ScalabilityMode::kL1T1; + case ScalabilityMode::kS2T1h: + return ScalabilityMode::kL1T1; + case ScalabilityMode::kS2T2: + return ScalabilityMode::kL1T2; + case ScalabilityMode::kS2T2h: + return ScalabilityMode::kL1T2; + case ScalabilityMode::kS2T3: + return ScalabilityMode::kL1T3; + case ScalabilityMode::kS2T3h: + return ScalabilityMode::kL1T3; + case ScalabilityMode::kS3T1: + return max_spatial_layers == 2 ? ScalabilityMode::kS2T1 + : ScalabilityMode::kL1T1; + case ScalabilityMode::kS3T1h: + return max_spatial_layers == 2 ? ScalabilityMode::kS2T1h + : ScalabilityMode::kL1T1; + case ScalabilityMode::kS3T2: + return max_spatial_layers == 2 ? ScalabilityMode::kS2T2 + : ScalabilityMode::kL1T2; + case ScalabilityMode::kS3T2h: + return max_spatial_layers == 2 ? ScalabilityMode::kS2T2h + : ScalabilityMode::kL1T2; + case ScalabilityMode::kS3T3: + return max_spatial_layers == 2 ? ScalabilityMode::kS2T3 + : ScalabilityMode::kL1T3; + case ScalabilityMode::kS3T3h: + return max_spatial_layers == 2 ? ScalabilityMode::kS2T3h + : ScalabilityMode::kL1T3; + } + RTC_CHECK_NOTREACHED(); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_mode_util.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_mode_util.h new file mode 100644 index 0000000000..aef955a9a5 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_mode_util.h @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_MODE_UTIL_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_MODE_UTIL_H_ + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/video_codec.h" + +namespace webrtc { + +enum class ScalabilityModeResolutionRatio { + kTwoToOne, // The resolution ratio between spatial layers is 2:1. + kThreeToTwo, // The resolution ratio between spatial layers is 1.5:1. 
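The mode-to-property helpers above (and the declarations that follow in scalability_mode_util.h) are typically chained together. A hypothetical usage sketch; ConfigureFromSdp and its early-return behaviour are assumptions for illustration, not code from this diff:

    #include "modules/video_coding/svc/scalability_mode_util.h"

    void ConfigureFromSdp(absl::string_view mode_string, int max_spatial_layers) {
      absl::optional<webrtc::ScalabilityMode> mode =
          webrtc::ScalabilityModeFromString(mode_string);
      if (!mode) {
        return;  // Unknown mode string.
      }
      // Cap the spatial layer count first: e.g. "L3T2h" with a cap of 2
      // becomes kL2T2h via LimitNumSpatialLayers().
      const webrtc::ScalabilityMode capped =
          webrtc::LimitNumSpatialLayers(*mode, max_spatial_layers);
      const int spatial = webrtc::ScalabilityModeToNumSpatialLayers(capped);
      const int temporal = webrtc::ScalabilityModeToNumTemporalLayers(capped);
      // "h" modes report kThreeToTwo here, i.e. a 1.5:1 downscale per layer.
      const absl::optional<webrtc::ScalabilityModeResolutionRatio> ratio =
          webrtc::ScalabilityModeToResolutionRatio(capped);
      (void)spatial;
      (void)temporal;
      (void)ratio;
    }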
+}; + +absl::optional ScalabilityModeFromString( + absl::string_view scalability_mode_string); + +InterLayerPredMode ScalabilityModeToInterLayerPredMode( + ScalabilityMode scalability_mode); + +int ScalabilityModeToNumSpatialLayers(ScalabilityMode scalability_mode); + +int ScalabilityModeToNumTemporalLayers(ScalabilityMode scalability_mode); + +absl::optional ScalabilityModeToResolutionRatio( + ScalabilityMode scalability_mode); + +ScalabilityMode LimitNumSpatialLayers(ScalabilityMode scalability_mode, + int max_spatial_layers); + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_MODE_UTIL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.cc index 892059297c..a262317597 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.cc @@ -353,6 +353,26 @@ FrameDependencyStructure ScalabilityStructureL2T2::DependencyStructure() const { return structure; } +FrameDependencyStructure ScalabilityStructureL2T3::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 6; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 0, 0, 1, 1, 1}; + auto& t = structure.templates; + t.resize(10); + t[1].S(0).T(0).Dtis("SSSSSS").ChainDiffs({0, 0}); + t[6].S(1).T(0).Dtis("---SSS").ChainDiffs({1, 1}).FrameDiffs({1}); + t[3].S(0).T(2).Dtis("--D--R").ChainDiffs({2, 1}).FrameDiffs({2}); + t[8].S(1).T(2).Dtis("-----D").ChainDiffs({3, 2}).FrameDiffs({2, 1}); + t[2].S(0).T(1).Dtis("-DS-RR").ChainDiffs({4, 3}).FrameDiffs({4}); + t[7].S(1).T(1).Dtis("----DS").ChainDiffs({5, 4}).FrameDiffs({4, 1}); + t[4].S(0).T(2).Dtis("--D--R").ChainDiffs({6, 5}).FrameDiffs({2}); + t[9].S(1).T(2).Dtis("-----D").ChainDiffs({7, 6}).FrameDiffs({2, 1}); + t[0].S(0).T(0).Dtis("SSSRRR").ChainDiffs({8, 7}).FrameDiffs({8}); + t[5].S(1).T(0).Dtis("---SSS").ChainDiffs({1, 1}).FrameDiffs({8, 1}); + return structure; +} + FrameDependencyStructure ScalabilityStructureL3T1::DependencyStructure() const { FrameDependencyStructure structure; structure.num_decode_targets = 3; @@ -369,6 +389,29 @@ FrameDependencyStructure ScalabilityStructureL3T1::DependencyStructure() const { return structure; } +FrameDependencyStructure ScalabilityStructureL3T2::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 6; + structure.num_chains = 3; + structure.decode_target_protected_by_chain = {0, 0, 1, 1, 2, 2}; + auto& t = structure.templates; + t.resize(9); + // Templates are shown in the order frames following them appear in the + // stream, but in `structure.templates` array templates are sorted by + // (`spatial_id`, `temporal_id`) since that is a dependency descriptor + // requirement. 
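The Dtis() strings used in these DependencyStructure() additions pack one decode-target indication per character, with targets ordered by (spatial_id, temporal_id). A standalone sketch of how such a string reads; the character mapping follows the dependency descriptor DTI values as used elsewhere in this module and is stated here as an assumption:

    #include <string>
    #include <vector>

    enum class Dti { kNotPresent, kDiscardable, kSwitch, kRequired };

    // '-' : the decode target does not use this frame.
    // 'D' : used by the target but safe to drop.
    // 'S' : a switch point up to this target.
    // 'R' : required to keep decoding the target.
    std::vector<Dti> ParseDtis(const std::string& dtis) {
      std::vector<Dti> out;
      out.reserve(dtis.size());
      for (char c : dtis) {
        switch (c) {
          case '-': out.push_back(Dti::kNotPresent); break;
          case 'D': out.push_back(Dti::kDiscardable); break;
          case 'S': out.push_back(Dti::kSwitch); break;
          default:  out.push_back(Dti::kRequired); break;  // 'R'
        }
      }
      return out;
    }

For instance, "--SSRR" on an S1/T0 frame of the L3T2 structure below means the frame is absent from the S0 targets, a switch frame for the S1 targets, and required by the S2 targets.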
+ t[1].S(0).T(0).Dtis("SSSSSS").ChainDiffs({0, 0, 0}); + t[4].S(1).T(0).Dtis("--SSSS").ChainDiffs({1, 1, 1}).FrameDiffs({1}); + t[7].S(2).T(0).Dtis("----SS").ChainDiffs({2, 1, 1}).FrameDiffs({1}); + t[2].S(0).T(1).Dtis("-D-R-R").ChainDiffs({3, 2, 1}).FrameDiffs({3}); + t[5].S(1).T(1).Dtis("---D-R").ChainDiffs({4, 3, 2}).FrameDiffs({3, 1}); + t[8].S(2).T(1).Dtis("-----D").ChainDiffs({5, 4, 3}).FrameDiffs({3, 1}); + t[0].S(0).T(0).Dtis("SSRRRR").ChainDiffs({6, 5, 4}).FrameDiffs({6}); + t[3].S(1).T(0).Dtis("--SSRR").ChainDiffs({1, 1, 1}).FrameDiffs({6, 1}); + t[6].S(2).T(0).Dtis("----SS").ChainDiffs({2, 1, 1}).FrameDiffs({6, 1}); + return structure; +} + FrameDependencyStructure ScalabilityStructureL3T3::DependencyStructure() const { FrameDependencyStructure structure; structure.num_decode_targets = 9; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.h index a3cad0af8a..a4ede69342 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.h @@ -133,6 +133,23 @@ class ScalabilityStructureL2T2 : public ScalabilityStructureFullSvc { FrameDependencyStructure DependencyStructure() const override; }; +// S1T2 4 ,8 +// S1T1 / | 6' | +// S1T0 2--+-'+--+-... +// | | | | +// S0T2 | 3 | ,7 +// S0T1 | / 5' +// S0T0 1----'-----... +// Time-> 0 1 2 3 +class ScalabilityStructureL2T3 : public ScalabilityStructureFullSvc { + public: + explicit ScalabilityStructureL2T3(ScalingFactor resolution_factor = {}) + : ScalabilityStructureFullSvc(2, 3, resolution_factor) {} + ~ScalabilityStructureL2T3() override = default; + + FrameDependencyStructure DependencyStructure() const override; +}; + // S2 0-0-0- // | | | // S1 0-0-0-... 
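The ASCII diagram for L2T3 above shows the usual three-temporal-layer cadence: within each spatial layer the pattern repeats every four frames as T0, T2, T1, T2, so a receiver that drops T2 gets half the frame rate and one that keeps only T0 gets a quarter. A small sketch of that mapping (the helper name is illustrative):

    #include <cstdint>

    int TemporalIdForFrame(int64_t frame_index) {
      static constexpr int kPattern[4] = {0, 2, 1, 2};
      return kPattern[frame_index % 4];  // frame_index assumed non-negative.
    }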
@@ -148,6 +165,16 @@ class ScalabilityStructureL3T1 : public ScalabilityStructureFullSvc { FrameDependencyStructure DependencyStructure() const override; }; +// https://www.w3.org/TR/webrtc-svc/#L3T2* +class ScalabilityStructureL3T2 : public ScalabilityStructureFullSvc { + public: + explicit ScalabilityStructureL3T2(ScalingFactor resolution_factor = {}) + : ScalabilityStructureFullSvc(3, 2, resolution_factor) {} + ~ScalabilityStructureL3T2() override = default; + + FrameDependencyStructure DependencyStructure() const override; +}; + // https://www.w3.org/TR/webrtc-svc/#L3T3* class ScalabilityStructureL3T3 : public ScalabilityStructureFullSvc { public: diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.cc index 0ef7e8f156..0e6fecfae9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.cc @@ -343,6 +343,55 @@ FrameDependencyStructure ScalabilityStructureL2T3Key::DependencyStructure() return structure; } +ScalabilityStructureL3T1Key::~ScalabilityStructureL3T1Key() = default; + +FrameDependencyStructure ScalabilityStructureL3T1Key::DependencyStructure() + const { + FrameDependencyStructure structure; + structure.num_decode_targets = 3; + structure.num_chains = 3; + structure.decode_target_protected_by_chain = {0, 1, 2}; + auto& t = structure.templates; + t.resize(6); + // Templates are shown in the order frames following them appear in the + // stream, but in `structure.templates` array templates are sorted by + // (`spatial_id`, `temporal_id`) since that is a dependency descriptor + // requirement. + t[1].S(0).Dtis("SSS").ChainDiffs({0, 0, 0}); + t[3].S(1).Dtis("-SS").ChainDiffs({1, 1, 1}).FrameDiffs({1}); + t[5].S(2).Dtis("--S").ChainDiffs({2, 1, 1}).FrameDiffs({1}); + t[0].S(0).Dtis("S--").ChainDiffs({3, 2, 1}).FrameDiffs({3}); + t[2].S(1).Dtis("-S-").ChainDiffs({1, 3, 2}).FrameDiffs({3}); + t[4].S(2).Dtis("--S").ChainDiffs({2, 1, 3}).FrameDiffs({3}); + return structure; +} + +ScalabilityStructureL3T2Key::~ScalabilityStructureL3T2Key() = default; + +FrameDependencyStructure ScalabilityStructureL3T2Key::DependencyStructure() + const { + FrameDependencyStructure structure; + structure.num_decode_targets = 6; + structure.num_chains = 3; + structure.decode_target_protected_by_chain = {0, 0, 1, 1, 2, 2}; + auto& t = structure.templates; + t.resize(9); + // Templates are shown in the order frames following them appear in the + // stream, but in `structure.templates` array templates are sorted by + // (`spatial_id`, `temporal_id`) since that is a dependency descriptor + // requirement. 
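In the new key-SVC structures (L3T1Key, L3T2Key) each chain protects one spatial layer, and decode targets are laid out as (spatial_id, temporal_id) pairs in ascending order, which is how decode_target_protected_by_chain = {0, 0, 1, 1, 2, 2} reads for L3T2_KEY. A small indexing sketch; the helper names are illustrative only:

    // Index of the decode target for (spatial_id, temporal_id).
    int DecodeTargetIndex(int spatial_id, int temporal_id,
                          int num_temporal_layers) {
      return spatial_id * num_temporal_layers + temporal_id;
    }

    // In the *_KEY structures there is one chain per spatial layer.
    int ProtectingChain(int spatial_id) {
      return spatial_id;
    }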
+ t[1].S(0).T(0).Dtis("SSSSSS").ChainDiffs({0, 0, 0}); + t[4].S(1).T(0).Dtis("--SSSS").ChainDiffs({1, 1, 1}).FrameDiffs({1}); + t[7].S(2).T(0).Dtis("----SS").ChainDiffs({2, 1, 1}).FrameDiffs({1}); + t[2].S(0).T(1).Dtis("-D----").ChainDiffs({3, 2, 1}).FrameDiffs({3}); + t[5].S(1).T(1).Dtis("---D--").ChainDiffs({4, 3, 2}).FrameDiffs({3}); + t[8].S(2).T(1).Dtis("-----D").ChainDiffs({5, 4, 3}).FrameDiffs({3}); + t[0].S(0).T(0).Dtis("SS----").ChainDiffs({6, 5, 4}).FrameDiffs({6}); + t[3].S(1).T(0).Dtis("--SS--").ChainDiffs({1, 6, 5}).FrameDiffs({6}); + t[6].S(2).T(0).Dtis("----SS").ChainDiffs({2, 1, 6}).FrameDiffs({6}); + return structure; +} + ScalabilityStructureL3T3Key::~ScalabilityStructureL3T3Key() = default; FrameDependencyStructure ScalabilityStructureL3T3Key::DependencyStructure() diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.h index b66f6f83e4..54760da431 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.h @@ -109,6 +109,22 @@ class ScalabilityStructureL2T3Key : public ScalabilityStructureKeySvc { FrameDependencyStructure DependencyStructure() const override; }; +class ScalabilityStructureL3T1Key : public ScalabilityStructureKeySvc { + public: + ScalabilityStructureL3T1Key() : ScalabilityStructureKeySvc(3, 1) {} + ~ScalabilityStructureL3T1Key() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +class ScalabilityStructureL3T2Key : public ScalabilityStructureKeySvc { + public: + ScalabilityStructureL3T2Key() : ScalabilityStructureKeySvc(3, 2) {} + ~ScalabilityStructureL3T2Key() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + class ScalabilityStructureL3T3Key : public ScalabilityStructureKeySvc { public: ScalabilityStructureL3T3Key() : ScalabilityStructureKeySvc(3, 3) {} diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.cc index e5fa4c4368..54e27fda5c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.cc @@ -43,9 +43,11 @@ constexpr int ScalabilityStructureSimulcast::kMaxNumTemporalLayers; ScalabilityStructureSimulcast::ScalabilityStructureSimulcast( int num_spatial_layers, - int num_temporal_layers) + int num_temporal_layers, + ScalingFactor resolution_factor) : num_spatial_layers_(num_spatial_layers), num_temporal_layers_(num_temporal_layers), + resolution_factor_(resolution_factor), active_decode_targets_( (uint32_t{1} << (num_spatial_layers * num_temporal_layers)) - 1) { RTC_DCHECK_LE(num_spatial_layers, kMaxNumSpatialLayers); @@ -62,8 +64,10 @@ ScalabilityStructureSimulcast::StreamConfig() const { result.scaling_factor_num[num_spatial_layers_ - 1] = 1; result.scaling_factor_den[num_spatial_layers_ - 1] = 1; for (int sid = num_spatial_layers_ - 1; sid > 0; --sid) { - result.scaling_factor_num[sid - 1] = 1; - result.scaling_factor_den[sid - 1] = 2 * result.scaling_factor_den[sid]; + result.scaling_factor_num[sid - 1] = + resolution_factor_.num * result.scaling_factor_num[sid]; + result.scaling_factor_den[sid - 1] = + resolution_factor_.den * 
result.scaling_factor_den[sid]; } result.uses_reference_scaling = false; return result; @@ -242,6 +246,81 @@ FrameDependencyStructure ScalabilityStructureS2T1::DependencyStructure() const { return structure; } +FrameDependencyStructure ScalabilityStructureS2T2::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 4; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 0, 1, 1}; + auto& t = structure.templates; + t.resize(6); + t[1].S(0).T(0).Dtis("SS--").ChainDiffs({0, 0}); + t[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 0}); + t[2].S(0).T(1).Dtis("-D--").ChainDiffs({2, 1}).FrameDiffs({2}); + t[5].S(1).T(1).Dtis("---D").ChainDiffs({3, 2}).FrameDiffs({2}); + t[0].S(0).T(0).Dtis("SS--").ChainDiffs({4, 3}).FrameDiffs({4}); + t[3].S(1).T(0).Dtis("--SS").ChainDiffs({1, 4}).FrameDiffs({4}); + return structure; +} + +FrameDependencyStructure ScalabilityStructureS2T3::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 6; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 0, 0, 1, 1, 1}; + auto& t = structure.templates; + t.resize(10); + t[1].S(0).T(0).Dtis("SSS---").ChainDiffs({0, 0}); + t[6].S(1).T(0).Dtis("---SSS").ChainDiffs({1, 0}); + t[3].S(0).T(2).Dtis("--D---").ChainDiffs({2, 1}).FrameDiffs({2}); + t[8].S(1).T(2).Dtis("-----D").ChainDiffs({3, 2}).FrameDiffs({2}); + t[2].S(0).T(1).Dtis("-DS---").ChainDiffs({4, 3}).FrameDiffs({4}); + t[7].S(1).T(1).Dtis("----DS").ChainDiffs({5, 4}).FrameDiffs({4}); + t[4].S(0).T(2).Dtis("--D---").ChainDiffs({6, 5}).FrameDiffs({2}); + t[9].S(1).T(2).Dtis("-----D").ChainDiffs({7, 6}).FrameDiffs({2}); + t[0].S(0).T(0).Dtis("SSS---").ChainDiffs({8, 7}).FrameDiffs({8}); + t[5].S(1).T(0).Dtis("---SSS").ChainDiffs({1, 8}).FrameDiffs({8}); + return structure; +} + +FrameDependencyStructure ScalabilityStructureS3T1::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 3; + structure.num_chains = 3; + structure.decode_target_protected_by_chain = {0, 1, 2}; + auto& t = structure.templates; + t.resize(6); + t[1].S(0).T(0).Dtis("S--").ChainDiffs({0, 0, 0}); + t[3].S(1).T(0).Dtis("-S-").ChainDiffs({1, 0, 0}); + t[5].S(2).T(0).Dtis("--S").ChainDiffs({2, 1, 0}); + t[0].S(0).T(0).Dtis("S--").ChainDiffs({3, 2, 1}).FrameDiffs({3}); + t[2].S(1).T(0).Dtis("-S-").ChainDiffs({1, 3, 2}).FrameDiffs({3}); + t[4].S(2).T(0).Dtis("--S").ChainDiffs({2, 1, 3}).FrameDiffs({3}); + return structure; +} + +FrameDependencyStructure ScalabilityStructureS3T2::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 6; + structure.num_chains = 3; + structure.decode_target_protected_by_chain = {0, 0, 1, 1, 2, 2}; + auto& t = structure.templates; + t.resize(9); + // Templates are shown in the order frames following them appear in the + // stream, but in `structure.templates` array templates are sorted by + // (`spatial_id`, `temporal_id`) since that is a dependency descriptor + // requirement. 
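The simulcast StreamConfig() above now multiplies the new ScalingFactor down the spatial layers instead of hard-coding a halving step. A standalone sketch of that cumulative computation (Fraction and the function name are illustrative):

    #include <vector>

    struct Fraction { int num; int den; };

    // Mirrors the loop above: the top layer is 1/1 and each lower layer is
    // scaled by step.num/step.den relative to the layer above it.
    std::vector<Fraction> CumulativeScalingFactors(int num_spatial_layers,
                                                   Fraction step) {
      std::vector<Fraction> factors(num_spatial_layers, Fraction{1, 1});
      for (int sid = num_spatial_layers - 1; sid > 0; --sid) {
        factors[sid - 1].num = step.num * factors[sid].num;
        factors[sid - 1].den = step.den * factors[sid].den;
      }
      return factors;
    }

With the default ScalingFactor{1, 2} a three-layer simulcast gets 1/4, 1/2, 1/1 (lowest to highest); with {2, 3}, used by the "h" modes, it gets 4/9, 2/3, 1/1.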
+ t[1].S(0).T(0).Dtis("SS----").ChainDiffs({0, 0, 0}); + t[4].S(1).T(0).Dtis("--SS--").ChainDiffs({1, 0, 0}); + t[7].S(2).T(0).Dtis("----SS").ChainDiffs({2, 1, 0}); + t[2].S(0).T(1).Dtis("-D----").ChainDiffs({3, 2, 1}).FrameDiffs({3}); + t[5].S(1).T(1).Dtis("---D--").ChainDiffs({4, 3, 2}).FrameDiffs({3}); + t[8].S(2).T(1).Dtis("-----D").ChainDiffs({5, 4, 3}).FrameDiffs({3}); + t[0].S(0).T(0).Dtis("SS----").ChainDiffs({6, 5, 4}).FrameDiffs({6}); + t[3].S(1).T(0).Dtis("--SS--").ChainDiffs({1, 6, 5}).FrameDiffs({6}); + t[6].S(2).T(0).Dtis("----SS").ChainDiffs({2, 1, 6}).FrameDiffs({6}); + return structure; +} + FrameDependencyStructure ScalabilityStructureS3T3::DependencyStructure() const { FrameDependencyStructure structure; structure.num_decode_targets = 9; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.h index 7b57df2985..99be9f0d58 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.h @@ -23,8 +23,13 @@ namespace webrtc { // same temporal layering. class ScalabilityStructureSimulcast : public ScalableVideoController { public: + struct ScalingFactor { + int num = 1; + int den = 2; + }; ScalabilityStructureSimulcast(int num_spatial_layers, - int num_temporal_layers); + int num_temporal_layers, + ScalingFactor resolution_factor); ~ScalabilityStructureSimulcast() override; StreamLayersConfig StreamConfig() const override; @@ -58,6 +63,7 @@ class ScalabilityStructureSimulcast : public ScalableVideoController { const int num_spatial_layers_; const int num_temporal_layers_; + const ScalingFactor resolution_factor_; FramePattern last_pattern_ = kNone; std::bitset can_reference_t0_frame_for_spatial_id_ = 0; @@ -70,15 +76,65 @@ class ScalabilityStructureSimulcast : public ScalableVideoController { // S0 0--0--0- class ScalabilityStructureS2T1 : public ScalabilityStructureSimulcast { public: - ScalabilityStructureS2T1() : ScalabilityStructureSimulcast(2, 1) {} + explicit ScalabilityStructureS2T1(ScalingFactor resolution_factor = {}) + : ScalabilityStructureSimulcast(2, 1, resolution_factor) {} ~ScalabilityStructureS2T1() override = default; FrameDependencyStructure DependencyStructure() const override; }; +class ScalabilityStructureS2T2 : public ScalabilityStructureSimulcast { + public: + explicit ScalabilityStructureS2T2(ScalingFactor resolution_factor = {}) + : ScalabilityStructureSimulcast(2, 2, resolution_factor) {} + ~ScalabilityStructureS2T2() override = default; + + FrameDependencyStructure DependencyStructure() const override; +}; + +// S1T2 3 7 +// | / +// S1T1 / 5 +// |_/ +// S1T0 1-------9... +// +// S0T2 2 6 +// | / +// S0T1 / 4 +// |_/ +// S0T0 0-------8... 
+// Time-> 0 1 2 3 4 +class ScalabilityStructureS2T3 : public ScalabilityStructureSimulcast { + public: + explicit ScalabilityStructureS2T3(ScalingFactor resolution_factor = {}) + : ScalabilityStructureSimulcast(2, 3, resolution_factor) {} + ~ScalabilityStructureS2T3() override = default; + + FrameDependencyStructure DependencyStructure() const override; +}; + +class ScalabilityStructureS3T1 : public ScalabilityStructureSimulcast { + public: + explicit ScalabilityStructureS3T1(ScalingFactor resolution_factor = {}) + : ScalabilityStructureSimulcast(3, 1, resolution_factor) {} + ~ScalabilityStructureS3T1() override = default; + + FrameDependencyStructure DependencyStructure() const override; +}; + +class ScalabilityStructureS3T2 : public ScalabilityStructureSimulcast { + public: + explicit ScalabilityStructureS3T2(ScalingFactor resolution_factor = {}) + : ScalabilityStructureSimulcast(3, 2, resolution_factor) {} + ~ScalabilityStructureS3T2() override = default; + + FrameDependencyStructure DependencyStructure() const override; +}; + class ScalabilityStructureS3T3 : public ScalabilityStructureSimulcast { public: - ScalabilityStructureS3T3() : ScalabilityStructureSimulcast(3, 3) {} + explicit ScalabilityStructureS3T3(ScalingFactor resolution_factor = {}) + : ScalabilityStructureSimulcast(3, 3, resolution_factor) {} ~ScalabilityStructureS3T3() override = default; FrameDependencyStructure DependencyStructure() const override; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/svc_rate_allocator.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/svc_rate_allocator.cc index 2d27d47621..b6ae0d7430 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/svc_rate_allocator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/svc_rate_allocator.cc @@ -174,8 +174,10 @@ DataRate FindLayerTogglingThreshold(const VideoCodec& codec, SvcRateAllocator::NumLayers SvcRateAllocator::GetNumLayers( const VideoCodec& codec) { NumLayers layers; - if (!codec.ScalabilityMode().empty()) { - if (auto structure = CreateScalabilityStructure(codec.ScalabilityMode())) { + if (absl::optional scalability_mode = + codec.GetScalabilityMode(); + scalability_mode.has_value()) { + if (auto structure = CreateScalabilityStructure(*scalability_mode)) { ScalableVideoController::StreamLayersConfig config = structure->StreamConfig(); layers.spatial = config.num_spatial_layers; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.cc deleted file mode 100644 index f843ea693a..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.cc +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
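SvcRateAllocator::GetNumLayers() above now keys off VideoCodec::GetScalabilityMode(), which returns an optional enum, so "no scalability mode" is nullopt rather than an empty string. A hypothetical helper showing the same pattern (the function and its {1, 1} fallback are assumptions for the sketch, not the allocator's real fallback logic):

    #include <utility>

    #include "api/video_codecs/video_codec.h"
    #include "modules/video_coding/svc/create_scalability_structure.h"

    std::pair<int, int> SpatialTemporalLayers(const webrtc::VideoCodec& codec) {
      if (absl::optional<webrtc::ScalabilityMode> mode =
              codec.GetScalabilityMode();
          mode.has_value()) {
        if (auto structure = webrtc::CreateScalabilityStructure(*mode)) {
          const auto config = structure->StreamConfig();
          return {config.num_spatial_layers, config.num_temporal_layers};
        }
      }
      return {1, 1};  // No mode set or no structure registered for it.
    }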
- */ - -#include "modules/video_coding/timestamp_map.h" - -#include - -#include "modules/include/module_common_types_public.h" - -namespace webrtc { - -VCMTimestampMap::VCMTimestampMap(size_t capacity) - : ring_buffer_(new TimestampDataTuple[capacity]), - capacity_(capacity), - next_add_idx_(0), - next_pop_idx_(0) {} - -VCMTimestampMap::~VCMTimestampMap() {} - -void VCMTimestampMap::Add(uint32_t timestamp, const VCMFrameInformation& data) { - ring_buffer_[next_add_idx_].timestamp = timestamp; - ring_buffer_[next_add_idx_].data = data; - next_add_idx_ = (next_add_idx_ + 1) % capacity_; - - if (next_add_idx_ == next_pop_idx_) { - // Circular list full; forget oldest entry. - next_pop_idx_ = (next_pop_idx_ + 1) % capacity_; - } -} - -absl::optional VCMTimestampMap::Pop(uint32_t timestamp) { - while (!IsEmpty()) { - if (ring_buffer_[next_pop_idx_].timestamp == timestamp) { - // Found start time for this timestamp. - const VCMFrameInformation& data = ring_buffer_[next_pop_idx_].data; - ring_buffer_[next_pop_idx_].timestamp = 0; - next_pop_idx_ = (next_pop_idx_ + 1) % capacity_; - return data; - } else if (IsNewerTimestamp(ring_buffer_[next_pop_idx_].timestamp, - timestamp)) { - // The timestamp we are looking for is not in the list. - return absl::nullopt; - } - - // Not in this position, check next (and forget this position). - next_pop_idx_ = (next_pop_idx_ + 1) % capacity_; - } - - // Could not find matching timestamp in list. - return absl::nullopt; -} - -bool VCMTimestampMap::IsEmpty() const { - return (next_add_idx_ == next_pop_idx_); -} - -size_t VCMTimestampMap::Size() const { - // The maximum number of elements in the list is `capacity_` - 1. The list is - // empty if the add and pop indices are equal. - return next_add_idx_ >= next_pop_idx_ - ? next_add_idx_ - next_pop_idx_ - : next_add_idx_ + capacity_ - next_pop_idx_; -} - -void VCMTimestampMap::Clear() { - while (!IsEmpty()) { - ring_buffer_[next_pop_idx_].timestamp = 0; - next_pop_idx_ = (next_pop_idx_ + 1) % capacity_; - } -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.h deleted file mode 100644 index dc20a0551c..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.h +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_CODING_TIMESTAMP_MAP_H_ -#define MODULES_VIDEO_CODING_TIMESTAMP_MAP_H_ - -#include - -#include "absl/types/optional.h" -#include "api/rtp_packet_infos.h" -#include "api/units/timestamp.h" -#include "api/video/encoded_image.h" -#include "api/video/video_content_type.h" -#include "api/video/video_rotation.h" -#include "api/video/video_timing.h" - -namespace webrtc { - -struct VCMFrameInformation { - int64_t renderTimeMs; - absl::optional decodeStart; - void* userData; - VideoRotation rotation; - VideoContentType content_type; - EncodedImage::Timing timing; - int64_t ntp_time_ms; - RtpPacketInfos packet_infos; - // ColorSpace is not stored here, as it might be modified by decoders. 
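The removed VCMTimestampMap is a fixed-capacity ring buffer keyed by RTP timestamp: Add() overwrites the oldest entry when full, and Pop() walks forward, forgetting entries older than the one requested. A simplified standalone sketch (it omits the IsNewerTimestamp() early-out and wraparound handling, and requires T to be default-constructible):

    #include <cstdint>
    #include <optional>
    #include <utility>
    #include <vector>

    template <typename T>
    class TimestampRingMap {
     public:
      explicit TimestampRingMap(size_t capacity) : buf_(capacity) {}

      void Add(uint32_t timestamp, T data) {
        buf_[add_] = {timestamp, std::move(data)};
        add_ = (add_ + 1) % buf_.size();
        if (add_ == pop_) {
          pop_ = (pop_ + 1) % buf_.size();  // Buffer full: forget the oldest.
        }
      }

      std::optional<T> Pop(uint32_t timestamp) {
        while (add_ != pop_) {  // Non-empty.
          const size_t i = pop_;
          pop_ = (pop_ + 1) % buf_.size();
          if (buf_[i].first == timestamp) {
            return std::move(buf_[i].second);
          }
          // Otherwise the entry is older than `timestamp`; drop it and go on.
        }
        return std::nullopt;
      }

     private:
      std::vector<std::pair<uint32_t, T>> buf_;
      size_t add_ = 0;
      size_t pop_ = 0;
    };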
-}; - -class VCMTimestampMap { - public: - explicit VCMTimestampMap(size_t capacity); - ~VCMTimestampMap(); - - void Add(uint32_t timestamp, const VCMFrameInformation& data); - absl::optional Pop(uint32_t timestamp); - size_t Size() const; - void Clear(); - - private: - struct TimestampDataTuple { - uint32_t timestamp; - VCMFrameInformation data; - }; - bool IsEmpty() const; - - std::unique_ptr ring_buffer_; - const size_t capacity_; - size_t next_add_idx_; - size_t next_pop_idx_; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_TIMESTAMP_MAP_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.cc deleted file mode 100644 index 99e525a5d7..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.cc +++ /dev/null @@ -1,288 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_coding/timing.h" - - -#include - -#include "rtc_base/experiments/field_trial_parser.h" -#include "rtc_base/time/timestamp_extrapolator.h" -#include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" - -namespace webrtc { -namespace { -// Default pacing that is used for the low-latency renderer path. -constexpr TimeDelta kZeroPlayoutDelayDefaultMinPacing = TimeDelta::Millis(8); -} // namespace - -VCMTiming::VCMTiming(Clock* clock) - : clock_(clock), - ts_extrapolator_(std::make_unique( - clock_->TimeInMilliseconds())), - codec_timer_(std::make_unique()), - render_delay_ms_(kDefaultRenderDelayMs), - min_playout_delay_ms_(0), - max_playout_delay_ms_(10000), - jitter_delay_ms_(0), - current_delay_ms_(0), - prev_frame_timestamp_(0), - timing_frame_info_(), - num_decoded_frames_(0), - low_latency_renderer_enabled_("enabled", true), - zero_playout_delay_min_pacing_("min_pacing", - kZeroPlayoutDelayDefaultMinPacing), - last_decode_scheduled_ts_(0) { - ParseFieldTrial({&low_latency_renderer_enabled_}, - field_trial::FindFullName("WebRTC-LowLatencyRenderer")); - ParseFieldTrial({&zero_playout_delay_min_pacing_}, - field_trial::FindFullName("WebRTC-ZeroPlayoutDelay")); -} - -void VCMTiming::Reset() { - MutexLock lock(&mutex_); - ts_extrapolator_->Reset(clock_->TimeInMilliseconds()); - codec_timer_ = std::make_unique(); - render_delay_ms_ = kDefaultRenderDelayMs; - min_playout_delay_ms_ = 0; - jitter_delay_ms_ = 0; - current_delay_ms_ = 0; - prev_frame_timestamp_ = 0; -} - -void VCMTiming::set_render_delay(int render_delay_ms) { - MutexLock lock(&mutex_); - render_delay_ms_ = render_delay_ms; -} - -void VCMTiming::set_min_playout_delay(int min_playout_delay_ms) { - MutexLock lock(&mutex_); - min_playout_delay_ms_ = min_playout_delay_ms; -} - -int VCMTiming::min_playout_delay() { - MutexLock lock(&mutex_); - return min_playout_delay_ms_; -} - -void VCMTiming::set_max_playout_delay(int max_playout_delay_ms) { - MutexLock lock(&mutex_); - max_playout_delay_ms_ = max_playout_delay_ms; -} - -int VCMTiming::max_playout_delay() { - MutexLock lock(&mutex_); - return max_playout_delay_ms_; -} - -void VCMTiming::SetJitterDelay(int jitter_delay_ms) { - MutexLock lock(&mutex_); - if (jitter_delay_ms != 
jitter_delay_ms_) { - jitter_delay_ms_ = jitter_delay_ms; - // When in initial state, set current delay to minimum delay. - if (current_delay_ms_ == 0) { - current_delay_ms_ = jitter_delay_ms_; - } - } -} - -void VCMTiming::UpdateCurrentDelay(uint32_t frame_timestamp) { - MutexLock lock(&mutex_); - int target_delay_ms = TargetDelayInternal(); - - if (current_delay_ms_ == 0) { - // Not initialized, set current delay to target. - current_delay_ms_ = target_delay_ms; - } else if (target_delay_ms != current_delay_ms_) { - int64_t delay_diff_ms = - static_cast(target_delay_ms) - current_delay_ms_; - // Never change the delay with more than 100 ms every second. If we're - // changing the delay in too large steps we will get noticeable freezes. By - // limiting the change we can increase the delay in smaller steps, which - // will be experienced as the video is played in slow motion. When lowering - // the delay the video will be played at a faster pace. - int64_t max_change_ms = 0; - if (frame_timestamp < 0x0000ffff && prev_frame_timestamp_ > 0xffff0000) { - // wrap - max_change_ms = kDelayMaxChangeMsPerS * - (frame_timestamp + (static_cast(1) << 32) - - prev_frame_timestamp_) / - 90000; - } else { - max_change_ms = kDelayMaxChangeMsPerS * - (frame_timestamp - prev_frame_timestamp_) / 90000; - } - - if (max_change_ms <= 0) { - // Any changes less than 1 ms are truncated and will be postponed. - // Negative change will be due to reordering and should be ignored. - return; - } - delay_diff_ms = std::max(delay_diff_ms, -max_change_ms); - delay_diff_ms = std::min(delay_diff_ms, max_change_ms); - - current_delay_ms_ = current_delay_ms_ + delay_diff_ms; - } - prev_frame_timestamp_ = frame_timestamp; -} - -void VCMTiming::UpdateCurrentDelay(int64_t render_time_ms, - int64_t actual_decode_time_ms) { - MutexLock lock(&mutex_); - uint32_t target_delay_ms = TargetDelayInternal(); - int64_t delayed_ms = - actual_decode_time_ms - - (render_time_ms - RequiredDecodeTimeMs() - render_delay_ms_); - if (delayed_ms < 0) { - return; - } - if (current_delay_ms_ + delayed_ms <= target_delay_ms) { - current_delay_ms_ += delayed_ms; - } else { - current_delay_ms_ = target_delay_ms; - } -} - -void VCMTiming::StopDecodeTimer(uint32_t /*time_stamp*/, - int32_t decode_time_ms, - int64_t now_ms, - int64_t /*render_time_ms*/) { - StopDecodeTimer(decode_time_ms, now_ms); -} - -void VCMTiming::StopDecodeTimer(int32_t decode_time_ms, int64_t now_ms) { - MutexLock lock(&mutex_); - codec_timer_->AddTiming(decode_time_ms, now_ms); - RTC_DCHECK_GE(decode_time_ms, 0); - ++num_decoded_frames_; -} - -void VCMTiming::IncomingTimestamp(uint32_t time_stamp, int64_t now_ms) { - MutexLock lock(&mutex_); - ts_extrapolator_->Update(now_ms, time_stamp); -} - -int64_t VCMTiming::RenderTimeMs(uint32_t frame_timestamp, - int64_t now_ms) const { - MutexLock lock(&mutex_); - return RenderTimeMsInternal(frame_timestamp, now_ms); -} - -void VCMTiming::SetLastDecodeScheduledTimestamp( - int64_t last_decode_scheduled_ts) { - MutexLock lock(&mutex_); - last_decode_scheduled_ts_ = last_decode_scheduled_ts; -} - -int64_t VCMTiming::RenderTimeMsInternal(uint32_t frame_timestamp, - int64_t now_ms) const { - constexpr int kLowLatencyRendererMaxPlayoutDelayMs = 500; - if (min_playout_delay_ms_ == 0 && - (max_playout_delay_ms_ == 0 || - (low_latency_renderer_enabled_ && - max_playout_delay_ms_ <= kLowLatencyRendererMaxPlayoutDelayMs))) { - // Render as soon as possible or with low-latency renderer algorithm. 
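UpdateCurrentDelay() above never moves the current delay towards the target by more than kDelayMaxChangeMsPerS (100 ms) per second of 90 kHz RTP time, so delay adjustments show up as slight slow-motion or speed-up rather than freezes. A standalone restatement of that limit, including the 32-bit timestamp wraparound case (the helper name is illustrative):

    #include <cstdint>

    int64_t MaxDelayChangeMs(uint32_t frame_timestamp, uint32_t prev_timestamp) {
      constexpr int64_t kDelayMaxChangeMsPerS = 100;
      int64_t elapsed_ticks;
      if (frame_timestamp < 0x0000ffff && prev_timestamp > 0xffff0000) {
        // The 32-bit RTP timestamp wrapped between the two frames.
        elapsed_ticks = static_cast<int64_t>(frame_timestamp) +
                        (int64_t{1} << 32) - prev_timestamp;
      } else {
        elapsed_ticks = static_cast<int64_t>(frame_timestamp) - prev_timestamp;
      }
      // A non-positive result means reordering; the caller ignores it.
      return kDelayMaxChangeMsPerS * elapsed_ticks / 90000;  // 90 kHz clock.
    }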
- return 0; - } - // Note that TimestampExtrapolator::ExtrapolateLocalTime is not a const - // method; it mutates the object's wraparound state. - int64_t estimated_complete_time_ms = - ts_extrapolator_->ExtrapolateLocalTime(frame_timestamp); - if (estimated_complete_time_ms == -1) { - estimated_complete_time_ms = now_ms; - } - - // Make sure the actual delay stays in the range of `min_playout_delay_ms_` - // and `max_playout_delay_ms_`. - int actual_delay = std::max(current_delay_ms_, min_playout_delay_ms_); - actual_delay = std::min(actual_delay, max_playout_delay_ms_); - return estimated_complete_time_ms + actual_delay; -} - -int VCMTiming::RequiredDecodeTimeMs() const { - const int decode_time_ms = codec_timer_->RequiredDecodeTimeMs(); - RTC_DCHECK_GE(decode_time_ms, 0); - return decode_time_ms; -} - -int64_t VCMTiming::MaxWaitingTime(int64_t render_time_ms, - int64_t now_ms, - bool too_many_frames_queued) const { - MutexLock lock(&mutex_); - - if (render_time_ms == 0 && zero_playout_delay_min_pacing_->us() > 0 && - min_playout_delay_ms_ == 0 && max_playout_delay_ms_ > 0) { - // `render_time_ms` == 0 indicates that the frame should be decoded and - // rendered as soon as possible. However, the decoder can be choked if too - // many frames are sent at once. Therefore, limit the interframe delay to - // |zero_playout_delay_min_pacing_| unless too many frames are queued in - // which case the frames are sent to the decoder at once. - if (too_many_frames_queued) { - return 0; - } - int64_t earliest_next_decode_start_time = - last_decode_scheduled_ts_ + zero_playout_delay_min_pacing_->ms(); - int64_t max_wait_time_ms = now_ms >= earliest_next_decode_start_time - ? 0 - : earliest_next_decode_start_time - now_ms; - return max_wait_time_ms; - } - return render_time_ms - now_ms - RequiredDecodeTimeMs() - render_delay_ms_; -} - -int VCMTiming::TargetVideoDelay() const { - MutexLock lock(&mutex_); - return TargetDelayInternal(); -} - -int VCMTiming::TargetDelayInternal() const { - return std::max(min_playout_delay_ms_, - jitter_delay_ms_ + RequiredDecodeTimeMs() + render_delay_ms_); -} - -bool VCMTiming::GetTimings(int* max_decode_ms, - int* current_delay_ms, - int* target_delay_ms, - int* jitter_buffer_ms, - int* min_playout_delay_ms, - int* render_delay_ms) const { - MutexLock lock(&mutex_); - *max_decode_ms = RequiredDecodeTimeMs(); - *current_delay_ms = current_delay_ms_; - *target_delay_ms = TargetDelayInternal(); - *jitter_buffer_ms = jitter_delay_ms_; - *min_playout_delay_ms = min_playout_delay_ms_; - *render_delay_ms = render_delay_ms_; - return (num_decoded_frames_ > 0); -} - -void VCMTiming::SetTimingFrameInfo(const TimingFrameInfo& info) { - MutexLock lock(&mutex_); - timing_frame_info_.emplace(info); -} - -absl::optional VCMTiming::GetTimingFrameInfo() { - MutexLock lock(&mutex_); - return timing_frame_info_; -} - -void VCMTiming::SetMaxCompositionDelayInFrames( - absl::optional max_composition_delay_in_frames) { - MutexLock lock(&mutex_); - max_composition_delay_in_frames_ = max_composition_delay_in_frames; -} - -absl::optional VCMTiming::MaxCompositionDelayInFrames() const { - MutexLock lock(&mutex_); - return max_composition_delay_in_frames_; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.h deleted file mode 100644 index 07c12e919a..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.h +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright (c) 2011 The 
WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_CODING_TIMING_H_ -#define MODULES_VIDEO_CODING_TIMING_H_ - -#include - -#include "absl/types/optional.h" -#include "api/units/time_delta.h" -#include "api/video/video_timing.h" -#include "modules/video_coding/codec_timer.h" -#include "rtc_base/experiments/field_trial_parser.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_annotations.h" -#include "rtc_base/time/timestamp_extrapolator.h" - -namespace webrtc { - -class Clock; -class TimestampExtrapolator; - -class VCMTiming { - public: - explicit VCMTiming(Clock* clock); - virtual ~VCMTiming() = default; - - // Resets the timing to the initial state. - void Reset(); - - // Set the amount of time needed to render an image. Defaults to 10 ms. - void set_render_delay(int render_delay_ms); - - // Set the minimum time the video must be delayed on the receiver to - // get the desired jitter buffer level. - void SetJitterDelay(int required_delay_ms); - - // Set/get the minimum playout delay from capture to render in ms. - void set_min_playout_delay(int min_playout_delay_ms); - int min_playout_delay(); - - // Set/get the maximum playout delay from capture to render in ms. - void set_max_playout_delay(int max_playout_delay_ms); - int max_playout_delay(); - - // Increases or decreases the current delay to get closer to the target delay. - // Calculates how long it has been since the previous call to this function, - // and increases/decreases the delay in proportion to the time difference. - void UpdateCurrentDelay(uint32_t frame_timestamp); - - // Increases or decreases the current delay to get closer to the target delay. - // Given the actual decode time in ms and the render time in ms for a frame, - // this function calculates how late the frame is and increases the delay - // accordingly. - void UpdateCurrentDelay(int64_t render_time_ms, - int64_t actual_decode_time_ms); - - // Stops the decoder timer, should be called when the decoder returns a frame - // or when the decoded frame callback is called. - void StopDecodeTimer(int32_t decode_time_ms, int64_t now_ms); - // TODO(kron): Remove once downstream projects has been changed to use the - // above function. - void StopDecodeTimer(uint32_t time_stamp, - int32_t decode_time_ms, - int64_t now_ms, - int64_t render_time_ms); - - // Used to report that a frame is passed to decoding. Updates the timestamp - // filter which is used to map between timestamps and receiver system time. - void IncomingTimestamp(uint32_t time_stamp, int64_t last_packet_time_ms); - - // Returns the receiver system time when the frame with timestamp - // `frame_timestamp` should be rendered, assuming that the system time - // currently is `now_ms`. - virtual int64_t RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms) const; - - // Returns the maximum time in ms that we can wait for a frame to become - // complete before we must pass it to the decoder. render_time_ms==0 indicates - // that the frames should be processed as quickly as possible, with possibly - // only a small delay added to make sure that the decoder is not overloaded. 
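//
// Illustrative sketch (not part of this patch or the WebRTC sources): the
// zero-playout-delay branch of the deleted MaxWaitingTime() shown above
// reduces to a simple pacing rule. A minimal, self-contained restatement with
// hypothetical helper and parameter names, for reference only:
#include <algorithm>
#include <cstdint>

// Returns how long the caller may wait before handing the next frame to the
// decoder when frames carry render_time_ms == 0 (decode as soon as possible).
int64_t ZeroPlayoutDelayMaxWaitMs(int64_t now_ms,
                                  int64_t last_decode_scheduled_ms,
                                  int64_t min_pacing_ms,
                                  bool too_many_frames_queued) {
  if (too_many_frames_queued) {
    return 0;  // Decode queue is full: release frames immediately.
  }
  // Otherwise pace decodes: wait until `min_pacing_ms` has elapsed since the
  // last frame was scheduled, and never report a negative wait time.
  int64_t earliest_next_ms = last_decode_scheduled_ms + min_pacing_ms;
  return std::max<int64_t>(0, earliest_next_ms - now_ms);
}
//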
- // In this case, the parameter too_many_frames_queued is used to signal that - // the decode queue is full and that the frame should be decoded as soon as - // possible. - virtual int64_t MaxWaitingTime(int64_t render_time_ms, - int64_t now_ms, - bool too_many_frames_queued) const; - - // Returns the current target delay which is required delay + decode time + - // render delay. - int TargetVideoDelay() const; - - // Return current timing information. Returns true if the first frame has been - // decoded, false otherwise. - virtual bool GetTimings(int* max_decode_ms, - int* current_delay_ms, - int* target_delay_ms, - int* jitter_buffer_ms, - int* min_playout_delay_ms, - int* render_delay_ms) const; - - void SetTimingFrameInfo(const TimingFrameInfo& info); - absl::optional GetTimingFrameInfo(); - - void SetMaxCompositionDelayInFrames( - absl::optional max_composition_delay_in_frames); - absl::optional MaxCompositionDelayInFrames() const; - - // Updates the last time a frame was scheduled for decoding. - void SetLastDecodeScheduledTimestamp(int64_t last_decode_scheduled_ts); - - enum { kDefaultRenderDelayMs = 10 }; - enum { kDelayMaxChangeMsPerS = 100 }; - - protected: - int RequiredDecodeTimeMs() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - int64_t RenderTimeMsInternal(uint32_t frame_timestamp, int64_t now_ms) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - int TargetDelayInternal() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - - private: - mutable Mutex mutex_; - Clock* const clock_; - const std::unique_ptr ts_extrapolator_ - RTC_PT_GUARDED_BY(mutex_); - std::unique_ptr codec_timer_ RTC_GUARDED_BY(mutex_) - RTC_PT_GUARDED_BY(mutex_); - int render_delay_ms_ RTC_GUARDED_BY(mutex_); - // Best-effort playout delay range for frames from capture to render. - // The receiver tries to keep the delay between `min_playout_delay_ms_` - // and `max_playout_delay_ms_` taking the network jitter into account. - // A special case is where min_playout_delay_ms_ = max_playout_delay_ms_ = 0, - // in which case the receiver tries to play the frames as they arrive. - int min_playout_delay_ms_ RTC_GUARDED_BY(mutex_); - int max_playout_delay_ms_ RTC_GUARDED_BY(mutex_); - int jitter_delay_ms_ RTC_GUARDED_BY(mutex_); - int current_delay_ms_ RTC_GUARDED_BY(mutex_); - uint32_t prev_frame_timestamp_ RTC_GUARDED_BY(mutex_); - absl::optional timing_frame_info_ RTC_GUARDED_BY(mutex_); - size_t num_decoded_frames_ RTC_GUARDED_BY(mutex_); - // Set by the field trial WebRTC-LowLatencyRenderer. The parameter enabled - // determines if the low-latency renderer algorithm should be used for the - // case min playout delay=0 and max playout delay>0. - FieldTrialParameter low_latency_renderer_enabled_ - RTC_GUARDED_BY(mutex_); - absl::optional max_composition_delay_in_frames_ RTC_GUARDED_BY(mutex_); - // Set by the field trial WebRTC-ZeroPlayoutDelay. The parameter min_pacing - // determines the minimum delay between frames scheduled for decoding that is - // used when min playout delay=0 and max playout delay>=0. - FieldTrialParameter zero_playout_delay_min_pacing_ - RTC_GUARDED_BY(mutex_); - // Timestamp at which the last frame was scheduled to be sent to the decoder. - // Used only when the RTP header extension playout delay is set to min=0 ms - // which is indicated by a render time set to 0. 
- int64_t last_decode_scheduled_ts_ RTC_GUARDED_BY(mutex_); -}; -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_TIMING_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codec_timer.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/codec_timer.cc similarity index 79% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codec_timer.cc rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/codec_timer.cc index b05195764b..f57d42d40a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codec_timer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/codec_timer.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/video_coding/codec_timer.h" +#include "modules/video_coding/timing/codec_timer.h" #include @@ -25,11 +25,10 @@ const int64_t kTimeLimitMs = 10000; } // anonymous namespace -VCMCodecTimer::VCMCodecTimer() - : ignored_sample_count_(0), filter_(kPercentile) {} -VCMCodecTimer::~VCMCodecTimer() = default; +CodecTimer::CodecTimer() : ignored_sample_count_(0), filter_(kPercentile) {} +CodecTimer::~CodecTimer() = default; -void VCMCodecTimer::AddTiming(int64_t decode_time_ms, int64_t now_ms) { +void CodecTimer::AddTiming(int64_t decode_time_ms, int64_t now_ms) { // Ignore the first `kIgnoredSampleCount` samples. if (ignored_sample_count_ < kIgnoredSampleCount) { ++ignored_sample_count_; @@ -49,11 +48,11 @@ void VCMCodecTimer::AddTiming(int64_t decode_time_ms, int64_t now_ms) { } // Get the 95th percentile observed decode time within a time window. -int64_t VCMCodecTimer::RequiredDecodeTimeMs() const { +int64_t CodecTimer::RequiredDecodeTimeMs() const { return filter_.GetPercentileValue(); } -VCMCodecTimer::Sample::Sample(int64_t decode_time_ms, int64_t sample_time_ms) +CodecTimer::Sample::Sample(int64_t decode_time_ms, int64_t sample_time_ms) : decode_time_ms(decode_time_ms), sample_time_ms(sample_time_ms) {} } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codec_timer.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/codec_timer.h similarity index 86% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codec_timer.h rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/codec_timer.h index 2948b82974..9f12d82e98 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codec_timer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/codec_timer.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_VIDEO_CODING_CODEC_TIMER_H_ -#define MODULES_VIDEO_CODING_CODEC_TIMER_H_ +#ifndef MODULES_VIDEO_CODING_TIMING_CODEC_TIMER_H_ +#define MODULES_VIDEO_CODING_TIMING_CODEC_TIMER_H_ #include @@ -17,10 +17,10 @@ namespace webrtc { -class VCMCodecTimer { +class CodecTimer { public: - VCMCodecTimer(); - ~VCMCodecTimer(); + CodecTimer(); + ~CodecTimer(); // Add a new decode time to the filter. 
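  // (Informational, from the renamed codec_timer.cc above: samples feed a
  // percentile filter that tracks the 95th percentile of recent decode times,
  // samples older than kTimeLimitMs = 10 s are dropped, and the first
  // kIgnoredSampleCount samples after construction are ignored.)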
void AddTiming(int64_t new_decode_time_ms, int64_t now_ms); @@ -47,4 +47,4 @@ class VCMCodecTimer { } // namespace webrtc -#endif // MODULES_VIDEO_CODING_CODEC_TIMER_H_ +#endif // MODULES_VIDEO_CODING_TIMING_CODEC_TIMER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/frame_delay_variation_kalman_filter.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/frame_delay_variation_kalman_filter.cc new file mode 100644 index 0000000000..ec6aa3445a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/frame_delay_variation_kalman_filter.cc @@ -0,0 +1,148 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/timing/frame_delay_variation_kalman_filter.h" + +#include "api/units/data_size.h" +#include "api/units/time_delta.h" + +namespace webrtc { + +namespace { +// TODO(brandtr): The value below corresponds to 8 Gbps. Is that reasonable? +constexpr double kMaxBandwidth = 0.000001; // Unit: [1 / bytes per ms]. +} // namespace + +FrameDelayVariationKalmanFilter::FrameDelayVariationKalmanFilter() { + // TODO(brandtr): Is there a factor 1000 missing here? + estimate_[0] = 1 / (512e3 / 8); // Unit: [1 / bytes per ms] + estimate_[1] = 0; // Unit: [ms] + + // Initial estimate covariance. + estimate_cov_[0][0] = 1e-4; // Unit: [(1 / bytes per ms)^2] + estimate_cov_[1][1] = 1e2; // Unit: [ms^2] + estimate_cov_[0][1] = estimate_cov_[1][0] = 0; + + // Process noise covariance. + process_noise_cov_diag_[0] = 2.5e-10; // Unit: [(1 / bytes per ms)^2] + process_noise_cov_diag_[1] = 1e-10; // Unit: [ms^2] +} + +void FrameDelayVariationKalmanFilter::PredictAndUpdate( + double frame_delay_variation_ms, + double frame_size_variation_bytes, + double max_frame_size_bytes, + double var_noise) { + // Sanity checks. + if (max_frame_size_bytes < 1) { + return; + } + if (var_noise <= 0.0) { + return; + } + + // This member function follows the data flow in + // https://en.wikipedia.org/wiki/Kalman_filter#Details. + + // 1) Estimate prediction: `x = F*x`. + // For this model, there is no need to explicitly predict the estimate, since + // the state transition matrix is the identity. + + // 2) Estimate covariance prediction: `P = F*P*F' + Q`. + // Again, since the state transition matrix is the identity, this update + // is performed by simply adding the process noise covariance. + estimate_cov_[0][0] += process_noise_cov_diag_[0]; + estimate_cov_[1][1] += process_noise_cov_diag_[1]; + + // 3) Innovation: `y = z - H*x`. + // This is the part of the measurement that cannot be explained by the current + // estimate. + double innovation = + frame_delay_variation_ms - + GetFrameDelayVariationEstimateTotal(frame_size_variation_bytes); + + // 4) Innovation variance: `s = H*P*H' + r`. 
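  // With the observation (row) vector H = [frame_size_variation_bytes, 1],
  // `estim_cov_times_obs` below holds the column vector P*H', and
  // `innovation_var` is H*(P*H') plus the observation noise term. Note that
  // the noise currently enters as a standard deviation rather than a
  // variance, which is what the TODO below about dimensional analysis refers
  // to.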
+ double estim_cov_times_obs[2]; + estim_cov_times_obs[0] = + estimate_cov_[0][0] * frame_size_variation_bytes + estimate_cov_[0][1]; + estim_cov_times_obs[1] = + estimate_cov_[1][0] * frame_size_variation_bytes + estimate_cov_[1][1]; + double observation_noise_stddev = + (300.0 * exp(-fabs(frame_size_variation_bytes) / + (1e0 * max_frame_size_bytes)) + + 1) * + sqrt(var_noise); + if (observation_noise_stddev < 1.0) { + observation_noise_stddev = 1.0; + } + // TODO(brandtr): Shouldn't we add observation_noise_stddev^2 here? Otherwise, + // the dimensional analysis fails. + double innovation_var = frame_size_variation_bytes * estim_cov_times_obs[0] + + estim_cov_times_obs[1] + observation_noise_stddev; + if ((innovation_var < 1e-9 && innovation_var >= 0) || + (innovation_var > -1e-9 && innovation_var <= 0)) { + RTC_DCHECK_NOTREACHED(); + return; + } + + // 5) Optimal Kalman gain: `K = P*H'/s`. + // How much to trust the model vs. how much to trust the measurement. + double kalman_gain[2]; + kalman_gain[0] = estim_cov_times_obs[0] / innovation_var; + kalman_gain[1] = estim_cov_times_obs[1] / innovation_var; + + // 6) Estimate update: `x = x + K*y`. + // Optimally weight the new information in the innovation and add it to the + // old estimate. + estimate_[0] += kalman_gain[0] * innovation; + estimate_[1] += kalman_gain[1] * innovation; + + // (This clamping is not part of the linear Kalman filter.) + if (estimate_[0] < kMaxBandwidth) { + estimate_[0] = kMaxBandwidth; + } + + // 7) Estimate covariance update: `P = (I - K*H)*P` + double t00 = estimate_cov_[0][0]; + double t01 = estimate_cov_[0][1]; + estimate_cov_[0][0] = + (1 - kalman_gain[0] * frame_size_variation_bytes) * t00 - + kalman_gain[0] * estimate_cov_[1][0]; + estimate_cov_[0][1] = + (1 - kalman_gain[0] * frame_size_variation_bytes) * t01 - + kalman_gain[0] * estimate_cov_[1][1]; + estimate_cov_[1][0] = estimate_cov_[1][0] * (1 - kalman_gain[1]) - + kalman_gain[1] * frame_size_variation_bytes * t00; + estimate_cov_[1][1] = estimate_cov_[1][1] * (1 - kalman_gain[1]) - + kalman_gain[1] * frame_size_variation_bytes * t01; + + // Covariance matrix, must be positive semi-definite. + RTC_DCHECK(estimate_cov_[0][0] + estimate_cov_[1][1] >= 0 && + estimate_cov_[0][0] * estimate_cov_[1][1] - + estimate_cov_[0][1] * estimate_cov_[1][0] >= + 0 && + estimate_cov_[0][0] >= 0); +} + +double FrameDelayVariationKalmanFilter::GetFrameDelayVariationEstimateSizeBased( + double frame_size_variation_bytes) const { + // Unit: [1 / bytes per millisecond] * [bytes] = [milliseconds]. + return estimate_[0] * frame_size_variation_bytes; +} + +double FrameDelayVariationKalmanFilter::GetFrameDelayVariationEstimateTotal( + double frame_size_variation_bytes) const { + double frame_transmission_delay_ms = + GetFrameDelayVariationEstimateSizeBased(frame_size_variation_bytes); + double link_queuing_delay_ms = estimate_[1]; + return frame_transmission_delay_ms + link_queuing_delay_ms; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/frame_delay_variation_kalman_filter.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/frame_delay_variation_kalman_filter.h new file mode 100644 index 0000000000..a65ceefa10 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/frame_delay_variation_kalman_filter.h @@ -0,0 +1,106 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_TIMING_FRAME_DELAY_VARIATION_KALMAN_FILTER_H_ +#define MODULES_VIDEO_CODING_TIMING_FRAME_DELAY_VARIATION_KALMAN_FILTER_H_ + +#include "api/units/data_size.h" +#include "api/units/time_delta.h" + +namespace webrtc { + +// This class uses a linear Kalman filter (see +// https://en.wikipedia.org/wiki/Kalman_filter) to estimate the frame delay +// variation (i.e., the difference in transmission time between a frame and the +// prior frame) for a frame, given its size variation in bytes (i.e., the +// difference in size between a frame and the prior frame). The idea is that, +// given a fixed link bandwidth, a larger frame (in bytes) would take +// proportionally longer to arrive than a correspondingly smaller frame. Using +// the variations of frame delay and frame size, the underlying bandwidth and +// queuing delay variation of the network link can be estimated. +// +// The filter takes as input the frame delay variation, the difference between +// the actual inter-frame arrival time and the expected inter-frame arrival time +// (based on RTP timestamp), and frame size variation, the inter-frame size +// delta for a single frame. The frame delay variation is seen as the +// measurement and the frame size variation is used in the observation model. +// The hidden state of the filter is the link bandwidth and queuing delay +// buildup. The estimated state can be used to get the expected frame delay +// variation for a frame, given its frame size variation. This information can +// then be used to estimate the frame delay variation coming from network +// jitter. +// +// Mathematical details: +// * The state (`x` in Wikipedia notation) is a 2x1 vector comprising the +// reciprocal of link bandwidth [1 / bytes per ms] and the +// link queuing delay buildup [ms]. +// * The state transition matrix (`F`) is the 2x2 identity matrix, meaning that +// link bandwidth and link queuing delay buildup are modeled as independent. +// * The measurement (`z`) is the (scalar) frame delay variation [ms]. +// * The observation matrix (`H`) is a 1x2 vector set as +// `{frame_size_variation [bytes], 1.0}`. +// * The state estimate covariance (`P`) is a symmetric 2x2 matrix. +// * The process noise covariance (`Q`) is a constant 2x2 diagonal matrix +// [(1 / bytes per ms)^2, ms^2]. +// * The observation noise covariance (`r`) is a scalar [ms^2] that is +// determined externally to this class. +class FrameDelayVariationKalmanFilter { + public: + FrameDelayVariationKalmanFilter(); + ~FrameDelayVariationKalmanFilter() = default; + + // Predicts and updates the filter, given a new pair of frame delay variation + // and frame size variation. + // + // Inputs: + // `frame_delay_variation_ms`: + // Frame delay variation as calculated by the `InterFrameDelay` estimator. + // + // `frame_size_variation_bytes`: + // Frame size variation, i.e., the current frame size minus the previous + // frame size (in bytes). Note that this quantity may be negative. + // + // `max_frame_size_bytes`: + // Filtered largest frame size received since the last reset. + // + // `var_noise`: + // Variance of the estimated random jitter. 
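  //
  // Usage sketch (illustrative only, with made-up numbers):
  //
  //   FrameDelayVariationKalmanFilter filter;
  //   // A frame arrived 5 ms late relative to its RTP timestamp spacing and
  //   // was 4000 bytes larger than the previous frame; the filtered max
  //   // frame size is 10000 bytes and the estimated noise variance is 4 ms^2.
  //   filter.PredictAndUpdate(/*frame_delay_variation_ms=*/5.0,
  //                           /*frame_size_variation_bytes=*/4000.0,
  //                           /*max_frame_size_bytes=*/10000.0,
  //                           /*var_noise=*/4.0);
  //   // Expected delay variation for a frame that is again 4000 bytes larger.
  //   double expected_ms =
  //       filter.GetFrameDelayVariationEstimateTotal(4000.0);
  //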
+ // + // TODO(bugs.webrtc.org/14381): For now use doubles as input parameters as + // units defined in api/units have insufficient underlying precision for + // jitter estimation. + void PredictAndUpdate(double frame_delay_variation_ms, + double frame_size_variation_bytes, + double max_frame_size_bytes, + double var_noise); + + // Given a frame size variation, returns the estimated frame delay variation + // explained by the link bandwidth alone. + double GetFrameDelayVariationEstimateSizeBased( + double frame_size_variation_bytes) const; + + // Given a frame size variation, returns the estimated frame delay variation + // explained by both link bandwidth and link queuing delay buildup. + double GetFrameDelayVariationEstimateTotal( + double frame_size_variation_bytes) const; + + private: + // State estimate (bandwidth [1 / bytes per ms], queue buildup [ms]). + double estimate_[2]; + double estimate_cov_[2][2]; // Estimate covariance. + + // Process noise covariance. This is a diagonal matrix, so we only store the + // diagonal entries. + double process_noise_cov_diag_[2]; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_TIMING_FRAME_DELAY_VARIATION_KALMAN_FILTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/inter_frame_delay.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/inter_frame_delay.cc new file mode 100644 index 0000000000..bed9f875ee --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/inter_frame_delay.cc @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/timing/inter_frame_delay.h" + +#include "absl/types/optional.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "modules/include/module_common_types_public.h" + +namespace webrtc { + +namespace { +constexpr Frequency k90kHz = Frequency::KiloHertz(90); +} + +InterFrameDelay::InterFrameDelay() { + Reset(); +} + +// Resets the delay estimate. +void InterFrameDelay::Reset() { + prev_wall_clock_ = absl::nullopt; + prev_rtp_timestamp_unwrapped_ = 0; +} + +// Calculates the delay of a frame with the given timestamp. +// This method is called when the frame is complete. +absl::optional InterFrameDelay::CalculateDelay( + uint32_t rtp_timestamp, + Timestamp now) { + int64_t rtp_timestamp_unwrapped = unwrapper_.Unwrap(rtp_timestamp); + if (!prev_wall_clock_) { + // First set of data, initialization, wait for next frame. + prev_wall_clock_ = now; + prev_rtp_timestamp_unwrapped_ = rtp_timestamp_unwrapped; + return TimeDelta::Zero(); + } + + // Account for reordering in jitter variance estimate in the future? + // Note that this also captures incomplete frames which are grabbed for + // decoding after a later frame has been complete, i.e. real packet losses. + uint32_t cropped_last = static_cast(prev_rtp_timestamp_unwrapped_); + if (rtp_timestamp_unwrapped < prev_rtp_timestamp_unwrapped_ || + !IsNewerTimestamp(rtp_timestamp, cropped_last)) { + return absl::nullopt; + } + + // Compute the compensated timestamp difference. 
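  // Worked example: at a nominal 30 fps the RTP timestamp advances by
  // 90000 / 30 = 3000 ticks per frame, so dts = 3000 / 90 kHz = 33.3 ms.
  // If the two frames actually arrived 40 ms apart, the returned delay
  // variation is 40 - 33.3 = +6.7 ms.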
+ int64_t d_rtp_ticks = rtp_timestamp_unwrapped - prev_rtp_timestamp_unwrapped_; + TimeDelta dts = d_rtp_ticks / k90kHz; + TimeDelta dt = now - *prev_wall_clock_; + + // frameDelay is the difference of dT and dTS -- i.e. the difference of the + // wall clock time difference and the timestamp difference between two + // following frames. + TimeDelta delay = dt - dts; + + prev_rtp_timestamp_unwrapped_ = rtp_timestamp_unwrapped; + prev_wall_clock_ = now; + return delay; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/inter_frame_delay.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/inter_frame_delay.h new file mode 100644 index 0000000000..579a488cb1 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/inter_frame_delay.h @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_TIMING_INTER_FRAME_DELAY_H_ +#define MODULES_VIDEO_CODING_TIMING_INTER_FRAME_DELAY_H_ + +#include + +#include "absl/types/optional.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/include/module_common_types_public.h" + +namespace webrtc { + +class InterFrameDelay { + public: + InterFrameDelay(); + + // Resets the estimate. Zeros are given as parameters. + void Reset(); + + // Calculates the delay of a frame with the given timestamp. + // This method is called when the frame is complete. + absl::optional CalculateDelay(uint32_t rtp_timestamp, + Timestamp now); + + private: + // The previous rtp timestamp passed to the delay estimate + int64_t prev_rtp_timestamp_unwrapped_; + TimestampUnwrapper unwrapper_; + + // The previous wall clock timestamp used by the delay estimate + absl::optional prev_wall_clock_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_TIMING_INTER_FRAME_DELAY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/jitter_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/jitter_estimator.cc new file mode 100644 index 0000000000..62757787a1 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/jitter_estimator.cc @@ -0,0 +1,476 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/timing/jitter_estimator.h" + +#include +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/field_trials_view.h" +#include "api/units/data_size.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/video_coding/timing/rtt_filter.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { +namespace { + +// Number of frames to wait for before post processing estimate. Also used in +// the frame rate estimator ramp-up. +constexpr size_t kFrameProcessingStartupCount = 30; + +// Number of frames to wait for before enabling the frame size filters. +constexpr size_t kFramesUntilSizeFiltering = 5; + +// Initial value for frame size filters. +constexpr double kInitialAvgAndMaxFrameSizeBytes = 500.0; + +// Time constant for average frame size filter. +constexpr double kPhi = 0.97; +// Time constant for max frame size filter. +constexpr double kPsi = 0.9999; +// Default constants for percentile frame size filter. +constexpr double kDefaultMaxFrameSizePercentile = 0.95; +constexpr int kDefaultFrameSizeWindow = 30 * 10; + +// Outlier rejection constants. +constexpr double kNumStdDevDelayClamp = 3.5; +constexpr double kNumStdDevDelayOutlier = 15.0; +constexpr double kNumStdDevSizeOutlier = 3.0; +constexpr double kCongestionRejectionFactor = -0.25; + +// Rampup constant for deviation noise filters. +constexpr size_t kAlphaCountMax = 400; + +// Noise threshold constants. +// ~Less than 1% chance (look up in normal distribution table)... +constexpr double kNoiseStdDevs = 2.33; +// ...of getting 30 ms freezes +constexpr double kNoiseStdDevOffset = 30.0; + +// Jitter estimate clamping limits. +constexpr TimeDelta kMinJitterEstimate = TimeDelta::Millis(1); +constexpr TimeDelta kMaxJitterEstimate = TimeDelta::Seconds(10); + +// A constant describing the delay from the jitter buffer to the delay on the +// receiving side which is not accounted for by the jitter buffer nor the +// decoding delay estimate. +constexpr TimeDelta OPERATING_SYSTEM_JITTER = TimeDelta::Millis(10); + +// Time constant for reseting the NACK count. +constexpr TimeDelta kNackCountTimeout = TimeDelta::Seconds(60); + +// RTT mult activation. +constexpr size_t kNackLimit = 3; + +// Frame rate estimate clamping limit. +constexpr Frequency kMaxFramerateEstimate = Frequency::Hertz(200); + +} // namespace + +constexpr char JitterEstimator::Config::kFieldTrialsKey[]; + +JitterEstimator::Config JitterEstimator::Config::ParseAndValidate( + absl::string_view field_trial) { + Config config; + config.Parser()->Parse(field_trial); + + // The `MovingPercentileFilter` RTC_CHECKs on the validity of the + // percentile and window length, so we'd better validate the field trial + // provided values here. + if (config.max_frame_size_percentile) { + double original = *config.max_frame_size_percentile; + config.max_frame_size_percentile = std::min(std::max(0.0, original), 1.0); + if (config.max_frame_size_percentile != original) { + RTC_LOG(LS_ERROR) << "Skipping invalid max_frame_size_percentile=" + << original; + } + } + if (config.frame_size_window && config.frame_size_window < 1) { + RTC_LOG(LS_ERROR) << "Skipping invalid frame_size_window=" + << *config.frame_size_window; + config.frame_size_window = 1; + } + + // General sanity checks. 
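  // The values checked below come from a field trial string of the form
  // (hypothetical example, using keys registered in Config::Parser()):
  //
  //   WebRTC-JitterEstimatorConfig/num_stddev_delay_outlier:10,estimate_noise_when_congested:false/
  //
  // i.e. a comma-separated key:value list looked up under
  // Config::kFieldTrialsKey; negative values are reset to 0 here.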
+ if (config.num_stddev_delay_clamp && config.num_stddev_delay_clamp < 0.0) { + RTC_LOG(LS_ERROR) << "Skipping invalid num_stddev_delay_clamp=" + << *config.num_stddev_delay_clamp; + config.num_stddev_delay_clamp = 0.0; + } + if (config.num_stddev_delay_outlier && + config.num_stddev_delay_outlier < 0.0) { + RTC_LOG(LS_ERROR) << "Skipping invalid num_stddev_delay_outlier=" + << *config.num_stddev_delay_outlier; + config.num_stddev_delay_outlier = 0.0; + } + if (config.num_stddev_size_outlier && config.num_stddev_size_outlier < 0.0) { + RTC_LOG(LS_ERROR) << "Skipping invalid num_stddev_size_outlier=" + << *config.num_stddev_size_outlier; + config.num_stddev_size_outlier = 0.0; + } + + return config; +} + +JitterEstimator::JitterEstimator(Clock* clock, + const FieldTrialsView& field_trials) + : config_(Config::ParseAndValidate( + field_trials.Lookup(Config::kFieldTrialsKey))), + avg_frame_size_median_bytes_(static_cast( + config_.frame_size_window.value_or(kDefaultFrameSizeWindow))), + max_frame_size_bytes_percentile_( + config_.max_frame_size_percentile.value_or( + kDefaultMaxFrameSizePercentile), + static_cast( + config_.frame_size_window.value_or(kDefaultFrameSizeWindow))), + fps_counter_(30), // TODO(sprang): Use an estimator with limit based + // on time, rather than number of samples. + clock_(clock) { + Reset(); +} + +JitterEstimator::~JitterEstimator() = default; + +// Resets the JitterEstimate. +void JitterEstimator::Reset() { + avg_frame_size_bytes_ = kInitialAvgAndMaxFrameSizeBytes; + max_frame_size_bytes_ = kInitialAvgAndMaxFrameSizeBytes; + var_frame_size_bytes2_ = 100; + avg_frame_size_median_bytes_.Reset(); + max_frame_size_bytes_percentile_.Reset(); + last_update_time_ = absl::nullopt; + prev_estimate_ = absl::nullopt; + prev_frame_size_ = absl::nullopt; + avg_noise_ms_ = 0.0; + var_noise_ms2_ = 4.0; + alpha_count_ = 1; + filter_jitter_estimate_ = TimeDelta::Zero(); + latest_nack_ = Timestamp::Zero(); + nack_count_ = 0; + startup_frame_size_sum_bytes_ = 0; + startup_frame_size_count_ = 0; + startup_count_ = 0; + rtt_filter_.Reset(); + fps_counter_.Reset(); + + kalman_filter_ = FrameDelayVariationKalmanFilter(); +} + +// Updates the estimates with the new measurements. +void JitterEstimator::UpdateEstimate(TimeDelta frame_delay, + DataSize frame_size) { + if (frame_size.IsZero()) { + return; + } + // Can't use DataSize since this can be negative. + double delta_frame_bytes = + frame_size.bytes() - prev_frame_size_.value_or(DataSize::Zero()).bytes(); + if (startup_frame_size_count_ < kFramesUntilSizeFiltering) { + startup_frame_size_sum_bytes_ += frame_size.bytes(); + startup_frame_size_count_++; + } else if (startup_frame_size_count_ == kFramesUntilSizeFiltering) { + // Give the frame size filter. + avg_frame_size_bytes_ = startup_frame_size_sum_bytes_ / + static_cast(startup_frame_size_count_); + startup_frame_size_count_++; + } + + double avg_frame_size_bytes = + kPhi * avg_frame_size_bytes_ + (1 - kPhi) * frame_size.bytes(); + double deviation_size_bytes = 2 * sqrt(var_frame_size_bytes2_); + if (frame_size.bytes() < avg_frame_size_bytes_ + deviation_size_bytes) { + // Only update the average frame size if this sample wasn't a key frame. + avg_frame_size_bytes_ = avg_frame_size_bytes; + } + + double delta_bytes = frame_size.bytes() - avg_frame_size_bytes; + var_frame_size_bytes2_ = std::max( + kPhi * var_frame_size_bytes2_ + (1 - kPhi) * (delta_bytes * delta_bytes), + 1.0); + + // Update non-linear IIR estimate of max frame size. 
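  // For intuition: with kPhi = 0.97 the average-size filter has an effective
  // memory of roughly 1 / (1 - kPhi) ≈ 33 frames (about one second at 30 fps),
  // while kPsi = 0.9999 lets the max estimate decay by only ~1% over 100
  // frames unless a larger frame arrives.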
+ max_frame_size_bytes_ = + std::max(kPsi * max_frame_size_bytes_, frame_size.bytes()); + + // Maybe update percentile estimates of frame sizes. + if (config_.avg_frame_size_median) { + avg_frame_size_median_bytes_.Insert(frame_size.bytes()); + } + if (config_.MaxFrameSizePercentileEnabled()) { + max_frame_size_bytes_percentile_.Insert(frame_size.bytes()); + } + + if (!prev_frame_size_) { + prev_frame_size_ = frame_size; + return; + } + prev_frame_size_ = frame_size; + + // Cap frame_delay based on the current time deviation noise. + double num_stddev_delay_clamp = + config_.num_stddev_delay_clamp.value_or(kNumStdDevDelayClamp); + TimeDelta max_time_deviation = + TimeDelta::Millis(num_stddev_delay_clamp * sqrt(var_noise_ms2_) + 0.5); + frame_delay.Clamp(-max_time_deviation, max_time_deviation); + + double delay_deviation_ms = + frame_delay.ms() - + kalman_filter_.GetFrameDelayVariationEstimateTotal(delta_frame_bytes); + + // Outlier rejection: these conditions depend on filtered versions of the + // delay and frame size _means_, respectively, together with a configurable + // number of standard deviations. If a sample is large with respect to the + // corresponding mean and dispersion (defined by the number of + // standard deviations and the sample standard deviation), it is deemed an + // outlier. This "empirical rule" is further described in + // https://en.wikipedia.org/wiki/68-95-99.7_rule. Note that neither of the + // estimated means are true sample means, which implies that they are possibly + // not normally distributed. Hence, this rejection method is just a heuristic. + double num_stddev_delay_outlier = + config_.num_stddev_delay_outlier.value_or(kNumStdDevDelayOutlier); + // Delay outlier rejection is two-sided. + bool abs_delay_is_not_outlier = + fabs(delay_deviation_ms) < + num_stddev_delay_outlier * sqrt(var_noise_ms2_); + // The reasoning above means, in particular, that we should use the sample + // mean-style `avg_frame_size_bytes_` estimate, as opposed to the + // median-filtered version, even if configured to use latter for the + // calculation in `CalculateEstimate()`. + // Size outlier rejection is one-sided. + double num_stddev_size_outlier = + config_.num_stddev_size_outlier.value_or(kNumStdDevSizeOutlier); + bool size_is_positive_outlier = + frame_size.bytes() > + avg_frame_size_bytes_ + + num_stddev_size_outlier * sqrt(var_frame_size_bytes2_); + + // Only update the Kalman filter if the sample is not considered an extreme + // outlier. Even if it is an extreme outlier from a delay point of view, if + // the frame size also is large the deviation is probably due to an incorrect + // line slope. + if (abs_delay_is_not_outlier || size_is_positive_outlier) { + // Prevent updating with frames which have been congested by a large frame, + // and therefore arrives almost at the same time as that frame. + // This can occur when we receive a large frame (key frame) which has been + // delayed. The next frame is of normal size (delta frame), and thus deltaFS + // will be << 0. This removes all frame samples which arrives after a key + // frame. + double congestion_rejection_factor = + config_.congestion_rejection_factor.value_or( + kCongestionRejectionFactor); + double filtered_max_frame_size_bytes = + config_.MaxFrameSizePercentileEnabled() + ? 
max_frame_size_bytes_percentile_.GetFilteredValue() + : max_frame_size_bytes_; + bool is_not_congested = + delta_frame_bytes > + congestion_rejection_factor * filtered_max_frame_size_bytes; + + if (is_not_congested || config_.estimate_noise_when_congested) { + // Update the variance of the deviation from the line given by the Kalman + // filter. + EstimateRandomJitter(delay_deviation_ms); + } + if (is_not_congested) { + // Neither a delay outlier nor a congested frame, so we can safely update + // the Kalman filter with the sample. + kalman_filter_.PredictAndUpdate(frame_delay.ms(), delta_frame_bytes, + filtered_max_frame_size_bytes, + var_noise_ms2_); + } + } else { + // Delay outliers affect the noise estimate through a value equal to the + // outlier rejection threshold. + double num_stddev = (delay_deviation_ms >= 0) ? num_stddev_delay_outlier + : -num_stddev_delay_outlier; + EstimateRandomJitter(num_stddev * sqrt(var_noise_ms2_)); + } + // Post process the total estimated jitter + if (startup_count_ >= kFrameProcessingStartupCount) { + PostProcessEstimate(); + } else { + startup_count_++; + } +} + +// Updates the nack/packet ratio. +void JitterEstimator::FrameNacked() { + if (nack_count_ < kNackLimit) { + nack_count_++; + } + latest_nack_ = clock_->CurrentTime(); +} + +void JitterEstimator::UpdateRtt(TimeDelta rtt) { + rtt_filter_.Update(rtt); +} + +JitterEstimator::Config JitterEstimator::GetConfigForTest() const { + return config_; +} + +// Estimates the random jitter by calculating the variance of the sample +// distance from the line given by the Kalman filter. +void JitterEstimator::EstimateRandomJitter(double d_dT) { + Timestamp now = clock_->CurrentTime(); + if (last_update_time_.has_value()) { + fps_counter_.AddSample((now - *last_update_time_).us()); + } + last_update_time_ = now; + + if (alpha_count_ == 0) { + RTC_DCHECK_NOTREACHED(); + return; + } + double alpha = + static_cast(alpha_count_ - 1) / static_cast(alpha_count_); + alpha_count_++; + if (alpha_count_ > kAlphaCountMax) + alpha_count_ = kAlphaCountMax; + + // In order to avoid a low frame rate stream to react slower to changes, + // scale the alpha weight relative a 30 fps stream. + Frequency fps = GetFrameRate(); + if (fps > Frequency::Zero()) { + constexpr Frequency k30Fps = Frequency::Hertz(30); + double rate_scale = k30Fps / fps; + // At startup, there can be a lot of noise in the fps estimate. + // Interpolate rate_scale linearly, from 1.0 at sample #1, to 30.0 / fps + // at sample #kFrameProcessingStartupCount. + if (alpha_count_ < kFrameProcessingStartupCount) { + rate_scale = (alpha_count_ * rate_scale + + (kFrameProcessingStartupCount - alpha_count_)) / + kFrameProcessingStartupCount; + } + alpha = pow(alpha, rate_scale); + } + + double avg_noise_ms = alpha * avg_noise_ms_ + (1 - alpha) * d_dT; + double var_noise_ms2 = alpha * var_noise_ms2_ + (1 - alpha) * + (d_dT - avg_noise_ms_) * + (d_dT - avg_noise_ms_); + avg_noise_ms_ = avg_noise_ms; + var_noise_ms2_ = var_noise_ms2; + if (var_noise_ms2_ < 1.0) { + // The variance should never be zero, since we might get stuck and consider + // all samples as outliers. + var_noise_ms2_ = 1.0; + } +} + +double JitterEstimator::NoiseThreshold() const { + double noise_threshold_ms = + kNoiseStdDevs * sqrt(var_noise_ms2_) - kNoiseStdDevOffset; + if (noise_threshold_ms < 1.0) { + noise_threshold_ms = 1.0; + } + return noise_threshold_ms; +} + +// Calculates the current jitter estimate from the filtered estimates. 
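// Informally: the estimate is the extra transmission time the Kalman filter
// predicts for a worst-case-sized frame (filtered max size minus filtered
// average size), plus the noise threshold derived from the random-jitter
// variance.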
+TimeDelta JitterEstimator::CalculateEstimate() { + // Using median- and percentile-filtered versions of the frame sizes may be + // more robust than using sample mean-style estimates. + double filtered_avg_frame_size_bytes = + config_.avg_frame_size_median + ? avg_frame_size_median_bytes_.GetFilteredValue() + : avg_frame_size_bytes_; + double filtered_max_frame_size_bytes = + config_.MaxFrameSizePercentileEnabled() + ? max_frame_size_bytes_percentile_.GetFilteredValue() + : max_frame_size_bytes_; + double worst_case_frame_size_deviation_bytes = + filtered_max_frame_size_bytes - filtered_avg_frame_size_bytes; + double ret_ms = kalman_filter_.GetFrameDelayVariationEstimateSizeBased( + worst_case_frame_size_deviation_bytes) + + NoiseThreshold(); + TimeDelta ret = TimeDelta::Millis(ret_ms); + + // A very low estimate (or negative) is neglected. + if (ret < kMinJitterEstimate) { + ret = prev_estimate_.value_or(kMinJitterEstimate); + // Sanity check to make sure that no other method has set `prev_estimate_` + // to a value lower than `kMinJitterEstimate`. + RTC_DCHECK_GE(ret, kMinJitterEstimate); + } else if (ret > kMaxJitterEstimate) { // Sanity + ret = kMaxJitterEstimate; + } + prev_estimate_ = ret; + return ret; +} + +void JitterEstimator::PostProcessEstimate() { + filter_jitter_estimate_ = CalculateEstimate(); +} + +// Returns the current filtered estimate if available, +// otherwise tries to calculate an estimate. +TimeDelta JitterEstimator::GetJitterEstimate( + double rtt_multiplier, + absl::optional rtt_mult_add_cap) { + TimeDelta jitter = CalculateEstimate() + OPERATING_SYSTEM_JITTER; + Timestamp now = clock_->CurrentTime(); + + if (now - latest_nack_ > kNackCountTimeout) + nack_count_ = 0; + + if (filter_jitter_estimate_ > jitter) + jitter = filter_jitter_estimate_; + if (nack_count_ >= kNackLimit) { + if (rtt_mult_add_cap.has_value()) { + jitter += std::min(rtt_filter_.Rtt() * rtt_multiplier, + rtt_mult_add_cap.value()); + } else { + jitter += rtt_filter_.Rtt() * rtt_multiplier; + } + } + + static const Frequency kJitterScaleLowThreshold = Frequency::Hertz(5); + static const Frequency kJitterScaleHighThreshold = Frequency::Hertz(10); + Frequency fps = GetFrameRate(); + // Ignore jitter for very low fps streams. + if (fps < kJitterScaleLowThreshold) { + if (fps.IsZero()) { + return std::max(TimeDelta::Zero(), jitter); + } + return TimeDelta::Zero(); + } + + // Semi-low frame rate; scale by factor linearly interpolated from 0.0 at + // kJitterScaleLowThreshold to 1.0 at kJitterScaleHighThreshold. + if (fps < kJitterScaleHighThreshold) { + jitter = (1.0 / (kJitterScaleHighThreshold - kJitterScaleLowThreshold)) * + (fps - kJitterScaleLowThreshold) * jitter; + } + + return std::max(TimeDelta::Zero(), jitter); +} + +Frequency JitterEstimator::GetFrameRate() const { + TimeDelta mean_frame_period = TimeDelta::Micros(fps_counter_.ComputeMean()); + if (mean_frame_period <= TimeDelta::Zero()) + return Frequency::Zero(); + + Frequency fps = 1 / mean_frame_period; + // Sanity check. + RTC_DCHECK_GE(fps, Frequency::Zero()); + return std::min(fps, kMaxFramerateEstimate); +} +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/jitter_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/jitter_estimator.h new file mode 100644 index 0000000000..a89a4bf1fd --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/jitter_estimator.h @@ -0,0 +1,218 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_TIMING_JITTER_ESTIMATOR_H_ +#define MODULES_VIDEO_CODING_TIMING_JITTER_ESTIMATOR_H_ + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/field_trials_view.h" +#include "api/units/data_size.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/video_coding/timing/frame_delay_variation_kalman_filter.h" +#include "modules/video_coding/timing/rtt_filter.h" +#include "rtc_base/experiments/struct_parameters_parser.h" +#include "rtc_base/numerics/moving_percentile_filter.h" +#include "rtc_base/rolling_accumulator.h" + +namespace webrtc { + +class Clock; + +class JitterEstimator { + public: + // Configuration struct for statically overriding some constants and + // behaviour, configurable through field trials. + struct Config { + static constexpr char kFieldTrialsKey[] = "WebRTC-JitterEstimatorConfig"; + + // Parses a field trial string and validates the values. + static Config ParseAndValidate(absl::string_view field_trial); + + std::unique_ptr Parser() { + // clang-format off + return StructParametersParser::Create( + "avg_frame_size_median", &avg_frame_size_median, + "max_frame_size_percentile", &max_frame_size_percentile, + "frame_size_window", &frame_size_window, + "num_stddev_delay_clamp", &num_stddev_delay_clamp, + "num_stddev_delay_outlier", &num_stddev_delay_outlier, + "num_stddev_size_outlier", &num_stddev_size_outlier, + "congestion_rejection_factor", &congestion_rejection_factor, + "estimate_noise_when_congested", &estimate_noise_when_congested); + // clang-format on + } + + bool MaxFrameSizePercentileEnabled() const { + return max_frame_size_percentile.has_value(); + } + + // If true, the "avg" frame size is calculated as the median over a window + // of recent frame sizes. + bool avg_frame_size_median = false; + + // If set, the "max" frame size is calculated as this percentile over a + // window of recent frame sizes. + absl::optional max_frame_size_percentile = absl::nullopt; + + // The length of the percentile filters' window, in number of frames. + absl::optional frame_size_window = absl::nullopt; + + // The incoming frame delay variation samples are clamped to be at most + // this number of standard deviations away from zero. + // + // Increasing this value clamps fewer samples. + absl::optional num_stddev_delay_clamp = absl::nullopt; + + // A (relative) frame delay variation sample is an outlier if its absolute + // deviation from the Kalman filter model falls outside this number of + // sample standard deviations. + // + // Increasing this value rejects fewer samples. + absl::optional num_stddev_delay_outlier = absl::nullopt; + + // An (absolute) frame size sample is an outlier if its positive deviation + // from the estimated average frame size falls outside this number of sample + // standard deviations. + // + // Increasing this value rejects fewer samples. 
+ absl::optional num_stddev_size_outlier = absl::nullopt; + + // A (relative) frame size variation sample is deemed "congested", and is + // thus rejected, if its value is less than this factor times the estimated + // max frame size. + // + // Decreasing this value rejects fewer samples. + absl::optional congestion_rejection_factor = absl::nullopt; + + // If true, the noise estimate will be updated for congestion rejected + // frames. This is currently enabled by default, but that may not be optimal + // since congested frames typically are not spread around the line with + // Gaussian noise. (This is the whole reason for the congestion rejection!) + bool estimate_noise_when_congested = true; + }; + + JitterEstimator(Clock* clock, const FieldTrialsView& field_trials); + JitterEstimator(const JitterEstimator&) = delete; + JitterEstimator& operator=(const JitterEstimator&) = delete; + ~JitterEstimator(); + + // Resets the estimate to the initial state. + void Reset(); + + // Updates the jitter estimate with the new data. + // + // Input: + // - frame_delay : Delay-delta calculated by UTILDelayEstimate. + // - frame_size : Frame size of the current frame. + void UpdateEstimate(TimeDelta frame_delay, DataSize frame_size); + + // Returns the current jitter estimate and adds an RTT dependent term in cases + // of retransmission. + // Input: + // - rtt_multiplier : RTT param multiplier (when applicable). + // - rtt_mult_add_cap : Multiplier cap from the RTTMultExperiment. + // + // Return value : Jitter estimate. + TimeDelta GetJitterEstimate(double rtt_multiplier, + absl::optional rtt_mult_add_cap); + + // Updates the nack counter. + void FrameNacked(); + + // Updates the RTT filter. + // + // Input: + // - rtt : Round trip time. + void UpdateRtt(TimeDelta rtt); + + // Returns the configuration. Only to be used by unit tests. + Config GetConfigForTest() const; + + private: + // Updates the random jitter estimate, i.e. the variance of the time + // deviations from the line given by the Kalman filter. + // + // Input: + // - d_dT : The deviation from the kalman estimate. + void EstimateRandomJitter(double d_dT); + + double NoiseThreshold() const; + + // Calculates the current jitter estimate. + // + // Return value : The current jitter estimate. + TimeDelta CalculateEstimate(); + + // Post process the calculated estimate. + void PostProcessEstimate(); + + // Returns the estimated incoming frame rate. + Frequency GetFrameRate() const; + + // Configuration that may override some internals. + const Config config_; + + // Filters the {frame_delay_delta, frame_size_delta} measurements through + // a linear Kalman filter. + FrameDelayVariationKalmanFilter kalman_filter_; + + // TODO(bugs.webrtc.org/14381): Update `avg_frame_size_bytes_` to DataSize + // when api/units have sufficient precision. + double avg_frame_size_bytes_; // Average frame size + double var_frame_size_bytes2_; // Frame size variance. Unit is bytes^2. + // Largest frame size received (descending with a factor kPsi). + // Used by default. + // TODO(bugs.webrtc.org/14381): Update `max_frame_size_bytes_` to DataSize + // when api/units have sufficient precision. + double max_frame_size_bytes_; + // Percentile frame sized received (over a window). Only used if configured. + MovingMedianFilter avg_frame_size_median_bytes_; + MovingPercentileFilter max_frame_size_bytes_percentile_; + // TODO(bugs.webrtc.org/14381): Update `startup_frame_size_sum_bytes_` to + // DataSize when api/units have sufficient precision. 
+ double startup_frame_size_sum_bytes_; + size_t startup_frame_size_count_; + + absl::optional last_update_time_; + // The previously returned jitter estimate + absl::optional prev_estimate_; + // Frame size of the previous frame + absl::optional prev_frame_size_; + // Average of the random jitter. Unit is milliseconds. + double avg_noise_ms_; + // Variance of the time-deviation from the line. Unit is milliseconds^2. + double var_noise_ms2_; + size_t alpha_count_; + // The filtered sum of jitter estimates + TimeDelta filter_jitter_estimate_ = TimeDelta::Zero(); + + size_t startup_count_; + // Time when the latest nack was seen + Timestamp latest_nack_ = Timestamp::Zero(); + // Keeps track of the number of nacks received, but never goes above + // kNackLimit. + size_t nack_count_; + RttFilter rtt_filter_; + + // Tracks frame rates in microseconds. + rtc::RollingAccumulator fps_counter_; + Clock* clock_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_TIMING_JITTER_ESTIMATOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/rtt_filter.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/rtt_filter.cc new file mode 100644 index 0000000000..6962224d61 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/rtt_filter.cc @@ -0,0 +1,161 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/timing/rtt_filter.h" + +#include +#include +#include + +#include + +#include "absl/algorithm/container.h" +#include "absl/container/inlined_vector.h" +#include "api/units/time_delta.h" + +namespace webrtc { + +namespace { + +constexpr TimeDelta kMaxRtt = TimeDelta::Seconds(3); +constexpr uint32_t kFilterFactorMax = 35; +constexpr double kJumpStddev = 2.5; +constexpr double kDriftStdDev = 3.5; + +} // namespace + +RttFilter::RttFilter() + : avg_rtt_(TimeDelta::Zero()), + var_rtt_(0), + max_rtt_(TimeDelta::Zero()), + jump_buf_(kMaxDriftJumpCount, TimeDelta::Zero()), + drift_buf_(kMaxDriftJumpCount, TimeDelta::Zero()) { + Reset(); +} + +void RttFilter::Reset() { + got_non_zero_update_ = false; + avg_rtt_ = TimeDelta::Zero(); + var_rtt_ = 0; + max_rtt_ = TimeDelta::Zero(); + filt_fact_count_ = 1; + absl::c_fill(jump_buf_, TimeDelta::Zero()); + absl::c_fill(drift_buf_, TimeDelta::Zero()); +} + +void RttFilter::Update(TimeDelta rtt) { + if (!got_non_zero_update_) { + if (rtt.IsZero()) { + return; + } + got_non_zero_update_ = true; + } + + // Sanity check + if (rtt > kMaxRtt) { + rtt = kMaxRtt; + } + + double filt_factor = 0; + if (filt_fact_count_ > 1) { + filt_factor = static_cast(filt_fact_count_ - 1) / filt_fact_count_; + } + filt_fact_count_++; + if (filt_fact_count_ > kFilterFactorMax) { + // This prevents filt_factor from going above + // (_filt_fact_max - 1) / filt_fact_max_, + // e.g., filt_fact_max_ = 50 => filt_factor = 49/50 = 0.98 + filt_fact_count_ = kFilterFactorMax; + } + TimeDelta old_avg = avg_rtt_; + int64_t old_var = var_rtt_; + avg_rtt_ = filt_factor * avg_rtt_ + (1 - filt_factor) * rtt; + int64_t delta_ms = (rtt - avg_rtt_).ms(); + var_rtt_ = filt_factor * var_rtt_ + (1 - filt_factor) * (delta_ms * delta_ms); + max_rtt_ = std::max(rtt, 
max_rtt_); + if (!JumpDetection(rtt) || !DriftDetection(rtt)) { + // In some cases we don't want to update the statistics + avg_rtt_ = old_avg; + var_rtt_ = old_var; + } +} + +bool RttFilter::JumpDetection(TimeDelta rtt) { + TimeDelta diff_from_avg = avg_rtt_ - rtt; + // Unit of var_rtt_ is ms^2. + TimeDelta jump_threshold = TimeDelta::Millis(kJumpStddev * sqrt(var_rtt_)); + if (diff_from_avg.Abs() > jump_threshold) { + bool positive_diff = diff_from_avg >= TimeDelta::Zero(); + if (!jump_buf_.empty() && positive_diff != last_jump_positive_) { + // Since the signs differ the samples currently + // in the buffer is useless as they represent a + // jump in a different direction. + jump_buf_.clear(); + } + if (jump_buf_.size() < kMaxDriftJumpCount) { + // Update the buffer used for the short time statistics. + // The sign of the diff is used for updating the counter since + // we want to use the same buffer for keeping track of when + // the RTT jumps down and up. + jump_buf_.push_back(rtt); + last_jump_positive_ = positive_diff; + } + if (jump_buf_.size() >= kMaxDriftJumpCount) { + // Detected an RTT jump + ShortRttFilter(jump_buf_); + filt_fact_count_ = kMaxDriftJumpCount + 1; + jump_buf_.clear(); + } else { + return false; + } + } else { + jump_buf_.clear(); + } + return true; +} + +bool RttFilter::DriftDetection(TimeDelta rtt) { + // Unit of sqrt of var_rtt_ is ms. + TimeDelta drift_threshold = TimeDelta::Millis(kDriftStdDev * sqrt(var_rtt_)); + if (max_rtt_ - avg_rtt_ > drift_threshold) { + if (drift_buf_.size() < kMaxDriftJumpCount) { + // Update the buffer used for the short time statistics. + drift_buf_.push_back(rtt); + } + if (drift_buf_.size() >= kMaxDriftJumpCount) { + // Detected an RTT drift + ShortRttFilter(drift_buf_); + filt_fact_count_ = kMaxDriftJumpCount + 1; + drift_buf_.clear(); + } + } else { + drift_buf_.clear(); + } + return true; +} + +void RttFilter::ShortRttFilter(const BufferList& buf) { + RTC_DCHECK_EQ(buf.size(), kMaxDriftJumpCount); + max_rtt_ = TimeDelta::Zero(); + avg_rtt_ = TimeDelta::Zero(); + for (const TimeDelta& rtt : buf) { + if (rtt > max_rtt_) { + max_rtt_ = rtt; + } + avg_rtt_ += rtt; + } + avg_rtt_ = avg_rtt_ / static_cast(buf.size()); +} + +TimeDelta RttFilter::Rtt() const { + return max_rtt_; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/rtt_filter.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/rtt_filter.h new file mode 100644 index 0000000000..b8700b23ee --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/rtt_filter.h @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_TIMING_RTT_FILTER_H_ +#define MODULES_VIDEO_CODING_TIMING_RTT_FILTER_H_ + +#include + +#include "absl/container/inlined_vector.h" +#include "api/units/time_delta.h" + +namespace webrtc { + +class RttFilter { + public: + RttFilter(); + RttFilter(const RttFilter&) = delete; + RttFilter& operator=(const RttFilter&) = delete; + + // Resets the filter. + void Reset(); + // Updates the filter with a new sample. + void Update(TimeDelta rtt); + // A getter function for the current RTT level. 
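  // Note that, per the definition in rtt_filter.cc above, this returns the
  // filtered maximum RTT rather than the running average, so the RTT term
  // added in JitterEstimator::GetJitterEstimate() errs on the conservative
  // side when the RTT spikes.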
+ TimeDelta Rtt() const; + + private: + // The size of the drift and jump memory buffers + // and thus also the detection threshold for these + // detectors in number of samples. + static constexpr int kMaxDriftJumpCount = 5; + using BufferList = absl::InlinedVector; + + // Detects RTT jumps by comparing the difference between + // samples and average to the standard deviation. + // Returns true if the long time statistics should be updated + // and false otherwise + bool JumpDetection(TimeDelta rtt); + + // Detects RTT drifts by comparing the difference between + // max and average to the standard deviation. + // Returns true if the long time statistics should be updated + // and false otherwise + bool DriftDetection(TimeDelta rtt); + + // Computes the short time average and maximum of the vector buf. + void ShortRttFilter(const BufferList& buf); + + bool got_non_zero_update_; + TimeDelta avg_rtt_; + // Variance units are TimeDelta^2. Store as ms^2. + int64_t var_rtt_; + TimeDelta max_rtt_; + uint32_t filt_fact_count_; + bool last_jump_positive_ = false; + BufferList jump_buf_; + BufferList drift_buf_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_TIMING_RTT_FILTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/timestamp_extrapolator.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/timestamp_extrapolator.cc new file mode 100644 index 0000000000..d13fa7dd8c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/timestamp_extrapolator.cc @@ -0,0 +1,162 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/timing/timestamp_extrapolator.h" + +#include + +#include "absl/types/optional.h" +#include "modules/include/module_common_types_public.h" + +namespace webrtc { + +namespace { + +constexpr double kLambda = 1; +constexpr uint32_t kStartUpFilterDelayInPackets = 2; +constexpr double kAlarmThreshold = 60e3; +// in timestamp ticks, i.e. 15 ms +constexpr double kAccDrift = 6600; +constexpr double kAccMaxError = 7000; +constexpr double kP11 = 1e10; + +} // namespace + +TimestampExtrapolator::TimestampExtrapolator(Timestamp start) + : start_(Timestamp::Zero()), + prev_(Timestamp::Zero()), + packet_count_(0), + detector_accumulator_pos_(0), + detector_accumulator_neg_(0) { + Reset(start); +} + +void TimestampExtrapolator::Reset(Timestamp start) { + start_ = start; + prev_ = start_; + first_unwrapped_timestamp_ = absl::nullopt; + w_[0] = 90.0; + w_[1] = 0; + p_[0][0] = 1; + p_[1][1] = kP11; + p_[0][1] = p_[1][0] = 0; + unwrapper_ = TimestampUnwrapper(); + packet_count_ = 0; + detector_accumulator_pos_ = 0; + detector_accumulator_neg_ = 0; +} + +void TimestampExtrapolator::Update(Timestamp now, uint32_t ts90khz) { + if (now - prev_ > TimeDelta::Seconds(10)) { + // Ten seconds without a complete frame. 
+    // Reset the extrapolator
+    Reset(now);
+  } else {
+    prev_ = now;
+  }
+
+  // Remove offset to prevent badly scaled matrices
+  const TimeDelta offset = now - start_;
+  double t_ms = offset.ms();
+
+  int64_t unwrapped_ts90khz = unwrapper_.Unwrap(ts90khz);
+
+  if (!first_unwrapped_timestamp_) {
+    // Make an initial guess of the offset,
+    // should be almost correct since t_ms - start
+    // should about zero at this time.
+    w_[1] = -w_[0] * t_ms;
+    first_unwrapped_timestamp_ = unwrapped_ts90khz;
+  }
+
+  double residual =
+      (static_cast<double>(unwrapped_ts90khz) - *first_unwrapped_timestamp_) -
+      t_ms * w_[0] - w_[1];
+  if (DelayChangeDetection(residual) &&
+      packet_count_ >= kStartUpFilterDelayInPackets) {
+    // A sudden change of average network delay has been detected.
+    // Force the filter to adjust its offset parameter by changing
+    // the offset uncertainty. Don't do this during startup.
+    p_[1][1] = kP11;
+  }
+
+  if (prev_unwrapped_timestamp_ &&
+      unwrapped_ts90khz < prev_unwrapped_timestamp_) {
+    // Drop reordered frames.
+    return;
+  }
+
+  // T = [t(k) 1]';
+  // that = T'*w;
+  // K = P*T/(lambda + T'*P*T);
+  double K[2];
+  K[0] = p_[0][0] * t_ms + p_[0][1];
+  K[1] = p_[1][0] * t_ms + p_[1][1];
+  double TPT = kLambda + t_ms * K[0] + K[1];
+  K[0] /= TPT;
+  K[1] /= TPT;
+  // w = w + K*(ts(k) - that);
+  w_[0] = w_[0] + K[0] * residual;
+  w_[1] = w_[1] + K[1] * residual;
+  // P = 1/lambda*(P - K*T'*P);
+  double p00 =
+      1 / kLambda * (p_[0][0] - (K[0] * t_ms * p_[0][0] + K[0] * p_[1][0]));
+  double p01 =
+      1 / kLambda * (p_[0][1] - (K[0] * t_ms * p_[0][1] + K[0] * p_[1][1]));
+  p_[1][0] =
+      1 / kLambda * (p_[1][0] - (K[1] * t_ms * p_[0][0] + K[1] * p_[1][0]));
+  p_[1][1] =
+      1 / kLambda * (p_[1][1] - (K[1] * t_ms * p_[0][1] + K[1] * p_[1][1]));
+  p_[0][0] = p00;
+  p_[0][1] = p01;
+  prev_unwrapped_timestamp_ = unwrapped_ts90khz;
+  if (packet_count_ < kStartUpFilterDelayInPackets) {
+    packet_count_++;
+  }
+}
+
+absl::optional<Timestamp> TimestampExtrapolator::ExtrapolateLocalTime(
+    uint32_t timestamp90khz) const {
+  int64_t unwrapped_ts90khz = unwrapper_.UnwrapWithoutUpdate(timestamp90khz);
+
+  if (!first_unwrapped_timestamp_) {
+    return absl::nullopt;
+  } else if (packet_count_ < kStartUpFilterDelayInPackets) {
+    constexpr double kRtpTicksPerMs = 90;
+    TimeDelta diff = TimeDelta::Millis(
+        (unwrapped_ts90khz - *prev_unwrapped_timestamp_) / kRtpTicksPerMs);
+    return prev_ + diff;
+  } else if (w_[0] < 1e-3) {
+    return start_;
+  } else {
+    double timestampDiff = unwrapped_ts90khz - *first_unwrapped_timestamp_;
+    auto diff_ms = static_cast<int64_t>((timestampDiff - w_[1]) / w_[0] + 0.5);
+    return start_ + TimeDelta::Millis(diff_ms);
+  }
+}
+
+bool TimestampExtrapolator::DelayChangeDetection(double error) {
+  // CUSUM detection of sudden delay changes
+  error = (error > 0) ? std::min(error, kAccMaxError)
+                      : std::max(error, -kAccMaxError);
+  detector_accumulator_pos_ =
+      std::max(detector_accumulator_pos_ + error - kAccDrift, double{0});
+  detector_accumulator_neg_ =
+      std::min(detector_accumulator_neg_ + error + kAccDrift, double{0});
+  if (detector_accumulator_pos_ > kAlarmThreshold ||
+      detector_accumulator_neg_ < -kAlarmThreshold) {
+    // Alarm
+    detector_accumulator_pos_ = detector_accumulator_neg_ = 0;
+    return true;
+  }
+  return false;
+}
+
+}  // namespace webrtc
diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/timestamp_extrapolator.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/timestamp_extrapolator.h
new file mode 100644
index 0000000000..b7162ed281
--- /dev/null
+++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/timestamp_extrapolator.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CODING_TIMING_TIMESTAMP_EXTRAPOLATOR_H_
+#define MODULES_VIDEO_CODING_TIMING_TIMESTAMP_EXTRAPOLATOR_H_
+
+#include <stdint.h>
+
+#include "absl/types/optional.h"
+#include "api/units/timestamp.h"
+#include "modules/include/module_common_types_public.h"
+
+namespace webrtc {
+
+// Not thread safe.
+class TimestampExtrapolator {
+ public:
+  explicit TimestampExtrapolator(Timestamp start);
+  void Update(Timestamp now, uint32_t ts90khz);
+  absl::optional<Timestamp> ExtrapolateLocalTime(uint32_t timestamp90khz) const;
+  void Reset(Timestamp start);
+
+ private:
+  void CheckForWrapArounds(uint32_t ts90khz);
+  bool DelayChangeDetection(double error);
+
+  double w_[2];
+  double p_[2][2];
+  Timestamp start_;
+  Timestamp prev_;
+  absl::optional<int64_t> first_unwrapped_timestamp_;
+  TimestampUnwrapper unwrapper_;
+  absl::optional<int64_t> prev_unwrapped_timestamp_;
+  uint32_t packet_count_;
+  double detector_accumulator_pos_;
+  double detector_accumulator_neg_;
+};
+
+}  // namespace webrtc
+
+#endif  // MODULES_VIDEO_CODING_TIMING_TIMESTAMP_EXTRAPOLATOR_H_
diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/timing.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/timing.cc
new file mode 100644
index 0000000000..0b61d5a35e
--- /dev/null
+++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/timing.cc
@@ -0,0 +1,297 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/timing/timing.h"
+
+#include <algorithm>
+
+#include "api/units/time_delta.h"
+#include "modules/video_coding/timing/timestamp_extrapolator.h"
+#include "rtc_base/experiments/field_trial_parser.h"
+#include "rtc_base/logging.h"
+#include "system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace {
+
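For reference, a small sketch of the linear model maintained by TimestampExtrapolator above, assuming a settled filter (w0 ≈ 90 RTP ticks per local millisecond, w1 ≈ 0) and hypothetical timestamps; ExtrapolateLocalTime() effectively inverts ts ≈ w0*t + w1 to recover a local render base time.

#include <cstdint>
#include <cstdio>

int main() {
  const double w0 = 90.0;          // assumed settled slope: RTP ticks per ms
  const double w1 = 0.0;           // assumed zero offset for the example
  const int64_t first_ts = 0;      // first unwrapped 90 kHz timestamp seen
  const int64_t start_ms = 10000;  // hypothetical local start time
  const int64_t frame_ts = 3000;   // 3000 ticks ~= 33.3 ms of media time
  const int64_t local_ms =
      start_ms + static_cast<int64_t>((frame_ts - first_ts - w1) / w0 + 0.5);
  std::printf("frame ts=%lld maps to local time ~%lld ms\n",
              static_cast<long long>(frame_ts),
              static_cast<long long>(local_ms));
  return 0;
}
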
+// Default pacing that is used for the low-latency renderer path.
+constexpr TimeDelta kZeroPlayoutDelayDefaultMinPacing = TimeDelta::Millis(8);
+constexpr TimeDelta kLowLatencyStreamMaxPlayoutDelayThreshold =
+    TimeDelta::Millis(500);
+
+void CheckDelaysValid(TimeDelta min_delay, TimeDelta max_delay) {
+  if (min_delay > max_delay) {
+    RTC_LOG(LS_ERROR)
+        << "Playout delays set incorrectly: min playout delay (" << min_delay
+        << ") > max playout delay (" << max_delay
+        << "). This is undefined behaviour. Application writers should "
+           "ensure that the min delay is always less than or equals max "
+           "delay. If trying to use the playout delay header extensions "
+           "described in "
+           "https://webrtc.googlesource.com/src/+/refs/heads/main/docs/"
+           "native-code/rtp-hdrext/playout-delay/, be careful that a playout "
+           "delay hint or A/V sync settings may have caused this conflict.";
+  }
+}
+
+}  // namespace
+
+VCMTiming::VCMTiming(Clock* clock, const FieldTrialsView& field_trials)
+    : clock_(clock),
+      ts_extrapolator_(
+          std::make_unique<TimestampExtrapolator>(clock_->CurrentTime())),
+      codec_timer_(std::make_unique<VCMCodecTimer>()),
+      render_delay_(kDefaultRenderDelay),
+      min_playout_delay_(TimeDelta::Zero()),
+      max_playout_delay_(TimeDelta::Seconds(10)),
+      jitter_delay_(TimeDelta::Zero()),
+      current_delay_(TimeDelta::Zero()),
+      prev_frame_timestamp_(0),
+      num_decoded_frames_(0),
+      zero_playout_delay_min_pacing_("min_pacing",
+                                     kZeroPlayoutDelayDefaultMinPacing),
+      last_decode_scheduled_(Timestamp::Zero()) {
+  ParseFieldTrial({&zero_playout_delay_min_pacing_},
+                  field_trials.Lookup("WebRTC-ZeroPlayoutDelay"));
+}
+
+void VCMTiming::Reset() {
+  MutexLock lock(&mutex_);
+  ts_extrapolator_->Reset(clock_->CurrentTime());
+  codec_timer_ = std::make_unique<VCMCodecTimer>();
+  render_delay_ = kDefaultRenderDelay;
+  min_playout_delay_ = TimeDelta::Zero();
+  jitter_delay_ = TimeDelta::Zero();
+  current_delay_ = TimeDelta::Zero();
+  prev_frame_timestamp_ = 0;
+}
+
+void VCMTiming::set_render_delay(TimeDelta render_delay) {
+  MutexLock lock(&mutex_);
+  render_delay_ = render_delay;
+}
+
+TimeDelta VCMTiming::min_playout_delay() const {
+  MutexLock lock(&mutex_);
+  return min_playout_delay_;
+}
+
+void VCMTiming::set_min_playout_delay(TimeDelta min_playout_delay) {
+  MutexLock lock(&mutex_);
+  if (min_playout_delay_ != min_playout_delay) {
+    CheckDelaysValid(min_playout_delay, max_playout_delay_);
+    min_playout_delay_ = min_playout_delay;
+  }
+}
+
+void VCMTiming::set_max_playout_delay(TimeDelta max_playout_delay) {
+  MutexLock lock(&mutex_);
+  if (max_playout_delay_ != max_playout_delay) {
+    CheckDelaysValid(min_playout_delay_, max_playout_delay);
+    max_playout_delay_ = max_playout_delay;
+  }
+}
+
+void VCMTiming::SetJitterDelay(TimeDelta jitter_delay) {
+  MutexLock lock(&mutex_);
+  if (jitter_delay != jitter_delay_) {
+    jitter_delay_ = jitter_delay;
+    // When in initial state, set current delay to minimum delay.
+    if (current_delay_.IsZero()) {
+      current_delay_ = jitter_delay_;
+    }
+  }
+}
+
+void VCMTiming::UpdateCurrentDelay(uint32_t frame_timestamp) {
+  MutexLock lock(&mutex_);
+  TimeDelta target_delay = TargetDelayInternal();
+
+  if (current_delay_.IsZero()) {
+    // Not initialized, set current delay to target.
+    current_delay_ = target_delay;
+  } else if (target_delay != current_delay_) {
+    TimeDelta delay_diff = target_delay - current_delay_;
+    // Never change the delay with more than 100 ms every second. If we're
+    // changing the delay in too large steps we will get noticeable freezes.
By + // limiting the change we can increase the delay in smaller steps, which + // will be experienced as the video is played in slow motion. When lowering + // the delay the video will be played at a faster pace. + TimeDelta max_change = TimeDelta::Zero(); + if (frame_timestamp < 0x0000ffff && prev_frame_timestamp_ > 0xffff0000) { + // wrap + max_change = + TimeDelta::Millis(kDelayMaxChangeMsPerS * + (frame_timestamp + (static_cast(1) << 32) - + prev_frame_timestamp_) / + 90000); + } else { + max_change = + TimeDelta::Millis(kDelayMaxChangeMsPerS * + (frame_timestamp - prev_frame_timestamp_) / 90000); + } + + if (max_change <= TimeDelta::Zero()) { + // Any changes less than 1 ms are truncated and will be postponed. + // Negative change will be due to reordering and should be ignored. + return; + } + delay_diff = std::max(delay_diff, -max_change); + delay_diff = std::min(delay_diff, max_change); + + current_delay_ = current_delay_ + delay_diff; + } + prev_frame_timestamp_ = frame_timestamp; +} + +void VCMTiming::UpdateCurrentDelay(Timestamp render_time, + Timestamp actual_decode_time) { + MutexLock lock(&mutex_); + TimeDelta target_delay = TargetDelayInternal(); + TimeDelta delayed = + (actual_decode_time - render_time) + RequiredDecodeTime() + render_delay_; + + // Only consider `delayed` as negative by more than a few microseconds. + if (delayed.ms() < 0) { + return; + } + if (current_delay_ + delayed <= target_delay) { + current_delay_ += delayed; + } else { + current_delay_ = target_delay; + } +} + +void VCMTiming::StopDecodeTimer(TimeDelta decode_time, Timestamp now) { + MutexLock lock(&mutex_); + codec_timer_->AddTiming(decode_time.ms(), now.ms()); + RTC_DCHECK_GE(decode_time, TimeDelta::Zero()); + ++num_decoded_frames_; +} + +void VCMTiming::IncomingTimestamp(uint32_t rtp_timestamp, Timestamp now) { + MutexLock lock(&mutex_); + ts_extrapolator_->Update(now, rtp_timestamp); +} + +Timestamp VCMTiming::RenderTime(uint32_t frame_timestamp, Timestamp now) const { + MutexLock lock(&mutex_); + return RenderTimeInternal(frame_timestamp, now); +} + +void VCMTiming::SetLastDecodeScheduledTimestamp( + Timestamp last_decode_scheduled) { + MutexLock lock(&mutex_); + last_decode_scheduled_ = last_decode_scheduled; +} + +Timestamp VCMTiming::RenderTimeInternal(uint32_t frame_timestamp, + Timestamp now) const { + if (UseLowLatencyRendering()) { + // Render as soon as possible or with low-latency renderer algorithm. + return Timestamp::Zero(); + } + // Note that TimestampExtrapolator::ExtrapolateLocalTime is not a const + // method; it mutates the object's wraparound state. + Timestamp estimated_complete_time = + ts_extrapolator_->ExtrapolateLocalTime(frame_timestamp).value_or(now); + + // Make sure the actual delay stays in the range of `min_playout_delay_` + // and `max_playout_delay_`. 
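
Worked example for the ramp limit applied in VCMTiming::UpdateCurrentDelay() above (values hypothetical): the current delay may drift toward the target by at most kDelayMaxChangeMsPerS = 100 ms per second of media time, measured as the 90 kHz timestamp gap between frames.

#include <cstdint>
#include <cstdio>

int main() {
  constexpr int64_t kDelayMaxChangeMsPerS = 100;  // as in timing.h above
  const uint32_t prev_frame_timestamp = 90000;    // hypothetical previous frame
  const uint32_t frame_timestamp = 93000;         // 3000 ticks = 33.3 ms later
  const int64_t gap_ticks =
      static_cast<int64_t>(frame_timestamp) - prev_frame_timestamp;
  const int64_t max_change_ms = kDelayMaxChangeMsPerS * gap_ticks / 90000;
  std::printf("%lld ticks of media time allow at most %lld ms of delay change\n",
              static_cast<long long>(gap_ticks),
              static_cast<long long>(max_change_ms));  // prints 3
  return 0;
}
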
+ TimeDelta actual_delay = + current_delay_.Clamped(min_playout_delay_, max_playout_delay_); + return estimated_complete_time + actual_delay; +} + +TimeDelta VCMTiming::RequiredDecodeTime() const { + const int decode_time_ms = codec_timer_->RequiredDecodeTimeMs(); + RTC_DCHECK_GE(decode_time_ms, 0); + return TimeDelta::Millis(decode_time_ms); +} + +TimeDelta VCMTiming::MaxWaitingTime(Timestamp render_time, + Timestamp now, + bool too_many_frames_queued) const { + MutexLock lock(&mutex_); + + if (render_time.IsZero() && zero_playout_delay_min_pacing_->us() > 0 && + min_playout_delay_.IsZero() && max_playout_delay_ > TimeDelta::Zero()) { + // `render_time` == 0 indicates that the frame should be decoded and + // rendered as soon as possible. However, the decoder can be choked if too + // many frames are sent at once. Therefore, limit the interframe delay to + // |zero_playout_delay_min_pacing_| unless too many frames are queued in + // which case the frames are sent to the decoder at once. + if (too_many_frames_queued) { + return TimeDelta::Zero(); + } + Timestamp earliest_next_decode_start_time = + last_decode_scheduled_ + zero_playout_delay_min_pacing_; + TimeDelta max_wait_time = now >= earliest_next_decode_start_time + ? TimeDelta::Zero() + : earliest_next_decode_start_time - now; + return max_wait_time; + } + return render_time - now - RequiredDecodeTime() - render_delay_; +} + +TimeDelta VCMTiming::TargetVideoDelay() const { + MutexLock lock(&mutex_); + return TargetDelayInternal(); +} + +TimeDelta VCMTiming::TargetDelayInternal() const { + return std::max(min_playout_delay_, + jitter_delay_ + RequiredDecodeTime() + render_delay_); +} + +VideoFrame::RenderParameters VCMTiming::RenderParameters() const { + MutexLock lock(&mutex_); + return {.use_low_latency_rendering = UseLowLatencyRendering(), + .max_composition_delay_in_frames = max_composition_delay_in_frames_}; +} + +bool VCMTiming::UseLowLatencyRendering() const { + // min_playout_delay_==0, + // max_playout_delay_<=kLowLatencyStreamMaxPlayoutDelayThreshold indicates + // that the low-latency path should be used, which means that frames should be + // decoded and rendered as soon as possible. 
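
A minimal sketch of the zero-playout-delay branch of MaxWaitingTime() above, assuming the default min_pacing of 8 ms and hypothetical clock values: frames flagged for immediate rendering are still spaced apart so the decoder is not flooded, unless the decode queue is already congested.

#include <cstdio>

int main() {
  const double min_pacing_ms = 8;                // kZeroPlayoutDelayDefaultMinPacing
  const double last_decode_scheduled_ms = 1000;  // hypothetical
  const double now_ms = 1003;                    // 3 ms after the last decode
  const bool too_many_frames_queued = false;
  double max_wait_ms = 0;
  if (!too_many_frames_queued) {
    const double earliest_next_decode_ms =
        last_decode_scheduled_ms + min_pacing_ms;
    max_wait_ms = now_ms >= earliest_next_decode_ms
                      ? 0
                      : earliest_next_decode_ms - now_ms;
  }
  std::printf("wait %.0f ms before handing the next frame to the decoder\n",
              max_wait_ms);  // prints 5
  return 0;
}
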
+ return min_playout_delay_.IsZero() && + max_playout_delay_ <= kLowLatencyStreamMaxPlayoutDelayThreshold; +} + +VCMTiming::VideoDelayTimings VCMTiming::GetTimings() const { + MutexLock lock(&mutex_); + return VideoDelayTimings{.max_decode_duration = RequiredDecodeTime(), + .current_delay = current_delay_, + .target_delay = TargetDelayInternal(), + .jitter_buffer_delay = jitter_delay_, + .min_playout_delay = min_playout_delay_, + .max_playout_delay = max_playout_delay_, + .render_delay = render_delay_, + .num_decoded_frames = num_decoded_frames_}; +} + +void VCMTiming::SetTimingFrameInfo(const TimingFrameInfo& info) { + MutexLock lock(&mutex_); + timing_frame_info_.emplace(info); +} + +absl::optional VCMTiming::GetTimingFrameInfo() { + MutexLock lock(&mutex_); + return timing_frame_info_; +} + +void VCMTiming::SetMaxCompositionDelayInFrames( + absl::optional max_composition_delay_in_frames) { + MutexLock lock(&mutex_); + max_composition_delay_in_frames_ = max_composition_delay_in_frames; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/timing.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/timing.h new file mode 100644 index 0000000000..727527f009 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing/timing.h @@ -0,0 +1,160 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_TIMING_TIMING_H_ +#define MODULES_VIDEO_CODING_TIMING_TIMING_H_ + +#include + +#include "absl/types/optional.h" +#include "api/field_trials_view.h" +#include "api/units/time_delta.h" +#include "api/video/video_frame.h" +#include "api/video/video_timing.h" +#include "modules/video_coding/timing/codec_timer.h" +#include "modules/video_coding/timing/timestamp_extrapolator.h" +#include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { + +class VCMTiming { + public: + static constexpr auto kDefaultRenderDelay = TimeDelta::Millis(10); + static constexpr auto kDelayMaxChangeMsPerS = 100; + + VCMTiming(Clock* clock, const FieldTrialsView& field_trials); + virtual ~VCMTiming() = default; + + // Resets the timing to the initial state. + void Reset(); + + // Set the amount of time needed to render an image. Defaults to 10 ms. + void set_render_delay(TimeDelta render_delay); + + // Set the minimum time the video must be delayed on the receiver to + // get the desired jitter buffer level. + void SetJitterDelay(TimeDelta required_delay); + + // Set/get the minimum playout delay from capture to render. + TimeDelta min_playout_delay() const; + void set_min_playout_delay(TimeDelta min_playout_delay); + + // Set/get the maximum playout delay from capture to render in ms. + void set_max_playout_delay(TimeDelta max_playout_delay); + + // Increases or decreases the current delay to get closer to the target delay. + // Calculates how long it has been since the previous call to this function, + // and increases/decreases the delay in proportion to the time difference. 
+  void UpdateCurrentDelay(uint32_t frame_timestamp);
+
+  // Increases or decreases the current delay to get closer to the target delay.
+  // Given the actual decode time in ms and the render time in ms for a frame,
+  // this function calculates how late the frame is and increases the delay
+  // accordingly.
+  void UpdateCurrentDelay(Timestamp render_time, Timestamp actual_decode_time);
+
+  // Stops the decoder timer, should be called when the decoder returns a frame
+  // or when the decoded frame callback is called.
+  void StopDecodeTimer(TimeDelta decode_time, Timestamp now);
+
+  // Used to report that a frame is passed to decoding. Updates the timestamp
+  // filter which is used to map between timestamps and receiver system time.
+  virtual void IncomingTimestamp(uint32_t rtp_timestamp,
+                                 Timestamp last_packet_time);
+
+  // Returns the receiver system time when the frame with timestamp
+  // `frame_timestamp` should be rendered, assuming that the system time
+  // currently is `now`.
+  virtual Timestamp RenderTime(uint32_t frame_timestamp, Timestamp now) const;
+
+  // Returns the maximum time in ms that we can wait for a frame to become
+  // complete before we must pass it to the decoder. render_time==0 indicates
+  // that the frames should be processed as quickly as possible, with possibly
+  // only a small delay added to make sure that the decoder is not overloaded.
+  // In this case, the parameter too_many_frames_queued is used to signal that
+  // the decode queue is full and that the frame should be decoded as soon as
+  // possible.
+  virtual TimeDelta MaxWaitingTime(Timestamp render_time,
+                                   Timestamp now,
+                                   bool too_many_frames_queued) const;
+
+  // Returns the current target delay which is required delay + decode time +
+  // render delay.
+  TimeDelta TargetVideoDelay() const;
+
+  // Return current timing information. Returns true if the first frame has been
+  // decoded, false otherwise.
+  struct VideoDelayTimings {
+    TimeDelta max_decode_duration;
+    TimeDelta current_delay;
+    TimeDelta target_delay;
+    TimeDelta jitter_buffer_delay;
+    TimeDelta min_playout_delay;
+    TimeDelta max_playout_delay;
+    TimeDelta render_delay;
+    size_t num_decoded_frames;
+  };
+  VideoDelayTimings GetTimings() const;
+
+  void SetTimingFrameInfo(const TimingFrameInfo& info);
+  absl::optional<TimingFrameInfo> GetTimingFrameInfo();
+
+  void SetMaxCompositionDelayInFrames(
+      absl::optional<int> max_composition_delay_in_frames);
+
+  VideoFrame::RenderParameters RenderParameters() const;
+
+  // Updates the last time a frame was scheduled for decoding.
+  void SetLastDecodeScheduledTimestamp(Timestamp last_decode_scheduled);
+
+ protected:
+  TimeDelta RequiredDecodeTime() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+  Timestamp RenderTimeInternal(uint32_t frame_timestamp, Timestamp now) const
+      RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+  TimeDelta TargetDelayInternal() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+  bool UseLowLatencyRendering() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
+ private:
+  mutable Mutex mutex_;
+  Clock* const clock_;
+  const std::unique_ptr<TimestampExtrapolator> ts_extrapolator_
+      RTC_PT_GUARDED_BY(mutex_);
+  std::unique_ptr<VCMCodecTimer> codec_timer_ RTC_GUARDED_BY(mutex_)
+      RTC_PT_GUARDED_BY(mutex_);
+  TimeDelta render_delay_ RTC_GUARDED_BY(mutex_);
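
A small numeric sketch of TargetDelayInternal() as declared above, with hypothetical inputs: the target delay is the jitter-buffer delay plus the filtered decode time plus the render delay, floored at the minimum playout delay.

#include <algorithm>
#include <cstdio>

int main() {
  const int min_playout_delay_ms = 0;  // hypothetical inputs
  const int jitter_delay_ms = 35;
  const int decode_time_ms = 12;       // filtered required decode time
  const int render_delay_ms = 10;      // kDefaultRenderDelay
  const int target_ms =
      std::max(min_playout_delay_ms,
               jitter_delay_ms + decode_time_ms + render_delay_ms);
  std::printf("target video delay = %d ms\n", target_ms);  // prints 57
  return 0;
}
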
+  // Best-effort playout delay range for frames from capture to render.
+  // The receiver tries to keep the delay between `min_playout_delay_ms_`
+  // and `max_playout_delay_ms_` taking the network jitter into account.
+  // A special case is where min_playout_delay_ms_ = max_playout_delay_ms_ = 0,
+  // in which case the receiver tries to play the frames as they arrive.
+  TimeDelta min_playout_delay_ RTC_GUARDED_BY(mutex_);
+  TimeDelta max_playout_delay_ RTC_GUARDED_BY(mutex_);
+  TimeDelta jitter_delay_ RTC_GUARDED_BY(mutex_);
+  TimeDelta current_delay_ RTC_GUARDED_BY(mutex_);
+  uint32_t prev_frame_timestamp_ RTC_GUARDED_BY(mutex_);
+  absl::optional<TimingFrameInfo> timing_frame_info_ RTC_GUARDED_BY(mutex_);
+  size_t num_decoded_frames_ RTC_GUARDED_BY(mutex_);
+  absl::optional<int> max_composition_delay_in_frames_ RTC_GUARDED_BY(mutex_);
+  // Set by the field trial WebRTC-ZeroPlayoutDelay. The parameter min_pacing
+  // determines the minimum delay between frames scheduled for decoding that is
+  // used when min playout delay=0 and max playout delay>=0.
+  FieldTrialParameter<TimeDelta> zero_playout_delay_min_pacing_
+      RTC_GUARDED_BY(mutex_);
+  // Timestamp at which the last frame was scheduled to be sent to the decoder.
+  // Used only when the RTP header extension playout delay is set to min=0 ms
+  // which is indicated by a render time set to 0.
+  Timestamp last_decode_scheduled_ RTC_GUARDED_BY(mutex_);
+};
+}  // namespace webrtc
+
+#endif  // MODULES_VIDEO_CODING_TIMING_TIMING_H_
diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/bandwidth_quality_scaler.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/bandwidth_quality_scaler.cc
index 7632100858..13502a142b 100644
--- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/bandwidth_quality_scaler.cc
+++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/bandwidth_quality_scaler.cc
@@ -21,8 +21,6 @@
 #include "rtc_base/experiments/bandwidth_quality_scaler_settings.h"
 #include "rtc_base/logging.h"
 #include "rtc_base/numerics/exp_filter.h"
-#include "rtc_base/task_queue.h"
-#include "rtc_base/task_utils/to_queued_task.h"
 #include "rtc_base/time_utils.h"
 #include "rtc_base/weak_ptr.h"
 
@@ -58,7 +56,7 @@ BandwidthQualityScaler::~BandwidthQualityScaler() {
 void BandwidthQualityScaler::StartCheckForBitrate() {
   RTC_DCHECK_RUN_ON(&task_checker_);
   TaskQueueBase::Current()->PostDelayedTask(
-      ToQueuedTask([this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), this] {
+      [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), this] {
         if (!this_weak_ptr) {
           // The caller BandwidthQualityScaler has been deleted.
return; @@ -84,8 +82,8 @@ void BandwidthQualityScaler::StartCheckForBitrate() { } } StartCheckForBitrate(); - }), - kBitrateStateUpdateInterval.ms()); + }, + kBitrateStateUpdateInterval); } void BandwidthQualityScaler::ReportEncodeInfo(int frame_size_bytes, diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/bandwidth_quality_scaler.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/bandwidth_quality_scaler.h index 2816e588be..7cd1de0dd2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/bandwidth_quality_scaler.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/bandwidth_quality_scaler.h @@ -26,9 +26,7 @@ #include "rtc_base/numerics/exp_filter.h" #include "rtc_base/rate_statistics.h" #include "rtc_base/ref_count.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_queue.h" #include "rtc_base/weak_ptr.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_defines.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_defines.h new file mode 100644 index 0000000000..212d381e70 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_defines.h @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +/* + * This file contains definitions that are common to the IvfFileReader and + * IvfFileWriter classes. + */ + +#ifndef MODULES_VIDEO_CODING_UTILITY_IVF_DEFINES_H_ +#define MODULES_VIDEO_CODING_UTILITY_IVF_DEFINES_H_ + +#include + +namespace webrtc { +constexpr size_t kIvfHeaderSize = 32; +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_UTILITY_IVF_DEFINES_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc index 63925702b5..85d1fa00d7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc @@ -15,12 +15,12 @@ #include "api/video_codecs/video_codec.h" #include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/video_coding/utility/ivf_defines.h" #include "rtc_base/logging.h" namespace webrtc { namespace { -constexpr size_t kIvfHeaderSize = 32; constexpr size_t kIvfFrameHeaderSize = 12; constexpr int kCodecTypeBytesCount = 4; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.h index 5e0634f9fd..75f2e3ac8c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.h @@ -26,6 +26,10 @@ class IvfFileReader { // Creates IvfFileReader. Returns nullptr if error acquired. static std::unique_ptr Create(FileWrapper file); ~IvfFileReader(); + + IvfFileReader(const IvfFileReader&) = delete; + IvfFileReader& operator=(const IvfFileReader&) = delete; + // Reinitializes reader. Returns false if any error acquired. 
bool Reset(); @@ -71,8 +75,6 @@ class IvfFileReader { absl::optional next_frame_header_; bool has_error_; - - RTC_DISALLOW_COPY_AND_ASSIGN(IvfFileReader); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc index 77c90ee158..668390a78c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc @@ -14,6 +14,7 @@ #include "api/video_codecs/video_codec.h" #include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/video_coding/utility/ivf_defines.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -22,7 +23,11 @@ namespace webrtc { -const size_t kIvfHeaderSize = 32; +namespace { + +constexpr int kDefaultWidth = 1280; +constexpr int kDefaultHeight = 720; +} // namespace IvfFileWriter::IvfFileWriter(FileWrapper file, size_t byte_limit) : codec_type_(kVideoCodecGeneric), @@ -122,10 +127,14 @@ bool IvfFileWriter::WriteHeader() { bool IvfFileWriter::InitFromFirstFrame(const EncodedImage& encoded_image, VideoCodecType codec_type) { - width_ = encoded_image._encodedWidth; - height_ = encoded_image._encodedHeight; - RTC_CHECK_GT(width_, 0); - RTC_CHECK_GT(height_, 0); + if (encoded_image._encodedWidth == 0 || encoded_image._encodedHeight == 0) { + width_ = kDefaultWidth; + height_ = kDefaultHeight; + } else { + width_ = encoded_image._encodedWidth; + height_ = encoded_image._encodedHeight; + } + using_capture_timestamps_ = encoded_image.Timestamp() == 0; codec_type_ = codec_type; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.h index 874f60adfc..b53459b5de 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.h @@ -18,7 +18,6 @@ #include "api/video/encoded_image.h" #include "api/video/video_codec_type.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/system/file_wrapper.h" #include "rtc_base/time_utils.h" @@ -34,6 +33,9 @@ class IvfFileWriter { size_t byte_limit); ~IvfFileWriter(); + IvfFileWriter(const IvfFileWriter&) = delete; + IvfFileWriter& operator=(const IvfFileWriter&) = delete; + bool WriteFrame(const EncodedImage& encoded_image, VideoCodecType codec_type); bool Close(); @@ -57,8 +59,6 @@ class IvfFileWriter { bool using_capture_timestamps_; rtc::TimestampWrapAroundHandler wrap_handler_; FileWrapper file_; - - RTC_DISALLOW_COPY_AND_ASSIGN(IvfFileWriter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.cc index 4d5e2b16fa..9fb41a0ad7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.cc @@ -13,25 +13,17 @@ #include #include +#include "api/units/time_delta.h" #include "api/video/video_adaptation_reason.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/quality_scaler_settings.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/exp_filter.h" -#include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/weak_ptr.h" -// TODO(kthelgason): Some versions of Android 
have issues with log2. -// See https://code.google.com/p/android/issues/detail?id=212634 for details -#if defined(WEBRTC_ANDROID) -#define log2(x) (log(x) / log(2)) -#endif - namespace webrtc { namespace { -// TODO(nisse): Delete, delegate to encoders. // Threshold constant used until first downscale (to permit fast rampup). static const int kMeasureMs = 2000; static const float kSamplePeriodScaleFactor = 2.5; @@ -98,7 +90,7 @@ class QualityScaler::CheckQpTask { RTC_DCHECK_EQ(state_, State::kNotStarted); state_ = State::kCheckingQp; TaskQueueBase::Current()->PostDelayedTask( - ToQueuedTask([this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), this] { + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), this] { if (!this_weak_ptr) { // The task has been cancelled through destruction. return; @@ -135,8 +127,8 @@ class QualityScaler::CheckQpTask { // Starting the next task deletes the pending task. After this line, // `this` has been deleted. quality_scaler_->StartNextCheckQpTask(); - }), - GetCheckingQpDelayMs()); + }, + TimeDelta::Millis(GetCheckingQpDelayMs())); } bool HasCompletedTask() const { return state_ == State::kCompleted; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.h index aae5ce5dd9..93014e36a7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.h @@ -23,9 +23,7 @@ #include "rtc_base/experiments/quality_scaling_experiment.h" #include "rtc_base/numerics/moving_average.h" #include "rtc_base/ref_count.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_queue.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.cc index 39e39abca1..1496934e1c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.cc @@ -151,7 +151,7 @@ void SimulcastRateAllocator::DistributeAllocationToSimulcastLayers( size_t top_active_layer = active_layer; // Allocate up to the target bitrate for each active simulcast layer. for (; active_layer < codec_.numberOfSimulcastStreams; ++active_layer) { - const SpatialLayer& stream = + const SimulcastStream& stream = codec_.simulcastStream[layer_index[active_layer]]; if (!stream.active) { stream_enabled_[layer_index[active_layer]] = false; @@ -194,7 +194,7 @@ void SimulcastRateAllocator::DistributeAllocationToSimulcastLayers( // TODO(sprang): Allocate up to max bitrate for all layers once we have a // better idea of possible performance implications. 
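
For reference, a sketch of the delayed-task pattern that the two scaler hunks above migrate to, assuming WebRTC's current TaskQueueBase API (a closure plus a TimeDelta, with no ToQueuedTask wrapper or millisecond argument); DoPeriodicCheck and SchedulePeriodicCheck are hypothetical names used for illustration.

#include "api/task_queue/task_queue_base.h"
#include "api/units/time_delta.h"

void DoPeriodicCheck() {
  // Hypothetical periodic work; stands in for the bitrate/QP checks above.
}

void SchedulePeriodicCheck(webrtc::TimeDelta interval) {
  webrtc::TaskQueueBase::Current()->PostDelayedTask(
      [interval] {
        DoPeriodicCheck();
        SchedulePeriodicCheck(interval);  // re-arm, as the scalers above do
      },
      interval);
}
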
if (left_in_total_allocation > DataRate::Zero()) { - const SpatialLayer& stream = codec_.simulcastStream[top_active_layer]; + const SimulcastStream& stream = codec_.simulcastStream[top_active_layer]; DataRate initial_layer_rate = DataRate::BitsPerSec( allocated_bitrates->GetSpatialLayerSum(top_active_layer)); DataRate additional_allocation = std::min( diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.h index 9b2f9696e6..6f93dbde74 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.h @@ -19,7 +19,6 @@ #include "api/video/video_bitrate_allocation.h" #include "api/video/video_bitrate_allocator.h" #include "api/video_codecs/video_codec.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/experiments/stable_target_rate_experiment.h" @@ -30,6 +29,9 @@ class SimulcastRateAllocator : public VideoBitrateAllocator { explicit SimulcastRateAllocator(const VideoCodec& codec); ~SimulcastRateAllocator() override; + SimulcastRateAllocator(const SimulcastRateAllocator&) = delete; + SimulcastRateAllocator& operator=(const SimulcastRateAllocator&) = delete; + VideoBitrateAllocation Allocate( VideoBitrateAllocationParameters parameters) override; const VideoCodec& GetCodec() const; @@ -61,8 +63,6 @@ class SimulcastRateAllocator : public VideoBitrateAllocator { const RateControlSettings rate_control_settings_; std::vector stream_enabled_; bool legacy_conference_mode_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SimulcastRateAllocator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.cc index 0789f6ab00..84cd2e1589 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.cc @@ -188,7 +188,7 @@ void ConfigureStream(int width, int min_bitrate, int target_bitrate, float max_framerate, - SpatialLayer* stream, + SimulcastStream* stream, int num_temporal_layers) { RTC_DCHECK(stream); stream->width = width; @@ -242,13 +242,12 @@ void SimulcastTestFixtureImpl::DefaultSettings( kMinBitrates[2], kTargetBitrates[2], kMaxFramerates[2], &settings->simulcastStream[layer_order[2]], temporal_layer_profile[2]); + settings->SetFrameDropEnabled(true); if (codec_type == kVideoCodecVP8) { settings->VP8()->denoisingOn = true; settings->VP8()->automaticResizeOn = false; - settings->VP8()->frameDroppingOn = true; settings->VP8()->keyFrameInterval = 3000; } else { - settings->H264()->frameDroppingOn = true; settings->H264()->keyFrameInterval = 3000; } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.h index a3d3fc66a8..cdfdc609d5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.h @@ -64,10 +64,10 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture { void SetUpCodec(const int* temporal_layer_profile); void 
SetUpRateAllocator(); void SetRates(uint32_t bitrate_kbps, uint32_t fps); - void RunActiveStreamsTest(const std::vector active_streams); - void UpdateActiveStreams(const std::vector active_streams); + void RunActiveStreamsTest(std::vector active_streams); + void UpdateActiveStreams(std::vector active_streams); void ExpectStreams(VideoFrameType frame_type, - const std::vector expected_streams_active); + std::vector expected_streams_active); void ExpectStreams(VideoFrameType frame_type, int expected_video_streams); void VerifyTemporalIdxAndSyncForAllSpatialLayers( TestEncodedImageCallback* encoder_callback, diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp8_constants.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp8_constants.h new file mode 100644 index 0000000000..9321864dbc --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp8_constants.h @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_UTILITY_VP8_CONSTANTS_H_ +#define MODULES_VIDEO_CODING_UTILITY_VP8_CONSTANTS_H_ + +#include +#include + +#include + +namespace webrtc { + +// QP level below which VP8 variable framerate and zero hertz screencast reduces +// framerate due to diminishing quality enhancement returns. +constexpr int kVp8SteadyStateQpThreshold = 15; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_UTILITY_VP8_CONSTANTS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc index 867967ddc0..bf9d51f692 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc @@ -158,7 +158,8 @@ void Vp9ReadQp(BitstreamReader& br, Vp9UncompressedHeader* frame_info) { void Vp9ReadSegmentationParams(BitstreamReader& br, Vp9UncompressedHeader* frame_info) { constexpr int kSegmentationFeatureBits[kVp9SegLvlMax] = {8, 6, 2, 0}; - constexpr bool kSegmentationFeatureSigned[kVp9SegLvlMax] = {1, 1, 0, 0}; + constexpr bool kSegmentationFeatureSigned[kVp9SegLvlMax] = {true, true, false, + false}; frame_info->segmentation_enabled = br.Read(); if (!frame_info->segmentation_enabled) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_codec_initializer.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_codec_initializer.cc index 5d06a2c133..e1885d74c8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_codec_initializer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_codec_initializer.cc @@ -21,8 +21,10 @@ #include "api/video/video_bitrate_allocation.h" #include "api/video_codecs/video_encoder.h" #include "modules/video_coding/codecs/av1/av1_svc_config.h" +#include "modules/video_coding/codecs/vp8/vp8_scalability.h" #include "modules/video_coding/codecs/vp9/svc_config.h" #include "modules/video_coding/include/video_coding_defines.h" +#include "modules/video_coding/svc/scalability_mode_util.h" #include 
"rtc_base/checks.h" #include "rtc_base/experiments/min_video_bitrate_experiment.h" #include "rtc_base/logging.h" @@ -72,6 +74,7 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( config.content_type == VideoEncoderConfig::ContentType::kScreen && config.legacy_conference_mode; + video_codec.SetFrameDropEnabled(config.frame_drop_enabled); video_codec.numberOfSimulcastStreams = static_cast(streams.size()); video_codec.minBitrate = streams[0].min_bitrate_bps / 1000; @@ -94,9 +97,10 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( int max_framerate = 0; - absl::optional scalability_mode = streams[0].scalability_mode; + absl::optional scalability_mode = + streams[0].scalability_mode; for (size_t i = 0; i < streams.size(); ++i) { - SpatialLayer* sim_stream = &video_codec.simulcastStream[i]; + SimulcastStream* sim_stream = &video_codec.simulcastStream[i]; RTC_DCHECK_GT(streams[i].width, 0); RTC_DCHECK_GT(streams[i].height, 0); RTC_DCHECK_GT(streams[i].max_framerate, 0); @@ -112,8 +116,14 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000; sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000; sim_stream->qpMax = streams[i].max_qp; + + int num_temporal_layers = + streams[i].scalability_mode.has_value() + ? ScalabilityModeToNumTemporalLayers(*streams[i].scalability_mode) + : streams[i].num_temporal_layers.value_or(1); + sim_stream->numberOfTemporalLayers = - static_cast(streams[i].num_temporal_layers.value_or(1)); + static_cast(num_temporal_layers); sim_stream->active = streams[i].active; video_codec.width = @@ -128,9 +138,16 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( static_cast(streams[i].max_qp)); max_framerate = std::max(max_framerate, streams[i].max_framerate); + // TODO(bugs.webrtc.org/11607): Since scalability mode is a top-level + // setting on VideoCodec, setting it makes sense only if it is the same for + // all simulcast streams. if (streams[0].scalability_mode != streams[i].scalability_mode) { - RTC_LOG(LS_WARNING) << "Inconsistent scalability modes configured."; scalability_mode.reset(); + // For VP8, top-level scalability mode doesn't matter, since configuration + // is based on the per-simulcast stream configuration of temporal layers. + if (video_codec.codecType != kVideoCodecVP8) { + RTC_LOG(LS_WARNING) << "Inconsistent scalability modes configured."; + } } } @@ -153,7 +170,9 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( video_codec.spatialLayers[0].height = video_codec.height; video_codec.spatialLayers[0].maxFramerate = max_framerate; video_codec.spatialLayers[0].numberOfTemporalLayers = - streams[0].num_temporal_layers.value_or(1); + streams[0].scalability_mode.has_value() + ? ScalabilityModeToNumTemporalLayers(*streams[0].scalability_mode) + : streams[0].num_temporal_layers.value_or(1); // Set codec specific options if (config.encoder_specific_settings) @@ -165,9 +184,26 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( *video_codec.VP8() = VideoEncoder::GetDefaultVp8Settings(); } - video_codec.VP8()->numberOfTemporalLayers = static_cast( - streams.back().num_temporal_layers.value_or( - video_codec.VP8()->numberOfTemporalLayers)); + // Validate specified scalability modes. If some layer has an unsupported + // mode, store it as the top-level scalability mode, which will make + // InitEncode fail with an appropriate error. 
+ for (const auto& stream : streams) { + if (stream.scalability_mode.has_value() && + !VP8SupportsScalabilityMode(*stream.scalability_mode)) { + RTC_LOG(LS_WARNING) + << "Invalid scalability mode for VP8: " + << ScalabilityModeToString(*stream.scalability_mode); + video_codec.SetScalabilityMode(*stream.scalability_mode); + break; + } + } + video_codec.VP8()->numberOfTemporalLayers = + streams.back().scalability_mode.has_value() + ? ScalabilityModeToNumTemporalLayers( + *streams.back().scalability_mode) + : streams.back().num_temporal_layers.value_or( + video_codec.VP8()->numberOfTemporalLayers); + RTC_DCHECK_GE(video_codec.VP8()->numberOfTemporalLayers, 1); RTC_DCHECK_LE(video_codec.VP8()->numberOfTemporalLayers, kMaxTemporalStreams); @@ -197,6 +233,11 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( if (!config.spatial_layers.empty()) { // Layering is set explicitly. spatial_layers = config.spatial_layers; + } else if (scalability_mode.has_value()) { + // Layering is set via scalability mode. + spatial_layers = GetVp9SvcConfig(video_codec); + if (spatial_layers.empty()) + break; } else { size_t first_active_layer = 0; for (size_t spatial_idx = 0; @@ -262,7 +303,11 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( break; } case kVideoCodecAV1: - if (SetAv1SvcConfig(video_codec)) { + if (SetAv1SvcConfig(video_codec, + /*num_temporal_layers=*/ + streams.back().num_temporal_layers.value_or(1), + /*num_spatial_layers=*/ + std::max(config.spatial_layers.size(), 1))) { for (size_t i = 0; i < config.spatial_layers.size(); ++i) { video_codec.spatialLayers[i].active = config.spatial_layers[i].active; } @@ -271,8 +316,9 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( } break; case kVideoCodecH264: { - if (!config.encoder_specific_settings) - *video_codec.H264() = VideoEncoder::GetDefaultH264Settings(); + RTC_CHECK(!config.encoder_specific_settings); + + *video_codec.H264() = VideoEncoder::GetDefaultH264Settings(); video_codec.H264()->numberOfTemporalLayers = static_cast( streams.back().num_temporal_layers.value_or( video_codec.H264()->numberOfTemporalLayers)); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_defines.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_defines.cc index 424b23f971..436b1a6490 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_defines.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_defines.cc @@ -14,7 +14,7 @@ namespace webrtc { void VCMReceiveCallback::OnDroppedFrames(uint32_t frames_dropped) {} void VCMReceiveCallback::OnIncomingPayloadType(int payload_type) {} -void VCMReceiveCallback::OnDecoderImplementationName( - const char* implementation_name) {} +void VCMReceiveCallback::OnDecoderInfoChanged( + const VideoDecoder::DecoderInfo&) {} } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.cc index 0129aa1bf6..2eaecd5011 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.cc @@ -13,10 +13,14 @@ #include #include +#include "api/field_trials_view.h" #include "api/sequence_checker.h" +#include "api/transport/field_trial_based_config.h" #include "api/video/encoded_image.h" #include "modules/video_coding/include/video_codec_interface.h" -#include "modules/video_coding/timing.h" +#include 
"modules/video_coding/timing/timing.h" +#include "rtc_base/logging.h" +#include "rtc_base/memory/always_valid_pointer.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -35,24 +39,150 @@ int64_t VCMProcessTimer::TimeUntilProcess() const { void VCMProcessTimer::Processed() { _latestMs = _clock->TimeInMilliseconds(); } + +DEPRECATED_VCMDecoderDataBase::DEPRECATED_VCMDecoderDataBase() { + decoder_sequence_checker_.Detach(); +} + +VideoDecoder* DEPRECATED_VCMDecoderDataBase::DeregisterExternalDecoder( + uint8_t payload_type) { + RTC_DCHECK_RUN_ON(&decoder_sequence_checker_); + auto it = decoders_.find(payload_type); + if (it == decoders_.end()) { + return nullptr; + } + + // We can't use payload_type to check if the decoder is currently in use, + // because payload type may be out of date (e.g. before we decode the first + // frame after RegisterReceiveCodec). + if (current_decoder_ && current_decoder_->IsSameDecoder(it->second)) { + // Release it if it was registered and in use. + current_decoder_ = absl::nullopt; + } + VideoDecoder* ret = it->second; + decoders_.erase(it); + return ret; +} + +// Add the external decoder object to the list of external decoders. +// Won't be registered as a receive codec until RegisterReceiveCodec is called. +void DEPRECATED_VCMDecoderDataBase::RegisterExternalDecoder( + uint8_t payload_type, + VideoDecoder* external_decoder) { + RTC_DCHECK_RUN_ON(&decoder_sequence_checker_); + // If payload value already exists, erase old and insert new. + DeregisterExternalDecoder(payload_type); + decoders_[payload_type] = external_decoder; +} + +bool DEPRECATED_VCMDecoderDataBase::IsExternalDecoderRegistered( + uint8_t payload_type) const { + RTC_DCHECK_RUN_ON(&decoder_sequence_checker_); + return payload_type == current_payload_type_ || + decoders_.find(payload_type) != decoders_.end(); +} + +void DEPRECATED_VCMDecoderDataBase::RegisterReceiveCodec( + uint8_t payload_type, + const VideoDecoder::Settings& settings) { + // If payload value already exists, erase old and insert new. + if (payload_type == current_payload_type_) { + current_payload_type_ = absl::nullopt; + } + decoder_settings_[payload_type] = settings; +} + +bool DEPRECATED_VCMDecoderDataBase::DeregisterReceiveCodec( + uint8_t payload_type) { + if (decoder_settings_.erase(payload_type) == 0) { + return false; + } + if (payload_type == current_payload_type_) { + // This codec is currently in use. + current_payload_type_ = absl::nullopt; + } + return true; +} + +VCMGenericDecoder* DEPRECATED_VCMDecoderDataBase::GetDecoder( + const VCMEncodedFrame& frame, + VCMDecodedFrameCallback* decoded_frame_callback) { + RTC_DCHECK_RUN_ON(&decoder_sequence_checker_); + RTC_DCHECK(decoded_frame_callback->UserReceiveCallback()); + uint8_t payload_type = frame.PayloadType(); + if (payload_type == current_payload_type_ || payload_type == 0) { + return current_decoder_.has_value() ? &*current_decoder_ : nullptr; + } + // If decoder exists - delete. 
+ if (current_decoder_.has_value()) { + current_decoder_ = absl::nullopt; + current_payload_type_ = absl::nullopt; + } + + CreateAndInitDecoder(frame); + if (current_decoder_ == absl::nullopt) { + return nullptr; + } + + VCMReceiveCallback* callback = decoded_frame_callback->UserReceiveCallback(); + callback->OnIncomingPayloadType(payload_type); + if (current_decoder_->RegisterDecodeCompleteCallback(decoded_frame_callback) < + 0) { + current_decoder_ = absl::nullopt; + return nullptr; + } + + current_payload_type_ = payload_type; + return &*current_decoder_; +} + +void DEPRECATED_VCMDecoderDataBase::CreateAndInitDecoder( + const VCMEncodedFrame& frame) { + uint8_t payload_type = frame.PayloadType(); + RTC_LOG(LS_INFO) << "Initializing decoder with payload type '" + << int{payload_type} << "'."; + auto decoder_item = decoder_settings_.find(payload_type); + if (decoder_item == decoder_settings_.end()) { + RTC_LOG(LS_ERROR) << "Can't find a decoder associated with payload type: " + << int{payload_type}; + return; + } + auto external_dec_item = decoders_.find(payload_type); + if (external_dec_item == decoders_.end()) { + RTC_LOG(LS_ERROR) << "No decoder of this type exists."; + return; + } + current_decoder_.emplace(external_dec_item->second); + + // Copy over input resolutions to prevent codec reinitialization due to + // the first frame being of a different resolution than the database values. + // This is best effort, since there's no guarantee that width/height have been + // parsed yet (and may be zero). + RenderResolution frame_resolution(frame.EncodedImage()._encodedWidth, + frame.EncodedImage()._encodedHeight); + if (frame_resolution.Valid()) { + decoder_item->second.set_max_render_resolution(frame_resolution); + } + if (!current_decoder_->Configure(decoder_item->second)) { + current_decoder_ = absl::nullopt; + RTC_LOG(LS_ERROR) << "Failed to initialize decoder."; + } +} + } // namespace vcm namespace { class VideoCodingModuleImpl : public VideoCodingModule { public: - explicit VideoCodingModuleImpl(Clock* clock) + explicit VideoCodingModuleImpl(Clock* clock, + const FieldTrialsView* field_trials) : VideoCodingModule(), - timing_(new VCMTiming(clock)), - receiver_(clock, timing_.get()) {} + field_trials_(field_trials), + timing_(new VCMTiming(clock, *field_trials_)), + receiver_(clock, timing_.get(), *field_trials_) {} - ~VideoCodingModuleImpl() override {} - - int64_t TimeUntilNextProcess() override { - int64_t receiver_time = receiver_.TimeUntilNextProcess(); - RTC_DCHECK_GE(receiver_time, 0); - return receiver_time; - } + ~VideoCodingModuleImpl() override = default; void Process() override { receiver_.Process(); } @@ -104,6 +234,8 @@ class VideoCodingModuleImpl : public VideoCodingModule { } private: + AlwaysValidPointer + field_trials_; SequenceChecker construction_thread_; const std::unique_ptr timing_; vcm::VideoReceiver receiver_; @@ -112,9 +244,11 @@ class VideoCodingModuleImpl : public VideoCodingModule { // DEPRECATED. Create method for current interface, will be removed when the // new jitter buffer is in place. 
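
A small usage sketch for the updated factory changed below, assuming a caller that owns a FieldTrialBasedConfig; based on the AlwaysValidPointer member above, passing nullptr appears to fall back to a default FieldTrialBasedConfig.

#include "api/transport/field_trial_based_config.h"
#include "modules/video_coding/include/video_coding.h"
#include "system_wrappers/include/clock.h"

webrtc::VideoCodingModule* CreateLegacyVcm() {
  // Field trials must outlive the module; a process-wide default is used here.
  static webrtc::FieldTrialBasedConfig field_trials;
  return webrtc::VideoCodingModule::Create(webrtc::Clock::GetRealTimeClock(),
                                           &field_trials);
}
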
-VideoCodingModule* VideoCodingModule::Create(Clock* clock) { +VideoCodingModule* VideoCodingModule::Create( + Clock* clock, + const FieldTrialsView* field_trials) { RTC_DCHECK(clock); - return new VideoCodingModuleImpl(clock); + return new VideoCodingModuleImpl(clock, field_trials); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.h index 10ebd41bc8..927b2da4b8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.h @@ -11,19 +11,20 @@ #ifndef MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_ #define MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_ +#include #include #include #include #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/sequence_checker.h" -#include "modules/video_coding/decoder_database.h" #include "modules/video_coding/frame_buffer.h" #include "modules/video_coding/generic_decoder.h" #include "modules/video_coding/include/video_coding.h" #include "modules/video_coding/jitter_buffer.h" #include "modules/video_coding/receiver.h" -#include "modules/video_coding/timing.h" +#include "modules/video_coding/timing/timing.h" #include "rtc_base/one_time_event.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -54,10 +55,56 @@ class VCMProcessTimer { int64_t _latestMs; }; -class VideoReceiver : public Module { +class DEPRECATED_VCMDecoderDataBase { public: - VideoReceiver(Clock* clock, VCMTiming* timing); - ~VideoReceiver() override; + DEPRECATED_VCMDecoderDataBase(); + DEPRECATED_VCMDecoderDataBase(const DEPRECATED_VCMDecoderDataBase&) = delete; + DEPRECATED_VCMDecoderDataBase& operator=( + const DEPRECATED_VCMDecoderDataBase&) = delete; + ~DEPRECATED_VCMDecoderDataBase() = default; + + // Returns a pointer to the previously registered decoder or nullptr if none + // was registered for the `payload_type`. + VideoDecoder* DeregisterExternalDecoder(uint8_t payload_type); + void RegisterExternalDecoder(uint8_t payload_type, + VideoDecoder* external_decoder); + bool IsExternalDecoderRegistered(uint8_t payload_type) const; + + void RegisterReceiveCodec(uint8_t payload_type, + const VideoDecoder::Settings& settings); + bool DeregisterReceiveCodec(uint8_t payload_type); + + // Returns a decoder specified by frame.PayloadType. The decoded frame + // callback of the decoder is set to `decoded_frame_callback`. If no such + // decoder already exists an instance will be created and initialized. + // nullptr is returned if no decoder with the specified payload type was found + // and the function failed to create one. + VCMGenericDecoder* GetDecoder( + const VCMEncodedFrame& frame, + VCMDecodedFrameCallback* decoded_frame_callback); + + private: + void CreateAndInitDecoder(const VCMEncodedFrame& frame) + RTC_RUN_ON(decoder_sequence_checker_); + + SequenceChecker decoder_sequence_checker_; + + absl::optional current_payload_type_; + absl::optional current_decoder_ + RTC_GUARDED_BY(decoder_sequence_checker_); + // Initialization paramaters for decoders keyed by payload type. + std::map decoder_settings_; + // Decoders keyed by payload type. 
+ std::map decoders_ + RTC_GUARDED_BY(decoder_sequence_checker_); +}; + +class VideoReceiver { + public: + VideoReceiver(Clock* clock, + VCMTiming* timing, + const FieldTrialsView& field_trials); + ~VideoReceiver(); void RegisterReceiveCodec(uint8_t payload_type, const VideoDecoder::Settings& settings); @@ -79,9 +126,7 @@ class VideoReceiver : public Module { int max_packet_age_to_nack, int max_incomplete_time_ms); - int64_t TimeUntilNextProcess() override; - void Process() override; - void ProcessThreadAttached(ProcessThread* process_thread) override; + void Process(); protected: int32_t Decode(const webrtc::VCMEncodedFrame& frame); @@ -121,17 +166,12 @@ class VideoReceiver : public Module { // Callbacks are set before the decoder thread starts. // Once the decoder thread has been started, usage of `_codecDataBase` moves // over to the decoder thread. - VCMDecoderDataBase _codecDataBase; + DEPRECATED_VCMDecoderDataBase _codecDataBase; VCMProcessTimer _retransmissionTimer RTC_GUARDED_BY(module_thread_checker_); VCMProcessTimer _keyRequestTimer RTC_GUARDED_BY(module_thread_checker_); ThreadUnsafeOneTimeEvent first_frame_received_ RTC_GUARDED_BY(decoder_thread_checker_); - // Modified on the construction thread. Can be read without a lock and assumed - // to be non-null on the module and decoder threads. - ProcessThread* process_thread_ = nullptr; - bool is_attached_to_process_thread_ - RTC_GUARDED_BY(construction_thread_checker_) = false; }; } // namespace vcm diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver.cc index 079de6de14..38b70f87cd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver.cc @@ -17,7 +17,6 @@ #include "api/sequence_checker.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_decoder.h" -#include "modules/utility/include/process_thread.h" #include "modules/video_coding/decoder_database.h" #include "modules/video_coding/encoded_frame.h" #include "modules/video_coding/generic_decoder.h" @@ -28,10 +27,9 @@ #include "modules/video_coding/media_opt_util.h" #include "modules/video_coding/packet.h" #include "modules/video_coding/receiver.h" -#include "modules/video_coding/timing.h" +#include "modules/video_coding/timing/timing.h" #include "modules/video_coding/video_coding_impl.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/one_time_event.h" #include "rtc_base/trace_event.h" @@ -40,11 +38,13 @@ namespace webrtc { namespace vcm { -VideoReceiver::VideoReceiver(Clock* clock, VCMTiming* timing) +VideoReceiver::VideoReceiver(Clock* clock, + VCMTiming* timing, + const FieldTrialsView& field_trials) : clock_(clock), _timing(timing), - _receiver(_timing, clock_), - _decodedFrameCallback(_timing, clock_), + _receiver(_timing, clock_, field_trials), + _decodedFrameCallback(_timing, clock_, field_trials), _frameTypeCallback(nullptr), _packetRequestCallback(nullptr), _scheduleKeyRequest(false), @@ -101,27 +101,6 @@ void VideoReceiver::Process() { } } -void VideoReceiver::ProcessThreadAttached(ProcessThread* process_thread) { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); - if (process_thread) { - is_attached_to_process_thread_ = true; - RTC_DCHECK(!process_thread_ || process_thread_ == process_thread); - process_thread_ = process_thread; - } else { - is_attached_to_process_thread_ = false; - } -} - 
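With the Module/ProcessThread plumbing removed here (TimeUntilNextProcess(), ProcessThreadAttached() and the WakeUp() call are all gone), nothing schedules VideoReceiver::Process() automatically any more; the owning object has to pump it itself so the retransmission and key-frame-request timers keep firing. The snippet below is only a sketch of that obligation, using a plain std::thread and an assumed 100 ms period rather than whatever task queue the real call sites use:

    #include <atomic>
    #include <chrono>
    #include <thread>

    #include "modules/video_coding/video_coding_impl.h"

    // Illustrative driver only: services the NACK/key-frame-request timers that
    // VideoReceiver::Process() handles internally. The period and the threading
    // model here are assumptions made for the example.
    void PumpVideoReceiver(webrtc::vcm::VideoReceiver& receiver,
                           std::atomic<bool>& keep_running) {
      while (keep_running.load()) {
        receiver.Process();
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
      }
    }
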
-int64_t VideoReceiver::TimeUntilNextProcess() { - RTC_DCHECK_RUN_ON(&module_thread_checker_); - int64_t timeUntilNextProcess = _retransmissionTimer.TimeUntilProcess(); - - timeUntilNextProcess = - VCM_MIN(timeUntilNextProcess, _keyRequestTimer.TimeUntilProcess()); - - return timeUntilNextProcess; -} - // Register a receive callback. Will be called whenever there is a new frame // ready for rendering. int32_t VideoReceiver::RegisterReceiveCallback( @@ -148,7 +127,6 @@ void VideoReceiver::RegisterExternalDecoder(VideoDecoder* externalDecoder, int32_t VideoReceiver::RegisterFrameTypeCallback( VCMFrameTypeCallback* frameTypeCallback) { RTC_DCHECK_RUN_ON(&construction_thread_checker_); - RTC_DCHECK(!is_attached_to_process_thread_); // This callback is used on the module thread, but since we don't get // callbacks on the module thread while the decoder thread isn't running // (and this function must not be called when the decoder is running), @@ -160,7 +138,6 @@ int32_t VideoReceiver::RegisterFrameTypeCallback( int32_t VideoReceiver::RegisterPacketRequestCallback( VCMPacketRequestCallback* callback) { RTC_DCHECK_RUN_ON(&construction_thread_checker_); - RTC_DCHECK(!is_attached_to_process_thread_); // This callback is used on the module thread, but since we don't get // callbacks on the module thread while the decoder thread isn't running // (and this function must not be called when the decoder is running), @@ -187,10 +164,6 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) { if (frame->FrameType() != VideoFrameType::kVideoFrameKey) { drop_frame = true; _scheduleKeyRequest = true; - // TODO(tommi): Consider if we could instead post a task to the module - // thread and call RequestKeyFrame directly. Here we call WakeUp so that - // TimeUntilNextProcess() gets called straight away. - process_thread_->WakeUp(this); } else { drop_frames_until_keyframe_ = false; } @@ -203,8 +176,9 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) { } // If this frame was too late, we should adjust the delay accordingly - _timing->UpdateCurrentDelay(frame->RenderTimeMs(), - clock_->TimeInMilliseconds()); + if (frame->RenderTimeMs() > 0) + _timing->UpdateCurrentDelay(Timestamp::Millis(frame->RenderTimeMs()), + clock_->CurrentTime()); if (first_frame_received_()) { RTC_LOG(LS_INFO) << "Received first complete decodable video frame"; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.cc index ef6dcf98c5..0751869a98 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.cc @@ -8,31 +8,34 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include "modules/video_coding/video_receiver2.h" + #include #include +#include #include -#include "modules/video_coding/video_receiver2.h" - +#include "absl/algorithm/container.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_decoder.h" #include "modules/video_coding/decoder_database.h" #include "modules/video_coding/encoded_frame.h" #include "modules/video_coding/generic_decoder.h" #include "modules/video_coding/include/video_coding_defines.h" -#include "modules/video_coding/timing.h" +#include "modules/video_coding/timing/timing.h" #include "rtc_base/checks.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" namespace webrtc { -VideoReceiver2::VideoReceiver2(Clock* clock, VCMTiming* timing) +VideoReceiver2::VideoReceiver2(Clock* clock, + VCMTiming* timing, + const FieldTrialsView& field_trials) : clock_(clock), - timing_(timing), - decodedFrameCallback_(timing_, clock_), - codecDataBase_() { + decoded_frame_callback_(timing, clock_, field_trials), + codec_database_() { decoder_sequence_checker_.Detach(); } @@ -43,87 +46,63 @@ VideoReceiver2::~VideoReceiver2() { // Register a receive callback. Will be called whenever there is a new frame // ready for rendering. int32_t VideoReceiver2::RegisterReceiveCallback( - VCMReceiveCallback* receiveCallback) { + VCMReceiveCallback* receive_callback) { RTC_DCHECK_RUN_ON(&construction_sequence_checker_); - RTC_DCHECK(!IsDecoderThreadRunning()); // This value is set before the decoder thread starts and unset after // the decoder thread has been stopped. - decodedFrameCallback_.SetUserReceiveCallback(receiveCallback); + decoded_frame_callback_.SetUserReceiveCallback(receive_callback); return VCM_OK; } -// Register an externally defined decoder object. This may be called on either -// the construction sequence or the decoder sequence to allow for lazy creation -// of video decoders. If called on the decoder sequence `externalDecoder` cannot -// be a nullptr. It's the responsibility of the caller to make sure that the -// access from the two sequences are mutually exclusive. -void VideoReceiver2::RegisterExternalDecoder(VideoDecoder* externalDecoder, - uint8_t payloadType) { - if (IsDecoderThreadRunning()) { - RTC_DCHECK_RUN_ON(&decoder_sequence_checker_); - // Don't allow deregistering decoders on the decoder thread. 
- RTC_DCHECK(externalDecoder != nullptr); - } else { - RTC_DCHECK_RUN_ON(&construction_sequence_checker_); - } +void VideoReceiver2::RegisterExternalDecoder( + std::unique_ptr decoder, + uint8_t payload_type) { + RTC_DCHECK_RUN_ON(&decoder_sequence_checker_); + RTC_DCHECK(decoded_frame_callback_.UserReceiveCallback()); - if (externalDecoder == nullptr) { - codecDataBase_.DeregisterExternalDecoder(payloadType); - return; + if (decoder) { + RTC_DCHECK(!codec_database_.IsExternalDecoderRegistered(payload_type)); + codec_database_.RegisterExternalDecoder(payload_type, std::move(decoder)); + } else { + codec_database_.DeregisterExternalDecoder(payload_type); } - codecDataBase_.RegisterExternalDecoder(payloadType, externalDecoder); } -bool VideoReceiver2::IsExternalDecoderRegistered(uint8_t payloadType) const { +bool VideoReceiver2::IsExternalDecoderRegistered(uint8_t payload_type) const { RTC_DCHECK_RUN_ON(&decoder_sequence_checker_); - return codecDataBase_.IsExternalDecoderRegistered(payloadType); -} - -void VideoReceiver2::DecoderThreadStarting() { - RTC_DCHECK_RUN_ON(&construction_sequence_checker_); - RTC_DCHECK(!IsDecoderThreadRunning()); -#if RTC_DCHECK_IS_ON - decoder_thread_is_running_ = true; -#endif -} - -void VideoReceiver2::DecoderThreadStopped() { - RTC_DCHECK_RUN_ON(&construction_sequence_checker_); - RTC_DCHECK(IsDecoderThreadRunning()); -#if RTC_DCHECK_IS_ON - decoder_thread_is_running_ = false; - decoder_sequence_checker_.Detach(); -#endif + return codec_database_.IsExternalDecoderRegistered(payload_type); } // Must be called from inside the receive side critical section. int32_t VideoReceiver2::Decode(const VCMEncodedFrame* frame) { RTC_DCHECK_RUN_ON(&decoder_sequence_checker_); TRACE_EVENT0("webrtc", "VideoReceiver2::Decode"); - // Change decoder if payload type has changed + // Change decoder if payload type has changed. VCMGenericDecoder* decoder = - codecDataBase_.GetDecoder(*frame, &decodedFrameCallback_); + codec_database_.GetDecoder(*frame, &decoded_frame_callback_); if (decoder == nullptr) { return VCM_NO_CODEC_REGISTERED; } return decoder->Decode(*frame, clock_->CurrentTime()); } -// Register possible receive codecs, can be called multiple times +// Register possible receive codecs, can be called multiple times. +// Called before decoder thread is started. 
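The reworked registration path above makes the intended ordering explicit: the receive callback has to be installed first (hence the DCHECK on UserReceiveCallback()), codec settings are registered before the decoder thread starts, and RegisterExternalDecoder now takes ownership of the decoder via std::unique_ptr on the decoder sequence. A sketch of that sequence from a hypothetical owner — clock, timing, trials, payload type, decoder and callback are all placeholder names; only the call order and signatures follow the code shown here:

    #include <cstdint>
    #include <memory>
    #include <utility>

    #include "modules/video_coding/video_receiver2.h"

    // All names below are placeholders; in real code each call runs on the
    // sequence its RTC_DCHECK_RUN_ON expects.
    void SetUpReceiver(webrtc::Clock* clock,
                       webrtc::VCMTiming* timing,
                       const webrtc::FieldTrialsView& trials,
                       webrtc::VCMReceiveCallback* receive_callback,
                       std::unique_ptr<webrtc::VideoDecoder> vp8_decoder,
                       const webrtc::VideoDecoder::Settings& vp8_settings) {
      constexpr uint8_t kVp8PayloadType = 96;  // Illustrative payload type.
      webrtc::VideoReceiver2 receiver(clock, timing, trials);
      receiver.RegisterReceiveCallback(receive_callback);  // Before any decoder.
      receiver.RegisterReceiveCodec(kVp8PayloadType, vp8_settings);
      // Ownership of the decoder moves into the codec database.
      receiver.RegisterExternalDecoder(std::move(vp8_decoder), kVp8PayloadType);
    }
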
void VideoReceiver2::RegisterReceiveCodec( uint8_t payload_type, const VideoDecoder::Settings& settings) { RTC_DCHECK_RUN_ON(&construction_sequence_checker_); - RTC_DCHECK(!IsDecoderThreadRunning()); - codecDataBase_.RegisterReceiveCodec(payload_type, settings); + codec_database_.RegisterReceiveCodec(payload_type, settings); } -bool VideoReceiver2::IsDecoderThreadRunning() { -#if RTC_DCHECK_IS_ON - return decoder_thread_is_running_; -#else - return true; -#endif +void VideoReceiver2::DeregisterReceiveCodec(uint8_t payload_type) { + RTC_DCHECK_RUN_ON(&construction_sequence_checker_); + codec_database_.DeregisterReceiveCodec(payload_type); +} + +void VideoReceiver2::DeregisterReceiveCodecs() { + RTC_DCHECK_RUN_ON(&construction_sequence_checker_); + codec_database_.DeregisterReceiveCodecs(); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.h index 5e087d333f..4457a5b5b3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.h @@ -11,12 +11,17 @@ #ifndef MODULES_VIDEO_CODING_VIDEO_RECEIVER2_H_ #define MODULES_VIDEO_CODING_VIDEO_RECEIVER2_H_ +#include +#include + +#include "api/field_trials_view.h" #include "api/sequence_checker.h" #include "api/video_codecs/video_decoder.h" #include "modules/video_coding/decoder_database.h" #include "modules/video_coding/encoded_frame.h" #include "modules/video_coding/generic_decoder.h" -#include "modules/video_coding/timing.h" +#include "modules/video_coding/timing/timing.h" +#include "rtc_base/system/no_unique_address.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -28,47 +33,33 @@ namespace webrtc { // VideoCodingModule api. class VideoReceiver2 { public: - VideoReceiver2(Clock* clock, VCMTiming* timing); + VideoReceiver2(Clock* clock, + VCMTiming* timing, + const FieldTrialsView& field_trials); ~VideoReceiver2(); void RegisterReceiveCodec(uint8_t payload_type, const VideoDecoder::Settings& decoder_settings); + void DeregisterReceiveCodec(uint8_t payload_type); + void DeregisterReceiveCodecs(); - void RegisterExternalDecoder(VideoDecoder* externalDecoder, - uint8_t payloadType); - bool IsExternalDecoderRegistered(uint8_t payloadType) const; - int32_t RegisterReceiveCallback(VCMReceiveCallback* receiveCallback); + void RegisterExternalDecoder(std::unique_ptr decoder, + uint8_t payload_type); - int32_t Decode(const webrtc::VCMEncodedFrame* frame); + bool IsExternalDecoderRegistered(uint8_t payload_type) const; + int32_t RegisterReceiveCallback(VCMReceiveCallback* receive_callback); - // Notification methods that are used to check our internal state and validate - // threading assumptions. These are called by VideoReceiveStream. - // See `IsDecoderThreadRunning()` for more details. - void DecoderThreadStarting(); - void DecoderThreadStopped(); + int32_t Decode(const VCMEncodedFrame* frame); private: - // Used for DCHECKing thread correctness. - // In build where DCHECKs are enabled, will return false before - // DecoderThreadStarting is called, then true until DecoderThreadStopped - // is called. - // In builds where DCHECKs aren't enabled, it will return true. 
- bool IsDecoderThreadRunning(); - - SequenceChecker construction_sequence_checker_; - SequenceChecker decoder_sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker construction_sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker decoder_sequence_checker_; Clock* const clock_; - VCMTiming* timing_; - VCMDecodedFrameCallback decodedFrameCallback_; - + VCMDecodedFrameCallback decoded_frame_callback_; // Callbacks are set before the decoder thread starts. // Once the decoder thread has been started, usage of `_codecDataBase` moves // over to the decoder thread. - VCMDecoderDataBase codecDataBase_; - -#if RTC_DCHECK_IS_ON - bool decoder_thread_is_running_ = false; -#endif + VCMDecoderDatabase codec_database_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/OWNERS b/TMessagesProj/jni/voip/webrtc/modules/video_processing/OWNERS deleted file mode 100644 index 07c2987707..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/OWNERS +++ /dev/null @@ -1,2 +0,0 @@ -stefan@webrtc.org -marpan@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter.cc b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter.cc deleted file mode 100644 index 0e1570114a..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter.cc +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_processing/util/denoiser_filter.h" - -#include "modules/video_processing/util/denoiser_filter_c.h" -#include "rtc_base/checks.h" -#include "rtc_base/system/arch.h" -#include "system_wrappers/include/cpu_features_wrapper.h" - -#if defined(WEBRTC_ARCH_X86_FAMILY) -#include "modules/video_processing/util/denoiser_filter_sse2.h" -#elif defined(WEBRTC_HAS_NEON) -#include "modules/video_processing/util/denoiser_filter_neon.h" -#endif - -namespace webrtc { - -const int kMotionMagnitudeThreshold = 8 * 3; -const int kSumDiffThreshold = 96; -const int kSumDiffThresholdHigh = 448; - -std::unique_ptr DenoiserFilter::Create( - bool runtime_cpu_detection, - CpuType* cpu_type) { - std::unique_ptr filter; - - if (cpu_type != nullptr) - *cpu_type = CPU_NOT_NEON; - if (runtime_cpu_detection) { -// If we know the minimum architecture at compile time, avoid CPU detection. -#if defined(WEBRTC_ARCH_X86_FAMILY) -#if defined(__SSE2__) - filter.reset(new DenoiserFilterSSE2()); -#else - // x86 CPU detection required. 
- if (GetCPUInfo(kSSE2)) { - filter.reset(new DenoiserFilterSSE2()); - } else { - filter.reset(new DenoiserFilterC()); - } -#endif -#elif defined(WEBRTC_HAS_NEON) - filter.reset(new DenoiserFilterNEON()); - if (cpu_type != nullptr) - *cpu_type = CPU_NEON; -#else - filter.reset(new DenoiserFilterC()); -#endif - } else { - filter.reset(new DenoiserFilterC()); - } - - RTC_DCHECK(filter.get() != nullptr); - return filter; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter.h b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter.h deleted file mode 100644 index 1d574f4a4f..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter.h +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_H_ -#define MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_H_ - -#include - -#include - -namespace webrtc { - -extern const int kMotionMagnitudeThreshold; -extern const int kSumDiffThreshold; -extern const int kSumDiffThresholdHigh; - -enum DenoiserDecision { COPY_BLOCK, FILTER_BLOCK }; -enum CpuType { CPU_NEON, CPU_NOT_NEON }; - -class DenoiserFilter { - public: - static std::unique_ptr Create(bool runtime_cpu_detection, - CpuType* cpu_type); - - virtual ~DenoiserFilter() {} - virtual uint32_t Variance16x8(const uint8_t* a, - int a_stride, - const uint8_t* b, - int b_stride, - unsigned int* sse) = 0; - virtual DenoiserDecision MbDenoise(const uint8_t* mc_running_avg_y, - int mc_avg_y_stride, - uint8_t* running_avg_y, - int avg_y_stride, - const uint8_t* sig, - int sig_stride, - uint8_t motion_magnitude, - int increase_denoising) = 0; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_c.cc b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_c.cc deleted file mode 100644 index 55c0ea7b35..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_c.cc +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/video_processing/util/denoiser_filter_c.h" - -#include -#include - -namespace webrtc { - -uint32_t DenoiserFilterC::Variance16x8(const uint8_t* a, - int a_stride, - const uint8_t* b, - int b_stride, - uint32_t* sse) { - int sum = 0; - *sse = 0; - a_stride <<= 1; - b_stride <<= 1; - - for (int i = 0; i < 8; i++) { - for (int j = 0; j < 16; j++) { - const int diff = a[j] - b[j]; - sum += diff; - *sse += diff * diff; - } - - a += a_stride; - b += b_stride; - } - return *sse - ((static_cast(sum) * sum) >> 7); -} - -DenoiserDecision DenoiserFilterC::MbDenoise(const uint8_t* mc_running_avg_y, - int mc_avg_y_stride, - uint8_t* running_avg_y, - int avg_y_stride, - const uint8_t* sig, - int sig_stride, - uint8_t motion_magnitude, - int increase_denoising) { - int sum_diff_thresh = 0; - int sum_diff = 0; - int adj_val[3] = {3, 4, 6}; - int shift_inc1 = 0; - int shift_inc2 = 1; - int col_sum[16] = {0}; - if (motion_magnitude <= kMotionMagnitudeThreshold) { - if (increase_denoising) { - shift_inc1 = 1; - shift_inc2 = 2; - } - adj_val[0] += shift_inc2; - adj_val[1] += shift_inc2; - adj_val[2] += shift_inc2; - } - - for (int r = 0; r < 16; ++r) { - for (int c = 0; c < 16; ++c) { - int diff = 0; - int adjustment = 0; - int absdiff = 0; - - diff = mc_running_avg_y[c] - sig[c]; - absdiff = abs(diff); - - // When `diff` <= |3 + shift_inc1|, use pixel value from - // last denoised raw. - if (absdiff <= 3 + shift_inc1) { - running_avg_y[c] = mc_running_avg_y[c]; - col_sum[c] += diff; - } else { - if (absdiff >= 4 + shift_inc1 && absdiff <= 7) - adjustment = adj_val[0]; - else if (absdiff >= 8 && absdiff <= 15) - adjustment = adj_val[1]; - else - adjustment = adj_val[2]; - - if (diff > 0) { - if ((sig[c] + adjustment) > 255) - running_avg_y[c] = 255; - else - running_avg_y[c] = sig[c] + adjustment; - - col_sum[c] += adjustment; - } else { - if ((sig[c] - adjustment) < 0) - running_avg_y[c] = 0; - else - running_avg_y[c] = sig[c] - adjustment; - - col_sum[c] -= adjustment; - } - } - } - - // Update pointers for next iteration. - sig += sig_stride; - mc_running_avg_y += mc_avg_y_stride; - running_avg_y += avg_y_stride; - } - - for (int c = 0; c < 16; ++c) { - if (col_sum[c] >= 128) { - col_sum[c] = 127; - } - sum_diff += col_sum[c]; - } - - sum_diff_thresh = - increase_denoising ? kSumDiffThresholdHigh : kSumDiffThreshold; - if (abs(sum_diff) > sum_diff_thresh) - return COPY_BLOCK; - - return FILTER_BLOCK; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_c.h b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_c.h deleted file mode 100644 index 5633c171f0..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_c.h +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_C_H_ -#define MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_C_H_ - -#include - -#include "modules/video_processing/util/denoiser_filter.h" - -namespace webrtc { - -class DenoiserFilterC : public DenoiserFilter { - public: - DenoiserFilterC() {} - uint32_t Variance16x8(const uint8_t* a, - int a_stride, - const uint8_t* b, - int b_stride, - unsigned int* sse) override; - DenoiserDecision MbDenoise(const uint8_t* mc_running_avg_y, - int mc_avg_y_stride, - uint8_t* running_avg_y, - int avg_y_stride, - const uint8_t* sig, - int sig_stride, - uint8_t motion_magnitude, - int increase_denoising) override; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_C_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_neon.cc b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_neon.cc deleted file mode 100644 index e1e6ed4f18..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_neon.cc +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_processing/util/denoiser_filter_neon.h" - -#include - -namespace webrtc { - -const int kSumDiffThresholdHighNeon = 600; - -static int HorizontalAddS16x8(const int16x8_t v_16x8) { - const int32x4_t a = vpaddlq_s16(v_16x8); - const int64x2_t b = vpaddlq_s32(a); - const int32x2_t c = vadd_s32(vreinterpret_s32_s64(vget_low_s64(b)), - vreinterpret_s32_s64(vget_high_s64(b))); - return vget_lane_s32(c, 0); -} - -static int HorizontalAddS32x4(const int32x4_t v_32x4) { - const int64x2_t b = vpaddlq_s32(v_32x4); - const int32x2_t c = vadd_s32(vreinterpret_s32_s64(vget_low_s64(b)), - vreinterpret_s32_s64(vget_high_s64(b))); - return vget_lane_s32(c, 0); -} - -static void VarianceNeonW8(const uint8_t* a, - int a_stride, - const uint8_t* b, - int b_stride, - int w, - int h, - uint32_t* sse, - int64_t* sum) { - int16x8_t v_sum = vdupq_n_s16(0); - int32x4_t v_sse_lo = vdupq_n_s32(0); - int32x4_t v_sse_hi = vdupq_n_s32(0); - - for (int i = 0; i < h; ++i) { - for (int j = 0; j < w; j += 8) { - const uint8x8_t v_a = vld1_u8(&a[j]); - const uint8x8_t v_b = vld1_u8(&b[j]); - const uint16x8_t v_diff = vsubl_u8(v_a, v_b); - const int16x8_t sv_diff = vreinterpretq_s16_u16(v_diff); - v_sum = vaddq_s16(v_sum, sv_diff); - v_sse_lo = - vmlal_s16(v_sse_lo, vget_low_s16(sv_diff), vget_low_s16(sv_diff)); - v_sse_hi = - vmlal_s16(v_sse_hi, vget_high_s16(sv_diff), vget_high_s16(sv_diff)); - } - a += a_stride; - b += b_stride; - } - - *sum = HorizontalAddS16x8(v_sum); - *sse = - static_cast(HorizontalAddS32x4(vaddq_s32(v_sse_lo, v_sse_hi))); -} - -uint32_t DenoiserFilterNEON::Variance16x8(const uint8_t* a, - int a_stride, - const uint8_t* b, - int b_stride, - uint32_t* sse) { - int64_t sum = 0; - VarianceNeonW8(a, a_stride << 1, b, b_stride << 1, 16, 8, sse, &sum); - return *sse - ((sum * sum) >> 7); -} - -DenoiserDecision DenoiserFilterNEON::MbDenoise(const uint8_t* mc_running_avg_y, - int mc_running_avg_y_stride, - uint8_t* running_avg_y, - int running_avg_y_stride, - const uint8_t* sig, - 
int sig_stride, - uint8_t motion_magnitude, - int increase_denoising) { - // If motion_magnitude is small, making the denoiser more aggressive by - // increasing the adjustment for each level, level1 adjustment is - // increased, the deltas stay the same. - int shift_inc = - (increase_denoising && motion_magnitude <= kMotionMagnitudeThreshold) ? 1 - : 0; - int sum_diff_thresh = 0; - const uint8x16_t v_level1_adjustment = vmovq_n_u8( - (motion_magnitude <= kMotionMagnitudeThreshold) ? 4 + shift_inc : 3); - const uint8x16_t v_delta_level_1_and_2 = vdupq_n_u8(1); - const uint8x16_t v_delta_level_2_and_3 = vdupq_n_u8(2); - const uint8x16_t v_level1_threshold = vmovq_n_u8(4 + shift_inc); - const uint8x16_t v_level2_threshold = vdupq_n_u8(8); - const uint8x16_t v_level3_threshold = vdupq_n_u8(16); - int64x2_t v_sum_diff_total = vdupq_n_s64(0); - - // Go over lines. - for (int r = 0; r < 16; ++r) { - // Load inputs. - const uint8x16_t v_sig = vld1q_u8(sig); - const uint8x16_t v_mc_running_avg_y = vld1q_u8(mc_running_avg_y); - - // Calculate absolute difference and sign masks. - const uint8x16_t v_abs_diff = vabdq_u8(v_sig, v_mc_running_avg_y); - const uint8x16_t v_diff_pos_mask = vcltq_u8(v_sig, v_mc_running_avg_y); - const uint8x16_t v_diff_neg_mask = vcgtq_u8(v_sig, v_mc_running_avg_y); - - // Figure out which level that put us in. - const uint8x16_t v_level1_mask = vcleq_u8(v_level1_threshold, v_abs_diff); - const uint8x16_t v_level2_mask = vcleq_u8(v_level2_threshold, v_abs_diff); - const uint8x16_t v_level3_mask = vcleq_u8(v_level3_threshold, v_abs_diff); - - // Calculate absolute adjustments for level 1, 2 and 3. - const uint8x16_t v_level2_adjustment = - vandq_u8(v_level2_mask, v_delta_level_1_and_2); - const uint8x16_t v_level3_adjustment = - vandq_u8(v_level3_mask, v_delta_level_2_and_3); - const uint8x16_t v_level1and2_adjustment = - vaddq_u8(v_level1_adjustment, v_level2_adjustment); - const uint8x16_t v_level1and2and3_adjustment = - vaddq_u8(v_level1and2_adjustment, v_level3_adjustment); - - // Figure adjustment absolute value by selecting between the absolute - // difference if in level0 or the value for level 1, 2 and 3. - const uint8x16_t v_abs_adjustment = - vbslq_u8(v_level1_mask, v_level1and2and3_adjustment, v_abs_diff); - - // Calculate positive and negative adjustments. Apply them to the signal - // and accumulate them. Adjustments are less than eight and the maximum - // sum of them (7 * 16) can fit in a signed char. - const uint8x16_t v_pos_adjustment = - vandq_u8(v_diff_pos_mask, v_abs_adjustment); - const uint8x16_t v_neg_adjustment = - vandq_u8(v_diff_neg_mask, v_abs_adjustment); - - uint8x16_t v_running_avg_y = vqaddq_u8(v_sig, v_pos_adjustment); - v_running_avg_y = vqsubq_u8(v_running_avg_y, v_neg_adjustment); - - // Store results. - vst1q_u8(running_avg_y, v_running_avg_y); - - // Sum all the accumulators to have the sum of all pixel differences - // for this macroblock. - { - const int8x16_t v_sum_diff = - vqsubq_s8(vreinterpretq_s8_u8(v_pos_adjustment), - vreinterpretq_s8_u8(v_neg_adjustment)); - const int16x8_t fe_dc_ba_98_76_54_32_10 = vpaddlq_s8(v_sum_diff); - const int32x4_t fedc_ba98_7654_3210 = - vpaddlq_s16(fe_dc_ba_98_76_54_32_10); - const int64x2_t fedcba98_76543210 = vpaddlq_s32(fedc_ba98_7654_3210); - - v_sum_diff_total = vqaddq_s64(v_sum_diff_total, fedcba98_76543210); - } - - // Update pointers for next iteration. 
- sig += sig_stride; - mc_running_avg_y += mc_running_avg_y_stride; - running_avg_y += running_avg_y_stride; - } - - // Too much adjustments => copy block. - int64x1_t x = vqadd_s64(vget_high_s64(v_sum_diff_total), - vget_low_s64(v_sum_diff_total)); - int sum_diff = vget_lane_s32(vabs_s32(vreinterpret_s32_s64(x)), 0); - sum_diff_thresh = - increase_denoising ? kSumDiffThresholdHighNeon : kSumDiffThreshold; - if (sum_diff > sum_diff_thresh) - return COPY_BLOCK; - - // Tell above level that block was filtered. - running_avg_y -= running_avg_y_stride * 16; - sig -= sig_stride * 16; - - return FILTER_BLOCK; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_neon.h b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_neon.h deleted file mode 100644 index 4d9f271e5a..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_neon.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_NEON_H_ -#define MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_NEON_H_ - -#include "modules/video_processing/util/denoiser_filter.h" - -namespace webrtc { - -class DenoiserFilterNEON : public DenoiserFilter { - public: - DenoiserFilterNEON() {} - uint32_t Variance16x8(const uint8_t* a, - int a_stride, - const uint8_t* b, - int b_stride, - unsigned int* sse) override; - DenoiserDecision MbDenoise(const uint8_t* mc_running_avg_y, - int mc_avg_y_stride, - uint8_t* running_avg_y, - int avg_y_stride, - const uint8_t* sig, - int sig_stride, - uint8_t motion_magnitude, - int increase_denoising) override; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_NEON_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc deleted file mode 100644 index 5ca5f0cf34..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/video_processing/util/denoiser_filter_sse2.h" - -#include -#include -#include - -namespace webrtc { - -static void Get8x8varSse2(const uint8_t* src, - int src_stride, - const uint8_t* ref, - int ref_stride, - unsigned int* sse, - int* sum) { - const __m128i zero = _mm_setzero_si128(); - __m128i vsum = _mm_setzero_si128(); - __m128i vsse = _mm_setzero_si128(); - - for (int i = 0; i < 8; i += 2) { - const __m128i src0 = _mm_unpacklo_epi8( - _mm_loadl_epi64((const __m128i*)(src + i * src_stride)), zero); - const __m128i ref0 = _mm_unpacklo_epi8( - _mm_loadl_epi64((const __m128i*)(ref + i * ref_stride)), zero); - const __m128i diff0 = _mm_sub_epi16(src0, ref0); - - const __m128i src1 = _mm_unpacklo_epi8( - _mm_loadl_epi64((const __m128i*)(src + (i + 1) * src_stride)), zero); - const __m128i ref1 = _mm_unpacklo_epi8( - _mm_loadl_epi64((const __m128i*)(ref + (i + 1) * ref_stride)), zero); - const __m128i diff1 = _mm_sub_epi16(src1, ref1); - - vsum = _mm_add_epi16(vsum, diff0); - vsum = _mm_add_epi16(vsum, diff1); - vsse = _mm_add_epi32(vsse, _mm_madd_epi16(diff0, diff0)); - vsse = _mm_add_epi32(vsse, _mm_madd_epi16(diff1, diff1)); - } - - // sum - vsum = _mm_add_epi16(vsum, _mm_srli_si128(vsum, 8)); - vsum = _mm_add_epi16(vsum, _mm_srli_si128(vsum, 4)); - vsum = _mm_add_epi16(vsum, _mm_srli_si128(vsum, 2)); - *sum = static_cast(_mm_extract_epi16(vsum, 0)); - - // sse - vsse = _mm_add_epi32(vsse, _mm_srli_si128(vsse, 8)); - vsse = _mm_add_epi32(vsse, _mm_srli_si128(vsse, 4)); - *sse = _mm_cvtsi128_si32(vsse); -} - -static void VarianceSSE2(const unsigned char* src, - int src_stride, - const unsigned char* ref, - int ref_stride, - int w, - int h, - uint32_t* sse, - int64_t* sum, - int block_size) { - *sse = 0; - *sum = 0; - - for (int i = 0; i < h; i += block_size) { - for (int j = 0; j < w; j += block_size) { - uint32_t sse0 = 0; - int32_t sum0 = 0; - - Get8x8varSse2(src + src_stride * i + j, src_stride, - ref + ref_stride * i + j, ref_stride, &sse0, &sum0); - *sse += sse0; - *sum += sum0; - } - } -} - -// Compute the sum of all pixel differences of this MB. -static uint32_t AbsSumDiff16x1(__m128i acc_diff) { - const __m128i k_1 = _mm_set1_epi16(1); - const __m128i acc_diff_lo = - _mm_srai_epi16(_mm_unpacklo_epi8(acc_diff, acc_diff), 8); - const __m128i acc_diff_hi = - _mm_srai_epi16(_mm_unpackhi_epi8(acc_diff, acc_diff), 8); - const __m128i acc_diff_16 = _mm_add_epi16(acc_diff_lo, acc_diff_hi); - const __m128i hg_fe_dc_ba = _mm_madd_epi16(acc_diff_16, k_1); - const __m128i hgfe_dcba = - _mm_add_epi32(hg_fe_dc_ba, _mm_srli_si128(hg_fe_dc_ba, 8)); - const __m128i hgfedcba = - _mm_add_epi32(hgfe_dcba, _mm_srli_si128(hgfe_dcba, 4)); - unsigned int sum_diff = abs(_mm_cvtsi128_si32(hgfedcba)); - - return sum_diff; -} - -uint32_t DenoiserFilterSSE2::Variance16x8(const uint8_t* src, - int src_stride, - const uint8_t* ref, - int ref_stride, - uint32_t* sse) { - int64_t sum = 0; - VarianceSSE2(src, src_stride << 1, ref, ref_stride << 1, 16, 8, sse, &sum, 8); - return *sse - ((sum * sum) >> 7); -} - -DenoiserDecision DenoiserFilterSSE2::MbDenoise(const uint8_t* mc_running_avg_y, - int mc_avg_y_stride, - uint8_t* running_avg_y, - int avg_y_stride, - const uint8_t* sig, - int sig_stride, - uint8_t motion_magnitude, - int increase_denoising) { - DenoiserDecision decision = FILTER_BLOCK; - unsigned int sum_diff_thresh = 0; - int shift_inc = - (increase_denoising && motion_magnitude <= kMotionMagnitudeThreshold) ? 
1 - : 0; - __m128i acc_diff = _mm_setzero_si128(); - const __m128i k_0 = _mm_setzero_si128(); - const __m128i k_4 = _mm_set1_epi8(4 + shift_inc); - const __m128i k_8 = _mm_set1_epi8(8); - const __m128i k_16 = _mm_set1_epi8(16); - // Modify each level's adjustment according to motion_magnitude. - const __m128i l3 = _mm_set1_epi8( - (motion_magnitude <= kMotionMagnitudeThreshold) ? 7 + shift_inc : 6); - // Difference between level 3 and level 2 is 2. - const __m128i l32 = _mm_set1_epi8(2); - // Difference between level 2 and level 1 is 1. - const __m128i l21 = _mm_set1_epi8(1); - - for (int r = 0; r < 16; ++r) { - // Calculate differences. - const __m128i v_sig = - _mm_loadu_si128(reinterpret_cast(&sig[0])); - const __m128i v_mc_running_avg_y = - _mm_loadu_si128(reinterpret_cast(&mc_running_avg_y[0])); - __m128i v_running_avg_y; - const __m128i pdiff = _mm_subs_epu8(v_mc_running_avg_y, v_sig); - const __m128i ndiff = _mm_subs_epu8(v_sig, v_mc_running_avg_y); - // Obtain the sign. FF if diff is negative. - const __m128i diff_sign = _mm_cmpeq_epi8(pdiff, k_0); - // Clamp absolute difference to 16 to be used to get mask. Doing this - // allows us to use _mm_cmpgt_epi8, which operates on signed byte. - const __m128i clamped_absdiff = - _mm_min_epu8(_mm_or_si128(pdiff, ndiff), k_16); - // Get masks for l2 l1 and l0 adjustments. - const __m128i mask2 = _mm_cmpgt_epi8(k_16, clamped_absdiff); - const __m128i mask1 = _mm_cmpgt_epi8(k_8, clamped_absdiff); - const __m128i mask0 = _mm_cmpgt_epi8(k_4, clamped_absdiff); - // Get adjustments for l2, l1, and l0. - __m128i adj2 = _mm_and_si128(mask2, l32); - const __m128i adj1 = _mm_and_si128(mask1, l21); - const __m128i adj0 = _mm_and_si128(mask0, clamped_absdiff); - __m128i adj, padj, nadj; - - // Combine the adjustments and get absolute adjustments. - adj2 = _mm_add_epi8(adj2, adj1); - adj = _mm_sub_epi8(l3, adj2); - adj = _mm_andnot_si128(mask0, adj); - adj = _mm_or_si128(adj, adj0); - - // Restore the sign and get positive and negative adjustments. - padj = _mm_andnot_si128(diff_sign, adj); - nadj = _mm_and_si128(diff_sign, adj); - - // Calculate filtered value. - v_running_avg_y = _mm_adds_epu8(v_sig, padj); - v_running_avg_y = _mm_subs_epu8(v_running_avg_y, nadj); - _mm_storeu_si128(reinterpret_cast<__m128i*>(running_avg_y), - v_running_avg_y); - - // Adjustments <=7, and each element in acc_diff can fit in signed - // char. - acc_diff = _mm_adds_epi8(acc_diff, padj); - acc_diff = _mm_subs_epi8(acc_diff, nadj); - - // Update pointers for next iteration. - sig += sig_stride; - mc_running_avg_y += mc_avg_y_stride; - running_avg_y += avg_y_stride; - } - - // Compute the sum of all pixel differences of this MB. - unsigned int abs_sum_diff = AbsSumDiff16x1(acc_diff); - sum_diff_thresh = - increase_denoising ? kSumDiffThresholdHigh : kSumDiffThreshold; - if (abs_sum_diff > sum_diff_thresh) - decision = COPY_BLOCK; - return decision; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_sse2.h b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_sse2.h deleted file mode 100644 index 8fe4b905ae..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_sse2.h +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_SSE2_H_ -#define MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_SSE2_H_ - -#include - -#include "modules/video_processing/util/denoiser_filter.h" - -namespace webrtc { - -class DenoiserFilterSSE2 : public DenoiserFilter { - public: - DenoiserFilterSSE2() {} - uint32_t Variance16x8(const uint8_t* a, - int a_stride, - const uint8_t* b, - int b_stride, - unsigned int* sse) override; - DenoiserDecision MbDenoise(const uint8_t* mc_running_avg_y, - int mc_avg_y_stride, - uint8_t* running_avg_y, - int avg_y_stride, - const uint8_t* sig, - int sig_stride, - uint8_t motion_magnitude, - int increase_denoising) override; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_PROCESSING_UTIL_DENOISER_FILTER_SSE2_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/noise_estimation.cc b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/noise_estimation.cc deleted file mode 100644 index c72f764901..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/noise_estimation.cc +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_processing/util/noise_estimation.h" -#if DISPLAYNEON -#include -#endif - -namespace webrtc { - -void NoiseEstimation::Init(int width, int height, CpuType cpu_type) { - int mb_cols = width >> 4; - int mb_rows = height >> 4; - consec_low_var_.reset(new uint32_t[mb_cols * mb_rows]()); - width_ = width; - height_ = height; - mb_cols_ = width_ >> 4; - mb_rows_ = height_ >> 4; - cpu_type_ = cpu_type; -} - -void NoiseEstimation::GetNoise(int mb_index, uint32_t var, uint32_t luma) { - consec_low_var_[mb_index]++; - num_static_block_++; - if (consec_low_var_[mb_index] >= kConsecLowVarFrame && - (luma >> 6) < kAverageLumaMax && (luma >> 6) > kAverageLumaMin) { - // Normalized var by the average luma value, this gives more weight to - // darker blocks. - int nor_var = var / (luma >> 10); - noise_var_ += - nor_var > kBlockSelectionVarMax ? kBlockSelectionVarMax : nor_var; - num_noisy_block_++; - } -} - -void NoiseEstimation::ResetConsecLowVar(int mb_index) { - consec_low_var_[mb_index] = 0; -} - -void NoiseEstimation::UpdateNoiseLevel() { - // TODO(jackychen): Tune a threshold for numb_noisy_block > T to make the - // condition more reasonable. - // No enough samples implies the motion of the camera or too many moving - // objects in the frame. - if (num_static_block_ < - (0.65 * mb_cols_ * mb_rows_ / NOISE_SUBSAMPLE_INTERVAL) || - !num_noisy_block_) { -#if DISPLAY - printf("Not enough samples. %d \n", num_static_block_); -#elif DISPLAYNEON - __android_log_print(ANDROID_LOG_DEBUG, "DISPLAY", - "Not enough samples. 
%d \n", num_static_block_); -#endif - noise_var_ = 0; - noise_var_accum_ = 0; - num_noisy_block_ = 0; - num_static_block_ = 0; - return; - } else { -#if DISPLAY - printf("%d %d fraction = %.3f\n", num_static_block_, - mb_cols_ * mb_rows_ / NOISE_SUBSAMPLE_INTERVAL, - percent_static_block_); -#elif DISPLAYNEON - __android_log_print(ANDROID_LOG_DEBUG, "DISPLAY", "%d %d fraction = %.3f\n", - num_static_block_, - mb_cols_ * mb_rows_ / NOISE_SUBSAMPLE_INTERVAL, - percent_static_block_); -#endif - // Normalized by the number of noisy blocks. - noise_var_ /= num_noisy_block_; - // Get the percentage of static blocks. - percent_static_block_ = static_cast(num_static_block_) / - (mb_cols_ * mb_rows_ / NOISE_SUBSAMPLE_INTERVAL); - num_noisy_block_ = 0; - num_static_block_ = 0; - } - // For the first frame just update the value with current noise_var_, - // otherwise, use the averaging window. - if (noise_var_accum_ == 0) { - noise_var_accum_ = noise_var_; - } else { - noise_var_accum_ = (noise_var_accum_ * 15 + noise_var_) / 16; - } -#if DISPLAY - printf("noise_var_accum_ = %.1f, noise_var_ = %d.\n", noise_var_accum_, - noise_var_); -#elif DISPLAYNEON - __android_log_print(ANDROID_LOG_DEBUG, "DISPLAY", - "noise_var_accum_ = %.1f, noise_var_ = %d.\n", - noise_var_accum_, noise_var_); -#endif - // Reset noise_var_ for the next frame. - noise_var_ = 0; -} - -uint8_t NoiseEstimation::GetNoiseLevel() { - int noise_thr = cpu_type_ ? kNoiseThreshold : kNoiseThresholdNeon; - UpdateNoiseLevel(); - if (noise_var_accum_ > noise_thr) { - return 1; - } - return 0; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/noise_estimation.h b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/noise_estimation.h deleted file mode 100644 index 4c5f10f1d8..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/noise_estimation.h +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_PROCESSING_UTIL_NOISE_ESTIMATION_H_ -#define MODULES_VIDEO_PROCESSING_UTIL_NOISE_ESTIMATION_H_ - -#include -#include - -#include "modules/video_processing/util/denoiser_filter.h" - -namespace webrtc { - -#define DISPLAY 0 // Rectangle diagnostics -#define DISPLAYNEON 0 // Rectangle diagnostics on NEON - -const int kNoiseThreshold = 150; -const int kNoiseThresholdNeon = 70; -const int kConsecLowVarFrame = 6; -const int kAverageLumaMin = 20; -const int kAverageLumaMax = 220; -const int kBlockSelectionVarMax = kNoiseThreshold << 1; - -// TODO(jackychen): To test different sampling strategy. -// Collect noise data every NOISE_SUBSAMPLE_INTERVAL blocks. -#define NOISE_SUBSAMPLE_INTERVAL 41 - -class NoiseEstimation { - public: - void Init(int width, int height, CpuType cpu_type); - // Collect noise data from one qualified block. - void GetNoise(int mb_index, uint32_t var, uint32_t luma); - // Reset the counter for consecutive low-var blocks. - void ResetConsecLowVar(int mb_index); - // Update noise level for current frame. 
- void UpdateNoiseLevel(); - // 0: low noise, 1: high noise - uint8_t GetNoiseLevel(); - - private: - int width_; - int height_; - int mb_rows_; - int mb_cols_; - int num_noisy_block_; - int num_static_block_; - CpuType cpu_type_; - uint32_t noise_var_; - double noise_var_accum_; - double percent_static_block_; - std::unique_ptr consec_low_var_; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_PROCESSING_UTIL_NOISE_ESTIMATION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/skin_detection.cc b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/skin_detection.cc deleted file mode 100644 index 76399d6e73..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/skin_detection.cc +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_processing/util/skin_detection.h" - -namespace webrtc { - -// Fixed-point skin color model parameters. -static const int skin_mean[5][2] = {{7463, 9614}, - {6400, 10240}, - {7040, 10240}, - {8320, 9280}, - {6800, 9614}}; -static const int skin_inv_cov[4] = {4107, 1663, 1663, 2157}; // q16 -static const int skin_threshold[6] = {1570636, 1400000, 800000, - 800000, 800000, 800000}; // q18 - -// Thresholds on luminance. -static const int y_low = 40; -static const int y_high = 220; - -// Evaluates the Mahalanobis distance measure for the input CbCr values. -static int EvaluateSkinColorDifference(int cb, int cr, int idx) { - const int cb_q6 = cb << 6; - const int cr_q6 = cr << 6; - const int cb_diff_q12 = - (cb_q6 - skin_mean[idx][0]) * (cb_q6 - skin_mean[idx][0]); - const int cbcr_diff_q12 = - (cb_q6 - skin_mean[idx][0]) * (cr_q6 - skin_mean[idx][1]); - const int cr_diff_q12 = - (cr_q6 - skin_mean[idx][1]) * (cr_q6 - skin_mean[idx][1]); - const int cb_diff_q2 = (cb_diff_q12 + (1 << 9)) >> 10; - const int cbcr_diff_q2 = (cbcr_diff_q12 + (1 << 9)) >> 10; - const int cr_diff_q2 = (cr_diff_q12 + (1 << 9)) >> 10; - const int skin_diff = - skin_inv_cov[0] * cb_diff_q2 + skin_inv_cov[1] * cbcr_diff_q2 + - skin_inv_cov[2] * cbcr_diff_q2 + skin_inv_cov[3] * cr_diff_q2; - return skin_diff; -} - -static int SkinPixel(const uint8_t y, const uint8_t cb, const uint8_t cr) { - if (y < y_low || y > y_high) { - return 0; - } else { - if (MODEL_MODE == 0) { - return (EvaluateSkinColorDifference(cb, cr, 0) < skin_threshold[0]); - } else { - // Exit on grey. - if (cb == 128 && cr == 128) - return 0; - // Exit on very strong cb. - if (cb > 150 && cr < 110) - return 0; - // Exit on (another) low luminance threshold if either color is high. - if (y < 50 && (cb > 140 || cr > 140)) - return 0; - for (int i = 0; i < 5; i++) { - int diff = EvaluateSkinColorDifference(cb, cr, i); - if (diff < skin_threshold[i + 1]) { - return 1; - } else if (diff > (skin_threshold[i + 1] << 3)) { - // Exit if difference is much large than the threshold. 
- return 0; - } - } - return 0; - } - } -} - -bool MbHasSkinColor(const uint8_t* y_src, - const uint8_t* u_src, - const uint8_t* v_src, - const int stride_y, - const int stride_u, - const int stride_v, - const int mb_row, - const int mb_col) { - const uint8_t* y = y_src + ((mb_row << 4) + 8) * stride_y + (mb_col << 4) + 8; - const uint8_t* u = u_src + ((mb_row << 3) + 4) * stride_u + (mb_col << 3) + 4; - const uint8_t* v = v_src + ((mb_row << 3) + 4) * stride_v + (mb_col << 3) + 4; - // Use 2x2 average of center pixel to compute skin area. - uint8_t y_avg = (*y + *(y + 1) + *(y + stride_y) + *(y + stride_y + 1)) >> 2; - uint8_t u_avg = (*u + *(u + 1) + *(u + stride_u) + *(u + stride_u + 1)) >> 2; - uint8_t v_avg = (*v + *(v + 1) + *(v + stride_v) + *(v + stride_v + 1)) >> 2; - return SkinPixel(y_avg, u_avg, v_avg) == 1; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/skin_detection.h b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/skin_detection.h deleted file mode 100644 index 7be791f2d8..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/skin_detection.h +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_PROCESSING_UTIL_SKIN_DETECTION_H_ -#define MODULES_VIDEO_PROCESSING_UTIL_SKIN_DETECTION_H_ - -namespace webrtc { - -#define MODEL_MODE 0 - -typedef unsigned char uint8_t; -bool MbHasSkinColor(const uint8_t* y_src, - const uint8_t* u_src, - const uint8_t* v_src, - const int stride_y, - const int stride_u, - const int stride_v, - const int mb_row, - const int mb_col); - -} // namespace webrtc - -#endif // MODULES_VIDEO_PROCESSING_UTIL_SKIN_DETECTION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.cc b/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.cc deleted file mode 100644 index 1d844e61de..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.cc +++ /dev/null @@ -1,339 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/video_processing/video_denoiser.h" - -#include -#include - -#include "api/video/i420_buffer.h" -#include "third_party/libyuv/include/libyuv/planar_functions.h" - -namespace webrtc { - -#if DISPLAY || DISPLAYNEON -static void ShowRect(const std::unique_ptr& filter, - const std::unique_ptr& d_status, - const std::unique_ptr& moving_edge_red, - const std::unique_ptr& x_density, - const std::unique_ptr& y_density, - const uint8_t* u_src, - int stride_u_src, - const uint8_t* v_src, - int stride_v_src, - uint8_t* u_dst, - int stride_u_dst, - uint8_t* v_dst, - int stride_v_dst, - int mb_rows_, - int mb_cols_) { - for (int mb_row = 0; mb_row < mb_rows_; ++mb_row) { - for (int mb_col = 0; mb_col < mb_cols_; ++mb_col) { - int mb_index = mb_row * mb_cols_ + mb_col; - const uint8_t* mb_src_u = - u_src + (mb_row << 3) * stride_u_src + (mb_col << 3); - const uint8_t* mb_src_v = - v_src + (mb_row << 3) * stride_v_src + (mb_col << 3); - uint8_t* mb_dst_u = u_dst + (mb_row << 3) * stride_u_dst + (mb_col << 3); - uint8_t* mb_dst_v = v_dst + (mb_row << 3) * stride_v_dst + (mb_col << 3); - uint8_t uv_tmp[8 * 8]; - memset(uv_tmp, 200, 8 * 8); - if (d_status[mb_index] == 1) { - // Paint to red. - libyuv::CopyPlane(mb_src_u, stride_u_src, mb_dst_u, stride_u_dst, 8, 8); - libyuv::CopyPlane(uv_tmp, 8, mb_dst_v, stride_v_dst, 8, 8); - } else if (moving_edge_red[mb_row * mb_cols_ + mb_col] && - x_density[mb_col] * y_density[mb_row]) { - // Paint to blue. - libyuv::CopyPlane(uv_tmp, 8, mb_dst_u, stride_u_dst, 8, 8); - libyuv::CopyPlane(mb_src_v, stride_v_src, mb_dst_v, stride_v_dst, 8, 8); - } else { - libyuv::CopyPlane(mb_src_u, stride_u_src, mb_dst_u, stride_u_dst, 8, 8); - libyuv::CopyPlane(mb_src_v, stride_v_src, mb_dst_v, stride_v_dst, 8, 8); - } - } - } -} -#endif - -VideoDenoiser::VideoDenoiser(bool runtime_cpu_detection) - : width_(0), - height_(0), - filter_(DenoiserFilter::Create(runtime_cpu_detection, &cpu_type_)), - ne_(new NoiseEstimation()) {} - -void VideoDenoiser::DenoiserReset( - rtc::scoped_refptr frame) { - width_ = frame->width(); - height_ = frame->height(); - mb_cols_ = width_ >> 4; - mb_rows_ = height_ >> 4; - - // Init noise estimator and allocate buffers. - ne_->Init(width_, height_, cpu_type_); - moving_edge_.reset(new uint8_t[mb_cols_ * mb_rows_]); - mb_filter_decision_.reset(new DenoiserDecision[mb_cols_ * mb_rows_]); - x_density_.reset(new uint8_t[mb_cols_]); - y_density_.reset(new uint8_t[mb_rows_]); - moving_object_.reset(new uint8_t[mb_cols_ * mb_rows_]); -} - -int VideoDenoiser::PositionCheck(int mb_row, int mb_col, int noise_level) { - if (noise_level == 0) - return 1; - if ((mb_row <= (mb_rows_ >> 4)) || (mb_col <= (mb_cols_ >> 4)) || - (mb_col >= (15 * mb_cols_ >> 4))) - return 3; - else if ((mb_row <= (mb_rows_ >> 3)) || (mb_col <= (mb_cols_ >> 3)) || - (mb_col >= (7 * mb_cols_ >> 3))) - return 2; - else - return 1; -} - -void VideoDenoiser::ReduceFalseDetection( - const std::unique_ptr& d_status, - std::unique_ptr* moving_edge_red, - int noise_level) { - // From up left corner. - int mb_col_stop = mb_cols_ - 1; - for (int mb_row = 0; mb_row <= mb_rows_ - 1; ++mb_row) { - for (int mb_col = 0; mb_col <= mb_col_stop; ++mb_col) { - if (d_status[mb_row * mb_cols_ + mb_col]) { - mb_col_stop = mb_col - 1; - break; - } - (*moving_edge_red)[mb_row * mb_cols_ + mb_col] = 0; - } - } - // From bottom left corner. 
- mb_col_stop = mb_cols_ - 1; - for (int mb_row = mb_rows_ - 1; mb_row >= 0; --mb_row) { - for (int mb_col = 0; mb_col <= mb_col_stop; ++mb_col) { - if (d_status[mb_row * mb_cols_ + mb_col]) { - mb_col_stop = mb_col - 1; - break; - } - (*moving_edge_red)[mb_row * mb_cols_ + mb_col] = 0; - } - } - // From up right corner. - mb_col_stop = 0; - for (int mb_row = 0; mb_row <= mb_rows_ - 1; ++mb_row) { - for (int mb_col = mb_cols_ - 1; mb_col >= mb_col_stop; --mb_col) { - if (d_status[mb_row * mb_cols_ + mb_col]) { - mb_col_stop = mb_col + 1; - break; - } - (*moving_edge_red)[mb_row * mb_cols_ + mb_col] = 0; - } - } - // From bottom right corner. - mb_col_stop = 0; - for (int mb_row = mb_rows_ - 1; mb_row >= 0; --mb_row) { - for (int mb_col = mb_cols_ - 1; mb_col >= mb_col_stop; --mb_col) { - if (d_status[mb_row * mb_cols_ + mb_col]) { - mb_col_stop = mb_col + 1; - break; - } - (*moving_edge_red)[mb_row * mb_cols_ + mb_col] = 0; - } - } -} - -bool VideoDenoiser::IsTrailingBlock(const std::unique_ptr& d_status, - int mb_row, - int mb_col) { - bool ret = false; - int mb_index = mb_row * mb_cols_ + mb_col; - if (!mb_row || !mb_col || mb_row == mb_rows_ - 1 || mb_col == mb_cols_ - 1) - ret = false; - else - ret = d_status[mb_index + 1] || d_status[mb_index - 1] || - d_status[mb_index + mb_cols_] || d_status[mb_index - mb_cols_]; - return ret; -} - -void VideoDenoiser::CopySrcOnMOB(const uint8_t* y_src, - int stride_src, - uint8_t* y_dst, - int stride_dst) { - // Loop over to copy src block if the block is marked as moving object block - // or if the block may cause trailing artifacts. - for (int mb_row = 0; mb_row < mb_rows_; ++mb_row) { - const int mb_index_base = mb_row * mb_cols_; - const uint8_t* mb_src_base = y_src + (mb_row << 4) * stride_src; - uint8_t* mb_dst_base = y_dst + (mb_row << 4) * stride_dst; - for (int mb_col = 0; mb_col < mb_cols_; ++mb_col) { - const int mb_index = mb_index_base + mb_col; - const uint32_t offset_col = mb_col << 4; - const uint8_t* mb_src = mb_src_base + offset_col; - uint8_t* mb_dst = mb_dst_base + offset_col; - // Check if the block is a moving object block or may cause a trailing - // artifacts. - if (mb_filter_decision_[mb_index] != FILTER_BLOCK || - IsTrailingBlock(moving_edge_, mb_row, mb_col) || - (x_density_[mb_col] * y_density_[mb_row] && - moving_object_[mb_row * mb_cols_ + mb_col])) { - // Copy y source. - libyuv::CopyPlane(mb_src, stride_src, mb_dst, stride_dst, 16, 16); - } - } - } -} - -void VideoDenoiser::CopyLumaOnMargin(const uint8_t* y_src, - int stride_src, - uint8_t* y_dst, - int stride_dst) { - int height_margin = height_ - (mb_rows_ << 4); - if (height_margin > 0) { - const uint8_t* margin_y_src = y_src + (mb_rows_ << 4) * stride_src; - uint8_t* margin_y_dst = y_dst + (mb_rows_ << 4) * stride_dst; - libyuv::CopyPlane(margin_y_src, stride_src, margin_y_dst, stride_dst, - width_, height_margin); - } - int width_margin = width_ - (mb_cols_ << 4); - if (width_margin > 0) { - const uint8_t* margin_y_src = y_src + (mb_cols_ << 4); - uint8_t* margin_y_dst = y_dst + (mb_cols_ << 4); - libyuv::CopyPlane(margin_y_src, stride_src, margin_y_dst, stride_dst, - width_ - (mb_cols_ << 4), mb_rows_ << 4); - } -} - -rtc::scoped_refptr VideoDenoiser::DenoiseFrame( - rtc::scoped_refptr frame, - bool noise_estimation_enabled) { - // If previous width and height are different from current frame's, need to - // reallocate the buffers and no denoising for the current frame. 
- if (!prev_buffer_ || width_ != frame->width() || height_ != frame->height()) { - DenoiserReset(frame); - prev_buffer_ = frame; - return frame; - } - - // Set buffer pointers. - const uint8_t* y_src = frame->DataY(); - int stride_y_src = frame->StrideY(); - rtc::scoped_refptr dst = - buffer_pool_.CreateI420Buffer(width_, height_); - - uint8_t* y_dst = dst->MutableDataY(); - int stride_y_dst = dst->StrideY(); - - const uint8_t* y_dst_prev = prev_buffer_->DataY(); - int stride_prev = prev_buffer_->StrideY(); - - memset(x_density_.get(), 0, mb_cols_); - memset(y_density_.get(), 0, mb_rows_); - memset(moving_object_.get(), 1, mb_cols_ * mb_rows_); - - uint8_t noise_level = noise_estimation_enabled ? ne_->GetNoiseLevel() : 0; - int thr_var_base = 16 * 16 * 2; - // Loop over blocks to accumulate/extract noise level and update x/y_density - // factors for moving object detection. - for (int mb_row = 0; mb_row < mb_rows_; ++mb_row) { - const int mb_index_base = mb_row * mb_cols_; - const uint8_t* mb_src_base = y_src + (mb_row << 4) * stride_y_src; - uint8_t* mb_dst_base = y_dst + (mb_row << 4) * stride_y_dst; - const uint8_t* mb_dst_prev_base = y_dst_prev + (mb_row << 4) * stride_prev; - for (int mb_col = 0; mb_col < mb_cols_; ++mb_col) { - const int mb_index = mb_index_base + mb_col; - const bool ne_enable = (mb_index % NOISE_SUBSAMPLE_INTERVAL == 0); - const int pos_factor = PositionCheck(mb_row, mb_col, noise_level); - const uint32_t thr_var_adp = thr_var_base * pos_factor; - const uint32_t offset_col = mb_col << 4; - const uint8_t* mb_src = mb_src_base + offset_col; - uint8_t* mb_dst = mb_dst_base + offset_col; - const uint8_t* mb_dst_prev = mb_dst_prev_base + offset_col; - - // TODO(jackychen): Need SSE2/NEON opt. - int luma = 0; - if (ne_enable) { - for (int i = 4; i < 12; ++i) { - for (int j = 4; j < 12; ++j) { - luma += mb_src[i * stride_y_src + j]; - } - } - } - - // Get the filtered block and filter_decision. - mb_filter_decision_[mb_index] = - filter_->MbDenoise(mb_dst_prev, stride_prev, mb_dst, stride_y_dst, - mb_src, stride_y_src, 0, noise_level); - - // If filter decision is FILTER_BLOCK, no need to check moving edge. - // It is unlikely for a moving edge block to be filtered in current - // setting. - if (mb_filter_decision_[mb_index] == FILTER_BLOCK) { - uint32_t sse_t = 0; - if (ne_enable) { - // The variance used in noise estimation is based on the src block in - // time t (mb_src) and filtered block in time t-1 (mb_dist_prev). - uint32_t noise_var = filter_->Variance16x8( - mb_dst_prev, stride_y_dst, mb_src, stride_y_src, &sse_t); - ne_->GetNoise(mb_index, noise_var, luma); - } - moving_edge_[mb_index] = 0; // Not a moving edge block. - } else { - uint32_t sse_t = 0; - // The variance used in MOD is based on the filtered blocks in time - // T (mb_dst) and T-1 (mb_dst_prev). - uint32_t noise_var = filter_->Variance16x8( - mb_dst_prev, stride_prev, mb_dst, stride_y_dst, &sse_t); - if (noise_var > thr_var_adp) { // Moving edge checking. - if (ne_enable) { - ne_->ResetConsecLowVar(mb_index); - } - moving_edge_[mb_index] = 1; // Mark as moving edge block. - x_density_[mb_col] += (pos_factor < 3); - y_density_[mb_row] += (pos_factor < 3); - } else { - moving_edge_[mb_index] = 0; - if (ne_enable) { - // The variance used in noise estimation is based on the src block - // in time t (mb_src) and filtered block in time t-1 (mb_dist_prev). 
- uint32_t noise_var = filter_->Variance16x8( - mb_dst_prev, stride_prev, mb_src, stride_y_src, &sse_t); - ne_->GetNoise(mb_index, noise_var, luma); - } - } - } - } // End of for loop - } // End of for loop - - ReduceFalseDetection(moving_edge_, &moving_object_, noise_level); - - CopySrcOnMOB(y_src, stride_y_src, y_dst, stride_y_dst); - - // When frame width/height not divisible by 16, copy the margin to - // denoised_frame. - if ((mb_rows_ << 4) != height_ || (mb_cols_ << 4) != width_) - CopyLumaOnMargin(y_src, stride_y_src, y_dst, stride_y_dst); - - // Copy u/v planes. - libyuv::CopyPlane(frame->DataU(), frame->StrideU(), dst->MutableDataU(), - dst->StrideU(), (width_ + 1) >> 1, (height_ + 1) >> 1); - libyuv::CopyPlane(frame->DataV(), frame->StrideV(), dst->MutableDataV(), - dst->StrideV(), (width_ + 1) >> 1, (height_ + 1) >> 1); - -#if DISPLAY || DISPLAYNEON - // Show rectangular region - ShowRect(filter_, moving_edge_, moving_object_, x_density_, y_density_, - frame->DataU(), frame->StrideU(), frame->DataV(), frame->StrideV(), - dst->MutableDataU(), dst->StrideU(), dst->MutableDataV(), - dst->StrideV(), mb_rows_, mb_cols_); -#endif - prev_buffer_ = dst; - return dst; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.h b/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.h deleted file mode 100644 index eb98c5bc53..0000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.h +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_PROCESSING_VIDEO_DENOISER_H_ -#define MODULES_VIDEO_PROCESSING_VIDEO_DENOISER_H_ - -#include - -#include "api/scoped_refptr.h" -#include "api/video/video_frame_buffer.h" -#include "common_video/include/video_frame_buffer_pool.h" -#include "modules/video_processing/util/denoiser_filter.h" -#include "modules/video_processing/util/noise_estimation.h" -#include "modules/video_processing/util/skin_detection.h" - -namespace webrtc { - -class VideoDenoiser { - public: - explicit VideoDenoiser(bool runtime_cpu_detection); - - rtc::scoped_refptr DenoiseFrame( - rtc::scoped_refptr frame, - bool noise_estimation_enabled); - - private: - void DenoiserReset(rtc::scoped_refptr frame); - - // Check the mb position, return 1: close to the frame center (between 1/8 - // and 7/8 of width/height), 3: close to the border (out of 1/16 and 15/16 - // of width/height), 2: in between. - int PositionCheck(int mb_row, int mb_col, int noise_level); - - // To reduce false detection in moving object detection (MOD). - void ReduceFalseDetection(const std::unique_ptr& d_status, - std::unique_ptr* d_status_red, - int noise_level); - - // Return whether a block might cause trailing artifact by checking if one of - // its neighbor blocks is a moving edge block. - bool IsTrailingBlock(const std::unique_ptr& d_status, - int mb_row, - int mb_col); - - // Copy input blocks to dst buffer on moving object blocks (MOB). 
- void CopySrcOnMOB(const uint8_t* y_src, - int stride_src, - uint8_t* y_dst, - int stride_dst); - - // Copy luma margin blocks when frame width/height not divisible by 16. - void CopyLumaOnMargin(const uint8_t* y_src, - int stride_src, - uint8_t* y_dst, - int stride_dst); - - int width_; - int height_; - int mb_rows_; - int mb_cols_; - CpuType cpu_type_; - std::unique_ptr filter_; - std::unique_ptr ne_; - // 1 for moving edge block, 0 for static block. - std::unique_ptr moving_edge_; - // 1 for moving object block, 0 for static block. - std::unique_ptr moving_object_; - // x_density_ and y_density_ are used in MOD process. - std::unique_ptr x_density_; - std::unique_ptr y_density_; - // Save the return values by MbDenoise for each block. - std::unique_ptr mb_filter_decision_; - VideoFrameBufferPool buffer_pool_; - rtc::scoped_refptr prev_buffer_; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_PROCESSING_VIDEO_DENOISER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.cc index b4b6224ec4..e8fcacffa0 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.cc @@ -435,6 +435,7 @@ void FuzzSocket(DcSctpSocketInterface& socket, options.unordered = IsUnordered(flags & 0x01); options.max_retransmissions = (flags & 0x02) != 0 ? absl::make_optional(0) : absl::nullopt; + options.lifecycle_id = LifecycleId(42); size_t payload_exponent = (flags >> 2) % 16; size_t payload_size = static_cast(1) << payload_exponent; socket.Send(DcSctpMessage(StreamID(state.GetByte()), PPID(53), diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.h index f3de0722f4..90cfa35099 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.h @@ -16,6 +16,7 @@ #include #include "api/array_view.h" +#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/public/dcsctp_socket.h" namespace dcsctp { @@ -58,7 +59,9 @@ class FuzzerCallbacks : public DcSctpSocketCallbacks { void SendPacket(rtc::ArrayView data) override { sent_packets_.emplace_back(std::vector(data.begin(), data.end())); } - std::unique_ptr CreateTimeout() override { + std::unique_ptr CreateTimeout( + webrtc::TaskQueueBase::DelayPrecision precision) override { + // The fuzzer timeouts don't implement |precision|. 
return std::make_unique(active_timeouts_); } TimeMs TimeMillis() override { return TimeMs(42); } diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/bounded_byte_writer.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/bounded_byte_writer.h index 467f26800b..d754549e4f 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/bounded_byte_writer.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/bounded_byte_writer.h @@ -88,8 +88,11 @@ class BoundedByteWriter { } void CopyToVariableData(rtc::ArrayView source) { - memcpy(data_.data() + FixedSize, source.data(), - std::min(source.size(), data_.size() - FixedSize)); + size_t copy_size = std::min(source.size(), data_.size() - FixedSize); + if (source.data() == nullptr || copy_size == 0) { + return; + } + memcpy(data_.data() + FixedSize, source.data(), copy_size); } private: diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/forward_tsn_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/forward_tsn_chunk.cc index f01505094d..e432114c50 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/forward_tsn_chunk.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/forward_tsn_chunk.cc @@ -87,6 +87,9 @@ void ForwardTsnChunk::SerializeTo(std::vector& out) const { std::string ForwardTsnChunk::ToString() const { rtc::StringBuilder sb; sb << "FORWARD-TSN, new_cumulative_tsn=" << *new_cumulative_tsn(); + for (const auto& skipped : skipped_streams()) { + sb << ", skip " << skipped.stream_id.value() << ":" << *skipped.ssn; + } return sb.str(); } } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_handover_state.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_handover_state.h index a58535d45f..253f4da939 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_handover_state.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_handover_state.h @@ -40,6 +40,8 @@ struct DcSctpSocketHandoverState { bool partial_reliability = false; bool message_interleaving = false; bool reconfig = false; + uint16_t negotiated_maximum_incoming_streams = 0; + uint16_t negotiated_maximum_outgoing_streams = 0; }; Capabilities capabilities; @@ -48,6 +50,7 @@ struct DcSctpSocketHandoverState { uint32_t next_ssn = 0; uint32_t next_unordered_mid = 0; uint32_t next_ordered_mid = 0; + uint16_t priority = 0; }; struct Transmission { uint32_t next_tsn = 0; diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_options.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_options.h index c394552e22..4511bed4a4 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_options.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_options.h @@ -71,6 +71,11 @@ struct DcSctpOptions { // `max_receiver_window_buffer_size`). size_t max_message_size = 256 * 1024; + // The default stream priority, if not overridden by + // `SctpSocket::SetStreamPriority`. The default value is selected to be + // compatible with https://www.w3.org/TR/webrtc-priority/, section 4.2-4.3. + StreamPriority default_stream_priority = StreamPriority(256); + // Maximum received window buffer size. This should be a bit larger than the // largest sized message you want to be able to receive. This essentially // limits the memory usage on the receive side. 
Note that memory is allocated diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket.h index 2b56094ac1..2df6a2c009 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket.h @@ -17,6 +17,7 @@ #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/array_view.h" +#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/public/dcsctp_handover_state.h" #include "net/dcsctp/public/dcsctp_message.h" #include "net/dcsctp/public/dcsctp_options.h" @@ -53,6 +54,11 @@ struct SendOptions { // If set, limits the number of retransmissions. This is only available // if the peer supports Partial Reliability Extension (RFC3758). absl::optional max_retransmissions = absl::nullopt; + + // If set, will generate lifecycle events for this message. See e.g. + // `DcSctpSocketCallbacks::OnLifecycleMessageFullySent`. This value is decided + // by the client and the library will provide it to all lifecycle callbacks. + LifecycleId lifecycle_id = LifecycleId::NotSet(); }; enum class ErrorKind { @@ -169,6 +175,31 @@ enum class SendPacketStatus { kError, }; +// Represent known SCTP implementations. +enum class SctpImplementation { + // There is not enough information toto determine any SCTP implementation. + kUnknown, + // This implementation. + kDcsctp, + // https://github.com/sctplab/usrsctp. + kUsrSctp, + // Any other implementation. + kOther, +}; + +inline constexpr absl::string_view ToString(SctpImplementation implementation) { + switch (implementation) { + case SctpImplementation::kUnknown: + return "unknown"; + case SctpImplementation::kDcsctp: + return "dcsctp"; + case SctpImplementation::kUsrSctp: + return "usrsctp"; + case SctpImplementation::kOther: + return "other"; + } +} + // Tracked metrics, which is the return value of GetMetrics. Optional members // will be unset when they are not yet known. struct Metrics { @@ -182,10 +213,10 @@ struct Metrics { // The current congestion window (cwnd) in bytes, corresponding to spinfo_cwnd // defined in RFC6458. - absl::optional cwnd_bytes = absl::nullopt; + size_t cwnd_bytes = 0; // Smoothed round trip time, corresponding to spinfo_srtt defined in RFC6458. - absl::optional srtt_ms = absl::nullopt; + int srtt_ms = 0; // Number of data items in the retransmission queue that haven’t been // acked/nacked yet and are in-flight. Corresponding to sstat_unackdata @@ -203,34 +234,25 @@ struct Metrics { // The peer’s last announced receiver window size, corresponding to // sstat_rwnd defined in RFC6458. - absl::optional peer_rwnd_bytes = absl::nullopt; -}; + uint32_t peer_rwnd_bytes = 0; -// Represent known SCTP implementations. -enum class SctpImplementation { - // There is not enough information toto determine any SCTP implementation. - kUnknown, - // This implementation. - kDcsctp, - // https://github.com/sctplab/usrsctp. - kUsrSctp, - // Any other implementation. - kOther, + // Returns the detected SCTP implementation of the peer. As this is not + // explicitly signalled during the connection establishment, heuristics is + // used to analyze e.g. the state cookie in the INIT-ACK chunk. + SctpImplementation peer_implementation = SctpImplementation::kUnknown; + + // Indicates if RFC8260 User Message Interleaving has been negotiated by both + // peers. 
+ bool uses_message_interleaving = false; + + // The number of negotiated incoming and outgoing streams, which is configured + // locally as `DcSctpOptions::announced_maximum_incoming_streams` and + // `DcSctpOptions::announced_maximum_outgoing_streams`, and which will be + // signaled by the peer during connection. + uint16_t negotiated_maximum_incoming_streams = 0; + uint16_t negotiated_maximum_outgoing_streams = 0; }; -inline constexpr absl::string_view ToString(SctpImplementation implementation) { - switch (implementation) { - case SctpImplementation::kUnknown: - return "unknown"; - case SctpImplementation::kDcsctp: - return "dcsctp"; - case SctpImplementation::kUsrSctp: - return "usrsctp"; - case SctpImplementation::kOther: - return "other"; - } -} - // Callbacks that the DcSctpSocket will call synchronously to the owning // client. It is allowed to call back into the library from callbacks that start // with "On". It has been explicitly documented when it's not allowed to call @@ -264,9 +286,26 @@ class DcSctpSocketCallbacks { // Called when the library wants to create a Timeout. The callback must return // an object that implements that interface. // + // Low precision tasks are scheduled more efficiently by using leeway to + // reduce Idle Wake Ups and is the preferred precision whenever possible. High + // precision timeouts do not have this leeway, but is still limited by OS + // timer precision. At the time of writing, kLow's additional leeway may be up + // to 17 ms, but please see webrtc::TaskQueueBase::DelayPrecision for + // up-to-date information. + // // Note that it's NOT ALLOWED to call into this library from within this // callback. - virtual std::unique_ptr CreateTimeout() = 0; + virtual std::unique_ptr CreateTimeout( + webrtc::TaskQueueBase::DelayPrecision precision) { + // TODO(hbos): When dependencies have migrated to this new signature, make + // this pure virtual and delete the other version. + return CreateTimeout(); + } + // TODO(hbos): When dependencies have migrated to the other signature, delete + // this version. + virtual std::unique_ptr CreateTimeout() { + return CreateTimeout(webrtc::TaskQueueBase::DelayPrecision::kLow); + } // Returns the current time in milliseconds (from any epoch). // @@ -362,6 +401,91 @@ class DcSctpSocketCallbacks { // buffer, for all streams) falls to or below the threshold specified in // `DcSctpOptions::total_buffered_amount_low_threshold`. virtual void OnTotalBufferedAmountLow() {} + + // == Lifecycle Events == + // + // If a `lifecycle_id` is provided as `SendOptions`, lifecycle callbacks will + // be triggered as the message is processed by the library. + // + // The possible transitions are shown in the graph below: + // + // DcSctpSocket::Send ────────────────────────┐ + // │ │ + // │ │ + // v v + // OnLifecycleMessageFullySent ───────> OnLifecycleMessageExpired + // │ │ + // │ │ + // v v + // OnLifeCycleMessageDelivered ────────────> OnLifecycleEnd + + // OnLifecycleMessageFullySent will be called when a message has been fully + // sent, meaning that the last fragment has been produced from the send queue + // and sent on the network. Note that this will trigger at most once per + // message even if the message was retransmitted due to packet loss. + // + // This is a lifecycle event. + // + // Note that it's NOT ALLOWED to call into this library from within this + // callback. + virtual void OnLifecycleMessageFullySent(LifecycleId lifecycle_id) {} + + // OnLifecycleMessageExpired will be called when a message has expired. 
If it + // was expired with data remaining in the send queue that had not been sent + // ever, `maybe_delivered` will be set to false. If `maybe_delivered` is true, + // the message has at least once been sent and may have been correctly + // received by the peer, but it has expired before the receiver managed to + // acknowledge it. This means that if `maybe_delivered` is true, it's unknown + // if the message was lost or was delivered, and if `maybe_delivered` is + // false, it's guaranteed to not be delivered. + // + // It's guaranteed that `OnLifecycleMessageDelivered` is not called if this + // callback has triggered. + // + // This is a lifecycle event. + // + // Note that it's NOT ALLOWED to call into this library from within this + // callback. + virtual void OnLifecycleMessageExpired(LifecycleId lifecycle_id, + bool maybe_delivered) {} + + // OnLifecycleMessageDelivered will be called when a non-expired message has + // been acknowledged by the peer as delivered. + // + // Note that this will trigger only when the peer moves its cumulative TSN ack + // beyond this message, and will not fire for messages acked using + // gap-ack-blocks as those are renegable. This means that this may fire a bit + // later than the message was actually first "acked" by the peer, as - + // according to the protocol - those acks may be unacked later by the client. + // + // It's guaranteed that `OnLifecycleMessageExpired` is not called if this + // callback has triggered. + // + // This is a lifecycle event. + // + // Note that it's NOT ALLOWED to call into this library from within this + // callback. + virtual void OnLifecycleMessageDelivered(LifecycleId lifecycle_id) {} + + // OnLifecycleEnd will be called when a lifecycle event has reached its end. + // It will be called when processing of a message is complete, no matter how + // it completed. It will be called after all other lifecycle events, if any. + // + // Note that it's possible that this callback triggers without any other + // lifecycle callbacks having been called before in case of errors, such as + // attempting to send an empty message or failing to enqueue a message if the + // send queue is full. + // + // NOTE: When the socket is deallocated, there will be no `OnLifecycleEnd` + // callbacks sent for messages that were enqueued. But as long as the socket + // is alive, `OnLifecycleEnd` callbacks are guaranteed to be sent as messages + // are either expired or successfully acknowledged. + // + // This is a lifecycle event. + // + // Note that it's NOT ALLOWED to call into this library from within this + // callback. + virtual void OnLifecycleEnd(LifecycleId lifecycle_id) {} }; // The DcSctpSocket implementation implements the following interface. @@ -407,8 +531,17 @@ class DcSctpSocketInterface { // Update the options max_message_size. virtual void SetMaxMessageSize(size_t max_message_size) = 0; + // Sets the priority of an outgoing stream. The initial value, when not set, + // is `DcSctpOptions::default_stream_priority`. + virtual void SetStreamPriority(StreamID stream_id, + StreamPriority priority) = 0; + + // Returns the currently set priority for an outgoing stream. The initial + // value, when not set, is `DcSctpOptions::default_stream_priority`. + virtual StreamPriority GetStreamPriority(StreamID stream_id) const = 0; + // Sends the message `message` using the provided send options. 
- // Sending a message is an asynchrous operation, and the `OnError` callback + // Sending a message is an asynchronous operation, and the `OnError` callback // may be invoked to indicate any errors in sending the message. // // The association does not have to be established before calling this method. @@ -449,8 +582,9 @@ class DcSctpSocketInterface { virtual void SetBufferedAmountLowThreshold(StreamID stream_id, size_t bytes) = 0; - // Retrieves the latest metrics. - virtual Metrics GetMetrics() const = 0; + // Retrieves the latest metrics. If the socket is not fully connected, + // `absl::nullopt` will be returned. + virtual absl::optional GetMetrics() const = 0; // Returns empty bitmask if the socket is in the state in which a snapshot of // the state can be made by `GetHandoverStateAndClose()`. Return value is @@ -473,7 +607,10 @@ class DcSctpSocketInterface { // If this method is called too early (before // `DcSctpSocketCallbacks::OnConnected` has triggered), this will likely // return `SctpImplementation::kUnknown`. - virtual SctpImplementation peer_implementation() const = 0; + ABSL_DEPRECATED("See Metrics::peer_implementation instead") + virtual SctpImplementation peer_implementation() const { + return SctpImplementation::kUnknown; + } }; } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket_factory.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket_factory.cc index 338d143424..ebcb5553e3 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket_factory.cc @@ -20,6 +20,9 @@ #include "net/dcsctp/socket/dcsctp_socket.h" namespace dcsctp { + +DcSctpSocketFactory::~DcSctpSocketFactory() = default; + std::unique_ptr DcSctpSocketFactory::Create( absl::string_view log_prefix, DcSctpSocketCallbacks& callbacks, diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket_factory.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket_factory.h index dcc68d9b54..ca429d3275 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket_factory.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket_factory.h @@ -20,7 +20,8 @@ namespace dcsctp { class DcSctpSocketFactory { public: - std::unique_ptr Create( + virtual ~DcSctpSocketFactory(); + virtual std::unique_ptr Create( absl::string_view log_prefix, DcSctpSocketCallbacks& callbacks, std::unique_ptr packet_observer, diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/mock_dcsctp_socket.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/mock_dcsctp_socket.h index d207899a18..0fd572bd94 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/mock_dcsctp_socket.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/mock_dcsctp_socket.h @@ -41,6 +41,16 @@ class MockDcSctpSocket : public DcSctpSocketInterface { MOCK_METHOD(void, SetMaxMessageSize, (size_t max_message_size), (override)); + MOCK_METHOD(void, + SetStreamPriority, + (StreamID stream_id, StreamPriority priority), + (override)); + + MOCK_METHOD(StreamPriority, + GetStreamPriority, + (StreamID stream_id), + (const, override)); + MOCK_METHOD(SendStatus, Send, (DcSctpMessage message, const SendOptions& send_options), @@ -63,7 +73,7 @@ class MockDcSctpSocket : public DcSctpSocketInterface { (StreamID stream_id, size_t bytes), (override)); - MOCK_METHOD(Metrics, GetMetrics, (), (const, override)); + MOCK_METHOD(absl::optional, GetMetrics, (), (const, override)); 
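The hunk just above changes DcSctpSocketInterface::GetMetrics() to return absl::optional<Metrics> (empty until the association is established) and, earlier in this patch, turns cwnd_bytes, srtt_ms and peer_rwnd_bytes into plain values instead of optionals. A minimal caller-side sketch of the new contract; the helper name and the logging are illustrative assumptions, not code from this patch:

#include "absl/types/optional.h"
#include "net/dcsctp/public/dcsctp_socket.h"
#include "rtc_base/logging.h"

// Hypothetical helper (not in the patch): report metrics once connected.
void LogMetricsIfConnected(const dcsctp::DcSctpSocketInterface& socket) {
  absl::optional<dcsctp::Metrics> metrics = socket.GetMetrics();
  if (!metrics.has_value()) {
    return;  // GetMetrics() returns nullopt before the socket is connected.
  }
  // These fields are no longer absl::optional in the updated Metrics struct.
  RTC_LOG(LS_INFO) << "cwnd=" << metrics->cwnd_bytes
                   << " srtt_ms=" << metrics->srtt_ms
                   << " peer_rwnd=" << metrics->peer_rwnd_bytes
                   << " peer_impl=" << ToString(metrics->peer_implementation);
}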
MOCK_METHOD(HandoverReadinessStatus, GetHandoverReadiness, @@ -73,8 +83,6 @@ class MockDcSctpSocket : public DcSctpSocketInterface { GetHandoverStateAndClose, (), (override)); - - MOCK_METHOD(SctpImplementation, peer_implementation, (), (const)); }; } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/mock_dcsctp_socket_factory.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/mock_dcsctp_socket_factory.h new file mode 100644 index 0000000000..61f05577f2 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/mock_dcsctp_socket_factory.h @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PUBLIC_MOCK_DCSCTP_SOCKET_FACTORY_H_ +#define NET_DCSCTP_PUBLIC_MOCK_DCSCTP_SOCKET_FACTORY_H_ + +#include + +#include "net/dcsctp/public/dcsctp_socket_factory.h" +#include "test/gmock.h" + +namespace dcsctp { + +class MockDcSctpSocketFactory : public DcSctpSocketFactory { + public: + MOCK_METHOD(std::unique_ptr, + Create, + (absl::string_view log_prefix, + DcSctpSocketCallbacks& callbacks, + std::unique_ptr packet_observer, + const DcSctpOptions& options), + (override)); +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PUBLIC_MOCK_DCSCTP_SOCKET_FACTORY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/types.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/types.h index 8faec08ad3..d0725620d8 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/types.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/types.h @@ -31,6 +31,10 @@ using TimeoutID = webrtc::StrongAlias; // other messages on the same stream. using IsUnordered = webrtc::StrongAlias; +// Stream priority, where higher values indicate higher priority. The meaning of +// this value and how it's used depends on the stream scheduler. +using StreamPriority = webrtc::StrongAlias; + // Duration, as milliseconds. Overflows after 24 days. class DurationMs : public webrtc::StrongAlias { public: @@ -107,16 +111,33 @@ constexpr inline DurationMs operator-(TimeMs lhs, TimeMs rhs) { // The maximum number of times the socket should attempt to retransmit a // message which fails the first time in unreliable mode. -class MaxRetransmits : public webrtc::StrongAlias { +class MaxRetransmits + : public webrtc::StrongAlias { public: constexpr explicit MaxRetransmits(const UnderlyingType& v) - : webrtc::StrongAlias(v) {} + : webrtc::StrongAlias(v) {} // There should be no limit - the message should be sent reliably. static constexpr MaxRetransmits NoLimit() { return MaxRetransmits(std::numeric_limits::max()); } }; + +// An identifier that can be set on sent messages, and picked by the sending +// client. If different from `::NotSet()`, lifecycle events will be generated, +// and eventually `DcSctpSocketCallbacks::OnLifecycleEnd` will be called to +// indicate that the lifecycle isn't tracked any longer. The value zero (0) is +// not a valid lifecycle identifier, and will be interpreted as not having it +// set. 
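Tying together the lifecycle additions in this patch (SendOptions::lifecycle_id, the OnLifecycle* callbacks, and the LifecycleId alias whose definition follows immediately below), here is a hedged usage sketch. The class name, stream id, PPID and id counter are assumptions for illustration only; a concrete callbacks subclass must also implement the remaining pure-virtual methods of DcSctpSocketCallbacks:

#include <cstdint>
#include <utility>
#include <vector>

#include "net/dcsctp/public/dcsctp_socket.h"

// Illustrative (still abstract) callbacks subclass overriding only the
// lifecycle hooks added by this patch.
class TracingCallbacks : public dcsctp::DcSctpSocketCallbacks {
 public:
  void OnLifecycleMessageFullySent(dcsctp::LifecycleId id) override {
    // Last fragment left the send queue and was put on the wire.
  }
  void OnLifecycleMessageExpired(dcsctp::LifecycleId id,
                                 bool maybe_delivered) override {
    // Expired; if `maybe_delivered` is false it was definitely not delivered.
  }
  void OnLifecycleMessageDelivered(dcsctp::LifecycleId id) override {
    // Cumulative-TSN-acked by the peer.
  }
  void OnLifecycleEnd(dcsctp::LifecycleId id) override {
    // Always the last event for this id; stop tracking it here.
  }
};

// Sending with lifecycle tracking: zero means "not set", so ids start at 1.
void SendTracked(dcsctp::DcSctpSocketInterface& socket,
                 std::vector<uint8_t> payload,
                 uint64_t id) {
  dcsctp::SendOptions options;
  options.lifecycle_id = dcsctp::LifecycleId(id);
  socket.Send(dcsctp::DcSctpMessage(dcsctp::StreamID(1), dcsctp::PPID(53),
                                    std::move(payload)),
              options);
}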
+class LifecycleId : public webrtc::StrongAlias { + public: + constexpr explicit LifecycleId(const UnderlyingType& v) + : webrtc::StrongAlias(v) {} + + constexpr bool IsSet() const { return value_ != 0; } + + static constexpr LifecycleId NotSet() { return LifecycleId(0); } +}; } // namespace dcsctp #endif // NET_DCSCTP_PUBLIC_TYPES_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/data_tracker.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/data_tracker.cc index f31847b524..1f2e43f7f5 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/data_tracker.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/data_tracker.cc @@ -123,8 +123,9 @@ bool DataTracker::IsTSNValid(TSN tsn) const { return true; } -void DataTracker::Observe(TSN tsn, +bool DataTracker::Observe(TSN tsn, AnyDataChunk::ImmediateAckFlag immediate_ack) { + bool is_duplicate = false; UnwrappedTSN unwrapped_tsn = tsn_unwrapper_.Unwrap(tsn); // IsTSNValid must be called prior to calling this method. @@ -143,6 +144,7 @@ void DataTracker::Observe(TSN tsn, // packet arrives with duplicate DATA chunk(s) bundled with new DATA chunks, // the endpoint MAY immediately send a SACK." UpdateAckState(AckState::kImmediate, "duplicate data"); + is_duplicate = true; } else { if (unwrapped_tsn == last_cumulative_acked_tsn_.next_value()) { last_cumulative_acked_tsn_ = unwrapped_tsn; @@ -167,6 +169,7 @@ void DataTracker::Observe(TSN tsn, // delay. If a packet arrives with duplicate DATA chunk(s) bundled with // new DATA chunks, the endpoint MAY immediately send a SACK." // No need to do this. SACKs are sent immediately on packet loss below. + is_duplicate = true; } } } @@ -208,6 +211,7 @@ void DataTracker::Observe(TSN tsn, } else if (ack_state_ == AckState::kDelayed) { UpdateAckState(AckState::kImmediate, "received DATA when already delayed"); } + return !is_duplicate; } void DataTracker::HandleForwardTsn(TSN new_cumulative_ack) { @@ -369,4 +373,14 @@ void DataTracker::AddHandoverState(DcSctpSocketHandoverState& state) { state.rx.seen_packet = seen_packet_; } +void DataTracker::RestoreFromState(const DcSctpSocketHandoverState& state) { + // Validate that the component is in pristine state. + RTC_DCHECK(additional_tsn_blocks_.empty()); + RTC_DCHECK(duplicate_tsns_.empty()); + RTC_DCHECK(!seen_packet_); + + seen_packet_ = state.rx.seen_packet; + last_cumulative_acked_tsn_ = + tsn_unwrapper_.Unwrap(TSN(state.rx.last_cumulative_acked_tsn)); +} } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/data_tracker.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/data_tracker.h index fb8add82a2..ea077a9b57 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/data_tracker.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/data_tracker.h @@ -54,23 +54,21 @@ class DataTracker { DataTracker(absl::string_view log_prefix, Timer* delayed_ack_timer, - TSN peer_initial_tsn, - const DcSctpSocketHandoverState* handover_state = nullptr) + TSN peer_initial_tsn) : log_prefix_(std::string(log_prefix) + "dtrack: "), - seen_packet_(handover_state != nullptr ? handover_state->rx.seen_packet - : false), + seen_packet_(false), delayed_ack_timer_(*delayed_ack_timer), - last_cumulative_acked_tsn_(tsn_unwrapper_.Unwrap( - handover_state ? TSN(handover_state->rx.last_cumulative_acked_tsn) - : TSN(*peer_initial_tsn - 1))) {} + last_cumulative_acked_tsn_( + tsn_unwrapper_.Unwrap(TSN(*peer_initial_tsn - 1))) {} // Indicates if the provided TSN is valid. 
If this return false, the data // should be dropped and not added to any other buffers, which essentially // means that there is intentional packet loss. bool IsTSNValid(TSN tsn) const; - // Call for every incoming data chunk. - void Observe(TSN tsn, + // Call for every incoming data chunk. Returns `true` if `tsn` was seen for + // the first time, and `false` if it has been seen before (a duplicate `tsn`). + bool Observe(TSN tsn, AnyDataChunk::ImmediateAckFlag immediate_ack = AnyDataChunk::ImmediateAckFlag(false)); // Called at the end of processing an SCTP packet. @@ -109,6 +107,7 @@ class DataTracker { HandoverReadinessStatus GetHandoverReadiness() const; void AddHandoverState(DcSctpSocketHandoverState& state); + void RestoreFromState(const DcSctpSocketHandoverState& state); private: enum class AckState { diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/interleaved_reassembly_streams.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/interleaved_reassembly_streams.cc new file mode 100644 index 0000000000..8b316de676 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/interleaved_reassembly_streams.cc @@ -0,0 +1,272 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/rx/interleaved_reassembly_streams.h" + +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "api/array_view.h" +#include "net/dcsctp/common/sequence_numbers.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/public/types.h" +#include "rtc_base/logging.h" + +namespace dcsctp { + +InterleavedReassemblyStreams::InterleavedReassemblyStreams( + absl::string_view log_prefix, + OnAssembledMessage on_assembled_message) + : log_prefix_(log_prefix), on_assembled_message_(on_assembled_message) {} + +size_t InterleavedReassemblyStreams::Stream::TryToAssembleMessage( + UnwrappedMID mid) { + std::map::const_iterator it = + chunks_by_mid_.find(mid); + if (it == chunks_by_mid_.end()) { + RTC_DLOG(LS_VERBOSE) << parent_.log_prefix_ << "TryToAssembleMessage " + << *mid.Wrap() << " - no chunks"; + return 0; + } + const ChunkMap& chunks = it->second; + if (!chunks.begin()->second.second.is_beginning || + !chunks.rbegin()->second.second.is_end) { + RTC_DLOG(LS_VERBOSE) << parent_.log_prefix_ << "TryToAssembleMessage " + << *mid.Wrap() << "- missing beginning or end"; + return 0; + } + int64_t fsn_diff = *chunks.rbegin()->first - *chunks.begin()->first; + if (fsn_diff != (static_cast(chunks.size()) - 1)) { + RTC_DLOG(LS_VERBOSE) << parent_.log_prefix_ << "TryToAssembleMessage " + << *mid.Wrap() << "- not all chunks exist (have " + << chunks.size() << ", expect " << (fsn_diff + 1) + << ")"; + return 0; + } + + size_t removed_bytes = AssembleMessage(chunks); + RTC_DLOG(LS_VERBOSE) << parent_.log_prefix_ << "TryToAssembleMessage " + << *mid.Wrap() << " - succeeded and removed " + << removed_bytes; + + chunks_by_mid_.erase(mid); + return removed_bytes; +} + +size_t InterleavedReassemblyStreams::Stream::AssembleMessage( + const ChunkMap& tsn_chunks) { + size_t count = tsn_chunks.size(); + if (count 
== 1) { + // Fast path - zero-copy + const Data& data = tsn_chunks.begin()->second.second; + size_t payload_size = data.size(); + UnwrappedTSN tsns[1] = {tsn_chunks.begin()->second.first}; + DcSctpMessage message(data.stream_id, data.ppid, std::move(data.payload)); + parent_.on_assembled_message_(tsns, std::move(message)); + return payload_size; + } + + // Slow path - will need to concatenate the payload. + std::vector tsns; + tsns.reserve(count); + + std::vector payload; + size_t payload_size = absl::c_accumulate( + tsn_chunks, 0, + [](size_t v, const auto& p) { return v + p.second.second.size(); }); + payload.reserve(payload_size); + + for (auto& item : tsn_chunks) { + const UnwrappedTSN tsn = item.second.first; + const Data& data = item.second.second; + tsns.push_back(tsn); + payload.insert(payload.end(), data.payload.begin(), data.payload.end()); + } + + const Data& data = tsn_chunks.begin()->second.second; + + DcSctpMessage message(data.stream_id, data.ppid, std::move(payload)); + parent_.on_assembled_message_(tsns, std::move(message)); + return payload_size; +} + +size_t InterleavedReassemblyStreams::Stream::EraseTo(MID message_id) { + UnwrappedMID unwrapped_mid = mid_unwrapper_.Unwrap(message_id); + + size_t removed_bytes = 0; + auto it = chunks_by_mid_.begin(); + while (it != chunks_by_mid_.end() && it->first <= unwrapped_mid) { + removed_bytes += absl::c_accumulate( + it->second, 0, + [](size_t r2, const auto& q) { return r2 + q.second.second.size(); }); + it = chunks_by_mid_.erase(it); + } + + if (!stream_id_.unordered) { + // For ordered streams, erasing a message might suddenly unblock that queue + // and allow it to deliver any following received messages. + if (unwrapped_mid >= next_mid_) { + next_mid_ = unwrapped_mid.next_value(); + } + + removed_bytes += TryToAssembleMessages(); + } + + return removed_bytes; +} + +int InterleavedReassemblyStreams::Stream::Add(UnwrappedTSN tsn, Data data) { + RTC_DCHECK_EQ(*data.is_unordered, *stream_id_.unordered); + RTC_DCHECK_EQ(*data.stream_id, *stream_id_.stream_id); + int queued_bytes = data.size(); + UnwrappedMID mid = mid_unwrapper_.Unwrap(data.message_id); + FSN fsn = data.fsn; + auto [unused, inserted] = + chunks_by_mid_[mid].emplace(fsn, std::make_pair(tsn, std::move(data))); + if (!inserted) { + return 0; + } + + if (stream_id_.unordered) { + queued_bytes -= TryToAssembleMessage(mid); + } else { + if (mid == next_mid_) { + queued_bytes -= TryToAssembleMessages(); + } + } + + return queued_bytes; +} + +size_t InterleavedReassemblyStreams::Stream::TryToAssembleMessages() { + size_t removed_bytes = 0; + + for (;;) { + size_t removed_bytes_this_iter = TryToAssembleMessage(next_mid_); + if (removed_bytes_this_iter == 0) { + break; + } + + removed_bytes += removed_bytes_this_iter; + next_mid_.Increment(); + } + return removed_bytes; +} + +void InterleavedReassemblyStreams::Stream::AddHandoverState( + DcSctpSocketHandoverState& state) const { + if (stream_id_.unordered) { + DcSctpSocketHandoverState::UnorderedStream state_stream; + state_stream.id = stream_id_.stream_id.value(); + state.rx.unordered_streams.push_back(std::move(state_stream)); + } else { + DcSctpSocketHandoverState::OrderedStream state_stream; + state_stream.id = stream_id_.stream_id.value(); + state_stream.next_ssn = next_mid_.Wrap().value(); + state.rx.ordered_streams.push_back(std::move(state_stream)); + } +} + +InterleavedReassemblyStreams::Stream& +InterleavedReassemblyStreams::GetOrCreateStream(const FullStreamId& stream_id) { + auto it = streams_.find(stream_id); 
+ if (it == streams_.end()) { + it = + streams_ + .emplace(std::piecewise_construct, std::forward_as_tuple(stream_id), + std::forward_as_tuple(stream_id, this)) + .first; + } + return it->second; +} + +int InterleavedReassemblyStreams::Add(UnwrappedTSN tsn, Data data) { + return GetOrCreateStream(FullStreamId(data.is_unordered, data.stream_id)) + .Add(tsn, std::move(data)); +} + +size_t InterleavedReassemblyStreams::HandleForwardTsn( + UnwrappedTSN new_cumulative_ack_tsn, + rtc::ArrayView skipped_streams) { + size_t removed_bytes = 0; + for (const auto& skipped : skipped_streams) { + removed_bytes += + GetOrCreateStream(FullStreamId(skipped.unordered, skipped.stream_id)) + .EraseTo(skipped.message_id); + } + return removed_bytes; +} + +void InterleavedReassemblyStreams::ResetStreams( + rtc::ArrayView stream_ids) { + if (stream_ids.empty()) { + for (auto& entry : streams_) { + entry.second.Reset(); + } + } else { + for (StreamID stream_id : stream_ids) { + GetOrCreateStream(FullStreamId(IsUnordered(true), stream_id)).Reset(); + GetOrCreateStream(FullStreamId(IsUnordered(false), stream_id)).Reset(); + } + } +} + +HandoverReadinessStatus InterleavedReassemblyStreams::GetHandoverReadiness() + const { + HandoverReadinessStatus status; + for (const auto& [stream_id, stream] : streams_) { + if (stream.has_unassembled_chunks()) { + status.Add( + stream_id.unordered + ? HandoverUnreadinessReason::kUnorderedStreamHasUnassembledChunks + : HandoverUnreadinessReason::kOrderedStreamHasUnassembledChunks); + break; + } + } + return status; +} + +void InterleavedReassemblyStreams::AddHandoverState( + DcSctpSocketHandoverState& state) { + for (const auto& [unused, stream] : streams_) { + stream.AddHandoverState(state); + } +} + +void InterleavedReassemblyStreams::RestoreFromState( + const DcSctpSocketHandoverState& state) { + // Validate that the component is in pristine state. + RTC_DCHECK(streams_.empty()); + + for (const DcSctpSocketHandoverState::OrderedStream& state : + state.rx.ordered_streams) { + FullStreamId stream_id(IsUnordered(false), StreamID(state.id)); + streams_.emplace( + std::piecewise_construct, std::forward_as_tuple(stream_id), + std::forward_as_tuple(stream_id, this, MID(state.next_ssn))); + } + for (const DcSctpSocketHandoverState::UnorderedStream& state : + state.rx.unordered_streams) { + FullStreamId stream_id(IsUnordered(true), StreamID(state.id)); + streams_.emplace(std::piecewise_construct, std::forward_as_tuple(stream_id), + std::forward_as_tuple(stream_id, this)); + } +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/interleaved_reassembly_streams.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/interleaved_reassembly_streams.h new file mode 100644 index 0000000000..a7b67707e9 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/interleaved_reassembly_streams.h @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_RX_INTERLEAVED_REASSEMBLY_STREAMS_H_ +#define NET_DCSCTP_RX_INTERLEAVED_REASSEMBLY_STREAMS_H_ + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/common/sequence_numbers.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/rx/reassembly_streams.h" + +namespace dcsctp { + +// Handles reassembly of incoming data when interleaved message sending is +// enabled on the association, i.e. when RFC8260 is in use. +class InterleavedReassemblyStreams : public ReassemblyStreams { + public: + InterleavedReassemblyStreams(absl::string_view log_prefix, + OnAssembledMessage on_assembled_message); + + int Add(UnwrappedTSN tsn, Data data) override; + + size_t HandleForwardTsn( + UnwrappedTSN new_cumulative_ack_tsn, + rtc::ArrayView skipped_streams) + override; + + void ResetStreams(rtc::ArrayView stream_ids) override; + + HandoverReadinessStatus GetHandoverReadiness() const override; + void AddHandoverState(DcSctpSocketHandoverState& state) override; + void RestoreFromState(const DcSctpSocketHandoverState& state) override; + + private: + struct FullStreamId { + const IsUnordered unordered; + const StreamID stream_id; + + FullStreamId(IsUnordered unordered, StreamID stream_id) + : unordered(unordered), stream_id(stream_id) {} + + friend bool operator<(FullStreamId a, FullStreamId b) { + return a.unordered < b.unordered || + (!(a.unordered < b.unordered) && (a.stream_id < b.stream_id)); + } + }; + + class Stream { + public: + Stream(FullStreamId stream_id, + InterleavedReassemblyStreams* parent, + MID next_mid = MID(0)) + : stream_id_(stream_id), + parent_(*parent), + next_mid_(mid_unwrapper_.Unwrap(next_mid)) {} + int Add(UnwrappedTSN tsn, Data data); + size_t EraseTo(MID message_id); + void Reset() { + mid_unwrapper_.Reset(); + next_mid_ = mid_unwrapper_.Unwrap(MID(0)); + } + bool has_unassembled_chunks() const { return !chunks_by_mid_.empty(); } + void AddHandoverState(DcSctpSocketHandoverState& state) const; + + private: + using ChunkMap = std::map>; + + // Try to assemble one message identified by `mid`. + // Returns the number of bytes assembled if a message was assembled. + size_t TryToAssembleMessage(UnwrappedMID mid); + size_t AssembleMessage(const ChunkMap& tsn_chunks); + // Try to assemble one or several messages in order from the stream. + // Returns the number of bytes assembled if one or more messages were + // assembled. + size_t TryToAssembleMessages(); + + const FullStreamId stream_id_; + InterleavedReassemblyStreams& parent_; + std::map chunks_by_mid_; + UnwrappedMID::Unwrapper mid_unwrapper_; + UnwrappedMID next_mid_; + }; + + Stream& GetOrCreateStream(const FullStreamId& stream_id); + + const std::string log_prefix_; + + // Callback for when a message has been assembled. + const OnAssembledMessage on_assembled_message_; + + // All unordered and ordered streams, managing not-yet-assembled data. 
+ std::map streams_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_RX_INTERLEAVED_REASSEMBLY_STREAMS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.cc index 36ade9230a..f72c5cb8c1 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.cc @@ -29,45 +29,53 @@ #include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h" #include "net/dcsctp/packet/parameter/reconfiguration_response_parameter.h" #include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/rx/interleaved_reassembly_streams.h" #include "net/dcsctp/rx/reassembly_streams.h" #include "net/dcsctp/rx/traditional_reassembly_streams.h" #include "rtc_base/logging.h" namespace dcsctp { -ReassemblyQueue::ReassemblyQueue( +namespace { +std::unique_ptr CreateStreams( absl::string_view log_prefix, - TSN peer_initial_tsn, - size_t max_size_bytes, - const DcSctpSocketHandoverState* handover_state) + ReassemblyStreams::OnAssembledMessage on_assembled_message, + bool use_message_interleaving) { + if (use_message_interleaving) { + return std::make_unique( + log_prefix, std::move(on_assembled_message)); + } + return std::make_unique( + log_prefix, std::move(on_assembled_message)); +} +} // namespace + +ReassemblyQueue::ReassemblyQueue(absl::string_view log_prefix, + TSN peer_initial_tsn, + size_t max_size_bytes, + bool use_message_interleaving) : log_prefix_(std::string(log_prefix) + "reasm: "), max_size_bytes_(max_size_bytes), watermark_bytes_(max_size_bytes * kHighWatermarkLimit), - last_assembled_tsn_watermark_(tsn_unwrapper_.Unwrap( - handover_state ? TSN(handover_state->rx.last_assembled_tsn) - : TSN(*peer_initial_tsn - 1))), - last_completed_reset_req_seq_nbr_( - handover_state - ? ReconfigRequestSN( - handover_state->rx.last_completed_deferred_reset_req_sn) - : ReconfigRequestSN(0)), - streams_(std::make_unique( + last_assembled_tsn_watermark_( + tsn_unwrapper_.Unwrap(TSN(*peer_initial_tsn - 1))), + last_completed_reset_req_seq_nbr_(ReconfigRequestSN(0)), + streams_(CreateStreams( log_prefix_, [this](rtc::ArrayView tsns, DcSctpMessage message) { AddReassembledMessage(tsns, std::move(message)); }, - handover_state)) {} + use_message_interleaving)) {} void ReassemblyQueue::Add(TSN tsn, Data data) { RTC_DCHECK(IsConsistent()); RTC_DLOG(LS_VERBOSE) << log_prefix_ << "added tsn=" << *tsn << ", stream=" << *data.stream_id << ":" << *data.message_id << ":" << *data.fsn << ", type=" - << (data.is_beginning && data.is_end - ? "complete" - : data.is_beginning - ? "first" - : data.is_end ? "last" : "middle"); + << (data.is_beginning && data.is_end ? "complete" + : data.is_beginning ? "first" + : data.is_end ? "last" + : "middle"); UnwrappedTSN unwrapped_tsn = tsn_unwrapper_.Unwrap(tsn); @@ -175,9 +183,7 @@ bool ReassemblyQueue::MaybeResetStreamsDeferred(TSN cum_ack_tsn) { // https://tools.ietf.org/html/rfc6525#section-5.2.2 // "Any queued TSNs (queued at step E2) MUST now be released and processed // normally." 
- for (auto& p : deferred_chunks) { - const TSN& tsn = p.first; - Data& data = p.second; + for (auto& [tsn, data] : deferred_chunks) { queued_bytes_ -= data.size(); Add(tsn, std::move(data)); } @@ -212,8 +218,16 @@ void ReassemblyQueue::AddReassembledMessage( << ", payload=" << message.payload().size() << " bytes"; for (const UnwrappedTSN tsn : tsns) { - // Update watermark, or insert into delivered_tsns_ - if (tsn == last_assembled_tsn_watermark_.next_value()) { + if (tsn <= last_assembled_tsn_watermark_) { + // This can be provoked by a misbehaving peer by sending FORWARD-TSN with + // invalid SSNs, allowing ordered messages to stay in the queue that + // should've been discarded. + RTC_DLOG(LS_VERBOSE) + << log_prefix_ + << "Message is built from fragments already seen - skipping"; + return; + } else if (tsn == last_assembled_tsn_watermark_.next_value()) { + // Update watermark, or insert into delivered_tsns_ last_assembled_tsn_watermark_.Increment(); } else { delivered_tsns_.insert(tsn); @@ -285,4 +299,14 @@ void ReassemblyQueue::AddHandoverState(DcSctpSocketHandoverState& state) { streams_->AddHandoverState(state); } +void ReassemblyQueue::RestoreFromState(const DcSctpSocketHandoverState& state) { + // Validate that the component is in pristine state. + RTC_DCHECK(last_completed_reset_req_seq_nbr_ == ReconfigRequestSN(0)); + + last_assembled_tsn_watermark_ = + tsn_unwrapper_.Unwrap(TSN(state.rx.last_assembled_tsn)); + last_completed_reset_req_seq_nbr_ = + ReconfigRequestSN(state.rx.last_completed_deferred_reset_req_sn); + streams_->RestoreFromState(state); +} } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.h index 9cc0c61eb6..91f30a3f69 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.h @@ -72,7 +72,7 @@ class ReassemblyQueue { ReassemblyQueue(absl::string_view log_prefix, TSN peer_initial_tsn, size_t max_size_bytes, - const DcSctpSocketHandoverState* handover_state = nullptr); + bool use_message_interleaving = false); // Adds a data chunk to the queue, with a `tsn` and other parameters in // `data`. @@ -123,6 +123,7 @@ class ReassemblyQueue { HandoverReadinessStatus GetHandoverReadiness() const; void AddHandoverState(DcSctpSocketHandoverState& state); + void RestoreFromState(const DcSctpSocketHandoverState& state); private: bool IsConsistent() const; diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_streams.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_streams.cc new file mode 100644 index 0000000000..9fd52fb15d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_streams.cc @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/rx/reassembly_streams.h" + +#include +#include +#include + +namespace dcsctp { + +ReassembledMessage AssembleMessage(std::map::iterator start, + std::map::iterator end) { + size_t count = std::distance(start, end); + + if (count == 1) { + // Fast path - zero-copy + Data& data = start->second; + + return ReassembledMessage{ + .tsns = {start->first}, + .message = DcSctpMessage(data.stream_id, data.ppid, + std::move(start->second.payload)), + }; + } + + // Slow path - will need to concatenate the payload. + std::vector tsns; + std::vector payload; + + size_t payload_size = std::accumulate( + start, end, 0, + [](size_t v, const auto& p) { return v + p.second.size(); }); + + tsns.reserve(count); + payload.reserve(payload_size); + for (auto it = start; it != end; ++it) { + Data& data = it->second; + tsns.push_back(it->first); + payload.insert(payload.end(), data.payload.begin(), data.payload.end()); + } + + return ReassembledMessage{ + .tsns = std::move(tsns), + .message = DcSctpMessage(start->second.stream_id, start->second.ppid, + std::move(payload)), + }; +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_streams.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_streams.h index 06f1a781ce..0ecfac0c0a 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_streams.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_streams.h @@ -81,6 +81,7 @@ class ReassemblyStreams { virtual HandoverReadinessStatus GetHandoverReadiness() const = 0; virtual void AddHandoverState(DcSctpSocketHandoverState& state) = 0; + virtual void RestoreFromState(const DcSctpSocketHandoverState& state) = 0; }; } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.cc index d004824354..dce6c90131 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.cc @@ -80,37 +80,19 @@ absl::optional::iterator> FindEnd( TraditionalReassemblyStreams::TraditionalReassemblyStreams( absl::string_view log_prefix, - OnAssembledMessage on_assembled_message, - const DcSctpSocketHandoverState* handover_state) + OnAssembledMessage on_assembled_message) : log_prefix_(log_prefix), - on_assembled_message_(std::move(on_assembled_message)) { - if (handover_state) { - for (const DcSctpSocketHandoverState::OrderedStream& state_stream : - handover_state->rx.ordered_streams) { - ordered_streams_.emplace( - std::piecewise_construct, - std::forward_as_tuple(StreamID(state_stream.id)), - std::forward_as_tuple(this, SSN(state_stream.next_ssn))); - } - for (const DcSctpSocketHandoverState::UnorderedStream& state_stream : - handover_state->rx.unordered_streams) { - unordered_streams_.emplace( - std::piecewise_construct, - std::forward_as_tuple(StreamID(state_stream.id)), - std::forward_as_tuple(this)); - } - } -} + on_assembled_message_(std::move(on_assembled_message)) {} int TraditionalReassemblyStreams::UnorderedStream::Add(UnwrappedTSN tsn, Data data) { int queued_bytes = data.size(); - auto p = chunks_.emplace(tsn, std::move(data)); - if (!p.second /* !inserted */) { + auto [it, inserted] = chunks_.emplace(tsn, std::move(data)); + if (!inserted) { return 0; } - queued_bytes -= TryToAssembleMessage(p.first); + queued_bytes -= TryToAssembleMessage(it); return queued_bytes; } @@ -225,8 +207,8 @@ int 
TraditionalReassemblyStreams::OrderedStream::Add(UnwrappedTSN tsn, int queued_bytes = data.size(); UnwrappedSSN ssn = ssn_unwrapper_.Unwrap(data.ssn); - auto p = chunks_by_ssn_[ssn].emplace(tsn, std::move(data)); - if (!p.second /* !inserted */) { + auto [unused, inserted] = chunks_by_ssn_[ssn].emplace(tsn, std::move(data)); + if (!inserted) { return 0; } @@ -261,11 +243,11 @@ size_t TraditionalReassemblyStreams::OrderedStream::EraseTo(SSN ssn) { int TraditionalReassemblyStreams::Add(UnwrappedTSN tsn, Data data) { if (data.is_unordered) { - auto it = unordered_streams_.emplace(data.stream_id, this).first; + auto it = unordered_streams_.try_emplace(data.stream_id, this).first; return it->second.Add(tsn, std::move(data)); } - auto it = ordered_streams_.emplace(data.stream_id, this).first; + auto it = ordered_streams_.try_emplace(data.stream_id, this).first; return it->second.Add(tsn, std::move(data)); } @@ -275,15 +257,14 @@ size_t TraditionalReassemblyStreams::HandleForwardTsn( size_t bytes_removed = 0; // The `skipped_streams` only cover ordered messages - need to // iterate all unordered streams manually to remove those chunks. - for (auto& entry : unordered_streams_) { - bytes_removed += entry.second.EraseTo(new_cumulative_ack_tsn); + for (auto& [unused, stream] : unordered_streams_) { + bytes_removed += stream.EraseTo(new_cumulative_ack_tsn); } for (const auto& skipped_stream : skipped_streams) { - auto it = ordered_streams_.find(skipped_stream.stream_id); - if (it != ordered_streams_.end()) { - bytes_removed += it->second.EraseTo(skipped_stream.ssn); - } + auto it = + ordered_streams_.try_emplace(skipped_stream.stream_id, this).first; + bytes_removed += it->second.EraseTo(skipped_stream.ssn); } return bytes_removed; @@ -292,9 +273,7 @@ size_t TraditionalReassemblyStreams::HandleForwardTsn( void TraditionalReassemblyStreams::ResetStreams( rtc::ArrayView stream_ids) { if (stream_ids.empty()) { - for (auto& entry : ordered_streams_) { - const StreamID& stream_id = entry.first; - OrderedStream& stream = entry.second; + for (auto& [stream_id, stream] : ordered_streams_) { RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Resetting implicit stream_id=" << *stream_id; stream.Reset(); @@ -314,14 +293,14 @@ void TraditionalReassemblyStreams::ResetStreams( HandoverReadinessStatus TraditionalReassemblyStreams::GetHandoverReadiness() const { HandoverReadinessStatus status; - for (const auto& entry : ordered_streams_) { - if (entry.second.has_unassembled_chunks()) { + for (const auto& [unused, stream] : ordered_streams_) { + if (stream.has_unassembled_chunks()) { status.Add(HandoverUnreadinessReason::kOrderedStreamHasUnassembledChunks); break; } } - for (const auto& entry : unordered_streams_) { - if (entry.second.has_unassembled_chunks()) { + for (const auto& [unused, stream] : unordered_streams_) { + if (stream.has_unassembled_chunks()) { status.Add( HandoverUnreadinessReason::kUnorderedStreamHasUnassembledChunks); break; @@ -332,17 +311,38 @@ HandoverReadinessStatus TraditionalReassemblyStreams::GetHandoverReadiness() void TraditionalReassemblyStreams::AddHandoverState( DcSctpSocketHandoverState& state) { - for (const auto& entry : ordered_streams_) { + for (const auto& [stream_id, stream] : ordered_streams_) { DcSctpSocketHandoverState::OrderedStream state_stream; - state_stream.id = entry.first.value(); - state_stream.next_ssn = entry.second.next_ssn().value(); + state_stream.id = stream_id.value(); + state_stream.next_ssn = stream.next_ssn().value(); 
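Several of the loops and insertions above were rewritten to use structured bindings and std::map::try_emplace, which avoids constructing a value when the key already exists. A small self-contained illustration of the pattern with generic types (not the actual stream classes):

    #include <cstdio>
    #include <map>
    #include <string>

    int main() {
      std::map<int, std::string> streams;

      // try_emplace only constructs the value if the key is absent and returns
      // an iterator plus an "inserted" flag, unpacked with structured bindings.
      auto [it, inserted] = streams.try_emplace(1, "stream-1");
      std::printf("inserted=%d value=%s\n", inserted, it->second.c_str());

      // Second call with the same key: no new construction, inserted == false.
      auto [it2, inserted2] = streams.try_emplace(1, "ignored");
      std::printf("inserted=%d value=%s\n", inserted2, it2->second.c_str());

      // Iterating with structured bindings instead of pair.first/pair.second.
      for (const auto& [id, name] : streams) {
        std::printf("id=%d name=%s\n", id, name.c_str());
      }
    }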
state.rx.ordered_streams.push_back(std::move(state_stream)); } - for (const auto& entry : unordered_streams_) { + for (const auto& [stream_id, unused] : unordered_streams_) { DcSctpSocketHandoverState::UnorderedStream state_stream; - state_stream.id = entry.first.value(); + state_stream.id = stream_id.value(); state.rx.unordered_streams.push_back(std::move(state_stream)); } } +void TraditionalReassemblyStreams::RestoreFromState( + const DcSctpSocketHandoverState& state) { + // Validate that the component is in pristine state. + RTC_DCHECK(ordered_streams_.empty()); + RTC_DCHECK(unordered_streams_.empty()); + + for (const DcSctpSocketHandoverState::OrderedStream& state_stream : + state.rx.ordered_streams) { + ordered_streams_.emplace( + std::piecewise_construct, + std::forward_as_tuple(StreamID(state_stream.id)), + std::forward_as_tuple(this, SSN(state_stream.next_ssn))); + } + for (const DcSctpSocketHandoverState::UnorderedStream& state_stream : + state.rx.unordered_streams) { + unordered_streams_.emplace(std::piecewise_construct, + std::forward_as_tuple(StreamID(state_stream.id)), + std::forward_as_tuple(this)); + } +} + } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.h index 0c724327e2..4825afd1ba 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.h @@ -29,10 +29,8 @@ namespace dcsctp { // RFC4960 is to be followed. class TraditionalReassemblyStreams : public ReassemblyStreams { public: - TraditionalReassemblyStreams( - absl::string_view log_prefix, - OnAssembledMessage on_assembled_message, - const DcSctpSocketHandoverState* handover_state = nullptr); + TraditionalReassemblyStreams(absl::string_view log_prefix, + OnAssembledMessage on_assembled_message); int Add(UnwrappedTSN tsn, Data data) override; @@ -45,6 +43,7 @@ class TraditionalReassemblyStreams : public ReassemblyStreams { HandoverReadinessStatus GetHandoverReadiness() const override; void AddHandoverState(DcSctpSocketHandoverState& state) override; + void RestoreFromState(const DcSctpSocketHandoverState& state) override; private: using ChunkMap = std::map; @@ -55,8 +54,7 @@ class TraditionalReassemblyStreams : public ReassemblyStreams { explicit StreamBase(TraditionalReassemblyStreams* parent) : parent_(*parent) {} - size_t AssembleMessage(const ChunkMap::iterator start, - const ChunkMap::iterator end); + size_t AssembleMessage(ChunkMap::iterator start, ChunkMap::iterator end); TraditionalReassemblyStreams& parent_; }; diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.cc index b4af10e88a..123526e782 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.cc @@ -9,6 +9,8 @@ */ #include "net/dcsctp/socket/callback_deferrer.h" +#include "api/make_ref_counted.h" + namespace dcsctp { namespace { // A wrapper around the move-only DcSctpMessage, to let it be captured in a @@ -61,9 +63,10 @@ SendPacketStatus CallbackDeferrer::SendPacketWithStatus( return underlying_.SendPacketWithStatus(data); } -std::unique_ptr CallbackDeferrer::CreateTimeout() { +std::unique_ptr CallbackDeferrer::CreateTimeout( + webrtc::TaskQueueBase::DelayPrecision precision) { // Will not be deferred - call 
directly. - return underlying_.CreateTimeout(); + return underlying_.CreateTimeout(precision); } TimeMs CallbackDeferrer::TimeMillis() { @@ -157,4 +160,22 @@ void CallbackDeferrer::OnTotalBufferedAmountLow() { deferred_.emplace_back( [](DcSctpSocketCallbacks& cb) { cb.OnTotalBufferedAmountLow(); }); } + +void CallbackDeferrer::OnLifecycleMessageExpired(LifecycleId lifecycle_id, + bool maybe_delivered) { + // Will not be deferred - call directly. + underlying_.OnLifecycleMessageExpired(lifecycle_id, maybe_delivered); +} +void CallbackDeferrer::OnLifecycleMessageFullySent(LifecycleId lifecycle_id) { + // Will not be deferred - call directly. + underlying_.OnLifecycleMessageFullySent(lifecycle_id); +} +void CallbackDeferrer::OnLifecycleMessageDelivered(LifecycleId lifecycle_id) { + // Will not be deferred - call directly. + underlying_.OnLifecycleMessageDelivered(lifecycle_id); +} +void CallbackDeferrer::OnLifecycleEnd(LifecycleId lifecycle_id) { + // Will not be deferred - call directly. + underlying_.OnLifecycleEnd(lifecycle_id); +} } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.h index 918b1df32d..1c35dda6cf 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.h @@ -21,9 +21,9 @@ #include "api/array_view.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/public/dcsctp_message.h" #include "net/dcsctp/public/dcsctp_socket.h" -#include "rtc_base/ref_counted_object.h" namespace dcsctp { // Defers callbacks until they can be safely triggered. @@ -62,7 +62,8 @@ class CallbackDeferrer : public DcSctpSocketCallbacks { // Implementation of DcSctpSocketCallbacks SendPacketStatus SendPacketWithStatus( rtc::ArrayView data) override; - std::unique_ptr CreateTimeout() override; + std::unique_ptr CreateTimeout( + webrtc::TaskQueueBase::DelayPrecision precision) override; TimeMs TimeMillis() override; uint32_t GetRandomInt(uint32_t low, uint32_t high) override; void OnMessageReceived(DcSctpMessage message) override; @@ -80,6 +81,12 @@ class CallbackDeferrer : public DcSctpSocketCallbacks { void OnBufferedAmountLow(StreamID stream_id) override; void OnTotalBufferedAmountLow() override; + void OnLifecycleMessageExpired(LifecycleId lifecycle_id, + bool maybe_delivered) override; + void OnLifecycleMessageFullySent(LifecycleId lifecycle_id) override; + void OnLifecycleMessageDelivered(LifecycleId lifecycle_id) override; + void OnLifecycleEnd(LifecycleId lifecycle_id) override; + private: void Prepare(); void TriggerDeferred(); diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/capabilities.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/capabilities.h index c6d3692b2d..fa3be37d12 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/capabilities.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/capabilities.h @@ -10,6 +10,7 @@ #ifndef NET_DCSCTP_SOCKET_CAPABILITIES_H_ #define NET_DCSCTP_SOCKET_CAPABILITIES_H_ +#include namespace dcsctp { // Indicates what the association supports, meaning that both parties // support it and that feature can be used. @@ -20,6 +21,9 @@ struct Capabilities { bool message_interleaving = false; // RFC6525 Stream Reconfiguration bool reconfig = false; + // Negotiated maximum incoming and outgoing stream count. 
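The CallbackDeferrer changes above keep the existing split: most notifications are captured as lambdas and replayed later, while the new lifecycle callbacks are forwarded immediately. A minimal standalone sketch of the deferral idea, with a simplified callback interface rather than the real DcSctpSocketCallbacks:

    #include <cstdio>
    #include <functional>
    #include <utility>
    #include <vector>

    struct Callbacks {
      virtual ~Callbacks() = default;
      virtual void OnError(int code) = 0;
    };

    // Buffers callback invocations so the caller can finish mutating its own
    // state before user code runs.
    class Deferrer : public Callbacks {
     public:
      explicit Deferrer(Callbacks& underlying) : underlying_(underlying) {}

      void OnError(int code) override {
        deferred_.emplace_back([code](Callbacks& cb) { cb.OnError(code); });
      }

      void TriggerDeferred() {
        // Swap first so callbacks that schedule new callbacks are not lost.
        std::vector<std::function<void(Callbacks&)>> pending;
        pending.swap(deferred_);
        for (auto& f : pending) {
          f(underlying_);
        }
      }

     private:
      Callbacks& underlying_;
      std::vector<std::function<void(Callbacks&)>> deferred_;
    };

    struct PrintingCallbacks : Callbacks {
      void OnError(int code) override { std::printf("error %d\n", code); }
    };

    int main() {
      PrintingCallbacks printer;
      Deferrer deferrer(printer);
      deferrer.OnError(42);        // Nothing printed yet.
      deferrer.TriggerDeferred();  // Prints "error 42".
    }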
+ uint16_t negotiated_maximum_incoming_streams = 0; + uint16_t negotiated_maximum_outgoing_streams = 0; }; } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.cc index 8153910941..f831ba090c 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.cc @@ -22,6 +22,7 @@ #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/array_view.h" +#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/packet/chunk/abort_chunk.h" #include "net/dcsctp/packet/chunk/chunk.h" #include "net/dcsctp/packet/chunk/cookie_ack_chunk.h" @@ -61,6 +62,7 @@ #include "net/dcsctp/public/dcsctp_options.h" #include "net/dcsctp/public/dcsctp_socket.h" #include "net/dcsctp/public/packet_observer.h" +#include "net/dcsctp/public/types.h" #include "net/dcsctp/rx/data_tracker.h" #include "net/dcsctp/rx/reassembly_queue.h" #include "net/dcsctp/socket/callback_deferrer.h" @@ -88,8 +90,10 @@ constexpr uint32_t kMaxVerificationTag = std::numeric_limits::max(); constexpr uint32_t kMinInitialTsn = 0; constexpr uint32_t kMaxInitialTsn = std::numeric_limits::max(); -Capabilities GetCapabilities(const DcSctpOptions& options, - const Parameters& parameters) { +Capabilities ComputeCapabilities(const DcSctpOptions& options, + uint16_t peer_nbr_outbound_streams, + uint16_t peer_nbr_inbound_streams, + const Parameters& parameters) { Capabilities capabilities; absl::optional supported_extensions = parameters.get(); @@ -112,6 +116,12 @@ Capabilities GetCapabilities(const DcSctpOptions& options, supported_extensions->supports(ReConfigChunk::kType)) { capabilities.reconfig = true; } + + capabilities.negotiated_maximum_incoming_streams = std::min( + options.announced_maximum_incoming_streams, peer_nbr_outbound_streams); + capabilities.negotiated_maximum_outgoing_streams = std::min( + options.announced_maximum_outgoing_streams, peer_nbr_inbound_streams); + return capabilities; } @@ -162,7 +172,9 @@ DcSctpSocket::DcSctpSocket(absl::string_view log_prefix, packet_observer_(std::move(packet_observer)), options_(options), callbacks_(callbacks), - timer_manager_([this]() { return callbacks_.CreateTimeout(); }), + timer_manager_([this](webrtc::TaskQueueBase::DelayPrecision precision) { + return callbacks_.CreateTimeout(precision); + }), t1_init_(timer_manager_.CreateTimer( "t1-init", absl::bind_front(&DcSctpSocket::OnInitTimerExpiry, this), @@ -183,17 +195,15 @@ DcSctpSocket::DcSctpSocket(absl::string_view log_prefix, options.max_retransmissions))), packet_sender_(callbacks_, absl::bind_front(&DcSctpSocket::OnSentPacket, this)), - send_queue_( - log_prefix_, - options_.max_send_buffer_size, - [this](StreamID stream_id) { - callbacks_.OnBufferedAmountLow(stream_id); - }, - options_.total_buffered_amount_low_threshold, - [this]() { callbacks_.OnTotalBufferedAmountLow(); }) {} + send_queue_(log_prefix_, + &callbacks_, + options_.max_send_buffer_size, + options_.mtu, + options_.default_stream_priority, + options_.total_buffered_amount_low_threshold) {} std::string DcSctpSocket::log_prefix() const { - return log_prefix_ + "[" + std::string(ToString(state_)) + "] "; + return log_prefix_ + "[" + std::string(ToString(state_)) + "] "; } bool DcSctpSocket::IsConsistent() const { @@ -301,6 +311,27 @@ void DcSctpSocket::Connect() { RTC_DCHECK(IsConsistent()); } +void DcSctpSocket::CreateTransmissionControlBlock( + const 
Capabilities& capabilities, + VerificationTag my_verification_tag, + TSN my_initial_tsn, + VerificationTag peer_verification_tag, + TSN peer_initial_tsn, + size_t a_rwnd, + TieTag tie_tag) { + metrics_.uses_message_interleaving = capabilities.message_interleaving; + metrics_.negotiated_maximum_incoming_streams = + capabilities.negotiated_maximum_incoming_streams; + metrics_.negotiated_maximum_outgoing_streams = + capabilities.negotiated_maximum_outgoing_streams; + tcb_ = std::make_unique( + timer_manager_, log_prefix_, options_, capabilities, callbacks_, + send_queue_, my_verification_tag, my_initial_tsn, peer_verification_tag, + peer_initial_tsn, a_rwnd, tie_tag, packet_sender_, + [this]() { return state_ == State::kEstablished; }); + RTC_DLOG(LS_VERBOSE) << log_prefix() << "Created TCB: " << tcb_->ToString(); +} + void DcSctpSocket::RestoreFromState(const DcSctpSocketHandoverState& state) { RTC_DCHECK_RUN_ON(&thread_checker_); CallbackDeferrer::ScopedDeferrer deferrer(callbacks_); @@ -320,18 +351,20 @@ void DcSctpSocket::RestoreFromState(const DcSctpSocketHandoverState& state) { capabilities.message_interleaving = state.capabilities.message_interleaving; capabilities.reconfig = state.capabilities.reconfig; + capabilities.negotiated_maximum_incoming_streams = + state.capabilities.negotiated_maximum_incoming_streams; + capabilities.negotiated_maximum_outgoing_streams = + state.capabilities.negotiated_maximum_outgoing_streams; send_queue_.RestoreFromState(state); - tcb_ = std::make_unique( - timer_manager_, log_prefix_, options_, capabilities, callbacks_, - send_queue_, my_verification_tag, TSN(state.my_initial_tsn), + CreateTransmissionControlBlock( + capabilities, my_verification_tag, TSN(state.my_initial_tsn), VerificationTag(state.peer_verification_tag), TSN(state.peer_initial_tsn), static_cast(0), - TieTag(state.tie_tag), packet_sender_, - [this]() { return state_ == State::kEstablished; }, &state); - RTC_DLOG(LS_VERBOSE) << log_prefix() << "Created peer TCB from state: " - << tcb_->ToString(); + TieTag(state.tie_tag)); + + tcb_->RestoreFromState(state); SetState(State::kEstablished, "restored from handover state"); callbacks_.OnConnected(); @@ -417,17 +450,34 @@ void DcSctpSocket::InternalClose(ErrorKind error, absl::string_view message) { RTC_DCHECK(IsConsistent()); } +void DcSctpSocket::SetStreamPriority(StreamID stream_id, + StreamPriority priority) { + RTC_DCHECK_RUN_ON(&thread_checker_); + send_queue_.SetStreamPriority(stream_id, priority); +} +StreamPriority DcSctpSocket::GetStreamPriority(StreamID stream_id) const { + RTC_DCHECK_RUN_ON(&thread_checker_); + return send_queue_.GetStreamPriority(stream_id); +} + SendStatus DcSctpSocket::Send(DcSctpMessage message, const SendOptions& send_options) { RTC_DCHECK_RUN_ON(&thread_checker_); CallbackDeferrer::ScopedDeferrer deferrer(callbacks_); + LifecycleId lifecycle_id = send_options.lifecycle_id; if (message.payload().empty()) { + if (lifecycle_id.IsSet()) { + callbacks_.OnLifecycleEnd(lifecycle_id); + } callbacks_.OnError(ErrorKind::kProtocolViolation, "Unable to send empty message"); return SendStatus::kErrorMessageEmpty; } if (message.payload().size() > options_.max_message_size) { + if (lifecycle_id.IsSet()) { + callbacks_.OnLifecycleEnd(lifecycle_id); + } callbacks_.OnError(ErrorKind::kProtocolViolation, "Unable to send too large message"); return SendStatus::kErrorMessageTooLarge; @@ -438,11 +488,17 @@ SendStatus DcSctpSocket::Send(DcSctpMessage message, // "An endpoint should reject any new data request from its upper layer // 
if it is in the SHUTDOWN-PENDING, SHUTDOWN-SENT, SHUTDOWN-RECEIVED, or // SHUTDOWN-ACK-SENT state." + if (lifecycle_id.IsSet()) { + callbacks_.OnLifecycleEnd(lifecycle_id); + } callbacks_.OnError(ErrorKind::kWrongSequence, "Unable to send message as the socket is shutting down"); return SendStatus::kErrorShuttingDown; } if (send_queue_.IsFull()) { + if (lifecycle_id.IsSet()) { + callbacks_.OnLifecycleEnd(lifecycle_id); + } callbacks_.OnError(ErrorKind::kResourceExhaustion, "Unable to send message as the send queue is full"); return SendStatus::kErrorResourceExhaustion; @@ -476,13 +532,7 @@ ResetStreamsStatus DcSctpSocket::ResetStreams( } tcb_->stream_reset_handler().ResetStreams(outgoing_streams); - absl::optional reconfig = - tcb_->stream_reset_handler().MakeStreamResetRequest(); - if (reconfig.has_value()) { - SctpPacket::Builder builder = tcb_->PacketBuilder(); - builder.Add(*reconfig); - packet_sender_.Send(builder); - } + MaybeSendResetStreamsRequest(); RTC_DCHECK(IsConsistent()); return ResetStreamsStatus::kPerformed; @@ -494,17 +544,13 @@ SocketState DcSctpSocket::state() const { case State::kClosed: return SocketState::kClosed; case State::kCookieWait: - ABSL_FALLTHROUGH_INTENDED; case State::kCookieEchoed: return SocketState::kConnecting; case State::kEstablished: return SocketState::kConnected; case State::kShutdownPending: - ABSL_FALLTHROUGH_INTENDED; case State::kShutdownSent: - ABSL_FALLTHROUGH_INTENDED; case State::kShutdownReceived: - ABSL_FALLTHROUGH_INTENDED; case State::kShutdownAckSent: return SocketState::kShuttingDown; } @@ -531,24 +577,28 @@ void DcSctpSocket::SetBufferedAmountLowThreshold(StreamID stream_id, send_queue_.SetBufferedAmountLowThreshold(stream_id, bytes); } -Metrics DcSctpSocket::GetMetrics() const { +absl::optional DcSctpSocket::GetMetrics() const { RTC_DCHECK_RUN_ON(&thread_checker_); - Metrics metrics = metrics_; - if (tcb_ != nullptr) { - // Update the metrics with some stats that are extracted from - // sub-components. 
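As the GetMetrics() hunk that starts here and continues below shows, the method now returns no value until a transmission control block exists, and it estimates the number of unacked chunks with a ceiling division over the usable payload per packet. A hedged sketch of the same shape, using std::optional and made-up field names rather than the real Metrics struct:

    #include <cstddef>
    #include <optional>

    struct MetricsSnapshot {
      size_t cwnd_bytes = 0;
      size_t unack_data_count = 0;
    };

    // Returns no value while the association is not established, mirroring the
    // "return absl::nullopt when tcb_ == nullptr" early exit.
    std::optional<MetricsSnapshot> GetMetricsSketch(bool connected,
                                                    size_t cwnd_bytes,
                                                    size_t outstanding_items,
                                                    size_t buffered_bytes,
                                                    size_t payload_per_packet) {
      if (!connected) {
        return std::nullopt;
      }
      MetricsSnapshot m;
      m.cwnd_bytes = cwnd_bytes;
      // Ceiling division: buffered data that still needs N packets counts as N
      // unacked chunks, in addition to what is already in flight.
      m.unack_data_count =
          outstanding_items +
          (buffered_bytes + payload_per_packet - 1) / payload_per_packet;
      return m;
    }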
- metrics.cwnd_bytes = tcb_->cwnd(); - metrics.srtt_ms = tcb_->current_srtt().value(); - size_t packet_payload_size = - options_.mtu - SctpPacket::kHeaderSize - DataChunk::kHeaderSize; - metrics.unack_data_count = - tcb_->retransmission_queue().outstanding_items() + - (send_queue_.total_buffered_amount() + packet_payload_size - 1) / - packet_payload_size; - metrics.peer_rwnd_bytes = tcb_->retransmission_queue().rwnd(); + if (tcb_ == nullptr) { + return absl::nullopt; } + Metrics metrics = metrics_; + metrics.cwnd_bytes = tcb_->cwnd(); + metrics.srtt_ms = tcb_->current_srtt().value(); + size_t packet_payload_size = + options_.mtu - SctpPacket::kHeaderSize - DataChunk::kHeaderSize; + metrics.unack_data_count = + tcb_->retransmission_queue().outstanding_items() + + (send_queue_.total_buffered_amount() + packet_payload_size - 1) / + packet_payload_size; + metrics.peer_rwnd_bytes = tcb_->retransmission_queue().rwnd(); + metrics.negotiated_maximum_incoming_streams = + tcb_->capabilities().negotiated_maximum_incoming_streams; + metrics.negotiated_maximum_incoming_streams = + tcb_->capabilities().negotiated_maximum_incoming_streams; + return metrics; } @@ -571,6 +621,16 @@ void DcSctpSocket::MaybeSendShutdownOnPacketReceived(const SctpPacket& packet) { } } +void DcSctpSocket::MaybeSendResetStreamsRequest() { + absl::optional reconfig = + tcb_->stream_reset_handler().MakeStreamResetRequest(); + if (reconfig.has_value()) { + SctpPacket::Builder builder = tcb_->PacketBuilder(); + builder.Add(*reconfig); + packet_sender_.Send(builder); + } +} + bool DcSctpSocket::ValidatePacket(const SctpPacket& packet) { const CommonHeader& header = packet.common_header(); VerificationTag my_verification_tag = @@ -799,7 +859,7 @@ bool DcSctpSocket::Dispatch(const CommonHeader& header, HandleIData(header, descriptor); break; case IForwardTsnChunk::kType: - HandleForwardTsn(header, descriptor); + HandleIForwardTsn(header, descriptor); break; default: return HandleUnrecognizedChunk(descriptor); @@ -1025,11 +1085,12 @@ void DcSctpSocket::HandleDataCommon(AnyDataChunk& chunk) { return; } - tcb_->data_tracker().Observe(tsn, immediate_ack); - tcb_->reassembly_queue().MaybeResetStreamsDeferred( - tcb_->data_tracker().last_cumulative_acked_tsn()); - tcb_->reassembly_queue().Add(tsn, std::move(data)); - DeliverReassembledMessages(); + if (tcb_->data_tracker().Observe(tsn, immediate_ack)) { + tcb_->reassembly_queue().Add(tsn, std::move(data)); + tcb_->reassembly_queue().MaybeResetStreamsDeferred( + tcb_->data_tracker().last_cumulative_acked_tsn()); + DeliverReassembledMessages(); + } } void DcSctpSocket::HandleInit(const CommonHeader& header, @@ -1131,7 +1192,9 @@ void DcSctpSocket::HandleInit(const CommonHeader& header, *connect_params_.verification_tag, *connect_params_.initial_tsn, *chunk->initiate_tag(), *chunk->initial_tsn()); - Capabilities capabilities = GetCapabilities(options_, chunk->parameters()); + Capabilities capabilities = + ComputeCapabilities(options_, chunk->nbr_outbound_streams(), + chunk->nbr_inbound_streams(), chunk->parameters()); SctpPacket::Builder b(chunk->initiate_tag(), options_); Parameters::Builder params_builder = @@ -1180,19 +1243,25 @@ void DcSctpSocket::HandleInitAck( "InitAck chunk doesn't contain a cookie"); return; } - Capabilities capabilities = GetCapabilities(options_, chunk->parameters()); + Capabilities capabilities = + ComputeCapabilities(options_, chunk->nbr_outbound_streams(), + chunk->nbr_inbound_streams(), chunk->parameters()); t1_init_->Stop(); - peer_implementation_ = 
DeterminePeerImplementation(cookie->data()); + metrics_.peer_implementation = DeterminePeerImplementation(cookie->data()); - tcb_ = std::make_unique( - timer_manager_, log_prefix_, options_, capabilities, callbacks_, - send_queue_, connect_params_.verification_tag, - connect_params_.initial_tsn, chunk->initiate_tag(), chunk->initial_tsn(), - chunk->a_rwnd(), MakeTieTag(callbacks_), packet_sender_, - [this]() { return state_ == State::kEstablished; }); - RTC_DLOG(LS_VERBOSE) << log_prefix() - << "Created peer TCB: " << tcb_->ToString(); + // If the connection is re-established (peer restarted, but re-used old + // connection), make sure that all message identifiers are reset and any + // partly sent message is re-sent in full. The same is true when the socket + // is closed and later re-opened, which never happens in WebRTC, but is a + // valid operation on the SCTP level. Note that in case of handover, the + // send queue is already re-configured, and shouldn't be reset. + send_queue_.Reset(); + + CreateTransmissionControlBlock(capabilities, connect_params_.verification_tag, + connect_params_.initial_tsn, + chunk->initiate_tag(), chunk->initial_tsn(), + chunk->a_rwnd(), MakeTieTag(callbacks_)); SetState(State::kCookieEchoed, "INIT_ACK received"); @@ -1246,14 +1315,18 @@ void DcSctpSocket::HandleCookieEcho( } if (tcb_ == nullptr) { - tcb_ = std::make_unique( - timer_manager_, log_prefix_, options_, cookie->capabilities(), - callbacks_, send_queue_, connect_params_.verification_tag, + // If the connection is re-established (peer restarted, but re-used old + // connection), make sure that all message identifiers are reset and any + // partly sent message is re-sent in full. The same is true when the socket + // is closed and later re-opened, which never happens in WebRTC, but is a + // valid operation on the SCTP level. Note that in case of handover, the + // send queue is already re-configured, and shouldn't be reset. + send_queue_.Reset(); + + CreateTransmissionControlBlock( + cookie->capabilities(), connect_params_.verification_tag, connect_params_.initial_tsn, cookie->initiate_tag(), - cookie->initial_tsn(), cookie->a_rwnd(), MakeTieTag(callbacks_), - packet_sender_, [this]() { return state_ == State::kEstablished; }); - RTC_DLOG(LS_VERBOSE) << log_prefix() - << "Created peer TCB: " << tcb_->ToString(); + cookie->initial_tsn(), cookie->a_rwnd(), MakeTieTag(callbacks_)); } SctpPacket::Builder b = tcb_->PacketBuilder(); @@ -1301,9 +1374,6 @@ bool DcSctpSocket::HandleCookieEchoWithTCB(const CommonHeader& header, RTC_DLOG(LS_VERBOSE) << log_prefix() << "Received COOKIE-ECHO indicating a restarted peer"; - // If a message was partly sent, and the peer restarted, resend it in - // full by resetting the send queue. - send_queue_.Reset(); tcb_ = nullptr; callbacks_.OnConnectionRestarted(); } else if (header.verification_tag == tcb_->my_verification_tag() && @@ -1387,6 +1457,17 @@ void DcSctpSocket::HandleSack(const CommonHeader& header, if (tcb_->retransmission_queue().HandleSack(now, sack)) { MaybeSendShutdownOrAck(); + // Receiving an ACK may make the socket go into fast recovery mode. + // https://datatracker.ietf.org/doc/html/rfc4960#section-7.2.4 + // "Determine how many of the earliest (i.e., lowest TSN) DATA chunks + // marked for retransmission will fit into a single packet, subject to + // constraint of the path MTU of the destination transport address to + // which the packet is being sent. Call this value K. Retransmit those K + // DATA chunks in a single packet. 
When a Fast Retransmit is being + // performed, the sender SHOULD ignore the value of cwnd and SHOULD NOT + // delay retransmission for this single packet." + tcb_->MaybeSendFastRetransmit(); + // Receiving an ACK will decrease outstanding bytes (maybe now below // cwnd?) or indicate packet loss that may result in sending FORWARD-TSN. tcb_->SendBufferedPackets(now); @@ -1460,9 +1541,18 @@ void DcSctpSocket::HandleError(const CommonHeader& header, void DcSctpSocket::HandleReconfig( const CommonHeader& header, const SctpPacket::ChunkDescriptor& descriptor) { + TimeMs now = callbacks_.TimeMillis(); absl::optional chunk = ReConfigChunk::Parse(descriptor.data); if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { tcb_->stream_reset_handler().HandleReConfig(*std::move(chunk)); + // Handling this response may result in outgoing stream resets finishing + // (either successfully or with failure). If there still are pending streams + // that were waiting for this request to finish, continue resetting them. + MaybeSendResetStreamsRequest(); + + // If a response was processed, pending to-be-reset streams may now have + // become unpaused. Try to send more DATA chunks. + tcb_->SendBufferedPackets(now); } } diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.h index b1b3ea9d9b..157c515d65 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.h @@ -96,14 +96,16 @@ class DcSctpSocket : public DcSctpSocketInterface { SocketState state() const override; const DcSctpOptions& options() const override { return options_; } void SetMaxMessageSize(size_t max_message_size) override; + void SetStreamPriority(StreamID stream_id, StreamPriority priority) override; + StreamPriority GetStreamPriority(StreamID stream_id) const override; size_t buffered_amount(StreamID stream_id) const override; size_t buffered_amount_low_threshold(StreamID stream_id) const override; void SetBufferedAmountLowThreshold(StreamID stream_id, size_t bytes) override; - Metrics GetMetrics() const override; + absl::optional GetMetrics() const override; HandoverReadinessStatus GetHandoverReadiness() const override; absl::optional GetHandoverStateAndClose() override; SctpImplementation peer_implementation() const override { - return peer_implementation_; + return metrics_.peer_implementation; } // Returns this socket's verification tag, or zero if not yet connected. VerificationTag verification_tag() const { @@ -136,6 +138,14 @@ class DcSctpSocket : public DcSctpSocketInterface { bool IsConsistent() const; static constexpr absl::string_view ToString(DcSctpSocket::State state); + void CreateTransmissionControlBlock(const Capabilities& capabilities, + VerificationTag my_verification_tag, + TSN my_initial_tsn, + VerificationTag peer_verification_tag, + TSN peer_initial_tsn, + size_t a_rwnd, + TieTag tie_tag); + // Changes the socket state, given a `reason` (for debugging/logging). void SetState(State state, absl::string_view reason); // Fills in `connect_params` with random verification tag and initial TSN. @@ -155,6 +165,8 @@ class DcSctpSocket : public DcSctpSocketInterface { void MaybeSendShutdownOrAck(); // If the socket is shutting down, responds SHUTDOWN to any incoming DATA. void MaybeSendShutdownOnPacketReceived(const SctpPacket& packet); + // If there are streams pending to be reset, send a request to reset them. 
+ void MaybeSendResetStreamsRequest(); // Sends a INIT chunk. void SendInit(); // Sends a SHUTDOWN chunk. @@ -280,8 +292,6 @@ class DcSctpSocket : public DcSctpSocketInterface { State state_ = State::kClosed; // If the connection is established, contains a transmission control block. std::unique_ptr tcb_; - - SctpImplementation peer_implementation_ = SctpImplementation::kUnknown; }; } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h index a49a0b3325..8b2a772fa3 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h @@ -20,6 +20,7 @@ #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/array_view.h" +#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/public/dcsctp_message.h" #include "net/dcsctp/public/dcsctp_socket.h" #include "net/dcsctp/public/timeout.h" @@ -87,7 +88,9 @@ class MockDcSctpSocketCallbacks : public DcSctpSocketCallbacks { (rtc::ArrayView data), (override)); - std::unique_ptr CreateTimeout() override { + std::unique_ptr CreateTimeout( + webrtc::TaskQueueBase::DelayPrecision precision) override { + // The fake timeout manager does not implement |precision|. return timeout_manager_.CreateTimeout(); } @@ -123,6 +126,19 @@ class MockDcSctpSocketCallbacks : public DcSctpSocketCallbacks { (override)); MOCK_METHOD(void, OnBufferedAmountLow, (StreamID stream_id), (override)); MOCK_METHOD(void, OnTotalBufferedAmountLow, (), (override)); + MOCK_METHOD(void, + OnLifecycleMessageExpired, + (LifecycleId lifecycle_id, bool maybe_delivered), + (override)); + MOCK_METHOD(void, + OnLifecycleMessageFullySent, + (LifecycleId lifecycle_id), + (override)); + MOCK_METHOD(void, + OnLifecycleMessageDelivered, + (LifecycleId lifecycle_id), + (override)); + MOCK_METHOD(void, OnLifecycleEnd, (LifecycleId lifecycle_id), (override)); bool HasPacket() const { return !sent_packets_.empty(); } @@ -150,12 +166,6 @@ class MockDcSctpSocketCallbacks : public DcSctpSocketCallbacks { return timeout_manager_.GetNextExpiredTimeout(); } - void Reset() { - sent_packets_.clear(); - received_messages_.clear(); - timeout_manager_.Reset(); - } - private: const std::string log_prefix_; TimeMs now_ = TimeMs(0); diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/state_cookie.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/state_cookie.cc index 7d04cbb0d7..86be77aa34 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/state_cookie.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/state_cookie.cc @@ -40,6 +40,8 @@ std::vector StateCookie::Serialize() { buffer.Store8<28>(capabilities_.partial_reliability); buffer.Store8<29>(capabilities_.message_interleaving); buffer.Store8<30>(capabilities_.reconfig); + buffer.Store16<32>(capabilities_.negotiated_maximum_incoming_streams); + buffer.Store16<34>(capabilities_.negotiated_maximum_outgoing_streams); return cookie; } @@ -70,6 +72,8 @@ absl::optional StateCookie::Deserialize( capabilities.partial_reliability = buffer.Load8<28>() != 0; capabilities.message_interleaving = buffer.Load8<29>() != 0; capabilities.reconfig = buffer.Load8<30>() != 0; + capabilities.negotiated_maximum_incoming_streams = buffer.Load16<32>(); + capabilities.negotiated_maximum_outgoing_streams = buffer.Load16<34>(); return StateCookie(verification_tag, initial_tsn, a_rwnd, 
tie_tag, capabilities); diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/state_cookie.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/state_cookie.h index df4b801397..a26dbf86f7 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/state_cookie.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/state_cookie.h @@ -27,7 +27,7 @@ namespace dcsctp { // Do not trust anything in it; no pointers or anything like that. class StateCookie { public: - static constexpr size_t kCookieSize = 31; + static constexpr size_t kCookieSize = 36; StateCookie(VerificationTag initiate_tag, TSN initial_tsn, diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/stream_reset_handler.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/stream_reset_handler.cc index 1c6ce09e56..c81b34b626 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/stream_reset_handler.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/stream_reset_handler.cc @@ -134,11 +134,16 @@ bool StreamResetHandler::ValidateReqSeqNbr( ReconfigRequestSN req_seq_nbr, std::vector& responses) { if (req_seq_nbr == last_processed_req_seq_nbr_) { - // This has already been performed previously. + // https://www.rfc-editor.org/rfc/rfc6525.html#section-5.2.1 "If the + // received RE-CONFIG chunk contains at least one request and based on the + // analysis of the Re-configuration Request Sequence Numbers this is the + // last received RE-CONFIG chunk (i.e., a retransmission), the same + // RE-CONFIG chunk MUST to be sent back in response, as it was earlier." RTC_DLOG(LS_VERBOSE) << log_prefix_ << "req=" << *req_seq_nbr - << " already processed"; + << " already processed, returning result=" + << ToString(last_processed_req_result_); responses.push_back(ReconfigurationResponseParameter( - req_seq_nbr, ResponseResult::kSuccessNothingToDo)); + req_seq_nbr, last_processed_req_result_)); return false; } @@ -170,20 +175,18 @@ void StreamResetHandler::HandleResetOutgoing( } if (ValidateReqSeqNbr(req->request_sequence_number(), responses)) { - ResponseResult result; - RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Reset outgoing streams with req_seq_nbr=" << *req->request_sequence_number(); - result = reassembly_queue_->ResetStreams( + last_processed_req_seq_nbr_ = req->request_sequence_number(); + last_processed_req_result_ = reassembly_queue_->ResetStreams( *req, data_tracker_->last_cumulative_acked_tsn()); - if (result == ResponseResult::kSuccessPerformed) { - last_processed_req_seq_nbr_ = req->request_sequence_number(); + if (last_processed_req_result_ == ResponseResult::kSuccessPerformed) { ctx_->callbacks().OnIncomingStreamsReset(req->stream_ids()); } responses.push_back(ReconfigurationResponseParameter( - req->request_sequence_number(), result)); + req->request_sequence_number(), last_processed_req_result_)); } } @@ -270,16 +273,13 @@ absl::optional StreamResetHandler::MakeStreamResetRequest() { // Only send stream resets if there are streams to reset, and no current // ongoing request (there can only be one at a time), and if the stream // can be reset. 
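The request construction that follows is only performed when no RE-CONFIG request is in flight and the retransmission queue reports streams ready to be reset. A compact sketch of that gating with stand-in types (not the real StreamResetHandler):

    #include <cstdint>
    #include <optional>
    #include <utility>
    #include <vector>

    struct ResetRequest {
      uint32_t request_seq_nbr;
      std::vector<uint16_t> stream_ids;
    };

    class ResetRequestGate {
     public:
      // Returns a request only if none is outstanding and there is work to do;
      // mirrors "there can only be one in-flight request at any time".
      std::optional<ResetRequest> MaybeMakeRequest(
          std::vector<uint16_t> streams_ready_to_be_reset) {
        if (current_request_.has_value() || streams_ready_to_be_reset.empty()) {
          return std::nullopt;
        }
        current_request_ = ResetRequest{next_seq_nbr_++,
                                        std::move(streams_ready_to_be_reset)};
        return current_request_;
      }

      // Called when the peer's response arrives, allowing the next request.
      void OnRequestCompleted() { current_request_.reset(); }

     private:
      uint32_t next_seq_nbr_ = 0;
      std::optional<ResetRequest> current_request_;
    };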
- if (streams_to_reset_.empty() || current_request_.has_value() || - !retransmission_queue_->CanResetStreams()) { + if (current_request_.has_value() || + !retransmission_queue_->HasStreamsReadyToBeReset()) { return absl::nullopt; } - std::vector streams_to_reset(streams_to_reset_.begin(), - streams_to_reset_.end()); current_request_.emplace(TSN(*retransmission_queue_->next_tsn() - 1), - std::move(streams_to_reset)); - streams_to_reset_.clear(); + retransmission_queue_->GetStreamsReadyToBeReset()); reconfig_timer_->set_duration(ctx_->current_rto()); reconfig_timer_->Start(); return MakeReconfigChunk(); @@ -310,18 +310,8 @@ ReConfigChunk StreamResetHandler::MakeReconfigChunk() { void StreamResetHandler::ResetStreams( rtc::ArrayView outgoing_streams) { - // Enqueue streams to be reset - as this may be called multiple times - // while a request is already in progress (and there can only be one). for (StreamID stream_id : outgoing_streams) { - streams_to_reset_.insert(stream_id); - } - if (current_request_.has_value()) { - // Already an ongoing request - will need to wait for it to finish as - // there can only be one in-flight ReConfig chunk with requests at any - // time. - } else { - retransmission_queue_->PrepareResetStreams(std::vector( - streams_to_reset_.begin(), streams_to_reset_.end())); + retransmission_queue_->PrepareResetStream(stream_id); } } @@ -345,7 +335,7 @@ absl::optional StreamResetHandler::OnReconfigTimerExpiry() { HandoverReadinessStatus StreamResetHandler::GetHandoverReadiness() const { HandoverReadinessStatus status; - if (!streams_to_reset_.empty()) { + if (retransmission_queue_->HasStreamsReadyToBeReset()) { status.Add(HandoverUnreadinessReason::kPendingStreamReset); } if (current_request_.has_value()) { diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/stream_reset_handler.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/stream_reset_handler.h index a691eb8312..fa32e5fcc9 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/stream_reset_handler.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/stream_reset_handler.h @@ -88,8 +88,9 @@ class StreamResetHandler { last_processed_req_seq_nbr_( handover_state ? ReconfigRequestSN( handover_state->rx.last_completed_reset_req_sn) - : ReconfigRequestSN(*ctx_->peer_initial_tsn() - 1)) { - } + : ReconfigRequestSN(*ctx_->peer_initial_tsn() - 1)), + last_processed_req_result_( + ReconfigurationResponseParameter::Result::kSuccessNothingToDo) {} // Initiates reset of the provided streams. While there can only be one // ongoing stream reset request at any time, this method can be called at any @@ -216,10 +217,6 @@ class StreamResetHandler { RetransmissionQueue* retransmission_queue_; const std::unique_ptr reconfig_timer_; - // Outgoing streams that have been requested to be reset, but hasn't yet - // been included in an outgoing request. - webrtc::flat_set streams_to_reset_; - // The next sequence number for outgoing stream requests. ReconfigRequestSN next_outgoing_req_seq_nbr_; @@ -228,6 +225,8 @@ class StreamResetHandler { // For incoming requests - last processed request sequence number. 
ReconfigRequestSN last_processed_req_seq_nbr_; + // The result from last processed incoming request + ReconfigurationResponseParameter::Result last_processed_req_result_; }; } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.cc index 2e4e968737..1dcf394813 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.cc @@ -37,6 +37,78 @@ namespace dcsctp { +TransmissionControlBlock::TransmissionControlBlock( + TimerManager& timer_manager, + absl::string_view log_prefix, + const DcSctpOptions& options, + const Capabilities& capabilities, + DcSctpSocketCallbacks& callbacks, + SendQueue& send_queue, + VerificationTag my_verification_tag, + TSN my_initial_tsn, + VerificationTag peer_verification_tag, + TSN peer_initial_tsn, + size_t a_rwnd, + TieTag tie_tag, + PacketSender& packet_sender, + std::function is_connection_established) + : log_prefix_(log_prefix), + options_(options), + timer_manager_(timer_manager), + capabilities_(capabilities), + callbacks_(callbacks), + t3_rtx_(timer_manager_.CreateTimer( + "t3-rtx", + absl::bind_front(&TransmissionControlBlock::OnRtxTimerExpiry, this), + TimerOptions(options.rto_initial, + TimerBackoffAlgorithm::kExponential, + /*max_restarts=*/absl::nullopt, + options.max_timer_backoff_duration))), + delayed_ack_timer_(timer_manager_.CreateTimer( + "delayed-ack", + absl::bind_front(&TransmissionControlBlock::OnDelayedAckTimerExpiry, + this), + TimerOptions(options.delayed_ack_max_timeout, + TimerBackoffAlgorithm::kExponential, + /*max_restarts=*/0, + /*max_backoff_duration=*/absl::nullopt, + webrtc::TaskQueueBase::DelayPrecision::kHigh))), + my_verification_tag_(my_verification_tag), + my_initial_tsn_(my_initial_tsn), + peer_verification_tag_(peer_verification_tag), + peer_initial_tsn_(peer_initial_tsn), + tie_tag_(tie_tag), + is_connection_established_(std::move(is_connection_established)), + packet_sender_(packet_sender), + rto_(options), + tx_error_counter_(log_prefix, options), + data_tracker_(log_prefix, delayed_ack_timer_.get(), peer_initial_tsn), + reassembly_queue_(log_prefix, + peer_initial_tsn, + options.max_receiver_window_buffer_size, + capabilities.message_interleaving), + retransmission_queue_( + log_prefix, + &callbacks_, + my_initial_tsn, + a_rwnd, + send_queue, + absl::bind_front(&TransmissionControlBlock::ObserveRTT, this), + [this]() { tx_error_counter_.Clear(); }, + *t3_rtx_, + options, + capabilities.partial_reliability, + capabilities.message_interleaving), + stream_reset_handler_(log_prefix, + this, + &timer_manager, + &data_tracker_, + &reassembly_queue_, + &retransmission_queue_), + heartbeat_handler_(log_prefix, options, this, &timer_manager_) { + send_queue.EnableMessageInterleaving(capabilities.message_interleaving); +} + void TransmissionControlBlock::ObserveRTT(DurationMs rtt) { DurationMs prev_rto = rto_.rto(); rto_.ObserveRTT(rtt); @@ -102,11 +174,35 @@ void TransmissionControlBlock::MaybeSendForwardTsn(SctpPacket::Builder& builder, } } +void TransmissionControlBlock::MaybeSendFastRetransmit() { + if (!retransmission_queue_.has_data_to_be_fast_retransmitted()) { + return; + } + + // https://datatracker.ietf.org/doc/html/rfc4960#section-7.2.4 + // "Determine how many of the earliest (i.e., lowest TSN) DATA chunks marked + // for retransmission will fit into a single packet, subject 
to constraint of + // the path MTU of the destination transport address to which the packet is + // being sent. Call this value K. Retransmit those K DATA chunks in a single + // packet. When a Fast Retransmit is being performed, the sender SHOULD + // ignore the value of cwnd and SHOULD NOT delay retransmission for this + // single packet." + + SctpPacket::Builder builder(peer_verification_tag_, options_); + auto chunks = retransmission_queue_.GetChunksForFastRetransmit( + builder.bytes_remaining()); + for (auto& [tsn, data] : chunks) { + if (capabilities_.message_interleaving) { + builder.Add(IDataChunk(tsn, std::move(data), false)); + } else { + builder.Add(DataChunk(tsn, std::move(data), false)); + } + } + packet_sender_.Send(builder); +} + void TransmissionControlBlock::SendBufferedPackets(SctpPacket::Builder& builder, TimeMs now) { - // FORWARD-TSNs are sent as separate packets to avoid bugs.webrtc.org/12961. - MaybeSendForwardTsn(builder, now); - for (int packet_idx = 0; packet_idx < options_.max_burst && retransmission_queue_.can_send_data(); ++packet_idx) { @@ -131,6 +227,7 @@ void TransmissionControlBlock::SendBufferedPackets(SctpPacket::Builder& builder, builder.Add(data_tracker_.CreateSelectiveAck( reassembly_queue_.remaining_bytes())); } + MaybeSendForwardTsn(builder, now); absl::optional reconfig = stream_reset_handler_.MakeStreamResetRequest(); if (reconfig.has_value()) { @@ -140,9 +237,7 @@ void TransmissionControlBlock::SendBufferedPackets(SctpPacket::Builder& builder, auto chunks = retransmission_queue_.GetChunksToSend(now, builder.bytes_remaining()); - for (auto& elem : chunks) { - TSN tsn = elem.first; - Data data = std::move(elem.second); + for (auto& [tsn, data] : chunks) { if (capabilities_.message_interleaving) { builder.Add(IDataChunk(tsn, std::move(data), false)); } else { @@ -179,6 +274,8 @@ std::string TransmissionControlBlock::ToString() const { if (capabilities_.reconfig) { sb << "Reconfig,"; } + sb << " max_in=" << capabilities_.negotiated_maximum_incoming_streams; + sb << " max_out=" << capabilities_.negotiated_maximum_outgoing_streams; return sb.Release(); } @@ -197,6 +294,10 @@ void TransmissionControlBlock::AddHandoverState( state.capabilities.partial_reliability = capabilities_.partial_reliability; state.capabilities.message_interleaving = capabilities_.message_interleaving; state.capabilities.reconfig = capabilities_.reconfig; + state.capabilities.negotiated_maximum_incoming_streams = + capabilities_.negotiated_maximum_incoming_streams; + state.capabilities.negotiated_maximum_outgoing_streams = + capabilities_.negotiated_maximum_outgoing_streams; state.my_verification_tag = my_verification_tag().value(); state.peer_verification_tag = peer_verification_tag().value(); @@ -209,4 +310,11 @@ void TransmissionControlBlock::AddHandoverState( reassembly_queue_.AddHandoverState(state); retransmission_queue_.AddHandoverState(state); } + +void TransmissionControlBlock::RestoreFromState( + const DcSctpSocketHandoverState& state) { + data_tracker_.RestoreFromState(state); + retransmission_queue_.RestoreFromState(state); + reassembly_queue_.RestoreFromState(state); +} } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.h index 6d9dfc5e70..8e0e9a3ec5 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.h @@ -19,6 
+19,7 @@ #include "absl/functional/bind_front.h" #include "absl/strings/string_view.h" +#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/common/sequence_numbers.h" #include "net/dcsctp/packet/chunk/cookie_echo_chunk.h" #include "net/dcsctp/packet/sctp_packet.h" @@ -44,78 +45,20 @@ namespace dcsctp { // closed or restarted, this object will be deleted and/or replaced. class TransmissionControlBlock : public Context { public: - TransmissionControlBlock( - TimerManager& timer_manager, - absl::string_view log_prefix, - const DcSctpOptions& options, - const Capabilities& capabilities, - DcSctpSocketCallbacks& callbacks, - SendQueue& send_queue, - VerificationTag my_verification_tag, - TSN my_initial_tsn, - VerificationTag peer_verification_tag, - TSN peer_initial_tsn, - size_t a_rwnd, - TieTag tie_tag, - PacketSender& packet_sender, - std::function is_connection_established, - const DcSctpSocketHandoverState* handover_state = nullptr) - : log_prefix_(log_prefix), - options_(options), - timer_manager_(timer_manager), - capabilities_(capabilities), - callbacks_(callbacks), - t3_rtx_(timer_manager_.CreateTimer( - "t3-rtx", - absl::bind_front(&TransmissionControlBlock::OnRtxTimerExpiry, this), - TimerOptions(options.rto_initial, - TimerBackoffAlgorithm::kExponential, - /*max_restarts=*/absl::nullopt, - options.max_timer_backoff_duration))), - delayed_ack_timer_(timer_manager_.CreateTimer( - "delayed-ack", - absl::bind_front(&TransmissionControlBlock::OnDelayedAckTimerExpiry, - this), - TimerOptions(options.delayed_ack_max_timeout, - TimerBackoffAlgorithm::kExponential, - /*max_restarts=*/0))), - my_verification_tag_(my_verification_tag), - my_initial_tsn_(my_initial_tsn), - peer_verification_tag_(peer_verification_tag), - peer_initial_tsn_(peer_initial_tsn), - tie_tag_(tie_tag), - is_connection_established_(std::move(is_connection_established)), - packet_sender_(packet_sender), - rto_(options), - tx_error_counter_(log_prefix, options), - data_tracker_(log_prefix, - delayed_ack_timer_.get(), - peer_initial_tsn, - handover_state), - reassembly_queue_(log_prefix, - peer_initial_tsn, - options.max_receiver_window_buffer_size, - handover_state), - retransmission_queue_( - log_prefix, - my_initial_tsn, - a_rwnd, - send_queue, - absl::bind_front(&TransmissionControlBlock::ObserveRTT, this), - [this]() { tx_error_counter_.Clear(); }, - *t3_rtx_, - options, - capabilities.partial_reliability, - capabilities.message_interleaving, - handover_state), - stream_reset_handler_(log_prefix, - this, - &timer_manager, - &data_tracker_, - &reassembly_queue_, - &retransmission_queue_, - handover_state), - heartbeat_handler_(log_prefix, options, this, &timer_manager_) {} + TransmissionControlBlock(TimerManager& timer_manager, + absl::string_view log_prefix, + const DcSctpOptions& options, + const Capabilities& capabilities, + DcSctpSocketCallbacks& callbacks, + SendQueue& send_queue, + VerificationTag my_verification_tag, + TSN my_initial_tsn, + VerificationTag peer_verification_tag, + TSN peer_initial_tsn, + size_t a_rwnd, + TieTag tie_tag, + PacketSender& packet_sender, + std::function is_connection_established); // Implementation of `Context`. 
bool is_connection_established() const override { @@ -180,6 +123,8 @@ class TransmissionControlBlock : public Context { bool has_cookie_echo_chunk() const { return cookie_echo_chunk_.has_value(); } + void MaybeSendFastRetransmit(); + // Fills `builder` (which may already be filled with control chunks) with // other control and data chunks, and sends packets as much as can be // allowed by the congestion control algorithm. @@ -199,6 +144,7 @@ class TransmissionControlBlock : public Context { HandoverReadinessStatus GetHandoverReadiness() const; void AddHandoverState(DcSctpSocketHandoverState& state); + void RestoreFromState(const DcSctpSocketHandoverState& handover_state); private: // Will be called when the retransmission timer (t3-rtx) expires. diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/data_generator.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/data_generator.h index 859450b1c3..f917c740a7 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/data_generator.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/data_generator.h @@ -38,14 +38,14 @@ class DataGenerator { // "is_end" flag. Data Ordered(std::vector payload, absl::string_view flags = "", - const DataGeneratorOptions opts = {}); + DataGeneratorOptions opts = {}); // Generates unordered "data" with the provided `payload` and flags, which can // contain "B" for setting the "is_beginning" flag, and/or "E" for setting the // "is_end" flag. Data Unordered(std::vector payload, absl::string_view flags = "", - const DataGeneratorOptions opts = {}); + DataGeneratorOptions opts = {}); // Resets the Message ID identifier - simulating a "stream reset". void ResetStream() { message_id_ = MID(0); } diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/fake_timeout.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/fake_timeout.h index e8f50d93cb..74ffe5af29 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/fake_timeout.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/fake_timeout.h @@ -18,6 +18,7 @@ #include #include "absl/types/optional.h" +#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/public/timeout.h" #include "rtc_base/checks.h" #include "rtc_base/containers/flat_set.h" @@ -27,8 +28,8 @@ namespace dcsctp { // A timeout used in tests. class FakeTimeout : public Timeout { public: - explicit FakeTimeout(std::function get_time, - std::function on_delete) + FakeTimeout(std::function get_time, + std::function on_delete) : get_time_(std::move(get_time)), on_delete_(std::move(on_delete)) {} ~FakeTimeout() override { on_delete_(this); } @@ -68,12 +69,17 @@ class FakeTimeoutManager { explicit FakeTimeoutManager(std::function get_time) : get_time_(std::move(get_time)) {} - std::unique_ptr CreateTimeout() { + std::unique_ptr CreateTimeout() { auto timer = std::make_unique( get_time_, [this](FakeTimeout* timer) { timers_.erase(timer); }); timers_.insert(timer.get()); return timer; } + std::unique_ptr CreateTimeout( + webrtc::TaskQueueBase::DelayPrecision precision) { + // FakeTimeout does not support implement |precision|. 
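CreateTimeout now carries a DelayPrecision hint that the production factory forwards to the task queue, while the test fake accepts it for interface compatibility and ignores it. A minimal sketch of threading such a hint through a factory, with hypothetical Timeout/Precision types rather than the WebRTC API:

    #include <cstdio>
    #include <memory>

    enum class Precision { kLow, kHigh };

    struct Timeout {
      explicit Timeout(Precision precision) : precision(precision) {}
      Precision precision;
    };

    // Production-style factory: remembers the requested precision so the
    // eventual delayed task could be scheduled with it.
    std::unique_ptr<Timeout> CreateTimeout(Precision precision = Precision::kLow) {
      return std::make_unique<Timeout>(precision);
    }

    // Fake used in tests: takes the hint but schedules everything the same way,
    // as the fake timeout manager above does.
    std::unique_ptr<Timeout> CreateFakeTimeout(Precision /*ignored*/) {
      return CreateTimeout();
    }

    int main() {
      auto high = CreateTimeout(Precision::kHigh);
      auto fake = CreateFakeTimeout(Precision::kHigh);
      std::printf("%d %d\n", static_cast<int>(high->precision),
                  static_cast<int>(fake->precision));
    }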
+ return CreateTimeout(); + } // NOTE: This can't return a vector, as calling EvaluateHasExpired requires // calling socket->HandleTimeout directly afterwards, as the owning Timer @@ -91,8 +97,6 @@ class FakeTimeoutManager { return absl::nullopt; } - void Reset() { timers_.clear(); } - private: const std::function get_time_; webrtc::flat_set timers_; diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.cc index 6d3054eeb8..6c43640d39 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.cc @@ -9,15 +9,17 @@ */ #include "net/dcsctp/timer/task_queue_timeout.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/units/time_delta.h" #include "rtc_base/logging.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" -#include "rtc_base/task_utils/to_queued_task.h" namespace dcsctp { TaskQueueTimeoutFactory::TaskQueueTimeout::TaskQueueTimeout( - TaskQueueTimeoutFactory& parent) + TaskQueueTimeoutFactory& parent, + webrtc::TaskQueueBase::DelayPrecision precision) : parent_(parent), + precision_(precision), pending_task_safety_flag_(webrtc::PendingTaskSafetyFlag::Create()) {} TaskQueueTimeoutFactory::TaskQueueTimeout::~TaskQueueTimeout() { @@ -54,8 +56,9 @@ void TaskQueueTimeoutFactory::TaskQueueTimeout::Start(DurationMs duration_ms, } posted_task_expiration_ = timeout_expiration_; - parent_.task_queue_.PostDelayedTask( - webrtc::ToQueuedTask( + parent_.task_queue_.PostDelayedTaskWithPrecision( + precision_, + webrtc::SafeTask( pending_task_safety_flag_, [timeout_id, this]() { RTC_DLOG(LS_VERBOSE) << "Timout expired: " << timeout_id.value(); @@ -83,7 +86,7 @@ void TaskQueueTimeoutFactory::TaskQueueTimeout::Start(DurationMs duration_ms, } } }), - duration_ms.value()); + webrtc::TimeDelta::Millis(duration_ms.value())); } void TaskQueueTimeoutFactory::TaskQueueTimeout::Stop() { diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.h index e8d12df592..faae14464f 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.h @@ -13,9 +13,9 @@ #include #include +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "net/dcsctp/public/timeout.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" namespace dcsctp { @@ -45,14 +45,17 @@ class TaskQueueTimeoutFactory { on_expired_(std::move(on_expired)) {} // Creates an implementation of `Timeout`. 
- std::unique_ptr CreateTimeout() { - return std::make_unique(*this); + std::unique_ptr CreateTimeout( + webrtc::TaskQueueBase::DelayPrecision precision = + webrtc::TaskQueueBase::DelayPrecision::kLow) { + return std::make_unique(*this, precision); } private: class TaskQueueTimeout : public Timeout { public: - explicit TaskQueueTimeout(TaskQueueTimeoutFactory& parent); + TaskQueueTimeout(TaskQueueTimeoutFactory& parent, + webrtc::TaskQueueBase::DelayPrecision precision); ~TaskQueueTimeout(); void Start(DurationMs duration_ms, TimeoutID timeout_id) override; @@ -60,6 +63,7 @@ class TaskQueueTimeoutFactory { private: TaskQueueTimeoutFactory& parent_; + const webrtc::TaskQueueBase::DelayPrecision precision_; // A safety flag to ensure that posted tasks to the task queue don't // reference these object when they go out of scope. Note that this safety // flag will be re-created if the scheduled-but-not-yet-expired task is not diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.cc index deee7db0c5..bde07638a5 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.cc @@ -144,7 +144,7 @@ std::unique_ptr TimerManager::CreateTimer(absl::string_view name, // after 800 million reconnections on a single socket. Ensure this will never // happen. RTC_CHECK_NE(*id, std::numeric_limits::max()); - std::unique_ptr timeout = create_timeout_(); + std::unique_ptr timeout = create_timeout_(options.precision); RTC_CHECK(timeout != nullptr); auto timer = absl::WrapUnique(new Timer( id, name, std::move(on_expired), [this, id]() { timers_.erase(id); }, diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.h index 49dff34e4c..31b496dc81 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.h @@ -21,6 +21,7 @@ #include "absl/strings/string_view.h" #include "absl/types/optional.h" +#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/public/timeout.h" #include "rtc_base/strong_alias.h" @@ -52,10 +53,21 @@ struct TimerOptions { TimerBackoffAlgorithm backoff_algorithm, absl::optional max_restarts, absl::optional max_backoff_duration) + : TimerOptions(duration, + backoff_algorithm, + max_restarts, + max_backoff_duration, + webrtc::TaskQueueBase::DelayPrecision::kLow) {} + TimerOptions(DurationMs duration, + TimerBackoffAlgorithm backoff_algorithm, + absl::optional max_restarts, + absl::optional max_backoff_duration, + webrtc::TaskQueueBase::DelayPrecision precision) : duration(duration), backoff_algorithm(backoff_algorithm), max_restarts(max_restarts), - max_backoff_duration(max_backoff_duration) {} + max_backoff_duration(max_backoff_duration), + precision(precision) {} // The initial timer duration. Can be overridden with `set_duration`. const DurationMs duration; @@ -67,6 +79,8 @@ struct TimerOptions { const absl::optional max_restarts; // The maximum timeout value for exponential backoff. const absl::optional max_backoff_duration; + // The precision of the webrtc::TaskQueueBase used for scheduling. + const webrtc::TaskQueueBase::DelayPrecision precision; }; // A high-level timer (in contrast to the low-level `Timeout` class). 
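The precision field is added to TimerOptions without touching existing call sites by delegating from the old constructor to a new one that defaults to low precision. A standalone sketch of that pattern with simplified option fields:

    #include <cstdint>
    #include <optional>

    enum class Precision { kLow, kHigh };

    struct Options {
      // Old signature keeps working: it delegates to the new one with kLow.
      Options(int64_t duration_ms, std::optional<int> max_restarts)
          : Options(duration_ms, max_restarts, Precision::kLow) {}

      Options(int64_t duration_ms,
              std::optional<int> max_restarts,
              Precision precision)
          : duration_ms(duration_ms),
            max_restarts(max_restarts),
            precision(precision) {}

      const int64_t duration_ms;
      const std::optional<int> max_restarts;
      const Precision precision;
    };

    int main() {
      Options legacy(3000, std::nullopt);         // precision == kLow
      Options precise(200, 0, Precision::kHigh);  // explicit high precision
      return legacy.precision == Precision::kLow &&
                     precise.precision == Precision::kHigh
                 ? 0
                 : 1;
    }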
@@ -172,7 +186,8 @@ class Timer { class TimerManager { public: explicit TimerManager( - std::function()> create_timeout) + std::function( + webrtc::TaskQueueBase::DelayPrecision)> create_timeout) : create_timeout_(std::move(create_timeout)) {} // Creates a timer with name `name` that will expire (when started) after @@ -185,7 +200,9 @@ class TimerManager { void HandleTimeout(TimeoutID timeout_id); private: - const std::function()> create_timeout_; + const std::function( + webrtc::TaskQueueBase::DelayPrecision)> + create_timeout_; std::map timers_; TimerID next_id_ = TimerID(0); }; diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/mock_send_queue.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/mock_send_queue.h index 0cf64583ae..0c8f5d141d 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/mock_send_queue.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/mock_send_queue.h @@ -11,6 +11,7 @@ #define NET_DCSCTP_TX_MOCK_SEND_QUEUE_H_ #include +#include #include "absl/types/optional.h" #include "api/array_view.h" @@ -35,11 +36,9 @@ class MockSendQueue : public SendQueue { Discard, (IsUnordered unordered, StreamID stream_id, MID message_id), (override)); - MOCK_METHOD(void, - PrepareResetStreams, - (rtc::ArrayView streams), - (override)); - MOCK_METHOD(bool, CanResetStreams, (), (const, override)); + MOCK_METHOD(void, PrepareResetStream, (StreamID stream_id), (override)); + MOCK_METHOD(bool, HasStreamsReadyToBeReset, (), (const, override)); + MOCK_METHOD(std::vector, GetStreamsReadyToBeReset, (), (override)); MOCK_METHOD(void, CommitResetStreams, (), (override)); MOCK_METHOD(void, RollbackResetStreams, (), (override)); MOCK_METHOD(void, Reset, (), (override)); @@ -53,6 +52,7 @@ class MockSendQueue : public SendQueue { SetBufferedAmountLowThreshold, (StreamID stream_id, size_t bytes), (override)); + MOCK_METHOD(void, EnableMessageInterleaving, (bool enabled), (override)); }; } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/outstanding_data.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/outstanding_data.cc index dc998def2f..4f1e863056 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/outstanding_data.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/outstanding_data.cc @@ -31,19 +31,21 @@ size_t OutstandingData::GetSerializedChunkSize(const Data& data) const { } void OutstandingData::Item::Ack() { + if (lifecycle_ != Lifecycle::kAbandoned) { + lifecycle_ = Lifecycle::kActive; + } ack_state_ = AckState::kAcked; - should_be_retransmitted_ = false; } OutstandingData::Item::NackAction OutstandingData::Item::Nack( bool retransmit_now) { ack_state_ = AckState::kNacked; ++nack_count_; - if ((retransmit_now || nack_count_ >= kNumberOfNacksForRetransmission) && - !is_abandoned_) { + if (!should_be_retransmitted() && !is_abandoned() && + (retransmit_now || nack_count_ >= kNumberOfNacksForRetransmission)) { // Nacked enough times - it's considered lost. 
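A sketch of the wiring implied by the new TimerManager constructor, under the assumption that a TaskQueueTimeoutFactory named `timeout_factory` is available: the factory callback now receives the per-timer precision and forwards it to CreateTimeout.

TimerManager timer_manager(
    [&timeout_factory](webrtc::TaskQueueBase::DelayPrecision precision) {
      return timeout_factory.CreateTimeout(precision);
    });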
if (num_retransmissions_ < *max_retransmissions_) { - should_be_retransmitted_ = true; + lifecycle_ = Lifecycle::kToBeRetransmitted; return NackAction::kRetransmit; } Abandon(); @@ -52,17 +54,16 @@ OutstandingData::Item::NackAction OutstandingData::Item::Nack( return NackAction::kNothing; } -void OutstandingData::Item::Retransmit() { +void OutstandingData::Item::MarkAsRetransmitted() { + lifecycle_ = Lifecycle::kActive; ack_state_ = AckState::kUnacked; - should_be_retransmitted_ = false; nack_count_ = 0; ++num_retransmissions_; } void OutstandingData::Item::Abandon() { - is_abandoned_ = true; - should_be_retransmitted_ = false; + lifecycle_ = Lifecycle::kAbandoned; } bool OutstandingData::Item::has_expired(TimeMs now) const { @@ -73,15 +74,21 @@ bool OutstandingData::IsConsistent() const { size_t actual_outstanding_bytes = 0; size_t actual_outstanding_items = 0; - std::set actual_to_be_retransmitted; - for (const auto& elem : outstanding_data_) { - if (elem.second.is_outstanding()) { - actual_outstanding_bytes += GetSerializedChunkSize(elem.second.data()); + std::set combined_to_be_retransmitted; + combined_to_be_retransmitted.insert(to_be_retransmitted_.begin(), + to_be_retransmitted_.end()); + combined_to_be_retransmitted.insert(to_be_fast_retransmitted_.begin(), + to_be_fast_retransmitted_.end()); + + std::set actual_combined_to_be_retransmitted; + for (const auto& [tsn, item] : outstanding_data_) { + if (item.is_outstanding()) { + actual_outstanding_bytes += GetSerializedChunkSize(item.data()); ++actual_outstanding_items; } - if (elem.second.should_be_retransmitted()) { - actual_to_be_retransmitted.insert(elem.first); + if (item.should_be_retransmitted()) { + actual_combined_to_be_retransmitted.insert(tsn); } } @@ -92,7 +99,7 @@ bool OutstandingData::IsConsistent() const { return actual_outstanding_bytes == outstanding_bytes_ && actual_outstanding_items == outstanding_items_ && - actual_to_be_retransmitted == to_be_retransmitted_; + actual_combined_to_be_retransmitted == combined_to_be_retransmitted; } void OutstandingData::AckChunk(AckInfo& ack_info, @@ -105,6 +112,8 @@ void OutstandingData::AckChunk(AckInfo& ack_info, --outstanding_items_; } if (iter->second.should_be_retransmitted()) { + RTC_DCHECK(to_be_fast_retransmitted_.find(iter->first) == + to_be_fast_retransmitted_.end()); to_be_retransmitted_.erase(iter->first); } iter->second.Ack(); @@ -116,7 +125,7 @@ void OutstandingData::AckChunk(AckInfo& ack_info, OutstandingData::AckInfo OutstandingData::HandleSack( UnwrappedTSN cumulative_tsn_ack, rtc::ArrayView gap_ack_blocks, - bool is_in_fast_retransmit) { + bool is_in_fast_recovery) { OutstandingData::AckInfo ack_info(cumulative_tsn_ack); // Erase all items up to cumulative_tsn_ack. RemoveAcked(cumulative_tsn_ack, ack_info); @@ -125,8 +134,8 @@ OutstandingData::AckInfo OutstandingData::HandleSack( AckGapBlocks(cumulative_tsn_ack, gap_ack_blocks, ack_info); // NACK and possibly mark for retransmit chunks that weren't acked. 
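For orientation, a conceptual mapping (in comment form, matching the accessors introduced in this file) of how the two old booleans collapse into the new Lifecycle enum; this is a summary, not code from the patch.

// Old state                            New representation
// is_abandoned_ == true            ->  lifecycle_ == Lifecycle::kAbandoned
// should_be_retransmitted_ == true ->  lifecycle_ == Lifecycle::kToBeRetransmitted
// neither flag set                 ->  lifecycle_ == Lifecycle::kActive
//
// Ack() no longer clears a retransmit flag; it moves a kToBeRetransmitted item
// back to kActive (unless already abandoned), and MarkAsRetransmitted()
// replaces Retransmit() with the same kToBeRetransmitted -> kActive transition.
// Abandon() remains terminal.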
- NackBetweenAckBlocks(cumulative_tsn_ack, gap_ack_blocks, - is_in_fast_retransmit, ack_info); + NackBetweenAckBlocks(cumulative_tsn_ack, gap_ack_blocks, is_in_fast_recovery, + ack_info); RTC_DCHECK(IsConsistent()); return ack_info; @@ -138,6 +147,14 @@ void OutstandingData::RemoveAcked(UnwrappedTSN cumulative_tsn_ack, for (auto iter = outstanding_data_.begin(); iter != first_unacked; ++iter) { AckChunk(ack_info, iter); + if (iter->second.lifecycle_id().IsSet()) { + RTC_DCHECK(iter->second.data().is_end); + if (iter->second.is_abandoned()) { + ack_info.abandoned_lifecycle_ids.push_back(iter->second.lifecycle_id()); + } else { + ack_info.acked_lifecycle_ids.push_back(iter->second.lifecycle_id()); + } + } } outstanding_data_.erase(outstanding_data_.begin(), first_unacked); @@ -197,8 +214,9 @@ void OutstandingData::NackBetweenAckBlocks( for (auto iter = outstanding_data_.upper_bound(prev_block_last_acked); iter != outstanding_data_.lower_bound(cur_block_first_acked); ++iter) { if (iter->first <= max_tsn_to_nack) { - ack_info.has_packet_loss = - NackItem(iter->first, iter->second, /*retransmit_now=*/false); + ack_info.has_packet_loss |= + NackItem(iter->first, iter->second, /*retransmit_now=*/false, + /*do_fast_retransmit=*/!is_in_fast_recovery); } } prev_block_last_acked = UnwrappedTSN::AddTo(cumulative_tsn_ack, block.end); @@ -212,7 +230,8 @@ void OutstandingData::NackBetweenAckBlocks( bool OutstandingData::NackItem(UnwrappedTSN tsn, Item& item, - bool retransmit_now) { + bool retransmit_now, + bool do_fast_retransmit) { if (item.is_outstanding()) { outstanding_bytes_ -= GetSerializedChunkSize(item.data()); --outstanding_items_; @@ -222,7 +241,11 @@ bool OutstandingData::NackItem(UnwrappedTSN tsn, case Item::NackAction::kNothing: return false; case Item::NackAction::kRetransmit: - to_be_retransmitted_.insert(tsn); + if (do_fast_retransmit) { + to_be_fast_retransmitted_.insert(tsn); + } else { + to_be_retransmitted_.insert(tsn); + } RTC_DLOG(LS_VERBOSE) << *tsn.Wrap() << " marked for retransmission"; break; case Item::NackAction::kAbandon: @@ -252,9 +275,11 @@ void OutstandingData::AbandonAllFor(const Item& item) { Data::IsEnd(true), item.data().is_unordered); Item& added_item = outstanding_data_ - .emplace(tsn, - Item(std::move(message_end), MaxRetransmits::NoLimit(), - TimeMs(0), TimeMs::InfiniteFuture())) + .emplace(std::piecewise_construct, std::forward_as_tuple(tsn), + std::forward_as_tuple(std::move(message_end), TimeMs(0), + MaxRetransmits::NoLimit(), + TimeMs::InfiniteFuture(), + LifecycleId::NotSet())) .first->second; // The added chunk shouldn't be included in `outstanding_bytes`, so set it // as acked. 
@@ -263,10 +288,7 @@ void OutstandingData::AbandonAllFor(const Item& item) { << *tsn.Wrap(); } - for (auto& elem : outstanding_data_) { - UnwrappedTSN tsn = elem.first; - Item& other = elem.second; - + for (auto& [tsn, other] : outstanding_data_) { if (!other.is_abandoned() && other.data().stream_id == item.data().stream_id && other.data().is_unordered == item.data().is_unordered && @@ -274,6 +296,7 @@ void OutstandingData::AbandonAllFor(const Item& item) { RTC_DLOG(LS_VERBOSE) << "Marking chunk " << *tsn.Wrap() << " as abandoned"; if (other.should_be_retransmitted()) { + to_be_fast_retransmitted_.erase(tsn); to_be_retransmitted_.erase(tsn); } other.Abandon(); @@ -281,12 +304,12 @@ void OutstandingData::AbandonAllFor(const Item& item) { } } -std::vector> OutstandingData::GetChunksToBeRetransmitted( +std::vector> OutstandingData::ExtractChunksThatCanFit( + std::set& chunks, size_t max_size) { std::vector> result; - for (auto it = to_be_retransmitted_.begin(); - it != to_be_retransmitted_.end();) { + for (auto it = chunks.begin(); it != chunks.end();) { UnwrappedTSN tsn = *it; auto elem = outstanding_data_.find(tsn); RTC_DCHECK(elem != outstanding_data_.end()); @@ -298,12 +321,12 @@ std::vector> OutstandingData::GetChunksToBeRetransmitted( size_t serialized_size = GetSerializedChunkSize(item.data()); if (serialized_size <= max_size) { - item.Retransmit(); + item.MarkAsRetransmitted(); result.emplace_back(tsn.Wrap(), item.data().Clone()); max_size -= serialized_size; outstanding_bytes_ += serialized_size; ++outstanding_items_; - it = to_be_retransmitted_.erase(it); + it = chunks.erase(it); } else { ++it; } @@ -312,16 +335,39 @@ std::vector> OutstandingData::GetChunksToBeRetransmitted( break; } } + return result; +} + +std::vector> +OutstandingData::GetChunksToBeFastRetransmitted(size_t max_size) { + std::vector> result = + ExtractChunksThatCanFit(to_be_fast_retransmitted_, max_size); + + // https://datatracker.ietf.org/doc/html/rfc4960#section-7.2.4 + // "Those TSNs marked for retransmission due to the Fast-Retransmit algorithm + // that did not fit in the sent datagram carrying K other TSNs are also marked + // as ineligible for a subsequent Fast Retransmit. However, as they are + // marked for retransmission they will be retransmitted later on as soon as + // cwnd allows." + if (!to_be_fast_retransmitted_.empty()) { + to_be_retransmitted_.insert(to_be_fast_retransmitted_.begin(), + to_be_fast_retransmitted_.end()); + to_be_fast_retransmitted_.clear(); + } RTC_DCHECK(IsConsistent()); return result; } -void OutstandingData::ExpireOutstandingChunks(TimeMs now) { - for (const auto& elem : outstanding_data_) { - UnwrappedTSN tsn = elem.first; - const Item& item = elem.second; +std::vector> OutstandingData::GetChunksToBeRetransmitted( + size_t max_size) { + // Chunks scheduled for fast retransmission must be sent first. + RTC_DCHECK(to_be_fast_retransmitted_.empty()); + return ExtractChunksThatCanFit(to_be_retransmitted_, max_size); +} +void OutstandingData::ExpireOutstandingChunks(TimeMs now) { + for (const auto& [tsn, item] : outstanding_data_) { // Chunks that are nacked can be expired. Care should be taken not to expire // unacked (in-flight) chunks as they might have been received, but the SACK // is either delayed or in-flight and may be received later. 
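A hedged sketch of the calling order this split enables, assuming an OutstandingData instance named `outstanding_data` and size budgets computed elsewhere (`max_packet_bytes`, `budget_bytes`): fast retransmissions are drained first and ignore the congestion window, and anything that did not fit is demoted to the normal retransmission set, so a later call picks it up.

std::vector<std::pair<TSN, Data>> to_send;
if (outstanding_data.has_data_to_be_fast_retransmitted()) {
  // Sent immediately in a single packet, regardless of cwnd.
  to_send = outstanding_data.GetChunksToBeFastRetransmitted(max_packet_bytes);
} else {
  // Normal sending is bounded by cwnd/rwnd; leftovers from an earlier fast
  // retransmit attempt are included here.
  to_send = outstanding_data.GetChunksToBeRetransmitted(budget_bytes);
}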
@@ -347,9 +393,10 @@ UnwrappedTSN OutstandingData::highest_outstanding_tsn() const { absl::optional OutstandingData::Insert( const Data& data, - MaxRetransmits max_retransmissions, TimeMs time_sent, - TimeMs expires_at) { + MaxRetransmits max_retransmissions, + TimeMs expires_at, + LifecycleId lifecycle_id) { UnwrappedTSN tsn = next_tsn_; next_tsn_.Increment(); @@ -358,8 +405,10 @@ absl::optional OutstandingData::Insert( outstanding_bytes_ += chunk_size; ++outstanding_items_; auto it = outstanding_data_ - .emplace(tsn, Item(data.Clone(), max_retransmissions, time_sent, - expires_at)) + .emplace(std::piecewise_construct, std::forward_as_tuple(tsn), + std::forward_as_tuple(data.Clone(), time_sent, + max_retransmissions, expires_at, + lifecycle_id)) .first; if (it->second.has_expired(time_sent)) { @@ -378,11 +427,10 @@ absl::optional OutstandingData::Insert( } void OutstandingData::NackAll() { - for (auto& elem : outstanding_data_) { - UnwrappedTSN tsn = elem.first; - Item& item = elem.second; + for (auto& [tsn, item] : outstanding_data_) { if (!item.is_acked()) { - NackItem(tsn, item, /*retransmit_now=*/true); + NackItem(tsn, item, /*retransmit_now=*/true, + /*do_fast_retransmit=*/false); } } RTC_DCHECK(IsConsistent()); @@ -406,21 +454,21 @@ std::vector> OutstandingData::GetChunkStatesForTesting() const { std::vector> states; states.emplace_back(last_cumulative_tsn_ack_.Wrap(), State::kAcked); - for (const auto& elem : outstanding_data_) { + for (const auto& [tsn, item] : outstanding_data_) { State state; - if (elem.second.is_abandoned()) { + if (item.is_abandoned()) { state = State::kAbandoned; - } else if (elem.second.should_be_retransmitted()) { + } else if (item.should_be_retransmitted()) { state = State::kToBeRetransmitted; - } else if (elem.second.is_acked()) { + } else if (item.is_acked()) { state = State::kAcked; - } else if (elem.second.is_outstanding()) { + } else if (item.is_outstanding()) { state = State::kInFlight; } else { state = State::kNacked; } - states.emplace_back(elem.first.Wrap(), state); + states.emplace_back(tsn.Wrap(), state); } return states; } @@ -438,10 +486,7 @@ ForwardTsnChunk OutstandingData::CreateForwardTsn() const { std::map skipped_per_ordered_stream; UnwrappedTSN new_cumulative_ack = last_cumulative_tsn_ack_; - for (const auto& elem : outstanding_data_) { - UnwrappedTSN tsn = elem.first; - const Item& item = elem.second; - + for (const auto& [tsn, item] : outstanding_data_) { if ((tsn != new_cumulative_ack.next_value()) || !item.is_abandoned()) { break; } @@ -454,8 +499,8 @@ ForwardTsnChunk OutstandingData::CreateForwardTsn() const { std::vector skipped_streams; skipped_streams.reserve(skipped_per_ordered_stream.size()); - for (const auto& elem : skipped_per_ordered_stream) { - skipped_streams.emplace_back(elem.first, elem.second); + for (const auto& [stream_id, ssn] : skipped_per_ordered_stream) { + skipped_streams.emplace_back(stream_id, ssn); } return ForwardTsnChunk(new_cumulative_ack.Wrap(), std::move(skipped_streams)); } @@ -464,10 +509,7 @@ IForwardTsnChunk OutstandingData::CreateIForwardTsn() const { std::map, MID> skipped_per_stream; UnwrappedTSN new_cumulative_ack = last_cumulative_tsn_ack_; - for (const auto& elem : outstanding_data_) { - UnwrappedTSN tsn = elem.first; - const Item& item = elem.second; - + for (const auto& [tsn, item] : outstanding_data_) { if ((tsn != new_cumulative_ack.next_value()) || !item.is_abandoned()) { break; } @@ -482,9 +524,7 @@ IForwardTsnChunk OutstandingData::CreateIForwardTsn() const { std::vector skipped_streams; 
skipped_streams.reserve(skipped_per_stream.size()); - for (const auto& elem : skipped_per_stream) { - const std::pair& stream = elem.first; - MID message_id = elem.second; + for (const auto& [stream, message_id] : skipped_per_stream) { skipped_streams.emplace_back(stream.first, stream.second, message_id); } @@ -492,4 +532,12 @@ IForwardTsnChunk OutstandingData::CreateIForwardTsn() const { std::move(skipped_streams)); } +void OutstandingData::ResetSequenceNumbers(UnwrappedTSN next_tsn, + UnwrappedTSN last_cumulative_tsn) { + RTC_DCHECK(outstanding_data_.empty()); + RTC_DCHECK(next_tsn_ == last_cumulative_tsn_ack_.next_value()); + RTC_DCHECK(next_tsn == last_cumulative_tsn.next_value()); + next_tsn_ = next_tsn; + last_cumulative_tsn_ack_ = last_cumulative_tsn; +} } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/outstanding_data.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/outstanding_data.h index dc9aab7f96..6b4b7121fb 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/outstanding_data.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/outstanding_data.h @@ -21,6 +21,7 @@ #include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h" #include "net/dcsctp/packet/chunk/sack_chunk.h" #include "net/dcsctp/packet/data.h" +#include "net/dcsctp/public/types.h" namespace dcsctp { @@ -62,6 +63,11 @@ class OutstandingData { // Highest TSN Newly Acknowledged, an SCTP variable. UnwrappedTSN highest_tsn_acked; + + // The set of lifecycle IDs that were acked using cumulative_tsn_ack. + std::vector acked_lifecycle_ids; + // The set of lifecycle IDs that were acked, but had been abandoned. + std::vector abandoned_lifecycle_ids; }; OutstandingData( @@ -77,7 +83,14 @@ class OutstandingData { AckInfo HandleSack( UnwrappedTSN cumulative_tsn_ack, rtc::ArrayView gap_ack_blocks, - bool is_in_fast_retransmit); + bool is_in_fast_recovery); + + // Returns as many of the chunks that are eligible for fast retransmissions + // and that would fit in a single packet of `max_size`. The eligible chunks + // that didn't fit will be marked for (normal) retransmission and will not be + // returned if this method is called again. + std::vector> GetChunksToBeFastRetransmitted( + size_t max_size); // Given `max_size` of space left in a packet, which chunks can be added to // it? @@ -94,8 +107,12 @@ class OutstandingData { bool empty() const { return outstanding_data_.empty(); } + bool has_data_to_be_fast_retransmitted() const { + return !to_be_fast_retransmitted_.empty(); + } + bool has_data_to_be_retransmitted() const { - return !to_be_retransmitted_.empty(); + return !to_be_retransmitted_.empty() || !to_be_fast_retransmitted_.empty(); } UnwrappedTSN last_cumulative_tsn_ack() const { @@ -109,10 +126,12 @@ class OutstandingData { // Schedules `data` to be sent, with the provided partial reliability // parameters. Returns the TSN if the item was actually added and scheduled to // be sent, and absl::nullopt if it shouldn't be sent. - absl::optional Insert(const Data& data, - MaxRetransmits max_retransmissions, - TimeMs time_sent, - TimeMs expires_at); + absl::optional Insert( + const Data& data, + TimeMs time_sent, + MaxRetransmits max_retransmissions = MaxRetransmits::NoLimit(), + TimeMs expires_at = TimeMs::InfiniteFuture(), + LifecycleId lifecycle_id = LifecycleId::NotSet()); // Nacks all outstanding data. void NackAll(); @@ -136,6 +155,10 @@ class OutstandingData { // abandoned, which means that a FORWARD-TSN should be sent. bool ShouldSendForwardTsn() const; + // Sets the next TSN to be used. 
This is used in handover. + void ResetSequenceNumbers(UnwrappedTSN next_tsn, + UnwrappedTSN last_cumulative_tsn); + private: // A fragmented message's DATA chunk while in the retransmission queue, and // its associated metadata. @@ -147,15 +170,20 @@ class OutstandingData { kAbandon, }; - explicit Item(Data data, - MaxRetransmits max_retransmissions, - TimeMs time_sent, - TimeMs expires_at) - : max_retransmissions_(max_retransmissions), - time_sent_(time_sent), + Item(Data data, + TimeMs time_sent, + MaxRetransmits max_retransmissions, + TimeMs expires_at, + LifecycleId lifecycle_id) + : time_sent_(time_sent), + max_retransmissions_(max_retransmissions), expires_at_(expires_at), + lifecycle_id_(lifecycle_id), data_(std::move(data)) {} + Item(const Item&) = delete; + Item& operator=(const Item&) = delete; + TimeMs time_sent() const { return time_sent_; } const Data& data() const { return data_; } @@ -167,11 +195,11 @@ class OutstandingData { // is set, it might be marked for retransmission. If the item has reached // its max retransmission value, it will instead be abandoned. The action // performed is indicated as return value. - NackAction Nack(bool retransmit_now = false); + NackAction Nack(bool retransmit_now); // Prepares the item to be retransmitted. Sets it as outstanding and // clears all nack counters. - void Retransmit(); + void MarkAsRetransmitted(); // Marks this item as abandoned. void Abandon(); @@ -179,10 +207,12 @@ class OutstandingData { bool is_outstanding() const { return ack_state_ == AckState::kUnacked; } bool is_acked() const { return ack_state_ == AckState::kAcked; } bool is_nacked() const { return ack_state_ == AckState::kNacked; } - bool is_abandoned() const { return is_abandoned_; } + bool is_abandoned() const { return lifecycle_ == Lifecycle::kAbandoned; } // Indicates if this chunk should be retransmitted. - bool should_be_retransmitted() const { return should_be_retransmitted_; } + bool should_be_retransmitted() const { + return lifecycle_ == Lifecycle::kToBeRetransmitted; + } // Indicates if this chunk has ever been retransmitted. bool has_been_retransmitted() const { return num_retransmissions_ > 0; } @@ -190,36 +220,58 @@ class OutstandingData { // indicate if it has expired (SCTP Partial Reliability Extension). bool has_expired(TimeMs now) const; + LifecycleId lifecycle_id() const { return lifecycle_id_; } + private: - enum class AckState { + enum class Lifecycle : uint8_t { + // The chunk is alive (sent, received, etc) + kActive, + // The chunk is scheduled to be retransmitted, and will then transition to + // become active. + kToBeRetransmitted, + // The chunk has been abandoned. This is a terminal state. + kAbandoned + }; + enum class AckState : uint8_t { + // The chunk is in-flight. kUnacked, + // The chunk has been received and acknowledged. kAcked, - kNacked, + // The chunk has been nacked and is possibly lost. + kNacked }; + + // NOTE: This data structure has been optimized for size, by ordering fields + // to avoid unnecessary padding. + + // When the packet was sent, and placed in this queue. + const TimeMs time_sent_; + // If the message was sent with a maximum number of retransmissions, this is + // set to that number. The value zero (0) means that it will never be + // retransmitted. + const MaxRetransmits max_retransmissions_; + + // Indicates the life cycle status of this chunk. + Lifecycle lifecycle_ = Lifecycle::kActive; // Indicates the presence of this chunk, if it's in flight (Unacked), has - // been received (Acked) or is lost (Nacked). 
+ // been received (Acked) or is possibly lost (Nacked). AckState ack_state_ = AckState::kUnacked; - // Indicates if this chunk has been abandoned, which is a terminal state. - bool is_abandoned_ = false; - // Indicates if this chunk should be retransmitted. - bool should_be_retransmitted_ = false; // The number of times the DATA chunk has been nacked (by having received a // SACK which doesn't include it). Will be cleared on retransmissions. uint8_t nack_count_ = 0; // The number of times the DATA chunk has been retransmitted. uint16_t num_retransmissions_ = 0; - // If the message was sent with a maximum number of retransmissions, this is - // set to that number. The value zero (0) means that it will never be - // retransmitted. - const MaxRetransmits max_retransmissions_; - // When the packet was sent, and placed in this queue. - const TimeMs time_sent_; + // At this exact millisecond, the item is considered expired. If the message // is not to be expired, this is set to the infinite future. const TimeMs expires_at_; + + // An optional lifecycle id, which may only be set for the last fragment. + const LifecycleId lifecycle_id_; + // The actual data to send/retransmit. - Data data_; + const Data data_; }; // Returns how large a chunk will be, serialized, carrying the data @@ -246,21 +298,32 @@ class OutstandingData { bool is_in_fast_recovery, OutstandingData::AckInfo& ack_info); - // Acks the chunk referenced by `iter` and updates state in `ack_info` and the - // object's state. + // Process the acknowledgement of the chunk referenced by `iter` and updates + // state in `ack_info` and the object's state. void AckChunk(AckInfo& ack_info, std::map::iterator iter); - // Helper method to nack an item and perform the correct operations given the - // action indicated when nacking an item (e.g. retransmitting or abandoning). - // The return value indicate if an action was performed, meaning that packet - // loss was detected and acted upon. - bool NackItem(UnwrappedTSN tsn, Item& item, bool retransmit_now); + // Helper method to process an incoming nack of an item and perform the + // correct operations given the action indicated when nacking an item (e.g. + // retransmitting or abandoning). The return value indicate if an action was + // performed, meaning that packet loss was detected and acted upon. If + // `do_fast_retransmit` is set and if the item has been nacked sufficiently + // many times so that it should be retransmitted, this will schedule it to be + // "fast retransmitted". This is only done just before going into fast + // recovery. + bool NackItem(UnwrappedTSN tsn, + Item& item, + bool retransmit_now, + bool do_fast_retransmit); // Given that a message fragment, `item` has been abandoned, abandon all other // fragments that share the same message - both never-before-sent fragments // that are still in the SendQueue and outstanding chunks. void AbandonAllFor(const OutstandingData::Item& item); + std::vector> ExtractChunksThatCanFit( + std::set& chunks, + size_t max_size); + bool IsConsistent() const; // The size of the data chunk (DATA/I-DATA) header that is used. @@ -278,6 +341,8 @@ class OutstandingData { // The number of DATA chunks that are in-flight (sent but not yet acked or // nacked). size_t outstanding_items_ = 0; + // Data chunks that are eligible for fast retransmission. + std::set to_be_fast_retransmitted_; // Data chunks that are to be retransmitted. 
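A short usage sketch of the re-ordered Insert() signature with its new defaults; `outstanding_data`, `chunk_data` (a Data) and `now` (a TimeMs) are assumed to exist and the values are made up. Only `time_sent` is required now; the reliability parameters and the lifecycle id default to "no limit" / "not set".

// Fully reliable, no lifecycle tracking:
absl::optional<UnwrappedTSN> tsn1 = outstanding_data.Insert(chunk_data, now);

// Partially reliable, with a lifecycle id (only valid on the last fragment):
absl::optional<UnwrappedTSN> tsn2 = outstanding_data.Insert(
    chunk_data, now,
    /*max_retransmissions=*/MaxRetransmits(2),
    /*expires_at=*/now + DurationMs(5000),
    /*lifecycle_id=*/LifecycleId(42));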
std::set to_be_retransmitted_; }; diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_queue.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_queue.cc index d980710826..36e2a859ba 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_queue.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_queue.cc @@ -51,6 +51,7 @@ constexpr float kMinBytesRequiredToSendFactor = 0.9; RetransmissionQueue::RetransmissionQueue( absl::string_view log_prefix, + DcSctpSocketCallbacks* callbacks, TSN my_initial_tsn, size_t a_rwnd, SendQueue& send_queue, @@ -59,9 +60,9 @@ RetransmissionQueue::RetransmissionQueue( Timer& t3_rtx, const DcSctpOptions& options, bool supports_partial_reliability, - bool use_message_interleaving, - const DcSctpSocketHandoverState* handover_state) - : options_(options), + bool use_message_interleaving) + : callbacks_(*callbacks), + options_(options), min_bytes_required_to_send_(options.mtu * kMinBytesRequiredToSendFactor), partial_reliability_(supports_partial_reliability), log_prefix_(std::string(log_prefix) + "tx: "), @@ -72,25 +73,19 @@ RetransmissionQueue::RetransmissionQueue( on_clear_retransmission_counter_( std::move(on_clear_retransmission_counter)), t3_rtx_(t3_rtx), - cwnd_(handover_state ? handover_state->tx.cwnd - : options_.cwnd_mtus_initial * options_.mtu), - rwnd_(handover_state ? handover_state->tx.rwnd : a_rwnd), + cwnd_(options_.cwnd_mtus_initial * options_.mtu), + rwnd_(a_rwnd), // https://tools.ietf.org/html/rfc4960#section-7.2.1 // "The initial value of ssthresh MAY be arbitrarily high (for // example, implementations MAY use the size of the receiver advertised // window)."" - ssthresh_(handover_state ? handover_state->tx.ssthresh : rwnd_), - partial_bytes_acked_( - handover_state ? handover_state->tx.partial_bytes_acked : 0), + ssthresh_(rwnd_), + partial_bytes_acked_(0), send_queue_(send_queue), outstanding_data_( data_chunk_header_size_, - tsn_unwrapper_.Unwrap(handover_state - ? TSN(handover_state->tx.next_tsn) - : my_initial_tsn), - tsn_unwrapper_.Unwrap(handover_state - ? TSN(handover_state->tx.next_tsn - 1) - : TSN(*my_initial_tsn - 1)), + tsn_unwrapper_.Unwrap(my_initial_tsn), + tsn_unwrapper_.Unwrap(TSN(*my_initial_tsn - 1)), [this](IsUnordered unordered, StreamID stream_id, MID message_id) { return send_queue_.Discard(unordered, stream_id, message_id); }) {} @@ -278,8 +273,27 @@ bool RetransmissionQueue::HandleSack(TimeMs now, const SackChunk& sack) { UpdateRTT(now, cumulative_tsn_ack); } + // Exit fast recovery before continuing processing, in case it needs to go + // into fast recovery again due to new reported packet loss. + MaybeExitFastRecovery(cumulative_tsn_ack); + OutstandingData::AckInfo ack_info = outstanding_data_.HandleSack( - cumulative_tsn_ack, sack.gap_ack_blocks(), is_in_fast_retransmit_); + cumulative_tsn_ack, sack.gap_ack_blocks(), is_in_fast_recovery()); + + // Add lifecycle events for delivered messages. 
+ for (LifecycleId lifecycle_id : ack_info.acked_lifecycle_ids) { + RTC_DLOG(LS_VERBOSE) << "Triggering OnLifecycleMessageDelivered(" + << lifecycle_id.value() << ")"; + callbacks_.OnLifecycleMessageDelivered(lifecycle_id); + callbacks_.OnLifecycleEnd(lifecycle_id); + } + for (LifecycleId lifecycle_id : ack_info.abandoned_lifecycle_ids) { + RTC_DLOG(LS_VERBOSE) << "Triggering OnLifecycleMessageExpired(" + << lifecycle_id.value() << ", true)"; + callbacks_.OnLifecycleMessageExpired(lifecycle_id, + /*maybe_delivered=*/true); + callbacks_.OnLifecycleEnd(lifecycle_id); + } // Update of outstanding_data_ is now done. Congestion control remains. UpdateReceiverWindow(sack.a_rwnd()); @@ -292,8 +306,6 @@ bool RetransmissionQueue::HandleSack(TimeMs now, const SackChunk& sack) { << old_outstanding_bytes << "), rwnd=" << rwnd_ << " (" << old_rwnd << ")"; - MaybeExitFastRecovery(cumulative_tsn_ack); - if (cumulative_tsn_ack > old_last_cumulative_tsn_ack) { // https://tools.ietf.org/html/rfc4960#section-6.3.2 // "Whenever a SACK is received that acknowledges the DATA chunk @@ -308,7 +320,6 @@ bool RetransmissionQueue::HandleSack(TimeMs now, const SackChunk& sack) { } if (ack_info.has_packet_loss) { - is_in_fast_retransmit_ = true; HandlePacketLoss(ack_info.highest_tsn_acked); } @@ -386,6 +397,54 @@ void RetransmissionQueue::HandleT3RtxTimerExpiry() { RTC_DCHECK(IsConsistent()); } +std::vector> +RetransmissionQueue::GetChunksForFastRetransmit(size_t bytes_in_packet) { + RTC_DCHECK(outstanding_data_.has_data_to_be_fast_retransmitted()); + RTC_DCHECK(IsDivisibleBy4(bytes_in_packet)); + std::vector> to_be_sent; + size_t old_outstanding_bytes = outstanding_bytes(); + + to_be_sent = + outstanding_data_.GetChunksToBeFastRetransmitted(bytes_in_packet); + RTC_DCHECK(!to_be_sent.empty()); + + // https://tools.ietf.org/html/rfc4960#section-7.2.4 + // "4) Restart the T3-rtx timer only if ... the endpoint is retransmitting + // the first outstanding DATA chunk sent to that address." + if (to_be_sent[0].first == + outstanding_data_.last_cumulative_tsn_ack().next_value().Wrap()) { + RTC_DLOG(LS_VERBOSE) + << log_prefix_ + << "First outstanding DATA to be retransmitted - restarting T3-RTX"; + t3_rtx_.Stop(); + } + + // https://tools.ietf.org/html/rfc4960#section-6.3.2 + // "Every time a DATA chunk is sent to any address (including a + // retransmission), if the T3-rtx timer of that address is not running, + // start it running so that it will expire after the RTO of that address." + if (!t3_rtx_.is_running()) { + t3_rtx_.Start(); + } + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Fast-retransmitting TSN " + << StrJoin(to_be_sent, ",", + [&](rtc::StringBuilder& sb, + const std::pair& c) { + sb << *c.first; + }) + << " - " + << absl::c_accumulate( + to_be_sent, 0, + [&](size_t r, const std::pair& d) { + return r + GetSerializedChunkSize(d.second); + }) + << " bytes. outstanding_bytes=" << outstanding_bytes() + << " (" << old_outstanding_bytes << ")"; + + RTC_DCHECK(IsConsistent()); + return to_be_sent; +} + std::vector> RetransmissionQueue::GetChunksToSend( TimeMs now, size_t bytes_remaining_in_packet) { @@ -395,60 +454,45 @@ std::vector> RetransmissionQueue::GetChunksToSend( std::vector> to_be_sent; size_t old_outstanding_bytes = outstanding_bytes(); size_t old_rwnd = rwnd_; - if (is_in_fast_retransmit_) { - // https://tools.ietf.org/html/rfc4960#section-7.2.4 - // "Determine how many of the earliest (i.e., lowest TSN) DATA chunks - // marked for retransmission will fit into a single packet ... 
Retransmit - // those K DATA chunks in a single packet. When a Fast Retransmit is being - // performed, the sender SHOULD ignore the value of cwnd and SHOULD NOT - // delay retransmission for this single packet." - is_in_fast_retransmit_ = false; - to_be_sent = - outstanding_data_.GetChunksToBeRetransmitted(bytes_remaining_in_packet); - size_t to_be_sent_bytes = absl::c_accumulate( - to_be_sent, 0, [&](size_t r, const std::pair& d) { - return r + GetSerializedChunkSize(d.second); - }); - RTC_DLOG(LS_VERBOSE) << log_prefix_ << "fast-retransmit: sending " - << to_be_sent.size() << " chunks, " << to_be_sent_bytes - << " bytes"; - } else { - // Normal sending. Calculate the bandwidth budget (how many bytes that is - // allowed to be sent), and fill that up first with chunks that are - // scheduled to be retransmitted. If there is still budget, send new chunks - // (which will have their TSN assigned here.) - size_t max_bytes = - RoundDownTo4(std::min(max_bytes_to_send(), bytes_remaining_in_packet)); - - to_be_sent = outstanding_data_.GetChunksToBeRetransmitted(max_bytes); - max_bytes -= absl::c_accumulate( - to_be_sent, 0, [&](size_t r, const std::pair& d) { - return r + GetSerializedChunkSize(d.second); - }); - - while (max_bytes > data_chunk_header_size_) { - RTC_DCHECK(IsDivisibleBy4(max_bytes)); - absl::optional chunk_opt = - send_queue_.Produce(now, max_bytes - data_chunk_header_size_); - if (!chunk_opt.has_value()) { - break; - } - - size_t chunk_size = GetSerializedChunkSize(chunk_opt->data); - max_bytes -= chunk_size; - rwnd_ -= chunk_size; - absl::optional tsn = outstanding_data_.Insert( - chunk_opt->data, - partial_reliability_ ? chunk_opt->max_retransmissions - : MaxRetransmits::NoLimit(), - now, - partial_reliability_ ? chunk_opt->expires_at - : TimeMs::InfiniteFuture()); + // Calculate the bandwidth budget (how many bytes that is + // allowed to be sent), and fill that up first with chunks that are + // scheduled to be retransmitted. If there is still budget, send new chunks + // (which will have their TSN assigned here.) + size_t max_bytes = + RoundDownTo4(std::min(max_bytes_to_send(), bytes_remaining_in_packet)); + + to_be_sent = outstanding_data_.GetChunksToBeRetransmitted(max_bytes); + max_bytes -= absl::c_accumulate(to_be_sent, 0, + [&](size_t r, const std::pair& d) { + return r + GetSerializedChunkSize(d.second); + }); + + while (max_bytes > data_chunk_header_size_) { + RTC_DCHECK(IsDivisibleBy4(max_bytes)); + absl::optional chunk_opt = + send_queue_.Produce(now, max_bytes - data_chunk_header_size_); + if (!chunk_opt.has_value()) { + break; + } - if (tsn.has_value()) { - to_be_sent.emplace_back(tsn->Wrap(), std::move(chunk_opt->data)); + size_t chunk_size = GetSerializedChunkSize(chunk_opt->data); + max_bytes -= chunk_size; + rwnd_ -= chunk_size; + + absl::optional tsn = outstanding_data_.Insert( + chunk_opt->data, now, + partial_reliability_ ? chunk_opt->max_retransmissions + : MaxRetransmits::NoLimit(), + partial_reliability_ ? 
chunk_opt->expires_at : TimeMs::InfiniteFuture(), + chunk_opt->lifecycle_id); + + if (tsn.has_value()) { + if (chunk_opt->lifecycle_id.IsSet()) { + RTC_DCHECK(chunk_opt->data.is_end); + callbacks_.OnLifecycleMessageFullySent(chunk_opt->lifecycle_id); } + to_be_sent.emplace_back(tsn->Wrap(), std::move(chunk_opt->data)); } } @@ -509,16 +553,15 @@ size_t RetransmissionQueue::max_bytes_to_send() const { return std::min(rwnd(), left); } -void RetransmissionQueue::PrepareResetStreams( - rtc::ArrayView streams) { +void RetransmissionQueue::PrepareResetStream(StreamID stream_id) { // TODO(boivie): These calls are now only affecting the send queue. The // packet buffer can also change behavior - for example draining the chunk // producer and eagerly assign TSNs so that an "Outgoing SSN Reset Request" // can be sent quickly, with a known `sender_last_assigned_tsn`. - send_queue_.PrepareResetStreams(streams); + send_queue_.PrepareResetStream(stream_id); } -bool RetransmissionQueue::CanResetStreams() const { - return send_queue_.CanResetStreams(); +bool RetransmissionQueue::HasStreamsReadyToBeReset() const { + return send_queue_.HasStreamsReadyToBeReset(); } void RetransmissionQueue::CommitResetStreams() { send_queue_.CommitResetStreams(); @@ -548,4 +591,21 @@ void RetransmissionQueue::AddHandoverState(DcSctpSocketHandoverState& state) { state.tx.ssthresh = ssthresh_; state.tx.partial_bytes_acked = partial_bytes_acked_; } + +void RetransmissionQueue::RestoreFromState( + const DcSctpSocketHandoverState& state) { + // Validate that the component is in pristine state. + RTC_DCHECK(outstanding_data_.empty()); + RTC_DCHECK(!t3_rtx_.is_running()); + RTC_DCHECK(partial_bytes_acked_ == 0); + + cwnd_ = state.tx.cwnd; + rwnd_ = state.tx.rwnd; + ssthresh_ = state.tx.ssthresh; + partial_bytes_acked_ = state.tx.partial_bytes_acked; + + outstanding_data_.ResetSequenceNumbers( + tsn_unwrapper_.Unwrap(TSN(state.tx.next_tsn)), + tsn_unwrapper_.Unwrap(TSN(state.tx.next_tsn - 1))); +} } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_queue.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_queue.h index 08f11db744..830c0b346d 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_queue.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_queue.h @@ -28,6 +28,7 @@ #include "net/dcsctp/packet/data.h" #include "net/dcsctp/public/dcsctp_handover_state.h" #include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/public/dcsctp_socket.h" #include "net/dcsctp/timer/timer.h" #include "net/dcsctp/tx/outstanding_data.h" #include "net/dcsctp/tx/retransmission_timeout.h" @@ -54,18 +55,17 @@ class RetransmissionQueue { // outstanding chunk has been ACKed, it will call // `on_clear_retransmission_counter` and will also use `t3_rtx`, which is the // SCTP retransmission timer to manage retransmissions. 
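A sketch of the new handover sequence from the point of view of a hypothetical owner (in practice the socket); the constructor no longer accepts a DcSctpSocketHandoverState*, so a restored queue is built in a pristine state and then explicitly given the state. All variables are assumed to be in scope.

RetransmissionQueue queue(log_prefix, &callbacks, my_initial_tsn, a_rwnd,
                          send_queue, on_new_rtt,
                          on_clear_retransmission_counter, t3_rtx_timer, options);
if (handover_state != nullptr) {
  // Restores cwnd/rwnd/ssthresh/partial_bytes_acked and rewinds the TSN
  // counters; only valid while the queue is still empty.
  queue.RestoreFromState(*handover_state);
}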
- RetransmissionQueue( - absl::string_view log_prefix, - TSN my_initial_tsn, - size_t a_rwnd, - SendQueue& send_queue, - std::function on_new_rtt, - std::function on_clear_retransmission_counter, - Timer& t3_rtx, - const DcSctpOptions& options, - bool supports_partial_reliability = true, - bool use_message_interleaving = false, - const DcSctpSocketHandoverState* handover_state = nullptr); + RetransmissionQueue(absl::string_view log_prefix, + DcSctpSocketCallbacks* callbacks, + TSN my_initial_tsn, + size_t a_rwnd, + SendQueue& send_queue, + std::function on_new_rtt, + std::function on_clear_retransmission_counter, + Timer& t3_rtx, + const DcSctpOptions& options, + bool supports_partial_reliability = true, + bool use_message_interleaving = false); // Handles a received SACK. Returns true if the `sack` was processed and // false if it was discarded due to received out-of-order and not relevant. @@ -74,6 +74,16 @@ class RetransmissionQueue { // Handles an expired retransmission timer. void HandleT3RtxTimerExpiry(); + bool has_data_to_be_fast_retransmitted() const { + return outstanding_data_.has_data_to_be_fast_retransmitted(); + } + + // Returns a list of chunks to "fast retransmit" that would fit in one SCTP + // packet with `bytes_in_packet` bytes available. The current value + // of `cwnd` is ignored. + std::vector> GetChunksForFastRetransmit( + size_t bytes_in_packet); + // Returns a list of chunks to send that would fit in one SCTP packet with // `bytes_remaining_in_packet` bytes available. This may be further limited by // the congestion control windows. Note that `ShouldSendForwardTSN` must be @@ -133,14 +143,18 @@ class RetransmissionQueue { // See the SendQueue for a longer description of these methods related // to stream resetting. - void PrepareResetStreams(rtc::ArrayView streams); - bool CanResetStreams() const; + void PrepareResetStream(StreamID stream_id); + bool HasStreamsReadyToBeReset() const; + std::vector GetStreamsReadyToBeReset() const { + return send_queue_.GetStreamsReadyToBeReset(); + } void CommitResetStreams(); void RollbackResetStreams(); HandoverReadinessStatus GetHandoverReadiness() const; void AddHandoverState(DcSctpSocketHandoverState& state); + void RestoreFromState(const DcSctpSocketHandoverState& state); private: enum class CongestionAlgorithmPhase { @@ -200,6 +214,7 @@ class RetransmissionQueue { // to the congestion control algorithm. size_t max_bytes_to_send() const; + DcSctpSocketCallbacks& callbacks_; const DcSctpOptions options_; // The minimum bytes required to be available in the congestion window to // allow packets to be sent - to avoid sending too small packets. @@ -229,8 +244,6 @@ class RetransmissionQueue { // If set, fast recovery is enabled until this TSN has been cumulative // acked. absl::optional fast_recovery_exit_tsn_ = absl::nullopt; - // Indicates if the congestion algorithm is in fast retransmit. - bool is_in_fast_retransmit_ = false; // The send queue. 
SendQueue& send_queue_; diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_timeout.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_timeout.cc index aa2863f931..7d8fb9761c 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_timeout.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_timeout.cc @@ -21,6 +21,7 @@ RetransmissionTimeout::RetransmissionTimeout(const DcSctpOptions& options) max_rto_(*options.rto_max), max_rtt_(*options.rtt_max), min_rtt_variance_(*options.min_rtt_variance), + scaled_srtt_(*options.rto_initial << kRttShift), rto_(*options.rto_initial) {} void RetransmissionTimeout::ObserveRTT(DurationMs measured_rtt) { diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_timeout.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_timeout.h index 7cbcc6fcc9..01530cb3b5 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_timeout.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_timeout.h @@ -48,7 +48,7 @@ class RetransmissionTimeout { // If this is the first measurement bool first_measurement_ = true; // Smoothed Round-Trip Time, shifted by kRttShift - int32_t scaled_srtt_ = 0; + int32_t scaled_srtt_; // Round-Trip Time Variation, shifted by kRttVarShift int32_t scaled_rtt_var_ = 0; // Retransmission Timeout diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/rr_send_queue.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/rr_send_queue.cc index 21744cc0a0..b1812f0f8a 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/rr_send_queue.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/rr_send_queue.cc @@ -13,12 +13,14 @@ #include #include #include +#include #include #include #include "absl/algorithm/container.h" #include "absl/types/optional.h" #include "api/array_view.h" +#include "net/dcsctp/common/str_join.h" #include "net/dcsctp/packet/data.h" #include "net/dcsctp/public/dcsctp_message.h" #include "net/dcsctp/public/dcsctp_socket.h" @@ -29,42 +31,33 @@ namespace dcsctp { RRSendQueue::RRSendQueue(absl::string_view log_prefix, + DcSctpSocketCallbacks* callbacks, size_t buffer_size, - std::function on_buffered_amount_low, - size_t total_buffered_amount_low_threshold, - std::function on_total_buffered_amount_low, - const DcSctpSocketHandoverState* handover_state) + size_t mtu, + StreamPriority default_priority, + size_t total_buffered_amount_low_threshold) : log_prefix_(std::string(log_prefix) + "fcfs: "), + callbacks_(*callbacks), buffer_size_(buffer_size), - on_buffered_amount_low_(std::move(on_buffered_amount_low)), - total_buffered_amount_(std::move(on_total_buffered_amount_low)) { + default_priority_(default_priority), + scheduler_(mtu), + total_buffered_amount_( + [this]() { callbacks_.OnTotalBufferedAmountLow(); }) { total_buffered_amount_.SetLowThreshold(total_buffered_amount_low_threshold); } -bool RRSendQueue::OutgoingStream::HasDataToSend(TimeMs now) { - while (!items_.empty()) { - RRSendQueue::OutgoingStream::Item& item = items_.front(); - if (item.message_id.has_value()) { - // Already partially sent messages can always continue to be sent. - return true; - } - - // Message has expired. Remove it and inspect the next one. 
- if (item.expires_at <= now) { - buffered_amount_.Decrease(item.remaining_size); - total_buffered_amount_.Decrease(item.remaining_size); - items_.pop_front(); - RTC_DCHECK(IsConsistent()); - continue; - } +size_t RRSendQueue::OutgoingStream::bytes_to_send_in_next_message() const { + if (pause_state_ == PauseState::kPaused || + pause_state_ == PauseState::kResetting) { + // The stream has paused (and there is no partially sent message). + return 0; + } - if (is_paused_) { - // The stream has paused (and there is no partially sent message). - return false; - } - return true; + if (items_.empty()) { + return 0; } - return false; + + return items_.front().remaining_size; } void RRSendQueue::OutgoingStream::AddHandoverState( @@ -72,29 +65,29 @@ void RRSendQueue::OutgoingStream::AddHandoverState( state.next_ssn = next_ssn_.value(); state.next_ordered_mid = next_ordered_mid_.value(); state.next_unordered_mid = next_unordered_mid_.value(); + state.priority = *scheduler_stream_->priority(); } bool RRSendQueue::IsConsistent() const { - size_t total_buffered_amount = 0; - for (const auto& stream_entry : streams_) { - total_buffered_amount += stream_entry.second.buffered_amount().value(); - } + std::set expected_active_streams; + std::set actual_active_streams = + scheduler_.ActiveStreamsForTesting(); - if (previous_message_has_ended_) { - auto it = streams_.find(current_stream_id_); - if (it != streams_.end() && it->second.has_partially_sent_message()) { - RTC_DLOG(LS_ERROR) - << "Previous message has ended, but still partial message in stream"; - return false; - } - } else { - auto it = streams_.find(current_stream_id_); - if (it == streams_.end() || !it->second.has_partially_sent_message()) { - RTC_DLOG(LS_ERROR) - << "Previous message has NOT ended, but there is no partial message"; - return false; + size_t total_buffered_amount = 0; + for (const auto& [stream_id, stream] : streams_) { + total_buffered_amount += stream.buffered_amount().value(); + if (stream.bytes_to_send_in_next_message() > 0) { + expected_active_streams.emplace(stream_id); } } + if (expected_active_streams != actual_active_streams) { + auto fn = [&](rtc::StringBuilder& sb, const auto& p) { sb << *p; }; + RTC_DLOG(LS_ERROR) << "Active streams mismatch, is=[" + << StrJoin(actual_active_streams, ",", fn) + << "], expected=[" + << StrJoin(expected_active_streams, ",", fn) << "]"; + return false; + } return total_buffered_amount == total_buffered_amount_.value(); } @@ -126,11 +119,15 @@ void RRSendQueue::ThresholdWatcher::SetLowThreshold(size_t low_threshold) { } void RRSendQueue::OutgoingStream::Add(DcSctpMessage message, - TimeMs expires_at, - const SendOptions& send_options) { + MessageAttributes attributes) { + bool was_active = bytes_to_send_in_next_message() > 0; buffered_amount_.Increase(message.payload().size()); - total_buffered_amount_.Increase(message.payload().size()); - items_.emplace_back(std::move(message), expires_at, send_options); + parent_.total_buffered_amount_.Increase(message.payload().size()); + items_.emplace_back(std::move(message), std::move(attributes)); + + if (!was_active) { + scheduler_stream_->MaybeMakeActive(); + } RTC_DCHECK(IsConsistent()); } @@ -138,78 +135,100 @@ void RRSendQueue::OutgoingStream::Add(DcSctpMessage message, absl::optional RRSendQueue::OutgoingStream::Produce( TimeMs now, size_t max_size) { - RTC_DCHECK(!items_.empty()); + RTC_DCHECK(pause_state_ != PauseState::kPaused && + pause_state_ != PauseState::kResetting); + + while (!items_.empty()) { + Item& item = items_.front(); + 
DcSctpMessage& message = item.message; + + // Allocate Message ID and SSN when the first fragment is sent. + if (!item.message_id.has_value()) { + // Oops, this entire message has already expired. Try the next one. + if (item.attributes.expires_at <= now) { + HandleMessageExpired(item); + items_.pop_front(); + continue; + } + + MID& mid = + item.attributes.unordered ? next_unordered_mid_ : next_ordered_mid_; + item.message_id = mid; + mid = MID(*mid + 1); + } + if (!item.attributes.unordered && !item.ssn.has_value()) { + item.ssn = next_ssn_; + next_ssn_ = SSN(*next_ssn_ + 1); + } - Item* item = &items_.front(); - DcSctpMessage& message = item->message; + // Grab the next `max_size` fragment from this message and calculate flags. + rtc::ArrayView chunk_payload = + item.message.payload().subview(item.remaining_offset, max_size); + rtc::ArrayView message_payload = message.payload(); + Data::IsBeginning is_beginning(chunk_payload.data() == + message_payload.data()); + Data::IsEnd is_end((chunk_payload.data() + chunk_payload.size()) == + (message_payload.data() + message_payload.size())); + + StreamID stream_id = message.stream_id(); + PPID ppid = message.ppid(); + + // Zero-copy the payload if the message fits in a single chunk. + std::vector payload = + is_beginning && is_end + ? std::move(message).ReleasePayload() + : std::vector(chunk_payload.begin(), chunk_payload.end()); + + FSN fsn(item.current_fsn); + item.current_fsn = FSN(*item.current_fsn + 1); + buffered_amount_.Decrease(payload.size()); + parent_.total_buffered_amount_.Decrease(payload.size()); + + SendQueue::DataToSend chunk(Data(stream_id, item.ssn.value_or(SSN(0)), + item.message_id.value(), fsn, ppid, + std::move(payload), is_beginning, is_end, + item.attributes.unordered)); + chunk.max_retransmissions = item.attributes.max_retransmissions; + chunk.expires_at = item.attributes.expires_at; + chunk.lifecycle_id = + is_end ? item.attributes.lifecycle_id : LifecycleId::NotSet(); + + if (is_end) { + // The entire message has been sent, and its last data copied to `chunk`, + // so it can safely be discarded. + items_.pop_front(); - if (item->remaining_size > max_size && max_size < kMinimumFragmentedPayload) { + if (pause_state_ == PauseState::kPending) { + RTC_DLOG(LS_VERBOSE) << "Pause state on " << *stream_id + << " is moving from pending to paused"; + pause_state_ = PauseState::kPaused; + } + } else { + item.remaining_offset += chunk_payload.size(); + item.remaining_size -= chunk_payload.size(); + RTC_DCHECK(item.remaining_offset + item.remaining_size == + item.message.payload().size()); + RTC_DCHECK(item.remaining_size > 0); + } RTC_DCHECK(IsConsistent()); - return absl::nullopt; + return chunk; } + RTC_DCHECK(IsConsistent()); + return absl::nullopt; +} - // Allocate Message ID and SSN when the first fragment is sent. - if (!item->message_id.has_value()) { - MID& mid = - item->send_options.unordered ? 
next_unordered_mid_ : next_ordered_mid_; - item->message_id = mid; - mid = MID(*mid + 1); - } - if (!item->send_options.unordered && !item->ssn.has_value()) { - item->ssn = next_ssn_; - next_ssn_ = SSN(*next_ssn_ + 1); - } +void RRSendQueue::OutgoingStream::HandleMessageExpired( + OutgoingStream::Item& item) { + buffered_amount_.Decrease(item.remaining_size); + parent_.total_buffered_amount_.Decrease(item.remaining_size); + if (item.attributes.lifecycle_id.IsSet()) { + RTC_DLOG(LS_VERBOSE) << "Triggering OnLifecycleMessageExpired(" + << item.attributes.lifecycle_id.value() << ", false)"; - // Grab the next `max_size` fragment from this message and calculate flags. - rtc::ArrayView chunk_payload = - item->message.payload().subview(item->remaining_offset, max_size); - rtc::ArrayView message_payload = message.payload(); - Data::IsBeginning is_beginning(chunk_payload.data() == - message_payload.data()); - Data::IsEnd is_end((chunk_payload.data() + chunk_payload.size()) == - (message_payload.data() + message_payload.size())); - - StreamID stream_id = message.stream_id(); - PPID ppid = message.ppid(); - - // Zero-copy the payload if the message fits in a single chunk. - std::vector payload = - is_beginning && is_end - ? std::move(message).ReleasePayload() - : std::vector(chunk_payload.begin(), chunk_payload.end()); - - FSN fsn(item->current_fsn); - item->current_fsn = FSN(*item->current_fsn + 1); - buffered_amount_.Decrease(payload.size()); - total_buffered_amount_.Decrease(payload.size()); - - SendQueue::DataToSend chunk(Data(stream_id, item->ssn.value_or(SSN(0)), - item->message_id.value(), fsn, ppid, - std::move(payload), is_beginning, is_end, - item->send_options.unordered)); - if (item->send_options.max_retransmissions.has_value() && - *item->send_options.max_retransmissions >= - std::numeric_limits::min() && - *item->send_options.max_retransmissions <= - std::numeric_limits::max()) { - chunk.max_retransmissions = - MaxRetransmits(*item->send_options.max_retransmissions); + parent_.callbacks_.OnLifecycleMessageExpired(item.attributes.lifecycle_id, + /*maybe_delivered=*/false); + parent_.callbacks_.OnLifecycleEnd(item.attributes.lifecycle_id); } - chunk.expires_at = item->expires_at; - - if (is_end) { - // The entire message has been sent, and its last data copied to `chunk`, so - // it can safely be discarded. - items_.pop_front(); - } else { - item->remaining_offset += chunk_payload.size(); - item->remaining_size -= chunk_payload.size(); - RTC_DCHECK(item->remaining_offset + item->remaining_size == - item->message.payload().size()); - RTC_DCHECK(item->remaining_size > 0); - } - RTC_DCHECK(IsConsistent()); - return chunk; } bool RRSendQueue::OutgoingStream::Discard(IsUnordered unordered, @@ -217,11 +236,22 @@ bool RRSendQueue::OutgoingStream::Discard(IsUnordered unordered, bool result = false; if (!items_.empty()) { Item& item = items_.front(); - if (item.send_options.unordered == unordered && - item.message_id.has_value() && *item.message_id == message_id) { - buffered_amount_.Decrease(item.remaining_size); - total_buffered_amount_.Decrease(item.remaining_size); + if (item.attributes.unordered == unordered && item.message_id.has_value() && + *item.message_id == message_id) { + HandleMessageExpired(item); items_.pop_front(); + + // Only partially sent messages are discarded, so if a message was + // discarded, then it was the currently sent message. 
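For context, a hedged application-side sketch of what feeds `attributes.lifecycle_id`: the caller attaches a LifecycleId via SendOptions, and the socket later reports OnLifecycleMessageFullySent, OnLifecycleMessageDelivered or OnLifecycleMessageExpired, followed by OnLifecycleEnd, through DcSctpSocketCallbacks. `socket` and `payload` (a std::vector<uint8_t>) are assumed, and the stream and PPID values are arbitrary.

SendOptions options;
options.lifecycle_id = LifecycleId(1);
SendStatus status = socket.Send(
    DcSctpMessage(StreamID(1), PPID(53), std::move(payload)), options);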
+ scheduler_stream_->ForceReschedule(); + + if (pause_state_ == PauseState::kPending) { + pause_state_ = PauseState::kPaused; + scheduler_stream_->MakeInactive(); + } else if (bytes_to_send_in_next_message() == 0) { + scheduler_stream_->MakeInactive(); + } + // As the item still existed, it had unsent data. result = true; } @@ -231,45 +261,82 @@ bool RRSendQueue::OutgoingStream::Discard(IsUnordered unordered, } void RRSendQueue::OutgoingStream::Pause() { - is_paused_ = true; + if (pause_state_ != PauseState::kNotPaused) { + // Already in progress. + return; + } + + bool had_pending_items = !items_.empty(); + + // https://datatracker.ietf.org/doc/html/rfc8831#section-6.7 + // "Closing of a data channel MUST be signaled by resetting the corresponding + // outgoing streams [RFC6525]. This means that if one side decides to close + // the data channel, it resets the corresponding outgoing stream." + // ... "[RFC6525] also guarantees that all the messages are delivered (or + // abandoned) before the stream is reset." // A stream is paused when it's about to be reset. In this implementation, - // it will throw away all non-partially send messages. This is subject to - // change. It will however not discard any partially sent messages - only - // whole messages. Partially delivered messages (at the time of receiving a - // Stream Reset command) will always deliver all the fragments before - // actually resetting the stream. + // it will throw away all non-partially send messages - they will be abandoned + // as noted above. This is subject to change. It will however not discard any + // partially sent messages - only whole messages. Partially delivered messages + // (at the time of receiving a Stream Reset command) will always deliver all + // the fragments before actually resetting the stream. for (auto it = items_.begin(); it != items_.end();) { if (it->remaining_offset == 0) { - buffered_amount_.Decrease(it->remaining_size); - total_buffered_amount_.Decrease(it->remaining_size); + HandleMessageExpired(*it); it = items_.erase(it); } else { ++it; } } + + pause_state_ = (items_.empty() || items_.front().remaining_offset == 0) + ? PauseState::kPaused + : PauseState::kPending; + + if (had_pending_items && pause_state_ == PauseState::kPaused) { + RTC_DLOG(LS_VERBOSE) << "Stream " << *stream_id() + << " was previously active, but is now paused."; + scheduler_stream_->MakeInactive(); + } + + RTC_DCHECK(IsConsistent()); +} + +void RRSendQueue::OutgoingStream::Resume() { + RTC_DCHECK(pause_state_ == PauseState::kResetting); + pause_state_ = PauseState::kNotPaused; + scheduler_stream_->MaybeMakeActive(); RTC_DCHECK(IsConsistent()); } void RRSendQueue::OutgoingStream::Reset() { + // This can be called both when an outgoing stream reset has been responded + // to, or when the entire SendQueue is reset due to detecting the peer having + // restarted. The stream may be in any state at this time. + PauseState old_pause_state = pause_state_; + pause_state_ = PauseState::kNotPaused; + next_ordered_mid_ = MID(0); + next_unordered_mid_ = MID(0); + next_ssn_ = SSN(0); if (!items_.empty()) { // If this message has been partially sent, reset it so that it will be // re-sent. 
auto& item = items_.front(); buffered_amount_.Increase(item.message.payload().size() - item.remaining_size); - total_buffered_amount_.Increase(item.message.payload().size() - - item.remaining_size); + parent_.total_buffered_amount_.Increase(item.message.payload().size() - + item.remaining_size); item.remaining_offset = 0; item.remaining_size = item.message.payload().size(); item.message_id = absl::nullopt; item.ssn = absl::nullopt; item.current_fsn = FSN(0); + if (old_pause_state == PauseState::kPaused || + old_pause_state == PauseState::kResetting) { + scheduler_stream_->MaybeMakeActive(); + } } - is_paused_ = false; - next_ordered_mid_ = MID(0); - next_unordered_mid_ = MID(0); - next_ssn_ = SSN(0); RTC_DCHECK(IsConsistent()); } @@ -286,15 +353,23 @@ void RRSendQueue::Add(TimeMs now, RTC_DCHECK(!message.payload().empty()); // Any limited lifetime should start counting from now - when the message // has been added to the queue. - TimeMs expires_at = TimeMs::InfiniteFuture(); - if (send_options.lifetime.has_value()) { - // `expires_at` is the time when it expires. Which is slightly larger than - // the message's lifetime, as the message is alive during its entire - // lifetime (which may be zero). - expires_at = now + *send_options.lifetime + DurationMs(1); - } + + // `expires_at` is the time when it expires. Which is slightly larger than the + // message's lifetime, as the message is alive during its entire lifetime + // (which may be zero). + MessageAttributes attributes = { + .unordered = send_options.unordered, + .max_retransmissions = + send_options.max_retransmissions.has_value() + ? MaxRetransmits(send_options.max_retransmissions.value()) + : MaxRetransmits::NoLimit(), + .expires_at = send_options.lifetime.has_value() + ? now + *send_options.lifetime + DurationMs(1) + : TimeMs::InfiniteFuture(), + .lifecycle_id = send_options.lifecycle_id, + }; GetOrCreateStreamInfo(message.stream_id()) - .Add(std::move(message), expires_at, send_options); + .Add(std::move(message), std::move(attributes)); RTC_DCHECK(IsConsistent()); } @@ -306,65 +381,9 @@ bool RRSendQueue::IsEmpty() const { return total_buffered_amount() == 0; } -std::map::iterator -RRSendQueue::GetNextStream(TimeMs now) { - auto start_it = streams_.lower_bound(StreamID(*current_stream_id_ + 1)); - - for (auto it = start_it; it != streams_.end(); ++it) { - if (it->second.HasDataToSend(now)) { - current_stream_id_ = it->first; - return it; - } - } - - for (auto it = streams_.begin(); it != start_it; ++it) { - if (it->second.HasDataToSend(now)) { - current_stream_id_ = it->first; - return it; - } - } - return streams_.end(); -} - absl::optional RRSendQueue::Produce(TimeMs now, size_t max_size) { - std::map::iterator stream_it; - - if (previous_message_has_ended_) { - // Previous message has ended. Round-robin to a different stream, if there - // even is one with data to send. - stream_it = GetNextStream(now); - if (stream_it == streams_.end()) { - RTC_DLOG(LS_VERBOSE) - << log_prefix_ - << "There is no stream with data; Can't produce any data."; - return absl::nullopt; - } - } else { - // The previous message has not ended; Continue from the current stream. - stream_it = streams_.find(current_stream_id_); - RTC_DCHECK(stream_it != streams_.end()); - } - - absl::optional data = stream_it->second.Produce(now, max_size); - if (data.has_value()) { - RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Producing DATA, type=" - << (data->data.is_unordered ? "unordered" : "ordered") - << "::" - << (*data->data.is_beginning && *data->data.is_end - ? 
"complete" - : *data->data.is_beginning - ? "first" - : *data->data.is_end ? "last" : "middle") - << ", stream_id=" << *stream_it->first - << ", ppid=" << *data->data.ppid - << ", length=" << data->data.payload.size(); - - previous_message_has_ended_ = *data->data.is_end; - } - - RTC_DCHECK(IsConsistent()); - return data; + return scheduler_.Produce(now, max_size); } bool RRSendQueue::Discard(IsUnordered unordered, @@ -372,46 +391,58 @@ bool RRSendQueue::Discard(IsUnordered unordered, MID message_id) { bool has_discarded = GetOrCreateStreamInfo(stream_id).Discard(unordered, message_id); - if (has_discarded) { - // Only partially sent messages are discarded, so if a message was - // discarded, then it was the currently sent message. - previous_message_has_ended_ = true; - } + RTC_DCHECK(IsConsistent()); return has_discarded; } -void RRSendQueue::PrepareResetStreams(rtc::ArrayView streams) { - for (StreamID stream_id : streams) { - GetOrCreateStreamInfo(stream_id).Pause(); - } +void RRSendQueue::PrepareResetStream(StreamID stream_id) { + GetOrCreateStreamInfo(stream_id).Pause(); RTC_DCHECK(IsConsistent()); } -bool RRSendQueue::CanResetStreams() const { - // Streams can be reset if those streams that are paused don't have any - // messages that are partially sent. - for (auto& stream : streams_) { - if (stream.second.is_paused() && - stream.second.has_partially_sent_message()) { - return false; +bool RRSendQueue::HasStreamsReadyToBeReset() const { + for (auto& [unused, stream] : streams_) { + if (stream.IsReadyToBeReset()) { + return true; } } - return true; + return false; +} +std::vector RRSendQueue::GetStreamsReadyToBeReset() { + RTC_DCHECK(absl::c_count_if(streams_, [](const auto& p) { + return p.second.IsResetting(); + }) == 0); + std::vector ready; + for (auto& [stream_id, stream] : streams_) { + if (stream.IsReadyToBeReset()) { + stream.SetAsResetting(); + ready.push_back(stream_id); + } + } + return ready; } void RRSendQueue::CommitResetStreams() { - for (auto& stream_entry : streams_) { - if (stream_entry.second.is_paused()) { - stream_entry.second.Reset(); + RTC_DCHECK(absl::c_count_if(streams_, [](const auto& p) { + return p.second.IsResetting(); + }) > 0); + for (auto& [unused, stream] : streams_) { + if (stream.IsResetting()) { + stream.Reset(); } } RTC_DCHECK(IsConsistent()); } void RRSendQueue::RollbackResetStreams() { - for (auto& stream_entry : streams_) { - stream_entry.second.Resume(); + RTC_DCHECK(absl::c_count_if(streams_, [](const auto& p) { + return p.second.IsResetting(); + }) > 0); + for (auto& [unused, stream] : streams_) { + if (stream.IsResetting()) { + stream.Resume(); + } } RTC_DCHECK(IsConsistent()); } @@ -419,11 +450,10 @@ void RRSendQueue::RollbackResetStreams() { void RRSendQueue::Reset() { // Recalculate buffered amount, as partially sent messages may have been put // fully back in the queue. 
- for (auto& stream_entry : streams_) { - OutgoingStream& stream = stream_entry.second; + for (auto& [unused, stream] : streams_) { stream.Reset(); } - previous_message_has_ended_ = true; + scheduler_.ForceReschedule(); } size_t RRSendQueue::buffered_amount(StreamID stream_id) const { @@ -455,13 +485,31 @@ RRSendQueue::OutgoingStream& RRSendQueue::GetOrCreateStreamInfo( } return streams_ - .emplace(stream_id, - OutgoingStream( - [this, stream_id]() { on_buffered_amount_low_(stream_id); }, - total_buffered_amount_)) + .emplace( + std::piecewise_construct, std::forward_as_tuple(stream_id), + std::forward_as_tuple(this, &scheduler_, stream_id, default_priority_, + [this, stream_id]() { + callbacks_.OnBufferedAmountLow(stream_id); + })) .first->second; } +void RRSendQueue::SetStreamPriority(StreamID stream_id, + StreamPriority priority) { + OutgoingStream& stream = GetOrCreateStreamInfo(stream_id); + + stream.SetPriority(priority); + RTC_DCHECK(IsConsistent()); +} + +StreamPriority RRSendQueue::GetStreamPriority(StreamID stream_id) const { + auto stream_it = streams_.find(stream_id); + if (stream_it == streams_.end()) { + return default_priority_; + } + return stream_it->second.priority(); +} + HandoverReadinessStatus RRSendQueue::GetHandoverReadiness() const { HandoverReadinessStatus status; if (!IsEmpty()) { @@ -471,10 +519,10 @@ HandoverReadinessStatus RRSendQueue::GetHandoverReadiness() const { } void RRSendQueue::AddHandoverState(DcSctpSocketHandoverState& state) { - for (const auto& entry : streams_) { + for (const auto& [stream_id, stream] : streams_) { DcSctpSocketHandoverState::OutgoingStream state_stream; - state_stream.id = entry.first.value(); - entry.second.AddHandoverState(state_stream); + state_stream.id = stream_id.value(); + stream.AddHandoverState(state_stream); state.tx.streams.push_back(std::move(state_stream)); } } @@ -483,11 +531,12 @@ void RRSendQueue::RestoreFromState(const DcSctpSocketHandoverState& state) { for (const DcSctpSocketHandoverState::OutgoingStream& state_stream : state.tx.streams) { StreamID stream_id(state_stream.id); - streams_.emplace(stream_id, OutgoingStream( - [this, stream_id]() { - on_buffered_amount_low_(stream_id); - }, - total_buffered_amount_, &state_stream)); + streams_.emplace( + std::piecewise_construct, std::forward_as_tuple(stream_id), + std::forward_as_tuple( + this, &scheduler_, stream_id, StreamPriority(state_stream.priority), + [this, stream_id]() { callbacks_.OnBufferedAmountLow(stream_id); }, + &state_stream)); } } } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/rr_send_queue.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/rr_send_queue.h index 3007cd0fe8..e9b8cd2081 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/rr_send_queue.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/rr_send_queue.h @@ -13,8 +13,10 @@ #include #include #include +#include #include #include +#include #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" @@ -24,6 +26,7 @@ #include "net/dcsctp/public/dcsctp_socket.h" #include "net/dcsctp/public/types.h" #include "net/dcsctp/tx/send_queue.h" +#include "net/dcsctp/tx/stream_scheduler.h" namespace dcsctp { @@ -38,17 +41,24 @@ namespace dcsctp { // // As messages can be (requested to be) sent before the connection is properly // established, this send queue is always present - even for closed connections. 
+// +// The send queue may trigger callbacks: +// * `OnBufferedAmountLow`, `OnTotalBufferedAmountLow` +// These will be triggered as defined in their documentation. +// * `OnLifecycleMessageExpired(/*maybe_delivered=*/false)`, `OnLifecycleEnd` +// These will be triggered when messages have been expired, abandoned or +// discarded from the send queue. If a message is fully produced, meaning +// that the last fragment has been produced, the responsibility to send +// lifecycle events is then transferred to the retransmission queue, which +// is the one asking to produce the message. class RRSendQueue : public SendQueue { public: - // How small a data chunk's payload may be, if having to fragment a message. - static constexpr size_t kMinimumFragmentedPayload = 10; - RRSendQueue(absl::string_view log_prefix, + DcSctpSocketCallbacks* callbacks, size_t buffer_size, - std::function on_buffered_amount_low, - size_t total_buffered_amount_low_threshold, - std::function on_total_buffered_amount_low, - const DcSctpSocketHandoverState* handover_state = nullptr); + size_t mtu, + StreamPriority default_priority, + size_t total_buffered_amount_low_threshold); // Indicates if the buffer is full. Note that it's up to the caller to ensure // that the buffer is not full prior to adding new items to it. @@ -69,8 +79,9 @@ class RRSendQueue : public SendQueue { bool Discard(IsUnordered unordered, StreamID stream_id, MID message_id) override; - void PrepareResetStreams(rtc::ArrayView streams) override; - bool CanResetStreams() const override; + void PrepareResetStream(StreamID streams) override; + bool HasStreamsReadyToBeReset() const override; + std::vector GetStreamsReadyToBeReset() override; void CommitResetStreams() override; void RollbackResetStreams() override; void Reset() override; @@ -80,12 +91,24 @@ class RRSendQueue : public SendQueue { } size_t buffered_amount_low_threshold(StreamID stream_id) const override; void SetBufferedAmountLowThreshold(StreamID stream_id, size_t bytes) override; + void EnableMessageInterleaving(bool enabled) override { + scheduler_.EnableMessageInterleaving(enabled); + } + void SetStreamPriority(StreamID stream_id, StreamPriority priority); + StreamPriority GetStreamPriority(StreamID stream_id) const; HandoverReadinessStatus GetHandoverReadiness() const; void AddHandoverState(DcSctpSocketHandoverState& state); void RestoreFromState(const DcSctpSocketHandoverState& state); private: + struct MessageAttributes { + IsUnordered unordered; + MaxRetransmits max_retransmissions; + TimeMs expires_at; + LifecycleId lifecycle_id; + }; + // Represents a value and a "low threshold" that when the value reaches or // goes under the "low threshold", will trigger `on_threshold_reached` // callback. @@ -110,25 +133,31 @@ class RRSendQueue : public SendQueue { }; // Per-stream information. - class OutgoingStream { + class OutgoingStream : public StreamScheduler::StreamProducer { public: - explicit OutgoingStream( + OutgoingStream( + RRSendQueue* parent, + StreamScheduler* scheduler, + StreamID stream_id, + StreamPriority priority, std::function on_buffered_amount_low, - ThresholdWatcher& total_buffered_amount, const DcSctpSocketHandoverState::OutgoingStream* state = nullptr) - : next_unordered_mid_(MID(state ? state->next_unordered_mid : 0)), + : parent_(*parent), + scheduler_stream_(scheduler->CreateStream(this, stream_id, priority)), + next_unordered_mid_(MID(state ? state->next_unordered_mid : 0)), next_ordered_mid_(MID(state ? state->next_ordered_mid : 0)), next_ssn_(SSN(state ? 
state->next_ssn : 0)), - buffered_amount_(std::move(on_buffered_amount_low)), - total_buffered_amount_(total_buffered_amount) {} + buffered_amount_(std::move(on_buffered_amount_low)) {} + + StreamID stream_id() const { return scheduler_stream_->stream_id(); } // Enqueues a message to this stream. - void Add(DcSctpMessage message, - TimeMs expires_at, - const SendOptions& send_options); + void Add(DcSctpMessage message, MessageAttributes attributes); - // Possibly produces a data chunk to send. - absl::optional Produce(TimeMs now, size_t max_size); + // Implementing `StreamScheduler::StreamProducer`. + absl::optional Produce(TimeMs now, + size_t max_size) override; + size_t bytes_to_send_in_next_message() const override; const ThresholdWatcher& buffered_amount() const { return buffered_amount_; } ThresholdWatcher& buffered_amount() { return buffered_amount_; } @@ -140,9 +169,18 @@ class RRSendQueue : public SendQueue { void Pause(); // Resumes a paused stream. - void Resume() { is_paused_ = false; } + void Resume(); + + bool IsReadyToBeReset() const { + return pause_state_ == PauseState::kPaused; + } - bool is_paused() const { return is_paused_; } + bool IsResetting() const { return pause_state_ == PauseState::kResetting; } + + void SetAsResetting() { + RTC_DCHECK(pause_state_ == PauseState::kPaused); + pause_state_ = PauseState::kResetting; + } // Resets this stream, meaning MIDs and SSNs are set to zero. void Reset(); @@ -150,27 +188,44 @@ class RRSendQueue : public SendQueue { // Indicates if this stream has a partially sent message in it. bool has_partially_sent_message() const; - // Indicates if the stream has data to send. It will also try to remove any - // expired non-partially sent message. - bool HasDataToSend(TimeMs now); + StreamPriority priority() const { return scheduler_stream_->priority(); } + void SetPriority(StreamPriority priority) { + scheduler_stream_->SetPriority(priority); + } void AddHandoverState( DcSctpSocketHandoverState::OutgoingStream& state) const; private: + // Streams are paused before they can be reset. To reset a stream, the + // socket sends an outgoing stream reset command with the TSN of the last + // fragment of the last message, so that receivers and senders can agree on + // when it stopped. And if the send queue is in the middle of sending a + // message, and without fragments not yet sent and without TSNs allocated to + // them, it will keep sending data until that message has ended. + enum class PauseState { + // The stream is not paused, and not scheduled to be reset. + kNotPaused, + // The stream has requested to be reset/paused but is still producing + // fragments of a message that hasn't ended yet. When it does, it will + // transition to the `kPaused` state. + kPending, + // The stream is fully paused and can be reset. + kPaused, + // The stream has been added to an outgoing stream reset request and a + // response from the peer hasn't been received yet. + kResetting, + }; + // An enqueued message and metadata. 
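The PauseState lifecycle documented above can be summarized as: Pause() enters kPending while a partially sent message still has to drain (kPaused otherwise), GetStreamsReadyToBeReset moves kPaused streams to kResetting, and a commit or rollback returns them to kNotPaused. A compact standalone sketch of those transitions, not the OutgoingStream code itself:

#include <cassert>

enum class PauseState { kNotPaused, kPending, kPaused, kResetting };

// Pause() goes to kPending while a partially sent message must still drain,
// otherwise straight to kPaused.
PauseState Pause(bool has_partially_sent_message) {
  return has_partially_sent_message ? PauseState::kPending
                                    : PauseState::kPaused;
}

// When the in-flight message ends, a pending pause becomes effective.
PauseState OnInFlightMessageEnded(PauseState s) {
  return s == PauseState::kPending ? PauseState::kPaused : s;
}

// Only a fully paused stream may be put into an outgoing reset request.
PauseState SetAsResetting(PauseState s) {
  assert(s == PauseState::kPaused);
  return PauseState::kResetting;
}

int main() {
  PauseState s = Pause(/*has_partially_sent_message=*/true);  // kPending
  s = OnInFlightMessageEnded(s);                              // kPaused
  s = SetAsResetting(s);                                      // kResetting
  s = PauseState::kNotPaused;  // CommitResetStreams / RollbackResetStreams.
  assert(s == PauseState::kNotPaused);
  return 0;
}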
struct Item { - explicit Item(DcSctpMessage msg, - TimeMs expires_at, - const SendOptions& send_options) + explicit Item(DcSctpMessage msg, MessageAttributes attributes) : message(std::move(msg)), - expires_at(expires_at), - send_options(send_options), + attributes(std::move(attributes)), remaining_offset(0), remaining_size(message.payload().size()) {} DcSctpMessage message; - TimeMs expires_at; - SendOptions send_options; + MessageAttributes attributes; // The remaining payload (offset and size) to be sent, when it has been // fragmented. size_t remaining_offset; @@ -184,9 +239,13 @@ class RRSendQueue : public SendQueue { }; bool IsConsistent() const; + void HandleMessageExpired(OutgoingStream::Item& item); + + RRSendQueue& parent_; + + const std::unique_ptr scheduler_stream_; - // Streams are pause when they are about to be reset. - bool is_paused_ = false; + PauseState pause_state_ = PauseState::kNotPaused; // MIDs are different for unordered and ordered messages sent on a stream. MID next_unordered_mid_; MID next_ordered_mid_; @@ -197,10 +256,6 @@ class RRSendQueue : public SendQueue { // The current amount of buffered data. ThresholdWatcher buffered_amount_; - - // Reference to the total buffered amount, which is updated directly by each - // stream. - ThresholdWatcher& total_buffered_amount_; }; bool IsConsistent() const; @@ -210,32 +265,15 @@ class RRSendQueue : public SendQueue { TimeMs now, size_t max_size); - // Return the next stream, in round-robin fashion. - std::map::iterator GetNextStream(TimeMs now); - const std::string log_prefix_; + DcSctpSocketCallbacks& callbacks_; const size_t buffer_size_; - - // Called when the buffered amount is below what has been set using - // `SetBufferedAmountLowThreshold`. - const std::function on_buffered_amount_low_; - - // Called when the total buffered amount is below what has been set using - // `SetTotalBufferedAmountLowThreshold`. - const std::function on_total_buffered_amount_low_; + const StreamPriority default_priority_; + StreamScheduler scheduler_; // The total amount of buffer data, for all streams. ThresholdWatcher total_buffered_amount_; - // Indicates if the previous fragment sent was the end of a message. For - // non-interleaved sending, this means that the next message may come from a - // different stream. If not true, the next fragment must be produced from the - // same stream as last time. - bool previous_message_has_ended_ = true; - - // The current stream to send chunks from. Modified by `GetNextStream`. - StreamID current_stream_id_ = StreamID(0); - // All streams, and messages added to those. std::map streams_; }; diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/send_queue.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/send_queue.h index a821d20785..0b96e9041a 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/send_queue.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/send_queue.h @@ -34,6 +34,10 @@ class SendQueue { // Partial reliability - RFC3758 MaxRetransmits max_retransmissions = MaxRetransmits::NoLimit(); TimeMs expires_at = TimeMs::InfiniteFuture(); + + // Lifecycle - set for the last fragment, and `LifecycleId::NotSet()` for + // all other fragments. + LifecycleId lifecycle_id = LifecycleId::NotSet(); }; virtual ~SendQueue() = default; @@ -67,11 +71,11 @@ class SendQueue { StreamID stream_id, MID message_id) = 0; - // Prepares the streams to be reset. This is used to close a WebRTC data + // Prepares the stream to be reset. 
This is used to close a WebRTC data // channel and will be signaled to the other side. // // Concretely, it discards all whole (not partly sent) messages in the given - // streams and pauses those streams so that future added messages aren't + // stream and pauses that stream so that future added messages aren't // produced until `ResumeStreams` is called. // // TODO(boivie): Investigate if it really should discard any message at all. @@ -82,24 +86,28 @@ class SendQueue { // reset, and paused while they are resetting. This is the first part of the // two-phase commit protocol to reset streams, where the caller completes the // procedure by either calling `CommitResetStreams` or `RollbackResetStreams`. - virtual void PrepareResetStreams(rtc::ArrayView streams) = 0; + virtual void PrepareResetStream(StreamID stream_id) = 0; + + // Indicates if there are any streams that are ready to be reset. + virtual bool HasStreamsReadyToBeReset() const = 0; - // Returns true if all non-discarded messages during `PrepareResetStreams` - // (which are those that was partially sent before that method was called) - // have been sent. - virtual bool CanResetStreams() const = 0; + // Returns a list of streams that are ready to be included in an outgoing + // stream reset request. Any streams that are returned here must be included + // in an outgoing stream reset request, and there must not be concurrent + // requests. Before calling this method again, you must have called + virtual std::vector GetStreamsReadyToBeReset() = 0; - // Called to commit to reset the streams provided to `PrepareResetStreams`. - // It will reset the stream sequence numbers (SSNs) and message identifiers - // (MIDs) and resume the paused streams. + // Called to commit to reset the streams returned by + // `GetStreamsReadyToBeReset`. It will reset the stream sequence numbers + // (SSNs) and message identifiers (MIDs) and resume the paused streams. virtual void CommitResetStreams() = 0; - // Called to abort the resetting of streams provided to `PrepareResetStreams`. - // Will resume the paused streams without resetting the stream sequence - // numbers (SSNs) or message identifiers (MIDs). Note that the non-partial - // messages that were discarded when calling `PrepareResetStreams` will not be - // recovered, to better match the intention from the sender to "close the - // channel". + // Called to abort the resetting of streams returned by + // `GetStreamsReadyToBeReset`. Will resume the paused streams without + // resetting the stream sequence numbers (SSNs) or message identifiers (MIDs). + // Note that the non-partial messages that were discarded when calling + // `PrepareResetStreams` will not be recovered, to better match the intention + // from the sender to "close the channel". virtual void RollbackResetStreams() = 0; // Resets all message identifier counters (MID, SSN) and makes all partially @@ -122,6 +130,12 @@ class SendQueue { // Sets a limit for the `OnBufferedAmountLow` event. virtual void SetBufferedAmountLowThreshold(StreamID stream_id, size_t bytes) = 0; + + // Configures the send queue to support interleaved message sending as + // described in RFC8260. Every send queue starts with this value set as + // disabled, but can later change it when the capabilities of the connection + // have been negotiated. This affects the behavior of the `Produce` method. 
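Taken together, the reset-related methods declared above replace the old PrepareResetStreams/CanResetStreams pair: the caller pauses a stream, collects the paused streams with GetStreamsReadyToBeReset (which marks them as resetting), and then either commits or rolls back. A toy sketch of that two-phase flow with illustrative types, not the SendQueue interface itself:

#include <cstdio>
#include <map>
#include <vector>

enum class State { kActive, kPaused, kResetting };
using StreamId = int;

struct ToySendQueue {
  std::map<StreamId, State> streams;

  void PrepareResetStream(StreamId id) { streams[id] = State::kPaused; }

  // Marks every paused stream as resetting and returns them; those ids must
  // then all go into exactly one outgoing stream-reset request.
  std::vector<StreamId> GetStreamsReadyToBeReset() {
    std::vector<StreamId> ready;
    for (auto& [id, state] : streams) {
      if (state == State::kPaused) {
        state = State::kResetting;
        ready.push_back(id);
      }
    }
    return ready;
  }

  void CommitResetStreams() {  // Peer accepted: SSN/MID counters would reset.
    for (auto& entry : streams)
      if (entry.second == State::kResetting) entry.second = State::kActive;
  }

  void RollbackResetStreams() {  // Peer denied: resume, keep the counters.
    for (auto& entry : streams)
      if (entry.second == State::kResetting) entry.second = State::kActive;
  }
};

int main() {
  ToySendQueue q;
  q.streams = {{1, State::kActive}, {2, State::kActive}};
  q.PrepareResetStream(2);
  std::vector<StreamId> ready = q.GetStreamsReadyToBeReset();
  std::printf("streams in reset request: %zu\n", ready.size());  // 1
  q.CommitResetStreams();
}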
+ virtual void EnableMessageInterleaving(bool enabled) = 0; }; } // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/stream_scheduler.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/stream_scheduler.cc new file mode 100644 index 0000000000..d1560a75e4 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/stream_scheduler.cc @@ -0,0 +1,199 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/tx/stream_scheduler.h" + +#include + +#include "absl/algorithm/container.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/str_join.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/public/dcsctp_socket.h" +#include "net/dcsctp/public/types.h" +#include "net/dcsctp/tx/send_queue.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace dcsctp { + +void StreamScheduler::Stream::SetPriority(StreamPriority priority) { + priority_ = priority; + inverse_weight_ = InverseWeight(priority); +} + +absl::optional StreamScheduler::Produce( + TimeMs now, + size_t max_size) { + // For non-interleaved streams, avoid rescheduling while still sending a + // message as it needs to be sent in full. For interleaved messaging, + // reschedule for every I-DATA chunk sent. + bool rescheduling = + enable_message_interleaving_ || !currently_sending_a_message_; + + RTC_LOG(LS_VERBOSE) << "Producing data, rescheduling=" << rescheduling + << ", active=" + << StrJoin(active_streams_, ", ", + [&](rtc::StringBuilder& sb, const auto& p) { + sb << *p->stream_id() << "@" + << *p->next_finish_time(); + }); + + RTC_DCHECK(rescheduling || current_stream_ != nullptr); + + absl::optional data; + while (!data.has_value() && !active_streams_.empty()) { + if (rescheduling) { + auto it = active_streams_.begin(); + current_stream_ = *it; + RTC_DLOG(LS_VERBOSE) << "Rescheduling to stream " + << *current_stream_->stream_id(); + + active_streams_.erase(it); + current_stream_->ForceMarkInactive(); + } else { + RTC_DLOG(LS_VERBOSE) << "Producing from previous stream: " + << *current_stream_->stream_id(); + RTC_DCHECK(absl::c_any_of(active_streams_, [this](const auto* p) { + return p == current_stream_; + })); + } + + data = current_stream_->Produce(now, max_size); + } + + if (!data.has_value()) { + RTC_DLOG(LS_VERBOSE) + << "There is no stream with data; Can't produce any data."; + RTC_DCHECK(IsConsistent()); + + return absl::nullopt; + } + + RTC_DCHECK(data->data.stream_id == current_stream_->stream_id()); + + RTC_DLOG(LS_VERBOSE) << "Producing DATA, type=" + << (data->data.is_unordered ? "unordered" : "ordered") + << "::" + << (*data->data.is_beginning && *data->data.is_end + ? "complete" + : *data->data.is_beginning ? "first" + : *data->data.is_end ? 
"last" + : "middle") + << ", stream_id=" << *current_stream_->stream_id() + << ", ppid=" << *data->data.ppid + << ", length=" << data->data.payload.size(); + + currently_sending_a_message_ = !*data->data.is_end; + virtual_time_ = current_stream_->current_time(); + + // One side-effect of rescheduling is that the new stream will not be present + // in `active_streams`. + size_t bytes_to_send_next = current_stream_->bytes_to_send_in_next_message(); + if (rescheduling && bytes_to_send_next > 0) { + current_stream_->MakeActive(bytes_to_send_next); + } else if (!rescheduling && bytes_to_send_next == 0) { + current_stream_->MakeInactive(); + } + + RTC_DCHECK(IsConsistent()); + return data; +} + +StreamScheduler::VirtualTime StreamScheduler::Stream::CalculateFinishTime( + size_t bytes_to_send_next) const { + if (parent_.enable_message_interleaving_) { + // Perform weighted fair queuing scheduling. + return VirtualTime(*current_virtual_time_ + + bytes_to_send_next * *inverse_weight_); + } + + // Perform round-robin scheduling by letting the stream have its next virtual + // finish time in the future. It doesn't matter how far into the future, just + // any positive number so that any other stream that has the same virtual + // finish time as this stream gets to produce their data before revisiting + // this stream. + return VirtualTime(*current_virtual_time_ + 1); +} + +absl::optional StreamScheduler::Stream::Produce( + TimeMs now, + size_t max_size) { + absl::optional data = producer_.Produce(now, max_size); + + if (data.has_value()) { + VirtualTime new_current = CalculateFinishTime(data->data.payload.size()); + RTC_DLOG(LS_VERBOSE) << "Virtual time changed: " << *current_virtual_time_ + << " -> " << *new_current; + current_virtual_time_ = new_current; + } + + return data; +} + +bool StreamScheduler::IsConsistent() const { + for (Stream* stream : active_streams_) { + if (stream->next_finish_time_ == VirtualTime::Zero()) { + RTC_DLOG(LS_VERBOSE) << "Stream " << *stream->stream_id() + << " is active, but has no next-finish-time"; + return false; + } + } + return true; +} + +void StreamScheduler::Stream::MaybeMakeActive() { + RTC_DLOG(LS_VERBOSE) << "MaybeMakeActive(" << *stream_id() << ")"; + RTC_DCHECK(next_finish_time_ == VirtualTime::Zero()); + size_t bytes_to_send_next = bytes_to_send_in_next_message(); + if (bytes_to_send_next == 0) { + return; + } + + MakeActive(bytes_to_send_next); +} + +void StreamScheduler::Stream::MakeActive(size_t bytes_to_send_next) { + current_virtual_time_ = parent_.virtual_time_; + RTC_DCHECK_GT(bytes_to_send_next, 0); + VirtualTime next_finish_time = CalculateFinishTime( + std::min(bytes_to_send_next, parent_.max_payload_bytes_)); + RTC_DCHECK_GT(*next_finish_time, 0); + RTC_DLOG(LS_VERBOSE) << "Making stream " << *stream_id() + << " active, expiring at " << *next_finish_time; + RTC_DCHECK(next_finish_time_ == VirtualTime::Zero()); + next_finish_time_ = next_finish_time; + RTC_DCHECK(!absl::c_any_of(parent_.active_streams_, + [this](const auto* p) { return p == this; })); + parent_.active_streams_.emplace(this); +} + +void StreamScheduler::Stream::ForceMarkInactive() { + RTC_DLOG(LS_VERBOSE) << "Making stream " << *stream_id() << " inactive"; + RTC_DCHECK(next_finish_time_ != VirtualTime::Zero()); + next_finish_time_ = VirtualTime::Zero(); +} + +void StreamScheduler::Stream::MakeInactive() { + ForceMarkInactive(); + webrtc::EraseIf(parent_.active_streams_, + [&](const auto* s) { return s == this; }); +} + +std::set StreamScheduler::ActiveStreamsForTesting() const { 
+ std::set stream_ids; + for (const auto& stream : active_streams_) { + stream_ids.insert(stream->stream_id()); + } + return stream_ids; +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/stream_scheduler.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/stream_scheduler.h new file mode 100644 index 0000000000..9c523edbfc --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/stream_scheduler.h @@ -0,0 +1,222 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_TX_STREAM_SCHEDULER_H_ +#define NET_DCSCTP_TX_STREAM_SCHEDULER_H_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/idata_chunk.h" +#include "net/dcsctp/packet/sctp_packet.h" +#include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/public/dcsctp_socket.h" +#include "net/dcsctp/public/types.h" +#include "net/dcsctp/tx/send_queue.h" +#include "rtc_base/containers/flat_set.h" +#include "rtc_base/strong_alias.h" + +namespace dcsctp { + +// A parameterized stream scheduler. Currently, it implements the round robin +// scheduling algorithm using virtual finish time. It is to be used as a part of +// a send queue and will track all active streams (streams that have any data +// that can be sent). +// +// The stream scheduler works with the concept of associating active streams +// with a "virtual finish time", which is the time when a stream is allowed to +// produce data. Streams are ordered by their virtual finish time, and the +// "current virtual time" will advance to the next following virtual finish time +// whenever a chunk is to be produced. +// +// When message interleaving is enabled, the WFQ - Weighted Fair Queueing - +// scheduling algorithm will be used. And when it's not, round-robin scheduling +// will be used instead. +// +// In the round robin scheduling algorithm, a stream's virtual finish time will +// just increment by one (1) after having produced a chunk, which results in a +// round-robin scheduling. +// +// In WFQ scheduling algorithm, a stream's virtual finish time will be defined +// as the number of bytes in the next fragment to be sent, multiplied by the +// inverse of the stream's priority, meaning that a high priority - or a smaller +// fragment - results in a closer virtual finish time, compared to a stream with +// either a lower priority or a larger fragment to be sent. 
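As a worked illustration of the rule described in the comment above: with interleaving enabled, a stream's next virtual finish time grows by bytes_to_send times the inverse of its priority, so a higher-priority stream (or a smaller fragment) "finishes" sooner and is picked first; without interleaving the increment is simply one, which degenerates into round robin. A self-contained sketch of the WFQ arithmetic, not the StreamScheduler class itself:

#include <cstddef>
#include <cstdio>

// Weighted-fair-queueing finish time: virtual time advances per stream by the
// fragment size scaled by the inverse of that stream's priority.
double FinishTime(double current_virtual_time, size_t bytes_to_send,
                  double priority) {
  double inverse_weight = 1.0 / priority;
  return current_virtual_time + bytes_to_send * inverse_weight;
}

int main() {
  // Two streams start at virtual time 0 and both want to send 1000 bytes.
  double high = FinishTime(0, 1000, /*priority=*/256);  // ~3.9
  double low = FinishTime(0, 1000, /*priority=*/64);    // ~15.6
  std::printf("high-priority finishes at %.1f, low-priority at %.1f\n", high,
              low);
  // The scheduler always picks the smallest finish time, so with equal chunk
  // sizes the priority-256 stream produces roughly four chunks for every
  // chunk produced by the priority-64 stream.
}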
+class StreamScheduler { + private: + class VirtualTime : public webrtc::StrongAlias { + public: + constexpr explicit VirtualTime(const UnderlyingType& v) + : webrtc::StrongAlias(v) {} + + static constexpr VirtualTime Zero() { return VirtualTime(0); } + }; + class InverseWeight + : public webrtc::StrongAlias { + public: + constexpr explicit InverseWeight(StreamPriority priority) + : webrtc::StrongAlias( + 1.0 / std::max(static_cast(*priority), 0.000001)) {} + }; + + public: + class StreamProducer { + public: + virtual ~StreamProducer() = default; + + // Produces a fragment of data to send. The current wall time is specified + // as `now` and should be used to skip chunks with expired limited lifetime. + // The parameter `max_size` specifies the maximum amount of actual payload + // that may be returned. If these constraints prevents the stream from + // sending some data, `absl::nullopt` should be returned. + virtual absl::optional Produce(TimeMs now, + size_t max_size) = 0; + + // Returns the number of payload bytes that is scheduled to be sent in the + // next enqueued message, or zero if there are no enqueued messages or if + // the stream has been actively paused. + virtual size_t bytes_to_send_in_next_message() const = 0; + }; + + class Stream { + public: + StreamID stream_id() const { return stream_id_; } + + StreamPriority priority() const { return priority_; } + void SetPriority(StreamPriority priority); + + // Will activate the stream _if_ it has any data to send. That is, if the + // callback to `bytes_to_send_in_next_message` returns non-zero. If the + // callback returns zero, the stream will not be made active. + void MaybeMakeActive(); + + // Will remove the stream from the list of active streams, and will not try + // to produce data from it. To make it active again, call `MaybeMakeActive`. + void MakeInactive(); + + // Make the scheduler move to another message, or another stream. This is + // used to abort the scheduler from continuing producing fragments for the + // current message in case it's deleted. + void ForceReschedule() { parent_.ForceReschedule(); } + + private: + friend class StreamScheduler; + + Stream(StreamScheduler* parent, + StreamProducer* producer, + StreamID stream_id, + StreamPriority priority) + : parent_(*parent), + producer_(*producer), + stream_id_(stream_id), + priority_(priority), + inverse_weight_(priority) {} + + // Produces a message from this stream. This will only be called on streams + // that have data. + absl::optional Produce(TimeMs now, size_t max_size); + + void MakeActive(size_t bytes_to_send_next); + void ForceMarkInactive(); + + VirtualTime current_time() const { return current_virtual_time_; } + VirtualTime next_finish_time() const { return next_finish_time_; } + size_t bytes_to_send_in_next_message() const { + return producer_.bytes_to_send_in_next_message(); + } + + VirtualTime CalculateFinishTime(size_t bytes_to_send_next) const; + + StreamScheduler& parent_; + StreamProducer& producer_; + const StreamID stream_id_; + StreamPriority priority_; + InverseWeight inverse_weight_; + // This outgoing stream's "current" virtual_time. + VirtualTime current_virtual_time_ = VirtualTime::Zero(); + VirtualTime next_finish_time_ = VirtualTime::Zero(); + }; + + // The `mtu` parameter represents the maximum SCTP packet size, which should + // be the same as `DcSctpOptions::mtu`. 
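The mtu mentioned in the comment above is turned into a per-chunk payload budget by the constructor just below, which subtracts the SCTP common header and the I-DATA chunk header. Assuming the usual wire sizes (12 bytes for the SCTP common header and 20 bytes for an I-DATA chunk header; the real code takes them from SctpPacket::kHeaderSize and IDataChunk::kHeaderSize), the arithmetic looks like:

#include <cstddef>
#include <cstdio>

int main() {
  // Assumed header sizes for illustration; the implementation uses its own
  // constants rather than these literals.
  constexpr size_t kSctpCommonHeader = 12;  // ports + verification tag + CRC.
  constexpr size_t kIDataChunkHeader = 20;  // chunk hdr + TSN + SID + MID + PPID/FSN.
  constexpr size_t kMtu = 1200;

  constexpr size_t kMaxPayloadBytes =
      kMtu - kSctpCommonHeader - kIDataChunkHeader;
  std::printf("max payload per I-DATA chunk at mtu=%zu: %zu bytes\n", kMtu,
              kMaxPayloadBytes);  // 1168
}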
+ explicit StreamScheduler(size_t mtu) + : max_payload_bytes_(mtu - SctpPacket::kHeaderSize - + IDataChunk::kHeaderSize) {} + + std::unique_ptr CreateStream(StreamProducer* producer, + StreamID stream_id, + StreamPriority priority) { + return absl::WrapUnique(new Stream(this, producer, stream_id, priority)); + } + + void EnableMessageInterleaving(bool enabled) { + enable_message_interleaving_ = enabled; + } + + // Makes the scheduler stop producing message from the current stream and + // re-evaluates which stream to produce from. + void ForceReschedule() { currently_sending_a_message_ = false; } + + // Produces a fragment of data to send. The current wall time is specified as + // `now` and will be used to skip chunks with expired limited lifetime. The + // parameter `max_size` specifies the maximum amount of actual payload that + // may be returned. If no data can be produced, `absl::nullopt` is returned. + absl::optional Produce(TimeMs now, size_t max_size); + + std::set ActiveStreamsForTesting() const; + + private: + struct ActiveStreamComparator { + // Ordered by virtual finish time (primary), stream-id (secondary). + bool operator()(Stream* a, Stream* b) const { + VirtualTime a_vft = a->next_finish_time(); + VirtualTime b_vft = b->next_finish_time(); + if (a_vft == b_vft) { + return a->stream_id() < b->stream_id(); + } + return a_vft < b_vft; + } + }; + + bool IsConsistent() const; + + const size_t max_payload_bytes_; + + // The current virtual time, as defined in the WFQ algorithm. + VirtualTime virtual_time_ = VirtualTime::Zero(); + + // The current stream to send chunks from. + Stream* current_stream_ = nullptr; + + bool enable_message_interleaving_ = false; + + // Indicates if the streams is currently sending a message, and should then + // - if message interleaving is not enabled - continue sending from this + // stream until that message has been sent in full. + bool currently_sending_a_message_ = false; + + // The currently active streams, ordered by virtual finish time. + webrtc::flat_set active_streams_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_TX_STREAM_SCHEDULER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/active_ice_controller_factory_interface.h b/TMessagesProj/jni/voip/webrtc/p2p/base/active_ice_controller_factory_interface.h new file mode 100644 index 0000000000..6a47f2253f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/active_ice_controller_factory_interface.h @@ -0,0 +1,39 @@ +/* + * Copyright 2022 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef P2P_BASE_ACTIVE_ICE_CONTROLLER_FACTORY_INTERFACE_H_ +#define P2P_BASE_ACTIVE_ICE_CONTROLLER_FACTORY_INTERFACE_H_ + +#include + +#include "p2p/base/active_ice_controller_interface.h" +#include "p2p/base/ice_agent_interface.h" +#include "p2p/base/ice_controller_factory_interface.h" + +namespace cricket { + +// An active ICE controller may be constructed with the same arguments as a +// legacy ICE controller. Additionally, an ICE agent must be provided for the +// active ICE controller to interact with. 
+struct ActiveIceControllerFactoryArgs { + IceControllerFactoryArgs legacy_args; + IceAgentInterface* ice_agent; +}; + +class ActiveIceControllerFactoryInterface { + public: + virtual ~ActiveIceControllerFactoryInterface() = default; + virtual std::unique_ptr Create( + const ActiveIceControllerFactoryArgs&) = 0; +}; + +} // namespace cricket + +#endif // P2P_BASE_ACTIVE_ICE_CONTROLLER_FACTORY_INTERFACE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/active_ice_controller_interface.h b/TMessagesProj/jni/voip/webrtc/p2p/base/active_ice_controller_interface.h new file mode 100644 index 0000000000..e54838ee64 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/active_ice_controller_interface.h @@ -0,0 +1,84 @@ +/* + * Copyright 2022 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef P2P_BASE_ACTIVE_ICE_CONTROLLER_INTERFACE_H_ +#define P2P_BASE_ACTIVE_ICE_CONTROLLER_INTERFACE_H_ + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "p2p/base/connection.h" +#include "p2p/base/ice_switch_reason.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/transport_description.h" + +namespace cricket { + +// ActiveIceControllerInterface defines the methods for a module that actively +// manages the connection used by an ICE transport. +// +// An active ICE controller receives updates from the ICE transport when +// - the connections state is mutated +// - a new connection should be selected as a result of an external event (eg. +// a different connection nominated by the remote peer) +// +// The active ICE controller takes the appropriate decisions and requests the +// ICE agent to perform the necessary actions through the IceAgentInterface. +class ActiveIceControllerInterface { + public: + virtual ~ActiveIceControllerInterface() = default; + + // Sets the current ICE configuration. + virtual void SetIceConfig(const IceConfig& config) = 0; + + // Called when a new connection is added to the ICE transport. + virtual void OnConnectionAdded(const Connection* connection) = 0; + + // Called when the transport switches that connection in active use. + virtual void OnConnectionSwitched(const Connection* connection) = 0; + + // Called when a connection is destroyed. + virtual void OnConnectionDestroyed(const Connection* connection) = 0; + + // Called when a STUN ping has been sent on a connection. This does not + // indicate that a STUN response has been received. + virtual void OnConnectionPinged(const Connection* connection) = 0; + + // Called when one of the following changes for a connection. + // - rtt estimate + // - write state + // - receiving + // - connected + // - nominated + virtual void OnConnectionUpdated(const Connection* connection) = 0; + + // Compute "STUN_ATTR_USE_CANDIDATE" for a STUN ping on the given connection. + virtual bool GetUseCandidateAttribute(const Connection* connection, + NominationMode mode, + IceMode remote_ice_mode) const = 0; + + // Called to enque a request to pick and switch to the best available + // connection. + virtual void OnSortAndSwitchRequest(IceSwitchReason reason) = 0; + + // Called to pick and switch to the best available connection immediately. 
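To show the shape of the contract sketched in the comments above without pulling in the real cricket types: the transport only reports connection events, and the controller decides when the agent should act. A rough standalone analogue with made-up types, not the ActiveIceControllerInterface or IceAgentInterface:

#include <cstdio>
#include <string>

// Stand-ins for Connection and the ICE agent; purely illustrative.
struct ToyConnection {
  std::string name;
  int rtt_ms;
};

class ToyAgent {
 public:
  void SwitchTo(const ToyConnection& c) {
    std::printf("agent switches to %s\n", c.name.c_str());
  }
};

// Analogue of an active controller: it is *notified* about connection state
// and *decides* whether the agent should switch.
class ToyActiveController {
 public:
  explicit ToyActiveController(ToyAgent* agent) : agent_(agent) {}

  void OnConnectionUpdated(const ToyConnection& c) {
    // Trivial policy for the sketch: switch whenever a faster path shows up.
    if (best_rtt_ms_ < 0 || c.rtt_ms < best_rtt_ms_) {
      best_rtt_ms_ = c.rtt_ms;
      agent_->SwitchTo(c);
    }
  }

 private:
  ToyAgent* agent_;
  int best_rtt_ms_ = -1;
};

int main() {
  ToyAgent agent;
  ToyActiveController controller(&agent);
  controller.OnConnectionUpdated({"relay", 80});  // switches (first usable)
  controller.OnConnectionUpdated({"host", 20});   // switches (lower rtt)
  controller.OnConnectionUpdated({"srflx", 50});  // no switch
}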
+ virtual void OnImmediateSortAndSwitchRequest(IceSwitchReason reason) = 0; + + // Called to switch to the given connection immediately without checking for + // the best available connection. + virtual bool OnImmediateSwitchRequest(IceSwitchReason reason, + const Connection* selected) = 0; + + // Only for unit tests + virtual const Connection* FindNextPingableConnection() = 0; +}; + +} // namespace cricket + +#endif // P2P_BASE_ACTIVE_ICE_CONTROLLER_INTERFACE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/async_stun_tcp_socket.h b/TMessagesProj/jni/voip/webrtc/p2p/base/async_stun_tcp_socket.h index eb4eef7cdc..f0df42b52a 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/async_stun_tcp_socket.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/async_stun_tcp_socket.h @@ -15,7 +15,6 @@ #include "rtc_base/async_packet_socket.h" #include "rtc_base/async_tcp_socket.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" @@ -32,6 +31,9 @@ class AsyncStunTCPSocket : public rtc::AsyncTCPSocketBase { explicit AsyncStunTCPSocket(rtc::Socket* socket); + AsyncStunTCPSocket(const AsyncStunTCPSocket&) = delete; + AsyncStunTCPSocket& operator=(const AsyncStunTCPSocket&) = delete; + int Send(const void* pv, size_t cb, const rtc::PacketOptions& options) override; @@ -42,8 +44,6 @@ class AsyncStunTCPSocket : public rtc::AsyncTCPSocketBase { // This method also returns the number of padding bytes needed/added to the // turn message. `pad_bytes` should be used only when `is_turn` is true. size_t GetExpectedLength(const void* data, size_t len, int* pad_bytes); - - RTC_DISALLOW_COPY_AND_ASSIGN(AsyncStunTCPSocket); }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.cc index 6824357821..3fdf75b12f 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.cc @@ -36,7 +36,17 @@ WrappingAsyncDnsResolverFactory::CreateAndResolve( const rtc::SocketAddress& addr, std::function callback) { std::unique_ptr resolver = Create(); - resolver->Start(addr, callback); + resolver->Start(addr, std::move(callback)); + return resolver; +} + +std::unique_ptr +WrappingAsyncDnsResolverFactory::CreateAndResolve( + const rtc::SocketAddress& addr, + int family, + std::function callback) { + std::unique_ptr resolver = Create(); + resolver->Start(addr, family, std::move(callback)); return resolver; } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.h b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.h index c988913068..9a0ba1ab28 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.h @@ -45,6 +45,11 @@ class WrappingAsyncDnsResolverFactory final const rtc::SocketAddress& addr, std::function callback) override; + std::unique_ptr CreateAndResolve( + const rtc::SocketAddress& addr, + int family, + std::function callback) override; + std::unique_ptr Create() override; private: diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_ice_controller.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_ice_controller.cc index 81fb324d1f..55f187cb9a 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_ice_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_ice_controller.cc @@ -83,6 +83,8 @@ 
void BasicIceController::OnConnectionDestroyed(const Connection* connection) { pinged_connections_.erase(connection); unpinged_connections_.erase(connection); connections_.erase(absl::c_find(connections_, connection)); + if (selected_connection_ == connection) + selected_connection_ = nullptr; } bool BasicIceController::HasPingableConnection() const { @@ -413,7 +415,7 @@ BasicIceController::GetBestWritableConnectionPerNetwork() const { IceControllerInterface::SwitchResult BasicIceController::HandleInitialSelectDampening( - IceControllerEvent reason, + IceSwitchReason reason, const Connection* new_connection) { if (!field_trials_->initial_select_dampening.has_value() && !field_trials_->initial_select_dampening_ping_received.has_value()) { @@ -462,13 +464,13 @@ BasicIceController::HandleInitialSelectDampening( } RTC_LOG(LS_INFO) << "delay initial selection up to " << min_delay << "ms"; - reason.type = IceControllerEvent::ICE_CONTROLLER_RECHECK; - reason.recheck_delay_ms = min_delay; - return {absl::nullopt, reason}; + return {.connection = absl::nullopt, + .recheck_event = IceRecheckEvent( + IceSwitchReason::ICE_CONTROLLER_RECHECK, min_delay)}; } IceControllerInterface::SwitchResult BasicIceController::ShouldSwitchConnection( - IceControllerEvent reason, + IceSwitchReason reason, const Connection* new_connection) { if (!ReadyToSend(new_connection) || selected_connection_ == new_connection) { return {absl::nullopt, absl::nullopt}; @@ -494,16 +496,15 @@ IceControllerInterface::SwitchResult BasicIceController::ShouldSwitchConnection( receiving_unchanged_threshold, &missed_receiving_unchanged_threshold); - absl::optional recheck_event; + absl::optional recheck_event; if (missed_receiving_unchanged_threshold && config_.receiving_switching_delay_or_default()) { // If we do not switch to the connection because it missed the receiving // threshold, the new connection is in a better receiving state than the // currently selected connection. So we need to re-check whether it needs // to be switched at a later time. - recheck_event = reason; - recheck_event->recheck_delay_ms = - config_.receiving_switching_delay_or_default(); + recheck_event.emplace(reason, + config_.receiving_switching_delay_or_default()); } if (cmp < 0) { @@ -522,7 +523,7 @@ IceControllerInterface::SwitchResult BasicIceController::ShouldSwitchConnection( } IceControllerInterface::SwitchResult -BasicIceController::SortAndSwitchConnection(IceControllerEvent reason) { +BasicIceController::SortAndSwitchConnection(IceSwitchReason reason) { // Find the best alternative connection by sorting. It is important to note // that amongst equal preference, writable connections, this will choose the // one whose estimated latency is lowest. So it is the only one that we @@ -795,7 +796,7 @@ std::vector BasicIceController::PruneConnections() { auto best_connection_by_network = GetBestConnectionByNetwork(); for (const Connection* conn : connections_) { const Connection* best_conn = selected_connection_; - if (!rtc::IPIsAny(conn->network()->ip())) { + if (!rtc::IPIsAny(conn->network()->GetBestIP())) { // If the connection is bound to a specific network interface (not an // "any address" network), compare it against the best connection for // that network interface rather than the best connection overall. 
This diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_ice_controller.h b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_ice_controller.h index e8861952c3..b941a0dd7e 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_ice_controller.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_ice_controller.h @@ -46,9 +46,9 @@ class BasicIceController : public IceControllerInterface { NominationMode mode, IceMode remote_ice_mode) const override; - SwitchResult ShouldSwitchConnection(IceControllerEvent reason, + SwitchResult ShouldSwitchConnection(IceSwitchReason reason, const Connection* connection) override; - SwitchResult SortAndSwitchConnection(IceControllerEvent reason) override; + SwitchResult SortAndSwitchConnection(IceSwitchReason reason) override; std::vector PruneConnections() override; @@ -136,7 +136,7 @@ class BasicIceController : public IceControllerInterface { absl::optional receiving_unchanged_threshold, bool* missed_receiving_unchanged_threshold) const; - SwitchResult HandleInitialSelectDampening(IceControllerEvent reason, + SwitchResult HandleInitialSelectDampening(IceSwitchReason reason, const Connection* new_connection); std::function ice_transport_state_func_; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc index e0e5c150b2..18280f1940 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc @@ -19,6 +19,7 @@ #include "absl/algorithm/container.h" #include "absl/strings/match.h" +#include "absl/strings/string_view.h" #include "p2p/base/port_allocator.h" #include "rtc_base/checks.h" #include "rtc_base/crc32.h" @@ -32,14 +33,14 @@ #include "rtc_base/string_utils.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/third_party/base64/base64.h" -#include "system_wrappers/include/field_trial.h" +namespace cricket { namespace { // Determines whether we have seen at least the given maximum number of // pings fail to have a response. inline bool TooManyFailures( - const std::vector& pings_since_last_response, + const std::vector& pings_since_last_response, uint32_t maximum_failures, int rtt_estimate, int64_t now) { @@ -56,7 +57,7 @@ inline bool TooManyFailures( // Determines whether we have gone too long without seeing any response. inline bool TooLongWithoutResponse( - const std::vector& pings_since_last_response, + const std::vector& pings_since_last_response, int64_t maximum_time, int64_t now) { if (pings_since_last_response.size() == 0) @@ -68,28 +69,28 @@ inline bool TooLongWithoutResponse( // Helper methods for converting string values of log description fields to // enum. 
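The helpers referenced in the comment above are switched just below from taking const std::string& to absl::string_view, which avoids constructing a temporary std::string when the caller holds a literal or a view. A minimal equivalent using std::string_view, with the mapping table reduced for illustration:

#include <cstdio>
#include <string_view>

enum class CandidateType { kLocal, kStun, kPrflx, kRelay, kUnknown };

// Accepting std::string_view means string literals, std::string objects and
// other views all bind without allocating a temporary std::string.
CandidateType CandidateTypeByString(std::string_view type) {
  if (type == "local") return CandidateType::kLocal;
  if (type == "stun") return CandidateType::kStun;
  if (type == "prflx") return CandidateType::kPrflx;
  if (type == "relay") return CandidateType::kRelay;
  return CandidateType::kUnknown;
}

int main() {
  std::printf("relay -> %d\n",
              static_cast<int>(CandidateTypeByString("relay")));
}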
-webrtc::IceCandidateType GetCandidateTypeByString(const std::string& type) { - if (type == cricket::LOCAL_PORT_TYPE) { +webrtc::IceCandidateType GetCandidateTypeByString(absl::string_view type) { + if (type == LOCAL_PORT_TYPE) { return webrtc::IceCandidateType::kLocal; - } else if (type == cricket::STUN_PORT_TYPE) { + } else if (type == STUN_PORT_TYPE) { return webrtc::IceCandidateType::kStun; - } else if (type == cricket::PRFLX_PORT_TYPE) { + } else if (type == PRFLX_PORT_TYPE) { return webrtc::IceCandidateType::kPrflx; - } else if (type == cricket::RELAY_PORT_TYPE) { + } else if (type == RELAY_PORT_TYPE) { return webrtc::IceCandidateType::kRelay; } return webrtc::IceCandidateType::kUnknown; } webrtc::IceCandidatePairProtocol GetProtocolByString( - const std::string& protocol) { - if (protocol == cricket::UDP_PROTOCOL_NAME) { + absl::string_view protocol) { + if (protocol == UDP_PROTOCOL_NAME) { return webrtc::IceCandidatePairProtocol::kUdp; - } else if (protocol == cricket::TCP_PROTOCOL_NAME) { + } else if (protocol == TCP_PROTOCOL_NAME) { return webrtc::IceCandidatePairProtocol::kTcp; - } else if (protocol == cricket::SSLTCP_PROTOCOL_NAME) { + } else if (protocol == SSLTCP_PROTOCOL_NAME) { return webrtc::IceCandidatePairProtocol::kSsltcp; - } else if (protocol == cricket::TLS_PROTOCOL_NAME) { + } else if (protocol == TLS_PROTOCOL_NAME) { return webrtc::IceCandidatePairProtocol::kTls; } return webrtc::IceCandidatePairProtocol::kUnknown; @@ -148,143 +149,72 @@ const int RTT_RATIO = 3; // 3 : 1 constexpr int64_t kMinExtraPingDelayMs = 100; // Default field trials. -const cricket::IceFieldTrials kDefaultFieldTrials; +const IceFieldTrials kDefaultFieldTrials; -constexpr int kSupportGoogPingVersionRequestIndex = - static_cast(cricket::IceGoogMiscInfoBindingRequestAttributeIndex:: - SUPPORT_GOOG_PING_VERSION); +constexpr int kSupportGoogPingVersionRequestIndex = static_cast( + IceGoogMiscInfoBindingRequestAttributeIndex::SUPPORT_GOOG_PING_VERSION); -constexpr int kSupportGoogPingVersionResponseIndex = - static_cast(cricket::IceGoogMiscInfoBindingResponseAttributeIndex:: - SUPPORT_GOOG_PING_VERSION); +constexpr int kSupportGoogPingVersionResponseIndex = static_cast( + IceGoogMiscInfoBindingResponseAttributeIndex::SUPPORT_GOOG_PING_VERSION); } // namespace -namespace cricket { - // A ConnectionRequest is a STUN binding used to determine writability. -ConnectionRequest::ConnectionRequest(Connection* connection) - : StunRequest(new IceMessage()), connection_(connection) {} - -void ConnectionRequest::Prepare(StunMessage* request) { - request->SetType(STUN_BINDING_REQUEST); - std::string username; - connection_->port()->CreateStunUsername( - connection_->remote_candidate().username(), &username); - // Note that the order of attributes does not impact the parsing on the - // receiver side. The attribute is retrieved then by iterating and matching - // over all parsed attributes. See StunMessage::GetAttribute. - request->AddAttribute( - std::make_unique(STUN_ATTR_USERNAME, username)); - - // connection_ already holds this ping, so subtract one from count. 
- if (connection_->port()->send_retransmit_count_attribute()) { - request->AddAttribute(std::make_unique( - STUN_ATTR_RETRANSMIT_COUNT, - static_cast(connection_->pings_since_last_response_.size() - - 1))); - } - uint32_t network_info = connection_->port()->Network()->id(); - network_info = (network_info << 16) | connection_->port()->network_cost(); - request->AddAttribute(std::make_unique( - STUN_ATTR_GOOG_NETWORK_INFO, network_info)); - - if (webrtc::field_trial::IsEnabled( - "WebRTC-PiggybackIceCheckAcknowledgement") && - connection_->last_ping_id_received()) { - request->AddAttribute(std::make_unique( - STUN_ATTR_GOOG_LAST_ICE_CHECK_RECEIVED, - connection_->last_ping_id_received().value())); - } - - // Adding ICE_CONTROLLED or ICE_CONTROLLING attribute based on the role. - if (connection_->port()->GetIceRole() == ICEROLE_CONTROLLING) { - request->AddAttribute(std::make_unique( - STUN_ATTR_ICE_CONTROLLING, connection_->port()->IceTiebreaker())); - // We should have either USE_CANDIDATE attribute or ICE_NOMINATION - // attribute but not both. That was enforced in p2ptransportchannel. - if (connection_->use_candidate_attr()) { - request->AddAttribute( - std::make_unique(STUN_ATTR_USE_CANDIDATE)); - } - if (connection_->nomination() && - connection_->nomination() != connection_->acked_nomination()) { - request->AddAttribute(std::make_unique( - STUN_ATTR_NOMINATION, connection_->nomination())); - } - } else if (connection_->port()->GetIceRole() == ICEROLE_CONTROLLED) { - request->AddAttribute(std::make_unique( - STUN_ATTR_ICE_CONTROLLED, connection_->port()->IceTiebreaker())); - } else { - RTC_DCHECK_NOTREACHED(); - } - - // Adding PRIORITY Attribute. - // Changing the type preference to Peer Reflexive and local preference - // and component id information is unchanged from the original priority. - // priority = (2^24)*(type preference) + - // (2^8)*(local preference) + - // (2^0)*(256 - component ID) - uint32_t type_preference = - (connection_->local_candidate().protocol() == TCP_PROTOCOL_NAME) - ? ICE_TYPE_PREFERENCE_PRFLX_TCP - : ICE_TYPE_PREFERENCE_PRFLX; - uint32_t prflx_priority = - type_preference << 24 | - (connection_->local_candidate().priority() & 0x00FFFFFF); - request->AddAttribute(std::make_unique( - STUN_ATTR_PRIORITY, prflx_priority)); - - if (connection_->field_trials_->enable_goog_ping && - !connection_->remote_support_goog_ping_.has_value()) { - // Check if remote supports GOOG PING by announcing which version we - // support. This is sent on all STUN_BINDING_REQUEST until we get a - // STUN_BINDING_RESPONSE. 
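The PRIORITY attribute assembled a little earlier in the removed block keeps the lower 24 bits of the local candidate's priority (local preference and component) and swaps in the peer-reflexive type preference for the top 8 bits, per the formula in the comment. A quick worked example of that bit manipulation; the type-preference values (126 for host, 110 for peer-reflexive) are the RFC 8445 recommendations and are used here only for illustration:

#include <cstdint>
#include <cstdio>

// priority = (type preference << 24) | (local preference << 8) | (256 - component id)
uint32_t IcePriority(uint32_t type_pref, uint32_t local_pref,
                     uint32_t component_id) {
  return (type_pref << 24) | (local_pref << 8) | (256 - component_id);
}

int main() {
  // A host candidate (type preference 126) for component 1.
  uint32_t local = IcePriority(126, 65535, 1);

  // The connectivity-check PRIORITY attribute replaces only the top 8 bits
  // with the peer-reflexive type preference (110) and keeps the rest.
  uint32_t prflx = (uint32_t{110} << 24) | (local & 0x00FFFFFF);

  std::printf("local=0x%08x prflx=0x%08x\n", static_cast<unsigned>(local),
              static_cast<unsigned>(prflx));
  // local=0x7effffff prflx=0x6effffff
}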
-    auto list =
-        StunAttribute::CreateUInt16ListAttribute(STUN_ATTR_GOOG_MISC_INFO);
-    list->AddTypeAtIndex(kSupportGoogPingVersionRequestIndex, kGoogPingVersion);
-    request->AddAttribute(std::move(list));
-  }
-
-  if (connection_->ShouldSendGoogPing(request)) {
-    request->SetType(GOOG_PING_REQUEST);
-    request->ClearAttributes();
-    request->AddMessageIntegrity32(connection_->remote_candidate().password());
-  } else {
-    request->AddMessageIntegrity(connection_->remote_candidate().password());
-    request->AddFingerprint();
-  }
-}
-
-void ConnectionRequest::OnResponse(StunMessage* response) {
+class Connection::ConnectionRequest : public StunRequest {
+ public:
+  ConnectionRequest(StunRequestManager& manager,
+                    Connection* connection,
+                    std::unique_ptr<IceMessage> message);
+  void OnResponse(StunMessage* response) override;
+  void OnErrorResponse(StunMessage* response) override;
+  void OnTimeout() override;
+  void OnSent() override;
+  int resend_delay() override;
+
+ private:
+  Connection* const connection_;
+};
+
+Connection::ConnectionRequest::ConnectionRequest(
+    StunRequestManager& manager,
+    Connection* connection,
+    std::unique_ptr<IceMessage> message)
+    : StunRequest(manager, std::move(message)), connection_(connection) {}
+
+void Connection::ConnectionRequest::OnResponse(StunMessage* response) {
+  RTC_DCHECK_RUN_ON(connection_->network_thread_);
   connection_->OnConnectionRequestResponse(this, response);
 }
 
-void ConnectionRequest::OnErrorResponse(StunMessage* response) {
+void Connection::ConnectionRequest::OnErrorResponse(StunMessage* response) {
+  RTC_DCHECK_RUN_ON(connection_->network_thread_);
   connection_->OnConnectionRequestErrorResponse(this, response);
 }
 
-void ConnectionRequest::OnTimeout() {
+void Connection::ConnectionRequest::OnTimeout() {
+  RTC_DCHECK_RUN_ON(connection_->network_thread_);
   connection_->OnConnectionRequestTimeout(this);
 }
 
-void ConnectionRequest::OnSent() {
+void Connection::ConnectionRequest::OnSent() {
+  RTC_DCHECK_RUN_ON(connection_->network_thread_);
   connection_->OnConnectionRequestSent(this);
   // Each request is sent only once. After a single delay, the request will
   // time out.
-  timeout_ = true;
+  set_timed_out();
 }
 
-int ConnectionRequest::resend_delay() {
+int Connection::ConnectionRequest::resend_delay() {
   return CONNECTION_RESPONSE_TIMEOUT;
 }
 
-Connection::Connection(Port* port,
+Connection::Connection(rtc::WeakPtr<Port> port,
                        size_t index,
                        const Candidate& remote_candidate)
-    : id_(rtc::CreateRandomId()),
-      port_(port),
-      local_candidate_index_(index),
+    : network_thread_(port->thread()),
+      id_(rtc::CreateRandomId()),
+      port_(std::move(port)),
+      local_candidate_(port_->Candidates()[index]),
       remote_candidate_(remote_candidate),
       recv_rate_tracker_(100, 10u),
       send_rate_tracker_(100, 10u),
@@ -293,30 +223,36 @@ Connection::Connection(Port* port,
       connected_(true),
       pruned_(false),
       use_candidate_attr_(false),
-      remote_ice_mode_(ICEMODE_FULL),
-      requests_(port->thread()),
+      requests_(port_->thread(),
+                [this](const void* data, size_t size, StunRequest* request) {
+                  OnSendStunPacket(data, size, request);
+                }),
       rtt_(DEFAULT_RTT),
      last_ping_sent_(0),
      last_ping_received_(0),
      last_data_received_(0),
      last_ping_response_received_(0),
-      reported_(false),
      state_(IceCandidatePairState::WAITING),
      time_created_ms_(rtc::TimeMillis()),
      field_trials_(&kDefaultFieldTrials),
      rtt_estimate_(DEFAULT_RTT_ESTIMATE_HALF_TIME_MS) {
-  // All of our connections start in WAITING state.
-  // TODO(mallinath) - Start connections from STATE_FROZEN.
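ConnectionRequest is now a private nested class of Connection whose only job is to forward the StunRequest callbacks (OnResponse, OnErrorResponse, OnTimeout, OnSent) straight back to its owning Connection on the network thread. A minimal, self-contained sketch of that forwarding shape, with illustrative names rather than the WebRTC types:

#include <iostream>
#include <memory>
#include <string>

class Owner;

// The request object stays "dumb": it only remembers its owner and forwards
// completion events, mirroring Connection::ConnectionRequest above.
class Request {
 public:
  explicit Request(Owner* owner) : owner_(owner) {}
  void CompletedWith(const std::string& response);  // forwards to the owner

 private:
  Owner* const owner_;  // The owner outlives the request.
};

class Owner {
 public:
  std::unique_ptr<Request> CreateRequest() {
    return std::make_unique<Request>(this);
  }
  void OnRequestCompleted(Request* /*req*/, const std::string& response) {
    std::cout << "owner saw response: " << response << "\n";
  }
};

void Request::CompletedWith(const std::string& response) {
  owner_->OnRequestCompleted(this, response);
}

int main() {
  Owner owner;
  auto req = owner.CreateRequest();
  req->CompletedWith("binding success");
}

Keeping every state transition inside Connection is what makes the per-method thread checks added in this patch meaningful.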
- // Wire up to send stun packets - requests_.SignalSendPacket.connect(this, &Connection::OnSendStunPacket); + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(port_); RTC_LOG(LS_INFO) << ToString() << ": Connection created"; } -Connection::~Connection() {} +Connection::~Connection() { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(!port_); +} + +webrtc::TaskQueueBase* Connection::network_thread() const { + return network_thread_; +} const Candidate& Connection::local_candidate() const { - RTC_DCHECK(local_candidate_index_ < port_->Candidates().size()); - return port_->Candidates()[local_candidate_index_]; + RTC_DCHECK_RUN_ON(network_thread_); + return local_candidate_; } const Candidate& Connection::remote_candidate() const { @@ -332,6 +268,9 @@ int Connection::generation() const { } uint64_t Connection::priority() const { + if (!port_) + return 0; + uint64_t priority = 0; // RFC 5245 - 5.7.2. Computing Pair Priority and Ordering Pairs // Let G be the priority for the candidate provided by the controlling @@ -357,6 +296,7 @@ uint64_t Connection::priority() const { } void Connection::set_write_state(WriteState value) { + RTC_DCHECK_RUN_ON(network_thread_); WriteState old_value = write_state_; write_state_ = value; if (value != old_value) { @@ -367,6 +307,7 @@ void Connection::set_write_state(WriteState value) { } void Connection::UpdateReceiving(int64_t now) { + RTC_DCHECK_RUN_ON(network_thread_); bool receiving; if (last_ping_sent() < last_ping_response_received()) { // We consider any candidate pair that has its last connectivity check @@ -392,6 +333,7 @@ void Connection::UpdateReceiving(int64_t now) { } void Connection::set_state(IceCandidatePairState state) { + RTC_DCHECK_RUN_ON(network_thread_); IceCandidatePairState old_state = state_; state_ = state; if (state != old_state) { @@ -400,6 +342,7 @@ void Connection::set_state(IceCandidatePairState state) { } void Connection::set_connected(bool value) { + RTC_DCHECK_RUN_ON(network_thread_); bool old_value = connected_; connected_ = value; if (value != old_value) { @@ -408,27 +351,74 @@ void Connection::set_connected(bool value) { } } +bool Connection::use_candidate_attr() const { + RTC_DCHECK_RUN_ON(network_thread_); + return use_candidate_attr_; +} + void Connection::set_use_candidate_attr(bool enable) { + RTC_DCHECK_RUN_ON(network_thread_); use_candidate_attr_ = enable; } +void Connection::set_nomination(uint32_t value) { + RTC_DCHECK_RUN_ON(network_thread_); + nomination_ = value; +} + +uint32_t Connection::remote_nomination() const { + RTC_DCHECK_RUN_ON(network_thread_); + return remote_nomination_; +} + +bool Connection::nominated() const { + RTC_DCHECK_RUN_ON(network_thread_); + return acked_nomination_ || remote_nomination_; +} + int Connection::unwritable_timeout() const { + RTC_DCHECK_RUN_ON(network_thread_); return unwritable_timeout_.value_or(CONNECTION_WRITE_CONNECT_TIMEOUT); } +void Connection::set_unwritable_timeout(const absl::optional& value_ms) { + RTC_DCHECK_RUN_ON(network_thread_); + unwritable_timeout_ = value_ms; +} + int Connection::unwritable_min_checks() const { + RTC_DCHECK_RUN_ON(network_thread_); return unwritable_min_checks_.value_or(CONNECTION_WRITE_CONNECT_FAILURES); } +void Connection::set_unwritable_min_checks(const absl::optional& value) { + RTC_DCHECK_RUN_ON(network_thread_); + unwritable_min_checks_ = value; +} + int Connection::inactive_timeout() const { + RTC_DCHECK_RUN_ON(network_thread_); return inactive_timeout_.value_or(CONNECTION_WRITE_TIMEOUT); } +void Connection::set_inactive_timeout(const 
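Most of the small accessors above now assert the calling thread with RTC_DCHECK_RUN_ON(network_thread_), matching the RTC_GUARDED_BY annotations added to connection.h later in this patch. A rough, dependency-free analogue of what such a check enforces, using std::thread::id instead of the WebRTC macros:

#include <cassert>
#include <thread>

// Illustrative only: the object records the thread it was created on and
// asserts that every later access happens on that same thread.
class Counter {
 public:
  Counter() : owner_(std::this_thread::get_id()) {}
  void Increment() {
    assert(std::this_thread::get_id() == owner_ && "wrong thread");
    ++value_;  // Only ever touched on the owning thread.
  }
  int value() const {
    assert(std::this_thread::get_id() == owner_ && "wrong thread");
    return value_;
  }

 private:
  const std::thread::id owner_;
  int value_ = 0;
};

int main() {
  Counter c;
  c.Increment();
  return c.value() == 1 ? 0 : 1;
}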
absl::optional& value) { + RTC_DCHECK_RUN_ON(network_thread_); + inactive_timeout_ = value; +} + int Connection::receiving_timeout() const { + RTC_DCHECK_RUN_ON(network_thread_); return receiving_timeout_.value_or(WEAK_CONNECTION_RECEIVE_TIMEOUT); } +void Connection::set_receiving_timeout( + absl::optional receiving_timeout_ms) { + RTC_DCHECK_RUN_ON(network_thread_); + receiving_timeout_ = receiving_timeout_ms; +} + void Connection::SetIceFieldTrials(const IceFieldTrials* field_trials) { + RTC_DCHECK_RUN_ON(network_thread_); field_trials_ = field_trials; rtt_estimate_.SetHalfTime(field_trials->rtt_estimate_halftime_ms); } @@ -436,6 +426,7 @@ void Connection::SetIceFieldTrials(const IceFieldTrials* field_trials) { void Connection::OnSendStunPacket(const void* data, size_t size, StunRequest* req) { + RTC_DCHECK_RUN_ON(network_thread_); rtc::PacketOptions options(port_->StunDscpValue()); options.info_signaled_after_sent.packet_type = rtc::PacketType::kIceConnectivityCheck; @@ -452,6 +443,7 @@ void Connection::OnSendStunPacket(const void* data, void Connection::OnReadPacket(const char* data, size_t size, int64_t packet_time_us) { + RTC_DCHECK_RUN_ON(network_thread_); std::unique_ptr msg; std::string remote_ufrag; const rtc::SocketAddress& addr(remote_candidate_.address()); @@ -480,7 +472,28 @@ void Connection::OnReadPacket(const char* data, // If this is a STUN response, then update the writable bit. // Log at LS_INFO if we receive a ping on an unwritable connection. rtc::LoggingSeverity sev = (!writable() ? rtc::LS_INFO : rtc::LS_VERBOSE); - msg->ValidateMessageIntegrity(remote_candidate().password()); + switch (msg->integrity()) { + case StunMessage::IntegrityStatus::kNotSet: + // Late computation of integrity status, but not an error. + msg->ValidateMessageIntegrity(remote_candidate().password()); + break; + case StunMessage::IntegrityStatus::kIntegrityOk: + if (remote_candidate().password() != msg->password()) { + // Password has changed. Recheck message. + // TODO(crbug.com/1177125): Redesign logic to check only once. + msg->RevalidateMessageIntegrity(remote_candidate().password()); + } + break; + case StunMessage::IntegrityStatus::kIntegrityBad: + // Possibly we have a new password to try. + // TODO(crbug.com/1177125): Redesign logic to check only once. + msg->RevalidateMessageIntegrity(remote_candidate().password()); + break; + default: + // This shouldn't happen. + RTC_DCHECK_NOTREACHED(); + break; + } switch (msg->type()) { case STUN_BINDING_REQUEST: RTC_LOG_V(sev) << ToString() << ": Received " @@ -509,7 +522,7 @@ void Connection::OnReadPacket(const char* data, if (msg->IntegrityOk()) { requests_.CheckResponse(msg.get()); } - // Otherwise silently discard the response message. + // Otherwise silently discard the response. break; // Remote end point sent an STUN indication instead of regular binding @@ -535,10 +548,10 @@ void Connection::OnReadPacket(const char* data, } void Connection::HandleStunBindingOrGoogPingRequest(IceMessage* msg) { + RTC_DCHECK_RUN_ON(network_thread_); // This connection should now be receiving. 
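The integrity handling above caches a per-message status and only revalidates when the remote ICE password may have changed or a previous check failed. A self-contained sketch of that decision rule; the enum and function here are stand-ins for StunMessage::IntegrityStatus and ValidateMessageIntegrity/RevalidateMessageIntegrity:

#include <string>

enum class IntegrityStatus { kNotSet, kIntegrityOk, kIntegrityBad };

// Returns true if MESSAGE-INTEGRITY should be (re)checked against
// `current_password`.
bool NeedsIntegrityCheck(IntegrityStatus cached,
                         const std::string& validated_password,
                         const std::string& current_password) {
  switch (cached) {
    case IntegrityStatus::kNotSet:
      return true;  // Never checked yet.
    case IntegrityStatus::kIntegrityOk:
      // Only recheck if the remote ICE password changed since validation.
      return validated_password != current_password;
    case IntegrityStatus::kIntegrityBad:
      return true;  // Possibly a new password will make it pass.
  }
  return true;
}

int main() {
  return NeedsIntegrityCheck(IntegrityStatus::kIntegrityOk, "pw1", "pw2") ? 0 : 1;
}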
ReceivedPing(msg->transaction_id()); - if (webrtc::field_trial::IsEnabled("WebRTC-ExtraICEPing") && - last_ping_response_received_ == 0) { + if (field_trials_->extra_ice_ping && last_ping_response_received_ == 0) { if (local_candidate().type() == RELAY_PORT_TYPE || local_candidate().type() == PRFLX_PORT_TYPE || remote_candidate().type() == RELAY_PORT_TYPE || @@ -627,30 +640,28 @@ void Connection::HandleStunBindingOrGoogPingRequest(IceMessage* msg) { } } - if (webrtc::field_trial::IsEnabled( - "WebRTC-PiggybackIceCheckAcknowledgement")) { + if (field_trials_->piggyback_ice_check_acknowledgement) { HandlePiggybackCheckAcknowledgementIfAny(msg); } } -void Connection::SendStunBindingResponse(const StunMessage* request) { - RTC_DCHECK(request->type() == STUN_BINDING_REQUEST); +void Connection::SendStunBindingResponse(const StunMessage* message) { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK_EQ(message->type(), STUN_BINDING_REQUEST); - // Retrieve the username from the request. + // Retrieve the username from the `message`. const StunByteStringAttribute* username_attr = - request->GetByteString(STUN_ATTR_USERNAME); + message->GetByteString(STUN_ATTR_USERNAME); RTC_DCHECK(username_attr != NULL); if (username_attr == NULL) { // No valid username, skip the response. return; } - // Fill in the response message. - StunMessage response; - response.SetType(STUN_BINDING_RESPONSE); - response.SetTransactionID(request->transaction_id()); + // Fill in the response. + StunMessage response(STUN_BINDING_RESPONSE, message->transaction_id()); const StunUInt32Attribute* retransmit_attr = - request->GetUInt32(STUN_ATTR_RETRANSMIT_COUNT); + message->GetUInt32(STUN_ATTR_RETRANSMIT_COUNT); if (retransmit_attr) { // Inherit the incoming retransmit value in the response so the other side // can see our view of lost pings. @@ -669,8 +680,8 @@ void Connection::SendStunBindingResponse(const StunMessage* request) { STUN_ATTR_XOR_MAPPED_ADDRESS, remote_candidate_.address())); if (field_trials_->announce_goog_ping) { - // Check if request contains a announce-request. - auto goog_misc = request->GetUInt16List(STUN_ATTR_GOOG_MISC_INFO); + // Check if message contains a announce-request. + auto goog_misc = message->GetUInt16List(STUN_ATTR_GOOG_MISC_INFO); if (goog_misc != nullptr && goog_misc->Size() >= kSupportGoogPingVersionRequestIndex && // Which version can we handle...currently any >= 1 @@ -689,22 +700,22 @@ void Connection::SendStunBindingResponse(const StunMessage* request) { SendResponseMessage(response); } -void Connection::SendGoogPingResponse(const StunMessage* request) { - RTC_DCHECK(request->type() == GOOG_PING_REQUEST); +void Connection::SendGoogPingResponse(const StunMessage* message) { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(message->type() == GOOG_PING_REQUEST); - // Fill in the response message. - StunMessage response; - response.SetType(GOOG_PING_RESPONSE); - response.SetTransactionID(request->transaction_id()); + // Fill in the response. + StunMessage response(GOOG_PING_RESPONSE, message->transaction_id()); response.AddMessageIntegrity32(local_candidate().password()); SendResponseMessage(response); } void Connection::SendResponseMessage(const StunMessage& response) { + RTC_DCHECK_RUN_ON(network_thread_); // Where I send the response. const rtc::SocketAddress& addr = remote_candidate_.address(); - // Send the response message. + // Send the response. 
rtc::ByteBufferWriter buf; response.Write(&buf); rtc::PacketOptions options(port_->StunDscpValue()); @@ -731,11 +742,28 @@ void Connection::SendResponseMessage(const StunMessage& response) { } } +uint32_t Connection::acked_nomination() const { + RTC_DCHECK_RUN_ON(network_thread_); + return acked_nomination_; +} + +void Connection::set_remote_nomination(uint32_t remote_nomination) { + RTC_DCHECK_RUN_ON(network_thread_); + remote_nomination_ = remote_nomination; +} + void Connection::OnReadyToSend() { + RTC_DCHECK_RUN_ON(network_thread_); SignalReadyToSend(this); } +bool Connection::pruned() const { + RTC_DCHECK_RUN_ON(network_thread_); + return pruned_; +} + void Connection::Prune() { + RTC_DCHECK_RUN_ON(network_thread_); if (!pruned_ || active()) { RTC_LOG(LS_INFO) << ToString() << ": Connection pruned"; pruned_ = true; @@ -745,27 +773,56 @@ void Connection::Prune() { } void Connection::Destroy() { - // TODO(deadbeef, nisse): This may leak if an application closes a - // PeerConnection and then quickly destroys the PeerConnectionFactory (along - // with the networking thread on which this message is posted). Also affects - // tests, with a workaround in - // AutoSocketServerThread::~AutoSocketServerThread. - RTC_LOG(LS_VERBOSE) << ToString() << ": Connection destroyed"; - port_->thread()->Post(RTC_FROM_HERE, this, MSG_DELETE); - LogCandidatePairConfig(webrtc::IceCandidatePairConfigType::kDestroyed); + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(port_) << "Calling Destroy() twice?"; + if (port_) + port_->DestroyConnection(this); } -void Connection::FailAndDestroy() { - set_state(IceCandidatePairState::FAILED); - Destroy(); +bool Connection::Shutdown() { + RTC_DCHECK_RUN_ON(network_thread_); + if (!port_) + return false; // already shut down. + + RTC_DLOG(LS_VERBOSE) << ToString() << ": Connection destroyed"; + + // Fire the 'destroyed' event before deleting the object. This is done + // intentionally to avoid a situation whereby the signal might have dangling + // pointers to objects that have been deleted by the time the async task + // that deletes the connection object runs. + auto destroyed_signals = SignalDestroyed; + SignalDestroyed.disconnect_all(); + destroyed_signals(this); + + LogCandidatePairConfig(webrtc::IceCandidatePairConfigType::kDestroyed); + + // Reset the `port_` after logging and firing the destroyed signal since + // information required for logging needs access to `port_`. + port_.reset(); + + return true; } void Connection::FailAndPrune() { + RTC_DCHECK_RUN_ON(network_thread_); + + // TODO(bugs.webrtc.org/13865): There's a circular dependency between Port + // and Connection. In some cases (Port dtor), a Connection object is deleted + // without using the `Destroy` method (port_ won't be nulled and some + // functionality won't run as expected), while in other cases + // the Connection object is deleted asynchronously and in that case `port_` + // will be nulled. + // In such a case, there's a chance that the Port object gets + // deleted before the Connection object ends up being deleted. 
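Destroy() now just asks the owning Port to drop the connection, while Shutdown() is idempotent and deliberately fires SignalDestroyed (and logs the destroyed config) while `port_` is still valid, resetting the Port reference only afterwards. A small sketch of that "notify first, release once" ordering, with hypothetical names:

#include <functional>
#include <iostream>
#include <memory>
#include <string>
#include <vector>

// Hypothetical resource with close listeners: detach the listener list,
// notify while the resource is still alive, then release it. Returns false
// if shutdown already ran, like Connection::Shutdown().
class Link {
 public:
  explicit Link(std::string name)
      : resource_(std::make_unique<std::string>(std::move(name))) {}

  void AddOnClosed(std::function<void(const std::string&)> cb) {
    on_closed_.push_back(std::move(cb));
  }

  bool Shutdown() {
    if (!resource_)
      return false;  // Already shut down.
    auto callbacks = std::move(on_closed_);  // Detach before notifying.
    on_closed_.clear();
    for (auto& cb : callbacks)
      cb(*resource_);   // Observers still see a valid resource here.
    resource_.reset();  // Release only after everyone has been told.
    return true;
  }

 private:
  std::unique_ptr<std::string> resource_;
  std::vector<std::function<void(const std::string&)>> on_closed_;
};

int main() {
  Link link("conn-42");
  link.AddOnClosed([](const std::string& n) { std::cout << n << " closed\n"; });
  std::cout << link.Shutdown() << " " << link.Shutdown() << "\n";  // prints: 1 0
}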
+ if (!port_) + return; + set_state(IceCandidatePairState::FAILED); Prune(); } void Connection::PrintPingsSinceLastResponse(std::string* s, size_t max) { + RTC_DCHECK_RUN_ON(network_thread_); rtc::StringBuilder oss; if (pings_since_last_response_.size() > max) { for (size_t i = 0; i < max; i++) { @@ -781,7 +838,21 @@ void Connection::PrintPingsSinceLastResponse(std::string* s, size_t max) { *s = oss.str(); } +bool Connection::selected() const { + RTC_DCHECK_RUN_ON(network_thread_); + return selected_; +} + +void Connection::set_selected(bool selected) { + RTC_DCHECK_RUN_ON(network_thread_); + selected_ = selected; +} + void Connection::UpdateState(int64_t now) { + RTC_DCHECK_RUN_ON(network_thread_); + if (!port_) + return; + int rtt = ConservativeRTTEstimate(rtt_); if (RTC_LOG_CHECK_LEVEL(LS_VERBOSE)) { @@ -834,13 +905,31 @@ void Connection::UpdateState(int64_t now) { // Update the receiving state. UpdateReceiving(now); if (dead(now)) { - Destroy(); + port_->DestroyConnectionAsync(this); } } +void Connection::UpdateLocalIceParameters(int component, + absl::string_view username_fragment, + absl::string_view password) { + RTC_DCHECK_RUN_ON(network_thread_); + local_candidate_.set_component(component); + local_candidate_.set_username(username_fragment); + local_candidate_.set_password(password); +} + +int64_t Connection::last_ping_sent() const { + RTC_DCHECK_RUN_ON(network_thread_); + return last_ping_sent_; +} + void Connection::Ping(int64_t now) { + RTC_DCHECK_RUN_ON(network_thread_); + if (!port_) + return; + last_ping_sent_ = now; - ConnectionRequest* req = new ConnectionRequest(this); + // If not using renomination, we use "1" to mean "nominated" and "0" to mean // "not nominated". If using renomination, values greater than 1 are used for // re-nominated pairs. @@ -848,28 +937,126 @@ void Connection::Ping(int64_t now) { if (nomination_ > 0) { nomination = nomination_; } + + auto req = + std::make_unique(requests_, this, BuildPingRequest()); + + if (ShouldSendGoogPing(req->msg())) { + auto message = std::make_unique(GOOG_PING_REQUEST, req->id()); + message->AddMessageIntegrity32(remote_candidate_.password()); + req.reset(new ConnectionRequest(requests_, this, std::move(message))); + } + pings_since_last_response_.push_back(SentPing(req->id(), now, nomination)); RTC_LOG(LS_VERBOSE) << ToString() << ": Sending STUN ping, id=" << rtc::hex_encode(req->id()) << ", nomination=" << nomination_; - requests_.Send(req); + requests_.Send(req.release()); state_ = IceCandidatePairState::IN_PROGRESS; num_pings_sent_++; } +std::unique_ptr Connection::BuildPingRequest() { + auto message = std::make_unique(STUN_BINDING_REQUEST); + // Note that the order of attributes does not impact the parsing on the + // receiver side. The attribute is retrieved then by iterating and matching + // over all parsed attributes. See StunMessage::GetAttribute. + message->AddAttribute(std::make_unique( + STUN_ATTR_USERNAME, + port()->CreateStunUsername(remote_candidate_.username()))); + message->AddAttribute(std::make_unique( + STUN_ATTR_GOOG_NETWORK_INFO, + (port_->Network()->id() << 16) | port_->network_cost())); + + if (field_trials_->piggyback_ice_check_acknowledgement && + last_ping_id_received_) { + message->AddAttribute(std::make_unique( + STUN_ATTR_GOOG_LAST_ICE_CHECK_RECEIVED, *last_ping_id_received_)); + } + + // Adding ICE_CONTROLLED or ICE_CONTROLLING attribute based on the role. 
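Several of the methods above now start with an `if (!port_)` early return because `port_` is an rtc::WeakPtr<Port> that can already be gone (priority() earlier in the file returns 0 in that case). rtc::WeakPtr is single-threaded, but the guard-before-use idea is the same as with std::weak_ptr, sketched here with hypothetical types:

#include <iostream>
#include <memory>

struct Port {
  int network_cost() const { return 10; }
};

// Illustrative only: return a fallback when the port is already destroyed,
// the way the guarded Connection methods bail out when !port_.
int CostOrZero(const std::weak_ptr<Port>& weak_port) {
  if (auto port = weak_port.lock())
    return port->network_cost();
  return 0;
}

int main() {
  auto port = std::make_shared<Port>();
  std::weak_ptr<Port> weak = port;
  std::cout << CostOrZero(weak) << "\n";  // 10
  port.reset();
  std::cout << CostOrZero(weak) << "\n";  // 0
}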
+ IceRole ice_role = port_->GetIceRole(); + RTC_DCHECK(ice_role == ICEROLE_CONTROLLING || ice_role == ICEROLE_CONTROLLED); + message->AddAttribute(std::make_unique( + ice_role == ICEROLE_CONTROLLING ? STUN_ATTR_ICE_CONTROLLING + : STUN_ATTR_ICE_CONTROLLED, + port_->IceTiebreaker())); + + if (ice_role == ICEROLE_CONTROLLING) { + // We should have either USE_CANDIDATE attribute or ICE_NOMINATION + // attribute but not both. That was enforced in p2ptransportchannel. + if (use_candidate_attr()) { + message->AddAttribute( + std::make_unique(STUN_ATTR_USE_CANDIDATE)); + } + if (nomination_ && nomination_ != acked_nomination()) { + message->AddAttribute(std::make_unique( + STUN_ATTR_NOMINATION, nomination_)); + } + } + + message->AddAttribute(std::make_unique( + STUN_ATTR_PRIORITY, prflx_priority())); + + if (port()->send_retransmit_count_attribute()) { + message->AddAttribute(std::make_unique( + STUN_ATTR_RETRANSMIT_COUNT, pings_since_last_response_.size())); + } + if (field_trials_->enable_goog_ping && + !remote_support_goog_ping_.has_value()) { + // Check if remote supports GOOG PING by announcing which version we + // support. This is sent on all STUN_BINDING_REQUEST until we get a + // STUN_BINDING_RESPONSE. + auto list = + StunAttribute::CreateUInt16ListAttribute(STUN_ATTR_GOOG_MISC_INFO); + list->AddTypeAtIndex(kSupportGoogPingVersionRequestIndex, kGoogPingVersion); + message->AddAttribute(std::move(list)); + } + message->AddMessageIntegrity(remote_candidate_.password()); + message->AddFingerprint(); + + return message; +} + +int64_t Connection::last_ping_response_received() const { + RTC_DCHECK_RUN_ON(network_thread_); + return last_ping_response_received_; +} + +const absl::optional& Connection::last_ping_id_received() const { + RTC_DCHECK_RUN_ON(network_thread_); + return last_ping_id_received_; +} + +// Used to check if any STUN ping response has been received. +int Connection::rtt_samples() const { + RTC_DCHECK_RUN_ON(network_thread_); + return rtt_samples_; +} + +// Called whenever a valid ping is received on this connection. This is +// public because the connection intercepts the first ping for us. 
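BuildPingRequest() advertises goog-ping support by writing a version number at a fixed index of the GOOG_MISC_INFO uint16 list (kSupportGoogPingVersionRequestIndex from the top of the file); the binding-response path then checks the announced version, currently accepting anything >= 1. A self-contained sketch of that index-based handshake; the names are stand-ins, not the STUN attribute API:

#include <cstddef>
#include <cstdint>
#include <vector>

// Hypothetical stand-in for the GOOG_MISC_INFO uint16 list.
enum class MiscInfoRequestIndex : int { kSupportGoogPingVersion = 0 };

std::vector<uint16_t> BuildMiscInfo(uint16_t goog_ping_version) {
  const auto index =
      static_cast<std::size_t>(MiscInfoRequestIndex::kSupportGoogPingVersion);
  std::vector<uint16_t> list(index + 1, 0);
  list[index] = goog_ping_version;  // Announce the supported version.
  return list;
}

bool RemoteSupportsGoogPing(const std::vector<uint16_t>& response_list) {
  const auto index =
      static_cast<std::size_t>(MiscInfoRequestIndex::kSupportGoogPingVersion);
  // As in the binding-response check: any announced version >= 1 will do.
  return response_list.size() > index && response_list[index] >= 1;
}

int main() {
  return RemoteSupportsGoogPing(BuildMiscInfo(/*goog_ping_version=*/1)) ? 0 : 1;
}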
+int64_t Connection::last_ping_received() const { + RTC_DCHECK_RUN_ON(network_thread_); + return last_ping_received_; +} + void Connection::ReceivedPing(const absl::optional& request_id) { + RTC_DCHECK_RUN_ON(network_thread_); last_ping_received_ = rtc::TimeMillis(); last_ping_id_received_ = request_id; UpdateReceiving(last_ping_received_); } void Connection::HandlePiggybackCheckAcknowledgementIfAny(StunMessage* msg) { + RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(msg->type() == STUN_BINDING_REQUEST || msg->type() == GOOG_PING_REQUEST); const StunByteStringAttribute* last_ice_check_received_attr = msg->GetByteString(STUN_ATTR_GOOG_LAST_ICE_CHECK_RECEIVED); if (last_ice_check_received_attr) { - const std::string request_id = last_ice_check_received_attr->GetString(); + const absl::string_view request_id = + last_ice_check_received_attr->string_view(); auto iter = absl::c_find_if( pings_since_last_response_, [&request_id](const SentPing& ping) { return ping.id == request_id; }); @@ -884,10 +1071,21 @@ void Connection::HandlePiggybackCheckAcknowledgementIfAny(StunMessage* msg) { } } +int64_t Connection::last_send_data() const { + RTC_DCHECK_RUN_ON(network_thread_); + return last_send_data_; +} + +int64_t Connection::last_data_received() const { + RTC_DCHECK_RUN_ON(network_thread_); + return last_data_received_; +} + void Connection::ReceivedPingResponse( int rtt, - const std::string& request_id, + absl::string_view request_id, const absl::optional& nomination) { + RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK_GE(rtt, 0); // We've already validated that this is a STUN binding response with // the correct local and remote username for this connection. @@ -916,7 +1114,39 @@ void Connection::ReceivedPingResponse( rtt_samples_++; } +Connection::WriteState Connection::write_state() const { + RTC_DCHECK_RUN_ON(network_thread_); + return write_state_; +} + +bool Connection::writable() const { + RTC_DCHECK_RUN_ON(network_thread_); + return write_state_ == STATE_WRITABLE; +} + +bool Connection::receiving() const { + RTC_DCHECK_RUN_ON(network_thread_); + return receiving_; +} + +// Determines whether the connection has finished connecting. This can only +// be false for TCP connections. +bool Connection::connected() const { + RTC_DCHECK_RUN_ON(network_thread_); + return connected_; +} + +bool Connection::weak() const { + return !(writable() && receiving() && connected()); +} + +bool Connection::active() const { + RTC_DCHECK_RUN_ON(network_thread_); + return write_state_ != STATE_WRITE_TIMEOUT; +} + bool Connection::dead(int64_t now) const { + RTC_DCHECK_RUN_ON(network_thread_); if (last_received() > 0) { // If it has ever received anything, we keep it alive // - if it has recevied last DEAD_CONNECTION_RECEIVE_TIMEOUT (30s) @@ -960,6 +1190,11 @@ bool Connection::dead(int64_t now) const { return now > (time_created_ms_ + MIN_CONNECTION_LIFETIME); } +int Connection::rtt() const { + RTC_DCHECK_RUN_ON(network_thread_); + return rtt_; +} + bool Connection::stable(int64_t now) const { // A connection is stable if it's RTT has converged and it isn't missing any // responses. 
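HandlePiggybackCheckAcknowledgementIfAny() matches the transaction id echoed by the peer (now read as an absl::string_view) against the in-flight pings. The lookup is just a linear find over SentPing records, roughly:

#include <algorithm>
#include <cstdint>
#include <string>
#include <string_view>
#include <vector>

// Mirrors Connection::SentPing: transaction id, send time, nomination value.
struct SentPing {
  std::string id;
  int64_t sent_time;
  uint32_t nomination;
};

// Returns the in-flight ping whose id matches the acknowledged id, or null.
const SentPing* FindAckedPing(const std::vector<SentPing>& in_flight,
                              std::string_view acked_id) {
  auto it = std::find_if(in_flight.begin(), in_flight.end(),
                         [&](const SentPing& p) {
                           return std::string_view(p.id) == acked_id;
                         });
  return it == in_flight.end() ? nullptr : &*it;
}

int main() {
  std::vector<SentPing> pings = {{"abc", 100, 1}, {"def", 200, 1}};
  const SentPing* hit = FindAckedPing(pings, "def");
  return (hit && hit->sent_time == 200) ? 0 : 1;
}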
We should send pings at a higher rate until the RTT converges @@ -978,49 +1213,66 @@ uint32_t Connection::ComputeNetworkCost() const { } std::string Connection::ToString() const { - const absl::string_view CONNECT_STATE_ABBREV[2] = { + RTC_DCHECK_RUN_ON(network_thread_); + constexpr absl::string_view CONNECT_STATE_ABBREV[2] = { "-", // not connected (false) "C", // connected (true) }; - const absl::string_view RECEIVE_STATE_ABBREV[2] = { + constexpr absl::string_view RECEIVE_STATE_ABBREV[2] = { "-", // not receiving (false) "R", // receiving (true) }; - const absl::string_view WRITE_STATE_ABBREV[4] = { + constexpr absl::string_view WRITE_STATE_ABBREV[4] = { "W", // STATE_WRITABLE "w", // STATE_WRITE_UNRELIABLE "-", // STATE_WRITE_INIT "x", // STATE_WRITE_TIMEOUT }; - const absl::string_view ICESTATE[4] = { + constexpr absl::string_view ICESTATE[4] = { "W", // STATE_WAITING "I", // STATE_INPROGRESS "S", // STATE_SUCCEEDED "F" // STATE_FAILED }; - const absl::string_view SELECTED_STATE_ABBREV[2] = { + constexpr absl::string_view SELECTED_STATE_ABBREV[2] = { "-", // candidate pair not selected (false) "S", // selected (true) }; + rtc::StringBuilder ss; + ss << "Conn[" << ToDebugId(); + + if (!port_) { + // No content or network names for pending delete. Temporarily substitute + // the names with a hash (rhyming with trash). + ss << ":#:#:"; + } else { + ss << ":" << port_->content_name() << ":" << port_->Network()->ToString() + << ":"; + } + const Candidate& local = local_candidate(); const Candidate& remote = remote_candidate(); - rtc::StringBuilder ss; - ss << "Conn[" << ToDebugId() << ":" << port_->content_name() << ":" - << port_->Network()->ToString() << ":" << local.id() << ":" - << local.component() << ":" << local.generation() << ":" << local.type() - << ":" << local.protocol() << ":" << local.address().ToSensitiveString() - << "->" << remote.id() << ":" << remote.component() << ":" - << remote.priority() << ":" << remote.type() << ":" << remote.protocol() - << ":" << remote.address().ToSensitiveString() << "|" - << CONNECT_STATE_ABBREV[connected()] << RECEIVE_STATE_ABBREV[receiving()] - << WRITE_STATE_ABBREV[write_state()] << ICESTATE[static_cast(state())] - << "|" << SELECTED_STATE_ABBREV[selected()] << "|" << remote_nomination() - << "|" << nomination() << "|" << priority() << "|"; + ss << local.id() << ":" << local.component() << ":" << local.generation() + << ":" << local.type() << ":" << local.protocol() << ":" + << local.address().ToSensitiveString() << "->" << remote.id() << ":" + << remote.component() << ":" << remote.priority() << ":" << remote.type() + << ":" << remote.protocol() << ":" << remote.address().ToSensitiveString() + << "|"; + + ss << CONNECT_STATE_ABBREV[connected_] << RECEIVE_STATE_ABBREV[receiving_] + << WRITE_STATE_ABBREV[write_state_] << ICESTATE[static_cast(state_)] + << "|" << SELECTED_STATE_ABBREV[selected_] << "|" << remote_nomination_ + << "|" << nomination_ << "|"; + + if (port_) + ss << priority() << "|"; + if (rtt_ < DEFAULT_RTT) { ss << rtt_ << "]"; } else { ss << "-]"; } + return ss.Release(); } @@ -1029,6 +1281,7 @@ std::string Connection::ToSensitiveString() const { } const webrtc::IceCandidatePairDescription& Connection::ToLogDescription() { + RTC_DCHECK_RUN_ON(network_thread_); if (log_description_.has_value()) { return log_description_.value(); } @@ -1052,8 +1305,14 @@ const webrtc::IceCandidatePairDescription& Connection::ToLogDescription() { return log_description_.value(); } +void Connection::set_ice_event_log(webrtc::IceEventLog* 
ice_event_log) { + RTC_DCHECK_RUN_ON(network_thread_); + ice_event_log_ = ice_event_log; +} + void Connection::LogCandidatePairConfig( webrtc::IceCandidatePairConfigType type) { + RTC_DCHECK_RUN_ON(network_thread_); if (ice_event_log_ == nullptr) { return; } @@ -1062,14 +1321,16 @@ void Connection::LogCandidatePairConfig( void Connection::LogCandidatePairEvent(webrtc::IceCandidatePairEventType type, uint32_t transaction_id) { + RTC_DCHECK_RUN_ON(network_thread_); if (ice_event_log_ == nullptr) { return; } ice_event_log_->LogCandidatePairEvent(type, id(), transaction_id); } -void Connection::OnConnectionRequestResponse(ConnectionRequest* request, +void Connection::OnConnectionRequestResponse(StunRequest* request, StunMessage* response) { + RTC_DCHECK_RUN_ON(network_thread_); // Log at LS_INFO if we receive a ping response on an unwritable // connection. rtc::LoggingSeverity sev = !writable() ? rtc::LS_INFO : rtc::LS_VERBOSE; @@ -1126,6 +1387,9 @@ void Connection::OnConnectionRequestResponse(ConnectionRequest* request, void Connection::OnConnectionRequestErrorResponse(ConnectionRequest* request, StunMessage* response) { + if (!port_) + return; + int error_code = response->GetErrorCodeValue(); RTC_LOG(LS_WARNING) << ToString() << ": Received " << StunMethodToString(response->type()) @@ -1139,7 +1403,7 @@ void Connection::OnConnectionRequestErrorResponse(ConnectionRequest* request, error_code == STUN_ERROR_UNAUTHORIZED) { // Recoverable error, retry } else if (error_code == STUN_ERROR_ROLE_CONFLICT) { - HandleRoleConflictFromPeer(); + port_->SignalRoleConflict(port_.get()); } else if (request->msg()->type() == GOOG_PING_REQUEST) { // Race, retry. } else { @@ -1147,7 +1411,8 @@ void Connection::OnConnectionRequestErrorResponse(ConnectionRequest* request, RTC_LOG(LS_ERROR) << ToString() << ": Received STUN error response, code=" << error_code << "; killing connection"; - FailAndDestroy(); + set_state(IceCandidatePairState::FAILED); + port_->DestroyConnectionAsync(this); } } @@ -1160,13 +1425,14 @@ void Connection::OnConnectionRequestTimeout(ConnectionRequest* request) { } void Connection::OnConnectionRequestSent(ConnectionRequest* request) { + RTC_DCHECK_RUN_ON(network_thread_); // Log at LS_INFO if we send a ping on an unwritable connection. rtc::LoggingSeverity sev = !writable() ? 
rtc::LS_INFO : rtc::LS_VERBOSE; RTC_LOG_V(sev) << ToString() << ": Sent " << StunMethodToString(request->msg()->type()) << ", id=" << rtc::hex_encode(request->id()) << ", use_candidate=" << use_candidate_attr() - << ", nomination=" << nomination(); + << ", nomination=" << nomination_; stats_.sent_ping_requests_total++; LogCandidatePairEvent(webrtc::IceCandidatePairEventType::kCheckSent, request->reduced_transaction_id()); @@ -1175,8 +1441,14 @@ void Connection::OnConnectionRequestSent(ConnectionRequest* request) { } } -void Connection::HandleRoleConflictFromPeer() { - port_->SignalRoleConflict(port_); +IceCandidatePairState Connection::state() const { + RTC_DCHECK_RUN_ON(network_thread_); + return state_; +} + +int Connection::num_pings_sent() const { + RTC_DCHECK_RUN_ON(network_thread_); + return num_pings_sent_; } void Connection::MaybeSetRemoteIceParametersAndGeneration( @@ -1209,20 +1481,34 @@ void Connection::MaybeUpdatePeerReflexiveCandidate( } } -void Connection::OnMessage(rtc::Message* pmsg) { - RTC_DCHECK(pmsg->message_id == MSG_DELETE); - RTC_LOG(LS_INFO) << "Connection deleted with number of pings sent: " - << num_pings_sent_; - SignalDestroyed(this); - delete this; -} - int64_t Connection::last_received() const { + RTC_DCHECK_RUN_ON(network_thread_); return std::max(last_data_received_, std::max(last_ping_received_, last_ping_response_received_)); } +int64_t Connection::receiving_unchanged_since() const { + RTC_DCHECK_RUN_ON(network_thread_); + return receiving_unchanged_since_; +} + +uint32_t Connection::prflx_priority() const { + RTC_DCHECK_RUN_ON(network_thread_); + // PRIORITY Attribute. + // Changing the type preference to Peer Reflexive and local preference + // and component id information is unchanged from the original priority. + // priority = (2^24)*(type preference) + + // (2^8)*(local preference) + + // (2^0)*(256 - component ID) + IcePriorityValue type_preference = + (local_candidate_.protocol() == TCP_PROTOCOL_NAME) + ? ICE_TYPE_PREFERENCE_PRFLX_TCP + : ICE_TYPE_PREFERENCE_PRFLX; + return type_preference << 24 | (local_candidate_.priority() & 0x00FFFFFF); +} + ConnectionInfo Connection::stats() { + RTC_DCHECK_RUN_ON(network_thread_); stats_.recv_bytes_second = round(recv_rate_tracker_.ComputeRate()); stats_.recv_total_bytes = recv_rate_tracker_.TotalSampleCount(); stats_.sent_bytes_second = round(send_rate_tracker_.ComputeRate()); @@ -1230,21 +1516,25 @@ ConnectionInfo Connection::stats() { stats_.receiving = receiving_; stats_.writable = write_state_ == STATE_WRITABLE; stats_.timeout = write_state_ == STATE_WRITE_TIMEOUT; - stats_.new_connection = !reported_; stats_.rtt = rtt_; stats_.key = this; stats_.state = state_; - stats_.priority = priority(); + if (port_) { + stats_.priority = priority(); + stats_.local_candidate = local_candidate(); + } stats_.nominated = nominated(); stats_.total_round_trip_time_ms = total_round_trip_time_ms_; stats_.current_round_trip_time_ms = current_round_trip_time_ms_; - stats_.local_candidate = local_candidate(); stats_.remote_candidate = remote_candidate(); return stats_; } -void Connection::MaybeUpdateLocalCandidate(ConnectionRequest* request, +void Connection::MaybeUpdateLocalCandidate(StunRequest* request, StunMessage* response) { + if (!port_) + return; + // RFC 5245 // The agent checks the mapped address from the STUN response. 
If the // transport address does not match any of the local candidates that the @@ -1260,12 +1550,20 @@ void Connection::MaybeUpdateLocalCandidate(ConnectionRequest* request, return; } - for (size_t i = 0; i < port_->Candidates().size(); ++i) { - if (port_->Candidates()[i].address() == addr->GetAddress()) { - if (local_candidate_index_ != i) { + for (const Candidate& candidate : port_->Candidates()) { + if (absl::EndsWith(candidate.address().hostname(), ".reflector")) { + Candidate testCandidate = candidate; + testCandidate.set_address(local_candidate_.address()); + if (testCandidate == local_candidate_) { + return; + } + } + + if (candidate.address() == addr->GetAddress()) { + if (local_candidate_ != candidate) { RTC_LOG(LS_INFO) << ToString() << ": Updating local candidate type to srflx."; - local_candidate_index_ = i; + local_candidate_ = candidate; // SignalStateChange to force a re-sort in P2PTransportChannel as this // Connection's local candidate has changed. SignalStateChange(this); @@ -1289,19 +1587,20 @@ void Connection::MaybeUpdateLocalCandidate(ConnectionRequest* request, std::string id = rtc::CreateRandomString(8); // Create a peer-reflexive candidate based on the local candidate. - Candidate new_local_candidate(local_candidate()); - new_local_candidate.set_id(id); - new_local_candidate.set_type(PRFLX_PORT_TYPE); - new_local_candidate.set_address(addr->GetAddress()); - new_local_candidate.set_priority(priority); - new_local_candidate.set_related_address(local_candidate().address()); - new_local_candidate.set_foundation(Port::ComputeFoundation( - PRFLX_PORT_TYPE, local_candidate().protocol(), - local_candidate().relay_protocol(), local_candidate().address())); + local_candidate_.set_id(id); + local_candidate_.set_type(PRFLX_PORT_TYPE); + // Set the related address and foundation attributes before changing the + // address. + local_candidate_.set_related_address(local_candidate_.address()); + local_candidate_.set_foundation(port()->ComputeFoundation( + PRFLX_PORT_TYPE, local_candidate_.protocol(), + local_candidate_.relay_protocol(), local_candidate_.address())); + local_candidate_.set_priority(priority); + local_candidate_.set_address(addr->GetAddress()); // Change the local candidate of this Connection to the new prflx candidate. RTC_LOG(LS_INFO) << ToString() << ": Updating local candidate type to prflx."; - local_candidate_index_ = port_->AddPrflxCandidate(new_local_candidate); + port_->AddPrflxCandidate(local_candidate_); // SignalStateChange to force a re-sort in P2PTransportChannel as this // Connection's local candidate has changed. 
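The `.reflector` branch above is not part of upstream WebRTC; it appears to be a Telegram-calls-specific tweak that skips the peer-reflexive rewrite when one of the port's candidates is a reflector-style candidate matching the current local candidate apart from its address. The suffix test itself amounts to the following (EndsWith mirrors absl::EndsWith):

#include <string>
#include <string_view>

bool EndsWith(std::string_view text, std::string_view suffix) {
  return text.size() >= suffix.size() &&
         text.compare(text.size() - suffix.size(), suffix.size(), suffix) == 0;
}

// True for hostnames like "198-51-100-1.reflector" (illustrative value).
bool IsReflectorHost(const std::string& hostname) {
  return EndsWith(hostname, ".reflector");
}

int main() {
  return (IsReflectorHost("198-51-100-1.reflector") &&
          !IsReflectorHost("example.org"))
             ? 0
             : 1;
}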
@@ -1309,10 +1608,12 @@ void Connection::MaybeUpdateLocalCandidate(ConnectionRequest* request, } bool Connection::rtt_converged() const { + RTC_DCHECK_RUN_ON(network_thread_); return rtt_samples_ > (RTT_RATIO + 1); } bool Connection::missing_responses(int64_t now) const { + RTC_DCHECK_RUN_ON(network_thread_); if (pings_since_last_response_.empty()) { return false; } @@ -1323,6 +1624,7 @@ bool Connection::missing_responses(int64_t now) const { bool Connection::TooManyOutstandingPings( const absl::optional& max_outstanding_pings) const { + RTC_DCHECK_RUN_ON(network_thread_); if (!max_outstanding_pings.has_value()) { return false; } @@ -1333,7 +1635,22 @@ bool Connection::TooManyOutstandingPings( return true; } +void Connection::SetLocalCandidateNetworkCost(uint16_t cost) { + RTC_DCHECK_RUN_ON(network_thread_); + + if (cost == local_candidate_.network_cost()) + return; + + local_candidate_.set_network_cost(cost); + + // Network cost change will affect the connection selection criteria. + // Signal the connection state change to force a re-sort in + // P2PTransportChannel. + SignalStateChange(this); +} + bool Connection::ShouldSendGoogPing(const StunMessage* message) { + RTC_DCHECK_RUN_ON(network_thread_); if (remote_support_goog_ping_ == true && cached_stun_binding_ && cached_stun_binding_->EqualAttributes(message, [](int type) { // Ignore these attributes. @@ -1350,6 +1667,7 @@ bool Connection::ShouldSendGoogPing(const StunMessage* message) { } void Connection::ForgetLearnedState() { + RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(LS_INFO) << ToString() << ": Connection forget learned state"; requests_.Clear(); receiving_ = false; @@ -1358,14 +1676,17 @@ void Connection::ForgetLearnedState() { pings_since_last_response_.clear(); } -ProxyConnection::ProxyConnection(Port* port, +ProxyConnection::ProxyConnection(rtc::WeakPtr port, size_t index, const Candidate& remote_candidate) - : Connection(port, index, remote_candidate) {} + : Connection(std::move(port), index, remote_candidate) {} int ProxyConnection::Send(const void* data, size_t size, const rtc::PacketOptions& options) { + if (!port_) + return SOCKET_ERROR; + stats_.sent_total_packets++; int sent = port_->SendTo(data, size, remote_candidate_.address(), options, true); diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h index 7efe7d65e9..7baff0287c 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/candidate.h" #include "api/transport/stun.h" @@ -25,10 +26,10 @@ #include "p2p/base/stun_request.h" #include "p2p/base/transport_description.h" #include "rtc_base/async_packet_socket.h" -#include "rtc_base/message_handler.h" #include "rtc_base/network.h" #include "rtc_base/numerics/event_based_exponential_moving_average.h" #include "rtc_base/rate_tracker.h" +#include "rtc_base/weak_ptr.h" namespace cricket { @@ -53,29 +54,12 @@ struct CandidatePair final : public CandidatePairInterface { Candidate remote; }; -// A ConnectionRequest is a simple STUN ping used to determine writability. 
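prflx_priority() above applies the RFC 5245 section 4.1.2.1 formula: keep the lower 24 bits (local preference and component) of the existing candidate priority and replace only the type-preference byte. A worked, self-contained example with typical values (126 = host type preference, 110 = peer-reflexive type preference):

#include <cstdint>
#include <iostream>

// RFC 5245 4.1.2.1:
//   priority = (2^24)*type_pref + (2^8)*local_pref + (2^0)*(256 - component)
uint32_t PrflxPriority(uint32_t original_priority, uint32_t prflx_type_pref) {
  return (prflx_type_pref << 24) | (original_priority & 0x00FFFFFFu);
}

int main() {
  // Illustrative numbers: host type preference 126, local preference 65535,
  // component 1 (RTP).
  const uint32_t host_priority = (126u << 24) | (65535u << 8) | (256u - 1u);
  const uint32_t prflx = PrflxPriority(host_priority, 110u);
  std::cout << host_priority << " -> " << prflx << "\n";
}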
-class ConnectionRequest : public StunRequest { - public: - explicit ConnectionRequest(Connection* connection); - void Prepare(StunMessage* request) override; - void OnResponse(StunMessage* response) override; - void OnErrorResponse(StunMessage* response) override; - void OnTimeout() override; - void OnSent() override; - int resend_delay() override; - - private: - Connection* const connection_; -}; - // Represents a communication link between a port on the local client and a // port on the remote client. -class Connection : public CandidatePairInterface, - public rtc::MessageHandlerAutoCleanup, - public sigslot::has_slots<> { +class Connection : public CandidatePairInterface { public: struct SentPing { - SentPing(const std::string id, int64_t sent_time, uint32_t nomination) + SentPing(absl::string_view id, int64_t sent_time, uint32_t nomination) : id(id), sent_time(sent_time), nomination(nomination) {} std::string id; @@ -88,6 +72,8 @@ class Connection : public CandidatePairInterface, // A unique ID assigned when the connection is created. uint32_t id() const { return id_; } + webrtc::TaskQueueBase* network_thread() const; + // Implementation of virtual methods in CandidatePairInterface. // Returns the description of the local port const Candidate& local_candidate() const override; @@ -109,34 +95,33 @@ class Connection : public CandidatePairInterface, STATE_WRITE_TIMEOUT = 3, // we have had a large number of ping failures }; - WriteState write_state() const { return write_state_; } - bool writable() const { return write_state_ == STATE_WRITABLE; } - bool receiving() const { return receiving_; } + WriteState write_state() const; + bool writable() const; + bool receiving() const; + + const Port* port() const { + RTC_DCHECK_RUN_ON(network_thread_); + return port_.get(); + } // Determines whether the connection has finished connecting. This can only // be false for TCP connections. - bool connected() const { return connected_; } - bool weak() const { return !(writable() && receiving() && connected()); } - bool active() const { return write_state_ != STATE_WRITE_TIMEOUT; } + bool connected() const; + bool weak() const; + bool active() const; // A connection is dead if it can be safely deleted. bool dead(int64_t now) const; // Estimate of the round-trip time over this connection. - int rtt() const { return rtt_; } + int rtt() const; int unwritable_timeout() const; - void set_unwritable_timeout(const absl::optional& value_ms) { - unwritable_timeout_ = value_ms; - } + void set_unwritable_timeout(const absl::optional& value_ms); int unwritable_min_checks() const; - void set_unwritable_min_checks(const absl::optional& value) { - unwritable_min_checks_ = value; - } + void set_unwritable_min_checks(const absl::optional& value); int inactive_timeout() const; - void set_inactive_timeout(const absl::optional& value) { - inactive_timeout_ = value; - } + void set_inactive_timeout(const absl::optional& value); // Gets the `ConnectionInfo` stats, where `best_connection` has not been // populated (default value false). @@ -172,15 +157,15 @@ class Connection : public CandidatePairInterface, // still keep it around in case the other side wants to use it. But we can // safely stop pinging on it and we can allow it to time out if the other // side stops using it as well. 
- bool pruned() const { return pruned_; } + bool pruned() const; void Prune(); - bool use_candidate_attr() const { return use_candidate_attr_; } + bool use_candidate_attr() const; void set_use_candidate_attr(bool enable); - void set_nomination(uint32_t value) { nomination_ = value; } + void set_nomination(uint32_t value); - uint32_t remote_nomination() const { return remote_nomination_; } + uint32_t remote_nomination() const; // One or several pairs may be nominated based on if Regular or Aggressive // Nomination is used. https://tools.ietf.org/html/rfc5245#section-8 // `nominated` is defined both for the controlling or controlled agent based @@ -188,19 +173,22 @@ class Connection : public CandidatePairInterface, // gets its `remote_nomination_` set when pinged by the controlling agent with // a nomination value. The controlling agent gets its `acked_nomination_` set // when receiving a response to a nominating ping. - bool nominated() const { return acked_nomination_ || remote_nomination_; } - void set_remote_ice_mode(IceMode mode) { remote_ice_mode_ = mode; } + bool nominated() const; int receiving_timeout() const; - void set_receiving_timeout(absl::optional receiving_timeout_ms) { - receiving_timeout_ = receiving_timeout_ms; - } + void set_receiving_timeout(absl::optional receiving_timeout_ms); - // Makes the connection go away. + // Deletes a `Connection` instance is by calling the `DestroyConnection` + // method in `Port`. + // Note: When the function returns, the object has been deleted. void Destroy(); - // Makes the connection go away, in a failed state. - void FailAndDestroy(); + // Signals object destruction, releases outstanding references and performs + // final logging. + // The function will return `true` when shutdown was performed, signals + // emitted and outstanding references released. If the function returns + // `false`, `Shutdown()` has previously been called. + bool Shutdown(); // Prunes the connection and sets its state to STATE_FAILED, // It will not be used or send pings although it can still receive packets. @@ -210,25 +198,29 @@ class Connection : public CandidatePairInterface, // the current time, which is compared against various timeouts. void UpdateState(int64_t now); + void UpdateLocalIceParameters(int component, + absl::string_view username_fragment, + absl::string_view password); + // Called when this connection should try checking writability again. - int64_t last_ping_sent() const { return last_ping_sent_; } + int64_t last_ping_sent() const; void Ping(int64_t now); void ReceivedPingResponse( int rtt, - const std::string& request_id, + absl::string_view request_id, const absl::optional& nomination = absl::nullopt); - int64_t last_ping_response_received() const { - return last_ping_response_received_; - } - const absl::optional& last_ping_id_received() const { - return last_ping_id_received_; - } + std::unique_ptr BuildPingRequest() RTC_RUN_ON(network_thread_); + + int64_t last_ping_response_received() const; + const absl::optional& last_ping_id_received() const; + // Used to check if any STUN ping response has been received. - int rtt_samples() const { return rtt_samples_; } + int rtt_samples() const; // Called whenever a valid ping is received on this connection. This is // public because the connection intercepts the first ping for us. 
- int64_t last_ping_received() const { return last_ping_received_; } + int64_t last_ping_received() const; + void ReceivedPing( const absl::optional& request_id = absl::nullopt); // Handles the binding request; sends a response if this is a valid request. @@ -238,8 +230,8 @@ class Connection : public CandidatePairInterface, // connectivity check from the peer. void HandlePiggybackCheckAcknowledgementIfAny(StunMessage* msg); // Timestamp when data was last sent (or attempted to be sent). - int64_t last_send_data() const { return last_send_data_; } - int64_t last_data_received() const { return last_data_received_; } + int64_t last_send_data() const; + int64_t last_data_received() const; // Debugging description of this connection std::string ToDebugId() const; @@ -247,32 +239,24 @@ class Connection : public CandidatePairInterface, std::string ToSensitiveString() const; // Structured description of this candidate pair. const webrtc::IceCandidatePairDescription& ToLogDescription(); - void set_ice_event_log(webrtc::IceEventLog* ice_event_log) { - ice_event_log_ = ice_event_log; - } + void set_ice_event_log(webrtc::IceEventLog* ice_event_log); + // Prints pings_since_last_response_ into a string. void PrintPingsSinceLastResponse(std::string* pings, size_t max); - bool reported() const { return reported_; } - void set_reported(bool reported) { reported_ = reported; } - // The following two methods are only used for logging in ToString above, and - // this flag is set true by P2PTransportChannel for its selected candidate - // pair. - bool selected() const { return selected_; } - void set_selected(bool selected) { selected_ = selected; } + // `set_selected` is only used for logging in ToString above. The flag is + // set true by P2PTransportChannel for its selected candidate pair. + // TODO(tommi): Remove `selected()` once not referenced downstream. + bool selected() const; + void set_selected(bool selected); // This signal will be fired if this connection is nominated by the // controlling side. sigslot::signal1 SignalNominated; - // Invoked when Connection receives STUN error response with 487 code. - void HandleRoleConflictFromPeer(); - - IceCandidatePairState state() const { return state_; } + IceCandidatePairState state() const; - int num_pings_sent() const { return num_pings_sent_; } - - IceMode remote_ice_mode() const { return remote_ice_mode_; } + int num_pings_sent() const; uint32_t ComputeNetworkCost() const; @@ -291,15 +275,20 @@ class Connection : public CandidatePairInterface, // response in milliseconds int64_t last_received() const; // Returns the last time when the connection changed its receiving state. - int64_t receiving_unchanged_since() const { - return receiving_unchanged_since_; - } + int64_t receiving_unchanged_since() const; + + // Constructs the prflx priority as described in + // https://datatracker.ietf.org/doc/html/rfc5245#section-4.1.2.1 + uint32_t prflx_priority() const; bool stable(int64_t now) const; // Check if we sent `val` pings without receving a response. bool TooManyOutstandingPings(const absl::optional& val) const; + // Called by Port when the network cost changes. 
+ void SetLocalCandidateNetworkCost(uint16_t cost); + void SetIceFieldTrials(const IceFieldTrials* field_trials); const rtc::EventBasedExponentialMovingAverage& GetRttEstimate() const { return rtt_estimate_; @@ -319,38 +308,38 @@ class Connection : public CandidatePairInterface, // Does not trigger SignalStateChange void ForgetLearnedState(); - void SendStunBindingResponse(const StunMessage* request); - void SendGoogPingResponse(const StunMessage* request); + void SendStunBindingResponse(const StunMessage* message); + void SendGoogPingResponse(const StunMessage* message); void SendResponseMessage(const StunMessage& response); // An accessor for unit tests. - Port* PortForTest() { return port_; } - const Port* PortForTest() const { return port_; } + Port* PortForTest() { return port_.get(); } + const Port* PortForTest() const { return port_.get(); } // Public for unit tests. - uint32_t acked_nomination() const { return acked_nomination_; } - - // Public for unit tests. - void set_remote_nomination(uint32_t remote_nomination) { - remote_nomination_ = remote_nomination; - } + uint32_t acked_nomination() const; + void set_remote_nomination(uint32_t remote_nomination); protected: - enum { MSG_DELETE = 0, MSG_FIRST_AVAILABLE }; + // A ConnectionRequest is a simple STUN ping used to determine writability. + class ConnectionRequest; // Constructs a new connection to the given remote port. - Connection(Port* port, size_t index, const Candidate& candidate); + Connection(rtc::WeakPtr port, size_t index, const Candidate& candidate); // Called back when StunRequestManager has a stun packet to send void OnSendStunPacket(const void* data, size_t size, StunRequest* req); // Callbacks from ConnectionRequest - virtual void OnConnectionRequestResponse(ConnectionRequest* req, + virtual void OnConnectionRequestResponse(StunRequest* req, StunMessage* response); void OnConnectionRequestErrorResponse(ConnectionRequest* req, - StunMessage* response); - void OnConnectionRequestTimeout(ConnectionRequest* req); - void OnConnectionRequestSent(ConnectionRequest* req); + StunMessage* response) + RTC_RUN_ON(network_thread_); + void OnConnectionRequestTimeout(ConnectionRequest* req) + RTC_RUN_ON(network_thread_); + void OnConnectionRequestSent(ConnectionRequest* req) + RTC_RUN_ON(network_thread_); bool rtt_converged() const; @@ -364,17 +353,18 @@ class Connection : public CandidatePairInterface, void set_state(IceCandidatePairState state); void set_connected(bool value); - uint32_t nomination() const { return nomination_; } - - void OnMessage(rtc::Message* pmsg) override; - // The local port where this connection sends and receives packets. - Port* port() { return port_; } - const Port* port() const { return port_; } - - uint32_t id_; - Port* port_; - size_t local_candidate_index_; + Port* port() { return port_.get(); } + + // NOTE: A pointer to the network thread is held by `port_` so in theory we + // shouldn't need to hold on to this pointer here, but rather defer to + // port_->thread(). However, some tests delete the classes in the wrong order + // so `port_` may be deleted before an instance of this class is deleted. + // TODO(tommi): This ^^^ should be fixed. + webrtc::TaskQueueBase* const network_thread_; + const uint32_t id_; + rtc::WeakPtr port_; + Candidate local_candidate_ RTC_GUARDED_BY(network_thread_); Candidate remote_candidate_; ConnectionInfo stats_; @@ -385,92 +375,100 @@ class Connection : public CandidatePairInterface, private: // Update the local candidate based on the mapped address attribute. 
// If the local candidate changed, fires SignalStateChange. - void MaybeUpdateLocalCandidate(ConnectionRequest* request, - StunMessage* response); + void MaybeUpdateLocalCandidate(StunRequest* request, StunMessage* response) + RTC_RUN_ON(network_thread_); - void LogCandidatePairConfig(webrtc::IceCandidatePairConfigType type); + void LogCandidatePairConfig(webrtc::IceCandidatePairConfigType type) + RTC_RUN_ON(network_thread_); void LogCandidatePairEvent(webrtc::IceCandidatePairEventType type, - uint32_t transaction_id); + uint32_t transaction_id) + RTC_RUN_ON(network_thread_); // Check if this IceMessage is identical // to last message ack:ed STUN_BINDING_REQUEST. - bool ShouldSendGoogPing(const StunMessage* message); - - WriteState write_state_; - bool receiving_; - bool connected_; - bool pruned_; - bool selected_ = false; + bool ShouldSendGoogPing(const StunMessage* message) + RTC_RUN_ON(network_thread_); + + WriteState write_state_ RTC_GUARDED_BY(network_thread_); + bool receiving_ RTC_GUARDED_BY(network_thread_); + bool connected_ RTC_GUARDED_BY(network_thread_); + bool pruned_ RTC_GUARDED_BY(network_thread_); + bool selected_ RTC_GUARDED_BY(network_thread_) = false; // By default `use_candidate_attr_` flag will be true, // as we will be using aggressive nomination. // But when peer is ice-lite, this flag "must" be initialized to false and // turn on when connection becomes "best connection". - bool use_candidate_attr_; + bool use_candidate_attr_ RTC_GUARDED_BY(network_thread_); // Used by the controlling side to indicate that this connection will be // selected for transmission if the peer supports ICE-renomination when this // value is positive. A larger-value indicates that a connection is nominated // later and should be selected by the controlled side with higher precedence. // A zero-value indicates not nominating this connection. - uint32_t nomination_ = 0; + uint32_t nomination_ RTC_GUARDED_BY(network_thread_) = 0; // The last nomination that has been acknowledged. - uint32_t acked_nomination_ = 0; + uint32_t acked_nomination_ RTC_GUARDED_BY(network_thread_) = 0; // Used by the controlled side to remember the nomination value received from // the controlling side. When the peer does not support ICE re-nomination, its // value will be 1 if the connection has been nominated. 
- uint32_t remote_nomination_ = 0; + uint32_t remote_nomination_ RTC_GUARDED_BY(network_thread_) = 0; - IceMode remote_ice_mode_; - StunRequestManager requests_; - int rtt_; - int rtt_samples_ = 0; + StunRequestManager requests_ RTC_GUARDED_BY(network_thread_); + int rtt_ RTC_GUARDED_BY(network_thread_); + int rtt_samples_ RTC_GUARDED_BY(network_thread_) = 0; // https://w3c.github.io/webrtc-stats/#dom-rtcicecandidatepairstats-totalroundtriptime - uint64_t total_round_trip_time_ms_ = 0; + uint64_t total_round_trip_time_ms_ RTC_GUARDED_BY(network_thread_) = 0; // https://w3c.github.io/webrtc-stats/#dom-rtcicecandidatepairstats-currentroundtriptime - absl::optional current_round_trip_time_ms_; - int64_t last_ping_sent_; // last time we sent a ping to the other side - int64_t last_ping_received_; // last time we received a ping from the other - // side - int64_t last_data_received_; - int64_t last_ping_response_received_; - int64_t receiving_unchanged_since_ = 0; - std::vector pings_since_last_response_; + absl::optional current_round_trip_time_ms_ + RTC_GUARDED_BY(network_thread_); + int64_t last_ping_sent_ RTC_GUARDED_BY( + network_thread_); // last time we sent a ping to the other side + int64_t last_ping_received_ + RTC_GUARDED_BY(network_thread_); // last time we received a ping from the + // other side + int64_t last_data_received_ RTC_GUARDED_BY(network_thread_); + int64_t last_ping_response_received_ RTC_GUARDED_BY(network_thread_); + int64_t receiving_unchanged_since_ RTC_GUARDED_BY(network_thread_) = 0; + std::vector pings_since_last_response_ + RTC_GUARDED_BY(network_thread_); // Transaction ID of the last connectivity check received. Null if having not // received a ping yet. - absl::optional last_ping_id_received_; + absl::optional last_ping_id_received_ + RTC_GUARDED_BY(network_thread_); - absl::optional unwritable_timeout_; - absl::optional unwritable_min_checks_; - absl::optional inactive_timeout_; + absl::optional unwritable_timeout_ RTC_GUARDED_BY(network_thread_); + absl::optional unwritable_min_checks_ RTC_GUARDED_BY(network_thread_); + absl::optional inactive_timeout_ RTC_GUARDED_BY(network_thread_); - bool reported_; - IceCandidatePairState state_; + IceCandidatePairState state_ RTC_GUARDED_BY(network_thread_); // Time duration to switch from receiving to not receiving. - absl::optional receiving_timeout_; - int64_t time_created_ms_; - int num_pings_sent_ = 0; + absl::optional receiving_timeout_ RTC_GUARDED_BY(network_thread_); + int64_t time_created_ms_ RTC_GUARDED_BY(network_thread_); + int num_pings_sent_ RTC_GUARDED_BY(network_thread_) = 0; - absl::optional log_description_; - webrtc::IceEventLog* ice_event_log_ = nullptr; + absl::optional log_description_ + RTC_GUARDED_BY(network_thread_); + webrtc::IceEventLog* ice_event_log_ RTC_GUARDED_BY(network_thread_) = nullptr; // GOOG_PING_REQUEST is sent in place of STUN_BINDING_REQUEST // if configured via field trial, the remote peer supports it (signaled // in STUN_BINDING) and if the last STUN BINDING is identical to the one // that is about to be sent. 
- absl::optional remote_support_goog_ping_; - std::unique_ptr cached_stun_binding_; + absl::optional remote_support_goog_ping_ + RTC_GUARDED_BY(network_thread_); + std::unique_ptr cached_stun_binding_ + RTC_GUARDED_BY(network_thread_); const IceFieldTrials* field_trials_; - rtc::EventBasedExponentialMovingAverage rtt_estimate_; - - friend class Port; - friend class ConnectionRequest; - friend class P2PTransportChannel; + rtc::EventBasedExponentialMovingAverage rtt_estimate_ + RTC_GUARDED_BY(network_thread_); }; // ProxyConnection defers all the interesting work to the port. class ProxyConnection : public Connection { public: - ProxyConnection(Port* port, size_t index, const Candidate& remote_candidate); + ProxyConnection(rtc::WeakPtr port, + size_t index, + const Candidate& remote_candidate); int Send(const void* data, size_t size, diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/connection_info.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/connection_info.cc index d0cd3239f1..363d32954e 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/connection_info.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/connection_info.cc @@ -17,7 +17,6 @@ ConnectionInfo::ConnectionInfo() writable(false), receiving(false), timeout(false), - new_connection(false), rtt(0), sent_discarded_bytes(0), sent_total_bytes(0), diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/connection_info.h b/TMessagesProj/jni/voip/webrtc/p2p/base/connection_info.h index 1117595481..a30b636d86 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/connection_info.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/connection_info.h @@ -39,7 +39,6 @@ struct ConnectionInfo { bool writable; // Has this connection received a STUN response? bool receiving; // Has this connection received anything? bool timeout; // Has this connection timed out? - bool new_connection; // Is this a newly created connection? size_t rtt; // The STUN RTT for this connection. size_t sent_discarded_bytes; // Number of outgoing bytes discarded due to // socket errors. 
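The Connection hunks above replace ad-hoc friend access with RTC_RUN_ON(network_thread_) on methods and RTC_GUARDED_BY(network_thread_) on state. These macros wrap Clang's -Wthread-safety attributes, so touching guarded members off the network thread becomes a compile-time diagnostic rather than a latent data race. Below is a minimal standalone sketch of the same mechanism using the raw Clang attributes instead of the WebRTC macros; the Mutex wrapper and all names in it are illustrative, not code from this patch.

// Illustrative only: mirrors roughly what RTC_GUARDED_BY / RTC_RUN_ON expand to.
// Build with: clang++ -std=c++17 -Wthread-safety thread_safety_sketch.cc
#include <mutex>

#define CAPABILITY(x) __attribute__((capability(x)))
#define GUARDED_BY(x) __attribute__((guarded_by(x)))
#define REQUIRES(x) __attribute__((requires_capability(x)))
#define ACQUIRE(...) __attribute__((acquire_capability(__VA_ARGS__)))
#define RELEASE(...) __attribute__((release_capability(__VA_ARGS__)))

// Annotated lock wrapper; in WebRTC the capability is typically the network
// thread / task queue rather than a mutex.
class CAPABILITY("mutex") Mutex {
 public:
  void Lock() ACQUIRE() { m_.lock(); }
  void Unlock() RELEASE() { m_.unlock(); }

 private:
  std::mutex m_;
};

class Counter {
 public:
  // Analogous to a method annotated RTC_RUN_ON(network_thread_): the caller
  // must already hold the capability, otherwise clang emits a warning.
  void Increment() REQUIRES(mu_) { ++value_; }

  void IncrementLocked() {
    mu_.Lock();
    Increment();  // OK: capability is held here.
    mu_.Unlock();
  }

 private:
  Mutex mu_;
  // Analogous to a member annotated RTC_GUARDED_BY(network_thread_): access
  // without the capability is flagged at compile time.
  int value_ GUARDED_BY(mu_) = 0;
};

int main() {
  Counter c;
  c.IncrementLocked();
  return 0;
}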
diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/default_ice_transport_factory.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/default_ice_transport_factory.cc index 0a7175cfd8..313d608750 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/default_ice_transport_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/default_ice_transport_factory.cc @@ -12,6 +12,7 @@ #include +#include "api/make_ref_counted.h" #include "p2p/base/basic_ice_controller.h" #include "p2p/base/ice_controller_factory_interface.h" @@ -44,10 +45,10 @@ DefaultIceTransportFactory::CreateIceTransport( int component, IceTransportInit init) { BasicIceControllerFactory factory; + init.set_ice_controller_factory(&factory); return rtc::make_ref_counted( - cricket::P2PTransportChannel::Create( - transport_name, component, init.port_allocator(), - init.async_dns_resolver_factory(), init.event_log(), &factory)); + cricket::P2PTransportChannel::Create(transport_name, component, + std::move(init))); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.cc index bf8583f670..904a0cbbc9 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.cc @@ -15,6 +15,7 @@ #include #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" #include "api/dtls_transport_interface.h" #include "api/rtc_event_log/rtc_event_log.h" #include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" @@ -226,7 +227,35 @@ bool DtlsTransport::GetSslCipherSuite(int* cipher) { return dtls_->GetSslCipherSuite(cipher); } -bool DtlsTransport::SetRemoteFingerprint(const std::string& digest_alg, +webrtc::RTCError DtlsTransport::SetRemoteParameters( + absl::string_view digest_alg, + const uint8_t* digest, + size_t digest_len, + absl::optional role) { + rtc::Buffer remote_fingerprint_value(digest, digest_len); + bool is_dtls_restart = + dtls_active_ && remote_fingerprint_value_ != remote_fingerprint_value; + // Set SSL role. Role must be set before fingerprint is applied, which + // initiates DTLS setup. + if (role) { + if (is_dtls_restart) { + dtls_role_ = *role; + } else { + if (!SetDtlsRole(*role)) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Failed to set SSL role for the transport."); + } + } + } + // Apply remote fingerprint. 
+ if (!SetRemoteFingerprint(digest_alg, digest, digest_len)) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Failed to apply remote fingerprint."); + } + return webrtc::RTCError::OK(); +} + +bool DtlsTransport::SetRemoteFingerprint(absl::string_view digest_alg, const uint8_t* digest, size_t digest_len) { rtc::Buffer remote_fingerprint_value(digest, digest_len); @@ -264,7 +293,7 @@ bool DtlsTransport::SetRemoteFingerprint(const std::string& digest_alg, // At this point we know we are doing DTLS bool fingerprint_changing = remote_fingerprint_value_.size() > 0u; remote_fingerprint_value_ = std::move(remote_fingerprint_value); - remote_fingerprint_algorithm_ = digest_alg; + remote_fingerprint_algorithm_ = std::string(digest_alg); if (dtls_ && !fingerprint_changing) { // This can occur if DTLS is set up before a remote fingerprint is @@ -312,7 +341,7 @@ std::unique_ptr DtlsTransport::GetRemoteSSLCertChain() return dtls_->GetPeerSSLCertChain(); } -bool DtlsTransport::ExportKeyingMaterial(const std::string& label, +bool DtlsTransport::ExportKeyingMaterial(absl::string_view label, const uint8_t* context, size_t context_len, bool use_context, diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h index edfa8896ce..2b26e2553f 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/crypto/crypto_options.h" #include "api/dtls_transport_interface.h" #include "api/sequence_checker.h" @@ -22,7 +23,6 @@ #include "p2p/base/ice_transport_internal.h" #include "rtc_base/buffer.h" #include "rtc_base/buffer_queue.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/stream.h" #include "rtc_base/strings/string_builder.h" @@ -40,6 +40,9 @@ class StreamInterfaceChannel : public rtc::StreamInterface { public: explicit StreamInterfaceChannel(IceTransportInternal* ice_transport); + StreamInterfaceChannel(const StreamInterfaceChannel&) = delete; + StreamInterfaceChannel& operator=(const StreamInterfaceChannel&) = delete; + // Push in a packet; this gets pulled out from Read(). bool OnPacketReceived(const char* data, size_t size); @@ -60,8 +63,6 @@ class StreamInterfaceChannel : public rtc::StreamInterface { IceTransportInternal* const ice_transport_; // owned by DtlsTransport rtc::StreamState state_ RTC_GUARDED_BY(sequence_checker_); rtc::BufferQueue packets_ RTC_GUARDED_BY(sequence_checker_); - - RTC_DISALLOW_COPY_AND_ASSIGN(StreamInterfaceChannel); }; // This class provides a DTLS SSLStreamAdapter inside a TransportChannel-style @@ -110,6 +111,9 @@ class DtlsTransport : public DtlsTransportInternal { ~DtlsTransport() override; + DtlsTransport(const DtlsTransport&) = delete; + DtlsTransport& operator=(const DtlsTransport&) = delete; + webrtc::DtlsTransportState dtls_state() const override; const std::string& transport_name() const override; int component() const override; @@ -132,10 +136,17 @@ class DtlsTransport : public DtlsTransportInternal { // SetRemoteFingerprint must be called after SetLocalCertificate, and any // other methods like SetDtlsRole. It's what triggers the actual DTLS setup. // TODO(deadbeef): Rename to "Start" like in ORTC? 
- bool SetRemoteFingerprint(const std::string& digest_alg, + bool SetRemoteFingerprint(absl::string_view digest_alg, const uint8_t* digest, size_t digest_len) override; + // SetRemoteParameters must be called after SetLocalCertificate. + webrtc::RTCError SetRemoteParameters( + absl::string_view digest_alg, + const uint8_t* digest, + size_t digest_len, + absl::optional role) override; + // Called to send a packet (via DTLS, if turned on). int SendPacket(const char* data, size_t size, @@ -164,7 +175,7 @@ class DtlsTransport : public DtlsTransportInternal { // method extracts the keys negotiated during the DTLS handshake, for use in // external encryption. DTLS-SRTP uses this to extract the needed SRTP keys. // See the SSLStreamAdapter documentation for info on the specific parameters. - bool ExportKeyingMaterial(const std::string& label, + bool ExportKeyingMaterial(absl::string_view label, const uint8_t* context, size_t context_len, bool use_context, @@ -248,8 +259,6 @@ class DtlsTransport : public DtlsTransportInternal { bool writable_ = false; webrtc::RtcEventLog* const event_log_; - - RTC_DISALLOW_COPY_AND_ASSIGN(DtlsTransport); }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_internal.h b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_internal.h index 0b26a7fd7a..3d20d1bfd6 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_internal.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_internal.h @@ -19,13 +19,13 @@ #include #include "absl/base/attributes.h" +#include "absl/strings/string_view.h" #include "api/crypto/crypto_options.h" #include "api/dtls_transport_interface.h" #include "api/scoped_refptr.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/packet_transport_internal.h" #include "rtc_base/callback_list.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_fingerprint.h" #include "rtc_base/ssl_stream_adapter.h" @@ -48,6 +48,9 @@ class DtlsTransportInternal : public rtc::PacketTransportInternal { public: ~DtlsTransportInternal() override; + DtlsTransportInternal(const DtlsTransportInternal&) = delete; + DtlsTransportInternal& operator=(const DtlsTransportInternal&) = delete; + virtual webrtc::DtlsTransportState dtls_state() const = 0; virtual int component() const = 0; @@ -79,7 +82,7 @@ class DtlsTransportInternal : public rtc::PacketTransportInternal { virtual std::unique_ptr GetRemoteSSLCertChain() const = 0; // Allows key material to be extracted for external encryption. - virtual bool ExportKeyingMaterial(const std::string& label, + virtual bool ExportKeyingMaterial(absl::string_view label, const uint8_t* context, size_t context_len, bool use_context, @@ -87,10 +90,18 @@ class DtlsTransportInternal : public rtc::PacketTransportInternal { size_t result_len) = 0; // Set DTLS remote fingerprint. Must be after local identity set. - virtual bool SetRemoteFingerprint(const std::string& digest_alg, + ABSL_DEPRECATED("Use SetRemoteParameters instead.") + virtual bool SetRemoteFingerprint(absl::string_view digest_alg, const uint8_t* digest, size_t digest_len) = 0; + // Set DTLS remote fingerprint and role. Must be after local identity set. 
+ virtual webrtc::RTCError SetRemoteParameters( + absl::string_view digest_alg, + const uint8_t* digest, + size_t digest_len, + absl::optional role) = 0; + ABSL_DEPRECATED("Set the max version via construction.") bool SetSslMaxProtocolVersion(rtc::SSLProtocolVersion version) { return true; @@ -135,7 +146,6 @@ class DtlsTransportInternal : public rtc::PacketTransportInternal { DtlsTransportInternal(); private: - RTC_DISALLOW_COPY_AND_ASSIGN(DtlsTransportInternal); webrtc::CallbackList dtls_handshake_error_callback_list_; webrtc::CallbackList diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/fake_dtls_transport.h b/TMessagesProj/jni/voip/webrtc/p2p/base/fake_dtls_transport.h index 5f0ea2ee07..283488bc38 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/fake_dtls_transport.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/fake_dtls_transport.h @@ -16,6 +16,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/crypto/crypto_options.h" #include "api/dtls_transport_interface.h" #include "p2p/base/dtls_transport_internal.h" @@ -140,9 +141,19 @@ class FakeDtlsTransport : public DtlsTransportInternal { const rtc::SSLFingerprint& dtls_fingerprint() const { return dtls_fingerprint_; } - bool SetRemoteFingerprint(const std::string& alg, + webrtc::RTCError SetRemoteParameters(absl::string_view alg, + const uint8_t* digest, + size_t digest_len, + absl::optional role) { + if (role) { + SetDtlsRole(*role); + } + SetRemoteFingerprint(alg, digest, digest_len); + return webrtc::RTCError::OK(); + } + bool SetRemoteFingerprint(absl::string_view alg, const uint8_t* digest, - size_t digest_len) override { + size_t digest_len) { dtls_fingerprint_ = rtc::SSLFingerprint(alg, rtc::MakeArrayView(digest, digest_len)); return true; @@ -203,7 +214,7 @@ class FakeDtlsTransport : public DtlsTransportInternal { } return std::make_unique(remote_cert_->Clone()); } - bool ExportKeyingMaterial(const std::string& label, + bool ExportKeyingMaterial(absl::string_view label, const uint8_t* context, size_t context_len, bool use_context, diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/fake_ice_transport.h b/TMessagesProj/jni/voip/webrtc/p2p/base/fake_ice_transport.h index c053abd5f9..ae7bf8947e 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/fake_ice_transport.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/fake_ice_transport.h @@ -17,27 +17,33 @@ #include #include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/ice_transport_interface.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/units/time_delta.h" #include "p2p/base/ice_transport_internal.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" -#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/task_queue_for_test.h" namespace cricket { +using ::webrtc::SafeTask; +using ::webrtc::TimeDelta; // All methods must be called on the network thread (which is either the thread // calling the constructor, or the separate thread explicitly passed to the // constructor). class FakeIceTransport : public IceTransportInternal { public: - explicit FakeIceTransport(const std::string& name, + explicit FakeIceTransport(absl::string_view name, int component, rtc::Thread* network_thread = nullptr) : name_(name), component_(component), network_thread_(network_thread ? 
network_thread - : rtc::Thread::Current()) {} + : rtc::Thread::Current()) { + RTC_DCHECK(network_thread_); + } // Must be called either on the network thread, or after the network thread // has been shut down. ~FakeIceTransport() override { @@ -307,12 +313,12 @@ class FakeIceTransport : public IceTransportInternal { rtc::CopyOnWriteBuffer packet(std::move(send_packet_)); if (async_) { network_thread_->PostDelayedTask( - ToQueuedTask(task_safety_.flag(), - [this, packet] { - RTC_DCHECK_RUN_ON(network_thread_); - FakeIceTransport::SendPacketInternal(packet); - }), - async_delay_ms_); + SafeTask(task_safety_.flag(), + [this, packet] { + RTC_DCHECK_RUN_ON(network_thread_); + FakeIceTransport::SendPacketInternal(packet); + }), + TimeDelta::Millis(async_delay_ms_)); } else { SendPacketInternal(packet); } @@ -352,7 +358,7 @@ class FakeIceTransport : public IceTransportInternal { void SetNetworkRoute(absl::optional network_route) { RTC_DCHECK_RUN_ON(network_thread_); network_route_ = network_route; - network_thread_->Invoke(RTC_FROM_HERE, [this] { + SendTask(network_thread_, [this] { RTC_DCHECK_RUN_ON(network_thread_); SignalNetworkRouteChanged(network_route_); }); diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/fake_port_allocator.h b/TMessagesProj/jni/voip/webrtc/p2p/base/fake_port_allocator.h index 6366ea84db..174d6304be 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/fake_port_allocator.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/fake_port_allocator.h @@ -13,13 +13,18 @@ #include #include +#include #include +#include "absl/strings/string_view.h" #include "p2p/base/basic_packet_socket_factory.h" #include "p2p/base/port_allocator.h" #include "p2p/base/udp_port.h" +#include "rtc_base/memory/always_valid_pointer.h" #include "rtc_base/net_helpers.h" +#include "rtc_base/task_queue_for_test.h" #include "rtc_base/thread.h" +#include "test/scoped_key_value_config.h" namespace rtc { class SocketFactory; @@ -31,15 +36,16 @@ class TestUDPPort : public UDPPort { public: static TestUDPPort* Create(rtc::Thread* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, uint16_t min_port, uint16_t max_port, - const std::string& username, - const std::string& password, - bool emit_localhost_for_anyaddress) { + absl::string_view username, + absl::string_view password, + bool emit_localhost_for_anyaddress, + const webrtc::FieldTrialsView* field_trials) { TestUDPPort* port = new TestUDPPort(thread, factory, network, min_port, max_port, username, - password, emit_localhost_for_anyaddress); + password, emit_localhost_for_anyaddress, field_trials); if (!port->Init()) { delete port; port = nullptr; @@ -50,12 +56,13 @@ class TestUDPPort : public UDPPort { protected: TestUDPPort(rtc::Thread* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, uint16_t min_port, uint16_t max_port, - const std::string& username, - const std::string& password, - bool emit_localhost_for_anyaddress) + absl::string_view username, + absl::string_view password, + bool emit_localhost_for_anyaddress, + const webrtc::FieldTrialsView* field_trials) : UDPPort(thread, factory, network, @@ -63,7 +70,8 @@ class TestUDPPort : public UDPPort { max_port, username, password, - emit_localhost_for_anyaddress) {} + emit_localhost_for_anyaddress, + field_trials) {} }; // A FakePortAllocatorSession can be used with either a real or fake socket @@ -74,10 +82,11 @@ class FakePortAllocatorSession : public PortAllocatorSession { FakePortAllocatorSession(PortAllocator* 
allocator, rtc::Thread* network_thread, rtc::PacketSocketFactory* factory, - const std::string& content_name, + absl::string_view content_name, int component, - const std::string& ice_ufrag, - const std::string& ice_pwd) + absl::string_view ice_ufrag, + absl::string_view ice_pwd, + const webrtc::FieldTrialsView& field_trials) : PortAllocatorSession(content_name, component, ice_ufrag, @@ -96,7 +105,8 @@ class FakePortAllocatorSession : public PortAllocatorSession { port_(), port_config_count_(0), stun_servers_(allocator->stun_servers()), - turn_servers_(allocator->turn_servers()) { + turn_servers_(allocator->turn_servers()), + field_trials_(field_trials) { ipv4_network_.AddIP(rtc::IPAddress(INADDR_LOOPBACK)); ipv6_network_.AddIP(rtc::IPAddress(in6addr_loopback)); } @@ -112,7 +122,8 @@ class FakePortAllocatorSession : public PortAllocatorSession { ? ipv6_network_ : ipv4_network_; port_.reset(TestUDPPort::Create(network_thread_, factory_, &network, 0, 0, - username(), password(), false)); + username(), password(), false, + &field_trials_)); RTC_DCHECK(port_); port_->SubscribePortDestroyed( [this](PortInterface* port) { OnPortDestroyed(port); }); @@ -200,38 +211,29 @@ class FakePortAllocatorSession : public PortAllocatorSession { uint32_t candidate_filter_ = CF_ALL; int transport_info_update_count_ = 0; bool running_ = false; + const webrtc::FieldTrialsView& field_trials_; }; class FakePortAllocator : public cricket::PortAllocator { public: - // TODO(bugs.webrtc.org/13145): Require non-null `factory`. FakePortAllocator(rtc::Thread* network_thread, rtc::PacketSocketFactory* factory) - : network_thread_(network_thread), factory_(factory) { - if (factory_ == NULL) { - owned_factory_.reset(new rtc::BasicPacketSocketFactory( - network_thread_ ? network_thread_->socketserver() : nullptr)); - factory_ = owned_factory_.get(); - } + : FakePortAllocator(network_thread, factory, nullptr) {} - if (network_thread_ == nullptr) { - network_thread_ = rtc::Thread::Current(); - Initialize(); - return; - } - network_thread_->Invoke(RTC_FROM_HERE, [this] { Initialize(); }); - } + FakePortAllocator(rtc::Thread* network_thread, + std::unique_ptr factory) + : FakePortAllocator(network_thread, nullptr, std::move(factory)) {} void SetNetworkIgnoreMask(int network_ignore_mask) override {} cricket::PortAllocatorSession* CreateSessionInternal( - const std::string& content_name, + absl::string_view content_name, int component, - const std::string& ice_ufrag, - const std::string& ice_pwd) override { - return new FakePortAllocatorSession(this, network_thread_, factory_, - content_name, component, ice_ufrag, - ice_pwd); + absl::string_view ice_ufrag, + absl::string_view ice_pwd) override { + return new FakePortAllocatorSession( + this, network_thread_, factory_.get(), std::string(content_name), + component, std::string(ice_ufrag), std::string(ice_pwd), field_trials_); } bool initialized() const { return initialized_; } @@ -245,9 +247,22 @@ class FakePortAllocator : public cricket::PortAllocator { } private: + FakePortAllocator(rtc::Thread* network_thread, + rtc::PacketSocketFactory* factory, + std::unique_ptr owned_factory) + : network_thread_(network_thread), + factory_(std::move(owned_factory), factory) { + if (network_thread_ == nullptr) { + network_thread_ = rtc::Thread::Current(); + Initialize(); + return; + } + SendTask(network_thread_, [this] { Initialize(); }); + } + + webrtc::test::ScopedKeyValueConfig field_trials_; rtc::Thread* network_thread_; - rtc::PacketSocketFactory* factory_; - std::unique_ptr 
owned_factory_; + const webrtc::AlwaysValidPointerNoDefault factory_; bool mdns_obfuscation_enabled_ = false; }; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_agent_interface.h b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_agent_interface.h new file mode 100644 index 0000000000..30b6ade6e6 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_agent_interface.h @@ -0,0 +1,80 @@ +/* + * Copyright 2022 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef P2P_BASE_ICE_AGENT_INTERFACE_H_ +#define P2P_BASE_ICE_AGENT_INTERFACE_H_ + +#include "api/array_view.h" +#include "p2p/base/connection.h" +#include "p2p/base/ice_switch_reason.h" + +namespace cricket { + +// IceAgentInterface provides methods that allow an ICE controller to manipulate +// the connections available to a transport, and used by the transport to +// transfer data. +class IceAgentInterface { + public: + virtual ~IceAgentInterface() = default; + + // Get the time when the last ping was sent. + // This is only needed in some scenarios if the agent decides to ping on its + // own, eg. in some switchover scenarios. Otherwise the ICE controller could + // keep this state on its own. + // TODO(bugs.webrtc.org/14367): route extra pings through the ICE controller. + virtual int64_t GetLastPingSentMs() const = 0; + + // Get the ICE role of this ICE agent. + virtual IceRole GetIceRole() const = 0; + + // Called when a pingable connection first becomes available. + virtual void OnStartedPinging() = 0; + + // Update the state of all available connections. + virtual void UpdateConnectionStates() = 0; + + // Update the internal state of the ICE agent. An ICE controller should call + // this at the end of a sequence of actions to combine several mutations into + // a single state refresh. + // TODO(bugs.webrtc.org/14431): ICE agent state updates should be internal to + // the agent. If batching is necessary, use a more appropriate interface. + virtual void UpdateState() = 0; + + // Reset the given connections to a state of newly connected connections. + // - STATE_WRITE_INIT + // - receving = false + // - throw away all pending request + // - reset RttEstimate + // + // Keep the following unchanged: + // - connected + // - remote_candidate + // - statistics + // + // SignalStateChange will not be triggered. + virtual void ForgetLearnedStateForConnections( + rtc::ArrayView connections) = 0; + + // Send a STUN ping request for the given connection. + virtual void SendPingRequest(const Connection* connection) = 0; + + // Switch the transport to use the given connection. + virtual void SwitchSelectedConnection(const Connection* new_connection, + IceSwitchReason reason) = 0; + + // Prune away the given connections. Returns true if pruning is permitted and + // successfully performed. 
+ virtual bool PruneConnections( + rtc::ArrayView connections) = 0; +}; + +} // namespace cricket + +#endif // P2P_BASE_ICE_AGENT_INTERFACE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_controller_interface.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_controller_interface.cc index 6d9bb85343..9fb3b055f9 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_controller_interface.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_controller_interface.cc @@ -12,46 +12,16 @@ #include +#include "p2p/base/ice_switch_reason.h" + namespace cricket { -std::string IceControllerEvent::ToString() const { - std::string reason; - switch (type) { - case REMOTE_CANDIDATE_GENERATION_CHANGE: - reason = "remote candidate generation maybe changed"; - break; - case NETWORK_PREFERENCE_CHANGE: - reason = "network preference changed"; - break; - case NEW_CONNECTION_FROM_LOCAL_CANDIDATE: - reason = "new candidate pairs created from a new local candidate"; - break; - case NEW_CONNECTION_FROM_REMOTE_CANDIDATE: - reason = "new candidate pairs created from a new remote candidate"; - break; - case NEW_CONNECTION_FROM_UNKNOWN_REMOTE_ADDRESS: - reason = "a new candidate pair created from an unknown remote address"; - break; - case NOMINATION_ON_CONTROLLED_SIDE: - reason = "nomination on the controlled side"; - break; - case DATA_RECEIVED: - reason = "data received"; - break; - case CONNECT_STATE_CHANGE: - reason = "candidate pair state changed"; - break; - case SELECTED_CONNECTION_DESTROYED: - reason = "selected candidate pair destroyed"; - break; - case ICE_CONTROLLER_RECHECK: - reason = "ice-controller-request-recheck"; - break; - } +std::string IceRecheckEvent::ToString() const { + std::string str = IceSwitchReasonToString(reason); if (recheck_delay_ms) { - reason += " (after delay: " + std::to_string(recheck_delay_ms) + ")"; + str += " (after delay: " + std::to_string(recheck_delay_ms) + ")"; } - return reason; + return str; } } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_controller_interface.h b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_controller_interface.h index a33315a338..482043ef35 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_controller_interface.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_controller_interface.h @@ -15,36 +15,23 @@ #include #include +#include "absl/types/optional.h" #include "p2p/base/connection.h" +#include "p2p/base/ice_switch_reason.h" #include "p2p/base/ice_transport_internal.h" namespace cricket { struct IceFieldTrials; // Forward declaration to avoid circular dependency. -struct IceControllerEvent { - enum Type { - REMOTE_CANDIDATE_GENERATION_CHANGE, - NETWORK_PREFERENCE_CHANGE, - NEW_CONNECTION_FROM_LOCAL_CANDIDATE, - NEW_CONNECTION_FROM_REMOTE_CANDIDATE, - NEW_CONNECTION_FROM_UNKNOWN_REMOTE_ADDRESS, - NOMINATION_ON_CONTROLLED_SIDE, - DATA_RECEIVED, - CONNECT_STATE_CHANGE, - SELECTED_CONNECTION_DESTROYED, - // The ICE_CONTROLLER_RECHECK enum value lets an IceController request - // P2PTransportChannel to recheck a switch periodically without an event - // taking place. 
- ICE_CONTROLLER_RECHECK, - }; +struct IceRecheckEvent { + IceRecheckEvent(IceSwitchReason _reason, int _recheck_delay_ms) + : reason(_reason), recheck_delay_ms(_recheck_delay_ms) {} - IceControllerEvent(const Type& _type) // NOLINT: runtime/explicit - : type(_type) {} std::string ToString() const; - Type type; - int recheck_delay_ms = 0; + IceSwitchReason reason; + int recheck_delay_ms; }; // Defines the interface for a module that control @@ -78,7 +65,7 @@ class IceControllerInterface { absl::optional connection; // An optional recheck event for when a Switch() should be attempted again. - absl::optional recheck_event; + absl::optional recheck_event; // A vector with connection to run ForgetLearnedState on. std::vector connections_to_forget_state_on; @@ -134,13 +121,13 @@ class IceControllerInterface { virtual void MarkConnectionPinged(const Connection* con) = 0; // Check if we should switch to `connection`. - // This method is called for IceControllerEvent's that can switch directly + // This method is called for IceSwitchReasons that can switch directly // i.e without resorting. - virtual SwitchResult ShouldSwitchConnection(IceControllerEvent reason, + virtual SwitchResult ShouldSwitchConnection(IceSwitchReason reason, const Connection* connection) = 0; // Sort connections and check if we should switch. - virtual SwitchResult SortAndSwitchConnection(IceControllerEvent reason) = 0; + virtual SwitchResult SortAndSwitchConnection(IceSwitchReason reason) = 0; // Prune connections. virtual std::vector PruneConnections() = 0; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_switch_reason.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_switch_reason.cc new file mode 100644 index 0000000000..61f0fa7d5b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_switch_reason.cc @@ -0,0 +1,44 @@ +/* + * Copyright 2022 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "p2p/base/ice_switch_reason.h" + +#include + +namespace cricket { + +std::string IceSwitchReasonToString(IceSwitchReason reason) { + switch (reason) { + case IceSwitchReason::REMOTE_CANDIDATE_GENERATION_CHANGE: + return "remote candidate generation maybe changed"; + case IceSwitchReason::NETWORK_PREFERENCE_CHANGE: + return "network preference changed"; + case IceSwitchReason::NEW_CONNECTION_FROM_LOCAL_CANDIDATE: + return "new candidate pairs created from a new local candidate"; + case IceSwitchReason::NEW_CONNECTION_FROM_REMOTE_CANDIDATE: + return "new candidate pairs created from a new remote candidate"; + case IceSwitchReason::NEW_CONNECTION_FROM_UNKNOWN_REMOTE_ADDRESS: + return "a new candidate pair created from an unknown remote address"; + case IceSwitchReason::NOMINATION_ON_CONTROLLED_SIDE: + return "nomination on the controlled side"; + case IceSwitchReason::DATA_RECEIVED: + return "data received"; + case IceSwitchReason::CONNECT_STATE_CHANGE: + return "candidate pair state changed"; + case IceSwitchReason::SELECTED_CONNECTION_DESTROYED: + return "selected candidate pair destroyed"; + case IceSwitchReason::ICE_CONTROLLER_RECHECK: + return "ice-controller-request-recheck"; + default: + return "unknown"; + } +} + +} // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_switch_reason.h b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_switch_reason.h new file mode 100644 index 0000000000..2c4fe31884 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_switch_reason.h @@ -0,0 +1,38 @@ +/* + * Copyright 2022 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef P2P_BASE_ICE_SWITCH_REASON_H_ +#define P2P_BASE_ICE_SWITCH_REASON_H_ + +#include + +namespace cricket { + +enum class IceSwitchReason { + REMOTE_CANDIDATE_GENERATION_CHANGE, + NETWORK_PREFERENCE_CHANGE, + NEW_CONNECTION_FROM_LOCAL_CANDIDATE, + NEW_CONNECTION_FROM_REMOTE_CANDIDATE, + NEW_CONNECTION_FROM_UNKNOWN_REMOTE_ADDRESS, + NOMINATION_ON_CONTROLLED_SIDE, + DATA_RECEIVED, + CONNECT_STATE_CHANGE, + SELECTED_CONNECTION_DESTROYED, + // The ICE_CONTROLLER_RECHECK enum value lets an IceController request + // P2PTransportChannel to recheck a switch periodically without an event + // taking place. 
+ ICE_CONTROLLER_RECHECK, +}; + +std::string IceSwitchReasonToString(IceSwitchReason reason); + +} // namespace cricket + +#endif // P2P_BASE_ICE_SWITCH_REASON_H_ diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.cc index 104a95b5af..fab6f2037a 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.cc @@ -10,6 +10,7 @@ #include "p2p/base/ice_transport_internal.h" +#include "absl/strings/string_view.h" #include "p2p/base/p2p_constants.h" namespace cricket { @@ -126,13 +127,13 @@ IceTransportInternal::IceTransportInternal() = default; IceTransportInternal::~IceTransportInternal() = default; -void IceTransportInternal::SetIceCredentials(const std::string& ice_ufrag, - const std::string& ice_pwd) { +void IceTransportInternal::SetIceCredentials(absl::string_view ice_ufrag, + absl::string_view ice_pwd) { SetIceParameters(IceParameters(ice_ufrag, ice_pwd, false)); } -void IceTransportInternal::SetRemoteIceCredentials(const std::string& ice_ufrag, - const std::string& ice_pwd) { +void IceTransportInternal::SetRemoteIceCredentials(absl::string_view ice_ufrag, + absl::string_view ice_pwd) { SetRemoteIceParameters(IceParameters(ice_ufrag, ice_pwd, false)); } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.h b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.h index 20730e1cfd..3a93ab0484 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.h @@ -16,6 +16,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/candidate.h" #include "api/rtc_error.h" @@ -38,6 +39,19 @@ struct IceTransportStats { // Initially 0 and 1 once the first candidate pair has been selected. // The counter is increase also when "unselecting" a connection. uint32_t selected_candidate_pair_changes = 0; + + // Bytes/packets sent/received. + // note: Is not the same as sum(connection_infos.bytes_sent) + // as connections are created and destroyed while the ICE transport + // is alive. + uint64_t bytes_sent = 0; + uint64_t bytes_received = 0; + uint64_t packets_sent = 0; + uint64_t packets_received = 0; + + IceRole ice_role = ICEROLE_UNKNOWN; + std::string ice_local_username_fragment; + webrtc::IceTransportState ice_state = webrtc::IceTransportState::kNew; }; typedef std::vector Candidates; @@ -246,11 +260,11 @@ class RTC_EXPORT IceTransportInternal : public rtc::PacketTransportInternal { // remoting/protocol/libjingle_transport_factory.cc virtual void SetIceProtocolType(IceProtocolType type) {} - virtual void SetIceCredentials(const std::string& ice_ufrag, - const std::string& ice_pwd); + virtual void SetIceCredentials(absl::string_view ice_ufrag, + absl::string_view ice_pwd); - virtual void SetRemoteIceCredentials(const std::string& ice_ufrag, - const std::string& ice_pwd); + virtual void SetRemoteIceCredentials(absl::string_view ice_ufrag, + absl::string_view ice_pwd); // The ufrag and pwd in `ice_params` must be set // before candidate gathering can start. 
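The new ice_switch_reason.{h,cc} pair above factors the old IceControllerEvent::Type out into a standalone IceSwitchReason enum, while IceRecheckEvent (in ice_controller_interface.h earlier in this patch) now carries only the reason plus the recheck delay. A small usage sketch of how the two pieces compose, based only on the code shown in this patch; it is hypothetical and would build only inside the WebRTC source tree.

// Hypothetical sketch, not part of the patch: exercises IceRecheckEvent and
// IceSwitchReasonToString() as introduced above.
#include <iostream>

#include "p2p/base/ice_controller_interface.h"  // cricket::IceRecheckEvent
#include "p2p/base/ice_switch_reason.h"         // cricket::IceSwitchReason

int main() {
  // The reason and the recheck delay are now separate: the enum says *why* a
  // switch should be (re)considered, the event says *when* to recheck.
  cricket::IceRecheckEvent recheck(
      cricket::IceSwitchReason::ICE_CONTROLLER_RECHECK,
      /*recheck_delay_ms=*/2000);

  // Per the ToString() implementation in this patch, this prints:
  //   ice-controller-request-recheck (after delay: 2000)
  std::cout << recheck.ToString() << std::endl;

  // IceSwitchReasonToString() can also be used on its own; this prints:
  //   data received
  std::cout << cricket::IceSwitchReasonToString(
                   cricket::IceSwitchReason::DATA_RECEIVED)
            << std::endl;
  return 0;
}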
diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/mock_active_ice_controller.h b/TMessagesProj/jni/voip/webrtc/p2p/base/mock_active_ice_controller.h new file mode 100644 index 0000000000..908967bd1d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/mock_active_ice_controller.h @@ -0,0 +1,89 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef P2P_BASE_MOCK_ACTIVE_ICE_CONTROLLER_H_ +#define P2P_BASE_MOCK_ACTIVE_ICE_CONTROLLER_H_ + +#include + +#include "p2p/base/active_ice_controller_factory_interface.h" +#include "p2p/base/active_ice_controller_interface.h" +#include "test/gmock.h" + +namespace cricket { + +class MockActiveIceController : public cricket::ActiveIceControllerInterface { + public: + explicit MockActiveIceController( + const cricket::ActiveIceControllerFactoryArgs& args) {} + ~MockActiveIceController() override = default; + + MOCK_METHOD(void, SetIceConfig, (const cricket::IceConfig&), (override)); + MOCK_METHOD(void, + OnConnectionAdded, + (const cricket::Connection*), + (override)); + MOCK_METHOD(void, + OnConnectionSwitched, + (const cricket::Connection*), + (override)); + MOCK_METHOD(void, + OnConnectionDestroyed, + (const cricket::Connection*), + (override)); + MOCK_METHOD(void, + OnConnectionPinged, + (const cricket::Connection*), + (override)); + MOCK_METHOD(void, + OnConnectionUpdated, + (const cricket::Connection*), + (override)); + MOCK_METHOD(bool, + GetUseCandidateAttribute, + (const cricket::Connection*, + cricket::NominationMode, + cricket::IceMode), + (const, override)); + MOCK_METHOD(void, + OnSortAndSwitchRequest, + (cricket::IceSwitchReason), + (override)); + MOCK_METHOD(void, + OnImmediateSortAndSwitchRequest, + (cricket::IceSwitchReason), + (override)); + MOCK_METHOD(bool, + OnImmediateSwitchRequest, + (cricket::IceSwitchReason, const cricket::Connection*), + (override)); + MOCK_METHOD(const cricket::Connection*, + FindNextPingableConnection, + (), + (override)); +}; + +class MockActiveIceControllerFactory + : public cricket::ActiveIceControllerFactoryInterface { + public: + ~MockActiveIceControllerFactory() override = default; + + std::unique_ptr Create( + const cricket::ActiveIceControllerFactoryArgs& args) { + RecordActiveIceControllerCreated(); + return std::make_unique(args); + } + + MOCK_METHOD(void, RecordActiveIceControllerCreated, ()); +}; + +} // namespace cricket + +#endif // P2P_BASE_MOCK_ACTIVE_ICE_CONTROLLER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/mock_async_resolver.h b/TMessagesProj/jni/voip/webrtc/p2p/base/mock_async_resolver.h index 8bc0eb9cff..44164716b2 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/mock_async_resolver.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/mock_async_resolver.h @@ -30,6 +30,7 @@ class MockAsyncResolver : public AsyncResolverInterface { ~MockAsyncResolver() = default; MOCK_METHOD(void, Start, (const rtc::SocketAddress&), (override)); + MOCK_METHOD(void, Start, (const rtc::SocketAddress&, int family), (override)); MOCK_METHOD(bool, GetResolvedAddress, (int family, SocketAddress* addr), diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/mock_ice_agent.h b/TMessagesProj/jni/voip/webrtc/p2p/base/mock_ice_agent.h new file 
mode 100644 index 0000000000..a1c0ebffbf --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/mock_ice_agent.h @@ -0,0 +1,50 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef P2P_BASE_MOCK_ICE_AGENT_H_ +#define P2P_BASE_MOCK_ICE_AGENT_H_ + +#include + +#include "p2p/base/connection.h" +#include "p2p/base/ice_agent_interface.h" +#include "p2p/base/ice_switch_reason.h" +#include "p2p/base/transport_description.h" +#include "test/gmock.h" + +namespace cricket { + +class MockIceAgent : public IceAgentInterface { + public: + ~MockIceAgent() override = default; + + MOCK_METHOD(int64_t, GetLastPingSentMs, (), (override, const)); + MOCK_METHOD(IceRole, GetIceRole, (), (override, const)); + MOCK_METHOD(void, OnStartedPinging, (), (override)); + MOCK_METHOD(void, UpdateConnectionStates, (), (override)); + MOCK_METHOD(void, UpdateState, (), (override)); + MOCK_METHOD(void, + ForgetLearnedStateForConnections, + (rtc::ArrayView), + (override)); + MOCK_METHOD(void, SendPingRequest, (const Connection*), (override)); + MOCK_METHOD(void, + SwitchSelectedConnection, + (const Connection*, IceSwitchReason), + (override)); + MOCK_METHOD(bool, + PruneConnections, + (rtc::ArrayView), + (override)); +}; + +} // namespace cricket + +#endif // P2P_BASE_MOCK_ICE_AGENT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/mock_ice_controller.h b/TMessagesProj/jni/voip/webrtc/p2p/base/mock_ice_controller.h new file mode 100644 index 0000000000..bde9254e7d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/mock_ice_controller.h @@ -0,0 +1,90 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef P2P_BASE_MOCK_ICE_CONTROLLER_H_ +#define P2P_BASE_MOCK_ICE_CONTROLLER_H_ + +#include +#include + +#include "p2p/base/ice_controller_factory_interface.h" +#include "p2p/base/ice_controller_interface.h" +#include "test/gmock.h" + +namespace cricket { + +class MockIceController : public cricket::IceControllerInterface { + public: + explicit MockIceController(const cricket::IceControllerFactoryArgs& args) {} + ~MockIceController() override = default; + + MOCK_METHOD(void, SetIceConfig, (const cricket::IceConfig&), (override)); + MOCK_METHOD(void, + SetSelectedConnection, + (const cricket::Connection*), + (override)); + MOCK_METHOD(void, AddConnection, (const cricket::Connection*), (override)); + MOCK_METHOD(void, + OnConnectionDestroyed, + (const cricket::Connection*), + (override)); + MOCK_METHOD(rtc::ArrayView, + connections, + (), + (const, override)); + MOCK_METHOD(bool, HasPingableConnection, (), (const, override)); + MOCK_METHOD(cricket::IceControllerInterface::PingResult, + SelectConnectionToPing, + (int64_t), + (override)); + MOCK_METHOD(bool, + GetUseCandidateAttr, + (const cricket::Connection*, + cricket::NominationMode, + cricket::IceMode), + (const, override)); + MOCK_METHOD(const cricket::Connection*, + FindNextPingableConnection, + (), + (override)); + MOCK_METHOD(void, + MarkConnectionPinged, + (const cricket::Connection*), + (override)); + MOCK_METHOD(cricket::IceControllerInterface::SwitchResult, + ShouldSwitchConnection, + (cricket::IceSwitchReason, const cricket::Connection*), + (override)); + MOCK_METHOD(cricket::IceControllerInterface::SwitchResult, + SortAndSwitchConnection, + (cricket::IceSwitchReason), + (override)); + MOCK_METHOD(std::vector, + PruneConnections, + (), + (override)); +}; + +class MockIceControllerFactory : public cricket::IceControllerFactoryInterface { + public: + ~MockIceControllerFactory() override = default; + + std::unique_ptr Create( + const cricket::IceControllerFactoryArgs& args) override { + RecordIceControllerCreated(); + return std::make_unique(args); + } + + MOCK_METHOD(void, RecordIceControllerCreated, ()); +}; + +} // namespace cricket + +#endif // P2P_BASE_MOCK_ICE_CONTROLLER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc index f6a3858a6c..ad365d955c 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc @@ -22,15 +22,18 @@ #include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "absl/strings/match.h" +#include "absl/strings/string_view.h" #include "api/async_dns_resolver.h" #include "api/candidate.h" -#include "api/task_queue/queued_task.h" +#include "api/field_trials_view.h" +#include "api/units/time_delta.h" #include "logging/rtc_event_log/ice_logger.h" #include "p2p/base/basic_async_resolver_factory.h" #include "p2p/base/basic_ice_controller.h" #include "p2p/base/connection.h" #include "p2p/base/connection_info.h" #include "p2p/base/port.h" +#include "p2p/base/wrapping_active_ice_controller.h" #include "rtc_base/checks.h" #include "rtc_base/crc32.h" #include "rtc_base/experiments/struct_parameters_parser.h" @@ -40,11 +43,9 @@ #include "rtc_base/network.h" #include "rtc_base/network_constants.h" #include "rtc_base/string_encode.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" -#include 
"system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" namespace { @@ -60,50 +61,28 @@ cricket::PortInterface::CandidateOrigin GetOrigin( return cricket::PortInterface::ORIGIN_OTHER_PORT; } -uint32_t GetWeakPingIntervalInFieldTrial() { - uint32_t weak_ping_interval = ::strtoul( - webrtc::field_trial::FindFullName("WebRTC-StunInterPacketDelay").c_str(), - nullptr, 10); - if (weak_ping_interval) { - return static_cast(weak_ping_interval); +uint32_t GetWeakPingIntervalInFieldTrial( + const webrtc::FieldTrialsView* field_trials) { + if (field_trials != nullptr) { + uint32_t weak_ping_interval = + ::strtoul(field_trials->Lookup("WebRTC-StunInterPacketDelay").c_str(), + nullptr, 10); + if (weak_ping_interval) { + return static_cast(weak_ping_interval); + } } return cricket::WEAK_PING_INTERVAL; } -rtc::AdapterType GuessAdapterTypeFromNetworkCost(int network_cost) { - // The current network costs have been unchanged since they were added - // to webrtc. If they ever were to change we would need to reconsider - // this method. - switch (network_cost) { - case rtc::kNetworkCostMin: - return rtc::ADAPTER_TYPE_ETHERNET; - case rtc::kNetworkCostLow: - return rtc::ADAPTER_TYPE_WIFI; - case rtc::kNetworkCostCellular: - return rtc::ADAPTER_TYPE_CELLULAR; - case rtc::kNetworkCostCellular2G: - return rtc::ADAPTER_TYPE_CELLULAR_2G; - case rtc::kNetworkCostCellular3G: - return rtc::ADAPTER_TYPE_CELLULAR_3G; - case rtc::kNetworkCostCellular4G: - return rtc::ADAPTER_TYPE_CELLULAR_4G; - case rtc::kNetworkCostCellular5G: - return rtc::ADAPTER_TYPE_CELLULAR_5G; - case rtc::kNetworkCostUnknown: - return rtc::ADAPTER_TYPE_UNKNOWN; - case rtc::kNetworkCostMax: - return rtc::ADAPTER_TYPE_ANY; - } - return rtc::ADAPTER_TYPE_UNKNOWN; -} - rtc::RouteEndpoint CreateRouteEndpointFromCandidate( bool local, const cricket::Candidate& candidate, bool uses_turn) { auto adapter_type = candidate.network_type(); if (!local && adapter_type == rtc::ADAPTER_TYPE_UNKNOWN) { - adapter_type = GuessAdapterTypeFromNetworkCost(candidate.network_cost()); + bool vpn; + std::tie(adapter_type, vpn) = + rtc::Network::GuessAdapterFromNetworkCost(candidate.network_cost()); } // TODO(bugs.webrtc.org/9446) : Rewrite if information about remote network @@ -115,18 +94,27 @@ rtc::RouteEndpoint CreateRouteEndpointFromCandidate( uses_turn); } +bool UseActiveIceControllerFieldTrialEnabled( + const webrtc::FieldTrialsView* field_trials) { + // Feature to refactor ICE controller and enable active ICE controllers. + // Field trial key reserved in bugs.webrtc.org/14367 + return field_trials && + field_trials->IsEnabled("WebRTC-UseActiveIceController"); +} + +using ::webrtc::RTCError; +using ::webrtc::RTCErrorType; +using ::webrtc::SafeTask; +using ::webrtc::TimeDelta; + } // unnamed namespace namespace cricket { -using webrtc::RTCError; -using webrtc::RTCErrorType; -using webrtc::ToQueuedTask; - -bool IceCredentialsChanged(const std::string& old_ufrag, - const std::string& old_pwd, - const std::string& new_ufrag, - const std::string& new_pwd) { +bool IceCredentialsChanged(absl::string_view old_ufrag, + absl::string_view old_pwd, + absl::string_view new_ufrag, + absl::string_view new_pwd) { // The standard (RFC 5245 Section 9.1.1.1) says that ICE restarts MUST change // both the ufrag and password. However, section 9.2.1.1 says changing the // ufrag OR password indicates an ICE restart. 
So, to keep compatibility with @@ -134,41 +122,53 @@ bool IceCredentialsChanged(const std::string& old_ufrag, return (old_ufrag != new_ufrag) || (old_pwd != new_pwd); } -// static std::unique_ptr P2PTransportChannel::Create( - const std::string& transport_name, + absl::string_view transport_name, int component, - PortAllocator* allocator, - webrtc::AsyncDnsResolverFactoryInterface* async_dns_resolver_factory, - webrtc::RtcEventLog* event_log, - IceControllerFactoryInterface* ice_controller_factory) { - return absl::WrapUnique(new P2PTransportChannel( - transport_name, component, allocator, async_dns_resolver_factory, - /* owned_dns_resolver_factory= */ nullptr, event_log, - ice_controller_factory)); + webrtc::IceTransportInit init) { + if (init.async_resolver_factory()) { + return absl::WrapUnique(new P2PTransportChannel( + transport_name, component, init.port_allocator(), nullptr, + std::make_unique( + init.async_resolver_factory()), + init.event_log(), init.ice_controller_factory(), + init.active_ice_controller_factory(), init.field_trials())); + } else { + return absl::WrapUnique(new P2PTransportChannel( + transport_name, component, init.port_allocator(), + init.async_dns_resolver_factory(), nullptr, init.event_log(), + init.ice_controller_factory(), init.active_ice_controller_factory(), + init.field_trials())); + } } -P2PTransportChannel::P2PTransportChannel(const std::string& transport_name, - int component, - PortAllocator* allocator) +P2PTransportChannel::P2PTransportChannel( + absl::string_view transport_name, + int component, + PortAllocator* allocator, + const webrtc::FieldTrialsView* field_trials) : P2PTransportChannel(transport_name, component, allocator, /* async_dns_resolver_factory= */ nullptr, /* owned_dns_resolver_factory= */ nullptr, /* event_log= */ nullptr, - /* ice_controller_factory= */ nullptr) {} + /* ice_controller_factory= */ nullptr, + /* active_ice_controller_factory= */ nullptr, + field_trials) {} // Private constructor, called from Create() P2PTransportChannel::P2PTransportChannel( - const std::string& transport_name, + absl::string_view transport_name, int component, PortAllocator* allocator, webrtc::AsyncDnsResolverFactoryInterface* async_dns_resolver_factory, std::unique_ptr owned_dns_resolver_factory, webrtc::RtcEventLog* event_log, - IceControllerFactoryInterface* ice_controller_factory) + IceControllerFactoryInterface* ice_controller_factory, + ActiveIceControllerFactoryInterface* active_ice_controller_factory, + const webrtc::FieldTrialsView* field_trials) : transport_name_(transport_name), component_(component), allocator_(allocator), @@ -186,6 +186,7 @@ P2PTransportChannel::P2PTransportChannel( ice_role_(ICEROLE_UNKNOWN), tiebreaker_(0), gathering_state_(kIceGatheringNew), + weak_ping_interval_(GetWeakPingIntervalInFieldTrial(field_trials)), config_(RECEIVING_TIMEOUT, BACKUP_CONNECTION_PING_INTERVAL, GATHER_ONCE /* continual_gathering_policy */, @@ -196,7 +197,6 @@ P2PTransportChannel::P2PTransportChannel( RECEIVING_SWITCHING_DELAY) { TRACE_EVENT0("webrtc", "P2PTransportChannel::P2PTransportChannel"); RTC_DCHECK(allocator_ != nullptr); - weak_ping_interval_ = GetWeakPingIntervalInFieldTrial(); // Validate IceConfig even for mostly built-in constant default values in case // we change them. 
RTC_DCHECK(ValidateIceConfig(config_).ok()); @@ -212,48 +212,30 @@ P2PTransportChannel::P2PTransportChannel( this, &P2PTransportChannel::OnCandidateFilterChanged); ice_event_log_.set_event_log(event_log); + ParseFieldTrials(field_trials); + IceControllerFactoryArgs args{ [this] { return GetState(); }, [this] { return GetIceRole(); }, [this](const Connection* connection) { - // TODO(webrtc:10647/jonaso): Figure out a way to remove friendship - // between P2PTransportChannel and Connection. return IsPortPruned(connection->port()) || IsRemoteCandidatePruned(connection->remote_candidate()); }, - &field_trials_, - webrtc::field_trial::FindFullName("WebRTC-IceControllerFieldTrials")}; - if (ice_controller_factory != nullptr) { - ice_controller_ = ice_controller_factory->Create(args); - } else { - ice_controller_ = std::make_unique(args); - } + &ice_field_trials_, + field_trials ? field_trials->Lookup("WebRTC-IceControllerFieldTrials") + : ""}; + ice_adapter_ = std::make_unique( + args, ice_controller_factory, active_ice_controller_factory, field_trials, + /* transport= */ this); } -// Public constructor, exposed for backwards compatibility. -// Deprecated. -P2PTransportChannel::P2PTransportChannel( - const std::string& transport_name, - int component, - PortAllocator* allocator, - webrtc::AsyncResolverFactory* async_resolver_factory, - webrtc::RtcEventLog* event_log, - IceControllerFactoryInterface* ice_controller_factory) - : P2PTransportChannel( - transport_name, - component, - allocator, - nullptr, - std::make_unique( - async_resolver_factory), - event_log, - ice_controller_factory) {} - P2PTransportChannel::~P2PTransportChannel() { TRACE_EVENT0("webrtc", "P2PTransportChannel::~P2PTransportChannel"); RTC_DCHECK_RUN_ON(network_thread_); std::vector copy(connections().begin(), connections().end()); - for (Connection* con : copy) { - con->Destroy(); + for (Connection* connection : copy) { + connection->SignalDestroyed.disconnect(this); + RemoveConnection(connection); + connection->Destroy(); } resolvers_.clear(); } @@ -289,7 +271,6 @@ void P2PTransportChannel::AddAllocatorSession( void P2PTransportChannel::AddConnection(Connection* connection) { RTC_DCHECK_RUN_ON(network_thread_); - connection->set_remote_ice_mode(remote_ice_mode_); connection->set_receiving_timeout(config_.receiving_timeout); connection->set_unwritable_timeout(config_.ice_unwritable_timeout); connection->set_unwritable_min_checks(config_.ice_unwritable_min_checks); @@ -307,29 +288,33 @@ void P2PTransportChannel::AddConnection(Connection* connection) { had_connection_ = true; connection->set_ice_event_log(&ice_event_log_); - connection->SetIceFieldTrials(&field_trials_); + connection->SetIceFieldTrials(&ice_field_trials_); LogCandidatePairConfig(connection, webrtc::IceCandidatePairConfigType::kAdded); - ice_controller_->AddConnection(connection); + connections_.push_back(connection); + ice_adapter_->OnConnectionAdded(connection); } +// TODO(bugs.webrtc.org/14367) remove once refactor lands. bool P2PTransportChannel::MaybeSwitchSelectedConnection( - Connection* new_connection, - IceControllerEvent reason) { + const Connection* new_connection, + IceSwitchReason reason) { RTC_DCHECK_RUN_ON(network_thread_); return MaybeSwitchSelectedConnection( - reason, ice_controller_->ShouldSwitchConnection(reason, new_connection)); + reason, + ice_adapter_->LegacyShouldSwitchConnection(reason, new_connection)); } +// TODO(bugs.webrtc.org/14367) remove once refactor lands. 
bool P2PTransportChannel::MaybeSwitchSelectedConnection( - IceControllerEvent reason, + IceSwitchReason reason, IceControllerInterface::SwitchResult result) { RTC_DCHECK_RUN_ON(network_thread_); if (result.connection.has_value()) { RTC_LOG(LS_INFO) << "Switching selected connection due to: " - << reason.ToString(); + << IceSwitchReasonToString(reason); SwitchSelectedConnection(FromIceController(*result.connection), reason); } @@ -339,11 +324,11 @@ bool P2PTransportChannel::MaybeSwitchSelectedConnection( // currently selected connection. So we need to re-check whether it needs // to be switched at a later time. network_thread_->PostDelayedTask( - ToQueuedTask(task_safety_, - [this, recheck = *result.recheck_event]() { - SortConnectionsAndUpdateState(recheck); - }), - result.recheck_event->recheck_delay_ms); + SafeTask(task_safety_.flag(), + [this, reason = result.recheck_event->reason]() { + SortConnectionsAndUpdateState(reason); + }), + TimeDelta::Millis(result.recheck_event->recheck_delay_ms)); } for (const auto* con : result.connections_to_forget_state_on) { @@ -353,6 +338,13 @@ bool P2PTransportChannel::MaybeSwitchSelectedConnection( return result.connection.has_value(); } +void P2PTransportChannel::ForgetLearnedStateForConnections( + rtc::ArrayView connections) { + for (const Connection* con : connections) { + FromIceController(con)->ForgetLearnedState(); + } +} + void P2PTransportChannel::SetIceRole(IceRole ice_role) { RTC_DCHECK_RUN_ON(network_thread_); if (ice_role_ != ice_role) { @@ -552,8 +544,8 @@ void P2PTransportChannel::SetRemoteIceParameters( ice_params, static_cast(remote_ice_parameters_.size() - 1)); } // Updating the remote ICE candidate generation could change the sort order. - RequestSortAndStateUpdate( - IceControllerEvent::REMOTE_CANDIDATE_GENERATION_CHANGE); + ice_adapter_->OnSortAndSwitchRequest( + IceSwitchReason::REMOTE_CANDIDATE_GENERATION_CHANGE); } void P2PTransportChannel::SetRemoteIceMode(IceMode mode) { @@ -707,7 +699,8 @@ void P2PTransportChannel::SetIceConfig(const IceConfig& config) { if (config_.network_preference != config.network_preference) { config_.network_preference = config.network_preference; - RequestSortAndStateUpdate(IceControllerEvent::NETWORK_PREFERENCE_CHANGE); + ice_adapter_->OnSortAndSwitchRequest( + IceSwitchReason::NETWORK_PREFERENCE_CHANGE); RTC_LOG(LS_INFO) << "Set network preference to " << (config_.network_preference.has_value() ? 
config_.network_preference.value() @@ -725,83 +718,118 @@ void P2PTransportChannel::SetIceConfig(const IceConfig& config) { << config.stun_keepalive_interval_or_default(); } - if (webrtc::field_trial::IsEnabled("WebRTC-ExtraICEPing")) { + webrtc::BasicRegatheringController::Config regathering_config; + regathering_config.regather_on_failed_networks_interval = + config_.regather_on_failed_networks_interval_or_default(); + regathering_controller_->SetConfig(regathering_config); + + config_.vpn_preference = config.vpn_preference; + allocator_->SetVpnPreference(config_.vpn_preference); + + ice_adapter_->SetIceConfig(config_); + + RTC_DCHECK(ValidateIceConfig(config_).ok()); +} + +void P2PTransportChannel::ParseFieldTrials( + const webrtc::FieldTrialsView* field_trials) { + if (field_trials == nullptr) { + return; + } + + if (field_trials->IsEnabled("WebRTC-ExtraICEPing")) { RTC_LOG(LS_INFO) << "Set WebRTC-ExtraICEPing: Enabled"; } - if (webrtc::field_trial::IsEnabled("WebRTC-TurnAddMultiMapping")) { + if (field_trials->IsEnabled("WebRTC-TurnAddMultiMapping")) { RTC_LOG(LS_INFO) << "Set WebRTC-TurnAddMultiMapping: Enabled"; } webrtc::StructParametersParser::Create( // go/skylift-light "skip_relay_to_non_relay_connections", - &field_trials_.skip_relay_to_non_relay_connections, + &ice_field_trials_.skip_relay_to_non_relay_connections, // Limiting pings sent. - "max_outstanding_pings", &field_trials_.max_outstanding_pings, + "max_outstanding_pings", &ice_field_trials_.max_outstanding_pings, // Delay initial selection of connection. - "initial_select_dampening", &field_trials_.initial_select_dampening, + "initial_select_dampening", &ice_field_trials_.initial_select_dampening, // Delay initial selection of connections, that are receiving. "initial_select_dampening_ping_received", - &field_trials_.initial_select_dampening_ping_received, + &ice_field_trials_.initial_select_dampening_ping_received, // Reply that we support goog ping. - "announce_goog_ping", &field_trials_.announce_goog_ping, + "announce_goog_ping", &ice_field_trials_.announce_goog_ping, // Use goog ping if remote support it. - "enable_goog_ping", &field_trials_.enable_goog_ping, + "enable_goog_ping", &ice_field_trials_.enable_goog_ping, // How fast does a RTT sample decay. - "rtt_estimate_halftime_ms", &field_trials_.rtt_estimate_halftime_ms, + "rtt_estimate_halftime_ms", &ice_field_trials_.rtt_estimate_halftime_ms, // Make sure that nomination reaching ICE controlled asap. "send_ping_on_switch_ice_controlling", - &field_trials_.send_ping_on_switch_ice_controlling, + &ice_field_trials_.send_ping_on_switch_ice_controlling, // Make sure that nomination reaching ICE controlled asap. "send_ping_on_selected_ice_controlling", - &field_trials_.send_ping_on_selected_ice_controlling, + &ice_field_trials_.send_ping_on_selected_ice_controlling, // Reply to nomination ASAP. "send_ping_on_nomination_ice_controlled", - &field_trials_.send_ping_on_nomination_ice_controlled, + &ice_field_trials_.send_ping_on_nomination_ice_controlled, // Allow connections to live untouched longer that 30s. - "dead_connection_timeout_ms", &field_trials_.dead_connection_timeout_ms, + "dead_connection_timeout_ms", + &ice_field_trials_.dead_connection_timeout_ms, // Stop gathering on strongly connected. 
"stop_gather_on_strongly_connected", - &field_trials_.stop_gather_on_strongly_connected) - ->Parse(webrtc::field_trial::FindFullName("WebRTC-IceFieldTrials")); + &ice_field_trials_.stop_gather_on_strongly_connected) + ->Parse(field_trials->Lookup("WebRTC-IceFieldTrials")); - if (field_trials_.dead_connection_timeout_ms < 30000) { + if (ice_field_trials_.dead_connection_timeout_ms < 30000) { RTC_LOG(LS_WARNING) << "dead_connection_timeout_ms set to " - << field_trials_.dead_connection_timeout_ms + << ice_field_trials_.dead_connection_timeout_ms << " increasing it to 30000"; - field_trials_.dead_connection_timeout_ms = 30000; + ice_field_trials_.dead_connection_timeout_ms = 30000; } - if (field_trials_.skip_relay_to_non_relay_connections) { + if (ice_field_trials_.skip_relay_to_non_relay_connections) { RTC_LOG(LS_INFO) << "Set skip_relay_to_non_relay_connections"; } - if (field_trials_.max_outstanding_pings.has_value()) { + if (ice_field_trials_.max_outstanding_pings.has_value()) { RTC_LOG(LS_INFO) << "Set max_outstanding_pings: " - << *field_trials_.max_outstanding_pings; + << *ice_field_trials_.max_outstanding_pings; } - if (field_trials_.initial_select_dampening.has_value()) { + if (ice_field_trials_.initial_select_dampening.has_value()) { RTC_LOG(LS_INFO) << "Set initial_select_dampening: " - << *field_trials_.initial_select_dampening; + << *ice_field_trials_.initial_select_dampening; } - if (field_trials_.initial_select_dampening_ping_received.has_value()) { - RTC_LOG(LS_INFO) << "Set initial_select_dampening_ping_received: " - << *field_trials_.initial_select_dampening_ping_received; + if (ice_field_trials_.initial_select_dampening_ping_received.has_value()) { + RTC_LOG(LS_INFO) + << "Set initial_select_dampening_ping_received: " + << *ice_field_trials_.initial_select_dampening_ping_received; } - webrtc::BasicRegatheringController::Config regathering_config; - regathering_config.regather_on_failed_networks_interval = - config_.regather_on_failed_networks_interval_or_default(); - regathering_controller_->SetConfig(regathering_config); + // DSCP override, allow user to specify (any) int value + // that will be used for tagging all packets. 
+ webrtc::StructParametersParser::Create("override_dscp", + &ice_field_trials_.override_dscp) + ->Parse(field_trials->Lookup("WebRTC-DscpFieldTrial")); - config_.vpn_preference = config.vpn_preference; - allocator_->SetVpnPreference(config_.vpn_preference); + if (ice_field_trials_.override_dscp) { + SetOption(rtc::Socket::OPT_DSCP, *ice_field_trials_.override_dscp); + } + + std::string field_trial_string = + field_trials->Lookup("WebRTC-SetSocketReceiveBuffer"); + int receive_buffer_size_kb = 0; + sscanf(field_trial_string.c_str(), "Enabled-%d", &receive_buffer_size_kb); + if (receive_buffer_size_kb > 0) { + RTC_LOG(LS_INFO) << "Set WebRTC-SetSocketReceiveBuffer: Enabled and set to " + << receive_buffer_size_kb << "kb"; + SetOption(rtc::Socket::OPT_RCVBUF, receive_buffer_size_kb * 1024); + } - ice_controller_->SetIceConfig(config_); + ice_field_trials_.piggyback_ice_check_acknowledgement = + field_trials->IsEnabled("WebRTC-PiggybackIceCheckAcknowledgement"); - RTC_DCHECK(ValidateIceConfig(config_).ok()); + ice_field_trials_.extra_ice_ping = + field_trials->IsEnabled("WebRTC-ExtraICEPing"); } const IceConfig& P2PTransportChannel::config() const { @@ -815,7 +843,7 @@ const IceConfig& P2PTransportChannel::config() const { RTCError P2PTransportChannel::ValidateIceConfig(const IceConfig& config) { if (config.ice_check_interval_strong_connectivity_or_default() < config.ice_check_interval_weak_connectivity.value_or( - GetWeakPingIntervalInFieldTrial())) { + GetWeakPingIntervalInFieldTrial(nullptr))) { return RTCError(RTCErrorType::INVALID_PARAMETER, "Ping interval of candidate pairs is shorter when ICE is " "strongly connected than that when ICE is weakly " @@ -869,6 +897,7 @@ int P2PTransportChannel::check_receiving_interval() const { void P2PTransportChannel::MaybeStartGathering() { RTC_DCHECK_RUN_ON(network_thread_); + // TODO(bugs.webrtc.org/14605): ensure tie_breaker_ is set. if (ice_parameters_.ufrag.empty() || ice_parameters_.pwd.empty()) { RTC_LOG(LS_ERROR) << "Cannot gather candidates because ICE parameters are empty" @@ -913,6 +942,7 @@ void P2PTransportChannel::MaybeStartGathering() { ice_parameters_.ufrag, ice_parameters_.pwd); if (pooled_session) { + pooled_session->set_ice_tiebreaker(tiebreaker_); AddAllocatorSession(std::move(pooled_session)); PortAllocatorSession* raw_pooled_session = allocator_sessions_.back().get(); @@ -929,6 +959,7 @@ void P2PTransportChannel::MaybeStartGathering() { AddAllocatorSession(allocator_->CreateSession( transport_name(), component(), ice_parameters_.ufrag, ice_parameters_.pwd)); + allocator_sessions_.back()->set_ice_tiebreaker(tiebreaker_); allocator_sessions_.back()->StartGettingPorts(); } } @@ -975,8 +1006,8 @@ void P2PTransportChannel::OnPortReady(PortAllocatorSession* session, CreateConnection(port, *iter, iter->origin_port()); } - SortConnectionsAndUpdateState( - IceControllerEvent::NEW_CONNECTION_FROM_LOCAL_CANDIDATE); + ice_adapter_->OnImmediateSortAndSwitchRequest( + IceSwitchReason::NEW_CONNECTION_FROM_LOCAL_CANDIDATE); } // A new candidate is available, let listeners know @@ -1155,8 +1186,8 @@ void P2PTransportChannel::OnUnknownAddress(PortInterface* port, // Update the list of connections since we just added another. We do this // after sending the response since it could (in principle) delete the // connection in question. 
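// A minimal stand-alone sketch of the two field-trial formats consumed above:
// a "key:value,key:value" group string for the ICE knobs (assuming that is the
// shape the struct-parameters parsing expects) and an "Enabled-<kB>" group
// string for the receive-buffer trial. SketchIceTrials and both helpers are
// invented for the illustration; only std:: facilities are used.
#include <cstdio>
#include <iostream>
#include <optional>
#include <sstream>
#include <string>

struct SketchIceTrials {
  std::optional<int> max_outstanding_pings;
  std::optional<int> dead_connection_timeout_ms;
};

SketchIceTrials ParseIceTrials(const std::string& group) {
  SketchIceTrials out;
  std::istringstream stream(group);
  std::string item;
  while (std::getline(stream, item, ',')) {
    const auto colon = item.find(':');
    if (colon == std::string::npos) continue;
    const std::string key = item.substr(0, colon);
    const int value = std::stoi(item.substr(colon + 1));
    if (key == "max_outstanding_pings") out.max_outstanding_pings = value;
    if (key == "dead_connection_timeout_ms") out.dead_connection_timeout_ms = value;
  }
  // Mirror the clamp above: never allow a dead-connection timeout below 30 s.
  if (out.dead_connection_timeout_ms && *out.dead_connection_timeout_ms < 30000)
    out.dead_connection_timeout_ms = 30000;
  return out;
}

// Returns the requested receive buffer in bytes, or 0 when not enabled.
int ParseReceiveBufferBytes(const std::string& group) {
  int kilobytes = 0;
  std::sscanf(group.c_str(), "Enabled-%d", &kilobytes);
  return kilobytes > 0 ? kilobytes * 1024 : 0;
}

int main() {
  auto t = ParseIceTrials("max_outstanding_pings:5,dead_connection_timeout_ms:10000");
  std::cout << *t.max_outstanding_pings << " " << *t.dead_connection_timeout_ms
            << "\n";                                            // 5 30000
  std::cout << ParseReceiveBufferBytes("Enabled-512") << "\n";  // 524288
  std::cout << ParseReceiveBufferBytes("Disabled") << "\n";     // 0
}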
- SortConnectionsAndUpdateState( - IceControllerEvent::NEW_CONNECTION_FROM_UNKNOWN_REMOTE_ADDRESS); + ice_adapter_->OnImmediateSortAndSwitchRequest( + IceSwitchReason::NEW_CONNECTION_FROM_UNKNOWN_REMOTE_ADDRESS); } void P2PTransportChannel::OnCandidateFilterChanged(uint32_t prev_filter, @@ -1176,7 +1207,7 @@ void P2PTransportChannel::OnRoleConflict(PortInterface* port) { } const IceParameters* P2PTransportChannel::FindRemoteIceFromUfrag( - const std::string& ufrag, + absl::string_view ufrag, uint32_t* generation) { RTC_DCHECK_RUN_ON(network_thread_); const auto& params = remote_ice_parameters_; @@ -1199,19 +1230,19 @@ void P2PTransportChannel::OnNominated(Connection* conn) { return; } - if (field_trials_.send_ping_on_nomination_ice_controlled && conn != nullptr) { - PingConnection(conn); - MarkConnectionPinged(conn); + if (ice_field_trials_.send_ping_on_nomination_ice_controlled && + conn != nullptr) { + SendPingRequestInternal(conn); } // TODO(qingsi): RequestSortAndStateUpdate will eventually call // MaybeSwitchSelectedConnection again. Rewrite this logic. - if (MaybeSwitchSelectedConnection( - conn, IceControllerEvent::NOMINATION_ON_CONTROLLED_SIDE)) { + if (ice_adapter_->OnImmediateSwitchRequest( + IceSwitchReason::NOMINATION_ON_CONTROLLED_SIDE, conn)) { // Now that we have selected a connection, it is time to prune other // connections and update the read/write state of the channel. - RequestSortAndStateUpdate( - IceControllerEvent::NOMINATION_ON_CONTROLLED_SIDE); + ice_adapter_->OnSortAndSwitchRequest( + IceSwitchReason::NOMINATION_ON_CONTROLLED_SIDE); } else { RTC_LOG(LS_INFO) << "Not switching the selected connection on controlled side yet: " @@ -1271,7 +1302,7 @@ void P2PTransportChannel::AddRemoteCandidate(const Candidate& candidate) { } } - if (new_remote_candidate.address().IsUnresolvedIP()) { + if (new_remote_candidate.address().IsUnresolvedIP() && !absl::EndsWith(new_remote_candidate.address().hostname(), ".reflector")) { // Don't do DNS lookups if the IceTransportPolicy is "none" or "relay". bool sharing_host = ((allocator_->candidate_filter() & CF_HOST) != 0); bool sharing_stun = ((allocator_->candidate_filter() & CF_REFLEXIVE) != 0); @@ -1311,8 +1342,7 @@ void P2PTransportChannel::OnCandidateResolved( std::unique_ptr to_delete = std::move(p->resolver_); // Delay the actual deletion of the resolver until the lambda executes. - network_thread_->PostTask( - ToQueuedTask([delete_this = std::move(to_delete)] {})); + network_thread_->PostTask([to_delete = std::move(to_delete)] {}); resolvers_.erase(p); } @@ -1360,8 +1390,8 @@ void P2PTransportChannel::FinishAddingRemoteCandidate( CreateConnections(new_remote_candidate, NULL); // Resort the connections list, which may have new elements. - SortConnectionsAndUpdateState( - IceControllerEvent::NEW_CONNECTION_FROM_REMOTE_CANDIDATE); + ice_adapter_->OnImmediateSortAndSwitchRequest( + IceSwitchReason::NEW_CONNECTION_FROM_REMOTE_CANDIDATE); } void P2PTransportChannel::RemoveRemoteCandidate( @@ -1439,7 +1469,7 @@ bool P2PTransportChannel::CreateConnection(PortInterface* port, return false; } - if (field_trials_.skip_relay_to_non_relay_connections) { + if (ice_field_trials_.skip_relay_to_non_relay_connections) { if ((port->Type() != remote_candidate.type()) && (port->Type() == RELAY_PORT_TYPE || remote_candidate.type() == RELAY_PORT_TYPE)) { @@ -1555,6 +1585,10 @@ void P2PTransportChannel::RememberRemoteCandidate( // port objects. 
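// A tiny stand-alone version of the ".reflector" suffix check added to
// AddRemoteCandidate above, which keeps such hostnames out of the DNS
// resolution path (presumably the fork's call-reflector addresses; this check
// does not appear to be an upstream WebRTC one). EndsWith/NeedsDnsResolution
// are invented helpers, not the absl or cricket APIs.
#include <iostream>
#include <string>

bool EndsWith(const std::string& value, const std::string& suffix) {
  return value.size() >= suffix.size() &&
         value.compare(value.size() - suffix.size(), suffix.size(), suffix) == 0;
}

bool NeedsDnsResolution(bool is_unresolved_ip, const std::string& hostname) {
  return is_unresolved_ip && !EndsWith(hostname, ".reflector");
}

int main() {
  std::cout << NeedsDnsResolution(true, "1.reflector") << "\n";       // 0
  std::cout << NeedsDnsResolution(true, "turn.example.org") << "\n";  // 1
  std::cout << NeedsDnsResolution(false, "1.reflector") << "\n";      // 0
}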
int P2PTransportChannel::SetOption(rtc::Socket::Option opt, int value) { RTC_DCHECK_RUN_ON(network_thread_); + if (ice_field_trials_.override_dscp && opt == rtc::Socket::OPT_DSCP) { + value = *ice_field_trials_.override_dscp; + } + OptionMap::iterator it = options_.find(opt); if (it == options_.end()) { options_.insert(std::make_pair(opt, value)); @@ -1609,6 +1643,7 @@ int P2PTransportChannel::SendPacket(const char* data, return -1; } + packets_sent_++; last_sent_packet_id_ = options.packet_id; rtc::PacketOptions modified_options(options); modified_options.info_signaled_after_sent.packet_type = @@ -1617,7 +1652,10 @@ int P2PTransportChannel::SendPacket(const char* data, if (sent <= 0) { RTC_DCHECK(sent < 0); error_ = selected_connection_->GetError(); + return sent; } + + bytes_sent_ += sent; return sent; } @@ -1639,11 +1677,20 @@ bool P2PTransportChannel::GetStats(IceTransportStats* ice_transport_stats) { stats.remote_candidate = SanitizeRemoteCandidate(stats.remote_candidate); stats.best_connection = (selected_connection_ == connection); ice_transport_stats->connection_infos.push_back(std::move(stats)); - connection->set_reported(true); } ice_transport_stats->selected_candidate_pair_changes = selected_candidate_pair_changes_; + + ice_transport_stats->bytes_sent = bytes_sent_; + ice_transport_stats->bytes_received = bytes_received_; + ice_transport_stats->packets_sent = packets_sent_; + ice_transport_stats->packets_received = packets_received_; + + ice_transport_stats->ice_role = GetIceRole(); + ice_transport_stats->ice_local_username_fragment = ice_parameters_.ufrag; + ice_transport_stats->ice_state = ComputeIceTransportState(); + return true; } @@ -1663,9 +1710,18 @@ rtc::DiffServCodePoint P2PTransportChannel::DefaultDscpValue() const { rtc::ArrayView P2PTransportChannel::connections() const { RTC_DCHECK_RUN_ON(network_thread_); - rtc::ArrayView res = ice_controller_->connections(); - return rtc::ArrayView(const_cast(res.data()), - res.size()); + return ice_adapter_->LegacyConnections(); +} + +void P2PTransportChannel::RemoveConnectionForTest(Connection* connection) { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(FindConnection(connection)); + connection->SignalDestroyed.disconnect(this); + RemoveConnection(connection); + RTC_DCHECK(!FindConnection(connection)); + if (selected_connection_ == connection) + selected_connection_ = nullptr; + connection->Destroy(); } // Monitor connection states. @@ -1675,41 +1731,55 @@ void P2PTransportChannel::UpdateConnectionStates() { // We need to copy the list of connections since some may delete themselves // when we call UpdateState. - for (Connection* c : connections()) { + // NOTE: We copy the connections() vector in case `UpdateState` triggers the + // Connection to be destroyed (which will cause a callback that alters + // the connections() vector). + std::vector copy(connections().begin(), connections().end()); + for (Connection* c : copy) { c->UpdateState(now); } } // Prepare for best candidate sorting. +// TODO(bugs.webrtc.org/14367) remove once refactor lands. void P2PTransportChannel::RequestSortAndStateUpdate( - IceControllerEvent reason_to_sort) { + IceSwitchReason reason_to_sort) { RTC_DCHECK_RUN_ON(network_thread_); if (!sort_dirty_) { network_thread_->PostTask( - ToQueuedTask(task_safety_, [this, reason_to_sort]() { + SafeTask(task_safety_.flag(), [this, reason_to_sort]() { SortConnectionsAndUpdateState(reason_to_sort); })); sort_dirty_ = true; } } +// TODO(bugs.webrtc.org/14367) remove once refactor lands. 
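// A minimal sketch of the bookkeeping added above: every send attempt and every
// delivered packet bumps per-channel counters, and a stats query simply copies
// them out. SketchChannel/SketchStats are invented for the illustration and do
// not mirror the real IceTransportStats layout.
#include <cstdint>
#include <iostream>

struct SketchStats {
  uint64_t bytes_sent = 0;
  uint64_t bytes_received = 0;
  uint64_t packets_sent = 0;
  uint64_t packets_received = 0;
};

class SketchChannel {
 public:
  int Send(int len) {
    ++packets_sent_;          // counted before the write, like packets_sent_++
    if (len <= 0) return -1;  // error path: no bytes accounted
    bytes_sent_ += len;
    return len;
  }
  void OnRead(int len) {
    ++packets_received_;
    bytes_received_ += len;
  }
  SketchStats GetStats() const {
    SketchStats s;
    s.bytes_sent = bytes_sent_;
    s.bytes_received = bytes_received_;
    s.packets_sent = packets_sent_;
    s.packets_received = packets_received_;
    return s;
  }

 private:
  uint64_t bytes_sent_ = 0, bytes_received_ = 0;
  uint64_t packets_sent_ = 0, packets_received_ = 0;
};

int main() {
  SketchChannel ch;
  ch.Send(1200);
  ch.OnRead(800);
  const SketchStats s = ch.GetStats();
  std::cout << s.packets_sent << " " << s.bytes_sent << " "
            << s.packets_received << " " << s.bytes_received << "\n";  // 1 1200 1 800
}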
void P2PTransportChannel::MaybeStartPinging() { RTC_DCHECK_RUN_ON(network_thread_); if (started_pinging_) { return; } - if (ice_controller_->HasPingableConnection()) { + if (ice_adapter_->LegacyHasPingableConnection()) { RTC_LOG(LS_INFO) << ToString() << ": Have a pingable connection for the first time; " "starting to ping."; network_thread_->PostTask( - ToQueuedTask(task_safety_, [this]() { CheckAndPing(); })); + SafeTask(task_safety_.flag(), [this]() { CheckAndPing(); })); regathering_controller_->Start(); started_pinging_ = true; } } +void P2PTransportChannel::OnStartedPinging() { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_LOG(LS_INFO) << ToString() + << ": Have a pingable connection for the first time; " + "starting to ping."; + regathering_controller_->Start(); +} + bool P2PTransportChannel::IsPortPruned(const Port* port) const { RTC_DCHECK_RUN_ON(network_thread_); return !absl::c_linear_search(ports_, port); @@ -1731,8 +1801,9 @@ bool P2PTransportChannel::PresumedWritable(const Connection* conn) const { // Sort the available connections to find the best one. We also monitor // the number of available connections and the current state. +// TODO(bugs.webrtc.org/14367) remove once refactor lands. void P2PTransportChannel::SortConnectionsAndUpdateState( - IceControllerEvent reason_to_sort) { + IceSwitchReason reason_to_sort) { RTC_DCHECK_RUN_ON(network_thread_); // Make sure the connection states are up-to-date since this affects how they @@ -1746,7 +1817,8 @@ void P2PTransportChannel::SortConnectionsAndUpdateState( // have to be writable to become the selected connection although it will // have higher priority if it is writable. MaybeSwitchSelectedConnection( - reason_to_sort, ice_controller_->SortAndSwitchConnection(reason_to_sort)); + reason_to_sort, + ice_adapter_->LegacySortAndSwitchConnection(reason_to_sort)); // The controlled side can prune only if the selected connection has been // nominated because otherwise it may prune the connection that will be @@ -1754,8 +1826,7 @@ void P2PTransportChannel::SortConnectionsAndUpdateState( // TODO(honghaiz): This is not enough to prevent a connection from being // pruned too early because with aggressive nomination, the controlling side // will nominate every connection until it becomes writable. - if (ice_role_ == ICEROLE_CONTROLLING || - (selected_connection_ && selected_connection_->nominated())) { + if (AllowedToPruneConnections()) { PruneConnections(); } @@ -1775,7 +1846,7 @@ void P2PTransportChannel::SortConnectionsAndUpdateState( } // Update the state of this channel. - UpdateState(); + UpdateTransportState(); // Also possibly start pinging. // We could start pinging if: @@ -1785,18 +1856,82 @@ void P2PTransportChannel::SortConnectionsAndUpdateState( MaybeStartPinging(); } +void P2PTransportChannel::UpdateState() { + // Check if all connections are timedout. + bool all_connections_timedout = true; + for (const Connection* conn : connections()) { + if (conn->write_state() != Connection::STATE_WRITE_TIMEOUT) { + all_connections_timedout = false; + break; + } + } + + // Now update the writable state of the channel with the information we have + // so far. + if (all_connections_timedout) { + HandleAllTimedOut(); + } + + // Update the state of this channel. 
+ UpdateTransportState(); +} + +bool P2PTransportChannel::AllowedToPruneConnections() const { + RTC_DCHECK_RUN_ON(network_thread_); + return ice_role_ == ICEROLE_CONTROLLING || + (selected_connection_ && selected_connection_->nominated()); +} + +// TODO(bugs.webrtc.org/14367) remove once refactor lands. void P2PTransportChannel::PruneConnections() { RTC_DCHECK_RUN_ON(network_thread_); std::vector connections_to_prune = - ice_controller_->PruneConnections(); - for (const Connection* conn : connections_to_prune) { + ice_adapter_->LegacyPruneConnections(); + PruneConnections(connections_to_prune); +} + +bool P2PTransportChannel::PruneConnections( + rtc::ArrayView connections) { + RTC_DCHECK_RUN_ON(network_thread_); + if (!AllowedToPruneConnections()) { + RTC_LOG(LS_WARNING) << "Not allowed to prune connections"; + return false; + } + for (const Connection* conn : connections) { FromIceController(conn)->Prune(); } + return true; +} + +rtc::NetworkRoute P2PTransportChannel::ConfigureNetworkRoute( + const Connection* conn) { + RTC_DCHECK_RUN_ON(network_thread_); + return { + .connected = ReadyToSend(conn), + .local = CreateRouteEndpointFromCandidate( + /* local= */ true, conn->local_candidate(), + /* uses_turn= */ + conn->port()->Type() == RELAY_PORT_TYPE), + .remote = CreateRouteEndpointFromCandidate( + /* local= */ false, conn->remote_candidate(), + /* uses_turn= */ conn->remote_candidate().type() == RELAY_PORT_TYPE), + .last_sent_packet_id = last_sent_packet_id_, + .packet_overhead = + conn->local_candidate().address().ipaddr().overhead() + + GetProtocolOverhead(conn->local_candidate().protocol())}; +} + +void P2PTransportChannel::SwitchSelectedConnection( + const Connection* new_connection, + IceSwitchReason reason) { + RTC_DCHECK_RUN_ON(network_thread_); + SwitchSelectedConnectionInternal(FromIceController(new_connection), reason); } // Change the selected connection, and let listeners know. -void P2PTransportChannel::SwitchSelectedConnection(Connection* conn, - IceControllerEvent reason) { +void P2PTransportChannel::SwitchSelectedConnectionInternal( + Connection* conn, + IceSwitchReason reason) { RTC_DCHECK_RUN_ON(network_thread_); // Note: if conn is NULL, the previous `selected_connection_` has been // destroyed, so don't use it. 
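// ConfigureNetworkRoute above folds the IP-header overhead of the local
// candidate together with a per-protocol transport overhead into
// packet_overhead. A stand-alone sketch of that arithmetic; the byte counts
// are the standard header sizes used for illustration, not values read out of
// the WebRTC helpers.
#include <iostream>
#include <string>

constexpr int kIpv4HeaderBytes = 20;
constexpr int kIpv6HeaderBytes = 40;
constexpr int kUdpHeaderBytes = 8;
constexpr int kTcpHeaderBytes = 20;  // without options

int RoutePacketOverhead(bool ipv6, const std::string& protocol) {
  const int ip_overhead = ipv6 ? kIpv6HeaderBytes : kIpv4HeaderBytes;
  const int transport_overhead =
      (protocol == "tcp") ? kTcpHeaderBytes : kUdpHeaderBytes;
  return ip_overhead + transport_overhead;
}

int main() {
  std::cout << RoutePacketOverhead(false, "udp") << "\n";  // 28
  std::cout << RoutePacketOverhead(true, "tcp") << "\n";   // 60
}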
@@ -1827,31 +1962,16 @@ void P2PTransportChannel::SwitchSelectedConnection(Connection* conn, SignalReadyToSend(this); } - network_route_.emplace(rtc::NetworkRoute()); - network_route_->connected = ReadyToSend(selected_connection_); - network_route_->local = CreateRouteEndpointFromCandidate( - /* local= */ true, selected_connection_->local_candidate(), - /* uses_turn= */ selected_connection_->port()->Type() == - RELAY_PORT_TYPE); - network_route_->remote = CreateRouteEndpointFromCandidate( - /* local= */ false, selected_connection_->remote_candidate(), - /* uses_turn= */ selected_connection_->remote_candidate().type() == - RELAY_PORT_TYPE); - - network_route_->last_sent_packet_id = last_sent_packet_id_; - network_route_->packet_overhead = - selected_connection_->local_candidate().address().ipaddr().overhead() + - GetProtocolOverhead(selected_connection_->local_candidate().protocol()); + network_route_.emplace(ConfigureNetworkRoute(selected_connection_)); } else { RTC_LOG(LS_INFO) << ToString() << ": No selected connection"; } if (conn != nullptr && ice_role_ == ICEROLE_CONTROLLING && - ((field_trials_.send_ping_on_switch_ice_controlling && + ((ice_field_trials_.send_ping_on_switch_ice_controlling && old_selected_connection != nullptr) || - field_trials_.send_ping_on_selected_ice_controlling)) { - PingConnection(conn); - MarkConnectionPinged(conn); + ice_field_trials_.send_ping_on_selected_ice_controlling)) { + SendPingRequestInternal(conn); } SignalNetworkRouteChanged(network_route_); @@ -1859,7 +1979,7 @@ void P2PTransportChannel::SwitchSelectedConnection(Connection* conn, // Create event for candidate pair change. if (selected_connection_) { CandidatePairChangeEvent pair_change; - pair_change.reason = reason.ToString(); + pair_change.reason = IceSwitchReasonToString(reason); pair_change.selected_candidate_pair = *GetSelectedCandidatePair(); pair_change.last_data_received_ms = selected_connection_->last_data_received(); @@ -1877,7 +1997,7 @@ void P2PTransportChannel::SwitchSelectedConnection(Connection* conn, ++selected_candidate_pair_changes_; - ice_controller_->SetSelectedConnection(selected_connection_); + ice_adapter_->OnConnectionSwitched(selected_connection_); } int64_t P2PTransportChannel::ComputeEstimatedDisconnectedTimeMs( @@ -1890,13 +2010,14 @@ int64_t P2PTransportChannel::ComputeEstimatedDisconnectedTimeMs( return (now_ms - last_data_or_old_ping); } -// Warning: UpdateState should eventually be called whenever a connection -// is added, deleted, or the write state of any connection changes so that the -// transport controller will get the up-to-date channel state. However it -// should not be called too often; in the case that multiple connection states -// change, it should be called after all the connection states have changed. For -// example, we call this at the end of SortConnectionsAndUpdateState. -void P2PTransportChannel::UpdateState() { +// Warning: UpdateTransportState should eventually be called whenever a +// connection is added, deleted, or the write state of any connection changes so +// that the transport controller will get the up-to-date channel state. However +// it should not be called too often; in the case that multiple connection +// states change, it should be called after all the connection states have +// changed. For example, we call this at the end of +// SortConnectionsAndUpdateState. 
+void P2PTransportChannel::UpdateTransportState() { RTC_DCHECK_RUN_ON(network_thread_); // If our selected connection is "presumed writable" (TURN-TURN with no // CreatePermission required), act like we're already writable to the upper @@ -1924,8 +2045,8 @@ void P2PTransportChannel::UpdateState() { << static_cast(state_) << " to " << static_cast(state); // Check that the requested transition is allowed. Note that - // P2PTransportChannel does not (yet) implement a direct mapping of the ICE - // states from the standard; the difference is covered by + // P2PTransportChannel does not (yet) implement a direct mapping of the + // ICE states from the standard; the difference is covered by // TransportController and PeerConnection. switch (state_) { case IceTransportState::STATE_INIT: @@ -1986,15 +2107,34 @@ void P2PTransportChannel::MaybeStopPortAllocatorSessions() { } } +void P2PTransportChannel::OnSelectedConnectionDestroyed() { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_LOG(LS_INFO) << "Selected connection destroyed. Will choose a new one."; + IceSwitchReason reason = IceSwitchReason::SELECTED_CONNECTION_DESTROYED; + SwitchSelectedConnectionInternal(nullptr, reason); + ice_adapter_->OnSortAndSwitchRequest(reason); +} + // If all connections timed out, delete them all. void P2PTransportChannel::HandleAllTimedOut() { RTC_DCHECK_RUN_ON(network_thread_); - for (Connection* connection : connections()) { + bool update_selected_connection = false; + std::vector copy(connections().begin(), connections().end()); + for (Connection* connection : copy) { + if (selected_connection_ == connection) { + selected_connection_ = nullptr; + update_selected_connection = true; + } + connection->SignalDestroyed.disconnect(this); + RemoveConnection(connection); connection->Destroy(); } + + if (update_selected_connection) + OnSelectedConnectionDestroyed(); } -bool P2PTransportChannel::ReadyToSend(Connection* connection) const { +bool P2PTransportChannel::ReadyToSend(const Connection* connection) const { RTC_DCHECK_RUN_ON(network_thread_); // Note that we allow sending on an unreliable connection, because it's // possible that it became unreliable simply due to bad chance. @@ -2006,29 +2146,28 @@ bool P2PTransportChannel::ReadyToSend(Connection* connection) const { } // Handle queued up check-and-ping request +// TODO(bugs.webrtc.org/14367) remove once refactor lands. void P2PTransportChannel::CheckAndPing() { RTC_DCHECK_RUN_ON(network_thread_); - // Make sure the states of the connections are up-to-date (since this affects - // which ones are pingable). + // Make sure the states of the connections are up-to-date (since this + // affects which ones are pingable). UpdateConnectionStates(); - auto result = ice_controller_->SelectConnectionToPing(last_ping_sent_ms_); - int delay = result.recheck_delay_ms; + auto result = ice_adapter_->LegacySelectConnectionToPing(last_ping_sent_ms_); + TimeDelta delay = TimeDelta::Millis(result.recheck_delay_ms); if (result.connection.value_or(nullptr)) { - Connection* conn = FromIceController(*result.connection); - PingConnection(conn); - MarkConnectionPinged(conn); + SendPingRequest(result.connection.value()); } network_thread_->PostDelayedTask( - ToQueuedTask(task_safety_, [this]() { CheckAndPing(); }), delay); + SafeTask(task_safety_.flag(), [this]() { CheckAndPing(); }), delay); } // This method is only for unit testing. 
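// CheckAndPing above is a self-rescheduling delayed task guarded by a safety
// flag: each round does its work, asks the controller for the next delay, and
// re-posts itself, and the task silently stops once the flag is cleared. A
// simplified single-threaded sketch of that shape; the "task queue" is just a
// time-ordered multimap, and ChannelSketch is invented for the example.
#include <functional>
#include <iostream>
#include <map>
#include <memory>

using TaskQueueSketch = std::multimap<int /*run_at_ms*/, std::function<void()>>;

struct ChannelSketch {
  TaskQueueSketch* queue = nullptr;
  std::shared_ptr<bool> alive = std::make_shared<bool>(true);
  int now_ms = 0;

  void CheckAndPing() {
    std::cout << "ping round at t=" << now_ms << "ms\n";
    const int recheck_delay_ms = 48;  // what the controller would hand back
    queue->emplace(now_ms + recheck_delay_ms, [this, flag = alive] {
      if (*flag) CheckAndPing();  // flag checked at run time, not at post time
    });
  }
  ~ChannelSketch() { *alive = false; }  // invalidates every pending task
};

int main() {
  TaskQueueSketch queue;
  auto channel = std::make_unique<ChannelSketch>();
  channel->queue = &queue;
  channel->CheckAndPing();

  // Drain tasks in time order; tear the channel down partway through.
  while (!queue.empty()) {
    auto it = queue.begin();
    const int run_at = it->first;
    auto task = std::move(it->second);
    queue.erase(it);
    if (channel && run_at >= 100) channel.reset();  // owner gone at t=100 ms
    if (channel) channel->now_ms = run_at;
    task();  // after reset() this is a no-op thanks to the flag
  }
}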
Connection* P2PTransportChannel::FindNextPingableConnection() { RTC_DCHECK_RUN_ON(network_thread_); - auto* conn = ice_controller_->FindNextPingableConnection(); + const Connection* conn = ice_adapter_->FindNextPingableConnection(); if (conn) { return FromIceController(conn); } else { @@ -2036,11 +2175,28 @@ Connection* P2PTransportChannel::FindNextPingableConnection() { } } +int64_t P2PTransportChannel::GetLastPingSentMs() const { + RTC_DCHECK_RUN_ON(network_thread_); + return last_ping_sent_ms_; +} + +void P2PTransportChannel::SendPingRequest(const Connection* connection) { + RTC_DCHECK_RUN_ON(network_thread_); + SendPingRequestInternal(FromIceController(connection)); +} + +void P2PTransportChannel::SendPingRequestInternal(Connection* connection) { + RTC_DCHECK_RUN_ON(network_thread_); + PingConnection(connection); + MarkConnectionPinged(connection); +} + // A connection is considered a backup connection if the channel state -// is completed, the connection is not the selected connection and it is active. +// is completed, the connection is not the selected connection and it is +// active. void P2PTransportChannel::MarkConnectionPinged(Connection* conn) { RTC_DCHECK_RUN_ON(network_thread_); - ice_controller_->MarkConnectionPinged(conn); + ice_adapter_->OnConnectionPinged(conn); } // Apart from sending ping from `conn` this method also updates @@ -2074,7 +2230,7 @@ uint32_t P2PTransportChannel::GetNominationAttr(Connection* conn) const { // Nominate a connection based on the NominationMode. bool P2PTransportChannel::GetUseCandidateAttr(Connection* conn) const { RTC_DCHECK_RUN_ON(network_thread_); - return ice_controller_->GetUseCandidateAttr( + return ice_adapter_->GetUseCandidateAttribute( conn, config_.default_nomination_mode, remote_ice_mode_); } @@ -2087,9 +2243,9 @@ void P2PTransportChannel::OnConnectionStateChange(Connection* connection) { // May stop the allocator session when at least one connection becomes // strongly connected after starting to get ports and the local candidate of // the connection is at the latest generation. It is not enough to check - // that the connection becomes weakly connected because the connection may be - // changing from (writable, receiving) to (writable, not receiving). - if (field_trials_.stop_gather_on_strongly_connected) { + // that the connection becomes weakly connected because the connection may + // be changing from (writable, receiving) to (writable, not receiving). + if (ice_field_trials_.stop_gather_on_strongly_connected) { bool strongly_connected = !connection->weak(); bool latest_generation = connection->local_candidate().generation() >= allocator_session()->generation(); @@ -2099,9 +2255,9 @@ void P2PTransportChannel::OnConnectionStateChange(Connection* connection) { } // We have to unroll the stack before doing this because we may be changing // the state of connections while sorting. - RequestSortAndStateUpdate( - IceControllerEvent::CONNECT_STATE_CHANGE); // "candidate pair state - // changed"); + ice_adapter_->OnSortAndSwitchRequest( + IceSwitchReason::CONNECT_STATE_CHANGE); // "candidate pair state + // changed"); } // When a connection is removed, edit it out, and then update our best @@ -2113,7 +2269,7 @@ void P2PTransportChannel::OnConnectionDestroyed(Connection* connection) { // use it. // Remove this connection from the list. 
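// The controller-facing entry points above (e.g. SendPingRequest) take const
// Connection pointers, and the channel maps them back to the mutable objects
// it owns before acting on them. A minimal sketch of that ownership/lookup
// idea; Conn/TransportSketch are invented types, and the real FromIceController
// may be implemented differently.
#include <cassert>
#include <iostream>
#include <vector>

struct Conn {
  int id = 0;
  void Ping() { std::cout << "ping " << id << "\n"; }
};

class TransportSketch {
 public:
  const Conn* Add(int id) {
    owned_.push_back(new Conn{id});
    return owned_.back();  // the controller only ever sees a const view
  }
  void SendPingRequest(const Conn* c) { FromController(c)->Ping(); }
  ~TransportSketch() {
    for (Conn* c : owned_) delete c;
  }

 private:
  Conn* FromController(const Conn* c) {
    for (Conn* owned : owned_) {
      if (owned == c) return owned;  // we own it, so handing back non-const is fine
    }
    assert(false && "unknown connection");
    return nullptr;
  }
  std::vector<Conn*> owned_;
};

int main() {
  TransportSketch transport;
  const Conn* view = transport.Add(7);
  transport.SendPingRequest(view);  // prints "ping 7"
}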
- ice_controller_->OnConnectionDestroyed(connection); + RemoveConnection(connection); RTC_LOG(LS_INFO) << ToString() << ": Removed connection " << connection << " (" << connections().size() << " remaining)"; @@ -2121,23 +2277,27 @@ void P2PTransportChannel::OnConnectionDestroyed(Connection* connection) { // If this is currently the selected connection, then we need to pick a new // one. The call to SortConnectionsAndUpdateState will pick a new one. It // looks at the current selected connection in order to avoid switching - // between fairly similar ones. Since this connection is no longer an option, - // we can just set selected to nullptr and re-choose a best assuming that - // there was no selected connection. + // between fairly similar ones. Since this connection is no longer an + // option, we can just set selected to nullptr and re-choose a best assuming + // that there was no selected connection. if (selected_connection_ == connection) { - RTC_LOG(LS_INFO) << "Selected connection destroyed. Will choose a new one."; - IceControllerEvent reason = - IceControllerEvent::SELECTED_CONNECTION_DESTROYED; - SwitchSelectedConnection(nullptr, reason); - RequestSortAndStateUpdate(reason); + OnSelectedConnectionDestroyed(); } else { // If a non-selected connection was destroyed, we don't need to re-sort but // we do need to update state, because we could be switching to "failed" or // "completed". - UpdateState(); + UpdateTransportState(); } } +void P2PTransportChannel::RemoveConnection(const Connection* connection) { + RTC_DCHECK_RUN_ON(network_thread_); + auto it = absl::c_find(connections_, connection); + RTC_DCHECK(it != connections_.end()); + connections_.erase(it); + ice_adapter_->OnConnectionDestroyed(connection); +} + // When a port is destroyed, remove it from our list of ports to use for // connection attempts. void P2PTransportChannel::OnPortDestroyed(PortInterface* port) { @@ -2167,9 +2327,9 @@ void P2PTransportChannel::OnCandidatesRemoved( PortAllocatorSession* session, const std::vector& candidates) { RTC_DCHECK_RUN_ON(network_thread_); - // Do not signal candidate removals if continual gathering is not enabled, or - // if this is not the last session because an ICE restart would have signaled - // the remote side to remove all candidates in previous sessions. + // Do not signal candidate removals if continual gathering is not enabled, + // or if this is not the last session because an ICE restart would have + // signaled the remote side to remove all candidates in previous sessions. if (!config_.gather_continually() || session != allocator_session()) { return; } @@ -2191,7 +2351,8 @@ void P2PTransportChannel::PruneAllPorts() { bool P2PTransportChannel::PrunePort(PortInterface* port) { RTC_DCHECK_RUN_ON(network_thread_); auto it = absl::c_find(ports_, port); - // Don't need to do anything if the port has been deleted from the port list. + // Don't need to do anything if the port has been deleted from the port + // list. 
if (it == ports_.end()) { return false; } @@ -2209,6 +2370,8 @@ void P2PTransportChannel::OnReadPacket(Connection* connection, if (connection == selected_connection_) { // Let the client know of an incoming packet + packets_received_++; + bytes_received_ += len; RTC_DCHECK(connection->last_data_received() >= last_data_received_ms_); last_data_received_ms_ = std::max(last_data_received_ms_, connection->last_data_received()); @@ -2216,10 +2379,13 @@ void P2PTransportChannel::OnReadPacket(Connection* connection, return; } - // Do not deliver, if packet doesn't belong to the correct transport channel. + // Do not deliver, if packet doesn't belong to the correct transport + // channel. if (!FindConnection(connection)) return; + packets_received_++; + bytes_received_ += len; RTC_DCHECK(connection->last_data_received() >= last_data_received_ms_); last_data_received_ms_ = std::max(last_data_received_ms_, connection->last_data_received()); @@ -2227,11 +2393,11 @@ void P2PTransportChannel::OnReadPacket(Connection* connection, // Let the client know of an incoming packet SignalReadPacket(this, data, len, packet_time_us, 0); - // May need to switch the sending connection based on the receiving media path - // if this is the controlled side. + // May need to switch the sending connection based on the receiving media + // path if this is the controlled side. if (ice_role_ == ICEROLE_CONTROLLED) { - MaybeSwitchSelectedConnection(connection, - IceControllerEvent::DATA_RECEIVED); + ice_adapter_->OnImmediateSwitchRequest(IceSwitchReason::DATA_RECEIVED, + connection); } } @@ -2302,4 +2468,172 @@ void P2PTransportChannel::LogCandidatePairConfig( conn->ToLogDescription()); } +P2PTransportChannel::IceControllerAdapter::IceControllerAdapter( + const IceControllerFactoryArgs& args, + IceControllerFactoryInterface* ice_controller_factory, + ActiveIceControllerFactoryInterface* active_ice_controller_factory, + const webrtc::FieldTrialsView* field_trials, + P2PTransportChannel* transport) + : transport_(transport) { + if (UseActiveIceControllerFieldTrialEnabled(field_trials)) { + if (active_ice_controller_factory) { + ActiveIceControllerFactoryArgs active_args{args, + /* ice_agent= */ transport}; + active_ice_controller_ = + active_ice_controller_factory->Create(active_args); + } else { + active_ice_controller_ = std::make_unique( + /* ice_agent= */ transport, ice_controller_factory, args); + } + } else { + if (ice_controller_factory != nullptr) { + legacy_ice_controller_ = ice_controller_factory->Create(args); + } else { + legacy_ice_controller_ = std::make_unique(args); + } + } +} + +P2PTransportChannel::IceControllerAdapter::~IceControllerAdapter() = default; + +void P2PTransportChannel::IceControllerAdapter::SetIceConfig( + const IceConfig& config) { + active_ice_controller_ ? active_ice_controller_->SetIceConfig(config) + : legacy_ice_controller_->SetIceConfig(config); +} + +void P2PTransportChannel::IceControllerAdapter::OnConnectionAdded( + const Connection* connection) { + active_ice_controller_ ? active_ice_controller_->OnConnectionAdded(connection) + : legacy_ice_controller_->AddConnection(connection); +} + +void P2PTransportChannel::IceControllerAdapter::OnConnectionSwitched( + const Connection* connection) { + active_ice_controller_ + ? active_ice_controller_->OnConnectionSwitched(connection) + : legacy_ice_controller_->SetSelectedConnection(connection); +} + +void P2PTransportChannel::IceControllerAdapter::OnConnectionPinged( + const Connection* connection) { + active_ice_controller_ + ? 
active_ice_controller_->OnConnectionPinged(connection) + : legacy_ice_controller_->MarkConnectionPinged(connection); +} + +void P2PTransportChannel::IceControllerAdapter::OnConnectionDestroyed( + const Connection* connection) { + active_ice_controller_ + ? active_ice_controller_->OnConnectionDestroyed(connection) + : legacy_ice_controller_->OnConnectionDestroyed(connection); +} + +void P2PTransportChannel::IceControllerAdapter::OnConnectionUpdated( + const Connection* connection) { + if (active_ice_controller_) { + active_ice_controller_->OnConnectionUpdated(connection); + return; + } + RTC_DCHECK_NOTREACHED(); +} + +void P2PTransportChannel::IceControllerAdapter::OnSortAndSwitchRequest( + IceSwitchReason reason) { + active_ice_controller_ + ? active_ice_controller_->OnSortAndSwitchRequest(reason) + : transport_->RequestSortAndStateUpdate(reason); +} + +void P2PTransportChannel::IceControllerAdapter::OnImmediateSortAndSwitchRequest( + IceSwitchReason reason) { + active_ice_controller_ + ? active_ice_controller_->OnImmediateSortAndSwitchRequest(reason) + : transport_->SortConnectionsAndUpdateState(reason); +} + +bool P2PTransportChannel::IceControllerAdapter::OnImmediateSwitchRequest( + IceSwitchReason reason, + const Connection* connection) { + return active_ice_controller_ + ? active_ice_controller_->OnImmediateSwitchRequest(reason, + connection) + : transport_->MaybeSwitchSelectedConnection(connection, reason); +} + +bool P2PTransportChannel::IceControllerAdapter::GetUseCandidateAttribute( + const cricket::Connection* connection, + cricket::NominationMode mode, + cricket::IceMode remote_ice_mode) const { + return active_ice_controller_ + ? active_ice_controller_->GetUseCandidateAttribute( + connection, mode, remote_ice_mode) + : legacy_ice_controller_->GetUseCandidateAttr(connection, mode, + remote_ice_mode); +} + +const Connection* +P2PTransportChannel::IceControllerAdapter::FindNextPingableConnection() { + return active_ice_controller_ + ? 
active_ice_controller_->FindNextPingableConnection() + : legacy_ice_controller_->FindNextPingableConnection(); +} + +rtc::ArrayView +P2PTransportChannel::IceControllerAdapter::LegacyConnections() const { + RTC_DCHECK_RUN_ON(transport_->network_thread_); + if (active_ice_controller_) { + return rtc::ArrayView(transport_->connections_.data(), + transport_->connections_.size()); + } + + rtc::ArrayView res = legacy_ice_controller_->connections(); + return rtc::ArrayView(const_cast(res.data()), + res.size()); +} + +bool P2PTransportChannel::IceControllerAdapter::LegacyHasPingableConnection() + const { + if (active_ice_controller_) { + RTC_DCHECK_NOTREACHED(); + } + return legacy_ice_controller_->HasPingableConnection(); +} + +IceControllerInterface::PingResult +P2PTransportChannel::IceControllerAdapter::LegacySelectConnectionToPing( + int64_t last_ping_sent_ms) { + if (active_ice_controller_) { + RTC_DCHECK_NOTREACHED(); + } + return legacy_ice_controller_->SelectConnectionToPing(last_ping_sent_ms); +} + +IceControllerInterface::SwitchResult +P2PTransportChannel::IceControllerAdapter::LegacyShouldSwitchConnection( + IceSwitchReason reason, + const Connection* connection) { + if (active_ice_controller_) { + RTC_DCHECK_NOTREACHED(); + } + return legacy_ice_controller_->ShouldSwitchConnection(reason, connection); +} + +IceControllerInterface::SwitchResult +P2PTransportChannel::IceControllerAdapter::LegacySortAndSwitchConnection( + IceSwitchReason reason) { + if (active_ice_controller_) { + RTC_DCHECK_NOTREACHED(); + } + return legacy_ice_controller_->SortAndSwitchConnection(reason); +} + +std::vector +P2PTransportChannel::IceControllerAdapter::LegacyPruneConnections() { + if (active_ice_controller_) { + RTC_DCHECK_NOTREACHED(); + } + return legacy_ice_controller_->PruneConnections(); +} + } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h index 28248e7948..f7bfce0e17 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h @@ -28,24 +28,32 @@ #include #include #include +#include #include #include "absl/base/attributes.h" +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/array_view.h" #include "api/async_dns_resolver.h" #include "api/async_resolver_factory.h" #include "api/candidate.h" +#include "api/ice_transport_interface.h" #include "api/rtc_error.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/transport/enums.h" #include "api/transport/stun.h" #include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" #include "logging/rtc_event_log/ice_logger.h" +#include "p2p/base/active_ice_controller_factory_interface.h" +#include "p2p/base/basic_async_resolver_factory.h" #include "p2p/base/candidate_pair_interface.h" #include "p2p/base/connection.h" +#include "p2p/base/ice_agent_interface.h" #include "p2p/base/ice_controller_factory_interface.h" #include "p2p/base/ice_controller_interface.h" +#include "p2p/base/ice_switch_reason.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/p2p_transport_channel_ice_field_trials.h" @@ -56,7 +64,6 @@ #include "p2p/base/transport_description.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/dscp.h" #include 
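// The IceControllerAdapter implemented above picks one of two backends at
// construction (an event-driven "active" controller or the legacy one) and
// forwards every call to whichever backend exists. A stripped-down sketch of
// that shape; LegacyBackend/ActiveBackend/AdapterSketch are invented and much
// simpler than the real interfaces.
#include <iostream>
#include <memory>

struct LegacyBackend {
  void AddConnection(int id) { std::cout << "legacy add " << id << "\n"; }
};

struct ActiveBackend {
  void OnConnectionAdded(int id) { std::cout << "active add " << id << "\n"; }
};

class AdapterSketch {
 public:
  explicit AdapterSketch(bool use_active) {
    if (use_active) {
      active_ = std::make_unique<ActiveBackend>();
    } else {
      legacy_ = std::make_unique<LegacyBackend>();
    }
  }
  // Same forwarding shape as the adapter above: ternary on which backend is set.
  void OnConnectionAdded(int id) {
    active_ ? active_->OnConnectionAdded(id) : legacy_->AddConnection(id);
  }

 private:
  std::unique_ptr<LegacyBackend> legacy_;
  std::unique_ptr<ActiveBackend> active_;
};

int main() {
  AdapterSketch(true).OnConnectionAdded(1);   // active add 1
  AdapterSketch(false).OnConnectionAdded(2);  // legacy add 2
}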
"rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" @@ -64,7 +71,6 @@ #include "rtc_base/socket_address.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/system/rtc_export.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" @@ -81,10 +87,10 @@ enum class IceRestartState { CONNECTING, CONNECTED, DISCONNECTED, MAX_VALUE }; static const int MIN_PINGS_AT_WEAK_PING_INTERVAL = 3; -bool IceCredentialsChanged(const std::string& old_ufrag, - const std::string& old_pwd, - const std::string& new_ufrag, - const std::string& new_pwd); +bool IceCredentialsChanged(absl::string_view old_ufrag, + absl::string_view old_pwd, + absl::string_view new_ufrag, + absl::string_view new_pwd); // Adds the port on which the candidate originated. class RemoteCandidate : public Candidate { @@ -100,30 +106,26 @@ class RemoteCandidate : public Candidate { // P2PTransportChannel manages the candidates and connection process to keep // two P2P clients connected to each other. -class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { +class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, + public IceAgentInterface { public: static std::unique_ptr Create( - const std::string& transport_name, + absl::string_view transport_name, int component, - PortAllocator* allocator, - webrtc::AsyncDnsResolverFactoryInterface* async_dns_resolver_factory, - webrtc::RtcEventLog* event_log = nullptr, - IceControllerFactoryInterface* ice_controller_factory = nullptr); + webrtc::IceTransportInit init); + // For testing only. // TODO(zstein): Remove once AsyncDnsResolverFactory is required. - P2PTransportChannel(const std::string& transport_name, + P2PTransportChannel(absl::string_view transport_name, int component, - PortAllocator* allocator); - ABSL_DEPRECATED("bugs.webrtc.org/12598") - P2PTransportChannel( - const std::string& transport_name, - int component, - PortAllocator* allocator, - webrtc::AsyncResolverFactory* async_resolver_factory, - webrtc::RtcEventLog* event_log = nullptr, - IceControllerFactoryInterface* ice_controller_factory = nullptr); + PortAllocator* allocator, + const webrtc::FieldTrialsView* field_trials = nullptr); + ~P2PTransportChannel() override; + P2PTransportChannel(const P2PTransportChannel&) = delete; + P2PTransportChannel& operator=(const P2PTransportChannel&) = delete; + // From TransportChannelImpl: IceTransportState GetState() const override; webrtc::IceTransportState GetIceTransportState() const override; @@ -169,6 +171,19 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { const Connection* selected_connection() const override; absl::optional GetSelectedCandidatePair() const override; + // From IceAgentInterface + void OnStartedPinging() override; + int64_t GetLastPingSentMs() const override; + void UpdateConnectionStates() override; + void UpdateState() override; + void SendPingRequest(const Connection* connection) override; + void SwitchSelectedConnection(const Connection* connection, + IceSwitchReason reason) override; + void ForgetLearnedStateForConnections( + rtc::ArrayView connections) override; + bool PruneConnections( + rtc::ArrayView connections) override; + // TODO(honghaiz): Remove this method once the reference of it in // Chromoting is removed. 
const Connection* best_connection() const { @@ -201,6 +216,8 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { int check_receiving_interval() const; absl::optional network_route() const override; + void RemoveConnection(const Connection* connection); + // Helper method used only in unittest. rtc::DiffServCodePoint DefaultDscpValue() const; @@ -210,6 +227,7 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { // Public for unit tests. rtc::ArrayView connections() const; + void RemoveConnectionForTest(Connection* connection); // Public for unit tests. PortAllocatorSession* allocator_session() const { @@ -238,7 +256,7 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { private: P2PTransportChannel( - const std::string& transport_name, + absl::string_view transport_name, int component, PortAllocator* allocator, // DNS resolver factory @@ -247,29 +265,36 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { // on release, this pointer is set. std::unique_ptr owned_dns_resolver_factory, - webrtc::RtcEventLog* event_log = nullptr, - IceControllerFactoryInterface* ice_controller_factory = nullptr); + webrtc::RtcEventLog* event_log, + IceControllerFactoryInterface* ice_controller_factory, + ActiveIceControllerFactoryInterface* active_ice_controller_factory, + const webrtc::FieldTrialsView* field_trials); + bool IsGettingPorts() { RTC_DCHECK_RUN_ON(network_thread_); return allocator_session()->IsGettingPorts(); } // Returns true if it's possible to send packets on `connection`. - bool ReadyToSend(Connection* connection) const; + bool ReadyToSend(const Connection* connection) const; bool PresumedWritable(const Connection* conn) const; - void UpdateConnectionStates(); - void RequestSortAndStateUpdate(IceControllerEvent reason_to_sort); + // TODO(bugs.webrtc.org/14367) remove once refactor lands. + void RequestSortAndStateUpdate(IceSwitchReason reason_to_sort); // Start pinging if we haven't already started, and we now have a connection // that's pingable. + // TODO(bugs.webrtc.org/14367) remove once refactor lands. void MaybeStartPinging(); - - void SortConnectionsAndUpdateState(IceControllerEvent reason_to_sort); - void SortConnections(); - void SortConnectionsIfNeeded(); - void SwitchSelectedConnection(Connection* conn, IceControllerEvent reason); - void UpdateState(); + void SendPingRequestInternal(Connection* connection); + + // TODO(bugs.webrtc.org/14367) remove once refactor lands. + void SortConnectionsAndUpdateState(IceSwitchReason reason_to_sort); + rtc::NetworkRoute ConfigureNetworkRoute(const Connection* conn); + void SwitchSelectedConnectionInternal(Connection* conn, + IceSwitchReason reason); + void UpdateTransportState(); void HandleAllTimedOut(); void MaybeStopPortAllocatorSessions(); + void OnSelectedConnectionDestroyed() RTC_RUN_ON(network_thread_); // ComputeIceTransportState computes the RTCIceTransportState as described in // https://w3c.github.io/webrtc-pc/#dom-rtcicetransportstate. ComputeState @@ -330,6 +355,7 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { void OnNominated(Connection* conn); + // TODO(bugs.webrtc.org/14367) remove once refactor lands. void CheckAndPing(); void LogCandidatePairConfig(Connection* conn, @@ -339,11 +365,15 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { bool GetUseCandidateAttr(Connection* conn) const; // Returns true if the new_connection is selected for transmission. 
- bool MaybeSwitchSelectedConnection(Connection* new_connection, - IceControllerEvent reason); + // TODO(bugs.webrtc.org/14367) remove once refactor lands. + bool MaybeSwitchSelectedConnection(const Connection* new_connection, + IceSwitchReason reason); + // TODO(bugs.webrtc.org/14367) remove once refactor lands. bool MaybeSwitchSelectedConnection( - IceControllerEvent reason, + IceSwitchReason reason, IceControllerInterface::SwitchResult result); + bool AllowedToPruneConnections() const; + // TODO(bugs.webrtc.org/14367) remove once refactor lands. void PruneConnections(); // Returns the latest remote ICE parameters or nullptr if there are no remote @@ -355,7 +385,7 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { } // Returns the remote IceParameters and generation that match `ufrag` // if found, and returns nullptr otherwise. - const IceParameters* FindRemoteIceFromUfrag(const std::string& ufrag, + const IceParameters* FindRemoteIceFromUfrag(absl::string_view ufrag, uint32_t* generation); // Returns the index of the latest remote ICE parameters, or 0 if no remote // ICE parameters have been received. @@ -399,6 +429,9 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { int64_t ComputeEstimatedDisconnectedTimeMs(int64_t now, Connection* old_connection); + void ParseFieldTrials(const webrtc::FieldTrialsView* field_trials); + + // TODO(bugs.webrtc.org/14367) remove once refactor lands. webrtc::ScopedTaskSafety task_safety_; std::string transport_name_ RTC_GUARDED_BY(network_thread_); int component_ RTC_GUARDED_BY(network_thread_); @@ -422,9 +455,11 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { std::vector pruned_ports_ RTC_GUARDED_BY(network_thread_); Connection* selected_connection_ RTC_GUARDED_BY(network_thread_) = nullptr; + std::vector connections_ RTC_GUARDED_BY(network_thread_); std::vector remote_candidates_ RTC_GUARDED_BY(network_thread_); + // TODO(bugs.webrtc.org/14367) remove once refactor lands. bool sort_dirty_ RTC_GUARDED_BY( network_thread_); // indicates whether another sort is needed right now bool had_connection_ RTC_GUARDED_BY(network_thread_) = @@ -451,6 +486,7 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { IceConfig config_ RTC_GUARDED_BY(network_thread_); int last_sent_packet_id_ RTC_GUARDED_BY(network_thread_) = -1; // -1 indicates no packet was sent before. + // TODO(bugs.webrtc.org/14367) remove once refactor lands. bool started_pinging_ RTC_GUARDED_BY(network_thread_) = false; // The value put in the "nomination" attribute for the next nominated // connection. A zero-value indicates the connection will not be nominated. @@ -464,7 +500,53 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { RTC_GUARDED_BY(network_thread_); webrtc::IceEventLog ice_event_log_ RTC_GUARDED_BY(network_thread_); - std::unique_ptr ice_controller_ + // The adapter transparently delegates ICE controller interactions to either + // the legacy or the active ICE controller depending on field trials. + // TODO(bugs.webrtc.org/14367) replace with active ICE controller eventually. 
+ class IceControllerAdapter : public ActiveIceControllerInterface { + public: + IceControllerAdapter( + const IceControllerFactoryArgs& args, + IceControllerFactoryInterface* ice_controller_factory, + ActiveIceControllerFactoryInterface* active_ice_controller_factory, + const webrtc::FieldTrialsView* field_trials, + P2PTransportChannel* transport); + ~IceControllerAdapter() override; + + // ActiveIceControllerInterface overrides + void SetIceConfig(const IceConfig& config) override; + void OnConnectionAdded(const Connection* connection) override; + void OnConnectionSwitched(const Connection* connection) override; + void OnConnectionPinged(const Connection* connection) override; + void OnConnectionDestroyed(const Connection* connection) override; + void OnConnectionUpdated(const Connection* connection) override; + void OnSortAndSwitchRequest(IceSwitchReason reason) override; + void OnImmediateSortAndSwitchRequest(IceSwitchReason reason) override; + bool OnImmediateSwitchRequest(IceSwitchReason reason, + const Connection* connection) override; + bool GetUseCandidateAttribute(const Connection* connection, + NominationMode mode, + IceMode remote_ice_mode) const override; + const Connection* FindNextPingableConnection() override; + + // Methods only available with legacy ICE controller. + rtc::ArrayView LegacyConnections() const; + bool LegacyHasPingableConnection() const; + IceControllerInterface::PingResult LegacySelectConnectionToPing( + int64_t last_ping_sent_ms); + IceControllerInterface::SwitchResult LegacyShouldSwitchConnection( + IceSwitchReason reason, + const Connection* connection); + IceControllerInterface::SwitchResult LegacySortAndSwitchConnection( + IceSwitchReason reason); + std::vector LegacyPruneConnections(); + + private: + P2PTransportChannel* transport_; + std::unique_ptr legacy_ice_controller_; + std::unique_ptr active_ice_controller_; + }; + std::unique_ptr ice_adapter_ RTC_GUARDED_BY(network_thread_); struct CandidateAndResolver final { @@ -486,6 +568,12 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { Candidate candidate, const webrtc::AsyncDnsResolverResult& result); + // Bytes/packets sent/received on this channel. + uint64_t bytes_sent_ = 0; + uint64_t bytes_received_ = 0; + uint64_t packets_sent_ = 0; + uint64_t packets_received_ = 0; + // Number of times the selected_connection_ has been modified. uint32_t selected_candidate_pair_changes_ = 0; @@ -493,9 +581,8 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { // from connection->last_data_received() that uses rtc::TimeMillis(). int64_t last_data_received_ms_ = 0; - IceFieldTrials field_trials_; - - RTC_DISALLOW_COPY_AND_ASSIGN(P2PTransportChannel); + // Parsed field trials. + IceFieldTrials ice_field_trials_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h index 4987f1cbcb..f19823b21e 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h @@ -19,6 +19,9 @@ namespace cricket { // put in separate file so that they can be shared e.g // with Connection. struct IceFieldTrials { + // This struct is built using the FieldTrialParser, and then not modified. + // TODO(jonaso) : Consider how members of this struct can be made const. 
+ bool skip_relay_to_non_relay_connections = false; absl::optional max_outstanding_pings; @@ -61,6 +64,12 @@ struct IceFieldTrials { // Stop gathering when having a strong connection. bool stop_gather_on_strongly_connected = true; + + // DSCP taging. + absl::optional override_dscp; + + bool piggyback_ice_check_acknowledgement = false; + bool extra_ice_ping = false; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/port.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/port.cc index 51297c46c6..dea18a4c2a 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/port.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/port.cc @@ -18,7 +18,9 @@ #include #include "absl/algorithm/container.h" +#include "absl/memory/memory.h" #include "absl/strings/match.h" +#include "absl/strings/string_view.h" #include "p2p/base/connection.h" #include "p2p/base/port_allocator.h" #include "rtc_base/checks.h" @@ -34,10 +36,15 @@ #include "rtc_base/strings/string_builder.h" #include "rtc_base/third_party/base64/base64.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/field_trial.h" +namespace cricket { namespace { +using ::webrtc::RTCError; +using ::webrtc::RTCErrorType; +using ::webrtc::TaskQueueBase; +using ::webrtc::TimeDelta; + rtc::PacketInfoProtocolType ConvertProtocolTypeToPacketInfoProtocolType( cricket::ProtocolType type) { switch (type) { @@ -60,11 +67,6 @@ const int kPortTimeoutDelay = cricket::STUN_TOTAL_TIMEOUT + 5000; } // namespace -namespace cricket { - -using webrtc::RTCError; -using webrtc::RTCErrorType; - // TODO(ronghuawu): Use "local", "srflx", "prflx" and "relay". But this requires // the signaling part be updated correspondingly as well. const char LOCAL_PORT_TYPE[] = "local"; @@ -80,14 +82,13 @@ const char* ProtoToString(ProtocolType proto) { return PROTO_NAMES[proto]; } -bool StringToProto(const char* value, ProtocolType* proto) { +absl::optional StringToProto(absl::string_view proto_name) { for (size_t i = 0; i <= PROTO_LAST; ++i) { - if (absl::EqualsIgnoreCase(PROTO_NAMES[i], value)) { - *proto = static_cast(i); - return true; + if (absl::EqualsIgnoreCase(PROTO_NAMES[i], proto_name)) { + return static_cast(i); } } - return false; + return absl::nullopt; } // RFC 6544, TCP candidate encoding rules. @@ -96,21 +97,24 @@ const char TCPTYPE_ACTIVE_STR[] = "active"; const char TCPTYPE_PASSIVE_STR[] = "passive"; const char TCPTYPE_SIMOPEN_STR[] = "so"; -std::string Port::ComputeFoundation(const std::string& type, - const std::string& protocol, - const std::string& relay_protocol, +std::string Port::ComputeFoundation(absl::string_view type, + absl::string_view protocol, + absl::string_view relay_protocol, const rtc::SocketAddress& base_address) { + // TODO(bugs.webrtc.org/14605): ensure IceTiebreaker() is set. 
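// ============================================================================
// [Editor's aside: illustrative sketch only, not part of this patch]
// The patch changes StringToProto from a bool-plus-out-parameter signature to
// one returning absl::optional. The sketch below shows that pattern with
// std::optional and a hand-rolled case-insensitive compare so it is
// self-contained; the enum values and names here are illustrative stand-ins.
// ============================================================================
#include <array>
#include <cctype>
#include <cstddef>
#include <iostream>
#include <optional>
#include <string_view>

enum class Proto { kUdp, kTcp, kSsltcp, kTls };
constexpr std::array<std::string_view, 4> kProtoNames = {"udp", "tcp", "ssltcp",
                                                         "tls"};

bool EqualsIgnoreCase(std::string_view a, std::string_view b) {
  if (a.size() != b.size()) return false;
  for (size_t i = 0; i < a.size(); ++i) {
    if (std::tolower(static_cast<unsigned char>(a[i])) !=
        std::tolower(static_cast<unsigned char>(b[i])))
      return false;
  }
  return true;
}

// Returning an optional makes "not found" explicit and removes the out
// parameter the old bool-returning signature needed.
std::optional<Proto> StringToProto(std::string_view name) {
  for (size_t i = 0; i < kProtoNames.size(); ++i) {
    if (EqualsIgnoreCase(kProtoNames[i], name)) return static_cast<Proto>(i);
  }
  return std::nullopt;
}

int main() {
  if (auto proto = StringToProto("TCP")) {
    std::cout << "parsed proto index " << static_cast<int>(*proto) << "\n";
  }
}
// [end of editor's aside] ====================================================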
rtc::StringBuilder sb; - sb << type << base_address.ipaddr().ToString() << protocol << relay_protocol; + sb << type << base_address.ipaddr().ToString() << protocol << relay_protocol + << rtc::ToString(IceTiebreaker()); return rtc::ToString(rtc::ComputeCrc32(sb.Release())); } -Port::Port(rtc::Thread* thread, - const std::string& type, +Port::Port(TaskQueueBase* thread, + absl::string_view type, rtc::PacketSocketFactory* factory, - rtc::Network* network, - const std::string& username_fragment, - const std::string& password) + const rtc::Network* network, + absl::string_view username_fragment, + absl::string_view password, + const webrtc::FieldTrialsView* field_trials) : thread_(thread), factory_(factory), type_(type), @@ -127,19 +131,21 @@ Port::Port(rtc::Thread* thread, ice_role_(ICEROLE_UNKNOWN), tiebreaker_(0), shared_socket_(true), - weak_factory_(this) { + weak_factory_(this), + field_trials_(field_trials) { RTC_DCHECK(factory_ != NULL); Construct(); } -Port::Port(rtc::Thread* thread, - const std::string& type, +Port::Port(TaskQueueBase* thread, + absl::string_view type, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, uint16_t min_port, uint16_t max_port, - const std::string& username_fragment, - const std::string& password) + absl::string_view username_fragment, + absl::string_view password, + const webrtc::FieldTrialsView* field_trials) : thread_(thread), factory_(factory), type_(type), @@ -156,7 +162,8 @@ Port::Port(rtc::Thread* thread, ice_role_(ICEROLE_UNKNOWN), tiebreaker_(0), shared_socket_(false), - weak_factory_(this) { + weak_factory_(this), + field_trials_(field_trials) { RTC_DCHECK(factory_ != NULL); Construct(); } @@ -171,10 +178,9 @@ void Port::Construct() { password_ = rtc::CreateRandomString(ICE_PWD_LENGTH); } network_->SignalTypeChanged.connect(this, &Port::OnNetworkTypeChanged); - network_cost_ = network_->GetCost(); + network_cost_ = network_->GetCost(field_trials()); - thread_->PostDelayed(RTC_FROM_HERE, timeout_delay_, this, - MSG_DESTROY_IF_DEAD); + PostDestroyIfDead(/*delayed=*/true); RTC_LOG(LS_INFO) << ToString() << ": Port created with network cost " << network_cost_; } @@ -182,26 +188,13 @@ void Port::Construct() { Port::~Port() { RTC_DCHECK_RUN_ON(thread_); CancelPendingTasks(); - - // Delete all of the remaining connections. We copy the list up front - // because each deletion will cause it to be modified. - - std::vector list; - - AddressMap::iterator iter = connections_.begin(); - while (iter != connections_.end()) { - list.push_back(iter->second); - ++iter; - } - - for (uint32_t i = 0; i < list.size(); i++) - delete list[i]; + DestroyAllConnections(); } const std::string& Port::Type() const { return type_; } -rtc::Network* Port::Network() const { +const rtc::Network* Port::Network() const { return network_; } @@ -225,16 +218,23 @@ bool Port::SharedSocket() const { } void Port::SetIceParameters(int component, - const std::string& username_fragment, - const std::string& password) { + absl::string_view username_fragment, + absl::string_view password) { + RTC_DCHECK_RUN_ON(thread_); component_ = component; - ice_username_fragment_ = username_fragment; - password_ = password; + ice_username_fragment_ = std::string(username_fragment); + password_ = std::string(password); for (Candidate& c : candidates_) { c.set_component(component); c.set_username(username_fragment); c.set_password(password); } + + // In case any connections exist make sure we update them too. 
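// ============================================================================
// [Editor's aside: illustrative sketch only, not part of this patch]
// The comment above (and the loop that follows it in the patch) is the key
// behavioural change in SetIceParameters: new credentials must be pushed to
// connections that already exist, not only to candidates gathered later. A
// minimal stand-alone sketch of that propagation, with hypothetical FakePort
// and FakeConnection types:
// ============================================================================
#include <iostream>
#include <map>
#include <string>
#include <string_view>

struct FakeConnection {
  std::string ufrag;
  std::string pwd;
  void UpdateLocalIceParameters(std::string_view new_ufrag,
                                std::string_view new_pwd) {
    ufrag = std::string(new_ufrag);
    pwd = std::string(new_pwd);
  }
};

class FakePort {
 public:
  FakeConnection* CreateConnection(const std::string& remote_addr) {
    return &connections_[remote_addr];  // map references stay valid on insert
  }
  // When credentials are replaced after connections already exist, every live
  // connection has to be updated too.
  void SetIceParameters(std::string_view ufrag, std::string_view pwd) {
    for (auto& [address, connection] : connections_) {
      connection.UpdateLocalIceParameters(ufrag, pwd);
    }
  }

 private:
  std::map<std::string, FakeConnection> connections_;
};

int main() {
  FakePort port;
  FakeConnection* c = port.CreateConnection("198.51.100.1:3478");
  port.SetIceParameters("ufrag2", "pwd2");
  std::cout << c->ufrag << "\n";  // prints "ufrag2"
}
// [end of editor's aside] ====================================================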
+ for (auto& [unused, connection] : connections_) { + connection->UpdateLocalIceParameters(component, username_fragment, + password); + } } const std::vector& Port::Candidates() const { @@ -252,14 +252,15 @@ Connection* Port::GetConnection(const rtc::SocketAddress& remote_addr) { void Port::AddAddress(const rtc::SocketAddress& address, const rtc::SocketAddress& base_address, const rtc::SocketAddress& related_address, - const std::string& protocol, - const std::string& relay_protocol, - const std::string& tcptype, - const std::string& type, + absl::string_view protocol, + absl::string_view relay_protocol, + absl::string_view tcptype, + absl::string_view type, uint32_t type_preference, uint32_t relay_preference, - const std::string& url, + absl::string_view url, bool is_final) { + RTC_DCHECK_RUN_ON(thread_); if (protocol == TCP_PROTOCOL_NAME && type == LOCAL_PORT_TYPE) { RTC_DCHECK(!tcptype.empty()); } @@ -274,6 +275,7 @@ void Port::AddAddress(const rtc::SocketAddress& address, c.set_tcptype(tcptype); c.set_network_name(network_->name()); c.set_network_type(network_->type()); + c.set_underlying_type_for_vpn(network_->underlying_type_for_vpn()); c.set_url(url); c.set_related_address(related_address); @@ -285,7 +287,7 @@ void Port::AddAddress(const rtc::SocketAddress& address, } bool Port::MaybeObfuscateAddress(Candidate* c, - const std::string& type, + absl::string_view type, bool is_final) { // TODO(bugs.webrtc.org/9723): Use a config to control the feature of IP // handling with mDNS. @@ -299,7 +301,7 @@ bool Port::MaybeObfuscateAddress(Candidate* c, auto copy = *c; auto weak_ptr = weak_factory_.GetWeakPtr(); auto callback = [weak_ptr, copy, is_final](const rtc::IPAddress& addr, - const std::string& name) mutable { + absl::string_view name) mutable { RTC_DCHECK(copy.address().ipaddr() == addr); rtc::SocketAddress hostname_address(name, copy.address().port()); // In Port and Connection, we need the IP address information to @@ -310,6 +312,7 @@ bool Port::MaybeObfuscateAddress(Candidate* c, copy.set_address(hostname_address); copy.set_related_address(rtc::SocketAddress()); if (weak_ptr != nullptr) { + RTC_DCHECK_RUN_ON(weak_ptr->thread_); weak_ptr->set_mdns_name_registration_status( MdnsNameRegistrationStatus::kCompleted); weak_ptr->FinishAddingAddress(copy, is_final); @@ -345,12 +348,11 @@ void Port::AddOrReplaceConnection(Connection* conn) { << ": A new connection was created on an existing remote address. 
" "New remote candidate: " << conn->remote_candidate().ToSensitiveString(); - ret.first->second->SignalDestroyed.disconnect(this); - ret.first->second->Destroy(); + std::unique_ptr old_conn = absl::WrapUnique(ret.first->second); ret.first->second = conn; + HandleConnectionDestroyed(old_conn.get()); + old_conn->Shutdown(); } - conn->SignalDestroyed.connect(this, &Port::OnConnectionDestroyed); - SignalConnectionCreated(this, conn); } void Port::OnReadPacket(const char* data, @@ -415,9 +417,9 @@ void Port::OnReadyToSend() { } } -size_t Port::AddPrflxCandidate(const Candidate& local) { +void Port::AddPrflxCandidate(const Candidate& local) { + RTC_DCHECK_RUN_ON(thread_); candidates_.push_back(local); - return (candidates_.size() - 1); } bool Port::GetStunMessage(const char* data, @@ -606,6 +608,15 @@ rtc::DiffServCodePoint Port::StunDscpValue() const { return rtc::DSCP_NO_CHANGE; } +void Port::DestroyAllConnections() { + RTC_DCHECK_RUN_ON(thread_); + for (auto& [unused, connection] : connections_) { + connection->Shutdown(); + delete connection; + } + connections_.clear(); +} + void Port::set_timeout_delay(int delay) { RTC_DCHECK_RUN_ON(thread_); // Although this method is meant to only be used by tests, some downstream @@ -630,20 +641,20 @@ bool Port::ParseStunUsername(const StunMessage* stun_msg, return false; // RFRAG:LFRAG - const std::string username = username_attr->GetString(); + const absl::string_view username = username_attr->string_view(); size_t colon_pos = username.find(':'); - if (colon_pos == std::string::npos) { + if (colon_pos == absl::string_view::npos) { return false; } - *local_ufrag = username.substr(0, colon_pos); - *remote_ufrag = username.substr(colon_pos + 1, username.size()); + *local_ufrag = std::string(username.substr(0, colon_pos)); + *remote_ufrag = std::string(username.substr(colon_pos + 1, username.size())); return true; } bool Port::MaybeIceRoleConflict(const rtc::SocketAddress& addr, IceMessage* stun_msg, - const std::string& remote_ufrag) { + absl::string_view remote_ufrag) { // Validate ICE_CONTROLLING or ICE_CONTROLLED attributes. bool ret = true; IceRole remote_ice_role = ICEROLE_UNKNOWN; @@ -702,12 +713,8 @@ bool Port::MaybeIceRoleConflict(const rtc::SocketAddress& addr, return ret; } -void Port::CreateStunUsername(const std::string& remote_username, - std::string* stun_username_attr_str) const { - stun_username_attr_str->clear(); - *stun_username_attr_str = remote_username; - stun_username_attr_str->append(":"); - stun_username_attr_str->append(username_fragment()); +std::string Port::CreateStunUsername(absl::string_view remote_username) const { + return std::string(remote_username) + ":" + username_fragment(); } bool Port::HandleIncomingPacket(rtc::AsyncPacketSocket* socket, @@ -723,42 +730,39 @@ bool Port::CanHandleIncomingPacketsFrom(const rtc::SocketAddress&) const { return false; } -void Port::SendBindingErrorResponse(StunMessage* request, +void Port::SendBindingErrorResponse(StunMessage* message, const rtc::SocketAddress& addr, int error_code, - const std::string& reason) { - RTC_DCHECK(request->type() == STUN_BINDING_REQUEST || - request->type() == GOOG_PING_REQUEST); + absl::string_view reason) { + RTC_DCHECK(message->type() == STUN_BINDING_REQUEST || + message->type() == GOOG_PING_REQUEST); // Fill in the response message. 
- StunMessage response; - if (request->type() == STUN_BINDING_REQUEST) { - response.SetType(STUN_BINDING_ERROR_RESPONSE); - } else { - response.SetType(GOOG_PING_ERROR_RESPONSE); - } - response.SetTransactionID(request->transaction_id()); + StunMessage response(message->type() == STUN_BINDING_REQUEST + ? STUN_BINDING_ERROR_RESPONSE + : GOOG_PING_ERROR_RESPONSE, + message->transaction_id()); // When doing GICE, we need to write out the error code incorrectly to // maintain backwards compatiblility. auto error_attr = StunAttribute::CreateErrorCode(); error_attr->SetCode(error_code); - error_attr->SetReason(reason); + error_attr->SetReason(std::string(reason)); response.AddAttribute(std::move(error_attr)); // Per Section 10.1.2, certain error cases don't get a MESSAGE-INTEGRITY, // because we don't have enough information to determine the shared secret. if (error_code != STUN_ERROR_BAD_REQUEST && error_code != STUN_ERROR_UNAUTHORIZED && - request->type() != GOOG_PING_REQUEST) { - if (request->type() == STUN_BINDING_REQUEST) { + message->type() != GOOG_PING_REQUEST) { + if (message->type() == STUN_BINDING_REQUEST) { response.AddMessageIntegrity(password_); } else { response.AddMessageIntegrity32(password_); } } - if (request->type() == STUN_BINDING_REQUEST) { + if (message->type() == STUN_BINDING_REQUEST) { response.AddFingerprint(); } @@ -776,15 +780,13 @@ void Port::SendBindingErrorResponse(StunMessage* request, } void Port::SendUnknownAttributesErrorResponse( - StunMessage* request, + StunMessage* message, const rtc::SocketAddress& addr, const std::vector& unknown_types) { - RTC_DCHECK(request->type() == STUN_BINDING_REQUEST); + RTC_DCHECK(message->type() == STUN_BINDING_REQUEST); // Fill in the response message. - StunMessage response; - response.SetType(STUN_BINDING_ERROR_RESPONSE); - response.SetTransactionID(request->transaction_id()); + StunMessage response(STUN_BINDING_ERROR_RESPONSE, message->transaction_id()); auto error_attr = StunAttribute::CreateErrorCode(); error_attr->SetCode(STUN_ERROR_UNKNOWN_ATTRIBUTE); @@ -822,19 +824,33 @@ void Port::KeepAliveUntilPruned() { void Port::Prune() { state_ = State::PRUNED; - thread_->Post(RTC_FROM_HERE, this, MSG_DESTROY_IF_DEAD); + PostDestroyIfDead(/*delayed=*/false); } // Call to stop any currently pending operations from running. void Port::CancelPendingTasks() { TRACE_EVENT0("webrtc", "Port::CancelPendingTasks"); RTC_DCHECK_RUN_ON(thread_); - thread_->Clear(this); + weak_factory_.InvalidateWeakPtrs(); +} + +void Port::PostDestroyIfDead(bool delayed) { + rtc::WeakPtr weak_ptr = NewWeakPtr(); + auto task = [weak_ptr = std::move(weak_ptr)] { + if (weak_ptr) { + weak_ptr->DestroyIfDead(); + } + }; + if (delayed) { + thread_->PostDelayedTask(std::move(task), + TimeDelta::Millis(timeout_delay_)); + } else { + thread_->PostTask(std::move(task)); + } } -void Port::OnMessage(rtc::Message* pmsg) { +void Port::DestroyIfDead() { RTC_DCHECK_RUN_ON(thread_); - RTC_DCHECK(pmsg->message_id == MSG_DESTROY_IF_DEAD); bool dead = (state_ == State::INIT || state_ == State::PRUNED) && connections_.empty() && @@ -868,7 +884,8 @@ std::string Port::ToString() const { // TODO(honghaiz): Make the network cost configurable from user setting. void Port::UpdateNetworkCost() { - uint16_t new_cost = network_->GetCost(); + RTC_DCHECK_RUN_ON(thread_); + uint16_t new_cost = network_->GetCost(field_trials()); if (network_cost_ == new_cost) { return; } @@ -878,27 +895,26 @@ void Port::UpdateNetworkCost() { << ". 
Number of connections created: " << connections_.size(); network_cost_ = new_cost; - for (cricket::Candidate& candidate : candidates_) { + for (cricket::Candidate& candidate : candidates_) candidate.set_network_cost(network_cost_); - } - // Network cost change will affect the connection selection criteria. - // Signal the connection state change on each connection to force a - // re-sort in P2PTransportChannel. - for (const auto& kv : connections_) { - Connection* conn = kv.second; - conn->SignalStateChange(conn); - } + + for (auto& [unused, connection] : connections_) + connection->SetLocalCandidateNetworkCost(network_cost_); } void Port::EnablePortPackets() { enable_port_packets_ = true; } -void Port::OnConnectionDestroyed(Connection* conn) { - AddressMap::iterator iter = - connections_.find(conn->remote_candidate().address()); - RTC_DCHECK(iter != connections_.end()); - connections_.erase(iter); +bool Port::OnConnectionDestroyed(Connection* conn) { + if (connections_.erase(conn->remote_candidate().address()) == 0) { + // This could indicate a programmer error outside of webrtc so while we + // do have this check here to alert external developers, we also need to + // handle it since it might be a corner case not caught in tests. + RTC_DCHECK_NOTREACHED() << "Calling Destroy recursively?"; + return false; + } + HandleConnectionDestroyed(conn); // Ports time out after all connections fail if it is not marked as @@ -908,8 +924,28 @@ void Port::OnConnectionDestroyed(Connection* conn) { // not cause the Port to be destroyed. if (connections_.empty()) { last_time_all_connections_removed_ = rtc::TimeMillis(); - thread_->PostDelayed(RTC_FROM_HERE, timeout_delay_, this, - MSG_DESTROY_IF_DEAD); + PostDestroyIfDead(/*delayed=*/true); + } + + return true; +} + +void Port::DestroyConnectionInternal(Connection* conn, bool async) { + RTC_DCHECK_RUN_ON(thread_); + if (!OnConnectionDestroyed(conn)) + return; + + conn->Shutdown(); + if (async) { + // Unwind the stack before deleting the object in case upstream callers + // need to refer to the Connection's state as part of teardown. + // NOTE: We move ownership of `conn` into the capture section of the lambda + // so that the object will always be deleted, including if PostTask fails. + // In such a case (only tests), deletion would happen inside of the call + // to `DestroyConnection()`. 
+ thread_->PostTask([conn = absl::WrapUnique(conn)]() {}); + } else { + delete conn; } } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/port.h b/TMessagesProj/jni/voip/webrtc/p2p/base/port.h index 991872902a..d746f3de02 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/port.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/port.h @@ -18,10 +18,14 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/candidate.h" +#include "api/field_trials_view.h" #include "api/packet_socket_factory.h" #include "api/rtc_error.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/field_trial_based_config.h" #include "api/transport/stun.h" #include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" #include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" @@ -35,6 +39,7 @@ #include "rtc_base/async_packet_socket.h" #include "rtc_base/callback_list.h" #include "rtc_base/checks.h" +#include "rtc_base/memory/always_valid_pointer.h" #include "rtc_base/net_helper.h" #include "rtc_base/network.h" #include "rtc_base/proxy_info.h" @@ -42,7 +47,6 @@ #include "rtc_base/socket_address.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread.h" #include "rtc_base/weak_ptr.h" namespace cricket { @@ -58,7 +62,9 @@ extern const char TCPTYPE_ACTIVE_STR[]; extern const char TCPTYPE_PASSIVE_STR[]; extern const char TCPTYPE_SIMOPEN_STR[]; -enum IcePriorityValue { +// The type preference MUST be an integer from 0 to 126 inclusive. +// https://datatracker.ietf.org/doc/html/rfc5245#section-4.1.2.1 +enum IcePriorityValue : uint8_t { ICE_TYPE_PREFERENCE_RELAY_TLS = 0, ICE_TYPE_PREFERENCE_RELAY_TCP = 1, ICE_TYPE_PREFERENCE_RELAY_UDP = 2, @@ -122,7 +128,7 @@ class CandidateStats { typedef std::vector CandidateStatsList; const char* ProtoToString(ProtocolType proto); -bool StringToProto(const char* value, ProtocolType* proto); +absl::optional StringToProto(absl::string_view proto_name); struct ProtocolAddress { rtc::SocketAddress address; @@ -139,11 +145,11 @@ struct ProtocolAddress { struct IceCandidateErrorEvent { IceCandidateErrorEvent() = default; - IceCandidateErrorEvent(std::string address, + IceCandidateErrorEvent(absl::string_view address, int port, - std::string url, + absl::string_view url, int error_code, - std::string error_text) + absl::string_view error_text) : address(std::move(address)), port(port), url(std::move(url)), @@ -170,9 +176,7 @@ typedef std::set ServerAddresses; // Represents a local communication mechanism that can be used to create // connections to similar mechanisms of the other client. Subclasses of this // one add support for specific mechanisms like local UDP ports. -class Port : public PortInterface, - public rtc::MessageHandler, - public sigslot::has_slots<> { +class Port : public PortInterface, public sigslot::has_slots<> { public: // INIT: The state when a port is just created. // KEEP_ALIVE_UNTIL_PRUNED: A port should not be destroyed even if no @@ -180,20 +184,22 @@ class Port : public PortInterface, // PRUNED: It will be destroyed if no connection is using it for a period of // 30 seconds. 
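// ============================================================================
// [Editor's aside: illustrative sketch only, not part of this patch]
// DestroyConnectionInternal above defers deletion by posting a task that owns
// the object, so the delete only happens after the current call stack has
// unwound. The sketch below reproduces that idea with a tiny hypothetical
// task queue; it captures a shared_ptr because std::function requires
// copyable callables, whereas the PostTask in the patch accepts a move-only
// functor and can capture the unique_ptr directly.
// ============================================================================
#include <functional>
#include <iostream>
#include <memory>
#include <utility>
#include <vector>

struct Widget {
  ~Widget() { std::cout << "Widget deleted\n"; }
  void Close() { std::cout << "Widget closed\n"; }
};

// Minimal stand-in for a task queue: tasks run later, after the caller
// returns.
class TinyTaskQueue {
 public:
  void PostTask(std::function<void()> task) {
    tasks_.push_back(std::move(task));
  }
  void RunPending() {
    for (auto& task : tasks_) task();
    tasks_.clear();  // task objects (and anything they own) are released here
  }

 private:
  std::vector<std::function<void()>> tasks_;
};

// Shut the object down now, but let an otherwise empty task own it so that
// deletion is deferred until the queue drains.
void DestroyAsync(TinyTaskQueue& queue, std::unique_ptr<Widget> w) {
  w->Close();
  std::shared_ptr<Widget> owned(std::move(w));
  queue.PostTask([owned]() {});
}

int main() {
  TinyTaskQueue queue;
  DestroyAsync(queue, std::make_unique<Widget>());
  std::cout << "caller still running, Widget not yet deleted\n";
  queue.RunPending();  // "Widget deleted" is printed during this call
}
// [end of editor's aside] ====================================================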
enum class State { INIT, KEEP_ALIVE_UNTIL_PRUNED, PRUNED }; - Port(rtc::Thread* thread, - const std::string& type, + Port(webrtc::TaskQueueBase* thread, + absl::string_view type, rtc::PacketSocketFactory* factory, - rtc::Network* network, - const std::string& username_fragment, - const std::string& password); - Port(rtc::Thread* thread, - const std::string& type, + const rtc::Network* network, + absl::string_view username_fragment, + absl::string_view password, + const webrtc::FieldTrialsView* field_trials = nullptr); + Port(webrtc::TaskQueueBase* thread, + absl::string_view type, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, uint16_t min_port, uint16_t max_port, - const std::string& username_fragment, - const std::string& password); + absl::string_view username_fragment, + absl::string_view password, + const webrtc::FieldTrialsView* field_trials = nullptr); ~Port() override; // Note that the port type does NOT uniquely identify different subclasses of @@ -202,7 +208,7 @@ class Port : public PortInterface, // conflit in the value of the 2-tuple, make sure that the implementation that // relies on this 2-tuple for RTTI is properly changed. const std::string& Type() const override; - rtc::Network* Network() const override; + const rtc::Network* Network() const override; // Methods to set/get ICE role and tiebreaker values. IceRole GetIceRole() const override; @@ -224,15 +230,15 @@ class Port : public PortInterface, void CancelPendingTasks(); // The thread on which this port performs its I/O. - rtc::Thread* thread() { return thread_; } + webrtc::TaskQueueBase* thread() { return thread_; } // The factory used to create the sockets of this port. rtc::PacketSocketFactory* socket_factory() const { return factory_; } // For debugging purposes. const std::string& content_name() const { return content_name_; } - void set_content_name(const std::string& content_name) { - content_name_ = content_name; + void set_content_name(absl::string_view content_name) { + content_name_ = std::string(content_name); } int component() const { return component_; } @@ -256,8 +262,8 @@ class Port : public PortInterface, // PortAllocatorSession, and is now being assigned to an ICE transport. // Updates the information for candidates as well. void SetIceParameters(int component, - const std::string& username_fragment, - const std::string& password); + absl::string_view username_fragment, + absl::string_view password); // Fired when candidates are discovered by the port. When all candidates // are discovered that belong to port SignalAddressReady is fired. @@ -288,8 +294,18 @@ class Port : public PortInterface, // Returns the connection to the given address or NULL if none exists. Connection* GetConnection(const rtc::SocketAddress& remote_addr) override; - // Called each time a connection is created. - sigslot::signal2 SignalConnectionCreated; + // Removes and deletes a connection object. `DestroyConnection` will + // delete the connection object directly whereas `DestroyConnectionAsync` + // defers the `delete` operation to when the call stack has been unwound. + // Async may be needed when deleting a connection object from within a + // callback. + void DestroyConnection(Connection* conn) { + DestroyConnectionInternal(conn, false); + } + + void DestroyConnectionAsync(Connection* conn) { + DestroyConnectionInternal(conn, true); + } // In a shared socket mode each port which shares the socket will decide // to accept the packet based on the `remote_addr`. 
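// ============================================================================
// [Editor's aside: illustrative sketch only, not part of this patch]
// The IcePriorityValue enum above is given a uint8_t underlying type because
// RFC 5245 section 4.1.2.1 restricts the type preference to 0..126. That type
// preference feeds the standard candidate priority formula
//   priority = 2^24 * type_pref + 2^8 * local_pref + (256 - component_id),
// sketched below (this helper is an illustration, not the WebRTC code):
// ============================================================================
#include <cstdint>
#include <iostream>

uint32_t IcePriority(uint8_t type_preference, uint16_t local_preference,
                     uint32_t component_id) {
  return (static_cast<uint32_t>(type_preference) << 24) |
         (static_cast<uint32_t>(local_preference) << 8) |
         (256 - component_id);
}

int main() {
  // Host UDP candidate (type preference 126), RTP component (1).
  std::cout << IcePriority(126, 65535, 1) << "\n";  // 2130706431
}
// [end of editor's aside] ====================================================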
Currently only UDP @@ -307,17 +323,17 @@ class Port : public PortInterface, const rtc::SocketAddress& remote_addr) const; // Sends a response error to the given request. - void SendBindingErrorResponse(StunMessage* request, + void SendBindingErrorResponse(StunMessage* message, const rtc::SocketAddress& addr, int error_code, - const std::string& reason) override; + absl::string_view reason) override; void SendUnknownAttributesErrorResponse( - StunMessage* request, + StunMessage* message, const rtc::SocketAddress& addr, const std::vector& unknown_types); - void set_proxy(const std::string& user_agent, const rtc::ProxyInfo& proxy) { - user_agent_ = user_agent; + void set_proxy(absl::string_view user_agent, const rtc::ProxyInfo& proxy) { + user_agent_ = std::string(user_agent); proxy_ = proxy; } const std::string& user_agent() { return user_agent_; } @@ -328,8 +344,6 @@ class Port : public PortInterface, // Called if the port has no connections and is no longer useful. void Destroy(); - void OnMessage(rtc::Message* pmsg) override; - // Debugging description of this port std::string ToString() const override; uint16_t min_port() { return min_port_; } @@ -343,12 +357,11 @@ class Port : public PortInterface, bool ParseStunUsername(const StunMessage* stun_msg, std::string* local_username, std::string* remote_username) const; - void CreateStunUsername(const std::string& remote_username, - std::string* stun_username_attr_str) const; + std::string CreateStunUsername(absl::string_view remote_username) const; bool MaybeIceRoleConflict(const rtc::SocketAddress& addr, IceMessage* stun_msg, - const std::string& remote_ufrag); + absl::string_view remote_ufrag); // Called when a packet has been sent to the socket. // This is made pure virtual to notify subclasses of Port that they MUST @@ -361,8 +374,7 @@ class Port : public PortInterface, void OnReadyToSend(); // Called when the Connection discovers a local peer reflexive candidate. - // Returns the index of the new local candidate. - size_t AddPrflxCandidate(const Candidate& local); + void AddPrflxCandidate(const Candidate& local); int16_t network_cost() const { return network_cost_; } @@ -374,31 +386,32 @@ class Port : public PortInterface, // then the foundation will be different. Two candidate pairs with // the same foundation pairs are likely to have similar network // characteristics. Foundations are used in the frozen algorithm. 
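// ============================================================================
// [Editor's aside: illustrative sketch only, not part of this patch]
// The comment above explains foundations: candidates that hash the same
// inputs (type, base address, protocol, relay protocol, and now the ICE
// tiebreaker) share a foundation and are treated as similar by the frozen
// algorithm. The sketch below mirrors that shape; std::hash stands in for the
// rtc::ComputeCrc32 used in the patch and the field set is illustrative.
// ============================================================================
#include <cstdint>
#include <functional>
#include <iostream>
#include <string>
#include <string_view>

std::string ComputeFoundation(std::string_view type, std::string_view protocol,
                              std::string_view relay_protocol,
                              std::string_view base_ip, uint64_t tiebreaker) {
  std::string key;
  key.append(type).append(base_ip).append(protocol).append(relay_protocol);
  key += std::to_string(tiebreaker);
  return std::to_string(std::hash<std::string>{}(key));
}

int main() {
  // Same type/protocol/base address and tiebreaker -> same foundation.
  std::cout << (ComputeFoundation("host", "udp", "", "192.0.2.1", 42) ==
                ComputeFoundation("host", "udp", "", "192.0.2.1", 42))
            << "\n";  // 1
}
// [end of editor's aside] ====================================================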
- static std::string ComputeFoundation(const std::string& type, - const std::string& protocol, - const std::string& relay_protocol, - const rtc::SocketAddress& base_address); + std::string ComputeFoundation(absl::string_view type, + absl::string_view protocol, + absl::string_view relay_protocol, + const rtc::SocketAddress& base_address); protected: - enum { MSG_DESTROY_IF_DEAD = 0, MSG_FIRST_AVAILABLE }; - virtual void UpdateNetworkCost(); - void set_type(const std::string& type) { type_ = type; } + void set_type(absl::string_view type) { type_ = std::string(type); } + + rtc::WeakPtr NewWeakPtr() { return weak_factory_.GetWeakPtr(); } void AddAddress(const rtc::SocketAddress& address, const rtc::SocketAddress& base_address, const rtc::SocketAddress& related_address, - const std::string& protocol, - const std::string& relay_protocol, - const std::string& tcptype, - const std::string& type, + absl::string_view protocol, + absl::string_view relay_protocol, + absl::string_view tcptype, + absl::string_view type, uint32_t type_preference, uint32_t relay_preference, - const std::string& url, + absl::string_view url, bool is_final); - void FinishAddingAddress(const Candidate& c, bool is_final); + void FinishAddingAddress(const Candidate& c, bool is_final) + RTC_RUN_ON(thread_); virtual void PostAddAddress(bool is_final); @@ -435,6 +448,8 @@ class Port : public PortInterface, // Extra work to be done in subclasses when a connection is destroyed. virtual void HandleConnectionDestroyed(Connection* conn) {} + void DestroyAllConnections(); + void CopyPortInformationToPacketInfo(rtc::PacketInfo* info) const; MdnsNameRegistrationStatus mdns_name_registration_status() const { @@ -444,18 +459,34 @@ class Port : public PortInterface, mdns_name_registration_status_ = status; } + const webrtc::FieldTrialsView& field_trials() const { return *field_trials_; } + private: void Construct(); - // Called when one of our connections deletes itself. - void OnConnectionDestroyed(Connection* conn); + + void PostDestroyIfDead(bool delayed); + void DestroyIfDead(); + + // Called internally when deleting a connection object. + // Returns true if the connection object was removed from the `connections_` + // list and the state updated accordingly. If the connection was not found + // in the list, the return value is false. Note that this may indicate + // incorrect behavior of external code that might be attempting to delete + // connection objects from within a 'on destroyed' callback notification + // for the connection object itself. + bool OnConnectionDestroyed(Connection* conn); + + // Private implementation of DestroyConnection to keep the async usage + // distinct. + void DestroyConnectionInternal(Connection* conn, bool async); void OnNetworkTypeChanged(const rtc::Network* network); - rtc::Thread* const thread_; + webrtc::TaskQueueBase* const thread_; rtc::PacketSocketFactory* const factory_; std::string type_; bool send_retransmit_count_attribute_; - rtc::Network* network_; + const rtc::Network* network_; uint16_t min_port_; uint16_t max_port_; std::string content_name_; @@ -471,7 +502,7 @@ class Port : public PortInterface, // username_fragment(). 
std::string ice_username_fragment_; std::string password_; - std::vector candidates_; + std::vector candidates_ RTC_GUARDED_BY(thread_); AddressMap connections_; int timeout_delay_; bool enable_port_packets_; @@ -492,10 +523,13 @@ class Port : public PortInterface, MdnsNameRegistrationStatus::kNotStarted; rtc::WeakPtrFactory weak_factory_; + webrtc::AlwaysValidPointer + field_trials_; bool MaybeObfuscateAddress(Candidate* c, - const std::string& type, - bool is_final); + absl::string_view type, + bool is_final) RTC_RUN_ON(thread_); friend class Connection; webrtc::CallbackList port_destroyed_callback_list_; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/port_allocator.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/port_allocator.cc index 6c3ccc8e46..522f0beb98 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/port_allocator.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/port_allocator.cc @@ -14,6 +14,7 @@ #include #include +#include "absl/strings/string_view.h" #include "p2p/base/ice_credentials_iterator.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -23,17 +24,17 @@ namespace cricket { RelayServerConfig::RelayServerConfig() {} RelayServerConfig::RelayServerConfig(const rtc::SocketAddress& address, - const std::string& username, - const std::string& password, + absl::string_view username, + absl::string_view password, ProtocolType proto) : credentials(username, password) { ports.push_back(ProtocolAddress(address, proto)); } -RelayServerConfig::RelayServerConfig(const std::string& address, +RelayServerConfig::RelayServerConfig(absl::string_view address, int port, - const std::string& username, - const std::string& password, + absl::string_view username, + absl::string_view password, ProtocolType proto) : RelayServerConfig(rtc::SocketAddress(address, port), username, @@ -41,10 +42,10 @@ RelayServerConfig::RelayServerConfig(const std::string& address, proto) {} // Legacy constructor where "secure" and PROTO_TCP implies PROTO_TLS. -RelayServerConfig::RelayServerConfig(const std::string& address, +RelayServerConfig::RelayServerConfig(absl::string_view address, int port, - const std::string& username, - const std::string& password, + absl::string_view username, + absl::string_view password, ProtocolType proto, bool secure) : RelayServerConfig(address, @@ -57,17 +58,18 @@ RelayServerConfig::RelayServerConfig(const RelayServerConfig&) = default; RelayServerConfig::~RelayServerConfig() = default; -PortAllocatorSession::PortAllocatorSession(const std::string& content_name, +PortAllocatorSession::PortAllocatorSession(absl::string_view content_name, int component, - const std::string& ice_ufrag, - const std::string& ice_pwd, + absl::string_view ice_ufrag, + absl::string_view ice_pwd, uint32_t flags) : flags_(flags), generation_(0), content_name_(content_name), component_(component), ice_ufrag_(ice_ufrag), - ice_pwd_(ice_pwd) { + ice_pwd_(ice_pwd), + tiebreaker_(0) { // Pooled sessions are allowed to be created with empty content name, // component, ufrag and password. RTC_DCHECK(ice_ufrag.empty() == ice_pwd.empty()); @@ -98,7 +100,8 @@ PortAllocator::PortAllocator() max_ipv6_networks_(kDefaultMaxIPv6Networks), step_delay_(kDefaultStepDelay), allow_tcp_listen_(true), - candidate_filter_(CF_ALL) { + candidate_filter_(CF_ALL), + tiebreaker_(0) { // The allocator will be attached to a thread in Initialize. 
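// ============================================================================
// [Editor's aside: illustrative sketch only, not part of this patch]
// The recurring signature change in this patch (RelayServerConfig,
// PortAllocatorSession, Port, and friends) is: parameters become
// absl::string_view while storage stays std::string, so callers can pass
// literals, std::string or views without extra allocations at the call site,
// and the one copy happens where the value is stored. A std::string_view
// sketch of the same pattern:
// ============================================================================
#include <iostream>
#include <string>
#include <string_view>

class RelayCredentials {
 public:
  // Direct-initialization of the std::string members is the single point
  // where the character data is copied.
  RelayCredentials(std::string_view username, std::string_view password)
      : username_(username), password_(password) {}

  const std::string& username() const { return username_; }

 private:
  std::string username_;
  std::string password_;
};

int main() {
  std::string user = "alice";
  RelayCredentials a(user, "secret");              // std::string + literal
  RelayCredentials b(std::string_view("bob"), "pw");
  std::cout << a.username() << " " << b.username() << "\n";
}
// [end of editor's aside] ====================================================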
thread_checker_.Detach(); } @@ -198,6 +201,7 @@ bool PortAllocator::SetConfiguration( PortAllocatorSession* pooled_session = CreateSessionInternal("", 0, iceCredentials.ufrag, iceCredentials.pwd); pooled_session->set_pooled(true); + pooled_session->set_ice_tiebreaker(tiebreaker_); pooled_session->StartGettingPorts(); pooled_sessions_.push_back( std::unique_ptr(pooled_session)); @@ -205,23 +209,31 @@ bool PortAllocator::SetConfiguration( return true; } +void PortAllocator::SetIceTiebreaker(uint64_t tiebreaker) { + tiebreaker_ = tiebreaker; + for (auto& pooled_session : pooled_sessions_) { + pooled_session->set_ice_tiebreaker(tiebreaker_); + } +} + std::unique_ptr PortAllocator::CreateSession( - const std::string& content_name, + absl::string_view content_name, int component, - const std::string& ice_ufrag, - const std::string& ice_pwd) { + absl::string_view ice_ufrag, + absl::string_view ice_pwd) { CheckRunOnValidThreadAndInitialized(); auto session = std::unique_ptr( CreateSessionInternal(content_name, component, ice_ufrag, ice_pwd)); session->SetCandidateFilter(candidate_filter()); + session->set_ice_tiebreaker(tiebreaker_); return session; } std::unique_ptr PortAllocator::TakePooledSession( - const std::string& content_name, + absl::string_view content_name, int component, - const std::string& ice_ufrag, - const std::string& ice_pwd) { + absl::string_view ice_ufrag, + absl::string_view ice_pwd) { CheckRunOnValidThreadAndInitialized(); RTC_DCHECK(!ice_ufrag.empty()); RTC_DCHECK(!ice_pwd.empty()); diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/port_allocator.h b/TMessagesProj/jni/voip/webrtc/p2p/base/port_allocator.h index 08584d988b..46358439ac 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/port_allocator.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/port_allocator.h @@ -16,6 +16,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/sequence_checker.h" #include "api/transport/enums.h" #include "p2p/base/port.h" @@ -134,7 +135,7 @@ enum class TlsCertPolicy { // TODO(deadbeef): Rename to TurnCredentials (and username to ufrag). struct RelayCredentials { RelayCredentials() {} - RelayCredentials(const std::string& username, const std::string& password) + RelayCredentials(absl::string_view username, absl::string_view password) : username(username), password(password) {} bool operator==(const RelayCredentials& o) const { @@ -151,19 +152,19 @@ typedef std::vector PortList; struct RTC_EXPORT RelayServerConfig { RelayServerConfig(); RelayServerConfig(const rtc::SocketAddress& address, - const std::string& username, - const std::string& password, + absl::string_view username, + absl::string_view password, ProtocolType proto); - RelayServerConfig(const std::string& address, + RelayServerConfig(absl::string_view address, int port, - const std::string& username, - const std::string& password, + absl::string_view username, + absl::string_view password, ProtocolType proto); // Legacy constructor where "secure" and PROTO_TCP implies PROTO_TLS. - RelayServerConfig(const std::string& address, + RelayServerConfig(absl::string_view address, int port, - const std::string& username, - const std::string& password, + absl::string_view username, + absl::string_view password, ProtocolType proto, bool secure); RelayServerConfig(const RelayServerConfig&); @@ -188,10 +189,10 @@ struct RTC_EXPORT RelayServerConfig { class RTC_EXPORT PortAllocatorSession : public sigslot::has_slots<> { public: // Content name passed in mostly for logging and debugging. 
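// ============================================================================
// [Editor's aside: illustrative sketch only, not part of this patch]
// The tiebreaker being plumbed through PortAllocator and its sessions above
// is the 64-bit random value from RFC 5245 (sections 5.2 and 7.2.1.1): each
// agent generates one and, on a role conflict, the side with the larger
// tiebreaker keeps its role. A simplified sketch of that rule:
// ============================================================================
#include <cstdint>
#include <iostream>
#include <random>

enum class IceRole { kControlling, kControlled };

// Each agent picks a random 64-bit tiebreaker at session start.
uint64_t GenerateTiebreaker() {
  std::random_device rd;
  std::mt19937_64 gen(rd());
  return gen();
}

// Simplified conflict resolution: if both sides claim the same role, the
// agent with the larger (or equal) tiebreaker keeps it, the other switches.
IceRole ResolveConflict(IceRole my_role, uint64_t my_tb, uint64_t remote_tb) {
  if (my_tb >= remote_tb) return my_role;
  return my_role == IceRole::kControlling ? IceRole::kControlled
                                          : IceRole::kControlling;
}

int main() {
  uint64_t mine = GenerateTiebreaker();
  uint64_t theirs = GenerateTiebreaker();
  IceRole role = ResolveConflict(IceRole::kControlling, mine, theirs);
  std::cout << (role == IceRole::kControlling ? "controlling" : "controlled")
            << "\n";
}
// [end of editor's aside] ====================================================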
- PortAllocatorSession(const std::string& content_name, + PortAllocatorSession(absl::string_view content_name, int component, - const std::string& ice_ufrag, - const std::string& ice_pwd, + absl::string_view ice_ufrag, + absl::string_view ice_pwd, uint32_t flags); // Subclasses should clean up any ports created. @@ -205,6 +206,10 @@ class RTC_EXPORT PortAllocatorSession : public sigslot::has_slots<> { const std::string& ice_pwd() const { return ice_pwd_; } bool pooled() const { return pooled_; } + // TODO(bugs.webrtc.org/14605): move this to the constructor + void set_ice_tiebreaker(uint64_t tiebreaker) { tiebreaker_ = tiebreaker; } + uint64_t ice_tiebreaker() const { return tiebreaker_; } + // Setting this filter should affect not only candidates gathered in the // future, but candidates already gathered and ports already "ready", // which would be returned by ReadyCandidates() and ReadyPorts(). @@ -299,14 +304,14 @@ class RTC_EXPORT PortAllocatorSession : public sigslot::has_slots<> { const std::string& password() const { return ice_pwd_; } private: - void SetIceParameters(const std::string& content_name, + void SetIceParameters(absl::string_view content_name, int component, - const std::string& ice_ufrag, - const std::string& ice_pwd) { - content_name_ = content_name; + absl::string_view ice_ufrag, + absl::string_view ice_pwd) { + content_name_ = std::string(content_name); component_ = component; - ice_ufrag_ = ice_ufrag; - ice_pwd_ = ice_pwd; + ice_ufrag_ = std::string(ice_ufrag); + ice_pwd_ = std::string(ice_pwd); UpdateIceParametersInternal(); } @@ -321,6 +326,9 @@ class RTC_EXPORT PortAllocatorSession : public sigslot::has_slots<> { bool pooled_ = false; + // TODO(bugs.webrtc.org/14605): move this to the constructor + uint64_t tiebreaker_; + // SetIceParameters is an implementation detail which only PortAllocator // should be able to call. friend class PortAllocator; @@ -373,6 +381,9 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { const absl::optional& stun_candidate_keepalive_interval = absl::nullopt); + void SetIceTiebreaker(uint64_t tiebreaker); + uint64_t IceTiebreaker() const { return tiebreaker_; } + const ServerAddresses& stun_servers() const { CheckRunOnValidThreadIfInitialized(); return stun_servers_; @@ -411,10 +422,10 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { virtual void SetVpnList(const std::vector& vpn_list) {} std::unique_ptr CreateSession( - const std::string& content_name, + absl::string_view content_name, int component, - const std::string& ice_ufrag, - const std::string& ice_pwd); + absl::string_view ice_ufrag, + absl::string_view ice_pwd); // Get an available pooled session and set the transport information on it. // @@ -424,10 +435,10 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { // return a pooled session with matching ice credentials. // If no pooled sessions are available, returns null. std::unique_ptr TakePooledSession( - const std::string& content_name, + absl::string_view content_name, int component, - const std::string& ice_ufrag, - const std::string& ice_pwd); + absl::string_view ice_ufrag, + absl::string_view ice_pwd); // Returns the next session that would be returned by TakePooledSession // optionally restricting it to sessions with specified ice credentials. 
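// ============================================================================
// [Editor's aside: illustrative sketch only, not part of this patch]
// TakePooledSession above hands out a pre-started session and re-labels it
// with the caller's ICE credentials instead of starting gathering from
// scratch. The toy pool below shows the take-or-create shape only; the real
// PortAllocator also matches pooled sessions by credentials and keeps them on
// a checked thread.
// ============================================================================
#include <iostream>
#include <memory>
#include <string>
#include <utility>
#include <vector>

struct Session {
  std::string ufrag;
  std::string pwd;
  bool started = false;
};

class SessionPool {
 public:
  void Prewarm(int count) {
    for (int i = 0; i < count; ++i) {
      auto s = std::make_unique<Session>();
      s->started = true;  // gathering already started while pooled
      pool_.push_back(std::move(s));
    }
  }

  std::unique_ptr<Session> TakeOrCreate(std::string ufrag, std::string pwd) {
    std::unique_ptr<Session> s;
    if (!pool_.empty()) {
      s = std::move(pool_.back());
      pool_.pop_back();
    } else {
      s = std::make_unique<Session>();
    }
    s->ufrag = std::move(ufrag);
    s->pwd = std::move(pwd);
    return s;
  }

 private:
  std::vector<std::unique_ptr<Session>> pool_;
};

int main() {
  SessionPool pool;
  pool.Prewarm(1);
  auto s = pool.TakeOrCreate("uf1", "pw1");
  std::cout << s->ufrag << " pre-started=" << s->started << "\n";
}
// [end of editor's aside] ====================================================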
@@ -476,9 +487,9 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { return proxy_; } - void set_proxy(const std::string& agent, const rtc::ProxyInfo& proxy) { + void set_proxy(absl::string_view agent, const rtc::ProxyInfo& proxy) { CheckRunOnValidThreadIfInitialized(); - agent_ = agent; + agent_ = std::string(agent); proxy_ = proxy; } @@ -602,11 +613,13 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { SignalCandidateFilterChanged; protected: + // TODO(webrtc::13579): Remove std::string version once downstream users have + // migrated to the absl::string_view version. virtual PortAllocatorSession* CreateSessionInternal( - const std::string& content_name, + absl::string_view content_name, int component, - const std::string& ice_ufrag, - const std::string& ice_pwd) = 0; + absl::string_view ice_ufrag, + absl::string_view ice_pwd) = 0; const std::vector>& pooled_sessions() { return pooled_sessions_; @@ -662,6 +675,9 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { // if ice_credentials is nullptr. std::vector>::const_iterator FindPooledSession(const IceParameters* ice_credentials = nullptr) const; + + // ICE tie breaker. + uint64_t tiebreaker_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/port_interface.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/port_interface.cc index 9175ca54e3..b07cdf9ee6 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/port_interface.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/port_interface.cc @@ -10,6 +10,10 @@ #include "p2p/base/port_interface.h" +#include + +#include "absl/strings/string_view.h" + namespace cricket { PortInterface::PortInterface() = default; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/port_interface.h b/TMessagesProj/jni/voip/webrtc/p2p/base/port_interface.h index 73c8e36c78..29c2741bab 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/port_interface.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/port_interface.h @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/candidate.h" #include "p2p/base/transport_description.h" @@ -49,7 +50,7 @@ class PortInterface { virtual ~PortInterface(); virtual const std::string& Type() const = 0; - virtual rtc::Network* Network() const = 0; + virtual const rtc::Network* Network() const = 0; // Methods to set/get ICE role and tiebreaker values. virtual void SetIceRole(IceRole role) = 0; @@ -60,7 +61,7 @@ class PortInterface { virtual bool SharedSocket() const = 0; - virtual bool SupportsProtocol(const std::string& protocol) const = 0; + virtual bool SupportsProtocol(absl::string_view protocol) const = 0; // PrepareAddress will attempt to get an address for this port that other // clients can send to. It may take some time before the address is ready. @@ -107,10 +108,10 @@ class PortInterface { // Sends a response message (normal or error) to the given request. One of // these methods should be called as a response to SignalUnknownAddress. - virtual void SendBindingErrorResponse(StunMessage* request, + virtual void SendBindingErrorResponse(StunMessage* message, const rtc::SocketAddress& addr, int error_code, - const std::string& reason) = 0; + absl::string_view reason) = 0; // Signaled when this port decides to delete itself because it no longer has // any usefulness. 
diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.cc index 293e9dbcfd..572c2a616f 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.cc @@ -9,7 +9,9 @@ */ #include "p2p/base/regathering_controller.h" -#include "rtc_base/task_utils/to_queued_task.h" + +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/units/time_delta.h" namespace webrtc { @@ -18,6 +20,7 @@ BasicRegatheringController::BasicRegatheringController( cricket::IceTransportInternal* ice_transport, rtc::Thread* thread) : config_(config), ice_transport_(ice_transport), thread_(thread) { + RTC_DCHECK(thread_); RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(ice_transport_); ice_transport_->SignalStateChanged.connect( @@ -58,21 +61,20 @@ void BasicRegatheringController:: pending_regathering_.reset(new ScopedTaskSafety()); thread_->PostDelayedTask( - ToQueuedTask(*pending_regathering_.get(), - [this]() { - RTC_DCHECK_RUN_ON(thread_); - // Only regather when the current session is in the CLEARED - // state (i.e., not running or stopped). It is only - // possible to enter this state when we gather continually, - // so there is an implicit check on continual gathering - // here. - if (allocator_session_ && - allocator_session_->IsCleared()) { - allocator_session_->RegatherOnFailedNetworks(); - } - ScheduleRecurringRegatheringOnFailedNetworks(); - }), - config_.regather_on_failed_networks_interval); + SafeTask(pending_regathering_->flag(), + [this]() { + RTC_DCHECK_RUN_ON(thread_); + // Only regather when the current session is in the CLEARED + // state (i.e., not running or stopped). It is only + // possible to enter this state when we gather continually, + // so there is an implicit check on continual gathering + // here. 
+ if (allocator_session_ && allocator_session_->IsCleared()) { + allocator_session_->RegatherOnFailedNetworks(); + } + ScheduleRecurringRegatheringOnFailedNetworks(); + }), + TimeDelta::Millis(config_.regather_on_failed_networks_interval)); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.h b/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.h index 116d820a82..a0dfb8053d 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.h @@ -13,9 +13,9 @@ #include +#include "api/task_queue/pending_task_safety_flag.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/port_allocator.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.cc index c9fbefdc16..5d57d1ac54 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.cc @@ -14,12 +14,14 @@ #include #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" #include "api/transport/stun.h" #include "p2p/base/connection.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/port_allocator.h" #include "rtc_base/async_resolver_interface.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/helpers.h" #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" @@ -27,6 +29,26 @@ namespace cricket { +namespace { + +bool ResolveStunHostnameForFamily(const webrtc::FieldTrialsView& field_trials) { + // Bug fix for STUN hostname resolution on IPv6. + // Field trial key reserved in bugs.webrtc.org/14334 + static constexpr char field_trial_name[] = + "WebRTC-IPv6NetworkResolutionFixes"; + if (!field_trials.IsEnabled(field_trial_name)) { + return false; + } + + webrtc::FieldTrialParameter resolve_stun_hostname_for_family( + "ResolveStunHostnameForFamily", /*default_value=*/false); + webrtc::ParseFieldTrial({&resolve_stun_hostname_for_family}, + field_trials.Lookup(field_trial_name)); + return resolve_stun_hostname_for_family; +} + +} // namespace + // TODO(?): Move these to a common place (used in relayport too) const int RETRY_TIMEOUT = 50 * 1000; // 50 seconds @@ -40,14 +62,14 @@ class StunBindingRequest : public StunRequest { StunBindingRequest(UDPPort* port, const rtc::SocketAddress& addr, int64_t start_time) - : port_(port), server_addr_(addr), start_time_(start_time) {} + : StunRequest(port->request_manager(), + std::make_unique(STUN_BINDING_REQUEST)), + port_(port), + server_addr_(addr), + start_time_(start_time) {} const rtc::SocketAddress& server_addr() const { return server_addr_; } - void Prepare(StunMessage* request) override { - request->SetType(STUN_BINDING_REQUEST); - } - void OnResponse(StunMessage* response) override { const StunAddressAttribute* addr_attr = response->GetAddress(STUN_ATTR_MAPPED_ADDRESS); @@ -63,7 +85,7 @@ class StunBindingRequest : public StunRequest { // The keep-alive requests will be stopped after its lifetime has passed. 
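// ============================================================================
// [Editor's aside: illustrative sketch only, not part of this patch]
// ResolveStunHostnameForFamily above checks whether the trial
// "WebRTC-IPv6NetworkResolutionFixes" is enabled and then reads the boolean
// sub-parameter "ResolveStunHostnameForFamily" from its config string. The
// toy lookup below only mimics that two-step shape with a simplified
// "Enabled,Key:true" format; WebRTC's FieldTrialParameter syntax is richer.
// ============================================================================
#include <iostream>
#include <map>
#include <sstream>
#include <string>
#include <utility>

class FakeFieldTrials {
 public:
  void Set(std::string trial, std::string config) {
    trials_[std::move(trial)] = std::move(config);
  }
  bool IsEnabled(const std::string& trial) const {
    auto it = trials_.find(trial);
    return it != trials_.end() && it->second.rfind("Enabled", 0) == 0;
  }
  // Looks for "key:true" among comma-separated tokens of the trial config.
  bool BoolParam(const std::string& trial, const std::string& key) const {
    auto it = trials_.find(trial);
    if (it == trials_.end()) return false;
    std::stringstream ss(it->second);
    std::string token;
    while (std::getline(ss, token, ',')) {
      size_t colon = token.find(':');
      if (colon != std::string::npos && token.substr(0, colon) == key) {
        return token.substr(colon + 1) == "true";
      }
    }
    return false;
  }

 private:
  std::map<std::string, std::string> trials_;
};

int main() {
  FakeFieldTrials trials;
  trials.Set("WebRTC-IPv6NetworkResolutionFixes",
             "Enabled,ResolveStunHostnameForFamily:true");
  std::cout << trials.IsEnabled("WebRTC-IPv6NetworkResolutionFixes") << " "
            << trials.BoolParam("WebRTC-IPv6NetworkResolutionFixes",
                                "ResolveStunHostnameForFamily")
            << "\n";  // 1 1
}
// [end of editor's aside] ====================================================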
if (WithinLifetime(rtc::TimeMillis())) { - port_->requests_.SendDelayed( + port_->request_manager_.SendDelayed( new StunBindingRequest(port_, server_addr_, start_time_), port_->stun_keepalive_delay()); } @@ -88,7 +110,7 @@ class StunBindingRequest : public StunRequest { int64_t now = rtc::TimeMillis(); if (WithinLifetime(now) && rtc::TimeDiff(now, start_time_) < RETRY_TIMEOUT) { - port_->requests_.SendDelayed( + port_->request_manager_.SendDelayed( new StunBindingRequest(port_, server_addr_, start_time_), port_->stun_keepalive_delay()); } @@ -99,7 +121,7 @@ class StunBindingRequest : public StunRequest { << port_->Network()->name() << ")"; port_->OnStunBindingOrResolveRequestFailed( server_addr_, SERVER_NOT_REACHABLE_ERROR, - "STUN allocate request timed out."); + "STUN binding request timed out."); } private: @@ -121,7 +143,10 @@ UDPPort::AddressResolver::AddressResolver( std::function done_callback) : socket_factory_(factory), done_(std::move(done_callback)) {} -void UDPPort::AddressResolver::Resolve(const rtc::SocketAddress& address) { +void UDPPort::AddressResolver::Resolve( + const rtc::SocketAddress& address, + int family, + const webrtc::FieldTrialsView& field_trials) { if (resolvers_.find(address) != resolvers_.end()) return; @@ -132,12 +157,17 @@ void UDPPort::AddressResolver::Resolve(const rtc::SocketAddress& address) { pair = std::make_pair(address, std::move(resolver)); resolvers_.insert(std::move(pair)); - resolver_ptr->Start(address, [this, address] { + auto callback = [this, address] { ResolverMap::const_iterator it = resolvers_.find(address); if (it != resolvers_.end()) { done_(it->first, it->second->result().GetError()); } - }); + }; + if (ResolveStunHostnameForFamily(field_trials)) { + resolver_ptr->Start(address, family, std::move(callback)); + } else { + resolver_ptr->Start(address, std::move(callback)); + } } bool UDPPort::AddressResolver::GetResolvedAddress( @@ -153,13 +183,24 @@ bool UDPPort::AddressResolver::GetResolvedAddress( UDPPort::UDPPort(rtc::Thread* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, rtc::AsyncPacketSocket* socket, - const std::string& username, - const std::string& password, - bool emit_local_for_anyaddress) - : Port(thread, LOCAL_PORT_TYPE, factory, network, username, password), - requests_(thread), + absl::string_view username, + absl::string_view password, + bool emit_local_for_anyaddress, + const webrtc::FieldTrialsView* field_trials) + : Port(thread, + LOCAL_PORT_TYPE, + factory, + network, + username, + password, + field_trials), + request_manager_( + thread, + [this](const void* data, size_t size, StunRequest* request) { + OnSendPacket(data, size, request); + }), socket_(socket), error_(0), ready_(false), @@ -169,12 +210,13 @@ UDPPort::UDPPort(rtc::Thread* thread, UDPPort::UDPPort(rtc::Thread* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, uint16_t min_port, uint16_t max_port, - const std::string& username, - const std::string& password, - bool emit_local_for_anyaddress) + absl::string_view username, + absl::string_view password, + bool emit_local_for_anyaddress, + const webrtc::FieldTrialsView* field_trials) : Port(thread, LOCAL_PORT_TYPE, factory, @@ -182,8 +224,13 @@ UDPPort::UDPPort(rtc::Thread* thread, min_port, max_port, username, - password), - requests_(thread), + password, + field_trials), + request_manager_( + thread, + [this](const void* data, size_t size, StunRequest* request) { + OnSendPacket(data, size, request); + }), 
socket_(nullptr), error_(0), ready_(false), @@ -206,7 +253,6 @@ bool UDPPort::Init() { socket_->SignalSentPacket.connect(this, &UDPPort::OnSentPacket); socket_->SignalReadyToSend.connect(this, &UDPPort::OnReadyToSend); socket_->SignalAddressReady.connect(this, &UDPPort::OnLocalAddressReady); - requests_.SignalSendPacket.connect(this, &UDPPort::OnSendPacket); return true; } @@ -216,7 +262,7 @@ UDPPort::~UDPPort() { } void UDPPort::PrepareAddress() { - RTC_DCHECK(requests_.empty()); + RTC_DCHECK(request_manager_.empty()); if (socket_->GetState() == rtc::AsyncPacketSocket::STATE_BOUND) { OnLocalAddressReady(socket_, socket_->GetLocalAddress()); } @@ -266,7 +312,7 @@ Connection* UDPPort::CreateConnection(const Candidate& address, mdns_name_registration_status() != MdnsNameRegistrationStatus::kNotStarted); - Connection* conn = new ProxyConnection(this, 0, address); + Connection* conn = new ProxyConnection(NewWeakPtr(), 0, address); AddOrReplaceConnection(conn); return conn; } @@ -331,7 +377,7 @@ bool UDPPort::HandleIncomingPacket(rtc::AsyncPacketSocket* socket, return true; } -bool UDPPort::SupportsProtocol(const std::string& protocol) const { +bool UDPPort::SupportsProtocol(absl::string_view protocol) const { return protocol == UDP_PROTOCOL_NAME; } @@ -381,7 +427,7 @@ void UDPPort::OnReadPacket(rtc::AsyncPacketSocket* socket, // will eat it because it might be a response to a retransmitted packet, and // we already cleared the request when we got the first response. if (server_addresses_.find(remote_addr) != server_addresses_.end()) { - requests_.CheckResponse(data, size); + request_manager_.CheckResponse(data, size); return; } @@ -404,11 +450,16 @@ void UDPPort::OnReadyToSend(rtc::AsyncPacketSocket* socket) { void UDPPort::SendStunBindingRequests() { // We will keep pinging the stun server to make sure our NAT pin-hole stays // open until the deadline (specified in SendStunBindingRequest). - RTC_DCHECK(requests_.empty()); + RTC_DCHECK(request_manager_.empty()); for (ServerAddresses::const_iterator it = server_addresses_.begin(); - it != server_addresses_.end(); ++it) { - SendStunBindingRequest(*it); + it != server_addresses_.end();) { + // sending a STUN binding request may cause the current SocketAddress to be + // erased from the set, invalidating the loop iterator before it is + // incremented (even if the SocketAddress itself still exists). So make a + // copy of the loop iterator, which may be safely invalidated. + ServerAddresses::const_iterator addr = it++; + SendStunBindingRequest(*addr); } } @@ -422,7 +473,7 @@ void UDPPort::ResolveStunAddress(const rtc::SocketAddress& stun_addr) { RTC_LOG(LS_INFO) << ToString() << ": Starting STUN host lookup for " << stun_addr.ToSensitiveString(); - resolver_->Resolve(stun_addr); + resolver_->Resolve(stun_addr, Network()->family(), field_trials()); } void UDPPort::OnResolveResult(const rtc::SocketAddress& input, int error) { @@ -454,7 +505,7 @@ void UDPPort::SendStunBindingRequest(const rtc::SocketAddress& stun_addr) { } else if (socket_->GetState() == rtc::AsyncPacketSocket::STATE_BOUND) { // Check if `server_addr_` is compatible with the port's ip. 
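// ============================================================================
// [Editor's aside: illustrative sketch only, not part of this patch]
// The SendStunBindingRequests loop above advances the iterator before calling
// into code that may erase the current server address from the set. The
// stand-alone sketch below shows the same erase-safe iteration idiom
// (HandleServer and the addresses are illustrative):
// ============================================================================
#include <iostream>
#include <set>
#include <string>

// Stand-in for SendStunBindingRequest: handling one server may erase that
// very entry from the set. The parameter is taken by value on purpose so the
// key outlives the element it erases.
void HandleServer(std::set<std::string>& servers, std::string server) {
  if (server == "unresolved.example.org") servers.erase(server);
}

int main() {
  std::set<std::string> servers = {"unresolved.example.org", "198.51.100.7"};
  for (auto it = servers.begin(); it != servers.end();) {
    auto current = it++;  // advance before the callee can invalidate current
    HandleServer(servers, *current);
  }
  std::cout << servers.size() << "\n";  // 1
}
// [end of editor's aside] ====================================================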
if (IsCompatibleAddress(stun_addr)) { - requests_.Send( + request_manager_.Send( new StunBindingRequest(this, stun_addr, rtc::TimeMillis())); } else { // Since we can't send stun messages to the server, we should mark this @@ -512,7 +563,7 @@ void UDPPort::OnStunBindingRequestSucceeded( } rtc::StringBuilder url; - url << "stun:" << stun_server_addr.ipaddr().ToString() << ":" + url << "stun:" << stun_server_addr.hostname() << ":" << stun_server_addr.port(); AddAddress(stun_reflected_addr, socket_->GetLocalAddress(), related_address, UDP_PROTOCOL_NAME, "", "", STUN_PORT_TYPE, @@ -524,7 +575,7 @@ void UDPPort::OnStunBindingRequestSucceeded( void UDPPort::OnStunBindingOrResolveRequestFailed( const rtc::SocketAddress& stun_server_addr, int error_code, - const std::string& reason) { + absl::string_view reason) { rtc::StringBuilder url; url << "stun:" << stun_server_addr.ToString(); SignalCandidateError( @@ -599,17 +650,18 @@ bool UDPPort::HasCandidateWithAddress(const rtc::SocketAddress& addr) const { std::unique_ptr StunPort::Create( rtc::Thread* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, uint16_t min_port, uint16_t max_port, - const std::string& username, - const std::string& password, + absl::string_view username, + absl::string_view password, const ServerAddresses& servers, - absl::optional stun_keepalive_interval) { + absl::optional stun_keepalive_interval, + const webrtc::FieldTrialsView* field_trials) { // Using `new` to access a non-public constructor. - auto port = - absl::WrapUnique(new StunPort(thread, factory, network, min_port, - max_port, username, password, servers)); + auto port = absl::WrapUnique(new StunPort(thread, factory, network, min_port, + max_port, username, password, + servers, field_trials)); port->set_stun_keepalive_delay(stun_keepalive_interval); if (!port->Init()) { return nullptr; @@ -619,12 +671,13 @@ std::unique_ptr StunPort::Create( StunPort::StunPort(rtc::Thread* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, uint16_t min_port, uint16_t max_port, - const std::string& username, - const std::string& password, - const ServerAddresses& servers) + absl::string_view username, + absl::string_view password, + const ServerAddresses& servers, + const webrtc::FieldTrialsView* field_trials) : UDPPort(thread, factory, network, @@ -632,7 +685,8 @@ StunPort::StunPort(rtc::Thread* thread, max_port, username, password, - false) { + false, + field_trials) { // UDPPort will set these to local udp, updating these to STUN. 
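// ============================================================================
// [Editor's aside: illustrative sketch only, not part of this patch]
// StunPort::Create above uses absl::WrapUnique because the constructor is not
// public and Init() can fail, so the factory can return nullptr instead of a
// half-initialized port. The sketch below shows the same factory shape with
// plain std::unique_ptr and a hypothetical StunishPort type:
// ============================================================================
#include <iostream>
#include <memory>
#include <string>
#include <utility>

class StunishPort {
 public:
  // The factory is the only way to construct: it runs fallible setup and
  // returns nullptr on failure.
  static std::unique_ptr<StunishPort> Create(std::string server) {
    // std::make_unique cannot reach the private constructor, so wrap `new`
    // directly (this is what absl::WrapUnique does in the patch).
    std::unique_ptr<StunishPort> port(new StunishPort(std::move(server)));
    if (!port->Init()) return nullptr;
    return port;
  }
  const std::string& server() const { return server_; }

 private:
  explicit StunishPort(std::string server) : server_(std::move(server)) {}
  bool Init() { return !server_.empty(); }
  std::string server_;
};

int main() {
  auto port = StunishPort::Create("stun.example.org:3478");
  std::cout << (port ? port->server() : "failed") << "\n";
}
// [end of editor's aside] ====================================================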
set_type(STUN_PORT_TYPE); set_server_addresses(servers); diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.h b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.h index 394c1336e2..06b5e1ae1c 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.h @@ -17,10 +17,11 @@ #include #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "p2p/base/port.h" #include "p2p/base/stun_request.h" #include "rtc_base/async_packet_socket.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" namespace cricket { @@ -35,16 +36,17 @@ class UDPPort : public Port { static std::unique_ptr Create( rtc::Thread* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, rtc::AsyncPacketSocket* socket, - const std::string& username, - const std::string& password, + absl::string_view username, + absl::string_view password, bool emit_local_for_anyaddress, - absl::optional stun_keepalive_interval) { + absl::optional stun_keepalive_interval, + const webrtc::FieldTrialsView* field_trials = nullptr) { // Using `new` to access a non-public constructor. - auto port = - absl::WrapUnique(new UDPPort(thread, factory, network, socket, username, - password, emit_local_for_anyaddress)); + auto port = absl::WrapUnique( + new UDPPort(thread, factory, network, socket, username, password, + emit_local_for_anyaddress, field_trials)); port->set_stun_keepalive_delay(stun_keepalive_interval); if (!port->Init()) { return nullptr; @@ -55,17 +57,18 @@ class UDPPort : public Port { static std::unique_ptr Create( rtc::Thread* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, uint16_t min_port, uint16_t max_port, - const std::string& username, - const std::string& password, + absl::string_view username, + absl::string_view password, bool emit_local_for_anyaddress, - absl::optional stun_keepalive_interval) { + absl::optional stun_keepalive_interval, + const webrtc::FieldTrialsView* field_trials = nullptr) { // Using `new` to access a non-public constructor. - auto port = absl::WrapUnique(new UDPPort(thread, factory, network, min_port, - max_port, username, password, - emit_local_for_anyaddress)); + auto port = absl::WrapUnique( + new UDPPort(thread, factory, network, min_port, max_port, username, + password, emit_local_for_anyaddress, field_trials)); port->set_stun_keepalive_delay(stun_keepalive_interval); if (!port->Init()) { return nullptr; @@ -98,7 +101,7 @@ class UDPPort : public Port { const rtc::SocketAddress& remote_addr, int64_t packet_time_us) override; - bool SupportsProtocol(const std::string& protocol) const override; + bool SupportsProtocol(absl::string_view protocol) const override; ProtocolType GetProtocol() const override; void GetStunStats(absl::optional* stats) override; @@ -111,28 +114,28 @@ class UDPPort : public Port { void set_stun_keepalive_lifetime(int lifetime) { stun_keepalive_lifetime_ = lifetime; } - // Returns true if there is a pending request with type `msg_type`. 
- bool HasPendingRequest(int msg_type) { - return requests_.HasRequest(msg_type); - } + + StunRequestManager& request_manager() { return request_manager_; } protected: UDPPort(rtc::Thread* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, uint16_t min_port, uint16_t max_port, - const std::string& username, - const std::string& password, - bool emit_local_for_anyaddress); + absl::string_view username, + absl::string_view password, + bool emit_local_for_anyaddress, + const webrtc::FieldTrialsView* field_trials); UDPPort(rtc::Thread* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, rtc::AsyncPacketSocket* socket, - const std::string& username, - const std::string& password, - bool emit_local_for_anyaddress); + absl::string_view username, + absl::string_view password, + bool emit_local_for_anyaddress, + const webrtc::FieldTrialsView* field_trials); bool Init(); @@ -183,7 +186,9 @@ class UDPPort : public Port { rtc::PacketSocketFactory* factory, std::function done_callback); - void Resolve(const rtc::SocketAddress& address); + void Resolve(const rtc::SocketAddress& address, + int family, + const webrtc::FieldTrialsView& field_trials); bool GetResolvedAddress(const rtc::SocketAddress& input, int family, rtc::SocketAddress* output) const; @@ -207,6 +212,9 @@ class UDPPort : public Port { void ResolveStunAddress(const rtc::SocketAddress& stun_addr); void OnResolveResult(const rtc::SocketAddress& input, int error); + // Send a STUN binding request to the given address. Calling this method may + // cause the set of known server addresses to be modified, eg. by replacing an + // unresolved server address with a resolved address. void SendStunBindingRequest(const rtc::SocketAddress& stun_addr); // Below methods handles binding request responses. @@ -217,7 +225,7 @@ class UDPPort : public Port { void OnStunBindingOrResolveRequestFailed( const rtc::SocketAddress& stun_server_addr, int error_code, - const std::string& reason); + absl::string_view reason); // Sends STUN requests to the server. 
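// [Editor's note, not part of the upstream patch] The signatures above migrate
// `const std::string&` parameters to absl::string_view. A sketch of the same
// migration using std::string_view, which behaves equivalently for this purpose
// (the Credentials class is invented for illustration): the view is copied into
// an owning std::string only at the sink.

#include <string>
#include <string_view>

class Credentials {
 public:
  Credentials(std::string_view username, std::string_view password)
      : username_(username), password_(password) {}  // one copy each, here

  const std::string& username() const { return username_; }

 private:
  std::string username_;
  std::string password_;
};

// Callers may pass string literals, std::string objects, or sub-strings without
// materializing a temporary std::string at every call site, e.g.
//   Credentials c("alice", "secret");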
void OnSendPacket(const void* data, size_t size, StunRequest* req); @@ -240,7 +248,7 @@ class UDPPort : public Port { ServerAddresses server_addresses_; ServerAddresses bind_request_succeeded_servers_; ServerAddresses bind_request_failed_servers_; - StunRequestManager requests_; + StunRequestManager request_manager_; rtc::AsyncPacketSocket* socket_; int error_; int send_error_count_ = 0; @@ -264,25 +272,27 @@ class StunPort : public UDPPort { static std::unique_ptr Create( rtc::Thread* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, uint16_t min_port, uint16_t max_port, - const std::string& username, - const std::string& password, + absl::string_view username, + absl::string_view password, const ServerAddresses& servers, - absl::optional stun_keepalive_interval); + absl::optional stun_keepalive_interval, + const webrtc::FieldTrialsView* field_trials); void PrepareAddress() override; protected: StunPort(rtc::Thread* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, uint16_t min_port, uint16_t max_port, - const std::string& username, - const std::string& password, - const ServerAddresses& servers); + absl::string_view username, + absl::string_view password, + const ServerAddresses& servers, + const webrtc::FieldTrialsView* field_trials); }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.cc index 09a7a8345e..d15a3e65e2 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.cc @@ -12,18 +12,19 @@ #include #include +#include #include +#include "absl/memory/memory.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "rtc_base/checks.h" #include "rtc_base/helpers.h" #include "rtc_base/logging.h" #include "rtc_base/string_encode.h" #include "rtc_base/time_utils.h" // For TimeMillis -#include "system_wrappers/include/field_trial.h" namespace cricket { - -const uint32_t MSG_STUN_SEND = 1; +using ::webrtc::SafeTask; // RFC 5389 says SHOULD be 500ms. // For years, this was 100ms, but for networks that @@ -42,103 +43,105 @@ const int STUN_MAX_RETRANSMISSIONS = 8; // Total sends: 9 // work well. 
const int STUN_MAX_RTO = 8000; // milliseconds, or 5 doublings -StunRequestManager::StunRequestManager(rtc::Thread* thread) : thread_(thread) {} +StunRequestManager::StunRequestManager( + webrtc::TaskQueueBase* thread, + std::function send_packet) + : thread_(thread), send_packet_(std::move(send_packet)) {} -StunRequestManager::~StunRequestManager() { - while (requests_.begin() != requests_.end()) { - StunRequest* request = requests_.begin()->second; - requests_.erase(requests_.begin()); - delete request; - } -} +StunRequestManager::~StunRequestManager() = default; void StunRequestManager::Send(StunRequest* request) { SendDelayed(request, 0); } void StunRequestManager::SendDelayed(StunRequest* request, int delay) { - request->set_manager(this); - RTC_DCHECK(requests_.find(request->id()) == requests_.end()); - request->Construct(); - requests_[request->id()] = request; - if (delay > 0) { - thread_->PostDelayed(RTC_FROM_HERE, delay, request, MSG_STUN_SEND, NULL); - } else { - thread_->Send(RTC_FROM_HERE, request, MSG_STUN_SEND, NULL); - } + RTC_DCHECK_RUN_ON(thread_); + RTC_DCHECK_EQ(this, request->manager()); + auto [iter, was_inserted] = + requests_.emplace(request->id(), absl::WrapUnique(request)); + RTC_DCHECK(was_inserted); + request->Send(webrtc::TimeDelta::Millis(delay)); } -void StunRequestManager::Flush(int msg_type) { - for (const auto& kv : requests_) { - StunRequest* request = kv.second; - if (msg_type == kAllRequests || msg_type == request->type()) { - thread_->Clear(request, MSG_STUN_SEND); - thread_->Send(RTC_FROM_HERE, request, MSG_STUN_SEND, NULL); +void StunRequestManager::FlushForTest(int msg_type) { + RTC_DCHECK_RUN_ON(thread_); + for (const auto& [unused, request] : requests_) { + if (msg_type == kAllRequestsForTest || msg_type == request->type()) { + // Calling `Send` implies starting the send operation which may be posted + // on a timer and be repeated on a timer until timeout. To make sure that + // a call to `Send` doesn't conflict with a previously started `Send` + // operation, we reset the `task_safety_` flag here, which has the effect + // of canceling any outstanding tasks and prepare a new flag for + // operations related to this call to `Send`. + request->ResetTasksForTest(); + request->Send(webrtc::TimeDelta::Zero()); } } } -bool StunRequestManager::HasRequest(int msg_type) { - for (const auto& kv : requests_) { - StunRequest* request = kv.second; - if (msg_type == kAllRequests || msg_type == request->type()) { +bool StunRequestManager::HasRequestForTest(int msg_type) { + RTC_DCHECK_RUN_ON(thread_); + RTC_DCHECK_NE(msg_type, kAllRequestsForTest); + for (const auto& [unused, request] : requests_) { + if (msg_type == request->type()) { return true; } } return false; } -void StunRequestManager::Remove(StunRequest* request) { - RTC_DCHECK(request->manager() == this); - RequestMap::iterator iter = requests_.find(request->id()); - if (iter != requests_.end()) { - RTC_DCHECK(iter->second == request); - requests_.erase(iter); - thread_->Clear(request); - } -} - void StunRequestManager::Clear() { - std::vector requests; - for (RequestMap::iterator i = requests_.begin(); i != requests_.end(); ++i) - requests.push_back(i->second); - - for (uint32_t i = 0; i < requests.size(); ++i) { - // StunRequest destructor calls Remove() which deletes requests - // from `requests_`. 
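// [Editor's note, not part of the upstream patch] The StunRequestManager
// constructor above now receives a std::function send callback instead of
// exposing a SignalSendPacket sigslot signal. A minimal sketch of that
// dependency-injection shape (RequestManager here is an invented stand-in, not
// the WebRTC class):

#include <cstddef>
#include <functional>
#include <iostream>
#include <utility>

class RequestManager {
 public:
  using SendPacket = std::function<void(const void* data, size_t size)>;

  explicit RequestManager(SendPacket send_packet)
      : send_packet_(std::move(send_packet)) {}

  void Send(const void* data, size_t size) { send_packet_(data, size); }

 private:
  const SendPacket send_packet_;
};

int main() {
  // The owner wires the callback in at construction time, as the ports do here.
  RequestManager manager([](const void* /*data*/, size_t size) {
    std::cout << "would send " << size << " bytes\n";
  });
  const char payload[] = "ping";
  manager.Send(payload, sizeof(payload));
}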
- delete requests[i]; - } + RTC_DCHECK_RUN_ON(thread_); + requests_.clear(); } bool StunRequestManager::CheckResponse(StunMessage* msg) { + RTC_DCHECK_RUN_ON(thread_); RequestMap::iterator iter = requests_.find(msg->transaction_id()); - if (iter == requests_.end()) { - // TODO(pthatcher): Log unknown responses without being too spammy - // in the logs. + if (iter == requests_.end()) return false; - } - StunRequest* request = iter->second; + StunRequest* request = iter->second.get(); // Now that we know the request, we can see if the response is // integrity-protected or not. // For some tests, the message integrity is not set in the request. // Complain, and then don't check. - bool skip_integrity_checking = false; - if (request->msg()->integrity() == StunMessage::IntegrityStatus::kNotSet) { - skip_integrity_checking = true; + bool skip_integrity_checking = + (request->msg()->integrity() == StunMessage::IntegrityStatus::kNotSet); + if (skip_integrity_checking) { + // This indicates lazy test writing (not adding integrity attribute). + // Complain, but only in debug mode (while developing). + RTC_DLOG(LS_ERROR) + << "CheckResponse called on a passwordless request. Fix test!"; } else { - msg->ValidateMessageIntegrity(request->msg()->password()); + if (msg->integrity() == StunMessage::IntegrityStatus::kNotSet) { + // Checking status for the first time. Normal. + msg->ValidateMessageIntegrity(request->msg()->password()); + } else if (msg->integrity() == StunMessage::IntegrityStatus::kIntegrityOk && + msg->password() == request->msg()->password()) { + // Status is already checked, with the same password. This is the case + // we would want to see happen. + } else if (msg->integrity() == + StunMessage::IntegrityStatus::kIntegrityBad) { + // This indicates that the original check had the wrong password. + // Bad design, needs revisiting. + // TODO(crbug.com/1177125): Fix this. + msg->RevalidateMessageIntegrity(request->msg()->password()); + } else { + RTC_CHECK_NOTREACHED(); + } } + bool success = true; + if (!msg->GetNonComprehendedAttributes().empty()) { // If a response contains unknown comprehension-required attributes, it's // simply discarded and the transaction is considered failed. See RFC5389 // sections 7.3.3 and 7.3.4. RTC_LOG(LS_ERROR) << ": Discarding response due to unknown " "comprehension-required attribute."; - delete request; - return false; + success = false; } else if (msg->type() == GetStunSuccessResponseType(request->type())) { if (!msg->IntegrityOk() && !skip_integrity_checking) { return false; @@ -153,11 +156,17 @@ bool StunRequestManager::CheckResponse(StunMessage* msg) { return false; } - delete request; - return true; + requests_.erase(iter); + return success; +} + +bool StunRequestManager::empty() const { + RTC_DCHECK_RUN_ON(thread_); + return requests_.empty(); } bool StunRequestManager::CheckResponse(const char* data, size_t size) { + RTC_DCHECK_RUN_ON(thread_); // Check the appropriate bytes of the stream to see if they match the // transaction ID of a response we are expecting. @@ -168,11 +177,8 @@ bool StunRequestManager::CheckResponse(const char* data, size_t size) { id.append(data + kStunTransactionIdOffset, kStunTransactionIdLength); RequestMap::iterator iter = requests_.find(id); - if (iter == requests_.end()) { - // TODO(pthatcher): Log unknown responses without being too spammy - // in the logs. + if (iter == requests_.end()) return false; - } // Parse the STUN message and continue processing as usual. 
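// [Editor's note, not part of the upstream patch] The request map above now
// stores std::unique_ptr values, so erasing an entry also destroys the request
// and the manual `delete request` calls disappear. A standalone sketch of the
// emplace/iterate/erase idioms used here (Request is an invented type):

#include <cassert>
#include <iostream>
#include <map>
#include <memory>
#include <string>

struct Request {
  explicit Request(int type) : type(type) {}
  int type;
};

int main() {
  std::map<std::string, std::unique_ptr<Request>> requests;

  // Structured bindings name the (iterator, inserted) pair returned by emplace.
  auto [iter, was_inserted] =
      requests.emplace("abc123", std::make_unique<Request>(1));
  assert(was_inserted);
  (void)iter;
  (void)was_inserted;

  for (const auto& [id, request] : requests) {
    std::cout << id << " -> type " << request->type << "\n";
  }

  requests.erase("abc123");  // destroys the Request; no manual delete needed
}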
@@ -187,65 +193,59 @@ bool StunRequestManager::CheckResponse(const char* data, size_t size) { return CheckResponse(response.get()); } -StunRequest::StunRequest() - : count_(0), - timeout_(false), - manager_(0), - msg_(new StunMessage()), - tstamp_(0) { - msg_->SetTransactionID(rtc::CreateRandomString(kStunTransactionIdLength)); +void StunRequestManager::OnRequestTimedOut(StunRequest* request) { + RTC_DCHECK_RUN_ON(thread_); + requests_.erase(request->id()); } -StunRequest::StunRequest(StunMessage* request) - : count_(0), timeout_(false), manager_(0), msg_(request), tstamp_(0) { - msg_->SetTransactionID(rtc::CreateRandomString(kStunTransactionIdLength)); +void StunRequestManager::SendPacket(const void* data, + size_t size, + StunRequest* request) { + RTC_DCHECK_EQ(this, request->manager()); + send_packet_(data, size, request); } -StunRequest::~StunRequest() { - RTC_DCHECK(manager_ != NULL); - if (manager_) { - manager_->Remove(this); - manager_->thread_->Clear(this); - } - delete msg_; +StunRequest::StunRequest(StunRequestManager& manager) + : manager_(manager), + msg_(new StunMessage(STUN_INVALID_MESSAGE_TYPE)), + tstamp_(0), + count_(0), + timeout_(false) { + RTC_DCHECK_RUN_ON(network_thread()); } -void StunRequest::Construct() { - if (msg_->type() == 0) { - Prepare(msg_); - RTC_DCHECK(msg_->type() != 0); - } +StunRequest::StunRequest(StunRequestManager& manager, + std::unique_ptr message) + : manager_(manager), + msg_(std::move(message)), + tstamp_(0), + count_(0), + timeout_(false) { + RTC_DCHECK_RUN_ON(network_thread()); + RTC_DCHECK(!msg_->transaction_id().empty()); } +StunRequest::~StunRequest() {} + int StunRequest::type() { RTC_DCHECK(msg_ != NULL); return msg_->type(); } const StunMessage* StunRequest::msg() const { - return msg_; -} - -StunMessage* StunRequest::mutable_msg() { - return msg_; + return msg_.get(); } int StunRequest::Elapsed() const { + RTC_DCHECK_RUN_ON(network_thread()); return static_cast(rtc::TimeMillis() - tstamp_); } -void StunRequest::set_manager(StunRequestManager* manager) { - RTC_DCHECK(!manager_); - manager_ = manager; -} - -void StunRequest::OnMessage(rtc::Message* pmsg) { - RTC_DCHECK(manager_ != NULL); - RTC_DCHECK(pmsg->message_id == MSG_STUN_SEND); - +void StunRequest::SendInternal() { + RTC_DCHECK_RUN_ON(network_thread()); if (timeout_) { OnTimeout(); - delete this; + manager_.OnRequestTimedOut(this); return; } @@ -253,24 +253,47 @@ void StunRequest::OnMessage(rtc::Message* pmsg) { rtc::ByteBufferWriter buf; msg_->Write(&buf); - manager_->SignalSendPacket(buf.Data(), buf.Length(), this); + manager_.SendPacket(buf.Data(), buf.Length(), this); OnSent(); - manager_->thread_->PostDelayed(RTC_FROM_HERE, resend_delay(), this, - MSG_STUN_SEND, NULL); + SendDelayed(webrtc::TimeDelta::Millis(resend_delay())); +} + +void StunRequest::SendDelayed(webrtc::TimeDelta delay) { + network_thread()->PostDelayedTask( + SafeTask(task_safety_.flag(), [this]() { SendInternal(); }), delay); +} + +void StunRequest::Send(webrtc::TimeDelta delay) { + RTC_DCHECK_RUN_ON(network_thread()); + RTC_DCHECK_GE(delay.ms(), 0); + + RTC_DCHECK(!task_safety_.flag()->alive()) << "Send already called?"; + task_safety_.flag()->SetAlive(); + + delay.IsZero() ? 
SendInternal() : SendDelayed(delay); +} + +void StunRequest::ResetTasksForTest() { + RTC_DCHECK_RUN_ON(network_thread()); + task_safety_.reset(webrtc::PendingTaskSafetyFlag::CreateDetachedInactive()); + count_ = 0; + RTC_DCHECK(!timeout_); } void StunRequest::OnSent() { + RTC_DCHECK_RUN_ON(network_thread()); count_ += 1; int retransmissions = (count_ - 1); if (retransmissions >= STUN_MAX_RETRANSMISSIONS) { timeout_ = true; } - RTC_LOG(LS_VERBOSE) << "Sent STUN request " << count_ - << "; resend delay = " << resend_delay(); + RTC_DLOG(LS_VERBOSE) << "Sent STUN request " << count_ + << "; resend delay = " << resend_delay(); } int StunRequest::resend_delay() { + RTC_DCHECK_RUN_ON(network_thread()); if (count_ == 0) { return 0; } @@ -279,4 +302,9 @@ int StunRequest::resend_delay() { return std::min(rto, STUN_MAX_RTO); } +void StunRequest::set_timed_out() { + RTC_DCHECK_RUN_ON(network_thread()); + timeout_ = true; +} + } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.h b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.h index b417c705cd..6e83be3830 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.h @@ -14,19 +14,21 @@ #include #include +#include #include +#include #include +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" #include "api/transport/stun.h" -#include "rtc_base/message_handler.h" -#include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread.h" +#include "api/units/time_delta.h" namespace cricket { class StunRequest; -const int kAllRequests = 0; +const int kAllRequestsForTest = 0; // Total max timeouts: 39.75 seconds // For years, this was 9.5 seconds, but for networks that experience moments of @@ -37,25 +39,27 @@ const int STUN_TOTAL_TIMEOUT = 39750; // milliseconds // response or determine that the request has timed out. class StunRequestManager { public: - explicit StunRequestManager(rtc::Thread* thread); + StunRequestManager( + webrtc::TaskQueueBase* thread, + std::function send_packet); ~StunRequestManager(); // Starts sending the given request (perhaps after a delay). void Send(StunRequest* request); void SendDelayed(StunRequest* request, int delay); - // If `msg_type` is kAllRequests, sends all pending requests right away. - // Otherwise, sends those that have a matching type right away. - // Only for testing. - void Flush(int msg_type); + // If `msg_type` is kAllRequestsForTest, sends all pending requests right + // away. Otherwise, sends those that have a matching type right away. Only for + // testing. + // TODO(tommi): Remove this method and update tests that use it to simulate + // production code. + void FlushForTest(int msg_type); // Returns true if at least one request with `msg_type` is scheduled for // transmission. For testing only. - bool HasRequest(int msg_type); - - // Removes a stun request that was added previously. This will happen - // automatically when a request succeeds, fails, or times out. - void Remove(StunRequest* request); + // TODO(tommi): Remove this method and update tests that use it to simulate + // production code. + bool HasRequestForTest(int msg_type); // Removes all stun requests that were added previously. void Clear(); @@ -65,36 +69,37 @@ class StunRequestManager { bool CheckResponse(StunMessage* msg); bool CheckResponse(const char* data, size_t size); - bool empty() { return requests_.empty(); } + // Called from a StunRequest when a timeout occurs. 
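// [Editor's note, not part of the upstream patch] resend_delay() above returns
// 0 for the first transmission and otherwise a retransmission timeout capped at
// STUN_MAX_RTO (8000 ms, "5 doublings" per the comment near the top of this
// file). A small sketch that prints such a schedule, assuming the delay doubles
// per send; kInitialRto is an assumed placeholder, not the constant from the
// patch:

#include <algorithm>
#include <iostream>

int main() {
  const int kInitialRto = 250;         // assumed starting RTO in milliseconds
  const int kMaxRto = 8000;            // mirrors STUN_MAX_RTO above
  const int kMaxRetransmissions = 8;   // STUN_MAX_RETRANSMISSIONS: 9 sends total

  std::cout << "send 1: immediately\n";
  for (int count = 1; count <= kMaxRetransmissions; ++count) {
    const int rto = kInitialRto << (count - 1);  // double on every resend
    std::cout << "send " << (count + 1) << ": after "
              << std::min(rto, kMaxRto) << " ms\n";
  }
}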
+ void OnRequestTimedOut(StunRequest* request); - // Raised when there are bytes to be sent. - sigslot::signal3 SignalSendPacket; + bool empty() const; - private: - typedef std::map RequestMap; + webrtc::TaskQueueBase* network_thread() const { return thread_; } - rtc::Thread* const thread_; - RequestMap requests_; + void SendPacket(const void* data, size_t size, StunRequest* request); + + private: + typedef std::map> RequestMap; - friend class StunRequest; + webrtc::TaskQueueBase* const thread_; + RequestMap requests_ RTC_GUARDED_BY(thread_); + const std::function send_packet_; }; // Represents an individual request to be sent. The STUN message can either be // constructed beforehand or built on demand. -class StunRequest : public rtc::MessageHandler { +class StunRequest { public: - StunRequest(); - explicit StunRequest(StunMessage* request); - ~StunRequest() override; - - // Causes our wrapped StunMessage to be Prepared - void Construct(); + explicit StunRequest(StunRequestManager& manager); + StunRequest(StunRequestManager& manager, + std::unique_ptr message); + virtual ~StunRequest(); // The manager handling this request (if it has been scheduled for sending). - StunRequestManager* manager() { return manager_; } + StunRequestManager* manager() { return &manager_; } // Returns the transaction ID of this request. - const std::string& id() { return msg_->transaction_id(); } + const std::string& id() const { return msg_->transaction_id(); } // Returns the reduced transaction ID of this request. uint32_t reduced_transaction_id() const { @@ -107,19 +112,20 @@ class StunRequest : public rtc::MessageHandler { // Returns a const pointer to `msg_`. const StunMessage* msg() const; - // Returns a mutable pointer to `msg_`. - StunMessage* mutable_msg(); - // Time elapsed since last send (in ms) int Elapsed() const; protected: - int count_; - bool timeout_; + friend class StunRequestManager; - // Fills in a request object to be sent. Note that request's transaction ID - // will already be set and cannot be changed. - virtual void Prepare(StunMessage* request) {} + // Called by StunRequestManager. + void Send(webrtc::TimeDelta delay); + + // Called from FlushForTest. + // TODO(tommi): Remove when FlushForTest gets removed. + void ResetTasksForTest(); + + StunMessage* mutable_msg() { return msg_.get(); } // Called when the message receives a response or times out. virtual void OnResponse(StunMessage* response) {} @@ -127,20 +133,28 @@ class StunRequest : public rtc::MessageHandler { virtual void OnTimeout() {} // Called when the message is sent. virtual void OnSent(); - // Returns the next delay for resends. + // Returns the next delay for resends in milliseconds. virtual int resend_delay(); - private: - void set_manager(StunRequestManager* manager); - - // Handles messages for sending and timeout. - void OnMessage(rtc::Message* pmsg) override; + webrtc::TaskQueueBase* network_thread() const { + return manager_.network_thread(); + } - StunRequestManager* manager_; - StunMessage* msg_; - int64_t tstamp_; + void set_timed_out(); - friend class StunRequestManager; + private: + void SendInternal(); + // Calls `PostDelayedTask` to queue up a call to SendInternal after the + // specified timeout. 
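// [Editor's note, not part of the upstream patch] SendDelayed() above posts
// SendInternal() through webrtc::SafeTask guarded by a PendingTaskSafetyFlag,
// so queued retransmissions become no-ops once the flag is no longer alive. A
// rough single-threaded approximation of that guard using only the standard
// library (the real flag is ref-counted and thread-checked; this only shows the
// shape of the idea):

#include <functional>
#include <iostream>
#include <memory>
#include <queue>

using Flag = std::shared_ptr<bool>;  // true while the owning object is alive

std::function<void()> GuardedTask(Flag flag, std::function<void()> body) {
  return [flag = std::move(flag), body = std::move(body)]() {
    if (*flag) body();  // skip the work if the owner has gone away
  };
}

int main() {
  std::queue<std::function<void()>> task_queue;  // stands in for PostDelayedTask
  auto flag = std::make_shared<bool>(true);

  task_queue.push(GuardedTask(flag, [] { std::cout << "resend 1\n"; }));
  task_queue.push(GuardedTask(flag, [] { std::cout << "resend 2\n"; }));

  task_queue.front()();  // prints "resend 1"
  task_queue.pop();

  *flag = false;         // the request was answered or destroyed
  task_queue.front()();  // no output: the pending task is now a no-op
  task_queue.pop();
}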
+ void SendDelayed(webrtc::TimeDelta delay); + + StunRequestManager& manager_; + const std::unique_ptr msg_; + int64_t tstamp_ RTC_GUARDED_BY(network_thread()); + int count_ RTC_GUARDED_BY(network_thread()); + bool timeout_ RTC_GUARDED_BY(network_thread()); + webrtc::ScopedTaskSafety task_safety_{ + webrtc::PendingTaskSafetyFlag::CreateDetachedInactive()}; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_server.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_server.cc index 382b787951..7827a0bb81 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_server.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_server.cc @@ -10,8 +10,10 @@ #include "p2p/base/stun_server.h" +#include #include +#include "absl/strings/string_view.h" #include "rtc_base/byte_buffer.h" #include "rtc_base/logging.h" @@ -53,7 +55,7 @@ void StunServer::OnPacket(rtc::AsyncPacketSocket* socket, void StunServer::OnBindingRequest(StunMessage* msg, const rtc::SocketAddress& remote_addr) { - StunMessage response; + StunMessage response(STUN_BINDING_RESPONSE, msg->transaction_id()); GetStunBindResponse(msg, remote_addr, &response); SendResponse(response, remote_addr); } @@ -61,14 +63,13 @@ void StunServer::OnBindingRequest(StunMessage* msg, void StunServer::SendErrorResponse(const StunMessage& msg, const rtc::SocketAddress& addr, int error_code, - const char* error_desc) { - StunMessage err_msg; - err_msg.SetType(GetStunErrorResponseType(msg.type())); - err_msg.SetTransactionID(msg.transaction_id()); + absl::string_view error_desc) { + StunMessage err_msg(GetStunErrorResponseType(msg.type()), + msg.transaction_id()); auto err_code = StunAttribute::CreateErrorCode(); err_code->SetCode(error_code); - err_code->SetReason(error_desc); + err_code->SetReason(std::string(error_desc)); err_msg.AddAttribute(std::move(err_code)); SendResponse(err_msg, addr); @@ -83,15 +84,15 @@ void StunServer::SendResponse(const StunMessage& msg, RTC_LOG_ERR(LS_ERROR) << "sendto"; } -void StunServer::GetStunBindResponse(StunMessage* request, +void StunServer::GetStunBindResponse(StunMessage* message, const rtc::SocketAddress& remote_addr, StunMessage* response) const { - response->SetType(STUN_BINDING_RESPONSE); - response->SetTransactionID(request->transaction_id()); + RTC_DCHECK_EQ(response->type(), STUN_BINDING_RESPONSE); + RTC_DCHECK_EQ(response->transaction_id(), message->transaction_id()); - // Tell the user the address that we received their request from. + // Tell the user the address that we received their message from. std::unique_ptr mapped_addr; - if (request->IsLegacy()) { + if (message->IsLegacy()) { mapped_addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); } else { mapped_addr = StunAttribute::CreateXorAddress(STUN_ATTR_XOR_MAPPED_ADDRESS); diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_server.h b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_server.h index f2126db191..505773b052 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_server.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_server.h @@ -16,6 +16,7 @@ #include +#include "absl/strings/string_view.h" #include "api/transport/stun.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/async_udp_socket.h" @@ -52,13 +53,13 @@ class StunServer : public sigslot::has_slots<> { void SendErrorResponse(const StunMessage& msg, const rtc::SocketAddress& addr, int error_code, - const char* error_desc); + absl::string_view error_desc); // Sends the given message to the appropriate destination. 
void SendResponse(const StunMessage& msg, const rtc::SocketAddress& addr); // A helper method to compose a STUN binding response. - void GetStunBindResponse(StunMessage* request, + void GetStunBindResponse(StunMessage* message, const rtc::SocketAddress& remote_addr, StunMessage* response) const; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.cc index 9d542074a4..fbda2999f9 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.cc @@ -68,29 +68,35 @@ #include +#include #include #include "absl/algorithm/container.h" #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/units/time_delta.h" #include "p2p/base/p2p_constants.h" #include "rtc_base/checks.h" #include "rtc_base/ip_address.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/net_helper.h" #include "rtc_base/rate_tracker.h" #include "rtc_base/third_party/sigslot/sigslot.h" namespace cricket { +using ::webrtc::SafeTask; +using ::webrtc::TimeDelta; TCPPort::TCPPort(rtc::Thread* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, uint16_t min_port, uint16_t max_port, - const std::string& username, - const std::string& password, - bool allow_listen) + absl::string_view username, + absl::string_view password, + bool allow_listen, + const webrtc::FieldTrialsView* field_trials) : Port(thread, LOCAL_PORT_TYPE, factory, @@ -98,7 +104,8 @@ TCPPort::TCPPort(rtc::Thread* thread, min_port, max_port, username, - password), + password, + field_trials), allow_listen_(allow_listen), error_(0) { // TODO(mallinath) - Set preference value as per RFC 6544. @@ -154,11 +161,11 @@ Connection* TCPPort::CreateConnection(const Candidate& address, // so we need to hand off the "read packet" responsibility to // TCPConnection. socket->SignalReadPacket.disconnect(this); - conn = new TCPConnection(this, address, socket); + conn = new TCPConnection(NewWeakPtr(), address, socket); } else { // Outgoing connection, which will create a new socket for which we still // need to connect SignalReadyToSend and SignalSentPacket. - conn = new TCPConnection(this, address); + conn = new TCPConnection(NewWeakPtr(), address); if (conn->socket()) { conn->socket()->SignalReadyToSend.connect(this, &TCPPort::OnReadyToSend); conn->socket()->SignalSentPacket.connect(this, &TCPPort::OnSentPacket); @@ -264,7 +271,7 @@ int TCPPort::GetError() { return error_; } -bool TCPPort::SupportsProtocol(const std::string& protocol) const { +bool TCPPort::SupportsProtocol(absl::string_view protocol) const { return protocol == TCP_PROTOCOL_NAME || protocol == SSLTCP_PROTOCOL_NAME; } @@ -274,7 +281,7 @@ ProtocolType TCPPort::GetProtocol() const { void TCPPort::OnNewConnection(rtc::AsyncListenSocket* socket, rtc::AsyncPacketSocket* new_socket) { - RTC_DCHECK(socket == listen_socket_.get()); + RTC_DCHECK_EQ(socket, listen_socket_.get()); for (const auto& option : socket_options_) { new_socket->SetOption(option.first, option.second); @@ -340,16 +347,17 @@ void TCPPort::OnReadyToSend(rtc::AsyncPacketSocket* socket) { // `ice_unwritable_timeout` in IceConfig when determining the writability state. // Replace this constant with the config parameter assuming the default value if // we decide it is also applicable here. 
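// [Editor's note, not part of the upstream patch] CreateConnection() above now
// hands TCPConnection a weak pointer to its port (NewWeakPtr()), and the socket
// callbacks further down bail out with "Port has been deleted" when the port is
// gone. A sketch of the same defensive shape with std::weak_ptr (WebRTC's
// rtc::WeakPtr is single-threaded and not identical; only the lifetime check is
// illustrated, and the types here are invented):

#include <iostream>
#include <memory>

struct PortSketch {
  void HandleClose() { std::cout << "port notified\n"; }
};

class ConnectionSketch {
 public:
  explicit ConnectionSketch(std::weak_ptr<PortSketch> port)
      : port_(std::move(port)) {}

  void OnClose() {
    auto port = port_.lock();
    if (!port) {
      std::cout << "Port has been deleted; dropping the callback.\n";
      return;
    }
    port->HandleClose();
  }

 private:
  std::weak_ptr<PortSketch> port_;
};

int main() {
  auto port = std::make_shared<PortSketch>();
  ConnectionSketch conn(port);
  conn.OnClose();  // "port notified"
  port.reset();    // the port goes away first
  conn.OnClose();  // the late callback is now a safe no-op
}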
-TCPConnection::TCPConnection(TCPPort* port, +TCPConnection::TCPConnection(rtc::WeakPtr tcp_port, const Candidate& candidate, rtc::AsyncPacketSocket* socket) - : Connection(port, 0, candidate), + : Connection(std::move(tcp_port), 0, candidate), socket_(socket), error_(0), outgoing_(socket == NULL), connection_pending_(false), pretending_to_be_writable_(false), reconnection_timeout_(cricket::CONNECTION_WRITE_CONNECT_TIMEOUT) { + RTC_DCHECK_EQ(port()->GetProtocol(), PROTO_TCP); // Needs to be TCPPort. if (outgoing_) { CreateOutgoingTcpSocket(); } else { @@ -357,7 +365,7 @@ TCPConnection::TCPConnection(TCPPort* port, // what's being checked in OnConnect, but just DCHECKing here. RTC_LOG(LS_VERBOSE) << ToString() << ": socket ipaddr: " << socket_->GetLocalAddress().ToSensitiveString() - << ", port() Network:" << port->Network()->ToString(); + << ", port() Network:" << port()->Network()->ToString(); RTC_DCHECK(absl::c_any_of( port_->Network()->GetIPs(), [this](const rtc::InterfaceAddress& addr) { return socket_->GetLocalAddress().ipaddr() == addr; @@ -366,7 +374,9 @@ TCPConnection::TCPConnection(TCPPort* port, } } -TCPConnection::~TCPConnection() {} +TCPConnection::~TCPConnection() { + RTC_DCHECK_RUN_ON(network_thread_); +} int TCPConnection::Send(const void* data, size_t size, @@ -394,7 +404,7 @@ int TCPConnection::Send(const void* data, } stats_.sent_total_packets++; rtc::PacketOptions modified_options(options); - static_cast(port_)->CopyPortInformationToPacketInfo( + tcp_port()->CopyPortInformationToPacketInfo( &modified_options.info_signaled_after_sent); int sent = socket_->Send(data, size, modified_options); int64_t now = rtc::TimeMillis(); @@ -412,7 +422,7 @@ int TCPConnection::GetError() { return error_; } -void TCPConnection::OnConnectionRequestResponse(ConnectionRequest* req, +void TCPConnection::OnConnectionRequestResponse(StunRequest* req, StunMessage* response) { // Process the STUN response before we inform upper layer ready to send. Connection::OnConnectionRequestResponse(req, response); @@ -428,7 +438,13 @@ void TCPConnection::OnConnectionRequestResponse(ConnectionRequest* req, } void TCPConnection::OnConnect(rtc::AsyncPacketSocket* socket) { - RTC_DCHECK(socket == socket_.get()); + RTC_DCHECK_EQ(socket, socket_.get()); + + if (!port_) { + RTC_LOG(LS_ERROR) << "TCPConnection: Port has been deleted."; + return; + } + // Do not use this port if the socket bound to an address not associated with // the desired network interface. This is seen in Chrome, where TCP sockets // cannot be given a binding address, and the platform is expected to pick @@ -481,9 +497,14 @@ void TCPConnection::OnConnect(rtc::AsyncPacketSocket* socket) { } void TCPConnection::OnClose(rtc::AsyncPacketSocket* socket, int error) { - RTC_DCHECK(socket == socket_.get()); + RTC_DCHECK_EQ(socket, socket_.get()); RTC_LOG(LS_INFO) << ToString() << ": Connection closed with error " << error; + if (!port_) { + RTC_LOG(LS_ERROR) << "TCPConnection: Port has been deleted."; + return; + } + // Guard against the condition where IPC socket will call OnClose for every // packet it can't send. if (connected()) { @@ -493,35 +514,27 @@ void TCPConnection::OnClose(rtc::AsyncPacketSocket* socket, int error) { // events. pretending_to_be_writable_ = true; + // If this connection can't become connected and writable again in 5 + // seconds, it's time to tear this down. This is the case for the original + // TCP connection on passive side during a reconnect. // We don't attempt reconnect right here. 
This is to avoid a case where the // shutdown is intentional and reconnect is not necessary. We only reconnect // when the connection is used to Send() or Ping(). - port()->thread()->PostDelayed(RTC_FROM_HERE, reconnection_timeout(), this, - MSG_TCPCONNECTION_DELAYED_ONCLOSE); + network_thread()->PostDelayedTask( + SafeTask(network_safety_.flag(), + [this]() { + if (pretending_to_be_writable_) { + Destroy(); + } + }), + TimeDelta::Millis(reconnection_timeout())); } else if (!pretending_to_be_writable_) { // OnClose could be called when the underneath socket times out during the // initial connect() (i.e. `pretending_to_be_writable_` is false) . We have // to manually destroy here as this connection, as never connected, will not // be scheduled for ping to trigger destroy. - Destroy(); - } -} - -void TCPConnection::OnMessage(rtc::Message* pmsg) { - switch (pmsg->message_id) { - case MSG_TCPCONNECTION_DELAYED_ONCLOSE: - // If this connection can't become connected and writable again in 5 - // seconds, it's time to tear this down. This is the case for the original - // TCP connection on passive side during a reconnect. - if (pretending_to_be_writable_) { - Destroy(); - } - break; - case MSG_TCPCONNECTION_FAILED_CREATE_SOCKET: - FailAndPrune(); - break; - default: - Connection::OnMessage(pmsg); + socket_->UnsubscribeClose(this); + port()->DestroyConnectionAsync(this); } } @@ -545,12 +558,12 @@ void TCPConnection::OnReadPacket(rtc::AsyncPacketSocket* socket, size_t size, const rtc::SocketAddress& remote_addr, const int64_t& packet_time_us) { - RTC_DCHECK(socket == socket_.get()); + RTC_DCHECK_EQ(socket, socket_.get()); Connection::OnReadPacket(data, size, packet_time_us); } void TCPConnection::OnReadyToSend(rtc::AsyncPacketSocket* socket) { - RTC_DCHECK(socket == socket_.get()); + RTC_DCHECK_EQ(socket, socket_.get()); Connection::OnReadyToSend(); } @@ -559,6 +572,11 @@ void TCPConnection::CreateOutgoingTcpSocket() { int opts = (remote_candidate().protocol() == SSLTCP_PROTOCOL_NAME) ? rtc::PacketSocketFactory::OPT_TLS_FAKE : 0; + + if (socket_) { + socket_->UnsubscribeClose(this); + } + rtc::PacketSocketTcpOptions tcp_opts; tcp_opts.opts = opts; socket_.reset(port()->socket_factory()->CreateClientTcpSocket( @@ -576,13 +594,13 @@ void TCPConnection::CreateOutgoingTcpSocket() { } else { RTC_LOG(LS_WARNING) << ToString() << ": Failed to create connection to " << remote_candidate().address().ToSensitiveString(); + set_state(IceCandidatePairState::FAILED); // We can't FailAndPrune directly here. FailAndPrune and deletes all // the StunRequests from the request_map_. And if this is in the stack // of Connection::Ping(), we are still using the request. // Unwind the stack and defer the FailAndPrune. 
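// [Editor's note, not part of the upstream patch] The comment above explains why
// FailAndPrune() cannot run inline: the failure is detected while the caller may
// still be inside Connection::Ping() using the very request that FailAndPrune()
// would delete, so the work is posted as a task to let the stack unwind first. A
// toy sketch of that "defer to unwind the stack" pattern, with a plain queue
// standing in for the network thread's task queue (all names invented):

#include <functional>
#include <iostream>
#include <queue>

std::queue<std::function<void()>> g_pending;  // stands in for PostTask()

struct ConnectionSketch {
  bool in_use = false;

  void Ping() {
    in_use = true;
    OnSocketCreationFailed();  // must not tear down state we are still using
    in_use = false;
  }

  void OnSocketCreationFailed() {
    // Deferring keeps the destructive call off the current call stack.
    g_pending.push([this] { FailAndPrune(); });
  }

  void FailAndPrune() {
    std::cout << "pruned, in_use=" << in_use << "\n";  // prints in_use=0
  }
};

int main() {
  ConnectionSketch conn;
  conn.Ping();
  while (!g_pending.empty()) {  // drained only after Ping() has returned
    g_pending.front()();
    g_pending.pop();
  }
}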
- set_state(IceCandidatePairState::FAILED); - port()->thread()->Post(RTC_FROM_HERE, this, - MSG_TCPCONNECTION_FAILED_CREATE_SOCKET); + network_thread()->PostTask( + SafeTask(network_safety_.flag(), [this]() { FailAndPrune(); })); } } @@ -592,7 +610,11 @@ void TCPConnection::ConnectSocketSignals(rtc::AsyncPacketSocket* socket) { } socket->SignalReadPacket.connect(this, &TCPConnection::OnReadPacket); socket->SignalReadyToSend.connect(this, &TCPConnection::OnReadyToSend); - socket->SignalClose.connect(this, &TCPConnection::OnClose); + socket->SubscribeClose(this, [this, safety = network_safety_.flag()]( + rtc::AsyncPacketSocket* s, int err) { + if (safety->alive()) + OnClose(s, err); + }); } } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.h b/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.h index 932af50aa4..ff69e6e48b 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.h @@ -16,6 +16,8 @@ #include #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "p2p/base/connection.h" #include "p2p/base/port.h" #include "rtc_base/async_packet_socket.h" @@ -33,18 +35,20 @@ class TCPConnection; // call this TCPPort::OnReadPacket (3 arg) to dispatch to a connection. class TCPPort : public Port { public: - static std::unique_ptr Create(rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - rtc::Network* network, - uint16_t min_port, - uint16_t max_port, - const std::string& username, - const std::string& password, - bool allow_listen) { + static std::unique_ptr Create( + rtc::Thread* thread, + rtc::PacketSocketFactory* factory, + const rtc::Network* network, + uint16_t min_port, + uint16_t max_port, + absl::string_view username, + absl::string_view password, + bool allow_listen, + const webrtc::FieldTrialsView* field_trials = nullptr) { // Using `new` to access a non-public constructor. return absl::WrapUnique(new TCPPort(thread, factory, network, min_port, max_port, username, password, - allow_listen)); + allow_listen, field_trials)); } ~TCPPort() override; @@ -59,18 +63,19 @@ class TCPPort : public Port { int GetOption(rtc::Socket::Option opt, int* value) override; int SetOption(rtc::Socket::Option opt, int value) override; int GetError() override; - bool SupportsProtocol(const std::string& protocol) const override; + bool SupportsProtocol(absl::string_view protocol) const override; ProtocolType GetProtocol() const override; protected: TCPPort(rtc::Thread* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, uint16_t min_port, uint16_t max_port, - const std::string& username, - const std::string& password, - bool allow_listen); + absl::string_view username, + absl::string_view password, + bool allow_listen, + const webrtc::FieldTrialsView* field_trials); // Handles sending using the local TCP socket. 
int SendTo(const void* data, @@ -120,12 +125,12 @@ class TCPPort : public Port { friend class TCPConnection; }; -class TCPConnection : public Connection { +class TCPConnection : public Connection, public sigslot::has_slots<> { public: // Connection is outgoing unless socket is specified - TCPConnection(TCPPort* port, + TCPConnection(rtc::WeakPtr tcp_port, const Candidate& candidate, - rtc::AsyncPacketSocket* socket = 0); + rtc::AsyncPacketSocket* socket = nullptr); ~TCPConnection() override; int Send(const void* data, @@ -135,8 +140,6 @@ class TCPConnection : public Connection { rtc::AsyncPacketSocket* socket() { return socket_.get(); } - void OnMessage(rtc::Message* pmsg) override; - // Allow test cases to overwrite the default timeout period. int reconnection_timeout() const { return reconnection_timeout_; } void set_reconnection_timeout(int timeout_in_ms) { @@ -144,14 +147,9 @@ class TCPConnection : public Connection { } protected: - enum { - MSG_TCPCONNECTION_DELAYED_ONCLOSE = Connection::MSG_FIRST_AVAILABLE, - MSG_TCPCONNECTION_FAILED_CREATE_SOCKET, - }; - // Set waiting_for_stun_binding_complete_ to false to allow data packets in // addition to what Port::OnConnectionRequestResponse does. - void OnConnectionRequestResponse(ConnectionRequest* req, + void OnConnectionRequestResponse(StunRequest* req, StunMessage* response) override; private: @@ -172,6 +170,11 @@ class TCPConnection : public Connection { const int64_t& packet_time_us); void OnReadyToSend(rtc::AsyncPacketSocket* socket); + TCPPort* tcp_port() { + RTC_DCHECK_EQ(port()->GetProtocol(), PROTO_TCP); + return static_cast(port()); + } + std::unique_ptr socket_; int error_; bool outgoing_; @@ -190,6 +193,8 @@ class TCPConnection : public Connection { // Allow test case to overwrite the default timeout period. 
int reconnection_timeout_; + webrtc::ScopedTaskSafety network_safety_; + friend class TCPPort; }; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/test_stun_server.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/test_stun_server.cc index 3bd793cfbb..d4c3b2d851 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/test_stun_server.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/test_stun_server.cc @@ -28,7 +28,7 @@ void TestStunServer::OnBindingRequest(StunMessage* msg, if (fake_stun_addr_.IsNil()) { StunServer::OnBindingRequest(msg, remote_addr); } else { - StunMessage response; + StunMessage response(STUN_BINDING_RESPONSE, msg->transaction_id()); GetStunBindResponse(msg, fake_stun_addr_, &response); SendResponse(response, remote_addr); } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/test_turn_server.h b/TMessagesProj/jni/voip/webrtc/p2p/base/test_turn_server.h index 7110a8a5a0..4070372db2 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/test_turn_server.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/test_turn_server.h @@ -16,6 +16,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/sequence_checker.h" #include "api/transport/stun.h" #include "p2p/base/basic_packet_socket_factory.h" @@ -58,7 +59,7 @@ class TestTurnServer : public TurnAuthInterface { const rtc::SocketAddress& udp_ext_addr, ProtocolType int_protocol = PROTO_UDP, bool ignore_bad_cert = true, - const std::string& common_name = "test turn server") + absl::string_view common_name = "test turn server") : server_(thread), socket_factory_(socket_factory) { AddInternalSocket(int_addr, int_protocol, ignore_bad_cert, common_name); server_.SetExternalSocketFactory( @@ -93,7 +94,7 @@ class TestTurnServer : public TurnAuthInterface { void AddInternalSocket(const rtc::SocketAddress& int_addr, ProtocolType proto, bool ignore_bad_cert = true, - const std::string& common_name = "test turn server") { + absl::string_view common_name = "test turn server") { RTC_DCHECK(thread_checker_.IsCurrent()); if (proto == cricket::PROTO_UDP) { server_.AddInternalSocket( @@ -142,11 +143,12 @@ class TestTurnServer : public TurnAuthInterface { private: // For this test server, succeed if the password is the same as the username. // Obviously, do not use this in a production environment. 
- virtual bool GetKey(const std::string& username, - const std::string& realm, + virtual bool GetKey(absl::string_view username, + absl::string_view realm, std::string* key) { RTC_DCHECK(thread_checker_.IsCurrent()); - return ComputeStunCredentialHash(username, realm, username, key); + return ComputeStunCredentialHash(std::string(username), std::string(realm), + std::string(username), key); } TurnServer server_; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description.cc index 96fb9597e0..f3b1fbb6ea 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description.cc @@ -12,6 +12,7 @@ #include "absl/strings/ascii.h" #include "absl/strings/match.h" +#include "absl/strings/string_view.h" #include "p2p/base/p2p_constants.h" #include "rtc_base/arraysize.h" #include "rtc_base/logging.h" @@ -108,18 +109,18 @@ RTCError IceParameters::Validate() const { return RTCError::OK(); } -bool StringToConnectionRole(const std::string& role_str, ConnectionRole* role) { +absl::optional StringToConnectionRole( + absl::string_view role_str) { const char* const roles[] = { CONNECTIONROLE_ACTIVE_STR, CONNECTIONROLE_PASSIVE_STR, CONNECTIONROLE_ACTPASS_STR, CONNECTIONROLE_HOLDCONN_STR}; for (size_t i = 0; i < arraysize(roles); ++i) { if (absl::EqualsIgnoreCase(roles[i], role_str)) { - *role = static_cast(CONNECTIONROLE_ACTIVE + i); - return true; + return static_cast(CONNECTIONROLE_ACTIVE + i); } } - return false; + return absl::nullopt; } bool ConnectionRoleToString(const ConnectionRole& role, std::string* role_str) { @@ -147,8 +148,8 @@ TransportDescription::TransportDescription() TransportDescription::TransportDescription( const std::vector& transport_options, - const std::string& ice_ufrag, - const std::string& ice_pwd, + absl::string_view ice_ufrag, + absl::string_view ice_pwd, IceMode ice_mode, ConnectionRole role, const rtc::SSLFingerprint* identity_fingerprint) @@ -159,8 +160,8 @@ TransportDescription::TransportDescription( connection_role(role), identity_fingerprint(CopyFingerprint(identity_fingerprint)) {} -TransportDescription::TransportDescription(const std::string& ice_ufrag, - const std::string& ice_pwd) +TransportDescription::TransportDescription(absl::string_view ice_ufrag, + absl::string_view ice_pwd) : ice_ufrag(ice_ufrag), ice_pwd(ice_pwd), ice_mode(ICEMODE_FULL), diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description.h b/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description.h index 32fdb5c9b3..7d28ad52e9 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description.h @@ -16,6 +16,7 @@ #include #include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/rtc_error.h" #include "p2p/base/p2p_constants.h" @@ -71,8 +72,8 @@ struct IceParameters { std::string pwd; bool renomination = false; IceParameters() = default; - IceParameters(const std::string& ice_ufrag, - const std::string& ice_pwd, + IceParameters(absl::string_view ice_ufrag, + absl::string_view ice_pwd, bool ice_renomination) : ufrag(ice_ufrag), pwd(ice_pwd), renomination(ice_renomination) {} @@ -97,30 +98,30 @@ extern const char CONNECTIONROLE_HOLDCONN_STR[]; constexpr auto* ICE_OPTION_TRICKLE = "trickle"; constexpr auto* ICE_OPTION_RENOMINATION = "renomination"; -bool StringToConnectionRole(const std::string& 
role_str, ConnectionRole* role); +absl::optional StringToConnectionRole( + absl::string_view role_str); bool ConnectionRoleToString(const ConnectionRole& role, std::string* role_str); struct TransportDescription { TransportDescription(); TransportDescription(const std::vector& transport_options, - const std::string& ice_ufrag, - const std::string& ice_pwd, + absl::string_view ice_ufrag, + absl::string_view ice_pwd, IceMode ice_mode, ConnectionRole role, const rtc::SSLFingerprint* identity_fingerprint); - TransportDescription(const std::string& ice_ufrag, - const std::string& ice_pwd); + TransportDescription(absl::string_view ice_ufrag, absl::string_view ice_pwd); TransportDescription(const TransportDescription& from); ~TransportDescription(); TransportDescription& operator=(const TransportDescription& from); // TODO(deadbeef): Rename to HasIceOption, etc. - bool HasOption(const std::string& option) const { + bool HasOption(absl::string_view option) const { return absl::c_linear_search(transport_options, option); } - void AddOption(const std::string& option) { - transport_options.push_back(option); + void AddOption(absl::string_view option) { + transport_options.emplace_back(option); } bool secure() const { return identity_fingerprint != nullptr; } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description_factory.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description_factory.cc index e46114ed83..7eb21da166 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description_factory.cc @@ -21,8 +21,9 @@ namespace cricket { -TransportDescriptionFactory::TransportDescriptionFactory() - : secure_(SEC_DISABLED) {} +TransportDescriptionFactory::TransportDescriptionFactory( + const webrtc::FieldTrialsView& field_trials) + : secure_(SEC_DISABLED), field_trials_(field_trials) {} TransportDescriptionFactory::~TransportDescriptionFactory() = default; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description_factory.h b/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description_factory.h index 0be7f32929..11352f88b4 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description_factory.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/transport_description_factory.h @@ -12,7 +12,9 @@ #define P2P_BASE_TRANSPORT_DESCRIPTION_FACTORY_H_ #include +#include +#include "api/field_trials_view.h" #include "p2p/base/ice_credentials_iterator.h" #include "p2p/base/transport_description.h" #include "rtc_base/rtc_certificate.h" @@ -37,7 +39,8 @@ struct TransportOptions { class TransportDescriptionFactory { public: // Default ctor; use methods below to set configuration. - TransportDescriptionFactory(); + explicit TransportDescriptionFactory( + const webrtc::FieldTrialsView& field_trials); ~TransportDescriptionFactory(); SecurePolicy secure() const { return secure_; } @@ -49,9 +52,8 @@ class TransportDescriptionFactory { // Specifies the transport security policy to use. void set_secure(SecurePolicy s) { secure_ = s; } // Specifies the certificate to use (only used when secure != SEC_DISABLED). - void set_certificate( - const rtc::scoped_refptr& certificate) { - certificate_ = certificate; + void set_certificate(rtc::scoped_refptr certificate) { + certificate_ = std::move(certificate); } // Creates a transport description suitable for use in an offer. 
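// [Editor's note, not part of the upstream patch] StringToConnectionRole above
// switches from a bool return plus out-parameter to returning absl::optional. A
// sketch of the same API shape with std::optional and a reduced enum; the real
// function matches case-insensitively via absl::EqualsIgnoreCase, which is
// omitted here for brevity:

#include <array>
#include <cstddef>
#include <iostream>
#include <optional>
#include <string_view>

enum class Role { kActive, kPassive, kActpass, kHoldconn };

std::optional<Role> StringToRole(std::string_view role_str) {
  constexpr std::array<std::string_view, 4> kNames = {"active", "passive",
                                                      "actpass", "holdconn"};
  for (std::size_t i = 0; i < kNames.size(); ++i) {
    if (role_str == kNames[i]) {
      return static_cast<Role>(i);
    }
  }
  return std::nullopt;  // replaces `return false;` plus an untouched out-param
}

int main() {
  if (auto role = StringToRole("actpass")) {
    std::cout << "parsed role " << static_cast<int>(*role) << "\n";
  }
  std::cout << std::boolalpha << StringToRole("bogus").has_value() << "\n";
}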
@@ -73,12 +75,15 @@ class TransportDescriptionFactory { const TransportDescription* current_description, IceCredentialsIterator* ice_credentials) const; + const webrtc::FieldTrialsView& trials() const { return field_trials_; } + private: bool SetSecurityInfo(TransportDescription* description, ConnectionRole role) const; SecurePolicy secure_; rtc::scoped_refptr certificate_; + const webrtc::FieldTrialsView& field_trials_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.cc index 07c1060432..970d6adec6 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.cc @@ -17,7 +17,9 @@ #include "absl/algorithm/container.h" #include "absl/strings/match.h" +#include "absl/strings/string_view.h" #include "absl/types/optional.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/transport/stun.h" #include "p2p/base/connection.h" #include "p2p/base/p2p_constants.h" @@ -28,24 +30,26 @@ #include "rtc_base/net_helpers.h" #include "rtc_base/socket_address.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/task_utils/to_queued_task.h" -#include "system_wrappers/include/field_trial.h" namespace cricket { +using ::webrtc::SafeTask; +using ::webrtc::TaskQueueBase; +using ::webrtc::TimeDelta; + // TODO(juberti): Move to stun.h when relay messages have been renamed. static const int TURN_ALLOCATE_REQUEST = STUN_ALLOCATE_REQUEST; // Attributes in comprehension-optional range, // ignored by TURN server that doesn't know about them. // https://tools.ietf.org/html/rfc5389#section-18.2 -static const int STUN_ATTR_MULTI_MAPPING = 0xff04; const int STUN_ATTR_TURN_LOGGING_ID = 0xff05; // TODO(juberti): Extract to turnmessage.h static const int TURN_DEFAULT_PORT = 3478; static const int TURN_CHANNEL_NUMBER_START = 0x4000; -static const int TURN_PERMISSION_TIMEOUT = 5 * 60 * 1000; // 5 minutes + +static constexpr TimeDelta kTurnPermissionTimeout = TimeDelta::Minutes(5); static const size_t TURN_CHANNEL_HEADER_SIZE = 4U; @@ -74,7 +78,6 @@ static int GetRelayPreference(cricket::ProtocolType proto) { class TurnAllocateRequest : public StunRequest { public: explicit TurnAllocateRequest(TurnPort* port); - void Prepare(StunMessage* request) override; void OnSent() override; void OnResponse(StunMessage* response) override; void OnErrorResponse(StunMessage* response) override; @@ -91,56 +94,46 @@ class TurnAllocateRequest : public StunRequest { class TurnRefreshRequest : public StunRequest { public: - explicit TurnRefreshRequest(TurnPort* port); - void Prepare(StunMessage* request) override; + explicit TurnRefreshRequest(TurnPort* port, int lifetime = -1); void OnSent() override; void OnResponse(StunMessage* response) override; void OnErrorResponse(StunMessage* response) override; void OnTimeout() override; - void set_lifetime(int lifetime) { lifetime_ = lifetime; } private: TurnPort* port_; - int lifetime_; }; -class TurnCreatePermissionRequest : public StunRequest, - public sigslot::has_slots<> { +class TurnCreatePermissionRequest : public StunRequest { public: TurnCreatePermissionRequest(TurnPort* port, TurnEntry* entry, - const rtc::SocketAddress& ext_addr, - const std::string& remote_ufrag); - void Prepare(StunMessage* request) override; + const rtc::SocketAddress& ext_addr); + ~TurnCreatePermissionRequest() override; void OnSent() override; void OnResponse(StunMessage* response) override; void OnErrorResponse(StunMessage* response) 
override; void OnTimeout() override; private: - void OnEntryDestroyed(TurnEntry* entry); - TurnPort* port_; TurnEntry* entry_; rtc::SocketAddress ext_addr_; - std::string remote_ufrag_; }; -class TurnChannelBindRequest : public StunRequest, public sigslot::has_slots<> { +class TurnChannelBindRequest : public StunRequest { public: TurnChannelBindRequest(TurnPort* port, TurnEntry* entry, int channel_id, const rtc::SocketAddress& ext_addr); - void Prepare(StunMessage* request) override; + ~TurnChannelBindRequest() override; void OnSent() override; void OnResponse(StunMessage* response) override; void OnErrorResponse(StunMessage* response) override; void OnTimeout() override; private: - void OnEntryDestroyed(TurnEntry* entry); - TurnPort* port_; TurnEntry* entry_; int channel_id_; @@ -152,10 +145,8 @@ class TurnChannelBindRequest : public StunRequest, public sigslot::has_slots<> { class TurnEntry : public sigslot::has_slots<> { public: enum BindState { STATE_UNBOUND, STATE_BINDING, STATE_BOUND }; - TurnEntry(TurnPort* port, - int channel_id, - const rtc::SocketAddress& ext_addr, - const std::string remote_ufrag); + TurnEntry(TurnPort* port, Connection* conn, int channel_id); + ~TurnEntry(); TurnPort* port() { return port_; } @@ -166,15 +157,20 @@ class TurnEntry : public sigslot::has_slots<> { const rtc::SocketAddress& address() const { return ext_addr_; } BindState state() const { return state_; } - // If the destruction timestamp is set, that means destruction has been - // scheduled (will occur TURN_PERMISSION_TIMEOUT after it's scheduled). - absl::optional destruction_timestamp() { - return destruction_timestamp_; - } - void set_destruction_timestamp(int64_t destruction_timestamp) { - destruction_timestamp_.emplace(destruction_timestamp); - } - void reset_destruction_timestamp() { destruction_timestamp_.reset(); } + // Adds a new connection object to the list of connections that are associated + // with this entry. If prior to this call there were no connections being + // tracked (i.e. count goes from 0 -> 1), the internal safety flag is reset + // which cancels any potential pending deletion tasks. + void TrackConnection(Connection* conn); + + // Removes a connection from the list of tracked connections. + // * If `conn` was the last connection removed, the function returns a + // safety flag that's used to schedule the deletion of the entry after a + // timeout expires. If during this timeout `TrackConnection` is called, the + // flag will be reset and pending tasks associated with it, cancelled. + // * If `conn` was not the last connection, the return value will be nullptr. + rtc::scoped_refptr UntrackConnection( + Connection* conn); // Helper methods to send permission and channel bind requests. void SendCreatePermissionRequest(int delay); @@ -193,67 +189,75 @@ class TurnEntry : public sigslot::has_slots<> { void OnChannelBindError(StunMessage* response, int code); void OnChannelBindTimeout(); // Signal sent when TurnEntry is destroyed. - sigslot::signal1 SignalDestroyed; - - const std::string& get_remote_ufrag() const { return remote_ufrag_; } - void set_remote_ufrag(const std::string& remote_ufrag) { - remote_ufrag_ = remote_ufrag; - } + webrtc::CallbackList destroyed_callback_list_; private: TurnPort* port_; int channel_id_; rtc::SocketAddress ext_addr_; BindState state_; - // An unset value indicates that this entry is scheduled to be destroyed. It - // is also used as an ID of the event scheduling. 
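// [Editor's note, not part of the upstream patch] TrackConnection() /
// UntrackConnection() above replace the destruction-timestamp bookkeeping with a
// list of tracked connections plus a task-safety flag: the flag is re-armed when
// the connection count goes from 0 to 1, and it is handed back to the caller (to
// guard a delayed delete) only when the last connection is removed. A rough
// standard-library sketch of that counting logic, with a shared bool standing in
// for PendingTaskSafetyFlag (all names invented):

#include <algorithm>
#include <cassert>
#include <memory>
#include <vector>

struct Connection {};
using AliveFlag = std::shared_ptr<bool>;

class EntrySketch {
 public:
  void TrackConnection(Connection* conn) {
    if (connections_.empty()) {
      // Back to at least one connection: re-arm, cancelling any pending delete.
      alive_ = std::make_shared<bool>(true);
    }
    connections_.push_back(conn);
  }

  // Returns a non-null flag only when the last connection goes away; the caller
  // would use it to guard a deletion task scheduled after a timeout.
  AliveFlag UntrackConnection(Connection* conn) {
    connections_.erase(
        std::find(connections_.begin(), connections_.end(), conn));
    return connections_.empty() ? alive_ : nullptr;
  }

 private:
  std::vector<Connection*> connections_;
  AliveFlag alive_ = std::make_shared<bool>(true);
};

int main() {
  EntrySketch entry;
  Connection a, b;
  entry.TrackConnection(&a);
  entry.TrackConnection(&b);
  AliveFlag first = entry.UntrackConnection(&a);  // nullptr: one still tracked
  AliveFlag last = entry.UntrackConnection(&b);   // non-null: arm delayed delete
  assert(first == nullptr && last != nullptr);
  (void)first;
  (void)last;
}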
When the destruction event - // actually fires, the TurnEntry will be destroyed only if the timestamp here - // matches the one in the firing event. - absl::optional destruction_timestamp_; - - std::string remote_ufrag_; + // List of associated connection instances to keep track of how many and + // which connections are associated with this entry. Once this is empty, + // the entry can be deleted. + std::vector connections_; + webrtc::ScopedTaskSafety task_safety_; }; -TurnPort::TurnPort(rtc::Thread* thread, +TurnPort::TurnPort(TaskQueueBase* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, rtc::AsyncPacketSocket* socket, - const std::string& username, - const std::string& password, + absl::string_view username, + absl::string_view password, const ProtocolAddress& server_address, const RelayCredentials& credentials, int server_priority, - webrtc::TurnCustomizer* customizer) - : Port(thread, RELAY_PORT_TYPE, factory, network, username, password), + const std::vector& tls_alpn_protocols, + const std::vector& tls_elliptic_curves, + webrtc::TurnCustomizer* customizer, + rtc::SSLCertificateVerifier* tls_cert_verifier, + const webrtc::FieldTrialsView* field_trials) + : Port(thread, + RELAY_PORT_TYPE, + factory, + network, + username, + password, + field_trials), server_address_(server_address), - tls_cert_verifier_(nullptr), + tls_alpn_protocols_(tls_alpn_protocols), + tls_elliptic_curves_(tls_elliptic_curves), + tls_cert_verifier_(tls_cert_verifier), credentials_(credentials), socket_(socket), error_(0), stun_dscp_value_(rtc::DSCP_NO_CHANGE), - request_manager_(thread), + request_manager_( + thread, + [this](const void* data, size_t size, StunRequest* request) { + OnSendStunPacket(data, size, request); + }), next_channel_number_(TURN_CHANNEL_NUMBER_START), state_(STATE_CONNECTING), server_priority_(server_priority), allocate_mismatch_retries_(0), - turn_customizer_(customizer) { - request_manager_.SignalSendPacket.connect(this, &TurnPort::OnSendStunPacket); -} + turn_customizer_(customizer) {} -TurnPort::TurnPort(rtc::Thread* thread, +TurnPort::TurnPort(TaskQueueBase* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, uint16_t min_port, uint16_t max_port, - const std::string& username, - const std::string& password, + absl::string_view username, + absl::string_view password, const ProtocolAddress& server_address, const RelayCredentials& credentials, int server_priority, const std::vector& tls_alpn_protocols, const std::vector& tls_elliptic_curves, webrtc::TurnCustomizer* customizer, - rtc::SSLCertificateVerifier* tls_cert_verifier) + rtc::SSLCertificateVerifier* tls_cert_verifier, + const webrtc::FieldTrialsView* field_trials) : Port(thread, RELAY_PORT_TYPE, factory, @@ -261,23 +265,26 @@ TurnPort::TurnPort(rtc::Thread* thread, min_port, max_port, username, - password), + password, + field_trials), server_address_(server_address), tls_alpn_protocols_(tls_alpn_protocols), tls_elliptic_curves_(tls_elliptic_curves), tls_cert_verifier_(tls_cert_verifier), credentials_(credentials), - socket_(NULL), + socket_(nullptr), error_(0), stun_dscp_value_(rtc::DSCP_NO_CHANGE), - request_manager_(thread), + request_manager_( + thread, + [this](const void* data, size_t size, StunRequest* request) { + OnSendStunPacket(data, size, request); + }), next_channel_number_(TURN_CHANNEL_NUMBER_START), state_(STATE_CONNECTING), server_priority_(server_priority), allocate_mismatch_retries_(0), - turn_customizer_(customizer) 
{ - request_manager_.SignalSendPacket.connect(this, &TurnPort::OnSendStunPacket); -} + turn_customizer_(customizer) {} TurnPort::~TurnPort() { // TODO(juberti): Should this even be necessary? @@ -288,9 +295,11 @@ TurnPort::~TurnPort() { Release(); } - while (!entries_.empty()) { - DestroyEntry(entries_.front()); - } + entries_.clear(); + + if (socket_) + socket_->UnsubscribeClose(this); + if (!SharedSocket()) { delete socket_; } @@ -312,8 +321,8 @@ void TurnPort::SetTlsCertPolicy(TlsCertPolicy tls_cert_policy) { tls_cert_policy_ = tls_cert_policy; } -void TurnPort::SetTurnLoggingId(const std::string& turn_logging_id) { - turn_logging_id_ = turn_logging_id; +void TurnPort::SetTurnLoggingId(absl::string_view turn_logging_id) { + turn_logging_id_ = std::string(turn_logging_id); } std::vector TurnPort::GetTlsAlpnProtocols() const { @@ -338,7 +347,7 @@ void TurnPort::PrepareAddress() { server_address_.address.SetPort(TURN_DEFAULT_PORT); } - if (!AllowedTurnPort(server_address_.address.port())) { + if (!AllowedTurnPort(server_address_.address.port(), &field_trials())) { // This can only happen after a 300 ALTERNATE SERVER, since the port can't // be created with a disallowed port number. RTC_LOG(LS_ERROR) << "Attempt to start allocation with disallowed port# " @@ -436,7 +445,9 @@ bool TurnPort::CreateTurnClientSocket() { if (server_address_.proto == PROTO_TCP || server_address_.proto == PROTO_TLS) { socket_->SignalConnect.connect(this, &TurnPort::OnSocketConnect); - socket_->SignalClose.connect(this, &TurnPort::OnSocketClose); + socket_->SubscribeClose(this, [this](rtc::AsyncPacketSocket* s, int err) { + OnSocketClose(s, err); + }); } else { state_ = STATE_CONNECTED; } @@ -527,12 +538,15 @@ void TurnPort::OnAllocateMismatch() { << ": Allocating a new socket after " "STUN_ERROR_ALLOCATION_MISMATCH, retry: " << allocate_mismatch_retries_ + 1; + + socket_->UnsubscribeClose(this); + if (SharedSocket()) { ResetSharedSocket(); } else { delete socket_; } - socket_ = NULL; + socket_ = nullptr; ResetNonce(); PrepareAddress(); @@ -557,7 +571,7 @@ Connection* TurnPort::CreateConnection(const Candidate& remote_candidate, return nullptr; } - // A TURN port will have two candiates, STUN and TURN. STUN may not + // A TURN port will have two candidates, STUN and TURN. STUN may not // present in all cases. If present stun candidate will be added first // and TURN candidate later. for (size_t index = 0; index < Candidates().size(); ++index) { @@ -565,15 +579,13 @@ Connection* TurnPort::CreateConnection(const Candidate& remote_candidate, if (local_candidate.type() == RELAY_PORT_TYPE && local_candidate.address().family() == remote_candidate.address().family()) { + ProxyConnection* conn = + new ProxyConnection(NewWeakPtr(), index, remote_candidate); // Create an entry, if needed, so we can get our permissions set up // correctly. - if (CreateOrRefreshEntry(remote_candidate.address(), next_channel_number_, - remote_candidate.username())) { - // An entry was created. + if (CreateOrRefreshEntry(conn, next_channel_number_)) { next_channel_number_++; } - ProxyConnection* conn = - new ProxyConnection(this, index, remote_candidate); AddOrReplaceConnection(conn); return conn; } @@ -628,11 +640,7 @@ int TurnPort::SendTo(const void* data, bool payload) { // Try to find an entry for this specific address; we should have one. 
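The TrackConnection/UntrackConnection pair above replaces the old destruction-timestamp dance: an entry now only records which connections still reference it, and removing the last one yields a token that guards the delayed deletion. A minimal stand-alone sketch of that shape, using a plain std::shared_ptr<bool> as a stand-in for WebRTC's PendingTaskSafetyFlag (names and types below are illustrative, not the library API):

#include <algorithm>
#include <memory>
#include <vector>

struct Connection {};  // stand-in for cricket::Connection

class Entry {
 public:
  // Re-associating a connection cancels any pending delayed deletion by
  // invalidating the token that was handed out earlier.
  void TrackConnection(Connection* conn) {
    if (connections_.empty() && token_) {
      *token_ = false;  // the queued "erase this entry" task becomes a no-op
      token_ = nullptr;
    }
    connections_.push_back(conn);
  }

  // Returns a token only when the last connection detaches; the caller uses
  // it to guard the delayed erase it schedules.
  std::shared_ptr<bool> UntrackConnection(Connection* conn) {
    connections_.erase(
        std::find(connections_.begin(), connections_.end(), conn));
    if (!connections_.empty()) return nullptr;
    token_ = std::make_shared<bool>(true);
    return token_;
  }

 private:
  std::vector<Connection*> connections_;
  std::shared_ptr<bool> token_;  // stand-in for the pending-task safety flag
};

The caller only schedules the delayed erase when a token comes back, and the queued task checks the token before touching anything, which is the same structure as the SafeTask call in HandleConnectionDestroyed further down.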
TurnEntry* entry = FindEntry(addr); - if (!entry) { - RTC_LOG(LS_ERROR) << "Did not find the TurnEntry for address " - << addr.ToSensitiveString(); - return 0; - } + RTC_DCHECK(entry); if (!ready()) { error_ = ENOTCONN; @@ -644,6 +652,7 @@ int TurnPort::SendTo(const void* data, CopyPortInformationToPacketInfo(&modified_options.info_signaled_after_sent); int sent = entry->Send(data, size, payload, modified_options); if (sent <= 0) { + error_ = socket_->GetError(); return SOCKET_ERROR; } @@ -740,7 +749,7 @@ void TurnPort::OnReadyToSend(rtc::AsyncPacketSocket* socket) { } } -bool TurnPort::SupportsProtocol(const std::string& protocol) const { +bool TurnPort::SupportsProtocol(absl::string_view protocol) const { // Turn port only connects to UDP candidates. return protocol == UDP_PROTOCOL_NAME; } @@ -818,10 +827,6 @@ void TurnPort::ResolveTurnAddress(const rtc::SocketAddress& address) { "TURN host lookup received error."); return; } - // Signal needs both resolved and unresolved address. After signal is sent - // we can copy resolved address back into `server_address_`. - SignalResolvedServerAddress(this, server_address_.address, - resolved_address); server_address_.address = resolved_address; PrepareAddress(); }); @@ -862,17 +867,17 @@ void TurnPort::OnAllocateSuccess(const rtc::SocketAddress& address, related_address, // Related address. UDP_PROTOCOL_NAME, ProtoToString(server_address_.proto), // The first hop protocol. - "", // TCP canddiate type, empty for turn candidates. + "", // TCP candidate type, empty for turn candidates. RELAY_PORT_TYPE, GetRelayPreference(server_address_.proto), - server_priority_, ReconstructedServerUrl(false /* use_hostname */), - true); + server_priority_, ReconstructedServerUrl(), true); } -void TurnPort::OnAllocateError(int error_code, const std::string& reason) { +void TurnPort::OnAllocateError(int error_code, absl::string_view reason) { // We will send SignalPortError asynchronously as this can be sent during // port initialization. This way it will not be blocking other port // creation. - thread()->Post(RTC_FROM_HERE, this, MSG_ALLOCATE_ERROR); + thread()->PostTask( + SafeTask(task_safety_.flag(), [this] { SignalPortError(this); })); std::string address = GetLocalAddress().HostAsSensitiveURIString(); int port = GetLocalAddress().port(); if (server_address_.proto == PROTO_TCP && @@ -881,16 +886,16 @@ void TurnPort::OnAllocateError(int error_code, const std::string& reason) { port = 0; } SignalCandidateError( - this, IceCandidateErrorEvent( - address, port, ReconstructedServerUrl(true /* use_hostname */), - error_code, reason)); + this, IceCandidateErrorEvent(address, port, ReconstructedServerUrl(), + error_code, reason)); } void TurnPort::OnRefreshError() { // Need to clear the requests asynchronously because otherwise, the refresh // request may be deleted twice: once at the end of the message processing // and the other in HandleRefreshError(). - thread()->Post(RTC_FROM_HERE, this, MSG_REFRESH_ERROR); + thread()->PostTask( + SafeTask(task_safety_.flag(), [this] { HandleRefreshError(); })); } void TurnPort::HandleRefreshError() { @@ -907,8 +912,7 @@ void TurnPort::Release() { request_manager_.Clear(); // Send refresh with lifetime 0. - TurnRefreshRequest* req = new TurnRefreshRequest(this); - req->set_lifetime(0); + TurnRefreshRequest* req = new TurnRefreshRequest(this, 0); SendRequest(req, 0); state_ = STATE_RECEIVEONLY; @@ -922,11 +926,10 @@ void TurnPort::Close() { // Stop the port from creating new connections. 
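The error paths above no longer round-trip through MSG_* ids and an OnMessage() switch; they post a lambda guarded by the port's task-safety flag, so a callback that is still queued when the port goes away simply does nothing. A compact stand-alone illustration of that guard using std types only; the queue and every name here are hypothetical, they merely mimic what SafeTask and ScopedTaskSafety provide:

#include <functional>
#include <memory>
#include <queue>

// Hypothetical single-threaded task queue, just for the illustration.
std::queue<std::function<void()>> g_queue;
void PostTask(std::function<void()> task) { g_queue.push(std::move(task)); }

// Wraps a closure so it becomes a no-op once `alive` is flipped to false.
std::function<void()> GuardedTask(std::shared_ptr<bool> alive,
                                  std::function<void()> task) {
  return [alive, task = std::move(task)] {
    if (*alive) task();
  };
}

class Port {
 public:
  ~Port() { *alive_ = false; }  // cancels anything still sitting in the queue

  void OnAllocateError() {
    // Deliver the error asynchronously; harmless even if the Port is gone by
    // the time the queue drains, because the guard skips the body.
    PostTask(GuardedTask(alive_, [this] { SignalPortError(); }));
  }

  void SignalPortError() { /* notify observers */ }

 private:
  std::shared_ptr<bool> alive_ = std::make_shared<bool>(true);
};

int main() {
  {
    Port port;
    port.OnAllocateError();
  }  // port destroyed before the queue drains
  while (!g_queue.empty()) {  // draining now runs only the guard, not the body
    g_queue.front()();
    g_queue.pop();
  }
}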
state_ = STATE_DISCONNECTED; // Delete all existing connections; stop sending data. - for (auto kv : connections()) { - kv.second->Destroy(); + DestroyAllConnections(); + if (callbacks_for_test_) { + callbacks_for_test_->OnTurnPortClosed(); } - - SignalTurnPortClosed(this); } rtc::DiffServCodePoint TurnPort::StunDscpValue() const { @@ -934,7 +937,8 @@ rtc::DiffServCodePoint TurnPort::StunDscpValue() const { } // static -bool TurnPort::AllowedTurnPort(int port) { +bool TurnPort::AllowedTurnPort(int port, + const webrtc::FieldTrialsView* field_trials) { // Port 53, 80 and 443 are used for existing deployments. // Ports above 1024 are assumed to be OK to use. if (port == 53 || port == 80 || port == 443 || port >= 1024) { @@ -942,45 +946,27 @@ bool TurnPort::AllowedTurnPort(int port) { } // Allow any port if relevant field trial is set. This allows disabling the // check. - if (webrtc::field_trial::IsEnabled("WebRTC-Turn-AllowSystemPorts")) { + if (field_trials && field_trials->IsEnabled("WebRTC-Turn-AllowSystemPorts")) { return true; } return false; } -void TurnPort::OnMessage(rtc::Message* message) { - switch (message->message_id) { - case MSG_ALLOCATE_ERROR: - SignalPortError(this); - break; - case MSG_ALLOCATE_MISMATCH: - OnAllocateMismatch(); - break; - case MSG_REFRESH_ERROR: - HandleRefreshError(); - break; - case MSG_TRY_ALTERNATE_SERVER: - if (server_address().proto == PROTO_UDP) { - // Send another allocate request to alternate server, with the received - // realm and nonce values. - SendRequest(new TurnAllocateRequest(this), 0); - } else { - // Since it's TCP, we have to delete the connected socket and reconnect - // with the alternate server. PrepareAddress will send stun binding once - // the new socket is connected. - RTC_DCHECK(server_address().proto == PROTO_TCP || - server_address().proto == PROTO_TLS); - RTC_DCHECK(!SharedSocket()); - delete socket_; - socket_ = NULL; - PrepareAddress(); - } - break; - case MSG_ALLOCATION_RELEASED: - Close(); - break; - default: - Port::OnMessage(message); +void TurnPort::TryAlternateServer() { + if (server_address().proto == PROTO_UDP) { + // Send another allocate request to alternate server, with the received + // realm and nonce values. + SendRequest(new TurnAllocateRequest(this), 0); + } else { + // Since it's TCP, we have to delete the connected socket and reconnect + // with the alternate server. PrepareAddress will send stun binding once + // the new socket is connected. 
+ RTC_DCHECK(server_address().proto == PROTO_TCP || + server_address().proto == PROTO_TLS); + RTC_DCHECK(!SharedSocket()); + delete socket_; + socket_ = nullptr; + PrepareAddress(); } } @@ -1160,7 +1146,7 @@ bool TurnPort::UpdateNonce(StunMessage* response) { "stale nonce error response."; return false; } - set_realm(realm_attr->GetString()); + set_realm(realm_attr->string_view()); const StunByteStringAttribute* nonce_attr = response->GetByteString(STUN_ATTR_NONCE); @@ -1169,7 +1155,7 @@ bool TurnPort::UpdateNonce(StunMessage* response) { "stale nonce error response."; return false; } - set_nonce(nonce_attr->GetString()); + set_nonce(nonce_attr->string_view()); return true; } @@ -1180,111 +1166,67 @@ void TurnPort::ResetNonce() { } bool TurnPort::HasPermission(const rtc::IPAddress& ipaddr) const { - return absl::c_any_of(entries_, [&ipaddr](const TurnEntry* e) { + return absl::c_any_of(entries_, [&ipaddr](const auto& e) { return e->address().ipaddr() == ipaddr; }); } TurnEntry* TurnPort::FindEntry(const rtc::SocketAddress& addr) const { auto it = absl::c_find_if( - entries_, [&addr](const TurnEntry* e) { return e->address() == addr; }); - return (it != entries_.end()) ? *it : NULL; + entries_, [&addr](const auto& e) { return e->address() == addr; }); + return (it != entries_.end()) ? it->get() : nullptr; } TurnEntry* TurnPort::FindEntry(int channel_id) const { - auto it = absl::c_find_if(entries_, [&channel_id](const TurnEntry* e) { + auto it = absl::c_find_if(entries_, [&channel_id](const auto& e) { return e->channel_id() == channel_id; }); - return (it != entries_.end()) ? *it : NULL; -} - -bool TurnPort::EntryExists(TurnEntry* e) { - return absl::c_linear_search(entries_, e); -} - -bool TurnPort::CreateOrRefreshEntry(const rtc::SocketAddress& addr, - int channel_number) { - return CreateOrRefreshEntry(addr, channel_number, ""); + return (it != entries_.end()) ? it->get() : nullptr; } -bool TurnPort::CreateOrRefreshEntry(const rtc::SocketAddress& addr, - int channel_number, - const std::string& remote_ufrag) { - TurnEntry* entry = FindEntry(addr); +bool TurnPort::CreateOrRefreshEntry(Connection* conn, int channel_number) { + const Candidate& remote_candidate = conn->remote_candidate(); + TurnEntry* entry = FindEntry(remote_candidate.address()); if (entry == nullptr) { - entry = new TurnEntry(this, channel_number, addr, remote_ufrag); - entries_.push_back(entry); + entries_.push_back(std::make_unique(this, conn, channel_number)); return true; - } else { - if (entry->destruction_timestamp()) { - // Destruction should have only been scheduled (indicated by - // destruction_timestamp being set) if there were no connections using - // this address. - RTC_DCHECK(!GetConnection(addr)); - // Resetting the destruction timestamp will ensure that any queued - // destruction tasks, when executed, will see that the timestamp doesn't - // match and do nothing. We do this because (currently) there's not a - // convenient way to cancel queued tasks. - entry->reset_destruction_timestamp(); - } else { - // The only valid reason for destruction not being scheduled is that - // there's still one connection. - RTC_DCHECK(GetConnection(addr)); - } - - if (webrtc::field_trial::IsEnabled("WebRTC-TurnAddMultiMapping")) { - if (entry->get_remote_ufrag() != remote_ufrag) { - RTC_LOG(LS_INFO) << ToString() - << ": remote ufrag updated." 
- " Sending new permission request"; - entry->set_remote_ufrag(remote_ufrag); - entry->SendCreatePermissionRequest(0); - } - } } - return false; -} -void TurnPort::DestroyEntry(TurnEntry* entry) { - RTC_DCHECK(entry != NULL); - entry->SignalDestroyed(entry); - entries_.remove(entry); - delete entry; -} + // Associate this connection object with an existing entry. If the entry + // has been scheduled for deletion, this will cancel that task. + entry->TrackConnection(conn); -void TurnPort::DestroyEntryIfNotCancelled(TurnEntry* entry, int64_t timestamp) { - if (!EntryExists(entry)) { - return; - } - // The destruction timestamp is used to manage pending destructions. Proceed - // with destruction if it's set, and matches the timestamp from the posted - // task. Note that CreateOrRefreshEntry will unset the timestamp, canceling - // destruction. - if (entry->destruction_timestamp() && - timestamp == *entry->destruction_timestamp()) { - DestroyEntry(entry); - } + return false; } void TurnPort::HandleConnectionDestroyed(Connection* conn) { // Schedule an event to destroy TurnEntry for the connection, which is - // already destroyed. + // being destroyed. const rtc::SocketAddress& remote_address = conn->remote_candidate().address(); + // We should always have an entry for this connection. TurnEntry* entry = FindEntry(remote_address); - RTC_DCHECK(entry != NULL); - ScheduleEntryDestruction(entry); -} - -void TurnPort::ScheduleEntryDestruction(TurnEntry* entry) { - RTC_DCHECK(!entry->destruction_timestamp().has_value()); - int64_t timestamp = rtc::TimeMillis(); - entry->set_destruction_timestamp(timestamp); - thread()->PostDelayedTask(ToQueuedTask(task_safety_.flag(), - [this, entry, timestamp] { - DestroyEntryIfNotCancelled( - entry, timestamp); - }), - TURN_PERMISSION_TIMEOUT); + rtc::scoped_refptr flag = + entry->UntrackConnection(conn); + if (flag) { + // An assumption here is that the lifetime flag for the entry, is within + // the lifetime scope of `task_safety_` and therefore use of `this` is safe. + // If an entry gets reused (associated with a new connection) while this + // task is pending, the entry will reset the safety flag, thus cancel this + // task. + thread()->PostDelayedTask(SafeTask(flag, + [this, entry] { + entries_.erase(absl::c_find_if( + entries_, [entry](const auto& e) { + return e.get() == entry; + })); + }), + kTurnPermissionTimeout); + } +} + +void TurnPort::SetCallbacksForTest(CallbacksForTest* callbacks) { + RTC_DCHECK(!callbacks_for_test_); + callbacks_for_test_ = callbacks; } bool TurnPort::SetEntryChannelId(const rtc::SocketAddress& address, @@ -1297,7 +1239,7 @@ bool TurnPort::SetEntryChannelId(const rtc::SocketAddress& address, return true; } -std::string TurnPort::ReconstructedServerUrl(bool use_hostname) { +std::string TurnPort::ReconstructedServerUrl() { // draft-petithuguenin-behave-turn-uris-01 // turnURI = scheme ":" turn-host [ ":" turn-port ] // [ "?transport=" transport ] @@ -1320,10 +1262,8 @@ std::string TurnPort::ReconstructedServerUrl(bool use_hostname) { break; } rtc::StringBuilder url; - url << scheme << ":" - << (use_hostname ? 
server_address_.address.hostname() - : server_address_.address.ipaddr().ToString()) - << ":" << server_address_.address.port() << "?transport=" << transport; + url << scheme << ":" << server_address_.address.hostname() << ":" + << server_address_.address.port() << "?transport=" << transport; return url.Release(); } @@ -1354,20 +1294,21 @@ void TurnPort::MaybeAddTurnLoggingId(StunMessage* msg) { } TurnAllocateRequest::TurnAllocateRequest(TurnPort* port) - : StunRequest(new TurnMessage()), port_(port) {} - -void TurnAllocateRequest::Prepare(StunMessage* request) { + : StunRequest(port->request_manager(), + std::make_unique(TURN_ALLOCATE_REQUEST)), + port_(port) { + StunMessage* message = mutable_msg(); // Create the request as indicated in RFC 5766, Section 6.1. - request->SetType(TURN_ALLOCATE_REQUEST); + RTC_DCHECK_EQ(message->type(), TURN_ALLOCATE_REQUEST); auto transport_attr = StunAttribute::CreateUInt32(STUN_ATTR_REQUESTED_TRANSPORT); transport_attr->SetValue(IPPROTO_UDP << 24); - request->AddAttribute(std::move(transport_attr)); + message->AddAttribute(std::move(transport_attr)); if (!port_->hash().empty()) { - port_->AddRequestAuthInfo(request); + port_->AddRequestAuthInfo(message); } - port_->MaybeAddTurnLoggingId(request); - port_->TurnCustomizerMaybeModifyOutgoingStunMessage(request); + port_->MaybeAddTurnLoggingId(message); + port_->TurnCustomizerMaybeModifyOutgoingStunMessage(message); } void TurnAllocateRequest::OnSent() { @@ -1435,12 +1376,13 @@ void TurnAllocateRequest::OnErrorResponse(StunMessage* response) { case STUN_ERROR_TRY_ALTERNATE: OnTryAlternate(response, error_code); break; - case STUN_ERROR_ALLOCATION_MISMATCH: + case STUN_ERROR_ALLOCATION_MISMATCH: { // We must handle this error async because trying to delete the socket in // OnErrorResponse will cause a deadlock on the socket. - port_->thread()->Post(RTC_FROM_HERE, port_, - TurnPort::MSG_ALLOCATE_MISMATCH); - break; + TurnPort* port = port_; + port->thread()->PostTask(SafeTask( + port->task_safety_.flag(), [port] { port->OnAllocateMismatch(); })); + } break; default: RTC_LOG(LS_WARNING) << port_->ToString() << ": Received TURN allocate error response, id=" @@ -1477,7 +1419,7 @@ void TurnAllocateRequest::OnAuthChallenge(StunMessage* response, int code) { "allocate unauthorized response."; return; } - port_->set_realm(realm_attr->GetString()); + port_->set_realm(realm_attr->string_view()); const StunByteStringAttribute* nonce_attr = response->GetByteString(STUN_ATTR_NONCE); @@ -1487,7 +1429,7 @@ void TurnAllocateRequest::OnAuthChallenge(StunMessage* response, int code) { "allocate unauthorized response."; return; } - port_->set_nonce(nonce_attr->GetString()); + port_->set_nonce(nonce_attr->string_view()); // Send another allocate request, with the received realm and nonce values. 
port_->SendRequest(new TurnAllocateRequest(port_), 0); @@ -1522,7 +1464,7 @@ void TurnAllocateRequest::OnTryAlternate(StunMessage* response, int code) { RTC_LOG(LS_INFO) << port_->ToString() << ": Applying STUN_ATTR_REALM attribute in " "try alternate error response."; - port_->set_realm(realm_attr->GetString()); + port_->set_realm(realm_attr->string_view()); } const StunByteStringAttribute* nonce_attr = @@ -1531,30 +1473,32 @@ void TurnAllocateRequest::OnTryAlternate(StunMessage* response, int code) { RTC_LOG(LS_INFO) << port_->ToString() << ": Applying STUN_ATTR_NONCE attribute in " "try alternate error response."; - port_->set_nonce(nonce_attr->GetString()); + port_->set_nonce(nonce_attr->string_view()); } // For TCP, we can't close the original Tcp socket during handling a 300 as // we're still inside that socket's event handler. Doing so will cause // deadlock. - port_->thread()->Post(RTC_FROM_HERE, port_, - TurnPort::MSG_TRY_ALTERNATE_SERVER); + TurnPort* port = port_; + port->thread()->PostTask(SafeTask(port->task_safety_.flag(), + [port] { port->TryAlternateServer(); })); } -TurnRefreshRequest::TurnRefreshRequest(TurnPort* port) - : StunRequest(new TurnMessage()), port_(port), lifetime_(-1) {} - -void TurnRefreshRequest::Prepare(StunMessage* request) { +TurnRefreshRequest::TurnRefreshRequest(TurnPort* port, int lifetime /*= -1*/) + : StunRequest(port->request_manager(), + std::make_unique(TURN_REFRESH_REQUEST)), + port_(port) { + StunMessage* message = mutable_msg(); // Create the request as indicated in RFC 5766, Section 7.1. // No attributes need to be included. - request->SetType(TURN_REFRESH_REQUEST); - if (lifetime_ > -1) { - request->AddAttribute( - std::make_unique(STUN_ATTR_LIFETIME, lifetime_)); + RTC_DCHECK_EQ(message->type(), TURN_REFRESH_REQUEST); + if (lifetime > -1) { + message->AddAttribute( + std::make_unique(STUN_ATTR_LIFETIME, lifetime)); } - port_->AddRequestAuthInfo(request); - port_->TurnCustomizerMaybeModifyOutgoingStunMessage(request); + port_->AddRequestAuthInfo(message); + port_->TurnCustomizerMaybeModifyOutgoingStunMessage(message); } void TurnRefreshRequest::OnSent() { @@ -1587,11 +1531,14 @@ void TurnRefreshRequest::OnResponse(StunMessage* response) { } else { // If we scheduled a refresh with lifetime 0, we're releasing this // allocation; see TurnPort::Release. 
- port_->thread()->Post(RTC_FROM_HERE, port_, - TurnPort::MSG_ALLOCATION_RELEASED); + TurnPort* port = port_; + port->thread()->PostTask( + SafeTask(port->task_safety_.flag(), [port] { port->Close(); })); } - port_->SignalTurnRefreshResult(port_, TURN_SUCCESS_RESULT_CODE); + if (port_->callbacks_for_test_) { + port_->callbacks_for_test_->OnTurnRefreshResult(TURN_SUCCESS_RESULT_CODE); + } } void TurnRefreshRequest::OnErrorResponse(StunMessage* response) { @@ -1608,7 +1555,9 @@ void TurnRefreshRequest::OnErrorResponse(StunMessage* response) { << rtc::hex_encode(id()) << ", code=" << error_code << ", rtt=" << Elapsed(); port_->OnRefreshError(); - port_->SignalTurnRefreshResult(port_, error_code); + if (port_->callbacks_for_test_) { + port_->callbacks_for_test_->OnTurnRefreshResult(error_code); + } } } @@ -1621,28 +1570,31 @@ void TurnRefreshRequest::OnTimeout() { TurnCreatePermissionRequest::TurnCreatePermissionRequest( TurnPort* port, TurnEntry* entry, - const rtc::SocketAddress& ext_addr, - const std::string& remote_ufrag) - : StunRequest(new TurnMessage()), + const rtc::SocketAddress& ext_addr) + : StunRequest( + port->request_manager(), + std::make_unique(TURN_CREATE_PERMISSION_REQUEST)), port_(port), entry_(entry), - ext_addr_(ext_addr), - remote_ufrag_(remote_ufrag) { - entry_->SignalDestroyed.connect( - this, &TurnCreatePermissionRequest::OnEntryDestroyed); -} - -void TurnCreatePermissionRequest::Prepare(StunMessage* request) { + ext_addr_(ext_addr) { + RTC_DCHECK(entry_); + entry_->destroyed_callback_list_.AddReceiver(this, [this](TurnEntry* entry) { + RTC_DCHECK(entry_ == entry); + entry_ = nullptr; + }); + StunMessage* message = mutable_msg(); // Create the request as indicated in RFC5766, Section 9.1. - request->SetType(TURN_CREATE_PERMISSION_REQUEST); - request->AddAttribute(std::make_unique( + RTC_DCHECK_EQ(message->type(), TURN_CREATE_PERMISSION_REQUEST); + message->AddAttribute(std::make_unique( STUN_ATTR_XOR_PEER_ADDRESS, ext_addr_)); - if (webrtc::field_trial::IsEnabled("WebRTC-TurnAddMultiMapping")) { - request->AddAttribute(std::make_unique( - STUN_ATTR_MULTI_MAPPING, remote_ufrag_)); + port_->AddRequestAuthInfo(message); + port_->TurnCustomizerMaybeModifyOutgoingStunMessage(message); +} + +TurnCreatePermissionRequest::~TurnCreatePermissionRequest() { + if (entry_) { + entry_->destroyed_callback_list_.RemoveReceivers(this); } - port_->AddRequestAuthInfo(request); - port_->TurnCustomizerMaybeModifyOutgoingStunMessage(request); } void TurnCreatePermissionRequest::OnSent() { @@ -1685,34 +1637,37 @@ void TurnCreatePermissionRequest::OnTimeout() { } } -void TurnCreatePermissionRequest::OnEntryDestroyed(TurnEntry* entry) { - RTC_DCHECK(entry_ == entry); - entry_ = NULL; -} - TurnChannelBindRequest::TurnChannelBindRequest( TurnPort* port, TurnEntry* entry, int channel_id, const rtc::SocketAddress& ext_addr) - : StunRequest(new TurnMessage()), + : StunRequest(port->request_manager(), + std::make_unique(TURN_CHANNEL_BIND_REQUEST)), port_(port), entry_(entry), channel_id_(channel_id), ext_addr_(ext_addr) { - entry_->SignalDestroyed.connect(this, - &TurnChannelBindRequest::OnEntryDestroyed); -} - -void TurnChannelBindRequest::Prepare(StunMessage* request) { + RTC_DCHECK(entry_); + entry_->destroyed_callback_list_.AddReceiver(this, [this](TurnEntry* entry) { + RTC_DCHECK(entry_ == entry); + entry_ = nullptr; + }); + StunMessage* message = mutable_msg(); // Create the request as indicated in RFC5766, Section 11.1. 
- request->SetType(TURN_CHANNEL_BIND_REQUEST); - request->AddAttribute(std::make_unique( + RTC_DCHECK_EQ(message->type(), TURN_CHANNEL_BIND_REQUEST); + message->AddAttribute(std::make_unique( STUN_ATTR_CHANNEL_NUMBER, channel_id_ << 16)); - request->AddAttribute(std::make_unique( + message->AddAttribute(std::make_unique( STUN_ATTR_XOR_PEER_ADDRESS, ext_addr_)); - port_->AddRequestAuthInfo(request); - port_->TurnCustomizerMaybeModifyOutgoingStunMessage(request); + port_->AddRequestAuthInfo(message); + port_->TurnCustomizerMaybeModifyOutgoingStunMessage(message); +} + +TurnChannelBindRequest::~TurnChannelBindRequest() { + if (entry_) { + entry_->destroyed_callback_list_.RemoveReceivers(this); + } } void TurnChannelBindRequest::OnSent() { @@ -1736,10 +1691,10 @@ void TurnChannelBindRequest::OnResponse(StunMessage* response) { // threshold. The channel binding has a longer lifetime, but // this is the easiest way to keep both the channel and the // permission from expiring. - int delay = TURN_PERMISSION_TIMEOUT - 60000; - entry_->SendChannelBindRequest(delay); + TimeDelta delay = kTurnPermissionTimeout - TimeDelta::Minutes(1); + entry_->SendChannelBindRequest(delay.ms()); RTC_LOG(LS_INFO) << port_->ToString() << ": Scheduled channel bind in " - << delay << "ms."; + << delay.ms() << "ms."; } } @@ -1762,28 +1717,37 @@ void TurnChannelBindRequest::OnTimeout() { } } -void TurnChannelBindRequest::OnEntryDestroyed(TurnEntry* entry) { - RTC_DCHECK(entry_ == entry); - entry_ = NULL; -} - -TurnEntry::TurnEntry(TurnPort* port, - int channel_id, - const rtc::SocketAddress& ext_addr, - const std::string remote_ufrag) +TurnEntry::TurnEntry(TurnPort* port, Connection* conn, int channel_id) : port_(port), channel_id_(channel_id), - ext_addr_(ext_addr), + ext_addr_(conn->remote_candidate().address()), state_(STATE_UNBOUND), - remote_ufrag_(remote_ufrag) { + connections_({conn}) { // Creating permission for `ext_addr_`. SendCreatePermissionRequest(0); } +TurnEntry::~TurnEntry() { + destroyed_callback_list_.Send(this); +} + +void TurnEntry::TrackConnection(Connection* conn) { + RTC_DCHECK(absl::c_find(connections_, conn) == connections_.end()); + if (connections_.empty()) { + task_safety_.reset(); + } + connections_.push_back(conn); +} + +rtc::scoped_refptr TurnEntry::UntrackConnection( + Connection* conn) { + connections_.erase(absl::c_find(connections_, conn)); + return connections_.empty() ? task_safety_.flag() : nullptr; +} + void TurnEntry::SendCreatePermissionRequest(int delay) { - port_->SendRequest( - new TurnCreatePermissionRequest(port_, this, ext_addr_, remote_ufrag_), - delay); + port_->SendRequest(new TurnCreatePermissionRequest(port_, this, ext_addr_), + delay); } void TurnEntry::SendChannelBindRequest(int delay) { @@ -1800,9 +1764,7 @@ int TurnEntry::Send(const void* data, !port_->TurnCustomizerAllowChannelData(data, size, payload)) { // If we haven't bound the channel yet, we have to use a Send Indication. // The turn_customizer_ can also make us use Send Indication. 
- TurnMessage msg; - msg.SetType(TURN_SEND_INDICATION); - msg.SetTransactionID(rtc::CreateRandomString(kStunTransactionIdLength)); + TurnMessage msg(TURN_SEND_INDICATION); msg.AddAttribute(std::make_unique( STUN_ATTR_XOR_PEER_ADDRESS, ext_addr_)); msg.AddAttribute( @@ -1833,19 +1795,21 @@ int TurnEntry::Send(const void* data, void TurnEntry::OnCreatePermissionSuccess() { RTC_LOG(LS_INFO) << port_->ToString() << ": Create permission for " << ext_addr_.ToSensitiveString() << " succeeded"; - port_->SignalCreatePermissionResult(port_, ext_addr_, - TURN_SUCCESS_RESULT_CODE); + if (port_->callbacks_for_test_) { + port_->callbacks_for_test_->OnTurnCreatePermissionResult( + TURN_SUCCESS_RESULT_CODE); + } // If `state_` is STATE_BOUND, the permission will be refreshed // by ChannelBindRequest. if (state_ != STATE_BOUND) { // Refresh the permission request about 1 minute before the permission // times out. - int delay = TURN_PERMISSION_TIMEOUT - 60000; - SendCreatePermissionRequest(delay); + TimeDelta delay = kTurnPermissionTimeout - TimeDelta::Minutes(1); + SendCreatePermissionRequest(delay.ms()); RTC_LOG(LS_INFO) << port_->ToString() - << ": Scheduled create-permission-request in " << delay - << "ms."; + << ": Scheduled create-permission-request in " + << delay.ms() << "ms."; } } @@ -1861,8 +1825,9 @@ void TurnEntry::OnCreatePermissionError(StunMessage* response, int code) { "code=" << code << "; pruned connection."; } - // Send signal with error code. - port_->SignalCreatePermissionResult(port_, ext_addr_, code); + } + if (port_->callbacks_for_test_) { + port_->callbacks_for_test_->OnTurnCreatePermissionResult(code); } } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h index 7b8e3b9af9..d0b2079465 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h @@ -13,7 +13,6 @@ #include -#include #include #include #include @@ -21,12 +20,14 @@ #include #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" #include "api/async_dns_resolver.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" #include "p2p/base/port.h" #include "p2p/client/basic_port_allocator.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/ssl_certificate.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" namespace webrtc { class TurnCustomizer; @@ -52,108 +53,56 @@ class TurnPort : public Port { // packets. }; - // Create a TURN port using the shared UDP socket, `socket`. - static std::unique_ptr Create( - rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - rtc::Network* network, - rtc::AsyncPacketSocket* socket, - const std::string& username, // ice username. - const std::string& password, // ice password. - const ProtocolAddress& server_address, - const RelayCredentials& credentials, - int server_priority, - webrtc::TurnCustomizer* customizer) { + static bool Validate(const CreateRelayPortArgs& args) { // Do basic parameter validation. - if (credentials.username.size() > kMaxTurnUsernameLength) { + if (args.config->credentials.username.size() > kMaxTurnUsernameLength) { RTC_LOG(LS_ERROR) << "Attempt to use TURN with a too long username " - << "of length " << credentials.username.size(); - return nullptr; + << "of length " + << args.config->credentials.username.size(); + return false; } // Do not connect to low-numbered ports. The default STUN port is 3478. 
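Both refresh paths above (channel bind and create-permission) schedule their next request one minute before the five-minute permission lifetime runs out, so neither the permission nor the channel bind is allowed to lapse. The arithmetic, spelled out with std::chrono standing in for webrtc::TimeDelta:

#include <chrono>
#include <cstdio>

int main() {
  using namespace std::chrono;
  constexpr minutes kTurnPermissionTimeout{5};  // RFC 5766 permission lifetime
  constexpr auto refresh_delay = kTurnPermissionTimeout - minutes{1};
  // Prints 240000, the millisecond delay handed to SendCreatePermissionRequest()
  // and SendChannelBindRequest().
  std::printf("%lld\n",
              static_cast<long long>(
                  duration_cast<milliseconds>(refresh_delay).count()));
}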
- if (!AllowedTurnPort(server_address.address.port())) { + if (!AllowedTurnPort(args.server_address->address.port(), + args.field_trials)) { RTC_LOG(LS_ERROR) << "Attempt to use TURN to connect to port " - << server_address.address.port(); + << args.server_address->address.port(); + return false; + } + return true; + } + + // Create a TURN port using the shared UDP socket, `socket`. + static std::unique_ptr Create(const CreateRelayPortArgs& args, + rtc::AsyncPacketSocket* socket) { + if (!Validate(args)) { return nullptr; } // Using `new` to access a non-public constructor. return absl::WrapUnique( - new TurnPort(thread, factory, network, socket, username, password, - server_address, credentials, server_priority, customizer)); - } - - // TODO(steveanton): Remove once downstream clients have moved to `Create`. - static std::unique_ptr CreateUnique( - rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - rtc::Network* network, - rtc::AsyncPacketSocket* socket, - const std::string& username, // ice username. - const std::string& password, // ice password. - const ProtocolAddress& server_address, - const RelayCredentials& credentials, - int server_priority, - webrtc::TurnCustomizer* customizer) { - return Create(thread, factory, network, socket, username, password, - server_address, credentials, server_priority, customizer); + new TurnPort(args.network_thread, args.socket_factory, args.network, + socket, args.username, args.password, *args.server_address, + args.config->credentials, args.config->priority, + args.config->tls_alpn_protocols, + args.config->tls_elliptic_curves, args.turn_customizer, + args.config->tls_cert_verifier, args.field_trials)); } // Create a TURN port that will use a new socket, bound to `network` and // using a port in the range between `min_port` and `max_port`. - static std::unique_ptr Create( - rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - rtc::Network* network, - uint16_t min_port, - uint16_t max_port, - const std::string& username, // ice username. - const std::string& password, // ice password. - const ProtocolAddress& server_address, - const RelayCredentials& credentials, - int server_priority, - const std::vector& tls_alpn_protocols, - const std::vector& tls_elliptic_curves, - webrtc::TurnCustomizer* customizer, - rtc::SSLCertificateVerifier* tls_cert_verifier = nullptr) { - // Do basic parameter validation. - if (credentials.username.size() > kMaxTurnUsernameLength) { - RTC_LOG(LS_ERROR) << "Attempt to use TURN with a too long username " - << "of length " << credentials.username.size(); - return nullptr; - } - // Do not connect to low-numbered ports. The default STUN port is 3478. - if (!AllowedTurnPort(server_address.address.port())) { - RTC_LOG(LS_ERROR) << "Attempt to use TURN to connect to port " - << server_address.address.port(); + static std::unique_ptr Create(const CreateRelayPortArgs& args, + int min_port, + int max_port) { + if (!Validate(args)) { return nullptr; } // Using `new` to access a non-public constructor. - return absl::WrapUnique(new TurnPort( - thread, factory, network, min_port, max_port, username, password, - server_address, credentials, server_priority, tls_alpn_protocols, - tls_elliptic_curves, customizer, tls_cert_verifier)); - } - - // TODO(steveanton): Remove once downstream clients have moved to `Create`. 
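The Create() overloads above now funnel their parameter checks through a single Validate(args) helper instead of duplicating them. A small sketch of that pattern; the args struct, its fields, and the length constant are placeholders, not the real CreateRelayPortArgs:

#include <cstddef>
#include <memory>
#include <string>

// Placeholder args bundle; only the fields this sketch needs.
struct RelayArgs {
  std::string username;
  int server_port = 3478;
};

constexpr std::size_t kMaxUsernameLength = 509;  // placeholder for kMaxTurnUsernameLength

bool AllowedTurnPort(int port) {
  // Ports 53, 80 and 443 are kept for existing deployments; anything >= 1024
  // is assumed fine. The field-trial override from the diff is omitted here.
  return port == 53 || port == 80 || port == 443 || port >= 1024;
}

// One validation step shared by every factory, so the checks cannot drift apart.
bool Validate(const RelayArgs& args) {
  return args.username.size() <= kMaxUsernameLength &&
         AllowedTurnPort(args.server_port);
}

struct Port {};
std::unique_ptr<Port> CreateWithSharedSocket(const RelayArgs& args) {
  return Validate(args) ? std::make_unique<Port>() : nullptr;
}
std::unique_ptr<Port> CreateWithOwnSocket(const RelayArgs& args,
                                          int /*min_port*/, int /*max_port*/) {
  return Validate(args) ? std::make_unique<Port>() : nullptr;
}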
- static std::unique_ptr CreateUnique( - rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - rtc::Network* network, - uint16_t min_port, - uint16_t max_port, - const std::string& username, // ice username. - const std::string& password, // ice password. - const ProtocolAddress& server_address, - const RelayCredentials& credentials, - int server_priority, - const std::vector& tls_alpn_protocols, - const std::vector& tls_elliptic_curves, - webrtc::TurnCustomizer* customizer, - rtc::SSLCertificateVerifier* tls_cert_verifier = nullptr) { - return Create(thread, factory, network, min_port, max_port, username, - password, server_address, credentials, server_priority, - tls_alpn_protocols, tls_elliptic_curves, customizer, - tls_cert_verifier); + return absl::WrapUnique( + new TurnPort(args.network_thread, args.socket_factory, args.network, + min_port, max_port, args.username, args.password, + *args.server_address, args.config->credentials, + args.config->priority, args.config->tls_alpn_protocols, + args.config->tls_elliptic_curves, args.turn_customizer, + args.config->tls_cert_verifier, args.field_trials)); } ~TurnPort() override; @@ -173,7 +122,7 @@ class TurnPort : public Port { virtual TlsCertPolicy GetTlsCertPolicy() const; virtual void SetTlsCertPolicy(TlsCertPolicy tls_cert_policy); - void SetTurnLoggingId(const std::string& turn_logging_id); + void SetTurnLoggingId(absl::string_view turn_logging_id); virtual std::vector GetTlsAlpnProtocols() const; virtual std::vector GetTlsEllipticCurves() const; @@ -210,7 +159,7 @@ class TurnPort : public Port { void OnSentPacket(rtc::AsyncPacketSocket* socket, const rtc::SentPacket& sent_packet) override; virtual void OnReadyToSend(rtc::AsyncPacketSocket* socket); - bool SupportsProtocol(const std::string& protocol) const override; + bool SupportsProtocol(absl::string_view protocol) const override; void OnSocketConnect(rtc::AsyncPacketSocket* socket); void OnSocketClose(rtc::AsyncPacketSocket* socket, int error); @@ -223,24 +172,8 @@ class TurnPort : public Port { void OnAllocateMismatch(); rtc::AsyncPacketSocket* socket() const { return socket_; } + StunRequestManager& request_manager() { return request_manager_; } - // Signal with resolved server address. - // Parameters are port, server address and resolved server address. - // This signal will be sent only if server address is resolved successfully. - sigslot:: - signal3 - SignalResolvedServerAddress; - - // Signal when TurnPort is closed, - // e.g remote socket closed (TCP) - // or receiveing a REFRESH response with lifetime 0. - sigslot::signal1 SignalTurnPortClosed; - - // All public methods/signals below are for testing only. - sigslot::signal2 SignalTurnRefreshResult; - sigslot::signal3 - SignalCreatePermissionResult; - void FlushRequests(int msg_type) { request_manager_.Flush(msg_type); } bool HasRequests() { return !request_manager_.empty(); } void set_credentials(const RelayCredentials& credentials) { credentials_ = credentials; @@ -248,71 +181,76 @@ class TurnPort : public Port { // Finds the turn entry with `address` and sets its channel id. // Returns true if the entry is found. bool SetEntryChannelId(const rtc::SocketAddress& address, int channel_id); - // Visible for testing. - // Shuts down the turn port, usually because of some fatal errors. - void Close(); void HandleConnectionDestroyed(Connection* conn) override; + void CloseForTest() { Close(); } + + // TODO(solenberg): Tests should be refactored to not peek at internal state. 
+ class CallbacksForTest { + public: + virtual ~CallbacksForTest() {} + virtual void OnTurnCreatePermissionResult(int code) = 0; + virtual void OnTurnRefreshResult(int code) = 0; + virtual void OnTurnPortClosed() = 0; + }; + void SetCallbacksForTest(CallbacksForTest* callbacks); + protected: - TurnPort(rtc::Thread* thread, + TurnPort(webrtc::TaskQueueBase* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, rtc::AsyncPacketSocket* socket, - const std::string& username, - const std::string& password, + absl::string_view username, + absl::string_view password, const ProtocolAddress& server_address, const RelayCredentials& credentials, int server_priority, - webrtc::TurnCustomizer* customizer); + const std::vector& tls_alpn_protocols, + const std::vector& tls_elliptic_curves, + webrtc::TurnCustomizer* customizer, + rtc::SSLCertificateVerifier* tls_cert_verifier = nullptr, + const webrtc::FieldTrialsView* field_trials = nullptr); - TurnPort(rtc::Thread* thread, + TurnPort(webrtc::TaskQueueBase* thread, rtc::PacketSocketFactory* factory, - rtc::Network* network, + const rtc::Network* network, uint16_t min_port, uint16_t max_port, - const std::string& username, - const std::string& password, + absl::string_view username, + absl::string_view password, const ProtocolAddress& server_address, const RelayCredentials& credentials, int server_priority, const std::vector& tls_alpn_protocols, const std::vector& tls_elliptic_curves, webrtc::TurnCustomizer* customizer, - rtc::SSLCertificateVerifier* tls_cert_verifier = nullptr); + rtc::SSLCertificateVerifier* tls_cert_verifier = nullptr, + const webrtc::FieldTrialsView* field_trials = nullptr); // NOTE: This method needs to be accessible for StunPort // return true if entry was created (i.e channel_number consumed). - bool CreateOrRefreshEntry(const rtc::SocketAddress& addr, int channel_number); - - bool CreateOrRefreshEntry(const rtc::SocketAddress& addr, - int channel_number, - const std::string& remote_ufrag); + bool CreateOrRefreshEntry(Connection* conn, int channel_number); rtc::DiffServCodePoint StunDscpValue() const override; - private: - enum { - MSG_ALLOCATE_ERROR = MSG_FIRST_AVAILABLE, - MSG_ALLOCATE_MISMATCH, - MSG_TRY_ALTERNATE_SERVER, - MSG_REFRESH_ERROR, - MSG_ALLOCATION_RELEASED - }; + // Shuts down the turn port, frees requests and deletes connections. 
+ void Close(); - typedef std::list EntryList; + private: typedef std::map SocketOptionsMap; typedef std::set AttemptedServerSet; - static bool AllowedTurnPort(int port); - void OnMessage(rtc::Message* pmsg) override; + static bool AllowedTurnPort(int port, + const webrtc::FieldTrialsView* field_trials); + void TryAlternateServer(); bool CreateTurnClientSocket(); - void set_nonce(const std::string& nonce) { nonce_ = nonce; } - void set_realm(const std::string& realm) { + void set_nonce(absl::string_view nonce) { nonce_ = std::string(nonce); } + void set_realm(absl::string_view realm) { if (realm != realm_) { - realm_ = realm; + realm_ = std::string(realm); UpdateHash(); } } @@ -330,7 +268,7 @@ class TurnPort : public Port { void OnStunAddress(const rtc::SocketAddress& address); void OnAllocateSuccess(const rtc::SocketAddress& address, const rtc::SocketAddress& stun_address); - void OnAllocateError(int error_code, const std::string& reason); + void OnAllocateError(int error_code, absl::string_view reason); void OnAllocateRequestTimeout(); void HandleDataIndication(const char* data, @@ -356,19 +294,13 @@ class TurnPort : public Port { bool HasPermission(const rtc::IPAddress& ipaddr) const; TurnEntry* FindEntry(const rtc::SocketAddress& address) const; TurnEntry* FindEntry(int channel_id) const; - bool EntryExists(TurnEntry* e); - void DestroyEntry(TurnEntry* entry); - // Destroys the entry only if `timestamp` matches the destruction timestamp - // in `entry`. - void DestroyEntryIfNotCancelled(TurnEntry* entry, int64_t timestamp); - void ScheduleEntryDestruction(TurnEntry* entry); // Marks the connection with remote address `address` failed and // pruned (a.k.a. write-timed-out). Returns true if a connection is found. bool FailAndPruneConnection(const rtc::SocketAddress& address); // Reconstruct the URL of the server which the candidate is gathered from. - std::string ReconstructedServerUrl(bool use_hostname); + std::string ReconstructedServerUrl(); void MaybeAddTurnLoggingId(StunMessage* message); @@ -397,7 +329,7 @@ class TurnPort : public Port { std::string hash_; // Digest of username:realm:password int next_channel_number_; - EntryList entries_; + std::vector> entries_; PortState state_; // By default the value will be set to 0. 
This value will be used in @@ -421,6 +353,8 @@ class TurnPort : public Port { webrtc::ScopedTaskSafety task_safety_; + CallbacksForTest* callbacks_for_test_ = nullptr; + friend class TurnEntry; friend class TurnAllocateRequest; friend class TurnRefreshRequest; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.cc index 863319e916..e11b52aecd 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.cc @@ -10,13 +10,17 @@ #include "p2p/base/turn_server.h" +#include #include #include // for std::tie #include #include "absl/algorithm/container.h" #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/packet_socket_factory.h" +#include "api/task_queue/task_queue_base.h" #include "api/transport/stun.h" #include "p2p/base/async_stun_tcp_socket.h" #include "rtc_base/byte_buffer.h" @@ -26,103 +30,52 @@ #include "rtc_base/message_digest.h" #include "rtc_base/socket_adapters.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/task_utils/to_queued_task.h" -#include "rtc_base/thread.h" namespace cricket { +namespace { +using ::webrtc::TimeDelta; // TODO(juberti): Move this all to a future turnmessage.h // static const int IPPROTO_UDP = 17; -static const int kNonceTimeout = 60 * 60 * 1000; // 60 minutes -static const int kDefaultAllocationTimeout = 10 * 60 * 1000; // 10 minutes -static const int kPermissionTimeout = 5 * 60 * 1000; // 5 minutes -static const int kChannelTimeout = 10 * 60 * 1000; // 10 minutes +constexpr TimeDelta kNonceTimeout = TimeDelta::Minutes(60); +constexpr TimeDelta kDefaultAllocationTimeout = TimeDelta::Minutes(10); +constexpr TimeDelta kPermissionTimeout = TimeDelta::Minutes(5); +constexpr TimeDelta kChannelTimeout = TimeDelta::Minutes(10); -static const int kMinChannelNumber = 0x4000; -static const int kMaxChannelNumber = 0x7FFF; +constexpr int kMinChannelNumber = 0x4000; +constexpr int kMaxChannelNumber = 0x7FFF; -static const size_t kNonceKeySize = 16; -static const size_t kNonceSize = 48; +constexpr size_t kNonceKeySize = 16; +constexpr size_t kNonceSize = 48; -static const size_t TURN_CHANNEL_HEADER_SIZE = 4U; +constexpr size_t TURN_CHANNEL_HEADER_SIZE = 4U; // TODO(mallinath) - Move these to a common place. -inline bool IsTurnChannelData(uint16_t msg_type) { +bool IsTurnChannelData(uint16_t msg_type) { // The first two bits of a channel data message are 0b01. return ((msg_type & 0xC000) == 0x4000); } -// IDs used for posted messages for TurnServerAllocation. -enum { - MSG_ALLOCATION_TIMEOUT, -}; +} // namespace -// Encapsulates a TURN permission. -// The object is created when a create permission request is received by an -// allocation, and self-deletes when its lifetime timer expires. -class TurnServerAllocation::Permission : public rtc::MessageHandlerAutoCleanup { - public: - Permission(rtc::Thread* thread, const rtc::IPAddress& peer); - ~Permission() override; - - const rtc::IPAddress& peer() const { return peer_; } - void Refresh(); - - sigslot::signal1 SignalDestroyed; - - private: - void OnMessage(rtc::Message* msg) override; - - rtc::Thread* thread_; - rtc::IPAddress peer_; -}; - -// Encapsulates a TURN channel binding. -// The object is created when a channel bind request is received by an -// allocation, and self-deletes when its lifetime timer expires. 
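The IsTurnChannelData() helper above leans on RFC 5766 framing: channel numbers occupy 0x4000 through 0x7FFF, so the first two bits of a ChannelData message are 0b01 and can never collide with a STUN message type (those start with 0b00), and the 4-byte header is just the channel number followed by the payload length. A stand-alone sketch of the check and of packing that header in network byte order:

#include <cstddef>
#include <cstdint>

constexpr uint16_t kMinChannelNumber = 0x4000;
constexpr uint16_t kMaxChannelNumber = 0x7FFF;
constexpr size_t kChannelHeaderSize = 4;

// First two bits 0b01 => ChannelData; plain STUN always starts with 0b00.
bool IsTurnChannelData(uint16_t msg_type) {
  return (msg_type & 0xC000) == 0x4000;
}

bool IsValidChannelNumber(uint16_t channel) {
  return channel >= kMinChannelNumber && channel <= kMaxChannelNumber;
}

// 4-byte ChannelData header: channel number, then payload length, big endian
// (RFC 5766, section 11.4).
void WriteChannelHeader(uint16_t channel, uint16_t length,
                        uint8_t out[kChannelHeaderSize]) {
  out[0] = static_cast<uint8_t>(channel >> 8);
  out[1] = static_cast<uint8_t>(channel & 0xFF);
  out[2] = static_cast<uint8_t>(length >> 8);
  out[3] = static_cast<uint8_t>(length & 0xFF);
}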
-class TurnServerAllocation::Channel : public rtc::MessageHandlerAutoCleanup { - public: - Channel(rtc::Thread* thread, int id, const rtc::SocketAddress& peer); - ~Channel() override; - - int id() const { return id_; } - const rtc::SocketAddress& peer() const { return peer_; } - void Refresh(); - - sigslot::signal1 SignalDestroyed; - - private: - void OnMessage(rtc::Message* msg) override; - - rtc::Thread* thread_; - int id_; - rtc::SocketAddress peer_; -}; +int GetStunSuccessResponseTypeOrZero(const StunMessage& req) { + const int resp_type = GetStunSuccessResponseType(req.type()); + return resp_type == -1 ? 0 : resp_type; +} -static bool InitResponse(const StunMessage* req, StunMessage* resp) { - int resp_type = (req) ? GetStunSuccessResponseType(req->type()) : -1; - if (resp_type == -1) - return false; - resp->SetType(resp_type); - resp->SetTransactionID(req->transaction_id()); - return true; +int GetStunErrorResponseTypeOrZero(const StunMessage& req) { + const int resp_type = GetStunErrorResponseType(req.type()); + return resp_type == -1 ? 0 : resp_type; } -static bool InitErrorResponse(const StunMessage* req, - int code, - const std::string& reason, +static void InitErrorResponse(int code, + absl::string_view reason, StunMessage* resp) { - int resp_type = (req) ? GetStunErrorResponseType(req->type()) : -1; - if (resp_type == -1) - return false; - resp->SetType(resp_type); - resp->SetTransactionID(req->transaction_id()); resp->AddAttribute(std::make_unique( - STUN_ATTR_ERROR_CODE, code, reason)); - return true; + STUN_ATTR_ERROR_CODE, code, std::string(reason))); } -TurnServer::TurnServer(rtc::Thread* thread) +TurnServer::TurnServer(webrtc::TaskQueueBase* thread) : thread_(thread), nonce_key_(rtc::CreateRandomString(kNonceKeySize)), auth_hook_(NULL), @@ -194,7 +147,10 @@ void TurnServer::AcceptConnection(rtc::Socket* server_socket) { cricket::AsyncStunTCPSocket* tcp_socket = new cricket::AsyncStunTCPSocket(accepted_socket); - tcp_socket->SignalClose.connect(this, &TurnServer::OnInternalSocketClose); + tcp_socket->SubscribeClose(this, + [this](rtc::AsyncPacketSocket* s, int err) { + OnInternalSocketClose(s, err); + }); // Finally add the socket so it can start communicating with the client. AddInternalSocket(tcp_socket, info.proto); } @@ -288,7 +244,7 @@ void TurnServer::HandleStunMessage(TurnServerConnection* conn, // This is a non-allocate request, or a retransmit of an allocate. // Check that the username matches the previous username used. if (IsStunRequestType(msg.type()) && - msg.GetByteString(STUN_ATTR_USERNAME)->GetString() != + msg.GetByteString(STUN_ATTR_USERNAME)->string_view() != allocation->username()) { SendErrorResponse(conn, &msg, STUN_ERROR_WRONG_CREDENTIALS, STUN_ERROR_REASON_WRONG_CREDENTIALS); @@ -309,15 +265,16 @@ bool TurnServer::GetKey(const StunMessage* msg, std::string* key) { return false; } - std::string username = username_attr->GetString(); - return (auth_hook_ != NULL && auth_hook_->GetKey(username, realm_, key)); + return (auth_hook_ != NULL && + auth_hook_->GetKey(std::string(username_attr->string_view()), realm_, + key)); } bool TurnServer::CheckAuthorization(TurnServerConnection* conn, StunMessage* msg, const char* data, size_t size, - const std::string& key) { + absl::string_view key) { // RFC 5389, 10.2.2. RTC_DCHECK(IsStunRequestType(msg->type())); const StunByteStringAttribute* mi_attr = @@ -344,14 +301,14 @@ bool TurnServer::CheckAuthorization(TurnServerConnection* conn, } // Fail if bad nonce. 
- if (!ValidateNonce(nonce_attr->GetString())) { + if (!ValidateNonce(nonce_attr->string_view())) { SendErrorResponseWithRealmAndNonce(conn, msg, STUN_ERROR_STALE_NONCE, STUN_ERROR_REASON_STALE_NONCE); return false; } // Fail if bad MESSAGE_INTEGRITY. - if (key.empty() || msg->ValidateMessageIntegrity(key) != + if (key.empty() || msg->ValidateMessageIntegrity(std::string(key)) != StunMessage::IntegrityStatus::kIntegrityOk) { SendErrorResponseWithRealmAndNonce(conn, msg, STUN_ERROR_UNAUTHORIZED, STUN_ERROR_REASON_UNAUTHORIZED); @@ -361,14 +318,14 @@ bool TurnServer::CheckAuthorization(TurnServerConnection* conn, // Fail if one-time-use nonce feature is enabled. TurnServerAllocation* allocation = FindAllocation(conn); if (enable_otu_nonce_ && allocation && - allocation->last_nonce() == nonce_attr->GetString()) { + allocation->last_nonce() == nonce_attr->string_view()) { SendErrorResponseWithRealmAndNonce(conn, msg, STUN_ERROR_STALE_NONCE, STUN_ERROR_REASON_STALE_NONCE); return false; } if (allocation) { - allocation->set_last_nonce(nonce_attr->GetString()); + allocation->set_last_nonce(nonce_attr->string_view()); } // Success. return true; @@ -376,9 +333,8 @@ bool TurnServer::CheckAuthorization(TurnServerConnection* conn, void TurnServer::HandleBindingRequest(TurnServerConnection* conn, const StunMessage* req) { - StunMessage response; - InitResponse(req, &response); - + StunMessage response(GetStunSuccessResponseTypeOrZero(*req), + req->transaction_id()); // Tell the user the address that we received their request from. auto mapped_addr_attr = std::make_unique( STUN_ATTR_XOR_MAPPED_ADDRESS, conn->src()); @@ -389,7 +345,7 @@ void TurnServer::HandleBindingRequest(TurnServerConnection* conn, void TurnServer::HandleAllocateRequest(TurnServerConnection* conn, const TurnMessage* msg, - const std::string& key) { + absl::string_view key) { // Check the parameters in the request. const StunUInt32Attribute* transport_attr = msg->GetUInt32(STUN_ATTR_REQUESTED_TRANSPORT); @@ -421,14 +377,14 @@ void TurnServer::HandleAllocateRequest(TurnServerConnection* conn, std::string TurnServer::GenerateNonce(int64_t now) const { // Generate a nonce of the form hex(now + HMAC-MD5(nonce_key_, now)) std::string input(reinterpret_cast(&now), sizeof(now)); - std::string nonce = rtc::hex_encode(input.c_str(), input.size()); + std::string nonce = rtc::hex_encode(input); nonce += rtc::ComputeHmac(rtc::DIGEST_MD5, nonce_key_, input); RTC_DCHECK(nonce.size() == kNonceSize); return nonce; } -bool TurnServer::ValidateNonce(const std::string& nonce) const { +bool TurnServer::ValidateNonce(absl::string_view nonce) const { // Check the size. if (nonce.size() != kNonceSize) { return false; @@ -437,8 +393,8 @@ bool TurnServer::ValidateNonce(const std::string& nonce) const { // Decode the timestamp. int64_t then; char* p = reinterpret_cast(&then); - size_t len = - rtc::hex_decode(p, sizeof(then), nonce.substr(0, sizeof(then) * 2)); + size_t len = rtc::hex_decode(rtc::ArrayView(p, sizeof(then)), + nonce.substr(0, sizeof(then) * 2)); if (len != sizeof(then)) { return false; } @@ -451,7 +407,7 @@ bool TurnServer::ValidateNonce(const std::string& nonce) const { } // Validate the timestamp. 
- return rtc::TimeMillis() - then < kNonceTimeout; + return TimeDelta::Millis(rtc::TimeMillis() - then) < kNonceTimeout; } TurnServerAllocation* TurnServer::FindAllocation(TurnServerConnection* conn) { @@ -461,7 +417,7 @@ TurnServerAllocation* TurnServer::FindAllocation(TurnServerConnection* conn) { TurnServerAllocation* TurnServer::CreateAllocation(TurnServerConnection* conn, int proto, - const std::string& key) { + absl::string_view key) { rtc::AsyncPacketSocket* external_socket = (external_socket_factory_) ? external_socket_factory_->CreateUdpSocket(external_addr_, 0, 0) @@ -473,7 +429,6 @@ TurnServerAllocation* TurnServer::CreateAllocation(TurnServerConnection* conn, // The Allocation takes ownership of the socket. TurnServerAllocation* allocation = new TurnServerAllocation(this, thread_, *conn, external_socket, key); - allocation->SignalDestroyed.connect(this, &TurnServer::OnAllocationDestroyed); allocations_[*conn].reset(allocation); return allocation; } @@ -481,10 +436,11 @@ TurnServerAllocation* TurnServer::CreateAllocation(TurnServerConnection* conn, void TurnServer::SendErrorResponse(TurnServerConnection* conn, const StunMessage* req, int code, - const std::string& reason) { + absl::string_view reason) { RTC_DCHECK_RUN_ON(thread_); - TurnMessage resp; - InitErrorResponse(req, code, reason, &resp); + TurnMessage resp(GetStunErrorResponseTypeOrZero(*req), req->transaction_id()); + InitErrorResponse(code, reason, &resp); + RTC_LOG(LS_INFO) << "Sending error response, type=" << resp.type() << ", code=" << code << ", reason=" << reason; SendStun(conn, &resp); @@ -493,9 +449,9 @@ void TurnServer::SendErrorResponse(TurnServerConnection* conn, void TurnServer::SendErrorResponseWithRealmAndNonce(TurnServerConnection* conn, const StunMessage* msg, int code, - const std::string& reason) { - TurnMessage resp; - InitErrorResponse(msg, code, reason, &resp); + absl::string_view reason) { + TurnMessage resp(GetStunErrorResponseTypeOrZero(*msg), msg->transaction_id()); + InitErrorResponse(code, reason, &resp); int64_t timestamp = rtc::TimeMillis(); if (ts_for_next_nonce_) { @@ -513,8 +469,8 @@ void TurnServer::SendErrorResponseWithAlternateServer( TurnServerConnection* conn, const StunMessage* msg, const rtc::SocketAddress& addr) { - TurnMessage resp; - InitErrorResponse(msg, STUN_ERROR_TRY_ALTERNATE, + TurnMessage resp(GetStunErrorResponseTypeOrZero(*msg), msg->transaction_id()); + InitErrorResponse(STUN_ERROR_TRY_ALTERNATE, STUN_ERROR_REASON_TRY_ALTERNATE_SERVER, &resp); resp.AddAttribute( std::make_unique(STUN_ATTR_ALTERNATE_SERVER, addr)); @@ -540,7 +496,7 @@ void TurnServer::Send(TurnServerConnection* conn, conn->socket()->SendTo(buf.Data(), buf.Length(), conn->src(), options); } -void TurnServer::OnAllocationDestroyed(TurnServerAllocation* allocation) { +void TurnServer::DestroyAllocation(TurnServerAllocation* allocation) { // Removing the internal socket if the connection is not udp. 
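The nonce scheme above is self-contained: hex-encoding the 8-byte timestamp gives 16 characters and the HMAC-MD5 over it (keyed with nonce_key_) supplies the remaining 32, which is where the kNonceSize == 48 check comes from; validation then decodes the leading timestamp and rejects anything older than kNonceTimeout. A stand-alone sketch of the round trip with a placeholder in place of HMAC-MD5; the tag comparison in the sketch goes beyond the hunks shown here, which only cover the size, timestamp, and age checks:

#include <cstdint>
#include <cstdio>
#include <functional>
#include <string>

// Placeholder only: NOT a real HMAC, just 32 hex characters derived from
// key and data, standing in for HMAC-MD5.
std::string FakeHmacHex(const std::string& key, const std::string& data) {
  uint64_t h = std::hash<std::string>{}(key + data);
  char buf[33];
  std::snprintf(buf, sizeof(buf), "%016llx%016llx",
                static_cast<unsigned long long>(h),
                static_cast<unsigned long long>(~h));
  return std::string(buf, 32);
}

std::string ToHex(const std::string& bytes) {
  static const char kHex[] = "0123456789abcdef";
  std::string out;
  for (unsigned char c : bytes) {
    out += kHex[c >> 4];
    out += kHex[c & 0xF];
  }
  return out;
}

std::string GenerateNonce(const std::string& key, int64_t now_ms) {
  std::string ts(reinterpret_cast<const char*>(&now_ms), sizeof(now_ms));
  return ToHex(ts) + FakeHmacHex(key, ts);  // 16 + 32 = 48 characters
}

bool ValidateNonce(const std::string& key, const std::string& nonce,
                   int64_t now_ms, int64_t timeout_ms) {
  if (nonce.size() != 48) return false;
  int64_t then = 0;  // decode the leading hex timestamp
  for (int i = 0; i < 16; ++i) {
    char c = nonce[i];
    int v = (c >= 'a') ? c - 'a' + 10 : c - '0';
    reinterpret_cast<uint8_t*>(&then)[i / 2] |= (i % 2 == 0) ? v << 4 : v;
  }
  std::string ts(reinterpret_cast<const char*>(&then), sizeof(then));
  if (nonce.substr(16) != FakeHmacHex(key, ts)) return false;  // tampered
  return now_ms - then < timeout_ms;                           // expired?
}

int main() {
  const std::string key = "nonce-key";
  const int64_t now = 1676000000000;
  std::string nonce = GenerateNonce(key, now);
  std::printf("fresh: %d\n",
              ValidateNonce(key, nonce, now + 1000, 60 * 60 * 1000));
  std::printf("stale: %d\n",
              ValidateNonce(key, nonce, now + 2 * 60 * 60 * 1000,
                            60 * 60 * 1000));
}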
rtc::AsyncPacketSocket* socket = allocation->conn()->socket(); InternalSocketMap::iterator iter = server_sockets_.find(socket); @@ -552,17 +508,14 @@ void TurnServer::OnAllocationDestroyed(TurnServerAllocation* allocation) { DestroyInternalSocket(socket); } - AllocationMap::iterator it = allocations_.find(*(allocation->conn())); - if (it != allocations_.end()) { - it->second.release(); - allocations_.erase(it); - } + allocations_.erase(*(allocation->conn())); } void TurnServer::DestroyInternalSocket(rtc::AsyncPacketSocket* socket) { InternalSocketMap::iterator iter = server_sockets_.find(socket); if (iter != server_sockets_.end()) { rtc::AsyncPacketSocket* socket = iter->first; + socket->UnsubscribeClose(this); socket->SignalReadPacket.disconnect(this); server_sockets_.erase(iter); std::unique_ptr socket_to_delete = @@ -570,8 +523,7 @@ void TurnServer::DestroyInternalSocket(rtc::AsyncPacketSocket* socket) { // We must destroy the socket async to avoid invalidating the sigslot // callback list iterator inside a sigslot callback. (In other words, // deleting an object from within a callback from that object). - thread_->PostTask(webrtc::ToQueuedTask( - [socket_to_delete = std::move(socket_to_delete)] {})); + thread_->PostTask([socket_to_delete = std::move(socket_to_delete)] {}); } } @@ -600,10 +552,10 @@ std::string TurnServerConnection::ToString() const { } TurnServerAllocation::TurnServerAllocation(TurnServer* server, - rtc::Thread* thread, + webrtc::TaskQueueBase* thread, const TurnServerConnection& conn, rtc::AsyncPacketSocket* socket, - const std::string& key) + absl::string_view key) : server_(server), thread_(thread), conn_(conn), @@ -614,14 +566,8 @@ TurnServerAllocation::TurnServerAllocation(TurnServer* server, } TurnServerAllocation::~TurnServerAllocation() { - for (ChannelList::iterator it = channels_.begin(); it != channels_.end(); - ++it) { - delete *it; - } - for (PermissionList::iterator it = perms_.begin(); it != perms_.end(); ++it) { - delete *it; - } - thread_->Clear(this, MSG_ALLOCATION_TIMEOUT); + channels_.clear(); + perms_.clear(); RTC_LOG(LS_INFO) << ToString() << ": Allocation destroyed"; } @@ -663,26 +609,25 @@ void TurnServerAllocation::HandleAllocateRequest(const TurnMessage* msg) { const StunByteStringAttribute* username_attr = msg->GetByteString(STUN_ATTR_USERNAME); RTC_DCHECK(username_attr != NULL); - username_ = username_attr->GetString(); + username_ = std::string(username_attr->string_view()); // Figure out the lifetime and start the allocation timer. - int lifetime_secs = ComputeLifetime(msg); - thread_->PostDelayed(RTC_FROM_HERE, lifetime_secs * 1000, this, - MSG_ALLOCATION_TIMEOUT); + TimeDelta lifetime = ComputeLifetime(*msg); + PostDeleteSelf(lifetime); - RTC_LOG(LS_INFO) << ToString() - << ": Created allocation with lifetime=" << lifetime_secs; + RTC_LOG(LS_INFO) << ToString() << ": Created allocation with lifetime=" + << lifetime.seconds(); // We've already validated all the important bits; just send a response here. 
- TurnMessage response; - InitResponse(msg, &response); + TurnMessage response(GetStunSuccessResponseTypeOrZero(*msg), + msg->transaction_id()); auto mapped_addr_attr = std::make_unique( STUN_ATTR_XOR_MAPPED_ADDRESS, conn_.src()); auto relayed_addr_attr = std::make_unique( STUN_ATTR_XOR_RELAYED_ADDRESS, external_socket_->GetLocalAddress()); - auto lifetime_attr = - std::make_unique(STUN_ATTR_LIFETIME, lifetime_secs); + auto lifetime_attr = std::make_unique( + STUN_ATTR_LIFETIME, lifetime.seconds()); response.AddAttribute(std::move(mapped_addr_attr)); response.AddAttribute(std::move(relayed_addr_attr)); response.AddAttribute(std::move(lifetime_attr)); @@ -692,22 +637,21 @@ void TurnServerAllocation::HandleAllocateRequest(const TurnMessage* msg) { void TurnServerAllocation::HandleRefreshRequest(const TurnMessage* msg) { // Figure out the new lifetime. - int lifetime_secs = ComputeLifetime(msg); + TimeDelta lifetime = ComputeLifetime(*msg); // Reset the expiration timer. - thread_->Clear(this, MSG_ALLOCATION_TIMEOUT); - thread_->PostDelayed(RTC_FROM_HERE, lifetime_secs * 1000, this, - MSG_ALLOCATION_TIMEOUT); + safety_.reset(); + PostDeleteSelf(lifetime); RTC_LOG(LS_INFO) << ToString() - << ": Refreshed allocation, lifetime=" << lifetime_secs; + << ": Refreshed allocation, lifetime=" << lifetime.seconds(); // Send a success response with a LIFETIME attribute. - TurnMessage response; - InitResponse(msg, &response); + TurnMessage response(GetStunSuccessResponseTypeOrZero(*msg), + msg->transaction_id()); - auto lifetime_attr = - std::make_unique(STUN_ATTR_LIFETIME, lifetime_secs); + auto lifetime_attr = std::make_unique( + STUN_ATTR_LIFETIME, lifetime.seconds()); response.AddAttribute(std::move(lifetime_attr)); SendResponse(&response); @@ -758,8 +702,8 @@ void TurnServerAllocation::HandleCreatePermissionRequest( << peer_attr->GetAddress().ToSensitiveString(); // Send a success response. - TurnMessage response; - InitResponse(msg, &response); + TurnMessage response(GetStunSuccessResponseTypeOrZero(*msg), + msg->transaction_id()); SendResponse(&response); } @@ -783,22 +727,24 @@ void TurnServerAllocation::HandleChannelBindRequest(const TurnMessage* msg) { // Check that this channel id isn't bound to another transport address, and // that this transport address isn't bound to another channel id. - Channel* channel1 = FindChannel(channel_id); - Channel* channel2 = FindChannel(peer_attr->GetAddress()); + auto channel1 = FindChannel(channel_id); + auto channel2 = FindChannel(peer_attr->GetAddress()); if (channel1 != channel2) { SendBadRequestResponse(msg); return; } // Add or refresh this channel. - if (!channel1) { - channel1 = new Channel(thread_, channel_id, peer_attr->GetAddress()); - channel1->SignalDestroyed.connect( - this, &TurnServerAllocation::OnChannelDestroyed); - channels_.push_back(channel1); + if (channel1 == channels_.end()) { + channel1 = channels_.insert( + channels_.end(), {.id = channel_id, .peer = peer_attr->GetAddress()}); } else { - channel1->Refresh(); + channel1->pending_delete.reset(); } + thread_->PostDelayedTask( + SafeTask(channel1->pending_delete.flag(), + [this, channel1] { channels_.erase(channel1); }), + kChannelTimeout); // Channel binds also refresh permissions. AddPermission(peer_attr->GetAddress().ipaddr()); @@ -807,19 +753,19 @@ void TurnServerAllocation::HandleChannelBindRequest(const TurnMessage* msg) { << ", peer=" << peer_attr->GetAddress().ToSensitiveString(); // Send a success response. 
- TurnMessage response; - InitResponse(msg, &response); + TurnMessage response(GetStunSuccessResponseTypeOrZero(*msg), + msg->transaction_id()); SendResponse(&response); } void TurnServerAllocation::HandleChannelData(const char* data, size_t size) { // Extract the channel number from the data. uint16_t channel_id = rtc::GetBE16(data); - Channel* channel = FindChannel(channel_id); - if (channel) { + auto channel = FindChannel(channel_id); + if (channel != channels_.end()) { // Send the data to the peer address. SendExternal(data + TURN_CHANNEL_HEADER_SIZE, - size - TURN_CHANNEL_HEADER_SIZE, channel->peer()); + size - TURN_CHANNEL_HEADER_SIZE, channel->peer); } else { RTC_LOG(LS_WARNING) << ToString() << ": Received channel data for invalid channel, id=" @@ -834,20 +780,18 @@ void TurnServerAllocation::OnExternalPacket( const rtc::SocketAddress& addr, const int64_t& /* packet_time_us */) { RTC_DCHECK(external_socket_.get() == socket); - Channel* channel = FindChannel(addr); - if (channel) { + auto channel = FindChannel(addr); + if (channel != channels_.end()) { // There is a channel bound to this address. Send as a channel message. rtc::ByteBufferWriter buf; - buf.WriteUInt16(channel->id()); + buf.WriteUInt16(channel->id); buf.WriteUInt16(static_cast(size)); buf.WriteBytes(data, size); server_->Send(&conn_, buf); } else if (!server_->enable_permission_checks_ || HasPermission(addr.ipaddr())) { // No channel, but a permission exists. Send as a data indication. - TurnMessage msg; - msg.SetType(TURN_DATA_INDICATION); - msg.SetTransactionID(rtc::CreateRandomString(kStunTransactionIdLength)); + TurnMessage msg(TURN_DATA_INDICATION); msg.AddAttribute(std::make_unique( STUN_ATTR_XOR_PEER_ADDRESS, addr)); msg.AddAttribute( @@ -860,60 +804,46 @@ void TurnServerAllocation::OnExternalPacket( } } -int TurnServerAllocation::ComputeLifetime(const TurnMessage* msg) { - // Return the smaller of our default lifetime and the requested lifetime. 
- int lifetime = kDefaultAllocationTimeout / 1000; // convert to seconds - const StunUInt32Attribute* lifetime_attr = msg->GetUInt32(STUN_ATTR_LIFETIME); - if (lifetime_attr && static_cast(lifetime_attr->value()) < lifetime) { - lifetime = static_cast(lifetime_attr->value()); +TimeDelta TurnServerAllocation::ComputeLifetime(const TurnMessage& msg) { + if (const StunUInt32Attribute* attr = msg.GetUInt32(STUN_ATTR_LIFETIME)) { + return std::min(TimeDelta::Seconds(static_cast(attr->value())), + kDefaultAllocationTimeout); } - return lifetime; + return kDefaultAllocationTimeout; } bool TurnServerAllocation::HasPermission(const rtc::IPAddress& addr) { - return (FindPermission(addr) != NULL); + return FindPermission(addr) != perms_.end(); } void TurnServerAllocation::AddPermission(const rtc::IPAddress& addr) { - Permission* perm = FindPermission(addr); - if (!perm) { - perm = new Permission(thread_, addr); - perm->SignalDestroyed.connect(this, - &TurnServerAllocation::OnPermissionDestroyed); - perms_.push_back(perm); + auto perm = FindPermission(addr); + if (perm == perms_.end()) { + perm = perms_.insert(perms_.end(), {.peer = addr}); } else { - perm->Refresh(); + perm->pending_delete.reset(); } + thread_->PostDelayedTask(SafeTask(perm->pending_delete.flag(), + [this, perm] { perms_.erase(perm); }), + kPermissionTimeout); } -TurnServerAllocation::Permission* TurnServerAllocation::FindPermission( - const rtc::IPAddress& addr) const { - for (PermissionList::const_iterator it = perms_.begin(); it != perms_.end(); - ++it) { - if ((*it)->peer() == addr) - return *it; - } - return NULL; +TurnServerAllocation::PermissionList::iterator +TurnServerAllocation::FindPermission(const rtc::IPAddress& addr) { + return absl::c_find_if(perms_, + [&](const Permission& p) { return p.peer == addr; }); } -TurnServerAllocation::Channel* TurnServerAllocation::FindChannel( - int channel_id) const { - for (ChannelList::const_iterator it = channels_.begin(); - it != channels_.end(); ++it) { - if ((*it)->id() == channel_id) - return *it; - } - return NULL; +TurnServerAllocation::ChannelList::iterator TurnServerAllocation::FindChannel( + int channel_id) { + return absl::c_find_if(channels_, + [&](const Channel& c) { return c.id == channel_id; }); } -TurnServerAllocation::Channel* TurnServerAllocation::FindChannel( - const rtc::SocketAddress& addr) const { - for (ChannelList::const_iterator it = channels_.begin(); - it != channels_.end(); ++it) { - if ((*it)->peer() == addr) - return *it; - } - return NULL; +TurnServerAllocation::ChannelList::iterator TurnServerAllocation::FindChannel( + const rtc::SocketAddress& addr) { + return absl::c_find_if(channels_, + [&](const Channel& c) { return c.peer == addr; }); } void TurnServerAllocation::SendResponse(TurnMessage* msg) { @@ -928,7 +858,7 @@ void TurnServerAllocation::SendBadRequestResponse(const TurnMessage* req) { void TurnServerAllocation::SendErrorResponse(const TurnMessage* req, int code, - const std::string& reason) { + absl::string_view reason) { server_->SendErrorResponse(&conn_, req, code, reason); } @@ -939,67 +869,13 @@ void TurnServerAllocation::SendExternal(const void* data, external_socket_->SendTo(data, size, peer, options); } -void TurnServerAllocation::OnMessage(rtc::Message* msg) { - RTC_DCHECK(msg->message_id == MSG_ALLOCATION_TIMEOUT); - SignalDestroyed(this); - delete this; -} - -void TurnServerAllocation::OnPermissionDestroyed(Permission* perm) { - auto it = absl::c_find(perms_, perm); - RTC_DCHECK(it != perms_.end()); - perms_.erase(it); -} - -void 
TurnServerAllocation::OnChannelDestroyed(Channel* channel) { - auto it = absl::c_find(channels_, channel); - RTC_DCHECK(it != channels_.end()); - channels_.erase(it); -} - -TurnServerAllocation::Permission::Permission(rtc::Thread* thread, - const rtc::IPAddress& peer) - : thread_(thread), peer_(peer) { - Refresh(); -} - -TurnServerAllocation::Permission::~Permission() { - thread_->Clear(this, MSG_ALLOCATION_TIMEOUT); -} - -void TurnServerAllocation::Permission::Refresh() { - thread_->Clear(this, MSG_ALLOCATION_TIMEOUT); - thread_->PostDelayed(RTC_FROM_HERE, kPermissionTimeout, this, - MSG_ALLOCATION_TIMEOUT); -} - -void TurnServerAllocation::Permission::OnMessage(rtc::Message* msg) { - RTC_DCHECK(msg->message_id == MSG_ALLOCATION_TIMEOUT); - SignalDestroyed(this); - delete this; -} - -TurnServerAllocation::Channel::Channel(rtc::Thread* thread, - int id, - const rtc::SocketAddress& peer) - : thread_(thread), id_(id), peer_(peer) { - Refresh(); -} - -TurnServerAllocation::Channel::~Channel() { - thread_->Clear(this, MSG_ALLOCATION_TIMEOUT); -} - -void TurnServerAllocation::Channel::Refresh() { - thread_->Clear(this, MSG_ALLOCATION_TIMEOUT); - thread_->PostDelayed(RTC_FROM_HERE, kChannelTimeout, this, - MSG_ALLOCATION_TIMEOUT); -} - -void TurnServerAllocation::Channel::OnMessage(rtc::Message* msg) { - RTC_DCHECK(msg->message_id == MSG_ALLOCATION_TIMEOUT); - SignalDestroyed(this); - delete this; +void TurnServerAllocation::PostDeleteSelf(TimeDelta delay) { + auto delete_self = [this] { + RTC_DCHECK_RUN_ON(server_->thread_); + server_->DestroyAllocation(this); + }; + thread_->PostDelayedTask(SafeTask(safety_.flag(), std::move(delete_self)), + delay); } } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.h b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.h index bb70defd65..e951d089af 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.h @@ -19,13 +19,16 @@ #include #include +#include "absl/strings/string_view.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" #include "p2p/base/port_interface.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/socket_address.h" #include "rtc_base/ssl_adapter.h" #include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread.h" namespace rtc { class ByteBufferWriter; @@ -65,15 +68,14 @@ class TurnServerConnection { // The object is created when an allocation request is received, and then // handles TURN messages (via HandleTurnMessage) and channel data messages // (via HandleChannelData) for this allocation when received by the server. -// The object self-deletes and informs the server if its lifetime timer expires. -class TurnServerAllocation : public rtc::MessageHandlerAutoCleanup, - public sigslot::has_slots<> { +// The object informs the server when its lifetime timer expires. 
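The allocation, permission, and channel changes above are all one migration: the self-deleting rtc::MessageHandler timers (PostDelayed / Clear / OnMessage) become delayed tasks posted to a webrtc::TaskQueueBase and guarded by a per-entry webrtc::ScopedTaskSafety, so a refresh is just resetting the safety flag (which cancels the queued erase) and posting a new one. Below is a minimal, self-contained sketch of that pattern under the same api/task_queue headers; EntryTable and Entry are illustrative names, not code from this change.

#include <list>

#include "api/task_queue/pending_task_safety_flag.h"
#include "api/task_queue/task_queue_base.h"
#include "api/units/time_delta.h"

struct Entry {
  webrtc::ScopedTaskSafety pending_delete;
  int id = 0;
};

class EntryTable {
 public:
  explicit EntryTable(webrtc::TaskQueueBase* thread) : thread_(thread) {}

  // Add or refresh an entry: resetting the safety flag invalidates any
  // previously queued erase, then a fresh delayed erase is scheduled. This
  // mirrors AddPermission() and HandleChannelBindRequest() above.
  void AddOrRefresh(int id, webrtc::TimeDelta timeout) {
    auto it = Find(id);
    if (it == entries_.end()) {
      it = entries_.emplace(entries_.end());  // construct in place
      it->id = id;
    } else {
      it->pending_delete.reset();  // drops the pending erase task
    }
    thread_->PostDelayedTask(
        webrtc::SafeTask(it->pending_delete.flag(),
                         [this, it] { entries_.erase(it); }),
        timeout);
  }

 private:
  std::list<Entry>::iterator Find(int id) {
    for (auto it = entries_.begin(); it != entries_.end(); ++it) {
      if (it->id == id) return it;
    }
    return entries_.end();
  }

  webrtc::TaskQueueBase* const thread_;
  // std::list keeps the iterator captured by the delayed task valid until
  // the entry is actually erased.
  std::list<Entry> entries_;
};

The same flag-plus-delayed-task idea backs PostDeleteSelf(), except that there the whole allocation is the entry and the server removes it via DestroyAllocation().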
+class TurnServerAllocation : public sigslot::has_slots<> { public: TurnServerAllocation(TurnServer* server_, - rtc::Thread* thread, + webrtc::TaskQueueBase* thread, const TurnServerConnection& conn, rtc::AsyncPacketSocket* server_socket, - const std::string& key); + absl::string_view key); ~TurnServerAllocation() override; TurnServerConnection* conn() { return &conn_; } @@ -81,20 +83,29 @@ class TurnServerAllocation : public rtc::MessageHandlerAutoCleanup, const std::string& transaction_id() const { return transaction_id_; } const std::string& username() const { return username_; } const std::string& last_nonce() const { return last_nonce_; } - void set_last_nonce(const std::string& nonce) { last_nonce_ = nonce; } + void set_last_nonce(absl::string_view nonce) { + last_nonce_ = std::string(nonce); + } std::string ToString() const; void HandleTurnMessage(const TurnMessage* msg); void HandleChannelData(const char* data, size_t size); - sigslot::signal1 SignalDestroyed; - private: - class Channel; - class Permission; - typedef std::list PermissionList; - typedef std::list ChannelList; + struct Channel { + webrtc::ScopedTaskSafety pending_delete; + int id; + rtc::SocketAddress peer; + }; + struct Permission { + webrtc::ScopedTaskSafety pending_delete; + rtc::IPAddress peer; + }; + using PermissionList = std::list; + using ChannelList = std::list; + + void PostDeleteSelf(webrtc::TimeDelta delay); void HandleAllocateRequest(const TurnMessage* msg); void HandleRefreshRequest(const TurnMessage* msg); @@ -108,28 +119,24 @@ class TurnServerAllocation : public rtc::MessageHandlerAutoCleanup, const rtc::SocketAddress& addr, const int64_t& packet_time_us); - static int ComputeLifetime(const TurnMessage* msg); + static webrtc::TimeDelta ComputeLifetime(const TurnMessage& msg); bool HasPermission(const rtc::IPAddress& addr); void AddPermission(const rtc::IPAddress& addr); - Permission* FindPermission(const rtc::IPAddress& addr) const; - Channel* FindChannel(int channel_id) const; - Channel* FindChannel(const rtc::SocketAddress& addr) const; + PermissionList::iterator FindPermission(const rtc::IPAddress& addr); + ChannelList::iterator FindChannel(int channel_id); + ChannelList::iterator FindChannel(const rtc::SocketAddress& addr); void SendResponse(TurnMessage* msg); void SendBadRequestResponse(const TurnMessage* req); void SendErrorResponse(const TurnMessage* req, int code, - const std::string& reason); + absl::string_view reason); void SendExternal(const void* data, size_t size, const rtc::SocketAddress& peer); - void OnPermissionDestroyed(Permission* perm); - void OnChannelDestroyed(Channel* channel); - void OnMessage(rtc::Message* msg) override; - TurnServer* const server_; - rtc::Thread* const thread_; + webrtc::TaskQueueBase* const thread_; TurnServerConnection conn_; std::unique_ptr external_socket_; std::string key_; @@ -138,6 +145,7 @@ class TurnServerAllocation : public rtc::MessageHandlerAutoCleanup, std::string last_nonce_; PermissionList perms_; ChannelList channels_; + webrtc::ScopedTaskSafety safety_; }; // An interface through which the MD5 credential hash can be retrieved. @@ -146,8 +154,8 @@ class TurnAuthInterface { // Gets HA1 for the specified user and realm. // HA1 = MD5(A1) = MD5(username:realm:password). // Return true if the given username and realm are valid, or false if not. 
- virtual bool GetKey(const std::string& username, - const std::string& realm, + virtual bool GetKey(absl::string_view username, + absl::string_view realm, std::string* key) = 0; virtual ~TurnAuthInterface() = default; }; @@ -176,7 +184,7 @@ class TurnServer : public sigslot::has_slots<> { typedef std::map> AllocationMap; - explicit TurnServer(rtc::Thread* thread); + explicit TurnServer(webrtc::TaskQueueBase* thread); ~TurnServer() override; // Gets/sets the realm value to use for the server. @@ -184,9 +192,9 @@ class TurnServer : public sigslot::has_slots<> { RTC_DCHECK_RUN_ON(thread_); return realm_; } - void set_realm(const std::string& realm) { + void set_realm(absl::string_view realm) { RTC_DCHECK_RUN_ON(thread_); - realm_ = realm; + realm_ = std::string(realm); } // Gets/sets the value for the SOFTWARE attribute for TURN messages. @@ -194,9 +202,9 @@ class TurnServer : public sigslot::has_slots<> { RTC_DCHECK_RUN_ON(thread_); return software_; } - void set_software(const std::string& software) { + void set_software(absl::string_view software) { RTC_DCHECK_RUN_ON(thread_); - software_ = software; + software_ = std::string(software); } const AllocationMap& allocations() const { @@ -280,32 +288,32 @@ class TurnServer : public sigslot::has_slots<> { RTC_RUN_ON(thread_); void HandleAllocateRequest(TurnServerConnection* conn, const TurnMessage* msg, - const std::string& key) RTC_RUN_ON(thread_); + absl::string_view key) RTC_RUN_ON(thread_); bool GetKey(const StunMessage* msg, std::string* key) RTC_RUN_ON(thread_); bool CheckAuthorization(TurnServerConnection* conn, StunMessage* msg, const char* data, size_t size, - const std::string& key) RTC_RUN_ON(thread_); - bool ValidateNonce(const std::string& nonce) const RTC_RUN_ON(thread_); + absl::string_view key) RTC_RUN_ON(thread_); + bool ValidateNonce(absl::string_view nonce) const RTC_RUN_ON(thread_); TurnServerAllocation* FindAllocation(TurnServerConnection* conn) RTC_RUN_ON(thread_); TurnServerAllocation* CreateAllocation(TurnServerConnection* conn, int proto, - const std::string& key) + absl::string_view key) RTC_RUN_ON(thread_); void SendErrorResponse(TurnServerConnection* conn, const StunMessage* req, int code, - const std::string& reason); + absl::string_view reason); void SendErrorResponseWithRealmAndNonce(TurnServerConnection* conn, const StunMessage* req, int code, - const std::string& reason) + absl::string_view reason) RTC_RUN_ON(thread_); void SendErrorResponseWithAlternateServer(TurnServerConnection* conn, @@ -316,8 +324,7 @@ class TurnServer : public sigslot::has_slots<> { void SendStun(TurnServerConnection* conn, StunMessage* msg); void Send(TurnServerConnection* conn, const rtc::ByteBufferWriter& buf); - void OnAllocationDestroyed(TurnServerAllocation* allocation) - RTC_RUN_ON(thread_); + void DestroyAllocation(TurnServerAllocation* allocation) RTC_RUN_ON(thread_); void DestroyInternalSocket(rtc::AsyncPacketSocket* socket) RTC_RUN_ON(thread_); @@ -329,7 +336,7 @@ class TurnServer : public sigslot::has_slots<> { }; typedef std::map ServerSocketMap; - rtc::Thread* const thread_; + webrtc::TaskQueueBase* const thread_; const std::string nonce_key_; std::string realm_ RTC_GUARDED_BY(thread_); std::string software_ RTC_GUARDED_BY(thread_); diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/wrapping_active_ice_controller.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/wrapping_active_ice_controller.cc new file mode 100644 index 0000000000..c6659217fc --- /dev/null +++ 
b/TMessagesProj/jni/voip/webrtc/p2p/base/wrapping_active_ice_controller.cc @@ -0,0 +1,253 @@ +/* + * Copyright 2022 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "p2p/base/wrapping_active_ice_controller.h" + +#include +#include +#include + +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/units/time_delta.h" +#include "p2p/base/basic_ice_controller.h" +#include "p2p/base/connection.h" +#include "p2p/base/ice_agent_interface.h" +#include "p2p/base/ice_controller_interface.h" +#include "p2p/base/ice_switch_reason.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/transport_description.h" +#include "rtc_base/logging.h" +#include "rtc_base/thread.h" +#include "rtc_base/time_utils.h" + +namespace { +using ::webrtc::SafeTask; +using ::webrtc::TimeDelta; +} // unnamed namespace + +namespace cricket { + +WrappingActiveIceController::WrappingActiveIceController( + IceAgentInterface* ice_agent, + std::unique_ptr wrapped) + : network_thread_(rtc::Thread::Current()), + wrapped_(std::move(wrapped)), + agent_(*ice_agent) { + RTC_DCHECK(ice_agent != nullptr); +} + +WrappingActiveIceController::WrappingActiveIceController( + IceAgentInterface* ice_agent, + IceControllerFactoryInterface* wrapped_factory, + const IceControllerFactoryArgs& wrapped_factory_args) + : network_thread_(rtc::Thread::Current()), agent_(*ice_agent) { + RTC_DCHECK(ice_agent != nullptr); + if (wrapped_factory) { + wrapped_ = wrapped_factory->Create(wrapped_factory_args); + } else { + wrapped_ = std::make_unique(wrapped_factory_args); + } +} + +WrappingActiveIceController::~WrappingActiveIceController() {} + +void WrappingActiveIceController::SetIceConfig(const IceConfig& config) { + RTC_DCHECK_RUN_ON(network_thread_); + wrapped_->SetIceConfig(config); +} + +bool WrappingActiveIceController::GetUseCandidateAttribute( + const Connection* connection, + NominationMode mode, + IceMode remote_ice_mode) const { + RTC_DCHECK_RUN_ON(network_thread_); + return wrapped_->GetUseCandidateAttr(connection, mode, remote_ice_mode); +} + +void WrappingActiveIceController::OnConnectionAdded( + const Connection* connection) { + RTC_DCHECK_RUN_ON(network_thread_); + wrapped_->AddConnection(connection); +} + +void WrappingActiveIceController::OnConnectionPinged( + const Connection* connection) { + RTC_DCHECK_RUN_ON(network_thread_); + wrapped_->MarkConnectionPinged(connection); +} + +void WrappingActiveIceController::OnConnectionUpdated( + const Connection* connection) { + RTC_LOG(LS_VERBOSE) << "Connection report for " << connection->ToString(); + // Do nothing. Native ICE controllers have direct access to Connection, so no + // need to update connection state separately. 
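Usage-wise, the two constructors above are the only entry points: pass an already-built legacy controller, or pass a factory (possibly null) and let the wrapper fall back to BasicIceController. A hedged sketch follows, assuming the new p2p/base headers from this change; the helper name and arguments are illustrative. Note that both constructors capture rtc::Thread::Current() as the network thread, so construction must happen on that thread.

#include <memory>
#include <utility>

#include "p2p/base/ice_agent_interface.h"
#include "p2p/base/ice_controller_factory_interface.h"
#include "p2p/base/ice_controller_interface.h"
#include "p2p/base/wrapping_active_ice_controller.h"

// Illustrative factory helper; `agent` must outlive the returned controller.
std::unique_ptr<cricket::WrappingActiveIceController> WrapIceController(
    cricket::IceAgentInterface* agent,
    std::unique_ptr<cricket::IceControllerInterface> legacy,
    cricket::IceControllerFactoryInterface* factory,
    const cricket::IceControllerFactoryArgs& args) {
  if (legacy) {
    // Wrap an already-constructed legacy (passive) ICE controller.
    return std::make_unique<cricket::WrappingActiveIceController>(
        agent, std::move(legacy));
  }
  // Otherwise construct through the factory; a null factory makes the
  // wrapper create a BasicIceController internally.
  return std::make_unique<cricket::WrappingActiveIceController>(agent, factory,
                                                                args);
}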
+} + +void WrappingActiveIceController::OnConnectionSwitched( + const Connection* connection) { + RTC_DCHECK_RUN_ON(network_thread_); + selected_connection_ = connection; + wrapped_->SetSelectedConnection(connection); +} + +void WrappingActiveIceController::OnConnectionDestroyed( + const Connection* connection) { + RTC_DCHECK_RUN_ON(network_thread_); + wrapped_->OnConnectionDestroyed(connection); +} + +void WrappingActiveIceController::MaybeStartPinging() { + RTC_DCHECK_RUN_ON(network_thread_); + if (started_pinging_) { + return; + } + + if (wrapped_->HasPingableConnection()) { + network_thread_->PostTask( + SafeTask(task_safety_.flag(), [this]() { SelectAndPingConnection(); })); + agent_.OnStartedPinging(); + started_pinging_ = true; + } +} + +void WrappingActiveIceController::SelectAndPingConnection() { + RTC_DCHECK_RUN_ON(network_thread_); + agent_.UpdateConnectionStates(); + + IceControllerInterface::PingResult result = + wrapped_->SelectConnectionToPing(agent_.GetLastPingSentMs()); + HandlePingResult(result); +} + +void WrappingActiveIceController::HandlePingResult( + IceControllerInterface::PingResult result) { + RTC_DCHECK_RUN_ON(network_thread_); + + if (result.connection.has_value()) { + agent_.SendPingRequest(result.connection.value()); + } + + network_thread_->PostDelayedTask( + SafeTask(task_safety_.flag(), [this]() { SelectAndPingConnection(); }), + TimeDelta::Millis(result.recheck_delay_ms)); +} + +void WrappingActiveIceController::OnSortAndSwitchRequest( + IceSwitchReason reason) { + RTC_DCHECK_RUN_ON(network_thread_); + if (!sort_pending_) { + network_thread_->PostTask(SafeTask(task_safety_.flag(), [this, reason]() { + SortAndSwitchToBestConnection(reason); + })); + sort_pending_ = true; + } +} + +void WrappingActiveIceController::OnImmediateSortAndSwitchRequest( + IceSwitchReason reason) { + RTC_DCHECK_RUN_ON(network_thread_); + SortAndSwitchToBestConnection(reason); +} + +void WrappingActiveIceController::SortAndSwitchToBestConnection( + IceSwitchReason reason) { + RTC_DCHECK_RUN_ON(network_thread_); + + // Make sure the connection states are up-to-date since this affects how they + // will be sorted. + agent_.UpdateConnectionStates(); + + // Any changes after this point will require a re-sort. + sort_pending_ = false; + + IceControllerInterface::SwitchResult result = + wrapped_->SortAndSwitchConnection(reason); + HandleSwitchResult(reason, result); + UpdateStateOnConnectionsResorted(); +} + +bool WrappingActiveIceController::OnImmediateSwitchRequest( + IceSwitchReason reason, + const Connection* selected) { + RTC_DCHECK_RUN_ON(network_thread_); + IceControllerInterface::SwitchResult result = + wrapped_->ShouldSwitchConnection(reason, selected); + HandleSwitchResult(reason, result); + return result.connection.has_value(); +} + +void WrappingActiveIceController::HandleSwitchResult( + IceSwitchReason reason_for_switch, + IceControllerInterface::SwitchResult result) { + RTC_DCHECK_RUN_ON(network_thread_); + if (result.connection.has_value()) { + RTC_LOG(LS_INFO) << "Switching selected connection due to: " + << IceSwitchReasonToString(reason_for_switch); + agent_.SwitchSelectedConnection(result.connection.value(), + reason_for_switch); + } + + if (result.recheck_event.has_value()) { + // If we do not switch to the connection because it missed the receiving + // threshold, the new connection is in a better receiving state than the + // currently selected connection. So we need to re-check whether it needs + // to be switched at a later time. 
+ network_thread_->PostDelayedTask( + SafeTask(task_safety_.flag(), + [this, recheck_reason = result.recheck_event->reason]() { + SortAndSwitchToBestConnection(recheck_reason); + }), + TimeDelta::Millis(result.recheck_event->recheck_delay_ms)); + } + + agent_.ForgetLearnedStateForConnections( + result.connections_to_forget_state_on); +} + +void WrappingActiveIceController::UpdateStateOnConnectionsResorted() { + RTC_DCHECK_RUN_ON(network_thread_); + PruneConnections(); + + // Update the internal state of the ICE agentl. + agent_.UpdateState(); + + // Also possibly start pinging. + // We could start pinging if: + // * The first connection was created. + // * ICE credentials were provided. + // * A TCP connection became connected. + MaybeStartPinging(); +} + +void WrappingActiveIceController::PruneConnections() { + RTC_DCHECK_RUN_ON(network_thread_); + + // The controlled side can prune only if the selected connection has been + // nominated because otherwise it may prune the connection that will be + // selected by the controlling side. + // TODO(honghaiz): This is not enough to prevent a connection from being + // pruned too early because with aggressive nomination, the controlling side + // will nominate every connection until it becomes writable. + if (agent_.GetIceRole() == ICEROLE_CONTROLLING || + (selected_connection_ && selected_connection_->nominated())) { + std::vector connections_to_prune = + wrapped_->PruneConnections(); + agent_.PruneConnections(connections_to_prune); + } +} + +// Only for unit tests +const Connection* WrappingActiveIceController::FindNextPingableConnection() { + RTC_DCHECK_RUN_ON(network_thread_); + return wrapped_->FindNextPingableConnection(); +} + +} // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/wrapping_active_ice_controller.h b/TMessagesProj/jni/voip/webrtc/p2p/base/wrapping_active_ice_controller.h new file mode 100644 index 0000000000..449c0f0ee1 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/wrapping_active_ice_controller.h @@ -0,0 +1,97 @@ +/* + * Copyright 2022 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef P2P_BASE_WRAPPING_ACTIVE_ICE_CONTROLLER_H_ +#define P2P_BASE_WRAPPING_ACTIVE_ICE_CONTROLLER_H_ + +#include + +#include "absl/types/optional.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "p2p/base/active_ice_controller_interface.h" +#include "p2p/base/connection.h" +#include "p2p/base/ice_agent_interface.h" +#include "p2p/base/ice_controller_factory_interface.h" +#include "p2p/base/ice_controller_interface.h" +#include "p2p/base/ice_switch_reason.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/transport_description.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" + +namespace cricket { + +// WrappingActiveIceController provides the functionality of a legacy passive +// ICE controller but packaged as an active ICE Controller. +class WrappingActiveIceController : public ActiveIceControllerInterface { + public: + // Constructs an active ICE controller wrapping an already constructed legacy + // ICE controller. 
Does not take ownership of the ICE agent, which must + // already exist and outlive the ICE controller. + WrappingActiveIceController(IceAgentInterface* ice_agent, + std::unique_ptr wrapped); + // Constructs an active ICE controller that wraps over a legacy ICE + // controller. The legacy ICE controller is constructed through a factory, if + // one is supplied. If not, a default BasicIceController is wrapped instead. + // Does not take ownership of the ICE agent, which must already exist and + // outlive the ICE controller. + WrappingActiveIceController( + IceAgentInterface* ice_agent, + IceControllerFactoryInterface* wrapped_factory, + const IceControllerFactoryArgs& wrapped_factory_args); + virtual ~WrappingActiveIceController(); + + void SetIceConfig(const IceConfig& config) override; + bool GetUseCandidateAttribute(const Connection* connection, + NominationMode mode, + IceMode remote_ice_mode) const override; + + void OnConnectionAdded(const Connection* connection) override; + void OnConnectionPinged(const Connection* connection) override; + void OnConnectionUpdated(const Connection* connection) override; + void OnConnectionSwitched(const Connection* connection) override; + void OnConnectionDestroyed(const Connection* connection) override; + + void OnSortAndSwitchRequest(IceSwitchReason reason) override; + void OnImmediateSortAndSwitchRequest(IceSwitchReason reason) override; + bool OnImmediateSwitchRequest(IceSwitchReason reason, + const Connection* selected) override; + + // Only for unit tests + const Connection* FindNextPingableConnection() override; + + private: + void MaybeStartPinging(); + void SelectAndPingConnection(); + void HandlePingResult(IceControllerInterface::PingResult result); + + void SortAndSwitchToBestConnection(IceSwitchReason reason); + void HandleSwitchResult(IceSwitchReason reason_for_switch, + IceControllerInterface::SwitchResult result); + void UpdateStateOnConnectionsResorted(); + + void PruneConnections(); + + rtc::Thread* const network_thread_; + webrtc::ScopedTaskSafety task_safety_; + + bool started_pinging_ RTC_GUARDED_BY(network_thread_) = false; + bool sort_pending_ RTC_GUARDED_BY(network_thread_) = false; + const Connection* selected_connection_ RTC_GUARDED_BY(network_thread_) = + nullptr; + + std::unique_ptr wrapped_ + RTC_GUARDED_BY(network_thread_); + IceAgentInterface& agent_ RTC_GUARDED_BY(network_thread_); +}; + +} // namespace cricket + +#endif // P2P_BASE_WRAPPING_ACTIVE_ICE_CONTROLLER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.cc b/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.cc index 9967d9a449..5611403bb2 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.cc @@ -20,6 +20,10 @@ #include "absl/algorithm/container.h" #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/transport/field_trial_based_config.h" +#include "api/units/time_delta.h" #include "p2p/base/basic_packet_socket_factory.h" #include "p2p/base/port.h" #include "p2p/base/stun_port.h" @@ -27,17 +31,19 @@ #include "p2p/base/turn_port.h" #include "p2p/base/udp_port.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/helpers.h" #include "rtc_base/logging.h" -#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/network_constants.h" +#include "rtc_base/strings/string_builder.h" #include 
"rtc_base/trace_event.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" -using rtc::CreateRandomId; - namespace cricket { namespace { +using ::rtc::CreateRandomId; +using ::webrtc::SafeTask; +using ::webrtc::TimeDelta; const int PHASE_UDP = 0; const int PHASE_RELAY = 1; @@ -88,16 +94,17 @@ int ComparePort(const cricket::Port* a, const cricket::Port* b) { } struct NetworkFilter { - using Predicate = std::function; - NetworkFilter(Predicate pred, const std::string& description) - : predRemain([pred](rtc::Network* network) { return !pred(network); }), + using Predicate = std::function; + NetworkFilter(Predicate pred, absl::string_view description) + : predRemain( + [pred](const rtc::Network* network) { return !pred(network); }), description(description) {} Predicate predRemain; const std::string description; }; -using NetworkList = rtc::NetworkManager::NetworkList; -void FilterNetworks(NetworkList* networks, NetworkFilter filter) { +void FilterNetworks(std::vector* networks, + NetworkFilter filter) { auto start_to_remove = std::partition(networks->begin(), networks->end(), filter.predRemain); if (start_to_remove == networks->end()) { @@ -139,6 +146,29 @@ bool IsAllowedByCandidateFilter(const Candidate& c, uint32_t filter) { return false; } +std::string NetworksToString(const std::vector& networks) { + rtc::StringBuilder ost; + for (auto n : networks) { + ost << n->name() << " "; + } + return ost.Release(); +} + +bool IsDiversifyIpv6InterfacesEnabled( + const webrtc::FieldTrialsView* field_trials) { + // webrtc:14334: Improve IPv6 network resolution and candidate creation + if (field_trials && + field_trials->IsEnabled("WebRTC-IPv6NetworkResolutionFixes")) { + webrtc::FieldTrialParameter diversify_ipv6_interfaces( + "DiversifyIpv6Interfaces", false); + webrtc::ParseFieldTrial( + {&diversify_ipv6_interfaces}, + field_trials->Lookup("WebRTC-IPv6NetworkResolutionFixes")); + return diversify_ipv6_interfaces; + } + return false; +} + } // namespace const uint32_t DISABLE_ALL_PHASES = @@ -150,35 +180,60 @@ BasicPortAllocator::BasicPortAllocator( rtc::NetworkManager* network_manager, rtc::PacketSocketFactory* socket_factory, webrtc::TurnCustomizer* customizer, - RelayPortFactoryInterface* relay_port_factory) - : network_manager_(network_manager), socket_factory_(socket_factory) { - InitRelayPortFactory(relay_port_factory); + RelayPortFactoryInterface* relay_port_factory, + const webrtc::FieldTrialsView* field_trials) + : field_trials_(field_trials), + network_manager_(network_manager), + socket_factory_(socket_factory) { + Init(relay_port_factory); RTC_DCHECK(relay_port_factory_ != nullptr); RTC_DCHECK(network_manager_ != nullptr); - RTC_DCHECK(socket_factory_ != nullptr); + RTC_CHECK(socket_factory_ != nullptr); SetConfiguration(ServerAddresses(), std::vector(), 0, webrtc::NO_PRUNE, customizer); } -BasicPortAllocator::BasicPortAllocator(rtc::NetworkManager* network_manager) - : network_manager_(network_manager), socket_factory_(nullptr) { - InitRelayPortFactory(nullptr); +BasicPortAllocator::BasicPortAllocator( + rtc::NetworkManager* network_manager, + std::unique_ptr owned_socket_factory, + const webrtc::FieldTrialsView* field_trials) + : field_trials_(field_trials), + network_manager_(network_manager), + socket_factory_(std::move(owned_socket_factory)) { + Init(nullptr); RTC_DCHECK(relay_port_factory_ != nullptr); RTC_DCHECK(network_manager_ != nullptr); + RTC_CHECK(socket_factory_ != nullptr); } 
-BasicPortAllocator::BasicPortAllocator(rtc::NetworkManager* network_manager, - const ServerAddresses& stun_servers) - : BasicPortAllocator(network_manager, - /*socket_factory=*/nullptr, - stun_servers) {} +BasicPortAllocator::BasicPortAllocator( + rtc::NetworkManager* network_manager, + std::unique_ptr owned_socket_factory, + const ServerAddresses& stun_servers, + const webrtc::FieldTrialsView* field_trials) + : field_trials_(field_trials), + network_manager_(network_manager), + socket_factory_(std::move(owned_socket_factory)) { + Init(nullptr); + RTC_DCHECK(relay_port_factory_ != nullptr); + RTC_DCHECK(network_manager_ != nullptr); + RTC_CHECK(socket_factory_ != nullptr); + SetConfiguration(stun_servers, std::vector(), 0, + webrtc::NO_PRUNE, nullptr); +} -BasicPortAllocator::BasicPortAllocator(rtc::NetworkManager* network_manager, - rtc::PacketSocketFactory* socket_factory, - const ServerAddresses& stun_servers) - : network_manager_(network_manager), socket_factory_(socket_factory) { - InitRelayPortFactory(nullptr); +BasicPortAllocator::BasicPortAllocator( + rtc::NetworkManager* network_manager, + rtc::PacketSocketFactory* socket_factory, + const ServerAddresses& stun_servers, + const webrtc::FieldTrialsView* field_trials) + : field_trials_(field_trials), + network_manager_(network_manager), + socket_factory_(socket_factory) { + Init(nullptr); RTC_DCHECK(relay_port_factory_ != nullptr); + RTC_DCHECK(network_manager_ != nullptr); + RTC_CHECK(socket_factory_ != nullptr); SetConfiguration(stun_servers, std::vector(), 0, webrtc::NO_PRUNE, nullptr); } @@ -230,13 +285,14 @@ int BasicPortAllocator::GetNetworkIgnoreMask() const { } PortAllocatorSession* BasicPortAllocator::CreateSessionInternal( - const std::string& content_name, + absl::string_view content_name, int component, - const std::string& ice_ufrag, - const std::string& ice_pwd) { + absl::string_view ice_ufrag, + absl::string_view ice_pwd) { CheckRunOnValidThreadAndInitialized(); PortAllocatorSession* session = new BasicPortAllocatorSession( - this, content_name, component, ice_ufrag, ice_pwd); + this, std::string(content_name), component, std::string(ice_ufrag), + std::string(ice_pwd)); session->SignalIceRegathering.connect(this, &BasicPortAllocator::OnIceRegathering); return session; @@ -250,8 +306,7 @@ void BasicPortAllocator::AddTurnServer(const RelayServerConfig& turn_server) { turn_port_prune_policy(), turn_customizer()); } -void BasicPortAllocator::InitRelayPortFactory( - RelayPortFactoryInterface* relay_port_factory) { +void BasicPortAllocator::Init(RelayPortFactoryInterface* relay_port_factory) { if (relay_port_factory != nullptr) { relay_port_factory_ = relay_port_factory; } else { @@ -263,10 +318,10 @@ void BasicPortAllocator::InitRelayPortFactory( // BasicPortAllocatorSession BasicPortAllocatorSession::BasicPortAllocatorSession( BasicPortAllocator* allocator, - const std::string& content_name, + absl::string_view content_name, int component, - const std::string& ice_ufrag, - const std::string& ice_pwd) + absl::string_view ice_ufrag, + absl::string_view ice_pwd) : PortAllocatorSession(content_name, component, ice_ufrag, @@ -376,14 +431,9 @@ void BasicPortAllocatorSession::SetCandidateFilter(uint32_t filter) { void BasicPortAllocatorSession::StartGettingPorts() { RTC_DCHECK_RUN_ON(network_thread_); state_ = SessionState::GATHERING; - if (!socket_factory_) { - owned_socket_factory_.reset( - new rtc::BasicPacketSocketFactory(network_thread_->socketserver())); - socket_factory_ = owned_socket_factory_.get(); - } - 
network_thread_->PostTask(webrtc::ToQueuedTask( - network_safety_, [this] { GetPortConfigurations(); })); + network_thread_->PostTask( + SafeTask(network_safety_.flag(), [this] { GetPortConfigurations(); })); RTC_LOG(LS_INFO) << "Start getting ports with turn_port_prune_policy " << turn_port_prune_policy_; @@ -404,7 +454,7 @@ void BasicPortAllocatorSession::ClearGettingPorts() { sequences_[i]->Stop(); } network_thread_->PostTask( - webrtc::ToQueuedTask(network_safety_, [this] { OnConfigStop(); })); + SafeTask(network_safety_.flag(), [this] { OnConfigStop(); })); state_ = SessionState::CLEARED; } @@ -423,11 +473,11 @@ bool BasicPortAllocatorSession::IsStopped() const { return state_ == SessionState::STOPPED; } -std::vector BasicPortAllocatorSession::GetFailedNetworks() { +std::vector +BasicPortAllocatorSession::GetFailedNetworks() { RTC_DCHECK_RUN_ON(network_thread_); - std::vector networks = GetNetworks(); - + std::vector networks = GetNetworks(); // A network interface may have both IPv4 and IPv6 networks. Only if // neither of the networks has any connections, the network interface // is considered failed and need to be regathered on. @@ -441,7 +491,7 @@ std::vector BasicPortAllocatorSession::GetFailedNetworks() { networks.erase( std::remove_if(networks.begin(), networks.end(), - [networks_with_connection](rtc::Network* network) { + [networks_with_connection](const rtc::Network* network) { // If a network does not have any connection, it is // considered failed. return networks_with_connection.find(network->name()) != @@ -455,7 +505,7 @@ void BasicPortAllocatorSession::RegatherOnFailedNetworks() { RTC_DCHECK_RUN_ON(network_thread_); // Find the list of networks that have no connection. - std::vector failed_networks = GetFailedNetworks(); + std::vector failed_networks = GetFailedNetworks(); if (failed_networks.empty()) { return; } @@ -478,7 +528,7 @@ void BasicPortAllocatorSession::RegatherOnFailedNetworks() { } void BasicPortAllocatorSession::Regather( - const std::vector& networks, + const std::vector& networks, bool disable_equivalent_phases, IceRegatheringReason reason) { RTC_DCHECK_RUN_ON(network_thread_); @@ -601,8 +651,9 @@ void BasicPortAllocatorSession::UpdateIceParametersInternal() { void BasicPortAllocatorSession::GetPortConfigurations() { RTC_DCHECK_RUN_ON(network_thread_); - auto config = std::make_unique(allocator_->stun_servers(), - username(), password()); + auto config = std::make_unique( + allocator_->stun_servers(), username(), password(), + allocator()->field_trials()); for (const RelayServerConfig& turn_server : allocator_->turn_servers()) { config->AddRelay(turn_server); @@ -618,8 +669,8 @@ void BasicPortAllocatorSession::ConfigReady(PortConfiguration* config) { void BasicPortAllocatorSession::ConfigReady( std::unique_ptr config) { RTC_DCHECK_RUN_ON(network_thread_); - network_thread_->PostTask(webrtc::ToQueuedTask( - network_safety_, [this, config = std::move(config)]() mutable { + network_thread_->PostTask(SafeTask( + network_safety_.flag(), [this, config = std::move(config)]() mutable { OnConfigReady(std::move(config)); })); } @@ -667,8 +718,8 @@ void BasicPortAllocatorSession::OnConfigStop() { void BasicPortAllocatorSession::AllocatePorts() { RTC_DCHECK_RUN_ON(network_thread_); - network_thread_->PostTask(webrtc::ToQueuedTask( - network_safety_, [this, allocation_epoch = allocation_epoch_] { + network_thread_->PostTask(SafeTask( + network_safety_.flag(), [this, allocation_epoch = allocation_epoch_] { OnAllocate(allocation_epoch); })); } @@ -686,9 +737,9 @@ void 
BasicPortAllocatorSession::OnAllocate(int allocation_epoch) { allocation_started_ = true; } -std::vector BasicPortAllocatorSession::GetNetworks() { +std::vector BasicPortAllocatorSession::GetNetworks() { RTC_DCHECK_RUN_ON(network_thread_); - std::vector networks; + std::vector networks; rtc::NetworkManager* network_manager = allocator_->network_manager(); RTC_DCHECK(network_manager != nullptr); // If the network permission state is BLOCKED, we just act as if the flag has @@ -702,35 +753,41 @@ std::vector BasicPortAllocatorSession::GetNetworks() { // traffic by OS is also used here to avoid any local or public IP leakage // during stun process. if (flags() & PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION) { - network_manager->GetAnyAddressNetworks(&networks); + networks = network_manager->GetAnyAddressNetworks(); } else { - network_manager->GetNetworks(&networks); + networks = network_manager->GetNetworks(); // If network enumeration fails, use the ANY address as a fallback, so we // can at least try gathering candidates using the default route chosen by // the OS. Or, if the PORTALLOCATOR_ENABLE_ANY_ADDRESS_PORTS flag is // set, we'll use ANY address candidates either way. - if (networks.empty() || flags() & PORTALLOCATOR_ENABLE_ANY_ADDRESS_PORTS) { - network_manager->GetAnyAddressNetworks(&networks); + if (networks.empty() || + (flags() & PORTALLOCATOR_ENABLE_ANY_ADDRESS_PORTS)) { + std::vector any_address_networks = + network_manager->GetAnyAddressNetworks(); + networks.insert(networks.end(), any_address_networks.begin(), + any_address_networks.end()); } } // Filter out link-local networks if needed. if (flags() & PORTALLOCATOR_DISABLE_LINK_LOCAL_NETWORKS) { NetworkFilter link_local_filter( - [](rtc::Network* network) { return IPIsLinkLocal(network->prefix()); }, + [](const rtc::Network* network) { + return IPIsLinkLocal(network->prefix()); + }, "link-local"); FilterNetworks(&networks, link_local_filter); } // Do some more filtering, depending on the network ignore mask and "disable // costly networks" flag. NetworkFilter ignored_filter( - [this](rtc::Network* network) { + [this](const rtc::Network* network) { return allocator_->GetNetworkIgnoreMask() & network->type(); }, "ignored"); FilterNetworks(&networks, ignored_filter); if (flags() & PORTALLOCATOR_DISABLE_COSTLY_NETWORKS) { uint16_t lowest_cost = rtc::kNetworkCostMax; - for (rtc::Network* network : networks) { + for (const rtc::Network* network : networks) { // Don't determine the lowest cost from a link-local network. // On iOS, a device connected to the computer will get a link-local // network for communicating with the computer, however this network can't @@ -738,11 +795,13 @@ std::vector BasicPortAllocatorSession::GetNetworks() { if (rtc::IPIsLinkLocal(network->GetBestIP())) { continue; } - lowest_cost = std::min(lowest_cost, network->GetCost()); + lowest_cost = std::min( + lowest_cost, network->GetCost(*allocator()->field_trials())); } NetworkFilter costly_filter( - [lowest_cost](rtc::Network* network) { - return network->GetCost() > lowest_cost + rtc::kNetworkCostLow; + [lowest_cost, this](const rtc::Network* network) { + return network->GetCost(*allocator()->field_trials()) > + lowest_cost + rtc::kNetworkCostLow; }, "costly"); FilterNetworks(&networks, costly_filter); @@ -756,33 +815,87 @@ std::vector BasicPortAllocatorSession::GetNetworks() { // hard to define what that means though; it's not just "lowest cost". 
// Alternatively, we could just focus on making our ICE pinging logic smarter // such that this filtering isn't necessary in the first place. - int ipv6_networks = 0; - for (auto it = networks.begin(); it != networks.end();) { - if ((*it)->prefix().family() == AF_INET6) { - if (ipv6_networks >= allocator_->max_ipv6_networks()) { + const webrtc::FieldTrialsView* field_trials = allocator_->field_trials(); + if (IsDiversifyIpv6InterfacesEnabled(field_trials)) { + std::vector ipv6_networks; + for (auto it = networks.begin(); it != networks.end();) { + if ((*it)->prefix().family() == AF_INET6) { + ipv6_networks.push_back(*it); it = networks.erase(it); continue; - } else { - ++ipv6_networks; } + ++it; + } + ipv6_networks = + SelectIPv6Networks(ipv6_networks, allocator_->max_ipv6_networks()); + networks.insert(networks.end(), ipv6_networks.begin(), ipv6_networks.end()); + } else { + int ipv6_networks = 0; + for (auto it = networks.begin(); it != networks.end();) { + if ((*it)->prefix().family() == AF_INET6) { + if (ipv6_networks >= allocator_->max_ipv6_networks()) { + it = networks.erase(it); + continue; + } else { + ++ipv6_networks; + } + } + ++it; } - ++it; } return networks; } +std::vector BasicPortAllocatorSession::SelectIPv6Networks( + std::vector& all_ipv6_networks, + int max_ipv6_networks) { + if (static_cast(all_ipv6_networks.size()) <= max_ipv6_networks) { + return all_ipv6_networks; + } + // Adapter types are placed in priority order. Cellular type is an alias of + // cellular, 2G..5G types. + std::vector adapter_types = { + rtc::ADAPTER_TYPE_ETHERNET, rtc::ADAPTER_TYPE_LOOPBACK, + rtc::ADAPTER_TYPE_WIFI, rtc::ADAPTER_TYPE_CELLULAR, + rtc::ADAPTER_TYPE_VPN, rtc::ADAPTER_TYPE_UNKNOWN, + rtc::ADAPTER_TYPE_ANY}; + int adapter_types_cnt = adapter_types.size(); + std::vector selected_networks; + int adapter_types_pos = 0; + + while (static_cast(selected_networks.size()) < max_ipv6_networks && + adapter_types_pos < adapter_types_cnt * max_ipv6_networks) { + int network_pos = 0; + while (network_pos < static_cast(all_ipv6_networks.size())) { + if (adapter_types[adapter_types_pos % adapter_types_cnt] == + all_ipv6_networks[network_pos]->type() || + (adapter_types[adapter_types_pos % adapter_types_cnt] == + rtc::ADAPTER_TYPE_CELLULAR && + all_ipv6_networks[network_pos]->IsCellular())) { + selected_networks.push_back(all_ipv6_networks[network_pos]); + all_ipv6_networks.erase(all_ipv6_networks.begin() + network_pos); + break; + } + network_pos++; + } + adapter_types_pos++; + } + + return selected_networks; +} + // For each network, see if we have a sequence that covers it already. If not, // create a new sequence to create the appropriate ports. void BasicPortAllocatorSession::DoAllocate(bool disable_equivalent) { RTC_DCHECK_RUN_ON(network_thread_); bool done_signal_needed = false; - std::vector networks = GetNetworks(); + std::vector networks = GetNetworks(); if (networks.empty()) { RTC_LOG(LS_WARNING) << "Machine has no networks; no ports will be allocated"; done_signal_needed = true; } else { - RTC_LOG(LS_INFO) << "Allocate ports on " << networks.size() << " networks"; + RTC_LOG(LS_INFO) << "Allocate ports on " << NetworksToString(networks); PortConfiguration* config = configs_.empty() ? 
nullptr : configs_.back().get(); for (uint32_t i = 0; i < networks.size(); ++i) { @@ -836,15 +949,16 @@ void BasicPortAllocatorSession::DoAllocate(bool disable_equivalent) { } } if (done_signal_needed) { - network_thread_->PostTask(webrtc::ToQueuedTask( - network_safety_, [this] { OnAllocationSequenceObjectsCreated(); })); + network_thread_->PostTask(SafeTask(network_safety_.flag(), [this] { + OnAllocationSequenceObjectsCreated(); + })); } } void BasicPortAllocatorSession::OnNetworksChanged() { RTC_DCHECK_RUN_ON(network_thread_); - std::vector networks = GetNetworks(); - std::vector failed_networks; + std::vector networks = GetNetworks(); + std::vector failed_networks; for (AllocationSequence* sequence : sequences_) { // Mark the sequence as "network failed" if its network is not in // `networks`. @@ -877,7 +991,7 @@ void BasicPortAllocatorSession::OnNetworksChanged() { } void BasicPortAllocatorSession::DisableEquivalentPhases( - rtc::Network* network, + const rtc::Network* network, PortConfiguration* config, uint32_t* flags) { RTC_DCHECK_RUN_ON(network_thread_); @@ -999,7 +1113,7 @@ void BasicPortAllocatorSession::OnCandidateError( } Port* BasicPortAllocatorSession::GetBestTurnPortForNetwork( - const std::string& network_name) const { + absl::string_view network_name) const { RTC_DCHECK_RUN_ON(network_thread_); Port* best_turn_port = nullptr; for (const PortData& data : ports_) { @@ -1190,7 +1304,7 @@ BasicPortAllocatorSession::PortData* BasicPortAllocatorSession::FindPort( std::vector BasicPortAllocatorSession::GetUnprunedPorts( - const std::vector& networks) { + const std::vector& networks) { RTC_DCHECK_RUN_ON(network_thread_); std::vector unpruned_ports; for (PortData& port : ports_) { @@ -1237,7 +1351,7 @@ void BasicPortAllocator::SetVpnList( AllocationSequence::AllocationSequence( BasicPortAllocatorSession* session, - rtc::Network* network, + const rtc::Network* network, PortConfiguration* config, uint32_t flags, std::function port_allocation_complete_callback) @@ -1279,7 +1393,7 @@ void AllocationSequence::OnNetworkFailed() { Stop(); } -void AllocationSequence::DisableEquivalentPhases(rtc::Network* network, +void AllocationSequence::DisableEquivalentPhases(const rtc::Network* network, PortConfiguration* config, uint32_t* flags) { if (network_failed_) { @@ -1354,8 +1468,8 @@ void AllocationSequence::DisableEquivalentPhases(rtc::Network* network, void AllocationSequence::Start() { state_ = kRunning; - session_->network_thread()->PostTask(webrtc::ToQueuedTask( - safety_, [this, epoch = epoch_] { Process(epoch); })); + session_->network_thread()->PostTask( + SafeTask(safety_.flag(), [this, epoch = epoch_] { Process(epoch); })); // Take a snapshot of the best IP, so that when DisableEquivalentPhases is // called next time, we enable all phases if the best IP has since changed. previous_best_ip_ = network_->GetBestIP(); @@ -1403,9 +1517,8 @@ void AllocationSequence::Process(int epoch) { if (state() == kRunning) { ++phase_; session_->network_thread()->PostDelayedTask( - webrtc::ToQueuedTask(safety_, - [this, epoch = epoch_] { Process(epoch); }), - session_->allocator()->step_delay()); + SafeTask(safety_.flag(), [this, epoch = epoch_] { Process(epoch); }), + TimeDelta::Millis(session_->allocator()->step_delay())); } else { // No allocation steps needed further if all phases in AllocationSequence // are completed. 
Cause further Process calls in the previous epoch to be @@ -1431,17 +1544,20 @@ void AllocationSequence::CreateUDPPorts() { session_->network_thread(), session_->socket_factory(), network_, udp_socket_.get(), session_->username(), session_->password(), emit_local_candidate_for_anyaddress, - session_->allocator()->stun_candidate_keepalive_interval()); + session_->allocator()->stun_candidate_keepalive_interval(), + session_->allocator()->field_trials()); } else { port = UDPPort::Create( session_->network_thread(), session_->socket_factory(), network_, session_->allocator()->min_port(), session_->allocator()->max_port(), session_->username(), session_->password(), emit_local_candidate_for_anyaddress, - session_->allocator()->stun_candidate_keepalive_interval()); + session_->allocator()->stun_candidate_keepalive_interval(), + session_->allocator()->field_trials()); } if (port) { + port->SetIceTiebreaker(session_->ice_tiebreaker()); // If shared socket is enabled, STUN candidate will be allocated by the // UDPPort. if (IsFlagSet(PORTALLOCATOR_ENABLE_SHARED_SOCKET)) { @@ -1474,8 +1590,10 @@ void AllocationSequence::CreateTCPPorts() { session_->network_thread(), session_->socket_factory(), network_, session_->allocator()->min_port(), session_->allocator()->max_port(), session_->username(), session_->password(), - session_->allocator()->allow_tcp_listen()); + session_->allocator()->allow_tcp_listen(), + session_->allocator()->field_trials()); if (port) { + port->SetIceTiebreaker(session_->ice_tiebreaker()); session_->AddAllocatedPort(port.release(), this); // Since TCPPort is not created using shared socket, `port` will not be // added to the dequeue. @@ -1502,8 +1620,10 @@ void AllocationSequence::CreateStunPorts() { session_->network_thread(), session_->socket_factory(), network_, session_->allocator()->min_port(), session_->allocator()->max_port(), session_->username(), session_->password(), config_->StunServers(), - session_->allocator()->stun_candidate_keepalive_interval()); + session_->allocator()->stun_candidate_keepalive_interval(), + session_->allocator()->field_trials()); if (port) { + port->SetIceTiebreaker(session_->ice_tiebreaker()); session_->AddAllocatedPort(port.release(), this); // Since StunPort is not created using shared socket, `port` will not be // added to the dequeue. @@ -1564,6 +1684,7 @@ void AllocationSequence::CreateTurnPort(const RelayServerConfig& config) { args.server_address = &(*relay_port); args.config = &config; args.turn_customizer = session_->allocator()->turn_customizer(); + args.field_trials = session_->allocator()->field_trials(); std::unique_ptr port; // Shared socket mode must be enabled only for UDP based ports. 
Hence @@ -1599,6 +1720,7 @@ void AllocationSequence::CreateTurnPort(const RelayServerConfig& config) { } } RTC_DCHECK(port != NULL); + port->SetIceTiebreaker(session_->ice_tiebreaker()); session_->AddAllocatedPort(port.release(), this); } } @@ -1657,24 +1779,19 @@ void AllocationSequence::OnPortDestroyed(PortInterface* port) { } } -// PortConfiguration -PortConfiguration::PortConfiguration(const rtc::SocketAddress& stun_address, - const std::string& username, - const std::string& password) - : stun_address(stun_address), username(username), password(password) { - if (!stun_address.IsNil()) - stun_servers.insert(stun_address); -} - -PortConfiguration::PortConfiguration(const ServerAddresses& stun_servers, - const std::string& username, - const std::string& password) +PortConfiguration::PortConfiguration( + const ServerAddresses& stun_servers, + absl::string_view username, + absl::string_view password, + const webrtc::FieldTrialsView* field_trials) : stun_servers(stun_servers), username(username), password(password) { if (!stun_servers.empty()) stun_address = *(stun_servers.begin()); // Note that this won't change once the config is initialized. - use_turn_server_as_stun_server_disabled = - webrtc::field_trial::IsDisabled("WebRTC-UseTurnServerAsStunServer"); + if (field_trials) { + use_turn_server_as_stun_server_disabled = + field_trials->IsDisabled("WebRTC-UseTurnServerAsStunServer"); + } } ServerAddresses PortConfiguration::StunServers() { diff --git a/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h b/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h index c043cae6e4..173d789545 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h @@ -15,14 +15,17 @@ #include #include +#include "absl/strings/string_view.h" +#include "api/field_trials_view.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/turn_customizer.h" #include "p2p/base/port_allocator.h" #include "p2p/client/relay_port_factory_interface.h" #include "p2p/client/turn_port_factory.h" #include "rtc_base/checks.h" +#include "rtc_base/memory/always_valid_pointer.h" #include "rtc_base/network.h" #include "rtc_base/system/rtc_export.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" @@ -30,18 +33,27 @@ namespace cricket { class RTC_EXPORT BasicPortAllocator : public PortAllocator { public: - // note: The (optional) relay_port_factory is owned by caller - // and must have a life time that exceeds that of BasicPortAllocator. + // The NetworkManager is a mandatory argument. The other arguments are + // optional. All pointers are owned by caller and must have a life time + // that exceeds that of BasicPortAllocator. 
BasicPortAllocator(rtc::NetworkManager* network_manager, rtc::PacketSocketFactory* socket_factory, webrtc::TurnCustomizer* customizer = nullptr, - RelayPortFactoryInterface* relay_port_factory = nullptr); - explicit BasicPortAllocator(rtc::NetworkManager* network_manager); - BasicPortAllocator(rtc::NetworkManager* network_manager, - const ServerAddresses& stun_servers); + RelayPortFactoryInterface* relay_port_factory = nullptr, + const webrtc::FieldTrialsView* field_trials = nullptr); + BasicPortAllocator( + rtc::NetworkManager* network_manager, + std::unique_ptr owned_socket_factory, + const webrtc::FieldTrialsView* field_trials = nullptr); + BasicPortAllocator( + rtc::NetworkManager* network_manager, + std::unique_ptr owned_socket_factory, + const ServerAddresses& stun_servers, + const webrtc::FieldTrialsView* field_trials = nullptr); BasicPortAllocator(rtc::NetworkManager* network_manager, rtc::PacketSocketFactory* socket_factory, - const ServerAddresses& stun_servers); + const ServerAddresses& stun_servers, + const webrtc::FieldTrialsView* field_trials = nullptr); ~BasicPortAllocator() override; // Set to kDefaultNetworkIgnoreMask by default. @@ -57,14 +69,14 @@ class RTC_EXPORT BasicPortAllocator : public PortAllocator { // creates its own socket factory. rtc::PacketSocketFactory* socket_factory() { CheckRunOnValidThreadIfInitialized(); - return socket_factory_; + return socket_factory_.get(); } PortAllocatorSession* CreateSessionInternal( - const std::string& content_name, + absl::string_view content_name, int component, - const std::string& ice_ufrag, - const std::string& ice_pwd) override; + absl::string_view ice_ufrag, + absl::string_view ice_pwd) override; // Convenience method that adds a TURN server to the configuration. void AddTurnServer(const RelayServerConfig& turn_server); @@ -76,17 +88,25 @@ class RTC_EXPORT BasicPortAllocator : public PortAllocator { void SetVpnList(const std::vector& vpn_list) override; + const webrtc::FieldTrialsView* field_trials() const { + return field_trials_.get(); + } + private: void OnIceRegathering(PortAllocatorSession* session, IceRegatheringReason reason); // This function makes sure that relay_port_factory_ is set properly. - void InitRelayPortFactory(RelayPortFactoryInterface* relay_port_factory); + void Init(RelayPortFactoryInterface* relay_port_factory); bool MdnsObfuscationEnabled() const override; + webrtc::AlwaysValidPointer + field_trials_; rtc::NetworkManager* network_manager_; - rtc::PacketSocketFactory* socket_factory_; + const webrtc::AlwaysValidPointerNoDefault + socket_factory_; int network_ignore_mask_ = rtc::kDefaultNetworkIgnoreMask; // This is the factory being used. 
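The new field_trials_ and socket_factory_ members above are wrapped in webrtc::AlwaysValidPointer / AlwaysValidPointerNoDefault, a wrapper that guarantees a non-null pointer by either borrowing a caller-owned object or owning one itself, so accessors like field_trials() never hand out null. Below is a simplified, standalone sketch of that idea only; every *Sketch name is an illustrative stand-in, not the real rtc_base/memory/always_valid_pointer.h template.

#include <cassert>
#include <memory>

// Simplified stand-in for a field-trials interface; not webrtc::FieldTrialsView.
struct FieldTrialsSketch {
  virtual ~FieldTrialsSketch() = default;
  virtual bool IsDisabled(const char* /*key*/) const { return false; }
};

// Borrow `ptr` when provided, otherwise lazily own a default instance, so
// get() never returns null. Illustrative only.
template <typename Interface, typename Default = Interface>
class AlwaysValidPointerSketch {
 public:
  explicit AlwaysValidPointerSketch(const Interface* ptr)
      : owned_(ptr ? nullptr : std::make_unique<Default>()),
        ptr_(ptr ? ptr : owned_.get()) {}

  const Interface* get() const { return ptr_; }
  const Interface* operator->() const { return ptr_; }

 private:
  std::unique_ptr<Default> owned_;  // Set only when nothing was injected.
  const Interface* ptr_;            // Never null after construction.
};

int main() {
  AlwaysValidPointerSketch<FieldTrialsSketch> defaulted(nullptr);
  FieldTrialsSketch injected;
  AlwaysValidPointerSketch<FieldTrialsSketch> borrowed(&injected);
  assert(defaulted.get() != nullptr);   // Fallback instance was created.
  assert(borrowed.get() == &injected);  // Caller-owned object is borrowed.
  assert(!defaulted->IsDisabled("WebRTC-UseTurnServerAsStunServer"));
  return 0;
}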
@@ -112,10 +132,10 @@ enum class SessionState { class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { public: BasicPortAllocatorSession(BasicPortAllocator* allocator, - const std::string& content_name, + absl::string_view content_name, int component, - const std::string& ice_ufrag, - const std::string& ice_pwd); + absl::string_view ice_ufrag, + absl::string_view ice_pwd); ~BasicPortAllocatorSession() override; virtual BasicPortAllocator* allocator(); @@ -148,6 +168,9 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { void SetStunKeepaliveIntervalForReadyPorts( const absl::optional& stun_keepalive_interval) override; void PruneAllPorts() override; + static std::vector SelectIPv6Networks( + std::vector& all_ipv6_networks, + int max_ipv6_networks); protected: void UpdateIceParametersInternal() override; @@ -223,7 +246,7 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { void DoAllocate(bool disable_equivalent_phases); void OnNetworksChanged(); void OnAllocationSequenceObjectsCreated(); - void DisableEquivalentPhases(rtc::Network* network, + void DisableEquivalentPhases(const rtc::Network* network, PortConfiguration* config, uint32_t* flags); void AddAllocatedPort(Port* port, AllocationSequence* seq); @@ -236,9 +259,9 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { void MaybeSignalCandidatesAllocationDone(); void OnPortAllocationComplete(); PortData* FindPort(Port* port); - std::vector GetNetworks(); - std::vector GetFailedNetworks(); - void Regather(const std::vector& networks, + std::vector GetNetworks(); + std::vector GetFailedNetworks(); + void Regather(const std::vector& networks, bool disable_equivalent_phases, IceRegatheringReason reason); @@ -246,7 +269,7 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { bool CandidatePairable(const Candidate& c, const Port* port) const; std::vector GetUnprunedPorts( - const std::vector& networks); + const std::vector& networks); // Prunes ports and signal the remote side to remove the candidates that // were previously signaled from these ports. void PrunePortsAndRemoveCandidates( @@ -255,14 +278,13 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { // append to `candidates`. void GetCandidatesFromPort(const PortData& data, std::vector* candidates) const; - Port* GetBestTurnPortForNetwork(const std::string& network_name) const; + Port* GetBestTurnPortForNetwork(absl::string_view network_name) const; // Returns true if at least one TURN port is pruned. bool PruneTurnPorts(Port* newly_pairable_turn_port); bool PruneNewlyPairableTurnPort(PortData* newly_pairable_turn_port); BasicPortAllocator* allocator_; rtc::Thread* network_thread_; - std::unique_ptr owned_socket_factory_; rtc::PacketSocketFactory* socket_factory_; bool allocation_started_; bool network_manager_started_; @@ -294,14 +316,10 @@ struct RTC_EXPORT PortConfiguration { typedef std::vector RelayList; RelayList relays; - // TODO(jiayl): remove this ctor when Chrome is updated. 
- PortConfiguration(const rtc::SocketAddress& stun_address, - const std::string& username, - const std::string& password); - PortConfiguration(const ServerAddresses& stun_servers, - const std::string& username, - const std::string& password); + absl::string_view username, + absl::string_view password, + const webrtc::FieldTrialsView* field_trials = nullptr); // Returns addresses of both the explicitly configured STUN servers, // and TURN servers that should be used as STUN servers. @@ -343,7 +361,7 @@ class AllocationSequence : public sigslot::has_slots<> { // event to trigger signal. This can also be achieved by starting a timer in // BPAS, but this is less deterministic. AllocationSequence(BasicPortAllocatorSession* session, - rtc::Network* network, + const rtc::Network* network, PortConfiguration* config, uint32_t flags, std::function port_allocation_complete_callback); @@ -352,14 +370,14 @@ class AllocationSequence : public sigslot::has_slots<> { void OnNetworkFailed(); State state() const { return state_; } - rtc::Network* network() const { return network_; } + const rtc::Network* network() const { return network_; } bool network_failed() const { return network_failed_; } void set_network_failed() { network_failed_ = true; } // Disables the phases for a new sequence that this one already covers for an // equivalent network setup. - void DisableEquivalentPhases(rtc::Network* network, + void DisableEquivalentPhases(const rtc::Network* network, PortConfiguration* config, uint32_t* flags); @@ -392,7 +410,7 @@ class AllocationSequence : public sigslot::has_slots<> { BasicPortAllocatorSession* session_; bool network_failed_ = false; - rtc::Network* network_; + const rtc::Network* network_; // Compared with the new best IP in DisableEquivalentPhases. rtc::IPAddress previous_best_ip_; PortConfiguration* config_; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/client/relay_port_factory_interface.h b/TMessagesProj/jni/voip/webrtc/p2p/client/relay_port_factory_interface.h index d3884126a6..4eec5dbf28 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/client/relay_port_factory_interface.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/client/relay_port_factory_interface.h @@ -26,6 +26,7 @@ class Thread; namespace webrtc { class TurnCustomizer; +class FieldTrialsView; } // namespace webrtc namespace cricket { @@ -35,19 +36,17 @@ struct RelayServerConfig; // A struct containing arguments to RelayPortFactory::Create() struct CreateRelayPortArgs { - CreateRelayPortArgs(); rtc::Thread* network_thread; rtc::PacketSocketFactory* socket_factory; - rtc::Network* network; + const rtc::Network* network; const ProtocolAddress* server_address; const RelayServerConfig* config; std::string username; std::string password; - webrtc::TurnCustomizer* turn_customizer; + webrtc::TurnCustomizer* turn_customizer = nullptr; + const webrtc::FieldTrialsView* field_trials = nullptr; }; -inline CreateRelayPortArgs::CreateRelayPortArgs() {} - // A factory for creating RelayPort's. 
class RelayPortFactoryInterface { public: diff --git a/TMessagesProj/jni/voip/webrtc/p2p/client/turn_port_factory.cc b/TMessagesProj/jni/voip/webrtc/p2p/client/turn_port_factory.cc index feaada3a1c..555387dbbf 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/client/turn_port_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/client/turn_port_factory.cc @@ -23,10 +23,7 @@ TurnPortFactory::~TurnPortFactory() {} std::unique_ptr TurnPortFactory::Create( const CreateRelayPortArgs& args, rtc::AsyncPacketSocket* udp_socket) { - auto port = TurnPort::CreateUnique( - args.network_thread, args.socket_factory, args.network, udp_socket, - args.username, args.password, *args.server_address, - args.config->credentials, args.config->priority, args.turn_customizer); + auto port = TurnPort::Create(args, udp_socket); if (!port) return nullptr; port->SetTlsCertPolicy(args.config->tls_cert_policy); @@ -37,12 +34,7 @@ std::unique_ptr TurnPortFactory::Create( std::unique_ptr TurnPortFactory::Create(const CreateRelayPortArgs& args, int min_port, int max_port) { - auto port = TurnPort::CreateUnique( - args.network_thread, args.socket_factory, args.network, min_port, - max_port, args.username, args.password, *args.server_address, - args.config->credentials, args.config->priority, - args.config->tls_alpn_protocols, args.config->tls_elliptic_curves, - args.turn_customizer, args.config->tls_cert_verifier); + auto port = TurnPort::Create(args, min_port, max_port); if (!port) return nullptr; port->SetTlsCertPolicy(args.config->tls_cert_policy); diff --git a/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.cc b/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.cc index 4195230313..977ead4d72 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.cc @@ -17,20 +17,22 @@ #include #include "api/packet_socket_factory.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/transport/stun.h" +#include "api/units/time_delta.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/async_resolver_interface.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/helpers.h" #include "rtc_base/logging.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" namespace stunprober { namespace { +using ::webrtc::SafeTask; +using ::webrtc::TimeDelta; const int THREAD_WAKE_UP_INTERVAL_MS = 5; @@ -69,6 +71,9 @@ class StunProber::Requester : public sigslot::has_slots<> { const std::vector& server_ips); ~Requester() override; + Requester(const Requester&) = delete; + Requester& operator=(const Requester&) = delete; + // There is no callback for SendStunRequest as the underneath socket send is // expected to be completed immediately. Otherwise, it'll skip this request // and move to the next one. 
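Both TurnPortFactory::Create overloads above now funnel everything through the CreateRelayPortArgs aggregate instead of long positional argument lists, and the struct's out-of-line default constructor is replaced by in-class default initializers so new fields such as field_trials can be added without touching existing call sites. A minimal standalone sketch of that parameter-struct pattern follows; the types are simplified placeholders, not the real cricket/webrtc declarations.

#include <iostream>
#include <string>

// Illustrative stand-in for cricket::CreateRelayPortArgs; field names follow
// the diff, types are simplified placeholders.
struct CreateRelayPortArgsSketch {
  std::string username;
  std::string password;
  const void* turn_customizer = nullptr;  // Default member initializers replace
  const void* field_trials = nullptr;     // the old out-of-line default ctor.
};

// Stand-in for TurnPort::Create(args, min_port, max_port): one aggregate
// parameter instead of a long positional list.
void CreatePortSketch(const CreateRelayPortArgsSketch& args,
                      int min_port,
                      int max_port) {
  std::cout << "user=" << args.username << " ports=[" << min_port << ","
            << max_port << "] field_trials="
            << (args.field_trials ? "set" : "default") << "\n";
}

int main() {
  CreateRelayPortArgsSketch args;
  args.username = "user";
  args.password = "pass";
  CreatePortSketch(args, 2000, 3000);  // Newly added fields stay defaulted.
  return 0;
}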
@@ -105,8 +110,6 @@ class StunProber::Requester : public sigslot::has_slots<> { int16_t num_response_received_ = 0; webrtc::SequenceChecker& thread_checker_; - - RTC_DISALLOW_COPY_AND_ASSIGN(Requester); }; StunProber::Requester::Requester( @@ -137,12 +140,8 @@ void StunProber::Requester::SendStunRequest() { RTC_DCHECK(thread_checker_.IsCurrent()); requests_.push_back(new Request()); Request& request = *(requests_.back()); - cricket::StunMessage message; - // Random transaction ID, STUN_BINDING_REQUEST - message.SetTransactionID( - rtc::CreateRandomString(cricket::kStunTransactionIdLength)); - message.SetType(cricket::STUN_BINDING_REQUEST); + cricket::StunMessage message(cricket::STUN_BINDING_REQUEST); std::unique_ptr request_packet( new rtc::ByteBufferWriter(nullptr, kMaxUdpBufferSize)); @@ -255,11 +254,11 @@ void StunProber::ObserverAdapter::OnFinished(StunProber* stunprober, StunProber::StunProber(rtc::PacketSocketFactory* socket_factory, rtc::Thread* thread, - const rtc::NetworkManager::NetworkList& networks) + std::vector networks) : interval_ms_(0), socket_factory_(socket_factory), thread_(thread), - networks_(networks) {} + networks_(std::move(networks)) {} StunProber::~StunProber() { RTC_DCHECK(thread_checker_.IsCurrent()); @@ -359,8 +358,7 @@ void StunProber::OnServerResolved(rtc::AsyncResolverInterface* resolver) { // Deletion of AsyncResolverInterface can't be done in OnResolveResult which // handles SignalDone. - thread_->PostTask( - webrtc::ToQueuedTask([resolver] { resolver->Destroy(false); })); + thread_->PostTask([resolver] { resolver->Destroy(false); }); servers_.pop_back(); if (servers_.size()) { @@ -458,9 +456,8 @@ void StunProber::MaybeScheduleStunRequests() { if (Done()) { thread_->PostDelayedTask( - webrtc::ToQueuedTask(task_safety_.flag(), - [this] { ReportOnFinished(SUCCESS); }), - timeout_ms_); + SafeTask(task_safety_.flag(), [this] { ReportOnFinished(SUCCESS); }), + TimeDelta::Millis(timeout_ms_)); return; } if (should_send_next_request(now)) { @@ -471,9 +468,8 @@ void StunProber::MaybeScheduleStunRequests() { next_request_time_ms_ = now + interval_ms_; } thread_->PostDelayedTask( - webrtc::ToQueuedTask(task_safety_.flag(), - [this] { MaybeScheduleStunRequests(); }), - get_wake_up_interval_ms()); + SafeTask(task_safety_.flag(), [this] { MaybeScheduleStunRequests(); }), + TimeDelta::Millis(get_wake_up_interval_ms())); } bool StunProber::GetStats(StunProber::Stats* prob_stats) const { diff --git a/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.h b/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.h index fe2f14ca4e..7d5094a3b9 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.h @@ -16,13 +16,12 @@ #include #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "rtc_base/byte_buffer.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/ip_address.h" #include "rtc_base/network.h" #include "rtc_base/socket_address.h" #include "rtc_base/system/rtc_export.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread.h" namespace rtc { @@ -98,9 +97,12 @@ class RTC_EXPORT StunProber : public sigslot::has_slots<> { StunProber(rtc::PacketSocketFactory* socket_factory, rtc::Thread* thread, - const rtc::NetworkManager::NetworkList& networks); + std::vector networks); ~StunProber() override; + StunProber(const StunProber&) = delete; + StunProber& operator=(const StunProber&) = delete; + // Begin 
performing the probe test against the `servers`. If // `shared_socket_mode` is false, each request will be done with a new socket. // Otherwise, a unique socket will be used for a single round of requests @@ -119,7 +121,7 @@ class RTC_EXPORT StunProber : public sigslot::has_slots<> { int stun_ta_interval_ms, int requests_per_ip, int timeout_ms, - const AsyncCallback finish_callback); + AsyncCallback finish_callback); // TODO(guoweis): The combination of Prepare() and Run() are equivalent to the // Start() above. Remove Start() once everything is migrated. @@ -238,11 +240,9 @@ class RTC_EXPORT StunProber : public sigslot::has_slots<> { // AsyncCallback. ObserverAdapter observer_adapter_; - rtc::NetworkManager::NetworkList networks_; + const std::vector networks_; webrtc::ScopedTaskSafety task_safety_; - - RTC_DISALLOW_COPY_AND_ASSIGN(StunProber); }; } // namespace stunprober diff --git a/TMessagesProj/jni/voip/webrtc/pc/OWNERS b/TMessagesProj/jni/voip/webrtc/pc/OWNERS index c194b50643..4f82de419e 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/pc/OWNERS @@ -3,3 +3,4 @@ hta@webrtc.org perkj@webrtc.org tommi@webrtc.org deadbeef@webrtc.org +orphis@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc index 7890d9b1e0..0dbdf0b713 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc @@ -12,6 +12,7 @@ #include +#include #include #include @@ -19,26 +20,27 @@ #include "pc/audio_track.h" #include "pc/media_stream_track_proxy.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" -#include "rtc_base/logging.h" -#include "rtc_base/task_utils/to_queued_task.h" namespace webrtc { -AudioRtpReceiver::AudioRtpReceiver(rtc::Thread* worker_thread, - std::string receiver_id, - std::vector stream_ids, - bool is_unified_plan) +AudioRtpReceiver::AudioRtpReceiver( + rtc::Thread* worker_thread, + std::string receiver_id, + std::vector stream_ids, + bool is_unified_plan, + cricket::VoiceMediaChannel* voice_channel /*= nullptr*/) : AudioRtpReceiver(worker_thread, receiver_id, CreateStreamsFromIds(std::move(stream_ids)), - is_unified_plan) {} + is_unified_plan, + voice_channel) {} AudioRtpReceiver::AudioRtpReceiver( rtc::Thread* worker_thread, const std::string& receiver_id, const std::vector>& streams, - bool is_unified_plan) + bool is_unified_plan, + cricket::VoiceMediaChannel* voice_channel /*= nullptr*/) : worker_thread_(worker_thread), id_(receiver_id), source_(rtc::make_ref_counted( @@ -49,7 +51,8 @@ AudioRtpReceiver::AudioRtpReceiver( track_(AudioTrackProxyWithInternal::Create( rtc::Thread::Current(), AudioTrack::Create(receiver_id, source_))), - cached_track_enabled_(track_->enabled()), + media_channel_(voice_channel), + cached_track_enabled_(track_->internal()->enabled()), attachment_id_(GenerateUniqueId()), worker_thread_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()) { RTC_DCHECK(worker_thread_); @@ -61,7 +64,6 @@ AudioRtpReceiver::AudioRtpReceiver( AudioRtpReceiver::~AudioRtpReceiver() { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - RTC_DCHECK(stopped_); RTC_DCHECK(!media_channel_); track_->GetSource()->UnregisterAudioObserver(this); @@ -70,21 +72,24 @@ AudioRtpReceiver::~AudioRtpReceiver() { void AudioRtpReceiver::OnChanged() { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - if (cached_track_enabled_ != track_->enabled()) { - cached_track_enabled_ = track_->enabled(); - 
worker_thread_->PostTask(ToQueuedTask( - worker_thread_safety_, - [this, enabled = cached_track_enabled_, volume = cached_volume_]() { - RTC_DCHECK_RUN_ON(worker_thread_); - Reconfigure(enabled, volume); - })); - } + const bool enabled = track_->internal()->enabled(); + if (cached_track_enabled_ == enabled) + return; + cached_track_enabled_ = enabled; + worker_thread_->PostTask(SafeTask(worker_thread_safety_, [this, enabled]() { + RTC_DCHECK_RUN_ON(worker_thread_); + Reconfigure(enabled); + })); } -// RTC_RUN_ON(worker_thread_) void AudioRtpReceiver::SetOutputVolume_w(double volume) { + RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK_GE(volume, 0.0); RTC_DCHECK_LE(volume, 10.0); + + if (!media_channel_) + return; + ssrc_ ? media_channel_->SetOutputVolume(*ssrc_, volume) : media_channel_->SetDefaultOutputVolume(volume); } @@ -94,23 +99,18 @@ void AudioRtpReceiver::OnSetVolume(double volume) { RTC_DCHECK_GE(volume, 0); RTC_DCHECK_LE(volume, 10); - // Update the cached_volume_ even when stopped_, to allow clients to set the - // volume before starting/restarting, eg see crbug.com/1272566. - cached_volume_ = volume; - - if (stopped_) - return; - - // When the track is disabled, the volume of the source, which is the - // corresponding WebRtc Voice Engine channel will be 0. So we do not allow - // setting the volume to the source when the track is disabled. - if (track_->enabled()) { - worker_thread_->PostTask( - ToQueuedTask(worker_thread_safety_, [this, volume = cached_volume_]() { - RTC_DCHECK_RUN_ON(worker_thread_); - SetOutputVolume_w(volume); - })); - } + bool track_enabled = track_->internal()->enabled(); + worker_thread_->BlockingCall([&]() { + RTC_DCHECK_RUN_ON(worker_thread_); + // Update the cached_volume_ even when stopped, to allow clients to set + // the volume before starting/restarting, eg see crbug.com/1272566. + cached_volume_ = volume; + // When the track is disabled, the volume of the source, which is the + // corresponding WebRtc Voice Engine channel will be 0. So we do not + // allow setting the volume to the source when the track is disabled. + if (track_enabled) + SetOutputVolume_w(volume); + }); } rtc::scoped_refptr AudioRtpReceiver::dtls_transport() @@ -159,63 +159,47 @@ AudioRtpReceiver::GetFrameDecryptor() const { void AudioRtpReceiver::Stop() { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - // TODO(deadbeef): Need to do more here to fully stop receiving packets. - if (!stopped_) { - source_->SetState(MediaSourceInterface::kEnded); - stopped_ = true; - } + source_->SetState(MediaSourceInterface::kEnded); + track_->internal()->set_ended(); +} - worker_thread_->Invoke(RTC_FROM_HERE, [&]() { +void AudioRtpReceiver::RestartMediaChannel(absl::optional ssrc) { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + bool enabled = track_->internal()->enabled(); + MediaSourceInterface::SourceState state = source_->state(); + worker_thread_->BlockingCall([&]() { RTC_DCHECK_RUN_ON(worker_thread_); - - if (media_channel_) - SetOutputVolume_w(0.0); - - SetMediaChannel_w(nullptr); + RestartMediaChannel_w(std::move(ssrc), enabled, state); }); + source_->SetState(MediaSourceInterface::kLive); } -void AudioRtpReceiver::StopAndEndTrack() { - RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - Stop(); - track_->internal()->set_ended(); -} +void AudioRtpReceiver::RestartMediaChannel_w( + absl::optional ssrc, + bool track_enabled, + MediaSourceInterface::SourceState state) { + RTC_DCHECK_RUN_ON(worker_thread_); + if (!media_channel_) + return; // Can't restart. 
-void AudioRtpReceiver::RestartMediaChannel(absl::optional ssrc) { - RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - bool ok = worker_thread_->Invoke( - RTC_FROM_HERE, [&, enabled = cached_track_enabled_, - volume = cached_volume_, was_stopped = stopped_]() { - RTC_DCHECK_RUN_ON(worker_thread_); - if (!media_channel_) { - RTC_DCHECK(was_stopped); - return false; // Can't restart. - } - - if (!was_stopped && ssrc_ == ssrc) { - // Already running with that ssrc. - RTC_DCHECK(worker_thread_safety_->alive()); - return true; - } - - if (!was_stopped) { - source_->Stop(media_channel_, ssrc_); - } - - ssrc_ = std::move(ssrc); - source_->Start(media_channel_, ssrc_); - if (ssrc_) { - media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); - } - - Reconfigure(enabled, volume); - return true; - }); - - if (!ok) - return; + // Make sure the safety flag is marked as `alive` for cases where the media + // channel was provided via the ctor and not an explicit call to + // SetMediaChannel. + worker_thread_safety_->SetAlive(); + + if (state != MediaSourceInterface::kInitializing) { + if (ssrc_ == ssrc) + return; + source_->Stop(media_channel_, ssrc_); + } + + ssrc_ = std::move(ssrc); + source_->Start(media_channel_, ssrc_); + if (ssrc_) { + media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); + } - stopped_ = false; + Reconfigure(track_enabled); } void AudioRtpReceiver::SetupMediaChannel(uint32_t ssrc) { @@ -258,7 +242,7 @@ void AudioRtpReceiver::SetStreams( } } if (removed) { - existing_stream->RemoveTrack(track_); + existing_stream->RemoveTrack(audio_track()); } } // Add remote track to any streams that are new. @@ -272,7 +256,7 @@ void AudioRtpReceiver::SetStreams( } } if (added) { - stream->AddTrack(track_); + stream->AddTrack(audio_track()); } } streams_ = streams; @@ -296,11 +280,11 @@ void AudioRtpReceiver::SetDepacketizerToDecoderFrameTransformer( frame_transformer_ = std::move(frame_transformer); } -// RTC_RUN_ON(worker_thread_) -void AudioRtpReceiver::Reconfigure(bool track_enabled, double volume) { +void AudioRtpReceiver::Reconfigure(bool track_enabled) { + RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(media_channel_); - SetOutputVolume_w(track_enabled ? volume : 0); + SetOutputVolume_w(track_enabled ? cached_volume_ : 0); if (ssrc_ && frame_decryptor_) { // Reattach the frame decryptor if we were reconfigured. @@ -331,21 +315,12 @@ void AudioRtpReceiver::SetJitterBufferMinimumDelay( } void AudioRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) { - RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(media_channel == nullptr || media_channel->media_type() == media_type()); + if (!media_channel && media_channel_) + SetOutputVolume_w(0.0); - if (stopped_ && !media_channel) - return; - - worker_thread_->Invoke(RTC_FROM_HERE, [&] { - RTC_DCHECK_RUN_ON(worker_thread_); - SetMediaChannel_w(media_channel); - }); -} - -// RTC_RUN_ON(worker_thread_) -void AudioRtpReceiver::SetMediaChannel_w(cricket::MediaChannel* media_channel) { media_channel ? 
worker_thread_safety_->SetAlive() : worker_thread_safety_->SetNotAlive(); media_channel_ = static_cast(media_channel); diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h index aef497db76..c68315882c 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h @@ -26,6 +26,7 @@ #include "api/rtp_receiver_interface.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/transport/rtp/rtp_source.h" #include "media/base/media_channel.h" #include "pc/audio_track.h" @@ -33,9 +34,7 @@ #include "pc/media_stream_track_proxy.h" #include "pc/remote_audio_source.h" #include "pc/rtp_receiver.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" @@ -45,16 +44,24 @@ class AudioRtpReceiver : public ObserverInterface, public AudioSourceInterface::AudioObserver, public RtpReceiverInternal { public: + // The constructor supports optionally passing the voice channel to the + // instance at construction time without having to call `SetMediaChannel()` + // on the worker thread straight after construction. + // However, when using that, the assumption is that right after construction, + // a call to either `SetupUnsignaledMediaChannel` or `SetupMediaChannel` + // will be made, which will internally start the source on the worker thread. AudioRtpReceiver(rtc::Thread* worker_thread, std::string receiver_id, std::vector stream_ids, - bool is_unified_plan); + bool is_unified_plan, + cricket::VoiceMediaChannel* voice_channel = nullptr); // TODO(https://crbug.com/webrtc/9480): Remove this when streams() is removed. AudioRtpReceiver( rtc::Thread* worker_thread, const std::string& receiver_id, const std::vector>& streams, - bool is_unified_plan); + bool is_unified_plan, + cricket::VoiceMediaChannel* media_channel = nullptr); virtual ~AudioRtpReceiver(); // ObserverInterface implementation @@ -90,7 +97,6 @@ class AudioRtpReceiver : public ObserverInterface, // RtpReceiverInternal implementation. 
void Stop() override; - void StopAndEndTrack() override; void SetupMediaChannel(uint32_t ssrc) override; void SetupUnsignaledMediaChannel() override; uint32_t ssrc() const override; @@ -114,12 +120,14 @@ class AudioRtpReceiver : public ObserverInterface, override; private: - void RestartMediaChannel(absl::optional ssrc); - void Reconfigure(bool track_enabled, double volume) + void RestartMediaChannel(absl::optional ssrc) + RTC_RUN_ON(&signaling_thread_checker_); + void RestartMediaChannel_w(absl::optional ssrc, + bool track_enabled, + MediaSourceInterface::SourceState state) RTC_RUN_ON(worker_thread_); + void Reconfigure(bool track_enabled) RTC_RUN_ON(worker_thread_); void SetOutputVolume_w(double volume) RTC_RUN_ON(worker_thread_); - void SetMediaChannel_w(cricket::MediaChannel* media_channel) - RTC_RUN_ON(worker_thread_); RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_; rtc::Thread* const worker_thread_; @@ -132,8 +140,7 @@ class AudioRtpReceiver : public ObserverInterface, std::vector> streams_ RTC_GUARDED_BY(&signaling_thread_checker_); bool cached_track_enabled_ RTC_GUARDED_BY(&signaling_thread_checker_); - double cached_volume_ RTC_GUARDED_BY(&signaling_thread_checker_) = 1.0; - bool stopped_ RTC_GUARDED_BY(&signaling_thread_checker_) = true; + double cached_volume_ RTC_GUARDED_BY(worker_thread_) = 1.0; RtpReceiverObserverInterface* observer_ RTC_GUARDED_BY(&signaling_thread_checker_) = nullptr; bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) = diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc b/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc index be087f693b..c012442d13 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc @@ -11,18 +11,17 @@ #include "pc/audio_track.h" #include "rtc_base/checks.h" -#include "rtc_base/ref_counted_object.h" namespace webrtc { // static rtc::scoped_refptr AudioTrack::Create( - const std::string& id, + absl::string_view id, const rtc::scoped_refptr& source) { return rtc::make_ref_counted(id, source); } -AudioTrack::AudioTrack(const std::string& label, +AudioTrack::AudioTrack(absl::string_view label, const rtc::scoped_refptr& source) : MediaStreamTrack(label), audio_source_(source) { if (audio_source_) { @@ -32,7 +31,7 @@ AudioTrack::AudioTrack(const std::string& label, } AudioTrack::~AudioTrack() { - RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); set_state(MediaStreamTrackInterface::kEnded); if (audio_source_) audio_source_->UnregisterObserver(this); @@ -48,19 +47,19 @@ AudioSourceInterface* AudioTrack::GetSource() const { } void AudioTrack::AddSink(AudioTrackSinkInterface* sink) { - RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); if (audio_source_) audio_source_->AddSink(sink); } void AudioTrack::RemoveSink(AudioTrackSinkInterface* sink) { - RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); if (audio_source_) audio_source_->RemoveSink(sink); } void AudioTrack::OnChanged() { - RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); if (audio_source_->state() == MediaSourceInterface::kEnded) { set_state(kEnded); } else { diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_track.h b/TMessagesProj/jni/voip/webrtc/pc/audio_track.h index 8a705cf8fb..ae326b304b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_track.h +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_track.h @@ -17,14 +17,19 @@ #include 
"api/media_stream_track.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" namespace webrtc { +// TODO(tommi): Instead of inheriting from `MediaStreamTrack<>`, implement the +// properties directly in this class. `MediaStreamTrack` doesn't guard against +// conflicting access, so we'd need to override those methods anyway in this +// class in order to make sure things are correctly checked. class AudioTrack : public MediaStreamTrack, public ObserverInterface { protected: // Protected ctor to force use of factory method. - AudioTrack(const std::string& label, + AudioTrack(absl::string_view label, const rtc::scoped_refptr& source); AudioTrack() = delete; @@ -35,7 +40,7 @@ class AudioTrack : public MediaStreamTrack, public: static rtc::scoped_refptr Create( - const std::string& id, + absl::string_view id, const rtc::scoped_refptr& source); // MediaStreamTrack implementation. @@ -53,7 +58,7 @@ class AudioTrack : public MediaStreamTrack, private: const rtc::scoped_refptr audio_source_; - SequenceChecker thread_checker_; + RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker signaling_thread_checker_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel.cc b/TMessagesProj/jni/voip/webrtc/pc/channel.cc index c2e3250a79..96924978d0 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/channel.cc @@ -12,37 +12,35 @@ #include #include -#include -#include +#include +#include #include -#include "absl/algorithm/container.h" #include "absl/strings/string_view.h" #include "api/rtp_parameters.h" #include "api/sequence_checker.h" -#include "api/task_queue/queued_task.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/units/timestamp.h" #include "media/base/codec.h" #include "media/base/rid_description.h" #include "media/base/rtp_utils.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "p2p/base/dtls_transport_internal.h" #include "pc/rtp_media_utils.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" #include "rtc_base/network_route.h" -#include "rtc_base/strings/string_builder.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" -#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/strings/string_format.h" #include "rtc_base/trace_event.h" namespace cricket { namespace { +using ::rtc::StringFormat; using ::rtc::UniqueRandomIdGenerator; using ::webrtc::PendingTaskSafetyFlag; using ::webrtc::SdpType; -using ::webrtc::ToQueuedTask; // Finds a stream based on target's Primary SSRC or RIDs. // This struct is used in BaseChannel::UpdateLocalStreams_w. 
@@ -79,12 +77,6 @@ struct StreamFinder { } // namespace -static void SafeSetError(const std::string& message, std::string* error_desc) { - if (error_desc) { - *error_desc = message; - } -} - template void RtpParametersFromMediaDescription( const MediaContentDescriptionImpl* desc, @@ -105,9 +97,13 @@ void RtpParametersFromMediaDescription( template void RtpSendParametersFromMediaDescription( const MediaContentDescriptionImpl* desc, - const RtpHeaderExtensions& extensions, - bool is_stream_active, + webrtc::RtpExtension::Filter extensions_filter, RtpSendParameters* send_params) { + RtpHeaderExtensions extensions = + webrtc::RtpExtension::DeduplicateHeaderExtensions( + desc->rtp_header_extensions(), extensions_filter); + const bool is_stream_active = + webrtc::RtpTransceiverDirectionHasRecv(desc->direction()); RtpParametersFromMediaDescription(desc, extensions, is_stream_active, send_params); send_params->max_bandwidth_bps = desc->bandwidth(); @@ -118,7 +114,7 @@ BaseChannel::BaseChannel(rtc::Thread* worker_thread, rtc::Thread* network_thread, rtc::Thread* signaling_thread, std::unique_ptr media_channel, - const std::string& content_name, + absl::string_view mid, bool srtp_required, webrtc::CryptoOptions crypto_options, UniqueRandomIdGenerator* ssrc_generator) @@ -126,15 +122,18 @@ BaseChannel::BaseChannel(rtc::Thread* worker_thread, network_thread_(network_thread), signaling_thread_(signaling_thread), alive_(PendingTaskSafetyFlag::Create()), - content_name_(content_name), srtp_required_(srtp_required), - crypto_options_(crypto_options), + extensions_filter_( + crypto_options.srtp.enable_encrypted_rtp_header_extensions + ? webrtc::RtpExtension::kPreferEncryptedExtension + : webrtc::RtpExtension::kDiscardEncryptedExtension), media_channel_(std::move(media_channel)), + demuxer_criteria_(mid), ssrc_generator_(ssrc_generator) { RTC_DCHECK_RUN_ON(worker_thread_); + RTC_DCHECK(media_channel_); RTC_DCHECK(ssrc_generator_); - demuxer_criteria_.mid = content_name; - RTC_LOG(LS_INFO) << "Created channel: " << ToString(); + RTC_DLOG(LS_INFO) << "Created channel: " << ToString(); } BaseChannel::~BaseChannel() { @@ -149,27 +148,17 @@ BaseChannel::~BaseChannel() { } std::string BaseChannel::ToString() const { - rtc::StringBuilder sb; - sb << "{mid: " << content_name_; - if (media_channel_) { - sb << ", media_type: " << MediaTypeToString(media_channel_->media_type()); - } - sb << "}"; - return sb.Release(); + return StringFormat("{mid: %s, media_type: %s}", mid().c_str(), + MediaTypeToString(media_channel_->media_type()).c_str()); } -bool BaseChannel::ConnectToRtpTransport() { +bool BaseChannel::ConnectToRtpTransport_n() { RTC_DCHECK(rtp_transport_); RTC_DCHECK(media_channel()); // We don't need to call OnDemuxerCriteriaUpdatePending/Complete because // there's no previous criteria to worry about. 
- bool result = rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria_, this); - if (result) { - previous_demuxer_criteria_ = demuxer_criteria_; - } else { - previous_demuxer_criteria_ = {}; - RTC_LOG(LS_ERROR) << "Failed to set up demuxing for " << ToString(); + if (!rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria_, this)) { return false; } rtp_transport_->SignalReadyToSend.connect( @@ -183,7 +172,7 @@ bool BaseChannel::ConnectToRtpTransport() { return true; } -void BaseChannel::DisconnectFromRtpTransport() { +void BaseChannel::DisconnectFromRtpTransport_n() { RTC_DCHECK(rtp_transport_); RTC_DCHECK(media_channel()); rtp_transport_->UnregisterRtpDemuxerSink(this); @@ -191,32 +180,8 @@ void BaseChannel::DisconnectFromRtpTransport() { rtp_transport_->SignalNetworkRouteChanged.disconnect(this); rtp_transport_->SignalWritableState.disconnect(this); rtp_transport_->SignalSentPacket.disconnect(this); -} - -void BaseChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) { - RTC_DCHECK_RUN_ON(worker_thread()); - - network_thread_->Invoke(RTC_FROM_HERE, [this, rtp_transport] { - SetRtpTransport(rtp_transport); - // Both RTP and RTCP channels should be set, we can call SetInterface on - // the media channel and it can set network options. - media_channel_->SetInterface(this); - }); -} - -void BaseChannel::Deinit() { - RTC_DCHECK_RUN_ON(worker_thread()); - // Packets arrive on the network thread, processing packets calls virtual - // functions, so need to stop this process in Deinit that is called in - // derived classes destructor. - network_thread_->Invoke(RTC_FROM_HERE, [&] { - RTC_DCHECK_RUN_ON(network_thread()); - media_channel_->SetInterface(/*iface=*/nullptr); - - if (rtp_transport_) { - DisconnectFromRtpTransport(); - } - }); + rtp_transport_ = nullptr; + media_channel_->SetInterface(nullptr); } bool BaseChannel::SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) { @@ -227,16 +192,24 @@ bool BaseChannel::SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) { } if (rtp_transport_) { - DisconnectFromRtpTransport(); + DisconnectFromRtpTransport_n(); + // Clear the cached header extensions on the worker. + worker_thread_->PostTask(SafeTask(alive_, [this] { + RTC_DCHECK_RUN_ON(worker_thread()); + rtp_header_extensions_.clear(); + })); } rtp_transport_ = rtp_transport; if (rtp_transport_) { - transport_name_ = rtp_transport_->transport_name(); - if (!ConnectToRtpTransport()) { + if (!ConnectToRtpTransport_n()) { return false; } - OnTransportReadyToSend(rtp_transport_->IsReadyToSend()); + + RTC_DCHECK(!media_channel_->HasNetworkInterface()); + media_channel_->SetInterface(this); + + media_channel_->OnReadyToSend(rtp_transport_->IsReadyToSend()); UpdateWritableState_n(); // Set the cached socket options. @@ -249,6 +222,7 @@ bool BaseChannel::SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) { } } } + return true; } @@ -260,7 +234,7 @@ void BaseChannel::Enable(bool enable) { enabled_s_ = enable; - worker_thread_->PostTask(ToQueuedTask(alive_, [this, enable] { + worker_thread_->PostTask(SafeTask(alive_, [this, enable] { RTC_DCHECK_RUN_ON(worker_thread()); // Sanity check to make sure that enabled_ and enabled_s_ // stay in sync. 
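The PostTask call above is typical of this diff: webrtc::ToQueuedTask(safety, ...) becomes webrtc::SafeTask(flag, ...), a closure that silently no-ops once the owner's PendingTaskSafetyFlag is cleared, and delays elsewhere are passed as webrtc::TimeDelta instead of raw milliseconds. Below is a small self-contained model of the cancellation part only, with invented stand-in types rather than the real api/task_queue classes.

#include <atomic>
#include <functional>
#include <iostream>
#include <memory>
#include <vector>

// Simplified model of PendingTaskSafetyFlag: shared state that the owning
// object clears when it goes away.
using AliveFlag = std::shared_ptr<std::atomic<bool>>;

// Wrap a closure so it silently no-ops once the flag is cleared, roughly what
// webrtc::SafeTask does for tasks posted to another thread.
std::function<void()> SafeTaskSketch(AliveFlag flag, std::function<void()> task) {
  return [flag = std::move(flag), task = std::move(task)] {
    if (flag->load()) {
      task();
    }
  };
}

int main() {
  std::vector<std::function<void()>> queue;  // Stand-in for a thread's task queue.
  AliveFlag alive = std::make_shared<std::atomic<bool>>(true);

  queue.push_back(SafeTaskSketch(alive, [] { std::cout << "task ran\n"; }));
  for (auto& task : queue) task();  // Prints "task ran".
  queue.clear();

  alive->store(false);  // E.g. the channel is being destroyed.
  queue.push_back(SafeTaskSketch(alive, [] { std::cout << "stale task\n"; }));
  for (auto& task : queue) task();  // Prints nothing: the wrapper bails out.
  return 0;
}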
@@ -275,7 +249,7 @@ void BaseChannel::Enable(bool enable) { bool BaseChannel::SetLocalContent(const MediaContentDescription* content, SdpType type, - std::string* error_desc) { + std::string& error_desc) { RTC_DCHECK_RUN_ON(worker_thread()); TRACE_EVENT0("webrtc", "BaseChannel::SetLocalContent"); return SetLocalContent_w(content, type, error_desc); @@ -283,31 +257,31 @@ bool BaseChannel::SetLocalContent(const MediaContentDescription* content, bool BaseChannel::SetRemoteContent(const MediaContentDescription* content, SdpType type, - std::string* error_desc) { + std::string& error_desc) { RTC_DCHECK_RUN_ON(worker_thread()); TRACE_EVENT0("webrtc", "BaseChannel::SetRemoteContent"); return SetRemoteContent_w(content, type, error_desc); } bool BaseChannel::SetPayloadTypeDemuxingEnabled(bool enabled) { + // TODO(bugs.webrtc.org/11993): The demuxer state needs to be managed on the + // network thread. At the moment there's a workaround for inconsistent state + // between the worker and network thread because of this (see + // OnDemuxerCriteriaUpdatePending elsewhere in this file) and + // SetPayloadTypeDemuxingEnabled_w has a BlockingCall over to the network + // thread to apply state updates. RTC_DCHECK_RUN_ON(worker_thread()); TRACE_EVENT0("webrtc", "BaseChannel::SetPayloadTypeDemuxingEnabled"); return SetPayloadTypeDemuxingEnabled_w(enabled); } -bool BaseChannel::IsReadyToReceiveMedia_w() const { - // Receive data if we are enabled and have local content, - return enabled_ && - webrtc::RtpTransceiverDirectionHasRecv(local_content_direction_); -} - bool BaseChannel::IsReadyToSendMedia_w() const { // Send outgoing data if we are enabled, have local and remote content, // and we have had some form of connectivity. return enabled_ && webrtc::RtpTransceiverDirectionHasRecv(remote_content_direction_) && webrtc::RtpTransceiverDirectionHasSend(local_content_direction_) && - was_ever_writable(); + was_ever_writable_; } bool BaseChannel::SendPacket(rtc::CopyOnWriteBuffer* packet, @@ -324,6 +298,7 @@ int BaseChannel::SetOption(SocketType type, rtc::Socket::Option opt, int value) { RTC_DCHECK_RUN_ON(network_thread()); + RTC_DCHECK(network_initialized()); RTC_DCHECK(rtp_transport_); switch (type) { case ST_RTP: @@ -340,6 +315,7 @@ int BaseChannel::SetOption(SocketType type, void BaseChannel::OnWritableState(bool writable) { RTC_DCHECK_RUN_ON(network_thread()); + RTC_DCHECK(network_initialized()); if (writable) { ChannelWritable_n(); } else { @@ -349,9 +325,11 @@ void BaseChannel::OnWritableState(bool writable) { void BaseChannel::OnNetworkRouteChanged( absl::optional network_route) { + RTC_DCHECK_RUN_ON(network_thread()); + RTC_DCHECK(network_initialized()); + RTC_LOG(LS_INFO) << "Network route changed for " << ToString(); - RTC_DCHECK_RUN_ON(network_thread()); rtc::NetworkRoute new_route; if (network_route) { new_route = *(network_route); @@ -360,18 +338,26 @@ void BaseChannel::OnNetworkRouteChanged( // use the same transport name and MediaChannel::OnNetworkRouteChanged cannot // work correctly. Intentionally leave it broken to simplify the code and // encourage the users to stop using non-muxing RTCP. 
- media_channel_->OnNetworkRouteChanged(transport_name_, new_route); + media_channel_->OnNetworkRouteChanged(transport_name(), new_route); } void BaseChannel::SetFirstPacketReceivedCallback( std::function callback) { RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK(!on_first_packet_received_ || !callback); + + // TODO(bugs.webrtc.org/11992): Rename SetFirstPacketReceivedCallback to + // something that indicates network thread initialization/uninitialization and + // call Init_n() / Deinit_n() respectively. + // if (!callback) + // Deinit_n(); + on_first_packet_received_ = std::move(callback); } void BaseChannel::OnTransportReadyToSend(bool ready) { RTC_DCHECK_RUN_ON(network_thread()); + RTC_DCHECK(network_initialized()); media_channel_->OnReadyToSend(ready); } @@ -379,22 +365,15 @@ bool BaseChannel::SendPacket(bool rtcp, rtc::CopyOnWriteBuffer* packet, const rtc::PacketOptions& options) { RTC_DCHECK_RUN_ON(network_thread()); + RTC_DCHECK(network_initialized()); + TRACE_EVENT0("webrtc", "BaseChannel::SendPacket"); + // Until all the code is migrated to use RtpPacketType instead of bool. RtpPacketType packet_type = rtcp ? RtpPacketType::kRtcp : RtpPacketType::kRtp; - // SendPacket gets called from MediaEngine, on a pacer or an encoder thread. - // If the thread is not our network thread, we will post to our network - // so that the real work happens on our network. This avoids us having to - // synchronize access to all the pieces of the send path, including - // SRTP and the inner workings of the transport channels. - // The only downside is that we can't return a proper failure code if - // needed. Since UDP is unreliable anyway, this should be a non-issue. - TRACE_EVENT0("webrtc", "BaseChannel::SendPacket"); - - // Now that we are on the correct thread, ensure we have a place to send this - // packet before doing anything. (We might get RTCP packets that we don't - // intend to send.) If we've negotiated RTCP mux, send RTCP over the RTP - // transport. + // Ensure we have a place to send this packet before doing anything. We might + // get RTCP packets that we don't intend to send. If we've negotiated RTCP + // mux, send RTCP over the RTP transport. if (!rtp_transport_ || !rtp_transport_->IsWritable(rtcp)) { return false; } @@ -412,30 +391,25 @@ bool BaseChannel::SendPacket(bool rtcp, // The audio/video engines may attempt to send RTCP packets as soon as the // streams are created, so don't treat this as an error for RTCP. // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=6809 - if (rtcp) { - return false; - } - // However, there shouldn't be any RTP packets sent before SRTP is set up - // (and SetSend(true) is called). - RTC_LOG(LS_ERROR) << "Can't send outgoing RTP packet for " << ToString() - << " when SRTP is inactive and crypto is required"; - RTC_DCHECK_NOTREACHED(); + // However, there shouldn't be any RTP packets sent before SRTP is set + // up (and SetSend(true) is called). + RTC_DCHECK(rtcp) << "Can't send outgoing RTP packet for " << ToString() + << " when SRTP is inactive and crypto is required"; return false; } - std::string packet_type = rtcp ? "RTCP" : "RTP"; - RTC_DLOG(LS_WARNING) << "Sending an " << packet_type + RTC_DLOG(LS_WARNING) << "Sending an " << (rtcp ? "RTCP" : "RTP") << " packet without encryption for " << ToString() << "."; } - // Bon voyage. return rtcp ? 
rtp_transport_->SendRtcpPacket(packet, options, PF_SRTP_BYPASS) : rtp_transport_->SendRtpPacket(packet, options, PF_SRTP_BYPASS); } void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) { RTC_DCHECK_RUN_ON(network_thread()); + RTC_DCHECK(network_initialized()); if (on_first_packet_received_) { on_first_packet_received_(); @@ -468,41 +442,74 @@ void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) { } } -void BaseChannel::UpdateRtpHeaderExtensionMap( - const RtpHeaderExtensions& header_extensions) { - // Update the header extension map on network thread in case there is data - // race. - // - // NOTE: This doesn't take the BUNDLE case in account meaning the RTP header - // extension maps are not merged when BUNDLE is enabled. This is fine because - // the ID for MID should be consistent among all the RTP transports. - network_thread_->Invoke(RTC_FROM_HERE, [this, &header_extensions] { +bool BaseChannel::MaybeUpdateDemuxerAndRtpExtensions_w( + bool update_demuxer, + absl::optional extensions, + std::string& error_desc) { + if (extensions) { + if (rtp_header_extensions_ == extensions) { + extensions.reset(); // No need to update header extensions. + } else { + rtp_header_extensions_ = *extensions; + } + } + + if (!update_demuxer && !extensions) + return true; // No update needed. + + // TODO(bugs.webrtc.org/13536): See if we can do this asynchronously. + + if (update_demuxer) + media_channel()->OnDemuxerCriteriaUpdatePending(); + + bool success = network_thread()->BlockingCall([&]() mutable { RTC_DCHECK_RUN_ON(network_thread()); - rtp_transport_->UpdateRtpHeaderExtensionMap(header_extensions); + // NOTE: This doesn't take the BUNDLE case in account meaning the RTP header + // extension maps are not merged when BUNDLE is enabled. This is fine + // because the ID for MID should be consistent among all the RTP transports. + if (extensions) + rtp_transport_->UpdateRtpHeaderExtensionMap(*extensions); + + if (!update_demuxer) + return true; + + if (!rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria_, this)) { + error_desc = + StringFormat("Failed to apply demuxer criteria for '%s': '%s'.", + mid().c_str(), demuxer_criteria_.ToString().c_str()); + return false; + } + return true; }); + + if (update_demuxer) + media_channel()->OnDemuxerCriteriaUpdateComplete(); + + return success; } bool BaseChannel::RegisterRtpDemuxerSink_w() { - if (demuxer_criteria_ == previous_demuxer_criteria_) { - return true; - } media_channel_->OnDemuxerCriteriaUpdatePending(); // Copy demuxer criteria, since they're a worker-thread variable // and we want to pass them to the network thread - return network_thread_->Invoke( - RTC_FROM_HERE, [this, demuxer_criteria = demuxer_criteria_] { + bool ret = network_thread_->BlockingCall( + [this, demuxer_criteria = demuxer_criteria_] { RTC_DCHECK_RUN_ON(network_thread()); - RTC_DCHECK(rtp_transport_); - bool result = - rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria, this); - if (result) { - previous_demuxer_criteria_ = demuxer_criteria; - } else { - previous_demuxer_criteria_ = {}; + if (!rtp_transport_) { + // Transport was disconnected before attempting to update the + // criteria. This can happen while setting the remote description. + // See chromium:1295469 for an example. + return false; } - media_channel_->OnDemuxerCriteriaUpdateComplete(); - return result; + // Note that RegisterRtpDemuxerSink first unregisters the sink if + // already registered. 
So this will change the state of the class + // whether the call succeeds or not. + return rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria, this); }); + + media_channel_->OnDemuxerCriteriaUpdateComplete(); + + return ret; } void BaseChannel::EnableMedia_w() { @@ -544,7 +551,7 @@ void BaseChannel::ChannelWritable_n() { // We only have to do this PostTask once, when first transitioning to // writable. if (!was_ever_writable_n_) { - worker_thread_->PostTask(ToQueuedTask(alive_, [this] { + worker_thread_->PostTask(SafeTask(alive_, [this] { RTC_DCHECK_RUN_ON(worker_thread()); was_ever_writable_ = true; UpdateMediaSendRecvState_w(); @@ -562,23 +569,17 @@ void BaseChannel::ChannelNotWritable_n() { RTC_LOG(LS_INFO) << "Channel not writable (" << ToString() << ")"; } -bool BaseChannel::AddRecvStream_w(const StreamParams& sp) { - return media_channel()->AddRecvStream(sp); -} - -bool BaseChannel::RemoveRecvStream_w(uint32_t ssrc) { - return media_channel()->RemoveRecvStream(ssrc); -} - -void BaseChannel::ResetUnsignaledRecvStream_w() { - media_channel()->ResetUnsignaledRecvStream(); -} - bool BaseChannel::SetPayloadTypeDemuxingEnabled_w(bool enabled) { + RTC_LOG_THREAD_BLOCK_COUNT(); + if (enabled == payload_type_demuxing_enabled_) { return true; } + payload_type_demuxing_enabled_ = enabled; + + bool config_changed = false; + if (!enabled) { // TODO(crbug.com/11477): This will remove *all* unsignaled streams (those // without an explicitly signaled SSRC), which may include streams that @@ -586,27 +587,32 @@ bool BaseChannel::SetPayloadTypeDemuxingEnabled_w(bool enabled) { // streams that were matched based on payload type alone, but currently // there is no straightforward way to identify those streams. media_channel()->ResetUnsignaledRecvStream(); - demuxer_criteria_.payload_types.clear(); - if (!RegisterRtpDemuxerSink_w()) { - RTC_LOG(LS_ERROR) << "Failed to disable payload type demuxing for " - << ToString(); - return false; + if (!demuxer_criteria_.payload_types().empty()) { + config_changed = true; + demuxer_criteria_.payload_types().clear(); } } else if (!payload_types_.empty()) { - demuxer_criteria_.payload_types.insert(payload_types_.begin(), - payload_types_.end()); - if (!RegisterRtpDemuxerSink_w()) { - RTC_LOG(LS_ERROR) << "Failed to enable payload type demuxing for " - << ToString(); - return false; + for (const auto& type : payload_types_) { + if (demuxer_criteria_.payload_types().insert(type).second) { + config_changed = true; + } } + } else { + RTC_DCHECK(demuxer_criteria_.payload_types().empty()); } - return true; + + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0); + + if (!config_changed) + return true; + + // Note: This synchronously hops to the network thread. + return RegisterRtpDemuxerSink_w(); } bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, SdpType type, - std::string* error_desc) { + std::string& error_desc) { // In the case of RIDs (where SSRCs are not negotiated), this method will // generate an SSRC for each layer in StreamParams. That representation will // be stored internally in `local_streams_`. 
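SetPayloadTypeDemuxingEnabled_w above (and MaybeAddHandledPayloadType further down) now track whether the demuxer criteria actually changed, so the blocking hop to the network thread in RegisterRtpDemuxerSink_w can be skipped when nothing did. A compact standalone sketch of that change-detection idea follows, using a plain std::set in place of the real webrtc::RtpDemuxerCriteria.

#include <cstdint>
#include <iostream>
#include <set>

// Illustrative stand-in for the payload-type part of the demuxer criteria.
struct DemuxerCriteriaSketch {
  std::set<uint8_t> payload_types;
};

// Mirrors the shape of the updated MaybeAddHandledPayloadType: report whether
// the criteria actually changed so the caller can skip the blocking
// re-registration on the network thread when they did not.
bool MaybeAddHandledPayloadTypeSketch(DemuxerCriteriaSketch& criteria,
                                      int payload_type) {
  return criteria.payload_types.insert(static_cast<uint8_t>(payload_type)).second;
}

int main() {
  DemuxerCriteriaSketch criteria;
  bool changed = MaybeAddHandledPayloadTypeSketch(criteria, 111);  // New entry.
  changed |= MaybeAddHandledPayloadTypeSketch(criteria, 111);      // Duplicate: no change.
  std::cout << (changed ? "re-register demuxer sink\n"
                        : "skip re-registration\n");
  return 0;
}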
@@ -626,11 +632,10 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, continue; } if (!media_channel()->RemoveSendStream(old_stream.first_ssrc())) { - rtc::StringBuilder desc; - desc << "Failed to remove send stream with ssrc " - << old_stream.first_ssrc() << " from m-section with mid='" - << content_name() << "'."; - SafeSetError(desc.str(), error_desc); + error_desc = StringFormat( + "Failed to remove send stream with ssrc %u from m-section with " + "mid='%s'.", + old_stream.first_ssrc(), mid().c_str()); ret = false; } } @@ -653,11 +658,10 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, RTC_DCHECK(new_stream.has_ssrcs() || new_stream.has_rids()); if (new_stream.has_ssrcs() && new_stream.has_rids()) { - rtc::StringBuilder desc; - desc << "Failed to add send stream: " << new_stream.first_ssrc() - << " into m-section with mid='" << content_name() - << "'. Stream has both SSRCs and RIDs."; - SafeSetError(desc.str(), error_desc); + error_desc = StringFormat( + "Failed to add send stream: %u into m-section with mid='%s'. Stream " + "has both SSRCs and RIDs.", + new_stream.first_ssrc(), mid().c_str()); ret = false; continue; } @@ -674,10 +678,9 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, RTC_LOG(LS_INFO) << "Add send stream ssrc: " << new_stream.ssrcs[0] << " into " << ToString(); } else { - rtc::StringBuilder desc; - desc << "Failed to add send stream ssrc: " << new_stream.first_ssrc() - << " into m-section with mid='" << content_name() << "'"; - SafeSetError(desc.str(), error_desc); + error_desc = StringFormat( + "Failed to add send stream ssrc: %u into m-section with mid='%s'", + new_stream.first_ssrc(), mid().c_str()); ret = false; } } @@ -685,96 +688,128 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, return ret; } -bool BaseChannel::UpdateRemoteStreams_w( - const std::vector& streams, - SdpType type, - std::string* error_desc) { +bool BaseChannel::UpdateRemoteStreams_w(const MediaContentDescription* content, + SdpType type, + std::string& error_desc) { + RTC_LOG_THREAD_BLOCK_COUNT(); + bool needs_re_registration = false; + if (!webrtc::RtpTransceiverDirectionHasSend(content->direction())) { + RTC_DLOG(LS_VERBOSE) << "UpdateRemoteStreams_w: remote side will not send " + "- disable payload type demuxing for " + << ToString(); + if (ClearHandledPayloadTypes()) { + needs_re_registration = payload_type_demuxing_enabled_; + } + } + + const std::vector& streams = content->streams(); + const bool new_has_unsignaled_ssrcs = HasStreamWithNoSsrcs(streams); + const bool old_has_unsignaled_ssrcs = HasStreamWithNoSsrcs(remote_streams_); + // Check for streams that have been removed. - bool ret = true; for (const StreamParams& old_stream : remote_streams_) { // If we no longer have an unsignaled stream, we would like to remove // the unsignaled stream params that are cached. 
- if (!old_stream.has_ssrcs() && !HasStreamWithNoSsrcs(streams)) { - ResetUnsignaledRecvStream_w(); + if (!old_stream.has_ssrcs() && !new_has_unsignaled_ssrcs) { + media_channel()->ResetUnsignaledRecvStream(); RTC_LOG(LS_INFO) << "Reset unsignaled remote stream for " << ToString() << "."; } else if (old_stream.has_ssrcs() && !GetStreamBySsrc(streams, old_stream.first_ssrc())) { - if (RemoveRecvStream_w(old_stream.first_ssrc())) { + if (media_channel()->RemoveRecvStream(old_stream.first_ssrc())) { RTC_LOG(LS_INFO) << "Remove remote ssrc: " << old_stream.first_ssrc() << " from " << ToString() << "."; } else { - rtc::StringBuilder desc; - desc << "Failed to remove remote stream with ssrc " - << old_stream.first_ssrc() << " from m-section with mid='" - << content_name() << "'."; - SafeSetError(desc.str(), error_desc); - ret = false; + error_desc = StringFormat( + "Failed to remove remote stream with ssrc %u from m-section with " + "mid='%s'.", + old_stream.first_ssrc(), mid().c_str()); + return false; } } } - demuxer_criteria_.ssrcs.clear(); + // Check for new streams. + webrtc::flat_set ssrcs; for (const StreamParams& new_stream : streams) { // We allow a StreamParams with an empty list of SSRCs, in which case the // MediaChannel will cache the parameters and use them for any unsignaled // stream received later. - if ((!new_stream.has_ssrcs() && !HasStreamWithNoSsrcs(remote_streams_)) || + if ((!new_stream.has_ssrcs() && !old_has_unsignaled_ssrcs) || !GetStreamBySsrc(remote_streams_, new_stream.first_ssrc())) { - if (AddRecvStream_w(new_stream)) { + if (media_channel()->AddRecvStream(new_stream)) { RTC_LOG(LS_INFO) << "Add remote ssrc: " << (new_stream.has_ssrcs() ? std::to_string(new_stream.first_ssrc()) : "unsignaled") << " to " << ToString(); } else { - rtc::StringBuilder desc; - desc << "Failed to add remote stream ssrc: " - << (new_stream.has_ssrcs() - ? std::to_string(new_stream.first_ssrc()) - : "unsignaled") - << " to " << ToString(); - SafeSetError(desc.str(), error_desc); - ret = false; + error_desc = + StringFormat("Failed to add remote stream ssrc: %s to %s", + new_stream.has_ssrcs() + ? std::to_string(new_stream.first_ssrc()).c_str() + : "unsignaled", + ToString().c_str()); + return false; } } // Update the receiving SSRCs. - demuxer_criteria_.ssrcs.insert(new_stream.ssrcs.begin(), - new_stream.ssrcs.end()); + ssrcs.insert(new_stream.ssrcs.begin(), new_stream.ssrcs.end()); } - // Re-register the sink to update the receiving ssrcs. - if (!RegisterRtpDemuxerSink_w()) { - RTC_LOG(LS_ERROR) << "Failed to set up demuxing for " << ToString(); - ret = false; + + if (demuxer_criteria_.ssrcs() != ssrcs) { + demuxer_criteria_.ssrcs() = std::move(ssrcs); + needs_re_registration = true; + } + + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0); + + // Re-register the sink to update after changing the demuxer criteria. + if (needs_re_registration && !RegisterRtpDemuxerSink_w()) { + error_desc = StringFormat("Failed to set up audio demuxing for mid='%s'.", + mid().c_str()); + return false; } + remote_streams_ = streams; - return ret; + + set_remote_content_direction(content->direction()); + UpdateMediaSendRecvState_w(); + + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); + + return true; } RtpHeaderExtensions BaseChannel::GetDeduplicatedRtpHeaderExtensions( const RtpHeaderExtensions& extensions) { - return webrtc::RtpExtension::DeduplicateHeaderExtensions( - extensions, crypto_options_.srtp.enable_encrypted_rtp_header_extensions - ? 
webrtc::RtpExtension::kPreferEncryptedExtension - : webrtc::RtpExtension::kDiscardEncryptedExtension); + return webrtc::RtpExtension::DeduplicateHeaderExtensions(extensions, + extensions_filter_); } -void BaseChannel::MaybeAddHandledPayloadType(int payload_type) { +bool BaseChannel::MaybeAddHandledPayloadType(int payload_type) { + bool demuxer_criteria_modified = false; if (payload_type_demuxing_enabled_) { - demuxer_criteria_.payload_types.insert(static_cast(payload_type)); + demuxer_criteria_modified = demuxer_criteria_.payload_types() + .insert(static_cast(payload_type)) + .second; } // Even if payload type demuxing is currently disabled, we need to remember // the payload types in case it's re-enabled later. payload_types_.insert(static_cast(payload_type)); + return demuxer_criteria_modified; } -void BaseChannel::ClearHandledPayloadTypes() { - demuxer_criteria_.payload_types.clear(); +bool BaseChannel::ClearHandledPayloadTypes() { + const bool was_empty = demuxer_criteria_.payload_types().empty(); + demuxer_criteria_.payload_types().clear(); payload_types_.clear(); + return !was_empty; } void BaseChannel::SignalSentPacket_n(const rtc::SentPacket& sent_packet) { RTC_DCHECK_RUN_ON(network_thread()); + RTC_DCHECK(network_initialized()); media_channel()->OnPacketSent(sent_packet); } @@ -782,7 +817,7 @@ VoiceChannel::VoiceChannel(rtc::Thread* worker_thread, rtc::Thread* network_thread, rtc::Thread* signaling_thread, std::unique_ptr media_channel, - const std::string& content_name, + absl::string_view mid, bool srtp_required, webrtc::CryptoOptions crypto_options, UniqueRandomIdGenerator* ssrc_generator) @@ -790,7 +825,7 @@ VoiceChannel::VoiceChannel(rtc::Thread* worker_thread, network_thread, signaling_thread, std::move(media_channel), - content_name, + mid, srtp_required, crypto_options, ssrc_generator) {} @@ -799,148 +834,113 @@ VoiceChannel::~VoiceChannel() { TRACE_EVENT0("webrtc", "VoiceChannel::~VoiceChannel"); // this can't be done in the base class, since it calls a virtual DisableMedia_w(); - Deinit(); } void VoiceChannel::UpdateMediaSendRecvState_w() { // Render incoming data if we're the active call, and we have the local // content. We receive data on the default channel and multiplexed streams. - RTC_DCHECK_RUN_ON(worker_thread()); - bool recv = IsReadyToReceiveMedia_w(); - media_channel()->SetPlayout(recv); + bool ready_to_receive = enabled() && webrtc::RtpTransceiverDirectionHasRecv( + local_content_direction()); + media_channel()->SetPlayout(ready_to_receive); // Send outgoing data if we're the active call, we have the remote content, // and we have had some form of connectivity. 
bool send = IsReadyToSendMedia_w(); media_channel()->SetSend(send); - RTC_LOG(LS_INFO) << "Changing voice state, recv=" << recv << " send=" << send - << " for " << ToString(); + RTC_LOG(LS_INFO) << "Changing voice state, recv=" << ready_to_receive + << " send=" << send << " for " << ToString(); } bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, SdpType type, - std::string* error_desc) { + std::string& error_desc) { TRACE_EVENT0("webrtc", "VoiceChannel::SetLocalContent_w"); - RTC_DCHECK_RUN_ON(worker_thread()); - RTC_LOG(LS_INFO) << "Setting local voice description for " << ToString(); + RTC_DLOG(LS_INFO) << "Setting local voice description for " << ToString(); - RtpHeaderExtensions rtp_header_extensions = + RTC_LOG_THREAD_BLOCK_COUNT(); + + RtpHeaderExtensions header_extensions = GetDeduplicatedRtpHeaderExtensions(content->rtp_header_extensions()); - // TODO(tommi): There's a hop to the network thread here. - // some of the below is also network thread related. - UpdateRtpHeaderExtensionMap(rtp_header_extensions); + bool update_header_extensions = true; media_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); AudioRecvParameters recv_params = last_recv_params_; RtpParametersFromMediaDescription( - content->as_audio(), rtp_header_extensions, + content->as_audio(), header_extensions, webrtc::RtpTransceiverDirectionHasRecv(content->direction()), &recv_params); if (!media_channel()->SetRecvParameters(recv_params)) { - SafeSetError( + error_desc = StringFormat( "Failed to set local audio description recv parameters for m-section " - "with mid='" + - content_name() + "'.", - error_desc); + "with mid='%s'.", + mid().c_str()); return false; } + bool criteria_modified = false; if (webrtc::RtpTransceiverDirectionHasRecv(content->direction())) { for (const AudioCodec& codec : content->as_audio()->codecs()) { - MaybeAddHandledPayloadType(codec.id); - } - // Need to re-register the sink to update the handled payload. - if (!RegisterRtpDemuxerSink_w()) { - RTC_LOG(LS_ERROR) << "Failed to set up audio demuxing for " << ToString(); - return false; + if (MaybeAddHandledPayloadType(codec.id)) { + criteria_modified = true; + } } } last_recv_params_ = recv_params; - // TODO(pthatcher): Move local streams into AudioSendParameters, and - // only give it to the media channel once we have a remote - // description too (without a remote description, we won't be able - // to send them anyway). if (!UpdateLocalStreams_w(content->as_audio()->streams(), type, error_desc)) { - SafeSetError( - "Failed to set local audio description streams for m-section with " - "mid='" + - content_name() + "'.", - error_desc); + RTC_DCHECK(!error_desc.empty()); return false; } set_local_content_direction(content->direction()); UpdateMediaSendRecvState_w(); - return true; + + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0); + + bool success = MaybeUpdateDemuxerAndRtpExtensions_w( + criteria_modified, + update_header_extensions + ? 
absl::optional(std::move(header_extensions)) + : absl::nullopt, + error_desc); + + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); + + return success; } bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content, SdpType type, - std::string* error_desc) { + std::string& error_desc) { TRACE_EVENT0("webrtc", "VoiceChannel::SetRemoteContent_w"); - RTC_DCHECK_RUN_ON(worker_thread()); RTC_LOG(LS_INFO) << "Setting remote voice description for " << ToString(); - const AudioContentDescription* audio = content->as_audio(); - - RtpHeaderExtensions rtp_header_extensions = - GetDeduplicatedRtpHeaderExtensions(audio->rtp_header_extensions()); - AudioSendParameters send_params = last_send_params_; - RtpSendParametersFromMediaDescription( - audio, rtp_header_extensions, - webrtc::RtpTransceiverDirectionHasRecv(audio->direction()), &send_params); - send_params.mid = content_name(); + RtpSendParametersFromMediaDescription(content->as_audio(), + extensions_filter(), &send_params); + send_params.mid = mid(); bool parameters_applied = media_channel()->SetSendParameters(send_params); if (!parameters_applied) { - SafeSetError( + error_desc = StringFormat( "Failed to set remote audio description send parameters for m-section " - "with mid='" + - content_name() + "'.", - error_desc); + "with mid='%s'.", + mid().c_str()); return false; } last_send_params_ = send_params; - if (!webrtc::RtpTransceiverDirectionHasSend(content->direction())) { - RTC_DLOG(LS_VERBOSE) << "SetRemoteContent_w: remote side will not send - " - "disable payload type demuxing for " - << ToString(); - ClearHandledPayloadTypes(); - if (!RegisterRtpDemuxerSink_w()) { - RTC_LOG(LS_ERROR) << "Failed to update audio demuxing for " << ToString(); - return false; - } - } - - // TODO(pthatcher): Move remote streams into AudioRecvParameters, - // and only give it to the media channel once we have a local - // description too (without a local description, we won't be able to - // recv them anyway). - if (!UpdateRemoteStreams_w(audio->streams(), type, error_desc)) { - SafeSetError( - "Failed to set remote audio description streams for m-section with " - "mid='" + - content_name() + "'.", - error_desc); - return false; - } - - set_remote_content_direction(content->direction()); - UpdateMediaSendRecvState_w(); - return true; + return UpdateRemoteStreams_w(content, type, error_desc); } VideoChannel::VideoChannel(rtc::Thread* worker_thread, rtc::Thread* network_thread, rtc::Thread* signaling_thread, std::unique_ptr media_channel, - const std::string& content_name, + absl::string_view mid, bool srtp_required, webrtc::CryptoOptions crypto_options, UniqueRandomIdGenerator* ssrc_generator) @@ -948,7 +948,7 @@ VideoChannel::VideoChannel(rtc::Thread* worker_thread, network_thread, signaling_thread, std::move(media_channel), - content_name, + mid, srtp_required, crypto_options, ssrc_generator) {} @@ -957,45 +957,34 @@ VideoChannel::~VideoChannel() { TRACE_EVENT0("webrtc", "VideoChannel::~VideoChannel"); // this can't be done in the base class, since it calls a virtual DisableMedia_w(); - Deinit(); } void VideoChannel::UpdateMediaSendRecvState_w() { // Send outgoing data if we're the active call, we have the remote content, // and we have had some form of connectivity. - RTC_DCHECK_RUN_ON(worker_thread()); bool send = IsReadyToSendMedia_w(); - if (!media_channel()->SetSend(send)) { - RTC_LOG(LS_ERROR) << "Failed to SetSend on video channel: " + ToString(); - // TODO(gangji): Report error back to server. 
- } - + media_channel()->SetSend(send); RTC_LOG(LS_INFO) << "Changing video state, send=" << send << " for " << ToString(); } -void VideoChannel::FillBitrateInfo(BandwidthEstimationInfo* bwe_info) { - RTC_DCHECK_RUN_ON(worker_thread()); - VideoMediaChannel* mc = media_channel(); - mc->FillBitrateInfo(bwe_info); -} - bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, SdpType type, - std::string* error_desc) { + std::string& error_desc) { TRACE_EVENT0("webrtc", "VideoChannel::SetLocalContent_w"); - RTC_DCHECK_RUN_ON(worker_thread()); - RTC_LOG(LS_INFO) << "Setting local video description for " << ToString(); + RTC_DLOG(LS_INFO) << "Setting local video description for " << ToString(); + + RTC_LOG_THREAD_BLOCK_COUNT(); - RtpHeaderExtensions rtp_header_extensions = + RtpHeaderExtensions header_extensions = GetDeduplicatedRtpHeaderExtensions(content->rtp_header_extensions()); - UpdateRtpHeaderExtensionMap(rtp_header_extensions); + bool update_header_extensions = true; media_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); VideoRecvParameters recv_params = last_recv_params_; RtpParametersFromMediaDescription( - content->as_video(), rtp_header_extensions, + content->as_video(), header_extensions, webrtc::RtpTransceiverDirectionHasRecv(content->direction()), &recv_params); @@ -1010,11 +999,10 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, send_codec.packetization.reset(); needs_send_params_update = true; } else if (recv_codec->packetization != send_codec.packetization) { - SafeSetError( + error_desc = StringFormat( "Failed to set local answer due to invalid codec packetization " - "specified in m-section with mid='" + - content_name() + "'.", - error_desc); + "specified in m-section with mid='%s'.", + mid().c_str()); return false; } } @@ -1022,22 +1010,18 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, } if (!media_channel()->SetRecvParameters(recv_params)) { - SafeSetError( + error_desc = StringFormat( "Failed to set local video description recv parameters for m-section " - "with mid='" + - content_name() + "'.", - error_desc); + "with mid='%s'.", + mid().c_str()); return false; } + bool criteria_modified = false; if (webrtc::RtpTransceiverDirectionHasRecv(content->direction())) { for (const VideoCodec& codec : content->as_video()->codecs()) { - MaybeAddHandledPayloadType(codec.id); - } - // Need to re-register the sink to update the handled payload. - if (!RegisterRtpDemuxerSink_w()) { - RTC_LOG(LS_ERROR) << "Failed to set up video demuxing for " << ToString(); - return false; + if (MaybeAddHandledPayloadType(codec.id)) + criteria_modified = true; } } @@ -1045,52 +1029,49 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, if (needs_send_params_update) { if (!media_channel()->SetSendParameters(send_params)) { - SafeSetError("Failed to set send parameters for m-section with mid='" + - content_name() + "'.", - error_desc); + error_desc = StringFormat( + "Failed to set send parameters for m-section with mid='%s'.", + mid().c_str()); return false; } last_send_params_ = send_params; } - // TODO(pthatcher): Move local streams into VideoSendParameters, and - // only give it to the media channel once we have a remote - // description too (without a remote description, we won't be able - // to send them anyway). 
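// The hunks in this file replace rtc::StringBuilder + SafeSetError(std::string*)
// with printf-style formatting written directly into a std::string&
// out-parameter. A self-contained sketch of that error-reporting shape;
// FormatError below is a local stand-in for the formatter used in the patch,
// not the real helper.
#include <cstdarg>
#include <cstdint>
#include <cstdio>
#include <string>

static std::string FormatError(const char* fmt, ...) {
  char buf[256];
  va_list args;
  va_start(args, fmt);
  std::vsnprintf(buf, sizeof(buf), fmt, args);
  va_end(args);
  return std::string(buf);
}

static bool RemoveSendStreamOrFail(uint32_t ssrc, const std::string& mid,
                                   std::string& error_desc) {
  const bool removed = false;  // Pretend the removal failed, for illustration.
  if (!removed) {
    error_desc = FormatError(
        "Failed to remove send stream with ssrc %u from m-section with "
        "mid='%s'.",
        ssrc, mid.c_str());
    return false;
  }
  return true;
}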
if (!UpdateLocalStreams_w(content->as_video()->streams(), type, error_desc)) { - SafeSetError( - "Failed to set local video description streams for m-section with " - "mid='" + - content_name() + "'.", - error_desc); + RTC_DCHECK(!error_desc.empty()); return false; } set_local_content_direction(content->direction()); UpdateMediaSendRecvState_w(); - return true; + + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0); + + bool success = MaybeUpdateDemuxerAndRtpExtensions_w( + criteria_modified, + update_header_extensions + ? absl::optional(std::move(header_extensions)) + : absl::nullopt, + error_desc); + + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); + + return success; } bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, SdpType type, - std::string* error_desc) { + std::string& error_desc) { TRACE_EVENT0("webrtc", "VideoChannel::SetRemoteContent_w"); - RTC_DCHECK_RUN_ON(worker_thread()); RTC_LOG(LS_INFO) << "Setting remote video description for " << ToString(); const VideoContentDescription* video = content->as_video(); - RtpHeaderExtensions rtp_header_extensions = - GetDeduplicatedRtpHeaderExtensions(video->rtp_header_extensions()); - VideoSendParameters send_params = last_send_params_; - RtpSendParametersFromMediaDescription( - video, rtp_header_extensions, - webrtc::RtpTransceiverDirectionHasRecv(video->direction()), &send_params); - if (video->conference_mode()) { - send_params.conference_mode = true; - } - send_params.mid = content_name(); + RtpSendParametersFromMediaDescription(video, extensions_filter(), + &send_params); + send_params.mid = mid(); + send_params.conference_mode = video->conference_mode(); VideoRecvParameters recv_params = last_recv_params_; @@ -1103,11 +1084,10 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, recv_codec.packetization.reset(); needs_recv_params_update = true; } else if (send_codec->packetization != recv_codec.packetization) { - SafeSetError( + error_desc = StringFormat( "Failed to set remote answer due to invalid codec packetization " - "specifid in m-section with mid='" + - content_name() + "'.", - error_desc); + "specifid in m-section with mid='%s'.", + mid().c_str()); return false; } } @@ -1115,51 +1095,25 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, } if (!media_channel()->SetSendParameters(send_params)) { - SafeSetError( + error_desc = StringFormat( "Failed to set remote video description send parameters for m-section " - "with mid='" + - content_name() + "'.", - error_desc); + "with mid='%s'.", + mid().c_str()); return false; } last_send_params_ = send_params; if (needs_recv_params_update) { if (!media_channel()->SetRecvParameters(recv_params)) { - SafeSetError("Failed to set recv parameters for m-section with mid='" + - content_name() + "'.", - error_desc); + error_desc = StringFormat( + "Failed to set recv parameters for m-section with mid='%s'.", + mid().c_str()); return false; } last_recv_params_ = recv_params; } - if (!webrtc::RtpTransceiverDirectionHasSend(content->direction())) { - RTC_DLOG(LS_VERBOSE) << "SetRemoteContent_w: remote side will not send - " - "disable payload type demuxing for " - << ToString(); - ClearHandledPayloadTypes(); - if (!RegisterRtpDemuxerSink_w()) { - RTC_LOG(LS_ERROR) << "Failed to update video demuxing for " << ToString(); - return false; - } - } - - // TODO(pthatcher): Move remote streams into VideoRecvParameters, - // and only give it to the media channel once we have a local - // description too (without a local description, we 
won't be able to - // recv them anyway). - if (!UpdateRemoteStreams_w(video->streams(), type, error_desc)) { - SafeSetError( - "Failed to set remote video description streams for m-section with " - "mid='" + - content_name() + "'.", - error_desc); - return false; - } - set_remote_content_direction(content->direction()); - UpdateMediaSendRecvState_w(); - return true; + return UpdateRemoteStreams_w(content, type, error_desc); } } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel.h b/TMessagesProj/jni/voip/webrtc/pc/channel.h index 4628c86bd8..5bf4823be2 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel.h +++ b/TMessagesProj/jni/voip/webrtc/pc/channel.h @@ -11,68 +11,46 @@ #ifndef PC_CHANNEL_H_ #define PC_CHANNEL_H_ -#include #include -#include +#include #include -#include #include #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" -#include "api/call/audio_sink.h" #include "api/crypto/crypto_options.h" -#include "api/function_view.h" #include "api/jsep.h" #include "api/media_types.h" -#include "api/rtp_receiver_interface.h" +#include "api/rtp_parameters.h" #include "api/rtp_transceiver_direction.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" -#include "api/video/video_sink_interface.h" -#include "api/video/video_source_interface.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "call/rtp_demuxer.h" #include "call/rtp_packet_sink_interface.h" #include "media/base/media_channel.h" -#include "media/base/media_engine.h" #include "media/base/stream_params.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "p2p/base/dtls_transport_internal.h" -#include "p2p/base/packet_transport_internal.h" #include "pc/channel_interface.h" -#include "pc/dtls_srtp_transport.h" -#include "pc/media_session.h" -#include "pc/rtp_transport.h" #include "pc/rtp_transport_internal.h" #include "pc/session_description.h" -#include "pc/srtp_filter.h" -#include "pc/srtp_transport.h" #include "rtc_base/async_packet_socket.h" -#include "rtc_base/async_udp_socket.h" #include "rtc_base/checks.h" +#include "rtc_base/containers/flat_set.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/location.h" -#include "rtc_base/network.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" #include "rtc_base/socket.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_message.h" #include "rtc_base/unique_id_generator.h" -namespace webrtc { -class AudioSinkInterface; -} // namespace webrtc - namespace cricket { -struct CryptoParams; - // BaseChannel contains logic common to voice and video, including enable, // marshaling calls to a worker and network threads, and connection and media // monitors. @@ -86,10 +64,6 @@ struct CryptoParams; // and methods with _s suffix on signaling thread. // Network and worker threads may be the same thread. // -// WARNING! SUBCLASSES MUST CALL Deinit() IN THEIR DESTRUCTORS! -// This is required to avoid a data race between the destructor modifying the -// vtable, and the media channel's thread using BaseChannel as the -// NetworkInterface. class BaseChannel : public ChannelInterface, // TODO(tommi): Remove has_slots inheritance. 
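// The class comment above documents the _w/_n/_s suffix convention for
// methods bound to the worker, network and signaling threads. A reduced
// sketch of how such a convention is typically enforced at runtime
// (std::thread-based and purely illustrative; the real code relies on
// webrtc::SequenceChecker plus the RTC_DCHECK_RUN_ON / RTC_RUN_ON macros):
#include <cassert>
#include <thread>

class ThreadBoundChannel {
 public:
  ThreadBoundChannel(std::thread::id worker, std::thread::id network)
      : worker_(worker), network_(network) {}

  // "_w" methods may only run on the worker thread.
  void EnableMedia_w() {
    assert(std::this_thread::get_id() == worker_);
    enabled_ = true;
  }

  // "_n" methods may only run on the network thread.
  void ChannelWritable_n() {
    assert(std::this_thread::get_id() == network_);
    writable_ = true;
  }

 private:
  const std::thread::id worker_;
  const std::thread::id network_;
  bool enabled_ = false;
  bool writable_ = false;
};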
@@ -109,27 +83,21 @@ class BaseChannel : public ChannelInterface, rtc::Thread* network_thread, rtc::Thread* signaling_thread, std::unique_ptr media_channel, - const std::string& content_name, + absl::string_view mid, bool srtp_required, webrtc::CryptoOptions crypto_options, rtc::UniqueRandomIdGenerator* ssrc_generator); virtual ~BaseChannel(); - virtual void Init_w(webrtc::RtpTransportInternal* rtp_transport); - - // Deinit may be called multiple times and is simply ignored if it's already - // done. - void Deinit(); rtc::Thread* worker_thread() const { return worker_thread_; } rtc::Thread* network_thread() const { return network_thread_; } - const std::string& content_name() const override { return content_name_; } + const std::string& mid() const override { return demuxer_criteria_.mid(); } // TODO(deadbeef): This is redundant; remove this. - const std::string& transport_name() const override { + absl::string_view transport_name() const override { RTC_DCHECK_RUN_ON(network_thread()); if (rtp_transport_) return rtp_transport_->transport_name(); - // TODO(tommi): Delete this variable. - return transport_name_; + return ""; } // This function returns true if using SRTP (DTLS-based keying or SDES). @@ -152,10 +120,10 @@ class BaseChannel : public ChannelInterface, // Channel control bool SetLocalContent(const MediaContentDescription* content, webrtc::SdpType type, - std::string* error_desc) override; + std::string& error_desc) override; bool SetRemoteContent(const MediaContentDescription* content, webrtc::SdpType type, - std::string* error_desc) override; + std::string& error_desc) override; // Controls whether this channel will receive packets on the basis of // matching payload type alone. This is needed for legacy endpoints that // don't signal SSRCs or use MID/RID, but doesn't make sense if there is @@ -190,33 +158,57 @@ class BaseChannel : public ChannelInterface, MediaChannel* media_channel() const override { return media_channel_.get(); } + VideoMediaChannel* video_media_channel() const override { + RTC_CHECK(false) << "Attempt to fetch video channel from non-video"; + return nullptr; + } + VoiceMediaChannel* voice_media_channel() const override { + RTC_CHECK(false) << "Attempt to fetch voice channel from non-voice"; + return nullptr; + } protected: - bool was_ever_writable() const { - RTC_DCHECK_RUN_ON(worker_thread()); - return was_ever_writable_; - } - void set_local_content_direction(webrtc::RtpTransceiverDirection direction) { - RTC_DCHECK_RUN_ON(worker_thread()); + void set_local_content_direction(webrtc::RtpTransceiverDirection direction) + RTC_RUN_ON(worker_thread()) { local_content_direction_ = direction; } - void set_remote_content_direction(webrtc::RtpTransceiverDirection direction) { - RTC_DCHECK_RUN_ON(worker_thread()); + + webrtc::RtpTransceiverDirection local_content_direction() const + RTC_RUN_ON(worker_thread()) { + return local_content_direction_; + } + + void set_remote_content_direction(webrtc::RtpTransceiverDirection direction) + RTC_RUN_ON(worker_thread()) { remote_content_direction_ = direction; } - // These methods verify that: + + webrtc::RtpTransceiverDirection remote_content_direction() const + RTC_RUN_ON(worker_thread()) { + return remote_content_direction_; + } + + webrtc::RtpExtension::Filter extensions_filter() const { + return extensions_filter_; + } + + bool network_initialized() RTC_RUN_ON(network_thread()) { + return media_channel_->HasNetworkInterface(); + } + + bool enabled() const RTC_RUN_ON(worker_thread()) { return enabled_; } + rtc::Thread* 
signaling_thread() const { return signaling_thread_; } + + // Call to verify that: // * The required content description directions have been set. // * The channel is enabled. - // * And for sending: - // - The SRTP filter is active if it's needed. - // - The transport has been writable before, meaning it should be at least - // possible to succeed in sending a packet. + // * The SRTP filter is active if it's needed. + // * The transport has been writable before, meaning it should be at least + // possible to succeed in sending a packet. // // When any of these properties change, UpdateMediaSendRecvState_w should be // called. - bool IsReadyToReceiveMedia_w() const RTC_RUN_ON(worker_thread()); bool IsReadyToSendMedia_w() const RTC_RUN_ON(worker_thread()); - rtc::Thread* signaling_thread() const { return signaling_thread_; } // NetworkInterface implementation, called by MediaEngine bool SendPacket(rtc::CopyOnWriteBuffer* packet, @@ -243,13 +235,8 @@ class BaseChannel : public ChannelInterface, void ChannelWritable_n() RTC_RUN_ON(network_thread()); void ChannelNotWritable_n() RTC_RUN_ON(network_thread()); - bool AddRecvStream_w(const StreamParams& sp) RTC_RUN_ON(worker_thread()); - bool RemoveRecvStream_w(uint32_t ssrc) RTC_RUN_ON(worker_thread()); - void ResetUnsignaledRecvStream_w() RTC_RUN_ON(worker_thread()); bool SetPayloadTypeDemuxingEnabled_w(bool enabled) RTC_RUN_ON(worker_thread()); - bool AddSendStream_w(const StreamParams& sp) RTC_RUN_ON(worker_thread()); - bool RemoveSendStream_w(uint32_t ssrc) RTC_RUN_ON(worker_thread()); // Should be called whenever the conditions for // IsReadyToReceiveMedia/IsReadyToSendMedia are satisfied (or unsatisfied). @@ -258,19 +245,19 @@ class BaseChannel : public ChannelInterface, bool UpdateLocalStreams_w(const std::vector& streams, webrtc::SdpType type, - std::string* error_desc) + std::string& error_desc) RTC_RUN_ON(worker_thread()); - bool UpdateRemoteStreams_w(const std::vector& streams, + bool UpdateRemoteStreams_w(const MediaContentDescription* content, webrtc::SdpType type, - std::string* error_desc) + std::string& error_desc) RTC_RUN_ON(worker_thread()); virtual bool SetLocalContent_w(const MediaContentDescription* content, webrtc::SdpType type, - std::string* error_desc) + std::string& error_desc) RTC_RUN_ON(worker_thread()) = 0; virtual bool SetRemoteContent_w(const MediaContentDescription* content, webrtc::SdpType type, - std::string* error_desc) + std::string& error_desc) RTC_RUN_ON(worker_thread()) = 0; // Returns a list of RTP header extensions where any extension URI is unique. @@ -281,12 +268,26 @@ class BaseChannel : public ChannelInterface, // Add `payload_type` to `demuxer_criteria_` if payload type demuxing is // enabled. - void MaybeAddHandledPayloadType(int payload_type) RTC_RUN_ON(worker_thread()); - - void ClearHandledPayloadTypes() RTC_RUN_ON(worker_thread()); - - void UpdateRtpHeaderExtensionMap( - const RtpHeaderExtensions& header_extensions); + // Returns true if the demuxer payload type changed and a re-registration + // is needed. + bool MaybeAddHandledPayloadType(int payload_type) RTC_RUN_ON(worker_thread()); + + // Returns true if the demuxer payload type criteria was non-empty before + // clearing. + bool ClearHandledPayloadTypes() RTC_RUN_ON(worker_thread()); + + // Hops to the network thread to update the transport if an update is + // requested. If `update_demuxer` is false and `extensions` is not set, the + // function simply returns. 
If either of these is set, the function updates + // the transport with either or both of the demuxer criteria and the supplied + // rtp header extensions. + // Returns `true` if either an update wasn't needed or one was successfully + // applied. If the return value is `false`, then updating the demuxer criteria + // failed, which needs to be treated as an error. + bool MaybeUpdateDemuxerAndRtpExtensions_w( + bool update_demuxer, + absl::optional extensions, + std::string& error_desc) RTC_RUN_ON(worker_thread()); bool RegisterRtpDemuxerSink_w() RTC_RUN_ON(worker_thread()); @@ -294,8 +295,8 @@ class BaseChannel : public ChannelInterface, std::string ToString() const; private: - bool ConnectToRtpTransport() RTC_RUN_ON(network_thread()); - void DisconnectFromRtpTransport() RTC_RUN_ON(network_thread()); + bool ConnectToRtpTransport_n() RTC_RUN_ON(network_thread()); + void DisconnectFromRtpTransport_n() RTC_RUN_ON(network_thread()); void SignalSentPacket_n(const rtc::SentPacket& sent_packet); rtc::Thread* const worker_thread_; @@ -303,19 +304,9 @@ class BaseChannel : public ChannelInterface, rtc::Thread* const signaling_thread_; rtc::scoped_refptr alive_; - const std::string content_name_; - std::function on_first_packet_received_ RTC_GUARDED_BY(network_thread()); - // Won't be set when using raw packet transports. SDP-specific thing. - // TODO(bugs.webrtc.org/12230): Written on network thread, read on - // worker thread (at least). - // TODO(tommi): Remove this variable and instead use rtp_transport_ to - // return the transport name. This variable is currently required for - // "for_test" methods. - std::string transport_name_; - webrtc::RtpTransportInternal* rtp_transport_ RTC_GUARDED_BY(network_thread()) = nullptr; @@ -328,24 +319,9 @@ class BaseChannel : public ChannelInterface, bool was_ever_writable_ RTC_GUARDED_BY(worker_thread()) = false; const bool srtp_required_ = true; - // TODO(tommi): This field shouldn't be necessary. It's a copy of - // PeerConnection::GetCryptoOptions(), which is const state. It's also only - // used to filter header extensions when calling - // `rtp_transport_->UpdateRtpHeaderExtensionMap()` when the local/remote - // content description is updated. Since the transport is actually owned - // by the transport controller that also gets updated whenever the content - // description changes, it seems we have two paths into the transports, along - // with several thread hops via various classes (such as the Channel classes) - // that only serve as additional layers and store duplicate state. The Jsep* - // family of classes already apply session description updates on the network - // thread every time it changes. - // For the Channel classes, we should be able to get rid of: - // * crypto_options (and fewer construction parameters)_ - // * UpdateRtpHeaderExtensionMap - // * GetFilteredRtpHeaderExtensions - // * Blocking thread hop to the network thread for every call to set - // local/remote content is updated. - const webrtc::CryptoOptions crypto_options_; + // Set to either kPreferEncryptedExtension or kDiscardEncryptedExtension + // based on the supplied CryptoOptions. + const webrtc::RtpExtension::Filter extensions_filter_; // MediaChannel related members that should be accessed from the worker // thread. 
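// The comment block above describes MaybeUpdateDemuxerAndRtpExtensions_w:
// a no-op when nothing changed, otherwise a single transport update covering
// the demuxer criteria and/or the header extensions, where only a failed
// demuxer update counts as an error. A condensed sketch of that contract,
// using illustrative types and std::optional rather than the real signature:
#include <optional>
#include <string>
#include <vector>

struct RtpExtensionEntry {
  std::string uri;
  int id;
};

static bool RegisterDemuxerSink() { return true; }  // Placeholder.

static bool MaybeUpdateDemuxerAndExtensions(
    bool update_demuxer,
    const std::optional<std::vector<RtpExtensionEntry>>& extensions,
    std::string& error_desc) {
  if (!update_demuxer && !extensions.has_value())
    return true;  // Nothing to apply; avoid the network-thread hop entirely.
  if (extensions.has_value()) {
    // Apply the new header-extension map to the transport (omitted here).
  }
  if (update_demuxer && !RegisterDemuxerSink()) {
    error_desc = "Failed to update demuxer criteria.";
    return false;  // Only the demuxer failure is treated as an error.
  }
  return true;
}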
@@ -358,22 +334,18 @@ class BaseChannel : public ChannelInterface, bool payload_type_demuxing_enabled_ RTC_GUARDED_BY(worker_thread()) = true; std::vector local_streams_ RTC_GUARDED_BY(worker_thread()); std::vector remote_streams_ RTC_GUARDED_BY(worker_thread()); - // TODO(bugs.webrtc.org/12230): local_content_direction and - // remote_content_direction are set on the worker thread, but accessed on the - // network thread. - webrtc::RtpTransceiverDirection local_content_direction_ = - webrtc::RtpTransceiverDirection::kInactive; - webrtc::RtpTransceiverDirection remote_content_direction_ = - webrtc::RtpTransceiverDirection::kInactive; + webrtc::RtpTransceiverDirection local_content_direction_ RTC_GUARDED_BY( + worker_thread()) = webrtc::RtpTransceiverDirection::kInactive; + webrtc::RtpTransceiverDirection remote_content_direction_ RTC_GUARDED_BY( + worker_thread()) = webrtc::RtpTransceiverDirection::kInactive; // Cached list of payload types, used if payload type demuxing is re-enabled. - std::set payload_types_ RTC_GUARDED_BY(worker_thread()); + webrtc::flat_set payload_types_ RTC_GUARDED_BY(worker_thread()); + // A stored copy of the rtp header extensions as applied to the transport. + RtpHeaderExtensions rtp_header_extensions_ RTC_GUARDED_BY(worker_thread()); // TODO(bugs.webrtc.org/12239): Modified on worker thread, accessed // on network thread in RegisterRtpDemuxerSink_n (called from Init_w) webrtc::RtpDemuxerCriteria demuxer_criteria_; - // Accessed on the worker thread, modified on the network thread from - // RegisterRtpDemuxerSink_w's Invoke. - webrtc::RtpDemuxerCriteria previous_demuxer_criteria_; // This generator is used to generate SSRCs for local streams. // This is needed in cases where SSRCs are not negotiated or set explicitly // like in Simulcast. @@ -389,7 +361,7 @@ class VoiceChannel : public BaseChannel { rtc::Thread* network_thread, rtc::Thread* signaling_thread, std::unique_ptr channel, - const std::string& content_name, + absl::string_view mid, bool srtp_required, webrtc::CryptoOptions crypto_options, rtc::UniqueRandomIdGenerator* ssrc_generator); @@ -400,26 +372,32 @@ class VoiceChannel : public BaseChannel { return static_cast(BaseChannel::media_channel()); } + VoiceMediaChannel* voice_media_channel() const override { + return static_cast(media_channel()); + } + cricket::MediaType media_type() const override { return cricket::MEDIA_TYPE_AUDIO; } private: // overrides from BaseChannel - void UpdateMediaSendRecvState_w() override; + void UpdateMediaSendRecvState_w() RTC_RUN_ON(worker_thread()) override; bool SetLocalContent_w(const MediaContentDescription* content, webrtc::SdpType type, - std::string* error_desc) override; + std::string& error_desc) + RTC_RUN_ON(worker_thread()) override; bool SetRemoteContent_w(const MediaContentDescription* content, webrtc::SdpType type, - std::string* error_desc) override; + std::string& error_desc) + RTC_RUN_ON(worker_thread()) override; // Last AudioSendParameters sent down to the media_channel() via // SetSendParameters. - AudioSendParameters last_send_params_; + AudioSendParameters last_send_params_ RTC_GUARDED_BY(worker_thread()); // Last AudioRecvParameters sent down to the media_channel() via // SetRecvParameters. - AudioRecvParameters last_recv_params_; + AudioRecvParameters last_recv_params_ RTC_GUARDED_BY(worker_thread()); }; // VideoChannel is a specialization for video. 
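// VoiceChannel above overrides voice_media_channel() with a static_cast of
// the generic media_channel(), while the BaseChannel default flags a request
// for the wrong type as a hard error (RTC_CHECK(false)). A reduced sketch of
// that downcast-accessor pattern with illustrative class names:
#include <cstdlib>
#include <iostream>

struct MediaChannel {
  virtual ~MediaChannel() = default;
};
struct VoiceMediaChannel : MediaChannel {};

class BaseChannelSketch {
 public:
  virtual ~BaseChannelSketch() = default;
  virtual MediaChannel* media_channel() const = 0;
  // Default: asking a non-voice channel for its voice channel is fatal.
  virtual VoiceMediaChannel* voice_media_channel() const {
    std::cerr << "Attempt to fetch voice channel from non-voice\n";
    std::abort();
  }
};

class VoiceChannelSketch : public BaseChannelSketch {
 public:
  explicit VoiceChannelSketch(VoiceMediaChannel* channel) : channel_(channel) {}
  MediaChannel* media_channel() const override { return channel_; }
  // Safe: a voice channel always owns a VoiceMediaChannel.
  VoiceMediaChannel* voice_media_channel() const override {
    return static_cast<VoiceMediaChannel*>(media_channel());
  }

 private:
  VoiceMediaChannel* const channel_;
};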
@@ -429,7 +407,7 @@ class VideoChannel : public BaseChannel { rtc::Thread* network_thread, rtc::Thread* signaling_thread, std::unique_ptr media_channel, - const std::string& content_name, + absl::string_view mid, bool srtp_required, webrtc::CryptoOptions crypto_options, rtc::UniqueRandomIdGenerator* ssrc_generator); @@ -440,7 +418,9 @@ class VideoChannel : public BaseChannel { return static_cast(BaseChannel::media_channel()); } - void FillBitrateInfo(BandwidthEstimationInfo* bwe_info); + VideoMediaChannel* video_media_channel() const override { + return static_cast(media_channel()); + } cricket::MediaType media_type() const override { return cricket::MEDIA_TYPE_VIDEO; @@ -448,20 +428,22 @@ class VideoChannel : public BaseChannel { private: // overrides from BaseChannel - void UpdateMediaSendRecvState_w() override; + void UpdateMediaSendRecvState_w() RTC_RUN_ON(worker_thread()) override; bool SetLocalContent_w(const MediaContentDescription* content, webrtc::SdpType type, - std::string* error_desc) override; + std::string& error_desc) + RTC_RUN_ON(worker_thread()) override; bool SetRemoteContent_w(const MediaContentDescription* content, webrtc::SdpType type, - std::string* error_desc) override; + std::string& error_desc) + RTC_RUN_ON(worker_thread()) override; // Last VideoSendParameters sent down to the media_channel() via // SetSendParameters. - VideoSendParameters last_send_params_; + VideoSendParameters last_send_params_ RTC_GUARDED_BY(worker_thread()); // Last VideoRecvParameters sent down to the media_channel() via // SetRecvParameters. - VideoRecvParameters last_recv_params_; + VideoRecvParameters last_recv_params_ RTC_GUARDED_BY(worker_thread()); }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h b/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h index 3b71f0f8b5..3c6ca6fe6a 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h +++ b/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h @@ -11,31 +11,56 @@ #ifndef PC_CHANNEL_INTERFACE_H_ #define PC_CHANNEL_INTERFACE_H_ +#include #include #include +#include "absl/strings/string_view.h" #include "api/jsep.h" #include "api/media_types.h" #include "media/base/media_channel.h" #include "pc/rtp_transport_internal.h" +namespace webrtc { +class Call; +class VideoBitrateAllocatorFactory; +} // namespace webrtc + namespace cricket { class MediaContentDescription; +struct MediaConfig; + +// A Channel is a construct that groups media streams of the same type +// (audio or video), both outgoing and incoming. +// When the PeerConnection API is used, a Channel corresponds one to one +// to an RtpTransceiver. +// When Unified Plan is used, there can only be at most one outgoing and +// one incoming stream. With Plan B, there can be more than one. -// ChannelInterface contains methods common to voice, video and data channels. +// ChannelInterface contains methods common to voice and video channels. // As more methods are added to BaseChannel, they should be included in the // interface as well. +// TODO(bugs.webrtc.org/13931): Merge this class into RtpTransceiver. class ChannelInterface { public: + virtual ~ChannelInterface() = default; virtual cricket::MediaType media_type() const = 0; virtual MediaChannel* media_channel() const = 0; - + // Typecasts of media_channel(). Will cause an exception if the + // channel is of the wrong type. 
+ virtual VideoMediaChannel* video_media_channel() const = 0; + virtual VoiceMediaChannel* voice_media_channel() const = 0; + + // Returns a string view for the transport name. Fetching the transport name + // must be done on the network thread only and note that the lifetime of + // the returned object should be assumed to only be the calling scope. // TODO(deadbeef): This is redundant; remove this. - virtual const std::string& transport_name() const = 0; + virtual absl::string_view transport_name() const = 0; - virtual const std::string& content_name() const = 0; + // TODO(tommi): Change return type to string_view. + virtual const std::string& mid() const = 0; // Enables or disables this channel virtual void Enable(bool enable) = 0; @@ -47,10 +72,10 @@ class ChannelInterface { // Channel control virtual bool SetLocalContent(const MediaContentDescription* content, webrtc::SdpType type, - std::string* error_desc) = 0; + std::string& error_desc) = 0; virtual bool SetRemoteContent(const MediaContentDescription* content, webrtc::SdpType type, - std::string* error_desc) = 0; + std::string& error_desc) = 0; virtual bool SetPayloadTypeDemuxingEnabled(bool enabled) = 0; // Access to the local and remote streams that were set on the channel. @@ -63,9 +88,6 @@ class ChannelInterface { // * An SrtpTransport for SDES. // * A DtlsSrtpTransport for DTLS-SRTP. virtual bool SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) = 0; - - protected: - virtual ~ChannelInterface() = default; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.cc b/TMessagesProj/jni/voip/webrtc/pc/channel_manager.cc deleted file mode 100644 index b58830b215..0000000000 --- a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.cc +++ /dev/null @@ -1,276 +0,0 @@ -/* - * Copyright 2004 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "pc/channel_manager.h" - -#include -#include - -#include "absl/algorithm/container.h" -#include "absl/memory/memory.h" -#include "absl/strings/match.h" -#include "api/sequence_checker.h" -#include "media/base/media_constants.h" -#include "rtc_base/checks.h" -#include "rtc_base/location.h" -#include "rtc_base/logging.h" -#include "rtc_base/trace_event.h" - -namespace cricket { - -// static -std::unique_ptr ChannelManager::Create( - std::unique_ptr media_engine, - bool enable_rtx, - rtc::Thread* worker_thread, - rtc::Thread* network_thread) { - RTC_DCHECK_RUN_ON(worker_thread); - RTC_DCHECK(network_thread); - RTC_DCHECK(worker_thread); - - if (media_engine) - media_engine->Init(); - - return absl::WrapUnique(new ChannelManager( - std::move(media_engine), enable_rtx, worker_thread, network_thread)); -} - -ChannelManager::ChannelManager( - std::unique_ptr media_engine, - bool enable_rtx, - rtc::Thread* worker_thread, - rtc::Thread* network_thread) - : media_engine_(std::move(media_engine)), - worker_thread_(worker_thread), - network_thread_(network_thread), - enable_rtx_(enable_rtx) { - RTC_DCHECK(worker_thread_); - RTC_DCHECK(network_thread_); - RTC_DCHECK_RUN_ON(worker_thread_); -} - -ChannelManager::~ChannelManager() { - RTC_DCHECK_RUN_ON(worker_thread_); -} - -void ChannelManager::GetSupportedAudioSendCodecs( - std::vector* codecs) const { - if (!media_engine_) { - return; - } - *codecs = media_engine_->voice().send_codecs(); -} - -void ChannelManager::GetSupportedAudioReceiveCodecs( - std::vector* codecs) const { - if (!media_engine_) { - return; - } - *codecs = media_engine_->voice().recv_codecs(); -} - -void ChannelManager::GetSupportedVideoSendCodecs( - std::vector* codecs) const { - if (!media_engine_) { - return; - } - codecs->clear(); - - std::vector video_codecs = media_engine_->video().send_codecs(); - for (const auto& video_codec : video_codecs) { - if (!enable_rtx_ && - absl::EqualsIgnoreCase(kRtxCodecName, video_codec.name)) { - continue; - } - codecs->push_back(video_codec); - } -} - -void ChannelManager::GetSupportedVideoReceiveCodecs( - std::vector* codecs) const { - if (!media_engine_) { - return; - } - codecs->clear(); - - std::vector video_codecs = media_engine_->video().recv_codecs(); - for (const auto& video_codec : video_codecs) { - if (!enable_rtx_ && - absl::EqualsIgnoreCase(kRtxCodecName, video_codec.name)) { - continue; - } - codecs->push_back(video_codec); - } -} - -RtpHeaderExtensions ChannelManager::GetDefaultEnabledAudioRtpHeaderExtensions() - const { - if (!media_engine_) - return {}; - return GetDefaultEnabledRtpHeaderExtensions(media_engine_->voice()); -} - -std::vector -ChannelManager::GetSupportedAudioRtpHeaderExtensions() const { - if (!media_engine_) - return {}; - return media_engine_->voice().GetRtpHeaderExtensions(); -} - -RtpHeaderExtensions ChannelManager::GetDefaultEnabledVideoRtpHeaderExtensions() - const { - if (!media_engine_) - return {}; - return GetDefaultEnabledRtpHeaderExtensions(media_engine_->video()); -} - -std::vector -ChannelManager::GetSupportedVideoRtpHeaderExtensions() const { - if (!media_engine_) - return {}; - return media_engine_->video().GetRtpHeaderExtensions(); -} - -VoiceChannel* ChannelManager::CreateVoiceChannel( - webrtc::Call* call, - const MediaConfig& media_config, - webrtc::RtpTransportInternal* rtp_transport, - rtc::Thread* signaling_thread, - const std::string& content_name, - bool srtp_required, - const webrtc::CryptoOptions& crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator, - const 
AudioOptions& options) { - RTC_DCHECK(call); - RTC_DCHECK(media_engine_); - // TODO(bugs.webrtc.org/11992): Remove this workaround after updates in - // PeerConnection and add the expectation that we're already on the right - // thread. - if (!worker_thread_->IsCurrent()) { - return worker_thread_->Invoke(RTC_FROM_HERE, [&] { - return CreateVoiceChannel(call, media_config, rtp_transport, - signaling_thread, content_name, srtp_required, - crypto_options, ssrc_generator, options); - }); - } - - RTC_DCHECK_RUN_ON(worker_thread_); - - VoiceMediaChannel* media_channel = media_engine_->voice().CreateMediaChannel( - call, media_config, options, crypto_options); - if (!media_channel) { - return nullptr; - } - - auto voice_channel = std::make_unique( - worker_thread_, network_thread_, signaling_thread, - absl::WrapUnique(media_channel), content_name, srtp_required, - crypto_options, ssrc_generator); - - voice_channel->Init_w(rtp_transport); - - VoiceChannel* voice_channel_ptr = voice_channel.get(); - voice_channels_.push_back(std::move(voice_channel)); - return voice_channel_ptr; -} - -void ChannelManager::DestroyVoiceChannel(VoiceChannel* voice_channel) { - TRACE_EVENT0("webrtc", "ChannelManager::DestroyVoiceChannel"); - RTC_DCHECK(voice_channel); - - if (!worker_thread_->IsCurrent()) { - worker_thread_->Invoke(RTC_FROM_HERE, - [&] { DestroyVoiceChannel(voice_channel); }); - return; - } - - RTC_DCHECK_RUN_ON(worker_thread_); - - voice_channels_.erase(absl::c_find_if( - voice_channels_, [&](const std::unique_ptr& p) { - return p.get() == voice_channel; - })); -} - -VideoChannel* ChannelManager::CreateVideoChannel( - webrtc::Call* call, - const MediaConfig& media_config, - webrtc::RtpTransportInternal* rtp_transport, - rtc::Thread* signaling_thread, - const std::string& content_name, - bool srtp_required, - const webrtc::CryptoOptions& crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator, - const VideoOptions& options, - webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) { - RTC_DCHECK(call); - RTC_DCHECK(media_engine_); - // TODO(bugs.webrtc.org/11992): Remove this workaround after updates in - // PeerConnection and add the expectation that we're already on the right - // thread. 
- if (!worker_thread_->IsCurrent()) { - return worker_thread_->Invoke(RTC_FROM_HERE, [&] { - return CreateVideoChannel(call, media_config, rtp_transport, - signaling_thread, content_name, srtp_required, - crypto_options, ssrc_generator, options, - video_bitrate_allocator_factory); - }); - } - - RTC_DCHECK_RUN_ON(worker_thread_); - - VideoMediaChannel* media_channel = media_engine_->video().CreateMediaChannel( - call, media_config, options, crypto_options, - video_bitrate_allocator_factory); - if (!media_channel) { - return nullptr; - } - - auto video_channel = std::make_unique( - worker_thread_, network_thread_, signaling_thread, - absl::WrapUnique(media_channel), content_name, srtp_required, - crypto_options, ssrc_generator); - - video_channel->Init_w(rtp_transport); - - VideoChannel* video_channel_ptr = video_channel.get(); - video_channels_.push_back(std::move(video_channel)); - return video_channel_ptr; -} - -void ChannelManager::DestroyVideoChannel(VideoChannel* video_channel) { - TRACE_EVENT0("webrtc", "ChannelManager::DestroyVideoChannel"); - RTC_DCHECK(video_channel); - - if (!worker_thread_->IsCurrent()) { - worker_thread_->Invoke(RTC_FROM_HERE, - [&] { DestroyVideoChannel(video_channel); }); - return; - } - RTC_DCHECK_RUN_ON(worker_thread_); - - video_channels_.erase(absl::c_find_if( - video_channels_, [&](const std::unique_ptr& p) { - return p.get() == video_channel; - })); -} - -bool ChannelManager::StartAecDump(webrtc::FileWrapper file, - int64_t max_size_bytes) { - RTC_DCHECK_RUN_ON(worker_thread_); - return media_engine_->voice().StartAecDump(std::move(file), max_size_bytes); -} - -void ChannelManager::StopAecDump() { - RTC_DCHECK_RUN_ON(worker_thread_); - media_engine_->voice().StopAecDump(); -} - -} // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h b/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h index 363f459d36..e69de29bb2 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h +++ b/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h @@ -1,141 +0,0 @@ -/* - * Copyright 2004 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef PC_CHANNEL_MANAGER_H_ -#define PC_CHANNEL_MANAGER_H_ - -#include - -#include -#include -#include - -#include "api/audio_options.h" -#include "api/crypto/crypto_options.h" -#include "api/rtp_parameters.h" -#include "api/video/video_bitrate_allocator_factory.h" -#include "call/call.h" -#include "media/base/codec.h" -#include "media/base/media_channel.h" -#include "media/base/media_config.h" -#include "media/base/media_engine.h" -#include "pc/channel.h" -#include "pc/rtp_transport_internal.h" -#include "pc/session_description.h" -#include "rtc_base/system/file_wrapper.h" -#include "rtc_base/thread.h" -#include "rtc_base/unique_id_generator.h" - -namespace cricket { - -// ChannelManager allows the MediaEngine to run on a separate thread, and takes -// care of marshalling calls between threads. It also creates and keeps track of -// voice and video channels; by doing so, it can temporarily pause all the -// channels when a new audio or video device is chosen. 
The voice and video -// channels are stored in separate vectors, to easily allow operations on just -// voice or just video channels. -// ChannelManager also allows the application to discover what devices it has -// using device manager. -class ChannelManager final { - public: - // Returns an initialized instance of ChannelManager. - // If media_engine is non-nullptr, then the returned ChannelManager instance - // will own that reference and media engine initialization - static std::unique_ptr Create( - std::unique_ptr media_engine, - bool enable_rtx, - rtc::Thread* worker_thread, - rtc::Thread* network_thread); - - ChannelManager() = delete; - ~ChannelManager(); - - rtc::Thread* worker_thread() const { return worker_thread_; } - rtc::Thread* network_thread() const { return network_thread_; } - MediaEngineInterface* media_engine() { return media_engine_.get(); } - - // Retrieves the list of supported audio & video codec types. - // Can be called before starting the media engine. - void GetSupportedAudioSendCodecs(std::vector* codecs) const; - void GetSupportedAudioReceiveCodecs(std::vector* codecs) const; - void GetSupportedVideoSendCodecs(std::vector* codecs) const; - void GetSupportedVideoReceiveCodecs(std::vector* codecs) const; - RtpHeaderExtensions GetDefaultEnabledAudioRtpHeaderExtensions() const; - std::vector - GetSupportedAudioRtpHeaderExtensions() const; - RtpHeaderExtensions GetDefaultEnabledVideoRtpHeaderExtensions() const; - std::vector - GetSupportedVideoRtpHeaderExtensions() const; - - // The operations below all occur on the worker thread. - // ChannelManager retains ownership of the created channels, so clients should - // call the appropriate Destroy*Channel method when done. - - // Creates a voice channel, to be associated with the specified session. - VoiceChannel* CreateVoiceChannel(webrtc::Call* call, - const MediaConfig& media_config, - webrtc::RtpTransportInternal* rtp_transport, - rtc::Thread* signaling_thread, - const std::string& content_name, - bool srtp_required, - const webrtc::CryptoOptions& crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator, - const AudioOptions& options); - // Destroys a voice channel created by CreateVoiceChannel. - void DestroyVoiceChannel(VoiceChannel* voice_channel); - - // Creates a video channel, synced with the specified voice channel, and - // associated with the specified session. - // Version of the above that takes PacketTransportInternal. - VideoChannel* CreateVideoChannel( - webrtc::Call* call, - const MediaConfig& media_config, - webrtc::RtpTransportInternal* rtp_transport, - rtc::Thread* signaling_thread, - const std::string& content_name, - bool srtp_required, - const webrtc::CryptoOptions& crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator, - const VideoOptions& options, - webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory); - // Destroys a video channel created by CreateVideoChannel. - void DestroyVideoChannel(VideoChannel* video_channel); - - // Starts AEC dump using existing file, with a specified maximum file size in - // bytes. When the limit is reached, logging will stop and the file will be - // closed. If max_size_bytes is set to <= 0, no limit will be used. - bool StartAecDump(webrtc::FileWrapper file, int64_t max_size_bytes); - - // Stops recording AEC dump. 
- void StopAecDump(); - -protected: - ChannelManager(std::unique_ptr media_engine, - bool enable_rtx, - rtc::Thread* worker_thread, - rtc::Thread* network_thread); - - private: - const std::unique_ptr media_engine_; // Nullable. - rtc::Thread* const worker_thread_; - rtc::Thread* const network_thread_; - - // Vector contents are non-null. - std::vector> voice_channels_ - RTC_GUARDED_BY(worker_thread_); - std::vector> video_channels_ - RTC_GUARDED_BY(worker_thread_); - - const bool enable_rtx_; -}; - -} // namespace cricket - -#endif // PC_CHANNEL_MANAGER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/connection_context.cc b/TMessagesProj/jni/voip/webrtc/pc/connection_context.cc index d058c9d564..ec6f21cc13 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/connection_context.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/connection_context.cc @@ -10,33 +10,35 @@ #include "pc/connection_context.h" -#include #include #include +#include #include "api/transport/field_trial_based_config.h" +#include "media/base/media_engine.h" #include "media/sctp/sctp_transport_factory.h" #include "rtc_base/helpers.h" -#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/internal/default_socket_server.h" +#include "rtc_base/socket_server.h" #include "rtc_base/time_utils.h" namespace webrtc { namespace { -rtc::Thread* MaybeStartThread(rtc::Thread* old_thread, - const std::string& thread_name, - bool with_socket_server, - std::unique_ptr& thread_holder) { +rtc::Thread* MaybeStartNetworkThread( + rtc::Thread* old_thread, + std::unique_ptr& socket_factory_holder, + std::unique_ptr& thread_holder) { if (old_thread) { return old_thread; } - if (with_socket_server) { - thread_holder = rtc::Thread::CreateWithSocketServer(); - } else { - thread_holder = rtc::Thread::Create(); - } - thread_holder->SetName(thread_name, nullptr); + std::unique_ptr socket_server = + rtc::CreateDefaultSocketServer(); + thread_holder = std::make_unique(socket_server.get()); + socket_factory_holder = std::move(socket_server); + + thread_holder->SetName("pc_network_thread", nullptr); thread_holder->Start(); return thread_holder.get(); } @@ -59,7 +61,8 @@ rtc::Thread* MaybeWrapThread(rtc::Thread* signaling_thread, std::unique_ptr MaybeCreateSctpFactory( std::unique_ptr factory, - rtc::Thread* network_thread) { + rtc::Thread* network_thread, + const FieldTrialsView& field_trials) { if (factory) { return factory; } @@ -75,65 +78,83 @@ std::unique_ptr MaybeCreateSctpFactory( // Static rtc::scoped_refptr ConnectionContext::Create( PeerConnectionFactoryDependencies* dependencies) { - return new ConnectionContext(dependencies); + return rtc::scoped_refptr( + new ConnectionContext(dependencies)); } ConnectionContext::ConnectionContext( PeerConnectionFactoryDependencies* dependencies) - : network_thread_(MaybeStartThread(dependencies->network_thread, - "pc_network_thread", - true, - owned_network_thread_)), - worker_thread_(MaybeStartThread(dependencies->worker_thread, - "pc_worker_thread", - false, - owned_worker_thread_)), + : network_thread_(MaybeStartNetworkThread(dependencies->network_thread, + owned_socket_factory_, + owned_network_thread_)), + worker_thread_(dependencies->worker_thread, + []() { + auto thread_holder = rtc::Thread::Create(); + thread_holder->SetName("pc_worker_thread", nullptr); + thread_holder->Start(); + return thread_holder; + }), signaling_thread_(MaybeWrapThread(dependencies->signaling_thread, wraps_current_thread_)), + trials_(dependencies->trials ? 
std::move(dependencies->trials) + : std::make_unique()), + media_engine_(std::move(dependencies->media_engine)), network_monitor_factory_( std::move(dependencies->network_monitor_factory)), + default_network_manager_(std::move(dependencies->network_manager)), call_factory_(std::move(dependencies->call_factory)), + default_socket_factory_(std::move(dependencies->packet_socket_factory)), sctp_factory_( MaybeCreateSctpFactory(std::move(dependencies->sctp_factory), - network_thread())), - trials_(dependencies->trials - ? std::move(dependencies->trials) - : std::make_unique()) { - signaling_thread_->AllowInvokesToThread(worker_thread_); + network_thread(), + *trials_.get())) { + RTC_DCHECK_RUN_ON(signaling_thread_); + RTC_DCHECK(!(default_network_manager_ && network_monitor_factory_)) + << "You can't set both network_manager and network_monitor_factory."; + + signaling_thread_->AllowInvokesToThread(worker_thread()); signaling_thread_->AllowInvokesToThread(network_thread_); worker_thread_->AllowInvokesToThread(network_thread_); - if (network_thread_->IsCurrent()) { - // TODO(https://crbug.com/webrtc/12802) switch to DisallowAllInvokes - network_thread_->AllowInvokesToThread(network_thread_); - } else { - network_thread_->PostTask(ToQueuedTask([thread = network_thread_] { - thread->DisallowBlockingCalls(); - // TODO(https://crbug.com/webrtc/12802) switch to DisallowAllInvokes - thread->AllowInvokesToThread(thread); - })); + if (!network_thread_->IsCurrent()) { + // network_thread_->IsCurrent() == true means signaling_thread_ is + // network_thread_. In this case, no further action is required as + // signaling_thread_ can already invoke network_thread_. + network_thread_->PostTask( + [thread = network_thread_, worker_thread = worker_thread_.get()] { + thread->DisallowBlockingCalls(); + thread->DisallowAllInvokes(); + if (worker_thread == thread) { + // In this case, worker_thread_ == network_thread_ + thread->AllowInvokesToThread(thread); + } + }); } - RTC_DCHECK_RUN_ON(signaling_thread_); rtc::InitRandom(rtc::Time32()); - // If network_monitor_factory_ is non-null, it will be used to create a - // network monitor while on the network thread. - default_network_manager_ = std::make_unique( - network_monitor_factory_.get(), network_thread()->socketserver()); - - // TODO(bugs.webrtc.org/13145): Either require that a PacketSocketFactory - // always is injected (with no need to construct this default factory), or get - // the appropriate underlying SocketFactory without going through the - // rtc::Thread::socketserver() accessor. - default_socket_factory_ = std::make_unique( - network_thread()->socketserver()); - - worker_thread_->Invoke(RTC_FROM_HERE, [&]() { - channel_manager_ = cricket::ChannelManager::Create( - std::move(dependencies->media_engine), - /*enable_rtx=*/true, worker_thread(), network_thread()); - }); - + rtc::SocketFactory* socket_factory = dependencies->socket_factory; + if (socket_factory == nullptr) { + if (owned_socket_factory_) { + socket_factory = owned_socket_factory_.get(); + } else { + // TODO(bugs.webrtc.org/13145): This case should be deleted. Either + // require that a PacketSocketFactory and NetworkManager always are + // injected (with no need to construct these default objects), or require + // that if a network_thread is injected, an approprite rtc::SocketServer + // should be injected too. 
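The constructor above now ties ownership of a default socket server to the network thread it creates when none is injected. A minimal stand-alone sketch of that pattern, using placeholder types rather than the real rtc::Thread / rtc::SocketServer API:

#include <memory>

struct StubSocketServer {};                        // stands in for rtc::SocketServer
struct StubThread {                                // stands in for rtc::Thread
  explicit StubThread(StubSocketServer* ss) : ss(ss) {}
  void Start() {}
  StubSocketServer* ss;
};

// Use the injected thread if there is one; otherwise create a socket server
// plus a thread bound to it and hand both to holders owned by the caller.
// The socket-server holder must be destroyed after the thread, mirroring how
// owned_socket_factory_ is declared before owned_network_thread_ above.
StubThread* MaybeStartNetworkThread(StubThread* injected,
                                    std::unique_ptr<StubSocketServer>& ss_holder,
                                    std::unique_ptr<StubThread>& thread_holder) {
  if (injected) return injected;                   // caller keeps ownership
  ss_holder = std::make_unique<StubSocketServer>();
  thread_holder = std::make_unique<StubThread>(ss_holder.get());
  thread_holder->Start();
  return thread_holder.get();                      // context owns both holders
}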
+ socket_factory = network_thread()->socketserver(); + } + } + if (!default_network_manager_) { + // If network_monitor_factory_ is non-null, it will be used to create a + // network monitor while on the network thread. + default_network_manager_ = std::make_unique( + network_monitor_factory_.get(), socket_factory, &field_trials()); + } + if (!default_socket_factory_) { + default_socket_factory_ = + std::make_unique(socket_factory); + } // Set warning levels on the threads, to give warnings when response // may be slower than is expected of the thread. // Since some of the threads may be the same, start with the least @@ -142,12 +163,25 @@ ConnectionContext::ConnectionContext( signaling_thread_->SetDispatchWarningMs(100); worker_thread_->SetDispatchWarningMs(30); network_thread_->SetDispatchWarningMs(10); + + if (media_engine_) { + // TODO(tommi): Change VoiceEngine to do ctor time initialization so that + // this isn't necessary. + worker_thread_->BlockingCall([&] { media_engine_->Init(); }); + } } ConnectionContext::~ConnectionContext() { RTC_DCHECK_RUN_ON(signaling_thread_); - worker_thread_->Invoke(RTC_FROM_HERE, - [&]() { channel_manager_.reset(nullptr); }); + worker_thread_->BlockingCall([&] { + RTC_DCHECK_RUN_ON(worker_thread()); + // While `media_engine_` is const throughout the ConnectionContext's + // lifetime, it requires destruction to happen on the worker thread. Instead + // of marking the pointer as non-const, we live with this const_cast<> in + // the destructor. + const_cast&>(media_engine_) + .reset(); + }); // Make sure `worker_thread()` and `signaling_thread()` outlive // `default_socket_factory_` and `default_network_manager_`. @@ -158,8 +192,4 @@ ConnectionContext::~ConnectionContext() { rtc::ThreadManager::Instance()->UnwrapCurrentThread(); } -cricket::ChannelManager* ConnectionContext::channel_manager() const { - return channel_manager_.get(); -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/connection_context.h b/TMessagesProj/jni/voip/webrtc/pc/connection_context.h index 8fad13c10c..415ae121b5 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/connection_context.h +++ b/TMessagesProj/jni/voip/webrtc/pc/connection_context.h @@ -15,26 +15,30 @@ #include #include "api/call/call_factory_interface.h" +#include "api/field_trials_view.h" #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/transport/sctp_transport_factory_interface.h" -#include "api/transport/webrtc_key_value_config.h" #include "media/base/media_engine.h" #include "p2p/base/basic_packet_socket_factory.h" -#include "pc/channel_manager.h" #include "rtc_base/checks.h" #include "rtc_base/network.h" #include "rtc_base/network_monitor_factory.h" #include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/socket_factory.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" +namespace cricket { +class ChannelManager; +} + namespace rtc { -class BasicNetworkManager; class BasicPacketSocketFactory; +class UniqueRandomIdGenerator; } // namespace rtc namespace webrtc { @@ -65,30 +69,42 @@ class ConnectionContext final return sctp_factory_.get(); } - cricket::ChannelManager* channel_manager() const; + cricket::MediaEngineInterface* media_engine() const { + return media_engine_.get(); + } rtc::Thread* signaling_thread() { return signaling_thread_; } const rtc::Thread* signaling_thread() const { return signaling_thread_; } - 
rtc::Thread* worker_thread() { return worker_thread_; } - const rtc::Thread* worker_thread() const { return worker_thread_; } + rtc::Thread* worker_thread() { return worker_thread_.get(); } + const rtc::Thread* worker_thread() const { return worker_thread_.get(); } rtc::Thread* network_thread() { return network_thread_; } const rtc::Thread* network_thread() const { return network_thread_; } - const WebRtcKeyValueConfig& trials() const { return *trials_.get(); } + // Field trials associated with the PeerConnectionFactory. + // Note: that there can be different field trials for different + // PeerConnections (but they are not supposed change after creating the + // PeerConnection). + const FieldTrialsView& field_trials() const { return *trials_.get(); } // Accessors only used from the PeerConnectionFactory class - rtc::BasicNetworkManager* default_network_manager() { + rtc::NetworkManager* default_network_manager() { RTC_DCHECK_RUN_ON(signaling_thread_); return default_network_manager_.get(); } - rtc::BasicPacketSocketFactory* default_socket_factory() { + rtc::PacketSocketFactory* default_socket_factory() { RTC_DCHECK_RUN_ON(signaling_thread_); return default_socket_factory_.get(); } CallFactoryInterface* call_factory() { - RTC_DCHECK_RUN_ON(worker_thread_); + RTC_DCHECK_RUN_ON(worker_thread()); return call_factory_.get(); } + rtc::UniqueRandomIdGenerator* ssrc_generator() { return &ssrc_generator_; } + // Note: There is lots of code that wants to know whether or not we + // use RTX, but so far, no code has been found that sets it to false. + // Kept in the API in order to ease introduction if we want to resurrect + // the functionality. + bool use_rtx() { return true; } protected: explicit ConnectionContext(PeerConnectionFactoryDependencies* dependencies); @@ -100,30 +116,33 @@ class ConnectionContext final // The following three variables are used to communicate between the // constructor and the destructor, and are never exposed externally. bool wraps_current_thread_; - // Note: Since owned_network_thread_ and owned_worker_thread_ are used - // in the initialization of network_thread_ and worker_thread_, they - // must be declared before them, so that they are initialized first. + std::unique_ptr owned_socket_factory_; std::unique_ptr owned_network_thread_ RTC_GUARDED_BY(signaling_thread_); - std::unique_ptr owned_worker_thread_ - RTC_GUARDED_BY(signaling_thread_); rtc::Thread* const network_thread_; - rtc::Thread* const worker_thread_; + AlwaysValidPointer const worker_thread_; rtc::Thread* const signaling_thread_; - // channel_manager is accessed both on signaling thread and worker thread. - std::unique_ptr channel_manager_; + + // Accessed both on signaling thread and worker thread. + std::unique_ptr const trials_; + + const std::unique_ptr media_engine_; + + // This object should be used to generate any SSRC that is not explicitly + // specified by the user (or by the remote party). + // TODO(bugs.webrtc.org/12666): This variable is used from both the signaling + // and worker threads. See if we can't restrict usage to a single thread. 
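worker_thread_ is now held through an AlwaysValidPointer so callers always get a non-null thread, whether one was injected or the context had to build its own from the factory lambda in the constructor. A simplified stand-in for that idea (a sketch, not the real webrtc::AlwaysValidPointer template):

#include <functional>
#include <memory>

template <typename T>
class OwnedOrBorrowed {
 public:
  // Uses `borrowed` when non-null; otherwise creates and owns an instance.
  OwnedOrBorrowed(T* borrowed, std::function<std::unique_ptr<T>()> factory)
      : owned_(borrowed ? nullptr : factory()),
        ptr_(borrowed ? borrowed : owned_.get()) {}

  T* get() const { return ptr_; }       // never null after construction
  T* operator->() const { return ptr_; }

 private:
  const std::unique_ptr<T> owned_;      // set only when nothing was injected
  T* const ptr_;
};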
+ rtc::UniqueRandomIdGenerator ssrc_generator_; std::unique_ptr const network_monitor_factory_ RTC_GUARDED_BY(signaling_thread_); - std::unique_ptr default_network_manager_ + std::unique_ptr default_network_manager_ RTC_GUARDED_BY(signaling_thread_); std::unique_ptr const call_factory_ - RTC_GUARDED_BY(worker_thread_); + RTC_GUARDED_BY(worker_thread()); - std::unique_ptr default_socket_factory_ + std::unique_ptr default_socket_factory_ RTC_GUARDED_BY(signaling_thread_); std::unique_ptr const sctp_factory_; - // Accessed both on signaling thread and worker thread. - std::unique_ptr const trials_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc index e11647f2ca..36f11fe12e 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc @@ -10,21 +10,25 @@ #include "pc/data_channel_controller.h" -#include #include -#include "absl/algorithm/container.h" -#include "absl/types/optional.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" -#include "pc/peer_connection.h" +#include "pc/peer_connection_internal.h" #include "pc/sctp_utils.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" -#include "rtc_base/task_utils/to_queued_task.h" namespace webrtc { +DataChannelController::~DataChannelController() { + // Since channels may have multiple owners, we cannot guarantee that + // they will be deallocated before destroying the controller. + // Therefore, detach them from the controller. + for (auto channel : sctp_data_channels_) { + channel->DetachFromController(); + } +} + bool DataChannelController::HasDataChannels() const { RTC_DCHECK_RUN_ON(signaling_thread()); return !sctp_data_channels_.empty(); @@ -76,7 +80,7 @@ void DataChannelController::DisconnectDataChannel( void DataChannelController::AddSctpDataStream(int sid) { if (data_channel_transport()) { - network_thread()->Invoke(RTC_FROM_HERE, [this, sid] { + network_thread()->BlockingCall([this, sid] { if (data_channel_transport()) { data_channel_transport()->OpenChannel(sid); } @@ -86,7 +90,7 @@ void DataChannelController::AddSctpDataStream(int sid) { void DataChannelController::RemoveSctpDataStream(int sid) { if (data_channel_transport()) { - network_thread()->Invoke(RTC_FROM_HERE, [this, sid] { + network_thread()->BlockingCall([this, sid] { if (data_channel_transport()) { data_channel_transport()->CloseChannel(sid); } @@ -108,7 +112,7 @@ void DataChannelController::OnDataReceived( params.sid = channel_id; params.type = type; signaling_thread()->PostTask( - ToQueuedTask([self = weak_factory_.GetWeakPtr(), params, buffer] { + [self = weak_factory_.GetWeakPtr(), params, buffer] { if (self) { RTC_DCHECK_RUN_ON(self->signaling_thread()); // TODO(bugs.webrtc.org/11547): The data being received should be @@ -123,53 +127,49 @@ void DataChannelController::OnDataReceived( self->SignalDataChannelTransportReceivedData_s(params, buffer); } } - })); + }); } void DataChannelController::OnChannelClosing(int channel_id) { RTC_DCHECK_RUN_ON(network_thread()); - signaling_thread()->PostTask( - ToQueuedTask([self = weak_factory_.GetWeakPtr(), channel_id] { - if (self) { - RTC_DCHECK_RUN_ON(self->signaling_thread()); - self->SignalDataChannelTransportChannelClosing_s(channel_id); - } - })); + signaling_thread()->PostTask([self = weak_factory_.GetWeakPtr(), channel_id] { + if (self) { + RTC_DCHECK_RUN_ON(self->signaling_thread()); + 
self->SignalDataChannelTransportChannelClosing_s(channel_id); + } + }); } void DataChannelController::OnChannelClosed(int channel_id) { RTC_DCHECK_RUN_ON(network_thread()); - signaling_thread()->PostTask( - ToQueuedTask([self = weak_factory_.GetWeakPtr(), channel_id] { - if (self) { - RTC_DCHECK_RUN_ON(self->signaling_thread()); - self->SignalDataChannelTransportChannelClosed_s(channel_id); - } - })); + signaling_thread()->PostTask([self = weak_factory_.GetWeakPtr(), channel_id] { + if (self) { + RTC_DCHECK_RUN_ON(self->signaling_thread()); + self->SignalDataChannelTransportChannelClosed_s(channel_id); + } + }); } void DataChannelController::OnReadyToSend() { RTC_DCHECK_RUN_ON(network_thread()); - signaling_thread()->PostTask( - ToQueuedTask([self = weak_factory_.GetWeakPtr()] { - if (self) { - RTC_DCHECK_RUN_ON(self->signaling_thread()); - self->data_channel_transport_ready_to_send_ = true; - self->SignalDataChannelTransportWritable_s( - self->data_channel_transport_ready_to_send_); - } - })); + signaling_thread()->PostTask([self = weak_factory_.GetWeakPtr()] { + if (self) { + RTC_DCHECK_RUN_ON(self->signaling_thread()); + self->data_channel_transport_ready_to_send_ = true; + self->SignalDataChannelTransportWritable_s( + self->data_channel_transport_ready_to_send_); + } + }); } void DataChannelController::OnTransportClosed(RTCError error) { RTC_DCHECK_RUN_ON(network_thread()); - signaling_thread()->PostTask( - ToQueuedTask([self = weak_factory_.GetWeakPtr(), error] { - if (self) { - RTC_DCHECK_RUN_ON(self->signaling_thread()); - self->OnTransportChannelClosed(error); - } - })); + signaling_thread()->PostTask([self = weak_factory_.GetWeakPtr(), error] { + if (self) { + RTC_DCHECK_RUN_ON(self->signaling_thread()); + self->OnTransportChannelClosed(error); + } + }); } void DataChannelController::SetupDataChannelTransport_n() { @@ -298,7 +298,8 @@ DataChannelController::InternalCreateSctpDataChannel( return nullptr; } sctp_data_channels_.push_back(channel); - channel->SignalClosed.connect(pc_, &PeerConnection::OnSctpDataChannelClosed); + channel->SignalClosed.connect( + pc_, &PeerConnectionInternal::OnSctpDataChannelClosed); SignalSctpDataChannelCreated_(channel.get()); return channel; } @@ -338,13 +339,12 @@ void DataChannelController::OnSctpDataChannelClosed(SctpDataChannel* channel) { // we can't free it directly here; we need to free it asynchronously. 
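Every network-to-signaling notification in this file follows the same shape: post a closure that captures a weak pointer to the controller and bail out if the controller is gone by the time the task runs. The same idea expressed with standard-library types only (WebRTC uses rtc::WeakPtrFactory and a task queue; this sketch just mirrors the shape of the code):

#include <functional>
#include <memory>
#include <queue>

struct Controller {
  void OnChannelClosedOnSignalingThread(int channel_id) {}
};

// Queue a "channel closed" notification; the body runs only if the controller
// is still alive when the signaling side drains its queue.
void PostChannelClosed(std::queue<std::function<void()>>& signaling_queue,
                       const std::shared_ptr<Controller>& controller,
                       int channel_id) {
  signaling_queue.push(
      [weak = std::weak_ptr<Controller>(controller), channel_id] {
        if (auto self = weak.lock()) {   // the `if (self)` check above
          self->OnChannelClosedOnSignalingThread(channel_id);
        }
      });
}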
sctp_data_channels_to_free_.push_back(*it); sctp_data_channels_.erase(it); - signaling_thread()->PostTask( - ToQueuedTask([self = weak_factory_.GetWeakPtr()] { - if (self) { - RTC_DCHECK_RUN_ON(self->signaling_thread()); - self->sctp_data_channels_to_free_.clear(); - } - })); + signaling_thread()->PostTask([self = weak_factory_.GetWeakPtr()] { + if (self) { + RTC_DCHECK_RUN_ON(self->signaling_thread()); + self->sctp_data_channels_to_free_.clear(); + } + }); return; } } @@ -361,16 +361,6 @@ void DataChannelController::OnTransportChannelClosed(RTCError error) { } } -SctpDataChannel* DataChannelController::FindDataChannelBySid(int sid) const { - RTC_DCHECK_RUN_ON(signaling_thread()); - for (const auto& channel : sctp_data_channels_) { - if (channel->id() == sid) { - return channel; - } - } - return nullptr; -} - DataChannelTransportInterface* DataChannelController::data_channel_transport() const { // TODO(bugs.webrtc.org/11547): Only allow this accessor to be called on the @@ -391,15 +381,14 @@ bool DataChannelController::DataChannelSendData( const rtc::CopyOnWriteBuffer& payload, cricket::SendDataResult* result) { // TODO(bugs.webrtc.org/11547): Expect method to be called on the network - // thread instead. Remove the Invoke() below and move assocated state to + // thread instead. Remove the BlockingCall() below and move assocated state to // the network thread. RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(data_channel_transport()); - RTCError error = network_thread()->Invoke( - RTC_FROM_HERE, [this, sid, params, payload] { - return data_channel_transport()->SendData(sid, params, payload); - }); + RTCError error = network_thread()->BlockingCall([this, sid, params, payload] { + return data_channel_transport()->SendData(sid, params, payload); + }); if (error.ok()) { *result = cricket::SendDataResult::SDR_SUCCESS; @@ -416,15 +405,14 @@ bool DataChannelController::DataChannelSendData( void DataChannelController::NotifyDataChannelsOfTransportCreated() { RTC_DCHECK_RUN_ON(network_thread()); - signaling_thread()->PostTask( - ToQueuedTask([self = weak_factory_.GetWeakPtr()] { - if (self) { - RTC_DCHECK_RUN_ON(self->signaling_thread()); - for (const auto& channel : self->sctp_data_channels_) { - channel->OnTransportChannelCreated(); - } - } - })); + signaling_thread()->PostTask([self = weak_factory_.GetWeakPtr()] { + if (self) { + RTC_DCHECK_RUN_ON(self->signaling_thread()); + for (const auto& channel : self->sctp_data_channels_) { + channel->OnTransportChannelCreated(); + } + } + }); } rtc::Thread* DataChannelController::network_thread() const { diff --git a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h index af0e06353f..cec79038c6 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h +++ b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h @@ -11,21 +11,15 @@ #ifndef PC_DATA_CHANNEL_CONTROLLER_H_ #define PC_DATA_CHANNEL_CONTROLLER_H_ -#include - -#include -#include #include #include #include "api/data_channel_interface.h" +#include "api/rtc_error.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/transport/data_channel_transport_interface.h" #include "media/base/media_channel.h" -#include "media/base/media_engine.h" -#include "media/base/stream_params.h" -#include "pc/channel.h" #include "pc/data_channel_utils.h" #include "pc/sctp_data_channel.h" #include "rtc_base/checks.h" @@ -38,12 +32,13 @@ namespace webrtc { -class PeerConnection; +class 
PeerConnectionInternal; -class DataChannelController : public SctpDataChannelProviderInterface, +class DataChannelController : public SctpDataChannelControllerInterface, public DataChannelSink { public: - explicit DataChannelController(PeerConnection* pc) : pc_(pc) {} + explicit DataChannelController(PeerConnectionInternal* pc) : pc_(pc) {} + ~DataChannelController(); // Not copyable or movable. DataChannelController(DataChannelController&) = delete; @@ -93,8 +88,6 @@ class DataChannelController : public SctpDataChannelProviderInterface, config) /* RTC_RUN_ON(signaling_thread()) */; void AllocateSctpSids(rtc::SSLRole role); - SctpDataChannel* FindDataChannelBySid(int sid) const; - // Checks if any data channel has been added. bool HasDataChannels() const; bool HasSctpDataChannels() const { @@ -180,7 +173,7 @@ class DataChannelController : public SctpDataChannelProviderInterface, RTC_GUARDED_BY(signaling_thread()); // Owning PeerConnection. - PeerConnection* const pc_; + PeerConnectionInternal* const pc_; // The weak pointers must be dereferenced and invalidated on the signalling // thread only. rtc::WeakPtrFactory weak_factory_{this}; diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.cc index 9a4135bb2f..28de50b2ae 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.cc @@ -27,8 +27,9 @@ static const char kDtlsSrtpExporterLabel[] = "EXTRACTOR-dtls_srtp"; namespace webrtc { -DtlsSrtpTransport::DtlsSrtpTransport(bool rtcp_mux_enabled) - : SrtpTransport(rtcp_mux_enabled) {} +DtlsSrtpTransport::DtlsSrtpTransport(bool rtcp_mux_enabled, + const FieldTrialsView& field_trials) + : SrtpTransport(rtcp_mux_enabled, field_trials) {} void DtlsSrtpTransport::SetDtlsTransports( cricket::DtlsTransportInternal* rtp_dtls_transport, diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h b/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h index da068c9b8a..7958210c99 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h @@ -11,6 +11,7 @@ #ifndef PC_DTLS_SRTP_TRANSPORT_H_ #define PC_DTLS_SRTP_TRANSPORT_H_ +#include #include #include @@ -31,7 +32,7 @@ namespace webrtc { // configures the SrtpSessions in the base class. class DtlsSrtpTransport : public SrtpTransport { public: - explicit DtlsSrtpTransport(bool rtcp_mux_enabled); + DtlsSrtpTransport(bool rtcp_mux_enabled, const FieldTrialsView& field_trials); // Set P2P layer RTP/RTCP DtlsTransports. When using RTCP-muxing, // `rtcp_dtls_transport` is null. 
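With the constructor change above, code that builds a DtlsSrtpTransport directly now has to supply a FieldTrialsView as well. A minimal construction sketch, assuming the FieldTrialBasedConfig implementation seen elsewhere in this patch and that the trials object outlives the transport:

#include <memory>

#include "api/transport/field_trial_based_config.h"
#include "pc/dtls_srtp_transport.h"

std::unique_ptr<webrtc::DtlsSrtpTransport> MakeDtlsSrtpTransport() {
  // Kept alive for the lifetime of any transport created here.
  static webrtc::FieldTrialBasedConfig trials;
  return std::make_unique<webrtc::DtlsSrtpTransport>(
      /*rtcp_mux_enabled=*/true, trials);
}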
diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc index 074f44e22b..15eed9e47b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc @@ -14,12 +14,12 @@ #include "absl/types/optional.h" #include "api/dtls_transport_interface.h" +#include "api/make_ref_counted.h" #include "api/sequence_checker.h" #include "pc/ice_transport.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" -#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_stream_adapter.h" namespace webrtc { @@ -105,22 +105,35 @@ void DtlsTransport::UpdateInformation() { if (internal_dtls_transport_->dtls_state() == DtlsTransportState::kConnected) { bool success = true; + rtc::SSLRole internal_role; + absl::optional role; int ssl_cipher_suite; int tls_version; int srtp_cipher; + success &= internal_dtls_transport_->GetDtlsRole(&internal_role); + if (success) { + switch (internal_role) { + case rtc::SSL_CLIENT: + role = DtlsTransportTlsRole::kClient; + break; + case rtc::SSL_SERVER: + role = DtlsTransportTlsRole::kServer; + break; + } + } success &= internal_dtls_transport_->GetSslVersionBytes(&tls_version); success &= internal_dtls_transport_->GetSslCipherSuite(&ssl_cipher_suite); success &= internal_dtls_transport_->GetSrtpCryptoSuite(&srtp_cipher); if (success) { info_ = DtlsTransportInformation( - internal_dtls_transport_->dtls_state(), tls_version, + internal_dtls_transport_->dtls_state(), role, tls_version, ssl_cipher_suite, srtp_cipher, internal_dtls_transport_->GetRemoteSSLCertChain()); } else { RTC_LOG(LS_ERROR) << "DtlsTransport in connected state has incomplete " "TLS information"; info_ = DtlsTransportInformation( - internal_dtls_transport_->dtls_state(), absl::nullopt, + internal_dtls_transport_->dtls_state(), role, absl::nullopt, absl::nullopt, absl::nullopt, internal_dtls_transport_->GetRemoteSSLCertChain()); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.cc b/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.cc index 46811c881b..45a4a58abb 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.cc @@ -13,13 +13,11 @@ #include #include -#include - +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" -#include "rtc_base/task_utils/to_queued_task.h" -#include "rtc_base/thread.h" namespace webrtc { @@ -60,7 +58,7 @@ bool GetDtmfCode(char tone, int* code) { } rtc::scoped_refptr DtmfSender::Create( - rtc::Thread* signaling_thread, + TaskQueueBase* signaling_thread, DtmfProviderInterface* provider) { if (!signaling_thread) { return nullptr; @@ -68,7 +66,7 @@ rtc::scoped_refptr DtmfSender::Create( return rtc::make_ref_counted(signaling_thread, provider); } -DtmfSender::DtmfSender(rtc::Thread* signaling_thread, +DtmfSender::DtmfSender(TaskQueueBase* signaling_thread, DtmfProviderInterface* provider) : observer_(nullptr), signaling_thread_(signaling_thread), @@ -77,11 +75,14 @@ DtmfSender::DtmfSender(rtc::Thread* signaling_thread, inter_tone_gap_(kDtmfDefaultGapMs), comma_delay_(kDtmfDefaultCommaDelayMs) { RTC_DCHECK(signaling_thread_); - if (provider_) { - RTC_DCHECK(provider_->GetOnDestroyedSignal()); - provider_->GetOnDestroyedSignal()->connect( - this, &DtmfSender::OnProviderDestroyed); - } + 
RTC_DCHECK(provider_); +} + +void DtmfSender::OnDtmfProviderDestroyed() { + RTC_DCHECK_RUN_ON(signaling_thread_); + RTC_DLOG(LS_INFO) << "The Dtmf provider is deleted. Clear the sending queue."; + StopSending(); + provider_ = nullptr; } DtmfSender::~DtmfSender() { @@ -141,7 +142,7 @@ bool DtmfSender::InsertDtmf(const std::string& tones, } safety_flag_ = PendingTaskSafetyFlag::Create(); // Kick off a new DTMF task. - QueueInsertDtmf(RTC_FROM_HERE, 1 /*ms*/); + QueueInsertDtmf(1 /*ms*/); return true; } @@ -165,15 +166,14 @@ int DtmfSender::comma_delay() const { return comma_delay_; } -void DtmfSender::QueueInsertDtmf(const rtc::Location& posted_from, - uint32_t delay_ms) { - signaling_thread_->PostDelayedTask( - ToQueuedTask(safety_flag_, - [this] { - RTC_DCHECK_RUN_ON(signaling_thread_); - DoInsertDtmf(); - }), - delay_ms); +void DtmfSender::QueueInsertDtmf(uint32_t delay_ms) { + signaling_thread_->PostDelayedHighPrecisionTask( + SafeTask(safety_flag_, + [this] { + RTC_DCHECK_RUN_ON(signaling_thread_); + DoInsertDtmf(); + }), + TimeDelta::Millis(delay_ms)); } void DtmfSender::DoInsertDtmf() { @@ -231,15 +231,7 @@ void DtmfSender::DoInsertDtmf() { tones_.erase(0, first_tone_pos + 1); // Continue with the next tone. - QueueInsertDtmf(RTC_FROM_HERE, tone_gap); -} - -void DtmfSender::OnProviderDestroyed() { - RTC_DCHECK_RUN_ON(signaling_thread_); - - RTC_LOG(LS_INFO) << "The Dtmf provider is deleted. Clear the sending queue."; - StopSending(); - provider_ = nullptr; + QueueInsertDtmf(tone_gap); } void DtmfSender::StopSending() { diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.h b/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.h index a208b100d4..c99c7bee50 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.h +++ b/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.h @@ -17,13 +17,12 @@ #include "api/dtmf_sender_interface.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" #include "pc/proxy.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/location.h" #include "rtc_base/ref_count.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" -#include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" // DtmfSender is the native implementation of the RTCDTMFSender defined by // the WebRTC W3C Editor's Draft. @@ -42,19 +41,18 @@ class DtmfProviderInterface { // The `duration` indicates the length of the DTMF tone in ms. // Returns true on success and false on failure. virtual bool InsertDtmf(int code, int duration) = 0; - // Returns a `sigslot::signal0<>` signal. The signal should fire before - // the provider is destroyed. - virtual sigslot::signal0<>* GetOnDestroyedSignal() = 0; protected: virtual ~DtmfProviderInterface() {} }; -class DtmfSender : public DtmfSenderInterface, public sigslot::has_slots<> { +class DtmfSender : public DtmfSenderInterface { public: - static rtc::scoped_refptr Create(rtc::Thread* signaling_thread, + static rtc::scoped_refptr Create(TaskQueueBase* signaling_thread, DtmfProviderInterface* provider); + void OnDtmfProviderDestroyed(); + // Implements DtmfSenderInterface. 
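GetDtmfCode, referenced in the hunk above, maps a tone character onto its RFC 4733 telephone-event code before the provider sends it. A minimal equivalent, assuming the usual "0123456789*#ABCD" table; ',' is not an event and is instead handled as a pause of comma_delay() milliseconds:

#include <cctype>
#include <cstring>

bool ToneToEventCode(char tone, int* code) {
  static const char kTones[] = "0123456789*#ABCD";
  const char upper =
      static_cast<char>(std::toupper(static_cast<unsigned char>(tone)));
  const char* pos = std::strchr(kTones, upper);
  if (upper == '\0' || pos == nullptr) return false;  // unknown tone
  *code = static_cast<int>(pos - kTones);  // '1' -> 1, '*' -> 10, 'A' -> 12
  return true;
}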
void RegisterObserver(DtmfSenderObserverInterface* observer) override; void UnregisterObserver() override; @@ -69,24 +67,24 @@ class DtmfSender : public DtmfSenderInterface, public sigslot::has_slots<> { int comma_delay() const override; protected: - DtmfSender(rtc::Thread* signaling_thread, DtmfProviderInterface* provider); + DtmfSender(TaskQueueBase* signaling_thread, DtmfProviderInterface* provider); virtual ~DtmfSender(); + DtmfSender(const DtmfSender&) = delete; + DtmfSender& operator=(const DtmfSender&) = delete; + private: DtmfSender(); - void QueueInsertDtmf(const rtc::Location& posted_from, uint32_t delay_ms) - RTC_RUN_ON(signaling_thread_); + void QueueInsertDtmf(uint32_t delay_ms) RTC_RUN_ON(signaling_thread_); // The DTMF sending task. void DoInsertDtmf() RTC_RUN_ON(signaling_thread_); - void OnProviderDestroyed(); - void StopSending() RTC_RUN_ON(signaling_thread_); DtmfSenderObserverInterface* observer_ RTC_GUARDED_BY(signaling_thread_); - rtc::Thread* signaling_thread_; + TaskQueueBase* const signaling_thread_; DtmfProviderInterface* provider_ RTC_GUARDED_BY(signaling_thread_); std::string tones_ RTC_GUARDED_BY(signaling_thread_); int duration_ RTC_GUARDED_BY(signaling_thread_); @@ -96,12 +94,11 @@ class DtmfSender : public DtmfSenderInterface, public sigslot::has_slots<> { // For cancelling the tasks which feed the DTMF provider one tone at a time. rtc::scoped_refptr safety_flag_ RTC_GUARDED_BY( signaling_thread_) RTC_PT_GUARDED_BY(signaling_thread_) = nullptr; - - RTC_DISALLOW_COPY_AND_ASSIGN(DtmfSender); }; // Define proxy for DtmfSenderInterface. BEGIN_PRIMARY_PROXY_MAP(DtmfSender) + PROXY_PRIMARY_THREAD_DESTRUCTOR() PROXY_METHOD1(void, RegisterObserver, DtmfSenderObserverInterface*) PROXY_METHOD0(void, UnregisterObserver) diff --git a/TMessagesProj/jni/voip/webrtc/pc/external_hmac.h b/TMessagesProj/jni/voip/webrtc/pc/external_hmac.h index 3319beaed4..c5071fc192 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/external_hmac.h +++ b/TMessagesProj/jni/voip/webrtc/pc/external_hmac.h @@ -30,9 +30,9 @@ #include -#include "third_party/libsrtp/crypto/include/auth.h" #include "third_party/libsrtp/crypto/include/crypto_types.h" #include "third_party/libsrtp/include/srtp.h" +#include "third_party/libsrtp/include/srtp_priv.h" #define EXTERNAL_HMAC_SHA1 SRTP_HMAC_SHA1 + 1 #define HMAC_KEY_LENGTH 20 diff --git a/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.cc b/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.cc index 88f77bf0a9..1a30d2def5 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.cc @@ -12,10 +12,9 @@ #include -#include #include // For std::isdigit. -#include #include +#include #include "p2p/base/port_interface.h" #include "rtc_base/arraysize.h" @@ -24,18 +23,20 @@ #include "rtc_base/logging.h" #include "rtc_base/socket_address.h" #include "rtc_base/string_encode.h" +#include "rtc_base/string_to_number.h" namespace webrtc { +namespace { // Number of tokens must be preset when TURN uri has transport param. -static const size_t kTurnTransportTokensNum = 2; +const size_t kTurnTransportTokensNum = 2; // The default stun port. 
-static const int kDefaultStunPort = 3478; -static const int kDefaultStunTlsPort = 5349; -static const char kTransport[] = "transport"; +const int kDefaultStunPort = 3478; +const int kDefaultStunTlsPort = 5349; +const char kTransport[] = "transport"; // Allowed characters in hostname per RFC 3986 Appendix A "reg-name" -static const char kRegNameCharacters[] = +const char kRegNameCharacters[] = "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789" @@ -44,18 +45,19 @@ static const char kRegNameCharacters[] = "!$&'()*+,;="; // sub-delims // NOTE: Must be in the same order as the ServiceType enum. -static const char* kValidIceServiceTypes[] = {"stun", "stuns", "turn", "turns"}; +const char* kValidIceServiceTypes[] = {"stun", "stuns", "turn", "turns"}; // NOTE: A loop below assumes that the first value of this enum is 0 and all // other values are incremental. -enum ServiceType { +enum class ServiceType { STUN = 0, // Indicates a STUN server. STUNS, // Indicates a STUN server used with a TLS session. TURN, // Indicates a TURN server TURNS, // Indicates a TURN server used with a TLS session. INVALID, // Unknown. }; -static_assert(INVALID == arraysize(kValidIceServiceTypes), +static_assert(static_cast(ServiceType::INVALID) == + arraysize(kValidIceServiceTypes), "kValidIceServiceTypes must have as many strings as ServiceType " "has values."); @@ -65,40 +67,36 @@ static_assert(INVALID == arraysize(kValidIceServiceTypes), // scheme = "stun" / "stuns" / "turn" / "turns" // host = IP-literal / IPv4address / reg-name // port = *DIGIT -static bool GetServiceTypeAndHostnameFromUri(const std::string& in_str, - ServiceType* service_type, - std::string* hostname) { - const std::string::size_type colonpos = in_str.find(':'); - if (colonpos == std::string::npos) { + +// Return tuple is service_type, host, with service_type == ServiceType::INVALID +// on failure. +std::tuple GetServiceTypeAndHostnameFromUri( + absl::string_view in_str) { + const auto colonpos = in_str.find(':'); + if (colonpos == absl::string_view::npos) { RTC_LOG(LS_WARNING) << "Missing ':' in ICE URI: " << in_str; - return false; + return {ServiceType::INVALID, ""}; } if ((colonpos + 1) == in_str.length()) { RTC_LOG(LS_WARNING) << "Empty hostname in ICE URI: " << in_str; - return false; + return {ServiceType::INVALID, ""}; } - *service_type = INVALID; for (size_t i = 0; i < arraysize(kValidIceServiceTypes); ++i) { if (in_str.compare(0, colonpos, kValidIceServiceTypes[i]) == 0) { - *service_type = static_cast(i); - break; + return {static_cast(i), in_str.substr(colonpos + 1)}; } } - if (*service_type == INVALID) { - return false; - } - *hostname = in_str.substr(colonpos + 1, std::string::npos); - return true; + return {ServiceType::INVALID, ""}; } -static bool ParsePort(const std::string& in_str, int* port) { - // Make sure port only contains digits. FromString doesn't check this. +absl::optional ParsePort(absl::string_view in_str) { + // Make sure port only contains digits. StringToNumber doesn't check this. for (const char& c : in_str) { - if (!std::isdigit(c)) { + if (!std::isdigit(static_cast(c))) { return false; } } - return rtc::FromString(in_str, port); + return rtc::StringToNumber(in_str); } // This method parses IPv6 and IPv4 literal strings, along with hostnames in @@ -106,49 +104,60 @@ static bool ParsePort(const std::string& in_str, int* port) { // Consider following formats as correct. // `hostname:port`, |[IPV6 address]:port|, |IPv4 address|:port, // `hostname`, |[IPv6 address]|, |IPv4 address|. 
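The rewritten helper below accepts exactly those shapes and now reports its result as a {success, host, port} tuple instead of writing through out-parameters. A trimmed-down, standalone sketch of the same splitting logic (it omits the digit-only and reg-name character checks that the real helper keeps):

#include <charconv>
#include <string_view>
#include <system_error>
#include <tuple>

// Returns {ok, host, port}; port falls back to `default_port` when absent.
std::tuple<bool, std::string_view, int> SplitHostAndPort(std::string_view in,
                                                         int default_port) {
  if (in.empty()) return {false, "", 0};
  std::string_view host;
  std::size_t colon = std::string_view::npos;
  if (in.front() == '[') {                      // "[IPv6]" or "[IPv6]:port"
    const std::size_t close = in.rfind(']');
    if (close == std::string_view::npos) return {false, "", 0};
    host = in.substr(1, close - 1);
    colon = in.find(':', close);
  } else {                                      // "host" or "host:port"
    colon = in.find(':');
    host = (colon == std::string_view::npos) ? in : in.substr(0, colon);
  }
  int port = default_port;
  if (colon != std::string_view::npos) {
    const std::string_view digits = in.substr(colon + 1);
    const auto [ptr, ec] =
        std::from_chars(digits.data(), digits.data() + digits.size(), port);
    if (ec != std::errc() || ptr != digits.data() + digits.size())
      return {false, "", 0};
  }
  return {!host.empty(), host, port};
}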
-static bool ParseHostnameAndPortFromString(const std::string& in_str, - std::string* host, - int* port) { - RTC_DCHECK(host->empty()); + +// Return tuple is success, host, port. +std::tuple ParseHostnameAndPortFromString( + absl::string_view in_str, + int default_port) { + if (in_str.empty()) { + return {false, "", 0}; + } + absl::string_view host; + int port = default_port; + if (in_str.at(0) == '[') { // IP_literal syntax - std::string::size_type closebracket = in_str.rfind(']'); - if (closebracket != std::string::npos) { - std::string::size_type colonpos = in_str.find(':', closebracket); - if (std::string::npos != colonpos) { - if (!ParsePort(in_str.substr(closebracket + 2, std::string::npos), - port)) { - return false; - } + auto closebracket = in_str.rfind(']'); + if (closebracket == absl::string_view::npos) { + return {false, "", 0}; + } + auto colonpos = in_str.find(':', closebracket); + if (absl::string_view::npos != colonpos) { + if (absl::optional opt_port = + ParsePort(in_str.substr(closebracket + 2))) { + port = *opt_port; + } else { + return {false, "", 0}; } - *host = in_str.substr(1, closebracket - 1); - } else { - return false; } + host = in_str.substr(1, closebracket - 1); } else { // IPv4address or reg-name syntax - std::string::size_type colonpos = in_str.find(':'); - if (std::string::npos != colonpos) { - if (!ParsePort(in_str.substr(colonpos + 1, std::string::npos), port)) { - return false; + auto colonpos = in_str.find(':'); + if (absl::string_view::npos != colonpos) { + if (absl::optional opt_port = + ParsePort(in_str.substr(colonpos + 1))) { + port = *opt_port; + } else { + return {false, "", 0}; } - *host = in_str.substr(0, colonpos); + host = in_str.substr(0, colonpos); } else { - *host = in_str; + host = in_str; } // RFC 3986 section 3.2.2 and Appendix A - "reg-name" syntax - if (host->find_first_not_of(kRegNameCharacters) != std::string::npos) { - return false; + if (host.find_first_not_of(kRegNameCharacters) != absl::string_view::npos) { + return {false, "", 0}; } } - return !host->empty(); + return {!host.empty(), host, port}; } // Adds a STUN or TURN server to the appropriate list, // by parsing `url` and using the username/password in `server`. -static RTCErrorType ParseIceServerUrl( +RTCError ParseIceServerUrl( const PeerConnectionInterface::IceServer& server, - const std::string& url, + absl::string_view url, cricket::ServerAddresses* stun_servers, std::vector* turn_servers) { // RFC 7064 @@ -168,82 +177,96 @@ static RTCErrorType ParseIceServerUrl( RTC_DCHECK(stun_servers != nullptr); RTC_DCHECK(turn_servers != nullptr); - std::vector tokens; cricket::ProtocolType turn_transport_type = cricket::PROTO_UDP; RTC_DCHECK(!url.empty()); - rtc::split(url, '?', &tokens); - std::string uri_without_transport = tokens[0]; + std::vector tokens = rtc::split(url, '?'); + absl::string_view uri_without_transport = tokens[0]; // Let's look into transport= param, if it exists. if (tokens.size() == kTurnTransportTokensNum) { // ?transport= is present. 
- std::string uri_transport_param = tokens[1]; - rtc::split(uri_transport_param, '=', &tokens); - if (tokens[0] != kTransport) { - RTC_LOG(LS_WARNING) << "Invalid transport parameter key."; - return RTCErrorType::SYNTAX_ERROR; + std::vector transport_tokens = + rtc::split(tokens[1], '='); + if (transport_tokens[0] != kTransport) { + LOG_AND_RETURN_ERROR( + RTCErrorType::SYNTAX_ERROR, + "ICE server parsing failed: Invalid transport parameter key."); } - if (tokens.size() < 2) { - RTC_LOG(LS_WARNING) << "Transport parameter missing value."; - return RTCErrorType::SYNTAX_ERROR; + if (transport_tokens.size() < 2) { + LOG_AND_RETURN_ERROR( + RTCErrorType::SYNTAX_ERROR, + "ICE server parsing failed: Transport parameter missing value."); } - if (!cricket::StringToProto(tokens[1].c_str(), &turn_transport_type) || - (turn_transport_type != cricket::PROTO_UDP && - turn_transport_type != cricket::PROTO_TCP)) { - RTC_LOG(LS_WARNING) << "Transport parameter should always be udp or tcp."; - return RTCErrorType::SYNTAX_ERROR; + + absl::optional proto = + cricket::StringToProto(transport_tokens[1]); + if (!proto || + (*proto != cricket::PROTO_UDP && *proto != cricket::PROTO_TCP)) { + LOG_AND_RETURN_ERROR( + RTCErrorType::SYNTAX_ERROR, + "ICE server parsing failed: Transport parameter should " + "always be udp or tcp."); } + turn_transport_type = *proto; } - std::string hoststring; - ServiceType service_type; - if (!GetServiceTypeAndHostnameFromUri(uri_without_transport, &service_type, - &hoststring)) { - RTC_LOG(LS_WARNING) << "Invalid transport parameter in ICE URI: " << url; - return RTCErrorType::SYNTAX_ERROR; + auto [service_type, hoststring] = + GetServiceTypeAndHostnameFromUri(uri_without_transport); + if (service_type == ServiceType::INVALID) { + RTC_LOG(LS_ERROR) << "Invalid transport parameter in ICE URI: " << url; + LOG_AND_RETURN_ERROR( + RTCErrorType::SYNTAX_ERROR, + "ICE server parsing failed: Invalid transport parameter in ICE URI"); } // GetServiceTypeAndHostnameFromUri should never give an empty hoststring RTC_DCHECK(!hoststring.empty()); - int port = kDefaultStunPort; - if (service_type == TURNS) { - port = kDefaultStunTlsPort; + int default_port = kDefaultStunPort; + if (service_type == ServiceType::TURNS) { + default_port = kDefaultStunTlsPort; turn_transport_type = cricket::PROTO_TLS; } - if (hoststring.find('@') != std::string::npos) { - RTC_LOG(LS_WARNING) << "Invalid url: " << uri_without_transport; - RTC_LOG(LS_WARNING) - << "Note that user-info@ in turn:-urls is long-deprecated."; - return RTCErrorType::SYNTAX_ERROR; + if (hoststring.find('@') != absl::string_view::npos) { + RTC_LOG(LS_ERROR) << "Invalid url with long deprecated user@host syntax: " + << uri_without_transport; + LOG_AND_RETURN_ERROR(RTCErrorType::SYNTAX_ERROR, + "ICE server parsing failed: Invalid url with long " + "deprecated user@host syntax"); } - std::string address; - if (!ParseHostnameAndPortFromString(hoststring, &address, &port)) { - RTC_LOG(LS_WARNING) << "Invalid hostname format: " << uri_without_transport; - return RTCErrorType::SYNTAX_ERROR; + + auto [success, address, port] = + ParseHostnameAndPortFromString(hoststring, default_port); + if (!success) { + RTC_LOG(LS_ERROR) << "Invalid hostname format: " << uri_without_transport; + LOG_AND_RETURN_ERROR(RTCErrorType::SYNTAX_ERROR, + "ICE server parsing failed: Invalid hostname format"); } if (port <= 0 || port > 0xffff) { - RTC_LOG(LS_WARNING) << "Invalid port: " << port; - return RTCErrorType::SYNTAX_ERROR; + RTC_LOG(LS_ERROR) << "Invalid port: " << 
port; + LOG_AND_RETURN_ERROR(RTCErrorType::SYNTAX_ERROR, + "ICE server parsing failed: Invalid port"); } switch (service_type) { - case STUN: - case STUNS: + case ServiceType::STUN: + case ServiceType::STUNS: stun_servers->insert(rtc::SocketAddress(address, port)); break; - case TURN: - case TURNS: { + case ServiceType::TURN: + case ServiceType::TURNS: { if (server.username.empty() || server.password.empty()) { // The WebRTC spec requires throwing an InvalidAccessError when username // or credential are ommitted; this is the native equivalent. - RTC_LOG(LS_WARNING) << "TURN server with empty username or password"; - return RTCErrorType::INVALID_PARAMETER; + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_PARAMETER, + "ICE server parsing failed: TURN server with empty " + "username or password"); } // If the hostname field is not empty, then the server address must be // the resolved IP for that host, the hostname is needed later for TLS // handshake (SNI and Certificate verification). - const std::string& hostname = + absl::string_view hostname = server.hostname.empty() ? address : server.hostname; rtc::SocketAddress socket_address(hostname, port); if (!server.hostname.empty()) { @@ -251,10 +274,11 @@ static RTCErrorType ParseIceServerUrl( if (!IPFromString(address, &ip)) { // When hostname is set, the server address must be a // resolved ip address. - RTC_LOG(LS_WARNING) - << "IceServer has hostname field set, but URI does not " - "contain an IP address."; - return RTCErrorType::INVALID_PARAMETER; + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_PARAMETER, + "ICE server parsing failed: " + "IceServer has hostname field set, but URI does not " + "contain an IP address."); } socket_address.SetResolvedIP(ip); } @@ -275,13 +299,16 @@ static RTCErrorType ParseIceServerUrl( default: // We shouldn't get to this point with an invalid service_type, we should // have returned an error already. - RTC_DCHECK_NOTREACHED() << "Unexpected service type"; - return RTCErrorType::INTERNAL_ERROR; + LOG_AND_RETURN_ERROR( + RTCErrorType::INTERNAL_ERROR, + "ICE server parsing failed: Unexpected service type"); } - return RTCErrorType::NONE; + return RTCError::OK(); } -RTCErrorType ParseIceServers( +} // namespace + +RTCError ParseIceServersOrError( const PeerConnectionInterface::IceServers& servers, cricket::ServerAddresses* stun_servers, std::vector* turn_servers) { @@ -289,25 +316,26 @@ RTCErrorType ParseIceServers( if (!server.urls.empty()) { for (const std::string& url : server.urls) { if (url.empty()) { - RTC_LOG(LS_WARNING) << "Empty uri."; - return RTCErrorType::SYNTAX_ERROR; + LOG_AND_RETURN_ERROR(RTCErrorType::SYNTAX_ERROR, + "ICE server parsing failed: Empty uri."); } - RTCErrorType err = + RTCError err = ParseIceServerUrl(server, url, stun_servers, turn_servers); - if (err != RTCErrorType::NONE) { + if (!err.ok()) { return err; } } } else if (!server.uri.empty()) { // Fallback to old .uri if new .urls isn't present. - RTCErrorType err = + RTCError err = ParseIceServerUrl(server, server.uri, stun_servers, turn_servers); - if (err != RTCErrorType::NONE) { + + if (!err.ok()) { return err; } } else { - RTC_LOG(LS_WARNING) << "Empty uri."; - return RTCErrorType::SYNTAX_ERROR; + LOG_AND_RETURN_ERROR(RTCErrorType::SYNTAX_ERROR, + "ICE server parsing failed: Empty uri."); } } // Candidates must have unique priorities, so that connectivity checks @@ -317,7 +345,14 @@ RTCErrorType ParseIceServers( // First in the list gets highest priority. 
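For callers, the visible change is the new RTCError-returning entry point; the old RTCErrorType variant stays but is deprecated (see the header hunk below). A small usage sketch, with placeholder server URLs and credentials:

#include <vector>

#include "api/peer_connection_interface.h"
#include "pc/ice_server_parsing.h"

webrtc::RTCError CollectIceServers(
    cricket::ServerAddresses* stun_servers,
    std::vector<cricket::RelayServerConfig>* turn_servers) {
  webrtc::PeerConnectionInterface::IceServer server;
  server.urls = {"stun:stun.example.net:3478",
                 "turn:turn.example.net:3478?transport=tcp"};
  server.username = "user";      // required for TURN entries
  server.password = "secret";
  return webrtc::ParseIceServersOrError({server}, stun_servers, turn_servers);
}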
turn_server.priority = priority--; } - return RTCErrorType::NONE; + return RTCError::OK(); +} + +RTCErrorType ParseIceServers( + const PeerConnectionInterface::IceServers& servers, + cricket::ServerAddresses* stun_servers, + std::vector* turn_servers) { + return ParseIceServersOrError(servers, stun_servers, turn_servers).type(); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.h b/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.h index da5de1042b..549964e285 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.h +++ b/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.h @@ -27,7 +27,12 @@ namespace webrtc { // // Intended to be used to convert/validate the servers passed into a // PeerConnection through RTCConfiguration. -RTC_EXPORT RTCErrorType +RTC_EXPORT RTCError +ParseIceServersOrError(const PeerConnectionInterface::IceServers& servers, + cricket::ServerAddresses* stun_servers, + std::vector* turn_servers); + +[[deprecated("use ParseIceServersOrError")]] RTC_EXPORT RTCErrorType ParseIceServers(const PeerConnectionInterface::IceServers& servers, cricket::ServerAddresses* stun_servers, std::vector* turn_servers); diff --git a/TMessagesProj/jni/voip/webrtc/pc/ice_transport.h b/TMessagesProj/jni/voip/webrtc/pc/ice_transport.h index 11f3de5d27..e31ec546b2 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/ice_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/ice_transport.h @@ -12,7 +12,6 @@ #define PC_ICE_TRANSPORT_H_ #include "api/ice_transport_interface.h" -#include "api/sequence_checker.h" #include "rtc_base/checks.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.h b/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.h index dc10e3d2ba..a6bec01ce7 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.h +++ b/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.h @@ -16,6 +16,7 @@ #include "absl/types/optional.h" #include "api/sequence_checker.h" #include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_ice_candidate.cc b/TMessagesProj/jni/voip/webrtc/pc/jsep_ice_candidate.cc index 6dacde629c..1e97ad42d8 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_ice_candidate.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_ice_candidate.cc @@ -10,8 +10,6 @@ #include "api/jsep_ice_candidate.h" -#include - #include "pc/webrtc_sdp.h" // This file contains JsepIceCandidate-related functions that are not diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_session_description.cc b/TMessagesProj/jni/voip/webrtc/pc/jsep_session_description.cc index 57ccf7ca6e..4c57396f08 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_session_description.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_session_description.cc @@ -11,11 +11,20 @@ #include "api/jsep_session_description.h" #include +#include +#include "absl/types/optional.h" +#include "p2p/base/p2p_constants.h" #include "p2p/base/port.h" -#include "pc/media_session.h" +#include "p2p/base/transport_description.h" +#include "p2p/base/transport_info.h" +#include "pc/media_session.h" // IWYU pragma: keep #include "pc/webrtc_sdp.h" -#include "rtc_base/arraysize.h" +#include "rtc_base/checks.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/logging.h" +#include "rtc_base/net_helper.h" +#include "rtc_base/socket_address.h" using cricket::SessionDescription; diff --git 
a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.cc index 5c7f01ae62..ec186626b7 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.cc @@ -15,17 +15,16 @@ #include #include +#include #include #include "api/array_view.h" #include "api/candidate.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/p2p_transport_channel.h" -#include "pc/sctp_data_channel_transport.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/trace_event.h" @@ -99,10 +98,6 @@ JsepTransport::JsepTransport( ? rtc::make_ref_counted( std::move(rtcp_dtls_transport)) : nullptr), - sctp_data_channel_transport_( - sctp_transport ? std::make_unique( - sctp_transport.get()) - : nullptr), sctp_transport_(sctp_transport ? rtc::make_ref_counted( std::move(sctp_transport)) @@ -201,23 +196,21 @@ webrtc::RTCError JsepTransport::SetLocalJsepTransportDescription( if (!local_fp) { local_certificate_ = nullptr; } else { - error = VerifyCertificateFingerprint(local_certificate_, local_fp); + error = VerifyCertificateFingerprint(local_certificate_.get(), local_fp); if (!error.ok()) { local_description_.reset(); return error; } } - RTC_DCHECK(rtp_dtls_transport_->internal()); - rtp_dtls_transport_->internal()->ice_transport()->SetIceParameters( - ice_parameters); + RTC_DCHECK(rtp_dtls_transport_->internal()); + rtp_dtls_transport_->internal()->ice_transport()->SetIceParameters( + ice_parameters); - { - if (rtcp_dtls_transport_) { - RTC_DCHECK(rtcp_dtls_transport_->internal()); - rtcp_dtls_transport_->internal()->ice_transport()->SetIceParameters( - ice_parameters); - } - } + if (rtcp_dtls_transport_) { + RTC_DCHECK(rtcp_dtls_transport_->internal()); + rtcp_dtls_transport_->internal()->ice_transport()->SetIceParameters( + ice_parameters); + } // If PRANSWER/ANSWER is set, we should decide transport protocol type. if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { error = NegotiateAndSetDtlsParameters(type); @@ -422,21 +415,9 @@ webrtc::RTCError JsepTransport::SetNegotiatedDtlsParameters( absl::optional dtls_role, rtc::SSLFingerprint* remote_fingerprint) { RTC_DCHECK(dtls_transport); - // Set SSL role. Role must be set before fingerprint is applied, which - // initiates DTLS setup. - if (dtls_role && !dtls_transport->SetDtlsRole(*dtls_role)) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to set SSL role for the transport."); - } - // Apply remote fingerprint. 
- if (!remote_fingerprint || - !dtls_transport->SetRemoteFingerprint( - remote_fingerprint->algorithm, remote_fingerprint->digest.cdata(), - remote_fingerprint->digest.size())) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to apply remote fingerprint."); - } - return webrtc::RTCError::OK(); + return dtls_transport->SetRemoteParameters( + remote_fingerprint->algorithm, remote_fingerprint->digest.cdata(), + remote_fingerprint->digest.size(), dtls_role); } bool JsepTransport::SetRtcpMux(bool enable, @@ -715,6 +696,10 @@ bool JsepTransport::GetTransportStats(DtlsTransportInternal* dtls_transport, dtls_transport->GetSrtpCryptoSuite(&substats.srtp_crypto_suite); dtls_transport->GetSslCipherSuite(&substats.ssl_cipher_suite); substats.dtls_state = dtls_transport->dtls_state(); + rtc::SSLRole dtls_role; + if (dtls_transport->GetDtlsRole(&dtls_role)) { + substats.dtls_role = dtls_role; + } if (!dtls_transport->ice_transport()->GetStats( &substats.ice_transport_stats)) { return false; diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.h b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.h index e3e929bfd2..f2643070a1 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.h @@ -44,7 +44,6 @@ #include "pc/srtp_transport.h" #include "pc/transport_stats.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/ssl_fingerprint.h" #include "rtc_base/ssl_stream_adapter.h" @@ -106,6 +105,9 @@ class JsepTransport { ~JsepTransport(); + JsepTransport(const JsepTransport&) = delete; + JsepTransport& operator=(const JsepTransport&) = delete; + // Returns the MID of this transport. This is only used for logging. const std::string& mid() const { return mid_; } @@ -221,10 +223,7 @@ class JsepTransport { // TODO(bugs.webrtc.org/9719): Delete method, update callers to use // SctpTransport() instead. webrtc::DataChannelTransportInterface* data_channel_transport() const { - if (sctp_data_channel_transport_) { - return sctp_data_channel_transport_.get(); - } - return nullptr; + return sctp_transport_.get(); } // TODO(deadbeef): The methods below are only public for testing. Should make @@ -309,8 +308,6 @@ class JsepTransport { rtc::scoped_refptr rtcp_dtls_transport_ RTC_GUARDED_BY(network_thread_); - const std::unique_ptr - sctp_data_channel_transport_; const rtc::scoped_refptr sctp_transport_; SrtpFilter sdes_negotiator_ RTC_GUARDED_BY(network_thread_); @@ -326,8 +323,6 @@ class JsepTransport { // `rtcp_dtls_transport_` is destroyed. The JsepTransportController will // receive the callback and update the aggregate transport states. std::function rtcp_mux_active_callback_; - - RTC_DISALLOW_COPY_AND_ASSIGN(JsepTransport); }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_collection.cc b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_collection.cc index df44a9c157..b50d303d77 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_collection.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_collection.cc @@ -219,6 +219,22 @@ const cricket::JsepTransport* JsepTransportCollection::GetTransportForMid( return it == mid_to_transport_.end() ? nullptr : it->second; } +cricket::JsepTransport* JsepTransportCollection::GetTransportForMid( + absl::string_view mid) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + // TODO(hta): should be a better way. 
+ auto it = mid_to_transport_.find(std::string(mid)); + return it == mid_to_transport_.end() ? nullptr : it->second; +} + +const cricket::JsepTransport* JsepTransportCollection::GetTransportForMid( + absl::string_view mid) const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + // TODO(hta): Should be a better way + auto it = mid_to_transport_.find(std::string(mid)); + return it == mid_to_transport_.end() ? nullptr : it->second; +} + bool JsepTransportCollection::SetTransportForMid( const std::string& mid, cricket::JsepTransport* jsep_transport) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_collection.h b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_collection.h index aa5293475e..0bb19a3260 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_collection.h +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_collection.h @@ -18,6 +18,7 @@ #include #include +#include "api/jsep.h" #include "api/peer_connection_interface.h" #include "api/sequence_checker.h" #include "pc/jsep_transport.h" @@ -117,6 +118,8 @@ class JsepTransportCollection { cricket::JsepTransport* GetTransportForMid(const std::string& mid); const cricket::JsepTransport* GetTransportForMid( const std::string& mid) const; + cricket::JsepTransport* GetTransportForMid(absl::string_view mid); + const cricket::JsepTransport* GetTransportForMid(absl::string_view mid) const; // Set transport for a MID. This may destroy a transport if it is no // longer in use. bool SetTransportForMid(const std::string& mid, diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc index b7e9f361bc..c3fc89ac57 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc @@ -12,9 +12,9 @@ #include -#include #include #include +#include #include #include @@ -29,7 +29,6 @@ #include "p2p/base/p2p_constants.h" #include "p2p/base/port.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/thread.h" #include "rtc_base/trace_event.h" @@ -62,6 +61,10 @@ JsepTransportController::JsepTransportController( RTC_DCHECK(config_.rtcp_handler); RTC_DCHECK(config_.ice_transport_factory); RTC_DCHECK(config_.on_dtls_handshake_error_); + RTC_DCHECK(config_.field_trials); + if (port_allocator_) { + port_allocator_->SetIceTiebreaker(ice_tiebreaker_); + } } JsepTransportController::~JsepTransportController() { @@ -76,8 +79,8 @@ RTCError JsepTransportController::SetLocalDescription( const cricket::SessionDescription* description) { TRACE_EVENT0("webrtc", "JsepTransportController::SetLocalDescription"); if (!network_thread_->IsCurrent()) { - return network_thread_->Invoke( - RTC_FROM_HERE, [=] { return SetLocalDescription(type, description); }); + return network_thread_->BlockingCall( + [=] { return SetLocalDescription(type, description); }); } RTC_DCHECK_RUN_ON(network_thread_); @@ -97,8 +100,8 @@ RTCError JsepTransportController::SetRemoteDescription( const cricket::SessionDescription* description) { TRACE_EVENT0("webrtc", "JsepTransportController::SetRemoteDescription"); if (!network_thread_->IsCurrent()) { - return network_thread_->Invoke( - RTC_FROM_HERE, [=] { return SetRemoteDescription(type, description); }); + return network_thread_->BlockingCall( + [=] { return SetRemoteDescription(type, description); }); } RTC_DCHECK_RUN_ON(network_thread_); @@ -106,7 +109,7 @@ RTCError JsepTransportController::SetRemoteDescription( } 
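Most edits in this file swap rtc::Thread::Invoke(RTC_FROM_HERE, ...) for the newer BlockingCall while keeping the same "hop to the network thread if we are not already there" shape. A minimal sketch of that pattern; Worker and DoWork are made up for illustration:

#include "rtc_base/thread.h"

class Worker {
 public:
  explicit Worker(rtc::Thread* network_thread)
      : network_thread_(network_thread) {}

  int DoWork(int arg) {
    if (!network_thread_->IsCurrent()) {
      // Synchronously re-enter this method on the network thread and return
      // its result, exactly like the controller methods above.
      return network_thread_->BlockingCall([=] { return DoWork(arg); });
    }
    // ... work that must run on the network thread ...
    return arg;
  }

 private:
  rtc::Thread* const network_thread_;
};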
RtpTransportInternal* JsepTransportController::GetRtpTransport( - const std::string& mid) const { + absl::string_view mid) const { RTC_DCHECK_RUN_ON(network_thread_); auto jsep_transport = GetJsepTransportForMid(mid); if (!jsep_transport) { @@ -198,8 +201,7 @@ absl::optional JsepTransportController::GetDtlsRole( // thread during negotiations, potentially multiple times. // WebRtcSessionDescriptionFactory::InternalCreateAnswer is one example. if (!network_thread_->IsCurrent()) { - return network_thread_->Invoke>( - RTC_FROM_HERE, [&] { return GetDtlsRole(mid); }); + return network_thread_->BlockingCall([&] { return GetDtlsRole(mid); }); } RTC_DCHECK_RUN_ON(network_thread_); @@ -214,8 +216,8 @@ absl::optional JsepTransportController::GetDtlsRole( bool JsepTransportController::SetLocalCertificate( const rtc::scoped_refptr& certificate) { if (!network_thread_->IsCurrent()) { - return network_thread_->Invoke( - RTC_FROM_HERE, [&] { return SetLocalCertificate(certificate); }); + return network_thread_->BlockingCall( + [&] { return SetLocalCertificate(certificate); }); } RTC_DCHECK_RUN_ON(network_thread_); @@ -273,8 +275,7 @@ JsepTransportController::GetRemoteSSLCertChain( void JsepTransportController::MaybeStartGathering() { if (!network_thread_->IsCurrent()) { - network_thread_->Invoke(RTC_FROM_HERE, - [&] { MaybeStartGathering(); }); + network_thread_->BlockingCall([&] { MaybeStartGathering(); }); return; } @@ -300,8 +301,8 @@ RTCError JsepTransportController::AddRemoteCandidates( RTCError JsepTransportController::RemoveRemoteCandidates( const cricket::Candidates& candidates) { if (!network_thread_->IsCurrent()) { - return network_thread_->Invoke( - RTC_FROM_HERE, [&] { return RemoveRemoteCandidates(candidates); }); + return network_thread_->BlockingCall( + [&] { return RemoveRemoteCandidates(candidates); }); } RTC_DCHECK_RUN_ON(network_thread_); @@ -360,9 +361,8 @@ bool JsepTransportController::GetStats(const std::string& transport_name, void JsepTransportController::SetActiveResetSrtpParams( bool active_reset_srtp_params) { if (!network_thread_->IsCurrent()) { - network_thread_->Invoke(RTC_FROM_HERE, [=] { - SetActiveResetSrtpParams(active_reset_srtp_params); - }); + network_thread_->BlockingCall( + [=] { SetActiveResetSrtpParams(active_reset_srtp_params); }); return; } RTC_DCHECK_RUN_ON(network_thread_); @@ -377,8 +377,7 @@ void JsepTransportController::SetActiveResetSrtpParams( RTCError JsepTransportController::RollbackTransports() { if (!network_thread_->IsCurrent()) { - return network_thread_->Invoke( - RTC_FROM_HERE, [=] { return RollbackTransports(); }); + return network_thread_->BlockingCall([=] { return RollbackTransports(); }); } RTC_DCHECK_RUN_ON(network_thread_); bundles_.Rollback(); @@ -399,6 +398,7 @@ JsepTransportController::CreateIceTransport(const std::string& transport_name, init.set_port_allocator(port_allocator_); init.set_async_dns_resolver_factory(async_dns_resolver_factory_); init.set_event_log(config_.event_log); + init.set_field_trials(config_.field_trials); return config_.ice_transport_factory->CreateIceTransport( transport_name, component, std::move(init)); } @@ -477,8 +477,8 @@ JsepTransportController::CreateSdesTransport( cricket::DtlsTransportInternal* rtp_dtls_transport, cricket::DtlsTransportInternal* rtcp_dtls_transport) { RTC_DCHECK_RUN_ON(network_thread_); - auto srtp_transport = - std::make_unique(rtcp_dtls_transport == nullptr); + auto srtp_transport = std::make_unique( + rtcp_dtls_transport == nullptr, *config_.field_trials); 
RTC_DCHECK(rtp_dtls_transport); srtp_transport->SetRtpPacketTransport(rtp_dtls_transport); if (rtcp_dtls_transport) { @@ -497,7 +497,7 @@ JsepTransportController::CreateDtlsSrtpTransport( cricket::DtlsTransportInternal* rtcp_dtls_transport) { RTC_DCHECK_RUN_ON(network_thread_); auto dtls_srtp_transport = std::make_unique( - rtcp_dtls_transport == nullptr); + rtcp_dtls_transport == nullptr, *config_.field_trials); if (config_.enable_external_auth) { dtls_srtp_transport->EnableExternalAuth(); } @@ -998,6 +998,15 @@ cricket::JsepTransport* JsepTransportController::GetJsepTransportForMid( const std::string& mid) { return transports_.GetTransportForMid(mid); } +const cricket::JsepTransport* JsepTransportController::GetJsepTransportForMid( + absl::string_view mid) const { + return transports_.GetTransportForMid(mid); +} + +cricket::JsepTransport* JsepTransportController::GetJsepTransportForMid( + absl::string_view mid) { + return transports_.GetTransportForMid(mid); +} const cricket::JsepTransport* JsepTransportController::GetJsepTransportByName( const std::string& transport_name) const { diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h index fb420090d4..4779bf9227 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h @@ -17,6 +17,7 @@ #include #include #include +#include #include #include @@ -45,7 +46,6 @@ #include "p2p/base/port_allocator.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_info.h" -#include "pc/channel.h" #include "pc/dtls_srtp_transport.h" #include "pc/dtls_transport.h" #include "pc/jsep_transport.h" @@ -58,10 +58,8 @@ #include "pc/transport_stats.h" #include "rtc_base/callback_list.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/helpers.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_stream_adapter.h" @@ -136,7 +134,10 @@ class JsepTransportController : public sigslot::has_slots<> { // Factory for SCTP transports. SctpTransportFactoryInterface* sctp_factory = nullptr; - std::function on_dtls_handshake_error_; + std::function on_dtls_handshake_error_; + + // Field trials. + const webrtc::FieldTrialsView* field_trials; }; // The ICE related events are fired on the `network_thread`. @@ -150,6 +151,9 @@ class JsepTransportController : public sigslot::has_slots<> { Config config); virtual ~JsepTransportController(); + JsepTransportController(const JsepTransportController&) = delete; + JsepTransportController& operator=(const JsepTransportController&) = delete; + // The main method to be called; applies a description at the transport // level, creating/destroying transport objects as needed and updating their // properties. This includes RTP, DTLS, and ICE (but not SCTP). At least not @@ -162,7 +166,7 @@ class JsepTransportController : public sigslot::has_slots<> { // Get transports to be used for the provided `mid`. If bundling is enabled, // calling GetRtpTransport for multiple MIDs may yield the same object. 
- RtpTransportInternal* GetRtpTransport(const std::string& mid) const; + RtpTransportInternal* GetRtpTransport(absl::string_view mid) const; cricket::DtlsTransportInternal* GetDtlsTransport(const std::string& mid); const cricket::DtlsTransportInternal* GetRtcpDtlsTransport( const std::string& mid) const; @@ -357,6 +361,10 @@ class JsepTransportController : public sigslot::has_slots<> { const std::string& mid) const RTC_RUN_ON(network_thread_); cricket::JsepTransport* GetJsepTransportForMid(const std::string& mid) RTC_RUN_ON(network_thread_); + const cricket::JsepTransport* GetJsepTransportForMid( + absl::string_view mid) const RTC_RUN_ON(network_thread_); + cricket::JsepTransport* GetJsepTransportForMid(absl::string_view mid) + RTC_RUN_ON(network_thread_); // Get the JsepTransport without considering the BUNDLE group. Return nullptr // if the JsepTransport is destroyed. @@ -478,8 +486,6 @@ class JsepTransportController : public sigslot::has_slots<> { rtc::scoped_refptr certificate_; BundleManager bundles_; - - RTC_DISALLOW_COPY_AND_ASSIGN(JsepTransportController); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/legacy_stats_collector.cc b/TMessagesProj/jni/voip/webrtc/pc/legacy_stats_collector.cc new file mode 100644 index 0000000000..b710bc16e6 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/legacy_stats_collector.cc @@ -0,0 +1,1374 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "pc/legacy_stats_collector.h" + +#include +#include + +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/candidate.h" +#include "api/data_channel_interface.h" +#include "api/field_trials_view.h" +#include "api/media_types.h" +#include "api/rtp_sender_interface.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/video/video_content_type.h" +#include "api/video/video_timing.h" +#include "call/call.h" +#include "media/base/media_channel.h" +#include "modules/audio_processing/include/audio_processing_statistics.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/p2p_constants.h" +#include "pc/channel_interface.h" +#include "pc/data_channel_utils.h" +#include "pc/rtp_receiver.h" +#include "pc/rtp_receiver_proxy.h" +#include "pc/rtp_sender_proxy.h" +#include "pc/rtp_transceiver.h" +#include "pc/transport_stats.h" +#include "rtc_base/checks.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/logging.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/string_encode.h" +#include "rtc_base/thread.h" +#include "rtc_base/time_utils.h" +#include "rtc_base/trace_event.h" + +namespace webrtc { +namespace { + +// Field trial which controls whether to report standard-compliant bytes +// sent/received per stream. If enabled, padding and headers are not included +// in bytes sent or received. 
+constexpr char kUseStandardBytesStats[] = "WebRTC-UseStandardBytesStats"; + +// The following is the enum RTCStatsIceCandidateType from +// http://w3c.github.io/webrtc-stats/#rtcstatsicecandidatetype-enum such that +// our stats report for ice candidate type could conform to that. +const char STATSREPORT_LOCAL_PORT_TYPE[] = "host"; +const char STATSREPORT_STUN_PORT_TYPE[] = "serverreflexive"; +const char STATSREPORT_PRFLX_PORT_TYPE[] = "peerreflexive"; +const char STATSREPORT_RELAY_PORT_TYPE[] = "relayed"; + +// Strings used by the stats collector to report adapter types. This fits the +// general stype of http://w3c.github.io/webrtc-stats than what +// AdapterTypeToString does. +const char* STATSREPORT_ADAPTER_TYPE_ETHERNET = "lan"; +const char* STATSREPORT_ADAPTER_TYPE_WIFI = "wlan"; +const char* STATSREPORT_ADAPTER_TYPE_WWAN = "wwan"; +const char* STATSREPORT_ADAPTER_TYPE_VPN = "vpn"; +const char* STATSREPORT_ADAPTER_TYPE_LOOPBACK = "loopback"; +const char* STATSREPORT_ADAPTER_TYPE_WILDCARD = "wildcard"; + +template +struct TypeForAdd { + const StatsReport::StatsValueName name; + const ValueType& value; +}; + +typedef TypeForAdd BoolForAdd; +typedef TypeForAdd FloatForAdd; +typedef TypeForAdd Int64ForAdd; +typedef TypeForAdd IntForAdd; + +StatsReport* AddTrackReport(StatsCollection* reports, + const std::string& track_id) { + // Adds an empty track report. + StatsReport::Id id( + StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack, track_id)); + StatsReport* report = reports->ReplaceOrAddNew(id); + report->AddString(StatsReport::kStatsValueNameTrackId, track_id); + return report; +} + +template +void CreateTrackReport(const Track* track, + StatsCollection* reports, + TrackIdMap* track_ids) { + const std::string& track_id = track->id(); + StatsReport* report = AddTrackReport(reports, track_id); + RTC_DCHECK(report != nullptr); + (*track_ids)[track_id] = report; +} + +template +void CreateTrackReports(const TrackVector& tracks, + StatsCollection* reports, + TrackIdMap* track_ids) { + for (const auto& track : tracks) { + CreateTrackReport(track.get(), reports, track_ids); + } +} + +void ExtractCommonSendProperties(const cricket::MediaSenderInfo& info, + StatsReport* report, + bool use_standard_bytes_stats) { + report->AddString(StatsReport::kStatsValueNameCodecName, info.codec_name); + int64_t bytes_sent = info.payload_bytes_sent; + if (!use_standard_bytes_stats) { + bytes_sent += info.header_and_padding_bytes_sent; + } + report->AddInt64(StatsReport::kStatsValueNameBytesSent, bytes_sent); + if (info.rtt_ms >= 0) { + report->AddInt64(StatsReport::kStatsValueNameRtt, info.rtt_ms); + } +} + +void ExtractCommonReceiveProperties(const cricket::MediaReceiverInfo& info, + StatsReport* report) { + report->AddString(StatsReport::kStatsValueNameCodecName, info.codec_name); +} + +void SetAudioProcessingStats(StatsReport* report, + const AudioProcessingStats& apm_stats) { + if (apm_stats.delay_median_ms) { + report->AddInt(StatsReport::kStatsValueNameEchoDelayMedian, + *apm_stats.delay_median_ms); + } + if (apm_stats.delay_standard_deviation_ms) { + report->AddInt(StatsReport::kStatsValueNameEchoDelayStdDev, + *apm_stats.delay_standard_deviation_ms); + } + if (apm_stats.echo_return_loss) { + report->AddInt(StatsReport::kStatsValueNameEchoReturnLoss, + *apm_stats.echo_return_loss); + } + if (apm_stats.echo_return_loss_enhancement) { + report->AddInt(StatsReport::kStatsValueNameEchoReturnLossEnhancement, + *apm_stats.echo_return_loss_enhancement); + } + if (apm_stats.residual_echo_likelihood) { 
+ report->AddFloat(StatsReport::kStatsValueNameResidualEchoLikelihood, + static_cast(*apm_stats.residual_echo_likelihood)); + } + if (apm_stats.residual_echo_likelihood_recent_max) { + report->AddFloat( + StatsReport::kStatsValueNameResidualEchoLikelihoodRecentMax, + static_cast(*apm_stats.residual_echo_likelihood_recent_max)); + } + if (apm_stats.divergent_filter_fraction) { + report->AddFloat(StatsReport::kStatsValueNameAecDivergentFilterFraction, + static_cast(*apm_stats.divergent_filter_fraction)); + } +} + +void ExtractStats(const cricket::VoiceReceiverInfo& info, + StatsReport* report, + bool use_standard_bytes_stats) { + ExtractCommonReceiveProperties(info, report); + const FloatForAdd floats[] = { + {StatsReport::kStatsValueNameExpandRate, info.expand_rate}, + {StatsReport::kStatsValueNameSecondaryDecodedRate, + info.secondary_decoded_rate}, + {StatsReport::kStatsValueNameSecondaryDiscardedRate, + info.secondary_discarded_rate}, + {StatsReport::kStatsValueNameSpeechExpandRate, info.speech_expand_rate}, + {StatsReport::kStatsValueNameAccelerateRate, info.accelerate_rate}, + {StatsReport::kStatsValueNamePreemptiveExpandRate, + info.preemptive_expand_rate}, + {StatsReport::kStatsValueNameTotalAudioEnergy, info.total_output_energy}, + {StatsReport::kStatsValueNameTotalSamplesDuration, + info.total_output_duration}}; + + const IntForAdd ints[] = { + {StatsReport::kStatsValueNameCurrentDelayMs, info.delay_estimate_ms}, + {StatsReport::kStatsValueNameDecodingCNG, info.decoding_cng}, + {StatsReport::kStatsValueNameDecodingCTN, info.decoding_calls_to_neteq}, + {StatsReport::kStatsValueNameDecodingCTSG, + info.decoding_calls_to_silence_generator}, + {StatsReport::kStatsValueNameDecodingMutedOutput, + info.decoding_muted_output}, + {StatsReport::kStatsValueNameDecodingNormal, info.decoding_normal}, + {StatsReport::kStatsValueNameDecodingPLC, info.decoding_plc}, + {StatsReport::kStatsValueNameDecodingPLCCNG, info.decoding_plc_cng}, + {StatsReport::kStatsValueNameJitterBufferMs, info.jitter_buffer_ms}, + {StatsReport::kStatsValueNameJitterReceived, info.jitter_ms}, + {StatsReport::kStatsValueNamePacketsLost, info.packets_lost}, + {StatsReport::kStatsValueNamePacketsReceived, info.packets_rcvd}, + {StatsReport::kStatsValueNamePreferredJitterBufferMs, + info.jitter_buffer_preferred_ms}, + }; + + for (const auto& f : floats) + report->AddFloat(f.name, f.value); + + for (const auto& i : ints) + report->AddInt(i.name, i.value); + if (info.audio_level >= 0) { + report->AddInt(StatsReport::kStatsValueNameAudioOutputLevel, + info.audio_level); + } + if (info.decoding_codec_plc) + report->AddInt(StatsReport::kStatsValueNameDecodingCodecPLC, + info.decoding_codec_plc); + + int64_t bytes_rcvd = info.payload_bytes_rcvd; + if (!use_standard_bytes_stats) { + bytes_rcvd += info.header_and_padding_bytes_rcvd; + } + report->AddInt64(StatsReport::kStatsValueNameBytesReceived, bytes_rcvd); + if (info.capture_start_ntp_time_ms >= 0) { + report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs, + info.capture_start_ntp_time_ms); + } + report->AddString(StatsReport::kStatsValueNameMediaType, "audio"); +} + +void ExtractStats(const cricket::VoiceSenderInfo& info, + StatsReport* report, + bool use_standard_bytes_stats) { + ExtractCommonSendProperties(info, report, use_standard_bytes_stats); + + SetAudioProcessingStats(report, info.apm_statistics); + + const FloatForAdd floats[] = { + {StatsReport::kStatsValueNameTotalAudioEnergy, info.total_input_energy}, + {StatsReport::kStatsValueNameTotalSamplesDuration, 
+ info.total_input_duration}}; + + RTC_DCHECK_GE(info.audio_level, 0); + const IntForAdd ints[] = { + {StatsReport::kStatsValueNameAudioInputLevel, info.audio_level}, + {StatsReport::kStatsValueNameJitterReceived, info.jitter_ms}, + {StatsReport::kStatsValueNamePacketsLost, info.packets_lost}, + {StatsReport::kStatsValueNamePacketsSent, info.packets_sent}, + }; + + for (const auto& f : floats) { + report->AddFloat(f.name, f.value); + } + + for (const auto& i : ints) { + if (i.value >= 0) { + report->AddInt(i.name, i.value); + } + } + report->AddString(StatsReport::kStatsValueNameMediaType, "audio"); + if (info.ana_statistics.bitrate_action_counter) { + report->AddInt(StatsReport::kStatsValueNameAnaBitrateActionCounter, + *info.ana_statistics.bitrate_action_counter); + } + if (info.ana_statistics.channel_action_counter) { + report->AddInt(StatsReport::kStatsValueNameAnaChannelActionCounter, + *info.ana_statistics.channel_action_counter); + } + if (info.ana_statistics.dtx_action_counter) { + report->AddInt(StatsReport::kStatsValueNameAnaDtxActionCounter, + *info.ana_statistics.dtx_action_counter); + } + if (info.ana_statistics.fec_action_counter) { + report->AddInt(StatsReport::kStatsValueNameAnaFecActionCounter, + *info.ana_statistics.fec_action_counter); + } + if (info.ana_statistics.frame_length_increase_counter) { + report->AddInt(StatsReport::kStatsValueNameAnaFrameLengthIncreaseCounter, + *info.ana_statistics.frame_length_increase_counter); + } + if (info.ana_statistics.frame_length_decrease_counter) { + report->AddInt(StatsReport::kStatsValueNameAnaFrameLengthDecreaseCounter, + *info.ana_statistics.frame_length_decrease_counter); + } + if (info.ana_statistics.uplink_packet_loss_fraction) { + report->AddFloat(StatsReport::kStatsValueNameAnaUplinkPacketLossFraction, + *info.ana_statistics.uplink_packet_loss_fraction); + } +} + +void ExtractStats(const cricket::VideoReceiverInfo& info, + StatsReport* report, + bool use_standard_bytes_stats) { + ExtractCommonReceiveProperties(info, report); + report->AddString(StatsReport::kStatsValueNameCodecImplementationName, + info.decoder_implementation_name); + int64_t bytes_rcvd = info.payload_bytes_rcvd; + if (!use_standard_bytes_stats) { + bytes_rcvd += info.header_and_padding_bytes_rcvd; + } + report->AddInt64(StatsReport::kStatsValueNameBytesReceived, bytes_rcvd); + if (info.capture_start_ntp_time_ms >= 0) { + report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs, + info.capture_start_ntp_time_ms); + } + if (info.first_frame_received_to_decoded_ms >= 0) { + report->AddInt64(StatsReport::kStatsValueNameFirstFrameReceivedToDecodedMs, + info.first_frame_received_to_decoded_ms); + } + if (info.qp_sum) + report->AddInt64(StatsReport::kStatsValueNameQpSum, *info.qp_sum); + + if (info.nacks_sent) { + report->AddInt(StatsReport::kStatsValueNameNacksSent, *info.nacks_sent); + } + + const IntForAdd ints[] = { + {StatsReport::kStatsValueNameCurrentDelayMs, info.current_delay_ms}, + {StatsReport::kStatsValueNameDecodeMs, info.decode_ms}, + {StatsReport::kStatsValueNameFirsSent, info.firs_sent}, + {StatsReport::kStatsValueNameFrameHeightReceived, info.frame_height}, + {StatsReport::kStatsValueNameFrameRateDecoded, info.framerate_decoded}, + {StatsReport::kStatsValueNameFrameRateOutput, info.framerate_output}, + {StatsReport::kStatsValueNameFrameRateReceived, info.framerate_rcvd}, + {StatsReport::kStatsValueNameFrameWidthReceived, info.frame_width}, + {StatsReport::kStatsValueNameJitterBufferMs, info.jitter_buffer_ms}, + 
{StatsReport::kStatsValueNameMaxDecodeMs, info.max_decode_ms}, + {StatsReport::kStatsValueNameMinPlayoutDelayMs, + info.min_playout_delay_ms}, + {StatsReport::kStatsValueNamePacketsLost, info.packets_lost}, + {StatsReport::kStatsValueNamePacketsReceived, info.packets_rcvd}, + {StatsReport::kStatsValueNamePlisSent, info.plis_sent}, + {StatsReport::kStatsValueNameRenderDelayMs, info.render_delay_ms}, + {StatsReport::kStatsValueNameTargetDelayMs, info.target_delay_ms}, + {StatsReport::kStatsValueNameFramesDecoded, info.frames_decoded}, + }; + + for (const auto& i : ints) + report->AddInt(i.name, i.value); + report->AddString(StatsReport::kStatsValueNameMediaType, "video"); + + if (info.timing_frame_info) { + report->AddString(StatsReport::kStatsValueNameTimingFrameInfo, + info.timing_frame_info->ToString()); + } + + report->AddInt64(StatsReport::kStatsValueNameInterframeDelayMaxMs, + info.interframe_delay_max_ms); + + report->AddString( + StatsReport::kStatsValueNameContentType, + webrtc::videocontenttypehelpers::ToString(info.content_type)); +} + +void ExtractStats(const cricket::VideoSenderInfo& info, + StatsReport* report, + bool use_standard_bytes_stats) { + ExtractCommonSendProperties(info, report, use_standard_bytes_stats); + + report->AddString(StatsReport::kStatsValueNameCodecImplementationName, + info.encoder_implementation_name); + report->AddBoolean(StatsReport::kStatsValueNameBandwidthLimitedResolution, + (info.adapt_reason & 0x2) > 0); + report->AddBoolean(StatsReport::kStatsValueNameCpuLimitedResolution, + (info.adapt_reason & 0x1) > 0); + report->AddBoolean(StatsReport::kStatsValueNameHasEnteredLowResolution, + info.has_entered_low_resolution); + + if (info.qp_sum) + report->AddInt(StatsReport::kStatsValueNameQpSum, *info.qp_sum); + + const IntForAdd ints[] = { + {StatsReport::kStatsValueNameAdaptationChanges, info.adapt_changes}, + {StatsReport::kStatsValueNameAvgEncodeMs, info.avg_encode_ms}, + {StatsReport::kStatsValueNameEncodeUsagePercent, + info.encode_usage_percent}, + {StatsReport::kStatsValueNameFirsReceived, info.firs_rcvd}, + {StatsReport::kStatsValueNameFrameHeightSent, info.send_frame_height}, + {StatsReport::kStatsValueNameFrameRateInput, round(info.framerate_input)}, + {StatsReport::kStatsValueNameFrameRateSent, info.framerate_sent}, + {StatsReport::kStatsValueNameFrameWidthSent, info.send_frame_width}, + {StatsReport::kStatsValueNameNacksReceived, info.nacks_rcvd}, + {StatsReport::kStatsValueNamePacketsLost, info.packets_lost}, + {StatsReport::kStatsValueNamePacketsSent, info.packets_sent}, + {StatsReport::kStatsValueNamePlisReceived, info.plis_rcvd}, + {StatsReport::kStatsValueNameFramesEncoded, info.frames_encoded}, + {StatsReport::kStatsValueNameHugeFramesSent, info.huge_frames_sent}, + }; + + for (const auto& i : ints) + report->AddInt(i.name, i.value); + report->AddString(StatsReport::kStatsValueNameMediaType, "video"); + report->AddString( + StatsReport::kStatsValueNameContentType, + webrtc::videocontenttypehelpers::ToString(info.content_type)); +} + +void ExtractStats(const cricket::BandwidthEstimationInfo& info, + double stats_gathering_started, + StatsReport* report) { + RTC_DCHECK(report->type() == StatsReport::kStatsReportTypeBwe); + + report->set_timestamp(stats_gathering_started); + const IntForAdd ints[] = { + {StatsReport::kStatsValueNameAvailableSendBandwidth, + info.available_send_bandwidth}, + {StatsReport::kStatsValueNameAvailableReceiveBandwidth, + info.available_recv_bandwidth}, + {StatsReport::kStatsValueNameTargetEncBitrate, 
info.target_enc_bitrate}, + {StatsReport::kStatsValueNameActualEncBitrate, info.actual_enc_bitrate}, + {StatsReport::kStatsValueNameRetransmitBitrate, info.retransmit_bitrate}, + {StatsReport::kStatsValueNameTransmitBitrate, info.transmit_bitrate}, + }; + for (const auto& i : ints) + report->AddInt(i.name, i.value); + report->AddInt64(StatsReport::kStatsValueNameBucketDelay, info.bucket_delay); +} + +void ExtractRemoteStats(const cricket::MediaSenderInfo& info, + StatsReport* report) { + report->set_timestamp(info.remote_stats[0].timestamp); + // TODO(hta): Extract some stats here. +} + +void ExtractRemoteStats(const cricket::MediaReceiverInfo& info, + StatsReport* report) { + report->set_timestamp(info.remote_stats[0].timestamp); + // TODO(hta): Extract some stats here. +} + +std::string GetTrackIdBySsrc( + uint32_t ssrc, + StatsReport::Direction direction, + const std::map& track_id_by_ssrc) { + auto it = track_id_by_ssrc.find(ssrc); + if (it != track_id_by_ssrc.end()) { + return it->second; + } + if (direction == StatsReport::kReceive) { + // If the track ID was not found, this might be an unsignaled receive + // SSRC, so try looking up by the special SSRC 0. + it = track_id_by_ssrc.find(0); + if (it != track_id_by_ssrc.end()) { + RTC_LOG(LS_INFO) << "Assuming SSRC=" << ssrc + << " is an unsignalled receive stream corresponding " + "to the RtpReceiver with track ID \"" + << it->second << "\"."; + return it->second; + } + } + return ""; +} + +// Template to extract stats from a data vector. +// In order to use the template, the functions that are called from it, +// ExtractStats and ExtractRemoteStats, must be defined and overloaded +// for each type. +template +void ExtractStatsFromList( + const std::vector& data, + const StatsReport::Id& transport_id, + LegacyStatsCollector* collector, + StatsReport::Direction direction, + const std::map& track_id_by_ssrc) { + for (const auto& d : data) { + uint32_t ssrc = d.ssrc(); + std::string track_id = GetTrackIdBySsrc(ssrc, direction, track_id_by_ssrc); + // Each track can have stats for both local and remote objects. + // TODO(hta): Handle the case of multiple SSRCs per object. 
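// Aside (editorial illustration, not part of the upstream file): the
// ExtractStats() and ExtractRemoteStats() calls below are dispatched purely by
// overload resolution: the element type of `data` (VoiceSenderInfo,
// VoiceReceiverInfo, VideoSenderInfo or VideoReceiverInfo) selects the
// matching overload defined earlier in this file. A minimal sketch of the same
// pattern, using hypothetical types:
//
//   struct AudioInfo { int level = 0; };
//   struct VideoInfo { int width = 0; };
//   void Extract(const AudioInfo& info) { /* fill audio-only values */ }
//   void Extract(const VideoInfo& info) { /* fill video-only values */ }
//
//   template <typename T>
//   void ExtractAll(const std::vector<T>& items) {
//     for (const auto& item : items)
//       Extract(item);  // overload selected at compile time from T
//   }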
+ StatsReport* report = + collector->PrepareReport(true, ssrc, track_id, transport_id, direction); + if (report) + ExtractStats(d, report, collector->UseStandardBytesStats()); + + if (!d.remote_stats.empty()) { + report = collector->PrepareReport(false, ssrc, track_id, transport_id, + direction); + if (report) + ExtractRemoteStats(d, report); + } + } +} + +} // namespace + +const char* IceCandidateTypeToStatsType(const std::string& candidate_type) { + if (candidate_type == cricket::LOCAL_PORT_TYPE) { + return STATSREPORT_LOCAL_PORT_TYPE; + } + if (candidate_type == cricket::STUN_PORT_TYPE) { + return STATSREPORT_STUN_PORT_TYPE; + } + if (candidate_type == cricket::PRFLX_PORT_TYPE) { + return STATSREPORT_PRFLX_PORT_TYPE; + } + if (candidate_type == cricket::RELAY_PORT_TYPE) { + return STATSREPORT_RELAY_PORT_TYPE; + } + RTC_DCHECK_NOTREACHED(); + return "unknown"; +} + +const char* AdapterTypeToStatsType(rtc::AdapterType type) { + switch (type) { + case rtc::ADAPTER_TYPE_UNKNOWN: + return "unknown"; + case rtc::ADAPTER_TYPE_ETHERNET: + return STATSREPORT_ADAPTER_TYPE_ETHERNET; + case rtc::ADAPTER_TYPE_WIFI: + return STATSREPORT_ADAPTER_TYPE_WIFI; + case rtc::ADAPTER_TYPE_CELLULAR: + case rtc::ADAPTER_TYPE_CELLULAR_2G: + case rtc::ADAPTER_TYPE_CELLULAR_3G: + case rtc::ADAPTER_TYPE_CELLULAR_4G: + case rtc::ADAPTER_TYPE_CELLULAR_5G: + return STATSREPORT_ADAPTER_TYPE_WWAN; + case rtc::ADAPTER_TYPE_VPN: + return STATSREPORT_ADAPTER_TYPE_VPN; + case rtc::ADAPTER_TYPE_LOOPBACK: + return STATSREPORT_ADAPTER_TYPE_LOOPBACK; + case rtc::ADAPTER_TYPE_ANY: + return STATSREPORT_ADAPTER_TYPE_WILDCARD; + default: + RTC_DCHECK_NOTREACHED(); + return ""; + } +} + +LegacyStatsCollector::LegacyStatsCollector(PeerConnectionInternal* pc) + : pc_(pc), + stats_gathering_started_(0), + use_standard_bytes_stats_( + pc->trials().IsEnabled(kUseStandardBytesStats)) { + RTC_DCHECK(pc_); +} + +LegacyStatsCollector::~LegacyStatsCollector() { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); +} + +// Wallclock time in ms. +double LegacyStatsCollector::GetTimeNow() { + return static_cast(rtc::TimeUTCMillis()); +} + +// Adds a MediaStream with tracks that can be used as a `selector` in a call +// to GetStats. +void LegacyStatsCollector::AddStream(MediaStreamInterface* stream) { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + RTC_DCHECK(stream != NULL); + + CreateTrackReports(stream->GetAudioTracks(), &reports_, + &track_ids_); + CreateTrackReports(stream->GetVideoTracks(), &reports_, + &track_ids_); +} + +void LegacyStatsCollector::AddTrack(MediaStreamTrackInterface* track) { + if (track->kind() == MediaStreamTrackInterface::kAudioKind) { + CreateTrackReport(static_cast(track), &reports_, + &track_ids_); + } else if (track->kind() == MediaStreamTrackInterface::kVideoKind) { + CreateTrackReport(static_cast(track), &reports_, + &track_ids_); + } else { + RTC_DCHECK_NOTREACHED() << "Illegal track kind"; + } +} + +void LegacyStatsCollector::AddLocalAudioTrack(AudioTrackInterface* audio_track, + uint32_t ssrc) { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + RTC_DCHECK(audio_track != NULL); +#if RTC_DCHECK_IS_ON + for (const auto& track : local_audio_tracks_) + RTC_DCHECK(track.first != audio_track || track.second != ssrc); +#endif + + local_audio_tracks_.push_back(std::make_pair(audio_track, ssrc)); + + // Create the kStatsReportTypeTrack report for the new track if there is no + // report yet. 
+ StatsReport::Id id(StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack, + audio_track->id())); + StatsReport* report = reports_.Find(id); + if (!report) { + report = reports_.InsertNew(id); + report->AddString(StatsReport::kStatsValueNameTrackId, audio_track->id()); + } +} + +void LegacyStatsCollector::RemoveLocalAudioTrack( + AudioTrackInterface* audio_track, + uint32_t ssrc) { + RTC_DCHECK(audio_track != NULL); + local_audio_tracks_.erase( + std::remove_if( + local_audio_tracks_.begin(), local_audio_tracks_.end(), + [audio_track, ssrc](const LocalAudioTrackVector::value_type& track) { + return track.first == audio_track && track.second == ssrc; + }), + local_audio_tracks_.end()); +} + +void LegacyStatsCollector::GetStats(MediaStreamTrackInterface* track, + StatsReports* reports) { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + RTC_DCHECK(reports != NULL); + RTC_DCHECK(reports->empty()); + + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + + if (!track) { + reports->reserve(reports_.size()); + for (auto* r : reports_) + reports->push_back(r); + return; + } + + StatsReport* report = reports_.Find(StatsReport::NewTypedId( + StatsReport::kStatsReportTypeSession, pc_->session_id())); + if (report) + reports->push_back(report); + + report = reports_.Find( + StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack, track->id())); + + if (!report) + return; + + reports->push_back(report); + + std::string track_id; + for (const auto* r : reports_) { + if (r->type() != StatsReport::kStatsReportTypeSsrc) + continue; + + const StatsReport::Value* v = + r->FindValue(StatsReport::kStatsValueNameTrackId); + if (v && v->string_val() == track->id()) + reports->push_back(r); + } +} + +void LegacyStatsCollector::UpdateStats( + PeerConnectionInterface::StatsOutputLevel level) { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + // Calls to UpdateStats() that occur less than kMinGatherStatsPeriodMs apart + // will be ignored. Using a monotonic clock specifically for this, while using + // a UTC clock for the reports themselves. + const int64_t kMinGatherStatsPeriodMs = 50; + int64_t cache_now_ms = rtc::TimeMillis(); + if (cache_timestamp_ms_ != 0 && + cache_timestamp_ms_ + kMinGatherStatsPeriodMs > cache_now_ms) { + return; + } + cache_timestamp_ms_ = cache_now_ms; + stats_gathering_started_ = GetTimeNow(); + + // TODO(tommi): ExtractSessionInfo now has a single hop to the network thread + // to fetch stats, then applies them on the signaling thread. See if we need + // to do this synchronously or if updating the stats without blocking is safe. + std::map transport_names_by_mid = + ExtractSessionInfo(); + + // TODO(tommi): All of these hop over to the worker thread to fetch + // information. We could post a task to run all of these and post + // the information back to the signaling thread where we can create and + // update stats reports. That would also clean up the threading story a bit + // since we'd be creating/updating the stats report objects consistently on + // the same thread (this class has no locks right now). + ExtractBweInfo(); + ExtractMediaInfo(transport_names_by_mid); + ExtractSenderInfo(); + ExtractDataInfo(); + UpdateTrackReports(); +} + +StatsReport* LegacyStatsCollector::PrepareReport( + bool local, + uint32_t ssrc, + const std::string& track_id, + const StatsReport::Id& transport_id, + StatsReport::Direction direction) { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + StatsReport::Id id(StatsReport::NewIdWithDirection( + local ? 
StatsReport::kStatsReportTypeSsrc + : StatsReport::kStatsReportTypeRemoteSsrc, + rtc::ToString(ssrc), direction)); + StatsReport* report = reports_.Find(id); + if (!report) { + report = reports_.InsertNew(id); + } + + // FYI - for remote reports, the timestamp will be overwritten later. + report->set_timestamp(stats_gathering_started_); + + report->AddInt64(StatsReport::kStatsValueNameSsrc, ssrc); + if (!track_id.empty()) { + report->AddString(StatsReport::kStatsValueNameTrackId, track_id); + } + // Add the mapping of SSRC to transport. + report->AddId(StatsReport::kStatsValueNameTransportId, transport_id); + return report; +} + +StatsReport* LegacyStatsCollector::PrepareADMReport() { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + StatsReport::Id id(StatsReport::NewTypedId( + StatsReport::kStatsReportTypeSession, pc_->session_id())); + StatsReport* report = reports_.FindOrAddNew(id); + return report; +} + +bool LegacyStatsCollector::IsValidTrack(const std::string& track_id) { + return reports_.Find(StatsReport::NewTypedId( + StatsReport::kStatsReportTypeTrack, track_id)) != nullptr; +} + +StatsReport* LegacyStatsCollector::AddCertificateReports( + std::unique_ptr cert_stats) { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + + StatsReport* first_report = nullptr; + StatsReport* prev_report = nullptr; + for (rtc::SSLCertificateStats* stats = cert_stats.get(); stats; + stats = stats->issuer.get()) { + StatsReport::Id id(StatsReport::NewTypedId( + StatsReport::kStatsReportTypeCertificate, stats->fingerprint)); + + StatsReport* report = reports_.ReplaceOrAddNew(id); + report->set_timestamp(stats_gathering_started_); + report->AddString(StatsReport::kStatsValueNameFingerprint, + stats->fingerprint); + report->AddString(StatsReport::kStatsValueNameFingerprintAlgorithm, + stats->fingerprint_algorithm); + report->AddString(StatsReport::kStatsValueNameDer, + stats->base64_certificate); + if (!first_report) + first_report = report; + else + prev_report->AddId(StatsReport::kStatsValueNameIssuerId, id); + prev_report = report; + } + return first_report; +} + +StatsReport* LegacyStatsCollector::AddConnectionInfoReport( + const std::string& content_name, + int component, + int connection_id, + const StatsReport::Id& channel_report_id, + const cricket::ConnectionInfo& info) { + StatsReport::Id id( + StatsReport::NewCandidatePairId(content_name, component, connection_id)); + StatsReport* report = reports_.ReplaceOrAddNew(id); + report->set_timestamp(stats_gathering_started_); + + const BoolForAdd bools[] = { + {StatsReport::kStatsValueNameActiveConnection, info.best_connection}, + {StatsReport::kStatsValueNameReceiving, info.receiving}, + {StatsReport::kStatsValueNameWritable, info.writable}, + }; + for (const auto& b : bools) + report->AddBoolean(b.name, b.value); + + report->AddId(StatsReport::kStatsValueNameChannelId, channel_report_id); + cricket::CandidateStats local_candidate_stats(info.local_candidate); + cricket::CandidateStats remote_candidate_stats(info.remote_candidate); + report->AddId(StatsReport::kStatsValueNameLocalCandidateId, + AddCandidateReport(local_candidate_stats, true)->id()); + report->AddId(StatsReport::kStatsValueNameRemoteCandidateId, + AddCandidateReport(remote_candidate_stats, false)->id()); + + const Int64ForAdd int64s[] = { + {StatsReport::kStatsValueNameBytesReceived, info.recv_total_bytes}, + {StatsReport::kStatsValueNameBytesSent, info.sent_total_bytes}, + {StatsReport::kStatsValueNamePacketsSent, info.sent_total_packets}, + {StatsReport::kStatsValueNameRtt, info.rtt}, + 
{StatsReport::kStatsValueNameSendPacketsDiscarded, + info.sent_discarded_packets}, + {StatsReport::kStatsValueNameSentPingRequestsTotal, + info.sent_ping_requests_total}, + {StatsReport::kStatsValueNameSentPingRequestsBeforeFirstResponse, + info.sent_ping_requests_before_first_response}, + {StatsReport::kStatsValueNameSentPingResponses, info.sent_ping_responses}, + {StatsReport::kStatsValueNameRecvPingRequests, info.recv_ping_requests}, + {StatsReport::kStatsValueNameRecvPingResponses, info.recv_ping_responses}, + }; + for (const auto& i : int64s) + report->AddInt64(i.name, i.value); + + report->AddString(StatsReport::kStatsValueNameLocalAddress, + info.local_candidate.address().ToString()); + report->AddString(StatsReport::kStatsValueNameLocalCandidateType, + info.local_candidate.type()); + report->AddString(StatsReport::kStatsValueNameRemoteAddress, + info.remote_candidate.address().ToString()); + report->AddString(StatsReport::kStatsValueNameRemoteCandidateType, + info.remote_candidate.type()); + report->AddString(StatsReport::kStatsValueNameTransportType, + info.local_candidate.protocol()); + report->AddString(StatsReport::kStatsValueNameLocalCandidateRelayProtocol, + info.local_candidate.relay_protocol()); + + return report; +} + +StatsReport* LegacyStatsCollector::AddCandidateReport( + const cricket::CandidateStats& candidate_stats, + bool local) { + const auto& candidate = candidate_stats.candidate(); + StatsReport::Id id(StatsReport::NewCandidateId(local, candidate.id())); + StatsReport* report = reports_.Find(id); + if (!report) { + report = reports_.InsertNew(id); + report->set_timestamp(stats_gathering_started_); + if (local) { + report->AddString(StatsReport::kStatsValueNameCandidateNetworkType, + AdapterTypeToStatsType(candidate.network_type())); + } + report->AddString(StatsReport::kStatsValueNameCandidateIPAddress, + candidate.address().ipaddr().ToString()); + report->AddString(StatsReport::kStatsValueNameCandidatePortNumber, + candidate.address().PortAsString()); + report->AddInt(StatsReport::kStatsValueNameCandidatePriority, + candidate.priority()); + report->AddString(StatsReport::kStatsValueNameCandidateType, + IceCandidateTypeToStatsType(candidate.type())); + report->AddString(StatsReport::kStatsValueNameCandidateTransportType, + candidate.protocol()); + } + report->set_timestamp(stats_gathering_started_); + + if (local && candidate_stats.stun_stats().has_value()) { + const auto& stun_stats = candidate_stats.stun_stats().value(); + report->AddInt64(StatsReport::kStatsValueNameSentStunKeepaliveRequests, + stun_stats.stun_binding_requests_sent); + report->AddInt64(StatsReport::kStatsValueNameRecvStunKeepaliveResponses, + stun_stats.stun_binding_responses_received); + report->AddFloat(StatsReport::kStatsValueNameStunKeepaliveRttTotal, + stun_stats.stun_binding_rtt_ms_total); + report->AddFloat(StatsReport::kStatsValueNameStunKeepaliveRttSquaredTotal, + stun_stats.stun_binding_rtt_ms_squared_total); + } + + return report; +} + +std::map LegacyStatsCollector::ExtractSessionInfo() { + TRACE_EVENT0("webrtc", "LegacyStatsCollector::ExtractSessionInfo"); + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + + SessionStats stats; + auto transceivers = pc_->GetTransceiversInternal(); + pc_->network_thread()->BlockingCall( + [&, sctp_transport_name = pc_->sctp_transport_name(), + sctp_mid = pc_->sctp_mid()]() mutable { + stats = ExtractSessionInfo_n( + transceivers, std::move(sctp_transport_name), std::move(sctp_mid)); + }); + + ExtractSessionInfo_s(stats); + + return 
std::move(stats.transport_names_by_mid); +} + +LegacyStatsCollector::SessionStats LegacyStatsCollector::ExtractSessionInfo_n( + const std::vector>>& transceivers, + absl::optional sctp_transport_name, + absl::optional sctp_mid) { + TRACE_EVENT0("webrtc", "LegacyStatsCollector::ExtractSessionInfo_n"); + RTC_DCHECK_RUN_ON(pc_->network_thread()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + SessionStats stats; + stats.candidate_stats = pc_->GetPooledCandidateStats(); + for (auto& transceiver : transceivers) { + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (channel) { + stats.transport_names_by_mid[channel->mid()] = + std::string(channel->transport_name()); + } + } + + if (sctp_transport_name) { + RTC_DCHECK(sctp_mid); + stats.transport_names_by_mid[*sctp_mid] = *sctp_transport_name; + } + + std::set transport_names; + for (const auto& entry : stats.transport_names_by_mid) { + transport_names.insert(entry.second); + } + + std::map transport_stats_by_name = + pc_->GetTransportStatsByNames(transport_names); + + for (auto& entry : transport_stats_by_name) { + stats.transport_stats.emplace_back(entry.first, std::move(entry.second)); + TransportStats& transport = stats.transport_stats.back(); + + // Attempt to get a copy of the certificates from the transport and + // expose them in stats reports. All channels in a transport share the + // same local and remote certificates. + // + StatsReport::Id local_cert_report_id, remote_cert_report_id; + rtc::scoped_refptr certificate; + if (pc_->GetLocalCertificate(transport.name, &certificate)) { + transport.local_cert_stats = + certificate->GetSSLCertificateChain().GetStats(); + } + + std::unique_ptr remote_cert_chain = + pc_->GetRemoteSSLCertChain(transport.name); + if (remote_cert_chain) { + transport.remote_cert_stats = remote_cert_chain->GetStats(); + } + } + + return stats; +} + +void LegacyStatsCollector::ExtractSessionInfo_s(SessionStats& session_stats) { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + + StatsReport::Id id(StatsReport::NewTypedId( + StatsReport::kStatsReportTypeSession, pc_->session_id())); + StatsReport* report = reports_.ReplaceOrAddNew(id); + report->set_timestamp(stats_gathering_started_); + report->AddBoolean(StatsReport::kStatsValueNameInitiator, + pc_->initial_offerer()); + + for (const cricket::CandidateStats& stats : session_stats.candidate_stats) { + AddCandidateReport(stats, true); + } + + for (auto& transport : session_stats.transport_stats) { + // Attempt to get a copy of the certificates from the transport and + // expose them in stats reports. All channels in a transport share the + // same local and remote certificates. 
+ // + StatsReport::Id local_cert_report_id, remote_cert_report_id; + if (transport.local_cert_stats) { + StatsReport* r = + AddCertificateReports(std::move(transport.local_cert_stats)); + if (r) + local_cert_report_id = r->id(); + } + + if (transport.remote_cert_stats) { + StatsReport* r = + AddCertificateReports(std::move(transport.remote_cert_stats)); + if (r) + remote_cert_report_id = r->id(); + } + + for (const auto& channel_iter : transport.stats.channel_stats) { + StatsReport::Id channel_stats_id( + StatsReport::NewComponentId(transport.name, channel_iter.component)); + StatsReport* channel_report = reports_.ReplaceOrAddNew(channel_stats_id); + channel_report->set_timestamp(stats_gathering_started_); + channel_report->AddInt(StatsReport::kStatsValueNameComponent, + channel_iter.component); + if (local_cert_report_id.get()) { + channel_report->AddId(StatsReport::kStatsValueNameLocalCertificateId, + local_cert_report_id); + } + if (remote_cert_report_id.get()) { + channel_report->AddId(StatsReport::kStatsValueNameRemoteCertificateId, + remote_cert_report_id); + } + int srtp_crypto_suite = channel_iter.srtp_crypto_suite; + if (srtp_crypto_suite != rtc::kSrtpInvalidCryptoSuite && + rtc::SrtpCryptoSuiteToName(srtp_crypto_suite).length()) { + channel_report->AddString( + StatsReport::kStatsValueNameSrtpCipher, + rtc::SrtpCryptoSuiteToName(srtp_crypto_suite)); + } + int ssl_cipher_suite = channel_iter.ssl_cipher_suite; + if (ssl_cipher_suite != rtc::kTlsNullWithNullNull && + rtc::SSLStreamAdapter::SslCipherSuiteToName(ssl_cipher_suite) + .length()) { + channel_report->AddString( + StatsReport::kStatsValueNameDtlsCipher, + rtc::SSLStreamAdapter::SslCipherSuiteToName(ssl_cipher_suite)); + } + + // Collect stats for non-pooled candidates. Note that the reports + // generated here supersedes the candidate reports generated in + // AddConnectionInfoReport below, and they may report candidates that are + // not paired. Also, the candidate report generated in + // AddConnectionInfoReport do not report port stats like StunStats. + for (const cricket::CandidateStats& stats : + channel_iter.ice_transport_stats.candidate_stats_list) { + AddCandidateReport(stats, true); + } + + int connection_id = 0; + for (const cricket::ConnectionInfo& info : + channel_iter.ice_transport_stats.connection_infos) { + StatsReport* connection_report = AddConnectionInfoReport( + transport.name, channel_iter.component, connection_id++, + channel_report->id(), info); + if (info.best_connection) { + channel_report->AddId( + StatsReport::kStatsValueNameSelectedCandidatePairId, + connection_report->id()); + } + } + } + } +} + +void LegacyStatsCollector::ExtractBweInfo() { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + + if (pc_->signaling_state() == PeerConnectionInterface::kClosed) + return; + + webrtc::Call::Stats call_stats = pc_->GetCallStats(); + cricket::BandwidthEstimationInfo bwe_info; + bwe_info.available_send_bandwidth = call_stats.send_bandwidth_bps; + bwe_info.available_recv_bandwidth = call_stats.recv_bandwidth_bps; + bwe_info.bucket_delay = call_stats.pacer_delay_ms; + + // Fill in target encoder bitrate, actual encoder bitrate, rtx bitrate, etc. + // TODO(holmer): Also fill this in for audio. 
+ auto transceivers = pc_->GetTransceiversInternal(); + std::vector video_media_channels; + for (const auto& transceiver : transceivers) { + if (transceiver->media_type() != cricket::MEDIA_TYPE_VIDEO) { + continue; + } + auto* video_channel = transceiver->internal()->channel(); + if (video_channel) { + video_media_channels.push_back(static_cast( + video_channel->media_channel())); + } + } + + if (!video_media_channels.empty()) { + pc_->worker_thread()->BlockingCall([&] { + for (const auto& channel : video_media_channels) { + channel->FillBitrateInfo(&bwe_info); + } + }); + } + + StatsReport::Id report_id(StatsReport::NewBandwidthEstimationId()); + StatsReport* report = reports_.FindOrAddNew(report_id); + ExtractStats(bwe_info, stats_gathering_started_, report); +} + +namespace { + +class MediaChannelStatsGatherer { + public: + virtual ~MediaChannelStatsGatherer() = default; + + virtual bool GetStatsOnWorkerThread() = 0; + + virtual void ExtractStats(LegacyStatsCollector* collector) const = 0; + + virtual bool HasRemoteAudio() const = 0; + + std::string mid; + std::string transport_name; + std::map sender_track_id_by_ssrc; + std::map receiver_track_id_by_ssrc; + + protected: + template + void ExtractSenderReceiverStats( + LegacyStatsCollector* collector, + const std::vector& receiver_data, + const std::vector& sender_data) const { + RTC_DCHECK(collector); + StatsReport::Id transport_id = StatsReport::NewComponentId( + transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP); + ExtractStatsFromList(receiver_data, transport_id, collector, + StatsReport::kReceive, receiver_track_id_by_ssrc); + ExtractStatsFromList(sender_data, transport_id, collector, + StatsReport::kSend, sender_track_id_by_ssrc); + } +}; + +class VoiceMediaChannelStatsGatherer final : public MediaChannelStatsGatherer { + public: + VoiceMediaChannelStatsGatherer( + cricket::VoiceMediaChannel* voice_media_channel) + : voice_media_channel_(voice_media_channel) { + RTC_DCHECK(voice_media_channel_); + } + + bool GetStatsOnWorkerThread() override { + return voice_media_channel_->GetStats(&voice_media_info, + /*get_and_clear_legacy_stats=*/true); + } + + void ExtractStats(LegacyStatsCollector* collector) const override { + ExtractSenderReceiverStats(collector, voice_media_info.receivers, + voice_media_info.senders); + if (voice_media_info.device_underrun_count == -2 || + voice_media_info.device_underrun_count > 0) { + StatsReport* report = collector->PrepareADMReport(); + report->AddInt(StatsReport::kStatsValueNameAudioDeviceUnderrunCounter, + voice_media_info.device_underrun_count); + } + } + + bool HasRemoteAudio() const override { + return !voice_media_info.receivers.empty(); + } + + private: + cricket::VoiceMediaChannel* voice_media_channel_; + cricket::VoiceMediaInfo voice_media_info; +}; + +class VideoMediaChannelStatsGatherer final : public MediaChannelStatsGatherer { + public: + VideoMediaChannelStatsGatherer( + cricket::VideoMediaChannel* video_media_channel) + : video_media_channel_(video_media_channel) { + RTC_DCHECK(video_media_channel_); + } + + bool GetStatsOnWorkerThread() override { + return video_media_channel_->GetStats(&video_media_info); + } + + void ExtractStats(LegacyStatsCollector* collector) const override { + ExtractSenderReceiverStats(collector, video_media_info.receivers, + video_media_info.aggregated_senders); + } + + bool HasRemoteAudio() const override { return false; } + + private: + cricket::VideoMediaChannel* video_media_channel_; + cricket::VideoMediaInfo video_media_info; +}; + +std::unique_ptr 
CreateMediaChannelStatsGatherer( + cricket::MediaChannel* channel) { + RTC_DCHECK(channel); + if (channel->media_type() == cricket::MEDIA_TYPE_AUDIO) { + return std::make_unique( + static_cast(channel)); + } else { + RTC_DCHECK_EQ(channel->media_type(), cricket::MEDIA_TYPE_VIDEO); + return std::make_unique( + static_cast(channel)); + } +} + +} // namespace + +void LegacyStatsCollector::ExtractMediaInfo( + const std::map& transport_names_by_mid) { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + + std::vector> gatherers; + + auto transceivers = pc_->GetTransceiversInternal(); + { + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + for (const auto& transceiver : transceivers) { + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (!channel) { + continue; + } + std::unique_ptr gatherer = + CreateMediaChannelStatsGatherer(channel->media_channel()); + gatherer->mid = channel->mid(); + gatherer->transport_name = transport_names_by_mid.at(gatherer->mid); + + for (const auto& sender : transceiver->internal()->senders()) { + auto track = sender->track(); + std::string track_id = (track ? track->id() : ""); + gatherer->sender_track_id_by_ssrc.insert( + std::make_pair(sender->ssrc(), track_id)); + } + + // Populating `receiver_track_id_by_ssrc` will be done on the worker + // thread as the `ssrc` property of the receiver needs to be accessed + // there. + + gatherers.push_back(std::move(gatherer)); + } + } + + pc_->worker_thread()->BlockingCall([&] { + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + // Populate `receiver_track_id_by_ssrc` for the gatherers. + int i = 0; + for (const auto& transceiver : transceivers) { + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (!channel) + continue; + MediaChannelStatsGatherer* gatherer = gatherers[i++].get(); + RTC_DCHECK_EQ(gatherer->mid, channel->mid()); + + for (const auto& receiver : transceiver->internal()->receivers()) { + gatherer->receiver_track_id_by_ssrc.insert(std::make_pair( + receiver->internal()->ssrc(), receiver->track()->id())); + } + } + + for (auto it = gatherers.begin(); it != gatherers.end(); + /* incremented manually */) { + MediaChannelStatsGatherer* gatherer = it->get(); + if (!gatherer->GetStatsOnWorkerThread()) { + RTC_LOG(LS_ERROR) << "Failed to get media channel stats for mid=" + << gatherer->mid; + it = gatherers.erase(it); + continue; + } + ++it; + } + }); + + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + + bool has_remote_audio = false; + for (const auto& gatherer : gatherers) { + gatherer->ExtractStats(this); + has_remote_audio |= gatherer->HasRemoteAudio(); + } + + UpdateStatsFromExistingLocalAudioTracks(has_remote_audio); +} + +void LegacyStatsCollector::ExtractSenderInfo() { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + + for (const auto& sender : pc_->GetSenders()) { + // TODO(bugs.webrtc.org/8694): SSRC == 0 currently means none. Delete check + // when that is fixed. + if (!sender->ssrc()) { + continue; + } + const rtc::scoped_refptr track(sender->track()); + if (!track || track->kind() != MediaStreamTrackInterface::kVideoKind) { + continue; + } + // Safe, because kind() == kVideoKind implies a subclass of + // VideoTrackInterface; see mediastreaminterface.h. 
+ VideoTrackSourceInterface* source = + static_cast(track.get())->GetSource(); + + VideoTrackSourceInterface::Stats stats; + if (!source->GetStats(&stats)) { + continue; + } + const StatsReport::Id stats_id = StatsReport::NewIdWithDirection( + StatsReport::kStatsReportTypeSsrc, rtc::ToString(sender->ssrc()), + StatsReport::kSend); + StatsReport* report = reports_.FindOrAddNew(stats_id); + report->AddInt(StatsReport::kStatsValueNameFrameWidthInput, + stats.input_width); + report->AddInt(StatsReport::kStatsValueNameFrameHeightInput, + stats.input_height); + } +} + +void LegacyStatsCollector::ExtractDataInfo() { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + + std::vector data_stats = pc_->GetDataChannelStats(); + for (const auto& stats : data_stats) { + StatsReport::Id id(StatsReport::NewTypedIntId( + StatsReport::kStatsReportTypeDataChannel, stats.id)); + StatsReport* report = reports_.ReplaceOrAddNew(id); + report->set_timestamp(stats_gathering_started_); + report->AddString(StatsReport::kStatsValueNameLabel, stats.label); + // Filter out the initial id (-1). + if (stats.id >= 0) { + report->AddInt(StatsReport::kStatsValueNameDataChannelId, stats.id); + } + report->AddString(StatsReport::kStatsValueNameProtocol, stats.protocol); + report->AddString(StatsReport::kStatsValueNameState, + DataChannelInterface::DataStateString(stats.state)); + } +} + +StatsReport* LegacyStatsCollector::GetReport(const StatsReport::StatsType& type, + const std::string& id, + StatsReport::Direction direction) { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + RTC_DCHECK(type == StatsReport::kStatsReportTypeSsrc || + type == StatsReport::kStatsReportTypeRemoteSsrc); + return reports_.Find(StatsReport::NewIdWithDirection(type, id, direction)); +} + +void LegacyStatsCollector::UpdateStatsFromExistingLocalAudioTracks( + bool has_remote_tracks) { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + // Loop through the existing local audio tracks. + for (const auto& it : local_audio_tracks_) { + AudioTrackInterface* track = it.first; + uint32_t ssrc = it.second; + StatsReport* report = GetReport(StatsReport::kStatsReportTypeSsrc, + rtc::ToString(ssrc), StatsReport::kSend); + if (report == NULL) { + // This can happen if a local audio track is added to a stream on the + // fly and the report has not been set up yet. Do nothing in this case. + RTC_LOG(LS_ERROR) << "Stats report does not exist for ssrc " << ssrc; + continue; + } + + // The same ssrc can be used by both local and remote audio tracks. + const StatsReport::Value* v = + report->FindValue(StatsReport::kStatsValueNameTrackId); + if (!v || v->string_val() != track->id()) + continue; + + report->set_timestamp(stats_gathering_started_); + UpdateReportFromAudioTrack(track, report, has_remote_tracks); + } +} + +void LegacyStatsCollector::UpdateReportFromAudioTrack( + AudioTrackInterface* track, + StatsReport* report, + bool has_remote_tracks) { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + RTC_DCHECK(track != NULL); + + // Don't overwrite report values if they're not available. 
+ int signal_level; + if (track->GetSignalLevel(&signal_level)) { + RTC_DCHECK_GE(signal_level, 0); + report->AddInt(StatsReport::kStatsValueNameAudioInputLevel, signal_level); + } + + auto audio_processor(track->GetAudioProcessor()); + + if (audio_processor.get()) { + AudioProcessorInterface::AudioProcessorStatistics stats = + audio_processor->GetStats(has_remote_tracks); + + SetAudioProcessingStats(report, stats.apm_statistics); + } +} + +void LegacyStatsCollector::UpdateTrackReports() { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + + for (const auto& entry : track_ids_) { + StatsReport* report = entry.second; + report->set_timestamp(stats_gathering_started_); + } +} + +void LegacyStatsCollector::InvalidateCache() { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + cache_timestamp_ms_ = 0; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/legacy_stats_collector.h b/TMessagesProj/jni/voip/webrtc/pc/legacy_stats_collector.h new file mode 100644 index 0000000000..21f51c5143 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/legacy_stats_collector.h @@ -0,0 +1,216 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains a class used for gathering statistics from an ongoing +// libjingle PeerConnection. + +#ifndef PC_LEGACY_STATS_COLLECTOR_H_ +#define PC_LEGACY_STATS_COLLECTOR_H_ + +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/field_trials_view.h" +#include "api/media_stream_interface.h" +#include "api/peer_connection_interface.h" +#include "api/scoped_refptr.h" +#include "api/stats_types.h" +#include "p2p/base/connection_info.h" +#include "p2p/base/port.h" +#include "pc/legacy_stats_collector_interface.h" +#include "pc/peer_connection_internal.h" +#include "pc/rtp_transceiver.h" +#include "pc/transport_stats.h" +#include "rtc_base/network_constants.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +// Conversion function to convert candidate type string to the corresponding one +// from enum RTCStatsIceCandidateType. +const char* IceCandidateTypeToStatsType(const std::string& candidate_type); + +// Conversion function to convert adapter type to report string which are more +// fitting to the general style of http://w3c.github.io/webrtc-stats. This is +// only used by stats collector. +const char* AdapterTypeToStatsType(rtc::AdapterType type); + +// A mapping between track ids and their StatsReport. +typedef std::map TrackIdMap; + +class LegacyStatsCollector : public LegacyStatsCollectorInterface { + public: + // The caller is responsible for ensuring that the pc outlives the + // LegacyStatsCollector instance. + explicit LegacyStatsCollector(PeerConnectionInternal* pc); + virtual ~LegacyStatsCollector(); + + // Adds a MediaStream with tracks that can be used as a `selector` in a call + // to GetStats. + void AddStream(MediaStreamInterface* stream); + void AddTrack(MediaStreamTrackInterface* track); + + // Adds a local audio track that is used for getting some voice statistics. 
+ void AddLocalAudioTrack(AudioTrackInterface* audio_track, + uint32_t ssrc) override; + + // Removes a local audio tracks that is used for getting some voice + // statistics. + void RemoveLocalAudioTrack(AudioTrackInterface* audio_track, + uint32_t ssrc) override; + + // Gather statistics from the session and store them for future use. + void UpdateStats(PeerConnectionInterface::StatsOutputLevel level); + + // Gets a StatsReports of the last collected stats. Note that UpdateStats must + // be called before this function to get the most recent stats. `selector` is + // a track label or empty string. The most recent reports are stored in + // `reports`. + // TODO(tommi): Change this contract to accept a callback object instead + // of filling in `reports`. As is, there's a requirement that the caller + // uses `reports` immediately without allowing any async activity on + // the thread (message handling etc) and then discard the results. + void GetStats(MediaStreamTrackInterface* track, + StatsReports* reports) override; + + // Prepare a local or remote SSRC report for the given ssrc. Used internally + // in the ExtractStatsFromList template. + StatsReport* PrepareReport(bool local, + uint32_t ssrc, + const std::string& track_id, + const StatsReport::Id& transport_id, + StatsReport::Direction direction); + + StatsReport* PrepareADMReport(); + + // A track is invalid if there is no report data for it. + bool IsValidTrack(const std::string& track_id); + + // Reset the internal cache timestamp to force an update of the stats next + // time UpdateStats() is called. This call needs to be made on the signaling + // thread and should be made every time configuration changes that affect + // stats have been made. + void InvalidateCache(); + + bool UseStandardBytesStats() const { return use_standard_bytes_stats_; } + + private: + friend class LegacyStatsCollectorTest; + + // Struct that's populated on the network thread and carries the values to + // the signaling thread where the stats are added to the stats reports. + struct TransportStats { + TransportStats() = default; + TransportStats(std::string transport_name, + cricket::TransportStats transport_stats) + : name(std::move(transport_name)), stats(std::move(transport_stats)) {} + TransportStats(TransportStats&&) = default; + TransportStats(const TransportStats&) = delete; + + std::string name; + cricket::TransportStats stats; + std::unique_ptr local_cert_stats; + std::unique_ptr remote_cert_stats; + }; + + struct SessionStats { + SessionStats() = default; + SessionStats(SessionStats&&) = default; + SessionStats(const SessionStats&) = delete; + + SessionStats& operator=(SessionStats&&) = default; + SessionStats& operator=(SessionStats&) = delete; + + cricket::CandidateStatsList candidate_stats; + std::vector transport_stats; + std::map transport_names_by_mid; + }; + + // Overridden in unit tests to fake timing. + virtual double GetTimeNow(); + + bool CopySelectedReports(const std::string& selector, StatsReports* reports); + + // Helper method for creating IceCandidate report. `is_local` indicates + // whether this candidate is local or remote. + StatsReport* AddCandidateReport( + const cricket::CandidateStats& candidate_stats, + bool local); + + // Adds a report for this certificate and every certificate in its chain, and + // returns the leaf certificate's report (`cert_stats`'s report). 
+ StatsReport* AddCertificateReports( + std::unique_ptr cert_stats); + + StatsReport* AddConnectionInfoReport(const std::string& content_name, + int component, + int connection_id, + const StatsReport::Id& channel_report_id, + const cricket::ConnectionInfo& info); + + void ExtractDataInfo(); + + // Returns the `transport_names_by_mid` member from the SessionStats as + // gathered and used to populate the stats. + std::map ExtractSessionInfo(); + + void ExtractBweInfo(); + void ExtractMediaInfo( + const std::map& transport_names_by_mid); + void ExtractSenderInfo(); + webrtc::StatsReport* GetReport(const StatsReport::StatsType& type, + const std::string& id, + StatsReport::Direction direction); + + // Helper method to get stats from the local audio tracks. + void UpdateStatsFromExistingLocalAudioTracks(bool has_remote_tracks); + void UpdateReportFromAudioTrack(AudioTrackInterface* track, + StatsReport* report, + bool has_remote_tracks); + + // Helper method to update the timestamp of track records. + void UpdateTrackReports(); + + SessionStats ExtractSessionInfo_n( + const std::vector>>& transceivers, + absl::optional sctp_transport_name, + absl::optional sctp_mid); + void ExtractSessionInfo_s(SessionStats& session_stats); + + // A collection for all of our stats reports. + StatsCollection reports_; + TrackIdMap track_ids_; + // Raw pointer to the peer connection the statistics are gathered from. + PeerConnectionInternal* const pc_; + int64_t cache_timestamp_ms_ RTC_GUARDED_BY(pc_->signaling_thread()) = 0; + double stats_gathering_started_; + const bool use_standard_bytes_stats_; + + // TODO(tommi): We appear to be holding on to raw pointers to reference + // counted objects? We should be using scoped_refptr here. + typedef std::vector> + LocalAudioTrackVector; + LocalAudioTrackVector local_audio_tracks_; +}; + +} // namespace webrtc + +#endif // PC_LEGACY_STATS_COLLECTOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/legacy_stats_collector_interface.h b/TMessagesProj/jni/voip/webrtc/pc/legacy_stats_collector_interface.h new file mode 100644 index 0000000000..3cddb284f8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/pc/legacy_stats_collector_interface.h @@ -0,0 +1,43 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains an interface for the (obsolete) StatsCollector class that +// is used by compilation units that do not wish to depend on the StatsCollector +// implementation. + +#ifndef PC_LEGACY_STATS_COLLECTOR_INTERFACE_H_ +#define PC_LEGACY_STATS_COLLECTOR_INTERFACE_H_ + +#include + +#include "api/media_stream_interface.h" +#include "api/stats_types.h" + +namespace webrtc { + +class LegacyStatsCollectorInterface { + public: + virtual ~LegacyStatsCollectorInterface() {} + + // Adds a local audio track that is used for getting some voice statistics. + virtual void AddLocalAudioTrack(AudioTrackInterface* audio_track, + uint32_t ssrc) = 0; + + // Removes a local audio tracks that is used for getting some voice + // statistics. 
+ virtual void RemoveLocalAudioTrack(AudioTrackInterface* audio_track, + uint32_t ssrc) = 0; + virtual void GetStats(MediaStreamTrackInterface* track, + StatsReports* reports) = 0; +}; + +} // namespace webrtc + +#endif // PC_LEGACY_STATS_COLLECTOR_INTERFACE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/local_audio_source.cc b/TMessagesProj/jni/voip/webrtc/pc/local_audio_source.cc index 3fcad50a1d..51949f7f4d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/local_audio_source.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/local_audio_source.cc @@ -10,8 +10,6 @@ #include "pc/local_audio_source.h" -#include "rtc_base/ref_counted_object.h" - using webrtc::MediaSourceInterface; namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_protocol_names.cc b/TMessagesProj/jni/voip/webrtc/pc/media_protocol_names.cc index 667535bcbd..52d676daf5 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_protocol_names.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/media_protocol_names.cc @@ -77,7 +77,7 @@ bool IsRtpProtocol(absl::string_view protocol) { return false; } // RTP must be at the beginning of a string or not preceded by alpha - if (pos == 0 || !isalpha(protocol[pos - 1])) { + if (pos == 0 || !isalpha(static_cast(protocol[pos - 1]))) { return true; } return false; diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_session.cc b/TMessagesProj/jni/voip/webrtc/pc/media_session.cc index 45cedfb05d..e703b44101 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_session.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/media_session.cc @@ -14,8 +14,7 @@ #include #include -#include -#include +#include #include #include @@ -24,13 +23,12 @@ #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/crypto_params.h" -#include "api/video_codecs/h264_profile_level_id.h" #include "media/base/codec.h" #include "media/base/media_constants.h" +#include "media/base/media_engine.h" #include "media/base/sdp_video_format_utils.h" #include "media/sctp/sctp_transport_internal.h" #include "p2p/base/p2p_constants.h" -#include "pc/channel_manager.h" #include "pc/media_protocol_names.h" #include "pc/rtp_media_utils.h" #include "pc/used_ids.h" @@ -41,7 +39,6 @@ #include "rtc_base/string_encode.h" #include "rtc_base/third_party/base64/base64.h" #include "rtc_base/unique_id_generator.h" -#include "system_wrappers/include/field_trial.h" namespace { @@ -303,7 +300,8 @@ static StreamParams CreateStreamParamsForNewSenderWithSsrcs( const std::string& rtcp_cname, bool include_rtx_streams, bool include_flexfec_stream, - UniqueRandomIdGenerator* ssrc_generator) { + UniqueRandomIdGenerator* ssrc_generator, + const webrtc::FieldTrialsView& field_trials) { StreamParams result; result.id = sender.track_id; @@ -315,8 +313,7 @@ static StreamParams CreateStreamParamsForNewSenderWithSsrcs( "a single media streams. This session has multiple " "media streams however, so no FlexFEC SSRC will be generated."; } - if (include_flexfec_stream && - !webrtc::field_trial::IsEnabled("WebRTC-FlexFEC-03")) { + if (include_flexfec_stream && !field_trials.IsEnabled("WebRTC-FlexFEC-03")) { include_flexfec_stream = false; RTC_LOG(LS_WARNING) << "WebRTC-FlexFEC trial is not enabled, not sending FlexFEC"; @@ -398,12 +395,12 @@ static void AddSimulcastToMediaDescription( // content_description. // `current_params` - All currently known StreamParams of any media type. 
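The isalpha() change above avoids undefined behaviour: a plain char may be negative, while the C character-classification functions require a value representable as unsigned char (or EOF). The cast's template argument is elided in this rendering; presumably it is unsigned char, matching the usual idiom:

#include <cctype>

// Sketch of the conventional safe-isalpha idiom (assumed to be what the
// patch's static_cast does).
bool IsAsciiAlpha(char c) {
  return std::isalpha(static_cast<unsigned char>(c)) != 0;
}
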
template -static bool AddStreamParams( - const std::vector& sender_options, - const std::string& rtcp_cname, - UniqueRandomIdGenerator* ssrc_generator, - StreamParamsVec* current_streams, - MediaContentDescriptionImpl* content_description) { +static bool AddStreamParams(const std::vector& sender_options, + const std::string& rtcp_cname, + UniqueRandomIdGenerator* ssrc_generator, + StreamParamsVec* current_streams, + MediaContentDescriptionImpl* content_description, + const webrtc::FieldTrialsView& field_trials) { // SCTP streams are not negotiated using SDP/ContentDescriptions. if (IsSctpProtocol(content_description->protocol())) { return true; @@ -425,7 +422,7 @@ static bool AddStreamParams( // Signal SSRCs and legacy simulcast (if requested). CreateStreamParamsForNewSenderWithSsrcs( sender, rtcp_cname, include_rtx_streams, - include_flexfec_stream, ssrc_generator) + include_flexfec_stream, ssrc_generator, field_trials) : // Signal RIDs and spec-compliant simulcast (if requested). CreateStreamParamsForNewSenderWithRids(sender, rtcp_cname); @@ -648,6 +645,16 @@ static bool IsFlexfecCodec(const C& codec) { return absl::EqualsIgnoreCase(codec.name, kFlexfecCodecName); } +template +static bool IsUlpfecCodec(const C& codec) { + return absl::EqualsIgnoreCase(codec.name, kUlpfecCodecName); +} + +template +static bool IsComfortNoiseCodec(const C& codec) { + return absl::EqualsIgnoreCase(codec.name, kComfortNoiseCodecName); +} + // Create a media content to be offered for the given `sender_options`, // according to the given options.rtcp_mux, session_options.is_muc, codecs, // secure_transport, crypto, and current_streams. If we don't currently have @@ -713,11 +720,12 @@ static bool CreateMediaContentOffer( const RtpHeaderExtensions& rtp_extensions, UniqueRandomIdGenerator* ssrc_generator, StreamParamsVec* current_streams, - MediaContentDescriptionImpl* offer) { + MediaContentDescriptionImpl* offer, + const webrtc::FieldTrialsView& field_trials) { offer->AddCodecs(codecs); if (!AddStreamParams(media_description_options.sender_options, session_options.rtcp_cname, ssrc_generator, - current_streams, offer)) { + current_streams, offer, field_trials)) { return false; } @@ -731,10 +739,12 @@ template static bool ReferencedCodecsMatch(const std::vector& codecs1, const int codec1_id, const std::vector& codecs2, - const int codec2_id) { + const int codec2_id, + const webrtc::FieldTrialsView* field_trials) { const C* codec1 = FindCodecById(codecs1, codec1_id); const C* codec2 = FindCodecById(codecs2, codec2_id); - return codec1 != nullptr && codec2 != nullptr && codec1->Matches(*codec2); + return codec1 != nullptr && codec2 != nullptr && + codec1->Matches(*codec2, field_trials); } template @@ -754,12 +764,14 @@ template static void NegotiateCodecs(const std::vector& local_codecs, const std::vector& offered_codecs, std::vector* negotiated_codecs, - bool keep_offer_order) { + bool keep_offer_order, + const webrtc::FieldTrialsView* field_trials) { for (const C& ours : local_codecs) { C theirs; // Note that we intentionally only find one matching codec for each of our // local codecs, in case the remote offer contains duplicate codecs. 
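The recurring change through these hunks is dependency injection of field trials: the codec helpers now take a field-trials argument instead of consulting the process-global webrtc::field_trial registry, so per-PeerConnection trials can apply. A minimal sketch of that shape, with the trials type reduced to a hypothetical single-method interface (the real FieldTrialsView API is richer):

#include <string>

class FieldTrialsViewLike {
 public:
  virtual ~FieldTrialsViewLike() = default;
  virtual bool IsEnabled(const std::string& key) const = 0;
};

// Mirrors the decision made in CreateStreamParamsForNewSenderWithSsrcs():
// only keep the FlexFEC stream when the trial is enabled for this connection.
bool ShouldOfferFlexfec(const FieldTrialsViewLike& trials, bool want_flexfec) {
  return want_flexfec && trials.IsEnabled("WebRTC-FlexFEC-03");
}
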
- if (FindMatchingCodec(local_codecs, offered_codecs, ours, &theirs)) { + if (FindMatchingCodec(local_codecs, offered_codecs, ours, &theirs, + field_trials)) { C negotiated = ours; NegotiatePacketization(ours, theirs, &negotiated); negotiated.IntersectFeedbackParams(theirs); @@ -815,7 +827,8 @@ template static bool FindMatchingCodec(const std::vector& codecs1, const std::vector& codecs2, const C& codec_to_match, - C* found_codec) { + C* found_codec, + const webrtc::FieldTrialsView* field_trials) { // `codec_to_match` should be a member of `codecs1`, in order to look up // RED/RTX codecs' associated codecs correctly. If not, that's a programming // error. @@ -823,7 +836,7 @@ static bool FindMatchingCodec(const std::vector& codecs1, return &codec == &codec_to_match; })); for (const C& potential_match : codecs2) { - if (potential_match.Matches(codec_to_match)) { + if (potential_match.Matches(codec_to_match, field_trials)) { if (IsRtxCodec(codec_to_match)) { int apt_value_1 = 0; int apt_value_2 = 0; @@ -834,8 +847,8 @@ static bool FindMatchingCodec(const std::vector& codecs1, RTC_LOG(LS_WARNING) << "RTX missing associated payload type."; continue; } - if (!ReferencedCodecsMatch(codecs1, apt_value_1, codecs2, - apt_value_2)) { + if (!ReferencedCodecsMatch(codecs1, apt_value_1, codecs2, apt_value_2, + field_trials)) { continue; } } else if (IsRedCodec(codec_to_match)) { @@ -850,10 +863,10 @@ static bool FindMatchingCodec(const std::vector& codecs1, // Mixed reference codecs (i.e. 111/112) are not supported. // Different levels of redundancy between offer and answer are // since RED is considered to be declarative. - std::vector redundant_payloads_1; - std::vector redundant_payloads_2; - rtc::split(red_parameters_1->second, '/', &redundant_payloads_1); - rtc::split(red_parameters_2->second, '/', &redundant_payloads_2); + std::vector redundant_payloads_1 = + rtc::split(red_parameters_1->second, '/'); + std::vector redundant_payloads_2 = + rtc::split(red_parameters_2->second, '/'); if (redundant_payloads_1.size() > 0 && redundant_payloads_2.size() > 0) { bool consistent = true; @@ -878,7 +891,7 @@ static bool FindMatchingCodec(const std::vector& codecs1, if (rtc::FromString(redundant_payloads_1[0], &red_value_1) && rtc::FromString(redundant_payloads_2[0], &red_value_2)) { if (!ReferencedCodecsMatch(codecs1, red_value_1, codecs2, - red_value_2)) { + red_value_2, field_trials)) { continue; } } @@ -938,13 +951,12 @@ static const C* GetAssociatedCodecForRed(const std::vector& codec_list, return nullptr; } - std::vector redundant_payloads; - rtc::split(fmtp, '/', &redundant_payloads); + std::vector redundant_payloads = rtc::split(fmtp, '/'); if (redundant_payloads.size() < 2) { return nullptr; } - std::string associated_pt_str = redundant_payloads[0]; + absl::string_view associated_pt_str = redundant_payloads[0]; int associated_pt; if (!rtc::FromString(associated_pt_str, &associated_pt)) { RTC_LOG(LS_WARNING) << "Couldn't convert first payload type " @@ -969,14 +981,15 @@ static const C* GetAssociatedCodecForRed(const std::vector& codec_list, template static void MergeCodecs(const std::vector& reference_codecs, std::vector* offered_codecs, - UsedPayloadTypes* used_pltypes) { + UsedPayloadTypes* used_pltypes, + const webrtc::FieldTrialsView* field_trials) { // Add all new codecs that are not RTX/RED codecs. // The two-pass splitting of the loops means preferring payload types // of actual codecs with respect to collisions. 
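The RED handling above depends on the fmtp convention where the value lists redundant payload types separated by '/', e.g. "111/111", with the first entry naming the primary codec. A stand-alone parsing sketch, not the patch's code (the patch uses rtc::split(), which now returns string views; std::stoi below will throw on malformed input):

#include <sstream>
#include <string>
#include <vector>

// ParseRedFmtp("111/111") -> {111, 111}; offer and answer are treated as
// matching when the referenced payload types resolve to matching codecs.
std::vector<int> ParseRedFmtp(const std::string& fmtp) {
  std::vector<int> payload_types;
  std::stringstream ss(fmtp);
  std::string part;
  while (std::getline(ss, part, '/'))
    payload_types.push_back(std::stoi(part));
  return payload_types;
}
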
for (const C& reference_codec : reference_codecs) { if (!IsRtxCodec(reference_codec) && !IsRedCodec(reference_codec) && !FindMatchingCodec(reference_codecs, *offered_codecs, - reference_codec, nullptr)) { + reference_codec, nullptr, field_trials)) { C codec = reference_codec; used_pltypes->FindAndSetIdUsed(&codec); offered_codecs->push_back(codec); @@ -987,7 +1000,7 @@ static void MergeCodecs(const std::vector& reference_codecs, for (const C& reference_codec : reference_codecs) { if (IsRtxCodec(reference_codec) && !FindMatchingCodec(reference_codecs, *offered_codecs, - reference_codec, nullptr)) { + reference_codec, nullptr, field_trials)) { C rtx_codec = reference_codec; const C* associated_codec = GetAssociatedCodecForRtx(reference_codecs, rtx_codec); @@ -998,7 +1011,8 @@ static void MergeCodecs(const std::vector& reference_codecs, // Its payload type may be different than the reference codec. C matching_codec; if (!FindMatchingCodec(reference_codecs, *offered_codecs, - *associated_codec, &matching_codec)) { + *associated_codec, &matching_codec, + field_trials)) { RTC_LOG(LS_WARNING) << "Couldn't find matching " << associated_codec->name << " codec."; continue; @@ -1010,14 +1024,15 @@ static void MergeCodecs(const std::vector& reference_codecs, offered_codecs->push_back(rtx_codec); } else if (IsRedCodec(reference_codec) && !FindMatchingCodec(reference_codecs, *offered_codecs, - reference_codec, nullptr)) { + reference_codec, nullptr, field_trials)) { C red_codec = reference_codec; const C* associated_codec = GetAssociatedCodecForRed(reference_codecs, red_codec); if (associated_codec) { C matching_codec; if (!FindMatchingCodec(reference_codecs, *offered_codecs, - *associated_codec, &matching_codec)) { + *associated_codec, &matching_codec, + field_trials)) { RTC_LOG(LS_WARNING) << "Couldn't find matching " << associated_codec->name << " codec."; continue; @@ -1041,7 +1056,8 @@ template static Codecs MatchCodecPreference( const std::vector& codec_preferences, const Codecs& codecs, - const Codecs& supported_codecs) { + const Codecs& supported_codecs, + const webrtc::FieldTrialsView* field_trials) { Codecs filtered_codecs; bool want_rtx = false; bool want_red = false; @@ -1070,7 +1086,7 @@ static Codecs MatchCodecPreference( if (found_codec != supported_codecs.end()) { typename Codecs::value_type found_codec_with_correct_pt; if (FindMatchingCodec(supported_codecs, codecs, *found_codec, - &found_codec_with_correct_pt)) { + &found_codec_with_correct_pt, field_trials)) { filtered_codecs.push_back(found_codec_with_correct_pt); std::string id = rtc::ToString(found_codec_with_correct_pt.id); // Search for the matching rtx or red codec. @@ -1090,8 +1106,8 @@ static Codecs MatchCodecPreference( const auto fmtp = codec.params.find(cricket::kCodecParamNotInNameValueFormat); if (fmtp != codec.params.end()) { - std::vector redundant_payloads; - rtc::split(fmtp->second, '/', &redundant_payloads); + std::vector redundant_payloads = + rtc::split(fmtp->second, '/'); if (redundant_payloads.size() > 0 && redundant_payloads[0] == id) { if (std::find(filtered_codecs.begin(), filtered_codecs.end(), @@ -1111,6 +1127,26 @@ static Codecs MatchCodecPreference( return filtered_codecs; } +// Compute the union of `codecs1` and `codecs2`. 
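ComputeCodecsUnion(), defined just below, merges two codec lists without producing duplicate payload IDs: codecs from the first list keep their IDs, and MergeCodecs() adds the remainder while fixing collisions. A simplified stand-alone version of the same idea (stand-in codec type, name-based equality, and no payload-type remapping, unlike the real helper):

#include <set>
#include <string>
#include <vector>

struct SimpleCodec {
  int id;
  std::string name;
};

std::vector<SimpleCodec> UnionOfCodecs(const std::vector<SimpleCodec>& first,
                                       const std::vector<SimpleCodec>& second) {
  std::vector<SimpleCodec> all = first;  // first list keeps its payload IDs
  std::set<std::string> seen;
  for (const SimpleCodec& c : first)
    seen.insert(c.name);
  for (const SimpleCodec& c : second) {
    if (seen.insert(c.name).second)  // skip codecs already present
      all.push_back(c);
  }
  return all;
}
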
+template +std::vector ComputeCodecsUnion(const std::vector& codecs1, + const std::vector& codecs2, + const webrtc::FieldTrialsView* field_trials) { + std::vector all_codecs; + UsedPayloadTypes used_payload_types; + for (const C& codec : codecs1) { + C codec_mutable = codec; + used_payload_types.FindAndSetIdUsed(&codec_mutable); + all_codecs.push_back(codec_mutable); + } + + // Use MergeCodecs to merge the second half of our list as it already checks + // and fixes problems with duplicate payload types. + MergeCodecs(codecs2, &all_codecs, &used_payload_types, field_trials); + + return all_codecs; +} + // Adds all extensions from `reference_extensions` to `offered_extensions` that // don't already exist in `offered_extensions` and ensure the IDs don't // collide. If an extension is added, it's also added to `regular_extensions` or @@ -1320,8 +1356,7 @@ static void NegotiateRtpHeaderExtensions( static void StripCNCodecs(AudioCodecs* audio_codecs) { audio_codecs->erase(std::remove_if(audio_codecs->begin(), audio_codecs->end(), [](const AudioCodec& codec) { - return absl::EqualsIgnoreCase( - codec.name, kComfortNoiseCodecName); + return IsComfortNoiseCodec(codec); }), audio_codecs->end()); } @@ -1334,15 +1369,17 @@ static bool SetCodecsInAnswer( const MediaSessionOptions& session_options, UniqueRandomIdGenerator* ssrc_generator, StreamParamsVec* current_streams, - MediaContentDescriptionImpl* answer) { + MediaContentDescriptionImpl* answer, + const webrtc::FieldTrialsView& field_trials) { std::vector negotiated_codecs; NegotiateCodecs(local_codecs, offer->codecs(), &negotiated_codecs, - media_description_options.codec_preferences.empty()); + media_description_options.codec_preferences.empty(), + &field_trials); answer->AddCodecs(negotiated_codecs); answer->set_protocol(offer->protocol()); if (!AddStreamParams(media_description_options.sender_options, session_options.rtcp_cname, ssrc_generator, - current_streams, answer)) { + current_streams, answer, field_trials)) { return false; // Something went seriously wrong. 
} return true; @@ -1528,19 +1565,20 @@ MediaSessionDescriptionFactory::MediaSessionDescriptionFactory( const TransportDescriptionFactory* transport_desc_factory, rtc::UniqueRandomIdGenerator* ssrc_generator) : ssrc_generator_(ssrc_generator), - transport_desc_factory_(transport_desc_factory) { - RTC_DCHECK(ssrc_generator_); -} + transport_desc_factory_(transport_desc_factory) {} MediaSessionDescriptionFactory::MediaSessionDescriptionFactory( - ChannelManager* channel_manager, - const TransportDescriptionFactory* transport_desc_factory, - rtc::UniqueRandomIdGenerator* ssrc_generator) + cricket::MediaEngineInterface* media_engine, + bool rtx_enabled, + rtc::UniqueRandomIdGenerator* ssrc_generator, + const TransportDescriptionFactory* transport_desc_factory) : MediaSessionDescriptionFactory(transport_desc_factory, ssrc_generator) { - channel_manager->GetSupportedAudioSendCodecs(&audio_send_codecs_); - channel_manager->GetSupportedAudioReceiveCodecs(&audio_recv_codecs_); - channel_manager->GetSupportedVideoSendCodecs(&video_send_codecs_); - channel_manager->GetSupportedVideoReceiveCodecs(&video_recv_codecs_); + if (media_engine) { + audio_send_codecs_ = media_engine->voice().send_codecs(); + audio_recv_codecs_ = media_engine->voice().recv_codecs(); + video_send_codecs_ = media_engine->video().send_codecs(rtx_enabled); + video_recv_codecs_ = media_engine->video().recv_codecs(rtx_enabled); + } ComputeAudioCodecsIntersectionAndUnion(); ComputeVideoCodecsIntersectionAndUnion(); } @@ -2021,16 +2059,19 @@ void MergeCodecsFromDescription( const std::vector& current_active_contents, AudioCodecs* audio_codecs, VideoCodecs* video_codecs, - UsedPayloadTypes* used_pltypes) { + UsedPayloadTypes* used_pltypes, + const webrtc::FieldTrialsView* field_trials) { for (const ContentInfo* content : current_active_contents) { if (IsMediaContentOfType(content, MEDIA_TYPE_AUDIO)) { const AudioContentDescription* audio = content->media_description()->as_audio(); - MergeCodecs(audio->codecs(), audio_codecs, used_pltypes); + MergeCodecs(audio->codecs(), audio_codecs, used_pltypes, + field_trials); } else if (IsMediaContentOfType(content, MEDIA_TYPE_VIDEO)) { const VideoContentDescription* video = content->media_description()->as_video(); - MergeCodecs(video->codecs(), video_codecs, used_pltypes); + MergeCodecs(video->codecs(), video_codecs, used_pltypes, + field_trials); } } } @@ -2045,16 +2086,20 @@ void MediaSessionDescriptionFactory::GetCodecsForOffer( const std::vector& current_active_contents, AudioCodecs* audio_codecs, VideoCodecs* video_codecs) const { + const webrtc::FieldTrialsView* field_trials = + &transport_desc_factory_->trials(); // First - get all codecs from the current description if the media type // is used. Add them to `used_pltypes` so the payload type is not reused if a // new media type is added. UsedPayloadTypes used_pltypes; MergeCodecsFromDescription(current_active_contents, audio_codecs, - video_codecs, &used_pltypes); + video_codecs, &used_pltypes, field_trials); // Add our codecs that are not in the current description. 
- MergeCodecs(all_audio_codecs_, audio_codecs, &used_pltypes); - MergeCodecs(all_video_codecs_, video_codecs, &used_pltypes); + MergeCodecs(all_audio_codecs_, audio_codecs, &used_pltypes, + field_trials); + MergeCodecs(all_video_codecs_, video_codecs, &used_pltypes, + field_trials); } // Getting codecs for an answer involves these steps: @@ -2069,12 +2114,14 @@ void MediaSessionDescriptionFactory::GetCodecsForAnswer( const SessionDescription& remote_offer, AudioCodecs* audio_codecs, VideoCodecs* video_codecs) const { + const webrtc::FieldTrialsView* field_trials = + &transport_desc_factory_->trials(); // First - get all codecs from the current description if the media type // is used. Add them to `used_pltypes` so the payload type is not reused if a // new media type is added. UsedPayloadTypes used_pltypes; MergeCodecsFromDescription(current_active_contents, audio_codecs, - video_codecs, &used_pltypes); + video_codecs, &used_pltypes, field_trials); // Second - filter out codecs that we don't support at all and should ignore. AudioCodecs filtered_offered_audio_codecs; @@ -2084,11 +2131,12 @@ void MediaSessionDescriptionFactory::GetCodecsForAnswer( const AudioContentDescription* audio = content.media_description()->as_audio(); for (const AudioCodec& offered_audio_codec : audio->codecs()) { - if (!FindMatchingCodec(audio->codecs(), - filtered_offered_audio_codecs, - offered_audio_codec, nullptr) && + if (!FindMatchingCodec( + audio->codecs(), filtered_offered_audio_codecs, + offered_audio_codec, nullptr, field_trials) && FindMatchingCodec(audio->codecs(), all_audio_codecs_, - offered_audio_codec, nullptr)) { + offered_audio_codec, nullptr, + field_trials)) { filtered_offered_audio_codecs.push_back(offered_audio_codec); } } @@ -2096,11 +2144,12 @@ void MediaSessionDescriptionFactory::GetCodecsForAnswer( const VideoContentDescription* video = content.media_description()->as_video(); for (const VideoCodec& offered_video_codec : video->codecs()) { - if (!FindMatchingCodec(video->codecs(), - filtered_offered_video_codecs, - offered_video_codec, nullptr) && + if (!FindMatchingCodec( + video->codecs(), filtered_offered_video_codecs, + offered_video_codec, nullptr, field_trials) && FindMatchingCodec(video->codecs(), all_video_codecs_, - offered_video_codec, nullptr)) { + offered_video_codec, nullptr, + field_trials)) { filtered_offered_video_codecs.push_back(offered_video_codec); } } @@ -2110,9 +2159,9 @@ void MediaSessionDescriptionFactory::GetCodecsForAnswer( // Add codecs that are not in the current description but were in // `remote_offer`. MergeCodecs(filtered_offered_audio_codecs, audio_codecs, - &used_pltypes); + &used_pltypes, field_trials); MergeCodecs(filtered_offered_video_codecs, video_codecs, - &used_pltypes); + &used_pltypes, field_trials); } MediaSessionDescriptionFactory::AudioVideoRtpHeaderExtensions @@ -2254,6 +2303,8 @@ bool MediaSessionDescriptionFactory::AddAudioContentForOffer( StreamParamsVec* current_streams, SessionDescription* desc, IceCredentialsIterator* ice_credentials) const { + const webrtc::FieldTrialsView* field_trials = + &transport_desc_factory_->trials(); // Filter audio_codecs (which includes all codecs, with correctly remapped // payload types) based on transceiver direction. const AudioCodecs& supported_audio_codecs = @@ -2264,9 +2315,9 @@ bool MediaSessionDescriptionFactory::AddAudioContentForOffer( if (!media_description_options.codec_preferences.empty()) { // Add the codecs from the current transceiver's codec preferences. 
// They override any existing codecs from previous negotiations. - filtered_codecs = - MatchCodecPreference(media_description_options.codec_preferences, - audio_codecs, supported_audio_codecs); + filtered_codecs = MatchCodecPreference( + media_description_options.codec_preferences, audio_codecs, + supported_audio_codecs, field_trials); } else { // Add the codecs from current content if it exists and is not rejected nor // recycled. @@ -2277,7 +2328,7 @@ bool MediaSessionDescriptionFactory::AddAudioContentForOffer( current_content->media_description()->as_audio(); for (const AudioCodec& codec : acd->codecs()) { if (FindMatchingCodec(acd->codecs(), audio_codecs, codec, - nullptr)) { + nullptr, field_trials)) { filtered_codecs.push_back(codec); } } @@ -2286,9 +2337,10 @@ bool MediaSessionDescriptionFactory::AddAudioContentForOffer( AudioCodec found_codec; for (const AudioCodec& codec : supported_audio_codecs) { if (FindMatchingCodec(supported_audio_codecs, audio_codecs, - codec, &found_codec) && + codec, &found_codec, field_trials) && !FindMatchingCodec(supported_audio_codecs, - filtered_codecs, codec, nullptr)) { + filtered_codecs, codec, nullptr, + field_trials)) { // Use the `found_codec` from `audio_codecs` because it has the // correctly mapped payload type. filtered_codecs.push_back(found_codec); @@ -2308,11 +2360,11 @@ bool MediaSessionDescriptionFactory::AddAudioContentForOffer( std::vector crypto_suites; GetSupportedAudioSdesCryptoSuiteNames(session_options.crypto_options, &crypto_suites); - if (!CreateMediaContentOffer(media_description_options, session_options, - filtered_codecs, sdes_policy, - GetCryptos(current_content), crypto_suites, - audio_rtp_extensions, ssrc_generator_, - current_streams, audio.get())) { + if (!CreateMediaContentOffer( + media_description_options, session_options, filtered_codecs, + sdes_policy, GetCryptos(current_content), crypto_suites, + audio_rtp_extensions, ssrc_generator(), current_streams, audio.get(), + transport_desc_factory_->trials())) { return false; } @@ -2344,6 +2396,8 @@ bool MediaSessionDescriptionFactory::AddVideoContentForOffer( StreamParamsVec* current_streams, SessionDescription* desc, IceCredentialsIterator* ice_credentials) const { + const webrtc::FieldTrialsView* field_trials = + &transport_desc_factory_->trials(); // Filter video_codecs (which includes all codecs, with correctly remapped // payload types) based on transceiver direction. const VideoCodecs& supported_video_codecs = @@ -2354,9 +2408,9 @@ bool MediaSessionDescriptionFactory::AddVideoContentForOffer( if (!media_description_options.codec_preferences.empty()) { // Add the codecs from the current transceiver's codec preferences. // They override any existing codecs from previous negotiations. - filtered_codecs = - MatchCodecPreference(media_description_options.codec_preferences, - video_codecs, supported_video_codecs); + filtered_codecs = MatchCodecPreference( + media_description_options.codec_preferences, video_codecs, + supported_video_codecs, field_trials); } else { // Add the codecs from current content if it exists and is not rejected nor // recycled. 
@@ -2367,7 +2421,7 @@ bool MediaSessionDescriptionFactory::AddVideoContentForOffer( current_content->media_description()->as_video(); for (const VideoCodec& codec : vcd->codecs()) { if (FindMatchingCodec(vcd->codecs(), video_codecs, codec, - nullptr)) { + nullptr, field_trials)) { filtered_codecs.push_back(codec); } } @@ -2376,9 +2430,10 @@ bool MediaSessionDescriptionFactory::AddVideoContentForOffer( VideoCodec found_codec; for (const VideoCodec& codec : supported_video_codecs) { if (FindMatchingCodec(supported_video_codecs, video_codecs, - codec, &found_codec) && + codec, &found_codec, field_trials) && !FindMatchingCodec(supported_video_codecs, - filtered_codecs, codec, nullptr)) { + filtered_codecs, codec, nullptr, + field_trials)) { // Use the `found_codec` from `video_codecs` because it has the // correctly mapped payload type. if (IsRtxCodec(codec)) { @@ -2390,9 +2445,9 @@ bool MediaSessionDescriptionFactory::AddVideoContentForOffer( // Find the codec we should be referencing and point to it. VideoCodec changed_referenced_codec; - if (FindMatchingCodec(supported_video_codecs, - filtered_codecs, *referenced_codec, - &changed_referenced_codec)) { + if (FindMatchingCodec( + supported_video_codecs, filtered_codecs, *referenced_codec, + &changed_referenced_codec, field_trials)) { found_codec.SetParam(kCodecParamAssociatedPayloadType, changed_referenced_codec.id); } @@ -2417,11 +2472,11 @@ bool MediaSessionDescriptionFactory::AddVideoContentForOffer( std::vector crypto_suites; GetSupportedVideoSdesCryptoSuiteNames(session_options.crypto_options, &crypto_suites); - if (!CreateMediaContentOffer(media_description_options, session_options, - filtered_codecs, sdes_policy, - GetCryptos(current_content), crypto_suites, - video_rtp_extensions, ssrc_generator_, - current_streams, video.get())) { + if (!CreateMediaContentOffer( + media_description_options, session_options, filtered_codecs, + sdes_policy, GetCryptos(current_content), crypto_suites, + video_rtp_extensions, ssrc_generator(), current_streams, video.get(), + transport_desc_factory_->trials())) { return false; } @@ -2473,8 +2528,8 @@ bool MediaSessionDescriptionFactory::AddDataContentForOffer( if (!CreateContentOffer(media_description_options, session_options, sdes_policy, GetCryptos(current_content), - crypto_suites, RtpHeaderExtensions(), ssrc_generator_, - current_streams, data.get())) { + crypto_suites, RtpHeaderExtensions(), + ssrc_generator(), current_streams, data.get())) { return false; } @@ -2538,6 +2593,8 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer( StreamParamsVec* current_streams, SessionDescription* answer, IceCredentialsIterator* ice_credentials) const { + const webrtc::FieldTrialsView* field_trials = + &transport_desc_factory_->trials(); RTC_CHECK(IsMediaContentOfType(offer_content, MEDIA_TYPE_AUDIO)); const AudioContentDescription* offer_audio_description = offer_content->media_description()->as_audio(); @@ -2562,9 +2619,9 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer( AudioCodecs filtered_codecs; if (!media_description_options.codec_preferences.empty()) { - filtered_codecs = - MatchCodecPreference(media_description_options.codec_preferences, - audio_codecs, supported_audio_codecs); + filtered_codecs = MatchCodecPreference( + media_description_options.codec_preferences, audio_codecs, + supported_audio_codecs, field_trials); } else { // Add the codecs from current content if it exists and is not rejected nor // recycled. 
@@ -2575,7 +2632,7 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer( current_content->media_description()->as_audio(); for (const AudioCodec& codec : acd->codecs()) { if (FindMatchingCodec(acd->codecs(), audio_codecs, codec, - nullptr)) { + nullptr, field_trials)) { filtered_codecs.push_back(codec); } } @@ -2583,9 +2640,10 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer( // Add other supported audio codecs. for (const AudioCodec& codec : supported_audio_codecs) { if (FindMatchingCodec(supported_audio_codecs, audio_codecs, - codec, nullptr) && + codec, nullptr, field_trials) && !FindMatchingCodec(supported_audio_codecs, - filtered_codecs, codec, nullptr)) { + filtered_codecs, codec, nullptr, + field_trials)) { // We should use the local codec with local parameters and the codec id // would be correctly mapped in `NegotiateCodecs`. filtered_codecs.push_back(codec); @@ -2597,6 +2655,13 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer( StripCNCodecs(&filtered_codecs); } + // Determine if we have media codecs in common. + bool has_common_media_codecs = + std::find_if(filtered_codecs.begin(), filtered_codecs.end(), + [](const AudioCodec& c) { + return !(IsRedCodec(c) || IsComfortNoiseCodec(c)); + }) != filtered_codecs.end(); + bool bundle_enabled = offer_description->HasGroup(GROUP_TYPE_BUNDLE) && session_options.bundle_enabled; auto audio_answer = std::make_unique(); @@ -2605,15 +2670,15 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer( audio_transport->secure() ? cricket::SEC_DISABLED : secure(); if (!SetCodecsInAnswer(offer_audio_description, filtered_codecs, media_description_options, session_options, - ssrc_generator_, current_streams, - audio_answer.get())) { + ssrc_generator(), current_streams, audio_answer.get(), + transport_desc_factory_->trials())) { return false; } if (!CreateMediaContentAnswer( offer_audio_description, media_description_options, session_options, sdes_policy, GetCryptos(current_content), filtered_rtp_header_extensions(default_audio_rtp_header_extensions), - ssrc_generator_, enable_encrypted_rtp_header_extensions_, + ssrc_generator(), enable_encrypted_rtp_header_extensions_, current_streams, bundle_enabled, audio_answer.get())) { return false; // Fails the session setup. } @@ -2621,7 +2686,7 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer( bool secure = bundle_transport ? 
bundle_transport->description.secure() : audio_transport->secure(); bool rejected = media_description_options.stopped || - offer_content->rejected || + offer_content->rejected || !has_common_media_codecs || !IsMediaProtocolSupported(MEDIA_TYPE_AUDIO, audio_answer->protocol(), secure); if (!AddTransportAnswer(media_description_options.mid, @@ -2654,6 +2719,8 @@ bool MediaSessionDescriptionFactory::AddVideoContentForAnswer( StreamParamsVec* current_streams, SessionDescription* answer, IceCredentialsIterator* ice_credentials) const { + const webrtc::FieldTrialsView* field_trials = + &transport_desc_factory_->trials(); RTC_CHECK(IsMediaContentOfType(offer_content, MEDIA_TYPE_VIDEO)); const VideoContentDescription* offer_video_description = offer_content->media_description()->as_video(); @@ -2678,9 +2745,9 @@ bool MediaSessionDescriptionFactory::AddVideoContentForAnswer( VideoCodecs filtered_codecs; if (!media_description_options.codec_preferences.empty()) { - filtered_codecs = - MatchCodecPreference(media_description_options.codec_preferences, - video_codecs, supported_video_codecs); + filtered_codecs = MatchCodecPreference( + media_description_options.codec_preferences, video_codecs, + supported_video_codecs, field_trials); } else { // Add the codecs from current content if it exists and is not rejected nor // recycled. @@ -2691,23 +2758,37 @@ bool MediaSessionDescriptionFactory::AddVideoContentForAnswer( current_content->media_description()->as_video(); for (const VideoCodec& codec : vcd->codecs()) { if (FindMatchingCodec(vcd->codecs(), video_codecs, codec, - nullptr)) { + nullptr, field_trials)) { filtered_codecs.push_back(codec); } } } + // Add other supported video codecs. + VideoCodecs other_video_codecs; for (const VideoCodec& codec : supported_video_codecs) { if (FindMatchingCodec(supported_video_codecs, video_codecs, - codec, nullptr) && + codec, nullptr, field_trials) && !FindMatchingCodec(supported_video_codecs, - filtered_codecs, codec, nullptr)) { + filtered_codecs, codec, nullptr, + field_trials)) { // We should use the local codec with local parameters and the codec id // would be correctly mapped in `NegotiateCodecs`. - filtered_codecs.push_back(codec); + other_video_codecs.push_back(codec); } } + + // Use ComputeCodecsUnion to avoid having duplicate payload IDs + filtered_codecs = ComputeCodecsUnion( + filtered_codecs, other_video_codecs, field_trials); } + // Determine if we have media codecs in common. + bool has_common_media_codecs = + std::find_if( + filtered_codecs.begin(), filtered_codecs.end(), + [](const VideoCodec& c) { + return !(IsRedCodec(c) || IsUlpfecCodec(c) || IsFlexfecCodec(c)); + }) != filtered_codecs.end(); if (session_options.raw_packetization_for_video) { for (VideoCodec& codec : filtered_codecs) { @@ -2725,22 +2806,22 @@ bool MediaSessionDescriptionFactory::AddVideoContentForAnswer( video_transport->secure() ? 
cricket::SEC_DISABLED : secure(); if (!SetCodecsInAnswer(offer_video_description, filtered_codecs, media_description_options, session_options, - ssrc_generator_, current_streams, - video_answer.get())) { + ssrc_generator(), current_streams, video_answer.get(), + transport_desc_factory_->trials())) { return false; } if (!CreateMediaContentAnswer( offer_video_description, media_description_options, session_options, sdes_policy, GetCryptos(current_content), filtered_rtp_header_extensions(default_video_rtp_header_extensions), - ssrc_generator_, enable_encrypted_rtp_header_extensions_, + ssrc_generator(), enable_encrypted_rtp_header_extensions_, current_streams, bundle_enabled, video_answer.get())) { - return false; // Failed the sessin setup. + return false; // Failed the session setup. } bool secure = bundle_transport ? bundle_transport->description.secure() : video_transport->secure(); bool rejected = media_description_options.stopped || - offer_content->rejected || + offer_content->rejected || !has_common_media_codecs || !IsMediaProtocolSupported(MEDIA_TYPE_VIDEO, video_answer->protocol(), secure); if (!AddTransportAnswer(media_description_options.mid, @@ -2806,7 +2887,7 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer( if (!CreateMediaContentAnswer( offer_data_description, media_description_options, session_options, sdes_policy, GetCryptos(current_content), RtpHeaderExtensions(), - ssrc_generator_, enable_encrypted_rtp_header_extensions_, + ssrc_generator(), enable_encrypted_rtp_header_extensions_, current_streams, bundle_enabled, data_answer.get())) { return false; // Fails the session setup. } @@ -2871,13 +2952,15 @@ bool MediaSessionDescriptionFactory::AddUnsupportedContentForAnswer( } void MediaSessionDescriptionFactory::ComputeAudioCodecsIntersectionAndUnion() { + const webrtc::FieldTrialsView* field_trials = + &transport_desc_factory_->trials(); audio_sendrecv_codecs_.clear(); all_audio_codecs_.clear(); // Compute the audio codecs union. for (const AudioCodec& send : audio_send_codecs_) { all_audio_codecs_.push_back(send); if (!FindMatchingCodec(audio_send_codecs_, audio_recv_codecs_, - send, nullptr)) { + send, nullptr, field_trials)) { // It doesn't make sense to have an RTX codec we support sending but not // receiving. RTC_DCHECK(!IsRtxCodec(send)); @@ -2885,7 +2968,7 @@ void MediaSessionDescriptionFactory::ComputeAudioCodecsIntersectionAndUnion() { } for (const AudioCodec& recv : audio_recv_codecs_) { if (!FindMatchingCodec(audio_recv_codecs_, audio_send_codecs_, - recv, nullptr)) { + recv, nullptr, field_trials)) { all_audio_codecs_.push_back(recv); } } @@ -2895,39 +2978,25 @@ void MediaSessionDescriptionFactory::ComputeAudioCodecsIntersectionAndUnion() { // expensive than decoding, and prioritizing a codec in the send list probably // means it's a codec we can handle efficiently. NegotiateCodecs(audio_recv_codecs_, audio_send_codecs_, - &audio_sendrecv_codecs_, true); + &audio_sendrecv_codecs_, true, field_trials); } void MediaSessionDescriptionFactory::ComputeVideoCodecsIntersectionAndUnion() { + const webrtc::FieldTrialsView* field_trials = + &transport_desc_factory_->trials(); video_sendrecv_codecs_.clear(); - all_video_codecs_.clear(); - // Compute the video codecs union. 
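The has_common_media_codecs checks added to the audio and video answer paths above ensure an m-section is only accepted when at least one real media codec survived filtering; if only RED/comfort noise (audio) or RED/ULPFEC/FlexFEC (video) remain, the content is marked rejected. A sketch of the audio-side predicate over a stand-in codec type:

#include <algorithm>
#include <string>
#include <vector>

struct AnswerCodec {
  std::string name;
};

// True when at least one codec is an actual media codec rather than pure
// redundancy (RED) or comfort noise (CN).
bool HasUsableAudioCodec(const std::vector<AnswerCodec>& codecs) {
  return std::any_of(codecs.begin(), codecs.end(), [](const AnswerCodec& c) {
    return c.name != "red" && c.name != "CN";
  });
}
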
- for (const VideoCodec& send : video_send_codecs_) { - all_video_codecs_.push_back(send); - if (!FindMatchingCodec(video_send_codecs_, video_recv_codecs_, - send, nullptr)) { - // TODO(kron): This check is violated by the unit test: - // MediaSessionDescriptionFactoryTest.RtxWithoutApt - // Remove either the test or the check. - // It doesn't make sense to have an RTX codec we support sending but not - // receiving. - // RTC_DCHECK(!IsRtxCodec(send)); - } - } - for (const VideoCodec& recv : video_recv_codecs_) { - if (!FindMatchingCodec(video_recv_codecs_, video_send_codecs_, - recv, nullptr)) { - all_video_codecs_.push_back(recv); - } - } + // Use ComputeCodecsUnion to avoid having duplicate payload IDs + all_video_codecs_ = + ComputeCodecsUnion(video_recv_codecs_, video_send_codecs_, field_trials); + // Use NegotiateCodecs to merge our codec lists, since the operation is // essentially the same. Put send_codecs as the offered_codecs, which is the // order we'd like to follow. The reasoning is that encoding is usually more // expensive than decoding, and prioritizing a codec in the send list probably // means it's a codec we can handle efficiently. NegotiateCodecs(video_recv_codecs_, video_send_codecs_, - &video_sendrecv_codecs_, true); + &video_sendrecv_codecs_, true, field_trials); } bool IsMediaContent(const ContentInfo* content) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_session.h b/TMessagesProj/jni/voip/webrtc/pc/media_session.h index bb97f42b27..3711110ccd 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_session.h +++ b/TMessagesProj/jni/voip/webrtc/pc/media_session.h @@ -19,6 +19,7 @@ #include #include "api/crypto/crypto_options.h" +#include "api/field_trials_view.h" #include "api/media_types.h" #include "api/rtp_parameters.h" #include "api/rtp_transceiver_direction.h" @@ -33,11 +34,20 @@ #include "pc/media_protocol_names.h" #include "pc/session_description.h" #include "pc/simulcast_description.h" +#include "rtc_base/memory/always_valid_pointer.h" #include "rtc_base/unique_id_generator.h" +namespace webrtc { + +// Forward declaration due to circular dependecy. +class ConnectionContext; + +} // namespace webrtc + namespace cricket { class ChannelManager; +class MediaEngineInterface; // Default RTCP CNAME for unit tests. const char kDefaultRtcpCname[] = "DefaultRtcpCname"; @@ -139,10 +149,11 @@ class MediaSessionDescriptionFactory { MediaSessionDescriptionFactory(const TransportDescriptionFactory* factory, rtc::UniqueRandomIdGenerator* ssrc_generator); // This helper automatically sets up the factory to get its configuration - // from the specified ChannelManager. - MediaSessionDescriptionFactory(ChannelManager* cmanager, - const TransportDescriptionFactory* factory, - rtc::UniqueRandomIdGenerator* ssrc_generator); + // from the specified MediaEngine + MediaSessionDescriptionFactory(cricket::MediaEngineInterface* media_engine, + bool rtx_enabled, + rtc::UniqueRandomIdGenerator* ssrc_generator, + const TransportDescriptionFactory* factory); const AudioCodecs& audio_sendrecv_codecs() const; const AudioCodecs& audio_send_codecs() const; @@ -321,6 +332,10 @@ class MediaSessionDescriptionFactory { void ComputeVideoCodecsIntersectionAndUnion(); + rtc::UniqueRandomIdGenerator* ssrc_generator() const { + return ssrc_generator_.get(); + } + bool is_unified_plan_ = false; AudioCodecs audio_send_codecs_; AudioCodecs audio_recv_codecs_; @@ -334,8 +349,9 @@ class MediaSessionDescriptionFactory { VideoCodecs video_sendrecv_codecs_; // Union of send and recv. 
VideoCodecs all_video_codecs_; - // This object is not owned by the channel so it must outlive it. - rtc::UniqueRandomIdGenerator* const ssrc_generator_; + // This object may or may not be owned by this class. + webrtc::AlwaysValidPointer const + ssrc_generator_; bool enable_encrypted_rtp_header_extensions_ = false; // TODO(zhihuang): Rename secure_ to sdec_policy_; rename the related getter // and setter. diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_stream.cc b/TMessagesProj/jni/voip/webrtc/pc/media_stream.cc index 08a2a723d0..57be76c6c8 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/media_stream.cc @@ -12,10 +12,9 @@ #include -#include +#include #include "rtc_base/checks.h" -#include "rtc_base/ref_counted_object.h" namespace webrtc { @@ -36,19 +35,19 @@ rtc::scoped_refptr MediaStream::Create(const std::string& id) { MediaStream::MediaStream(const std::string& id) : id_(id) {} -bool MediaStream::AddTrack(AudioTrackInterface* track) { +bool MediaStream::AddTrack(rtc::scoped_refptr track) { return AddTrack(&audio_tracks_, track); } -bool MediaStream::AddTrack(VideoTrackInterface* track) { +bool MediaStream::AddTrack(rtc::scoped_refptr track) { return AddTrack(&video_tracks_, track); } -bool MediaStream::RemoveTrack(AudioTrackInterface* track) { +bool MediaStream::RemoveTrack(rtc::scoped_refptr track) { return RemoveTrack(&audio_tracks_, track); } -bool MediaStream::RemoveTrack(VideoTrackInterface* track) { +bool MediaStream::RemoveTrack(rtc::scoped_refptr track) { return RemoveTrack(&video_tracks_, track); } @@ -56,7 +55,7 @@ rtc::scoped_refptr MediaStream::FindAudioTrack( const std::string& track_id) { AudioTrackVector::iterator it = FindTrack(&audio_tracks_, track_id); if (it == audio_tracks_.end()) - return NULL; + return nullptr; return *it; } @@ -64,23 +63,25 @@ rtc::scoped_refptr MediaStream::FindVideoTrack( const std::string& track_id) { VideoTrackVector::iterator it = FindTrack(&video_tracks_, track_id); if (it == video_tracks_.end()) - return NULL; + return nullptr; return *it; } template -bool MediaStream::AddTrack(TrackVector* tracks, Track* track) { +bool MediaStream::AddTrack(TrackVector* tracks, + rtc::scoped_refptr track) { typename TrackVector::iterator it = FindTrack(tracks, track->id()); if (it != tracks->end()) return false; - tracks->push_back(track); + tracks->emplace_back(std::move((track))); FireOnChanged(); return true; } template -bool MediaStream::RemoveTrack(TrackVector* tracks, - MediaStreamTrackInterface* track) { +bool MediaStream::RemoveTrack( + TrackVector* tracks, + rtc::scoped_refptr track) { RTC_DCHECK(tracks != NULL); if (!track) return false; diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_stream.h b/TMessagesProj/jni/voip/webrtc/pc/media_stream.h index 6f16bea1d9..c033cf6f35 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_stream.h +++ b/TMessagesProj/jni/voip/webrtc/pc/media_stream.h @@ -27,10 +27,10 @@ class MediaStream : public Notifier { std::string id() const override { return id_; } - bool AddTrack(AudioTrackInterface* track) override; - bool AddTrack(VideoTrackInterface* track) override; - bool RemoveTrack(AudioTrackInterface* track) override; - bool RemoveTrack(VideoTrackInterface* track) override; + bool AddTrack(rtc::scoped_refptr track) override; + bool AddTrack(rtc::scoped_refptr track) override; + bool RemoveTrack(rtc::scoped_refptr track) override; + bool RemoveTrack(rtc::scoped_refptr track) override; rtc::scoped_refptr FindAudioTrack( const std::string& track_id) 
override; rtc::scoped_refptr FindVideoTrack( @@ -44,9 +44,10 @@ class MediaStream : public Notifier { private: template - bool AddTrack(TrackVector* Tracks, Track* track); + bool AddTrack(TrackVector* Tracks, rtc::scoped_refptr track); template - bool RemoveTrack(TrackVector* Tracks, MediaStreamTrackInterface* track); + bool RemoveTrack(TrackVector* Tracks, + rtc::scoped_refptr track); const std::string id_; AudioTrackVector audio_tracks_; diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_stream_observer.cc b/TMessagesProj/jni/voip/webrtc/pc/media_stream_observer.cc index 28caccf5d5..6264a7657a 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_stream_observer.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/media_stream_observer.cc @@ -54,7 +54,7 @@ void MediaStreamObserver::OnChanged() { [cached_track](const AudioTrackVector::value_type& new_track) { return new_track->id() == cached_track->id(); })) { - audio_track_removed_callback_(cached_track.get(), stream_); + audio_track_removed_callback_(cached_track.get(), stream_.get()); } } @@ -65,7 +65,7 @@ void MediaStreamObserver::OnChanged() { [new_track](const AudioTrackVector::value_type& cached_track) { return new_track->id() == cached_track->id(); })) { - audio_track_added_callback_(new_track.get(), stream_); + audio_track_added_callback_(new_track.get(), stream_.get()); } } @@ -76,7 +76,7 @@ void MediaStreamObserver::OnChanged() { [cached_track](const VideoTrackVector::value_type& new_track) { return new_track->id() == cached_track->id(); })) { - video_track_removed_callback_(cached_track.get(), stream_); + video_track_removed_callback_(cached_track.get(), stream_.get()); } } @@ -87,7 +87,7 @@ void MediaStreamObserver::OnChanged() { [new_track](const VideoTrackVector::value_type& cached_track) { return new_track->id() == cached_track->id(); })) { - video_track_added_callback_(new_track.get(), stream_); + video_track_added_callback_(new_track.get(), stream_.get()); } } diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_stream_observer.h b/TMessagesProj/jni/voip/webrtc/pc/media_stream_observer.h index 4c4f22168b..83bbd20994 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_stream_observer.h +++ b/TMessagesProj/jni/voip/webrtc/pc/media_stream_observer.h @@ -34,7 +34,7 @@ class MediaStreamObserver : public ObserverInterface { video_track_removed_callback); ~MediaStreamObserver() override; - const MediaStreamInterface* stream() const { return stream_; } + const MediaStreamInterface* stream() const { return stream_.get(); } void OnChanged() override; diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_stream_proxy.h b/TMessagesProj/jni/voip/webrtc/pc/media_stream_proxy.h index 36069a4369..3e263bfd8b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_stream_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/pc/media_stream_proxy.h @@ -31,10 +31,10 @@ PROXY_METHOD1(rtc::scoped_refptr, PROXY_METHOD1(rtc::scoped_refptr, FindVideoTrack, const std::string&) -PROXY_METHOD1(bool, AddTrack, AudioTrackInterface*) -PROXY_METHOD1(bool, AddTrack, VideoTrackInterface*) -PROXY_METHOD1(bool, RemoveTrack, AudioTrackInterface*) -PROXY_METHOD1(bool, RemoveTrack, VideoTrackInterface*) +PROXY_METHOD1(bool, AddTrack, rtc::scoped_refptr) +PROXY_METHOD1(bool, AddTrack, rtc::scoped_refptr) +PROXY_METHOD1(bool, RemoveTrack, rtc::scoped_refptr) +PROXY_METHOD1(bool, RemoveTrack, rtc::scoped_refptr) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) END_PROXY_MAP(MediaStream) diff --git 
a/TMessagesProj/jni/voip/webrtc/pc/media_stream_track_proxy.h b/TMessagesProj/jni/voip/webrtc/pc/media_stream_track_proxy.h index f563137c77..2af3aedb22 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_stream_track_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/pc/media_stream_track_proxy.h @@ -44,15 +44,16 @@ PROXY_PRIMARY_THREAD_DESTRUCTOR() BYPASS_PROXY_CONSTMETHOD0(std::string, kind) BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_SECONDARY_CONSTMETHOD0(TrackState, state) -PROXY_SECONDARY_CONSTMETHOD0(bool, enabled) -PROXY_SECONDARY_METHOD1(bool, set_enabled, bool) -PROXY_SECONDARY_CONSTMETHOD0(ContentHint, content_hint) -PROXY_SECONDARY_METHOD1(void, set_content_hint, ContentHint) +PROXY_CONSTMETHOD0(bool, enabled) +PROXY_METHOD1(bool, set_enabled, bool) +PROXY_CONSTMETHOD0(ContentHint, content_hint) +PROXY_METHOD1(void, set_content_hint, ContentHint) PROXY_SECONDARY_METHOD2(void, AddOrUpdateSink, rtc::VideoSinkInterface*, const rtc::VideoSinkWants&) PROXY_SECONDARY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface*) +PROXY_SECONDARY_METHOD0(void, RequestRefreshFrame) BYPASS_PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc index 885f5573d5..86877e5aeb 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc @@ -16,21 +16,22 @@ #include #include #include +#include #include #include "absl/algorithm/container.h" #include "absl/strings/match.h" +#include "absl/strings/string_view.h" #include "api/jsep_ice_candidate.h" #include "api/rtp_parameters.h" #include "api/rtp_transceiver_direction.h" -#include "api/task_queue/queued_task.h" -#include "api/transport/webrtc_key_value_config.h" #include "api/uma_metrics.h" #include "api/video/video_codec_constants.h" #include "call/audio_state.h" #include "call/packet_receiver.h" #include "media/base/media_channel.h" #include "media/base/media_config.h" +#include "media/base/media_engine.h" #include "media/base/rid_description.h" #include "media/base/stream_params.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -41,23 +42,22 @@ #include "p2p/base/p2p_constants.h" #include "p2p/base/p2p_transport_channel.h" #include "p2p/base/transport_info.h" -#include "pc/channel.h" #include "pc/ice_server_parsing.h" #include "pc/rtp_receiver.h" +#include "pc/rtp_receiver_proxy.h" #include "pc/rtp_sender.h" +#include "pc/rtp_sender_proxy.h" #include "pc/sctp_transport.h" #include "pc/simulcast_description.h" #include "pc/webrtc_session_description_factory.h" #include "rtc_base/helpers.h" #include "rtc_base/ip_address.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/net_helper.h" +#include "rtc_base/network.h" #include "rtc_base/network_constants.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/socket_address.h" #include "rtc_base/string_encode.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/trace_event.h" #include "rtc_base/unique_id_generator.h" #include "system_wrappers/include/metrics.h" @@ -276,8 +276,12 @@ bool DtlsEnabled(const PeerConnectionInterface::RTCConfiguration& configuration, bool default_enabled = (dependencies.cert_generator || !configuration.certificates.empty()); +#if defined(WEBRTC_FUCHSIA) // The `configuration` can override the default value. 
return configuration.enable_dtls_srtp.value_or(default_enabled); +#else + return default_enabled; +#endif } } // namespace @@ -297,16 +301,16 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( bool disable_ipv6_on_wifi; int max_ipv6_networks; bool disable_link_local_networks; - bool enable_rtp_data_channel; absl::optional screencast_min_bitrate; absl::optional combined_audio_video_bwe; +#if defined(WEBRTC_FUCHSIA) absl::optional enable_dtls_srtp; +#endif TcpCandidatePolicy tcp_candidate_policy; CandidateNetworkPolicy candidate_network_policy; int audio_jitter_buffer_max_packets; bool audio_jitter_buffer_fast_accelerate; int audio_jitter_buffer_min_delay_ms; - bool audio_jitter_buffer_enable_rtx_handling; int ice_connection_receiving_timeout; int ice_backup_candidate_pair_ping_interval; ContinualGatheringPolicy continual_gathering_policy; @@ -353,8 +357,6 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( o.audio_jitter_buffer_fast_accelerate && audio_jitter_buffer_min_delay_ms == o.audio_jitter_buffer_min_delay_ms && - audio_jitter_buffer_enable_rtx_handling == - o.audio_jitter_buffer_enable_rtx_handling && ice_connection_receiving_timeout == o.ice_connection_receiving_timeout && ice_backup_candidate_pair_ping_interval == @@ -369,7 +371,9 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( disable_link_local_networks == o.disable_link_local_networks && screencast_min_bitrate == o.screencast_min_bitrate && combined_audio_video_bwe == o.combined_audio_video_bwe && +#if defined(WEBRTC_FUCHSIA) enable_dtls_srtp == o.enable_dtls_srtp && +#endif ice_candidate_pool_size == o.ice_candidate_pool_size && prune_turn_ports == o.prune_turn_ports && turn_port_prune_policy == o.turn_port_prune_policy && @@ -419,6 +423,12 @@ RTCErrorOr> PeerConnection::Create( std::unique_ptr call, const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies) { + // TODO(https://crbug.com/webrtc/13528): Remove support for kPlanB. + if (configuration.sdp_semantics == SdpSemantics::kPlanB_DEPRECATED) { + RTC_LOG(LS_WARNING) + << "PeerConnection constructed with legacy SDP semantics!"; + } + RTCError config_error = cricket::P2PTransportChannel::ValidateIceConfig( ParseIceConfig(configuration)); if (!config_error.ok()) { @@ -492,6 +502,7 @@ PeerConnection::PeerConnection( PeerConnectionDependencies& dependencies, bool dtls_enabled) : context_(context), + trials_(std::move(dependencies.trials), &context->field_trials()), options_(options), observer_(dependencies.observer), is_unified_plan_(is_unified_plan), @@ -513,7 +524,7 @@ PeerConnection::PeerConnection( data_channel_controller_(this), message_handler_(signaling_thread()), weak_factory_(this) { - worker_thread()->Invoke(RTC_FROM_HERE, [this] { + worker_thread()->BlockingCall([this] { RTC_DCHECK_RUN_ON(worker_thread()); worker_thread_safety_ = PendingTaskSafetyFlag::Create(); if (!call_) @@ -530,15 +541,15 @@ PeerConnection::~PeerConnection() { } // Need to stop transceivers before destroying the stats collector because - // AudioRtpSender has a reference to the StatsCollector it will update when - // stopping. + // AudioRtpSender has a reference to the LegacyStatsCollector it will update + // when stopping. 
if (rtp_manager()) { for (const auto& transceiver : rtp_manager()->transceivers()->List()) { transceiver->StopInternal(); } } - stats_.reset(nullptr); + legacy_stats_.reset(nullptr); if (stats_collector_) { stats_collector_->WaitForPendingRequest(); stats_collector_ = nullptr; @@ -556,7 +567,8 @@ PeerConnection::~PeerConnection() { // port_allocator_ and transport_controller_ live on the network thread and // should be destroyed there. - network_thread()->Invoke(RTC_FROM_HERE, [this] { + transport_controller_copy_ = nullptr; + network_thread()->BlockingCall([this] { RTC_DCHECK_RUN_ON(network_thread()); TeardownDataChannelTransport_n(); transport_controller_.reset(); @@ -566,7 +578,7 @@ PeerConnection::~PeerConnection() { }); // call_ and event_log_ must be destroyed on the worker thread. - worker_thread()->Invoke(RTC_FROM_HERE, [this] { + worker_thread()->BlockingCall([this] { RTC_DCHECK_RUN_ON(worker_thread()); worker_thread_safety_->SetNotAlive(); call_.reset(); @@ -584,10 +596,10 @@ RTCError PeerConnection::Initialize( cricket::ServerAddresses stun_servers; std::vector turn_servers; - RTCErrorType parse_error = - ParseIceServers(configuration.servers, &stun_servers, &turn_servers); - if (parse_error != RTCErrorType::NONE) { - return RTCError(parse_error, "ICE server parse failed"); + RTCError parse_error = ParseIceServersOrError(configuration.servers, + &stun_servers, &turn_servers); + if (!parse_error.ok()) { + return parse_error; } // Add the turn logging id to all turn servers @@ -604,32 +616,32 @@ RTCError PeerConnection::Initialize( } // Network thread initialization. - network_thread()->Invoke(RTC_FROM_HERE, [this, &stun_servers, - &turn_servers, &configuration, - &dependencies] { - RTC_DCHECK_RUN_ON(network_thread()); - network_thread_safety_ = PendingTaskSafetyFlag::Create(); - InitializePortAllocatorResult pa_result = - InitializePortAllocator_n(stun_servers, turn_servers, configuration); - // Send information about IPv4/IPv6 status. - PeerConnectionAddressFamilyCounter address_family = - pa_result.enable_ipv6 ? kPeerConnection_IPv6 : kPeerConnection_IPv4; - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.IPMetrics", address_family, - kPeerConnectionAddressFamilyCounter_Max); - InitializeTransportController_n(configuration, dependencies); - }); + transport_controller_copy_ = + network_thread()->BlockingCall([&] { + RTC_DCHECK_RUN_ON(network_thread()); + network_thread_safety_ = PendingTaskSafetyFlag::Create(); + InitializePortAllocatorResult pa_result = InitializePortAllocator_n( + stun_servers, turn_servers, configuration); + // Send information about IPv4/IPv6 status. + PeerConnectionAddressFamilyCounter address_family = + pa_result.enable_ipv6 ? 
kPeerConnection_IPv6 : kPeerConnection_IPv4; + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.IPMetrics", + address_family, + kPeerConnectionAddressFamilyCounter_Max); + return InitializeTransportController_n(configuration, dependencies); + }); configuration_ = configuration; - stats_ = std::make_unique(this); + legacy_stats_ = std::make_unique(this); stats_collector_ = RTCStatsCollector::Create(this); - sdp_handler_ = - SdpOfferAnswerHandler::Create(this, configuration, dependencies); + sdp_handler_ = SdpOfferAnswerHandler::Create(this, configuration, + dependencies, context_.get()); rtp_manager_ = std::make_unique( - IsUnifiedPlan(), signaling_thread(), worker_thread(), channel_manager(), - &usage_pattern_, observer_, stats_.get(), [this]() { + IsUnifiedPlan(), context_.get(), &usage_pattern_, observer_, + legacy_stats_.get(), [this]() { RTC_DCHECK_RUN_ON(signaling_thread()); sdp_handler_->UpdateNegotiationNeeded(); }); @@ -638,12 +650,12 @@ RTCError PeerConnection::Initialize( if (!IsUnifiedPlan()) { rtp_manager()->transceivers()->Add( RtpTransceiverProxyWithInternal::Create( - signaling_thread(), - new RtpTransceiver(cricket::MEDIA_TYPE_AUDIO, channel_manager()))); + signaling_thread(), rtc::make_ref_counted( + cricket::MEDIA_TYPE_AUDIO, context()))); rtp_manager()->transceivers()->Add( RtpTransceiverProxyWithInternal::Create( - signaling_thread(), - new RtpTransceiver(cricket::MEDIA_TYPE_VIDEO, channel_manager()))); + signaling_thread(), rtc::make_ref_counted( + cricket::MEDIA_TYPE_VIDEO, context()))); } int delay_ms = configuration.report_usage_pattern_delay_ms @@ -656,14 +668,10 @@ RTCError PeerConnection::Initialize( }, delay_ms); - // Record the number of configured ICE servers for all connections. - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.PeerConnection.IceServers.Configured", - configuration_.servers.size(), 0, 31, 32); - return RTCError::OK(); } -void PeerConnection::InitializeTransportController_n( +JsepTransportController* PeerConnection::InitializeTransportController_n( const RTCConfiguration& configuration, const PeerConnectionDependencies& dependencies) { JsepTransportController::Config config; @@ -699,6 +707,8 @@ void PeerConnection::InitializeTransportController_n( } }; + config.field_trials = trials_.get(); + transport_controller_.reset( new JsepTransportController(network_thread(), port_allocator_.get(), async_dns_resolver_factory_.get(), config)); @@ -710,7 +720,7 @@ void PeerConnection::InitializeTransportController_n( ReportTransportStats(); } signaling_thread()->PostTask( - ToQueuedTask(signaling_thread_safety_.flag(), [this, s]() { + SafeTask(signaling_thread_safety_.flag(), [this, s]() { RTC_DCHECK_RUN_ON(signaling_thread()); OnTransportControllerConnectionState(s); })); @@ -719,7 +729,7 @@ void PeerConnection::InitializeTransportController_n( [this](PeerConnectionInterface::PeerConnectionState s) { RTC_DCHECK_RUN_ON(network_thread()); signaling_thread()->PostTask( - ToQueuedTask(signaling_thread_safety_.flag(), [this, s]() { + SafeTask(signaling_thread_safety_.flag(), [this, s]() { RTC_DCHECK_RUN_ON(signaling_thread()); SetConnectionState(s); })); @@ -728,7 +738,7 @@ void PeerConnection::InitializeTransportController_n( [this](PeerConnectionInterface::IceConnectionState s) { RTC_DCHECK_RUN_ON(network_thread()); signaling_thread()->PostTask( - ToQueuedTask(signaling_thread_safety_.flag(), [this, s]() { + SafeTask(signaling_thread_safety_.flag(), [this, s]() { RTC_DCHECK_RUN_ON(signaling_thread()); SetStandardizedIceConnectionState(s); })); @@ -737,7 +747,7 @@ void 
PeerConnection::InitializeTransportController_n( [this](cricket::IceGatheringState s) { RTC_DCHECK_RUN_ON(network_thread()); signaling_thread()->PostTask( - ToQueuedTask(signaling_thread_safety_.flag(), [this, s]() { + SafeTask(signaling_thread_safety_.flag(), [this, s]() { RTC_DCHECK_RUN_ON(signaling_thread()); OnTransportControllerGatheringState(s); })); @@ -747,17 +757,17 @@ void PeerConnection::InitializeTransportController_n( const std::vector& candidates) { RTC_DCHECK_RUN_ON(network_thread()); signaling_thread()->PostTask( - ToQueuedTask(signaling_thread_safety_.flag(), - [this, t = transport, c = candidates]() { - RTC_DCHECK_RUN_ON(signaling_thread()); - OnTransportControllerCandidatesGathered(t, c); - })); + SafeTask(signaling_thread_safety_.flag(), + [this, t = transport, c = candidates]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + OnTransportControllerCandidatesGathered(t, c); + })); }); transport_controller_->SubscribeIceCandidateError( [this](const cricket::IceCandidateErrorEvent& event) { RTC_DCHECK_RUN_ON(network_thread()); - signaling_thread()->PostTask(ToQueuedTask( - signaling_thread_safety_.flag(), [this, event = event]() { + signaling_thread()->PostTask( + SafeTask(signaling_thread_safety_.flag(), [this, event = event]() { RTC_DCHECK_RUN_ON(signaling_thread()); OnTransportControllerCandidateError(event); })); @@ -766,7 +776,7 @@ void PeerConnection::InitializeTransportController_n( [this](const std::vector& c) { RTC_DCHECK_RUN_ON(network_thread()); signaling_thread()->PostTask( - ToQueuedTask(signaling_thread_safety_.flag(), [this, c = c]() { + SafeTask(signaling_thread_safety_.flag(), [this, c = c]() { RTC_DCHECK_RUN_ON(signaling_thread()); OnTransportControllerCandidatesRemoved(c); })); @@ -774,14 +784,15 @@ void PeerConnection::InitializeTransportController_n( transport_controller_->SubscribeIceCandidatePairChanged( [this](const cricket::CandidatePairChangeEvent& event) { RTC_DCHECK_RUN_ON(network_thread()); - signaling_thread()->PostTask(ToQueuedTask( - signaling_thread_safety_.flag(), [this, event = event]() { + signaling_thread()->PostTask( + SafeTask(signaling_thread_safety_.flag(), [this, event = event]() { RTC_DCHECK_RUN_ON(signaling_thread()); OnTransportControllerCandidateChanged(event); })); }); transport_controller_->SetIceConfig(ParseIceConfig(configuration)); + return transport_controller_.get(); } rtc::scoped_refptr PeerConnection::local_streams() { @@ -805,11 +816,16 @@ bool PeerConnection::AddStream(MediaStreamInterface* local_stream) { RTC_CHECK(!IsUnifiedPlan()) << "AddStream is not available with Unified Plan " "SdpSemantics. Please use AddTrack instead."; TRACE_EVENT0("webrtc", "PeerConnection::AddStream"); + if (!ConfiguredForMedia()) { + RTC_LOG(LS_ERROR) << "AddStream: Not configured for media"; + return false; + } return sdp_handler_->AddStream(local_stream); } void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) { RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(ConfiguredForMedia()); RTC_CHECK(!IsUnifiedPlan()) << "RemoveStream is not available with Unified " "Plan SdpSemantics. 
Please use RemoveTrack " "instead."; @@ -820,8 +836,26 @@ void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) { RTCErrorOr> PeerConnection::AddTrack( rtc::scoped_refptr track, const std::vector& stream_ids) { + return AddTrack(std::move(track), stream_ids, nullptr); +} + +RTCErrorOr> PeerConnection::AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector& init_send_encodings) { + return AddTrack(std::move(track), stream_ids, &init_send_encodings); +} + +RTCErrorOr> PeerConnection::AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector* init_send_encodings) { RTC_DCHECK_RUN_ON(signaling_thread()); TRACE_EVENT0("webrtc", "PeerConnection::AddTrack"); + if (!ConfiguredForMedia()) { + LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, + "Not configured for media"); + } if (!track) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "Track is null."); } @@ -834,27 +868,27 @@ RTCErrorOr> PeerConnection::AddTrack( LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE, "PeerConnection is closed."); } - if (rtp_manager()->FindSenderForTrack(track)) { + if (rtp_manager()->FindSenderForTrack(track.get())) { LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_PARAMETER, "Sender already exists for track " + track->id() + "."); } - auto sender_or_error = rtp_manager()->AddTrack(track, stream_ids); + auto sender_or_error = + rtp_manager()->AddTrack(track, stream_ids, init_send_encodings); if (sender_or_error.ok()) { sdp_handler_->UpdateNegotiationNeeded(); - stats_->AddTrack(track); + legacy_stats_->AddTrack(track.get()); } return sender_or_error; } -bool PeerConnection::RemoveTrack(RtpSenderInterface* sender) { - TRACE_EVENT0("webrtc", "PeerConnection::RemoveTrack"); - return RemoveTrackNew(sender).ok(); -} - -RTCError PeerConnection::RemoveTrackNew( +RTCError PeerConnection::RemoveTrackOrError( rtc::scoped_refptr sender) { RTC_DCHECK_RUN_ON(signaling_thread()); + if (!ConfiguredForMedia()) { + LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, + "Not configured for media"); + } if (!sender) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "Sender is null."); } @@ -879,11 +913,11 @@ RTCError PeerConnection::RemoveTrackNew( bool removed; if (sender->media_type() == cricket::MEDIA_TYPE_AUDIO) { removed = rtp_manager()->GetAudioTransceiver()->internal()->RemoveSender( - sender); + sender.get()); } else { RTC_DCHECK_EQ(cricket::MEDIA_TYPE_VIDEO, sender->media_type()); removed = rtp_manager()->GetVideoTransceiver()->internal()->RemoveSender( - sender); + sender.get()); } if (!removed) { LOG_AND_RETURN_ERROR( @@ -904,17 +938,24 @@ PeerConnection::FindTransceiverBySender( RTCErrorOr> PeerConnection::AddTransceiver( rtc::scoped_refptr track) { + if (!ConfiguredForMedia()) { + LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, + "Not configured for media"); + } + return AddTransceiver(track, RtpTransceiverInit()); } RtpTransportInternal* PeerConnection::GetRtpTransport(const std::string& mid) { + // TODO(bugs.webrtc.org/9987): Avoid the thread jump. + // This might be done by caching the value on the signaling thread. 
RTC_DCHECK_RUN_ON(signaling_thread()); - return network_thread()->Invoke( - RTC_FROM_HERE, [this, &mid] { - auto rtp_transport = transport_controller_->GetRtpTransport(mid); - RTC_DCHECK(rtp_transport); - return rtp_transport; - }); + return network_thread()->BlockingCall([this, &mid] { + RTC_DCHECK_RUN_ON(network_thread()); + auto rtp_transport = transport_controller_->GetRtpTransport(mid); + RTC_DCHECK(rtp_transport); + return rtp_transport; + }); } RTCErrorOr> @@ -922,6 +963,10 @@ PeerConnection::AddTransceiver( rtc::scoped_refptr track, const RtpTransceiverInit& init) { RTC_DCHECK_RUN_ON(signaling_thread()); + if (!ConfiguredForMedia()) { + LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, + "Not configured for media"); + } RTC_CHECK(IsUnifiedPlan()) << "AddTransceiver is only available with Unified Plan SdpSemantics"; if (!track) { @@ -948,6 +993,10 @@ RTCErrorOr> PeerConnection::AddTransceiver(cricket::MediaType media_type, const RtpTransceiverInit& init) { RTC_DCHECK_RUN_ON(signaling_thread()); + if (!ConfiguredForMedia()) { + LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, + "Not configured for media"); + } RTC_CHECK(IsUnifiedPlan()) << "AddTransceiver is only available with Unified Plan SdpSemantics"; if (!(media_type == cricket::MEDIA_TYPE_AUDIO || @@ -965,6 +1014,10 @@ PeerConnection::AddTransceiver( const RtpTransceiverInit& init, bool update_negotiation_needed) { RTC_DCHECK_RUN_ON(signaling_thread()); + if (!ConfiguredForMedia()) { + LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, + "Not configured for media"); + } RTC_DCHECK((media_type == cricket::MEDIA_TYPE_AUDIO || media_type == cricket::MEDIA_TYPE_VIDEO)); if (track) { @@ -1037,7 +1090,14 @@ PeerConnection::AddTransceiver( "Attempted to set an unimplemented parameter of RtpParameters."); } - auto result = cricket::CheckRtpParametersValues(parameters); + std::vector codecs; + if (media_type == cricket::MEDIA_TYPE_VIDEO) { + // Gather the current codec capabilities to allow checking scalabilityMode + // against supported values. + codecs = context_->media_engine()->video().send_codecs(false); + } + + auto result = cricket::CheckRtpParametersValues(parameters, codecs); if (!result.ok()) { LOG_AND_RETURN_ERROR(result.type(), result.message()); } @@ -1073,6 +1133,10 @@ rtc::scoped_refptr PeerConnection::CreateSender( const std::string& kind, const std::string& stream_id) { RTC_DCHECK_RUN_ON(signaling_thread()); + if (!ConfiguredForMedia()) { + RTC_LOG(LS_ERROR) << "Not configured for media"; + return nullptr; + } RTC_CHECK(!IsUnifiedPlan()) << "CreateSender is not available with Unified " "Plan SdpSemantics. Please use AddTransceiver " "instead."; @@ -1096,8 +1160,9 @@ rtc::scoped_refptr PeerConnection::CreateSender( // TODO(steveanton): Move construction of the RtpSenders to RtpTransceiver. 
rtc::scoped_refptr> new_sender; if (kind == MediaStreamTrackInterface::kAudioKind) { - auto audio_sender = AudioRtpSender::Create( - worker_thread(), rtc::CreateRandomUuid(), stats_.get(), rtp_manager()); + auto audio_sender = + AudioRtpSender::Create(worker_thread(), rtc::CreateRandomUuid(), + legacy_stats_.get(), rtp_manager()); audio_sender->SetMediaChannel(rtp_manager()->voice_media_channel()); new_sender = RtpSenderProxyWithInternal::Create( signaling_thread(), audio_sender); @@ -1122,8 +1187,10 @@ std::vector> PeerConnection::GetSenders() const { RTC_DCHECK_RUN_ON(signaling_thread()); std::vector> ret; - for (const auto& sender : rtp_manager()->GetSendersInternal()) { - ret.push_back(sender); + if (ConfiguredForMedia()) { + for (const auto& sender : rtp_manager()->GetSendersInternal()) { + ret.push_back(sender); + } } return ret; } @@ -1132,8 +1199,10 @@ std::vector> PeerConnection::GetReceivers() const { RTC_DCHECK_RUN_ON(signaling_thread()); std::vector> ret; - for (const auto& receiver : rtp_manager()->GetReceiversInternal()) { - ret.push_back(receiver); + if (ConfiguredForMedia()) { + for (const auto& receiver : rtp_manager()->GetReceiversInternal()) { + ret.push_back(receiver); + } } return ret; } @@ -1144,8 +1213,10 @@ PeerConnection::GetTransceivers() const { RTC_CHECK(IsUnifiedPlan()) << "GetTransceivers is only supported with Unified Plan SdpSemantics."; std::vector> all_transceivers; - for (const auto& transceiver : rtp_manager()->transceivers()->List()) { - all_transceivers.push_back(transceiver); + if (ConfiguredForMedia()) { + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { + all_transceivers.push_back(transceiver); + } } return all_transceivers; } @@ -1153,24 +1224,27 @@ PeerConnection::GetTransceivers() const { bool PeerConnection::GetStats(StatsObserver* observer, MediaStreamTrackInterface* track, StatsOutputLevel level) { - TRACE_EVENT0("webrtc", "PeerConnection::GetStats"); + TRACE_EVENT0("webrtc", "PeerConnection::GetStats (legacy)"); RTC_DCHECK_RUN_ON(signaling_thread()); if (!observer) { - RTC_LOG(LS_ERROR) << "GetStats - observer is NULL."; + RTC_LOG(LS_ERROR) << "Legacy GetStats - observer is NULL."; return false; } RTC_LOG_THREAD_BLOCK_COUNT(); - stats_->UpdateStats(level); - // The StatsCollector is used to tell if a track is valid because it may + legacy_stats_->UpdateStats(level); + + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(4); + + // The LegacyStatsCollector is used to tell if a track is valid because it may // remember tracks that the PeerConnection previously removed. 
- if (track && !stats_->IsValidTrack(track->id())) { - RTC_LOG(LS_WARNING) << "GetStats is called with an invalid track: " + if (track && !legacy_stats_->IsValidTrack(track->id())) { + RTC_LOG(LS_WARNING) << "Legacy GetStats is called with an invalid track: " << track->id(); return false; } - message_handler_.PostGetStats(observer, stats_.get(), track); + message_handler_.PostGetStats(observer, legacy_stats_.get(), track); return true; } @@ -1181,7 +1255,9 @@ void PeerConnection::GetStats(RTCStatsCollectorCallback* callback) { RTC_DCHECK(stats_collector_); RTC_DCHECK(callback); RTC_LOG_THREAD_BLOCK_COUNT(); - stats_collector_->GetStatsReport(callback); + stats_collector_->GetStatsReport( + rtc::scoped_refptr(callback)); + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2); } void PeerConnection::GetStats( @@ -1191,6 +1267,7 @@ void PeerConnection::GetStats( RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(callback); RTC_DCHECK(stats_collector_); + RTC_LOG_THREAD_BLOCK_COUNT(); rtc::scoped_refptr internal_sender; if (selector) { for (const auto& proxy_transceiver : @@ -1212,6 +1289,7 @@ void PeerConnection::GetStats( // selector" is an empty set. Invoking GetStatsReport() with a null selector // produces an empty stats report. stats_collector_->GetStatsReport(internal_sender, callback); + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2); } void PeerConnection::GetStats( @@ -1221,6 +1299,7 @@ void PeerConnection::GetStats( RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(callback); RTC_DCHECK(stats_collector_); + RTC_LOG_THREAD_BLOCK_COUNT(); rtc::scoped_refptr internal_receiver; if (selector) { for (const auto& proxy_transceiver : @@ -1242,6 +1321,7 @@ void PeerConnection::GetStats( // the selector" is an empty set. Invoking GetStatsReport() with a null // selector produces an empty stats report. stats_collector_->GetStatsReport(internal_receiver, callback); + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2); } PeerConnectionInterface::SignalingState PeerConnection::signaling_state() { @@ -1461,10 +1541,10 @@ RTCError PeerConnection::SetConfiguration( // Parse ICE servers before hopping to network thread. cricket::ServerAddresses stun_servers; std::vector turn_servers; - RTCErrorType parse_error = - ParseIceServers(configuration.servers, &stun_servers, &turn_servers); - if (parse_error != RTCErrorType::NONE) { - return RTCError(parse_error, "ICE server parse failed"); + RTCError parse_error = ParseIceServersOrError(configuration.servers, + &stun_servers, &turn_servers); + if (!parse_error.ok()) { + return parse_error; } // Add the turn logging id to all turn servers for (cricket::RelayServerConfig& turn_server : turn_servers) { @@ -1492,10 +1572,10 @@ RTCError PeerConnection::SetConfiguration( // Apply part of the configuration on the network thread. In theory this // shouldn't fail. - if (!network_thread()->Invoke( - RTC_FROM_HERE, + if (!network_thread()->BlockingCall( [this, needs_ice_restart, &ice_config, &stun_servers, &turn_servers, &modified_config, has_local_description] { + RTC_DCHECK_RUN_ON(network_thread()); // As described in JSEP, calling setConfiguration with new ICE // servers or candidate policy must set a "needs-ice-restart" bit so // that the next offer triggers an ICE restart which will pick up @@ -1518,9 +1598,12 @@ RTCError PeerConnection::SetConfiguration( if (configuration_.active_reset_srtp_params != modified_config.active_reset_srtp_params) { - // TODO(tommi): move to the network thread - this hides an invoke. 
- transport_controller_->SetActiveResetSrtpParams( - modified_config.active_reset_srtp_params); + // TODO(tommi): merge BlockingCalls + network_thread()->BlockingCall([this, &modified_config] { + RTC_DCHECK_RUN_ON(network_thread()); + transport_controller_->SetActiveResetSrtpParams( + modified_config.active_reset_srtp_params); + }); } if (modified_config.allow_codec_switching.has_value()) { @@ -1529,14 +1612,13 @@ RTCError PeerConnection::SetConfiguration( if (transceiver->media_type() != cricket::MEDIA_TYPE_VIDEO) continue; - auto* video_channel = static_cast( - transceiver->internal()->channel()); + auto* video_channel = transceiver->internal()->channel(); if (video_channel) - channels.push_back(video_channel->media_channel()); + channels.push_back(static_cast( + video_channel->media_channel())); } - worker_thread()->Invoke( - RTC_FROM_HERE, + worker_thread()->BlockingCall( [channels = std::move(channels), allow_codec_switching = *modified_config.allow_codec_switching]() { for (auto* ch : channels) @@ -1551,6 +1633,7 @@ RTCError PeerConnection::SetConfiguration( bool PeerConnection::AddIceCandidate( const IceCandidateInterface* ice_candidate) { RTC_DCHECK_RUN_ON(signaling_thread()); + ClearStatsCache(); return sdp_handler_->AddIceCandidate(ice_candidate); } @@ -1558,7 +1641,11 @@ void PeerConnection::AddIceCandidate( std::unique_ptr candidate, std::function callback) { RTC_DCHECK_RUN_ON(signaling_thread()); - sdp_handler_->AddIceCandidate(std::move(candidate), callback); + sdp_handler_->AddIceCandidate(std::move(candidate), + [this, callback](webrtc::RTCError result) { + ClearStatsCache(); + callback(result); + }); } bool PeerConnection::RemoveIceCandidates( @@ -1570,8 +1657,7 @@ bool PeerConnection::RemoveIceCandidates( RTCError PeerConnection::SetBitrate(const BitrateSettings& bitrate) { if (!worker_thread()->IsCurrent()) { - return worker_thread()->Invoke( - RTC_FROM_HERE, [&]() { return SetBitrate(bitrate); }); + return worker_thread()->BlockingCall([&]() { return SetBitrate(bitrate); }); } RTC_DCHECK_RUN_ON(worker_thread()); @@ -1612,32 +1698,29 @@ RTCError PeerConnection::SetBitrate(const BitrateSettings& bitrate) { void PeerConnection::SetAudioPlayout(bool playout) { if (!worker_thread()->IsCurrent()) { - worker_thread()->Invoke( - RTC_FROM_HERE, [this, playout] { SetAudioPlayout(playout); }); + worker_thread()->BlockingCall( + [this, playout] { SetAudioPlayout(playout); }); return; } - auto audio_state = - context_->channel_manager()->media_engine()->voice().GetAudioState(); + auto audio_state = context_->media_engine()->voice().GetAudioState(); audio_state->SetPlayout(playout); } void PeerConnection::SetAudioRecording(bool recording) { if (!worker_thread()->IsCurrent()) { - worker_thread()->Invoke( - RTC_FROM_HERE, [this, recording] { SetAudioRecording(recording); }); + worker_thread()->BlockingCall( + [this, recording] { SetAudioRecording(recording); }); return; } - auto audio_state = - context_->channel_manager()->media_engine()->voice().GetAudioState(); + auto audio_state = context_->media_engine()->voice().GetAudioState(); audio_state->SetRecording(recording); } void PeerConnection::AddAdaptationResource( rtc::scoped_refptr resource) { if (!worker_thread()->IsCurrent()) { - return worker_thread()->Invoke(RTC_FROM_HERE, [this, resource]() { - return AddAdaptationResource(resource); - }); + return worker_thread()->BlockingCall( + [this, resource]() { return AddAdaptationResource(resource); }); } RTC_DCHECK_RUN_ON(worker_thread()); if (!call_) { @@ -1647,10 +1730,13 @@ void 
PeerConnection::AddAdaptationResource( call_->AddAdaptationResource(resource); } +bool PeerConnection::ConfiguredForMedia() const { + return context_->media_engine(); +} + bool PeerConnection::StartRtcEventLog(std::unique_ptr output, int64_t output_period_ms) { - return worker_thread()->Invoke( - RTC_FROM_HERE, + return worker_thread()->BlockingCall( [this, output = std::move(output), output_period_ms]() mutable { return StartRtcEventLog_w(std::move(output), output_period_ms); }); @@ -1659,15 +1745,14 @@ bool PeerConnection::StartRtcEventLog(std::unique_ptr output, bool PeerConnection::StartRtcEventLog( std::unique_ptr output) { int64_t output_period_ms = webrtc::RtcEventLog::kImmediateOutput; - if (absl::StartsWith(context_->trials().Lookup("WebRTC-RtcEventLogNewFormat"), - "Enabled")) { + if (trials().IsEnabled("WebRTC-RtcEventLogNewFormat")) { output_period_ms = 5000; } return StartRtcEventLog(std::move(output), output_period_ms); } void PeerConnection::StopRtcEventLog() { - worker_thread()->Invoke(RTC_FROM_HERE, [this] { StopRtcEventLog_w(); }); + worker_thread()->BlockingCall([this] { StopRtcEventLog_w(); }); } rtc::scoped_refptr @@ -1679,7 +1764,12 @@ PeerConnection::LookupDtlsTransportByMid(const std::string& mid) { rtc::scoped_refptr PeerConnection::LookupDtlsTransportByMidInternal(const std::string& mid) { RTC_DCHECK_RUN_ON(signaling_thread()); - return transport_controller_->LookupDtlsTransportByMid(mid); + // TODO(bugs.webrtc.org/9987): Avoid the thread jump. + // This might be done by caching the value on the signaling thread. + return network_thread()->BlockingCall([this, mid]() { + RTC_DCHECK_RUN_ON(network_thread()); + return transport_controller_->LookupDtlsTransportByMid(mid); + }); } rtc::scoped_refptr PeerConnection::GetSctpTransport() @@ -1736,7 +1826,7 @@ void PeerConnection::Close() { } // Update stats here so that we have the most recent stats for tracks and // streams before the channels are closed. - stats_->UpdateStats(kStatsOutputLevelStandard); + legacy_stats_->UpdateStats(kStatsOutputLevelStandard); ice_connection_state_ = PeerConnectionInterface::kIceConnectionClosed; Observer()->OnIceConnectionChange(ice_connection_state_); @@ -1749,12 +1839,13 @@ void PeerConnection::Close() { NoteUsageEvent(UsageEvent::CLOSE_CALLED); - for (const auto& transceiver : rtp_manager()->transceivers()->List()) { - transceiver->internal()->SetPeerConnectionClosed(); - if (!transceiver->stopped()) - transceiver->StopInternal(); + if (ConfiguredForMedia()) { + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { + transceiver->internal()->SetPeerConnectionClosed(); + if (!transceiver->stopped()) + transceiver->StopInternal(); + } } - // Ensure that all asynchronous stats requests are completed before destroying // the transport controller below. if (stats_collector_) { @@ -1771,14 +1862,17 @@ void PeerConnection::Close() { // WebRTC session description factory, the session description factory would // call the transport controller. sdp_handler_->ResetSessionDescFactory(); - rtp_manager_->Close(); + if (ConfiguredForMedia()) { + rtp_manager_->Close(); + } - network_thread()->Invoke(RTC_FROM_HERE, [this] { + network_thread()->BlockingCall([this] { // Data channels will already have been unset via the DestroyAllChannels() // call above, which triggers a call to TeardownDataChannelTransport_n(). // TODO(tommi): ^^ That's not exactly optimal since this is yet another // blocking hop to the network thread during Close(). 
Further still, the // voice/video/data channels will be cleared on the worker thread. + RTC_DCHECK_RUN_ON(network_thread()); transport_controller_.reset(); port_allocator_->DiscardCandidatePool(); if (network_thread_safety_) { @@ -1786,7 +1880,7 @@ void PeerConnection::Close() { } }); - worker_thread()->Invoke(RTC_FROM_HERE, [this] { + worker_thread()->BlockingCall([this] { RTC_DCHECK_RUN_ON(worker_thread()); worker_thread_safety_->SetNotAlive(); call_.reset(); @@ -1850,64 +1944,60 @@ void PeerConnection::SetConnectionState( connection_state_ = new_state; Observer()->OnConnectionChange(new_state); + // The first connection state change to connected happens once per + // connection which makes it a good point to report metrics. if (new_state == PeerConnectionState::kConnected && !was_ever_connected_) { was_ever_connected_ = true; + ReportFirstConnectUsageMetrics(); + } +} - // The first connection state change to connected happens once per - // connection which makes it a good point to report metrics. - // Record bundle-policy from configuration. Done here from - // connectionStateChange to limit to actually established connections. - BundlePolicyUsage policy = kBundlePolicyUsageMax; - switch (configuration_.bundle_policy) { - case kBundlePolicyBalanced: - policy = kBundlePolicyUsageBalanced; - break; - case kBundlePolicyMaxBundle: - policy = kBundlePolicyUsageMaxBundle; - break; - case kBundlePolicyMaxCompat: - policy = kBundlePolicyUsageMaxCompat; - break; - } - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.BundlePolicy", policy, - kBundlePolicyUsageMax); - - // Record configured ice candidate pool size depending on the - // BUNDLE policy. See - // https://w3c.github.io/webrtc-pc/#dom-rtcconfiguration-icecandidatepoolsize - // The ICE candidate pool size is an optimization and it may be desirable - // to restrict the maximum size of the pre-gathered candidates. - switch (configuration_.bundle_policy) { - case kBundlePolicyBalanced: - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.PeerConnection.CandidatePoolUsage.Balanced", - configuration_.ice_candidate_pool_size, 0, 255, 256); - break; - case kBundlePolicyMaxBundle: - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.PeerConnection.CandidatePoolUsage.MaxBundle", - configuration_.ice_candidate_pool_size, 0, 255, 256); - break; - case kBundlePolicyMaxCompat: - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.PeerConnection.CandidatePoolUsage.MaxCompat", - configuration_.ice_candidate_pool_size, 0, 255, 256); - break; - } - - // Record whether there was a local or remote provisional answer. - ProvisionalAnswerUsage pranswer = kProvisionalAnswerNotUsed; - if (local_description()->GetType() == SdpType::kPrAnswer) { - pranswer = kProvisionalAnswerLocal; - } else if (remote_description()->GetType() == SdpType::kPrAnswer) { - pranswer = kProvisionalAnswerRemote; - } - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.ProvisionalAnswer", - pranswer, kProvisionalAnswerMax); - - // Record the number of configured ICE servers for connected connections. - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.PeerConnection.IceServers.Connected", - configuration_.servers.size(), 0, 31, 32); +void PeerConnection::ReportFirstConnectUsageMetrics() { + // Record bundle-policy from configuration. Done here from + // connectionStateChange to limit to actually established connections. 
+ BundlePolicyUsage policy = kBundlePolicyUsageMax; + switch (configuration_.bundle_policy) { + case kBundlePolicyBalanced: + policy = kBundlePolicyUsageBalanced; + break; + case kBundlePolicyMaxBundle: + policy = kBundlePolicyUsageMaxBundle; + break; + case kBundlePolicyMaxCompat: + policy = kBundlePolicyUsageMaxCompat; + break; + } + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.BundlePolicy", policy, + kBundlePolicyUsageMax); + + // Record whether there was a local or remote provisional answer. + ProvisionalAnswerUsage pranswer = kProvisionalAnswerNotUsed; + if (local_description()->GetType() == SdpType::kPrAnswer) { + pranswer = kProvisionalAnswerLocal; + } else if (remote_description()->GetType() == SdpType::kPrAnswer) { + pranswer = kProvisionalAnswerRemote; + } + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.ProvisionalAnswer", pranswer, + kProvisionalAnswerMax); + + // Record the number of valid / invalid ice-ufrag. We do allow certain + // non-spec ice-char for backward-compat reasons. At this point we know + // that the ufrag/pwd consists of a valid ice-char or one of the four + // not allowed characters since we have passed the IsIceChar check done + // by the p2p transport description on setRemoteDescription calls. + auto transport_infos = remote_description()->description()->transport_infos(); + if (transport_infos.size() > 0) { + auto ice_parameters = transport_infos[0].description.GetIceParameters(); + auto is_invalid_char = [](char c) { + return c == '-' || c == '=' || c == '#' || c == '_'; + }; + bool isUsingInvalidIceCharInUfrag = + absl::c_any_of(ice_parameters.ufrag, is_invalid_char); + bool isUsingInvalidIceCharInPwd = + absl::c_any_of(ice_parameters.pwd, is_invalid_char); + RTC_HISTOGRAM_BOOLEAN( + "WebRTC.PeerConnection.ValidIceChars", + !(isUsingInvalidIceCharInUfrag || isUsingInvalidIceCharInPwd)); } } @@ -1926,6 +2016,7 @@ void PeerConnection::OnIceCandidate( return; } ReportIceCandidateCollected(candidate->candidate()); + ClearStatsCache(); Observer()->OnIceCandidate(candidate.get()); } @@ -1938,8 +2029,6 @@ void PeerConnection::OnIceCandidateError(const std::string& address, return; } Observer()->OnIceCandidateError(address, port, url, error_code, error_text); - // Leftover not to break wpt test during migration to the new API. - Observer()->OnIceCandidateError(address + ":", url, error_code, error_text); } void PeerConnection::OnIceCandidatesRemoved( @@ -1989,10 +2078,6 @@ void PeerConnection::OnSctpDataChannelClosed(DataChannelInterface* channel) { static_cast(channel)); } -SctpDataChannel* PeerConnection::FindDataChannelBySid(int sid) const { - return data_channel_controller_.FindDataChannelBySid(sid); -} - PeerConnection::InitializePortAllocatorResult PeerConnection::InitializePortAllocator_n( const cricket::ServerAddresses& stun_servers, @@ -2011,8 +2096,7 @@ PeerConnection::InitializePortAllocator_n( // by experiment. 
if (configuration.disable_ipv6) { port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6); - } else if (absl::StartsWith(context_->trials().Lookup("WebRTC-IPv6Default"), - "Disabled")) { + } else if (trials().IsDisabled("WebRTC-IPv6Default")) { port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6); } if (configuration.disable_ipv6_on_wifi) { @@ -2091,10 +2175,6 @@ bool PeerConnection::ReconfigurePortAllocator_n( stun_candidate_keepalive_interval); } -cricket::ChannelManager* PeerConnection::channel_manager() const { - return context_->channel_manager(); -} - bool PeerConnection::StartRtcEventLog_w( std::unique_ptr output, int64_t output_period_ms) { @@ -2112,17 +2192,6 @@ void PeerConnection::StopRtcEventLog_w() { } } -cricket::ChannelInterface* PeerConnection::GetChannel( - const std::string& content_name) { - for (const auto& transceiver : rtp_manager()->transceivers()->UnsafeList()) { - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (channel && channel->content_name() == content_name) { - return channel; - } - } - return nullptr; -} - bool PeerConnection::GetSctpSslRole(rtc::SSLRole* role) { RTC_DCHECK_RUN_ON(signaling_thread()); if (!local_description() || !remote_description()) { @@ -2139,13 +2208,25 @@ bool PeerConnection::GetSctpSslRole(rtc::SSLRole* role) { absl::optional dtls_role; if (sctp_mid_s_) { - dtls_role = transport_controller_->GetDtlsRole(*sctp_mid_s_); + dtls_role = network_thread()->BlockingCall([this] { + RTC_DCHECK_RUN_ON(network_thread()); + return transport_controller_->GetDtlsRole(*sctp_mid_n_); + }); if (!dtls_role && sdp_handler_->is_caller().has_value()) { + // This works fine if we are the offerer, but can be a mistake if + // we are the answerer and the remote offer is ACTIVE. In that + // case, we will guess the role wrong. + // TODO(bugs.webrtc.org/13668): Check if this actually happens. + RTC_LOG(LS_ERROR) + << "Possible risk: DTLS role guesser is active, is_caller is " + << *sdp_handler_->is_caller(); dtls_role = *sdp_handler_->is_caller() ? 
rtc::SSL_SERVER : rtc::SSL_CLIENT; } - *role = *dtls_role; - return true; + if (dtls_role) { + *role = *dtls_role; + return true; + } } return false; } @@ -2160,7 +2241,10 @@ bool PeerConnection::GetSslRole(const std::string& content_name, return false; } - auto dtls_role = transport_controller_->GetDtlsRole(content_name); + auto dtls_role = network_thread()->BlockingCall([this, content_name]() { + RTC_DCHECK_RUN_ON(network_thread()); + return transport_controller_->GetDtlsRole(content_name); + }); if (dtls_role) { *role = *dtls_role; return true; @@ -2191,7 +2275,7 @@ std::vector PeerConnection::GetDataChannelStats() const { absl::optional PeerConnection::sctp_transport_name() const { RTC_DCHECK_RUN_ON(signaling_thread()); - if (sctp_mid_s_ && transport_controller_) + if (sctp_mid_s_ && transport_controller_copy_) return sctp_transport_name_s_; return absl::optional(); } @@ -2205,9 +2289,9 @@ cricket::CandidateStatsList PeerConnection::GetPooledCandidateStats() const { RTC_DCHECK_RUN_ON(network_thread()); if (!network_thread_safety_->alive()) return {}; - cricket::CandidateStatsList candidate_states_list; - port_allocator_->GetCandidateStatsFromPooledSessions(&candidate_states_list); - return candidate_states_list; + cricket::CandidateStatsList candidate_stats_list; + port_allocator_->GetCandidateStatsFromPooledSessions(&candidate_stats_list); + return candidate_stats_list; } std::map @@ -2257,7 +2341,7 @@ bool PeerConnection::IceRestartPending(const std::string& content_name) const { } bool PeerConnection::NeedsIceRestart(const std::string& content_name) const { - return network_thread()->Invoke(RTC_FROM_HERE, [this, &content_name] { + return network_thread()->BlockingCall([this, &content_name] { RTC_DCHECK_RUN_ON(network_thread()); return transport_controller_->NeedsIceRestart(content_name); }); @@ -2386,8 +2470,7 @@ bool PeerConnection::GetLocalCandidateMediaIndex( Call::Stats PeerConnection::GetCallStats() { if (!worker_thread()->IsCurrent()) { - return worker_thread()->Invoke( - RTC_FROM_HERE, [this] { return GetCallStats(); }); + return worker_thread()->BlockingCall([this] { return GetCallStats(); }); } RTC_DCHECK_RUN_ON(worker_thread()); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; @@ -2416,11 +2499,11 @@ bool PeerConnection::SetupDataChannelTransport_n(const std::string& mid) { transport_controller_->GetDtlsTransport(mid); if (dtls_transport) { signaling_thread()->PostTask( - ToQueuedTask(signaling_thread_safety_.flag(), - [this, name = dtls_transport->transport_name()] { - RTC_DCHECK_RUN_ON(signaling_thread()); - sctp_transport_name_s_ = std::move(name); - })); + SafeTask(signaling_thread_safety_.flag(), + [this, name = dtls_transport->transport_name()] { + RTC_DCHECK_RUN_ON(signaling_thread()); + sctp_transport_name_s_ = std::move(name); + })); } // Note: setting the data sink and checking initial state must be done last, @@ -2467,50 +2550,6 @@ bool PeerConnection::ValidateBundleSettings( return true; } -void PeerConnection::ReportSdpFormatReceived( - const SessionDescriptionInterface& remote_description) { - int num_audio_mlines = 0; - int num_video_mlines = 0; - int num_audio_tracks = 0; - int num_video_tracks = 0; - for (const ContentInfo& content : - remote_description.description()->contents()) { - cricket::MediaType media_type = content.media_description()->type(); - int num_tracks = std::max( - 1, static_cast(content.media_description()->streams().size())); - if (media_type == cricket::MEDIA_TYPE_AUDIO) { - num_audio_mlines += 1; - num_audio_tracks += 
num_tracks; - } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { - num_video_mlines += 1; - num_video_tracks += num_tracks; - } - } - SdpFormatReceived format = kSdpFormatReceivedNoTracks; - if (num_audio_mlines > 1 || num_video_mlines > 1) { - format = kSdpFormatReceivedComplexUnifiedPlan; - } else if (num_audio_tracks > 1 || num_video_tracks > 1) { - format = kSdpFormatReceivedComplexPlanB; - } else if (num_audio_tracks > 0 || num_video_tracks > 0) { - format = kSdpFormatReceivedSimple; - } - switch (remote_description.GetType()) { - case SdpType::kOffer: - // Historically only offers were counted. - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SdpFormatReceived", - format, kSdpFormatReceivedMax); - break; - case SdpType::kAnswer: - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SdpFormatReceivedAnswer", - format, kSdpFormatReceivedMax); - break; - default: - RTC_LOG(LS_ERROR) << "Can not report SdpFormatReceived for " - << SdpTypeToString(remote_description.GetType()); - break; - } -} - void PeerConnection::ReportSdpBundleUsage( const SessionDescriptionInterface& remote_description) { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -2581,14 +2620,25 @@ void PeerConnection::AddRemoteCandidate(const std::string& mid, const cricket::Candidate& candidate) { RTC_DCHECK_RUN_ON(signaling_thread()); - network_thread()->PostTask(ToQueuedTask( - network_thread_safety_, [this, mid = mid, candidate = candidate] { + if (candidate.network_type() != rtc::ADAPTER_TYPE_UNKNOWN) { + RTC_DLOG(LS_WARNING) << "Using candidate with adapter type set - this " + "should only happen in test"; + } + + // Clear fields that do not make sense as remote candidates. + cricket::Candidate new_candidate(candidate); + new_candidate.set_network_type(rtc::ADAPTER_TYPE_UNKNOWN); + new_candidate.set_relay_protocol(""); + new_candidate.set_underlying_type_for_vpn(rtc::ADAPTER_TYPE_UNKNOWN); + + network_thread()->PostTask(SafeTask( + network_thread_safety_, [this, mid = mid, candidate = new_candidate] { RTC_DCHECK_RUN_ON(network_thread()); std::vector candidates = {candidate}; RTCError error = transport_controller_->AddRemoteCandidates(mid, candidates); if (error.ok()) { - signaling_thread()->PostTask(ToQueuedTask( + signaling_thread()->PostTask(SafeTask( signaling_thread_safety_.flag(), [this, candidate = std::move(candidate)] { ReportRemoteIceCandidateAdded(candidate); @@ -2666,12 +2716,15 @@ void PeerConnection::ReportTransportStats() { rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; std::map> media_types_by_transport_name; - for (const auto& transceiver : rtp_manager()->transceivers()->UnsafeList()) { - if (transceiver->internal()->channel()) { - const std::string& transport_name = - transceiver->internal()->channel()->transport_name(); - media_types_by_transport_name[transport_name].insert( - transceiver->media_type()); + if (ConfiguredForMedia()) { + for (const auto& transceiver : + rtp_manager()->transceivers()->UnsafeList()) { + if (transceiver->internal()->channel()) { + std::string transport_name( + transceiver->internal()->channel()->transport_name()); + media_types_by_transport_name[transport_name].insert( + transceiver->media_type()); + } } } @@ -2819,17 +2872,23 @@ bool PeerConnection::OnTransportChanged( DataChannelTransportInterface* data_channel_transport) { RTC_DCHECK_RUN_ON(network_thread()); bool ret = true; - auto base_channel = GetChannel(mid); - if (base_channel) { - ret = base_channel->SetRtpTransport(rtp_transport); + if (ConfiguredForMedia()) { + for (const auto& transceiver : + 
rtp_manager()->transceivers()->UnsafeList()) { + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (channel && channel->mid() == mid) { + ret = channel->SetRtpTransport(rtp_transport); + } + } } if (mid == sctp_mid_n_) { data_channel_controller_.OnTransportChanged(data_channel_transport); if (dtls_transport) { - signaling_thread()->PostTask(ToQueuedTask( + signaling_thread()->PostTask(SafeTask( signaling_thread_safety_.flag(), - [this, name = dtls_transport->internal()->transport_name()] { + [this, + name = std::string(dtls_transport->internal()->transport_name())] { RTC_DCHECK_RUN_ON(signaling_thread()); sctp_transport_name_s_ = std::move(name); })); @@ -2852,11 +2911,11 @@ void PeerConnection::StartSctpTransport(int local_port, if (!sctp_mid_s_) return; - network_thread()->PostTask(ToQueuedTask( + network_thread()->PostTask(SafeTask( network_thread_safety_, [this, mid = *sctp_mid_s_, local_port, remote_port, max_message_size] { rtc::scoped_refptr sctp_transport = - transport_controller()->GetSctpTransport(mid); + transport_controller_n()->GetSctpTransport(mid); if (sctp_transport) sctp_transport->Start(local_port, remote_port, max_message_size); })); @@ -2873,6 +2932,9 @@ CryptoOptions PeerConnection::GetCryptoOptions() { void PeerConnection::ClearStatsCache() { RTC_DCHECK_RUN_ON(signaling_thread()); + if (legacy_stats_) { + legacy_stats_->InvalidateCache(); + } if (stats_collector_) { stats_collector_->ClearCachedStatsReport(); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h index 7326bccd10..7d0eb18781 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h @@ -18,28 +18,24 @@ #include #include #include -#include #include #include "absl/types/optional.h" #include "api/adaptation/resource.h" #include "api/async_dns_resolver.h" -#include "api/async_resolver_factory.h" -#include "api/audio_options.h" #include "api/candidate.h" #include "api/crypto/crypto_options.h" #include "api/data_channel_interface.h" #include "api/dtls_transport_interface.h" +#include "api/field_trials_view.h" #include "api/ice_transport_interface.h" #include "api/jsep.h" #include "api/media_stream_interface.h" #include "api/media_types.h" -#include "api/packet_socket_factory.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/rtc_event_log_output.h" -#include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" #include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_interface.h" @@ -49,56 +45,49 @@ #include "api/set_local_description_observer_interface.h" #include "api/set_remote_description_observer_interface.h" #include "api/stats/rtc_stats_collector_callback.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/transport/bitrate_settings.h" #include "api/transport/data_channel_transport_interface.h" #include "api/transport/enums.h" #include "api/turn_customizer.h" -#include "api/video/video_bitrate_allocator_factory.h" #include "call/call.h" -#include "media/base/media_channel.h" -#include "media/base/media_engine.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/port.h" #include "p2p/base/port_allocator.h" #include "p2p/base/transport_description.h" -#include "pc/channel.h" #include "pc/channel_interface.h" -#include "pc/channel_manager.h" #include "pc/connection_context.h" #include "pc/data_channel_controller.h" 
#include "pc/data_channel_utils.h" #include "pc/dtls_transport.h" #include "pc/jsep_transport_controller.h" +#include "pc/legacy_stats_collector.h" #include "pc/peer_connection_internal.h" #include "pc/peer_connection_message_handler.h" #include "pc/rtc_stats_collector.h" -#include "pc/rtp_receiver.h" -#include "pc/rtp_sender.h" #include "pc/rtp_transceiver.h" #include "pc/rtp_transmission_manager.h" #include "pc/rtp_transport_internal.h" #include "pc/sctp_data_channel.h" -#include "pc/sctp_transport.h" #include "pc/sdp_offer_answer.h" #include "pc/session_description.h" -#include "pc/stats_collector.h" -#include "pc/stream_collection.h" #include "pc/transceiver_list.h" #include "pc/transport_stats.h" #include "pc/usage_pattern.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/network/sent_packet.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/unique_id_generator.h" #include "rtc_base/weak_ptr.h" +namespace cricket { +class ChannelManager; +} + namespace webrtc { // PeerConnection is the implementation of the PeerConnection object as defined @@ -116,8 +105,7 @@ namespace webrtc { // - The ICE state machine. // - Generating stats. class PeerConnection : public PeerConnectionInternal, - public JsepTransportController::Observer, - public sigslot::has_slots<> { + public JsepTransportController::Observer { public: // Creates a PeerConnection and initializes it with the given values. // If the initialization fails, the function releases the PeerConnection @@ -141,8 +129,15 @@ class PeerConnection : public PeerConnectionInternal, RTCErrorOr> AddTrack( rtc::scoped_refptr track, const std::vector& stream_ids) override; - bool RemoveTrack(RtpSenderInterface* sender) override; - RTCError RemoveTrackNew( + RTCErrorOr> AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector& init_send_encodings) override; + RTCErrorOr> AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector* init_send_encodings); + RTCError RemoveTrackOrError( rtc::scoped_refptr sender) override; RTCErrorOr> AddTransceiver( @@ -187,6 +182,9 @@ class PeerConnection : public PeerConnectionInternal, SignalingState signaling_state() override; IceConnectionState ice_connection_state() override; + IceConnectionState ice_connection_state_internal() override { + return ice_connection_state(); + } IceConnectionState standardized_ice_connection_state() override; PeerConnectionState peer_connection_state() override; IceGatheringState ice_gathering_state() override; @@ -265,7 +263,6 @@ class PeerConnection : public PeerConnectionInternal, return context_->signaling_thread(); } - // PeerConnectionInternal implementation. 
rtc::Thread* network_thread() const final { return context_->network_thread(); } @@ -277,13 +274,16 @@ class PeerConnection : public PeerConnectionInternal, bool initial_offerer() const override { RTC_DCHECK_RUN_ON(signaling_thread()); - return transport_controller_ && transport_controller_->initial_offerer(); + return sdp_handler_->initial_offerer(); } std::vector< rtc::scoped_refptr>> GetTransceiversInternal() const override { RTC_DCHECK_RUN_ON(signaling_thread()); + if (!ConfiguredForMedia()) { + return {}; + } return rtp_manager()->transceivers()->List(); } @@ -312,73 +312,79 @@ class PeerConnection : public PeerConnectionInternal, bool GetSslRole(const std::string& content_name, rtc::SSLRole* role) override; // Functions needed by DataChannelController - void NoteDataAddedEvent() { NoteUsageEvent(UsageEvent::DATA_ADDED); } + void NoteDataAddedEvent() override { NoteUsageEvent(UsageEvent::DATA_ADDED); } // Returns the observer. Will crash on CHECK if the observer is removed. - PeerConnectionObserver* Observer() const; - bool IsClosed() const { + PeerConnectionObserver* Observer() const override; + bool IsClosed() const override { RTC_DCHECK_RUN_ON(signaling_thread()); return !sdp_handler_ || sdp_handler_->signaling_state() == PeerConnectionInterface::kClosed; } // Get current SSL role used by SCTP's underlying transport. - bool GetSctpSslRole(rtc::SSLRole* role); + bool GetSctpSslRole(rtc::SSLRole* role) override; // Handler for the "channel closed" signal - void OnSctpDataChannelClosed(DataChannelInterface* channel); + void OnSctpDataChannelClosed(DataChannelInterface* channel) override; bool ShouldFireNegotiationNeededEvent(uint32_t event_id) override; // Functions needed by SdpOfferAnswerHandler - StatsCollector* stats() { + LegacyStatsCollector* legacy_stats() override { RTC_DCHECK_RUN_ON(signaling_thread()); - return stats_.get(); + return legacy_stats_.get(); } - DataChannelController* data_channel_controller() { + DataChannelController* data_channel_controller() override { RTC_DCHECK_RUN_ON(signaling_thread()); return &data_channel_controller_; } - bool dtls_enabled() const { + bool dtls_enabled() const override { RTC_DCHECK_RUN_ON(signaling_thread()); return dtls_enabled_; } - const PeerConnectionInterface::RTCConfiguration* configuration() const { + const PeerConnectionInterface::RTCConfiguration* configuration() + const override { RTC_DCHECK_RUN_ON(signaling_thread()); return &configuration_; } - PeerConnectionMessageHandler* message_handler() { + PeerConnectionMessageHandler* message_handler() override { RTC_DCHECK_RUN_ON(signaling_thread()); return &message_handler_; } - RtpTransmissionManager* rtp_manager() { return rtp_manager_.get(); } - const RtpTransmissionManager* rtp_manager() const { + RtpTransmissionManager* rtp_manager() override { return rtp_manager_.get(); } + const RtpTransmissionManager* rtp_manager() const override { return rtp_manager_.get(); } - cricket::ChannelManager* channel_manager() const; - JsepTransportController* transport_controller() { + JsepTransportController* transport_controller_s() override { + RTC_DCHECK_RUN_ON(signaling_thread()); + return transport_controller_copy_; + } + JsepTransportController* transport_controller_n() override { + RTC_DCHECK_RUN_ON(network_thread()); return transport_controller_.get(); } - cricket::PortAllocator* port_allocator() { return port_allocator_.get(); } - Call* call_ptr() { return call_ptr_; } + cricket::PortAllocator* port_allocator() override { + return port_allocator_.get(); + } + Call* call_ptr() override 
{ return call_ptr_; } ConnectionContext* context() { return context_.get(); } - const PeerConnectionFactoryInterface::Options* options() const { + const PeerConnectionFactoryInterface::Options* options() const override { return &options_; } - void SetIceConnectionState(IceConnectionState new_state); - void NoteUsageEvent(UsageEvent event); + void SetIceConnectionState(IceConnectionState new_state) override; + void NoteUsageEvent(UsageEvent event) override; // Asynchronously adds a remote candidate on the network thread. void AddRemoteCandidate(const std::string& mid, - const cricket::Candidate& candidate); - - // Report the UMA metric SdpFormatReceived for the given remote description. - void ReportSdpFormatReceived( - const SessionDescriptionInterface& remote_description); + const cricket::Candidate& candidate) override; // Report the UMA metric BundleUsage for the given remote description. void ReportSdpBundleUsage( - const SessionDescriptionInterface& remote_description); + const SessionDescriptionInterface& remote_description) override; + + // Report several UMA metrics on establishing the connection. + void ReportFirstConnectUsageMetrics() RTC_RUN_ON(signaling_thread()); // Returns true if the PeerConnection is configured to use Unified Plan // semantics for creating offers/answers and setting local/remote @@ -386,34 +392,34 @@ class PeerConnection : public PeerConnectionInternal, // to the user. If this is false, Plan B semantics are assumed. // TODO(bugs.webrtc.org/8530): Flip the default to be Unified Plan once // sufficient time has passed. - bool IsUnifiedPlan() const { + bool IsUnifiedPlan() const override { RTC_DCHECK_RUN_ON(signaling_thread()); return is_unified_plan_; } bool ValidateBundleSettings( const cricket::SessionDescription* desc, const std::map& - bundle_groups_by_mid); + bundle_groups_by_mid) override; // Returns the MID for the data section associated with the // SCTP data channel, if it has been set. If no data // channels are configured this will return nullopt. - absl::optional GetDataMid() const; + absl::optional GetDataMid() const override; - void SetSctpDataMid(const std::string& mid); + void SetSctpDataMid(const std::string& mid) override; - void ResetSctpDataMid(); + void ResetSctpDataMid() override; // Asynchronously calls SctpTransport::Start() on the network thread for // `sctp_mid()` if set. Called as part of setting the local description. void StartSctpTransport(int local_port, int remote_port, - int max_message_size); + int max_message_size) override; // Returns the CryptoOptions for this PeerConnection. This will always // return the RTCConfiguration.crypto_options if set and will only default // back to the PeerConnectionFactory settings if nothing was set. - CryptoOptions GetCryptoOptions(); + CryptoOptions GetCryptoOptions() override; // Internal implementation for AddTransceiver family of methods. If // `fire_callback` is set, fires OnRenegotiationNeeded callback if successful. @@ -421,20 +427,22 @@ class PeerConnection : public PeerConnectionInternal, cricket::MediaType media_type, rtc::scoped_refptr track, const RtpTransceiverInit& init, - bool fire_callback = true); + bool fire_callback = true) override; // Returns rtp transport, result can not be nullptr. RtpTransportInternal* GetRtpTransport(const std::string& mid); // Returns true if SRTP (either using DTLS-SRTP or SDES) is required by // this session. 
- bool SrtpRequired() const; + bool SrtpRequired() const override; - bool SetupDataChannelTransport_n(const std::string& mid) - RTC_RUN_ON(network_thread()); - void TeardownDataChannelTransport_n() RTC_RUN_ON(network_thread()); - cricket::ChannelInterface* GetChannel(const std::string& content_name) + bool SetupDataChannelTransport_n(const std::string& mid) override RTC_RUN_ON(network_thread()); + void TeardownDataChannelTransport_n() override RTC_RUN_ON(network_thread()); + + const FieldTrialsView& trials() const override { return *trials_; } + + bool ConfiguredForMedia() const; // Functions made public for testing. void ReturnHistogramVeryQuicklyForTesting() { @@ -459,7 +467,7 @@ class PeerConnection : public PeerConnectionInternal, RTCError Initialize( const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies); - void InitializeTransportController_n( + JsepTransportController* InitializeTransportController_n( const RTCConfiguration& configuration, const PeerConnectionDependencies& dependencies) RTC_RUN_ON(network_thread()); @@ -498,11 +506,6 @@ class PeerConnection : public PeerConnectionInternal, void OnNegotiationNeeded(); - // Returns the specified SCTP DataChannel in sctp_data_channels_, - // or nullptr if not found. - SctpDataChannel* FindDataChannelBySid(int sid) const - RTC_RUN_ON(signaling_thread()); - // Called when first configuring the port allocator. struct InitializePortAllocatorResult { bool enable_ipv6; @@ -601,6 +604,12 @@ class PeerConnection : public PeerConnectionInternal, InitializeRtcpCallback(); const rtc::scoped_refptr context_; + // Field trials active for this PeerConnection is the first of: + // a) Specified in PeerConnectionDependencies (owned). + // b) Accessed via ConnectionContext (e.g PeerConnectionFactoryDependencies> + // c) Created as Default (FieldTrialBasedConfig). + const webrtc::AlwaysValidPointer + trials_; const PeerConnectionFactoryInterface::Options options_; PeerConnectionObserver* observer_ RTC_GUARDED_BY(signaling_thread()) = nullptr; @@ -653,16 +662,21 @@ class PeerConnection : public PeerConnectionInternal, // pointer). Call* const call_ptr_; - std::unique_ptr stats_ + std::unique_ptr legacy_stats_ RTC_GUARDED_BY(signaling_thread()); // A pointer is passed to senders_ rtc::scoped_refptr stats_collector_ RTC_GUARDED_BY(signaling_thread()); const std::string session_id_; - std::unique_ptr - transport_controller_; // TODO(bugs.webrtc.org/9987): Accessed on both - // signaling and network thread. + // The transport controller is set and used on the network thread. + // Some functions pass the value of the transport_controller_ pointer + // around as arguments while running on the signaling thread; these + // use the transport_controller_copy. + std::unique_ptr transport_controller_ + RTC_GUARDED_BY(network_thread()); + JsepTransportController* transport_controller_copy_ + RTC_GUARDED_BY(signaling_thread()) = nullptr; // `sctp_mid_` is the content name (MID) in SDP. 
// Note: this is used as the data channel MID by both SCTP and data channel diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc index 3ab969dc32..cec909f03e 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc @@ -10,7 +10,7 @@ #include "pc/peer_connection_factory.h" -#include +#include #include #include "absl/strings/match.h" @@ -46,10 +46,8 @@ #include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/field_trial_units.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/system/file_wrapper.h" @@ -61,12 +59,9 @@ CreateModularPeerConnectionFactory( // The PeerConnectionFactory must be created on the signaling thread. if (dependencies.signaling_thread && !dependencies.signaling_thread->IsCurrent()) { - return dependencies.signaling_thread - ->Invoke>( - RTC_FROM_HERE, [&dependencies] { - return CreateModularPeerConnectionFactory( - std::move(dependencies)); - }); + return dependencies.signaling_thread->BlockingCall([&dependencies] { + return CreateModularPeerConnectionFactory(std::move(dependencies)); + }); } auto pc_factory = PeerConnectionFactory::Create(std::move(dependencies)); @@ -105,7 +100,8 @@ PeerConnectionFactory::PeerConnectionFactory( transport_controller_send_factory_( (dependencies->transport_controller_send_factory) ? std::move(dependencies->transport_controller_send_factory) - : std::make_unique()) {} + : std::make_unique()), + metronome_(std::move(dependencies->metronome)) {} PeerConnectionFactory::PeerConnectionFactory( PeerConnectionFactoryDependencies dependencies) @@ -127,17 +123,17 @@ RtpCapabilities PeerConnectionFactory::GetRtpSenderCapabilities( switch (kind) { case cricket::MEDIA_TYPE_AUDIO: { cricket::AudioCodecs cricket_codecs; - channel_manager()->GetSupportedAudioSendCodecs(&cricket_codecs); - return ToRtpCapabilities( - cricket_codecs, - channel_manager()->GetDefaultEnabledAudioRtpHeaderExtensions()); + cricket_codecs = media_engine()->voice().send_codecs(); + auto extensions = + GetDefaultEnabledRtpHeaderExtensions(media_engine()->voice()); + return ToRtpCapabilities(cricket_codecs, extensions); } case cricket::MEDIA_TYPE_VIDEO: { cricket::VideoCodecs cricket_codecs; - channel_manager()->GetSupportedVideoSendCodecs(&cricket_codecs); - return ToRtpCapabilities( - cricket_codecs, - channel_manager()->GetDefaultEnabledVideoRtpHeaderExtensions()); + cricket_codecs = media_engine()->video().send_codecs(); + auto extensions = + GetDefaultEnabledRtpHeaderExtensions(media_engine()->video()); + return ToRtpCapabilities(cricket_codecs, extensions); } case cricket::MEDIA_TYPE_DATA: return RtpCapabilities(); @@ -154,17 +150,17 @@ RtpCapabilities PeerConnectionFactory::GetRtpReceiverCapabilities( switch (kind) { case cricket::MEDIA_TYPE_AUDIO: { cricket::AudioCodecs cricket_codecs; - channel_manager()->GetSupportedAudioReceiveCodecs(&cricket_codecs); - return ToRtpCapabilities( - cricket_codecs, - channel_manager()->GetDefaultEnabledAudioRtpHeaderExtensions()); + cricket_codecs = media_engine()->voice().recv_codecs(); + auto extensions = + GetDefaultEnabledRtpHeaderExtensions(media_engine()->voice()); + return ToRtpCapabilities(cricket_codecs, extensions); } case 
cricket::MEDIA_TYPE_VIDEO: { - cricket::VideoCodecs cricket_codecs; - channel_manager()->GetSupportedVideoReceiveCodecs(&cricket_codecs); - return ToRtpCapabilities( - cricket_codecs, - channel_manager()->GetDefaultEnabledVideoRtpHeaderExtensions()); + cricket::VideoCodecs cricket_codecs = + media_engine()->video().recv_codecs(context_->use_rtx()); + auto extensions = + GetDefaultEnabledRtpHeaderExtensions(media_engine()->video()); + return ToRtpCapabilities(cricket_codecs, extensions); } case cricket::MEDIA_TYPE_DATA: return RtpCapabilities(); @@ -185,12 +181,18 @@ PeerConnectionFactory::CreateAudioSource(const cricket::AudioOptions& options) { bool PeerConnectionFactory::StartAecDump(FILE* file, int64_t max_size_bytes) { RTC_DCHECK_RUN_ON(worker_thread()); - return channel_manager()->StartAecDump(FileWrapper(file), max_size_bytes); + return media_engine()->voice().StartAecDump(FileWrapper(file), + max_size_bytes); } void PeerConnectionFactory::StopAecDump() { RTC_DCHECK_RUN_ON(worker_thread()); - channel_manager()->StopAecDump(); + media_engine()->voice().StopAecDump(); +} + +cricket::MediaEngineInterface* PeerConnectionFactory::media_engine() const { + RTC_DCHECK(context_); + return context_->media_engine(); } RTCErrorOr> @@ -198,9 +200,6 @@ PeerConnectionFactory::CreatePeerConnectionOrError( const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies) { RTC_DCHECK_RUN_ON(signaling_thread()); - RTC_DCHECK(!(dependencies.allocator && dependencies.packet_socket_factory)) - << "You can't set both allocator and packet_socket_factory; " - "the former is going away (see bugs.webrtc.org/7447"; // Set internal defaults if optional dependencies are not set. if (!dependencies.cert_generator) { @@ -209,15 +208,11 @@ PeerConnectionFactory::CreatePeerConnectionOrError( network_thread()); } if (!dependencies.allocator) { - rtc::PacketSocketFactory* packet_socket_factory; - if (dependencies.packet_socket_factory) - packet_socket_factory = dependencies.packet_socket_factory.get(); - else - packet_socket_factory = context_->default_socket_factory(); - + const FieldTrialsView* trials = + dependencies.trials ? dependencies.trials.get() : &field_trials(); dependencies.allocator = std::make_unique( - context_->default_network_manager(), packet_socket_factory, - configuration.turn_customizer); + context_->default_network_manager(), context_->default_socket_factory(), + configuration.turn_customizer, /*relay_port_factory=*/nullptr, trials); dependencies.allocator->SetPortRange( configuration.port_allocator_config.min_port, configuration.port_allocator_config.max_port); @@ -239,12 +234,14 @@ PeerConnectionFactory::CreatePeerConnectionOrError( dependencies.allocator->SetVpnList(configuration.vpn_list); std::unique_ptr event_log = - worker_thread()->Invoke>( - RTC_FROM_HERE, [this] { return CreateRtcEventLog_w(); }); + worker_thread()->BlockingCall([this] { return CreateRtcEventLog_w(); }); - std::unique_ptr call = worker_thread()->Invoke>( - RTC_FROM_HERE, - [this, &event_log] { return CreateCall_w(event_log.get()); }); + const FieldTrialsView* trials = + dependencies.trials ? 
dependencies.trials.get() : &field_trials(); + std::unique_ptr call = + worker_thread()->BlockingCall([this, &event_log, trials] { + return CreateCall_w(event_log.get(), *trials); + }); auto result = PeerConnection::Create(context_, options_, std::move(event_log), std::move(call), configuration, @@ -275,8 +272,9 @@ rtc::scoped_refptr PeerConnectionFactory::CreateVideoTrack( const std::string& id, VideoTrackSourceInterface* source) { RTC_DCHECK(signaling_thread()->IsCurrent()); - rtc::scoped_refptr track( - VideoTrack::Create(id, source, worker_thread())); + rtc::scoped_refptr track = VideoTrack::Create( + id, rtc::scoped_refptr(source), + worker_thread()); return VideoTrackProxy::Create(signaling_thread(), worker_thread(), track); } @@ -284,35 +282,31 @@ rtc::scoped_refptr PeerConnectionFactory::CreateAudioTrack( const std::string& id, AudioSourceInterface* source) { RTC_DCHECK(signaling_thread()->IsCurrent()); - rtc::scoped_refptr track(AudioTrack::Create(id, source)); + rtc::scoped_refptr track = + AudioTrack::Create(id, rtc::scoped_refptr(source)); return AudioTrackProxy::Create(signaling_thread(), track); } -cricket::ChannelManager* PeerConnectionFactory::channel_manager() { - return context_->channel_manager(); -} - std::unique_ptr PeerConnectionFactory::CreateRtcEventLog_w() { RTC_DCHECK_RUN_ON(worker_thread()); auto encoding_type = RtcEventLog::EncodingType::Legacy; if (IsTrialEnabled("WebRTC-RtcEventLogNewFormat")) encoding_type = RtcEventLog::EncodingType::NewFormat; - return event_log_factory_ - ? event_log_factory_->CreateRtcEventLog(encoding_type) - : std::make_unique(); + return event_log_factory_ ? event_log_factory_->Create(encoding_type) + : std::make_unique(); } std::unique_ptr PeerConnectionFactory::CreateCall_w( - RtcEventLog* event_log) { + RtcEventLog* event_log, + const FieldTrialsView& field_trials) { RTC_DCHECK_RUN_ON(worker_thread()); webrtc::Call::Config call_config(event_log, network_thread()); - if (!channel_manager()->media_engine() || !context_->call_factory()) { + if (!media_engine() || !context_->call_factory()) { return nullptr; } - call_config.audio_state = - channel_manager()->media_engine()->voice().GetAudioState(); + call_config.audio_state = media_engine()->voice().GetAudioState(); FieldTrialParameter min_bandwidth("min", DataRate::KilobitsPerSec(30)); @@ -321,7 +315,7 @@ std::unique_ptr PeerConnectionFactory::CreateCall_w( FieldTrialParameter max_bandwidth("max", DataRate::KilobitsPerSec(2000)); ParseFieldTrial({&min_bandwidth, &start_bandwidth, &max_bandwidth}, - trials().Lookup("WebRTC-PcFactoryDefaultBitrates")); + field_trials.Lookup("WebRTC-PcFactoryDefaultBitrates")); call_config.bitrate_config.min_bitrate_bps = rtc::saturated_cast(min_bandwidth->bps()); @@ -344,15 +338,16 @@ std::unique_ptr PeerConnectionFactory::CreateCall_w( RTC_LOG(LS_INFO) << "Using default network controller factory"; } - call_config.trials = &trials(); + call_config.trials = &field_trials; call_config.rtp_transport_controller_send_factory = transport_controller_send_factory_.get(); + call_config.metronome = metronome_.get(); return std::unique_ptr( context_->call_factory()->CreateCall(call_config)); } bool PeerConnectionFactory::IsTrialEnabled(absl::string_view key) const { - return absl::StartsWith(trials().Lookup(key), "Enabled"); + return absl::StartsWith(field_trials().Lookup(key), "Enabled"); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h index 
4946ec6ea2..036329f57a 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h @@ -21,8 +21,10 @@ #include "absl/strings/string_view.h" #include "api/audio_options.h" #include "api/fec_controller.h" +#include "api/field_trials_view.h" #include "api/media_stream_interface.h" #include "api/media_types.h" +#include "api/metronome/metronome.h" #include "api/neteq/neteq_factory.h" #include "api/network_state_predictor.h" #include "api/peer_connection_interface.h" @@ -35,17 +37,19 @@ #include "api/task_queue/task_queue_factory.h" #include "api/transport/network_control.h" #include "api/transport/sctp_transport_factory_interface.h" -#include "api/transport/webrtc_key_value_config.h" #include "call/call.h" #include "call/rtp_transport_controller_send_factory_interface.h" #include "p2p/base/port_allocator.h" -#include "pc/channel_manager.h" #include "pc/connection_context.h" #include "rtc_base/checks.h" #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" +namespace cricket { +class ChannelManager; +} + namespace rtc { class BasicNetworkManager; class BasicPacketSocketFactory; @@ -99,8 +103,6 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { return context_->sctp_transport_factory(); } - virtual cricket::ChannelManager* channel_manager(); - rtc::Thread* signaling_thread() const { // This method can be called on a different thread when the factory is // created in CreatePeerConnectionFactory(). @@ -114,7 +116,11 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { return options_; } - const WebRtcKeyValueConfig& trials() const { return context_->trials(); } + const FieldTrialsView& field_trials() const { + return context_->field_trials(); + } + + cricket::MediaEngineInterface* media_engine() const; protected: // Constructor used by the static Create() method. Modifies the dependencies. @@ -132,12 +138,10 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { rtc::Thread* network_thread() const { return context_->network_thread(); } bool IsTrialEnabled(absl::string_view key) const; - const cricket::ChannelManager* channel_manager() const { - return context_->channel_manager(); - } std::unique_ptr CreateRtcEventLog_w(); - std::unique_ptr CreateCall_w(RtcEventLog* event_log); + std::unique_ptr CreateCall_w(RtcEventLog* event_log, + const FieldTrialsView& field_trials); rtc::scoped_refptr context_; PeerConnectionFactoryInterface::Options options_ @@ -152,6 +156,7 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { std::unique_ptr neteq_factory_; const std::unique_ptr transport_controller_send_factory_; + std::unique_ptr metronome_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_internal.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_internal.h index 6f97612914..ecf8fbfc83 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_internal.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_internal.h @@ -19,20 +19,121 @@ #include "api/peer_connection_interface.h" #include "call/call.h" +#include "pc/jsep_transport_controller.h" +#include "pc/peer_connection_message_handler.h" #include "pc/rtp_transceiver.h" +#include "pc/rtp_transmission_manager.h" #include "pc/sctp_data_channel.h" namespace webrtc { -// Internal interface for extra PeerConnection methods. 
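The factory now keeps the Metronome handed in through its dependencies and resolves codecs and field trials through media_engine()/field_trials() rather than ChannelManager. A minimal wiring sketch under those assumptions; the dependency fields used here (trials, metronome) are the ones this diff references, header paths are best-effort, and real builds usually also supply a media engine, call factory and task queue factory:

#include <memory>
#include <utility>

#include "api/peer_connection_interface.h"           // CreateModularPeerConnectionFactory
#include "api/transport/field_trial_based_config.h"  // FieldTrialBasedConfig (best-effort path)
#include "rtc_base/thread.h"

// Hypothetical helper: returns a data-channel-only factory. The signaling
// thread must outlive the returned factory.
rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> MakeFactory(
    rtc::Thread* signaling_thread) {
  webrtc::PeerConnectionFactoryDependencies deps;
  deps.signaling_thread = signaling_thread;
  // Explicit trials; otherwise ConnectionContext falls back to a default
  // FieldTrialBasedConfig, as the trials_ comment earlier in this diff notes.
  deps.trials = std::make_unique<webrtc::FieldTrialBasedConfig>();
  // deps.metronome = ...;  // optional; moved into metronome_ by the ctor above
  // No media engine or call factory is supplied here, so connections created
  // from this factory are data-channel only and the new ConfiguredForMedia()
  // checks keep the media-specific paths inert.
  return webrtc::CreateModularPeerConnectionFactory(std::move(deps));
}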
-class PeerConnectionInternal : public PeerConnectionInterface { +class DataChannelController; +class LegacyStatsCollector; + +// This interface defines the functions that are needed for +// SdpOfferAnswerHandler to access PeerConnection internal state. +class PeerConnectionSdpMethods { public: - virtual rtc::Thread* network_thread() const = 0; - virtual rtc::Thread* worker_thread() const = 0; + virtual ~PeerConnectionSdpMethods() = default; // The SDP session ID as defined by RFC 3264. virtual std::string session_id() const = 0; + // Returns true if the ICE restart flag above was set, and no ICE restart has + // occurred yet for this transport (by applying a local description with + // changed ufrag/password). If the transport has been deleted as a result of + // bundling, returns false. + virtual bool NeedsIceRestart(const std::string& content_name) const = 0; + + virtual absl::optional sctp_mid() const = 0; + + // Functions below this comment are known to only be accessed + // from SdpOfferAnswerHandler. + // Return a pointer to the active configuration. + virtual const PeerConnectionInterface::RTCConfiguration* configuration() + const = 0; + + // Report the UMA metric BundleUsage for the given remote description. + virtual void ReportSdpBundleUsage( + const SessionDescriptionInterface& remote_description) = 0; + + virtual PeerConnectionMessageHandler* message_handler() = 0; + virtual RtpTransmissionManager* rtp_manager() = 0; + virtual const RtpTransmissionManager* rtp_manager() const = 0; + virtual bool dtls_enabled() const = 0; + virtual const PeerConnectionFactoryInterface::Options* options() const = 0; + + // Returns the CryptoOptions for this PeerConnection. This will always + // return the RTCConfiguration.crypto_options if set and will only default + // back to the PeerConnectionFactory settings if nothing was set. + virtual CryptoOptions GetCryptoOptions() = 0; + virtual JsepTransportController* transport_controller_s() = 0; + virtual JsepTransportController* transport_controller_n() = 0; + virtual DataChannelController* data_channel_controller() = 0; + virtual cricket::PortAllocator* port_allocator() = 0; + virtual LegacyStatsCollector* legacy_stats() = 0; + // Returns the observer. Will crash on CHECK if the observer is removed. + virtual PeerConnectionObserver* Observer() const = 0; + virtual bool GetSctpSslRole(rtc::SSLRole* role) = 0; + virtual PeerConnectionInterface::IceConnectionState + ice_connection_state_internal() = 0; + virtual void SetIceConnectionState( + PeerConnectionInterface::IceConnectionState new_state) = 0; + virtual void NoteUsageEvent(UsageEvent event) = 0; + virtual bool IsClosed() const = 0; + // Returns true if the PeerConnection is configured to use Unified Plan + // semantics for creating offers/answers and setting local/remote + // descriptions. If this is true the RtpTransceiver API will also be available + // to the user. If this is false, Plan B semantics are assumed. + // TODO(bugs.webrtc.org/8530): Flip the default to be Unified Plan once + // sufficient time has passed. + virtual bool IsUnifiedPlan() const = 0; + virtual bool ValidateBundleSettings( + const cricket::SessionDescription* desc, + const std::map& + bundle_groups_by_mid) = 0; + + virtual absl::optional GetDataMid() const = 0; + // Internal implementation for AddTransceiver family of methods. If + // `fire_callback` is set, fires OnRenegotiationNeeded callback if successful. 
+ virtual RTCErrorOr> + AddTransceiver(cricket::MediaType media_type, + rtc::scoped_refptr track, + const RtpTransceiverInit& init, + bool fire_callback = true) = 0; + // Asynchronously calls SctpTransport::Start() on the network thread for + // `sctp_mid()` if set. Called as part of setting the local description. + virtual void StartSctpTransport(int local_port, + int remote_port, + int max_message_size) = 0; + + // Asynchronously adds a remote candidate on the network thread. + virtual void AddRemoteCandidate(const std::string& mid, + const cricket::Candidate& candidate) = 0; + + virtual Call* call_ptr() = 0; + // Returns true if SRTP (either using DTLS-SRTP or SDES) is required by + // this session. + virtual bool SrtpRequired() const = 0; + virtual bool SetupDataChannelTransport_n(const std::string& mid) = 0; + virtual void TeardownDataChannelTransport_n() = 0; + virtual void SetSctpDataMid(const std::string& mid) = 0; + virtual void ResetSctpDataMid() = 0; + + virtual const FieldTrialsView& trials() const = 0; + + virtual void ClearStatsCache() = 0; +}; + +// Functions defined in this class are called by other objects, +// but not by SdpOfferAnswerHandler. +class PeerConnectionInternal : public PeerConnectionInterface, + public PeerConnectionSdpMethods, + public sigslot::has_slots<> { + public: + virtual rtc::Thread* network_thread() const = 0; + virtual rtc::Thread* worker_thread() const = 0; + // Returns true if we were the initial offerer. virtual bool initial_offerer() const = 0; @@ -50,7 +151,6 @@ class PeerConnectionInternal : public PeerConnectionInterface { } virtual absl::optional sctp_transport_name() const = 0; - virtual absl::optional sctp_mid() const = 0; virtual cricket::CandidateStatsList GetPooledCandidateStats() const = 0; @@ -71,15 +171,13 @@ class PeerConnectionInternal : public PeerConnectionInterface { // Returns true if there was an ICE restart initiated by the remote offer. virtual bool IceRestartPending(const std::string& content_name) const = 0; - // Returns true if the ICE restart flag above was set, and no ICE restart has - // occurred yet for this transport (by applying a local description with - // changed ufrag/password). If the transport has been deleted as a result of - // bundling, returns false. - virtual bool NeedsIceRestart(const std::string& content_name) const = 0; - // Get SSL role for an arbitrary m= section (handles bundling correctly). 
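With the interface split above, code that serves SdpOfferAnswerHandler can depend on PeerConnectionSdpMethods alone instead of the concrete PeerConnection. A hedged sketch with a hypothetical helper (OfferStateProbe is not part of this diff); the accessors it calls are signaling-thread-only, per the RTC_DCHECK_RUN_ON guards in peer_connection.h:

#include "pc/peer_connection_internal.h"
#include "rtc_base/logging.h"

// Hypothetical helper: reads state through the split interface only, so it
// can be exercised against any PeerConnectionSdpMethods implementation.
class OfferStateProbe {
 public:
  explicit OfferStateProbe(webrtc::PeerConnectionSdpMethods* pc) : pc_(pc) {}

  // Call on the signaling thread.
  void Dump() const {
    if (pc_->IsClosed())
      return;
    RTC_LOG(LS_INFO) << "session=" << pc_->session_id()
                     << " unified_plan=" << pc_->IsUnifiedPlan()
                     << " dtls=" << pc_->dtls_enabled();
  }

 private:
  webrtc::PeerConnectionSdpMethods* const pc_;
};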
virtual bool GetSslRole(const std::string& content_name, rtc::SSLRole* role) = 0; + // Functions needed by DataChannelController + virtual void NoteDataAddedEvent() {} + // Handler for the "channel closed" signal + virtual void OnSctpDataChannelClosed(DataChannelInterface* channel) {} }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.cc index 54f75f00a9..2d674aad4d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.cc @@ -18,163 +18,68 @@ #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/stats_types.h" -#include "pc/stats_collector_interface.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "pc/legacy_stats_collector_interface.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" namespace webrtc { - namespace { -enum { - MSG_SET_SESSIONDESCRIPTION_SUCCESS = 0, - MSG_SET_SESSIONDESCRIPTION_FAILED, - MSG_CREATE_SESSIONDESCRIPTION_FAILED, - MSG_GETSTATS, - MSG_REPORT_USAGE_PATTERN, -}; - -struct SetSessionDescriptionMsg : public rtc::MessageData { - explicit SetSessionDescriptionMsg( - webrtc::SetSessionDescriptionObserver* observer) - : observer(observer) {} - - rtc::scoped_refptr observer; - RTCError error; -}; - -struct CreateSessionDescriptionMsg : public rtc::MessageData { - explicit CreateSessionDescriptionMsg( - webrtc::CreateSessionDescriptionObserver* observer) - : observer(observer) {} - - rtc::scoped_refptr observer; - RTCError error; -}; - -struct GetStatsMsg : public rtc::MessageData { - GetStatsMsg(webrtc::StatsObserver* observer, - StatsCollectorInterface* stats, - webrtc::MediaStreamTrackInterface* track) - : observer(observer), stats(stats), track(track) {} - rtc::scoped_refptr observer; - StatsCollectorInterface* stats; - rtc::scoped_refptr track; -}; - -struct RequestUsagePatternMsg : public rtc::MessageData { - explicit RequestUsagePatternMsg(std::function func) - : function(func) {} - std::function function; -}; - -} // namespace - -PeerConnectionMessageHandler::~PeerConnectionMessageHandler() { - // Process all pending notifications in the message queue. If we don't do - // this, requests will linger and not know they succeeded or failed. - rtc::MessageList list; - signaling_thread()->Clear(this, rtc::MQID_ANY, &list); - for (auto& msg : list) { - if (msg.message_id == MSG_CREATE_SESSIONDESCRIPTION_FAILED) { - // Processing CreateOffer() and CreateAnswer() messages ensures their - // observers are invoked even if the PeerConnection is destroyed early. - OnMessage(&msg); - } else { - // TODO(hbos): Consider processing all pending messages. This would mean - // that SetLocalDescription() and SetRemoteDescription() observers are - // informed of successes and failures; this is currently NOT the case. 
- delete msg.pdata; - } - } +template +rtc::scoped_refptr WrapScoped(T* ptr) { + return rtc::scoped_refptr(ptr); } -void PeerConnectionMessageHandler::OnMessage(rtc::Message* msg) { - RTC_DCHECK_RUN_ON(signaling_thread()); - switch (msg->message_id) { - case MSG_SET_SESSIONDESCRIPTION_SUCCESS: { - SetSessionDescriptionMsg* param = - static_cast(msg->pdata); - param->observer->OnSuccess(); - delete param; - break; - } - case MSG_SET_SESSIONDESCRIPTION_FAILED: { - SetSessionDescriptionMsg* param = - static_cast(msg->pdata); - param->observer->OnFailure(std::move(param->error)); - delete param; - break; - } - case MSG_CREATE_SESSIONDESCRIPTION_FAILED: { - CreateSessionDescriptionMsg* param = - static_cast(msg->pdata); - param->observer->OnFailure(std::move(param->error)); - delete param; - break; - } - case MSG_GETSTATS: { - GetStatsMsg* param = static_cast(msg->pdata); - StatsReports reports; - param->stats->GetStats(param->track, &reports); - param->observer->OnComplete(reports); - delete param; - break; - } - case MSG_REPORT_USAGE_PATTERN: { - RequestUsagePatternMsg* param = - static_cast(msg->pdata); - param->function(); - delete param; - break; - } - default: - RTC_DCHECK_NOTREACHED() << "Not implemented"; - break; - } -} +} // namespace void PeerConnectionMessageHandler::PostSetSessionDescriptionSuccess( SetSessionDescriptionObserver* observer) { - SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer); - signaling_thread()->Post(RTC_FROM_HERE, this, - MSG_SET_SESSIONDESCRIPTION_SUCCESS, msg); + signaling_thread_->PostTask( + SafeTask(safety_.flag(), + [observer = WrapScoped(observer)] { observer->OnSuccess(); })); } void PeerConnectionMessageHandler::PostSetSessionDescriptionFailure( SetSessionDescriptionObserver* observer, RTCError&& error) { RTC_DCHECK(!error.ok()); - SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer); - msg->error = std::move(error); - signaling_thread()->Post(RTC_FROM_HERE, this, - MSG_SET_SESSIONDESCRIPTION_FAILED, msg); + signaling_thread_->PostTask(SafeTask( + safety_.flag(), + [observer = WrapScoped(observer), error = std::move(error)]() mutable { + observer->OnFailure(std::move(error)); + })); } void PeerConnectionMessageHandler::PostCreateSessionDescriptionFailure( CreateSessionDescriptionObserver* observer, RTCError error) { RTC_DCHECK(!error.ok()); - CreateSessionDescriptionMsg* msg = new CreateSessionDescriptionMsg(observer); - msg->error = std::move(error); - signaling_thread()->Post(RTC_FROM_HERE, this, - MSG_CREATE_SESSIONDESCRIPTION_FAILED, msg); + // Do not protect this task with the safety_.flag() to ensure + // observer is invoked even if the PeerConnection is destroyed early. 
+ signaling_thread_->PostTask( + [observer = WrapScoped(observer), error = std::move(error)]() mutable { + observer->OnFailure(std::move(error)); + }); } void PeerConnectionMessageHandler::PostGetStats( StatsObserver* observer, - StatsCollectorInterface* stats, + LegacyStatsCollectorInterface* legacy_stats, MediaStreamTrackInterface* track) { - signaling_thread()->Post(RTC_FROM_HERE, this, MSG_GETSTATS, - new GetStatsMsg(observer, stats, track)); + signaling_thread_->PostTask( + SafeTask(safety_.flag(), [observer = WrapScoped(observer), legacy_stats, + track = WrapScoped(track)] { + StatsReports reports; + legacy_stats->GetStats(track.get(), &reports); + observer->OnComplete(reports); + })); } void PeerConnectionMessageHandler::RequestUsagePatternReport( std::function func, int delay_ms) { - signaling_thread()->PostDelayed(RTC_FROM_HERE, delay_ms, this, - MSG_REPORT_USAGE_PATTERN, - new RequestUsagePatternMsg(func)); + signaling_thread_->PostDelayedTask(SafeTask(safety_.flag(), std::move(func)), + TimeDelta::Millis(delay_ms)); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.h index c19f5a4e50..1351a279b6 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.h @@ -18,27 +18,18 @@ #include "api/peer_connection_interface.h" #include "api/rtc_error.h" #include "api/stats_types.h" -#include "pc/stats_collector_interface.h" -#include "rtc_base/message_handler.h" -#include "rtc_base/thread.h" -#include "rtc_base/thread_message.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "pc/legacy_stats_collector_interface.h" namespace webrtc { -class CreateSessionDescriptionObserver; -class SetSessionDescriptionObserver; -class StatsCollectorInterface; -class StatsObserver; -class MediaStreamTrackInterface; - -class PeerConnectionMessageHandler : public rtc::MessageHandler { +class PeerConnectionMessageHandler { public: explicit PeerConnectionMessageHandler(rtc::Thread* signaling_thread) : signaling_thread_(signaling_thread) {} - ~PeerConnectionMessageHandler(); + ~PeerConnectionMessageHandler() = default; - // Implements MessageHandler. 
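The rewritten handler drops the OnMessage() switch in favour of lambdas wrapped in SafeTask, so callbacks still pending at destruction are simply dropped (the create-failure path above deliberately skips the flag so its observer always fires). A minimal sketch of that ownership pattern with a hypothetical Owner class:

#include "api/task_queue/pending_task_safety_flag.h"
#include "api/task_queue/task_queue_base.h"

// Hypothetical owner, not part of this diff.
class Owner {
 public:
  explicit Owner(webrtc::TaskQueueBase* queue) : queue_(queue) {}

  void PostPing() {
    // If `this` is destroyed before the task runs, ~ScopedTaskSafety marks the
    // flag as not-alive and the closure is skipped instead of touching freed
    // memory -- the same guarantee the Post*SessionDescription* helpers above
    // rely on.
    queue_->PostTask(webrtc::SafeTask(safety_.flag(), [this] { ++pings_; }));
  }

 private:
  webrtc::ScopedTaskSafety safety_;
  webrtc::TaskQueueBase* const queue_;
  int pings_ = 0;
};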
- void OnMessage(rtc::Message* msg) override; void PostSetSessionDescriptionSuccess( SetSessionDescriptionObserver* observer); void PostSetSessionDescriptionFailure(SetSessionDescriptionObserver* observer, @@ -47,14 +38,13 @@ class PeerConnectionMessageHandler : public rtc::MessageHandler { CreateSessionDescriptionObserver* observer, RTCError error); void PostGetStats(StatsObserver* observer, - StatsCollectorInterface* stats, + LegacyStatsCollectorInterface* legacy_stats, MediaStreamTrackInterface* track); void RequestUsagePatternReport(std::function, int delay_ms); private: - rtc::Thread* signaling_thread() const { return signaling_thread_; } - - rtc::Thread* const signaling_thread_; + ScopedTaskSafety safety_; + TaskQueueBase* const signaling_thread_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_proxy.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_proxy.h index 7601c9d053..6db27f2dd5 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_proxy.h @@ -35,8 +35,14 @@ PROXY_METHOD2(RTCErrorOr>, AddTrack, rtc::scoped_refptr, const std::vector&) -PROXY_METHOD1(bool, RemoveTrack, RtpSenderInterface*) -PROXY_METHOD1(RTCError, RemoveTrackNew, rtc::scoped_refptr) +PROXY_METHOD3(RTCErrorOr>, + AddTrack, + rtc::scoped_refptr, + const std::vector&, + const std::vector&) +PROXY_METHOD1(RTCError, + RemoveTrackOrError, + rtc::scoped_refptr) PROXY_METHOD1(RTCErrorOr>, AddTransceiver, rtc::scoped_refptr) diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.cc index 3b4d28f0d9..653d8b7b0d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.cc @@ -12,8 +12,6 @@ #include -#include -#include #include #include @@ -24,7 +22,6 @@ #include "rtc_base/checks.h" #include "rtc_base/gunit.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" #include "test/gtest.h" namespace webrtc { @@ -137,7 +134,7 @@ std::unique_ptr PeerConnectionWrapper::CreateSdp( rtc::FunctionView fn, std::string* error_out) { auto observer = rtc::make_ref_counted(); - fn(observer); + fn(observer.get()); EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout); if (error_out && !observer->result()) { *error_out = observer->error(); @@ -168,8 +165,7 @@ bool PeerConnectionWrapper::SetRemoteDescription( bool PeerConnectionWrapper::SetRemoteDescription( std::unique_ptr desc, RTCError* error_out) { - rtc::scoped_refptr observer = - new FakeSetRemoteDescriptionObserver(); + auto observer = rtc::make_ref_counted(); pc()->SetRemoteDescription(std::move(desc), observer); EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout); bool ok = observer->error().ok(); @@ -182,7 +178,7 @@ bool PeerConnectionWrapper::SetSdp( rtc::FunctionView fn, std::string* error_out) { auto observer = rtc::make_ref_counted(); - fn(observer); + fn(observer.get()); EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout); if (error_out && !observer->result()) { *error_out = observer->error(); @@ -280,7 +276,8 @@ rtc::scoped_refptr PeerConnectionWrapper::CreateAudioTrack( rtc::scoped_refptr PeerConnectionWrapper::CreateVideoTrack( const std::string& label) { - return pc_factory()->CreateVideoTrack(label, FakeVideoTrackSource::Create()); + return pc_factory()->CreateVideoTrack(label, + FakeVideoTrackSource::Create().get()); } rtc::scoped_refptr PeerConnectionWrapper::AddTrack( @@ 
-292,6 +289,16 @@ rtc::scoped_refptr PeerConnectionWrapper::AddTrack( return result.MoveValue(); } +rtc::scoped_refptr PeerConnectionWrapper::AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector& init_send_encodings) { + RTCErrorOr> result = + pc()->AddTrack(track, stream_ids, init_send_encodings); + EXPECT_EQ(RTCErrorType::NONE, result.error().type()); + return result.MoveValue(); +} + rtc::scoped_refptr PeerConnectionWrapper::AddAudioTrack( const std::string& track_label, const std::vector& stream_ids) { @@ -332,7 +339,7 @@ bool PeerConnectionWrapper::IsIceConnected() { rtc::scoped_refptr PeerConnectionWrapper::GetStats() { auto callback = rtc::make_ref_counted(); - pc()->GetStats(callback); + pc()->GetStats(callback.get()); EXPECT_TRUE_WAIT(callback->called(), kDefaultTimeout); return callback->report(); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.h index 4d2bc284a7..c503a48099 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.h @@ -149,6 +149,11 @@ class PeerConnectionWrapper { rtc::scoped_refptr track, const std::vector& stream_ids = {}); + rtc::scoped_refptr AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector& init_send_encodings); + // Calls the underlying PeerConnection's AddTrack method with an audio media // stream track not bound to any source. rtc::scoped_refptr AddAudioTrack( diff --git a/TMessagesProj/jni/voip/webrtc/pc/proxy.h b/TMessagesProj/jni/voip/webrtc/pc/proxy.h index 565ae80175..2be115fdf3 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/proxy.h +++ b/TMessagesProj/jni/voip/webrtc/pc/proxy.h @@ -10,7 +10,6 @@ // This file contains Macros for creating proxies for webrtc MediaStream and // PeerConnection classes. -// TODO(deadbeef): Move this to pc/; this is part of the implementation. // The proxied objects are initialized with either one or two thread // objects that operations can be proxied to: The primary and secondary @@ -53,12 +52,12 @@ // The variant defined with BEGIN_PRIMARY_PROXY_MAP is unaware of // the secondary thread, and invokes all methods on the primary thread. // -// The variant defined with BEGIN_OWNED_PROXY_MAP does not use -// refcounting, and instead just takes ownership of the object being proxied. #ifndef PC_PROXY_H_ #define PC_PROXY_H_ +#include + #include #include #include @@ -66,11 +65,8 @@ #include #include "api/scoped_refptr.h" -#include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" #include "rtc_base/event.h" -#include "rtc_base/message_handler.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/string_utils.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/thread.h" @@ -79,10 +75,6 @@ #define RTC_DISABLE_PROXY_TRACE_EVENTS #endif -namespace rtc { -class Location; -} - namespace webrtc { namespace proxy_internal { @@ -93,7 +85,7 @@ class ScopedTrace { ~ScopedTrace(); private: - const char* const class_and_method_name_; + [[maybe_unused]] const char* const class_and_method_name_; }; } // namespace proxy_internal @@ -123,7 +115,7 @@ class ReturnType { }; template -class MethodCall : public QueuedTask { +class MethodCall { public: typedef R (C::*Method)(Args...); MethodCall(C* c, Method m, Args&&... 
args) @@ -131,23 +123,20 @@ class MethodCall : public QueuedTask { m_(m), args_(std::forward_as_tuple(std::forward(args)...)) {} - R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { + R Marshal(rtc::Thread* t) { if (t->IsCurrent()) { Invoke(std::index_sequence_for()); } else { - t->PostTask(std::unique_ptr(this)); + t->PostTask([this] { + Invoke(std::index_sequence_for()); + event_.Set(); + }); event_.Wait(rtc::Event::kForever); } return r_.moved_result(); } private: - bool Run() override { - Invoke(std::index_sequence_for()); - event_.Set(); - return false; - } - template void Invoke(std::index_sequence) { r_.Invoke(c_, m_, std::move(std::get(args_))...); @@ -161,7 +150,7 @@ class MethodCall : public QueuedTask { }; template -class ConstMethodCall : public QueuedTask { +class ConstMethodCall { public: typedef R (C::*Method)(Args...) const; ConstMethodCall(const C* c, Method m, Args&&... args) @@ -169,23 +158,20 @@ class ConstMethodCall : public QueuedTask { m_(m), args_(std::forward_as_tuple(std::forward(args)...)) {} - R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { + R Marshal(rtc::Thread* t) { if (t->IsCurrent()) { Invoke(std::index_sequence_for()); } else { - t->PostTask(std::unique_ptr(this)); + t->PostTask([this] { + Invoke(std::index_sequence_for()); + event_.Set(); + }); event_.Wait(rtc::Event::kForever); } return r_.moved_result(); } private: - bool Run() override { - Invoke(std::index_sequence_for()); - event_.Set(); - return false; - } - template void Invoke(std::index_sequence) { r_.Invoke(c_, m_, std::move(std::get(args_))...); @@ -202,61 +188,66 @@ class ConstMethodCall : public QueuedTask { #define PROXY_STRINGIZE(x) PROXY_STRINGIZE_IMPL(x) // Helper macros to reduce code duplication. -#define PROXY_MAP_BOILERPLATE(c) \ - template \ - class c##ProxyWithInternal; \ - typedef c##ProxyWithInternal c##Proxy; \ - template \ - class c##ProxyWithInternal : public c##Interface { \ - protected: \ - static constexpr char proxy_name_[] = #c "Proxy"; \ - typedef c##Interface C; \ - \ - public: \ - const INTERNAL_CLASS* internal() const { return c_; } \ - INTERNAL_CLASS* internal() { return c_; } +#define PROXY_MAP_BOILERPLATE(class_name) \ + template \ + class class_name##ProxyWithInternal; \ + typedef class_name##ProxyWithInternal \ + class_name##Proxy; \ + template \ + class class_name##ProxyWithInternal : public class_name##Interface { \ + protected: \ + static constexpr char proxy_name_[] = #class_name "Proxy"; \ + typedef class_name##Interface C; \ + \ + public: \ + const INTERNAL_CLASS* internal() const { return c(); } \ + INTERNAL_CLASS* internal() { return c(); } // clang-format off // clang-format would put the semicolon alone, // leading to a presubmit error (cpplint.py) -#define END_PROXY_MAP(c) \ - }; \ - template \ - constexpr char c##ProxyWithInternal::proxy_name_[]; +#define END_PROXY_MAP(class_name) \ + }; \ + template \ + constexpr char class_name##ProxyWithInternal::proxy_name_[]; // clang-format on -#define PRIMARY_PROXY_MAP_BOILERPLATE(c) \ - protected: \ - c##ProxyWithInternal(rtc::Thread* primary_thread, INTERNAL_CLASS* c) \ - : primary_thread_(primary_thread), c_(c) {} \ - \ - private: \ +#define PRIMARY_PROXY_MAP_BOILERPLATE(class_name) \ + protected: \ + class_name##ProxyWithInternal(rtc::Thread* primary_thread, \ + rtc::scoped_refptr c) \ + : primary_thread_(primary_thread), c_(std::move(c)) {} \ + \ + private: \ mutable rtc::Thread* primary_thread_; -#define SECONDARY_PROXY_MAP_BOILERPLATE(c) \ - protected: \ - 
c##ProxyWithInternal(rtc::Thread* primary_thread, \ - rtc::Thread* secondary_thread, INTERNAL_CLASS* c) \ - : primary_thread_(primary_thread), \ - secondary_thread_(secondary_thread), \ - c_(c) {} \ - \ - private: \ - mutable rtc::Thread* primary_thread_; \ +#define SECONDARY_PROXY_MAP_BOILERPLATE(class_name) \ + protected: \ + class_name##ProxyWithInternal(rtc::Thread* primary_thread, \ + rtc::Thread* secondary_thread, \ + rtc::scoped_refptr c) \ + : primary_thread_(primary_thread), \ + secondary_thread_(secondary_thread), \ + c_(std::move(c)) {} \ + \ + private: \ + mutable rtc::Thread* primary_thread_; \ mutable rtc::Thread* secondary_thread_; // Note that the destructor is protected so that the proxy can only be // destroyed via RefCountInterface. -#define REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \ - protected: \ - ~c##ProxyWithInternal() { \ - MethodCall call( \ - this, &c##ProxyWithInternal::DestroyInternal); \ - call.Marshal(RTC_FROM_HERE, destructor_thread()); \ - } \ - \ - private: \ - void DestroyInternal() { c_ = nullptr; } \ +#define REFCOUNTED_PROXY_MAP_BOILERPLATE(class_name) \ + protected: \ + ~class_name##ProxyWithInternal() { \ + MethodCall call( \ + this, &class_name##ProxyWithInternal::DestroyInternal); \ + call.Marshal(destructor_thread()); \ + } \ + \ + private: \ + const INTERNAL_CLASS* c() const { return c_.get(); } \ + INTERNAL_CLASS* c() { return c_.get(); } \ + void DestroyInternal() { c_ = nullptr; } \ rtc::scoped_refptr c_; // Note: This doesn't use a unique_ptr, because it intends to handle a corner @@ -264,50 +255,41 @@ class ConstMethodCall : public QueuedTask { // this proxy object. If relying on a unique_ptr to delete the object, its // inner pointer would be set to null before this reentrant callback would have // a chance to run, resulting in a segfault. 
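With QueuedTask gone, Marshal() now posts a plain lambda and blocks on an event until it has run. Condensed into a free function for illustration (a paraphrase of the flow above, not a drop-in replacement):

#include "rtc_base/event.h"
#include "rtc_base/thread.h"

// Run `fn` on `t`, blocking the calling thread until it has finished.
template <typename Closure>
void RunBlockingOn(rtc::Thread* t, Closure fn) {
  if (t->IsCurrent()) {
    fn();
    return;
  }
  rtc::Event done;
  // Capturing by reference is safe: this frame cannot unwind before
  // done.Wait() returns.
  t->PostTask([&] {
    fn();
    done.Set();
  });
  done.Wait(rtc::Event::kForever);
}

This mirrors what the old QueuedTask::Run() override did, minus the rtc::Location bookkeeping.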
-#define OWNED_PROXY_MAP_BOILERPLATE(c) \ - public: \ - ~c##ProxyWithInternal() { \ - MethodCall call( \ - this, &c##ProxyWithInternal::DestroyInternal); \ - call.Marshal(RTC_FROM_HERE, destructor_thread()); \ - } \ - \ - private: \ - void DestroyInternal() { delete c_; } \ +#define OWNED_PROXY_MAP_BOILERPLATE(class_name) \ + public: \ + ~class_name##ProxyWithInternal() { \ + MethodCall call( \ + this, &class_name##ProxyWithInternal::DestroyInternal); \ + call.Marshal(destructor_thread()); \ + } \ + \ + private: \ + const INTERNAL_CLASS* c() const { return c_; } \ + INTERNAL_CLASS* c() { return c_; } \ + void DestroyInternal() { delete c_; } \ INTERNAL_CLASS* c_; -#define BEGIN_PRIMARY_PROXY_MAP(c) \ - PROXY_MAP_BOILERPLATE(c) \ - PRIMARY_PROXY_MAP_BOILERPLATE(c) \ - REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \ +#define BEGIN_PRIMARY_PROXY_MAP(class_name) \ + PROXY_MAP_BOILERPLATE(class_name) \ + PRIMARY_PROXY_MAP_BOILERPLATE(class_name) \ + REFCOUNTED_PROXY_MAP_BOILERPLATE(class_name) \ public: \ - static rtc::scoped_refptr Create( \ - rtc::Thread* primary_thread, INTERNAL_CLASS* c) { \ - return rtc::make_ref_counted(primary_thread, c); \ - } - -#define BEGIN_PROXY_MAP(c) \ - PROXY_MAP_BOILERPLATE(c) \ - SECONDARY_PROXY_MAP_BOILERPLATE(c) \ - REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \ - public: \ - static rtc::scoped_refptr Create( \ - rtc::Thread* primary_thread, rtc::Thread* secondary_thread, \ - INTERNAL_CLASS* c) { \ - return rtc::make_ref_counted(primary_thread, \ - secondary_thread, c); \ + static rtc::scoped_refptr Create( \ + rtc::Thread* primary_thread, rtc::scoped_refptr c) { \ + return rtc::make_ref_counted( \ + primary_thread, std::move(c)); \ } -#define BEGIN_OWNED_PROXY_MAP(c) \ - PROXY_MAP_BOILERPLATE(c) \ - SECONDARY_PROXY_MAP_BOILERPLATE(c) \ - OWNED_PROXY_MAP_BOILERPLATE(c) \ +#define BEGIN_PROXY_MAP(class_name) \ + PROXY_MAP_BOILERPLATE(class_name) \ + SECONDARY_PROXY_MAP_BOILERPLATE(class_name) \ + REFCOUNTED_PROXY_MAP_BOILERPLATE(class_name) \ public: \ - static std::unique_ptr Create( \ + static rtc::scoped_refptr Create( \ rtc::Thread* primary_thread, rtc::Thread* secondary_thread, \ - std::unique_ptr c) { \ - return std::unique_ptr(new c##ProxyWithInternal( \ - primary_thread, secondary_thread, c.release())); \ + rtc::scoped_refptr c) { \ + return rtc::make_ref_counted( \ + primary_thread, secondary_thread, std::move(c)); \ } #define PROXY_PRIMARY_THREAD_DESTRUCTOR() \ @@ -336,127 +318,127 @@ class ConstMethodCall : public QueuedTask { #endif // if defined(RTC_DISABLE_PROXY_TRACE_EVENTS) -#define PROXY_METHOD0(r, method) \ - r method() override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c_, &C::method); \ - return call.Marshal(RTC_FROM_HERE, primary_thread_); \ +#define PROXY_METHOD0(r, method) \ + r method() override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c(), &C::method); \ + return call.Marshal(primary_thread_); \ } -#define PROXY_CONSTMETHOD0(r, method) \ - r method() const override { \ - TRACE_BOILERPLATE(method); \ - ConstMethodCall call(c_, &C::method); \ - return call.Marshal(RTC_FROM_HERE, primary_thread_); \ +#define PROXY_CONSTMETHOD0(r, method) \ + r method() const override { \ + TRACE_BOILERPLATE(method); \ + ConstMethodCall call(c(), &C::method); \ + return call.Marshal(primary_thread_); \ } -#define PROXY_METHOD1(r, method, t1) \ - r method(t1 a1) override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c_, &C::method, std::move(a1)); \ - return call.Marshal(RTC_FROM_HERE, primary_thread_); \ +#define PROXY_METHOD1(r, method, t1) \ + 
r method(t1 a1) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c(), &C::method, std::move(a1)); \ + return call.Marshal(primary_thread_); \ } -#define PROXY_CONSTMETHOD1(r, method, t1) \ - r method(t1 a1) const override { \ - TRACE_BOILERPLATE(method); \ - ConstMethodCall call(c_, &C::method, std::move(a1)); \ - return call.Marshal(RTC_FROM_HERE, primary_thread_); \ +#define PROXY_CONSTMETHOD1(r, method, t1) \ + r method(t1 a1) const override { \ + TRACE_BOILERPLATE(method); \ + ConstMethodCall call(c(), &C::method, std::move(a1)); \ + return call.Marshal(primary_thread_); \ } -#define PROXY_METHOD2(r, method, t1, t2) \ - r method(t1 a1, t2 a2) override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c_, &C::method, std::move(a1), \ - std::move(a2)); \ - return call.Marshal(RTC_FROM_HERE, primary_thread_); \ +#define PROXY_METHOD2(r, method, t1, t2) \ + r method(t1 a1, t2 a2) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c(), &C::method, std::move(a1), \ + std::move(a2)); \ + return call.Marshal(primary_thread_); \ } -#define PROXY_METHOD3(r, method, t1, t2, t3) \ - r method(t1 a1, t2 a2, t3 a3) override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c_, &C::method, std::move(a1), \ - std::move(a2), std::move(a3)); \ - return call.Marshal(RTC_FROM_HERE, primary_thread_); \ +#define PROXY_METHOD3(r, method, t1, t2, t3) \ + r method(t1 a1, t2 a2, t3 a3) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c(), &C::method, std::move(a1), \ + std::move(a2), std::move(a3)); \ + return call.Marshal(primary_thread_); \ } -#define PROXY_METHOD4(r, method, t1, t2, t3, t4) \ - r method(t1 a1, t2 a2, t3 a3, t4 a4) override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c_, &C::method, std::move(a1), \ - std::move(a2), std::move(a3), \ - std::move(a4)); \ - return call.Marshal(RTC_FROM_HERE, primary_thread_); \ +#define PROXY_METHOD4(r, method, t1, t2, t3, t4) \ + r method(t1 a1, t2 a2, t3 a3, t4 a4) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c(), &C::method, std::move(a1), \ + std::move(a2), std::move(a3), \ + std::move(a4)); \ + return call.Marshal(primary_thread_); \ } -#define PROXY_METHOD5(r, method, t1, t2, t3, t4, t5) \ - r method(t1 a1, t2 a2, t3 a3, t4 a4, t5 a5) override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c_, &C::method, std::move(a1), \ - std::move(a2), std::move(a3), \ - std::move(a4), std::move(a5)); \ - return call.Marshal(RTC_FROM_HERE, primary_thread_); \ +#define PROXY_METHOD5(r, method, t1, t2, t3, t4, t5) \ + r method(t1 a1, t2 a2, t3 a3, t4 a4, t5 a5) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c(), &C::method, std::move(a1), \ + std::move(a2), std::move(a3), \ + std::move(a4), std::move(a5)); \ + return call.Marshal(primary_thread_); \ } // Define methods which should be invoked on the secondary thread. 
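For orientation, a hedged usage sketch of the reworked macros with a hypothetical FooInterface (the real maps, such as the PeerConnection proxy earlier in this diff, follow the same shape): PROXY_METHOD*/PROXY_CONSTMETHOD* marshal to the primary (signaling) thread, while the PROXY_SECONDARY_* variants defined below marshal to the secondary thread.

#include "api/scoped_refptr.h"
#include "pc/proxy.h"
#include "rtc_base/ref_count.h"

namespace webrtc {

// Hypothetical interface, for illustration only.
class FooInterface : public rtc::RefCountInterface {
 public:
  virtual void Start() = 0;
  virtual int PendingPackets() const = 0;

 protected:
  ~FooInterface() override = default;
};

BEGIN_PROXY_MAP(Foo)
PROXY_PRIMARY_THREAD_DESTRUCTOR()
PROXY_METHOD0(void, Start)                         // marshals to primary_thread_
PROXY_SECONDARY_CONSTMETHOD0(int, PendingPackets)  // marshals to secondary_thread_
END_PROXY_MAP(Foo)

}  // namespace webrtc

// Construction goes through the new scoped_refptr-based Create(), e.g.:
//   auto proxy = webrtc::FooProxy::Create(signaling_thread, worker_thread,
//                                         rtc::make_ref_counted<FooImpl>());
// where FooImpl is a hypothetical implementation of FooInterface.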
-#define PROXY_SECONDARY_METHOD0(r, method) \ - r method() override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c_, &C::method); \ - return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ +#define PROXY_SECONDARY_METHOD0(r, method) \ + r method() override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c(), &C::method); \ + return call.Marshal(secondary_thread_); \ } -#define PROXY_SECONDARY_CONSTMETHOD0(r, method) \ - r method() const override { \ - TRACE_BOILERPLATE(method); \ - ConstMethodCall call(c_, &C::method); \ - return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ +#define PROXY_SECONDARY_CONSTMETHOD0(r, method) \ + r method() const override { \ + TRACE_BOILERPLATE(method); \ + ConstMethodCall call(c(), &C::method); \ + return call.Marshal(secondary_thread_); \ } -#define PROXY_SECONDARY_METHOD1(r, method, t1) \ - r method(t1 a1) override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c_, &C::method, std::move(a1)); \ - return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ +#define PROXY_SECONDARY_METHOD1(r, method, t1) \ + r method(t1 a1) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c(), &C::method, std::move(a1)); \ + return call.Marshal(secondary_thread_); \ } -#define PROXY_SECONDARY_CONSTMETHOD1(r, method, t1) \ - r method(t1 a1) const override { \ - TRACE_BOILERPLATE(method); \ - ConstMethodCall call(c_, &C::method, std::move(a1)); \ - return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ +#define PROXY_SECONDARY_CONSTMETHOD1(r, method, t1) \ + r method(t1 a1) const override { \ + TRACE_BOILERPLATE(method); \ + ConstMethodCall call(c(), &C::method, std::move(a1)); \ + return call.Marshal(secondary_thread_); \ } -#define PROXY_SECONDARY_METHOD2(r, method, t1, t2) \ - r method(t1 a1, t2 a2) override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c_, &C::method, std::move(a1), \ - std::move(a2)); \ - return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ +#define PROXY_SECONDARY_METHOD2(r, method, t1, t2) \ + r method(t1 a1, t2 a2) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c(), &C::method, std::move(a1), \ + std::move(a2)); \ + return call.Marshal(secondary_thread_); \ } -#define PROXY_SECONDARY_CONSTMETHOD2(r, method, t1, t2) \ - r method(t1 a1, t2 a2) const override { \ - TRACE_BOILERPLATE(method); \ - ConstMethodCall call(c_, &C::method, std::move(a1), \ - std::move(a2)); \ - return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ +#define PROXY_SECONDARY_CONSTMETHOD2(r, method, t1, t2) \ + r method(t1 a1, t2 a2) const override { \ + TRACE_BOILERPLATE(method); \ + ConstMethodCall call(c(), &C::method, std::move(a1), \ + std::move(a2)); \ + return call.Marshal(secondary_thread_); \ } -#define PROXY_SECONDARY_METHOD3(r, method, t1, t2, t3) \ - r method(t1 a1, t2 a2, t3 a3) override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c_, &C::method, std::move(a1), \ - std::move(a2), std::move(a3)); \ - return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ +#define PROXY_SECONDARY_METHOD3(r, method, t1, t2, t3) \ + r method(t1 a1, t2 a2, t3 a3) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c(), &C::method, std::move(a1), \ + std::move(a2), std::move(a3)); \ + return call.Marshal(secondary_thread_); \ } -#define PROXY_SECONDARY_CONSTMETHOD3(r, method, t1, t2) \ - r method(t1 a1, t2 a2, t3 a3) const override { \ - TRACE_BOILERPLATE(method); \ - ConstMethodCall call(c_, &C::method, std::move(a1), \ - std::move(a2), std::move(a3)); \ - return call.Marshal(RTC_FROM_HERE, 
secondary_thread_); \ +#define PROXY_SECONDARY_CONSTMETHOD3(r, method, t1, t2) \ + r method(t1 a1, t2 a2, t3 a3) const override { \ + TRACE_BOILERPLATE(method); \ + ConstMethodCall call(c(), &C::method, std::move(a1), \ + std::move(a2), std::move(a3)); \ + return call.Marshal(secondary_thread_); \ } // For use when returning purely const state (set during construction). diff --git a/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc b/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc index dc890e737c..1058d1cbf9 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc @@ -13,15 +13,17 @@ #include #include +#include +#include #include "absl/algorithm/container.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_format.h" -#include "rtc_base/thread.h" +#include "rtc_base/trace_event.h" namespace webrtc { @@ -50,18 +52,17 @@ class RemoteAudioSource::AudioDataProxy : public AudioSinkInterface { }; RemoteAudioSource::RemoteAudioSource( - rtc::Thread* worker_thread, + TaskQueueBase* worker_thread, OnAudioChannelGoneAction on_audio_channel_gone_action) - : main_thread_(rtc::Thread::Current()), + : main_thread_(TaskQueueBase::Current()), worker_thread_(worker_thread), on_audio_channel_gone_action_(on_audio_channel_gone_action), - state_(MediaSourceInterface::kLive) { + state_(MediaSourceInterface::kInitializing) { RTC_DCHECK(main_thread_); RTC_DCHECK(worker_thread_); } RemoteAudioSource::~RemoteAudioSource() { - RTC_DCHECK_RUN_ON(main_thread_); RTC_DCHECK(audio_observers_.empty()); if (!sinks_.empty()) { RTC_LOG(LS_WARNING) @@ -134,11 +135,6 @@ void RemoteAudioSource::AddSink(AudioTrackSinkInterface* sink) { RTC_DCHECK_RUN_ON(main_thread_); RTC_DCHECK(sink); - if (state_ != MediaSourceInterface::kLive) { - RTC_LOG(LS_ERROR) << "Can't register sink as the source isn't live."; - return; - } - MutexLock lock(&sink_lock_); RTC_DCHECK(!absl::c_linear_search(sinks_, sink)); sinks_.push_back(sink); @@ -154,6 +150,7 @@ void RemoteAudioSource::RemoveSink(AudioTrackSinkInterface* sink) { void RemoteAudioSource::OnData(const AudioSinkInterface::Data& audio) { // Called on the externally-owned audio callback thread, via/from webrtc. + TRACE_EVENT0("webrtc", "RemoteAudioSource::OnData"); MutexLock lock(&sink_lock_); for (auto* sink : sinks_) { // When peerconnection acts as an audio source, it should not provide @@ -168,24 +165,18 @@ void RemoteAudioSource::OnAudioChannelGone() { if (on_audio_channel_gone_action_ != OnAudioChannelGoneAction::kEnd) { return; } - // Called when the audio channel is deleted. It may be the worker thread - // in libjingle or may be a different worker thread. - // This object needs to live long enough for the cleanup logic in OnMessage to - // run, so take a reference to it as the data. Sometimes the message may not - // be processed (because the thread was destroyed shortly after this call), - // but that is fine because the thread destructor will take care of destroying - // the message data which will release the reference on RemoteAudioSource. 
-  main_thread_->Post(RTC_FROM_HERE, this, 0,
-                     new rtc::ScopedRefMessageData<RemoteAudioSource>(this));
-}
-
-void RemoteAudioSource::OnMessage(rtc::Message* msg) {
-  RTC_DCHECK_RUN_ON(main_thread_);
-  sinks_.clear();
-  SetState(MediaSourceInterface::kEnded);
-  // Will possibly delete this RemoteAudioSource since it is reference counted
-  // in the message.
-  delete msg->pdata;
+  // Called when the audio channel is deleted. It may be the worker thread or
+  // may be a different task queue.
+  // This object needs to live long enough for the cleanup logic in the posted
+  // task to run, so take a reference to it. Sometimes the task may not be
+  // processed (because the task queue was destroyed shortly after this call),
+  // but that is fine because the task queue destructor will take care of
+  // destroying task which will release the reference on RemoteAudioSource.
+  rtc::scoped_refptr<RemoteAudioSource> thiz(this);
+  main_thread_->PostTask([thiz = std::move(thiz)] {
+    thiz->sinks_.clear();
+    thiz->SetState(MediaSourceInterface::kEnded);
+  });
 }

 }  // namespace webrtc
diff --git a/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.h b/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.h
index 2eae073272..d294a0f0fb 100644
--- a/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.h
+++ b/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.h
@@ -20,25 +20,16 @@
 #include "api/call/audio_sink.h"
 #include "api/media_stream_interface.h"
 #include "api/notifier.h"
+#include "api/task_queue/task_queue_base.h"
 #include "media/base/media_channel.h"
-#include "pc/channel.h"
-#include "rtc_base/message_handler.h"
 #include "rtc_base/synchronization/mutex.h"
-#include "rtc_base/thread.h"
-#include "rtc_base/thread_message.h"
-
-namespace rtc {
-struct Message;
-class Thread;
-}  // namespace rtc

 namespace webrtc {

 // This class implements the audio source used by the remote audio track.
 // This class works by configuring itself as a sink with the underlying media
 // engine, then when receiving data will fan out to all added sinks.
-class RemoteAudioSource : public Notifier<AudioSourceInterface>,
-                          rtc::MessageHandler {
+class RemoteAudioSource : public Notifier<AudioSourceInterface> {
  public:
   // In Unified Plan, receivers map to m= sections and their tracks and sources
   // survive SSRCs being reconfigured. The life cycle of the remote audio source
@@ -53,7 +44,7 @@ class RemoteAudioSource : public Notifier<AudioSourceInterface>,
   };

   explicit RemoteAudioSource(
-      rtc::Thread* worker_thread,
+      TaskQueueBase* worker_thread,
       OnAudioChannelGoneAction on_audio_channel_gone_action);

   // Register and unregister remote audio source with the underlying media
@@ -86,10 +77,8 @@ class RemoteAudioSource : public Notifier<AudioSourceInterface>,
   void OnData(const AudioSinkInterface::Data& audio);
   void OnAudioChannelGone();

-  void OnMessage(rtc::Message* msg) override;
-
-  rtc::Thread* const main_thread_;
-  rtc::Thread* const worker_thread_;
+  TaskQueueBase* const main_thread_;
+  TaskQueueBase* const worker_thread_;
   const OnAudioChannelGoneAction on_audio_channel_gone_action_;
   std::list<AudioObserver*> audio_observers_;
   Mutex sink_lock_;
diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc
index 025feb9298..4a55467c15 100644
--- a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc
+++ b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc
@@ -10,26 +10,28 @@
 #include "pc/rtc_stats_collector.h"

+#include
 #include
-#include
 #include
 #include
 #include
 #include
+#include
 #include
 #include

+#include "absl/functional/bind_front.h"
+#include "absl/strings/string_view.h"
 #include "api/array_view.h"
 #include "api/candidate.h"
+#include "api/dtls_transport_interface.h"
 #include "api/media_stream_interface.h"
 #include "api/rtp_parameters.h"
-#include "api/rtp_receiver_interface.h"
-#include "api/rtp_sender_interface.h"
 #include "api/sequence_checker.h"
 #include "api/stats/rtc_stats.h"
 #include "api/stats/rtcstats_objects.h"
-#include "api/task_queue/queued_task.h"
+#include "api/units/time_delta.h"
 #include "api/video/video_content_type.h"
 #include "common_video/include/quality_limitation_reason.h"
 #include "media/base/media_channel.h"
@@ -37,21 +39,19 @@
 #include "modules/rtp_rtcp/include/report_block_data.h"
 #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
 #include "p2p/base/connection_info.h"
-#include "p2p/base/dtls_transport_internal.h"
 #include "p2p/base/ice_transport_internal.h"
 #include "p2p/base/p2p_constants.h"
 #include "p2p/base/port.h"
-#include "pc/channel.h"
 #include "pc/channel_interface.h"
 #include "pc/data_channel_utils.h"
 #include "pc/rtc_stats_traversal.h"
+#include "pc/rtp_receiver_proxy.h"
+#include "pc/rtp_sender_proxy.h"
 #include "pc/webrtc_sdp.h"
 #include "rtc_base/checks.h"
 #include "rtc_base/ip_address.h"
-#include "rtc_base/location.h"
 #include "rtc_base/logging.h"
 #include "rtc_base/network_constants.h"
-#include "rtc_base/ref_counted_object.h"
 #include "rtc_base/rtc_certificate.h"
 #include "rtc_base/socket_address.h"
 #include "rtc_base/ssl_stream_adapter.h"
@@ -64,18 +64,29 @@ namespace webrtc {

 namespace {

+const char kDirectionInbound = 'I';
+const char kDirectionOutbound = 'O';
+
 // TODO(https://crbug.com/webrtc/10656): Consider making IDs less predictable.
 std::string RTCCertificateIDFromFingerprint(const std::string& fingerprint) {
-  return "RTCCertificate_" + fingerprint;
+  return "CF" + fingerprint;
 }

-std::string RTCCodecStatsIDFromMidDirectionAndPayload(const std::string& mid,
-                                                      bool inbound,
-                                                      uint32_t payload_type) {
+// `direction` is either kDirectionInbound or kDirectionOutbound.
+std::string RTCCodecStatsIDFromTransportAndCodecParameters(
+    const char direction,
+    const std::string& transport_id,
+    const RtpCodecParameters& codec_params) {
   char buf[1024];
   rtc::SimpleStringBuilder sb(buf);
-  sb << "RTCCodec_" << mid << (inbound ?
"_Inbound_" : "_Outbound_") - << payload_type; + sb << 'C' << direction << transport_id << '_' << codec_params.payload_type; + // TODO(https://crbug.com/webrtc/14420): If we stop supporting different FMTP + // lines for the same PT and transport, which should be illegal SDP, then we + // wouldn't need `fmtp` to be part of the ID here. + rtc::StringBuilder fmtp; + if (WriteFmtpParameters(codec_params.parameters, &fmtp)) { + sb << '_' << fmtp.Release(); + } return sb.str(); } @@ -83,20 +94,17 @@ std::string RTCIceCandidatePairStatsIDFromConnectionInfo( const cricket::ConnectionInfo& info) { char buf[4096]; rtc::SimpleStringBuilder sb(buf); - sb << "RTCIceCandidatePair_" << info.local_candidate.id() << "_" - << info.remote_candidate.id(); + sb << "CP" << info.local_candidate.id() << "_" << info.remote_candidate.id(); return sb.str(); } -const char kSender[] = "sender"; -const char kReceiver[] = "receiver"; - -std::string RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( - const char* direction, +// `direction` is either kDirectionInbound or kDirectionOutbound. +std::string DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + const char direction, int attachment_id) { char buf[1024]; rtc::SimpleStringBuilder sb(buf); - sb << "RTCMediaStreamTrack_" << direction << "_" << attachment_id; + sb << "DEPRECATED_T" << direction << attachment_id; return sb.str(); } @@ -105,27 +113,27 @@ std::string RTCTransportStatsIDFromTransportChannel( int channel_component) { char buf[1024]; rtc::SimpleStringBuilder sb(buf); - sb << "RTCTransport_" << transport_name << "_" << channel_component; + sb << 'T' << transport_name << channel_component; return sb.str(); } -std::string RTCInboundRTPStreamStatsIDFromSSRC(cricket::MediaType media_type, +std::string RTCInboundRTPStreamStatsIDFromSSRC(const std::string& transport_id, + cricket::MediaType media_type, uint32_t ssrc) { char buf[1024]; rtc::SimpleStringBuilder sb(buf); - sb << "RTCInboundRTP" - << (media_type == cricket::MEDIA_TYPE_AUDIO ? "Audio" : "Video") - << "Stream_" << ssrc; + sb << 'I' << transport_id + << (media_type == cricket::MEDIA_TYPE_AUDIO ? 'A' : 'V') << ssrc; return sb.str(); } -std::string RTCOutboundRTPStreamStatsIDFromSSRC(cricket::MediaType media_type, +std::string RTCOutboundRTPStreamStatsIDFromSSRC(const std::string& transport_id, + cricket::MediaType media_type, uint32_t ssrc) { char buf[1024]; rtc::SimpleStringBuilder sb(buf); - sb << "RTCOutboundRTP" - << (media_type == cricket::MEDIA_TYPE_AUDIO ? "Audio" : "Video") - << "Stream_" << ssrc; + sb << 'O' << transport_id + << (media_type == cricket::MEDIA_TYPE_AUDIO ? 'A' : 'V') << ssrc; return sb.str(); } @@ -134,9 +142,8 @@ std::string RTCRemoteInboundRtpStreamStatsIdFromSourceSsrc( uint32_t source_ssrc) { char buf[1024]; rtc::SimpleStringBuilder sb(buf); - sb << "RTCRemoteInboundRtp" - << (media_type == cricket::MEDIA_TYPE_AUDIO ? "Audio" : "Video") - << "Stream_" << source_ssrc; + sb << "RI" << (media_type == cricket::MEDIA_TYPE_AUDIO ? 'A' : 'V') + << source_ssrc; return sb.str(); } @@ -145,9 +152,8 @@ std::string RTCRemoteOutboundRTPStreamStatsIDFromSSRC( uint32_t source_ssrc) { char buf[1024]; rtc::SimpleStringBuilder sb(buf); - sb << "RTCRemoteOutboundRTP" - << (media_type == cricket::MEDIA_TYPE_AUDIO ? "Audio" : "Video") - << "Stream_" << source_ssrc; + sb << "RO" << (media_type == cricket::MEDIA_TYPE_AUDIO ? 
'A' : 'V') + << source_ssrc; return sb.str(); } @@ -156,8 +162,8 @@ std::string RTCMediaSourceStatsIDFromKindAndAttachment( int attachment_id) { char buf[1024]; rtc::SimpleStringBuilder sb(buf); - sb << "RTC" << (media_type == cricket::MEDIA_TYPE_AUDIO ? "Audio" : "Video") - << "Source_" << attachment_id; + sb << 'S' << (media_type == cricket::MEDIA_TYPE_AUDIO ? 'A' : 'V') + << attachment_id; return sb.str(); } @@ -208,6 +214,20 @@ const char* IceCandidatePairStateToRTCStatsIceCandidatePairState( } } +const char* IceRoleToRTCIceRole(cricket::IceRole role) { + switch (role) { + case cricket::IceRole::ICEROLE_UNKNOWN: + return RTCIceRole::kUnknown; + case cricket::IceRole::ICEROLE_CONTROLLED: + return RTCIceRole::kControlled; + case cricket::IceRole::ICEROLE_CONTROLLING: + return RTCIceRole::kControlling; + default: + RTC_DCHECK_NOTREACHED(); + return nullptr; + } +} + const char* DtlsTransportStateToRTCDtlsTransportState( DtlsTransportState state) { switch (state) { @@ -227,7 +247,29 @@ const char* DtlsTransportStateToRTCDtlsTransportState( } } -const char* NetworkAdapterTypeToStatsType(rtc::AdapterType type) { +const char* IceTransportStateToRTCIceTransportState(IceTransportState state) { + switch (state) { + case IceTransportState::kNew: + return RTCIceTransportState::kNew; + case IceTransportState::kChecking: + return RTCIceTransportState::kChecking; + case IceTransportState::kConnected: + return RTCIceTransportState::kConnected; + case IceTransportState::kCompleted: + return RTCIceTransportState::kCompleted; + case IceTransportState::kFailed: + return RTCIceTransportState::kFailed; + case IceTransportState::kDisconnected: + return RTCIceTransportState::kDisconnected; + case IceTransportState::kClosed: + return RTCIceTransportState::kClosed; + default: + RTC_CHECK_NOTREACHED(); + return nullptr; + } +} + +const char* NetworkTypeToStatsType(rtc::AdapterType type) { switch (type) { case rtc::ADAPTER_TYPE_CELLULAR: case rtc::ADAPTER_TYPE_CELLULAR_2G: @@ -250,6 +292,36 @@ const char* NetworkAdapterTypeToStatsType(rtc::AdapterType type) { return nullptr; } +absl::string_view NetworkTypeToStatsNetworkAdapterType(rtc::AdapterType type) { + switch (type) { + case rtc::ADAPTER_TYPE_CELLULAR: + return RTCNetworkAdapterType::kCellular; + case rtc::ADAPTER_TYPE_CELLULAR_2G: + return RTCNetworkAdapterType::kCellular2g; + case rtc::ADAPTER_TYPE_CELLULAR_3G: + return RTCNetworkAdapterType::kCellular3g; + case rtc::ADAPTER_TYPE_CELLULAR_4G: + return RTCNetworkAdapterType::kCellular4g; + case rtc::ADAPTER_TYPE_CELLULAR_5G: + return RTCNetworkAdapterType::kCellular5g; + case rtc::ADAPTER_TYPE_ETHERNET: + return RTCNetworkAdapterType::kEthernet; + case rtc::ADAPTER_TYPE_WIFI: + return RTCNetworkAdapterType::kWifi; + case rtc::ADAPTER_TYPE_UNKNOWN: + return RTCNetworkAdapterType::kUnknown; + case rtc::ADAPTER_TYPE_LOOPBACK: + return RTCNetworkAdapterType::kLoopback; + case rtc::ADAPTER_TYPE_ANY: + return RTCNetworkAdapterType::kAny; + case rtc::ADAPTER_TYPE_VPN: + /* should not be handled here. 
Vpn is modelled as a bool */ + break; + } + RTC_DCHECK_NOTREACHED(); + return {}; +} + const char* QualityLimitationReasonToRTCQualityLimitationReason( QualityLimitationReason reason) { switch (reason) { @@ -269,9 +341,12 @@ std::map QualityLimitationDurationToRTCQualityLimitationDuration( std::map durations_ms) { std::map result; + // The internal duration is defined in milliseconds while the spec defines + // the value in seconds: + // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-qualitylimitationdurations for (const auto& elem : durations_ms) { result[QualityLimitationReasonToRTCQualityLimitationReason(elem.first)] = - elem.second; + elem.second / static_cast(rtc::kNumMillisecsPerSec); } return result; } @@ -282,19 +357,27 @@ double DoubleAudioLevelFromIntAudioLevel(int audio_level) { return audio_level / 32767.0; } -std::unique_ptr CodecStatsFromRtpCodecParameters( +// Gets the `codecId` identified by `transport_id` and `codec_params`. If no +// such `RTCCodecStats` exist yet, create it and add it to `report`. +std::string GetCodecIdAndMaybeCreateCodecStats( uint64_t timestamp_us, - const std::string& mid, + const char direction, const std::string& transport_id, - bool inbound, - const RtpCodecParameters& codec_params) { + const RtpCodecParameters& codec_params, + RTCStatsReport* report) { RTC_DCHECK_GE(codec_params.payload_type, 0); RTC_DCHECK_LE(codec_params.payload_type, 127); RTC_DCHECK(codec_params.clock_rate); uint32_t payload_type = static_cast(codec_params.payload_type); - std::unique_ptr codec_stats(new RTCCodecStats( - RTCCodecStatsIDFromMidDirectionAndPayload(mid, inbound, payload_type), - timestamp_us)); + std::string codec_id = RTCCodecStatsIDFromTransportAndCodecParameters( + direction, transport_id, codec_params); + if (report->Get(codec_id) != nullptr) { + // The `RTCCodecStats` already exists. + return codec_id; + } + // Create the `RTCCodecStats` that we want to reference. 
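Review note: GetCodecIdAndMaybeCreateCodecStats above only creates an RTCCodecStats entry the first time a (direction, transport, codec) combination is seen and otherwise just returns the existing ID. A minimal standalone sketch of that get-or-create idea, using a plain std::map as a hypothetical stand-in for RTCStatsReport (names here are illustrative, not WebRTC's):

#include <map>
#include <string>

// Hypothetical stand-in for RTCCodecStats.
struct CodecStats {
  std::string id;
  int payload_type = 0;
};

// Returns the stats ID for (direction, transport, payload type), creating the
// entry in `report` only the first time that combination is seen.
std::string GetOrCreateCodecId(char direction, const std::string& transport_id,
                               int payload_type,
                               std::map<std::string, CodecStats>* report) {
  std::string id = std::string("C") + direction + transport_id + "_" +
                   std::to_string(payload_type);
  if (report->find(id) == report->end()) {
    (*report)[id] = CodecStats{id, payload_type};  // create on first use
  }
  return id;
}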
+ std::unique_ptr codec_stats( + std::make_unique(codec_id, timestamp_us)); codec_stats->payload_type = payload_type; codec_stats->mime_type = codec_params.mime_type(); if (codec_params.clock_rate) { @@ -309,12 +392,13 @@ std::unique_ptr CodecStatsFromRtpCodecParameters( codec_stats->sdp_fmtp_line = fmtp.Release(); } codec_stats->transport_id = transport_id; - return codec_stats; + report->AddStats(std::move(codec_stats)); + return codec_id; } void SetMediaStreamTrackStatsFromMediaStreamTrackInterface( const MediaStreamTrackInterface& track, - RTCMediaStreamTrackStats* track_stats) { + DEPRECATED_RTCMediaStreamTrackStats* track_stats) { track_stats->track_identifier = track.id(); track_stats->ended = (track.state() == MediaStreamTrackInterface::kEnded); } @@ -335,6 +419,14 @@ void SetInboundRTPStreamStatsFromMediaReceiverInfo( static_cast(media_receiver_info.packets_lost); inbound_stats->jitter_buffer_delay = media_receiver_info.jitter_buffer_delay_seconds; + if (media_receiver_info.jitter_buffer_target_delay_seconds) { + inbound_stats->jitter_buffer_target_delay = + *media_receiver_info.jitter_buffer_target_delay_seconds; + } + if (media_receiver_info.jitter_buffer_minimum_delay_seconds) { + inbound_stats->jitter_buffer_minimum_delay = + *media_receiver_info.jitter_buffer_minimum_delay_seconds; + } inbound_stats->jitter_buffer_emitted_count = media_receiver_info.jitter_buffer_emitted_count; if (media_receiver_info.nacks_sent) { @@ -343,20 +435,31 @@ void SetInboundRTPStreamStatsFromMediaReceiverInfo( } std::unique_ptr CreateInboundAudioStreamStats( + const cricket::VoiceMediaInfo& voice_media_info, const cricket::VoiceReceiverInfo& voice_receiver_info, + const std::string& transport_id, const std::string& mid, - int64_t timestamp_us) { + int64_t timestamp_us, + RTCStatsReport* report) { auto inbound_audio = std::make_unique( - /*id=*/RTCInboundRTPStreamStatsIDFromSSRC(cricket::MEDIA_TYPE_AUDIO, - voice_receiver_info.ssrc()), + /*id=*/RTCInboundRTPStreamStatsIDFromSSRC( + transport_id, cricket::MEDIA_TYPE_AUDIO, voice_receiver_info.ssrc()), timestamp_us); SetInboundRTPStreamStatsFromMediaReceiverInfo(voice_receiver_info, inbound_audio.get()); + inbound_audio->transport_id = transport_id; + inbound_audio->mid = mid; inbound_audio->media_type = "audio"; inbound_audio->kind = "audio"; - if (voice_receiver_info.codec_payload_type) { - inbound_audio->codec_id = RTCCodecStatsIDFromMidDirectionAndPayload( - mid, /*inbound=*/true, *voice_receiver_info.codec_payload_type); + if (voice_receiver_info.codec_payload_type.has_value()) { + auto codec_param_it = voice_media_info.receive_codecs.find( + voice_receiver_info.codec_payload_type.value()); + RTC_DCHECK(codec_param_it != voice_media_info.receive_codecs.end()); + if (codec_param_it != voice_media_info.receive_codecs.end()) { + inbound_audio->codec_id = GetCodecIdAndMaybeCreateCodecStats( + inbound_audio->timestamp_us(), kDirectionInbound, transport_id, + codec_param_it->second, report); + } } inbound_audio->jitter = static_cast(voice_receiver_info.jitter_ms) / rtc::kNumMillisecsPerSec; @@ -393,6 +496,19 @@ std::unique_ptr CreateInboundAudioStreamStats( inbound_audio->fec_packets_discarded = voice_receiver_info.fec_packets_discarded; inbound_audio->packets_discarded = voice_receiver_info.packets_discarded; + inbound_audio->jitter_buffer_flushes = + voice_receiver_info.jitter_buffer_flushes; + inbound_audio->delayed_packet_outage_samples = + voice_receiver_info.delayed_packet_outage_samples; + inbound_audio->relative_packet_arrival_delay = + 
voice_receiver_info.relative_packet_arrival_delay_seconds; + inbound_audio->interruption_count = + voice_receiver_info.interruption_count >= 0 + ? voice_receiver_info.interruption_count + : 0; + inbound_audio->total_interruption_duration = + static_cast(voice_receiver_info.total_interruption_duration_ms) / + rtc::kNumMillisecsPerSec; return inbound_audio; } @@ -400,7 +516,7 @@ std::unique_ptr CreateRemoteOutboundAudioStreamStats( const cricket::VoiceReceiverInfo& voice_receiver_info, const std::string& mid, - const std::string& inbound_audio_id, + const RTCInboundRTPStreamStats& inbound_audio_stats, const std::string& transport_id) { if (!voice_receiver_info.last_sender_report_timestamp_ms.has_value()) { // Cannot create `RTCRemoteOutboundRtpStreamStats` when the RTCP SR arrival @@ -422,15 +538,14 @@ CreateRemoteOutboundAudioStreamStats( stats->ssrc = voice_receiver_info.ssrc(); stats->kind = "audio"; stats->transport_id = transport_id; - stats->codec_id = RTCCodecStatsIDFromMidDirectionAndPayload( - mid, - /*inbound=*/true, // Remote-outbound same as local-inbound. - *voice_receiver_info.codec_payload_type); + if (inbound_audio_stats.codec_id.is_defined()) { + stats->codec_id = *inbound_audio_stats.codec_id; + } // - RTCSentRtpStreamStats. stats->packets_sent = voice_receiver_info.sender_reports_packets_sent; stats->bytes_sent = voice_receiver_info.sender_reports_bytes_sent; // - RTCRemoteOutboundRtpStreamStats. - stats->local_id = inbound_audio_id; + stats->local_id = inbound_audio_stats.id(); RTC_DCHECK( voice_receiver_info.last_sender_report_remote_timestamp_ms.has_value()); stats->remote_timestamp = static_cast( @@ -449,16 +564,27 @@ CreateRemoteOutboundAudioStreamStats( } void SetInboundRTPStreamStatsFromVideoReceiverInfo( + const std::string& transport_id, const std::string& mid, + const cricket::VideoMediaInfo& video_media_info, const cricket::VideoReceiverInfo& video_receiver_info, - RTCInboundRTPStreamStats* inbound_video) { + RTCInboundRTPStreamStats* inbound_video, + RTCStatsReport* report) { SetInboundRTPStreamStatsFromMediaReceiverInfo(video_receiver_info, inbound_video); + inbound_video->transport_id = transport_id; + inbound_video->mid = mid; inbound_video->media_type = "video"; inbound_video->kind = "video"; - if (video_receiver_info.codec_payload_type) { - inbound_video->codec_id = RTCCodecStatsIDFromMidDirectionAndPayload( - mid, /*inbound=*/true, *video_receiver_info.codec_payload_type); + if (video_receiver_info.codec_payload_type.has_value()) { + auto codec_param_it = video_media_info.receive_codecs.find( + video_receiver_info.codec_payload_type.value()); + RTC_DCHECK(codec_param_it != video_media_info.receive_codecs.end()); + if (codec_param_it != video_media_info.receive_codecs.end()) { + inbound_video->codec_id = GetCodecIdAndMaybeCreateCodecStats( + inbound_video->timestamp_us(), kDirectionInbound, transport_id, + codec_param_it->second, report); + } } inbound_video->jitter = static_cast(video_receiver_info.jitter_ms) / rtc::kNumMillisecsPerSec; @@ -478,18 +604,39 @@ void SetInboundRTPStreamStatsFromVideoReceiverInfo( inbound_video->frame_height = static_cast(video_receiver_info.frame_height); } - if (video_receiver_info.framerate_rcvd > 0) { - inbound_video->frames_per_second = video_receiver_info.framerate_rcvd; + if (video_receiver_info.framerate_decoded > 0) { + inbound_video->frames_per_second = video_receiver_info.framerate_decoded; } - if (video_receiver_info.qp_sum) + if (video_receiver_info.qp_sum) { inbound_video->qp_sum = *video_receiver_info.qp_sum; + 
} + if (video_receiver_info.timing_frame_info.has_value()) { + inbound_video->goog_timing_frame_info = + video_receiver_info.timing_frame_info->ToString(); + } inbound_video->total_decode_time = - static_cast(video_receiver_info.total_decode_time_ms) / - rtc::kNumMillisecsPerSec; + video_receiver_info.total_decode_time.seconds(); + inbound_video->total_processing_delay = + video_receiver_info.total_processing_delay.seconds(); + inbound_video->total_assembly_time = + video_receiver_info.total_assembly_time.seconds(); + inbound_video->frames_assembled_from_multiple_packets = + video_receiver_info.frames_assembled_from_multiple_packets; inbound_video->total_inter_frame_delay = video_receiver_info.total_inter_frame_delay; inbound_video->total_squared_inter_frame_delay = video_receiver_info.total_squared_inter_frame_delay; + inbound_video->pause_count = video_receiver_info.pause_count; + inbound_video->total_pauses_duration = + static_cast(video_receiver_info.total_pauses_duration_ms) / + rtc::kNumMillisecsPerSec; + inbound_video->freeze_count = video_receiver_info.freeze_count; + inbound_video->total_freezes_duration = + static_cast(video_receiver_info.total_freezes_duration_ms) / + rtc::kNumMillisecsPerSec; + inbound_video->min_playout_delay = + static_cast(video_receiver_info.min_playout_delay_ms) / + rtc::kNumMillisecsPerSec; if (video_receiver_info.last_packet_received_timestamp_ms) { inbound_video->last_packet_received_timestamp = static_cast( *video_receiver_info.last_packet_received_timestamp_ms); @@ -509,7 +656,8 @@ void SetInboundRTPStreamStatsFromVideoReceiverInfo( } } -// Provides the media independent counters (both audio and video). +// Provides the media independent counters and information (both audio and +// video). void SetOutboundRTPStreamStatsFromMediaSenderInfo( const cricket::MediaSenderInfo& media_sender_info, RTCOutboundRTPStreamStats* outbound_stats) { @@ -517,6 +665,8 @@ void SetOutboundRTPStreamStatsFromMediaSenderInfo( outbound_stats->ssrc = media_sender_info.ssrc(); outbound_stats->packets_sent = static_cast(media_sender_info.packets_sent); + outbound_stats->total_packet_send_delay = + media_sender_info.total_packet_send_delay.seconds(); outbound_stats->retransmitted_packets_sent = media_sender_info.retransmitted_packets_sent; outbound_stats->bytes_sent = @@ -526,38 +676,64 @@ void SetOutboundRTPStreamStatsFromMediaSenderInfo( outbound_stats->retransmitted_bytes_sent = media_sender_info.retransmitted_bytes_sent; outbound_stats->nack_count = media_sender_info.nacks_rcvd; + if (media_sender_info.active.has_value()) { + outbound_stats->active = *media_sender_info.active; + } } void SetOutboundRTPStreamStatsFromVoiceSenderInfo( + const std::string& transport_id, const std::string& mid, + const cricket::VoiceMediaInfo& voice_media_info, const cricket::VoiceSenderInfo& voice_sender_info, - RTCOutboundRTPStreamStats* outbound_audio) { + RTCOutboundRTPStreamStats* outbound_audio, + RTCStatsReport* report) { SetOutboundRTPStreamStatsFromMediaSenderInfo(voice_sender_info, outbound_audio); + outbound_audio->transport_id = transport_id; + outbound_audio->mid = mid; outbound_audio->media_type = "audio"; outbound_audio->kind = "audio"; - if (voice_sender_info.target_bitrate > 0) { - outbound_audio->target_bitrate = voice_sender_info.target_bitrate; - } - if (voice_sender_info.codec_payload_type) { - outbound_audio->codec_id = RTCCodecStatsIDFromMidDirectionAndPayload( - mid, /*inbound=*/false, *voice_sender_info.codec_payload_type); + if (voice_sender_info.target_bitrate && + 
*voice_sender_info.target_bitrate > 0) { + outbound_audio->target_bitrate = *voice_sender_info.target_bitrate; + } + if (voice_sender_info.codec_payload_type.has_value()) { + auto codec_param_it = voice_media_info.send_codecs.find( + voice_sender_info.codec_payload_type.value()); + RTC_DCHECK(codec_param_it != voice_media_info.send_codecs.end()); + if (codec_param_it != voice_media_info.send_codecs.end()) { + outbound_audio->codec_id = GetCodecIdAndMaybeCreateCodecStats( + outbound_audio->timestamp_us(), kDirectionOutbound, transport_id, + codec_param_it->second, report); + } } // `fir_count`, `pli_count` and `sli_count` are only valid for video and are // purposefully left undefined for audio. } void SetOutboundRTPStreamStatsFromVideoSenderInfo( + const std::string& transport_id, const std::string& mid, + const cricket::VideoMediaInfo& video_media_info, const cricket::VideoSenderInfo& video_sender_info, - RTCOutboundRTPStreamStats* outbound_video) { + RTCOutboundRTPStreamStats* outbound_video, + RTCStatsReport* report) { SetOutboundRTPStreamStatsFromMediaSenderInfo(video_sender_info, outbound_video); + outbound_video->transport_id = transport_id; + outbound_video->mid = mid; outbound_video->media_type = "video"; outbound_video->kind = "video"; - if (video_sender_info.codec_payload_type) { - outbound_video->codec_id = RTCCodecStatsIDFromMidDirectionAndPayload( - mid, /*inbound=*/false, *video_sender_info.codec_payload_type); + if (video_sender_info.codec_payload_type.has_value()) { + auto codec_param_it = video_media_info.send_codecs.find( + video_sender_info.codec_payload_type.value()); + RTC_DCHECK(codec_param_it != video_media_info.send_codecs.end()); + if (codec_param_it != video_media_info.send_codecs.end()) { + outbound_video->codec_id = GetCodecIdAndMaybeCreateCodecStats( + outbound_video->timestamp_us(), kDirectionOutbound, transport_id, + codec_param_it->second, report); + } } outbound_video->fir_count = static_cast(video_sender_info.firs_rcvd); @@ -565,6 +741,10 @@ void SetOutboundRTPStreamStatsFromVideoSenderInfo( static_cast(video_sender_info.plis_rcvd); if (video_sender_info.qp_sum) outbound_video->qp_sum = *video_sender_info.qp_sum; + if (video_sender_info.target_bitrate && + video_sender_info.target_bitrate > 0) { + outbound_video->target_bitrate = *video_sender_info.target_bitrate; + } outbound_video->frames_encoded = video_sender_info.frames_encoded; outbound_video->key_frames_encoded = video_sender_info.key_frames_encoded; outbound_video->total_encode_time = @@ -585,9 +765,6 @@ void SetOutboundRTPStreamStatsFromVideoSenderInfo( } outbound_video->frames_sent = video_sender_info.frames_sent; outbound_video->huge_frames_sent = video_sender_info.huge_frames_sent; - outbound_video->total_packet_send_delay = - static_cast(video_sender_info.total_packet_send_delay_ms) / - rtc::kNumMillisecsPerSec; outbound_video->quality_limitation_reason = QualityLimitationReasonToRTCQualityLimitationReason( video_sender_info.quality_limitation_reason); @@ -611,6 +788,7 @@ void SetOutboundRTPStreamStatsFromVideoSenderInfo( std::unique_ptr ProduceRemoteInboundRtpStreamStatsFromReportBlockData( + const std::string& transport_id, const ReportBlockData& report_block_data, cricket::MediaType media_type, const std::map& outbound_rtps, @@ -638,8 +816,8 @@ ProduceRemoteInboundRtpStreamStatsFromReportBlockData( remote_inbound->round_trip_time_measurements = report_block_data.num_rtts(); - std::string local_id = - RTCOutboundRTPStreamStatsIDFromSSRC(media_type, report_block.source_ssrc); + std::string 
local_id = RTCOutboundRTPStreamStatsIDFromSSRC( + transport_id, media_type, report_block.source_ssrc); // Look up local stat from `outbound_rtps` where the pointers are non-const. auto local_id_it = outbound_rtps.find(local_id); if (local_id_it != outbound_rtps.end()) { @@ -648,9 +826,7 @@ ProduceRemoteInboundRtpStreamStatsFromReportBlockData( outbound_rtp.remote_id = remote_inbound->id(); // The RTP/RTCP transport is obtained from the // RTCOutboundRtpStreamStats's transport. - const auto* transport_from_id = outbound_rtp.transport_id.is_defined() - ? report.Get(*outbound_rtp.transport_id) - : nullptr; + const auto* transport_from_id = report.Get(transport_id); if (transport_from_id) { const auto& transport = transport_from_id->cast_to(); // If RTP and RTCP are not multiplexed, there is a separate RTCP @@ -716,20 +892,23 @@ const std::string& ProduceIceCandidateStats(int64_t timestamp_us, bool is_local, const std::string& transport_id, RTCStatsReport* report) { - const std::string& id = "RTCIceCandidate_" + candidate.id(); + const std::string& id = "I" + candidate.id(); const RTCStats* stats = report->Get(id); if (!stats) { std::unique_ptr candidate_stats; if (is_local) - candidate_stats.reset(new RTCLocalIceCandidateStats(id, timestamp_us)); + candidate_stats = + std::make_unique(id, timestamp_us); else - candidate_stats.reset(new RTCRemoteIceCandidateStats(id, timestamp_us)); + candidate_stats = + std::make_unique(id, timestamp_us); candidate_stats->transport_id = transport_id; if (is_local) { candidate_stats->network_type = - NetworkAdapterTypeToStatsType(candidate.network_type()); + NetworkTypeToStatsType(candidate.network_type()); const std::string& candidate_type = candidate.type(); const std::string& relay_protocol = candidate.relay_protocol(); + const std::string& url = candidate.url(); if (candidate_type == cricket::RELAY_PORT_TYPE || (candidate_type == cricket::PRFLX_PORT_TYPE && !relay_protocol.empty())) { @@ -737,10 +916,30 @@ const std::string& ProduceIceCandidateStats(int64_t timestamp_us, relay_protocol.compare("tcp") == 0 || relay_protocol.compare("tls") == 0); candidate_stats->relay_protocol = relay_protocol; + if (!url.empty()) { + candidate_stats->url = url; + } + } else if (candidate_type == cricket::STUN_PORT_TYPE) { + if (!url.empty()) { + candidate_stats->url = url; + } + } + if (candidate.network_type() == rtc::ADAPTER_TYPE_VPN) { + candidate_stats->vpn = true; + candidate_stats->network_adapter_type = + std::string(NetworkTypeToStatsNetworkAdapterType( + candidate.underlying_type_for_vpn())); + } else { + candidate_stats->vpn = false; + candidate_stats->network_adapter_type = std::string( + NetworkTypeToStatsNetworkAdapterType(candidate.network_type())); } } else { // We don't expect to know the adapter type of remote candidates. 
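Review note: in ProduceIceCandidateStats above, a VPN candidate is now reported as vpn=true plus the *underlying* adapter type, while every other local candidate reports vpn=false plus its own adapter type. A simplified sketch of that mapping, with a stand-in enum rather than WebRTC's rtc::AdapterType:

#include <utility>

// Stand-in adapter enum; the real code uses rtc::AdapterType.
enum class Adapter { kEthernet, kWifi, kVpn, kUnknown };

// Returns {vpn flag, adapter type to report} for a local candidate.
std::pair<bool, Adapter> DescribeAdapter(Adapter network_type,
                                         Adapter underlying_type_for_vpn) {
  if (network_type == Adapter::kVpn) return {true, underlying_type_for_vpn};
  return {false, network_type};
}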
RTC_DCHECK_EQ(rtc::ADAPTER_TYPE_UNKNOWN, candidate.network_type()); + RTC_DCHECK_EQ(0, candidate.relay_protocol().compare("")); + RTC_DCHECK_EQ(rtc::ADAPTER_TYPE_UNKNOWN, + candidate.underlying_type_for_vpn()); } candidate_stats->ip = candidate.address().ipaddr().ToString(); candidate_stats->address = candidate.address().ipaddr().ToString(); @@ -749,6 +948,17 @@ const std::string& ProduceIceCandidateStats(int64_t timestamp_us, candidate_stats->candidate_type = CandidateTypeToRTCIceCandidateType(candidate.type()); candidate_stats->priority = static_cast(candidate.priority()); + candidate_stats->foundation = candidate.foundation(); + auto related_address = candidate.related_address(); + if (related_address.port() != 0) { + candidate_stats->related_address = related_address.ipaddr().ToString(); + candidate_stats->related_port = + static_cast(related_address.port()); + } + candidate_stats->username_fragment = candidate.username(); + if (candidate.protocol() == "tcp") { + candidate_stats->tcp_type = candidate.tcptype(); + } stats = candidate_stats.get(); report->AddStats(std::move(candidate_stats)); @@ -770,16 +980,16 @@ void SetAudioProcessingStats(StatsType* stats, } } -std::unique_ptr +std::unique_ptr ProduceMediaStreamTrackStatsFromVoiceSenderInfo( int64_t timestamp_us, AudioTrackInterface& audio_track, const cricket::VoiceSenderInfo& voice_sender_info, int attachment_id) { - std::unique_ptr audio_track_stats( - new RTCMediaStreamTrackStats( - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(kSender, - attachment_id), + std::unique_ptr audio_track_stats( + std::make_unique( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + kDirectionOutbound, attachment_id), timestamp_us, RTCMediaStreamTrackKind::kAudio)); SetMediaStreamTrackStatsFromMediaStreamTrackInterface( audio_track, audio_track_stats.get()); @@ -803,7 +1013,7 @@ ProduceMediaStreamTrackStatsFromVoiceSenderInfo( return audio_track_stats; } -std::unique_ptr +std::unique_ptr ProduceMediaStreamTrackStatsFromVoiceReceiverInfo( int64_t timestamp_us, const AudioTrackInterface& audio_track, @@ -811,10 +1021,10 @@ ProduceMediaStreamTrackStatsFromVoiceReceiverInfo( int attachment_id) { // Since receiver tracks can't be reattached, we use the SSRC as // an attachment identifier. - std::unique_ptr audio_track_stats( - new RTCMediaStreamTrackStats( - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(kReceiver, - attachment_id), + std::unique_ptr audio_track_stats( + std::make_unique( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + kDirectionInbound, attachment_id), timestamp_us, RTCMediaStreamTrackKind::kAudio)); SetMediaStreamTrackStatsFromMediaStreamTrackInterface( audio_track, audio_track_stats.get()); @@ -843,14 +1053,15 @@ ProduceMediaStreamTrackStatsFromVoiceReceiverInfo( voice_receiver_info.silent_concealed_samples; audio_track_stats->concealment_events = voice_receiver_info.concealment_events; + + // TODO(crbug.com/webrtc/14524): These metrics have been moved from "track" + // stats, delete them. audio_track_stats->jitter_buffer_flushes = voice_receiver_info.jitter_buffer_flushes; audio_track_stats->delayed_packet_outage_samples = voice_receiver_info.delayed_packet_outage_samples; audio_track_stats->relative_packet_arrival_delay = voice_receiver_info.relative_packet_arrival_delay_seconds; - audio_track_stats->jitter_buffer_target_delay = - voice_receiver_info.jitter_buffer_target_delay_seconds; audio_track_stats->interruption_count = voice_receiver_info.interruption_count >= 0 ? 
voice_receiver_info.interruption_count @@ -858,19 +1069,20 @@ ProduceMediaStreamTrackStatsFromVoiceReceiverInfo( audio_track_stats->total_interruption_duration = static_cast(voice_receiver_info.total_interruption_duration_ms) / rtc::kNumMillisecsPerSec; + return audio_track_stats; } -std::unique_ptr +std::unique_ptr ProduceMediaStreamTrackStatsFromVideoSenderInfo( int64_t timestamp_us, const VideoTrackInterface& video_track, const cricket::VideoSenderInfo& video_sender_info, int attachment_id) { - std::unique_ptr video_track_stats( - new RTCMediaStreamTrackStats( - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(kSender, - attachment_id), + std::unique_ptr video_track_stats( + std::make_unique( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + kDirectionOutbound, attachment_id), timestamp_us, RTCMediaStreamTrackKind::kVideo)); SetMediaStreamTrackStatsFromMediaStreamTrackInterface( video_track, video_track_stats.get()); @@ -890,17 +1102,16 @@ ProduceMediaStreamTrackStatsFromVideoSenderInfo( return video_track_stats; } -std::unique_ptr +std::unique_ptr ProduceMediaStreamTrackStatsFromVideoReceiverInfo( int64_t timestamp_us, const VideoTrackInterface& video_track, const cricket::VideoReceiverInfo& video_receiver_info, int attachment_id) { - std::unique_ptr video_track_stats( - new RTCMediaStreamTrackStats( - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(kReceiver, - - attachment_id), + std::unique_ptr video_track_stats( + std::make_unique( + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + kDirectionInbound, attachment_id), timestamp_us, RTCMediaStreamTrackKind::kVideo)); SetMediaStreamTrackStatsFromMediaStreamTrackInterface( video_track, video_track_stats.get()); @@ -924,6 +1135,14 @@ ProduceMediaStreamTrackStatsFromVideoReceiverInfo( // value as "RTCInboundRTPStreamStats.framesDecoded". https://crbug.com/659137 video_track_stats->frames_decoded = video_receiver_info.frames_decoded; video_track_stats->frames_dropped = video_receiver_info.frames_dropped; + video_track_stats->total_frames_duration = + static_cast(video_receiver_info.total_frames_duration_ms) / + rtc::kNumMillisecsPerSec; + video_track_stats->sum_squared_frame_durations = + video_receiver_info.sum_squared_frame_durations; + + // TODO(crbug.com/webrtc/14521): These metrics have been moved, delete them + // from "track". video_track_stats->freeze_count = video_receiver_info.freeze_count; video_track_stats->pause_count = video_receiver_info.pause_count; video_track_stats->total_freezes_duration = @@ -932,11 +1151,6 @@ ProduceMediaStreamTrackStatsFromVideoReceiverInfo( video_track_stats->total_pauses_duration = static_cast(video_receiver_info.total_pauses_duration_ms) / rtc::kNumMillisecsPerSec; - video_track_stats->total_frames_duration = - static_cast(video_receiver_info.total_frames_duration_ms) / - rtc::kNumMillisecsPerSec; - video_track_stats->sum_squared_frame_durations = - video_receiver_info.sum_squared_frame_durations; return video_track_stats; } @@ -948,9 +1162,8 @@ void ProduceSenderMediaTrackStats( RTCStatsReport* report) { // This function iterates over the senders to generate outgoing track stats. - // TODO(hbos): Return stats of detached tracks. We have to perform stats - // gathering at the time of detachment to get accurate stats and timestamps. - // https://crbug.com/659137 + // TODO(https://crbug.com/webrtc/14175): Stop collecting "track" stats, + // they're deprecated. 
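Review note: the renamed helper above builds the deprecated track-stats IDs as "DEPRECATED_T" plus a direction character ('I' or 'O') plus the attachment ID. A tiny sketch of the same scheme written with plain std::string instead of rtc::SimpleStringBuilder:

#include <string>

// "DEPRECATED_T" + direction char + attachment id, e.g. "DEPRECATED_TO42".
std::string DeprecatedTrackStatsId(char direction, int attachment_id) {
  return "DEPRECATED_T" + std::string(1, direction) +
         std::to_string(attachment_id);
}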
for (const auto& sender : senders) { if (sender->media_type() == cricket::MEDIA_TYPE_AUDIO) { AudioTrackInterface* track = @@ -971,12 +1184,12 @@ void ProduceSenderMediaTrackStats( if (sender_info) { voice_sender_info = sender_info; } else { - RTC_LOG(LS_INFO) + RTC_DLOG(LS_INFO) << "RTCStatsCollector: No voice sender info for sender with ssrc " << sender->ssrc(); } } - std::unique_ptr audio_track_stats = + std::unique_ptr audio_track_stats = ProduceMediaStreamTrackStatsFromVoiceSenderInfo( timestamp_us, *track, *voice_sender_info, sender->AttachmentId()); report->AddStats(std::move(audio_track_stats)); @@ -999,11 +1212,11 @@ void ProduceSenderMediaTrackStats( if (sender_info) { video_sender_info = sender_info; } else { - RTC_LOG(LS_INFO) << "No video sender info for sender with ssrc " - << sender->ssrc(); + RTC_DLOG(LS_INFO) + << "No video sender info for sender with ssrc " << sender->ssrc(); } } - std::unique_ptr video_track_stats = + std::unique_ptr video_track_stats = ProduceMediaStreamTrackStatsFromVideoSenderInfo( timestamp_us, *track, *video_sender_info, sender->AttachmentId()); report->AddStats(std::move(video_track_stats)); @@ -1028,7 +1241,7 @@ void ProduceReceiverMediaTrackStats( if (!voice_receiver_info) { continue; } - std::unique_ptr audio_track_stats = + std::unique_ptr audio_track_stats = ProduceMediaStreamTrackStatsFromVoiceReceiverInfo( timestamp_us, *track, *voice_receiver_info, receiver->AttachmentId()); @@ -1041,7 +1254,7 @@ void ProduceReceiverMediaTrackStats( if (!video_receiver_info) { continue; } - std::unique_ptr video_track_stats = + std::unique_ptr video_track_stats = ProduceMediaStreamTrackStatsFromVideoReceiverInfo( timestamp_us, *track, *video_receiver_info, receiver->AttachmentId()); @@ -1066,8 +1279,8 @@ rtc::scoped_refptr CreateReportFilteredBySelector( // Because we do not implement sender stats, we look at outbound-rtp(s) // that reference the track attachment stats for the sender instead. std::string track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( - kSender, sender_selector->AttachmentId()); + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + kDirectionOutbound, sender_selector->AttachmentId()); for (const auto& stats : *report) { if (stats.type() != RTCOutboundRTPStreamStats::kType) continue; @@ -1086,8 +1299,8 @@ rtc::scoped_refptr CreateReportFilteredBySelector( // Because we do not implement receiver stats, we look at inbound-rtp(s) // that reference the track attachment stats for the receiver instead. std::string track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( - kReceiver, receiver_selector->AttachmentId()); + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + kDirectionInbound, receiver_selector->AttachmentId()); for (const auto& stats : *report) { if (stats.type() != RTCInboundRTPStreamStats::kType) continue; @@ -1100,12 +1313,20 @@ rtc::scoped_refptr CreateReportFilteredBySelector( } } if (rtpstream_ids.empty()) - return RTCStatsReport::Create(report->timestamp_us()); + return RTCStatsReport::Create(report->timestamp()); return TakeReferencedStats(report->Copy(), rtpstream_ids); } } // namespace +RTCStatsCollector::CertificateStatsPair +RTCStatsCollector::CertificateStatsPair::Copy() const { + CertificateStatsPair copy; + copy.local = local ? local->Copy() : nullptr; + copy.remote = remote ? 
remote->Copy() : nullptr; + return copy; +} + RTCStatsCollector::RequestInfo::RequestInfo( rtc::scoped_refptr callback) : RequestInfo(FilterMode::kAll, std::move(callback), nullptr, nullptr) {} @@ -1199,32 +1420,10 @@ void RTCStatsCollector::GetStatsReportInternal( // We have a fresh cached report to deliver. Deliver asynchronously, since // the caller may not be expecting a synchronous callback, and it avoids // reentrancy problems. - std::vector requests; - requests.swap(requests_); - - // Task subclass to take ownership of the requests. - // TODO(nisse): Delete when we can use C++14, and do lambda capture with - // std::move. - class DeliveryTask : public QueuedTask { - public: - DeliveryTask(rtc::scoped_refptr collector, - rtc::scoped_refptr cached_report, - std::vector requests) - : collector_(collector), - cached_report_(cached_report), - requests_(std::move(requests)) {} - bool Run() override { - collector_->DeliverCachedReport(cached_report_, std::move(requests_)); - return true; - } - - private: - rtc::scoped_refptr collector_; - rtc::scoped_refptr cached_report_; - std::vector requests_; - }; - signaling_thread_->PostTask(std::make_unique( - this, cached_report_, std::move(requests))); + signaling_thread_->PostTask( + absl::bind_front(&RTCStatsCollector::DeliverCachedReport, + rtc::scoped_refptr(this), + cached_report_, std::move(requests_))); } else if (!num_pending_partial_reports_) { // Only start gathering stats if we're not already gathering stats. In the // case of already gathering stats, `callback_` will be invoked when there @@ -1248,7 +1447,6 @@ void RTCStatsCollector::GetStatsReportInternal( network_report_event_.Reset(); rtc::scoped_refptr collector(this); network_thread_->PostTask( - RTC_FROM_HERE, [collector, sctp_transport_name = pc_->sctp_transport_name(), timestamp_us]() mutable { collector->ProducePartialResultsOnNetworkThread( @@ -1261,6 +1459,8 @@ void RTCStatsCollector::GetStatsReportInternal( void RTCStatsCollector::ClearCachedStatsReport() { RTC_DCHECK_RUN_ON(signaling_thread_); cached_report_ = nullptr; + MutexLock lock(&cached_certificates_mutex_); + cached_certificates_by_transport_.clear(); } void RTCStatsCollector::WaitForPendingRequest() { @@ -1275,7 +1475,7 @@ void RTCStatsCollector::ProducePartialResultsOnSignalingThread( RTC_DCHECK_RUN_ON(signaling_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - partial_report_ = RTCStatsReport::Create(timestamp_us); + partial_report_ = RTCStatsReport::Create(Timestamp::Micros(timestamp_us)); ProducePartialResultsOnSignalingThreadImpl(timestamp_us, partial_report_.get()); @@ -1311,7 +1511,7 @@ void RTCStatsCollector::ProducePartialResultsOnNetworkThread( // Touching `network_report_` on this thread is safe by this method because // `network_report_event_` is reset before this method is invoked. 
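Review note: the GetStatsReportInternal hunk above drops the hand-written DeliveryTask QueuedTask subclass and instead posts the cached report with absl::bind_front, which packages the object pointer and its arguments into a single zero-argument callable. A minimal sketch of that idea, with a plain std::queue standing in for the signaling thread's task queue (Collector and Deliver are hypothetical names):

#include <functional>
#include <queue>
#include <string>
#include <utility>

#include "absl/functional/bind_front.h"

// Hypothetical collector; only the member-function posting is the point.
struct Collector {
  void Deliver(std::string report) { delivered = std::move(report); }
  std::string delivered;
};

int main() {
  std::queue<std::function<void()>> tasks;  // stand-in for a task queue
  Collector collector;
  // bind_front binds the object and the argument now; the queue runs the
  // resulting zero-argument callable later.
  tasks.push(absl::bind_front(&Collector::Deliver, &collector,
                              std::string("cached-report")));
  tasks.front()();  // executed asynchronously by the task queue in real code
  return collector.delivered.empty() ? 1 : 0;
}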
- network_report_ = RTCStatsReport::Create(timestamp_us); + network_report_ = RTCStatsReport::Create(Timestamp::Micros(timestamp_us)); std::set transport_names; if (sctp_transport_name) { @@ -1337,7 +1537,7 @@ void RTCStatsCollector::ProducePartialResultsOnNetworkThread( network_report_event_.Set(); rtc::scoped_refptr collector(this); signaling_thread_->PostTask( - RTC_FROM_HERE, [collector] { collector->MergeNetworkReport_s(); }); + [collector] { collector->MergeNetworkReport_s(); }); } void RTCStatsCollector::ProducePartialResultsOnNetworkThreadImpl( @@ -1350,7 +1550,6 @@ void RTCStatsCollector::ProducePartialResultsOnNetworkThreadImpl( rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; ProduceCertificateStats_n(timestamp_us, transport_cert_stats, partial_report); - ProduceCodecStats_n(timestamp_us, transceiver_stats_infos_, partial_report); ProduceIceCandidateAndPairStats_n(timestamp_us, transport_stats_by_name, call_stats_, partial_report); ProduceTransportStats_n(timestamp_us, transport_stats_by_name, @@ -1449,53 +1648,6 @@ void RTCStatsCollector::ProduceCertificateStats_n( } } -void RTCStatsCollector::ProduceCodecStats_n( - int64_t timestamp_us, - const std::vector& transceiver_stats_infos, - RTCStatsReport* report) const { - RTC_DCHECK_RUN_ON(network_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - - for (const auto& stats : transceiver_stats_infos) { - if (!stats.mid) { - continue; - } - std::string transport_id = RTCTransportStatsIDFromTransportChannel( - *stats.transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP); - - const cricket::VoiceMediaInfo* voice_media_info = - stats.track_media_info_map->voice_media_info(); - const cricket::VideoMediaInfo* video_media_info = - stats.track_media_info_map->video_media_info(); - // Audio - if (voice_media_info) { - // Inbound - for (const auto& pair : voice_media_info->receive_codecs) { - report->AddStats(CodecStatsFromRtpCodecParameters( - timestamp_us, *stats.mid, transport_id, true, pair.second)); - } - // Outbound - for (const auto& pair : voice_media_info->send_codecs) { - report->AddStats(CodecStatsFromRtpCodecParameters( - timestamp_us, *stats.mid, transport_id, false, pair.second)); - } - } - // Video - if (video_media_info) { - // Inbound - for (const auto& pair : video_media_info->receive_codecs) { - report->AddStats(CodecStatsFromRtpCodecParameters( - timestamp_us, *stats.mid, transport_id, true, pair.second)); - } - // Outbound - for (const auto& pair : video_media_info->send_codecs) { - report->AddStats(CodecStatsFromRtpCodecParameters( - timestamp_us, *stats.mid, transport_id, false, pair.second)); - } - } - } -} - void RTCStatsCollector::ProduceDataChannelStats_s( int64_t timestamp_us, RTCStatsReport* report) const { @@ -1504,9 +1656,8 @@ void RTCStatsCollector::ProduceDataChannelStats_s( std::vector data_stats = pc_->GetDataChannelStats(); for (const auto& stats : data_stats) { std::unique_ptr data_channel_stats( - new RTCDataChannelStats( - "RTCDataChannel_" + rtc::ToString(stats.internal_id), - timestamp_us)); + std::make_unique( + "D" + rtc::ToString(stats.internal_id), timestamp_us)); data_channel_stats->label = std::move(stats.label); data_channel_stats->protocol = std::move(stats.protocol); data_channel_stats->data_channel_identifier = stats.id; @@ -1534,10 +1685,10 @@ void RTCStatsCollector::ProduceIceCandidateAndPairStats_n( for (const auto& channel_stats : transport_stats.channel_stats) { std::string transport_id = RTCTransportStatsIDFromTransportChannel( transport_name, 
channel_stats.component); - for (const cricket::ConnectionInfo& info : + for (const auto& info : channel_stats.ice_transport_stats.connection_infos) { std::unique_ptr candidate_pair_stats( - new RTCIceCandidatePairStats( + std::make_unique( RTCIceCandidatePairStatsIDFromConnectionInfo(info), timestamp_us)); @@ -1596,8 +1747,8 @@ void RTCStatsCollector::ProduceIceCandidateAndPairStats_n( } candidate_pair_stats->requests_received = static_cast(info.recv_ping_requests); - candidate_pair_stats->requests_sent = static_cast( - info.sent_ping_requests_before_first_response); + candidate_pair_stats->requests_sent = + static_cast(info.sent_ping_requests_total); candidate_pair_stats->responses_received = static_cast(info.recv_ping_responses); candidate_pair_stats->responses_sent = @@ -1610,6 +1761,15 @@ void RTCStatsCollector::ProduceIceCandidateAndPairStats_n( report->AddStats(std::move(candidate_pair_stats)); } + + // Produce local candidate stats. If a transport exists these will already + // have been produced. + for (const auto& candidate_stats : + channel_stats.ice_transport_stats.candidate_stats_list) { + const auto& candidate = candidate_stats.candidate(); + ProduceIceCandidateStats(timestamp_us, candidate, true, transport_id, + report); + } } } } @@ -1625,16 +1785,16 @@ void RTCStatsCollector::ProduceMediaStreamStats_s( for (const auto& stats : transceiver_stats_infos_) { for (const auto& sender : stats.transceiver->senders()) { std::string track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( - kSender, sender->internal()->AttachmentId()); + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + kDirectionOutbound, sender->internal()->AttachmentId()); for (auto& stream_id : sender->stream_ids()) { track_ids[stream_id].push_back(track_id); } } for (const auto& receiver : stats.transceiver->receivers()) { std::string track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( - kReceiver, receiver->internal()->AttachmentId()); + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + kDirectionInbound, receiver->internal()->AttachmentId()); for (auto& stream : receiver->streams()) { track_ids[stream->id()].push_back(track_id); } @@ -1643,8 +1803,9 @@ void RTCStatsCollector::ProduceMediaStreamStats_s( // Build stats for each stream ID known. 
for (auto& it : track_ids) { - std::unique_ptr stream_stats( - new RTCMediaStreamStats("RTCMediaStream_" + it.first, timestamp_us)); + std::unique_ptr stream_stats( + std::make_unique( + "DEPRECATED_S" + it.first, timestamp_us)); stream_stats->stream_identifier = it.first; stream_stats->track_ids = it.second; report->AddStats(std::move(stream_stats)); @@ -1660,16 +1821,18 @@ void RTCStatsCollector::ProduceMediaStreamTrackStats_s( for (const RtpTransceiverStatsInfo& stats : transceiver_stats_infos_) { std::vector> senders; for (const auto& sender : stats.transceiver->senders()) { - senders.push_back(sender->internal()); + senders.push_back( + rtc::scoped_refptr(sender->internal())); } - ProduceSenderMediaTrackStats(timestamp_us, *stats.track_media_info_map, + ProduceSenderMediaTrackStats(timestamp_us, stats.track_media_info_map, senders, report); std::vector> receivers; for (const auto& receiver : stats.transceiver->receivers()) { - receivers.push_back(receiver->internal()); + receivers.push_back( + rtc::scoped_refptr(receiver->internal())); } - ProduceReceiverMediaTrackStats(timestamp_us, *stats.track_media_info_map, + ProduceReceiverMediaTrackStats(timestamp_us, stats.track_media_info_map, receivers, report); } } @@ -1711,7 +1874,7 @@ void RTCStatsCollector::ProduceMediaSourceStats_s( // value indicating no SSRC. if (sender_internal->ssrc() != 0) { auto* voice_sender_info = - track_media_info_map->GetVoiceSenderInfoBySsrc( + track_media_info_map.GetVoiceSenderInfoBySsrc( sender_internal->ssrc()); if (voice_sender_info) { audio_source_stats->audio_level = DoubleAudioLevelFromIntAudioLevel( @@ -1756,7 +1919,7 @@ void RTCStatsCollector::ProduceMediaSourceStats_s( // value indicating no SSRC. if (sender_internal->ssrc() != 0) { auto* video_sender_info = - track_media_info_map->GetVideoSenderInfoBySsrc( + track_media_info_map.GetVideoSenderInfoBySsrc( sender_internal->ssrc()); if (video_sender_info) { video_source_stats->frames_per_second = @@ -1780,7 +1943,7 @@ void RTCStatsCollector::ProducePeerConnectionStats_s( rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; std::unique_ptr stats( - new RTCPeerConnectionStats("RTCPeerConnection", timestamp_us)); + std::make_unique("P", timestamp_us)); stats->data_channels_opened = internal_record_.data_channels_opened; stats->data_channels_closed = internal_record_.data_channels_closed; report->AddStats(std::move(stats)); @@ -1814,9 +1977,7 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( if (!stats.mid || !stats.transport_name) { return; } - RTC_DCHECK(stats.track_media_info_map); - const TrackMediaInfoMap& track_media_info_map = *stats.track_media_info_map; - RTC_DCHECK(track_media_info_map.voice_media_info()); + RTC_DCHECK(stats.track_media_info_map.voice_media_info().has_value()); std::string mid = *stats.mid; std::string transport_id = RTCTransportStatsIDFromTransportChannel( *stats.transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP); @@ -1824,62 +1985,82 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( // The remote-outbound stats are based on RTCP sender reports sent from the // remote endpoint providing metrics about the remote outbound streams. for (const cricket::VoiceReceiverInfo& voice_receiver_info : - track_media_info_map.voice_media_info()->receivers) { + stats.track_media_info_map.voice_media_info()->receivers) { if (!voice_receiver_info.connected()) continue; // Inbound. 
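Review note: ProduceMediaStreamStats_s above first groups the per-attachment track-stats IDs by stream ID and then emits one deprecated stream-stats entry per group. A self-contained sketch of that grouping step, using plain standard containers rather than the WebRTC types:

#include <map>
#include <string>
#include <utility>
#include <vector>

// Given (stream id, track-stats id) pairs collected from all senders and
// receivers, group the track ids by stream id.
std::map<std::string, std::vector<std::string>> GroupTrackIdsByStream(
    const std::vector<std::pair<std::string, std::string>>& stream_and_track) {
  std::map<std::string, std::vector<std::string>> track_ids;
  for (const auto& entry : stream_and_track) {
    track_ids[entry.first].push_back(entry.second);
  }
  return track_ids;
}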
- auto inbound_audio = - CreateInboundAudioStreamStats(voice_receiver_info, mid, timestamp_us); + auto inbound_audio = CreateInboundAudioStreamStats( + stats.track_media_info_map.voice_media_info().value(), + voice_receiver_info, transport_id, mid, timestamp_us, report); // TODO(hta): This lookup should look for the sender, not the track. rtc::scoped_refptr audio_track = - track_media_info_map.GetAudioTrack(voice_receiver_info); + stats.track_media_info_map.GetAudioTrack(voice_receiver_info); if (audio_track) { inbound_audio->track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( - kReceiver, - track_media_info_map.GetAttachmentIdByTrack(audio_track).value()); + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + kDirectionInbound, stats.track_media_info_map + .GetAttachmentIdByTrack(audio_track.get()) + .value()); + inbound_audio->track_identifier = audio_track->id(); + } + auto* inbound_audio_ptr = report->TryAddStats(std::move(inbound_audio)); + if (!inbound_audio_ptr) { + RTC_LOG(LS_ERROR) + << "Unable to add audio 'inbound-rtp' to report, ID is not unique."; + continue; } - inbound_audio->transport_id = transport_id; // Remote-outbound. auto remote_outbound_audio = CreateRemoteOutboundAudioStreamStats( - voice_receiver_info, mid, inbound_audio->id(), transport_id); + voice_receiver_info, mid, *inbound_audio_ptr, transport_id); // Add stats. if (remote_outbound_audio) { // When the remote outbound stats are available, the remote ID for the // local inbound stats is set. - inbound_audio->remote_id = remote_outbound_audio->id(); - report->AddStats(std::move(remote_outbound_audio)); + auto* remote_outbound_audio_ptr = + report->TryAddStats(std::move(remote_outbound_audio)); + if (remote_outbound_audio_ptr) { + inbound_audio_ptr->remote_id = remote_outbound_audio_ptr->id(); + } else { + RTC_LOG(LS_ERROR) << "Unable to add audio 'remote-outbound-rtp' to " + << "report, ID is not unique."; + } } - report->AddStats(std::move(inbound_audio)); } // Outbound. 
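Review note: the audio hunk above switches from AddStats to TryAddStats so that a duplicate stats ID is logged and skipped instead of clobbering an existing entry. A small sketch of that insert-if-unique pattern, with std::map standing in for RTCStatsReport (hypothetical Stats type):

#include <map>
#include <memory>
#include <string>
#include <utility>

struct Stats {
  std::string id;
};

// Returns the stored object on success, or nullptr when another entry already
// owns that ID (the caller then logs and skips the duplicate).
Stats* TryAdd(std::map<std::string, std::unique_ptr<Stats>>* report,
              std::unique_ptr<Stats> stats) {
  std::string id = stats->id;  // copy before the move below
  auto result = report->emplace(std::move(id), std::move(stats));
  return result.second ? result.first->second.get() : nullptr;
}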
std::map audio_outbound_rtps; for (const cricket::VoiceSenderInfo& voice_sender_info : - track_media_info_map.voice_media_info()->senders) { + stats.track_media_info_map.voice_media_info()->senders) { if (!voice_sender_info.connected()) continue; auto outbound_audio = std::make_unique( - RTCOutboundRTPStreamStatsIDFromSSRC(cricket::MEDIA_TYPE_AUDIO, - voice_sender_info.ssrc()), + RTCOutboundRTPStreamStatsIDFromSSRC( + transport_id, cricket::MEDIA_TYPE_AUDIO, voice_sender_info.ssrc()), timestamp_us); - SetOutboundRTPStreamStatsFromVoiceSenderInfo(mid, voice_sender_info, - outbound_audio.get()); + SetOutboundRTPStreamStatsFromVoiceSenderInfo( + transport_id, mid, + stats.track_media_info_map.voice_media_info().value(), + voice_sender_info, outbound_audio.get(), report); rtc::scoped_refptr audio_track = - track_media_info_map.GetAudioTrack(voice_sender_info); + stats.track_media_info_map.GetAudioTrack(voice_sender_info); if (audio_track) { int attachment_id = - track_media_info_map.GetAttachmentIdByTrack(audio_track).value(); + stats.track_media_info_map.GetAttachmentIdByTrack(audio_track.get()) + .value(); outbound_audio->track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(kSender, - attachment_id); + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + kDirectionOutbound, attachment_id); outbound_audio->media_source_id = RTCMediaSourceStatsIDFromKindAndAttachment(cricket::MEDIA_TYPE_AUDIO, attachment_id); } - outbound_audio->transport_id = transport_id; - audio_outbound_rtps.insert( - std::make_pair(outbound_audio->id(), outbound_audio.get())); - report->AddStats(std::move(outbound_audio)); + auto audio_outbound_pair = + std::make_pair(outbound_audio->id(), outbound_audio.get()); + if (report->TryAddStats(std::move(outbound_audio))) { + audio_outbound_rtps.insert(std::move(audio_outbound_pair)); + } else { + RTC_LOG(LS_ERROR) + << "Unable to add audio 'outbound-rtp' to report, ID is not unique."; + } } // Remote-inbound. // These are Report Block-based, information sent from the remote endpoint, @@ -1887,11 +2068,11 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( // that RTCOutboundRtpStreamStats, RTCCodecStats and RTCTransport have already // been added to the report. 
for (const cricket::VoiceSenderInfo& voice_sender_info : - track_media_info_map.voice_media_info()->senders) { + stats.track_media_info_map.voice_media_info()->senders) { for (const auto& report_block_data : voice_sender_info.report_block_datas) { report->AddStats(ProduceRemoteInboundRtpStreamStatsFromReportBlockData( - report_block_data, cricket::MEDIA_TYPE_AUDIO, audio_outbound_rtps, - *report)); + transport_id, report_block_data, cricket::MEDIA_TYPE_AUDIO, + audio_outbound_rtps, *report)); } } } @@ -1906,63 +2087,74 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n( if (!stats.mid || !stats.transport_name) { return; } - RTC_DCHECK(stats.track_media_info_map); - const TrackMediaInfoMap& track_media_info_map = *stats.track_media_info_map; - RTC_DCHECK(track_media_info_map.video_media_info()); + RTC_DCHECK(stats.track_media_info_map.video_media_info().has_value()); std::string mid = *stats.mid; std::string transport_id = RTCTransportStatsIDFromTransportChannel( *stats.transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP); // Inbound for (const cricket::VideoReceiverInfo& video_receiver_info : - track_media_info_map.video_media_info()->receivers) { + stats.track_media_info_map.video_media_info()->receivers) { if (!video_receiver_info.connected()) continue; auto inbound_video = std::make_unique( - RTCInboundRTPStreamStatsIDFromSSRC(cricket::MEDIA_TYPE_VIDEO, + RTCInboundRTPStreamStatsIDFromSSRC(transport_id, + cricket::MEDIA_TYPE_VIDEO, video_receiver_info.ssrc()), timestamp_us); - SetInboundRTPStreamStatsFromVideoReceiverInfo(mid, video_receiver_info, - inbound_video.get()); + SetInboundRTPStreamStatsFromVideoReceiverInfo( + transport_id, mid, + stats.track_media_info_map.video_media_info().value(), + video_receiver_info, inbound_video.get(), report); rtc::scoped_refptr video_track = - track_media_info_map.GetVideoTrack(video_receiver_info); + stats.track_media_info_map.GetVideoTrack(video_receiver_info); if (video_track) { inbound_video->track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( - kReceiver, - track_media_info_map.GetAttachmentIdByTrack(video_track).value()); + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + kDirectionInbound, stats.track_media_info_map + .GetAttachmentIdByTrack(video_track.get()) + .value()); + inbound_video->track_identifier = video_track->id(); + } + if (!report->TryAddStats(std::move(inbound_video))) { + RTC_LOG(LS_ERROR) + << "Unable to add video 'inbound-rtp' to report, ID is not unique."; } - inbound_video->transport_id = transport_id; - report->AddStats(std::move(inbound_video)); - // TODO(crbug.com/webrtc/12529): Add remote-outbound stats. 
} // Outbound std::map video_outbound_rtps; for (const cricket::VideoSenderInfo& video_sender_info : - track_media_info_map.video_media_info()->senders) { + stats.track_media_info_map.video_media_info()->senders) { if (!video_sender_info.connected()) continue; auto outbound_video = std::make_unique( - RTCOutboundRTPStreamStatsIDFromSSRC(cricket::MEDIA_TYPE_VIDEO, - video_sender_info.ssrc()), + RTCOutboundRTPStreamStatsIDFromSSRC( + transport_id, cricket::MEDIA_TYPE_VIDEO, video_sender_info.ssrc()), timestamp_us); - SetOutboundRTPStreamStatsFromVideoSenderInfo(mid, video_sender_info, - outbound_video.get()); + SetOutboundRTPStreamStatsFromVideoSenderInfo( + transport_id, mid, + stats.track_media_info_map.video_media_info().value(), + video_sender_info, outbound_video.get(), report); rtc::scoped_refptr video_track = - track_media_info_map.GetVideoTrack(video_sender_info); + stats.track_media_info_map.GetVideoTrack(video_sender_info); if (video_track) { int attachment_id = - track_media_info_map.GetAttachmentIdByTrack(video_track).value(); + stats.track_media_info_map.GetAttachmentIdByTrack(video_track.get()) + .value(); outbound_video->track_id = - RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(kSender, - attachment_id); + DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment( + kDirectionOutbound, attachment_id); outbound_video->media_source_id = RTCMediaSourceStatsIDFromKindAndAttachment(cricket::MEDIA_TYPE_VIDEO, attachment_id); } - outbound_video->transport_id = transport_id; - video_outbound_rtps.insert( - std::make_pair(outbound_video->id(), outbound_video.get())); - report->AddStats(std::move(outbound_video)); + auto video_outbound_pair = + std::make_pair(outbound_video->id(), outbound_video.get()); + if (report->TryAddStats(std::move(outbound_video))) { + video_outbound_rtps.insert(std::move(video_outbound_pair)); + } else { + RTC_LOG(LS_ERROR) + << "Unable to add video 'outbound-rtp' to report, ID is not unique."; + } } // Remote-inbound // These are Report Block-based, information sent from the remote endpoint, @@ -1970,11 +2162,11 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n( // that RTCOutboundRtpStreamStats, RTCCodecStats and RTCTransport have already // been added to the report. 
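// Both the audio and video loops above now pass `transport_id` into the
// RTC*RTPStreamStatsIDFromSSRC() helpers, so two streams that happen to share
// an SSRC on different transports still get distinct report IDs. A
// hypothetical sketch of such an ID composer; the real WebRTC ID format is
// not reproduced here:
#include <cstdint>
#include <iostream>
#include <string>

enum class Direction { kInbound, kOutbound };
enum class Kind { kAudio, kVideo };

std::string StreamStatsId(Direction direction,
                          Kind kind,
                          const std::string& transport_id,
                          uint32_t ssrc) {
  std::string id = (direction == Direction::kInbound) ? "IT" : "OT";
  id += transport_id;
  id += (kind == Kind::kAudio) ? "A" : "V";
  id += std::to_string(ssrc);
  return id;  // Illustrative format only.
}

int main() {
  // Same SSRC, different transports => different stats IDs.
  std::cout << StreamStatsId(Direction::kInbound, Kind::kVideo, "T01", 12345)
            << "\n"
            << StreamStatsId(Direction::kInbound, Kind::kVideo, "T02", 12345)
            << "\n";
  return 0;
}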
for (const cricket::VideoSenderInfo& video_sender_info : - track_media_info_map.video_media_info()->senders) { + stats.track_media_info_map.video_media_info()->senders) { for (const auto& report_block_data : video_sender_info.report_block_datas) { report->AddStats(ProduceRemoteInboundRtpStreamStatsFromReportBlockData( - report_block_data, cricket::MEDIA_TYPE_VIDEO, video_outbound_rtps, - *report)); + transport_id, report_block_data, cricket::MEDIA_TYPE_VIDEO, + video_outbound_rtps, *report)); } } } @@ -2023,24 +2215,30 @@ void RTCStatsCollector::ProduceTransportStats_n( for (const cricket::TransportChannelStats& channel_stats : transport_stats.channel_stats) { std::unique_ptr transport_stats( - new RTCTransportStats(RTCTransportStatsIDFromTransportChannel( - transport_name, channel_stats.component), - timestamp_us)); - transport_stats->bytes_sent = 0; - transport_stats->packets_sent = 0; - transport_stats->bytes_received = 0; - transport_stats->packets_received = 0; + std::make_unique( + RTCTransportStatsIDFromTransportChannel(transport_name, + channel_stats.component), + timestamp_us)); + transport_stats->packets_sent = + channel_stats.ice_transport_stats.packets_sent; + transport_stats->packets_received = + channel_stats.ice_transport_stats.packets_received; + transport_stats->bytes_sent = + channel_stats.ice_transport_stats.bytes_sent; + transport_stats->bytes_received = + channel_stats.ice_transport_stats.bytes_received; transport_stats->dtls_state = DtlsTransportStateToRTCDtlsTransportState(channel_stats.dtls_state); transport_stats->selected_candidate_pair_changes = channel_stats.ice_transport_stats.selected_candidate_pair_changes; + transport_stats->ice_role = + IceRoleToRTCIceRole(channel_stats.ice_transport_stats.ice_role); + transport_stats->ice_local_username_fragment = + channel_stats.ice_transport_stats.ice_local_username_fragment; + transport_stats->ice_state = IceTransportStateToRTCIceTransportState( + channel_stats.ice_transport_stats.ice_state); for (const cricket::ConnectionInfo& info : channel_stats.ice_transport_stats.connection_infos) { - *transport_stats->bytes_sent += info.sent_total_bytes; - *transport_stats->packets_sent += - info.sent_total_packets - info.sent_discarded_packets; - *transport_stats->bytes_received += info.recv_total_bytes; - *transport_stats->packets_received += info.packets_received; if (info.best_connection) { transport_stats->selected_candidate_pair_id = RTCIceCandidatePairStatsIDFromConnectionInfo(info); @@ -2060,6 +2258,15 @@ void RTCStatsCollector::ProduceTransportStats_n( snprintf(bytes, sizeof(bytes), "%04X", channel_stats.ssl_version_bytes); transport_stats->tls_version = bytes; } + + if (channel_stats.dtls_role) { + transport_stats->dtls_role = *channel_stats.dtls_role == rtc::SSL_CLIENT + ? 
webrtc::RTCDtlsRole::kClient + : webrtc::RTCDtlsRole::kServer; + } else { + transport_stats->dtls_role = webrtc::RTCDtlsRole::kUnknown; + } + if (channel_stats.ssl_cipher_suite != rtc::kTlsNullWithNullNull && rtc::SSLStreamAdapter::SslCipherSuiteToName( channel_stats.ssl_cipher_suite) @@ -2082,29 +2289,47 @@ void RTCStatsCollector::ProduceTransportStats_n( std::map RTCStatsCollector::PrepareTransportCertificateStats_n( const std::map& - transport_stats_by_name) const { + transport_stats_by_name) { RTC_DCHECK_RUN_ON(network_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; std::map transport_cert_stats; - for (const auto& entry : transport_stats_by_name) { - const std::string& transport_name = entry.first; - - CertificateStatsPair certificate_stats_pair; - rtc::scoped_refptr local_certificate; - if (pc_->GetLocalCertificate(transport_name, &local_certificate)) { - certificate_stats_pair.local = - local_certificate->GetSSLCertificateChain().GetStats(); + { + MutexLock lock(&cached_certificates_mutex_); + // Copy the certificate info from the cache, avoiding expensive + // rtc::SSLCertChain::GetStats() calls. + for (const auto& pair : cached_certificates_by_transport_) { + transport_cert_stats.insert( + std::make_pair(pair.first, pair.second.Copy())); } + } + if (transport_cert_stats.empty()) { + // Collect certificate info. + for (const auto& entry : transport_stats_by_name) { + const std::string& transport_name = entry.first; - std::unique_ptr remote_cert_chain = - pc_->GetRemoteSSLCertChain(transport_name); - if (remote_cert_chain) { - certificate_stats_pair.remote = remote_cert_chain->GetStats(); - } + CertificateStatsPair certificate_stats_pair; + rtc::scoped_refptr local_certificate; + if (pc_->GetLocalCertificate(transport_name, &local_certificate)) { + certificate_stats_pair.local = + local_certificate->GetSSLCertificateChain().GetStats(); + } - transport_cert_stats.insert( - std::make_pair(transport_name, std::move(certificate_stats_pair))); + std::unique_ptr remote_cert_chain = + pc_->GetRemoteSSLCertChain(transport_name); + if (remote_cert_chain) { + certificate_stats_pair.remote = remote_cert_chain->GetStats(); + } + + transport_cert_stats.insert( + std::make_pair(transport_name, std::move(certificate_stats_pair))); + } + // Copy the result into the certificate cache for future reference. + MutexLock lock(&cached_certificates_mutex_); + for (const auto& pair : transport_cert_stats) { + cached_certificates_by_transport_.insert( + std::make_pair(pair.first, pair.second.Copy())); + } } return transport_cert_stats; } @@ -2115,19 +2340,15 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { transceiver_stats_infos_.clear(); // These are used to invoke GetStats for all the media channels together in // one worker thread hop. - std::map> - voice_stats; - std::map> - video_stats; + std::map voice_stats; + std::map video_stats; auto transceivers = pc_->GetTransceiversInternal(); // TODO(tommi): See if we can avoid synchronously blocking the signaling - // thread while we do this (or avoid the Invoke at all). - network_thread_->Invoke(RTC_FROM_HERE, [this, &transceivers, - &voice_stats, &video_stats] { + // thread while we do this (or avoid the BlockingCall at all). 
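// PrepareTransportCertificateStats_n() above now serves copies out of
// cached_certificates_by_transport_ under a mutex and only falls back to the
// expensive certificate-chain GetStats() path when the cache is empty,
// re-publishing the result afterwards. A minimal stand-alone sketch of that
// shape; ExpensiveCompute and the string payloads are placeholders, not the
// real certificate-stats types:
#include <iostream>
#include <map>
#include <mutex>
#include <string>

class CertStatsCache {
 public:
  std::map<std::string, std::string> Prepare(
      const std::map<std::string, std::string>& transports) {
    std::map<std::string, std::string> result;
    {
      std::lock_guard<std::mutex> lock(mutex_);
      result = cache_;  // Cheap copy of previously computed entries.
    }
    if (!result.empty()) return result;

    // Cache miss: do the expensive work outside the lock...
    for (const auto& entry : transports)
      result[entry.first] = ExpensiveCompute(entry.second);

    // ...then publish the result for the next collection.
    std::lock_guard<std::mutex> lock(mutex_);
    cache_ = result;
    return result;
  }

  // Invalidation hook, analogous to clearing the cache on (re)negotiation as
  // the ClearCachedStatsReport() comment above requires.
  void Clear() {
    std::lock_guard<std::mutex> lock(mutex_);
    cache_.clear();
  }

 private:
  static std::string ExpensiveCompute(const std::string& cert) {
    return "stats-for-" + cert;  // Stands in for the costly GetStats() call.
  }

  std::mutex mutex_;
  std::map<std::string, std::string> cache_;
};

int main() {
  CertStatsCache cache;
  std::map<std::string, std::string> transports = {{"audio", "certA"}};
  std::cout << cache.Prepare(transports)["audio"] << "\n";  // Computed.
  std::cout << cache.Prepare(transports)["audio"] << "\n";  // From cache.
  return 0;
}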
+ network_thread_->BlockingCall([this, &transceivers, &voice_stats, + &video_stats] { rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const auto& transceiver_proxy : transceivers) { @@ -2147,21 +2368,21 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { continue; } - stats.mid = channel->content_name(); - stats.transport_name = channel->transport_name(); + stats.mid = channel->mid(); + stats.transport_name = std::string(channel->transport_name()); if (media_type == cricket::MEDIA_TYPE_AUDIO) { - auto* voice_channel = static_cast(channel); - RTC_DCHECK(voice_stats.find(voice_channel->media_channel()) == - voice_stats.end()); - voice_stats[voice_channel->media_channel()] = - std::make_unique(); + cricket::VoiceMediaChannel* voice_channel = + static_cast(channel->media_channel()); + RTC_DCHECK(voice_stats.find(voice_channel) == voice_stats.end()); + voice_stats.insert( + std::make_pair(voice_channel, cricket::VoiceMediaInfo())); } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { - auto* video_channel = static_cast(channel); - RTC_DCHECK(video_stats.find(video_channel->media_channel()) == - video_stats.end()); - video_stats[video_channel->media_channel()] = - std::make_unique(); + cricket::VideoMediaChannel* video_channel = + static_cast(channel->media_channel()); + RTC_DCHECK(video_stats.find(video_channel) == video_stats.end()); + video_stats.insert( + std::make_pair(video_channel, cricket::VideoMediaInfo())); } else { RTC_DCHECK_NOTREACHED(); } @@ -2172,17 +2393,17 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { // well as GetCallStats(). At the same time we construct the // TrackMediaInfoMaps, which also needs info from the worker thread. This // minimizes the number of thread jumps. - worker_thread_->Invoke(RTC_FROM_HERE, [&] { + worker_thread_->BlockingCall([&] { rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - for (const auto& entry : voice_stats) { - if (!entry.first->GetStats(entry.second.get(), - /*get_and_clear_legacy_stats=*/false)) { + for (auto& pair : voice_stats) { + if (!pair.first->GetStats(&pair.second, + /*get_and_clear_legacy_stats=*/false)) { RTC_LOG(LS_WARNING) << "Failed to get voice stats."; } } - for (const auto& entry : video_stats) { - if (!entry.first->GetStats(entry.second.get())) { + for (auto& pair : video_stats) { + if (!pair.first->GetStats(&pair.second)) { RTC_LOG(LS_WARNING) << "Failed to get video stats."; } } @@ -2190,35 +2411,38 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { // Create the TrackMediaInfoMap for each transceiver stats object. 
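// Several hunks above replace Thread::Invoke(RTC_FROM_HERE, ...) with
// Thread::BlockingCall(...): run a closure on another thread and block the
// caller until it returns, forwarding the closure's return value. The sketch
// below is a rough, standard-library-only analogue of that behaviour; it is
// not how rtc::Thread is actually implemented.
#include <condition_variable>
#include <functional>
#include <future>
#include <iostream>
#include <memory>
#include <mutex>
#include <queue>
#include <thread>

class MiniThread {
 public:
  MiniThread() : worker_([this] { Loop(); }) {}
  ~MiniThread() {
    Post(nullptr);  // An empty function acts as the stop signal.
    worker_.join();
  }

  // Runs `fn` on the worker thread and blocks until it has finished.
  template <typename Fn>
  auto BlockingCall(Fn fn) -> decltype(fn()) {
    using R = decltype(fn());
    auto task = std::make_shared<std::packaged_task<R()>>(std::move(fn));
    std::future<R> future = task->get_future();
    Post([task] { (*task)(); });  // shared_ptr keeps the task alive.
    return future.get();
  }

 private:
  void Post(std::function<void()> fn) {
    std::lock_guard<std::mutex> lock(mutex_);
    queue_.push(std::move(fn));
    cond_.notify_one();
  }

  void Loop() {
    for (;;) {
      std::function<void()> fn;
      {
        std::unique_lock<std::mutex> lock(mutex_);
        cond_.wait(lock, [this] { return !queue_.empty(); });
        fn = std::move(queue_.front());
        queue_.pop();
      }
      if (!fn) return;  // Stop signal.
      fn();
    }
  }

  std::mutex mutex_;
  std::condition_variable cond_;
  std::queue<std::function<void()>> queue_;
  std::thread worker_;
};

int main() {
  MiniThread worker;
  int answer = worker.BlockingCall([] { return 41 + 1; });
  std::cout << "computed on worker: " << answer << "\n";
  return 0;
}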
for (auto& stats : transceiver_stats_infos_) { auto transceiver = stats.transceiver; - std::unique_ptr voice_media_info; - std::unique_ptr video_media_info; - if (transceiver->channel()) { + absl::optional voice_media_info; + absl::optional video_media_info; + auto channel = transceiver->channel(); + if (channel) { cricket::MediaType media_type = transceiver->media_type(); if (media_type == cricket::MEDIA_TYPE_AUDIO) { - auto* voice_channel = - static_cast(transceiver->channel()); - RTC_DCHECK(voice_stats[voice_channel->media_channel()]); - voice_media_info = - std::move(voice_stats[voice_channel->media_channel()]); + cricket::VoiceMediaChannel* voice_channel = + static_cast( + channel->media_channel()); + RTC_DCHECK(voice_stats.find(voice_channel) != voice_stats.end()); + voice_media_info = std::move(voice_stats[voice_channel]); } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { - auto* video_channel = - static_cast(transceiver->channel()); - RTC_DCHECK(video_stats[video_channel->media_channel()]); - video_media_info = - std::move(video_stats[video_channel->media_channel()]); + cricket::VideoMediaChannel* video_channel = + static_cast( + channel->media_channel()); + RTC_DCHECK(video_stats.find(video_channel) != video_stats.end()); + video_media_info = std::move(video_stats[video_channel]); } } std::vector> senders; for (const auto& sender : transceiver->senders()) { - senders.push_back(sender->internal()); + senders.push_back( + rtc::scoped_refptr(sender->internal())); } std::vector> receivers; for (const auto& receiver : transceiver->receivers()) { - receivers.push_back(receiver->internal()); + receivers.push_back( + rtc::scoped_refptr(receiver->internal())); } - stats.track_media_info_map = std::make_unique( - std::move(voice_media_info), std::move(video_media_info), senders, - receivers); + stats.track_media_info_map.Initialize(std::move(voice_media_info), + std::move(video_media_info), + senders, receivers); } call_stats_ = pc_->GetCallStats(); diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h index c84e6d3fef..91175289e8 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h @@ -12,6 +12,8 @@ #define PC_RTC_STATS_COLLECTOR_H_ #include + +#include #include #include #include @@ -40,6 +42,7 @@ #include "rtc_base/ref_count.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" @@ -77,7 +80,11 @@ class RTCStatsCollector : public rtc::RefCountInterface, void GetStatsReport(rtc::scoped_refptr selector, rtc::scoped_refptr callback); // Clears the cache's reference to the most recent stats report. Subsequently - // calling `GetStatsReport` guarantees fresh stats. + // calling `GetStatsReport` guarantees fresh stats. This method must be called + // any time the PeerConnection visibly changes as a result of an API call as + // per + // https://w3c.github.io/webrtc-stats/#guidelines-for-getstats-results-caching-throttling + // and it must be called any time negotiation happens. 
void ClearCachedStatsReport(); // If there is a `GetStatsReport` requests in-flight, waits until it has been @@ -91,6 +98,8 @@ class RTCStatsCollector : public rtc::RefCountInterface, struct CertificateStatsPair { std::unique_ptr local; std::unique_ptr remote; + + CertificateStatsPair Copy() const; }; // Stats gathering on a particular thread. Virtual for the sake of testing. @@ -160,7 +169,7 @@ class RTCStatsCollector : public rtc::RefCountInterface, cricket::MediaType media_type; absl::optional mid; absl::optional transport_name; - std::unique_ptr track_media_info_map; + TrackMediaInfoMap track_media_info_map; }; void DeliverCachedReport( @@ -172,11 +181,6 @@ class RTCStatsCollector : public rtc::RefCountInterface, int64_t timestamp_us, const std::map& transport_cert_stats, RTCStatsReport* report) const; - // Produces `RTCCodecStats`. - void ProduceCodecStats_n( - int64_t timestamp_us, - const std::vector& transceiver_stats_infos, - RTCStatsReport* report) const; // Produces `RTCDataChannelStats`. void ProduceDataChannelStats_s(int64_t timestamp_us, RTCStatsReport* report) const; @@ -200,9 +204,11 @@ class RTCStatsCollector : public rtc::RefCountInterface, // Produces `RTCPeerConnectionStats`. void ProducePeerConnectionStats_s(int64_t timestamp_us, RTCStatsReport* report) const; - // Produces `RTCInboundRTPStreamStats` and `RTCOutboundRTPStreamStats`. - // This has to be invoked after codecs and transport stats have been created - // because some metrics are calculated through lookup of other metrics. + // Produces `RTCInboundRTPStreamStats`, `RTCOutboundRTPStreamStats`, + // `RTCRemoteInboundRtpStreamStats`, `RTCRemoteOutboundRtpStreamStats` and any + // referenced `RTCCodecStats`. This has to be invoked after transport stats + // have been created because some metrics are calculated through lookup of + // other metrics. void ProduceRTPStreamStats_n( int64_t timestamp_us, const std::vector& transceiver_stats_infos, @@ -225,7 +231,7 @@ class RTCStatsCollector : public rtc::RefCountInterface, std::map PrepareTransportCertificateStats_n( const std::map& - transport_stats_by_name) const; + transport_stats_by_name); // The results are stored in `transceiver_stats_infos_` and `call_stats_`. void PrepareTransceiverStatsInfosAndCallStats_s_w_n(); @@ -277,6 +283,12 @@ class RTCStatsCollector : public rtc::RefCountInterface, // now get rid of the variable and keep the data scoped within a stats // collection sequence. std::vector transceiver_stats_infos_; + // This cache avoids having to call rtc::SSLCertChain::GetStats(), which can + // relatively expensive. ClearCachedStatsReport() needs to be called on + // negotiation to ensure the cache is not obsolete. 
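// The header above adds CertificateStatsPair::Copy() so that cache entries
// holding unique_ptr-owned certificate stats can be duplicated for every
// collection. A hypothetical sketch of that shape; LocalRemote and Blob are
// stand-ins for the pair of rtc::SSLCertificateStats, not the real types:
#include <iostream>
#include <memory>
#include <string>

struct Blob {
  std::string data;
};

struct LocalRemote {
  std::unique_ptr<Blob> local;
  std::unique_ptr<Blob> remote;

  // Deep copy: unique_ptr members cannot be copied implicitly, so each side
  // is cloned explicitly (missing sides stay null).
  LocalRemote Copy() const {
    LocalRemote copy;
    if (local) copy.local = std::make_unique<Blob>(*local);
    if (remote) copy.remote = std::make_unique<Blob>(*remote);
    return copy;
  }
};

int main() {
  LocalRemote original;
  original.local = std::make_unique<Blob>(Blob{"local-cert-stats"});
  LocalRemote copy = original.Copy();
  copy.local->data = "modified";
  std::cout << original.local->data << " / " << copy.local->data << "\n";
  return 0;
}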
+ Mutex cached_certificates_mutex_; + std::map cached_certificates_by_transport_ + RTC_GUARDED_BY(cached_certificates_mutex_); Call::Stats call_stats_; diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_traversal.cc b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_traversal.cc index 6d886f5099..b3f6155e3f 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_traversal.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_traversal.cc @@ -62,7 +62,7 @@ rtc::scoped_refptr TakeReferencedStats( rtc::scoped_refptr report, const std::vector& ids) { rtc::scoped_refptr result = - RTCStatsReport::Create(report->timestamp_us()); + RTCStatsReport::Create(report->timestamp()); for (const auto& id : ids) { TraverseAndTakeVisitedStats(report.get(), result.get(), id); } @@ -91,11 +91,13 @@ std::vector GetStatsReferencedIds(const RTCStats& stats) { const auto& local_or_remote_candidate = static_cast(stats); AddIdIfDefined(local_or_remote_candidate.transport_id, &neighbor_ids); - } else if (type == RTCMediaStreamStats::kType) { - const auto& stream = static_cast(stats); + } else if (type == DEPRECATED_RTCMediaStreamStats::kType) { + const auto& stream = + static_cast(stats); AddIdsIfDefined(stream.track_ids, &neighbor_ids); - } else if (type == RTCMediaStreamTrackStats::kType) { - const auto& track = static_cast(stats); + } else if (type == DEPRECATED_RTCMediaStreamTrackStats::kType) { + const auto& track = + static_cast(stats); AddIdIfDefined(track.media_source_id, &neighbor_ids); } else if (type == RTCPeerConnectionStats::kType) { // RTCPeerConnectionStats does not have any neighbor references. diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_media_utils.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_media_utils.h index 6f7986f096..240274fe05 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_media_utils.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_media_utils.h @@ -11,8 +11,9 @@ #ifndef PC_RTP_MEDIA_UTILS_H_ #define PC_RTP_MEDIA_UTILS_H_ +#include // no-presubmit-check TODO(webrtc:8982) + #include "api/rtp_transceiver_direction.h" -#include "api/rtp_transceiver_interface.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc index 8d3064ed93..196cb79ea5 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc @@ -10,10 +10,10 @@ #include "pc/rtp_parameters_conversion.h" -#include #include #include #include +#include #include #include "api/array_view.h" @@ -308,7 +308,18 @@ void ToRtpCodecCapabilityTypeSpecific( template <> void ToRtpCodecCapabilityTypeSpecific( const cricket::VideoCodec& cricket_codec, - RtpCodecCapability* codec) {} + RtpCodecCapability* codec) { + if (cricket_codec.scalability_modes.empty() || + (cricket_codec.scalability_modes.size() == 1 && + cricket_codec.scalability_modes[0] == ScalabilityMode::kL1T1)) { + // https://w3c.github.io/webrtc-svc/#dom-rtcrtpcodeccapability-scalabilitymodes + // If a codec does not support encoding of scalability modes other than + // "L1T1", then the scalabilityModes member is not provided. 
+ return; + } + + codec->scalability_modes = cricket_codec.scalability_modes; +} template RtpCodecCapability ToRtpCodecCapability(const C& cricket_codec) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.h index 62e4685722..959f3fde47 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.h @@ -11,7 +11,6 @@ #ifndef PC_RTP_PARAMETERS_CONVERSION_H_ #define PC_RTP_PARAMETERS_CONVERSION_H_ -#include #include #include "absl/types/optional.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.cc index 2444c9b60d..a2b3353c0e 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.cc @@ -17,7 +17,7 @@ #include "pc/media_stream.h" #include "pc/media_stream_proxy.h" -#include "rtc_base/location.h" +#include "rtc_base/thread.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h index 73fc5b9858..8c49f56b75 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h @@ -34,7 +34,6 @@ #include "media/base/media_channel.h" #include "media/base/video_broadcaster.h" #include "pc/video_track_source.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/thread.h" namespace webrtc { @@ -42,16 +41,18 @@ namespace webrtc { // Internal class used by PeerConnection. class RtpReceiverInternal : public RtpReceiverInterface { public: - // Stops receiving. The track may be reactivated. + // Call on the signaling thread, to let the receiver know that the the + // embedded source object should enter a stopped/ended state and the track's + // state set to `kEnded`, a final state that cannot be reversed. virtual void Stop() = 0; - // Stops the receiver permanently. - // Causes the associated track to enter kEnded state. Cannot be reversed. - virtual void StopAndEndTrack() = 0; // Sets the underlying MediaEngine channel associated with this RtpSender. // A VoiceMediaChannel should be used for audio RtpSenders and // a VideoMediaChannel should be used for video RtpSenders. - // Must call SetMediaChannel(nullptr) before the media channel is destroyed. + // NOTE: + // * SetMediaChannel(nullptr) must be called before the media channel is + // destroyed. + // * This method must be invoked on the worker thread. 
virtual void SetMediaChannel(cricket::MediaChannel* media_channel) = 0; // Configures the RtpReceiver with the underlying media channel, with the diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc index d4286371be..98e86b3a51 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc @@ -12,6 +12,7 @@ #include #include +#include #include #include @@ -20,12 +21,10 @@ #include "api/media_stream_interface.h" #include "api/priority.h" #include "media/base/media_engine.h" -#include "pc/stats_collector_interface.h" +#include "pc/legacy_stats_collector_interface.h" #include "rtc_base/checks.h" #include "rtc_base/helpers.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/trace_event.h" namespace webrtc { @@ -74,8 +73,8 @@ RtpParameters RestoreEncodingLayers( const RtpParameters& parameters, const std::vector& removed_rids, const std::vector& all_layers) { - RTC_DCHECK_EQ(parameters.encodings.size() + removed_rids.size(), - all_layers.size()); + RTC_CHECK_EQ(parameters.encodings.size() + removed_rids.size(), + all_layers.size()); RtpParameters result(parameters); result.encodings.clear(); size_t index = 0; @@ -111,7 +110,8 @@ bool UnimplementedRtpParameterHasValue(const RtpParameters& parameters) { RtpSenderBase::RtpSenderBase(rtc::Thread* worker_thread, const std::string& id, SetStreamsObserver* set_streams_observer) - : worker_thread_(worker_thread), + : signaling_thread_(rtc::Thread::Current()), + worker_thread_(worker_thread), id_(id), set_streams_observer_(set_streams_observer) { RTC_DCHECK(worker_thread); @@ -120,11 +120,28 @@ RtpSenderBase::RtpSenderBase(rtc::Thread* worker_thread, void RtpSenderBase::SetFrameEncryptor( rtc::scoped_refptr frame_encryptor) { + RTC_DCHECK_RUN_ON(signaling_thread_); frame_encryptor_ = std::move(frame_encryptor); // Special Case: Set the frame encryptor to any value on any existing channel. 
if (media_channel_ && ssrc_ && !stopped_) { - worker_thread_->Invoke(RTC_FROM_HERE, [&] { - media_channel_->SetFrameEncryptor(ssrc_, frame_encryptor_); + worker_thread_->BlockingCall( + [&] { media_channel_->SetFrameEncryptor(ssrc_, frame_encryptor_); }); + } +} + +void RtpSenderBase::SetEncoderSelector( + std::unique_ptr + encoder_selector) { + RTC_DCHECK_RUN_ON(signaling_thread_); + encoder_selector_ = std::move(encoder_selector); + SetEncoderSelectorOnChannel(); +} + +void RtpSenderBase::SetEncoderSelectorOnChannel() { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (media_channel_ && ssrc_ && !stopped_) { + worker_thread_->BlockingCall([&] { + media_channel_->SetEncoderSelector(ssrc_, encoder_selector_.get()); }); } } @@ -136,20 +153,36 @@ void RtpSenderBase::SetMediaChannel(cricket::MediaChannel* media_channel) { } RtpParameters RtpSenderBase::GetParametersInternal() const { + RTC_DCHECK_RUN_ON(signaling_thread_); if (stopped_) { return RtpParameters(); } if (!media_channel_ || !ssrc_) { return init_parameters_; } - return worker_thread_->Invoke(RTC_FROM_HERE, [&] { + return worker_thread_->BlockingCall([&] { RtpParameters result = media_channel_->GetRtpSendParameters(ssrc_); RemoveEncodingLayers(disabled_rids_, &result.encodings); return result; }); } +RtpParameters RtpSenderBase::GetParametersInternalWithAllLayers() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (stopped_) { + return RtpParameters(); + } + if (!media_channel_ || !ssrc_) { + return init_parameters_; + } + return worker_thread_->BlockingCall([&] { + RtpParameters result = media_channel_->GetRtpSendParameters(ssrc_); + return result; + }); +} + RtpParameters RtpSenderBase::GetParameters() const { + RTC_DCHECK_RUN_ON(signaling_thread_); RtpParameters result = GetParametersInternal(); last_transaction_id_ = rtc::CreateRandomUuid(); result.transaction_id = last_transaction_id_.value(); @@ -157,6 +190,7 @@ RtpParameters RtpSenderBase::GetParameters() const { } RTCError RtpSenderBase::SetParametersInternal(const RtpParameters& parameters) { + RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(!stopped_); if (UnimplementedRtpParameterHasValue(parameters)) { @@ -166,26 +200,60 @@ RTCError RtpSenderBase::SetParametersInternal(const RtpParameters& parameters) { } if (!media_channel_ || !ssrc_) { auto result = cricket::CheckRtpParametersInvalidModificationAndValues( - init_parameters_, parameters); + init_parameters_, parameters, video_codec_preferences_); if (result.ok()) { init_parameters_ = parameters; } return result; } - return worker_thread_->Invoke(RTC_FROM_HERE, [&] { + return worker_thread_->BlockingCall([&] { RtpParameters rtp_parameters = parameters; + RtpParameters old_parameters = media_channel_->GetRtpSendParameters(ssrc_); if (!disabled_rids_.empty()) { // Need to add the inactive layers. 
- RtpParameters old_parameters = - media_channel_->GetRtpSendParameters(ssrc_); rtp_parameters = RestoreEncodingLayers(parameters, disabled_rids_, old_parameters.encodings); } + + auto result = cricket::CheckRtpParametersInvalidModificationAndValues( + old_parameters, rtp_parameters); + if (!result.ok()) + return result; + + result = CheckSVCParameters(rtp_parameters); + if (!result.ok()) + return result; + + return media_channel_->SetRtpSendParameters(ssrc_, rtp_parameters); + }); +} + +RTCError RtpSenderBase::SetParametersInternalWithAllLayers( + const RtpParameters& parameters) { + RTC_DCHECK_RUN_ON(signaling_thread_); + RTC_DCHECK(!stopped_); + + if (UnimplementedRtpParameterHasValue(parameters)) { + LOG_AND_RETURN_ERROR( + RTCErrorType::UNSUPPORTED_PARAMETER, + "Attempted to set an unimplemented parameter of RtpParameters."); + } + if (!media_channel_ || !ssrc_) { + auto result = cricket::CheckRtpParametersInvalidModificationAndValues( + init_parameters_, parameters, video_codec_preferences_); + if (result.ok()) { + init_parameters_ = parameters; + } + return result; + } + return worker_thread_->BlockingCall([&] { + RtpParameters rtp_parameters = parameters; return media_channel_->SetRtpSendParameters(ssrc_, rtp_parameters); }); } RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) { + RTC_DCHECK_RUN_ON(signaling_thread_); TRACE_EVENT0("webrtc", "RtpSenderBase::SetParameters"); if (is_transceiver_stopped_) { LOG_AND_RETURN_ERROR( @@ -225,6 +293,7 @@ void RtpSenderBase::SetStreams(const std::vector& stream_ids) { } bool RtpSenderBase::SetTrack(MediaStreamTrackInterface* track) { + RTC_DCHECK_RUN_ON(signaling_thread_); TRACE_EVENT0("webrtc", "RtpSenderBase::SetTrack"); if (stopped_) { RTC_LOG(LS_ERROR) << "SetTrack can't be called on a stopped RtpSender."; @@ -266,6 +335,7 @@ bool RtpSenderBase::SetTrack(MediaStreamTrackInterface* track) { } void RtpSenderBase::SetSsrc(uint32_t ssrc) { + RTC_DCHECK_RUN_ON(signaling_thread_); TRACE_EVENT0("webrtc", "RtpSenderBase::SetSsrc"); if (stopped_ || ssrc == ssrc_) { return; @@ -280,8 +350,9 @@ void RtpSenderBase::SetSsrc(uint32_t ssrc) { SetSend(); AddTrackToStats(); } - if (!init_parameters_.encodings.empty()) { - worker_thread_->Invoke(RTC_FROM_HERE, [&] { + if (!init_parameters_.encodings.empty() || + init_parameters_.degradation_preference.has_value()) { + worker_thread_->BlockingCall([&] { RTC_DCHECK(media_channel_); // Get the current parameters, which are constructed from the SDP. // The number of layers in the SDP is currently authoritative to support @@ -291,8 +362,8 @@ void RtpSenderBase::SetSsrc(uint32_t ssrc) { // we need to copy. RtpParameters current_parameters = media_channel_->GetRtpSendParameters(ssrc_); - RTC_DCHECK_GE(current_parameters.encodings.size(), - init_parameters_.encodings.size()); + RTC_CHECK_GE(current_parameters.encodings.size(), + init_parameters_.encodings.size()); for (size_t i = 0; i < init_parameters_.encodings.size(); ++i) { init_parameters_.encodings[i].ssrc = current_parameters.encodings[i].ssrc; @@ -303,6 +374,7 @@ void RtpSenderBase::SetSsrc(uint32_t ssrc) { init_parameters_.degradation_preference; media_channel_->SetRtpSendParameters(ssrc_, current_parameters); init_parameters_.encodings.clear(); + init_parameters_.degradation_preference = absl::nullopt; }); } // Attempt to attach the frame decryptor to the current media channel. 
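// GetParametersInternal()/SetParametersInternal() above hide the encodings
// whose rids are in disabled_rids_ and splice them back in before pushing the
// parameters to the media channel, while the new *WithAllLayers variants skip
// that step for internal callers such as DisableEncodingLayers(). A small
// stand-alone sketch of the hide/restore round trip; Encoding is a stand-in
// for RtpEncodingParameters:
#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

struct Encoding {
  std::string rid;
  bool active = true;
};

std::vector<Encoding> RemoveLayers(const std::vector<Encoding>& all,
                                   const std::vector<std::string>& hidden) {
  std::vector<Encoding> visible;
  for (const Encoding& e : all) {
    if (std::find(hidden.begin(), hidden.end(), e.rid) == hidden.end())
      visible.push_back(e);
  }
  return visible;
}

std::vector<Encoding> RestoreLayers(const std::vector<Encoding>& visible,
                                    const std::vector<Encoding>& all,
                                    const std::vector<std::string>& hidden) {
  // Walk the full layer list in order, taking hidden layers as inactive and
  // everything else from the caller-provided (visible) parameters.
  std::vector<Encoding> result;
  size_t index = 0;
  for (const Encoding& e : all) {
    if (std::find(hidden.begin(), hidden.end(), e.rid) != hidden.end()) {
      Encoding inactive = e;
      inactive.active = false;
      result.push_back(inactive);
    } else {
      result.push_back(visible[index++]);
    }
  }
  return result;
}

int main() {
  std::vector<Encoding> all = {{"q", true}, {"h", true}, {"f", true}};
  std::vector<std::string> hidden = {"h"};
  auto visible = RemoveLayers(all, hidden);           // Application sees q, f.
  visible[0].active = false;                          // Application edits q.
  auto pushed = RestoreLayers(visible, all, hidden);  // Channel gets q, h, f.
  for (const Encoding& e : pushed)
    std::cout << e.rid << (e.active ? " active\n" : " inactive\n");
  return 0;
}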
@@ -312,9 +384,13 @@ void RtpSenderBase::SetSsrc(uint32_t ssrc) { if (frame_transformer_) { SetEncoderToPacketizerFrameTransformer(frame_transformer_); } + if (encoder_selector_) { + SetEncoderSelectorOnChannel(); + } } void RtpSenderBase::Stop() { + RTC_DCHECK_RUN_ON(signaling_thread_); TRACE_EVENT0("webrtc", "RtpSenderBase::Stop"); // TODO(deadbeef): Need to do more here to fully stop sending packets. if (stopped_) { @@ -335,6 +411,7 @@ void RtpSenderBase::Stop() { RTCError RtpSenderBase::DisableEncodingLayers( const std::vector& rids) { + RTC_DCHECK_RUN_ON(signaling_thread_); if (stopped_) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE, "Cannot disable encodings on a stopped sender."); @@ -345,7 +422,7 @@ RTCError RtpSenderBase::DisableEncodingLayers( } // Check that all the specified layers exist and disable them in the channel. - RtpParameters parameters = GetParametersInternal(); + RtpParameters parameters = GetParametersInternalWithAllLayers(); for (const std::string& rid : rids) { if (absl::c_none_of(parameters.encodings, [&rid](const RtpEncodingParameters& encoding) { @@ -370,7 +447,7 @@ RTCError RtpSenderBase::DisableEncodingLayers( [&encoding](const std::string& rid) { return encoding.rid == rid; }); } - RTCError result = SetParametersInternal(parameters); + RTCError result = SetParametersInternalWithAllLayers(parameters); if (result.ok()) { disabled_rids_.insert(disabled_rids_.end(), rids.begin(), rids.end()); // Invalidate any transaction upon success. @@ -381,9 +458,10 @@ RTCError RtpSenderBase::DisableEncodingLayers( void RtpSenderBase::SetEncoderToPacketizerFrameTransformer( rtc::scoped_refptr frame_transformer) { + RTC_DCHECK_RUN_ON(signaling_thread_); frame_transformer_ = std::move(frame_transformer); if (media_channel_ && ssrc_ && !stopped_) { - worker_thread_->Invoke(RTC_FROM_HERE, [&] { + worker_thread_->BlockingCall([&] { media_channel_->SetEncoderToPacketizerFrameTransformer( ssrc_, frame_transformer_); }); @@ -405,6 +483,8 @@ void LocalAudioSinkAdapter::OnData( size_t number_of_channels, size_t number_of_frames, absl::optional absolute_capture_timestamp_ms) { + TRACE_EVENT2("webrtc", "LocalAudioSinkAdapter::OnData", "sample_rate", + sample_rate, "number_of_frames", number_of_frames); MutexLock lock(&lock_); if (sink_) { sink_->OnData(audio_data, bits_per_sample, sample_rate, number_of_channels, @@ -422,7 +502,7 @@ void LocalAudioSinkAdapter::SetSink(cricket::AudioSource::Sink* sink) { rtc::scoped_refptr AudioRtpSender::Create( rtc::Thread* worker_thread, const std::string& id, - StatsCollectorInterface* stats, + LegacyStatsCollectorInterface* stats, SetStreamsObserver* set_streams_observer) { return rtc::make_ref_counted(worker_thread, id, stats, set_streams_observer); @@ -430,18 +510,17 @@ rtc::scoped_refptr AudioRtpSender::Create( AudioRtpSender::AudioRtpSender(rtc::Thread* worker_thread, const std::string& id, - StatsCollectorInterface* stats, + LegacyStatsCollectorInterface* legacy_stats, SetStreamsObserver* set_streams_observer) : RtpSenderBase(worker_thread, id, set_streams_observer), - stats_(stats), - dtmf_sender_proxy_(DtmfSenderProxy::Create( - rtc::Thread::Current(), - DtmfSender::Create(rtc::Thread::Current(), this))), + legacy_stats_(legacy_stats), + dtmf_sender_(DtmfSender::Create(rtc::Thread::Current(), this)), + dtmf_sender_proxy_( + DtmfSenderProxy::Create(rtc::Thread::Current(), dtmf_sender_)), sink_adapter_(new LocalAudioSinkAdapter()) {} AudioRtpSender::~AudioRtpSender() { - // For DtmfSender. 
- SignalDestroyed(); + dtmf_sender_->OnDtmfProviderDestroyed(); Stop(); } @@ -456,8 +535,8 @@ bool AudioRtpSender::CanInsertDtmf() { RTC_LOG(LS_ERROR) << "CanInsertDtmf: Sender does not have SSRC."; return false; } - return worker_thread_->Invoke( - RTC_FROM_HERE, [&] { return voice_media_channel()->CanInsertDtmf(); }); + return worker_thread_->BlockingCall( + [&] { return voice_media_channel()->CanInsertDtmf(); }); } bool AudioRtpSender::InsertDtmf(int code, int duration) { @@ -469,20 +548,16 @@ bool AudioRtpSender::InsertDtmf(int code, int duration) { RTC_LOG(LS_ERROR) << "InsertDtmf: Sender does not have SSRC."; return false; } - bool success = worker_thread_->Invoke(RTC_FROM_HERE, [&] { - return voice_media_channel()->InsertDtmf(ssrc_, code, duration); - }); + bool success = worker_thread_->BlockingCall( + [&] { return voice_media_channel()->InsertDtmf(ssrc_, code, duration); }); if (!success) { RTC_LOG(LS_ERROR) << "Failed to insert DTMF to channel."; } return success; } -sigslot::signal0<>* AudioRtpSender::GetOnDestroyedSignal() { - return &SignalDestroyed; -} - void AudioRtpSender::OnChanged() { + RTC_DCHECK_RUN_ON(signaling_thread_); TRACE_EVENT0("webrtc", "AudioRtpSender::OnChanged"); RTC_DCHECK(!stopped_); if (cached_track_enabled_ != track_->enabled()) { @@ -505,22 +580,31 @@ void AudioRtpSender::AttachTrack() { } void AudioRtpSender::AddTrackToStats() { - if (can_send_track() && stats_) { - stats_->AddLocalAudioTrack(audio_track().get(), ssrc_); + if (can_send_track() && legacy_stats_) { + legacy_stats_->AddLocalAudioTrack(audio_track().get(), ssrc_); } } void AudioRtpSender::RemoveTrackFromStats() { - if (can_send_track() && stats_) { - stats_->RemoveLocalAudioTrack(audio_track().get(), ssrc_); + if (can_send_track() && legacy_stats_) { + legacy_stats_->RemoveLocalAudioTrack(audio_track().get(), ssrc_); } } rtc::scoped_refptr AudioRtpSender::GetDtmfSender() const { + RTC_DCHECK_RUN_ON(signaling_thread_); return dtmf_sender_proxy_; } +RTCError AudioRtpSender::GenerateKeyFrame() { + RTC_DCHECK_RUN_ON(signaling_thread_); + RTC_DLOG(LS_ERROR) << "Tried to get generate a key frame for audio."; + return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, + "Generating key frames for audio is not supported."); +} + void AudioRtpSender::SetSend() { + RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(!stopped_); RTC_DCHECK(can_send_track()); if (!media_channel_) { @@ -541,7 +625,7 @@ void AudioRtpSender::SetSend() { // `track_->enabled()` hops to the signaling thread, so call it before we hop // to the worker thread or else it will deadlock. 
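// The comment above ("call it before we hop to the worker thread or else it
// will deadlock") reflects the rule followed throughout this file: read any
// state that lives on the calling thread first, then capture the plain value
// in the closure that blocks on the other thread. A rough illustration, using
// std::async in place of worker_thread_->BlockingCall() and a made-up Track
// type:
#include <future>
#include <iostream>

struct Track {
  bool enabled() const { return enabled_; }  // Imagine this must run on the
  bool enabled_ = true;                      // signaling thread.
};

int main() {
  Track track;
  // Read on the current ("signaling") thread...
  bool track_enabled = track.enabled();
  // ...and hand only the captured bool to the code running elsewhere, instead
  // of calling track.enabled() from inside the blocked-on closure.
  bool sent = std::async(std::launch::async, [track_enabled] {
                return track_enabled;  // Used as the `enable` flag.
              }).get();
  std::cout << "send enabled: " << sent << "\n";
  return 0;
}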
bool track_enabled = track_->enabled(); - bool success = worker_thread_->Invoke(RTC_FROM_HERE, [&] { + bool success = worker_thread_->BlockingCall([&] { return voice_media_channel()->SetAudioSend(ssrc_, track_enabled, &options, sink_adapter_.get()); }); @@ -551,6 +635,7 @@ void AudioRtpSender::SetSend() { } void AudioRtpSender::ClearSend() { + RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(ssrc_ != 0); RTC_DCHECK(!stopped_); if (!media_channel_) { @@ -558,7 +643,7 @@ void AudioRtpSender::ClearSend() { return; } cricket::AudioOptions options; - bool success = worker_thread_->Invoke(RTC_FROM_HERE, [&] { + bool success = worker_thread_->BlockingCall([&] { return voice_media_channel()->SetAudioSend(ssrc_, false, &options, nullptr); }); if (!success) { @@ -584,10 +669,13 @@ VideoRtpSender::~VideoRtpSender() { } void VideoRtpSender::OnChanged() { + RTC_DCHECK_RUN_ON(signaling_thread_); TRACE_EVENT0("webrtc", "VideoRtpSender::OnChanged"); RTC_DCHECK(!stopped_); - if (cached_track_content_hint_ != video_track()->content_hint()) { - cached_track_content_hint_ = video_track()->content_hint(); + + auto content_hint = video_track()->content_hint(); + if (cached_track_content_hint_ != content_hint) { + cached_track_content_hint_ = content_hint; if (can_send_track()) { SetSend(); } @@ -600,11 +688,25 @@ void VideoRtpSender::AttachTrack() { } rtc::scoped_refptr VideoRtpSender::GetDtmfSender() const { - RTC_LOG(LS_ERROR) << "Tried to get DTMF sender from video sender."; + RTC_DCHECK_RUN_ON(signaling_thread_); + RTC_DLOG(LS_ERROR) << "Tried to get DTMF sender from video sender."; return nullptr; } +RTCError VideoRtpSender::GenerateKeyFrame() { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (video_media_channel() && ssrc_ && !stopped_) { + worker_thread_->PostTask( + [&] { video_media_channel()->GenerateSendKeyFrame(ssrc_); }); + } else { + RTC_LOG(LS_WARNING) << "Tried to generate key frame for sender that is " + "stopped or has no media channel."; + } + return RTCError::OK(); +} + void VideoRtpSender::SetSend() { + RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(!stopped_); RTC_DCHECK(can_send_track()); if (!media_channel_) { @@ -629,13 +731,15 @@ void VideoRtpSender::SetSend() { options.is_screencast = true; break; } - bool success = worker_thread_->Invoke(RTC_FROM_HERE, [&] { - return video_media_channel()->SetVideoSend(ssrc_, &options, video_track()); + bool success = worker_thread_->BlockingCall([&] { + return video_media_channel()->SetVideoSend(ssrc_, &options, + video_track().get()); }); RTC_DCHECK(success); } void VideoRtpSender::ClearSend() { + RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(ssrc_ != 0); RTC_DCHECK(!stopped_); if (!media_channel_) { @@ -645,9 +749,24 @@ void VideoRtpSender::ClearSend() { // Allow SetVideoSend to fail since `enable` is false and `source` is null. // This the normal case when the underlying media channel has already been // deleted. - worker_thread_->Invoke(RTC_FROM_HERE, [&] { - return video_media_channel()->SetVideoSend(ssrc_, nullptr, nullptr); - }); + worker_thread_->BlockingCall( + [&] { video_media_channel()->SetVideoSend(ssrc_, nullptr, nullptr); }); +} + +RTCError VideoRtpSender::CheckSVCParameters(const RtpParameters& parameters) { + cricket::VideoCodec codec; + video_media_channel()->GetSendCodec(&codec); + + // Match the currently used codec against the codec preferences to gather + // the SVC capabilities. 
+ std::vector codecs; + for (const auto& codec_preference : video_codec_preferences_) { + if (codec.Matches(codec_preference)) { + codecs.push_back(codec_preference); + } + } + + return cricket::CheckScalabilityModeValues(parameters, codecs); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h index 4bc16c796f..2cfa08dc07 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h @@ -17,6 +17,7 @@ #include #include + #include #include #include @@ -32,18 +33,18 @@ #include "api/rtp_parameters.h" #include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "media/base/audio_source.h" #include "media/base/media_channel.h" #include "pc/dtmf_sender.h" -#include "pc/stats_collector_interface.h" +#include "pc/legacy_stats_collector_interface.h" +#include "rtc_base/checks.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { -class StatsCollectorInterface; - bool UnimplementedRtpParameterHasValue(const RtpParameters& parameters); // Internal interface used by PeerConnection. @@ -74,6 +75,18 @@ class RtpSenderInternal : public RtpSenderInterface { virtual RtpParameters GetParametersInternal() const = 0; virtual RTCError SetParametersInternal(const RtpParameters& parameters) = 0; + // GetParameters and SetParameters will remove deactivated simulcast layers + // and restore them on SetParameters. This is probably a Bad Idea, but we + // do not know who depends on this behavior + virtual RtpParameters GetParametersInternalWithAllLayers() const = 0; + virtual RTCError SetParametersInternalWithAllLayers( + const RtpParameters& parameters) = 0; + + // Additional checks that are specific to the Sender type + virtual RTCError CheckSVCParameters(const RtpParameters& parameters) { + return webrtc::RTCError::OK(); + } + // Returns an ID that changes every time SetTrack() is called, but // otherwise remains constant. Used to generate IDs for stats. // The special value zero means that no track is attached. @@ -85,6 +98,11 @@ class RtpSenderInternal : public RtpSenderInterface { const std::vector& rid) = 0; virtual void SetTransceiverAsStopped() = 0; + + // Used by the owning transceiver to inform the sender on the currently + // selected codecs. + virtual void SetVideoCodecPreferences( + std::vector codec_preferences) = 0; }; // Shared implementation for RtpSenderInternal interface. @@ -104,6 +122,9 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { bool SetTrack(MediaStreamTrackInterface* track) override; rtc::scoped_refptr track() const override { + // This method is currently called from the worker thread by + // RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n. + // RTC_DCHECK_RUN_ON(signaling_thread_); return track_; } @@ -114,15 +135,26 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { // Allow access to get/set parameters without invalidating transaction id. RtpParameters GetParametersInternal() const override; RTCError SetParametersInternal(const RtpParameters& parameters) override; + RtpParameters GetParametersInternalWithAllLayers() const override; + RTCError SetParametersInternalWithAllLayers( + const RtpParameters& parameters) override; // Used to set the SSRC of the sender, once a local description has been set. 
// If `ssrc` is 0, this indiates that the sender should disconnect from the // underlying transport (this occurs if the sender isn't seen in a local // description). void SetSsrc(uint32_t ssrc) override; - uint32_t ssrc() const override { return ssrc_; } + uint32_t ssrc() const override { + // This method is currently called from the worker thread by + // RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n. + // RTC_DCHECK_RUN_ON(signaling_thread_); + return ssrc_; + } - std::vector stream_ids() const override { return stream_ids_; } + std::vector stream_ids() const override { + RTC_DCHECK_RUN_ON(signaling_thread_); + return stream_ids_; + } void set_stream_ids(const std::vector& stream_ids) override { stream_ids_ = stream_ids; } @@ -135,6 +167,7 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { init_parameters_.encodings = init_send_encodings; } std::vector init_send_encodings() const override { + RTC_DCHECK_RUN_ON(signaling_thread_); return init_parameters_.encodings; } @@ -143,6 +176,7 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { dtls_transport_ = dtls_transport; } rtc::scoped_refptr dtls_transport() const override { + RTC_DCHECK_RUN_ON(signaling_thread_); return dtls_transport_; } @@ -168,7 +202,21 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { void SetEncoderToPacketizerFrameTransformer( rtc::scoped_refptr frame_transformer) override; - void SetTransceiverAsStopped() override { is_transceiver_stopped_ = true; } + void SetEncoderSelector( + std::unique_ptr + encoder_selector) override; + + void SetEncoderSelectorOnChannel(); + + void SetTransceiverAsStopped() override { + RTC_DCHECK_RUN_ON(signaling_thread_); + is_transceiver_stopped_ = true; + } + + void SetVideoCodecPreferences( + std::vector codec_preferences) override { + video_codec_preferences_ = codec_preferences; + } protected: // If `set_streams_observer` is not null, it is invoked when SetStreams() @@ -177,8 +225,8 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { RtpSenderBase(rtc::Thread* worker_thread, const std::string& id, SetStreamsObserver* set_streams_observer); - // TODO(nisse): Since SSRC == 0 is technically valid, figure out - // some other way to test if we have a valid SSRC. + // TODO(bugs.webrtc.org/8694): Since SSRC == 0 is technically valid, figure + // out some other way to test if we have a valid SSRC. bool can_send_track() const { return track_ && ssrc_; } virtual std::string track_kind() const = 0; @@ -195,16 +243,23 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { virtual void AddTrackToStats() {} virtual void RemoveTrackFromStats() {} - rtc::Thread* worker_thread_; + rtc::Thread* const signaling_thread_; + rtc::Thread* const worker_thread_; uint32_t ssrc_ = 0; - bool stopped_ = false; - bool is_transceiver_stopped_ = false; + bool stopped_ RTC_GUARDED_BY(signaling_thread_) = false; + bool is_transceiver_stopped_ RTC_GUARDED_BY(signaling_thread_) = false; int attachment_id_ = 0; const std::string id_; std::vector stream_ids_; RtpParameters init_parameters_; + std::vector video_codec_preferences_; + // TODO(tommi): `media_channel_` and several other member variables in this + // class (ssrc_, stopped_, etc) are accessed from more than one thread without + // a guard or lock. Internally there are also several Invoke()s that we could + // remove since the upstream code may already be performing several operations + // on the worker thread. 
cricket::MediaChannel* media_channel_ = nullptr; rtc::scoped_refptr track_; @@ -221,6 +276,8 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { SetStreamsObserver* set_streams_observer_ = nullptr; rtc::scoped_refptr frame_transformer_; + std::unique_ptr + encoder_selector_; }; // LocalAudioSinkAdapter receives data callback as a sink to the local @@ -275,14 +332,13 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { static rtc::scoped_refptr Create( rtc::Thread* worker_thread, const std::string& id, - StatsCollectorInterface* stats, + LegacyStatsCollectorInterface* stats, SetStreamsObserver* set_streams_observer); virtual ~AudioRtpSender(); // DtmfSenderProvider implementation. bool CanInsertDtmf() override; bool InsertDtmf(int code, int duration) override; - sigslot::signal0<>* GetOnDestroyedSignal() override; // ObserverInterface implementation. void OnChanged() override; @@ -295,11 +351,12 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { } rtc::scoped_refptr GetDtmfSender() const override; + RTCError GenerateKeyFrame() override; protected: AudioRtpSender(rtc::Thread* worker_thread, const std::string& id, - StatsCollectorInterface* stats, + LegacyStatsCollectorInterface* legacy_stats, SetStreamsObserver* set_streams_observer); void SetSend() override; @@ -319,9 +376,9 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { return rtc::scoped_refptr( static_cast(track_.get())); } - sigslot::signal0<> SignalDestroyed; - StatsCollectorInterface* stats_ = nullptr; + LegacyStatsCollectorInterface* legacy_stats_ = nullptr; + rtc::scoped_refptr dtmf_sender_; rtc::scoped_refptr dtmf_sender_proxy_; bool cached_track_enabled_ = false; @@ -354,6 +411,9 @@ class VideoRtpSender : public RtpSenderBase { } rtc::scoped_refptr GetDtmfSender() const override; + RTCError GenerateKeyFrame() override; + + RTCError CheckSVCParameters(const RtpParameters& parameters) override; protected: VideoRtpSender(rtc::Thread* worker_thread, diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender_proxy.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender_proxy.h index 2f8fe2c0bf..376fd29d24 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender_proxy.h @@ -11,6 +11,7 @@ #ifndef PC_RTP_SENDER_PROXY_H_ #define PC_RTP_SENDER_PROXY_H_ +#include #include #include @@ -44,6 +45,10 @@ PROXY_METHOD1(void, SetStreams, const std::vector&) PROXY_METHOD1(void, SetEncoderToPacketizerFrameTransformer, rtc::scoped_refptr) +PROXY_METHOD1(void, + SetEncoderSelector, + std::unique_ptr) +PROXY_METHOD0(RTCError, GenerateKeyFrame) END_PROXY_MAP(RtpSender) } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc index a78b9d6be6..8b65dbf4fe 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc @@ -10,22 +10,25 @@ #include "pc/rtp_transceiver.h" +#include #include #include #include #include #include "absl/algorithm/container.h" +#include "absl/memory/memory.h" +#include "api/peer_connection_interface.h" #include "api/rtp_parameters.h" #include "api/sequence_checker.h" #include "media/base/codec.h" #include "media/base/media_constants.h" -#include "pc/channel_manager.h" +#include "media/base/media_engine.h" +#include "pc/channel.h" #include "pc/rtp_media_utils.h" #include "pc/session_description.h" #include "rtc_base/checks.h" #include 
"rtc_base/logging.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" namespace webrtc { @@ -104,6 +107,25 @@ RTCError VerifyCodecPreferences(const std::vector& codecs, return RTCError::OK(); } +// Matches the list of codecs as capabilities (potentially without SVC related +// information) to the list of send codecs and returns the list of codecs with +// all the SVC related information. +std::vector MatchCodecPreferences( + const std::vector& codecs, + const std::vector& send_codecs) { + std::vector result; + + for (const auto& codec_preference : codecs) { + for (const cricket::VideoCodec& send_codec : send_codecs) { + if (send_codec.MatchesCapability(codec_preference)) { + result.push_back(send_codec); + } + } + } + + return result; +} + TaskQueueBase* GetCurrentTaskQueueOrThread() { TaskQueueBase* current = TaskQueueBase::Current(); if (!current) @@ -113,35 +135,35 @@ TaskQueueBase* GetCurrentTaskQueueOrThread() { } // namespace -RtpTransceiver::RtpTransceiver( - cricket::MediaType media_type, - cricket::ChannelManager* channel_manager /* = nullptr*/) +RtpTransceiver::RtpTransceiver(cricket::MediaType media_type, + ConnectionContext* context) : thread_(GetCurrentTaskQueueOrThread()), unified_plan_(false), media_type_(media_type), - channel_manager_(channel_manager) { + context_(context) { RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO || media_type == cricket::MEDIA_TYPE_VIDEO); - RTC_DCHECK(channel_manager_); } RtpTransceiver::RtpTransceiver( rtc::scoped_refptr> sender, rtc::scoped_refptr> receiver, - cricket::ChannelManager* channel_manager, + ConnectionContext* context, std::vector header_extensions_offered, std::function on_negotiation_needed) : thread_(GetCurrentTaskQueueOrThread()), unified_plan_(true), media_type_(sender->media_type()), - channel_manager_(channel_manager), + context_(context), header_extensions_to_offer_(std::move(header_extensions_offered)), on_negotiation_needed_(std::move(on_negotiation_needed)) { RTC_DCHECK(media_type_ == cricket::MEDIA_TYPE_AUDIO || media_type_ == cricket::MEDIA_TYPE_VIDEO); RTC_DCHECK_EQ(sender->media_type(), receiver->media_type()); - RTC_DCHECK(channel_manager_); + if (sender->media_type() == cricket::MEDIA_TYPE_VIDEO) + sender->internal()->SetVideoCodecPreferences( + media_engine()->video().send_codecs(false)); senders_.push_back(sender); receivers_.push_back(receiver); } @@ -154,28 +176,100 @@ RtpTransceiver::~RtpTransceiver() { RTC_DCHECK_RUN_ON(thread_); StopInternal(); } + + RTC_CHECK(!channel_) << "Missing call to ClearChannel?"; } -void RtpTransceiver::SetChannel(cricket::ChannelInterface* channel) { +RTCError RtpTransceiver::CreateChannel( + absl::string_view mid, + Call* call_ptr, + const cricket::MediaConfig& media_config, + bool srtp_required, + CryptoOptions crypto_options, + const cricket::AudioOptions& audio_options, + const cricket::VideoOptions& video_options, + VideoBitrateAllocatorFactory* video_bitrate_allocator_factory, + std::function transport_lookup) { RTC_DCHECK_RUN_ON(thread_); - // Cannot set a non-null channel on a stopped transceiver. - if (stopped_ && channel) { - return; + if (!media_engine()) { + // TODO(hta): Must be a better way + return RTCError(RTCErrorType::INTERNAL_ERROR, + "No media engine for mid=" + std::string(mid)); + } + std::unique_ptr new_channel; + if (media_type() == cricket::MEDIA_TYPE_AUDIO) { + // TODO(bugs.webrtc.org/11992): CreateVideoChannel internally switches to + // the worker thread. 
We shouldn't be using the `call_ptr_` hack here but + // simply be on the worker thread and use `call_` (update upstream code). + RTC_DCHECK(call_ptr); + RTC_DCHECK(media_engine()); + // TODO(bugs.webrtc.org/11992): Remove this workaround after updates in + // PeerConnection and add the expectation that we're already on the right + // thread. + context()->worker_thread()->BlockingCall([&] { + RTC_DCHECK_RUN_ON(context()->worker_thread()); + + cricket::VoiceMediaChannel* media_channel = + media_engine()->voice().CreateMediaChannel( + call_ptr, media_config, audio_options, crypto_options); + if (!media_channel) { + return; + } + + new_channel = std::make_unique( + context()->worker_thread(), context()->network_thread(), + context()->signaling_thread(), absl::WrapUnique(media_channel), mid, + srtp_required, crypto_options, context()->ssrc_generator()); + }); + } else { + RTC_DCHECK_EQ(cricket::MEDIA_TYPE_VIDEO, media_type()); + + // TODO(bugs.webrtc.org/11992): CreateVideoChannel internally switches to + // the worker thread. We shouldn't be using the `call_ptr_` hack here but + // simply be on the worker thread and use `call_` (update upstream code). + context()->worker_thread()->BlockingCall([&] { + RTC_DCHECK_RUN_ON(context()->worker_thread()); + cricket::VideoMediaChannel* media_channel = + media_engine()->video().CreateMediaChannel( + call_ptr, media_config, video_options, crypto_options, + video_bitrate_allocator_factory); + if (!media_channel) { + return; + } + + new_channel = std::make_unique( + context()->worker_thread(), context()->network_thread(), + context()->signaling_thread(), absl::WrapUnique(media_channel), mid, + srtp_required, crypto_options, context()->ssrc_generator()); + }); + } + if (!new_channel) { + // TODO(hta): Must be a better way + return RTCError(RTCErrorType::INTERNAL_ERROR, + "Failed to create channel for mid=" + std::string(mid)); } + SetChannel(std::move(new_channel), transport_lookup); + return RTCError::OK(); +} - RTC_DCHECK(channel || channel_); +void RtpTransceiver::SetChannel( + std::unique_ptr channel, + std::function transport_lookup) { + RTC_DCHECK_RUN_ON(thread_); + RTC_DCHECK(channel); + RTC_DCHECK(transport_lookup); + RTC_DCHECK(!channel_); + // Cannot set a channel on a stopped transceiver. + if (stopped_) { + return; + } RTC_LOG_THREAD_BLOCK_COUNT(); - if (channel_) { - signaling_thread_safety_->SetNotAlive(); - signaling_thread_safety_ = nullptr; - } + RTC_DCHECK_EQ(media_type(), channel->media_type()); + signaling_thread_safety_ = PendingTaskSafetyFlag::Create(); - if (channel) { - RTC_DCHECK_EQ(media_type(), channel->media_type()); - signaling_thread_safety_ = PendingTaskSafetyFlag::Create(); - } + std::unique_ptr channel_to_delete; // An alternative to this, could be to require SetChannel to be called // on the network thread. The channel object operates for the most part @@ -186,36 +280,80 @@ void RtpTransceiver::SetChannel(cricket::ChannelInterface* channel) { // Similarly, if the channel() accessor is limited to the network thread, that // helps with keeping the channel implementation requirements being met and // avoids synchronization for accessing the pointer or network related state. 
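// Editor's sketch (not the WebRTC implementation): the CreateChannel() hunks
// above build the voice/video media channel inside a single blocking hop to
// the worker thread (context()->worker_thread()->BlockingCall(...)). The
// stand-alone example below models that "construct on the owning thread via
// one blocking call" pattern; `SimpleWorker` is a hypothetical stand-in for
// rtc::Thread::BlockingCall() and spawns a throwaway thread purely for
// illustration.
#include <functional>
#include <memory>
#include <string>
#include <thread>

class SimpleWorker {
 public:
  // Run `fn` on another thread and block the caller until it finishes,
  // mimicking a single signaling->worker hop.
  void BlockingCall(const std::function<void()>& fn) {
    std::thread t(fn);
    t.join();
  }
};

struct FakeMediaChannel {
  explicit FakeMediaChannel(std::string mid) : mid(std::move(mid)) {}
  std::string mid;
};

// Builds the channel on the worker, exactly once, inside one blocking call;
// returns null on failure, as the hunk above does.
std::unique_ptr<FakeMediaChannel> CreateChannelOnWorker(SimpleWorker& worker,
                                                        const std::string& mid) {
  std::unique_ptr<FakeMediaChannel> new_channel;
  worker.BlockingCall([&] { new_channel = std::make_unique<FakeMediaChannel>(mid); });
  return new_channel;
}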
- channel_manager_->network_thread()->Invoke(RTC_FROM_HERE, [&]() { + context()->network_thread()->BlockingCall([&]() { if (channel_) { channel_->SetFirstPacketReceivedCallback(nullptr); + channel_->SetRtpTransport(nullptr); + channel_to_delete = std::move(channel_); } - channel_ = channel; + channel_ = std::move(channel); + + channel_->SetRtpTransport(transport_lookup(channel_->mid())); + channel_->SetFirstPacketReceivedCallback( + [thread = thread_, flag = signaling_thread_safety_, this]() mutable { + thread->PostTask( + SafeTask(std::move(flag), [this]() { OnFirstPacketReceived(); })); + }); + }); + PushNewMediaChannelAndDeleteChannel(nullptr); + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2); +} + +void RtpTransceiver::ClearChannel() { + RTC_DCHECK_RUN_ON(thread_); + + if (!channel_) { + return; + } + + RTC_LOG_THREAD_BLOCK_COUNT(); + + if (channel_) { + signaling_thread_safety_->SetNotAlive(); + signaling_thread_safety_ = nullptr; + } + std::unique_ptr channel_to_delete; + + context()->network_thread()->BlockingCall([&]() { if (channel_) { - channel_->SetFirstPacketReceivedCallback( - [thread = thread_, flag = signaling_thread_safety_, this]() mutable { - thread->PostTask(ToQueuedTask( - std::move(flag), [this]() { OnFirstPacketReceived(); })); - }); + channel_->SetFirstPacketReceivedCallback(nullptr); + channel_->SetRtpTransport(nullptr); + channel_to_delete = std::move(channel_); } }); - for (const auto& sender : senders_) { - sender->internal()->SetMediaChannel(channel_ ? channel_->media_channel() - : nullptr); + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); + PushNewMediaChannelAndDeleteChannel(std::move(channel_to_delete)); + + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2); +} + +void RtpTransceiver::PushNewMediaChannelAndDeleteChannel( + std::unique_ptr channel_to_delete) { + // The clumsy combination of pushing down media channel and deleting + // the channel is due to the desire to do both things in one Invoke(). + if (!channel_to_delete && senders_.empty() && receivers_.empty()) { + return; } + context()->worker_thread()->BlockingCall([&]() { + // Push down the new media_channel, if any, otherwise clear it. + auto* media_channel = channel_ ? channel_->media_channel() : nullptr; + for (const auto& sender : senders_) { + sender->internal()->SetMediaChannel(media_channel); + } - RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); + for (const auto& receiver : receivers_) { + receiver->internal()->SetMediaChannel(media_channel); + } - for (const auto& receiver : receivers_) { - if (!channel_) { - receiver->internal()->Stop(); - } else { - receiver->internal()->SetMediaChannel(channel_->media_channel()); + // Destroy the channel, if we had one, now _after_ updating the receivers + // who might have had references to the previous channel. + if (channel_to_delete) { + channel_to_delete.reset(nullptr); } - } + }); } void RtpTransceiver::AddSender( @@ -226,6 +364,14 @@ void RtpTransceiver::AddSender( RTC_DCHECK(sender); RTC_DCHECK_EQ(media_type(), sender->media_type()); RTC_DCHECK(!absl::c_linear_search(senders_, sender)); + if (media_type() == cricket::MEDIA_TYPE_VIDEO) { + std::vector send_codecs = + media_engine()->video().send_codecs(false); + sender->internal()->SetVideoCodecPreferences( + codec_preferences_.empty() + ? 
send_codecs + : MatchCodecPreferences(codec_preferences_, send_codecs)); + } senders_.push_back(sender); } @@ -256,6 +402,7 @@ void RtpTransceiver::AddReceiver( } bool RtpTransceiver::RemoveReceiver(RtpReceiverInterface* receiver) { + RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(!unified_plan_); if (receiver) { RTC_DCHECK_EQ(media_type(), receiver->media_type()); @@ -264,8 +411,13 @@ bool RtpTransceiver::RemoveReceiver(RtpReceiverInterface* receiver) { if (it == receivers_.end()) { return false; } - // `Stop()` will clear the internally cached pointer to the media channel. + (*it)->internal()->Stop(); + context()->worker_thread()->BlockingCall([&]() { + // `Stop()` will clear the receiver's pointer to the media channel. + (*it)->internal()->SetMediaChannel(nullptr); + }); + receivers_.erase(it); return true; } @@ -273,14 +425,14 @@ bool RtpTransceiver::RemoveReceiver(RtpReceiverInterface* receiver) { rtc::scoped_refptr RtpTransceiver::sender_internal() const { RTC_DCHECK(unified_plan_); RTC_CHECK_EQ(1u, senders_.size()); - return senders_[0]->internal(); + return rtc::scoped_refptr(senders_[0]->internal()); } rtc::scoped_refptr RtpTransceiver::receiver_internal() const { RTC_DCHECK(unified_plan_); RTC_CHECK_EQ(1u, receivers_.size()); - return receivers_[0]->internal(); + return rtc::scoped_refptr(receivers_[0]->internal()); } cricket::MediaType RtpTransceiver::media_type() const { @@ -323,7 +475,8 @@ void RtpTransceiver::set_current_direction(RtpTransceiverDirection direction) { } } -void RtpTransceiver::set_fired_direction(RtpTransceiverDirection direction) { +void RtpTransceiver::set_fired_direction( + absl::optional direction) { fired_direction_ = direction; } @@ -383,15 +536,22 @@ void RtpTransceiver::StopSendingAndReceiving() { // // 3. Stop sending media with sender. // + RTC_DCHECK_RUN_ON(thread_); + // 4. Send an RTCP BYE for each RTP stream that was being sent by sender, as // specified in [RFC3550]. - RTC_DCHECK_RUN_ON(thread_); for (const auto& sender : senders_) sender->internal()->Stop(); - // 5. Stop receiving media with receiver. + // Signal to receiver sources that we're stopping. for (const auto& receiver : receivers_) - receiver->internal()->StopAndEndTrack(); + receiver->internal()->Stop(); + + context()->worker_thread()->BlockingCall([&]() { + // 5 Stop receiving media with receiver. + for (const auto& receiver : receivers_) + receiver->internal()->SetMediaChannel(nullptr); + }); stopping_ = true; direction_ = webrtc::RtpTransceiverDirection::kInactive; @@ -456,11 +616,13 @@ void RtpTransceiver::StopTransceiverProcedure() { RTCError RtpTransceiver::SetCodecPreferences( rtc::ArrayView codec_capabilities) { RTC_DCHECK(unified_plan_); - // 3. If codecs is an empty list, set transceiver's [[PreferredCodecs]] slot // to codecs and abort these steps. 
if (codec_capabilities.empty()) { codec_preferences_.clear(); + if (media_type() == cricket::MEDIA_TYPE_VIDEO) + senders_.front()->internal()->SetVideoCodecPreferences( + media_engine()->video().send_codecs(false)); return RTCError::OK(); } @@ -475,16 +637,19 @@ RTCError RtpTransceiver::SetCodecPreferences( RTCError result; if (media_type_ == cricket::MEDIA_TYPE_AUDIO) { std::vector recv_codecs, send_codecs; - channel_manager_->GetSupportedAudioReceiveCodecs(&recv_codecs); - channel_manager_->GetSupportedAudioSendCodecs(&send_codecs); - + send_codecs = media_engine()->voice().send_codecs(); + recv_codecs = media_engine()->voice().recv_codecs(); result = VerifyCodecPreferences(codecs, send_codecs, recv_codecs); } else if (media_type_ == cricket::MEDIA_TYPE_VIDEO) { std::vector recv_codecs, send_codecs; - channel_manager_->GetSupportedVideoReceiveCodecs(&recv_codecs); - channel_manager_->GetSupportedVideoSendCodecs(&send_codecs); - + send_codecs = media_engine()->video().send_codecs(context()->use_rtx()); + recv_codecs = media_engine()->video().recv_codecs(context()->use_rtx()); result = VerifyCodecPreferences(codecs, send_codecs, recv_codecs); + + if (result.ok()) { + senders_.front()->internal()->SetVideoCodecPreferences( + MatchCodecPreferences(codecs, send_codecs)); + } } if (result.ok()) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h index c995329273..625d0a52c5 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h @@ -13,34 +13,48 @@ #include -#include #include +#include #include #include #include "absl/types/optional.h" #include "api/array_view.h" +#include "api/audio_options.h" +#include "api/jsep.h" #include "api/media_types.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_direction.h" #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" +#include "api/video/video_bitrate_allocator_factory.h" +#include "media/base/media_channel.h" #include "pc/channel_interface.h" -#include "pc/channel_manager.h" +#include "pc/connection_context.h" #include "pc/proxy.h" #include "pc/rtp_receiver.h" #include "pc/rtp_receiver_proxy.h" #include "pc/rtp_sender.h" #include "pc/rtp_sender_proxy.h" -#include "rtc_base/ref_counted_object.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "pc/rtp_transport_internal.h" +#include "pc/session_description.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread_annotations.h" +namespace cricket { +class ChannelManager; +class MediaEngineInterface; +} + namespace webrtc { +class PeerConnectionSdpMethods; + // Implementation of the public RtpTransceiverInterface. // // The RtpTransceiverInterface is only intended to be used with a PeerConnection @@ -62,25 +76,21 @@ namespace webrtc { // with this m= section. Since the transceiver, senders, and receivers are // reference counted and can be referenced from JavaScript (in Chromium), these // objects must be ready to live for an arbitrary amount of time. 
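// Editor's sketch of the capability-matching idea behind MatchCodecPreferences()
// in the hunks above: keep the engine's own send-codec entries (which still
// carry the SVC details) whenever they match one of the requested
// capabilities. The Codec/Capability structs and MatchesCapability() below are
// simplified stand-ins, not the cricket types.
#include <string>
#include <vector>

struct Capability { std::string name; };
struct Codec {
  std::string name;
  int num_spatial_layers = 1;  // SVC detail preserved from the engine list
  bool MatchesCapability(const Capability& c) const { return name == c.name; }
};

std::vector<Codec> MatchPreferences(const std::vector<Capability>& prefs,
                                    const std::vector<Codec>& send_codecs) {
  std::vector<Codec> result;
  for (const Capability& pref : prefs) {
    for (const Codec& send_codec : send_codecs) {
      if (send_codec.MatchesCapability(pref)) {
        result.push_back(send_codec);  // the engine entry wins, SVC info intact
      }
    }
  }
  return result;
}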
The -// BaseChannel is not reference counted and is owned by the ChannelManager, so -// the PeerConnection must take care of creating/deleting the BaseChannel and -// setting the channel reference in the transceiver to null when it has been -// deleted. +// BaseChannel is not reference counted, so +// the PeerConnection must take care of creating/deleting the BaseChannel. // // The RtpTransceiver is specialized to either audio or video according to the // MediaType specified in the constructor. Audio RtpTransceivers will have // AudioRtpSenders, AudioRtpReceivers, and a VoiceChannel. Video RtpTransceivers // will have VideoRtpSenders, VideoRtpReceivers, and a VideoChannel. -class RtpTransceiver final - : public rtc::RefCountedObject, - public sigslot::has_slots<> { +class RtpTransceiver : public RtpTransceiverInterface, + public sigslot::has_slots<> { public: // Construct a Plan B-style RtpTransceiver with no senders, receivers, or // channel set. // `media_type` specifies the type of RtpTransceiver (and, by transitivity, // the type of senders, receivers, and channel). Can either by audio or video. - RtpTransceiver(cricket::MediaType media_type, - cricket::ChannelManager* channel_manager); + RtpTransceiver(cricket::MediaType media_type, ConnectionContext* context); // Construct a Unified Plan-style RtpTransceiver with the given sender and // receiver. The media type will be derived from the media types of the sender // and receiver. The sender and receiver should have the same media type. @@ -90,18 +100,64 @@ class RtpTransceiver final rtc::scoped_refptr> sender, rtc::scoped_refptr> receiver, - cricket::ChannelManager* channel_manager, + ConnectionContext* context, std::vector HeaderExtensionsToOffer, std::function on_negotiation_needed); ~RtpTransceiver() override; + // Not copyable or movable. + RtpTransceiver(const RtpTransceiver&) = delete; + RtpTransceiver& operator=(const RtpTransceiver&) = delete; + RtpTransceiver(RtpTransceiver&&) = delete; + RtpTransceiver& operator=(RtpTransceiver&&) = delete; + // Returns the Voice/VideoChannel set for this transceiver. May be null if // the transceiver is not in the currently set local/remote description. - cricket::ChannelInterface* channel() const { return channel_; } + cricket::ChannelInterface* channel() const { return channel_.get(); } + + // Creates the Voice/VideoChannel and sets it. + RTCError CreateChannel( + absl::string_view mid, + Call* call_ptr, + const cricket::MediaConfig& media_config, + bool srtp_required, + CryptoOptions crypto_options, + const cricket::AudioOptions& audio_options, + const cricket::VideoOptions& video_options, + VideoBitrateAllocatorFactory* video_bitrate_allocator_factory, + std::function transport_lookup); // Sets the Voice/VideoChannel. The caller must pass in the correct channel - // implementation based on the type of the transceiver. - void SetChannel(cricket::ChannelInterface* channel); + // implementation based on the type of the transceiver. The call must + // furthermore be made on the signaling thread. + // + // `channel`: The channel instance to be associated with the transceiver. + // This must be a valid pointer. + // The state of the object + // is expected to be newly constructed and not initalized for network + // activity (see next parameter for more). + // + // The transceiver takes ownership of `channel`. + // + // `transport_lookup`: This + // callback function will be used to look up the `RtpTransport` object + // to associate with the channel via `BaseChannel::SetRtpTransport`. 
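// Editor's sketch of the `transport_lookup` contract documented above: the
// callback runs synchronously (on the network thread) while SetChannel() is
// still on the caller's stack, so capturing stack-local state by reference is
// safe. All names below are hypothetical; `RunSynchronously` stands in for the
// blocking network-thread hop.
#include <cassert>
#include <functional>
#include <string>
#include <thread>

inline void RunSynchronously(const std::function<void()>& fn) {
  std::thread t(fn);  // the "network thread" for the purpose of this sketch
  t.join();           // caller blocks, so its stack frame stays alive
}

struct FakeTransport { std::string mid; };

void SetChannelLike(const std::string& mid,
                    const std::function<FakeTransport*(const std::string&)>& lookup) {
  FakeTransport* transport = nullptr;
  RunSynchronously([&] { transport = lookup(mid); });  // one hop, lookup inside it
  assert(transport != nullptr);
}

void Caller() {
  FakeTransport local{"audio"};                // lives on Caller's stack
  SetChannelLike("audio", [&](const std::string&) {
    return &local;                             // safe: Caller is blocked until done
  });
}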
+ // The lookup function will be called on the network thread, synchronously + // during the call to `SetChannel`. This means that the caller of + // `SetChannel()` may provide a callback function that references state + // that exists within the calling scope of SetChannel (e.g. a variable + // on the stack). + // The reason for this design is to limit the number of times we jump + // synchronously to the network thread from the signaling thread. + // The callback allows us to combine the transport lookup with network + // state initialization of the channel object. + // ClearChannel() must be used before calling SetChannel() again. + void SetChannel(std::unique_ptr channel, + std::function + transport_lookup); + + // Clear the association between the transceiver and the channel. + void ClearChannel(); // Adds an RtpSender of the appropriate type to be owned by this transceiver. // Must not be null. @@ -170,8 +226,8 @@ class RtpTransceiver final // Sets the fired direction for this transceiver. The fired direction is null // until SetRemoteDescription is called or an answer is set (either local or - // remote). - void set_fired_direction(RtpTransceiverDirection direction); + // remote) after which the only valid reason to go back to null is rollback. + void set_fired_direction(absl::optional direction); // According to JSEP rules for SetRemoteDescription, RtpTransceivers can be // reused only if they were added by AddTrack. @@ -244,8 +300,16 @@ class RtpTransceiver final const cricket::MediaContentDescription* content); private: + cricket::MediaEngineInterface* media_engine() const { + return context_->media_engine(); + } + ConnectionContext* context() const { return context_; } void OnFirstPacketReceived(); void StopSendingAndReceiving(); + // Delete a channel, and ensure that references to its media channel + // are updated before deleting it. + void PushNewMediaChannelAndDeleteChannel( + std::unique_ptr channel_to_delete); // Enforce that this object is created, used and destroyed on one thread. TaskQueueBase* const thread_; @@ -270,8 +334,11 @@ class RtpTransceiver final bool reused_for_addtrack_ = false; bool has_ever_been_used_to_send_ = false; - cricket::ChannelInterface* channel_ = nullptr; - cricket::ChannelManager* channel_manager_ = nullptr; + // Accessed on both thread_ and the network thread. Considered safe + // because all access on the network thread is within an invoke() + // from thread_. 
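// Editor's sketch of the teardown ordering used by ClearChannel() /
// PushNewMediaChannelAndDeleteChannel() above, now that `channel_` is a
// unique_ptr owned by the transceiver: detach the channel inside the
// network-thread section, clear every cached raw pointer on the worker thread,
// and only then let the unique_ptr die. All types are illustrative.
#include <memory>
#include <vector>

struct FakeChannel { int media_channel = 42; };
struct FakeReceiver { int* media_channel = nullptr; };

struct FakeTransceiver {
  std::unique_ptr<FakeChannel> channel_;
  std::vector<FakeReceiver*> receivers_;

  void ClearChannelLike() {
    // 1. "Network thread": detach, but do not destroy yet.
    std::unique_ptr<FakeChannel> to_delete = std::move(channel_);
    // 2. "Worker thread": drop every cached pointer into the old channel.
    for (FakeReceiver* r : receivers_) r->media_channel = nullptr;
    // 3. Only now is it safe to destroy the channel itself.
    to_delete.reset();
  }
};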
+ std::unique_ptr channel_ = nullptr; + ConnectionContext* const context_; std::vector codec_preferences_; std::vector header_extensions_to_offer_; diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.cc index e91715496c..a81f17a95b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.cc @@ -10,15 +10,15 @@ #include "pc/rtp_transmission_manager.h" -#include +#include #include #include "absl/types/optional.h" #include "api/peer_connection_interface.h" #include "api/rtp_transceiver_direction.h" #include "pc/audio_rtp_receiver.h" -#include "pc/channel.h" -#include "pc/stats_collector_interface.h" +#include "pc/channel_interface.h" +#include "pc/legacy_stats_collector_interface.h" #include "pc/video_rtp_receiver.h" #include "rtc_base/checks.h" #include "rtc_base/helpers.h" @@ -35,20 +35,16 @@ static const char kDefaultVideoSenderId[] = "defaultv0"; RtpTransmissionManager::RtpTransmissionManager( bool is_unified_plan, - rtc::Thread* signaling_thread, - rtc::Thread* worker_thread, - cricket::ChannelManager* channel_manager, + ConnectionContext* context, UsagePattern* usage_pattern, PeerConnectionObserver* observer, - StatsCollectorInterface* stats, + LegacyStatsCollectorInterface* legacy_stats, std::function on_negotiation_needed) : is_unified_plan_(is_unified_plan), - signaling_thread_(signaling_thread), - worker_thread_(worker_thread), - channel_manager_(channel_manager), + context_(context), usage_pattern_(usage_pattern), observer_(observer), - stats_(stats), + legacy_stats_(legacy_stats), on_negotiation_needed_(on_negotiation_needed), weak_ptr_factory_(this) {} @@ -80,10 +76,9 @@ cricket::VoiceMediaChannel* RtpTransmissionManager::voice_media_channel() const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!IsUnifiedPlan()); - auto* voice_channel = static_cast( - GetAudioTransceiver()->internal()->channel()); + auto* voice_channel = GetAudioTransceiver()->internal()->channel(); if (voice_channel) { - return voice_channel->media_channel(); + return voice_channel->voice_media_channel(); } else { return nullptr; } @@ -93,10 +88,9 @@ cricket::VideoMediaChannel* RtpTransmissionManager::video_media_channel() const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!IsUnifiedPlan()); - auto* video_channel = static_cast( - GetVideoTransceiver()->internal()->channel()); + auto* video_channel = GetVideoTransceiver()->internal()->channel(); if (video_channel) { - return video_channel->media_channel(); + return video_channel->video_media_channel(); } else { return nullptr; } @@ -105,17 +99,20 @@ cricket::VideoMediaChannel* RtpTransmissionManager::video_media_channel() RTCErrorOr> RtpTransmissionManager::AddTrack( rtc::scoped_refptr track, - const std::vector& stream_ids) { + const std::vector& stream_ids, + const std::vector* init_send_encodings) { RTC_DCHECK_RUN_ON(signaling_thread()); - return (IsUnifiedPlan() ? AddTrackUnifiedPlan(track, stream_ids) - : AddTrackPlanB(track, stream_ids)); + return (IsUnifiedPlan() + ? 
AddTrackUnifiedPlan(track, stream_ids, init_send_encodings) + : AddTrackPlanB(track, stream_ids, init_send_encodings)); } RTCErrorOr> RtpTransmissionManager::AddTrackPlanB( rtc::scoped_refptr track, - const std::vector& stream_ids) { + const std::vector& stream_ids, + const std::vector* init_send_encodings) { RTC_DCHECK_RUN_ON(signaling_thread()); if (stream_ids.size() > 1u) { LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, @@ -131,7 +128,9 @@ RtpTransmissionManager::AddTrackPlanB( ? cricket::MEDIA_TYPE_AUDIO : cricket::MEDIA_TYPE_VIDEO); auto new_sender = - CreateSender(media_type, track->id(), track, adjusted_stream_ids, {}); + CreateSender(media_type, track->id(), track, adjusted_stream_ids, + init_send_encodings ? *init_send_encodings + : std::vector()); if (track->kind() == MediaStreamTrackInterface::kAudioKind) { new_sender->internal()->SetMediaChannel(voice_media_channel()); GetAudioTransceiver()->internal()->AddSender(new_sender); @@ -158,8 +157,10 @@ RtpTransmissionManager::AddTrackPlanB( RTCErrorOr> RtpTransmissionManager::AddTrackUnifiedPlan( rtc::scoped_refptr track, - const std::vector& stream_ids) { - auto transceiver = FindFirstTransceiverForAddedTrack(track); + const std::vector& stream_ids, + const std::vector* init_send_encodings) { + auto transceiver = + FindFirstTransceiverForAddedTrack(track, init_send_encodings); if (transceiver) { RTC_LOG(LS_INFO) << "Reusing an existing " << cricket::MediaTypeToString(transceiver->media_type()) @@ -176,7 +177,7 @@ RtpTransmissionManager::AddTrackUnifiedPlan( transceiver->internal()->set_direction( RtpTransceiverDirection::kSendOnly); } - transceiver->sender()->SetTrack(track); + transceiver->sender()->SetTrack(track.get()); transceiver->internal()->sender_internal()->set_stream_ids(stream_ids); transceiver->internal()->set_reused_for_addtrack(true); } else { @@ -193,7 +194,10 @@ RtpTransmissionManager::AddTrackUnifiedPlan( if (FindSenderById(sender_id)) { sender_id = rtc::CreateRandomUuid(); } - auto sender = CreateSender(media_type, sender_id, track, stream_ids, {}); + auto sender = CreateSender(media_type, sender_id, track, stream_ids, + init_send_encodings + ? 
*init_send_encodings + : std::vector()); auto receiver = CreateReceiver(media_type, rtc::CreateRandomUuid()); transceiver = CreateAndAddTransceiver(sender, receiver); transceiver->internal()->set_created_by_addtrack(true); @@ -216,7 +220,7 @@ RtpTransmissionManager::CreateSender( (track->kind() == MediaStreamTrackInterface::kAudioKind)); sender = RtpSenderProxyWithInternal::Create( signaling_thread(), - AudioRtpSender::Create(worker_thread(), id, stats_, this)); + AudioRtpSender::Create(worker_thread(), id, legacy_stats_, this)); NoteUsageEvent(UsageEvent::AUDIO_ADDED); } else { RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO); @@ -226,7 +230,7 @@ RtpTransmissionManager::CreateSender( signaling_thread(), VideoRtpSender::Create(worker_thread(), id, this)); NoteUsageEvent(UsageEvent::VIDEO_ADDED); } - bool set_track_succeeded = sender->SetTrack(track); + bool set_track_succeeded = sender->SetTrack(track.get()); RTC_DCHECK(set_track_succeeded); sender->internal()->set_stream_ids(stream_ids); sender->internal()->set_init_send_encodings(send_encodings); @@ -270,11 +274,11 @@ RtpTransmissionManager::CreateAndAddTransceiver( RTC_DCHECK(!FindSenderById(sender->id())); auto transceiver = RtpTransceiverProxyWithInternal::Create( signaling_thread(), - new RtpTransceiver( - sender, receiver, channel_manager(), + rtc::make_ref_counted( + sender, receiver, context_, sender->media_type() == cricket::MEDIA_TYPE_AUDIO - ? channel_manager()->GetSupportedAudioRtpHeaderExtensions() - : channel_manager()->GetSupportedVideoRtpHeaderExtensions(), + ? media_engine()->voice().GetRtpHeaderExtensions() + : media_engine()->video().GetRtpHeaderExtensions(), [this_weak_ptr = weak_ptr_factory_.GetWeakPtr()]() { if (this_weak_ptr) { this_weak_ptr->OnNegotiationNeeded(); @@ -286,9 +290,13 @@ RtpTransmissionManager::CreateAndAddTransceiver( rtc::scoped_refptr> RtpTransmissionManager::FindFirstTransceiverForAddedTrack( - rtc::scoped_refptr track) { + rtc::scoped_refptr track, + const std::vector* init_send_encodings) { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(track); + if (init_send_encodings != nullptr) { + return nullptr; + } for (auto transceiver : transceivers()->List()) { if (!transceiver->sender()->track() && cricket::MediaTypeToString(transceiver->media_type()) == @@ -378,7 +386,8 @@ void RtpTransmissionManager::AddAudioTrack(AudioTrackInterface* track, } // Normal case; we've never seen this track before. - auto new_sender = CreateSender(cricket::MEDIA_TYPE_AUDIO, track->id(), track, + auto new_sender = CreateSender(cricket::MEDIA_TYPE_AUDIO, track->id(), + rtc::scoped_refptr(track), {stream->id()}, {}); new_sender->internal()->SetMediaChannel(voice_media_channel()); GetAudioTransceiver()->internal()->AddSender(new_sender); @@ -407,7 +416,7 @@ void RtpTransmissionManager::RemoveAudioTrack(AudioTrackInterface* track, << " doesn't exist."; return; } - GetAudioTransceiver()->internal()->RemoveSender(sender); + GetAudioTransceiver()->internal()->RemoveSender(sender.get()); } void RtpTransmissionManager::AddVideoTrack(VideoTrackInterface* track, @@ -424,7 +433,8 @@ void RtpTransmissionManager::AddVideoTrack(VideoTrackInterface* track, } // Normal case; we've never seen this track before. 
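// Editor's sketch of the `init_send_encodings` plumbing above: a null pointer
// means "no explicit encodings, use the default", and supplying encodings
// disables transceiver reuse (FindFirstTransceiverForAddedTrack() returns
// nullptr in that case). `Encoding` is a stand-in for RtpEncodingParameters.
#include <string>
#include <vector>

struct Encoding { std::string rid; };

std::vector<Encoding> ResolveEncodings(const std::vector<Encoding>* init) {
  return init ? *init : std::vector<Encoding>();
}

bool MayReuseTransceiver(const std::vector<Encoding>* init) {
  return init == nullptr;  // explicit encodings force a fresh transceiver
}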
- auto new_sender = CreateSender(cricket::MEDIA_TYPE_VIDEO, track->id(), track, + auto new_sender = CreateSender(cricket::MEDIA_TYPE_VIDEO, track->id(), + rtc::scoped_refptr(track), {stream->id()}, {}); new_sender->internal()->SetMediaChannel(video_media_channel()); GetVideoTransceiver()->internal()->AddSender(new_sender); @@ -445,7 +455,7 @@ void RtpTransmissionManager::RemoveVideoTrack(VideoTrackInterface* track, << " doesn't exist."; return; } - GetVideoTransceiver()->internal()->RemoveSender(sender); + GetVideoTransceiver()->internal()->RemoveSender(sender.get()); } void RtpTransmissionManager::CreateAudioReceiver( @@ -457,13 +467,14 @@ void RtpTransmissionManager::CreateAudioReceiver( // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use // the constructor taking stream IDs instead. auto audio_receiver = rtc::make_ref_counted( - worker_thread(), remote_sender_info.sender_id, streams, IsUnifiedPlan()); - audio_receiver->SetMediaChannel(voice_media_channel()); + worker_thread(), remote_sender_info.sender_id, streams, IsUnifiedPlan(), + voice_media_channel()); if (remote_sender_info.sender_id == kDefaultAudioSenderId) { audio_receiver->SetupUnsignaledMediaChannel(); } else { audio_receiver->SetupMediaChannel(remote_sender_info.first_ssrc); } + auto receiver = RtpReceiverProxyWithInternal::Create( signaling_thread(), worker_thread(), std::move(audio_receiver)); GetAudioTransceiver()->internal()->AddReceiver(receiver); @@ -481,12 +492,13 @@ void RtpTransmissionManager::CreateVideoReceiver( // the constructor taking stream IDs instead. auto video_receiver = rtc::make_ref_counted( worker_thread(), remote_sender_info.sender_id, streams); - video_receiver->SetMediaChannel(video_media_channel()); - if (remote_sender_info.sender_id == kDefaultVideoSenderId) { - video_receiver->SetupUnsignaledMediaChannel(); - } else { - video_receiver->SetupMediaChannel(remote_sender_info.first_ssrc); - } + + video_receiver->SetupMediaChannel( + remote_sender_info.sender_id == kDefaultVideoSenderId + ? 
absl::nullopt + : absl::optional(remote_sender_info.first_ssrc), + video_media_channel()); + auto receiver = RtpReceiverProxyWithInternal::Create( signaling_thread(), worker_thread(), std::move(video_receiver)); GetVideoTransceiver()->internal()->AddReceiver(receiver); @@ -506,9 +518,9 @@ RtpTransmissionManager::RemoveAndStopReceiver( return nullptr; } if (receiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { - GetAudioTransceiver()->internal()->RemoveReceiver(receiver); + GetAudioTransceiver()->internal()->RemoveReceiver(receiver.get()); } else { - GetVideoTransceiver()->internal()->RemoveReceiver(receiver); + GetVideoTransceiver()->internal()->RemoveReceiver(receiver.get()); } return receiver; } @@ -636,7 +648,7 @@ std::vector* RtpTransmissionManager::GetLocalSenderInfos( const RtpSenderInfo* RtpTransmissionManager::FindSenderInfo( const std::vector& infos, const std::string& stream_id, - const std::string sender_id) const { + const std::string& sender_id) const { for (const RtpSenderInfo& sender_info : infos) { if (sender_info.stream_id == stream_id && sender_info.sender_id == sender_id) { @@ -686,4 +698,8 @@ RtpTransmissionManager::FindReceiverById(const std::string& receiver_id) const { return nullptr; } +cricket::MediaEngineInterface* RtpTransmissionManager::media_engine() const { + return context_->media_engine(); +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.h index f616d9d0f6..c48b345c16 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.h @@ -27,11 +27,12 @@ #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "media/base/media_channel.h" -#include "pc/channel_manager.h" +#include "pc/legacy_stats_collector_interface.h" #include "pc/rtp_receiver.h" +#include "pc/rtp_receiver_proxy.h" #include "pc/rtp_sender.h" +#include "pc/rtp_sender_proxy.h" #include "pc/rtp_transceiver.h" -#include "pc/stats_collector_interface.h" #include "pc/transceiver_list.h" #include "pc/usage_pattern.h" #include "rtc_base/third_party/sigslot/sigslot.h" @@ -39,6 +40,10 @@ #include "rtc_base/thread_annotations.h" #include "rtc_base/weak_ptr.h" +namespace cricket { +class ChannelManager; +} + namespace rtc { class Thread; } @@ -50,7 +55,7 @@ namespace webrtc { struct RtpSenderInfo { RtpSenderInfo() : first_ssrc(0) {} RtpSenderInfo(const std::string& stream_id, - const std::string sender_id, + const std::string& sender_id, uint32_t ssrc) : stream_id(stream_id), sender_id(sender_id), first_ssrc(ssrc) {} bool operator==(const RtpSenderInfo& other) { @@ -71,12 +76,10 @@ struct RtpSenderInfo { class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { public: RtpTransmissionManager(bool is_unified_plan, - rtc::Thread* signaling_thread, - rtc::Thread* worker_thread, - cricket::ChannelManager* channel_manager, + ConnectionContext* context, UsagePattern* usage_pattern, PeerConnectionObserver* observer, - StatsCollectorInterface* stats_, + LegacyStatsCollectorInterface* legacy_stats, std::function on_negotiation_needed); // No move or copy permitted. @@ -92,7 +95,8 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { // Add a new track, creating transceiver if required. RTCErrorOr> AddTrack( rtc::scoped_refptr track, - const std::vector& stream_ids); + const std::vector& stream_ids, + const std::vector* init_send_encodings); // Create a new RTP sender. 
Does not associate with a transceiver. rtc::scoped_refptr> @@ -118,7 +122,8 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { // transceiver is available. rtc::scoped_refptr> FindFirstTransceiverForAddedTrack( - rtc::scoped_refptr track); + rtc::scoped_refptr track, + const std::vector* init_send_encodings); // Returns the list of senders currently associated with some // registered transceiver @@ -184,7 +189,7 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { cricket::MediaType media_type); const RtpSenderInfo* FindSenderInfo(const std::vector& infos, const std::string& stream_id, - const std::string sender_id) const; + const std::string& sender_id) const; // Return the RtpSender with the given track attached. rtc::scoped_refptr> @@ -207,9 +212,8 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { cricket::VideoMediaChannel* video_media_channel() const; private: - rtc::Thread* signaling_thread() const { return signaling_thread_; } - rtc::Thread* worker_thread() const { return worker_thread_; } - cricket::ChannelManager* channel_manager() const { return channel_manager_; } + rtc::Thread* signaling_thread() const { return context_->signaling_thread(); } + rtc::Thread* worker_thread() const { return context_->worker_thread(); } bool IsUnifiedPlan() const { return is_unified_plan_; } void NoteUsageEvent(UsageEvent event) { usage_pattern_->NoteUsageEvent(event); @@ -218,11 +222,13 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { // AddTrack implementation when Unified Plan is specified. RTCErrorOr> AddTrackUnifiedPlan( rtc::scoped_refptr track, - const std::vector& stream_ids); + const std::vector& stream_ids, + const std::vector* init_send_encodings); // AddTrack implementation when Plan B is specified. RTCErrorOr> AddTrackPlanB( rtc::scoped_refptr track, - const std::vector& stream_ids); + const std::vector& stream_ids, + const std::vector* init_send_encodings); // Create an RtpReceiver that sources an audio track. void CreateAudioReceiver(MediaStreamInterface* stream, @@ -239,6 +245,12 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { PeerConnectionObserver* Observer() const; void OnNegotiationNeeded(); + cricket::MediaEngineInterface* media_engine() const; + + rtc::UniqueRandomIdGenerator* ssrc_generator() const { + return context_->ssrc_generator(); + } + TransceiverList transceivers_; // These lists store sender info seen in local/remote descriptions. 
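// Editor's note: the header hunks above change `const std::string sender_id`
// parameters to `const std::string& sender_id`. A by-value const parameter
// still copies its argument on every call; a const reference does not, and the
// call sites are unchanged. Minimal illustration:
#include <string>

int CountMatchesByValue(const std::string id) {   // copies `id` per call
  return id.empty() ? 0 : 1;
}

int CountMatchesByRef(const std::string& id) {    // no copy, same call sites
  return id.empty() ? 0 : 1;
}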
@@ -253,12 +265,10 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { bool closed_ = false; bool const is_unified_plan_; - rtc::Thread* signaling_thread_; - rtc::Thread* worker_thread_; - cricket::ChannelManager* channel_manager_; + ConnectionContext* context_; UsagePattern* usage_pattern_; PeerConnectionObserver* observer_; - StatsCollectorInterface* const stats_; + LegacyStatsCollectorInterface* const legacy_stats_; std::function on_negotiation_needed_; rtc::WeakPtrFactory weak_ptr_factory_ RTC_GUARDED_BY(signaling_thread()); diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc index 347b2440fd..2e98632879 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc @@ -11,17 +11,18 @@ #include "pc/rtp_transport.h" #include -#include + +#include #include #include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/units/timestamp.h" #include "media/base/rtp_utils.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" -#include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/trace_event.h" namespace webrtc { @@ -73,8 +74,6 @@ void RtpTransport::SetRtpPacketTransport( &RtpTransport::OnSentPacket); // Set the network route for the new transport. SignalNetworkRouteChanged(new_packet_transport->network_route()); - } else { - RTC_LOG(LS_WARNING) << "set empty packet"; } rtp_packet_transport_ = new_packet_transport; @@ -187,19 +186,20 @@ void RtpTransport::DemuxPacket(rtc::CopyOnWriteBuffer packet, &header_extension_map_, packet_time_us == -1 ? Timestamp::MinusInfinity() : Timestamp::Micros(packet_time_us)); - if (!parsed_packet.Parse(packet)) { + if (!parsed_packet.Parse(std::move(packet))) { RTC_LOG(LS_ERROR) << "Failed to parse the incoming RTP packet before demuxing. Drop it."; return; } - + + bool isUnresolved = true; if (!rtp_demuxer_.OnRtpPacket(parsed_packet)) { - SignalRtpPacketReceived.emit(&packet, packet_time_us, true); + isUnresolved = true; RTC_LOG(LS_WARNING) << "Failed to demux RTP packet: " << RtpDemuxer::DescribePacket(parsed_packet); - } else { - SignalRtpPacketReceived.emit(&packet, packet_time_us, false); } + + ProcessRtpPacket(parsed_packet, isUnresolved); } bool RtpTransport::IsTransportWritable() { diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h index 893d91e734..b3c7625866 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h @@ -18,6 +18,7 @@ #include "absl/types/optional.h" #include "call/rtp_demuxer.h" +#include "call/video_receive_stream.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "p2p/base/packet_transport_internal.h" #include "pc/rtp_transport_internal.h" @@ -27,7 +28,6 @@ #include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" #include "rtc_base/socket.h" -#include "rtc_base/third_party/sigslot/sigslot.h" namespace rtc { @@ -86,6 +86,8 @@ class RtpTransport : public RtpTransportInternal { RtpPacketSinkInterface* sink) override; bool UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) override; + + virtual void ProcessRtpPacket(webrtc::RtpPacketReceived const &packet, bool isUnresolved) {} protected: // These methods will be used in the subclasses. 
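// Editor's sketch: the rtp_transport.* hunks above drop the
// SignalRtpPacketReceived sigslot and route demuxed packets through a virtual
// ProcessRtpPacket(packet, isUnresolved) hook that subclasses override. (Note
// that in the hunk, `isUnresolved` starts as `true` and is only reassigned --
// also to `true` -- on demux failure; if the intent follows the removed
// signal, the success path would presumably set it to `false`.) Minimal
// signal-to-virtual-hook shape, with stand-in types:
#include <cstdio>
#include <string>

struct FakePacket { std::string payload; };

class BaseTransport {
 public:
  virtual ~BaseTransport() = default;

  void Demux(const FakePacket& packet, bool demux_ok) {
    // Default hook does nothing; subclasses observe every parsed packet.
    ProcessRtpPacket(packet, /*isUnresolved=*/!demux_ok);
  }

 protected:
  virtual void ProcessRtpPacket(const FakePacket&, bool /*isUnresolved*/) {}
};

class LoggingTransport : public BaseTransport {
 protected:
  void ProcessRtpPacket(const FakePacket& packet, bool isUnresolved) override {
    std::printf("packet %zu bytes, unresolved=%d\n", packet.payload.size(),
                isUnresolved ? 1 : 0);
  }
};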
diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport_internal.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport_internal.h index 8eabebc59d..18dd42efb8 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport_internal.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport_internal.h @@ -27,10 +27,10 @@ struct PacketOptions; namespace webrtc { -// This represents the internal interface beneath SrtpTransportInterface; -// it is not accessible to API consumers but is accessible to internal classes -// in order to send and receive RTP and RTCP packets belonging to a single RTP -// session. Additional convenience and configuration methods are also provided. +// This class is an internal interface; it is not accessible to API consumers +// but is accessible to internal classes in order to send and receive RTP and +// RTCP packets belonging to a single RTP session. Additional convenience and +// configuration methods are also provided. class RtpTransportInternal : public sigslot::has_slots<> { public: virtual ~RtpTransportInternal() = default; diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.cc b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.cc index c63f820acf..0591bcc087 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.cc @@ -15,15 +15,13 @@ #include #include +#include "absl/cleanup/cleanup.h" #include "media/sctp/sctp_transport_internal.h" #include "pc/proxy.h" #include "pc/sctp_utils.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/system/unused.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" namespace webrtc { @@ -141,13 +139,13 @@ bool SctpSidAllocator::IsSidAvailable(int sid) const { } rtc::scoped_refptr SctpDataChannel::Create( - SctpDataChannelProviderInterface* provider, + SctpDataChannelControllerInterface* controller, const std::string& label, const InternalDataChannelInit& config, rtc::Thread* signaling_thread, rtc::Thread* network_thread) { auto channel = rtc::make_ref_counted( - config, provider, label, signaling_thread, network_thread); + config, controller, label, signaling_thread, network_thread); if (!channel->Init()) { return nullptr; } @@ -158,15 +156,12 @@ rtc::scoped_refptr SctpDataChannel::Create( rtc::scoped_refptr SctpDataChannel::CreateProxy( rtc::scoped_refptr channel) { // TODO(bugs.webrtc.org/11547): incorporate the network thread in the proxy. - // Also, consider allowing the proxy object to own the reference (std::move). - // As is, the proxy has a raw pointer and no reference to the channel object - // and trusting that the lifetime management aligns with the - // sctp_data_channels_ array in SctpDataChannelController. 
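// Editor's sketch of the ownership change in SctpDataChannel::CreateProxy()
// above: the proxy now holds its own reference (the scoped_refptr is
// std::move'd in) instead of keeping a raw pointer and trusting an external
// container to keep the channel alive. shared_ptr stands in for
// rtc::scoped_refptr here.
#include <memory>
#include <utility>

struct FakeChannel { int id = 0; };

class FakeProxy {
 public:
  explicit FakeProxy(std::shared_ptr<FakeChannel> channel)
      : channel_(std::move(channel)) {}   // proxy co-owns the channel
  int id() const { return channel_->id; }

 private:
  std::shared_ptr<FakeChannel> channel_;  // was: FakeChannel* (non-owning)
};

FakeProxy MakeProxy(std::shared_ptr<FakeChannel> channel) {
  return FakeProxy(std::move(channel));   // mirrors Create(thread, std::move(channel))
}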
- return DataChannelProxy::Create(channel->signaling_thread_, channel.get()); + auto* signaling_thread = channel->signaling_thread_; + return DataChannelProxy::Create(signaling_thread, std::move(channel)); } SctpDataChannel::SctpDataChannel(const InternalDataChannelInit& config, - SctpDataChannelProviderInterface* provider, + SctpDataChannelControllerInterface* controller, const std::string& label, rtc::Thread* signaling_thread, rtc::Thread* network_thread) @@ -176,11 +171,16 @@ SctpDataChannel::SctpDataChannel(const InternalDataChannelInit& config, label_(label), config_(config), observer_(nullptr), - provider_(provider) { + controller_(controller) { RTC_DCHECK_RUN_ON(signaling_thread_); RTC_UNUSED(network_thread_); } +void SctpDataChannel::DetachFromController() { + RTC_DCHECK_RUN_ON(signaling_thread_); + controller_detached_ = true; +} + bool SctpDataChannel::Init() { RTC_DCHECK_RUN_ON(signaling_thread_); if (config_.id < -1 || @@ -217,15 +217,15 @@ bool SctpDataChannel::Init() { // This has to be done async because the upper layer objects (e.g. // Chrome glue and WebKit) are not wired up properly until after this // function returns. - if (provider_->ReadyToSendData()) { + RTC_DCHECK(!controller_detached_); + if (controller_->ReadyToSendData()) { AddRef(); - rtc::Thread::Current()->PostTask(ToQueuedTask( - [this] { - RTC_DCHECK_RUN_ON(signaling_thread_); - if (state_ != kClosed) - OnTransportReady(true); - }, - [this] { Release(); })); + absl::Cleanup release = [this] { Release(); }; + rtc::Thread::Current()->PostTask([this, release = std::move(release)] { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (state_ != kClosed) + OnTransportReady(true); + }); } return true; @@ -258,7 +258,7 @@ uint64_t SctpDataChannel::buffered_amount() const { void SctpDataChannel::Close() { RTC_DCHECK_RUN_ON(signaling_thread_); - if (state_ == kClosed) + if (state_ == kClosing || state_ == kClosed) return; SetState(kClosing); // Will send queued data before beginning the underlying closing procedure. @@ -333,7 +333,8 @@ void SctpDataChannel::SetSctpSid(int sid) { } const_cast(config_).id = sid; - provider_->AddSctpDataStream(sid); + RTC_DCHECK(!controller_detached_); + controller_->AddSctpDataStream(sid); } void SctpDataChannel::OnClosingProcedureStartedRemotely(int sid) { @@ -359,20 +360,23 @@ void SctpDataChannel::OnClosingProcedureComplete(int sid) { // all pending data and transitioned to kClosing already. RTC_DCHECK_EQ(state_, kClosing); RTC_DCHECK(queued_send_data_.Empty()); - DisconnectFromProvider(); + DisconnectFromTransport(); SetState(kClosed); } } void SctpDataChannel::OnTransportChannelCreated() { RTC_DCHECK_RUN_ON(signaling_thread_); - if (!connected_to_provider_) { - connected_to_provider_ = provider_->ConnectDataChannel(this); + if (controller_detached_) { + return; + } + if (!connected_to_transport_) { + connected_to_transport_ = controller_->ConnectDataChannel(this); } - // The sid may have been unassigned when provider_->ConnectDataChannel was - // done. So always add the streams even if connected_to_provider_ is true. + // The sid may have been unassigned when controller_->ConnectDataChannel was + // done. So always add the streams even if connected_to_transport_ is true. 
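// Editor's sketch: Init() above swaps ToQueuedTask(task, cleanup) for an
// absl::Cleanup that is moved into the posted closure, so Release() runs when
// the closure object is destroyed whether or not its body ever executed.
// Minimal stand-alone version (assumes Abseil headers are available, as in the
// hunk; everything else is illustrative):
#include <cstdio>
#include <utility>

#include "absl/cleanup/cleanup.h"

void CleanupSketch() {
  absl::Cleanup release = [] { std::puts("Release()"); };
  auto task = [release = std::move(release)] {
    std::puts("task body");  // in the hunk: OnTransportReady(true) unless closed
  };
  task();   // runs the body...
}           // ...and "Release()" prints here, when the closure is destroyed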
if (config_.id >= 0) { - provider_->AddSctpDataStream(config_.id); + controller_->AddSctpDataStream(config_.id); } } @@ -475,8 +479,8 @@ void SctpDataChannel::CloseAbruptlyWithError(RTCError error) { return; } - if (connected_to_provider_) { - DisconnectFromProvider(); + if (connected_to_transport_) { + DisconnectFromTransport(); } // Closing abruptly means any queued data gets thrown away. @@ -506,7 +510,7 @@ void SctpDataChannel::UpdateState() { switch (state_) { case kConnecting: { - if (connected_to_provider_) { + if (connected_to_transport_) { if (handshake_state_ == kHandshakeShouldSendOpen) { rtc::CopyOnWriteBuffer payload; WriteDataChannelOpenMessage(label_, config_, &payload); @@ -537,10 +541,10 @@ void SctpDataChannel::UpdateState() { // to complete; after calling RemoveSctpDataStream, // OnClosingProcedureComplete will end up called asynchronously // afterwards. - if (connected_to_provider_ && !started_closing_procedure_ && - config_.id >= 0) { + if (connected_to_transport_ && !started_closing_procedure_ && + !controller_detached_ && config_.id >= 0) { started_closing_procedure_ = true; - provider_->RemoveSctpDataStream(config_.id); + controller_->RemoveSctpDataStream(config_.id); } } break; @@ -567,13 +571,13 @@ void SctpDataChannel::SetState(DataState state) { } } -void SctpDataChannel::DisconnectFromProvider() { +void SctpDataChannel::DisconnectFromTransport() { RTC_DCHECK_RUN_ON(signaling_thread_); - if (!connected_to_provider_) + if (!connected_to_transport_ || controller_detached_) return; - provider_->DisconnectDataChannel(this); - connected_to_provider_ = false; + controller_->DisconnectDataChannel(this); + connected_to_transport_ = false; } void SctpDataChannel::DeliverQueuedReceivedData() { @@ -612,6 +616,9 @@ bool SctpDataChannel::SendDataMessage(const DataBuffer& buffer, bool queue_if_blocked) { RTC_DCHECK_RUN_ON(signaling_thread_); SendDataParams send_params; + if (controller_detached_) { + return false; + } send_params.ordered = config_.ordered; // Send as ordered if it is still going through OPEN/ACK signaling. 
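// Editor's sketch: SendDataMessage()/SendControlMessage() above gain an early
// return when the controller has been detached, so a data channel that
// outlives its DataChannelController never dereferences it. Illustrative
// stand-ins only:
#include <string>

struct FakeController {
  bool SendData(const std::string& payload) { return !payload.empty(); }
};

class FakeDataChannel {
 public:
  explicit FakeDataChannel(FakeController* controller) : controller_(controller) {}
  void DetachFromController() { controller_detached_ = true; }

  bool Send(const std::string& payload) {
    if (controller_detached_) return false;  // the guard added in the hunks above
    return controller_->SendData(payload);
  }

 private:
  FakeController* controller_;
  bool controller_detached_ = false;
};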
@@ -629,7 +636,7 @@ bool SctpDataChannel::SendDataMessage(const DataBuffer& buffer, cricket::SendDataResult send_result = cricket::SDR_SUCCESS; bool success = - provider_->SendData(config_.id, send_params, buffer.data, &send_result); + controller_->SendData(config_.id, send_params, buffer.data, &send_result); if (success) { ++messages_sent_; @@ -691,6 +698,9 @@ bool SctpDataChannel::SendControlMessage(const rtc::CopyOnWriteBuffer& buffer) { RTC_DCHECK(writable_); RTC_DCHECK_GE(config_.id, 0); + if (controller_detached_) { + return false; + } bool is_open_message = handshake_state_ == kHandshakeShouldSendOpen; RTC_DCHECK(!is_open_message || !config_.negotiated); @@ -703,7 +713,7 @@ bool SctpDataChannel::SendControlMessage(const rtc::CopyOnWriteBuffer& buffer) { cricket::SendDataResult send_result = cricket::SDR_SUCCESS; bool retval = - provider_->SendData(config_.id, send_params, buffer, &send_result); + controller_->SendData(config_.id, send_params, buffer, &send_result); if (retval) { RTC_LOG(LS_VERBOSE) << "Sent CONTROL message on channel " << config_.id; @@ -729,4 +739,10 @@ void SctpDataChannel::ResetInternalIdAllocatorForTesting(int new_value) { g_unique_id = new_value; } +SctpDataChannel* DowncastProxiedDataChannelInterfaceToSctpDataChannelForTesting( + DataChannelInterface* channel) { + return static_cast( + static_cast(channel)->internal()); +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.h b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.h index 56f99df3e5..a8442c59cc 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.h +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.h @@ -37,7 +37,7 @@ class SctpDataChannel; // TODO(deadbeef): Get rid of this and have SctpDataChannel depend on // SctpTransportInternal (pure virtual SctpTransport interface) instead. -class SctpDataChannelProviderInterface { +class SctpDataChannelControllerInterface { public: // Sends the data to the transport. virtual bool SendData(int sid, @@ -57,7 +57,7 @@ class SctpDataChannelProviderInterface { virtual bool ReadyToSendData() const = 0; protected: - virtual ~SctpDataChannelProviderInterface() {} + virtual ~SctpDataChannelControllerInterface() {} }; // TODO(tommi): Change to not inherit from DataChannelInit but to have it as @@ -120,7 +120,7 @@ class SctpDataChannel : public DataChannelInterface, public sigslot::has_slots<> { public: static rtc::scoped_refptr Create( - SctpDataChannelProviderInterface* provider, + SctpDataChannelControllerInterface* controller, const std::string& label, const InternalDataChannelInit& config, rtc::Thread* signaling_thread, @@ -131,6 +131,9 @@ class SctpDataChannel : public DataChannelInterface, static rtc::scoped_refptr CreateProxy( rtc::scoped_refptr channel); + // Invalidate the link to the controller (DataChannelController); + void DetachFromController(); + void RegisterObserver(DataChannelObserver* observer) override; void UnregisterObserver() override; @@ -178,10 +181,10 @@ class SctpDataChannel : public DataChannelInterface, // Specializations of CloseAbruptlyWithError void CloseAbruptlyWithDataChannelFailure(const std::string& message); - // Slots for provider to connect signals to. + // Slots for controller to connect signals to. // // TODO(deadbeef): Make these private once we're hooking up signals ourselves, - // instead of relying on SctpDataChannelProviderInterface. + // instead of relying on SctpDataChannelControllerInterface. // Called when the SctpTransport's ready to use. 
That can happen when we've // finished negotiation, or if the channel was created after negotiation has @@ -223,7 +226,7 @@ class SctpDataChannel : public DataChannelInterface, protected: SctpDataChannel(const InternalDataChannelInit& config, - SctpDataChannelProviderInterface* client, + SctpDataChannelControllerInterface* client, const std::string& label, rtc::Thread* signaling_thread, rtc::Thread* network_thread); @@ -242,7 +245,7 @@ class SctpDataChannel : public DataChannelInterface, bool Init(); void UpdateState(); void SetState(DataState state); - void DisconnectFromProvider(); + void DisconnectFromTransport(); void DeliverQueuedReceivedData(); @@ -266,11 +269,12 @@ class SctpDataChannel : public DataChannelInterface, uint64_t bytes_sent_ RTC_GUARDED_BY(signaling_thread_) = 0; uint32_t messages_received_ RTC_GUARDED_BY(signaling_thread_) = 0; uint64_t bytes_received_ RTC_GUARDED_BY(signaling_thread_) = 0; - SctpDataChannelProviderInterface* const provider_ + SctpDataChannelControllerInterface* const controller_ RTC_GUARDED_BY(signaling_thread_); + bool controller_detached_ RTC_GUARDED_BY(signaling_thread_) = false; HandshakeState handshake_state_ RTC_GUARDED_BY(signaling_thread_) = kHandshakeInit; - bool connected_to_provider_ RTC_GUARDED_BY(signaling_thread_) = false; + bool connected_to_transport_ RTC_GUARDED_BY(signaling_thread_) = false; bool writable_ RTC_GUARDED_BY(signaling_thread_) = false; // Did we already start the graceful SCTP closing procedure? bool started_closing_procedure_ RTC_GUARDED_BY(signaling_thread_) = false; @@ -281,6 +285,11 @@ class SctpDataChannel : public DataChannelInterface, PacketQueue queued_send_data_ RTC_GUARDED_BY(signaling_thread_); }; +// Downcast a PeerConnectionInterface that points to a proxy object +// to its underlying SctpDataChannel object. For testing only. +SctpDataChannel* DowncastProxiedDataChannelInterfaceToSctpDataChannelForTesting( + DataChannelInterface* channel); + } // namespace webrtc #endif // PC_SCTP_DATA_CHANNEL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.cc deleted file mode 100644 index f01f86ebd8..0000000000 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.cc +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "pc/sctp_data_channel_transport.h" - -#include "absl/types/optional.h" -#include "pc/sctp_utils.h" - -namespace webrtc { - -SctpDataChannelTransport::SctpDataChannelTransport( - cricket::SctpTransportInternal* sctp_transport) - : sctp_transport_(sctp_transport) { - sctp_transport_->SignalReadyToSendData.connect( - this, &SctpDataChannelTransport::OnReadyToSendData); - sctp_transport_->SignalDataReceived.connect( - this, &SctpDataChannelTransport::OnDataReceived); - sctp_transport_->SignalClosingProcedureStartedRemotely.connect( - this, &SctpDataChannelTransport::OnClosingProcedureStartedRemotely); - sctp_transport_->SignalClosingProcedureComplete.connect( - this, &SctpDataChannelTransport::OnClosingProcedureComplete); - sctp_transport_->SignalClosedAbruptly.connect( - this, &SctpDataChannelTransport::OnClosedAbruptly); -} - -RTCError SctpDataChannelTransport::OpenChannel(int channel_id) { - sctp_transport_->OpenStream(channel_id); - return RTCError::OK(); -} - -RTCError SctpDataChannelTransport::SendData( - int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) { - cricket::SendDataResult result; - sctp_transport_->SendData(channel_id, params, buffer, &result); - - // TODO(mellem): See about changing the interfaces to not require mapping - // SendDataResult to RTCError and back again. - switch (result) { - case cricket::SendDataResult::SDR_SUCCESS: - return RTCError::OK(); - case cricket::SendDataResult::SDR_BLOCK: { - // Send buffer is full. - ready_to_send_ = false; - return RTCError(RTCErrorType::RESOURCE_EXHAUSTED); - } - case cricket::SendDataResult::SDR_ERROR: - return RTCError(RTCErrorType::NETWORK_ERROR); - } - return RTCError(RTCErrorType::NETWORK_ERROR); -} - -RTCError SctpDataChannelTransport::CloseChannel(int channel_id) { - sctp_transport_->ResetStream(channel_id); - return RTCError::OK(); -} - -void SctpDataChannelTransport::SetDataSink(DataChannelSink* sink) { - sink_ = sink; - if (sink_ && ready_to_send_) { - sink_->OnReadyToSend(); - } -} - -bool SctpDataChannelTransport::IsReadyToSend() const { - return ready_to_send_; -} - -void SctpDataChannelTransport::OnReadyToSendData() { - ready_to_send_ = true; - if (sink_) { - sink_->OnReadyToSend(); - } -} - -void SctpDataChannelTransport::OnDataReceived( - const cricket::ReceiveDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) { - if (sink_) { - sink_->OnDataReceived(params.sid, params.type, buffer); - } -} - -void SctpDataChannelTransport::OnClosingProcedureStartedRemotely( - int channel_id) { - if (sink_) { - sink_->OnChannelClosing(channel_id); - } -} - -void SctpDataChannelTransport::OnClosingProcedureComplete(int channel_id) { - if (sink_) { - sink_->OnChannelClosed(channel_id); - } -} - -void SctpDataChannelTransport::OnClosedAbruptly(RTCError error) { - if (sink_) { - sink_->OnTransportClosed(error); - } -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.h b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.h deleted file mode 100644 index 4b89205ea1..0000000000 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.h +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef PC_SCTP_DATA_CHANNEL_TRANSPORT_H_ -#define PC_SCTP_DATA_CHANNEL_TRANSPORT_H_ - -#include "api/rtc_error.h" -#include "api/transport/data_channel_transport_interface.h" -#include "media/base/media_channel.h" -#include "media/sctp/sctp_transport_internal.h" -#include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/third_party/sigslot/sigslot.h" - -namespace webrtc { - -// SCTP implementation of DataChannelTransportInterface. -class SctpDataChannelTransport : public DataChannelTransportInterface, - public sigslot::has_slots<> { - public: - explicit SctpDataChannelTransport( - cricket::SctpTransportInternal* sctp_transport); - - RTCError OpenChannel(int channel_id) override; - RTCError SendData(int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) override; - RTCError CloseChannel(int channel_id) override; - void SetDataSink(DataChannelSink* sink) override; - bool IsReadyToSend() const override; - - private: - void OnReadyToSendData(); - void OnDataReceived(const cricket::ReceiveDataParams& params, - const rtc::CopyOnWriteBuffer& buffer); - void OnClosingProcedureStartedRemotely(int channel_id); - void OnClosingProcedureComplete(int channel_id); - void OnClosedAbruptly(RTCError error); - - cricket::SctpTransportInternal* const sctp_transport_; - - DataChannelSink* sink_ = nullptr; - bool ready_to_send_ = false; -}; - -} // namespace webrtc - -#endif // PC_SCTP_DATA_CHANNEL_TRANSPORT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.cc index 7d4e4551f1..eabaa04a57 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.cc @@ -17,7 +17,6 @@ #include "api/dtls_transport_interface.h" #include "api/sequence_checker.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" namespace webrtc { @@ -28,9 +27,8 @@ SctpTransport::SctpTransport( info_(SctpTransportState::kNew), internal_sctp_transport_(std::move(internal)) { RTC_DCHECK(internal_sctp_transport_.get()); - internal_sctp_transport_->SignalAssociationChangeCommunicationUp.connect( - this, &SctpTransport::OnAssociationChangeCommunicationUp); - // TODO(https://bugs.webrtc.org/10360): Add handlers for transport closing. + internal_sctp_transport_->SetOnConnectedCallback( + [this]() { OnAssociationChangeCommunicationUp(); }); if (dtls_transport_) { UpdateInformation(SctpTransportState::kConnecting); @@ -51,8 +49,7 @@ SctpTransportInformation SctpTransport::Information() const { // expected thread. Chromium currently calls this method from // TransceiverStateSurfacer. 
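// Editor's sketch: SctpTransport::Information() above now re-dispatches to its
// owning thread with BlockingCall() instead of Invoke(RTC_FROM_HERE, ...).
// Below is the "if not on my thread, hop home" accessor in miniature;
// `BlockingCallOn()` is illustrative, not the rtc::Thread API, and simply runs
// the closure inline so the sketch stays self-contained.
#include <functional>
#include <thread>

struct Info { int state = 0; };

template <typename T>
T BlockingCallOn(std::thread::id /*owner*/, const std::function<T()>& fn) {
  return fn();  // real code would marshal `fn` onto the owner thread and block
}

class FakeSctpTransport {
 public:
  explicit FakeSctpTransport(std::thread::id owner) : owner_(owner) {}

  Info Information() const {
    if (std::this_thread::get_id() != owner_) {
      // The real code re-invokes Information() after the hop (it is then on
      // the owner thread); the inline stand-in reads the field directly so the
      // example terminates.
      return BlockingCallOn<Info>(owner_, [this] { return info_; });
    }
    return info_;  // already on the owning thread
  }

 private:
  std::thread::id owner_;
  Info info_;
};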
if (!owner_thread_->IsCurrent()) { - return owner_thread_->Invoke( - RTC_FROM_HERE, [this] { return Information(); }); + return owner_thread_->BlockingCall([this] { return Information(); }); } RTC_DCHECK_RUN_ON(owner_thread_); return info_; @@ -70,6 +67,54 @@ void SctpTransport::UnregisterObserver() { observer_ = nullptr; } +RTCError SctpTransport::OpenChannel(int channel_id) { + RTC_DCHECK_RUN_ON(owner_thread_); + RTC_DCHECK(internal_sctp_transport_); + internal_sctp_transport_->OpenStream(channel_id); + return RTCError::OK(); +} + +RTCError SctpTransport::SendData(int channel_id, + const SendDataParams& params, + const rtc::CopyOnWriteBuffer& buffer) { + RTC_DCHECK_RUN_ON(owner_thread_); + RTC_DCHECK(internal_sctp_transport_); + cricket::SendDataResult result; + internal_sctp_transport_->SendData(channel_id, params, buffer, &result); + + // TODO(mellem): See about changing the interfaces to not require mapping + // SendDataResult to RTCError and back again. + switch (result) { + case cricket::SendDataResult::SDR_SUCCESS: + return RTCError::OK(); + case cricket::SendDataResult::SDR_BLOCK: + // Send buffer is full. + return RTCError(RTCErrorType::RESOURCE_EXHAUSTED); + case cricket::SendDataResult::SDR_ERROR: + return RTCError(RTCErrorType::NETWORK_ERROR); + } + return RTCError(RTCErrorType::NETWORK_ERROR); +} + +RTCError SctpTransport::CloseChannel(int channel_id) { + RTC_DCHECK_RUN_ON(owner_thread_); + RTC_DCHECK(internal_sctp_transport_); + internal_sctp_transport_->ResetStream(channel_id); + return RTCError::OK(); +} + +void SctpTransport::SetDataSink(DataChannelSink* sink) { + RTC_DCHECK_RUN_ON(owner_thread_); + RTC_DCHECK(internal_sctp_transport_); + internal_sctp_transport_->SetDataChannelSink(sink); +} + +bool SctpTransport::IsReadyToSend() const { + RTC_DCHECK_RUN_ON(owner_thread_); + RTC_DCHECK(internal_sctp_transport_); + return internal_sctp_transport_->ReadyToSendData(); +} + rtc::scoped_refptr SctpTransport::dtls_transport() const { RTC_DCHECK_RUN_ON(owner_thread_); diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.h b/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.h index 16b98407b6..35e7656100 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.h @@ -16,10 +16,12 @@ #include "api/dtls_transport_interface.h" #include "api/scoped_refptr.h" #include "api/sctp_transport_interface.h" +#include "api/sequence_checker.h" +#include "api/transport/data_channel_transport_interface.h" #include "media/sctp/sctp_transport_internal.h" #include "p2p/base/dtls_transport_internal.h" #include "pc/dtls_transport.h" -#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/checks.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" @@ -31,16 +33,26 @@ namespace webrtc { // the same thread as the one the cricket::SctpTransportInternal object // lives on. 
class SctpTransport : public SctpTransportInterface, - public sigslot::has_slots<> { + public DataChannelTransportInterface { public: explicit SctpTransport( std::unique_ptr internal); + // SctpTransportInterface rtc::scoped_refptr dtls_transport() const override; SctpTransportInformation Information() const override; void RegisterObserver(SctpTransportObserverInterface* observer) override; void UnregisterObserver() override; + // DataChannelTransportInterface + RTCError OpenChannel(int channel_id) override; + RTCError SendData(int channel_id, + const SendDataParams& params, + const rtc::CopyOnWriteBuffer& buffer) override; + RTCError CloseChannel(int channel_id) override; + void SetDataSink(DataChannelSink* sink) override; + bool IsReadyToSend() const override; + // Internal functions void Clear(); void SetDtlsTransport(rtc::scoped_refptr); diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.cc b/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.cc index f7458405ea..c60e339b08 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.cc @@ -11,12 +11,12 @@ #include "pc/sctp_utils.h" #include -#include + +#include #include "absl/types/optional.h" #include "api/priority.h" #include "rtc_base/byte_buffer.h" -#include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.cc b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.cc index 4332cd6df8..758a50c2c9 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.cc @@ -11,25 +11,27 @@ #include "pc/sdp_offer_answer.h" #include +#include #include #include #include #include -#include +#include #include #include "absl/algorithm/container.h" #include "absl/memory/memory.h" +#include "absl/strings/match.h" #include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/crypto/crypto_options.h" #include "api/dtls_transport_interface.h" +#include "api/field_trials_view.h" #include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" #include "api/rtp_sender_interface.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "media/base/codec.h" -#include "media/base/media_engine.h" #include "media/base/rid_description.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" @@ -38,30 +40,27 @@ #include "p2p/base/transport_description.h" #include "p2p/base/transport_description_factory.h" #include "p2p/base/transport_info.h" -#include "pc/data_channel_utils.h" +#include "pc/channel_interface.h" #include "pc/dtls_transport.h" +#include "pc/legacy_stats_collector.h" #include "pc/media_stream.h" #include "pc/media_stream_proxy.h" -#include "pc/peer_connection.h" +#include "pc/peer_connection_internal.h" #include "pc/peer_connection_message_handler.h" #include "pc/rtp_media_utils.h" +#include "pc/rtp_receiver_proxy.h" #include "pc/rtp_sender.h" -#include "pc/rtp_transport_internal.h" +#include "pc/rtp_sender_proxy.h" #include "pc/simulcast_description.h" -#include "pc/stats_collector.h" #include "pc/usage_pattern.h" #include "pc/webrtc_session_description_factory.h" #include "rtc_base/helpers.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" -#include 
"rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" using cricket::ContentInfo; @@ -89,9 +88,6 @@ namespace { typedef webrtc::PeerConnectionInterface::RTCOfferAnswerOptions RTCOfferAnswerOptions; -constexpr const char* kAlwaysAllowPayloadTypeDemuxingFieldTrialName = - "WebRTC-AlwaysAllowPayloadTypeDemuxing"; - // Error messages const char kInvalidSdp[] = "Invalid session description."; const char kInvalidCandidates[] = "Description contains invalid candidates."; @@ -137,42 +133,6 @@ void NoteAddIceCandidateResult(int result) { kAddIceCandidateMax); } -void NoteKeyProtocolAndMedia(KeyExchangeProtocolType protocol_type, - cricket::MediaType media_type) { - // Array of structs needed to map {KeyExchangeProtocolType, - // cricket::MediaType} to KeyExchangeProtocolMedia without using std::map in - // order to avoid -Wglobal-constructors and -Wexit-time-destructors. - static constexpr struct { - KeyExchangeProtocolType protocol_type; - cricket::MediaType media_type; - KeyExchangeProtocolMedia protocol_media; - } kEnumCounterKeyProtocolMediaMap[] = { - {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_AUDIO, - kEnumCounterKeyProtocolMediaTypeDtlsAudio}, - {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_VIDEO, - kEnumCounterKeyProtocolMediaTypeDtlsVideo}, - {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_DATA, - kEnumCounterKeyProtocolMediaTypeDtlsData}, - {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_AUDIO, - kEnumCounterKeyProtocolMediaTypeSdesAudio}, - {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_VIDEO, - kEnumCounterKeyProtocolMediaTypeSdesVideo}, - {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_DATA, - kEnumCounterKeyProtocolMediaTypeSdesData}, - }; - - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.KeyProtocol", protocol_type, - kEnumCounterKeyProtocolMax); - - for (const auto& i : kEnumCounterKeyProtocolMediaMap) { - if (i.protocol_type == protocol_type && i.media_type == media_type) { - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.KeyProtocolByMedia", - i.protocol_media, - kEnumCounterKeyProtocolMediaTypeMax); - } - } -} - std::map GetBundleGroupsByMid( const SessionDescription* desc) { std::vector bundle_groups = @@ -225,7 +185,9 @@ std::string GetSetDescriptionErrorMessage(cricket::ContentSource source, const RTCError& error) { rtc::StringBuilder oss; oss << "Failed to set " << (source == cricket::CS_LOCAL ? "local" : "remote") - << " " << SdpTypeToString(type) << " sdp: " << error.message(); + << " " << SdpTypeToString(type) << " sdp: "; + RTC_DCHECK(!absl::StartsWith(error.message(), oss.str())) << error.message(); + oss << error.message(); return oss.Release(); } @@ -364,10 +326,9 @@ RTCError VerifyCrypto(const SessionDescription* desc, if (content_info.rejected) { continue; } - // Note what media is used with each crypto protocol, for all sections. - NoteKeyProtocolAndMedia(dtls_enabled ? 
webrtc::kEnumCounterKeyProtocolDtls - : webrtc::kEnumCounterKeyProtocolSdes, - content_info.media_description()->type()); +#if !defined(WEBRTC_FUCHSIA) + RTC_CHECK(dtls_enabled) << "SDES protocol is only allowed in Fuchsia"; +#endif const std::string& mid = content_info.name; auto it = bundle_groups_by_mid.find(mid); const cricket::ContentGroup* bundle = @@ -512,7 +473,7 @@ RTCError UpdateSimulcastLayerStatusInSender( const std::vector& layers, rtc::scoped_refptr sender) { RTC_DCHECK(sender); - RtpParameters parameters = sender->GetParametersInternal(); + RtpParameters parameters = sender->GetParametersInternalWithAllLayers(); std::vector disabled_layers; // The simulcast envelope cannot be changed, only the status of the streams. @@ -531,7 +492,7 @@ RTCError UpdateSimulcastLayerStatusInSender( encoding.active = !iter->is_paused; } - RTCError result = sender->SetParametersInternal(parameters); + RTCError result = sender->SetParametersInternalWithAllLayers(parameters); if (result.ok()) { result = sender->DisableEncodingLayers(disabled_layers); } @@ -557,7 +518,7 @@ bool SimulcastIsRejected(const ContentInfo* local_content, RTCError DisableSimulcastInSender( rtc::scoped_refptr sender) { RTC_DCHECK(sender); - RtpParameters parameters = sender->GetParametersInternal(); + RtpParameters parameters = sender->GetParametersInternalWithAllLayers(); if (parameters.encodings.size() <= 1) { return RTCError::OK(); } @@ -643,7 +604,7 @@ cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForTransceiver( // The following sets up RIDs and Simulcast. // RIDs are included if Simulcast is requested or if any RID was specified. RtpParameters send_parameters = - transceiver->sender_internal()->GetParametersInternal(); + transceiver->sender_internal()->GetParametersInternalWithAllLayers(); bool has_rids = std::any_of(send_parameters.encodings.begin(), send_parameters.encodings.end(), [](const RtpEncodingParameters& encoding) { @@ -724,13 +685,12 @@ rtc::scoped_refptr LookupDtlsTransportByMid( JsepTransportController* controller, const std::string& mid) { // TODO(tommi): Can we post this (and associated operations where this - // function is called) to the network thread and avoid this Invoke? + // function is called) to the network thread and avoid this BlockingCall? // We might be able to simplify a few things if we set the transport on // the network thread and then update the implementation to check that // the set_ and relevant get methods are always called on the network // thread (we'll need to update proxy maps). - return network_thread->Invoke>( - RTC_FROM_HERE, + return network_thread->BlockingCall( [controller, &mid] { return controller->LookupDtlsTransportByMid(mid); }); } @@ -747,6 +707,249 @@ bool ContentHasHeaderExtension(const cricket::ContentInfo& content_info, } // namespace +// This class stores state related to a SetRemoteDescription operation, captures +// and reports potential errors that migth occur and makes sure to notify the +// observer of the operation and the operations chain of completion. 
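To make the new class easier to follow, here is how the reworked ApplyRemoteDescription() further down in this diff drives a RemoteDescriptionOperation. The sketch is condensed from those hunks, with error paths abbreviated and the template argument (dropped by extraction) inferred from context; it is not verbatim source:

void SdpOfferAnswerHandler::ApplyRemoteDescription(
    std::unique_ptr<RemoteDescriptionOperation> operation) {
  pc_->ClearStatsCache();
  if (!operation->ReplaceRemoteDescriptionAndCheckEror()) return;
  if (!operation->UpdateChannels()) return;
  if (!operation->UpdateSessionState()) return;
  if (!operation->UseCandidatesInRemoteDescription()) return;
  // ... ICE-restart bookkeeping, session-error check, SCTP sid allocation,
  // transceiver/stream updates ...
  operation->SignalCompletion();  // otherwise fired by the destructor
  SetRemoteDescriptionPostProcess(operation->type() == SdpType::kAnswer);
}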
+class SdpOfferAnswerHandler::RemoteDescriptionOperation { + public: + RemoteDescriptionOperation( + SdpOfferAnswerHandler* handler, + std::unique_ptr desc, + rtc::scoped_refptr observer, + std::function operations_chain_callback) + : handler_(handler), + desc_(std::move(desc)), + observer_(std::move(observer)), + operations_chain_callback_(std::move(operations_chain_callback)), + unified_plan_(handler_->IsUnifiedPlan()) { + if (!desc_) { + type_ = static_cast(-1); + InvalidParam("SessionDescription is NULL."); + } else { + type_ = desc_->GetType(); + } + } + + ~RemoteDescriptionOperation() { + RTC_DCHECK_RUN_ON(handler_->signaling_thread()); + SignalCompletion(); + operations_chain_callback_(); + } + + bool ok() const { return error_.ok(); } + + // Notifies the observer that the operation is complete and releases the + // reference to the observer. + void SignalCompletion() { + if (!observer_) + return; + + if (!error_.ok() && type_ != static_cast(-1)) { + std::string error_message = + GetSetDescriptionErrorMessage(cricket::CS_REMOTE, type_, error_); + RTC_LOG(LS_ERROR) << error_message; + error_.set_message(std::move(error_message)); + } + + observer_->OnSetRemoteDescriptionComplete(error_); + observer_ = nullptr; // Only fire the notification once. + } + + // If a session error has occurred the PeerConnection is in a possibly + // inconsistent state so fail right away. + bool HaveSessionError() { + RTC_DCHECK(ok()); + if (handler_->session_error() != SessionError::kNone) + InternalError(handler_->GetSessionErrorMsg()); + return !ok(); + } + + // Returns true if the operation was a rollback operation. If this function + // returns true, the caller should consider the operation complete. Otherwise + // proceed to the next step. + bool MaybeRollback() { + RTC_DCHECK_RUN_ON(handler_->signaling_thread()); + RTC_DCHECK(ok()); + if (type_ != SdpType::kRollback) { + // Check if we can do an implicit rollback. + if (type_ == SdpType::kOffer && unified_plan_ && + handler_->pc_->configuration()->enable_implicit_rollback && + handler_->signaling_state() == + PeerConnectionInterface::kHaveLocalOffer) { + handler_->Rollback(type_); + } + return false; + } + + if (unified_plan_) { + error_ = handler_->Rollback(type_); + } else if (type_ == SdpType::kRollback) { + Unsupported("Rollback not supported in Plan B"); + } + + return true; + } + + // Report to UMA the format of the received offer or answer. + void ReportOfferAnswerUma() { + RTC_DCHECK(ok()); + if (type_ == SdpType::kOffer || type_ == SdpType::kAnswer) { + handler_->pc_->ReportSdpBundleUsage(*desc_.get()); + } + } + + // Checks if the session description for the operation is valid. If not, the + // function captures error information and returns false. Note that if the + // return value is false, the operation should be considered done. + bool IsDescriptionValid() { + RTC_DCHECK_RUN_ON(handler_->signaling_thread()); + RTC_DCHECK(ok()); + RTC_DCHECK(bundle_groups_by_mid_.empty()) << "Already called?"; + bundle_groups_by_mid_ = GetBundleGroupsByMid(description()); + error_ = handler_->ValidateSessionDescription( + desc_.get(), cricket::CS_REMOTE, bundle_groups_by_mid_); + return ok(); + } + + // Transfers ownership of the session description object over to `handler_`. 
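The destructor above is what guarantees the observer and the operations chain are notified exactly once, even when a step bails out early. A minimal standalone illustration of that destructor-driven completion pattern, using only the standard library (the names here are hypothetical, not WebRTC APIs):

#include <functional>
#include <iostream>
#include <memory>
#include <string>

class Operation {
 public:
  explicit Operation(std::function<void(std::string)> on_done)
      : on_done_(std::move(on_done)) {}
  // Whatever happens, completion is reported when the object goes out of scope.
  ~Operation() { SignalCompletion(); }

  void Fail(std::string reason) { error_ = std::move(reason); }
  bool ok() const { return error_.empty(); }

 private:
  void SignalCompletion() {
    if (on_done_) {
      on_done_(error_.empty() ? "ok" : error_);
      on_done_ = nullptr;  // only fire the notification once
    }
  }
  std::function<void(std::string)> on_done_;
  std::string error_;
};

int main() {
  auto op = std::make_unique<Operation>(
      [](std::string result) { std::cout << "completed: " << result << "\n"; });
  op->Fail("invalid candidates");
  if (!op->ok()) return 1;  // early exit; the destructor still reports completion
  return 0;
}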
+ bool ReplaceRemoteDescriptionAndCheckEror() { + RTC_DCHECK_RUN_ON(handler_->signaling_thread()); + RTC_DCHECK(ok()); + RTC_DCHECK(desc_); + RTC_DCHECK(!replaced_remote_description_); +#if RTC_DCHECK_IS_ON + const auto* existing_remote_description = handler_->remote_description(); +#endif + + error_ = handler_->ReplaceRemoteDescription(std::move(desc_), type_, + &replaced_remote_description_); + + if (ok()) { +#if RTC_DCHECK_IS_ON + // Sanity check that our `old_remote_description()` method always returns + // the same value as `remote_description()` did before the call to + // ReplaceRemoteDescription. + RTC_DCHECK_EQ(existing_remote_description, old_remote_description()); +#endif + } else { + SetAsSessionError(); + } + + return ok(); + } + + bool UpdateChannels() { + RTC_DCHECK(ok()); + RTC_DCHECK(!desc_) << "ReplaceRemoteDescription hasn't been called"; + + const auto* remote_description = handler_->remote_description(); + + const cricket::SessionDescription* session_desc = + remote_description->description(); + + // Transport and Media channels will be created only when offer is set. + if (unified_plan_) { + error_ = handler_->UpdateTransceiversAndDataChannels( + cricket::CS_REMOTE, *remote_description, + handler_->local_description(), old_remote_description(), + bundle_groups_by_mid_); + } else { + // Media channels will be created only when offer is set. These may use + // new transports just created by PushdownTransportDescription. + if (type_ == SdpType::kOffer) { + // TODO(mallinath) - Handle CreateChannel failure, as new local + // description is applied. Restore back to old description. + error_ = handler_->CreateChannels(*session_desc); + } + // Remove unused channels if MediaContentDescription is rejected. + handler_->RemoveUnusedChannels(session_desc); + } + + return ok(); + } + + bool UpdateSessionState() { + RTC_DCHECK(ok()); + error_ = handler_->UpdateSessionState( + type_, cricket::CS_REMOTE, + handler_->remote_description()->description(), bundle_groups_by_mid_); + if (!ok()) + SetAsSessionError(); + return ok(); + } + + bool UseCandidatesInRemoteDescription() { + RTC_DCHECK(ok()); + if (handler_->local_description() && + !handler_->UseCandidatesInRemoteDescription()) { + InvalidParam(kInvalidCandidates); + } + return ok(); + } + + // Convenience getter for desc_->GetType(). + SdpType type() const { return type_; } + bool unified_plan() const { return unified_plan_; } + cricket::SessionDescription* description() { return desc_->description(); } + + const SessionDescriptionInterface* old_remote_description() const { + RTC_DCHECK(!desc_) << "Called before replacing the remote description"; + if (type_ == SdpType::kAnswer) + return replaced_remote_description_.get(); + return replaced_remote_description_ + ? replaced_remote_description_.get() + : handler_->current_remote_description(); + } + + // Returns a reference to a cached map of bundle groups ordered by mid. + // Note that this will only be valid after a successful call to + // `IsDescriptionValid`. + const std::map& + bundle_groups_by_mid() const { + RTC_DCHECK(ok()); + return bundle_groups_by_mid_; + } + + private: + // Convenience methods for populating the embedded `error_` object. 
+ void Unsupported(std::string message) { + SetError(RTCErrorType::UNSUPPORTED_OPERATION, std::move(message)); + } + + void InvalidParam(std::string message) { + SetError(RTCErrorType::INVALID_PARAMETER, std::move(message)); + } + + void InternalError(std::string message) { + SetError(RTCErrorType::INTERNAL_ERROR, std::move(message)); + } + + void SetError(RTCErrorType type, std::string message) { + RTC_DCHECK(ok()) << "Overwriting an existing error?"; + error_ = RTCError(type, std::move(message)); + } + + // Called when the PeerConnection could be in an inconsistent state and we set + // the session error so that future calls to + // SetLocalDescription/SetRemoteDescription fail. + void SetAsSessionError() { + RTC_DCHECK(!ok()); + handler_->SetSessionError(SessionError::kContent, error_.message()); + } + + SdpOfferAnswerHandler* const handler_; + std::unique_ptr desc_; + // Keeps the replaced session description object alive while the operation + // is taking place since methods that depend on `old_remote_description()` + // for updating the state, need it. + std::unique_ptr replaced_remote_description_; + rtc::scoped_refptr observer_; + std::function operations_chain_callback_; + RTCError error_ = RTCError::OK(); + std::map bundle_groups_by_mid_; + SdpType type_; + const bool unified_plan_; +}; // Used by parameterless SetLocalDescription() to create an offer or answer. // Upon completion of creating the session description, SetLocalDescription() is // invoked with the result. @@ -886,10 +1089,10 @@ class SdpOfferAnswerHandler::SetSessionDescriptionObserverAdapter return; if (error.ok()) { handler_->pc_->message_handler()->PostSetSessionDescriptionSuccess( - inner_observer_); + inner_observer_.get()); } else { handler_->pc_->message_handler()->PostSetSessionDescriptionFailure( - inner_observer_, std::move(error)); + inner_observer_.get(), std::move(error)); } } @@ -946,8 +1149,10 @@ class SdpOfferAnswerHandler::LocalIceCredentialsToReplace { std::set> ice_credentials_; }; -SdpOfferAnswerHandler::SdpOfferAnswerHandler(PeerConnection* pc) +SdpOfferAnswerHandler::SdpOfferAnswerHandler(PeerConnectionSdpMethods* pc, + ConnectionContext* context) : pc_(pc), + context_(context), local_streams_(StreamCollection::Create()), remote_streams_(StreamCollection::Create()), operations_chain_(rtc::OperationsChain::Create()), @@ -966,20 +1171,24 @@ SdpOfferAnswerHandler::~SdpOfferAnswerHandler() {} // Static std::unique_ptr SdpOfferAnswerHandler::Create( - PeerConnection* pc, + PeerConnectionSdpMethods* pc, const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies& dependencies) { - auto handler = absl::WrapUnique(new SdpOfferAnswerHandler(pc)); - handler->Initialize(configuration, dependencies); + PeerConnectionDependencies& dependencies, + ConnectionContext* context) { + auto handler = absl::WrapUnique(new SdpOfferAnswerHandler(pc, context)); + handler->Initialize(configuration, dependencies, context); return handler; } void SdpOfferAnswerHandler::Initialize( const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies& dependencies) { + PeerConnectionDependencies& dependencies, + ConnectionContext* context) { RTC_DCHECK_RUN_ON(signaling_thread()); + // 100 kbps is used by default, but can be overriden by a non-standard + // RTCConfiguration value (not available on Web). 
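The change that follows swaps a raw optional assignment for value_or(), so an unset RTCConfiguration field now resolves to the 100 kbps default described in the comment. The same idiom with std::optional (the WebRTC field uses absl::optional, which behaves the same way here); a self-contained example:

#include <optional>

int main() {
  std::optional<int> configured_kbps;        // application did not set a value
  int kbps = configured_kbps.value_or(100);  // falls back to the 100 kbps default
  return kbps == 100 ? 0 : 1;                // exits 0: the default was applied
}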
video_options_.screencast_min_bitrate_kbps = - configuration.screencast_min_bitrate; + configuration.screencast_min_bitrate.value_or(100); audio_options_.combined_audio_video_bwe = configuration.combined_audio_video_bwe; @@ -992,9 +1201,6 @@ void SdpOfferAnswerHandler::Initialize( audio_options_.audio_jitter_buffer_min_delay_ms = configuration.audio_jitter_buffer_min_delay_ms; - audio_options_.audio_jitter_buffer_enable_rtx_handling = - configuration.audio_jitter_buffer_enable_rtx_handling; - // Obtain a certificate from RTCConfiguration if any were provided (optional). rtc::scoped_refptr certificate; if (!configuration.certificates.empty()) { @@ -1006,12 +1212,13 @@ void SdpOfferAnswerHandler::Initialize( webrtc_session_desc_factory_ = std::make_unique( - signaling_thread(), channel_manager(), this, pc_->session_id(), - pc_->dtls_enabled(), std::move(dependencies.cert_generator), - certificate, &ssrc_generator_, + context, this, pc_->session_id(), pc_->dtls_enabled(), + std::move(dependencies.cert_generator), std::move(certificate), [this](const rtc::scoped_refptr& certificate) { - transport_controller()->SetLocalCertificate(certificate); - }); + RTC_DCHECK_RUN_ON(signaling_thread()); + transport_controller_s()->SetLocalCertificate(certificate); + }, + pc_->trials()); if (pc_->options()->disable_encryption) { webrtc_session_desc_factory_->SetSdesPolicy(cricket::SEC_DISABLED); @@ -1032,27 +1239,37 @@ void SdpOfferAnswerHandler::Initialize( // ================================================================== // Access to pc_ variables -cricket::ChannelManager* SdpOfferAnswerHandler::channel_manager() const { - return pc_->channel_manager(); +cricket::MediaEngineInterface* SdpOfferAnswerHandler::media_engine() const { + RTC_DCHECK(context_); + return context_->media_engine(); } + TransceiverList* SdpOfferAnswerHandler::transceivers() { if (!pc_->rtp_manager()) { return nullptr; } return pc_->rtp_manager()->transceivers(); } + const TransceiverList* SdpOfferAnswerHandler::transceivers() const { if (!pc_->rtp_manager()) { return nullptr; } return pc_->rtp_manager()->transceivers(); } -JsepTransportController* SdpOfferAnswerHandler::transport_controller() { - return pc_->transport_controller(); +JsepTransportController* SdpOfferAnswerHandler::transport_controller_s() { + return pc_->transport_controller_s(); } -const JsepTransportController* SdpOfferAnswerHandler::transport_controller() +JsepTransportController* SdpOfferAnswerHandler::transport_controller_n() { + return pc_->transport_controller_n(); +} +const JsepTransportController* SdpOfferAnswerHandler::transport_controller_s() const { - return pc_->transport_controller(); + return pc_->transport_controller_s(); +} +const JsepTransportController* SdpOfferAnswerHandler::transport_controller_n() + const { + return pc_->transport_controller_n(); } DataChannelController* SdpOfferAnswerHandler::data_channel_controller() { return pc_->data_channel_controller(); @@ -1093,7 +1310,11 @@ void SdpOfferAnswerHandler::RestartIce() { } rtc::Thread* SdpOfferAnswerHandler::signaling_thread() const { - return pc_->signaling_thread(); + return context_->signaling_thread(); +} + +rtc::Thread* SdpOfferAnswerHandler::network_thread() const { + return context_->network_thread(); } void SdpOfferAnswerHandler::CreateOffer( @@ -1192,7 +1413,8 @@ void SdpOfferAnswerHandler::SetLocalDescription( RTC_DCHECK_RUN_ON(signaling_thread()); SetLocalDescription( rtc::make_ref_counted( - weak_ptr_factory_.GetWeakPtr(), observer)); + weak_ptr_factory_.GetWeakPtr(), + 
rtc::scoped_refptr(observer))); } void SdpOfferAnswerHandler::SetLocalDescription( @@ -1258,9 +1480,10 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(desc); - // Update stats here so that we have the most recent stats for tracks and - // streams that might be removed by updating the session description. - pc_->stats()->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard); + // Invalidate the stats caches to make sure that they get + // updated the next time getStats() gets called, as updating the session + // description affects the stats. + pc_->ClearStatsCache(); // Take a reference to the old local description since it's used below to // compare against the new local description. When setting the new local @@ -1282,6 +1505,9 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( replaced_local_description = std::move(pending_local_description_); pending_local_description_ = std::move(desc); } + if (!initial_offerer_) { + initial_offerer_.emplace(type == SdpType::kOffer); + } // The session description to apply now must be accessed by // `local_description()`. RTC_DCHECK(local_description()); @@ -1306,61 +1532,68 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( } if (IsUnifiedPlan()) { - RTCError error = UpdateTransceiversAndDataChannels( + error = UpdateTransceiversAndDataChannels( cricket::CS_LOCAL, *local_description(), old_local_description, remote_description(), bundle_groups_by_mid); if (!error.ok()) { + RTC_LOG(LS_ERROR) << error.message() << " (" << SdpTypeToString(type) + << ")"; return error; } - std::vector> remove_list; - std::vector> removed_streams; - for (const auto& transceiver_ext : transceivers()->List()) { - auto transceiver = transceiver_ext->internal(); - if (transceiver->stopped()) { - continue; - } + if (ConfiguredForMedia()) { + std::vector> remove_list; + std::vector> removed_streams; + for (const auto& transceiver_ext : transceivers()->List()) { + auto transceiver = transceiver_ext->internal(); + if (transceiver->stopped()) { + continue; + } - // 2.2.7.1.1.(6-9): Set sender and receiver's transport slots. - // Note that code paths that don't set MID won't be able to use - // information about DTLS transports. - if (transceiver->mid()) { - auto dtls_transport = LookupDtlsTransportByMid( - pc_->network_thread(), transport_controller(), *transceiver->mid()); - transceiver->sender_internal()->set_transport(dtls_transport); - transceiver->receiver_internal()->set_transport(dtls_transport); - } + // 2.2.7.1.1.(6-9): Set sender and receiver's transport slots. + // Note that code paths that don't set MID won't be able to use + // information about DTLS transports. 
+ if (transceiver->mid()) { + auto dtls_transport = LookupDtlsTransportByMid( + context_->network_thread(), transport_controller_s(), + *transceiver->mid()); + transceiver->sender_internal()->set_transport(dtls_transport); + transceiver->receiver_internal()->set_transport(dtls_transport); + } - const ContentInfo* content = - FindMediaSectionForTransceiver(transceiver, local_description()); - if (!content) { - continue; - } - const MediaContentDescription* media_desc = content->media_description(); - // 2.2.7.1.6: If description is of type "answer" or "pranswer", then run - // the following steps: - if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { - // 2.2.7.1.6.1: If direction is "sendonly" or "inactive", and - // transceiver's [[FiredDirection]] slot is either "sendrecv" or - // "recvonly", process the removal of a remote track for the media - // description, given transceiver, removeList, and muteTracks. - if (!RtpTransceiverDirectionHasRecv(media_desc->direction()) && - (transceiver->fired_direction() && - RtpTransceiverDirectionHasRecv(*transceiver->fired_direction()))) { - ProcessRemovalOfRemoteTrack(transceiver_ext, &remove_list, - &removed_streams); + const ContentInfo* content = + FindMediaSectionForTransceiver(transceiver, local_description()); + if (!content) { + continue; + } + const MediaContentDescription* media_desc = + content->media_description(); + // 2.2.7.1.6: If description is of type "answer" or "pranswer", then run + // the following steps: + if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { + // 2.2.7.1.6.1: If direction is "sendonly" or "inactive", and + // transceiver's [[FiredDirection]] slot is either "sendrecv" or + // "recvonly", process the removal of a remote track for the media + // description, given transceiver, removeList, and muteTracks. + if (!RtpTransceiverDirectionHasRecv(media_desc->direction()) && + (transceiver->fired_direction() && + RtpTransceiverDirectionHasRecv( + *transceiver->fired_direction()))) { + ProcessRemovalOfRemoteTrack(transceiver_ext, &remove_list, + &removed_streams); + } + // 2.2.7.1.6.2: Set transceiver's [[CurrentDirection]] and + // [[FiredDirection]] slots to direction. + transceiver->set_current_direction(media_desc->direction()); + transceiver->set_fired_direction(media_desc->direction()); } - // 2.2.7.1.6.2: Set transceiver's [[CurrentDirection]] and - // [[FiredDirection]] slots to direction. - transceiver->set_current_direction(media_desc->direction()); - transceiver->set_fired_direction(media_desc->direction()); } - } - auto observer = pc_->Observer(); - for (const auto& transceiver : remove_list) { - observer->OnRemoveTrack(transceiver->receiver()); - } - for (const auto& stream : removed_streams) { - observer->OnRemoveStream(stream); + auto observer = pc_->Observer(); + for (const auto& transceiver : remove_list) { + observer->OnRemoveTrack(transceiver->receiver()); + } + for (const auto& stream : removed_streams) { + observer->OnRemoveStream(stream); + } } } else { // Media channels will be created only when offer is set. These may use new @@ -1370,6 +1603,8 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( // description is applied. Restore back to old description. 
RTCError error = CreateChannels(*local_description()->description()); if (!error.ok()) { + RTC_LOG(LS_ERROR) << error.message() << " (" << SdpTypeToString(type) + << ")"; return error; } } @@ -1381,13 +1616,13 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( local_description()->description(), bundle_groups_by_mid); if (!error.ok()) { + RTC_LOG(LS_ERROR) << error.message() << " (" << SdpTypeToString(type) + << ")"; return error; } - if (remote_description()) { - // Now that we have a local description, we can push down remote candidates. - UseCandidatesInSessionDescription(remote_description()); - } + // Now that we have a local description, we can push down remote candidates. + UseCandidatesInRemoteDescription(); pending_ice_restarts_.clear(); if (session_error() != SessionError::kNone) { @@ -1402,35 +1637,39 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( } if (IsUnifiedPlan()) { - // We must use List and not ListInternal here because - // transceivers()->StableState() is indexed by the non-internal refptr. - for (const auto& transceiver_ext : transceivers()->List()) { - auto transceiver = transceiver_ext->internal(); - if (transceiver->stopped()) { - continue; - } - const ContentInfo* content = - FindMediaSectionForTransceiver(transceiver, local_description()); - if (!content) { - continue; - } - cricket::ChannelInterface* channel = transceiver->channel(); - if (content->rejected || !channel || channel->local_streams().empty()) { - // 0 is a special value meaning "this sender has no associated send - // stream". Need to call this so the sender won't attempt to configure - // a no longer existing stream and run into DCHECKs in the lower - // layers. - transceiver->sender_internal()->SetSsrc(0); - } else { - // Get the StreamParams from the channel which could generate SSRCs. - const std::vector& streams = channel->local_streams(); - transceiver->sender_internal()->set_stream_ids(streams[0].stream_ids()); - auto encodings = transceiver->sender_internal()->init_send_encodings(); - transceiver->sender_internal()->SetSsrc(streams[0].first_ssrc()); - if (!encodings.empty()) { - transceivers() - ->StableState(transceiver_ext) - ->SetInitSendEncodings(encodings); + if (ConfiguredForMedia()) { + // We must use List and not ListInternal here because + // transceivers()->StableState() is indexed by the non-internal refptr. + for (const auto& transceiver_ext : transceivers()->List()) { + auto transceiver = transceiver_ext->internal(); + if (transceiver->stopped()) { + continue; + } + const ContentInfo* content = + FindMediaSectionForTransceiver(transceiver, local_description()); + if (!content) { + continue; + } + cricket::ChannelInterface* channel = transceiver->channel(); + if (content->rejected || !channel || channel->local_streams().empty()) { + // 0 is a special value meaning "this sender has no associated send + // stream". Need to call this so the sender won't attempt to configure + // a no longer existing stream and run into DCHECKs in the lower + // layers. + transceiver->sender_internal()->SetSsrc(0); + } else { + // Get the StreamParams from the channel which could generate SSRCs. 
+ const std::vector& streams = channel->local_streams(); + transceiver->sender_internal()->set_stream_ids( + streams[0].stream_ids()); + auto encodings = + transceiver->sender_internal()->init_send_encodings(); + transceiver->sender_internal()->SetSsrc(streams[0].first_ssrc()); + if (!encodings.empty()) { + transceivers() + ->StableState(transceiver_ext) + ->SetInitSendEncodings(encodings); + } } } } @@ -1499,15 +1738,11 @@ void SdpOfferAnswerHandler::SetRemoteDescription( // SetSessionDescriptionObserverAdapter takes care of making sure the // `observer_refptr` is invoked in a posted message. this_weak_ptr->DoSetRemoteDescription( - std::move(desc), - rtc::make_ref_counted( - this_weak_ptr, observer_refptr)); - // For backwards-compatability reasons, we declare the operation as - // completed here (rather than in a post), so that the operation chain - // is not blocked by this operation when the observer is invoked. This - // allows the observer to trigger subsequent offer/answer operations - // synchronously if the operation chain is now empty. - operations_chain_callback(); + std::make_unique( + this_weak_ptr.get(), std::move(desc), + rtc::make_ref_counted( + this_weak_ptr, observer_refptr), + std::move(operations_chain_callback))); }); } @@ -1522,6 +1757,12 @@ void SdpOfferAnswerHandler::SetRemoteDescription( [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), observer, desc = std::move(desc)]( std::function operations_chain_callback) mutable { + if (!observer) { + RTC_DLOG(LS_ERROR) << "SetRemoteDescription - observer is NULL."; + operations_chain_callback(); + return; + } + // Abort early if `this_weak_ptr` is no longer valid. if (!this_weak_ptr) { observer->OnSetRemoteDescriptionComplete(RTCError( @@ -1530,107 +1771,80 @@ void SdpOfferAnswerHandler::SetRemoteDescription( operations_chain_callback(); return; } - this_weak_ptr->DoSetRemoteDescription(std::move(desc), - std::move(observer)); - // DoSetRemoteDescription() is implemented as a synchronous operation. - // The `observer` will already have been informed that it completed, and - // we can mark this operation as complete without any loose ends. - operations_chain_callback(); + + this_weak_ptr->DoSetRemoteDescription( + std::make_unique( + this_weak_ptr.get(), std::move(desc), std::move(observer), + std::move(operations_chain_callback))); }); } -RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( +RTCError SdpOfferAnswerHandler::ReplaceRemoteDescription( std::unique_ptr desc, - const std::map& - bundle_groups_by_mid) { - TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::ApplyRemoteDescription"); - RTC_DCHECK_RUN_ON(signaling_thread()); - RTC_DCHECK(desc); - - // Update stats here so that we have the most recent stats for tracks and - // streams that might be removed by updating the session description. - pc_->stats()->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard); - - // Take a reference to the old remote description since it's used below to - // compare against the new remote description. When setting the new remote - // description, grab ownership of the replaced session description in case it - // is the same as `old_remote_description`, to keep it alive for the duration - // of the method. - const SessionDescriptionInterface* old_remote_description = - remote_description(); - std::unique_ptr replaced_remote_description; - SdpType type = desc->GetType(); - if (type == SdpType::kAnswer) { - replaced_remote_description = pending_remote_description_ - ? 
std::move(pending_remote_description_) - : std::move(current_remote_description_); + SdpType sdp_type, + std::unique_ptr* replaced_description) { + RTC_DCHECK(replaced_description); + if (sdp_type == SdpType::kAnswer) { + *replaced_description = pending_remote_description_ + ? std::move(pending_remote_description_) + : std::move(current_remote_description_); current_remote_description_ = std::move(desc); pending_remote_description_ = nullptr; current_local_description_ = std::move(pending_local_description_); } else { - replaced_remote_description = std::move(pending_remote_description_); + *replaced_description = std::move(pending_remote_description_); pending_remote_description_ = std::move(desc); } + // The session description to apply now must be accessed by // `remote_description()`. - RTC_DCHECK(remote_description()); + const cricket::SessionDescription* session_desc = + remote_description()->description(); // Report statistics about any use of simulcast. ReportSimulcastApiVersion(kSimulcastVersionApplyRemoteDescription, - *remote_description()->description()); + *session_desc); - RTCError error = PushdownTransportDescription(cricket::CS_REMOTE, type); - if (!error.ok()) { - return error; - } - // Transport and Media channels will be created only when offer is set. - if (IsUnifiedPlan()) { - RTCError error = UpdateTransceiversAndDataChannels( - cricket::CS_REMOTE, *remote_description(), local_description(), - old_remote_description, bundle_groups_by_mid); - if (!error.ok()) { - return error; - } - } else { - // Media channels will be created only when offer is set. These may use new - // transports just created by PushdownTransportDescription. - if (type == SdpType::kOffer) { - // TODO(mallinath) - Handle CreateChannel failure, as new local - // description is applied. Restore back to old description. - RTCError error = CreateChannels(*remote_description()->description()); - if (!error.ok()) { - return error; - } - } - // Remove unused channels if MediaContentDescription is rejected. - RemoveUnusedChannels(remote_description()->description()); - } + // NOTE: This will perform a BlockingCall() to the network thread. + return transport_controller_s()->SetRemoteDescription(sdp_type, session_desc); +} + +void SdpOfferAnswerHandler::ApplyRemoteDescription( + std::unique_ptr operation) { + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::ApplyRemoteDescription"); + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(operation->description()); + + // Invalidate the stats caches to make sure that they get + // updated next time getStats() gets called, as updating the session + // description affects the stats. + pc_->ClearStatsCache(); + + if (!operation->ReplaceRemoteDescriptionAndCheckEror()) + return; + + if (!operation->UpdateChannels()) + return; // NOTE: Candidates allocation will be initiated only when // SetLocalDescription is called. 
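For readability, the description-slot bookkeeping in ReplaceRemoteDescription() above can be summarized as follows; this restates the hunk with explanatory comments added, it is not additional code:

if (sdp_type == SdpType::kAnswer) {
  // The pending remote description (or the current one if nothing is pending)
  // is handed back to the caller, the answer becomes current, and the pending
  // local description is promoted alongside it.
  *replaced_description = pending_remote_description_
                              ? std::move(pending_remote_description_)
                              : std::move(current_remote_description_);
  current_remote_description_ = std::move(desc);
  pending_remote_description_ = nullptr;
  current_local_description_ = std::move(pending_local_description_);
} else {
  // Offers and provisional answers only replace the pending slot.
  *replaced_description = std::move(pending_remote_description_);
  pending_remote_description_ = std::move(desc);
}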
- error = UpdateSessionState(type, cricket::CS_REMOTE, - remote_description()->description(), - bundle_groups_by_mid); - if (!error.ok()) { - return error; - } + if (!operation->UpdateSessionState()) + return; - if (local_description() && - !UseCandidatesInSessionDescription(remote_description())) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidCandidates); - } + if (!operation->UseCandidatesInRemoteDescription()) + return; - if (old_remote_description) { + if (operation->old_remote_description()) { for (const cricket::ContentInfo& content : - old_remote_description->description()->contents()) { + operation->old_remote_description()->description()->contents()) { // Check if this new SessionDescription contains new ICE ufrag and // password that indicates the remote peer requests an ICE restart. // TODO(deadbeef): When we start storing both the current and pending // remote description, this should reset pending_ice_restarts and compare // against the current description. - if (CheckForRemoteIceRestart(old_remote_description, remote_description(), - content.name)) { - if (type == SdpType::kOffer) { + if (CheckForRemoteIceRestart(operation->old_remote_description(), + remote_description(), content.name)) { + if (operation->type() == SdpType::kOffer) { pending_ice_restarts_.insert(content.name); } } else { @@ -1642,14 +1856,14 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( // description plus any candidates added since then. We should remove // this once we're sure it won't break anything. WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription( - old_remote_description, content.name, mutable_remote_description()); + operation->old_remote_description(), content.name, + mutable_remote_description()); } } } - if (session_error() != SessionError::kNone) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); - } + if (operation->HaveSessionError()) + return; // Set the the ICE connection state to connecting since the connection may // become writable with peer reflexive candidates before any remote candidate @@ -1661,7 +1875,7 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( // actually means "gathering candidates", so cannot be be used here. if (remote_description()->GetType() != SdpType::kOffer && remote_description()->number_of_mediasections() > 0u && - pc_->ice_connection_state() == + pc_->ice_connection_state_internal() == PeerConnectionInterface::kIceConnectionNew) { pc_->SetIceConnectionState(PeerConnectionInterface::kIceConnectionChecking); } @@ -1673,122 +1887,10 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( data_channel_controller()->AllocateSctpSids(role); } - if (IsUnifiedPlan()) { - std::vector> - now_receiving_transceivers; - std::vector> remove_list; - std::vector> added_streams; - std::vector> removed_streams; - for (const auto& transceiver_ext : transceivers()->List()) { - const auto transceiver = transceiver_ext->internal(); - const ContentInfo* content = - FindMediaSectionForTransceiver(transceiver, remote_description()); - if (!content) { - continue; - } - const MediaContentDescription* media_desc = content->media_description(); - RtpTransceiverDirection local_direction = - RtpTransceiverDirectionReversed(media_desc->direction()); - // Roughly the same as steps 2.2.8.6 of section 4.4.1.6 "Set the - // RTCSessionDescription: Set the associated remote streams given - // transceiver.[[Receiver]], msids, addList, and removeList". 
- // https://w3c.github.io/webrtc-pc/#set-the-rtcsessiondescription - if (RtpTransceiverDirectionHasRecv(local_direction)) { - std::vector stream_ids; - if (!media_desc->streams().empty()) { - // The remote description has signaled the stream IDs. - stream_ids = media_desc->streams()[0].stream_ids(); - } - transceivers() - ->StableState(transceiver_ext) - ->SetRemoteStreamIdsIfUnset(transceiver->receiver()->stream_ids()); - - RTC_LOG(LS_INFO) << "Processing the MSIDs for MID=" << content->name - << " (" << GetStreamIdsString(stream_ids) << ")."; - SetAssociatedRemoteStreams(transceiver->receiver_internal(), stream_ids, - &added_streams, &removed_streams); - // From the WebRTC specification, steps 2.2.8.5/6 of section 4.4.1.6 - // "Set the RTCSessionDescription: If direction is sendrecv or recvonly, - // and transceiver's current direction is neither sendrecv nor recvonly, - // process the addition of a remote track for the media description. - if (!transceiver->fired_direction() || - !RtpTransceiverDirectionHasRecv(*transceiver->fired_direction())) { - RTC_LOG(LS_INFO) - << "Processing the addition of a remote track for MID=" - << content->name << "."; - // Since the transceiver is passed to the user in an - // OnTrack event, we must use the proxied transceiver. - now_receiving_transceivers.push_back(transceiver_ext); - } - } - // 2.2.8.1.9: If direction is "sendonly" or "inactive", and transceiver's - // [[FiredDirection]] slot is either "sendrecv" or "recvonly", process the - // removal of a remote track for the media description, given transceiver, - // removeList, and muteTracks. - if (!RtpTransceiverDirectionHasRecv(local_direction) && - (transceiver->fired_direction() && - RtpTransceiverDirectionHasRecv(*transceiver->fired_direction()))) { - ProcessRemovalOfRemoteTrack(transceiver_ext, &remove_list, - &removed_streams); - } - // 2.2.8.1.10: Set transceiver's [[FiredDirection]] slot to direction. - transceiver->set_fired_direction(local_direction); - // 2.2.8.1.11: If description is of type "answer" or "pranswer", then run - // the following steps: - if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { - // 2.2.8.1.11.1: Set transceiver's [[CurrentDirection]] slot to - // direction. - transceiver->set_current_direction(local_direction); - // 2.2.8.1.11.[3-6]: Set the transport internal slots. - if (transceiver->mid()) { - auto dtls_transport = LookupDtlsTransportByMid(pc_->network_thread(), - transport_controller(), - *transceiver->mid()); - transceiver->sender_internal()->set_transport(dtls_transport); - transceiver->receiver_internal()->set_transport(dtls_transport); - } - } - // 2.2.8.1.12: If the media description is rejected, and transceiver is - // not already stopped, stop the RTCRtpTransceiver transceiver. - if (content->rejected && !transceiver->stopped()) { - RTC_LOG(LS_INFO) << "Stopping transceiver for MID=" << content->name - << " since the media section was rejected."; - transceiver->StopTransceiverProcedure(); - } - if (!content->rejected && - RtpTransceiverDirectionHasRecv(local_direction)) { - if (!media_desc->streams().empty() && - media_desc->streams()[0].has_ssrcs()) { - uint32_t ssrc = media_desc->streams()[0].first_ssrc(); - transceiver->receiver_internal()->SetupMediaChannel(ssrc); - } else { - transceiver->receiver_internal()->SetupUnsignaledMediaChannel(); - } - } - } - // Once all processing has finished, fire off callbacks. 
- auto observer = pc_->Observer(); - for (const auto& transceiver : now_receiving_transceivers) { - pc_->stats()->AddTrack(transceiver->receiver()->track()); - observer->OnTrack(transceiver); - observer->OnAddTrack(transceiver->receiver(), - transceiver->receiver()->streams()); - } - for (const auto& stream : added_streams) { - observer->OnAddStream(stream); - } - for (const auto& transceiver : remove_list) { - observer->OnRemoveTrack(transceiver->receiver()); - } - for (const auto& stream : removed_streams) { - observer->OnRemoveStream(stream); - } + if (operation->unified_plan()) { + ApplyRemoteDescriptionUpdateTransceiverState(operation->type()); } - const cricket::ContentInfo* audio_content = - GetFirstAudioContent(remote_description()->description()); - const cricket::ContentInfo* video_content = - GetFirstVideoContent(remote_description()->description()); const cricket::AudioContentDescription* audio_desc = GetFirstAudioContentDescription(remote_description()->description()); const cricket::VideoContentDescription* video_desc = @@ -1802,68 +1904,214 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( remote_peer_supports_msid_ = true; } - // We wait to signal new streams until we finish processing the description, - // since only at that point will new streams have all their tracks. - rtc::scoped_refptr new_streams(StreamCollection::Create()); + if (!operation->unified_plan()) { + PlanBUpdateSendersAndReceivers( + GetFirstAudioContent(remote_description()->description()), audio_desc, + GetFirstVideoContent(remote_description()->description()), video_desc); + } - if (!IsUnifiedPlan()) { - // TODO(steveanton): When removing RTP senders/receivers in response to a - // rejected media section, there is some cleanup logic that expects the - // voice/ video channel to still be set. But in this method the voice/video - // channel would have been destroyed by the SetRemoteDescription caller - // above so the cleanup that relies on them fails to run. The RemoveSenders - // calls should be moved to right before the DestroyChannel calls to fix - // this. - - // Find all audio rtp streams and create corresponding remote AudioTracks - // and MediaStreams. - if (audio_content) { - if (audio_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_AUDIO); - } else { - bool default_audio_track_needed = - !remote_peer_supports_msid_ && - RtpTransceiverDirectionHasSend(audio_desc->direction()); - UpdateRemoteSendersList(GetActiveStreams(audio_desc), - default_audio_track_needed, audio_desc->type(), - new_streams); - } + if (operation->type() == SdpType::kAnswer) { + if (local_ice_credentials_to_replace_->SatisfiesIceRestart( + *current_local_description_)) { + local_ice_credentials_to_replace_->ClearIceCredentials(); } - // Find all video rtp streams and create corresponding remote VideoTracks - // and MediaStreams. - if (video_content) { - if (video_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_VIDEO); + RemoveStoppedTransceivers(); + } + + // Consider the operation complete at this point. 
+ operation->SignalCompletion(); + + SetRemoteDescriptionPostProcess(operation->type() == SdpType::kAnswer); +} + +void SdpOfferAnswerHandler::ApplyRemoteDescriptionUpdateTransceiverState( + SdpType sdp_type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(IsUnifiedPlan()); + if (!ConfiguredForMedia()) { + return; + } + std::vector> + now_receiving_transceivers; + std::vector> remove_list; + std::vector> added_streams; + std::vector> removed_streams; + for (const auto& transceiver_ext : transceivers()->List()) { + const auto transceiver = transceiver_ext->internal(); + const ContentInfo* content = + FindMediaSectionForTransceiver(transceiver, remote_description()); + if (!content) { + continue; + } + const MediaContentDescription* media_desc = content->media_description(); + RtpTransceiverDirection local_direction = + RtpTransceiverDirectionReversed(media_desc->direction()); + // Remember the previous remote streams if this is a remote offer. This + // makes it possible to rollback modifications to the streams. + if (sdp_type == SdpType::kOffer) { + transceivers() + ->StableState(transceiver_ext) + ->SetRemoteStreamIds(transceiver->receiver()->stream_ids()); + } + // Roughly the same as steps 2.2.8.6 of section 4.4.1.6 "Set the + // RTCSessionDescription: Set the associated remote streams given + // transceiver.[[Receiver]], msids, addList, and removeList". + // https://w3c.github.io/webrtc-pc/#set-the-rtcsessiondescription + if (RtpTransceiverDirectionHasRecv(local_direction)) { + std::vector stream_ids; + if (!media_desc->streams().empty()) { + // The remote description has signaled the stream IDs. + stream_ids = media_desc->streams()[0].stream_ids(); + } + + RTC_LOG(LS_INFO) << "Processing the MSIDs for MID=" << content->name + << " (" << GetStreamIdsString(stream_ids) << ")."; + SetAssociatedRemoteStreams(transceiver->receiver_internal(), stream_ids, + &added_streams, &removed_streams); + // From the WebRTC specification, steps 2.2.8.5/6 of section 4.4.1.6 + // "Set the RTCSessionDescription: If direction is sendrecv or recvonly, + // and transceiver's current direction is neither sendrecv nor recvonly, + // process the addition of a remote track for the media description. + if (!transceiver->fired_direction() || + !RtpTransceiverDirectionHasRecv(*transceiver->fired_direction())) { + RTC_LOG(LS_INFO) << "Processing the addition of a remote track for MID=" + << content->name << "."; + // Since the transceiver is passed to the user in an + // OnTrack event, we must use the proxied transceiver. + now_receiving_transceivers.push_back(transceiver_ext); + } + } + // 2.2.8.1.9: If direction is "sendonly" or "inactive", and transceiver's + // [[FiredDirection]] slot is either "sendrecv" or "recvonly", process the + // removal of a remote track for the media description, given transceiver, + // removeList, and muteTracks. + if (!RtpTransceiverDirectionHasRecv(local_direction) && + (transceiver->fired_direction() && + RtpTransceiverDirectionHasRecv(*transceiver->fired_direction()))) { + ProcessRemovalOfRemoteTrack(transceiver_ext, &remove_list, + &removed_streams); + } + // 2.2.8.1.10: Set transceiver's [[FiredDirection]] slot to direction. + if (sdp_type == SdpType::kOffer) { + // Remember the previous fired direction if this is a remote offer. This + // makes it possible to rollback modifications to [[FiredDirection]], + // which is necessary for "ontrack" to fire in or after rollback. 
+ transceivers() + ->StableState(transceiver_ext) + ->SetFiredDirection(transceiver->fired_direction()); + } + transceiver->set_fired_direction(local_direction); + // 2.2.8.1.11: If description is of type "answer" or "pranswer", then run + // the following steps: + if (sdp_type == SdpType::kPrAnswer || sdp_type == SdpType::kAnswer) { + // 2.2.8.1.11.1: Set transceiver's [[CurrentDirection]] slot to + // direction. + transceiver->set_current_direction(local_direction); + // 2.2.8.1.11.[3-6]: Set the transport internal slots. + if (transceiver->mid()) { + auto dtls_transport = LookupDtlsTransportByMid( + context_->network_thread(), transport_controller_s(), + *transceiver->mid()); + transceiver->sender_internal()->set_transport(dtls_transport); + transceiver->receiver_internal()->set_transport(dtls_transport); + } + } + // 2.2.8.1.12: If the media description is rejected, and transceiver is + // not already stopped, stop the RTCRtpTransceiver transceiver. + if (content->rejected && !transceiver->stopped()) { + RTC_LOG(LS_INFO) << "Stopping transceiver for MID=" << content->name + << " since the media section was rejected."; + transceiver->StopTransceiverProcedure(); + } + if (!content->rejected && RtpTransceiverDirectionHasRecv(local_direction)) { + if (!media_desc->streams().empty() && + media_desc->streams()[0].has_ssrcs()) { + uint32_t ssrc = media_desc->streams()[0].first_ssrc(); + transceiver->receiver_internal()->SetupMediaChannel(ssrc); } else { - bool default_video_track_needed = - !remote_peer_supports_msid_ && - RtpTransceiverDirectionHasSend(video_desc->direction()); - UpdateRemoteSendersList(GetActiveStreams(video_desc), - default_video_track_needed, video_desc->type(), - new_streams); + transceiver->receiver_internal()->SetupUnsignaledMediaChannel(); } } + } + // Once all processing has finished, fire off callbacks. + auto observer = pc_->Observer(); + for (const auto& transceiver : now_receiving_transceivers) { + pc_->legacy_stats()->AddTrack(transceiver->receiver()->track().get()); + observer->OnTrack(transceiver); + observer->OnAddTrack(transceiver->receiver(), + transceiver->receiver()->streams()); + } + for (const auto& stream : added_streams) { + observer->OnAddStream(stream); + } + for (const auto& transceiver : remove_list) { + observer->OnRemoveTrack(transceiver->receiver()); + } + for (const auto& stream : removed_streams) { + observer->OnRemoveStream(stream); + } +} + +void SdpOfferAnswerHandler::PlanBUpdateSendersAndReceivers( + const cricket::ContentInfo* audio_content, + const cricket::AudioContentDescription* audio_desc, + const cricket::ContentInfo* video_content, + const cricket::VideoContentDescription* video_desc) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + + // We wait to signal new streams until we finish processing the description, + // since only at that point will new streams have all their tracks. + rtc::scoped_refptr new_streams(StreamCollection::Create()); - // Iterate new_streams and notify the observer about new MediaStreams. - auto observer = pc_->Observer(); - for (size_t i = 0; i < new_streams->count(); ++i) { - MediaStreamInterface* new_stream = new_streams->at(i); - pc_->stats()->AddStream(new_stream); - observer->OnAddStream( - rtc::scoped_refptr(new_stream)); + // TODO(steveanton): When removing RTP senders/receivers in response to a + // rejected media section, there is some cleanup logic that expects the + // voice/ video channel to still be set. 
But in this method the voice/video + // channel would have been destroyed by the SetRemoteDescription caller + // above so the cleanup that relies on them fails to run. The RemoveSenders + // calls should be moved to right before the DestroyChannel calls to fix + // this. + + // Find all audio rtp streams and create corresponding remote AudioTracks + // and MediaStreams. + if (audio_content) { + if (audio_content->rejected) { + RemoveSenders(cricket::MEDIA_TYPE_AUDIO); + } else { + bool default_audio_track_needed = + !remote_peer_supports_msid_ && + RtpTransceiverDirectionHasSend(audio_desc->direction()); + UpdateRemoteSendersList(GetActiveStreams(audio_desc), + default_audio_track_needed, audio_desc->type(), + new_streams.get()); } + } - UpdateEndedRemoteMediaStreams(); + // Find all video rtp streams and create corresponding remote VideoTracks + // and MediaStreams. + if (video_content) { + if (video_content->rejected) { + RemoveSenders(cricket::MEDIA_TYPE_VIDEO); + } else { + bool default_video_track_needed = + !remote_peer_supports_msid_ && + RtpTransceiverDirectionHasSend(video_desc->direction()); + UpdateRemoteSendersList(GetActiveStreams(video_desc), + default_video_track_needed, video_desc->type(), + new_streams.get()); + } } - if (type == SdpType::kAnswer && - local_ice_credentials_to_replace_->SatisfiesIceRestart( - *current_local_description_)) { - local_ice_credentials_to_replace_->ClearIceCredentials(); + // Iterate new_streams and notify the observer about new MediaStreams. + auto observer = pc_->Observer(); + for (size_t i = 0; i < new_streams->count(); ++i) { + MediaStreamInterface* new_stream = new_streams->at(i); + pc_->legacy_stats()->AddStream(new_stream); + observer->OnAddStream(rtc::scoped_refptr(new_stream)); } - return RTCError::OK(); + UpdateEndedRemoteMediaStreams(); } void SdpOfferAnswerHandler::DoSetLocalDescription( @@ -1944,10 +2192,8 @@ void SdpOfferAnswerHandler::DoSetLocalDescription( // TODO(deadbeef): We already had to hop to the network thread for // MaybeStartGathering... - pc_->network_thread()->Invoke( - RTC_FROM_HERE, [this] { port_allocator()->DiscardCandidatePool(); }); - // Make UMA notes about what was agreed to. - ReportNegotiatedSdpSemantics(*local_description()); + context_->network_thread()->BlockingCall( + [this] { port_allocator()->DiscardCandidatePool(); }); } observer->OnSetLocalDescriptionComplete(RTCError::OK()); @@ -1971,7 +2217,7 @@ void SdpOfferAnswerHandler::DoSetLocalDescription( // MaybeStartGathering needs to be called after informing the observer so that // we don't signal any candidates before signaling that SetLocalDescription // completed. 
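The Invoke-to-BlockingCall change seen just above for DiscardCandidatePool() recurs throughout this file. A minimal sketch of the two forms, with the thread and allocator pointers as placeholders that are not part of the diff:

    #include "p2p/base/port_allocator.h"
    #include "rtc_base/thread.h"

    // Sketch only: `network_thread` and `allocator` stand in for the members
    // used by SdpOfferAnswerHandler.
    void DiscardPoolOnNetworkThread(rtc::Thread* network_thread,
                                    cricket::PortAllocator* allocator) {
      // Old form (removed above): explicit template argument plus source
      // location.
      //   network_thread->Invoke<void>(
      //       RTC_FROM_HERE, [&] { allocator->DiscardCandidatePool(); });

      // New form used throughout this diff: the return type is deduced and no
      // explicit RTC_FROM_HERE is needed; the call still blocks the calling
      // thread until the lambda has finished on `network_thread`.
      network_thread->BlockingCall([&] { allocator->DiscardCandidatePool(); });
    }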
- transport_controller()->MaybeStartGathering(); + transport_controller_s()->MaybeStartGathering(); } void SdpOfferAnswerHandler::DoCreateOffer( @@ -1989,7 +2235,8 @@ void SdpOfferAnswerHandler::DoCreateOffer( std::string error = "CreateOffer called when PeerConnection is closed."; RTC_LOG(LS_ERROR) << error; pc_->message_handler()->PostCreateSessionDescriptionFailure( - observer, RTCError(RTCErrorType::INVALID_STATE, std::move(error))); + observer.get(), + RTCError(RTCErrorType::INVALID_STATE, std::move(error))); return; } @@ -1999,7 +2246,7 @@ void SdpOfferAnswerHandler::DoCreateOffer( std::string error_message = GetSessionErrorMsg(); RTC_LOG(LS_ERROR) << "CreateOffer: " << error_message; pc_->message_handler()->PostCreateSessionDescriptionFailure( - observer, + observer.get(), RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); return; } @@ -2008,7 +2255,8 @@ void SdpOfferAnswerHandler::DoCreateOffer( std::string error = "CreateOffer called with invalid options."; RTC_LOG(LS_ERROR) << error; pc_->message_handler()->PostCreateSessionDescriptionFailure( - observer, RTCError(RTCErrorType::INVALID_PARAMETER, std::move(error))); + observer.get(), + RTCError(RTCErrorType::INVALID_PARAMETER, std::move(error))); return; } @@ -2018,14 +2266,15 @@ void SdpOfferAnswerHandler::DoCreateOffer( RTCError error = HandleLegacyOfferOptions(options); if (!error.ok()) { pc_->message_handler()->PostCreateSessionDescriptionFailure( - observer, std::move(error)); + observer.get(), std::move(error)); return; } } cricket::MediaSessionOptions session_options; GetOptionsForOffer(options, &session_options); - webrtc_session_desc_factory_->CreateOffer(observer, options, session_options); + webrtc_session_desc_factory_->CreateOffer(observer.get(), options, + session_options); } void SdpOfferAnswerHandler::CreateAnswer( @@ -2073,7 +2322,7 @@ void SdpOfferAnswerHandler::DoCreateAnswer( std::string error_message = GetSessionErrorMsg(); RTC_LOG(LS_ERROR) << "CreateAnswer: " << error_message; pc_->message_handler()->PostCreateSessionDescriptionFailure( - observer, + observer.get(), RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); return; } @@ -2085,7 +2334,8 @@ void SdpOfferAnswerHandler::DoCreateAnswer( "have-remote-offer or have-local-pranswer."; RTC_LOG(LS_ERROR) << error; pc_->message_handler()->PostCreateSessionDescriptionFailure( - observer, RTCError(RTCErrorType::INVALID_STATE, std::move(error))); + observer.get(), + RTCError(RTCErrorType::INVALID_STATE, std::move(error))); return; } @@ -2109,114 +2359,50 @@ void SdpOfferAnswerHandler::DoCreateAnswer( cricket::MediaSessionOptions session_options; GetOptionsForAnswer(options, &session_options); - webrtc_session_desc_factory_->CreateAnswer(observer, session_options); + webrtc_session_desc_factory_->CreateAnswer(observer.get(), session_options); } void SdpOfferAnswerHandler::DoSetRemoteDescription( - std::unique_ptr desc, - rtc::scoped_refptr observer) { + std::unique_ptr operation) { RTC_DCHECK_RUN_ON(signaling_thread()); - TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoSetRemoteDescription"); - - if (!observer) { - RTC_LOG(LS_ERROR) << "SetRemoteDescription - observer is NULL."; - return; - } + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoSetRemoteDescription"); - if (!desc) { - observer->OnSetRemoteDescriptionComplete(RTCError( - RTCErrorType::INVALID_PARAMETER, "SessionDescription is NULL.")); + if (!operation->ok()) return; - } - // If a session error has occurred the PeerConnection is in a possibly - // inconsistent state 
so fail right away. - if (session_error() != SessionError::kNone) { - std::string error_message = GetSessionErrorMsg(); - RTC_LOG(LS_ERROR) << "SetRemoteDescription: " << error_message; - observer->OnSetRemoteDescriptionComplete( - RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); + if (operation->HaveSessionError()) return; - } - if (IsUnifiedPlan()) { - if (pc_->configuration()->enable_implicit_rollback) { - if (desc->GetType() == SdpType::kOffer && - signaling_state() == PeerConnectionInterface::kHaveLocalOffer) { - Rollback(desc->GetType()); - } - } - // Explicit rollback. - if (desc->GetType() == SdpType::kRollback) { - observer->OnSetRemoteDescriptionComplete(Rollback(desc->GetType())); - return; - } - } else if (desc->GetType() == SdpType::kRollback) { - observer->OnSetRemoteDescriptionComplete( - RTCError(RTCErrorType::UNSUPPORTED_OPERATION, - "Rollback not supported in Plan B")); + + if (operation->MaybeRollback()) return; - } - if (desc->GetType() == SdpType::kOffer || - desc->GetType() == SdpType::kAnswer) { - // Report to UMA the format of the received offer or answer. - pc_->ReportSdpFormatReceived(*desc); - pc_->ReportSdpBundleUsage(*desc); - } - // Handle remote descriptions missing a=mid lines for interop with legacy end - // points. - FillInMissingRemoteMids(desc->description()); + operation->ReportOfferAnswerUma(); - std::map bundle_groups_by_mid = - GetBundleGroupsByMid(desc->description()); - RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_REMOTE, - bundle_groups_by_mid); - if (!error.ok()) { - std::string error_message = GetSetDescriptionErrorMessage( - cricket::CS_REMOTE, desc->GetType(), error); - RTC_LOG(LS_ERROR) << error_message; - observer->OnSetRemoteDescriptionComplete( - RTCError(error.type(), std::move(error_message))); + // Handle remote descriptions missing a=mid lines for interop with legacy + // end points. + FillInMissingRemoteMids(operation->description()); + if (!operation->IsDescriptionValid()) return; - } - // Grab the description type before moving ownership to - // ApplyRemoteDescription, which may destroy it before returning. - const SdpType type = desc->GetType(); - - error = ApplyRemoteDescription(std::move(desc), bundle_groups_by_mid); - // `desc` may be destroyed at this point. + ApplyRemoteDescription(std::move(operation)); +} - if (!error.ok()) { - // If ApplyRemoteDescription fails, the PeerConnection could be in an - // inconsistent state, so act conservatively here and set the session error - // so that future calls to SetLocalDescription/SetRemoteDescription fail. - SetSessionError(SessionError::kContent, error.message()); - std::string error_message = - GetSetDescriptionErrorMessage(cricket::CS_REMOTE, type, error); - RTC_LOG(LS_ERROR) << error_message; - observer->OnSetRemoteDescriptionComplete( - RTCError(error.type(), std::move(error_message))); - return; - } +// Called after a DoSetRemoteDescription operation completes. +void SdpOfferAnswerHandler::SetRemoteDescriptionPostProcess(bool was_answer) { RTC_DCHECK(remote_description()); - if (type == SdpType::kAnswer) { - RemoveStoppedTransceivers(); + if (was_answer) { // TODO(deadbeef): We already had to hop to the network thread for // MaybeStartGathering... - pc_->network_thread()->Invoke( - RTC_FROM_HERE, [this] { port_allocator()->DiscardCandidatePool(); }); - // Make UMA notes about what was agreed to. 
- ReportNegotiatedSdpSemantics(*remote_description()); + context_->network_thread()->BlockingCall( + [this] { port_allocator()->DiscardCandidatePool(); }); } - observer->OnSetRemoteDescriptionComplete(RTCError::OK()); pc_->NoteUsageEvent(UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED); // Check if negotiation is needed. We must do this after informing the - // observer that SetRemoteDescription() has completed to ensure negotiation is - // not needed prior to the promise resolving. + // observer that SetRemoteDescription() has completed to ensure negotiation + // is not needed prior to the promise resolving. if (IsUnifiedPlan()) { bool was_negotiation_needed = is_negotiation_needed_; UpdateNegotiationNeeded(); @@ -2238,8 +2424,8 @@ void SdpOfferAnswerHandler::SetAssociatedRemoteStreams( RTC_DCHECK_RUN_ON(signaling_thread()); std::vector> media_streams; for (const std::string& stream_id : stream_ids) { - rtc::scoped_refptr stream = - remote_streams_->find(stream_id); + rtc::scoped_refptr stream( + remote_streams_->find(stream_id)); if (!stream) { stream = MediaStreamProxy::Create(rtc::Thread::Current(), MediaStream::Create(stream_id)); @@ -2261,10 +2447,10 @@ void SdpOfferAnswerHandler::SetAssociatedRemoteStreams( } std::vector> previous_streams = receiver->streams(); - // SetStreams() will add/remove the receiver's track to/from the streams. This - // differs from the spec - the spec uses an "addList" and "removeList" to - // update the stream-track relationships in a later step. We do this earlier, - // changing the order of things, but the end-result is the same. + // SetStreams() will add/remove the receiver's track to/from the streams. + // This differs from the spec - the spec uses an "addList" and "removeList" + // to update the stream-track relationships in a later step. We do this + // earlier, changing the order of things, but the end-result is the same. // TODO(hbos): When we remove remote_streams(), use set_stream_ids() // instead. https://crbug.com/webrtc/9480 receiver->SetStreams(media_streams); @@ -2275,8 +2461,8 @@ bool SdpOfferAnswerHandler::AddIceCandidate( const IceCandidateInterface* ice_candidate) { const AddIceCandidateResult result = AddIceCandidateInternal(ice_candidate); NoteAddIceCandidateResult(result); - // If the return value is kAddIceCandidateFailNotReady, the candidate has been - // added, although not 'ready', but that's a success. + // If the return value is kAddIceCandidateFailNotReady, the candidate has + // been added, although not 'ready', but that's a success. return result == kAddIceCandidateSuccess || result == kAddIceCandidateFailNotReady; } @@ -2332,9 +2518,9 @@ void SdpOfferAnswerHandler::AddIceCandidate( std::function callback) { TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::AddIceCandidate"); RTC_DCHECK_RUN_ON(signaling_thread()); - // Chain this operation. If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. + // Chain this operation. If asynchronous operations are pending on the + // chain, this operation will be queued to be invoked, otherwise the + // contents of the lambda will execute immediately. 
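A rough sketch of the chaining contract described in the comment above; the callback parameter type matches what the lambda below receives, everything else is illustrative rather than taken from the diff:

    // Sketch of the contract only; real operations capture whatever state
    // they need.
    operations_chain_->ChainOperation(
        [](std::function<void()> operations_chain_callback) {
          // The work for this step goes here and may finish asynchronously.
          // The next chained operation does not start until this callback has
          // been invoked, and it must be invoked exactly once.
          operations_chain_callback();
        });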
operations_chain_->ChainOperation( [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), candidate = std::move(candidate), callback = std::move(callback)]( @@ -2345,18 +2531,42 @@ void SdpOfferAnswerHandler::AddIceCandidate( : kAddIceCandidateFailClosed; NoteAddIceCandidateResult(result); operations_chain_callback(); - if (result == kAddIceCandidateFailClosed) { - callback(RTCError( - RTCErrorType::INVALID_STATE, - "AddIceCandidate failed because the session was shut down")); - } else if (result != kAddIceCandidateSuccess && - result != kAddIceCandidateFailNotReady) { - // Fail with an error type and message consistent with Chromium. - // TODO(hbos): Fail with error types according to spec. - callback(RTCError(RTCErrorType::UNSUPPORTED_OPERATION, - "Error processing ICE candidate")); - } else { - callback(RTCError::OK()); + switch (result) { + case AddIceCandidateResult::kAddIceCandidateSuccess: + case AddIceCandidateResult::kAddIceCandidateFailNotReady: + // Success! + callback(RTCError::OK()); + break; + case AddIceCandidateResult::kAddIceCandidateFailClosed: + // Note that the spec says to just abort without resolving the + // promise in this case, but this layer must return an RTCError. + callback(RTCError( + RTCErrorType::INVALID_STATE, + "AddIceCandidate failed because the session was shut down")); + break; + case AddIceCandidateResult::kAddIceCandidateFailNoRemoteDescription: + // Spec: "If remoteDescription is null return a promise rejected + // with a newly created InvalidStateError." + callback(RTCError(RTCErrorType::INVALID_STATE, + "The remote description was null")); + break; + case AddIceCandidateResult::kAddIceCandidateFailNullCandidate: + // TODO(https://crbug.com/935898): Handle end-of-candidates instead + // of treating null candidate as an error. + callback(RTCError(RTCErrorType::UNSUPPORTED_OPERATION, + "Error processing ICE candidate")); + break; + case AddIceCandidateResult::kAddIceCandidateFailNotValid: + case AddIceCandidateResult::kAddIceCandidateFailInAddition: + case AddIceCandidateResult::kAddIceCandidateFailNotUsable: + // Spec: "If candidate could not be successfully added [...] Reject + // p with a newly created OperationError and abort these steps." + // UNSUPPORTED_OPERATION maps to OperationError. + callback(RTCError(RTCErrorType::UNSUPPORTED_OPERATION, + "Error processing ICE candidate")); + break; + default: + RTC_DCHECK_NOTREACHED(); } }); } @@ -2391,7 +2601,7 @@ bool SdpOfferAnswerHandler::RemoveIceCandidates( } // Remove the candidates from the transport controller. - RTCError error = transport_controller()->RemoveRemoteCandidates(candidates); + RTCError error = transport_controller_s()->RemoveRemoteCandidates(candidates); if (!error.ok()) { RTC_LOG(LS_ERROR) << "RemoveIceCandidates: Error when removing remote candidates: " @@ -2483,8 +2693,8 @@ RTCError SdpOfferAnswerHandler::UpdateSessionState( bundle_groups_by_mid) { RTC_DCHECK_RUN_ON(signaling_thread()); - // If there's already a pending error then no state transition should happen. - // But all call-sites should be verifying this before calling us! + // If there's already a pending error then no state transition should + // happen. But all call-sites should be verifying this before calling us! RTC_DCHECK(session_error() == SessionError::kNone); // If this is answer-ish we're ready to let media flow. 
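For context, the result-to-error mapping in the switch above surfaces to applications through the callback-based AddIceCandidate() overload. A caller-side sketch, assuming `pc`, `sdp_mid`, `sdp_mline_index` and `sdp` come from the application's signaling layer:

    // Sketch: `pc` is an rtc::scoped_refptr<webrtc::PeerConnectionInterface>.
    void OnRemoteCandidate(
        rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc,
        const std::string& sdp_mid,
        int sdp_mline_index,
        const std::string& sdp) {
      webrtc::SdpParseError parse_error;
      std::unique_ptr<webrtc::IceCandidateInterface> candidate(
          webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp,
                                     &parse_error));
      if (!candidate) {
        RTC_LOG(LS_WARNING) << "Candidate ignored: " << parse_error.description;
        return;
      }
      pc->AddIceCandidate(std::move(candidate), [](webrtc::RTCError error) {
        // Both kAddIceCandidateSuccess and kAddIceCandidateFailNotReady arrive
        // here as RTCError::OK(); "not ready" is still treated as success.
        if (!error.ok()) {
          RTC_LOG(LS_WARNING) << "AddIceCandidate failed: " << error.message();
        }
      });
    }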
@@ -2505,7 +2715,9 @@ RTCError SdpOfferAnswerHandler::UpdateSessionState( } else { RTC_DCHECK(type == SdpType::kAnswer); ChangeSignalingState(PeerConnectionInterface::kStable); - transceivers()->DiscardStableStates(); + if (ConfiguredForMedia()) { + transceivers()->DiscardStableStates(); + } } // Update internal objects according to the session description's media @@ -2530,10 +2742,10 @@ bool SdpOfferAnswerHandler::ShouldFireNegotiationNeededEvent( // one obsolete. if (!operations_chain_->IsEmpty()) { // Since we just suppressed an event that would have been fired, if - // negotiation is still needed by the time the chain becomes empty again, we - // must make sure to generate another event if negotiation is needed then. - // This happens when `is_negotiation_needed_` goes from false to true, so we - // set it to false until UpdateNegotiationNeeded() is called. + // negotiation is still needed by the time the chain becomes empty again, + // we must make sure to generate another event if negotiation is needed + // then. This happens when `is_negotiation_needed_` goes from false to + // true, so we set it to false until UpdateNegotiationNeeded() is called. is_negotiation_needed_ = false; update_negotiation_needed_on_empty_chain_ = true; return false; @@ -2573,11 +2785,12 @@ bool SdpOfferAnswerHandler::AddStream(MediaStreamInterface* local_stream) { if (pc_->IsClosed()) { return false; } - if (!CanAddLocalMediaStream(local_streams_, local_stream)) { + if (!CanAddLocalMediaStream(local_streams_.get(), local_stream)) { return false; } - local_streams_->AddStream(local_stream); + local_streams_->AddStream( + rtc::scoped_refptr(local_stream)); auto observer = std::make_unique( local_stream, [this](AudioTrackInterface* audio_track, @@ -2609,7 +2822,7 @@ bool SdpOfferAnswerHandler::AddStream(MediaStreamInterface* local_stream) { rtp_manager()->AddVideoTrack(track.get(), local_stream); } - pc_->stats()->AddStream(local_stream); + pc_->legacy_stats()->AddStream(local_stream); UpdateNegotiationNeeded(); return true; } @@ -2691,6 +2904,8 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { } RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(IsUnifiedPlan()); + std::vector> + now_receiving_transceivers; std::vector> all_added_streams; std::vector> all_removed_streams; std::vector> removed_receivers; @@ -2699,6 +2914,22 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { auto transceiver = transceivers_stable_state_pair.first; auto state = transceivers_stable_state_pair.second; + if (state.did_set_fired_direction()) { + // If this rollback triggers going from not receiving to receving again, + // we need to fire "ontrack". 
+ bool previously_fired_direction_is_recv = + transceiver->fired_direction().has_value() && + RtpTransceiverDirectionHasRecv(*transceiver->fired_direction()); + bool currently_fired_direction_is_recv = + state.fired_direction().has_value() && + RtpTransceiverDirectionHasRecv(state.fired_direction().value()); + if (!previously_fired_direction_is_recv && + currently_fired_direction_is_recv) { + now_receiving_transceivers.push_back(transceiver); + } + transceiver->internal()->set_fired_direction(state.fired_direction()); + } + if (state.remote_stream_ids()) { std::vector> added_streams; std::vector> removed_streams; @@ -2715,8 +2946,12 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { } } + // Due to the above `continue` statement, the below code only runs if there + // is a change in mid association (has_m_section), if the transceiver was + // newly created (newly_created) or if remote streams were not set. + RTC_DCHECK(transceiver->internal()->mid().has_value()); - DestroyTransceiverChannel(transceiver); + transceiver->internal()->ClearChannel(); if (signaling_state() == PeerConnectionInterface::kHaveRemoteOffer && transceiver->receiver()) { @@ -2726,6 +2961,7 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { if (transceiver->internal()->reused_for_addtrack()) { transceiver->internal()->set_created_by_addtrack(true); } else { + transceiver->internal()->StopTransceiverProcedure(); transceivers()->Remove(transceiver); } } @@ -2738,7 +2974,7 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { transceiver->internal()->set_mid(state.mid()); transceiver->internal()->set_mline_index(state.mline_index()); } - RTCError e = transport_controller()->RollbackTransports(); + RTCError e = transport_controller_s()->RollbackTransports(); if (!e.ok()) { return e; } @@ -2748,6 +2984,11 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { ChangeSignalingState(PeerConnectionInterface::kStable); // Once all processing has finished, fire off callbacks. + for (const auto& transceiver : now_receiving_transceivers) { + pc_->Observer()->OnTrack(transceiver); + pc_->Observer()->OnAddTrack(transceiver->receiver(), + transceiver->receiver()->streams()); + } for (const auto& receiver : removed_receivers) { pc_->Observer()->OnRemoveTrack(receiver); } @@ -2758,8 +2999,8 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { pc_->Observer()->OnRemoveStream(stream); } - // The assumption is that in case of implicit rollback UpdateNegotiationNeeded - // gets called in SetRemoteDescription. + // The assumption is that in case of implicit rollback + // UpdateNegotiationNeeded gets called in SetRemoteDescription. if (desc_type == SdpType::kRollback) { UpdateNegotiationNeeded(); if (is_negotiation_needed_) { @@ -2781,9 +3022,9 @@ void SdpOfferAnswerHandler::OnOperationsChainEmpty() { if (pc_->IsClosed() || !update_negotiation_needed_on_empty_chain_) return; update_negotiation_needed_on_empty_chain_ = false; - // Firing when chain is empty is only supported in Unified Plan to avoid Plan - // B regressions. (In Plan B, onnegotiationneeded is already broken anyway, so - // firing it even more might just be confusing.) + // Firing when chain is empty is only supported in Unified Plan to avoid + // Plan B regressions. (In Plan B, onnegotiationneeded is already broken + // anyway, so firing it even more might just be confusing.) 
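The rollback check above reduces to a small predicate; a standalone sketch of the same rule, with a hypothetical helper name and the receive test written out against the RtpTransceiverDirection enum:

    #include "absl/types/optional.h"
    #include "api/rtp_transceiver_direction.h"

    // Hypothetical helper: re-fire "ontrack" when the fired direction being
    // restored receives media but the one being rolled back did not.
    bool ShouldRefireOnTrack(
        absl::optional<webrtc::RtpTransceiverDirection> rolled_back,
        absl::optional<webrtc::RtpTransceiverDirection> restored) {
      auto has_recv = [](absl::optional<webrtc::RtpTransceiverDirection> d) {
        return d == webrtc::RtpTransceiverDirection::kSendRecv ||
               d == webrtc::RtpTransceiverDirection::kRecvOnly;
      };
      return !has_recv(rolled_back) && has_recv(restored);
    }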
if (IsUnifiedPlan()) { UpdateNegotiationNeeded(); } @@ -2813,7 +3054,8 @@ bool SdpOfferAnswerHandler::NeedsIceRestart( absl::optional SdpOfferAnswerHandler::GetDtlsRole( const std::string& mid) const { - return transport_controller()->GetDtlsRole(mid); + RTC_DCHECK_RUN_ON(signaling_thread()); + return transport_controller_s()->GetDtlsRole(mid); } void SdpOfferAnswerHandler::UpdateNegotiationNeeded() { @@ -2825,8 +3067,8 @@ void SdpOfferAnswerHandler::UpdateNegotiationNeeded() { } // In the spec, a task is queued here to run the following steps - this is - // meant to ensure we do not fire onnegotiationneeded prematurely if multiple - // changes are being made at once. In order to support Chromium's + // meant to ensure we do not fire onnegotiationneeded prematurely if + // multiple changes are being made at once. In order to support Chromium's // implementation where the JavaScript representation of the PeerConnection // lives on a separate thread though, the queuing of a task is instead // performed by the PeerConnectionObserver posting from the signaling thread @@ -2849,8 +3091,8 @@ void SdpOfferAnswerHandler::UpdateNegotiationNeeded() { // "stable", as part of the steps for setting an RTCSessionDescription. // If the result of checking if negotiation is needed is false, clear the - // negotiation-needed flag by setting connection's [[NegotiationNeeded]] slot - // to false, and abort these steps. + // negotiation-needed flag by setting connection's [[NegotiationNeeded]] + // slot to false, and abort these steps. bool is_negotiation_needed = CheckIfNegotiationIsNeeded(); if (!is_negotiation_needed) { is_negotiation_needed_ = false; @@ -2873,16 +3115,16 @@ void SdpOfferAnswerHandler::UpdateNegotiationNeeded() { // If connection's [[NegotiationNeeded]] slot is false, abort these steps. // Fire an event named negotiationneeded at connection. pc_->Observer()->OnRenegotiationNeeded(); - // Fire the spec-compliant version; when ShouldFireNegotiationNeededEvent() is - // used in the task queued by the observer, this event will only fire when the - // chain is empty. + // Fire the spec-compliant version; when ShouldFireNegotiationNeededEvent() + // is used in the task queued by the observer, this event will only fire + // when the chain is empty. GenerateNegotiationNeededEvent(); } bool SdpOfferAnswerHandler::CheckIfNegotiationIsNeeded() { RTC_DCHECK_RUN_ON(signaling_thread()); - // 1. If any implementation-specific negotiation is required, as described at - // the start of this section, return true. + // 1. If any implementation-specific negotiation is required, as described + // at the start of this section, return true. // 2. If connection.[[LocalIceCredentialsToReplace]] is not empty, return // true. @@ -2901,6 +3143,9 @@ bool SdpOfferAnswerHandler::CheckIfNegotiationIsNeeded() { if (!cricket::GetFirstDataContent(description->description()->contents())) return true; } + if (!ConfiguredForMedia()) { + return false; + } // 5. For each transceiver in connection's set of transceivers, perform the // following checks: @@ -3012,7 +3257,6 @@ bool SdpOfferAnswerHandler::CheckIfNegotiationIsNeeded() { } } } - // If all the preceding checks were performed and true was not returned, // nothing remains to be negotiated; return false. 
return false; @@ -3029,22 +3273,20 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( cricket::ContentSource source, const std::map& bundle_groups_by_mid) { - if (session_error() != SessionError::kNone) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); - } + // An assumption is that a check for session error is done at a higher level. + RTC_DCHECK_EQ(SessionError::kNone, session_error()); if (!sdesc || !sdesc->description()) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidSdp); + return RTCError(RTCErrorType::INVALID_PARAMETER, kInvalidSdp); } SdpType type = sdesc->GetType(); if ((source == cricket::CS_LOCAL && !ExpectSetLocalDescription(type)) || (source == cricket::CS_REMOTE && !ExpectSetRemoteDescription(type))) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INVALID_STATE, - (rtc::StringBuilder("Called in wrong state: ") - << PeerConnectionInterface::AsString(signaling_state())) - .Release()); + return RTCError(RTCErrorType::INVALID_STATE, + (rtc::StringBuilder("Called in wrong state: ") + << PeerConnectionInterface::AsString(signaling_state())) + .Release()); } RTCError error = ValidateMids(*sdesc->description()); @@ -3065,14 +3307,12 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( // Verify ice-ufrag and ice-pwd. if (!VerifyIceUfragPwdPresent(sdesc->description(), bundle_groups_by_mid)) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - kSdpWithoutIceUfragPwd); + return RTCError(RTCErrorType::INVALID_PARAMETER, kSdpWithoutIceUfragPwd); } if (!pc_->ValidateBundleSettings(sdesc->description(), bundle_groups_by_mid)) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - kBundleWithoutRtcpMux); + return RTCError(RTCErrorType::INVALID_PARAMETER, kBundleWithoutRtcpMux); } // TODO(skvlad): When the local rtcp-mux policy is Require, reject any @@ -3080,25 +3320,25 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( // Verify m-lines in Answer when compared against Offer. if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { - // With an answer we want to compare the new answer session description with - // the offer's session description from the current negotiation. + // With an answer we want to compare the new answer session description + // with the offer's session description from the current negotiation. const cricket::SessionDescription* offer_desc = (source == cricket::CS_LOCAL) ? remote_description()->description() : local_description()->description(); if (!MediaSectionsHaveSameCount(*offer_desc, *sdesc->description()) || !MediaSectionsInSameOrder(*offer_desc, nullptr, *sdesc->description(), type)) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - kMlineMismatchInAnswer); + return RTCError(RTCErrorType::INVALID_PARAMETER, kMlineMismatchInAnswer); } } else { // The re-offers should respect the order of m= sections in current // description. See RFC3264 Section 8 paragraph 4 for more details. - // With a re-offer, either the current local or current remote descriptions - // could be the most up to date, so we would like to check against both of - // them if they exist. It could be the case that one of them has a 0 port - // for a media section, but the other does not. This is important to check - // against in the case that we are recycling an m= section. + // With a re-offer, either the current local or current remote + // descriptions could be the most up to date, so we would like to check + // against both of them if they exist. 
It could be the case that one of + // them has a 0 port for a media section, but the other does not. This is + // important to check against in the case that we are recycling an m= + // section. const cricket::SessionDescription* current_desc = nullptr; const cricket::SessionDescription* secondary_current_desc = nullptr; if (local_description()) { @@ -3112,8 +3352,8 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( if (current_desc && !MediaSectionsInSameOrder(*current_desc, secondary_current_desc, *sdesc->description(), type)) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - kMlineMismatchInSubsequentOffer); + return RTCError(RTCErrorType::INVALID_PARAMETER, + kMlineMismatchInSubsequentOffer); } } @@ -3128,10 +3368,10 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( if ((desc.type() == cricket::MEDIA_TYPE_AUDIO || desc.type() == cricket::MEDIA_TYPE_VIDEO) && desc.streams().size() > 1u) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Media section has more than one track specified " - "with a=ssrc lines which is not supported with " - "Unified Plan."); + return RTCError( + RTCErrorType::INVALID_PARAMETER, + "Media section has more than one track specified with a=ssrc lines " + "which is not supported with Unified Plan."); } } } @@ -3153,14 +3393,15 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( if (new_session.GetType() == SdpType::kOffer) { // If the BUNDLE policy is max-bundle, then we know for sure that all - // transports will be bundled from the start. Return an error if max-bundle - // is specified but the session description does not have a BUNDLE group. + // transports will be bundled from the start. Return an error if + // max-bundle is specified but the session description does not have a + // BUNDLE group. if (pc_->configuration()->bundle_policy == PeerConnectionInterface::kBundlePolicyMaxBundle && bundle_groups_by_mid.empty()) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "max-bundle configured but session description " - "has no BUNDLE group"); + return RTCError( + RTCErrorType::INVALID_PARAMETER, + "max-bundle configured but session description has no BUNDLE group"); } } @@ -3190,9 +3431,9 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( AssociateTransceiver(source, new_session.GetType(), i, new_content, old_local_content, old_remote_content); if (!transceiver_or_error.ok()) { - // In the case where a transceiver is rejected locally, we don't - // expect to find a transceiver, but might find it in the case - // where state is still "stopping", not "stopped". + // In the case where a transceiver is rejected locally prior to being + // associated, we don't expect to find a transceiver, but might find it + // in the case where state is still "stopping", not "stopped". if (new_content.rejected) { continue; } @@ -3201,6 +3442,36 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( auto transceiver = transceiver_or_error.MoveValue(); RTCError error = UpdateTransceiverChannel(transceiver, new_content, bundle_group); + // Handle locally rejected content. This code path is only needed for apps + // that SDP munge. Remote rejected content is handled in + // ApplyRemoteDescriptionUpdateTransceiverState(). + if (source == cricket::ContentSource::CS_LOCAL && new_content.rejected) { + // Local offer. + if (new_session.GetType() == SdpType::kOffer) { + // If the RtpTransceiver API was used, it would already have made the + // transceiver stopping. 
But if the rejection was caused by SDP + // munging then we need to ensure the transceiver is stopping here. + if (!transceiver->internal()->stopping()) { + transceiver->internal()->StopStandard(); + } + RTC_DCHECK(transceiver->internal()->stopping()); + } else { + // Local answer. + RTC_DCHECK(new_session.GetType() == SdpType::kAnswer || + new_session.GetType() == SdpType::kPrAnswer); + // When RtpTransceiver API is used, rejection happens in the offer and + // the transceiver will already be stopped at local answer time + // (calling stop between SRD(offer) and SLD(answer) would not reject + // the content in the answer - instead this would trigger a follow-up + // O/A exchange). So if the content was rejected but the transceiver + // is not already stopped, SDP munging has happened and we need to + // ensure the transceiver is stopped. + if (!transceiver->internal()->stopped()) { + transceiver->internal()->StopTransceiverProcedure(); + } + RTC_DCHECK(transceiver->internal()->stopped()); + } + } if (!error.ok()) { return error; } @@ -3218,8 +3489,7 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { RTC_LOG(LS_INFO) << "Ignoring unsupported media type"; } else { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Unknown section type."); + return RTCError(RTCErrorType::INTERNAL_ERROR, "Unknown section type."); } } @@ -3264,8 +3534,8 @@ SdpOfferAnswerHandler::AssociateTransceiver( } if (!transceiver) { // This may happen normally when media sections are rejected. - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Transceiver not found based on m-line index"); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Transceiver not found based on m-line index"); } } else { RTC_DCHECK_EQ(source, cricket::CS_REMOTE); @@ -3325,9 +3595,8 @@ SdpOfferAnswerHandler::AssociateTransceiver( } if (transceiver->media_type() != media_desc->type()) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INVALID_PARAMETER, - "Transceiver type does not match media description type."); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Transceiver type does not match media description type."); } if (media_desc->HasSimulcast()) { @@ -3374,23 +3643,21 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiverChannel( cricket::ChannelInterface* channel = transceiver->internal()->channel(); if (content.rejected) { if (channel) { - transceiver->internal()->SetChannel(nullptr); - DestroyChannelInterface(channel); + transceiver->internal()->ClearChannel(); } } else { if (!channel) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { - channel = CreateVoiceChannel(content.name); - } else { - RTC_DCHECK_EQ(cricket::MEDIA_TYPE_VIDEO, transceiver->media_type()); - channel = CreateVideoChannel(content.name); - } - if (!channel) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INTERNAL_ERROR, - "Failed to create channel for mid=" + content.name); + auto error = transceiver->internal()->CreateChannel( + content.name, pc_->call_ptr(), pc_->configuration()->media_config, + pc_->SrtpRequired(), pc_->GetCryptoOptions(), audio_options(), + video_options(), video_bitrate_allocator_factory_.get(), + [&](absl::string_view mid) { + RTC_DCHECK_RUN_ON(network_thread()); + return transport_controller_n()->GetRtpTransport(mid); + }); + if (!error.ok()) { + return error; } - transceiver->internal()->SetChannel(channel); } } return RTCError::OK(); @@ -3413,8 +3680,8 @@ RTCError SdpOfferAnswerHandler::UpdateDataChannel( if 
(!data_channel_controller()->data_channel_transport()) { RTC_LOG(LS_INFO) << "Creating data channel, mid=" << content.mid(); if (!CreateDataChannel(content.name)) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Failed to create data channel."); + return RTCError(RTCErrorType::INTERNAL_ERROR, + "Failed to create data channel."); } } } @@ -3552,8 +3819,7 @@ void SdpOfferAnswerHandler::GetOptionsForOffer( session_options->rtcp_cname = rtcp_cname_; session_options->crypto_options = pc_->GetCryptoOptions(); session_options->pooled_ice_credentials = - pc_->network_thread()->Invoke>( - RTC_FROM_HERE, + context_->network_thread()->BlockingCall( [this] { return port_allocator()->GetPooledIceCredentials(); }); session_options->offer_extmap_allow_mixed = pc_->configuration()->offer_extmap_allow_mixed; @@ -3568,38 +3834,43 @@ void SdpOfferAnswerHandler::GetOptionsForOffer( void SdpOfferAnswerHandler::GetOptionsForPlanBOffer( const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, cricket::MediaSessionOptions* session_options) { - // Figure out transceiver directional preferences. - bool send_audio = - !rtp_manager()->GetAudioTransceiver()->internal()->senders().empty(); - bool send_video = - !rtp_manager()->GetVideoTransceiver()->internal()->senders().empty(); - - // By default, generate sendrecv/recvonly m= sections. - bool recv_audio = true; - bool recv_video = true; - - // By default, only offer a new m= section if we have media to send with it. - bool offer_new_audio_description = send_audio; - bool offer_new_video_description = send_video; bool offer_new_data_description = data_channel_controller()->HasDataChannels(); - - // The "offer_to_receive_X" options allow those defaults to be overridden. - if (offer_answer_options.offer_to_receive_audio != - PeerConnectionInterface::RTCOfferAnswerOptions::kUndefined) { - recv_audio = (offer_answer_options.offer_to_receive_audio > 0); - offer_new_audio_description = - offer_new_audio_description || - (offer_answer_options.offer_to_receive_audio > 0); + bool send_audio = false; + bool send_video = false; + bool recv_audio = false; + bool recv_video = false; + if (ConfiguredForMedia()) { + // Figure out transceiver directional preferences. + send_audio = + !rtp_manager()->GetAudioTransceiver()->internal()->senders().empty(); + send_video = + !rtp_manager()->GetVideoTransceiver()->internal()->senders().empty(); + + // By default, generate sendrecv/recvonly m= sections. + recv_audio = true; + recv_video = true; } - if (offer_answer_options.offer_to_receive_video != - RTCOfferAnswerOptions::kUndefined) { - recv_video = (offer_answer_options.offer_to_receive_video > 0); - offer_new_video_description = - offer_new_video_description || - (offer_answer_options.offer_to_receive_video > 0); + // By default, only offer a new m= section if we have media to send with it. + bool offer_new_audio_description = send_audio; + bool offer_new_video_description = send_video; + if (ConfiguredForMedia()) { + // The "offer_to_receive_X" options allow those defaults to be overridden. 
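For reference, those legacy knobs are plain fields on RTCOfferAnswerOptions. A caller-side sketch, assuming `pc` is a PeerConnectionInterface and `observer` is a CreateSessionDescriptionObserver held in a scoped_refptr:

    webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
    // Fields left at kUndefined (the default) keep the behaviour computed
    // above: receiving is offered, and a new m= section is added only when
    // there is a sender for it.
    options.offer_to_receive_audio = 1;  // Ask for an audio section we can receive on.
    options.offer_to_receive_video = 0;  // Decline to receive video.
    pc->CreateOffer(observer.get(), options);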
+ if (offer_answer_options.offer_to_receive_audio != + PeerConnectionInterface::RTCOfferAnswerOptions::kUndefined) { + recv_audio = (offer_answer_options.offer_to_receive_audio > 0); + offer_new_audio_description = + offer_new_audio_description || + (offer_answer_options.offer_to_receive_audio > 0); + } + if (offer_answer_options.offer_to_receive_video != + RTCOfferAnswerOptions::kUndefined) { + recv_video = (offer_answer_options.offer_to_receive_video > 0); + offer_new_video_description = + offer_new_video_description || + (offer_answer_options.offer_to_receive_video > 0); + } } - absl::optional audio_index; absl::optional video_index; absl::optional data_index; @@ -3614,42 +3885,44 @@ void SdpOfferAnswerHandler::GetOptionsForPlanBOffer( &audio_index, &video_index, &data_index, session_options); } - // Add audio/video/data m= sections to the end if needed. - if (!audio_index && offer_new_audio_description) { - cricket::MediaDescriptionOptions options( - cricket::MEDIA_TYPE_AUDIO, cricket::CN_AUDIO, - RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), false); - options.header_extensions = - channel_manager()->GetSupportedAudioRtpHeaderExtensions(); - session_options->media_description_options.push_back(options); - audio_index = session_options->media_description_options.size() - 1; - } - if (!video_index && offer_new_video_description) { - cricket::MediaDescriptionOptions options( - cricket::MEDIA_TYPE_VIDEO, cricket::CN_VIDEO, - RtpTransceiverDirectionFromSendRecv(send_video, recv_video), false); - options.header_extensions = - channel_manager()->GetSupportedVideoRtpHeaderExtensions(); - session_options->media_description_options.push_back(options); - video_index = session_options->media_description_options.size() - 1; + if (ConfiguredForMedia()) { + // Add audio/video/data m= sections to the end if needed. + if (!audio_index && offer_new_audio_description) { + cricket::MediaDescriptionOptions options( + cricket::MEDIA_TYPE_AUDIO, cricket::CN_AUDIO, + RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), false); + options.header_extensions = + media_engine()->voice().GetRtpHeaderExtensions(); + session_options->media_description_options.push_back(options); + audio_index = session_options->media_description_options.size() - 1; + } + if (!video_index && offer_new_video_description) { + cricket::MediaDescriptionOptions options( + cricket::MEDIA_TYPE_VIDEO, cricket::CN_VIDEO, + RtpTransceiverDirectionFromSendRecv(send_video, recv_video), false); + options.header_extensions = + media_engine()->video().GetRtpHeaderExtensions(); + session_options->media_description_options.push_back(options); + video_index = session_options->media_description_options.size() - 1; + } + cricket::MediaDescriptionOptions* audio_media_description_options = + !audio_index + ? nullptr + : &session_options->media_description_options[*audio_index]; + cricket::MediaDescriptionOptions* video_media_description_options = + !video_index + ? nullptr + : &session_options->media_description_options[*video_index]; + + AddPlanBRtpSenderOptions(rtp_manager()->GetSendersInternal(), + audio_media_description_options, + video_media_description_options, + offer_answer_options.num_simulcast_layers); } if (!data_index && offer_new_data_description) { session_options->media_description_options.push_back( GetMediaDescriptionOptionsForActiveData(cricket::CN_DATA)); - data_index = session_options->media_description_options.size() - 1; } - - cricket::MediaDescriptionOptions* audio_media_description_options = - !audio_index ? 
nullptr - : &session_options->media_description_options[*audio_index]; - cricket::MediaDescriptionOptions* video_media_description_options = - !video_index ? nullptr - : &session_options->media_description_options[*video_index]; - - AddPlanBRtpSenderOptions(rtp_manager()->GetSendersInternal(), - audio_media_description_options, - video_media_description_options, - offer_answer_options.num_simulcast_layers); } void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer( @@ -3755,27 +4028,29 @@ void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer( // and not associated). Reuse media sections marked as recyclable first, // otherwise append to the end of the offer. New media sections should be // added in the order they were added to the PeerConnection. - for (const auto& transceiver : transceivers()->ListInternal()) { - if (transceiver->mid() || transceiver->stopping()) { - continue; - } - size_t mline_index; - if (!recycleable_mline_indices.empty()) { - mline_index = recycleable_mline_indices.front(); - recycleable_mline_indices.pop(); - session_options->media_description_options[mline_index] = - GetMediaDescriptionOptionsForTransceiver( - transceiver, mid_generator_.GenerateString(), - /*is_create_offer=*/true); - } else { - mline_index = session_options->media_description_options.size(); - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForTransceiver( - transceiver, mid_generator_.GenerateString(), - /*is_create_offer=*/true)); + if (ConfiguredForMedia()) { + for (const auto& transceiver : transceivers()->ListInternal()) { + if (transceiver->mid() || transceiver->stopping()) { + continue; + } + size_t mline_index; + if (!recycleable_mline_indices.empty()) { + mline_index = recycleable_mline_indices.front(); + recycleable_mline_indices.pop(); + session_options->media_description_options[mline_index] = + GetMediaDescriptionOptionsForTransceiver( + transceiver, mid_generator_.GenerateString(), + /*is_create_offer=*/true); + } else { + mline_index = session_options->media_description_options.size(); + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForTransceiver( + transceiver, mid_generator_.GenerateString(), + /*is_create_offer=*/true)); + } + // See comment above for why CreateOffer changes the transceiver's state. + transceiver->set_mline_index(mline_index); } - // See comment above for why CreateOffer changes the transceiver's state. - transceiver->set_mline_index(mline_index); } // Lastly, add a m-section if we have local data channels and an m section // does not already exist. @@ -3807,33 +4082,39 @@ void SdpOfferAnswerHandler::GetOptionsForAnswer( session_options->rtcp_cname = rtcp_cname_; session_options->crypto_options = pc_->GetCryptoOptions(); session_options->pooled_ice_credentials = - pc_->network_thread()->Invoke>( - RTC_FROM_HERE, + context_->network_thread()->BlockingCall( [this] { return port_allocator()->GetPooledIceCredentials(); }); } void SdpOfferAnswerHandler::GetOptionsForPlanBAnswer( const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, cricket::MediaSessionOptions* session_options) { - // Figure out transceiver directional preferences. 
- bool send_audio = - !rtp_manager()->GetAudioTransceiver()->internal()->senders().empty(); - bool send_video = - !rtp_manager()->GetVideoTransceiver()->internal()->senders().empty(); + bool send_audio = false; + bool recv_audio = false; + bool send_video = false; + bool recv_video = false; - // By default, generate sendrecv/recvonly m= sections. The direction is also - // restricted by the direction in the offer. - bool recv_audio = true; - bool recv_video = true; + if (ConfiguredForMedia()) { + // Figure out transceiver directional preferences. + send_audio = + !rtp_manager()->GetAudioTransceiver()->internal()->senders().empty(); + send_video = + !rtp_manager()->GetVideoTransceiver()->internal()->senders().empty(); - // The "offer_to_receive_X" options allow those defaults to be overridden. - if (offer_answer_options.offer_to_receive_audio != - RTCOfferAnswerOptions::kUndefined) { - recv_audio = (offer_answer_options.offer_to_receive_audio > 0); - } - if (offer_answer_options.offer_to_receive_video != - RTCOfferAnswerOptions::kUndefined) { - recv_video = (offer_answer_options.offer_to_receive_video > 0); + // By default, generate sendrecv/recvonly m= sections. The direction is also + // restricted by the direction in the offer. + recv_audio = true; + recv_video = true; + + // The "offer_to_receive_X" options allow those defaults to be overridden. + if (offer_answer_options.offer_to_receive_audio != + RTCOfferAnswerOptions::kUndefined) { + recv_audio = (offer_answer_options.offer_to_receive_audio > 0); + } + if (offer_answer_options.offer_to_receive_video != + RTCOfferAnswerOptions::kUndefined) { + recv_video = (offer_answer_options.offer_to_receive_video > 0); + } } absl::optional audio_index; @@ -3856,10 +4137,12 @@ void SdpOfferAnswerHandler::GetOptionsForPlanBAnswer( !video_index ? nullptr : &session_options->media_description_options[*video_index]; - AddPlanBRtpSenderOptions(rtp_manager()->GetSendersInternal(), - audio_media_description_options, - video_media_description_options, - offer_answer_options.num_simulcast_layers); + if (ConfiguredForMedia()) { + AddPlanBRtpSenderOptions(rtp_manager()->GetSendersInternal(), + audio_media_description_options, + video_media_description_options, + offer_answer_options.num_simulcast_layers); + } } void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanAnswer( @@ -4043,7 +4326,7 @@ void SdpOfferAnswerHandler::RemoveRemoteStreamsIfEmpty( for (const auto& remote_stream : remote_streams) { if (remote_stream->GetAudioTracks().empty() && remote_stream->GetVideoTracks().empty()) { - remote_streams_->RemoveStream(remote_stream); + remote_streams_->RemoveStream(remote_stream.get()); removed_streams->push_back(remote_stream); } } @@ -4157,8 +4440,8 @@ void SdpOfferAnswerHandler::UpdateRemoteSendersList( const std::string& sender_id = params.id; uint32_t ssrc = params.first_ssrc(); - rtc::scoped_refptr stream = - remote_streams_->find(stream_id); + rtc::scoped_refptr stream( + remote_streams_->find(stream_id)); if (!stream) { // This is a new MediaStream. Create a new remote MediaStream. 
stream = MediaStreamProxy::Create(rtc::Thread::Current(), @@ -4171,15 +4454,15 @@ void SdpOfferAnswerHandler::UpdateRemoteSendersList( rtp_manager()->FindSenderInfo(*current_senders, stream_id, sender_id); if (!sender_info) { current_senders->push_back(RtpSenderInfo(stream_id, sender_id, ssrc)); - rtp_manager()->OnRemoteSenderAdded(current_senders->back(), stream, + rtp_manager()->OnRemoteSenderAdded(current_senders->back(), stream.get(), media_type); } } // Add default sender if necessary. if (default_sender_needed) { - rtc::scoped_refptr default_stream = - remote_streams_->find(kDefaultStreamId); + rtc::scoped_refptr default_stream( + remote_streams_->find(kDefaultStreamId)); if (!default_stream) { // Create the new default MediaStream. default_stream = MediaStreamProxy::Create( @@ -4196,7 +4479,7 @@ void SdpOfferAnswerHandler::UpdateRemoteSendersList( current_senders->push_back( RtpSenderInfo(kDefaultStreamId, default_sender_id, /*ssrc=*/0)); rtp_manager()->OnRemoteSenderAdded(current_senders->back(), - default_stream, media_type); + default_stream.get(), media_type); } } } @@ -4204,6 +4487,9 @@ void SdpOfferAnswerHandler::UpdateRemoteSendersList( void SdpOfferAnswerHandler::EnableSending() { TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::EnableSending"); RTC_DCHECK_RUN_ON(signaling_thread()); + if (!ConfiguredForMedia()) { + return; + } for (const auto& transceiver : transceivers()->ListInternal()) { cricket::ChannelInterface* channel = transceiver->channel(); if (channel) { @@ -4224,63 +4510,63 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription( RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(sdesc); - if (!UpdatePayloadTypeDemuxingState(source, bundle_groups_by_mid)) { - // Note that this is never expected to fail, since RtpDemuxer doesn't return - // an error when changing payload type demux criteria, which is all this - // does. - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Failed to update payload type demuxing state."); - } - - // Push down the new SDP media section for each audio/video transceiver. - auto rtp_transceivers = transceivers()->ListInternal(); - std::vector< - std::pair> - channels; - for (const auto& transceiver : rtp_transceivers) { - const ContentInfo* content_info = - FindMediaSectionForTransceiver(transceiver, sdesc); - cricket::ChannelInterface* channel = transceiver->channel(); - if (!channel || !content_info || content_info->rejected) { - continue; - } - const MediaContentDescription* content_desc = - content_info->media_description(); - if (!content_desc) { - continue; - } + if (ConfiguredForMedia()) { + // Note: This will perform a BlockingCall over to the worker thread, which + // we'll also do in a loop below. + if (!UpdatePayloadTypeDemuxingState(source, bundle_groups_by_mid)) { + // Note that this is never expected to fail, since RtpDemuxer doesn't + // return an error when changing payload type demux criteria, which is all + // this does. + return RTCError(RTCErrorType::INTERNAL_ERROR, + "Failed to update payload type demuxing state."); + } + + // Push down the new SDP media section for each audio/video transceiver. 
+ auto rtp_transceivers = transceivers()->ListInternal(); + std::vector< + std::pair> + channels; + for (const auto& transceiver : rtp_transceivers) { + const ContentInfo* content_info = + FindMediaSectionForTransceiver(transceiver, sdesc); + cricket::ChannelInterface* channel = transceiver->channel(); + if (!channel || !content_info || content_info->rejected) { + continue; + } + const MediaContentDescription* content_desc = + content_info->media_description(); + if (!content_desc) { + continue; + } - transceiver->OnNegotiationUpdate(type, content_desc); - channels.push_back(std::make_pair(channel, content_desc)); - } - - // This for-loop of invokes helps audio impairment during re-negotiations. - // One of the causes is that downstairs decoder creation is synchronous at the - // moment, and that a decoder is created for each codec listed in the SDP. - // - // TODO(bugs.webrtc.org/12840): consider merging the invokes again after - // these projects have shipped: - // - bugs.webrtc.org/12462 - // - crbug.com/1157227 - // - crbug.com/1187289 - for (const auto& entry : channels) { - RTCError error = - pc_->worker_thread()->Invoke(RTC_FROM_HERE, [&]() { - std::string error; - bool success = - (source == cricket::CS_LOCAL) - ? entry.first->SetLocalContent(entry.second, type, &error) - : entry.first->SetRemoteContent(entry.second, type, &error); - if (!success) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error); - } - return RTCError::OK(); - }); - if (!error.ok()) { - return error; + transceiver->OnNegotiationUpdate(type, content_desc); + channels.push_back(std::make_pair(channel, content_desc)); + } + + // This for-loop of invokes helps audio impairment during re-negotiations. + // One of the causes is that downstairs decoder creation is synchronous at + // the moment, and that a decoder is created for each codec listed in the + // SDP. + // + // TODO(bugs.webrtc.org/12840): consider merging the invokes again after + // these projects have shipped: + // - bugs.webrtc.org/12462 + // - crbug.com/1157227 + // - crbug.com/1187289 + for (const auto& entry : channels) { + std::string error; + bool success = + context_->worker_thread()->BlockingCall([&]() { + return (source == cricket::CS_LOCAL) + ? entry.first->SetLocalContent(entry.second, type, error) + : entry.first->SetRemoteContent(entry.second, type, + error); + }); + if (!success) { + return RTCError(RTCErrorType::INVALID_PARAMETER, error); + } } } - // Need complete offer/answer with an SCTP m= section before starting SCTP, // according to https://tools.ietf.org/html/draft-ietf-mmusic-sctp-sdp-19 if (pc_->sctp_mid() && local_description() && remote_description()) { @@ -4317,13 +4603,13 @@ RTCError SdpOfferAnswerHandler::PushdownTransportDescription( if (source == cricket::CS_LOCAL) { const SessionDescriptionInterface* sdesc = local_description(); RTC_DCHECK(sdesc); - return transport_controller()->SetLocalDescription(type, - sdesc->description()); + return transport_controller_s()->SetLocalDescription(type, + sdesc->description()); } else { const SessionDescriptionInterface* sdesc = remote_description(); RTC_DCHECK(sdesc); - return transport_controller()->SetRemoteDescription(type, - sdesc->description()); + return transport_controller_s()->SetRemoteDescription(type, + sdesc->description()); } } @@ -4334,6 +4620,9 @@ void SdpOfferAnswerHandler::RemoveStoppedTransceivers() { // run the following steps: if (!IsUnifiedPlan()) return; + if (!ConfiguredForMedia()) { + return; + } // Traverse a copy of the transceiver list. 
auto transceiver_list = transceivers()->List(); for (auto transceiver : transceiver_list) { @@ -4368,18 +4657,21 @@ void SdpOfferAnswerHandler::RemoveStoppedTransceivers() { void SdpOfferAnswerHandler::RemoveUnusedChannels( const SessionDescription* desc) { RTC_DCHECK_RUN_ON(signaling_thread()); - // Destroy video channel first since it may have a pointer to the - // voice channel. - const cricket::ContentInfo* video_info = cricket::GetFirstVideoContent(desc); - if (!video_info || video_info->rejected) { - DestroyTransceiverChannel(rtp_manager()->GetVideoTransceiver()); - } + if (ConfiguredForMedia()) { + // Destroy video channel first since it may have a pointer to the + // voice channel. + const cricket::ContentInfo* video_info = + cricket::GetFirstVideoContent(desc); + if (!video_info || video_info->rejected) { + rtp_manager()->GetVideoTransceiver()->internal()->ClearChannel(); + } - const cricket::ContentInfo* audio_info = cricket::GetFirstAudioContent(desc); - if (!audio_info || audio_info->rejected) { - DestroyTransceiverChannel(rtp_manager()->GetAudioTransceiver()); + const cricket::ContentInfo* audio_info = + cricket::GetFirstAudioContent(desc); + if (!audio_info || audio_info->rejected) { + rtp_manager()->GetAudioTransceiver()->internal()->ClearChannel(); + } } - const cricket::ContentInfo* data_info = cricket::GetFirstDataContent(desc); if (!data_info) { RTCError error(RTCErrorType::OPERATION_ERROR_WITH_DATA, @@ -4396,49 +4688,26 @@ void SdpOfferAnswerHandler::RemoveUnusedChannels( } } -void SdpOfferAnswerHandler::ReportNegotiatedSdpSemantics( - const SessionDescriptionInterface& answer) { - SdpSemanticNegotiated semantics_negotiated; - switch (answer.description()->msid_signaling()) { - case 0: - semantics_negotiated = kSdpSemanticNegotiatedNone; - break; - case cricket::kMsidSignalingMediaSection: - semantics_negotiated = kSdpSemanticNegotiatedUnifiedPlan; - break; - case cricket::kMsidSignalingSsrcAttribute: - semantics_negotiated = kSdpSemanticNegotiatedPlanB; - break; - case cricket::kMsidSignalingMediaSection | - cricket::kMsidSignalingSsrcAttribute: - semantics_negotiated = kSdpSemanticNegotiatedMixed; - break; - default: - RTC_DCHECK_NOTREACHED(); - } - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SdpSemanticNegotiated", - semantics_negotiated, kSdpSemanticNegotiatedMax); -} - void SdpOfferAnswerHandler::UpdateEndedRemoteMediaStreams() { RTC_DCHECK_RUN_ON(signaling_thread()); std::vector> streams_to_remove; for (size_t i = 0; i < remote_streams_->count(); ++i) { MediaStreamInterface* stream = remote_streams_->at(i); if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) { - streams_to_remove.push_back(stream); + streams_to_remove.push_back( + rtc::scoped_refptr(stream)); } } for (auto& stream : streams_to_remove) { - remote_streams_->RemoveStream(stream); + remote_streams_->RemoveStream(stream.get()); pc_->Observer()->OnRemoveStream(std::move(stream)); } } -bool SdpOfferAnswerHandler::UseCandidatesInSessionDescription( - const SessionDescriptionInterface* remote_desc) { +bool SdpOfferAnswerHandler::UseCandidatesInRemoteDescription() { RTC_DCHECK_RUN_ON(signaling_thread()); + auto* remote_desc = remote_description(); if (!remote_desc) { return true; } @@ -4452,7 +4721,7 @@ bool SdpOfferAnswerHandler::UseCandidatesInSessionDescription( if (!ReadyToUseRemoteCandidate(candidate, remote_desc, &valid)) { if (valid) { RTC_LOG(LS_INFO) - << "UseCandidatesInSessionDescription: Not ready to use " + << "UseCandidatesInRemoteDescription: Not ready to use " 
"candidate."; } continue; @@ -4565,81 +4834,54 @@ RTCError SdpOfferAnswerHandler::CreateChannels(const SessionDescription& desc) { const cricket::ContentInfo* voice = cricket::GetFirstAudioContent(&desc); if (voice && !voice->rejected && !rtp_manager()->GetAudioTransceiver()->internal()->channel()) { - cricket::VoiceChannel* voice_channel = CreateVoiceChannel(voice->name); - if (!voice_channel) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Failed to create voice channel."); + auto error = + rtp_manager()->GetAudioTransceiver()->internal()->CreateChannel( + voice->name, pc_->call_ptr(), pc_->configuration()->media_config, + pc_->SrtpRequired(), pc_->GetCryptoOptions(), audio_options(), + video_options(), video_bitrate_allocator_factory_.get(), + [&](absl::string_view mid) { + RTC_DCHECK_RUN_ON(network_thread()); + return transport_controller_n()->GetRtpTransport(mid); + }); + if (!error.ok()) { + return error; } - rtp_manager()->GetAudioTransceiver()->internal()->SetChannel(voice_channel); } const cricket::ContentInfo* video = cricket::GetFirstVideoContent(&desc); if (video && !video->rejected && !rtp_manager()->GetVideoTransceiver()->internal()->channel()) { - cricket::VideoChannel* video_channel = CreateVideoChannel(video->name); - if (!video_channel) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Failed to create video channel."); + auto error = + rtp_manager()->GetVideoTransceiver()->internal()->CreateChannel( + video->name, pc_->call_ptr(), pc_->configuration()->media_config, + pc_->SrtpRequired(), pc_->GetCryptoOptions(), + + audio_options(), video_options(), + video_bitrate_allocator_factory_.get(), [&](absl::string_view mid) { + RTC_DCHECK_RUN_ON(network_thread()); + return transport_controller_n()->GetRtpTransport(mid); + }); + if (!error.ok()) { + return error; } - rtp_manager()->GetVideoTransceiver()->internal()->SetChannel(video_channel); } const cricket::ContentInfo* data = cricket::GetFirstDataContent(&desc); if (data && !data->rejected && !data_channel_controller()->data_channel_transport()) { if (!CreateDataChannel(data->name)) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Failed to create data channel."); + return RTCError(RTCErrorType::INTERNAL_ERROR, + "Failed to create data channel."); } } return RTCError::OK(); } -// TODO(steveanton): Perhaps this should be managed by the RtpTransceiver. -cricket::VoiceChannel* SdpOfferAnswerHandler::CreateVoiceChannel( - const std::string& mid) { - TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::CreateVoiceChannel"); - RTC_DCHECK_RUN_ON(signaling_thread()); - if (!channel_manager()->media_engine()) - return nullptr; - - RtpTransportInternal* rtp_transport = pc_->GetRtpTransport(mid); - - // TODO(bugs.webrtc.org/11992): CreateVoiceChannel internally switches to the - // worker thread. We shouldn't be using the `call_ptr_` hack here but simply - // be on the worker thread and use `call_` (update upstream code). - return channel_manager()->CreateVoiceChannel( - pc_->call_ptr(), pc_->configuration()->media_config, rtp_transport, - signaling_thread(), mid, pc_->SrtpRequired(), pc_->GetCryptoOptions(), - &ssrc_generator_, audio_options()); -} - -// TODO(steveanton): Perhaps this should be managed by the RtpTransceiver. 
-cricket::VideoChannel* SdpOfferAnswerHandler::CreateVideoChannel( - const std::string& mid) { - TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::CreateVideoChannel"); - RTC_DCHECK_RUN_ON(signaling_thread()); - if (!channel_manager()->media_engine()) - return nullptr; - - // NOTE: This involves a non-ideal hop (Invoke) over to the network thread. - RtpTransportInternal* rtp_transport = pc_->GetRtpTransport(mid); - - // TODO(bugs.webrtc.org/11992): CreateVideoChannel internally switches to the - // worker thread. We shouldn't be using the `call_ptr_` hack here but simply - // be on the worker thread and use `call_` (update upstream code). - return channel_manager()->CreateVideoChannel( - pc_->call_ptr(), pc_->configuration()->media_config, rtp_transport, - signaling_thread(), mid, pc_->SrtpRequired(), pc_->GetCryptoOptions(), - &ssrc_generator_, video_options(), - video_bitrate_allocator_factory_.get()); -} - bool SdpOfferAnswerHandler::CreateDataChannel(const std::string& mid) { RTC_DCHECK_RUN_ON(signaling_thread()); - if (!pc_->network_thread()->Invoke(RTC_FROM_HERE, [this, &mid] { - RTC_DCHECK_RUN_ON(pc_->network_thread()); + if (!context_->network_thread()->BlockingCall([this, &mid] { + RTC_DCHECK_RUN_ON(context_->network_thread()); return pc_->SetupDataChannelTransport_n(mid); })) { return false; @@ -4653,40 +4895,6 @@ bool SdpOfferAnswerHandler::CreateDataChannel(const std::string& mid) { return true; } -void SdpOfferAnswerHandler::DestroyTransceiverChannel( - rtc::scoped_refptr> - transceiver) { - TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DestroyTransceiverChannel"); - RTC_DCHECK(transceiver); - RTC_LOG_THREAD_BLOCK_COUNT(); - - // TODO(tommi): We're currently on the signaling thread. - // There are multiple hops to the worker ahead. - // Consider if we can make the call to SetChannel() on the worker thread - // (and require that to be the context it's always called in) and also - // call DestroyChannelInterface there, since it also needs to hop to the - // worker. - - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0); - if (channel) { - // TODO(tommi): VideoRtpReceiver::SetMediaChannel blocks and jumps to the - // worker thread. When being set to nullptr, there are additional - // blocking calls to e.g. ClearRecordableEncodedFrameCallback which triggers - // another blocking call or Stop() for video channels. - // The channel object also needs to be de-initialized on the network thread - // so if ownership of the channel object lies with the transceiver, we could - // un-set the channel pointer and uninitialize/destruct the channel object - // at the same time, rather than in separate steps. - transceiver->internal()->SetChannel(nullptr); - // TODO(tommi): All channel objects end up getting deleted on the - // worker thread (ideally should be on the network thread but the - // MediaChannel objects are tied to the worker. Can the teardown be done - // asynchronously across the threads rather than blocking? 
- DestroyChannelInterface(channel); - } -} - void SdpOfferAnswerHandler::DestroyDataChannelTransport(RTCError error) { RTC_DCHECK_RUN_ON(signaling_thread()); const bool has_sctp = pc_->sctp_mid().has_value(); @@ -4694,8 +4902,8 @@ void SdpOfferAnswerHandler::DestroyDataChannelTransport(RTCError error) { if (has_sctp) data_channel_controller()->OnTransportChannelClosed(error); - pc_->network_thread()->Invoke(RTC_FROM_HERE, [this] { - RTC_DCHECK_RUN_ON(pc_->network_thread()); + context_->network_thread()->BlockingCall([this] { + RTC_DCHECK_RUN_ON(context_->network_thread()); pc_->TeardownDataChannelTransport_n(); }); @@ -4703,45 +4911,6 @@ void SdpOfferAnswerHandler::DestroyDataChannelTransport(RTCError error) { pc_->ResetSctpDataMid(); } -void SdpOfferAnswerHandler::DestroyChannelInterface( - cricket::ChannelInterface* channel) { - TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DestroyChannelInterface"); - RTC_DCHECK_RUN_ON(signaling_thread()); - RTC_DCHECK(channel_manager()->media_engine()); - RTC_DCHECK(channel); - - // TODO(bugs.webrtc.org/11992): All the below methods should be called on the - // worker thread. (they switch internally anyway). Change - // DestroyChannelInterface to either be called on the worker thread, or do - // this asynchronously on the worker. - RTC_LOG_THREAD_BLOCK_COUNT(); - - switch (channel->media_type()) { - case cricket::MEDIA_TYPE_AUDIO: - channel_manager()->DestroyVoiceChannel( - static_cast(channel)); - break; - case cricket::MEDIA_TYPE_VIDEO: - channel_manager()->DestroyVideoChannel( - static_cast(channel)); - break; - case cricket::MEDIA_TYPE_DATA: - RTC_DCHECK_NOTREACHED() - << "Trying to destroy datachannel through DestroyChannelInterface"; - break; - default: - RTC_DCHECK_NOTREACHED() - << "Unknown media type: " << channel->media_type(); - break; - } - - // TODO(tommi): Figure out why we can get 2 blocking calls when running - // PeerConnectionCryptoTest.CreateAnswerWithDifferentSslRoles. - // and 3 when running - // PeerConnectionCryptoTest.CreateAnswerWithDifferentSslRoles - // RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); -} - void SdpOfferAnswerHandler::DestroyAllChannels() { RTC_DCHECK_RUN_ON(signaling_thread()); if (!transceivers()) { @@ -4757,12 +4926,12 @@ void SdpOfferAnswerHandler::DestroyAllChannels() { for (const auto& transceiver : list) { if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { - DestroyTransceiverChannel(transceiver); + transceiver->internal()->ClearChannel(); } } for (const auto& transceiver : list) { if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { - DestroyTransceiverChannel(transceiver); + transceiver->internal()->ClearChannel(); } } @@ -4796,7 +4965,7 @@ void SdpOfferAnswerHandler::GenerateMediaDescriptionOptions( *audio_index = session_options->media_description_options.size() - 1; } session_options->media_description_options.back().header_extensions = - channel_manager()->GetSupportedAudioRtpHeaderExtensions(); + media_engine()->voice().GetRtpHeaderExtensions(); } else if (IsVideoContent(&content)) { // If we already have an video m= section, reject this extra one. 
if (*video_index) { @@ -4813,7 +4982,7 @@ void SdpOfferAnswerHandler::GenerateMediaDescriptionOptions( *video_index = session_options->media_description_options.size() - 1; } session_options->media_description_options.back().header_extensions = - channel_manager()->GetSupportedVideoRtpHeaderExtensions(); + media_engine()->video().GetRtpHeaderExtensions(); } else if (IsUnsupportedContent(&content)) { session_options->media_description_options.push_back( cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_UNSUPPORTED, @@ -4947,10 +5116,28 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( } } + // In Unified Plan, payload type demuxing is useful for legacy endpoints that + // don't support the MID header extension, but it can also cause incorrrect + // forwarding of packets when going from one m= section to multiple m= + // sections in the same BUNDLE. This only happens if media arrives prior to + // negotiation, but this can cause missing video and unsignalled ssrc bugs + // severe enough to warrant disabling PT demuxing in such cases. Therefore, if + // a MID header extension is present on all m= sections for a given kind + // (audio/video) then we use that as an OK to disable payload type demuxing in + // BUNDLEs of that kind. However if PT demuxing was ever turned on (e.g. MID + // was ever removed on ANY m= section of that kind) then we continue to allow + // PT demuxing in order to prevent disabling it in follow-up O/A exchanges and + // allowing early media by PT. + bool bundled_pt_demux_allowed_audio = !IsUnifiedPlan() || + mid_header_extension_missing_audio || + pt_demuxing_has_been_used_audio_; + bool bundled_pt_demux_allowed_video = !IsUnifiedPlan() || + mid_header_extension_missing_video || + pt_demuxing_has_been_used_video_; + // Gather all updates ahead of time so that all channels can be updated in a - // single Invoke; necessary due to thread guards. - std::vector> - channels_to_update; + // single BlockingCall; necessary due to thread guards. + std::vector> channels_to_update; for (const auto& transceiver : transceivers()->ListInternal()) { cricket::ChannelInterface* channel = transceiver->channel(); const ContentInfo* content = @@ -4958,86 +5145,68 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( if (!channel || !content) { continue; } + + const cricket::MediaType media_type = channel->media_type(); + if (media_type != cricket::MediaType::MEDIA_TYPE_AUDIO && + media_type != cricket::MediaType::MEDIA_TYPE_VIDEO) { + continue; + } + RtpTransceiverDirection local_direction = content->media_description()->direction(); if (source == cricket::CS_REMOTE) { local_direction = RtpTransceiverDirectionReversed(local_direction); } - channels_to_update.emplace_back(local_direction, transceiver->channel()); + + auto bundle_it = bundle_groups_by_mid.find(channel->mid()); + const cricket::ContentGroup* bundle_group = + bundle_it != bundle_groups_by_mid.end() ? 
bundle_it->second : nullptr; + bool pt_demux_enabled = RtpTransceiverDirectionHasRecv(local_direction); + if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO) { + pt_demux_enabled &= + !bundle_group || + (bundled_pt_demux_allowed_audio && + payload_types_by_bundle[bundle_group].pt_demuxing_possible_audio); + if (pt_demux_enabled) { + pt_demuxing_has_been_used_audio_ = true; + } + } else { + RTC_DCHECK_EQ(media_type, cricket::MediaType::MEDIA_TYPE_VIDEO); + pt_demux_enabled &= + !bundle_group || + (bundled_pt_demux_allowed_video && + payload_types_by_bundle[bundle_group].pt_demuxing_possible_video); + if (pt_demux_enabled) { + pt_demuxing_has_been_used_video_ = true; + } + } + + channels_to_update.emplace_back(pt_demux_enabled, transceiver->channel()); } if (channels_to_update.empty()) { return true; } - // In Unified Plan, payload type demuxing is useful for legacy endpoints that - // don't support the MID header extension, but it can also cause incorrrect - // forwarding of packets when going from one m= section to multiple m= - // sections in the same BUNDLE. This only happens if media arrives prior to - // negotiation, but this can cause missing video and unsignalled ssrc bugs - // severe enough to warrant disabling PT demuxing in such cases. Therefore, if - // a MID header extension is present on all m= sections for a given kind - // (audio/video) then we use that as an OK to disable payload type demuxing in - // BUNDLEs of that kind. However if PT demuxing was ever turned on (e.g. MID - // was ever removed on ANY m= section of that kind) then we continue to allow - // PT demuxing in order to prevent disabling it in follow-up O/A exchanges and - // allowing early media by PT. - bool bundled_pt_demux_allowed_audio = !IsUnifiedPlan() || - mid_header_extension_missing_audio || - pt_demuxing_has_been_used_audio_; - bool bundled_pt_demux_allowed_video = !IsUnifiedPlan() || - mid_header_extension_missing_video || - pt_demuxing_has_been_used_video_; - // Kill switch for the above change. - if (field_trial::IsEnabled(kAlwaysAllowPayloadTypeDemuxingFieldTrialName)) { - // TODO(https://crbug.com/webrtc/12814): If disabling PT-based demux does - // not trigger regressions, remove this kill switch. - bundled_pt_demux_allowed_audio = true; - bundled_pt_demux_allowed_video = true; - } - - return pc_->worker_thread()->Invoke( - RTC_FROM_HERE, - [&channels_to_update, &bundle_groups_by_mid, &payload_types_by_bundle, - bundled_pt_demux_allowed_audio, bundled_pt_demux_allowed_video, - pt_demuxing_has_been_used_audio = &pt_demuxing_has_been_used_audio_, - pt_demuxing_has_been_used_video = &pt_demuxing_has_been_used_video_]() { - for (const auto& it : channels_to_update) { - RtpTransceiverDirection local_direction = it.first; - cricket::ChannelInterface* channel = it.second; - cricket::MediaType media_type = channel->media_type(); - auto bundle_it = bundle_groups_by_mid.find(channel->content_name()); - const cricket::ContentGroup* bundle_group = - bundle_it != bundle_groups_by_mid.end() ? 
bundle_it->second - : nullptr; - if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO) { - bool pt_demux_enabled = - RtpTransceiverDirectionHasRecv(local_direction) && - (!bundle_group || (bundled_pt_demux_allowed_audio && - payload_types_by_bundle[bundle_group] - .pt_demuxing_possible_audio)); - if (pt_demux_enabled) { - *pt_demuxing_has_been_used_audio = true; - } - if (!channel->SetPayloadTypeDemuxingEnabled(pt_demux_enabled)) { - return false; - } - } else if (media_type == cricket::MediaType::MEDIA_TYPE_VIDEO) { - bool pt_demux_enabled = - RtpTransceiverDirectionHasRecv(local_direction) && - (!bundle_group || (bundled_pt_demux_allowed_video && - payload_types_by_bundle[bundle_group] - .pt_demuxing_possible_video)); - if (pt_demux_enabled) { - *pt_demuxing_has_been_used_video = true; - } - if (!channel->SetPayloadTypeDemuxingEnabled(pt_demux_enabled)) { - return false; - } - } - } - return true; - }); + // TODO(bugs.webrtc.org/11993): This BlockingCall() will also block on the + // network thread for every demuxer sink that needs to be updated. The demuxer + // state needs to be fully (and only) managed on the network thread and once + // that's the case, there's no need to stop by on the worker. Ideally we could + // also do this without blocking. + return context_->worker_thread()->BlockingCall([&channels_to_update]() { + for (const auto& it : channels_to_update) { + if (!it.second->SetPayloadTypeDemuxingEnabled(it.first)) { + // Note that the state has already been irrevocably changed at this + // point. Is it useful to stop the loop? + return false; + } + } + return true; + }); +} + +bool SdpOfferAnswerHandler::ConfiguredForMedia() const { + return context_->media_engine(); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.h b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.h index 6c116f660d..c493dc0229 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.h +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.h @@ -19,7 +19,6 @@ #include #include #include -#include #include #include "absl/types/optional.h" @@ -37,47 +36,37 @@ #include "api/sequence_checker.h" #include "api/set_local_description_observer_interface.h" #include "api/set_remote_description_observer_interface.h" -#include "api/transport/data_channel_transport_interface.h" -#include "api/turn_customizer.h" #include "api/uma_metrics.h" #include "api/video/video_bitrate_allocator_factory.h" #include "media/base/media_channel.h" #include "media/base/stream_params.h" #include "p2p/base/port_allocator.h" -#include "pc/channel.h" -#include "pc/channel_interface.h" -#include "pc/channel_manager.h" +#include "pc/connection_context.h" #include "pc/data_channel_controller.h" -#include "pc/ice_server_parsing.h" #include "pc/jsep_transport_controller.h" #include "pc/media_session.h" #include "pc/media_stream_observer.h" -#include "pc/peer_connection_factory.h" #include "pc/peer_connection_internal.h" -#include "pc/rtc_stats_collector.h" #include "pc/rtp_receiver.h" -#include "pc/rtp_sender.h" #include "pc/rtp_transceiver.h" #include "pc/rtp_transmission_manager.h" -#include "pc/sctp_transport.h" #include "pc/sdp_state_provider.h" #include "pc/session_description.h" -#include "pc/stats_collector.h" #include "pc/stream_collection.h" #include "pc/transceiver_list.h" #include "pc/webrtc_session_description_factory.h" #include "rtc_base/checks.h" -#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/operations_chain.h" -#include "rtc_base/race_checker.h" -#include 
"rtc_base/rtc_certificate.h" #include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/unique_id_generator.h" #include "rtc_base/weak_ptr.h" +namespace cricket { +class ChannelManager; +} + namespace webrtc { // SdpOfferAnswerHandler is a component @@ -87,16 +76,16 @@ namespace webrtc { // - Parsing and interpreting SDP. // - Generating offers and answers based on the current state. // This class lives on the signaling thread. -class SdpOfferAnswerHandler : public SdpStateProvider, - public sigslot::has_slots<> { +class SdpOfferAnswerHandler : public SdpStateProvider { public: ~SdpOfferAnswerHandler(); // Creates an SdpOfferAnswerHandler. Modifies dependencies. static std::unique_ptr Create( - PeerConnection* pc, + PeerConnectionSdpMethods* pc, const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies& dependencies); + PeerConnectionDependencies& dependencies, + ConnectionContext* context); void ResetSessionDescFactory() { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -181,7 +170,16 @@ class SdpOfferAnswerHandler : public SdpStateProvider, rtc::scoped_refptr local_streams(); rtc::scoped_refptr remote_streams(); + bool initial_offerer() { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (initial_offerer_) { + return *initial_offerer_; + } + return false; + } + private: + class RemoteDescriptionOperation; class ImplicitCreateSessionDescriptionObserver; friend class ImplicitCreateSessionDescriptionObserver; @@ -204,14 +202,17 @@ class SdpOfferAnswerHandler : public SdpStateProvider, class LocalIceCredentialsToReplace; // Only called by the Create() function. - explicit SdpOfferAnswerHandler(PeerConnection* pc); + explicit SdpOfferAnswerHandler(PeerConnectionSdpMethods* pc, + ConnectionContext* context); // Called from the `Create()` function. Can only be called // once. Modifies dependencies. void Initialize( const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies& dependencies); + PeerConnectionDependencies& dependencies, + ConnectionContext* context); rtc::Thread* signaling_thread() const; + rtc::Thread* network_thread() const; // Non-const versions of local_description()/remote_description(), for use // internally. SessionDescriptionInterface* mutable_local_description() @@ -231,10 +232,24 @@ class SdpOfferAnswerHandler : public SdpStateProvider, std::unique_ptr desc, const std::map& bundle_groups_by_mid); - RTCError ApplyRemoteDescription( + void ApplyRemoteDescription( + std::unique_ptr operation); + + RTCError ReplaceRemoteDescription( std::unique_ptr desc, - const std::map& - bundle_groups_by_mid); + SdpType sdp_type, + std::unique_ptr* replaced_description) + RTC_RUN_ON(signaling_thread()); + + // Part of ApplyRemoteDescription steps specific to Unified Plan. + void ApplyRemoteDescriptionUpdateTransceiverState(SdpType sdp_type); + + // Part of ApplyRemoteDescription steps specific to plan b. + void PlanBUpdateSendersAndReceivers( + const cricket::ContentInfo* audio_content, + const cricket::AudioContentDescription* audio_desc, + const cricket::ContentInfo* video_content, + const cricket::VideoContentDescription* video_desc); // Implementation of the offer/answer exchange operations. 
These are chained // onto the `operations_chain_` when the public CreateOffer(), CreateAnswer(), @@ -249,8 +264,11 @@ class SdpOfferAnswerHandler : public SdpStateProvider, std::unique_ptr desc, rtc::scoped_refptr observer); void DoSetRemoteDescription( - std::unique_ptr desc, - rtc::scoped_refptr observer); + std::unique_ptr operation); + + // Called after a DoSetRemoteDescription operation completes. + void SetRemoteDescriptionPostProcess(bool was_answer) + RTC_RUN_ON(signaling_thread()); // Update the state, signaling if necessary. void ChangeSignalingState( @@ -356,7 +374,7 @@ class SdpOfferAnswerHandler : public SdpStateProvider, // to the SDP semantics. void FillInMissingRemoteMids(cricket::SessionDescription* remote_description); - // Returns an RtpTransciever, if available, that can be used to receive the + // Returns an RtpTransceiver, if available, that can be used to receive the // given media type according to JSEP rules. rtc::scoped_refptr> FindAvailableTransceiverToReceive(cricket::MediaType media_type) const; @@ -458,7 +476,7 @@ class SdpOfferAnswerHandler : public SdpStateProvider, // This enables media to flow on all configured audio/video channels. void EnableSending(); // Push the media parts of the local or remote session description - // down to all of the channels. + // down to all of the channels, and start SCTP if needed. RTCError PushdownMediaDescription( SdpType type, cricket::ContentSource source, @@ -473,18 +491,16 @@ class SdpOfferAnswerHandler : public SdpStateProvider, // `desc` can be null. This means that all channels are deleted. void RemoveUnusedChannels(const cricket::SessionDescription* desc); - // Report inferred negotiated SDP semantics from a local/remote answer to the - // UMA observer. - void ReportNegotiatedSdpSemantics(const SessionDescriptionInterface& answer); - // Finds remote MediaStreams without any tracks and removes them from // `remote_streams_` and notifies the observer that the MediaStreams no longer // exist. void UpdateEndedRemoteMediaStreams(); - // Uses all remote candidates in `remote_desc` in this session. - bool UseCandidatesInSessionDescription( - const SessionDescriptionInterface* remote_desc); + // Uses all remote candidates in the currently set remote_description(). + // If no remote description is currently set (nullptr), the return value will + // be true. If `UseCandidate()` fails for any candidate in the remote + // description, the return value will be false. + bool UseCandidatesInRemoteDescription(); // Uses `candidate` in this session. bool UseCandidate(const IceCandidateInterface* candidate); // Returns true if we are ready to push down the remote candidate. @@ -508,27 +524,14 @@ class SdpOfferAnswerHandler : public SdpStateProvider, // This method will also delete any existing media channels before creating. RTCError CreateChannels(const cricket::SessionDescription& desc); - // Helper methods to create media channels. - cricket::VoiceChannel* CreateVoiceChannel(const std::string& mid); - cricket::VideoChannel* CreateVideoChannel(const std::string& mid); bool CreateDataChannel(const std::string& mid); - // Destroys and clears the BaseChannel associated with the given transceiver, - // if such channel is set. - void DestroyTransceiverChannel( - rtc::scoped_refptr> - transceiver); - // Destroys the RTP data channel transport and/or the SCTP data channel // transport and clears it. void DestroyDataChannelTransport(RTCError error); - // Destroys the given ChannelInterface. 
- // The channel cannot be accessed after this method is called. - void DestroyChannelInterface(cricket::ChannelInterface* channel); // Generates MediaDescriptionOptions for the `session_opts` based on existing // local description or remote description. - void GenerateMediaDescriptionOptions( const SessionDescriptionInterface* session_desc, RtpTransceiverDirection audio_direction, @@ -567,6 +570,7 @@ class SdpOfferAnswerHandler : public SdpStateProvider, // ================================================================== // Access to pc_ variables cricket::ChannelManager* channel_manager() const; + cricket::MediaEngineInterface* media_engine() const; TransceiverList* transceivers(); const TransceiverList* transceivers() const; DataChannelController* data_channel_controller(); @@ -575,13 +579,21 @@ class SdpOfferAnswerHandler : public SdpStateProvider, const cricket::PortAllocator* port_allocator() const; RtpTransmissionManager* rtp_manager(); const RtpTransmissionManager* rtp_manager() const; - JsepTransportController* transport_controller(); - const JsepTransportController* transport_controller() const; + JsepTransportController* transport_controller_s() + RTC_RUN_ON(signaling_thread()); + const JsepTransportController* transport_controller_s() const + RTC_RUN_ON(signaling_thread()); + JsepTransportController* transport_controller_n() + RTC_RUN_ON(network_thread()); + const JsepTransportController* transport_controller_n() const + RTC_RUN_ON(network_thread()); // =================================================================== const cricket::AudioOptions& audio_options() { return audio_options_; } const cricket::VideoOptions& video_options() { return video_options_; } + bool ConfiguredForMedia() const; - PeerConnection* const pc_; + PeerConnectionSdpMethods* const pc_; + ConnectionContext* const context_; std::unique_ptr webrtc_session_desc_factory_ RTC_GUARDED_BY(signaling_thread()); @@ -662,14 +674,6 @@ class SdpOfferAnswerHandler : public SdpStateProvider, cricket::AudioOptions audio_options_ RTC_GUARDED_BY(signaling_thread()); cricket::VideoOptions video_options_ RTC_GUARDED_BY(signaling_thread()); - // This object should be used to generate any SSRC that is not explicitly - // specified by the user (or by the remote party). - // The generator is not used directly, instead it is passed on to the - // channel manager and the session description factory. - // TODO(bugs.webrtc.org/12666): This variable is used from both the signaling - // and worker threads. See if we can't restrict usage to a single thread. - rtc::UniqueRandomIdGenerator ssrc_generator_; - // A video bitrate allocator factory. // This can be injected using the PeerConnectionDependencies, // or else the CreateBuiltinVideoBitrateAllocatorFactory() will be called. @@ -678,6 +682,10 @@ class SdpOfferAnswerHandler : public SdpStateProvider, std::unique_ptr video_bitrate_allocator_factory_ RTC_GUARDED_BY(signaling_thread()); + // Whether we are the initial offerer on the association. This + // determines the SSL role. 
+ absl::optional initial_offerer_ RTC_GUARDED_BY(signaling_thread()); + rtc::WeakPtrFactory weak_ptr_factory_ RTC_GUARDED_BY(signaling_thread()); }; diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.cc b/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.cc index 3f46db5bb0..6d405d07a9 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.cc @@ -10,9 +10,9 @@ #include "pc/sdp_serializer.h" -#include #include #include +#include #include #include @@ -101,33 +101,33 @@ rtc::StringBuilder& operator<<(rtc::StringBuilder& builder, // sc-id = [sc-id-paused] rid-id // rid-id = 1*(alpha-numeric / "-" / "_") ; see: I-D.ietf-mmusic-rid RTCErrorOr ParseSimulcastLayerList(const std::string& str) { - std::vector tokens; - rtc::split(str, kDelimiterSemicolonChar, &tokens); + std::vector tokens = + rtc::split(str, kDelimiterSemicolonChar); if (tokens.empty()) { return ParseError("Layer list cannot be empty."); } SimulcastLayerList result; - for (const std::string& token : tokens) { + for (const absl::string_view& token : tokens) { if (token.empty()) { return ParseError("Simulcast alternative layer list is empty."); } - std::vector rid_tokens; - rtc::split(token, kDelimiterCommaChar, &rid_tokens); + std::vector rid_tokens = + rtc::split(token, kDelimiterCommaChar); if (rid_tokens.empty()) { return ParseError("Simulcast alternative layer list is malformed."); } std::vector layers; - for (const std::string& rid_token : rid_tokens) { + for (const absl::string_view& rid_token : rid_tokens) { if (rid_token.empty() || rid_token == kSimulcastPausedStream) { return ParseError("Rid must not be empty."); } bool paused = rid_token[0] == kSimulcastPausedStreamChar; - std::string rid = paused ? rid_token.substr(1) : rid_token; + absl::string_view rid = paused ? rid_token.substr(1) : rid_token; layers.push_back(SimulcastLayer(rid, paused)); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.cc b/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.cc index b750b04a46..ca61f0013f 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.cc @@ -10,8 +10,8 @@ #include "pc/sdp_utils.h" -#include #include +#include #include "api/jsep_session_description.h" #include "rtc_base/checks.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/session_description.cc b/TMessagesProj/jni/voip/webrtc/pc/session_description.cc index 7b878cbf7b..0346f8c149 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/session_description.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/session_description.cc @@ -10,11 +10,10 @@ #include "pc/session_description.h" -#include - #include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "rtc_base/checks.h" +#include "rtc_base/strings/string_builder.h" namespace cricket { namespace { @@ -66,17 +65,17 @@ const std::string* ContentGroup::FirstContentName() const { return (!content_names_.empty()) ? 
&(*content_names_.begin()) : NULL; } -bool ContentGroup::HasContentName(const std::string& content_name) const { +bool ContentGroup::HasContentName(absl::string_view content_name) const { return absl::c_linear_search(content_names_, content_name); } -void ContentGroup::AddContentName(const std::string& content_name) { +void ContentGroup::AddContentName(absl::string_view content_name) { if (!HasContentName(content_name)) { - content_names_.push_back(content_name); + content_names_.emplace_back(content_name); } } -bool ContentGroup::RemoveContentName(const std::string& content_name) { +bool ContentGroup::RemoveContentName(absl::string_view content_name) { ContentNames::iterator iter = absl::c_find(content_names_, content_name); if (iter == content_names_.end()) { return false; diff --git a/TMessagesProj/jni/voip/webrtc/pc/session_description.h b/TMessagesProj/jni/voip/webrtc/pc/session_description.h index ee7a91c84c..a7259e1f1d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/session_description.h +++ b/TMessagesProj/jni/voip/webrtc/pc/session_description.h @@ -15,13 +15,14 @@ #include #include -#include #include #include +#include #include #include #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" #include "api/crypto_params.h" #include "api/media_types.h" #include "api/rtp_parameters.h" @@ -95,8 +96,8 @@ class MediaContentDescription { // `protocol` is the expected media transport protocol, such as RTP/AVPF, // RTP/SAVPF or SCTP/DTLS. virtual std::string protocol() const { return protocol_; } - virtual void set_protocol(const std::string& protocol) { - protocol_ = protocol; + virtual void set_protocol(absl::string_view protocol) { + protocol_ = std::string(protocol); } virtual webrtc::RtpTransceiverDirection direction() const { @@ -182,14 +183,6 @@ class MediaContentDescription { AddStream(sp); } - // Sets the CNAME of all StreamParams if it have not been set. 
- virtual void SetCnameIfEmpty(const std::string& cname) { - for (cricket::StreamParamsVec::iterator it = send_streams_.begin(); - it != send_streams_.end(); ++it) { - if (it->cname.empty()) - it->cname = cname; - } - } virtual uint32_t first_ssrc() const { if (send_streams_.empty()) { return 0; @@ -282,9 +275,9 @@ class MediaContentDescription { template class MediaContentDescriptionImpl : public MediaContentDescription { public: - void set_protocol(const std::string& protocol) override { + void set_protocol(absl::string_view protocol) override { RTC_DCHECK(IsRtpProtocol(protocol)); - protocol_ = protocol; + protocol_ = std::string(protocol); } typedef C CodecType; @@ -365,9 +358,9 @@ class SctpDataContentDescription : public MediaContentDescription { const SctpDataContentDescription* as_sctp() const override { return this; } bool has_codecs() const override { return false; } - void set_protocol(const std::string& protocol) override { + void set_protocol(absl::string_view protocol) override { RTC_DCHECK(IsSctpProtocol(protocol)); - protocol_ = protocol; + protocol_ = std::string(protocol); } bool use_sctpmap() const { return use_sctpmap_; } @@ -392,7 +385,7 @@ class SctpDataContentDescription : public MediaContentDescription { class UnsupportedContentDescription : public MediaContentDescription { public: - explicit UnsupportedContentDescription(const std::string& media_type) + explicit UnsupportedContentDescription(absl::string_view media_type) : media_type_(media_type) {} MediaType type() const override { return MEDIA_TYPE_UNSUPPORTED; } @@ -478,9 +471,9 @@ class ContentGroup { const ContentNames& content_names() const { return content_names_; } const std::string* FirstContentName() const; - bool HasContentName(const std::string& content_name) const; - void AddContentName(const std::string& content_name); - bool RemoveContentName(const std::string& content_name); + bool HasContentName(absl::string_view content_name) const; + void AddContentName(absl::string_view content_name); + bool RemoveContentName(absl::string_view content_name); // for debugging std::string ToString() const; diff --git a/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.cc b/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.cc index 0ae3e2074e..ec87415677 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.cc @@ -14,7 +14,7 @@ namespace cricket { -SimulcastLayer::SimulcastLayer(const std::string& rid, bool is_paused) +SimulcastLayer::SimulcastLayer(absl::string_view rid, bool is_paused) : rid{rid}, is_paused{is_paused} { RTC_DCHECK(!rid.empty()); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.h b/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.h index f7ae28837e..7caf164de5 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.h +++ b/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.h @@ -16,6 +16,8 @@ #include #include +#include "absl/strings/string_view.h" + namespace cricket { // Describes a Simulcast Layer. @@ -23,7 +25,7 @@ namespace cricket { // See also: https://tools.ietf.org/html/draft-ietf-mmusic-rid-15 for // an explanation about rids. 
struct SimulcastLayer final { - SimulcastLayer(const std::string& rid, bool is_paused); + SimulcastLayer(absl::string_view rid, bool is_paused); SimulcastLayer(const SimulcastLayer& other) = default; SimulcastLayer& operator=(const SimulcastLayer& other) = default; diff --git a/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.cc b/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.cc index c48dfdb4cd..9d7f39a7a3 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.cc @@ -11,8 +11,8 @@ #include "pc/srtp_filter.h" #include -#include -#include + +#include #include "absl/strings/match.h" #include "rtc_base/logging.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.h b/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.h index f1e164936c..e2848a1090 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.h +++ b/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.h @@ -27,7 +27,6 @@ #include "api/sequence_checker.h" #include "pc/session_description.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/ssl_stream_adapter.h" // Forward declaration to avoid pulling in libsrtp headers here diff --git a/TMessagesProj/jni/voip/webrtc/pc/srtp_session.cc b/TMessagesProj/jni/voip/webrtc/pc/srtp_session.cc index 76ab3a8fe8..7d1aaf2d65 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/srtp_session.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/srtp_session.cc @@ -10,23 +10,106 @@ #include "pc/srtp_session.h" +#include + #include +#include #include "absl/base/attributes.h" +#include "absl/base/const_init.h" +#include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/field_trials_view.h" #include "modules/rtp_rtcp/source/rtp_util.h" #include "pc/external_hmac.h" +#include "rtc_base/byte_order.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/string_encode.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" #include "third_party/libsrtp/include/srtp.h" #include "third_party/libsrtp/include/srtp_priv.h" namespace cricket { +namespace { +class LibSrtpInitializer { + public: + // Returns singleton instance of this class. Instance created on first use, + // and never destroyed. + static LibSrtpInitializer& Get() { + static LibSrtpInitializer* const instance = new LibSrtpInitializer(); + return *instance; + } + void ProhibitLibsrtpInitialization(); + + // These methods are responsible for initializing libsrtp (if the usage count + // is incremented from 0 to 1) or deinitializing it (when decremented from 1 + // to 0). + // + // Returns true if successful (will always be successful if already inited). 
+ bool IncrementLibsrtpUsageCountAndMaybeInit( + srtp_event_handler_func_t* handler); + void DecrementLibsrtpUsageCountAndMaybeDeinit(); + + private: + LibSrtpInitializer() = default; + + webrtc::Mutex mutex_; + int usage_count_ RTC_GUARDED_BY(mutex_) = 0; +}; + +void LibSrtpInitializer::ProhibitLibsrtpInitialization() { + webrtc::MutexLock lock(&mutex_); + ++usage_count_; +} + +bool LibSrtpInitializer::IncrementLibsrtpUsageCountAndMaybeInit( + srtp_event_handler_func_t* handler) { + webrtc::MutexLock lock(&mutex_); + + RTC_DCHECK_GE(usage_count_, 0); + if (usage_count_ == 0) { + int err; + err = srtp_init(); + if (err != srtp_err_status_ok) { + RTC_LOG(LS_ERROR) << "Failed to init SRTP, err=" << err; + return false; + } + + err = srtp_install_event_handler(handler); + if (err != srtp_err_status_ok) { + RTC_LOG(LS_ERROR) << "Failed to install SRTP event handler, err=" << err; + return false; + } + + err = external_crypto_init(); + if (err != srtp_err_status_ok) { + RTC_LOG(LS_ERROR) << "Failed to initialize fake auth, err=" << err; + return false; + } + } + ++usage_count_; + return true; +} + +void LibSrtpInitializer::DecrementLibsrtpUsageCountAndMaybeDeinit() { + webrtc::MutexLock lock(&mutex_); + + RTC_DCHECK_GE(usage_count_, 1); + if (--usage_count_ == 0) { + int err = srtp_shutdown(); + if (err) { + RTC_LOG(LS_ERROR) << "srtp_shutdown failed. err=" << err; + } + } +} + +} // namespace + using ::webrtc::ParseRtpSequenceNumber; // One more than the maximum libsrtp error code. Required by @@ -34,8 +117,10 @@ using ::webrtc::ParseRtpSequenceNumber; // in srtp.h. constexpr int kSrtpErrorCodeBoundary = 28; -SrtpSession::SrtpSession() { - dump_plain_rtp_ = webrtc::field_trial::IsEnabled("WebRTC-Debugging-RtpDump"); +SrtpSession::SrtpSession() {} + +SrtpSession::SrtpSession(const webrtc::FieldTrialsView& field_trials) { + dump_plain_rtp_ = field_trials.IsEnabled("WebRTC-Debugging-RtpDump"); } SrtpSession::~SrtpSession() { @@ -44,7 +129,7 @@ SrtpSession::~SrtpSession() { srtp_dealloc(session_); } if (inited_) { - DecrementLibsrtpUsageCountAndMaybeDeinit(); + LibSrtpInitializer::Get().DecrementLibsrtpUsageCountAndMaybeDeinit(); } } @@ -345,7 +430,8 @@ bool SrtpSession::SetKey(int type, // This is the first time we need to actually interact with libsrtp, so // initialize it if needed. 
- if (IncrementLibsrtpUsageCountAndMaybeInit()) { + if (LibSrtpInitializer::Get().IncrementLibsrtpUsageCountAndMaybeInit( + &SrtpSession::HandleEventThunk)) { inited_ = true; } else { return false; @@ -368,54 +454,8 @@ bool SrtpSession::UpdateKey(int type, return DoSetKey(type, cs, key, len, extension_ids); } -ABSL_CONST_INIT int g_libsrtp_usage_count = 0; -ABSL_CONST_INIT webrtc::GlobalMutex g_libsrtp_lock(absl::kConstInit); - void ProhibitLibsrtpInitialization() { - webrtc::GlobalMutexLock ls(&g_libsrtp_lock); - ++g_libsrtp_usage_count; -} - -// static -bool SrtpSession::IncrementLibsrtpUsageCountAndMaybeInit() { - webrtc::GlobalMutexLock ls(&g_libsrtp_lock); - - RTC_DCHECK_GE(g_libsrtp_usage_count, 0); - if (g_libsrtp_usage_count == 0) { - int err; - err = srtp_init(); - if (err != srtp_err_status_ok) { - RTC_LOG(LS_ERROR) << "Failed to init SRTP, err=" << err; - return false; - } - - err = srtp_install_event_handler(&SrtpSession::HandleEventThunk); - if (err != srtp_err_status_ok) { - RTC_LOG(LS_ERROR) << "Failed to install SRTP event handler, err=" << err; - return false; - } - - err = external_crypto_init(); - if (err != srtp_err_status_ok) { - RTC_LOG(LS_ERROR) << "Failed to initialize fake auth, err=" << err; - return false; - } - } - ++g_libsrtp_usage_count; - return true; -} - -// static -void SrtpSession::DecrementLibsrtpUsageCountAndMaybeDeinit() { - webrtc::GlobalMutexLock ls(&g_libsrtp_lock); - - RTC_DCHECK_GE(g_libsrtp_usage_count, 1); - if (--g_libsrtp_usage_count == 0) { - int err = srtp_shutdown(); - if (err) { - RTC_LOG(LS_ERROR) << "srtp_shutdown failed. err=" << err; - } - } + LibSrtpInitializer::Get().ProhibitLibsrtpInitialization(); } void SrtpSession::HandleEvent(const srtp_event_data_t* ev) { @@ -463,13 +503,16 @@ void SrtpSession::DumpPacket(const void* buf, int len, bool outbound) { int64_t minutes = (time_of_day / (60 * 1000)) % 60; int64_t seconds = (time_of_day / 1000) % 60; int64_t millis = time_of_day % 1000; - RTC_LOG(LS_VERBOSE) << "\n" << (outbound ? "O" : "I") << " " - << std::setfill('0') << std::setw(2) << hours << ":" - << std::setfill('0') << std::setw(2) << minutes << ":" - << std::setfill('0') << std::setw(2) << seconds << "." - << std::setfill('0') << std::setw(3) << millis << " " - << "000000 " << rtc::hex_encode_with_delimiter((const char *)buf, len, ' ') - << " # RTP_DUMP"; + RTC_LOG(LS_VERBOSE) << "\n" + << (outbound ? "O" : "I") << " " << std::setfill('0') + << std::setw(2) << hours << ":" << std::setfill('0') + << std::setw(2) << minutes << ":" << std::setfill('0') + << std::setw(2) << seconds << "." 
<< std::setfill('0') + << std::setw(3) << millis << " " + << "000000 " + << rtc::hex_encode_with_delimiter( + absl::string_view((const char*)buf, len), ' ') + << " # RTP_DUMP"; } } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/pc/srtp_session.h b/TMessagesProj/jni/voip/webrtc/pc/srtp_session.h index 89fab0daf2..048e665644 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/srtp_session.h +++ b/TMessagesProj/jni/voip/webrtc/pc/srtp_session.h @@ -11,11 +11,14 @@ #ifndef PC_SRTP_SESSION_H_ #define PC_SRTP_SESSION_H_ +#include +#include + #include +#include "api/field_trials_view.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" // Forward declaration to avoid pulling in libsrtp headers here @@ -33,8 +36,12 @@ void ProhibitLibsrtpInitialization(); class SrtpSession { public: SrtpSession(); + explicit SrtpSession(const webrtc::FieldTrialsView& field_trials); ~SrtpSession(); + SrtpSession(const SrtpSession&) = delete; + SrtpSession& operator=(const SrtpSession&) = delete; + // Configures the session for sending data using the specified // cipher-suite and key. Receiving must be done by a separate session. bool SetSend(int cs, @@ -113,14 +120,6 @@ class SrtpSession { // for debugging. void DumpPacket(const void* buf, int len, bool outbound); - // These methods are responsible for initializing libsrtp (if the usage count - // is incremented from 0 to 1) or deinitializing it (when decremented from 1 - // to 0). - // - // Returns true if successful (will always be successful if already inited). - static bool IncrementLibsrtpUsageCountAndMaybeInit(); - static void DecrementLibsrtpUsageCountAndMaybeDeinit(); - void HandleEvent(const srtp_event_data_t* ev); static void HandleEventThunk(srtp_event_data_t* ev); @@ -135,13 +134,11 @@ class SrtpSession { int rtcp_auth_tag_len_ = 0; bool inited_ = false; - static webrtc::GlobalMutex lock_; int last_send_seq_num_ = -1; bool external_auth_active_ = false; bool external_auth_enabled_ = false; int decryption_failure_count_ = 0; bool dump_plain_rtp_ = false; - RTC_DISALLOW_COPY_AND_ASSIGN(SrtpSession); }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/pc/srtp_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/srtp_transport.cc index 230c1a347b..838040876c 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/srtp_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/srtp_transport.cc @@ -28,14 +28,14 @@ #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/third_party/base64/base64.h" -#include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/trace_event.h" #include "rtc_base/zero_memory.h" namespace webrtc { -SrtpTransport::SrtpTransport(bool rtcp_mux_enabled) - : RtpTransport(rtcp_mux_enabled) {} +SrtpTransport::SrtpTransport(bool rtcp_mux_enabled, + const FieldTrialsView& field_trials) + : RtpTransport(rtcp_mux_enabled), field_trials_(field_trials) {} RTCError SrtpTransport::SetSrtpSendKey(const cricket::CryptoParams& params) { if (send_params_) { @@ -324,13 +324,13 @@ bool SrtpTransport::SetRtcpParams(int send_cs, return false; } - send_rtcp_session_.reset(new cricket::SrtpSession()); + send_rtcp_session_.reset(new cricket::SrtpSession(field_trials_)); if (!send_rtcp_session_->SetSend(send_cs, send_key, send_key_len, send_extension_ids)) { return false; } - recv_rtcp_session_.reset(new cricket::SrtpSession()); + recv_rtcp_session_.reset(new 
cricket::SrtpSession(field_trials_)); if (!recv_rtcp_session_->SetRecv(recv_cs, recv_key, recv_key_len, recv_extension_ids)) { return false; @@ -361,8 +361,8 @@ void SrtpTransport::ResetParams() { } void SrtpTransport::CreateSrtpSessions() { - send_session_.reset(new cricket::SrtpSession()); - recv_session_.reset(new cricket::SrtpSession()); + send_session_.reset(new cricket::SrtpSession(field_trials_)); + recv_session_.reset(new cricket::SrtpSession(field_trials_)); if (external_auth_enabled_) { send_session_->EnableExternalAuth(); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/srtp_transport.h b/TMessagesProj/jni/voip/webrtc/pc/srtp_transport.h index 4bc028d68e..ae62d5b780 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/srtp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/srtp_transport.h @@ -20,6 +20,7 @@ #include "absl/types/optional.h" #include "api/crypto_params.h" +#include "api/field_trials_view.h" #include "api/rtc_error.h" #include "p2p/base/packet_transport_internal.h" #include "pc/rtp_transport.h" @@ -36,11 +37,10 @@ namespace webrtc { // parameters for the SrtpSession underneath. class SrtpTransport : public RtpTransport { public: - explicit SrtpTransport(bool rtcp_mux_enabled); + SrtpTransport(bool rtcp_mux_enabled, const FieldTrialsView& field_trials); virtual ~SrtpTransport() = default; - // SrtpTransportInterface specific implementation. virtual RTCError SetSrtpSendKey(const cricket::CryptoParams& params); virtual RTCError SetSrtpReceiveKey(const cricket::CryptoParams& params); @@ -167,6 +167,8 @@ class SrtpTransport : public RtpTransport { int rtp_abs_sendtime_extn_id_ = -1; int decryption_failure_count_ = 0; + + const FieldTrialsView& field_trials_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.cc b/TMessagesProj/jni/voip/webrtc/pc/stats_collector.cc deleted file mode 100644 index dc172599da..0000000000 --- a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.cc +++ /dev/null @@ -1,1374 +0,0 @@ -/* - * Copyright 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "pc/stats_collector.h" - -#include -#include - -#include -#include -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_encoder.h" -#include "api/candidate.h" -#include "api/data_channel_interface.h" -#include "api/media_types.h" -#include "api/rtp_receiver_interface.h" -#include "api/rtp_sender_interface.h" -#include "api/scoped_refptr.h" -#include "api/sequence_checker.h" -#include "api/video/video_content_type.h" -#include "api/video/video_timing.h" -#include "call/call.h" -#include "media/base/media_channel.h" -#include "modules/audio_processing/include/audio_processing_statistics.h" -#include "p2p/base/ice_transport_internal.h" -#include "p2p/base/p2p_constants.h" -#include "pc/channel.h" -#include "pc/channel_interface.h" -#include "pc/data_channel_utils.h" -#include "pc/rtp_receiver.h" -#include "pc/rtp_transceiver.h" -#include "pc/transport_stats.h" -#include "rtc_base/checks.h" -#include "rtc_base/ip_address.h" -#include "rtc_base/location.h" -#include "rtc_base/logging.h" -#include "rtc_base/rtc_certificate.h" -#include "rtc_base/socket_address.h" -#include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/string_encode.h" -#include "rtc_base/thread.h" -#include "rtc_base/time_utils.h" -#include "rtc_base/trace_event.h" -#include "system_wrappers/include/field_trial.h" - -namespace webrtc { -namespace { - -// Field trial which controls whether to report standard-compliant bytes -// sent/received per stream. If enabled, padding and headers are not included -// in bytes sent or received. -constexpr char kUseStandardBytesStats[] = "WebRTC-UseStandardBytesStats"; - -// The following is the enum RTCStatsIceCandidateType from -// http://w3c.github.io/webrtc-stats/#rtcstatsicecandidatetype-enum such that -// our stats report for ice candidate type could conform to that. -const char STATSREPORT_LOCAL_PORT_TYPE[] = "host"; -const char STATSREPORT_STUN_PORT_TYPE[] = "serverreflexive"; -const char STATSREPORT_PRFLX_PORT_TYPE[] = "peerreflexive"; -const char STATSREPORT_RELAY_PORT_TYPE[] = "relayed"; - -// Strings used by the stats collector to report adapter types. This fits the -// general stype of http://w3c.github.io/webrtc-stats than what -// AdapterTypeToString does. -const char* STATSREPORT_ADAPTER_TYPE_ETHERNET = "lan"; -const char* STATSREPORT_ADAPTER_TYPE_WIFI = "wlan"; -const char* STATSREPORT_ADAPTER_TYPE_WWAN = "wwan"; -const char* STATSREPORT_ADAPTER_TYPE_VPN = "vpn"; -const char* STATSREPORT_ADAPTER_TYPE_LOOPBACK = "loopback"; -const char* STATSREPORT_ADAPTER_TYPE_WILDCARD = "wildcard"; - -template -struct TypeForAdd { - const StatsReport::StatsValueName name; - const ValueType& value; -}; - -typedef TypeForAdd BoolForAdd; -typedef TypeForAdd FloatForAdd; -typedef TypeForAdd Int64ForAdd; -typedef TypeForAdd IntForAdd; - -StatsReport* AddTrackReport(StatsCollection* reports, - const std::string& track_id) { - // Adds an empty track report. 
- StatsReport::Id id( - StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack, track_id)); - StatsReport* report = reports->ReplaceOrAddNew(id); - report->AddString(StatsReport::kStatsValueNameTrackId, track_id); - return report; -} - -template -void CreateTrackReport(const Track* track, - StatsCollection* reports, - TrackIdMap* track_ids) { - const std::string& track_id = track->id(); - StatsReport* report = AddTrackReport(reports, track_id); - RTC_DCHECK(report != nullptr); - (*track_ids)[track_id] = report; -} - -template -void CreateTrackReports(const TrackVector& tracks, - StatsCollection* reports, - TrackIdMap* track_ids) { - for (const auto& track : tracks) { - CreateTrackReport(track.get(), reports, track_ids); - } -} - -void ExtractCommonSendProperties(const cricket::MediaSenderInfo& info, - StatsReport* report, - bool use_standard_bytes_stats) { - report->AddString(StatsReport::kStatsValueNameCodecName, info.codec_name); - int64_t bytes_sent = info.payload_bytes_sent; - if (!use_standard_bytes_stats) { - bytes_sent += info.header_and_padding_bytes_sent; - } - report->AddInt64(StatsReport::kStatsValueNameBytesSent, bytes_sent); - if (info.rtt_ms >= 0) { - report->AddInt64(StatsReport::kStatsValueNameRtt, info.rtt_ms); - } -} - -void ExtractCommonReceiveProperties(const cricket::MediaReceiverInfo& info, - StatsReport* report) { - report->AddString(StatsReport::kStatsValueNameCodecName, info.codec_name); -} - -void SetAudioProcessingStats(StatsReport* report, - bool typing_noise_detected, - const AudioProcessingStats& apm_stats) { - report->AddBoolean(StatsReport::kStatsValueNameTypingNoiseState, - typing_noise_detected); - if (apm_stats.delay_median_ms) { - report->AddInt(StatsReport::kStatsValueNameEchoDelayMedian, - *apm_stats.delay_median_ms); - } - if (apm_stats.delay_standard_deviation_ms) { - report->AddInt(StatsReport::kStatsValueNameEchoDelayStdDev, - *apm_stats.delay_standard_deviation_ms); - } - if (apm_stats.echo_return_loss) { - report->AddInt(StatsReport::kStatsValueNameEchoReturnLoss, - *apm_stats.echo_return_loss); - } - if (apm_stats.echo_return_loss_enhancement) { - report->AddInt(StatsReport::kStatsValueNameEchoReturnLossEnhancement, - *apm_stats.echo_return_loss_enhancement); - } - if (apm_stats.residual_echo_likelihood) { - report->AddFloat(StatsReport::kStatsValueNameResidualEchoLikelihood, - static_cast(*apm_stats.residual_echo_likelihood)); - } - if (apm_stats.residual_echo_likelihood_recent_max) { - report->AddFloat( - StatsReport::kStatsValueNameResidualEchoLikelihoodRecentMax, - static_cast(*apm_stats.residual_echo_likelihood_recent_max)); - } - if (apm_stats.divergent_filter_fraction) { - report->AddFloat(StatsReport::kStatsValueNameAecDivergentFilterFraction, - static_cast(*apm_stats.divergent_filter_fraction)); - } -} - -void ExtractStats(const cricket::VoiceReceiverInfo& info, - StatsReport* report, - bool use_standard_bytes_stats) { - ExtractCommonReceiveProperties(info, report); - const FloatForAdd floats[] = { - {StatsReport::kStatsValueNameExpandRate, info.expand_rate}, - {StatsReport::kStatsValueNameSecondaryDecodedRate, - info.secondary_decoded_rate}, - {StatsReport::kStatsValueNameSecondaryDiscardedRate, - info.secondary_discarded_rate}, - {StatsReport::kStatsValueNameSpeechExpandRate, info.speech_expand_rate}, - {StatsReport::kStatsValueNameAccelerateRate, info.accelerate_rate}, - {StatsReport::kStatsValueNamePreemptiveExpandRate, - info.preemptive_expand_rate}, - {StatsReport::kStatsValueNameTotalAudioEnergy, info.total_output_energy}, - 
{StatsReport::kStatsValueNameTotalSamplesDuration, - info.total_output_duration}}; - - const IntForAdd ints[] = { - {StatsReport::kStatsValueNameCurrentDelayMs, info.delay_estimate_ms}, - {StatsReport::kStatsValueNameDecodingCNG, info.decoding_cng}, - {StatsReport::kStatsValueNameDecodingCTN, info.decoding_calls_to_neteq}, - {StatsReport::kStatsValueNameDecodingCTSG, - info.decoding_calls_to_silence_generator}, - {StatsReport::kStatsValueNameDecodingMutedOutput, - info.decoding_muted_output}, - {StatsReport::kStatsValueNameDecodingNormal, info.decoding_normal}, - {StatsReport::kStatsValueNameDecodingPLC, info.decoding_plc}, - {StatsReport::kStatsValueNameDecodingPLCCNG, info.decoding_plc_cng}, - {StatsReport::kStatsValueNameJitterBufferMs, info.jitter_buffer_ms}, - {StatsReport::kStatsValueNameJitterReceived, info.jitter_ms}, - {StatsReport::kStatsValueNamePacketsLost, info.packets_lost}, - {StatsReport::kStatsValueNamePacketsReceived, info.packets_rcvd}, - {StatsReport::kStatsValueNamePreferredJitterBufferMs, - info.jitter_buffer_preferred_ms}, - }; - - for (const auto& f : floats) - report->AddFloat(f.name, f.value); - - for (const auto& i : ints) - report->AddInt(i.name, i.value); - if (info.audio_level >= 0) { - report->AddInt(StatsReport::kStatsValueNameAudioOutputLevel, - info.audio_level); - } - if (info.decoding_codec_plc) - report->AddInt(StatsReport::kStatsValueNameDecodingCodecPLC, - info.decoding_codec_plc); - - int64_t bytes_rcvd = info.payload_bytes_rcvd; - if (!use_standard_bytes_stats) { - bytes_rcvd += info.header_and_padding_bytes_rcvd; - } - report->AddInt64(StatsReport::kStatsValueNameBytesReceived, bytes_rcvd); - if (info.capture_start_ntp_time_ms >= 0) { - report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs, - info.capture_start_ntp_time_ms); - } - report->AddString(StatsReport::kStatsValueNameMediaType, "audio"); -} - -void ExtractStats(const cricket::VoiceSenderInfo& info, - StatsReport* report, - bool use_standard_bytes_stats) { - ExtractCommonSendProperties(info, report, use_standard_bytes_stats); - - SetAudioProcessingStats(report, info.typing_noise_detected, - info.apm_statistics); - - const FloatForAdd floats[] = { - {StatsReport::kStatsValueNameTotalAudioEnergy, info.total_input_energy}, - {StatsReport::kStatsValueNameTotalSamplesDuration, - info.total_input_duration}}; - - RTC_DCHECK_GE(info.audio_level, 0); - const IntForAdd ints[] = { - {StatsReport::kStatsValueNameAudioInputLevel, info.audio_level}, - {StatsReport::kStatsValueNameJitterReceived, info.jitter_ms}, - {StatsReport::kStatsValueNamePacketsLost, info.packets_lost}, - {StatsReport::kStatsValueNamePacketsSent, info.packets_sent}, - }; - - for (const auto& f : floats) { - report->AddFloat(f.name, f.value); - } - - for (const auto& i : ints) { - if (i.value >= 0) { - report->AddInt(i.name, i.value); - } - } - report->AddString(StatsReport::kStatsValueNameMediaType, "audio"); - if (info.ana_statistics.bitrate_action_counter) { - report->AddInt(StatsReport::kStatsValueNameAnaBitrateActionCounter, - *info.ana_statistics.bitrate_action_counter); - } - if (info.ana_statistics.channel_action_counter) { - report->AddInt(StatsReport::kStatsValueNameAnaChannelActionCounter, - *info.ana_statistics.channel_action_counter); - } - if (info.ana_statistics.dtx_action_counter) { - report->AddInt(StatsReport::kStatsValueNameAnaDtxActionCounter, - *info.ana_statistics.dtx_action_counter); - } - if (info.ana_statistics.fec_action_counter) { - 
report->AddInt(StatsReport::kStatsValueNameAnaFecActionCounter, - *info.ana_statistics.fec_action_counter); - } - if (info.ana_statistics.frame_length_increase_counter) { - report->AddInt(StatsReport::kStatsValueNameAnaFrameLengthIncreaseCounter, - *info.ana_statistics.frame_length_increase_counter); - } - if (info.ana_statistics.frame_length_decrease_counter) { - report->AddInt(StatsReport::kStatsValueNameAnaFrameLengthDecreaseCounter, - *info.ana_statistics.frame_length_decrease_counter); - } - if (info.ana_statistics.uplink_packet_loss_fraction) { - report->AddFloat(StatsReport::kStatsValueNameAnaUplinkPacketLossFraction, - *info.ana_statistics.uplink_packet_loss_fraction); - } -} - -void ExtractStats(const cricket::VideoReceiverInfo& info, - StatsReport* report, - bool use_standard_bytes_stats) { - ExtractCommonReceiveProperties(info, report); - report->AddString(StatsReport::kStatsValueNameCodecImplementationName, - info.decoder_implementation_name); - int64_t bytes_rcvd = info.payload_bytes_rcvd; - if (!use_standard_bytes_stats) { - bytes_rcvd += info.header_and_padding_bytes_rcvd; - } - report->AddInt64(StatsReport::kStatsValueNameBytesReceived, bytes_rcvd); - if (info.capture_start_ntp_time_ms >= 0) { - report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs, - info.capture_start_ntp_time_ms); - } - if (info.first_frame_received_to_decoded_ms >= 0) { - report->AddInt64(StatsReport::kStatsValueNameFirstFrameReceivedToDecodedMs, - info.first_frame_received_to_decoded_ms); - } - if (info.qp_sum) - report->AddInt64(StatsReport::kStatsValueNameQpSum, *info.qp_sum); - - if (info.nacks_sent) { - report->AddInt(StatsReport::kStatsValueNameNacksSent, *info.nacks_sent); - } - - const IntForAdd ints[] = { - {StatsReport::kStatsValueNameCurrentDelayMs, info.current_delay_ms}, - {StatsReport::kStatsValueNameDecodeMs, info.decode_ms}, - {StatsReport::kStatsValueNameFirsSent, info.firs_sent}, - {StatsReport::kStatsValueNameFrameHeightReceived, info.frame_height}, - {StatsReport::kStatsValueNameFrameRateDecoded, info.framerate_decoded}, - {StatsReport::kStatsValueNameFrameRateOutput, info.framerate_output}, - {StatsReport::kStatsValueNameFrameRateReceived, info.framerate_rcvd}, - {StatsReport::kStatsValueNameFrameWidthReceived, info.frame_width}, - {StatsReport::kStatsValueNameJitterBufferMs, info.jitter_buffer_ms}, - {StatsReport::kStatsValueNameMaxDecodeMs, info.max_decode_ms}, - {StatsReport::kStatsValueNameMinPlayoutDelayMs, - info.min_playout_delay_ms}, - {StatsReport::kStatsValueNamePacketsLost, info.packets_lost}, - {StatsReport::kStatsValueNamePacketsReceived, info.packets_rcvd}, - {StatsReport::kStatsValueNamePlisSent, info.plis_sent}, - {StatsReport::kStatsValueNameRenderDelayMs, info.render_delay_ms}, - {StatsReport::kStatsValueNameTargetDelayMs, info.target_delay_ms}, - {StatsReport::kStatsValueNameFramesDecoded, info.frames_decoded}, - }; - - for (const auto& i : ints) - report->AddInt(i.name, i.value); - report->AddString(StatsReport::kStatsValueNameMediaType, "video"); - - if (info.timing_frame_info) { - report->AddString(StatsReport::kStatsValueNameTimingFrameInfo, - info.timing_frame_info->ToString()); - } - - report->AddInt64(StatsReport::kStatsValueNameInterframeDelayMaxMs, - info.interframe_delay_max_ms); - - report->AddString( - StatsReport::kStatsValueNameContentType, - webrtc::videocontenttypehelpers::ToString(info.content_type)); -} - -void ExtractStats(const cricket::VideoSenderInfo& info, - StatsReport* report, - bool use_standard_bytes_stats) { - 
ExtractCommonSendProperties(info, report, use_standard_bytes_stats); - - report->AddString(StatsReport::kStatsValueNameCodecImplementationName, - info.encoder_implementation_name); - report->AddBoolean(StatsReport::kStatsValueNameBandwidthLimitedResolution, - (info.adapt_reason & 0x2) > 0); - report->AddBoolean(StatsReport::kStatsValueNameCpuLimitedResolution, - (info.adapt_reason & 0x1) > 0); - report->AddBoolean(StatsReport::kStatsValueNameHasEnteredLowResolution, - info.has_entered_low_resolution); - - if (info.qp_sum) - report->AddInt(StatsReport::kStatsValueNameQpSum, *info.qp_sum); - - const IntForAdd ints[] = { - {StatsReport::kStatsValueNameAdaptationChanges, info.adapt_changes}, - {StatsReport::kStatsValueNameAvgEncodeMs, info.avg_encode_ms}, - {StatsReport::kStatsValueNameEncodeUsagePercent, - info.encode_usage_percent}, - {StatsReport::kStatsValueNameFirsReceived, info.firs_rcvd}, - {StatsReport::kStatsValueNameFrameHeightSent, info.send_frame_height}, - {StatsReport::kStatsValueNameFrameRateInput, round(info.framerate_input)}, - {StatsReport::kStatsValueNameFrameRateSent, info.framerate_sent}, - {StatsReport::kStatsValueNameFrameWidthSent, info.send_frame_width}, - {StatsReport::kStatsValueNameNacksReceived, info.nacks_rcvd}, - {StatsReport::kStatsValueNamePacketsLost, info.packets_lost}, - {StatsReport::kStatsValueNamePacketsSent, info.packets_sent}, - {StatsReport::kStatsValueNamePlisReceived, info.plis_rcvd}, - {StatsReport::kStatsValueNameFramesEncoded, info.frames_encoded}, - {StatsReport::kStatsValueNameHugeFramesSent, info.huge_frames_sent}, - }; - - for (const auto& i : ints) - report->AddInt(i.name, i.value); - report->AddString(StatsReport::kStatsValueNameMediaType, "video"); - report->AddString( - StatsReport::kStatsValueNameContentType, - webrtc::videocontenttypehelpers::ToString(info.content_type)); -} - -void ExtractStats(const cricket::BandwidthEstimationInfo& info, - double stats_gathering_started, - StatsReport* report) { - RTC_DCHECK(report->type() == StatsReport::kStatsReportTypeBwe); - - report->set_timestamp(stats_gathering_started); - const IntForAdd ints[] = { - {StatsReport::kStatsValueNameAvailableSendBandwidth, - info.available_send_bandwidth}, - {StatsReport::kStatsValueNameAvailableReceiveBandwidth, - info.available_recv_bandwidth}, - {StatsReport::kStatsValueNameTargetEncBitrate, info.target_enc_bitrate}, - {StatsReport::kStatsValueNameActualEncBitrate, info.actual_enc_bitrate}, - {StatsReport::kStatsValueNameRetransmitBitrate, info.retransmit_bitrate}, - {StatsReport::kStatsValueNameTransmitBitrate, info.transmit_bitrate}, - }; - for (const auto& i : ints) - report->AddInt(i.name, i.value); - report->AddInt64(StatsReport::kStatsValueNameBucketDelay, info.bucket_delay); -} - -void ExtractRemoteStats(const cricket::MediaSenderInfo& info, - StatsReport* report) { - report->set_timestamp(info.remote_stats[0].timestamp); - // TODO(hta): Extract some stats here. -} - -void ExtractRemoteStats(const cricket::MediaReceiverInfo& info, - StatsReport* report) { - report->set_timestamp(info.remote_stats[0].timestamp); - // TODO(hta): Extract some stats here. -} - -std::string GetTrackIdBySsrc( - uint32_t ssrc, - StatsReport::Direction direction, - const std::map& track_id_by_ssrc) { - auto it = track_id_by_ssrc.find(ssrc); - if (it != track_id_by_ssrc.end()) { - return it->second; - } - if (direction == StatsReport::kReceive) { - // If the track ID was not found, this might be an unsignaled receive - // SSRC, so try looking up by the special SSRC 0. 
- it = track_id_by_ssrc.find(0); - if (it != track_id_by_ssrc.end()) { - RTC_LOG(LS_INFO) << "Assuming SSRC=" << ssrc - << " is an unsignalled receive stream corresponding " - "to the RtpReceiver with track ID \"" - << it->second << "\"."; - return it->second; - } - } - return ""; -} - -// Template to extract stats from a data vector. -// In order to use the template, the functions that are called from it, -// ExtractStats and ExtractRemoteStats, must be defined and overloaded -// for each type. -template -void ExtractStatsFromList( - const std::vector& data, - const StatsReport::Id& transport_id, - StatsCollector* collector, - StatsReport::Direction direction, - const std::map& track_id_by_ssrc) { - for (const auto& d : data) { - uint32_t ssrc = d.ssrc(); - std::string track_id = GetTrackIdBySsrc(ssrc, direction, track_id_by_ssrc); - // Each track can have stats for both local and remote objects. - // TODO(hta): Handle the case of multiple SSRCs per object. - StatsReport* report = - collector->PrepareReport(true, ssrc, track_id, transport_id, direction); - if (report) - ExtractStats(d, report, collector->UseStandardBytesStats()); - - if (!d.remote_stats.empty()) { - report = collector->PrepareReport(false, ssrc, track_id, transport_id, - direction); - if (report) - ExtractRemoteStats(d, report); - } - } -} - -} // namespace - -const char* IceCandidateTypeToStatsType(const std::string& candidate_type) { - if (candidate_type == cricket::LOCAL_PORT_TYPE) { - return STATSREPORT_LOCAL_PORT_TYPE; - } - if (candidate_type == cricket::STUN_PORT_TYPE) { - return STATSREPORT_STUN_PORT_TYPE; - } - if (candidate_type == cricket::PRFLX_PORT_TYPE) { - return STATSREPORT_PRFLX_PORT_TYPE; - } - if (candidate_type == cricket::RELAY_PORT_TYPE) { - return STATSREPORT_RELAY_PORT_TYPE; - } - RTC_DCHECK_NOTREACHED(); - return "unknown"; -} - -const char* AdapterTypeToStatsType(rtc::AdapterType type) { - switch (type) { - case rtc::ADAPTER_TYPE_UNKNOWN: - return "unknown"; - case rtc::ADAPTER_TYPE_ETHERNET: - return STATSREPORT_ADAPTER_TYPE_ETHERNET; - case rtc::ADAPTER_TYPE_WIFI: - return STATSREPORT_ADAPTER_TYPE_WIFI; - case rtc::ADAPTER_TYPE_CELLULAR: - case rtc::ADAPTER_TYPE_CELLULAR_2G: - case rtc::ADAPTER_TYPE_CELLULAR_3G: - case rtc::ADAPTER_TYPE_CELLULAR_4G: - case rtc::ADAPTER_TYPE_CELLULAR_5G: - return STATSREPORT_ADAPTER_TYPE_WWAN; - case rtc::ADAPTER_TYPE_VPN: - return STATSREPORT_ADAPTER_TYPE_VPN; - case rtc::ADAPTER_TYPE_LOOPBACK: - return STATSREPORT_ADAPTER_TYPE_LOOPBACK; - case rtc::ADAPTER_TYPE_ANY: - return STATSREPORT_ADAPTER_TYPE_WILDCARD; - default: - RTC_DCHECK_NOTREACHED(); - return ""; - } -} - -StatsCollector::StatsCollector(PeerConnectionInternal* pc) - : pc_(pc), - stats_gathering_started_(0), - use_standard_bytes_stats_( - webrtc::field_trial::IsEnabled(kUseStandardBytesStats)) { - RTC_DCHECK(pc_); -} - -StatsCollector::~StatsCollector() { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); -} - -// Wallclock time in ms. -double StatsCollector::GetTimeNow() { - return static_cast(rtc::TimeUTCMillis()); -} - -// Adds a MediaStream with tracks that can be used as a `selector` in a call -// to GetStats. 
-void StatsCollector::AddStream(MediaStreamInterface* stream) { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - RTC_DCHECK(stream != NULL); - - CreateTrackReports(stream->GetAudioTracks(), &reports_, - &track_ids_); - CreateTrackReports(stream->GetVideoTracks(), &reports_, - &track_ids_); -} - -void StatsCollector::AddTrack(MediaStreamTrackInterface* track) { - if (track->kind() == MediaStreamTrackInterface::kAudioKind) { - CreateTrackReport(static_cast(track), &reports_, - &track_ids_); - } else if (track->kind() == MediaStreamTrackInterface::kVideoKind) { - CreateTrackReport(static_cast(track), &reports_, - &track_ids_); - } else { - RTC_DCHECK_NOTREACHED() << "Illegal track kind"; - } -} - -void StatsCollector::AddLocalAudioTrack(AudioTrackInterface* audio_track, - uint32_t ssrc) { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - RTC_DCHECK(audio_track != NULL); -#if RTC_DCHECK_IS_ON - for (const auto& track : local_audio_tracks_) - RTC_DCHECK(track.first != audio_track || track.second != ssrc); -#endif - - local_audio_tracks_.push_back(std::make_pair(audio_track, ssrc)); - - // Create the kStatsReportTypeTrack report for the new track if there is no - // report yet. - StatsReport::Id id(StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack, - audio_track->id())); - StatsReport* report = reports_.Find(id); - if (!report) { - report = reports_.InsertNew(id); - report->AddString(StatsReport::kStatsValueNameTrackId, audio_track->id()); - } -} - -void StatsCollector::RemoveLocalAudioTrack(AudioTrackInterface* audio_track, - uint32_t ssrc) { - RTC_DCHECK(audio_track != NULL); - local_audio_tracks_.erase( - std::remove_if( - local_audio_tracks_.begin(), local_audio_tracks_.end(), - [audio_track, ssrc](const LocalAudioTrackVector::value_type& track) { - return track.first == audio_track && track.second == ssrc; - }), - local_audio_tracks_.end()); -} - -void StatsCollector::GetStats(MediaStreamTrackInterface* track, - StatsReports* reports) { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - RTC_DCHECK(reports != NULL); - RTC_DCHECK(reports->empty()); - - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - - if (!track) { - reports->reserve(reports_.size()); - for (auto* r : reports_) - reports->push_back(r); - return; - } - - StatsReport* report = reports_.Find(StatsReport::NewTypedId( - StatsReport::kStatsReportTypeSession, pc_->session_id())); - if (report) - reports->push_back(report); - - report = reports_.Find( - StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack, track->id())); - - if (!report) - return; - - reports->push_back(report); - - std::string track_id; - for (const auto* r : reports_) { - if (r->type() != StatsReport::kStatsReportTypeSsrc) - continue; - - const StatsReport::Value* v = - r->FindValue(StatsReport::kStatsValueNameTrackId); - if (v && v->string_val() == track->id()) - reports->push_back(r); - } -} - -void StatsCollector::UpdateStats( - PeerConnectionInterface::StatsOutputLevel level) { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - // Calls to UpdateStats() that occur less than kMinGatherStatsPeriodMs apart - // will be ignored. Using a monotonic clock specifically for this, while using - // a UTC clock for the reports themselves. 
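For context, a minimal caller-side sketch of the legacy polling contract being removed here (the deleted header later in this patch states that UpdateStats() must run before GetStats(), and that a null track selector returns every cached report). This is not part of the patch: the DumpLegacyStats helper and the `collector` reference are assumptions for illustration, and the sketch targets the tree as it was before this deletion. It must run on the PeerConnection's signaling thread.

    #include "pc/stats_collector.h"   // the header deleted further down in this patch
    #include "rtc_base/logging.h"

    // Hypothetical helper: refresh the collector's cache, then read it back.
    void DumpLegacyStats(webrtc::StatsCollector& collector) {
      // Calls closer together than the minimum gather period are ignored,
      // as the comment above explains.
      collector.UpdateStats(
          webrtc::PeerConnectionInterface::kStatsOutputLevelStandard);
      webrtc::StatsReports reports;            // raw pointers owned by the collector
      collector.GetStats(nullptr, &reports);   // null selector: all cached reports
      RTC_LOG(LS_INFO) << "cached reports: " << reports.size();
      for (const webrtc::StatsReport* report : reports) {
        RTC_LOG(LS_INFO) << "  report timestamp: " << report->timestamp();
      }
    }

Because the reports stay owned by the collector and can be invalidated by later message handling, the deleted header warns callers to consume `reports` immediately, which the sketch does by only logging inside the loop.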
- const int64_t kMinGatherStatsPeriodMs = 50; - int64_t cache_now_ms = rtc::TimeMillis(); - if (cache_timestamp_ms_ != 0 && - cache_timestamp_ms_ + kMinGatherStatsPeriodMs > cache_now_ms) { - return; - } - cache_timestamp_ms_ = cache_now_ms; - stats_gathering_started_ = GetTimeNow(); - - // TODO(tommi): ExtractSessionInfo now has a single hop to the network thread - // to fetch stats, then applies them on the signaling thread. See if we need - // to do this synchronously or if updating the stats without blocking is safe. - std::map transport_names_by_mid = - ExtractSessionInfo(); - - // TODO(tommi): All of these hop over to the worker thread to fetch - // information. We could post a task to run all of these and post - // the information back to the signaling thread where we can create and - // update stats reports. That would also clean up the threading story a bit - // since we'd be creating/updating the stats report objects consistently on - // the same thread (this class has no locks right now). - ExtractBweInfo(); - ExtractMediaInfo(transport_names_by_mid); - ExtractSenderInfo(); - ExtractDataInfo(); - UpdateTrackReports(); -} - -StatsReport* StatsCollector::PrepareReport(bool local, - uint32_t ssrc, - const std::string& track_id, - const StatsReport::Id& transport_id, - StatsReport::Direction direction) { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - StatsReport::Id id(StatsReport::NewIdWithDirection( - local ? StatsReport::kStatsReportTypeSsrc - : StatsReport::kStatsReportTypeRemoteSsrc, - rtc::ToString(ssrc), direction)); - StatsReport* report = reports_.Find(id); - if (!report) { - report = reports_.InsertNew(id); - } - - // FYI - for remote reports, the timestamp will be overwritten later. - report->set_timestamp(stats_gathering_started_); - - report->AddInt64(StatsReport::kStatsValueNameSsrc, ssrc); - if (!track_id.empty()) { - report->AddString(StatsReport::kStatsValueNameTrackId, track_id); - } - // Add the mapping of SSRC to transport. 
- report->AddId(StatsReport::kStatsValueNameTransportId, transport_id); - return report; -} - -StatsReport* StatsCollector::PrepareADMReport() { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - StatsReport::Id id(StatsReport::NewTypedId( - StatsReport::kStatsReportTypeSession, pc_->session_id())); - StatsReport* report = reports_.FindOrAddNew(id); - return report; -} - -bool StatsCollector::IsValidTrack(const std::string& track_id) { - return reports_.Find(StatsReport::NewTypedId( - StatsReport::kStatsReportTypeTrack, track_id)) != nullptr; -} - -StatsReport* StatsCollector::AddCertificateReports( - std::unique_ptr cert_stats) { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - - StatsReport* first_report = nullptr; - StatsReport* prev_report = nullptr; - for (rtc::SSLCertificateStats* stats = cert_stats.get(); stats; - stats = stats->issuer.get()) { - StatsReport::Id id(StatsReport::NewTypedId( - StatsReport::kStatsReportTypeCertificate, stats->fingerprint)); - - StatsReport* report = reports_.ReplaceOrAddNew(id); - report->set_timestamp(stats_gathering_started_); - report->AddString(StatsReport::kStatsValueNameFingerprint, - stats->fingerprint); - report->AddString(StatsReport::kStatsValueNameFingerprintAlgorithm, - stats->fingerprint_algorithm); - report->AddString(StatsReport::kStatsValueNameDer, - stats->base64_certificate); - if (!first_report) - first_report = report; - else - prev_report->AddId(StatsReport::kStatsValueNameIssuerId, id); - prev_report = report; - } - return first_report; -} - -StatsReport* StatsCollector::AddConnectionInfoReport( - const std::string& content_name, - int component, - int connection_id, - const StatsReport::Id& channel_report_id, - const cricket::ConnectionInfo& info) { - StatsReport::Id id( - StatsReport::NewCandidatePairId(content_name, component, connection_id)); - StatsReport* report = reports_.ReplaceOrAddNew(id); - report->set_timestamp(stats_gathering_started_); - - const BoolForAdd bools[] = { - {StatsReport::kStatsValueNameActiveConnection, info.best_connection}, - {StatsReport::kStatsValueNameReceiving, info.receiving}, - {StatsReport::kStatsValueNameWritable, info.writable}, - }; - for (const auto& b : bools) - report->AddBoolean(b.name, b.value); - - report->AddId(StatsReport::kStatsValueNameChannelId, channel_report_id); - cricket::CandidateStats local_candidate_stats(info.local_candidate); - cricket::CandidateStats remote_candidate_stats(info.remote_candidate); - report->AddId(StatsReport::kStatsValueNameLocalCandidateId, - AddCandidateReport(local_candidate_stats, true)->id()); - report->AddId(StatsReport::kStatsValueNameRemoteCandidateId, - AddCandidateReport(remote_candidate_stats, false)->id()); - - const Int64ForAdd int64s[] = { - {StatsReport::kStatsValueNameBytesReceived, info.recv_total_bytes}, - {StatsReport::kStatsValueNameBytesSent, info.sent_total_bytes}, - {StatsReport::kStatsValueNamePacketsSent, info.sent_total_packets}, - {StatsReport::kStatsValueNameRtt, info.rtt}, - {StatsReport::kStatsValueNameSendPacketsDiscarded, - info.sent_discarded_packets}, - {StatsReport::kStatsValueNameSentPingRequestsTotal, - info.sent_ping_requests_total}, - {StatsReport::kStatsValueNameSentPingRequestsBeforeFirstResponse, - info.sent_ping_requests_before_first_response}, - {StatsReport::kStatsValueNameSentPingResponses, info.sent_ping_responses}, - {StatsReport::kStatsValueNameRecvPingRequests, info.recv_ping_requests}, - {StatsReport::kStatsValueNameRecvPingResponses, info.recv_ping_responses}, - }; - for (const auto& i : int64s) - 
report->AddInt64(i.name, i.value); - - report->AddString(StatsReport::kStatsValueNameLocalAddress, - info.local_candidate.address().ToString()); - report->AddString(StatsReport::kStatsValueNameLocalCandidateType, - info.local_candidate.type()); - report->AddString(StatsReport::kStatsValueNameRemoteAddress, - info.remote_candidate.address().ToString()); - report->AddString(StatsReport::kStatsValueNameRemoteCandidateType, - info.remote_candidate.type()); - report->AddString(StatsReport::kStatsValueNameTransportType, - info.local_candidate.protocol()); - report->AddString(StatsReport::kStatsValueNameLocalCandidateRelayProtocol, - info.local_candidate.relay_protocol()); - - return report; -} - -StatsReport* StatsCollector::AddCandidateReport( - const cricket::CandidateStats& candidate_stats, - bool local) { - const auto& candidate = candidate_stats.candidate(); - StatsReport::Id id(StatsReport::NewCandidateId(local, candidate.id())); - StatsReport* report = reports_.Find(id); - if (!report) { - report = reports_.InsertNew(id); - report->set_timestamp(stats_gathering_started_); - if (local) { - report->AddString(StatsReport::kStatsValueNameCandidateNetworkType, - AdapterTypeToStatsType(candidate.network_type())); - } - report->AddString(StatsReport::kStatsValueNameCandidateIPAddress, - candidate.address().ipaddr().ToString()); - report->AddString(StatsReport::kStatsValueNameCandidatePortNumber, - candidate.address().PortAsString()); - report->AddInt(StatsReport::kStatsValueNameCandidatePriority, - candidate.priority()); - report->AddString(StatsReport::kStatsValueNameCandidateType, - IceCandidateTypeToStatsType(candidate.type())); - report->AddString(StatsReport::kStatsValueNameCandidateTransportType, - candidate.protocol()); - } - report->set_timestamp(stats_gathering_started_); - - if (local && candidate_stats.stun_stats().has_value()) { - const auto& stun_stats = candidate_stats.stun_stats().value(); - report->AddInt64(StatsReport::kStatsValueNameSentStunKeepaliveRequests, - stun_stats.stun_binding_requests_sent); - report->AddInt64(StatsReport::kStatsValueNameRecvStunKeepaliveResponses, - stun_stats.stun_binding_responses_received); - report->AddFloat(StatsReport::kStatsValueNameStunKeepaliveRttTotal, - stun_stats.stun_binding_rtt_ms_total); - report->AddFloat(StatsReport::kStatsValueNameStunKeepaliveRttSquaredTotal, - stun_stats.stun_binding_rtt_ms_squared_total); - } - - return report; -} - -std::map StatsCollector::ExtractSessionInfo() { - TRACE_EVENT0("webrtc", "StatsCollector::ExtractSessionInfo"); - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - - SessionStats stats; - auto transceivers = pc_->GetTransceiversInternal(); - pc_->network_thread()->Invoke( - RTC_FROM_HERE, [&, sctp_transport_name = pc_->sctp_transport_name(), - sctp_mid = pc_->sctp_mid()]() mutable { - stats = ExtractSessionInfo_n( - transceivers, std::move(sctp_transport_name), std::move(sctp_mid)); - }); - - ExtractSessionInfo_s(stats); - - return std::move(stats.transport_names_by_mid); -} - -StatsCollector::SessionStats StatsCollector::ExtractSessionInfo_n( - const std::vector>>& transceivers, - absl::optional sctp_transport_name, - absl::optional sctp_mid) { - TRACE_EVENT0("webrtc", "StatsCollector::ExtractSessionInfo_n"); - RTC_DCHECK_RUN_ON(pc_->network_thread()); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - SessionStats stats; - stats.candidate_stats = pc_->GetPooledCandidateStats(); - for (auto& transceiver : transceivers) { - cricket::ChannelInterface* channel = 
transceiver->internal()->channel(); - if (channel) { - stats.transport_names_by_mid[channel->content_name()] = - channel->transport_name(); - } - } - - if (sctp_transport_name) { - RTC_DCHECK(sctp_mid); - stats.transport_names_by_mid[*sctp_mid] = *sctp_transport_name; - } - - std::set transport_names; - for (const auto& entry : stats.transport_names_by_mid) { - transport_names.insert(entry.second); - } - - std::map transport_stats_by_name = - pc_->GetTransportStatsByNames(transport_names); - - for (auto& entry : transport_stats_by_name) { - stats.transport_stats.emplace_back(entry.first, std::move(entry.second)); - TransportStats& transport = stats.transport_stats.back(); - - // Attempt to get a copy of the certificates from the transport and - // expose them in stats reports. All channels in a transport share the - // same local and remote certificates. - // - StatsReport::Id local_cert_report_id, remote_cert_report_id; - rtc::scoped_refptr certificate; - if (pc_->GetLocalCertificate(transport.name, &certificate)) { - transport.local_cert_stats = - certificate->GetSSLCertificateChain().GetStats(); - } - - std::unique_ptr remote_cert_chain = - pc_->GetRemoteSSLCertChain(transport.name); - if (remote_cert_chain) { - transport.remote_cert_stats = remote_cert_chain->GetStats(); - } - } - - return stats; -} - -void StatsCollector::ExtractSessionInfo_s(SessionStats& session_stats) { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - - StatsReport::Id id(StatsReport::NewTypedId( - StatsReport::kStatsReportTypeSession, pc_->session_id())); - StatsReport* report = reports_.ReplaceOrAddNew(id); - report->set_timestamp(stats_gathering_started_); - report->AddBoolean(StatsReport::kStatsValueNameInitiator, - pc_->initial_offerer()); - - for (const cricket::CandidateStats& stats : session_stats.candidate_stats) { - AddCandidateReport(stats, true); - } - - for (auto& transport : session_stats.transport_stats) { - // Attempt to get a copy of the certificates from the transport and - // expose them in stats reports. All channels in a transport share the - // same local and remote certificates. 
- // - StatsReport::Id local_cert_report_id, remote_cert_report_id; - if (transport.local_cert_stats) { - StatsReport* r = - AddCertificateReports(std::move(transport.local_cert_stats)); - if (r) - local_cert_report_id = r->id(); - } - - if (transport.remote_cert_stats) { - StatsReport* r = - AddCertificateReports(std::move(transport.remote_cert_stats)); - if (r) - remote_cert_report_id = r->id(); - } - - for (const auto& channel_iter : transport.stats.channel_stats) { - StatsReport::Id channel_stats_id( - StatsReport::NewComponentId(transport.name, channel_iter.component)); - StatsReport* channel_report = reports_.ReplaceOrAddNew(channel_stats_id); - channel_report->set_timestamp(stats_gathering_started_); - channel_report->AddInt(StatsReport::kStatsValueNameComponent, - channel_iter.component); - if (local_cert_report_id.get()) { - channel_report->AddId(StatsReport::kStatsValueNameLocalCertificateId, - local_cert_report_id); - } - if (remote_cert_report_id.get()) { - channel_report->AddId(StatsReport::kStatsValueNameRemoteCertificateId, - remote_cert_report_id); - } - int srtp_crypto_suite = channel_iter.srtp_crypto_suite; - if (srtp_crypto_suite != rtc::kSrtpInvalidCryptoSuite && - rtc::SrtpCryptoSuiteToName(srtp_crypto_suite).length()) { - channel_report->AddString( - StatsReport::kStatsValueNameSrtpCipher, - rtc::SrtpCryptoSuiteToName(srtp_crypto_suite)); - } - int ssl_cipher_suite = channel_iter.ssl_cipher_suite; - if (ssl_cipher_suite != rtc::kTlsNullWithNullNull && - rtc::SSLStreamAdapter::SslCipherSuiteToName(ssl_cipher_suite) - .length()) { - channel_report->AddString( - StatsReport::kStatsValueNameDtlsCipher, - rtc::SSLStreamAdapter::SslCipherSuiteToName(ssl_cipher_suite)); - } - - // Collect stats for non-pooled candidates. Note that the reports - // generated here supersedes the candidate reports generated in - // AddConnectionInfoReport below, and they may report candidates that are - // not paired. Also, the candidate report generated in - // AddConnectionInfoReport do not report port stats like StunStats. - for (const cricket::CandidateStats& stats : - channel_iter.ice_transport_stats.candidate_stats_list) { - AddCandidateReport(stats, true); - } - - int connection_id = 0; - for (const cricket::ConnectionInfo& info : - channel_iter.ice_transport_stats.connection_infos) { - StatsReport* connection_report = AddConnectionInfoReport( - transport.name, channel_iter.component, connection_id++, - channel_report->id(), info); - if (info.best_connection) { - channel_report->AddId( - StatsReport::kStatsValueNameSelectedCandidatePairId, - connection_report->id()); - } - } - } - } -} - -void StatsCollector::ExtractBweInfo() { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - - if (pc_->signaling_state() == PeerConnectionInterface::kClosed) - return; - - webrtc::Call::Stats call_stats = pc_->GetCallStats(); - cricket::BandwidthEstimationInfo bwe_info; - bwe_info.available_send_bandwidth = call_stats.send_bandwidth_bps; - bwe_info.available_recv_bandwidth = call_stats.recv_bandwidth_bps; - bwe_info.bucket_delay = call_stats.pacer_delay_ms; - - // Fill in target encoder bitrate, actual encoder bitrate, rtx bitrate, etc. - // TODO(holmer): Also fill this in for audio. 
- auto transceivers = pc_->GetTransceiversInternal(); - std::vector video_channels; - for (const auto& transceiver : transceivers) { - if (transceiver->media_type() != cricket::MEDIA_TYPE_VIDEO) { - continue; - } - auto* video_channel = - static_cast(transceiver->internal()->channel()); - if (video_channel) { - video_channels.push_back(video_channel); - } - } - - if (!video_channels.empty()) { - pc_->worker_thread()->Invoke(RTC_FROM_HERE, [&] { - for (const auto& channel : video_channels) { - channel->FillBitrateInfo(&bwe_info); - } - }); - } - - StatsReport::Id report_id(StatsReport::NewBandwidthEstimationId()); - StatsReport* report = reports_.FindOrAddNew(report_id); - ExtractStats(bwe_info, stats_gathering_started_, report); -} - -namespace { - -class MediaChannelStatsGatherer { - public: - virtual ~MediaChannelStatsGatherer() = default; - - virtual bool GetStatsOnWorkerThread() = 0; - - virtual void ExtractStats(StatsCollector* collector) const = 0; - - virtual bool HasRemoteAudio() const = 0; - - std::string mid; - std::string transport_name; - std::map sender_track_id_by_ssrc; - std::map receiver_track_id_by_ssrc; - - protected: - template - void ExtractSenderReceiverStats( - StatsCollector* collector, - const std::vector& receiver_data, - const std::vector& sender_data) const { - RTC_DCHECK(collector); - StatsReport::Id transport_id = StatsReport::NewComponentId( - transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP); - ExtractStatsFromList(receiver_data, transport_id, collector, - StatsReport::kReceive, receiver_track_id_by_ssrc); - ExtractStatsFromList(sender_data, transport_id, collector, - StatsReport::kSend, sender_track_id_by_ssrc); - } -}; - -class VoiceMediaChannelStatsGatherer final : public MediaChannelStatsGatherer { - public: - VoiceMediaChannelStatsGatherer( - cricket::VoiceMediaChannel* voice_media_channel) - : voice_media_channel_(voice_media_channel) { - RTC_DCHECK(voice_media_channel_); - } - - bool GetStatsOnWorkerThread() override { - return voice_media_channel_->GetStats(&voice_media_info, - /*get_and_clear_legacy_stats=*/true); - } - - void ExtractStats(StatsCollector* collector) const override { - ExtractSenderReceiverStats(collector, voice_media_info.receivers, - voice_media_info.senders); - if (voice_media_info.device_underrun_count == -2 || - voice_media_info.device_underrun_count > 0) { - StatsReport* report = collector->PrepareADMReport(); - report->AddInt(StatsReport::kStatsValueNameAudioDeviceUnderrunCounter, - voice_media_info.device_underrun_count); - } - } - - bool HasRemoteAudio() const override { - return !voice_media_info.receivers.empty(); - } - - private: - cricket::VoiceMediaChannel* voice_media_channel_; - cricket::VoiceMediaInfo voice_media_info; -}; - -class VideoMediaChannelStatsGatherer final : public MediaChannelStatsGatherer { - public: - VideoMediaChannelStatsGatherer( - cricket::VideoMediaChannel* video_media_channel) - : video_media_channel_(video_media_channel) { - RTC_DCHECK(video_media_channel_); - } - - bool GetStatsOnWorkerThread() override { - return video_media_channel_->GetStats(&video_media_info); - } - - void ExtractStats(StatsCollector* collector) const override { - ExtractSenderReceiverStats(collector, video_media_info.receivers, - video_media_info.aggregated_senders); - } - - bool HasRemoteAudio() const override { return false; } - - private: - cricket::VideoMediaChannel* video_media_channel_; - cricket::VideoMediaInfo video_media_info; -}; - -std::unique_ptr CreateMediaChannelStatsGatherer( - cricket::MediaChannel* 
channel) { - RTC_DCHECK(channel); - if (channel->media_type() == cricket::MEDIA_TYPE_AUDIO) { - return std::make_unique( - static_cast(channel)); - } else { - RTC_DCHECK_EQ(channel->media_type(), cricket::MEDIA_TYPE_VIDEO); - return std::make_unique( - static_cast(channel)); - } -} - -} // namespace - -void StatsCollector::ExtractMediaInfo( - const std::map& transport_names_by_mid) { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - - std::vector> gatherers; - - auto transceivers = pc_->GetTransceiversInternal(); - { - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - for (const auto& transceiver : transceivers) { - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (!channel) { - continue; - } - std::unique_ptr gatherer = - CreateMediaChannelStatsGatherer(channel->media_channel()); - gatherer->mid = channel->content_name(); - gatherer->transport_name = transport_names_by_mid.at(gatherer->mid); - - for (const auto& sender : transceiver->internal()->senders()) { - auto track = sender->track(); - std::string track_id = (track ? track->id() : ""); - gatherer->sender_track_id_by_ssrc.insert( - std::make_pair(sender->ssrc(), track_id)); - } - - // Populating `receiver_track_id_by_ssrc` will be done on the worker - // thread as the `ssrc` property of the receiver needs to be accessed - // there. - - gatherers.push_back(std::move(gatherer)); - } - } - - pc_->worker_thread()->Invoke(RTC_FROM_HERE, [&] { - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - // Populate `receiver_track_id_by_ssrc` for the gatherers. - int i = 0; - for (const auto& transceiver : transceivers) { - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (!channel) - continue; - MediaChannelStatsGatherer* gatherer = gatherers[i++].get(); - RTC_DCHECK_EQ(gatherer->mid, channel->content_name()); - - for (const auto& receiver : transceiver->internal()->receivers()) { - gatherer->receiver_track_id_by_ssrc.insert(std::make_pair( - receiver->internal()->ssrc(), receiver->track()->id())); - } - } - - for (auto it = gatherers.begin(); it != gatherers.end(); - /* incremented manually */) { - MediaChannelStatsGatherer* gatherer = it->get(); - if (!gatherer->GetStatsOnWorkerThread()) { - RTC_LOG(LS_ERROR) << "Failed to get media channel stats for mid=" - << gatherer->mid; - it = gatherers.erase(it); - continue; - } - ++it; - } - }); - - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - - bool has_remote_audio = false; - for (const auto& gatherer : gatherers) { - gatherer->ExtractStats(this); - has_remote_audio |= gatherer->HasRemoteAudio(); - } - - UpdateStatsFromExistingLocalAudioTracks(has_remote_audio); -} - -void StatsCollector::ExtractSenderInfo() { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - - for (const auto& sender : pc_->GetSenders()) { - // TODO(nisse): SSRC == 0 currently means none. Delete check when - // that is fixed. - if (!sender->ssrc()) { - continue; - } - const rtc::scoped_refptr track(sender->track()); - if (!track || track->kind() != MediaStreamTrackInterface::kVideoKind) { - continue; - } - // Safe, because kind() == kVideoKind implies a subclass of - // VideoTrackInterface; see mediastreaminterface.h. 
- VideoTrackSourceInterface* source = - static_cast(track.get())->GetSource(); - - VideoTrackSourceInterface::Stats stats; - if (!source->GetStats(&stats)) { - continue; - } - const StatsReport::Id stats_id = StatsReport::NewIdWithDirection( - StatsReport::kStatsReportTypeSsrc, rtc::ToString(sender->ssrc()), - StatsReport::kSend); - StatsReport* report = reports_.FindOrAddNew(stats_id); - report->AddInt(StatsReport::kStatsValueNameFrameWidthInput, - stats.input_width); - report->AddInt(StatsReport::kStatsValueNameFrameHeightInput, - stats.input_height); - } -} - -void StatsCollector::ExtractDataInfo() { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - - std::vector data_stats = pc_->GetDataChannelStats(); - for (const auto& stats : data_stats) { - StatsReport::Id id(StatsReport::NewTypedIntId( - StatsReport::kStatsReportTypeDataChannel, stats.id)); - StatsReport* report = reports_.ReplaceOrAddNew(id); - report->set_timestamp(stats_gathering_started_); - report->AddString(StatsReport::kStatsValueNameLabel, stats.label); - // Filter out the initial id (-1). - if (stats.id >= 0) { - report->AddInt(StatsReport::kStatsValueNameDataChannelId, stats.id); - } - report->AddString(StatsReport::kStatsValueNameProtocol, stats.protocol); - report->AddString(StatsReport::kStatsValueNameState, - DataChannelInterface::DataStateString(stats.state)); - } -} - -StatsReport* StatsCollector::GetReport(const StatsReport::StatsType& type, - const std::string& id, - StatsReport::Direction direction) { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - RTC_DCHECK(type == StatsReport::kStatsReportTypeSsrc || - type == StatsReport::kStatsReportTypeRemoteSsrc); - return reports_.Find(StatsReport::NewIdWithDirection(type, id, direction)); -} - -void StatsCollector::UpdateStatsFromExistingLocalAudioTracks( - bool has_remote_tracks) { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - // Loop through the existing local audio tracks. - for (const auto& it : local_audio_tracks_) { - AudioTrackInterface* track = it.first; - uint32_t ssrc = it.second; - StatsReport* report = GetReport(StatsReport::kStatsReportTypeSsrc, - rtc::ToString(ssrc), StatsReport::kSend); - if (report == NULL) { - // This can happen if a local audio track is added to a stream on the - // fly and the report has not been set up yet. Do nothing in this case. - RTC_LOG(LS_ERROR) << "Stats report does not exist for ssrc " << ssrc; - continue; - } - - // The same ssrc can be used by both local and remote audio tracks. - const StatsReport::Value* v = - report->FindValue(StatsReport::kStatsValueNameTrackId); - if (!v || v->string_val() != track->id()) - continue; - - report->set_timestamp(stats_gathering_started_); - UpdateReportFromAudioTrack(track, report, has_remote_tracks); - } -} - -void StatsCollector::UpdateReportFromAudioTrack(AudioTrackInterface* track, - StatsReport* report, - bool has_remote_tracks) { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - RTC_DCHECK(track != NULL); - - // Don't overwrite report values if they're not available. 
- int signal_level; - if (track->GetSignalLevel(&signal_level)) { - RTC_DCHECK_GE(signal_level, 0); - report->AddInt(StatsReport::kStatsValueNameAudioInputLevel, signal_level); - } - - auto audio_processor(track->GetAudioProcessor()); - - if (audio_processor.get()) { - AudioProcessorInterface::AudioProcessorStatistics stats = - audio_processor->GetStats(has_remote_tracks); - - SetAudioProcessingStats(report, stats.typing_noise_detected, - stats.apm_statistics); - } -} - -void StatsCollector::UpdateTrackReports() { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - - for (const auto& entry : track_ids_) { - StatsReport* report = entry.second; - report->set_timestamp(stats_gathering_started_); - } -} - -void StatsCollector::ClearUpdateStatsCacheForTest() { - cache_timestamp_ms_ = 0; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.h b/TMessagesProj/jni/voip/webrtc/pc/stats_collector.h deleted file mode 100644 index 2fd5d9d8f8..0000000000 --- a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.h +++ /dev/null @@ -1,208 +0,0 @@ -/* - * Copyright 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This file contains a class used for gathering statistics from an ongoing -// libjingle PeerConnection. - -#ifndef PC_STATS_COLLECTOR_H_ -#define PC_STATS_COLLECTOR_H_ - -#include - -#include -#include -#include -#include -#include -#include -#include - -#include "api/media_stream_interface.h" -#include "api/peer_connection_interface.h" -#include "api/stats_types.h" -#include "p2p/base/connection_info.h" -#include "p2p/base/port.h" -#include "pc/peer_connection_internal.h" -#include "pc/stats_collector_interface.h" -#include "rtc_base/network_constants.h" -#include "rtc_base/ssl_certificate.h" - -namespace webrtc { - -// Conversion function to convert candidate type string to the corresponding one -// from enum RTCStatsIceCandidateType. -const char* IceCandidateTypeToStatsType(const std::string& candidate_type); - -// Conversion function to convert adapter type to report string which are more -// fitting to the general style of http://w3c.github.io/webrtc-stats. This is -// only used by stats collector. -const char* AdapterTypeToStatsType(rtc::AdapterType type); - -// A mapping between track ids and their StatsReport. -typedef std::map TrackIdMap; - -class StatsCollector : public StatsCollectorInterface { - public: - // The caller is responsible for ensuring that the pc outlives the - // StatsCollector instance. - explicit StatsCollector(PeerConnectionInternal* pc); - virtual ~StatsCollector(); - - // Adds a MediaStream with tracks that can be used as a `selector` in a call - // to GetStats. - void AddStream(MediaStreamInterface* stream); - void AddTrack(MediaStreamTrackInterface* track); - - // Adds a local audio track that is used for getting some voice statistics. - void AddLocalAudioTrack(AudioTrackInterface* audio_track, - uint32_t ssrc) override; - - // Removes a local audio tracks that is used for getting some voice - // statistics. 
- void RemoveLocalAudioTrack(AudioTrackInterface* audio_track, - uint32_t ssrc) override; - - // Gather statistics from the session and store them for future use. - void UpdateStats(PeerConnectionInterface::StatsOutputLevel level); - - // Gets a StatsReports of the last collected stats. Note that UpdateStats must - // be called before this function to get the most recent stats. `selector` is - // a track label or empty string. The most recent reports are stored in - // `reports`. - // TODO(tommi): Change this contract to accept a callback object instead - // of filling in `reports`. As is, there's a requirement that the caller - // uses `reports` immediately without allowing any async activity on - // the thread (message handling etc) and then discard the results. - void GetStats(MediaStreamTrackInterface* track, - StatsReports* reports) override; - - // Prepare a local or remote SSRC report for the given ssrc. Used internally - // in the ExtractStatsFromList template. - StatsReport* PrepareReport(bool local, - uint32_t ssrc, - const std::string& track_id, - const StatsReport::Id& transport_id, - StatsReport::Direction direction); - - StatsReport* PrepareADMReport(); - - // A track is invalid if there is no report data for it. - bool IsValidTrack(const std::string& track_id); - - // Method used by the unittest to force a update of stats since UpdateStats() - // that occur less than kMinGatherStatsPeriod number of ms apart will be - // ignored. - void ClearUpdateStatsCacheForTest(); - - bool UseStandardBytesStats() const { return use_standard_bytes_stats_; } - - private: - friend class StatsCollectorTest; - - // Struct that's populated on the network thread and carries the values to - // the signaling thread where the stats are added to the stats reports. - struct TransportStats { - TransportStats() = default; - TransportStats(std::string transport_name, - cricket::TransportStats transport_stats) - : name(std::move(transport_name)), stats(std::move(transport_stats)) {} - TransportStats(TransportStats&&) = default; - TransportStats(const TransportStats&) = delete; - - std::string name; - cricket::TransportStats stats; - std::unique_ptr local_cert_stats; - std::unique_ptr remote_cert_stats; - }; - - struct SessionStats { - SessionStats() = default; - SessionStats(SessionStats&&) = default; - SessionStats(const SessionStats&) = delete; - - SessionStats& operator=(SessionStats&&) = default; - SessionStats& operator=(SessionStats&) = delete; - - cricket::CandidateStatsList candidate_stats; - std::vector transport_stats; - std::map transport_names_by_mid; - }; - - // Overridden in unit tests to fake timing. - virtual double GetTimeNow(); - - bool CopySelectedReports(const std::string& selector, StatsReports* reports); - - // Helper method for creating IceCandidate report. `is_local` indicates - // whether this candidate is local or remote. - StatsReport* AddCandidateReport( - const cricket::CandidateStats& candidate_stats, - bool local); - - // Adds a report for this certificate and every certificate in its chain, and - // returns the leaf certificate's report (`cert_stats`'s report). - StatsReport* AddCertificateReports( - std::unique_ptr cert_stats); - - StatsReport* AddConnectionInfoReport(const std::string& content_name, - int component, - int connection_id, - const StatsReport::Id& channel_report_id, - const cricket::ConnectionInfo& info); - - void ExtractDataInfo(); - - // Returns the `transport_names_by_mid` member from the SessionStats as - // gathered and used to populate the stats. 
- std::map ExtractSessionInfo(); - - void ExtractBweInfo(); - void ExtractMediaInfo( - const std::map& transport_names_by_mid); - void ExtractSenderInfo(); - webrtc::StatsReport* GetReport(const StatsReport::StatsType& type, - const std::string& id, - StatsReport::Direction direction); - - // Helper method to get stats from the local audio tracks. - void UpdateStatsFromExistingLocalAudioTracks(bool has_remote_tracks); - void UpdateReportFromAudioTrack(AudioTrackInterface* track, - StatsReport* report, - bool has_remote_tracks); - - // Helper method to update the timestamp of track records. - void UpdateTrackReports(); - - SessionStats ExtractSessionInfo_n( - const std::vector>>& transceivers, - absl::optional sctp_transport_name, - absl::optional sctp_mid); - void ExtractSessionInfo_s(SessionStats& session_stats); - - // A collection for all of our stats reports. - StatsCollection reports_; - TrackIdMap track_ids_; - // Raw pointer to the peer connection the statistics are gathered from. - PeerConnectionInternal* const pc_; - int64_t cache_timestamp_ms_ = 0; - double stats_gathering_started_; - const bool use_standard_bytes_stats_; - - // TODO(tommi): We appear to be holding on to raw pointers to reference - // counted objects? We should be using scoped_refptr here. - typedef std::vector > - LocalAudioTrackVector; - LocalAudioTrackVector local_audio_tracks_; -}; - -} // namespace webrtc - -#endif // PC_STATS_COLLECTOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/stats_collector_interface.h b/TMessagesProj/jni/voip/webrtc/pc/stats_collector_interface.h deleted file mode 100644 index 4d5c98a4ab..0000000000 --- a/TMessagesProj/jni/voip/webrtc/pc/stats_collector_interface.h +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This file contains an interface for the (obsolete) StatsCollector class that -// is used by compilation units that do not wish to depend on the StatsCollector -// implementation. - -#ifndef PC_STATS_COLLECTOR_INTERFACE_H_ -#define PC_STATS_COLLECTOR_INTERFACE_H_ - -#include - -#include "api/media_stream_interface.h" -#include "api/stats_types.h" - -namespace webrtc { - -class StatsCollectorInterface { - public: - virtual ~StatsCollectorInterface() {} - - // Adds a local audio track that is used for getting some voice statistics. - virtual void AddLocalAudioTrack(AudioTrackInterface* audio_track, - uint32_t ssrc) = 0; - - // Removes a local audio tracks that is used for getting some voice - // statistics. 
- virtual void RemoveLocalAudioTrack(AudioTrackInterface* audio_track, - uint32_t ssrc) = 0; - virtual void GetStats(MediaStreamTrackInterface* track, - StatsReports* reports) = 0; -}; - -} // namespace webrtc - -#endif // PC_STATS_COLLECTOR_INTERFACE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/stream_collection.h b/TMessagesProj/jni/voip/webrtc/pc/stream_collection.h index 9bbf957efd..f0f3f07b4b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/stream_collection.h +++ b/TMessagesProj/jni/voip/webrtc/pc/stream_collection.h @@ -12,6 +12,7 @@ #define PC_STREAM_COLLECTION_H_ #include +#include #include #include "api/peer_connection_interface.h" @@ -33,14 +34,14 @@ class StreamCollection : public StreamCollectionInterface { virtual size_t count() { return media_streams_.size(); } virtual MediaStreamInterface* at(size_t index) { - return media_streams_.at(index); + return media_streams_.at(index).get(); } virtual MediaStreamInterface* find(const std::string& id) { for (StreamVector::iterator it = media_streams_.begin(); it != media_streams_.end(); ++it) { if ((*it)->id().compare(id) == 0) { - return (*it); + return (*it).get(); } } return NULL; @@ -48,7 +49,8 @@ class StreamCollection : public StreamCollectionInterface { virtual MediaStreamTrackInterface* FindAudioTrack(const std::string& id) { for (size_t i = 0; i < media_streams_.size(); ++i) { - MediaStreamTrackInterface* track = media_streams_[i]->FindAudioTrack(id); + MediaStreamTrackInterface* track = + media_streams_[i]->FindAudioTrack(id).get(); if (track) { return track; } @@ -58,7 +60,8 @@ class StreamCollection : public StreamCollectionInterface { virtual MediaStreamTrackInterface* FindVideoTrack(const std::string& id) { for (size_t i = 0; i < media_streams_.size(); ++i) { - MediaStreamTrackInterface* track = media_streams_[i]->FindVideoTrack(id); + MediaStreamTrackInterface* track = + media_streams_[i]->FindVideoTrack(id).get(); if (track) { return track; } @@ -66,13 +69,13 @@ class StreamCollection : public StreamCollectionInterface { return NULL; } - void AddStream(MediaStreamInterface* stream) { + void AddStream(rtc::scoped_refptr stream) { for (StreamVector::iterator it = media_streams_.begin(); it != media_streams_.end(); ++it) { if ((*it)->id().compare(stream->id()) == 0) return; } - media_streams_.push_back(stream); + media_streams_.push_back(std::move(stream)); } void RemoveStream(MediaStreamInterface* remove_stream) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.cc b/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.cc index e68f2f7a52..c5d2daa9a6 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.cc @@ -12,7 +12,7 @@ #include #include -#include +#include #include #include "api/media_types.h" @@ -38,8 +38,8 @@ const V* FindAddressOrNull(const std::map& map, const K& key) { } void GetAudioAndVideoTrackBySsrc( - const std::vector>& rtp_senders, - const std::vector>& rtp_receivers, + rtc::ArrayView> rtp_senders, + rtc::ArrayView> rtp_receivers, std::map* local_audio_track_by_ssrc, std::map* local_video_track_by_ssrc, std::map* remote_audio_track_by_ssrc, @@ -52,7 +52,7 @@ void GetAudioAndVideoTrackBySsrc( RTC_DCHECK(remote_video_track_by_ssrc->empty()); for (const auto& rtp_sender : rtp_senders) { cricket::MediaType media_type = rtp_sender->media_type(); - MediaStreamTrackInterface* track = rtp_sender->track(); + MediaStreamTrackInterface* track = rtp_sender->track().get(); if (!track) { continue; } @@ -74,7 +74,7 @@ void 
GetAudioAndVideoTrackBySsrc( } for (const auto& rtp_receiver : rtp_receivers) { cricket::MediaType media_type = rtp_receiver->media_type(); - MediaStreamTrackInterface* track = rtp_receiver->track(); + MediaStreamTrackInterface* track = rtp_receiver->track().get(); RTC_DCHECK(track); RtpParameters params = rtp_receiver->GetParameters(); for (const RtpEncodingParameters& encoding : params.encodings) { @@ -104,14 +104,18 @@ void GetAudioAndVideoTrackBySsrc( } // namespace -TrackMediaInfoMap::TrackMediaInfoMap( - std::unique_ptr voice_media_info, - std::unique_ptr video_media_info, - const std::vector>& rtp_senders, - const std::vector>& rtp_receivers) - : voice_media_info_(std::move(voice_media_info)), - video_media_info_(std::move(video_media_info)) { +TrackMediaInfoMap::TrackMediaInfoMap() = default; + +void TrackMediaInfoMap::Initialize( + absl::optional voice_media_info, + absl::optional video_media_info, + rtc::ArrayView> rtp_senders, + rtc::ArrayView> rtp_receivers) { rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + RTC_DCHECK(!is_initialized_); + is_initialized_ = true; + voice_media_info_ = std::move(voice_media_info); + video_media_info_ = std::move(video_media_info); std::map local_audio_track_by_ssrc; std::map local_video_track_by_ssrc; @@ -126,13 +130,13 @@ TrackMediaInfoMap::TrackMediaInfoMap( &unsignaled_video_track); for (const auto& sender : rtp_senders) { - attachment_id_by_track_[sender->track()] = sender->AttachmentId(); + attachment_id_by_track_[sender->track().get()] = sender->AttachmentId(); } for (const auto& receiver : rtp_receivers) { - attachment_id_by_track_[receiver->track()] = receiver->AttachmentId(); + attachment_id_by_track_[receiver->track().get()] = receiver->AttachmentId(); } - if (voice_media_info_) { + if (voice_media_info_.has_value()) { for (auto& sender_info : voice_media_info_->senders) { AudioTrackInterface* associated_track = FindValueOrNull(local_audio_track_by_ssrc, sender_info.ssrc()); @@ -167,7 +171,7 @@ TrackMediaInfoMap::TrackMediaInfoMap( voice_info_by_receiver_ssrc_[receiver_info.ssrc()] = &receiver_info; } } - if (video_media_info_) { + if (video_media_info_.has_value()) { for (auto& sender_info : video_media_info_->senders) { std::set ssrcs; ssrcs.insert(sender_info.ssrc()); @@ -224,66 +228,79 @@ TrackMediaInfoMap::TrackMediaInfoMap( const std::vector* TrackMediaInfoMap::GetVoiceSenderInfos( const AudioTrackInterface& local_audio_track) const { + RTC_DCHECK(is_initialized_); return FindAddressOrNull(voice_infos_by_local_track_, &local_audio_track); } const cricket::VoiceReceiverInfo* TrackMediaInfoMap::GetVoiceReceiverInfo( const AudioTrackInterface& remote_audio_track) const { + RTC_DCHECK(is_initialized_); return FindValueOrNull(voice_info_by_remote_track_, &remote_audio_track); } const std::vector* TrackMediaInfoMap::GetVideoSenderInfos( const VideoTrackInterface& local_video_track) const { + RTC_DCHECK(is_initialized_); return FindAddressOrNull(video_infos_by_local_track_, &local_video_track); } const cricket::VideoReceiverInfo* TrackMediaInfoMap::GetVideoReceiverInfo( const VideoTrackInterface& remote_video_track) const { + RTC_DCHECK(is_initialized_); return FindValueOrNull(video_info_by_remote_track_, &remote_video_track); } const cricket::VoiceSenderInfo* TrackMediaInfoMap::GetVoiceSenderInfoBySsrc( uint32_t ssrc) const { + RTC_DCHECK(is_initialized_); return FindValueOrNull(voice_info_by_sender_ssrc_, ssrc); } const cricket::VoiceReceiverInfo* TrackMediaInfoMap::GetVoiceReceiverInfoBySsrc( uint32_t ssrc) const { + 
RTC_DCHECK(is_initialized_); return FindValueOrNull(voice_info_by_receiver_ssrc_, ssrc); } const cricket::VideoSenderInfo* TrackMediaInfoMap::GetVideoSenderInfoBySsrc( uint32_t ssrc) const { + RTC_DCHECK(is_initialized_); return FindValueOrNull(video_info_by_sender_ssrc_, ssrc); } const cricket::VideoReceiverInfo* TrackMediaInfoMap::GetVideoReceiverInfoBySsrc( uint32_t ssrc) const { + RTC_DCHECK(is_initialized_); return FindValueOrNull(video_info_by_receiver_ssrc_, ssrc); } rtc::scoped_refptr TrackMediaInfoMap::GetAudioTrack( const cricket::VoiceSenderInfo& voice_sender_info) const { + RTC_DCHECK(is_initialized_); return FindValueOrNull(audio_track_by_sender_info_, &voice_sender_info); } rtc::scoped_refptr TrackMediaInfoMap::GetAudioTrack( const cricket::VoiceReceiverInfo& voice_receiver_info) const { + RTC_DCHECK(is_initialized_); return FindValueOrNull(audio_track_by_receiver_info_, &voice_receiver_info); } rtc::scoped_refptr TrackMediaInfoMap::GetVideoTrack( const cricket::VideoSenderInfo& video_sender_info) const { + RTC_DCHECK(is_initialized_); return FindValueOrNull(video_track_by_sender_info_, &video_sender_info); } rtc::scoped_refptr TrackMediaInfoMap::GetVideoTrack( const cricket::VideoReceiverInfo& video_receiver_info) const { + RTC_DCHECK(is_initialized_); return FindValueOrNull(video_track_by_receiver_info_, &video_receiver_info); } absl::optional TrackMediaInfoMap::GetAttachmentIdByTrack( const MediaStreamTrackInterface* track) const { + RTC_DCHECK(is_initialized_); auto it = attachment_id_by_track_.find(track); return it != attachment_id_by_track_.end() ? absl::optional(it->second) : absl::nullopt; diff --git a/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.h b/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.h index c8c6da2701..5a24aaad2b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.h +++ b/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.h @@ -19,6 +19,7 @@ #include #include "absl/types/optional.h" +#include "api/array_view.h" #include "api/media_stream_interface.h" #include "api/scoped_refptr.h" #include "media/base/media_channel.h" @@ -38,20 +39,34 @@ namespace webrtc { // |[Voice/Video][Sender/Receiver]Info| has statistical information for a set of // SSRCs. Looking at the RTP senders and receivers uncovers the track <-> info // relationships, which this class does. +// +// In the spec, "track" attachment stats have been made obsolete, and in Unified +// Plan there is just one sender and one receiver per transceiver, so we may be +// able to simplify/delete this class. +// TODO(https://crbug.com/webrtc/14175): Simplify or delete this class when +// "track" stats have been deleted. +// TODO(https://crbug.com/webrtc/13528): Simplify or delete this class when +// Plan B is gone from the native library (already gone for Chrome). class TrackMediaInfoMap { public: - TrackMediaInfoMap( - std::unique_ptr voice_media_info, - std::unique_ptr video_media_info, - const std::vector>& rtp_senders, - const std::vector>& - rtp_receivers); + TrackMediaInfoMap(); + + // Takes ownership of the "infos". Does not affect the lifetime of the senders + // or receivers, but TrackMediaInfoMap will keep their associated tracks alive + // through reference counting until the map is destroyed. 
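// --- Illustrative sketch (not part of the patch): the two-phase-initialization
// guard used by TrackMediaInfoMap above. A default-constructed map is unusable
// until Initialize() runs once; every accessor asserts the flag so misuse fails
// fast in debug builds. Plain C++ and assert() stand in here for the RTC_DCHECK
// macros and absl::optional used in the real code.
#include <cassert>
#include <optional>
#include <string>
#include <utility>

class LazyInfoMap {
 public:
  LazyInfoMap() = default;

  // May be called exactly once, after the data it needs becomes available.
  void Initialize(std::optional<std::string> voice_info) {
    assert(!is_initialized_);
    is_initialized_ = true;
    voice_info_ = std::move(voice_info);
  }

  const std::optional<std::string>& voice_info() const {
    assert(is_initialized_);  // Catches reads that race ahead of Initialize().
    return voice_info_;
  }

 private:
  bool is_initialized_ = false;
  std::optional<std::string> voice_info_;
};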
+ void Initialize( + absl::optional voice_media_info, + absl::optional video_media_info, + rtc::ArrayView> rtp_senders, + rtc::ArrayView> rtp_receivers); - const cricket::VoiceMediaInfo* voice_media_info() const { - return voice_media_info_.get(); + const absl::optional& voice_media_info() const { + RTC_DCHECK(is_initialized_); + return voice_media_info_; } - const cricket::VideoMediaInfo* video_media_info() const { - return video_media_info_.get(); + const absl::optional& video_media_info() const { + RTC_DCHECK(is_initialized_); + return video_media_info_; } const std::vector* GetVoiceSenderInfos( @@ -87,12 +102,13 @@ class TrackMediaInfoMap { const MediaStreamTrackInterface* track) const; private: - absl::optional voice_mid_; - absl::optional video_mid_; - std::unique_ptr voice_media_info_; - std::unique_ptr video_media_info_; + bool is_initialized_ = false; + absl::optional voice_media_info_; + absl::optional video_media_info_; // These maps map tracks (identified by a pointer) to their corresponding info // object of the correct kind. One track can map to multiple info objects. + // Known tracks are guaranteed to be alive because they are also stored as + // entries in the reverse maps below. std::map> voice_infos_by_local_track_; std::map @@ -103,7 +119,8 @@ class TrackMediaInfoMap { video_info_by_remote_track_; // These maps map info objects to their corresponding tracks. They are always // the inverse of the maps above. One info object always maps to only one - // track. + // track. The use of scoped_refptr<> here ensures the tracks outlive + // TrackMediaInfoMap. std::map> audio_track_by_sender_info_; diff --git a/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.cc b/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.cc index 235c9af036..250dfbc9e2 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.cc @@ -10,6 +10,8 @@ #include "pc/transceiver_list.h" +#include + #include "rtc_base/checks.h" namespace webrtc { @@ -29,7 +31,7 @@ void TransceiverStableState::SetMSectionIfUnset( } } -void TransceiverStableState::SetRemoteStreamIdsIfUnset( +void TransceiverStableState::SetRemoteStreamIds( const std::vector& ids) { if (!remote_stream_ids_.has_value()) { remote_stream_ids_ = ids; diff --git a/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.h b/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.h index 568c9c7e7a..848ccc2c3b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.h +++ b/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.h @@ -43,9 +43,13 @@ class TransceiverStableState { void set_newly_created(); void SetMSectionIfUnset(absl::optional mid, absl::optional mline_index); - void SetRemoteStreamIdsIfUnset(const std::vector& ids); + void SetRemoteStreamIds(const std::vector& ids); void SetInitSendEncodings( const std::vector& encodings); + void SetFiredDirection( + absl::optional fired_direction) { + fired_direction_ = fired_direction; + } absl::optional mid() const { return mid_; } absl::optional mline_index() const { return mline_index_; } absl::optional> remote_stream_ids() const { @@ -57,6 +61,13 @@ class TransceiverStableState { } bool has_m_section() const { return has_m_section_; } bool newly_created() const { return newly_created_; } + bool did_set_fired_direction() const { return fired_direction_.has_value(); } + // Because fired_direction() is nullable, did_set_fired_direction() is used to + // distinguish beteen "no value" and "null value". 
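// --- Illustrative sketch (not part of the patch): why Initialize() above takes
// rtc::ArrayView<> instead of const std::vector<>&. ArrayView is a non-owning
// (pointer, size) view, so callers can pass a vector, a std::array, or a raw
// pointer plus length without copying and without tying the callee to one
// container type. The SumSsrcs() helper is hypothetical, used only to show the
// call sites.
#include <array>
#include <cstdint>
#include <vector>

#include "api/array_view.h"

uint64_t SumSsrcs(rtc::ArrayView<const uint32_t> ssrcs) {
  uint64_t sum = 0;
  for (uint32_t ssrc : ssrcs)
    sum += ssrc;
  return sum;
}

void Demo() {
  std::vector<uint32_t> from_vector = {1, 2, 3};
  std::array<uint32_t, 2> from_array = {4, 5};
  uint32_t raw[] = {6};
  SumSsrcs(from_vector);                             // view over the vector
  SumSsrcs(from_array);                              // works for std::array too
  SumSsrcs(rtc::ArrayView<const uint32_t>(raw, 1));  // pointer + size
}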
+ absl::optional fired_direction() const { + RTC_DCHECK(did_set_fired_direction()); + return fired_direction_.value(); + } private: absl::optional mid_; @@ -71,6 +82,9 @@ class TransceiverStableState { // description to track potential need for removing transceiver during // rollback. bool newly_created_ = false; + // `fired_direction_` is nullable, so an optional of an optional is used to + // distinguish between null and not set (sorry if this hurts your eyes). + absl::optional> fired_direction_; }; // This class encapsulates the active list of transceivers on a diff --git a/TMessagesProj/jni/voip/webrtc/pc/transport_stats.h b/TMessagesProj/jni/voip/webrtc/pc/transport_stats.h index 2f43d45808..e554385954 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/transport_stats.h +++ b/TMessagesProj/jni/voip/webrtc/pc/transport_stats.h @@ -31,6 +31,7 @@ struct TransportChannelStats { int ssl_version_bytes = 0; int srtp_crypto_suite = rtc::kSrtpInvalidCryptoSuite; int ssl_cipher_suite = rtc::kTlsNullWithNullNull; + absl::optional dtls_role; webrtc::DtlsTransportState dtls_state = webrtc::DtlsTransportState::kNew; IceTransportStats ice_transport_stats; }; diff --git a/TMessagesProj/jni/voip/webrtc/pc/used_ids.h b/TMessagesProj/jni/voip/webrtc/pc/used_ids.h index e88927aaf2..1236a786df 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/used_ids.h +++ b/TMessagesProj/jni/voip/webrtc/pc/used_ids.h @@ -52,8 +52,7 @@ class UsedIds { if (IsIdUsed(original_id)) { new_id = FindUnusedId(); - RTC_LOG(LS_WARNING) << "Duplicate id found. Reassigning from " - << original_id << " to " << new_id; + // Duplicate id found. Reassign from the original id to the new. idstruct->id = new_id; } SetIdUsed(new_id); diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc index 8db4d9f02f..098ffde7cd 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc @@ -12,14 +12,13 @@ #include +#include #include #include #include "api/video/recordable_encoded_frame.h" -#include "api/video_track_source_proxy_factory.h" #include "pc/video_track.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" namespace webrtc { @@ -41,20 +40,15 @@ VideoRtpReceiver::VideoRtpReceiver( track_(VideoTrackProxyWithInternal::Create( rtc::Thread::Current(), worker_thread, - VideoTrack::Create(receiver_id, - CreateVideoTrackSourceProxy(rtc::Thread::Current(), - worker_thread, - source_), - worker_thread))), + VideoTrack::Create(receiver_id, source_, worker_thread))), attachment_id_(GenerateUniqueId()) { RTC_DCHECK(worker_thread_); SetStreams(streams); - RTC_DCHECK_EQ(source_->state(), MediaSourceInterface::kLive); + RTC_DCHECK_EQ(source_->state(), MediaSourceInterface::kInitializing); } VideoRtpReceiver::~VideoRtpReceiver() { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - RTC_DCHECK(stopped_); RTC_DCHECK(!media_channel_); } @@ -114,91 +108,65 @@ void VideoRtpReceiver::SetDepacketizerToDecoderFrameTransformer( void VideoRtpReceiver::Stop() { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - // TODO(deadbeef): Need to do more here to fully stop receiving packets. 
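// --- Illustrative sketch (not part of the patch): the "optional of an optional"
// trick used for fired_direction_ above. The outer optional answers "was a value
// ever set?", the inner one is the value itself, which may legitimately be null.
// std::optional and a hypothetical Direction enum stand in for absl::optional
// and RtpTransceiverDirection.
#include <cassert>
#include <optional>

enum class Direction { kSendRecv, kRecvOnly, kInactive };

class StableState {
 public:
  void SetFiredDirection(std::optional<Direction> fired_direction) {
    fired_direction_ = fired_direction;  // Records "null" explicitly as well.
  }
  bool did_set_fired_direction() const { return fired_direction_.has_value(); }
  std::optional<Direction> fired_direction() const {
    assert(did_set_fired_direction());
    return *fired_direction_;
  }

 private:
  // nullopt        -> never set
  // optional(null) -> explicitly set to "no direction"
  // optional(dir)  -> set to a concrete direction
  std::optional<std::optional<Direction>> fired_direction_ = std::nullopt;
};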
- - if (!stopped_) { - source_->SetState(MediaSourceInterface::kEnded); - stopped_ = true; - } - - worker_thread_->Invoke(RTC_FROM_HERE, [&] { - RTC_DCHECK_RUN_ON(worker_thread_); - if (media_channel_) { - SetSink(nullptr); - SetMediaChannel_w(nullptr); - } - source_->ClearCallback(); - }); -} - -void VideoRtpReceiver::StopAndEndTrack() { - RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - Stop(); + source_->SetState(MediaSourceInterface::kEnded); track_->internal()->set_ended(); } void VideoRtpReceiver::RestartMediaChannel(absl::optional ssrc) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - - // `stopped_` will be `true` on construction. RestartMediaChannel - // can in this case function like "ensure started" and flip `stopped_` - // to false. - + MediaSourceInterface::SourceState state = source_->state(); // TODO(tommi): Can we restart the media channel without blocking? - bool ok = worker_thread_->Invoke(RTC_FROM_HERE, [&, was_stopped = - stopped_] { + worker_thread_->BlockingCall([&] { RTC_DCHECK_RUN_ON(worker_thread_); - if (!media_channel_) { - // Ignore further negotiations if we've already been stopped and don't - // have an associated media channel. - RTC_DCHECK(was_stopped); - return false; // Can't restart. - } - - if (!was_stopped && ssrc_ == ssrc) { - // Already running with that ssrc. - return true; - } + RestartMediaChannel_w(std::move(ssrc), state); + }); + source_->SetState(MediaSourceInterface::kLive); +} - // Disconnect from the previous ssrc. - if (!was_stopped) { - SetSink(nullptr); - } +void VideoRtpReceiver::RestartMediaChannel_w( + absl::optional ssrc, + MediaSourceInterface::SourceState state) { + RTC_DCHECK_RUN_ON(worker_thread_); + if (!media_channel_) { + return; // Can't restart. + } - bool encoded_sink_enabled = saved_encoded_sink_enabled_; - SetEncodedSinkEnabled(false); + const bool encoded_sink_enabled = saved_encoded_sink_enabled_; - // Set up the new ssrc. - ssrc_ = std::move(ssrc); - SetSink(source_->sink()); - if (encoded_sink_enabled) { - SetEncodedSinkEnabled(true); - } + if (state != MediaSourceInterface::kInitializing) { + if (ssrc == ssrc_) + return; - if (frame_transformer_ && media_channel_) { - media_channel_->SetDepacketizerToDecoderFrameTransformer( - ssrc_.value_or(0), frame_transformer_); - } + // Disconnect from a previous ssrc. + SetSink(nullptr); - if (media_channel_ && ssrc_) { - if (frame_decryptor_) { - media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_); - } + if (encoded_sink_enabled) + SetEncodedSinkEnabled(false); + } - media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); - } + // Set up the new ssrc. 
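// --- Illustrative sketch (not part of the patch): the source-state lifecycle the
// changes above rely on. A source now starts out "initializing" (see the
// VideoTrackSource change later in this patch), is flipped to "live" only after
// a media channel has been configured on the worker thread, and to "ended" in
// Stop(). This is a simplified stand-in for MediaSourceInterface::SourceState.
enum class SourceState { kInitializing, kLive, kEnded };

class TrackSource {
 public:
  SourceState state() const { return state_; }
  void SetState(SourceState new_state) {
    if (state_ != new_state) {
      state_ = new_state;
      // The real implementation notifies observers (FireOnChanged()) here.
    }
  }

 private:
  SourceState state_ = SourceState::kInitializing;
};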
+ ssrc_ = std::move(ssrc); + SetSink(source_->sink()); + if (encoded_sink_enabled) { + SetEncodedSinkEnabled(true); + } - return true; - }); + if (frame_transformer_ && media_channel_) { + media_channel_->SetDepacketizerToDecoderFrameTransformer( + ssrc_.value_or(0), frame_transformer_); + } - if (!ok) - return; + if (media_channel_ && ssrc_) { + if (frame_decryptor_) { + media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_); + } - stopped_ = false; + media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); + } } -// RTC_RUN_ON(worker_thread_) void VideoRtpReceiver::SetSink(rtc::VideoSinkInterface* sink) { + RTC_DCHECK_RUN_ON(worker_thread_); if (ssrc_) { media_channel_->SetSink(*ssrc_, sink); } else { @@ -246,7 +214,7 @@ void VideoRtpReceiver::SetStreams( } } if (removed) { - existing_stream->RemoveTrack(track_); + existing_stream->RemoveTrack(video_track()); } } // Add remote track to any streams that are new. @@ -260,7 +228,7 @@ void VideoRtpReceiver::SetStreams( } } if (added) { - stream->AddTrack(track_); + stream->AddTrack(video_track()); } } streams_ = streams; @@ -284,24 +252,22 @@ void VideoRtpReceiver::SetJitterBufferMinimumDelay( } void VideoRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) { - RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(media_channel == nullptr || media_channel->media_type() == media_type()); - if (stopped_ && !media_channel) - return; - - worker_thread_->Invoke(RTC_FROM_HERE, [&] { - RTC_DCHECK_RUN_ON(worker_thread_); - SetMediaChannel_w(media_channel); - }); + SetMediaChannel_w(media_channel); } -// RTC_RUN_ON(worker_thread_) void VideoRtpReceiver::SetMediaChannel_w(cricket::MediaChannel* media_channel) { + RTC_DCHECK_RUN_ON(worker_thread_); if (media_channel == media_channel_) return; + if (!media_channel) { + SetSink(nullptr); + } + bool encoded_sink_enabled = saved_encoded_sink_enabled_; if (encoded_sink_enabled && media_channel_) { // Turn off the old sink, if any. 
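// --- Illustrative sketch (not part of the patch): the wrapper/"_w" split used by
// RestartMediaChannel above. The public method stays on the signaling thread and
// marshals the actual work to the worker thread with one
// rtc::Thread::BlockingCall(); the worker-only helper carries an RTC_RUN_ON
// annotation so the thread checks hold. Class and member names are hypothetical.
#include "api/sequence_checker.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_annotations.h"

class Receiver {
 public:
  explicit Receiver(rtc::Thread* worker_thread)
      : worker_thread_(worker_thread) {}

  void Restart() {
    RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
    // Gather signaling-thread state here, then block until the worker side has
    // been reconfigured.
    worker_thread_->BlockingCall([&] {
      RTC_DCHECK_RUN_ON(worker_thread_);
      Restart_w();
    });
  }

 private:
  void Restart_w() RTC_RUN_ON(worker_thread_) {
    // Touch worker-thread-only state (media channel, sinks, ...) here.
  }

  webrtc::SequenceChecker signaling_thread_checker_;
  rtc::Thread* const worker_thread_;
};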
@@ -313,7 +279,7 @@ void VideoRtpReceiver::SetMediaChannel_w(cricket::MediaChannel* media_channel) { if (media_channel_) { if (saved_generate_keyframe_) { // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC - media_channel_->GenerateKeyFrame(ssrc_.value_or(0)); + media_channel_->RequestRecvKeyFrame(ssrc_.value_or(0)); saved_generate_keyframe_ = false; } if (encoded_sink_enabled) { @@ -324,6 +290,9 @@ void VideoRtpReceiver::SetMediaChannel_w(cricket::MediaChannel* media_channel) { ssrc_.value_or(0), frame_transformer_); } } + + if (!media_channel) + source_->ClearCallback(); } void VideoRtpReceiver::NotifyFirstPacketReceived() { @@ -341,6 +310,19 @@ std::vector VideoRtpReceiver::GetSources() const { return media_channel_->GetSources(*ssrc_); } +void VideoRtpReceiver::SetupMediaChannel(absl::optional ssrc, + cricket::MediaChannel* media_channel) { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + RTC_DCHECK(media_channel); + MediaSourceInterface::SourceState state = source_->state(); + worker_thread_->BlockingCall([&] { + RTC_DCHECK_RUN_ON(worker_thread_); + SetMediaChannel_w(media_channel); + RestartMediaChannel_w(std::move(ssrc), state); + }); + source_->SetState(MediaSourceInterface::kLive); +} + void VideoRtpReceiver::OnGenerateKeyFrame() { RTC_DCHECK_RUN_ON(worker_thread_); if (!media_channel_) { @@ -349,7 +331,7 @@ void VideoRtpReceiver::OnGenerateKeyFrame() { return; } // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC - media_channel_->GenerateKeyFrame(ssrc_.value_or(0)); + media_channel_->RequestRecvKeyFrame(ssrc_.value_or(0)); // We need to remember to request generation of a new key frame if the media // channel changes, because there's no feedback whether the keyframe // generation has completed on the channel. @@ -364,8 +346,8 @@ void VideoRtpReceiver::OnEncodedSinkEnabled(bool enable) { saved_encoded_sink_enabled_ = enable; } -// RTC_RUN_ON(worker_thread_) void VideoRtpReceiver::SetEncodedSinkEnabled(bool enable) { + RTC_DCHECK_RUN_ON(worker_thread_); if (!media_channel_) return; diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h index b5381860b3..8b1f3c4140 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h @@ -36,7 +36,6 @@ #include "pc/rtp_receiver.h" #include "pc/video_rtp_track_source.h" #include "pc/video_track.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" @@ -88,7 +87,6 @@ class VideoRtpReceiver : public RtpReceiverInternal { // RtpReceiverInternal implementation. void Stop() override; - void StopAndEndTrack() override; void SetupMediaChannel(uint32_t ssrc) override; void SetupUnsignaledMediaChannel() override; uint32_t ssrc() const override; @@ -110,8 +108,17 @@ class VideoRtpReceiver : public RtpReceiverInternal { std::vector GetSources() const override; + // Combines SetMediaChannel, SetupMediaChannel and + // SetupUnsignaledMediaChannel. 
+ void SetupMediaChannel(absl::optional ssrc, + cricket::MediaChannel* media_channel); + private: - void RestartMediaChannel(absl::optional ssrc); + void RestartMediaChannel(absl::optional ssrc) + RTC_RUN_ON(&signaling_thread_checker_); + void RestartMediaChannel_w(absl::optional ssrc, + MediaSourceInterface::SourceState state) + RTC_RUN_ON(worker_thread_); void SetSink(rtc::VideoSinkInterface* sink) RTC_RUN_ON(worker_thread_); void SetMediaChannel_w(cricket::MediaChannel* media_channel) @@ -141,8 +148,6 @@ class VideoRtpReceiver : public RtpReceiverInternal { rtc::Thread* const worker_thread_; const std::string id_; - // See documentation for `stopped_` below for when a valid media channel - // has been assigned and when this pointer will be null. cricket::VideoMediaChannel* media_channel_ RTC_GUARDED_BY(worker_thread_) = nullptr; absl::optional ssrc_ RTC_GUARDED_BY(worker_thread_); @@ -152,15 +157,6 @@ class VideoRtpReceiver : public RtpReceiverInternal { const rtc::scoped_refptr> track_; std::vector> streams_ RTC_GUARDED_BY(&signaling_thread_checker_); - // `stopped` is state that's used on the signaling thread to indicate whether - // a valid `media_channel_` has been assigned and configured. When an instance - // of VideoRtpReceiver is initially created, `stopped_` is true and will - // remain true until either `SetupMediaChannel` or - // `SetupUnsignaledMediaChannel` is called after assigning a media channel. - // After that, `stopped_` will remain false until `Stop()` is called. - // Note, for checking the state of the class on the worker thread, - // check `media_channel_` instead, as that's the main worker thread state. - bool stopped_ RTC_GUARDED_BY(&signaling_thread_checker_) = true; RtpReceiverObserverInterface* observer_ RTC_GUARDED_BY(&signaling_thread_checker_) = nullptr; bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) = diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.h b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.h index 23a7cd224f..a9e43f6667 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.h @@ -20,7 +20,6 @@ #include "api/video/video_source_interface.h" #include "media/base/video_broadcaster.h" #include "pc/video_track_source.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" @@ -45,6 +44,9 @@ class VideoRtpTrackSource : public VideoTrackSource { explicit VideoRtpTrackSource(Callback* callback); + VideoRtpTrackSource(const VideoRtpTrackSource&) = delete; + VideoRtpTrackSource& operator=(const VideoRtpTrackSource&) = delete; + // Call before the object implementing Callback finishes it's destructor. No // more callbacks will be fired after completion. 
Must be called on the // worker thread @@ -83,8 +85,6 @@ class VideoRtpTrackSource : public VideoTrackSource { std::vector*> encoded_sinks_ RTC_GUARDED_BY(mu_); Callback* callback_ RTC_GUARDED_BY(worker_sequence_checker_); - - RTC_DISALLOW_COPY_AND_ASSIGN(VideoRtpTrackSource); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track.cc b/TMessagesProj/jni/voip/webrtc/pc/video_track.cc index d0246faa87..0bf8687af3 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_track.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track.cc @@ -10,24 +10,23 @@ #include "pc/video_track.h" -#include #include #include #include "api/notifier.h" #include "api/sequence_checker.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" -#include "rtc_base/ref_counted_object.h" namespace webrtc { -VideoTrack::VideoTrack(const std::string& label, - VideoTrackSourceInterface* video_source, - rtc::Thread* worker_thread) +VideoTrack::VideoTrack( + absl::string_view label, + rtc::scoped_refptr< + VideoTrackSourceProxyWithInternal> source, + rtc::Thread* worker_thread) : MediaStreamTrack(label), worker_thread_(worker_thread), - video_source_(video_source), + video_source_(std::move(source)), content_hint_(ContentHint::kNone) { RTC_DCHECK_RUN_ON(&signaling_thread_); // Detach the thread checker for VideoSourceBaseGuarded since we'll make calls @@ -53,14 +52,19 @@ void VideoTrack::AddOrUpdateSink(rtc::VideoSinkInterface* sink, RTC_DCHECK_RUN_ON(worker_thread_); VideoSourceBaseGuarded::AddOrUpdateSink(sink, wants); rtc::VideoSinkWants modified_wants = wants; - modified_wants.black_frames = !enabled(); - video_source_->AddOrUpdateSink(sink, modified_wants); + modified_wants.black_frames = !enabled_w_; + video_source_->internal()->AddOrUpdateSink(sink, modified_wants); } void VideoTrack::RemoveSink(rtc::VideoSinkInterface* sink) { RTC_DCHECK_RUN_ON(worker_thread_); VideoSourceBaseGuarded::RemoveSink(sink); - video_source_->RemoveSink(sink); + video_source_->internal()->RemoveSink(sink); +} + +void VideoTrack::RequestRefreshFrame() { + RTC_DCHECK_RUN_ON(worker_thread_); + video_source_->internal()->RequestRefreshFrame(); } VideoTrackSourceInterface* VideoTrack::GetSource() const { @@ -68,13 +72,17 @@ VideoTrackSourceInterface* VideoTrack::GetSource() const { return video_source_.get(); } +VideoTrackSourceInterface* VideoTrack::GetSourceInternal() const { + return video_source_->internal(); +} + VideoTrackInterface::ContentHint VideoTrack::content_hint() const { - RTC_DCHECK_RUN_ON(worker_thread_); + RTC_DCHECK_RUN_ON(&signaling_thread_); return content_hint_; } void VideoTrack::set_content_hint(ContentHint hint) { - RTC_DCHECK_RUN_ON(worker_thread_); + RTC_DCHECK_RUN_ON(&signaling_thread_); if (content_hint_ == hint) return; content_hint_ = hint; @@ -82,17 +90,29 @@ void VideoTrack::set_content_hint(ContentHint hint) { } bool VideoTrack::set_enabled(bool enable) { - RTC_DCHECK_RUN_ON(worker_thread_); - for (auto& sink_pair : sink_pairs()) { - rtc::VideoSinkWants modified_wants = sink_pair.wants; - modified_wants.black_frames = !enable; - video_source_->AddOrUpdateSink(sink_pair.sink, modified_wants); - } - return MediaStreamTrack::set_enabled(enable); + RTC_DCHECK_RUN_ON(&signaling_thread_); + + bool ret = MediaStreamTrack::set_enabled(enable); + + worker_thread_->BlockingCall([&]() { + RTC_DCHECK_RUN_ON(worker_thread_); + enabled_w_ = enable; + for (auto& sink_pair : sink_pairs()) { + rtc::VideoSinkWants modified_wants = sink_pair.wants; + modified_wants.black_frames = !enable; + 
video_source_->AddOrUpdateSink(sink_pair.sink, modified_wants); + } + }); + + return ret; } bool VideoTrack::enabled() const { - RTC_DCHECK_RUN_ON(worker_thread_); + if (worker_thread_->IsCurrent()) { + RTC_DCHECK_RUN_ON(worker_thread_); + return enabled_w_; + } + RTC_DCHECK_RUN_ON(&signaling_thread_); return MediaStreamTrack::enabled(); } @@ -103,22 +123,22 @@ MediaStreamTrackInterface::TrackState VideoTrack::state() const { void VideoTrack::OnChanged() { RTC_DCHECK_RUN_ON(&signaling_thread_); - worker_thread_->Invoke( - RTC_FROM_HERE, [this, state = video_source_->state()]() { - // TODO(tommi): Calling set_state() this way isn't ideal since we're - // currently blocking the signaling thread and set_state() may - // internally fire notifications via `FireOnChanged()` which may further - // amplify the blocking effect on the signaling thread. - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - set_state(state == MediaSourceInterface::kEnded ? kEnded : kLive); - }); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + MediaSourceInterface::SourceState state = video_source_->state(); + set_state(state == MediaSourceInterface::kEnded ? kEnded : kLive); } rtc::scoped_refptr VideoTrack::Create( - const std::string& id, - VideoTrackSourceInterface* source, + absl::string_view id, + rtc::scoped_refptr source, rtc::Thread* worker_thread) { - return rtc::make_ref_counted(id, source, worker_thread); + rtc::scoped_refptr< + VideoTrackSourceProxyWithInternal> + source_proxy = VideoTrackSourceProxy::Create( + rtc::Thread::Current(), worker_thread, std::move(source)); + + return rtc::make_ref_counted(id, std::move(source_proxy), + worker_thread); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track.h b/TMessagesProj/jni/voip/webrtc/pc/video_track.h index 49deaee76a..13a51c454b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_track.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track.h @@ -13,6 +13,7 @@ #include +#include "absl/types/optional.h" #include "api/media_stream_interface.h" #include "api/media_stream_track.h" #include "api/scoped_refptr.h" @@ -21,23 +22,30 @@ #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "media/base/video_source_base.h" +#include "pc/video_track_source_proxy.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" namespace webrtc { +// TODO(tommi): Instead of inheriting from `MediaStreamTrack<>`, implement the +// properties directly in this class. `MediaStreamTrack` doesn't guard against +// conflicting access, so we'd need to override those methods anyway in this +// class in order to make sure things are correctly checked. 
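// --- Illustrative sketch (not part of the patch): the cached-flag pattern behind
// enabled_w_ above. The signaling thread owns the authoritative value and pushes
// a copy to the worker thread whenever it changes, so worker-side readers never
// have to hop threads. Class and member names are hypothetical.
#include "api/sequence_checker.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_annotations.h"

class Toggle {
 public:
  explicit Toggle(rtc::Thread* worker_thread) : worker_thread_(worker_thread) {}

  void set_enabled(bool enable) {
    RTC_DCHECK_RUN_ON(&signaling_thread_);
    enabled_ = enable;
    // Mirror the new value into the worker-thread copy.
    worker_thread_->BlockingCall([&] {
      RTC_DCHECK_RUN_ON(worker_thread_);
      enabled_w_ = enable;
    });
  }

  bool enabled() const {
    if (worker_thread_->IsCurrent()) {
      RTC_DCHECK_RUN_ON(worker_thread_);
      return enabled_w_;  // Cheap read, no cross-thread call.
    }
    RTC_DCHECK_RUN_ON(&signaling_thread_);
    return enabled_;
  }

 private:
  webrtc::SequenceChecker signaling_thread_;
  rtc::Thread* const worker_thread_;
  bool enabled_ RTC_GUARDED_BY(&signaling_thread_) = true;
  bool enabled_w_ RTC_GUARDED_BY(worker_thread_) = true;
};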
class VideoTrack : public MediaStreamTrack, public rtc::VideoSourceBaseGuarded, public ObserverInterface { public: static rtc::scoped_refptr Create( - const std::string& label, - VideoTrackSourceInterface* source, + absl::string_view label, + rtc::scoped_refptr source, rtc::Thread* worker_thread); void AddOrUpdateSink(rtc::VideoSinkInterface* sink, const rtc::VideoSinkWants& wants) override; void RemoveSink(rtc::VideoSinkInterface* sink) override; + void RequestRefreshFrame() override; VideoTrackSourceInterface* GetSource() const override; ContentHint content_hint() const override; @@ -47,10 +55,15 @@ class VideoTrack : public MediaStreamTrack, MediaStreamTrackInterface::TrackState state() const override; std::string kind() const override; + // Direct access to the non-proxied source object for internal implementation. + VideoTrackSourceInterface* GetSourceInternal() const; + protected: - VideoTrack(const std::string& id, - VideoTrackSourceInterface* video_source, - rtc::Thread* worker_thread); + VideoTrack( + absl::string_view id, + rtc::scoped_refptr< + VideoTrackSourceProxyWithInternal> source, + rtc::Thread* worker_thread); ~VideoTrack(); private: @@ -59,8 +72,15 @@ class VideoTrack : public MediaStreamTrack, RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker signaling_thread_; rtc::Thread* const worker_thread_; - const rtc::scoped_refptr video_source_; - ContentHint content_hint_ RTC_GUARDED_BY(worker_thread_); + const rtc::scoped_refptr< + VideoTrackSourceProxyWithInternal> + video_source_; + ContentHint content_hint_ RTC_GUARDED_BY(&signaling_thread_); + // Cached `enabled` state for the worker thread. This is kept in sync with + // the state maintained on the signaling thread via set_enabled() but can + // be queried without blocking on the worker thread by callers that don't + // use an api proxy to call the `enabled()` method. 
+ bool enabled_w_ RTC_GUARDED_BY(worker_thread_) = true; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track_source.cc b/TMessagesProj/jni/voip/webrtc/pc/video_track_source.cc index d15eaaf43c..64e99cc064 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_track_source.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track_source.cc @@ -15,11 +15,12 @@ namespace webrtc { VideoTrackSource::VideoTrackSource(bool remote) - : state_(kLive), remote_(remote) { + : state_(kInitializing), remote_(remote) { worker_thread_checker_.Detach(); } void VideoTrackSource::SetState(SourceState new_state) { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); if (state_ != new_state) { state_ = new_state; FireOnChanged(); diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track_source.h b/TMessagesProj/jni/voip/webrtc/pc/video_track_source.h index 4a29381c4c..723b10d8f3 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_track_source.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track_source.h @@ -20,7 +20,10 @@ #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "media/base/media_channel.h" +#include "rtc_base/checks.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/system/rtc_export.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -31,7 +34,10 @@ class RTC_EXPORT VideoTrackSource : public Notifier { explicit VideoTrackSource(bool remote); void SetState(SourceState new_state); - SourceState state() const override { return state_; } + SourceState state() const override { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + return state_; + } bool remote() const override { return remote_; } bool is_screencast() const override { return false; } @@ -56,8 +62,9 @@ class RTC_EXPORT VideoTrackSource : public Notifier { virtual rtc::VideoSourceInterface* source() = 0; private: - SequenceChecker worker_thread_checker_; - SourceState state_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_thread_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_; + SourceState state_ RTC_GUARDED_BY(&signaling_thread_checker_); const bool remote_; }; diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.cc b/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.cc index 309c1f20f8..c3e95e23cc 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.cc @@ -11,7 +11,9 @@ #include "pc/video_track_source_proxy.h" #include "api/media_stream_interface.h" +#include "api/scoped_refptr.h" #include "api/video_track_source_proxy_factory.h" +#include "rtc_base/thread.h" namespace webrtc { @@ -19,7 +21,9 @@ rtc::scoped_refptr CreateVideoTrackSourceProxy( rtc::Thread* signaling_thread, rtc::Thread* worker_thread, VideoTrackSourceInterface* source) { - return VideoTrackSourceProxy::Create(signaling_thread, worker_thread, source); + return VideoTrackSourceProxy::Create( + signaling_thread, worker_thread, + rtc::scoped_refptr(source)); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.h b/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.h index 6e71bb1615..8500a98766 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.h @@ -11,7 +11,13 @@ #ifndef PC_VIDEO_TRACK_SOURCE_PROXY_H_ #define PC_VIDEO_TRACK_SOURCE_PROXY_H_ +#include "absl/types/optional.h" #include 
"api/media_stream_interface.h" +#include "api/video/recordable_encoded_frame.h" +#include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "api/video_track_source_constraints.h" #include "pc/proxy.h" namespace webrtc { @@ -21,6 +27,7 @@ namespace webrtc { // TODO(deadbeef): Move this to .cc file. What threads methods are called on is // an implementation detail. BEGIN_PROXY_MAP(VideoTrackSource) + PROXY_PRIMARY_THREAD_DESTRUCTOR() PROXY_CONSTMETHOD0(SourceState, state) BYPASS_PROXY_CONSTMETHOD0(bool, remote) @@ -32,6 +39,7 @@ PROXY_SECONDARY_METHOD2(void, rtc::VideoSinkInterface*, const rtc::VideoSinkWants&) PROXY_SECONDARY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface*) +PROXY_SECONDARY_METHOD0(void, RequestRefreshFrame) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) PROXY_CONSTMETHOD0(bool, SupportsEncodedOutput) diff --git a/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc b/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc index 3f06f307a4..39b16901a1 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc @@ -12,9 +12,9 @@ #include #include -#include #include +#include #include #include #include @@ -25,12 +25,15 @@ #include #include "absl/algorithm/container.h" +#include "absl/strings/ascii.h" +#include "absl/strings/match.h" #include "api/candidate.h" #include "api/crypto_params.h" #include "api/jsep_ice_candidate.h" #include "api/jsep_session_description.h" #include "api/media_types.h" // for RtpExtension +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" @@ -152,8 +155,6 @@ static const char kMediaStreamSemantic[] = "WMS"; static const char kSsrcAttributeMsid[] = "msid"; static const char kDefaultMsid[] = "default"; static const char kNoStreamMsid[] = "-"; -static const char kSsrcAttributeMslabel[] = "mslabel"; -static const char kSSrcAttributeLabel[] = "label"; static const char kAttributeSsrcGroup[] = "ssrc-group"; static const char kAttributeCrypto[] = "crypto"; static const char kAttributeCandidate[] = "candidate"; @@ -218,7 +219,6 @@ static const char kSdpDelimiterColonChar = ':'; static const char kSdpDelimiterSemicolon[] = ";"; static const char kSdpDelimiterSemicolonChar = ';'; static const char kSdpDelimiterSlashChar = '/'; -static const char kNewLine[] = "\n"; static const char kNewLineChar = '\n'; static const char kReturnChar = '\r'; static const char kLineBreak[] = "\r\n"; @@ -265,11 +265,6 @@ struct SsrcInfo { std::string cname; std::string stream_id; std::string track_id; - - // For backward compatibility. - // TODO(ronghuawu): Remove below 2 fields once all the clients support msid. 
- std::string label; - std::string mslabel; }; typedef std::vector SsrcInfoVec; typedef std::vector SsrcGroupVec; @@ -286,7 +281,7 @@ static void BuildRtpContentAttributes(const MediaContentDescription* media_desc, const cricket::MediaType media_type, int msid_signaling, std::string* message); -static void BuildRtpMap(const MediaContentDescription* media_desc, +static void BuildRtpmap(const MediaContentDescription* media_desc, const cricket::MediaType media_type, std::string* message); static void BuildCandidate(const std::vector& candidates, @@ -294,7 +289,7 @@ static void BuildCandidate(const std::vector& candidates, std::string* message); static void BuildIceOptions(const std::vector& transport_options, std::string* message); -static bool ParseSessionDescription(const std::string& message, +static bool ParseSessionDescription(absl::string_view message, size_t* pos, std::string* session_id, std::string* session_version, @@ -304,7 +299,7 @@ static bool ParseSessionDescription(const std::string& message, cricket::SessionDescription* desc, SdpParseError* error); static bool ParseMediaDescription( - const std::string& message, + absl::string_view message, const TransportDescription& session_td, const RtpHeaderExtensions& session_extmaps, size_t* pos, @@ -313,10 +308,10 @@ static bool ParseMediaDescription( std::vector>* candidates, SdpParseError* error); static bool ParseContent( - const std::string& message, + absl::string_view message, const cricket::MediaType media_type, int mline_index, - const std::string& protocol, + absl::string_view protocol, const std::vector& payload_types, size_t* pos, std::string* content_name, @@ -326,54 +321,54 @@ static bool ParseContent( TransportDescription* transport, std::vector>* candidates, SdpParseError* error); -static bool ParseGroupAttribute(const std::string& line, +static bool ParseGroupAttribute(absl::string_view line, cricket::SessionDescription* desc, SdpParseError* error); -static bool ParseSsrcAttribute(const std::string& line, +static bool ParseSsrcAttribute(absl::string_view line, SsrcInfoVec* ssrc_infos, int* msid_signaling, SdpParseError* error); -static bool ParseSsrcGroupAttribute(const std::string& line, +static bool ParseSsrcGroupAttribute(absl::string_view line, SsrcGroupVec* ssrc_groups, SdpParseError* error); -static bool ParseCryptoAttribute(const std::string& line, +static bool ParseCryptoAttribute(absl::string_view line, MediaContentDescription* media_desc, SdpParseError* error); -static bool ParseRtpmapAttribute(const std::string& line, +static bool ParseRtpmapAttribute(absl::string_view line, const cricket::MediaType media_type, const std::vector& payload_types, MediaContentDescription* media_desc, SdpParseError* error); -static bool ParseFmtpAttributes(const std::string& line, +static bool ParseFmtpAttributes(absl::string_view line, const cricket::MediaType media_type, MediaContentDescription* media_desc, SdpParseError* error); -static bool ParseFmtpParam(const std::string& line, +static bool ParseFmtpParam(absl::string_view line, std::string* parameter, std::string* value, SdpParseError* error); -static bool ParsePacketizationAttribute(const std::string& line, +static bool ParsePacketizationAttribute(absl::string_view line, const cricket::MediaType media_type, MediaContentDescription* media_desc, SdpParseError* error); -static bool ParseRtcpFbAttribute(const std::string& line, +static bool ParseRtcpFbAttribute(absl::string_view line, const cricket::MediaType media_type, MediaContentDescription* media_desc, SdpParseError* 
error); -static bool ParseIceOptions(const std::string& line, +static bool ParseIceOptions(absl::string_view line, std::vector* transport_options, SdpParseError* error); -static bool ParseExtmap(const std::string& line, +static bool ParseExtmap(absl::string_view line, RtpExtension* extmap, SdpParseError* error); static bool ParseFingerprintAttribute( - const std::string& line, + absl::string_view line, std::unique_ptr* fingerprint, SdpParseError* error); -static bool ParseDtlsSetup(const std::string& line, +static bool ParseDtlsSetup(absl::string_view line, cricket::ConnectionRole* role, SdpParseError* error); -static bool ParseMsidAttribute(const std::string& line, +static bool ParseMsidAttribute(absl::string_view line, std::vector* stream_ids, std::string* track_id, SdpParseError* error); @@ -406,7 +401,7 @@ static bool ParseFailed(absl::string_view message, SdpParseError* error) { // Get the first line of `message` from `line_start`. absl::string_view first_line; - size_t line_end = message.find(kNewLine, line_start); + size_t line_end = message.find(kNewLineChar, line_start); if (line_end != std::string::npos) { if (line_end > 0 && (message.at(line_end - 1) == kReturnChar)) { --line_end; @@ -464,7 +459,7 @@ static bool ParseFailedExpectMinFieldNum(absl::string_view line, // `line` is the failing line. The failure is due to the fact that it failed to // get the value of `attribute`. static bool ParseFailedGetValue(absl::string_view line, - const std::string& attribute, + absl::string_view attribute, SdpParseError* error) { rtc::StringBuilder description; description << "Failed to get the value of attribute: " << attribute; @@ -486,30 +481,34 @@ static bool ParseFailedExpectLine(absl::string_view message, return ParseFailed(message, line_start, description.Release(), error); } -static bool AddLine(const std::string& line, std::string* message) { +static bool AddLine(absl::string_view line, std::string* message) { if (!message) return false; - message->append(line); + message->append(line.data(), line.size()); message->append(kLineBreak); return true; } -static bool GetLine(const std::string& message, - size_t* pos, - std::string* line) { - size_t line_begin = *pos; - size_t line_end = message.find(kNewLine, line_begin); - if (line_end == std::string::npos) { - return false; +// Trim return character, if any. +static absl::string_view TrimReturnChar(absl::string_view line) { + if (!line.empty() && line.back() == kReturnChar) { + line.remove_suffix(1); } - // Update the new start position - *pos = line_end + 1; - if (line_end > 0 && (message.at(line_end - 1) == kReturnChar)) { - --line_end; + return line; +} + +// Gets line of `message` starting at `pos`, and checks overall SDP syntax. On +// success, advances `pos` to the next line. +static absl::optional GetLine(absl::string_view message, + size_t* pos) { + size_t line_end = message.find(kNewLineChar, *pos); + if (line_end == absl::string_view::npos) { + return absl::nullopt; } - *line = message.substr(line_begin, (line_end - line_begin)); - const char* cline = line->c_str(); + absl::string_view line = + TrimReturnChar(message.substr(*pos, line_end - *pos)); + // RFC 4566 // An SDP session description consists of a number of lines of text of // the form: @@ -523,31 +522,30 @@ static bool GetLine(const std::string& message, // // If a session has no meaningful name, the value "s= " SHOULD be used // (i.e., a single space as the session name). 
- if (line->length() < 3 || !islower(cline[0]) || - cline[1] != kSdpDelimiterEqualChar || - (cline[0] != kLineTypeSessionName && - cline[2] == kSdpDelimiterSpaceChar)) { - *pos = line_begin; - return false; + if (line.length() < 3 || !islower(static_cast(line[0])) || + line[1] != kSdpDelimiterEqualChar || + (line[0] != kLineTypeSessionName && line[2] == kSdpDelimiterSpaceChar)) { + return absl::nullopt; } - return true; + *pos = line_end + 1; + return line; } // Init `os` to "`type`=`value`". static void InitLine(const char type, - const std::string& value, + absl::string_view value, rtc::StringBuilder* os) { os->Clear(); *os << std::string(1, type) << kSdpDelimiterEqual << value; } // Init `os` to "a=`attribute`". -static void InitAttrLine(const std::string& attribute, rtc::StringBuilder* os) { +static void InitAttrLine(absl::string_view attribute, rtc::StringBuilder* os) { InitLine(kLineTypeAttributes, attribute, os); } // Writes a SDP attribute line based on `attribute` and `value` to `message`. -static void AddAttributeLine(const std::string& attribute, +static void AddAttributeLine(absl::string_view attribute, int value, std::string* message) { rtc::StringBuilder os; @@ -556,37 +554,29 @@ static void AddAttributeLine(const std::string& attribute, AddLine(os.str(), message); } -static bool IsLineType(const std::string& message, +static bool IsLineType(absl::string_view message, const char type, size_t line_start) { if (message.size() < line_start + kLinePrefixLength) { return false; } - const char* cmessage = message.c_str(); - return (cmessage[line_start] == type && - cmessage[line_start + 1] == kSdpDelimiterEqualChar); + return (message[line_start] == type && + message[line_start + 1] == kSdpDelimiterEqualChar); } -static bool IsLineType(const std::string& line, const char type) { +static bool IsLineType(absl::string_view line, const char type) { return IsLineType(line, type, 0); } -static bool GetLineWithType(const std::string& message, - size_t* pos, - std::string* line, - const char type) { - if (!IsLineType(message, type, *pos)) { - return false; +static absl::optional +GetLineWithType(absl::string_view message, size_t* pos, const char type) { + if (IsLineType(message, type, *pos)) { + return GetLine(message, pos); } - - if (!GetLine(message, pos, line)) - return false; - - return true; + return absl::nullopt; } -static bool HasAttribute(const std::string& line, - const std::string& attribute) { +static bool HasAttribute(absl::string_view line, absl::string_view attribute) { if (line.compare(kLinePrefixLength, attribute.size(), attribute) == 0) { // Make sure that the match is not only a partial match. If length of // strings doesn't match, the next character of the line must be ':' or ' '. @@ -603,8 +593,8 @@ static bool HasAttribute(const std::string& line, } static bool AddSsrcLine(uint32_t ssrc_id, - const std::string& attribute, - const std::string& value, + absl::string_view attribute, + absl::string_view value, std::string* message) { // RFC 5576 // a=ssrc: : @@ -616,8 +606,8 @@ static bool AddSsrcLine(uint32_t ssrc_id, } // Get value only from :. -static bool GetValue(const std::string& message, - const std::string& attribute, +static bool GetValue(absl::string_view message, + absl::string_view attribute, std::string* value, SdpParseError* error) { std::string leftpart; @@ -626,16 +616,17 @@ static bool GetValue(const std::string& message, } // The left part should end with the expected attribute. 
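// --- Illustrative sketch (not part of the patch): the shape of the new GetLine()
// above, which returns absl::optional<absl::string_view> instead of filling a
// std::string out-parameter. The returned view points into the original message,
// so no per-line copies are made. std::optional and std::string_view stand in
// for the absl types.
#include <cstddef>
#include <optional>
#include <string_view>

std::optional<std::string_view> NextLine(std::string_view message, size_t* pos) {
  size_t line_end = message.find('\n', *pos);
  if (line_end == std::string_view::npos)
    return std::nullopt;                        // No complete line left.
  std::string_view line = message.substr(*pos, line_end - *pos);
  if (!line.empty() && line.back() == '\r')
    line.remove_suffix(1);                      // Trim an optional CR.
  *pos = line_end + 1;                          // Advance past the newline.
  return line;
}

// Typical caller:
//   size_t pos = 0;
//   while (std::optional<std::string_view> line = NextLine(sdp, &pos)) {
//     /* parse *line */
//   }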
if (leftpart.length() < attribute.length() || - leftpart.compare(leftpart.length() - attribute.length(), - attribute.length(), attribute) != 0) { + absl::string_view(leftpart).compare( + leftpart.length() - attribute.length(), attribute.length(), + attribute) != 0) { return ParseFailedGetValue(message, attribute, error); } return true; } // Get a single [token] from : -static bool GetSingleTokenValue(const std::string& message, - const std::string& attribute, +static bool GetSingleTokenValue(absl::string_view message, + absl::string_view attribute, std::string* value, SdpParseError* error) { if (!GetValue(message, attribute, value, error)) { @@ -656,8 +647,8 @@ static bool CaseInsensitiveFind(std::string str1, std::string str2) { } template -static bool GetValueFromString(const std::string& line, - const std::string& s, +static bool GetValueFromString(absl::string_view line, + absl::string_view s, T* t, SdpParseError* error) { if (!rtc::FromString(s, t)) { @@ -668,8 +659,8 @@ static bool GetValueFromString(const std::string& line, return true; } -static bool GetPayloadTypeFromString(const std::string& line, - const std::string& s, +static bool GetPayloadTypeFromString(absl::string_view line, + absl::string_view s, int* payload_type, SdpParseError* error) { return GetValueFromString(line, s, payload_type, error) && @@ -680,7 +671,7 @@ static bool GetPayloadTypeFromString(const std::string& line, // This is a track that does not contain SSRCs and only contains // stream_ids/track_id if it's signaled with a=msid lines. void CreateTrackWithNoSsrcs(const std::vector& msid_stream_ids, - const std::string& msid_track_id, + absl::string_view msid_track_id, const std::vector& rids, StreamParamsVec* tracks) { StreamParams track; @@ -690,7 +681,7 @@ void CreateTrackWithNoSsrcs(const std::vector& msid_stream_ids, return; } track.set_stream_ids(msid_stream_ids); - track.id = msid_track_id; + track.id = std::string(msid_track_id); track.set_rids(rids); tracks->push_back(track); } @@ -701,7 +692,7 @@ void CreateTrackWithNoSsrcs(const std::vector& msid_stream_ids, // exist. We prioritize getting stream_ids/track_ids signaled in a=msid lines. void CreateTracksFromSsrcInfos(const SsrcInfoVec& ssrc_infos, const std::vector& msid_stream_ids, - const std::string& msid_track_id, + absl::string_view msid_track_id, StreamParamsVec* tracks, int msid_signaling) { RTC_DCHECK(tracks != NULL); @@ -717,17 +708,11 @@ void CreateTracksFromSsrcInfos(const SsrcInfoVec& ssrc_infos, if (msid_signaling & cricket::kMsidSignalingMediaSection) { // This is the case with Unified Plan SDP msid signaling. stream_ids = msid_stream_ids; - track_id = msid_track_id; + track_id = std::string(msid_track_id); } else if (msid_signaling & cricket::kMsidSignalingSsrcAttribute) { // This is the case with Plan B SDP msid signaling. stream_ids.push_back(ssrc_info.stream_id); track_id = ssrc_info.track_id; - } else if (!ssrc_info.mslabel.empty()) { - // Since there's no a=msid or a=ssrc msid signaling, this is a sdp from - // an older version of client that doesn't support msid. - // In that case, we use the mslabel and label to construct the track. - stream_ids.push_back(ssrc_info.mslabel); - track_id = ssrc_info.label; } else { // Since no media streams isn't supported with older SDP signaling, we // use a default a stream id. 
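// --- Illustrative sketch (not part of the patch): the string_view migration seen
// throughout this file. Parameters that are only read become string views; a
// std::string is materialized only at the point where the value is actually
// stored (as in `track.id = std::string(msid_track_id);` above). The Track
// struct and MakeTrack() here are hypothetical.
#include <string>
#include <string_view>
#include <vector>

struct Track {
  std::string id;
  std::vector<std::string> stream_ids;
};

// Read-only inputs are passed as views: no copy whether the caller has a
// std::string, a string literal, or a slice of a larger SDP buffer.
Track MakeTrack(std::string_view track_id,
                const std::vector<std::string>& stream_ids) {
  Track track;
  track.id = std::string(track_id);  // Copy exactly once, when storing.
  track.stream_ids = stream_ids;
  return track;
}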
@@ -776,7 +761,7 @@ static const int kPreferenceHost = 1; static const int kPreferenceReflexive = 2; static const int kPreferenceRelayed = 3; -static int GetCandidatePreferenceFromType(const std::string& type) { +static int GetCandidatePreferenceFromType(absl::string_view type) { int preference = kPreferenceUnknown; if (type == cricket::LOCAL_PORT_TYPE) { preference = kPreferenceHost; @@ -988,7 +973,7 @@ std::string SdpSerializeCandidate(const cricket::Candidate& candidate) { return message; } -bool SdpDeserialize(const std::string& message, +bool SdpDeserialize(absl::string_view message, JsepSessionDescription* jdesc, SdpParseError* error) { std::string session_id; @@ -1022,7 +1007,7 @@ bool SdpDeserialize(const std::string& message, return true; } -bool SdpDeserializeCandidate(const std::string& message, +bool SdpDeserializeCandidate(absl::string_view message, JsepIceCandidate* jcandidate, SdpParseError* error) { RTC_DCHECK(jcandidate != NULL); @@ -1034,8 +1019,8 @@ bool SdpDeserializeCandidate(const std::string& message, return true; } -bool SdpDeserializeCandidate(const std::string& transport_name, - const std::string& message, +bool SdpDeserializeCandidate(absl::string_view transport_name, + absl::string_view message, cricket::Candidate* candidate, SdpParseError* error) { RTC_DCHECK(candidate != nullptr); @@ -1046,26 +1031,27 @@ bool SdpDeserializeCandidate(const std::string& transport_name, return true; } -bool ParseCandidate(const std::string& message, +bool ParseCandidate(absl::string_view message, Candidate* candidate, SdpParseError* error, bool is_raw) { RTC_DCHECK(candidate != NULL); - // Get the first line from `message`. - std::string first_line = message; - size_t pos = 0; - GetLine(message, &pos, &first_line); - // Makes sure `message` contains only one line. - if (message.size() > first_line.size()) { - std::string left, right; - if (rtc::tokenize_first(message, kNewLineChar, &left, &right) && - !right.empty()) { - return ParseFailed(message, 0, "Expect one line only", error); - } + absl::string_view first_line; + + size_t line_end = message.find(kNewLineChar); + if (line_end == absl::string_view::npos) { + first_line = message; + } else if (line_end + 1 == message.size()) { + first_line = message.substr(0, line_end); + } else { + return ParseFailed(message, 0, "Expect one line only", error); } + // Trim return char, if any. 
+ first_line = TrimReturnChar(first_line); + // From WebRTC draft section 4.8.1.1 candidate-attribute should be // candidate: when trickled, but we still support // a=candidate:CRLF for backward compatibility and for parsing a line @@ -1093,8 +1079,8 @@ bool ParseCandidate(const std::string& message, } } - std::vector fields; - rtc::split(candidate_value, kSdpDelimiterSpaceChar, &fields); + std::vector fields = + rtc::split(candidate_value, kSdpDelimiterSpaceChar); // RFC 5245 // a=candidate: @@ -1106,18 +1092,18 @@ bool ParseCandidate(const std::string& message, (fields[6] != kAttributeCandidateTyp)) { return ParseFailedExpectMinFieldNum(first_line, expected_min_fields, error); } - const std::string& foundation = fields[0]; + const absl::string_view foundation = fields[0]; int component_id = 0; if (!GetValueFromString(first_line, fields[1], &component_id, error)) { return false; } - const std::string& transport = fields[2]; + const absl::string_view transport = fields[2]; uint32_t priority = 0; if (!GetValueFromString(first_line, fields[3], &priority, error)) { return false; } - const std::string& connection_address = fields[4]; + const absl::string_view connection_address = fields[4]; int port = 0; if (!GetValueFromString(first_line, fields[5], &port, error)) { return false; @@ -1127,12 +1113,13 @@ bool ParseCandidate(const std::string& message, } SocketAddress address(connection_address, port); - cricket::ProtocolType protocol; - if (!StringToProto(transport.c_str(), &protocol)) { + absl::optional protocol = + cricket::StringToProto(transport); + if (!protocol) { return ParseFailed(first_line, "Unsupported transport type.", error); } bool tcp_protocol = false; - switch (protocol) { + switch (*protocol) { // Supported protocols. case cricket::PROTO_UDP: break; @@ -1145,7 +1132,7 @@ bool ParseCandidate(const std::string& message, } std::string candidate_type; - const std::string& type = fields[7]; + const absl::string_view type = fields[7]; if (type == kCandidateHost) { candidate_type = cricket::LOCAL_PORT_TYPE; } else if (type == kCandidateSrflx) { @@ -1183,7 +1170,7 @@ bool ParseCandidate(const std::string& message, // If this is a TCP candidate, it has additional extension as defined in // RFC 6544. - std::string tcptype; + absl::string_view tcptype; if (fields.size() >= (current_position + 2) && fields[current_position] == kTcpCandidateType) { tcptype = fields[++current_position]; @@ -1209,8 +1196,8 @@ bool ParseCandidate(const std::string& message, // Though non-standard, we support the ICE ufrag and pwd being signaled on // the candidate to avoid issues with confusing which generation a candidate // belongs to when trickling multiple generations at the same time. 
- std::string username; - std::string password; + absl::string_view username; + absl::string_view password; uint32_t generation = 0; uint16_t network_id = 0; uint16_t network_cost = 0; @@ -1240,7 +1227,7 @@ bool ParseCandidate(const std::string& message, } } - *candidate = Candidate(component_id, cricket::ProtoToString(protocol), + *candidate = Candidate(component_id, cricket::ProtoToString(*protocol), address, priority, username, password, candidate_type, generation, foundation, network_id, network_cost); candidate->set_related_address(related_address); @@ -1248,32 +1235,31 @@ bool ParseCandidate(const std::string& message, return true; } -bool ParseIceOptions(const std::string& line, +bool ParseIceOptions(absl::string_view line, std::vector* transport_options, SdpParseError* error) { std::string ice_options; if (!GetValue(line, kAttributeIceOption, &ice_options, error)) { return false; } - std::vector fields; - rtc::split(ice_options, kSdpDelimiterSpaceChar, &fields); + std::vector fields = + rtc::split(ice_options, kSdpDelimiterSpaceChar); for (size_t i = 0; i < fields.size(); ++i) { - transport_options->push_back(fields[i]); + transport_options->emplace_back(fields[i]); } return true; } -bool ParseSctpPort(const std::string& line, +bool ParseSctpPort(absl::string_view line, int* sctp_port, SdpParseError* error) { // draft-ietf-mmusic-sctp-sdp-26 // a=sctp-port - std::vector fields; const size_t expected_min_fields = 2; - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColonChar, &fields); + std::vector fields = + rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColonChar); if (fields.size() < expected_min_fields) { - fields.resize(0); - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar, &fields); + fields = rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); } if (fields.size() < expected_min_fields) { return ParseFailedExpectMinFieldNum(line, expected_min_fields, error); @@ -1284,14 +1270,14 @@ bool ParseSctpPort(const std::string& line, return true; } -bool ParseSctpMaxMessageSize(const std::string& line, +bool ParseSctpMaxMessageSize(absl::string_view line, int* max_message_size, SdpParseError* error) { // draft-ietf-mmusic-sctp-sdp-26 // a=max-message-size:199999 - std::vector fields; const size_t expected_min_fields = 2; - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColonChar, &fields); + std::vector fields = + rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColonChar); if (fields.size() < expected_min_fields) { return ParseFailedExpectMinFieldNum(line, expected_min_fields, error); } @@ -1301,25 +1287,25 @@ bool ParseSctpMaxMessageSize(const std::string& line, return true; } -bool ParseExtmap(const std::string& line, +bool ParseExtmap(absl::string_view line, RtpExtension* extmap, SdpParseError* error) { // RFC 5285 // a=extmap:["/"] - std::vector fields; - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar, &fields); + std::vector fields = + rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); const size_t expected_min_fields = 2; if (fields.size() < expected_min_fields) { return ParseFailedExpectMinFieldNum(line, expected_min_fields, error); } - std::string uri = fields[1]; + absl::string_view uri = fields[1]; std::string value_direction; if (!GetValue(fields[0], kAttributeExtmap, &value_direction, error)) { return false; } - std::vector sub_fields; - rtc::split(value_direction, kSdpDelimiterSlashChar, &sub_fields); + std::vector sub_fields = + rtc::split(value_direction, 
kSdpDelimiterSlashChar); int value = 0; if (!GetValueFromString(line, sub_fields[0], &value, error)) { return false; @@ -1696,7 +1682,7 @@ void BuildRtpContentAttributes(const MediaContentDescription* media_desc, // RFC 4566 // a=rtpmap: / // [/] - BuildRtpMap(media_desc, media_type, message); + BuildRtpmap(media_desc, media_type, message); for (const StreamParams& track : media_desc->streams()) { // Build the ssrc-group lines. @@ -1738,15 +1724,6 @@ void BuildRtpContentAttributes(const MediaContentDescription* media_desc, << kSsrcAttributeMsid << kSdpDelimiterColon << stream_id << kSdpDelimiterSpace << track.id; AddLine(os.str(), message); - - // TODO(ronghuawu): Remove below code which is for backward - // compatibility. - // draft-alvestrand-rtcweb-mid-01 - // a=ssrc: mslabel: - // The label isn't yet defined. - // a=ssrc: label: - AddSsrcLine(ssrc, kSsrcAttributeMslabel, stream_id, message); - AddSsrcLine(ssrc, kSSrcAttributeLabel, track.id, message); } } @@ -1807,10 +1784,10 @@ void WriteRtcpFbHeader(int payload_type, rtc::StringBuilder* os) { } } -void WriteFmtpParameter(const std::string& parameter_name, - const std::string& parameter_value, +void WriteFmtpParameter(absl::string_view parameter_name, + absl::string_view parameter_value, rtc::StringBuilder* os) { - if (parameter_name == "") { + if (parameter_name.empty()) { // RFC 2198 and RFC 4733 don't use key-value pairs. *os << parameter_value; } else { @@ -1819,7 +1796,7 @@ void WriteFmtpParameter(const std::string& parameter_name, } } -bool IsFmtpParam(const std::string& name) { +bool IsFmtpParam(absl::string_view name) { // RFC 4855, section 3 specifies the mapping of media format parameters to SDP // parameters. Only ptime, maxptime, channels and rate are placed outside of // the fmtp line. In WebRTC, channels and rate are already handled separately @@ -1905,7 +1882,7 @@ bool GetParameter(const std::string& name, return true; } -void BuildRtpMap(const MediaContentDescription* media_desc, +void BuildRtpmap(const MediaContentDescription* media_desc, const cricket::MediaType media_type, std::string* message) { RTC_DCHECK(message != NULL); @@ -2060,7 +2037,7 @@ void BuildIceOptions(const std::vector& transport_options, } } -bool ParseConnectionData(const std::string& line, +bool ParseConnectionData(absl::string_view line, rtc::SocketAddress* addr, SdpParseError* error) { // Parse the line from left to right. 
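// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the upstream change: ParseConnectionData
// above consumes an RFC 4566 "c=" line of the form
// c=<nettype> <addrtype> <connection-address>, reading it left to right.
// Simplified stand-in with hypothetical names; the real code strips
// kLinePrefixLength and fills an rtc::SocketAddress instead of a string.
#include <iostream>
#include <sstream>
#include <string>

bool ParseConnectionLine(const std::string& line, std::string* address_out) {
  if (line.rfind("c=", 0) != 0) return false;   // must start with "c="
  std::istringstream in(line.substr(2));
  std::string nettype, addrtype, address;
  if (!(in >> nettype >> addrtype >> address)) return false;
  if (nettype != "IN") return false;            // only "IN" is defined
  if (addrtype != "IP4" && addrtype != "IP6") return false;
  *address_out = address;
  return true;
}

int main() {
  std::string addr;
  if (ParseConnectionLine("c=IN IP4 224.2.36.42", &addr)) {
    std::cout << "connection address: " << addr << "\n";
  }
}
// ---------------------------------------------------------------------------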
@@ -2111,7 +2088,7 @@ bool ParseConnectionData(const std::string& line, return true; } -bool ParseSessionDescription(const std::string& message, +bool ParseSessionDescription(absl::string_view message, size_t* pos, std::string* session_id, std::string* session_version, @@ -2120,69 +2097,74 @@ bool ParseSessionDescription(const std::string& message, rtc::SocketAddress* connection_addr, cricket::SessionDescription* desc, SdpParseError* error) { - std::string line; + absl::optional line; desc->set_msid_supported(false); desc->set_extmap_allow_mixed(false); // RFC 4566 // v= (protocol version) - if (!GetLineWithType(message, pos, &line, kLineTypeVersion)) { + line = GetLineWithType(message, pos, kLineTypeVersion); + if (!line) { return ParseFailedExpectLine(message, *pos, kLineTypeVersion, std::string(), error); } // RFC 4566 // o= // - if (!GetLineWithType(message, pos, &line, kLineTypeOrigin)) { + line = GetLineWithType(message, pos, kLineTypeOrigin); + if (!line) { return ParseFailedExpectLine(message, *pos, kLineTypeOrigin, std::string(), error); } - std::vector fields; - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar, &fields); + std::vector fields = + rtc::split(line->substr(kLinePrefixLength), kSdpDelimiterSpaceChar); const size_t expected_fields = 6; if (fields.size() != expected_fields) { - return ParseFailedExpectFieldNum(line, expected_fields, error); + return ParseFailedExpectFieldNum(*line, expected_fields, error); } - *session_id = fields[1]; - *session_version = fields[2]; + *session_id = std::string(fields[1]); + *session_version = std::string(fields[2]); // RFC 4566 // s= (session name) - if (!GetLineWithType(message, pos, &line, kLineTypeSessionName)) { + line = GetLineWithType(message, pos, kLineTypeSessionName); + if (!line) { return ParseFailedExpectLine(message, *pos, kLineTypeSessionName, std::string(), error); } - // absl::optional lines + // optional lines // Those are the optional lines, so shouldn't return false if not present. // RFC 4566 // i=* (session information) - GetLineWithType(message, pos, &line, kLineTypeSessionInfo); + GetLineWithType(message, pos, kLineTypeSessionInfo); // RFC 4566 // u=* (URI of description) - GetLineWithType(message, pos, &line, kLineTypeSessionUri); + GetLineWithType(message, pos, kLineTypeSessionUri); // RFC 4566 // e=* (email address) - GetLineWithType(message, pos, &line, kLineTypeSessionEmail); + GetLineWithType(message, pos, kLineTypeSessionEmail); // RFC 4566 // p=* (phone number) - GetLineWithType(message, pos, &line, kLineTypeSessionPhone); + GetLineWithType(message, pos, kLineTypeSessionPhone); // RFC 4566 // c=* (connection information -- not required if included in // all media) - if (GetLineWithType(message, pos, &line, kLineTypeConnection)) { - if (!ParseConnectionData(line, connection_addr, error)) { + if (absl::optional cline = + GetLineWithType(message, pos, kLineTypeConnection); + cline.has_value()) { + if (!ParseConnectionData(*cline, connection_addr, error)) { return false; } } // RFC 4566 // b=* (zero or more bandwidth information lines) - while (GetLineWithType(message, pos, &line, kLineTypeSessionBandwidth)) { + while (GetLineWithType(message, pos, kLineTypeSessionBandwidth).has_value()) { // By pass zero or more b lines. 
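// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the upstream change: the refactor above
// turns GetLineWithType from an out-parameter API into one that returns an
// optional line, which lets optional session lines (i=, u=, e=, p=) be
// skipped by simply ignoring the return value.  A standard-library stand-in
// for that pattern; NextLineWithType is a hypothetical name and
// std::optional/std::string_view replace absl::optional/absl::string_view.
#include <iostream>
#include <optional>
#include <string_view>

// Returns the next newline-terminated line if it starts with "<type>=",
// advancing *pos past it; otherwise returns nullopt and leaves *pos alone.
std::optional<std::string_view> NextLineWithType(std::string_view sdp,
                                                 size_t* pos,
                                                 char type) {
  if (*pos + 1 >= sdp.size() || sdp[*pos] != type || sdp[*pos + 1] != '=')
    return std::nullopt;
  size_t end = sdp.find('\n', *pos);
  if (end == std::string_view::npos) end = sdp.size();
  std::string_view line = sdp.substr(*pos, end - *pos);
  *pos = (end == sdp.size()) ? end : end + 1;
  return line;
}

int main() {
  std::string_view sdp = "v=0\no=- 1 1 IN IP4 127.0.0.1\ns=-\n";
  size_t pos = 0;
  if (auto v = NextLineWithType(sdp, &pos, 'v')) std::cout << *v << "\n";
  if (auto o = NextLineWithType(sdp, &pos, 'o')) std::cout << *o << "\n";
  // Optional lines: ignore the return value, like the i=/u=/e=/p= handling.
  NextLineWithType(sdp, &pos, 'i');
}
// ---------------------------------------------------------------------------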
} @@ -2191,80 +2173,81 @@ bool ParseSessionDescription(const std::string& message, // t= (time the session is active) // r=* (zero or more repeat times) // Ensure there's at least one time description - if (!GetLineWithType(message, pos, &line, kLineTypeTiming)) { + if (!GetLineWithType(message, pos, kLineTypeTiming).has_value()) { return ParseFailedExpectLine(message, *pos, kLineTypeTiming, std::string(), error); } - while (GetLineWithType(message, pos, &line, kLineTypeRepeatTimes)) { + while (GetLineWithType(message, pos, kLineTypeRepeatTimes).has_value()) { // By pass zero or more r lines. } // Go through the rest of the time descriptions - while (GetLineWithType(message, pos, &line, kLineTypeTiming)) { - while (GetLineWithType(message, pos, &line, kLineTypeRepeatTimes)) { + while (GetLineWithType(message, pos, kLineTypeTiming).has_value()) { + while (GetLineWithType(message, pos, kLineTypeRepeatTimes).has_value()) { // By pass zero or more r lines. } } // RFC 4566 // z=* (time zone adjustments) - GetLineWithType(message, pos, &line, kLineTypeTimeZone); + GetLineWithType(message, pos, kLineTypeTimeZone); // RFC 4566 // k=* (encryption key) - GetLineWithType(message, pos, &line, kLineTypeEncryptionKey); + GetLineWithType(message, pos, kLineTypeEncryptionKey); // RFC 4566 // a=* (zero or more session attribute lines) - while (GetLineWithType(message, pos, &line, kLineTypeAttributes)) { - if (HasAttribute(line, kAttributeGroup)) { - if (!ParseGroupAttribute(line, desc, error)) { + while (absl::optional aline = + GetLineWithType(message, pos, kLineTypeAttributes)) { + if (HasAttribute(*aline, kAttributeGroup)) { + if (!ParseGroupAttribute(*aline, desc, error)) { return false; } - } else if (HasAttribute(line, kAttributeIceUfrag)) { - if (!GetValue(line, kAttributeIceUfrag, &(session_td->ice_ufrag), + } else if (HasAttribute(*aline, kAttributeIceUfrag)) { + if (!GetValue(*aline, kAttributeIceUfrag, &(session_td->ice_ufrag), error)) { return false; } - } else if (HasAttribute(line, kAttributeIcePwd)) { - if (!GetValue(line, kAttributeIcePwd, &(session_td->ice_pwd), error)) { + } else if (HasAttribute(*aline, kAttributeIcePwd)) { + if (!GetValue(*aline, kAttributeIcePwd, &(session_td->ice_pwd), error)) { return false; } - } else if (HasAttribute(line, kAttributeIceLite)) { + } else if (HasAttribute(*aline, kAttributeIceLite)) { session_td->ice_mode = cricket::ICEMODE_LITE; - } else if (HasAttribute(line, kAttributeIceOption)) { - if (!ParseIceOptions(line, &(session_td->transport_options), error)) { + } else if (HasAttribute(*aline, kAttributeIceOption)) { + if (!ParseIceOptions(*aline, &(session_td->transport_options), error)) { return false; } - } else if (HasAttribute(line, kAttributeFingerprint)) { + } else if (HasAttribute(*aline, kAttributeFingerprint)) { if (session_td->identity_fingerprint.get()) { return ParseFailed( - line, + *aline, "Can't have multiple fingerprint attributes at the same level.", error); } std::unique_ptr fingerprint; - if (!ParseFingerprintAttribute(line, &fingerprint, error)) { + if (!ParseFingerprintAttribute(*aline, &fingerprint, error)) { return false; } session_td->identity_fingerprint = std::move(fingerprint); - } else if (HasAttribute(line, kAttributeSetup)) { - if (!ParseDtlsSetup(line, &(session_td->connection_role), error)) { + } else if (HasAttribute(*aline, kAttributeSetup)) { + if (!ParseDtlsSetup(*aline, &(session_td->connection_role), error)) { return false; } - } else if (HasAttribute(line, kAttributeMsidSemantics)) { + } else if 
(HasAttribute(*aline, kAttributeMsidSemantics)) { std::string semantics; - if (!GetValue(line, kAttributeMsidSemantics, &semantics, error)) { + if (!GetValue(*aline, kAttributeMsidSemantics, &semantics, error)) { return false; } desc->set_msid_supported( CaseInsensitiveFind(semantics, kMediaStreamSemantic)); - } else if (HasAttribute(line, kAttributeExtmapAllowMixed)) { + } else if (HasAttribute(*aline, kAttributeExtmapAllowMixed)) { desc->set_extmap_allow_mixed(true); - } else if (HasAttribute(line, kAttributeExtmap)) { + } else if (HasAttribute(*aline, kAttributeExtmap)) { RtpExtension extmap; - if (!ParseExtmap(line, &extmap, error)) { + if (!ParseExtmap(*aline, &extmap, error)) { return false; } session_extmaps->push_back(extmap); @@ -2273,15 +2256,15 @@ bool ParseSessionDescription(const std::string& message, return true; } -bool ParseGroupAttribute(const std::string& line, +bool ParseGroupAttribute(absl::string_view line, cricket::SessionDescription* desc, SdpParseError* error) { RTC_DCHECK(desc != NULL); // RFC 5888 and draft-holmberg-mmusic-sdp-bundle-negotiation-00 // a=group:BUNDLE video voice - std::vector fields; - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar, &fields); + std::vector fields = + rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); std::string semantics; if (!GetValue(fields[0], kAttributeGroup, &semantics, error)) { return false; @@ -2295,11 +2278,11 @@ bool ParseGroupAttribute(const std::string& line, } static bool ParseFingerprintAttribute( - const std::string& line, + absl::string_view line, std::unique_ptr* fingerprint, SdpParseError* error) { - std::vector fields; - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar, &fields); + std::vector fields = + rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); const size_t expected_fields = 2; if (fields.size() != expected_fields) { return ParseFailedExpectFieldNum(line, expected_fields, error); @@ -2326,25 +2309,27 @@ static bool ParseFingerprintAttribute( return true; } -static bool ParseDtlsSetup(const std::string& line, - cricket::ConnectionRole* role, +static bool ParseDtlsSetup(absl::string_view line, + cricket::ConnectionRole* role_ptr, SdpParseError* error) { // setup-attr = "a=setup:" role // role = "active" / "passive" / "actpass" / "holdconn" - std::vector fields; - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColonChar, &fields); + std::vector fields = + rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColonChar); const size_t expected_fields = 2; if (fields.size() != expected_fields) { return ParseFailedExpectFieldNum(line, expected_fields, error); } - std::string role_str = fields[1]; - if (!cricket::StringToConnectionRole(role_str, role)) { - return ParseFailed(line, "Invalid attribute value.", error); + if (absl::optional role = + cricket::StringToConnectionRole(fields[1]); + role.has_value()) { + *role_ptr = *role; + return true; } - return true; + return ParseFailed(line, "Invalid attribute value.", error); } -static bool ParseMsidAttribute(const std::string& line, +static bool ParseMsidAttribute(absl::string_view line, std::vector* stream_ids, std::string* track_id, SdpParseError* error) { @@ -2582,10 +2567,10 @@ void MaybeCreateStaticPayloadAudioCodecs(const std::vector& fmts, template static std::unique_ptr ParseContentDescription( - const std::string& message, + absl::string_view message, const cricket::MediaType media_type, int mline_index, - const std::string& protocol, + absl::string_view protocol, const 
std::vector& payload_types, size_t* pos, std::string* content_name, @@ -2621,7 +2606,7 @@ static std::unique_ptr ParseContentDescription( } bool ParseMediaDescription( - const std::string& message, + absl::string_view message, const TransportDescription& session_td, const RtpHeaderExtensions& session_extmaps, size_t* pos, @@ -2630,22 +2615,22 @@ bool ParseMediaDescription( std::vector>* candidates, SdpParseError* error) { RTC_DCHECK(desc != NULL); - std::string line; int mline_index = -1; int msid_signaling = 0; // Zero or more media descriptions // RFC 4566 // m= - while (GetLineWithType(message, pos, &line, kLineTypeMedia)) { + while (absl::optional mline = + GetLineWithType(message, pos, kLineTypeMedia)) { ++mline_index; - std::vector fields; - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar, &fields); + std::vector fields = + rtc::split(mline->substr(kLinePrefixLength), kSdpDelimiterSpaceChar); const size_t expected_min_fields = 4; if (fields.size() < expected_min_fields) { - return ParseFailedExpectMinFieldNum(line, expected_min_fields, error); + return ParseFailedExpectMinFieldNum(*mline, expected_min_fields, error); } bool port_rejected = false; // RFC 3264 @@ -2657,16 +2642,16 @@ bool ParseMediaDescription( int port = 0; if (!rtc::FromString(fields[1], &port) || !IsValidPort(port)) { - return ParseFailed(line, "The port number is invalid", error); + return ParseFailed(*mline, "The port number is invalid", error); } - const std::string& protocol = fields[2]; + absl::string_view protocol = fields[2]; // std::vector payload_types; if (cricket::IsRtpProtocol(protocol)) { for (size_t j = 3; j < fields.size(); ++j) { int pl = 0; - if (!GetPayloadTypeFromString(line, fields[j], &pl, error)) { + if (!GetPayloadTypeFromString(*mline, fields[j], &pl, error)) { return false; } payload_types.push_back(pl); @@ -2684,10 +2669,10 @@ bool ParseMediaDescription( std::string content_name; bool bundle_only = false; int section_msid_signaling = 0; - const std::string& media_type = fields[0]; + absl::string_view media_type = fields[0]; if ((media_type == kMediaTypeVideo || media_type == kMediaTypeAudio) && !cricket::IsRtpProtocol(protocol)) { - return ParseFailed(line, "Unsupported protocol for media type", error); + return ParseFailed(*mline, "Unsupported protocol for media type", error); } if (media_type == kMediaTypeVideo) { content = ParseContentDescription( @@ -2726,10 +2711,11 @@ bool ParseMediaDescription( data_desc->set_protocol(protocol); content = std::move(data_desc); } else { - return ParseFailed(line, "Unsupported protocol for media type", error); + return ParseFailed(*mline, "Unsupported protocol for media type", + error); } } else { - RTC_LOG(LS_WARNING) << "Unsupported media type: " << line; + RTC_LOG(LS_WARNING) << "Unsupported media type: " << *mline; auto unsupported_desc = std::make_unique(media_type); if (!ParseContent(message, cricket::MEDIA_TYPE_UNSUPPORTED, mline_index, @@ -2771,7 +2757,7 @@ bool ParseMediaDescription( if (content->as_unsupported()) { content_rejected = true; } else if (cricket::IsRtpProtocol(protocol) && !content->as_sctp()) { - content->set_protocol(protocol); + content->set_protocol(std::string(protocol)); // Set the extmap. if (!session_extmaps.empty() && !content->rtp_header_extensions().empty()) { @@ -2867,7 +2853,7 @@ T GetCodecWithPayloadType(const std::vector& codecs, int payload_type) { return ret_val; } -// Updates or creates a new codec entry in the audio description. 
+// Updates or creates a new codec entry in the media description. template void AddOrReplaceCodec(MediaContentDescription* content_desc, const U& codec) { T* desc = static_cast(content_desc); @@ -2918,7 +2904,7 @@ void UpdateCodec(MediaContentDescription* content_desc, // according to `packetization`. void UpdateVideoCodecPacketization(VideoContentDescription* video_desc, int payload_type, - const std::string& packetization) { + absl::string_view packetization) { if (packetization != cricket::kPacketizationParamRaw) { // Ignore unsupported packetization attribute. return; @@ -2927,7 +2913,7 @@ void UpdateVideoCodecPacketization(VideoContentDescription* video_desc, // Codec might already have been populated (from rtpmap). cricket::VideoCodec codec = GetCodecWithPayloadType(video_desc->codecs(), payload_type); - codec.packetization = packetization; + codec.packetization = std::string(packetization); AddOrReplaceCodec(video_desc, codec); } @@ -2958,22 +2944,22 @@ void UpdateFromWildcardCodecs(cricket::MediaContentDescriptionImpl* desc) { } void AddAudioAttribute(const std::string& name, - const std::string& value, + absl::string_view value, AudioContentDescription* audio_desc) { if (value.empty()) { return; } std::vector codecs = audio_desc->codecs(); for (cricket::AudioCodec& codec : codecs) { - codec.params[name] = value; + codec.params[name] = std::string(value); } audio_desc->set_codecs(codecs); } -bool ParseContent(const std::string& message, +bool ParseContent(absl::string_view message, const cricket::MediaType media_type, int mline_index, - const std::string& protocol, + absl::string_view protocol, const std::vector& payload_types, size_t* pos, std::string* content_name, @@ -2994,7 +2980,6 @@ bool ParseContent(const std::string& message, // The media level "ice-ufrag" and "ice-pwd". // The candidates before update the media level "ice-pwd" and "ice-ufrag". Candidates candidates_orig; - std::string line; std::string mline_id; // Tracks created out of the ssrc attributes. StreamParamsVec tracks; @@ -3010,7 +2995,8 @@ bool ParseContent(const std::string& message, // Loop until the next m line while (!IsLineType(message, kLineTypeMedia, *pos)) { - if (!GetLine(message, pos, &line)) { + absl::optional line = GetLine(message, pos); + if (!line.has_value()) { if (*pos >= message.size()) { break; // Done parsing } else { @@ -3020,14 +3006,14 @@ bool ParseContent(const std::string& message, // RFC 4566 // b=* (zero or more bandwidth information lines) - if (IsLineType(line, kLineTypeSessionBandwidth)) { + if (IsLineType(*line, kLineTypeSessionBandwidth)) { std::string bandwidth; std::string bandwidth_type; - if (!rtc::tokenize_first(line.substr(kLinePrefixLength), + if (!rtc::tokenize_first(line->substr(kLinePrefixLength), kSdpDelimiterColonChar, &bandwidth_type, &bandwidth)) { return ParseFailed( - line, + *line, "b= syntax error, does not match b=:.", error); } @@ -3037,7 +3023,7 @@ bool ParseContent(const std::string& message, continue; } int b = 0; - if (!GetValueFromString(line, bandwidth, &b, error)) { + if (!GetValueFromString(*line, bandwidth, &b, error)) { return false; } // TODO(deadbeef): Historically, applications may be setting a value @@ -3052,7 +3038,7 @@ bool ParseContent(const std::string& message, } if (b < 0) { return ParseFailed( - line, "b=" + bandwidth_type + " value can't be negative.", error); + *line, "b=" + bandwidth_type + " value can't be negative.", error); } // Convert values. Prevent integer overflow. 
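// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the upstream change: the step commented
// "Convert values. Prevent integer overflow." above turns a b=AS:<kbps>
// bandwidth value into bits per second.  This shows one way to clamp before
// multiplying so a 32-bit int cannot overflow; KbpsToBps and the saturating
// policy are assumptions of this sketch, not necessarily the exact behaviour
// of the real parser.
#include <climits>
#include <iostream>

int KbpsToBps(int kbps) {
  // Clamp first (assumption: saturate rather than reject) so that the
  // multiplication below stays within int range.
  if (kbps > INT_MAX / 1000) kbps = INT_MAX / 1000;
  return kbps * 1000;
}

int main() {
  std::cout << KbpsToBps(256) << "\n";      // b=AS:256  -> 256000 bps
  std::cout << KbpsToBps(INT_MAX) << "\n";  // saturates instead of overflowing
}
// ---------------------------------------------------------------------------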
if (bandwidth_type == kApplicationSpecificBandwidth) { @@ -3066,36 +3052,36 @@ bool ParseContent(const std::string& message, } // Parse the media level connection data. - if (IsLineType(line, kLineTypeConnection)) { + if (IsLineType(*line, kLineTypeConnection)) { rtc::SocketAddress addr; - if (!ParseConnectionData(line, &addr, error)) { + if (!ParseConnectionData(*line, &addr, error)) { return false; } media_desc->set_connection_address(addr); continue; } - if (!IsLineType(line, kLineTypeAttributes)) { + if (!IsLineType(*line, kLineTypeAttributes)) { // TODO(deadbeef): Handle other lines if needed. - RTC_LOG(LS_VERBOSE) << "Ignored line: " << line; + RTC_LOG(LS_VERBOSE) << "Ignored line: " << *line; continue; } // Handle attributes common to SCTP and RTP. - if (HasAttribute(line, kAttributeMid)) { + if (HasAttribute(*line, kAttributeMid)) { // RFC 3388 // mid-attribute = "a=mid:" identification-tag // identification-tag = token // Use the mid identification-tag as the content name. - if (!GetSingleTokenValue(line, kAttributeMid, &mline_id, error)) { + if (!GetSingleTokenValue(*line, kAttributeMid, &mline_id, error)) { return false; } *content_name = mline_id; - } else if (HasAttribute(line, kAttributeBundleOnly)) { + } else if (HasAttribute(*line, kAttributeBundleOnly)) { *bundle_only = true; - } else if (HasAttribute(line, kAttributeCandidate)) { + } else if (HasAttribute(*line, kAttributeCandidate)) { Candidate candidate; - if (!ParseCandidate(line, &candidate, error, false)) { + if (!ParseCandidate(*line, &candidate, error, false)) { return false; } // ParseCandidate will parse non-standard ufrag and password attributes, @@ -3105,30 +3091,30 @@ bool ParseContent(const std::string& message, candidate.set_username(std::string()); candidate.set_password(std::string()); candidates_orig.push_back(candidate); - } else if (HasAttribute(line, kAttributeIceUfrag)) { - if (!GetValue(line, kAttributeIceUfrag, &transport->ice_ufrag, error)) { + } else if (HasAttribute(*line, kAttributeIceUfrag)) { + if (!GetValue(*line, kAttributeIceUfrag, &transport->ice_ufrag, error)) { return false; } - } else if (HasAttribute(line, kAttributeIcePwd)) { - if (!GetValue(line, kAttributeIcePwd, &transport->ice_pwd, error)) { + } else if (HasAttribute(*line, kAttributeIcePwd)) { + if (!GetValue(*line, kAttributeIcePwd, &transport->ice_pwd, error)) { return false; } - } else if (HasAttribute(line, kAttributeIceOption)) { - if (!ParseIceOptions(line, &transport->transport_options, error)) { + } else if (HasAttribute(*line, kAttributeIceOption)) { + if (!ParseIceOptions(*line, &transport->transport_options, error)) { return false; } - } else if (HasAttribute(line, kAttributeFmtp)) { - if (!ParseFmtpAttributes(line, media_type, media_desc, error)) { + } else if (HasAttribute(*line, kAttributeFmtp)) { + if (!ParseFmtpAttributes(*line, media_type, media_desc, error)) { return false; } - } else if (HasAttribute(line, kAttributeFingerprint)) { + } else if (HasAttribute(*line, kAttributeFingerprint)) { std::unique_ptr fingerprint; - if (!ParseFingerprintAttribute(line, &fingerprint, error)) { + if (!ParseFingerprintAttribute(*line, &fingerprint, error)) { return false; } transport->identity_fingerprint = std::move(fingerprint); - } else if (HasAttribute(line, kAttributeSetup)) { - if (!ParseDtlsSetup(line, &(transport->connection_role), error)) { + } else if (HasAttribute(*line, kAttributeSetup)) { + if (!ParseDtlsSetup(*line, &(transport->connection_role), error)) { return false; } } else if (cricket::IsDtlsSctp(protocol) 
&& @@ -3136,23 +3122,23 @@ bool ParseContent(const std::string& message, // // SCTP specific attributes // - if (HasAttribute(line, kAttributeSctpPort)) { + if (HasAttribute(*line, kAttributeSctpPort)) { if (media_desc->as_sctp()->use_sctpmap()) { return ParseFailed( - line, "sctp-port attribute can't be used with sctpmap.", error); + *line, "sctp-port attribute can't be used with sctpmap.", error); } int sctp_port; - if (!ParseSctpPort(line, &sctp_port, error)) { + if (!ParseSctpPort(*line, &sctp_port, error)) { return false; } media_desc->as_sctp()->set_port(sctp_port); - } else if (HasAttribute(line, kAttributeMaxMessageSize)) { + } else if (HasAttribute(*line, kAttributeMaxMessageSize)) { int max_message_size; - if (!ParseSctpMaxMessageSize(line, &max_message_size, error)) { + if (!ParseSctpMaxMessageSize(*line, &max_message_size, error)) { return false; } media_desc->as_sctp()->set_max_message_size(max_message_size); - } else if (HasAttribute(line, kAttributeSctpmap)) { + } else if (HasAttribute(*line, kAttributeSctpmap)) { // Ignore a=sctpmap: from early versions of draft-ietf-mmusic-sctp-sdp continue; } @@ -3160,132 +3146,133 @@ bool ParseContent(const std::string& message, // // RTP specific attributes // - if (HasAttribute(line, kAttributeRtcpMux)) { + if (HasAttribute(*line, kAttributeRtcpMux)) { media_desc->set_rtcp_mux(true); - } else if (HasAttribute(line, kAttributeRtcpReducedSize)) { + } else if (HasAttribute(*line, kAttributeRtcpReducedSize)) { media_desc->set_rtcp_reduced_size(true); - } else if (HasAttribute(line, kAttributeRtcpRemoteEstimate)) { + } else if (HasAttribute(*line, kAttributeRtcpRemoteEstimate)) { media_desc->set_remote_estimate(true); - } else if (HasAttribute(line, kAttributeSsrcGroup)) { - if (!ParseSsrcGroupAttribute(line, &ssrc_groups, error)) { + } else if (HasAttribute(*line, kAttributeSsrcGroup)) { + if (!ParseSsrcGroupAttribute(*line, &ssrc_groups, error)) { return false; } - } else if (HasAttribute(line, kAttributeSsrc)) { - if (!ParseSsrcAttribute(line, &ssrc_infos, msid_signaling, error)) { + } else if (HasAttribute(*line, kAttributeSsrc)) { + if (!ParseSsrcAttribute(*line, &ssrc_infos, msid_signaling, error)) { return false; } - } else if (HasAttribute(line, kAttributeCrypto)) { - if (!ParseCryptoAttribute(line, media_desc, error)) { + } else if (HasAttribute(*line, kAttributeCrypto)) { + if (!ParseCryptoAttribute(*line, media_desc, error)) { return false; } - } else if (HasAttribute(line, kAttributeRtpmap)) { - if (!ParseRtpmapAttribute(line, media_type, payload_types, media_desc, + } else if (HasAttribute(*line, kAttributeRtpmap)) { + if (!ParseRtpmapAttribute(*line, media_type, payload_types, media_desc, error)) { return false; } - } else if (HasAttribute(line, kCodecParamMaxPTime)) { - if (!GetValue(line, kCodecParamMaxPTime, &maxptime_as_string, error)) { + } else if (HasAttribute(*line, kCodecParamMaxPTime)) { + if (!GetValue(*line, kCodecParamMaxPTime, &maxptime_as_string, error)) { return false; } - } else if (HasAttribute(line, kAttributePacketization)) { - if (!ParsePacketizationAttribute(line, media_type, media_desc, error)) { + } else if (HasAttribute(*line, kAttributePacketization)) { + if (!ParsePacketizationAttribute(*line, media_type, media_desc, + error)) { return false; } - } else if (HasAttribute(line, kAttributeRtcpFb)) { - if (!ParseRtcpFbAttribute(line, media_type, media_desc, error)) { + } else if (HasAttribute(*line, kAttributeRtcpFb)) { + if (!ParseRtcpFbAttribute(*line, media_type, media_desc, error)) { return false; } 
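// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the upstream change: the long
// HasAttribute(*line, ...) chain above dispatches each "a=" line on its
// attribute name, which boils down to a prefix match after the "a=" marker.
// Simplified stand-in; HasSdpAttribute is a hypothetical name and the real
// HasAttribute may differ in detail.
#include <iostream>
#include <string_view>

bool HasSdpAttribute(std::string_view line, std::string_view attribute) {
  if (line.substr(0, 2) != "a=") return false;
  line.remove_prefix(2);
  if (line.substr(0, attribute.size()) != attribute) return false;
  // Require the attribute name to end here, so "rtcp" does not match
  // "a=rtcp-fb:..." by accident.
  return line.size() == attribute.size() || line[attribute.size()] == ':' ||
         line[attribute.size()] == ' ';
}

int main() {
  std::cout << HasSdpAttribute("a=rtcp-mux", "rtcp-mux") << "\n";      // 1
  std::cout << HasSdpAttribute("a=rtcp-fb:111 nack", "rtcp") << "\n";  // 0
  std::cout << HasSdpAttribute("a=mid:audio", "mid") << "\n";          // 1
}
// ---------------------------------------------------------------------------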
- } else if (HasAttribute(line, kCodecParamPTime)) { - if (!GetValue(line, kCodecParamPTime, &ptime_as_string, error)) { + } else if (HasAttribute(*line, kCodecParamPTime)) { + if (!GetValue(*line, kCodecParamPTime, &ptime_as_string, error)) { return false; } - } else if (HasAttribute(line, kAttributeSendOnly)) { + } else if (HasAttribute(*line, kAttributeSendOnly)) { media_desc->set_direction(RtpTransceiverDirection::kSendOnly); - } else if (HasAttribute(line, kAttributeRecvOnly)) { + } else if (HasAttribute(*line, kAttributeRecvOnly)) { media_desc->set_direction(RtpTransceiverDirection::kRecvOnly); - } else if (HasAttribute(line, kAttributeInactive)) { + } else if (HasAttribute(*line, kAttributeInactive)) { media_desc->set_direction(RtpTransceiverDirection::kInactive); - } else if (HasAttribute(line, kAttributeSendRecv)) { + } else if (HasAttribute(*line, kAttributeSendRecv)) { media_desc->set_direction(RtpTransceiverDirection::kSendRecv); - } else if (HasAttribute(line, kAttributeExtmapAllowMixed)) { + } else if (HasAttribute(*line, kAttributeExtmapAllowMixed)) { media_desc->set_extmap_allow_mixed_enum( MediaContentDescription::kMedia); - } else if (HasAttribute(line, kAttributeExtmap)) { + } else if (HasAttribute(*line, kAttributeExtmap)) { RtpExtension extmap; - if (!ParseExtmap(line, &extmap, error)) { + if (!ParseExtmap(*line, &extmap, error)) { return false; } media_desc->AddRtpHeaderExtension(extmap); - } else if (HasAttribute(line, kAttributeXGoogleFlag)) { + } else if (HasAttribute(*line, kAttributeXGoogleFlag)) { // Experimental attribute. Conference mode activates more aggressive // AEC and NS settings. // TODO(deadbeef): expose API to set these directly. std::string flag_value; - if (!GetValue(line, kAttributeXGoogleFlag, &flag_value, error)) { + if (!GetValue(*line, kAttributeXGoogleFlag, &flag_value, error)) { return false; } if (flag_value.compare(kValueConference) == 0) media_desc->set_conference_mode(true); - } else if (HasAttribute(line, kAttributeMsid)) { - if (!ParseMsidAttribute(line, &stream_ids, &track_id, error)) { + } else if (HasAttribute(*line, kAttributeMsid)) { + if (!ParseMsidAttribute(*line, &stream_ids, &track_id, error)) { return false; } *msid_signaling |= cricket::kMsidSignalingMediaSection; - } else if (HasAttribute(line, kAttributeRid)) { + } else if (HasAttribute(*line, kAttributeRid)) { const size_t kRidPrefixLength = kLinePrefixLength + arraysize(kAttributeRid); - if (line.size() <= kRidPrefixLength) { - RTC_LOG(LS_INFO) << "Ignoring empty RID attribute: " << line; + if (line->size() <= kRidPrefixLength) { + RTC_LOG(LS_INFO) << "Ignoring empty RID attribute: " << *line; continue; } RTCErrorOr error_or_rid_description = deserializer.DeserializeRidDescription( - line.substr(kRidPrefixLength)); + line->substr(kRidPrefixLength)); // Malformed a=rid lines are discarded. if (!error_or_rid_description.ok()) { - RTC_LOG(LS_INFO) << "Ignoring malformed RID line: '" << line + RTC_LOG(LS_INFO) << "Ignoring malformed RID line: '" << *line << "'. 
Error: " << error_or_rid_description.error().message(); continue; } rids.push_back(error_or_rid_description.MoveValue()); - } else if (HasAttribute(line, kAttributeSimulcast)) { + } else if (HasAttribute(*line, kAttributeSimulcast)) { const size_t kSimulcastPrefixLength = kLinePrefixLength + arraysize(kAttributeSimulcast); - if (line.size() <= kSimulcastPrefixLength) { - return ParseFailed(line, "Simulcast attribute is empty.", error); + if (line->size() <= kSimulcastPrefixLength) { + return ParseFailed(*line, "Simulcast attribute is empty.", error); } if (!simulcast.empty()) { - return ParseFailed(line, "Multiple Simulcast attributes specified.", + return ParseFailed(*line, "Multiple Simulcast attributes specified.", error); } RTCErrorOr error_or_simulcast = deserializer.DeserializeSimulcastDescription( - line.substr(kSimulcastPrefixLength)); + line->substr(kSimulcastPrefixLength)); if (!error_or_simulcast.ok()) { - return ParseFailed(line, + return ParseFailed(*line, std::string("Malformed simulcast line: ") + error_or_simulcast.error().message(), error); } simulcast = error_or_simulcast.value(); - } else if (HasAttribute(line, kAttributeRtcp)) { + } else if (HasAttribute(*line, kAttributeRtcp)) { // Ignore and do not log a=rtcp line. // JSEP section 5.8.2 (media section parsing) says to ignore it. continue; } else { // Unrecognized attribute in RTP protocol. - RTC_LOG(LS_VERBOSE) << "Ignored line: " << line; + RTC_LOG(LS_VERBOSE) << "Ignored line: " << *line; continue; } } else { // Only parse lines that we are interested of. - RTC_LOG(LS_VERBOSE) << "Ignored line: " << line; + RTC_LOG(LS_VERBOSE) << "Ignored line: " << *line; continue; } } @@ -3295,7 +3282,6 @@ bool ParseContent(const std::string& message, // If simulcast is specifed, split the rids into send and receive. // Rids that do not appear in simulcast attribute will be removed. - // If it is not specified, we assume that all rids are for send layers. std::vector send_rids; std::vector receive_rids; if (!simulcast.empty()) { @@ -3322,7 +3308,11 @@ bool ParseContent(const std::string& message, media_desc->set_simulcast_description(simulcast); } else { - send_rids = rids; + // RID is specified in RFC 8851, which identifies a lot of usages. + // We only support RFC 8853 usage of RID, not anything else. + // Ignore all RID parameters when a=simulcast is missing. 
+ // In particular do NOT do send_rids = rids; + RTC_LOG(LS_VERBOSE) << "Ignoring send_rids without simulcast"; } media_desc->set_receive_rids(receive_rids); @@ -3399,7 +3389,7 @@ bool ParseContent(const std::string& message, return true; } -bool ParseSsrcAttribute(const std::string& line, +bool ParseSsrcAttribute(absl::string_view line, SsrcInfoVec* ssrc_infos, int* msid_signaling, SdpParseError* error) { @@ -3456,37 +3446,31 @@ bool ParseSsrcAttribute(const std::string& line, } else if (attribute == kSsrcAttributeMsid) { // draft-alvestrand-mmusic-msid-00 // msid:identifier [appdata] - std::vector fields; - rtc::split(value, kSdpDelimiterSpaceChar, &fields); + std::vector fields = + rtc::split(value, kSdpDelimiterSpaceChar); if (fields.size() < 1 || fields.size() > 2) { return ParseFailed( line, "Expected format \"msid:[ ]\".", error); } - ssrc_info.stream_id = fields[0]; + ssrc_info.stream_id = std::string(fields[0]); if (fields.size() == 2) { - ssrc_info.track_id = fields[1]; + ssrc_info.track_id = std::string(fields[1]); } *msid_signaling |= cricket::kMsidSignalingSsrcAttribute; - } else if (attribute == kSsrcAttributeMslabel) { - // draft-alvestrand-rtcweb-mid-01 - // mslabel: - ssrc_info.mslabel = value; - } else if (attribute == kSSrcAttributeLabel) { - // The label isn't defined. - // label: - ssrc_info.label = value; + } else { + RTC_LOG(LS_INFO) << "Ignored unknown ssrc-specific attribute: " << line; } return true; } -bool ParseSsrcGroupAttribute(const std::string& line, +bool ParseSsrcGroupAttribute(absl::string_view line, SsrcGroupVec* ssrc_groups, SdpParseError* error) { RTC_DCHECK(ssrc_groups != NULL); // RFC 5576 // a=ssrc-group: ... - std::vector fields; - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar, &fields); + std::vector fields = + rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); const size_t expected_min_fields = 2; if (fields.size() < expected_min_fields) { return ParseFailedExpectMinFieldNum(line, expected_min_fields, error); @@ -3507,11 +3491,11 @@ bool ParseSsrcGroupAttribute(const std::string& line, return true; } -bool ParseCryptoAttribute(const std::string& line, +bool ParseCryptoAttribute(absl::string_view line, MediaContentDescription* media_desc, SdpParseError* error) { - std::vector fields; - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar, &fields); + std::vector fields = + rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); // RFC 4568 // a=crypto: [] const size_t expected_min_fields = 3; @@ -3526,12 +3510,13 @@ bool ParseCryptoAttribute(const std::string& line, if (!GetValueFromString(line, tag_value, &tag, error)) { return false; } - const std::string& crypto_suite = fields[1]; - const std::string& key_params = fields[2]; - std::string session_params; + const absl::string_view crypto_suite = fields[1]; + const absl::string_view key_params = fields[2]; + absl::string_view session_params; if (fields.size() > 3) { session_params = fields[3]; } + media_desc->AddCrypto( CryptoParams(tag, crypto_suite, key_params, session_params)); return true; @@ -3540,7 +3525,7 @@ bool ParseCryptoAttribute(const std::string& line, // Updates or creates a new codec entry in the audio description with according // to `name`, `clockrate`, `bitrate`, and `channels`. void UpdateCodec(int payload_type, - const std::string& name, + absl::string_view name, int clockrate, int bitrate, size_t channels, @@ -3549,7 +3534,7 @@ void UpdateCodec(int payload_type, // (from an fmtp). 
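// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the upstream change: GetCodecWithPayloadType
// and AddOrReplaceCodec above implement an "update or insert by payload type"
// pattern, needed because a=rtpmap and a=fmtp lines for the same payload type
// may arrive in either order.  Minimal stand-in; MiniCodec is a hypothetical
// struct, far smaller than cricket::AudioCodec/VideoCodec.
#include <iostream>
#include <string>
#include <vector>

struct MiniCodec {
  int id = 0;          // payload type
  std::string name;    // filled in by a=rtpmap
  std::string fmtp;    // filled in by a=fmtp
};

MiniCodec GetCodecWithPayloadType(const std::vector<MiniCodec>& codecs, int pt) {
  for (const MiniCodec& c : codecs)
    if (c.id == pt) return c;   // may already be partially populated
  MiniCodec fresh;
  fresh.id = pt;                // otherwise start a new entry
  return fresh;
}

void AddOrReplaceCodec(std::vector<MiniCodec>* codecs, const MiniCodec& codec) {
  for (MiniCodec& c : *codecs)
    if (c.id == codec.id) { c = codec; return; }
  codecs->push_back(codec);
}

int main() {
  std::vector<MiniCodec> codecs;
  MiniCodec c = GetCodecWithPayloadType(codecs, 111);  // from a=fmtp:111 ...
  c.fmtp = "minptime=10";
  AddOrReplaceCodec(&codecs, c);
  c = GetCodecWithPayloadType(codecs, 111);  // from a=rtpmap:111 opus/48000/2
  c.name = "opus";
  AddOrReplaceCodec(&codecs, c);
  std::cout << codecs[0].name << " / " << codecs[0].fmtp << "\n";
}
// ---------------------------------------------------------------------------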
cricket::AudioCodec codec = GetCodecWithPayloadType(audio_desc->codecs(), payload_type); - codec.name = name; + codec.name = std::string(name); codec.clockrate = clockrate; codec.bitrate = bitrate; codec.channels = channels; @@ -3560,24 +3545,25 @@ void UpdateCodec(int payload_type, // Updates or creates a new codec entry in the video description according to // `name`, `width`, `height`, and `framerate`. void UpdateCodec(int payload_type, - const std::string& name, + absl::string_view name, VideoContentDescription* video_desc) { // Codec may already be populated with (only) optional parameters // (from an fmtp). cricket::VideoCodec codec = GetCodecWithPayloadType(video_desc->codecs(), payload_type); - codec.name = name; + codec.name = std::string(name); AddOrReplaceCodec(video_desc, codec); } -bool ParseRtpmapAttribute(const std::string& line, +bool ParseRtpmapAttribute(absl::string_view line, const cricket::MediaType media_type, const std::vector& payload_types, MediaContentDescription* media_desc, SdpParseError* error) { - std::vector fields; - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar, &fields); + static const int kFirstDynamicPayloadTypeLowerRange = 35; + std::vector fields = + rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); // RFC 4566 // a=rtpmap: /[/] const size_t expected_min_fields = 2; @@ -3600,9 +3586,7 @@ bool ParseRtpmapAttribute(const std::string& line, << line; return true; } - const std::string& encoder = fields[1]; - std::vector codec_params; - rtc::split(encoder, '/', &codec_params); + std::vector codec_params = rtc::split(fields[1], '/'); // /[/] // 2 mandatory fields if (codec_params.size() < 2 || codec_params.size() > 3) { @@ -3611,13 +3595,28 @@ bool ParseRtpmapAttribute(const std::string& line, "[/]\".", error); } - const std::string& encoding_name = codec_params[0]; + const absl::string_view encoding_name = codec_params[0]; int clock_rate = 0; if (!GetValueFromString(line, codec_params[1], &clock_rate, error)) { return false; } + if (media_type == cricket::MEDIA_TYPE_VIDEO) { VideoContentDescription* video_desc = media_desc->as_video(); + for (const cricket::VideoCodec& existing_codec : video_desc->codecs()) { + if (!existing_codec.name.empty() && payload_type == existing_codec.id && + (!absl::EqualsIgnoreCase(encoding_name, existing_codec.name) || + clock_rate != existing_codec.clockrate)) { + rtc::StringBuilder description; + description + << "Duplicate " + << (payload_type < kFirstDynamicPayloadTypeLowerRange + ? "statically assigned" + : "") + << " payload type with conflicting codec name or clock rate."; + return ParseFailed(line, description.Release(), error); + } + } UpdateCodec(payload_type, encoding_name, video_desc); } else if (media_type == cricket::MEDIA_TYPE_AUDIO) { // RFC 4566 @@ -3636,27 +3635,42 @@ bool ParseRtpmapAttribute(const std::string& line, } AudioContentDescription* audio_desc = media_desc->as_audio(); + for (const cricket::AudioCodec& existing_codec : audio_desc->codecs()) { + // TODO(crbug.com/1338902) re-add checks for clockrate and number of + // channels. + if (!existing_codec.name.empty() && payload_type == existing_codec.id && + (!absl::EqualsIgnoreCase(encoding_name, existing_codec.name))) { + rtc::StringBuilder description; + description + << "Duplicate " + << (payload_type < kFirstDynamicPayloadTypeLowerRange + ? 
"statically assigned" + : "") + << " payload type with conflicting codec name or clock rate."; + return ParseFailed(line, description.Release(), error); + } + } UpdateCodec(payload_type, encoding_name, clock_rate, 0, channels, audio_desc); } return true; } -bool ParseFmtpParam(const std::string& line, +bool ParseFmtpParam(absl::string_view line, std::string* parameter, std::string* value, SdpParseError* error) { if (!rtc::tokenize_first(line, kSdpDelimiterEqualChar, parameter, value)) { // Support for non-key-value lines like RFC 2198 or RFC 4733. *parameter = ""; - *value = line; + *value = std::string(line); return true; } // a=fmtp: =; =; ... return true; } -bool ParseFmtpAttributes(const std::string& line, +bool ParseFmtpAttributes(absl::string_view line, const cricket::MediaType media_type, MediaContentDescription* media_desc, SdpParseError* error) { @@ -3692,14 +3706,13 @@ bool ParseFmtpAttributes(const std::string& line, } // Parse out format specific parameters. - std::vector fields; - rtc::split(line_params, kSdpDelimiterSemicolonChar, &fields); - cricket::CodecParameterMap codec_params; - for (auto& iter : fields) { + for (absl::string_view param : + rtc::split(line_params, kSdpDelimiterSemicolonChar)) { std::string name; std::string value; - if (!ParseFmtpParam(rtc::string_trim(iter), &name, &value, error)) { + if (!ParseFmtpParam(absl::StripAsciiWhitespace(param), &name, &value, + error)) { return false; } if (codec_params.find(name) != codec_params.end()) { @@ -3719,15 +3732,15 @@ bool ParseFmtpAttributes(const std::string& line, return true; } -bool ParsePacketizationAttribute(const std::string& line, +bool ParsePacketizationAttribute(absl::string_view line, const cricket::MediaType media_type, MediaContentDescription* media_desc, SdpParseError* error) { if (media_type != cricket::MEDIA_TYPE_VIDEO) { return true; } - std::vector packetization_fields; - rtc::split(line.c_str(), kSdpDelimiterSpaceChar, &packetization_fields); + std::vector packetization_fields = + rtc::split(line, kSdpDelimiterSpaceChar); if (packetization_fields.size() < 2) { return ParseFailedGetValue(line, kAttributePacketization, error); } @@ -3741,13 +3754,13 @@ bool ParsePacketizationAttribute(const std::string& line, error)) { return false; } - std::string packetization = packetization_fields[1]; + absl::string_view packetization = packetization_fields[1]; UpdateVideoCodecPacketization(media_desc->as_video(), payload_type, packetization); return true; } -bool ParseRtcpFbAttribute(const std::string& line, +bool ParseRtcpFbAttribute(absl::string_view line, const cricket::MediaType media_type, MediaContentDescription* media_desc, SdpParseError* error) { @@ -3755,8 +3768,8 @@ bool ParseRtcpFbAttribute(const std::string& line, media_type != cricket::MEDIA_TYPE_VIDEO) { return true; } - std::vector rtcp_fb_fields; - rtc::split(line.c_str(), kSdpDelimiterSpaceChar, &rtcp_fb_fields); + std::vector rtcp_fb_fields = + rtc::split(line, kSdpDelimiterSpaceChar); if (rtcp_fb_fields.size() < 2) { return ParseFailedGetValue(line, kAttributeRtcpFb, error); } @@ -3772,11 +3785,11 @@ bool ParseRtcpFbAttribute(const std::string& line, return false; } } - std::string id = rtcp_fb_fields[1]; + absl::string_view id = rtcp_fb_fields[1]; std::string param = ""; - for (std::vector::iterator iter = rtcp_fb_fields.begin() + 2; - iter != rtcp_fb_fields.end(); ++iter) { - param.append(*iter); + for (auto iter = rtcp_fb_fields.begin() + 2; iter != rtcp_fb_fields.end(); + ++iter) { + param.append(iter->data(), iter->length()); } const 
cricket::FeedbackParam feedback_param(id, param); diff --git a/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.h b/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.h index 6d6980a989..cc6813caec 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.h +++ b/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.h @@ -22,6 +22,7 @@ #include +#include "absl/strings/string_view.h" #include "api/candidate.h" #include "api/jsep.h" #include "api/jsep_ice_candidate.h" @@ -66,7 +67,7 @@ RTC_EXPORT std::string SdpSerializeCandidate( // jdesc - The JsepSessionDescription deserialized from the SDP string. // error - The detail error information when parsing fails. // return - true on success, false on failure. -bool SdpDeserialize(const std::string& message, +bool SdpDeserialize(absl::string_view message, JsepSessionDescription* jdesc, SdpParseError* error); @@ -77,7 +78,7 @@ bool SdpDeserialize(const std::string& message, // candidates - The JsepIceCandidate from the SDP string. // error - The detail error information when parsing fails. // return - true on success, false on failure. -RTC_EXPORT bool SdpDeserializeCandidate(const std::string& message, +RTC_EXPORT bool SdpDeserializeCandidate(absl::string_view message, JsepIceCandidate* candidate, SdpParseError* error); @@ -89,8 +90,8 @@ RTC_EXPORT bool SdpDeserializeCandidate(const std::string& message, // candidate - The cricket Candidate from the SDP string. // error - The detail error information when parsing fails. // return - true on success, false on failure. -RTC_EXPORT bool SdpDeserializeCandidate(const std::string& transport_name, - const std::string& message, +RTC_EXPORT bool SdpDeserializeCandidate(absl::string_view transport_name, + absl::string_view message, cricket::Candidate* candidate, SdpParseError* error); @@ -100,7 +101,7 @@ RTC_EXPORT bool SdpDeserializeCandidate(const std::string& transport_name, // `error` is not null. // If `is_raw` is false, `message` is expected to be prefixed with "a=". // If `is_raw` is true, no prefix is expected in `messaage`. 
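// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the upstream change: after the header
// update above, the deserialization entry points take absl::string_view, so
// any contiguous text (std::string, string literal, sub-span) can be passed
// without copying.  A hedged usage fragment that must be linked against
// WebRTC; the JsepSessionDescription constructor and the SdpParseError
// line/description members come from api/jsep.h as commonly declared and
// should be treated as assumptions, not guarantees of this diff.
#include <iostream>
#include <string>

#include "api/jsep_session_description.h"
#include "pc/webrtc_sdp.h"

bool TryParseOffer(const std::string& sdp_text) {
  webrtc::JsepSessionDescription jdesc(webrtc::SdpType::kOffer);  // assumed ctor
  webrtc::SdpParseError error;
  if (!webrtc::SdpDeserialize(sdp_text, &jdesc, &error)) {
    std::cerr << "SDP parse failed at line \"" << error.line
              << "\": " << error.description << "\n";
    return false;
  }
  return true;
}
// ---------------------------------------------------------------------------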
-RTC_EXPORT bool ParseCandidate(const std::string& message, +RTC_EXPORT bool ParseCandidate(absl::string_view message, cricket::Candidate* candidate, SdpParseError* error, bool is_raw); diff --git a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc index ac20308df9..3d398e3b6e 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc @@ -11,8 +11,8 @@ #include "pc/webrtc_session_description_factory.h" #include -#include -#include + +#include #include #include #include @@ -23,15 +23,16 @@ #include "api/jsep.h" #include "api/jsep_session_description.h" #include "api/rtc_error.h" +#include "api/sequence_checker.h" +#include "pc/connection_context.h" #include "pc/sdp_state_provider.h" #include "pc/session_description.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/string_encode.h" +#include "rtc_base/unique_id_generator.h" using cricket::MediaSessionOptions; using rtc::UniqueRandomIdGenerator; @@ -65,34 +66,8 @@ static bool ValidMediaSessionOptions( return sender1.track_id == sender2.track_id; }) == sorted_senders.end(); } - -enum { - MSG_CREATE_SESSIONDESCRIPTION_SUCCESS, - MSG_CREATE_SESSIONDESCRIPTION_FAILED, - MSG_USE_CONSTRUCTOR_CERTIFICATE -}; - -struct CreateSessionDescriptionMsg : public rtc::MessageData { - explicit CreateSessionDescriptionMsg( - webrtc::CreateSessionDescriptionObserver* observer, - RTCError error_in) - : observer(observer), error(std::move(error_in)) {} - - rtc::scoped_refptr observer; - RTCError error; - std::unique_ptr description; -}; } // namespace -void WebRtcCertificateGeneratorCallback::OnFailure() { - SignalRequestFailed(); -} - -void WebRtcCertificateGeneratorCallback::OnSuccess( - const rtc::scoped_refptr& certificate) { - SignalCertificateReady(certificate); -} - // static void WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription( const SessionDescriptionInterface* source_desc, @@ -125,20 +100,21 @@ void WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription( } WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory( - rtc::Thread* signaling_thread, - cricket::ChannelManager* channel_manager, + ConnectionContext* context, const SdpStateProvider* sdp_info, const std::string& session_id, bool dtls_enabled, std::unique_ptr cert_generator, - const rtc::scoped_refptr& certificate, - UniqueRandomIdGenerator* ssrc_generator, + rtc::scoped_refptr certificate, std::function&)> - on_certificate_ready) - : signaling_thread_(signaling_thread), - session_desc_factory_(channel_manager, - &transport_desc_factory_, - ssrc_generator), + on_certificate_ready, + const FieldTrialsView& field_trials) + : signaling_thread_(context->signaling_thread()), + transport_desc_factory_(field_trials), + session_desc_factory_(context->media_engine(), + context->use_rtx(), + context->ssrc_generator(), + &transport_desc_factory_), // RFC 4566 suggested a Network Time Protocol (NTP) format timestamp // as the session id and session version. 
To simplify, it should be fine // to just use a random number as session id and start version from @@ -164,31 +140,32 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory( certificate_request_state_ = CERTIFICATE_WAITING; RTC_LOG(LS_VERBOSE) << "DTLS-SRTP enabled; has certificate parameter."; - // We already have a certificate but we wait to do `SetIdentity`; if we do - // it in the constructor then the caller has not had a chance to connect to - // `SignalCertificateReady`. - signaling_thread_->Post( - RTC_FROM_HERE, this, MSG_USE_CONSTRUCTOR_CERTIFICATE, - new rtc::ScopedRefMessageData(certificate)); + RTC_LOG(LS_INFO) << "Using certificate supplied to the constructor."; + SetCertificate(certificate); } else { // Generate certificate. certificate_request_state_ = CERTIFICATE_WAITING; - auto callback = rtc::make_ref_counted(); - callback->SignalRequestFailed.connect( - this, &WebRtcSessionDescriptionFactory::OnCertificateRequestFailed); - callback->SignalCertificateReady.connect( - this, &WebRtcSessionDescriptionFactory::SetCertificate); + auto callback = [weak_ptr = weak_factory_.GetWeakPtr()]( + rtc::scoped_refptr certificate) { + if (!weak_ptr) { + return; + } + if (certificate) { + weak_ptr->SetCertificate(std::move(certificate)); + } else { + weak_ptr->OnCertificateRequestFailed(); + } + }; rtc::KeyParams key_params = rtc::KeyParams(); RTC_LOG(LS_VERBOSE) << "DTLS-SRTP enabled; sending DTLS identity request (key type: " << key_params.type() << ")."; - // Request certificate. This happens asynchronously, so that the caller gets - // a chance to connect to `SignalCertificateReady`. + // Request certificate. This happens asynchronously on a different thread. cert_generator_->GenerateCertificateAsync(key_params, absl::nullopt, - callback); + std::move(callback)); } } @@ -198,22 +175,14 @@ WebRtcSessionDescriptionFactory::~WebRtcSessionDescriptionFactory() { // Fail any requests that were asked for before identity generation completed. FailPendingRequests(kFailedDueToSessionShutdown); - // Process all pending notifications in the message queue. If we don't do - // this, requests will linger and not know they succeeded or failed. - rtc::MessageList list; - signaling_thread_->Clear(this, rtc::MQID_ANY, &list); - for (auto& msg : list) { - if (msg.message_id != MSG_USE_CONSTRUCTOR_CERTIFICATE) { - OnMessage(&msg); - } else { - // Skip MSG_USE_CONSTRUCTOR_CERTIFICATE because we don't want to trigger - // SetIdentity-related callbacks in the destructor. This can be a problem - // when WebRtcSession listens to the callback but it was the WebRtcSession - // destructor that caused WebRtcSessionDescriptionFactory's destruction. - // The callback is then ignored, leaking memory allocated by OnMessage for - // MSG_USE_CONSTRUCTOR_CERTIFICATE. - delete msg.pdata; - } + // Process all pending notifications. If we don't do this, requests will + // linger and not know they succeeded or failed. + // All tasks that suppose to run them are protected with weak_factory_ and + // will be cancelled. If we don't protect them, they might trigger after peer + // connection is destroyed, which might be surprising. 
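// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the upstream change: the destructor above
// now drains callbacks_ directly, while the posted tasks are guarded by
// weak_factory_ so they turn into no-ops once the factory is gone.  A
// standard-library illustration of that "post with weak guard, drain on
// destruction" shape; PendingCallbacks is a hypothetical class and a
// shared_ptr/weak_ptr pair stands in for rtc::WeakPtrFactory.
#include <functional>
#include <iostream>
#include <memory>
#include <queue>

class PendingCallbacks {
 public:
  ~PendingCallbacks() {
    // Anything still queued runs no later than destruction time.
    while (!callbacks_.empty()) {
      std::move(callbacks_.front())();
      callbacks_.pop();
    }
  }

  // Returns a task that can be handed to another executor: if this object is
  // gone by the time the task runs, the task does nothing.
  std::function<void()> Post(std::function<void()> callback) {
    callbacks_.push(std::move(callback));
    std::weak_ptr<void> guard = alive_;
    return [this, guard] {
      if (guard.expired()) return;  // owner already destroyed (and drained)
      std::move(callbacks_.front())();
      callbacks_.pop();
    };
  }

 private:
  std::queue<std::function<void()>> callbacks_;
  std::shared_ptr<void> alive_ = std::make_shared<int>(0);
};

int main() {
  PendingCallbacks pending;
  auto task = pending.Post([] { std::cout << "callback delivered\n"; });
  task();  // normally executed later by the signaling thread
}
// ---------------------------------------------------------------------------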
+  while (!callbacks_.empty()) {
+    std::move(callbacks_.front())();
+    callbacks_.pop();
   }
 }
 
@@ -298,37 +267,6 @@ cricket::SecurePolicy WebRtcSessionDescriptionFactory::SdesPolicy() const {
   return session_desc_factory_.secure();
 }
 
-void WebRtcSessionDescriptionFactory::OnMessage(rtc::Message* msg) {
-  switch (msg->message_id) {
-    case MSG_CREATE_SESSIONDESCRIPTION_SUCCESS: {
-      CreateSessionDescriptionMsg* param =
-          static_cast<CreateSessionDescriptionMsg*>(msg->pdata);
-      param->observer->OnSuccess(param->description.release());
-      delete param;
-      break;
-    }
-    case MSG_CREATE_SESSIONDESCRIPTION_FAILED: {
-      CreateSessionDescriptionMsg* param =
-          static_cast<CreateSessionDescriptionMsg*>(msg->pdata);
-      param->observer->OnFailure(std::move(param->error));
-      delete param;
-      break;
-    }
-    case MSG_USE_CONSTRUCTOR_CERTIFICATE: {
-      rtc::ScopedRefMessageData<rtc::RTCCertificate>* param =
-          static_cast<rtc::ScopedRefMessageData<rtc::RTCCertificate>*>(
-              msg->pdata);
-      RTC_LOG(LS_INFO) << "Using certificate supplied to the constructor.";
-      SetCertificate(param->data());
-      delete param;
-      break;
-    }
-    default:
-      RTC_DCHECK_NOTREACHED();
-      break;
-  }
-}
-
 void WebRtcSessionDescriptionFactory::InternalCreateOffer(
     CreateSessionDescriptionRequest request) {
   if (sdp_info_->local_description()) {
@@ -348,7 +286,7 @@ void WebRtcSessionDescriptionFactory::InternalCreateOffer(
           ? sdp_info_->local_description()->description()
           : nullptr);
   if (!desc) {
-    PostCreateSessionDescriptionFailed(request.observer,
+    PostCreateSessionDescriptionFailed(request.observer.get(),
                                        "Failed to initialize the offer.");
     return;
   }
@@ -375,7 +313,8 @@ void WebRtcSessionDescriptionFactory::InternalCreateOffer(
       }
     }
   }
-  PostCreateSessionDescriptionSucceeded(request.observer, std::move(offer));
+  PostCreateSessionDescriptionSucceeded(request.observer.get(),
+                                        std::move(offer));
 }
 
 void WebRtcSessionDescriptionFactory::InternalCreateAnswer(
@@ -409,7 +348,7 @@ void WebRtcSessionDescriptionFactory::InternalCreateAnswer(
           ? sdp_info_->local_description()->description()
           : nullptr);
   if (!desc) {
-    PostCreateSessionDescriptionFailed(request.observer,
+    PostCreateSessionDescriptionFailed(request.observer.get(),
                                        "Failed to initialize the answer.");
     return;
   }
@@ -436,7 +375,8 @@ void WebRtcSessionDescriptionFactory::InternalCreateAnswer(
       }
     }
   }
-  PostCreateSessionDescriptionSucceeded(request.observer, std::move(answer));
+  PostCreateSessionDescriptionSucceeded(request.observer.get(),
+                                        std::move(answer));
 }
 
 void WebRtcSessionDescriptionFactory::FailPendingRequests(
@@ -446,7 +386,7 @@ void WebRtcSessionDescriptionFactory::FailPendingRequests(
     const CreateSessionDescriptionRequest& request =
         create_session_description_requests_.front();
     PostCreateSessionDescriptionFailed(
-        request.observer,
+        request.observer.get(),
         ((request.type == CreateSessionDescriptionRequest::kOffer)
             ? "CreateOffer"
            : "CreateAnswer") +
@@ -458,21 +398,39 @@ void WebRtcSessionDescriptionFactory::FailPendingRequests(
 void WebRtcSessionDescriptionFactory::PostCreateSessionDescriptionFailed(
     CreateSessionDescriptionObserver* observer,
     const std::string& error) {
-  CreateSessionDescriptionMsg* msg = new CreateSessionDescriptionMsg(
-      observer, RTCError(RTCErrorType::INTERNAL_ERROR, std::string(error)));
-  signaling_thread_->Post(RTC_FROM_HERE, this,
-                          MSG_CREATE_SESSIONDESCRIPTION_FAILED, msg);
+  Post([observer =
+            rtc::scoped_refptr<CreateSessionDescriptionObserver>(observer),
+        error]() mutable {
+    observer->OnFailure(
+        RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error)));
+  });
   RTC_LOG(LS_ERROR) << "Create SDP failed: " << error;
 }
 
 void WebRtcSessionDescriptionFactory::PostCreateSessionDescriptionSucceeded(
     CreateSessionDescriptionObserver* observer,
     std::unique_ptr<SessionDescriptionInterface> description) {
-  CreateSessionDescriptionMsg* msg =
-      new CreateSessionDescriptionMsg(observer, RTCError::OK());
-  msg->description = std::move(description);
-  signaling_thread_->Post(RTC_FROM_HERE, this,
-                          MSG_CREATE_SESSIONDESCRIPTION_SUCCESS, msg);
+  Post([observer =
+            rtc::scoped_refptr<CreateSessionDescriptionObserver>(observer),
+        description = std::move(description)]() mutable {
+    observer->OnSuccess(description.release());
+  });
+}
+
+void WebRtcSessionDescriptionFactory::Post(
+    absl::AnyInvocable<void() &&> callback) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  callbacks_.push(std::move(callback));
+  signaling_thread_->PostTask([weak_ptr = weak_factory_.GetWeakPtr()] {
+    if (weak_ptr) {
+      auto& callbacks = weak_ptr->callbacks_;
+      // Callbacks are pushed from the same thread, thus this task should
+      // correspond to the first entry in the queue.
+      RTC_DCHECK(!callbacks.empty());
+      std::move(callbacks.front())();
+      callbacks.pop();
+    }
+  });
 }
 
 void WebRtcSessionDescriptionFactory::OnCertificateRequestFailed() {
@@ -485,7 +443,7 @@ void WebRtcSessionDescriptionFactory::OnCertificateRequestFailed() {
 }
 
 void WebRtcSessionDescriptionFactory::SetCertificate(
-    const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) {
+    rtc::scoped_refptr<rtc::RTCCertificate> certificate) {
   RTC_DCHECK(certificate);
   RTC_LOG(LS_VERBOSE) << "Setting new certificate.";
 
@@ -493,7 +451,7 @@ void WebRtcSessionDescriptionFactory::SetCertificate(
 
   on_certificate_ready_(certificate);
 
-  transport_desc_factory_.set_certificate(certificate);
+  transport_desc_factory_.set_certificate(std::move(certificate));
   transport_desc_factory_.set_secure(cricket::SEC_ENABLED);
 
   while (!create_session_description_requests_.empty()) {
diff --git a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h
index 8e80fb556d..122a720162 100644
--- a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h
+++ b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h
@@ -18,79 +18,48 @@
 #include <queue>
 #include <string>
 
+#include "absl/functional/any_invocable.h"
 #include "api/jsep.h"
 #include "api/peer_connection_interface.h"
 #include "api/scoped_refptr.h"
+#include "api/task_queue/task_queue_base.h"
 #include "p2p/base/transport_description.h"
 #include "p2p/base/transport_description_factory.h"
-#include "pc/channel_manager.h"
 #include "pc/media_session.h"
 #include "pc/sdp_state_provider.h"
-#include "rtc_base/constructor_magic.h"
-#include "rtc_base/message_handler.h"
 #include "rtc_base/rtc_certificate.h"
 #include "rtc_base/rtc_certificate_generator.h"
-#include "rtc_base/third_party/sigslot/sigslot.h"
-#include "rtc_base/thread.h"
-#include "rtc_base/thread_message.h"
 #include "rtc_base/unique_id_generator.h"
+#include "rtc_base/weak_ptr.h"
 
 namespace webrtc {
-
-// DTLS certificate request callback class.
-class WebRtcCertificateGeneratorCallback
-    : public rtc::RTCCertificateGeneratorCallback {
- public:
-  // `rtc::RTCCertificateGeneratorCallback` overrides.
-  void OnSuccess(
-      const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) override;
-  void OnFailure() override;
-
-  sigslot::signal0<> SignalRequestFailed;
-  sigslot::signal1<const rtc::scoped_refptr<rtc::RTCCertificate>&>
-      SignalCertificateReady;
-};
-
-struct CreateSessionDescriptionRequest {
-  enum Type {
-    kOffer,
-    kAnswer,
-  };
-
-  CreateSessionDescriptionRequest(Type type,
-                                  CreateSessionDescriptionObserver* observer,
-                                  const cricket::MediaSessionOptions& options)
-      : type(type), observer(observer), options(options) {}
-
-  Type type;
-  rtc::scoped_refptr<CreateSessionDescriptionObserver> observer;
-  cricket::MediaSessionOptions options;
-};
-
 // This class is used to create offer/answer session description. Certificates
 // for WebRtcSession/DTLS are either supplied at construction or generated
 // asynchronously. It queues the create offer/answer request until the
 // certificate generation has completed, i.e. when OnCertificateRequestFailed or
 // OnCertificateReady is called.
-class WebRtcSessionDescriptionFactory : public rtc::MessageHandler,
-                                        public sigslot::has_slots<> {
+class WebRtcSessionDescriptionFactory {
  public:
   // Can specify either a `cert_generator` or `certificate` to enable DTLS. If
   // a certificate generator is given, starts generating the certificate
   // asynchronously. If a certificate is given, will use that for identifying
   // over DTLS. If neither is specified, DTLS is disabled.
   WebRtcSessionDescriptionFactory(
-      rtc::Thread* signaling_thread,
-      cricket::ChannelManager* channel_manager,
+      ConnectionContext* context,
       const SdpStateProvider* sdp_info,
       const std::string& session_id,
       bool dtls_enabled,
       std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator,
-      const rtc::scoped_refptr<rtc::RTCCertificate>& certificate,
-      rtc::UniqueRandomIdGenerator* ssrc_generator,
+      rtc::scoped_refptr<rtc::RTCCertificate> certificate,
       std::function<void(const rtc::scoped_refptr<rtc::RTCCertificate>&)>
-          on_certificate_ready);
-  virtual ~WebRtcSessionDescriptionFactory();
+          on_certificate_ready,
+      const FieldTrialsView& field_trials);
+  ~WebRtcSessionDescriptionFactory();
+
+  WebRtcSessionDescriptionFactory(const WebRtcSessionDescriptionFactory&) =
+      delete;
+  WebRtcSessionDescriptionFactory& operator=(
+      const WebRtcSessionDescriptionFactory&) = delete;
 
   static void CopyCandidatesFromSessionDescription(
       const SessionDescriptionInterface* source_desc,
@@ -128,8 +97,21 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler,
     CERTIFICATE_FAILED,
   };
 
-  // MessageHandler implementation.
-  virtual void OnMessage(rtc::Message* msg);
+  struct CreateSessionDescriptionRequest {
+    enum Type {
+      kOffer,
+      kAnswer,
+    };
+
+    CreateSessionDescriptionRequest(Type type,
+                                    CreateSessionDescriptionObserver* observer,
+                                    const cricket::MediaSessionOptions& options)
+        : type(type), observer(observer), options(options) {}
+
+    Type type;
+    rtc::scoped_refptr<CreateSessionDescriptionObserver> observer;
+    cricket::MediaSessionOptions options;
+  };
 
   void InternalCreateOffer(CreateSessionDescriptionRequest request);
   void InternalCreateAnswer(CreateSessionDescriptionRequest request);
@@ -141,14 +123,16 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler,
   void PostCreateSessionDescriptionSucceeded(
       CreateSessionDescriptionObserver* observer,
       std::unique_ptr<SessionDescriptionInterface> description);
+  // Posts `callback` to `signaling_thread_`, and ensures it will be called no
+  // later than in the destructor.
+ void Post(absl::AnyInvocable callback); void OnCertificateRequestFailed(); - void SetCertificate( - const rtc::scoped_refptr& certificate); + void SetCertificate(rtc::scoped_refptr certificate); std::queue create_session_description_requests_; - rtc::Thread* const signaling_thread_; + TaskQueueBase* const signaling_thread_; cricket::TransportDescriptionFactory transport_desc_factory_; cricket::MediaSessionDescriptionFactory session_desc_factory_; uint64_t session_version_; @@ -156,11 +140,11 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler, const SdpStateProvider* sdp_info_; const std::string session_id_; CertificateRequestState certificate_request_state_; + std::queue> callbacks_; std::function&)> on_certificate_ready_; - - RTC_DISALLOW_COPY_AND_ASSIGN(WebRtcSessionDescriptionFactory); + rtc::WeakPtrFactory weak_factory_{this}; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.cc deleted file mode 100644 index 61c153499f..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.cc +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright 2014 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/async_invoker.h" - -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace rtc { - -DEPRECATED_AsyncInvoker::DEPRECATED_AsyncInvoker() - : pending_invocations_(0), - invocation_complete_(make_ref_counted()), - destroying_(false) {} - -DEPRECATED_AsyncInvoker::~DEPRECATED_AsyncInvoker() { - destroying_.store(true, std::memory_order_relaxed); - // Messages for this need to be cleared *before* our destructor is complete. - ThreadManager::Clear(this); - // And we need to wait for any invocations that are still in progress on - // other threads. Using memory_order_acquire for synchronization with - // AsyncClosure destructors. - while (pending_invocations_.load(std::memory_order_acquire) > 0) { - // If the destructor was called while AsyncInvoke was being called by - // another thread, WITHIN an AsyncInvoked functor, it may do another - // Thread::Post even after we called ThreadManager::Clear(this). So - // we need to keep calling Clear to discard these posts. - Thread::Current()->Clear(this); - invocation_complete_->Wait(Event::kForever); - } -} - -void DEPRECATED_AsyncInvoker::OnMessage(Message* msg) { - // Get the AsyncClosure shared ptr from this message's data. - ScopedMessageData* data = - static_cast*>(msg->pdata); - // Execute the closure and trigger the return message if needed. - data->data().Execute(); - delete data; -} - -void DEPRECATED_AsyncInvoker::Clear() { - ThreadManager::Clear(this); -} - -void DEPRECATED_AsyncInvoker::DoInvoke(const Location& posted_from, - Thread* thread, - std::unique_ptr closure, - uint32_t id) { - if (destroying_.load(std::memory_order_relaxed)) { - // Note that this may be expected, if the application is AsyncInvoking - // tasks that AsyncInvoke other tasks. But otherwise it indicates a race - // between a thread destroying the AsyncInvoker and a thread still trying - // to use it. 
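// ---------------------------------------------------------------------------
// Illustrative sketch (editorial aside, not part of the patch): the `Post()`
// helper declared in the header above, and defined in the .cc hunk earlier,
// queues the callback immediately, lets a task posted to the signaling thread
// run the oldest entry while the factory is still alive, and drains the queue
// in the destructor, so every queued callback runs no later than destruction.
// The same shape with standard-library stand-ins: `std::function` instead of
// absl::AnyInvocable, `std::weak_ptr` instead of rtc::WeakPtrFactory, and a
// hypothetical PostToSignalingThread() instead of TaskQueueBase::PostTask().
#include <functional>
#include <memory>
#include <queue>
#include <utility>

// Hypothetical stand-in for posting onto the signaling thread's task queue;
// running the task inline keeps the sketch self-contained and single-threaded.
inline void PostToSignalingThread(std::function<void()> task) {
  std::move(task)();
}

class CallbackQueue {
 public:
  ~CallbackQueue() {
    // Drain on destruction: whatever no posted task has run yet runs here.
    while (!callbacks_.empty()) {
      std::move(callbacks_.front())();
      callbacks_.pop();
    }
  }

  // Assumed to be called on the signaling thread only, like the real Post().
  void Post(std::function<void()> callback) {
    callbacks_.push(std::move(callback));
    PostToSignalingThread([weak = std::weak_ptr<CallbackQueue*>(alive_)] {
      // If the owner is already gone, its destructor ran the callback.
      if (auto self = weak.lock()) {
        auto& callbacks = (*self)->callbacks_;
        if (!callbacks.empty()) {
          std::move(callbacks.front())();
          callbacks.pop();
        }
      }
    });
  }

 private:
  std::queue<std::function<void()>> callbacks_;
  // Destroyed together with the object, which invalidates outstanding
  // weak_ptrs; this mirrors what rtc::WeakPtrFactory does for the factory.
  std::shared_ptr<CallbackQueue*> alive_{std::make_shared<CallbackQueue*>(this)};
};
// ---------------------------------------------------------------------------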
- RTC_LOG(LS_WARNING) << "Tried to invoke while destroying the invoker."; - return; - } - thread->Post(posted_from, this, id, - new ScopedMessageData(std::move(closure))); -} - -void DEPRECATED_AsyncInvoker::DoInvokeDelayed( - const Location& posted_from, - Thread* thread, - std::unique_ptr closure, - uint32_t delay_ms, - uint32_t id) { - if (destroying_.load(std::memory_order_relaxed)) { - // See above comment. - RTC_LOG(LS_WARNING) << "Tried to invoke while destroying the invoker."; - return; - } - thread->PostDelayed(posted_from, delay_ms, this, id, - new ScopedMessageData(std::move(closure))); -} - -AsyncClosure::AsyncClosure(DEPRECATED_AsyncInvoker* invoker) - : invoker_(invoker), invocation_complete_(invoker_->invocation_complete_) { - invoker_->pending_invocations_.fetch_add(1, std::memory_order_relaxed); -} - -AsyncClosure::~AsyncClosure() { - // Using memory_order_release for synchronization with the AsyncInvoker - // destructor. - invoker_->pending_invocations_.fetch_sub(1, std::memory_order_release); - - // After `pending_invocations_` is decremented, we may need to signal - // `invocation_complete_` in case the AsyncInvoker is being destroyed and - // waiting for pending tasks to complete. - // - // It's also possible that the destructor finishes before "Set()" is called, - // which is safe because the event is reference counted (and in a thread-safe - // way). - invocation_complete_->Set(); -} - -} // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.h b/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.h deleted file mode 100644 index e5a3c15cac..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.h +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright 2014 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_ASYNC_INVOKER_H_ -#define RTC_BASE_ASYNC_INVOKER_H_ - -#include -#include -#include - -#include "absl/base/attributes.h" -#include "api/scoped_refptr.h" -#include "rtc_base/async_invoker_inl.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/event.h" -#include "rtc_base/ref_counted_object.h" -#include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread.h" - -namespace rtc { - -// DEPRECATED - do not use. -// -// Invokes function objects (aka functors) asynchronously on a Thread, and -// owns the lifetime of calls (ie, when this object is destroyed, calls in -// flight are cancelled). AsyncInvoker can optionally execute a user-specified -// function when the asynchronous call is complete, or operates in -// fire-and-forget mode otherwise. -// -// AsyncInvoker does not own the thread it calls functors on. -// -// A note about async calls and object lifetimes: users should -// be mindful of object lifetimes when calling functions asynchronously and -// ensure objects used by the function _cannot_ be deleted between the -// invocation and execution of the functor. AsyncInvoker is designed to -// help: any calls in flight will be cancelled when the AsyncInvoker used to -// make the call is destructed, and any calls executing will be allowed to -// complete before AsyncInvoker destructs. 
-// -// The easiest way to ensure lifetimes are handled correctly is to create a -// class that owns the Thread and AsyncInvoker objects, and then call its -// methods asynchronously as needed. -// -// Example: -// class MyClass { -// public: -// void FireAsyncTaskWithResult(Thread* thread, int x) { -// // Specify a callback to get the result upon completion. -// invoker_.AsyncInvoke(RTC_FROM_HERE, -// thread, Bind(&MyClass::AsyncTaskWithResult, this, x), -// &MyClass::OnTaskComplete, this); -// } -// void FireAnotherAsyncTask(Thread* thread) { -// // No callback specified means fire-and-forget. -// invoker_.AsyncInvoke(RTC_FROM_HERE, -// thread, Bind(&MyClass::AnotherAsyncTask, this)); -// -// private: -// int AsyncTaskWithResult(int x) { -// // Some long running process... -// return x * x; -// } -// void AnotherAsyncTask() { -// // Some other long running process... -// } -// void OnTaskComplete(int result) { result_ = result; } -// -// AsyncInvoker invoker_; -// int result_; -// }; -// -// More details about threading: -// - It's safe to construct/destruct AsyncInvoker on different threads. -// - It's safe to call AsyncInvoke from different threads. -// - It's safe to call AsyncInvoke recursively from *within* a functor that's -// being AsyncInvoked. -// - However, it's *not* safe to call AsyncInvoke from *outside* a functor -// that's being AsyncInvoked while the AsyncInvoker is being destroyed on -// another thread. This is just inherently unsafe and there's no way to -// prevent that. So, the user of this class should ensure that the start of -// each "chain" of invocations is synchronized somehow with the AsyncInvoker's -// destruction. This can be done by starting each chain of invocations on the -// same thread on which it will be destroyed, or by using some other -// synchronization method. -class DEPRECATED_AsyncInvoker : public MessageHandlerAutoCleanup { - public: - DEPRECATED_AsyncInvoker(); - ~DEPRECATED_AsyncInvoker() override; - - // Call `functor` asynchronously on `thread`, with no callback upon - // completion. Returns immediately. - template - void AsyncInvoke(const Location& posted_from, - Thread* thread, - FunctorT&& functor, - uint32_t id = 0) { - std::unique_ptr closure( - new FireAndForgetAsyncClosure( - this, std::forward(functor))); - DoInvoke(posted_from, thread, std::move(closure), id); - } - - // Call `functor` asynchronously on `thread` with `delay_ms`, with no callback - // upon completion. Returns immediately. - template - void AsyncInvokeDelayed(const Location& posted_from, - Thread* thread, - FunctorT&& functor, - uint32_t delay_ms, - uint32_t id = 0) { - std::unique_ptr closure( - new FireAndForgetAsyncClosure( - this, std::forward(functor))); - DoInvokeDelayed(posted_from, thread, std::move(closure), delay_ms, id); - } - - // Cancels any outstanding calls we own that are pending on any thread, and - // which have not yet started to execute. This does not wait for any calls - // that have already started executing to complete. - void Clear(); - - private: - void OnMessage(Message* msg) override; - void DoInvoke(const Location& posted_from, - Thread* thread, - std::unique_ptr closure, - uint32_t id); - void DoInvokeDelayed(const Location& posted_from, - Thread* thread, - std::unique_ptr closure, - uint32_t delay_ms, - uint32_t id); - - // Used to keep track of how many invocations (AsyncClosures) are still - // alive, so that the destructor can wait for them to finish, as described in - // the class documentation. 
- // - // TODO(deadbeef): Using a raw std::atomic like this is prone to error and - // difficult to maintain. We should try to wrap this functionality in a - // separate class to reduce the chance of errors being introduced in the - // future. - std::atomic pending_invocations_; - - // Reference counted so that if the destructor finishes before an - // AsyncClosure's destructor that's about to call - // "invocation_complete_->Set()", it's not dereferenced after being destroyed. - rtc::Ref::Ptr invocation_complete_; - - // This flag is used to ensure that if an application AsyncInvokes tasks that - // recursively AsyncInvoke other tasks ad infinitum, the cycle eventually - // terminates. - std::atomic destroying_; - - friend class AsyncClosure; - - RTC_DISALLOW_COPY_AND_ASSIGN(DEPRECATED_AsyncInvoker); -}; - -} // namespace rtc - -#endif // RTC_BASE_ASYNC_INVOKER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker_inl.h b/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker_inl.h deleted file mode 100644 index c2b6413519..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker_inl.h +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2014 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_ASYNC_INVOKER_INL_H_ -#define RTC_BASE_ASYNC_INVOKER_INL_H_ - -#include "api/scoped_refptr.h" -#include "rtc_base/event.h" -#include "rtc_base/message_handler.h" -#include "rtc_base/ref_counted_object.h" -#include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread.h" -#include "rtc_base/thread_annotations.h" - -namespace rtc { - -class DEPRECATED_AsyncInvoker; - -// Helper class for DEPRECATED_AsyncInvoker. Runs a task and triggers a callback -// on the calling thread if necessary. -class AsyncClosure { - public: - explicit AsyncClosure(DEPRECATED_AsyncInvoker* invoker); - virtual ~AsyncClosure(); - // Runs the asynchronous task, and triggers a callback to the calling - // thread if needed. Should be called from the target thread. - virtual void Execute() = 0; - - protected: - DEPRECATED_AsyncInvoker* invoker_; - // Reference counted so that if the AsyncInvoker destructor finishes before - // an AsyncClosure's destructor that's about to call - // "invocation_complete_->Set()", it's not dereferenced after being - // destroyed. - rtc::Ref::Ptr invocation_complete_; -}; - -// Simple closure that doesn't trigger a callback for the calling thread. 
-template -class FireAndForgetAsyncClosure : public AsyncClosure { - public: - explicit FireAndForgetAsyncClosure(DEPRECATED_AsyncInvoker* invoker, - FunctorT&& functor) - : AsyncClosure(invoker), functor_(std::forward(functor)) {} - virtual void Execute() { functor_(); } - - private: - typename std::decay::type functor_; -}; - -} // namespace rtc - -#endif // RTC_BASE_ASYNC_INVOKER_INL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_packet_socket.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/async_packet_socket.cc index d5435d71d0..1ce0d3b122 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_packet_socket.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_packet_socket.cc @@ -24,10 +24,24 @@ PacketOptions::PacketOptions(DiffServCodePoint dscp) : dscp(dscp) {} PacketOptions::PacketOptions(const PacketOptions& other) = default; PacketOptions::~PacketOptions() = default; -AsyncPacketSocket::AsyncPacketSocket() = default; +AsyncPacketSocket::AsyncPacketSocket() { + network_checker_.Detach(); +} AsyncPacketSocket::~AsyncPacketSocket() = default; +void AsyncPacketSocket::SubscribeClose( + const void* removal_tag, + std::function callback) { + RTC_DCHECK_RUN_ON(&network_checker_); + on_close_.AddReceiver(removal_tag, std::move(callback)); +} + +void AsyncPacketSocket::UnsubscribeClose(const void* removal_tag) { + RTC_DCHECK_RUN_ON(&network_checker_); + on_close_.RemoveReceivers(removal_tag); +} + void CopySocketInformationToPacketInfo(size_t packet_size_bytes, const AsyncPacketSocket& socket_from, bool is_connectionless, diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_packet_socket.h b/TMessagesProj/jni/voip/webrtc/rtc_base/async_packet_socket.h index b5fcf8edb7..aa31e25eab 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_packet_socket.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_packet_socket.h @@ -13,10 +13,12 @@ #include -#include "rtc_base/constructor_magic.h" +#include "api/sequence_checker.h" +#include "rtc_base/callback_list.h" #include "rtc_base/dscp.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/socket.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/time_utils.h" @@ -69,6 +71,9 @@ class RTC_EXPORT AsyncPacketSocket : public sigslot::has_slots<> { AsyncPacketSocket(); ~AsyncPacketSocket() override; + AsyncPacketSocket(const AsyncPacketSocket&) = delete; + AsyncPacketSocket& operator=(const AsyncPacketSocket&) = delete; + // Returns current local address. Address may be set to null if the // socket is not bound yet (GetState() returns STATE_BINDING). virtual SocketAddress GetLocalAddress() const = 0; @@ -98,6 +103,11 @@ class RTC_EXPORT AsyncPacketSocket : public sigslot::has_slots<> { virtual int GetError() const = 0; virtual void SetError(int error) = 0; + // Register a callback to be called when the socket is closed. + void SubscribeClose(const void* removal_tag, + std::function callback); + void UnsubscribeClose(const void* removal_tag); + // Emitted each time a packet is read. Used only for UDP and // connected TCP sockets. sigslot::signal5 { // CONNECTING to CONNECTED. sigslot::signal1 SignalConnect; - // Emitted for client TCP sockets when state is changed from - // CONNECTED to CLOSED. - sigslot::signal2 SignalClose; + void NotifyClosedForTest(int err) { NotifyClosed(err); } + + protected: + // TODO(bugs.webrtc.org/11943): Remove after updating downstream code. 
+ void SignalClose(AsyncPacketSocket* s, int err) { + RTC_DCHECK_EQ(s, this); + NotifyClosed(err); + } + + void NotifyClosed(int err) { + RTC_DCHECK_RUN_ON(&network_checker_); + on_close_.Send(this, err); + } + + RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker network_checker_; private: - RTC_DISALLOW_COPY_AND_ASSIGN(AsyncPacketSocket); + webrtc::CallbackList on_close_ + RTC_GUARDED_BY(&network_checker_); }; // Listen socket, producing an AsyncPacketSocket when a peer connects. diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver.cc index ad1598f214..7c1a6fe78d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver.cc @@ -14,6 +14,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/ref_counted_base.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -37,7 +38,6 @@ #include "rtc_base/logging.h" #include "rtc_base/platform_thread.h" #include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/third_party/sigslot/sigslot.h" // for signal_with_thread... #if defined(WEBRTC_MAC) || defined(WEBRTC_IOS) @@ -50,23 +50,23 @@ namespace rtc { namespace { void GlobalGcdRunTask(void* context) { - std::unique_ptr task( - static_cast(context)); - task->Run(); + std::unique_ptr> task( + static_cast*>(context)); + std::move (*task)(); } // Post a task into the system-defined global concurrent queue. -void PostTaskToGlobalQueue(std::unique_ptr task) { +void PostTaskToGlobalQueue( + std::unique_ptr> task) { dispatch_queue_global_t global_queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0); - webrtc::QueuedTask* context = task.release(); - dispatch_async_f(global_queue, context, &GlobalGcdRunTask); + dispatch_async_f(global_queue, task.release(), &GlobalGcdRunTask); } } // namespace #endif -int ResolveHostname(const std::string& hostname, +int ResolveHostname(absl::string_view hostname, int family, std::vector* addresses) { #ifdef __native_client__ @@ -99,7 +99,8 @@ int ResolveHostname(const std::string& hostname, // https://android.googlesource.com/platform/bionic/+/ // 7e0bfb511e85834d7c6cb9631206b62f82701d60/libc/netbsd/net/getaddrinfo.c#1657 hints.ai_flags = AI_ADDRCONFIG; - int ret = getaddrinfo(hostname.c_str(), nullptr, &hints, &result); + int ret = + getaddrinfo(std::string(hostname).c_str(), nullptr, &hints, &result); if (ret != 0) { return ret; } @@ -144,18 +145,21 @@ void RunResolution(void* obj) { } void AsyncResolver::Start(const SocketAddress& addr) { + Start(addr, addr.family()); +} + +void AsyncResolver::Start(const SocketAddress& addr, int family) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK(!destroy_called_); addr_ = addr; auto thread_function = - [this, addr, caller_task_queue = webrtc::TaskQueueBase::Current(), + [this, addr, family, caller_task_queue = webrtc::TaskQueueBase::Current(), state = state_] { std::vector addresses; - int error = - ResolveHostname(addr.hostname().c_str(), addr.family(), &addresses); + int error = ResolveHostname(addr.hostname(), family, &addresses); webrtc::MutexLock lock(&state->mutex); if (state->status == State::Status::kLive) { - caller_task_queue->PostTask(webrtc::ToQueuedTask( + caller_task_queue->PostTask( [this, error, addresses = std::move(addresses), state] { bool live; { @@ -168,11 +172,12 @@ void AsyncResolver::Start(const SocketAddress& addr) { 
RTC_DCHECK_RUN_ON(&sequence_checker_); ResolveDone(std::move(addresses), error); } - })); + }); } }; #if defined(WEBRTC_MAC) || defined(WEBRTC_IOS) - PostTaskToGlobalQueue(webrtc::ToQueuedTask(std::move(thread_function))); + PostTaskToGlobalQueue( + std::make_unique>(thread_function)); #else PlatformThread::SpawnDetached(std::move(thread_function), "AsyncResolver"); #endif diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver.h b/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver.h index 0c053eed81..46be43860e 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver.h @@ -20,14 +20,13 @@ #include #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "rtc_base/async_resolver_interface.h" #include "rtc_base/event.h" #include "rtc_base/ip_address.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/socket_address.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/system/rtc_export.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" @@ -46,6 +45,7 @@ class RTC_EXPORT AsyncResolver : public AsyncResolverInterface { ~AsyncResolver() override; void Start(const SocketAddress& addr) override; + void Start(const SocketAddress& addr, int family) override; bool GetResolvedAddress(int family, SocketAddress* addr) const override; int GetError() const override; void Destroy(bool wait) override; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver_interface.h b/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver_interface.h index 6916ea4860..829dc7e23b 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver_interface.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver_interface.h @@ -11,6 +11,7 @@ #ifndef RTC_BASE_ASYNC_RESOLVER_INTERFACE_H_ #define RTC_BASE_ASYNC_RESOLVER_INTERFACE_H_ +#include "rtc_base/checks.h" #include "rtc_base/socket_address.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" @@ -25,6 +26,8 @@ class RTC_EXPORT AsyncResolverInterface { // Start address resolution of the hostname in `addr`. virtual void Start(const SocketAddress& addr) = 0; + // Start address resolution of the hostname in `addr` matching `family`. + virtual void Start(const SocketAddress& addr, int family) = 0; // Returns true iff the address from `Start` was successfully resolved. 
// If the address was successfully resolved, sets `addr` to a copy of the // address from `Start` with the IP address set to the top most resolved diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_tcp_socket.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/async_tcp_socket.cc index c2480755b4..d29eafddb9 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_tcp_socket.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_tcp_socket.cc @@ -68,7 +68,6 @@ AsyncTCPSocketBase::AsyncTCPSocketBase(Socket* socket, max_outsize_(max_packet_size) { inbuf_.EnsureCapacity(kMinimumRecvSize); - RTC_DCHECK(socket_.get() != nullptr); socket_->SignalConnectEvent.connect(this, &AsyncTCPSocketBase::OnConnectEvent); socket_->SignalReadEvent.connect(this, &AsyncTCPSocketBase::OnReadEvent); @@ -237,7 +236,7 @@ void AsyncTCPSocketBase::OnWriteEvent(Socket* socket) { } void AsyncTCPSocketBase::OnCloseEvent(Socket* socket, int error) { - SignalClose(this, error); + NotifyClosed(error); } // AsyncTCPSocket diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_tcp_socket.h b/TMessagesProj/jni/voip/webrtc/rtc_base/async_tcp_socket.h index ca61b54d78..541080fba7 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_tcp_socket.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_tcp_socket.h @@ -17,7 +17,6 @@ #include "rtc_base/async_packet_socket.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" @@ -31,6 +30,9 @@ class AsyncTCPSocketBase : public AsyncPacketSocket { AsyncTCPSocketBase(Socket* socket, size_t max_packet_size); ~AsyncTCPSocketBase() override; + AsyncTCPSocketBase(const AsyncTCPSocketBase&) = delete; + AsyncTCPSocketBase& operator=(const AsyncTCPSocketBase&) = delete; + // Pure virtual methods to send and recv data. int Send(const void* pv, size_t cb, @@ -78,8 +80,6 @@ class AsyncTCPSocketBase : public AsyncPacketSocket { Buffer outbuf_; size_t max_insize_; size_t max_outsize_; - - RTC_DISALLOW_COPY_AND_ASSIGN(AsyncTCPSocketBase); }; class AsyncTCPSocket : public AsyncTCPSocketBase { @@ -93,13 +93,13 @@ class AsyncTCPSocket : public AsyncTCPSocketBase { explicit AsyncTCPSocket(Socket* socket); ~AsyncTCPSocket() override {} + AsyncTCPSocket(const AsyncTCPSocket&) = delete; + AsyncTCPSocket& operator=(const AsyncTCPSocket&) = delete; + int Send(const void* pv, size_t cb, const rtc::PacketOptions& options) override; void ProcessInput(char* data, size_t* len) override; - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(AsyncTCPSocket); }; class AsyncTcpListenSocket : public AsyncListenSocket { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/atomic_ops.h b/TMessagesProj/jni/voip/webrtc/rtc_base/atomic_ops.h deleted file mode 100644 index 18a24a8e2e..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/atomic_ops.h +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright 2011 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_ATOMIC_OPS_H_ -#define RTC_BASE_ATOMIC_OPS_H_ - -#if defined(WEBRTC_WIN) -// clang-format off -// clang formating would change include order. - -// Include winsock2.h before including to maintain consistency with -// win32.h. 
To include win32.h directly, it must be broken out into its own -// build target. -#include -#include -// clang-format on -#endif // defined(WEBRTC_WIN) - -namespace rtc { -class AtomicOps { - public: -#if defined(WEBRTC_WIN) - // Assumes sizeof(int) == sizeof(LONG), which it is on Win32 and Win64. - static int Increment(volatile int* i) { - return ::InterlockedIncrement(reinterpret_cast(i)); - } - static int Decrement(volatile int* i) { - return ::InterlockedDecrement(reinterpret_cast(i)); - } - static int AcquireLoad(volatile const int* i) { return *i; } - static void ReleaseStore(volatile int* i, int value) { *i = value; } - static int CompareAndSwap(volatile int* i, int old_value, int new_value) { - return ::InterlockedCompareExchange(reinterpret_cast(i), - new_value, old_value); - } - // Pointer variants. - template - static T* AcquireLoadPtr(T* volatile* ptr) { - return *ptr; - } - template - static T* CompareAndSwapPtr(T* volatile* ptr, T* old_value, T* new_value) { - return static_cast(::InterlockedCompareExchangePointer( - reinterpret_cast(ptr), new_value, old_value)); - } -#else - static int Increment(volatile int* i) { return __sync_add_and_fetch(i, 1); } - static int Decrement(volatile int* i) { return __sync_sub_and_fetch(i, 1); } - static int AcquireLoad(volatile const int* i) { - return __atomic_load_n(i, __ATOMIC_ACQUIRE); - } - static void ReleaseStore(volatile int* i, int value) { - __atomic_store_n(i, value, __ATOMIC_RELEASE); - } - static int CompareAndSwap(volatile int* i, int old_value, int new_value) { - return __sync_val_compare_and_swap(i, old_value, new_value); - } - // Pointer variants. - template - static T* AcquireLoadPtr(T* volatile* ptr) { - return __atomic_load_n(ptr, __ATOMIC_ACQUIRE); - } - template - static T* CompareAndSwapPtr(T* volatile* ptr, T* old_value, T* new_value) { - return __sync_val_compare_and_swap(ptr, old_value, new_value); - } -#endif -}; - -} // namespace rtc - -#endif // RTC_BASE_ATOMIC_OPS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.h index 2cdac88e26..21e0e9e005 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.h @@ -14,8 +14,6 @@ #include // For size_t. #include // For integer types. -#include "rtc_base/constructor_magic.h" - namespace rtc { // A BitBuffer API for write operations. Supports symmetric write APIs to the @@ -27,6 +25,9 @@ class BitBufferWriter { // Constructs a bit buffer for the writable buffer of `bytes`. BitBufferWriter(uint8_t* bytes, size_t byte_count); + BitBufferWriter(const BitBufferWriter&) = delete; + BitBufferWriter& operator=(const BitBufferWriter&) = delete; + // Gets the current offset, in bytes/bits, from the start of the buffer. The // bit offset is the offset into the current byte, in the range [0,7]. void GetCurrentOffset(size_t* out_byte_offset, size_t* out_bit_offset); @@ -148,8 +149,6 @@ class BitBufferWriter { size_t byte_offset_; // The current offset, in bits, into the current byte. 
size_t bit_offset_; - - RTC_DISALLOW_COPY_AND_ASSIGN(BitBufferWriter); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.cc index 99b2ab3e24..a866224496 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.cc @@ -10,6 +10,8 @@ #include "rtc_base/boringssl_certificate.h" +#include "absl/strings/string_view.h" + #if defined(WEBRTC_WIN) // Must be included first before openssl headers. #include "rtc_base/win32.h" // NOLINT @@ -116,7 +118,7 @@ bool AddSHA256SignatureAlgorithm(CBB* cbb, KeyType key_type) { } // Adds an X.509 Common Name to `cbb`. -bool AddCommonName(CBB* cbb, const std::string& common_name) { +bool AddCommonName(CBB* cbb, absl::string_view common_name) { // See RFC 4519. static const uint8_t kCommonName[] = {0x55, 0x04, 0x03}; @@ -138,7 +140,7 @@ bool AddCommonName(CBB* cbb, const std::string& common_name) { !CBB_add_bytes(&type, kCommonName, sizeof(kCommonName)) || !CBB_add_asn1(&attr, &value, CBS_ASN1_UTF8STRING) || !CBB_add_bytes(&value, - reinterpret_cast(common_name.c_str()), + reinterpret_cast(common_name.data()), common_name.size()) || !CBB_flush(cbb)) { return false; @@ -275,7 +277,7 @@ std::unique_ptr BoringSSLCertificate::Generate( } std::unique_ptr BoringSSLCertificate::FromPEMString( - const std::string& pem_string) { + absl::string_view pem_string) { std::string der; if (!SSLIdentity::PemToDer(kPemTypeCertificate, pem_string, &der)) { return nullptr; @@ -340,7 +342,7 @@ bool BoringSSLCertificate::GetSignatureDigestAlgorithm( return false; } -bool BoringSSLCertificate::ComputeDigest(const std::string& algorithm, +bool BoringSSLCertificate::ComputeDigest(absl::string_view algorithm, unsigned char* digest, size_t size, size_t* length) const { @@ -348,7 +350,7 @@ bool BoringSSLCertificate::ComputeDigest(const std::string& algorithm, } bool BoringSSLCertificate::ComputeDigest(const CRYPTO_BUFFER* cert_buffer, - const std::string& algorithm, + absl::string_view algorithm, unsigned char* digest, size_t size, size_t* length) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.h b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.h index 40a4bd8f38..bd331686b7 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.h @@ -18,8 +18,8 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" @@ -39,10 +39,13 @@ class BoringSSLCertificate final : public SSLCertificate { OpenSSLKeyPair* key_pair, const SSLIdentityParams& params); static std::unique_ptr FromPEMString( - const std::string& pem_string); + absl::string_view pem_string); ~BoringSSLCertificate() override; + BoringSSLCertificate(const BoringSSLCertificate&) = delete; + BoringSSLCertificate& operator=(const BoringSSLCertificate&) = delete; + std::unique_ptr Clone() const override; CRYPTO_BUFFER* cert_buffer() const { return cert_buffer_.get(); } @@ -53,14 +56,14 @@ class BoringSSLCertificate final : public SSLCertificate { bool operator!=(const BoringSSLCertificate& other) const; // Compute the digest of the certificate given `algorithm`. 
- bool ComputeDigest(const std::string& algorithm, + bool ComputeDigest(absl::string_view algorithm, unsigned char* digest, size_t size, size_t* length) const override; // Compute the digest of a certificate as a CRYPTO_BUFFER. static bool ComputeDigest(const CRYPTO_BUFFER* cert_buffer, - const std::string& algorithm, + absl::string_view algorithm, unsigned char* digest, size_t size, size_t* length); @@ -72,7 +75,6 @@ class BoringSSLCertificate final : public SSLCertificate { private: // A handle to the DER encoded certificate data. bssl::UniquePtr cert_buffer_; - RTC_DISALLOW_COPY_AND_ASSIGN(BoringSSLCertificate); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.cc index d22c8ce529..a61524a679 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.cc @@ -22,6 +22,7 @@ #include #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -67,12 +68,12 @@ std::unique_ptr BoringSSLIdentity::CreateInternal( // static std::unique_ptr BoringSSLIdentity::CreateWithExpiration( - const std::string& common_name, + absl::string_view common_name, const KeyParams& key_params, time_t certificate_lifetime) { SSLIdentityParams params; params.key_params = key_params; - params.common_name = common_name; + params.common_name = std::string(common_name); time_t now = time(nullptr); params.not_before = now + kCertificateWindowInSeconds; params.not_after = now + certificate_lifetime; @@ -87,8 +88,8 @@ std::unique_ptr BoringSSLIdentity::CreateForTest( } std::unique_ptr BoringSSLIdentity::CreateFromPEMStrings( - const std::string& private_key, - const std::string& certificate) { + absl::string_view private_key, + absl::string_view certificate) { std::unique_ptr cert( BoringSSLCertificate::FromPEMString(certificate)); if (!cert) { @@ -108,8 +109,8 @@ std::unique_ptr BoringSSLIdentity::CreateFromPEMStrings( } std::unique_ptr BoringSSLIdentity::CreateFromPEMChainStrings( - const std::string& private_key, - const std::string& certificate_chain) { + absl::string_view private_key, + absl::string_view certificate_chain) { bssl::UniquePtr bio( BIO_new_mem_buf(certificate_chain.data(), rtc::dchecked_cast(certificate_chain.size()))); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.h b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.h index 71b29b486d..ffc8812af2 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.h @@ -17,8 +17,8 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/boringssl_certificate.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/openssl_key_pair.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" @@ -31,19 +31,22 @@ namespace rtc { class BoringSSLIdentity final : public SSLIdentity { public: static std::unique_ptr CreateWithExpiration( - const std::string& common_name, + absl::string_view common_name, const KeyParams& key_params, time_t certificate_lifetime); static std::unique_ptr CreateForTest( const SSLIdentityParams& params); static std::unique_ptr CreateFromPEMStrings( - const std::string& private_key, - const std::string& certificate); + absl::string_view private_key, + absl::string_view certificate); static 
std::unique_ptr CreateFromPEMChainStrings( - const std::string& private_key, - const std::string& certificate_chain); + absl::string_view private_key, + absl::string_view certificate_chain); ~BoringSSLIdentity() override; + BoringSSLIdentity(const BoringSSLIdentity&) = delete; + BoringSSLIdentity& operator=(const BoringSSLIdentity&) = delete; + const BoringSSLCertificate& certificate() const override; const SSLCertChain& cert_chain() const override; @@ -67,8 +70,6 @@ class BoringSSLIdentity final : public SSLIdentity { std::unique_ptr key_pair_; std::unique_ptr cert_chain_; - - RTC_DISALLOW_COPY_AND_ASSIGN(BoringSSLIdentity); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/buffer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/buffer.h index c9bf2ccebf..6663c687b8 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/buffer.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/buffer.h @@ -19,6 +19,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "rtc_base/checks.h" #include "rtc_base/type_traits.h" @@ -105,7 +106,10 @@ class BufferT { internal::BufferCompat::value>::type* = nullptr> BufferT(U* data, size_t size, size_t capacity) : BufferT(size, capacity) { static_assert(sizeof(T) == sizeof(U), ""); - std::memcpy(data_.get(), data, size * sizeof(U)); + if (size > 0) { + RTC_DCHECK(data); + std::memcpy(data_.get(), data, size * sizeof(U)); + } } // Construct a buffer from the contents of an array. @@ -117,6 +121,13 @@ class BufferT { ~BufferT() { MaybeZeroCompleteBuffer(); } + // Implicit conversion to absl::string_view if T is compatible with char. + template + operator typename std::enable_if::value, + absl::string_view>::type() const { + return absl::string_view(data(), size()); + } + // Get a pointer to the data. Just .data() will give you a (const) T*, but if // T is a byte-sized integer, you may also use .data() for any other // byte-sized integer U. @@ -259,6 +270,10 @@ class BufferT { typename std::enable_if< internal::BufferCompat::value>::type* = nullptr> void AppendData(const U* data, size_t size) { + if (size == 0) { + return; + } + RTC_DCHECK(data); RTC_DCHECK(IsConsistent()); const size_t new_size = size_ + size; EnsureCapacityWithHeadroom(new_size, true); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h b/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h index ee435f4694..b018e160a1 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h @@ -18,7 +18,6 @@ #include "api/sequence_checker.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" @@ -30,6 +29,9 @@ class BufferQueue final { BufferQueue(size_t capacity, size_t default_size); ~BufferQueue(); + BufferQueue(const BufferQueue&) = delete; + BufferQueue& operator=(const BufferQueue&) = delete; + // Return number of queued buffers. 
size_t size() const; @@ -61,8 +63,6 @@ class BufferQueue final { const size_t default_size_; std::deque queue_ RTC_GUARDED_BY(sequence_checker_); std::vector free_list_ RTC_GUARDED_BY(sequence_checker_); - - RTC_DISALLOW_COPY_AND_ASSIGN(BufferQueue); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/byte_buffer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/byte_buffer.h index fc383f0a33..9bcbb838aa 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/byte_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/byte_buffer.h @@ -16,9 +16,9 @@ #include +#include "absl/strings/string_view.h" #include "rtc_base/buffer.h" #include "rtc_base/byte_order.h" -#include "rtc_base/constructor_magic.h" // Reads/Writes from/to buffer using network byte order (big endian) namespace rtc { @@ -29,6 +29,9 @@ class ByteBufferWriterT { ByteBufferWriterT() { Construct(nullptr, kDefaultCapacity); } ByteBufferWriterT(const char* bytes, size_t len) { Construct(bytes, len); } + ByteBufferWriterT(const ByteBufferWriterT&) = delete; + ByteBufferWriterT& operator=(const ByteBufferWriterT&) = delete; + const char* Data() const { return buffer_.data(); } size_t Length() const { return buffer_.size(); } size_t Capacity() const { return buffer_.capacity(); } @@ -70,8 +73,8 @@ class ByteBufferWriterT { char last_byte = static_cast(val); WriteBytes(&last_byte, 1); } - void WriteString(const std::string& val) { - WriteBytes(val.c_str(), val.size()); + void WriteString(absl::string_view val) { + WriteBytes(val.data(), val.size()); } void WriteBytes(const char* val, size_t len) { buffer_.AppendData(val, len); } @@ -104,7 +107,6 @@ class ByteBufferWriterT { // There are sensible ways to define these, but they aren't needed in our code // base. - RTC_DISALLOW_COPY_AND_ASSIGN(ByteBufferWriterT); }; class ByteBufferWriter : public ByteBufferWriterT> { @@ -112,8 +114,8 @@ class ByteBufferWriter : public ByteBufferWriterT> { ByteBufferWriter(); ByteBufferWriter(const char* bytes, size_t len); - private: - RTC_DISALLOW_COPY_AND_ASSIGN(ByteBufferWriter); + ByteBufferWriter(const ByteBufferWriter&) = delete; + ByteBufferWriter& operator=(const ByteBufferWriter&) = delete; }; // The ByteBufferReader references the passed data, i.e. the pointer must be @@ -129,6 +131,9 @@ class ByteBufferReader { explicit ByteBufferReader(const ByteBufferWriter& buf); + ByteBufferReader(const ByteBufferReader&) = delete; + ByteBufferReader& operator=(const ByteBufferReader&) = delete; + // Returns start of unprocessed data. const char* Data() const { return bytes_ + start_; } // Returns number of unprocessed bytes. @@ -161,9 +166,6 @@ class ByteBufferReader { size_t size_; size_t start_; size_t end_; - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(ByteBufferReader); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.cc index 88d0b6fc71..c452c79b38 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.cc @@ -22,8 +22,7 @@ CallbackListReceivers::~CallbackListReceivers() { } void CallbackListReceivers::RemoveReceivers(const void* removal_tag) { - RTC_CHECK(!send_in_progress_); - RTC_DCHECK(removal_tag != nullptr); + RTC_DCHECK(removal_tag); // We divide the receivers_ vector into three regions: from right to left, the // "keep" region, the "todo" region, and the "remove" region. 
The "todo" @@ -42,8 +41,13 @@ void CallbackListReceivers::RemoveReceivers(const void* removal_tag) { } else if (receivers_[first_remove - 1].removal_tag == removal_tag) { // The last element of the "todo" region should be removed. Move the // "todo"/"remove" boundary. + if (send_in_progress_) { + // Tag this receiver for removal, which will be done when `ForEach` + // has completed. + receivers_[first_remove - 1].removal_tag = pending_removal_tag(); + } --first_remove; - } else { + } else if (!send_in_progress_) { // The first element of the "todo" region should be removed, and the last // element of the "todo" region should be kept. Swap them, and then shrink // the "todo" region from both ends. @@ -57,18 +61,28 @@ void CallbackListReceivers::RemoveReceivers(const void* removal_tag) { } } - // Discard the remove region. - receivers_.resize(first_remove); + if (!send_in_progress_) { + // Discard the remove region. + receivers_.resize(first_remove); + } } void CallbackListReceivers::Foreach( rtc::FunctionView fv) { RTC_CHECK(!send_in_progress_); + bool removals_detected = false; send_in_progress_ = true; for (auto& r : receivers_) { + RTC_DCHECK_NE(r.removal_tag, pending_removal_tag()); fv(r.function); + if (r.removal_tag == pending_removal_tag()) { + removals_detected = true; + } } send_in_progress_ = false; + if (removals_detected) { + RemoveReceivers(pending_removal_tag()); + } } template void CallbackListReceivers::AddReceiver( diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.h b/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.h index 18d48b02ee..a9d71a6562 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.h @@ -18,12 +18,13 @@ #include "rtc_base/checks.h" #include "rtc_base/system/assume.h" #include "rtc_base/system/inline.h" +#include "rtc_base/system/rtc_export.h" #include "rtc_base/untyped_function.h" namespace webrtc { namespace callback_list_impl { -class CallbackListReceivers { +class RTC_EXPORT CallbackListReceivers { public: CallbackListReceivers(); CallbackListReceivers(const CallbackListReceivers&) = delete; @@ -51,10 +52,18 @@ class CallbackListReceivers { void Foreach(rtc::FunctionView fv); private: + // Special protected pointer value that's used as a removal_tag for + // receivers that want to unsubscribe from within a callback. + // Note we could use `&receivers_` too, but since it's the first member + // variable of the class, its address will be the same as the instance + // CallbackList instance, so we take an extra step to avoid collision. 
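// ---------------------------------------------------------------------------
// Illustrative sketch (editorial aside, not part of the patch): what the
// deferred-removal logic added to RemoveReceivers()/Foreach() above buys
// callers. A receiver may now call RemoveReceivers() from inside the callback
// that is currently being dispatched; the entry is only tagged during the
// send and physically erased once the dispatch loop has finished, instead of
// tripping the old RTC_CHECK(!send_in_progress_). CallbackList, AddReceiver,
// RemoveReceivers and Send are the real APIs from this header; the OneShot
// class is made up for the example.
#include "rtc_base/callback_list.h"

class OneShot {
 public:
  explicit OneShot(webrtc::CallbackList<int>* source) : source_(source) {
    source_->AddReceiver(this, [this](int value) {
      last_value_ = value;
      // Unsubscribing from within the callback is now supported.
      source_->RemoveReceivers(this);
    });
  }

  int last_value() const { return last_value_; }

 private:
  webrtc::CallbackList<int>* source_;
  int last_value_ = 0;
};

// Usage:
//   webrtc::CallbackList<int> events;
//   OneShot receiver(&events);
//   events.Send(1);  // receiver fires once and removes itself mid-send
//   events.Send(2);  // no receivers left, nothing happens
// ---------------------------------------------------------------------------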
+ const void* pending_removal_tag() const { return &send_in_progress_; } + struct Callback { const void* removal_tag; UntypedFunction function; }; + std::vector receivers_; bool send_in_progress_ = false; }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/checks.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/checks.cc index 239ea9f0da..e732a2659d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/checks.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/checks.cc @@ -15,6 +15,8 @@ #include #include +#include "absl/strings/string_view.h" + #if defined(WEBRTC_ANDROID) #define RTC_LOG_TAG_ANDROID "rtc" #include // NOLINT @@ -37,20 +39,6 @@ namespace { -RTC_NORETURN void WriteFatalLogAndAbort(const std::string& output) { - const char* output_c = output.c_str(); -#if defined(WEBRTC_ANDROID) - __android_log_print(ANDROID_LOG_ERROR, RTC_LOG_TAG_ANDROID, "%s\n", output_c); -#endif - fflush(stdout); - fprintf(stderr, "%s", output_c); - fflush(stderr); -#if defined(WEBRTC_WIN) - DebugBreak(); -#endif - abort(); -} - #if defined(__GNUC__) __attribute__((__format__(__printf__, 2, 3))) #endif @@ -74,6 +62,30 @@ void AppendFormat(std::string* s, const char* fmt, ...) { namespace rtc { namespace webrtc_checks_impl { +#if !defined(WEBRTC_CHROMIUM_BUILD) +RTC_NORETURN void WriteFatalLog(absl::string_view output) { +#if defined(WEBRTC_ANDROID) + std::string output_str(output); + __android_log_print(ANDROID_LOG_ERROR, RTC_LOG_TAG_ANDROID, "%s\n", + output_str.c_str()); +#endif + fflush(stdout); + fwrite(output.data(), output.size(), 1, stderr); + fflush(stderr); +#if defined(WEBRTC_WIN) + DebugBreak(); +#endif + abort(); +} + +RTC_NORETURN void WriteFatalLog(const char* file, + int line, + absl::string_view output) { + WriteFatalLog(output); +} + +#endif // !defined(WEBRTC_CHROMIUM_BUILD) + #if RTC_CHECK_MSG_ENABLED // Reads one argument from args, appends it to s and advances fmt. // Returns true iff an argument was sucessfully parsed. 
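// ---------------------------------------------------------------------------
// Illustrative sketch (editorial aside, not part of the patch): the
// !defined(WEBRTC_CHROMIUM_BUILD) guard in the checks.cc hunk above turns
// WriteFatalLog() into a link-time override point. An embedder that builds
// with that macro defined has to provide both overloads itself, typically
// routing them into its own crash reporting; the stderr write below is just a
// stand-in for that backend. Something like this is only valid when
// WEBRTC_CHROMIUM_BUILD is defined, otherwise it collides with the defaults.
#include <cstdio>
#include <cstdlib>

#include "absl/strings/string_view.h"
#include "rtc_base/checks.h"

namespace rtc {
namespace webrtc_checks_impl {

RTC_NORETURN void WriteFatalLog(const char* file,
                                int line,
                                absl::string_view output) {
  // Hand the message to the embedder's logging machinery, then abort.
  std::fprintf(stderr, "FATAL %s:%d %.*s\n", file, line,
               static_cast<int>(output.size()), output.data());
  std::fflush(stderr);
  std::abort();
}

RTC_NORETURN void WriteFatalLog(absl::string_view output) {
  WriteFatalLog(/*file=*/"", /*line=*/0, output);
}

}  // namespace webrtc_checks_impl
}  // namespace rtc
// ---------------------------------------------------------------------------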
@@ -164,7 +176,7 @@ RTC_NORETURN void FatalLog(const char* file, va_end(args); - WriteFatalLogAndAbort(s); + WriteFatalLog(file, line, s); } #else // RTC_CHECK_MSG_ENABLED RTC_NORETURN void FatalLog(const char* file, int line) { @@ -177,7 +189,7 @@ RTC_NORETURN void FatalLog(const char* file, int line) { "# Check failed.\n" "# ", file, line, LAST_SYSTEM_ERROR); - WriteFatalLogAndAbort(s); + WriteFatalLog(file, line, s); } #endif // RTC_CHECK_MSG_ENABLED @@ -192,7 +204,7 @@ RTC_NORETURN void UnreachableCodeReached(const char* file, int line) { "# last system error: %u\n" "# ", file, line, LAST_SYSTEM_ERROR); - WriteFatalLogAndAbort(s); + WriteFatalLog(file, line, s); } #else // !RTC_DCHECK_IS_ON @@ -206,7 +218,7 @@ RTC_NORETURN void UnreachableCodeReached() { "# last system error: %u\n" "# ", LAST_SYSTEM_ERROR); - WriteFatalLogAndAbort(s); + WriteFatalLog(s); } #endif // !RTC_DCHECK_IS_ON diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/checks.h b/TMessagesProj/jni/voip/webrtc/rtc_base/checks.h index 863e39d651..459c6a5ebb 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/checks.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/checks.h @@ -56,6 +56,7 @@ RTC_NORETURN void rtc_FatalMessage(const char* file, int line, const char* msg); #include "absl/meta/type_traits.h" #include "absl/strings/string_view.h" +#include "api/scoped_refptr.h" #include "rtc_base/numerics/safe_compare.h" #include "rtc_base/system/inline.h" #include "rtc_base/system/rtc_export.h" @@ -121,6 +122,13 @@ enum class CheckArgType : int8_t { kCheckOp, }; +// These two functions are public so they can be overridden from +// webrtc_overrides in chromium. +RTC_NORETURN void WriteFatalLog(const char* file, + int line, + absl::string_view output); +RTC_NORETURN void WriteFatalLog(absl::string_view output); + #if RTC_CHECK_MSG_ENABLED RTC_NORETURN RTC_EXPORT void FatalLog(const char* file, int line, @@ -192,6 +200,12 @@ inline Val MakeVal(const void* x) { return {x}; } +template +inline Val MakeVal( + const rtc::scoped_refptr& p) { + return {p.get()}; +} + // The enum class types are not implicitly convertible to arithmetic types. template ::value && diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/containers/as_const.h b/TMessagesProj/jni/voip/webrtc/rtc_base/containers/as_const.h deleted file mode 100644 index a41b3bc378..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/containers/as_const.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This implementation is borrowed from Chromium. 
- -#ifndef RTC_BASE_CONTAINERS_AS_CONST_H_ -#define RTC_BASE_CONTAINERS_AS_CONST_H_ - -#include - -namespace webrtc { - -// C++14 implementation of C++17's std::as_const(): -// https://en.cppreference.com/w/cpp/utility/as_const -template -constexpr std::add_const_t& as_const(T& t) noexcept { - return t; -} - -template -void as_const(const T&& t) = delete; - -} // namespace webrtc - -#endif // RTC_BASE_CONTAINERS_AS_CONST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/containers/flat_tree.h b/TMessagesProj/jni/voip/webrtc/rtc_base/containers/flat_tree.h index c79b62b16f..480784ced4 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/containers/flat_tree.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/containers/flat_tree.h @@ -21,9 +21,6 @@ #include "absl/algorithm/container.h" #include "rtc_base/checks.h" -#include "rtc_base/containers/as_const.h" -#include "rtc_base/containers/not_fn.h" -#include "rtc_base/containers/void_t.h" #include "rtc_base/system/no_unique_address.h" namespace webrtc { @@ -44,7 +41,7 @@ constexpr bool is_sorted_and_unique(const Range& range, Comp comp) { // Being unique implies that there are no adjacent elements that // compare equal. So this checks that each element is strictly less // than the element after it. - return absl::c_adjacent_find(range, webrtc::not_fn(comp)) == std::end(range); + return absl::c_adjacent_find(range, std::not_fn(comp)) == std::end(range); } // This is a convenience trait inheriting from std::true_type if Iterator is at @@ -58,7 +55,7 @@ using is_multipass = template struct IsTransparentCompare : std::false_type {}; template -struct IsTransparentCompare> +struct IsTransparentCompare> : std::true_type {}; // Helper inspired by C++20's std::to_array to convert a C-style array to a @@ -543,7 +540,7 @@ class flat_tree { std::stable_sort(first, last, value_comp()); // lhs is already <= rhs due to sort, therefore !(lhs < rhs) <=> lhs == rhs. - auto equal_comp = webrtc::not_fn(value_comp()); + auto equal_comp = std::not_fn(value_comp()); erase(std::unique(first, last, equal_comp), last); } @@ -946,7 +943,7 @@ template template auto flat_tree::find(const K& key) -> iterator { - return const_cast_it(webrtc::as_const(*this).find(key)); + return const_cast_it(std::as_const(*this).find(key)); } template @@ -969,7 +966,7 @@ template template auto flat_tree::equal_range( const K& key) -> std::pair { - auto res = webrtc::as_const(*this).equal_range(key); + auto res = std::as_const(*this).equal_range(key); return {const_cast_it(res.first), const_cast_it(res.second)}; } @@ -990,7 +987,7 @@ template template auto flat_tree::lower_bound( const K& key) -> iterator { - return const_cast_it(webrtc::as_const(*this).lower_bound(key)); + return const_cast_it(std::as_const(*this).lower_bound(key)); } template @@ -1011,7 +1008,7 @@ template template auto flat_tree::upper_bound( const K& key) -> iterator { - return const_cast_it(webrtc::as_const(*this).upper_bound(key)); + return const_cast_it(std::as_const(*this).upper_bound(key)); } template diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/containers/not_fn.h b/TMessagesProj/jni/voip/webrtc/rtc_base/containers/not_fn.h deleted file mode 100644 index 39cfd2763c..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/containers/not_fn.h +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This implementation is borrowed from Chromium. - -#ifndef RTC_BASE_CONTAINERS_NOT_FN_H_ -#define RTC_BASE_CONTAINERS_NOT_FN_H_ - -#include -#include - -#include "rtc_base/containers/invoke.h" - -namespace webrtc { - -namespace not_fn_internal { - -template -struct NotFnImpl { - F f; - - template - constexpr decltype(auto) operator()(Args&&... args) & noexcept { - return !webrtc::invoke(f, std::forward(args)...); - } - - template - constexpr decltype(auto) operator()(Args&&... args) const& noexcept { - return !webrtc::invoke(f, std::forward(args)...); - } - - template - constexpr decltype(auto) operator()(Args&&... args) && noexcept { - return !webrtc::invoke(std::move(f), std::forward(args)...); - } - - template - constexpr decltype(auto) operator()(Args&&... args) const&& noexcept { - return !webrtc::invoke(std::move(f), std::forward(args)...); - } -}; - -} // namespace not_fn_internal - -// Implementation of C++17's std::not_fn. -// -// Reference: -// - https://en.cppreference.com/w/cpp/utility/functional/not_fn -// - https://wg21.link/func.not.fn -template -constexpr not_fn_internal::NotFnImpl> not_fn(F&& f) { - return {std::forward(f)}; -} - -} // namespace webrtc - -#endif // RTC_BASE_CONTAINERS_NOT_FN_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/containers/void_t.h b/TMessagesProj/jni/voip/webrtc/rtc_base/containers/void_t.h deleted file mode 100644 index 149fc70c11..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/containers/void_t.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This implementation is borrowed from Chromium. - -#ifndef RTC_BASE_CONTAINERS_VOID_T_H_ -#define RTC_BASE_CONTAINERS_VOID_T_H_ - -namespace webrtc { -namespace void_t_internal { -// Implementation detail of webrtc::void_t below. -template -struct make_void { - using type = void; -}; - -} // namespace void_t_internal - -// webrtc::void_t is an implementation of std::void_t from C++17. 
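// [Editorial note — illustrative sketch, not part of the patch above.] The
// deleted headers (as_const.h, not_fn.h, void_t.h) were C++14 backports that
// flat_tree.h now replaces with the standard C++17 facilities std::as_const,
// std::not_fn and std::void_t. The predicate trick used by
// is_sorted_and_unique() looks like this in isolation:
#include <algorithm>
#include <functional>
#include <vector>

bool IsStrictlySorted(const std::vector<int>& v) {
  // std::not_fn(std::less<>()) yields "lhs >= rhs"; finding no adjacent pair
  // that satisfies it means every element is strictly less than its successor,
  // i.e. the range is sorted and contains no duplicates.
  return std::adjacent_find(v.begin(), v.end(), std::not_fn(std::less<>())) ==
         v.end();
}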
-// -// We use `webrtc::void_t_internal::make_void` as a helper struct to avoid a -// C++14 defect: -// http://en.cppreference.com/w/cpp/types/void_t -// http://open-std.org/JTC1/SC22/WG21/docs/cwg_defects.html#1558 -template -using void_t = typename ::webrtc::void_t_internal::make_void::type; -} // namespace webrtc - -#endif // RTC_BASE_CONTAINERS_VOID_T_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.cc index f3cc710f85..850327b088 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.cc @@ -12,6 +12,8 @@ #include +#include "absl/strings/string_view.h" + namespace rtc { CopyOnWriteBuffer::CopyOnWriteBuffer() : offset_(0), size_(0) { @@ -28,7 +30,7 @@ CopyOnWriteBuffer::CopyOnWriteBuffer(CopyOnWriteBuffer&& buf) RTC_DCHECK(IsConsistent()); } -CopyOnWriteBuffer::CopyOnWriteBuffer(const std::string& s) +CopyOnWriteBuffer::CopyOnWriteBuffer(absl::string_view s) : CopyOnWriteBuffer(s.data(), s.length()) {} CopyOnWriteBuffer::CopyOnWriteBuffer(size_t size) diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.h index 6837f06526..849f5f5df2 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.h @@ -19,6 +19,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/scoped_refptr.h" #include "rtc_base/buffer.h" #include "rtc_base/checks.h" @@ -38,7 +39,7 @@ class RTC_EXPORT CopyOnWriteBuffer { CopyOnWriteBuffer(CopyOnWriteBuffer&& buf); // Construct a buffer from a string, convenient for unittests. - CopyOnWriteBuffer(const std::string& s); + explicit CopyOnWriteBuffer(absl::string_view s); // Construct a buffer with the specified number of uninitialized bytes. explicit CopyOnWriteBuffer(size_t size); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/crc32.h b/TMessagesProj/jni/voip/webrtc/rtc_base/crc32.h index ca8578d69c..93376a5a12 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/crc32.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/crc32.h @@ -16,6 +16,8 @@ #include +#include "absl/strings/string_view.h" + namespace rtc { // Updates a CRC32 checksum with `len` bytes from `buf`. 
`initial` holds the @@ -26,8 +28,8 @@ uint32_t UpdateCrc32(uint32_t initial, const void* buf, size_t len); inline uint32_t ComputeCrc32(const void* buf, size_t len) { return UpdateCrc32(0, buf, len); } -inline uint32_t ComputeCrc32(const std::string& str) { - return ComputeCrc32(str.c_str(), str.size()); +inline uint32_t ComputeCrc32(absl::string_view str) { + return ComputeCrc32(str.data(), str.size()); } } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/recursive_critical_section.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/recursive_critical_section.cc index 068b9aa808..540819888e 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/recursive_critical_section.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/recursive_critical_section.cc @@ -12,7 +12,6 @@ #include -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "rtc_base/platform_thread_types.h" #include "rtc_base/synchronization/yield.h" diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/recursive_critical_section.h b/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/recursive_critical_section.h index 15b1f97e9f..da1e92b9b0 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/recursive_critical_section.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/recursive_critical_section.h @@ -11,7 +11,8 @@ #ifndef RTC_BASE_DEPRECATED_RECURSIVE_CRITICAL_SECTION_H_ #define RTC_BASE_DEPRECATED_RECURSIVE_CRITICAL_SECTION_H_ -#include "rtc_base/constructor_magic.h" +#include + #include "rtc_base/platform_thread_types.h" #include "rtc_base/thread_annotations.h" @@ -69,7 +70,7 @@ class RTC_LOCKABLE RecursiveCriticalSection { // Number of times the lock has been locked + number of threads waiting. // TODO(tommi): We could use this number and subtract the recursion count // to find places where we have multiple threads contending on the same lock. - mutable volatile int lock_queue_; + mutable std::atomic lock_queue_; // `recursion_` represents the recursion count + 1 for the thread that owns // the lock. Only modified by the thread that owns the lock. mutable int recursion_; @@ -94,9 +95,11 @@ class RTC_SCOPED_LOCKABLE CritScope { RTC_EXCLUSIVE_LOCK_FUNCTION(cs); ~CritScope() RTC_UNLOCK_FUNCTION(); + CritScope(const CritScope&) = delete; + CritScope& operator=(const CritScope&) = delete; + private: const RecursiveCriticalSection* const cs_; - RTC_DISALLOW_COPY_AND_ASSIGN(CritScope); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/event.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/event.cc index 67c8746205..c2f6f8abab 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/event.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/event.cc @@ -25,9 +25,12 @@ #include "rtc_base/checks.h" #include "rtc_base/synchronization/yield_policy.h" #include "rtc_base/system/warn_current_thread_is_deadlocked.h" +#include "rtc_base/time_utils.h" namespace rtc { +using ::webrtc::TimeDelta; + Event::Event() : Event(false, false) {} #if defined(WEBRTC_WIN) @@ -51,9 +54,12 @@ void Event::Reset() { ResetEvent(event_handle_); } -bool Event::Wait(const int give_up_after_ms, int /*warn_after_ms*/) { +bool Event::Wait(TimeDelta give_up_after, TimeDelta /*warn_after*/) { ScopedYieldPolicy::YieldExecution(); - const DWORD ms = give_up_after_ms == kForever ? INFINITE : give_up_after_ms; + const DWORD ms = + give_up_after.IsPlusInfinity() + ? 
INFINITE + : give_up_after.RoundUpTo(webrtc::TimeDelta::Millis(1)).ms(); return (WaitForSingleObject(event_handle_, ms) == WAIT_OBJECT_0); } @@ -108,7 +114,7 @@ void Event::Reset() { namespace { -timespec GetTimespec(const int milliseconds_from_now) { +timespec GetTimespec(TimeDelta duration_from_now) { timespec ts; // Get the current time. @@ -118,17 +124,19 @@ timespec GetTimespec(const int milliseconds_from_now) { timeval tv; gettimeofday(&tv, nullptr); ts.tv_sec = tv.tv_sec; - ts.tv_nsec = tv.tv_usec * 1000; + ts.tv_nsec = tv.tv_usec * kNumNanosecsPerMicrosec; #endif // Add the specified number of milliseconds to it. - ts.tv_sec += (milliseconds_from_now / 1000); - ts.tv_nsec += (milliseconds_from_now % 1000) * 1000000; + int64_t microsecs_from_now = duration_from_now.us(); + ts.tv_sec += microsecs_from_now / kNumMicrosecsPerSec; + ts.tv_nsec += + (microsecs_from_now % kNumMicrosecsPerSec) * kNumNanosecsPerMicrosec; // Normalize. - if (ts.tv_nsec >= 1000000000) { + if (ts.tv_nsec >= kNumNanosecsPerSec) { ts.tv_sec++; - ts.tv_nsec -= 1000000000; + ts.tv_nsec -= kNumNanosecsPerSec; } return ts; @@ -136,22 +144,21 @@ timespec GetTimespec(const int milliseconds_from_now) { } // namespace -bool Event::Wait(const int give_up_after_ms, const int warn_after_ms) { +bool Event::Wait(TimeDelta give_up_after, TimeDelta warn_after) { // Instant when we'll log a warning message (because we've been waiting so // long it might be a bug), but not yet give up waiting. nullopt if we // shouldn't log a warning. const absl::optional warn_ts = - warn_after_ms == kForever || - (give_up_after_ms != kForever && warn_after_ms > give_up_after_ms) + warn_after >= give_up_after ? absl::nullopt - : absl::make_optional(GetTimespec(warn_after_ms)); + : absl::make_optional(GetTimespec(warn_after)); // Instant when we'll stop waiting and return an error. nullopt if we should // never give up. const absl::optional give_up_ts = - give_up_after_ms == kForever + give_up_after.IsPlusInfinity() ? absl::nullopt - : absl::make_optional(GetTimespec(give_up_after_ms)); + : absl::make_optional(GetTimespec(give_up_after)); ScopedYieldPolicy::YieldExecution(); pthread_mutex_lock(&event_mutex_); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/event.h b/TMessagesProj/jni/voip/webrtc/rtc_base/event.h index 584ad5d35a..12f6a7dca2 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/event.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/event.h @@ -11,6 +11,8 @@ #ifndef RTC_BASE_EVENT_H_ #define RTC_BASE_EVENT_H_ +#include "api/units/time_delta.h" + #if defined(WEBRTC_WIN) #include #elif defined(WEBRTC_POSIX) @@ -19,11 +21,43 @@ #error "Must define either WEBRTC_WIN or WEBRTC_POSIX." #endif +#include "rtc_base/synchronization/yield_policy.h" + namespace rtc { +// RTC_DISALLOW_WAIT() utility +// +// Sets a stack-scoped flag that disallows use of `rtc::Event::Wait` by means +// of raising a DCHECK when a call to `rtc::Event::Wait()` is made.. +// This is useful to guard synchronization-free scopes against regressions. +// +// Example of what this would catch (`ScopeToProtect` calls `Foo`): +// +// void Foo(TaskQueue* tq) { +// Event event; +// tq->PostTask([&event]() { +// event.Set(); +// }); +// event.Wait(Event::kForever); // <- Will trigger a DCHECK. +// } +// +// void ScopeToProtect() { +// TaskQueue* tq = GetSomeTaskQueue(); +// RTC_DISALLOW_WAIT(); // Policy takes effect. 
+// Foo(tq); +// } +// +#if RTC_DCHECK_IS_ON +#define RTC_DISALLOW_WAIT() ScopedDisallowWait disallow_wait_##__LINE__ +#else +#define RTC_DISALLOW_WAIT() +#endif + class Event { public: - static const int kForever = -1; + // TODO(bugs.webrtc.org/14366): Consider removing this redundant alias. + static constexpr webrtc::TimeDelta kForever = + webrtc::TimeDelta::PlusInfinity(); Event(); Event(bool manual_reset, bool initially_signaled); @@ -35,19 +69,22 @@ class Event { void Reset(); // Waits for the event to become signaled, but logs a warning if it takes more - // than `warn_after_ms` milliseconds, and gives up completely if it takes more - // than `give_up_after_ms` milliseconds. (If `warn_after_ms >= - // give_up_after_ms`, no warning will be logged.) Either or both may be - // `kForever`, which means wait indefinitely. + // than `warn_after`, and gives up completely if it takes more than + // `give_up_after`. (If `warn_after >= give_up_after`, no warning will be + // logged.) Either or both may be `kForever`, which means wait indefinitely. + // + // Care is taken so that the underlying OS wait call isn't requested to sleep + // shorter than `give_up_after`. // // Returns true if the event was signaled, false if there was a timeout or // some other error. - bool Wait(int give_up_after_ms, int warn_after_ms); + bool Wait(webrtc::TimeDelta give_up_after, webrtc::TimeDelta warn_after); // Waits with the given timeout and a reasonable default warning timeout. - bool Wait(int give_up_after_ms) { - return Wait(give_up_after_ms, - give_up_after_ms == kForever ? 3000 : kForever); + bool Wait(webrtc::TimeDelta give_up_after) { + return Wait(give_up_after, give_up_after.IsPlusInfinity() + ? webrtc::TimeDelta::Seconds(3) + : kForever); } private: @@ -81,6 +118,20 @@ class ScopedAllowBaseSyncPrimitivesForTesting { ~ScopedAllowBaseSyncPrimitivesForTesting() {} }; +#if RTC_DCHECK_IS_ON +class ScopedDisallowWait { + public: + ScopedDisallowWait() = default; + + private: + class DisallowYieldHandler : public YieldInterface { + public: + void YieldExecution() override { RTC_DCHECK_NOTREACHED(); } + } handler_; + rtc::ScopedYieldPolicy policy{&handler_}; +}; +#endif + } // namespace rtc #endif // RTC_BASE_EVENT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.cc index 1a2b41ec5c..992a2b5e08 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.cc @@ -14,11 +14,12 @@ #include #include +#include #include #include +#include "absl/strings/string_view.h" #include "api/sequence_checker.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" @@ -80,7 +81,7 @@ namespace tracing { namespace { // Atomic-int fast path for avoiding logging when disabled. -static volatile int g_event_logging_active = 0; +static std::atomic g_event_logging_active(0); // TODO(pbos): Log metadata for all threads, etc. 
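// [Editorial note — illustrative sketch, not part of the patch above.] The
// Event API above now takes webrtc::TimeDelta instead of raw milliseconds,
// with kForever expressed as TimeDelta::PlusInfinity(). Typical call sites
// look roughly like this (the 50 ms timeout is an arbitrary example value):
#include "api/units/time_delta.h"
#include "rtc_base/event.h"

bool WaitBriefly(rtc::Event& event) {
  // Single-argument overload: gives up after 50 ms and, per the header above,
  // only logs a "waited too long" warning for indefinite (kForever) waits.
  return event.Wait(webrtc::TimeDelta::Millis(50));
}

bool WaitForever(rtc::Event& event) {
  return event.Wait(rtc::Event::kForever);  // kForever == TimeDelta::PlusInfinity()
}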
class EventLogger final { @@ -122,11 +123,12 @@ class EventLogger final { // https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview void Log() { RTC_DCHECK(output_file_); - static const int kLoggingIntervalMs = 100; + static constexpr webrtc::TimeDelta kLoggingInterval = + webrtc::TimeDelta::Millis(100); fprintf(output_file_, "{ \"traceEvents\": [\n"); bool has_logged_event = false; while (true) { - bool shutting_down = shutdown_event_.Wait(kLoggingIntervalMs); + bool shutting_down = shutdown_event_.Wait(kLoggingInterval); std::vector events; { webrtc::MutexLock lock(&mutex_); @@ -198,8 +200,8 @@ class EventLogger final { } // Enable event logging (fast-path). This should be disabled since starting // shouldn't be done twice. - RTC_CHECK_EQ(0, - rtc::AtomicOps::CompareAndSwap(&g_event_logging_active, 0, 1)); + int zero = 0; + RTC_CHECK(g_event_logging_active.compare_exchange_strong(zero, 1)); // Finally start, everything should be set up now. logging_thread_ = @@ -211,7 +213,8 @@ class EventLogger final { RTC_DCHECK(thread_checker_.IsCurrent()); TRACE_EVENT_INSTANT0("webrtc", "EventLogger::Stop"); // Try to stop. Abort if we're not currently logging. - if (rtc::AtomicOps::CompareAndSwap(&g_event_logging_active, 1, 0) == 0) + int one = 1; + if (g_event_logging_active.compare_exchange_strong(one, 0)) return; // Wake up logging thread to finish writing. @@ -320,7 +323,7 @@ class EventLogger final { bool output_file_owned_ = false; }; -static EventLogger* volatile g_event_logger = nullptr; +static std::atomic g_event_logger(nullptr); static const char* const kDisabledTracePrefix = TRACE_DISABLED_BY_DEFAULT(""); const unsigned char* InternalGetCategoryEnabled(const char* name) { const char* prefix_ptr = &kDisabledTracePrefix[0]; @@ -334,6 +337,10 @@ const unsigned char* InternalGetCategoryEnabled(const char* name) { : name); } +const unsigned char* InternalEnableAllCategories(const char* name) { + return reinterpret_cast(name); +} + void InternalAddTraceEvent(char phase, const unsigned char* category_enabled, const char* name, @@ -344,56 +351,59 @@ void InternalAddTraceEvent(char phase, const unsigned long long* arg_values, unsigned char flags) { // Fast path for when event tracing is inactive. - if (rtc::AtomicOps::AcquireLoad(&g_event_logging_active) == 0) + if (g_event_logging_active.load() == 0) return; - g_event_logger->AddTraceEvent(name, category_enabled, phase, num_args, - arg_names, arg_types, arg_values, - rtc::TimeMicros(), 1, rtc::CurrentThreadId()); + g_event_logger.load()->AddTraceEvent( + name, category_enabled, phase, num_args, arg_names, arg_types, arg_values, + rtc::TimeMicros(), 1, rtc::CurrentThreadId()); } } // namespace -void SetupInternalTracer() { - RTC_CHECK(rtc::AtomicOps::CompareAndSwapPtr( - &g_event_logger, static_cast(nullptr), - new EventLogger()) == nullptr); - webrtc::SetupEventTracer(InternalGetCategoryEnabled, InternalAddTraceEvent); +void SetupInternalTracer(bool enable_all_categories) { + EventLogger* null_logger = nullptr; + RTC_CHECK( + g_event_logger.compare_exchange_strong(null_logger, new EventLogger())); + webrtc::SetupEventTracer(enable_all_categories ? 
InternalEnableAllCategories + : InternalGetCategoryEnabled, + InternalAddTraceEvent); } void StartInternalCaptureToFile(FILE* file) { - if (g_event_logger) { - g_event_logger->Start(file, false); + EventLogger* event_logger = g_event_logger.load(); + if (event_logger) { + event_logger->Start(file, false); } } -bool StartInternalCapture(const char* filename) { - if (!g_event_logger) +bool StartInternalCapture(absl::string_view filename) { + EventLogger* event_logger = g_event_logger.load(); + if (!event_logger) return false; - FILE* file = fopen(filename, "w"); + FILE* file = fopen(std::string(filename).c_str(), "w"); if (!file) { RTC_LOG(LS_ERROR) << "Failed to open trace file '" << filename << "' for writing."; return false; } - g_event_logger->Start(file, true); + event_logger->Start(file, true); return true; } void StopInternalCapture() { - if (g_event_logger) { - g_event_logger->Stop(); + EventLogger* event_logger = g_event_logger.load(); + if (event_logger) { + event_logger->Stop(); } } void ShutdownInternalTracer() { StopInternalCapture(); - EventLogger* old_logger = rtc::AtomicOps::AcquireLoadPtr(&g_event_logger); + EventLogger* old_logger = g_event_logger.load(std::memory_order_acquire); RTC_DCHECK(old_logger); - RTC_CHECK(rtc::AtomicOps::CompareAndSwapPtr( - &g_event_logger, old_logger, - static_cast(nullptr)) == old_logger); + RTC_CHECK(g_event_logger.compare_exchange_strong(old_logger, nullptr)); delete old_logger; webrtc::SetupEventTracer(nullptr, nullptr); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.h index 4bbda579bc..dc2eaed669 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.h @@ -28,6 +28,9 @@ #include +#include "absl/strings/string_view.h" +#include "rtc_base/system/rtc_export.h" + namespace webrtc { typedef const unsigned char* (*GetCategoryEnabledPtr)(const char* name); @@ -70,12 +73,12 @@ class EventTracer { namespace rtc { namespace tracing { // Set up internal event tracer. -void SetupInternalTracer(); -bool StartInternalCapture(const char* filename); -void StartInternalCaptureToFile(FILE* file); -void StopInternalCapture(); +RTC_EXPORT void SetupInternalTracer(bool enable_all_categories = true); +RTC_EXPORT bool StartInternalCapture(absl::string_view filename); +RTC_EXPORT void StartInternalCaptureToFile(FILE* file); +RTC_EXPORT void StopInternalCapture(); // Make sure we run this, this will tear down the internal tracing. 
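// [Editorial note — illustrative sketch, not part of the patch above.] The
// event_tracer.cc hunks replace rtc::AtomicOps with std::atomic. The
// enable-once pattern used for the logging flag reduces to a
// compare_exchange_strong against an expected value of 0:
#include <atomic>

std::atomic<int> g_tracing_active(0);

bool TryEnableTracing() {
  int expected = 0;
  // Succeeds (and flips the flag to 1) only if nobody enabled it before,
  // mirroring the old AtomicOps::CompareAndSwap(&flag, 0, 1) == 0 check.
  return g_tracing_active.compare_exchange_strong(expected, 1);
}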
-void ShutdownInternalTracer(); +RTC_EXPORT void ShutdownInternalTracer(); } // namespace tracing } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/OWNERS b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/OWNERS index f057a5ddc7..0a3b89533d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/OWNERS @@ -2,15 +2,12 @@ asapersson@webrtc.org sprang@webrtc.org srte@webrtc.org -per-file alr_experiment*=sprang@webrtc.org per-file audio_allocation_settings*=srte@webrtc.org per-file congestion_controller_experiment*=srte@webrtc.org per-file cpu_speed_experiment*=asapersson@webrtc.org per-file field_trial*=srte@webrtc.org -per-file jitter_upper_bound_experiment*=sprang@webrtc.org per-file keyframe_interval_settings*=brandtr@webrtc.org per-file normalize_simulcast_size_experiment*=asapersson@webrtc.org per-file quality_scaling_experiment*=asapersson@webrtc.org per-file rtt_mult_experiment*=mhoro@webrtc.org -per-file rate_control_settings*=sprang@webrtc.org per-file rate_control_settings*=srte@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/alr_experiment.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/alr_experiment.cc index 119a4011e1..f5d36f6867 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/alr_experiment.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/alr_experiment.cc @@ -15,6 +15,7 @@ #include +#include "absl/strings/string_view.h" #include "api/transport/field_trial_based_config.h" #include "rtc_base/logging.h" @@ -32,22 +33,22 @@ bool AlrExperimentSettings::MaxOneFieldTrialEnabled() { } bool AlrExperimentSettings::MaxOneFieldTrialEnabled( - const WebRtcKeyValueConfig& key_value_config) { + const FieldTrialsView& key_value_config) { return key_value_config.Lookup(kStrictPacingAndProbingExperimentName) .empty() || key_value_config.Lookup(kScreenshareProbingBweExperimentName).empty(); } absl::optional -AlrExperimentSettings::CreateFromFieldTrial(const char* experiment_name) { +AlrExperimentSettings::CreateFromFieldTrial(absl::string_view experiment_name) { return AlrExperimentSettings::CreateFromFieldTrial(FieldTrialBasedConfig(), experiment_name); } absl::optional AlrExperimentSettings::CreateFromFieldTrial( - const WebRtcKeyValueConfig& key_value_config, - const char* experiment_name) { + const FieldTrialsView& key_value_config, + absl::string_view experiment_name) { absl::optional ret; std::string group_name = key_value_config.Lookup(experiment_name); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/alr_experiment.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/alr_experiment.h index 5b0661c5b4..048fd90cab 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/alr_experiment.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/alr_experiment.h @@ -13,8 +13,9 @@ #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_view.h" namespace webrtc { struct AlrExperimentSettings { @@ -32,13 +33,12 @@ struct AlrExperimentSettings { static const char kScreenshareProbingBweExperimentName[]; static const char kStrictPacingAndProbingExperimentName[]; static absl::optional CreateFromFieldTrial( - const char* experiment_name); + absl::string_view experiment_name); static absl::optional CreateFromFieldTrial( - const WebRtcKeyValueConfig& key_value_config, - const char* experiment_name); + const 
FieldTrialsView& key_value_config, + absl::string_view experiment_name); static bool MaxOneFieldTrialEnabled(); - static bool MaxOneFieldTrialEnabled( - const WebRtcKeyValueConfig& key_value_config); + static bool MaxOneFieldTrialEnabled(const FieldTrialsView& key_value_config); private: AlrExperimentSettings() = default; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/balanced_degradation_settings.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/balanced_degradation_settings.cc index 90d44efb10..1652e31704 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/balanced_degradation_settings.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/balanced_degradation_settings.cc @@ -15,7 +15,6 @@ #include "rtc_base/experiments/field_trial_list.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { @@ -332,7 +331,8 @@ BalancedDegradationSettings::Config::Config(int pixels, av1(av1), generic(generic) {} -BalancedDegradationSettings::BalancedDegradationSettings() { +BalancedDegradationSettings::BalancedDegradationSettings( + const FieldTrialsView& field_trials) { FieldTrialStructList configs( {FieldTrialStructMember("pixels", [](Config* c) { return &c->pixels; }), FieldTrialStructMember("fps", [](Config* c) { return &c->fps; }), @@ -390,7 +390,7 @@ BalancedDegradationSettings::BalancedDegradationSettings() { [](Config* c) { return &c->generic.kbps_res; })}, {}); - ParseFieldTrial({&configs}, field_trial::FindFullName(kFieldTrial)); + ParseFieldTrial({&configs}, field_trials.Lookup(kFieldTrial)); configs_ = GetValidOrDefault(configs.Get()); RTC_DCHECK_GT(configs_.size(), 1); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/balanced_degradation_settings.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/balanced_degradation_settings.h index 0b2f2f5993..0b5e03df3b 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/balanced_degradation_settings.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/balanced_degradation_settings.h @@ -14,6 +14,7 @@ #include #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/video_codecs/video_encoder.h" namespace webrtc { @@ -22,7 +23,7 @@ class BalancedDegradationSettings { public: static constexpr int kNoFpsDiff = -100; - BalancedDegradationSettings(); + BalancedDegradationSettings(const FieldTrialsView& field_trials); ~BalancedDegradationSettings(); struct CodecTypeSpecific { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/bandwidth_quality_scaler_settings.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/bandwidth_quality_scaler_settings.cc index 332ab6be4b..0a9df493ed 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/bandwidth_quality_scaler_settings.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/bandwidth_quality_scaler_settings.cc @@ -16,7 +16,7 @@ namespace webrtc { BandwidthQualityScalerSettings::BandwidthQualityScalerSettings( - const WebRtcKeyValueConfig* const key_value_config) + const FieldTrialsView* const key_value_config) : bitrate_state_update_interval_s_("bitrate_state_update_interval_s_") { ParseFieldTrial( {&bitrate_state_update_interval_s_}, diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/bandwidth_quality_scaler_settings.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/bandwidth_quality_scaler_settings.h index 959aea5bd3..21e115df01 100644 --- 
a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/bandwidth_quality_scaler_settings.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/bandwidth_quality_scaler_settings.h @@ -12,7 +12,7 @@ #define RTC_BASE_EXPERIMENTS_BANDWIDTH_QUALITY_SCALER_SETTINGS_H_ #include "absl/types/optional.h" -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_view.h" #include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { @@ -25,7 +25,7 @@ class BandwidthQualityScalerSettings final { private: explicit BandwidthQualityScalerSettings( - const WebRtcKeyValueConfig* const key_value_config); + const FieldTrialsView* const key_value_config); FieldTrialOptional bitrate_state_update_interval_s_; }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/encoder_info_settings.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/encoder_info_settings.cc index b39c68468f..8af52d6646 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/encoder_info_settings.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/encoder_info_settings.cc @@ -12,6 +12,7 @@ #include +#include "absl/strings/string_view.h" #include "rtc_base/experiments/field_trial_list.h" #include "rtc_base/logging.h" #include "system_wrappers/include/field_trial.h" @@ -155,7 +156,7 @@ EncoderInfoSettings::GetSinglecastBitrateLimitForResolutionWhenQpIsUntrusted( } } -EncoderInfoSettings::EncoderInfoSettings(std::string name) +EncoderInfoSettings::EncoderInfoSettings(absl::string_view name) : requested_resolution_alignment_("requested_resolution_alignment"), apply_alignment_to_all_simulcast_layers_( "apply_alignment_to_all_simulcast_layers") { @@ -174,14 +175,15 @@ EncoderInfoSettings::EncoderInfoSettings(std::string name) [](BitrateLimit* b) { return &b->max_bitrate_bps; })}, {}); - if (field_trial::FindFullName(name).empty()) { + std::string name_str(name); + if (field_trial::FindFullName(name_str).empty()) { // Encoder name not found, use common string applying to all encoders. 
- name = "WebRTC-GetEncoderInfoOverride"; + name_str = "WebRTC-GetEncoderInfoOverride"; } ParseFieldTrial({&bitrate_limits, &requested_resolution_alignment_, &apply_alignment_to_all_simulcast_layers_}, - field_trial::FindFullName(name)); + field_trial::FindFullName(name_str)); resolution_bitrate_limits_ = ToResolutionBitrateLimits(bitrate_limits.Get()); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/encoder_info_settings.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/encoder_info_settings.h index e4dc459fcf..d450697f47 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/encoder_info_settings.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/encoder_info_settings.h @@ -14,6 +14,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/video_codecs/video_encoder.h" #include "rtc_base/experiments/field_trial_parser.h" @@ -58,7 +59,7 @@ class EncoderInfoSettings { resolution_bitrate_limits); protected: - explicit EncoderInfoSettings(std::string name); + explicit EncoderInfoSettings(absl::string_view name); private: FieldTrialOptional requested_resolution_alignment_; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_list.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_list.cc index ac3fd88f49..72cd79f2d2 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_list.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_list.cc @@ -9,9 +9,11 @@ */ #include "rtc_base/experiments/field_trial_list.h" +#include "absl/strings/string_view.h" + namespace webrtc { -FieldTrialListBase::FieldTrialListBase(std::string key) +FieldTrialListBase::FieldTrialListBase(absl::string_view key) : FieldTrialParameterInterface(key), failed_(false), parse_got_called_(false) {} diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_list.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_list.h index 877e29a699..261977243a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_list.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_list.h @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/string_encode.h" @@ -36,7 +37,7 @@ namespace webrtc { class FieldTrialListBase : public FieldTrialParameterInterface { protected: friend class FieldTrialListWrapper; - explicit FieldTrialListBase(std::string key); + explicit FieldTrialListBase(absl::string_view key); bool Failed() const; bool Used() const; @@ -52,13 +53,15 @@ class FieldTrialListBase : public FieldTrialParameterInterface { template class FieldTrialList : public FieldTrialListBase { public: - explicit FieldTrialList(std::string key) : FieldTrialList(key, {}) {} - FieldTrialList(std::string key, std::initializer_list default_values) + explicit FieldTrialList(absl::string_view key) : FieldTrialList(key, {}) {} + FieldTrialList(absl::string_view key, std::initializer_list default_values) : FieldTrialListBase(key), values_(default_values) {} std::vector Get() const { return values_; } operator std::vector() const { return Get(); } - const T& operator[](size_t index) const { return values_[index]; } + typename std::vector::const_reference operator[](size_t index) const { + return values_[index]; + } const std::vector* operator->() const { return &values_; } protected: @@ -70,11 +73,9 @@ class FieldTrialList : public 
FieldTrialListBase { return true; } - std::vector tokens; std::vector new_values_; - rtc::split(str_value.value(), '|', &tokens); - for (std::string token : tokens) { + for (const absl::string_view token : rtc::split(str_value.value(), '|')) { absl::optional value = ParseTypedParameter(token); if (value) { new_values_.push_back(*value); @@ -130,7 +131,7 @@ struct LambdaTypeTraits { template struct TypedFieldTrialListWrapper : FieldTrialListWrapper { public: - TypedFieldTrialListWrapper(std::string key, + TypedFieldTrialListWrapper(absl::string_view key, std::function sink) : list_(key), sink_(sink) {} @@ -149,7 +150,8 @@ struct TypedFieldTrialListWrapper : FieldTrialListWrapper { template > -FieldTrialListWrapper* FieldTrialStructMember(std::string key, F accessor) { +FieldTrialListWrapper* FieldTrialStructMember(absl::string_view key, + F accessor) { return new field_trial_list_impl::TypedFieldTrialListWrapper< typename Traits::ret>(key, [accessor](void* s, typename Traits::ret t) { *accessor(static_cast(s)) = t; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.cc index 952250b767..78d5489f5e 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.cc @@ -23,7 +23,8 @@ namespace webrtc { -FieldTrialParameterInterface::FieldTrialParameterInterface(std::string key) +FieldTrialParameterInterface::FieldTrialParameterInterface( + absl::string_view key) : key_(key) {} FieldTrialParameterInterface::~FieldTrialParameterInterface() { RTC_DCHECK(used_) << "Field trial parameter with key: '" << key_ @@ -111,7 +112,7 @@ void ParseFieldTrial( } template <> -absl::optional ParseTypedParameter(std::string str) { +absl::optional ParseTypedParameter(absl::string_view str) { if (str == "true" || str == "1") { return true; } else if (str == "false" || str == "0") { @@ -121,10 +122,10 @@ absl::optional ParseTypedParameter(std::string str) { } template <> -absl::optional ParseTypedParameter(std::string str) { +absl::optional ParseTypedParameter(absl::string_view str) { double value; char unit[2]{0, 0}; - if (sscanf(str.c_str(), "%lf%1s", &value, unit) >= 1) { + if (sscanf(std::string(str).c_str(), "%lf%1s", &value, unit) >= 1) { if (unit[0] == '%') return value / 100; return value; @@ -134,9 +135,9 @@ absl::optional ParseTypedParameter(std::string str) { } template <> -absl::optional ParseTypedParameter(std::string str) { +absl::optional ParseTypedParameter(absl::string_view str) { int64_t value; - if (sscanf(str.c_str(), "%" SCNd64, &value) == 1) { + if (sscanf(std::string(str).c_str(), "%" SCNd64, &value) == 1) { if (rtc::IsValueInRangeForNumericType(value)) { return static_cast(value); } @@ -145,9 +146,9 @@ absl::optional ParseTypedParameter(std::string str) { } template <> -absl::optional ParseTypedParameter(std::string str) { +absl::optional ParseTypedParameter(absl::string_view str) { int64_t value; - if (sscanf(str.c_str(), "%" SCNd64, &value) == 1) { + if (sscanf(std::string(str).c_str(), "%" SCNd64, &value) == 1) { if (rtc::IsValueInRangeForNumericType(value)) { return static_cast(value); } @@ -156,34 +157,36 @@ absl::optional ParseTypedParameter(std::string str) { } template <> -absl::optional ParseTypedParameter(std::string str) { - return std::move(str); +absl::optional ParseTypedParameter( + absl::string_view str) { + return std::string(str); } template <> absl::optional> 
ParseTypedParameter>( - std::string str) { + absl::string_view str) { return ParseOptionalParameter(str); } template <> absl::optional> ParseTypedParameter>( - std::string str) { + absl::string_view str) { return ParseOptionalParameter(str); } template <> absl::optional> -ParseTypedParameter>(std::string str) { +ParseTypedParameter>(absl::string_view str) { return ParseOptionalParameter(str); } template <> absl::optional> -ParseTypedParameter>(std::string str) { +ParseTypedParameter>(absl::string_view str) { return ParseOptionalParameter(str); } -FieldTrialFlag::FieldTrialFlag(std::string key) : FieldTrialFlag(key, false) {} +FieldTrialFlag::FieldTrialFlag(absl::string_view key) + : FieldTrialFlag(key, false) {} -FieldTrialFlag::FieldTrialFlag(std::string key, bool default_value) +FieldTrialFlag::FieldTrialFlag(absl::string_view key, bool default_value) : FieldTrialParameterInterface(key), value_(default_value) {} bool FieldTrialFlag::Get() const { @@ -208,7 +211,7 @@ bool FieldTrialFlag::Parse(absl::optional str_value) { } AbstractFieldTrialEnum::AbstractFieldTrialEnum( - std::string key, + absl::string_view key, int default_value, std::map mapping) : FieldTrialParameterInterface(key), diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.h index c67ef542d3..822895e70b 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.h @@ -46,7 +46,7 @@ class FieldTrialParameterInterface { FieldTrialParameterInterface(const FieldTrialParameterInterface&) = default; FieldTrialParameterInterface& operator=(const FieldTrialParameterInterface&) = default; - explicit FieldTrialParameterInterface(std::string key); + explicit FieldTrialParameterInterface(absl::string_view key); friend void ParseFieldTrial( std::initializer_list fields, absl::string_view trial_string); @@ -71,14 +71,14 @@ void ParseFieldTrial( // Specialize this in code file for custom types. Should return absl::nullopt if // the given string cannot be properly parsed. template -absl::optional ParseTypedParameter(std::string); +absl::optional ParseTypedParameter(absl::string_view); // This class uses the ParseTypedParameter function to implement a parameter // implementation with an enforced default value. 
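// [Editorial note — illustrative sketch, not part of the patch above.] Several
// hunks above route the new absl::string_view parameters through a temporary
// std::string before handing them to C APIs such as sscanf and fopen: a
// string_view is not guaranteed to be null-terminated, so a copy is required
// for any function expecting a C string. In isolation:
#include <cstdio>
#include <string>
#include "absl/strings/string_view.h"

bool ParseDouble(absl::string_view str, double* value) {
  // std::string(str) materializes a null-terminated buffer, as the patch does
  // in ParseTypedParameter<double> and the other sscanf-based specializations.
  return std::sscanf(std::string(str).c_str(), "%lf", value) == 1;
}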
template class FieldTrialParameter : public FieldTrialParameterInterface { public: - FieldTrialParameter(std::string key, T default_value) + FieldTrialParameter(absl::string_view key, T default_value) : FieldTrialParameterInterface(key), value_(default_value) {} T Get() const { return value_; } operator T() const { return Get(); } @@ -108,7 +108,7 @@ class FieldTrialParameter : public FieldTrialParameterInterface { template class FieldTrialConstrained : public FieldTrialParameterInterface { public: - FieldTrialConstrained(std::string key, + FieldTrialConstrained(absl::string_view key, T default_value, absl::optional lower_limit, absl::optional upper_limit) @@ -141,7 +141,7 @@ class FieldTrialConstrained : public FieldTrialParameterInterface { class AbstractFieldTrialEnum : public FieldTrialParameterInterface { public: - AbstractFieldTrialEnum(std::string key, + AbstractFieldTrialEnum(absl::string_view key, int default_value, std::map mapping); ~AbstractFieldTrialEnum() override; @@ -162,7 +162,7 @@ class AbstractFieldTrialEnum : public FieldTrialParameterInterface { template class FieldTrialEnum : public AbstractFieldTrialEnum { public: - FieldTrialEnum(std::string key, + FieldTrialEnum(absl::string_view key, T default_value, std::map mapping) : AbstractFieldTrialEnum(key, @@ -185,9 +185,9 @@ class FieldTrialEnum : public AbstractFieldTrialEnum { template class FieldTrialOptional : public FieldTrialParameterInterface { public: - explicit FieldTrialOptional(std::string key) + explicit FieldTrialOptional(absl::string_view key) : FieldTrialParameterInterface(key) {} - FieldTrialOptional(std::string key, absl::optional default_value) + FieldTrialOptional(absl::string_view key, absl::optional default_value) : FieldTrialParameterInterface(key), value_(default_value) {} absl::optional GetOptional() const { return value_; } const T& Value() const { return value_.value(); } @@ -217,10 +217,10 @@ class FieldTrialOptional : public FieldTrialParameterInterface { // explicit value is provided, the flag evaluates to true. 
class FieldTrialFlag : public FieldTrialParameterInterface { public: - explicit FieldTrialFlag(std::string key); - FieldTrialFlag(std::string key, bool default_value); + explicit FieldTrialFlag(absl::string_view key); + FieldTrialFlag(absl::string_view key, bool default_value); bool Get() const; - operator bool() const; + explicit operator bool() const; protected: bool Parse(absl::optional str_value) override; @@ -230,7 +230,8 @@ class FieldTrialFlag : public FieldTrialParameterInterface { }; template -absl::optional> ParseOptionalParameter(std::string str) { +absl::optional> ParseOptionalParameter( + absl::string_view str) { if (str.empty()) return absl::optional(); auto parsed = ParseTypedParameter(str); @@ -240,28 +241,29 @@ absl::optional> ParseOptionalParameter(std::string str) { } template <> -absl::optional ParseTypedParameter(std::string str); +absl::optional ParseTypedParameter(absl::string_view str); template <> -absl::optional ParseTypedParameter(std::string str); +absl::optional ParseTypedParameter(absl::string_view str); template <> -absl::optional ParseTypedParameter(std::string str); +absl::optional ParseTypedParameter(absl::string_view str); template <> -absl::optional ParseTypedParameter(std::string str); +absl::optional ParseTypedParameter(absl::string_view str); template <> -absl::optional ParseTypedParameter(std::string str); +absl::optional ParseTypedParameter( + absl::string_view str); template <> absl::optional> ParseTypedParameter>( - std::string str); + absl::string_view str); template <> absl::optional> ParseTypedParameter>( - std::string str); + absl::string_view str); template <> absl::optional> -ParseTypedParameter>(std::string str); +ParseTypedParameter>(absl::string_view str); template <> absl::optional> -ParseTypedParameter>(std::string str); +ParseTypedParameter>(absl::string_view str); // Accepts true, false, else parsed with sscanf %i, true if != 0. extern template class FieldTrialParameter; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_units.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_units.cc index 5aceab76a0..92af46a9e3 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_units.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_units.cc @@ -14,6 +14,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" // Large enough to fit "seconds", the longest supported unit name. 
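// [Editorial note — illustrative sketch, not part of the patch above.] Putting
// the parser and the unit helpers together: parameters are registered by key
// and fed a comma-separated "key:value" trial string. The key names and the
// example values below are invented purely for illustration.
#include "absl/strings/string_view.h"
#include "api/units/time_delta.h"
#include "rtc_base/experiments/field_trial_parser.h"
#include "rtc_base/experiments/field_trial_units.h"

void ParseExampleTrial(absl::string_view trial_string) {
  webrtc::FieldTrialParameter<double> factor("factor", 1.0);
  webrtc::FieldTrialOptional<webrtc::TimeDelta> interval("interval");
  // e.g. trial_string = "factor:2.5,interval:100ms"
  webrtc::ParseFieldTrial({&factor, &interval}, trial_string);
  // On success factor.Get() is 2.5 and interval.GetOptional() holds 100 ms;
  // keys missing from the string keep their defaults.
}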
@@ -28,7 +29,7 @@ struct ValueWithUnit { std::string unit; }; -absl::optional ParseValueWithUnit(std::string str) { +absl::optional ParseValueWithUnit(absl::string_view str) { if (str == "inf") { return ValueWithUnit{std::numeric_limits::infinity(), ""}; } else if (str == "-inf") { @@ -37,8 +38,8 @@ absl::optional ParseValueWithUnit(std::string str) { double double_val; char unit_char[RTC_TRIAL_UNIT_SIZE]; unit_char[0] = 0; - if (sscanf(str.c_str(), "%lf%" RTC_TRIAL_UNIT_LENGTH_STR "s", &double_val, - unit_char) >= 1) { + if (sscanf(std::string(str).c_str(), "%lf%" RTC_TRIAL_UNIT_LENGTH_STR "s", + &double_val, unit_char) >= 1) { return ValueWithUnit{double_val, unit_char}; } } @@ -47,7 +48,7 @@ absl::optional ParseValueWithUnit(std::string str) { } // namespace template <> -absl::optional ParseTypedParameter(std::string str) { +absl::optional ParseTypedParameter(absl::string_view str) { absl::optional result = ParseValueWithUnit(str); if (result) { if (result->unit.empty() || result->unit == "kbps") { @@ -60,7 +61,7 @@ absl::optional ParseTypedParameter(std::string str) { } template <> -absl::optional ParseTypedParameter(std::string str) { +absl::optional ParseTypedParameter(absl::string_view str) { absl::optional result = ParseValueWithUnit(str); if (result) { if (result->unit.empty() || result->unit == "bytes") @@ -70,7 +71,8 @@ absl::optional ParseTypedParameter(std::string str) { } template <> -absl::optional ParseTypedParameter(std::string str) { +absl::optional ParseTypedParameter( + absl::string_view str) { absl::optional result = ParseValueWithUnit(str); if (result) { if (result->unit == "s" || result->unit == "seconds") { @@ -86,17 +88,17 @@ absl::optional ParseTypedParameter(std::string str) { template <> absl::optional> -ParseTypedParameter>(std::string str) { +ParseTypedParameter>(absl::string_view str) { return ParseOptionalParameter(str); } template <> absl::optional> -ParseTypedParameter>(std::string str) { +ParseTypedParameter>(absl::string_view str) { return ParseOptionalParameter(str); } template <> absl::optional> -ParseTypedParameter>(std::string str) { +ParseTypedParameter>(absl::string_view str) { return ParseOptionalParameter(str); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_units.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_units.h index d85b2f04ba..408367c031 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_units.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_units.h @@ -10,6 +10,7 @@ #ifndef RTC_BASE_EXPERIMENTS_FIELD_TRIAL_UNITS_H_ #define RTC_BASE_EXPERIMENTS_FIELD_TRIAL_UNITS_H_ +#include "absl/strings/string_view.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" @@ -18,11 +19,11 @@ namespace webrtc { template <> -absl::optional ParseTypedParameter(std::string str); +absl::optional ParseTypedParameter(absl::string_view str); template <> -absl::optional ParseTypedParameter(std::string str); +absl::optional ParseTypedParameter(absl::string_view str); template <> -absl::optional ParseTypedParameter(std::string str); +absl::optional ParseTypedParameter(absl::string_view str); extern template class FieldTrialParameter; extern template class FieldTrialParameter; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/jitter_upper_bound_experiment.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/jitter_upper_bound_experiment.cc deleted file mode 100644 index ea95e84d15..0000000000 --- 
a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/jitter_upper_bound_experiment.cc +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/experiments/jitter_upper_bound_experiment.h" - -#include - -#include - -#include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" - -namespace webrtc { - -const char JitterUpperBoundExperiment::kJitterUpperBoundExperimentName[] = - "WebRTC-JitterUpperBound"; - -absl::optional JitterUpperBoundExperiment::GetUpperBoundSigmas() { - if (!field_trial::IsEnabled(kJitterUpperBoundExperimentName)) { - return absl::nullopt; - } - const std::string group = - webrtc::field_trial::FindFullName(kJitterUpperBoundExperimentName); - - double upper_bound_sigmas; - if (sscanf(group.c_str(), "Enabled-%lf", &upper_bound_sigmas) != 1) { - RTC_LOG(LS_WARNING) << "Invalid number of parameters provided."; - return absl::nullopt; - } - - if (upper_bound_sigmas < 0) { - RTC_LOG(LS_WARNING) << "Invalid jitter upper bound sigmas, must be >= 0.0: " - << upper_bound_sigmas; - return absl::nullopt; - } - - return upper_bound_sigmas; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/jitter_upper_bound_experiment.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/jitter_upper_bound_experiment.h deleted file mode 100644 index 262cd79efa..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/jitter_upper_bound_experiment.h +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_EXPERIMENTS_JITTER_UPPER_BOUND_EXPERIMENT_H_ -#define RTC_BASE_EXPERIMENTS_JITTER_UPPER_BOUND_EXPERIMENT_H_ - -#include "absl/types/optional.h" - -namespace webrtc { - -class JitterUpperBoundExperiment { - public: - // Returns nullopt if experiment is not on, otherwise returns the configured - // upper bound for frame delay delta used in jitter estimation, expressed as - // number of standard deviations of the current deviation from the expected - // delay. 
- static absl::optional GetUpperBoundSigmas(); - - static const char kJitterUpperBoundExperimentName[]; -}; - -} // namespace webrtc - -#endif // RTC_BASE_EXPERIMENTS_JITTER_UPPER_BOUND_EXPERIMENT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.cc index 76c85cbbad..413e2a91d5 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.cc @@ -21,7 +21,7 @@ constexpr char kFieldTrialName[] = "WebRTC-KeyframeInterval"; } // namespace KeyframeIntervalSettings::KeyframeIntervalSettings( - const WebRtcKeyValueConfig* const key_value_config) + const FieldTrialsView* const key_value_config) : min_keyframe_send_interval_ms_("min_keyframe_send_interval_ms") { ParseFieldTrial({&min_keyframe_send_interval_ms_}, key_value_config->Lookup(kFieldTrialName)); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.h index 3f253f0022..aff7854516 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.h @@ -12,7 +12,7 @@ #define RTC_BASE_EXPERIMENTS_KEYFRAME_INTERVAL_SETTINGS_H_ #include "absl/types/optional.h" -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_view.h" #include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { @@ -29,8 +29,7 @@ class KeyframeIntervalSettings final { absl::optional MinKeyframeSendIntervalMs() const; private: - explicit KeyframeIntervalSettings( - const WebRtcKeyValueConfig* key_value_config); + explicit KeyframeIntervalSettings(const FieldTrialsView* key_value_config); FieldTrialOptional min_keyframe_send_interval_ms_; }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_rampup_experiment.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_rampup_experiment.cc index 35c83f7011..509ba91dc3 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_rampup_experiment.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_rampup_experiment.cc @@ -18,7 +18,7 @@ namespace webrtc { QualityRampupExperiment::QualityRampupExperiment( - const WebRtcKeyValueConfig* const key_value_config) + const FieldTrialsView* const key_value_config) : min_pixels_("min_pixels"), min_duration_ms_("min_duration_ms"), max_bitrate_factor_("max_bitrate_factor") { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_rampup_experiment.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_rampup_experiment.h index 719b1893f6..e8048a3c1c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_rampup_experiment.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_rampup_experiment.h @@ -12,7 +12,7 @@ #define RTC_BASE_EXPERIMENTS_QUALITY_RAMPUP_EXPERIMENT_H_ #include "absl/types/optional.h" -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_view.h" #include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { @@ -38,7 +38,7 @@ class QualityRampupExperiment final { private: explicit QualityRampupExperiment( - const WebRtcKeyValueConfig* const key_value_config); + const FieldTrialsView* const key_value_config); FieldTrialOptional min_pixels_; 
FieldTrialOptional min_duration_ms_; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.cc index d2443b05ce..85c99255ab 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.cc @@ -20,7 +20,7 @@ const double kMinScaleFactor = 0.01; } // namespace QualityScalerSettings::QualityScalerSettings( - const WebRtcKeyValueConfig* const key_value_config) + const FieldTrialsView* const key_value_config) : sampling_period_ms_("sampling_period_ms"), average_qp_window_("average_qp_window"), min_frames_("min_frames"), diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.h index b4b6a427a0..99827aac6b 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaler_settings.h @@ -12,7 +12,7 @@ #define RTC_BASE_EXPERIMENTS_QUALITY_SCALER_SETTINGS_H_ #include "absl/types/optional.h" -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_view.h" #include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { @@ -30,8 +30,7 @@ class QualityScalerSettings final { absl::optional InitialBitrateFactor() const; private: - explicit QualityScalerSettings( - const WebRtcKeyValueConfig* const key_value_config); + explicit QualityScalerSettings(const FieldTrialsView* const key_value_config); FieldTrialOptional sampling_period_ms_; FieldTrialOptional average_qp_window_; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.cc index bed194e683..ea5f90ab39 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.cc @@ -34,27 +34,11 @@ const char kCongestionWindowDefaultFieldTrialString[] = const char kUseBaseHeavyVp8Tl3RateAllocationFieldTrialName[] = "WebRTC-UseBaseHeavyVP8TL3RateAllocation"; -const char* kVideoHysteresisFieldTrialname = - "WebRTC-SimulcastUpswitchHysteresisPercent"; -const char* kScreenshareHysteresisFieldTrialname = - "WebRTC-SimulcastScreenshareUpswitchHysteresisPercent"; - -bool IsEnabled(const WebRtcKeyValueConfig* const key_value_config, +bool IsEnabled(const FieldTrialsView* const key_value_config, absl::string_view key) { return absl::StartsWith(key_value_config->Lookup(key), "Enabled"); } -void ParseHysteresisFactor(const WebRtcKeyValueConfig* const key_value_config, - absl::string_view key, - double* output_value) { - std::string group_name = key_value_config->Lookup(key); - int percent = 0; - if (!group_name.empty() && sscanf(group_name.c_str(), "%d", &percent) == 1 && - percent >= 0) { - *output_value = 1.0 + (percent / 100.0); - } -} - } // namespace constexpr char CongestionWindowConfig::kKey[]; @@ -78,23 +62,21 @@ constexpr char VideoRateControlConfig::kKey[]; std::unique_ptr VideoRateControlConfig::Parser() { // The empty comments ensures that each pair is on a separate line. 
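// [Editorial note — illustrative sketch, not part of the patch above.] The
// VideoRateControlConfig::Parser() body below uses StructParametersParser,
// which binds "name", &member pairs and then parses the same "key:value"
// syntax. A stripped-down example with an invented struct:
#include <memory>
#include <string>
#include "rtc_base/experiments/struct_parameters_parser.h"

struct DemoRateConfig {
  double pacing_factor = 2.5;
  bool probe_max_allocation = true;
  std::unique_ptr<webrtc::StructParametersParser> Parser() {
    return webrtc::StructParametersParser::Create(
        "pacing_factor", &pacing_factor,  //
        "probe_max_allocation", &probe_max_allocation);
  }
};

void ApplyDemoTrial(const std::string& trial) {
  DemoRateConfig config;
  // e.g. trial = "pacing_factor:1.5,probe_max_allocation:false"
  config.Parser()->Parse(trial);
}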
return StructParametersParser::Create( - "pacing_factor", &pacing_factor, // - "alr_probing", &alr_probing, // - "vp8_qp_max", &vp8_qp_max, // - "vp8_min_pixels", &vp8_min_pixels, // - "trust_vp8", &trust_vp8, // - "trust_vp9", &trust_vp9, // - "video_hysteresis", &video_hysteresis, // - "screenshare_hysteresis", &screenshare_hysteresis, // - "probe_max_allocation", &probe_max_allocation, // - "bitrate_adjuster", &bitrate_adjuster, // - "adjuster_use_headroom", &adjuster_use_headroom, // - "vp8_s0_boost", &vp8_s0_boost, // + "pacing_factor", &pacing_factor, // + "alr_probing", &alr_probing, // + "vp8_qp_max", &vp8_qp_max, // + "vp8_min_pixels", &vp8_min_pixels, // + "trust_vp8", &trust_vp8, // + "trust_vp9", &trust_vp9, // + "probe_max_allocation", &probe_max_allocation, // + "bitrate_adjuster", &bitrate_adjuster, // + "adjuster_use_headroom", &adjuster_use_headroom, // + "vp8_s0_boost", &vp8_s0_boost, // "vp8_base_heavy_tl3_alloc", &vp8_base_heavy_tl3_alloc); } RateControlSettings::RateControlSettings( - const WebRtcKeyValueConfig* const key_value_config) { + const FieldTrialsView* const key_value_config) { std::string congestion_window_config = key_value_config->Lookup(CongestionWindowConfig::kKey).empty() ? kCongestionWindowDefaultFieldTrialString @@ -103,10 +85,6 @@ RateControlSettings::RateControlSettings( CongestionWindowConfig::Parse(congestion_window_config); video_config_.vp8_base_heavy_tl3_alloc = IsEnabled( key_value_config, kUseBaseHeavyVp8Tl3RateAllocationFieldTrialName); - ParseHysteresisFactor(key_value_config, kVideoHysteresisFieldTrialname, - &video_config_.video_hysteresis); - ParseHysteresisFactor(key_value_config, kScreenshareHysteresisFieldTrialname, - &video_config_.screenshare_hysteresis); video_config_.Parser()->Parse( key_value_config->Lookup(VideoRateControlConfig::kKey)); } @@ -120,7 +98,7 @@ RateControlSettings RateControlSettings::ParseFromFieldTrials() { } RateControlSettings RateControlSettings::ParseFromKeyValueConfig( - const WebRtcKeyValueConfig* const key_value_config) { + const FieldTrialsView* const key_value_config) { FieldTrialBasedConfig field_trial_config; return RateControlSettings(key_value_config ? 
key_value_config : &field_trial_config); @@ -191,22 +169,6 @@ bool RateControlSettings::LibvpxVp9TrustedRateController() const { return video_config_.trust_vp9; } -double RateControlSettings::GetSimulcastHysteresisFactor( - VideoCodecMode mode) const { - if (mode == VideoCodecMode::kScreensharing) { - return video_config_.screenshare_hysteresis; - } - return video_config_.video_hysteresis; -} - -double RateControlSettings::GetSimulcastHysteresisFactor( - VideoEncoderConfig::ContentType content_type) const { - if (content_type == VideoEncoderConfig::ContentType::kScreen) { - return video_config_.screenshare_hysteresis; - } - return video_config_.video_hysteresis; -} - bool RateControlSettings::Vp8BaseHeavyTl3RateAllocation() const { return video_config_.vp8_base_heavy_tl3_alloc; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.h index 1c38e927dc..6aff70a686 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.h @@ -12,11 +12,11 @@ #define RTC_BASE_EXPERIMENTS_RATE_CONTROL_SETTINGS_H_ #include "absl/types/optional.h" -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_view.h" #include "api/units/data_size.h" #include "api/video_codecs/video_codec.h" -#include "api/video_codecs/video_encoder_config.h" #include "rtc_base/experiments/struct_parameters_parser.h" +#include "video/config/video_encoder_config.h" namespace webrtc { @@ -38,9 +38,6 @@ struct VideoRateControlConfig { absl::optional vp8_min_pixels; bool trust_vp8 = true; bool trust_vp9 = true; - double video_hysteresis = 1.2; - // Default to 35% hysteresis for simulcast screenshare. - double screenshare_hysteresis = 1.35; bool probe_max_allocation = true; bool bitrate_adjuster = true; bool adjuster_use_headroom = true; @@ -57,7 +54,7 @@ class RateControlSettings final { static RateControlSettings ParseFromFieldTrials(); static RateControlSettings ParseFromKeyValueConfig( - const WebRtcKeyValueConfig* const key_value_config); + const FieldTrialsView* const key_value_config); // When CongestionWindowPushback is enabled, the pacer is oblivious to // the congestion window. The relation between outstanding data and @@ -80,12 +77,6 @@ class RateControlSettings final { bool LibvpxVp9TrustedRateController() const; bool Vp9DynamicRateSettings() const; - // TODO(bugs.webrtc.org/10272): Remove one of these when we have merged - // VideoCodecMode and VideoEncoderConfig::ContentType. 
- double GetSimulcastHysteresisFactor(VideoCodecMode mode) const; - double GetSimulcastHysteresisFactor( - VideoEncoderConfig::ContentType content_type) const; - bool Vp8BaseHeavyTl3RateAllocation() const; bool TriggerProbeOnMaxAllocatedBitrateChange() const; @@ -93,8 +84,7 @@ class RateControlSettings final { bool BitrateAdjusterCanUseNetworkHeadroom() const; private: - explicit RateControlSettings( - const WebRtcKeyValueConfig* const key_value_config); + explicit RateControlSettings(const FieldTrialsView* const key_value_config); CongestionWindowConfig congestion_window_config_; VideoRateControlConfig video_config_; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/stable_target_rate_experiment.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/stable_target_rate_experiment.cc index fa7a97b51f..fa04fa35b4 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/stable_target_rate_experiment.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/stable_target_rate_experiment.cc @@ -11,7 +11,6 @@ #include "rtc_base/experiments/stable_target_rate_experiment.h" #include "api/transport/field_trial_based_config.h" -#include "rtc_base/experiments/rate_control_settings.h" namespace webrtc { namespace { @@ -19,7 +18,7 @@ constexpr char kFieldTrialName[] = "WebRTC-StableTargetRate"; } // namespace StableTargetRateExperiment::StableTargetRateExperiment( - const WebRtcKeyValueConfig* const key_value_config, + const FieldTrialsView* const key_value_config, double default_video_hysteresis, double default_screenshare_hysteresis) : enabled_("enabled", false), @@ -43,14 +42,10 @@ StableTargetRateExperiment StableTargetRateExperiment::ParseFromFieldTrials() { } StableTargetRateExperiment StableTargetRateExperiment::ParseFromKeyValueConfig( - const WebRtcKeyValueConfig* const key_value_config) { - RateControlSettings rate_control = - RateControlSettings::ParseFromKeyValueConfig(key_value_config); - return StableTargetRateExperiment( - key_value_config, - rate_control.GetSimulcastHysteresisFactor(VideoCodecMode::kRealtimeVideo), - rate_control.GetSimulcastHysteresisFactor( - VideoCodecMode::kScreensharing)); + const FieldTrialsView* const key_value_config) { + return StableTargetRateExperiment(key_value_config, + /*default_video_hysteresis=*/1.2, + /*default_screenshare_hysteresis=*/1.35); } bool StableTargetRateExperiment::IsEnabled() const { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/stable_target_rate_experiment.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/stable_target_rate_experiment.h index 299299ce87..be0f9da129 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/stable_target_rate_experiment.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/stable_target_rate_experiment.h @@ -11,7 +11,7 @@ #ifndef RTC_BASE_EXPERIMENTS_STABLE_TARGET_RATE_EXPERIMENT_H_ #define RTC_BASE_EXPERIMENTS_STABLE_TARGET_RATE_EXPERIMENT_H_ -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_view.h" #include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { @@ -22,7 +22,7 @@ class StableTargetRateExperiment { StableTargetRateExperiment(StableTargetRateExperiment&&); static StableTargetRateExperiment ParseFromFieldTrials(); static StableTargetRateExperiment ParseFromKeyValueConfig( - const WebRtcKeyValueConfig* const key_value_config); + const FieldTrialsView* const key_value_config); bool IsEnabled() const; double GetVideoHysteresisFactor() const; @@ -30,7 +30,7 @@ class StableTargetRateExperiment { 
private: explicit StableTargetRateExperiment( - const WebRtcKeyValueConfig* const key_value_config, + const FieldTrialsView* const key_value_config, double default_video_hysteresis, double default_screenshare_hysteresis); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.cc index d62eb6f1ea..011df3eaba 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.cc @@ -11,13 +11,14 @@ #include +#include "absl/strings/string_view.h" #include "rtc_base/logging.h" namespace webrtc { namespace { size_t FindOrEnd(absl::string_view str, size_t start, char delimiter) { size_t pos = str.find(delimiter, start); - pos = (pos == std::string::npos) ? str.length() : pos; + pos = (pos == absl::string_view::npos) ? str.length() : pos; return pos; } } // namespace diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.h index 523ecfb05d..f5f8340209 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.h @@ -28,7 +28,7 @@ namespace webrtc { namespace struct_parser_impl { struct TypedMemberParser { public: - bool (*parse)(const absl::string_view src, void* target); + bool (*parse)(absl::string_view src, void* target); void (*encode)(const void* src, std::string* target); }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/fake_mdns_responder.h b/TMessagesProj/jni/voip/webrtc/rtc_base/fake_mdns_responder.h index 1f87cf4b81..706c11b913 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/fake_mdns_responder.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/fake_mdns_responder.h @@ -15,10 +15,9 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/ip_address.h" -#include "rtc_base/location.h" #include "rtc_base/mdns_responder_interface.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" namespace webrtc { @@ -40,8 +39,7 @@ class FakeMdnsResponder : public MdnsResponderInterface { name = std::to_string(next_available_id_++) + ".local"; addr_name_map_[addr] = name; } - thread_->PostTask( - ToQueuedTask([callback, addr, name]() { callback(addr, name); })); + thread_->PostTask([callback, addr, name]() { callback(addr, name); }); } void RemoveNameForAddress(const rtc::IPAddress& addr, NameRemovedCallback callback) override { @@ -50,10 +48,10 @@ class FakeMdnsResponder : public MdnsResponderInterface { addr_name_map_.erase(it); } bool result = it != addr_name_map_.end(); - thread_->PostTask(ToQueuedTask([callback, result]() { callback(result); })); + thread_->PostTask([callback, result]() { callback(result); }); } - rtc::IPAddress GetMappedAddressForName(const std::string& name) const { + rtc::IPAddress GetMappedAddressForName(absl::string_view name) const { for (const auto& addr_name_pair : addr_name_map_) { if (addr_name_pair.second == name) { return addr_name_pair.first; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/fake_network.h b/TMessagesProj/jni/voip/webrtc/rtc_base/fake_network.h index 53664cb8f8..d78d8a7730 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/fake_network.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/fake_network.h @@ -17,9 +17,7 @@ #include #include "absl/memory/memory.h" 
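The fake mDNS responder hunk above drops the ToQueuedTask() wrapper because rtc::Thread::PostTask now accepts a callable directly; the same style appears again in the FakeNetworkManager changes below. A minimal sketch of the new posting idiom, where PostExample and value are illustrative names:

#include "rtc_base/thread.h"

void PostExample(rtc::Thread* thread, int value) {
  // Previously: thread->PostTask(ToQueuedTask([value] { /* ... */ }));
  thread->PostTask([value] {
    // Runs asynchronously on `thread`.
    (void)value;
  });
}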
-#include "rtc_base/checks.h" #include "rtc_base/mdns_responder_interface.h" -#include "rtc_base/message_handler.h" #include "rtc_base/network.h" #include "rtc_base/socket_address.h" #include "rtc_base/string_encode.h" @@ -31,8 +29,7 @@ const int kFakeIPv4NetworkPrefixLength = 24; const int kFakeIPv6NetworkPrefixLength = 64; // Fake network manager that allows us to manually specify the IPs to use. -class FakeNetworkManager : public NetworkManagerBase, - public MessageHandlerAutoCleanup { +class FakeNetworkManager : public NetworkManagerBase { public: FakeNetworkManager() {} @@ -48,13 +45,13 @@ class FakeNetworkManager : public NetworkManagerBase, AddInterface(iface, "test" + rtc::ToString(next_index_++)); } - void AddInterface(const SocketAddress& iface, const std::string& if_name) { + void AddInterface(const SocketAddress& iface, absl::string_view if_name) { AddInterface(iface, if_name, ADAPTER_TYPE_UNKNOWN); } void AddInterface( const SocketAddress& iface, - const std::string& if_name, + absl::string_view if_name, AdapterType type, absl::optional underlying_vpn_adapter_type = absl::nullopt) { SocketAddress address(if_name, 0); @@ -77,28 +74,14 @@ class FakeNetworkManager : public NetworkManagerBase, ++start_count_; if (start_count_ == 1) { sent_first_update_ = false; - rtc::Thread::Current()->Post(RTC_FROM_HERE, this, kUpdateNetworksMessage); - } else { - if (sent_first_update_) { - rtc::Thread::Current()->Post(RTC_FROM_HERE, this, - kSignalNetworksMessage); - } + Thread::Current()->PostTask([this] { DoUpdateNetworks(); }); + } else if (sent_first_update_) { + Thread::Current()->PostTask([this] { SignalNetworksChanged(); }); } } void StopUpdating() override { --start_count_; } - // MessageHandler interface. - void OnMessage(Message* msg) override { - if (msg->message_id == kUpdateNetworksMessage) { - DoUpdateNetworks(); - } else if (msg->message_id == kSignalNetworksMessage) { - SignalNetworksChanged(); - } else { - RTC_CHECK(false); - } - } - using NetworkManagerBase::set_default_local_addresses; using NetworkManagerBase::set_enumeration_permission; @@ -116,7 +99,7 @@ class FakeNetworkManager : public NetworkManagerBase, void DoUpdateNetworks() { if (start_count_ == 0) return; - std::vector networks; + std::vector> networks; for (IfaceList::iterator it = ifaces_.begin(); it != ifaces_.end(); ++it) { int prefix_length = 0; if (it->socket_address.ipaddr().family() == AF_INET) { @@ -125,18 +108,18 @@ class FakeNetworkManager : public NetworkManagerBase, prefix_length = kFakeIPv6NetworkPrefixLength; } IPAddress prefix = TruncateIP(it->socket_address.ipaddr(), prefix_length); - std::unique_ptr net(new Network( + auto net = std::make_unique( it->socket_address.hostname(), it->socket_address.hostname(), prefix, - prefix_length, it->adapter_type)); + prefix_length, it->adapter_type, /*field_trials=*/nullptr); if (it->underlying_vpn_adapter_type.has_value()) { net->set_underlying_type_for_vpn(*it->underlying_vpn_adapter_type); } net->set_default_local_address_provider(this); net->AddIP(it->socket_address.ipaddr()); - networks.push_back(net.release()); + networks.push_back(std::move(net)); } bool changed; - MergeNetworkList(networks, &changed); + MergeNetworkList(std::move(networks), &changed); if (changed || !sent_first_update_) { SignalNetworksChanged(); sent_first_update_ = true; @@ -148,9 +131,6 @@ class FakeNetworkManager : public NetworkManagerBase, int start_count_ = 0; bool sent_first_update_ = false; - static constexpr uint32_t kUpdateNetworksMessage = 1; - static constexpr uint32_t 
kSignalNetworksMessage = 2; - std::unique_ptr mdns_responder_; }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/fake_ssl_identity.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/fake_ssl_identity.cc index 87ede73985..73c843a2e7 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/fake_ssl_identity.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/fake_ssl_identity.cc @@ -14,12 +14,13 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" #include "rtc_base/message_digest.h" namespace rtc { -FakeSSLCertificate::FakeSSLCertificate(const std::string& pem_string) +FakeSSLCertificate::FakeSSLCertificate(absl::string_view pem_string) : pem_string_(pem_string), digest_algorithm_(DIGEST_SHA_1), expiration_time_(-1) {} @@ -51,8 +52,8 @@ void FakeSSLCertificate::SetCertificateExpirationTime(int64_t expiration_time) { expiration_time_ = expiration_time; } -void FakeSSLCertificate::set_digest_algorithm(const std::string& algorithm) { - digest_algorithm_ = algorithm; +void FakeSSLCertificate::set_digest_algorithm(absl::string_view algorithm) { + digest_algorithm_ = std::string(algorithm); } bool FakeSSLCertificate::GetSignatureDigestAlgorithm( @@ -61,7 +62,7 @@ bool FakeSSLCertificate::GetSignatureDigestAlgorithm( return true; } -bool FakeSSLCertificate::ComputeDigest(const std::string& algorithm, +bool FakeSSLCertificate::ComputeDigest(absl::string_view algorithm, unsigned char* digest, size_t size, size_t* length) const { @@ -70,7 +71,7 @@ bool FakeSSLCertificate::ComputeDigest(const std::string& algorithm, return (*length != 0); } -FakeSSLIdentity::FakeSSLIdentity(const std::string& pem_string) +FakeSSLIdentity::FakeSSLIdentity(absl::string_view pem_string) : FakeSSLIdentity(FakeSSLCertificate(pem_string)) {} FakeSSLIdentity::FakeSSLIdentity(const std::vector& pem_strings) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/fake_ssl_identity.h b/TMessagesProj/jni/voip/webrtc/rtc_base/fake_ssl_identity.h index 512baba9fb..2b4ae2e57a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/fake_ssl_identity.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/fake_ssl_identity.h @@ -14,6 +14,7 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" @@ -23,7 +24,7 @@ class FakeSSLCertificate : public SSLCertificate { public: // SHA-1 is the default digest algorithm because it is available in all build // configurations used for unit testing. 
- explicit FakeSSLCertificate(const std::string& pem_string); + explicit FakeSSLCertificate(absl::string_view pem_string); FakeSSLCertificate(const FakeSSLCertificate&); ~FakeSSLCertificate() override; @@ -34,14 +35,14 @@ class FakeSSLCertificate : public SSLCertificate { void ToDER(Buffer* der_buffer) const override; int64_t CertificateExpirationTime() const override; bool GetSignatureDigestAlgorithm(std::string* algorithm) const override; - bool ComputeDigest(const std::string& algorithm, + bool ComputeDigest(absl::string_view algorithm, unsigned char* digest, size_t size, size_t* length) const override; void SetCertificateExpirationTime(int64_t expiration_time); - void set_digest_algorithm(const std::string& algorithm); + void set_digest_algorithm(absl::string_view algorithm); private: std::string pem_string_; @@ -52,7 +53,7 @@ class FakeSSLCertificate : public SSLCertificate { class FakeSSLIdentity : public SSLIdentity { public: - explicit FakeSSLIdentity(const std::string& pem_string); + explicit FakeSSLIdentity(absl::string_view pem_string); // For a certificate chain. explicit FakeSSLIdentity(const std::vector& pem_strings); explicit FakeSSLIdentity(const FakeSSLCertificate& cert); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.cc index 5a004a937b..c56396f157 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.cc @@ -14,6 +14,8 @@ #include #include +#include "absl/strings/string_view.h" + #if defined(WEBRTC_WIN) #include @@ -29,6 +31,7 @@ #include "absl/types/optional.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" // Note: We use fprintf for logging in the write paths of this stream to avoid // infinite loops when logging. @@ -39,54 +42,58 @@ namespace { const char kCallSessionLogPrefix[] = "webrtc_log"; -std::string AddTrailingPathDelimiterIfNeeded(std::string directory); +std::string AddTrailingPathDelimiterIfNeeded(absl::string_view directory); // `dir` must have a trailing delimiter. `prefix` must not include wild card // characters. 
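The file-rotating-stream helpers below follow the same pattern as the fake SSL identity changes: parameters switch from const std::string& to absl::string_view, and a std::string is materialized only where an owning copy or a NUL-terminated buffer is required. A sketch of that pattern with an illustrative helper (not part of the patch):

#include <string>

#include "absl/strings/match.h"
#include "absl/strings/string_view.h"

std::string WithTrailingSlash(absl::string_view directory) {
  if (absl::EndsWith(directory, "/")) {
    return std::string(directory);  // copy only at the point of ownership
  }
  return std::string(directory) + "/";
}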
-std::vector GetFilesWithPrefix(const std::string& directory, - const std::string& prefix); -bool DeleteFile(const std::string& file); -bool MoveFile(const std::string& old_file, const std::string& new_file); -bool IsFile(const std::string& file); -bool IsFolder(const std::string& file); -absl::optional GetFileSize(const std::string& file); +std::vector GetFilesWithPrefix(absl::string_view directory, + absl::string_view prefix); +bool DeleteFile(absl::string_view file); +bool MoveFile(absl::string_view old_file, absl::string_view new_file); +bool IsFile(absl::string_view file); +bool IsFolder(absl::string_view file); +absl::optional GetFileSize(absl::string_view file); #if defined(WEBRTC_WIN) -std::string AddTrailingPathDelimiterIfNeeded(std::string directory) { +std::string AddTrailingPathDelimiterIfNeeded(absl::string_view directory) { if (absl::EndsWith(directory, "\\")) { - return directory; + return std::string(directory); } - return directory + "\\"; + return std::string(directory) + "\\"; } -std::vector GetFilesWithPrefix(const std::string& directory, - const std::string& prefix) { +std::vector GetFilesWithPrefix(absl::string_view directory, + absl::string_view prefix) { RTC_DCHECK(absl::EndsWith(directory, "\\")); WIN32_FIND_DATAW data; HANDLE handle; - handle = ::FindFirstFileW(ToUtf16(directory + prefix + '*').c_str(), &data); + StringBuilder pattern_builder{directory}; + pattern_builder << prefix << "*"; + handle = ::FindFirstFileW(ToUtf16(pattern_builder.str()).c_str(), &data); if (handle == INVALID_HANDLE_VALUE) return {}; std::vector file_list; do { - file_list.emplace_back(directory + ToUtf8(data.cFileName)); + StringBuilder file_builder{directory}; + file_builder << ToUtf8(data.cFileName); + file_list.emplace_back(file_builder.Release()); } while (::FindNextFileW(handle, &data) == TRUE); ::FindClose(handle); return file_list; } -bool DeleteFile(const std::string& file) { +bool DeleteFile(absl::string_view file) { return ::DeleteFileW(ToUtf16(file).c_str()) != 0; } -bool MoveFile(const std::string& old_file, const std::string& new_file) { +bool MoveFile(absl::string_view old_file, absl::string_view new_file) { return ::MoveFileW(ToUtf16(old_file).c_str(), ToUtf16(new_file).c_str()) != 0; } -bool IsFile(const std::string& file) { +bool IsFile(absl::string_view file) { WIN32_FILE_ATTRIBUTE_DATA data = {0}; if (0 == ::GetFileAttributesExW(ToUtf16(file).c_str(), GetFileExInfoStandard, &data)) @@ -94,7 +101,7 @@ bool IsFile(const std::string& file) { return (data.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) == 0; } -bool IsFolder(const std::string& file) { +bool IsFolder(absl::string_view file) { WIN32_FILE_ATTRIBUTE_DATA data = {0}; if (0 == ::GetFileAttributesExW(ToUtf16(file).c_str(), GetFileExInfoStandard, &data)) @@ -103,7 +110,7 @@ bool IsFolder(const std::string& file) { FILE_ATTRIBUTE_DIRECTORY; } -absl::optional GetFileSize(const std::string& file) { +absl::optional GetFileSize(absl::string_view file) { WIN32_FILE_ATTRIBUTE_DATA data = {0}; if (::GetFileAttributesExW(ToUtf16(file).c_str(), GetFileExInfoStandard, &data) == 0) @@ -113,55 +120,57 @@ absl::optional GetFileSize(const std::string& file) { #else // defined(WEBRTC_WIN) -std::string AddTrailingPathDelimiterIfNeeded(std::string directory) { +std::string AddTrailingPathDelimiterIfNeeded(absl::string_view directory) { if (absl::EndsWith(directory, "/")) { - return directory; + return std::string(directory); } - return directory + "/"; + return std::string(directory) + "/"; } -std::vector GetFilesWithPrefix(const 
std::string& directory, - const std::string& prefix) { +std::vector GetFilesWithPrefix(absl::string_view directory, + absl::string_view prefix) { RTC_DCHECK(absl::EndsWith(directory, "/")); - DIR* dir = ::opendir(directory.c_str()); + std::string directory_str(directory); + DIR* dir = ::opendir(directory_str.c_str()); if (dir == nullptr) return {}; std::vector file_list; for (struct dirent* dirent = ::readdir(dir); dirent; dirent = ::readdir(dir)) { std::string name = dirent->d_name; - if (name.compare(0, prefix.size(), prefix) == 0) { - file_list.emplace_back(directory + name); + if (name.compare(0, prefix.size(), prefix.data(), prefix.size()) == 0) { + file_list.emplace_back(directory_str + name); } } ::closedir(dir); return file_list; } -bool DeleteFile(const std::string& file) { - return ::unlink(file.c_str()) == 0; +bool DeleteFile(absl::string_view file) { + return ::unlink(std::string(file).c_str()) == 0; } -bool MoveFile(const std::string& old_file, const std::string& new_file) { - return ::rename(old_file.c_str(), new_file.c_str()) == 0; +bool MoveFile(absl::string_view old_file, absl::string_view new_file) { + return ::rename(std::string(old_file).c_str(), + std::string(new_file).c_str()) == 0; } -bool IsFile(const std::string& file) { +bool IsFile(absl::string_view file) { struct stat st; - int res = ::stat(file.c_str(), &st); + int res = ::stat(std::string(file).c_str(), &st); // Treat symlinks, named pipes, etc. all as files. return res == 0 && !S_ISDIR(st.st_mode); } -bool IsFolder(const std::string& file) { +bool IsFolder(absl::string_view file) { struct stat st; - int res = ::stat(file.c_str(), &st); + int res = ::stat(std::string(file).c_str(), &st); return res == 0 && S_ISDIR(st.st_mode); } -absl::optional GetFileSize(const std::string& file) { +absl::optional GetFileSize(absl::string_view file) { struct stat st; - if (::stat(file.c_str(), &st) != 0) + if (::stat(std::string(file).c_str(), &st) != 0) return absl::nullopt; return st.st_size; } @@ -170,8 +179,8 @@ absl::optional GetFileSize(const std::string& file) { } // namespace -FileRotatingStream::FileRotatingStream(const std::string& dir_path, - const std::string& file_prefix, +FileRotatingStream::FileRotatingStream(absl::string_view dir_path, + absl::string_view file_prefix, size_t max_file_size, size_t num_files) : dir_path_(AddTrailingPathDelimiterIfNeeded(dir_path)), @@ -331,7 +340,7 @@ std::string FileRotatingStream::GetFilePath(size_t index, } CallSessionFileRotatingStream::CallSessionFileRotatingStream( - const std::string& dir_path, + absl::string_view dir_path, size_t max_total_log_size) : FileRotatingStream(dir_path, kCallSessionLogPrefix, @@ -376,8 +385,8 @@ size_t CallSessionFileRotatingStream::GetNumRotatingLogFiles( } FileRotatingStreamReader::FileRotatingStreamReader( - const std::string& dir_path, - const std::string& file_prefix) { + absl::string_view dir_path, + absl::string_view file_prefix) { file_names_ = GetFilesWithPrefix(AddTrailingPathDelimiterIfNeeded(dir_path), file_prefix); @@ -413,7 +422,7 @@ size_t FileRotatingStreamReader::ReadAll(void* buffer, size_t size) const { } CallSessionFileRotatingStreamReader::CallSessionFileRotatingStreamReader( - const std::string& dir_path) + absl::string_view dir_path) : FileRotatingStreamReader(dir_path, kCallSessionLogPrefix) {} } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.h b/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.h index beb47c83da..6ae2753098 100644 --- 
a/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.h @@ -17,7 +17,7 @@ #include #include -#include "rtc_base/constructor_magic.h" +#include "absl/strings/string_view.h" #include "rtc_base/system/file_wrapper.h" namespace rtc { @@ -30,13 +30,16 @@ class FileRotatingStream { public: // Use this constructor for writing to a directory. Files in the directory // matching the prefix will be deleted on open. - FileRotatingStream(const std::string& dir_path, - const std::string& file_prefix, + FileRotatingStream(absl::string_view dir_path, + absl::string_view file_prefix, size_t max_file_size, size_t num_files); virtual ~FileRotatingStream(); + FileRotatingStream(const FileRotatingStream&) = delete; + FileRotatingStream& operator=(const FileRotatingStream&) = delete; + bool IsOpen() const; bool Write(const void* data, size_t data_len); @@ -100,8 +103,6 @@ class FileRotatingStream { // buffering the file size read from disk might not be accurate. size_t current_bytes_written_; bool disable_buffering_; - - RTC_DISALLOW_COPY_AND_ASSIGN(FileRotatingStream); }; // CallSessionFileRotatingStream is meant to be used in situations where we will @@ -126,10 +127,14 @@ class CallSessionFileRotatingStream : public FileRotatingStream { // Use this constructor for writing to a directory. Files in the directory // matching what's used by the stream will be deleted. `max_total_log_size` // must be at least 4. - CallSessionFileRotatingStream(const std::string& dir_path, + CallSessionFileRotatingStream(absl::string_view dir_path, size_t max_total_log_size); ~CallSessionFileRotatingStream() override {} + CallSessionFileRotatingStream(const CallSessionFileRotatingStream&) = delete; + CallSessionFileRotatingStream& operator=( + const CallSessionFileRotatingStream&) = delete; + protected: void OnRotation() override; @@ -140,8 +145,6 @@ class CallSessionFileRotatingStream : public FileRotatingStream { const size_t max_total_log_size_; size_t num_rotations_; - - RTC_DISALLOW_COPY_AND_ASSIGN(CallSessionFileRotatingStream); }; // This is a convenience class, to read all files produced by a @@ -150,8 +153,8 @@ class CallSessionFileRotatingStream : public FileRotatingStream { // directory at construction time. 
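This header also replaces the RTC_DISALLOW_COPY_AND_ASSIGN macro (its header, rtc_base/constructor_magic.h, is no longer included) with explicitly deleted special members. The equivalent pattern, shown on an illustrative class:

class NonCopyableExample {
 public:
  NonCopyableExample() = default;

  // Same effect as the removed RTC_DISALLOW_COPY_AND_ASSIGN(NonCopyableExample),
  // but visible in the public section without a macro.
  NonCopyableExample(const NonCopyableExample&) = delete;
  NonCopyableExample& operator=(const NonCopyableExample&) = delete;
};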
class FileRotatingStreamReader { public: - FileRotatingStreamReader(const std::string& dir_path, - const std::string& file_prefix); + FileRotatingStreamReader(absl::string_view dir_path, + absl::string_view file_prefix); ~FileRotatingStreamReader(); size_t GetSize() const; size_t ReadAll(void* buffer, size_t size) const; @@ -162,7 +165,7 @@ class FileRotatingStreamReader { class CallSessionFileRotatingStreamReader : public FileRotatingStreamReader { public: - CallSessionFileRotatingStreamReader(const std::string& dir_path); + CallSessionFileRotatingStreamReader(absl::string_view dir_path); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/firewall_socket_server.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/firewall_socket_server.cc index edb0cd2398..db88d19a15 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/firewall_socket_server.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/firewall_socket_server.cc @@ -210,8 +210,9 @@ void FirewallSocketServer::SetMessageQueue(Thread* queue) { server_->SetMessageQueue(queue); } -bool FirewallSocketServer::Wait(int cms, bool process_io) { - return server_->Wait(cms, process_io); +bool FirewallSocketServer::Wait(webrtc::TimeDelta max_wait_duration, + bool process_io) { + return server_->Wait(max_wait_duration, process_io); } void FirewallSocketServer::WakeUp() { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/firewall_socket_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/firewall_socket_server.h index 8a82f885c6..63f9e1ac6c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/firewall_socket_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/firewall_socket_server.h @@ -79,7 +79,7 @@ class FirewallSocketServer : public SocketServer { Socket* CreateSocket(int family, int type) override; void SetMessageQueue(Thread* queue) override; - bool Wait(int cms, bool process_io) override; + bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) override; void WakeUp() override; Socket* WrapSocket(Socket* sock, int type); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/format_macros.h b/TMessagesProj/jni/voip/webrtc/rtc_base/format_macros.h deleted file mode 100644 index 83240fb501..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/format_macros.h +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_FORMAT_MACROS_H_ -#define RTC_BASE_FORMAT_MACROS_H_ - -// This file defines the format macros for some integer types and is derived -// from Chromium's base/format_macros.h. - -// To print a 64-bit value in a portable way: -// int64_t value; -// printf("xyz:%" PRId64, value); -// The "d" in the macro corresponds to %d; you can also use PRIu64 etc. -// -// To print a size_t value in a portable way: -// size_t size; -// printf("xyz: %" RTC_PRIuS, size); -// The "u" in the macro corresponds to %u, and S is for "size". - -#if defined(WEBRTC_POSIX) - -#if (defined(_INTTYPES_H) || defined(_INTTYPES_H_)) && !defined(PRId64) -#error "inttypes.h has already been included before this header file, but " -#error "without __STDC_FORMAT_MACROS defined." 
-#endif - -#if !defined(__STDC_FORMAT_MACROS) -#define __STDC_FORMAT_MACROS -#endif - -#include - -#include "rtc_base/system/arch.h" - -#define RTC_PRIuS "zu" - -#else // WEBRTC_WIN - -#include - -#if !defined(PRId64) || !defined(PRIu64) || !defined(PRIx64) -#error "inttypes.h provided by win toolchain should define these." -#endif - -// PRI*64 were added in MSVC 2013, while "%zu" is supported since MSVC 2015 -// (so needs to be special-cased to "%Iu" instead). - -#define RTC_PRIuS "Iu" - -#endif - -#endif // RTC_BASE_FORMAT_MACROS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/gunit.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/gunit.cc index 83ee8075fb..7cd60fe9ee 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/gunit.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/gunit.cc @@ -13,6 +13,7 @@ #include #include "absl/strings/match.h" +#include "absl/strings/string_view.h" ::testing::AssertionResult AssertStartsWith(const char* text_expr, const char* prefix_expr, @@ -30,9 +31,9 @@ ::testing::AssertionResult AssertStartsWith(const char* text_expr, ::testing::AssertionResult AssertStringContains(const char* str_expr, const char* substr_expr, - const std::string& str, - const std::string& substr) { - if (str.find(substr) != std::string::npos) { + absl::string_view str, + absl::string_view substr) { + if (str.find(substr) != absl::string_view::npos) { return ::testing::AssertionSuccess(); } else { return ::testing::AssertionFailure() diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/gunit.h b/TMessagesProj/jni/voip/webrtc/rtc_base/gunit.h index dedf3ee067..6bc1419729 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/gunit.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/gunit.h @@ -11,6 +11,7 @@ #ifndef RTC_BASE_GUNIT_H_ #define RTC_BASE_GUNIT_H_ +#include "absl/strings/string_view.h" #include "rtc_base/fake_clock.h" #include "rtc_base/logging.h" #include "rtc_base/thread.h" @@ -30,11 +31,11 @@ #define WAIT_(ex, timeout, res) \ do { \ int64_t start = rtc::SystemTimeMillis(); \ - res = (ex); \ + res = (ex) && true; \ while (!res && rtc::SystemTimeMillis() < start + (timeout)) { \ rtc::Thread::Current()->ProcessMessages(0); \ rtc::Thread::Current()->SleepMs(1); \ - res = (ex); \ + res = (ex) && true; \ } \ } while (0) @@ -162,7 +163,7 @@ testing::AssertionResult AssertStartsWith(const char* text_expr, // Usage: EXPECT_PRED_FORMAT2(AssertStringContains, str, "substring"); testing::AssertionResult AssertStringContains(const char* str_expr, const char* substr_expr, - const std::string& str, - const std::string& substr); + absl::string_view str, + absl::string_view substr); #endif // RTC_BASE_GUNIT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/helpers.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/helpers.cc index 64cab10335..337239894a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/helpers.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/helpers.cc @@ -16,6 +16,7 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -145,10 +146,8 @@ bool CreateRandomString(size_t len, std::string* str) { return CreateRandomString(len, kBase64, 64, str); } -bool CreateRandomString(size_t len, - const std::string& table, - std::string* str) { - return CreateRandomString(len, table.c_str(), static_cast(table.size()), +bool CreateRandomString(size_t len, absl::string_view table, std::string* str) { + return CreateRandomString(len, table.data(), static_cast(table.size()), str); } diff --git 
a/TMessagesProj/jni/voip/webrtc/rtc_base/helpers.h b/TMessagesProj/jni/voip/webrtc/rtc_base/helpers.h index 2fd2fc5218..c214f5212f 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/helpers.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/helpers.h @@ -16,6 +16,7 @@ #include +#include "absl/strings/string_view.h" #include "rtc_base/system/rtc_export.h" namespace rtc { @@ -42,7 +43,7 @@ RTC_EXPORT bool CreateRandomString(size_t length, std::string* str); // For ease of implementation, the function requires that the table // size evenly divide 256; otherwise, it returns false. RTC_EXPORT bool CreateRandomString(size_t length, - const std::string& table, + absl::string_view table, std::string* str); // Generates (cryptographically) random data of the given length. diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/http_common.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/http_common.cc index 0d7832264b..621b854b53 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/http_common.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/http_common.cc @@ -10,6 +10,8 @@ #include +#include "absl/strings/string_view.h" + #if defined(WEBRTC_WIN) #include #include @@ -118,14 +120,14 @@ const ConstantToLabel SECURITY_ERRORS[] = { typedef std::pair HttpAttribute; typedef std::vector HttpAttributeList; -inline bool IsEndOfAttributeName(size_t pos, size_t len, const char* data) { - if (pos >= len) +inline bool IsEndOfAttributeName(size_t pos, absl::string_view data) { + if (pos >= data.size()) return true; if (isspace(static_cast(data[pos]))) return true; // The reason for this complexity is that some attributes may contain trailing // equal signs (like base64 tokens in Negotiate auth headers) - if ((pos + 1 < len) && (data[pos] == '=') && + if ((pos + 1 < data.size()) && (data[pos] == '=') && !isspace(static_cast(data[pos + 1])) && (data[pos + 1] != '=')) { return true; @@ -133,10 +135,10 @@ inline bool IsEndOfAttributeName(size_t pos, size_t len, const char* data) { return false; } -void HttpParseAttributes(const char* data, - size_t len, +void HttpParseAttributes(absl::string_view data, HttpAttributeList& attributes) { size_t pos = 0; + const size_t len = data.size(); while (true) { // Skip leading whitespace while ((pos < len) && isspace(static_cast(data[pos]))) { @@ -149,12 +151,12 @@ void HttpParseAttributes(const char* data, // Find end of attribute name size_t start = pos; - while (!IsEndOfAttributeName(pos, len, data)) { + while (!IsEndOfAttributeName(pos, data)) { ++pos; } HttpAttribute attribute; - attribute.first.assign(data + start, data + pos); + attribute.first.assign(data.data() + start, data.data() + pos); // Attribute has value? 
if ((pos < len) && (data[pos] == '=')) { @@ -185,7 +187,7 @@ void HttpParseAttributes(const char* data, } bool HttpHasAttribute(const HttpAttributeList& attributes, - const std::string& name, + absl::string_view name, std::string* value) { for (HttpAttributeList::const_iterator it = attributes.begin(); it != attributes.end(); ++it) { @@ -213,7 +215,7 @@ bool HttpHasNthAttribute(HttpAttributeList& attributes, return true; } -std::string quote(const std::string& str) { +std::string quote(absl::string_view str) { std::string result; result.push_back('"'); for (size_t i = 0; i < str.size(); ++i) { @@ -232,7 +234,7 @@ struct NegotiateAuthContext : public HttpAuthContext { size_t steps; bool specified_credentials; - NegotiateAuthContext(const std::string& auth, CredHandle c1, CtxtHandle c2) + NegotiateAuthContext(absl::string_view auth, CredHandle c1, CtxtHandle c2) : HttpAuthContext(auth), cred(c1), ctx(c2), @@ -248,18 +250,17 @@ struct NegotiateAuthContext : public HttpAuthContext { } // anonymous namespace -HttpAuthResult HttpAuthenticate(const char* challenge, - size_t len, +HttpAuthResult HttpAuthenticate(absl::string_view challenge, const SocketAddress& server, - const std::string& method, - const std::string& uri, - const std::string& username, + absl::string_view method, + absl::string_view uri, + absl::string_view username, const CryptString& password, HttpAuthContext*& context, std::string& response, std::string& auth_method) { HttpAttributeList args; - HttpParseAttributes(challenge, len, args); + HttpParseAttributes(challenge, args); HttpHasNthAttribute(args, 0, &auth_method, nullptr); if (context && (context->auth_method != auth_method)) @@ -280,7 +281,7 @@ HttpAuthResult HttpAuthenticate(const char* challenge, // std::string decoded = username + ":" + password; size_t len = username.size() + password.GetLength() + 2; char* sensitive = new char[len]; - size_t pos = strcpyn(sensitive, len, username.data(), username.size()); + size_t pos = strcpyn(sensitive, len, username); pos += strcpyn(sensitive + pos, len - pos, ":"); password.CopyTo(sensitive + pos, true); @@ -304,7 +305,7 @@ HttpAuthResult HttpAuthenticate(const char* challenge, std::string cnonce, ncount; char buffer[256]; - sprintf(buffer, "%d", static_cast(time(0))); + snprintf(buffer, sizeof(buffer), "%d", static_cast(time(0))); cnonce = MD5(buffer); ncount = "00000001"; @@ -320,13 +321,13 @@ HttpAuthResult HttpAuthenticate(const char* challenge, // std::string A1 = username + ":" + realm + ":" + password; size_t len = username.size() + realm.size() + password.GetLength() + 3; char* sensitive = new char[len]; // A1 - size_t pos = strcpyn(sensitive, len, username.data(), username.size()); + size_t pos = strcpyn(sensitive, len, username); pos += strcpyn(sensitive + pos, len - pos, ":"); - pos += strcpyn(sensitive + pos, len - pos, realm.c_str()); + pos += strcpyn(sensitive + pos, len - pos, realm); pos += strcpyn(sensitive + pos, len - pos, ":"); password.CopyTo(sensitive + pos, true); - std::string A2 = method + ":" + uri; + std::string A2 = std::string(method) + ":" + std::string(uri); std::string middle; if (has_qop) { qop = "auth"; @@ -459,11 +460,11 @@ HttpAuthResult HttpAuthenticate(const char* challenge, size_t len = password.GetLength() + 1; char* sensitive = new char[len]; password.CopyTo(sensitive, true); - std::string::size_type pos = username.find('\\'); - if (pos == std::string::npos) { + absl::string_view::size_type pos = username.find('\\'); + if (pos == absl::string_view::npos) { auth_id.UserLength = 
static_cast( std::min(sizeof(userbuf) - 1, username.size())); - memcpy(userbuf, username.c_str(), auth_id.UserLength); + memcpy(userbuf, username.data(), auth_id.UserLength); userbuf[auth_id.UserLength] = 0; auth_id.DomainLength = 0; domainbuf[auth_id.DomainLength] = 0; @@ -474,11 +475,11 @@ HttpAuthResult HttpAuthenticate(const char* challenge, } else { auth_id.UserLength = static_cast( std::min(sizeof(userbuf) - 1, username.size() - pos - 1)); - memcpy(userbuf, username.c_str() + pos + 1, auth_id.UserLength); + memcpy(userbuf, username.data() + pos + 1, auth_id.UserLength); userbuf[auth_id.UserLength] = 0; auth_id.DomainLength = static_cast(std::min(sizeof(domainbuf) - 1, pos)); - memcpy(domainbuf, username.c_str(), auth_id.DomainLength); + memcpy(domainbuf, username.data(), auth_id.DomainLength); domainbuf[auth_id.DomainLength] = 0; auth_id.PasswordLength = static_cast( std::min(sizeof(passbuf) - 1, password.GetLength())); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/http_common.h b/TMessagesProj/jni/voip/webrtc/rtc_base/http_common.h index edf161fb4c..06e42c6703 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/http_common.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/http_common.h @@ -13,6 +13,8 @@ #include +#include "absl/strings/string_view.h" + namespace rtc { class CryptString; @@ -24,7 +26,7 @@ class SocketAddress; struct HttpAuthContext { std::string auth_method; - HttpAuthContext(const std::string& auth) : auth_method(auth) {} + HttpAuthContext(absl::string_view auth) : auth_method(auth) {} virtual ~HttpAuthContext() {} }; @@ -34,12 +36,11 @@ enum HttpAuthResult { HAR_RESPONSE, HAR_IGNORE, HAR_CREDENTIALS, HAR_ERROR }; // Start by passing a null pointer, then pass the same pointer each additional // call. When the authentication attempt is finished, delete the context. // TODO(bugs.webrtc.org/8905): Change "response" to "ZeroOnFreeBuffer". 
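The hunk that follows collapses HttpAuthenticate's (challenge, len) pair into a single absl::string_view. A short sketch of why both kinds of existing call sites keep compiling, with TakesView and Callers as illustrative names:

#include <string>

#include "absl/strings/string_view.h"

void TakesView(absl::string_view challenge) {
  // Parse `challenge` here.
  (void)challenge;
}

void Callers() {
  std::string owned = "Digest realm=\"example\"";
  TakesView(owned);                         // std::string converts implicitly.
  const char* buffer = "Basic xyz";
  TakesView(absl::string_view(buffer, 5));  // explicit (pointer, length) form.
}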
-HttpAuthResult HttpAuthenticate(const char* challenge, - size_t len, +HttpAuthResult HttpAuthenticate(absl::string_view challenge, const SocketAddress& server, - const std::string& method, - const std::string& uri, - const std::string& username, + absl::string_view method, + absl::string_view uri, + absl::string_view username, const CryptString& password, HttpAuthContext*& context, std::string& response, diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ifaddrs_android.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/ifaddrs_android.cc index 1cc63fe9f3..6474fb7244 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ifaddrs_android.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ifaddrs_android.cc @@ -24,6 +24,8 @@ #include #include +#include "absl/cleanup/cleanup.h" + namespace { struct netlinkrequest { @@ -138,10 +140,12 @@ int populate_ifaddrs(struct ifaddrs* ifaddr, } int getifaddrs(struct ifaddrs** result) { + *result = nullptr; int fd = socket(PF_NETLINK, SOCK_RAW, NETLINK_ROUTE); if (fd < 0) { return -1; } + absl::Cleanup close_file = [fd] { close(fd); }; netlinkrequest ifaddr_request; memset(&ifaddr_request, 0, sizeof(ifaddr_request)); @@ -151,10 +155,10 @@ int getifaddrs(struct ifaddrs** result) { ssize_t count = send(fd, &ifaddr_request, ifaddr_request.header.nlmsg_len, 0); if (static_cast(count) != ifaddr_request.header.nlmsg_len) { - close(fd); return -1; } struct ifaddrs* start = nullptr; + absl::Cleanup cleanup_start = [&start] { freeifaddrs(start); }; struct ifaddrs* current = nullptr; char buf[kMaxReadSize]; ssize_t amount_read = recv(fd, &buf, kMaxReadSize, 0); @@ -165,13 +169,12 @@ int getifaddrs(struct ifaddrs** result) { header = NLMSG_NEXT(header, header_size)) { switch (header->nlmsg_type) { case NLMSG_DONE: - // Success. Return. + // Success. Return `start`. Cancel `start` cleanup because it + // becomes callers responsibility. 
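The getifaddrs() rewrite above leans on absl::Cleanup scope guards instead of repeating close()/freeifaddrs() on every error path, cancelling the guard only when ownership is handed to the caller. A minimal sketch of the idiom, with OpenAndUse as an illustrative function rather than part of the patch:

#include <fcntl.h>
#include <unistd.h>

#include "absl/cleanup/cleanup.h"

int OpenAndUse(const char* path) {
  int fd = open(path, O_RDONLY);
  if (fd < 0) {
    return -1;
  }
  // Runs close(fd) on every return path below.
  absl::Cleanup close_fd = [fd] { close(fd); };
  // ... use fd; to keep it open past this scope instead, call
  // std::move(close_fd).Cancel() before returning.
  return 0;
}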
+ std::move(cleanup_start).Cancel(); *result = start; - close(fd); return 0; case NLMSG_ERROR: - close(fd); - freeifaddrs(start); return -1; case RTM_NEWADDR: { ifaddrmsg* address_msg = @@ -192,8 +195,6 @@ int getifaddrs(struct ifaddrs** result) { } if (populate_ifaddrs(newest, address_msg, RTA_DATA(rta), RTA_PAYLOAD(rta)) != 0) { - freeifaddrs(start); - *result = nullptr; return -1; } current = newest; @@ -206,8 +207,6 @@ int getifaddrs(struct ifaddrs** result) { } amount_read = recv(fd, &buf, kMaxReadSize, 0); } - close(fd); - freeifaddrs(start); return -1; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.cc index 86f42e0bf9..d544b611e1 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.cc @@ -11,6 +11,8 @@ #if defined(WEBRTC_POSIX) #include #include + +#include "absl/strings/string_view.h" #ifdef OPENBSD #include #endif @@ -276,14 +278,14 @@ bool IPFromAddrInfo(struct addrinfo* info, IPAddress* out) { return false; } -bool IPFromString(const std::string& str, IPAddress* out) { +bool IPFromString(absl::string_view str, IPAddress* out) { if (!out) { return false; } in_addr addr; - if (rtc::inet_pton(AF_INET, str.c_str(), &addr) == 0) { + if (rtc::inet_pton(AF_INET, str, &addr) == 0) { in6_addr addr6; - if (rtc::inet_pton(AF_INET6, str.c_str(), &addr6) == 0) { + if (rtc::inet_pton(AF_INET6, str, &addr6) == 0) { *out = IPAddress(); return false; } @@ -294,7 +296,7 @@ bool IPFromString(const std::string& str, IPAddress* out) { return true; } -bool IPFromString(const std::string& str, int flags, InterfaceAddress* out) { +bool IPFromString(absl::string_view str, int flags, InterfaceAddress* out) { IPAddress ip; if (!IPFromString(str, &ip)) { return false; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.h index 8725417393..58ad8ba4b2 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.h @@ -16,6 +16,8 @@ #include #include #include + +#include "absl/strings/string_view.h" #endif #if defined(WEBRTC_WIN) #include @@ -29,8 +31,8 @@ #if defined(WEBRTC_WIN) #include "rtc_base/win32.h" #endif +#include "absl/strings/string_view.h" #include "rtc_base/system/rtc_export.h" - namespace rtc { enum IPv6AddressFlag { @@ -155,8 +157,8 @@ class RTC_EXPORT InterfaceAddress : public IPAddress { }; bool IPFromAddrInfo(struct addrinfo* info, IPAddress* out); -RTC_EXPORT bool IPFromString(const std::string& str, IPAddress* out); -RTC_EXPORT bool IPFromString(const std::string& str, +RTC_EXPORT bool IPFromString(absl::string_view str, IPAddress* out); +RTC_EXPORT bool IPFromString(absl::string_view str, int flags, InterfaceAddress* out); bool IPIsAny(const IPAddress& ip); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/ContextUtils.java b/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/ContextUtils.java new file mode 100644 index 0000000000..e36ab72878 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/ContextUtils.java @@ -0,0 +1,45 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.content.Context; + +/** + * Class for storing the application context and retrieving it in a static context. Similar to + * org.chromium.base.ContextUtils. + */ +public class ContextUtils { + private static final String TAG = "ContextUtils"; + private static Context applicationContext; + + /** + * Stores the application context that will be returned by getApplicationContext. This is called + * by PeerConnectionFactory.initialize. The application context must be set before creating + * a PeerConnectionFactory and must not be modified while it is alive. + */ + public static void initialize(Context applicationContext) { + if (applicationContext == null) { + throw new IllegalArgumentException( + "Application context cannot be null for ContextUtils.initialize."); + } + ContextUtils.applicationContext = applicationContext; + } + + /** + * Returns the stored application context. + * + * @deprecated crbug.com/webrtc/8937 + */ + @Deprecated + public static Context getApplicationContext() { + return applicationContext; + } +} diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/Loggable.java b/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/Loggable.java new file mode 100644 index 0000000000..cd66aa1214 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/Loggable.java @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import org.webrtc.Logging.Severity; + +/** + * Java interface for WebRTC logging. The default implementation uses webrtc.Logging. + * + * When injected, the Loggable will receive logging from both Java and native. + */ +public interface Loggable { + public void onLogMessage(String message, Severity severity, String tag); +} diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/Logging.java b/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/Logging.java new file mode 100644 index 0000000000..e7a9921f4d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/Logging.java @@ -0,0 +1,201 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.util.EnumSet; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.webrtc.Loggable; + +/** + * Java wrapper for WebRTC logging. Logging defaults to java.util.logging.Logger, but a custom + * logger implementing the Loggable interface can be injected along with a Severity. 
All subsequent + * log messages will then be redirected to the injected Loggable, except those with a severity lower + * than the specified severity, which will be discarded. + * + * It is also possible to switch to native logging (rtc::LogMessage) if one of the following static + * functions are called from the app: + * - Logging.enableLogThreads + * - Logging.enableLogTimeStamps + * - Logging.enableLogToDebugOutput + * + * The priority goes: + * 1. Injected loggable + * 2. Native logging + * 3. Fallback logging. + * Only one method will be used at a time. + * + * Injecting a Loggable or using any of the enable... methods requires that the native library is + * loaded, using PeerConnectionFactory.initialize. + */ +public class Logging { + private static final Logger fallbackLogger = createFallbackLogger(); + private static volatile boolean loggingEnabled; + @Nullable private static Loggable loggable; + private static Severity loggableSeverity; + + private static Logger createFallbackLogger() { + final Logger fallbackLogger = Logger.getLogger("org.webrtc.Logging"); + fallbackLogger.setLevel(Level.ALL); + return fallbackLogger; + } + + static void injectLoggable(Loggable injectedLoggable, Severity severity) { + if (injectedLoggable != null) { + loggable = injectedLoggable; + loggableSeverity = severity; + } + } + + static void deleteInjectedLoggable() { + loggable = null; + } + + // TODO(solenberg): Remove once dependent projects updated. + @Deprecated + public enum TraceLevel { + TRACE_NONE(0x0000), + TRACE_STATEINFO(0x0001), + TRACE_WARNING(0x0002), + TRACE_ERROR(0x0004), + TRACE_CRITICAL(0x0008), + TRACE_APICALL(0x0010), + TRACE_DEFAULT(0x00ff), + TRACE_MODULECALL(0x0020), + TRACE_MEMORY(0x0100), + TRACE_TIMER(0x0200), + TRACE_STREAM(0x0400), + TRACE_DEBUG(0x0800), + TRACE_INFO(0x1000), + TRACE_TERSEINFO(0x2000), + TRACE_ALL(0xffff); + + public final int level; + TraceLevel(int level) { + this.level = level; + } + } + + // Keep in sync with webrtc/rtc_base/logging.h:LoggingSeverity. + public enum Severity { LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR, LS_NONE } + + public static void enableLogThreads() { + nativeEnableLogThreads(); + } + + public static void enableLogTimeStamps() { + nativeEnableLogTimeStamps(); + } + + // TODO(solenberg): Remove once dependent projects updated. + @Deprecated + public static void enableTracing(String path, EnumSet levels) {} + + // Enable diagnostic logging for messages of `severity` to the platform debug + // output. On Android, the output will be directed to Logcat. + // Note: this function starts collecting the output of the RTC_LOG() macros. + // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. + @SuppressWarnings("NoSynchronizedMethodCheck") + public static synchronized void enableLogToDebugOutput(Severity severity) { + if (loggable != null) { + throw new IllegalStateException( + "Logging to native debug output not supported while Loggable is injected. " + + "Delete the Loggable before calling this method."); + } + nativeEnableLogToDebugOutput(severity.ordinal()); + loggingEnabled = true; + } + + public static void log(Severity severity, String tag, String message) { + if (tag == null || message == null) { + throw new IllegalArgumentException("Logging tag or message may not be null."); + } + if (loggable != null) { + // Filter log messages below loggableSeverity. 
+ if (severity.ordinal() < loggableSeverity.ordinal()) { + return; + } + loggable.onLogMessage(message, severity, tag); + return; + } + + // Try native logging if no loggable is injected. + if (loggingEnabled) { + nativeLog(severity.ordinal(), tag, message); + return; + } + + // Fallback to system log. + Level level; + switch (severity) { + case LS_ERROR: + level = Level.SEVERE; + break; + case LS_WARNING: + level = Level.WARNING; + break; + case LS_INFO: + level = Level.INFO; + break; + default: + level = Level.FINE; + break; + } + fallbackLogger.log(level, tag + ": " + message); + } + + public static void d(String tag, String message) { + log(Severity.LS_INFO, tag, message); + } + + public static void e(String tag, String message) { + log(Severity.LS_ERROR, tag, message); + } + + public static void w(String tag, String message) { + log(Severity.LS_WARNING, tag, message); + } + + public static void e(String tag, String message, Throwable e) { + log(Severity.LS_ERROR, tag, message); + log(Severity.LS_ERROR, tag, e.toString()); + log(Severity.LS_ERROR, tag, getStackTraceString(e)); + } + + public static void w(String tag, String message, Throwable e) { + log(Severity.LS_WARNING, tag, message); + log(Severity.LS_WARNING, tag, e.toString()); + log(Severity.LS_WARNING, tag, getStackTraceString(e)); + } + + public static void v(String tag, String message) { + log(Severity.LS_VERBOSE, tag, message); + } + + private static String getStackTraceString(Throwable e) { + if (e == null) { + return ""; + } + + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw); + e.printStackTrace(pw); + return sw.toString(); + } + + private static native void nativeEnableLogToDebugOutput(int nativeSeverity); + private static native void nativeEnableLogThreads(); + private static native void nativeEnableLogTimeStamps(); + private static native void nativeLog(int severity, String tag, String message); +} diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/OWNERS b/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/OWNERS new file mode 100644 index 0000000000..109bea2725 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/OWNERS @@ -0,0 +1,2 @@ +magjed@webrtc.org +xalep@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/Size.java b/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/Size.java new file mode 100644 index 0000000000..a711b5d2ca --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/Size.java @@ -0,0 +1,45 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +/** + * Class for representing size of an object. Very similar to android.util.Size but available on all + * devices. 
+ */ +public class Size { + public int width; + public int height; + + public Size(int width, int height) { + this.width = width; + this.height = height; + } + + @Override + public String toString() { + return width + "x" + height; + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof Size)) { + return false; + } + final Size otherSize = (Size) other; + return width == otherSize.width && height == otherSize.height; + } + + @Override + public int hashCode() { + // Use prime close to 2^16 to avoid collisions for normal values less than 2^16. + return 1 + 65537 * width + height; + } +} diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/ThreadUtils.java b/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/ThreadUtils.java new file mode 100644 index 0000000000..0c502b1bc3 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/java/src/org/webrtc/ThreadUtils.java @@ -0,0 +1,212 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.os.Handler; +import android.os.Looper; +import android.os.SystemClock; +import androidx.annotation.Nullable; +import java.util.concurrent.Callable; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +public class ThreadUtils { + /** + * Utility class to be used for checking that a method is called on the correct thread. + */ + public static class ThreadChecker { + @Nullable private Thread thread = Thread.currentThread(); + + public void checkIsOnValidThread() { + if (thread == null) { + thread = Thread.currentThread(); + } + if (Thread.currentThread() != thread) { + throw new IllegalStateException("Wrong thread"); + } + } + + public void detachThread() { + thread = null; + } + } + + /** + * Throws exception if called from other than main thread. + */ + public static void checkIsOnMainThread() { + if (Thread.currentThread() != Looper.getMainLooper().getThread()) { + throw new IllegalStateException("Not on main thread!"); + } + } + + /** + * Utility interface to be used with executeUninterruptibly() to wait for blocking operations + * to complete without getting interrupted.. + */ + public interface BlockingOperation { void run() throws InterruptedException; } + + /** + * Utility method to make sure a blocking operation is executed to completion without getting + * interrupted. This should be used in cases where the operation is waiting for some critical + * work, e.g. cleanup, that must complete before returning. If the thread is interrupted during + * the blocking operation, this function will re-run the operation until completion, and only then + * re-interrupt the thread. + */ + public static void executeUninterruptibly(BlockingOperation operation) { + boolean wasInterrupted = false; + while (true) { + try { + operation.run(); + break; + } catch (InterruptedException e) { + // Someone is asking us to return early at our convenience. We can't cancel this operation, + // but we should preserve the information and pass it along. + wasInterrupted = true; + } + } + // Pass interruption information along. 
+ if (wasInterrupted) { + Thread.currentThread().interrupt(); + } + } + + public static boolean joinUninterruptibly(final Thread thread, long timeoutMs) { + final long startTimeMs = SystemClock.elapsedRealtime(); + long timeRemainingMs = timeoutMs; + boolean wasInterrupted = false; + while (timeRemainingMs > 0) { + try { + thread.join(timeRemainingMs); + break; + } catch (InterruptedException e) { + // Someone is asking us to return early at our convenience. We can't cancel this operation, + // but we should preserve the information and pass it along. + wasInterrupted = true; + final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs; + timeRemainingMs = timeoutMs - elapsedTimeMs; + } + } + // Pass interruption information along. + if (wasInterrupted) { + Thread.currentThread().interrupt(); + } + return !thread.isAlive(); + } + + public static void joinUninterruptibly(final Thread thread) { + executeUninterruptibly(new BlockingOperation() { + @Override + public void run() throws InterruptedException { + thread.join(); + } + }); + } + + public static void awaitUninterruptibly(final CountDownLatch latch) { + executeUninterruptibly(new BlockingOperation() { + @Override + public void run() throws InterruptedException { + latch.await(); + } + }); + } + + public static boolean awaitUninterruptibly(CountDownLatch barrier, long timeoutMs) { + final long startTimeMs = SystemClock.elapsedRealtime(); + long timeRemainingMs = timeoutMs; + boolean wasInterrupted = false; + boolean result = false; + do { + try { + result = barrier.await(timeRemainingMs, TimeUnit.MILLISECONDS); + break; + } catch (InterruptedException e) { + // Someone is asking us to return early at our convenience. We can't cancel this operation, + // but we should preserve the information and pass it along. + wasInterrupted = true; + final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs; + timeRemainingMs = timeoutMs - elapsedTimeMs; + } + } while (timeRemainingMs > 0); + // Pass interruption information along. + if (wasInterrupted) { + Thread.currentThread().interrupt(); + } + return result; + } + + /** + * Post `callable` to `handler` and wait for the result. + */ + public static V invokeAtFrontUninterruptibly( + final Handler handler, final Callable callable) { + if (handler.getLooper().getThread() == Thread.currentThread()) { + try { + return callable.call(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + // Place-holder classes that are assignable inside nested class. + class CaughtException { + Exception e; + } + class Result { + public V value; + } + final Result result = new Result(); + final CaughtException caughtException = new CaughtException(); + final CountDownLatch barrier = new CountDownLatch(1); + handler.post(new Runnable() { + @Override + public void run() { + try { + result.value = callable.call(); + } catch (Exception e) { + caughtException.e = e; + } + barrier.countDown(); + } + }); + awaitUninterruptibly(barrier); + // Re-throw any runtime exception caught inside the other thread. Since this is an invoke, add + // stack trace for the waiting thread as well. + if (caughtException.e != null) { + final RuntimeException runtimeException = new RuntimeException(caughtException.e); + runtimeException.setStackTrace( + concatStackTraces(caughtException.e.getStackTrace(), runtimeException.getStackTrace())); + throw runtimeException; + } + return result.value; + } + + /** + * Post `runner` to `handler`, at the front, and wait for completion. 
+ */ + public static void invokeAtFrontUninterruptibly(final Handler handler, final Runnable runner) { + invokeAtFrontUninterruptibly(handler, new Callable() { + @Override + public Void call() { + runner.run(); + return null; + } + }); + } + + static StackTraceElement[] concatStackTraces( + StackTraceElement[] inner, StackTraceElement[] outer) { + final StackTraceElement[] combined = new StackTraceElement[inner.length + outer.length]; + System.arraycopy(inner, 0, combined, 0, inner.length); + System.arraycopy(outer, 0, combined, inner.length, outer.length); + return combined; + } +} diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/location.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/location.cc deleted file mode 100644 index 08425494aa..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/location.cc +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/location.h" - -#include - -namespace rtc { - -std::string Location::ToString() const { - char buf[256]; - snprintf(buf, sizeof(buf), "%s@%s:%d", function_name_, file_name_, - line_number_); - return buf; -} - -} // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/location.h b/TMessagesProj/jni/voip/webrtc/rtc_base/location.h deleted file mode 100644 index c7335c2cb5..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/location.h +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_LOCATION_H_ -#define RTC_BASE_LOCATION_H_ - -#include - -#include "rtc_base/system/rtc_export.h" - -namespace rtc { - -// Location provides basic info where of an object was constructed, or was -// significantly brought to life. -// This is a stripped down version of: -// https://code.google.com/p/chromium/codesearch#chromium/src/base/location.h -class RTC_EXPORT Location { - public: - // Constructor should be called with a long-lived char*, such as __FILE__. - // It assumes the provided value will persist as a global constant, and it - // will not make a copy of it. - Location(const char* function_name, const char* file_name, int line_number) - : function_name_(function_name), - file_name_(file_name), - line_number_(line_number) {} - Location() = default; - - const char* function_name() const { return function_name_; } - const char* file_name() const { return file_name_; } - int line_number() const { return line_number_; } - // TODO(steveanton): Remove once all downstream users have been updated to use - // `file_name()` and/or `line_number()`. - const char* file_and_line() const { return file_name_; } - - std::string ToString() const; - - private: - const char* function_name_ = "Unknown"; - const char* file_name_ = "Unknown"; - int line_number_ = -1; -}; - -// Define a macro to record the current source location. 
-#define RTC_FROM_HERE RTC_FROM_HERE_WITH_FUNCTION(__FUNCTION__) - -#define RTC_FROM_HERE_WITH_FUNCTION(function_name) \ - ::rtc::Location(function_name, __FILE__, __LINE__) - -} // namespace rtc - -#endif // RTC_BASE_LOCATION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.cc index 4365142517..f511948ed3 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.cc @@ -15,12 +15,13 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" namespace rtc { -FileRotatingLogSink::FileRotatingLogSink(const std::string& log_dir_path, - const std::string& log_prefix, +FileRotatingLogSink::FileRotatingLogSink(absl::string_view log_dir_path, + absl::string_view log_prefix, size_t max_log_size, size_t num_log_files) : FileRotatingLogSink(new FileRotatingStream(log_dir_path, @@ -36,23 +37,33 @@ FileRotatingLogSink::FileRotatingLogSink(FileRotatingStream* stream) FileRotatingLogSink::~FileRotatingLogSink() {} void FileRotatingLogSink::OnLogMessage(const std::string& message) { + OnLogMessage(absl::string_view(message)); +} + +void FileRotatingLogSink::OnLogMessage(absl::string_view message) { if (!stream_->IsOpen()) { std::fprintf(stderr, "Init() must be called before adding this sink.\n"); return; } - stream_->Write(message.c_str(), message.size()); + stream_->Write(message.data(), message.size()); } void FileRotatingLogSink::OnLogMessage(const std::string& message, LoggingSeverity sev, const char* tag) { + OnLogMessage(absl::string_view(message), sev, tag); +} + +void FileRotatingLogSink::OnLogMessage(absl::string_view message, + LoggingSeverity sev, + const char* tag) { if (!stream_->IsOpen()) { std::fprintf(stderr, "Init() must be called before adding this sink.\n"); return; } stream_->Write(tag, strlen(tag)); stream_->Write(": ", 2); - stream_->Write(message.c_str(), message.size()); + stream_->Write(message.data(), message.size()); } bool FileRotatingLogSink::Init() { @@ -64,7 +75,7 @@ bool FileRotatingLogSink::DisableBuffering() { } CallSessionFileRotatingLogSink::CallSessionFileRotatingLogSink( - const std::string& log_dir_path, + absl::string_view log_dir_path, size_t max_total_log_size) : FileRotatingLogSink( new CallSessionFileRotatingStream(log_dir_path, max_total_log_size)) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.h b/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.h index 87bec6dba8..62a93b85a8 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.h @@ -16,7 +16,7 @@ #include #include -#include "rtc_base/constructor_magic.h" +#include "absl/strings/string_view.h" #include "rtc_base/file_rotating_stream.h" #include "rtc_base/logging.h" @@ -28,18 +28,25 @@ class FileRotatingLogSink : public LogSink { public: // `num_log_files` must be greater than 1 and `max_log_size` must be greater // than 0. - FileRotatingLogSink(const std::string& log_dir_path, - const std::string& log_prefix, + FileRotatingLogSink(absl::string_view log_dir_path, + absl::string_view log_prefix, size_t max_log_size, size_t num_log_files); ~FileRotatingLogSink() override; + FileRotatingLogSink(const FileRotatingLogSink&) = delete; + FileRotatingLogSink& operator=(const FileRotatingLogSink&) = delete; + // Writes the message to the current file. It will spill over to the next // file if needed. 
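The log_sinks hunks around this point switch FileRotatingLogSink to absl::string_view parameters and add string_view OnLogMessage overloads. Typical wiring of such a sink, as a sketch (directory, prefix and size limits are placeholder values):

#include "rtc_base/log_sinks.h"
#include "rtc_base/logging.h"

// The sink must outlive logging, so a long-lived (here intentionally leaked) instance is used.
void AttachRotatingFileSink() {
  auto* sink = new rtc::FileRotatingLogSink(
      "/data/local/tmp/webrtc_logs",  // log_dir_path (placeholder)
      "webrtc_log",                   // log_prefix (placeholder)
      /*max_log_size=*/10 * 1024 * 1024,
      /*num_log_files=*/10);
  if (sink->Init()) {
    rtc::LogMessage::AddLogToStream(sink, rtc::LS_INFO);
  }
}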
void OnLogMessage(const std::string& message) override; + void OnLogMessage(absl::string_view message) override; void OnLogMessage(const std::string& message, LoggingSeverity sev, const char* tag) override; + void OnLogMessage(absl::string_view message, + LoggingSeverity sev, + const char* tag) override; // Deletes any existing files in the directory and creates a new log file. virtual bool Init(); @@ -52,20 +59,20 @@ class FileRotatingLogSink : public LogSink { private: std::unique_ptr stream_; - - RTC_DISALLOW_COPY_AND_ASSIGN(FileRotatingLogSink); }; // Log sink that uses a CallSessionFileRotatingStream to write to disk. // Init() must be called before adding this sink. class CallSessionFileRotatingLogSink : public FileRotatingLogSink { public: - CallSessionFileRotatingLogSink(const std::string& log_dir_path, + CallSessionFileRotatingLogSink(absl::string_view log_dir_path, size_t max_total_log_size); ~CallSessionFileRotatingLogSink() override; - private: - RTC_DISALLOW_COPY_AND_ASSIGN(CallSessionFileRotatingLogSink); + CallSessionFileRotatingLogSink(const CallSessionFileRotatingLogSink&) = + delete; + CallSessionFileRotatingLogSink& operator=( + const CallSessionFileRotatingLogSink&) = delete; }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc index 4ebeebebf7..4bc9183d97 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc @@ -42,6 +42,8 @@ static const int kMaxLogLineSize = 1024 - 60; #include #include "absl/base/attributes.h" +#include "absl/strings/string_view.h" +#include "api/units/timestamp.h" #include "rtc_base/checks.h" #include "rtc_base/platform_thread_types.h" #include "rtc_base/string_encode.h" @@ -53,15 +55,18 @@ static const int kMaxLogLineSize = 1024 - 60; namespace rtc { namespace { + // By default, release builds don't log, debug builds at info level #if !defined(NDEBUG) -static LoggingSeverity g_min_sev = LS_INFO; -static LoggingSeverity g_dbg_sev = LS_INFO; +constexpr LoggingSeverity kDefaultLoggingSeverity = LS_INFO; #else -static LoggingSeverity g_min_sev = LS_NONE; -static LoggingSeverity g_dbg_sev = LS_NONE; +constexpr LoggingSeverity kDefaultLoggingSeverity = LS_NONE; #endif +// Note: `g_min_sev` and `g_dbg_sev` can be changed while running. +LoggingSeverity g_min_sev = kDefaultLoggingSeverity; +LoggingSeverity g_dbg_sev = kDefaultLoggingSeverity; + // Return the filename portion of the string (that following the last slash). const char* FilenameFromPath(const char* file) { const char* end1 = ::strrchr(file, '/'); @@ -80,6 +85,31 @@ webrtc::Mutex& GetLoggingLock() { } // namespace +std::string LogLineRef::DefaultLogLine() const { + rtc::StringBuilder log_output; + if (timestamp_ != webrtc::Timestamp::MinusInfinity()) { + // TODO(kwiberg): Switch to absl::StrFormat, if binary size is ok. + char timestamp[50]; // Maximum string length of an int64_t is 20. 
+ int len = + snprintf(timestamp, sizeof(timestamp), "[%03" PRId64 ":%03" PRId64 "]", + timestamp_.ms() / 1000, timestamp_.ms() % 1000); + RTC_DCHECK_LT(len, sizeof(timestamp)); + log_output << timestamp; + } + if (thread_id_.has_value()) { + log_output << "[" << *thread_id_ << "] "; + } + if (!filename_.empty()) { +#if defined(WEBRTC_ANDROID) + log_output << "(line " << line_ << "): "; +#else + log_output << "(" << filename_ << ":" << line_ << "): "; +#endif + } + log_output << message_; + return log_output.Release(); +} + ///////////////////////////////////////////////////////////////////////////// // LogMessage ///////////////////////////////////////////////////////////////////////////// @@ -94,8 +124,9 @@ ABSL_CONST_INIT LogSink* LogMessage::streams_ RTC_GUARDED_BY(GetLoggingLock()) = nullptr; ABSL_CONST_INIT std::atomic LogMessage::streams_empty_ = {true}; -// Boolean options default to false (0) -bool LogMessage::thread_, LogMessage::timestamp_; +// Boolean options default to false. +ABSL_CONST_INIT bool LogMessage::log_thread_ = false; +ABSL_CONST_INIT bool LogMessage::log_timestamp_ = false; LogMessage::LogMessage(const char* file, int line, LoggingSeverity sev) : LogMessage(file, line, sev, ERRCTX_NONE, 0) {} @@ -104,35 +135,28 @@ LogMessage::LogMessage(const char* file, int line, LoggingSeverity sev, LogErrorContext err_ctx, - int err) - : severity_(sev) { - if (timestamp_) { + int err) { + log_line_.set_severity(sev); + if (log_timestamp_) { + int64_t log_start_time = LogStartTime(); // Use SystemTimeMillis so that even if tests use fake clocks, the timestamp // in log messages represents the real system time. - int64_t time = TimeDiff(SystemTimeMillis(), LogStartTime()); + int64_t time = TimeDiff(SystemTimeMillis(), log_start_time); // Also ensure WallClockStartTime is initialized, so that it matches // LogStartTime. WallClockStartTime(); - // TODO(kwiberg): Switch to absl::StrFormat, if binary size is ok. - char timestamp[50]; // Maximum string length of an int64_t is 20. - int len = - snprintf(timestamp, sizeof(timestamp), "[%03" PRId64 ":%03" PRId64 "]", - time / 1000, time % 1000); - RTC_DCHECK_LT(len, sizeof(timestamp)); - print_stream_ << timestamp; + log_line_.set_timestamp(webrtc::Timestamp::Millis(time)); } - if (thread_) { - PlatformThreadId id = CurrentThreadId(); - print_stream_ << "[" << id << "] "; + if (log_thread_) { + log_line_.set_thread_id(CurrentThreadId()); } if (file != nullptr) { + log_line_.set_filename(FilenameFromPath(file)); + log_line_.set_line(line); #if defined(WEBRTC_ANDROID) - tag_ = FilenameFromPath(file); - print_stream_ << "(line " << line << "): "; -#else - print_stream_ << "(" << FilenameFromPath(file) << ":" << line << "): "; + log_line_.set_tag(log_line_.filename()); #endif } @@ -173,51 +197,32 @@ LogMessage::LogMessage(const char* file, int line, LoggingSeverity sev, const char* tag) - : LogMessage(file, line, sev, ERRCTX_NONE, 0 /* err */) { - tag_ = tag; + : LogMessage(file, line, sev, ERRCTX_NONE, /*err=*/0) { + log_line_.set_tag(tag); print_stream_ << tag << ": "; } #endif -// DEPRECATED. Currently only used by downstream projects that use -// implementation details of logging.h. Work is ongoing to remove those -// dependencies. 
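The constructor refactor above stops formatting the timestamp and thread id inline and instead stores them on log_line_, gated by the renamed log_thread_/log_timestamp_ flags. Those flags are still toggled through the existing public setters; a short sketch:

#include "rtc_base/logging.h"

void EnableLogPrefixes() {
  rtc::LogMessage::LogTimestamps(true);  // sets log_timestamp_
  rtc::LogMessage::LogThreads(true);     // sets log_thread_
  // Subsequent lines carry [seconds:milliseconds] and [thread-id] prefixes.
  RTC_LOG(LS_INFO) << "prefixed log line";
}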
-LogMessage::LogMessage(const char* file, - int line, - LoggingSeverity sev, - const std::string& tag) - : LogMessage(file, line, sev) { - print_stream_ << tag << ": "; -} - LogMessage::~LogMessage() { FinishPrintStream(); - const std::string str = print_stream_.Release(); + log_line_.set_message(print_stream_.Release()); - if (severity_ >= g_dbg_sev) { -#if defined(WEBRTC_ANDROID) - OutputToDebug(str, severity_, tag_); -#else - OutputToDebug(str, severity_); -#endif + if (log_line_.severity() >= g_dbg_sev) { + OutputToDebug(log_line_); } webrtc::MutexLock lock(&GetLoggingLock()); for (LogSink* entry = streams_; entry != nullptr; entry = entry->next_) { - if (severity_ >= entry->min_severity_) { -#if defined(WEBRTC_ANDROID) - entry->OnLogMessage(str, severity_, tag_); -#else - entry->OnLogMessage(str, severity_); -#endif + if (log_line_.severity() >= entry->min_severity_) { + entry->OnLogMessage(log_line_); } } } void LogMessage::AddTag(const char* tag) { #ifdef WEBRTC_ANDROID - tag_ = tag; + log_line_.set_tag(tag); #endif } @@ -243,11 +248,11 @@ uint32_t LogMessage::WallClockStartTime() { } void LogMessage::LogThreads(bool on) { - thread_ = on; + log_thread_ = on; } void LogMessage::LogTimestamps(bool on) { - timestamp_ = on; + log_timestamp_ = on; } void LogMessage::LogToDebug(LoggingSeverity min_sev) { @@ -293,7 +298,7 @@ void LogMessage::RemoveLogToStream(LogSink* stream) { UpdateMinLogSeverity(); } -void LogMessage::ConfigureLogging(const char* params) { +void LogMessage::ConfigureLogging(absl::string_view params) { LoggingSeverity current_level = LS_VERBOSE; LoggingSeverity debug_level = GetLogToDebug(); @@ -351,14 +356,8 @@ void LogMessage::UpdateMinLogSeverity() g_min_sev = min_sev; } -#if defined(WEBRTC_ANDROID) -void LogMessage::OutputToDebug(const std::string& str, - LoggingSeverity severity, - const char* tag) { -#else -void LogMessage::OutputToDebug(const std::string& str, - LoggingSeverity severity) { -#endif +void LogMessage::OutputToDebug(const LogLineRef& log_line) { + std::string msg_str = log_line.DefaultLogLine(); bool log_to_stderr = log_to_stderr_; #if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) && defined(NDEBUG) // On the Mac, all stderr output goes to the Console log and causes clutter. @@ -383,14 +382,14 @@ void LogMessage::OutputToDebug(const std::string& str, #if defined(WEBRTC_WIN) // Always log to the debugger. // Perhaps stderr should be controlled by a preference, as on Mac? - OutputDebugStringA(str.c_str()); + OutputDebugStringA(msg_str.c_str()); if (log_to_stderr) { // This handles dynamically allocated consoles, too. if (HANDLE error_handle = ::GetStdHandle(STD_ERROR_HANDLE)) { log_to_stderr = false; DWORD written = 0; - ::WriteFile(error_handle, str.data(), static_cast(str.size()), - &written, 0); + ::WriteFile(error_handle, msg_str.c_str(), + static_cast(msg_str.size()), &written, 0); } } #endif // WEBRTC_WIN @@ -401,7 +400,7 @@ void LogMessage::OutputToDebug(const std::string& str, // Also write to stderr which maybe available to executable started // from the shell. 
int prio; - switch (severity) { + switch (log_line.severity()) { case LS_VERBOSE: prio = ANDROID_LOG_VERBOSE; break; @@ -418,27 +417,29 @@ void LogMessage::OutputToDebug(const std::string& str, prio = ANDROID_LOG_UNKNOWN; } - int size = str.size(); - int line = 0; + int size = msg_str.size(); + int current_line = 0; int idx = 0; const int max_lines = size / kMaxLogLineSize + 1; if (max_lines == 1) { - __android_log_print(prio, tag, "%.*s", size, str.c_str()); + __android_log_print(prio, log_line.tag().data(), "%.*s", size, + msg_str.c_str()); } else { while (size > 0) { const int len = std::min(size, kMaxLogLineSize); - // Use the size of the string in the format (str may have \0 in the + // Use the size of the string in the format (msg may have \0 in the // middle). - __android_log_print(prio, tag, "[%d/%d] %.*s", line + 1, max_lines, len, - str.c_str() + idx); + __android_log_print(prio, log_line.tag().data(), "[%d/%d] %.*s", + current_line + 1, max_lines, len, + msg_str.c_str() + idx); idx += len; size -= len; - ++line; + ++current_line; } } #endif // WEBRTC_ANDROID if (log_to_stderr) { - fprintf(stderr, "%s", str.c_str()); + fprintf(stderr, "%s", msg_str.c_str()); fflush(stderr); } } @@ -550,6 +551,16 @@ void Log(const LogArgType* fmt, ...) { #endif namespace rtc { +// Default implementation, override is recomended. +void LogSink::OnLogMessage(const LogLineRef& log_line) { +#if defined(WEBRTC_ANDROID) + OnLogMessage(log_line.DefaultLogLine(), log_line.severity(), + log_line.tag().data()); +#else + OnLogMessage(log_line.DefaultLogLine(), log_line.severity()); +#endif +} + // Inefficient default implementation, override is recommended. void LogSink::OnLogMessage(const std::string& msg, LoggingSeverity severity, @@ -561,4 +572,20 @@ void LogSink::OnLogMessage(const std::string& msg, LoggingSeverity /* severity */) { OnLogMessage(msg); } + +// Inefficient default implementation, override is recommended. +void LogSink::OnLogMessage(absl::string_view msg, + LoggingSeverity severity, + const char* tag) { + OnLogMessage(tag + (": " + std::string(msg)), severity); +} + +void LogSink::OnLogMessage(absl::string_view msg, + LoggingSeverity /* severity */) { + OnLogMessage(msg); +} + +void LogSink::OnLogMessage(absl::string_view msg) { + OnLogMessage(std::string(msg)); +} } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h index 3ac12d7e6f..d59b9a0ef7 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h @@ -21,9 +21,13 @@ // RTC_LOG(sev) logs the given stream at severity "sev", which must be a // compile-time constant of the LoggingSeverity type, without the namespace // prefix. +// RTC_LOG_IF(sev, condition) logs the given stream at severitye "sev" if +// "condition" is true. // RTC_LOG_V(sev) Like RTC_LOG(), but sev is a run-time variable of the // LoggingSeverity type (basically, it just doesn't prepend the namespace). // RTC_LOG_F(sev) Like RTC_LOG(), but includes the name of the current function. +// RTC_LOG_IF_F(sev, condition), Like RTC_LOG_IF(), but includes the name of +// the current function. // RTC_LOG_T(sev) Like RTC_LOG(), but includes the this pointer. // RTC_LOG_T_F(sev) Like RTC_LOG_F(), but includes the this pointer. 
// RTC_LOG_GLE(sev [, mod]) attempt to add a string description of the @@ -49,12 +53,15 @@ #include #include // no-presubmit-check TODO(webrtc:8982) #include +#include #include #include "absl/base/attributes.h" #include "absl/meta/type_traits.h" #include "absl/strings/string_view.h" -#include "rtc_base/constructor_magic.h" +#include "absl/types/optional.h" +#include "api/units/timestamp.h" +#include "rtc_base/platform_thread_types.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/system/inline.h" @@ -73,9 +80,7 @@ namespace rtc { ////////////////////////////////////////////////////////////////////// - -// Note that the non-standard LoggingSeverity aliases exist because they are -// still in broad use. The meanings of the levels are: +// The meanings of the levels are: // LS_VERBOSE: This level is for data which we do not want to appear in the // normal debug log, but should appear in diagnostic logs. // LS_INFO: Chatty level used in debugging for all sorts of things, the default @@ -89,11 +94,6 @@ enum LoggingSeverity { LS_WARNING, LS_ERROR, LS_NONE, - // Compatibility aliases, to be deleted. - // TODO(bugs.webrtc.org/13362): Remove usage and delete. - INFO [[deprecated("Use LS_INFO")]] = LS_INFO, - WARNING [[deprecated("Use LS_WARNING")]] = LS_WARNING, - LERROR [[deprecated("Use LS_ERROR")]] = LS_ERROR }; // LogErrorContext assists in interpreting the meaning of an error value. @@ -108,6 +108,50 @@ enum LogErrorContext { }; class LogMessage; + +// LogLineRef encapsulates all the information required to generate a log line. +// It is used both internally to LogMessage but also as a parameter to +// LogSink::OnLogMessage, allowing custom LogSinks to format the log in +// the most flexible way. +class LogLineRef { + public: + absl::string_view message() const { return message_; } + absl::string_view filename() const { return filename_; } + int line() const { return line_; } + absl::optional thread_id() const { return thread_id_; } + webrtc::Timestamp timestamp() const { return timestamp_; } + absl::string_view tag() const { return tag_; } + LoggingSeverity severity() const { return severity_; } + +#if RTC_LOG_ENABLED() + std::string DefaultLogLine() const; +#else + std::string DefaultLogLine() const { return ""; } +#endif + + private: + friend class LogMessage; + void set_message(std::string message) { message_ = std::move(message); } + void set_filename(absl::string_view filename) { filename_ = filename; } + void set_line(int line) { line_ = line; } + void set_thread_id(absl::optional thread_id) { + thread_id_ = thread_id; + } + void set_timestamp(webrtc::Timestamp timestamp) { timestamp_ = timestamp; } + void set_tag(absl::string_view tag) { tag_ = tag; } + void set_severity(LoggingSeverity severity) { severity_ = severity; } + + std::string message_; + absl::string_view filename_; + int line_ = 0; + absl::optional thread_id_; + webrtc::Timestamp timestamp_ = webrtc::Timestamp::MinusInfinity(); + // The default Android debug output tag. + absl::string_view tag_ = "libjingle"; + // The severity level of this message + LoggingSeverity severity_; +}; + // Virtual sink interface that can receive log messages. 
class LogSink { public: @@ -120,6 +164,14 @@ class LogSink { LoggingSeverity severity); virtual void OnLogMessage(const std::string& message) = 0; + virtual void OnLogMessage(absl::string_view msg, + LoggingSeverity severity, + const char* tag); + virtual void OnLogMessage(absl::string_view message, + LoggingSeverity severity); + virtual void OnLogMessage(absl::string_view message); + virtual void OnLogMessage(const LogLineRef& line); + private: friend class ::rtc::LogMessage; #if RTC_LOG_ENABLED() @@ -278,8 +330,15 @@ inline Val MakeVal( template struct has_to_log_string : std::false_type {}; template -struct has_to_log_string()))> - : std::true_type {}; +struct has_to_log_string())), + std::string>::value>> : std::true_type {}; + +template ::value>* = nullptr> +ToStringVal MakeVal(const T& x) { + return {ToLogString(x)}; +} // Handle arbitrary types other than the above by falling back to stringstream. // TODO(bugs.webrtc.org/9278): Get rid of this overload when callers don't need @@ -301,11 +360,6 @@ ToStringVal MakeVal(const T& x) { return {os.str()}; } -template ::value>* = nullptr> -ToStringVal MakeVal(const T& x) { - return {ToLogString(x)}; -} - #if RTC_LOG_ENABLED() void Log(const LogArgType* fmt, ...); #else @@ -433,16 +487,11 @@ class LogMessage { #if defined(WEBRTC_ANDROID) LogMessage(const char* file, int line, LoggingSeverity sev, const char* tag); #endif - // DEPRECATED - DO NOT USE - PLEASE USE THE MACROS INSTEAD OF THE CLASS. - // Android code should use the 'const char*' version since tags are static - // and we want to avoid allocating a std::string copy per log line. - ABSL_DEPRECATED("Use RTC_LOG macros instead of accessing this class directly") - LogMessage(const char* file, - int line, - LoggingSeverity sev, - const std::string& tag); ~LogMessage(); + LogMessage(const LogMessage&) = delete; + LogMessage& operator=(const LogMessage&) = delete; + void AddTag(const char* tag); rtc::StringBuilder& stream(); // Returns the time at which this function was called for the first time. @@ -483,7 +532,7 @@ class LogMessage { static int GetMinLogSeverity(); // Parses the provided parameter stream to configure the options above. // Useful for configuring logging from the command line. - static void ConfigureLogging(const char* params); + static void ConfigureLogging(absl::string_view params); // Checks the current global debug severity and if the `streams_` collection // is empty. If `severity` is smaller than the global severity and if the // `streams_` collection is empty, the LogMessage will be considered a noop @@ -507,14 +556,6 @@ class LogMessage { LogMessage(const char* file, int line, LoggingSeverity sev, const char* tag) { } #endif - // DEPRECATED - DO NOT USE - PLEASE USE THE MACROS INSTEAD OF THE CLASS. - // Android code should use the 'const char*' version since tags are static - // and we want to avoid allocating a std::string copy per log line. 
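The new LogSink overloads above let a sink receive the structured LogLineRef instead of a preformatted string and build its own line. A minimal sink along those lines, assuming plain stderr output:

#include <cstdio>
#include <string>
#include "rtc_base/logging.h"

class StructuredLogSink : public rtc::LogSink {
 public:
  void OnLogMessage(const rtc::LogLineRef& line) override {
    std::fprintf(stderr, "%s:%d sev=%d %s",
                 std::string(line.filename()).c_str(), line.line(),
                 static_cast<int>(line.severity()),
                 std::string(line.message()).c_str());
  }
  // The legacy pure-virtual string overload still has to be implemented.
  void OnLogMessage(const std::string& message) override {
    std::fprintf(stderr, "%s", message.c_str());
  }
};

Such a sink is registered as before, e.g. rtc::LogMessage::AddLogToStream(&sink, rtc::LS_INFO).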
- ABSL_DEPRECATED("Use RTC_LOG macros instead of accessing this class directly") - LogMessage(const char* file, - int line, - LoggingSeverity sev, - const std::string& tag) {} ~LogMessage() = default; inline void AddTag(const char* tag) {} @@ -532,7 +573,7 @@ class LogMessage { inline static void RemoveLogToStream(LogSink* stream) {} inline static int GetLogToStream(LogSink* stream = nullptr) { return 0; } inline static int GetMinLogSeverity() { return 0; } - inline static void ConfigureLogging(const char* params) {} + inline static void ConfigureLogging(absl::string_view params) {} static constexpr bool IsNoop(LoggingSeverity severity) { return true; } template static constexpr bool IsNoop() { @@ -547,26 +588,14 @@ class LogMessage { // Updates min_sev_ appropriately when debug sinks change. static void UpdateMinLogSeverity(); -// These write out the actual log messages. -#if defined(WEBRTC_ANDROID) - static void OutputToDebug(const std::string& msg, - LoggingSeverity severity, - const char* tag); -#else - static void OutputToDebug(const std::string& msg, LoggingSeverity severity); -#endif // defined(WEBRTC_ANDROID) + // This writes out the actual log messages. + static void OutputToDebug(const LogLineRef& log_line_ref); // Called from the dtor (or from a test) to append optional extra error // information to the log stream and a newline character. void FinishPrintStream(); - // The severity level of this message - LoggingSeverity severity_; - -#if defined(WEBRTC_ANDROID) - // The default Android debug output tag. - const char* tag_ = "libjingle"; -#endif + LogLineRef log_line_; // String data generated in the constructor, that should be appended to // the message before output. @@ -581,8 +610,9 @@ class LogMessage { // are added/removed. static std::atomic streams_empty_; - // Flags for formatting options - static bool thread_, timestamp_; + // Flags for formatting options and their potential values. + static bool log_thread_; + static bool log_timestamp_; // Determines if logs will be directed to stderr in debug mode. static bool log_to_stderr_; @@ -590,11 +620,15 @@ class LogMessage { // Next methods do nothing; no one will call these functions. inline static void UpdateMinLogSeverity() {} #if defined(WEBRTC_ANDROID) - inline static void OutputToDebug(const std::string& msg, + inline static void OutputToDebug(absl::string_view filename, + int line, + absl::string_view msg, LoggingSeverity severity, const char* tag) {} #else - inline static void OutputToDebug(const std::string& msg, + inline static void OutputToDebug(absl::string_view filename, + int line, + absl::string_view msg, LoggingSeverity severity) {} #endif // defined(WEBRTC_ANDROID) inline void FinishPrintStream() {} @@ -602,8 +636,6 @@ class LogMessage { // The stringbuilder that buffers the formatted message before output rtc::StringBuilder print_stream_; - - RTC_DISALLOW_COPY_AND_ASSIGN(LogMessage); }; ////////////////////////////////////////////////////////////////////// @@ -619,6 +651,10 @@ class LogMessage { !rtc::LogMessage::IsNoop<::rtc::sev>() && \ RTC_LOG_FILE_LINE(::rtc::sev, __FILE__, __LINE__) +#define RTC_LOG_IF(sev, condition) \ + !rtc::LogMessage::IsNoop<::rtc::sev>() && (condition) && \ + RTC_LOG_FILE_LINE(::rtc::sev, __FILE__, __LINE__) + // The _V version is for when a variable is passed in. #define RTC_LOG_V(sev) \ !rtc::LogMessage::IsNoop(sev) && RTC_LOG_FILE_LINE(sev, __FILE__, __LINE__) @@ -626,10 +662,14 @@ class LogMessage { // The _F version prefixes the message with the current function name. 
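RTC_LOG_IF, added above, streams only when the compile-time severity is enabled and the runtime condition holds. A usage sketch with a made-up counter:

#include "rtc_base/logging.h"

void ReportLoss(int packets_lost) {
  RTC_LOG_IF(LS_WARNING, packets_lost > 0)
      << "lost " << packets_lost << " packets";
}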
#if (defined(__GNUC__) && !defined(NDEBUG)) || defined(WANT_PRETTY_LOG_F) #define RTC_LOG_F(sev) RTC_LOG(sev) << __PRETTY_FUNCTION__ << ": " +#define RTC_LOG_IF_F(sev, condition) \ + RTC_LOG_IF(sev, condition) << __PRETTY_FUNCTION__ << ": " #define RTC_LOG_T_F(sev) \ RTC_LOG(sev) << this << ": " << __PRETTY_FUNCTION__ << ": " #else #define RTC_LOG_F(sev) RTC_LOG(sev) << __FUNCTION__ << ": " +#define RTC_LOG_IF_F(sev, condition) \ + RTC_LOG_IF(sev, condition) << __FUNCTION__ << ": " #define RTC_LOG_T_F(sev) RTC_LOG(sev) << this << ": " << __FUNCTION__ << ": " #endif @@ -697,16 +737,20 @@ inline const char* AdaptString(const std::string& str) { // they only generate code in debug builds. #if RTC_DLOG_IS_ON #define RTC_DLOG(sev) RTC_LOG(sev) +#define RTC_DLOG_IF(sev, condition) RTC_LOG_IF(sev, condition) #define RTC_DLOG_V(sev) RTC_LOG_V(sev) #define RTC_DLOG_F(sev) RTC_LOG_F(sev) +#define RTC_DLOG_IF_F(sev, condition) RTC_LOG_IF_F(sev, condition) #else #define RTC_DLOG_EAT_STREAM_PARAMS() \ while (false) \ ::rtc::webrtc_logging_impl::LogMessageVoidify() & \ (::rtc::webrtc_logging_impl::LogStreamer<>()) #define RTC_DLOG(sev) RTC_DLOG_EAT_STREAM_PARAMS() +#define RTC_DLOG_IF(sev, condition) RTC_DLOG_EAT_STREAM_PARAMS() #define RTC_DLOG_V(sev) RTC_DLOG_EAT_STREAM_PARAMS() #define RTC_DLOG_F(sev) RTC_DLOG_EAT_STREAM_PARAMS() +#define RTC_DLOG_IF_F(sev, condition) RTC_DLOG_EAT_STREAM_PARAMS() #endif } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/mdns_responder_interface.h b/TMessagesProj/jni/voip/webrtc/rtc_base/mdns_responder_interface.h index 64fb3cebff..14ef9a202d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/mdns_responder_interface.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/mdns_responder_interface.h @@ -14,6 +14,7 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/ip_address.h" namespace webrtc { @@ -23,7 +24,7 @@ namespace webrtc { class MdnsResponderInterface { public: using NameCreatedCallback = - std::function; + std::function; using NameRemovedCallback = std::function; MdnsResponderInterface() = default; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/memory/always_valid_pointer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/memory/always_valid_pointer.h new file mode 100644 index 0000000000..4e68104b55 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/memory/always_valid_pointer.h @@ -0,0 +1,248 @@ +/* + * Copyright 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef RTC_BASE_MEMORY_ALWAYS_VALID_POINTER_H_ +#define RTC_BASE_MEMORY_ALWAYS_VALID_POINTER_H_ + +#include +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +// This template allows the instantiation of a pointer to Interface in such a +// way that if it is passed a null pointer, an object of class Default will be +// created, which will be deallocated when the pointer is deleted. +template +class AlwaysValidPointer { + public: + explicit AlwaysValidPointer(Interface* pointer) + : owned_instance_(pointer ? nullptr : std::make_unique()), + pointer_(pointer ? 
pointer : owned_instance_.get()) { + RTC_DCHECK(pointer_); + } + + template ::value), + bool>::type = true> + AlwaysValidPointer(Interface* pointer, Arg arg) + : owned_instance_(pointer ? nullptr + : std::make_unique(std::move(arg))), + pointer_(pointer ? pointer : owned_instance_.get()) { + RTC_DCHECK(pointer_); + } + + // Multiple arguments + template + AlwaysValidPointer(Interface* pointer, Arg1 arg1, Args... args) + : owned_instance_(pointer + ? nullptr + : std::make_unique(std::move(arg1), + std::move(args...))), + pointer_(pointer ? pointer : owned_instance_.get()) { + RTC_DCHECK(pointer_); + } + + // Create a pointer by + // a) using |pointer|, without taking ownership + // b) calling |function| and taking ownership of the result + template ::value, + bool>::type = true> + AlwaysValidPointer(Interface* pointer, Func function) + : owned_instance_(pointer ? nullptr : function()), + pointer_(owned_instance_ ? owned_instance_.get() : pointer) { + RTC_DCHECK(pointer_); + } + + // Create a pointer by + // a) taking over ownership of |instance| + // b) or fallback to |pointer|, without taking ownership. + // c) or Default. + AlwaysValidPointer(std::unique_ptr&& instance, Interface* pointer) + : owned_instance_( + instance + ? std::move(instance) + : (pointer == nullptr ? std::make_unique() : nullptr)), + pointer_(owned_instance_ ? owned_instance_.get() : pointer) { + RTC_DCHECK(pointer_); + } + + // Create a pointer by + // a) taking over ownership of |instance| + // b) or fallback to |pointer|, without taking ownership. + // c) or Default (with forwarded args). + template + AlwaysValidPointer(std::unique_ptr&& instance, + Interface* pointer, + Args... args) + : owned_instance_( + instance ? std::move(instance) + : (pointer == nullptr + ? std::make_unique(std::move(args...)) + : nullptr)), + pointer_(owned_instance_ ? owned_instance_.get() : pointer) { + RTC_DCHECK(pointer_); + } + + Interface* get() { return pointer_; } + Interface* operator->() { return pointer_; } + Interface& operator*() { return *pointer_; } + + Interface* get() const { return pointer_; } + Interface* operator->() const { return pointer_; } + Interface& operator*() const { return *pointer_; } + + private: + const std::unique_ptr owned_instance_; + Interface* const pointer_; +}; + +// This class is similar to AlwaysValidPointer, but it does not create +// a default object and crashes if none of the input pointers are non-null. +template +class AlwaysValidPointerNoDefault { + public: + explicit AlwaysValidPointerNoDefault(Interface* pointer) : pointer_(pointer) { + RTC_CHECK(pointer_); + } + + // Create a pointer by + // a) taking over ownership of |instance| + // b) or fallback to |pointer|, without taking ownership. + // At least one of the arguments must be non-null. + explicit AlwaysValidPointerNoDefault(std::unique_ptr instance, + Interface* pointer = nullptr) + : owned_instance_(std::move(instance)), + pointer_(owned_instance_ ? 
owned_instance_.get() : pointer) { + RTC_CHECK(pointer_); + } + + Interface* get() { return pointer_; } + Interface* operator->() { return pointer_; } + Interface& operator*() { return *pointer_; } + + Interface* get() const { return pointer_; } + Interface* operator->() const { return pointer_; } + Interface& operator*() const { return *pointer_; } + + private: + const std::unique_ptr owned_instance_; + Interface* const pointer_; +}; + +template +bool operator==(const AlwaysValidPointer& a, + const AlwaysValidPointer& b) { + return a.get() == b.get(); +} + +template +bool operator!=(const AlwaysValidPointer& a, + const AlwaysValidPointer& b) { + return !(a == b); +} + +template +bool operator==(const AlwaysValidPointer& a, std::nullptr_t) { + return a.get() == nullptr; +} + +template +bool operator!=(const AlwaysValidPointer& a, std::nullptr_t) { + return !(a == nullptr); +} + +template +bool operator==(std::nullptr_t, const AlwaysValidPointer& a) { + return a.get() == nullptr; +} + +template +bool operator!=(std::nullptr_t, const AlwaysValidPointer& a) { + return !(a == nullptr); +} + +template +bool operator==(const AlwaysValidPointerNoDefault& a, + const AlwaysValidPointerNoDefault& b) { + return a.get() == b.get(); +} + +template +bool operator!=(const AlwaysValidPointerNoDefault& a, + const AlwaysValidPointerNoDefault& b) { + return !(a == b); +} + +template +bool operator==(const AlwaysValidPointerNoDefault& a, std::nullptr_t) { + return a.get() == nullptr; +} + +template +bool operator!=(const AlwaysValidPointerNoDefault& a, std::nullptr_t) { + return !(a == nullptr); +} + +template +bool operator==(std::nullptr_t, const AlwaysValidPointerNoDefault& a) { + return a.get() == nullptr; +} + +template +bool operator!=(std::nullptr_t, const AlwaysValidPointerNoDefault& a) { + return !(a == nullptr); +} + +// Comparison with raw pointer. 
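AlwaysValidPointer, introduced above, lets a class accept an optional dependency and fall back to an owned default so the stored pointer is never null. A sketch under assumed types (Clock, RealClock and Pacer are made up for illustration):

#include <cstdint>
#include <memory>
#include "rtc_base/memory/always_valid_pointer.h"

class Clock {
 public:
  virtual ~Clock() = default;
  virtual int64_t NowMs() const = 0;
};

class RealClock : public Clock {
 public:
  int64_t NowMs() const override { return 0; }  // placeholder implementation
};

class Pacer {
 public:
  // A null `clock` silently falls back to an owned RealClock instance.
  explicit Pacer(Clock* clock) : clock_(clock) {}
  int64_t NowMs() const { return clock_->NowMs(); }
 private:
  const webrtc::AlwaysValidPointer<Clock, RealClock> clock_;
};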
+template +bool operator==(const AlwaysValidPointer& a, const V* b) { + return a.get() == b; +} + +template +bool operator!=(const AlwaysValidPointer& a, const V* b) { + return !(a == b); +} + +template +bool operator==(const T* a, const AlwaysValidPointer& b) { + return a == b.get(); +} + +template +bool operator!=(const T* a, const AlwaysValidPointer& b) { + return !(a == b); +} + +template +bool operator==(const AlwaysValidPointerNoDefault& a, const U* b) { + return a.get() == b; +} + +template +bool operator!=(const AlwaysValidPointerNoDefault& a, const U* b) { + return !(a == b); +} + +template +bool operator==(const T* a, const AlwaysValidPointerNoDefault& b) { + return a == b.get(); +} + +template +bool operator!=(const T* a, const AlwaysValidPointerNoDefault& b) { + return !(a == b); +} + +} // namespace webrtc + +#endif // RTC_BASE_MEMORY_ALWAYS_VALID_POINTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.h index 0bc943ac36..aa3164f09a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.h @@ -13,10 +13,9 @@ #include +#include "api/task_queue/pending_task_safety_flag.h" #include "rtc_base/stream.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" -#include "rtc_base/task_utils/to_queued_task.h" namespace rtc { @@ -29,6 +28,10 @@ class FifoBuffer final : public StreamInterface { // Creates a FIFO buffer with the specified capacity and owner FifoBuffer(size_t length, Thread* owner); ~FifoBuffer() override; + + FifoBuffer(const FifoBuffer&) = delete; + FifoBuffer& operator=(const FifoBuffer&) = delete; + // Gets the amount of data currently readable from the buffer. bool GetBuffered(size_t* data_len) const; @@ -77,9 +80,9 @@ class FifoBuffer final : public StreamInterface { private: void PostEvent(int events, int err) { - owner_->PostTask(webrtc::ToQueuedTask(task_safety_, [this, events, err]() { - SignalEvent(this, events, err); - })); + owner_->PostTask(webrtc::SafeTask( + task_safety_.flag(), + [this, events, err]() { SignalEvent(this, events, err); })); } // Helper method that implements Read. 
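The FifoBuffer hunk above replaces webrtc::ToQueuedTask with webrtc::SafeTask plus a safety flag, so a posted closure is dropped if its owner has already been destroyed. The same pattern in isolation (Owner and the posted lambda are placeholders; FifoBuffer's task_safety_ member is assumed to be a ScopedTaskSafety):

#include "api/task_queue/pending_task_safety_flag.h"
#include "rtc_base/thread.h"

class Owner {
 public:
  explicit Owner(rtc::Thread* thread) : thread_(thread) {}
  void PostPing() {
    // The task runs only if `safety_` (and therefore `this`) is still alive.
    thread_->PostTask(webrtc::SafeTask(safety_.flag(), [this] { Ping(); }));
  }
 private:
  void Ping() {}
  rtc::Thread* const thread_;
  webrtc::ScopedTaskSafety safety_;  // declared last: invalidated first in ~Owner
};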
Caller must acquire a lock @@ -110,7 +113,6 @@ class FifoBuffer final : public StreamInterface { Thread* const owner_; // object lock mutable webrtc::Mutex mutex_; - RTC_DISALLOW_COPY_AND_ASSIGN(FifoBuffer); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/memory_stream.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/memory_stream.cc index 94d31adf13..cbd78ac14a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/memory_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/memory_stream.cc @@ -128,7 +128,9 @@ StreamResult MemoryStream::DoReserve(size_t size, int* error) { return SR_SUCCESS; if (char* new_buffer = new char[size]) { - memcpy(new_buffer, buffer_, data_length_); + if (buffer_ != nullptr && data_length_ > 0) { + memcpy(new_buffer, buffer_, data_length_); + } delete[] buffer_; buffer_ = new_buffer; buffer_length_ = size; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/message_digest.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/message_digest.cc index 62b4a6bc97..56abcd2c7b 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/message_digest.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/message_digest.cc @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/openssl_digest.h" #include "rtc_base/string_encode.h" @@ -30,7 +31,7 @@ const char DIGEST_SHA_512[] = "sha-512"; static const size_t kBlockSize = 64; // valid for SHA-256 and down -MessageDigest* MessageDigestFactory::Create(const std::string& alg) { +MessageDigest* MessageDigestFactory::Create(absl::string_view alg) { MessageDigest* digest = new OpenSSLDigest(alg); if (digest->Size() == 0) { // invalid algorithm delete digest; @@ -39,7 +40,7 @@ MessageDigest* MessageDigestFactory::Create(const std::string& alg) { return digest; } -bool IsFips180DigestAlgorithm(const std::string& alg) { +bool IsFips180DigestAlgorithm(absl::string_view alg) { // These are the FIPS 180 algorithms. 
According to RFC 4572 Section 5, // "Self-signed certificates (for which legacy certificates are not a // consideration) MUST use one of the FIPS 180 algorithms (SHA-1, @@ -59,7 +60,7 @@ size_t ComputeDigest(MessageDigest* digest, return digest->Finish(output, out_len); } -size_t ComputeDigest(const std::string& alg, +size_t ComputeDigest(absl::string_view alg, const void* input, size_t in_len, void* output, @@ -69,15 +70,15 @@ size_t ComputeDigest(const std::string& alg, : 0; } -std::string ComputeDigest(MessageDigest* digest, const std::string& input) { +std::string ComputeDigest(MessageDigest* digest, absl::string_view input) { std::unique_ptr output(new char[digest->Size()]); ComputeDigest(digest, input.data(), input.size(), output.get(), digest->Size()); - return hex_encode(output.get(), digest->Size()); + return hex_encode(absl::string_view(output.get(), digest->Size())); } -bool ComputeDigest(const std::string& alg, - const std::string& input, +bool ComputeDigest(absl::string_view alg, + absl::string_view input, std::string* output) { std::unique_ptr digest(MessageDigestFactory::Create(alg)); if (!digest) { @@ -87,7 +88,7 @@ bool ComputeDigest(const std::string& alg, return true; } -std::string ComputeDigest(const std::string& alg, const std::string& input) { +std::string ComputeDigest(absl::string_view alg, absl::string_view input) { std::string output; ComputeDigest(alg, input, &output); return output; @@ -135,7 +136,7 @@ size_t ComputeHmac(MessageDigest* digest, return digest->Finish(output, out_len); } -size_t ComputeHmac(const std::string& alg, +size_t ComputeHmac(absl::string_view alg, const void* key, size_t key_len, const void* input, @@ -151,17 +152,17 @@ size_t ComputeHmac(const std::string& alg, } std::string ComputeHmac(MessageDigest* digest, - const std::string& key, - const std::string& input) { + absl::string_view key, + absl::string_view input) { std::unique_ptr output(new char[digest->Size()]); ComputeHmac(digest, key.data(), key.size(), input.data(), input.size(), output.get(), digest->Size()); - return hex_encode(output.get(), digest->Size()); + return hex_encode(absl::string_view(output.get(), digest->Size())); } -bool ComputeHmac(const std::string& alg, - const std::string& key, - const std::string& input, +bool ComputeHmac(absl::string_view alg, + absl::string_view key, + absl::string_view input, std::string* output) { std::unique_ptr digest(MessageDigestFactory::Create(alg)); if (!digest) { @@ -171,9 +172,9 @@ bool ComputeHmac(const std::string& alg, return true; } -std::string ComputeHmac(const std::string& alg, - const std::string& key, - const std::string& input) { +std::string ComputeHmac(absl::string_view alg, + absl::string_view key, + absl::string_view input) { std::string output; ComputeHmac(alg, key, input, &output); return output; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/message_digest.h b/TMessagesProj/jni/voip/webrtc/rtc_base/message_digest.h index 02e0bfd561..632b9af075 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/message_digest.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/message_digest.h @@ -15,6 +15,8 @@ #include +#include "absl/strings/string_view.h" + namespace rtc { // Definitions for the digest algorithms. @@ -42,12 +44,12 @@ class MessageDigest { // A factory class for creating digest objects. 
class MessageDigestFactory { public: - static MessageDigest* Create(const std::string& alg); + static MessageDigest* Create(absl::string_view alg); }; // A check that an algorithm is in a list of approved digest algorithms // from RFC 4572 (FIPS 180). -bool IsFips180DigestAlgorithm(const std::string& alg); +bool IsFips180DigestAlgorithm(absl::string_view alg); // Functions to create hashes. @@ -63,25 +65,25 @@ size_t ComputeDigest(MessageDigest* digest, // Like the previous function, but creates a digest implementation based on // the desired digest name `alg`, e.g. DIGEST_SHA_1. Returns 0 if there is no // digest with the given name. -size_t ComputeDigest(const std::string& alg, +size_t ComputeDigest(absl::string_view alg, const void* input, size_t in_len, void* output, size_t out_len); // Computes the hash of `input` using the `digest` hash implementation, and // returns it as a hex-encoded string. -std::string ComputeDigest(MessageDigest* digest, const std::string& input); +std::string ComputeDigest(MessageDigest* digest, absl::string_view input); // Like the previous function, but creates a digest implementation based on // the desired digest name `alg`, e.g. DIGEST_SHA_1. Returns empty string if // there is no digest with the given name. -std::string ComputeDigest(const std::string& alg, const std::string& input); +std::string ComputeDigest(absl::string_view alg, absl::string_view input); // Like the previous function, but returns an explicit result code. -bool ComputeDigest(const std::string& alg, - const std::string& input, +bool ComputeDigest(absl::string_view alg, + absl::string_view input, std::string* output); // Shorthand way to compute a hex-encoded hash using MD5. -inline std::string MD5(const std::string& input) { +inline std::string MD5(absl::string_view input) { return ComputeDigest(DIGEST_MD5, input); } @@ -102,7 +104,7 @@ size_t ComputeHmac(MessageDigest* digest, // Like the previous function, but creates a digest implementation based on // the desired digest name `alg`, e.g. DIGEST_SHA_1. Returns 0 if there is no // digest with the given name. -size_t ComputeHmac(const std::string& alg, +size_t ComputeHmac(absl::string_view alg, const void* key, size_t key_len, const void* input, @@ -112,18 +114,18 @@ size_t ComputeHmac(const std::string& alg, // Computes the HMAC of `input` using the `digest` hash implementation and `key` // to key the HMAC, and returns it as a hex-encoded string. std::string ComputeHmac(MessageDigest* digest, - const std::string& key, - const std::string& input); + absl::string_view key, + absl::string_view input); // Like the previous function, but creates a digest implementation based on // the desired digest name `alg`, e.g. DIGEST_SHA_1. Returns empty string if // there is no digest with the given name. -std::string ComputeHmac(const std::string& alg, - const std::string& key, - const std::string& input); +std::string ComputeHmac(absl::string_view alg, + absl::string_view key, + absl::string_view input); // Like the previous function, but returns an explicit result code. 
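With the digest helpers above moving from const std::string& to absl::string_view, callers can pass literals and views without constructing temporary strings. A small sketch (algorithm choice, key and payload are placeholders):

#include <string>
#include "rtc_base/message_digest.h"

std::string HashAndMac() {
  // Both helpers return hex-encoded strings (empty on an unknown algorithm).
  std::string digest = rtc::ComputeDigest(rtc::DIGEST_SHA_256, "payload");
  std::string mac = rtc::ComputeHmac(rtc::DIGEST_SHA_1, "secret-key", "payload");
  return digest + ":" + mac;
}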
-bool ComputeHmac(const std::string& alg, - const std::string& key, - const std::string& input, +bool ComputeHmac(absl::string_view alg, + absl::string_view key, + absl::string_view input, std::string* output); } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.cc deleted file mode 100644 index e6e973dbd9..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.cc +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2004 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/message_handler.h" - -#include "rtc_base/thread.h" - -namespace rtc { - -MessageHandlerAutoCleanup::MessageHandlerAutoCleanup() {} - -MessageHandlerAutoCleanup::~MessageHandlerAutoCleanup() { - // Note that even though this clears currently pending messages for the - // message handler, it's still racy since it doesn't prevent threads that - // might be in the process of posting new messages with would-be dangling - // pointers. - // This is related to the design of Message having a raw pointer. - // We could consider whether it would be safer to require message handlers - // to be reference counted (as some are). - ThreadManager::Clear(this); -} - -} // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.h b/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.h deleted file mode 100644 index 62c8344e1f..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.h +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2004 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_MESSAGE_HANDLER_H_ -#define RTC_BASE_MESSAGE_HANDLER_H_ - -#include - -#include "api/function_view.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/system/rtc_export.h" - -namespace rtc { - -struct Message; - -// MessageQueue/Thread Messages get dispatched via the MessageHandler interface. -class RTC_EXPORT MessageHandler { - public: - virtual ~MessageHandler() {} - virtual void OnMessage(Message* msg) = 0; -}; - -// Warning: Provided for backwards compatibility. -// -// This class performs expensive cleanup in the dtor that will affect all -// instances of Thread (and their pending message queues) and will block the -// current thread as well as all other threads. 
-class RTC_EXPORT MessageHandlerAutoCleanup : public MessageHandler { - public: - ~MessageHandlerAutoCleanup() override; - - protected: - MessageHandlerAutoCleanup(); - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(MessageHandlerAutoCleanup); -}; - -} // namespace rtc - -#endif // RTC_BASE_MESSAGE_HANDLER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/nat_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/nat_server.h index 5078fbb2c1..acbd62a092 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/nat_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/nat_server.h @@ -15,7 +15,6 @@ #include #include "rtc_base/async_udp_socket.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/nat_types.h" #include "rtc_base/proxy_server.h" #include "rtc_base/socket_address_pair.h" @@ -69,6 +68,9 @@ class NATServer : public sigslot::has_slots<> { const SocketAddress& external_ip); ~NATServer() override; + NATServer(const NATServer&) = delete; + NATServer& operator=(const NATServer&) = delete; + SocketAddress internal_udp_address() const { return udp_server_socket_->GetLocalAddress(); } @@ -122,7 +124,6 @@ class NATServer : public sigslot::has_slots<> { ProxyServer* tcp_proxy_server_; InternalMap* int_map_; ExternalMap* ext_map_; - RTC_DISALLOW_COPY_AND_ASSIGN(NATServer); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.cc index f6492a9305..fe021b95ff 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.cc @@ -384,8 +384,9 @@ void NATSocketServer::SetMessageQueue(Thread* queue) { server_->SetMessageQueue(queue); } -bool NATSocketServer::Wait(int cms, bool process_io) { - return server_->Wait(cms, process_io); +bool NATSocketServer::Wait(webrtc::TimeDelta max_wait_duration, + bool process_io) { + return server_->Wait(max_wait_duration, process_io); } void NATSocketServer::WakeUp() { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.h b/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.h index 9b1d2f09e3..0b301b5844 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.h @@ -17,7 +17,6 @@ #include #include -#include "rtc_base/constructor_magic.h" #include "rtc_base/nat_server.h" #include "rtc_base/nat_types.h" #include "rtc_base/socket.h" @@ -50,6 +49,9 @@ class NATSocketFactory : public SocketFactory, public NATInternalSocketFactory { const SocketAddress& nat_udp_addr, const SocketAddress& nat_tcp_addr); + NATSocketFactory(const NATSocketFactory&) = delete; + NATSocketFactory& operator=(const NATSocketFactory&) = delete; + // SocketFactory implementation Socket* CreateSocket(int family, int type) override; @@ -63,7 +65,6 @@ class NATSocketFactory : public SocketFactory, public NATInternalSocketFactory { SocketFactory* factory_; SocketAddress nat_udp_addr_; SocketAddress nat_tcp_addr_; - RTC_DISALLOW_COPY_AND_ASSIGN(NATSocketFactory); }; // Creates sockets that will send traffic through a NAT depending on what @@ -135,6 +136,9 @@ class NATSocketServer : public SocketServer, public NATInternalSocketFactory { explicit NATSocketServer(SocketServer* ss); + NATSocketServer(const NATSocketServer&) = delete; + NATSocketServer& operator=(const NATSocketServer&) = delete; + SocketServer* socketserver() { return server_; } Thread* queue() { return msg_queue_; } @@ -148,7 +152,7 @@ class NATSocketServer : 
public SocketServer, public NATInternalSocketFactory { Socket* CreateSocket(int family, int type) override; void SetMessageQueue(Thread* queue) override; - bool Wait(int cms, bool process_io) override; + bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) override; void WakeUp() override; // NATInternalSocketFactory implementation @@ -161,7 +165,6 @@ class NATSocketServer : public SocketServer, public NATInternalSocketFactory { SocketServer* server_; Thread* msg_queue_; TranslatorMap nats_; - RTC_DISALLOW_COPY_AND_ASSIGN(NATSocketServer); }; // Free-standing NAT helper functions. diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/net_helper.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/net_helper.cc index 893b500d56..4afee7bfb0 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/net_helper.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/net_helper.cc @@ -10,6 +10,8 @@ #include "rtc_base/net_helper.h" +#include "absl/strings/string_view.h" + namespace cricket { const char UDP_PROTOCOL_NAME[] = "udp"; @@ -17,7 +19,7 @@ const char TCP_PROTOCOL_NAME[] = "tcp"; const char SSLTCP_PROTOCOL_NAME[] = "ssltcp"; const char TLS_PROTOCOL_NAME[] = "tls"; -int GetProtocolOverhead(const std::string& protocol) { +int GetProtocolOverhead(absl::string_view protocol) { if (protocol == TCP_PROTOCOL_NAME || protocol == SSLTCP_PROTOCOL_NAME) { return kTcpHeaderSize; } else if (protocol == UDP_PROTOCOL_NAME) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/net_helper.h b/TMessagesProj/jni/voip/webrtc/rtc_base/net_helper.h index 9abbbdefb2..758c0faad9 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/net_helper.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/net_helper.h @@ -12,6 +12,8 @@ #include +#include "absl/strings/string_view.h" + // This header contains helper functions and constants used by different types // of transports. namespace cricket { @@ -25,7 +27,7 @@ constexpr int kTcpHeaderSize = 20; constexpr int kUdpHeaderSize = 8; // Get the transport layer overhead per packet based on the protocol. -int GetProtocolOverhead(const std::string& protocol); +int GetProtocolOverhead(absl::string_view protocol); } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.cc index f521f0f64b..73fe862313 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.cc @@ -11,6 +11,9 @@ #include "rtc_base/net_helpers.h" #include +#include + +#include "absl/strings/string_view.h" #if defined(WEBRTC_WIN) #include @@ -37,11 +40,12 @@ const char* inet_ntop(int af, const void* src, char* dst, socklen_t size) { #endif } -int inet_pton(int af, const char* src, void* dst) { +int inet_pton(int af, absl::string_view src, void* dst) { + std::string src_str(src); #if defined(WEBRTC_WIN) - return win32_inet_pton(af, src, dst); + return win32_inet_pton(af, src_str.c_str(), dst); #else - return ::inet_pton(af, src, dst); + return ::inet_pton(af, src_str.c_str(), dst); #endif } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.h b/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.h index 4ed84786b3..631c6348a7 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.h @@ -19,12 +19,14 @@ #include "rtc_base/win32.h" #endif +#include "absl/strings/string_view.h" + namespace rtc { // rtc namespaced wrappers for inet_ntop and inet_pton so we can avoid // the windows-native versions of these. 
const char* inet_ntop(int af, const void* src, char* dst, socklen_t size); -int inet_pton(int af, const char* src, void* dst); +int inet_pton(int af, absl::string_view src, void* dst); bool HasIPv4Enabled(); bool HasIPv6Enabled(); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc index 870f22a3a9..5ff8d143d9 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc @@ -10,6 +10,9 @@ #include "rtc_base/network.h" +#include "absl/strings/string_view.h" +#include "rtc_base/experiments/field_trial_parser.h" + #if defined(WEBRTC_POSIX) #include #endif // WEBRTC_POSIX @@ -25,20 +28,26 @@ #include #include "absl/algorithm/container.h" +#include "absl/memory/memory.h" #include "absl/strings/match.h" #include "absl/strings/string_view.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/transport/field_trial_based_config.h" +#include "api/units/time_delta.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/memory/always_valid_pointer.h" #include "rtc_base/network_monitor.h" #include "rtc_base/socket.h" // includes something that makes windows happy #include "rtc_base/string_encode.h" #include "rtc_base/string_utils.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/thread.h" -#include "system_wrappers/include/field_trial.h" namespace rtc { namespace { +using ::webrtc::SafeTask; +using ::webrtc::TimeDelta; // List of MAC addresses of known VPN (for windows). constexpr uint8_t kVpns[2][6] = { @@ -48,27 +57,15 @@ constexpr uint8_t kVpns[2][6] = { {0x2, 0x50, 0x41, 0x0, 0x0, 0x1}, }; -const uint32_t kUpdateNetworksMessage = 1; -const uint32_t kSignalNetworksMessage = 2; - // Fetch list of networks every two seconds. const int kNetworksUpdateIntervalMs = 2000; const int kHighestNetworkPreference = 127; -typedef struct { - Network* net; +struct AddressList { + std::unique_ptr net; std::vector ips; -} AddressList; - -bool CompareNetworks(const Network* a, const Network* b) { - if (a->prefix_length() == b->prefix_length()) { - if (a->name() == b->name()) { - return a->prefix() < b->prefix(); - } - } - return a->name() < b->name(); -} +}; bool SortNetworks(const Network* a, const Network* b) { // Network types will be preferred above everything else while sorting @@ -184,6 +181,21 @@ bool ShouldAdapterChangeTriggerNetworkChange(rtc::AdapterType old_type, return true; } +bool PreferGlobalIPv6Address(const webrtc::FieldTrialsView* field_trials) { + // Bug fix to prefer global IPv6 address over link local. + // Field trial key reserved in bugs.webrtc.org/14334 + if (field_trials && + field_trials->IsEnabled("WebRTC-IPv6NetworkResolutionFixes")) { + webrtc::FieldTrialParameter prefer_global_ipv6_address_enabled( + "PreferGlobalIPv6Address", false); + webrtc::ParseFieldTrial( + {&prefer_global_ipv6_address_enabled}, + field_trials->Lookup("WebRTC-IPv6NetworkResolutionFixes")); + return prefer_global_ipv6_address_enabled; + } + return false; +} + } // namespace // These addresses are used as the targets to find out the default local address @@ -192,7 +204,20 @@ const char kPublicIPv4Host[] = "8.8.8.8"; const char kPublicIPv6Host[] = "2001:4860:4860::8888"; const int kPublicPort = 53; // DNS port. 
-std::string MakeNetworkKey(const std::string& name, +namespace webrtc_network_internal { +bool CompareNetworks(const std::unique_ptr& a, + const std::unique_ptr& b) { + if (a->prefix_length() != b->prefix_length()) { + return a->prefix_length() < b->prefix_length(); + } + if (a->name() != b->name()) { + return a->name() < b->name(); + } + return a->prefix() < b->prefix(); +} +} // namespace webrtc_network_internal + +std::string MakeNetworkKey(absl::string_view name, const IPAddress& prefix, int prefix_length) { rtc::StringBuilder ost; @@ -215,7 +240,7 @@ bool MatchTypeNameWithIndexPattern(absl::string_view network_name, // result of the downstream network filtering, see e.g. // BasicPortAllocatorSession::GetNetworks when // PORTALLOCATOR_DISABLE_COSTLY_NETWORKS is turned on. -AdapterType GetAdapterTypeFromName(const char* network_name) { +AdapterType GetAdapterTypeFromName(absl::string_view network_name) { if (MatchTypeNameWithIndexPattern(network_name, "lo")) { // Note that we have a more robust way to determine if a network interface // is a loopback interface by checking the flag IFF_LOOPBACK in ifa_flags of @@ -255,7 +280,8 @@ AdapterType GetAdapterTypeFromName(const char* network_name) { MatchTypeNameWithIndexPattern(network_name, "rmnet_data") || MatchTypeNameWithIndexPattern(network_name, "v4-rmnet") || MatchTypeNameWithIndexPattern(network_name, "v4-rmnet_data") || - MatchTypeNameWithIndexPattern(network_name, "clat")) { + MatchTypeNameWithIndexPattern(network_name, "clat") || + MatchTypeNameWithIndexPattern(network_name, "ccmni")) { return ADAPTER_TYPE_CELLULAR; } #endif @@ -263,10 +289,6 @@ AdapterType GetAdapterTypeFromName(const char* network_name) { return ADAPTER_TYPE_UNKNOWN; } -NetworkManager::NetworkManager() {} - -NetworkManager::~NetworkManager() {} - NetworkManager::EnumerationPermission NetworkManager::enumeration_permission() const { return ENUMERATION_ALLOWED; @@ -280,84 +302,84 @@ webrtc::MdnsResponderInterface* NetworkManager::GetMdnsResponder() const { return nullptr; } -NetworkManagerBase::NetworkManagerBase() - : enumeration_permission_(NetworkManager::ENUMERATION_ALLOWED), - signal_network_preference_change_(webrtc::field_trial::IsEnabled( - "WebRTC-SignalNetworkPreferenceChange")) {} - -NetworkManagerBase::~NetworkManagerBase() { - for (const auto& kv : networks_map_) { - delete kv.second; - } -} +NetworkManagerBase::NetworkManagerBase( + const webrtc::FieldTrialsView* field_trials) + : field_trials_(field_trials), + enumeration_permission_(NetworkManager::ENUMERATION_ALLOWED), + signal_network_preference_change_( + field_trials + ? 
field_trials->IsEnabled("WebRTC-SignalNetworkPreferenceChange") + : false) {} NetworkManager::EnumerationPermission NetworkManagerBase::enumeration_permission() const { return enumeration_permission_; } -void NetworkManagerBase::GetAnyAddressNetworks(NetworkList* networks) { +std::vector NetworkManagerBase::GetAnyAddressNetworks() { + std::vector networks; if (!ipv4_any_address_network_) { const rtc::IPAddress ipv4_any_address(INADDR_ANY); - ipv4_any_address_network_.reset( - new rtc::Network("any", "any", ipv4_any_address, 0, ADAPTER_TYPE_ANY)); + ipv4_any_address_network_ = std::make_unique( + "any", "any", ipv4_any_address, 0, ADAPTER_TYPE_ANY, field_trials_); ipv4_any_address_network_->set_default_local_address_provider(this); ipv4_any_address_network_->set_mdns_responder_provider(this); ipv4_any_address_network_->AddIP(ipv4_any_address); } - networks->push_back(ipv4_any_address_network_.get()); + networks.push_back(ipv4_any_address_network_.get()); if (!ipv6_any_address_network_) { const rtc::IPAddress ipv6_any_address(in6addr_any); - ipv6_any_address_network_.reset( - new rtc::Network("any", "any", ipv6_any_address, 0, ADAPTER_TYPE_ANY)); + ipv6_any_address_network_ = std::make_unique( + "any", "any", ipv6_any_address, 0, ADAPTER_TYPE_ANY, field_trials_); ipv6_any_address_network_->set_default_local_address_provider(this); ipv6_any_address_network_->set_mdns_responder_provider(this); ipv6_any_address_network_->AddIP(ipv6_any_address); } - networks->push_back(ipv6_any_address_network_.get()); + networks.push_back(ipv6_any_address_network_.get()); + return networks; } -void NetworkManagerBase::GetNetworks(NetworkList* result) const { - result->clear(); - result->insert(result->begin(), networks_.begin(), networks_.end()); +std::vector NetworkManagerBase::GetNetworks() const { + std::vector result; + result.insert(result.begin(), networks_.begin(), networks_.end()); + return result; } -void NetworkManagerBase::MergeNetworkList(const NetworkList& new_networks, - bool* changed) { +void NetworkManagerBase::MergeNetworkList( + std::vector> new_networks, + bool* changed) { NetworkManager::Stats stats; - MergeNetworkList(new_networks, changed, &stats); + MergeNetworkList(std::move(new_networks), changed, &stats); } -void NetworkManagerBase::MergeNetworkList(const NetworkList& new_networks, - bool* changed, - NetworkManager::Stats* stats) { +void NetworkManagerBase::MergeNetworkList( + std::vector> new_networks, + bool* changed, + NetworkManager::Stats* stats) { *changed = false; // AddressList in this map will track IP addresses for all Networks // with the same key. std::map consolidated_address_list; - NetworkList list(new_networks); - absl::c_sort(list, CompareNetworks); + absl::c_sort(new_networks, rtc::webrtc_network_internal::CompareNetworks); // First, build a set of network-keys to the ipaddresses. 
- for (Network* network : list) { + for (auto& network : new_networks) { bool might_add_to_merged_list = false; std::string key = MakeNetworkKey(network->name(), network->prefix(), network->prefix_length()); + const std::vector& addresses = network->GetIPs(); if (consolidated_address_list.find(key) == consolidated_address_list.end()) { AddressList addrlist; - addrlist.net = network; - consolidated_address_list[key] = addrlist; + addrlist.net = std::move(network); + consolidated_address_list[key] = std::move(addrlist); might_add_to_merged_list = true; } - const std::vector& addresses = network->GetIPs(); AddressList& current_list = consolidated_address_list[key]; for (const InterfaceAddress& address : addresses) { current_list.ips.push_back(address); } - if (!might_add_to_merged_list) { - delete network; - } else { + if (might_add_to_merged_list) { if (current_list.ips[0].family() == AF_INET) { stats->ipv4_network_count++; } else { @@ -369,23 +391,24 @@ void NetworkManagerBase::MergeNetworkList(const NetworkList& new_networks, // Next, look for existing network objects to re-use. // Result of Network merge. Element in this list should have unique key. - NetworkList merged_list; - for (const auto& kv : consolidated_address_list) { + std::vector merged_list; + for (auto& kv : consolidated_address_list) { const std::string& key = kv.first; - Network* net = kv.second.net; + std::unique_ptr net = std::move(kv.second.net); auto existing = networks_map_.find(key); if (existing == networks_map_.end()) { - // This network is new. Place it in the network map. - merged_list.push_back(net); - networks_map_[key] = net; + // This network is new. net->set_id(next_available_network_id_++); - // Also, we might have accumulated IPAddresses from the first + // We might have accumulated IPAddresses from the first // step, set it here. net->SetIPs(kv.second.ips, true); + // Place it in the network map. + merged_list.push_back(net.get()); + networks_map_[key] = std::move(net); *changed = true; } else { // This network exists in the map already. Reset its IP addresses. - Network* existing_net = existing->second; + Network* existing_net = existing->second.get(); *changed = existing_net->SetIPs(kv.second.ips, *changed); merged_list.push_back(existing_net); if (net->type() != ADAPTER_TYPE_UNKNOWN && @@ -407,9 +430,6 @@ void NetworkManagerBase::MergeNetworkList(const NetworkList& new_networks, } } RTC_DCHECK(net->active()); - if (existing_net != net) { - delete net; - } } networks_map_[key]->set_mdns_responder_provider(this); } @@ -425,9 +445,9 @@ void NetworkManagerBase::MergeNetworkList(const NetworkList& new_networks, networks_ = merged_list; // Reset the active states of all networks. for (const auto& kv : networks_map_) { - Network* network = kv.second; + const std::unique_ptr& network = kv.second; // If `network` is in the newly generated `networks_`, it is active. 
- bool found = absl::c_linear_search(networks_, network); + bool found = absl::c_linear_search(networks_, network.get()); network->set_active(found); } absl::c_sort(networks_, SortNetworks); @@ -508,27 +528,25 @@ bool NetworkManagerBase::IsVpnMacAddress( return false; } -BasicNetworkManager::BasicNetworkManager() - : BasicNetworkManager(nullptr, nullptr) {} - -BasicNetworkManager::BasicNetworkManager(SocketFactory* socket_factory) - : BasicNetworkManager(nullptr, socket_factory) {} - -BasicNetworkManager::BasicNetworkManager( - NetworkMonitorFactory* network_monitor_factory) - : BasicNetworkManager(network_monitor_factory, nullptr) {} - BasicNetworkManager::BasicNetworkManager( NetworkMonitorFactory* network_monitor_factory, - SocketFactory* socket_factory) - : network_monitor_factory_(network_monitor_factory), + SocketFactory* socket_factory, + const webrtc::FieldTrialsView* field_trials) + : field_trials_(field_trials), + network_monitor_factory_(network_monitor_factory), socket_factory_(socket_factory), allow_mac_based_ipv6_( - webrtc::field_trial::IsEnabled("WebRTC-AllowMACBasedIPv6")), + field_trials_->IsEnabled("WebRTC-AllowMACBasedIPv6")), bind_using_ifname_( - !webrtc::field_trial::IsDisabled("WebRTC-BindUsingInterfaceName")) {} + !field_trials_->IsDisabled("WebRTC-BindUsingInterfaceName")) { + RTC_DCHECK(socket_factory_); +} -BasicNetworkManager::~BasicNetworkManager() {} +BasicNetworkManager::~BasicNetworkManager() { + if (task_safety_flag_) { + task_safety_flag_->SetNotAlive(); + } +} void BasicNetworkManager::OnNetworksChanged() { RTC_DCHECK_RUN_ON(thread_); @@ -538,19 +556,40 @@ void BasicNetworkManager::OnNetworksChanged() { #if defined(__native_client__) -bool BasicNetworkManager::CreateNetworks(bool include_ignored, - NetworkList* networks) const { +bool BasicNetworkManager::CreateNetworks( + bool include_ignored, + std::vector>* networks) const { RTC_DCHECK_NOTREACHED(); RTC_LOG(LS_WARNING) << "BasicNetworkManager doesn't work on NaCl yet"; return false; } #elif defined(WEBRTC_POSIX) -void BasicNetworkManager::ConvertIfAddrs(struct ifaddrs* interfaces, - IfAddrsConverter* ifaddrs_converter, - bool include_ignored, - NetworkList* networks) const { - NetworkMap current_networks; +NetworkMonitorInterface::InterfaceInfo BasicNetworkManager::GetInterfaceInfo( + struct ifaddrs* cursor) const { + if (cursor->ifa_flags & IFF_LOOPBACK) { + return { + .adapter_type = ADAPTER_TYPE_LOOPBACK, + .underlying_type_for_vpn = ADAPTER_TYPE_UNKNOWN, + .network_preference = NetworkPreference::NEUTRAL, + .available = true, + }; + } else if (network_monitor_) { + return network_monitor_->GetInterfaceInfo(cursor->ifa_name); + } else { + return {.adapter_type = GetAdapterTypeFromName(cursor->ifa_name), + .underlying_type_for_vpn = ADAPTER_TYPE_UNKNOWN, + .network_preference = NetworkPreference::NEUTRAL, + .available = true}; + } +} + +void BasicNetworkManager::ConvertIfAddrs( + struct ifaddrs* interfaces, + IfAddrsConverter* ifaddrs_converter, + bool include_ignored, + std::vector>* networks) const { + std::map current_networks; for (struct ifaddrs* cursor = interfaces; cursor != nullptr; cursor = cursor->ifa_next) { @@ -587,71 +626,71 @@ void BasicNetworkManager::ConvertIfAddrs(struct ifaddrs* interfaces, reinterpret_cast(cursor->ifa_addr)->sin6_scope_id; } - AdapterType adapter_type = ADAPTER_TYPE_UNKNOWN; - AdapterType vpn_underlying_adapter_type = ADAPTER_TYPE_UNKNOWN; - NetworkPreference network_preference = NetworkPreference::NEUTRAL; - if (cursor->ifa_flags & IFF_LOOPBACK) { - adapter_type 
= ADAPTER_TYPE_LOOPBACK; - } else { - // If there is a network_monitor, use it to get the adapter type. - // Otherwise, get the adapter type based on a few name matching rules. - if (network_monitor_) { - adapter_type = network_monitor_->GetAdapterType(cursor->ifa_name); - network_preference = - network_monitor_->GetNetworkPreference(cursor->ifa_name); + int prefix_length = CountIPMaskBits(mask); + prefix = TruncateIP(ip, prefix_length); + std::string key = + MakeNetworkKey(std::string(cursor->ifa_name), prefix, prefix_length); + + auto iter = current_networks.find(key); + if (iter != current_networks.end()) { + // We have already added this network, simply add extra IP. + iter->second->AddIP(ip); +#if RTC_DCHECK_IS_ON + // Validate that different IP of same network has same properties + auto existing_network = iter->second; + + NetworkMonitorInterface::InterfaceInfo if_info = GetInterfaceInfo(cursor); + if (if_info.adapter_type != ADAPTER_TYPE_VPN && + IsConfiguredVpn(prefix, prefix_length)) { + if_info.underlying_type_for_vpn = if_info.adapter_type; + if_info.adapter_type = ADAPTER_TYPE_VPN; } - if (adapter_type == ADAPTER_TYPE_UNKNOWN) { - adapter_type = GetAdapterTypeFromName(cursor->ifa_name); + + RTC_DCHECK(existing_network->type() == if_info.adapter_type); + RTC_DCHECK(existing_network->underlying_type_for_vpn() == + if_info.underlying_type_for_vpn); + RTC_DCHECK(existing_network->network_preference() == + if_info.network_preference); + if (!if_info.available) { + RTC_DCHECK(existing_network->ignored()); } +#endif // RTC_DCHECK_IS_ON + continue; } - if (adapter_type == ADAPTER_TYPE_VPN && network_monitor_) { - vpn_underlying_adapter_type = - network_monitor_->GetVpnUnderlyingAdapterType(cursor->ifa_name); - } + // Create a new network. + NetworkMonitorInterface::InterfaceInfo if_info = GetInterfaceInfo(cursor); - int prefix_length = CountIPMaskBits(mask); - prefix = TruncateIP(ip, prefix_length); - - if (adapter_type != ADAPTER_TYPE_VPN && + // Check manually configured VPN override. + if (if_info.adapter_type != ADAPTER_TYPE_VPN && IsConfiguredVpn(prefix, prefix_length)) { - vpn_underlying_adapter_type = adapter_type; - adapter_type = ADAPTER_TYPE_VPN; + if_info.underlying_type_for_vpn = if_info.adapter_type; + if_info.adapter_type = ADAPTER_TYPE_VPN; } - std::string key = - MakeNetworkKey(std::string(cursor->ifa_name), prefix, prefix_length); - auto iter = current_networks.find(key); - if (iter == current_networks.end()) { - // TODO(phoglund): Need to recognize other types as well. 
- std::unique_ptr network( - new Network(cursor->ifa_name, cursor->ifa_name, prefix, prefix_length, - adapter_type)); - network->set_default_local_address_provider(this); - network->set_scope_id(scope_id); - network->AddIP(ip); - network->set_ignored(IsIgnoredNetwork(*network)); - network->set_underlying_type_for_vpn(vpn_underlying_adapter_type); - network->set_network_preference(network_preference); - if (include_ignored || !network->ignored()) { - current_networks[key] = network.get(); - networks->push_back(network.release()); - } + auto network = std::make_unique( + cursor->ifa_name, cursor->ifa_name, prefix, prefix_length, + if_info.adapter_type, field_trials_.get()); + network->set_default_local_address_provider(this); + network->set_scope_id(scope_id); + network->AddIP(ip); + if (!if_info.available) { + network->set_ignored(true); } else { - Network* existing_network = iter->second; - existing_network->AddIP(ip); - if (adapter_type != ADAPTER_TYPE_UNKNOWN) { - existing_network->set_type(adapter_type); - existing_network->set_underlying_type_for_vpn( - vpn_underlying_adapter_type); - } - existing_network->set_network_preference(network_preference); + network->set_ignored(IsIgnoredNetwork(*network)); + } + network->set_underlying_type_for_vpn(if_info.underlying_type_for_vpn); + network->set_network_preference(if_info.network_preference); + if (include_ignored || !network->ignored()) { + current_networks[key] = network.get(); + networks->push_back(std::move(network)); } } } -bool BasicNetworkManager::CreateNetworks(bool include_ignored, - NetworkList* networks) const { +bool BasicNetworkManager::CreateNetworks( + bool include_ignored, + std::vector>* networks) const { struct ifaddrs* interfaces; int error = getifaddrs(&interfaces); if (error != 0) { @@ -712,9 +751,10 @@ unsigned int GetPrefix(PIP_ADAPTER_PREFIX prefixlist, return best_length; } -bool BasicNetworkManager::CreateNetworks(bool include_ignored, - NetworkList* networks) const { - NetworkMap current_networks; +bool BasicNetworkManager::CreateNetworks( + bool include_ignored, + std::vector>* networks) const { + std::map current_networks; // MSDN recommends a 15KB buffer for the first try at GetAdaptersAddresses. 
size_t buffer_size = 16384; std::unique_ptr adapter_info(new char[buffer_size]); @@ -737,17 +777,14 @@ bool BasicNetworkManager::CreateNetworks(bool include_ignored, if (adapter_addrs->OperStatus == IfOperStatusUp) { PIP_ADAPTER_UNICAST_ADDRESS address = adapter_addrs->FirstUnicastAddress; PIP_ADAPTER_PREFIX prefixlist = adapter_addrs->FirstPrefix; - std::string name; - std::string description; -#if !defined(NDEBUG) - name = ToUtf8(adapter_addrs->FriendlyName, - wcslen(adapter_addrs->FriendlyName)); -#endif - description = ToUtf8(adapter_addrs->Description, - wcslen(adapter_addrs->Description)); + std::string description = ToUtf8(adapter_addrs->Description, + wcslen(adapter_addrs->Description)); + for (; address; address = address->Next) { -#if defined(NDEBUG) - name = rtc::ToString(count); + std::string name = rtc::ToString(count); +#if !defined(NDEBUG) + name = ToUtf8(adapter_addrs->FriendlyName, + wcslen(adapter_addrs->FriendlyName)); #endif IPAddress ip; @@ -807,10 +844,10 @@ bool BasicNetworkManager::CreateNetworks(bool include_ignored, adapter_type = ADAPTER_TYPE_UNKNOWN; break; } - auto vpn_underlying_adapter_type = ADAPTER_TYPE_UNKNOWN; + auto underlying_type_for_vpn = ADAPTER_TYPE_UNKNOWN; if (adapter_type != ADAPTER_TYPE_VPN && IsConfiguredVpn(prefix, prefix_length)) { - vpn_underlying_adapter_type = adapter_type; + underlying_type_for_vpn = adapter_type; adapter_type = ADAPTER_TYPE_VPN; } if (adapter_type != ADAPTER_TYPE_VPN && @@ -818,13 +855,13 @@ bool BasicNetworkManager::CreateNetworks(bool include_ignored, reinterpret_cast( adapter_addrs->PhysicalAddress), adapter_addrs->PhysicalAddressLength))) { - vpn_underlying_adapter_type = adapter_type; + underlying_type_for_vpn = adapter_type; adapter_type = ADAPTER_TYPE_VPN; } - std::unique_ptr network(new Network( - name, description, prefix, prefix_length, adapter_type)); - network->set_underlying_type_for_vpn(vpn_underlying_adapter_type); + auto network = std::make_unique(name, description, prefix, + prefix_length, adapter_type); + network->set_underlying_type_for_vpn(underlying_type_for_vpn); network->set_default_local_address_provider(this); network->set_mdns_responder_provider(this); network->set_scope_id(scope_id); @@ -833,7 +870,7 @@ bool BasicNetworkManager::CreateNetworks(bool include_ignored, network->set_ignored(ignored); if (include_ignored || !network->ignored()) { current_networks[key] = network.get(); - networks->push_back(network.release()); + networks->push_back(std::move(network)); } } else { (*existing_network).second->AddIP(ip); @@ -875,11 +912,6 @@ bool BasicNetworkManager::IsIgnoredNetwork(const Network& network) const { } #endif - if (network_monitor_ && - !network_monitor_->IsAdapterAvailable(network.name())) { - return true; - } - // Ignore any networks with a 0.x.y.z IP if (network.prefix().family() == AF_INET) { return (network.prefix().v4AddressAsHostOrderInteger() < 0x01000000); @@ -897,9 +929,17 @@ void BasicNetworkManager::StartUpdating() { // we should trigger network signal immediately for the new clients // to start allocating ports. 
if (sent_first_update_) - thread_->Post(RTC_FROM_HERE, this, kSignalNetworksMessage); + thread_->PostTask(SafeTask(task_safety_flag_, [this] { + RTC_DCHECK_RUN_ON(thread_); + SignalNetworksChanged(); + })); } else { - thread_->Post(RTC_FROM_HERE, this, kUpdateNetworksMessage); + RTC_DCHECK(task_safety_flag_ == nullptr); + task_safety_flag_ = webrtc::PendingTaskSafetyFlag::Create(); + thread_->PostTask(SafeTask(task_safety_flag_, [this] { + RTC_DCHECK_RUN_ON(thread_); + UpdateNetworksContinually(); + })); StartNetworkMonitor(); } ++start_count_; @@ -912,7 +952,8 @@ void BasicNetworkManager::StopUpdating() { --start_count_; if (!start_count_) { - thread_->Clear(this); + task_safety_flag_->SetNotAlive(); + task_safety_flag_ = nullptr; sent_first_update_ = false; StopNetworkMonitor(); } @@ -923,7 +964,8 @@ void BasicNetworkManager::StartNetworkMonitor() { return; } if (!network_monitor_) { - network_monitor_.reset(network_monitor_factory_->CreateNetworkMonitor()); + network_monitor_.reset( + network_monitor_factory_->CreateNetworkMonitor(*field_trials_)); if (!network_monitor_) { return; } @@ -956,35 +998,11 @@ void BasicNetworkManager::StopNetworkMonitor() { } } -void BasicNetworkManager::OnMessage(Message* msg) { - RTC_DCHECK_RUN_ON(thread_); - switch (msg->message_id) { - case kUpdateNetworksMessage: { - UpdateNetworksContinually(); - break; - } - case kSignalNetworksMessage: { - SignalNetworksChanged(); - break; - } - default: - RTC_DCHECK_NOTREACHED(); - } -} - IPAddress BasicNetworkManager::QueryDefaultLocalAddress(int family) const { RTC_DCHECK(family == AF_INET || family == AF_INET6); - // TODO(bugs.webrtc.org/13145): Delete support for null `socket_factory_`, - // require socket factory to be provided to constructor. - SocketFactory* socket_factory = socket_factory_; - if (!socket_factory) { - socket_factory = thread_->socketserver(); - } - RTC_DCHECK(socket_factory); - std::unique_ptr socket( - socket_factory->CreateSocket(family, SOCK_DGRAM)); + socket_factory_->CreateSocket(family, SOCK_DGRAM)); if (!socket) { RTC_LOG_ERR(LS_ERROR) << "Socket creation failed"; return IPAddress(); @@ -1008,13 +1026,13 @@ void BasicNetworkManager::UpdateNetworksOnce() { if (!start_count_) return; - NetworkList list; + std::vector> list; if (!CreateNetworks(false, &list)) { SignalError(); } else { bool changed; NetworkManager::Stats stats; - MergeNetworkList(list, &changed, &stats); + MergeNetworkList(std::move(list), &changed, &stats); set_default_local_addresses(QueryDefaultLocalAddress(AF_INET), QueryDefaultLocalAddress(AF_INET6)); if (changed || !sent_first_update_) { @@ -1026,14 +1044,17 @@ void BasicNetworkManager::UpdateNetworksOnce() { void BasicNetworkManager::UpdateNetworksContinually() { UpdateNetworksOnce(); - thread_->PostDelayed(RTC_FROM_HERE, kNetworksUpdateIntervalMs, this, - kUpdateNetworksMessage); + thread_->PostDelayedTask(SafeTask(task_safety_flag_, + [this] { + RTC_DCHECK_RUN_ON(thread_); + UpdateNetworksContinually(); + }), + TimeDelta::Millis(kNetworksUpdateIntervalMs)); } void BasicNetworkManager::DumpNetworks() { RTC_DCHECK_RUN_ON(thread_); - NetworkList list; - GetNetworks(&list); + std::vector list = GetNetworks(); RTC_LOG(LS_INFO) << "NetworkManager detected " << list.size() << " networks:"; for (const Network* network : list) { RTC_LOG(LS_INFO) << network->ToString() << ": " << network->description() @@ -1056,30 +1077,14 @@ NetworkBindingResult BasicNetworkManager::BindSocketToNetwork( return network_monitor_->BindSocketToNetwork(socket_fd, address, if_name); } 
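The hunks above replace the MessageHandler/OnMessage plumbing with lambdas posted through SafeTask and guarded by a webrtc::PendingTaskSafetyFlag. A minimal sketch of that pattern, using only types that appear in this diff (PendingTaskSafetyFlag, SafeTask, TimeDelta, rtc::Thread); the PeriodicPoller class and the 2000 ms interval are illustrative assumptions:

// Sketch only: mirrors how BasicNetworkManager schedules periodic work above.
#include "api/scoped_refptr.h"
#include "api/task_queue/pending_task_safety_flag.h"
#include "api/units/time_delta.h"
#include "rtc_base/thread.h"

class PeriodicPoller {  // hypothetical class, for illustration only
 public:
  explicit PeriodicPoller(rtc::Thread* thread) : thread_(thread) {}

  ~PeriodicPoller() {
    // Replaces ThreadManager::Clear(this): queued closures are dropped once
    // the flag is marked not alive.
    if (safety_)
      safety_->SetNotAlive();
  }

  void Start() {
    safety_ = webrtc::PendingTaskSafetyFlag::Create();
    thread_->PostTask(webrtc::SafeTask(safety_, [this] { Poll(); }));
  }

 private:
  void Poll() {
    // ... do one round of work, then reschedule, as
    // UpdateNetworksContinually() does above.
    thread_->PostDelayedTask(webrtc::SafeTask(safety_, [this] { Poll(); }),
                             webrtc::TimeDelta::Millis(2000));
  }

  rtc::Thread* const thread_;
  rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> safety_;
};

Compared with the removed kUpdateNetworksMessage/kSignalNetworksMessage dispatch, the safety flag keeps a destroyed object's queued lambdas from running, which is why StopUpdating() and the destructor in the hunks above call SetNotAlive().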
-Network::Network(const std::string& name, - const std::string& desc, - const IPAddress& prefix, - int prefix_length) - : name_(name), - description_(desc), - prefix_(prefix), - prefix_length_(prefix_length), - key_(MakeNetworkKey(name, prefix, prefix_length)), - scope_id_(0), - ignored_(false), - type_(ADAPTER_TYPE_UNKNOWN), - preference_(0), - use_differentiated_cellular_costs_(webrtc::field_trial::IsEnabled( - "WebRTC-UseDifferentiatedCellularCosts")), - add_network_cost_to_vpn_( - webrtc::field_trial::IsEnabled("WebRTC-AddNetworkCostToVpn")) {} - -Network::Network(const std::string& name, - const std::string& desc, +Network::Network(absl::string_view name, + absl::string_view desc, const IPAddress& prefix, int prefix_length, - AdapterType type) - : name_(name), + AdapterType type, + const webrtc::FieldTrialsView* field_trials) + : field_trials_(field_trials), + name_(name), description_(desc), prefix_(prefix), prefix_length_(prefix_length), @@ -1087,11 +1092,7 @@ Network::Network(const std::string& name, scope_id_(0), ignored_(false), type_(type), - preference_(0), - use_differentiated_cellular_costs_(webrtc::field_trial::IsEnabled( - "WebRTC-UseDifferentiatedCellularCosts")), - add_network_cost_to_vpn_( - webrtc::field_trial::IsEnabled("WebRTC-AddNetworkCostToVpn")) {} + preference_(0) {} Network::Network(const Network&) = default; @@ -1126,13 +1127,20 @@ IPAddress Network::GetBestIP() const { return static_cast(ips_.at(0)); } - InterfaceAddress selected_ip, ula_ip; + InterfaceAddress selected_ip, link_local_ip, ula_ip; + const bool prefer_global_ipv6_to_link_local = + PreferGlobalIPv6Address(field_trials_); for (const InterfaceAddress& ip : ips_) { // Ignore any address which has been deprecated already. if (ip.ipv6_flags() & IPV6_ADDRESS_FLAG_DEPRECATED) continue; + if (prefer_global_ipv6_to_link_local && IPIsLinkLocal(ip)) { + link_local_ip = ip; + continue; + } + // ULA address should only be returned when we have no other // global IP. if (IPIsULA(static_cast(ip))) { @@ -1146,9 +1154,14 @@ IPAddress Network::GetBestIP() const { break; } - // No proper global IPv6 address found, use ULA instead. - if (IPIsUnspec(selected_ip) && !IPIsUnspec(ula_ip)) { - selected_ip = ula_ip; + if (IPIsUnspec(selected_ip)) { + if (prefer_global_ipv6_to_link_local && !IPIsUnspec(link_local_ip)) { + // No proper global IPv6 address found, use link local address instead. + selected_ip = link_local_ip; + } else if (!IPIsUnspec(ula_ip)) { + // No proper global and link local address found, use ULA instead. + selected_ip = ula_ip; + } } return static_cast(selected_ip); @@ -1161,11 +1174,66 @@ webrtc::MdnsResponderInterface* Network::GetMdnsResponder() const { return mdns_responder_provider_->GetMdnsResponder(); } -uint16_t Network::GetCost() const { +uint16_t Network::GetCost(const webrtc::FieldTrialsView* field_trials) const { + return GetCost( + *webrtc::AlwaysValidPointer(field_trials)); +} + +uint16_t Network::GetCost(const webrtc::FieldTrialsView& field_trials) const { AdapterType type = IsVpn() ? underlying_type_for_vpn_ : type_; + const bool use_differentiated_cellular_costs = + field_trials.IsEnabled("WebRTC-UseDifferentiatedCellularCosts"); + const bool add_network_cost_to_vpn = + field_trials.IsEnabled("WebRTC-AddNetworkCostToVpn"); return ComputeNetworkCostByType(type, IsVpn(), - use_differentiated_cellular_costs_, - add_network_cost_to_vpn_); + use_differentiated_cellular_costs, + add_network_cost_to_vpn); +} + +// This is the inverse of ComputeNetworkCostByType(). 
+std::pair +Network::GuessAdapterFromNetworkCost(int network_cost) { + switch (network_cost) { + case kNetworkCostMin: + return {rtc::ADAPTER_TYPE_ETHERNET, false}; + case kNetworkCostMin + kNetworkCostVpn: + return {rtc::ADAPTER_TYPE_ETHERNET, true}; + case kNetworkCostLow: + return {rtc::ADAPTER_TYPE_WIFI, false}; + case kNetworkCostLow + kNetworkCostVpn: + return {rtc::ADAPTER_TYPE_WIFI, true}; + case kNetworkCostCellular: + return {rtc::ADAPTER_TYPE_CELLULAR, false}; + case kNetworkCostCellular + kNetworkCostVpn: + return {rtc::ADAPTER_TYPE_CELLULAR, true}; + case kNetworkCostCellular2G: + return {rtc::ADAPTER_TYPE_CELLULAR_2G, false}; + case kNetworkCostCellular2G + kNetworkCostVpn: + return {rtc::ADAPTER_TYPE_CELLULAR_2G, true}; + case kNetworkCostCellular3G: + return {rtc::ADAPTER_TYPE_CELLULAR_3G, false}; + case kNetworkCostCellular3G + kNetworkCostVpn: + return {rtc::ADAPTER_TYPE_CELLULAR_3G, true}; + case kNetworkCostCellular4G: + return {rtc::ADAPTER_TYPE_CELLULAR_4G, false}; + case kNetworkCostCellular4G + kNetworkCostVpn: + return {rtc::ADAPTER_TYPE_CELLULAR_4G, true}; + case kNetworkCostCellular5G: + return {rtc::ADAPTER_TYPE_CELLULAR_5G, false}; + case kNetworkCostCellular5G + kNetworkCostVpn: + return {rtc::ADAPTER_TYPE_CELLULAR_5G, true}; + case kNetworkCostUnknown: + return {rtc::ADAPTER_TYPE_UNKNOWN, false}; + case kNetworkCostUnknown + kNetworkCostVpn: + return {rtc::ADAPTER_TYPE_UNKNOWN, true}; + case kNetworkCostMax: + return {rtc::ADAPTER_TYPE_ANY, false}; + case kNetworkCostMax + kNetworkCostVpn: + return {rtc::ADAPTER_TYPE_ANY, true}; + } + RTC_LOG(LS_VERBOSE) << "Unknown network cost: " << network_cost; + return {rtc::ADAPTER_TYPE_UNKNOWN, false}; } std::string Network::ToString() const { @@ -1186,7 +1254,7 @@ void BasicNetworkManager::set_vpn_list(const std::vector& vpn) { if (thread_ == nullptr) { vpn_ = vpn; } else { - thread_->Invoke(RTC_FROM_HERE, [this, vpn] { vpn_ = vpn; }); + thread_->BlockingCall([this, vpn] { vpn_ = vpn; }); } } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network.h b/TMessagesProj/jni/voip/webrtc/rtc_base/network.h index 0b462bdede..d82ddeed88 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network.h @@ -19,11 +19,16 @@ #include #include +#include "absl/base/attributes.h" +#include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/field_trials_view.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/transport/field_trial_based_config.h" #include "rtc_base/ip_address.h" #include "rtc_base/mdns_responder_interface.h" -#include "rtc_base/message_handler.h" +#include "rtc_base/memory/always_valid_pointer.h" #include "rtc_base/network_monitor.h" #include "rtc_base/network_monitor_factory.h" #include "rtc_base/socket_factory.h" @@ -48,17 +53,22 @@ class Thread; // By default, ignore loopback interfaces on the host. const int kDefaultNetworkIgnoreMask = ADAPTER_TYPE_LOOPBACK; +namespace webrtc_network_internal { +bool CompareNetworks(const std::unique_ptr& a, + const std::unique_ptr& b); +} // namespace webrtc_network_internal + // Makes a string key for this network. Used in the network manager's maps. // Network objects are keyed on interface name, network prefix and the // length of that prefix. 
-std::string MakeNetworkKey(const std::string& name, +std::string MakeNetworkKey(absl::string_view name, const IPAddress& prefix, int prefix_length); // Utility function that attempts to determine an adapter type by an interface // name (e.g., "wlan0"). Can be used by NetworkManager subclasses when other // mechanisms fail to determine the type. -RTC_EXPORT AdapterType GetAdapterTypeFromName(const char* network_name); +RTC_EXPORT AdapterType GetAdapterTypeFromName(absl::string_view network_name); class DefaultLocalAddressProvider { public: @@ -112,8 +122,6 @@ class NetworkMask { class RTC_EXPORT NetworkManager : public DefaultLocalAddressProvider, public MdnsResponderProvider { public: - typedef std::vector NetworkList; - // This enum indicates whether adapter enumeration is allowed. enum EnumerationPermission { ENUMERATION_ALLOWED, // Adapter enumeration is allowed. Getting 0 network @@ -123,9 +131,6 @@ class RTC_EXPORT NetworkManager : public DefaultLocalAddressProvider, // GetAnyAddressNetworks() should be used instead. }; - NetworkManager(); - ~NetworkManager() override; - // Called when network list is updated. sigslot::signal0<> SignalNetworksChanged; @@ -148,7 +153,9 @@ class RTC_EXPORT NetworkManager : public DefaultLocalAddressProvider, // It makes sure that repeated calls return the same object for a // given network, so that quality is tracked appropriately. Does not // include ignored networks. - virtual void GetNetworks(NetworkList* networks) const = 0; + // The returned vector of Network* is valid as long as the NetworkManager is + // alive. + virtual std::vector GetNetworks() const = 0; // Returns the current permission state of GetNetworks(). virtual EnumerationPermission enumeration_permission() const; @@ -160,9 +167,7 @@ class RTC_EXPORT NetworkManager : public DefaultLocalAddressProvider, // // This method appends the "any address" networks to the list, such that this // can optionally be called after GetNetworks. - // - // TODO(guoweis): remove this body when chromium implements this. - virtual void GetAnyAddressNetworks(NetworkList* networks) {} + virtual std::vector GetAnyAddressNetworks() = 0; // Dumps the current list of networks in the network manager. virtual void DumpNetworks() {} @@ -186,11 +191,10 @@ class RTC_EXPORT NetworkManager : public DefaultLocalAddressProvider, // Base class for NetworkManager implementations. class RTC_EXPORT NetworkManagerBase : public NetworkManager { public: - NetworkManagerBase(); - ~NetworkManagerBase() override; + NetworkManagerBase(const webrtc::FieldTrialsView* field_trials = nullptr); - void GetNetworks(NetworkList* networks) const override; - void GetAnyAddressNetworks(NetworkList* networks) override; + std::vector GetNetworks() const override; + std::vector GetAnyAddressNetworks() override; EnumerationPermission enumeration_permission() const override; @@ -201,16 +205,16 @@ class RTC_EXPORT NetworkManagerBase : public NetworkManager { static bool IsVpnMacAddress(rtc::ArrayView address); protected: - typedef std::map NetworkMap; // Updates `networks_` with the networks listed in `list`. If - // `network_map_` already has a Network object for a network listed + // `networks_map_` already has a Network object for a network listed // in the `list` then it is reused. Accept ownership of the Network // objects in the `list`. `changed` will be set to true if there is // any change in the network list. 
- void MergeNetworkList(const NetworkList& list, bool* changed); + void MergeNetworkList(std::vector> list, + bool* changed); // `stats` will be populated even if |*changed| is false. - void MergeNetworkList(const NetworkList& list, + void MergeNetworkList(std::vector> list, bool* changed, NetworkManager::Stats* stats); @@ -223,14 +227,18 @@ class RTC_EXPORT NetworkManagerBase : public NetworkManager { Network* GetNetworkFromAddress(const rtc::IPAddress& ip) const; + // To enable subclasses to get the networks list, without interfering with + // refactoring of the interface GetNetworks method. + const std::vector& GetNetworksInternal() const { return networks_; } + private: friend class NetworkTest; - + const webrtc::FieldTrialsView* field_trials_ = nullptr; EnumerationPermission enumeration_permission_; - NetworkList networks_; + std::vector networks_; - NetworkMap networks_map_; + std::map> networks_map_; std::unique_ptr ipv4_any_address_network_; std::unique_ptr ipv6_any_address_network_; @@ -251,19 +259,19 @@ class RTC_EXPORT NetworkManagerBase : public NetworkManager { // Basic implementation of the NetworkManager interface that gets list // of networks using OS APIs. class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, - public MessageHandlerAutoCleanup, public NetworkBinderInterface, public sigslot::has_slots<> { public: - ABSL_DEPRECATED( - "Use the version with socket_factory, see bugs.webrtc.org/13145") - BasicNetworkManager(); - explicit BasicNetworkManager(SocketFactory* socket_factory); - ABSL_DEPRECATED( - "Use the version with socket_factory, see bugs.webrtc.org/13145") - explicit BasicNetworkManager(NetworkMonitorFactory* network_monitor_factory); + // This is used by lots of downstream code. + BasicNetworkManager(SocketFactory* socket_factory, + const webrtc::FieldTrialsView* field_trials = nullptr) + : BasicNetworkManager(/* network_monitor_factory= */ nullptr, + socket_factory, + field_trials) {} + BasicNetworkManager(NetworkMonitorFactory* network_monitor_factory, - SocketFactory* socket_factory); + SocketFactory* socket_factory, + const webrtc::FieldTrialsView* field_trials = nullptr); ~BasicNetworkManager() override; void StartUpdating() override; @@ -271,8 +279,6 @@ class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, void DumpNetworks() override; - // MessageHandler interface. - void OnMessage(Message* msg) override; bool started() { return start_count_ > 0; } // Sets the network ignore list, which is empty by default. Any network on the @@ -304,11 +310,15 @@ class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, void ConvertIfAddrs(ifaddrs* interfaces, IfAddrsConverter* converter, bool include_ignored, - NetworkList* networks) const RTC_RUN_ON(thread_); + std::vector>* networks) const + RTC_RUN_ON(thread_); + NetworkMonitorInterface::InterfaceInfo GetInterfaceInfo( + struct ifaddrs* cursor) const RTC_RUN_ON(thread_); #endif // defined(WEBRTC_POSIX) // Creates a network object for each network available on the machine. - bool CreateNetworks(bool include_ignored, NetworkList* networks) const + bool CreateNetworks(bool include_ignored, + std::vector>* networks) const RTC_RUN_ON(thread_); // Determines if a network should be ignored. This should only be determined @@ -338,6 +348,10 @@ class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, Thread* thread_ = nullptr; bool sent_first_update_ = true; int start_count_ = 0; + // Chromium create BasicNetworkManager() w/o field trials. 
+ webrtc::AlwaysValidPointer + field_trials_; std::vector network_ignore_list_; NetworkMonitorFactory* const network_monitor_factory_; SocketFactory* const socket_factory_; @@ -347,31 +361,43 @@ class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, bool bind_using_ifname_ RTC_GUARDED_BY(thread_) = false; std::vector vpn_; + rtc::scoped_refptr task_safety_flag_; }; // Represents a Unix-type network interface, with a name and single address. class RTC_EXPORT Network { public: - Network(const std::string& name, - const std::string& description, + Network(absl::string_view name, + absl::string_view description, const IPAddress& prefix, - int prefix_length); - - Network(const std::string& name, - const std::string& description, + int prefix_length, + const webrtc::FieldTrialsView* field_trials = nullptr) + : Network(name, + description, + prefix, + prefix_length, + rtc::ADAPTER_TYPE_UNKNOWN, + field_trials) {} + + Network(absl::string_view name, + absl::string_view description, const IPAddress& prefix, int prefix_length, - AdapterType type); + AdapterType type, + const webrtc::FieldTrialsView* field_trials = nullptr); + Network(const Network&); ~Network(); // This signal is fired whenever type() or underlying_type_for_vpn() changes. - sigslot::signal1 SignalTypeChanged; + // Mutable, to support connecting on the const Network passed to cricket::Port + // constructor. + mutable sigslot::signal1 SignalTypeChanged; // This signal is fired whenever network preference changes. sigslot::signal1 SignalNetworkPreferenceChanged; - const DefaultLocalAddressProvider* default_local_address_provider() { + const DefaultLocalAddressProvider* default_local_address_provider() const { return default_local_address_provider_; } void set_default_local_address_provider( @@ -395,6 +421,9 @@ class RTC_EXPORT Network { // Returns the length, in bits, of this network's prefix. int prefix_length() const { return prefix_length_; } + // Returns the family for the network prefix. + int family() const { return prefix_.family(); } + // `key_` has unique value per network interface. Used in sorting network // interfaces. Key is derived from interface name and it's prefix. std::string key() const { return key_; } @@ -404,9 +433,11 @@ class RTC_EXPORT Network { // Here is the rule on how we mark the IPv6 address as ignorable for WebRTC. // 1) return all global temporary dynamic and non-deprecated ones. // 2) if #1 not available, return global ones. - // 3) if #2 not available, use ULA ipv6 as last resort. (ULA stands - // for unique local address, which is not route-able in open - // internet but might be useful for a close WebRTC deployment. + // 3) if #2 not available and WebRTC-IPv6NetworkResolutionFixes enabled, + // return local link ones. + // 4) if #3 not available, use ULA ipv6 as last resort. (ULA stands for + // unique local address, which is not route-able in open internet but might + // be useful for a close WebRTC deployment. // TODO(guoweis): rule #3 actually won't happen at current // implementation. The reason being that ULA address starting with @@ -419,10 +450,6 @@ class RTC_EXPORT Network { // IPv6 address IPAddress GetBestIP() const; - // Keep the original function here for now. - // TODO(guoweis): Remove this when all callers are migrated to GetBestIP(). - IPAddress ip() const { return GetBestIP(); } - // Adds an active IP address to this network. Does not check for duplicates. 
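A short construction sketch for the revised Network surface, based on the constructor and accessors shown in these hunks; the addresses, the interface name, and the choice of webrtc::FieldTrialBasedConfig as the FieldTrialsView implementation are assumptions made for illustration:

// Sketch only: not part of the patch.
#include <cstdint>

#include "api/transport/field_trial_based_config.h"
#include "rtc_base/ip_address.h"
#include "rtc_base/network.h"
#include "rtc_base/network_constants.h"

void NetworkUsageSketch() {
  webrtc::FieldTrialBasedConfig field_trials;

  rtc::IPAddress prefix;
  rtc::IPFromString("192.168.1.0", &prefix);  // helper from ip_address.h

  // string_view constructor with an explicit adapter type and field trials.
  rtc::Network net("eth0", "Ethernet adapter", prefix, /*prefix_length=*/24,
                   rtc::ADAPTER_TYPE_ETHERNET, &field_trials);

  rtc::IPAddress ip;
  rtc::IPFromString("192.168.1.17", &ip);
  net.AddIP(ip);

  // Field-trial-aware cost query (the non-deprecated overload above).
  uint16_t cost = net.GetCost(field_trials);
  rtc::IPAddress best = net.GetBestIP();
  (void)cost;
  (void)best;
}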
void AddIP(const InterfaceAddress& ip) { ips_.push_back(ip); } void AddIP(const IPAddress& ip) { ips_.push_back(rtc::InterfaceAddress(ip)); } @@ -496,7 +523,15 @@ class RTC_EXPORT Network { } } - uint16_t GetCost() const; + // Note: This function is called "rarely". + // Twice per Network in BasicPortAllocator if + // PORTALLOCATOR_DISABLE_COSTLY_NETWORKS. Once in Port::Construct() (and when + // Port::OnNetworkTypeChanged is called). + ABSL_DEPRECATED( + "Use the version with field trials, see bugs.webrtc.org/webrtc:10335") + uint16_t GetCost(const webrtc::FieldTrialsView* field_trials = nullptr) const; + uint16_t GetCost(const webrtc::FieldTrialsView& field_trials) const; + // A unique id assigned by the network manager, which may be signaled // to the remote side in the candidate. uint16_t id() const { return id_; } @@ -526,10 +561,14 @@ class RTC_EXPORT Network { SignalNetworkPreferenceChanged(this); } + static std::pair + GuessAdapterFromNetworkCost(int network_cost); + // Debugging description of this network std::string ToString() const; private: + const webrtc::FieldTrialsView* field_trials_ = nullptr; const DefaultLocalAddressProvider* default_local_address_provider_ = nullptr; const MdnsResponderProvider* mdns_responder_provider_ = nullptr; std::string name_; @@ -545,8 +584,6 @@ class RTC_EXPORT Network { int preference_; bool active_ = true; uint16_t id_ = 0; - bool use_differentiated_cellular_costs_ = false; - bool add_network_cost_to_vpn_ = false; NetworkPreference network_preference_ = NetworkPreference::NEUTRAL; friend class NetworkManager; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network_constants.h b/TMessagesProj/jni/voip/webrtc/rtc_base/network_constants.h index 0495afdcc9..578b9710d0 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network_constants.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network_constants.h @@ -57,6 +57,16 @@ enum AdapterType { std::string AdapterTypeToString(AdapterType type); +// Useful for testing! +constexpr AdapterType kAllAdapterTypes[] = { + ADAPTER_TYPE_UNKNOWN, ADAPTER_TYPE_ETHERNET, + ADAPTER_TYPE_WIFI, ADAPTER_TYPE_CELLULAR, + ADAPTER_TYPE_VPN, ADAPTER_TYPE_LOOPBACK, + ADAPTER_TYPE_ANY, ADAPTER_TYPE_CELLULAR_2G, + ADAPTER_TYPE_CELLULAR_3G, ADAPTER_TYPE_CELLULAR_4G, + ADAPTER_TYPE_CELLULAR_5G, +}; + } // namespace rtc #endif // RTC_BASE_NETWORK_CONSTANTS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.h b/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.h index c0eea1ff52..605854f6ea 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.h @@ -14,6 +14,7 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/network_constants.h" namespace rtc { @@ -72,18 +73,38 @@ class NetworkBinderInterface { // changes, and fires the SignalNetworksChanged event when networks change. class NetworkMonitorInterface { public: + struct InterfaceInfo { + // The type of adapter if known. + AdapterType adapter_type; + + // Is ADAPTER_TYPE_UNKNOWN unless adapter_type == ADAPTER_TYPE_VPN. + AdapterType underlying_type_for_vpn = ADAPTER_TYPE_UNKNOWN; + + // The OS/firmware specific preference of this interface. + NetworkPreference network_preference = NetworkPreference::NEUTRAL; + + // Is this interface available to use? WebRTC shouldn't attempt to use it if + // this returns false. + // + // It's possible for this status to change, in which case + // SignalNetworksChanged will be fired. 
+ // + // The specific use case this was added for was a phone with two SIM + // cards, where attempting to use all interfaces returned from getifaddrs + // caused the connection to be dropped. + bool available = true; + }; + NetworkMonitorInterface(); virtual ~NetworkMonitorInterface(); virtual void Start() = 0; virtual void Stop() = 0; - virtual AdapterType GetAdapterType(const std::string& interface_name) = 0; - virtual AdapterType GetVpnUnderlyingAdapterType( - const std::string& interface_name) = 0; - - virtual NetworkPreference GetNetworkPreference( - const std::string& interface_name) = 0; + // Get information about an interface. + // If the interface is not known, the return struct will have set + // `adapter_type` to ADAPTER_TYPE_UNKNOWN and `available` to false. + virtual InterfaceInfo GetInterfaceInfo(absl::string_view interface_name) = 0; // Does `this` NetworkMonitorInterface implement BindSocketToNetwork? // Only Android returns true. @@ -94,23 +115,10 @@ class NetworkMonitorInterface { virtual NetworkBindingResult BindSocketToNetwork( int socket_fd, const IPAddress& address, - const std::string& interface_name) { + absl::string_view interface_name) { return NetworkBindingResult::NOT_IMPLEMENTED; } - // Is this interface available to use? WebRTC shouldn't attempt to use it if - // this returns false. - // - // It's possible for this status to change, in which case - // SignalNetworksChanged will be fired. - // - // These specific use case this was added for was a phone with two SIM cards, - // where attempting to use all interfaces returned from getifaddrs caused the - // connection to be dropped. - virtual bool IsAdapterAvailable(const std::string& interface_name) { - return true; - } - void SetNetworksChangedCallback(std::function callback) { networks_changed_callback_ = std::move(callback); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor_factory.h b/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor_factory.h index dadcd4aa8a..c76ed97d8c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor_factory.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor_factory.h @@ -11,6 +11,10 @@ #ifndef RTC_BASE_NETWORK_MONITOR_FACTORY_H_ #define RTC_BASE_NETWORK_MONITOR_FACTORY_H_ +namespace webrtc { +class FieldTrialsView; +} // namespace webrtc + namespace rtc { // Forward declaring this so it's not part of the API surface; it's only @@ -24,7 +28,8 @@ class NetworkMonitorInterface; */ class NetworkMonitorFactory { public: - virtual NetworkMonitorInterface* CreateNetworkMonitor() = 0; + virtual NetworkMonitorInterface* CreateNetworkMonitor( + const webrtc::FieldTrialsView& field_trials) = 0; virtual ~NetworkMonitorFactory(); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/null_socket_server.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/null_socket_server.cc index 4705163c4a..366349db3a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/null_socket_server.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/null_socket_server.cc @@ -10,18 +10,22 @@ #include "rtc_base/null_socket_server.h" +#include "api/units/time_delta.h" #include "rtc_base/checks.h" +#include "rtc_base/event.h" +#include "rtc_base/socket_server.h" namespace rtc { NullSocketServer::NullSocketServer() = default; NullSocketServer::~NullSocketServer() {} -bool NullSocketServer::Wait(int cms, bool process_io) { +bool NullSocketServer::Wait(webrtc::TimeDelta max_wait_duration, + bool process_io) { // Wait with the given timeout. 
Do not log a warning if we end up waiting for // a long time; that just means no one has any work for us, which is perfectly // legitimate. - event_.Wait(/*give_up_after_ms=*/cms, /*warn_after_ms=*/Event::kForever); + event_.Wait(max_wait_duration, /*warn_after=*/Event::kForever); return true; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/null_socket_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/null_socket_server.h index 6d4ae848e5..87f49f436e 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/null_socket_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/null_socket_server.h @@ -23,7 +23,7 @@ class RTC_EXPORT NullSocketServer : public SocketServer { NullSocketServer(); ~NullSocketServer() override; - bool Wait(int cms, bool process_io) override; + bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) override; void WakeUp() override; Socket* CreateSocket(int family, int type) override; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_max_counter.h b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_max_counter.h index 26dd506d63..5eb45d392b 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_max_counter.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_max_counter.h @@ -19,7 +19,6 @@ #include "absl/types/optional.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" namespace rtc { @@ -34,6 +33,10 @@ template class MovingMaxCounter { public: explicit MovingMaxCounter(int64_t window_length_ms); + + MovingMaxCounter(const MovingMaxCounter&) = delete; + MovingMaxCounter& operator=(const MovingMaxCounter&) = delete; + // Advances the current time, and adds a new sample. The new current time must // be at least as large as the old current time. void Add(const T& sample, int64_t current_time_ms); @@ -57,7 +60,6 @@ class MovingMaxCounter { #if RTC_DCHECK_IS_ON int64_t last_call_time_ms_ = std::numeric_limits::min(); #endif - RTC_DISALLOW_COPY_AND_ASSIGN(MovingMaxCounter); }; template diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_median_filter.h b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_median_filter.h deleted file mode 100644 index 157eb152c3..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_median_filter.h +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_NUMERICS_MOVING_MEDIAN_FILTER_H_ -#define RTC_BASE_NUMERICS_MOVING_MEDIAN_FILTER_H_ - -#include - -#include -#include - -#include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/numerics/percentile_filter.h" - -namespace webrtc { - -// Class to efficiently get moving median filter from a stream of samples. -template -class MovingMedianFilter { - public: - // Construct filter. `window_size` is how many latest samples are stored and - // used to take median. `window_size` must be positive. - explicit MovingMedianFilter(size_t window_size); - - // Insert a new sample. - void Insert(const T& value); - - // Removes all samples; - void Reset(); - - // Get median over the latest window. 
-  T GetFilteredValue() const;
-
-  // The number of samples that are currently stored.
-  size_t GetNumberOfSamplesStored() const;
-
- private:
-  PercentileFilter<T> percentile_filter_;
-  std::list<T> samples_;
-  size_t samples_stored_;
-  const size_t window_size_;
-
-  RTC_DISALLOW_COPY_AND_ASSIGN(MovingMedianFilter);
-};
-
-template <typename T>
-MovingMedianFilter<T>::MovingMedianFilter(size_t window_size)
-    : percentile_filter_(0.5f), samples_stored_(0), window_size_(window_size) {
-  RTC_CHECK_GT(window_size, 0);
-}
-
-template <typename T>
-void MovingMedianFilter<T>::Insert(const T& value) {
-  percentile_filter_.Insert(value);
-  samples_.emplace_back(value);
-  ++samples_stored_;
-  if (samples_stored_ > window_size_) {
-    percentile_filter_.Erase(samples_.front());
-    samples_.pop_front();
-    --samples_stored_;
-  }
-}
-
-template <typename T>
-T MovingMedianFilter<T>::GetFilteredValue() const {
-  return percentile_filter_.GetPercentileValue();
-}
-
-template <typename T>
-void MovingMedianFilter<T>::Reset() {
-  percentile_filter_.Reset();
-  samples_.clear();
-  samples_stored_ = 0;
-}
-
-template <typename T>
-size_t MovingMedianFilter<T>::GetNumberOfSamplesStored() const {
-  return samples_stored_;
-}
-
-}  // namespace webrtc
-#endif  // RTC_BASE_NUMERICS_MOVING_MEDIAN_FILTER_H_
diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_percentile_filter.h b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_percentile_filter.h
new file mode 100644
index 0000000000..d68814a25b
--- /dev/null
+++ b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_percentile_filter.h
@@ -0,0 +1,103 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_BASE_NUMERICS_MOVING_PERCENTILE_FILTER_H_
+#define RTC_BASE_NUMERICS_MOVING_PERCENTILE_FILTER_H_
+
+#include
+
+#include
+#include
+
+#include "rtc_base/checks.h"
+#include "rtc_base/numerics/percentile_filter.h"
+
+namespace webrtc {
+
+// Class to efficiently get moving percentile filter from a stream of samples.
+template <typename T>
+class MovingPercentileFilter {
+ public:
+  // Construct filter. `percentile` defines what percentile to track and
+  // `window_size` is how many latest samples are stored for finding the
+  // percentile. `percentile` must be between 0.0 and 1.0 (inclusive) and
+  // `window_size` must be greater than 0.
+  MovingPercentileFilter(float percentile, size_t window_size);
+
+  MovingPercentileFilter(const MovingPercentileFilter&) = delete;
+  MovingPercentileFilter& operator=(const MovingPercentileFilter&) = delete;
+
+  // Insert a new sample.
+  void Insert(const T& value);
+
+  // Removes all samples;
+  void Reset();
+
+  // Get percentile over the latest window.
+  T GetFilteredValue() const;
+
+  // The number of samples that are currently stored.
+  size_t GetNumberOfSamplesStored() const;
+
+ private:
+  PercentileFilter<T> percentile_filter_;
+  std::list<T> samples_;
+  size_t samples_stored_;
+  const size_t window_size_;
+};
+
+// Convenience type for the common median case.
+template <typename T>
+class MovingMedianFilter : public MovingPercentileFilter<T> {
+ public:
+  explicit MovingMedianFilter(size_t window_size)
+      : MovingPercentileFilter<T>(0.5f, window_size) {}
+};
+
+template <typename T>
+MovingPercentileFilter<T>::MovingPercentileFilter(float percentile,
+                                                  size_t window_size)
+    : percentile_filter_(percentile),
+      samples_stored_(0),
+      window_size_(window_size) {
+  RTC_CHECK_GT(window_size, 0);
+}
+
+template <typename T>
+void MovingPercentileFilter<T>::Insert(const T& value) {
+  percentile_filter_.Insert(value);
+  samples_.emplace_back(value);
+  ++samples_stored_;
+  if (samples_stored_ > window_size_) {
+    percentile_filter_.Erase(samples_.front());
+    samples_.pop_front();
+    --samples_stored_;
+  }
+}
+
+template <typename T>
+T MovingPercentileFilter<T>::GetFilteredValue() const {
+  return percentile_filter_.GetPercentileValue();
+}
+
+template <typename T>
+void MovingPercentileFilter<T>::Reset() {
+  percentile_filter_.Reset();
+  samples_.clear();
+  samples_stored_ = 0;
+}
+
+template <typename T>
+size_t MovingPercentileFilter<T>::GetNumberOfSamplesStored() const {
+  return samples_stored_;
+}
+
+}  // namespace webrtc
+#endif  // RTC_BASE_NUMERICS_MOVING_PERCENTILE_FILTER_H_
diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.cc
index bc10e619eb..7ac922041d 100644
--- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.cc
+++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.cc
@@ -13,6 +13,8 @@
 #include
 #include
 #include
+
+#include "absl/strings/string_view.h"
 #ifdef OPENSSL_IS_BORINGSSL
 #include
 #endif
@@ -31,8 +33,8 @@
 #endif
 #include "absl/memory/memory.h"
+#include "api/units/time_delta.h"
 #include "rtc_base/checks.h"
-#include "rtc_base/location.h"
 #include "rtc_base/logging.h"
 #include "rtc_base/numerics/safe_conversions.h"
 #include "rtc_base/openssl.h"
@@ -42,7 +44,7 @@
 #include "rtc_base/openssl_identity.h"
 #endif
 #include "rtc_base/openssl_utility.h"
-#include "rtc_base/string_encode.h"
+#include "rtc_base/strings/string_builder.h"
 #include "rtc_base/thread.h"
 //////////////////////////////////////////////////////////////////////
@@ -164,6 +166,25 @@ static void LogSslError() {
 namespace rtc {
+
+using ::webrtc::TimeDelta;
+
+namespace webrtc_openssl_adapter_internal {
+
+// Simple O(n^2) implementation is sufficient for current use case.
+std::string StrJoin(const std::vector& list, char delimiter) { + RTC_CHECK(!list.empty()); + StringBuilder sb; + sb << list[0]; + for (size_t i = 1; i < list.size(); i++) { + sb.AppendFormat("%c", delimiter); + sb << list[i]; + } + return sb.Release(); +} +} // namespace webrtc_openssl_adapter_internal + +using webrtc_openssl_adapter_internal::StrJoin; + bool OpenSSLAdapter::InitializeSSL() { if (!SSL_library_init()) return false; @@ -250,11 +271,11 @@ void OpenSSLAdapter::SetRole(SSLRole role) { role_ = role; } -int OpenSSLAdapter::StartSSL(const char* hostname) { +int OpenSSLAdapter::StartSSL(absl::string_view hostname) { if (state_ != SSL_NONE) return -1; - ssl_host_name_ = hostname; + ssl_host_name_.assign(hostname.data(), hostname.size()); if (GetSocket()->GetState() != Socket::CS_CONNECTED) { state_ = SSL_WAIT; @@ -352,7 +373,7 @@ int OpenSSLAdapter::BeginSSL() { } if (!elliptic_curves_.empty()) { - SSL_set1_curves_list(ssl_, rtc::join(elliptic_curves_, ':').c_str()); + SSL_set1_curves_list(ssl_, StrJoin(elliptic_curves_, ':').c_str()); } // Now that the initial config is done, transfer ownership of `bio` to the @@ -373,7 +394,7 @@ int OpenSSLAdapter::ContinueSSL() { RTC_DCHECK(state_ == SSL_CONNECTING); // Clear the DTLS timer - Thread::Current()->Clear(this, MSG_TIMEOUT); + timer_.reset(); int code = (role_ == SSL_CLIENT) ? SSL_connect(ssl_) : SSL_accept(ssl_); switch (SSL_get_error(ssl_, code)) { @@ -401,10 +422,10 @@ int OpenSSLAdapter::ContinueSSL() { RTC_LOG(LS_VERBOSE) << " -- error want read"; struct timeval timeout; if (DTLSv1_get_timeout(ssl_, &timeout)) { - int delay = timeout.tv_sec * 1000 + timeout.tv_usec / 1000; - - Thread::Current()->PostDelayed(RTC_FROM_HERE, delay, this, MSG_TIMEOUT, - 0); + TimeDelta delay = TimeDelta::Seconds(timeout.tv_sec) + + TimeDelta::Micros(timeout.tv_usec); + Thread::Current()->PostDelayedTask( + SafeTask(timer_.flag(), [this] { OnTimeout(); }), delay); } break; @@ -420,7 +441,7 @@ int OpenSSLAdapter::ContinueSSL() { return 0; } -void OpenSSLAdapter::Error(const char* context, int err, bool signal) { +void OpenSSLAdapter::Error(absl::string_view context, int err, bool signal) { RTC_LOG(LS_WARNING) << "OpenSSLAdapter::Error(" << context << ", " << err << ")"; state_ = SSL_ERROR; @@ -451,7 +472,7 @@ void OpenSSLAdapter::Cleanup() { identity_.reset(); // Clear the DTLS timer - Thread::Current()->Clear(this, MSG_TIMEOUT); + timer_.reset(); } int OpenSSLAdapter::DoSslWrite(const void* pv, size_t cb, int* error) { @@ -654,12 +675,10 @@ bool OpenSSLAdapter::IsResumedSession() { return (ssl_ && SSL_session_reused(ssl_) == 1); } -void OpenSSLAdapter::OnMessage(Message* msg) { - if (MSG_TIMEOUT == msg->message_id) { - RTC_LOG(LS_INFO) << "DTLS timeout expired"; - DTLSv1_handle_timeout(ssl_); - ContinueSSL(); - } +void OpenSSLAdapter::OnTimeout() { + RTC_LOG(LS_INFO) << "DTLS timeout expired"; + DTLSv1_handle_timeout(ssl_); + ContinueSSL(); } void OpenSSLAdapter::OnConnectEvent(Socket* socket) { @@ -744,7 +763,7 @@ void OpenSSLAdapter::OnCloseEvent(Socket* socket, int err) { AsyncSocketAdapter::OnCloseEvent(socket, err); } -bool OpenSSLAdapter::SSLPostConnectionCheck(SSL* ssl, const std::string& host) { +bool OpenSSLAdapter::SSLPostConnectionCheck(SSL* ssl, absl::string_view host) { bool is_valid_cert_name = openssl::VerifyPeerCertMatchesHost(ssl, host) && (SSL_get_verify_result(ssl) == X509_V_OK || custom_cert_verifier_status_); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.h 
b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.h index 7e1f87b8ab..3ce33e1f5b 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.h @@ -19,8 +19,9 @@ #include #include +#include "absl/strings/string_view.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "rtc_base/buffer.h" -#include "rtc_base/message_handler.h" #ifdef OPENSSL_IS_BORINGSSL #include "rtc_base/boringssl_identity.h" #else @@ -36,8 +37,15 @@ namespace rtc { -class OpenSSLAdapter final : public SSLAdapter, - public MessageHandlerAutoCleanup { +namespace webrtc_openssl_adapter_internal { + +// Local definition, since absl::StrJoin is not allow-listed. Declared in header +// file only for unittests. +std::string StrJoin(const std::vector& list, char delimiter); + +} // namespace webrtc_openssl_adapter_internal + +class OpenSSLAdapter final : public SSLAdapter { public: static bool InitializeSSL(); static bool CleanupSSL(); @@ -60,7 +68,7 @@ class OpenSSLAdapter final : public SSLAdapter, void SetCertVerifier(SSLCertificateVerifier* ssl_cert_verifier) override; void SetIdentity(std::unique_ptr identity) override; void SetRole(SSLRole role) override; - int StartSSL(const char* hostname) override; + int StartSSL(absl::string_view hostname) override; int Send(const void* pv, size_t cb) override; int SendTo(const void* pv, size_t cb, const SocketAddress& addr) override; int Recv(void* pv, size_t cb, int64_t* timestamp) override; @@ -105,18 +113,16 @@ class OpenSSLAdapter final : public SSLAdapter, SSL_ERROR }; - enum { MSG_TIMEOUT }; - int BeginSSL(); int ContinueSSL(); - void Error(const char* context, int err, bool signal = true); + void Error(absl::string_view context, int err, bool signal = true); void Cleanup(); + void OnTimeout(); // Return value and arguments have the same meanings as for Send; `error` is // an output parameter filled with the result of SSL_get_error. int DoSslWrite(const void* pv, size_t cb, int* error); - void OnMessage(Message* msg) override; - bool SSLPostConnectionCheck(SSL* ssl, const std::string& host); + bool SSLPostConnectionCheck(SSL* ssl, absl::string_view host); #if !defined(NDEBUG) // In debug builds, logs info about the state of the SSL connection. @@ -176,6 +182,8 @@ class OpenSSLAdapter final : public SSLAdapter, std::vector elliptic_curves_; // Holds the result of the call to run of the ssl_cert_verify_->Verify() bool custom_cert_verifier_status_; + // Flag to cancel pending timeout task. 
+ webrtc::ScopedTaskSafety timer_; }; // The OpenSSLAdapterFactory is responsbile for creating multiple new diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.cc index 802787dcfb..faed72b4db 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.cc @@ -144,8 +144,8 @@ std::unique_ptr OpenSSLCertificate::Generate( } std::unique_ptr OpenSSLCertificate::FromPEMString( - const std::string& pem_string) { - BIO* bio = BIO_new_mem_buf(const_cast(pem_string.c_str()), -1); + absl::string_view pem_string) { + BIO* bio = BIO_new_mem_buf(const_cast(pem_string.data()), -1); if (!bio) { return nullptr; } @@ -208,7 +208,7 @@ bool OpenSSLCertificate::GetSignatureDigestAlgorithm( return true; } -bool OpenSSLCertificate::ComputeDigest(const std::string& algorithm, +bool OpenSSLCertificate::ComputeDigest(absl::string_view algorithm, unsigned char* digest, size_t size, size_t* length) const { @@ -216,7 +216,7 @@ bool OpenSSLCertificate::ComputeDigest(const std::string& algorithm, } bool OpenSSLCertificate::ComputeDigest(const X509* x509, - const std::string& algorithm, + absl::string_view algorithm, unsigned char* digest, size_t size, size_t* length) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.h index c317a72110..3f1b8c82f9 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.h @@ -18,7 +18,6 @@ #include #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" @@ -38,10 +37,13 @@ class OpenSSLCertificate final : public SSLCertificate { OpenSSLKeyPair* key_pair, const SSLIdentityParams& params); static std::unique_ptr FromPEMString( - const std::string& pem_string); + absl::string_view pem_string); ~OpenSSLCertificate() override; + OpenSSLCertificate(const OpenSSLCertificate&) = delete; + OpenSSLCertificate& operator=(const OpenSSLCertificate&) = delete; + std::unique_ptr Clone() const override; X509* x509() const { return x509_; } @@ -52,14 +54,14 @@ class OpenSSLCertificate final : public SSLCertificate { bool operator!=(const OpenSSLCertificate& other) const; // Compute the digest of the certificate given algorithm - bool ComputeDigest(const std::string& algorithm, + bool ComputeDigest(absl::string_view algorithm, unsigned char* digest, size_t size, size_t* length) const override; // Compute the digest of a certificate as an X509 * static bool ComputeDigest(const X509* x509, - const std::string& algorithm, + absl::string_view algorithm, unsigned char* digest, size_t size, size_t* length); @@ -70,7 +72,6 @@ class OpenSSLCertificate final : public SSLCertificate { private: X509* x509_; // NOT OWNED - RTC_DISALLOW_COPY_AND_ASSIGN(OpenSSLCertificate); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_digest.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_digest.cc index 1cf5bc09b4..bbf39570f6 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_digest.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_digest.cc @@ -10,12 +10,13 @@ #include "rtc_base/openssl_digest.h" +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" // RTC_DCHECK, RTC_CHECK #include "rtc_base/openssl.h" namespace rtc { 
-OpenSSLDigest::OpenSSLDigest(const std::string& algorithm) { +OpenSSLDigest::OpenSSLDigest(absl::string_view algorithm) { ctx_ = EVP_MD_CTX_new(); RTC_CHECK(ctx_ != nullptr); EVP_MD_CTX_init(ctx_); @@ -55,7 +56,7 @@ size_t OpenSSLDigest::Finish(void* buf, size_t len) { return md_len; } -bool OpenSSLDigest::GetDigestEVP(const std::string& algorithm, +bool OpenSSLDigest::GetDigestEVP(absl::string_view algorithm, const EVP_MD** mdp) { const EVP_MD* md; if (algorithm == DIGEST_MD5) { @@ -105,8 +106,7 @@ bool OpenSSLDigest::GetDigestName(const EVP_MD* md, std::string* algorithm) { return true; } -bool OpenSSLDigest::GetDigestSize(const std::string& algorithm, - size_t* length) { +bool OpenSSLDigest::GetDigestSize(absl::string_view algorithm, size_t* length) { const EVP_MD* md; if (!GetDigestEVP(algorithm, &md)) return false; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_digest.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_digest.h index 6da01a0ded..c6cc3bb86d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_digest.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_digest.h @@ -16,6 +16,7 @@ #include +#include "absl/strings/string_view.h" #include "rtc_base/message_digest.h" namespace rtc { @@ -24,7 +25,7 @@ namespace rtc { class OpenSSLDigest final : public MessageDigest { public: // Creates an OpenSSLDigest with `algorithm` as the hash algorithm. - explicit OpenSSLDigest(const std::string& algorithm); + explicit OpenSSLDigest(absl::string_view algorithm); ~OpenSSLDigest() override; // Returns the digest output size (e.g. 16 bytes for MD5). size_t Size() const override; @@ -34,11 +35,11 @@ class OpenSSLDigest final : public MessageDigest { size_t Finish(void* buf, size_t len) override; // Helper function to look up a digest's EVP by name. - static bool GetDigestEVP(const std::string& algorithm, const EVP_MD** md); + static bool GetDigestEVP(absl::string_view algorithm, const EVP_MD** md); // Helper function to look up a digest's name by EVP. static bool GetDigestName(const EVP_MD* md, std::string* algorithm); // Helper function to get the length of a digest. 
- static bool GetDigestSize(const std::string& algorithm, size_t* len); + static bool GetDigestSize(absl::string_view algorithm, size_t* len); private: EVP_MD_CTX* ctx_ = nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.cc index 3794d981ce..186497836d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.cc @@ -70,12 +70,12 @@ std::unique_ptr OpenSSLIdentity::CreateInternal( // static std::unique_ptr OpenSSLIdentity::CreateWithExpiration( - const std::string& common_name, + absl::string_view common_name, const KeyParams& key_params, time_t certificate_lifetime) { SSLIdentityParams params; params.key_params = key_params; - params.common_name = common_name; + params.common_name = std::string(common_name); time_t now = time(nullptr); params.not_before = now + kCertificateWindowInSeconds; params.not_after = now + certificate_lifetime; @@ -90,8 +90,8 @@ std::unique_ptr OpenSSLIdentity::CreateForTest( } std::unique_ptr OpenSSLIdentity::CreateFromPEMStrings( - const std::string& private_key, - const std::string& certificate) { + absl::string_view private_key, + absl::string_view certificate) { std::unique_ptr cert( OpenSSLCertificate::FromPEMString(certificate)); if (!cert) { @@ -110,8 +110,8 @@ std::unique_ptr OpenSSLIdentity::CreateFromPEMStrings( } std::unique_ptr OpenSSLIdentity::CreateFromPEMChainStrings( - const std::string& private_key, - const std::string& certificate_chain) { + absl::string_view private_key, + absl::string_view certificate_chain) { BIO* bio = BIO_new_mem_buf(certificate_chain.data(), rtc::dchecked_cast(certificate_chain.size())); if (!bio) diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.h index 00d6c74922..a7372109c3 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.h @@ -17,7 +17,6 @@ #include #include -#include "rtc_base/constructor_magic.h" #include "rtc_base/openssl_certificate.h" #include "rtc_base/openssl_key_pair.h" #include "rtc_base/ssl_certificate.h" @@ -30,19 +29,22 @@ namespace rtc { class OpenSSLIdentity final : public SSLIdentity { public: static std::unique_ptr CreateWithExpiration( - const std::string& common_name, + absl::string_view common_name, const KeyParams& key_params, time_t certificate_lifetime); static std::unique_ptr CreateForTest( const SSLIdentityParams& params); static std::unique_ptr CreateFromPEMStrings( - const std::string& private_key, - const std::string& certificate); + absl::string_view private_key, + absl::string_view certificate); static std::unique_ptr CreateFromPEMChainStrings( - const std::string& private_key, - const std::string& certificate_chain); + absl::string_view private_key, + absl::string_view certificate_chain); ~OpenSSLIdentity() override; + OpenSSLIdentity(const OpenSSLIdentity&) = delete; + OpenSSLIdentity& operator=(const OpenSSLIdentity&) = delete; + const OpenSSLCertificate& certificate() const override; const SSLCertChain& cert_chain() const override; @@ -66,8 +68,6 @@ class OpenSSLIdentity final : public SSLIdentity { std::unique_ptr key_pair_; std::unique_ptr cert_chain_; - - RTC_DISALLOW_COPY_AND_ASSIGN(OpenSSLIdentity); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.cc index 
6ac546e9bb..4c474f2d54 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.cc @@ -13,6 +13,8 @@ #include #include +#include "absl/strings/string_view.h" + #if defined(WEBRTC_WIN) // Must be included first before openssl headers. #include "rtc_base/win32.h" // NOLINT @@ -103,7 +105,7 @@ std::unique_ptr OpenSSLKeyPair::Generate( } std::unique_ptr OpenSSLKeyPair::FromPrivateKeyPEMString( - const std::string& pem_string) { + absl::string_view pem_string) { BIO* bio = BIO_new_mem_buf(const_cast(pem_string.data()), pem_string.size()); if (!bio) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.h index a84c43b6bd..d09bdb0d5e 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.h @@ -16,8 +16,8 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/ssl_identity.h" namespace rtc { @@ -35,10 +35,13 @@ class OpenSSLKeyPair final { // Constructs a key pair from the private key PEM string. This must not result // in missing public key parameters. Returns null on error. static std::unique_ptr FromPrivateKeyPEMString( - const std::string& pem_string); + absl::string_view pem_string); ~OpenSSLKeyPair(); + OpenSSLKeyPair(const OpenSSLKeyPair&) = delete; + OpenSSLKeyPair& operator=(const OpenSSLKeyPair&) = delete; + std::unique_ptr Clone(); EVP_PKEY* pkey() const { return pkey_; } @@ -51,8 +54,6 @@ class OpenSSLKeyPair final { void AddReference(); EVP_PKEY* pkey_; - - RTC_DISALLOW_COPY_AND_ASSIGN(OpenSSLKeyPair); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_session_cache.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_session_cache.cc index f8fcd473dc..d63724242a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_session_cache.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_session_cache.cc @@ -10,6 +10,7 @@ #include "rtc_base/openssl_session_cache.h" +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" #include "rtc_base/openssl.h" @@ -30,16 +31,16 @@ OpenSSLSessionCache::~OpenSSLSessionCache() { } SSL_SESSION* OpenSSLSessionCache::LookupSession( - const std::string& hostname) const { + absl::string_view hostname) const { auto it = sessions_.find(hostname); return (it != sessions_.end()) ? 
it->second : nullptr; } -void OpenSSLSessionCache::AddSession(const std::string& hostname, +void OpenSSLSessionCache::AddSession(absl::string_view hostname, SSL_SESSION* new_session) { SSL_SESSION* old_session = LookupSession(hostname); SSL_SESSION_free(old_session); - sessions_[hostname] = new_session; + sessions_.insert_or_assign(std::string(hostname), new_session); } SSL_CTX* OpenSSLSessionCache::GetSSLContext() const { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_session_cache.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_session_cache.h index b049e64dd6..75d8d9a0cf 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_session_cache.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_session_cache.h @@ -16,8 +16,9 @@ #include #include -#include "rtc_base/constructor_magic.h" +#include "absl/strings/string_view.h" #include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/string_utils.h" #ifndef OPENSSL_IS_BORINGSSL typedef struct ssl_session_st SSL_SESSION; @@ -36,11 +37,15 @@ class OpenSSLSessionCache final { OpenSSLSessionCache(SSLMode ssl_mode, SSL_CTX* ssl_ctx); // Frees the cached SSL_SESSIONS and then frees the SSL_CTX. ~OpenSSLSessionCache(); + + OpenSSLSessionCache(const OpenSSLSessionCache&) = delete; + OpenSSLSessionCache& operator=(const OpenSSLSessionCache&) = delete; + // Looks up a session by hostname. The returned SSL_SESSION is not up_refed. - SSL_SESSION* LookupSession(const std::string& hostname) const; + SSL_SESSION* LookupSession(absl::string_view hostname) const; // Adds a session to the cache, and up_refs it. Any existing session with the // same hostname is replaced. - void AddSession(const std::string& hostname, SSL_SESSION* session); + void AddSession(absl::string_view hostname, SSL_SESSION* session); // Returns the true underlying SSL Context that holds these cached sessions. SSL_CTX* GetSSLContext() const; // The SSL Mode tht the OpenSSLSessionCache was constructed with. This cannot @@ -58,9 +63,8 @@ class OpenSSLSessionCache final { // Map of hostnames to SSL_SESSIONs; holds references to the SSL_SESSIONs, // which are cleaned up when the factory is destroyed. // TODO(juberti): Add LRU eviction to keep the cache from growing forever. - std::map sessions_; + std::map sessions_; // The cache should never be copied or assigned directly. - RTC_DISALLOW_COPY_AND_ASSIGN(OpenSSLSessionCache); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.cc index dd82e4f061..61bf6743d6 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.cc @@ -16,6 +16,8 @@ #include #include #include + +#include "absl/strings/string_view.h" #ifndef OPENSSL_IS_BORINGSSL #include #include @@ -40,7 +42,7 @@ #include "rtc_base/openssl_utility.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/stream.h" -#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/string_encode.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/field_trial.h" @@ -57,6 +59,7 @@ namespace rtc { namespace { +using ::webrtc::SafeTask; // SRTP cipher suite table. `internal_name` is used to construct a // colon-separated profile strings which is needed by // SSL_CTX_set_tlsext_use_srtp(). 
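As a side illustration of the pattern these files keep migrating to (not part of the diff itself): the MessageHandler/MSG_TIMEOUT plumbing is replaced by posting a delayed closure wrapped in webrtc::SafeTask and guarded by a webrtc::ScopedTaskSafety flag, so resetting the flag cancels any pending timeout. A minimal sketch follows; TimeoutOwner, Schedule(), Cancel() and OnTimeout() are invented names for illustration only.

#include "api/task_queue/pending_task_safety_flag.h"
#include "api/units/time_delta.h"
#include "rtc_base/logging.h"
#include "rtc_base/thread.h"

// Hypothetical owner of a retransmission timer, mirroring how OpenSSLAdapter
// now schedules and cancels its DTLS timeout via ScopedTaskSafety.
class TimeoutOwner {
 public:
  void Schedule(webrtc::TimeDelta delay) {
    // The closure runs only if the safety flag is still alive when the delay
    // expires; otherwise the posted task is silently dropped.
    rtc::Thread::Current()->PostDelayedTask(
        webrtc::SafeTask(safety_.flag(), [this] { OnTimeout(); }), delay);
  }

  void Cancel() {
    // Swaps in a fresh flag; tasks posted against the old flag become no-ops.
    safety_.reset();
  }

 private:
  void OnTimeout() { RTC_LOG(LS_INFO) << "timeout fired"; }

  webrtc::ScopedTaskSafety safety_;
};

This flag-based cancellation is what replaces Thread::Clear(this, MSG_TIMEOUT) in ContinueSSL() and Cleanup() above.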
@@ -327,7 +330,7 @@ void OpenSSLStreamAdapter::SetServerRole(SSLRole role) { } bool OpenSSLStreamAdapter::SetPeerCertificateDigest( - const std::string& digest_alg, + absl::string_view digest_alg, const unsigned char* digest_val, size_t digest_len, SSLPeerCertificateDigestError* error) { @@ -353,7 +356,7 @@ bool OpenSSLStreamAdapter::SetPeerCertificateDigest( } peer_certificate_digest_value_.SetData(digest_val, digest_len); - peer_certificate_digest_algorithm_ = digest_alg; + peer_certificate_digest_algorithm_ = std::string(digest_alg); if (!peer_cert_chain_) { // Normal case, where the digest is set before we obtain the certificate @@ -387,9 +390,10 @@ std::string OpenSSLStreamAdapter::SslCipherSuiteToName(int cipher_suite) { } return SSL_CIPHER_standard_name(ssl_cipher); #else + const int openssl_cipher_id = 0x03000000L | cipher_suite; for (const SslCipherMapEntry* entry = kSslCipherMap; entry->rfc_name; ++entry) { - if (cipher_suite == static_cast(entry->openssl_id)) { + if (openssl_cipher_id == static_cast(entry->openssl_id)) { return entry->rfc_name; } } @@ -445,15 +449,15 @@ bool OpenSSLStreamAdapter::GetSslVersionBytes(int* version) const { } // Key Extractor interface -bool OpenSSLStreamAdapter::ExportKeyingMaterial(const std::string& label, +bool OpenSSLStreamAdapter::ExportKeyingMaterial(absl::string_view label, const uint8_t* context, size_t context_len, bool use_context, uint8_t* result, size_t result_len) { - if (SSL_export_keying_material(ssl_, result, result_len, label.c_str(), - label.length(), const_cast(context), - context_len, use_context) != 1) { + if (SSL_export_keying_material(ssl_, result, result_len, label.data(), + label.length(), context, context_len, + use_context) != 1) { return false; } return true; @@ -818,8 +822,9 @@ void OpenSSLStreamAdapter::OnEvent(StreamInterface* stream, } void OpenSSLStreamAdapter::PostEvent(int events, int err) { - owner_->PostTask(webrtc::ToQueuedTask( - task_safety_, [this, events, err]() { SignalEvent(this, events, err); })); + owner_->PostTask(SafeTask(task_safety_.flag(), [this, events, err]() { + SignalEvent(this, events, err); + })); } void OpenSSLStreamAdapter::SetTimeout(int delay_ms) { @@ -839,6 +844,8 @@ void OpenSSLStreamAdapter::SetTimeout(int delay_ms) { RTC_LOG(LS_INFO) << "DTLS retransmission"; } else if (res < 0) { RTC_LOG(LS_INFO) << "DTLSv1_handle_timeout() return -1"; + Error("DTLSv1_handle_timeout", res, -1, true); + return webrtc::TimeDelta::PlusInfinity(); } ContinueSSL(); } else { @@ -954,7 +961,7 @@ int OpenSSLStreamAdapter::ContinueSSL() { return 0; } -void OpenSSLStreamAdapter::Error(const char* context, +void OpenSSLStreamAdapter::Error(absl::string_view context, int err, uint8_t alert, bool signal) { @@ -1127,7 +1134,10 @@ bool OpenSSLStreamAdapter::VerifyPeerCertificate() { Buffer computed_digest(digest, digest_length); if (computed_digest != peer_certificate_digest_value_) { RTC_LOG(LS_WARNING) - << "Rejected peer certificate due to mismatched digest."; + << "Rejected peer certificate due to mismatched digest using " + << peer_certificate_digest_algorithm_ << ". 
Expected " + << rtc::hex_encode_with_delimiter(peer_certificate_digest_value_, ':') + << " got " << rtc::hex_encode_with_delimiter(computed_digest, ':'); return false; } // Ignore any verification error if the digest matches, since there is no @@ -1263,7 +1273,7 @@ bool OpenSSLStreamAdapter::IsAcceptableCipher(int cipher, KeyType key_type) { return false; } -bool OpenSSLStreamAdapter::IsAcceptableCipher(const std::string& cipher, +bool OpenSSLStreamAdapter::IsAcceptableCipher(absl::string_view cipher, KeyType key_type) { if (key_type == KT_RSA) { for (const cipher_list& c : OK_RSA_ciphers) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.h index 236bdfdfea..891f0e6193 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.h @@ -19,6 +19,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "rtc_base/buffer.h" #ifdef OPENSSL_IS_BORINGSSL @@ -26,11 +27,11 @@ #else #include "rtc_base/openssl_identity.h" #endif +#include "api/task_queue/pending_task_safety_flag.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/stream.h" #include "rtc_base/system/rtc_export.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/task_utils/repeating_task.h" namespace rtc { @@ -80,7 +81,7 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { // Default argument is for compatibility void SetServerRole(SSLRole role = SSL_SERVER) override; bool SetPeerCertificateDigest( - const std::string& digest_alg, + absl::string_view digest_alg, const unsigned char* digest_val, size_t digest_len, SSLPeerCertificateDigestError* error = nullptr) override; @@ -113,7 +114,7 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { SSLProtocolVersion GetSslVersion() const override; bool GetSslVersionBytes(int* version) const override; // Key Extractor interface - bool ExportKeyingMaterial(const std::string& label, + bool ExportKeyingMaterial(absl::string_view label, const uint8_t* context, size_t context_len, bool use_context, @@ -130,7 +131,7 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { static bool IsBoringSsl(); static bool IsAcceptableCipher(int cipher, KeyType key_type); - static bool IsAcceptableCipher(const std::string& cipher, KeyType key_type); + static bool IsAcceptableCipher(absl::string_view cipher, KeyType key_type); // Use our timeutils.h source of timing in BoringSSL, allowing us to test // using a fake clock. @@ -172,7 +173,7 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { // `alert` indicates an alert description (one of the SSL_AD constants) to // send to the remote endpoint when closing the association. If 0, a normal // shutdown will be performed. 
- void Error(const char* context, int err, uint8_t alert, bool signal); + void Error(absl::string_view context, int err, uint8_t alert, bool signal); void Cleanup(uint8_t alert); // Flush the input buffers by reading left bytes (for DTLS) diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.cc index b5d649ca51..eba3788a94 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.cc @@ -9,6 +9,8 @@ */ #include "rtc_base/openssl_utility.h" + +#include "absl/strings/string_view.h" #if defined(WEBRTC_WIN) // Must be included first before openssl headers. #include "rtc_base/win32.h" // NOLINT @@ -184,7 +186,7 @@ bool ParseCertificate(CRYPTO_BUFFER* cert_buffer, } #endif // OPENSSL_IS_BORINGSSL -bool VerifyPeerCertMatchesHost(SSL* ssl, const std::string& host) { +bool VerifyPeerCertMatchesHost(SSL* ssl, absl::string_view host) { if (host.empty()) { RTC_DLOG(LS_ERROR) << "Hostname is empty. Cannot verify peer certificate."; return false; @@ -211,8 +213,7 @@ bool VerifyPeerCertMatchesHost(SSL* ssl, const std::string& host) { return false; } LogCertificates(ssl, x509.get()); - return X509_check_host(x509.get(), host.c_str(), host.size(), 0, nullptr) == - 1; + return X509_check_host(x509.get(), host.data(), host.size(), 0, nullptr) == 1; #else // OPENSSL_IS_BORINGSSL X509* certificate = SSL_get_peer_certificate(ssl); if (certificate == nullptr) { @@ -224,13 +225,13 @@ bool VerifyPeerCertMatchesHost(SSL* ssl, const std::string& host) { LogCertificates(ssl, certificate); bool is_valid_cert_name = - X509_check_host(certificate, host.c_str(), host.size(), 0, nullptr) == 1; + X509_check_host(certificate, host.data(), host.size(), 0, nullptr) == 1; X509_free(certificate); return is_valid_cert_name; #endif // !defined(OPENSSL_IS_BORINGSSL) } -void LogSSLErrors(const std::string& prefix) { +void LogSSLErrors(absl::string_view prefix) { char error_buf[200]; unsigned long err; // NOLINT diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.h index ee29ccd602..dd183c283a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.h @@ -15,6 +15,8 @@ #include +#include "absl/strings/string_view.h" + namespace rtc { // The openssl namespace holds static helper methods. All methods related // to OpenSSL that are commonly used and don't require global state should be @@ -35,11 +37,11 @@ bool ParseCertificate(CRYPTO_BUFFER* cert_buffer, // TODO(crbug.com/webrtc/11710): When OS certificate verification is available, // skip compiling this as it adds a dependency on OpenSSL X509 objects, which we // are trying to avoid in favor of CRYPTO_BUFFERs (see crbug.com/webrtc/11410). -bool VerifyPeerCertMatchesHost(SSL* ssl, const std::string& host); +bool VerifyPeerCertMatchesHost(SSL* ssl, absl::string_view host); // Logs all the errors in the OpenSSL errror queue from the current thread. A // prefix can be provided for context. -void LogSSLErrors(const std::string& prefix); +void LogSSLErrors(absl::string_view prefix); #ifndef WEBRTC_EXCLUDE_BUILT_IN_SSL_ROOT_CERTS // Attempt to add the certificates from the loader into the SSL_CTX. 
False is diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.cc index 59d30d350e..4398bb16c1 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.cc @@ -10,6 +10,7 @@ #include "rtc_base/operations_chain.h" +#include "api/make_ref_counted.h" #include "rtc_base/checks.h" namespace rtc { @@ -37,10 +38,11 @@ void OperationsChain::CallbackHandle::OnOperationComplete() { // static scoped_refptr OperationsChain::Create() { - return new OperationsChain(); + // Explicit new, to access private constructor. + return rtc::scoped_refptr(new OperationsChain()); } -OperationsChain::OperationsChain() : RefCountedObject() { +OperationsChain::OperationsChain() { RTC_DCHECK_RUN_ON(&sequence_checker_); } @@ -63,8 +65,10 @@ bool OperationsChain::IsEmpty() const { } std::function OperationsChain::CreateOperationsChainCallback() { - return [handle = rtc::scoped_refptr( - new CallbackHandle(this))]() { handle->OnOperationComplete(); }; + return [handle = rtc::make_ref_counted( + rtc::scoped_refptr(this))]() { + handle->OnOperationComplete(); + }; } void OperationsChain::OnOperationComplete() { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h b/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h index 7823f6e238..0e8c0681ba 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h @@ -19,10 +19,10 @@ #include #include "absl/types/optional.h" +#include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/ref_count.h" #include "rtc_base/ref_counted_object.h" #include "rtc_base/system/no_unique_address.h" @@ -113,11 +113,14 @@ class OperationWithFunctor final : public Operation { // The OperationsChain is kept-alive through reference counting if there are // operations pending. This, together with the contract, guarantees that all // operations that are chained get executed. -class OperationsChain final : public RefCountedObject { +class OperationsChain final : public RefCountedNonVirtual { public: static scoped_refptr Create(); ~OperationsChain(); + OperationsChain(const OperationsChain&) = delete; + OperationsChain& operator=(const OperationsChain&) = delete; + void SetOnChainEmptyCallback(std::function on_chain_empty_callback); bool IsEmpty() const; @@ -163,11 +166,14 @@ class OperationsChain final : public RefCountedObject { // std::function, which is a copyable type. To allow the callback to // be copyable, it is backed up by this reference counted handle. See // CreateOperationsChainCallback(). 
- class CallbackHandle final : public RefCountedObject { + class CallbackHandle final : public RefCountedNonVirtual { public: explicit CallbackHandle(scoped_refptr operations_chain); ~CallbackHandle(); + CallbackHandle(const CallbackHandle&) = delete; + CallbackHandle& operator=(const CallbackHandle&) = delete; + void OnOperationComplete(); private: @@ -175,8 +181,6 @@ class OperationsChain final : public RefCountedObject { #if RTC_DCHECK_IS_ON bool has_run_ = false; #endif // RTC_DCHECK_IS_ON - - RTC_DISALLOW_COPY_AND_ASSIGN(CallbackHandle); }; OperationsChain(); @@ -192,8 +196,6 @@ class OperationsChain final : public RefCountedObject { chained_operations_ RTC_GUARDED_BY(sequence_checker_); absl::optional> on_chain_empty_callback_ RTC_GUARDED_BY(sequence_checker_); - - RTC_DISALLOW_COPY_AND_ASSIGN(OperationsChain); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.cc index 33ebb69e2d..7c01815d30 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.cc @@ -9,6 +9,8 @@ */ #include "rtc_base/physical_socket_server.h" +#include + #if defined(_MSC_VER) && _MSC_VER < 1300 #pragma warning(disable : 4786) #endif @@ -1164,12 +1166,20 @@ void PhysicalSocketServer::Update(Dispatcher* pdispatcher) { #endif } +int PhysicalSocketServer::ToCmsWait(webrtc::TimeDelta max_wait_duration) { + return max_wait_duration == Event::kForever + ? kForeverMs + : max_wait_duration.RoundUpTo(webrtc::TimeDelta::Millis(1)).ms(); +} + #if defined(WEBRTC_POSIX) -bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { +bool PhysicalSocketServer::Wait(webrtc::TimeDelta max_wait_duration, + bool process_io) { // We don't support reentrant waiting. RTC_DCHECK(!waiting_); ScopedSetTrue s(&waiting_); + const int cmsWait = ToCmsWait(max_wait_duration); #if defined(WEBRTC_USE_EPOLL) // We don't keep a dedicated "epoll" descriptor containing only the non-IO // (i.e. signaling) dispatcher, so "poll" will be used instead of the default @@ -1256,7 +1266,7 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) { struct timeval* ptvWait = nullptr; struct timeval tvWait; int64_t stop_us; - if (cmsWait != kForever) { + if (cmsWait != kForeverMs) { // Calculate wait timeval tvWait.tv_sec = cmsWait / 1000; tvWait.tv_usec = (cmsWait % 1000) * 1000; @@ -1266,7 +1276,6 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) { stop_us = rtc::TimeMicros() + cmsWait * 1000; } - fd_set fdsRead; fd_set fdsWrite; // Explicitly unpoison these FDs on MemorySanitizer which doesn't handle the @@ -1454,7 +1463,7 @@ bool PhysicalSocketServer::WaitEpoll(int cmsWait) { RTC_DCHECK(epoll_fd_ != INVALID_SOCKET); int64_t tvWait = -1; int64_t tvStop = -1; - if (cmsWait != kForever) { + if (cmsWait != kForeverMs) { tvWait = cmsWait; tvStop = TimeAfter(cmsWait); } @@ -1499,7 +1508,7 @@ bool PhysicalSocketServer::WaitEpoll(int cmsWait) { } } - if (cmsWait != kForever) { + if (cmsWait != kForeverMs) { tvWait = TimeDiff(tvStop, TimeMillis()); if (tvWait <= 0) { // Return success on timeout. 
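As a rough caller-side illustration (not part of the diff): SocketServer::Wait() now takes a webrtc::TimeDelta instead of an int millisecond count, and the new ToCmsWait() helper translates it back to the legacy kForeverMs (-1) sentinel for the select/poll/epoll backends. A minimal sketch, with PumpOnce and ss invented for illustration:

#include "api/units/time_delta.h"
#include "rtc_base/event.h"
#include "rtc_base/physical_socket_server.h"

// Drives one iteration of a socket-server loop with the TimeDelta-based Wait().
void PumpOnce(rtc::PhysicalSocketServer& ss) {
  // Bounded wait: block for at most 100 ms while processing I/O.
  ss.Wait(webrtc::TimeDelta::Millis(100), /*process_io=*/true);

  // Unbounded wait: rtc::Event::kForever is the TimeDelta value that
  // ToCmsWait() maps back to kForeverMs internally.
  ss.Wait(rtc::Event::kForever, /*process_io=*/true);
}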
@@ -1515,7 +1524,7 @@ bool PhysicalSocketServer::WaitPoll(int cmsWait, Dispatcher* dispatcher) { RTC_DCHECK(dispatcher); int64_t tvWait = -1; int64_t tvStop = -1; - if (cmsWait != kForever) { + if (cmsWait != kForeverMs) { tvWait = cmsWait; tvStop = TimeAfter(cmsWait); } @@ -1566,7 +1575,7 @@ bool PhysicalSocketServer::WaitPoll(int cmsWait, Dispatcher* dispatcher) { ProcessEvents(dispatcher, readable, writable, error, error); } - if (cmsWait != kForever) { + if (cmsWait != kForeverMs) { tvWait = TimeDiff(tvStop, TimeMillis()); if (tvWait < 0) { // Return success on timeout. @@ -1583,11 +1592,13 @@ bool PhysicalSocketServer::WaitPoll(int cmsWait, Dispatcher* dispatcher) { #endif // WEBRTC_POSIX #if defined(WEBRTC_WIN) -bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { +bool PhysicalSocketServer::Wait(webrtc::TimeDelta max_wait_duration, + bool process_io) { // We don't support reentrant waiting. RTC_DCHECK(!waiting_); ScopedSetTrue s(&waiting_); + int cmsWait = ToCmsWait(max_wait_duration); int64_t cmsTotal = cmsWait; int64_t cmsElapsed = 0; int64_t msStart = Time(); @@ -1634,7 +1645,7 @@ bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { // Which is shorter, the delay wait or the asked wait? int64_t cmsNext; - if (cmsWait == kForever) { + if (cmsWait == kForeverMs) { cmsNext = cmsWait; } else { cmsNext = std::max(0, cmsTotal - cmsElapsed); @@ -1750,7 +1761,7 @@ bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { if (!fWait_) break; cmsElapsed = TimeSince(msStart); - if ((cmsWait != kForever) && (cmsElapsed >= cmsWait)) { + if ((cmsWait != kForeverMs) && (cmsElapsed >= cmsWait)) { break; } } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.h index a01229d593..f97271f422 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.h @@ -11,6 +11,7 @@ #ifndef RTC_BASE_PHYSICAL_SOCKET_SERVER_H_ #define RTC_BASE_PHYSICAL_SOCKET_SERVER_H_ +#include "api/units/time_delta.h" #if defined(WEBRTC_POSIX) && defined(WEBRTC_LINUX) #include #define WEBRTC_USE_EPOLL 1 @@ -74,7 +75,7 @@ class RTC_EXPORT PhysicalSocketServer : public SocketServer { virtual Socket* WrapSocket(SOCKET s); // SocketServer: - bool Wait(int cms, bool process_io) override; + bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) override; void WakeUp() override; void Add(Dispatcher* dispatcher); @@ -84,16 +85,19 @@ class RTC_EXPORT PhysicalSocketServer : public SocketServer { private: // The number of events to process with one call to "epoll_wait". static constexpr size_t kNumEpollEvents = 128; + // A local historical definition of "foreverness", in milliseconds. + static constexpr int kForeverMs = -1; + static int ToCmsWait(webrtc::TimeDelta max_wait_duration); #if defined(WEBRTC_POSIX) - bool WaitSelect(int cms, bool process_io); + bool WaitSelect(int cmsWait, bool process_io); #endif // WEBRTC_POSIX #if defined(WEBRTC_USE_EPOLL) void AddEpoll(Dispatcher* dispatcher, uint64_t key); void RemoveEpoll(Dispatcher* dispatcher); void UpdateEpoll(Dispatcher* dispatcher, uint64_t key); - bool WaitEpoll(int cms); - bool WaitPoll(int cms, Dispatcher* dispatcher); + bool WaitEpoll(int cmsWait); + bool WaitPoll(int cmsWait, Dispatcher* dispatcher); // This array is accessed in isolation by a thread calling into Wait(). 
// It's useless to use a SequenceChecker to guard it because a socket diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/platform_thread.h b/TMessagesProj/jni/voip/webrtc/rtc_base/platform_thread.h index 2c82c02455..3ab2761f43 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/platform_thread.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/platform_thread.h @@ -55,6 +55,10 @@ class PlatformThread final { // removed. PlatformThread(PlatformThread&& rhs); + // Copies won't work since we'd have problems with joinable threads. + PlatformThread(const PlatformThread&) = delete; + PlatformThread& operator=(const PlatformThread&) = delete; + // Moves `rhs` into this, storing an empty state in `rhs`. // TODO(bugs.webrtc.org/12727) Look into if default and move support can be // removed. diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/proxy_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/proxy_server.h index 6db0e12897..0b9b655a5e 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/proxy_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/proxy_server.h @@ -15,7 +15,6 @@ #include #include "absl/memory/memory.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/memory/fifo_buffer.h" #include "rtc_base/server_socket_adapters.h" #include "rtc_base/socket.h" @@ -36,6 +35,10 @@ class ProxyBinding : public sigslot::has_slots<> { public: ProxyBinding(AsyncProxyServerSocket* in_socket, Socket* out_socket); ~ProxyBinding() override; + + ProxyBinding(const ProxyBinding&) = delete; + ProxyBinding& operator=(const ProxyBinding&) = delete; + sigslot::signal1 SignalDestroyed; private: @@ -59,7 +62,6 @@ class ProxyBinding : public sigslot::has_slots<> { bool connected_; FifoBuffer out_buffer_; FifoBuffer in_buffer_; - RTC_DISALLOW_COPY_AND_ASSIGN(ProxyBinding); }; class ProxyServer : public sigslot::has_slots<> { @@ -70,6 +72,9 @@ class ProxyServer : public sigslot::has_slots<> { const SocketAddress& ext_ip); ~ProxyServer() override; + ProxyServer(const ProxyServer&) = delete; + ProxyServer& operator=(const ProxyServer&) = delete; + // Returns the address to which the proxy server is bound SocketAddress GetServerAddress(); @@ -82,7 +87,6 @@ class ProxyServer : public sigslot::has_slots<> { SocketAddress ext_ip_; std::unique_ptr server_socket_; std::vector> bindings_; - RTC_DISALLOW_COPY_AND_ASSIGN(ProxyServer); }; // SocksProxyServer is a simple extension of ProxyServer to implement SOCKS. @@ -94,9 +98,11 @@ class SocksProxyServer : public ProxyServer { const SocketAddress& ext_ip) : ProxyServer(int_factory, int_addr, ext_factory, ext_ip) {} + SocksProxyServer(const SocksProxyServer&) = delete; + SocksProxyServer& operator=(const SocksProxyServer&) = delete; + protected: AsyncProxyServerSocket* WrapSocket(Socket* socket) override; - RTC_DISALLOW_COPY_AND_ASSIGN(SocksProxyServer); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/race_checker.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/race_checker.cc index bf9dfdcdaa..f0d4e868c2 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/race_checker.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/race_checker.cc @@ -25,7 +25,9 @@ RaceChecker::RaceChecker() {} bool RaceChecker::Acquire() const { const PlatformThreadRef current_thread = CurrentThreadRef(); // Set new accessing thread if this is a new use. 
- if (access_count_++ == 0) + const int current_access_count = access_count_; + access_count_ = access_count_ + 1; + if (current_access_count == 0) accessing_thread_ = current_thread; // If this is being used concurrently this check will fail for the second // thread entering since it won't set the thread. Recursive use of checked @@ -35,7 +37,7 @@ bool RaceChecker::Acquire() const { } void RaceChecker::Release() const { - --access_count_; + access_count_ = access_count_ - 1; } namespace internal { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ref_counted_object.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ref_counted_object.h index 2a55d863c1..418c3d80cc 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ref_counted_object.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ref_counted_object.h @@ -10,11 +10,7 @@ #ifndef RTC_BASE_REF_COUNTED_OBJECT_H_ #define RTC_BASE_REF_COUNTED_OBJECT_H_ -#include -#include - #include "api/scoped_refptr.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/ref_count.h" #include "rtc_base/ref_counter.h" @@ -25,6 +21,9 @@ class RefCountedObject : public T { public: RefCountedObject() {} + RefCountedObject(const RefCountedObject&) = delete; + RefCountedObject& operator=(const RefCountedObject&) = delete; + template explicit RefCountedObject(P0&& p0) : T(std::forward(p0)) {} @@ -56,18 +55,15 @@ class RefCountedObject : public T { ~RefCountedObject() override {} mutable webrtc::webrtc_impl::RefCounter ref_count_{0}; - - RTC_DISALLOW_COPY_AND_ASSIGN(RefCountedObject); }; template class FinalRefCountedObject final : public T { public: using T::T; - // Until c++17 compilers are allowed not to inherit the default constructors. - // Thus the default constructors are forwarded explicitly. - FinalRefCountedObject() = default; - explicit FinalRefCountedObject(const T& other) : T(other) {} + // Above using declaration propagates a default move constructor + // FinalRefCountedObject(FinalRefCountedObject&& other), but we also need + // move construction from T. explicit FinalRefCountedObject(T&& other) : T(std::move(other)) {} FinalRefCountedObject(const FinalRefCountedObject&) = delete; FinalRefCountedObject& operator=(const FinalRefCountedObject&) = delete; @@ -88,113 +84,6 @@ class FinalRefCountedObject final : public T { mutable webrtc::webrtc_impl::RefCounter ref_count_{0}; }; -// General utilities for constructing a reference counted class and the -// appropriate reference count implementation for that class. -// -// These utilities select either the `RefCountedObject` implementation or -// `FinalRefCountedObject` depending on whether the to-be-shared class is -// derived from the RefCountInterface interface or not (respectively). - -// `make_ref_counted`: -// -// Use this when you want to construct a reference counted object of type T and -// get a `scoped_refptr<>` back. Example: -// -// auto p = make_ref_counted("bar", 123); -// -// For a class that inherits from RefCountInterface, this is equivalent to: -// -// auto p = scoped_refptr(new RefCountedObject("bar", 123)); -// -// If the class does not inherit from RefCountInterface, the example is -// equivalent to: -// -// auto p = scoped_refptr>( -// new FinalRefCountedObject("bar", 123)); -// -// In these cases, `make_ref_counted` reduces the amount of boilerplate code but -// also helps with the most commonly intended usage of RefCountedObject whereby -// methods for reference counting, are virtual and designed to satisfy the need -// of an interface. 
When such a need does not exist, it is more efficient to use -// the `FinalRefCountedObject` template, which does not add the vtable overhead. -// -// Note that in some cases, using RefCountedObject directly may still be what's -// needed. - -// `make_ref_counted` for classes that are convertible to RefCountInterface. -template < - typename T, - typename... Args, - typename std::enable_if::value, - T>::type* = nullptr> -scoped_refptr make_ref_counted(Args&&... args) { - return new RefCountedObject(std::forward(args)...); -} - -// `make_ref_counted` for complete classes that are not convertible to -// RefCountInterface. -template < - typename T, - typename... Args, - typename std::enable_if::value, - T>::type* = nullptr> -scoped_refptr> make_ref_counted(Args&&... args) { - return new FinalRefCountedObject(std::forward(args)...); -} - -// `Ref<>`, `Ref<>::Type` and `Ref<>::Ptr`: -// -// `Ref` is a type declaring utility that is compatible with `make_ref_counted` -// and can be used in classes and methods where it's more convenient (or -// readable) to have the compiler figure out the fully fleshed out type for a -// class rather than spell it out verbatim in all places the type occurs (which -// can mean maintenance work if the class layout changes). -// -// Usage examples: -// -// If you want to declare the parameter type that's always compatible with -// this code: -// -// Bar(make_ref_counted()); -// -// You can use `Ref<>::Ptr` to declare a compatible scoped_refptr type: -// -// void Bar(Ref::Ptr p); -// -// This might be more practically useful in templates though. -// -// In rare cases you might need to be able to declare a parameter that's fully -// compatible with the reference counted T type - and just using T* is not -// enough. To give a code example, we can declare a function, `Foo` that is -// compatible with this code: -// auto p = make_ref_counted(); -// Foo(p.get()); -// -// void Foo(Ref::Type* foo_ptr); -// -// Alternatively this would be: -// void Foo(Foo* foo_ptr); -// or -// void Foo(FinalRefCountedObject* foo_ptr); - -// Declares the approprate reference counted type for T depending on whether -// T is convertible to RefCountInterface or not. -// For classes that are convertible, the type will simply be T. -// For classes that cannot be converted to RefCountInterface, the type will be -// FinalRefCountedObject. 
-// This is most useful for declaring a scoped_refptr instance for a class -// that may or may not implement a virtual reference counted interface: -// * scoped_refptr::Type> my_ptr; -template -struct Ref { - typedef typename std::conditional< - std::is_convertible::value, - T, - FinalRefCountedObject>::type Type; - - typedef scoped_refptr Ptr; -}; - } // namespace rtc #endif // RTC_BASE_REF_COUNTED_OBJECT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rolling_accumulator.h b/TMessagesProj/jni/voip/webrtc/rtc_base/rolling_accumulator.h index 241bd72a11..84d791edd1 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rolling_accumulator.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rolling_accumulator.h @@ -17,7 +17,6 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/numerics/running_statistics.h" namespace rtc { @@ -35,6 +34,9 @@ class RollingAccumulator { } ~RollingAccumulator() {} + RollingAccumulator(const RollingAccumulator&) = delete; + RollingAccumulator& operator=(const RollingAccumulator&) = delete; + size_t max_count() const { return samples_.size(); } size_t count() const { return static_cast(stats_.Size()); } @@ -136,8 +138,6 @@ class RollingAccumulator { mutable T min_; mutable bool min_stale_; std::vector samples_; - - RTC_DISALLOW_COPY_AND_ASSIGN(RollingAccumulator); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.cc index 496b4ac4b4..e0b6b3258e 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.cc @@ -21,7 +21,9 @@ namespace rtc { scoped_refptr RTCCertificate::Create( std::unique_ptr identity) { - return new RTCCertificate(identity.release()); + // Explicit new to access proteced constructor. + return rtc::scoped_refptr( + new RTCCertificate(identity.release())); } RTCCertificate::RTCCertificate(SSLIdentity* identity) : identity_(identity) { @@ -61,7 +63,7 @@ scoped_refptr RTCCertificate::FromPEM( SSLIdentity::CreateFromPEMStrings(pem.private_key(), pem.certificate())); if (!identity) return nullptr; - return new RTCCertificate(identity.release()); + return RTCCertificate::Create(std::move(identity)); } bool RTCCertificate::operator==(const RTCCertificate& certificate) const { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.h b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.h index 0102c4f98c..67c5c29a89 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.h @@ -17,6 +17,7 @@ #include #include "absl/base/attributes.h" +#include "absl/strings/string_view.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "rtc_base/system/rtc_export.h" @@ -35,8 +36,8 @@ class SSLIdentity; // the string representations used by OpenSSL. 
class RTCCertificatePEM { public: - RTCCertificatePEM(const std::string& private_key, - const std::string& certificate) + RTCCertificatePEM(absl::string_view private_key, + absl::string_view certificate) : private_key_(private_key), certificate_(certificate) {} const std::string& private_key() const { return private_key_; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc index 16ff23c740..ffc51aa8da 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc @@ -17,9 +17,6 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/location.h" -#include "rtc_base/message_handler.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/ssl_identity.h" namespace rtc { @@ -72,21 +69,18 @@ RTCCertificateGenerator::RTCCertificateGenerator(Thread* signaling_thread, void RTCCertificateGenerator::GenerateCertificateAsync( const KeyParams& key_params, const absl::optional& expires_ms, - const scoped_refptr& callback) { + RTCCertificateGenerator::Callback callback) { RTC_DCHECK(signaling_thread_->IsCurrent()); RTC_DCHECK(callback); - // Create a new `RTCCertificateGenerationTask` for this generation request. It - // is reference counted and referenced by the message data, ensuring it lives - // until the task has completed (independent of `RTCCertificateGenerator`). - worker_thread_->PostTask(RTC_FROM_HERE, [key_params, expires_ms, - signaling_thread = signaling_thread_, - cb = callback]() { + worker_thread_->PostTask([key_params, expires_ms, + signaling_thread = signaling_thread_, + cb = std::move(callback)]() mutable { scoped_refptr certificate = RTCCertificateGenerator::GenerateCertificate(key_params, expires_ms); signaling_thread->PostTask( - RTC_FROM_HERE, [cert = std::move(certificate), cb = std::move(cb)]() { - cert ? cb->OnSuccess(cert) : cb->OnFailure(); + [cert = std::move(certificate), cb = std::move(cb)]() mutable { + std::move(cb)(std::move(cert)); }); }); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.h b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.h index 065b8b5002..a881f1a369 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.h @@ -13,9 +13,9 @@ #include +#include "absl/functional/any_invocable.h" #include "absl/types/optional.h" #include "api/scoped_refptr.h" -#include "rtc_base/ref_count.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/system/rtc_export.h" @@ -23,21 +23,15 @@ namespace rtc { -// See `RTCCertificateGeneratorInterface::GenerateCertificateAsync`. -class RTCCertificateGeneratorCallback : public RefCountInterface { - public: - virtual void OnSuccess(const scoped_refptr& certificate) = 0; - virtual void OnFailure() = 0; - - protected: - ~RTCCertificateGeneratorCallback() override {} -}; - // Generates `RTCCertificate`s. // See `RTCCertificateGenerator` for the WebRTC repo's implementation. class RTCCertificateGeneratorInterface { public: - virtual ~RTCCertificateGeneratorInterface() {} + // Functor that will be called when certificate is generated asynchroniosly. + // Called with nullptr as the parameter on failure. 
+ using Callback = absl::AnyInvocable) &&>; + + virtual ~RTCCertificateGeneratorInterface() = default; // Generates a certificate asynchronously on the worker thread. // Must be called on the signaling thread. The `callback` is invoked with the @@ -47,7 +41,7 @@ class RTCCertificateGeneratorInterface { virtual void GenerateCertificateAsync( const KeyParams& key_params, const absl::optional& expires_ms, - const scoped_refptr& callback) = 0; + Callback callback) = 0; }; // Standard implementation of `RTCCertificateGeneratorInterface`. @@ -74,10 +68,9 @@ class RTC_EXPORT RTCCertificateGenerator // that many milliseconds from now. `expires_ms` is limited to a year, a // larger value than that is clamped down to a year. If `expires_ms` is not // specified, a default expiration time is used. - void GenerateCertificateAsync( - const KeyParams& key_params, - const absl::optional& expires_ms, - const scoped_refptr& callback) override; + void GenerateCertificateAsync(const KeyParams& key_params, + const absl::optional& expires_ms, + Callback callback) override; private: Thread* const signaling_thread_; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/server_socket_adapters.h b/TMessagesProj/jni/voip/webrtc/rtc_base/server_socket_adapters.h index 07e9636756..b18c7a6a65 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/server_socket_adapters.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/server_socket_adapters.h @@ -31,9 +31,11 @@ class AsyncSSLServerSocket : public BufferedReadAdapter { public: explicit AsyncSSLServerSocket(Socket* socket); + AsyncSSLServerSocket(const AsyncSSLServerSocket&) = delete; + AsyncSSLServerSocket& operator=(const AsyncSSLServerSocket&) = delete; + protected: void ProcessInput(char* data, size_t* len) override; - RTC_DISALLOW_COPY_AND_ASSIGN(AsyncSSLServerSocket); }; // Implements a proxy server socket for the SOCKS protocol. @@ -41,6 +43,10 @@ class AsyncSocksProxyServerSocket : public AsyncProxyServerSocket { public: explicit AsyncSocksProxyServerSocket(Socket* socket); + AsyncSocksProxyServerSocket(const AsyncSocksProxyServerSocket&) = delete; + AsyncSocksProxyServerSocket& operator=(const AsyncSocksProxyServerSocket&) = + delete; + private: void ProcessInput(char* data, size_t* len) override; void DirectSend(const ByteBufferWriter& buf); @@ -64,7 +70,6 @@ class AsyncSocksProxyServerSocket : public AsyncProxyServerSocket { SS_ERROR }; State state_; - RTC_DISALLOW_COPY_AND_ASSIGN(AsyncSocksProxyServerSocket); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/sigslot_repeater.h b/TMessagesProj/jni/voip/webrtc/rtc_base/sigslot_repeater.h deleted file mode 100644 index f562c5aa7b..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/sigslot_repeater.h +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2017 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_SIGSLOT_REPEATER_H__ -#define RTC_BASE_SIGSLOT_REPEATER_H__ - -// repeaters are both signals and slots, which are designed as intermediate -// pass-throughs for signals and slots which don't know about each other (for -// modularity or encapsulation). 
This eliminates the need to declare a signal -// handler whose sole purpose is to fire another signal. The repeater connects -// to the originating signal using the 'repeat' method. When the repeated -// signal fires, the repeater will also fire. -// -// TODO(deadbeef): Actually use this, after we decide on some style points on -// using signals, so it doesn't get deleted again. - -#include "rtc_base/third_party/sigslot/sigslot.h" - -namespace sigslot { - -template -class repeater_with_thread_policy - : public signal_with_thread_policy, - public has_slots { - private: - // These typedefs are just to make the code below more readable. Code using - // repeaters shouldn't need to reference these types directly. - typedef signal_with_thread_policy base_type; - typedef repeater_with_thread_policy this_type; - - public: - repeater_with_thread_policy() {} - repeater_with_thread_policy(const this_type& s) : base_type(s) {} - - void reemit(Args... args) { base_type::emit(args...); } - void repeat(base_type& s) { s.connect(this, &this_type::reemit); } - void stop(base_type& s) { s.disconnect(this); } -}; - -// Alias with default thread policy. Needed because both default arguments -// and variadic template arguments must go at the end of the list, so we -// can't have both at once. -template -using repeater = - repeater_with_thread_policy; - -} // namespace sigslot - -#endif // RTC_BASE_SIGSLOT_REPEATER_H__ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/sigslot_tester.h b/TMessagesProj/jni/voip/webrtc/rtc_base/sigslot_tester.h index 58be511ef6..92483c0b8d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/sigslot_tester.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/sigslot_tester.h @@ -38,7 +38,6 @@ // EXPECT_EQ("hello", capture); // /* See unit-tests for more examples */ -#include "rtc_base/constructor_magic.h" #include "rtc_base/third_party/sigslot/sigslot.h" namespace rtc { @@ -50,13 +49,14 @@ class SigslotTester0 : public sigslot::has_slots<> { signal->connect(this, &SigslotTester0::OnSignalCallback); } + SigslotTester0(const SigslotTester0&) = delete; + SigslotTester0& operator=(const SigslotTester0&) = delete; + int callback_count() const { return callback_count_; } private: void OnSignalCallback() { callback_count_++; } int callback_count_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester0); }; // Versions below are for testing signals that pass arguments. 
For all the @@ -74,6 +74,9 @@ class SigslotTester1 : public sigslot::has_slots<> { signal->connect(this, &SigslotTester1::OnSignalCallback); } + SigslotTester1(const SigslotTester1&) = delete; + SigslotTester1& operator=(const SigslotTester1&) = delete; + int callback_count() const { return callback_count_; } private: @@ -84,8 +87,6 @@ class SigslotTester1 : public sigslot::has_slots<> { int callback_count_; C1* capture1_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester1); }; template @@ -96,6 +97,9 @@ class SigslotTester2 : public sigslot::has_slots<> { signal->connect(this, &SigslotTester2::OnSignalCallback); } + SigslotTester2(const SigslotTester2&) = delete; + SigslotTester2& operator=(const SigslotTester2&) = delete; + int callback_count() const { return callback_count_; } private: @@ -108,8 +112,6 @@ class SigslotTester2 : public sigslot::has_slots<> { int callback_count_; C1* capture1_; C2* capture2_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester2); }; template @@ -126,6 +128,9 @@ class SigslotTester3 : public sigslot::has_slots<> { signal->connect(this, &SigslotTester3::OnSignalCallback); } + SigslotTester3(const SigslotTester3&) = delete; + SigslotTester3& operator=(const SigslotTester3&) = delete; + int callback_count() const { return callback_count_; } private: @@ -140,8 +145,6 @@ class SigslotTester3 : public sigslot::has_slots<> { C1* capture1_; C2* capture2_; C3* capture3_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester3); }; template { signal->connect(this, &SigslotTester4::OnSignalCallback); } + SigslotTester4(const SigslotTester4&) = delete; + SigslotTester4& operator=(const SigslotTester4&) = delete; + int callback_count() const { return callback_count_; } private: @@ -183,8 +189,6 @@ class SigslotTester4 : public sigslot::has_slots<> { C2* capture2_; C3* capture3_; C4* capture4_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester4); }; template { signal->connect(this, &SigslotTester5::OnSignalCallback); } + SigslotTester5(const SigslotTester5&) = delete; + SigslotTester5& operator=(const SigslotTester5&) = delete; + int callback_count() const { return callback_count_; } private: @@ -232,8 +239,6 @@ class SigslotTester5 : public sigslot::has_slots<> { C3* capture3_; C4* capture4_; C5* capture5_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester5); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/sigslottester.h.pump b/TMessagesProj/jni/voip/webrtc/rtc_base/sigslottester.h.pump index 0a1f41128d..c3d2d6e99b 100755 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/sigslottester.h.pump +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/sigslottester.h.pump @@ -35,7 +35,6 @@ // EXPECT_EQ("hello", capture); // /* See unit-tests for more examples */ -#include "rtc_base/constructor_magic.h" #include "rtc_base/third_party/sigslot/sigslot.h" namespace rtc { @@ -47,13 +46,14 @@ class SigslotTester0 : public sigslot::has_slots<> { signal->connect(this, &SigslotTester0::OnSignalCallback); } + SigslotTester0(const SigslotTester0&) = delete; + SigslotTester0& operator=(const SigslotTester0&) = delete; + int callback_count() const { return callback_count_; } private: void OnSignalCallback() { callback_count_++; } int callback_count_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester0); }; // Versions below are for testing signals that pass arguments. 
For all the @@ -78,6 +78,9 @@ class SigslotTester$i : public sigslot::has_slots<> { signal->connect(this, &SigslotTester$i::OnSignalCallback); } + SigslotTester$i(const SigslotTester$i&) = delete; + SigslotTester$i& operator=(const SigslotTester$i&) = delete; + int callback_count() const { return callback_count_; } private: @@ -91,9 +94,6 @@ class SigslotTester$i : public sigslot::has_slots<> { int callback_count_;$for j [[ C$j* capture$j[[]]_;]] - - - RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester$i); }; ]] diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/socket.h b/TMessagesProj/jni/voip/webrtc/rtc_base/socket.h index 6482117637..0ed3a7fa6a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/socket.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/socket.h @@ -25,7 +25,6 @@ #include "rtc_base/win32.h" #endif -#include "rtc_base/constructor_magic.h" #include "rtc_base/socket_address.h" #include "rtc_base/third_party/sigslot/sigslot.h" @@ -83,6 +82,9 @@ class Socket { public: virtual ~Socket() {} + Socket(const Socket&) = delete; + Socket& operator=(const Socket&) = delete; + // Returns the address to which the socket is bound. If the socket is not // bound, then the any-address is returned. virtual SocketAddress GetLocalAddress() const = 0; @@ -138,9 +140,6 @@ class Socket { protected: Socket() {} - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(Socket); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_adapters.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_adapters.cc index 0bd6efad3e..4ec93ae3e9 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_adapters.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_adapters.cc @@ -12,15 +12,17 @@ #pragma warning(disable : 4786) #endif +#include "rtc_base/socket_adapters.h" + #include #include "absl/strings/match.h" +#include "absl/strings/string_view.h" #include "rtc_base/buffer.h" #include "rtc_base/byte_buffer.h" #include "rtc_base/checks.h" #include "rtc_base/http_common.h" #include "rtc_base/logging.h" -#include "rtc_base/socket_adapters.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/zero_memory.h" @@ -217,9 +219,9 @@ void AsyncSSLSocket::ProcessInput(char* data, size_t* len) { /////////////////////////////////////////////////////////////////////////////// AsyncHttpsProxySocket::AsyncHttpsProxySocket(Socket* socket, - const std::string& user_agent, + absl::string_view user_agent, const SocketAddress& proxy, - const std::string& username, + absl::string_view username, const CryptString& password) : BufferedReadAdapter(socket, 1024), proxy_(proxy), @@ -409,8 +411,9 @@ void AsyncHttpsProxySocket::ProcessLine(char* data, size_t len) { } else if ((state_ == PS_AUTHENTICATE) && absl::StartsWithIgnoreCase(data, "Proxy-Authenticate:")) { std::string response, auth_method; - switch (HttpAuthenticate(data + 19, len - 19, proxy_, "CONNECT", "/", user_, - pass_, context_, response, auth_method)) { + switch (HttpAuthenticate(absl::string_view(data + 19, len - 19), proxy_, + "CONNECT", "/", user_, pass_, context_, response, + auth_method)) { case HAR_IGNORE: RTC_LOG(LS_VERBOSE) << "Ignoring Proxy-Authenticate: " << auth_method; if (!unknown_mechanisms_.empty()) @@ -470,7 +473,7 @@ void AsyncHttpsProxySocket::Error(int error) { AsyncSocksProxySocket::AsyncSocksProxySocket(Socket* socket, const SocketAddress& proxy, - const std::string& username, + absl::string_view username, const CryptString& password) : BufferedReadAdapter(socket, 1024), state_(SS_ERROR), diff --git 
a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_adapters.h b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_adapters.h index 67d3bbff7d..e78ee18a27 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_adapters.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_adapters.h @@ -13,9 +13,9 @@ #include +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "rtc_base/async_socket.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/crypt_string.h" namespace rtc { @@ -34,6 +34,9 @@ class BufferedReadAdapter : public AsyncSocketAdapter { BufferedReadAdapter(Socket* socket, size_t buffer_size); ~BufferedReadAdapter() override; + BufferedReadAdapter(const BufferedReadAdapter&) = delete; + BufferedReadAdapter& operator=(const BufferedReadAdapter&) = delete; + int Send(const void* pv, size_t cb) override; int Recv(void* pv, size_t cb, int64_t* timestamp) override; @@ -51,7 +54,6 @@ class BufferedReadAdapter : public AsyncSocketAdapter { char* buffer_; size_t buffer_size_, data_len_; bool buffering_; - RTC_DISALLOW_COPY_AND_ASSIGN(BufferedReadAdapter); }; /////////////////////////////////////////////////////////////////////////////// @@ -65,12 +67,14 @@ class AsyncSSLSocket : public BufferedReadAdapter { explicit AsyncSSLSocket(Socket* socket); + AsyncSSLSocket(const AsyncSSLSocket&) = delete; + AsyncSSLSocket& operator=(const AsyncSSLSocket&) = delete; + int Connect(const SocketAddress& addr) override; protected: void OnConnectEvent(Socket* socket) override; void ProcessInput(char* data, size_t* len) override; - RTC_DISALLOW_COPY_AND_ASSIGN(AsyncSSLSocket); }; /////////////////////////////////////////////////////////////////////////////// @@ -79,12 +83,15 @@ class AsyncSSLSocket : public BufferedReadAdapter { class AsyncHttpsProxySocket : public BufferedReadAdapter { public: AsyncHttpsProxySocket(Socket* socket, - const std::string& user_agent, + absl::string_view user_agent, const SocketAddress& proxy, - const std::string& username, + absl::string_view username, const CryptString& password); ~AsyncHttpsProxySocket() override; + AsyncHttpsProxySocket(const AsyncHttpsProxySocket&) = delete; + AsyncHttpsProxySocket& operator=(const AsyncHttpsProxySocket&) = delete; + // If connect is forced, the adapter will always issue an HTTP CONNECT to the // target address. Otherwise, it will connect only if the destination port // is not port 80. 
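The same mechanical cleanup repeats across these rtc_base headers: the RTC_DISALLOW_COPY_AND_ASSIGN macro (from the removed rtc_base/constructor_magic.h) is replaced by copy operations explicitly deleted in the public section of each class. A minimal sketch of the before/after shape, using a hypothetical Widget class rather than one from the patch:

// Old pattern, relying on rtc_base/constructor_magic.h (removed by this update):
//
//   class Widget {
//    public:
//     Widget();
//    private:
//     RTC_DISALLOW_COPY_AND_ASSIGN(Widget);
//   };
//
// New pattern used throughout these headers: spell out the deleted members.
class Widget {
 public:
  Widget() = default;
  Widget(const Widget&) = delete;             // non-copyable
  Widget& operator=(const Widget&) = delete;  // non-assignable
};

// Copying still fails at compile time, exactly as with the old macro:
//   Widget a;
//   Widget b = a;  // error: use of deleted copy constructor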
@@ -128,7 +135,6 @@ class AsyncHttpsProxySocket : public BufferedReadAdapter { } state_; HttpAuthContext* context_; std::string unknown_mechanisms_; - RTC_DISALLOW_COPY_AND_ASSIGN(AsyncHttpsProxySocket); }; /////////////////////////////////////////////////////////////////////////////// @@ -138,10 +144,13 @@ class AsyncSocksProxySocket : public BufferedReadAdapter { public: AsyncSocksProxySocket(Socket* socket, const SocketAddress& proxy, - const std::string& username, + absl::string_view username, const CryptString& password); ~AsyncSocksProxySocket() override; + AsyncSocksProxySocket(const AsyncSocksProxySocket&) = delete; + AsyncSocksProxySocket& operator=(const AsyncSocksProxySocket&) = delete; + int Connect(const SocketAddress& addr) override; SocketAddress GetRemoteAddress() const override; int Close() override; @@ -162,7 +171,6 @@ class AsyncSocksProxySocket : public BufferedReadAdapter { SocketAddress proxy_, dest_; std::string user_; CryptString pass_; - RTC_DISALLOW_COPY_AND_ASSIGN(AsyncSocksProxySocket); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.cc index 2996ede9d2..93d6860a70 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.cc @@ -10,6 +10,7 @@ #include "rtc_base/socket_address.h" +#include "absl/strings/string_view.h" #include "rtc_base/numerics/safe_conversions.h" #if defined(WEBRTC_POSIX) @@ -43,7 +44,7 @@ SocketAddress::SocketAddress() { Clear(); } -SocketAddress::SocketAddress(const std::string& hostname, int port) { +SocketAddress::SocketAddress(absl::string_view hostname, int port) { SetIP(hostname); SetPort(port); } @@ -101,8 +102,8 @@ void SocketAddress::SetIP(const IPAddress& ip) { scope_id_ = 0; } -void SocketAddress::SetIP(const std::string& hostname) { - hostname_ = hostname; +void SocketAddress::SetIP(absl::string_view hostname) { + hostname_ = std::string(hostname); literal_ = IPFromString(hostname, &ip_); if (!literal_) { ip_ = IPAddress(); @@ -188,23 +189,24 @@ std::string SocketAddress::ToResolvedSensitiveString() const { return sb.str(); } -bool SocketAddress::FromString(const std::string& str) { +bool SocketAddress::FromString(absl::string_view str) { if (str.at(0) == '[') { - std::string::size_type closebracket = str.rfind(']'); - if (closebracket != std::string::npos) { - std::string::size_type colon = str.find(':', closebracket); - if (colon != std::string::npos && colon > closebracket) { - SetPort(strtoul(str.substr(colon + 1).c_str(), nullptr, 10)); + absl::string_view::size_type closebracket = str.rfind(']'); + if (closebracket != absl::string_view::npos) { + absl::string_view::size_type colon = str.find(':', closebracket); + if (colon != absl::string_view::npos && colon > closebracket) { + SetPort( + strtoul(std::string(str.substr(colon + 1)).c_str(), nullptr, 10)); SetIP(str.substr(1, closebracket - 1)); } else { return false; } } } else { - std::string::size_type pos = str.find(':'); - if (std::string::npos == pos) + absl::string_view::size_type pos = str.find(':'); + if (absl::string_view::npos == pos) return false; - SetPort(strtoul(str.substr(pos + 1).c_str(), nullptr, 10)); + SetPort(strtoul(std::string(str.substr(pos + 1)).c_str(), nullptr, 10)); SetIP(str.substr(0, pos)); } return true; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.h b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.h index 570a71281e..99e14d8eab 100644 --- 
a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.h @@ -12,6 +12,8 @@ #define RTC_BASE_SOCKET_ADDRESS_H_ #include + +#include "absl/strings/string_view.h" #ifdef WEBRTC_UNIT_TEST #include // no-presubmit-check TODO(webrtc:8982) #endif // WEBRTC_UNIT_TEST @@ -34,7 +36,7 @@ class RTC_EXPORT SocketAddress { // Creates the address with the given host and port. Host may be a // literal IP string or a hostname to be resolved later. // DCHECKs that port is in valid range (0 to 2^16-1). - SocketAddress(const std::string& hostname, int port); + SocketAddress(absl::string_view hostname, int port); // Creates the address with the given IP and port. // IP is given as an integer in host byte order. V4 only, to be deprecated. @@ -69,7 +71,7 @@ class RTC_EXPORT SocketAddress { // Changes the hostname of this address to the given one. // Does not resolve the address; use Resolve to do so. - void SetIP(const std::string& hostname); + void SetIP(absl::string_view hostname); // Sets the IP address while retaining the hostname. Useful for bypassing // DNS for a pre-resolved IP. @@ -129,7 +131,7 @@ class RTC_EXPORT SocketAddress { std::string ToResolvedSensitiveString() const; // Parses hostname:port and [hostname]:port. - bool FromString(const std::string& str); + bool FromString(absl::string_view str); #ifdef WEBRTC_UNIT_TEST inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_server.h index face04dbc2..bf1326dad9 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_server.h @@ -13,6 +13,8 @@ #include +#include "api/units/time_delta.h" +#include "rtc_base/event.h" #include "rtc_base/socket_factory.h" namespace rtc { @@ -30,7 +32,7 @@ class NetworkBinderInterface; // notified of asynchronous I/O from this server's Wait method. class SocketServer : public SocketFactory { public: - static const int kForever = -1; + static constexpr webrtc::TimeDelta kForever = rtc::Event::kForever; static std::unique_ptr CreateDefault(); // When the socket server is installed into a Thread, this function is called @@ -40,10 +42,11 @@ class SocketServer : public SocketFactory { virtual void SetMessageQueue(Thread* queue) {} // Sleeps until: - // 1) cms milliseconds have elapsed (unless cms == kForever) - // 2) WakeUp() is called + // 1) `max_wait_duration` has elapsed (unless `max_wait_duration` == + // `kForever`) + // 2) WakeUp() is called // While sleeping, I/O is performed if process_io is true. - virtual bool Wait(int cms, bool process_io) = 0; + virtual bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) = 0; // Causes the current wait (if one is in progress) to wake up. 
virtual void WakeUp() = 0; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_stream.h b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_stream.h index 266a6e6fe6..f678f805d7 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_stream.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_stream.h @@ -13,7 +13,6 @@ #include -#include "rtc_base/constructor_magic.h" #include "rtc_base/socket.h" #include "rtc_base/stream.h" #include "rtc_base/third_party/sigslot/sigslot.h" @@ -27,6 +26,9 @@ class SocketStream : public StreamInterface, public sigslot::has_slots<> { explicit SocketStream(Socket* socket); ~SocketStream() override; + SocketStream(const SocketStream&) = delete; + SocketStream& operator=(const SocketStream&) = delete; + void Attach(Socket* socket); Socket* Detach(); @@ -53,8 +55,6 @@ class SocketStream : public StreamInterface, public sigslot::has_slots<> { void OnCloseEvent(Socket* socket, int err); Socket* socket_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SocketStream); }; /////////////////////////////////////////////////////////////////////////////// diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_unittest.h b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_unittest.h new file mode 100644 index 0000000000..20ef003a80 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_unittest.h @@ -0,0 +1,105 @@ +/* + * Copyright 2009 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_SOCKET_UNITTEST_H_ +#define RTC_BASE_SOCKET_UNITTEST_H_ + +#include "absl/strings/string_view.h" +#include "rtc_base/gunit.h" +#include "rtc_base/thread.h" + +namespace rtc { + +// Generic socket tests, to be used when testing individual socketservers. +// Derive your specific test class from SocketTest, install your +// socketserver, and call the SocketTest test methods. 
+class SocketTest : public ::testing::Test { + protected: + explicit SocketTest(rtc::SocketFactory* socket_factory) + : kIPv4Loopback(INADDR_LOOPBACK), + kIPv6Loopback(in6addr_loopback), + socket_factory_(socket_factory) {} + void TestConnectIPv4(); + void TestConnectIPv6(); + void TestConnectWithDnsLookupIPv4(); + void TestConnectWithDnsLookupIPv6(); + void TestConnectFailIPv4(); + void TestConnectFailIPv6(); + void TestConnectWithDnsLookupFailIPv4(); + void TestConnectWithDnsLookupFailIPv6(); + void TestConnectWithClosedSocketIPv4(); + void TestConnectWithClosedSocketIPv6(); + void TestConnectWhileNotClosedIPv4(); + void TestConnectWhileNotClosedIPv6(); + void TestServerCloseDuringConnectIPv4(); + void TestServerCloseDuringConnectIPv6(); + void TestClientCloseDuringConnectIPv4(); + void TestClientCloseDuringConnectIPv6(); + void TestServerCloseIPv4(); + void TestServerCloseIPv6(); + void TestCloseInClosedCallbackIPv4(); + void TestCloseInClosedCallbackIPv6(); + void TestDeleteInReadCallbackIPv4(); + void TestDeleteInReadCallbackIPv6(); + void TestSocketServerWaitIPv4(); + void TestSocketServerWaitIPv6(); + void TestTcpIPv4(); + void TestTcpIPv6(); + void TestSingleFlowControlCallbackIPv4(); + void TestSingleFlowControlCallbackIPv6(); + void TestUdpIPv4(); + void TestUdpIPv6(); + void TestUdpReadyToSendIPv4(); + void TestUdpReadyToSendIPv6(); + void TestGetSetOptionsIPv4(); + void TestGetSetOptionsIPv6(); + void TestSocketRecvTimestampIPv4(); + void TestSocketRecvTimestampIPv6(); + + static const int kTimeout = 5000; // ms + const IPAddress kIPv4Loopback; + const IPAddress kIPv6Loopback; + + protected: + void TcpInternal(const IPAddress& loopback, + size_t data_size, + ptrdiff_t max_send_size); + + private: + void ConnectInternal(const IPAddress& loopback); + void ConnectWithDnsLookupInternal(const IPAddress& loopback, + absl::string_view host); + void ConnectFailInternal(const IPAddress& loopback); + + void ConnectWithDnsLookupFailInternal(const IPAddress& loopback); + void ConnectWithClosedSocketInternal(const IPAddress& loopback); + void ConnectWhileNotClosedInternal(const IPAddress& loopback); + void ServerCloseDuringConnectInternal(const IPAddress& loopback); + void ClientCloseDuringConnectInternal(const IPAddress& loopback); + void ServerCloseInternal(const IPAddress& loopback); + void CloseInClosedCallbackInternal(const IPAddress& loopback); + void DeleteInReadCallbackInternal(const IPAddress& loopback); + void SocketServerWaitInternal(const IPAddress& loopback); + void SingleFlowControlCallbackInternal(const IPAddress& loopback); + void UdpInternal(const IPAddress& loopback); + void UdpReadyToSend(const IPAddress& loopback); + void GetSetOptionsInternal(const IPAddress& loopback); + void SocketRecvTimestamp(const IPAddress& loopback); + + SocketFactory* socket_factory_; +}; + +// For unbound sockets, GetLocalAddress / GetRemoteAddress return AF_UNSPEC +// values on Windows, but an empty address of the same family on Linux/MacOS X. 
+bool IsUnspecOrEmptyIP(const IPAddress& address); + +} // namespace rtc + +#endif // RTC_BASE_SOCKET_UNITTEST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_adapter.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_adapter.h index 8f98141651..4b8b9c74e0 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_adapter.h @@ -14,6 +14,7 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/async_socket.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" @@ -89,7 +90,7 @@ class SSLAdapter : public AsyncSocketAdapter { // StartSSL returns 0 if successful. // If StartSSL is called while the socket is closed or connecting, the SSL // negotiation will begin as soon as the socket connects. - virtual int StartSSL(const char* hostname) = 0; + virtual int StartSSL(absl::string_view hostname) = 0; // When an SSLAdapterFactory is used, an SSLAdapter may be used to resume // a previous SSL session, which results in an abbreviated handshake. diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.cc index ed42998353..d1fd57fca5 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.cc @@ -15,6 +15,7 @@ #include #include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" #include "rtc_base/openssl.h" #ifdef OPENSSL_IS_BORINGSSL @@ -43,6 +44,12 @@ SSLCertificateStats::SSLCertificateStats( SSLCertificateStats::~SSLCertificateStats() {} +std::unique_ptr SSLCertificateStats::Copy() const { + return std::make_unique( + std::string(fingerprint), std::string(fingerprint_algorithm), + std::string(base64_certificate), issuer ? issuer->Copy() : nullptr); +} + ////////////////////////////////////////////////////////////////////// // SSLCertificate ////////////////////////////////////////////////////////////////////// @@ -121,7 +128,7 @@ std::unique_ptr SSLCertChain::GetStats() const { // static std::unique_ptr SSLCertificate::FromPEMString( - const std::string& pem_string) { + absl::string_view pem_string) { #ifdef OPENSSL_IS_BORINGSSL return BoringSSLCertificate::FromPEMString(pem_string); #else diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.h index 3b3f24fb91..2e198800c4 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.h @@ -17,12 +17,13 @@ #include #include + #include #include #include +#include "absl/strings/string_view.h" #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/system/rtc_export.h" namespace rtc { @@ -37,6 +38,8 @@ struct RTC_EXPORT SSLCertificateStats { std::string fingerprint_algorithm; std::string base64_certificate; std::unique_ptr issuer; + + std::unique_ptr Copy() const; }; // Abstract interface overridden by SSL library specific @@ -55,7 +58,7 @@ class RTC_EXPORT SSLCertificate { // stored in *pem_length if it is non-null, and only if // parsing was successful. 
static std::unique_ptr FromPEMString( - const std::string& pem_string); + absl::string_view pem_string); virtual ~SSLCertificate() = default; // Returns a new SSLCertificate object instance wrapping the same @@ -73,7 +76,7 @@ class RTC_EXPORT SSLCertificate { virtual bool GetSignatureDigestAlgorithm(std::string* algorithm) const = 0; // Compute the digest of the certificate given algorithm - virtual bool ComputeDigest(const std::string& algorithm, + virtual bool ComputeDigest(absl::string_view algorithm, unsigned char* digest, size_t size, size_t* length) const = 0; @@ -101,6 +104,9 @@ class RTC_EXPORT SSLCertChain final { ~SSLCertChain(); + SSLCertChain(const SSLCertChain&) = delete; + SSLCertChain& operator=(const SSLCertChain&) = delete; + // Vector access methods. size_t GetSize() const { return certs_.size(); } @@ -118,8 +124,6 @@ class RTC_EXPORT SSLCertChain final { private: std::vector> certs_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SSLCertChain); }; // SSLCertificateVerifier provides a simple interface to allow third parties to diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.cc index 358402eb03..a43bb159c3 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.cc @@ -11,11 +11,14 @@ #include "rtc_base/ssl_fingerprint.h" #include + #include #include #include #include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" #include "rtc_base/logging.h" #include "rtc_base/message_digest.h" #include "rtc_base/rtc_certificate.h" @@ -25,19 +28,19 @@ namespace rtc { -SSLFingerprint* SSLFingerprint::Create(const std::string& algorithm, +SSLFingerprint* SSLFingerprint::Create(absl::string_view algorithm, const rtc::SSLIdentity* identity) { return CreateUnique(algorithm, *identity).release(); } std::unique_ptr SSLFingerprint::CreateUnique( - const std::string& algorithm, + absl::string_view algorithm, const rtc::SSLIdentity& identity) { return Create(algorithm, identity.certificate()); } std::unique_ptr SSLFingerprint::Create( - const std::string& algorithm, + absl::string_view algorithm, const rtc::SSLCertificate& cert) { uint8_t digest_val[64]; size_t digest_len; @@ -51,14 +54,14 @@ std::unique_ptr SSLFingerprint::Create( } SSLFingerprint* SSLFingerprint::CreateFromRfc4572( - const std::string& algorithm, - const std::string& fingerprint) { + absl::string_view algorithm, + absl::string_view fingerprint) { return CreateUniqueFromRfc4572(algorithm, fingerprint).release(); } std::unique_ptr SSLFingerprint::CreateUniqueFromRfc4572( - const std::string& algorithm, - const std::string& fingerprint) { + absl::string_view algorithm, + absl::string_view fingerprint) { if (algorithm.empty() || !rtc::IsFips180DigestAlgorithm(algorithm)) return nullptr; @@ -66,8 +69,8 @@ std::unique_ptr SSLFingerprint::CreateUniqueFromRfc4572( return nullptr; char value[rtc::MessageDigest::kMaxSize]; - size_t value_len = rtc::hex_decode_with_delimiter( - value, sizeof(value), fingerprint.c_str(), fingerprint.length(), ':'); + size_t value_len = + rtc::hex_decode_with_delimiter(ArrayView(value), fingerprint, ':'); if (!value_len) return nullptr; @@ -94,11 +97,11 @@ std::unique_ptr SSLFingerprint::CreateFromCertificate( return fingerprint; } -SSLFingerprint::SSLFingerprint(const std::string& algorithm, +SSLFingerprint::SSLFingerprint(absl::string_view algorithm, ArrayView digest_view) : algorithm(algorithm), 
digest(digest_view.data(), digest_view.size()) {} -SSLFingerprint::SSLFingerprint(const std::string& algorithm, +SSLFingerprint::SSLFingerprint(absl::string_view algorithm, const uint8_t* digest_in, size_t digest_len) : SSLFingerprint(algorithm, MakeArrayView(digest_in, digest_len)) {} @@ -108,8 +111,8 @@ bool SSLFingerprint::operator==(const SSLFingerprint& other) const { } std::string SSLFingerprint::GetRfc4572Fingerprint() const { - std::string fingerprint = - rtc::hex_encode_with_delimiter(digest.data(), digest.size(), ':'); + std::string fingerprint = rtc::hex_encode_with_delimiter( + absl::string_view(digest.data(), digest.size()), ':'); absl::c_transform(fingerprint, fingerprint.begin(), ::toupper); return fingerprint; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.h index add3ab7911..cfa26dd433 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.h @@ -13,8 +13,10 @@ #include #include + #include +#include "absl/strings/string_view.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/system/rtc_export.h" @@ -26,34 +28,34 @@ class SSLIdentity; struct RTC_EXPORT SSLFingerprint { // TODO(steveanton): Remove once downstream projects have moved off of this. - static SSLFingerprint* Create(const std::string& algorithm, + static SSLFingerprint* Create(absl::string_view algorithm, const rtc::SSLIdentity* identity); // TODO(steveanton): Rename to Create once projects have migrated. static std::unique_ptr CreateUnique( - const std::string& algorithm, + absl::string_view algorithm, const rtc::SSLIdentity& identity); static std::unique_ptr Create( - const std::string& algorithm, + absl::string_view algorithm, const rtc::SSLCertificate& cert); // TODO(steveanton): Remove once downstream projects have moved off of this. - static SSLFingerprint* CreateFromRfc4572(const std::string& algorithm, - const std::string& fingerprint); + static SSLFingerprint* CreateFromRfc4572(absl::string_view algorithm, + absl::string_view fingerprint); // TODO(steveanton): Rename to CreateFromRfc4572 once projects have migrated. static std::unique_ptr CreateUniqueFromRfc4572( - const std::string& algorithm, - const std::string& fingerprint); + absl::string_view algorithm, + absl::string_view fingerprint); // Creates a fingerprint from a certificate, using the same digest algorithm // as the certificate's signature. static std::unique_ptr CreateFromCertificate( const RTCCertificate& cert); - SSLFingerprint(const std::string& algorithm, + SSLFingerprint(absl::string_view algorithm, ArrayView digest_view); // TODO(steveanton): Remove once downstream projects have moved off of this. 
- SSLFingerprint(const std::string& algorithm, + SSLFingerprint(absl::string_view algorithm, const uint8_t* digest_in, size_t digest_len); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.cc index 81cf1d78a3..3b4232b06b 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.cc @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" #ifdef OPENSSL_IS_BORINGSSL #include "rtc_base/boringssl_identity.h" @@ -169,30 +170,31 @@ KeyType IntKeyTypeFamilyToKeyType(int key_type_family) { // SSLIdentity ////////////////////////////////////////////////////////////////////// -bool SSLIdentity::PemToDer(const std::string& pem_type, - const std::string& pem_string, +bool SSLIdentity::PemToDer(absl::string_view pem_type, + absl::string_view pem_string, std::string* der) { // Find the inner body. We need this to fulfill the contract of returning // pem_length. - size_t header = pem_string.find("-----BEGIN " + pem_type + "-----"); - if (header == std::string::npos) { + std::string pem_type_str(pem_type); + size_t header = pem_string.find("-----BEGIN " + pem_type_str + "-----"); + if (header == absl::string_view::npos) { return false; } size_t body = pem_string.find('\n', header); - if (body == std::string::npos) { + if (body == absl::string_view::npos) { return false; } - size_t trailer = pem_string.find("-----END " + pem_type + "-----"); - if (trailer == std::string::npos) { + size_t trailer = pem_string.find("-----END " + pem_type_str + "-----"); + if (trailer == absl::string_view::npos) { return false; } - std::string inner = pem_string.substr(body + 1, trailer - (body + 1)); + std::string inner(pem_string.substr(body + 1, trailer - (body + 1))); *der = Base64::Decode(inner, Base64::DO_PARSE_WHITE | Base64::DO_PAD_ANY | Base64::DO_TERM_BUFFER); return true; } -std::string SSLIdentity::DerToPem(const std::string& pem_type, +std::string SSLIdentity::DerToPem(absl::string_view pem_type, const unsigned char* data, size_t length) { rtc::StringBuilder result; @@ -214,7 +216,7 @@ std::string SSLIdentity::DerToPem(const std::string& pem_type, } // static -std::unique_ptr SSLIdentity::Create(const std::string& common_name, +std::unique_ptr SSLIdentity::Create(absl::string_view common_name, const KeyParams& key_param, time_t certificate_lifetime) { #ifdef OPENSSL_IS_BORINGSSL @@ -227,13 +229,13 @@ std::unique_ptr SSLIdentity::Create(const std::string& common_name, } // static -std::unique_ptr SSLIdentity::Create(const std::string& common_name, +std::unique_ptr SSLIdentity::Create(absl::string_view common_name, const KeyParams& key_param) { return Create(common_name, key_param, kDefaultCertificateLifetimeInSeconds); } // static -std::unique_ptr SSLIdentity::Create(const std::string& common_name, +std::unique_ptr SSLIdentity::Create(absl::string_view common_name, KeyType key_type) { return Create(common_name, KeyParams(key_type), kDefaultCertificateLifetimeInSeconds); @@ -252,8 +254,8 @@ std::unique_ptr SSLIdentity::CreateForTest( // Construct an identity from a private key and a certificate. 
// static std::unique_ptr SSLIdentity::CreateFromPEMStrings( - const std::string& private_key, - const std::string& certificate) { + absl::string_view private_key, + absl::string_view certificate) { #ifdef OPENSSL_IS_BORINGSSL return BoringSSLIdentity::CreateFromPEMStrings(private_key, certificate); #else @@ -264,8 +266,8 @@ std::unique_ptr SSLIdentity::CreateFromPEMStrings( // Construct an identity from a private key and a certificate chain. // static std::unique_ptr SSLIdentity::CreateFromPEMChainStrings( - const std::string& private_key, - const std::string& certificate_chain) { + absl::string_view private_key, + absl::string_view certificate_chain) { #ifdef OPENSSL_IS_BORINGSSL return BoringSSLIdentity::CreateFromPEMChainStrings(private_key, certificate_chain); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.h index 78d1ec12b7..a0119bb1c4 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.h @@ -14,10 +14,12 @@ #define RTC_BASE_SSL_IDENTITY_H_ #include + #include #include #include +#include "absl/strings/string_view.h" #include "rtc_base/system/rtc_export.h" namespace rtc { @@ -108,12 +110,12 @@ class RTC_EXPORT SSLIdentity { // should be a non-negative number. // Returns null on failure. // Caller is responsible for freeing the returned object. - static std::unique_ptr Create(const std::string& common_name, + static std::unique_ptr Create(absl::string_view common_name, const KeyParams& key_param, time_t certificate_lifetime); - static std::unique_ptr Create(const std::string& common_name, + static std::unique_ptr Create(absl::string_view common_name, const KeyParams& key_param); - static std::unique_ptr Create(const std::string& common_name, + static std::unique_ptr Create(absl::string_view common_name, KeyType key_type); // Allows fine-grained control over expiration time. @@ -122,13 +124,13 @@ class RTC_EXPORT SSLIdentity { // Construct an identity from a private key and a certificate. static std::unique_ptr CreateFromPEMStrings( - const std::string& private_key, - const std::string& certificate); + absl::string_view private_key, + absl::string_view certificate); // Construct an identity from a private key and a certificate chain. static std::unique_ptr CreateFromPEMChainStrings( - const std::string& private_key, - const std::string& certificate_chain); + absl::string_view private_key, + absl::string_view certificate_chain); virtual ~SSLIdentity() {} @@ -144,10 +146,10 @@ class RTC_EXPORT SSLIdentity { virtual std::string PublicKeyToPEMString() const = 0; // Helpers for parsing converting between PEM and DER format. - static bool PemToDer(const std::string& pem_type, - const std::string& pem_string, + static bool PemToDer(absl::string_view pem_type, + absl::string_view pem_string, std::string* der); - static std::string DerToPem(const std::string& pem_type, + static std::string DerToPem(absl::string_view pem_type, const unsigned char* data, size_t length); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_roots.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_roots.h index 8f869f4a9e..34a4f082b4 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_roots.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_roots.h @@ -11,6 +11,8 @@ #ifndef RTC_BASE_SSL_ROOTS_H_ #define RTC_BASE_SSL_ROOTS_H_ +#include + // This file is the root certificates in C form that are needed to connect to // Google. 
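Much of the remaining churn in these files is a single signature migration: string parameters move from const std::string& to absl::string_view, so callers can pass string literals, std::string objects, or sub-slices without constructing temporaries. A small illustrative sketch of that calling convention; CountColons and Caller are hypothetical names, not part of the patch:

#include <cstddef>
#include <string>

#include "absl/strings/string_view.h"

// Accepts any contiguous character range without copying it.
size_t CountColons(absl::string_view s) {
  size_t n = 0;
  for (char c : s) {
    if (c == ':') ++n;
  }
  return n;
}

void Caller() {
  std::string owned = "a:b:c";
  CountColons(owned);                                  // binds to an existing std::string
  CountColons("de:ad:be:ef");                          // literal, no temporary std::string
  CountColons(absl::string_view(owned).substr(0, 3));  // cheap sub-slice, still no copy
}

The trade-off visible in the patched .cc files is that an absl::string_view is not guaranteed to be NUL-terminated and does not concatenate with std::string via operator+, so places that hand the text to C APIs (strtoul in SocketAddress::FromString, strtoll in ParseSigned) or build search patterns (SSLIdentity::PemToDer) first materialize an explicit std::string copy of the view.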
diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.cc index b805fdc6c3..4b60d6d7b1 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.cc @@ -11,6 +11,7 @@ #include "rtc_base/ssl_stream_adapter.h" #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" #include "rtc_base/openssl_stream_adapter.h" /////////////////////////////////////////////////////////////////////////////// @@ -39,7 +40,7 @@ std::string SrtpCryptoSuiteToName(int crypto_suite) { } } -int SrtpCryptoSuiteFromName(const std::string& crypto_suite) { +int SrtpCryptoSuiteFromName(absl::string_view crypto_suite) { if (crypto_suite == kCsAesCm128HmacSha1_32) return kSrtpAes128CmSha1_32; if (crypto_suite == kCsAesCm128HmacSha1_80) @@ -85,7 +86,7 @@ bool IsGcmCryptoSuite(int crypto_suite) { crypto_suite == kSrtpAeadAes128Gcm); } -bool IsGcmCryptoSuiteName(const std::string& crypto_suite) { +bool IsGcmCryptoSuiteName(absl::string_view crypto_suite) { return (crypto_suite == kCsAeadAes256Gcm || crypto_suite == kCsAeadAes128Gcm); } @@ -98,7 +99,7 @@ bool SSLStreamAdapter::GetSslCipherSuite(int* cipher_suite) { return false; } -bool SSLStreamAdapter::ExportKeyingMaterial(const std::string& label, +bool SSLStreamAdapter::ExportKeyingMaterial(absl::string_view label, const uint8_t* context, size_t context_len, bool use_context, @@ -122,7 +123,7 @@ bool SSLStreamAdapter::IsBoringSsl() { bool SSLStreamAdapter::IsAcceptableCipher(int cipher, KeyType key_type) { return OpenSSLStreamAdapter::IsAcceptableCipher(cipher, key_type); } -bool SSLStreamAdapter::IsAcceptableCipher(const std::string& cipher, +bool SSLStreamAdapter::IsAcceptableCipher(absl::string_view cipher, KeyType key_type) { return OpenSSLStreamAdapter::IsAcceptableCipher(cipher, key_type); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.h index 618ffca4d0..e68870c747 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.h @@ -13,11 +13,13 @@ #include #include + #include #include #include #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/stream.h" @@ -53,7 +55,7 @@ extern const char kCsAeadAes256Gcm[]; std::string SrtpCryptoSuiteToName(int crypto_suite); // The reverse of above conversion. -int SrtpCryptoSuiteFromName(const std::string& crypto_suite); +int SrtpCryptoSuiteFromName(absl::string_view crypto_suite); // Get key length and salt length for given crypto suite. Returns true for // valid suites, otherwise false. @@ -65,7 +67,7 @@ bool GetSrtpKeyAndSaltLengths(int crypto_suite, bool IsGcmCryptoSuite(int crypto_suite); // Returns true if the given crypto suite name uses a GCM cipher. -bool IsGcmCryptoSuiteName(const std::string& crypto_suite); +bool IsGcmCryptoSuiteName(absl::string_view crypto_suite); // SSLStreamAdapter : A StreamInterfaceAdapter that does SSL/TLS. // After SSL has been started, the stream will only open on successful @@ -176,7 +178,7 @@ class SSLStreamAdapter : public StreamInterface, public sigslot::has_slots<> { // Returns true if successful. // `error` is optional and provides more information about the failure. 
virtual bool SetPeerCertificateDigest( - const std::string& digest_alg, + absl::string_view digest_alg, const unsigned char* digest_val, size_t digest_len, SSLPeerCertificateDigestError* error = nullptr) = 0; @@ -208,7 +210,7 @@ class SSLStreamAdapter : public StreamInterface, public sigslot::has_slots<> { // zero-length ones). // result -- where to put the computed value // result_len -- the length of the computed value - virtual bool ExportKeyingMaterial(const std::string& label, + virtual bool ExportKeyingMaterial(absl::string_view label, const uint8_t* context, size_t context_len, bool use_context, @@ -233,7 +235,7 @@ class SSLStreamAdapter : public StreamInterface, public sigslot::has_slots<> { // Returns true iff the supplied cipher is deemed to be strong. // TODO(torbjorng): Consider removing the KeyType argument. static bool IsAcceptableCipher(int cipher, KeyType key_type); - static bool IsAcceptableCipher(const std::string& cipher, KeyType key_type); + static bool IsAcceptableCipher(absl::string_view cipher, KeyType key_type); // TODO(guoweis): Move this away from a static class method. Currently this is // introduced such that any caller could depend on sslstreamadapter.h without diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/stream.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/stream.cc index 30c767888c..e1aab8cc22 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/stream.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/stream.cc @@ -16,7 +16,6 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/location.h" #include "rtc_base/thread.h" namespace rtc { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h b/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h index 70de65a75d..7a9a588733 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h @@ -14,8 +14,6 @@ #include #include "rtc_base/buffer.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/message_handler.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" @@ -52,6 +50,9 @@ class RTC_EXPORT StreamInterface { public: virtual ~StreamInterface() {} + StreamInterface(const StreamInterface&) = delete; + StreamInterface& operator=(const StreamInterface&) = delete; + virtual StreamState GetState() const = 0; // Read attempts to fill buffer of size buffer_len. Write attempts to send @@ -110,9 +111,6 @@ class RTC_EXPORT StreamInterface { protected: StreamInterface(); - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(StreamInterface); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.cc index 364eaa0f0f..434d1e6139 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.cc @@ -12,6 +12,8 @@ #include +#include "absl/strings/string_view.h" +#include "api/array_view.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" @@ -51,16 +53,16 @@ size_t hex_encode_output_length(size_t srclen, char delimiter) { // hex_encode shows the hex representation of binary data in ascii, with // `delimiter` between bytes, or none if `delimiter` == 0. void hex_encode_with_delimiter(char* buffer, - const char* csource, - size_t srclen, + absl::string_view source, char delimiter) { RTC_DCHECK(buffer); // Init and check bounds. 
const unsigned char* bsource = - reinterpret_cast(csource); + reinterpret_cast(source.data()); size_t srcpos = 0, bufpos = 0; + size_t srclen = source.length(); while (srcpos < srclen) { unsigned char ch = bsource[srcpos++]; buffer[bufpos] = hex_encode((ch >> 4) & 0xF); @@ -77,43 +79,30 @@ void hex_encode_with_delimiter(char* buffer, } // namespace -std::string hex_encode(const std::string& str) { - return hex_encode(str.c_str(), str.size()); +std::string hex_encode(absl::string_view str) { + return hex_encode_with_delimiter(str, 0); } -std::string hex_encode(const char* source, size_t srclen) { - return hex_encode_with_delimiter(source, srclen, 0); -} - -std::string hex_encode_with_delimiter(const char* source, - size_t srclen, +std::string hex_encode_with_delimiter(absl::string_view source, char delimiter) { - std::string s(hex_encode_output_length(srclen, delimiter), 0); - hex_encode_with_delimiter(&s[0], source, srclen, delimiter); + std::string s(hex_encode_output_length(source.length(), delimiter), 0); + hex_encode_with_delimiter(&s[0], source, delimiter); return s; } -size_t hex_decode(char* cbuffer, - size_t buflen, - const char* source, - size_t srclen) { - return hex_decode_with_delimiter(cbuffer, buflen, source, srclen, 0); -} - -size_t hex_decode_with_delimiter(char* cbuffer, - size_t buflen, - const char* source, - size_t srclen, +size_t hex_decode_with_delimiter(ArrayView cbuffer, + absl::string_view source, char delimiter) { - RTC_DCHECK(cbuffer); // TODO(kwiberg): estimate output size - if (buflen == 0) + if (cbuffer.empty()) return 0; // Init and bounds check. - unsigned char* bbuffer = reinterpret_cast(cbuffer); + unsigned char* bbuffer = reinterpret_cast(cbuffer.data()); size_t srcpos = 0, bufpos = 0; + size_t srclen = source.length(); + size_t needed = (delimiter) ? (srclen + 1) / 3 : srclen / 2; - if (buflen < needed) + if (cbuffer.size() < needed) return 0; while (srcpos < srclen) { @@ -141,15 +130,8 @@ size_t hex_decode_with_delimiter(char* cbuffer, return bufpos; } -size_t hex_decode(char* buffer, size_t buflen, const std::string& source) { - return hex_decode_with_delimiter(buffer, buflen, source, 0); -} -size_t hex_decode_with_delimiter(char* buffer, - size_t buflen, - const std::string& source, - char delimiter) { - return hex_decode_with_delimiter(buffer, buflen, source.c_str(), - source.length(), delimiter); +size_t hex_decode(ArrayView buffer, absl::string_view source) { + return hex_decode_with_delimiter(buffer, source, 0); } size_t tokenize(absl::string_view source, @@ -177,7 +159,7 @@ bool tokenize_first(absl::string_view source, std::string* rest) { // Find the first delimiter size_t left_pos = source.find(delimiter); - if (left_pos == std::string::npos) { + if (left_pos == absl::string_view::npos) { return false; } @@ -192,53 +174,29 @@ bool tokenize_first(absl::string_view source, return true; } -std::string join(const std::vector& source, char delimiter) { - if (source.size() == 0) { - return std::string(); - } - // Find length of the string to be returned to pre-allocate memory. - size_t source_string_length = 0; - for (size_t i = 0; i < source.size(); ++i) { - source_string_length += source[i].length(); - } - - // Build the joined string. 
- std::string joined_string; - joined_string.reserve(source_string_length + source.size() - 1); - for (size_t i = 0; i < source.size(); ++i) { - if (i != 0) { - joined_string += delimiter; - } - joined_string += source[i]; - } - return joined_string; -} - -size_t split(absl::string_view source, - char delimiter, - std::vector* fields) { - RTC_DCHECK(fields); - fields->clear(); +std::vector split(absl::string_view source, char delimiter) { + std::vector fields; size_t last = 0; for (size_t i = 0; i < source.length(); ++i) { if (source[i] == delimiter) { - fields->emplace_back(source.substr(last, i - last)); + fields.push_back(source.substr(last, i - last)); last = i + 1; } } - fields->emplace_back(source.substr(last)); - return fields->size(); + fields.push_back(source.substr(last)); + return fields; } std::string ToString(const bool b) { return b ? "true" : "false"; } -std::string ToString(const char* const s) { +std::string ToString(absl::string_view s) { return std::string(s); } -std::string ToString(const std::string s) { - return s; + +std::string ToString(const char* s) { + return std::string(s); } std::string ToString(const short s) { @@ -311,7 +269,7 @@ std::string ToString(const void* const p) { return std::string(&buf[0], len); } -bool FromString(const std::string& s, bool* b) { +bool FromString(absl::string_view s, bool* b) { if (s == "false") { *b = false; return true; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.h b/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.h index 356fdfaaf7..82a9dfdb62 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.h @@ -19,6 +19,7 @@ #include "absl/strings/string_view.h" #include "absl/types/optional.h" +#include "api/array_view.h" #include "rtc_base/checks.h" #include "rtc_base/string_to_number.h" @@ -28,44 +29,24 @@ namespace rtc { // String Encoding Utilities ////////////////////////////////////////////////////////////////////// -std::string hex_encode(const std::string& str); -std::string hex_encode(const char* source, size_t srclen); -std::string hex_encode_with_delimiter(const char* source, - size_t srclen, - char delimiter); +std::string hex_encode(absl::string_view str); +std::string hex_encode_with_delimiter(absl::string_view source, char delimiter); // hex_decode converts ascii hex to binary. -size_t hex_decode(char* buffer, - size_t buflen, - const char* source, - size_t srclen); +size_t hex_decode(ArrayView buffer, absl::string_view source); // hex_decode, assuming that there is a delimiter between every byte // pair. // `delimiter` == 0 means no delimiter // If the buffer is too short or the data is invalid, we return 0. -size_t hex_decode_with_delimiter(char* buffer, - size_t buflen, - const char* source, - size_t srclen, +size_t hex_decode_with_delimiter(ArrayView buffer, + absl::string_view source, char delimiter); -// Helper functions for hex_decode. -size_t hex_decode(char* buffer, size_t buflen, const std::string& source); -size_t hex_decode_with_delimiter(char* buffer, - size_t buflen, - const std::string& source, - char delimiter); - -// Joins the source vector of strings into a single string, with each -// field in source being separated by delimiter. No trailing delimiter is added. -std::string join(const std::vector& source, char delimiter); - // Splits the source string into multiple fields separated by delimiter, -// with duplicates of delimiter creating empty fields. 
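// [Editor's note -- illustrative sketch, not part of this diff] Call sites for
// the new split() and hex_encode() shown above. split() now returns its fields
// by value as views into the argument (it stores substrings of the
// string_view), so the backing string must outlive the result -- an inference
// from the implementation above.
#include <string>
#include <vector>
#include "absl/strings/string_view.h"
#include "rtc_base/string_encode.h"

void SplitAndHexExample() {
  std::string line = "a,b,,c";
  std::vector<absl::string_view> fields = rtc::split(line, ',');  // "a", "b", "", "c"
  std::string hex = rtc::hex_encode(line);  // accepts std::string, literals, string_view
}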
-size_t split(absl::string_view source, - char delimiter, - std::vector* fields); +// with duplicates of delimiter creating empty fields. Empty input produces a +// single, empty, field. +std::vector split(absl::string_view source, char delimiter); // Splits the source string into multiple fields separated by delimiter, // with duplicates of delimiter ignored. Trailing delimiter ignored. @@ -77,7 +58,7 @@ size_t tokenize(absl::string_view source, // duplicates of delimiter ignored. Return false if the delimiter could not be // found, otherwise return true. bool tokenize_first(absl::string_view source, - const char delimiter, + char delimiter, std::string* token, std::string* rest); @@ -85,8 +66,10 @@ bool tokenize_first(absl::string_view source, // TODO(jonasolsson): Remove these when absl::StrCat becomes available. std::string ToString(bool b); +std::string ToString(absl::string_view s); +// The const char* overload is needed for correct overload resolution because of +// the const void* version of ToString() below. std::string ToString(const char* s); -std::string ToString(std::string t); std::string ToString(short s); std::string ToString(unsigned short s); @@ -106,7 +89,7 @@ template ::value && !std::is_same::value, int>::type = 0> -static bool FromString(const std::string& s, T* t) { +static bool FromString(absl::string_view s, T* t) { RTC_DCHECK(t); absl::optional result = StringToNumber(s); @@ -116,10 +99,10 @@ static bool FromString(const std::string& s, T* t) { return result.has_value(); } -bool FromString(const std::string& s, bool* b); +bool FromString(absl::string_view s, bool* b); template -static inline T FromString(const std::string& str) { +static inline T FromString(absl::string_view str) { T val; FromString(str, &val); return val; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/string_to_number.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/string_to_number.cc index 351610f31a..1209eced44 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/string_to_number.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/string_to_number.cc @@ -20,30 +20,41 @@ namespace rtc { namespace string_to_number_internal { -absl::optional ParseSigned(const char* str, int base) { - RTC_DCHECK(str); - if (isdigit(str[0]) || str[0] == '-') { +absl::optional ParseSigned(absl::string_view str, int base) { + if (str.empty()) + return absl::nullopt; + + if (isdigit(static_cast(str[0])) || str[0] == '-') { + std::string str_str(str); char* end = nullptr; errno = 0; - const signed_type value = std::strtoll(str, &end, base); - if (end && *end == '\0' && errno == 0) { + const signed_type value = std::strtoll(str_str.c_str(), &end, base); + // Check for errors and also make sure that there were no embedded nuls in + // the input string. + if (end == str_str.c_str() + str_str.size() && errno == 0) { return value; } } return absl::nullopt; } -absl::optional ParseUnsigned(const char* str, int base) { - RTC_DCHECK(str); - if (isdigit(str[0]) || str[0] == '-') { +absl::optional ParseUnsigned(absl::string_view str, int base) { + if (str.empty()) + return absl::nullopt; + + if (isdigit(static_cast(str[0])) || str[0] == '-') { + std::string str_str(str); // Explicitly discard negative values. std::strtoull parsing causes unsigned // wraparound. We cannot just reject values that start with -, though, since // -0 is perfectly fine, as is -0000000000000000000000000000000. 
const bool is_negative = str[0] == '-'; char* end = nullptr; errno = 0; - const unsigned_type value = std::strtoull(str, &end, base); - if (end && *end == '\0' && errno == 0 && (value == 0 || !is_negative)) { + const unsigned_type value = std::strtoull(str_str.c_str(), &end, base); + // Check for errors and also make sure that there were no embedded nuls in + // the input string. + if (end == str_str.c_str() + str_str.size() && errno == 0 && + (value == 0 || !is_negative)) { return value; } } @@ -69,22 +80,25 @@ inline long double StrToT(const char* str, char** str_end) { } template -absl::optional ParseFloatingPoint(const char* str) { - RTC_DCHECK(str); - if (*str == '\0') +absl::optional ParseFloatingPoint(absl::string_view str) { + if (str.empty()) + return absl::nullopt; + + if (str[0] == '\0') return absl::nullopt; + std::string str_str(str); char* end = nullptr; errno = 0; - const T value = StrToT(str, &end); - if (end && *end == '\0' && errno == 0) { + const T value = StrToT(str_str.c_str(), &end); + if (end == str_str.c_str() + str_str.size() && errno == 0) { return value; } return absl::nullopt; } -template absl::optional ParseFloatingPoint(const char* str); -template absl::optional ParseFloatingPoint(const char* str); -template absl::optional ParseFloatingPoint(const char* str); +template absl::optional ParseFloatingPoint(absl::string_view str); +template absl::optional ParseFloatingPoint(absl::string_view str); +template absl::optional ParseFloatingPoint(absl::string_view str); } // namespace string_to_number_internal } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/string_to_number.h b/TMessagesProj/jni/voip/webrtc/rtc_base/string_to_number.h index 4cb521595d..1d704ee464 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/string_to_number.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/string_to_number.h @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" namespace rtc { @@ -25,10 +26,9 @@ namespace rtc { // functions (std::stoi, etc.) indicate errors by throwing exceptions, which // are disabled in WebRTC. // -// Integers are parsed using one of the following functions: -// absl::optional StringToNumber(const char* str, int base = 10); -// absl::optional StringToNumber(const std::string& str, -// int base = 10); +// Integers are parsed using: +// absl::optional StringToNumber(absl::string_view str, +// int base = 10); // // These functions parse a value from the beginning of a string into one of the // fundamental integer types, or returns an empty Optional if parsing @@ -38,26 +38,23 @@ namespace rtc { // By setting base to 0, one of octal, decimal or hexadecimal will be // detected from the string's prefix (0, nothing or 0x, respectively). // If non-zero, base can be set to a value between 2 and 36 inclusively. -// -// If desired, this interface could be extended with support for floating-point -// types. namespace string_to_number_internal { // These must be (unsigned) long long, to match the signature of strto(u)ll. 
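// [Editor's note -- illustrative sketch, not part of this diff] With the
// absl::string_view entry points in this hunk, StringToNumber() can be called
// on literals, std::string or string_view alike; out-of-range input yields
// absl::nullopt.
#include <cstdint>
#include "absl/types/optional.h"
#include "rtc_base/string_to_number.h"

void StringToNumberExample() {
  absl::optional<int> port = rtc::StringToNumber<int>("8080");            // 8080
  absl::optional<uint16_t> bad = rtc::StringToNumber<uint16_t>("70000");  // nullopt, overflows
}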
using unsigned_type = unsigned long long; // NOLINT(runtime/int) using signed_type = long long; // NOLINT(runtime/int) -absl::optional ParseSigned(const char* str, int base); -absl::optional ParseUnsigned(const char* str, int base); +absl::optional ParseSigned(absl::string_view str, int base); +absl::optional ParseUnsigned(absl::string_view str, int base); template -absl::optional ParseFloatingPoint(const char* str); +absl::optional ParseFloatingPoint(absl::string_view str); } // namespace string_to_number_internal template typename std::enable_if::value && std::is_signed::value, absl::optional>::type -StringToNumber(const char* str, int base = 10) { +StringToNumber(absl::string_view str, int base = 10) { using string_to_number_internal::signed_type; static_assert( std::numeric_limits::max() <= @@ -78,7 +75,7 @@ template typename std::enable_if::value && std::is_unsigned::value, absl::optional>::type -StringToNumber(const char* str, int base = 10) { +StringToNumber(absl::string_view str, int base = 10) { using string_to_number_internal::unsigned_type; static_assert(std::numeric_limits::max() <= std::numeric_limits::max(), @@ -95,7 +92,7 @@ StringToNumber(const char* str, int base = 10) { template typename std::enable_if::value, absl::optional>::type -StringToNumber(const char* str, int base = 10) { +StringToNumber(absl::string_view str, int base = 10) { static_assert( std::numeric_limits::max() <= std::numeric_limits::max(), "StringToNumber only supports floating-point numbers as large " @@ -103,14 +100,6 @@ StringToNumber(const char* str, int base = 10) { return string_to_number_internal::ParseFloatingPoint(str); } -// The std::string overloads only exists if there is a matching const char* -// version. -template -auto StringToNumber(const std::string& str, int base = 10) - -> decltype(StringToNumber(str.c_str(), base)) { - return StringToNumber(str.c_str(), base); -} - } // namespace rtc #endif // RTC_BASE_STRING_TO_NUMBER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.cc index 1720c62d5e..b93e615705 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.cc @@ -10,39 +10,23 @@ #include "rtc_base/string_utils.h" +#include "absl/strings/string_view.h" + namespace rtc { -size_t strcpyn(char* buffer, - size_t buflen, - const char* source, - size_t srclen /* = SIZE_UNKNOWN */) { +size_t strcpyn(char* buffer, size_t buflen, absl::string_view source) { if (buflen <= 0) return 0; - if (srclen == SIZE_UNKNOWN) { - srclen = strlen(source); - } + size_t srclen = source.length(); if (srclen >= buflen) { srclen = buflen - 1; } - memcpy(buffer, source, srclen); + memcpy(buffer, source.data(), srclen); buffer[srclen] = 0; return srclen; } -static const char kWhitespace[] = " \n\r\t"; - -std::string string_trim(const std::string& s) { - std::string::size_type first = s.find_first_not_of(kWhitespace); - std::string::size_type last = s.find_last_not_of(kWhitespace); - - if (first == std::string::npos || last == std::string::npos) { - return std::string(""); - } - - return s.substr(first, last - first + 1); -} - std::string ToHex(const int i) { char buffer[50]; snprintf(buffer, sizeof(buffer), "%x", i); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.h b/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.h index d844e5e125..9534d59e04 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.h +++ 
b/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.h @@ -11,11 +11,11 @@ #ifndef RTC_BASE_STRING_UTILS_H_ #define RTC_BASE_STRING_UTILS_H_ -#include -#include #include #include +#include "absl/strings/string_view.h" + #if defined(WEBRTC_WIN) #include #include @@ -30,15 +30,26 @@ #include +#include "absl/strings/string_view.h" + namespace rtc { const size_t SIZE_UNKNOWN = static_cast(-1); +// An absl::string_view comparator functor for use with container types such as +// std::map that support heterogenous lookup. +// +// Example usage: +// std::map my_map; +struct AbslStringViewCmp { + using is_transparent = void; + bool operator()(absl::string_view a, absl::string_view b) const { + return a < b; + } +}; + // Safe version of strncpy that always nul-terminate. -size_t strcpyn(char* buffer, - size_t buflen, - const char* source, - size_t srclen = SIZE_UNKNOWN); +size_t strcpyn(char* buffer, size_t buflen, absl::string_view source); /////////////////////////////////////////////////////////////////////////////// // UTF helpers (Windows only) @@ -57,7 +68,7 @@ inline std::wstring ToUtf16(const char* utf8, size_t len) { return ws; } -inline std::wstring ToUtf16(const std::string& str) { +inline std::wstring ToUtf16(absl::string_view str) { return ToUtf16(str.data(), str.length()); } @@ -82,11 +93,8 @@ inline std::string ToUtf8(const std::wstring& wstr) { #endif // WEBRTC_WIN -// Remove leading and trailing whitespaces. -std::string string_trim(const std::string& s); - // TODO(jonasolsson): replace with absl::Hex when that becomes available. -std::string ToHex(const int i); +std::string ToHex(int i); // CompileTimeString comprises of a string-like object which can be used as a // regular const char* in compile time and supports concatenation. Useful for diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/strings/json.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/strings/json.cc index 99664404cf..5cf153c926 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/strings/json.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/strings/json.cc @@ -14,6 +14,7 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/string_encode.h" namespace rtc { @@ -240,46 +241,47 @@ bool GetDoubleFromJsonArray(const Json::Value& in, size_t n, double* out) { } bool GetValueFromJsonObject(const Json::Value& in, - const std::string& k, + absl::string_view k, Json::Value* out) { - if (!in.isObject() || !in.isMember(k)) { + std::string k_str(k); + if (!in.isObject() || !in.isMember(k_str)) { return false; } - *out = in[k]; + *out = in[k_str]; return true; } bool GetIntFromJsonObject(const Json::Value& in, - const std::string& k, + absl::string_view k, int* out) { Json::Value x; return GetValueFromJsonObject(in, k, &x) && GetIntFromJson(x, out); } bool GetUIntFromJsonObject(const Json::Value& in, - const std::string& k, + absl::string_view k, unsigned int* out) { Json::Value x; return GetValueFromJsonObject(in, k, &x) && GetUIntFromJson(x, out); } bool GetStringFromJsonObject(const Json::Value& in, - const std::string& k, + absl::string_view k, std::string* out) { Json::Value x; return GetValueFromJsonObject(in, k, &x) && GetStringFromJson(x, out); } bool GetBoolFromJsonObject(const Json::Value& in, - const std::string& k, + absl::string_view k, bool* out) { Json::Value x; return GetValueFromJsonObject(in, k, &x) && GetBoolFromJson(x, out); } bool GetDoubleFromJsonObject(const Json::Value& in, - const std::string& k, + absl::string_view k, double* out) { Json::Value x; return GetValueFromJsonObject(in, k, 
&x) && GetDoubleFromJson(x, out); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/strings/json.h b/TMessagesProj/jni/voip/webrtc/rtc_base/strings/json.h index 0cb9542c7f..618cb71b04 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/strings/json.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/strings/json.h @@ -14,6 +14,8 @@ #include #include +#include "absl/strings/string_view.h" + #if !defined(WEBRTC_EXTERNAL_JSON) #include "json/json.h" #else @@ -62,22 +64,20 @@ Json::Value DoubleVectorToJsonArray(const std::vector& in); // Pull values out of a JSON object. bool GetValueFromJsonObject(const Json::Value& in, - const std::string& k, + absl::string_view k, Json::Value* out); -bool GetIntFromJsonObject(const Json::Value& in, - const std::string& k, - int* out); +bool GetIntFromJsonObject(const Json::Value& in, absl::string_view k, int* out); bool GetUIntFromJsonObject(const Json::Value& in, - const std::string& k, + absl::string_view k, unsigned int* out); bool GetStringFromJsonObject(const Json::Value& in, - const std::string& k, + absl::string_view k, std::string* out); bool GetBoolFromJsonObject(const Json::Value& in, - const std::string& k, + absl::string_view k, bool* out); bool GetDoubleFromJsonObject(const Json::Value& in, - const std::string& k, + absl::string_view k, double* out); // Writes out a Json value as a string. diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/strings/string_builder.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/strings/string_builder.cc index 7536cd77dd..a419b0b3cc 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/strings/string_builder.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/strings/string_builder.cc @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_minmax.h" @@ -26,16 +27,20 @@ SimpleStringBuilder::SimpleStringBuilder(rtc::ArrayView buffer) RTC_DCHECK(IsConsistent()); } -SimpleStringBuilder& SimpleStringBuilder::operator<<(const char* str) { - return Append(str, strlen(str)); -} - SimpleStringBuilder& SimpleStringBuilder::operator<<(char ch) { - return Append(&ch, 1); + return operator<<(absl::string_view(&ch, 1)); } -SimpleStringBuilder& SimpleStringBuilder::operator<<(const std::string& str) { - return Append(str.c_str(), str.length()); +SimpleStringBuilder& SimpleStringBuilder::operator<<(absl::string_view str) { + RTC_DCHECK_LT(size_ + str.length(), buffer_.size()) + << "Buffer size was insufficient"; + const size_t chars_added = + rtc::SafeMin(str.length(), buffer_.size() - size_ - 1); + memcpy(&buffer_[size_], str.data(), chars_added); + size_ += chars_added; + buffer_[size_] = '\0'; + RTC_DCHECK(IsConsistent()); + return *this; } // Numeric conversion routines. @@ -106,18 +111,6 @@ SimpleStringBuilder& SimpleStringBuilder::AppendFormat(const char* fmt, ...) { return *this; } -SimpleStringBuilder& SimpleStringBuilder::Append(const char* str, - size_t length) { - RTC_DCHECK_LT(size_ + length, buffer_.size()) - << "Buffer size was insufficient"; - const size_t chars_added = rtc::SafeMin(length, buffer_.size() - size_ - 1); - memcpy(&buffer_[size_], str, chars_added); - size_ += chars_added; - buffer_[size_] = '\0'; - RTC_DCHECK(IsConsistent()); - return *this; -} - StringBuilder& StringBuilder::AppendFormat(const char* fmt, ...) 
{ va_list args, copy; va_start(args, fmt); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/strings/string_builder.h b/TMessagesProj/jni/voip/webrtc/rtc_base/strings/string_builder.h index 6fe478ce4c..00986371d3 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/strings/string_builder.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/strings/string_builder.h @@ -32,9 +32,8 @@ class SimpleStringBuilder { SimpleStringBuilder(const SimpleStringBuilder&) = delete; SimpleStringBuilder& operator=(const SimpleStringBuilder&) = delete; - SimpleStringBuilder& operator<<(const char* str); SimpleStringBuilder& operator<<(char ch); - SimpleStringBuilder& operator<<(const std::string& str); + SimpleStringBuilder& operator<<(absl::string_view str); SimpleStringBuilder& operator<<(int i); SimpleStringBuilder& operator<<(unsigned i); SimpleStringBuilder& operator<<(long i); // NOLINT @@ -61,10 +60,6 @@ class SimpleStringBuilder { SimpleStringBuilder& AppendFormat(const char* fmt, ...); - // An alternate way from operator<<() to append a string. This variant is - // slightly more efficient when the length of the string to append, is known. - SimpleStringBuilder& Append(const char* str, size_t length); - private: bool IsConsistent() const { return size_ <= buffer_.size() - 1 && buffer_[size_] == '\0'; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex.cc deleted file mode 100644 index 6c2d6ff7f0..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex.cc +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2020 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/synchronization/mutex.h" - -#include "rtc_base/checks.h" -#include "rtc_base/synchronization/yield.h" - -namespace webrtc { - -#if !defined(WEBRTC_ABSL_MUTEX) -void GlobalMutex::Lock() { - while (mutex_locked_.exchange(1)) { - YieldCurrentThread(); - } -} - -void GlobalMutex::Unlock() { - int old = mutex_locked_.exchange(0); - RTC_DCHECK_EQ(old, 1) << "Unlock called without calling Lock first"; -} - -GlobalMutexLock::GlobalMutexLock(GlobalMutex* mutex) : mutex_(mutex) { - mutex_->Lock(); -} - -GlobalMutexLock::~GlobalMutexLock() { - mutex_->Unlock(); -} -#endif // #if !defined(WEBRTC_ABSL_MUTEX) - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex.h b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex.h index c6af9e9838..2cf0e67c3d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex.h @@ -72,41 +72,6 @@ class RTC_SCOPED_LOCKABLE MutexLock final { Mutex* mutex_; }; -// A mutex used to protect global variables. Do NOT use for other purposes. 
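// [Editor's note -- illustrative sketch, not part of this diff] With the
// operator<<(absl::string_view) overload above, SimpleStringBuilder keeps its
// fixed-buffer, allocation-free behaviour while accepting any string-like
// argument. Assumes the pre-existing ArrayView<char> constructor and str()
// accessor are untouched by this diff.
#include "rtc_base/strings/string_builder.h"

void StringBuilderExample() {
  char buf[64];
  rtc::SimpleStringBuilder sb(buf);
  sb << "rtt=" << 42 << "ms";  // string_view and int overloads
  // sb.str() now points at "rtt=42ms", NUL-terminated inside buf.
}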
-#if defined(WEBRTC_ABSL_MUTEX) -using GlobalMutex = absl::Mutex; -using GlobalMutexLock = absl::MutexLock; -#else -class RTC_LOCKABLE GlobalMutex final { - public: - GlobalMutex(const GlobalMutex&) = delete; - GlobalMutex& operator=(const GlobalMutex&) = delete; - - constexpr explicit GlobalMutex(absl::ConstInitType /*unused*/) - : mutex_locked_(0) {} - - void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION(); - void Unlock() RTC_UNLOCK_FUNCTION(); - - private: - std::atomic mutex_locked_; // 0 means lock not taken, 1 means taken. -}; - -// GlobalMutexLock, for serializing execution through a scope. -class RTC_SCOPED_LOCKABLE GlobalMutexLock final { - public: - GlobalMutexLock(const GlobalMutexLock&) = delete; - GlobalMutexLock& operator=(const GlobalMutexLock&) = delete; - - explicit GlobalMutexLock(GlobalMutex* mutex) - RTC_EXCLUSIVE_LOCK_FUNCTION(mutex_); - ~GlobalMutexLock() RTC_UNLOCK_FUNCTION(); - - private: - GlobalMutex* mutex_; -}; -#endif // if defined(WEBRTC_ABSL_MUTEX) - } // namespace webrtc #endif // RTC_BASE_SYNCHRONIZATION_MUTEX_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker_internal.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker_internal.cc index c03ee94d1b..2612e9ee84 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker_internal.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker_internal.cc @@ -104,13 +104,5 @@ std::string SequenceCheckerImpl::ExpectationToString() const { } #endif // RTC_DCHECK_IS_ON -std::string ExpectationToString(const SequenceCheckerImpl* checker) { -#if RTC_DCHECK_IS_ON - return checker->ExpectationToString(); -#else - return std::string(); -#endif -} - } // namespace webrtc_sequence_checker_internal } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker_internal.h b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker_internal.h index f7ac6de125..a20fbb0a54 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker_internal.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker_internal.h @@ -63,27 +63,22 @@ class SequenceCheckerDoNothing { void Detach() {} }; -// Helper class used by RTC_DCHECK_RUN_ON (see example usage below). -class RTC_SCOPED_LOCKABLE SequenceCheckerScope { - public: - template - explicit SequenceCheckerScope(const ThreadLikeObject* thread_like_object) - RTC_EXCLUSIVE_LOCK_FUNCTION(thread_like_object) {} - SequenceCheckerScope(const SequenceCheckerScope&) = delete; - SequenceCheckerScope& operator=(const SequenceCheckerScope&) = delete; - ~SequenceCheckerScope() RTC_UNLOCK_FUNCTION() {} - - template - static bool IsCurrent(const ThreadLikeObject* thread_like_object) { - return thread_like_object->IsCurrent(); - } -}; - -std::string ExpectationToString(const SequenceCheckerImpl* checker); +template +std::enable_if_t, + std::string> +ExpectationToString(const ThreadLikeObject* checker) { +#if RTC_DCHECK_IS_ON + return checker->ExpectationToString(); +#else + return std::string(); +#endif +} // Catch-all implementation for types other than explicitly supported above. 
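// [Editor's note -- illustrative sketch, not part of this diff] Generic shape
// of the enable_if dispatch introduced above: one overload is chosen for types
// derived from the checker implementation, the catch-all below handles
// everything else. All names here (FakeChecker, Describe) are hypothetical.
#include <string>
#include <type_traits>

struct FakeChecker {
  std::string ExpectationToString() const { return "expected sequence"; }
};

template <typename T>
std::enable_if_t<std::is_base_of_v<FakeChecker, T>, std::string> Describe(
    const T* checker) {
  return checker->ExpectationToString();
}

template <typename T>
std::enable_if_t<!std::is_base_of_v<FakeChecker, T>, std::string> Describe(
    const T*) {
  return std::string();
}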
template -std::string ExpectationToString(const ThreadLikeObject*) { +std::enable_if_t, + std::string> +ExpectationToString(const ThreadLikeObject*) { return std::string(); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system/arch.h b/TMessagesProj/jni/voip/webrtc/rtc_base/system/arch.h index be2367b85f..9d945ef7fc 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/system/arch.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/system/arch.h @@ -73,6 +73,16 @@ #elif defined(__riscv) && __riscv_xlen == 32 #define WEBRTC_ARCH_32_BITS #define WEBRTC_ARCH_LITTLE_ENDIAN +#elif defined(__loongarch32) +#define WEBRTC_ARCH_LOONG_FAMILY +#define WEBRTC_ARCH_LOONG32 +#define WEBRTC_ARCH_32_BITS +#define WEBRTC_ARCH_LITTLE_ENDIAN +#elif defined(__loongarch64) +#define WEBRTC_ARCH_LOONG_FAMILY +#define WEBRTC_ARCH_LOONG64 +#define WEBRTC_ARCH_64_BITS +#define WEBRTC_ARCH_LITTLE_ENDIAN #elif defined(__pnacl__) #define WEBRTC_ARCH_32_BITS #define WEBRTC_ARCH_LITTLE_ENDIAN diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.cc index 3e49315793..f7befc6dc5 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.cc @@ -9,10 +9,12 @@ */ #include "rtc_base/system/file_wrapper.h" -#include "rtc_base/numerics/safe_conversions.h" #include +#include "absl/strings/string_view.h" +#include "rtc_base/numerics/safe_conversions.h" + #ifdef _WIN32 #include #else @@ -23,14 +25,17 @@ namespace webrtc { namespace { -FILE* FileOpen(const char* file_name_utf8, bool read_only, int* error) { +FILE* FileOpen(absl::string_view file_name_utf8, bool read_only, int* error) { + RTC_CHECK_EQ(file_name_utf8.find_first_of('\0'), absl::string_view::npos) + << "Invalid filename, containing NUL character"; + std::string file_name(file_name_utf8); #if defined(_WIN32) - int len = MultiByteToWideChar(CP_UTF8, 0, file_name_utf8, -1, nullptr, 0); + int len = MultiByteToWideChar(CP_UTF8, 0, file_name.c_str(), -1, nullptr, 0); std::wstring wstr(len, 0); - MultiByteToWideChar(CP_UTF8, 0, file_name_utf8, -1, &wstr[0], len); + MultiByteToWideChar(CP_UTF8, 0, file_name.c_str(), -1, &wstr[0], len); FILE* file = _wfopen(wstr.c_str(), read_only ? L"rb" : L"wb"); #else - FILE* file = fopen(file_name_utf8, read_only ? "rb" : "wb"); + FILE* file = fopen(file_name.c_str(), read_only ? 
"rb" : "wb"); #endif if (!file && error) { *error = errno; @@ -38,36 +43,19 @@ FILE* FileOpen(const char* file_name_utf8, bool read_only, int* error) { return file; } -const char* GetCstrCheckNoEmbeddedNul(const std::string& s) { - const char* p = s.c_str(); - RTC_CHECK_EQ(strlen(p), s.size()) - << "Invalid filename, containing NUL character"; - return p; -} } // namespace // static -FileWrapper FileWrapper::OpenReadOnly(const char* file_name_utf8) { +FileWrapper FileWrapper::OpenReadOnly(absl::string_view file_name_utf8) { return FileWrapper(FileOpen(file_name_utf8, true, nullptr)); } // static -FileWrapper FileWrapper::OpenReadOnly(const std::string& file_name_utf8) { - return OpenReadOnly(GetCstrCheckNoEmbeddedNul(file_name_utf8)); -} - -// static -FileWrapper FileWrapper::OpenWriteOnly(const char* file_name_utf8, +FileWrapper FileWrapper::OpenWriteOnly(absl::string_view file_name_utf8, int* error /*=nullptr*/) { return FileWrapper(FileOpen(file_name_utf8, false, error)); } -// static -FileWrapper FileWrapper::OpenWriteOnly(const std::string& file_name_utf8, - int* error /*=nullptr*/) { - return OpenWriteOnly(GetCstrCheckNoEmbeddedNul(file_name_utf8), error); -} - FileWrapper::FileWrapper(FileWrapper&& other) { operator=(std::move(other)); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.h b/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.h index b55b0b9864..5e1e3d6a16 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.h @@ -16,6 +16,8 @@ #include +#include "absl/strings/string_view.h" + // Implementation that can read (exclusive) or write from/to a file. namespace webrtc { @@ -34,11 +36,8 @@ class FileWrapper final { // returned object to check if the open operation was successful. On failure, // and if `error` is non-null, the system errno value is stored at |*error|. // The file is closed by the destructor. - static FileWrapper OpenReadOnly(const char* file_name_utf8); - static FileWrapper OpenReadOnly(const std::string& file_name_utf8); - static FileWrapper OpenWriteOnly(const char* file_name_utf8, - int* error = nullptr); - static FileWrapper OpenWriteOnly(const std::string& file_name_utf8, + static FileWrapper OpenReadOnly(absl::string_view file_name_utf8); + static FileWrapper OpenWriteOnly(absl::string_view file_name_utf8, int* error = nullptr); FileWrapper() = default; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system/no_cfi_icall.h b/TMessagesProj/jni/voip/webrtc/rtc_base/system/no_cfi_icall.h new file mode 100644 index 0000000000..42d6c9c2ca --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/system/no_cfi_icall.h @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_SYSTEM_NO_CFI_ICALL_H_ +#define RTC_BASE_SYSTEM_NO_CFI_ICALL_H_ + +#include "rtc_base/sanitizer.h" + +// DISABLE_CFI_ICALL -- Disable Control Flow Integrity indirect call checks. +// Note that the same macro is defined in "base/compiler_specific.h". +// Only use this when building standalone WebRTC. 
+#if !defined(WEBRTC_CHROMIUM_BUILD) +#if !defined(DISABLE_CFI_ICALL) +#if defined(WEBRTC_WIN) +// Windows also needs __declspec(guard(nocf)). +#define DISABLE_CFI_ICALL RTC_NO_SANITIZE("cfi-icall") __declspec(guard(nocf)) +#else +#define DISABLE_CFI_ICALL RTC_NO_SANITIZE("cfi-icall") +#endif // defined(WEBRTC_WIN) +#endif // !defined(DISABLE_CFI_ICALL) +#if !defined(DISABLE_CFI_ICALL) +#define DISABLE_CFI_ICALL +#endif +#endif // !defined(WEBRTC_CHROMIUM_BUILD) + +#endif // RTC_BASE_SYSTEM_NO_CFI_ICALL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system/no_unique_address.h b/TMessagesProj/jni/voip/webrtc/rtc_base/system/no_unique_address.h index 77e7a99526..6bede2c6b6 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/system/no_unique_address.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/system/no_unique_address.h @@ -24,8 +24,10 @@ // should add support for it starting from C++20. Among clang compilers, // clang-cl doesn't support it yet and support is unclear also when the target // platform is iOS. -#if ((defined(__clang__) && !defined(_MSC_VER) && !defined(WEBRTC_IOS)) || \ - __cplusplus > 201703L) +#ifndef __has_cpp_attribute +#define __has_cpp_attribute(__x) 0 +#endif +#if __has_cpp_attribute(no_unique_address) // NOLINTNEXTLINE(whitespace/braces) #define RTC_NO_UNIQUE_ADDRESS [[no_unique_address]] #else diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system/thread_registry.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/system/thread_registry.cc deleted file mode 100644 index b0e83ca1e9..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/system/thread_registry.cc +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/system/thread_registry.h" - -#include -#include - -#include "absl/base/attributes.h" -#include "rtc_base/logging.h" -#include "rtc_base/platform_thread_types.h" -#include "rtc_base/synchronization/mutex.h" -#include "sdk/android/native_api/stacktrace/stacktrace.h" - -namespace webrtc { - -namespace { - -struct ThreadData { - const rtc::PlatformThreadId thread_id; - const rtc::Location location; -}; - -// The map of registered threads, and the lock that protects it. We create the -// map on first use, and never destroy it. -ABSL_CONST_INIT GlobalMutex g_thread_registry_lock(absl::kConstInit); -ABSL_CONST_INIT std::map* - g_registered_threads = nullptr; - -} // namespace - -ScopedRegisterThreadForDebugging::ScopedRegisterThreadForDebugging( - rtc::Location location) { - GlobalMutexLock gls(&g_thread_registry_lock); - if (g_registered_threads == nullptr) { - g_registered_threads = - new std::map(); - } - const auto result = g_registered_threads->insert( - std::make_pair(this, ThreadData{rtc::CurrentThreadId(), location})); - RTC_DCHECK(result.second); // Insertion succeeded without collisions. 
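// [Editor's note -- illustrative sketch, not part of this diff] The
// no_unique_address.h hunk above replaces a compiler allow-list with a feature
// test. Generic form of the pattern, using a hypothetical macro name:
#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0  // compilers without the probe: treat as unsupported
#endif
#if __has_cpp_attribute(no_unique_address)
#define MY_NO_UNIQUE_ADDRESS [[no_unique_address]]
#else
#define MY_NO_UNIQUE_ADDRESS
#endif

struct EmptyTag {};
struct Wrapper {
  MY_NO_UNIQUE_ADDRESS EmptyTag tag;  // may share storage instead of adding padding
  int value;
};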
-} - -ScopedRegisterThreadForDebugging::~ScopedRegisterThreadForDebugging() { - GlobalMutexLock gls(&g_thread_registry_lock); - RTC_DCHECK(g_registered_threads != nullptr); - const int num_erased = g_registered_threads->erase(this); - RTC_DCHECK_EQ(num_erased, 1); -} - -void PrintStackTracesOfRegisteredThreads() { - GlobalMutexLock gls(&g_thread_registry_lock); - if (g_registered_threads == nullptr) { - return; - } - for (const auto& e : *g_registered_threads) { - const ThreadData& td = e.second; - RTC_LOG(LS_WARNING) << "Thread " << td.thread_id << " registered at " - << td.location.ToString() << ":"; - RTC_LOG(LS_WARNING) << StackTraceToString(GetStackTrace(td.thread_id)); - } -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system/thread_registry.h b/TMessagesProj/jni/voip/webrtc/rtc_base/system/thread_registry.h deleted file mode 100644 index 0e3187b884..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/system/thread_registry.h +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_SYSTEM_THREAD_REGISTRY_H_ -#define RTC_BASE_SYSTEM_THREAD_REGISTRY_H_ - -#include "rtc_base/location.h" - -namespace webrtc { - -class ScopedRegisterThreadForDebugging { - public: -#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) - explicit ScopedRegisterThreadForDebugging(rtc::Location location); - ~ScopedRegisterThreadForDebugging(); -#else - explicit ScopedRegisterThreadForDebugging(rtc::Location) {} -#endif - - // Not movable or copyable, because we can't duplicate the resource it owns, - // and it needs a constant address. - ScopedRegisterThreadForDebugging(const ScopedRegisterThreadForDebugging&) = - delete; - ScopedRegisterThreadForDebugging(ScopedRegisterThreadForDebugging&&) = delete; - ScopedRegisterThreadForDebugging& operator=( - const ScopedRegisterThreadForDebugging&) = delete; - ScopedRegisterThreadForDebugging& operator=( - ScopedRegisterThreadForDebugging&&) = delete; -}; - -#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) -void PrintStackTracesOfRegisteredThreads(); -#else -inline void PrintStackTracesOfRegisteredThreads() {} -#endif - -} // namespace webrtc - -#endif // RTC_BASE_SYSTEM_THREAD_REGISTRY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system_time.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/system_time.cc index d53d923148..058e6c2990 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/system_time.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/system_time.cc @@ -69,6 +69,10 @@ int64_t SystemTimeNanos() { #elif defined(WINUWP) ticks = WinUwpSystemTimeNanos(); #elif defined(WEBRTC_WIN) + // TODO(webrtc:14601): Fix the volatile increment instead of suppressing the + // warning. +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-volatile" static volatile LONG last_timegettime = 0; static volatile int64_t num_wrap_timegettime = 0; volatile LONG* last_timegettime_ptr = &last_timegettime; @@ -87,6 +91,7 @@ int64_t SystemTimeNanos() { // TODO(deadbeef): Calculate with nanosecond precision. Otherwise, we're // just wasting a multiply and divide when doing Time() on Windows. 
ticks = ticks * kNumNanosecsPerMillisec; +#pragma clang diagnostic pop #else #error Unsupported platform. #endif diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system_time.h b/TMessagesProj/jni/voip/webrtc/rtc_base/system_time.h index d86e94adf4..c0ebc2a217 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/system_time.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/system_time.h @@ -11,6 +11,8 @@ #ifndef RTC_BASE_SYSTEM_TIME_H_ #define RTC_BASE_SYSTEM_TIME_H_ +#include + namespace rtc { // Returns the actual system time, even if a clock is set for testing. diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.cc index 965a4d8c69..7c972ed152 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.cc @@ -29,13 +29,4 @@ bool TaskQueue::IsCurrent() const { return impl_->IsCurrent(); } -void TaskQueue::PostTask(std::unique_ptr task) { - return impl_->PostTask(std::move(task)); -} - -void TaskQueue::PostDelayedTask(std::unique_ptr task, - uint32_t milliseconds) { - return impl_->PostDelayedTask(std::move(task), milliseconds); -} - } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.h b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.h index 86d35976cb..cae95340bc 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.h @@ -16,13 +16,11 @@ #include #include +#include "absl/functional/any_invocable.h" #include "absl/memory/memory.h" -#include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/system/rtc_export.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread_annotations.h" namespace rtc { @@ -83,50 +81,29 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueue { webrtc::TaskQueueDeleter> task_queue); ~TaskQueue(); + TaskQueue(const TaskQueue&) = delete; + TaskQueue& operator=(const TaskQueue&) = delete; + // Used for DCHECKing the current queue. bool IsCurrent() const; // Returns non-owning pointer to the task queue implementation. webrtc::TaskQueueBase* Get() { return impl_; } - // TODO(tommi): For better debuggability, implement RTC_FROM_HERE. - - // Ownership of the task is passed to PostTask. - void PostTask(std::unique_ptr task); - - // Schedules a task to execute a specified number of milliseconds from when - // the call is made. The precision should be considered as "best effort" - // and in some cases, such as on Windows when all high precision timers have - // been used up, can be off by as much as 15 millseconds (although 8 would be - // more likely). This can be mitigated by limiting the use of delayed tasks. - void PostDelayedTask(std::unique_ptr task, - uint32_t milliseconds); - - // std::enable_if is used here to make sure that calls to PostTask() with - // std::unique_ptr would not end up being - // caught by this template. - template >::value>::type* = nullptr> - void PostTask(Closure&& closure) { - PostTask(webrtc::ToQueuedTask(std::forward(closure))); + void PostTask(absl::AnyInvocable task) { + impl_->PostTask(std::move(task)); } - - // See documentation above for performance expectations. 
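// [Editor's note -- illustrative sketch, not part of this diff] Posting with
// the AnyInvocable/TimeDelta based API in this hunk; the factory call mirrors
// the one task_queue_for_test.cc uses later in the diff.
#include "api/task_queue/default_task_queue_factory.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/units/time_delta.h"
#include "rtc_base/task_queue.h"

void PostExample() {
  auto factory = webrtc::CreateDefaultTaskQueueFactory();
  rtc::TaskQueue queue(factory->CreateTaskQueue(
      "example", webrtc::TaskQueueFactory::Priority::NORMAL));
  queue.PostTask([] { /* runs on `queue` */ });
  queue.PostDelayedTask([] { /* runs roughly 20 ms later, best effort */ },
                        webrtc::TimeDelta::Millis(20));
}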
- template >::value>::type* = nullptr> - void PostDelayedTask(Closure&& closure, uint32_t milliseconds) { - PostDelayedTask(webrtc::ToQueuedTask(std::forward(closure)), - milliseconds); + void PostDelayedTask(absl::AnyInvocable task, + webrtc::TimeDelta delay) { + impl_->PostDelayedTask(std::move(task), delay); + } + void PostDelayedHighPrecisionTask(absl::AnyInvocable task, + webrtc::TimeDelta delay) { + impl_->PostDelayedHighPrecisionTask(std::move(task), delay); } private: webrtc::TaskQueueBase* const impl_; - - RTC_DISALLOW_COPY_AND_ASSIGN(TaskQueue); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.cc new file mode 100644 index 0000000000..cb6b23ceae --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.cc @@ -0,0 +1,21 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/task_queue_for_test.h" + +#include "api/task_queue/default_task_queue_factory.h" + +namespace webrtc { + +TaskQueueForTest::TaskQueueForTest(absl::string_view name, Priority priority) + : TaskQueue( + CreateDefaultTaskQueueFactory()->CreateTaskQueue(name, priority)) {} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.h b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.h new file mode 100644 index 0000000000..4c7f842abe --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.h @@ -0,0 +1,68 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef RTC_BASE_TASK_QUEUE_FOR_TEST_H_ +#define RTC_BASE_TASK_QUEUE_FOR_TEST_H_ + +#include + +#include "absl/cleanup/cleanup.h" +#include "absl/strings/string_view.h" +#include "api/function_view.h" +#include "api/task_queue/task_queue_base.h" +#include "rtc_base/checks.h" +#include "rtc_base/event.h" +#include "rtc_base/task_queue.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +inline void SendTask(TaskQueueBase* task_queue, + rtc::FunctionView task) { + if (task_queue->IsCurrent()) { + task(); + return; + } + + rtc::Event event; + absl::Cleanup cleanup = [&event] { event.Set(); }; + task_queue->PostTask([task, cleanup = std::move(cleanup)] { task(); }); + RTC_CHECK(event.Wait(/*give_up_after=*/rtc::Event::kForever, + /*warn_after=*/TimeDelta::Seconds(10))); +} + +class RTC_LOCKABLE TaskQueueForTest : public rtc::TaskQueue { + public: + using rtc::TaskQueue::TaskQueue; + explicit TaskQueueForTest(absl::string_view name = "TestQueue", + Priority priority = Priority::NORMAL); + TaskQueueForTest(const TaskQueueForTest&) = delete; + TaskQueueForTest& operator=(const TaskQueueForTest&) = delete; + ~TaskQueueForTest() = default; + + // A convenience, test-only method that blocks the current thread while + // a task executes on the task queue. + void SendTask(rtc::FunctionView task) { + ::webrtc::SendTask(Get(), task); + } + + // Wait for the completion of all tasks posted prior to the + // WaitForPreviouslyPostedTasks() call. + void WaitForPreviouslyPostedTasks() { + RTC_DCHECK(!Get()->IsCurrent()); + // Post an empty task on the queue and wait for it to finish, to ensure + // that all already posted tasks on the queue get executed. + SendTask([]() {}); + } +}; + +} // namespace webrtc + +#endif // RTC_BASE_TASK_QUEUE_FOR_TEST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_gcd.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_gcd.cc index 2276f635c5..e498ba3017 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_gcd.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_gcd.cc @@ -19,9 +19,10 @@ #include +#include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" -#include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/system/gcd_helpers.h" @@ -40,22 +41,24 @@ int TaskQueuePriorityToGCD(TaskQueueFactory::Priority priority) { } } -class TaskQueueGcd : public TaskQueueBase { +class TaskQueueGcd final : public TaskQueueBase { public: TaskQueueGcd(absl::string_view queue_name, int gcd_priority); void Delete() override; - void PostTask(std::unique_ptr task) override; - void PostDelayedTask(std::unique_ptr task, - uint32_t milliseconds) override; + void PostTask(absl::AnyInvocable task) override; + void PostDelayedTask(absl::AnyInvocable task, + TimeDelta delay) override; + void PostDelayedHighPrecisionTask(absl::AnyInvocable task, + TimeDelta delay) override; private: struct TaskContext { - TaskContext(TaskQueueGcd* queue, std::unique_ptr task) + TaskContext(TaskQueueGcd* queue, absl::AnyInvocable task) : queue(queue), task(std::move(task)) {} TaskQueueGcd* const queue; - std::unique_ptr task; + absl::AnyInvocable task; }; ~TaskQueueGcd() override; @@ -97,17 +100,22 @@ void TaskQueueGcd::Delete() { dispatch_release(queue_); } -void TaskQueueGcd::PostTask(std::unique_ptr task) { +void TaskQueueGcd::PostTask(absl::AnyInvocable task) { auto* context = new TaskContext(this, 
std::move(task)); dispatch_async_f(queue_, context, &RunTask); } -void TaskQueueGcd::PostDelayedTask(std::unique_ptr task, - uint32_t milliseconds) { +void TaskQueueGcd::PostDelayedTask(absl::AnyInvocable task, + TimeDelta delay) { auto* context = new TaskContext(this, std::move(task)); - dispatch_after_f( - dispatch_time(DISPATCH_TIME_NOW, milliseconds * NSEC_PER_MSEC), queue_, - context, &RunTask); + dispatch_after_f(dispatch_time(DISPATCH_TIME_NOW, delay.us() * NSEC_PER_USEC), + queue_, context, &RunTask); +} + +void TaskQueueGcd::PostDelayedHighPrecisionTask( + absl::AnyInvocable task, + TimeDelta delay) { + PostDelayedTask(std::move(task), delay); } // static @@ -117,12 +125,10 @@ void TaskQueueGcd::RunTask(void* task_context) { return; CurrentTaskQueueSetter set_current(tc->queue); - auto* task = tc->task.release(); - if (task->Run()) { - // Delete the task before CurrentTaskQueueSetter clears state that this code - // is running on the task queue. - delete task; - } + std::move(tc->task)(); + // Delete the task before CurrentTaskQueueSetter clears state that this code + // is running on the task queue. + tc = nullptr; } // static diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_libevent.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_libevent.cc index 4f56400741..7aa7e37b02 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_libevent.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_libevent.cc @@ -24,10 +24,10 @@ #include #include "absl/container/inlined_vector.h" +#include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" -#include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" -#include "base/third_party/libevent/event.h" +#include "api/units/time_delta.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -36,6 +36,7 @@ #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" +#include "base/third_party/libevent/event.h" namespace webrtc { namespace { @@ -106,14 +107,18 @@ class TaskQueueLibevent final : public TaskQueueBase { TaskQueueLibevent(absl::string_view queue_name, rtc::ThreadPriority priority); void Delete() override; - void PostTask(std::unique_ptr task) override; - void PostDelayedTask(std::unique_ptr task, - uint32_t milliseconds) override; + void PostTask(absl::AnyInvocable task) override; + void PostDelayedTask(absl::AnyInvocable task, + TimeDelta delay) override; + void PostDelayedHighPrecisionTask(absl::AnyInvocable task, + TimeDelta delay) override; private: - class SetTimerTask; struct TimerEvent; + void PostDelayedTaskOnTaskQueue(absl::AnyInvocable task, + TimeDelta delay); + ~TaskQueueLibevent() override = default; static void OnWakeup(int socket, short flags, void* context); // NOLINT @@ -126,43 +131,20 @@ class TaskQueueLibevent final : public TaskQueueBase { event wakeup_event_; rtc::PlatformThread thread_; Mutex pending_lock_; - absl::InlinedVector, 4> pending_ + absl::InlinedVector, 4> pending_ RTC_GUARDED_BY(pending_lock_); // Holds a list of events pending timers for cleanup when the loop exits. 
std::list pending_timers_; }; struct TaskQueueLibevent::TimerEvent { - TimerEvent(TaskQueueLibevent* task_queue, std::unique_ptr task) + TimerEvent(TaskQueueLibevent* task_queue, absl::AnyInvocable task) : task_queue(task_queue), task(std::move(task)) {} ~TimerEvent() { event_del(&ev); } event ev; TaskQueueLibevent* task_queue; - std::unique_ptr task; -}; - -class TaskQueueLibevent::SetTimerTask : public QueuedTask { - public: - SetTimerTask(std::unique_ptr task, uint32_t milliseconds) - : task_(std::move(task)), - milliseconds_(milliseconds), - posted_(rtc::Time32()) {} - - private: - bool Run() override { - // Compensate for the time that has passed since construction - // and until we got here. - uint32_t post_time = rtc::Time32() - posted_; - TaskQueueLibevent::Current()->PostDelayedTask( - std::move(task_), - post_time > milliseconds_ ? 0 : milliseconds_ - post_time); - return true; - } - - std::unique_ptr task_; - const uint32_t milliseconds_; - const uint32_t posted_; + absl::AnyInvocable task; }; TaskQueueLibevent::TaskQueueLibevent(absl::string_view queue_name, @@ -219,7 +201,7 @@ void TaskQueueLibevent::Delete() { delete this; } -void TaskQueueLibevent::PostTask(std::unique_ptr task) { +void TaskQueueLibevent::PostTask(absl::AnyInvocable task) { { MutexLock lock(&pending_lock_); bool had_pending_tasks = !pending_.empty(); @@ -242,21 +224,43 @@ void TaskQueueLibevent::PostTask(std::unique_ptr task) { sizeof(message)); } -void TaskQueueLibevent::PostDelayedTask(std::unique_ptr task, - uint32_t milliseconds) { +void TaskQueueLibevent::PostDelayedTaskOnTaskQueue( + absl::AnyInvocable task, + TimeDelta delay) { + // libevent api is not thread safe by default, thus event_add need to be + // called on the `thread_`. + RTC_DCHECK(IsCurrent()); + + TimerEvent* timer = new TimerEvent(this, std::move(task)); + EventAssign(&timer->ev, event_base_, -1, 0, &TaskQueueLibevent::RunTimer, + timer); + pending_timers_.push_back(timer); + timeval tv = {.tv_sec = rtc::dchecked_cast(delay.us() / 1'000'000), + .tv_usec = rtc::dchecked_cast(delay.us() % 1'000'000)}; + event_add(&timer->ev, &tv); +} + +void TaskQueueLibevent::PostDelayedTask(absl::AnyInvocable task, + TimeDelta delay) { if (IsCurrent()) { - TimerEvent* timer = new TimerEvent(this, std::move(task)); - EventAssign(&timer->ev, event_base_, -1, 0, &TaskQueueLibevent::RunTimer, - timer); - pending_timers_.push_back(timer); - timeval tv = {rtc::dchecked_cast(milliseconds / 1000), - rtc::dchecked_cast(milliseconds % 1000) * 1000}; - event_add(&timer->ev, &tv); + PostDelayedTaskOnTaskQueue(std::move(task), delay); } else { - PostTask(std::make_unique(std::move(task), milliseconds)); + int64_t posted_us = rtc::TimeMicros(); + PostTask([posted_us, delay, task = std::move(task), this]() mutable { + // Compensate for the time that has passed since the posting. 
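// [Editor's note -- illustrative sketch, not part of this diff] The
// compensation below in arithmetic form: if about 3 ms pass between the
// foreign-thread PostDelayedTask() and the wrapper running on the queue, a
// requested 10 ms delay is re-armed as max(10 - 3, 0) = 7 ms. Hypothetical
// helper:
#include <algorithm>
#include <cstdint>
#include "api/units/time_delta.h"
#include "rtc_base/time_utils.h"

webrtc::TimeDelta RemainingDelay(webrtc::TimeDelta requested,
                                 int64_t posted_us) {
  webrtc::TimeDelta in_flight =
      webrtc::TimeDelta::Micros(rtc::TimeMicros() - posted_us);
  return std::max(requested - in_flight, webrtc::TimeDelta::Zero());
}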
+ TimeDelta post_time = TimeDelta::Micros(rtc::TimeMicros() - posted_us); + PostDelayedTaskOnTaskQueue( + std::move(task), std::max(delay - post_time, TimeDelta::Zero())); + }); } } +void TaskQueueLibevent::PostDelayedHighPrecisionTask( + absl::AnyInvocable task, + TimeDelta delay) { + PostDelayedTask(std::move(task), delay); +} + // static void TaskQueueLibevent::OnWakeup(int socket, short flags, // NOLINT @@ -271,19 +275,16 @@ void TaskQueueLibevent::OnWakeup(int socket, event_base_loopbreak(me->event_base_); break; case kRunTasks: { - absl::InlinedVector, 4> tasks; + absl::InlinedVector, 4> tasks; { MutexLock lock(&me->pending_lock_); tasks.swap(me->pending_); } RTC_DCHECK(!tasks.empty()); for (auto& task : tasks) { - if (task->Run()) { - task.reset(); - } else { - // `false` means the task should *not* be deleted. - task.release(); - } + std::move(task)(); + // Prefer to delete the `task` before running the next one. + task = nullptr; } break; } @@ -298,8 +299,7 @@ void TaskQueueLibevent::RunTimer(int fd, short flags, // NOLINT void* context) { TimerEvent* timer = static_cast(context); - if (!timer->task->Run()) - timer->task.release(); + std::move(timer->task)(); timer->task_queue->pending_timers_.remove(timer); delete timer; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_stdlib.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_stdlib.cc index 41da285ee7..3f39ad13b5 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_stdlib.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_stdlib.cc @@ -18,12 +18,14 @@ #include #include +#include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" -#include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/divide_round.h" #include "rtc_base/platform_thread.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -50,53 +52,57 @@ class TaskQueueStdlib final : public TaskQueueBase { ~TaskQueueStdlib() override = default; void Delete() override; - void PostTask(std::unique_ptr task) override; - void PostDelayedTask(std::unique_ptr task, - uint32_t milliseconds) override; + void PostTask(absl::AnyInvocable task) override; + void PostDelayedTask(absl::AnyInvocable task, + TimeDelta delay) override; + void PostDelayedHighPrecisionTask(absl::AnyInvocable task, + TimeDelta delay) override; private: using OrderId = uint64_t; struct DelayedEntryTimeout { - int64_t next_fire_at_ms_{}; - OrderId order_{}; + // TODO(bugs.webrtc.org/13756): Migrate to Timestamp. + int64_t next_fire_at_us{}; + OrderId order{}; bool operator<(const DelayedEntryTimeout& o) const { - return std::tie(next_fire_at_ms_, order_) < - std::tie(o.next_fire_at_ms_, o.order_); + return std::tie(next_fire_at_us, order) < + std::tie(o.next_fire_at_us, o.order); } }; struct NextTask { - bool final_task_{false}; - std::unique_ptr run_task_; - int64_t sleep_time_ms_{}; + bool final_task = false; + absl::AnyInvocable run_task; + TimeDelta sleep_time = rtc::Event::kForever; }; + static rtc::PlatformThread InitializeThread(TaskQueueStdlib* me, + absl::string_view queue_name, + rtc::ThreadPriority priority); + NextTask GetNextTask(); void ProcessTasks(); void NotifyWake(); - // Indicates if the thread has started. - rtc::Event started_; - // Signaled whenever a new task is pending. 
rtc::Event flag_notify_; Mutex pending_lock_; // Indicates if the worker thread needs to shutdown now. - bool thread_should_quit_ RTC_GUARDED_BY(pending_lock_){false}; + bool thread_should_quit_ RTC_GUARDED_BY(pending_lock_) = false; // Holds the next order to use for the next task to be // put into one of the pending queues. - OrderId thread_posting_order_ RTC_GUARDED_BY(pending_lock_){}; + OrderId thread_posting_order_ RTC_GUARDED_BY(pending_lock_) = 0; // The list of all pending tasks that need to be processed in the // FIFO queue ordering on the worker thread. - std::queue>> pending_queue_ + std::queue>> pending_queue_ RTC_GUARDED_BY(pending_lock_); // The list of all pending tasks that need to be processed at a future @@ -104,8 +110,8 @@ class TaskQueueStdlib final : public TaskQueueBase { // happen at exactly the same time interval as another task then the // task is processed based on FIFO ordering. std::priority_queue was // considered but rejected due to its inability to extract the - // std::unique_ptr out of the queue without the presence of a hack. - std::map> delayed_queue_ + // move-only value out of the queue without the presence of a hack. + std::map> delayed_queue_ RTC_GUARDED_BY(pending_lock_); // Contains the active worker thread assigned to processing @@ -117,16 +123,24 @@ class TaskQueueStdlib final : public TaskQueueBase { TaskQueueStdlib::TaskQueueStdlib(absl::string_view queue_name, rtc::ThreadPriority priority) - : started_(/*manual_reset=*/false, /*initially_signaled=*/false), - flag_notify_(/*manual_reset=*/false, /*initially_signaled=*/false), - thread_(rtc::PlatformThread::SpawnJoinable( - [this] { - CurrentTaskQueueSetter set_current(this); - ProcessTasks(); - }, - queue_name, - rtc::ThreadAttributes().SetPriority(priority))) { - started_.Wait(rtc::Event::kForever); + : flag_notify_(/*manual_reset=*/false, /*initially_signaled=*/false), + thread_(InitializeThread(this, queue_name, priority)) {} + +// static +rtc::PlatformThread TaskQueueStdlib::InitializeThread( + TaskQueueStdlib* me, + absl::string_view queue_name, + rtc::ThreadPriority priority) { + rtc::Event started; + auto thread = rtc::PlatformThread::SpawnJoinable( + [&started, me] { + CurrentTaskQueueSetter set_current(me); + started.Set(); + me->ProcessTasks(); + }, + queue_name, rtc::ThreadAttributes().SetPriority(priority)); + started.Wait(rtc::Event::kForever); + return thread; } void TaskQueueStdlib::Delete() { @@ -142,43 +156,45 @@ void TaskQueueStdlib::Delete() { delete this; } -void TaskQueueStdlib::PostTask(std::unique_ptr task) { +void TaskQueueStdlib::PostTask(absl::AnyInvocable task) { { MutexLock lock(&pending_lock_); - OrderId order = thread_posting_order_++; - - pending_queue_.push(std::pair>( - order, std::move(task))); + pending_queue_.push( + std::make_pair(++thread_posting_order_, std::move(task))); } NotifyWake(); } -void TaskQueueStdlib::PostDelayedTask(std::unique_ptr task, - uint32_t milliseconds) { - auto fire_at = rtc::TimeMillis() + milliseconds; - - DelayedEntryTimeout delay; - delay.next_fire_at_ms_ = fire_at; +void TaskQueueStdlib::PostDelayedTask(absl::AnyInvocable task, + TimeDelta delay) { + DelayedEntryTimeout delayed_entry; + delayed_entry.next_fire_at_us = rtc::TimeMicros() + delay.us(); { MutexLock lock(&pending_lock_); - delay.order_ = ++thread_posting_order_; - delayed_queue_[delay] = std::move(task); + delayed_entry.order = ++thread_posting_order_; + delayed_queue_[delayed_entry] = std::move(task); } NotifyWake(); } +void 
TaskQueueStdlib::PostDelayedHighPrecisionTask( + absl::AnyInvocable task, + TimeDelta delay) { + PostDelayedTask(std::move(task), delay); +} + TaskQueueStdlib::NextTask TaskQueueStdlib::GetNextTask() { - NextTask result{}; + NextTask result; - auto tick = rtc::TimeMillis(); + const int64_t tick_us = rtc::TimeMicros(); MutexLock lock(&pending_lock_); if (thread_should_quit_) { - result.final_task_ = true; + result.final_task = true; return result; } @@ -186,29 +202,30 @@ TaskQueueStdlib::NextTask TaskQueueStdlib::GetNextTask() { auto delayed_entry = delayed_queue_.begin(); const auto& delay_info = delayed_entry->first; auto& delay_run = delayed_entry->second; - if (tick >= delay_info.next_fire_at_ms_) { + if (tick_us >= delay_info.next_fire_at_us) { if (pending_queue_.size() > 0) { auto& entry = pending_queue_.front(); auto& entry_order = entry.first; auto& entry_run = entry.second; - if (entry_order < delay_info.order_) { - result.run_task_ = std::move(entry_run); + if (entry_order < delay_info.order) { + result.run_task = std::move(entry_run); pending_queue_.pop(); return result; } } - result.run_task_ = std::move(delay_run); + result.run_task = std::move(delay_run); delayed_queue_.erase(delayed_entry); return result; } - result.sleep_time_ms_ = delay_info.next_fire_at_ms_ - tick; + result.sleep_time = TimeDelta::Millis( + DivideRoundUp(delay_info.next_fire_at_us - tick_us, 1'000)); } if (pending_queue_.size() > 0) { auto& entry = pending_queue_.front(); - result.run_task_ = std::move(entry.second); + result.run_task = std::move(entry.second); pending_queue_.pop(); } @@ -216,28 +233,21 @@ TaskQueueStdlib::NextTask TaskQueueStdlib::GetNextTask() { } void TaskQueueStdlib::ProcessTasks() { - started_.Set(); - while (true) { auto task = GetNextTask(); - if (task.final_task_) + if (task.final_task) break; - if (task.run_task_) { + if (task.run_task) { // process entry immediately then try again - QueuedTask* release_ptr = task.run_task_.release(); - if (release_ptr->Run()) - delete release_ptr; + std::move(task.run_task)(); - // attempt to sleep again + // Attempt to run more tasks before going to sleep. 
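// --- Editorial example (not part of the patch above) ---
// With the migrated TaskQueueBase interface, call sites hand the queue a
// move-only absl::AnyInvocable<void() &&> and a webrtc::TimeDelta instead of
// a std::unique_ptr<QueuedTask> and a millisecond count. The helper below is
// a hypothetical sketch of the call sites only; `queue` is assumed to be a
// live TaskQueueBase* created elsewhere (e.g. via a TaskQueueFactory).
#include "api/task_queue/task_queue_base.h"
#include "api/units/time_delta.h"
void PostExamples(webrtc::TaskQueueBase* queue) {
  // Fire-and-forget task.
  queue->PostTask([] { /* work runs on the queue */ });
  // Delayed task; low precision is the default trade-off.
  queue->PostDelayedTask([] { /* runs roughly 20 ms later */ },
                         webrtc::TimeDelta::Millis(20));
  // High-precision variant; the stdlib/libevent/win queues in this patch
  // currently forward it to PostDelayedTask.
  queue->PostDelayedHighPrecisionTask([] { /* timing-sensitive work */ },
                                      webrtc::TimeDelta::Millis(20));
}
// --- end editorial example ---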
continue; } - if (0 == task.sleep_time_ms_) - flag_notify_.Wait(rtc::Event::kForever); - else - flag_notify_.Wait(task.sleep_time_ms_); + flag_notify_.Wait(task.sleep_time); } } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_win.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_win.cc index 6382d6b15d..bb8e522c71 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_win.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_win.cc @@ -24,17 +24,19 @@ #include #include +#include #include #include #include +#include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" #include "absl/types/optional.h" -#include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -44,7 +46,6 @@ namespace webrtc { namespace { -#define WM_RUN_TASK WM_USER + 1 #define WM_QUEUE_DELAYED_TASK WM_USER + 2 void CALLBACK InitializeQueueThread(ULONG_PTR param) { @@ -66,10 +67,10 @@ rtc::ThreadPriority TaskQueuePriorityToThreadPriority( } } -int64_t GetTick() { +Timestamp CurrentTime() { static const UINT kPeriod = 1; bool high_res = (timeBeginPeriod(kPeriod) == TIMERR_NOERROR); - int64_t ret = rtc::TimeMillis(); + Timestamp ret = Timestamp::Micros(rtc::TimeMicros()); if (high_res) timeEndPeriod(kPeriod); return ret; @@ -79,8 +80,8 @@ class DelayedTaskInfo { public: // Default ctor needed to support priority_queue::pop(). DelayedTaskInfo() {} - DelayedTaskInfo(uint32_t milliseconds, std::unique_ptr task) - : due_time_(GetTick() + milliseconds), task_(std::move(task)) {} + DelayedTaskInfo(TimeDelta delay, absl::AnyInvocable task) + : due_time_(CurrentTime() + delay), task_(std::move(task)) {} DelayedTaskInfo(DelayedTaskInfo&&) = default; // Implement for priority_queue. @@ -93,14 +94,14 @@ class DelayedTaskInfo { // See below for why this method is const. void Run() const { - RTC_DCHECK(due_time_); - task_->Run() ? task_.reset() : static_cast(task_.release()); + RTC_DCHECK(task_); + std::move(task_)(); } - int64_t due_time() const { return due_time_; } + Timestamp due_time() const { return due_time_; } private: - int64_t due_time_ = 0; // Absolute timestamp in milliseconds. + Timestamp due_time_ = Timestamp::Zero(); // `task` needs to be mutable because std::priority_queue::top() returns // a const reference and a key in an ordered queue must not be changed. @@ -109,7 +110,7 @@ class DelayedTaskInfo { // (`task`), mutable. // Because of this, the `task` variable is made private and can only be // mutated by calling the `Run()` method. 
- mutable std::unique_ptr<QueuedTask> task_; + mutable absl::AnyInvocable<void() &&> task_; }; class MultimediaTimer { @@ -122,6 +123,9 @@ class MultimediaTimer { ::CloseHandle(event_); } + MultimediaTimer(const MultimediaTimer&) = delete; + MultimediaTimer& operator=(const MultimediaTimer&) = delete; + bool StartOneShotTimer(UINT delay_ms) { RTC_DCHECK_EQ(0, timer_id_); RTC_DCHECK(event_ != nullptr); @@ -148,8 +152,6 @@ class MultimediaTimer { private: HANDLE event_ = nullptr; MMRESULT timer_id_ = 0; - - RTC_DISALLOW_COPY_AND_ASSIGN(MultimediaTimer); }; class TaskQueueWin : public TaskQueueBase { @@ -158,10 +160,11 @@ class TaskQueueWin : public TaskQueueBase { ~TaskQueueWin() override = default; void Delete() override; - void PostTask(std::unique_ptr<QueuedTask> task) override; - void PostDelayedTask(std::unique_ptr<QueuedTask> task, - uint32_t milliseconds) override; - + void PostTask(absl::AnyInvocable<void() &&> task) override; + void PostDelayedTask(absl::AnyInvocable<void() &&> task, + TimeDelta delay) override; + void PostDelayedHighPrecisionTask(absl::AnyInvocable<void() &&> task, + TimeDelta delay) override; void RunPendingTasks(); private: @@ -171,25 +174,18 @@ class TaskQueueWin : public TaskQueueBase { void ScheduleNextTimer(); void CancelTimers(); + MultimediaTimer timer_; // Since priority_queue<> by default orders items in terms of // largest->smallest, using std::less<>, and we want smallest->largest, - // we would like to use std::greater<> here. Alas it's only available in - // C++14 and later, so we roll our own compare template that that relies on - // operator<(). - template <typename T> - struct greater { - bool operator()(const T& l, const T& r) { return l > r; } - }; - - MultimediaTimer timer_; + // we would like to use std::greater<> here. std::priority_queue<DelayedTaskInfo, std::vector<DelayedTaskInfo>, - greater<DelayedTaskInfo>> + std::greater<DelayedTaskInfo>> timer_tasks_; UINT_PTR timer_id_ = 0; rtc::PlatformThread thread_; Mutex pending_lock_; - std::queue<std::unique_ptr<QueuedTask>> pending_ + std::queue<absl::AnyInvocable<void() &&>> pending_ RTC_GUARDED_BY(pending_lock_); HANDLE in_queue_; }; @@ -221,24 +217,20 @@ void TaskQueueWin::Delete() { delete this; } -void TaskQueueWin::PostTask(std::unique_ptr<QueuedTask> task) { +void TaskQueueWin::PostTask(absl::AnyInvocable<void() &&> task) { MutexLock lock(&pending_lock_); pending_.push(std::move(task)); ::SetEvent(in_queue_); } -void TaskQueueWin::PostDelayedTask(std::unique_ptr<QueuedTask> task, - uint32_t milliseconds) { - if (!milliseconds) { +void TaskQueueWin::PostDelayedTask(absl::AnyInvocable<void() &&> task, + TimeDelta delay) { + if (delay <= TimeDelta::Zero()) { PostTask(std::move(task)); return; } - // TODO(tommi): Avoid this allocation. It is currently here since - // the timestamp stored in the task info object, is a 64bit timestamp - // and WPARAM is 32bits in 32bit builds. Otherwise, we could pass the - // task pointer and timestamp as LPARAM and WPARAM.
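// --- Editorial example (not part of the patch above) ---
// Why std::greater<> here: with that comparator std::priority_queue becomes a
// min-heap, so top() yields the DelayedTaskInfo with the earliest due time.
// Minimal standalone illustration with ints:
#include <functional>
#include <queue>
#include <vector>
void MinHeapExample() {
  std::priority_queue<int, std::vector<int>, std::greater<int>> q;
  q.push(30);
  q.push(10);
  q.push(20);
  // q.top() is now 10: the smallest (i.e. soonest) element comes out first.
}
// --- end editorial example ---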
- auto* task_info = new DelayedTaskInfo(milliseconds, std::move(task)); + auto* task_info = new DelayedTaskInfo(delay, std::move(task)); RTC_CHECK(thread_.GetHandle() != absl::nullopt); if (!::PostThreadMessage(GetThreadId(*thread_.GetHandle()), WM_QUEUE_DELAYED_TASK, 0, @@ -247,9 +239,15 @@ void TaskQueueWin::PostDelayedTask(std::unique_ptr task, } } +void TaskQueueWin::PostDelayedHighPrecisionTask( + absl::AnyInvocable task, + TimeDelta delay) { + PostDelayedTask(std::move(task), delay); +} + void TaskQueueWin::RunPendingTasks() { while (true) { - std::unique_ptr task; + absl::AnyInvocable task; { MutexLock lock(&pending_lock_); if (pending_.empty()) @@ -258,8 +256,7 @@ void TaskQueueWin::RunPendingTasks() { pending_.pop(); } - if (!task->Run()) - task.release(); + std::move(task)(); } } @@ -300,26 +297,19 @@ bool TaskQueueWin::ProcessQueuedMessages() { // To protect against overly busy message queues, we limit the time // we process tasks to a few milliseconds. If we don't do that, there's // a chance that timer tasks won't ever run. - static const int kMaxTaskProcessingTimeMs = 500; - auto start = GetTick(); + static constexpr TimeDelta kMaxTaskProcessingTime = TimeDelta::Millis(500); + Timestamp start = CurrentTime(); while (::PeekMessage(&msg, nullptr, 0, 0, PM_REMOVE) && msg.message != WM_QUIT) { if (!msg.hwnd) { switch (msg.message) { - // TODO(tommi): Stop using this way of queueing tasks. - case WM_RUN_TASK: { - QueuedTask* task = reinterpret_cast(msg.lParam); - if (task->Run()) - delete task; - break; - } case WM_QUEUE_DELAYED_TASK: { std::unique_ptr info( reinterpret_cast(msg.lParam)); bool need_to_schedule_timers = timer_tasks_.empty() || timer_tasks_.top().due_time() > info->due_time(); - timer_tasks_.emplace(std::move(*info.get())); + timer_tasks_.push(std::move(*info)); if (need_to_schedule_timers) { CancelTimers(); ScheduleNextTimer(); @@ -343,7 +333,7 @@ bool TaskQueueWin::ProcessQueuedMessages() { ::DispatchMessage(&msg); } - if (GetTick() > start + kMaxTaskProcessingTimeMs) + if (CurrentTime() > start + kMaxTaskProcessingTime) break; } return msg.message != WM_QUIT; @@ -351,7 +341,7 @@ bool TaskQueueWin::ProcessQueuedMessages() { void TaskQueueWin::RunDueTasks() { RTC_DCHECK(!timer_tasks_.empty()); - auto now = GetTick(); + Timestamp now = CurrentTime(); do { const auto& top = timer_tasks_.top(); if (top.due_time() > now) @@ -367,8 +357,9 @@ void TaskQueueWin::ScheduleNextTimer() { return; const auto& next_task = timer_tasks_.top(); - int64_t delay_ms = std::max(0ll, next_task.due_time() - GetTick()); - uint32_t milliseconds = rtc::dchecked_cast(delay_ms); + TimeDelta delay = + std::max(TimeDelta::Zero(), next_task.due_time() - CurrentTime()); + uint32_t milliseconds = delay.RoundUpTo(TimeDelta::Millis(1)).ms(); if (!timer_.StartOneShotTimer(milliseconds)) timer_id_ = ::SetTimer(nullptr, 0, milliseconds, nullptr); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.cc deleted file mode 100644 index 57b3f6ce88..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.cc +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2020 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/task_utils/pending_task_safety_flag.h" - -namespace webrtc { - -// static -rtc::scoped_refptr PendingTaskSafetyFlag::Create() { - return new PendingTaskSafetyFlag(true); -} - -rtc::scoped_refptr -PendingTaskSafetyFlag::CreateDetached() { - rtc::scoped_refptr safety_flag( - new PendingTaskSafetyFlag(true)); - safety_flag->main_sequence_.Detach(); - return safety_flag; -} - -rtc::scoped_refptr -PendingTaskSafetyFlag::CreateDetachedInactive() { - rtc::scoped_refptr safety_flag( - new PendingTaskSafetyFlag(false)); - safety_flag->main_sequence_.Detach(); - return safety_flag; -} - -void PendingTaskSafetyFlag::SetNotAlive() { - RTC_DCHECK_RUN_ON(&main_sequence_); - alive_ = false; -} - -void PendingTaskSafetyFlag::SetAlive() { - RTC_DCHECK_RUN_ON(&main_sequence_); - alive_ = true; -} - -bool PendingTaskSafetyFlag::alive() const { - RTC_DCHECK_RUN_ON(&main_sequence_); - return alive_; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.cc index 1f3eb1d064..222ab1ad67 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.cc @@ -10,54 +10,106 @@ #include "rtc_base/task_utils/repeating_task.h" -#include "absl/memory/memory.h" +#include "absl/functional/any_invocable.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "rtc_base/logging.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" -#include "rtc_base/task_utils/to_queued_task.h" -#include "rtc_base/time_utils.h" namespace webrtc { -namespace webrtc_repeating_task_impl { +namespace { + +class RepeatingTask { + public: + RepeatingTask(TaskQueueBase* task_queue, + TaskQueueBase::DelayPrecision precision, + TimeDelta first_delay, + absl::AnyInvocable task, + Clock* clock, + rtc::scoped_refptr alive_flag); + RepeatingTask(RepeatingTask&&) = default; + RepeatingTask& operator=(RepeatingTask&&) = delete; + ~RepeatingTask() = default; + + void operator()() &&; + + private: + TaskQueueBase* const task_queue_; + const TaskQueueBase::DelayPrecision precision_; + Clock* const clock_; + absl::AnyInvocable task_; + // This is always finite. + Timestamp next_run_time_ RTC_GUARDED_BY(task_queue_); + rtc::scoped_refptr alive_flag_ + RTC_GUARDED_BY(task_queue_); +}; -RepeatingTaskBase::RepeatingTaskBase( +RepeatingTask::RepeatingTask( TaskQueueBase* task_queue, + TaskQueueBase::DelayPrecision precision, TimeDelta first_delay, + absl::AnyInvocable task, Clock* clock, rtc::scoped_refptr alive_flag) : task_queue_(task_queue), + precision_(precision), clock_(clock), + task_(std::move(task)), next_run_time_(clock_->CurrentTime() + first_delay), alive_flag_(std::move(alive_flag)) {} -RepeatingTaskBase::~RepeatingTaskBase() = default; - -bool RepeatingTaskBase::Run() { +void RepeatingTask::operator()() && { RTC_DCHECK_RUN_ON(task_queue_); - // Return true to tell the TaskQueue to destruct this object. if (!alive_flag_->alive()) - return true; + return; - TimeDelta delay = RunClosure(); + webrtc_repeating_task_impl::RepeatingTaskImplDTraceProbeRun(); + TimeDelta delay = task_(); + RTC_DCHECK_GE(delay, TimeDelta::Zero()); - // The closure might have stopped this task, in which case we return true to - // destruct this object. 
- if (!alive_flag_->alive()) - return true; + // A delay of +infinity means that the task should not be run again. + // Alternatively, the closure might have stopped this task. + if (delay.IsPlusInfinity() || !alive_flag_->alive()) + return; - RTC_DCHECK(delay.IsFinite()); TimeDelta lost_time = clock_->CurrentTime() - next_run_time_; next_run_time_ += delay; delay -= lost_time; delay = std::max(delay, TimeDelta::Zero()); - task_queue_->PostDelayedTask(absl::WrapUnique(this), delay.ms()); + task_queue_->PostDelayedTaskWithPrecision(precision_, std::move(*this), + delay); +} - // Return false to tell the TaskQueue to not destruct this object since we - // have taken ownership with absl::WrapUnique. - return false; +} // namespace + +RepeatingTaskHandle RepeatingTaskHandle::Start( + TaskQueueBase* task_queue, + absl::AnyInvocable closure, + TaskQueueBase::DelayPrecision precision, + Clock* clock) { + auto alive_flag = PendingTaskSafetyFlag::CreateDetached(); + webrtc_repeating_task_impl::RepeatingTaskHandleDTraceProbeStart(); + task_queue->PostTask(RepeatingTask(task_queue, precision, TimeDelta::Zero(), + std::move(closure), clock, alive_flag)); + return RepeatingTaskHandle(std::move(alive_flag)); } -} // namespace webrtc_repeating_task_impl +// DelayedStart is equivalent to Start except that the first invocation of the +// closure will be delayed by the given amount. +RepeatingTaskHandle RepeatingTaskHandle::DelayedStart( + TaskQueueBase* task_queue, + TimeDelta first_delay, + absl::AnyInvocable closure, + TaskQueueBase::DelayPrecision precision, + Clock* clock) { + auto alive_flag = PendingTaskSafetyFlag::CreateDetached(); + webrtc_repeating_task_impl::RepeatingTaskHandleDTraceProbeDelayedStart(); + task_queue->PostDelayedTaskWithPrecision( + precision, + RepeatingTask(task_queue, precision, first_delay, std::move(closure), + clock, alive_flag), + first_delay); + return RepeatingTaskHandle(std::move(alive_flag)); +} void RepeatingTaskHandle::Stop() { if (repeating_task_) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.h b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.h index 91a40e0714..e5ea3d8174 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.h @@ -15,11 +15,10 @@ #include #include -#include "api/task_queue/queued_task.h" +#include "absl/functional/any_invocable.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" -#include "api/units/timestamp.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -31,57 +30,6 @@ void RepeatingTaskHandleDTraceProbeStart(); void RepeatingTaskHandleDTraceProbeDelayedStart(); void RepeatingTaskImplDTraceProbeRun(); -class RepeatingTaskBase : public QueuedTask { - public: - RepeatingTaskBase(TaskQueueBase* task_queue, - TimeDelta first_delay, - Clock* clock, - rtc::scoped_refptr alive_flag); - ~RepeatingTaskBase() override; - - private: - virtual TimeDelta RunClosure() = 0; - - bool Run() final; - - TaskQueueBase* const task_queue_; - Clock* const clock_; - // This is always finite. - Timestamp next_run_time_ RTC_GUARDED_BY(task_queue_); - rtc::scoped_refptr alive_flag_ - RTC_GUARDED_BY(task_queue_); -}; - -// The template closure pattern is based on rtc::ClosureTask. 
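// --- Editorial example (not part of the patch above) ---
// The reworked RepeatingTaskHandle takes an absl::AnyInvocable<TimeDelta()>
// closure (plus an optional DelayPrecision) instead of the old templated
// QueuedTask wrapper. Hypothetical usage sketch; it assumes `queue` outlives
// the repeating task and that the handle is stopped on that same queue.
#include "api/units/time_delta.h"
#include "rtc_base/task_utils/repeating_task.h"
webrtc::RepeatingTaskHandle StartPoller(webrtc::TaskQueueBase* queue) {
  return webrtc::RepeatingTaskHandle::DelayedStart(
      queue, webrtc::TimeDelta::Millis(100), [] {
        // Do the periodic work, then return the delay until the next run.
        // Returning TimeDelta::PlusInfinity() stops the repetition.
        return webrtc::TimeDelta::Millis(500);
      });
}
// --- end editorial example ---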
-template -class RepeatingTaskImpl final : public RepeatingTaskBase { - public: - RepeatingTaskImpl(TaskQueueBase* task_queue, - TimeDelta first_delay, - Closure&& closure, - Clock* clock, - rtc::scoped_refptr alive_flag) - : RepeatingTaskBase(task_queue, - first_delay, - clock, - std::move(alive_flag)), - closure_(std::forward(closure)) { - static_assert( - std::is_same::type>::value, - ""); - } - - private: - TimeDelta RunClosure() override { - RepeatingTaskImplDTraceProbeRun(); - return closure_(); - } - - typename std::remove_const< - typename std::remove_reference::type>::type closure_; -}; } // namespace webrtc_repeating_task_impl // Allows starting tasks that repeat themselves on a TaskQueue indefinately @@ -103,38 +51,22 @@ class RepeatingTaskHandle { // owned by the TaskQueue and will live until it has been stopped or the // TaskQueue deletes it. It's perfectly fine to destroy the handle while the // task is running, since the repeated task is owned by the TaskQueue. - template + // The tasks are scheduled onto the task queue using the specified precision. static RepeatingTaskHandle Start(TaskQueueBase* task_queue, - Closure&& closure, - Clock* clock = Clock::GetRealTimeClock()) { - auto alive_flag = PendingTaskSafetyFlag::CreateDetached(); - webrtc_repeating_task_impl::RepeatingTaskHandleDTraceProbeStart(); - task_queue->PostTask( - std::make_unique< - webrtc_repeating_task_impl::RepeatingTaskImpl>( - task_queue, TimeDelta::Zero(), std::forward(closure), - clock, alive_flag)); - return RepeatingTaskHandle(std::move(alive_flag)); - } + absl::AnyInvocable closure, + TaskQueueBase::DelayPrecision precision = + TaskQueueBase::DelayPrecision::kLow, + Clock* clock = Clock::GetRealTimeClock()); // DelayedStart is equivalent to Start except that the first invocation of the // closure will be delayed by the given amount. - template static RepeatingTaskHandle DelayedStart( TaskQueueBase* task_queue, TimeDelta first_delay, - Closure&& closure, - Clock* clock = Clock::GetRealTimeClock()) { - auto alive_flag = PendingTaskSafetyFlag::CreateDetached(); - webrtc_repeating_task_impl::RepeatingTaskHandleDTraceProbeDelayedStart(); - task_queue->PostDelayedTask( - std::make_unique< - webrtc_repeating_task_impl::RepeatingTaskImpl>( - task_queue, first_delay, std::forward(closure), clock, - alive_flag), - first_delay.ms()); - return RepeatingTaskHandle(std::move(alive_flag)); - } + absl::AnyInvocable closure, + TaskQueueBase::DelayPrecision precision = + TaskQueueBase::DelayPrecision::kLow, + Clock* clock = Clock::GetRealTimeClock()); // Stops future invocations of the repeating task closure. Can only be called // from the TaskQueue where the task is running. The closure is guaranteed to diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/to_queued_task.h b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/to_queued_task.h deleted file mode 100644 index b2e3aae7ae..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/to_queued_task.h +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef RTC_BASE_TASK_UTILS_TO_QUEUED_TASK_H_ -#define RTC_BASE_TASK_UTILS_TO_QUEUED_TASK_H_ - -#include -#include -#include - -#include "api/task_queue/queued_task.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" - -namespace webrtc { -namespace webrtc_new_closure_impl { -// Simple implementation of QueuedTask for use with lambdas. -template -class ClosureTask : public QueuedTask { - public: - explicit ClosureTask(Closure&& closure) - : closure_(std::forward(closure)) {} - - private: - bool Run() override { - closure_(); - return true; - } - - typename std::decay::type closure_; -}; - -template -class SafetyClosureTask : public QueuedTask { - public: - explicit SafetyClosureTask(rtc::scoped_refptr safety, - Closure&& closure) - : closure_(std::forward(closure)), - safety_flag_(std::move(safety)) {} - - private: - bool Run() override { - if (safety_flag_->alive()) - closure_(); - return true; - } - - typename std::decay::type closure_; - rtc::scoped_refptr safety_flag_; -}; - -// Extends ClosureTask to also allow specifying cleanup code. -// This is useful when using lambdas if guaranteeing cleanup, even if a task -// was dropped (queue is too full), is required. -template -class ClosureTaskWithCleanup : public ClosureTask { - public: - ClosureTaskWithCleanup(Closure&& closure, Cleanup&& cleanup) - : ClosureTask(std::forward(closure)), - cleanup_(std::forward(cleanup)) {} - ~ClosureTaskWithCleanup() override { cleanup_(); } - - private: - typename std::decay::type cleanup_; -}; -} // namespace webrtc_new_closure_impl - -// Convenience function to construct closures that can be passed directly -// to methods that support std::unique_ptr but not template -// based parameters. -template -std::unique_ptr ToQueuedTask(Closure&& closure) { - return std::make_unique>( - std::forward(closure)); -} - -template -std::unique_ptr ToQueuedTask( - rtc::scoped_refptr safety, - Closure&& closure) { - return std::make_unique>( - std::move(safety), std::forward(closure)); -} - -template -std::unique_ptr ToQueuedTask(const ScopedTaskSafety& safety, - Closure&& closure) { - return ToQueuedTask(safety.flag(), std::forward(closure)); -} - -template ::type>::type, - ScopedTaskSafety>::value>::type* = nullptr> -std::unique_ptr ToQueuedTask(Closure&& closure, Cleanup&& cleanup) { - return std::make_unique< - webrtc_new_closure_impl::ClosureTaskWithCleanup>( - std::forward(closure), std::forward(cleanup)); -} - -} // namespace webrtc - -#endif // RTC_BASE_TASK_UTILS_TO_QUEUED_TASK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/test_client.h b/TMessagesProj/jni/voip/webrtc/rtc_base/test_client.h index 6989fe1d57..dd91d37ab9 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/test_client.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/test_client.h @@ -15,7 +15,6 @@ #include #include "rtc_base/async_udp_socket.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/fake_clock.h" #include "rtc_base/synchronization/mutex.h" @@ -53,6 +52,9 @@ class TestClient : public sigslot::has_slots<> { ThreadProcessingFakeClock* fake_clock); ~TestClient() override; + TestClient(const TestClient&) = delete; + TestClient& operator=(const TestClient&) = delete; + SocketAddress address() const { return socket_->GetLocalAddress(); } SocketAddress remote_address() const { return socket_->GetRemoteAddress(); } @@ -110,7 +112,6 @@ class TestClient : public sigslot::has_slots<> { std::vector> packets_; int ready_to_send_count_ = 0; int64_t prev_packet_timestamp_; - RTC_DISALLOW_COPY_AND_ASSIGN(TestClient); 
}; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/test_echo_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/test_echo_server.h index a061ed0ce7..8e3c432853 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/test_echo_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/test_echo_server.h @@ -18,9 +18,9 @@ #include #include "absl/algorithm/container.h" +#include "absl/memory/memory.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/async_tcp_socket.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" #include "rtc_base/third_party/sigslot/sigslot.h" @@ -35,6 +35,9 @@ class TestEchoServer : public sigslot::has_slots<> { TestEchoServer(Thread* thread, const SocketAddress& addr); ~TestEchoServer() override; + TestEchoServer(const TestEchoServer&) = delete; + TestEchoServer& operator=(const TestEchoServer&) = delete; + SocketAddress address() const { return server_socket_->GetLocalAddress(); } private: @@ -43,7 +46,8 @@ class TestEchoServer : public sigslot::has_slots<> { if (raw_socket) { AsyncTCPSocket* packet_socket = new AsyncTCPSocket(raw_socket); packet_socket->SignalReadPacket.connect(this, &TestEchoServer::OnPacket); - packet_socket->SignalClose.connect(this, &TestEchoServer::OnClose); + packet_socket->SubscribeClose( + this, [this](AsyncPacketSocket* s, int err) { OnClose(s, err); }); client_sockets_.push_back(packet_socket); } } @@ -58,13 +62,14 @@ class TestEchoServer : public sigslot::has_slots<> { void OnClose(AsyncPacketSocket* socket, int err) { ClientList::iterator it = absl::c_find(client_sockets_, socket); client_sockets_.erase(it); - Thread::Current()->Dispose(socket); + // `OnClose` is triggered by socket Close callback, deleting `socket` while + // processing that callback might be unsafe. + Thread::Current()->PostTask([socket = absl::WrapUnique(socket)] {}); } typedef std::list ClientList; std::unique_ptr server_socket_; ClientList client_sockets_; - RTC_DISALLOW_COPY_AND_ASSIGN(TestEchoServer); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/third_party/base64/base64.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/third_party/base64/base64.cc index 7e30b0eb34..9dc961ae52 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/third_party/base64/base64.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/third_party/base64/base64.cc @@ -18,6 +18,7 @@ #include +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" using std::vector; @@ -85,7 +86,7 @@ bool Base64::GetNextBase64Char(char ch, char* next_ch) { return true; } -bool Base64::IsBase64Encoded(const std::string& str) { +bool Base64::IsBase64Encoded(absl::string_view str) { for (size_t i = 0; i < str.size(); ++i) { if (!IsBase64Char(str.at(i))) return false; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/third_party/base64/base64.h b/TMessagesProj/jni/voip/webrtc/rtc_base/third_party/base64/base64.h index ca249541d0..4190a79dc0 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/third_party/base64/base64.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/third_party/base64/base64.h @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/system/rtc_export.h" namespace rtc { @@ -56,7 +57,7 @@ class Base64 { // Determines whether the given string consists entirely of valid base64 // encoded characters. 
- static bool IsBase64Encoded(const std::string& str); + static bool IsBase64Encoded(absl::string_view str); RTC_EXPORT static void EncodeFromArray(const void* data, size_t len, @@ -78,23 +79,23 @@ class Base64 { size_t* data_used); // Convenience Methods - static inline std::string Encode(const std::string& data) { + static inline std::string Encode(absl::string_view data) { std::string result; EncodeFromArray(data.data(), data.size(), &result); return result; } - static inline std::string Decode(const std::string& data, DecodeFlags flags) { + static inline std::string Decode(absl::string_view data, DecodeFlags flags) { std::string result; DecodeFromArray(data.data(), data.size(), flags, &result, nullptr); return result; } - static inline bool Decode(const std::string& data, + static inline bool Decode(absl::string_view data, DecodeFlags flags, std::string* result, size_t* data_used) { return DecodeFromArray(data.data(), data.size(), flags, result, data_used); } - static inline bool Decode(const std::string& data, + static inline bool Decode(absl::string_view data, DecodeFlags flags, std::vector* result, size_t* data_used) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc index 46e082f8ff..18a79bc518 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc @@ -10,6 +10,10 @@ #include "rtc_base/thread.h" +#include "absl/strings/string_view.h" +#include "api/units/time_delta.h" +#include "rtc_base/socket_server.h" + #if defined(WEBRTC_WIN) #include #elif defined(WEBRTC_POSIX) @@ -29,15 +33,15 @@ #include #include "absl/algorithm/container.h" +#include "absl/cleanup/cleanup.h" #include "api/sequence_checker.h" -#include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "rtc_base/deprecated/recursive_critical_section.h" #include "rtc_base/event.h" #include "rtc_base/internal/default_socket_server.h" #include "rtc_base/logging.h" #include "rtc_base/null_socket_server.h" -#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -71,20 +75,8 @@ class ScopedAutoReleasePool { namespace rtc { namespace { -class MessageHandlerWithTask final : public MessageHandler { - public: - MessageHandlerWithTask() {} - - void OnMessage(Message* msg) override { - static_cast(msg->pdata)->Run(); - delete msg->pdata; - } - - private: - ~MessageHandlerWithTask() override {} - - RTC_DISALLOW_COPY_AND_ASSIGN(MessageHandlerWithTask); -}; +using ::webrtc::MutexLock; +using ::webrtc::TimeDelta; class RTC_SCOPED_LOCKABLE MarkProcessingCritScope { public: @@ -100,11 +92,12 @@ class RTC_SCOPED_LOCKABLE MarkProcessingCritScope { cs_->Leave(); } + MarkProcessingCritScope(const MarkProcessingCritScope&) = delete; + MarkProcessingCritScope& operator=(const MarkProcessingCritScope&) = delete; + private: const RecursiveCriticalSection* const cs_; size_t* processing_; - - RTC_DISALLOW_COPY_AND_ASSIGN(MarkProcessingCritScope); }; } // namespace @@ -185,20 +178,6 @@ void ThreadManager::RegisterSendAndCheckForCycles(Thread* source, } #endif -// static -void ThreadManager::Clear(MessageHandler* handler) { - return Instance()->ClearInternal(handler); -} -void ThreadManager::ClearInternal(MessageHandler* handler) { - // Deleted objects may cause re-entrant calls to ClearInternal. This is - // allowed as the list of message queues does not change while queues are - // cleared. 
- MarkProcessingCritScope cs(&crit_, &processing_); - for (Thread* queue : message_queues_) { - queue->Clear(handler); - } -} - // static void ThreadManager::ProcessAllMessageQueuesForTesting() { return Instance()->ProcessAllMessageQueuesInternal(); @@ -208,20 +187,7 @@ void ThreadManager::ProcessAllMessageQueuesInternal() { // This works by posting a delayed message at the current time and waiting // for it to be dispatched on all queues, which will ensure that all messages // that came before it were also dispatched. - volatile int queues_not_done = 0; - - // This class is used so that whether the posted message is processed, or the - // message queue is simply cleared, queues_not_done gets decremented. - class ScopedIncrement : public MessageData { - public: - ScopedIncrement(volatile int* value) : value_(value) { - AtomicOps::Increment(value_); - } - ~ScopedIncrement() override { AtomicOps::Decrement(value_); } - - private: - volatile int* value_; - }; + std::atomic queues_not_done(0); { MarkProcessingCritScope cs(&crit_, &processing_); @@ -232,8 +198,13 @@ void ThreadManager::ProcessAllMessageQueuesInternal() { // or ignored. continue; } - queue->PostDelayed(RTC_FROM_HERE, 0, nullptr, MQID_DISPOSE, - new ScopedIncrement(&queues_not_done)); + queues_not_done.fetch_add(1); + // Whether the task is processed, or the thread is simply cleared, + // queues_not_done gets decremented. + absl::Cleanup sub = [&queues_not_done] { queues_not_done.fetch_sub(1); }; + // Post delayed task instead of regular task to wait for all delayed tasks + // that are ready for processing. + queue->PostDelayedTask([sub = std::move(sub)] {}, TimeDelta::Zero()); } } @@ -241,7 +212,7 @@ void ThreadManager::ProcessAllMessageQueuesInternal() { // Note: One of the message queues may have been on this thread, which is // why we can't synchronously wait for queues_not_done to go to 0; we need // to process messages as well. - while (AtomicOps::AcquireLoad(&queues_not_done) > 0) { + while (queues_not_done.load() > 0) { if (current) { current->ProcessMessages(0); } @@ -253,19 +224,11 @@ Thread* Thread::Current() { ThreadManager* manager = ThreadManager::Instance(); Thread* thread = manager->CurrentThread(); -#ifndef NO_MAIN_THREAD_WRAPPING - // Only autowrap the thread which instantiated the ThreadManager. 
- if (!thread && manager->IsMainThread()) { - thread = new Thread(CreateDefaultSocketServer()); - thread->WrapCurrentWithThreadManager(manager, true); - } -#endif - return thread; } #if defined(WEBRTC_POSIX) -ThreadManager::ThreadManager() : main_thread_ref_(CurrentThreadRef()) { +ThreadManager::ThreadManager() { #if defined(WEBRTC_MAC) InitCocoaMultiThreading(); #endif @@ -282,8 +245,7 @@ void ThreadManager::SetCurrentThreadInternal(Thread* thread) { #endif #if defined(WEBRTC_WIN) -ThreadManager::ThreadManager() - : key_(TlsAlloc()), main_thread_ref_(CurrentThreadRef()) {} +ThreadManager::ThreadManager() : key_(TlsAlloc()) {} Thread* ThreadManager::CurrentThread() { return static_cast(TlsGetValue(key_)); @@ -339,10 +301,6 @@ void ThreadManager::UnwrapCurrentThread() { } } -bool ThreadManager::IsMainThread() { - return IsThreadRefEqual(CurrentThreadRef(), main_thread_ref_); -} - Thread::ScopedDisallowBlockingCalls::ScopedDisallowBlockingCalls() : thread_(Thread::Current()), previous_state_(thread_->SetAllowBlockingCalls(false)) {} @@ -387,8 +345,7 @@ Thread::Thread(std::unique_ptr ss) : Thread(std::move(ss), /*do_init=*/true) {} Thread::Thread(SocketServer* ss, bool do_init) - : fPeekKeep_(false), - delayed_next_num_(0), + : delayed_next_num_(0), fInitialized_(false), fDestroyed_(false), stop_(0), @@ -433,7 +390,9 @@ void Thread::DoDestroy() { ss_->SetMessageQueue(nullptr); } ThreadManager::Remove(this); - ClearInternal(nullptr, MQID_ANY, nullptr); + // Clear. + messages_ = {}; + delayed_messages_ = {}; } SocketServer* Thread::socketserver() { @@ -445,40 +404,19 @@ void Thread::WakeUpSocketServer() { } void Thread::Quit() { - AtomicOps::ReleaseStore(&stop_, 1); + stop_.store(1, std::memory_order_release); WakeUpSocketServer(); } bool Thread::IsQuitting() { - return AtomicOps::AcquireLoad(&stop_) != 0; + return stop_.load(std::memory_order_acquire) != 0; } void Thread::Restart() { - AtomicOps::ReleaseStore(&stop_, 0); -} - -bool Thread::Peek(Message* pmsg, int cmsWait) { - if (fPeekKeep_) { - *pmsg = msgPeek_; - return true; - } - if (!Get(pmsg, cmsWait)) - return false; - msgPeek_ = *pmsg; - fPeekKeep_ = true; - return true; + stop_.store(0, std::memory_order_release); } -bool Thread::Get(Message* pmsg, int cmsWait, bool process_io) { - // Return and clear peek if present - // Always return the peek if it exists so there is Peek/Get symmetry - - if (fPeekKeep_) { - *pmsg = msgPeek_; - fPeekKeep_ = false; - return true; - } - +absl::AnyInvocable Thread::Get(int cmsWait) { // Get w/wait + timer scan / dispatch + socket / event multiplexer dispatch int64_t cmsTotal = cmsWait; @@ -488,44 +426,27 @@ bool Thread::Get(Message* pmsg, int cmsWait, bool process_io) { while (true) { // Check for posted events int64_t cmsDelayNext = kForever; - bool first_pass = true; - while (true) { + { // All queue operations need to be locked, but nothing else in this loop - // (specifically handling disposed message) can happen inside the crit. - // Otherwise, disposed MessageHandlers will cause deadlocks. - { - CritScope cs(&crit_); - // On the first pass, check for delayed messages that have been - // triggered and calculate the next trigger time. 
- if (first_pass) { - first_pass = false; - while (!delayed_messages_.empty()) { - if (msCurrent < delayed_messages_.top().run_time_ms_) { - cmsDelayNext = - TimeDiff(delayed_messages_.top().run_time_ms_, msCurrent); - break; - } - messages_.push_back(delayed_messages_.top().msg_); - delayed_messages_.pop(); - } - } - // Pull a message off the message queue, if available. - if (messages_.empty()) { + // can happen while holding the `mutex_`. + MutexLock lock(&mutex_); + // Check for delayed messages that have been triggered and calculate the + // next trigger time. + while (!delayed_messages_.empty()) { + if (msCurrent < delayed_messages_.top().run_time_ms) { + cmsDelayNext = + TimeDiff(delayed_messages_.top().run_time_ms, msCurrent); break; - } else { - *pmsg = messages_.front(); - messages_.pop_front(); } - } // crit_ is released here. - - // If this was a dispose message, delete it and skip it. - if (MQID_DISPOSE == pmsg->message_id) { - RTC_DCHECK(nullptr == pmsg->phandler); - delete pmsg->pdata; - *pmsg = Message(); - continue; + messages_.push(std::move(delayed_messages_.top().functor)); + delayed_messages_.pop(); + } + // Pull a message off the message queue, if available. + if (!messages_.empty()) { + absl::AnyInvocable task = std::move(messages_.front()); + messages_.pop(); + return task; } - return true; } if (IsQuitting()) @@ -544,8 +465,10 @@ bool Thread::Get(Message* pmsg, int cmsWait, bool process_io) { { // Wait and multiplex in the meantime - if (!ss_->Wait(static_cast(cmsNext), process_io)) - return false; + if (!ss_->Wait(cmsNext == kForever ? SocketServer::kForever + : webrtc::TimeDelta::Millis(cmsNext), + /*process_io=*/true)) + return nullptr; } // If the specified timeout expired, return @@ -554,20 +477,14 @@ bool Thread::Get(Message* pmsg, int cmsWait, bool process_io) { cmsElapsed = TimeDiff(msCurrent, msStart); if (cmsWait != kForever) { if (cmsElapsed >= cmsWait) - return false; + return nullptr; } } - return false; + return nullptr; } -void Thread::Post(const Location& posted_from, - MessageHandler* phandler, - uint32_t id, - MessageData* pdata, - bool time_sensitive) { - RTC_DCHECK(!time_sensitive); +void Thread::PostTask(absl::AnyInvocable task) { if (IsQuitting()) { - delete pdata; return; } @@ -576,43 +493,15 @@ void Thread::Post(const Location& posted_from, // Signal for the multiplexer to return { - CritScope cs(&crit_); - Message msg; - msg.posted_from = posted_from; - msg.phandler = phandler; - msg.message_id = id; - msg.pdata = pdata; - messages_.push_back(msg); + MutexLock lock(&mutex_); + messages_.push(std::move(task)); } WakeUpSocketServer(); } -void Thread::PostDelayed(const Location& posted_from, - int delay_ms, - MessageHandler* phandler, - uint32_t id, - MessageData* pdata) { - return DoDelayPost(posted_from, delay_ms, TimeAfter(delay_ms), phandler, id, - pdata); -} - -void Thread::PostAt(const Location& posted_from, - int64_t run_at_ms, - MessageHandler* phandler, - uint32_t id, - MessageData* pdata) { - return DoDelayPost(posted_from, TimeUntil(run_at_ms), run_at_ms, phandler, id, - pdata); -} - -void Thread::DoDelayPost(const Location& posted_from, - int64_t delay_ms, - int64_t run_at_ms, - MessageHandler* phandler, - uint32_t id, - MessageData* pdata) { +void Thread::PostDelayedHighPrecisionTask(absl::AnyInvocable task, + webrtc::TimeDelta delay) { if (IsQuitting()) { - delete pdata; return; } @@ -620,15 +509,14 @@ void Thread::DoDelayPost(const Location& posted_from, // Add to the priority queue. Gets sorted soonest first. 
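// --- Editorial example (not part of the patch above) ---
// rtc::Thread call sites migrate from the MessageHandler-based
// Post()/PostDelayed() to the TaskQueueBase-style lambdas used in this hunk.
// Hypothetical before/after; `thread` is assumed to be a started rtc::Thread*
// and the old handler/message-id names are placeholders.
//   Before: thread->Post(RTC_FROM_HERE, &handler, kMsgId, new MyData(...));
//   After:
#include "api/units/time_delta.h"
#include "rtc_base/thread.h"
void PostToThread(rtc::Thread* thread) {
  thread->PostTask([] { /* work that used to live in OnMessage() */ });
  thread->PostDelayedTask([] { /* delayed work */ },
                          webrtc::TimeDelta::Millis(250));
}
// --- end editorial example ---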
// Signal for the multiplexer to return. + int64_t delay_ms = delay.RoundUpTo(webrtc::TimeDelta::Millis(1)).ms(); + int64_t run_time_ms = TimeAfter(delay_ms); { - CritScope cs(&crit_); - Message msg; - msg.posted_from = posted_from; - msg.phandler = phandler; - msg.message_id = id; - msg.pdata = pdata; - DelayedMessage delayed(delay_ms, run_at_ms, delayed_next_num_, msg); - delayed_messages_.push(delayed); + MutexLock lock(&mutex_); + delayed_messages_.push({.delay_ms = delay_ms, + .run_time_ms = run_time_ms, + .message_number = delayed_next_num_, + .functor = std::move(task)}); // If this message queue processes 1 message every millisecond for 50 days, // we will wrap this number. Even then, only messages with identical times // will be misordered, and then only briefly. This is probably ok. @@ -639,13 +527,13 @@ void Thread::DoDelayPost(const Location& posted_from, } int Thread::GetDelay() { - CritScope cs(&crit_); + MutexLock lock(&mutex_); if (!messages_.empty()) return 0; if (!delayed_messages_.empty()) { - int delay = TimeUntil(delayed_messages_.top().run_time_ms_); + int delay = TimeUntil(delayed_messages_.top().run_time_ms); if (delay < 0) delay = 0; return delay; @@ -654,67 +542,16 @@ int Thread::GetDelay() { return kForever; } -void Thread::ClearInternal(MessageHandler* phandler, - uint32_t id, - MessageList* removed) { - // Remove messages with phandler - - if (fPeekKeep_ && msgPeek_.Match(phandler, id)) { - if (removed) { - removed->push_back(msgPeek_); - } else { - delete msgPeek_.pdata; - } - fPeekKeep_ = false; - } - - // Remove from ordered message queue - - for (auto it = messages_.begin(); it != messages_.end();) { - if (it->Match(phandler, id)) { - if (removed) { - removed->push_back(*it); - } else { - delete it->pdata; - } - it = messages_.erase(it); - } else { - ++it; - } - } - - // Remove from priority queue. Not directly iterable, so use this approach - - auto new_end = delayed_messages_.container().begin(); - for (auto it = new_end; it != delayed_messages_.container().end(); ++it) { - if (it->msg_.Match(phandler, id)) { - if (removed) { - removed->push_back(it->msg_); - } else { - delete it->msg_.pdata; - } - } else { - *new_end++ = *it; - } - } - delayed_messages_.container().erase(new_end, - delayed_messages_.container().end()); - delayed_messages_.reheap(); -} - -void Thread::Dispatch(Message* pmsg) { - TRACE_EVENT2("webrtc", "Thread::Dispatch", "src_file", - pmsg->posted_from.file_name(), "src_func", - pmsg->posted_from.function_name()); +void Thread::Dispatch(absl::AnyInvocable task) { + TRACE_EVENT0("webrtc", "Thread::Dispatch"); RTC_DCHECK_RUN_ON(this); int64_t start_time = TimeMillis(); - pmsg->phandler->OnMessage(pmsg); + std::move(task)(); int64_t end_time = TimeMillis(); int64_t diff = TimeDiff(end_time, start_time); if (diff >= dispatch_warning_ms_) { RTC_LOG(LS_INFO) << "Message to " << name() << " took " << diff - << "ms to dispatch. Posted from: " - << pmsg->posted_from.ToString(); + << "ms to dispatch."; // To avoid log spew, move the warning limit to only give warning // for delays that are larger than the one observed. dispatch_warning_ms_ = diff + 1; @@ -755,10 +592,10 @@ bool Thread::SleepMs(int milliseconds) { #endif } -bool Thread::SetName(const std::string& name, const void* obj) { +bool Thread::SetName(absl::string_view name, const void* obj) { RTC_DCHECK(!IsRunning()); - name_ = name; + name_ = std::string(name); if (obj) { // The %p specifier typically produce at most 16 hex digits, possibly with a // 0x prefix. 
But format is implementation defined, so add some margin. @@ -771,8 +608,7 @@ bool Thread::SetName(const std::string& name, const void* obj) { void Thread::SetDispatchWarningMs(int deadline) { if (!IsCurrent()) { - PostTask(webrtc::ToQueuedTask( - [this, deadline]() { SetDispatchWarningMs(deadline); })); + PostTask([this, deadline]() { SetDispatchWarningMs(deadline); }); return; } RTC_DCHECK_RUN_ON(this); @@ -911,29 +747,20 @@ void Thread::Stop() { Join(); } -void Thread::Send(const Location& posted_from, - MessageHandler* phandler, - uint32_t id, - MessageData* pdata) { +void Thread::BlockingCall(rtc::FunctionView functor) { + TRACE_EVENT0("webrtc", "Thread::BlockingCall"); + RTC_DCHECK(!IsQuitting()); if (IsQuitting()) return; - // Sent messages are sent to the MessageHandler directly, in the context - // of "thread", like Win32 SendMessage. If in the right context, - // call the handler directly. - Message msg; - msg.posted_from = posted_from; - msg.phandler = phandler; - msg.message_id = id; - msg.pdata = pdata; if (IsCurrent()) { #if RTC_DCHECK_IS_ON RTC_DCHECK(this->IsInvokeToThreadAllowed(this)); RTC_DCHECK_RUN_ON(this); could_be_blocking_call_count_++; #endif - msg.phandler->OnMessage(&msg); + functor(); return; } @@ -952,34 +779,35 @@ void Thread::Send(const Location& posted_from, #endif // Perhaps down the line we can get rid of this workaround and always require - // current_thread to be valid when Send() is called. + // current_thread to be valid when BlockingCall() is called. std::unique_ptr done_event; if (!current_thread) done_event.reset(new rtc::Event()); bool ready = false; - PostTask(webrtc::ToQueuedTask( - [&msg]() mutable { msg.phandler->OnMessage(&msg); }, - [this, &ready, current_thread, done = done_event.get()] { - if (current_thread) { - CritScope cs(&crit_); - ready = true; - current_thread->socketserver()->WakeUp(); - } else { - done->Set(); - } - })); - + absl::Cleanup cleanup = [this, &ready, current_thread, + done = done_event.get()] { + if (current_thread) { + { + MutexLock lock(&mutex_); + ready = true; + } + current_thread->socketserver()->WakeUp(); + } else { + done->Set(); + } + }; + PostTask([functor, cleanup = std::move(cleanup)] { functor(); }); if (current_thread) { bool waited = false; - crit_.Enter(); + mutex_.Lock(); while (!ready) { - crit_.Leave(); - current_thread->socketserver()->Wait(kForever, false); + mutex_.Unlock(); + current_thread->socketserver()->Wait(SocketServer::kForever, false); waited = true; - crit_.Enter(); + mutex_.Lock(); } - crit_.Leave(); + mutex_.Unlock(); // Our Wait loop above may have consumed some WakeUp events for this // Thread, that weren't relevant to this Send. Losing these WakeUps can @@ -1000,24 +828,6 @@ void Thread::Send(const Location& posted_from, } } -void Thread::InvokeInternal(const Location& posted_from, - rtc::FunctionView functor) { - TRACE_EVENT2("webrtc", "Thread::Invoke", "src_file", posted_from.file_name(), - "src_func", posted_from.function_name()); - - class FunctorMessageHandler : public MessageHandler { - public: - explicit FunctorMessageHandler(rtc::FunctionView functor) - : functor_(functor) {} - void OnMessage(Message* msg) override { functor_(); } - - private: - rtc::FunctionView functor_; - } handler(functor); - - Send(posted_from, &handler); -} - // Called by the ThreadManager when being set as the current thread. 
void Thread::EnsureIsCurrentTaskQueue() { task_queue_registration_ = @@ -1029,25 +839,10 @@ void Thread::ClearCurrentTaskQueue() { task_queue_registration_.reset(); } -void Thread::QueuedTaskHandler::OnMessage(Message* msg) { - RTC_DCHECK(msg); - auto* data = static_cast*>(msg->pdata); - std::unique_ptr task(data->Release()); - // Thread expects handler to own Message::pdata when OnMessage is called - // Since MessageData is no longer needed, delete it. - delete data; - - // QueuedTask interface uses Run return value to communicate who owns the - // task. false means QueuedTask took the ownership. - if (!task->Run()) - task.release(); -} - void Thread::AllowInvokesToThread(Thread* thread) { #if (!defined(NDEBUG) || RTC_DCHECK_IS_ON) if (!IsCurrent()) { - PostTask(webrtc::ToQueuedTask( - [thread, this]() { AllowInvokesToThread(thread); })); + PostTask([thread, this]() { AllowInvokesToThread(thread); }); return; } RTC_DCHECK_RUN_ON(this); @@ -1059,7 +854,7 @@ void Thread::AllowInvokesToThread(Thread* thread) { void Thread::DisallowAllInvokes() { #if (!defined(NDEBUG) || RTC_DCHECK_IS_ON) if (!IsCurrent()) { - PostTask(webrtc::ToQueuedTask([this]() { DisallowAllInvokes(); })); + PostTask([this]() { DisallowAllInvokes(); }); return; } RTC_DCHECK_RUN_ON(this); @@ -1098,36 +893,19 @@ bool Thread::IsInvokeToThreadAllowed(rtc::Thread* target) { #endif } -void Thread::PostTask(std::unique_ptr task) { - // Though Post takes MessageData by raw pointer (last parameter), it still - // takes it with ownership. - Post(RTC_FROM_HERE, &queued_task_handler_, - /*id=*/0, new ScopedMessageData(std::move(task))); -} - -void Thread::PostDelayedTask(std::unique_ptr task, - uint32_t milliseconds) { - // Though PostDelayed takes MessageData by raw pointer (last parameter), - // it still takes it with ownership. - PostDelayed(RTC_FROM_HERE, milliseconds, &queued_task_handler_, - /*id=*/0, - new ScopedMessageData(std::move(task))); -} - void Thread::Delete() { Stop(); delete this; } -bool Thread::IsProcessingMessagesForTesting() { - return (owned_ || IsCurrent()) && !IsQuitting(); +void Thread::PostDelayedTask(absl::AnyInvocable task, + webrtc::TimeDelta delay) { + // This implementation does not support low precision yet. + PostDelayedHighPrecisionTask(std::move(task), delay); } -void Thread::Clear(MessageHandler* phandler, - uint32_t id, - MessageList* removed) { - CritScope cs(&crit_); - ClearInternal(phandler, id, removed); +bool Thread::IsProcessingMessagesForTesting() { + return (owned_ || IsCurrent()) && !IsQuitting(); } bool Thread::ProcessMessages(int cmsLoop) { @@ -1143,10 +921,10 @@ bool Thread::ProcessMessages(int cmsLoop) { #if defined(WEBRTC_MAC) ScopedAutoReleasePool pool; #endif - Message msg; - if (!Get(&msg, cmsNext)) + absl::AnyInvocable task = Get(cmsNext); + if (!task) return !IsQuitting(); - Dispatch(&msg); + Dispatch(std::move(task)); if (cmsLoop != kForever) { cmsNext = static_cast(TimeUntil(msEnd)); @@ -1187,13 +965,6 @@ bool Thread::IsRunning() { #endif } -// static -MessageHandler* Thread::GetPostTaskMessageHandler() { - // Allocate at first call, never deallocate. 
- static MessageHandler* handler = new MessageHandlerWithTask; - return handler; -} - AutoThread::AutoThread() : Thread(CreateDefaultSocketServer(), /*do_init=*/false) { if (!ThreadManager::Instance()->CurrentThread()) { @@ -1228,11 +999,6 @@ AutoSocketServerThread::AutoSocketServerThread(SocketServer* ss) AutoSocketServerThread::~AutoSocketServerThread() { RTC_DCHECK(ThreadManager::Instance()->CurrentThread() == this); - // Some tests post destroy messages to this thread. To avoid memory - // leaks, we have to process those messages. In particular - // P2PTransportChannelPingTest, relying on the message posted in - // cricket::Connection::Destroy. - ProcessMessages(0); // Stop and destroy the thread before clearing it as the current thread. // Sometimes there are messages left in the Thread that will be // destroyed by DoDestroy, and sometimes the destructors of the message and/or diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h index 38e9732fbb..c571e366d6 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h @@ -20,32 +20,34 @@ #include #include #include +#include #include +#include "absl/strings/string_view.h" + #if defined(WEBRTC_POSIX) #include #endif +#include "absl/base/attributes.h" +#include "absl/functional/any_invocable.h" #include "api/function_view.h" -#include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/deprecated/recursive_critical_section.h" -#include "rtc_base/location.h" -#include "rtc_base/message_handler.h" #include "rtc_base/platform_thread_types.h" #include "rtc_base/socket_server.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_message.h" #if defined(WEBRTC_WIN) #include "rtc_base/win32.h" #endif #if RTC_DCHECK_IS_ON -// Counts how many blocking Thread::Invoke or Thread::Send calls are made from -// within a scope and logs the number of blocking calls at the end of the scope. +// Counts how many `Thread::BlockingCall` are made from within a scope and logs +// the number of blocking calls at the end of the scope. #define RTC_LOG_THREAD_BLOCK_COUNT() \ rtc::Thread::ScopedCountBlockingCalls blocked_call_count_printer( \ [func = __func__](uint32_t actual_block, uint32_t could_block) { \ @@ -76,31 +78,6 @@ namespace rtc { class Thread; -namespace rtc_thread_internal { - -class MessageLikeTask : public MessageData { - public: - virtual void Run() = 0; -}; - -template -class MessageWithFunctor final : public MessageLikeTask { - public: - explicit MessageWithFunctor(FunctorT&& functor) - : functor_(std::forward(functor)) {} - - void Run() override { functor_(); } - - private: - ~MessageWithFunctor() override {} - - typename std::remove_reference::type functor_; - - RTC_DISALLOW_COPY_AND_ASSIGN(MessageWithFunctor); -}; - -} // namespace rtc_thread_internal - class RTC_EXPORT ThreadManager { public: static const int kForever = -1; @@ -110,7 +87,6 @@ class RTC_EXPORT ThreadManager { static void Add(Thread* message_queue); static void Remove(Thread* message_queue); - static void Clear(MessageHandler* handler); // For testing purposes, for use with a simulated clock. 
// Ensures that all message queues have processed delayed messages @@ -139,8 +115,6 @@ class RTC_EXPORT ThreadManager { Thread* WrapCurrentThread(); void UnwrapCurrentThread(); - bool IsMainThread(); - #if RTC_DCHECK_IS_ON // Registers that a Send operation is to be performed between `source` and // `target`, while checking that this does not cause a send cycle that could @@ -152,10 +126,12 @@ class RTC_EXPORT ThreadManager { ThreadManager(); ~ThreadManager(); + ThreadManager(const ThreadManager&) = delete; + ThreadManager& operator=(const ThreadManager&) = delete; + void SetCurrentThreadInternal(Thread* thread); void AddInternal(Thread* message_queue); void RemoveInternal(Thread* message_queue); - void ClearInternal(MessageHandler* handler); void ProcessAllMessageQueuesInternal(); #if RTC_DCHECK_IS_ON void RemoveFromSendGraph(Thread* thread) RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); @@ -183,11 +159,6 @@ class RTC_EXPORT ThreadManager { #if defined(WEBRTC_WIN) const DWORD key_; #endif - - // The thread to potentially autowrap. - const PlatformThreadRef main_thread_ref_; - - RTC_DISALLOW_COPY_AND_ASSIGN(ThreadManager); }; // WARNING! SUBCLASSES MUST CALL Stop() IN THEIR DESTRUCTORS! See ~Thread(). @@ -221,12 +192,15 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // calling Clear on the object from a different thread. ~Thread() override; + Thread(const Thread&) = delete; + Thread& operator=(const Thread&) = delete; + static std::unique_ptr CreateWithSocketServer(); static std::unique_ptr Create(); static Thread* Current(); - // Used to catch performance regressions. Use this to disallow blocking calls - // (Invoke) for a given scope. If a synchronous call is made while this is in + // Used to catch performance regressions. Use this to disallow BlockingCall + // for a given scope. If a synchronous call is made while this is in // effect, an assert will be triggered. // Note that this is a single threaded class. class ScopedDisallowBlockingCalls { @@ -291,50 +265,13 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // Processed. Normally, this would be true until IsQuitting() is true. virtual bool IsProcessingMessagesForTesting(); - // Get() will process I/O until: - // 1) A message is available (returns true) - // 2) cmsWait seconds have elapsed (returns false) - // 3) Stop() is called (returns false) - virtual bool Get(Message* pmsg, - int cmsWait = kForever, - bool process_io = true); - virtual bool Peek(Message* pmsg, int cmsWait = 0); - // `time_sensitive` is deprecated and should always be false. - virtual void Post(const Location& posted_from, - MessageHandler* phandler, - uint32_t id = 0, - MessageData* pdata = nullptr, - bool time_sensitive = false); - virtual void PostDelayed(const Location& posted_from, - int delay_ms, - MessageHandler* phandler, - uint32_t id = 0, - MessageData* pdata = nullptr); - virtual void PostAt(const Location& posted_from, - int64_t run_at_ms, - MessageHandler* phandler, - uint32_t id = 0, - MessageData* pdata = nullptr); - virtual void Clear(MessageHandler* phandler, - uint32_t id = MQID_ANY, - MessageList* removed = nullptr); - virtual void Dispatch(Message* pmsg); - // Amount of time until the next message can be retrieved virtual int GetDelay(); bool empty() const { return size() == 0u; } size_t size() const { - CritScope cs(&crit_); - return messages_.size() + delayed_messages_.size() + (fPeekKeep_ ? 
1u : 0u); - } - - // Internally posts a message which causes the doomed object to be deleted - template - void Dispose(T* doomed) { - if (doomed) { - Post(RTC_FROM_HERE, nullptr, MQID_DISPOSE, new DisposeData(doomed)); - } + webrtc::MutexLock lock(&mutex_); + return messages_.size() + delayed_messages_.size(); } bool IsCurrent() const; @@ -347,10 +284,10 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // Sets the thread's name, for debugging. Must be called before Start(). // If `obj` is non-null, its value is appended to `name`. const std::string& name() const { return name_; } - bool SetName(const std::string& name, const void* obj); + bool SetName(absl::string_view name, const void* obj); // Sets the expected processing time in ms. The thread will write - // log messages when Invoke() takes more time than this. + // log messages when Dispatch() takes more time than this. // Default is 50 ms. void SetDispatchWarningMs(int deadline); @@ -368,41 +305,28 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // ProcessMessages occasionally. virtual void Run(); - virtual void Send(const Location& posted_from, - MessageHandler* phandler, - uint32_t id = 0, - MessageData* pdata = nullptr); - - // Convenience method to invoke a functor on another thread. Caller must - // provide the `ReturnT` template argument, which cannot (easily) be deduced. - // Uses Send() internally, which blocks the current thread until execution - // is complete. - // Ex: bool result = thread.Invoke(RTC_FROM_HERE, - // &MyFunctionReturningBool); + // Convenience method to invoke a functor on another thread. + // Blocks the current thread until execution is complete. + // Ex: thread.BlockingCall([&] { result = MyFunctionReturningBool(); }); // NOTE: This function can only be called when synchronous calls are allowed. // See ScopedDisallowBlockingCalls for details. - // NOTE: Blocking invokes are DISCOURAGED, consider if what you're doing can + // NOTE: Blocking calls are DISCOURAGED, consider if what you're doing can // be achieved with PostTask() and callbacks instead. - template < - class ReturnT, - typename = typename std::enable_if::value>::type> - ReturnT Invoke(const Location& posted_from, FunctionView functor) { + virtual void BlockingCall(FunctionView functor); + + template , + typename = typename std::enable_if_t>> + ReturnT BlockingCall(Functor&& functor) { ReturnT result; - InvokeInternal(posted_from, [functor, &result] { result = functor(); }); + BlockingCall([&] { result = std::forward(functor)(); }); return result; } - template < - class ReturnT, - typename = typename std::enable_if::value>::type> - void Invoke(const Location& posted_from, FunctionView functor) { - InvokeInternal(posted_from, functor); - } - - // Allows invoke to specified `thread`. Thread never will be dereferenced and - // will be used only for reference-based comparison, so instance can be safely - // deleted. If NDEBUG is defined and RTC_DCHECK_IS_ON is undefined do - // nothing. + // Allows BlockingCall to specified `thread`. Thread never will be + // dereferenced and will be used only for reference-based comparison, so + // instance can be safely deleted. If NDEBUG is defined and RTC_DCHECK_IS_ON + // is undefined do nothing. void AllowInvokesToThread(Thread* thread); // If NDEBUG is defined and RTC_DCHECK_IS_ON is undefined do nothing. @@ -414,61 +338,13 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // true. 
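A short usage sketch of BlockingCall, which the hunk above introduces in place of the old Invoke<ReturnT>(RTC_FROM_HERE, ...). It assumes the signatures shown in the diff (a FunctionView<void()> overload plus a value-returning template); ComputeOnWorker() and `worker` are invented for illustration.

#include "rtc_base/thread.h"

int ComputeOnWorker() { return 42; }  // placeholder

void BlockingCallExamples(rtc::Thread* worker) {
  // Void form: blocks the calling thread until the closure has run on `worker`.
  worker->BlockingCall([] { /* touch worker-thread-only state here */ });

  // Value-returning form: the return type is deduced from the functor, so the
  // explicit template argument required by the old Invoke<ReturnT> goes away.
  int result = worker->BlockingCall([] { return ComputeOnWorker(); });
  (void)result;
}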
bool IsInvokeToThreadAllowed(rtc::Thread* target); - // Posts a task to invoke the functor on `this` thread asynchronously, i.e. - // without blocking the thread that invoked PostTask(). Ownership of `functor` - // is passed and (usually, see below) destroyed on `this` thread after it is - // invoked. - // Requirements of FunctorT: - // - FunctorT is movable. - // - FunctorT implements "T operator()()" or "T operator()() const" for some T - // (if T is not void, the return value is discarded on `this` thread). - // - FunctorT has a public destructor that can be invoked from `this` thread - // after operation() has been invoked. - // - The functor must not cause the thread to quit before PostTask() is done. - // - // Destruction of the functor/task mimics what TaskQueue::PostTask does: If - // the task is run, it will be destroyed on `this` thread. However, if there - // are pending tasks by the time the Thread is destroyed, or a task is posted - // to a thread that is quitting, the task is destroyed immediately, on the - // calling thread. Destroying the Thread only blocks for any currently running - // task to complete. Note that TQ abstraction is even vaguer on how - // destruction happens in these cases, allowing destruction to happen - // asynchronously at a later time and on some arbitrary thread. So to ease - // migration, don't depend on Thread::PostTask destroying un-run tasks - // immediately. - // - // Example - Calling a class method: - // class Foo { - // public: - // void DoTheThing(); - // }; - // Foo foo; - // thread->PostTask(RTC_FROM_HERE, Bind(&Foo::DoTheThing, &foo)); - // - // Example - Calling a lambda function: - // thread->PostTask(RTC_FROM_HERE, - // [&x, &y] { x.TrackComputations(y.Compute()); }); - template - void PostTask(const Location& posted_from, FunctorT&& functor) { - Post(posted_from, GetPostTaskMessageHandler(), /*id=*/0, - new rtc_thread_internal::MessageWithFunctor( - std::forward(functor))); - } - template - void PostDelayedTask(const Location& posted_from, - FunctorT&& functor, - uint32_t milliseconds) { - PostDelayed(posted_from, milliseconds, GetPostTaskMessageHandler(), - /*id=*/0, - new rtc_thread_internal::MessageWithFunctor( - std::forward(functor))); - } - // From TaskQueueBase - void PostTask(std::unique_ptr task) override; - void PostDelayedTask(std::unique_ptr task, - uint32_t milliseconds) override; void Delete() override; + void PostTask(absl::AnyInvocable task) override; + void PostDelayedTask(absl::AnyInvocable task, + webrtc::TimeDelta delay) override; + void PostDelayedHighPrecisionTask(absl::AnyInvocable task, + webrtc::TimeDelta delay) override; // ProcessMessages will process I/O and dispatch messages until: // 1) cms milliseconds have elapsed (returns true) @@ -522,57 +398,31 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // DelayedMessage goes into a priority queue, sorted by trigger time. Messages // with the same trigger time are processed in num_ (FIFO) order. 
- class DelayedMessage { - public: - DelayedMessage(int64_t delay, - int64_t run_time_ms, - uint32_t num, - const Message& msg) - : delay_ms_(delay), - run_time_ms_(run_time_ms), - message_number_(num), - msg_(msg) {} - + struct DelayedMessage { bool operator<(const DelayedMessage& dmsg) const { - return (dmsg.run_time_ms_ < run_time_ms_) || - ((dmsg.run_time_ms_ == run_time_ms_) && - (dmsg.message_number_ < message_number_)); + return (dmsg.run_time_ms < run_time_ms) || + ((dmsg.run_time_ms == run_time_ms) && + (dmsg.message_number < message_number)); } - int64_t delay_ms_; // for debugging - int64_t run_time_ms_; + int64_t delay_ms; // for debugging + int64_t run_time_ms; // Monotonicaly incrementing number used for ordering of messages // targeted to execute at the same time. - uint32_t message_number_; - Message msg_; - }; - - class PriorityQueue : public std::priority_queue { - public: - container_type& container() { return c; } - void reheap() { make_heap(c.begin(), c.end(), comp); } + uint32_t message_number; + // std::priority_queue doesn't allow to extract elements, but functor + // is move-only and thus need to be changed when pulled out of the + // priority queue. That is ok because `functor` doesn't affect operator< + mutable absl::AnyInvocable functor; }; - void DoDelayPost(const Location& posted_from, - int64_t cmsDelay, - int64_t tstamp, - MessageHandler* phandler, - uint32_t id, - MessageData* pdata); - // Perform initialization, subclasses must call this from their constructor // if false was passed as init_queue to the Thread constructor. void DoInit(); - // Does not take any lock. Must be called either while holding crit_, or by - // the destructor (by definition, the latter has exclusive access). - void ClearInternal(MessageHandler* phandler, - uint32_t id, - MessageList* removed) RTC_EXCLUSIVE_LOCKS_REQUIRED(&crit_); - // Perform cleanup; subclasses must call this from the destructor, // and are not expected to actually hold the lock. - void DoDestroy() RTC_EXCLUSIVE_LOCKS_REQUIRED(&crit_); + void DoDestroy() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void WakeUpSocketServer(); @@ -588,16 +438,15 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { friend class ScopedDisallowBlockingCalls; - RecursiveCriticalSection* CritForTest() { return &crit_; } - private: static const int kSlowDispatchLoggingThreshold = 50; // 50 ms - class QueuedTaskHandler final : public MessageHandler { - public: - QueuedTaskHandler() {} - void OnMessage(Message* msg) override; - }; + // Get() will process I/O until: + // 1) A task is available (returns it) + // 2) cmsWait seconds have elapsed (returns empty task) + // 3) Stop() is called (returns empty task) + absl::AnyInvocable Get(int cmsWait); + void Dispatch(absl::AnyInvocable task); // Sets the per-thread allow-blocking-calls flag and returns the previous // value. Must be called on this thread. @@ -620,35 +469,26 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // Return true if the thread is currently running. bool IsRunning(); - void InvokeInternal(const Location& posted_from, - rtc::FunctionView functor); - // Called by the ThreadManager when being set as the current thread. void EnsureIsCurrentTaskQueue(); // Called by the ThreadManager when being unset as the current thread. void ClearCurrentTaskQueue(); - // Returns a static-lifetime MessageHandler which runs message with - // MessageLikeTask payload data. 
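The new DelayedMessage struct above keeps a move-only functor inside a std::priority_queue, which only hands out const access to its top element; marking the payload mutable is what allows moving it out before pop(), exactly as the comment in the struct explains. A standalone sketch of that trick, with std::packaged_task standing in for the move-only task type; none of these names are WebRTC's.

#include <cstdint>
#include <future>    // std::packaged_task: a move-only callable
#include <iostream>
#include <queue>
#include <utility>

struct DelayedTask {
  int64_t run_time_ms;
  uint32_t message_number;  // FIFO tie-break for equal run times
  // priority_queue::top() returns a const reference; the payload is mutable so
  // it can still be moved out before pop(). Ordering never inspects `task`.
  mutable std::packaged_task<void()> task;

  // Reversed comparison turns std::priority_queue's max-heap into a min-heap
  // on run_time_ms, with lower message numbers first on ties.
  bool operator<(const DelayedTask& other) const {
    return other.run_time_ms < run_time_ms ||
           (other.run_time_ms == run_time_ms &&
            other.message_number < message_number);
  }
};

int main() {
  std::priority_queue<DelayedTask> delayed;
  uint32_t next_number = 0;
  delayed.push({20, next_number++,
                std::packaged_task<void()>([] { std::cout << "second\n"; })});
  delayed.push({10, next_number++,
                std::packaged_task<void()>([] { std::cout << "first\n"; })});
  while (!delayed.empty()) {
    std::packaged_task<void()> task = std::move(delayed.top().task);
    delayed.pop();
    task();  // runs in trigger-time order: "first", then "second"
  }
}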
- static MessageHandler* GetPostTaskMessageHandler(); - - bool fPeekKeep_; - Message msgPeek_; - MessageList messages_ RTC_GUARDED_BY(crit_); - PriorityQueue delayed_messages_ RTC_GUARDED_BY(crit_); - uint32_t delayed_next_num_ RTC_GUARDED_BY(crit_); + std::queue> messages_ RTC_GUARDED_BY(mutex_); + std::priority_queue delayed_messages_ RTC_GUARDED_BY(mutex_); + uint32_t delayed_next_num_ RTC_GUARDED_BY(mutex_); #if RTC_DCHECK_IS_ON uint32_t blocking_call_count_ RTC_GUARDED_BY(this) = 0; uint32_t could_be_blocking_call_count_ RTC_GUARDED_BY(this) = 0; std::vector allowed_threads_ RTC_GUARDED_BY(this); bool invoke_policy_enabled_ RTC_GUARDED_BY(this) = false; #endif - RecursiveCriticalSection crit_; + mutable webrtc::Mutex mutex_; bool fInitialized_; bool fDestroyed_; - volatile int stop_; + std::atomic stop_; // The SocketServer might not be owned by Thread. SocketServer* const ss_; @@ -677,16 +517,12 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // Only touched from the worker thread itself. bool blocking_calls_allowed_ = true; - // Runs webrtc::QueuedTask posted to the Thread. - QueuedTaskHandler queued_task_handler_; std::unique_ptr task_queue_registration_; friend class ThreadManager; int dispatch_warning_ms_ RTC_GUARDED_BY(this) = kSlowDispatchLoggingThreshold; - - RTC_DISALLOW_COPY_AND_ASSIGN(Thread); }; // AutoThread automatically installs itself at construction @@ -700,8 +536,8 @@ class AutoThread : public Thread { AutoThread(); ~AutoThread() override; - private: - RTC_DISALLOW_COPY_AND_ASSIGN(AutoThread); + AutoThread(const AutoThread&) = delete; + AutoThread& operator=(const AutoThread&) = delete; }; // AutoSocketServerThread automatically installs itself at @@ -714,10 +550,11 @@ class AutoSocketServerThread : public Thread { explicit AutoSocketServerThread(SocketServer* ss); ~AutoSocketServerThread() override; + AutoSocketServerThread(const AutoSocketServerThread&) = delete; + AutoSocketServerThread& operator=(const AutoSocketServerThread&) = delete; + private: rtc::Thread* old_thread_; - - RTC_DISALLOW_COPY_AND_ASSIGN(AutoSocketServerThread); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/thread_message.h b/TMessagesProj/jni/voip/webrtc/rtc_base/thread_message.h deleted file mode 100644 index c610c3b911..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/thread_message.h +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef RTC_BASE_THREAD_MESSAGE_H_ -#define RTC_BASE_THREAD_MESSAGE_H_ - -#include -#include -#include - -#include "api/scoped_refptr.h" -#include "rtc_base/location.h" -#include "rtc_base/message_handler.h" - -namespace rtc { - -// Derive from this for specialized data -// App manages lifetime, except when messages are purged - -class MessageData { - public: - MessageData() {} - virtual ~MessageData() {} -}; - -template -class TypedMessageData : public MessageData { - public: - explicit TypedMessageData(const T& data) : data_(data) {} - const T& data() const { return data_; } - T& data() { return data_; } - - private: - T data_; -}; - -// Like TypedMessageData, but for pointers that require a delete. 
-template -class ScopedMessageData : public MessageData { - public: - explicit ScopedMessageData(std::unique_ptr data) - : data_(std::move(data)) {} - - const T& data() const { return *data_; } - T& data() { return *data_; } - - T* Release() { return data_.release(); } - - private: - std::unique_ptr data_; -}; - -// Like ScopedMessageData, but for reference counted pointers. -template -class ScopedRefMessageData : public MessageData { - public: - explicit ScopedRefMessageData(T* data) : data_(data) {} - const scoped_refptr& data() const { return data_; } - scoped_refptr& data() { return data_; } - - private: - scoped_refptr data_; -}; - -template -inline MessageData* WrapMessageData(const T& data) { - return new TypedMessageData(data); -} - -template -inline const T& UseMessageData(MessageData* data) { - return static_cast*>(data)->data(); -} - -template -class DisposeData : public MessageData { - public: - explicit DisposeData(T* data) : data_(data) {} - virtual ~DisposeData() { delete data_; } - - private: - T* data_; -}; - -const uint32_t MQID_ANY = static_cast(-1); -const uint32_t MQID_DISPOSE = static_cast(-2); - -// No destructor - -struct Message { - Message() : phandler(nullptr), message_id(0), pdata(nullptr) {} - inline bool Match(MessageHandler* handler, uint32_t id) const { - return (handler == nullptr || handler == phandler) && - (id == MQID_ANY || id == message_id); - } - Location posted_from; - MessageHandler* phandler; - uint32_t message_id; - MessageData* pdata; -}; - -typedef std::list MessageList; -} // namespace rtc -#endif // RTC_BASE_THREAD_MESSAGE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/time/timestamp_extrapolator.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/time/timestamp_extrapolator.cc deleted file mode 100644 index 99445284dc..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/time/timestamp_extrapolator.cc +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/time/timestamp_extrapolator.h" - -#include - -namespace webrtc { - -TimestampExtrapolator::TimestampExtrapolator(int64_t start_ms) - : _startMs(0), - _firstTimestamp(0), - _wrapArounds(0), - _prevUnwrappedTimestamp(-1), - _prevWrapTimestamp(-1), - _lambda(1), - _firstAfterReset(true), - _packetCount(0), - _startUpFilterDelayInPackets(2), - _detectorAccumulatorPos(0), - _detectorAccumulatorNeg(0), - _alarmThreshold(60e3), - _accDrift(6600), // in timestamp ticks, i.e. 15 ms - _accMaxError(7000), - _pP11(1e10) { - Reset(start_ms); -} - -void TimestampExtrapolator::Reset(int64_t start_ms) { - _startMs = start_ms; - _prevMs = _startMs; - _firstTimestamp = 0; - _w[0] = 90.0; - _w[1] = 0; - _pP[0][0] = 1; - _pP[1][1] = _pP11; - _pP[0][1] = _pP[1][0] = 0; - _firstAfterReset = true; - _prevUnwrappedTimestamp = -1; - _prevWrapTimestamp = -1; - _wrapArounds = 0; - _packetCount = 0; - _detectorAccumulatorPos = 0; - _detectorAccumulatorNeg = 0; -} - -void TimestampExtrapolator::Update(int64_t tMs, uint32_t ts90khz) { - if (tMs - _prevMs > 10e3) { - // Ten seconds without a complete frame. 
- // Reset the extrapolator - Reset(tMs); - } else { - _prevMs = tMs; - } - - // Remove offset to prevent badly scaled matrices - tMs -= _startMs; - - CheckForWrapArounds(ts90khz); - - int64_t unwrapped_ts90khz = - static_cast(ts90khz) + - _wrapArounds * ((static_cast(1) << 32) - 1); - - if (_firstAfterReset) { - // Make an initial guess of the offset, - // should be almost correct since tMs - _startMs - // should about zero at this time. - _w[1] = -_w[0] * tMs; - _firstTimestamp = unwrapped_ts90khz; - _firstAfterReset = false; - } - - double residual = (static_cast(unwrapped_ts90khz) - _firstTimestamp) - - static_cast(tMs) * _w[0] - _w[1]; - if (DelayChangeDetection(residual) && - _packetCount >= _startUpFilterDelayInPackets) { - // A sudden change of average network delay has been detected. - // Force the filter to adjust its offset parameter by changing - // the offset uncertainty. Don't do this during startup. - _pP[1][1] = _pP11; - } - - if (_prevUnwrappedTimestamp >= 0 && - unwrapped_ts90khz < _prevUnwrappedTimestamp) { - // Drop reordered frames. - return; - } - - // T = [t(k) 1]'; - // that = T'*w; - // K = P*T/(lambda + T'*P*T); - double K[2]; - K[0] = _pP[0][0] * tMs + _pP[0][1]; - K[1] = _pP[1][0] * tMs + _pP[1][1]; - double TPT = _lambda + tMs * K[0] + K[1]; - K[0] /= TPT; - K[1] /= TPT; - // w = w + K*(ts(k) - that); - _w[0] = _w[0] + K[0] * residual; - _w[1] = _w[1] + K[1] * residual; - // P = 1/lambda*(P - K*T'*P); - double p00 = - 1 / _lambda * (_pP[0][0] - (K[0] * tMs * _pP[0][0] + K[0] * _pP[1][0])); - double p01 = - 1 / _lambda * (_pP[0][1] - (K[0] * tMs * _pP[0][1] + K[0] * _pP[1][1])); - _pP[1][0] = - 1 / _lambda * (_pP[1][0] - (K[1] * tMs * _pP[0][0] + K[1] * _pP[1][0])); - _pP[1][1] = - 1 / _lambda * (_pP[1][1] - (K[1] * tMs * _pP[0][1] + K[1] * _pP[1][1])); - _pP[0][0] = p00; - _pP[0][1] = p01; - _prevUnwrappedTimestamp = unwrapped_ts90khz; - if (_packetCount < _startUpFilterDelayInPackets) { - _packetCount++; - } -} - -int64_t TimestampExtrapolator::ExtrapolateLocalTime(uint32_t timestamp90khz) { - int64_t localTimeMs = 0; - CheckForWrapArounds(timestamp90khz); - double unwrapped_ts90khz = - static_cast(timestamp90khz) + - _wrapArounds * ((static_cast(1) << 32) - 1); - if (_packetCount == 0) { - localTimeMs = -1; - } else if (_packetCount < _startUpFilterDelayInPackets) { - localTimeMs = - _prevMs + - static_cast( - static_cast(unwrapped_ts90khz - _prevUnwrappedTimestamp) / - 90.0 + - 0.5); - } else { - if (_w[0] < 1e-3) { - localTimeMs = _startMs; - } else { - double timestampDiff = - unwrapped_ts90khz - static_cast(_firstTimestamp); - localTimeMs = static_cast(static_cast(_startMs) + - (timestampDiff - _w[1]) / _w[0] + 0.5); - } - } - return localTimeMs; -} - -// Investigates if the timestamp clock has overflowed since the last timestamp -// and keeps track of the number of wrap arounds since reset. -void TimestampExtrapolator::CheckForWrapArounds(uint32_t ts90khz) { - if (_prevWrapTimestamp == -1) { - _prevWrapTimestamp = ts90khz; - return; - } - if (ts90khz < _prevWrapTimestamp) { - // This difference will probably be less than -2^31 if we have had a wrap - // around (e.g. timestamp = 1, _previousTimestamp = 2^32 - 1). Since it is - // casted to a Word32, it should be positive. - if (static_cast(ts90khz - _prevWrapTimestamp) > 0) { - // Forward wrap around - _wrapArounds++; - } - } else { - // This difference will probably be less than -2^31 if we have had a - // backward wrap around. Since it is casted to a Word32, it should be - // positive. 
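The CheckForWrapArounds code being deleted here leans on a small trick: subtracting two uint32_t timestamps and reinterpreting the result as int32_t distinguishes a genuine 2^32 wrap from ordinary reordering by the sign of the difference. A standalone illustration of that arithmetic; WrapDirection() is an invented name, not WebRTC API.

#include <cstdint>
#include <iostream>

// Returns +1 for a forward wrap, -1 for a backward wrap, 0 otherwise.
int WrapDirection(uint32_t prev, uint32_t now) {
  if (now < prev && static_cast<int32_t>(now - prev) > 0) return +1;
  if (now > prev && static_cast<int32_t>(prev - now) > 0) return -1;
  return 0;
}

int main() {
  std::cout << WrapDirection(0xFFFFFFF0u, 0x10u) << "\n";  // 1: wrapped forward
  std::cout << WrapDirection(0x10u, 0xFFFFFFF0u) << "\n";  // -1: wrapped backward
  std::cout << WrapDirection(100u, 200u) << "\n";          // 0: plain increase
}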
- if (static_cast(_prevWrapTimestamp - ts90khz) > 0) { - // Backward wrap around - _wrapArounds--; - } - } - _prevWrapTimestamp = ts90khz; -} - -bool TimestampExtrapolator::DelayChangeDetection(double error) { - // CUSUM detection of sudden delay changes - error = (error > 0) ? std::min(error, _accMaxError) - : std::max(error, -_accMaxError); - _detectorAccumulatorPos = - std::max(_detectorAccumulatorPos + error - _accDrift, double{0}); - _detectorAccumulatorNeg = - std::min(_detectorAccumulatorNeg + error + _accDrift, double{0}); - if (_detectorAccumulatorPos > _alarmThreshold || - _detectorAccumulatorNeg < -_alarmThreshold) { - // Alarm - _detectorAccumulatorPos = _detectorAccumulatorNeg = 0; - return true; - } - return false; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/time/timestamp_extrapolator.h b/TMessagesProj/jni/voip/webrtc/rtc_base/time/timestamp_extrapolator.h deleted file mode 100644 index b325d2cbaa..0000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/time/timestamp_extrapolator.h +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_TIME_TIMESTAMP_EXTRAPOLATOR_H_ -#define RTC_BASE_TIME_TIMESTAMP_EXTRAPOLATOR_H_ - -#include - -namespace webrtc { - -// Not thread safe. -class TimestampExtrapolator { - public: - explicit TimestampExtrapolator(int64_t start_ms); - void Update(int64_t tMs, uint32_t ts90khz); - int64_t ExtrapolateLocalTime(uint32_t timestamp90khz); - void Reset(int64_t start_ms); - - private: - void CheckForWrapArounds(uint32_t ts90khz); - bool DelayChangeDetection(double error); - double _w[2]; - double _pP[2][2]; - int64_t _startMs; - int64_t _prevMs; - uint32_t _firstTimestamp; - int32_t _wrapArounds; - int64_t _prevUnwrappedTimestamp; - int64_t _prevWrapTimestamp; - const double _lambda; - bool _firstAfterReset; - uint32_t _packetCount; - const uint32_t _startUpFilterDelayInPackets; - - double _detectorAccumulatorPos; - double _detectorAccumulatorNeg; - const double _alarmThreshold; - const double _accDrift; - const double _accMaxError; - const double _pP11; -}; - -} // namespace webrtc - -#endif // RTC_BASE_TIME_TIMESTAMP_EXTRAPOLATOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.h b/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.h index de3c58c815..6a3cfda3d1 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.h @@ -31,6 +31,12 @@ static const int64_t kNumNanosecsPerMillisec = static const int64_t kNumNanosecsPerMicrosec = kNumNanosecsPerSec / kNumMicrosecsPerSec; +// Elapsed milliseconds between NTP base, 1900 January 1 00:00 GMT +// (see https://tools.ietf.org/html/rfc868), and January 1 00:00 GMT 1970 +// epoch. This is useful when converting between the NTP time base and the +// time base used in RTCP reports. +constexpr int64_t kNtpJan1970Millisecs = 2'208'988'800 * kNumMillisecsPerSec; + // TODO(honghaiz): Define a type for the time value specifically. 
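The kNtpJan1970Millisecs constant added above is the fixed 2'208'988'800-second offset between the NTP time base (1900-01-01) and the Unix epoch (1970-01-01), expressed in milliseconds. A self-contained sketch of the conversion it is meant for; NtpSecondsToUnixMillis() is an invented helper, not part of time_utils.h.

#include <cstdint>
#include <iostream>

constexpr int64_t kNumMillisecsPerSec = 1000;
constexpr int64_t kNtpJan1970Millisecs =
    int64_t{2'208'988'800} * kNumMillisecsPerSec;

// Maps an NTP wall-clock value (seconds since 1900) onto Unix epoch millis.
int64_t NtpSecondsToUnixMillis(int64_t ntp_seconds) {
  return ntp_seconds * kNumMillisecsPerSec - kNtpJan1970Millisecs;
}

int main() {
  std::cout << NtpSecondsToUnixMillis(2'208'988'800) << "\n";            // 0: 1970-01-01
  std::cout << NtpSecondsToUnixMillis(2'208'988'800 + 86'400) << "\n";   // one day later, in ms
}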
class ClockInterface { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/timestamp_aligner.h b/TMessagesProj/jni/voip/webrtc/rtc_base/timestamp_aligner.h index 73af9debf9..138e936af2 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/timestamp_aligner.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/timestamp_aligner.h @@ -13,7 +13,6 @@ #include -#include "rtc_base/constructor_magic.h" #include "rtc_base/system/rtc_export.h" namespace rtc { @@ -35,6 +34,9 @@ class RTC_EXPORT TimestampAligner { TimestampAligner(); ~TimestampAligner(); + TimestampAligner(const TimestampAligner&) = delete; + TimestampAligner& operator=(const TimestampAligner&) = delete; + public: // Translates timestamps of a capture system to the same timescale as is used // by rtc::TimeMicros(). `capturer_time_us` is assumed to be accurate, but @@ -77,7 +79,6 @@ class RTC_EXPORT TimestampAligner { // Offset between `prev_translated_time_us_` and the corresponding capturer // time. int64_t prev_time_offset_us_; - RTC_DISALLOW_COPY_AND_ASSIGN(TimestampAligner); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.cc index 9fa3021c6f..e68c643dbe 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.cc @@ -13,6 +13,7 @@ #include #include +#include "absl/strings/string_view.h" #include "rtc_base/helpers.h" #include "rtc_base/string_encode.h" #include "rtc_base/string_to_number.h" @@ -55,8 +56,11 @@ std::string UniqueStringGenerator::GenerateString() { return ToString(unique_number_generator_.GenerateNumber()); } -bool UniqueStringGenerator::AddKnownId(const std::string& value) { - absl::optional int_value = StringToNumber(value); +bool UniqueStringGenerator::AddKnownId(absl::string_view value) { + // TODO(webrtc:13579): remove string copy here once absl::string_view version + // of StringToNumber is available. + absl::optional int_value = + StringToNumber(std::string(value)); // The underlying generator works for uint32_t values, so if the provided // value is not a uint32_t it will never be generated anyway. if (int_value.has_value()) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.h b/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.h index 3e2f9d7072..342dad7766 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.h @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/sequence_checker.h" #include "rtc_base/synchronization/mutex.h" @@ -103,7 +104,7 @@ class UniqueStringGenerator { // Adds an id that this generator should no longer generate. // Return value indicates whether the ID was hitherto unknown. - bool AddKnownId(const std::string& value); + bool AddKnownId(absl::string_view value); private: // This implementation will be simple and will generate "0", "1", ... 
diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/units/unit_base.h b/TMessagesProj/jni/voip/webrtc/rtc_base/units/unit_base.h index 7196bae346..e0a926fb8d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/units/unit_base.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/units/unit_base.h @@ -50,22 +50,22 @@ class UnitBase { return value_ == MinusInfinityVal(); } - constexpr bool operator==(const Unit_T& other) const { + constexpr bool operator==(const UnitBase& other) const { return value_ == other.value_; } - constexpr bool operator!=(const Unit_T& other) const { + constexpr bool operator!=(const UnitBase& other) const { return value_ != other.value_; } - constexpr bool operator<=(const Unit_T& other) const { + constexpr bool operator<=(const UnitBase& other) const { return value_ <= other.value_; } - constexpr bool operator>=(const Unit_T& other) const { + constexpr bool operator>=(const UnitBase& other) const { return value_ >= other.value_; } - constexpr bool operator>(const Unit_T& other) const { + constexpr bool operator>(const UnitBase& other) const { return value_ > other.value_; } - constexpr bool operator<(const Unit_T& other) const { + constexpr bool operator<(const UnitBase& other) const { return value_ < other.value_; } constexpr Unit_T RoundTo(const Unit_T& resolution) const { @@ -266,14 +266,18 @@ class RelativeUnit : public UnitBase { return UnitBase::template ToValue() / other.template ToValue(); } - template - constexpr typename std::enable_if::value, Unit_T>::type - operator/(const T& scalar) const { - return UnitBase::FromValue( - std::round(UnitBase::template ToValue() / scalar)); + template >* = nullptr> + constexpr Unit_T operator/(T scalar) const { + return UnitBase::FromValue(std::llround(this->ToValue() / scalar)); + } + template >* = nullptr> + constexpr Unit_T operator/(T scalar) const { + return UnitBase::FromValue(this->ToValue() / scalar); } constexpr Unit_T operator*(double scalar) const { - return UnitBase::FromValue(std::round(this->ToValue() * scalar)); + return UnitBase::FromValue(std::llround(this->ToValue() * scalar)); } constexpr Unit_T operator*(int64_t scalar) const { return UnitBase::FromValue(this->ToValue() * scalar); @@ -281,6 +285,9 @@ class RelativeUnit : public UnitBase { constexpr Unit_T operator*(int32_t scalar) const { return UnitBase::FromValue(this->ToValue() * scalar); } + constexpr Unit_T operator*(size_t scalar) const { + return UnitBase::FromValue(this->ToValue() * scalar); + } protected: using UnitBase::UnitBase; @@ -298,6 +305,19 @@ template inline constexpr Unit_T operator*(int32_t scalar, RelativeUnit other) { return other * scalar; } +template +inline constexpr Unit_T operator*(size_t scalar, RelativeUnit other) { + return other * scalar; +} + +template +inline constexpr Unit_T operator-(RelativeUnit other) { + if (other.IsPlusInfinity()) + return UnitBase::MinusInfinity(); + if (other.IsMinusInfinity()) + return UnitBase::PlusInfinity(); + return -1 * other; +} } // namespace rtc_units_impl diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.cc index 5d36e3e1de..efc206b219 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.cc @@ -18,7 +18,9 @@ #include #include "absl/algorithm/container.h" +#include "api/units/time_delta.h" #include "rtc_base/checks.h" +#include "rtc_base/event.h" #include "rtc_base/fake_clock.h" #include "rtc_base/logging.h" 
#include "rtc_base/physical_socket_server.h" @@ -27,6 +29,11 @@ #include "rtc_base/time_utils.h" namespace rtc { + +using ::webrtc::MutexLock; +using ::webrtc::TaskQueueBase; +using ::webrtc::TimeDelta; + #if defined(WEBRTC_WIN) const in_addr kInitialNextIPv4 = {{{0x01, 0, 0, 0}}}; #else @@ -51,16 +58,9 @@ const uint32_t TCP_MSS = 1400; // Maximum segment size // Note: The current algorithm doesn't work for sample sizes smaller than this. const int NUM_SAMPLES = 1000; -enum { - MSG_ID_PACKET, - MSG_ID_CONNECT, - MSG_ID_DISCONNECT, - MSG_ID_SIGNALREADEVENT, -}; - // Packets are passed between sockets as messages. We copy the data just like // the kernel does. -class Packet : public MessageData { +class Packet { public: Packet(const char* data, size_t size, const SocketAddress& from) : size_(size), consumed_(0), from_(from) { @@ -69,7 +69,7 @@ class Packet : public MessageData { memcpy(data_, data, size_); } - ~Packet() override { delete[] data_; } + ~Packet() { delete[] data_; } const char* data() const { return data_ + consumed_; } size_t size() const { return size_ - consumed_; } @@ -87,17 +87,11 @@ class Packet : public MessageData { SocketAddress from_; }; -struct MessageAddress : public MessageData { - explicit MessageAddress(const SocketAddress& a) : addr(a) {} - SocketAddress addr; -}; - VirtualSocket::VirtualSocket(VirtualSocketServer* server, int family, int type) : server_(server), type_(type), state_(CS_CLOSED), error_(0), - listen_queue_(nullptr), network_size_(0), recv_buffer_size_(0), bound_(false), @@ -109,11 +103,6 @@ VirtualSocket::VirtualSocket(VirtualSocketServer* server, int family, int type) VirtualSocket::~VirtualSocket() { Close(); - - for (RecvBuffer::iterator it = recv_buffer_.begin(); it != recv_buffer_.end(); - ++it) { - delete *it; - } } SocketAddress VirtualSocket::GetLocalAddress() const { @@ -149,6 +138,75 @@ int VirtualSocket::Connect(const SocketAddress& addr) { return InitiateConnect(addr, true); } +VirtualSocket::SafetyBlock::SafetyBlock(VirtualSocket* socket) + : socket_(*socket) {} + +VirtualSocket::SafetyBlock::~SafetyBlock() { + // Ensure `SetNotAlive` was called and there is nothing left to cleanup. + RTC_DCHECK(!alive_); + RTC_DCHECK(posted_connects_.empty()); + RTC_DCHECK(recv_buffer_.empty()); + RTC_DCHECK(!listen_queue_.has_value()); +} + +void VirtualSocket::SafetyBlock::SetNotAlive() { + VirtualSocketServer* const server = socket_.server_; + const SocketAddress& local_addr = socket_.local_addr_; + + MutexLock lock(&mutex_); + // Cancel pending sockets + if (listen_queue_.has_value()) { + for (const SocketAddress& remote_addr : *listen_queue_) { + server->Disconnect(remote_addr); + } + listen_queue_ = absl::nullopt; + } + + // Cancel potential connects + for (const SocketAddress& remote_addr : posted_connects_) { + // Lookup remote side. + VirtualSocket* lookup_socket = + server->LookupConnection(local_addr, remote_addr); + if (lookup_socket) { + // Server socket, remote side is a socket retreived by accept. Accepted + // sockets are not bound so we will not find it by looking in the + // bindings table. + server->Disconnect(lookup_socket); + server->RemoveConnection(local_addr, remote_addr); + } else { + server->Disconnect(remote_addr); + } + } + posted_connects_.clear(); + + recv_buffer_.clear(); + + alive_ = false; +} + +void VirtualSocket::SafetyBlock::PostSignalReadEvent() { + if (pending_read_signal_event_) { + // Avoid posting multiple times. 
+ return; + } + + pending_read_signal_event_ = true; + rtc::scoped_refptr safety(this); + socket_.server_->msg_queue_->PostTask( + [safety = std::move(safety)] { safety->MaybeSignalReadEvent(); }); +} + +void VirtualSocket::SafetyBlock::MaybeSignalReadEvent() { + { + MutexLock lock(&mutex_); + pending_read_signal_event_ = false; + if (!alive_ || recv_buffer_.empty()) { + return; + } + } + socket_.SignalReadEvent(&socket_); +} + int VirtualSocket::Close() { if (!local_addr_.IsNil() && bound_) { // Remove from the binding table. @@ -156,30 +214,12 @@ int VirtualSocket::Close() { bound_ = false; } - if (SOCK_STREAM == type_) { - webrtc::MutexLock lock(&mutex_); - - // Cancel pending sockets - if (listen_queue_) { - while (!listen_queue_->empty()) { - SocketAddress addr = listen_queue_->front(); - - // Disconnect listening socket. - server_->Disconnect(addr); - listen_queue_->pop_front(); - } - listen_queue_ = nullptr; - } - // Disconnect stream sockets - if (CS_CONNECTED == state_) { - server_->Disconnect(local_addr_, remote_addr_); - } - // Cancel potential connects - server_->CancelConnects(this); + // Disconnect stream sockets + if (state_ == CS_CONNECTED && type_ == SOCK_STREAM) { + server_->Disconnect(local_addr_, remote_addr_); } - // Clear incoming packets and disconnect messages - server_->Clear(this); + safety_->SetNotAlive(); state_ = CS_CLOSED; local_addr_.Clear(); @@ -226,85 +266,111 @@ int VirtualSocket::RecvFrom(void* pv, *timestamp = -1; } - webrtc::MutexLock lock(&mutex_); + int data_read = safety_->RecvFrom(pv, cb, *paddr); + if (data_read < 0) { + error_ = EAGAIN; + return -1; + } + + if (type_ == SOCK_STREAM) { + bool was_full = (recv_buffer_size_ == server_->recv_buffer_capacity()); + recv_buffer_size_ -= data_read; + if (was_full) { + server_->SendTcp(remote_addr_); + } + } + + return data_read; +} + +int VirtualSocket::SafetyBlock::RecvFrom(void* buffer, + size_t size, + SocketAddress& addr) { + MutexLock lock(&mutex_); // If we don't have a packet, then either error or wait for one to arrive. if (recv_buffer_.empty()) { - error_ = EAGAIN; return -1; } // Return the packet at the front of the queue. - Packet* packet = recv_buffer_.front(); - size_t data_read = std::min(cb, packet->size()); - memcpy(pv, packet->data(), data_read); - *paddr = packet->from(); + Packet& packet = *recv_buffer_.front(); + size_t data_read = std::min(size, packet.size()); + memcpy(buffer, packet.data(), data_read); + addr = packet.from(); - if (data_read < packet->size()) { - packet->Consume(data_read); + if (data_read < packet.size()) { + packet.Consume(data_read); } else { recv_buffer_.pop_front(); - delete packet; } - // To behave like a real socket, SignalReadEvent should fire in the next - // message loop pass if there's still data buffered. + // To behave like a real socket, SignalReadEvent should fire if there's still + // data buffered. 
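PostSignalReadEvent/MaybeSignalReadEvent above coalesce read notifications: a boolean guarded by the mutex ensures at most one pending task, and the signal itself is fired outside the lock. A standalone sketch of that pattern; the Notifier class and the plain std::vector task queue are stand-ins, not the WebRTC types.

#include <functional>
#include <iostream>
#include <mutex>
#include <vector>

class Notifier {
 public:
  // Called whenever a packet is buffered; queues at most one signal task.
  void OnPacket(std::vector<std::function<void()>>& task_queue) {
    std::lock_guard<std::mutex> lock(mutex_);
    ++buffered_packets_;
    if (pending_signal_) return;  // a notification task is already in flight
    pending_signal_ = true;
    task_queue.push_back([this] { Signal(); });
  }

 private:
  void Signal() {
    int packets;
    {
      std::lock_guard<std::mutex> lock(mutex_);
      pending_signal_ = false;
      packets = buffered_packets_;
      buffered_packets_ = 0;
    }
    // Fire the user-visible callback outside the lock, like SignalReadEvent.
    std::cout << "readable: " << packets << " packet(s) buffered\n";
  }

  std::mutex mutex_;
  bool pending_signal_ = false;
  int buffered_packets_ = 0;
};

int main() {
  std::vector<std::function<void()>> task_queue;
  Notifier notifier;
  notifier.OnPacket(task_queue);
  notifier.OnPacket(task_queue);  // coalesced: no second task is queued
  for (auto& task : task_queue) task();
  std::cout << task_queue.size() << " task(s) queued\n";  // prints: 1 task(s) queued
}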
if (!recv_buffer_.empty()) { - server_->PostSignalReadEvent(this); + PostSignalReadEvent(); } - if (SOCK_STREAM == type_) { - bool was_full = (recv_buffer_size_ == server_->recv_buffer_capacity()); - recv_buffer_size_ -= data_read; - if (was_full) { - server_->SendTcp(remote_addr_); - } - } - - return static_cast(data_read); + return data_read; } int VirtualSocket::Listen(int backlog) { - webrtc::MutexLock lock(&mutex_); RTC_DCHECK(SOCK_STREAM == type_); RTC_DCHECK(CS_CLOSED == state_); if (local_addr_.IsNil()) { error_ = EINVAL; return -1; } - RTC_DCHECK(nullptr == listen_queue_); - listen_queue_ = std::make_unique(); + safety_->Listen(); state_ = CS_CONNECTING; return 0; } +void VirtualSocket::SafetyBlock::Listen() { + MutexLock lock(&mutex_); + RTC_DCHECK(!listen_queue_.has_value()); + listen_queue_.emplace(); +} + VirtualSocket* VirtualSocket::Accept(SocketAddress* paddr) { - webrtc::MutexLock lock(&mutex_); - if (nullptr == listen_queue_) { - error_ = EINVAL; + SafetyBlock::AcceptResult result = safety_->Accept(); + if (result.error != 0) { + error_ = result.error; return nullptr; } + if (paddr) { + *paddr = result.remote_addr; + } + return result.socket.release(); +} + +VirtualSocket::SafetyBlock::AcceptResult VirtualSocket::SafetyBlock::Accept() { + AcceptResult result; + MutexLock lock(&mutex_); + RTC_DCHECK(alive_); + if (!listen_queue_.has_value()) { + result.error = EINVAL; + return result; + } while (!listen_queue_->empty()) { - VirtualSocket* socket = new VirtualSocket(server_, AF_INET, type_); + auto socket = std::make_unique(socket_.server_, AF_INET, + socket_.type_); // Set the new local address to the same as this server socket. - socket->SetLocalAddress(local_addr_); + socket->SetLocalAddress(socket_.local_addr_); // Sockets made from a socket that 'was Any' need to inherit that. 
- socket->set_was_any(was_any_); - SocketAddress remote_addr(listen_queue_->front()); - int result = socket->InitiateConnect(remote_addr, false); + socket->set_was_any(socket_.was_any()); + SocketAddress remote_addr = listen_queue_->front(); listen_queue_->pop_front(); - if (result != 0) { - delete socket; + if (socket->InitiateConnect(remote_addr, false) != 0) { continue; } socket->CompleteConnect(remote_addr); - if (paddr) { - *paddr = remote_addr; - } - return socket; + result.socket = std::move(socket); + result.remote_addr = remote_addr; + return result; } - error_ = EWOULDBLOCK; - return nullptr; + result.error = EWOULDBLOCK; + return result; } int VirtualSocket::GetError() const { @@ -333,59 +399,109 @@ int VirtualSocket::SetOption(Option opt, int value) { return 0; // 0 is success to emulate setsockopt() } -void VirtualSocket::OnMessage(Message* pmsg) { - bool signal_read_event = false; - bool signal_close_event = false; - bool signal_connect_event = false; - int error_to_signal = 0; - { - webrtc::MutexLock lock(&mutex_); - if (pmsg->message_id == MSG_ID_PACKET) { - RTC_DCHECK(nullptr != pmsg->pdata); - Packet* packet = static_cast(pmsg->pdata); - - recv_buffer_.push_back(packet); - signal_read_event = true; - } else if (pmsg->message_id == MSG_ID_CONNECT) { - RTC_DCHECK(nullptr != pmsg->pdata); - MessageAddress* data = static_cast(pmsg->pdata); - if (listen_queue_ != nullptr) { - listen_queue_->push_back(data->addr); - signal_read_event = true; - } else if ((SOCK_STREAM == type_) && (CS_CONNECTING == state_)) { - CompleteConnect(data->addr); - signal_connect_event = true; - } else { - RTC_LOG(LS_VERBOSE) - << "Socket at " << local_addr_.ToString() << " is not listening"; - server_->Disconnect(data->addr); - } - delete data; - } else if (pmsg->message_id == MSG_ID_DISCONNECT) { - RTC_DCHECK(SOCK_STREAM == type_); - if (CS_CLOSED != state_) { - error_to_signal = (CS_CONNECTING == state_) ? ECONNREFUSED : 0; - state_ = CS_CLOSED; - remote_addr_.Clear(); - signal_close_event = true; - } - } else if (pmsg->message_id == MSG_ID_SIGNALREADEVENT) { - signal_read_event = !recv_buffer_.empty(); - } else { - RTC_DCHECK_NOTREACHED(); +void VirtualSocket::PostPacket(TimeDelta delay, + std::unique_ptr packet) { + rtc::scoped_refptr safety = safety_; + VirtualSocket* socket = this; + server_->msg_queue_->PostDelayedTask( + [safety = std::move(safety), socket, + packet = std::move(packet)]() mutable { + if (safety->AddPacket(std::move(packet))) { + socket->SignalReadEvent(socket); + } + }, + delay); +} + +bool VirtualSocket::SafetyBlock::AddPacket(std::unique_ptr packet) { + MutexLock lock(&mutex_); + if (alive_) { + recv_buffer_.push_back(std::move(packet)); + } + return alive_; +} + +void VirtualSocket::PostConnect(TimeDelta delay, + const SocketAddress& remote_addr) { + safety_->PostConnect(delay, remote_addr); +} + +void VirtualSocket::SafetyBlock::PostConnect(TimeDelta delay, + const SocketAddress& remote_addr) { + rtc::scoped_refptr safety(this); + + MutexLock lock(&mutex_); + RTC_DCHECK(alive_); + // Save addresses of the pending connects to allow propertly disconnect them + // if socket closes before delayed task below runs. + // `posted_connects_` is an std::list, thus its iterators are valid while the + // element is in the list. It can be removed either in the `Connect` just + // below or by calling SetNotAlive function, thus inside `Connect` `it` should + // be valid when alive_ == true. 
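PostPacket, PostConnect and PostDisconnect above all hand a scoped_refptr<SafetyBlock> to tasks that may run after the socket is closed, and each task checks the alive flag under the mutex before touching socket state. A standalone sketch of that lifetime pattern, with std::shared_ptr and a plain task vector standing in for rtc::scoped_refptr and the server's message thread; Owner and Safety are invented names.

#include <functional>
#include <iostream>
#include <memory>
#include <mutex>
#include <vector>

struct Safety {
  std::mutex mu;
  bool alive = true;
};

class Owner {
 public:
  ~Owner() {
    std::lock_guard<std::mutex> lock(safety_->mu);
    safety_->alive = false;  // from now on, pending tasks become no-ops
  }

  // Queues a task that may outlive `this`; the task keeps the Safety block
  // alive, never the Owner itself.
  void PostWork(std::vector<std::function<void()>>& task_queue) {
    std::shared_ptr<Safety> safety = safety_;
    Owner* self = this;
    task_queue.push_back([safety, self] {
      std::lock_guard<std::mutex> lock(safety->mu);
      if (!safety->alive) return;  // owner destroyed: never dereference `self`
      self->DoWork();
    });
  }

  void DoWork() { std::cout << "work\n"; }

 private:
  const std::shared_ptr<Safety> safety_ = std::make_shared<Safety>();
};

int main() {
  std::vector<std::function<void()>> task_queue;
  auto owner = std::make_unique<Owner>();
  owner->PostWork(task_queue);
  task_queue.front()();  // runs DoWork(): the owner is still alive
  owner->PostWork(task_queue);
  owner.reset();         // like Close()/SetNotAlive(): flips the flag
  task_queue.back()();   // skipped safely; `self` dangles but is never used
}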
+ auto it = posted_connects_.insert(posted_connects_.end(), remote_addr); + auto task = [safety = std::move(safety), it] { + switch (safety->Connect(it)) { + case Signal::kNone: + break; + case Signal::kReadEvent: + safety->socket_.SignalReadEvent(&safety->socket_); + break; + case Signal::kConnectEvent: + safety->socket_.SignalConnectEvent(&safety->socket_); + break; } + }; + socket_.server_->msg_queue_->PostDelayedTask(std::move(task), delay); +} + +VirtualSocket::SafetyBlock::Signal VirtualSocket::SafetyBlock::Connect( + VirtualSocket::SafetyBlock::PostedConnects::iterator remote_addr_it) { + MutexLock lock(&mutex_); + if (!alive_) { + return Signal::kNone; } - // Signal events without holding `mutex_`, to avoid recursive locking, as well - // as issues with sigslot and lock order. - if (signal_read_event) { - SignalReadEvent(this); - } - if (signal_close_event) { - SignalCloseEvent(this, error_to_signal); + RTC_DCHECK(!posted_connects_.empty()); + SocketAddress remote_addr = *remote_addr_it; + posted_connects_.erase(remote_addr_it); + + if (listen_queue_.has_value()) { + listen_queue_->push_back(remote_addr); + return Signal::kReadEvent; } - if (signal_connect_event) { - SignalConnectEvent(this); + if (socket_.type_ == SOCK_STREAM && socket_.state_ == CS_CONNECTING) { + socket_.CompleteConnect(remote_addr); + return Signal::kConnectEvent; } + RTC_LOG(LS_VERBOSE) << "Socket at " << socket_.local_addr_.ToString() + << " is not listening"; + socket_.server_->Disconnect(remote_addr); + return Signal::kNone; +} + +bool VirtualSocket::SafetyBlock::IsAlive() { + MutexLock lock(&mutex_); + return alive_; +} + +void VirtualSocket::PostDisconnect(TimeDelta delay) { + // Posted task may outlive this. Use different name for `this` inside the task + // to avoid accidental unsafe `this->safety_` instead of safe `safety` + VirtualSocket* socket = this; + rtc::scoped_refptr safety = safety_; + auto task = [safety = std::move(safety), socket] { + if (!safety->IsAlive()) { + return; + } + RTC_DCHECK_EQ(socket->type_, SOCK_STREAM); + if (socket->state_ == CS_CLOSED) { + return; + } + int error_to_signal = (socket->state_ == CS_CONNECTING) ? ECONNREFUSED : 0; + socket->state_ = CS_CLOSED; + socket->remote_addr_.Clear(); + socket->SignalCloseEvent(socket, error_to_signal); + }; + server_->msg_queue_->PostDelayedTask(std::move(task), delay); } int VirtualSocket::InitiateConnect(const SocketAddress& addr, bool use_delay) { @@ -476,7 +592,6 @@ void VirtualSocket::OnSocketServerReadyToSend() { } void VirtualSocket::SetToBlocked() { - webrtc::MutexLock lock(&mutex_); ready_to_send_ = false; error_ = EWOULDBLOCK; } @@ -526,8 +641,6 @@ int64_t VirtualSocket::UpdateOrderedDelivery(int64_t ts) { } size_t VirtualSocket::PurgeNetworkPackets(int64_t cur_time) { - webrtc::MutexLock lock(&mutex_); - while (!network_.empty() && (network_.front().done_time <= cur_time)) { RTC_DCHECK(network_size_ >= network_.front().size); network_size_ -= network_.front().size; @@ -611,8 +724,9 @@ void VirtualSocketServer::SetMessageQueue(Thread* msg_queue) { msg_queue_ = msg_queue; } -bool VirtualSocketServer::Wait(int cmsWait, bool process_io) { - RTC_DCHECK(msg_queue_ == Thread::Current()); +bool VirtualSocketServer::Wait(webrtc::TimeDelta max_wait_duration, + bool process_io) { + RTC_DCHECK_RUN_ON(msg_queue_); if (stop_on_idle_ && Thread::Current()->empty()) { return false; } @@ -620,7 +734,7 @@ bool VirtualSocketServer::Wait(int cmsWait, bool process_io) { // any real I/O. 
Received packets come in the form of queued messages, so // Thread will ensure WakeUp is called if another thread sends a // packet. - wakeup_.Wait(cmsWait); + wakeup_.Wait(max_wait_duration); return true; } @@ -635,7 +749,7 @@ void VirtualSocketServer::SetAlternativeLocalAddress( } bool VirtualSocketServer::ProcessMessagesUntilIdle() { - RTC_DCHECK(msg_queue_ == Thread::Current()); + RTC_DCHECK_RUN_ON(msg_queue_); stop_on_idle_ = true; while (!msg_queue_->empty()) { if (fake_clock_) { @@ -644,10 +758,7 @@ bool VirtualSocketServer::ProcessMessagesUntilIdle() { fake_clock_->AdvanceTime(webrtc::TimeDelta::Millis(1)); } else { // Otherwise, run a normal message loop. - Message msg; - if (msg_queue_->Get(&msg, Thread::kForever)) { - msg_queue_->Dispatch(&msg); - } + msg_queue_->ProcessMessages(Thread::kForever); } } stop_on_idle_ = false; @@ -785,7 +896,9 @@ static double Random() { int VirtualSocketServer::Connect(VirtualSocket* socket, const SocketAddress& remote_addr, bool use_delay) { - uint32_t delay = use_delay ? GetTransitDelay(socket) : 0; + RTC_DCHECK(msg_queue_); + + TimeDelta delay = TimeDelta::Millis(use_delay ? GetTransitDelay(socket) : 0); VirtualSocket* remote = LookupBinding(remote_addr); if (!CanInteractWith(socket, remote)) { RTC_LOG(LS_INFO) << "Address family mismatch between " @@ -794,26 +907,22 @@ int VirtualSocketServer::Connect(VirtualSocket* socket, return -1; } if (remote != nullptr) { - SocketAddress addr = socket->GetLocalAddress(); - msg_queue_->PostDelayed(RTC_FROM_HERE, delay, remote, MSG_ID_CONNECT, - new MessageAddress(addr)); + remote->PostConnect(delay, socket->GetLocalAddress()); } else { RTC_LOG(LS_INFO) << "No one listening at " << remote_addr.ToString(); - msg_queue_->PostDelayed(RTC_FROM_HERE, delay, socket, MSG_ID_DISCONNECT); + socket->PostDisconnect(delay); } return 0; } bool VirtualSocketServer::Disconnect(VirtualSocket* socket) { - if (socket) { - // If we simulate packets being delayed, we should simulate the - // equivalent of a FIN being delayed as well. - uint32_t delay = GetTransitDelay(socket); - // Remove the mapping. - msg_queue_->PostDelayed(RTC_FROM_HERE, delay, socket, MSG_ID_DISCONNECT); - return true; - } - return false; + if (!socket || !msg_queue_) + return false; + + // If we simulate packets being delayed, we should simulate the + // equivalent of a FIN being delayed as well. + socket->PostDisconnect(TimeDelta::Millis(GetTransitDelay(socket))); + return true; } bool VirtualSocketServer::Disconnect(const SocketAddress& addr) { @@ -839,43 +948,6 @@ bool VirtualSocketServer::Disconnect(const SocketAddress& local_addr, return socket != nullptr; } -void VirtualSocketServer::CancelConnects(VirtualSocket* socket) { - MessageList msgs; - if (msg_queue_) { - msg_queue_->Clear(socket, MSG_ID_CONNECT, &msgs); - } - for (MessageList::iterator it = msgs.begin(); it != msgs.end(); ++it) { - RTC_DCHECK(nullptr != it->pdata); - MessageAddress* data = static_cast(it->pdata); - SocketAddress local_addr = socket->GetLocalAddress(); - // Lookup remote side. - VirtualSocket* lookup_socket = LookupConnection(local_addr, data->addr); - if (lookup_socket) { - // Server socket, remote side is a socket retreived by - // accept. Accepted sockets are not bound so we will not - // find it by looking in the bindings table. 
- Disconnect(lookup_socket); - RemoveConnection(local_addr, data->addr); - } else { - Disconnect(data->addr); - } - delete data; - } -} - -void VirtualSocketServer::Clear(VirtualSocket* socket) { - // Clear incoming packets and disconnect messages - if (msg_queue_) { - msg_queue_->Clear(socket); - } -} - -void VirtualSocketServer::PostSignalReadEvent(VirtualSocket* socket) { - // Clear the message so it doesn't end up posted multiple times. - msg_queue_->Clear(socket, MSG_ID_SIGNALREADEVENT); - msg_queue_->Post(RTC_FROM_HERE, socket, MSG_ID_SIGNALREADEVENT); -} - int VirtualSocketServer::SendUdp(VirtualSocket* socket, const char* data, size_t data_size, @@ -1011,6 +1083,7 @@ void VirtualSocketServer::AddPacketToNetwork(VirtualSocket* sender, size_t data_size, size_t header_size, bool ordered) { + RTC_DCHECK(msg_queue_); uint32_t send_delay = sender->AddPacket(cur_time, data_size + header_size); // Find the delay for crossing the many virtual hops of the network. @@ -1025,14 +1098,12 @@ void VirtualSocketServer::AddPacketToNetwork(VirtualSocket* sender, sender_addr.SetIP(default_ip); } - // Post the packet as a message to be delivered (on our own thread) - Packet* p = new Packet(data, data_size, sender_addr); - - int64_t ts = TimeAfter(send_delay + transit_delay); + int64_t ts = cur_time + send_delay + transit_delay; if (ordered) { ts = sender->UpdateOrderedDelivery(ts); } - msg_queue_->PostAt(RTC_FROM_HERE, ts, recipient, MSG_ID_PACKET, p); + recipient->PostPacket(TimeDelta::Millis(ts - cur_time), + std::make_unique(data, data_size, sender_addr)); } uint32_t VirtualSocketServer::SendDelay(uint32_t size) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h index 8873f18dcc..93ef288826 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h @@ -15,11 +15,14 @@ #include #include +#include "absl/types/optional.h" +#include "api/make_ref_counted.h" +#include "api/ref_counted_base.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/event.h" #include "rtc_base/fake_clock.h" -#include "rtc_base/message_handler.h" #include "rtc_base/socket_server.h" #include "rtc_base/synchronization/mutex.h" @@ -29,11 +32,9 @@ class Packet; class VirtualSocketServer; class SocketAddressPair; -// Implements the socket interface using the virtual network. Packets are -// passed as messages using the message queue of the socket server. -class VirtualSocket : public Socket, - public MessageHandler, - public sigslot::has_slots<> { +// Implements the socket interface using the virtual network. Packets are +// passed in tasks using the thread of the socket server. +class VirtualSocket : public Socket, public sigslot::has_slots<> { public: VirtualSocket(VirtualSocketServer* server, int family, int type); ~VirtualSocket() override; @@ -59,7 +60,6 @@ class VirtualSocket : public Socket, ConnState GetState() const override; int GetOption(Option opt, int* value) override; int SetOption(Option opt, int value) override; - void OnMessage(Message* pmsg) override; size_t recv_buffer_size() const { return recv_buffer_size_; } size_t send_buffer_size() const { return send_buffer_.size(); } @@ -86,16 +86,82 @@ class VirtualSocket : public Socket, // Removes stale packets from the network. Returns current size. 
size_t PurgeNetworkPackets(int64_t cur_time); + void PostPacket(webrtc::TimeDelta delay, std::unique_ptr packet); + void PostConnect(webrtc::TimeDelta delay, const SocketAddress& remote_addr); + void PostDisconnect(webrtc::TimeDelta delay); + private: + // Struct shared with pending tasks that may outlive VirtualSocket. + class SafetyBlock : public RefCountedNonVirtual { + public: + explicit SafetyBlock(VirtualSocket* socket); + SafetyBlock(const SafetyBlock&) = delete; + SafetyBlock& operator=(const SafetyBlock&) = delete; + ~SafetyBlock(); + + // Prohibits posted delayed task to access owning VirtualSocket and + // cleanups members protected by the `mutex`. + void SetNotAlive(); + bool IsAlive(); + + // Copies up to `size` bytes into buffer from the next received packet + // and fills `addr` with remote address of that received packet. + // Returns number of bytes copied or negative value on failure. + int RecvFrom(void* buffer, size_t size, SocketAddress& addr); + + void Listen(); + + struct AcceptResult { + int error = 0; + std::unique_ptr socket; + SocketAddress remote_addr; + }; + AcceptResult Accept(); + + bool AddPacket(std::unique_ptr packet); + void PostConnect(webrtc::TimeDelta delay, const SocketAddress& remote_addr); + + private: + enum class Signal { kNone, kReadEvent, kConnectEvent }; + // `PostConnect` rely on the fact that std::list iterators are not + // invalidated on any changes to other elements in the container. + using PostedConnects = std::list; + + void PostSignalReadEvent() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + void MaybeSignalReadEvent(); + Signal Connect(PostedConnects::iterator remote_addr_it); + + webrtc::Mutex mutex_; + VirtualSocket& socket_; + bool alive_ RTC_GUARDED_BY(mutex_) = true; + // Flag indicating if async Task to signal SignalReadEvent is posted. + // To avoid posting multiple such tasks. + bool pending_read_signal_event_ RTC_GUARDED_BY(mutex_) = false; + + // Members below do not need to outlive VirtualSocket, but are used by the + // posted tasks. Keeping them in the VirtualSocket confuses thread + // annotations because they can't detect that locked mutex is the same mutex + // this members are guarded by. + + // Addresses of the sockets for potential connect. For each address there + // is a posted task that should finilze the connect. + PostedConnects posted_connects_ RTC_GUARDED_BY(mutex_); + + // Data which has been received from the network + std::list> recv_buffer_ RTC_GUARDED_BY(mutex_); + + // Pending sockets which can be Accepted + absl::optional> listen_queue_ + RTC_GUARDED_BY(mutex_); + }; + struct NetworkEntry { size_t size; int64_t done_time; }; - typedef std::deque ListenQueue; typedef std::deque NetworkQueue; typedef std::vector SendBuffer; - typedef std::list RecvBuffer; typedef std::map OptionsMap; int InitiateConnect(const SocketAddress& addr, bool use_delay); @@ -112,9 +178,8 @@ class VirtualSocket : public Socket, SocketAddress local_addr_; SocketAddress remote_addr_; - // Pending sockets which can be Accepted - std::unique_ptr listen_queue_ RTC_GUARDED_BY(mutex_) - RTC_PT_GUARDED_BY(mutex_); + const scoped_refptr safety_ = + make_ref_counted(this); // Data which tcp has buffered for sending SendBuffer send_buffer_; @@ -122,9 +187,6 @@ class VirtualSocket : public Socket, // Set back to true when the socket can send again. 
bool ready_to_send_ = true; - // Mutex to protect the recv_buffer and listen_queue_ - webrtc::Mutex mutex_; - // Network model that enforces bandwidth and capacity constraints NetworkQueue network_; size_t network_size_; @@ -132,8 +194,6 @@ class VirtualSocket : public Socket, // It is used to ensure ordered delivery of packets sent on this socket. int64_t last_delivery_time_ = 0; - // Data which has been received from the network - RecvBuffer recv_buffer_ RTC_GUARDED_BY(mutex_); // The amount of data which is in flight or in recv_buffer_ size_t recv_buffer_size_; @@ -163,6 +223,9 @@ class VirtualSocketServer : public SocketServer { explicit VirtualSocketServer(ThreadProcessingFakeClock* fake_clock); ~VirtualSocketServer() override; + VirtualSocketServer(const VirtualSocketServer&) = delete; + VirtualSocketServer& operator=(const VirtualSocketServer&) = delete; + // The default source address specifies which local address to use when a // socket is bound to the 'any' address, e.g. 0.0.0.0. (If not set, the 'any' // address is used as the source address on outgoing virtual packets, exposed @@ -221,7 +284,7 @@ class VirtualSocketServer : public SocketServer { // SocketServer: void SetMessageQueue(Thread* queue) override; - bool Wait(int cms, bool process_io) override; + bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) override; void WakeUp() override; void SetDelayOnAddress(const rtc::SocketAddress& address, int delay_ms) { @@ -306,14 +369,6 @@ class VirtualSocketServer : public SocketServer { // Computes the number of milliseconds required to send a packet of this size. uint32_t SendDelay(uint32_t size) RTC_LOCKS_EXCLUDED(mutex_); - // Cancel attempts to connect to a socket that is being closed. - void CancelConnects(VirtualSocket* socket); - - // Clear incoming messages for a socket that is being closed. - void Clear(VirtualSocket* socket); - - void PostSignalReadEvent(VirtualSocket* socket); - // Sending was previously blocked, but now isn't. sigslot::signal0<> SignalReadyToSend; @@ -325,6 +380,7 @@ class VirtualSocketServer : public SocketServer { VirtualSocket* LookupBinding(const SocketAddress& addr); private: + friend VirtualSocket; uint16_t GetNextPort(); // Find the socket pair corresponding to this server address. 
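One API change worth calling out in the hunk above is Wait(int cms, ...) becoming Wait(webrtc::TimeDelta max_wait_duration, ...), i.e. a typed duration instead of a bare millisecond count. A small sketch of the same move with std::chrono standing in for TimeDelta; ToyWaiter is an illustrative class, not the socket server.

```cpp
#include <chrono>
#include <condition_variable>
#include <mutex>

class ToyWaiter {
 public:
  // Old shape: bool Wait(int cms, bool process_io);
  // New shape: the caller passes a typed maximum wait duration.
  bool Wait(std::chrono::milliseconds max_wait_duration, bool /*process_io*/) {
    std::unique_lock<std::mutex> lock(mutex_);
    return cv_.wait_for(lock, max_wait_duration, [this] { return woken_; });
  }

  void WakeUp() {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      woken_ = true;
    }
    cv_.notify_all();
  }

 private:
  std::mutex mutex_;
  std::condition_variable cv_;
  bool woken_ = false;
};

// Usage: waiter.Wait(std::chrono::milliseconds(50), /*process_io=*/true);
```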
@@ -419,7 +475,6 @@ class VirtualSocketServer : public SocketServer { size_t max_udp_payload_ RTC_GUARDED_BY(mutex_) = 65507; bool sending_blocked_ RTC_GUARDED_BY(mutex_) = false; - RTC_DISALLOW_COPY_AND_ASSIGN(VirtualSocketServer); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.cc index 80e49f2a16..93af1377be 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.cc @@ -211,9 +211,21 @@ Version MajorMinorBuildToVersion(int major, int minor, int build) { return VERSION_WIN10_19H1; } else if (build < 19041) { return VERSION_WIN10_19H2; - } else { + } else if (build < 19042) { return VERSION_WIN10_20H1; + } else if (build < 19043) { + return VERSION_WIN10_20H2; + } else if (build < 19044) { + return VERSION_WIN10_21H1; + } else if (build < 20348) { + return VERSION_WIN10_21H2; + } else if (build < 22000) { + return VERSION_SERVER_2022; + } else { + return VERSION_WIN11; } + } else if (major == 11) { + return VERSION_WIN11; } else if (major > 6) { RTC_DCHECK_NOTREACHED(); return VERSION_WIN_LAST; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.h b/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.h index dbb0d8eb58..8542626afb 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.h @@ -15,8 +15,6 @@ #include -#include "rtc_base/constructor_magic.h" - typedef void* HANDLE; namespace rtc { @@ -32,24 +30,27 @@ namespace rtc_win { enum Version { VERSION_PRE_XP = 0, // Not supported. VERSION_XP = 1, - VERSION_SERVER_2003 = 2, // Also includes XP Pro x64 and Server 2003 R2. - VERSION_VISTA = 3, // Also includes Windows Server 2008. - VERSION_WIN7 = 4, // Also includes Windows Server 2008 R2. - VERSION_WIN8 = 5, // Also includes Windows Server 2012. - VERSION_WIN8_1 = 6, // Also includes Windows Server 2012 R2. - VERSION_WIN10 = 7, // Threshold 1: Version 1507, Build 10240. - VERSION_WIN10_TH2 = 8, // Threshold 2: Version 1511, Build 10586. - VERSION_WIN10_RS1 = 9, // Redstone 1: Version 1607, Build 14393. - VERSION_WIN10_RS2 = 10, // Redstone 2: Version 1703, Build 15063. - VERSION_WIN10_RS3 = 11, // Redstone 3: Version 1709, Build 16299. - VERSION_WIN10_RS4 = 12, // Redstone 4: Version 1803, Build 17134. - VERSION_WIN10_RS5 = 13, // Redstone 5: Version 1809, Build 17763. - VERSION_WIN10_19H1 = 14, // 19H1: Version 1903, Build 18362. - VERSION_WIN10_19H2 = 15, // 19H2: Version 1909, Build 18363. - VERSION_WIN10_20H1 = 16, // 20H1 (Vibranium): Version 2004, Build 19041. - // On edit, update tools\metrics\histograms\enums.xml "WindowsVersion" and - // "GpuBlacklistFeatureTestResultsWindows2". - VERSION_WIN_LAST, // Indicates error condition. + VERSION_SERVER_2003 = 2, // Also includes XP Pro x64 and Server 2003 R2. + VERSION_VISTA = 3, // Also includes Windows Server 2008. + VERSION_WIN7 = 4, // Also includes Windows Server 2008 R2. + VERSION_WIN8 = 5, // Also includes Windows Server 2012. + VERSION_WIN8_1 = 6, // Also includes Windows Server 2012 R2. + VERSION_WIN10 = 7, // Threshold 1: Version 1507, Build 10240. + VERSION_WIN10_TH2 = 8, // Threshold 2: Version 1511, Build 10586. + VERSION_WIN10_RS1 = 9, // Redstone 1: Version 1607, Build 14393. + VERSION_WIN10_RS2 = 10, // Redstone 2: Version 1703, Build 15063. + VERSION_WIN10_RS3 = 11, // Redstone 3: Version 1709, Build 16299. 
+ VERSION_WIN10_RS4 = 12, // Redstone 4: Version 1803, Build 17134. + VERSION_WIN10_RS5 = 13, // Redstone 5: Version 1809, Build 17763. + VERSION_WIN10_19H1 = 14, // 19H1: Version 1903, Build 18362. + VERSION_WIN10_19H2 = 15, // 19H2: Version 1909, Build 18363. + VERSION_WIN10_20H1 = 16, // 20H1: Version 2004, Build 19041. + VERSION_WIN10_20H2 = 17, // 20H2: Build 19042. + VERSION_WIN10_21H1 = 18, // 21H1: Build 19043. + VERSION_WIN10_21H2 = 19, // 21H2: Build 19044. + VERSION_SERVER_2022 = 20, // Server 2022: Build 20348. + VERSION_WIN11 = 21, // Windows 11: Build 22000. + VERSION_WIN_LAST, // Indicates error condition. }; // A rough bucketing of the available types of versions of Windows. This is used @@ -105,6 +106,9 @@ class OSInfo { WOW64_UNKNOWN, }; + OSInfo(const OSInfo&) = delete; + OSInfo& operator=(const OSInfo&) = delete; + static OSInfo* GetInstance(); Version version() const { return version_; } @@ -140,8 +144,6 @@ class OSInfo { size_t allocation_granularity_; WOW64Status wow64_status_; std::string processor_model_name_; - - RTC_DISALLOW_COPY_AND_ASSIGN(OSInfo); }; // Because this is by far the most commonly-requested value from the above diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/PeerConnectionFactory_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/PeerConnectionFactory_jni.h index 58bf71c80f..c1e74b0034 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/PeerConnectionFactory_jni.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/PeerConnectionFactory_jni.h @@ -317,16 +317,6 @@ JNI_GENERATOR_EXPORT void Java_org_webrtc_PeerConnectionFactory_nativePrintStack return JNI_PeerConnectionFactory_PrintStackTrace(env, tid); } -static void JNI_PeerConnectionFactory_PrintStackTracesOfRegisteredThreads(JNIEnv* env); - -JNI_GENERATOR_EXPORT void - Java_org_webrtc_PeerConnectionFactory_nativePrintStackTracesOfRegisteredThreads( - JNIEnv* env, - jclass jcaller) { - return JNI_PeerConnectionFactory_PrintStackTracesOfRegisteredThreads(env); -} - - static std::atomic g_org_webrtc_PeerConnectionFactory_00024Options_getNetworkIgnoreMask(nullptr); static jint Java_Options_getNetworkIgnoreMask(JNIEnv* env, const base::android::JavaRef& diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/audio_device_module/audio_device_android.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/audio_device_module/audio_device_android.cc index 9a4236165a..2be7f7d7fb 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/audio_device_module/audio_device_android.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/audio_device_module/audio_device_android.cc @@ -18,7 +18,6 @@ #include "api/scoped_refptr.h" #include "rtc_base/logging.h" #include "rtc_base/ref_count.h" -#include "rtc_base/ref_counted_object.h" #if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO) #include "sdk/android/src/jni/audio_device/aaudio_player.h" diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/class_loader.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/class_loader.cc index 1789d78c85..2b93f41a81 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/class_loader.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/class_loader.cc @@ -33,6 +33,7 @@ class ClassLoader { public: explicit ClassLoader(JNIEnv* env) : class_loader_(jni::Java_WebRtcClassLoader_getClassLoader(env)) { + DEBUG_REF("webrtc 
class_loader"); class_loader_class_ = reinterpret_cast( env->NewGlobalRef(env->FindClass("java/lang/ClassLoader"))); CHECK_EXCEPTION(env); diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/jni_int_wrapper.h b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/jni_int_wrapper.h index 23da7f2ce4..a6e68eb81f 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/jni_int_wrapper.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/jni_int_wrapper.h @@ -14,6 +14,10 @@ #ifndef SDK_ANDROID_NATIVE_API_JNI_JNI_INT_WRAPPER_H_ #define SDK_ANDROID_NATIVE_API_JNI_JNI_INT_WRAPPER_H_ +#include + +#include + // Wrapper used to receive int when calling Java from native. The wrapper // disallows automatic conversion of anything besides int32_t to a jint. // Checking is only done in debugging builds. diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/scoped_java_ref.h b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/scoped_java_ref.h index a2be447de2..ca11cdec62 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/scoped_java_ref.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/scoped_java_ref.h @@ -19,6 +19,7 @@ #include #include "sdk/android/native_api/jni/jvm.h" +#include "tgnet/FileLog.h" namespace webrtc { @@ -182,8 +183,10 @@ class ScopedJavaGlobalRef : public JavaRef { : JavaRef(other.Release()) {} ~ScopedJavaGlobalRef() { - if (obj_ != nullptr) + if (obj_ != nullptr) { + DEBUG_DELREF("ScopedJavaGlobalRef"); AttachCurrentThreadIfNeeded()->DeleteGlobalRef(obj_); + } } ScopedJavaGlobalRef(const ScopedJavaGlobalRef&) = delete; @@ -192,13 +195,20 @@ class ScopedJavaGlobalRef : public JavaRef { void operator=(const JavaRef& other) { JNIEnv* env = AttachCurrentThreadIfNeeded(); if (obj_ != nullptr) { + DEBUG_DELREF("webrtc 3 delete global ref"); env->DeleteGlobalRef(obj_); } - obj_ = other.is_null() ? nullptr : env->NewGlobalRef(other.obj()); + if (other.is_null()) { + obj_ = nullptr; + } else { + DEBUG_REF("webrtc 3 new global ref"); + obj_ = env->NewGlobalRef(other.obj()); + } } void operator=(std::nullptr_t) { if (obj_ != nullptr) { + DEBUG_DELREF("webrtc 3 delete global ref"); AttachCurrentThreadIfNeeded()->DeleteGlobalRef(obj_); } obj_ = nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/stacktrace/stacktrace.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/stacktrace/stacktrace.cc index 48894374ac..96e03e0af1 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/stacktrace/stacktrace.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/stacktrace/stacktrace.cc @@ -83,6 +83,12 @@ class AsyncSafeWaitableEvent { // Struct to store the arguments to the signal handler. struct SignalHandlerOutputState { + // This function is called iteratively for each stack trace element and stores + // the element in the array from `unwind_output_state`. + static _Unwind_Reason_Code UnwindBacktrace( + struct _Unwind_Context* unwind_context, + void* unwind_output_state); + // This event is signalled when signal handler is done executing. AsyncSafeWaitableEvent signal_handler_finish_event; // Running counter of array index below. @@ -91,17 +97,11 @@ struct SignalHandlerOutputState { uintptr_t addresses[kMaxStackSize]; }; -// Global lock to ensure only one thread gets interrupted at a time. 
-ABSL_CONST_INIT GlobalMutex g_signal_handler_lock(absl::kConstInit); -// Argument passed to the ThreadSignalHandler() from the sampling thread to the -// sampled (stopped) thread. This value is set just before sending signal to the -// thread and reset when handler is done. -SignalHandlerOutputState* volatile g_signal_handler_output_state; - // This function is called iteratively for each stack trace element and stores // the element in the array from `unwind_output_state`. -_Unwind_Reason_Code UnwindBacktrace(struct _Unwind_Context* unwind_context, - void* unwind_output_state) { +_Unwind_Reason_Code SignalHandlerOutputState::UnwindBacktrace( + struct _Unwind_Context* unwind_context, + void* unwind_output_state) { SignalHandlerOutputState* const output_state = static_cast(unwind_output_state); @@ -123,13 +123,43 @@ _Unwind_Reason_Code UnwindBacktrace(struct _Unwind_Context* unwind_context, return _URC_NO_REASON; } +class GlobalStackUnwinder { + public: + static GlobalStackUnwinder& Get() { + static GlobalStackUnwinder* const instance = new GlobalStackUnwinder(); + return *instance; + } + const char* CaptureRawStacktrace(int pid, + int tid, + SignalHandlerOutputState* params); + + private: + GlobalStackUnwinder() { current_output_state_.store(nullptr); } + + // Temporarily installed signal handler. + static void SignalHandler(int signum, siginfo_t* info, void* ptr); + + Mutex mutex_; + + // Accessed by signal handler. + static std::atomic current_output_state_; + // A signal handler mustn't use locks. + static_assert(std::atomic::is_always_lock_free); +}; + +std::atomic + GlobalStackUnwinder::current_output_state_; + // This signal handler is exectued on the interrupted thread. -void SignalHandler(int signum, siginfo_t* info, void* ptr) { +void GlobalStackUnwinder::SignalHandler(int signum, + siginfo_t* info, + void* ptr) { // This should have been set by the thread requesting the stack trace. SignalHandlerOutputState* signal_handler_output_state = - g_signal_handler_output_state; + current_output_state_.load(); if (signal_handler_output_state != nullptr) { - _Unwind_Backtrace(&UnwindBacktrace, signal_handler_output_state); + _Unwind_Backtrace(&SignalHandlerOutputState::UnwindBacktrace, + signal_handler_output_state); signal_handler_output_state->signal_handler_finish_event.Signal(); } } @@ -138,9 +168,10 @@ void SignalHandler(int signum, siginfo_t* info, void* ptr) { // trace and interrupt the given tid. This function will block until the output // thread stack trace has been stored in `params`. The return value is an error // string on failure and null on success. -const char* CaptureRawStacktrace(int pid, - int tid, - SignalHandlerOutputState* params) { +const char* GlobalStackUnwinder::CaptureRawStacktrace( + int pid, + int tid, + SignalHandlerOutputState* params) { // This function is under a global lock since we are changing the signal // handler and using global state for the output. The lock is to ensure only // one thread at a time gets captured. The lock also means we need to be very @@ -153,8 +184,8 @@ const char* CaptureRawStacktrace(int pid, act.sa_flags = SA_RESTART | SA_SIGINFO; sigemptyset(&act.sa_mask); - GlobalMutexLock ls(&g_signal_handler_lock); - g_signal_handler_output_state = params; + MutexLock loch(&mutex_); + current_output_state_.store(params); if (sigaction(kSignal, &act, &old_act) != 0) return "Failed to change signal action"; @@ -210,7 +241,8 @@ std::vector GetStackTrace(int tid) { // `g_signal_handler_param`. 
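GlobalStackUnwinder above replaces the global mutex plus volatile pointer with a lock-free std::atomic that the signal handler only loads, and documents the lock-freedom with a static_assert (a signal handler must not take locks). A condensed sketch of the handoff, assuming a POSIX signal; the types and names are illustrative, not the full unwinder.

```cpp
#include <atomic>
#include <csignal>
#include <cstddef>
#include <cstdint>

struct OutputState {
  std::size_t count = 0;
  std::uintptr_t addresses[64] = {};
};

static std::atomic<OutputState*> g_current_output_state{nullptr};
static_assert(std::atomic<OutputState*>::is_always_lock_free,
              "the signal handler must not block on this atomic");

extern "C" void ToySignalHandler(int /*signum*/) {
  // Only async-signal-safe work here: load the pointer and write into it.
  OutputState* state = g_current_output_state.load(std::memory_order_acquire);
  if (state != nullptr) {
    const std::size_t i = state->count % 64;
    state->addresses[i] = 0;  // in the real handler, _Unwind_Backtrace fills PCs
    ++state->count;
  }
}

void CaptureOnThisThread(OutputState* params) {
  std::signal(SIGUSR1, ToySignalHandler);  // simplified: no sigaction/old handler
  g_current_output_state.store(params, std::memory_order_release);
  std::raise(SIGUSR1);
  g_current_output_state.store(nullptr, std::memory_order_release);
}
```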
SignalHandlerOutputState params; - const char* error_string = CaptureRawStacktrace(getpid(), tid, ¶ms); + const char* error_string = + GlobalStackUnwinder::Get().CaptureRawStacktrace(getpid(), tid, ¶ms); if (error_string != nullptr) { RTC_LOG(LS_ERROR) << error_string << ". tid: " << tid << ". errno: " << errno; @@ -224,7 +256,7 @@ std::vector GetStackTrace(int tid) { std::vector GetStackTrace() { SignalHandlerOutputState params; - _Unwind_Backtrace(&UnwindBacktrace, ¶ms); + _Unwind_Backtrace(&SignalHandlerOutputState::UnwindBacktrace, ¶ms); if (params.stack_size_counter >= kMaxStackSize) { RTC_LOG(LS_WARNING) << "Stack trace was truncated"; } diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/video/video_source.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/video/video_source.cc index 4f1409ef7b..e967c2a465 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/video/video_source.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/video/video_source.cc @@ -10,7 +10,6 @@ #include "sdk/android/native_api/video/video_source.h" -#include "rtc_base/ref_counted_object.h" #include "sdk/android/src/jni/android_video_track_source.h" #include "sdk/android/src/jni/native_capturer_observer.h" diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_metrics.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_metrics.cc index e021ef407a..01398cc77f 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_metrics.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_metrics.cc @@ -11,6 +11,7 @@ #include #include +#include "rtc_base/string_utils.h" #include "sdk/android/generated_metrics_jni/Metrics_jni.h" #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/jni_helpers.h" @@ -28,7 +29,9 @@ static void JNI_Metrics_Enable(JNIEnv* jni) { static ScopedJavaLocalRef JNI_Metrics_GetAndReset(JNIEnv* jni) { ScopedJavaLocalRef j_metrics = Java_Metrics_Constructor(jni); - std::map> histograms; + std::map, + rtc::AbslStringViewCmp> + histograms; metrics::GetAndReset(&histograms); for (const auto& kv : histograms) { // Create and add samples to `HistogramInfo`. diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.cc index 9dbc27fdc2..9c3276926c 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.cc @@ -11,6 +11,8 @@ #include "sdk/android/src/jni/android_network_monitor.h" #include + +#include "absl/strings/string_view.h" #ifndef RTLD_NOLOAD // This was added in Lollipop to dlfcn.h #define RTLD_NOLOAD 4 @@ -21,12 +23,10 @@ #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "sdk/android/generated_base_jni/NetworkChangeDetector_jni.h" #include "sdk/android/generated_base_jni/NetworkMonitor_jni.h" #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/jni_helpers.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace jni { @@ -133,14 +133,17 @@ static rtc::AdapterType AdapterTypeFromNetworkType( case NETWORK_UNKNOWN_CELLULAR: return rtc::ADAPTER_TYPE_CELLULAR; case NETWORK_VPN: + return rtc::ADAPTER_TYPE_VPN; case NETWORK_BLUETOOTH: // There is no corresponding mapping for bluetooth networks. 
- // Map it to VPN for now. - return rtc::ADAPTER_TYPE_VPN; - default: - RTC_DCHECK_NOTREACHED() << "Invalid network type " << network_type; + // Map it to UNKNOWN for now. + return rtc::ADAPTER_TYPE_UNKNOWN; + case NETWORK_NONE: return rtc::ADAPTER_TYPE_UNKNOWN; } + + RTC_DCHECK_NOTREACHED() << "Invalid network type " << network_type; + return rtc::ADAPTER_TYPE_UNKNOWN; } static rtc::IPAddress JavaToNativeIpAddress( @@ -225,11 +228,13 @@ std::string NetworkInformation::ToString() const { AndroidNetworkMonitor::AndroidNetworkMonitor( JNIEnv* env, - const JavaRef& j_application_context) + const JavaRef& j_application_context, + const FieldTrialsView& field_trials) : android_sdk_int_(Java_NetworkMonitor_androidSdkInt(env)), j_application_context_(env, j_application_context), j_network_monitor_(env, Java_NetworkMonitor_getInstance(env)), - network_thread_(rtc::Thread::Current()) {} + network_thread_(rtc::Thread::Current()), + field_trials_(field_trials) {} AndroidNetworkMonitor::~AndroidNetworkMonitor() { RTC_DCHECK(!started_); @@ -240,14 +245,16 @@ void AndroidNetworkMonitor::Start() { if (started_) { return; } + reset(); started_ = true; surface_cellular_types_ = - webrtc::field_trial::IsEnabled("WebRTC-SurfaceCellularTypes"); - find_network_handle_without_ipv6_temporary_part_ = - webrtc::field_trial::IsEnabled( - "WebRTC-FindNetworkHandleWithoutIpv6TemporaryPart"); + field_trials_.IsEnabled("WebRTC-SurfaceCellularTypes"); + find_network_handle_without_ipv6_temporary_part_ = field_trials_.IsEnabled( + "WebRTC-FindNetworkHandleWithoutIpv6TemporaryPart"); bind_using_ifname_ = - !webrtc::field_trial::IsDisabled("WebRTC-BindUsingInterfaceName"); + !field_trials_.IsDisabled("WebRTC-BindUsingInterfaceName"); + disable_is_adapter_available_ = field_trials_.IsDisabled( + "WebRTC-AndroidNetworkMonitor-IsAdapterAvailable"); // This pointer is also accessed by the methods called from java threads. 
// Assigning it here is safe, because the java monitor is in a stopped state, @@ -259,6 +266,14 @@ void AndroidNetworkMonitor::Start() { env, j_network_monitor_, j_application_context_, jlongFromPointer(this)); } +void AndroidNetworkMonitor::reset() { + RTC_DCHECK_RUN_ON(network_thread_); + network_handle_by_address_.clear(); + network_handle_by_if_name_.clear(); + network_info_by_handle_.clear(); + network_preference_by_adapter_type_.clear(); +} + void AndroidNetworkMonitor::Stop() { RTC_DCHECK_RUN_ON(network_thread_); if (!started_) { @@ -275,8 +290,7 @@ void AndroidNetworkMonitor::Stop() { Java_NetworkMonitor_stopMonitoring(env, j_network_monitor_, jlongFromPointer(this)); - network_handle_by_address_.clear(); - network_info_by_handle_.clear(); + reset(); } // The implementation is largely taken from UDPSocketPosix::BindToNetwork in @@ -284,7 +298,7 @@ void AndroidNetworkMonitor::Stop() { rtc::NetworkBindingResult AndroidNetworkMonitor::BindSocketToNetwork( int socket_fd, const rtc::IPAddress& address, - const std::string& if_name) { + absl::string_view if_name) { RTC_DCHECK_RUN_ON(network_thread_); // Android prior to Lollipop didn't have support for binding sockets to @@ -400,24 +414,20 @@ void AndroidNetworkMonitor::OnNetworkConnected_n( const NetworkInformation& network_info) { RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(LS_INFO) << "Network connected: " << network_info.ToString(); - adapter_type_by_name_[network_info.interface_name] = - AdapterTypeFromNetworkType(network_info.type, surface_cellular_types_); - if (network_info.type == NETWORK_VPN) { - vpn_underlying_adapter_type_by_name_[network_info.interface_name] = - AdapterTypeFromNetworkType(network_info.underlying_type_for_vpn, - surface_cellular_types_); - } network_info_by_handle_[network_info.handle] = network_info; for (const rtc::IPAddress& address : network_info.ip_addresses) { network_handle_by_address_[address] = network_info.handle; } + network_handle_by_if_name_[network_info.interface_name] = network_info.handle; + RTC_CHECK(network_info_by_handle_.size() >= + network_handle_by_if_name_.size()); InvokeNetworksChangedCallback(); } absl::optional AndroidNetworkMonitor::FindNetworkHandleFromAddressOrName( const rtc::IPAddress& ip_address, - const std::string& if_name) const { + absl::string_view if_name) const { RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(LS_INFO) << "Find network handle."; if (find_network_handle_without_ipv6_temporary_part_) { @@ -443,14 +453,20 @@ AndroidNetworkMonitor::FindNetworkHandleFromAddressOrName( absl::optional AndroidNetworkMonitor::FindNetworkHandleFromIfname( - const std::string& if_name) const { + absl::string_view if_name) const { RTC_DCHECK_RUN_ON(network_thread_); + + auto iter = network_handle_by_if_name_.find(if_name); + if (iter != network_handle_by_if_name_.end()) { + return iter->second; + } + if (bind_using_ifname_) { - for (auto const& iter : network_info_by_handle_) { - if (if_name.find(iter.second.interface_name) != std::string::npos) { - // Use partial match so that e.g if_name="v4-wlan0" is matched - // agains iter.first="wlan0" - return absl::make_optional(iter.first); + for (auto const& iter : network_handle_by_if_name_) { + // Use substring match so that e.g if_name="v4-wlan0" is matched + // agains iter="wlan0" + if (if_name.find(iter.first) != absl::string_view::npos) { + return absl::make_optional(iter.second); } } } @@ -462,12 +478,57 @@ void AndroidNetworkMonitor::OnNetworkDisconnected_n(NetworkHandle handle) { RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(LS_INFO) << 
"Network disconnected for handle " << handle; auto iter = network_info_by_handle_.find(handle); - if (iter != network_info_by_handle_.end()) { - for (const rtc::IPAddress& address : iter->second.ip_addresses) { - network_handle_by_address_.erase(address); + if (iter == network_info_by_handle_.end()) { + return; + } + + for (const rtc::IPAddress& address : iter->second.ip_addresses) { + network_handle_by_address_.erase(address); + } + + // We've discovered that the if_name is not always unique, + // i.e it can be several network conencted with same if_name. + // + // This is handled the following way, + // 1) OnNetworkConnected_n overwrites any previous "owner" of an interface + // name ("owner" == entry in network_handle_by_if_name_). + // 2) OnNetworkDisconnected_n, we scan and see if there are any remaining + // connected network with the interface name, and set it as owner. + // + // This means that network_info_by_handle can have more entries than + // network_handle_by_if_name_. + + // Check if we are registered as "owner" of if_name. + const auto& if_name = iter->second.interface_name; + auto iter2 = network_handle_by_if_name_.find(if_name); + RTC_DCHECK(iter2 != network_handle_by_if_name_.end()); + if (iter2 != network_handle_by_if_name_.end() && iter2->second == handle) { + // We are owner... + // Check if there is someone else we can set as owner. + bool found = false; + for (const auto& info : network_info_by_handle_) { + if (info.first == handle) { + continue; + } + if (info.second.interface_name == if_name) { + found = true; + network_handle_by_if_name_[if_name] = info.first; + break; + } + } + if (!found) { + // No new owner... + network_handle_by_if_name_.erase(iter2); } - network_info_by_handle_.erase(iter); + } else { + // We are not owner...don't do anything. +#if RTC_DCHECK_IS_ON + auto owner_handle = FindNetworkHandleFromIfname(if_name); + RTC_DCHECK(owner_handle && *owner_handle != handle); +#endif } + + network_info_by_handle_.erase(iter); } void AndroidNetworkMonitor::OnNetworkPreference_n( @@ -485,8 +546,16 @@ void AndroidNetworkMonitor::OnNetworkPreference_n( void AndroidNetworkMonitor::SetNetworkInfos( const std::vector& network_infos) { RTC_DCHECK_RUN_ON(network_thread_); - network_handle_by_address_.clear(); - network_info_by_handle_.clear(); + + // We expect this method to be called once directly after startMonitoring. + // All the caches should be empty. + RTC_DCHECK(network_handle_by_if_name_.empty()); + RTC_DCHECK(network_handle_by_address_.empty()); + RTC_DCHECK(network_info_by_handle_.empty()); + RTC_DCHECK(network_preference_by_adapter_type_.empty()); + + // ...but reset just in case. + reset(); RTC_LOG(LS_INFO) << "Android network monitor found " << network_infos.size() << " networks"; for (const NetworkInformation& network : network_infos) { @@ -494,68 +563,43 @@ void AndroidNetworkMonitor::SetNetworkInfos( } } -rtc::AdapterType AndroidNetworkMonitor::GetAdapterType( - const std::string& if_name) { +rtc::NetworkMonitorInterface::InterfaceInfo +AndroidNetworkMonitor::GetInterfaceInfo(absl::string_view if_name) { RTC_DCHECK_RUN_ON(network_thread_); - auto iter = adapter_type_by_name_.find(if_name); - rtc::AdapterType type = (iter == adapter_type_by_name_.end()) - ? 
rtc::ADAPTER_TYPE_UNKNOWN - : iter->second; - - if (type == rtc::ADAPTER_TYPE_UNKNOWN && bind_using_ifname_) { - for (auto const& iter : adapter_type_by_name_) { - // Use partial match so that e.g if_name="v4-wlan0" is matched - // agains iter.first="wlan0" - if (if_name.find(iter.first) != std::string::npos) { - type = iter.second; - break; - } - } - } - - if (type == rtc::ADAPTER_TYPE_UNKNOWN) { - RTC_LOG(LS_WARNING) << "Get an unknown type for the interface " << if_name; - } - return type; -} - -rtc::AdapterType AndroidNetworkMonitor::GetVpnUnderlyingAdapterType( - const std::string& if_name) { - RTC_DCHECK_RUN_ON(network_thread_); - auto iter = vpn_underlying_adapter_type_by_name_.find(if_name); - rtc::AdapterType type = (iter == vpn_underlying_adapter_type_by_name_.end()) - ? rtc::ADAPTER_TYPE_UNKNOWN - : iter->second; - if (type == rtc::ADAPTER_TYPE_UNKNOWN && bind_using_ifname_) { - // Use partial match so that e.g if_name="v4-wlan0" is matched - // agains iter.first="wlan0" - for (auto const& iter : vpn_underlying_adapter_type_by_name_) { - if (if_name.find(iter.first) != std::string::npos) { - type = iter.second; - break; - } - } - } - - return type; + auto handle = FindNetworkHandleFromIfname(if_name); + if (!handle) { + return { + .adapter_type = rtc::ADAPTER_TYPE_UNKNOWN, + .available = (disable_is_adapter_available_ ? true : false), + }; + } + auto iter = network_info_by_handle_.find(*handle); + RTC_DCHECK(iter != network_info_by_handle_.end()); + if (iter == network_info_by_handle_.end()) { + return { + .adapter_type = rtc::ADAPTER_TYPE_UNKNOWN, + .available = (disable_is_adapter_available_ ? true : false), + }; + } + + auto type = + AdapterTypeFromNetworkType(iter->second.type, surface_cellular_types_); + auto vpn_type = + (type == rtc::ADAPTER_TYPE_VPN) + ? 
AdapterTypeFromNetworkType(iter->second.underlying_type_for_vpn, + surface_cellular_types_) + : rtc::ADAPTER_TYPE_UNKNOWN; + return { + .adapter_type = type, + .underlying_type_for_vpn = vpn_type, + .network_preference = GetNetworkPreference(type), + .available = true, + }; } rtc::NetworkPreference AndroidNetworkMonitor::GetNetworkPreference( - const std::string& if_name) { + rtc::AdapterType adapter_type) const { RTC_DCHECK_RUN_ON(network_thread_); - auto iter = adapter_type_by_name_.find(if_name); - if (iter == adapter_type_by_name_.end()) { - return rtc::NetworkPreference::NEUTRAL; - } - - rtc::AdapterType adapter_type = iter->second; - if (adapter_type == rtc::ADAPTER_TYPE_VPN) { - auto iter2 = vpn_underlying_adapter_type_by_name_.find(if_name); - if (iter2 != vpn_underlying_adapter_type_by_name_.end()) { - adapter_type = iter2->second; - } - } - auto preference_iter = network_preference_by_adapter_type_.find(adapter_type); if (preference_iter == network_preference_by_adapter_type_.end()) { return rtc::NetworkPreference::NEUTRAL; @@ -575,15 +619,16 @@ AndroidNetworkMonitorFactory::AndroidNetworkMonitorFactory( AndroidNetworkMonitorFactory::~AndroidNetworkMonitorFactory() = default; rtc::NetworkMonitorInterface* -AndroidNetworkMonitorFactory::CreateNetworkMonitor() { +AndroidNetworkMonitorFactory::CreateNetworkMonitor( + const FieldTrialsView& field_trials) { return new AndroidNetworkMonitor(AttachCurrentThreadIfNeeded(), - j_application_context_); + j_application_context_, field_trials); } void AndroidNetworkMonitor::NotifyConnectionTypeChanged( JNIEnv* env, const JavaRef& j_caller) { - network_thread_->PostTask(ToQueuedTask(safety_flag_, [this] { + network_thread_->PostTask(SafeTask(safety_flag_, [this] { RTC_LOG(LS_INFO) << "Android network monitor detected connection type change."; InvokeNetworksChangedCallback(); @@ -606,8 +651,8 @@ void AndroidNetworkMonitor::NotifyOfNetworkConnect( const JavaRef& j_network_info) { NetworkInformation network_info = GetNetworkInformationFromJava(env, j_network_info); - network_thread_->PostTask(ToQueuedTask( - safety_flag_, [this, network_info = std::move(network_info)] { + network_thread_->PostTask( + SafeTask(safety_flag_, [this, network_info = std::move(network_info)] { OnNetworkConnected_n(network_info); })); } @@ -616,7 +661,7 @@ void AndroidNetworkMonitor::NotifyOfNetworkDisconnect( JNIEnv* env, const JavaRef& j_caller, jlong network_handle) { - network_thread_->PostTask(ToQueuedTask(safety_flag_, [this, network_handle] { + network_thread_->PostTask(SafeTask(safety_flag_, [this, network_handle] { OnNetworkDisconnected_n(static_cast(network_handle)); })); } @@ -630,9 +675,9 @@ void AndroidNetworkMonitor::NotifyOfNetworkPreference( rtc::NetworkPreference preference = static_cast(jpreference); - network_thread_->PostTask(ToQueuedTask( - safety_flag_, - [this, type, preference] { OnNetworkPreference_n(type, preference); })); + network_thread_->PostTask(SafeTask(safety_flag_, [this, type, preference] { + OnNetworkPreference_n(type, preference); + })); } } // namespace jni diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.h index 01e5fb7af7..d0aad5ea76 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.h @@ -17,15 +17,22 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" 
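The new GetInterfaceInfo() above returns its aggregate with C++20 designated initializers, which keeps the "handle not found" and "found" paths explicit. A small stand-alone sketch of that style; InterfaceInfoToy, AdapterTypeToy and the placeholder VPN mapping are assumptions, not the real WebRTC types.

```cpp
#include <optional>

enum class AdapterTypeToy { kUnknown, kWifi, kCellular, kVpn };

struct InterfaceInfoToy {
  AdapterTypeToy adapter_type = AdapterTypeToy::kUnknown;
  AdapterTypeToy underlying_type_for_vpn = AdapterTypeToy::kUnknown;
  bool available = false;
};

InterfaceInfoToy LookupInterface(std::optional<AdapterTypeToy> found,
                                 bool treat_missing_as_available) {
  if (!found) {
    // Not found: type stays unknown; availability follows the kill switch.
    return {.available = treat_missing_as_available};
  }
  return {
      .adapter_type = *found,
      .underlying_type_for_vpn =
          (*found == AdapterTypeToy::kVpn)
              ? AdapterTypeToy::kWifi  // placeholder; the real code maps the
                                       // VPN's underlying network type
              : AdapterTypeToy::kUnknown,
      .available = true,
  };
}
```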
+#include "api/field_trials_view.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "rtc_base/network_monitor.h" #include "rtc_base/network_monitor_factory.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/string_utils.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" #include "sdk/android/src/jni/jni_helpers.h" namespace webrtc { +namespace test { +class AndroidNetworkMonitorTest; +} // namespace test + namespace jni { typedef int64_t NetworkHandle; @@ -67,7 +74,8 @@ struct NetworkInformation { class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface { public: AndroidNetworkMonitor(JNIEnv* env, - const JavaRef& j_application_context); + const JavaRef& j_application_context, + const FieldTrialsView& field_trials); ~AndroidNetworkMonitor() override; // TODO(sakal): Remove once down stream dependencies have been updated. @@ -83,12 +91,9 @@ class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface { rtc::NetworkBindingResult BindSocketToNetwork( int socket_fd, const rtc::IPAddress& address, - const std::string& if_name) override; - rtc::AdapterType GetAdapterType(const std::string& if_name) override; - rtc::AdapterType GetVpnUnderlyingAdapterType( - const std::string& if_name) override; - rtc::NetworkPreference GetNetworkPreference( - const std::string& if_name) override; + absl::string_view if_name) override; + + InterfaceInfo GetInterfaceInfo(absl::string_view if_name) override; // Always expected to be called on the network thread. void SetNetworkInfos(const std::vector& network_infos); @@ -112,26 +117,26 @@ class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface { // Visible for testing. absl::optional FindNetworkHandleFromAddressOrName( const rtc::IPAddress& address, - const std::string& ifname) const; + absl::string_view ifname) const; private: + void reset(); void OnNetworkConnected_n(const NetworkInformation& network_info); void OnNetworkDisconnected_n(NetworkHandle network_handle); void OnNetworkPreference_n(NetworkType type, rtc::NetworkPreference preference); + rtc::NetworkPreference GetNetworkPreference(rtc::AdapterType) const; absl::optional FindNetworkHandleFromIfname( - const std::string& ifname) const; + absl::string_view ifname) const; const int android_sdk_int_; ScopedJavaGlobalRef j_application_context_; ScopedJavaGlobalRef j_network_monitor_; rtc::Thread* const network_thread_; bool started_ RTC_GUARDED_BY(network_thread_) = false; - std::map adapter_type_by_name_ - RTC_GUARDED_BY(network_thread_); - std::map vpn_underlying_adapter_type_by_name_ - RTC_GUARDED_BY(network_thread_); + std::map + network_handle_by_if_name_ RTC_GUARDED_BY(network_thread_); std::map network_handle_by_address_ RTC_GUARDED_BY(network_thread_); std::map network_info_by_handle_ @@ -149,8 +154,17 @@ class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface { // This applies to adapter_type_by_name_, vpn_underlying_adapter_type_by_name_ // and FindNetworkHandleFromIfname. bool bind_using_ifname_ RTC_GUARDED_BY(network_thread_) = true; + + // NOTE: disable_is_adapter_available_ is a kill switch for the impl. + // of IsAdapterAvailable(). 
+ bool disable_is_adapter_available_ RTC_GUARDED_BY(network_thread_) = false; + rtc::scoped_refptr safety_flag_ RTC_PT_GUARDED_BY(network_thread_) = nullptr; + + const FieldTrialsView& field_trials_; + + friend class webrtc::test::AndroidNetworkMonitorTest; }; class AndroidNetworkMonitorFactory : public rtc::NetworkMonitorFactory { @@ -163,7 +177,8 @@ class AndroidNetworkMonitorFactory : public rtc::NetworkMonitorFactory { ~AndroidNetworkMonitorFactory() override; - rtc::NetworkMonitorInterface* CreateNetworkMonitor() override; + rtc::NetworkMonitorInterface* CreateNetworkMonitor( + const FieldTrialsView& field_trials) override; private: ScopedJavaGlobalRef j_application_context_; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.cc index ae9d651d67..4f3152dc6f 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.cc @@ -67,7 +67,7 @@ void AndroidVideoTrackSource::SetState(JNIEnv* env, } else { // TODO(sakal): Is this even necessary, does FireOnChanged have to be // called from signaling thread? - signaling_thread_->PostTask(RTC_FROM_HERE, [this] { FireOnChanged(); }); + signaling_thread_->PostTask([this] { FireOnChanged(); }); } } } diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_player.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_player.cc index ae8fcb9613..2b745b3bd8 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_player.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_player.cc @@ -13,6 +13,7 @@ #include #include "api/array_view.h" +#include "api/task_queue/task_queue_base.h" #include "modules/audio_device/fine_audio_buffer.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -21,12 +22,8 @@ namespace webrtc { namespace jni { -enum AudioDeviceMessageType : uint32_t { - kMessageOutputStreamDisconnected, -}; - AAudioPlayer::AAudioPlayer(const AudioParameters& audio_parameters) - : main_thread_(rtc::Thread::Current()), + : main_thread_(TaskQueueBase::Current()), aaudio_(audio_parameters, AAUDIO_DIRECTION_OUTPUT, this) { RTC_LOG(LS_INFO) << "ctor"; thread_checker_aaudio_.Detach(); @@ -163,7 +160,7 @@ void AAudioPlayer::OnErrorCallback(aaudio_result_t error) { // from the callback, use another thread instead". A message is therefore // sent to the main thread to do the restart operation. 
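The AAudioPlayer changes that follow drop rtc::MessageHandler: the error callback now posts a lambda to the creating task queue instead of a message id dispatched in OnMessage(). A minimal sketch of that migration with a toy task queue; ToyTaskQueue and ToyPlayer are illustrative, not the WebRTC classes.

```cpp
#include <deque>
#include <functional>
#include <iostream>
#include <mutex>

class ToyTaskQueue {
 public:
  void PostTask(std::function<void()> task) {
    std::lock_guard<std::mutex> lock(mutex_);
    tasks_.push_back(std::move(task));
  }
  // Called by the owning thread's run loop.
  void RunPending() {
    std::deque<std::function<void()>> pending;
    {
      std::lock_guard<std::mutex> lock(mutex_);
      pending.swap(tasks_);
    }
    for (auto& task : pending) task();
  }

 private:
  std::mutex mutex_;
  std::deque<std::function<void()>> tasks_;
};

class ToyPlayer {
 public:
  explicit ToyPlayer(ToyTaskQueue* main_thread) : main_thread_(main_thread) {}

  // Invoked on a real-time callback thread; must not restart the stream here.
  void OnErrorCallback() {
    main_thread_->PostTask([this] { HandleStreamDisconnected(); });
  }

 private:
  void HandleStreamDisconnected() {
    std::cout << "restarting stream on the owning thread\n";
  }

  ToyTaskQueue* const main_thread_;
};
```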
RTC_DCHECK(main_thread_); - main_thread_->Post(RTC_FROM_HERE, this, kMessageOutputStreamDisconnected); + main_thread_->PostTask([this] { HandleStreamDisconnected(); }); } } @@ -220,15 +217,6 @@ aaudio_data_callback_result_t AAudioPlayer::OnDataCallback(void* audio_data, return AAUDIO_CALLBACK_RESULT_CONTINUE; } -void AAudioPlayer::OnMessage(rtc::Message* msg) { - RTC_DCHECK_RUN_ON(&main_thread_checker_); - switch (msg->message_id) { - case kMessageOutputStreamDisconnected: - HandleStreamDisconnected(); - break; - } -} - void AAudioPlayer::HandleStreamDisconnected() { RTC_DCHECK_RUN_ON(&main_thread_checker_); RTC_DLOG(LS_INFO) << "HandleStreamDisconnected"; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_player.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_player.h index 9e775ecfa3..7286d6e872 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_player.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_player.h @@ -17,10 +17,9 @@ #include "absl/types/optional.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/include/audio_device_defines.h" -#include "rtc_base/message_handler.h" -#include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" #include "sdk/android/src/jni/audio_device/aaudio_wrapper.h" #include "sdk/android/src/jni/audio_device/audio_device_module.h" @@ -52,9 +51,7 @@ namespace jni { // where the internal AAudio buffer can be increased when needed. It will // reduce the risk of underruns (~glitches) at the expense of an increased // latency. -class AAudioPlayer final : public AudioOutput, - public AAudioObserverInterface, - public rtc::MessageHandler { +class AAudioPlayer final : public AudioOutput, public AAudioObserverInterface { public: explicit AAudioPlayer(const AudioParameters& audio_parameters); ~AAudioPlayer() override; @@ -90,11 +87,10 @@ class AAudioPlayer final : public AudioOutput, // Called on a real-time thread owned by AAudio. void OnErrorCallback(aaudio_result_t error) override; - // rtc::MessageHandler used for restart messages from the error-callback - // thread to the main (creating) thread. - void OnMessage(rtc::Message* msg) override; - private: + // TODO(henrika): Implement. + int GetPlayoutUnderrunCount() override { return 0; } + // Closes the existing stream and starts a new stream. void HandleStreamDisconnected(); @@ -108,7 +104,7 @@ class AAudioPlayer final : public AudioOutput, SequenceChecker thread_checker_aaudio_; // The thread on which this object is created on. - rtc::Thread* main_thread_; + TaskQueueBase* main_thread_; // Wraps all AAudio resources. Contains an output stream using the default // output audio device. 
Can be accessed on both the main thread and the diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc index d66c1d0235..39130cd551 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc @@ -22,12 +22,8 @@ namespace webrtc { namespace jni { -enum AudioDeviceMessageType : uint32_t { - kMessageInputStreamDisconnected, -}; - AAudioRecorder::AAudioRecorder(const AudioParameters& audio_parameters) - : main_thread_(rtc::Thread::Current()), + : main_thread_(TaskQueueBase::Current()), aaudio_(audio_parameters, AAUDIO_DIRECTION_INPUT, this) { RTC_LOG(LS_INFO) << "ctor"; thread_checker_aaudio_.Detach(); @@ -153,7 +149,7 @@ void AAudioRecorder::OnErrorCallback(aaudio_result_t error) { // from the callback, use another thread instead". A message is therefore // sent to the main thread to do the restart operation. RTC_DCHECK(main_thread_); - main_thread_->Post(RTC_FROM_HERE, this, kMessageInputStreamDisconnected); + main_thread_->PostTask([this] { HandleStreamDisconnected(); }); } } @@ -201,18 +197,6 @@ aaudio_data_callback_result_t AAudioRecorder::OnDataCallback( return AAUDIO_CALLBACK_RESULT_CONTINUE; } -void AAudioRecorder::OnMessage(rtc::Message* msg) { - RTC_DCHECK_RUN_ON(&thread_checker_); - switch (msg->message_id) { - case kMessageInputStreamDisconnected: - HandleStreamDisconnected(); - break; - default: - RTC_LOG(LS_ERROR) << "Invalid message id: " << msg->message_id; - break; - } -} - void AAudioRecorder::HandleStreamDisconnected() { RTC_DCHECK_RUN_ON(&thread_checker_); RTC_LOG(LS_INFO) << "HandleStreamDisconnected"; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h index a911577bfe..016c9b00af 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h @@ -16,10 +16,9 @@ #include #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/include/audio_device_defines.h" -#include "rtc_base/message_handler.h" -#include "rtc_base/thread.h" #include "sdk/android/src/jni/audio_device/aaudio_wrapper.h" #include "sdk/android/src/jni/audio_device/audio_device_module.h" @@ -44,9 +43,7 @@ namespace jni { // // TODO(henrika): add comments about device changes and adaptive buffer // management. -class AAudioRecorder : public AudioInput, - public AAudioObserverInterface, - public rtc::MessageHandler { +class AAudioRecorder : public AudioInput, public AAudioObserverInterface { public: explicit AAudioRecorder(const AudioParameters& audio_parameters); ~AAudioRecorder() override; @@ -82,9 +79,6 @@ class AAudioRecorder : public AudioInput, // Called on a real-time thread owned by AAudio. void OnErrorCallback(aaudio_result_t error) override; - // rtc::MessageHandler used for restart messages. - void OnMessage(rtc::Message* msg) override; - private: // Closes the existing stream and starts a new stream. void HandleStreamDisconnected(); @@ -99,7 +93,7 @@ class AAudioRecorder : public AudioInput, SequenceChecker thread_checker_aaudio_; // The thread on which this object is created on. 
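HandleStreamDisconnected() in both AAudio classes stays guarded by RTC_DCHECK_RUN_ON, so restarts only ever happen on the owning thread even though the error originates on a real-time callback thread. A much-simplified sketch of that kind of thread-affinity check; ToyThreadChecker is a stand-in, not the WebRTC SequenceChecker.

```cpp
#include <cassert>
#include <thread>

class ToyThreadChecker {
 public:
  ToyThreadChecker() : owner_(std::this_thread::get_id()) {}
  bool IsCurrent() const { return std::this_thread::get_id() == owner_; }

 private:
  const std::thread::id owner_;
};

class ToyRecorder {
 public:
  void HandleStreamDisconnected() {
    assert(thread_checker_.IsCurrent() &&
           "must run on the thread the recorder was created on");
    restart_count_++;  // safe: only the owning thread reaches this point
  }

 private:
  ToyThreadChecker thread_checker_;  // records the constructing thread
  int restart_count_ = 0;
};
```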
- rtc::Thread* main_thread_; + TaskQueueBase* main_thread_; // Wraps all AAudio resources. Contains an input stream using the default // input audio device. diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_device_module.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_device_module.cc index 2e75db9418..7c59d3e432 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_device_module.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_device_module.cc @@ -13,13 +13,13 @@ #include #include +#include "api/make_ref_counted.h" #include "api/sequence_checker.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/task_queue/task_queue_factory.h" #include "modules/audio_device/audio_device_buffer.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" #include "sdk/android/generated_audio_device_module_base_jni/WebRtcAudioManager_jni.h" #include "system_wrappers/include/metrics.h" diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc index 170c81af48..d0444d5130 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc @@ -15,7 +15,6 @@ #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/format_macros.h" #include "rtc_base/logging.h" #include "rtc_base/platform_thread.h" #include "rtc_base/time_utils.h" @@ -251,8 +250,8 @@ void AudioRecordJni::DataIsRecorded(JNIEnv* env, RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called"; return; } - audio_device_buffer_->SetRecordedBuffer(direct_buffer_address_, - frames_per_buffer_); + audio_device_buffer_->SetRecordedBuffer( + direct_buffer_address_, frames_per_buffer_); // We provide one (combined) fixed delay estimate for the APM and use the // `playDelayMs` parameter only. Components like the AEC only sees the sum // of `playDelayMs` and `recDelayMs`, hence the distributions does not matter. 
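Several files in this stretch swap rtc_base/ref_counted_object.h for api/make_ref_counted.h, i.e. a forwarding factory rather than spelling out the ref-counted wrapper at each call site. A sketch of that factory shape with std::shared_ptr standing in for the library's scoped_refptr; MakeRefCountedToy and AudioDeviceToy are illustrative names.

```cpp
#include <memory>
#include <string>
#include <utility>

template <typename T, typename... Args>
std::shared_ptr<T> MakeRefCountedToy(Args&&... args) {
  // Perfect-forward constructor arguments into a reference-counted handle.
  return std::make_shared<T>(std::forward<Args>(args)...);
}

struct AudioDeviceToy {
  AudioDeviceToy(std::string name, int sample_rate_hz)
      : name(std::move(name)), sample_rate_hz(sample_rate_hz) {}
  std::string name;
  int sample_rate_hz;
};

// Usage: auto adm = MakeRefCountedToy<AudioDeviceToy>("aaudio", 48000);
```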
diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc index f2f22f915b..c1ff4c30e2 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc @@ -14,7 +14,6 @@ #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/format_macros.h" #include "rtc_base/logging.h" #include "rtc_base/platform_thread.h" #include "sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h" diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_player.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_player.cc index 5192accf16..6300a3abe1 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_player.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_player.cc @@ -13,11 +13,11 @@ #include #include + #include "api/array_view.h" #include "modules/audio_device/fine_audio_buffer.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/format_macros.h" #include "rtc_base/platform_thread.h" #include "rtc_base/time_utils.h" #include "sdk/android/src/jni/audio_device/audio_common.h" @@ -202,7 +202,7 @@ void OpenSLESPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { ALOGD("SetPlayoutSampleRate(%d)", sample_rate_hz); audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz); const size_t channels = audio_parameters_.channels(); - ALOGD("SetPlayoutChannels(%" RTC_PRIuS ")", channels); + ALOGD("SetPlayoutChannels(%zu)", channels); audio_device_buffer_->SetPlayoutChannels(channels); RTC_CHECK(audio_device_buffer_); AllocateDataBuffers(); @@ -223,7 +223,7 @@ void OpenSLESPlayer::AllocateDataBuffers() { // which reduces jitter. const size_t buffer_size_in_samples = audio_parameters_.frames_per_buffer() * audio_parameters_.channels(); - ALOGD("native buffer size: %" RTC_PRIuS, buffer_size_in_samples); + ALOGD("native buffer size: %zu", buffer_size_in_samples); ALOGD("native buffer size in ms: %.2f", audio_parameters_.GetBufferSizeInMilliseconds()); fine_audio_buffer_ = std::make_unique(audio_device_buffer_); diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_recorder.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_recorder.cc index d2eb2de9b0..c426a8d92b 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_recorder.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_recorder.cc @@ -13,11 +13,11 @@ #include #include + #include "api/array_view.h" #include "modules/audio_device/fine_audio_buffer.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/format_macros.h" #include "rtc_base/platform_thread.h" #include "rtc_base/time_utils.h" #include "sdk/android/src/jni/audio_device/audio_common.h" @@ -188,7 +188,7 @@ void OpenSLESRecorder::AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) { // Ensure that the audio device buffer is informed about the number of // channels preferred by the OS on the recording side. 
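The OpenSL ES logging changes in this stretch replace the RTC_PRIuS macro with the standard %zu conversion for size_t, which C99/C++11 printf supports directly. A trivial, runnable example of the replacement:

```cpp
#include <cstddef>
#include <cstdio>
#include <vector>

int main() {
  std::vector<short> buffer(480 * 2);  // 10 ms of 48 kHz stereo samples
  const std::size_t channels = 2;
  // Old style needed a project macro: printf("channels: %" RTC_PRIuS, channels);
  std::printf("SetPlayoutChannels(%zu)\n", channels);
  std::printf("native buffer size: %zu\n", buffer.size());
  return 0;
}
```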
const size_t channels = audio_parameters_.channels(); - ALOGD("SetRecordingChannels(%" RTC_PRIuS ")", channels); + ALOGD("SetRecordingChannels(%zu)", channels); audio_device_buffer_->SetRecordingChannels(channels); // Allocated memory for internal data buffers given existing audio parameters. AllocateDataBuffers(); @@ -345,12 +345,10 @@ void OpenSLESRecorder::AllocateDataBuffers() { // Create a modified audio buffer class which allows us to deliver any number // of samples (and not only multiple of 10ms) to match the native audio unit // buffer size. - ALOGD("frames per native buffer: %" RTC_PRIuS, - audio_parameters_.frames_per_buffer()); - ALOGD("frames per 10ms buffer: %" RTC_PRIuS, + ALOGD("frames per native buffer: %zu", audio_parameters_.frames_per_buffer()); + ALOGD("frames per 10ms buffer: %zu", audio_parameters_.frames_per_10ms_buffer()); - ALOGD("bytes per native buffer: %" RTC_PRIuS, - audio_parameters_.GetBytesPerBuffer()); + ALOGD("bytes per native buffer: %zu", audio_parameters_.GetBytesPerBuffer()); ALOGD("native sample rate: %d", audio_parameters_.sample_rate()); RTC_DCHECK(audio_device_buffer_); fine_audio_buffer_ = std::make_unique(audio_device_buffer_); diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/av1_codec.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/av1_codec.cc deleted file mode 100644 index 02070f7901..0000000000 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/av1_codec.cc +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h" -#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" -#include "sdk/android/generated_libaom_av1_jni/LibaomAv1Decoder_jni.h" -#include "sdk/android/generated_libaom_av1_jni/LibaomAv1Encoder_jni.h" -#include "sdk/android/src/jni/jni_helpers.h" - -namespace webrtc { -namespace jni { - -static jlong JNI_LibaomAv1Encoder_CreateEncoder(JNIEnv* jni) { - return jlongFromPointer(webrtc::CreateLibaomAv1Encoder().release()); -} - -static jboolean JNI_LibaomAv1Encoder_IsSupported(JNIEnv* jni) { - return webrtc::kIsLibaomAv1EncoderSupported; -} - -static jlong JNI_LibaomAv1Decoder_CreateDecoder(JNIEnv* jni) { - return jlongFromPointer(webrtc::CreateLibaomAv1Decoder().release()); -} - -static jboolean JNI_LibaomAv1Decoder_IsSupported(JNIEnv* jni) { - return webrtc::kIsLibaomAv1DecoderSupported; -} - -} // namespace jni -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/class_loader.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/class_loader.h deleted file mode 100644 index 4b255a4660..0000000000 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/class_loader.h +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -// Deprecated: use sdk/android/native_api/jni/class_loader.h instead. - -#ifndef SDK_ANDROID_SRC_JNI_CLASS_LOADER_H_ -#define SDK_ANDROID_SRC_JNI_CLASS_LOADER_H_ - -#include "sdk/android/native_api/jni/class_loader.h" - -namespace webrtc { -namespace jni { -using ::webrtc::InitClassLoader; -} // namespace jni -} // namespace webrtc - -#endif // SDK_ANDROID_SRC_JNI_CLASS_LOADER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/class_reference_holder.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/class_reference_holder.h deleted file mode 100644 index 4702f5e2a9..0000000000 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/class_reference_holder.h +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef SDK_ANDROID_SRC_JNI_CLASS_REFERENCE_HOLDER_H_ -#define SDK_ANDROID_SRC_JNI_CLASS_REFERENCE_HOLDER_H_ - -// TODO(magjed): Update external clients to call webrtc::jni::InitClassLoader -// immediately instead. -#include "sdk/android/native_api/jni/class_loader.h" -#include "sdk/android/src/jni/jni_helpers.h" - -namespace webrtc { -namespace jni { - -// Deprecated. Call webrtc::jni::InitClassLoader() immediately instead.. -inline void LoadGlobalClassReferenceHolder() { - webrtc::InitClassLoader(GetEnv()); -} - -// Deprecated. Do not call at all. -inline void FreeGlobalClassReferenceHolder() {} - -} // namespace jni -} // namespace webrtc - -// TODO(magjed): Remove once external clients are updated. -namespace webrtc_jni { - -using webrtc::jni::LoadGlobalClassReferenceHolder; -using webrtc::jni::FreeGlobalClassReferenceHolder; - -} // namespace webrtc_jni - -#endif // SDK_ANDROID_SRC_JNI_CLASS_REFERENCE_HOLDER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/dav1d_codec.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/dav1d_codec.cc new file mode 100644 index 0000000000..1246d88c0b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/dav1d_codec.cc @@ -0,0 +1,25 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include + +#include "modules/video_coding/codecs/av1/dav1d_decoder.h" +#include "sdk/android/generated_dav1d_jni/Dav1dDecoder_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +static jlong JNI_Dav1dDecoder_CreateDecoder(JNIEnv* jni) { + return jlongFromPointer(webrtc::CreateDav1dDecoder().release()); +} + +} // namespace jni +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/encoded_image.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/encoded_image.cc index 189d7e95e4..9bd73a4a51 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/encoded_image.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/encoded_image.cc @@ -11,7 +11,6 @@ #include "sdk/android/src/jni/encoded_image.h" #include "api/video/encoded_image.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/time_utils.h" #include "sdk/android/generated_video_jni/EncodedImage_jni.h" #include "sdk/android/native_api/jni/java_types.h" diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/jni_generator_helper.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/jni_generator_helper.cc index 8ddcdff4f6..dc34849d1b 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/jni_generator_helper.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/jni_generator_helper.cc @@ -10,7 +10,6 @@ #include "sdk/android/src/jni/jni_generator_helper.h" -#include "rtc_base/atomic_ops.h" #include "sdk/android/native_api/jni/class_loader.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/jni_helpers.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/jni_helpers.cc index 53399abab1..1098dfb23b 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/jni_helpers.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/jni_helpers.cc @@ -12,6 +12,7 @@ #include #include "sdk/android/native_api/jni/java_types.h" +#include "tgnet/FileLog.h" namespace webrtc { namespace jni { @@ -26,6 +27,7 @@ ScopedJavaLocalRef NewDirectByteBuffer(JNIEnv* env, } jobject NewGlobalRef(JNIEnv* jni, jobject o) { + DEBUG_REF("webrtc 2 new global ref"); jobject ret = jni->NewGlobalRef(o); CHECK_EXCEPTION(jni) << "error during NewGlobalRef"; RTC_CHECK(ret); @@ -33,6 +35,7 @@ jobject NewGlobalRef(JNIEnv* jni, jobject o) { } void DeleteGlobalRef(JNIEnv* jni, jobject o) { + DEBUG_DELREF("webrtc 2 delete global ref"); jni->DeleteGlobalRef(o); CHECK_EXCEPTION(jni) << "error during DeleteGlobalRef"; } diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/jni_onload.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/jni_onload.cc index 88730ea4ea..a1829ad0b1 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/jni_onload.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/jni_onload.cc @@ -13,7 +13,7 @@ #define JNIEXPORT __attribute__((visibility("default"))) #include "rtc_base/ssl_adapter.h" -#include "sdk/android/src/jni/class_reference_holder.h" +#include "sdk/android/native_api/jni/class_loader.h" #include "sdk/android/src/jni/jni_helpers.h" namespace webrtc { @@ -26,13 +26,12 @@ extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* reserved) { return -1; RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()"; - LoadGlobalClassReferenceHolder(); + webrtc::InitClassLoader(GetEnv()); return ret; } extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM* jvm, void* reserved) { - FreeGlobalClassReferenceHolder(); RTC_CHECK(rtc::CleanupSSL()) << "Failed to 
CleanupSSL()"; } diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/libaom_av1_encoder.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/libaom_av1_encoder.cc new file mode 100644 index 0000000000..400c3124fe --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/libaom_av1_encoder.cc @@ -0,0 +1,25 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include + +#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" +#include "sdk/android/generated_libaom_av1_encoder_jni/LibaomAv1Encoder_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +static jlong JNI_LibaomAv1Encoder_CreateEncoder(JNIEnv* jni) { + return jlongFromPointer(webrtc::CreateLibaomAv1Encoder().release()); +} + +} // namespace jni +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/logging/log_sink.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/logging/log_sink.cc index cebc8669be..84394d8ee5 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/logging/log_sink.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/logging/log_sink.cc @@ -9,6 +9,7 @@ */ #include "sdk/android/src/jni/logging/log_sink.h" +#include "absl/strings/string_view.h" #include "sdk/android/generated_logging_jni/JNILogging_jni.h" namespace webrtc { @@ -18,17 +19,23 @@ JNILogSink::JNILogSink(JNIEnv* env, const JavaRef& j_logging) : j_logging_(env, j_logging) {} JNILogSink::~JNILogSink() = default; +void JNILogSink::OnLogMessage(const std::string& msg) { + RTC_DCHECK_NOTREACHED(); +} + void JNILogSink::OnLogMessage(const std::string& msg, rtc::LoggingSeverity severity, const char* tag) { - JNIEnv* env = AttachCurrentThreadIfNeeded(); - Java_JNILogging_logToInjectable(env, j_logging_, NativeToJavaString(env, msg), - NativeToJavaInteger(env, severity), - NativeToJavaString(env, tag)); + OnLogMessage(absl::string_view{msg}, severity, tag); } -void JNILogSink::OnLogMessage(const std::string& msg) { - RTC_DCHECK_NOTREACHED(); +void JNILogSink::OnLogMessage(absl::string_view msg, + rtc::LoggingSeverity severity, + const char* tag) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_JNILogging_logToInjectable( + env, j_logging_, NativeToJavaString(env, std::string(msg)), + NativeToJavaInteger(env, severity), NativeToJavaString(env, tag)); } } // namespace jni diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/logging/log_sink.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/logging/log_sink.h index e48b88dcb7..8e681ac3ea 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/logging/log_sink.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/logging/log_sink.h @@ -12,6 +12,7 @@ #include +#include "absl/strings/string_view.h" #include "rtc_base/logging.h" #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/jni_helpers.h" @@ -24,10 +25,13 @@ class JNILogSink : public rtc::LogSink { JNILogSink(JNIEnv* env, const JavaRef& j_logging); ~JNILogSink() override; + void OnLogMessage(const std::string& msg) override; void OnLogMessage(const std::string& msg, rtc::LoggingSeverity severity, const char* tag) override; 
- void OnLogMessage(const std::string& msg) override; + void OnLogMessage(absl::string_view msg, + rtc::LoggingSeverity severity, + const char* tag) override; private: const ScopedJavaGlobalRef j_logging_; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/audio.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/audio.h index 7a79bed986..09fcaf1c56 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/audio.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/audio.h @@ -12,9 +12,7 @@ #define SDK_ANDROID_SRC_JNI_PC_AUDIO_H_ #include "api/scoped_refptr.h" -// Adding 'nogncheck' to disable the gn include headers check. -// We don't want this target depend on audio related targets -#include "modules/audio_processing/include/audio_processing.h" // nogncheck +#include "modules/audio_processing/include/audio_processing.h" namespace webrtc { namespace jni { diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/media_stream.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/media_stream.cc index c209317e8e..20d59a6f8f 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/media_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/media_stream.cc @@ -29,7 +29,7 @@ JavaMediaStream::JavaMediaStream( // Create an observer to update the Java stream when the native stream's set // of tracks changes. observer_.reset(new MediaStreamObserver( - media_stream, + media_stream.get(), [this](AudioTrackInterface* audio_track, MediaStreamInterface* media_stream) { OnAudioTrackAddedToStream(audio_track, media_stream); @@ -113,7 +113,8 @@ static jboolean JNI_MediaStream_AddAudioTrackToNativeStream( jlong pointer, jlong j_audio_track_pointer) { return reinterpret_cast(pointer)->AddTrack( - reinterpret_cast(j_audio_track_pointer)); + rtc::scoped_refptr( + reinterpret_cast(j_audio_track_pointer))); } static jboolean JNI_MediaStream_AddVideoTrackToNativeStream( @@ -121,21 +122,24 @@ static jboolean JNI_MediaStream_AddVideoTrackToNativeStream( jlong pointer, jlong j_video_track_pointer) { return reinterpret_cast(pointer)->AddTrack( - reinterpret_cast(j_video_track_pointer)); + rtc::scoped_refptr( + reinterpret_cast(j_video_track_pointer))); } static jboolean JNI_MediaStream_RemoveAudioTrack(JNIEnv* jni, jlong pointer, jlong j_audio_track_pointer) { return reinterpret_cast(pointer)->RemoveTrack( - reinterpret_cast(j_audio_track_pointer)); + rtc::scoped_refptr( + reinterpret_cast(j_audio_track_pointer))); } static jboolean JNI_MediaStream_RemoveVideoTrack(JNIEnv* jni, jlong pointer, jlong j_video_track_pointer) { return reinterpret_cast(pointer)->RemoveTrack( - reinterpret_cast(j_video_track_pointer)); + rtc::scoped_refptr( + reinterpret_cast(j_video_track_pointer))); } static ScopedJavaLocalRef JNI_MediaStream_GetId(JNIEnv* jni, diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc index 9c73b94000..9983ae7df2 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc @@ -41,6 +41,7 @@ #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "sdk/android/generated_peerconnection_jni/CandidatePairChangeEvent_jni.h" +#include "sdk/android/generated_peerconnection_jni/IceCandidateErrorEvent_jni.h" #include "sdk/android/generated_peerconnection_jni/PeerConnection_jni.h" #include 
"sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/jni_helpers.h" @@ -118,7 +119,7 @@ SdpSemantics JavaToNativeSdpSemantics(JNIEnv* jni, return SdpSemantics::kUnifiedPlan; RTC_DCHECK_NOTREACHED(); - return SdpSemantics::kPlanB_DEPRECATED; + return SdpSemantics::kUnifiedPlan; } ScopedJavaLocalRef NativeToJavaCandidatePairChange( @@ -251,8 +252,6 @@ void JavaToNativeRTCConfiguration( rtc_config->turn_customizer = GetNativeTurnCustomizer(jni, j_turn_customizer); - rtc_config->disable_ipv6 = - Java_RTCConfiguration_getDisableIpv6(jni, j_rtc_config); rtc_config->media_config.enable_dscp = Java_RTCConfiguration_getEnableDscp(jni, j_rtc_config); rtc_config->media_config.video.enable_cpu_adaptation = @@ -306,6 +305,19 @@ void PeerConnectionObserverJni::OnIceCandidate( NativeToJavaIceCandidate(env, *candidate)); } +void PeerConnectionObserverJni::OnIceCandidateError( + const std::string& address, + int port, + const std::string& url, + int error_code, + const std::string& error_text) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef event = Java_IceCandidateErrorEvent_Constructor( + env, NativeToJavaString(env, address), port, NativeToJavaString(env, url), + error_code, NativeToJavaString(env, error_text)); + Java_Observer_onIceCandidateError(env, j_observer_global_, event); +} + void PeerConnectionObserverJni::OnIceCandidatesRemoved( const std::vector& candidates) { JNIEnv* env = AttachCurrentThreadIfNeeded(); @@ -377,8 +389,9 @@ void PeerConnectionObserverJni::OnAddStream( void PeerConnectionObserverJni::OnRemoveStream( rtc::scoped_refptr stream) { JNIEnv* env = AttachCurrentThreadIfNeeded(); - NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream); - RTC_CHECK(it != remote_streams_.end()) << "unexpected stream: " << stream; + NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream.get()); + RTC_CHECK(it != remote_streams_.end()) + << "unexpected stream: " << stream.get(); Java_Observer_onRemoveStream(env, j_observer_global_, it->second.j_media_stream()); remote_streams_.erase(it); @@ -433,7 +446,7 @@ void PeerConnectionObserverJni::OnTrack( JavaMediaStream& PeerConnectionObserverJni::GetOrCreateJavaStream( JNIEnv* env, const rtc::scoped_refptr& stream) { - NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream); + NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream.get()); if (it == remote_streams_.end()) { it = remote_streams_ .emplace(std::piecewise_construct, @@ -501,7 +514,7 @@ static ScopedJavaLocalRef JNI_PeerConnection_GetLocalDescription( // must do this odd dance. std::string sdp; std::string type; - pc->signaling_thread()->Invoke(RTC_FROM_HERE, [pc, &sdp, &type] { + pc->signaling_thread()->BlockingCall([pc, &sdp, &type] { const SessionDescriptionInterface* desc = pc->local_description(); if (desc) { RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp; @@ -520,7 +533,7 @@ static ScopedJavaLocalRef JNI_PeerConnection_GetRemoteDescription( // must do this odd dance. 
std::string sdp; std::string type; - pc->signaling_thread()->Invoke(RTC_FROM_HERE, [pc, &sdp, &type] { + pc->signaling_thread()->BlockingCall([pc, &sdp, &type] { const SessionDescriptionInterface* desc = pc->remote_description(); if (desc) { RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp; @@ -565,7 +578,7 @@ static void JNI_PeerConnection_CreateOffer( jni, j_observer, std::move(constraints)); PeerConnectionInterface::RTCOfferAnswerOptions options; CopyConstraintsIntoOfferAnswerOptions(observer->constraints(), &options); - ExtractNativePC(jni, j_pc)->CreateOffer(observer, options); + ExtractNativePC(jni, j_pc)->CreateOffer(observer.get(), options); } static void JNI_PeerConnection_CreateAnswer( @@ -579,7 +592,7 @@ static void JNI_PeerConnection_CreateAnswer( jni, j_observer, std::move(constraints)); PeerConnectionInterface::RTCOfferAnswerOptions options; CopyConstraintsIntoOfferAnswerOptions(observer->constraints(), &options); - ExtractNativePC(jni, j_pc)->CreateAnswer(observer, options); + ExtractNativePC(jni, j_pc)->CreateAnswer(observer.get(), options); } static void JNI_PeerConnection_SetLocalDescriptionAutomatically( @@ -747,7 +760,8 @@ static ScopedJavaLocalRef JNI_PeerConnection_AddTrack( const JavaParamRef& j_stream_labels) { RTCErrorOr> result = ExtractNativePC(jni, j_pc)->AddTrack( - reinterpret_cast(native_track), + rtc::scoped_refptr( + reinterpret_cast(native_track)), JavaListToNativeVector(jni, j_stream_labels, &JavaToNativeString)); if (!result.ok()) { @@ -762,8 +776,10 @@ static jboolean JNI_PeerConnection_RemoveTrack( JNIEnv* jni, const JavaParamRef& j_pc, jlong native_sender) { - return ExtractNativePC(jni, j_pc)->RemoveTrack( - reinterpret_cast(native_sender)); + return ExtractNativePC(jni, j_pc) + ->RemoveTrackOrError(rtc::scoped_refptr( + reinterpret_cast(native_sender))) + .ok(); } static ScopedJavaLocalRef JNI_PeerConnection_AddTransceiverWithTrack( @@ -773,7 +789,8 @@ static ScopedJavaLocalRef JNI_PeerConnection_AddTransceiverWithTrack( const JavaParamRef& j_init) { RTCErrorOr> result = ExtractNativePC(jni, j_pc)->AddTransceiver( - reinterpret_cast(native_track), + rtc::scoped_refptr( + reinterpret_cast(native_track)), JavaToNativeRtpTransceiverInit(jni, j_init)); if (!result.ok()) { RTC_LOG(LS_ERROR) << "Failed to add transceiver: " @@ -809,7 +826,8 @@ static jboolean JNI_PeerConnection_OldGetStats( jlong native_track) { auto observer = rtc::make_ref_counted(jni, j_observer); return ExtractNativePC(jni, j_pc)->GetStats( - observer, reinterpret_cast(native_track), + observer.get(), + reinterpret_cast(native_track), PeerConnectionInterface::kStatsOutputLevelStandard); } @@ -819,7 +837,33 @@ static void JNI_PeerConnection_NewGetStats( const JavaParamRef& j_callback) { auto callback = rtc::make_ref_counted(jni, j_callback); - ExtractNativePC(jni, j_pc)->GetStats(callback); + ExtractNativePC(jni, j_pc)->GetStats(callback.get()); +} + +static void JNI_PeerConnection_NewGetStatsSender( + JNIEnv* jni, + const JavaParamRef& j_pc, + jlong native_sender, + const JavaParamRef& j_callback) { + auto callback = + rtc::make_ref_counted(jni, j_callback); + ExtractNativePC(jni, j_pc)->GetStats( + rtc::scoped_refptr( + reinterpret_cast(native_sender)), + rtc::scoped_refptr(callback.get())); +} + +static void JNI_PeerConnection_NewGetStatsReceiver( + JNIEnv* jni, + const JavaParamRef& j_pc, + jlong native_receiver, + const JavaParamRef& j_callback) { + auto callback = + rtc::make_ref_counted(jni, j_callback); + ExtractNativePC(jni, j_pc)->GetStats( + rtc::scoped_refptr( + 
reinterpret_cast(native_receiver)), + rtc::scoped_refptr(callback.get())); } static jboolean JNI_PeerConnection_SetBitrate( diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.h index 86d99f31c4..9976e8e4f5 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.h @@ -48,6 +48,12 @@ class PeerConnectionObserverJni : public PeerConnectionObserver { // Implementation of PeerConnectionObserver interface, which propagates // the callbacks to the Java observer. void OnIceCandidate(const IceCandidateInterface* candidate) override; + void OnIceCandidateError(const std::string& address, + int port, + const std::string& url, + int error_code, + const std::string& error_text) override; + void OnIceCandidatesRemoved( const std::vector& candidates) override; void OnSignalingChange( diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc index 5330cbd638..c2950b31cf 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc @@ -31,7 +31,6 @@ #include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/event_tracer.h" #include "rtc_base/physical_socket_server.h" -#include "rtc_base/system/thread_registry.h" #include "rtc_base/thread.h" #include "sdk/android/generated_peerconnection_jni/PeerConnectionFactory_jni.h" #include "sdk/android/native_api/jni/java_types.h" @@ -73,31 +72,12 @@ typedef void (*JavaMethodPointer)(JNIEnv*, const JavaRef&); // given Java object. void PostJavaCallback(JNIEnv* env, rtc::Thread* queue, - const rtc::Location& posted_from, const JavaRef& j_object, JavaMethodPointer java_method_pointer) { - // One-off message handler that calls the Java method on the specified Java - // object before deleting itself. - class JavaAsyncCallback : public rtc::MessageHandler { - public: - JavaAsyncCallback(JNIEnv* env, - const JavaRef& j_object, - JavaMethodPointer java_method_pointer) - : j_object_(env, j_object), java_method_pointer_(java_method_pointer) {} - - void OnMessage(rtc::Message*) override { - java_method_pointer_(AttachCurrentThreadIfNeeded(), j_object_); - // The message has been delivered, clean up after ourself. 
- delete this; - } - - private: - ScopedJavaGlobalRef j_object_; - JavaMethodPointer java_method_pointer_; - }; - - queue->Post(posted_from, - new JavaAsyncCallback(env, j_object, java_method_pointer)); + ScopedJavaGlobalRef object(env, j_object); + queue->PostTask([object = std::move(object), java_method_pointer] { + java_method_pointer(AttachCurrentThreadIfNeeded(), object); + }); } absl::optional @@ -148,11 +128,11 @@ ScopedJavaLocalRef NativeToScopedJavaPeerConnectionFactory( ScopedJavaLocalRef j_pcf = Java_PeerConnectionFactory_Constructor( env, NativeToJavaPointer(owned_factory)); - PostJavaCallback(env, owned_factory->network_thread(), RTC_FROM_HERE, j_pcf, + PostJavaCallback(env, owned_factory->network_thread(), j_pcf, &Java_PeerConnectionFactory_onNetworkThreadReady); - PostJavaCallback(env, owned_factory->worker_thread(), RTC_FROM_HERE, j_pcf, + PostJavaCallback(env, owned_factory->worker_thread(), j_pcf, &Java_PeerConnectionFactory_onWorkerThreadReady); - PostJavaCallback(env, owned_factory->signaling_thread(), RTC_FROM_HERE, j_pcf, + PostJavaCallback(env, owned_factory->signaling_thread(), j_pcf, &Java_PeerConnectionFactory_onSignalingThreadReady); return j_pcf; @@ -351,11 +331,12 @@ JNI_PeerConnectionFactory_CreatePeerConnectionFactory( jlong native_network_controller_factory, jlong native_network_state_predictor_factory, jlong native_neteq_factory) { - rtc::scoped_refptr audio_processor = - reinterpret_cast(native_audio_processor); + rtc::scoped_refptr audio_processor( + reinterpret_cast(native_audio_processor)); return CreatePeerConnectionFactoryForJava( jni, jcontext, joptions, - reinterpret_cast(native_audio_device_module), + rtc::scoped_refptr( + reinterpret_cast(native_audio_device_module)), TakeOwnershipOfRefPtr(native_audio_encoder_factory), TakeOwnershipOfRefPtr(native_audio_decoder_factory), jencoder_factory, jdecoder_factory, @@ -546,10 +527,5 @@ static void JNI_PeerConnectionFactory_PrintStackTrace(JNIEnv* env, jint tid) { RTC_LOG(LS_WARNING) << StackTraceToString(GetStackTrace(tid)); } -static void JNI_PeerConnectionFactory_PrintStackTracesOfRegisteredThreads( - JNIEnv* env) { - PrintStackTracesOfRegisteredThreads(); -} - } // namespace jni } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_receiver.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_receiver.cc index 4d7e954872..7a3600b424 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_receiver.cc @@ -118,8 +118,9 @@ static void JNI_RtpReceiver_SetFrameDecryptor(JNIEnv* jni, jlong j_rtp_sender_pointer, jlong j_frame_decryptor_pointer) { reinterpret_cast(j_rtp_sender_pointer) - ->SetFrameDecryptor(reinterpret_cast( - j_frame_decryptor_pointer)); + ->SetFrameDecryptor(rtc::scoped_refptr( + reinterpret_cast( + j_frame_decryptor_pointer))); } } // namespace jni diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_sender.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_sender.cc index 411e5dc8c5..233a353654 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_sender.cc @@ -105,8 +105,9 @@ static void JNI_RtpSender_SetFrameEncryptor(JNIEnv* jni, jlong j_rtp_sender_pointer, jlong j_frame_encryptor_pointer) { reinterpret_cast(j_rtp_sender_pointer) - ->SetFrameEncryptor(reinterpret_cast( - j_frame_encryptor_pointer)); + 
->SetFrameEncryptor(rtc::scoped_refptr( + reinterpret_cast( + j_frame_encryptor_pointer))); } } // namespace jni diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/video.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/video.cc index ee5ecbea6f..b955dbb1ef 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/video.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/video.cc @@ -11,12 +11,12 @@ #include "sdk/android/src/jni/pc/video.h" #include + #include #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/android_video_track_source.h" #include "sdk/android/src/jni/video_decoder_factory_wrapper.h" diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/scoped_java_ref_counted.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/scoped_java_ref_counted.h index 4f8f1831b4..3ea226259e 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/scoped_java_ref_counted.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/scoped_java_ref_counted.h @@ -30,7 +30,6 @@ class ScopedJavaRefCounted { const JavaRef& j_object); ScopedJavaRefCounted(ScopedJavaRefCounted&& other) = default; - // TODO(nisse): Implement move assignment and copy operations when needed. ScopedJavaRefCounted(const ScopedJavaRefCounted& other) = delete; ScopedJavaRefCounted& operator=(const ScopedJavaRefCounted&) = delete; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_factory_wrapper.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_factory_wrapper.cc index 8ab4191db2..9b5da53636 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_factory_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_factory_wrapper.cc @@ -10,6 +10,7 @@ #include "sdk/android/src/jni/video_encoder_factory_wrapper.h" +#include "api/video/render_resolution.h" #include "api/video_codecs/video_encoder.h" #include "rtc_base/logging.h" #include "sdk/android/generated_video_jni/VideoEncoderFactory_jni.h" @@ -48,6 +49,18 @@ class VideoEncoderSelectorWrapper return VideoCodecInfoToSdpVideoFormat(jni, codec_info); } + absl::optional OnResolutionChange( + const RenderResolution& resolution) override { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef codec_info = nullptr; +// Java_VideoEncoderSelector_onResolutionChange( +// jni, encoder_selector_, resolution.Width(), resolution.Height()); + if (codec_info.is_null()) { + return absl::nullopt; + } + return VideoCodecInfoToSdpVideoFormat(jni, codec_info); + } + absl::optional OnEncoderBroken() override { JNIEnv* jni = AttachCurrentThreadIfNeeded(); ScopedJavaLocalRef codec_info = diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.cc index a9d5239a61..2c356ec040 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.cc @@ -22,7 +22,6 @@ #include "modules/video_coding/utility/vp8_header_parser.h" #include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" #include "rtc_base/logging.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/time_utils.h" #include 
"sdk/android/generated_video_jni/VideoEncoderWrapper_jni.h" #include "sdk/android/generated_video_jni/VideoEncoder_jni.h" diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.cc index dd027316b9..121b34fa94 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.cc @@ -12,7 +12,6 @@ #include "api/scoped_refptr.h" #include "common_video/include/video_frame_buffer.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/time_utils.h" #include "sdk/android/generated_video_jni/VideoFrame_jni.h" #include "sdk/android/src/jni/jni_helpers.h" diff --git a/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.cc b/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.cc index 08bd9dc66c..bbb46edaae 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.cc @@ -95,17 +95,13 @@ const char MediaConstraints::kValueFalse[] = "false"; // Audio constraints. const char MediaConstraints::kGoogEchoCancellation[] = "googEchoCancellation"; const char MediaConstraints::kAutoGainControl[] = "googAutoGainControl"; -const char MediaConstraints::kExperimentalAutoGainControl[] = - "googAutoGainControl2"; const char MediaConstraints::kNoiseSuppression[] = "googNoiseSuppression"; -const char MediaConstraints::kExperimentalNoiseSuppression[] = - "googNoiseSuppression2"; const char MediaConstraints::kHighpassFilter[] = "googHighpassFilter"; -const char MediaConstraints::kTypingNoiseDetection[] = - "googTypingNoiseDetection"; const char MediaConstraints::kAudioMirroring[] = "googAudioMirroring"; const char MediaConstraints::kAudioNetworkAdaptorConfig[] = "googAudioNetworkAdaptorConfig"; +const char MediaConstraints::kInitAudioRecordingOnSend[] = + "InitAudioRecordingOnSend"; // Constraint keys for CreateOffer / CreateAnswer defined in W3C specification. const char MediaConstraints::kOfferToReceiveAudio[] = "OfferToReceiveAudio"; @@ -119,7 +115,6 @@ const char MediaConstraints::kUseRtpMux[] = "googUseRtpMUX"; // Below constraints should be used during PeerConnection construction. // Google-specific constraint keys. 
const char MediaConstraints::kEnableDscp[] = "googDscp"; -const char MediaConstraints::kEnableIPv6[] = "googIPv6"; const char MediaConstraints::kEnableVideoSuspendBelowMinBitrate[] = "googSuspendBelowMinBitrate"; const char MediaConstraints::kCombinedAudioVideoBwe[] = @@ -155,11 +150,6 @@ void CopyConstraintsIntoRtcConfiguration( return; } - bool enable_ipv6; - if (FindConstraint(constraints, MediaConstraints::kEnableIPv6, &enable_ipv6, - nullptr)) { - configuration->disable_ipv6 = !enable_ipv6; - } FindConstraint(constraints, MediaConstraints::kEnableDscp, &configuration->media_config.enable_dscp, nullptr); FindConstraint(constraints, MediaConstraints::kCpuOveruseDetection, @@ -188,19 +178,10 @@ void CopyConstraintsIntoAudioOptions(const MediaConstraints* constraints, &options->echo_cancellation); ConstraintToOptional(constraints, MediaConstraints::kAutoGainControl, &options->auto_gain_control); - ConstraintToOptional(constraints, - MediaConstraints::kExperimentalAutoGainControl, - &options->experimental_agc); ConstraintToOptional(constraints, MediaConstraints::kNoiseSuppression, &options->noise_suppression); - ConstraintToOptional(constraints, - MediaConstraints::kExperimentalNoiseSuppression, - &options->experimental_ns); ConstraintToOptional(constraints, MediaConstraints::kHighpassFilter, &options->highpass_filter); - ConstraintToOptional(constraints, - MediaConstraints::kTypingNoiseDetection, - &options->typing_detection); ConstraintToOptional(constraints, MediaConstraints::kAudioMirroring, &options->stereo_swapping); ConstraintToOptional( @@ -211,6 +192,9 @@ void CopyConstraintsIntoAudioOptions(const MediaConstraints* constraints, if (options->audio_network_adaptor_config) { options->audio_network_adaptor = true; } + ConstraintToOptional(constraints, + MediaConstraints::kInitAudioRecordingOnSend, + &options->init_recording_on_send); } bool CopyConstraintsIntoOfferAnswerOptions( diff --git a/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.h b/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.h index fd95a60235..c946e4fab1 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.h @@ -59,15 +59,13 @@ class MediaConstraints { // These keys are google specific. 
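The media_constraints hunks above retire the deprecated goog* tuning keys (googIPv6, googAutoGainControl2, googNoiseSuppression2, googTypingNoiseDetection) and add an InitAudioRecordingOnSend key that CopyConstraintsIntoAudioOptions forwards into AudioOptions::init_recording_on_send through ConstraintToOptional. A minimal standalone sketch of that look-up-then-copy pattern follows; AudioOpts and CopyConstraint are simplified stand-ins for illustration, not the real webrtc::MediaConstraints API.

// Sketch of the constraint-to-optional pattern used above (stand-in types).
#include <iostream>
#include <map>
#include <optional>
#include <string>

struct AudioOpts {
  std::optional<bool> init_recording_on_send;  // stays unset if the key is absent
};

void CopyConstraint(const std::map<std::string, std::string>& constraints,
                    const std::string& key,
                    std::optional<bool>* field) {
  auto it = constraints.find(key);
  if (it == constraints.end()) return;  // leave the optional empty
  *field = (it->second == "true");      // mandatory/optional distinction omitted
}

int main() {
  std::map<std::string, std::string> constraints = {
      {"InitAudioRecordingOnSend", "true"}};
  AudioOpts opts;
  CopyConstraint(constraints, "InitAudioRecordingOnSend",
                 &opts.init_recording_on_send);
  std::cout << opts.init_recording_on_send.value_or(false) << "\n";  // prints 1
}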
static const char kGoogEchoCancellation[]; // googEchoCancellation - static const char kAutoGainControl[]; // googAutoGainControl - static const char kExperimentalAutoGainControl[]; // googAutoGainControl2 - static const char kNoiseSuppression[]; // googNoiseSuppression - static const char kExperimentalNoiseSuppression[]; // googNoiseSuppression2 - static const char kHighpassFilter[]; // googHighpassFilter - static const char kTypingNoiseDetection[]; // googTypingNoiseDetection + static const char kAutoGainControl[]; // googAutoGainControl + static const char kNoiseSuppression[]; // googNoiseSuppression + static const char kHighpassFilter[]; // googHighpassFilter static const char kAudioMirroring[]; // googAudioMirroring static const char - kAudioNetworkAdaptorConfig[]; // goodAudioNetworkAdaptorConfig + kAudioNetworkAdaptorConfig[]; // googAudioNetworkAdaptorConfig + static const char kInitAudioRecordingOnSend[]; // InitAudioRecordingOnSend; // Constraint keys for CreateOffer / CreateAnswer // Specified by the W3C PeerConnection spec diff --git a/TMessagesProj/jni/voip/webrtc/stats/rtc_stats.cc b/TMessagesProj/jni/voip/webrtc/stats/rtc_stats.cc index e6eb51e55c..ae352fa170 100644 --- a/TMessagesProj/jni/voip/webrtc/stats/rtc_stats.cc +++ b/TMessagesProj/jni/voip/webrtc/stats/rtc_stats.cc @@ -187,12 +187,12 @@ RTCStats::MembersOfThisObjectAndAncestors(size_t additional_capacity) const { } \ template <> \ std::string RTCStatsMember::ValueToString() const { \ - RTC_DCHECK(is_defined_); \ + RTC_DCHECK(value_.has_value()); \ return to_str; \ } \ template <> \ std::string RTCStatsMember::ValueToJson() const { \ - RTC_DCHECK(is_defined_); \ + RTC_DCHECK(value_.has_value()); \ return to_json; \ } \ template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCStatsMember @@ -201,93 +201,146 @@ WEBRTC_DEFINE_RTCSTATSMEMBER(bool, kBool, false, false, - rtc::ToString(value_), - rtc::ToString(value_)); + rtc::ToString(*value_), + rtc::ToString(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(int32_t, kInt32, false, false, - rtc::ToString(value_), - rtc::ToString(value_)); + rtc::ToString(*value_), + rtc::ToString(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(uint32_t, kUint32, false, false, - rtc::ToString(value_), - rtc::ToString(value_)); + rtc::ToString(*value_), + rtc::ToString(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(int64_t, kInt64, false, false, - rtc::ToString(value_), - ToStringAsDouble(value_)); + rtc::ToString(*value_), + ToStringAsDouble(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(uint64_t, kUint64, false, false, - rtc::ToString(value_), - ToStringAsDouble(value_)); + rtc::ToString(*value_), + ToStringAsDouble(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(double, kDouble, false, false, - rtc::ToString(value_), - ToStringAsDouble(value_)); -WEBRTC_DEFINE_RTCSTATSMEMBER(std::string, kString, false, true, value_, value_); + rtc::ToString(*value_), + ToStringAsDouble(*value_)); +WEBRTC_DEFINE_RTCSTATSMEMBER(std::string, + kString, + false, + true, + *value_, + *value_); WEBRTC_DEFINE_RTCSTATSMEMBER(std::vector, kSequenceBool, true, false, - VectorToString(value_), - VectorToString(value_)); + VectorToString(*value_), + VectorToString(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(std::vector, kSequenceInt32, true, false, - VectorToString(value_), - VectorToString(value_)); + VectorToString(*value_), + VectorToString(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(std::vector, kSequenceUint32, true, false, - VectorToString(value_), - VectorToString(value_)); + VectorToString(*value_), + VectorToString(*value_)); 
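In the rtc_stats.cc hunks above, every ValueToString/ValueToJson specialization now asserts value_.has_value() and dereferences *value_, which reflects the member storage moving from a separate is_defined_ flag plus a bare value_ to an optional. A short sketch of that shape, where StatsMember is an illustrative class rather than the real RTCStatsMember template:

// Sketch: a stats member backed by an optional instead of a defined-flag pair.
#include <cassert>
#include <cstdint>
#include <optional>
#include <string>

template <typename T>
class StatsMember {
 public:
  bool is_defined() const { return value_.has_value(); }
  StatsMember& operator=(T value) {
    value_ = std::move(value);
    return *this;
  }
  std::string ValueToString() const {
    assert(value_.has_value());      // mirrors RTC_DCHECK(value_.has_value())
    return std::to_string(*value_);  // dereference, as in the *value_ hunks
  }

 private:
  std::optional<T> value_;  // single source of truth for "defined"
};

int main() {
  StatsMember<int64_t> bytes_sent;
  bytes_sent = 1234;
  return bytes_sent.ValueToString() == "1234" ? 0 : 1;
}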
WEBRTC_DEFINE_RTCSTATSMEMBER(std::vector, kSequenceInt64, true, false, - VectorToString(value_), - VectorToStringAsDouble(value_)); + VectorToString(*value_), + VectorToStringAsDouble(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(std::vector, kSequenceUint64, true, false, - VectorToString(value_), - VectorToStringAsDouble(value_)); + VectorToString(*value_), + VectorToStringAsDouble(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(std::vector, kSequenceDouble, true, false, - VectorToString(value_), - VectorToStringAsDouble(value_)); + VectorToString(*value_), + VectorToStringAsDouble(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(std::vector, kSequenceString, true, false, - VectorOfStringsToString(value_), - VectorOfStringsToString(value_)); + VectorOfStringsToString(*value_), + VectorOfStringsToString(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(rtc_stats_internal::MapStringUint64, kMapStringUint64, false, false, - MapToString(value_), - MapToStringAsDouble(value_)); + MapToString(*value_), + MapToStringAsDouble(*value_)); WEBRTC_DEFINE_RTCSTATSMEMBER(rtc_stats_internal::MapStringDouble, kMapStringDouble, false, false, - MapToString(value_), - MapToStringAsDouble(value_)); + MapToString(*value_), + MapToStringAsDouble(*value_)); + +// Restricted members that expose hardware capabilites. +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember; +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember; +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember; +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember; +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember; +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember; +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember; +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; +template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) + RTCRestrictedStatsMember, + StatExposureCriteria::kHardwareCapability>; template class RTC_EXPORT_TEMPLATE_DEFINE(RTC_EXPORT) RTCNonStandardStatsMember; diff --git a/TMessagesProj/jni/voip/webrtc/stats/rtc_stats_report.cc b/TMessagesProj/jni/voip/webrtc/stats/rtc_stats_report.cc index 4fbd82508e..f6fbd8c44d 100644 --- a/TMessagesProj/jni/voip/webrtc/stats/rtc_stats_report.cc +++ b/TMessagesProj/jni/voip/webrtc/stats/rtc_stats_report.cc @@ -59,11 +59,17 @@ rtc::scoped_refptr RTCStatsReport::Create( return rtc::scoped_refptr(new RTCStatsReport(timestamp_us)); } +rtc::scoped_refptr 
RTCStatsReport::Create(Timestamp timestamp) { + return rtc::scoped_refptr(new RTCStatsReport(timestamp)); +} + RTCStatsReport::RTCStatsReport(int64_t timestamp_us) - : timestamp_us_(timestamp_us) {} + : RTCStatsReport(Timestamp::Micros(timestamp_us)) {} + +RTCStatsReport::RTCStatsReport(Timestamp timestamp) : timestamp_(timestamp) {} rtc::scoped_refptr RTCStatsReport::Copy() const { - rtc::scoped_refptr copy = Create(timestamp_us_); + rtc::scoped_refptr copy = Create(timestamp_); for (auto it = stats_.begin(); it != stats_.end(); ++it) { copy->AddStats(it->second->copy()); } @@ -71,12 +77,15 @@ rtc::scoped_refptr RTCStatsReport::Copy() const { } void RTCStatsReport::AddStats(std::unique_ptr stats) { +#if RTC_DCHECK_IS_ON auto result = +#endif stats_.insert(std::make_pair(std::string(stats->id()), std::move(stats))); +#if RTC_DCHECK_IS_ON RTC_DCHECK(result.second) - << "A stats object with ID " << result.first->second->id() - << " is already " - "present in this stats report."; + << "A stats object with ID \"" << result.first->second->id() << "\" is " + << "already present in this stats report."; +#endif } const RTCStats* RTCStatsReport::Get(const std::string& id) const { diff --git a/TMessagesProj/jni/voip/webrtc/stats/rtcstats_objects.cc b/TMessagesProj/jni/voip/webrtc/stats/rtcstats_objects.cc index 961d17c7eb..420613b777 100644 --- a/TMessagesProj/jni/voip/webrtc/stats/rtcstats_objects.cc +++ b/TMessagesProj/jni/voip/webrtc/stats/rtcstats_objects.cc @@ -62,6 +62,25 @@ const char* const RTCQualityLimitationReason::kOther = "other"; const char* const RTCContentType::kUnspecified = "unspecified"; const char* const RTCContentType::kScreenshare = "screenshare"; +// https://w3c.github.io/webrtc-stats/#dom-rtcdtlsrole +const char* const RTCDtlsRole::kUnknown = "unknown"; +const char* const RTCDtlsRole::kClient = "client"; +const char* const RTCDtlsRole::kServer = "server"; + +// https://www.w3.org/TR/webrtc/#rtcicerole +const char* const RTCIceRole::kUnknown = "unknown"; +const char* const RTCIceRole::kControlled = "controlled"; +const char* const RTCIceRole::kControlling = "controlling"; + +// https://www.w3.org/TR/webrtc/#dom-rtcicetransportstate +const char* const RTCIceTransportState::kNew = "new"; +const char* const RTCIceTransportState::kChecking = "checking"; +const char* const RTCIceTransportState::kConnected = "connected"; +const char* const RTCIceTransportState::kCompleted = "completed"; +const char* const RTCIceTransportState::kDisconnected = "disconnected"; +const char* const RTCIceTransportState::kFailed = "failed"; +const char* const RTCIceTransportState::kClosed = "closed"; + // clang-format off WEBRTC_RTCSTATS_IMPL(RTCCertificateStats, RTCStats, "certificate", &fingerprint, @@ -81,13 +100,8 @@ RTCCertificateStats::RTCCertificateStats(std::string&& id, int64_t timestamp_us) base64_certificate("base64Certificate"), issuer_certificate_id("issuerCertificateId") {} -RTCCertificateStats::RTCCertificateStats(const RTCCertificateStats& other) - : RTCStats(other.id(), other.timestamp_us()), - fingerprint(other.fingerprint), - fingerprint_algorithm(other.fingerprint_algorithm), - base64_certificate(other.base64_certificate), - issuer_certificate_id(other.issuer_certificate_id) {} - +RTCCertificateStats::RTCCertificateStats(const RTCCertificateStats& other) = + default; RTCCertificateStats::~RTCCertificateStats() {} // clang-format off @@ -112,14 +126,7 @@ RTCCodecStats::RTCCodecStats(std::string&& id, int64_t timestamp_us) channels("channels"), sdp_fmtp_line("sdpFmtpLine") {} 
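The rtcstats_objects.cc hunks around this point replace long hand-written copy constructors (RTCCertificateStats, RTCCodecStats, RTCDataChannelStats, and the rest) with = default; because every member is itself copyable, the compiler-generated copy performs the same member-wise copy the deleted initializer lists spelled out. A standalone illustration under that assumption, using a made-up CodecStatsLike type instead of the real stats classes:

// Sketch: "= default" yields the same member-wise copy as the deleted code.
#include <cassert>
#include <cstdint>
#include <optional>
#include <string>

struct CodecStatsLike {
  std::string id;
  std::optional<std::string> mime_type;
  std::optional<uint32_t> clock_rate;

  explicit CodecStatsLike(std::string id) : id(std::move(id)) {}
  // Equivalent to:
  //   CodecStatsLike(const CodecStatsLike& o)
  //       : id(o.id), mime_type(o.mime_type), clock_rate(o.clock_rate) {}
  CodecStatsLike(const CodecStatsLike& other) = default;
};

int main() {
  CodecStatsLike a("codec-1");
  a.mime_type = "video/VP8";
  a.clock_rate = 90000;
  CodecStatsLike b(a);  // member-wise copy
  assert(b.mime_type == a.mime_type && b.clock_rate == a.clock_rate);
  return 0;
}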
-RTCCodecStats::RTCCodecStats(const RTCCodecStats& other) - : RTCStats(other.id(), other.timestamp_us()), - transport_id(other.transport_id), - payload_type(other.payload_type), - mime_type(other.mime_type), - clock_rate(other.clock_rate), - channels(other.channels), - sdp_fmtp_line(other.sdp_fmtp_line) {} +RTCCodecStats::RTCCodecStats(const RTCCodecStats& other) = default; RTCCodecStats::~RTCCodecStats() {} @@ -150,16 +157,8 @@ RTCDataChannelStats::RTCDataChannelStats(std::string&& id, int64_t timestamp_us) messages_received("messagesReceived"), bytes_received("bytesReceived") {} -RTCDataChannelStats::RTCDataChannelStats(const RTCDataChannelStats& other) - : RTCStats(other.id(), other.timestamp_us()), - label(other.label), - protocol(other.protocol), - data_channel_identifier(other.data_channel_identifier), - state(other.state), - messages_sent(other.messages_sent), - bytes_sent(other.bytes_sent), - messages_received(other.messages_received), - bytes_received(other.bytes_received) {} +RTCDataChannelStats::RTCDataChannelStats(const RTCDataChannelStats& other) = + default; RTCDataChannelStats::~RTCDataChannelStats() {} @@ -172,7 +171,6 @@ WEBRTC_RTCSTATS_IMPL(RTCIceCandidatePairStats, RTCStats, "candidate-pair", &priority, &nominated, &writable, - &readable, &packets_sent, &packets_received, &bytes_sent, @@ -185,12 +183,7 @@ WEBRTC_RTCSTATS_IMPL(RTCIceCandidatePairStats, RTCStats, "candidate-pair", &requests_sent, &responses_received, &responses_sent, - &retransmissions_received, - &retransmissions_sent, - &consent_requests_received, &consent_requests_sent, - &consent_responses_received, - &consent_responses_sent, &packets_discarded_on_send, &bytes_discarded_on_send) // clang-format on @@ -209,7 +202,6 @@ RTCIceCandidatePairStats::RTCIceCandidatePairStats(std::string&& id, priority("priority"), nominated("nominated"), writable("writable"), - readable("readable"), packets_sent("packetsSent"), packets_received("packetsReceived"), bytes_sent("bytesSent"), @@ -222,46 +214,12 @@ RTCIceCandidatePairStats::RTCIceCandidatePairStats(std::string&& id, requests_sent("requestsSent"), responses_received("responsesReceived"), responses_sent("responsesSent"), - retransmissions_received("retransmissionsReceived"), - retransmissions_sent("retransmissionsSent"), - consent_requests_received("consentRequestsReceived"), consent_requests_sent("consentRequestsSent"), - consent_responses_received("consentResponsesReceived"), - consent_responses_sent("consentResponsesSent"), packets_discarded_on_send("packetsDiscardedOnSend"), bytes_discarded_on_send("bytesDiscardedOnSend") {} RTCIceCandidatePairStats::RTCIceCandidatePairStats( - const RTCIceCandidatePairStats& other) - : RTCStats(other.id(), other.timestamp_us()), - transport_id(other.transport_id), - local_candidate_id(other.local_candidate_id), - remote_candidate_id(other.remote_candidate_id), - state(other.state), - priority(other.priority), - nominated(other.nominated), - writable(other.writable), - readable(other.readable), - packets_sent(other.packets_sent), - packets_received(other.packets_received), - bytes_sent(other.bytes_sent), - bytes_received(other.bytes_received), - total_round_trip_time(other.total_round_trip_time), - current_round_trip_time(other.current_round_trip_time), - available_outgoing_bitrate(other.available_outgoing_bitrate), - available_incoming_bitrate(other.available_incoming_bitrate), - requests_received(other.requests_received), - requests_sent(other.requests_sent), - responses_received(other.responses_received), - 
responses_sent(other.responses_sent), - retransmissions_received(other.retransmissions_received), - retransmissions_sent(other.retransmissions_sent), - consent_requests_received(other.consent_requests_received), - consent_requests_sent(other.consent_requests_sent), - consent_responses_received(other.consent_responses_received), - consent_responses_sent(other.consent_responses_sent), - packets_discarded_on_send(other.packets_discarded_on_send), - bytes_discarded_on_send(other.bytes_discarded_on_send) {} + const RTCIceCandidatePairStats& other) = default; RTCIceCandidatePairStats::~RTCIceCandidatePairStats() {} @@ -277,7 +235,14 @@ WEBRTC_RTCSTATS_IMPL(RTCIceCandidateStats, RTCStats, "abstract-ice-candidate", &relay_protocol, &candidate_type, &priority, - &url) + &url, + &foundation, + &related_address, + &related_port, + &username_fragment, + &tcp_type, + &vpn, + &network_adapter_type) // clang-format on RTCIceCandidateStats::RTCIceCandidateStats(const std::string& id, @@ -299,21 +264,17 @@ RTCIceCandidateStats::RTCIceCandidateStats(std::string&& id, relay_protocol("relayProtocol"), candidate_type("candidateType"), priority("priority"), - url("url") {} - -RTCIceCandidateStats::RTCIceCandidateStats(const RTCIceCandidateStats& other) - : RTCStats(other.id(), other.timestamp_us()), - transport_id(other.transport_id), - is_remote(other.is_remote), - network_type(other.network_type), - ip(other.ip), - address(other.address), - port(other.port), - protocol(other.protocol), - relay_protocol(other.relay_protocol), - candidate_type(other.candidate_type), - priority(other.priority), - url(other.url) {} + url("url"), + foundation("foundation"), + related_address("relatedAddress"), + related_port("relatedPort"), + username_fragment("usernameFragment"), + tcp_type("tcpType"), + vpn("vpn"), + network_adapter_type("networkAdapterType") {} + +RTCIceCandidateStats::RTCIceCandidateStats(const RTCIceCandidateStats& other) = + default; RTCIceCandidateStats::~RTCIceCandidateStats() {} @@ -328,7 +289,7 @@ RTCLocalIceCandidateStats::RTCLocalIceCandidateStats(std::string&& id, : RTCIceCandidateStats(std::move(id), timestamp_us, false) {} std::unique_ptr RTCLocalIceCandidateStats::copy() const { - return std::unique_ptr(new RTCLocalIceCandidateStats(*this)); + return std::make_unique(*this); } const char* RTCLocalIceCandidateStats::type() const { @@ -346,7 +307,7 @@ RTCRemoteIceCandidateStats::RTCRemoteIceCandidateStats(std::string&& id, : RTCIceCandidateStats(std::move(id), timestamp_us, true) {} std::unique_ptr RTCRemoteIceCandidateStats::copy() const { - return std::unique_ptr(new RTCRemoteIceCandidateStats(*this)); + return std::make_unique(*this); } const char* RTCRemoteIceCandidateStats::type() const { @@ -354,29 +315,30 @@ const char* RTCRemoteIceCandidateStats::type() const { } // clang-format off -WEBRTC_RTCSTATS_IMPL(RTCMediaStreamStats, RTCStats, "stream", +WEBRTC_RTCSTATS_IMPL(DEPRECATED_RTCMediaStreamStats, RTCStats, "stream", &stream_identifier, &track_ids) // clang-format on -RTCMediaStreamStats::RTCMediaStreamStats(const std::string& id, - int64_t timestamp_us) - : RTCMediaStreamStats(std::string(id), timestamp_us) {} +DEPRECATED_RTCMediaStreamStats::DEPRECATED_RTCMediaStreamStats( + const std::string& id, + int64_t timestamp_us) + : DEPRECATED_RTCMediaStreamStats(std::string(id), timestamp_us) {} -RTCMediaStreamStats::RTCMediaStreamStats(std::string&& id, int64_t timestamp_us) +DEPRECATED_RTCMediaStreamStats::DEPRECATED_RTCMediaStreamStats( + std::string&& id, + int64_t timestamp_us) : 
RTCStats(std::move(id), timestamp_us), stream_identifier("streamIdentifier"), track_ids("trackIds") {} -RTCMediaStreamStats::RTCMediaStreamStats(const RTCMediaStreamStats& other) - : RTCStats(other.id(), other.timestamp_us()), - stream_identifier(other.stream_identifier), - track_ids(other.track_ids) {} +DEPRECATED_RTCMediaStreamStats::DEPRECATED_RTCMediaStreamStats( + const DEPRECATED_RTCMediaStreamStats& other) = default; -RTCMediaStreamStats::~RTCMediaStreamStats() {} +DEPRECATED_RTCMediaStreamStats::~DEPRECATED_RTCMediaStreamStats() {} // clang-format off -WEBRTC_RTCSTATS_IMPL(RTCMediaStreamTrackStats, RTCStats, "track", +WEBRTC_RTCSTATS_IMPL(DEPRECATED_RTCMediaStreamTrackStats, RTCStats, "track", &track_identifier, &media_source_id, &remote_source, @@ -387,15 +349,11 @@ WEBRTC_RTCSTATS_IMPL(RTCMediaStreamTrackStats, RTCStats, "track", &jitter_buffer_emitted_count, &frame_width, &frame_height, - &frames_per_second, &frames_sent, &huge_frames_sent, &frames_received, &frames_decoded, &frames_dropped, - &frames_corrupted, - &partial_frames_lost, - &full_frames_lost, &audio_level, &total_audio_energy, &echo_return_loss, @@ -410,25 +368,27 @@ WEBRTC_RTCSTATS_IMPL(RTCMediaStreamTrackStats, RTCStats, "track", &jitter_buffer_flushes, &delayed_packet_outage_samples, &relative_packet_arrival_delay, - &jitter_buffer_target_delay, &interruption_count, &total_interruption_duration, + &total_frames_duration, + &sum_squared_frame_durations, &freeze_count, &pause_count, &total_freezes_duration, - &total_pauses_duration, - &total_frames_duration, - &sum_squared_frame_durations) + &total_pauses_duration) // clang-format on -RTCMediaStreamTrackStats::RTCMediaStreamTrackStats(const std::string& id, - int64_t timestamp_us, - const char* kind) - : RTCMediaStreamTrackStats(std::string(id), timestamp_us, kind) {} +DEPRECATED_RTCMediaStreamTrackStats::DEPRECATED_RTCMediaStreamTrackStats( + const std::string& id, + int64_t timestamp_us, + const char* kind) + : DEPRECATED_RTCMediaStreamTrackStats(std::string(id), timestamp_us, kind) { +} -RTCMediaStreamTrackStats::RTCMediaStreamTrackStats(std::string&& id, - int64_t timestamp_us, - const char* kind) +DEPRECATED_RTCMediaStreamTrackStats::DEPRECATED_RTCMediaStreamTrackStats( + std::string&& id, + int64_t timestamp_us, + const char* kind) : RTCStats(std::move(id), timestamp_us), track_identifier("trackIdentifier"), media_source_id("mediaSourceId"), @@ -440,15 +400,11 @@ RTCMediaStreamTrackStats::RTCMediaStreamTrackStats(std::string&& id, jitter_buffer_emitted_count("jitterBufferEmittedCount"), frame_width("frameWidth"), frame_height("frameHeight"), - frames_per_second("framesPerSecond"), frames_sent("framesSent"), huge_frames_sent("hugeFramesSent"), frames_received("framesReceived"), frames_decoded("framesDecoded"), frames_dropped("framesDropped"), - frames_corrupted("framesCorrupted"), - partial_frames_lost("partialFramesLost"), - full_frames_lost("fullFramesLost"), audio_level("audioLevel"), total_audio_energy("totalAudioEnergy"), echo_return_loss("echoReturnLoss"), @@ -470,67 +426,22 @@ RTCMediaStreamTrackStats::RTCMediaStreamTrackStats(std::string&& id, relative_packet_arrival_delay( "relativePacketArrivalDelay", {NonStandardGroupId::kRtcStatsRelativePacketArrivalDelay}), - jitter_buffer_target_delay("jitterBufferTargetDelay"), interruption_count("interruptionCount"), total_interruption_duration("totalInterruptionDuration"), + total_frames_duration("totalFramesDuration"), + sum_squared_frame_durations("sumOfSquaredFramesDuration"), freeze_count("freezeCount"), 
pause_count("pauseCount"), total_freezes_duration("totalFreezesDuration"), - total_pauses_duration("totalPausesDuration"), - total_frames_duration("totalFramesDuration"), - sum_squared_frame_durations("sumOfSquaredFramesDuration") { + total_pauses_duration("totalPausesDuration") { RTC_DCHECK(kind == RTCMediaStreamTrackKind::kAudio || kind == RTCMediaStreamTrackKind::kVideo); } -RTCMediaStreamTrackStats::RTCMediaStreamTrackStats( - const RTCMediaStreamTrackStats& other) - : RTCStats(other.id(), other.timestamp_us()), - track_identifier(other.track_identifier), - media_source_id(other.media_source_id), - remote_source(other.remote_source), - ended(other.ended), - detached(other.detached), - kind(other.kind), - jitter_buffer_delay(other.jitter_buffer_delay), - jitter_buffer_emitted_count(other.jitter_buffer_emitted_count), - frame_width(other.frame_width), - frame_height(other.frame_height), - frames_per_second(other.frames_per_second), - frames_sent(other.frames_sent), - huge_frames_sent(other.huge_frames_sent), - frames_received(other.frames_received), - frames_decoded(other.frames_decoded), - frames_dropped(other.frames_dropped), - frames_corrupted(other.frames_corrupted), - partial_frames_lost(other.partial_frames_lost), - full_frames_lost(other.full_frames_lost), - audio_level(other.audio_level), - total_audio_energy(other.total_audio_energy), - echo_return_loss(other.echo_return_loss), - echo_return_loss_enhancement(other.echo_return_loss_enhancement), - total_samples_received(other.total_samples_received), - total_samples_duration(other.total_samples_duration), - concealed_samples(other.concealed_samples), - silent_concealed_samples(other.silent_concealed_samples), - concealment_events(other.concealment_events), - inserted_samples_for_deceleration( - other.inserted_samples_for_deceleration), - removed_samples_for_acceleration(other.removed_samples_for_acceleration), - jitter_buffer_flushes(other.jitter_buffer_flushes), - delayed_packet_outage_samples(other.delayed_packet_outage_samples), - relative_packet_arrival_delay(other.relative_packet_arrival_delay), - jitter_buffer_target_delay(other.jitter_buffer_target_delay), - interruption_count(other.interruption_count), - total_interruption_duration(other.total_interruption_duration), - freeze_count(other.freeze_count), - pause_count(other.pause_count), - total_freezes_duration(other.total_freezes_duration), - total_pauses_duration(other.total_pauses_duration), - total_frames_duration(other.total_frames_duration), - sum_squared_frame_durations(other.sum_squared_frame_durations) {} - -RTCMediaStreamTrackStats::~RTCMediaStreamTrackStats() {} +DEPRECATED_RTCMediaStreamTrackStats::DEPRECATED_RTCMediaStreamTrackStats( + const DEPRECATED_RTCMediaStreamTrackStats& other) = default; + +DEPRECATED_RTCMediaStreamTrackStats::~DEPRECATED_RTCMediaStreamTrackStats() {} // clang-format off WEBRTC_RTCSTATS_IMPL(RTCPeerConnectionStats, RTCStats, "peer-connection", @@ -549,10 +460,7 @@ RTCPeerConnectionStats::RTCPeerConnectionStats(std::string&& id, data_channels_closed("dataChannelsClosed") {} RTCPeerConnectionStats::RTCPeerConnectionStats( - const RTCPeerConnectionStats& other) - : RTCStats(other.id(), other.timestamp_us()), - data_channels_opened(other.data_channels_opened), - data_channels_closed(other.data_channels_closed) {} + const RTCPeerConnectionStats& other) = default; RTCPeerConnectionStats::~RTCPeerConnectionStats() {} @@ -579,14 +487,7 @@ RTCRTPStreamStats::RTCRTPStreamStats(std::string&& id, int64_t timestamp_us) codec_id("codecId"), 
media_type("mediaType") {} -RTCRTPStreamStats::RTCRTPStreamStats(const RTCRTPStreamStats& other) - : RTCStats(other.id(), other.timestamp_us()), - ssrc(other.ssrc), - kind(other.kind), - track_id(other.track_id), - transport_id(other.transport_id), - codec_id(other.codec_id), - media_type(other.media_type) {} +RTCRTPStreamStats::RTCRTPStreamStats(const RTCRTPStreamStats& other) = default; RTCRTPStreamStats::~RTCRTPStreamStats() {} @@ -594,8 +495,7 @@ RTCRTPStreamStats::~RTCRTPStreamStats() {} WEBRTC_RTCSTATS_IMPL( RTCReceivedRtpStreamStats, RTCRTPStreamStats, "received-rtp", &jitter, - &packets_lost, - &packets_discarded) + &packets_lost) // clang-format on RTCReceivedRtpStreamStats::RTCReceivedRtpStreamStats(const std::string&& id, @@ -606,15 +506,10 @@ RTCReceivedRtpStreamStats::RTCReceivedRtpStreamStats(std::string&& id, int64_t timestamp_us) : RTCRTPStreamStats(std::move(id), timestamp_us), jitter("jitter"), - packets_lost("packetsLost"), - packets_discarded("packetsDiscarded") {} + packets_lost("packetsLost") {} RTCReceivedRtpStreamStats::RTCReceivedRtpStreamStats( - const RTCReceivedRtpStreamStats& other) - : RTCRTPStreamStats(other), - jitter(other.jitter), - packets_lost(other.packets_lost), - packets_discarded(other.packets_discarded) {} + const RTCReceivedRtpStreamStats& other) = default; RTCReceivedRtpStreamStats::~RTCReceivedRtpStreamStats() {} @@ -635,24 +530,27 @@ RTCSentRtpStreamStats::RTCSentRtpStreamStats(std::string&& id, packets_sent("packetsSent"), bytes_sent("bytesSent") {} -RTCSentRtpStreamStats::RTCSentRtpStreamStats(const RTCSentRtpStreamStats& other) - : RTCRTPStreamStats(other), - packets_sent(other.packets_sent), - bytes_sent(other.bytes_sent) {} +RTCSentRtpStreamStats::RTCSentRtpStreamStats( + const RTCSentRtpStreamStats& other) = default; RTCSentRtpStreamStats::~RTCSentRtpStreamStats() {} // clang-format off WEBRTC_RTCSTATS_IMPL( RTCInboundRTPStreamStats, RTCReceivedRtpStreamStats, "inbound-rtp", + &track_identifier, + &mid, &remote_id, &packets_received, + &packets_discarded, &fec_packets_received, &fec_packets_discarded, &bytes_received, &header_bytes_received, &last_packet_received_timestamp, &jitter_buffer_delay, + &jitter_buffer_target_delay, + &jitter_buffer_minimum_delay, &jitter_buffer_emitted_count, &total_samples_received, &concealed_samples, @@ -664,33 +562,36 @@ WEBRTC_RTCSTATS_IMPL( &total_audio_energy, &total_samples_duration, &frames_received, - &round_trip_time, - &packets_repaired, - &burst_packets_lost, - &burst_packets_discarded, - &burst_loss_count, - &burst_discard_count, - &burst_loss_rate, - &burst_discard_rate, - &gap_loss_rate, - &gap_discard_rate, &frame_width, &frame_height, - &frame_bit_depth, &frames_per_second, &frames_decoded, &key_frames_decoded, &frames_dropped, &total_decode_time, + &total_processing_delay, + &total_assembly_time, + &frames_assembled_from_multiple_packets, &total_inter_frame_delay, &total_squared_inter_frame_delay, + &pause_count, + &total_pauses_duration, + &freeze_count, + &total_freezes_duration, &content_type, &estimated_playout_timestamp, &decoder_implementation, &fir_count, &pli_count, &nack_count, - &qp_sum) + &qp_sum, + &goog_timing_frame_info, + &jitter_buffer_flushes, + &delayed_packet_outage_samples, + &relative_packet_arrival_delay, + &interruption_count, + &total_interruption_duration, + &min_playout_delay) // clang-format on RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(const std::string& id, @@ -700,14 +601,19 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(const std::string& id, 
RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(std::string&& id, int64_t timestamp_us) : RTCReceivedRtpStreamStats(std::move(id), timestamp_us), + track_identifier("trackIdentifier"), + mid("mid"), remote_id("remoteId"), packets_received("packetsReceived"), + packets_discarded("packetsDiscarded"), fec_packets_received("fecPacketsReceived"), fec_packets_discarded("fecPacketsDiscarded"), bytes_received("bytesReceived"), header_bytes_received("headerBytesReceived"), last_packet_received_timestamp("lastPacketReceivedTimestamp"), jitter_buffer_delay("jitterBufferDelay"), + jitter_buffer_target_delay("jitterBufferTargetDelay"), + jitter_buffer_minimum_delay("jitterBufferMinimumDelay"), jitter_buffer_emitted_count("jitterBufferEmittedCount"), total_samples_received("totalSamplesReceived"), concealed_samples("concealedSamples"), @@ -719,85 +625,47 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(std::string&& id, total_audio_energy("totalAudioEnergy"), total_samples_duration("totalSamplesDuration"), frames_received("framesReceived"), - round_trip_time("roundTripTime"), - packets_repaired("packetsRepaired"), - burst_packets_lost("burstPacketsLost"), - burst_packets_discarded("burstPacketsDiscarded"), - burst_loss_count("burstLossCount"), - burst_discard_count("burstDiscardCount"), - burst_loss_rate("burstLossRate"), - burst_discard_rate("burstDiscardRate"), - gap_loss_rate("gapLossRate"), - gap_discard_rate("gapDiscardRate"), frame_width("frameWidth"), frame_height("frameHeight"), - frame_bit_depth("frameBitDepth"), frames_per_second("framesPerSecond"), frames_decoded("framesDecoded"), key_frames_decoded("keyFramesDecoded"), frames_dropped("framesDropped"), total_decode_time("totalDecodeTime"), + total_processing_delay("totalProcessingDelay"), + total_assembly_time("totalAssemblyTime"), + frames_assembled_from_multiple_packets( + "framesAssembledFromMultiplePackets"), total_inter_frame_delay("totalInterFrameDelay"), total_squared_inter_frame_delay("totalSquaredInterFrameDelay"), + pause_count("pauseCount"), + total_pauses_duration("totalPausesDuration"), + freeze_count("freezeCount"), + total_freezes_duration("totalFreezesDuration"), content_type("contentType"), estimated_playout_timestamp("estimatedPlayoutTimestamp"), decoder_implementation("decoderImplementation"), fir_count("firCount"), pli_count("pliCount"), nack_count("nackCount"), - qp_sum("qpSum") {} + qp_sum("qpSum"), + goog_timing_frame_info("googTimingFrameInfo"), + jitter_buffer_flushes( + "jitterBufferFlushes", + {NonStandardGroupId::kRtcAudioJitterBufferMaxPackets}), + delayed_packet_outage_samples( + "delayedPacketOutageSamples", + {NonStandardGroupId::kRtcAudioJitterBufferMaxPackets, + NonStandardGroupId::kRtcStatsRelativePacketArrivalDelay}), + relative_packet_arrival_delay( + "relativePacketArrivalDelay", + {NonStandardGroupId::kRtcStatsRelativePacketArrivalDelay}), + interruption_count("interruptionCount"), + total_interruption_duration("totalInterruptionDuration"), + min_playout_delay("minPlayoutDelay") {} RTCInboundRTPStreamStats::RTCInboundRTPStreamStats( - const RTCInboundRTPStreamStats& other) - : RTCReceivedRtpStreamStats(other), - remote_id(other.remote_id), - packets_received(other.packets_received), - fec_packets_received(other.fec_packets_received), - fec_packets_discarded(other.fec_packets_discarded), - bytes_received(other.bytes_received), - header_bytes_received(other.header_bytes_received), - last_packet_received_timestamp(other.last_packet_received_timestamp), - 
jitter_buffer_delay(other.jitter_buffer_delay), - jitter_buffer_emitted_count(other.jitter_buffer_emitted_count), - total_samples_received(other.total_samples_received), - concealed_samples(other.concealed_samples), - silent_concealed_samples(other.silent_concealed_samples), - concealment_events(other.concealment_events), - inserted_samples_for_deceleration( - other.inserted_samples_for_deceleration), - removed_samples_for_acceleration(other.removed_samples_for_acceleration), - audio_level(other.audio_level), - total_audio_energy(other.total_audio_energy), - total_samples_duration(other.total_samples_duration), - frames_received(other.frames_received), - round_trip_time(other.round_trip_time), - packets_repaired(other.packets_repaired), - burst_packets_lost(other.burst_packets_lost), - burst_packets_discarded(other.burst_packets_discarded), - burst_loss_count(other.burst_loss_count), - burst_discard_count(other.burst_discard_count), - burst_loss_rate(other.burst_loss_rate), - burst_discard_rate(other.burst_discard_rate), - gap_loss_rate(other.gap_loss_rate), - gap_discard_rate(other.gap_discard_rate), - frame_width(other.frame_width), - frame_height(other.frame_height), - frame_bit_depth(other.frame_bit_depth), - frames_per_second(other.frames_per_second), - frames_decoded(other.frames_decoded), - key_frames_decoded(other.key_frames_decoded), - frames_dropped(other.frames_dropped), - total_decode_time(other.total_decode_time), - total_inter_frame_delay(other.total_inter_frame_delay), - total_squared_inter_frame_delay(other.total_squared_inter_frame_delay), - content_type(other.content_type), - estimated_playout_timestamp(other.estimated_playout_timestamp), - decoder_implementation(other.decoder_implementation), - fir_count(other.fir_count), - pli_count(other.pli_count), - nack_count(other.nack_count), - qp_sum(other.qp_sum) {} - + const RTCInboundRTPStreamStats& other) = default; RTCInboundRTPStreamStats::~RTCInboundRTPStreamStats() {} // clang-format off @@ -805,6 +673,7 @@ WEBRTC_RTCSTATS_IMPL( RTCOutboundRTPStreamStats, RTCRTPStreamStats, "outbound-rtp", &media_source_id, &remote_id, + &mid, &rid, &packets_sent, &retransmitted_packets_sent, @@ -830,7 +699,8 @@ WEBRTC_RTCSTATS_IMPL( &fir_count, &pli_count, &nack_count, - &qp_sum) + &qp_sum, + &active) // clang-format on RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats(const std::string& id, @@ -842,6 +712,7 @@ RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats(std::string&& id, : RTCRTPStreamStats(std::move(id), timestamp_us), media_source_id("mediaSourceId"), remote_id("remoteId"), + mid("mid"), rid("rid"), packets_sent("packetsSent"), retransmitted_packets_sent("retransmittedPacketsSent"), @@ -868,40 +739,11 @@ RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats(std::string&& id, fir_count("firCount"), pli_count("pliCount"), nack_count("nackCount"), - qp_sum("qpSum") {} + qp_sum("qpSum"), + active("active") {} RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats( - const RTCOutboundRTPStreamStats& other) - : RTCRTPStreamStats(other), - media_source_id(other.media_source_id), - remote_id(other.remote_id), - rid(other.rid), - packets_sent(other.packets_sent), - retransmitted_packets_sent(other.retransmitted_packets_sent), - bytes_sent(other.bytes_sent), - header_bytes_sent(other.header_bytes_sent), - retransmitted_bytes_sent(other.retransmitted_bytes_sent), - target_bitrate(other.target_bitrate), - frames_encoded(other.frames_encoded), - key_frames_encoded(other.key_frames_encoded), - total_encode_time(other.total_encode_time), - 
total_encoded_bytes_target(other.total_encoded_bytes_target), - frame_width(other.frame_width), - frame_height(other.frame_height), - frames_per_second(other.frames_per_second), - frames_sent(other.frames_sent), - huge_frames_sent(other.huge_frames_sent), - total_packet_send_delay(other.total_packet_send_delay), - quality_limitation_reason(other.quality_limitation_reason), - quality_limitation_durations(other.quality_limitation_durations), - quality_limitation_resolution_changes( - other.quality_limitation_resolution_changes), - content_type(other.content_type), - encoder_implementation(other.encoder_implementation), - fir_count(other.fir_count), - pli_count(other.pli_count), - nack_count(other.nack_count), - qp_sum(other.qp_sum) {} + const RTCOutboundRTPStreamStats& other) = default; RTCOutboundRTPStreamStats::~RTCOutboundRTPStreamStats() {} @@ -932,13 +774,7 @@ RTCRemoteInboundRtpStreamStats::RTCRemoteInboundRtpStreamStats( round_trip_time_measurements("roundTripTimeMeasurements") {} RTCRemoteInboundRtpStreamStats::RTCRemoteInboundRtpStreamStats( - const RTCRemoteInboundRtpStreamStats& other) - : RTCReceivedRtpStreamStats(other), - local_id(other.local_id), - round_trip_time(other.round_trip_time), - fraction_lost(other.fraction_lost), - total_round_trip_time(other.total_round_trip_time), - round_trip_time_measurements(other.round_trip_time_measurements) {} + const RTCRemoteInboundRtpStreamStats& other) = default; RTCRemoteInboundRtpStreamStats::~RTCRemoteInboundRtpStreamStats() {} @@ -971,14 +807,7 @@ RTCRemoteOutboundRtpStreamStats::RTCRemoteOutboundRtpStreamStats( total_round_trip_time("totalRoundTripTime") {} RTCRemoteOutboundRtpStreamStats::RTCRemoteOutboundRtpStreamStats( - const RTCRemoteOutboundRtpStreamStats& other) - : RTCSentRtpStreamStats(other), - local_id(other.local_id), - remote_timestamp(other.remote_timestamp), - reports_sent(other.reports_sent), - round_trip_time(other.round_trip_time), - round_trip_time_measurements(other.round_trip_time_measurements), - total_round_trip_time(other.total_round_trip_time) {} + const RTCRemoteOutboundRtpStreamStats& other) = default; RTCRemoteOutboundRtpStreamStats::~RTCRemoteOutboundRtpStreamStats() {} @@ -997,10 +826,8 @@ RTCMediaSourceStats::RTCMediaSourceStats(std::string&& id, int64_t timestamp_us) track_identifier("trackIdentifier"), kind("kind") {} -RTCMediaSourceStats::RTCMediaSourceStats(const RTCMediaSourceStats& other) - : RTCStats(other.id(), other.timestamp_us()), - track_identifier(other.track_identifier), - kind(other.kind) {} +RTCMediaSourceStats::RTCMediaSourceStats(const RTCMediaSourceStats& other) = + default; RTCMediaSourceStats::~RTCMediaSourceStats() {} @@ -1025,13 +852,8 @@ RTCAudioSourceStats::RTCAudioSourceStats(std::string&& id, int64_t timestamp_us) echo_return_loss("echoReturnLoss"), echo_return_loss_enhancement("echoReturnLossEnhancement") {} -RTCAudioSourceStats::RTCAudioSourceStats(const RTCAudioSourceStats& other) - : RTCMediaSourceStats(other), - audio_level(other.audio_level), - total_audio_energy(other.total_audio_energy), - total_samples_duration(other.total_samples_duration), - echo_return_loss(other.echo_return_loss), - echo_return_loss_enhancement(other.echo_return_loss_enhancement) {} +RTCAudioSourceStats::RTCAudioSourceStats(const RTCAudioSourceStats& other) = + default; RTCAudioSourceStats::~RTCAudioSourceStats() {} @@ -1054,12 +876,8 @@ RTCVideoSourceStats::RTCVideoSourceStats(std::string&& id, int64_t timestamp_us) frames("frames"), frames_per_second("framesPerSecond") {} 
-RTCVideoSourceStats::RTCVideoSourceStats(const RTCVideoSourceStats& other) - : RTCMediaSourceStats(other), - width(other.width), - height(other.height), - frames(other.frames), - frames_per_second(other.frames_per_second) {} +RTCVideoSourceStats::RTCVideoSourceStats(const RTCVideoSourceStats& other) = + default; RTCVideoSourceStats::~RTCVideoSourceStats() {} @@ -1076,8 +894,12 @@ WEBRTC_RTCSTATS_IMPL(RTCTransportStats, RTCStats, "transport", &remote_certificate_id, &tls_version, &dtls_cipher, + &dtls_role, &srtp_cipher, - &selected_candidate_pair_changes) + &selected_candidate_pair_changes, + &ice_role, + &ice_local_username_fragment, + &ice_state) // clang-format on RTCTransportStats::RTCTransportStats(const std::string& id, @@ -1097,24 +919,14 @@ RTCTransportStats::RTCTransportStats(std::string&& id, int64_t timestamp_us) remote_certificate_id("remoteCertificateId"), tls_version("tlsVersion"), dtls_cipher("dtlsCipher"), + dtls_role("dtlsRole"), srtp_cipher("srtpCipher"), - selected_candidate_pair_changes("selectedCandidatePairChanges") {} - -RTCTransportStats::RTCTransportStats(const RTCTransportStats& other) - : RTCStats(other.id(), other.timestamp_us()), - bytes_sent(other.bytes_sent), - packets_sent(other.packets_sent), - bytes_received(other.bytes_received), - packets_received(other.packets_received), - rtcp_transport_stats_id(other.rtcp_transport_stats_id), - dtls_state(other.dtls_state), - selected_candidate_pair_id(other.selected_candidate_pair_id), - local_certificate_id(other.local_certificate_id), - remote_certificate_id(other.remote_certificate_id), - tls_version(other.tls_version), - dtls_cipher(other.dtls_cipher), - srtp_cipher(other.srtp_cipher), - selected_candidate_pair_changes(other.selected_candidate_pair_changes) {} + selected_candidate_pair_changes("selectedCandidatePairChanges"), + ice_role("iceRole"), + ice_local_username_fragment("iceLocalUsernameFragment"), + ice_state("iceState") {} + +RTCTransportStats::RTCTransportStats(const RTCTransportStats& other) = default; RTCTransportStats::~RTCTransportStats() {} diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/OWNERS b/TMessagesProj/jni/voip/webrtc/system_wrappers/OWNERS index 0a2fb1566d..f7bd06a0c3 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/OWNERS @@ -1,3 +1,2 @@ henrika@webrtc.org mflodman@webrtc.org -nisse@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/include/clock.h b/TMessagesProj/jni/voip/webrtc/system_wrappers/include/clock.h index 271291c214..60296070cc 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/include/clock.h +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/include/clock.h @@ -39,11 +39,7 @@ class RTC_EXPORT Clock { int64_t TimeInMicroseconds() { return CurrentTime().us(); } // Retrieve an NTP absolute timestamp (with an epoch of Jan 1, 1900). - // TODO(bugs.webrtc.org/11327): Make this non-virtual once - // "WebRTC-SystemIndependentNtpTimeKillSwitch" is removed. - virtual NtpTime CurrentNtpTime() { - return ConvertTimestampToNtpTime(CurrentTime()); - } + NtpTime CurrentNtpTime() { return ConvertTimestampToNtpTime(CurrentTime()); } int64_t CurrentNtpInMilliseconds() { return CurrentNtpTime().ToMs(); } // Converts between a relative timestamp returned by this clock, to NTP time. 
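The clock.h hunk above makes CurrentNtpTime() always go through ConvertTimestampToNtpTime(). A minimal sketch of that conversion, assuming the usual NTP convention (epoch 1900-01-01, fractions in units of 1/2^32 s) and a non-negative Unix-epoch microsecond input; the function and constant names here are illustrative, not the webrtc API.

#include <cstdint>
#include <utility>

// Returns {NTP seconds, NTP fractions} for a Unix-epoch timestamp given in
// microseconds. 2'208'988'800 s separate 1900-01-01 from 1970-01-01.
std::pair<uint32_t, uint32_t> UnixMicrosToNtp(int64_t time_us) {
  constexpr int64_t kNtpJan1970Sec = 2'208'988'800;
  const int64_t seconds = time_us / 1'000'000 + kNtpJan1970Sec;
  const int64_t micros = time_us % 1'000'000;
  // Scale microseconds to 1/2^32-second units.
  const uint32_t fractions =
      static_cast<uint32_t>((micros << 32) / 1'000'000);
  return {static_cast<uint32_t>(seconds), fractions};
}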
diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/include/field_trial.h b/TMessagesProj/jni/voip/webrtc/system_wrappers/include/field_trial.h index 52db33b0e9..ffbd864a6a 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/include/field_trial.h +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/include/field_trial.h @@ -13,6 +13,9 @@ #include +#include "absl/strings/string_view.h" +#include "rtc_base/containers/flat_set.h" + // Field trials allow webrtc clients (such as Chrome) to turn on feature code // in binaries out in the field and gather information with that. // @@ -24,7 +27,7 @@ // WEBRTC_EXCLUDE_FIELD_TRIAL_DEFAULT (if GN is used this can be achieved // by setting the GN arg rtc_exclude_field_trial_default to true). // 2. Provide an implementation of: -// std::string webrtc::field_trial::FindFullName(const std::string& trial). +// std::string webrtc::field_trial::FindFullName(absl::string_view trial). // // They are designed to wire up directly to chrome field trials and to speed up // developers by reducing the need to wire APIs to control whether a feature is @@ -61,18 +64,18 @@ namespace field_trial { // if the trial does not exists. // // Note: To keep things tidy append all the trial names with WebRTC. -std::string FindFullName(const std::string& name); +std::string FindFullName(absl::string_view name); // Convenience method, returns true iff FindFullName(name) return a string that // starts with "Enabled". // TODO(tommi): Make sure all implementations support this. -inline bool IsEnabled(const char* name) { +inline bool IsEnabled(absl::string_view name) { return FindFullName(name).find("Enabled") == 0; } // Convenience method, returns true iff FindFullName(name) return a string that // starts with "Disabled". -inline bool IsDisabled(const char* name) { +inline bool IsDisabled(absl::string_view name) { return FindFullName(name).find("Disabled") == 0; } @@ -84,17 +87,23 @@ void InitFieldTrialsFromString(const char* trials_string); const char* GetFieldTrialString(); -#ifndef WEBRTC_EXCLUDE_FIELD_TRIAL_DEFAULT // Validates the given field trial string. -bool FieldTrialsStringIsValid(const char* trials_string); +bool FieldTrialsStringIsValid(absl::string_view trials_string); // Merges two field trial strings. // // If a key (trial) exists twice with conflicting values (groups), the value // in 'second' takes precedence. // Shall only be called with valid FieldTrial strings. -std::string MergeFieldTrialsStrings(const char* first, const char* second); -#endif // WEBRTC_EXCLUDE_FIELD_TRIAL_DEFAULT +std::string MergeFieldTrialsStrings(absl::string_view first, + absl::string_view second); + +// RAII type that ensures global state is consistent between tests. 
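With the header now taking absl::string_view, a typical client-side lookup stays the same. A short usage sketch built only from the declarations above; the trial name and group value are illustrative, and the string literal is kept alive for the process because InitFieldTrialsFromString() stores the pointer rather than copying it.

#include <string>

#include "system_wrappers/include/field_trial.h"

void ConfigureFromFieldTrials() {
  // "Name/Group/" pairs with a trailing '/', the format checked by
  // FieldTrialsStringIsValid().
  webrtc::field_trial::InitFieldTrialsFromString(
      "WebRTC-ExampleExperiment/Enabled-50/");

  if (webrtc::field_trial::IsEnabled("WebRTC-ExampleExperiment")) {
    // FindFullName() returns the group string, "Enabled-50" here; any
    // parameters are parsed out of it by the feature code.
    std::string group =
        webrtc::field_trial::FindFullName("WebRTC-ExampleExperiment");
    (void)group;
  }
}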
+class ScopedGlobalFieldTrialsForTesting { + public: + explicit ScopedGlobalFieldTrialsForTesting(flat_set keys); + ~ScopedGlobalFieldTrialsForTesting(); +}; } // namespace field_trial } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/include/metrics.h b/TMessagesProj/jni/voip/webrtc/system_wrappers/include/metrics.h index c992458415..ca9ed6d09b 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/include/metrics.h +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/include/metrics.h @@ -13,12 +13,14 @@ #include +#include #include #include #include -#include "rtc_base/atomic_ops.h" +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" +#include "rtc_base/string_utils.h" #if defined(RTC_DISABLE_METRICS) #define RTC_METRICS_ENABLED 0 @@ -76,12 +78,12 @@ void NoOp(const Ts&...) {} // by setting the GN arg rtc_exclude_metrics_default to true). // 2. Provide implementations of: // Histogram* webrtc::metrics::HistogramFactoryGetCounts( -// const std::string& name, int sample, int min, int max, +// absl::string_view name, int sample, int min, int max, // int bucket_count); // Histogram* webrtc::metrics::HistogramFactoryGetEnumeration( -// const std::string& name, int sample, int boundary); +// absl::string_view name, int sample, int boundary); // void webrtc::metrics::HistogramAdd( -// Histogram* histogram_pointer, const std::string& name, int sample); +// Histogram* histogram_pointer, absl::string_view name, int sample); // // Example usage: // @@ -188,26 +190,22 @@ void NoOp(const Ts&...) {} webrtc::metrics::HistogramFactoryGetEnumeration(name, boundary)) // The name of the histogram should not vary. -// TODO(asapersson): Consider changing string to const char*. -#define RTC_HISTOGRAM_COMMON_BLOCK(constant_name, sample, \ - factory_get_invocation) \ - do { \ - static webrtc::metrics::Histogram* atomic_histogram_pointer = nullptr; \ - webrtc::metrics::Histogram* histogram_pointer = \ - rtc::AtomicOps::AcquireLoadPtr(&atomic_histogram_pointer); \ - if (!histogram_pointer) { \ - histogram_pointer = factory_get_invocation; \ - webrtc::metrics::Histogram* prev_pointer = \ - rtc::AtomicOps::CompareAndSwapPtr( \ - &atomic_histogram_pointer, \ - static_cast(nullptr), \ - histogram_pointer); \ - RTC_DCHECK(prev_pointer == nullptr || \ - prev_pointer == histogram_pointer); \ - } \ - if (histogram_pointer) { \ - webrtc::metrics::HistogramAdd(histogram_pointer, sample); \ - } \ +#define RTC_HISTOGRAM_COMMON_BLOCK(constant_name, sample, \ + factory_get_invocation) \ + do { \ + static std::atomic atomic_histogram_pointer( \ + nullptr); \ + webrtc::metrics::Histogram* histogram_pointer = \ + atomic_histogram_pointer.load(std::memory_order_acquire); \ + if (!histogram_pointer) { \ + histogram_pointer = factory_get_invocation; \ + webrtc::metrics::Histogram* null_histogram = nullptr; \ + atomic_histogram_pointer.compare_exchange_strong(null_histogram, \ + histogram_pointer); \ + } \ + if (histogram_pointer) { \ + webrtc::metrics::HistogramAdd(histogram_pointer, sample); \ + } \ } while (0) // The histogram is constructed/found for each call. @@ -363,7 +361,7 @@ namespace webrtc { namespace metrics { // Time that should have elapsed for stats that are gathered once per call. -enum { kMinRunTimeInSeconds = 10 }; +constexpr int kMinRunTimeInSeconds = 10; class Histogram; @@ -371,32 +369,31 @@ class Histogram; // histogram). // Get histogram for counters. 
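The RTC_HISTOGRAM_COMMON_BLOCK rewrite above swaps rtc::AtomicOps for std::atomic but keeps the same publish-once caching of the histogram pointer. A standalone sketch of that pattern with stand-in types; Histogram, CreateHistogram and the histogram name are all hypothetical here, not the webrtc::metrics API.

#include <atomic>
#include <cstdio>

struct Histogram { const char* name; };  // Stand-in for the opaque Histogram.

Histogram* CreateHistogram() {  // Stand-in for the factory_get_invocation.
  static Histogram h{"WebRTC.Example"};
  return &h;
}

void HistogramAdd(Histogram* h, int sample) {
  std::printf("%s <- %d\n", h->name, sample);
}

void AddSample(int sample) {
  // Acquire-load the cached pointer; on first use create the histogram and
  // publish it with compare_exchange_strong so concurrent callers end up
  // agreeing on a single pointer (the factory returns the same object for
  // the same name, so a lost race is harmless).
  static std::atomic<Histogram*> cached(nullptr);
  Histogram* histogram = cached.load(std::memory_order_acquire);
  if (!histogram) {
    histogram = CreateHistogram();
    Histogram* expected = nullptr;
    cached.compare_exchange_strong(expected, histogram);
  }
  if (histogram) {
    HistogramAdd(histogram, sample);
  }
}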
-Histogram* HistogramFactoryGetCounts(const std::string& name, +Histogram* HistogramFactoryGetCounts(absl::string_view name, int min, int max, int bucket_count); // Get histogram for counters with linear bucket spacing. -Histogram* HistogramFactoryGetCountsLinear(const std::string& name, +Histogram* HistogramFactoryGetCountsLinear(absl::string_view name, int min, int max, int bucket_count); // Get histogram for enumerators. // `boundary` should be above the max enumerator sample. -Histogram* HistogramFactoryGetEnumeration(const std::string& name, - int boundary); +Histogram* HistogramFactoryGetEnumeration(absl::string_view name, int boundary); // Get sparse histogram for enumerators. // `boundary` should be above the max enumerator sample. -Histogram* SparseHistogramFactoryGetEnumeration(const std::string& name, +Histogram* SparseHistogramFactoryGetEnumeration(absl::string_view name, int boundary); // Function for adding a `sample` to a histogram. void HistogramAdd(Histogram* histogram_pointer, int sample); struct SampleInfo { - SampleInfo(const std::string& name, int min, int max, size_t bucket_count); + SampleInfo(absl::string_view name, int min, int max, size_t bucket_count); ~SampleInfo(); const std::string name; @@ -412,7 +409,8 @@ void Enable(); // Gets histograms and clears all samples. void GetAndReset( - std::map>* histograms); + std::map, rtc::AbslStringViewCmp>* + histograms); // Functions below are mainly for testing. @@ -420,17 +418,17 @@ void GetAndReset( void Reset(); // Returns the number of times the `sample` has been added to the histogram. -int NumEvents(const std::string& name, int sample); +int NumEvents(absl::string_view name, int sample); // Returns the total number of added samples to the histogram. -int NumSamples(const std::string& name); +int NumSamples(absl::string_view name); // Returns the minimum sample value (or -1 if the histogram has no samples). -int MinSample(const std::string& name); +int MinSample(absl::string_view name); // Returns a map with keys the samples with at least one event and values the // number of events for that sample. -std::map Samples(const std::string& name); +std::map Samples(absl::string_view name); } // namespace metrics } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/include/rtp_to_ntp_estimator.h b/TMessagesProj/jni/voip/webrtc/system_wrappers/include/rtp_to_ntp_estimator.h index 175063351a..3b62b78608 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/include/rtp_to_ntp_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/include/rtp_to_ntp_estimator.h @@ -18,60 +18,51 @@ #include "absl/types/optional.h" #include "modules/include/module_common_types_public.h" #include "rtc_base/checks.h" -#include "rtc_base/numerics/moving_median_filter.h" #include "system_wrappers/include/ntp_time.h" namespace webrtc { -// Class for converting an RTP timestamp to the NTP domain in milliseconds. + +// Converts an RTP timestamp to the NTP domain. // The class needs to be trained with (at least 2) RTP/NTP timestamp pairs from // RTCP sender reports before the convertion can be done. class RtpToNtpEstimator { public: - RtpToNtpEstimator(); - ~RtpToNtpEstimator(); + static constexpr int kMaxInvalidSamples = 3; - // RTP and NTP timestamp pair from a RTCP SR report. 
- struct RtcpMeasurement { - RtcpMeasurement(uint32_t ntp_secs, - uint32_t ntp_frac, - int64_t unwrapped_timestamp); - bool IsEqual(const RtcpMeasurement& other) const; + RtpToNtpEstimator() = default; + RtpToNtpEstimator(const RtpToNtpEstimator&) = delete; + RtpToNtpEstimator& operator=(const RtpToNtpEstimator&) = delete; + ~RtpToNtpEstimator() = default; - NtpTime ntp_time; - int64_t unwrapped_rtp_timestamp; - }; + enum UpdateResult { kInvalidMeasurement, kSameMeasurement, kNewMeasurement }; + // Updates measurements with RTP/NTP timestamp pair from a RTCP sender report. + UpdateResult UpdateMeasurements(NtpTime ntp, uint32_t rtp_timestamp); - // Estimated parameters from RTP and NTP timestamp pairs in `measurements_`. - struct Parameters { - Parameters() : frequency_khz(0.0), offset_ms(0.0) {} + // Converts an RTP timestamp to the NTP domain. + // Returns invalid NtpTime (i.e. NtpTime(0)) on failure. + NtpTime Estimate(uint32_t rtp_timestamp) const; - Parameters(double frequency_khz, double offset_ms) - : frequency_khz(frequency_khz), offset_ms(offset_ms) {} + // Returns estimated rtp_timestamp frequency, or 0 on failure. + double EstimatedFrequencyKhz() const; - double frequency_khz; - double offset_ms; + private: + // Estimated parameters from RTP and NTP timestamp pairs in `measurements_`. + // Defines linear estimation: NtpTime (in units of 1s/2^32) = + // `Parameters::slope` * rtp_timestamp + `Parameters::offset`. + struct Parameters { + double slope; + double offset; }; - // Updates measurements with RTP/NTP timestamp pair from a RTCP sender report. - // `new_rtcp_sr` is set to true if a new report is added. - bool UpdateMeasurements(uint32_t ntp_secs, - uint32_t ntp_frac, - uint32_t rtp_timestamp, - bool* new_rtcp_sr); - - // Converts an RTP timestamp to the NTP domain in milliseconds. - // Returns true on success, false otherwise. - bool Estimate(int64_t rtp_timestamp, int64_t* ntp_timestamp_ms) const; - - // Returns estimated rtp to ntp linear transform parameters. - const absl::optional params() const; - - static const int kMaxInvalidSamples = 3; + // RTP and NTP timestamp pair from a RTCP SR report. 
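A usage sketch of the reworked estimator API declared above: feed it RTCP sender-report pairs, then map a later RTP timestamp into the NTP domain. The values are illustrative (a 90 kHz RTP clock sampled one second apart), and the two-argument NtpTime(seconds, fractions) constructor is assumed from ntp_time.h.

#include <cstdint>

#include "system_wrappers/include/ntp_time.h"
#include "system_wrappers/include/rtp_to_ntp_estimator.h"

webrtc::NtpTime MapRtpToNtp() {
  webrtc::RtpToNtpEstimator estimator;

  // Two (NTP, RTP timestamp) pairs as carried by RTCP sender reports.
  estimator.UpdateMeasurements(webrtc::NtpTime(3900000000u, 0), 90000);
  estimator.UpdateMeasurements(webrtc::NtpTime(3900000001u, 0), 180000);

  // With at least two valid samples the linear fit is available; an invalid
  // NtpTime (i.e. NtpTime(0)) is returned while the estimator is untrained.
  return estimator.Estimate(270000);
}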
+ struct RtcpMeasurement { + NtpTime ntp_time; + int64_t unwrapped_rtp_timestamp; + }; - private: void UpdateParameters(); - int consecutive_invalid_samples_; + int consecutive_invalid_samples_ = 0; std::list measurements_; absl::optional params_; mutable TimestampUnwrapper unwrapper_; diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/clock.cc b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/clock.cc index 7784246fa4..88c99d6a68 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/clock.cc +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/clock.cc @@ -10,24 +10,6 @@ #include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" - -#if defined(WEBRTC_WIN) - -// Windows needs to be included before mmsystem.h -#include "rtc_base/win32.h" - -#include - - -#elif defined(WEBRTC_POSIX) - -#include -#include - -#endif // defined(WEBRTC_POSIX) - -#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" namespace webrtc { @@ -60,210 +42,23 @@ NtpTime TimeMicrosToNtp(int64_t time_us) { return NtpTime(ntp_seconds, ntp_fractions); } -void GetSecondsAndFraction(const timeval& time, - uint32_t* seconds, - double* fraction) { - *seconds = time.tv_sec + kNtpJan1970; - *fraction = time.tv_usec / 1e6; - - while (*fraction >= 1) { - --*fraction; - ++*seconds; - } - while (*fraction < 0) { - ++*fraction; - --*seconds; - } -} - } // namespace class RealTimeClock : public Clock { public: - RealTimeClock() - : use_system_independent_ntp_time_(!field_trial::IsEnabled( - "WebRTC-SystemIndependentNtpTimeKillSwitch")) {} + RealTimeClock() = default; Timestamp CurrentTime() override { return Timestamp::Micros(rtc::TimeMicros()); } - NtpTime CurrentNtpTime() override { - return use_system_independent_ntp_time_ ? TimeMicrosToNtp(rtc::TimeMicros()) - : SystemDependentNtpTime(); - } - NtpTime ConvertTimestampToNtpTime(Timestamp timestamp) override { - // This method does not check `use_system_independent_ntp_time_` because - // all callers never used the old behavior of `CurrentNtpTime`. return TimeMicrosToNtp(timestamp.us()); } - - protected: - virtual timeval CurrentTimeVal() = 0; - - private: - NtpTime SystemDependentNtpTime() { - uint32_t seconds; - double fraction; - GetSecondsAndFraction(CurrentTimeVal(), &seconds, &fraction); - - return NtpTime(seconds, static_cast( - fraction * kMagicNtpFractionalUnit + 0.5)); - } - - bool use_system_independent_ntp_time_; -}; - -#if defined(WINUWP) -class WinUwpRealTimeClock final : public RealTimeClock { - public: - WinUwpRealTimeClock() = default; - ~WinUwpRealTimeClock() override {} - - protected: - timeval CurrentTimeVal() override { - // The rtc::WinUwpSystemTimeNanos() method is already time offset from a - // base epoch value and might as be synchronized against an NTP time server - // as an added bonus. - auto nanos = rtc::WinUwpSystemTimeNanos(); - - struct timeval tv; - - tv.tv_sec = rtc::dchecked_cast(nanos / 1000000000); - tv.tv_usec = rtc::dchecked_cast(nanos / 1000); - - return tv; - } -}; - -#elif defined(WEBRTC_WIN) -// TODO(pbos): Consider modifying the implementation to synchronize itself -// against system time (update ref_point_) periodically to -// prevent clock drift. 
-class WindowsRealTimeClock : public RealTimeClock { - public: - WindowsRealTimeClock() - : last_time_ms_(0), - num_timer_wraps_(0), - ref_point_(GetSystemReferencePoint()) {} - - ~WindowsRealTimeClock() override {} - - protected: - struct ReferencePoint { - FILETIME file_time; - LARGE_INTEGER counter_ms; - }; - - timeval CurrentTimeVal() override { - const uint64_t FILETIME_1970 = 0x019db1ded53e8000; - - FILETIME StartTime; - uint64_t Time; - struct timeval tv; - - // We can't use query performance counter since they can change depending on - // speed stepping. - GetTime(&StartTime); - - Time = (((uint64_t)StartTime.dwHighDateTime) << 32) + - (uint64_t)StartTime.dwLowDateTime; - - // Convert the hecto-nano second time to tv format. - Time -= FILETIME_1970; - - tv.tv_sec = (uint32_t)(Time / (uint64_t)10000000); - tv.tv_usec = (uint32_t)((Time % (uint64_t)10000000) / 10); - return tv; - } - - void GetTime(FILETIME* current_time) { - DWORD t; - LARGE_INTEGER elapsed_ms; - { - MutexLock lock(&mutex_); - // time MUST be fetched inside the critical section to avoid non-monotonic - // last_time_ms_ values that'll register as incorrect wraparounds due to - // concurrent calls to GetTime. - t = timeGetTime(); - if (t < last_time_ms_) - num_timer_wraps_++; - last_time_ms_ = t; - elapsed_ms.HighPart = num_timer_wraps_; - } - elapsed_ms.LowPart = t; - elapsed_ms.QuadPart = elapsed_ms.QuadPart - ref_point_.counter_ms.QuadPart; - - // Translate to 100-nanoseconds intervals (FILETIME resolution) - // and add to reference FILETIME to get current FILETIME. - ULARGE_INTEGER filetime_ref_as_ul; - filetime_ref_as_ul.HighPart = ref_point_.file_time.dwHighDateTime; - filetime_ref_as_ul.LowPart = ref_point_.file_time.dwLowDateTime; - filetime_ref_as_ul.QuadPart += - static_cast((elapsed_ms.QuadPart) * 1000 * 10); - - // Copy to result - current_time->dwHighDateTime = filetime_ref_as_ul.HighPart; - current_time->dwLowDateTime = filetime_ref_as_ul.LowPart; - } - - static ReferencePoint GetSystemReferencePoint() { - ReferencePoint ref = {}; - FILETIME ft0 = {}; - FILETIME ft1 = {}; - // Spin waiting for a change in system time. As soon as this change happens, - // get the matching call for timeGetTime() as soon as possible. This is - // assumed to be the most accurate offset that we can get between - // timeGetTime() and system time. - - // Set timer accuracy to 1 ms. 
- timeBeginPeriod(1); - GetSystemTimeAsFileTime(&ft0); - do { - GetSystemTimeAsFileTime(&ft1); - - ref.counter_ms.QuadPart = timeGetTime(); - Sleep(0); - } while ((ft0.dwHighDateTime == ft1.dwHighDateTime) && - (ft0.dwLowDateTime == ft1.dwLowDateTime)); - ref.file_time = ft1; - timeEndPeriod(1); - return ref; - } - - Mutex mutex_; - DWORD last_time_ms_; - LONG num_timer_wraps_; - const ReferencePoint ref_point_; -}; - -#elif defined(WEBRTC_POSIX) -class UnixRealTimeClock : public RealTimeClock { - public: - UnixRealTimeClock() {} - - ~UnixRealTimeClock() override {} - - protected: - timeval CurrentTimeVal() override { - struct timeval tv; - gettimeofday(&tv, nullptr); - return tv; - } }; -#endif // defined(WEBRTC_POSIX) Clock* Clock::GetRealTimeClock() { -#if defined(WINUWP) - static Clock* const clock = new WinUwpRealTimeClock(); -#elif defined(WEBRTC_WIN) - static Clock* const clock = new WindowsRealTimeClock(); -#elif defined(WEBRTC_POSIX) - static Clock* const clock = new UnixRealTimeClock(); -#else - static Clock* const clock = nullptr; -#endif + static Clock* const clock = new RealTimeClock(); return clock; } diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features.cc b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features.cc index 23e0629851..b676339eea 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features.cc +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_features.cc @@ -93,13 +93,14 @@ int GetCPUInfo(CPUFeature feature) { // a) AVX are supported by the CPU, // b) XSAVE is supported by the CPU, // c) XSAVE is enabled by the kernel. - // See http://software.intel.com/en-us/blogs/2011/04/14/is-avx-enabled - // AVX2 support needs (avx_support && (cpu_info7[1] & 0x00000020) != 0;). - return (cpu_info[2] & 0x10000000) != 0 && + // Compiling with MSVC and /arch:AVX2 surprisingly generates BMI2 + // instructions (see crbug.com/1315519). 
+ return (cpu_info[2] & 0x10000000) != 0 /* AVX */ && (cpu_info[2] & 0x04000000) != 0 /* XSAVE */ && (cpu_info[2] & 0x08000000) != 0 /* OSXSAVE */ && (xgetbv(0) & 0x00000006) == 6 /* XSAVE enabled by kernel */ && - (cpu_info7[1] & 0x00000020) != 0; + (cpu_info7[1] & 0x00000020) != 0 /* AVX2 */ && + (cpu_info7[1] & 0x00000100) != 0 /* BMI2 */; } #endif // WEBRTC_ENABLE_AVX2 return 0; diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_info.cc b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_info.cc index 7288c67efd..eff720371a 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_info.cc +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_info.cc @@ -32,7 +32,7 @@ static int DetectNumberOfCores() { number_of_cores = static_cast(si.dwNumberOfProcessors); #elif defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID) number_of_cores = static_cast(sysconf(_SC_NPROCESSORS_ONLN)); - if (number_of_cores < 0) { + if (number_of_cores <= 0) { RTC_LOG(LS_ERROR) << "Failed to get number of cores"; number_of_cores = 1; } diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/field_trial.cc b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/field_trial.cc index d16ea7e03f..01b7501204 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/field_trial.cc +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/field_trial.cc @@ -13,9 +13,12 @@ #include #include +#include +#include "absl/algorithm/container.h" #include "absl/strings/string_view.h" #include "rtc_base/checks.h" +#include "rtc_base/containers/flat_set.h" #include "rtc_base/logging.h" #include "rtc_base/string_encode.h" @@ -26,9 +29,15 @@ namespace field_trial { static const char* trials_init_string = NULL; -#ifndef WEBRTC_EXCLUDE_FIELD_TRIAL_DEFAULT namespace { + constexpr char kPersistentStringSeparator = '/'; + +flat_set& TestKeys() { + static auto* test_keys = new flat_set(); + return *test_keys; +} + // Validates the given field trial string. // E.g.: // "WebRTC-experimentFoo/Enabled/WebRTC-experimentBar/Enabled100kbps/" @@ -68,9 +77,10 @@ bool FieldTrialsStringIsValidInternal(const absl::string_view trials) { return true; } + } // namespace -bool FieldTrialsStringIsValid(const char* trials_string) { +bool FieldTrialsStringIsValid(absl::string_view trials_string) { return FieldTrialsStringIsValidInternal(trials_string); } @@ -78,18 +88,19 @@ void InsertOrReplaceFieldTrialStringsInMap( std::map* fieldtrial_map, const absl::string_view trials_string) { if (FieldTrialsStringIsValidInternal(trials_string)) { - std::vector tokens; - rtc::split(std::string(trials_string), '/', &tokens); + std::vector tokens = rtc::split(trials_string, '/'); // Skip last token which is empty due to trailing '/'. 
for (size_t idx = 0; idx < tokens.size() - 1; idx += 2) { - (*fieldtrial_map)[tokens[idx]] = tokens[idx + 1]; + (*fieldtrial_map)[std::string(tokens[idx])] = + std::string(tokens[idx + 1]); } } else { RTC_DCHECK_NOTREACHED() << "Invalid field trials string:" << trials_string; } } -std::string MergeFieldTrialsStrings(const char* first, const char* second) { +std::string MergeFieldTrialsStrings(absl::string_view first, + absl::string_view second) { std::map fieldtrial_map; InsertOrReplaceFieldTrialStringsInMap(&fieldtrial_map, first); InsertOrReplaceFieldTrialStringsInMap(&fieldtrial_map, second); @@ -102,11 +113,18 @@ std::string MergeFieldTrialsStrings(const char* first, const char* second) { return merged; } -std::string FindFullName(const std::string& name) { +#ifndef WEBRTC_EXCLUDE_FIELD_TRIAL_DEFAULT +std::string FindFullName(absl::string_view name) { +#if WEBRTC_STRICT_FIELD_TRIALS + RTC_DCHECK(absl::c_linear_search(kRegisteredFieldTrials, name) || + TestKeys().contains(name)) + << name << " is not registered."; +#endif + if (trials_init_string == NULL) return std::string(); - std::string trials_string(trials_init_string); + absl::string_view trials_string(trials_init_string); if (trials_string.empty()) return std::string(); @@ -122,14 +140,14 @@ std::string FindFullName(const std::string& name) { if (field_value_end == trials_string.npos || field_value_end == field_name_end + 1) break; - std::string field_name(trials_string, next_item, - field_name_end - next_item); - std::string field_value(trials_string, field_name_end + 1, - field_value_end - field_name_end - 1); + absl::string_view field_name = + trials_string.substr(next_item, field_name_end - next_item); + absl::string_view field_value = trials_string.substr( + field_name_end + 1, field_value_end - field_name_end - 1); next_item = field_value_end + 1; if (name == field_name) - return field_value; + return std::string(field_value); } return std::string(); } @@ -138,12 +156,10 @@ std::string FindFullName(const std::string& name) { // Optionally initialize field trial from a string. 
void InitFieldTrialsFromString(const char* trials_string) { RTC_LOG(LS_INFO) << "Setting field trial string:" << trials_string; -#ifndef WEBRTC_EXCLUDE_FIELD_TRIAL_DEFAULT if (trials_string) { RTC_DCHECK(FieldTrialsStringIsValidInternal(trials_string)) << "Invalid field trials string:" << trials_string; }; -#endif // WEBRTC_EXCLUDE_FIELD_TRIAL_DEFAULT trials_init_string = trials_string; } @@ -151,5 +167,14 @@ const char* GetFieldTrialString() { return trials_init_string; } +ScopedGlobalFieldTrialsForTesting::ScopedGlobalFieldTrialsForTesting( + flat_set keys) { + TestKeys() = std::move(keys); +} + +ScopedGlobalFieldTrialsForTesting::~ScopedGlobalFieldTrialsForTesting() { + TestKeys().clear(); +} + } // namespace field_trial } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/metrics.cc b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/metrics.cc index b14eef4618..39ca590070 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/metrics.cc +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/metrics.cc @@ -11,7 +11,8 @@ #include -#include "rtc_base/constructor_magic.h" +#include "absl/strings/string_view.h" +#include "rtc_base/string_utils.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -30,11 +31,14 @@ const int kMaxSampleMapSize = 300; class RtcHistogram { public: - RtcHistogram(const std::string& name, int min, int max, int bucket_count) + RtcHistogram(absl::string_view name, int min, int max, int bucket_count) : min_(min), max_(max), info_(name, min, max, bucket_count) { RTC_DCHECK_GT(bucket_count, 0); } + RtcHistogram(const RtcHistogram&) = delete; + RtcHistogram& operator=(const RtcHistogram&) = delete; + void Add(int sample) { sample = std::min(sample, max_); sample = std::max(sample, min_ - 1); // Underflow bucket. 
@@ -99,8 +103,6 @@ class RtcHistogram { const int min_; const int max_; SampleInfo info_ RTC_GUARDED_BY(mutex_); - - RTC_DISALLOW_COPY_AND_ASSIGN(RtcHistogram); }; class RtcHistogramMap { @@ -108,7 +110,10 @@ class RtcHistogramMap { RtcHistogramMap() {} ~RtcHistogramMap() {} - Histogram* GetCountsHistogram(const std::string& name, + RtcHistogramMap(const RtcHistogramMap&) = delete; + RtcHistogramMap& operator=(const RtcHistogramMap&) = delete; + + Histogram* GetCountsHistogram(absl::string_view name, int min, int max, int bucket_count) { @@ -118,23 +123,24 @@ class RtcHistogramMap { return reinterpret_cast(it->second.get()); RtcHistogram* hist = new RtcHistogram(name, min, max, bucket_count); - map_[name].reset(hist); + map_.emplace(name, hist); return reinterpret_cast(hist); } - Histogram* GetEnumerationHistogram(const std::string& name, int boundary) { + Histogram* GetEnumerationHistogram(absl::string_view name, int boundary) { MutexLock lock(&mutex_); const auto& it = map_.find(name); if (it != map_.end()) return reinterpret_cast(it->second.get()); RtcHistogram* hist = new RtcHistogram(name, 1, boundary, boundary + 1); - map_[name].reset(hist); + map_.emplace(name, hist); return reinterpret_cast(hist); } - void GetAndReset( - std::map>* histograms) { + void GetAndReset(std::map, + rtc::AbslStringViewCmp>* histograms) { MutexLock lock(&mutex_); for (const auto& kv : map_) { std::unique_ptr info = kv.second->GetAndReset(); @@ -150,25 +156,25 @@ class RtcHistogramMap { kv.second->Reset(); } - int NumEvents(const std::string& name, int sample) const { + int NumEvents(absl::string_view name, int sample) const { MutexLock lock(&mutex_); const auto& it = map_.find(name); return (it == map_.end()) ? 0 : it->second->NumEvents(sample); } - int NumSamples(const std::string& name) const { + int NumSamples(absl::string_view name) const { MutexLock lock(&mutex_); const auto& it = map_.find(name); return (it == map_.end()) ? 0 : it->second->NumSamples(); } - int MinSample(const std::string& name) const { + int MinSample(absl::string_view name) const { MutexLock lock(&mutex_); const auto& it = map_.find(name); return (it == map_.end()) ? -1 : it->second->MinSample(); } - std::map Samples(const std::string& name) const { + std::map Samples(absl::string_view name) const { MutexLock lock(&mutex_); const auto& it = map_.find(name); return (it == map_.end()) ? std::map() : it->second->Samples(); @@ -176,25 +182,21 @@ class RtcHistogramMap { private: mutable Mutex mutex_; - std::map> map_ - RTC_GUARDED_BY(mutex_); - - RTC_DISALLOW_COPY_AND_ASSIGN(RtcHistogramMap); + std::map, rtc::AbslStringViewCmp> + map_ RTC_GUARDED_BY(mutex_); }; // RtcHistogramMap is allocated upon call to Enable(). // The histogram getter functions, which return pointer values to the histograms // in the map, are cached in WebRTC. Therefore, this memory is not freed by the // application (the memory will be reclaimed by the OS). 
-static RtcHistogramMap* volatile g_rtc_histogram_map = nullptr; +static std::atomic g_rtc_histogram_map(nullptr); void CreateMap() { - RtcHistogramMap* map = rtc::AtomicOps::AcquireLoadPtr(&g_rtc_histogram_map); + RtcHistogramMap* map = g_rtc_histogram_map.load(std::memory_order_acquire); if (map == nullptr) { RtcHistogramMap* new_map = new RtcHistogramMap(); - RtcHistogramMap* old_map = rtc::AtomicOps::CompareAndSwapPtr( - &g_rtc_histogram_map, static_cast(nullptr), new_map); - if (old_map != nullptr) + if (!g_rtc_histogram_map.compare_exchange_strong(map, new_map)) delete new_map; } } @@ -202,15 +204,15 @@ void CreateMap() { // Set the first time we start using histograms. Used to make sure Enable() is // not called thereafter. #if RTC_DCHECK_IS_ON -static volatile int g_rtc_histogram_called = 0; +static std::atomic g_rtc_histogram_called(0); #endif // Gets the map (or nullptr). RtcHistogramMap* GetMap() { #if RTC_DCHECK_IS_ON - rtc::AtomicOps::ReleaseStore(&g_rtc_histogram_called, 1); + g_rtc_histogram_called.store(1, std::memory_order_release); #endif - return g_rtc_histogram_map; + return g_rtc_histogram_map.load(); } } // namespace @@ -222,7 +224,7 @@ RtcHistogramMap* GetMap() { // Creates (or finds) histogram. // The returned histogram pointer is cached (and used for adding samples in // subsequent calls). -Histogram* HistogramFactoryGetCounts(const std::string& name, +Histogram* HistogramFactoryGetCounts(absl::string_view name, int min, int max, int bucket_count) { @@ -235,7 +237,7 @@ Histogram* HistogramFactoryGetCounts(const std::string& name, // Creates (or finds) histogram. // The returned histogram pointer is cached (and used for adding samples in // subsequent calls). -Histogram* HistogramFactoryGetCountsLinear(const std::string& name, +Histogram* HistogramFactoryGetCountsLinear(absl::string_view name, int min, int max, int bucket_count) { @@ -250,7 +252,7 @@ Histogram* HistogramFactoryGetCountsLinear(const std::string& name, // Creates (or finds) histogram. // The returned histogram pointer is cached (and used for adding samples in // subsequent calls). -Histogram* HistogramFactoryGetEnumeration(const std::string& name, +Histogram* HistogramFactoryGetEnumeration(absl::string_view name, int boundary) { RtcHistogramMap* map = GetMap(); if (!map) @@ -260,7 +262,7 @@ Histogram* HistogramFactoryGetEnumeration(const std::string& name, } // Our default implementation reuses the non-sparse histogram. -Histogram* SparseHistogramFactoryGetEnumeration(const std::string& name, +Histogram* SparseHistogramFactoryGetEnumeration(absl::string_view name, int boundary) { return HistogramFactoryGetEnumeration(name, boundary); } @@ -273,7 +275,7 @@ void HistogramAdd(Histogram* histogram_pointer, int sample) { #endif // WEBRTC_EXCLUDE_METRICS_DEFAULT -SampleInfo::SampleInfo(const std::string& name, +SampleInfo::SampleInfo(absl::string_view name, int min, int max, size_t bucket_count) @@ -283,15 +285,16 @@ SampleInfo::~SampleInfo() {} // Implementation of global functions in metrics.h. 
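RtcHistogramMap above keeps its map keyed by std::string but now looks entries up with absl::string_view, which relies on rtc::AbslStringViewCmp being a transparent comparator. A minimal sketch of the same idea with std::string_view; the comparator name here is a stand-in, not the rtc implementation.

#include <map>
#include <memory>
#include <string>
#include <string_view>

// A comparator with an is_transparent tag lets std::map::find() accept any
// type comparable with the key, so lookups by string_view avoid building a
// temporary std::string.
struct StringViewCmp {
  using is_transparent = void;
  bool operator()(std::string_view a, std::string_view b) const {
    return a < b;
  }
};

int main() {
  std::map<std::string, std::unique_ptr<int>, StringViewCmp> histograms;
  histograms.emplace("WebRTC.Example", std::make_unique<int>(0));

  std::string_view name = "WebRTC.Example";  // No allocation for the lookup.
  auto it = histograms.find(name);
  return it != histograms.end() ? 0 : 1;
}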
void Enable() { - RTC_DCHECK(g_rtc_histogram_map == nullptr); + RTC_DCHECK(g_rtc_histogram_map.load() == nullptr); #if RTC_DCHECK_IS_ON - RTC_DCHECK_EQ(0, rtc::AtomicOps::AcquireLoad(&g_rtc_histogram_called)); + RTC_DCHECK_EQ(0, g_rtc_histogram_called.load(std::memory_order_acquire)); #endif CreateMap(); } void GetAndReset( - std::map>* histograms) { + std::map, rtc::AbslStringViewCmp>* + histograms) { histograms->clear(); RtcHistogramMap* map = GetMap(); if (map) @@ -304,22 +307,22 @@ void Reset() { map->Reset(); } -int NumEvents(const std::string& name, int sample) { +int NumEvents(absl::string_view name, int sample) { RtcHistogramMap* map = GetMap(); return map ? map->NumEvents(name, sample) : 0; } -int NumSamples(const std::string& name) { +int NumSamples(absl::string_view name) { RtcHistogramMap* map = GetMap(); return map ? map->NumSamples(name) : 0; } -int MinSample(const std::string& name) { +int MinSample(absl::string_view name) { RtcHistogramMap* map = GetMap(); return map ? map->MinSample(name) : -1; } -std::map Samples(const std::string& name) { +std::map Samples(absl::string_view name) { RtcHistogramMap* map = GetMap(); return map ? map->Samples(name) : std::map(); } diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/rtp_to_ntp_estimator.cc b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/rtp_to_ntp_estimator.cc index d0b0ad447f..ef5d9a7508 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/rtp_to_ntp_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/rtp_to_ntp_estimator.cc @@ -18,132 +18,85 @@ #include "api/array_view.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" namespace webrtc { namespace { // Maximum number of RTCP SR reports to use to map between RTP and NTP. -const size_t kNumRtcpReportsToUse = 20; +constexpr size_t kNumRtcpReportsToUse = 20; // Don't allow NTP timestamps to jump more than 1 hour. Chosen arbitrary as big // enough to not affect normal use-cases. Yet it is smaller than RTP wrap-around // half-period (90khz RTP clock wrap-arounds every 13.25 hours). After half of // wrap-around period it is impossible to unwrap RTP timestamps correctly. -const int kMaxAllowedRtcpNtpIntervalMs = 60 * 60 * 1000; - -bool Contains(const std::list& measurements, - const RtpToNtpEstimator::RtcpMeasurement& other) { - for (const auto& measurement : measurements) { - if (measurement.IsEqual(other)) - return true; - } - return false; -} +constexpr uint64_t kMaxAllowedRtcpNtpInterval = uint64_t{60 * 60} << 32; +} // namespace -// Given x[] and y[] writes out such k and b that line y=k*x+b approximates -// given points in the best way (Least Squares Method). -bool LinearRegression(rtc::ArrayView x, - rtc::ArrayView y, - double* k, - double* b) { - size_t n = x.size(); +void RtpToNtpEstimator::UpdateParameters() { + size_t n = measurements_.size(); if (n < 2) - return false; + return; - if (y.size() != n) - return false; + // Run linear regression: + // Given x[] and y[] writes out such k and b that line y=k*x+b approximates + // given points in the best way (Least Squares Method). 
+ auto x = [](const RtcpMeasurement& m) { + return static_cast(m.unwrapped_rtp_timestamp); + }; + auto y = [](const RtcpMeasurement& m) { + return static_cast(static_cast(m.ntp_time)); + }; double avg_x = 0; double avg_y = 0; - for (size_t i = 0; i < n; ++i) { - avg_x += x[i]; - avg_y += y[i]; + for (const RtcpMeasurement& m : measurements_) { + avg_x += x(m); + avg_y += y(m); } avg_x /= n; avg_y /= n; double variance_x = 0; double covariance_xy = 0; - for (size_t i = 0; i < n; ++i) { - double normalized_x = x[i] - avg_x; - double normalized_y = y[i] - avg_y; + for (const RtcpMeasurement& m : measurements_) { + double normalized_x = x(m) - avg_x; + double normalized_y = y(m) - avg_y; variance_x += normalized_x * normalized_x; covariance_xy += normalized_x * normalized_y; } if (std::fabs(variance_x) < 1e-8) - return false; - - *k = static_cast(covariance_xy / variance_x); - *b = static_cast(avg_y - (*k) * avg_x); - return true; -} - -} // namespace - -RtpToNtpEstimator::RtcpMeasurement::RtcpMeasurement(uint32_t ntp_secs, - uint32_t ntp_frac, - int64_t unwrapped_timestamp) - : ntp_time(ntp_secs, ntp_frac), - unwrapped_rtp_timestamp(unwrapped_timestamp) {} - -bool RtpToNtpEstimator::RtcpMeasurement::IsEqual( - const RtcpMeasurement& other) const { - // Use || since two equal timestamps will result in zero frequency and in - // RtpToNtpMs, `rtp_timestamp_ms` is estimated by dividing by the frequency. - return (ntp_time == other.ntp_time) || - (unwrapped_rtp_timestamp == other.unwrapped_rtp_timestamp); -} - -// Class for converting an RTP timestamp to the NTP domain. -RtpToNtpEstimator::RtpToNtpEstimator() : consecutive_invalid_samples_(0) {} - -RtpToNtpEstimator::~RtpToNtpEstimator() {} - -void RtpToNtpEstimator::UpdateParameters() { - if (measurements_.size() < 2) return; - std::vector x; - std::vector y; - x.reserve(measurements_.size()); - y.reserve(measurements_.size()); - for (auto it = measurements_.begin(); it != measurements_.end(); ++it) { - x.push_back(it->unwrapped_rtp_timestamp); - y.push_back(it->ntp_time.ToMs()); - } - double slope, offset; - - if (!LinearRegression(x, y, &slope, &offset)) { - return; - } - - params_.emplace(1 / slope, offset); + double k = covariance_xy / variance_x; + double b = avg_y - k * avg_x; + params_ = {{.slope = k, .offset = b}}; } -bool RtpToNtpEstimator::UpdateMeasurements(uint32_t ntp_secs, - uint32_t ntp_frac, - uint32_t rtp_timestamp, - bool* new_rtcp_sr) { - *new_rtcp_sr = false; - +RtpToNtpEstimator::UpdateResult RtpToNtpEstimator::UpdateMeasurements( + NtpTime ntp, + uint32_t rtp_timestamp) { int64_t unwrapped_rtp_timestamp = unwrapper_.Unwrap(rtp_timestamp); - RtcpMeasurement new_measurement(ntp_secs, ntp_frac, unwrapped_rtp_timestamp); + RtcpMeasurement new_measurement = { + .ntp_time = ntp, .unwrapped_rtp_timestamp = unwrapped_rtp_timestamp}; - if (Contains(measurements_, new_measurement)) { - // RTCP SR report already added. - return true; + for (const RtcpMeasurement& measurement : measurements_) { + // Use || since two equal timestamps will result in zero frequency. 
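UpdateParameters() above fits NtpTime ~ slope * rtp_timestamp + offset by ordinary least squares: slope = cov(x, y) / var(x), offset = mean(y) - slope * mean(x). A self-contained version of the same arithmetic, useful for sanity-checking slope and offset values outside the estimator; the names here are illustrative.

#include <cmath>
#include <cstddef>
#include <optional>
#include <vector>

struct LineFit {
  double slope;
  double offset;
};

// Least-squares fit of y = slope * x + offset over paired samples.
std::optional<LineFit> FitLine(const std::vector<double>& x,
                               const std::vector<double>& y) {
  const size_t n = x.size();
  if (n < 2 || y.size() != n) return std::nullopt;

  double avg_x = 0, avg_y = 0;
  for (size_t i = 0; i < n; ++i) {
    avg_x += x[i];
    avg_y += y[i];
  }
  avg_x /= n;
  avg_y /= n;

  double var_x = 0, cov_xy = 0;
  for (size_t i = 0; i < n; ++i) {
    var_x += (x[i] - avg_x) * (x[i] - avg_x);
    cov_xy += (x[i] - avg_x) * (y[i] - avg_y);
  }
  if (std::fabs(var_x) < 1e-8) return std::nullopt;  // Degenerate input.

  const double slope = cov_xy / var_x;
  return LineFit{slope, avg_y - slope * avg_x};
}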
+ if (measurement.ntp_time == ntp || + measurement.unwrapped_rtp_timestamp == unwrapped_rtp_timestamp) { + return kSameMeasurement; + } } if (!new_measurement.ntp_time.Valid()) - return false; + return kInvalidMeasurement; - int64_t ntp_ms_new = new_measurement.ntp_time.ToMs(); + uint64_t ntp_new = static_cast(new_measurement.ntp_time); bool invalid_sample = false; if (!measurements_.empty()) { int64_t old_rtp_timestamp = measurements_.front().unwrapped_rtp_timestamp; - int64_t old_ntp_ms = measurements_.front().ntp_time.ToMs(); - if (ntp_ms_new <= old_ntp_ms || - ntp_ms_new > old_ntp_ms + kMaxAllowedRtcpNtpIntervalMs) { + uint64_t old_ntp = static_cast(measurements_.front().ntp_time); + if (ntp_new <= old_ntp || ntp_new > old_ntp + kMaxAllowedRtcpNtpInterval) { invalid_sample = true; } else if (unwrapped_rtp_timestamp <= old_rtp_timestamp) { RTC_LOG(LS_WARNING) @@ -158,7 +111,7 @@ bool RtpToNtpEstimator::UpdateMeasurements(uint32_t ntp_secs, if (invalid_sample) { ++consecutive_invalid_samples_; if (consecutive_invalid_samples_ < kMaxInvalidSamples) { - return false; + return kInvalidMeasurement; } RTC_LOG(LS_WARNING) << "Multiple consecutively invalid RTCP SR reports, " "clearing measurements."; @@ -172,37 +125,29 @@ bool RtpToNtpEstimator::UpdateMeasurements(uint32_t ntp_secs, measurements_.pop_back(); measurements_.push_front(new_measurement); - *new_rtcp_sr = true; // List updated, calculate new parameters. UpdateParameters(); - return true; + return kNewMeasurement; } -bool RtpToNtpEstimator::Estimate(int64_t rtp_timestamp, - int64_t* ntp_timestamp_ms) const { +NtpTime RtpToNtpEstimator::Estimate(uint32_t rtp_timestamp) const { if (!params_) - return false; - - int64_t rtp_timestamp_unwrapped = unwrapper_.Unwrap(rtp_timestamp); - - // params_calculated_ should not be true unless ms params.frequency_khz has - // been calculated to something non zero. - RTC_DCHECK_NE(params_->frequency_khz, 0.0); - double rtp_ms = - static_cast(rtp_timestamp_unwrapped) / params_->frequency_khz + - params_->offset_ms + 0.5f; + return NtpTime(); - if (rtp_ms < 0) - return false; + double estimated = + static_cast(unwrapper_.Unwrap(rtp_timestamp)) * params_->slope + + params_->offset + 0.5f; - *ntp_timestamp_ms = rtp_ms; - - return true; + return NtpTime(rtc::saturated_cast(estimated)); } -const absl::optional RtpToNtpEstimator::params() - const { - return params_; +double RtpToNtpEstimator::EstimatedFrequencyKhz() const { + if (!params_.has_value()) { + return 0.0; + } + static constexpr double kNtpUnitPerMs = 4.294967296E6; // 2^32 / 1000. + return kNtpUnitPerMs / params_->slope; } + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c.cc b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c.cc new file mode 100644 index 0000000000..804133bc17 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c.cc @@ -0,0 +1,39 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. 
+ +#include "crc32c/crc32c.h" + +#include +#include + +#include "./crc32c_arm64.h" +#include "./crc32c_arm64_check.h" +#include "./crc32c_internal.h" +#include "./crc32c_sse42.h" +#include "./crc32c_sse42_check.h" + +namespace crc32c { + +uint32_t Extend(uint32_t crc, const uint8_t* data, size_t count) { +#if HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__)) + static bool can_use_sse42 = CanUseSse42(); + if (can_use_sse42) return ExtendSse42(crc, data, count); +#elif HAVE_ARM64_CRC32C + static bool can_use_arm64_crc32 = CanUseArm64Crc32(); + if (can_use_arm64_crc32) return ExtendArm64(crc, data, count); +#endif // HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__)) + + return ExtendPortable(crc, data, count); +} + +extern "C" uint32_t crc32c_extend(uint32_t crc, const uint8_t* data, + size_t count) { + return crc32c::Extend(crc, data, count); +} + +extern "C" uint32_t crc32c_value(const uint8_t* data, size_t count) { + return crc32c::Crc32c(data, count); +} + +} // namespace crc32c diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_arm64.cc b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_arm64.cc new file mode 100644 index 0000000000..2595135f6d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_arm64.cc @@ -0,0 +1,123 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. + +#include "./crc32c_arm64.h" + +// In a separate source file to allow this accelerated CRC32C function to be +// compiled with the appropriate compiler flags to enable ARM NEON CRC32C +// instructions. + +// This implementation is based on https://github.com/google/leveldb/pull/490. 
+ +#include +#include + +#include "./crc32c_internal.h" +#include "crc32c/crc32c_config.h" + +#if HAVE_ARM64_CRC32C + +#include +#include + +#define KBYTES 1032 +#define SEGMENTBYTES 256 + +// compute 8bytes for each segment parallelly +#define CRC32C32BYTES(P, IND) \ + do { \ + crc1 = __crc32cd( \ + crc1, *((const uint64_t *)(P) + (SEGMENTBYTES / 8) * 1 + (IND))); \ + crc2 = __crc32cd( \ + crc2, *((const uint64_t *)(P) + (SEGMENTBYTES / 8) * 2 + (IND))); \ + crc3 = __crc32cd( \ + crc3, *((const uint64_t *)(P) + (SEGMENTBYTES / 8) * 3 + (IND))); \ + crc0 = __crc32cd( \ + crc0, *((const uint64_t *)(P) + (SEGMENTBYTES / 8) * 0 + (IND))); \ + } while (0); + +// compute 8*8 bytes for each segment parallelly +#define CRC32C256BYTES(P, IND) \ + do { \ + CRC32C32BYTES((P), (IND)*8 + 0) \ + CRC32C32BYTES((P), (IND)*8 + 1) \ + CRC32C32BYTES((P), (IND)*8 + 2) \ + CRC32C32BYTES((P), (IND)*8 + 3) \ + CRC32C32BYTES((P), (IND)*8 + 4) \ + CRC32C32BYTES((P), (IND)*8 + 5) \ + CRC32C32BYTES((P), (IND)*8 + 6) \ + CRC32C32BYTES((P), (IND)*8 + 7) \ + } while (0); + +// compute 4*8*8 bytes for each segment parallelly +#define CRC32C1024BYTES(P) \ + do { \ + CRC32C256BYTES((P), 0) \ + CRC32C256BYTES((P), 1) \ + CRC32C256BYTES((P), 2) \ + CRC32C256BYTES((P), 3) \ + (P) += 4 * SEGMENTBYTES; \ + } while (0) + +namespace crc32c { + +uint32_t ExtendArm64(uint32_t crc, const uint8_t *data, size_t size) { + int64_t length = size; + uint32_t crc0, crc1, crc2, crc3; + uint64_t t0, t1, t2; + + // k0=CRC(x^(3*SEGMENTBYTES*8)), k1=CRC(x^(2*SEGMENTBYTES*8)), + // k2=CRC(x^(SEGMENTBYTES*8)) + const poly64_t k0 = 0x8d96551c, k1 = 0xbd6f81f8, k2 = 0xdcb17aa4; + + crc = crc ^ kCRC32Xor; + + while (length >= KBYTES) { + crc0 = crc; + crc1 = 0; + crc2 = 0; + crc3 = 0; + + // Process 1024 bytes in parallel. + CRC32C1024BYTES(data); + + // Merge the 4 partial CRC32C values. + t2 = (uint64_t)vmull_p64(crc2, k2); + t1 = (uint64_t)vmull_p64(crc1, k1); + t0 = (uint64_t)vmull_p64(crc0, k0); + crc = __crc32cd(crc3, *(uint64_t *)data); + data += sizeof(uint64_t); + crc ^= __crc32cd(0, t2); + crc ^= __crc32cd(0, t1); + crc ^= __crc32cd(0, t0); + + length -= KBYTES; + } + + while (length >= 8) { + crc = __crc32cd(crc, *(uint64_t *)data); + data += 8; + length -= 8; + } + + if (length & 4) { + crc = __crc32cw(crc, *(uint32_t *)data); + data += 4; + } + + if (length & 2) { + crc = __crc32ch(crc, *(uint16_t *)data); + data += 2; + } + + if (length & 1) { + crc = __crc32cb(crc, *data); + } + + return crc ^ kCRC32Xor; +} + +} // namespace crc32c + +#endif // HAVE_ARM64_CRC32C diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_arm64.h b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_arm64.h new file mode 100644 index 0000000000..dedd27e7ec --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_arm64.h @@ -0,0 +1,25 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. 
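For contrast with the interleaved ExtendArm64() above, here is a plain byte-at-a-time ARMv8 sketch using the same ACLE intrinsic family and the same pre/post conditioning; illustration only, not the shipped code path.

// Simplified ARMv8 sketch: same result as ExtendArm64(), without the parallel
// 1024-byte segments or the vmull_p64 merge step.
#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32)
#include <arm_acle.h>
#include <cstddef>
#include <cstdint>

uint32_t SimpleCrc32cArm64(uint32_t crc, const uint8_t* data, size_t size) {
  crc ^= 0xffffffffu;  // Pre-condition, matching kCRC32Xor.
  for (size_t i = 0; i < size; ++i) {
    crc = __crc32cb(crc, data[i]);
  }
  return crc ^ 0xffffffffu;  // Post-condition.
}
#endif  // __aarch64__ && __ARM_FEATURE_CRC32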
+ +// ARM-specific code + +#ifndef CRC32C_CRC32C_ARM_H_ +#define CRC32C_CRC32C_ARM_H_ + +#include +#include + +#include "crc32c/crc32c_config.h" + +#if HAVE_ARM64_CRC32C + +namespace crc32c { + +uint32_t ExtendArm64(uint32_t crc, const uint8_t* data, size_t count); + +} // namespace crc32c + +#endif // HAVE_ARM64_CRC32C + +#endif // CRC32C_CRC32C_ARM_H_ diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_arm64_check.h b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_arm64_check.h new file mode 100644 index 0000000000..c5a5e46fd5 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_arm64_check.h @@ -0,0 +1,66 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. + +// ARM-specific code checking for the availability of CRC32C instructions. + +#ifndef CRC32C_CRC32C_ARM_CHECK_H_ +#define CRC32C_CRC32C_ARM_CHECK_H_ + +#include +#include + +#include "crc32c/crc32c_config.h" + +#if HAVE_ARM64_CRC32C + +#ifdef __linux__ +#if HAVE_STRONG_GETAUXVAL +#include +#elif HAVE_WEAK_GETAUXVAL +// getauxval() is not available on Android until API level 20. Link it as a weak +// symbol. +extern "C" unsigned long getauxval(unsigned long type) __attribute__((weak)); + +#define AT_HWCAP 16 +#endif // HAVE_STRONG_GETAUXVAL || HAVE_WEAK_GETAUXVAL +#endif // defined (__linux__) + +#ifdef __APPLE__ +#include +#include +#endif // defined (__APPLE__) + +namespace crc32c { + +inline bool CanUseArm64Crc32() { +#if defined (__linux__) && (HAVE_STRONG_GETAUXVAL || HAVE_WEAK_GETAUXVAL) + // From 'arch/arm64/include/uapi/asm/hwcap.h' in Linux kernel source code. + constexpr unsigned long kHWCAP_PMULL = 1 << 4; + constexpr unsigned long kHWCAP_CRC32 = 1 << 7; + unsigned long hwcap = +#if HAVE_STRONG_GETAUXVAL + // Some compilers warn on (&getauxval != nullptr) in the block below. + getauxval(AT_HWCAP); +#elif HAVE_WEAK_GETAUXVAL + (&getauxval != nullptr) ? getauxval(AT_HWCAP) : 0; +#else +#error This is supposed to be nested inside a check for HAVE_*_GETAUXVAL. +#endif // HAVE_STRONG_GETAUXVAL + return (hwcap & (kHWCAP_PMULL | kHWCAP_CRC32)) == + (kHWCAP_PMULL | kHWCAP_CRC32); +#elif defined(__APPLE__) + int val = 0; + size_t len = sizeof(val); + return sysctlbyname("hw.optional.armv8_crc32", &val, &len, nullptr, 0) == 0 + && val != 0; +#else + return false; +#endif // HAVE_STRONG_GETAUXVAL || HAVE_WEAK_GETAUXVAL +} + +} // namespace crc32c + +#endif // HAVE_ARM64_CRC32C + +#endif // CRC32C_CRC32C_ARM_CHECK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_arm64_unittest.cc b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_arm64_unittest.cc new file mode 100644 index 0000000000..6f917d9c0c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_arm64_unittest.cc @@ -0,0 +1,24 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. 
+ +#include "gtest/gtest.h" + +#include "./crc32c_arm64.h" +#include "./crc32c_extend_unittests.h" + +namespace crc32c { + +#if HAVE_ARM64_CRC32C + +struct Arm64TestTraits { + static uint32_t Extend(uint32_t crc, const uint8_t* data, size_t count) { + return ExtendArm64(crc, data, count); + } +}; + +INSTANTIATE_TYPED_TEST_SUITE_P(Arm64, ExtendTest, Arm64TestTraits); + +#endif // HAVE_ARM64_CRC32C + +} // namespace crc32c diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_benchmark.cc b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_benchmark.cc new file mode 100644 index 0000000000..68510a2457 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_benchmark.cc @@ -0,0 +1,104 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. + +#include +#include + +#include "crc32c/crc32c_config.h" + +#include "benchmark/benchmark.h" + +#if CRC32C_TESTS_BUILT_WITH_GLOG +#include "glog/logging.h" +#endif // CRC32C_TESTS_BUILT_WITH_GLOG + +#include "./crc32c_arm64.h" +#include "./crc32c_arm64_check.h" +#include "./crc32c_internal.h" +#include "./crc32c_sse42.h" +#include "./crc32c_sse42_check.h" +#include "crc32c/crc32c.h" + +class CRC32CBenchmark : public benchmark::Fixture { + public: + void SetUp(const benchmark::State& state) override { + block_size_ = static_cast(state.range(0)); + block_data_ = std::string(block_size_, 'x'); + block_buffer_ = reinterpret_cast(block_data_.data()); + } + + protected: + std::string block_data_; + const uint8_t* block_buffer_; + size_t block_size_; +}; + +BENCHMARK_DEFINE_F(CRC32CBenchmark, Public)(benchmark::State& state) { + uint32_t crc = 0; + for (auto _ : state) + crc = crc32c::Extend(crc, block_buffer_, block_size_); + state.SetBytesProcessed(state.iterations() * block_size_); +} +BENCHMARK_REGISTER_F(CRC32CBenchmark, Public) + ->RangeMultiplier(16) + ->Range(256, 16777216); // Block size. + +BENCHMARK_DEFINE_F(CRC32CBenchmark, Portable)(benchmark::State& state) { + uint32_t crc = 0; + for (auto _ : state) + crc = crc32c::ExtendPortable(crc, block_buffer_, block_size_); + state.SetBytesProcessed(state.iterations() * block_size_); +} +BENCHMARK_REGISTER_F(CRC32CBenchmark, Portable) + ->RangeMultiplier(16) + ->Range(256, 16777216); // Block size. + +#if HAVE_ARM64_CRC32C + +BENCHMARK_DEFINE_F(CRC32CBenchmark, ArmCRC32C)(benchmark::State& state) { + if (!crc32c::CanUseArm64Crc32()) { + state.SkipWithError("ARM CRC32C instructions not available or not enabled"); + return; + } + + uint32_t crc = 0; + for (auto _ : state) + crc = crc32c::ExtendArm64(crc, block_buffer_, block_size_); + state.SetBytesProcessed(state.iterations() * block_size_); +} +BENCHMARK_REGISTER_F(CRC32CBenchmark, ArmCRC32C) + ->RangeMultiplier(16) + ->Range(256, 16777216); // Block size. + +#endif // HAVE_ARM64_CRC32C + +#if HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__)) + +BENCHMARK_DEFINE_F(CRC32CBenchmark, Sse42)(benchmark::State& state) { + if (!crc32c::CanUseSse42()) { + state.SkipWithError("SSE4.2 instructions not available or not enabled"); + return; + } + + uint32_t crc = 0; + for (auto _ : state) + crc = crc32c::ExtendSse42(crc, block_buffer_, block_size_); + state.SetBytesProcessed(state.iterations() * block_size_); +} +BENCHMARK_REGISTER_F(CRC32CBenchmark, Sse42) + ->RangeMultiplier(16) + ->Range(256, 16777216); // Block size. 
+ +#endif // HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__)) + +int main(int argc, char** argv) { +#if CRC32C_TESTS_BUILT_WITH_GLOG + google::InitGoogleLogging(argv[0]); + google::InstallFailureSignalHandler(); +#endif // CRC32C_TESTS_BUILT_WITH_GLOG + + benchmark::Initialize(&argc, argv); + benchmark::RunSpecifiedBenchmarks(); + return 0; +} diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_capi_unittest.c b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_capi_unittest.c new file mode 100644 index 0000000000..c8993a0959 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_capi_unittest.c @@ -0,0 +1,66 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. + +#include "crc32c/crc32c.h" + +#include +#include +#include +#include +#include + +int main() { + /* From rfc3720 section B.4. */ + uint8_t buf[32]; + + memset(buf, 0, sizeof(buf)); + if ((uint32_t)0x8a9136aa != crc32c_value(buf, sizeof(buf))) { + printf("crc32c_value(zeros) test failed\n"); + return 1; + } + + memset(buf, 0xff, sizeof(buf)); + if ((uint32_t)0x62a8ab43 != crc32c_value(buf, sizeof(buf))) { + printf("crc32c_value(0xff) test failed\n"); + return 1; + } + + for (size_t i = 0; i < 32; ++i) + buf[i] = (uint8_t)i; + if ((uint32_t)0x46dd794e != crc32c_value(buf, sizeof(buf))) { + printf("crc32c_value(0..31) test failed\n"); + return 1; + } + + for (size_t i = 0; i < 32; ++i) + buf[i] = (uint8_t)(31 - i); + if ((uint32_t)0x113fdb5c != crc32c_value(buf, sizeof(buf))) { + printf("crc32c_value(31..0) test failed\n"); + return 1; + } + + uint8_t data[48] = { + 0x01, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, + 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x18, 0x28, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + }; + if ((uint32_t)0xd9963a56 != crc32c_value(data, sizeof(data))) { + printf("crc32c_value(31..0) test failed\n"); + return 1; + } + + const uint8_t* hello_space_world = (const uint8_t*)"hello world"; + const uint8_t* hello_space = (const uint8_t*)"hello "; + const uint8_t* world = (const uint8_t*)"world"; + + if (crc32c_value(hello_space_world, 11) != + crc32c_extend(crc32c_value(hello_space, 6), world, 5)) { + printf("crc32c_extend test failed\n"); + return 1; + } + + printf("All tests passed\n"); + return 0; +} diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_extend_unittests.h b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_extend_unittests.h new file mode 100644 index 0000000000..0732973737 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_extend_unittests.h @@ -0,0 +1,112 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. + +#ifndef CRC32C_CRC32C_EXTEND_UNITTESTS_H_ +#define CRC32C_CRC32C_EXTEND_UNITTESTS_H_ + +#include +#include +#include + +#include "gtest/gtest.h" + +// Common test cases for all implementations of CRC32C_Extend(). + +namespace crc32c { + +template +class ExtendTest : public testing::Test {}; + +TYPED_TEST_SUITE_P(ExtendTest); + +TYPED_TEST_P(ExtendTest, StandardResults) { + // From rfc3720 section B.4. 
+ uint8_t buf[32]; + + std::memset(buf, 0, sizeof(buf)); + EXPECT_EQ(static_cast(0x8a9136aa), + TypeParam::Extend(0, buf, sizeof(buf))); + + std::memset(buf, 0xff, sizeof(buf)); + EXPECT_EQ(static_cast(0x62a8ab43), + TypeParam::Extend(0, buf, sizeof(buf))); + + for (int i = 0; i < 32; ++i) + buf[i] = static_cast(i); + EXPECT_EQ(static_cast(0x46dd794e), + TypeParam::Extend(0, buf, sizeof(buf))); + + for (int i = 0; i < 32; ++i) + buf[i] = static_cast(31 - i); + EXPECT_EQ(static_cast(0x113fdb5c), + TypeParam::Extend(0, buf, sizeof(buf))); + + uint8_t data[48] = { + 0x01, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, + 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x18, 0x28, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + }; + EXPECT_EQ(static_cast(0xd9963a56), + TypeParam::Extend(0, data, sizeof(data))); +} + +TYPED_TEST_P(ExtendTest, HelloWorld) { + const uint8_t* hello_space_world = + reinterpret_cast("hello world"); + const uint8_t* hello_space = reinterpret_cast("hello "); + const uint8_t* world = reinterpret_cast("world"); + + EXPECT_EQ(TypeParam::Extend(0, hello_space_world, 11), + TypeParam::Extend(TypeParam::Extend(0, hello_space, 6), world, 5)); +} + +TYPED_TEST_P(ExtendTest, BufferSlicing) { + uint8_t buffer[48] = { + 0x01, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, + 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x18, 0x28, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + }; + + for (size_t i = 0; i < 48; ++i) { + for (size_t j = i + 1; j <= 48; ++j) { + uint32_t crc = 0; + + if (i > 0) crc = TypeParam::Extend(crc, buffer, i); + crc = TypeParam::Extend(crc, buffer + i, j - i); + if (j < 48) crc = TypeParam::Extend(crc, buffer + j, 48 - j); + + EXPECT_EQ(static_cast(0xd9963a56), crc); + } + } +} + +TYPED_TEST_P(ExtendTest, LargeBufferSlicing) { + uint8_t buffer[2048]; + for (size_t i = 0; i < 2048; i++) + buffer[i] = static_cast(3 * i * i + 7 * i + 11); + + for (size_t i = 0; i < 2048; ++i) { + for (size_t j = i + 1; j <= 2048; ++j) { + uint32_t crc = 0; + + if (i > 0) crc = TypeParam::Extend(crc, buffer, i); + crc = TypeParam::Extend(crc, buffer + i, j - i); + if (j < 2048) crc = TypeParam::Extend(crc, buffer + j, 2048 - j); + + EXPECT_EQ(static_cast(0x36dcc753), crc); + } + } +} + +REGISTER_TYPED_TEST_SUITE_P(ExtendTest, + StandardResults, + HelloWorld, + BufferSlicing, + LargeBufferSlicing); + +} // namespace crc32c + +#endif // CRC32C_CRC32C_EXTEND_UNITTESTS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_internal.h b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_internal.h new file mode 100644 index 0000000000..2bd23dea43 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_internal.h @@ -0,0 +1,23 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. + +#ifndef CRC32C_CRC32C_INTERNAL_H_ +#define CRC32C_CRC32C_INTERNAL_H_ + +// Internal functions that may change between releases. + +#include +#include + +namespace crc32c { + +// Un-accelerated implementation that works on all CPUs. 
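Sketch of how another Extend-shaped routine could reuse the typed suite registered above; the traits struct below is a hypothetical wrapper that splits each buffer in half and chains the public Extend(), which should still satisfy every shared test case.

#include "gtest/gtest.h"

#include "./crc32c_extend_unittests.h"
#include "crc32c/crc32c.h"

namespace crc32c {

struct SplitExtendTestTraits {
  // Checksums the buffer in two chunks through the public API; the shared
  // suite then checks the result against the single-shot RFC 3720 vectors.
  static uint32_t Extend(uint32_t crc, const uint8_t* data, size_t count) {
    size_t half = count / 2;
    crc = crc32c::Extend(crc, data, half);
    return crc32c::Extend(crc, data + half, count - half);
  }
};

INSTANTIATE_TYPED_TEST_SUITE_P(SplitExtend, ExtendTest, SplitExtendTestTraits);

}  // namespace crc32c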
+uint32_t ExtendPortable(uint32_t crc, const uint8_t* data, size_t count); + +// CRCs are pre- and post- conditioned by xoring with all ones. +static constexpr const uint32_t kCRC32Xor = static_cast(0xffffffffU); + +} // namespace crc32c + +#endif // CRC32C_CRC32C_INTERNAL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_portable.cc b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_portable.cc new file mode 100644 index 0000000000..31ec6eac53 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_portable.cc @@ -0,0 +1,351 @@ +// Copyright 2008 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. + +#include "./crc32c_internal.h" + +#include +#include + +#include "./crc32c_prefetch.h" +#include "./crc32c_read_le.h" +#include "./crc32c_round_up.h" + +namespace { + +const uint32_t kByteExtensionTable[256] = { + 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, + 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, + 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c, + 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, + 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc, + 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a, + 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, + 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, + 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad, + 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, + 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf, + 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957, + 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, + 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927, + 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f, + 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, + 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e, + 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859, + 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, + 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6, + 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de, + 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, + 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4, + 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, + 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, + 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c, + 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5, + 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, + 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975, + 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, + 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, + 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, + 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8, + 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, + 0x8ecee914, 
0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, + 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540, + 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, + 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee, + 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6, + 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, + 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 0xc69f7b69, + 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e, + 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351}; + +const uint32_t kStrideExtensionTable0[256] = { + 0x00000000, 0x30d23865, 0x61a470ca, 0x517648af, 0xc348e194, 0xf39ad9f1, + 0xa2ec915e, 0x923ea93b, 0x837db5d9, 0xb3af8dbc, 0xe2d9c513, 0xd20bfd76, + 0x4035544d, 0x70e76c28, 0x21912487, 0x11431ce2, 0x03171d43, 0x33c52526, + 0x62b36d89, 0x526155ec, 0xc05ffcd7, 0xf08dc4b2, 0xa1fb8c1d, 0x9129b478, + 0x806aa89a, 0xb0b890ff, 0xe1ced850, 0xd11ce035, 0x4322490e, 0x73f0716b, + 0x228639c4, 0x125401a1, 0x062e3a86, 0x36fc02e3, 0x678a4a4c, 0x57587229, + 0xc566db12, 0xf5b4e377, 0xa4c2abd8, 0x941093bd, 0x85538f5f, 0xb581b73a, + 0xe4f7ff95, 0xd425c7f0, 0x461b6ecb, 0x76c956ae, 0x27bf1e01, 0x176d2664, + 0x053927c5, 0x35eb1fa0, 0x649d570f, 0x544f6f6a, 0xc671c651, 0xf6a3fe34, + 0xa7d5b69b, 0x97078efe, 0x8644921c, 0xb696aa79, 0xe7e0e2d6, 0xd732dab3, + 0x450c7388, 0x75de4bed, 0x24a80342, 0x147a3b27, 0x0c5c750c, 0x3c8e4d69, + 0x6df805c6, 0x5d2a3da3, 0xcf149498, 0xffc6acfd, 0xaeb0e452, 0x9e62dc37, + 0x8f21c0d5, 0xbff3f8b0, 0xee85b01f, 0xde57887a, 0x4c692141, 0x7cbb1924, + 0x2dcd518b, 0x1d1f69ee, 0x0f4b684f, 0x3f99502a, 0x6eef1885, 0x5e3d20e0, + 0xcc0389db, 0xfcd1b1be, 0xada7f911, 0x9d75c174, 0x8c36dd96, 0xbce4e5f3, + 0xed92ad5c, 0xdd409539, 0x4f7e3c02, 0x7fac0467, 0x2eda4cc8, 0x1e0874ad, + 0x0a724f8a, 0x3aa077ef, 0x6bd63f40, 0x5b040725, 0xc93aae1e, 0xf9e8967b, + 0xa89eded4, 0x984ce6b1, 0x890ffa53, 0xb9ddc236, 0xe8ab8a99, 0xd879b2fc, + 0x4a471bc7, 0x7a9523a2, 0x2be36b0d, 0x1b315368, 0x096552c9, 0x39b76aac, + 0x68c12203, 0x58131a66, 0xca2db35d, 0xfaff8b38, 0xab89c397, 0x9b5bfbf2, + 0x8a18e710, 0xbacadf75, 0xebbc97da, 0xdb6eafbf, 0x49500684, 0x79823ee1, + 0x28f4764e, 0x18264e2b, 0x18b8ea18, 0x286ad27d, 0x791c9ad2, 0x49cea2b7, + 0xdbf00b8c, 0xeb2233e9, 0xba547b46, 0x8a864323, 0x9bc55fc1, 0xab1767a4, + 0xfa612f0b, 0xcab3176e, 0x588dbe55, 0x685f8630, 0x3929ce9f, 0x09fbf6fa, + 0x1baff75b, 0x2b7dcf3e, 0x7a0b8791, 0x4ad9bff4, 0xd8e716cf, 0xe8352eaa, + 0xb9436605, 0x89915e60, 0x98d24282, 0xa8007ae7, 0xf9763248, 0xc9a40a2d, + 0x5b9aa316, 0x6b489b73, 0x3a3ed3dc, 0x0aecebb9, 0x1e96d09e, 0x2e44e8fb, + 0x7f32a054, 0x4fe09831, 0xddde310a, 0xed0c096f, 0xbc7a41c0, 0x8ca879a5, + 0x9deb6547, 0xad395d22, 0xfc4f158d, 0xcc9d2de8, 0x5ea384d3, 0x6e71bcb6, + 0x3f07f419, 0x0fd5cc7c, 0x1d81cddd, 0x2d53f5b8, 0x7c25bd17, 0x4cf78572, + 0xdec92c49, 0xee1b142c, 0xbf6d5c83, 0x8fbf64e6, 0x9efc7804, 0xae2e4061, + 0xff5808ce, 0xcf8a30ab, 0x5db49990, 0x6d66a1f5, 0x3c10e95a, 0x0cc2d13f, + 0x14e49f14, 0x2436a771, 0x7540efde, 0x4592d7bb, 0xd7ac7e80, 0xe77e46e5, + 0xb6080e4a, 0x86da362f, 0x97992acd, 0xa74b12a8, 0xf63d5a07, 0xc6ef6262, + 0x54d1cb59, 0x6403f33c, 0x3575bb93, 0x05a783f6, 0x17f38257, 0x2721ba32, + 0x7657f29d, 0x4685caf8, 0xd4bb63c3, 0xe4695ba6, 0xb51f1309, 0x85cd2b6c, + 0x948e378e, 0xa45c0feb, 0xf52a4744, 0xc5f87f21, 0x57c6d61a, 0x6714ee7f, + 0x3662a6d0, 0x06b09eb5, 0x12caa592, 0x22189df7, 0x736ed558, 0x43bced3d, + 0xd1824406, 0xe1507c63, 0xb02634cc, 0x80f40ca9, 0x91b7104b, 
0xa165282e, + 0xf0136081, 0xc0c158e4, 0x52fff1df, 0x622dc9ba, 0x335b8115, 0x0389b970, + 0x11ddb8d1, 0x210f80b4, 0x7079c81b, 0x40abf07e, 0xd2955945, 0xe2476120, + 0xb331298f, 0x83e311ea, 0x92a00d08, 0xa272356d, 0xf3047dc2, 0xc3d645a7, + 0x51e8ec9c, 0x613ad4f9, 0x304c9c56, 0x009ea433}; + +const uint32_t kStrideExtensionTable1[256] = { + 0x00000000, 0x54075546, 0xa80eaa8c, 0xfc09ffca, 0x55f123e9, 0x01f676af, + 0xfdff8965, 0xa9f8dc23, 0xabe247d2, 0xffe51294, 0x03eced5e, 0x57ebb818, + 0xfe13643b, 0xaa14317d, 0x561dceb7, 0x021a9bf1, 0x5228f955, 0x062fac13, + 0xfa2653d9, 0xae21069f, 0x07d9dabc, 0x53de8ffa, 0xafd77030, 0xfbd02576, + 0xf9cabe87, 0xadcdebc1, 0x51c4140b, 0x05c3414d, 0xac3b9d6e, 0xf83cc828, + 0x043537e2, 0x503262a4, 0xa451f2aa, 0xf056a7ec, 0x0c5f5826, 0x58580d60, + 0xf1a0d143, 0xa5a78405, 0x59ae7bcf, 0x0da92e89, 0x0fb3b578, 0x5bb4e03e, + 0xa7bd1ff4, 0xf3ba4ab2, 0x5a429691, 0x0e45c3d7, 0xf24c3c1d, 0xa64b695b, + 0xf6790bff, 0xa27e5eb9, 0x5e77a173, 0x0a70f435, 0xa3882816, 0xf78f7d50, + 0x0b86829a, 0x5f81d7dc, 0x5d9b4c2d, 0x099c196b, 0xf595e6a1, 0xa192b3e7, + 0x086a6fc4, 0x5c6d3a82, 0xa064c548, 0xf463900e, 0x4d4f93a5, 0x1948c6e3, + 0xe5413929, 0xb1466c6f, 0x18beb04c, 0x4cb9e50a, 0xb0b01ac0, 0xe4b74f86, + 0xe6add477, 0xb2aa8131, 0x4ea37efb, 0x1aa42bbd, 0xb35cf79e, 0xe75ba2d8, + 0x1b525d12, 0x4f550854, 0x1f676af0, 0x4b603fb6, 0xb769c07c, 0xe36e953a, + 0x4a964919, 0x1e911c5f, 0xe298e395, 0xb69fb6d3, 0xb4852d22, 0xe0827864, + 0x1c8b87ae, 0x488cd2e8, 0xe1740ecb, 0xb5735b8d, 0x497aa447, 0x1d7df101, + 0xe91e610f, 0xbd193449, 0x4110cb83, 0x15179ec5, 0xbcef42e6, 0xe8e817a0, + 0x14e1e86a, 0x40e6bd2c, 0x42fc26dd, 0x16fb739b, 0xeaf28c51, 0xbef5d917, + 0x170d0534, 0x430a5072, 0xbf03afb8, 0xeb04fafe, 0xbb36985a, 0xef31cd1c, + 0x133832d6, 0x473f6790, 0xeec7bbb3, 0xbac0eef5, 0x46c9113f, 0x12ce4479, + 0x10d4df88, 0x44d38ace, 0xb8da7504, 0xecdd2042, 0x4525fc61, 0x1122a927, + 0xed2b56ed, 0xb92c03ab, 0x9a9f274a, 0xce98720c, 0x32918dc6, 0x6696d880, + 0xcf6e04a3, 0x9b6951e5, 0x6760ae2f, 0x3367fb69, 0x317d6098, 0x657a35de, + 0x9973ca14, 0xcd749f52, 0x648c4371, 0x308b1637, 0xcc82e9fd, 0x9885bcbb, + 0xc8b7de1f, 0x9cb08b59, 0x60b97493, 0x34be21d5, 0x9d46fdf6, 0xc941a8b0, + 0x3548577a, 0x614f023c, 0x635599cd, 0x3752cc8b, 0xcb5b3341, 0x9f5c6607, + 0x36a4ba24, 0x62a3ef62, 0x9eaa10a8, 0xcaad45ee, 0x3eced5e0, 0x6ac980a6, + 0x96c07f6c, 0xc2c72a2a, 0x6b3ff609, 0x3f38a34f, 0xc3315c85, 0x973609c3, + 0x952c9232, 0xc12bc774, 0x3d2238be, 0x69256df8, 0xc0ddb1db, 0x94dae49d, + 0x68d31b57, 0x3cd44e11, 0x6ce62cb5, 0x38e179f3, 0xc4e88639, 0x90efd37f, + 0x39170f5c, 0x6d105a1a, 0x9119a5d0, 0xc51ef096, 0xc7046b67, 0x93033e21, + 0x6f0ac1eb, 0x3b0d94ad, 0x92f5488e, 0xc6f21dc8, 0x3afbe202, 0x6efcb744, + 0xd7d0b4ef, 0x83d7e1a9, 0x7fde1e63, 0x2bd94b25, 0x82219706, 0xd626c240, + 0x2a2f3d8a, 0x7e2868cc, 0x7c32f33d, 0x2835a67b, 0xd43c59b1, 0x803b0cf7, + 0x29c3d0d4, 0x7dc48592, 0x81cd7a58, 0xd5ca2f1e, 0x85f84dba, 0xd1ff18fc, + 0x2df6e736, 0x79f1b270, 0xd0096e53, 0x840e3b15, 0x7807c4df, 0x2c009199, + 0x2e1a0a68, 0x7a1d5f2e, 0x8614a0e4, 0xd213f5a2, 0x7beb2981, 0x2fec7cc7, + 0xd3e5830d, 0x87e2d64b, 0x73814645, 0x27861303, 0xdb8fecc9, 0x8f88b98f, + 0x267065ac, 0x727730ea, 0x8e7ecf20, 0xda799a66, 0xd8630197, 0x8c6454d1, + 0x706dab1b, 0x246afe5d, 0x8d92227e, 0xd9957738, 0x259c88f2, 0x719bddb4, + 0x21a9bf10, 0x75aeea56, 0x89a7159c, 0xdda040da, 0x74589cf9, 0x205fc9bf, + 0xdc563675, 0x88516333, 0x8a4bf8c2, 0xde4cad84, 0x2245524e, 0x76420708, + 0xdfbadb2b, 0x8bbd8e6d, 0x77b471a7, 0x23b324e1}; + +const uint32_t kStrideExtensionTable2[256] = { + 
0x00000000, 0x678efd01, 0xcf1dfa02, 0xa8930703, 0x9bd782f5, 0xfc597ff4, + 0x54ca78f7, 0x334485f6, 0x3243731b, 0x55cd8e1a, 0xfd5e8919, 0x9ad07418, + 0xa994f1ee, 0xce1a0cef, 0x66890bec, 0x0107f6ed, 0x6486e636, 0x03081b37, + 0xab9b1c34, 0xcc15e135, 0xff5164c3, 0x98df99c2, 0x304c9ec1, 0x57c263c0, + 0x56c5952d, 0x314b682c, 0x99d86f2f, 0xfe56922e, 0xcd1217d8, 0xaa9cead9, + 0x020fedda, 0x658110db, 0xc90dcc6c, 0xae83316d, 0x0610366e, 0x619ecb6f, + 0x52da4e99, 0x3554b398, 0x9dc7b49b, 0xfa49499a, 0xfb4ebf77, 0x9cc04276, + 0x34534575, 0x53ddb874, 0x60993d82, 0x0717c083, 0xaf84c780, 0xc80a3a81, + 0xad8b2a5a, 0xca05d75b, 0x6296d058, 0x05182d59, 0x365ca8af, 0x51d255ae, + 0xf94152ad, 0x9ecfafac, 0x9fc85941, 0xf846a440, 0x50d5a343, 0x375b5e42, + 0x041fdbb4, 0x639126b5, 0xcb0221b6, 0xac8cdcb7, 0x97f7ee29, 0xf0791328, + 0x58ea142b, 0x3f64e92a, 0x0c206cdc, 0x6bae91dd, 0xc33d96de, 0xa4b36bdf, + 0xa5b49d32, 0xc23a6033, 0x6aa96730, 0x0d279a31, 0x3e631fc7, 0x59ede2c6, + 0xf17ee5c5, 0x96f018c4, 0xf371081f, 0x94fff51e, 0x3c6cf21d, 0x5be20f1c, + 0x68a68aea, 0x0f2877eb, 0xa7bb70e8, 0xc0358de9, 0xc1327b04, 0xa6bc8605, + 0x0e2f8106, 0x69a17c07, 0x5ae5f9f1, 0x3d6b04f0, 0x95f803f3, 0xf276fef2, + 0x5efa2245, 0x3974df44, 0x91e7d847, 0xf6692546, 0xc52da0b0, 0xa2a35db1, + 0x0a305ab2, 0x6dbea7b3, 0x6cb9515e, 0x0b37ac5f, 0xa3a4ab5c, 0xc42a565d, + 0xf76ed3ab, 0x90e02eaa, 0x387329a9, 0x5ffdd4a8, 0x3a7cc473, 0x5df23972, + 0xf5613e71, 0x92efc370, 0xa1ab4686, 0xc625bb87, 0x6eb6bc84, 0x09384185, + 0x083fb768, 0x6fb14a69, 0xc7224d6a, 0xa0acb06b, 0x93e8359d, 0xf466c89c, + 0x5cf5cf9f, 0x3b7b329e, 0x2a03aaa3, 0x4d8d57a2, 0xe51e50a1, 0x8290ada0, + 0xb1d42856, 0xd65ad557, 0x7ec9d254, 0x19472f55, 0x1840d9b8, 0x7fce24b9, + 0xd75d23ba, 0xb0d3debb, 0x83975b4d, 0xe419a64c, 0x4c8aa14f, 0x2b045c4e, + 0x4e854c95, 0x290bb194, 0x8198b697, 0xe6164b96, 0xd552ce60, 0xb2dc3361, + 0x1a4f3462, 0x7dc1c963, 0x7cc63f8e, 0x1b48c28f, 0xb3dbc58c, 0xd455388d, + 0xe711bd7b, 0x809f407a, 0x280c4779, 0x4f82ba78, 0xe30e66cf, 0x84809bce, + 0x2c139ccd, 0x4b9d61cc, 0x78d9e43a, 0x1f57193b, 0xb7c41e38, 0xd04ae339, + 0xd14d15d4, 0xb6c3e8d5, 0x1e50efd6, 0x79de12d7, 0x4a9a9721, 0x2d146a20, + 0x85876d23, 0xe2099022, 0x878880f9, 0xe0067df8, 0x48957afb, 0x2f1b87fa, + 0x1c5f020c, 0x7bd1ff0d, 0xd342f80e, 0xb4cc050f, 0xb5cbf3e2, 0xd2450ee3, + 0x7ad609e0, 0x1d58f4e1, 0x2e1c7117, 0x49928c16, 0xe1018b15, 0x868f7614, + 0xbdf4448a, 0xda7ab98b, 0x72e9be88, 0x15674389, 0x2623c67f, 0x41ad3b7e, + 0xe93e3c7d, 0x8eb0c17c, 0x8fb73791, 0xe839ca90, 0x40aacd93, 0x27243092, + 0x1460b564, 0x73ee4865, 0xdb7d4f66, 0xbcf3b267, 0xd972a2bc, 0xbefc5fbd, + 0x166f58be, 0x71e1a5bf, 0x42a52049, 0x252bdd48, 0x8db8da4b, 0xea36274a, + 0xeb31d1a7, 0x8cbf2ca6, 0x242c2ba5, 0x43a2d6a4, 0x70e65352, 0x1768ae53, + 0xbffba950, 0xd8755451, 0x74f988e6, 0x137775e7, 0xbbe472e4, 0xdc6a8fe5, + 0xef2e0a13, 0x88a0f712, 0x2033f011, 0x47bd0d10, 0x46bafbfd, 0x213406fc, + 0x89a701ff, 0xee29fcfe, 0xdd6d7908, 0xbae38409, 0x1270830a, 0x75fe7e0b, + 0x107f6ed0, 0x77f193d1, 0xdf6294d2, 0xb8ec69d3, 0x8ba8ec25, 0xec261124, + 0x44b51627, 0x233beb26, 0x223c1dcb, 0x45b2e0ca, 0xed21e7c9, 0x8aaf1ac8, + 0xb9eb9f3e, 0xde65623f, 0x76f6653c, 0x1178983d}; + +const uint32_t kStrideExtensionTable3[256] = { + 0x00000000, 0xf20c0dfe, 0xe1f46d0d, 0x13f860f3, 0xc604aceb, 0x3408a115, + 0x27f0c1e6, 0xd5fccc18, 0x89e52f27, 0x7be922d9, 0x6811422a, 0x9a1d4fd4, + 0x4fe183cc, 0xbded8e32, 0xae15eec1, 0x5c19e33f, 0x162628bf, 0xe42a2541, + 0xf7d245b2, 0x05de484c, 0xd0228454, 0x222e89aa, 0x31d6e959, 0xc3dae4a7, + 0x9fc30798, 0x6dcf0a66, 0x7e376a95, 0x8c3b676b, 
0x59c7ab73, 0xabcba68d, + 0xb833c67e, 0x4a3fcb80, 0x2c4c517e, 0xde405c80, 0xcdb83c73, 0x3fb4318d, + 0xea48fd95, 0x1844f06b, 0x0bbc9098, 0xf9b09d66, 0xa5a97e59, 0x57a573a7, + 0x445d1354, 0xb6511eaa, 0x63add2b2, 0x91a1df4c, 0x8259bfbf, 0x7055b241, + 0x3a6a79c1, 0xc866743f, 0xdb9e14cc, 0x29921932, 0xfc6ed52a, 0x0e62d8d4, + 0x1d9ab827, 0xef96b5d9, 0xb38f56e6, 0x41835b18, 0x527b3beb, 0xa0773615, + 0x758bfa0d, 0x8787f7f3, 0x947f9700, 0x66739afe, 0x5898a2fc, 0xaa94af02, + 0xb96ccff1, 0x4b60c20f, 0x9e9c0e17, 0x6c9003e9, 0x7f68631a, 0x8d646ee4, + 0xd17d8ddb, 0x23718025, 0x3089e0d6, 0xc285ed28, 0x17792130, 0xe5752cce, + 0xf68d4c3d, 0x048141c3, 0x4ebe8a43, 0xbcb287bd, 0xaf4ae74e, 0x5d46eab0, + 0x88ba26a8, 0x7ab62b56, 0x694e4ba5, 0x9b42465b, 0xc75ba564, 0x3557a89a, + 0x26afc869, 0xd4a3c597, 0x015f098f, 0xf3530471, 0xe0ab6482, 0x12a7697c, + 0x74d4f382, 0x86d8fe7c, 0x95209e8f, 0x672c9371, 0xb2d05f69, 0x40dc5297, + 0x53243264, 0xa1283f9a, 0xfd31dca5, 0x0f3dd15b, 0x1cc5b1a8, 0xeec9bc56, + 0x3b35704e, 0xc9397db0, 0xdac11d43, 0x28cd10bd, 0x62f2db3d, 0x90fed6c3, + 0x8306b630, 0x710abbce, 0xa4f677d6, 0x56fa7a28, 0x45021adb, 0xb70e1725, + 0xeb17f41a, 0x191bf9e4, 0x0ae39917, 0xf8ef94e9, 0x2d1358f1, 0xdf1f550f, + 0xcce735fc, 0x3eeb3802, 0xb13145f8, 0x433d4806, 0x50c528f5, 0xa2c9250b, + 0x7735e913, 0x8539e4ed, 0x96c1841e, 0x64cd89e0, 0x38d46adf, 0xcad86721, + 0xd92007d2, 0x2b2c0a2c, 0xfed0c634, 0x0cdccbca, 0x1f24ab39, 0xed28a6c7, + 0xa7176d47, 0x551b60b9, 0x46e3004a, 0xb4ef0db4, 0x6113c1ac, 0x931fcc52, + 0x80e7aca1, 0x72eba15f, 0x2ef24260, 0xdcfe4f9e, 0xcf062f6d, 0x3d0a2293, + 0xe8f6ee8b, 0x1afae375, 0x09028386, 0xfb0e8e78, 0x9d7d1486, 0x6f711978, + 0x7c89798b, 0x8e857475, 0x5b79b86d, 0xa975b593, 0xba8dd560, 0x4881d89e, + 0x14983ba1, 0xe694365f, 0xf56c56ac, 0x07605b52, 0xd29c974a, 0x20909ab4, + 0x3368fa47, 0xc164f7b9, 0x8b5b3c39, 0x795731c7, 0x6aaf5134, 0x98a35cca, + 0x4d5f90d2, 0xbf539d2c, 0xacabfddf, 0x5ea7f021, 0x02be131e, 0xf0b21ee0, + 0xe34a7e13, 0x114673ed, 0xc4babff5, 0x36b6b20b, 0x254ed2f8, 0xd742df06, + 0xe9a9e704, 0x1ba5eafa, 0x085d8a09, 0xfa5187f7, 0x2fad4bef, 0xdda14611, + 0xce5926e2, 0x3c552b1c, 0x604cc823, 0x9240c5dd, 0x81b8a52e, 0x73b4a8d0, + 0xa64864c8, 0x54446936, 0x47bc09c5, 0xb5b0043b, 0xff8fcfbb, 0x0d83c245, + 0x1e7ba2b6, 0xec77af48, 0x398b6350, 0xcb876eae, 0xd87f0e5d, 0x2a7303a3, + 0x766ae09c, 0x8466ed62, 0x979e8d91, 0x6592806f, 0xb06e4c77, 0x42624189, + 0x519a217a, 0xa3962c84, 0xc5e5b67a, 0x37e9bb84, 0x2411db77, 0xd61dd689, + 0x03e11a91, 0xf1ed176f, 0xe215779c, 0x10197a62, 0x4c00995d, 0xbe0c94a3, + 0xadf4f450, 0x5ff8f9ae, 0x8a0435b6, 0x78083848, 0x6bf058bb, 0x99fc5545, + 0xd3c39ec5, 0x21cf933b, 0x3237f3c8, 0xc03bfe36, 0x15c7322e, 0xe7cb3fd0, + 0xf4335f23, 0x063f52dd, 0x5a26b1e2, 0xa82abc1c, 0xbbd2dcef, 0x49ded111, + 0x9c221d09, 0x6e2e10f7, 0x7dd67004, 0x8fda7dfa}; + +constexpr const ptrdiff_t kPrefetchHorizon = 256; + +} // namespace + +namespace crc32c { + +uint32_t ExtendPortable(uint32_t crc, const uint8_t* data, size_t size) { + const uint8_t* p = data; + const uint8_t* e = p + size; + uint32_t l = crc ^ kCRC32Xor; + +// Process one byte at a time. +#define STEP1 \ + do { \ + int c = (l & 0xff) ^ *p++; \ + l = kByteExtensionTable[c] ^ (l >> 8); \ + } while (0) + +// Process one of the 4 strides of 4-byte data. 
+#define STEP4(s) \ + do { \ + crc##s = ReadUint32LE(p + s * 4) ^ kStrideExtensionTable3[crc##s & 0xff] ^ \ + kStrideExtensionTable2[(crc##s >> 8) & 0xff] ^ \ + kStrideExtensionTable1[(crc##s >> 16) & 0xff] ^ \ + kStrideExtensionTable0[crc##s >> 24]; \ + } while (0) + +// Process a 16-byte swath of 4 strides, each of which has 4 bytes of data. +#define STEP16 \ + do { \ + STEP4(0); \ + STEP4(1); \ + STEP4(2); \ + STEP4(3); \ + p += 16; \ + } while (0) + +// Process 4 bytes that were already loaded into a word. +#define STEP4W(w) \ + do { \ + w ^= l; \ + for (size_t i = 0; i < 4; ++i) { \ + w = (w >> 8) ^ kByteExtensionTable[w & 0xff]; \ + } \ + l = w; \ + } while (0) + + // Point x at first 4-byte aligned byte in the buffer. This might be past the + // end of the buffer. + const uint8_t* x = RoundUp<4>(p); + if (x <= e) { + // Process bytes p is 4-byte aligned. + while (p != x) { + STEP1; + } + } + + if ((e - p) >= 16) { + // Load a 16-byte swath into the stride partial results. + uint32_t crc0 = ReadUint32LE(p + 0 * 4) ^ l; + uint32_t crc1 = ReadUint32LE(p + 1 * 4); + uint32_t crc2 = ReadUint32LE(p + 2 * 4); + uint32_t crc3 = ReadUint32LE(p + 3 * 4); + p += 16; + + while ((e - p) > kPrefetchHorizon) { + RequestPrefetch(p + kPrefetchHorizon); + + // Process 64 bytes at a time. + STEP16; + STEP16; + STEP16; + STEP16; + } + + // Process one 16-byte swath at a time. + while ((e - p) >= 16) { + STEP16; + } + + // Advance one word at a time as far as possible. + while ((e - p) >= 4) { + STEP4(0); + uint32_t tmp = crc0; + crc0 = crc1; + crc1 = crc2; + crc2 = crc3; + crc3 = tmp; + p += 4; + } + + // Combine the 4 partial stride results. + l = 0; + STEP4W(crc0); + STEP4W(crc1); + STEP4W(crc2); + STEP4W(crc3); + } + + // Process the last few bytes. + while (p != e) { + STEP1; + } +#undef STEP4W +#undef STEP16 +#undef STEP4 +#undef STEP1 + return l ^ kCRC32Xor; +} + +} // namespace crc32c diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_portable_unittest.cc b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_portable_unittest.cc new file mode 100644 index 0000000000..5098e2c373 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_portable_unittest.cc @@ -0,0 +1,20 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. + +#include "gtest/gtest.h" + +#include "./crc32c_extend_unittests.h" +#include "./crc32c_internal.h" + +namespace crc32c { + +struct PortableTestTraits { + static uint32_t Extend(uint32_t crc, const uint8_t* data, size_t count) { + return ExtendPortable(crc, data, count); + } +}; + +INSTANTIATE_TYPED_TEST_SUITE_P(Portable, ExtendTest, PortableTestTraits); + +} // namespace crc32c diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_prefetch.h b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_prefetch.h new file mode 100644 index 0000000000..e8df540494 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_prefetch.h @@ -0,0 +1,44 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. 
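To make the table-driven update above concrete: STEP1 is the classic reflected CRC-32C byte update, and the hard-coded kByteExtensionTable can be rederived from the polynomial 0x82F63B78. A standalone sketch (deliberately slow, recomputing the table entry per byte) that reproduces the RFC 3720 zero-buffer vector used by the unit tests:

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstring>

// Equivalent of one kByteExtensionTable entry, derived from the polynomial.
static uint32_t ByteTable(uint8_t index) {
  uint32_t crc = index;
  for (int bit = 0; bit < 8; ++bit)
    crc = (crc >> 1) ^ (0x82F63B78u & (0u - (crc & 1u)));
  return crc;
}

static uint32_t Crc32cSimple(const uint8_t* data, size_t size) {
  uint32_t crc = 0xffffffffu;  // Pre-condition (kCRC32Xor).
  for (size_t i = 0; i < size; ++i)
    crc = ByteTable(static_cast<uint8_t>(crc ^ data[i])) ^ (crc >> 8);
  return crc ^ 0xffffffffu;    // Post-condition.
}

int main() {
  uint8_t buf[32];
  std::memset(buf, 0, sizeof(buf));
  // Matches the RFC 3720 B.4 vector checked by the unit tests: 8a9136aa.
  std::printf("%08x\n", Crc32cSimple(buf, sizeof(buf)));
}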
+ +#ifndef CRC32C_CRC32C_PREFETCH_H_ +#define CRC32C_CRC32C_PREFETCH_H_ + +#include +#include + +#include "crc32c/crc32c_config.h" + +#if HAVE_MM_PREFETCH + +#if defined(_MSC_VER) +#include +#else // !defined(_MSC_VER) +#include +#endif // defined(_MSC_VER) + +#endif // HAVE_MM_PREFETCH + +namespace crc32c { + +// Ask the hardware to prefetch the data at the given address into the L1 cache. +inline void RequestPrefetch(const uint8_t* address) { +#if HAVE_BUILTIN_PREFETCH + // Clang and GCC implement the __builtin_prefetch non-standard extension, + // which maps to the best instruction on the target architecture. + __builtin_prefetch(reinterpret_cast(address), 0 /* Read only. */, + 0 /* No temporal locality. */); +#elif HAVE_MM_PREFETCH + // Visual Studio doesn't implement __builtin_prefetch, but exposes the + // PREFETCHNTA instruction via the _mm_prefetch intrinsic. + _mm_prefetch(reinterpret_cast(address), _MM_HINT_NTA); +#else + // No prefetch support. Silence compiler warnings. + (void)address; +#endif // HAVE_BUILTIN_PREFETCH +} + +} // namespace crc32c + +#endif // CRC32C_CRC32C_ROUND_UP_H_ diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_prefetch_unittest.cc b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_prefetch_unittest.cc new file mode 100644 index 0000000000..b34ed2d5fe --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_prefetch_unittest.cc @@ -0,0 +1,9 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. + +#include "./crc32c_prefetch.h" + +// There is no easy way to test cache prefetching. We can only test that the +// crc32c_prefetch.h header compiles on its own, so it doesn't have any unstated +// dependencies. diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_read_le.h b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_read_le.h new file mode 100644 index 0000000000..1ebcf5d390 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_read_le.h @@ -0,0 +1,51 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. + +#ifndef CRC32C_CRC32C_READ_LE_H_ +#define CRC32C_CRC32C_READ_LE_H_ + +#include +#include + +#include "crc32c/crc32c_config.h" + +namespace crc32c { + +// Reads a little-endian 32-bit integer from a 32-bit-aligned buffer. +inline uint32_t ReadUint32LE(const uint8_t* buffer) { +#if BYTE_ORDER_BIG_ENDIAN + return ((static_cast(static_cast(buffer[0]))) | + (static_cast(static_cast(buffer[1])) << 8) | + (static_cast(static_cast(buffer[2])) << 16) | + (static_cast(static_cast(buffer[3])) << 24)); +#else // !BYTE_ORDER_BIG_ENDIAN + uint32_t result; + // This should be optimized to a single instruction. + std::memcpy(&result, buffer, sizeof(result)); + return result; +#endif // BYTE_ORDER_BIG_ENDIAN +} + +// Reads a little-endian 64-bit integer from a 64-bit-aligned buffer. 
+inline uint64_t ReadUint64LE(const uint8_t* buffer) { +#if BYTE_ORDER_BIG_ENDIAN + return ((static_cast(static_cast(buffer[0]))) | + (static_cast(static_cast(buffer[1])) << 8) | + (static_cast(static_cast(buffer[2])) << 16) | + (static_cast(static_cast(buffer[3])) << 24) | + (static_cast(static_cast(buffer[4])) << 32) | + (static_cast(static_cast(buffer[5])) << 40) | + (static_cast(static_cast(buffer[6])) << 48) | + (static_cast(static_cast(buffer[7])) << 56)); +#else // !BYTE_ORDER_BIG_ENDIAN + uint64_t result; + // This should be optimized to a single instruction. + std::memcpy(&result, buffer, sizeof(result)); + return result; +#endif // BYTE_ORDER_BIG_ENDIAN +} + +} // namespace crc32c + +#endif // CRC32C_CRC32C_READ_LE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_read_le_unittest.cc b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_read_le_unittest.cc new file mode 100644 index 0000000000..2a30302adf --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_read_le_unittest.cc @@ -0,0 +1,32 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. + +#include "./crc32c_read_le.h" + +#include +#include + +#include "gtest/gtest.h" + +#include "./crc32c_round_up.h" + +namespace crc32c { + +TEST(Crc32CReadLETest, ReadUint32LE) { + // little-endian 0x12345678 + alignas(4) uint8_t bytes[] = {0x78, 0x56, 0x34, 0x12}; + + ASSERT_EQ(RoundUp<4>(bytes), bytes) << "Stack array is not aligned"; + EXPECT_EQ(static_cast(0x12345678), ReadUint32LE(bytes)); +} + +TEST(Crc32CReadLETest, ReadUint64LE) { + // little-endian 0x123456789ABCDEF0 + alignas(8) uint8_t bytes[] = {0xF0, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34, 0x12}; + + ASSERT_EQ(RoundUp<8>(bytes), bytes) << "Stack array is not aligned"; + EXPECT_EQ(static_cast(0x123456789ABCDEF0), ReadUint64LE(bytes)); +} + +} // namespace crc32c diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_round_up.h b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_round_up.h new file mode 100644 index 0000000000..d3b922beb9 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_round_up.h @@ -0,0 +1,34 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. + +#ifndef CRC32C_CRC32C_ROUND_UP_H_ +#define CRC32C_CRC32C_ROUND_UP_H_ + +#include +#include + +namespace crc32c { + +// Returns the smallest number >= the given number that is evenly divided by N. +// +// N must be a power of two. +template +constexpr inline uintptr_t RoundUp(uintptr_t pointer) { + static_assert((N & (N - 1)) == 0, "N must be a power of two"); + return (pointer + (N - 1)) & ~(N - 1); +} + +// Returns the smallest address >= the given address that is aligned to N bytes. +// +// N must be a power of two. 
+template +constexpr inline const uint8_t* RoundUp(const uint8_t* pointer) { + static_assert((N & (N - 1)) == 0, "N must be a power of two"); + return reinterpret_cast( + RoundUp(reinterpret_cast(pointer))); +} + +} // namespace crc32c + +#endif // CRC32C_CRC32C_ROUND_UP_H_ diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_round_up_unittest.cc b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_round_up_unittest.cc new file mode 100644 index 0000000000..5ff657bb5c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_round_up_unittest.cc @@ -0,0 +1,84 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. + +#include "./crc32c_round_up.h" + +#include +#include + +#include "gtest/gtest.h" + +namespace crc32c { + +TEST(CRC32CRoundUpTest, RoundUpUintptr) { + uintptr_t zero = 0; + + ASSERT_EQ(zero, RoundUp<1>(zero)); + ASSERT_EQ(1U, RoundUp<1>(1U)); + ASSERT_EQ(2U, RoundUp<1>(2U)); + ASSERT_EQ(3U, RoundUp<1>(3U)); + ASSERT_EQ(~static_cast(0), RoundUp<1>(~static_cast(0))); + ASSERT_EQ(~static_cast(1), RoundUp<1>(~static_cast(1))); + ASSERT_EQ(~static_cast(2), RoundUp<1>(~static_cast(2))); + ASSERT_EQ(~static_cast(3), RoundUp<1>(~static_cast(3))); + + ASSERT_EQ(zero, RoundUp<2>(zero)); + ASSERT_EQ(2U, RoundUp<2>(1U)); + ASSERT_EQ(2U, RoundUp<2>(2U)); + ASSERT_EQ(4U, RoundUp<2>(3U)); + ASSERT_EQ(4U, RoundUp<2>(4U)); + ASSERT_EQ(6U, RoundUp<2>(5U)); + ASSERT_EQ(6U, RoundUp<2>(6U)); + ASSERT_EQ(8U, RoundUp<2>(7U)); + ASSERT_EQ(8U, RoundUp<2>(8U)); + ASSERT_EQ(~static_cast(1), RoundUp<2>(~static_cast(1))); + ASSERT_EQ(~static_cast(1), RoundUp<2>(~static_cast(2))); + ASSERT_EQ(~static_cast(3), RoundUp<2>(~static_cast(3))); + ASSERT_EQ(~static_cast(3), RoundUp<2>(~static_cast(4))); + + ASSERT_EQ(zero, RoundUp<4>(zero)); + ASSERT_EQ(4U, RoundUp<4>(1U)); + ASSERT_EQ(4U, RoundUp<4>(2U)); + ASSERT_EQ(4U, RoundUp<4>(3U)); + ASSERT_EQ(4U, RoundUp<4>(4U)); + ASSERT_EQ(8U, RoundUp<4>(5U)); + ASSERT_EQ(8U, RoundUp<4>(6U)); + ASSERT_EQ(8U, RoundUp<4>(7U)); + ASSERT_EQ(8U, RoundUp<4>(8U)); + ASSERT_EQ(~static_cast(3), RoundUp<4>(~static_cast(3))); + ASSERT_EQ(~static_cast(3), RoundUp<4>(~static_cast(4))); + ASSERT_EQ(~static_cast(3), RoundUp<4>(~static_cast(5))); + ASSERT_EQ(~static_cast(3), RoundUp<4>(~static_cast(6))); + ASSERT_EQ(~static_cast(7), RoundUp<4>(~static_cast(7))); + ASSERT_EQ(~static_cast(7), RoundUp<4>(~static_cast(8))); + ASSERT_EQ(~static_cast(7), RoundUp<4>(~static_cast(9))); +} + +TEST(CRC32CRoundUpTest, RoundUpPointer) { + uintptr_t zero = 0, three = 3, four = 4, seven = 7, eight = 8; + + const uint8_t* zero_ptr = reinterpret_cast(zero); + const uint8_t* three_ptr = reinterpret_cast(three); + const uint8_t* four_ptr = reinterpret_cast(four); + const uint8_t* seven_ptr = reinterpret_cast(seven); + const uint8_t* eight_ptr = reinterpret_cast(eight); + + ASSERT_EQ(zero_ptr, RoundUp<1>(zero_ptr)); + ASSERT_EQ(zero_ptr, RoundUp<4>(zero_ptr)); + ASSERT_EQ(zero_ptr, RoundUp<8>(zero_ptr)); + + ASSERT_EQ(three_ptr, RoundUp<1>(three_ptr)); + ASSERT_EQ(four_ptr, RoundUp<4>(three_ptr)); + ASSERT_EQ(eight_ptr, RoundUp<8>(three_ptr)); + + ASSERT_EQ(four_ptr, RoundUp<1>(four_ptr)); + ASSERT_EQ(four_ptr, RoundUp<4>(four_ptr)); + ASSERT_EQ(eight_ptr, RoundUp<8>(four_ptr)); + + ASSERT_EQ(seven_ptr, RoundUp<1>(seven_ptr)); + ASSERT_EQ(eight_ptr, RoundUp<4>(seven_ptr)); + ASSERT_EQ(eight_ptr, 
RoundUp<8>(four_ptr)); +} + +} // namespace crc32c diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_sse42.cc b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_sse42.cc new file mode 100644 index 0000000000..524b2376d8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_sse42.cc @@ -0,0 +1,256 @@ +// Copyright 2008 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. + +#include "./crc32c_sse42.h" + +// In a separate source file to allow this accelerated CRC32C function to be +// compiled with the appropriate compiler flags to enable SSE4.2 instructions. + +// This implementation is loosely based on Intel Pub 323405 from April 2011, +// "Fast CRC Computation for iSCSI Polynomial Using CRC32 Instruction". + +#include +#include + +#include "./crc32c_internal.h" +#include "./crc32c_prefetch.h" +#include "./crc32c_read_le.h" +#include "./crc32c_round_up.h" +#include "crc32c/crc32c_config.h" + +#if HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__)) + +#if defined(_MSC_VER) +#include +#else // !defined(_MSC_VER) +#include +#endif // defined(_MSC_VER) + +namespace crc32c { + +namespace { + +constexpr const ptrdiff_t kGroups = 3; +constexpr const ptrdiff_t kBlock0Size = 16 * 1024 / kGroups / 64 * 64; +constexpr const ptrdiff_t kBlock1Size = 4 * 1024 / kGroups / 8 * 8; +constexpr const ptrdiff_t kBlock2Size = 1024 / kGroups / 8 * 8; + +const uint32_t kBlock0SkipTable[8][16] = { + {0x00000000, 0xff770459, 0xfb027e43, 0x04757a1a, 0xf3e88a77, 0x0c9f8e2e, + 0x08eaf434, 0xf79df06d, 0xe23d621f, 0x1d4a6646, 0x193f1c5c, 0xe6481805, + 0x11d5e868, 0xeea2ec31, 0xead7962b, 0x15a09272}, + {0x00000000, 0xc196b2cf, 0x86c1136f, 0x4757a1a0, 0x086e502f, 0xc9f8e2e0, + 0x8eaf4340, 0x4f39f18f, 0x10dca05e, 0xd14a1291, 0x961db331, 0x578b01fe, + 0x18b2f071, 0xd92442be, 0x9e73e31e, 0x5fe551d1}, + {0x00000000, 0x21b940bc, 0x43728178, 0x62cbc1c4, 0x86e502f0, 0xa75c424c, + 0xc5978388, 0xe42ec334, 0x08267311, 0x299f33ad, 0x4b54f269, 0x6aedb2d5, + 0x8ec371e1, 0xaf7a315d, 0xcdb1f099, 0xec08b025}, + {0x00000000, 0x104ce622, 0x2099cc44, 0x30d52a66, 0x41339888, 0x517f7eaa, + 0x61aa54cc, 0x71e6b2ee, 0x82673110, 0x922bd732, 0xa2fefd54, 0xb2b21b76, + 0xc354a998, 0xd3184fba, 0xe3cd65dc, 0xf38183fe}, + {0x00000000, 0x012214d1, 0x024429a2, 0x03663d73, 0x04885344, 0x05aa4795, + 0x06cc7ae6, 0x07ee6e37, 0x0910a688, 0x0832b259, 0x0b548f2a, 0x0a769bfb, + 0x0d98f5cc, 0x0cbae11d, 0x0fdcdc6e, 0x0efec8bf}, + {0x00000000, 0x12214d10, 0x24429a20, 0x3663d730, 0x48853440, 0x5aa47950, + 0x6cc7ae60, 0x7ee6e370, 0x910a6880, 0x832b2590, 0xb548f2a0, 0xa769bfb0, + 0xd98f5cc0, 0xcbae11d0, 0xfdcdc6e0, 0xefec8bf0}, + {0x00000000, 0x27f8a7f1, 0x4ff14fe2, 0x6809e813, 0x9fe29fc4, 0xb81a3835, + 0xd013d026, 0xf7eb77d7, 0x3a294979, 0x1dd1ee88, 0x75d8069b, 0x5220a16a, + 0xa5cbd6bd, 0x8233714c, 0xea3a995f, 0xcdc23eae}, + {0x00000000, 0x745292f2, 0xe8a525e4, 0x9cf7b716, 0xd4a63d39, 0xa0f4afcb, + 0x3c0318dd, 0x48518a2f, 0xaca00c83, 0xd8f29e71, 0x44052967, 0x3057bb95, + 0x780631ba, 0x0c54a348, 0x90a3145e, 0xe4f186ac}, +}; +const uint32_t kBlock1SkipTable[8][16] = { + {0x00000000, 0x79113270, 0xf22264e0, 0x8b335690, 0xe1a8bf31, 0x98b98d41, + 0x138adbd1, 0x6a9be9a1, 0xc6bd0893, 0xbfac3ae3, 0x349f6c73, 0x4d8e5e03, + 0x2715b7a2, 0x5e0485d2, 0xd537d342, 0xac26e132}, + {0x00000000, 0x889667d7, 0x14c0b95f, 0x9c56de88, 0x298172be, 0xa1171569, + 0x3d41cbe1, 0xb5d7ac36, 
0x5302e57c, 0xdb9482ab, 0x47c25c23, 0xcf543bf4, + 0x7a8397c2, 0xf215f015, 0x6e432e9d, 0xe6d5494a}, + {0x00000000, 0xa605caf8, 0x49e7e301, 0xefe229f9, 0x93cfc602, 0x35ca0cfa, + 0xda282503, 0x7c2deffb, 0x2273faf5, 0x8476300d, 0x6b9419f4, 0xcd91d30c, + 0xb1bc3cf7, 0x17b9f60f, 0xf85bdff6, 0x5e5e150e}, + {0x00000000, 0x44e7f5ea, 0x89cfebd4, 0xcd281e3e, 0x1673a159, 0x529454b3, + 0x9fbc4a8d, 0xdb5bbf67, 0x2ce742b2, 0x6800b758, 0xa528a966, 0xe1cf5c8c, + 0x3a94e3eb, 0x7e731601, 0xb35b083f, 0xf7bcfdd5}, + {0x00000000, 0x59ce8564, 0xb39d0ac8, 0xea538fac, 0x62d66361, 0x3b18e605, + 0xd14b69a9, 0x8885eccd, 0xc5acc6c2, 0x9c6243a6, 0x7631cc0a, 0x2fff496e, + 0xa77aa5a3, 0xfeb420c7, 0x14e7af6b, 0x4d292a0f}, + {0x00000000, 0x8eb5fb75, 0x1887801b, 0x96327b6e, 0x310f0036, 0xbfbafb43, + 0x2988802d, 0xa73d7b58, 0x621e006c, 0xecabfb19, 0x7a998077, 0xf42c7b02, + 0x5311005a, 0xdda4fb2f, 0x4b968041, 0xc5237b34}, + {0x00000000, 0xc43c00d8, 0x8d947741, 0x49a87799, 0x1ec49873, 0xdaf898ab, + 0x9350ef32, 0x576cefea, 0x3d8930e6, 0xf9b5303e, 0xb01d47a7, 0x7421477f, + 0x234da895, 0xe771a84d, 0xaed9dfd4, 0x6ae5df0c}, + {0x00000000, 0x7b1261cc, 0xf624c398, 0x8d36a254, 0xe9a5f1c1, 0x92b7900d, + 0x1f813259, 0x64935395, 0xd6a79573, 0xadb5f4bf, 0x208356eb, 0x5b913727, + 0x3f0264b2, 0x4410057e, 0xc926a72a, 0xb234c6e6}, +}; +const uint32_t kBlock2SkipTable[8][16] = { + {0x00000000, 0x8f158014, 0x1bc776d9, 0x94d2f6cd, 0x378eedb2, 0xb89b6da6, + 0x2c499b6b, 0xa35c1b7f, 0x6f1ddb64, 0xe0085b70, 0x74daadbd, 0xfbcf2da9, + 0x589336d6, 0xd786b6c2, 0x4354400f, 0xcc41c01b}, + {0x00000000, 0xde3bb6c8, 0xb99b1b61, 0x67a0ada9, 0x76da4033, 0xa8e1f6fb, + 0xcf415b52, 0x117aed9a, 0xedb48066, 0x338f36ae, 0x542f9b07, 0x8a142dcf, + 0x9b6ec055, 0x4555769d, 0x22f5db34, 0xfcce6dfc}, + {0x00000000, 0xde85763d, 0xb8e69a8b, 0x6663ecb6, 0x742143e7, 0xaaa435da, + 0xccc7d96c, 0x1242af51, 0xe84287ce, 0x36c7f1f3, 0x50a41d45, 0x8e216b78, + 0x9c63c429, 0x42e6b214, 0x24855ea2, 0xfa00289f}, + {0x00000000, 0xd569796d, 0xaf3e842b, 0x7a57fd46, 0x5b917ea7, 0x8ef807ca, + 0xf4affa8c, 0x21c683e1, 0xb722fd4e, 0x624b8423, 0x181c7965, 0xcd750008, + 0xecb383e9, 0x39dafa84, 0x438d07c2, 0x96e47eaf}, + {0x00000000, 0x6ba98c6d, 0xd75318da, 0xbcfa94b7, 0xab4a4745, 0xc0e3cb28, + 0x7c195f9f, 0x17b0d3f2, 0x5378f87b, 0x38d17416, 0x842be0a1, 0xef826ccc, + 0xf832bf3e, 0x939b3353, 0x2f61a7e4, 0x44c82b89}, + {0x00000000, 0xa6f1f0f6, 0x480f971d, 0xeefe67eb, 0x901f2e3a, 0x36eedecc, + 0xd810b927, 0x7ee149d1, 0x25d22a85, 0x8323da73, 0x6dddbd98, 0xcb2c4d6e, + 0xb5cd04bf, 0x133cf449, 0xfdc293a2, 0x5b336354}, + {0x00000000, 0x4ba4550a, 0x9748aa14, 0xdcecff1e, 0x2b7d22d9, 0x60d977d3, + 0xbc3588cd, 0xf791ddc7, 0x56fa45b2, 0x1d5e10b8, 0xc1b2efa6, 0x8a16baac, + 0x7d87676b, 0x36233261, 0xeacfcd7f, 0xa16b9875}, + {0x00000000, 0xadf48b64, 0x5e056039, 0xf3f1eb5d, 0xbc0ac072, 0x11fe4b16, + 0xe20fa04b, 0x4ffb2b2f, 0x7df9f615, 0xd00d7d71, 0x23fc962c, 0x8e081d48, + 0xc1f33667, 0x6c07bd03, 0x9ff6565e, 0x3202dd3a}, +}; + +constexpr const ptrdiff_t kPrefetchHorizon = 256; + +} // namespace + +uint32_t ExtendSse42(uint32_t crc, const uint8_t* data, size_t size) { + const uint8_t* p = data; + const uint8_t* e = data + size; + uint32_t l = crc ^ kCRC32Xor; + +#define STEP1 \ + do { \ + l = _mm_crc32_u8(l, *p++); \ + } while (0) + +#define STEP4(crc) \ + do { \ + crc = _mm_crc32_u32(crc, ReadUint32LE(p)); \ + p += 4; \ + } while (0) + +#define STEP8(crc, data) \ + do { \ + crc = _mm_crc32_u64(crc, ReadUint64LE(data)); \ + data += 8; \ + } while (0) + +#define STEP8BY3(crc0, crc1, crc2, p0, p1, p2) \ + do { \ + 
STEP8(crc0, p0); \ + STEP8(crc1, p1); \ + STEP8(crc2, p2); \ + } while (0) + +#define STEP8X3(crc0, crc1, crc2, bs) \ + do { \ + crc0 = _mm_crc32_u64(crc0, ReadUint64LE(p)); \ + crc1 = _mm_crc32_u64(crc1, ReadUint64LE(p + bs)); \ + crc2 = _mm_crc32_u64(crc2, ReadUint64LE(p + 2 * bs)); \ + p += 8; \ + } while (0) + +#define SKIP_BLOCK(crc, tab) \ + do { \ + crc = tab[0][crc & 0xf] ^ tab[1][(crc >> 4) & 0xf] ^ \ + tab[2][(crc >> 8) & 0xf] ^ tab[3][(crc >> 12) & 0xf] ^ \ + tab[4][(crc >> 16) & 0xf] ^ tab[5][(crc >> 20) & 0xf] ^ \ + tab[6][(crc >> 24) & 0xf] ^ tab[7][(crc >> 28) & 0xf]; \ + } while (0) + + // Point x at first 8-byte aligned byte in the buffer. This might be past the + // end of the buffer. + const uint8_t* x = RoundUp<8>(p); + if (x <= e) { + // Process bytes p is 8-byte aligned. + while (p != x) { + STEP1; + } + } + + // Process the data in predetermined block sizes with tables for quickly + // combining the checksum. Experimentally it's better to use larger block + // sizes where possible so use a hierarchy of decreasing block sizes. + uint64_t l64 = l; + while ((e - p) >= kGroups * kBlock0Size) { + uint64_t l641 = 0; + uint64_t l642 = 0; + for (int i = 0; i < kBlock0Size; i += 8 * 8) { + // Prefetch ahead to hide latency. + RequestPrefetch(p + kPrefetchHorizon); + RequestPrefetch(p + kBlock0Size + kPrefetchHorizon); + RequestPrefetch(p + 2 * kBlock0Size + kPrefetchHorizon); + + // Process 64 bytes at a time. + STEP8X3(l64, l641, l642, kBlock0Size); + STEP8X3(l64, l641, l642, kBlock0Size); + STEP8X3(l64, l641, l642, kBlock0Size); + STEP8X3(l64, l641, l642, kBlock0Size); + STEP8X3(l64, l641, l642, kBlock0Size); + STEP8X3(l64, l641, l642, kBlock0Size); + STEP8X3(l64, l641, l642, kBlock0Size); + STEP8X3(l64, l641, l642, kBlock0Size); + } + + // Combine results. + SKIP_BLOCK(l64, kBlock0SkipTable); + l64 ^= l641; + SKIP_BLOCK(l64, kBlock0SkipTable); + l64 ^= l642; + p += (kGroups - 1) * kBlock0Size; + } + while ((e - p) >= kGroups * kBlock1Size) { + uint64_t l641 = 0; + uint64_t l642 = 0; + for (int i = 0; i < kBlock1Size; i += 8) { + STEP8X3(l64, l641, l642, kBlock1Size); + } + SKIP_BLOCK(l64, kBlock1SkipTable); + l64 ^= l641; + SKIP_BLOCK(l64, kBlock1SkipTable); + l64 ^= l642; + p += (kGroups - 1) * kBlock1Size; + } + while ((e - p) >= kGroups * kBlock2Size) { + uint64_t l641 = 0; + uint64_t l642 = 0; + for (int i = 0; i < kBlock2Size; i += 8) { + STEP8X3(l64, l641, l642, kBlock2Size); + } + SKIP_BLOCK(l64, kBlock2SkipTable); + l64 ^= l641; + SKIP_BLOCK(l64, kBlock2SkipTable); + l64 ^= l642; + p += (kGroups - 1) * kBlock2Size; + } + + // Process bytes 16 at a time + while ((e - p) >= 16) { + STEP8(l64, p); + STEP8(l64, p); + } + + l = static_cast(l64); + // Process the last few bytes. + while (p != e) { + STEP1; + } +#undef SKIP_BLOCK +#undef STEP8X3 +#undef STEP8BY3 +#undef STEP8 +#undef STEP4 +#undef STEP1 + + return l ^ kCRC32Xor; +} + +} // namespace crc32c + +#endif // HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__)) diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_sse42.h b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_sse42.h new file mode 100644 index 0000000000..b9ed179e54 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_sse42.h @@ -0,0 +1,31 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. 
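For contrast with the three-way interleaved ExtendSse42() above, a single-stream x86-64 sketch built on the same _mm_crc32_u64/_mm_crc32_u8 intrinsics and pre/post conditioning; illustration only, not the shipped code path.

#if defined(__x86_64__) && defined(__SSE4_2__)
#include <nmmintrin.h>
#include <cstddef>
#include <cstdint>
#include <cstring>

uint32_t SimpleCrc32cSse42(uint32_t crc, const uint8_t* data, size_t size) {
  uint64_t crc64 = crc ^ 0xffffffffu;  // Pre-condition (kCRC32Xor).
  while (size >= 8) {
    uint64_t word;
    std::memcpy(&word, data, sizeof(word));  // Unaligned little-endian load.
    crc64 = _mm_crc32_u64(crc64, word);
    data += 8;
    size -= 8;
  }
  uint32_t crc32 = static_cast<uint32_t>(crc64);
  while (size > 0) {
    crc32 = _mm_crc32_u8(crc32, *data++);
    --size;
  }
  return crc32 ^ 0xffffffffu;  // Post-condition.
}
#endif  // __x86_64__ && __SSE4_2__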
+
+#ifndef CRC32C_CRC32C_SSE42_H_
+#define CRC32C_CRC32C_SSE42_H_
+
+// X86-specific code.
+
+#include <cstddef>
+#include <cstdint>
+
+#include "crc32c/crc32c_config.h"
+
+// The hardware-accelerated implementation is only enabled for 64-bit builds,
+// because a straightforward 32-bit implementation actually runs slower than the
+// portable version. Most X86 machines are 64-bit nowadays, so it doesn't make
+// much sense to spend time building an optimized hardware-accelerated
+// implementation.
+#if HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+
+namespace crc32c {
+
+// SSE4.2-accelerated implementation in crc32c_sse42.cc
+uint32_t ExtendSse42(uint32_t crc, const uint8_t* data, size_t count);
+
+} // namespace crc32c
+
+#endif // HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+
+#endif // CRC32C_CRC32C_SSE42_H_
diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_sse42_check.h b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_sse42_check.h
new file mode 100644
index 0000000000..ad380dd20e
--- /dev/null
+++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_sse42_check.h
@@ -0,0 +1,48 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#ifndef CRC32C_CRC32C_SSE42_CHECK_H_
+#define CRC32C_CRC32C_SSE42_CHECK_H_
+
+// X86-specific code checking the availability of SSE4.2 instructions.
+
+#include <cstddef>
+#include <cstdint>
+
+#include "crc32c/crc32c_config.h"
+
+#if HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+
+// If the compiler supports SSE4.2, it definitely supports X86.
+
+#if defined(_MSC_VER)
+#include <intrin.h>
+
+namespace crc32c {
+
+inline bool CanUseSse42() {
+  int cpu_info[4];
+  __cpuid(cpu_info, 1);
+  return (cpu_info[2] & (1 << 20)) != 0;
+}
+
+} // namespace crc32c
+
+#else // !defined(_MSC_VER)
+#include <cpuid.h>
+
+namespace crc32c {
+
+inline bool CanUseSse42() {
+  unsigned int eax, ebx, ecx, edx;
+  return __get_cpuid(1, &eax, &ebx, &ecx, &edx) && ((ecx & (1 << 20)) != 0);
+}
+
+} // namespace crc32c
+
+#endif // defined(_MSC_VER)
+
+#endif // HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+
+#endif // CRC32C_CRC32C_SSE42_CHECK_H_
diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_sse42_unittest.cc b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_sse42_unittest.cc
new file mode 100644
index 0000000000..c73ad8ddd1
--- /dev/null
+++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_sse42_unittest.cc
@@ -0,0 +1,24 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
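crc32c_sse42_check.h above gates the accelerated path on a runtime CPUID probe: leaf 1, bit 20 of ECX advertises SSE4.2. A standalone probe along the same lines might look like this (GCC/Clang path only; MSVC would go through __cpuid from <intrin.h> as in the header). The program is illustrative and not part of the patch.

#include <cpuid.h>

#include <cstdio>

int main() {
  unsigned int eax = 0, ebx = 0, ecx = 0, edx = 0;
  // CPUID leaf 1: ECX bit 20 == SSE4.2, the same check as CanUseSse42() above.
  const bool have_sse42 =
      __get_cpuid(1, &eax, &ebx, &ecx, &edx) && ((ecx & (1u << 20)) != 0);
  std::printf("SSE4.2 CRC32 instructions: %s\n",
              have_sse42 ? "available" : "not available");
  return 0;
}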
+ +#include "gtest/gtest.h" + +#include "./crc32c_extend_unittests.h" +#include "./crc32c_sse42.h" + +namespace crc32c { + +#if HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__)) + +struct Sse42TestTraits { + static uint32_t Extend(uint32_t crc, const uint8_t* data, size_t count) { + return ExtendSse42(crc, data, count); + } +}; + +INSTANTIATE_TYPED_TEST_SUITE_P(Sse42, ExtendTest, Sse42TestTraits); + +#endif // HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__)) + +} // namespace crc32c diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_test_main.cc b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_test_main.cc new file mode 100644 index 0000000000..c07e1c8bc4 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_test_main.cc @@ -0,0 +1,20 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "crc32c/crc32c_config.h" + +#include "gtest/gtest.h" + +#if CRC32C_TESTS_BUILT_WITH_GLOG +#include "glog/logging.h" +#endif // CRC32C_TESTS_BUILT_WITH_GLOG + +int main(int argc, char** argv) { +#if CRC32C_TESTS_BUILT_WITH_GLOG + google::InitGoogleLogging(argv[0]); + google::InstallFailureSignalHandler(); +#endif // CRC32C_TESTS_BUILT_WITH_GLOG + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_unittest.cc b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_unittest.cc new file mode 100644 index 0000000000..d6c6af680c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/crc32c_unittest.cc @@ -0,0 +1,129 @@ +// Copyright 2017 The CRC32C Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. See the AUTHORS file for names of contributors. + +#include "crc32c/crc32c.h" + +#include +#include +#include + +#include "gtest/gtest.h" + +#include "./crc32c_extend_unittests.h" + +TEST(Crc32CTest, Crc32c) { + // From rfc3720 section B.4. 
+  uint8_t buf[32];
+
+  std::memset(buf, 0, sizeof(buf));
+  EXPECT_EQ(static_cast<uint32_t>(0x8a9136aa),
+            crc32c::Crc32c(buf, sizeof(buf)));
+
+  std::memset(buf, 0xff, sizeof(buf));
+  EXPECT_EQ(static_cast<uint32_t>(0x62a8ab43),
+            crc32c::Crc32c(buf, sizeof(buf)));
+
+  for (size_t i = 0; i < 32; ++i)
+    buf[i] = static_cast<uint8_t>(i);
+  EXPECT_EQ(static_cast<uint32_t>(0x46dd794e),
+            crc32c::Crc32c(buf, sizeof(buf)));
+
+  for (size_t i = 0; i < 32; ++i)
+    buf[i] = static_cast<uint8_t>(31 - i);
+  EXPECT_EQ(static_cast<uint32_t>(0x113fdb5c),
+            crc32c::Crc32c(buf, sizeof(buf)));
+
+  uint8_t data[48] = {
+      0x01, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+      0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00,
+      0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x18, 0x28, 0x00, 0x00, 0x00,
+      0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+  };
+  EXPECT_EQ(static_cast<uint32_t>(0xd9963a56),
+            crc32c::Crc32c(data, sizeof(data)));
+}
+
+namespace crc32c {
+
+struct ApiTestTraits {
+  static uint32_t Extend(uint32_t crc, const uint8_t* data, size_t count) {
+    return ::crc32c::Extend(crc, data, count);
+  }
+};
+
+INSTANTIATE_TYPED_TEST_SUITE_P(Api, ExtendTest, ApiTestTraits);
+
+} // namespace crc32c
+
+TEST(CRC32CTest, Crc32cCharPointer) {
+  char buf[32];
+
+  std::memset(buf, 0, sizeof(buf));
+  EXPECT_EQ(static_cast<uint32_t>(0x8a9136aa),
+            crc32c::Crc32c(buf, sizeof(buf)));
+
+  std::memset(buf, 0xff, sizeof(buf));
+  EXPECT_EQ(static_cast<uint32_t>(0x62a8ab43),
+            crc32c::Crc32c(buf, sizeof(buf)));
+
+  for (size_t i = 0; i < 32; ++i)
+    buf[i] = static_cast<char>(i);
+  EXPECT_EQ(static_cast<uint32_t>(0x46dd794e),
+            crc32c::Crc32c(buf, sizeof(buf)));
+
+  for (size_t i = 0; i < 32; ++i)
+    buf[i] = static_cast<char>(31 - i);
+  EXPECT_EQ(static_cast<uint32_t>(0x113fdb5c),
+            crc32c::Crc32c(buf, sizeof(buf)));
+}
+
+TEST(CRC32CTest, Crc32cStdString) {
+  std::string buf;
+  buf.resize(32);
+
+  for (size_t i = 0; i < 32; ++i)
+    buf[i] = static_cast<char>(0x00);
+  EXPECT_EQ(static_cast<uint32_t>(0x8a9136aa), crc32c::Crc32c(buf));
+
+  for (size_t i = 0; i < 32; ++i)
+    buf[i] = '\xff';
+  EXPECT_EQ(static_cast<uint32_t>(0x62a8ab43), crc32c::Crc32c(buf));
+
+  for (size_t i = 0; i < 32; ++i)
+    buf[i] = static_cast<char>(i);
+  EXPECT_EQ(static_cast<uint32_t>(0x46dd794e), crc32c::Crc32c(buf));
+
+  for (size_t i = 0; i < 32; ++i)
+    buf[i] = static_cast<char>(31 - i);
+  EXPECT_EQ(static_cast<uint32_t>(0x113fdb5c), crc32c::Crc32c(buf));
+}
+
+#if __cplusplus > 201402L
+#if __has_include(<string_view>)
+
+TEST(CRC32CTest, Crc32cStdStringView) {
+  uint8_t buf[32];
+  std::string_view view(reinterpret_cast<const char*>(buf), sizeof(buf));
+
+  std::memset(buf, 0, sizeof(buf));
+  EXPECT_EQ(static_cast<uint32_t>(0x8a9136aa), crc32c::Crc32c(view));
+
+  std::memset(buf, 0xff, sizeof(buf));
+  EXPECT_EQ(static_cast<uint32_t>(0x62a8ab43), crc32c::Crc32c(view));
+
+  for (size_t i = 0; i < 32; ++i)
+    buf[i] = static_cast<uint8_t>(i);
+  EXPECT_EQ(static_cast<uint32_t>(0x46dd794e), crc32c::Crc32c(view));
+
+  for (size_t i = 0; i < 32; ++i)
+    buf[i] = static_cast<uint8_t>(31 - i);
+  EXPECT_EQ(static_cast<uint32_t>(0x113fdb5c), crc32c::Crc32c(view));
+}
+
+#endif // __has_include(<string_view>)
+#endif // __cplusplus > 201402L
+
+#define TESTED_EXTEND Extend
+#include "./crc32c_extend_unittests.h"
+#undef TESTED_EXTEND
diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/include/crc32c/crc32c.h b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/include/crc32c/crc32c.h
new file mode 100644
index 0000000000..e8a78170a9
--- /dev/null
+++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/include/crc32c/crc32c.h
@@ -0,0 +1,89 @@
+/* Copyright 2017 The CRC32C Authors. All rights reserved.
+   Use of this source code is governed by a BSD-style license that can be
+   found in the LICENSE file. See the AUTHORS file for names of contributors. */
+
+#ifndef CRC32C_CRC32C_H_
+#define CRC32C_CRC32C_H_
+
+/* The API exported by the CRC32C project. */
+
+#if defined(__cplusplus)
+
+#include <cstddef>
+#include <cstdint>
+#include <string>
+
+#else /* !defined(__cplusplus) */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#endif /* !defined(__cplusplus) */
+
+
+/* The C API. */
+
+#if defined(__cplusplus)
+extern "C" {
+#endif /* defined(__cplusplus) */
+
+/* Extends "crc" with the CRC32C of "count" bytes in the buffer pointed by
+   "data" */
+uint32_t crc32c_extend(uint32_t crc, const uint8_t* data, size_t count);
+
+/* Computes the CRC32C of "count" bytes in the buffer pointed by "data". */
+uint32_t crc32c_value(const uint8_t* data, size_t count);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* defined(__cplusplus) */
+
+
+/* The C++ API. */
+
+#if defined(__cplusplus)
+
+namespace crc32c {
+
+// Extends "crc" with the CRC32C of "count" bytes in the buffer pointed by
+// "data".
+uint32_t Extend(uint32_t crc, const uint8_t* data, size_t count);
+
+// Computes the CRC32C of "count" bytes in the buffer pointed by "data".
+inline uint32_t Crc32c(const uint8_t* data, size_t count) {
+  return Extend(0, data, count);
+}
+
+// Computes the CRC32C of "count" bytes in the buffer pointed by "data".
+inline uint32_t Crc32c(const char* data, size_t count) {
+  return Extend(0, reinterpret_cast<const uint8_t*>(data), count);
+}
+
+// Computes the CRC32C of the string's content.
+inline uint32_t Crc32c(const std::string& string) {
+  return Crc32c(reinterpret_cast<const uint8_t*>(string.data()),
+                string.size());
+}
+
+} // namespace crc32c
+
+#if __cplusplus > 201402L
+#if __has_include(<string_view>)
+#include <string_view>
+
+namespace crc32c {
+
+// Computes the CRC32C of the bytes in the string_view.
+inline uint32_t Crc32c(const std::string_view& string_view) {
+  return Crc32c(reinterpret_cast<const uint8_t*>(string_view.data()),
+                string_view.size());
+}
+
+} // namespace crc32c
+
+#endif // __has_include(<string_view>)
+#endif // __cplusplus > 201402L
+
+#endif /* defined(__cplusplus) */
+
+#endif // CRC32C_CRC32C_H_
diff --git a/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/include/crc32c/crc32c_config.h b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/include/crc32c/crc32c_config.h
new file mode 100644
index 0000000000..a00d6a63c9
--- /dev/null
+++ b/TMessagesProj/jni/voip/webrtc/third_party/crc32c/src/include/crc32c/crc32c_config.h
@@ -0,0 +1,43 @@
+// Copyright 2017 The CRC32C Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file. See the AUTHORS file for names of contributors.
+
+#ifndef CRC32C_CRC32C_CONFIG_H_
+#define CRC32C_CRC32C_CONFIG_H_
+
+// Define to 1 if building for a big-endian platform.
+//#cmakedefine01 BYTE_ORDER_BIG_ENDIAN
+
+// Define to 1 if the compiler has the __builtin_prefetch intrinsic.
+#define HAVE_BUILTIN_PREFETCH 1
+
+// Define to 1 if targeting X86 and the compiler has the _mm_prefetch intrinsic.
+
+#if HAVE_SSE42 && (defined(_M_X64) || defined(__x86_64__))
+#define HAVE_MM_PREFETCH 1
+#endif
+
+// Define to 1 if targeting X86 and the compiler has the _mm_crc32_u{8,32,64}
+// intrinsics.
+#if defined(__i386) || defined(__x86_64) || defined(_M_IX86)
+//#define HAVE_SSE42 1
+#endif
+
+// Define to 1 if targeting ARM and the compiler has the __crc32c{b,h,w,d} and
+// the vmull_p64 intrinsics.
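For reference, the public API declared in include/crc32c/crc32c.h above composes in the usual way: Crc32c(data, n) is Extend(0, data, n), and extending the CRC of a prefix with the remaining bytes yields the CRC of the concatenation. A small usage sketch, not part of the patch and assuming the library is linked in:

#include <cstdint>
#include <string>

#include "crc32c/crc32c.h"

int main() {
  const std::string hello = "hello ";
  const std::string world = "world";

  // One-shot CRC of the concatenated buffer.
  const uint32_t full = crc32c::Crc32c(hello + world);

  // Incremental: CRC of the prefix, then extended with the suffix.
  uint32_t crc = crc32c::Crc32c(hello);
  crc = crc32c::Extend(crc,
                       reinterpret_cast<const uint8_t*>(world.data()),
                       world.size());

  return crc == full ? 0 : 1;  // the two computations agree
}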
+#if defined(__aarch64__) +//#define HAVE_ARM64_CRC32C 1 +#endif + +// Define to 1 if the system libraries have the getauxval function in the +// header. Should be true on Linux and Android API level 20+. +#define HAVE_STRONG_GETAUXVAL 1 + +// Define to 1 if the compiler supports defining getauxval as a weak symbol. +// Should be true for any compiler that supports __attribute__((weak)). +#define HAVE_WEAK_GETAUXVAL + +// Define to 1 if CRC32C tests have been built with Google Logging. +//#cmakedefine01 CRC32C_TESTS_BUILT_WITH_GLOG + +#endif // CRC32C_CRC32C_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/OWNERS b/TMessagesProj/jni/voip/webrtc/video/OWNERS index f76cf9009a..2206a59a18 100644 --- a/TMessagesProj/jni/voip/webrtc/video/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/video/OWNERS @@ -1,5 +1,6 @@ asapersson@webrtc.org ilnik@webrtc.org mflodman@webrtc.org +philipel@webrtc.org sprang@webrtc.org stefan@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.cc index ec0b8e41d5..f9ee08ac87 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.cc @@ -14,13 +14,14 @@ #include #include "api/sequence_checker.h" -#include "rtc_base/task_utils/to_queued_task.h" namespace webrtc { BalancedConstraint::BalancedConstraint( - DegradationPreferenceProvider* degradation_preference_provider) + DegradationPreferenceProvider* degradation_preference_provider, + const FieldTrialsView& field_trials) : encoder_target_bitrate_bps_(absl::nullopt), + balanced_settings_(field_trials), degradation_preference_provider_(degradation_preference_provider) { RTC_DCHECK(degradation_preference_provider_); sequence_checker_.Detach(); diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.h index 0bbd670408..22c7d2923c 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.h +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.h @@ -14,6 +14,7 @@ #include #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/sequence_checker.h" #include "call/adaptation/adaptation_constraint.h" #include "call/adaptation/degradation_preference_provider.h" @@ -24,8 +25,9 @@ namespace webrtc { class BalancedConstraint : public AdaptationConstraint { public: - explicit BalancedConstraint( - DegradationPreferenceProvider* degradation_preference_provider); + BalancedConstraint( + DegradationPreferenceProvider* degradation_preference_provider, + const FieldTrialsView& field_trials); ~BalancedConstraint() override = default; void OnEncoderTargetBitrateUpdated( diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/bandwidth_quality_scaler_resource.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/bandwidth_quality_scaler_resource.cc index 283f3f0221..485019f309 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/bandwidth_quality_scaler_resource.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/bandwidth_quality_scaler_resource.cc @@ -15,8 +15,6 @@ #include "rtc_base/checks.h" #include "rtc_base/experiments/balanced_degradation_settings.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/time_utils.h" namespace webrtc { diff --git 
a/TMessagesProj/jni/voip/webrtc/video/adaptation/bandwidth_quality_scaler_resource.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/bandwidth_quality_scaler_resource.h index 9dfa97d11d..a57c9907a4 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/bandwidth_quality_scaler_resource.h +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/bandwidth_quality_scaler_resource.h @@ -23,8 +23,6 @@ #include "call/adaptation/degradation_preference_provider.h" #include "call/adaptation/resource_adaptation_processor_interface.h" #include "modules/video_coding/utility/bandwidth_quality_scaler.h" -#include "rtc_base/ref_counted_object.h" -#include "rtc_base/task_queue.h" #include "video/adaptation/video_stream_encoder_resource.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.cc index cd61e555cd..bc36723d48 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.cc @@ -37,6 +37,10 @@ void BitrateConstraint::OnEncoderTargetBitrateUpdated( encoder_target_bitrate_bps_ = std::move(encoder_target_bitrate_bps); } +// Checks if resolution is allowed to adapt up based on the current bitrate and +// ResolutionBitrateLimits.min_start_bitrate_bps for the next higher resolution. +// Bitrate limits usage is restricted to a single active stream/layer (e.g. when +// quality scaling is enabled). bool BitrateConstraint::IsAdaptationUpAllowed( const VideoStreamInputState& input_state, const VideoSourceRestrictions& restrictions_before, @@ -53,7 +57,7 @@ bool BitrateConstraint::IsAdaptationUpAllowed( return true; } - if (VideoStreamEncoderResourceManager::IsSimulcast( + if (VideoStreamEncoderResourceManager::IsSimulcastOrMultipleSpatialLayers( encoder_settings_->encoder_config())) { // Resolution bitrate limits usage is restricted to singlecast. 
return true; diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/encode_usage_resource.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/encode_usage_resource.cc index c42c63f4b7..4a97881b04 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/encode_usage_resource.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/encode_usage_resource.cc @@ -14,7 +14,6 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/ref_counted_object.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/encode_usage_resource.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/encode_usage_resource.h index 257988fa12..c391132e57 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/encode_usage_resource.h +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/encode_usage_resource.h @@ -17,8 +17,6 @@ #include "absl/types/optional.h" #include "api/scoped_refptr.h" #include "api/video/video_adaptation_reason.h" -#include "rtc_base/ref_counted_object.h" -#include "rtc_base/task_queue.h" #include "video/adaptation/overuse_frame_detector.h" #include "video/adaptation/video_stream_encoder_resource.h" diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.cc index 9703ac8025..9836a466b5 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.cc @@ -429,7 +429,7 @@ class OverdoseInjector : public OveruseFrameDetector::ProcessingUsage { } // namespace -CpuOveruseOptions::CpuOveruseOptions() +CpuOveruseOptions::CpuOveruseOptions(const FieldTrialsView& field_trials) : high_encode_usage_threshold_percent(85), frame_timeout_interval_ms(1500), min_frame_samples(120), @@ -438,42 +438,46 @@ CpuOveruseOptions::CpuOveruseOptions() // Disabled by default. filter_time_ms(0) { #if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) - // This is proof-of-concept code for letting the physical core count affect - // the interval into which we attempt to scale. For now, the code is Mac OS - // specific, since that's the platform were we saw most problems. - // TODO(torbjorng): Enhance SystemInfo to return this metric. - - mach_port_t mach_host = mach_host_self(); - host_basic_info hbi = {}; - mach_msg_type_number_t info_count = HOST_BASIC_INFO_COUNT; - kern_return_t kr = - host_info(mach_host, HOST_BASIC_INFO, reinterpret_cast(&hbi), - &info_count); - mach_port_deallocate(mach_task_self(), mach_host); - - int n_physical_cores; - if (kr != KERN_SUCCESS) { - // If we couldn't get # of physical CPUs, don't panic. Assume we have 1. - n_physical_cores = 1; - RTC_LOG(LS_ERROR) - << "Failed to determine number of physical cores, assuming 1"; - } else { - n_physical_cores = hbi.physical_cpu; - RTC_LOG(LS_INFO) << "Number of physical cores:" << n_physical_cores; - } + // Kill switch for re-enabling special adaptation rules for macOS. + // TODO(bugs.webrtc.org/14138): Remove once removal is deemed safe. + if (field_trials.IsEnabled( + "WebRTC-MacSpecialOveruseRulesRemovalKillSwitch")) { + // This is proof-of-concept code for letting the physical core count affect + // the interval into which we attempt to scale. For now, the code is Mac OS + // specific, since that's the platform were we saw most problems. + // TODO(torbjorng): Enhance SystemInfo to return this metric. 
+ + mach_port_t mach_host = mach_host_self(); + host_basic_info hbi = {}; + mach_msg_type_number_t info_count = HOST_BASIC_INFO_COUNT; + kern_return_t kr = + host_info(mach_host, HOST_BASIC_INFO, + reinterpret_cast(&hbi), &info_count); + mach_port_deallocate(mach_task_self(), mach_host); + + int n_physical_cores; + if (kr != KERN_SUCCESS) { + // If we couldn't get # of physical CPUs, don't panic. Assume we have 1. + n_physical_cores = 1; + RTC_LOG(LS_ERROR) + << "Failed to determine number of physical cores, assuming 1"; + } else { + n_physical_cores = hbi.physical_cpu; + RTC_LOG(LS_INFO) << "Number of physical cores:" << n_physical_cores; + } - // Change init list default for few core systems. The assumption here is that - // encoding, which we measure here, takes about 1/4 of the processing of a - // two-way call. This is roughly true for x86 using both vp8 and vp9 without - // hardware encoding. Since we don't affect the incoming stream here, we only - // control about 1/2 of the total processing needs, but this is not taken into - // account. - if (n_physical_cores == 1) - high_encode_usage_threshold_percent = 20; // Roughly 1/4 of 100%. - else if (n_physical_cores == 2) - high_encode_usage_threshold_percent = 40; // Roughly 1/4 of 200%. + // Change init list default for few core systems. The assumption here is + // that encoding, which we measure here, takes about 1/4 of the processing + // of a two-way call. This is roughly true for x86 using both vp8 and vp9 + // without hardware encoding. Since we don't affect the incoming stream + // here, we only control about 1/2 of the total processing needs, but this + // is not taken into account. + if (n_physical_cores == 1) + high_encode_usage_threshold_percent = 20; // Roughly 1/4 of 100%. + else if (n_physical_cores == 2) + high_encode_usage_threshold_percent = 40; // Roughly 1/4 of 200%. + } #endif // defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) - // Note that we make the interval 2x+epsilon wide, since libyuv scaling steps // are close to that (when squared). This wide interval makes sure that // scaling up or down does not jump all the way across the interval. 
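A recurring pattern in the WebRTC changes above and below is dependency injection of field trials: constructors now take a const FieldTrialsView& (queried via IsEnabled / IsDisabled / Lookup) instead of consulting the global field_trial helpers, and legacy behaviour such as the macOS core-count rule is kept behind a kill-switch trial. A minimal sketch of that pattern, where ExampleDetector and the second trial name are hypothetical placeholders:

#include <string>

#include "api/field_trials_view.h"

// Hypothetical component following the injection pattern used in this diff.
class ExampleDetector {
 public:
  explicit ExampleDetector(const webrtc::FieldTrialsView& field_trials)
      // Kill switch shown in CpuOveruseOptions above: the special rules only
      // come back if the trial is explicitly enabled.
      : mac_rules_enabled_(field_trials.IsEnabled(
            "WebRTC-MacSpecialOveruseRulesRemovalKillSwitch")),
        // Free-form trial payloads are read with Lookup(); this trial name is
        // a placeholder, not one referenced by the patch.
        example_config_(field_trials.Lookup("WebRTC-ExampleExperiment")) {}

  bool mac_rules_enabled() const { return mac_rules_enabled_; }
  const std::string& example_config() const { return example_config_; }

 private:
  const bool mac_rules_enabled_;
  const std::string example_config_;
};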
@@ -517,10 +521,12 @@ OveruseFrameDetector::CreateProcessingUsage(const CpuOveruseOptions& options) { } OveruseFrameDetector::OveruseFrameDetector( - CpuOveruseMetricsObserver* metrics_observer) - : metrics_observer_(metrics_observer), + CpuOveruseMetricsObserver* metrics_observer, + const FieldTrialsView& field_trials) + : options_(field_trials), + metrics_observer_(metrics_observer), num_process_times_(0), - // TODO(nisse): Use absl::optional + // TODO(bugs.webrtc.org/9078): Use absl::optional last_capture_time_us_(-1), num_pixels_(0), max_framerate_(kDefaultFrameRate), diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.h index 2b4dd61d21..4e1f6a83a4 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.h +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.h @@ -15,22 +15,22 @@ #include #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" -#include "api/video/video_stream_encoder_observer.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/exp_filter.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" +#include "video/video_stream_encoder_observer.h" namespace webrtc { class VideoFrame; struct CpuOveruseOptions { - CpuOveruseOptions(); + explicit CpuOveruseOptions(const FieldTrialsView& field_trials); int low_encode_usage_threshold_percent; // Threshold for triggering underuse. int high_encode_usage_threshold_percent; // Threshold for triggering overuse. @@ -65,9 +65,13 @@ class OveruseFrameDetectorObserverInterface { // check for overuse. class OveruseFrameDetector { public: - explicit OveruseFrameDetector(CpuOveruseMetricsObserver* metrics_observer); + explicit OveruseFrameDetector(CpuOveruseMetricsObserver* metrics_observer, + const FieldTrialsView& field_trials); virtual ~OveruseFrameDetector(); + OveruseFrameDetector(const OveruseFrameDetector&) = delete; + OveruseFrameDetector& operator=(const OveruseFrameDetector&) = delete; + // Start to periodically check for overuse. void StartCheckForOveruse( TaskQueueBase* task_queue_base, @@ -161,8 +165,6 @@ class OveruseFrameDetector { // If set by field trial, overrides CpuOveruseOptions::filter_time_ms. 
FieldTrialOptional filter_time_constant_{"tau"}; - - RTC_DISALLOW_COPY_AND_ASSIGN(OveruseFrameDetector); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/pixel_limit_resource.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/pixel_limit_resource.cc index 36c93cd4fa..872e169879 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/pixel_limit_resource.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/pixel_limit_resource.cc @@ -14,7 +14,6 @@ #include "api/units/time_delta.h" #include "call/adaptation/video_stream_adapter.h" #include "rtc_base/checks.h" -#include "rtc_base/ref_counted_object.h" namespace webrtc { @@ -84,11 +83,11 @@ void PixelLimitResource::SetResourceListener(ResourceListener* listener) { int target_pixels_lower_bounds = GetLowerResolutionThan(target_pixel_upper_bounds); if (current_pixels > target_pixel_upper_bounds) { - listener_->OnResourceUsageStateMeasured(this, - ResourceUsageState::kOveruse); + listener_->OnResourceUsageStateMeasured( + rtc::scoped_refptr(this), ResourceUsageState::kOveruse); } else if (current_pixels < target_pixels_lower_bounds) { - listener_->OnResourceUsageStateMeasured(this, - ResourceUsageState::kUnderuse); + listener_->OnResourceUsageStateMeasured( + rtc::scoped_refptr(this), ResourceUsageState::kUnderuse); } return kResourceUsageCheckIntervalMs; }); diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.cc index c455252d45..68d56fe29e 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.cc @@ -14,8 +14,6 @@ #include "rtc_base/checks.h" #include "rtc_base/experiments/balanced_degradation_settings.h" -#include "rtc_base/ref_counted_object.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/time_utils.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.h index 06c22ca3c6..cbb6d3d06f 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.h +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.h @@ -22,8 +22,6 @@ #include "call/adaptation/degradation_preference_provider.h" #include "call/adaptation/resource_adaptation_processor_interface.h" #include "modules/video_coding/utility/quality_scaler.h" -#include "rtc_base/ref_counted_object.h" -#include "rtc_base/task_queue.h" #include "video/adaptation/video_stream_encoder_resource.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource.cc index d26da708b6..ad89aef52a 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource.cc @@ -51,7 +51,8 @@ void VideoStreamEncoderResource::OnResourceUsageStateMeasured( ResourceUsageState usage_state) { MutexLock crit(&lock_); if (listener_) { - listener_->OnResourceUsageStateMeasured(this, usage_state); + listener_->OnResourceUsageStateMeasured(rtc::scoped_refptr(this), + usage_state); } } diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc 
b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc index 6a1e9215a6..2470bc8893 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc @@ -26,14 +26,13 @@ #include "api/video/video_adaptation_reason.h" #include "api/video/video_source_interface.h" #include "call/adaptation/video_source_restrictions.h" +#include "modules/video_coding/svc/scalability_mode_util.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/field_trial.h" #include "video/adaptation/quality_scaler_resource.h" namespace webrtc { @@ -266,11 +265,14 @@ VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager( Clock* clock, bool experiment_cpu_load_estimator, std::unique_ptr overuse_detector, - DegradationPreferenceProvider* degradation_preference_provider) - : degradation_preference_provider_(degradation_preference_provider), + DegradationPreferenceProvider* degradation_preference_provider, + const FieldTrialsView& field_trials) + : field_trials_(field_trials), + degradation_preference_provider_(degradation_preference_provider), bitrate_constraint_(std::make_unique()), - balanced_constraint_(std::make_unique( - degradation_preference_provider_)), + balanced_constraint_( + std::make_unique(degradation_preference_provider_, + field_trials)), encode_usage_resource_( EncodeUsageResource::Create(std::move(overuse_detector))), quality_scaler_resource_(QualityScalerResource::Create()), @@ -283,11 +285,14 @@ VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager( encoder_stats_observer_(encoder_stats_observer), degradation_preference_(DegradationPreference::DISABLED), video_source_restrictions_(), + balanced_settings_(field_trials), clock_(clock), experiment_cpu_load_estimator_(experiment_cpu_load_estimator), initial_frame_dropper_( std::make_unique(quality_scaler_resource_)), quality_scaling_experiment_enabled_(QualityScalingExperiment::Enabled()), + pixel_limit_resource_experiment_enabled_( + field_trials.IsEnabled(kPixelLimitResourceFieldTrialName)), encoder_target_bitrate_bps_(absl::nullopt), quality_rampup_experiment_( QualityRampUpExperimentHelper::CreateIfEnabled(this, clock_)), @@ -303,14 +308,13 @@ VideoStreamEncoderResourceManager::~VideoStreamEncoderResourceManager() = default; void VideoStreamEncoderResourceManager::Initialize( - rtc::TaskQueue* encoder_queue) { + TaskQueueBase* encoder_queue) { RTC_DCHECK(!encoder_queue_); RTC_DCHECK(encoder_queue); encoder_queue_ = encoder_queue; - encode_usage_resource_->RegisterEncoderTaskQueue(encoder_queue_->Get()); - quality_scaler_resource_->RegisterEncoderTaskQueue(encoder_queue_->Get()); - bandwidth_quality_scaler_resource_->RegisterEncoderTaskQueue( - encoder_queue_->Get()); + encode_usage_resource_->RegisterEncoderTaskQueue(encoder_queue_); + quality_scaler_resource_->RegisterEncoderTaskQueue(encoder_queue_); + bandwidth_quality_scaler_resource_->RegisterEncoderTaskQueue(encoder_queue_); } void VideoStreamEncoderResourceManager::SetAdaptationProcessor( @@ -350,13 +354,13 @@ void VideoStreamEncoderResourceManager::MaybeInitializePixelLimitResource() { RTC_DCHECK_RUN_ON(encoder_queue_); RTC_DCHECK(adaptation_processor_); 
RTC_DCHECK(!pixel_limit_resource_); - if (!field_trial::IsEnabled(kPixelLimitResourceFieldTrialName)) { + if (!pixel_limit_resource_experiment_enabled_) { // The field trial is not running. return; } int max_pixels = 0; std::string pixel_limit_field_trial = - field_trial::FindFullName(kPixelLimitResourceFieldTrialName); + field_trials_.Lookup(kPixelLimitResourceFieldTrialName); if (sscanf(pixel_limit_field_trial.c_str(), "Enabled-%d", &max_pixels) != 1) { RTC_LOG(LS_ERROR) << "Couldn't parse " << kPixelLimitResourceFieldTrialName << " trial config: " << pixel_limit_field_trial; @@ -369,7 +373,7 @@ void VideoStreamEncoderResourceManager::MaybeInitializePixelLimitResource() { // resource is active for the lifetme of the stream (until // StopManagedResources() is called). pixel_limit_resource_ = - PixelLimitResource::Create(encoder_queue_->Get(), input_state_provider_); + PixelLimitResource::Create(encoder_queue_, input_state_provider_); pixel_limit_resource_->SetMaxPixels(max_pixels); AddResource(pixel_limit_resource_, VideoAdaptationReason::kCpu); } @@ -668,7 +672,7 @@ CpuOveruseOptions VideoStreamEncoderResourceManager::GetCpuOveruseOptions() // This is already ensured by the only caller of this method: // StartResourceAdaptation(). RTC_DCHECK(encoder_settings_.has_value()); - CpuOveruseOptions options; + CpuOveruseOptions options(field_trials_); // Hardware accelerated encoders are assumed to be pipelined; give them // additional overuse time. if (encoder_settings_->encoder_info().is_hardware_accelerated) { @@ -810,15 +814,29 @@ void VideoStreamEncoderResourceManager::OnQualityRampUp() { quality_rampup_experiment_.reset(); } -bool VideoStreamEncoderResourceManager::IsSimulcast( +bool VideoStreamEncoderResourceManager::IsSimulcastOrMultipleSpatialLayers( const VideoEncoderConfig& encoder_config) { const std::vector& simulcast_layers = encoder_config.simulcast_layers; - if (simulcast_layers.size() <= 1) { + if (simulcast_layers.empty()) { return false; } - if (simulcast_layers[0].active) { + absl::optional num_spatial_layers; + if (simulcast_layers[0].scalability_mode.has_value() && + encoder_config.number_of_streams == 1) { + num_spatial_layers = ScalabilityModeToNumSpatialLayers( + *simulcast_layers[0].scalability_mode); + } + + if (simulcast_layers.size() == 1) { + // Check if multiple spatial layers are used. + return num_spatial_layers && *num_spatial_layers > 1; + } + + bool svc_with_one_spatial_layer = + num_spatial_layers && *num_spatial_layers == 1; + if (simulcast_layers[0].active && !svc_with_one_spatial_layer) { // We can't distinguish between simulcast and singlecast when only the // lowest spatial layer is active. Treat this case as simulcast. 
return true; diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h index f1bc8854b2..e0de3f7d19 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h @@ -21,6 +21,7 @@ #include "absl/types/optional.h" #include "api/adaptation/resource.h" +#include "api/field_trials_view.h" #include "api/rtp_parameters.h" #include "api/scoped_refptr.h" #include "api/task_queue/task_queue_base.h" @@ -28,10 +29,8 @@ #include "api/video/video_adaptation_reason.h" #include "api/video/video_frame.h" #include "api/video/video_source_interface.h" -#include "api/video/video_stream_encoder_observer.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/video_encoder_config.h" #include "call/adaptation/resource_adaptation_processor_interface.h" #include "call/adaptation/video_stream_adapter.h" #include "call/adaptation/video_stream_input_state_provider.h" @@ -39,7 +38,6 @@ #include "rtc_base/ref_count.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" #include "video/adaptation/balanced_constraint.h" @@ -51,6 +49,8 @@ #include "video/adaptation/quality_rampup_experiment_helper.h" #include "video/adaptation/quality_scaler_resource.h" #include "video/adaptation/video_stream_encoder_resource.h" +#include "video/config/video_encoder_config.h" +#include "video/video_stream_encoder_observer.h" namespace webrtc { @@ -79,10 +79,11 @@ class VideoStreamEncoderResourceManager Clock* clock, bool experiment_cpu_load_estimator, std::unique_ptr overuse_detector, - DegradationPreferenceProvider* degradation_preference_provider); + DegradationPreferenceProvider* degradation_preference_provider, + const FieldTrialsView& field_trials); ~VideoStreamEncoderResourceManager() override; - void Initialize(rtc::TaskQueue* encoder_queue); + void Initialize(TaskQueueBase* encoder_queue); void SetAdaptationProcessor( ResourceAdaptationProcessorInterface* adaptation_processor, VideoStreamAdapter* stream_adapter); @@ -151,7 +152,8 @@ class VideoStreamEncoderResourceManager // QualityRampUpExperimentListener implementation. 
void OnQualityRampUp() override; - static bool IsSimulcast(const VideoEncoderConfig& encoder_config); + static bool IsSimulcastOrMultipleSpatialLayers( + const VideoEncoderConfig& encoder_config); private: class InitialFrameDropper; @@ -181,6 +183,7 @@ class VideoStreamEncoderResourceManager const std::map& active_counts); + const FieldTrialsView& field_trials_; DegradationPreferenceProvider* const degradation_preference_provider_; std::unique_ptr bitrate_constraint_ RTC_GUARDED_BY(encoder_queue_); @@ -192,7 +195,7 @@ class VideoStreamEncoderResourceManager const rtc::scoped_refptr bandwidth_quality_scaler_resource_; - rtc::TaskQueue* encoder_queue_; + TaskQueueBase* encoder_queue_; VideoStreamInputStateProvider* const input_state_provider_ RTC_GUARDED_BY(encoder_queue_); ResourceAdaptationProcessorInterface* adaptation_processor_; @@ -213,6 +216,8 @@ class VideoStreamEncoderResourceManager const std::unique_ptr initial_frame_dropper_ RTC_GUARDED_BY(encoder_queue_); const bool quality_scaling_experiment_enabled_ RTC_GUARDED_BY(encoder_queue_); + const bool pixel_limit_resource_experiment_enabled_ + RTC_GUARDED_BY(encoder_queue_); absl::optional encoder_target_bitrate_bps_ RTC_GUARDED_BY(encoder_queue_); absl::optional encoder_rates_ diff --git a/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.h b/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.h index ea2a9a0bef..36ac062e91 100644 --- a/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.h +++ b/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.h @@ -12,7 +12,7 @@ #define VIDEO_ALIGNMENT_ADJUSTER_H_ #include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/video_encoder_config.h" +#include "video/config/video_encoder_config.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.cc b/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.cc index 27a3c4cfc4..24cbaf8265 100644 --- a/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.cc +++ b/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.cc @@ -22,9 +22,10 @@ namespace webrtc { BufferedFrameDecryptor::BufferedFrameDecryptor( OnDecryptedFrameCallback* decrypted_frame_callback, - OnDecryptionStatusChangeCallback* decryption_status_change_callback) + OnDecryptionStatusChangeCallback* decryption_status_change_callback, + const FieldTrialsView& field_trials) : generic_descriptor_auth_experiment_( - !field_trial::IsDisabled("WebRTC-GenericDescriptorAuth")), + !field_trials.IsDisabled("WebRTC-GenericDescriptorAuth")), decrypted_frame_callback_(decrypted_frame_callback), decryption_status_change_callback_(decryption_status_change_callback) {} diff --git a/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.h b/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.h index f6dd8d8c2a..681f89a7f4 100644 --- a/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.h +++ b/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.h @@ -16,6 +16,7 @@ #include "api/crypto/crypto_options.h" #include "api/crypto/frame_decryptor_interface.h" +#include "api/field_trials_view.h" #include "modules/video_coding/frame_object.h" namespace webrtc { @@ -57,7 +58,9 @@ class BufferedFrameDecryptor final { // Constructs a new BufferedFrameDecryptor that can hold explicit BufferedFrameDecryptor( OnDecryptedFrameCallback* decrypted_frame_callback, - OnDecryptionStatusChangeCallback* decryption_status_change_callback); + OnDecryptionStatusChangeCallback* decryption_status_change_callback, + 
const FieldTrialsView& field_trials); + ~BufferedFrameDecryptor(); // This object cannot be copied. BufferedFrameDecryptor(const BufferedFrameDecryptor&) = delete; diff --git a/TMessagesProj/jni/voip/webrtc/video/call_stats.cc b/TMessagesProj/jni/voip/webrtc/video/call_stats.cc deleted file mode 100644 index 9fd6802c44..0000000000 --- a/TMessagesProj/jni/voip/webrtc/video/call_stats.cc +++ /dev/null @@ -1,228 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "video/call_stats.h" - -#include -#include - -#include "absl/algorithm/container.h" -#include "modules/utility/include/process_thread.h" -#include "rtc_base/checks.h" -#include "rtc_base/location.h" -#include "rtc_base/task_utils/to_queued_task.h" -#include "system_wrappers/include/metrics.h" - -namespace webrtc { -namespace { - -void RemoveOldReports(int64_t now, std::list* reports) { - static constexpr const int64_t kRttTimeoutMs = 1500; - reports->remove_if( - [&now](CallStats::RttTime& r) { return now - r.time > kRttTimeoutMs; }); -} - -int64_t GetMaxRttMs(const std::list& reports) { - int64_t max_rtt_ms = -1; - for (const CallStats::RttTime& rtt_time : reports) - max_rtt_ms = std::max(rtt_time.rtt, max_rtt_ms); - return max_rtt_ms; -} - -int64_t GetAvgRttMs(const std::list& reports) { - RTC_DCHECK(!reports.empty()); - int64_t sum = 0; - for (std::list::const_iterator it = reports.begin(); - it != reports.end(); ++it) { - sum += it->rtt; - } - return sum / reports.size(); -} - -int64_t GetNewAvgRttMs(const std::list& reports, - int64_t prev_avg_rtt) { - if (reports.empty()) - return -1; // Reset (invalid average). - - int64_t cur_rtt_ms = GetAvgRttMs(reports); - if (prev_avg_rtt == -1) - return cur_rtt_ms; // New initial average value. - - // Weight factor to apply to the average rtt. - // We weigh the old average at 70% against the new average (30%). - constexpr const float kWeightFactor = 0.3f; - return prev_avg_rtt * (1.0f - kWeightFactor) + cur_rtt_ms * kWeightFactor; -} - -// This class is used to de-register a Module from a ProcessThread to satisfy -// threading requirements of the Module (CallStats). -// The guarantee offered by TemporaryDeregistration is that while its in scope, -// no calls to `TimeUntilNextProcess` or `Process()` will occur and therefore -// synchronization with those methods, is not necessary. 
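The deleted call_stats.cc above (superseded by the task-queue based call_stats2.cc later in this diff) smooths RTT with a fixed 70/30 exponential blend: each update keeps 70% of the previous average and takes 30% from the newest per-interval average, with -1 as the "no estimate" sentinel. A standalone restatement of that arithmetic, for illustration only:

#include <cstdint>

// Mirrors GetNewAvgRttMs() from the removed call_stats.cc: -1 means "no
// previous estimate"; kWeightFactor weighs the newest interval average at 30%.
int64_t BlendRttMs(int64_t prev_avg_rtt_ms, int64_t interval_avg_rtt_ms) {
  if (prev_avg_rtt_ms == -1)
    return interval_avg_rtt_ms;  // first valid estimate
  constexpr float kWeightFactor = 0.3f;
  return static_cast<int64_t>(prev_avg_rtt_ms * (1.0f - kWeightFactor) +
                              interval_avg_rtt_ms * kWeightFactor);
}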
-class TemporaryDeregistration { - public: - TemporaryDeregistration(Module* module, - ProcessThread* process_thread, - bool thread_running) - : module_(module), - process_thread_(process_thread), - deregistered_(thread_running) { - if (thread_running) - process_thread_->DeRegisterModule(module_); - } - ~TemporaryDeregistration() { - if (deregistered_) - process_thread_->RegisterModule(module_, RTC_FROM_HERE); - } - - private: - Module* const module_; - ProcessThread* const process_thread_; - const bool deregistered_; -}; - -} // namespace - -CallStats::CallStats(Clock* clock, ProcessThread* process_thread) - : clock_(clock), - last_process_time_(clock_->TimeInMilliseconds()), - max_rtt_ms_(-1), - avg_rtt_ms_(-1), - sum_avg_rtt_ms_(0), - num_avg_rtt_(0), - time_of_first_rtt_ms_(-1), - process_thread_(process_thread), - process_thread_running_(false) { - RTC_DCHECK(process_thread_); - process_thread_checker_.Detach(); -} - -CallStats::~CallStats() { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); - RTC_DCHECK(!process_thread_running_); - RTC_DCHECK(observers_.empty()); - - UpdateHistograms(); -} - -int64_t CallStats::TimeUntilNextProcess() { - RTC_DCHECK_RUN_ON(&process_thread_checker_); - return last_process_time_ + kUpdateIntervalMs - clock_->TimeInMilliseconds(); -} - -void CallStats::Process() { - RTC_DCHECK_RUN_ON(&process_thread_checker_); - int64_t now = clock_->TimeInMilliseconds(); - last_process_time_ = now; - - // `avg_rtt_ms_` is allowed to be read on the process thread since that's the - // only thread that modifies the value. - int64_t avg_rtt_ms = avg_rtt_ms_; - RemoveOldReports(now, &reports_); - max_rtt_ms_ = GetMaxRttMs(reports_); - avg_rtt_ms = GetNewAvgRttMs(reports_, avg_rtt_ms); - { - MutexLock lock(&avg_rtt_ms_lock_); - avg_rtt_ms_ = avg_rtt_ms; - } - - // If there is a valid rtt, update all observers with the max rtt. - if (max_rtt_ms_ >= 0) { - RTC_DCHECK_GE(avg_rtt_ms, 0); - for (CallStatsObserver* observer : observers_) - observer->OnRttUpdate(avg_rtt_ms, max_rtt_ms_); - // Sum for Histogram of average RTT reported over the entire call. - sum_avg_rtt_ms_ += avg_rtt_ms; - ++num_avg_rtt_; - } -} - -void CallStats::ProcessThreadAttached(ProcessThread* process_thread) { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); - RTC_DCHECK(!process_thread || process_thread_ == process_thread); - process_thread_running_ = process_thread != nullptr; - - // Whether we just got attached or detached, we clear the - // `process_thread_checker_` so that it can be used to protect variables - // in either the process thread when it starts again, or UpdateHistograms() - // (mutually exclusive). - process_thread_checker_.Detach(); -} - -void CallStats::RegisterStatsObserver(CallStatsObserver* observer) { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); - TemporaryDeregistration deregister(this, process_thread_, - process_thread_running_); - - if (!absl::c_linear_search(observers_, observer)) - observers_.push_back(observer); -} - -void CallStats::DeregisterStatsObserver(CallStatsObserver* observer) { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); - TemporaryDeregistration deregister(this, process_thread_, - process_thread_running_); - observers_.remove(observer); -} - -int64_t CallStats::LastProcessedRtt() const { - // TODO(tommi): This currently gets called from the construction thread of - // Call as well as from the process thread. 
Look into restricting this to - // allow only reading this from the process thread (or TQ once we get there) - // so that the lock isn't necessary. - - MutexLock lock(&avg_rtt_ms_lock_); - return avg_rtt_ms_; -} - -void CallStats::OnRttUpdate(int64_t rtt) { - RTC_DCHECK_RUN_ON(&process_thread_checker_); - - int64_t now_ms = clock_->TimeInMilliseconds(); - reports_.push_back(RttTime(rtt, now_ms)); - if (time_of_first_rtt_ms_ == -1) - time_of_first_rtt_ms_ = now_ms; - - // Make sure Process() will be called and deliver the updates asynchronously. - last_process_time_ -= kUpdateIntervalMs; - process_thread_->WakeUp(this); -} - -void CallStats::UpdateHistograms() { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); - RTC_DCHECK(!process_thread_running_); - - // The extra scope is because we have two 'dcheck run on' thread checkers. - // This is a special case since it's safe to access variables on the current - // thread that normally are only touched on the process thread. - // Since we're not attached to the process thread and/or the process thread - // isn't running, it's OK to touch these variables here. - { - // This method is called on the ctor thread (usually from the dtor, unless - // a test calls it). It's a requirement that the function be called when - // the process thread is not running (a condition that's met at destruction - // time), and thanks to that, we don't need a lock to synchronize against - // it. - RTC_DCHECK_RUN_ON(&process_thread_checker_); - - if (time_of_first_rtt_ms_ == -1 || num_avg_rtt_ < 1) - return; - - int64_t elapsed_sec = - (clock_->TimeInMilliseconds() - time_of_first_rtt_ms_) / 1000; - if (elapsed_sec >= metrics::kMinRunTimeInSeconds) { - int64_t avg_rtt_ms = (sum_avg_rtt_ms_ + num_avg_rtt_ / 2) / num_avg_rtt_; - RTC_HISTOGRAM_COUNTS_10000( - "WebRTC.Video.AverageRoundTripTimeInMilliseconds", avg_rtt_ms); - } - } -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/call_stats.h b/TMessagesProj/jni/voip/webrtc/video/call_stats.h deleted file mode 100644 index 0c8e26741b..0000000000 --- a/TMessagesProj/jni/voip/webrtc/video/call_stats.h +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef VIDEO_CALL_STATS_H_ -#define VIDEO_CALL_STATS_H_ - -#include -#include - -#include "api/sequence_checker.h" -#include "modules/include/module.h" -#include "modules/include/module_common_types.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/synchronization/mutex.h" -#include "system_wrappers/include/clock.h" - -namespace webrtc { - -// CallStats keeps track of statistics for a call. -// TODO(webrtc:11489): Make call_stats_ not depend on ProcessThread and -// make callbacks on the worker thread (TQ). -class CallStats : public Module, public RtcpRttStats { - public: - // Time interval for updating the observers. - static constexpr int64_t kUpdateIntervalMs = 1000; - - CallStats(Clock* clock, ProcessThread* process_thread); - ~CallStats() override; - - // Registers/deregisters a new observer to receive statistics updates. - // Must be called from the construction thread. 
- void RegisterStatsObserver(CallStatsObserver* observer); - void DeregisterStatsObserver(CallStatsObserver* observer); - - // Expose `LastProcessedRtt()` from RtcpRttStats to the public interface, as - // it is the part of the API that is needed by direct users of CallStats. - // TODO(tommi): Threading or lifetime guarantees are not explicit in how - // CallStats is used as RtcpRttStats or how pointers are cached in a - // few different places (distributed via Call). It would be good to clarify - // from what thread/TQ calls to OnRttUpdate and LastProcessedRtt need to be - // allowed. - int64_t LastProcessedRtt() const override; - - // Exposed for tests to test histogram support. - void UpdateHistogramsForTest() { UpdateHistograms(); } - - // Helper struct keeping track of the time a rtt value is reported. - struct RttTime { - RttTime(int64_t new_rtt, int64_t rtt_time) : rtt(new_rtt), time(rtt_time) {} - const int64_t rtt; - const int64_t time; - }; - - private: - // RtcpRttStats implementation. - void OnRttUpdate(int64_t rtt) override; - - // Implements Module, to use the process thread. - int64_t TimeUntilNextProcess() override; - void Process() override; - - // TODO(tommi): Use this to know when we're attached to the process thread? - // Alternatively, inject that pointer via the ctor since the call_stats - // test code, isn't using a processthread atm. - void ProcessThreadAttached(ProcessThread* process_thread) override; - - // This method must only be called when the process thread is not - // running, and from the construction thread. - void UpdateHistograms(); - - Clock* const clock_; - - // The last time 'Process' resulted in statistic update. - int64_t last_process_time_ RTC_GUARDED_BY(process_thread_checker_); - // The last RTT in the statistics update (zero if there is no valid estimate). - int64_t max_rtt_ms_ RTC_GUARDED_BY(process_thread_checker_); - - // Accessed from random threads (seemingly). Consider atomic. - // `avg_rtt_ms_` is allowed to be read on the process thread without a lock. - // `avg_rtt_ms_lock_` must be held elsewhere for reading. - // `avg_rtt_ms_lock_` must be held on the process thread for writing. - int64_t avg_rtt_ms_; - - // Protects `avg_rtt_ms_`. - mutable Mutex avg_rtt_ms_lock_; - - // `sum_avg_rtt_ms_`, `num_avg_rtt_` and `time_of_first_rtt_ms_` are only used - // on the ProcessThread when running. When the Process Thread is not running, - // (and only then) they can be used in UpdateHistograms(), usually called from - // the dtor. - int64_t sum_avg_rtt_ms_ RTC_GUARDED_BY(process_thread_checker_); - int64_t num_avg_rtt_ RTC_GUARDED_BY(process_thread_checker_); - int64_t time_of_first_rtt_ms_ RTC_GUARDED_BY(process_thread_checker_); - - // All Rtt reports within valid time interval, oldest first. - std::list reports_ RTC_GUARDED_BY(process_thread_checker_); - - // Observers getting stats reports. - // When attached to ProcessThread, this is read-only. In order to allow - // modification, we detach from the process thread while the observer - // list is updated, to avoid races. This allows us to not require a lock - // for the observers_ list, which makes the most common case lock free. 
- std::list observers_; - - SequenceChecker construction_thread_checker_; - SequenceChecker process_thread_checker_; - ProcessThread* const process_thread_; - bool process_thread_running_ RTC_GUARDED_BY(construction_thread_checker_); - - RTC_DISALLOW_COPY_AND_ASSIGN(CallStats); -}; - -} // namespace webrtc - -#endif // VIDEO_CALL_STATS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/call_stats2.cc b/TMessagesProj/jni/voip/webrtc/video/call_stats2.cc index 2b7c61e0f8..ef575d2667 100644 --- a/TMessagesProj/jni/voip/webrtc/video/call_stats2.cc +++ b/TMessagesProj/jni/voip/webrtc/video/call_stats2.cc @@ -15,10 +15,7 @@ #include #include "absl/algorithm/container.h" -#include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "system_wrappers/include/metrics.h" namespace webrtc { @@ -148,7 +145,7 @@ void CallStats::OnRttUpdate(int64_t rtt) { if (task_queue_->IsCurrent()) { update(); } else { - task_queue_->PostTask(ToQueuedTask(task_safety_, std::move(update))); + task_queue_->PostTask(SafeTask(task_safety_.flag(), std::move(update))); } } diff --git a/TMessagesProj/jni/voip/webrtc/video/call_stats2.h b/TMessagesProj/jni/voip/webrtc/video/call_stats2.h index 74bd3482da..7e941d1e75 100644 --- a/TMessagesProj/jni/voip/webrtc/video/call_stats2.h +++ b/TMessagesProj/jni/voip/webrtc/video/call_stats2.h @@ -14,12 +14,11 @@ #include #include +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" #include "api/units/timestamp.h" #include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/task_utils/repeating_task.h" #include "system_wrappers/include/clock.h" @@ -35,6 +34,9 @@ class CallStats { CallStats(Clock* clock, TaskQueueBase* task_queue); ~CallStats(); + CallStats(const CallStats&) = delete; + CallStats& operator=(const CallStats&) = delete; + // Ensure that necessary repeating tasks are started. void EnsureStarted(); @@ -125,8 +127,6 @@ class CallStats { // Used to signal destruction to potentially pending tasks. ScopedTaskSafety task_safety_; - - RTC_DISALLOW_COPY_AND_ASSIGN(CallStats); }; } // namespace internal diff --git a/TMessagesProj/jni/voip/webrtc/video/config/encoder_stream_factory.cc b/TMessagesProj/jni/voip/webrtc/video/config/encoder_stream_factory.cc new file mode 100644 index 0000000000..fceadf09b4 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/config/encoder_stream_factory.cc @@ -0,0 +1,465 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "video/config/encoder_stream_factory.h" + +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/strings/match.h" +#include "api/video/video_codec_constants.h" +#include "media/base/media_constants.h" +#include "media/base/video_adapter.h" +#include "modules/video_coding/codecs/vp9/svc_config.h" +#include "rtc_base/experiments/min_video_bitrate_experiment.h" +#include "rtc_base/experiments/normalize_simulcast_size_experiment.h" +#include "rtc_base/logging.h" +#include "video/config/simulcast.h" + +namespace cricket { +namespace { + +const int kMinLayerSize = 16; + +int ScaleDownResolution(int resolution, + double scale_down_by, + int min_resolution) { + // Resolution is never scalied down to smaller than min_resolution. + // If the input resolution is already smaller than min_resolution, + // no scaling should be done at all. + if (resolution <= min_resolution) + return resolution; + return std::max(static_cast(resolution / scale_down_by + 0.5), + min_resolution); +} + +bool PowerOfTwo(int value) { + return (value > 0) && ((value & (value - 1)) == 0); +} + +bool IsScaleFactorsPowerOfTwo(const webrtc::VideoEncoderConfig& config) { + for (const auto& layer : config.simulcast_layers) { + double scale = std::max(layer.scale_resolution_down_by, 1.0); + if (std::round(scale) != scale || !PowerOfTwo(scale)) { + return false; + } + } + return true; +} + +bool IsTemporalLayersSupported(const std::string& codec_name) { + return absl::EqualsIgnoreCase(codec_name, kVp8CodecName) || + absl::EqualsIgnoreCase(codec_name, kVp9CodecName) || + absl::EqualsIgnoreCase(codec_name, kAv1CodecName); +} + +size_t FindRequiredActiveLayers( + const webrtc::VideoEncoderConfig& encoder_config) { + // Need enough layers so that at least the first active one is present. + for (size_t i = 0; i < encoder_config.number_of_streams; ++i) { + if (encoder_config.simulcast_layers[i].active) { + return i + 1; + } + } + return 0; +} + +// The selected thresholds for QVGA and VGA corresponded to a QP around 10. +// The change in QP declined above the selected bitrates. +static int GetMaxDefaultVideoBitrateKbps(int width, + int height, + bool is_screenshare) { + int max_bitrate; + if (width * height <= 320 * 240) { + max_bitrate = 600; + } else if (width * height <= 640 * 480) { + max_bitrate = 1700; + } else if (width * height <= 960 * 540) { + max_bitrate = 2000; + } else { + max_bitrate = 2500; + } + if (is_screenshare) + max_bitrate = std::max(max_bitrate, 1200); + return max_bitrate; +} + +} // namespace + +// TODO(bugs.webrtc.org/8785): Consider removing max_qp as member of +// EncoderStreamFactory and instead set this value individually for each stream +// in the VideoEncoderConfig.simulcast_layers. +EncoderStreamFactory::EncoderStreamFactory(std::string codec_name, + int max_qp, + bool is_screenshare, + bool conference_mode) + : codec_name_(codec_name), + max_qp_(max_qp), + is_screenshare_(is_screenshare), + conference_mode_(conference_mode), + trials_(fallback_trials_), + encoder_info_requested_resolution_alignment_(1) {} + +EncoderStreamFactory::EncoderStreamFactory( + std::string codec_name, + int max_qp, + bool is_screenshare, + bool conference_mode, + const webrtc::VideoEncoder::EncoderInfo& encoder_info, + absl::optional restrictions, + const webrtc::FieldTrialsView* trials) + : codec_name_(codec_name), + max_qp_(max_qp), + is_screenshare_(is_screenshare), + conference_mode_(conference_mode), + trials_(trials ? 
*trials : fallback_trials_), + encoder_info_requested_resolution_alignment_( + encoder_info.requested_resolution_alignment), + restrictions_(restrictions) {} + +std::vector EncoderStreamFactory::CreateEncoderStreams( + int frame_width, + int frame_height, + const webrtc::VideoEncoderConfig& encoder_config) { + RTC_DCHECK_GT(encoder_config.number_of_streams, 0); + RTC_DCHECK_GE(encoder_config.simulcast_layers.size(), + encoder_config.number_of_streams); + + const absl::optional experimental_min_bitrate = + GetExperimentalMinVideoBitrate(encoder_config.codec_type); + + if (encoder_config.number_of_streams > 1 || + ((absl::EqualsIgnoreCase(codec_name_, kVp8CodecName) || + absl::EqualsIgnoreCase(codec_name_, kH264CodecName)) && + is_screenshare_ && conference_mode_)) { + return CreateSimulcastOrConferenceModeScreenshareStreams( + frame_width, frame_height, encoder_config, experimental_min_bitrate); + } + + return CreateDefaultVideoStreams(frame_width, frame_height, encoder_config, + experimental_min_bitrate); +} + +std::vector +EncoderStreamFactory::CreateDefaultVideoStreams( + int width, + int height, + const webrtc::VideoEncoderConfig& encoder_config, + const absl::optional& experimental_min_bitrate) const { + std::vector layers; + + // For unset max bitrates set default bitrate for non-simulcast. + int max_bitrate_bps = + (encoder_config.max_bitrate_bps > 0) + ? encoder_config.max_bitrate_bps + : GetMaxDefaultVideoBitrateKbps(width, height, is_screenshare_) * + 1000; + + int min_bitrate_bps = + experimental_min_bitrate + ? rtc::saturated_cast(experimental_min_bitrate->bps()) + : webrtc::kDefaultMinVideoBitrateBps; + if (encoder_config.simulcast_layers[0].min_bitrate_bps > 0) { + // Use set min bitrate. + min_bitrate_bps = encoder_config.simulcast_layers[0].min_bitrate_bps; + // If only min bitrate is configured, make sure max is above min. + if (encoder_config.max_bitrate_bps <= 0) + max_bitrate_bps = std::max(min_bitrate_bps, max_bitrate_bps); + } + int max_framerate = (encoder_config.simulcast_layers[0].max_framerate > 0) + ? encoder_config.simulcast_layers[0].max_framerate + : kDefaultVideoMaxFramerate; + + webrtc::VideoStream layer; + layer.width = width; + layer.height = height; + layer.max_framerate = max_framerate; + layer.requested_resolution = + encoder_config.simulcast_layers[0].requested_resolution; + // Note: VP9 seems to have be sending if any layer is active, + // (see `UpdateSendState`) and still use parameters only from + // encoder_config.simulcast_layers[0]. + layer.active = absl::c_any_of(encoder_config.simulcast_layers, + [](const auto& layer) { return layer.active; }); + + if (encoder_config.simulcast_layers[0].requested_resolution) { + auto res = GetLayerResolutionFromRequestedResolution( + width, height, + *encoder_config.simulcast_layers[0].requested_resolution); + layer.width = res.width; + layer.height = res.height; + } else if (encoder_config.simulcast_layers[0].scale_resolution_down_by > 1.) { + layer.width = ScaleDownResolution( + layer.width, + encoder_config.simulcast_layers[0].scale_resolution_down_by, + kMinLayerSize); + layer.height = ScaleDownResolution( + layer.height, + encoder_config.simulcast_layers[0].scale_resolution_down_by, + kMinLayerSize); + } + + if (absl::EqualsIgnoreCase(codec_name_, kVp9CodecName)) { + RTC_DCHECK(encoder_config.encoder_specific_settings); + // Use VP9 SVC layering from codec settings which might be initialized + // though field trial in ConfigureVideoEncoderSettings. 
+ webrtc::VideoCodecVP9 vp9_settings; + encoder_config.encoder_specific_settings->FillVideoCodecVp9(&vp9_settings); + layer.num_temporal_layers = vp9_settings.numberOfTemporalLayers; + + // Number of spatial layers is signalled differently from different call + // sites (sigh), pick the max as we are interested in the upper bound. + int num_spatial_layers = + std::max({encoder_config.simulcast_layers.size(), + encoder_config.spatial_layers.size(), + size_t{vp9_settings.numberOfSpatialLayers}}); + + if (width * height > 0 && + (layer.num_temporal_layers > 1u || num_spatial_layers > 1)) { + // In SVC mode, the VP9 max bitrate is determined by SvcConfig, instead of + // GetMaxDefaultVideoBitrateKbps(). + std::vector svc_layers = + webrtc::GetSvcConfig(width, height, max_framerate, + /*first_active_layer=*/0, num_spatial_layers, + *layer.num_temporal_layers, is_screenshare_); + int sum_max_bitrates_kbps = 0; + for (const webrtc::SpatialLayer& spatial_layer : svc_layers) { + sum_max_bitrates_kbps += spatial_layer.maxBitrate; + } + RTC_DCHECK_GE(sum_max_bitrates_kbps, 0); + if (encoder_config.max_bitrate_bps <= 0) { + max_bitrate_bps = sum_max_bitrates_kbps * 1000; + } else { + max_bitrate_bps = + std::min(max_bitrate_bps, sum_max_bitrates_kbps * 1000); + } + max_bitrate_bps = std::max(min_bitrate_bps, max_bitrate_bps); + } + } + + // In the case that the application sets a max bitrate that's lower than the + // min bitrate, we adjust it down (see bugs.webrtc.org/9141). + layer.min_bitrate_bps = std::min(min_bitrate_bps, max_bitrate_bps); + if (encoder_config.simulcast_layers[0].target_bitrate_bps <= 0) { + layer.target_bitrate_bps = max_bitrate_bps; + } else { + layer.target_bitrate_bps = std::min( + encoder_config.simulcast_layers[0].target_bitrate_bps, max_bitrate_bps); + } + layer.max_bitrate_bps = max_bitrate_bps; + layer.max_qp = max_qp_; + layer.bitrate_priority = encoder_config.bitrate_priority; + + if (IsTemporalLayersSupported(codec_name_)) { + // Use configured number of temporal layers if set. + if (encoder_config.simulcast_layers[0].num_temporal_layers) { + layer.num_temporal_layers = + *encoder_config.simulcast_layers[0].num_temporal_layers; + } + } + layer.scalability_mode = encoder_config.simulcast_layers[0].scalability_mode; + layers.push_back(layer); + return layers; +} + +std::vector +EncoderStreamFactory::CreateSimulcastOrConferenceModeScreenshareStreams( + int width, + int height, + const webrtc::VideoEncoderConfig& encoder_config, + const absl::optional& experimental_min_bitrate) const { + std::vector layers; + + const bool temporal_layers_supported = + absl::EqualsIgnoreCase(codec_name_, kVp8CodecName) || + absl::EqualsIgnoreCase(codec_name_, kH264CodecName); + // Use legacy simulcast screenshare if conference mode is explicitly enabled + // or use the regular simulcast configuration path which is generic. + layers = GetSimulcastConfig(FindRequiredActiveLayers(encoder_config), + encoder_config.number_of_streams, width, height, + encoder_config.bitrate_priority, max_qp_, + is_screenshare_ && conference_mode_, + temporal_layers_supported, trials_); + // Allow an experiment to override the minimum bitrate for the lowest + // spatial layer. The experiment's configuration has the lowest priority. + if (experimental_min_bitrate) { + layers[0].min_bitrate_bps = + rtc::saturated_cast(experimental_min_bitrate->bps()); + } + // Update the active simulcast layers and configured bitrates. 
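For VP9 above, CreateDefaultVideoStreams() derives the stream's max bitrate from the SVC configuration rather than from GetMaxDefaultVideoBitrateKbps(): it sums the per-spatial-layer max bitrates returned by GetSvcConfig() and caps the result with any application-configured maximum, never dropping below the minimum. A pure-logic restatement of that capping step; the function name and parameters are invented for illustration:

```cpp
#include <algorithm>

// Mirrors the SVC branch above: `sum_svc_max_bitrates_kbps` is the sum of
// SpatialLayer::maxBitrate over the layers returned by webrtc::GetSvcConfig().
int CapSvcMaxBitrateBps(int sum_svc_max_bitrates_kbps,
                        int configured_max_bitrate_bps,  // <= 0 means unset
                        int min_bitrate_bps) {
  int max_bitrate_bps = sum_svc_max_bitrates_kbps * 1000;
  if (configured_max_bitrate_bps > 0) {
    max_bitrate_bps = std::min(configured_max_bitrate_bps, max_bitrate_bps);
  }
  // Keep the max at or above the (possibly experiment-supplied) minimum.
  return std::max(min_bitrate_bps, max_bitrate_bps);
}
```

The per-layer update loop for the simulcast/screenshare path continues below.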
+ bool is_highest_layer_max_bitrate_configured = false; + const bool has_scale_resolution_down_by = absl::c_any_of( + encoder_config.simulcast_layers, [](const webrtc::VideoStream& layer) { + return layer.scale_resolution_down_by != -1.; + }); + + bool default_scale_factors_used = true; + if (has_scale_resolution_down_by) { + default_scale_factors_used = IsScaleFactorsPowerOfTwo(encoder_config); + } + const bool norm_size_configured = + webrtc::NormalizeSimulcastSizeExperiment::GetBase2Exponent().has_value(); + const int normalized_width = + (default_scale_factors_used || norm_size_configured) && + (width >= kMinLayerSize) + ? NormalizeSimulcastSize(width, encoder_config.number_of_streams) + : width; + const int normalized_height = + (default_scale_factors_used || norm_size_configured) && + (height >= kMinLayerSize) + ? NormalizeSimulcastSize(height, encoder_config.number_of_streams) + : height; + for (size_t i = 0; i < layers.size(); ++i) { + layers[i].active = encoder_config.simulcast_layers[i].active; + layers[i].scalability_mode = + encoder_config.simulcast_layers[i].scalability_mode; + layers[i].requested_resolution = + encoder_config.simulcast_layers[i].requested_resolution; + // Update with configured num temporal layers if supported by codec. + if (encoder_config.simulcast_layers[i].num_temporal_layers && + IsTemporalLayersSupported(codec_name_)) { + layers[i].num_temporal_layers = + *encoder_config.simulcast_layers[i].num_temporal_layers; + } + if (encoder_config.simulcast_layers[i].max_framerate > 0) { + layers[i].max_framerate = + encoder_config.simulcast_layers[i].max_framerate; + } + if (encoder_config.simulcast_layers[i].requested_resolution.has_value()) { + auto res = GetLayerResolutionFromRequestedResolution( + normalized_width, normalized_height, + *encoder_config.simulcast_layers[i].requested_resolution); + layers[i].width = res.width; + layers[i].height = res.height; + } else if (has_scale_resolution_down_by) { + const double scale_resolution_down_by = std::max( + encoder_config.simulcast_layers[i].scale_resolution_down_by, 1.0); + layers[i].width = ScaleDownResolution( + normalized_width, scale_resolution_down_by, kMinLayerSize); + layers[i].height = ScaleDownResolution( + normalized_height, scale_resolution_down_by, kMinLayerSize); + } + // Update simulcast bitrates with configured min and max bitrate. + if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0) { + layers[i].min_bitrate_bps = + encoder_config.simulcast_layers[i].min_bitrate_bps; + } + if (encoder_config.simulcast_layers[i].max_bitrate_bps > 0) { + layers[i].max_bitrate_bps = + encoder_config.simulcast_layers[i].max_bitrate_bps; + } + if (encoder_config.simulcast_layers[i].target_bitrate_bps > 0) { + layers[i].target_bitrate_bps = + encoder_config.simulcast_layers[i].target_bitrate_bps; + } + if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0 && + encoder_config.simulcast_layers[i].max_bitrate_bps > 0) { + // Min and max bitrate are configured. + // Set target to 3/4 of the max bitrate (or to max if below min). + if (encoder_config.simulcast_layers[i].target_bitrate_bps <= 0) + layers[i].target_bitrate_bps = layers[i].max_bitrate_bps * 3 / 4; + if (layers[i].target_bitrate_bps < layers[i].min_bitrate_bps) + layers[i].target_bitrate_bps = layers[i].max_bitrate_bps; + } else if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0) { + // Only min bitrate is configured, make sure target/max are above min. 
+ layers[i].target_bitrate_bps = + std::max(layers[i].target_bitrate_bps, layers[i].min_bitrate_bps); + layers[i].max_bitrate_bps = + std::max(layers[i].max_bitrate_bps, layers[i].min_bitrate_bps); + } else if (encoder_config.simulcast_layers[i].max_bitrate_bps > 0) { + // Only max bitrate is configured, make sure min/target are below max. + // Keep target bitrate if it is set explicitly in encoding config. + // Otherwise set target bitrate to 3/4 of the max bitrate + // or the one calculated from GetSimulcastConfig() which is larger. + layers[i].min_bitrate_bps = + std::min(layers[i].min_bitrate_bps, layers[i].max_bitrate_bps); + if (encoder_config.simulcast_layers[i].target_bitrate_bps <= 0) { + layers[i].target_bitrate_bps = std::max( + layers[i].target_bitrate_bps, layers[i].max_bitrate_bps * 3 / 4); + } + layers[i].target_bitrate_bps = std::max( + std::min(layers[i].target_bitrate_bps, layers[i].max_bitrate_bps), + layers[i].min_bitrate_bps); + } + if (i == layers.size() - 1) { + is_highest_layer_max_bitrate_configured = + encoder_config.simulcast_layers[i].max_bitrate_bps > 0; + } + } + if (!is_screenshare_ && !is_highest_layer_max_bitrate_configured && + encoder_config.max_bitrate_bps > 0) { + // No application-configured maximum for the largest layer. + // If there is bitrate leftover, give it to the largest layer. + BoostMaxSimulcastLayer( + webrtc::DataRate::BitsPerSec(encoder_config.max_bitrate_bps), &layers); + } + + // Sort the layers by max_bitrate_bps, they might not always be from + // smallest to biggest + std::vector index(layers.size()); + std::iota(index.begin(), index.end(), 0); + std::stable_sort(index.begin(), index.end(), [&layers](size_t a, size_t b) { + return layers[a].max_bitrate_bps < layers[b].max_bitrate_bps; + }); + + if (!layers[index[0]].active) { + // Adjust min bitrate of the first active layer to allow it to go as low as + // the lowest (now inactive) layer could. + // Otherwise, if e.g. a single HD stream is active, it would have 600kbps + // min bitrate, which would always be allocated to the stream. + // This would lead to congested network, dropped frames and overall bad + // experience. 
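The branch structure above reduces to a small per-layer reconciliation rule: explicit values win, an unset target defaults to 3/4 of the max, and min <= target <= max is restored at the end of the "only max" case. A self-contained restatement under invented names (LayerBitrates and ReconcileLayerBitrates are not part of the patch; inputs of 0 or less mean "unset"):

```cpp
#include <algorithm>

struct LayerBitrates {
  int min_bps;
  int target_bps;
  int max_bps;
};

// `layer` holds the defaults produced by GetSimulcastConfig(); the cfg_*
// arguments come from encoder_config.simulcast_layers[i].
LayerBitrates ReconcileLayerBitrates(LayerBitrates layer,
                                     int cfg_min_bps,
                                     int cfg_target_bps,
                                     int cfg_max_bps) {
  if (cfg_min_bps > 0) layer.min_bps = cfg_min_bps;
  if (cfg_max_bps > 0) layer.max_bps = cfg_max_bps;
  if (cfg_target_bps > 0) layer.target_bps = cfg_target_bps;

  if (cfg_min_bps > 0 && cfg_max_bps > 0) {
    // Both ends configured: default target to 3/4 of max, never below min.
    if (cfg_target_bps <= 0) layer.target_bps = layer.max_bps * 3 / 4;
    if (layer.target_bps < layer.min_bps) layer.target_bps = layer.max_bps;
  } else if (cfg_min_bps > 0) {
    // Only min configured: lift target and max up to at least min.
    layer.target_bps = std::max(layer.target_bps, layer.min_bps);
    layer.max_bps = std::max(layer.max_bps, layer.min_bps);
  } else if (cfg_max_bps > 0) {
    // Only max configured: pull min below max; keep an explicit target,
    // otherwise take the larger of the default and 3/4 of max, then clamp.
    layer.min_bps = std::min(layer.min_bps, layer.max_bps);
    if (cfg_target_bps <= 0) {
      layer.target_bps = std::max(layer.target_bps, layer.max_bps * 3 / 4);
    }
    layer.target_bps =
        std::max(std::min(layer.target_bps, layer.max_bps), layer.min_bps);
  }
  return layer;
}
```

With the layers sorted by max bitrate, the code below then copies the lowest configured min bitrate onto the first active layer so that an inactive low layer does not inflate the allocation.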
+ + const int min_configured_bitrate = layers[index[0]].min_bitrate_bps; + for (size_t i = 0; i < layers.size(); ++i) { + if (layers[index[i]].active) { + layers[index[i]].min_bitrate_bps = min_configured_bitrate; + break; + } + } + } + + return layers; +} + +webrtc::Resolution +EncoderStreamFactory::GetLayerResolutionFromRequestedResolution( + int frame_width, + int frame_height, + webrtc::Resolution requested_resolution) const { + VideoAdapter adapter(encoder_info_requested_resolution_alignment_); + adapter.OnOutputFormatRequest(requested_resolution.ToPair(), + requested_resolution.PixelCount(), + absl::nullopt); + if (restrictions_) { + rtc::VideoSinkWants wants; + wants.is_active = true; + wants.target_pixel_count = restrictions_->target_pixels_per_frame(); + wants.max_pixel_count = + rtc::dchecked_cast(restrictions_->max_pixels_per_frame().value_or( + std::numeric_limits::max())); + wants.aggregates.emplace(rtc::VideoSinkWants::Aggregates()); + wants.resolution_alignment = encoder_info_requested_resolution_alignment_; + adapter.OnSinkWants(wants); + } + int cropped_width, cropped_height; + int out_width = 0, out_height = 0; + if (!adapter.AdaptFrameResolution(frame_width, frame_height, 0, + &cropped_width, &cropped_height, &out_width, + &out_height)) { + RTC_LOG(LS_ERROR) << "AdaptFrameResolution returned false!"; + } + return {.width = out_width, .height = out_height}; +} + +} // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/video/config/encoder_stream_factory.h b/TMessagesProj/jni/voip/webrtc/video/config/encoder_stream_factory.h new file mode 100644 index 0000000000..37abb93876 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/config/encoder_stream_factory.h @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef VIDEO_CONFIG_ENCODER_STREAM_FACTORY_H_ +#define VIDEO_CONFIG_ENCODER_STREAM_FACTORY_H_ + +#include +#include + +#include "api/transport/field_trial_based_config.h" +#include "api/units/data_rate.h" +#include "api/video_codecs/video_encoder.h" +#include "call/adaptation/video_source_restrictions.h" +#include "video/config/video_encoder_config.h" + +namespace cricket { + +class EncoderStreamFactory + : public webrtc::VideoEncoderConfig::VideoStreamFactoryInterface { + public: + // Note: this constructor is used by testcase in downstream. 
+ EncoderStreamFactory(std::string codec_name, + int max_qp, + bool is_screenshare, + bool conference_mode); + + EncoderStreamFactory(std::string codec_name, + int max_qp, + bool is_screenshare, + bool conference_mode, + const webrtc::VideoEncoder::EncoderInfo& encoder_info, + absl::optional + restrictions = absl::nullopt, + const webrtc::FieldTrialsView* trials = nullptr); + + std::vector CreateEncoderStreams( + int width, + int height, + const webrtc::VideoEncoderConfig& encoder_config) override; + + private: + std::vector CreateDefaultVideoStreams( + int width, + int height, + const webrtc::VideoEncoderConfig& encoder_config, + const absl::optional& experimental_min_bitrate) const; + + std::vector + CreateSimulcastOrConferenceModeScreenshareStreams( + int width, + int height, + const webrtc::VideoEncoderConfig& encoder_config, + const absl::optional& experimental_min_bitrate) const; + + webrtc::Resolution GetLayerResolutionFromRequestedResolution( + int in_frame_width, + int in_frame_height, + webrtc::Resolution requested_resolution) const; + + const std::string codec_name_; + const int max_qp_; + const bool is_screenshare_; + // Allows a screenshare specific configuration, which enables temporal + // layering and various settings. + const bool conference_mode_; + const webrtc::FieldTrialBasedConfig fallback_trials_; + const webrtc::FieldTrialsView& trials_; + const int encoder_info_requested_resolution_alignment_; + const absl::optional restrictions_; +}; + +} // namespace cricket + +#endif // VIDEO_CONFIG_ENCODER_STREAM_FACTORY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.cc b/TMessagesProj/jni/voip/webrtc/video/config/simulcast.cc similarity index 88% rename from TMessagesProj/jni/voip/webrtc/media/engine/simulcast.cc rename to TMessagesProj/jni/voip/webrtc/video/config/simulcast.cc index 3909685995..2bd4ac04c3 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.cc +++ b/TMessagesProj/jni/voip/webrtc/video/config/simulcast.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "media/engine/simulcast.h" +#include "video/config/simulcast.h" #include #include @@ -33,24 +33,11 @@ namespace cricket { namespace { -constexpr webrtc::DataRate Interpolate(const webrtc::DataRate& a, - const webrtc::DataRate& b, - float rate) { - return a * (1.0 - rate) + b * rate; -} - constexpr char kUseLegacySimulcastLayerLimitFieldTrial[] = "WebRTC-LegacySimulcastLayerLimit"; constexpr double kDefaultMaxRoundupRate = 0.1; -// TODO(webrtc:12415): Flip this to a kill switch when this feature launches. -bool EnableLowresBitrateInterpolation( - const webrtc::WebRtcKeyValueConfig& trials) { - return absl::StartsWith( - trials.Lookup("WebRTC-LowresSimulcastBitrateInterpolation"), "Enabled"); -} - // Limits for legacy conference screensharing mode. Currently used for the // lower of the two simulcast streams. 
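The Interpolate() and EnableLowresBitrateInterpolation() helpers removed in this simulcast.cc hunk are re-added a little further down, after the kSimulcastFormats table, so they stay inside the anonymous namespace (EnableLowresBitrateInterpolation now takes a FieldTrialsView). As a usage illustration of the interpolation itself, with operand values invented for the example rather than taken from kSimulcastFormats:

```cpp
#include "api/units/data_rate.h"

// Local copy for illustration; in simulcast.cc the helper lives in an
// anonymous namespace.
webrtc::DataRate InterpolateRate(webrtc::DataRate a,
                                 webrtc::DataRate b,
                                 float rate) {
  return a * (1.0 - rate) + b * rate;
}

webrtc::DataRate Example() {
  // 25% of the way from 800 kbps to 1200 kbps -> roughly 900 kbps.
  return InterpolateRate(webrtc::DataRate::KilobitsPerSec(800),
                         webrtc::DataRate::KilobitsPerSec(1200), 0.25f);
}
```

The legacy conference-screenshare bitrate constants referenced by the comment above follow next.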
constexpr webrtc::DataRate kScreenshareDefaultTl0Bitrate = @@ -65,7 +52,9 @@ constexpr webrtc::DataRate kScreenshareHighStreamMinBitrate = constexpr webrtc::DataRate kScreenshareHighStreamMaxBitrate = webrtc::DataRate::KilobitsPerSec(1250); -} // namespace +constexpr int kDefaultNumTemporalLayers = 3; +constexpr int kScreenshareMaxSimulcastLayers = 2; +constexpr int kScreenshareTemporalLayers = 2; struct SimulcastFormat { int width; @@ -113,6 +102,18 @@ constexpr const SimulcastFormat kSimulcastFormats[] = { webrtc::DataRate::KilobitsPerSec(0), webrtc::DataRate::KilobitsPerSec(30)}}; +constexpr webrtc::DataRate Interpolate(const webrtc::DataRate& a, + const webrtc::DataRate& b, + float rate) { + return a * (1.0 - rate) + b * rate; +} + +// TODO(webrtc:12415): Flip this to a kill switch when this feature launches. +bool EnableLowresBitrateInterpolation(const webrtc::FieldTrialsView& trials) { + return absl::StartsWith( + trials.Lookup("WebRTC-LowresSimulcastBitrateInterpolation"), "Enabled"); +} + std::vector GetSimulcastFormats( bool enable_lowres_bitrate_interpolation) { std::vector formats; @@ -129,28 +130,14 @@ std::vector GetSimulcastFormats( return formats; } -const int kMaxScreenshareSimulcastLayers = 2; - // Multiway: Number of temporal layers for each simulcast stream. -int DefaultNumberOfTemporalLayers(int simulcast_id, - bool screenshare, - const webrtc::WebRtcKeyValueConfig& trials) { - RTC_CHECK_GE(simulcast_id, 0); - RTC_CHECK_LT(simulcast_id, webrtc::kMaxSimulcastStreams); - - const int kDefaultNumTemporalLayers = 3; - const int kDefaultNumScreenshareTemporalLayers = 2; - int default_num_temporal_layers = screenshare - ? kDefaultNumScreenshareTemporalLayers - : kDefaultNumTemporalLayers; - +int DefaultNumberOfTemporalLayers(const webrtc::FieldTrialsView& trials) { const std::string group_name = - screenshare ? trials.Lookup("WebRTC-VP8ScreenshareTemporalLayers") - : trials.Lookup("WebRTC-VP8ConferenceTemporalLayers"); + trials.Lookup("WebRTC-VP8ConferenceTemporalLayers"); if (group_name.empty()) - return default_num_temporal_layers; + return kDefaultNumTemporalLayers; - int num_temporal_layers = default_num_temporal_layers; + int num_temporal_layers = kDefaultNumTemporalLayers; if (sscanf(group_name.c_str(), "%d", &num_temporal_layers) == 1 && num_temporal_layers > 0 && num_temporal_layers <= webrtc::kMaxTemporalStreams) { @@ -161,7 +148,7 @@ int DefaultNumberOfTemporalLayers(int simulcast_id, "incorrect value: " << group_name; - return default_num_temporal_layers; + return kDefaultNumTemporalLayers; } int FindSimulcastFormatIndex(int width, @@ -179,6 +166,8 @@ int FindSimulcastFormatIndex(int width, return -1; } +} // namespace + // Round size to nearest simulcast-friendly size. // Simulcast stream width and height must both be dividable by // |2 ^ (simulcast_layers - 1)|. 
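The rounding rule stated in the comment above (stream width and height divisible by 2^(simulcast_layers - 1)) can be illustrated with a small helper. This is a simplified sketch of the divisibility requirement only; the real NormalizeSimulcastSize(), whose body falls outside these hunks, additionally honors NormalizeSimulcastSizeExperiment for an overriding base-2 exponent:

```cpp
// Simplified illustration of the rule described above; assumes
// simulcast_layers >= 1. Rounds to the nearest multiple of
// 2^(simulcast_layers - 1).
int RoundToSimulcastFriendlySize(int size, int simulcast_layers) {
  const int alignment = 1 << (simulcast_layers - 1);
  return ((size + alignment / 2) / alignment) * alignment;
}
// Example: with 3 layers the alignment is 4, so 1281 -> 1280 and 638 -> 640.
```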
@@ -291,7 +280,7 @@ size_t LimitSimulcastLayerCount(int width, int height, size_t need_layers, size_t layer_count, - const webrtc::WebRtcKeyValueConfig& trials) { + const webrtc::FieldTrialsView& trials) { if (!absl::StartsWith(trials.Lookup(kUseLegacySimulcastLayerLimitFieldTrial), "Disabled")) { // Max layers from one higher resolution in kSimulcastFormats will be used @@ -327,7 +316,7 @@ std::vector GetSimulcastConfig( int max_qp, bool is_screenshare_with_conference_mode, bool temporal_layers_supported, - const webrtc::WebRtcKeyValueConfig& trials) { + const webrtc::FieldTrialsView& trials) { RTC_DCHECK_LE(min_layers, max_layers); RTC_DCHECK(max_layers > 1 || is_screenshare_with_conference_mode); @@ -359,7 +348,7 @@ std::vector GetNormalSimulcastLayers( int max_qp, bool temporal_layers_supported, bool base_heavy_tl3_rate_alloc, - const webrtc::WebRtcKeyValueConfig& trials) { + const webrtc::FieldTrialsView& trials) { std::vector layers(layer_count); const bool enable_lowres_bitrate_interpolation = @@ -377,9 +366,7 @@ std::vector GetNormalSimulcastLayers( // TODO(pbos): Fill actual temporal-layer bitrate thresholds. layers[s].max_qp = max_qp; layers[s].num_temporal_layers = - temporal_layers_supported - ? DefaultNumberOfTemporalLayers(s, false, trials) - : 1; + temporal_layers_supported ? DefaultNumberOfTemporalLayers(trials) : 1; layers[s].max_bitrate_bps = FindSimulcastMaxBitrate(width, height, enable_lowres_bitrate_interpolation) @@ -388,7 +375,7 @@ std::vector GetNormalSimulcastLayers( FindSimulcastTargetBitrate(width, height, enable_lowres_bitrate_interpolation) .bps(); - int num_temporal_layers = DefaultNumberOfTemporalLayers(s, false, trials); + int num_temporal_layers = DefaultNumberOfTemporalLayers(trials); if (s == 0) { // If alternative temporal rate allocation is selected, adjust the // bitrate of the lowest simulcast stream so that absolute bitrate for @@ -451,10 +438,9 @@ std::vector GetScreenshareLayers( int max_qp, bool temporal_layers_supported, bool base_heavy_tl3_rate_alloc, - const webrtc::WebRtcKeyValueConfig& trials) { - auto max_screenshare_layers = kMaxScreenshareSimulcastLayers; + const webrtc::FieldTrialsView& trials) { size_t num_simulcast_layers = - std::min(max_layers, max_screenshare_layers); + std::min(max_layers, kScreenshareMaxSimulcastLayers); std::vector layers(num_simulcast_layers); // For legacy screenshare in conference mode, tl0 and tl1 bitrates are @@ -472,10 +458,8 @@ std::vector GetScreenshareLayers( // With simulcast enabled, add another spatial layer. This one will have a // more normal layout, with the regular 3 temporal layer pattern and no fps // restrictions. The base simulcast layer will still use legacy setup. - if (num_simulcast_layers == kMaxScreenshareSimulcastLayers) { + if (num_simulcast_layers == kScreenshareMaxSimulcastLayers) { // Add optional upper simulcast layer. - const int num_temporal_layers = - DefaultNumberOfTemporalLayers(1, true, trials); int max_bitrate_bps; bool using_boosted_bitrate = false; if (!temporal_layers_supported) { @@ -484,20 +468,11 @@ std::vector GetScreenshareLayers( max_bitrate_bps = static_cast( kScreenshareHighStreamMaxBitrate.bps() * webrtc::SimulcastRateAllocator::GetTemporalRateAllocation( - num_temporal_layers, 0, base_heavy_tl3_rate_alloc)); - } else if (DefaultNumberOfTemporalLayers(1, true, trials) != 3 || - base_heavy_tl3_rate_alloc) { + kScreenshareTemporalLayers, 0, base_heavy_tl3_rate_alloc)); + } else { // Experimental temporal layer mode used, use increased max bitrate. 
max_bitrate_bps = kScreenshareHighStreamMaxBitrate.bps(); using_boosted_bitrate = true; - } else { - // Keep current bitrates with default 3tl/8 frame settings. - // Lowest temporal layers of a 3 layer setup will have 40% of the total - // bitrate allocation for that simulcast layer. Make sure the gap between - // the target of the lower simulcast layer and first temporal layer of the - // higher one is at most 2x the bitrate, so that upswitching is not - // hampered by stalled bitrate estimates. - max_bitrate_bps = 2 * ((layers[0].target_bitrate_bps * 10) / 4); } layers[1].width = width; @@ -505,9 +480,7 @@ std::vector GetScreenshareLayers( layers[1].max_qp = max_qp; layers[1].max_framerate = kDefaultVideoMaxFramerate; layers[1].num_temporal_layers = - temporal_layers_supported - ? DefaultNumberOfTemporalLayers(1, true, trials) - : 1; + temporal_layers_supported ? kScreenshareTemporalLayers : 1; layers[1].min_bitrate_bps = using_boosted_bitrate ? kScreenshareHighStreamMinBitrate.bps() : layers[0].target_bitrate_bps * 2; diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.h b/TMessagesProj/jni/voip/webrtc/video/config/simulcast.h similarity index 84% rename from TMessagesProj/jni/voip/webrtc/media/engine/simulcast.h rename to TMessagesProj/jni/voip/webrtc/video/config/simulcast.h index aa8c394816..32af168bcd 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.h +++ b/TMessagesProj/jni/voip/webrtc/video/config/simulcast.h @@ -8,16 +8,16 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MEDIA_ENGINE_SIMULCAST_H_ -#define MEDIA_ENGINE_SIMULCAST_H_ +#ifndef VIDEO_CONFIG_SIMULCAST_H_ +#define VIDEO_CONFIG_SIMULCAST_H_ #include #include -#include "api/transport/webrtc_key_value_config.h" +#include "api/field_trials_view.h" #include "api/units/data_rate.h" -#include "api/video_codecs/video_encoder_config.h" +#include "video/config/video_encoder_config.h" namespace cricket { @@ -43,7 +43,7 @@ std::vector GetSimulcastConfig( int max_qp, bool is_screenshare_with_conference_mode, bool temporal_layers_supported, - const webrtc::WebRtcKeyValueConfig& trials); + const webrtc::FieldTrialsView& trials); // Gets the simulcast config layers for a non-screensharing case. std::vector GetNormalSimulcastLayers( @@ -54,7 +54,7 @@ std::vector GetNormalSimulcastLayers( int max_qp, bool temporal_layers_supported, bool base_heavy_tl3_rate_alloc, - const webrtc::WebRtcKeyValueConfig& trials); + const webrtc::FieldTrialsView& trials); // Gets simulcast config layers for screenshare settings. std::vector GetScreenshareLayers( @@ -65,8 +65,8 @@ std::vector GetScreenshareLayers( int max_qp, bool temporal_layers_supported, bool base_heavy_tl3_rate_alloc, - const webrtc::WebRtcKeyValueConfig& trials); + const webrtc::FieldTrialsView& trials); } // namespace cricket -#endif // MEDIA_ENGINE_SIMULCAST_H_ +#endif // VIDEO_CONFIG_SIMULCAST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.cc b/TMessagesProj/jni/voip/webrtc/video/config/video_encoder_config.cc similarity index 95% rename from TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.cc rename to TMessagesProj/jni/voip/webrtc/video/config/video_encoder_config.cc index 70aece3211..74f7cd0e3a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.cc +++ b/TMessagesProj/jni/voip/webrtc/video/config/video_encoder_config.cc @@ -7,7 +7,7 @@ * in the file PATENTS. 
All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#include "api/video_codecs/video_encoder_config.h" +#include "video/config/video_encoder_config.h" #include @@ -52,6 +52,7 @@ VideoEncoderConfig::VideoEncoderConfig() : codec_type(kVideoCodecGeneric), video_format("Unset"), content_type(ContentType::kRealtimeVideo), + frame_drop_enabled(false), encoder_specific_settings(nullptr), min_transmit_bitrate_bps(0), max_bitrate_bps(0), @@ -78,8 +79,9 @@ std::string VideoEncoderConfig::ToString() const { ss << "kScreenshare"; break; } + ss << ", frame_drop_enabled: " << frame_drop_enabled; ss << ", encoder_specific_settings: "; - ss << (encoder_specific_settings != NULL ? "(ptr)" : "NULL"); + ss << (encoder_specific_settings != nullptr ? "(ptr)" : "NULL"); ss << ", min_transmit_bitrate_bps: " << min_transmit_bitrate_bps; ss << '}'; @@ -90,9 +92,7 @@ VideoEncoderConfig::VideoEncoderConfig(const VideoEncoderConfig&) = default; void VideoEncoderConfig::EncoderSpecificSettings::FillEncoderSpecificSettings( VideoCodec* codec) const { - if (codec->codecType == kVideoCodecH264) { - FillVideoCodecH264(codec->H264()); - } else if (codec->codecType == kVideoCodecVP8) { + if (codec->codecType == kVideoCodecVP8) { FillVideoCodecVp8(codec->VP8()); } else if (codec->codecType == kVideoCodecVP9) { FillVideoCodecVp9(codec->VP9()); diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.h b/TMessagesProj/jni/voip/webrtc/video/config/video_encoder_config.h similarity index 82% rename from TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.h rename to TMessagesProj/jni/voip/webrtc/video/config/video_encoder_config.h index 2b7ae93607..0fb527bfb3 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.h +++ b/TMessagesProj/jni/voip/webrtc/video/config/video_encoder_config.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_CONFIG_H_ -#define API_VIDEO_CODECS_VIDEO_ENCODER_CONFIG_H_ +#ifndef VIDEO_CONFIG_VIDEO_ENCODER_CONFIG_H_ +#define VIDEO_CONFIG_VIDEO_ENCODER_CONFIG_H_ #include @@ -18,6 +18,8 @@ #include "absl/types/optional.h" #include "api/scoped_refptr.h" +#include "api/video/resolution.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_codec.h" #include "rtc_base/ref_count.h" @@ -31,10 +33,11 @@ struct VideoStream { VideoStream(const VideoStream& other); std::string ToString() const; - // Width in pixels. + // Width/Height in pixels. + // This is the actual width and height used to configure encoder, + // which might be less than `requested_resolution` due to adaptation + // or due to the source providing smaller frames than requested. size_t width; - - // Height in pixels. size_t height; // Frame rate in fps. @@ -64,10 +67,21 @@ struct VideoStream { // between multiple streams. absl::optional bitrate_priority; - absl::optional scalability_mode; + absl::optional scalability_mode; // If this stream is enabled by the user, or not. bool active; + + // An optional user supplied max_frame_resolution + // than can be set independently of (adapted) VideoSource. + // This value is set from RtpEncodingParameters::requested_resolution + // (i.e. used for signaling app-level settings). + // + // The actual encode resolution is in `width` and `height`, + // which can be lower than requested_resolution, + // e.g. 
if source only provides lower resolution or + // if resource adaptation is active. + absl::optional requested_resolution; }; class VideoEncoderConfig { @@ -144,8 +158,8 @@ class VideoEncoderConfig { // The size of the vector may not be larger than // `encoder_config.number_of_streams`. virtual std::vector CreateEncoderStreams( - int width, - int height, + int frame_width, + int frame_height, const VideoEncoderConfig& encoder_config) = 0; protected: @@ -163,13 +177,17 @@ class VideoEncoderConfig { ~VideoEncoderConfig(); std::string ToString() const; - // TODO(nisse): Consolidate on one of these. + // TODO(bugs.webrtc.org/6883): Consolidate on one of these. VideoCodecType codec_type; SdpVideoFormat video_format; + // Note: This factory can be unset, and VideoStreamEncoder will + // then use the EncoderStreamFactory. The factory is only set by + // tests. rtc::scoped_refptr video_stream_factory; std::vector spatial_layers; ContentType content_type; + bool frame_drop_enabled; rtc::scoped_refptr encoder_specific_settings; // Padding will be used up to this bitrate regardless of the bitrate produced @@ -197,6 +215,11 @@ class VideoEncoderConfig { // Indicates whether quality scaling can be used or not. bool is_quality_scaling_allowed; + // Maximum Quantization Parameter. + // This value is fed into EncoderStreamFactory that + // apply it to all simulcast layers/spatial layers. + int max_qp; + private: // Access to the copy constructor is private to force use of the Copy() // method for those exceptional cases where we do use it. @@ -205,4 +228,4 @@ class VideoEncoderConfig { } // namespace webrtc -#endif // API_VIDEO_CODECS_VIDEO_ENCODER_CONFIG_H_ +#endif // VIDEO_CONFIG_VIDEO_ENCODER_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/cpu_scaling_tests.cc b/TMessagesProj/jni/voip/webrtc/video/cpu_scaling_tests.cc index 5f3dbd74f4..b9f3a45e94 100644 --- a/TMessagesProj/jni/voip/webrtc/video/cpu_scaling_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/video/cpu_scaling_tests.cc @@ -15,7 +15,6 @@ #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" -#include "api/video_codecs/video_encoder_config.h" #include "call/video_receive_stream.h" #include "call/video_send_stream.h" #include "rtc_base/checks.h" @@ -24,6 +23,7 @@ #include "test/field_trial.h" #include "test/frame_generator_capturer.h" #include "test/gtest.h" +#include "video/config/video_encoder_config.h" namespace webrtc { namespace { @@ -55,7 +55,7 @@ void CpuOveruseTest::RunTestAndCheckForAdaptation( public: OveruseObserver(const DegradationPreference& degradation_preference, bool expect_adaptation) - : SendTest(expect_adaptation ? kLongTimeoutMs : kDefaultTimeoutMs), + : SendTest(expect_adaptation ? 
kLongTimeout : kDefaultTimeout), degradation_preference_(degradation_preference), expect_adaptation_(expect_adaptation) {} @@ -102,7 +102,7 @@ void CpuOveruseTest::RunTestAndCheckForAdaptation( void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { EXPECT_FALSE(encoder_config->simulcast_layers.empty()); encoder_config->simulcast_layers[0].max_framerate = kFps; diff --git a/TMessagesProj/jni/voip/webrtc/video/decode_synchronizer.cc b/TMessagesProj/jni/voip/webrtc/video/decode_synchronizer.cc new file mode 100644 index 0000000000..a86066800f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/decode_synchronizer.cc @@ -0,0 +1,186 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/decode_synchronizer.h" + +#include +#include +#include +#include + +#include "api/sequence_checker.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "video/frame_decode_scheduler.h" +#include "video/frame_decode_timing.h" + +namespace webrtc { + +DecodeSynchronizer::ScheduledFrame::ScheduledFrame( + uint32_t rtp_timestamp, + FrameDecodeTiming::FrameSchedule schedule, + FrameDecodeScheduler::FrameReleaseCallback callback) + : rtp_timestamp_(rtp_timestamp), + schedule_(std::move(schedule)), + callback_(std::move(callback)) {} + +void DecodeSynchronizer::ScheduledFrame::RunFrameReleaseCallback() && { + // Inspiration from Chromium base::OnceCallback. Move `*this` to a local + // before execution to ensure internal state is cleared after callback + // execution. + auto sf = std::move(*this); + std::move(sf.callback_)(sf.rtp_timestamp_, sf.schedule_.render_time); +} + +Timestamp DecodeSynchronizer::ScheduledFrame::LatestDecodeTime() const { + return schedule_.latest_decode_time; +} + +DecodeSynchronizer::SynchronizedFrameDecodeScheduler:: + SynchronizedFrameDecodeScheduler(DecodeSynchronizer* sync) + : sync_(sync) { + RTC_DCHECK(sync_); +} + +DecodeSynchronizer::SynchronizedFrameDecodeScheduler:: + ~SynchronizedFrameDecodeScheduler() { + RTC_DCHECK(!next_frame_); + RTC_DCHECK(stopped_); +} + +absl::optional +DecodeSynchronizer::SynchronizedFrameDecodeScheduler::ScheduledRtpTimestamp() { + return next_frame_.has_value() + ? 
absl::make_optional(next_frame_->rtp_timestamp()) + : absl::nullopt; +} + +DecodeSynchronizer::ScheduledFrame +DecodeSynchronizer::SynchronizedFrameDecodeScheduler::ReleaseNextFrame() { + RTC_DCHECK(next_frame_); + auto res = std::move(*next_frame_); + next_frame_.reset(); + return res; +} + +Timestamp +DecodeSynchronizer::SynchronizedFrameDecodeScheduler::LatestDecodeTime() { + RTC_DCHECK(next_frame_); + return next_frame_->LatestDecodeTime(); +} + +void DecodeSynchronizer::SynchronizedFrameDecodeScheduler::ScheduleFrame( + uint32_t rtp, + FrameDecodeTiming::FrameSchedule schedule, + FrameReleaseCallback cb) { + RTC_DCHECK(!next_frame_) << "Can not schedule two frames at once."; + next_frame_ = ScheduledFrame(rtp, std::move(schedule), std::move(cb)); + sync_->OnFrameScheduled(this); +} + +void DecodeSynchronizer::SynchronizedFrameDecodeScheduler::CancelOutstanding() { + next_frame_.reset(); +} + +void DecodeSynchronizer::SynchronizedFrameDecodeScheduler::Stop() { + CancelOutstanding(); + stopped_ = true; + sync_->RemoveFrameScheduler(this); +} + +DecodeSynchronizer::DecodeSynchronizer(Clock* clock, + Metronome* metronome, + TaskQueueBase* worker_queue) + : clock_(clock), worker_queue_(worker_queue), metronome_(metronome) { + RTC_DCHECK(metronome_); + RTC_DCHECK(worker_queue_); +} + +DecodeSynchronizer::~DecodeSynchronizer() { + RTC_DCHECK(schedulers_.empty()); +} + +std::unique_ptr +DecodeSynchronizer::CreateSynchronizedFrameScheduler() { + RTC_DCHECK_RUN_ON(worker_queue_); + auto scheduler = std::make_unique(this); + auto [it, inserted] = schedulers_.emplace(scheduler.get()); + // If this is the first `scheduler` added, start listening to the metronome. + if (inserted && schedulers_.size() == 1) { + RTC_DLOG(LS_VERBOSE) << "Listening to metronome"; + metronome_->AddListener(this); + } + + return std::move(scheduler); +} + +void DecodeSynchronizer::OnFrameScheduled( + SynchronizedFrameDecodeScheduler* scheduler) { + RTC_DCHECK_RUN_ON(worker_queue_); + RTC_DCHECK(scheduler->ScheduledRtpTimestamp()); + + Timestamp now = clock_->CurrentTime(); + Timestamp next_tick = expected_next_tick_; + // If no tick has registered yet assume it will occur in the tick period. + if (next_tick.IsInfinite()) { + next_tick = now + metronome_->TickPeriod(); + } + + // Release the frame right away if the decode time is too soon. Otherwise + // the stream may fall behind too much. + bool decode_before_next_tick = + scheduler->LatestDecodeTime() < + (next_tick - FrameDecodeTiming::kMaxAllowedFrameDelay); + // Decode immediately if the decode time is in the past. + bool decode_time_in_past = scheduler->LatestDecodeTime() < now; + + if (decode_before_next_tick || decode_time_in_past) { + ScheduledFrame scheduled_frame = scheduler->ReleaseNextFrame(); + std::move(scheduled_frame).RunFrameReleaseCallback(); + } +} + +void DecodeSynchronizer::RemoveFrameScheduler( + SynchronizedFrameDecodeScheduler* scheduler) { + RTC_DCHECK_RUN_ON(worker_queue_); + RTC_DCHECK(scheduler); + auto it = schedulers_.find(scheduler); + if (it == schedulers_.end()) { + return; + } + schedulers_.erase(it); + // If there are no more schedulers active, stop listening for metronome ticks. 
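OnFrameScheduled() above releases a frame immediately instead of waiting for a tick when its latest decode time either lies in the past or falls more than FrameDecodeTiming::kMaxAllowedFrameDelay before the next expected tick. Restated as a standalone predicate; the function name is invented, and the constant's value is not shown in this patch, so it is passed in as a parameter:

```cpp
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"

// True if the frame should bypass metronome coalescing and be released now.
bool ShouldReleaseImmediately(webrtc::Timestamp now,
                              webrtc::Timestamp next_tick,
                              webrtc::Timestamp latest_decode_time,
                              webrtc::TimeDelta max_allowed_frame_delay) {
  const bool decode_before_next_tick =
      latest_decode_time < (next_tick - max_allowed_frame_delay);
  const bool decode_time_in_past = latest_decode_time < now;
  return decode_before_next_tick || decode_time_in_past;
}
```

When the last scheduler deregisters, the code below also detaches from the metronome and resets the expected tick.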
+ if (schedulers_.empty()) { + RTC_DLOG(LS_VERBOSE) << "Not listening to metronome"; + metronome_->RemoveListener(this); + expected_next_tick_ = Timestamp::PlusInfinity(); + } +} + +void DecodeSynchronizer::OnTick() { + RTC_DCHECK_RUN_ON(worker_queue_); + expected_next_tick_ = clock_->CurrentTime() + metronome_->TickPeriod(); + + for (auto* scheduler : schedulers_) { + if (scheduler->ScheduledRtpTimestamp() && + scheduler->LatestDecodeTime() < expected_next_tick_) { + auto scheduled_frame = scheduler->ReleaseNextFrame(); + std::move(scheduled_frame).RunFrameReleaseCallback(); + } + } +} + +TaskQueueBase* DecodeSynchronizer::OnTickTaskQueue() { + return worker_queue_; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/decode_synchronizer.h b/TMessagesProj/jni/voip/webrtc/video/decode_synchronizer.h new file mode 100644 index 0000000000..26e6fdf31d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/decode_synchronizer.h @@ -0,0 +1,137 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_DECODE_SYNCHRONIZER_H_ +#define VIDEO_DECODE_SYNCHRONIZER_H_ + +#include + +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/metronome/metronome.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/units/timestamp.h" +#include "rtc_base/checks.h" +#include "rtc_base/thread_annotations.h" +#include "video/frame_decode_scheduler.h" +#include "video/frame_decode_timing.h" + +namespace webrtc { + +// DecodeSynchronizer synchronizes the frame scheduling by coalescing decoding +// on the metronome. +// +// A video receive stream can use the DecodeSynchronizer by receiving a +// FrameDecodeScheduler instance with `CreateSynchronizedFrameScheduler()`. +// This instance implements FrameDecodeScheduler and can be used as a normal +// scheduler. This instance is owned by the receive stream, and is borrowed by +// the DecodeSynchronizer. The DecodeSynchronizer will stop borrowing the +// instance when `FrameDecodeScheduler::Stop()` is called, after which the +// scheduler may be destroyed by the receive stream. +// +// When a frame is scheduled for decode by a receive stream using the +// DecodeSynchronizer, it will instead be executed on the metronome during the +// tick interval where `max_decode_time` occurs. For example, if a frame is +// scheduled for decode in 50ms and the tick interval is 20ms, then the frame +// will be released for decoding in 2 ticks. See below for illustration, +// +// In the case where the decode time is in the past, or must occur before the +// next metronome tick then the frame will be released right away, allowing a +// delayed stream to catch up quickly. +// +// DecodeSynchronizer is single threaded - all method calls must run on the +// `worker_queue_`. 
+class DecodeSynchronizer : private Metronome::TickListener { + public: + DecodeSynchronizer(Clock* clock, + Metronome* metronome, + TaskQueueBase* worker_queue); + ~DecodeSynchronizer() override; + DecodeSynchronizer(const DecodeSynchronizer&) = delete; + DecodeSynchronizer& operator=(const DecodeSynchronizer&) = delete; + + std::unique_ptr CreateSynchronizedFrameScheduler(); + + private: + class ScheduledFrame { + public: + ScheduledFrame(uint32_t rtp_timestamp, + FrameDecodeTiming::FrameSchedule schedule, + FrameDecodeScheduler::FrameReleaseCallback callback); + + // Disallow copy since `callback` should only be moved. + ScheduledFrame(const ScheduledFrame&) = delete; + ScheduledFrame& operator=(const ScheduledFrame&) = delete; + ScheduledFrame(ScheduledFrame&&) = default; + ScheduledFrame& operator=(ScheduledFrame&&) = default; + + // Executes `callback_`. + void RunFrameReleaseCallback() &&; + + uint32_t rtp_timestamp() const { return rtp_timestamp_; } + Timestamp LatestDecodeTime() const; + + private: + uint32_t rtp_timestamp_; + FrameDecodeTiming::FrameSchedule schedule_; + FrameDecodeScheduler::FrameReleaseCallback callback_; + }; + + class SynchronizedFrameDecodeScheduler : public FrameDecodeScheduler { + public: + explicit SynchronizedFrameDecodeScheduler(DecodeSynchronizer* sync); + ~SynchronizedFrameDecodeScheduler() override; + + // Releases the outstanding frame for decoding. This invalidates + // `next_frame_`. There must be a frame scheduled. + ScheduledFrame ReleaseNextFrame(); + + // Returns `next_frame_.schedule.max_decode_time`. There must be a frame + // scheduled when this is called. + Timestamp LatestDecodeTime(); + + // FrameDecodeScheduler implementation. + absl::optional ScheduledRtpTimestamp() override; + void ScheduleFrame(uint32_t rtp, + FrameDecodeTiming::FrameSchedule schedule, + FrameReleaseCallback cb) override; + void CancelOutstanding() override; + void Stop() override; + + private: + DecodeSynchronizer* sync_; + absl::optional next_frame_; + bool stopped_ = false; + }; + + void OnFrameScheduled(SynchronizedFrameDecodeScheduler* scheduler); + void RemoveFrameScheduler(SynchronizedFrameDecodeScheduler* scheduler); + + // Metronome::TickListener implementation. 
+ void OnTick() override; + TaskQueueBase* OnTickTaskQueue() override; + + Clock* const clock_; + TaskQueueBase* const worker_queue_; + Metronome* const metronome_; + + Timestamp expected_next_tick_ = Timestamp::PlusInfinity(); + std::set schedulers_ + RTC_GUARDED_BY(worker_queue_); +}; + +} // namespace webrtc + +#endif // VIDEO_DECODE_SYNCHRONIZER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/encoder_bitrate_adjuster.cc b/TMessagesProj/jni/voip/webrtc/video/encoder_bitrate_adjuster.cc index 9f4d8551c4..8ed16a7565 100644 --- a/TMessagesProj/jni/voip/webrtc/video/encoder_bitrate_adjuster.cc +++ b/TMessagesProj/jni/voip/webrtc/video/encoder_bitrate_adjuster.cc @@ -17,7 +17,6 @@ #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { diff --git a/TMessagesProj/jni/voip/webrtc/video/encoder_rtcp_feedback.cc b/TMessagesProj/jni/voip/webrtc/video/encoder_rtcp_feedback.cc index 17095a0a0c..ebba41e807 100644 --- a/TMessagesProj/jni/voip/webrtc/video/encoder_rtcp_feedback.cc +++ b/TMessagesProj/jni/voip/webrtc/video/encoder_rtcp_feedback.cc @@ -35,7 +35,7 @@ EncoderRtcpFeedback::EncoderRtcpFeedback( ssrcs_(ssrcs), get_packet_infos_(std::move(get_packet_infos)), video_stream_encoder_(encoder), - time_last_packet_delivery_queue_(Timestamp::Millis(0)), + time_last_packet_delivery_queue_(Timestamp::Zero()), min_keyframe_send_interval_( TimeDelta::Millis(KeyframeIntervalSettings::ParseFromFieldTrials() .MinKeyframeSendIntervalMs() diff --git a/TMessagesProj/jni/voip/webrtc/video/encoder_rtcp_feedback.h b/TMessagesProj/jni/voip/webrtc/video/encoder_rtcp_feedback.h index 2aadcc34e7..c66a94503e 100644 --- a/TMessagesProj/jni/voip/webrtc/video/encoder_rtcp_feedback.h +++ b/TMessagesProj/jni/voip/webrtc/video/encoder_rtcp_feedback.h @@ -16,11 +16,11 @@ #include "api/sequence_checker.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "api/video/video_stream_encoder_interface.h" #include "call/rtp_video_sender_interface.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/system/no_unique_address.h" #include "system_wrappers/include/clock.h" +#include "video/video_stream_encoder_interface.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.cc b/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.cc index c46790992c..efffa9672a 100644 --- a/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.cc @@ -11,21 +11,31 @@ #include "video/frame_cadence_adapter.h" #include +#include #include #include +#include +#include "absl/algorithm/container.h" +#include "absl/base/attributes.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/video_frame.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/race_checker.h" #include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" -#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/task_utils/repeating_task.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/time_utils.h" #include 
"system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" +#include "system_wrappers/include/ntp_time.h" namespace webrtc { namespace { @@ -86,9 +96,23 @@ class PassthroughAdapterMode : public AdapterMode { // Implements a frame cadence adapter supporting zero-hertz input. class ZeroHertzAdapterMode : public AdapterMode { public: - ZeroHertzAdapterMode(FrameCadenceAdapterInterface::Callback* callback, + ZeroHertzAdapterMode(TaskQueueBase* queue, + Clock* clock, + FrameCadenceAdapterInterface::Callback* callback, double max_fps); + // Reconfigures according to parameters. + // All spatial layer trackers are initialized as unconverged by this method. + void ReconfigureParameters( + const FrameCadenceAdapterInterface::ZeroHertzModeParams& params); + + // Updates spatial layer quality convergence status. + void UpdateLayerQualityConvergence(size_t spatial_index, + bool quality_converged); + + // Updates spatial layer enabled status. + void UpdateLayerStatus(size_t spatial_index, bool enabled); + // Adapter overrides. void OnFrame(Timestamp post_time, int frames_scheduled_for_processing, @@ -96,27 +120,127 @@ class ZeroHertzAdapterMode : public AdapterMode { absl::optional GetInputFrameRateFps() override; void UpdateFrameRate() override {} + // Notified on dropped frames. + void OnDiscardedFrame(); + + // Conditionally requests a refresh frame via + // Callback::RequestRefreshFrame. + void ProcessKeyFrameRequest(); + private: + // The tracking state of each spatial layer. Used for determining when to + // stop repeating frames. + struct SpatialLayerTracker { + // If unset, the layer is disabled. Otherwise carries the quality + // convergence status of the layer. + absl::optional quality_converged; + }; + // The state of a scheduled repeat. + struct ScheduledRepeat { + ScheduledRepeat(Timestamp origin, + int64_t origin_timestamp_us, + int64_t origin_ntp_time_ms) + : scheduled(origin), + idle(false), + origin(origin), + origin_timestamp_us(origin_timestamp_us), + origin_ntp_time_ms(origin_ntp_time_ms) {} + // The instant when the repeat was scheduled. + Timestamp scheduled; + // True if the repeat was scheduled as an idle repeat (long), false + // otherwise. + bool idle; + // The moment we decided to start repeating. + Timestamp origin; + // The timestamp_us of the frame when we started repeating. + int64_t origin_timestamp_us; + // The ntp_times_ms of the frame when we started repeating. + int64_t origin_ntp_time_ms; + }; + + // Returns true if all spatial layers can be considered to be converged in + // terms of quality. + // Convergence means QP has dropped to a low-enough level to warrant ceasing + // to send identical frames at high frequency. + bool HasQualityConverged() const RTC_RUN_ON(sequence_checker_); + // Resets quality convergence information. HasQualityConverged() returns false + // after this call. + void ResetQualityConvergenceInfo() RTC_RUN_ON(sequence_checker_); + // Processes incoming frames on a delayed cadence. + void ProcessOnDelayedCadence() RTC_RUN_ON(sequence_checker_); + // Schedules a later repeat with delay depending on state of layer trackers. + // If true is passed in `idle_repeat`, the repeat is going to be + // kZeroHertzIdleRepeatRatePeriod. Otherwise it'll be the value of + // `frame_delay`. + void ScheduleRepeat(int frame_id, bool idle_repeat) + RTC_RUN_ON(sequence_checker_); + // Repeats a frame in the abscence of incoming frames. 
Slows down when quality + // convergence is attained, and stops the cadence terminally when new frames + // have arrived. + void ProcessRepeatedFrameOnDelayedCadence(int frame_id) + RTC_RUN_ON(sequence_checker_); + // Sends a frame, updating the timestamp to the current time. + void SendFrameNow(const VideoFrame& frame) const + RTC_RUN_ON(sequence_checker_); + // Returns the repeat duration depending on if it's an idle repeat or not. + TimeDelta RepeatDuration(bool idle_repeat) const + RTC_RUN_ON(sequence_checker_); + // Unless timer already running, starts repeatedly requesting refresh frames + // after a grace_period. If a frame appears before the grace_period has + // passed, the request is cancelled. + void MaybeStartRefreshFrameRequester() RTC_RUN_ON(sequence_checker_); + + TaskQueueBase* const queue_; + Clock* const clock_; FrameCadenceAdapterInterface::Callback* const callback_; + // The configured max_fps. // TODO(crbug.com/1255737): support max_fps updates. const double max_fps_; + // How much the incoming frame sequence is delayed by. + const TimeDelta frame_delay_ = TimeDelta::Seconds(1) / max_fps_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; + // A queue of incoming frames and repeated frames. + std::deque queued_frames_ RTC_GUARDED_BY(sequence_checker_); + // The current frame ID to use when starting to repeat frames. This is used + // for cancelling deferred repeated frame processing happening. + int current_frame_id_ RTC_GUARDED_BY(sequence_checker_) = 0; + // Has content when we are repeating frames. + absl::optional scheduled_repeat_ + RTC_GUARDED_BY(sequence_checker_); + // Convergent state of each of the configured simulcast layers. + std::vector layer_trackers_ + RTC_GUARDED_BY(sequence_checker_); + // Repeating task handle used for requesting refresh frames until arrival, as + // they can be dropped in various places in the capture pipeline. + RepeatingTaskHandle refresh_frame_requester_ + RTC_GUARDED_BY(sequence_checker_); + + ScopedTaskSafety safety_; }; class FrameCadenceAdapterImpl : public FrameCadenceAdapterInterface { public: - FrameCadenceAdapterImpl(Clock* clock, TaskQueueBase* queue); + FrameCadenceAdapterImpl(Clock* clock, + TaskQueueBase* queue, + const FieldTrialsView& field_trials); + ~FrameCadenceAdapterImpl(); // FrameCadenceAdapterInterface overrides. void Initialize(Callback* callback) override; - void SetZeroHertzModeEnabled(bool enabled) override; + void SetZeroHertzModeEnabled( + absl::optional params) override; absl::optional GetInputFrameRateFps() override; void UpdateFrameRate() override; + void UpdateLayerQualityConvergence(size_t spatial_index, + bool quality_converged) override; + void UpdateLayerStatus(size_t spatial_index, bool enabled) override; + void ProcessKeyFrameRequest() override; // VideoFrameSink overrides. void OnFrame(const VideoFrame& frame) override; - void OnDiscardedFrame() override { callback_->OnDiscardedFrame(); } + void OnDiscardedFrame() override; void OnConstraintsChanged( const VideoTrackSourceConstraints& constraints) override; @@ -149,9 +273,15 @@ class FrameCadenceAdapterImpl : public FrameCadenceAdapterInterface { // The two possible modes we're under. absl::optional passthrough_adapter_; absl::optional zero_hertz_adapter_; + // If set, zero-hertz mode has been enabled. + absl::optional zero_hertz_params_; // Cache for the current adapter mode. AdapterMode* current_adapter_mode_ = nullptr; + // Timestamp for statistics reporting. 
+ absl::optional zero_hertz_adapter_created_timestamp_ + RTC_GUARDED_BY(queue_); + // Set up during Initialize. Callback* callback_ = nullptr; @@ -159,34 +289,115 @@ class FrameCadenceAdapterImpl : public FrameCadenceAdapterInterface { absl::optional source_constraints_ RTC_GUARDED_BY(queue_); - // Whether zero-hertz and UMA reporting is enabled. - bool zero_hertz_and_uma_reporting_enabled_ RTC_GUARDED_BY(queue_) = false; - // Race checker for incoming frames. This is the network thread in chromium, // but may vary from test contexts. rtc::RaceChecker incoming_frame_race_checker_; bool has_reported_screenshare_frame_rate_umas_ RTC_GUARDED_BY(queue_) = false; // Number of frames that are currently scheduled for processing on the - // |queue_|. + // `queue_`. std::atomic frames_scheduled_for_processing_{0}; ScopedTaskSafetyDetached safety_; }; ZeroHertzAdapterMode::ZeroHertzAdapterMode( + TaskQueueBase* queue, + Clock* clock, FrameCadenceAdapterInterface::Callback* callback, double max_fps) - : callback_(callback), max_fps_(max_fps) { + : queue_(queue), clock_(clock), callback_(callback), max_fps_(max_fps) { sequence_checker_.Detach(); + MaybeStartRefreshFrameRequester(); +} + +void ZeroHertzAdapterMode::ReconfigureParameters( + const FrameCadenceAdapterInterface::ZeroHertzModeParams& params) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DLOG(LS_INFO) << __func__ << " this " << this << " num_simulcast_layers " + << params.num_simulcast_layers; + + // Start as unconverged. + layer_trackers_.clear(); + layer_trackers_.resize(params.num_simulcast_layers, + SpatialLayerTracker{false}); +} + +void ZeroHertzAdapterMode::UpdateLayerQualityConvergence( + size_t spatial_index, + bool quality_converged) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_LOG(LS_INFO) << __func__ << " this " << this << " layer " << spatial_index + << " quality has converged: " << quality_converged; + if (spatial_index >= layer_trackers_.size()) + return; + if (layer_trackers_[spatial_index].quality_converged.has_value()) + layer_trackers_[spatial_index].quality_converged = quality_converged; +} + +void ZeroHertzAdapterMode::UpdateLayerStatus(size_t spatial_index, + bool enabled) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (spatial_index >= layer_trackers_.size()) + return; + if (enabled) { + if (!layer_trackers_[spatial_index].quality_converged.has_value()) { + // Assume quality has not converged until hearing otherwise. + layer_trackers_[spatial_index].quality_converged = false; + } + } else { + layer_trackers_[spatial_index].quality_converged = absl::nullopt; + } + RTC_LOG(LS_INFO) + << __func__ << " this " << this << " layer " << spatial_index + << (enabled + ? (layer_trackers_[spatial_index].quality_converged.has_value() + ? " enabled." + : " enabled and it's assumed quality has not converged.") + : " disabled."); } void ZeroHertzAdapterMode::OnFrame(Timestamp post_time, int frames_scheduled_for_processing, const VideoFrame& frame) { RTC_DCHECK_RUN_ON(&sequence_checker_); - // TODO(crbug.com/1255737): fill with meaningful implementation. - callback_->OnFrame(post_time, frames_scheduled_for_processing, frame); + RTC_DLOG(LS_VERBOSE) << "ZeroHertzAdapterMode::" << __func__ << " this " + << this; + refresh_frame_requester_.Stop(); + + // Assume all enabled layers are unconverged after frame entry. + ResetQualityConvergenceInfo(); + + // Remove stored repeating frame if needed. 
+ if (scheduled_repeat_.has_value()) { + RTC_DCHECK(queued_frames_.size() == 1); + RTC_DLOG(LS_VERBOSE) << __func__ << " this " << this + << " cancel repeat and restart with original"; + queued_frames_.pop_front(); + } + + // Store the frame in the queue and schedule deferred processing. + queued_frames_.push_back(frame); + current_frame_id_++; + scheduled_repeat_ = absl::nullopt; + queue_->PostDelayedHighPrecisionTask( + SafeTask(safety_.flag(), + [this] { + RTC_DCHECK_RUN_ON(&sequence_checker_); + ProcessOnDelayedCadence(); + }), + frame_delay_); +} + +void ZeroHertzAdapterMode::OnDiscardedFrame() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DLOG(LS_VERBOSE) << "ZeroHertzAdapterMode::" << __func__; + + // Under zero hertz source delivery, a discarded frame ending a sequence of + // frames which happened to contain important information can be seen as a + // capture freeze. Avoid this by starting requesting refresh frames after a + // grace period. + MaybeStartRefreshFrameRequester(); } absl::optional ZeroHertzAdapterMode::GetInputFrameRateFps() { @@ -194,12 +405,198 @@ absl::optional ZeroHertzAdapterMode::GetInputFrameRateFps() { return max_fps_; } -FrameCadenceAdapterImpl::FrameCadenceAdapterImpl(Clock* clock, - TaskQueueBase* queue) +void ZeroHertzAdapterMode::ProcessKeyFrameRequest() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + + // If we're new and don't have a frame, there's no need to request refresh + // frames as this was being triggered for us when zero-hz mode was set up. + // + // The next frame encoded will be a key frame. Reset quality convergence so we + // don't get idle repeats shortly after, because key frames need a lot of + // refinement frames. + ResetQualityConvergenceInfo(); + + // If we're not repeating, or we're repeating with short duration, we will + // very soon send out a frame and don't need a refresh frame. + if (!scheduled_repeat_.has_value() || !scheduled_repeat_->idle) { + RTC_LOG(LS_INFO) << __func__ << " this " << this + << " not requesting refresh frame because of recently " + "incoming frame or short repeating."; + return; + } + + // If the repeat is scheduled within a short (i.e. frame_delay_) interval, we + // will very soon send out a frame and don't need a refresh frame. + Timestamp now = clock_->CurrentTime(); + if (scheduled_repeat_->scheduled + RepeatDuration(/*idle_repeat=*/true) - + now <= + frame_delay_) { + RTC_LOG(LS_INFO) << __func__ << " this " << this + << " not requesting refresh frame because of soon " + "happening idle repeat"; + return; + } + + // Cancel the current repeat and reschedule a short repeat now. No need for a + // new refresh frame. + RTC_LOG(LS_INFO) << __func__ << " this " << this + << " not requesting refresh frame and scheduling a short " + "repeat due to key frame request"; + ScheduleRepeat(++current_frame_id_, /*idle_repeat=*/false); + return; +} + +bool ZeroHertzAdapterMode::HasQualityConverged() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + // 1. Define ourselves as unconverged with no spatial layers configured. This + // is to keep short repeating until the layer configuration comes. + // 2. Unset layers implicitly imply that they're converged to support + // disabling layers when they're not needed. 
+ const bool quality_converged = + !layer_trackers_.empty() && + absl::c_all_of(layer_trackers_, [](const SpatialLayerTracker& tracker) { + return tracker.quality_converged.value_or(true); + }); + return quality_converged; +} + +void ZeroHertzAdapterMode::ResetQualityConvergenceInfo() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DLOG(LS_INFO) << __func__ << " this " << this; + for (auto& layer_tracker : layer_trackers_) { + if (layer_tracker.quality_converged.has_value()) + layer_tracker.quality_converged = false; + } +} + +void ZeroHertzAdapterMode::ProcessOnDelayedCadence() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(!queued_frames_.empty()); + RTC_DLOG(LS_VERBOSE) << __func__ << " this " << this; + + SendFrameNow(queued_frames_.front()); + + // If there were two or more frames stored, we do not have to schedule repeats + // of the front frame. + if (queued_frames_.size() > 1) { + queued_frames_.pop_front(); + return; + } + + // There's only one frame to send. Schedule a repeat sequence, which is + // cancelled by `current_frame_id_` getting incremented should new frames + // arrive. + ScheduleRepeat(current_frame_id_, HasQualityConverged()); +} + +void ZeroHertzAdapterMode::ScheduleRepeat(int frame_id, bool idle_repeat) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DLOG(LS_VERBOSE) << __func__ << " this " << this << " frame_id " + << frame_id; + Timestamp now = clock_->CurrentTime(); + if (!scheduled_repeat_.has_value()) { + scheduled_repeat_.emplace(now, queued_frames_.front().timestamp_us(), + queued_frames_.front().ntp_time_ms()); + } + scheduled_repeat_->scheduled = now; + scheduled_repeat_->idle = idle_repeat; + + TimeDelta repeat_delay = RepeatDuration(idle_repeat); + queue_->PostDelayedHighPrecisionTask( + SafeTask(safety_.flag(), + [this, frame_id] { + RTC_DCHECK_RUN_ON(&sequence_checker_); + ProcessRepeatedFrameOnDelayedCadence(frame_id); + }), + repeat_delay); +} + +void ZeroHertzAdapterMode::ProcessRepeatedFrameOnDelayedCadence(int frame_id) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DLOG(LS_VERBOSE) << __func__ << " this " << this << " frame_id " + << frame_id; + RTC_DCHECK(!queued_frames_.empty()); + + // Cancel this invocation if new frames turned up. + if (frame_id != current_frame_id_) + return; + RTC_DCHECK(scheduled_repeat_.has_value()); + + VideoFrame& frame = queued_frames_.front(); + + // Since this is a repeated frame, nothing changed compared to before. + VideoFrame::UpdateRect empty_update_rect; + empty_update_rect.MakeEmptyUpdate(); + frame.set_update_rect(empty_update_rect); + + // Adjust timestamps of the frame of the repeat, accounting for the actual + // delay since we started repeating. + // + // NOTE: No need to update the RTP timestamp as the VideoStreamEncoder + // overwrites it based on its chosen NTP timestamp source. + TimeDelta total_delay = clock_->CurrentTime() - scheduled_repeat_->origin; + if (frame.timestamp_us() > 0) { + frame.set_timestamp_us(scheduled_repeat_->origin_timestamp_us + + total_delay.us()); + } + if (frame.ntp_time_ms()) { + frame.set_ntp_time_ms(scheduled_repeat_->origin_ntp_time_ms + + total_delay.ms()); + } + SendFrameNow(frame); + + // Schedule another repeat. 
+ ScheduleRepeat(frame_id, HasQualityConverged()); +} + +void ZeroHertzAdapterMode::SendFrameNow(const VideoFrame& frame) const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DLOG(LS_VERBOSE) << __func__ << " this " << this << " timestamp " + << frame.timestamp() << " timestamp_us " + << frame.timestamp_us() << " ntp_time_ms " + << frame.ntp_time_ms(); + // TODO(crbug.com/1255737): figure out if frames_scheduled_for_processing + // makes sense to compute in this implementation. + callback_->OnFrame(/*post_time=*/clock_->CurrentTime(), + /*frames_scheduled_for_processing=*/1, frame); +} + +TimeDelta ZeroHertzAdapterMode::RepeatDuration(bool idle_repeat) const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return idle_repeat + ? FrameCadenceAdapterInterface::kZeroHertzIdleRepeatRatePeriod + : frame_delay_; +} + +void ZeroHertzAdapterMode::MaybeStartRefreshFrameRequester() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DLOG(LS_VERBOSE) << __func__; + if (!refresh_frame_requester_.Running()) { + refresh_frame_requester_ = RepeatingTaskHandle::DelayedStart( + queue_, + FrameCadenceAdapterInterface::kOnDiscardedFrameRefreshFramePeriod * + frame_delay_, + [this] { + RTC_DLOG(LS_VERBOSE) << __func__ << " RequestRefreshFrame"; + if (callback_) + callback_->RequestRefreshFrame(); + return frame_delay_; + }); + } +} + +FrameCadenceAdapterImpl::FrameCadenceAdapterImpl( + Clock* clock, + TaskQueueBase* queue, + const FieldTrialsView& field_trials) : clock_(clock), queue_(queue), zero_hertz_screenshare_enabled_( - field_trial::IsEnabled("WebRTC-ZeroHertzScreenshare")) {} + !field_trials.IsDisabled("WebRTC-ZeroHertzScreenshare")) {} + +FrameCadenceAdapterImpl::~FrameCadenceAdapterImpl() { + RTC_DLOG(LS_VERBOSE) << __func__ << " this " << this; +} void FrameCadenceAdapterImpl::Initialize(Callback* callback) { callback_ = callback; @@ -207,12 +604,13 @@ void FrameCadenceAdapterImpl::Initialize(Callback* callback) { current_adapter_mode_ = &passthrough_adapter_.value(); } -void FrameCadenceAdapterImpl::SetZeroHertzModeEnabled(bool enabled) { +void FrameCadenceAdapterImpl::SetZeroHertzModeEnabled( + absl::optional params) { RTC_DCHECK_RUN_ON(queue_); - bool was_zero_hertz_enabled = zero_hertz_and_uma_reporting_enabled_; - if (enabled && !zero_hertz_and_uma_reporting_enabled_) + bool was_zero_hertz_enabled = zero_hertz_params_.has_value(); + if (params.has_value() && !was_zero_hertz_enabled) has_reported_screenshare_frame_rate_umas_ = false; - zero_hertz_and_uma_reporting_enabled_ = enabled; + zero_hertz_params_ = params; MaybeReconfigureAdapters(was_zero_hertz_enabled); } @@ -229,16 +627,47 @@ void FrameCadenceAdapterImpl::UpdateFrameRate() { passthrough_adapter_->UpdateFrameRate(); } +void FrameCadenceAdapterImpl::UpdateLayerQualityConvergence( + size_t spatial_index, + bool quality_converged) { + if (zero_hertz_adapter_.has_value()) + zero_hertz_adapter_->UpdateLayerQualityConvergence(spatial_index, + quality_converged); +} + +void FrameCadenceAdapterImpl::UpdateLayerStatus(size_t spatial_index, + bool enabled) { + if (zero_hertz_adapter_.has_value()) + zero_hertz_adapter_->UpdateLayerStatus(spatial_index, enabled); +} + +void FrameCadenceAdapterImpl::ProcessKeyFrameRequest() { + RTC_DCHECK_RUN_ON(queue_); + if (zero_hertz_adapter_) + zero_hertz_adapter_->ProcessKeyFrameRequest(); +} + void FrameCadenceAdapterImpl::OnFrame(const VideoFrame& frame) { // This method is called on the network thread under Chromium, or other // various contexts in test. 
RTC_DCHECK_RUNS_SERIALIZED(&incoming_frame_race_checker_); + RTC_DLOG(LS_VERBOSE) << "FrameCadenceAdapterImpl::" << __func__ << " this " + << this; // Local time in webrtc time base. Timestamp post_time = clock_->CurrentTime(); frames_scheduled_for_processing_.fetch_add(1, std::memory_order_relaxed); - queue_->PostTask(ToQueuedTask(safety_.flag(), [this, post_time, frame] { + queue_->PostTask(SafeTask(safety_.flag(), [this, post_time, frame] { RTC_DCHECK_RUN_ON(queue_); + if (zero_hertz_adapter_created_timestamp_.has_value()) { + TimeDelta time_until_first_frame = + clock_->CurrentTime() - *zero_hertz_adapter_created_timestamp_; + zero_hertz_adapter_created_timestamp_ = absl::nullopt; + RTC_HISTOGRAM_COUNTS_10000( + "WebRTC.Screenshare.ZeroHz.TimeUntilFirstFrameMs", + time_until_first_frame.ms()); + } + const int frames_scheduled_for_processing = frames_scheduled_for_processing_.fetch_sub(1, std::memory_order_relaxed); @@ -248,12 +677,22 @@ void FrameCadenceAdapterImpl::OnFrame(const VideoFrame& frame) { })); } +void FrameCadenceAdapterImpl::OnDiscardedFrame() { + callback_->OnDiscardedFrame(); + queue_->PostTask(SafeTask(safety_.flag(), [this] { + RTC_DCHECK_RUN_ON(queue_); + if (zero_hertz_adapter_) { + zero_hertz_adapter_->OnDiscardedFrame(); + } + })); +} + void FrameCadenceAdapterImpl::OnConstraintsChanged( const VideoTrackSourceConstraints& constraints) { - RTC_LOG(LS_INFO) << __func__ << " min_fps " + RTC_LOG(LS_INFO) << __func__ << " this " << this << " min_fps " << constraints.min_fps.value_or(-1) << " max_fps " << constraints.max_fps.value_or(-1); - queue_->PostTask(ToQueuedTask(safety_.flag(), [this, constraints] { + queue_->PostTask(SafeTask(safety_.flag(), [this, constraints] { RTC_DCHECK_RUN_ON(queue_); bool was_zero_hertz_enabled = IsZeroHertzScreenshareEnabled(); source_constraints_ = constraints; @@ -261,32 +700,35 @@ void FrameCadenceAdapterImpl::OnConstraintsChanged( })); } -// RTC_RUN_ON(queue_) void FrameCadenceAdapterImpl::OnFrameOnMainQueue( Timestamp post_time, int frames_scheduled_for_processing, const VideoFrame& frame) { + RTC_DCHECK_RUN_ON(queue_); current_adapter_mode_->OnFrame(post_time, frames_scheduled_for_processing, frame); } -// RTC_RUN_ON(queue_) bool FrameCadenceAdapterImpl::IsZeroHertzScreenshareEnabled() const { + RTC_DCHECK_RUN_ON(queue_); return zero_hertz_screenshare_enabled_ && source_constraints_.has_value() && source_constraints_->max_fps.value_or(-1) > 0 && source_constraints_->min_fps.value_or(-1) == 0 && - zero_hertz_and_uma_reporting_enabled_; + zero_hertz_params_.has_value(); } -// RTC_RUN_ON(queue_) void FrameCadenceAdapterImpl::MaybeReconfigureAdapters( bool was_zero_hertz_enabled) { + RTC_DCHECK_RUN_ON(queue_); bool is_zero_hertz_enabled = IsZeroHertzScreenshareEnabled(); if (is_zero_hertz_enabled) { if (!was_zero_hertz_enabled) { - zero_hertz_adapter_.emplace(callback_, + zero_hertz_adapter_.emplace(queue_, clock_, callback_, source_constraints_->max_fps.value()); + RTC_LOG(LS_INFO) << "Zero hertz mode activated."; + zero_hertz_adapter_created_timestamp_ = clock_->CurrentTime(); } + zero_hertz_adapter_->ReconfigureParameters(zero_hertz_params_.value()); current_adapter_mode_ = &zero_hertz_adapter_.value(); } else { if (was_zero_hertz_enabled) @@ -295,12 +737,12 @@ void FrameCadenceAdapterImpl::MaybeReconfigureAdapters( } } -// RTC_RUN_ON(queue_) void FrameCadenceAdapterImpl::MaybeReportFrameRateConstraintUmas() { + RTC_DCHECK_RUN_ON(queue_); if (has_reported_screenshare_frame_rate_umas_) return; has_reported_screenshare_frame_rate_umas_ 
= true; - if (!zero_hertz_and_uma_reporting_enabled_) + if (!zero_hertz_params_.has_value()) return; RTC_HISTOGRAM_BOOLEAN("WebRTC.Screenshare.FrameRateConstraints.Exists", source_constraints_.has_value()); @@ -352,8 +794,10 @@ void FrameCadenceAdapterImpl::MaybeReportFrameRateConstraintUmas() { } // namespace std::unique_ptr -FrameCadenceAdapterInterface::Create(Clock* clock, TaskQueueBase* queue) { - return std::make_unique(clock, queue); +FrameCadenceAdapterInterface::Create(Clock* clock, + TaskQueueBase* queue, + const FieldTrialsView& field_trials) { + return std::make_unique(clock, queue, field_trials); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.h b/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.h index 8685f37f97..d0eab7e770 100644 --- a/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.h @@ -13,7 +13,10 @@ #include +#include "absl/base/attributes.h" +#include "api/field_trials_view.h" #include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "rtc_base/synchronization/mutex.h" @@ -31,7 +34,21 @@ class FrameCadenceAdapterInterface public: // Averaging window spanning 90 frames at default 30fps, matching old media // optimization module defaults. + // TODO(crbug.com/1255737): Use TimeDelta. static constexpr int64_t kFrameRateAveragingWindowSizeMs = (1000 / 30) * 90; + // In zero-hertz mode, the idle repeat rate is a compromise between + // RTP receiver keyframe-requesting timeout (3s), other backend limitations + // and some worst case RTT. + static constexpr TimeDelta kZeroHertzIdleRepeatRatePeriod = + TimeDelta::Millis(1000); + // The number of frame periods to wait for new frames until starting to + // request refresh frames. + static constexpr int kOnDiscardedFrameRefreshFramePeriod = 3; + + struct ZeroHertzModeParams { + // The number of simulcast layers used in this configuration. + size_t num_simulcast_layers = 0; + }; // Callback interface used to inform instance owners. class Callback { @@ -55,6 +72,9 @@ class FrameCadenceAdapterInterface // Called when the source has discarded a frame. virtual void OnDiscardedFrame() = 0; + + // Called when the adapter needs the source to send a refresh frame. + virtual void RequestRefreshFrame() = 0; }; // Factory function creating a production instance. Deletion of the returned @@ -63,13 +83,17 @@ class FrameCadenceAdapterInterface // Callback::OnFrame on the |queue|. static std::unique_ptr Create( Clock* clock, - TaskQueueBase* queue); + TaskQueueBase* queue, + const FieldTrialsView& field_trials); // Call before using the rest of the API. virtual void Initialize(Callback* callback) = 0; - // Pass true in |enabled| as a prerequisite to enable zero-hertz operation. - virtual void SetZeroHertzModeEnabled(bool enabled) = 0; + // Pass zero hertz parameters in |params| as a prerequisite to enable + // zero-hertz operation. If absl:::nullopt is passed, the cadence adapter will + // switch to passthrough mode. + virtual void SetZeroHertzModeEnabled( + absl::optional params) = 0; // Returns the input framerate. This is measured by RateStatistics when // zero-hertz mode is off, and returns the max framerate in zero-hertz mode. @@ -78,6 +102,19 @@ class FrameCadenceAdapterInterface // Updates frame rate. This is done unconditionally irrespective of adapter // mode. 
virtual void UpdateFrameRate() = 0; + + // Updates quality convergence status for an enabled spatial layer. + // Convergence means QP has dropped to a low-enough level to warrant ceasing + // to send identical frames at high frequency. + virtual void UpdateLayerQualityConvergence(size_t spatial_index, + bool converged) = 0; + + // Updates spatial layer enabled status. + virtual void UpdateLayerStatus(size_t spatial_index, bool enabled) = 0; + + // Conditionally requests a refresh frame via + // Callback::RequestRefreshFrame. + virtual void ProcessKeyFrameRequest() = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_decode_scheduler.h b/TMessagesProj/jni/voip/webrtc/video/frame_decode_scheduler.h new file mode 100644 index 0000000000..29e27c22c8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/frame_decode_scheduler.h @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_FRAME_DECODE_SCHEDULER_H_ +#define VIDEO_FRAME_DECODE_SCHEDULER_H_ + +#include + +#include "absl/functional/any_invocable.h" +#include "absl/types/optional.h" +#include "api/units/timestamp.h" +#include "video/frame_decode_timing.h" + +namespace webrtc { + +class FrameDecodeScheduler { + public: + // Invoked when a frame with `rtp_timestamp` is ready for decoding. + using FrameReleaseCallback = + absl::AnyInvocable; + + virtual ~FrameDecodeScheduler() = default; + + // Returns the rtp timestamp of the next frame scheduled for release, or + // `nullopt` if no frame is currently scheduled. + virtual absl::optional ScheduledRtpTimestamp() = 0; + + // Schedules a frame for release based on `schedule`. When released, + // `callback` will be invoked with the `rtp` timestamp of the frame and the + // `render_time` + virtual void ScheduleFrame(uint32_t rtp, + FrameDecodeTiming::FrameSchedule schedule, + FrameReleaseCallback callback) = 0; + + // Cancels all scheduled frames. + virtual void CancelOutstanding() = 0; + + // Stop() Must be called before destruction. + virtual void Stop() = 0; +}; + +} // namespace webrtc + +#endif // VIDEO_FRAME_DECODE_SCHEDULER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_decode_timing.cc b/TMessagesProj/jni/voip/webrtc/video/frame_decode_timing.cc new file mode 100644 index 0000000000..58ecd41c9e --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/frame_decode_timing.cc @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/frame_decode_timing.h" + +#include + +#include "absl/types/optional.h" +#include "api/units/time_delta.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +FrameDecodeTiming::FrameDecodeTiming(Clock* clock, + webrtc::VCMTiming const* timing) + : clock_(clock), timing_(timing) { + RTC_DCHECK(clock_); + RTC_DCHECK(timing_); +} + +absl::optional +FrameDecodeTiming::OnFrameBufferUpdated(uint32_t next_temporal_unit_rtp, + uint32_t last_temporal_unit_rtp, + TimeDelta max_wait_for_frame, + bool too_many_frames_queued) { + RTC_DCHECK_GE(max_wait_for_frame, TimeDelta::Zero()); + const Timestamp now = clock_->CurrentTime(); + Timestamp render_time = timing_->RenderTime(next_temporal_unit_rtp, now); + TimeDelta max_wait = + timing_->MaxWaitingTime(render_time, now, too_many_frames_queued); + + // If the delay is not too far in the past, or this is the last decodable + // frame then it is the best frame to be decoded. Otherwise, fast-forward + // to the next frame in the buffer. + if (max_wait <= -kMaxAllowedFrameDelay && + next_temporal_unit_rtp != last_temporal_unit_rtp) { + RTC_DLOG(LS_VERBOSE) << "Fast-forwarded frame " << next_temporal_unit_rtp + << " render time " << render_time << " with delay " + << max_wait; + return absl::nullopt; + } + + max_wait.Clamp(TimeDelta::Zero(), max_wait_for_frame); + RTC_DLOG(LS_VERBOSE) << "Selected frame with rtp " << next_temporal_unit_rtp + << " render time " << render_time + << " with a max wait of " << max_wait_for_frame + << " clamped to " << max_wait; + Timestamp latest_decode_time = now + max_wait; + return FrameSchedule{.latest_decode_time = latest_decode_time, + .render_time = render_time}; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_decode_timing.h b/TMessagesProj/jni/voip/webrtc/video/frame_decode_timing.h new file mode 100644 index 0000000000..6bde4702ad --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/frame_decode_timing.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_FRAME_DECODE_TIMING_H_ +#define VIDEO_FRAME_DECODE_TIMING_H_ + +#include + +#include + +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "modules/video_coding/timing/timing.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { + +class FrameDecodeTiming { + public: + FrameDecodeTiming(Clock* clock, webrtc::VCMTiming const* timing); + ~FrameDecodeTiming() = default; + FrameDecodeTiming(const FrameDecodeTiming&) = delete; + FrameDecodeTiming& operator=(const FrameDecodeTiming&) = delete; + + // Any frame that has decode delay more than this in the past can be + // fast-forwarded. 
+ static constexpr TimeDelta kMaxAllowedFrameDelay = TimeDelta::Millis(5); + + struct FrameSchedule { + Timestamp latest_decode_time; + Timestamp render_time; + }; + + absl::optional OnFrameBufferUpdated( + uint32_t next_temporal_unit_rtp, + uint32_t last_temporal_unit_rtp, + TimeDelta max_wait_for_frame, + bool too_many_frames_queued); + + private: + Clock* const clock_; + webrtc::VCMTiming const* const timing_; +}; + +} // namespace webrtc + +#endif // VIDEO_FRAME_DECODE_TIMING_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc b/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc index 51f09b09c4..d6095a090b 100644 --- a/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc +++ b/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc @@ -19,7 +19,6 @@ #include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/svc/create_scalability_structure.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/time_utils.h" namespace webrtc { @@ -67,11 +66,16 @@ void FrameEncodeMetadataWriter::OnEncoderInit(const VideoCodec& codec) { num_spatial_layers, static_cast(codec_settings_.VP9()->numberOfSpatialLayers)); } else if (codec_settings_.codecType == kVideoCodecAV1 && - codec_settings_.ScalabilityMode() != "") { + codec_settings_.GetScalabilityMode().has_value()) { std::unique_ptr structure = - CreateScalabilityStructure(codec_settings_.ScalabilityMode()); - RTC_DCHECK(structure); - num_spatial_layers = structure->StreamConfig().num_spatial_layers; + CreateScalabilityStructure(*codec_settings_.GetScalabilityMode()); + if (structure) { + num_spatial_layers = structure->StreamConfig().num_spatial_layers; + } else { + // |structure| maybe nullptr if the scalability mode is invalid. 
+ RTC_LOG(LS_WARNING) << "Cannot create ScalabilityStructure, since the " + "scalability mode is invalid"; + } } num_spatial_layers_ = std::max(num_spatial_layers, size_t{1}); } diff --git a/TMessagesProj/jni/voip/webrtc/video/full_stack_tests.cc b/TMessagesProj/jni/voip/webrtc/video/full_stack_tests.cc index 3831fdfcef..cddf98343d 100644 --- a/TMessagesProj/jni/voip/webrtc/video/full_stack_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/video/full_stack_tests.cc @@ -20,13 +20,13 @@ #include "api/test/video_quality_test_fixture.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_codec.h" -#include "api/video_codecs/video_encoder_config.h" #include "api/video_codecs/vp9_profile.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "system_wrappers/include/field_trial.h" #include "test/field_trial.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" +#include "video/config/video_encoder_config.h" #include "video/video_quality_test.h" ABSL_FLAG(std::string, diff --git a/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc b/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc index 6728e4bd19..715f3993ba 100644 --- a/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc @@ -17,6 +17,7 @@ #include "api/test/create_peer_connection_quality_test_frame_generator.h" #include "api/test/create_peerconnection_quality_test_fixture.h" #include "api/test/frame_generator_interface.h" +#include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/network_emulation_manager.h" #include "api/test/peerconnection_quality_test_fixture.h" #include "api/test/simulated_network.h" @@ -32,6 +33,8 @@ namespace webrtc { +using EmulatedSFUConfig = + webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::EmulatedSFUConfig; using PeerConfigurer = webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::PeerConfigurer; using RunParams = webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::RunParams; @@ -50,31 +53,6 @@ namespace { constexpr int kTestDurationSec = 45; -EmulatedNetworkNode* CreateEmulatedNodeWithConfig( - NetworkEmulationManager* emulation, - const BuiltInNetworkBehaviorConfig& config) { - return emulation->CreateEmulatedNode( - std::make_unique(config)); -} - -std::pair -CreateTwoNetworkLinks(NetworkEmulationManager* emulation, - const BuiltInNetworkBehaviorConfig& config) { - auto* alice_node = CreateEmulatedNodeWithConfig(emulation, config); - auto* bob_node = CreateEmulatedNodeWithConfig(emulation, config); - - auto* alice_endpoint = emulation->CreateEndpoint(EmulatedEndpointConfig()); - auto* bob_endpoint = emulation->CreateEndpoint(EmulatedEndpointConfig()); - - emulation->CreateRoute(alice_endpoint, {alice_node}, bob_endpoint); - emulation->CreateRoute(bob_endpoint, {bob_node}, alice_endpoint); - - return { - emulation->CreateEmulatedNetworkManagerInterface({alice_endpoint}), - emulation->CreateEmulatedNetworkManagerInterface({bob_endpoint}), - }; -} - std::unique_ptr CreateTestFixture(const std::string& test_case_name, TimeController& time_controller, @@ -91,7 +69,8 @@ CreateTestFixture(const std::string& test_case_name, bob_configurer); fixture->AddQualityMetricsReporter( std::make_unique( - network_links.first, network_links.second)); + network_links.first, network_links.second, + test::GetGlobalMetricsLogger())); return fixture; } @@ -106,6 +85,44 @@ std::string ClipNameToClipPath(const char* clip_name) { } // namespace +struct 
PCFullStackTestParams { + bool use_network_thread_as_worker_thread = false; + std::string field_trials; + std::string test_case_name_postfix; +}; + +std::vector ParameterizedTestParams() { + return {// Run with default parameters and field trials. + {}, + // Use the network thread as worker thread. + // Use the worker thread for sending packets. + // https://bugs.chromium.org/p/webrtc/issues/detail?id=14502 + {// TODO(webrtc:14502): Enable field trial soon but let it first run a + // couple of times to get a baseline.. + // .use_network_thread_as_worker_thread = true, + // .field_trials = "WebRTC-SendPacketsOnWorkerThread/Enabled/", + .test_case_name_postfix = "_ReducedThreads"}}; +} + +class ParameterizedPCFullStackTest + : public ::testing::TestWithParam { + public: + ParameterizedPCFullStackTest() : field_trials_(GetParam().field_trials) {} + + private: + test::ScopedFieldTrials field_trials_; +}; + +INSTANTIATE_TEST_SUITE_P( + ParameterizedPCFullStackTest, + ParameterizedPCFullStackTest, + testing::ValuesIn(ParameterizedTestParams()), + [](const testing::TestParamInfo& info) { + if (info.param.test_case_name_postfix.empty()) + return std::string("Default"); + return info.param.test_case_name_postfix; + }); + #if defined(RTC_ENABLE_VP9) TEST(PCFullStackTest, Pc_Foreman_Cif_Net_Delay_0_0_Plr_0_VP9) { std::unique_ptr network_emulation_manager = @@ -113,8 +130,8 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Net_Delay_0_0_Plr_0_VP9) { auto fixture = CreateTestFixture( "pc_foreman_cif_net_delay_0_0_plr_0_VP9", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), - BuiltInNetworkBehaviorConfig()), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes( + BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; @@ -132,10 +149,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Net_Delay_0_0_Plr_0_VP9) { {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCGenericDescriptorTest, @@ -148,7 +162,7 @@ TEST(PCGenericDescriptorTest, auto fixture = CreateTestFixture( "pc_foreman_cif_delay_50_0_plr_5_VP9_generic_descriptor", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; @@ -166,10 +180,7 @@ TEST(PCGenericDescriptorTest, {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } // VP9 2nd profile isn't supported on android arm and arm 64. 
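[Editorial note, before the VP9 profile-2 hunk that the comment above refers to.] The ParameterizedPCFullStackTest machinery introduced earlier in this file is standard GoogleTest value-parameterization: a params struct feeds testing::ValuesIn, TEST_P bodies read it through GetParam(), and the trailing lambda of INSTANTIATE_TEST_SUITE_P turns each params entry into a test-name suffix. As a reference for readers unfamiliar with the pattern, here is a minimal self-contained sketch; the names (ExampleParams, ExampleParameterizedTest) are illustrative only and do not come from this patch.

#include <string>
#include <vector>

#include <gtest/gtest.h>

// Hypothetical parameter struct mirroring the shape of PCFullStackTestParams.
struct ExampleParams {
  bool use_alternate_path = false;
  std::string test_case_name_postfix;
};

std::vector<ExampleParams> ExampleParamList() {
  return {// Default configuration.
          {},
          // Variant exercising an alternate code path.
          {.use_alternate_path = true, .test_case_name_postfix = "_Alternate"}};
}

class ExampleParameterizedTest
    : public ::testing::TestWithParam<ExampleParams> {};

TEST_P(ExampleParameterizedTest, RunsOncePerVariant) {
  // GetParam() returns the ExampleParams entry this instantiation was built from.
  if (GetParam().use_alternate_path) {
    // Configure the alternate path here.
  }
  SUCCEED();
}

INSTANTIATE_TEST_SUITE_P(
    ExampleParameterizedTestSuite,
    ExampleParameterizedTest,
    testing::ValuesIn(ExampleParamList()),
    // Name generator: gtest accepts only alphanumerics and underscores here.
    [](const testing::TestParamInfo<ExampleParams>& info) {
      if (info.param.test_case_name_postfix.empty())
        return std::string("Default");
      return info.param.test_case_name_postfix;
    });

This is exactly the shape used in this file: each TEST_P(ParameterizedPCFullStackTest, ...) runs once per entry of ParameterizedTestParams(), and the postfix keeps the generated test and metric names distinct.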
@@ -188,8 +199,8 @@ TEST(PCFullStackTest, MAYBE_Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2) { auto fixture = CreateTestFixture( "pc_generator_net_delay_0_0_plr_0_VP9Profile2", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), - BuiltInNetworkBehaviorConfig()), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes( + BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; @@ -207,10 +218,7 @@ TEST(PCFullStackTest, MAYBE_Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2) { {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}})}); }); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } /* @@ -250,8 +258,8 @@ TEST(PCFullStackTest, Pc_Net_Delay_0_0_Plr_0) { CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( "pc_net_delay_0_0_plr_0", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), - BuiltInNetworkBehaviorConfig()), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes( + BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(176, 144, 30); video.stream_label = "alice-video"; @@ -260,10 +268,7 @@ TEST(PCFullStackTest, Pc_Net_Delay_0_0_Plr_0) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCGenericDescriptorTest, @@ -273,8 +278,8 @@ TEST(PCGenericDescriptorTest, auto fixture = CreateTestFixture( "pc_foreman_cif_net_delay_0_0_plr_0_generic_descriptor", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), - BuiltInNetworkBehaviorConfig()), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes( + BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; @@ -283,10 +288,7 @@ TEST(PCGenericDescriptorTest, alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCGenericDescriptorTest, @@ -297,7 +299,7 @@ TEST(PCGenericDescriptorTest, auto fixture = CreateTestFixture( "pc_foreman_cif_30kbps_net_delay_0_0_plr_0_generic_descriptor", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 10); video.stream_label = "alice-video"; @@ -313,8 +315,6 @@ TEST(PCGenericDescriptorTest, }, [](PeerConfigurer* bob) {}); RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } @@ -327,7 +327,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Link_150kbps_Net_Delay_0_0_Plr_0) { auto fixture = 
CreateTestFixture( "pc_foreman_cif_link_150kbps_net_delay_0_0_plr_0", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; @@ -336,10 +336,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Link_150kbps_Net_Delay_0_0_Plr_0) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCFullStackTest, Pc_Foreman_Cif_Link_130kbps_Delay100ms_Loss1_Ulpfec) { @@ -352,19 +349,17 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Link_130kbps_Delay100ms_Loss1_Ulpfec) { auto fixture = CreateTestFixture( "pc_foreman_cif_link_130kbps_delay100ms_loss1_ulpfec", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); + alice->SetUseUlpFEC(true); }, - [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = true; - fixture->Run(std::move(run_params)); + [](PeerConfigurer* bob) { bob->SetUseUlpFEC(true); }); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCFullStackTest, Pc_Foreman_Cif_Link_50kbps_Delay100ms_Loss1_Ulpfec) { @@ -377,19 +372,17 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Link_50kbps_Delay100ms_Loss1_Ulpfec) { auto fixture = CreateTestFixture( "pc_foreman_cif_link_50kbps_delay100ms_loss1_ulpfec", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); + alice->SetUseUlpFEC(true); }, - [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = true; - fixture->Run(std::move(run_params)); + [](PeerConfigurer* bob) { bob->SetUseUlpFEC(true); }); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } // Restricted network and encoder overproducing by 30%. 
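[Editorial note; the next hunk continues with the restricted-network test flagged just above. First, a summary of the FEC migration that the preceding hunks apply test by test.] RunParams::use_ulp_fec and RunParams::use_flex_fec are gone; each peer now opts in via PeerConfigurer::SetUseUlpFEC()/SetUseFlexFEC(), and RunParams only keeps enable_flex_fec_support for the FlexFEC cases. A condensed before/after sketch of the migration, built solely from calls that appear in this patch (CreateTestFixture is the file-local helper and the test name is made up, so treat this as an excerpt rather than a standalone program):

// Before: FEC was configured globally on RunParams.
//   RunParams run_params(TimeDelta::Seconds(kTestDurationSec));
//   run_params.use_flex_fec = false;
//   run_params.use_ulp_fec = true;
//   fixture->Run(std::move(run_params));

// After: each PeerConfigurer opts in explicitly.
auto fixture = CreateTestFixture(
    "pc_example_ulpfec",  // hypothetical name, for illustration only
    *network_emulation_manager->time_controller(),
    network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config),
    [](PeerConfigurer* alice) {
      // Video config omitted for brevity.
      alice->SetUseUlpFEC(true);  // SetUseFlexFEC(true) in the FlexFEC tests.
    },
    [](PeerConfigurer* bob) { bob->SetUseUlpFEC(true); });
RunParams run_params(TimeDelta::Seconds(kTestDurationSec));
// Only the FlexFEC tests additionally set:
//   run_params.enable_flex_fec_support = true;
fixture->Run(std::move(run_params));

Tests that need no FEC or bitrate tweaks collapse further to fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))), which is why so many RunParams blocks disappear in the hunks that follow.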
@@ -404,20 +397,17 @@ TEST(PCFullStackTest, auto fixture = CreateTestFixture( "pc_foreman_cif_link_150kbps_delay100ms_30pkts_queue_overshoot30", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); + alice->SetVideoEncoderBitrateMultiplier(1.30); }, - [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - run_params.video_encoder_bitrate_multiplier = 1.30; - fixture->Run(std::move(run_params)); + [](PeerConfigurer* bob) { bob->SetVideoEncoderBitrateMultiplier(1.30); }); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } // Weak 3G-style link: 250kbps, 1% loss, 100ms delay, 15 packets queue. @@ -435,20 +425,17 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Link_250kbps_Delay100ms_10pkts_Loss1) { auto fixture = CreateTestFixture( "pc_foreman_cif_link_250kbps_delay100ms_10pkts_loss1", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); + alice->SetVideoEncoderBitrateMultiplier(1.30); }, - [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - run_params.video_encoder_bitrate_multiplier = 1.30; - fixture->Run(std::move(run_params)); + [](PeerConfigurer* bob) { bob->SetVideoEncoderBitrateMultiplier(1.30); }); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCGenericDescriptorTest, @@ -461,7 +448,7 @@ TEST(PCGenericDescriptorTest, auto fixture = CreateTestFixture( "pc_foreman_cif_delay_50_0_plr_5_generic_descriptor", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; @@ -470,10 +457,7 @@ TEST(PCGenericDescriptorTest, alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCGenericDescriptorTest, @@ -486,19 +470,17 @@ TEST(PCGenericDescriptorTest, auto fixture = CreateTestFixture( "pc_foreman_cif_delay_50_0_plr_5_ulpfec_generic_descriptor", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; auto frame_generator = 
CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); + alice->SetUseUlpFEC(true); }, - [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = true; - fixture->Run(std::move(run_params)); + [](PeerConfigurer* bob) { bob->SetUseUlpFEC(true); }); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_Flexfec) { @@ -510,18 +492,18 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_Flexfec) { auto fixture = CreateTestFixture( "pc_foreman_cif_delay_50_0_plr_5_flexfec", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); + alice->SetUseFlexFEC(true); }, - [](PeerConfigurer* bob) {}); + [](PeerConfigurer* bob) { bob->SetUseFlexFEC(true); }); RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = true; - run_params.use_ulp_fec = false; + run_params.enable_flex_fec_support = true; fixture->Run(std::move(run_params)); } @@ -535,18 +517,18 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_Delay_50_0_Plr_3_Flexfec) { auto fixture = CreateTestFixture( "pc_foreman_cif_500kbps_delay_50_0_plr_3_flexfec", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); + alice->SetUseFlexFEC(true); }, - [](PeerConfigurer* bob) {}); + [](PeerConfigurer* bob) { bob->SetUseFlexFEC(true); }); RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = true; - run_params.use_ulp_fec = false; + run_params.enable_flex_fec_support = true; fixture->Run(std::move(run_params)); } @@ -560,19 +542,17 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_Delay_50_0_Plr_3_Ulpfec) { auto fixture = CreateTestFixture( "pc_foreman_cif_500kbps_delay_50_0_plr_3_ulpfec", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); + alice->SetUseUlpFEC(true); }, - [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = true; - fixture->Run(std::move(run_params)); + [](PeerConfigurer* bob) { bob->SetUseUlpFEC(true); }); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } #if defined(WEBRTC_USE_H264) @@ -582,8 +562,8 @@ TEST(PCFullStackTest, 
Pc_Foreman_Cif_Net_Delay_0_0_Plr_0_H264) { auto fixture = CreateTestFixture( "pc_foreman_cif_net_delay_0_0_plr_0_H264", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), - BuiltInNetworkBehaviorConfig()), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes( + BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; @@ -595,10 +575,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Net_Delay_0_0_Plr_0_H264) { [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); }); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCFullStackTest, Pc_Foreman_Cif_30kbps_Net_Delay_0_0_Plr_0_H264) { @@ -608,7 +585,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_30kbps_Net_Delay_0_0_Plr_0_H264) { auto fixture = CreateTestFixture( "pc_foreman_cif_30kbps_net_delay_0_0_plr_0_H264", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 10); video.stream_label = "alice-video"; @@ -626,10 +603,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_30kbps_Net_Delay_0_0_Plr_0_H264) { [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); }); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCGenericDescriptorTest, @@ -642,7 +616,7 @@ TEST(PCGenericDescriptorTest, auto fixture = CreateTestFixture( "pc_foreman_cif_delay_50_0_plr_5_H264_generic_descriptor", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; @@ -654,10 +628,7 @@ TEST(PCGenericDescriptorTest, [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); }); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Sps_Pps_Idr) { @@ -672,7 +643,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Sps_Pps_Idr) { auto fixture = CreateTestFixture( "pc_foreman_cif_delay_50_0_plr_5_H264_sps_pps_idr", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; @@ -684,10 +655,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Sps_Pps_Idr) { [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); }); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - 
fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Flexfec) { @@ -699,7 +667,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Flexfec) { auto fixture = CreateTestFixture( "pc_foreman_cif_delay_50_0_plr_5_H264_flexfec", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; @@ -707,13 +675,14 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Flexfec) { video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); alice->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + alice->SetUseFlexFEC(true); }, [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + bob->SetUseFlexFEC(true); }); RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = true; - run_params.use_ulp_fec = false; + run_params.enable_flex_fec_support = true; fixture->Run(std::move(run_params)); } @@ -728,7 +697,7 @@ TEST(PCFullStackTest, DISABLED_Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Ulpfec) { auto fixture = CreateTestFixture( "pc_foreman_cif_delay_50_0_plr_5_H264_ulpfec", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; @@ -736,14 +705,13 @@ TEST(PCFullStackTest, DISABLED_Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Ulpfec) { video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); alice->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + alice->SetUseUlpFEC(true); }, [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + bob->SetUseUlpFEC(true); }); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = true; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } #endif // defined(WEBRTC_USE_H264) @@ -756,7 +724,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps) { config.link_capacity_kbps = 500; auto fixture = CreateTestFixture( "pc_foreman_cif_500kbps", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; @@ -765,13 +733,10 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } -TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_32pkts_Queue) { +TEST_P(ParameterizedPCFullStackTest, Pc_Foreman_Cif_500kbps_32pkts_Queue) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -779,21 
+744,25 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_32pkts_Queue) { config.queue_delay_ms = 0; config.link_capacity_kbps = 500; auto fixture = CreateTestFixture( - "pc_foreman_cif_500kbps_32pkts_queue", + "pc_foreman_cif_500kbps_32pkts_queue" + GetParam().test_case_name_postfix, *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); + if (GetParam().use_network_thread_as_worker_thread) { + alice->SetUseNetworkThreadAsWorkerThread(); + } }, - [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + [](PeerConfigurer* bob) { + if (GetParam().use_network_thread_as_worker_thread) { + bob->SetUseNetworkThreadAsWorkerThread(); + } + }); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_100ms) { @@ -806,7 +775,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_100ms) { auto fixture = CreateTestFixture( "pc_foreman_cif_500kbps_100ms", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; @@ -815,10 +784,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_100ms) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCGenericDescriptorTest, @@ -832,7 +798,7 @@ TEST(PCGenericDescriptorTest, auto fixture = CreateTestFixture( "pc_foreman_cif_500kbps_100ms_32pkts_queue_generic_descriptor", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; @@ -841,10 +807,7 @@ TEST(PCGenericDescriptorTest, alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } /* @@ -878,7 +841,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_1000kbps_100ms_32pkts_Queue) { auto fixture = CreateTestFixture( "pc_foreman_cif_1000kbps_100ms_32pkts_queue", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); video.stream_label = "alice-video"; @@ -887,10 +850,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_1000kbps_100ms_32pkts_Queue) { 
alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } // TODO(sprang): Remove this if we have the similar ModerateLimits below? @@ -904,7 +864,7 @@ TEST(PCFullStackTest, Pc_Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue) { auto fixture = CreateTestFixture( "pc_conference_motion_hd_2000kbps_100ms_32pkts_queue", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(1280, 720, 50); video.stream_label = "alice-video"; @@ -913,10 +873,7 @@ TEST(PCFullStackTest, Pc_Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } /* @@ -1046,7 +1003,8 @@ TEST(PCFullStackTest, */ #if defined(RTC_ENABLE_VP9) -TEST(PCFullStackTest, Pc_Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue_Vp9) { +TEST_P(ParameterizedPCFullStackTest, + Pc_Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue_Vp9) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -1054,9 +1012,10 @@ TEST(PCFullStackTest, Pc_Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue_Vp9) { config.queue_delay_ms = 100; config.link_capacity_kbps = 2000; auto fixture = CreateTestFixture( - "pc_conference_motion_hd_2000kbps_100ms_32pkts_queue_vp9", + "pc_conference_motion_hd_2000kbps_100ms_32pkts_queue_vp9" + + GetParam().test_case_name_postfix, *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(1280, 720, 50); video.stream_label = "alice-video"; @@ -1067,17 +1026,20 @@ TEST(PCFullStackTest, Pc_Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue_Vp9) { /*name=*/cricket::kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); + if (GetParam().use_network_thread_as_worker_thread) { + alice->SetUseNetworkThreadAsWorkerThread(); + } }, [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig( /*name=*/cricket::kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); + if (GetParam().use_network_thread_as_worker_thread) { + bob->SetUseNetworkThreadAsWorkerThread(); + } }); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } #endif @@ -1087,8 +1049,8 @@ TEST(PCFullStackTest, Pc_Screenshare_Slides_No_Conference_Mode) { auto fixture = CreateTestFixture( "pc_screenshare_slides_no_conference_mode", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), - BuiltInNetworkBehaviorConfig()), + 
network_emulation_manager->CreateEndpointPairWithTwoWayRoutes( + BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(1850, 1110, 5); video.stream_label = "alice-video"; @@ -1098,10 +1060,7 @@ TEST(PCFullStackTest, Pc_Screenshare_Slides_No_Conference_Mode) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCFullStackTest, Pc_Screenshare_Slides) { @@ -1109,8 +1068,8 @@ TEST(PCFullStackTest, Pc_Screenshare_Slides) { CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( "pc_screenshare_slides", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), - BuiltInNetworkBehaviorConfig()), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes( + BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(1850, 1110, 5); video.stream_label = "alice-video"; @@ -1121,8 +1080,6 @@ TEST(PCFullStackTest, Pc_Screenshare_Slides) { }, [](PeerConfigurer* bob) {}); RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; run_params.use_conference_mode = true; fixture->Run(std::move(run_params)); } @@ -1135,11 +1092,12 @@ TEST(PCFullStackTest, Pc_Screenshare_Slides_Simulcast_No_Conference_Mode) { auto fixture = CreateTestFixture( "pc_screenshare_slides_simulcast_no_conference_mode", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), - BuiltInNetworkBehaviorConfig()), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes( + BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(1850, 1110, 30); - video.simulcast_config = VideoSimulcastConfig(2, 1); + video.simulcast_config = VideoSimulcastConfig(2); + video.emulated_sfu_config = EmulatedSFUConfig(1); video.temporal_layers_count = 2; video.stream_label = "alice-video"; video.content_hint = VideoTrackInterface::ContentHint::kText; @@ -1148,34 +1106,37 @@ TEST(PCFullStackTest, Pc_Screenshare_Slides_Simulcast_No_Conference_Mode) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } -TEST(PCFullStackTest, Pc_Screenshare_Slides_Simulcast) { +TEST_P(ParameterizedPCFullStackTest, Pc_Screenshare_Slides_Simulcast) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( - "pc_screenshare_slides_simulcast", + "pc_screenshare_slides_simulcast" + GetParam().test_case_name_postfix, *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), - BuiltInNetworkBehaviorConfig()), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes( + BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(1850, 1110, 30); - video.simulcast_config = VideoSimulcastConfig(2, 1); + video.simulcast_config = VideoSimulcastConfig(2); + video.emulated_sfu_config = EmulatedSFUConfig(1); video.temporal_layers_count = 2; 
video.stream_label = "alice-video"; video.content_hint = VideoTrackInterface::ContentHint::kText; auto frame_generator = CreateScreenShareFrameGenerator( video, ScreenShareConfig(TimeDelta::Seconds(10))); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); + if (GetParam().use_network_thread_as_worker_thread) { + alice->SetUseNetworkThreadAsWorkerThread(); + } }, - [](PeerConfigurer* bob) {}); + [](PeerConfigurer* bob) { + if (GetParam().use_network_thread_as_worker_thread) { + bob->SetUseNetworkThreadAsWorkerThread(); + } + }); RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; run_params.use_conference_mode = true; fixture->Run(std::move(run_params)); } @@ -1367,12 +1328,13 @@ TEST(PCFullStackTest, Pc_Screenshare_Slides_Vp9_3sl_High_Fps) { auto fixture = CreateTestFixture( "pc_screenshare_slides_vp9_3sl_high_fps", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), - BuiltInNetworkBehaviorConfig()), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes( + BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(1850, 1110, 30); video.stream_label = "alice-video"; - video.simulcast_config = VideoSimulcastConfig(3, 2); + video.simulcast_config = VideoSimulcastConfig(3); + video.emulated_sfu_config = EmulatedSFUConfig(2); video.content_hint = VideoTrackInterface::ContentHint::kText; auto frame_generator = CreateScreenShareFrameGenerator( video, ScreenShareConfig(TimeDelta::Seconds(10))); @@ -1388,10 +1350,7 @@ TEST(PCFullStackTest, Pc_Screenshare_Slides_Vp9_3sl_High_Fps) { {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCFullStackTest, Pc_Vp9svc_3sl_High) { @@ -1402,12 +1361,13 @@ TEST(PCFullStackTest, Pc_Vp9svc_3sl_High) { CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( "pc_vp9svc_3sl_high", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), - BuiltInNetworkBehaviorConfig()), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes( + BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(1280, 720, 30); video.stream_label = "alice-video"; - video.simulcast_config = VideoSimulcastConfig(3, 2); + video.simulcast_config = VideoSimulcastConfig(3); + video.emulated_sfu_config = EmulatedSFUConfig(2); video.temporal_layers_count = 3; auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("ConferenceMotion_1280_720_50")); @@ -1423,10 +1383,7 @@ TEST(PCFullStackTest, Pc_Vp9svc_3sl_High) { {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCFullStackTest, Pc_Vp9svc_3sl_Low) { @@ -1437,12 +1394,13 @@ TEST(PCFullStackTest, Pc_Vp9svc_3sl_Low) { CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( "pc_vp9svc_3sl_low", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), - BuiltInNetworkBehaviorConfig()), + 
network_emulation_manager->CreateEndpointPairWithTwoWayRoutes( + BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(1280, 720, 30); video.stream_label = "alice-video"; - video.simulcast_config = VideoSimulcastConfig(3, 0); + video.simulcast_config = VideoSimulcastConfig(3); + video.emulated_sfu_config = EmulatedSFUConfig(0); video.temporal_layers_count = 3; auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("ConferenceMotion_1280_720_50")); @@ -1458,10 +1416,7 @@ TEST(PCFullStackTest, Pc_Vp9svc_3sl_Low) { {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } #endif // defined(RTC_ENABLE_VP9) @@ -1573,44 +1528,47 @@ TEST(PCFullStackTest, MAYBE_Pc_Simulcast_HD_High) { config.queue_delay_ms = 100; auto fixture = CreateTestFixture( "pc_simulcast_HD_high", *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(1920, 1080, 30); - video.simulcast_config = VideoSimulcastConfig(3, 2); + video.simulcast_config = VideoSimulcastConfig(3); + video.emulated_sfu_config = EmulatedSFUConfig(2); video.temporal_layers_count = 3; video.stream_label = "alice-video"; alice->AddVideoConfig(std::move(video)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } -TEST(PCFullStackTest, Pc_Simulcast_Vp8_3sl_High) { +TEST_P(ParameterizedPCFullStackTest, Pc_Simulcast_Vp8_3sl_High) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; config.loss_percent = 0; config.queue_delay_ms = 100; auto fixture = CreateTestFixture( - "pc_simulcast_vp8_3sl_high", + "pc_simulcast_vp8_3sl_high" + GetParam().test_case_name_postfix, *network_emulation_manager->time_controller(), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), + network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), [](PeerConfigurer* alice) { VideoConfig video(1280, 720, 30); - video.simulcast_config = VideoSimulcastConfig(3, 2); + video.simulcast_config = VideoSimulcastConfig(3); + video.emulated_sfu_config = EmulatedSFUConfig(2); video.stream_label = "alice-video"; auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("ConferenceMotion_1280_720_50")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); + if (GetParam().use_network_thread_as_worker_thread) { + alice->SetUseNetworkThreadAsWorkerThread(); + } }, - [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); + [](PeerConfigurer* bob) { + if (GetParam().use_network_thread_as_worker_thread) { + bob->SetUseNetworkThreadAsWorkerThread(); + } + }); + fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } TEST(PCFullStackTest, Pc_Simulcast_Vp8_3sl_Low) { @@ -1621,20 +1579,18 @@ TEST(PCFullStackTest, Pc_Simulcast_Vp8_3sl_Low) { 
   config.queue_delay_ms = 100;
   auto fixture = CreateTestFixture(
       "pc_simulcast_vp8_3sl_low", *network_emulation_manager->time_controller(),
-      CreateTwoNetworkLinks(network_emulation_manager.get(), config),
+      network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config),
       [](PeerConfigurer* alice) {
         VideoConfig video(1280, 720, 30);
-        video.simulcast_config = VideoSimulcastConfig(3, 0);
+        video.simulcast_config = VideoSimulcastConfig(3);
+        video.emulated_sfu_config = EmulatedSFUConfig(0);
         video.stream_label = "alice-video";
         auto frame_generator = CreateFromYuvFileFrameGenerator(
             video, ClipNameToClipPath("ConferenceMotion_1280_720_50"));
         alice->AddVideoConfig(std::move(video), std::move(frame_generator));
       },
       [](PeerConfigurer* bob) {});
-  RunParams run_params(TimeDelta::Seconds(kTestDurationSec));
-  run_params.use_flex_fec = false;
-  run_params.use_ulp_fec = false;
-  fixture->Run(std::move(run_params));
+  fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec)));
 }
 
 /*
diff --git a/TMessagesProj/jni/voip/webrtc/video/picture_id_tests.cc b/TMessagesProj/jni/voip/webrtc/video/picture_id_tests.cc
index 6dda450a82..8ea9078c5d 100644
--- a/TMessagesProj/jni/voip/webrtc/video/picture_id_tests.cc
+++ b/TMessagesProj/jni/voip/webrtc/video/picture_id_tests.cc
@@ -43,7 +43,7 @@ const size_t kNumTemporalLayers[] = {1, 2, 3};
 class PictureIdObserver : public test::RtpRtcpObserver {
  public:
   explicit PictureIdObserver(VideoCodecType codec_type)
-      : test::RtpRtcpObserver(test::CallTest::kDefaultTimeoutMs),
+      : test::RtpRtcpObserver(test::CallTest::kDefaultTimeout),
         depacketizer_(CreateVideoRtpDepacketizer(codec_type)),
         max_expected_picture_id_gap_(0),
         max_expected_tl0_idx_gap_(0),
@@ -135,11 +135,15 @@ class PictureIdObserver : public test::RtpRtcpObserver {
     // Expect continuously increasing picture id.
     int diff = ForwardDiff(last.picture_id, current.picture_id);
-    if (diff > 1) {
+    EXPECT_LE(diff - 1, max_expected_picture_id_gap_);
+    if (diff > 2) {
       // If the VideoSendStream is destroyed, any frames still in queue is lost.
-      // Gaps only possible for first frame after a recreation, i.e. key frames.
+      // This can result in a two-frame gap, which will result in logs like
+      // "packet transmission failed, no matching RTP module found, or
+      // transmission error".
+      // A larger gap is only possible for first frame after a recreation, i.e.
+      // key frames.
       EXPECT_EQ(VideoFrameType::kVideoFrameKey, current.frame_type);
-      EXPECT_LE(diff - 1, max_expected_picture_id_gap_);
     }
   }
@@ -213,7 +217,7 @@ class PictureIdTest : public test::CallTest,
   PictureIdTest() : num_temporal_layers_(GetParam()) {}
 
   virtual ~PictureIdTest() {
-    SendTask(RTC_FROM_HERE, task_queue(), [this]() {
+    SendTask(task_queue(), [this]() {
       send_transport_.reset();
       receive_transport_.reset();
       DestroyCalls();
@@ -233,7 +237,14 @@ class PictureIdTest : public test::CallTest,
   std::unique_ptr<PictureIdObserver> observer_;
 };
 
-INSTANTIATE_TEST_SUITE_P(TemporalLayers,
+// TODO(bugs.webrtc.org/13725): Enable on android when flakiness fixed.
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_TemporalLayers DISABLED_TemporalLayers
+#else
+#define MAYBE_TemporalLayers TemporalLayers
+#endif
+
+INSTANTIATE_TEST_SUITE_P(MAYBE_TemporalLayers,
                          PictureIdTest,
                          ::testing::ValuesIn(kNumTemporalLayers));
@@ -243,7 +254,7 @@ void PictureIdTest::SetupEncoder(VideoEncoderFactory* encoder_factory,
       new PictureIdObserver(PayloadStringToCodecType(payload_name)));
 
   SendTask(
-      RTC_FROM_HERE, task_queue(), [this, encoder_factory, payload_name]() {
+      task_queue(), [this, encoder_factory, payload_name]() {
         CreateCalls();
 
         send_transport_.reset(new test::PacketTransport(
@@ -288,7 +299,7 @@ void PictureIdTest::SetVideoEncoderConfig(int num_streams) {
 
 void PictureIdTest::TestPictureIdContinuousAfterReconfigure(
     const std::vector<int>& ssrc_counts) {
-  SendTask(RTC_FROM_HERE, task_queue(), [this]() {
+  SendTask(task_queue(), [this]() {
     CreateVideoStreams();
     CreateFrameGeneratorCapturer(kFrameRate, kFrameMaxWidth, kFrameMaxHeight);
 
@@ -306,14 +317,14 @@ void PictureIdTest::TestPictureIdContinuousAfterReconfigure(
     observer_->SetExpectedSsrcs(ssrc_count);
     observer_->ResetObservedSsrcs();
     // Make sure the picture_id sequence is continuous on reinit and recreate.
-    SendTask(RTC_FROM_HERE, task_queue(), [this]() {
+    SendTask(task_queue(), [this]() {
       GetVideoSendStream()->ReconfigureVideoEncoder(
           GetVideoEncoderConfig()->Copy());
     });
     EXPECT_TRUE(observer_->Wait()) << "Timed out waiting for packets.";
   }
 
-  SendTask(RTC_FROM_HERE, task_queue(), [this]() {
+  SendTask(task_queue(), [this]() {
     Stop();
     DestroyStreams();
   });
@@ -321,7 +332,7 @@ void PictureIdTest::TestPictureIdContinuousAfterReconfigure(
 
 void PictureIdTest::TestPictureIdIncreaseAfterRecreateStreams(
     const std::vector<int>& ssrc_counts) {
-  SendTask(RTC_FROM_HERE, task_queue(), [this]() {
+  SendTask(task_queue(), [this]() {
     CreateVideoStreams();
     CreateFrameGeneratorCapturer(kFrameRate, kFrameMaxWidth, kFrameMaxHeight);
 
@@ -336,7 +347,7 @@ void PictureIdTest::TestPictureIdIncreaseAfterRecreateStreams(
   // with it, therefore it is expected that some frames might be lost.
   observer_->SetMaxExpectedPictureIdGap(kMaxFramesLost);
   for (int ssrc_count : ssrc_counts) {
-    SendTask(RTC_FROM_HERE, task_queue(), [this, &ssrc_count]() {
+    SendTask(task_queue(), [this, &ssrc_count]() {
       DestroyVideoSendStreams();
 
       SetVideoEncoderConfig(ssrc_count);
@@ -351,7 +362,7 @@ void PictureIdTest::TestPictureIdIncreaseAfterRecreateStreams(
     EXPECT_TRUE(observer_->Wait()) << "Timed out waiting for packets.";
   }
 
-  SendTask(RTC_FROM_HERE, task_queue(), [this]() {
+  SendTask(task_queue(), [this]() {
     Stop();
     DestroyStreams();
   });
diff --git a/TMessagesProj/jni/voip/webrtc/video/quality_limitation_reason_tracker.h b/TMessagesProj/jni/voip/webrtc/video/quality_limitation_reason_tracker.h
index 22816a8272..15bc90773a 100644
--- a/TMessagesProj/jni/voip/webrtc/video/quality_limitation_reason_tracker.h
+++ b/TMessagesProj/jni/voip/webrtc/video/quality_limitation_reason_tracker.h
@@ -24,6 +24,8 @@ namespace webrtc {
 // duration of time spent in each reason. See qualityLimitationReason[1],
 // qualityLimitationDurations[2], and qualityLimitationResolutionChanges[3] in
 // the webrtc-stats spec.
+// Note that the specification defines the durations in seconds while the
+// internal data structures defines it in milliseconds.
 // [1]
 // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-qualitylimitationreason
 // [2]
diff --git a/TMessagesProj/jni/voip/webrtc/video/quality_scaling_tests.cc b/TMessagesProj/jni/voip/webrtc/video/quality_scaling_tests.cc
index c7c393e861..7eaf14831b 100644
--- a/TMessagesProj/jni/voip/webrtc/video/quality_scaling_tests.cc
+++ b/TMessagesProj/jni/voip/webrtc/video/quality_scaling_tests.cc
@@ -19,6 +19,7 @@
 #include "test/call_test.h"
 #include "test/field_trial.h"
 #include "test/frame_generator_capturer.h"
+#include "video/config/encoder_stream_factory.h"
 
 namespace webrtc {
 namespace {
@@ -27,7 +28,8 @@ constexpr int kInitialHeight = 720;
 constexpr int kLowStartBps = 100000;
 constexpr int kHighStartBps = 1000000;
 constexpr int kDefaultVgaMinStartBps = 500000;  // From video_stream_encoder.cc
-constexpr int kTimeoutMs = 10000;  // Some tests are expected to time out.
+constexpr TimeDelta kTimeout =
+    TimeDelta::Seconds(10);  // Some tests are expected to time out.
 
 void SetEncoderSpecific(VideoEncoderConfig* encoder_config,
                         VideoCodecType type,
@@ -64,16 +66,25 @@ class QualityScalingTest : public test::CallTest {
           EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution(
               kVideoCodecVP9,
               640 * 360);
+  const absl::optional<VideoEncoder::ResolutionBitrateLimits>
+      kSinglecastLimits720pVp9 =
+          EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution(
+              kVideoCodecVP9,
+              1280 * 720);
 };
 
 class ScalingObserver : public test::SendTest {
  protected:
+  struct TestParams {
+    bool active;
+    absl::optional<ScalabilityMode> scalability_mode;
+  };
   ScalingObserver(const std::string& payload_name,
-                  const std::vector<bool>& streams_active,
+                  const std::vector<TestParams>& test_params,
                   int start_bps,
                   bool automatic_resize,
                   bool expect_scaling)
-      : SendTest(expect_scaling ? kTimeoutMs * 4 : kTimeoutMs),
+      : SendTest(expect_scaling ? kTimeout * 4 : kTimeout),
         encoder_factory_(
             [](const SdpVideoFormat& format) -> std::unique_ptr<VideoEncoder> {
               if (format.name == "VP8")
@@ -86,7 +97,7 @@ class ScalingObserver : public test::SendTest {
               return nullptr;
             }),
         payload_name_(payload_name),
-        streams_active_(streams_active),
+        test_params_(test_params),
         start_bps_(start_bps),
         automatic_resize_(automatic_resize),
         expect_scaling_(expect_scaling) {}
@@ -105,43 +116,50 @@ class ScalingObserver : public test::SendTest {
   }
 
   size_t GetNumVideoStreams() const override {
-    return (payload_name_ == "VP9") ? 1 : streams_active_.size();
+    return (payload_name_ == "VP9") ? 1 : test_params_.size();
   }
 
   void ModifyVideoConfigs(
       VideoSendStream::Config* send_config,
-      std::vector<VideoReceiveStream::Config>* receive_configs,
+      std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
       VideoEncoderConfig* encoder_config) override {
+    VideoEncoder::EncoderInfo encoder_info;
     send_config->encoder_settings.encoder_factory = &encoder_factory_;
     send_config->rtp.payload_name = payload_name_;
     send_config->rtp.payload_type = test::CallTest::kVideoSendPayloadType;
     encoder_config->video_format.name = payload_name_;
     const VideoCodecType codec_type = PayloadStringToCodecType(payload_name_);
     encoder_config->codec_type = codec_type;
+    encoder_config->video_stream_factory =
+        rtc::make_ref_counted<cricket::EncoderStreamFactory>(
+            payload_name_, /*max_qp=*/0, /*is_screenshare=*/false,
+            /*conference_mode=*/false, encoder_info);
     encoder_config->max_bitrate_bps =
         std::max(start_bps_, encoder_config->max_bitrate_bps);
     if (payload_name_ == "VP9") {
       // Simulcast layers indicates which spatial layers are active.
- encoder_config->simulcast_layers.resize(streams_active_.size()); + encoder_config->simulcast_layers.resize(test_params_.size()); encoder_config->simulcast_layers[0].max_bitrate_bps = encoder_config->max_bitrate_bps; } double scale_factor = 1.0; - for (int i = streams_active_.size() - 1; i >= 0; --i) { + for (int i = test_params_.size() - 1; i >= 0; --i) { VideoStream& stream = encoder_config->simulcast_layers[i]; - stream.active = streams_active_[i]; + stream.active = test_params_[i].active; + stream.scalability_mode = test_params_[i].scalability_mode; stream.scale_resolution_down_by = scale_factor; scale_factor *= (payload_name_ == "VP9") ? 1.0 : 2.0; } + encoder_config->frame_drop_enabled = true; SetEncoderSpecific(encoder_config, codec_type, automatic_resize_, - streams_active_.size()); + test_params_.size()); } void PerformTest() override { EXPECT_EQ(expect_scaling_, Wait()); } test::FunctionVideoEncoderFactory encoder_factory_; const std::string payload_name_; - const std::vector streams_active_; + const std::vector test_params_; const int start_bps_; const bool automatic_resize_; const bool expect_scaling_; @@ -152,12 +170,12 @@ class DownscalingObserver public test::FrameGeneratorCapturer::SinkWantsObserver { public: DownscalingObserver(const std::string& payload_name, - const std::vector& streams_active, + const std::vector& test_params, int start_bps, bool automatic_resize, bool expect_downscale) : ScalingObserver(payload_name, - streams_active, + test_params, start_bps, automatic_resize, expect_downscale) {} @@ -181,12 +199,12 @@ class UpscalingObserver public test::FrameGeneratorCapturer::SinkWantsObserver { public: UpscalingObserver(const std::string& payload_name, - const std::vector& streams_active, + const std::vector& test_params, int start_bps, bool automatic_resize, bool expect_upscale) : ScalingObserver(payload_name, - streams_active, + test_params, start_bps, automatic_resize, expect_upscale) {} @@ -216,9 +234,10 @@ class UpscalingObserver TEST_F(QualityScalingTest, AdaptsDownForHighQp_Vp8) { // qp_low:1, qp_high:1 -> kHighQp - test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "1,1,0,0,0,0" + kEnd); - DownscalingObserver test("VP8", /*streams_active=*/{true}, kHighStartBps, + DownscalingObserver test("VP8", {{.active = true}}, kHighStartBps, /*automatic_resize=*/true, /*expect_downscale=*/true); RunBaseTest(&test); @@ -226,9 +245,10 @@ TEST_F(QualityScalingTest, AdaptsDownForHighQp_Vp8) { TEST_F(QualityScalingTest, NoAdaptDownForHighQpIfScalingOff_Vp8) { // qp_low:1, qp_high:1 -> kHighQp - test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "1,1,0,0,0,0" + kEnd); - DownscalingObserver test("VP8", /*streams_active=*/{true}, kHighStartBps, + DownscalingObserver test("VP8", {{.active = true}}, kHighStartBps, /*automatic_resize=*/false, /*expect_downscale=*/false); RunBaseTest(&test); @@ -236,9 +256,10 @@ TEST_F(QualityScalingTest, NoAdaptDownForHighQpIfScalingOff_Vp8) { TEST_F(QualityScalingTest, NoAdaptDownForNormalQp_Vp8) { // qp_low:1, qp_high:127 -> kNormalQp - test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "1,127,0,0,0,0" + kEnd); - DownscalingObserver test("VP8", /*streams_active=*/{true}, kHighStartBps, + DownscalingObserver test("VP8", {{.active = true}}, kHighStartBps, /*automatic_resize=*/true, 
/*expect_downscale=*/false); RunBaseTest(&test); @@ -246,9 +267,10 @@ TEST_F(QualityScalingTest, NoAdaptDownForNormalQp_Vp8) { TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate_Vp8) { // qp_low:1, qp_high:127 -> kNormalQp - test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "1,127,0,0,0,0" + kEnd); - DownscalingObserver test("VP8", /*streams_active=*/{true}, kLowStartBps, + DownscalingObserver test("VP8", {{.active = true}}, kLowStartBps, /*automatic_resize=*/true, /*expect_downscale=*/true); RunBaseTest(&test); @@ -256,26 +278,25 @@ TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate_Vp8) { TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrateAndThenUp) { // qp_low:127, qp_high:127 -> kLowQp - test::ScopedFieldTrials field_trials( + test::ScopedKeyValueConfig field_trials( + field_trials_, kPrefix + "127,127,0,0,0,0" + kEnd + - "WebRTC-Video-BalancedDegradationSettings/" - "pixels:230400|921600,fps:20|30,kbps:300|500/"); // should not affect + "WebRTC-Video-BalancedDegradationSettings/" + "pixels:230400|921600,fps:20|30,kbps:300|500/"); // should not affect - UpscalingObserver test("VP8", /*streams_active=*/{true}, - kDefaultVgaMinStartBps - 1, + UpscalingObserver test("VP8", {{.active = true}}, kDefaultVgaMinStartBps - 1, /*automatic_resize=*/true, /*expect_upscale=*/true); RunBaseTest(&test); } TEST_F(QualityScalingTest, AdaptsDownAndThenUpWithBalanced) { // qp_low:127, qp_high:127 -> kLowQp - test::ScopedFieldTrials field_trials( - kPrefix + "127,127,0,0,0,0" + kEnd + - "WebRTC-Video-BalancedDegradationSettings/" - "pixels:230400|921600,fps:20|30,kbps:300|499/"); + test::ScopedKeyValueConfig field_trials( + field_trials_, kPrefix + "127,127,0,0,0,0" + kEnd + + "WebRTC-Video-BalancedDegradationSettings/" + "pixels:230400|921600,fps:20|30,kbps:300|499/"); - UpscalingObserver test("VP8", /*streams_active=*/{true}, - kDefaultVgaMinStartBps - 1, + UpscalingObserver test("VP8", {{.active = true}}, kDefaultVgaMinStartBps - 1, /*automatic_resize=*/true, /*expect_upscale=*/true); test.SetDegradationPreference(DegradationPreference::BALANCED); RunBaseTest(&test); @@ -283,13 +304,12 @@ TEST_F(QualityScalingTest, AdaptsDownAndThenUpWithBalanced) { TEST_F(QualityScalingTest, AdaptsDownButNotUpWithBalancedIfBitrateNotEnough) { // qp_low:127, qp_high:127 -> kLowQp - test::ScopedFieldTrials field_trials( - kPrefix + "127,127,0,0,0,0" + kEnd + - "WebRTC-Video-BalancedDegradationSettings/" - "pixels:230400|921600,fps:20|30,kbps:300|500/"); + test::ScopedKeyValueConfig field_trials( + field_trials_, kPrefix + "127,127,0,0,0,0" + kEnd + + "WebRTC-Video-BalancedDegradationSettings/" + "pixels:230400|921600,fps:20|30,kbps:300|500/"); - UpscalingObserver test("VP8", /*streams_active=*/{true}, - kDefaultVgaMinStartBps - 1, + UpscalingObserver test("VP8", {{.active = true}}, kDefaultVgaMinStartBps - 1, /*automatic_resize=*/true, /*expect_upscale=*/false); test.SetDegradationPreference(DegradationPreference::BALANCED); RunBaseTest(&test); @@ -297,9 +317,11 @@ TEST_F(QualityScalingTest, AdaptsDownButNotUpWithBalancedIfBitrateNotEnough) { TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrate_Simulcast) { // qp_low:1, qp_high:127 -> kNormalQp - test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "1,127,0,0,0,0" + kEnd); - DownscalingObserver test("VP8", /*streams_active=*/{true, true}, kLowStartBps, + DownscalingObserver 
test("VP8", {{.active = true}, {.active = true}}, + kLowStartBps, /*automatic_resize=*/false, /*expect_downscale=*/false); RunBaseTest(&test); @@ -307,32 +329,37 @@ TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrate_Simulcast) { TEST_F(QualityScalingTest, AdaptsDownForHighQp_HighestStreamActive_Vp8) { // qp_low:1, qp_high:1 -> kHighQp - test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd); - - DownscalingObserver test("VP8", /*streams_active=*/{false, false, true}, - kHighStartBps, - /*automatic_resize=*/true, - /*expect_downscale=*/true); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "1,1,0,0,0,0" + kEnd); + + DownscalingObserver test( + "VP8", {{.active = false}, {.active = false}, {.active = true}}, + kHighStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/true); RunBaseTest(&test); } TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate_HighestStreamActive_Vp8) { // qp_low:1, qp_high:127 -> kNormalQp - test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); - - DownscalingObserver test("VP8", /*streams_active=*/{false, false, true}, - kSinglecastLimits720pVp8->min_start_bitrate_bps - 1, - /*automatic_resize=*/true, - /*expect_downscale=*/true); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "1,127,0,0,0,0" + kEnd); + + DownscalingObserver test( + "VP8", {{.active = false}, {.active = false}, {.active = true}}, + kSinglecastLimits720pVp8->min_start_bitrate_bps - 1, + /*automatic_resize=*/true, + /*expect_downscale=*/true); RunBaseTest(&test); } TEST_F(QualityScalingTest, AdaptsDownButNotUpWithMinStartBitrateLimit) { // qp_low:127, qp_high:127 -> kLowQp - test::ScopedFieldTrials field_trials(kPrefix + "127,127,0,0,0,0" + kEnd); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "127,127,0,0,0,0" + kEnd); - UpscalingObserver test("VP8", /*streams_active=*/{false, true}, + UpscalingObserver test("VP8", {{.active = false}, {.active = true}}, kSinglecastLimits720pVp8->min_start_bitrate_bps - 1, /*automatic_resize=*/true, /*expect_upscale=*/false); RunBaseTest(&test); @@ -340,35 +367,39 @@ TEST_F(QualityScalingTest, AdaptsDownButNotUpWithMinStartBitrateLimit) { TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateIfBitrateEnough_Vp8) { // qp_low:1, qp_high:127 -> kNormalQp - test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); - - DownscalingObserver test("VP8", /*streams_active=*/{false, false, true}, - kSinglecastLimits720pVp8->min_start_bitrate_bps, - /*automatic_resize=*/true, - /*expect_downscale=*/false); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "1,127,0,0,0,0" + kEnd); + + DownscalingObserver test( + "VP8", {{.active = false}, {.active = false}, {.active = true}}, + kSinglecastLimits720pVp8->min_start_bitrate_bps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); RunBaseTest(&test); } TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateIfDefaultLimitsDisabled_Vp8) { // qp_low:1, qp_high:127 -> kNormalQp - test::ScopedFieldTrials field_trials( - kPrefix + "1,127,0,0,0,0" + kEnd + - "WebRTC-DefaultBitrateLimitsKillSwitch/Enabled/"); - - DownscalingObserver test("VP8", /*streams_active=*/{false, false, true}, - kSinglecastLimits720pVp8->min_start_bitrate_bps - 1, - /*automatic_resize=*/true, - /*expect_downscale=*/false); + test::ScopedKeyValueConfig field_trials( + field_trials_, kPrefix + "1,127,0,0,0,0" + kEnd + + "WebRTC-DefaultBitrateLimitsKillSwitch/Enabled/"); + + DownscalingObserver test( + "VP8", 
{{.active = false}, {.active = false}, {.active = true}}, + kSinglecastLimits720pVp8->min_start_bitrate_bps - 1, + /*automatic_resize=*/true, + /*expect_downscale=*/false); RunBaseTest(&test); } TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrate_OneStreamSinglecastLimitsNotUsed_Vp8) { // qp_low:1, qp_high:127 -> kNormalQp - test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "1,127,0,0,0,0" + kEnd); - DownscalingObserver test("VP8", /*streams_active=*/{true}, + DownscalingObserver test("VP8", {{.active = true}}, kSinglecastLimits720pVp8->min_start_bitrate_bps - 1, /*automatic_resize=*/true, /*expect_downscale=*/false); @@ -377,32 +408,37 @@ TEST_F(QualityScalingTest, TEST_F(QualityScalingTest, NoAdaptDownForHighQp_LowestStreamActive_Vp8) { // qp_low:1, qp_high:1 -> kHighQp - test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd); - - DownscalingObserver test("VP8", /*streams_active=*/{true, false, false}, - kHighStartBps, - /*automatic_resize=*/true, - /*expect_downscale=*/false); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "1,1,0,0,0,0" + kEnd); + + DownscalingObserver test( + "VP8", {{.active = true}, {.active = false}, {.active = false}}, + kHighStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); RunBaseTest(&test); } TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrate_LowestStreamActive_Vp8) { // qp_low:1, qp_high:127 -> kNormalQp - test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); - - DownscalingObserver test("VP8", /*streams_active=*/{true, false, false}, - kLowStartBps, - /*automatic_resize=*/true, - /*expect_downscale=*/false); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "1,127,0,0,0,0" + kEnd); + + DownscalingObserver test( + "VP8", {{.active = true}, {.active = false}, {.active = false}}, + kLowStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); RunBaseTest(&test); } TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateIfScalingOff_Vp8) { // qp_low:1, qp_high:127 -> kNormalQp - test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "1,127,0,0,0,0" + kEnd); - DownscalingObserver test("VP8", /*streams_active=*/{true}, kLowStartBps, + DownscalingObserver test("VP8", {{.active = true}}, kLowStartBps, /*automatic_resize=*/false, /*expect_downscale=*/false); RunBaseTest(&test); @@ -410,10 +446,10 @@ TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateIfScalingOff_Vp8) { TEST_F(QualityScalingTest, AdaptsDownForHighQp_Vp9) { // qp_low:1, qp_high:1 -> kHighQp - test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd + - "WebRTC-VP9QualityScaler/Enabled/"); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "0,0,1,1,0,0" + kEnd); - DownscalingObserver test("VP9", /*streams_active=*/{true}, kHighStartBps, + DownscalingObserver test("VP9", {{.active = true}}, kHighStartBps, /*automatic_resize=*/true, /*expect_downscale=*/true); RunBaseTest(&test); @@ -421,10 +457,11 @@ TEST_F(QualityScalingTest, AdaptsDownForHighQp_Vp9) { TEST_F(QualityScalingTest, NoAdaptDownForHighQpIfScalingOff_Vp9) { // qp_low:1, qp_high:1 -> kHighQp - test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd + - "WebRTC-VP9QualityScaler/Disabled/"); + test::ScopedKeyValueConfig field_trials( + field_trials_, + kPrefix + "0,0,1,1,0,0" + kEnd + 
"WebRTC-VP9QualityScaler/Disabled/"); - DownscalingObserver test("VP9", /*streams_active=*/{true}, kHighStartBps, + DownscalingObserver test("VP9", {{.active = true}}, kHighStartBps, /*automatic_resize=*/true, /*expect_downscale=*/false); RunBaseTest(&test); @@ -432,83 +469,130 @@ TEST_F(QualityScalingTest, NoAdaptDownForHighQpIfScalingOff_Vp9) { TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate_Vp9) { // qp_low:1, qp_high:255 -> kNormalQp - test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,255,0,0" + kEnd + - "WebRTC-VP9QualityScaler/Enabled/"); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "0,0,1,255,0,0" + kEnd); - DownscalingObserver test("VP9", /*streams_active=*/{true}, kLowStartBps, + DownscalingObserver test("VP9", {{.active = true}}, kLowStartBps, /*automatic_resize=*/true, /*expect_downscale=*/true); RunBaseTest(&test); } +TEST_F(QualityScalingTest, NoAdaptDownForHighStartBitrate_Vp9) { + DownscalingObserver test( + "VP9", {{.active = false}, {.active = false}, {.active = true}}, + kHighStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); + RunBaseTest(&test); +} + TEST_F(QualityScalingTest, NoAdaptDownForHighQp_LowestStreamActive_Vp9) { // qp_low:1, qp_high:1 -> kHighQp - test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd + - "WebRTC-VP9QualityScaler/Enabled/"); - - DownscalingObserver test("VP9", /*streams_active=*/{true, false, false}, - kHighStartBps, - /*automatic_resize=*/true, - /*expect_downscale=*/false); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "0,0,1,1,0,0" + kEnd); + + DownscalingObserver test( + "VP9", {{.active = true}, {.active = false}, {.active = false}}, + kHighStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); RunBaseTest(&test); } TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrate_LowestStreamActive_Vp9) { // qp_low:1, qp_high:255 -> kNormalQp - test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,255,0,0" + kEnd + - "WebRTC-VP9QualityScaler/Enabled/"); - - DownscalingObserver test("VP9", /*streams_active=*/{true, false, false}, - kLowStartBps, - /*automatic_resize=*/true, - /*expect_downscale=*/false); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "0,0,1,255,0,0" + kEnd); + + DownscalingObserver test( + "VP9", {{.active = true}, {.active = false}, {.active = false}}, + kLowStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); RunBaseTest(&test); } TEST_F(QualityScalingTest, AdaptsDownForHighQp_MiddleStreamActive_Vp9) { // qp_low:1, qp_high:1 -> kHighQp - test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd + - "WebRTC-VP9QualityScaler/Enabled/"); - - DownscalingObserver test("VP9", /*streams_active=*/{false, true, false}, - kHighStartBps, - /*automatic_resize=*/true, - /*expect_downscale=*/true); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "0,0,1,1,0,0" + kEnd); + + DownscalingObserver test( + "VP9", {{.active = false}, {.active = true}, {.active = false}}, + kHighStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/true); RunBaseTest(&test); } TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate_MiddleStreamActive_Vp9) { // qp_low:1, qp_high:255 -> kNormalQp - test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,255,0,0" + kEnd + - "WebRTC-VP9QualityScaler/Enabled/"); - - DownscalingObserver test("VP9", /*streams_active=*/{false, true, false}, - kSinglecastLimits360pVp9->min_start_bitrate_bps - 1, - /*automatic_resize=*/true, - 
/*expect_downscale=*/true); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "0,0,1,255,0,0" + kEnd); + + DownscalingObserver test( + "VP9", {{.active = false}, {.active = true}, {.active = false}}, + kSinglecastLimits360pVp9->min_start_bitrate_bps - 1, + /*automatic_resize=*/true, + /*expect_downscale=*/true); RunBaseTest(&test); } TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateIfBitrateEnough_Vp9) { // qp_low:1, qp_high:255 -> kNormalQp - test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,255,0,0" + kEnd + - "WebRTC-VP9QualityScaler/Enabled/"); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "0,0,1,255,0,0" + kEnd); + + DownscalingObserver test( + "VP9", {{.active = false}, {.active = true}, {.active = false}}, + kSinglecastLimits360pVp9->min_start_bitrate_bps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); + RunBaseTest(&test); +} - DownscalingObserver test("VP9", /*streams_active=*/{false, true, false}, - kSinglecastLimits360pVp9->min_start_bitrate_bps, - /*automatic_resize=*/true, - /*expect_downscale=*/false); +TEST_F(QualityScalingTest, + AdaptsDownButNotUpWithMinStartBitrateLimitWithScalabilityMode_VP9) { + // qp_low:255, qp_high:255 -> kLowQp + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "0,0,255,255,0,0" + kEnd); + + UpscalingObserver test( + "VP9", + {{.active = true, .scalability_mode = ScalabilityMode::kL1T3}, + {.active = false}}, + kSinglecastLimits720pVp9->min_start_bitrate_bps - 1, + /*automatic_resize=*/true, /*expect_upscale=*/false); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, + NoAdaptDownForLowStartBitrateIfBitrateEnoughWithScalabilityMode_Vp9) { + // qp_low:1, qp_high:255 -> kNormalQp + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "0,0,1,255,0,0" + kEnd); + + DownscalingObserver test( + "VP9", + {{.active = true, .scalability_mode = ScalabilityMode::kL1T3}, + {.active = false}, + {.active = false}}, + kSinglecastLimits720pVp9->min_start_bitrate_bps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); RunBaseTest(&test); } #if defined(WEBRTC_USE_H264) TEST_F(QualityScalingTest, AdaptsDownForHighQp_H264) { // qp_low:1, qp_high:1 -> kHighQp - test::ScopedFieldTrials field_trials(kPrefix + "0,0,0,0,1,1" + kEnd); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "0,0,0,0,1,1" + kEnd); - DownscalingObserver test("H264", /*streams_active=*/{true}, kHighStartBps, + DownscalingObserver test("H264", {{.active = true}}, kHighStartBps, /*automatic_resize=*/true, /*expect_downscale=*/true); RunBaseTest(&test); @@ -516,9 +600,10 @@ TEST_F(QualityScalingTest, AdaptsDownForHighQp_H264) { TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate_H264) { // qp_low:1, qp_high:51 -> kNormalQp - test::ScopedFieldTrials field_trials(kPrefix + "0,0,0,0,1,51" + kEnd); + test::ScopedKeyValueConfig field_trials(field_trials_, + kPrefix + "0,0,0,0,1,51" + kEnd); - DownscalingObserver test("H264", /*streams_active=*/{true}, kLowStartBps, + DownscalingObserver test("H264", {{.active = true}}, kLowStartBps, /*automatic_resize=*/true, /*expect_downscale=*/true); RunBaseTest(&test); diff --git a/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy.cc b/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy.cc deleted file mode 100644 index 1384ae7dbe..0000000000 --- a/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy.cc +++ /dev/null @@ -1,938 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "video/receive_statistics_proxy.h" - -#include -#include -#include - -#include "modules/video_coding/include/video_codec_interface.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/strings/string_builder.h" -#include "rtc_base/time_utils.h" -#include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" -#include "system_wrappers/include/metrics.h" - -namespace webrtc { -namespace { -// Periodic time interval for processing samples for `freq_offset_counter_`. -const int64_t kFreqOffsetProcessIntervalMs = 40000; - -// Configuration for bad call detection. -const int kBadCallMinRequiredSamples = 10; -const int kMinSampleLengthMs = 990; -const int kNumMeasurements = 10; -const int kNumMeasurementsVariance = kNumMeasurements * 1.5; -const float kBadFraction = 0.8f; -// For fps: -// Low means low enough to be bad, high means high enough to be good -const int kLowFpsThreshold = 12; -const int kHighFpsThreshold = 14; -// For qp and fps variance: -// Low means low enough to be good, high means high enough to be bad -const int kLowQpThresholdVp8 = 60; -const int kHighQpThresholdVp8 = 70; -const int kLowVarianceThreshold = 1; -const int kHighVarianceThreshold = 2; - -// Some metrics are reported as a maximum over this period. -// This should be synchronized with a typical getStats polling interval in -// the clients. -const int kMovingMaxWindowMs = 1000; - -// How large window we use to calculate the framerate/bitrate. -const int kRateStatisticsWindowSizeMs = 1000; - -// Some sane ballpark estimate for maximum common value of inter-frame delay. -// Values below that will be stored explicitly in the array, -// values above - in the map. -const int kMaxCommonInterframeDelayMs = 500; - -const char* UmaPrefixForContentType(VideoContentType content_type) { - if (videocontenttypehelpers::IsScreenshare(content_type)) - return "WebRTC.Video.Screenshare"; - return "WebRTC.Video"; -} - -std::string UmaSuffixForContentType(VideoContentType content_type) { - char ss_buf[1024]; - rtc::SimpleStringBuilder ss(ss_buf); - int simulcast_id = videocontenttypehelpers::GetSimulcastId(content_type); - if (simulcast_id > 0) { - ss << ".S" << simulcast_id - 1; - } - int experiment_id = videocontenttypehelpers::GetExperimentId(content_type); - if (experiment_id > 0) { - ss << ".ExperimentGroup" << experiment_id - 1; - } - return ss.str(); -} - -} // namespace - -ReceiveStatisticsProxy::ReceiveStatisticsProxy(uint32_t remote_ssrc, - Clock* clock) - : clock_(clock), - start_ms_(clock->TimeInMilliseconds()), - enable_decode_time_histograms_( - !field_trial::IsEnabled("WebRTC-DecodeTimeHistogramsKillSwitch")), - last_sample_time_(clock->TimeInMilliseconds()), - fps_threshold_(kLowFpsThreshold, - kHighFpsThreshold, - kBadFraction, - kNumMeasurements), - qp_threshold_(kLowQpThresholdVp8, - kHighQpThresholdVp8, - kBadFraction, - kNumMeasurements), - variance_threshold_(kLowVarianceThreshold, - kHighVarianceThreshold, - kBadFraction, - kNumMeasurementsVariance), - num_bad_states_(0), - num_certain_states_(0), - // 1000ms window, scale 1000 for ms to s. 
- decode_fps_estimator_(1000, 1000), - renders_fps_estimator_(1000, 1000), - render_fps_tracker_(100, 10u), - render_pixel_tracker_(100, 10u), - video_quality_observer_( - new VideoQualityObserver(VideoContentType::UNSPECIFIED)), - interframe_delay_max_moving_(kMovingMaxWindowMs), - freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs), - avg_rtt_ms_(0), - last_content_type_(VideoContentType::UNSPECIFIED), - last_codec_type_(kVideoCodecVP8), - num_delayed_frames_rendered_(0), - sum_missed_render_deadline_ms_(0), - timing_frame_info_counter_(kMovingMaxWindowMs) { - decode_thread_.Detach(); - network_thread_.Detach(); - stats_.ssrc = remote_ssrc; -} - -void ReceiveStatisticsProxy::UpdateHistograms( - absl::optional fraction_lost, - const StreamDataCounters& rtp_stats, - const StreamDataCounters* rtx_stats) { - // Not actually running on the decoder thread, but must be called after - // DecoderThreadStopped, which detaches the thread checker. It is therefore - // safe to access `qp_counters_`, which were updated on the decode thread - // earlier. - RTC_DCHECK_RUN_ON(&decode_thread_); - - MutexLock lock(&mutex_); - - char log_stream_buf[8 * 1024]; - rtc::SimpleStringBuilder log_stream(log_stream_buf); - int stream_duration_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000; - if (stats_.frame_counts.key_frames > 0 || - stats_.frame_counts.delta_frames > 0) { - RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.ReceiveStreamLifetimeInSeconds", - stream_duration_sec); - log_stream << "WebRTC.Video.ReceiveStreamLifetimeInSeconds " - << stream_duration_sec << '\n'; - } - - log_stream << "Frames decoded " << stats_.frames_decoded << '\n'; - - if (num_unique_frames_) { - int num_dropped_frames = *num_unique_frames_ - stats_.frames_decoded; - RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DroppedFrames.Receiver", - num_dropped_frames); - log_stream << "WebRTC.Video.DroppedFrames.Receiver " << num_dropped_frames - << '\n'; - } - - if (fraction_lost && stream_duration_sec >= metrics::kMinRunTimeInSeconds) { - RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.ReceivedPacketsLostInPercent", - *fraction_lost); - log_stream << "WebRTC.Video.ReceivedPacketsLostInPercent " << *fraction_lost - << '\n'; - } - - if (first_decoded_frame_time_ms_) { - const int64_t elapsed_ms = - (clock_->TimeInMilliseconds() - *first_decoded_frame_time_ms_); - if (elapsed_ms >= - metrics::kMinRunTimeInSeconds * rtc::kNumMillisecsPerSec) { - int decoded_fps = static_cast( - (stats_.frames_decoded * 1000.0f / elapsed_ms) + 0.5f); - RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.DecodedFramesPerSecond", - decoded_fps); - log_stream << "WebRTC.Video.DecodedFramesPerSecond " << decoded_fps - << '\n'; - - const uint32_t frames_rendered = stats_.frames_rendered; - if (frames_rendered > 0) { - RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.DelayedFramesToRenderer", - static_cast(num_delayed_frames_rendered_ * - 100 / frames_rendered)); - if (num_delayed_frames_rendered_ > 0) { - RTC_HISTOGRAM_COUNTS_1000( - "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs", - static_cast(sum_missed_render_deadline_ms_ / - num_delayed_frames_rendered_)); - } - } - } - } - - const int kMinRequiredSamples = 200; - int samples = static_cast(render_fps_tracker_.TotalSampleCount()); - if (samples >= kMinRequiredSamples) { - int rendered_fps = round(render_fps_tracker_.ComputeTotalRate()); - RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.RenderFramesPerSecond", - rendered_fps); - log_stream << "WebRTC.Video.RenderFramesPerSecond " << rendered_fps << '\n'; - RTC_HISTOGRAM_COUNTS_100000( - 
"WebRTC.Video.RenderSqrtPixelsPerSecond", - round(render_pixel_tracker_.ComputeTotalRate())); - } - - absl::optional sync_offset_ms = - sync_offset_counter_.Avg(kMinRequiredSamples); - if (sync_offset_ms) { - RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs", - *sync_offset_ms); - log_stream << "WebRTC.Video.AVSyncOffsetInMs " << *sync_offset_ms << '\n'; - } - AggregatedStats freq_offset_stats = freq_offset_counter_.GetStats(); - if (freq_offset_stats.num_samples > 0) { - RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtpToNtpFreqOffsetInKhz", - freq_offset_stats.average); - log_stream << "WebRTC.Video.RtpToNtpFreqOffsetInKhz " - << freq_offset_stats.ToString() << '\n'; - } - - int num_total_frames = - stats_.frame_counts.key_frames + stats_.frame_counts.delta_frames; - if (num_total_frames >= kMinRequiredSamples) { - int num_key_frames = stats_.frame_counts.key_frames; - int key_frames_permille = - (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames; - RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesReceivedInPermille", - key_frames_permille); - log_stream << "WebRTC.Video.KeyFramesReceivedInPermille " - << key_frames_permille << '\n'; - } - - absl::optional qp = qp_counters_.vp8.Avg(kMinRequiredSamples); - if (qp) { - RTC_HISTOGRAM_COUNTS_200("WebRTC.Video.Decoded.Vp8.Qp", *qp); - log_stream << "WebRTC.Video.Decoded.Vp8.Qp " << *qp << '\n'; - } - absl::optional decode_ms = decode_time_counter_.Avg(kMinRequiredSamples); - if (decode_ms) { - RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DecodeTimeInMs", *decode_ms); - log_stream << "WebRTC.Video.DecodeTimeInMs " << *decode_ms << '\n'; - } - absl::optional jb_delay_ms = - jitter_buffer_delay_counter_.Avg(kMinRequiredSamples); - if (jb_delay_ms) { - RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs", - *jb_delay_ms); - log_stream << "WebRTC.Video.JitterBufferDelayInMs " << *jb_delay_ms << '\n'; - } - - absl::optional target_delay_ms = - target_delay_counter_.Avg(kMinRequiredSamples); - if (target_delay_ms) { - RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.TargetDelayInMs", - *target_delay_ms); - log_stream << "WebRTC.Video.TargetDelayInMs " << *target_delay_ms << '\n'; - } - absl::optional current_delay_ms = - current_delay_counter_.Avg(kMinRequiredSamples); - if (current_delay_ms) { - RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.CurrentDelayInMs", - *current_delay_ms); - log_stream << "WebRTC.Video.CurrentDelayInMs " << *current_delay_ms << '\n'; - } - absl::optional delay_ms = delay_counter_.Avg(kMinRequiredSamples); - if (delay_ms) - RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", *delay_ms); - - // Aggregate content_specific_stats_ by removing experiment or simulcast - // information; - std::map aggregated_stats; - for (const auto& it : content_specific_stats_) { - // Calculate simulcast specific metrics (".S0" ... ".S2" suffixes). - VideoContentType content_type = it.first; - if (videocontenttypehelpers::GetSimulcastId(content_type) > 0) { - // Aggregate on experiment id. - videocontenttypehelpers::SetExperimentId(&content_type, 0); - aggregated_stats[content_type].Add(it.second); - } - // Calculate experiment specific metrics (".ExperimentGroup[0-7]" suffixes). - content_type = it.first; - if (videocontenttypehelpers::GetExperimentId(content_type) > 0) { - // Aggregate on simulcast id. - videocontenttypehelpers::SetSimulcastId(&content_type, 0); - aggregated_stats[content_type].Add(it.second); - } - // Calculate aggregated metrics (no suffixes. Aggregated on everything). 
- content_type = it.first; - videocontenttypehelpers::SetSimulcastId(&content_type, 0); - videocontenttypehelpers::SetExperimentId(&content_type, 0); - aggregated_stats[content_type].Add(it.second); - } - - for (const auto& it : aggregated_stats) { - // For the metric Foo we report the following slices: - // WebRTC.Video.Foo, - // WebRTC.Video.Screenshare.Foo, - // WebRTC.Video.Foo.S[0-3], - // WebRTC.Video.Foo.ExperimentGroup[0-7], - // WebRTC.Video.Screenshare.Foo.S[0-3], - // WebRTC.Video.Screenshare.Foo.ExperimentGroup[0-7]. - auto content_type = it.first; - auto stats = it.second; - std::string uma_prefix = UmaPrefixForContentType(content_type); - std::string uma_suffix = UmaSuffixForContentType(content_type); - // Metrics can be sliced on either simulcast id or experiment id but not - // both. - RTC_DCHECK(videocontenttypehelpers::GetExperimentId(content_type) == 0 || - videocontenttypehelpers::GetSimulcastId(content_type) == 0); - - absl::optional e2e_delay_ms = - stats.e2e_delay_counter.Avg(kMinRequiredSamples); - if (e2e_delay_ms) { - RTC_HISTOGRAM_COUNTS_SPARSE_10000( - uma_prefix + ".EndToEndDelayInMs" + uma_suffix, *e2e_delay_ms); - log_stream << uma_prefix << ".EndToEndDelayInMs" << uma_suffix << " " - << *e2e_delay_ms << '\n'; - } - absl::optional e2e_delay_max_ms = stats.e2e_delay_counter.Max(); - if (e2e_delay_max_ms && e2e_delay_ms) { - RTC_HISTOGRAM_COUNTS_SPARSE_100000( - uma_prefix + ".EndToEndDelayMaxInMs" + uma_suffix, *e2e_delay_max_ms); - log_stream << uma_prefix << ".EndToEndDelayMaxInMs" << uma_suffix << " " - << *e2e_delay_max_ms << '\n'; - } - absl::optional interframe_delay_ms = - stats.interframe_delay_counter.Avg(kMinRequiredSamples); - if (interframe_delay_ms) { - RTC_HISTOGRAM_COUNTS_SPARSE_10000( - uma_prefix + ".InterframeDelayInMs" + uma_suffix, - *interframe_delay_ms); - log_stream << uma_prefix << ".InterframeDelayInMs" << uma_suffix << " " - << *interframe_delay_ms << '\n'; - } - absl::optional interframe_delay_max_ms = - stats.interframe_delay_counter.Max(); - if (interframe_delay_max_ms && interframe_delay_ms) { - RTC_HISTOGRAM_COUNTS_SPARSE_10000( - uma_prefix + ".InterframeDelayMaxInMs" + uma_suffix, - *interframe_delay_max_ms); - log_stream << uma_prefix << ".InterframeDelayMaxInMs" << uma_suffix << " " - << *interframe_delay_max_ms << '\n'; - } - - absl::optional interframe_delay_95p_ms = - stats.interframe_delay_percentiles.GetPercentile(0.95f); - if (interframe_delay_95p_ms && interframe_delay_ms != -1) { - RTC_HISTOGRAM_COUNTS_SPARSE_10000( - uma_prefix + ".InterframeDelay95PercentileInMs" + uma_suffix, - *interframe_delay_95p_ms); - log_stream << uma_prefix << ".InterframeDelay95PercentileInMs" - << uma_suffix << " " << *interframe_delay_95p_ms << '\n'; - } - - absl::optional width = stats.received_width.Avg(kMinRequiredSamples); - if (width) { - RTC_HISTOGRAM_COUNTS_SPARSE_10000( - uma_prefix + ".ReceivedWidthInPixels" + uma_suffix, *width); - log_stream << uma_prefix << ".ReceivedWidthInPixels" << uma_suffix << " " - << *width << '\n'; - } - - absl::optional height = stats.received_height.Avg(kMinRequiredSamples); - if (height) { - RTC_HISTOGRAM_COUNTS_SPARSE_10000( - uma_prefix + ".ReceivedHeightInPixels" + uma_suffix, *height); - log_stream << uma_prefix << ".ReceivedHeightInPixels" << uma_suffix << " " - << *height << '\n'; - } - - if (content_type != VideoContentType::UNSPECIFIED) { - // Don't report these 3 metrics unsliced, as more precise variants - // are reported separately in this method. 
- float flow_duration_sec = stats.flow_duration_ms / 1000.0; - if (flow_duration_sec >= metrics::kMinRunTimeInSeconds) { - int media_bitrate_kbps = static_cast(stats.total_media_bytes * 8 / - flow_duration_sec / 1000); - RTC_HISTOGRAM_COUNTS_SPARSE_10000( - uma_prefix + ".MediaBitrateReceivedInKbps" + uma_suffix, - media_bitrate_kbps); - log_stream << uma_prefix << ".MediaBitrateReceivedInKbps" << uma_suffix - << " " << media_bitrate_kbps << '\n'; - } - - int num_total_frames2 = - stats.frame_counts.key_frames + stats.frame_counts.delta_frames; - if (num_total_frames2 >= kMinRequiredSamples) { - int num_key_frames = stats.frame_counts.key_frames; - int key_frames_permille = - (num_key_frames * 1000 + num_total_frames2 / 2) / num_total_frames2; - RTC_HISTOGRAM_COUNTS_SPARSE_1000( - uma_prefix + ".KeyFramesReceivedInPermille" + uma_suffix, - key_frames_permille); - log_stream << uma_prefix << ".KeyFramesReceivedInPermille" << uma_suffix - << " " << key_frames_permille << '\n'; - } - - absl::optional qp2 = stats.qp_counter.Avg(kMinRequiredSamples); - if (qp2) { - RTC_HISTOGRAM_COUNTS_SPARSE_200( - uma_prefix + ".Decoded.Vp8.Qp" + uma_suffix, *qp2); - log_stream << uma_prefix << ".Decoded.Vp8.Qp" << uma_suffix << " " - << *qp2 << '\n'; - } - } - } - - StreamDataCounters rtp_rtx_stats = rtp_stats; - if (rtx_stats) - rtp_rtx_stats.Add(*rtx_stats); - int64_t elapsed_sec = - rtp_rtx_stats.TimeSinceFirstPacketInMs(clock_->TimeInMilliseconds()) / - 1000; - if (elapsed_sec >= metrics::kMinRunTimeInSeconds) { - RTC_HISTOGRAM_COUNTS_10000( - "WebRTC.Video.BitrateReceivedInKbps", - static_cast(rtp_rtx_stats.transmitted.TotalBytes() * 8 / - elapsed_sec / 1000)); - int media_bitrate_kbs = static_cast(rtp_stats.MediaPayloadBytes() * 8 / - elapsed_sec / 1000); - RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.MediaBitrateReceivedInKbps", - media_bitrate_kbs); - log_stream << "WebRTC.Video.MediaBitrateReceivedInKbps " - << media_bitrate_kbs << '\n'; - RTC_HISTOGRAM_COUNTS_10000( - "WebRTC.Video.PaddingBitrateReceivedInKbps", - static_cast(rtp_rtx_stats.transmitted.padding_bytes * 8 / - elapsed_sec / 1000)); - RTC_HISTOGRAM_COUNTS_10000( - "WebRTC.Video.RetransmittedBitrateReceivedInKbps", - static_cast(rtp_rtx_stats.retransmitted.TotalBytes() * 8 / - elapsed_sec / 1000)); - if (rtx_stats) { - RTC_HISTOGRAM_COUNTS_10000( - "WebRTC.Video.RtxBitrateReceivedInKbps", - static_cast(rtx_stats->transmitted.TotalBytes() * 8 / - elapsed_sec / 1000)); - } - const RtcpPacketTypeCounter& counters = stats_.rtcp_packet_type_counts; - RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.NackPacketsSentPerMinute", - counters.nack_packets * 60 / elapsed_sec); - RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.FirPacketsSentPerMinute", - counters.fir_packets * 60 / elapsed_sec); - RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.PliPacketsSentPerMinute", - counters.pli_packets * 60 / elapsed_sec); - if (counters.nack_requests > 0) { - RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.UniqueNackRequestsSentInPercent", - counters.UniqueNackRequestsInPercent()); - } - } - - if (num_certain_states_ >= kBadCallMinRequiredSamples) { - RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.Any", - 100 * num_bad_states_ / num_certain_states_); - } - absl::optional fps_fraction = - fps_threshold_.FractionHigh(kBadCallMinRequiredSamples); - if (fps_fraction) { - RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.FrameRate", - static_cast(100 * (1 - *fps_fraction))); - } - absl::optional variance_fraction = - variance_threshold_.FractionHigh(kBadCallMinRequiredSamples); - if (variance_fraction) 
{ - RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.FrameRateVariance", - static_cast(100 * *variance_fraction)); - } - absl::optional qp_fraction = - qp_threshold_.FractionHigh(kBadCallMinRequiredSamples); - if (qp_fraction) { - RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.Qp", - static_cast(100 * *qp_fraction)); - } - - RTC_LOG(LS_INFO) << log_stream.str(); - video_quality_observer_->UpdateHistograms(); -} - -void ReceiveStatisticsProxy::QualitySample() { - int64_t now = clock_->TimeInMilliseconds(); - if (last_sample_time_ + kMinSampleLengthMs > now) - return; - - double fps = - render_fps_tracker_.ComputeRateForInterval(now - last_sample_time_); - absl::optional qp = qp_sample_.Avg(1); - - bool prev_fps_bad = !fps_threshold_.IsHigh().value_or(true); - bool prev_qp_bad = qp_threshold_.IsHigh().value_or(false); - bool prev_variance_bad = variance_threshold_.IsHigh().value_or(false); - bool prev_any_bad = prev_fps_bad || prev_qp_bad || prev_variance_bad; - - fps_threshold_.AddMeasurement(static_cast(fps)); - if (qp) - qp_threshold_.AddMeasurement(*qp); - absl::optional fps_variance_opt = fps_threshold_.CalculateVariance(); - double fps_variance = fps_variance_opt.value_or(0); - if (fps_variance_opt) { - variance_threshold_.AddMeasurement(static_cast(fps_variance)); - } - - bool fps_bad = !fps_threshold_.IsHigh().value_or(true); - bool qp_bad = qp_threshold_.IsHigh().value_or(false); - bool variance_bad = variance_threshold_.IsHigh().value_or(false); - bool any_bad = fps_bad || qp_bad || variance_bad; - - if (!prev_any_bad && any_bad) { - RTC_LOG(LS_INFO) << "Bad call (any) start: " << now; - } else if (prev_any_bad && !any_bad) { - RTC_LOG(LS_INFO) << "Bad call (any) end: " << now; - } - - if (!prev_fps_bad && fps_bad) { - RTC_LOG(LS_INFO) << "Bad call (fps) start: " << now; - } else if (prev_fps_bad && !fps_bad) { - RTC_LOG(LS_INFO) << "Bad call (fps) end: " << now; - } - - if (!prev_qp_bad && qp_bad) { - RTC_LOG(LS_INFO) << "Bad call (qp) start: " << now; - } else if (prev_qp_bad && !qp_bad) { - RTC_LOG(LS_INFO) << "Bad call (qp) end: " << now; - } - - if (!prev_variance_bad && variance_bad) { - RTC_LOG(LS_INFO) << "Bad call (variance) start: " << now; - } else if (prev_variance_bad && !variance_bad) { - RTC_LOG(LS_INFO) << "Bad call (variance) end: " << now; - } - - RTC_LOG(LS_VERBOSE) << "SAMPLE: sample_length: " << (now - last_sample_time_) - << " fps: " << fps << " fps_bad: " << fps_bad - << " qp: " << qp.value_or(-1) << " qp_bad: " << qp_bad - << " variance_bad: " << variance_bad - << " fps_variance: " << fps_variance; - - last_sample_time_ = now; - qp_sample_.Reset(); - - if (fps_threshold_.IsHigh() || variance_threshold_.IsHigh() || - qp_threshold_.IsHigh()) { - if (any_bad) - ++num_bad_states_; - ++num_certain_states_; - } -} - -void ReceiveStatisticsProxy::UpdateFramerate(int64_t now_ms) const { - int64_t old_frames_ms = now_ms - kRateStatisticsWindowSizeMs; - while (!frame_window_.empty() && - frame_window_.begin()->first < old_frames_ms) { - frame_window_.erase(frame_window_.begin()); - } - - size_t framerate = - (frame_window_.size() * 1000 + 500) / kRateStatisticsWindowSizeMs; - stats_.network_frame_rate = static_cast(framerate); -} - -void ReceiveStatisticsProxy::UpdateDecodeTimeHistograms( - int width, - int height, - int decode_time_ms) const { - bool is_4k = (width == 3840 || width == 4096) && height == 2160; - bool is_hd = width == 1920 && height == 1080; - // Only update histograms for 4k/HD and VP9/H264. 
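The removed UpdateFramerate() above derives the network frame rate from a time-keyed window of recently received frames. A small self-contained approximation of that logic (the window constant and map layout here are assumptions for illustration):

#include <cassert>
#include <cstdint>
#include <map>

// Hypothetical window size; the real constant lives elsewhere in the file.
constexpr int64_t kWindowMs = 1000;

// Drop entries older than the window, then convert the remaining count to a
// rounded frames-per-second value, mirroring the framerate update above.
int WindowedFps(std::map<int64_t, size_t>& frame_window, int64_t now_ms) {
  const int64_t oldest_allowed_ms = now_ms - kWindowMs;
  while (!frame_window.empty() &&
         frame_window.begin()->first < oldest_allowed_ms) {
    frame_window.erase(frame_window.begin());
  }
  return static_cast<int>((frame_window.size() * 1000 + 500) / kWindowMs);
}

int main() {
  std::map<int64_t, size_t> window;  // arrival time (ms) -> frame size
  for (int64_t t = 0; t < 2000; t += 40) window[t] = 1200;  // ~25 fps feed
  assert(WindowedFps(window, 2000) == 25);
  return 0;
}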
- if ((is_4k || is_hd) && (last_codec_type_ == kVideoCodecVP9 || - last_codec_type_ == kVideoCodecH264)) { - const std::string kDecodeTimeUmaPrefix = - "WebRTC.Video.DecodeTimePerFrameInMs."; - - // Each histogram needs its own line for it to not be reused in the wrong - // way when the format changes. - if (last_codec_type_ == kVideoCodecVP9) { - bool is_sw_decoder = - stats_.decoder_implementation_name.compare(0, 6, "libvpx") == 0; - if (is_4k) { - if (is_sw_decoder) - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.4k.Sw", - decode_time_ms); - else - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.4k.Hw", - decode_time_ms); - } else { - if (is_sw_decoder) - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.Hd.Sw", - decode_time_ms); - else - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.Hd.Hw", - decode_time_ms); - } - } else { - bool is_sw_decoder = - stats_.decoder_implementation_name.compare(0, 6, "FFmpeg") == 0; - if (is_4k) { - if (is_sw_decoder) - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.4k.Sw", - decode_time_ms); - else - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.4k.Hw", - decode_time_ms); - - } else { - if (is_sw_decoder) - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.Hd.Sw", - decode_time_ms); - else - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.Hd.Hw", - decode_time_ms); - } - } - } -} - -absl::optional -ReceiveStatisticsProxy::GetCurrentEstimatedPlayoutNtpTimestampMs( - int64_t now_ms) const { - if (!last_estimated_playout_ntp_timestamp_ms_ || - !last_estimated_playout_time_ms_) { - return absl::nullopt; - } - int64_t elapsed_ms = now_ms - *last_estimated_playout_time_ms_; - return *last_estimated_playout_ntp_timestamp_ms_ + elapsed_ms; -} - -VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const { - MutexLock lock(&mutex_); - // Get current frame rates here, as only updating them on new frames prevents - // us from ever correctly displaying frame rate of 0. 
- int64_t now_ms = clock_->TimeInMilliseconds(); - UpdateFramerate(now_ms); - stats_.render_frame_rate = renders_fps_estimator_.Rate(now_ms).value_or(0); - stats_.decode_frame_rate = decode_fps_estimator_.Rate(now_ms).value_or(0); - stats_.interframe_delay_max_ms = - interframe_delay_max_moving_.Max(now_ms).value_or(-1); - stats_.freeze_count = video_quality_observer_->NumFreezes(); - stats_.pause_count = video_quality_observer_->NumPauses(); - stats_.total_freezes_duration_ms = - video_quality_observer_->TotalFreezesDurationMs(); - stats_.total_pauses_duration_ms = - video_quality_observer_->TotalPausesDurationMs(); - stats_.total_frames_duration_ms = - video_quality_observer_->TotalFramesDurationMs(); - stats_.sum_squared_frame_durations = - video_quality_observer_->SumSquaredFrameDurationsSec(); - stats_.content_type = last_content_type_; - stats_.timing_frame_info = timing_frame_info_counter_.Max(now_ms); - stats_.jitter_buffer_delay_seconds = - static_cast(current_delay_counter_.Sum(1).value_or(0)) / - rtc::kNumMillisecsPerSec; - stats_.jitter_buffer_emitted_count = current_delay_counter_.NumSamples(); - stats_.estimated_playout_ntp_timestamp_ms = - GetCurrentEstimatedPlayoutNtpTimestampMs(now_ms); - return stats_; -} - -void ReceiveStatisticsProxy::OnIncomingPayloadType(int payload_type) { - MutexLock lock(&mutex_); - stats_.current_payload_type = payload_type; -} - -void ReceiveStatisticsProxy::OnDecoderImplementationName( - const char* implementation_name) { - MutexLock lock(&mutex_); - stats_.decoder_implementation_name = implementation_name; -} - -void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated( - int max_decode_ms, - int current_delay_ms, - int target_delay_ms, - int jitter_buffer_ms, - int min_playout_delay_ms, - int render_delay_ms) { - MutexLock lock(&mutex_); - stats_.max_decode_ms = max_decode_ms; - stats_.current_delay_ms = current_delay_ms; - stats_.target_delay_ms = target_delay_ms; - stats_.jitter_buffer_ms = jitter_buffer_ms; - stats_.min_playout_delay_ms = min_playout_delay_ms; - stats_.render_delay_ms = render_delay_ms; - jitter_buffer_delay_counter_.Add(jitter_buffer_ms); - target_delay_counter_.Add(target_delay_ms); - current_delay_counter_.Add(current_delay_ms); - // Network delay (rtt/2) + target_delay_ms (jitter delay + decode time + - // render delay). - delay_counter_.Add(target_delay_ms + avg_rtt_ms_ / 2); -} - -void ReceiveStatisticsProxy::OnUniqueFramesCounted(int num_unique_frames) { - MutexLock lock(&mutex_); - num_unique_frames_.emplace(num_unique_frames); -} - -void ReceiveStatisticsProxy::OnTimingFrameInfoUpdated( - const TimingFrameInfo& info) { - MutexLock lock(&mutex_); - if (info.flags != VideoSendTiming::kInvalid) { - int64_t now_ms = clock_->TimeInMilliseconds(); - timing_frame_info_counter_.Add(info, now_ms); - } - - // Measure initial decoding latency between the first frame arriving and the - // first frame being decoded. 
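The OnFrameBufferTimingsUpdated hunk above folds the receiver-side and network delays into the WebRTC.Video.OnewayDelayInMs sample as half the round-trip time plus the target delay. A tiny sketch of that estimate (standalone, illustrative names):

#include <cassert>
#include <cstdint>

// Rough one-way delay estimate: half the measured RTT (the network leg) plus
// the receiver's target delay (jitter buffer + decode + render).
int64_t EstimatedOnewayDelayMs(int64_t target_delay_ms, int64_t avg_rtt_ms) {
  return target_delay_ms + avg_rtt_ms / 2;
}

int main() {
  // An 80 ms RTT and a 60 ms target delay give a ~100 ms one-way estimate.
  assert(EstimatedOnewayDelayMs(60, 80) == 100);
  return 0;
}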
- if (!first_frame_received_time_ms_.has_value()) { - first_frame_received_time_ms_ = info.receive_finish_ms; - } - if (stats_.first_frame_received_to_decoded_ms == -1 && - first_decoded_frame_time_ms_) { - stats_.first_frame_received_to_decoded_ms = - *first_decoded_frame_time_ms_ - *first_frame_received_time_ms_; - } -} - -void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated( - uint32_t ssrc, - const RtcpPacketTypeCounter& packet_counter) { - MutexLock lock(&mutex_); - if (stats_.ssrc != ssrc) - return; - stats_.rtcp_packet_type_counts = packet_counter; -} - -void ReceiveStatisticsProxy::OnCname(uint32_t ssrc, absl::string_view cname) { - MutexLock lock(&mutex_); - // TODO(pbos): Handle both local and remote ssrcs here and RTC_DCHECK that we - // receive stats from one of them. - if (stats_.ssrc != ssrc) - return; - stats_.c_name = std::string(cname); -} - -void ReceiveStatisticsProxy::OnDecodedFrame(const VideoFrame& frame, - absl::optional qp, - int32_t decode_time_ms, - VideoContentType content_type) { - MutexLock lock(&mutex_); - - uint64_t now_ms = clock_->TimeInMilliseconds(); - - if (videocontenttypehelpers::IsScreenshare(content_type) != - videocontenttypehelpers::IsScreenshare(last_content_type_)) { - // Reset the quality observer if content type is switched. But first report - // stats for the previous part of the call. - video_quality_observer_->UpdateHistograms(); - video_quality_observer_.reset(new VideoQualityObserver(content_type)); - } - - video_quality_observer_->OnDecodedFrame(frame, qp, last_codec_type_); - - ContentSpecificStats* content_specific_stats = - &content_specific_stats_[content_type]; - ++stats_.frames_decoded; - if (qp) { - if (!stats_.qp_sum) { - if (stats_.frames_decoded != 1) { - RTC_LOG(LS_WARNING) - << "Frames decoded was not 1 when first qp value was received."; - } - stats_.qp_sum = 0; - } - *stats_.qp_sum += *qp; - content_specific_stats->qp_counter.Add(*qp); - } else if (stats_.qp_sum) { - RTC_LOG(LS_WARNING) - << "QP sum was already set and no QP was given for a frame."; - stats_.qp_sum.reset(); - } - decode_time_counter_.Add(decode_time_ms); - stats_.decode_ms = decode_time_ms; - stats_.total_decode_time_ms += decode_time_ms; - if (enable_decode_time_histograms_) { - UpdateDecodeTimeHistograms(frame.width(), frame.height(), decode_time_ms); - } - - last_content_type_ = content_type; - decode_fps_estimator_.Update(1, now_ms); - if (last_decoded_frame_time_ms_) { - int64_t interframe_delay_ms = now_ms - *last_decoded_frame_time_ms_; - RTC_DCHECK_GE(interframe_delay_ms, 0); - double interframe_delay = interframe_delay_ms / 1000.0; - stats_.total_inter_frame_delay += interframe_delay; - stats_.total_squared_inter_frame_delay += - interframe_delay * interframe_delay; - interframe_delay_max_moving_.Add(interframe_delay_ms, now_ms); - content_specific_stats->interframe_delay_counter.Add(interframe_delay_ms); - content_specific_stats->interframe_delay_percentiles.Add( - interframe_delay_ms); - content_specific_stats->flow_duration_ms += interframe_delay_ms; - } - if (stats_.frames_decoded == 1) { - first_decoded_frame_time_ms_.emplace(now_ms); - } - last_decoded_frame_time_ms_.emplace(now_ms); -} - -void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) { - int width = frame.width(); - int height = frame.height(); - RTC_DCHECK_GT(width, 0); - RTC_DCHECK_GT(height, 0); - int64_t now_ms = clock_->TimeInMilliseconds(); - MutexLock lock(&mutex_); - - video_quality_observer_->OnRenderedFrame(frame, now_ms); - - ContentSpecificStats* 
content_specific_stats = - &content_specific_stats_[last_content_type_]; - renders_fps_estimator_.Update(1, now_ms); - ++stats_.frames_rendered; - stats_.width = width; - stats_.height = height; - render_fps_tracker_.AddSamples(1); - render_pixel_tracker_.AddSamples(sqrt(width * height)); - content_specific_stats->received_width.Add(width); - content_specific_stats->received_height.Add(height); - - // Consider taking stats_.render_delay_ms into account. - const int64_t time_until_rendering_ms = frame.render_time_ms() - now_ms; - if (time_until_rendering_ms < 0) { - sum_missed_render_deadline_ms_ += -time_until_rendering_ms; - ++num_delayed_frames_rendered_; - } - - if (frame.ntp_time_ms() > 0) { - int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms(); - if (delay_ms >= 0) { - content_specific_stats->e2e_delay_counter.Add(delay_ms); - } - } - QualitySample(); -} - -void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t video_playout_ntp_ms, - int64_t sync_offset_ms, - double estimated_freq_khz) { - MutexLock lock(&mutex_); - sync_offset_counter_.Add(std::abs(sync_offset_ms)); - stats_.sync_offset_ms = sync_offset_ms; - last_estimated_playout_ntp_timestamp_ms_ = video_playout_ntp_ms; - last_estimated_playout_time_ms_ = clock_->TimeInMilliseconds(); - - const double kMaxFreqKhz = 10000.0; - int offset_khz = kMaxFreqKhz; - // Should not be zero or negative. If so, report max. - if (estimated_freq_khz < kMaxFreqKhz && estimated_freq_khz > 0.0) - offset_khz = static_cast(std::fabs(estimated_freq_khz - 90.0) + 0.5); - - freq_offset_counter_.Add(offset_khz); -} - -void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, - size_t size_bytes, - VideoContentType content_type) { - MutexLock lock(&mutex_); - if (is_keyframe) { - ++stats_.frame_counts.key_frames; - } else { - ++stats_.frame_counts.delta_frames; - } - - // Content type extension is set only for keyframes and should be propagated - // for all the following delta frames. Here we may receive frames out of order - // and miscategorise some delta frames near the layer switch. - // This may slightly offset calculated bitrate and keyframes permille metrics. - VideoContentType propagated_content_type = - is_keyframe ? content_type : last_content_type_; - - ContentSpecificStats* content_specific_stats = - &content_specific_stats_[propagated_content_type]; - - content_specific_stats->total_media_bytes += size_bytes; - if (is_keyframe) { - ++content_specific_stats->frame_counts.key_frames; - } else { - ++content_specific_stats->frame_counts.delta_frames; - } - - int64_t now_ms = clock_->TimeInMilliseconds(); - frame_window_.insert(std::make_pair(now_ms, size_bytes)); - UpdateFramerate(now_ms); -} - -void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) { - MutexLock lock(&mutex_); - stats_.frames_dropped += frames_dropped; -} - -void ReceiveStatisticsProxy::OnPreDecode(VideoCodecType codec_type, int qp) { - RTC_DCHECK_RUN_ON(&decode_thread_); - MutexLock lock(&mutex_); - last_codec_type_ = codec_type; - if (last_codec_type_ == kVideoCodecVP8 && qp != -1) { - qp_counters_.vp8.Add(qp); - qp_sample_.Add(qp); - } -} - -void ReceiveStatisticsProxy::OnStreamInactive() { - // TODO(sprang): Figure out any other state that should be reset. - - MutexLock lock(&mutex_); - // Don't report inter-frame delay if stream was paused. 
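In OnSyncOffsetUpdated above, the estimated RTP clock frequency is turned into a drift-from-nominal value: video RTP timestamps tick at 90 kHz, so the histogram records the rounded distance from 90, with invalid estimates mapped to the maximum. A standalone sketch, assuming the same 10000 kHz cap:

#include <cassert>
#include <cmath>

// The RTP clock for video nominally runs at 90 kHz, so the interesting value
// is how far the estimate drifts from 90.0, rounded to the nearest kHz.
// Non-positive or absurdly large estimates are reported as the maximum.
int FreqOffsetKhz(double estimated_freq_khz) {
  const double kMaxFreqKhz = 10000.0;
  if (estimated_freq_khz <= 0.0 || estimated_freq_khz >= kMaxFreqKhz)
    return static_cast<int>(kMaxFreqKhz);
  return static_cast<int>(std::fabs(estimated_freq_khz - 90.0) + 0.5);
}

int main() {
  assert(FreqOffsetKhz(90.3) == 0);      // 0.3 kHz drift rounds down to 0
  assert(FreqOffsetKhz(92.6) == 3);      // 2.6 kHz drift rounds up to 3
  assert(FreqOffsetKhz(-1.0) == 10000);  // invalid estimate -> max
  return 0;
}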
- last_decoded_frame_time_ms_.reset(); - video_quality_observer_->OnStreamInactive(); -} - -void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms, - int64_t max_rtt_ms) { - MutexLock lock(&mutex_); - avg_rtt_ms_ = avg_rtt_ms; -} - -void ReceiveStatisticsProxy::DecoderThreadStarting() { - RTC_DCHECK_RUN_ON(&main_thread_); -} - -void ReceiveStatisticsProxy::DecoderThreadStopped() { - RTC_DCHECK_RUN_ON(&main_thread_); - decode_thread_.Detach(); -} - -ReceiveStatisticsProxy::ContentSpecificStats::ContentSpecificStats() - : interframe_delay_percentiles(kMaxCommonInterframeDelayMs) {} - -ReceiveStatisticsProxy::ContentSpecificStats::~ContentSpecificStats() = default; - -void ReceiveStatisticsProxy::ContentSpecificStats::Add( - const ContentSpecificStats& other) { - e2e_delay_counter.Add(other.e2e_delay_counter); - interframe_delay_counter.Add(other.interframe_delay_counter); - flow_duration_ms += other.flow_duration_ms; - total_media_bytes += other.total_media_bytes; - received_height.Add(other.received_height); - received_width.Add(other.received_width); - qp_counter.Add(other.qp_counter); - frame_counts.key_frames += other.frame_counts.key_frames; - frame_counts.delta_frames += other.frame_counts.delta_frames; - interframe_delay_percentiles.Add(other.interframe_delay_percentiles); -} -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy.h b/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy.h deleted file mode 100644 index 9560973118..0000000000 --- a/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy.h +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef VIDEO_RECEIVE_STATISTICS_PROXY_H_ -#define VIDEO_RECEIVE_STATISTICS_PROXY_H_ - -#include -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/sequence_checker.h" -#include "call/video_receive_stream.h" -#include "modules/include/module_common_types.h" -#include "modules/video_coding/include/video_coding_defines.h" -#include "rtc_base/numerics/histogram_percentile_counter.h" -#include "rtc_base/numerics/moving_max_counter.h" -#include "rtc_base/numerics/sample_counter.h" -#include "rtc_base/rate_statistics.h" -#include "rtc_base/rate_tracker.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_annotations.h" -#include "video/quality_threshold.h" -#include "video/stats_counter.h" -#include "video/video_quality_observer.h" - -namespace webrtc { - -class Clock; -struct CodecSpecificInfo; - -class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, - public RtcpCnameCallback, - public RtcpPacketTypeCounterObserver, - public CallStatsObserver { - public: - ReceiveStatisticsProxy(uint32_t remote_ssrc, Clock* clock); - ~ReceiveStatisticsProxy() = default; - - VideoReceiveStream::Stats GetStats() const; - - void OnDecodedFrame(const VideoFrame& frame, - absl::optional qp, - int32_t decode_time_ms, - VideoContentType content_type); - void OnSyncOffsetUpdated(int64_t video_playout_ntp_ms, - int64_t sync_offset_ms, - double estimated_freq_khz); - void OnRenderedFrame(const VideoFrame& frame); - void OnIncomingPayloadType(int payload_type); - void OnDecoderImplementationName(const char* implementation_name); - - void OnPreDecode(VideoCodecType codec_type, int qp); - - void OnUniqueFramesCounted(int num_unique_frames); - - // Indicates video stream has been paused (no incoming packets). - void OnStreamInactive(); - - // Overrides VCMReceiveStatisticsCallback. - void OnCompleteFrame(bool is_keyframe, - size_t size_bytes, - VideoContentType content_type) override; - void OnDroppedFrames(uint32_t frames_dropped) override; - void OnFrameBufferTimingsUpdated(int max_decode_ms, - int current_delay_ms, - int target_delay_ms, - int jitter_buffer_ms, - int min_playout_delay_ms, - int render_delay_ms) override; - - void OnTimingFrameInfoUpdated(const TimingFrameInfo& info) override; - - // Overrides RtcpCnameCallback. - void OnCname(uint32_t ssrc, absl::string_view cname) override; - - // Overrides RtcpPacketTypeCounterObserver. - void RtcpPacketTypesCounterUpdated( - uint32_t ssrc, - const RtcpPacketTypeCounter& packet_counter) override; - - // Implements CallStatsObserver. - void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override; - - // Notification methods that are used to check our internal state and validate - // threading assumptions. These are called by VideoReceiveStream. - void DecoderThreadStarting(); - void DecoderThreadStopped(); - - // Produce histograms. Must be called after DecoderThreadStopped(), typically - // at the end of the call. 
- void UpdateHistograms(absl::optional fraction_lost, - const StreamDataCounters& rtp_stats, - const StreamDataCounters* rtx_stats); - - private: - struct QpCounters { - rtc::SampleCounter vp8; - }; - - struct ContentSpecificStats { - ContentSpecificStats(); - ~ContentSpecificStats(); - - void Add(const ContentSpecificStats& other); - - rtc::SampleCounter e2e_delay_counter; - rtc::SampleCounter interframe_delay_counter; - int64_t flow_duration_ms = 0; - int64_t total_media_bytes = 0; - rtc::SampleCounter received_width; - rtc::SampleCounter received_height; - rtc::SampleCounter qp_counter; - FrameCounts frame_counts; - rtc::HistogramPercentileCounter interframe_delay_percentiles; - }; - - void QualitySample() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - - // Removes info about old frames and then updates the framerate. - void UpdateFramerate(int64_t now_ms) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - - void UpdateDecodeTimeHistograms(int width, - int height, - int decode_time_ms) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - - absl::optional GetCurrentEstimatedPlayoutNtpTimestampMs( - int64_t now_ms) const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - - Clock* const clock_; - const int64_t start_ms_; - const bool enable_decode_time_histograms_; - - mutable Mutex mutex_; - int64_t last_sample_time_ RTC_GUARDED_BY(mutex_); - QualityThreshold fps_threshold_ RTC_GUARDED_BY(mutex_); - QualityThreshold qp_threshold_ RTC_GUARDED_BY(mutex_); - QualityThreshold variance_threshold_ RTC_GUARDED_BY(mutex_); - rtc::SampleCounter qp_sample_ RTC_GUARDED_BY(mutex_); - int num_bad_states_ RTC_GUARDED_BY(mutex_); - int num_certain_states_ RTC_GUARDED_BY(mutex_); - // Note: The `stats_.rtp_stats` member is not used or populated by this class. - mutable VideoReceiveStream::Stats stats_ RTC_GUARDED_BY(mutex_); - RateStatistics decode_fps_estimator_ RTC_GUARDED_BY(mutex_); - RateStatistics renders_fps_estimator_ RTC_GUARDED_BY(mutex_); - rtc::RateTracker render_fps_tracker_ RTC_GUARDED_BY(mutex_); - rtc::RateTracker render_pixel_tracker_ RTC_GUARDED_BY(mutex_); - rtc::SampleCounter sync_offset_counter_ RTC_GUARDED_BY(mutex_); - rtc::SampleCounter decode_time_counter_ RTC_GUARDED_BY(mutex_); - rtc::SampleCounter jitter_buffer_delay_counter_ RTC_GUARDED_BY(mutex_); - rtc::SampleCounter target_delay_counter_ RTC_GUARDED_BY(mutex_); - rtc::SampleCounter current_delay_counter_ RTC_GUARDED_BY(mutex_); - rtc::SampleCounter delay_counter_ RTC_GUARDED_BY(mutex_); - std::unique_ptr video_quality_observer_ - RTC_GUARDED_BY(mutex_); - mutable rtc::MovingMaxCounter interframe_delay_max_moving_ - RTC_GUARDED_BY(mutex_); - std::map content_specific_stats_ - RTC_GUARDED_BY(mutex_); - MaxCounter freq_offset_counter_ RTC_GUARDED_BY(mutex_); - QpCounters qp_counters_ RTC_GUARDED_BY(decode_thread_); - int64_t avg_rtt_ms_ RTC_GUARDED_BY(mutex_); - mutable std::map frame_window_ RTC_GUARDED_BY(&mutex_); - VideoContentType last_content_type_ RTC_GUARDED_BY(&mutex_); - VideoCodecType last_codec_type_ RTC_GUARDED_BY(&mutex_); - absl::optional first_frame_received_time_ms_ RTC_GUARDED_BY(&mutex_); - absl::optional first_decoded_frame_time_ms_ RTC_GUARDED_BY(&mutex_); - absl::optional last_decoded_frame_time_ms_ RTC_GUARDED_BY(&mutex_); - size_t num_delayed_frames_rendered_ RTC_GUARDED_BY(&mutex_); - int64_t sum_missed_render_deadline_ms_ RTC_GUARDED_BY(&mutex_); - // Mutable because calling Max() on MovingMaxCounter is not const. Yet it is - // called from const GetStats(). 
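The comment above is the classic reason for a mutable member: a logically const accessor still needs to update internal bookkeeping. A minimal illustration of the pattern (not WebRTC code):

#include <cassert>

// GetMax() is const from the caller's point of view, yet it may still touch
// cached state, which is only legal because that state is declared mutable.
class MaxTracker {
 public:
  void Add(int v) { if (v > max_) max_ = v; }
  int GetMax() const {
    ++reads_;  // allowed from a const method only because reads_ is mutable
    return max_;
  }
  int reads() const { return reads_; }

 private:
  int max_ = 0;
  mutable int reads_ = 0;  // bookkeeping updated from const methods
};

int main() {
  MaxTracker t;
  t.Add(3);
  t.Add(7);
  const MaxTracker& const_ref = t;
  assert(const_ref.GetMax() == 7);
  assert(t.reads() == 1);
  return 0;
}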
- mutable rtc::MovingMaxCounter timing_frame_info_counter_ - RTC_GUARDED_BY(&mutex_); - absl::optional num_unique_frames_ RTC_GUARDED_BY(mutex_); - absl::optional last_estimated_playout_ntp_timestamp_ms_ - RTC_GUARDED_BY(&mutex_); - absl::optional last_estimated_playout_time_ms_ - RTC_GUARDED_BY(&mutex_); - SequenceChecker decode_thread_; - SequenceChecker network_thread_; - SequenceChecker main_thread_; -}; - -} // namespace webrtc -#endif // VIDEO_RECEIVE_STATISTICS_PROXY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.cc b/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.cc index 22da793cdc..297f5d3de9 100644 --- a/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.cc +++ b/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.cc @@ -18,11 +18,9 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" #include "video/video_receive_stream2.h" @@ -103,8 +101,6 @@ ReceiveStatisticsProxy::ReceiveStatisticsProxy(uint32_t remote_ssrc, TaskQueueBase* worker_thread) : clock_(clock), start_ms_(clock->TimeInMilliseconds()), - enable_decode_time_histograms_( - !field_trial::IsEnabled("WebRTC-DecodeTimeHistogramsKillSwitch")), last_sample_time_(clock->TimeInMilliseconds()), fps_threshold_(kLowFpsThreshold, kHighFpsThreshold, @@ -582,63 +578,6 @@ void ReceiveStatisticsProxy::UpdateFramerate(int64_t now_ms) const { stats_.network_frame_rate = static_cast(framerate); } -void ReceiveStatisticsProxy::UpdateDecodeTimeHistograms( - int width, - int height, - int decode_time_ms) const { - RTC_DCHECK_RUN_ON(&main_thread_); - - bool is_4k = (width == 3840 || width == 4096) && height == 2160; - bool is_hd = width == 1920 && height == 1080; - // Only update histograms for 4k/HD and VP9/H264. - if ((is_4k || is_hd) && (last_codec_type_ == kVideoCodecVP9 || - last_codec_type_ == kVideoCodecH264)) { - const std::string kDecodeTimeUmaPrefix = - "WebRTC.Video.DecodeTimePerFrameInMs."; - - // Each histogram needs its own line for it to not be reused in the wrong - // way when the format changes. 
- if (last_codec_type_ == kVideoCodecVP9) { - bool is_sw_decoder = - stats_.decoder_implementation_name.compare(0, 6, "libvpx") == 0; - if (is_4k) { - if (is_sw_decoder) - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.4k.Sw", - decode_time_ms); - else - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.4k.Hw", - decode_time_ms); - } else { - if (is_sw_decoder) - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.Hd.Sw", - decode_time_ms); - else - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.Hd.Hw", - decode_time_ms); - } - } else { - bool is_sw_decoder = - stats_.decoder_implementation_name.compare(0, 6, "FFmpeg") == 0; - if (is_4k) { - if (is_sw_decoder) - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.4k.Sw", - decode_time_ms); - else - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.4k.Hw", - decode_time_ms); - - } else { - if (is_sw_decoder) - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.Hd.Sw", - decode_time_ms); - else - RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.Hd.Hw", - decode_time_ms); - } - } - } -} - absl::optional ReceiveStatisticsProxy::GetCurrentEstimatedPlayoutNtpTimestampMs( int64_t now_ms) const { @@ -651,11 +590,11 @@ ReceiveStatisticsProxy::GetCurrentEstimatedPlayoutNtpTimestampMs( return *last_estimated_playout_ntp_timestamp_ms_ + elapsed_ms; } -VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const { +VideoReceiveStreamInterface::Stats ReceiveStatisticsProxy::GetStats() const { RTC_DCHECK_RUN_ON(&main_thread_); - // Like VideoReceiveStream::GetStats, called on the worker thread from - // StatsCollector::ExtractMediaInfo via worker_thread()->Invoke(). + // Like VideoReceiveStreamInterface::GetStats, called on the worker thread + // from StatsCollector::ExtractMediaInfo via worker_thread()->BlockingCall(). // WebRtcVideoChannel::GetStats(), GetVideoReceiverInfo. 
// Get current frame rates here, as only updating them on new frames prevents @@ -702,19 +641,22 @@ VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const { void ReceiveStatisticsProxy::OnIncomingPayloadType(int payload_type) { RTC_DCHECK_RUN_ON(&decode_queue_); - worker_thread_->PostTask(ToQueuedTask(task_safety_, [payload_type, this]() { + worker_thread_->PostTask(SafeTask(task_safety_.flag(), [payload_type, this] { RTC_DCHECK_RUN_ON(&main_thread_); stats_.current_payload_type = payload_type; })); } -void ReceiveStatisticsProxy::OnDecoderImplementationName( - const char* implementation_name) { +void ReceiveStatisticsProxy::OnDecoderInfo( + const VideoDecoder::DecoderInfo& decoder_info) { RTC_DCHECK_RUN_ON(&decode_queue_); - worker_thread_->PostTask(ToQueuedTask( - task_safety_, [name = std::string(implementation_name), this]() { + worker_thread_->PostTask(SafeTask( + task_safety_.flag(), + [this, name = decoder_info.implementation_name, + is_hardware_accelerated = decoder_info.is_hardware_accelerated]() { RTC_DCHECK_RUN_ON(&main_thread_); stats_.decoder_implementation_name = name; + stats_.power_efficient_decoder = is_hardware_accelerated; })); } @@ -725,25 +667,19 @@ void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated( int jitter_buffer_ms, int min_playout_delay_ms, int render_delay_ms) { - RTC_DCHECK_RUN_ON(&decode_queue_); - worker_thread_->PostTask(ToQueuedTask( - task_safety_, - [max_decode_ms, current_delay_ms, target_delay_ms, jitter_buffer_ms, - min_playout_delay_ms, render_delay_ms, this]() { - RTC_DCHECK_RUN_ON(&main_thread_); - stats_.max_decode_ms = max_decode_ms; - stats_.current_delay_ms = current_delay_ms; - stats_.target_delay_ms = target_delay_ms; - stats_.jitter_buffer_ms = jitter_buffer_ms; - stats_.min_playout_delay_ms = min_playout_delay_ms; - stats_.render_delay_ms = render_delay_ms; - jitter_buffer_delay_counter_.Add(jitter_buffer_ms); - target_delay_counter_.Add(target_delay_ms); - current_delay_counter_.Add(current_delay_ms); - // Network delay (rtt/2) + target_delay_ms (jitter delay + decode time + - // render delay). - delay_counter_.Add(target_delay_ms + avg_rtt_ms_ / 2); - })); + RTC_DCHECK_RUN_ON(&main_thread_); + stats_.max_decode_ms = max_decode_ms; + stats_.current_delay_ms = current_delay_ms; + stats_.target_delay_ms = target_delay_ms; + stats_.jitter_buffer_ms = jitter_buffer_ms; + stats_.min_playout_delay_ms = min_playout_delay_ms; + stats_.render_delay_ms = render_delay_ms; + jitter_buffer_delay_counter_.Add(jitter_buffer_ms); + target_delay_counter_.Add(target_delay_ms); + current_delay_counter_.Add(current_delay_ms); + // Network delay (rtt/2) + target_delay_ms (jitter delay + decode time + + // render delay). 
+ delay_counter_.Add(target_delay_ms + avg_rtt_ms_ / 2); } void ReceiveStatisticsProxy::OnUniqueFramesCounted(int num_unique_frames) { @@ -753,25 +689,22 @@ void ReceiveStatisticsProxy::OnUniqueFramesCounted(int num_unique_frames) { void ReceiveStatisticsProxy::OnTimingFrameInfoUpdated( const TimingFrameInfo& info) { - RTC_DCHECK_RUN_ON(&decode_queue_); - worker_thread_->PostTask(ToQueuedTask(task_safety_, [info, this]() { - RTC_DCHECK_RUN_ON(&main_thread_); - if (info.flags != VideoSendTiming::kInvalid) { - int64_t now_ms = clock_->TimeInMilliseconds(); - timing_frame_info_counter_.Add(info, now_ms); - } + RTC_DCHECK_RUN_ON(&main_thread_); + if (info.flags != VideoSendTiming::kInvalid) { + int64_t now_ms = clock_->TimeInMilliseconds(); + timing_frame_info_counter_.Add(info, now_ms); + } - // Measure initial decoding latency between the first frame arriving and - // the first frame being decoded. - if (!first_frame_received_time_ms_.has_value()) { - first_frame_received_time_ms_ = info.receive_finish_ms; - } - if (stats_.first_frame_received_to_decoded_ms == -1 && - first_decoded_frame_time_ms_) { - stats_.first_frame_received_to_decoded_ms = - *first_decoded_frame_time_ms_ - *first_frame_received_time_ms_; - } - })); + // Measure initial decoding latency between the first frame arriving and + // the first frame being decoded. + if (!first_frame_received_time_ms_.has_value()) { + first_frame_received_time_ms_ = info.receive_finish_ms; + } + if (stats_.first_frame_received_to_decoded_ms == -1 && + first_decoded_frame_time_ms_) { + stats_.first_frame_received_to_decoded_ms = + *first_decoded_frame_time_ms_ - *first_frame_received_time_ms_; + } } void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated( @@ -793,7 +726,7 @@ void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated( // runs after the `ReceiveStatisticsProxy` has been deleted. In such a // case the packet_counter update won't be recorded. worker_thread_->PostTask( - ToQueuedTask(task_safety_, [ssrc, packet_counter, this]() { + SafeTask(task_safety_.flag(), [ssrc, packet_counter, this]() { RtcpPacketTypesCounterUpdated(ssrc, packet_counter); })); return; @@ -815,22 +748,44 @@ void ReceiveStatisticsProxy::OnCname(uint32_t ssrc, absl::string_view cname) { void ReceiveStatisticsProxy::OnDecodedFrame(const VideoFrame& frame, absl::optional qp, - int32_t decode_time_ms, + TimeDelta decode_time, VideoContentType content_type) { + TimeDelta processing_delay = TimeDelta::Zero(); + webrtc::Timestamp current_time = clock_->CurrentTime(); + // TODO(bugs.webrtc.org/13984): some tests do not fill packet_infos(). + TimeDelta assembly_time = TimeDelta::Zero(); + if (frame.packet_infos().size() > 0) { + const auto [first_packet, last_packet] = std::minmax_element( + frame.packet_infos().cbegin(), frame.packet_infos().cend(), + [](const webrtc::RtpPacketInfo& a, const webrtc::RtpPacketInfo& b) { + return a.receive_time() < b.receive_time(); + }); + if (first_packet->receive_time().IsFinite()) { + processing_delay = current_time - first_packet->receive_time(); + // Extract frame assembly time (i.e. time between earliest and latest + // packet arrival). Note: for single-packet frames this will be 0. + assembly_time = + last_packet->receive_time() - first_packet->receive_time(); + } + } // See VCMDecodedFrameCallback::Decoded for more info on what thread/queue we // may be on. E.g. 
on iOS this gets called on // "com.apple.coremedia.decompressionsession.clientcallback" - VideoFrameMetaData meta(frame, clock_->CurrentTime()); - worker_thread_->PostTask(ToQueuedTask( - task_safety_, [meta, qp, decode_time_ms, content_type, this]() { - OnDecodedFrame(meta, qp, decode_time_ms, content_type); + VideoFrameMetaData meta(frame, current_time); + worker_thread_->PostTask( + SafeTask(task_safety_.flag(), [meta, qp, decode_time, processing_delay, + assembly_time, content_type, this]() { + OnDecodedFrame(meta, qp, decode_time, processing_delay, assembly_time, + content_type); })); } void ReceiveStatisticsProxy::OnDecodedFrame( const VideoFrameMetaData& frame_meta, absl::optional qp, - int32_t decode_time_ms, + TimeDelta decode_time, + TimeDelta processing_delay, + TimeDelta assembly_time, VideoContentType content_type) { RTC_DCHECK_RUN_ON(&main_thread_); @@ -868,12 +823,13 @@ void ReceiveStatisticsProxy::OnDecodedFrame( << "QP sum was already set and no QP was given for a frame."; stats_.qp_sum.reset(); } - decode_time_counter_.Add(decode_time_ms); - stats_.decode_ms = decode_time_ms; - stats_.total_decode_time_ms += decode_time_ms; - if (enable_decode_time_histograms_) { - UpdateDecodeTimeHistograms(frame_meta.width, frame_meta.height, - decode_time_ms); + decode_time_counter_.Add(decode_time.ms()); + stats_.decode_ms = decode_time.ms(); + stats_.total_decode_time += decode_time; + stats_.total_processing_delay += processing_delay; + stats_.total_assembly_time += assembly_time; + if (!assembly_time.IsZero()) { + ++stats_.frames_assembled_from_multiple_packets; } last_content_type_ = content_type; @@ -998,22 +954,20 @@ void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) { // Can be called on either the decode queue or the worker thread // See FrameBuffer2 for more details. 
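The new OnDecodedFrame above derives two frame-level timings from per-packet receive timestamps: processing delay (decode completion minus first packet arrival) and assembly time (last minus first packet arrival, zero for single-packet frames). A self-contained sketch of the same minmax computation, with a hypothetical struct standing in for RtpPacketInfo:

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <vector>

struct PacketTimes {            // hypothetical stand-in for RtpPacketInfo
  int64_t receive_time_ms = 0;  // wall-clock arrival of one RTP packet
};

struct FrameDelays {
  int64_t processing_delay_ms = 0;  // decode finished minus first packet arrival
  int64_t assembly_time_ms = 0;     // last packet arrival minus first
};

FrameDelays ComputeDelays(const std::vector<PacketTimes>& packets,
                          int64_t now_ms) {
  FrameDelays out;
  if (packets.empty()) return out;  // some callers leave packet info empty
  auto [first, last] = std::minmax_element(
      packets.begin(), packets.end(),
      [](const PacketTimes& a, const PacketTimes& b) {
        return a.receive_time_ms < b.receive_time_ms;
      });
  out.processing_delay_ms = now_ms - first->receive_time_ms;
  out.assembly_time_ms = last->receive_time_ms - first->receive_time_ms;
  return out;
}

int main() {
  // Three packets of one frame arrive at 100, 104 and 110 ms; the frame is
  // decoded at 130 ms: 30 ms processing delay, 10 ms of which was assembly.
  FrameDelays d = ComputeDelays({{100}, {110}, {104}}, 130);
  assert(d.processing_delay_ms == 30);
  assert(d.assembly_time_ms == 10);
  return 0;
}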
- worker_thread_->PostTask(ToQueuedTask(task_safety_, [frames_dropped, this]() { - RTC_DCHECK_RUN_ON(&main_thread_); - stats_.frames_dropped += frames_dropped; - })); + worker_thread_->PostTask( + SafeTask(task_safety_.flag(), [frames_dropped, this]() { + RTC_DCHECK_RUN_ON(&main_thread_); + stats_.frames_dropped += frames_dropped; + })); } void ReceiveStatisticsProxy::OnPreDecode(VideoCodecType codec_type, int qp) { - RTC_DCHECK_RUN_ON(&decode_queue_); - worker_thread_->PostTask(ToQueuedTask(task_safety_, [codec_type, qp, this]() { - RTC_DCHECK_RUN_ON(&main_thread_); - last_codec_type_ = codec_type; - if (last_codec_type_ == kVideoCodecVP8 && qp != -1) { - qp_counters_.vp8.Add(qp); - qp_sample_.Add(qp); - } - })); + RTC_DCHECK_RUN_ON(&main_thread_); + last_codec_type_ = codec_type; + if (last_codec_type_ == kVideoCodecVP8 && qp != -1) { + qp_counters_.vp8.Add(qp); + qp_sample_.Add(qp); + } } void ReceiveStatisticsProxy::OnStreamInactive() { diff --git a/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.h b/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.h index 32269f7381..1a2bb77fa6 100644 --- a/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.h +++ b/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.h @@ -18,8 +18,10 @@ #include "absl/types/optional.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "api/units/timestamp.h" +#include "api/video_codecs/video_decoder.h" #include "call/video_receive_stream.h" #include "modules/include/module_common_types.h" #include "modules/video_coding/include/video_coding_defines.h" @@ -29,7 +31,6 @@ #include "rtc_base/rate_statistics.h" #include "rtc_base/rate_tracker.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread_annotations.h" #include "video/quality_threshold.h" #include "video/stats_counter.h" @@ -53,11 +54,11 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, TaskQueueBase* worker_thread); ~ReceiveStatisticsProxy() override; - VideoReceiveStream::Stats GetStats() const; + VideoReceiveStreamInterface::Stats GetStats() const; void OnDecodedFrame(const VideoFrame& frame, absl::optional qp, - int32_t decode_time_ms, + TimeDelta decode_time, VideoContentType content_type); // Called asyncronously on the worker thread as a result of a call to the @@ -65,7 +66,9 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, // the actual decoding happens. void OnDecodedFrame(const VideoFrameMetaData& frame_meta, absl::optional qp, - int32_t decode_time_ms, + TimeDelta decode_time, + TimeDelta processing_delay, + TimeDelta assembly_time, VideoContentType content_type); void OnSyncOffsetUpdated(int64_t video_playout_ntp_ms, @@ -73,7 +76,7 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, double estimated_freq_khz); void OnRenderedFrame(const VideoFrameMetaData& frame_meta); void OnIncomingPayloadType(int payload_type); - void OnDecoderImplementationName(const char* implementation_name); + void OnDecoderInfo(const VideoDecoder::DecoderInfo& decoder_info); void OnPreDecode(VideoCodecType codec_type, int qp); @@ -107,7 +110,7 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, void OnRttUpdate(int64_t avg_rtt_ms); // Notification methods that are used to check our internal state and validate - // threading assumptions. These are called by VideoReceiveStream. 
+ // threading assumptions. These are called by VideoReceiveStreamInterface. void DecoderThreadStarting(); void DecoderThreadStopped(); @@ -144,16 +147,11 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, // Removes info about old frames and then updates the framerate. void UpdateFramerate(int64_t now_ms) const; - void UpdateDecodeTimeHistograms(int width, - int height, - int decode_time_ms) const; - absl::optional GetCurrentEstimatedPlayoutNtpTimestampMs( int64_t now_ms) const; Clock* const clock_; const int64_t start_ms_; - const bool enable_decode_time_histograms_; int64_t last_sample_time_ RTC_GUARDED_BY(main_thread_); @@ -164,7 +162,8 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, int num_bad_states_ RTC_GUARDED_BY(main_thread_); int num_certain_states_ RTC_GUARDED_BY(main_thread_); // Note: The `stats_.rtp_stats` member is not used or populated by this class. - mutable VideoReceiveStream::Stats stats_ RTC_GUARDED_BY(main_thread_); + mutable VideoReceiveStreamInterface::Stats stats_ + RTC_GUARDED_BY(main_thread_); // Same as stats_.ssrc, but const (no lock required). const uint32_t remote_ssrc_; RateStatistics decode_fps_estimator_ RTC_GUARDED_BY(main_thread_); diff --git a/TMessagesProj/jni/voip/webrtc/common_video/incoming_video_stream.cc b/TMessagesProj/jni/voip/webrtc/video/render/incoming_video_stream.cc similarity index 89% rename from TMessagesProj/jni/voip/webrtc/common_video/incoming_video_stream.cc rename to TMessagesProj/jni/voip/webrtc/video/render/incoming_video_stream.cc index 15c668e78e..e740c47bd0 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/incoming_video_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/video/render/incoming_video_stream.cc @@ -8,15 +8,16 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "common_video/include/incoming_video_stream.h" +#include "video/render/incoming_video_stream.h" #include #include #include "absl/types/optional.h" -#include "common_video/video_render_frames.h" +#include "api/units/time_delta.h" #include "rtc_base/checks.h" #include "rtc_base/trace_event.h" +#include "video/render/video_render_frames.h" namespace webrtc { @@ -57,7 +58,8 @@ void IncomingVideoStream::Dequeue() { if (render_buffers_.HasPendingFrames()) { uint32_t wait_time = render_buffers_.TimeToNextFrameRelease(); - incoming_render_queue_.PostDelayedTask([this]() { Dequeue(); }, wait_time); + incoming_render_queue_.PostDelayedHighPrecisionTask( + [this]() { Dequeue(); }, TimeDelta::Millis(wait_time)); } } diff --git a/TMessagesProj/jni/voip/webrtc/common_video/include/incoming_video_stream.h b/TMessagesProj/jni/voip/webrtc/video/render/incoming_video_stream.h similarity index 86% rename from TMessagesProj/jni/voip/webrtc/common_video/include/incoming_video_stream.h rename to TMessagesProj/jni/voip/webrtc/video/render/incoming_video_stream.h index d616c5a2ec..4873ae7dcb 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/include/incoming_video_stream.h +++ b/TMessagesProj/jni/voip/webrtc/video/render/incoming_video_stream.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef COMMON_VIDEO_INCLUDE_INCOMING_VIDEO_STREAM_H_ -#define COMMON_VIDEO_INCLUDE_INCOMING_VIDEO_STREAM_H_ +#ifndef VIDEO_RENDER_INCOMING_VIDEO_STREAM_H_ +#define VIDEO_RENDER_INCOMING_VIDEO_STREAM_H_ #include @@ -17,10 +17,10 @@ #include "api/task_queue/task_queue_factory.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" -#include "common_video/video_render_frames.h" #include "rtc_base/race_checker.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" +#include "video/render/video_render_frames.h" namespace webrtc { @@ -45,4 +45,4 @@ class IncomingVideoStream : public rtc::VideoSinkInterface { } // namespace webrtc -#endif // COMMON_VIDEO_INCLUDE_INCOMING_VIDEO_STREAM_H_ +#endif // VIDEO_RENDER_INCOMING_VIDEO_STREAM_H_ diff --git a/TMessagesProj/jni/voip/webrtc/common_video/video_render_frames.cc b/TMessagesProj/jni/voip/webrtc/video/render/video_render_frames.cc similarity index 98% rename from TMessagesProj/jni/voip/webrtc/common_video/video_render_frames.cc rename to TMessagesProj/jni/voip/webrtc/video/render/video_render_frames.cc index 5ef51f2805..ea1362abbb 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/video_render_frames.cc +++ b/TMessagesProj/jni/voip/webrtc/video/render/video_render_frames.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "common_video/video_render_frames.h" +#include "video/render/video_render_frames.h" #include #include diff --git a/TMessagesProj/jni/voip/webrtc/common_video/video_render_frames.h b/TMessagesProj/jni/voip/webrtc/video/render/video_render_frames.h similarity index 91% rename from TMessagesProj/jni/voip/webrtc/common_video/video_render_frames.h rename to TMessagesProj/jni/voip/webrtc/video/render/video_render_frames.h index 9973c1ff08..7f48eae496 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/video_render_frames.h +++ b/TMessagesProj/jni/voip/webrtc/video/render/video_render_frames.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef COMMON_VIDEO_VIDEO_RENDER_FRAMES_H_ -#define COMMON_VIDEO_VIDEO_RENDER_FRAMES_H_ +#ifndef VIDEO_RENDER_VIDEO_RENDER_FRAMES_H_ +#define VIDEO_RENDER_VIDEO_RENDER_FRAMES_H_ #include #include @@ -52,4 +52,4 @@ class VideoRenderFrames { } // namespace webrtc -#endif // COMMON_VIDEO_VIDEO_RENDER_FRAMES_H_ +#endif // VIDEO_RENDER_VIDEO_RENDER_FRAMES_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer.cc b/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer.cc deleted file mode 100644 index 29ace90431..0000000000 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer.cc +++ /dev/null @@ -1,208 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "video/rtp_streams_synchronizer.h" - -#include "absl/types/optional.h" -#include "call/syncable.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/time_utils.h" -#include "rtc_base/trace_event.h" -#include "system_wrappers/include/rtp_to_ntp_estimator.h" - -namespace webrtc { -namespace { -// Time interval for logging stats. 
-constexpr int64_t kStatsLogIntervalMs = 10000; - -bool UpdateMeasurements(StreamSynchronization::Measurements* stream, - const Syncable::Info& info) { - RTC_DCHECK(stream); - stream->latest_timestamp = info.latest_received_capture_timestamp; - stream->latest_receive_time_ms = info.latest_receive_time_ms; - bool new_rtcp_sr = false; - if (!stream->rtp_to_ntp.UpdateMeasurements( - info.capture_time_ntp_secs, info.capture_time_ntp_frac, - info.capture_time_source_clock, &new_rtcp_sr)) { - return false; - } - return true; -} -} // namespace - -RtpStreamsSynchronizer::RtpStreamsSynchronizer(Syncable* syncable_video) - : syncable_video_(syncable_video), - syncable_audio_(nullptr), - sync_(), - last_sync_time_(rtc::TimeNanos()), - last_stats_log_ms_(rtc::TimeMillis()) { - RTC_DCHECK(syncable_video); - process_thread_checker_.Detach(); -} - -RtpStreamsSynchronizer::~RtpStreamsSynchronizer() = default; - -void RtpStreamsSynchronizer::ConfigureSync(Syncable* syncable_audio) { - MutexLock lock(&mutex_); - if (syncable_audio == syncable_audio_) { - // This prevents expensive no-ops. - return; - } - - syncable_audio_ = syncable_audio; - sync_.reset(nullptr); - if (syncable_audio_) { - sync_.reset(new StreamSynchronization(syncable_video_->id(), - syncable_audio_->id())); - } -} - -int64_t RtpStreamsSynchronizer::TimeUntilNextProcess() { - RTC_DCHECK_RUN_ON(&process_thread_checker_); - const int64_t kSyncIntervalMs = 1000; - return kSyncIntervalMs - - (rtc::TimeNanos() - last_sync_time_) / rtc::kNumNanosecsPerMillisec; -} - -void RtpStreamsSynchronizer::Process() { - RTC_DCHECK_RUN_ON(&process_thread_checker_); - last_sync_time_ = rtc::TimeNanos(); - - MutexLock lock(&mutex_); - if (!syncable_audio_) { - return; - } - RTC_DCHECK(sync_.get()); - - bool log_stats = false; - const int64_t now_ms = rtc::TimeMillis(); - if (now_ms - last_stats_log_ms_ > kStatsLogIntervalMs) { - last_stats_log_ms_ = now_ms; - log_stats = true; - } - - int64_t last_audio_receive_time_ms = - audio_measurement_.latest_receive_time_ms; - absl::optional audio_info = syncable_audio_->GetInfo(); - if (!audio_info || !UpdateMeasurements(&audio_measurement_, *audio_info)) { - return; - } - - if (last_audio_receive_time_ms == audio_measurement_.latest_receive_time_ms) { - // No new audio packet has been received since last update. - return; - } - - int64_t last_video_receive_ms = video_measurement_.latest_receive_time_ms; - absl::optional video_info = syncable_video_->GetInfo(); - if (!video_info || !UpdateMeasurements(&video_measurement_, *video_info)) { - return; - } - - if (last_video_receive_ms == video_measurement_.latest_receive_time_ms) { - // No new video packet has been received since last update. - return; - } - - int relative_delay_ms; - // Calculate how much later or earlier the audio stream is compared to video. 
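Process() above drives audio/video sync from a single number, the relative delay between the two streams, and then adjusts the minimum playout delays until they line up. The sketch below is a deliberately simplified model of that quantity; it is not the StreamSynchronization implementation:

#include <cassert>
#include <cstdint>

// Simplified "relative delay": the difference between the audio and video
// capture-to-playout latencies. Positive means audio plays out later than
// video, negative means it plays out earlier.
int64_t RelativeDelayMs(int64_t audio_capture_to_playout_ms,
                        int64_t video_capture_to_playout_ms) {
  return audio_capture_to_playout_ms - video_capture_to_playout_ms;
}

int main() {
  // Audio lags video by 35 ms, so the video path would need roughly 35 ms of
  // extra minimum playout delay (or the audio path less) to line up.
  assert(RelativeDelayMs(135, 100) == 35);
  return 0;
}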
- if (!sync_->ComputeRelativeDelay(audio_measurement_, video_measurement_, - &relative_delay_ms)) { - return; - } - - if (log_stats) { - RTC_LOG(LS_INFO) << "Sync info stats: " << now_ms - << ", {ssrc: " << sync_->audio_stream_id() << ", " - << "cur_delay_ms: " << audio_info->current_delay_ms - << "} {ssrc: " << sync_->video_stream_id() << ", " - << "cur_delay_ms: " << video_info->current_delay_ms - << "} {relative_delay_ms: " << relative_delay_ms << "} "; - } - - TRACE_COUNTER1("webrtc", "SyncCurrentVideoDelay", - video_info->current_delay_ms); - TRACE_COUNTER1("webrtc", "SyncCurrentAudioDelay", - audio_info->current_delay_ms); - TRACE_COUNTER1("webrtc", "SyncRelativeDelay", relative_delay_ms); - - int target_audio_delay_ms = 0; - int target_video_delay_ms = video_info->current_delay_ms; - // Calculate the necessary extra audio delay and desired total video - // delay to get the streams in sync. - if (!sync_->ComputeDelays(relative_delay_ms, audio_info->current_delay_ms, - &target_audio_delay_ms, &target_video_delay_ms)) { - return; - } - - if (log_stats) { - RTC_LOG(LS_INFO) << "Sync delay stats: " << now_ms - << ", {ssrc: " << sync_->audio_stream_id() << ", " - << "target_delay_ms: " << target_audio_delay_ms - << "} {ssrc: " << sync_->video_stream_id() << ", " - << "target_delay_ms: " << target_video_delay_ms << "} "; - } - - syncable_audio_->SetMinimumPlayoutDelay(target_audio_delay_ms); - syncable_video_->SetMinimumPlayoutDelay(target_video_delay_ms); -} - -// TODO(https://bugs.webrtc.org/7065): Move RtpToNtpEstimator out of -// RtpStreamsSynchronizer and into respective receive stream to always populate -// the estimated playout timestamp. -bool RtpStreamsSynchronizer::GetStreamSyncOffsetInMs( - uint32_t rtp_timestamp, - int64_t render_time_ms, - int64_t* video_playout_ntp_ms, - int64_t* stream_offset_ms, - double* estimated_freq_khz) const { - MutexLock lock(&mutex_); - if (!syncable_audio_) { - return false; - } - - uint32_t audio_rtp_timestamp; - int64_t time_ms; - if (!syncable_audio_->GetPlayoutRtpTimestamp(&audio_rtp_timestamp, - &time_ms)) { - return false; - } - - int64_t latest_audio_ntp; - if (!audio_measurement_.rtp_to_ntp.Estimate(audio_rtp_timestamp, - &latest_audio_ntp)) { - return false; - } - - syncable_audio_->SetEstimatedPlayoutNtpTimestampMs(latest_audio_ntp, time_ms); - - int64_t latest_video_ntp; - if (!video_measurement_.rtp_to_ntp.Estimate(rtp_timestamp, - &latest_video_ntp)) { - return false; - } - - // Current audio ntp. - int64_t now_ms = rtc::TimeMillis(); - latest_audio_ntp += (now_ms - time_ms); - - // Remove video playout delay. - int64_t time_to_render_ms = render_time_ms - now_ms; - if (time_to_render_ms > 0) - latest_video_ntp -= time_to_render_ms; - - *video_playout_ntp_ms = latest_video_ntp; - *stream_offset_ms = latest_audio_ntp - latest_video_ntp; - *estimated_freq_khz = video_measurement_.rtp_to_ntp.params()->frequency_khz; - return true; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer.h b/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer.h deleted file mode 100644 index b759ad1789..0000000000 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer.h +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// RtpStreamsSynchronizer is responsible for synchronizing audio and video for -// a given audio receive stream and video receive stream. - -#ifndef VIDEO_RTP_STREAMS_SYNCHRONIZER_H_ -#define VIDEO_RTP_STREAMS_SYNCHRONIZER_H_ - -#include - -#include "api/sequence_checker.h" -#include "modules/include/module.h" -#include "rtc_base/synchronization/mutex.h" -#include "video/stream_synchronization.h" - -namespace webrtc { - -class Syncable; - -// DEPRECATED. -class RtpStreamsSynchronizer : public Module { - public: - explicit RtpStreamsSynchronizer(Syncable* syncable_video); - ~RtpStreamsSynchronizer() override; - - void ConfigureSync(Syncable* syncable_audio); - - // Implements Module. - int64_t TimeUntilNextProcess() override; - void Process() override; - - // Gets the estimated playout NTP timestamp for the video frame with - // `rtp_timestamp` and the sync offset between the current played out audio - // frame and the video frame. Returns true on success, false otherwise. - // The `estimated_freq_khz` is the frequency used in the RTP to NTP timestamp - // conversion. - bool GetStreamSyncOffsetInMs(uint32_t rtp_timestamp, - int64_t render_time_ms, - int64_t* video_playout_ntp_ms, - int64_t* stream_offset_ms, - double* estimated_freq_khz) const; - - private: - Syncable* syncable_video_; - - mutable Mutex mutex_; - Syncable* syncable_audio_ RTC_GUARDED_BY(mutex_); - std::unique_ptr sync_ RTC_GUARDED_BY(mutex_); - StreamSynchronization::Measurements audio_measurement_ RTC_GUARDED_BY(mutex_); - StreamSynchronization::Measurements video_measurement_ RTC_GUARDED_BY(mutex_); - - SequenceChecker process_thread_checker_; - int64_t last_sync_time_ RTC_GUARDED_BY(&process_thread_checker_); - int64_t last_stats_log_ms_ RTC_GUARDED_BY(&process_thread_checker_); -}; - -} // namespace webrtc - -#endif // VIDEO_RTP_STREAMS_SYNCHRONIZER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.cc b/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.cc index 4096fceb99..0fbb3916cb 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.cc +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.cc @@ -29,10 +29,10 @@ bool UpdateMeasurements(StreamSynchronization::Measurements* stream, const Syncable::Info& info) { stream->latest_timestamp = info.latest_received_capture_timestamp; stream->latest_receive_time_ms = info.latest_receive_time_ms; - bool new_rtcp_sr = false; return stream->rtp_to_ntp.UpdateMeasurements( - info.capture_time_ntp_secs, info.capture_time_ntp_frac, - info.capture_time_source_clock, &new_rtcp_sr); + NtpTime(info.capture_time_ntp_secs, info.capture_time_ntp_frac), + info.capture_time_source_clock) != + RtpToNtpEstimator::kInvalidMeasurement; } } // namespace @@ -183,32 +183,35 @@ bool RtpStreamsSynchronizer::GetStreamSyncOffsetInMs( return false; } - int64_t latest_audio_ntp; - if (!audio_measurement_.rtp_to_ntp.Estimate(audio_rtp_timestamp, - &latest_audio_ntp)) { + NtpTime latest_audio_ntp = + audio_measurement_.rtp_to_ntp.Estimate(audio_rtp_timestamp); + if (!latest_audio_ntp.Valid()) { return false; } + int64_t latest_audio_ntp_ms = latest_audio_ntp.ToMs(); - syncable_audio_->SetEstimatedPlayoutNtpTimestampMs(latest_audio_ntp, time_ms); + syncable_audio_->SetEstimatedPlayoutNtpTimestampMs(latest_audio_ntp_ms, + time_ms); - int64_t 
latest_video_ntp; - if (!video_measurement_.rtp_to_ntp.Estimate(rtp_timestamp, - &latest_video_ntp)) { + NtpTime latest_video_ntp = + video_measurement_.rtp_to_ntp.Estimate(rtp_timestamp); + if (!latest_video_ntp.Valid()) { return false; } + int64_t latest_video_ntp_ms = latest_video_ntp.ToMs(); // Current audio ntp. int64_t now_ms = rtc::TimeMillis(); - latest_audio_ntp += (now_ms - time_ms); + latest_audio_ntp_ms += (now_ms - time_ms); // Remove video playout delay. int64_t time_to_render_ms = render_time_ms - now_ms; if (time_to_render_ms > 0) - latest_video_ntp -= time_to_render_ms; + latest_video_ntp_ms -= time_to_render_ms; - *video_playout_ntp_ms = latest_video_ntp; - *stream_offset_ms = latest_audio_ntp - latest_video_ntp; - *estimated_freq_khz = video_measurement_.rtp_to_ntp.params()->frequency_khz; + *video_playout_ntp_ms = latest_video_ntp_ms; + *stream_offset_ms = latest_audio_ntp_ms - latest_video_ntp_ms; + *estimated_freq_khz = video_measurement_.rtp_to_ntp.EstimatedFrequencyKhz(); return true; } diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.h b/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.h index 73fd604892..7042b1bd9a 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.h +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.h @@ -14,8 +14,8 @@ #include #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_queue.h" #include "rtc_base/task_utils/repeating_task.h" #include "video/stream_synchronization.h" diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.cc b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.cc index 32a438de1f..e69de29bb2 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.cc @@ -1,1301 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
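For reference, the offset computation that GetStreamSyncOffsetInMs performs after this change reduces to a few lines of integer arithmetic: map both streams to NTP milliseconds, advance the audio estimate to the current time, strip the remaining video render delay, and take the difference. A minimal standalone sketch of that arithmetic, with illustrative names (this free function is not a WebRTC API):

#include <cstdint>

// Returns how far audio is ahead of video, in milliseconds.
// Illustrative restatement of the arithmetic above, not the real implementation.
int64_t StreamSyncOffsetMs(int64_t audio_ntp_ms, int64_t audio_estimated_at_ms,
                           int64_t video_ntp_ms, int64_t render_time_ms,
                           int64_t now_ms) {
  // Advance the audio estimate from when it was taken to "now".
  audio_ntp_ms += now_ms - audio_estimated_at_ms;
  // Remove the video playout delay that is still ahead of us, if any.
  const int64_t time_to_render_ms = render_time_ms - now_ms;
  if (time_to_render_ms > 0)
    video_ntp_ms -= time_to_render_ms;
  return audio_ntp_ms - video_ntp_ms;
}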
- */ - -#include "video/rtp_video_stream_receiver.h" - -#include -#include -#include -#include -#include - -#include "absl/algorithm/container.h" -#include "absl/base/macros.h" -#include "absl/memory/memory.h" -#include "absl/types/optional.h" -#include "media/base/media_constants.h" -#include "modules/pacing/packet_router.h" -#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" -#include "modules/rtp_rtcp/include/receive_statistics.h" -#include "modules/rtp_rtcp/include/rtp_cvo.h" -#include "modules/rtp_rtcp/include/ulpfec_receiver.h" -#include "modules/rtp_rtcp/source/create_video_rtp_depacketizer.h" -#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" -#include "modules/rtp_rtcp/source/rtp_format.h" -#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" -#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" -#include "modules/rtp_rtcp/source/rtp_header_extensions.h" -#include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "modules/rtp_rtcp/source/rtp_rtcp_config.h" -#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" -#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" -#include "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h" -#include "modules/utility/include/process_thread.h" -#include "modules/video_coding/deprecated/nack_module.h" -#include "modules/video_coding/frame_object.h" -#include "modules/video_coding/h264_sprop_parameter_sets.h" -#include "modules/video_coding/h264_sps_pps_tracker.h" -#include "modules/video_coding/packet_buffer.h" -#include "rtc_base/checks.h" -#include "rtc_base/location.h" -#include "rtc_base/logging.h" -#include "rtc_base/strings/string_builder.h" -#include "system_wrappers/include/field_trial.h" -#include "system_wrappers/include/metrics.h" -#include "system_wrappers/include/ntp_time.h" -#include "video/receive_statistics_proxy.h" - -namespace webrtc { - -namespace { -// TODO(philipel): Change kPacketBufferStartSize back to 32 in M63 see: -// crbug.com/752886 -constexpr int kPacketBufferStartSize = 512; -constexpr int kPacketBufferMaxSize = 2048; - -int PacketBufferMaxSize() { - // The group here must be a positive power of 2, in which case that is used as - // size. All other values shall result in the default value being used. - const std::string group_name = - webrtc::field_trial::FindFullName("WebRTC-PacketBufferMaxSize"); - int packet_buffer_max_size = kPacketBufferMaxSize; - if (!group_name.empty() && - (sscanf(group_name.c_str(), "%d", &packet_buffer_max_size) != 1 || - packet_buffer_max_size <= 0 || - // Verify that the number is a positive power of 2. 
- (packet_buffer_max_size & (packet_buffer_max_size - 1)) != 0)) { - RTC_LOG(LS_WARNING) << "Invalid packet buffer max size: " << group_name; - packet_buffer_max_size = kPacketBufferMaxSize; - } - return packet_buffer_max_size; -} - -std::unique_ptr CreateRtpRtcpModule( - Clock* clock, - ReceiveStatistics* receive_statistics, - Transport* outgoing_transport, - RtcpRttStats* rtt_stats, - RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, - RtcpCnameCallback* rtcp_cname_callback, - bool non_sender_rtt_measurement, - uint32_t local_ssrc) { - RtpRtcpInterface::Configuration configuration; - configuration.clock = clock; - configuration.audio = false; - configuration.receiver_only = true; - configuration.receive_statistics = receive_statistics; - configuration.outgoing_transport = outgoing_transport; - configuration.rtt_stats = rtt_stats; - configuration.rtcp_packet_type_counter_observer = - rtcp_packet_type_counter_observer; - configuration.rtcp_cname_callback = rtcp_cname_callback; - configuration.local_media_ssrc = local_ssrc; - configuration.non_sender_rtt_measurement = non_sender_rtt_measurement; - - std::unique_ptr rtp_rtcp = RtpRtcp::DEPRECATED_Create(configuration); - rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound); - - return rtp_rtcp; -} - -static const int kPacketLogIntervalMs = 10000; - -} // namespace - -RtpVideoStreamReceiver::RtcpFeedbackBuffer::RtcpFeedbackBuffer( - KeyFrameRequestSender* key_frame_request_sender, - NackSender* nack_sender, - LossNotificationSender* loss_notification_sender) - : key_frame_request_sender_(key_frame_request_sender), - nack_sender_(nack_sender), - loss_notification_sender_(loss_notification_sender), - request_key_frame_(false) { - RTC_DCHECK(key_frame_request_sender_); - RTC_DCHECK(nack_sender_); - RTC_DCHECK(loss_notification_sender_); -} - -void RtpVideoStreamReceiver::RtcpFeedbackBuffer::RequestKeyFrame() { - MutexLock lock(&mutex_); - request_key_frame_ = true; -} - -void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendNack( - const std::vector& sequence_numbers, - bool buffering_allowed) { - RTC_DCHECK(!sequence_numbers.empty()); - MutexLock lock(&mutex_); - nack_sequence_numbers_.insert(nack_sequence_numbers_.end(), - sequence_numbers.cbegin(), - sequence_numbers.cend()); - if (!buffering_allowed) { - // Note that while *buffering* is not allowed, *batching* is, meaning that - // previously buffered messages may be sent along with the current message. 
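The packet-buffer sizing helper deleted above boils down to one rule: accept the field-trial value only if it parses as a positive power of two, otherwise keep the compiled-in default. A standalone sketch of that validation (the default of 2048 comes from the code above; the function itself is illustrative):

#include <cstdio>
#include <string>

// Illustrative helper; mirrors the power-of-two check above.
int ParsePacketBufferMaxSize(const std::string& trial_value,
                             int default_size /* e.g. 2048 */) {
  int size = default_size;
  if (!trial_value.empty() &&
      (std::sscanf(trial_value.c_str(), "%d", &size) != 1 ||
       size <= 0 ||
       // A positive power of two has exactly one bit set.
       (size & (size - 1)) != 0)) {
    size = default_size;  // Reject malformed or non-power-of-two values.
  }
  return size;
}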
- SendRtcpFeedback(ConsumeRtcpFeedbackLocked()); - } -} - -void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendLossNotification( - uint16_t last_decoded_seq_num, - uint16_t last_received_seq_num, - bool decodability_flag, - bool buffering_allowed) { - RTC_DCHECK(buffering_allowed); - MutexLock lock(&mutex_); - RTC_DCHECK(!lntf_state_) - << "SendLossNotification() called twice in a row with no call to " - "SendBufferedRtcpFeedback() in between."; - lntf_state_ = absl::make_optional( - last_decoded_seq_num, last_received_seq_num, decodability_flag); -} - -void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendBufferedRtcpFeedback() { - SendRtcpFeedback(ConsumeRtcpFeedback()); -} - -RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumedRtcpFeedback -RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumeRtcpFeedback() { - MutexLock lock(&mutex_); - return ConsumeRtcpFeedbackLocked(); -} - -RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumedRtcpFeedback -RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumeRtcpFeedbackLocked() { - ConsumedRtcpFeedback feedback; - std::swap(feedback.request_key_frame, request_key_frame_); - std::swap(feedback.nack_sequence_numbers, nack_sequence_numbers_); - std::swap(feedback.lntf_state, lntf_state_); - return feedback; -} - -void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendRtcpFeedback( - ConsumedRtcpFeedback feedback) { - if (feedback.lntf_state) { - // If either a NACK or a key frame request is sent, we should buffer - // the LNTF and wait for them (NACK or key frame request) to trigger - // the compound feedback message. - // Otherwise, the LNTF should be sent out immediately. - const bool buffering_allowed = - feedback.request_key_frame || !feedback.nack_sequence_numbers.empty(); - - loss_notification_sender_->SendLossNotification( - feedback.lntf_state->last_decoded_seq_num, - feedback.lntf_state->last_received_seq_num, - feedback.lntf_state->decodability_flag, buffering_allowed); - } - - if (feedback.request_key_frame) { - key_frame_request_sender_->RequestKeyFrame(); - } else if (!feedback.nack_sequence_numbers.empty()) { - nack_sender_->SendNack(feedback.nack_sequence_numbers, true); - } -} - -// DEPRECATED -RtpVideoStreamReceiver::RtpVideoStreamReceiver( - Clock* clock, - Transport* transport, - RtcpRttStats* rtt_stats, - PacketRouter* packet_router, - const VideoReceiveStream::Config* config, - ReceiveStatistics* rtp_receive_statistics, - ReceiveStatisticsProxy* receive_stats_proxy, - ProcessThread* process_thread, - NackSender* nack_sender, - KeyFrameRequestSender* keyframe_request_sender, - OnCompleteFrameCallback* complete_frame_callback, - rtc::scoped_refptr frame_decryptor, - rtc::scoped_refptr frame_transformer) - : RtpVideoStreamReceiver(clock, - transport, - rtt_stats, - packet_router, - config, - rtp_receive_statistics, - receive_stats_proxy, - receive_stats_proxy, - process_thread, - nack_sender, - keyframe_request_sender, - complete_frame_callback, - frame_decryptor, - frame_transformer) {} - -RtpVideoStreamReceiver::RtpVideoStreamReceiver( - Clock* clock, - Transport* transport, - RtcpRttStats* rtt_stats, - PacketRouter* packet_router, - const VideoReceiveStream::Config* config, - ReceiveStatistics* rtp_receive_statistics, - RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, - RtcpCnameCallback* rtcp_cname_callback, - ProcessThread* process_thread, - NackSender* nack_sender, - KeyFrameRequestSender* keyframe_request_sender, - OnCompleteFrameCallback* complete_frame_callback, - rtc::scoped_refptr frame_decryptor, - 
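The flush logic of the feedback buffer above follows a single precedence rule: a key frame request suppresses any queued NACKs, while a pending loss notification rides along with whichever of the two goes out, and is sent immediately if neither does. A simplified sketch with std::function callbacks standing in for the sender interfaces (all types and names here are illustrative):

#include <cstdint>
#include <functional>
#include <optional>
#include <vector>

struct BufferedFeedback {
  bool request_key_frame = false;
  std::vector<uint16_t> nack_sequence_numbers;
  std::optional<int> lntf_state;  // Stand-in for the buffered LNTF details.
};

void Flush(const BufferedFeedback& fb,
           const std::function<void()>& request_key_frame,
           const std::function<void(const std::vector<uint16_t>&)>& send_nack,
           const std::function<void(bool buffering_allowed)>& send_lntf) {
  if (fb.lntf_state) {
    // Buffer the LNTF only if a NACK or key frame request will follow in the
    // same compound message; otherwise it goes out on its own right away.
    const bool buffering_allowed =
        fb.request_key_frame || !fb.nack_sequence_numbers.empty();
    send_lntf(buffering_allowed);
  }
  if (fb.request_key_frame) {
    request_key_frame();
  } else if (!fb.nack_sequence_numbers.empty()) {
    send_nack(fb.nack_sequence_numbers);
  }
}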
rtc::scoped_refptr frame_transformer) - : clock_(clock), - config_(*config), - packet_router_(packet_router), - process_thread_(process_thread), - ntp_estimator_(clock), - rtp_header_extensions_(config_.rtp.extensions), - forced_playout_delay_max_ms_("max_ms", absl::nullopt), - forced_playout_delay_min_ms_("min_ms", absl::nullopt), - rtp_receive_statistics_(rtp_receive_statistics), - ulpfec_receiver_(UlpfecReceiver::Create(config->rtp.remote_ssrc, - this, - config->rtp.extensions)), - receiving_(false), - last_packet_log_ms_(-1), - rtp_rtcp_(CreateRtpRtcpModule( - clock, - rtp_receive_statistics_, - transport, - rtt_stats, - rtcp_packet_type_counter_observer, - rtcp_cname_callback, - config_.rtp.rtcp_xr.receiver_reference_time_report, - config_.rtp.local_ssrc)), - complete_frame_callback_(complete_frame_callback), - keyframe_request_sender_(keyframe_request_sender), - // TODO(bugs.webrtc.org/10336): Let `rtcp_feedback_buffer_` communicate - // directly with `rtp_rtcp_`. - rtcp_feedback_buffer_(this, nack_sender, this), - packet_buffer_(kPacketBufferStartSize, PacketBufferMaxSize()), - reference_finder_(std::make_unique()), - has_received_frame_(false), - frames_decryptable_(false), - absolute_capture_time_interpolator_(clock) { - constexpr bool remb_candidate = true; - if (packet_router_) - packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate); - - RTC_DCHECK(config_.rtp.rtcp_mode != RtcpMode::kOff) - << "A stream should not be configured with RTCP disabled. This value is " - "reserved for internal usage."; - // TODO(pbos): What's an appropriate local_ssrc for receive-only streams? - RTC_DCHECK(config_.rtp.local_ssrc != 0); - RTC_DCHECK(config_.rtp.remote_ssrc != config_.rtp.local_ssrc); - - rtp_rtcp_->SetRTCPStatus(config_.rtp.rtcp_mode); - rtp_rtcp_->SetRemoteSSRC(config_.rtp.remote_ssrc); - - static const int kMaxPacketAgeToNack = 450; - const int max_reordering_threshold = (config_.rtp.nack.rtp_history_ms > 0) - ? kMaxPacketAgeToNack - : kDefaultMaxReorderingThreshold; - rtp_receive_statistics_->SetMaxReorderingThreshold(config_.rtp.remote_ssrc, - max_reordering_threshold); - // TODO(nisse): For historic reasons, we applied the above - // max_reordering_threshold also for RTX stats, which makes little sense since - // we don't NACK rtx packets. Consider deleting the below block, and rely on - // the default threshold. - if (config_.rtp.rtx_ssrc) { - rtp_receive_statistics_->SetMaxReorderingThreshold( - config_.rtp.rtx_ssrc, max_reordering_threshold); - } - ParseFieldTrial( - {&forced_playout_delay_max_ms_, &forced_playout_delay_min_ms_}, - field_trial::FindFullName("WebRTC-ForcePlayoutDelay")); - - process_thread_->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE); - - if (config_.rtp.lntf.enabled) { - loss_notification_controller_ = - std::make_unique(&rtcp_feedback_buffer_, - &rtcp_feedback_buffer_); - } - - if (config_.rtp.nack.rtp_history_ms != 0) { - nack_module_ = std::make_unique( - clock_, &rtcp_feedback_buffer_, &rtcp_feedback_buffer_); - process_thread_->RegisterModule(nack_module_.get(), RTC_FROM_HERE); - } - - // Only construct the encrypted receiver if frame encryption is enabled. 
- if (config_.crypto_options.sframe.require_frame_encryption) { - buffered_frame_decryptor_ = - std::make_unique(this, this); - if (frame_decryptor != nullptr) { - buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor)); - } - } - - if (frame_transformer) { - frame_transformer_delegate_ = - rtc::make_ref_counted( - this, std::move(frame_transformer), rtc::Thread::Current(), - config_.rtp.remote_ssrc); - frame_transformer_delegate_->Init(); - } -} - -RtpVideoStreamReceiver::~RtpVideoStreamReceiver() { - RTC_DCHECK(secondary_sinks_.empty()); - - if (nack_module_) { - process_thread_->DeRegisterModule(nack_module_.get()); - } - - process_thread_->DeRegisterModule(rtp_rtcp_.get()); - - if (packet_router_) - packet_router_->RemoveReceiveRtpModule(rtp_rtcp_.get()); - UpdateHistograms(); - if (frame_transformer_delegate_) - frame_transformer_delegate_->Reset(); -} - -void RtpVideoStreamReceiver::AddReceiveCodec( - uint8_t payload_type, - VideoCodecType codec_type, - const std::map& codec_params, - bool raw_payload) { - if (codec_params.count(cricket::kH264FmtpSpsPpsIdrInKeyframe) || - field_trial::IsEnabled("WebRTC-SpsPpsIdrIsH264Keyframe")) { - MutexLock lock(&packet_buffer_lock_); - packet_buffer_.ForceSpsPpsIdrIsH264Keyframe(); - } - payload_type_map_.emplace( - payload_type, raw_payload ? std::make_unique() - : CreateVideoRtpDepacketizer(codec_type)); - pt_codec_params_.emplace(payload_type, codec_params); -} - -absl::optional RtpVideoStreamReceiver::GetSyncInfo() const { - Syncable::Info info; - if (rtp_rtcp_->RemoteNTP(&info.capture_time_ntp_secs, - &info.capture_time_ntp_frac, - /*rtcp_arrival_time_secs=*/nullptr, - /*rtcp_arrival_time_frac=*/nullptr, - &info.capture_time_source_clock) != 0) { - return absl::nullopt; - } - { - MutexLock lock(&sync_info_lock_); - if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_) { - return absl::nullopt; - } - info.latest_received_capture_timestamp = *last_received_rtp_timestamp_; - info.latest_receive_time_ms = last_received_rtp_system_time_->ms(); - } - - // Leaves info.current_delay_ms uninitialized. - return info; -} - -RtpVideoStreamReceiver::ParseGenericDependenciesResult -RtpVideoStreamReceiver::ParseGenericDependenciesExtension( - const RtpPacketReceived& rtp_packet, - RTPVideoHeader* video_header) { - if (rtp_packet.HasExtension()) { - webrtc::DependencyDescriptor dependency_descriptor; - if (!rtp_packet.GetExtension( - video_structure_.get(), &dependency_descriptor)) { - // Descriptor is there, but failed to parse. Either it is invalid, - // or too old packet (after relevant video_structure_ changed), - // or too new packet (before relevant video_structure_ arrived). - // Drop such packet to be on the safe side. - // TODO(bugs.webrtc.org/10342): Stash too new packet. 
- RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc() - << " Failed to parse dependency descriptor."; - return kDropPacket; - } - if (dependency_descriptor.attached_structure != nullptr && - !dependency_descriptor.first_packet_in_frame) { - RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc() - << "Invalid dependency descriptor: structure " - "attached to non first packet of a frame."; - return kDropPacket; - } - video_header->is_first_packet_in_frame = - dependency_descriptor.first_packet_in_frame; - video_header->is_last_packet_in_frame = - dependency_descriptor.last_packet_in_frame; - - int64_t frame_id = - frame_id_unwrapper_.Unwrap(dependency_descriptor.frame_number); - auto& generic_descriptor_info = video_header->generic.emplace(); - generic_descriptor_info.frame_id = frame_id; - generic_descriptor_info.spatial_index = - dependency_descriptor.frame_dependencies.spatial_id; - generic_descriptor_info.temporal_index = - dependency_descriptor.frame_dependencies.temporal_id; - for (int fdiff : dependency_descriptor.frame_dependencies.frame_diffs) { - generic_descriptor_info.dependencies.push_back(frame_id - fdiff); - } - generic_descriptor_info.decode_target_indications = - dependency_descriptor.frame_dependencies.decode_target_indications; - if (dependency_descriptor.resolution) { - video_header->width = dependency_descriptor.resolution->Width(); - video_header->height = dependency_descriptor.resolution->Height(); - } - - // FrameDependencyStructure is sent in dependency descriptor of the first - // packet of a key frame and required for parsed dependency descriptor in - // all the following packets until next key frame. - // Save it if there is a (potentially) new structure. - if (dependency_descriptor.attached_structure) { - RTC_DCHECK(dependency_descriptor.first_packet_in_frame); - if (video_structure_frame_id_ > frame_id) { - RTC_LOG(LS_WARNING) - << "Arrived key frame with id " << frame_id << " and structure id " - << dependency_descriptor.attached_structure->structure_id - << " is older than the latest received key frame with id " - << *video_structure_frame_id_ << " and structure id " - << video_structure_->structure_id; - return kDropPacket; - } - video_structure_ = std::move(dependency_descriptor.attached_structure); - video_structure_frame_id_ = frame_id; - video_header->frame_type = VideoFrameType::kVideoFrameKey; - } else { - video_header->frame_type = VideoFrameType::kVideoFrameDelta; - } - return kHasGenericDescriptor; - } - - RtpGenericFrameDescriptor generic_frame_descriptor; - if (!rtp_packet.GetExtension( - &generic_frame_descriptor)) { - return kNoGenericDescriptor; - } - - video_header->is_first_packet_in_frame = - generic_frame_descriptor.FirstPacketInSubFrame(); - video_header->is_last_packet_in_frame = - generic_frame_descriptor.LastPacketInSubFrame(); - - if (generic_frame_descriptor.FirstPacketInSubFrame()) { - video_header->frame_type = - generic_frame_descriptor.FrameDependenciesDiffs().empty() - ? 
VideoFrameType::kVideoFrameKey - : VideoFrameType::kVideoFrameDelta; - - auto& generic_descriptor_info = video_header->generic.emplace(); - int64_t frame_id = - frame_id_unwrapper_.Unwrap(generic_frame_descriptor.FrameId()); - generic_descriptor_info.frame_id = frame_id; - generic_descriptor_info.spatial_index = - generic_frame_descriptor.SpatialLayer(); - generic_descriptor_info.temporal_index = - generic_frame_descriptor.TemporalLayer(); - for (uint16_t fdiff : generic_frame_descriptor.FrameDependenciesDiffs()) { - generic_descriptor_info.dependencies.push_back(frame_id - fdiff); - } - } - video_header->width = generic_frame_descriptor.Width(); - video_header->height = generic_frame_descriptor.Height(); - return kHasGenericDescriptor; -} - -void RtpVideoStreamReceiver::OnReceivedPayloadData( - rtc::CopyOnWriteBuffer codec_payload, - const RtpPacketReceived& rtp_packet, - const RTPVideoHeader& video) { - RTC_DCHECK_RUN_ON(&worker_task_checker_); - - auto packet = - std::make_unique(rtp_packet, video); - - RTPVideoHeader& video_header = packet->video_header; - video_header.rotation = kVideoRotation_0; - video_header.content_type = VideoContentType::UNSPECIFIED; - video_header.video_timing.flags = VideoSendTiming::kInvalid; - video_header.is_last_packet_in_frame |= rtp_packet.Marker(); - - if (const auto* vp9_header = - absl::get_if(&video_header.video_type_header)) { - video_header.is_last_packet_in_frame |= vp9_header->end_of_frame; - video_header.is_first_packet_in_frame |= vp9_header->beginning_of_frame; - } - - rtp_packet.GetExtension(&video_header.rotation); - rtp_packet.GetExtension( - &video_header.content_type); - rtp_packet.GetExtension(&video_header.video_timing); - if (forced_playout_delay_max_ms_ && forced_playout_delay_min_ms_) { - video_header.playout_delay.max_ms = *forced_playout_delay_max_ms_; - video_header.playout_delay.min_ms = *forced_playout_delay_min_ms_; - } else { - rtp_packet.GetExtension(&video_header.playout_delay); - } - - ParseGenericDependenciesResult generic_descriptor_state = - ParseGenericDependenciesExtension(rtp_packet, &video_header); - - if (!rtp_packet.recovered()) { - UpdatePacketReceiveTimestamps( - rtp_packet, video_header.frame_type == VideoFrameType::kVideoFrameKey); - } - - if (generic_descriptor_state == kDropPacket) - return; - - // Color space should only be transmitted in the last packet of a frame, - // therefore, neglect it otherwise so that last_color_space_ is not reset by - // mistake. - if (video_header.is_last_packet_in_frame) { - video_header.color_space = rtp_packet.GetExtension(); - if (video_header.color_space || - video_header.frame_type == VideoFrameType::kVideoFrameKey) { - // Store color space since it's only transmitted when changed or for key - // frames. Color space will be cleared if a key frame is transmitted - // without color space information. - last_color_space_ = video_header.color_space; - } else if (last_color_space_) { - video_header.color_space = last_color_space_; - } - } - video_header.video_frame_tracking_id = - rtp_packet.GetExtension(); - - if (loss_notification_controller_) { - if (rtp_packet.recovered()) { - // TODO(bugs.webrtc.org/10336): Implement support for reordering. 
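Both descriptor paths above normalize the same way: a compact, wrapping frame number plus backward "frame diffs" on the wire becomes an absolute, monotonically increasing frame id and a list of absolute dependency ids. A minimal sketch of that step (the struct below is a stand-in, not the real RTPVideoHeader types):

#include <cstdint>
#include <vector>

struct GenericFrameInfo {  // Illustrative stand-in.
  int64_t frame_id = 0;
  std::vector<int64_t> dependencies;
};

GenericFrameInfo NormalizeDependencies(int64_t unwrapped_frame_id,
                                       const std::vector<int>& frame_diffs) {
  GenericFrameInfo info;
  info.frame_id = unwrapped_frame_id;
  info.dependencies.reserve(frame_diffs.size());
  for (int fdiff : frame_diffs) {
    // A diff of N means "this frame depends on the frame N ids earlier".
    info.dependencies.push_back(unwrapped_frame_id - fdiff);
  }
  return info;
}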
- RTC_LOG(LS_INFO) - << "LossNotificationController does not support reordering."; - } else if (generic_descriptor_state == kNoGenericDescriptor) { - RTC_LOG(LS_WARNING) << "LossNotificationController requires generic " - "frame descriptor, but it is missing."; - } else { - if (video_header.is_first_packet_in_frame) { - RTC_DCHECK(video_header.generic); - LossNotificationController::FrameDetails frame; - frame.is_keyframe = - video_header.frame_type == VideoFrameType::kVideoFrameKey; - frame.frame_id = video_header.generic->frame_id; - frame.frame_dependencies = video_header.generic->dependencies; - loss_notification_controller_->OnReceivedPacket( - rtp_packet.SequenceNumber(), &frame); - } else { - loss_notification_controller_->OnReceivedPacket( - rtp_packet.SequenceNumber(), nullptr); - } - } - } - - if (nack_module_) { - const bool is_keyframe = - video_header.is_first_packet_in_frame && - video_header.frame_type == VideoFrameType::kVideoFrameKey; - - packet->times_nacked = nack_module_->OnReceivedPacket( - rtp_packet.SequenceNumber(), is_keyframe, rtp_packet.recovered()); - } else { - packet->times_nacked = -1; - } - - if (codec_payload.size() == 0) { - NotifyReceiverOfEmptyPacket(packet->seq_num); - rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); - return; - } - - if (packet->codec() == kVideoCodecH264) { - // Only when we start to receive packets will we know what payload type - // that will be used. When we know the payload type insert the correct - // sps/pps into the tracker. - if (packet->payload_type != last_payload_type_) { - last_payload_type_ = packet->payload_type; - InsertSpsPpsIntoTracker(packet->payload_type); - } - - video_coding::H264SpsPpsTracker::FixedBitstream fixed = - tracker_.CopyAndFixBitstream( - rtc::MakeArrayView(codec_payload.cdata(), codec_payload.size()), - &packet->video_header); - - switch (fixed.action) { - case video_coding::H264SpsPpsTracker::kRequestKeyframe: - rtcp_feedback_buffer_.RequestKeyFrame(); - rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); - ABSL_FALLTHROUGH_INTENDED; - case video_coding::H264SpsPpsTracker::kDrop: - return; - case video_coding::H264SpsPpsTracker::kInsert: - packet->video_payload = std::move(fixed.bitstream); - break; - } - } -#ifndef DISABLE_H265 - else if (packet->codec() == kVideoCodecH265) { - // Only when we start to receive packets will we know what payload type - // that will be used. When we know the payload type insert the correct - // sps/pps into the tracker. 
- if (packet->payload_type != last_payload_type_) { - last_payload_type_ = packet->payload_type; - InsertSpsPpsIntoTracker(packet->payload_type); - } - - video_coding::H265VpsSpsPpsTracker::FixedBitstream fixed = - h265_tracker_.CopyAndFixBitstream( - rtc::MakeArrayView(codec_payload.cdata(), codec_payload.size()), - &packet->video_header); - - switch (fixed.action) { - case video_coding::H265VpsSpsPpsTracker::kRequestKeyframe: - rtcp_feedback_buffer_.RequestKeyFrame(); - rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); - ABSL_FALLTHROUGH_INTENDED; - case video_coding::H265VpsSpsPpsTracker::kDrop: - return; - case video_coding::H265VpsSpsPpsTracker::kInsert: - packet->video_payload = std::move(fixed.bitstream); - break; - } - } -#endif - else { - packet->video_payload = std::move(codec_payload); - } - - rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); - frame_counter_.Add(packet->timestamp); - video_coding::PacketBuffer::InsertResult insert_result; - { - MutexLock lock(&packet_buffer_lock_); - int64_t unwrapped_rtp_seq_num = - rtp_seq_num_unwrapper_.Unwrap(rtp_packet.SequenceNumber()); - auto& packet_info = - packet_infos_ - .emplace( - unwrapped_rtp_seq_num, - RtpPacketInfo( - rtp_packet.Ssrc(), rtp_packet.Csrcs(), - rtp_packet.Timestamp(), - /*audio_level=*/absl::nullopt, - rtp_packet.GetExtension(), - /*receive_time_ms=*/clock_->TimeInMilliseconds())) - .first->second; - - // Try to extrapolate absolute capture time if it is missing. - absl::optional absolute_capture_time = - absolute_capture_time_interpolator_.OnReceivePacket( - AbsoluteCaptureTimeInterpolator::GetSource(packet_info.ssrc(), - packet_info.csrcs()), - packet_info.rtp_timestamp(), - // Assume frequency is the same one for all video frames. - kVideoPayloadTypeFrequency, packet_info.absolute_capture_time()); - packet_info.set_absolute_capture_time(absolute_capture_time); - - if (absolute_capture_time.has_value()) { - packet_info.set_local_capture_clock_offset( - capture_clock_offset_updater_.AdjustEstimatedCaptureClockOffset( - absolute_capture_time->estimated_capture_clock_offset)); - } - - insert_result = packet_buffer_.InsertPacket(std::move(packet)); - } - OnInsertedPacket(std::move(insert_result)); -} - -void RtpVideoStreamReceiver::OnRecoveredPacket(const uint8_t* rtp_packet, - size_t rtp_packet_length) { - RtpPacketReceived packet; - if (!packet.Parse(rtp_packet, rtp_packet_length)) - return; - if (packet.PayloadType() == config_.rtp.red_payload_type) { - RTC_LOG(LS_WARNING) << "Discarding recovered packet with RED encapsulation"; - return; - } - - packet.IdentifyExtensions(rtp_header_extensions_); - packet.set_payload_type_frequency(kVideoPayloadTypeFrequency); - // TODO(nisse): UlpfecReceiverImpl::ProcessReceivedFec passes both - // original (decapsulated) media packets and recovered packets to - // this callback. We need a way to distinguish, for setting - // packet.recovered() correctly. Ideally, move RED decapsulation out - // of the Ulpfec implementation. - - ReceivePacket(packet); -} - -// This method handles both regular RTP packets and packets recovered -// via FlexFEC. -void RtpVideoStreamReceiver::OnRtpPacket(const RtpPacketReceived& packet) { - RTC_DCHECK_RUN_ON(&worker_task_checker_); - - if (!receiving_) { - return; - } - - ReceivePacket(packet); - - // Update receive statistics after ReceivePacket. - // Receive statistics will be reset if the payload type changes (make sure - // that the first packet is included in the stats). 
- if (!packet.recovered()) { - rtp_receive_statistics_->OnRtpPacket(packet); - } - - for (RtpPacketSinkInterface* secondary_sink : secondary_sinks_) { - secondary_sink->OnRtpPacket(packet); - } -} - -void RtpVideoStreamReceiver::RequestKeyFrame() { - // TODO(bugs.webrtc.org/10336): Allow the sender to ignore key frame requests - // issued by anything other than the LossNotificationController if it (the - // sender) is relying on LNTF alone. - if (keyframe_request_sender_) { - keyframe_request_sender_->RequestKeyFrame(); - } else { - rtp_rtcp_->SendPictureLossIndication(); - } -} - -void RtpVideoStreamReceiver::SendLossNotification( - uint16_t last_decoded_seq_num, - uint16_t last_received_seq_num, - bool decodability_flag, - bool buffering_allowed) { - RTC_DCHECK(config_.rtp.lntf.enabled); - rtp_rtcp_->SendLossNotification(last_decoded_seq_num, last_received_seq_num, - decodability_flag, buffering_allowed); -} - -bool RtpVideoStreamReceiver::IsUlpfecEnabled() const { - return config_.rtp.ulpfec_payload_type != -1; -} - -bool RtpVideoStreamReceiver::IsRetransmissionsEnabled() const { - return config_.rtp.nack.rtp_history_ms > 0; -} - -void RtpVideoStreamReceiver::RequestPacketRetransmit( - const std::vector& sequence_numbers) { - rtp_rtcp_->SendNack(sequence_numbers); -} - -bool RtpVideoStreamReceiver::IsDecryptable() const { - return frames_decryptable_.load(); -} - -void RtpVideoStreamReceiver::OnInsertedPacket( - video_coding::PacketBuffer::InsertResult result) { - std::vector> assembled_frames; - { - MutexLock lock(&packet_buffer_lock_); - video_coding::PacketBuffer::Packet* first_packet = nullptr; - int max_nack_count; - int64_t min_recv_time; - int64_t max_recv_time; - std::vector> payloads; - RtpPacketInfos::vector_type packet_infos; - - bool frame_boundary = true; - for (auto& packet : result.packets) { - // PacketBuffer promisses frame boundaries are correctly set on each - // packet. Document that assumption with the DCHECKs. - RTC_DCHECK_EQ(frame_boundary, packet->is_first_packet_in_frame()); - int64_t unwrapped_rtp_seq_num = - rtp_seq_num_unwrapper_.Unwrap(packet->seq_num); - RTC_DCHECK(packet_infos_.count(unwrapped_rtp_seq_num) > 0); - RtpPacketInfo& packet_info = packet_infos_[unwrapped_rtp_seq_num]; - if (packet->is_first_packet_in_frame()) { - first_packet = packet.get(); - max_nack_count = packet->times_nacked; - min_recv_time = packet_info.receive_time().ms(); - max_recv_time = packet_info.receive_time().ms(); - } else { - max_nack_count = std::max(max_nack_count, packet->times_nacked); - min_recv_time = - std::min(min_recv_time, packet_info.receive_time().ms()); - max_recv_time = - std::max(max_recv_time, packet_info.receive_time().ms()); - } - payloads.emplace_back(packet->video_payload); - packet_infos.push_back(packet_info); - - frame_boundary = packet->is_last_packet_in_frame(); - if (packet->is_last_packet_in_frame()) { - auto depacketizer_it = - payload_type_map_.find(first_packet->payload_type); - RTC_CHECK(depacketizer_it != payload_type_map_.end()); - - rtc::scoped_refptr bitstream = - depacketizer_it->second->AssembleFrame(payloads); - if (!bitstream) { - // Failed to assemble a frame. Discard and continue. 
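The assembly loop above folds per-packet metadata into per-frame values: the NACK count is the maximum over the frame's packets, and the receive-time window is their min/max. A reduced sketch of that aggregation (simplified types; the real loop also collects payloads and RtpPacketInfo entries):

#include <algorithm>
#include <cstdint>
#include <vector>

struct PacketMeta {  // Illustrative stand-in for the buffered packet fields.
  int times_nacked;
  int64_t receive_time_ms;
};

struct FrameMeta {
  int max_nack_count = 0;
  int64_t min_recv_time_ms = 0;
  int64_t max_recv_time_ms = 0;
};

FrameMeta AggregateFrameMeta(const std::vector<PacketMeta>& packets) {
  FrameMeta frame;
  for (size_t i = 0; i < packets.size(); ++i) {
    if (i == 0) {
      frame.max_nack_count = packets[i].times_nacked;
      frame.min_recv_time_ms = packets[i].receive_time_ms;
      frame.max_recv_time_ms = packets[i].receive_time_ms;
    } else {
      frame.max_nack_count = std::max(frame.max_nack_count, packets[i].times_nacked);
      frame.min_recv_time_ms = std::min(frame.min_recv_time_ms, packets[i].receive_time_ms);
      frame.max_recv_time_ms = std::max(frame.max_recv_time_ms, packets[i].receive_time_ms);
    }
  }
  return frame;
}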
- continue; - } - - const video_coding::PacketBuffer::Packet& last_packet = *packet; - assembled_frames.push_back(std::make_unique( - first_packet->seq_num, // - last_packet.seq_num, // - last_packet.marker_bit, // - max_nack_count, // - min_recv_time, // - max_recv_time, // - first_packet->timestamp, // - ntp_estimator_.Estimate(first_packet->timestamp), // - last_packet.video_header.video_timing, // - first_packet->payload_type, // - first_packet->codec(), // - last_packet.video_header.rotation, // - last_packet.video_header.content_type, // - first_packet->video_header, // - last_packet.video_header.color_space, // - RtpPacketInfos(std::move(packet_infos)), // - std::move(bitstream))); - payloads.clear(); - packet_infos.clear(); - } - } - RTC_DCHECK(frame_boundary); - - if (result.buffer_cleared) { - packet_infos_.clear(); - } - } // packet_buffer_lock_ - - if (result.buffer_cleared) { - { - MutexLock lock(&sync_info_lock_); - last_received_rtp_system_time_.reset(); - last_received_keyframe_rtp_system_time_.reset(); - last_received_keyframe_rtp_timestamp_.reset(); - } - RequestKeyFrame(); - } - - for (auto& frame : assembled_frames) { - OnAssembledFrame(std::move(frame)); - } -} - -void RtpVideoStreamReceiver::OnAssembledFrame( - std::unique_ptr frame) { - RTC_DCHECK_RUN_ON(&network_tc_); - RTC_DCHECK(frame); - - const absl::optional& descriptor = - frame->GetRtpVideoHeader().generic; - - if (loss_notification_controller_ && descriptor) { - loss_notification_controller_->OnAssembledFrame( - frame->first_seq_num(), descriptor->frame_id, - absl::c_linear_search(descriptor->decode_target_indications, - DecodeTargetIndication::kDiscardable), - descriptor->dependencies); - } - - // If frames arrive before a key frame, they would not be decodable. - // In that case, request a key frame ASAP. - if (!has_received_frame_) { - if (frame->FrameType() != VideoFrameType::kVideoFrameKey) { - // `loss_notification_controller_`, if present, would have already - // requested a key frame when the first packet for the non-key frame - // had arrived, so no need to replicate the request. - if (!loss_notification_controller_) { - RequestKeyFrame(); - } - } - has_received_frame_ = true; - } - - MutexLock lock(&reference_finder_lock_); - // Reset `reference_finder_` if `frame` is new and the codec have changed. - if (current_codec_) { - bool frame_is_newer = - AheadOf(frame->Timestamp(), last_assembled_frame_rtp_timestamp_); - - if (frame->codec_type() != current_codec_) { - if (frame_is_newer) { - // When we reset the `reference_finder_` we don't want new picture ids - // to overlap with old picture ids. To ensure that doesn't happen we - // start from the `last_completed_picture_id_` and add an offset in - // case of reordering. - reference_finder_ = std::make_unique( - last_completed_picture_id_ + std::numeric_limits::max()); - current_codec_ = frame->codec_type(); - } else { - // Old frame from before the codec switch, discard it. 
- return; - } - } - - if (frame_is_newer) { - last_assembled_frame_rtp_timestamp_ = frame->Timestamp(); - } - } else { - current_codec_ = frame->codec_type(); - last_assembled_frame_rtp_timestamp_ = frame->Timestamp(); - } - - if (buffered_frame_decryptor_ != nullptr) { - buffered_frame_decryptor_->ManageEncryptedFrame(std::move(frame)); - } else if (frame_transformer_delegate_) { - frame_transformer_delegate_->TransformFrame(std::move(frame)); - } else { - OnCompleteFrames(reference_finder_->ManageFrame(std::move(frame))); - } -} - -void RtpVideoStreamReceiver::OnCompleteFrames( - RtpFrameReferenceFinder::ReturnVector frames) { - { - MutexLock lock(&last_seq_num_mutex_); - for (const auto& frame : frames) { - RtpFrameObject* rtp_frame = static_cast(frame.get()); - last_seq_num_for_pic_id_[rtp_frame->Id()] = rtp_frame->last_seq_num(); - } - } - for (auto& frame : frames) { - last_completed_picture_id_ = - std::max(last_completed_picture_id_, frame->Id()); - complete_frame_callback_->OnCompleteFrame(std::move(frame)); - } -} - -void RtpVideoStreamReceiver::OnDecryptedFrame( - std::unique_ptr frame) { - MutexLock lock(&reference_finder_lock_); - OnCompleteFrames(reference_finder_->ManageFrame(std::move(frame))); -} - -void RtpVideoStreamReceiver::OnDecryptionStatusChange( - FrameDecryptorInterface::Status status) { - frames_decryptable_.store( - (status == FrameDecryptorInterface::Status::kOk) || - (status == FrameDecryptorInterface::Status::kRecoverable)); -} - -void RtpVideoStreamReceiver::SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) { - RTC_DCHECK_RUN_ON(&network_tc_); - if (buffered_frame_decryptor_ == nullptr) { - buffered_frame_decryptor_ = - std::make_unique(this, this); - } - buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor)); -} - -void RtpVideoStreamReceiver::SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) { - RTC_DCHECK_RUN_ON(&network_tc_); - frame_transformer_delegate_ = - rtc::make_ref_counted( - this, std::move(frame_transformer), rtc::Thread::Current(), - config_.rtp.remote_ssrc); - frame_transformer_delegate_->Init(); -} - -void RtpVideoStreamReceiver::UpdateRtt(int64_t max_rtt_ms) { - if (nack_module_) - nack_module_->UpdateRtt(max_rtt_ms); -} - -absl::optional RtpVideoStreamReceiver::LastReceivedPacketMs() const { - MutexLock lock(&sync_info_lock_); - if (last_received_rtp_system_time_) { - return absl::optional(last_received_rtp_system_time_->ms()); - } - return absl::nullopt; -} - -absl::optional RtpVideoStreamReceiver::LastReceivedKeyframePacketMs() - const { - MutexLock lock(&sync_info_lock_); - if (last_received_keyframe_rtp_system_time_) { - return absl::optional( - last_received_keyframe_rtp_system_time_->ms()); - } - return absl::nullopt; -} - -void RtpVideoStreamReceiver::AddSecondarySink(RtpPacketSinkInterface* sink) { - RTC_DCHECK_RUN_ON(&worker_task_checker_); - RTC_DCHECK(!absl::c_linear_search(secondary_sinks_, sink)); - secondary_sinks_.push_back(sink); -} - -void RtpVideoStreamReceiver::RemoveSecondarySink( - const RtpPacketSinkInterface* sink) { - RTC_DCHECK_RUN_ON(&worker_task_checker_); - auto it = absl::c_find(secondary_sinks_, sink); - if (it == secondary_sinks_.end()) { - // We might be rolling-back a call whose setup failed mid-way. In such a - // case, it's simpler to remove "everything" rather than remember what - // has already been added. 
- RTC_LOG(LS_WARNING) << "Removal of unknown sink."; - return; - } - secondary_sinks_.erase(it); -} - -void RtpVideoStreamReceiver::ManageFrame( - std::unique_ptr frame) { - MutexLock lock(&reference_finder_lock_); - OnCompleteFrames(reference_finder_->ManageFrame(std::move(frame))); -} - -void RtpVideoStreamReceiver::ReceivePacket(const RtpPacketReceived& packet) { - if (packet.payload_size() == 0) { - // Padding or keep-alive packet. - // TODO(nisse): Could drop empty packets earlier, but need to figure out how - // they should be counted in stats. - NotifyReceiverOfEmptyPacket(packet.SequenceNumber()); - return; - } - if (packet.PayloadType() == config_.rtp.red_payload_type) { - ParseAndHandleEncapsulatingHeader(packet); - return; - } - - const auto type_it = payload_type_map_.find(packet.PayloadType()); - if (type_it == payload_type_map_.end()) { - return; - } - absl::optional parsed_payload = - type_it->second->Parse(packet.PayloadBuffer()); - if (parsed_payload == absl::nullopt) { - RTC_LOG(LS_WARNING) << "Failed parsing payload."; - return; - } - - OnReceivedPayloadData(std::move(parsed_payload->video_payload), packet, - parsed_payload->video_header); -} - -void RtpVideoStreamReceiver::ParseAndHandleEncapsulatingHeader( - const RtpPacketReceived& packet) { - RTC_DCHECK_RUN_ON(&worker_task_checker_); - if (packet.PayloadType() == config_.rtp.red_payload_type && - packet.payload_size() > 0) { - if (packet.payload()[0] == config_.rtp.ulpfec_payload_type) { - // Notify video_receiver about received FEC packets to avoid NACKing these - // packets. - NotifyReceiverOfEmptyPacket(packet.SequenceNumber()); - } - if (!ulpfec_receiver_->AddReceivedRedPacket( - packet, config_.rtp.ulpfec_payload_type)) { - return; - } - ulpfec_receiver_->ProcessReceivedFec(); - } -} - -// In the case of a video stream without picture ids and no rtx the -// RtpFrameReferenceFinder will need to know about padding to -// correctly calculate frame references. -void RtpVideoStreamReceiver::NotifyReceiverOfEmptyPacket(uint16_t seq_num) { - { - MutexLock lock(&reference_finder_lock_); - OnCompleteFrames(reference_finder_->PaddingReceived(seq_num)); - } - - video_coding::PacketBuffer::InsertResult insert_result; - { - MutexLock lock(&packet_buffer_lock_); - insert_result = packet_buffer_.InsertPadding(seq_num); - } - OnInsertedPacket(std::move(insert_result)); - - if (nack_module_) { - nack_module_->OnReceivedPacket(seq_num, /* is_keyframe = */ false, - /* is _recovered = */ false); - } - if (loss_notification_controller_) { - // TODO(bugs.webrtc.org/10336): Handle empty packets. - RTC_LOG(LS_WARNING) - << "LossNotificationController does not expect empty packets."; - } -} - -bool RtpVideoStreamReceiver::DeliverRtcp(const uint8_t* rtcp_packet, - size_t rtcp_packet_length) { - RTC_DCHECK_RUN_ON(&worker_task_checker_); - - if (!receiving_) { - return false; - } - - rtp_rtcp_->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length); - - int64_t rtt = 0; - rtp_rtcp_->RTT(config_.rtp.remote_ssrc, &rtt, nullptr, nullptr, nullptr); - if (rtt == 0) { - // Waiting for valid rtt. - return true; - } - uint32_t ntp_secs = 0; - uint32_t ntp_frac = 0; - uint32_t rtp_timestamp = 0; - uint32_t recieved_ntp_secs = 0; - uint32_t recieved_ntp_frac = 0; - if (rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, &recieved_ntp_secs, - &recieved_ntp_frac, &rtp_timestamp) != 0) { - // Waiting for RTCP. 
- return true; - } - NtpTime recieved_ntp(recieved_ntp_secs, recieved_ntp_frac); - int64_t time_since_recieved = - clock_->CurrentNtpInMilliseconds() - recieved_ntp.ToMs(); - // Don't use old SRs to estimate time. - if (time_since_recieved <= 1) { - ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); - absl::optional remote_to_local_clock_offset_ms = - ntp_estimator_.EstimateRemoteToLocalClockOffsetMs(); - if (remote_to_local_clock_offset_ms.has_value()) { - capture_clock_offset_updater_.SetRemoteToLocalClockOffset( - Int64MsToQ32x32(*remote_to_local_clock_offset_ms)); - } - } - - return true; -} - -void RtpVideoStreamReceiver::FrameContinuous(int64_t picture_id) { - if (!nack_module_) - return; - - int seq_num = -1; - { - MutexLock lock(&last_seq_num_mutex_); - auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id); - if (seq_num_it != last_seq_num_for_pic_id_.end()) - seq_num = seq_num_it->second; - } - if (seq_num != -1) - nack_module_->ClearUpTo(seq_num); -} - -void RtpVideoStreamReceiver::FrameDecoded(int64_t picture_id) { - int seq_num = -1; - { - MutexLock lock(&last_seq_num_mutex_); - auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id); - if (seq_num_it != last_seq_num_for_pic_id_.end()) { - seq_num = seq_num_it->second; - last_seq_num_for_pic_id_.erase(last_seq_num_for_pic_id_.begin(), - ++seq_num_it); - } - } - if (seq_num != -1) { - { - MutexLock lock(&packet_buffer_lock_); - packet_buffer_.ClearTo(seq_num); - int64_t unwrapped_rtp_seq_num = rtp_seq_num_unwrapper_.Unwrap(seq_num); - packet_infos_.erase(packet_infos_.begin(), - packet_infos_.upper_bound(unwrapped_rtp_seq_num)); - } - MutexLock lock(&reference_finder_lock_); - reference_finder_->ClearTo(seq_num); - } -} - -void RtpVideoStreamReceiver::SignalNetworkState(NetworkState state) { - rtp_rtcp_->SetRTCPStatus(state == kNetworkUp ? 
config_.rtp.rtcp_mode - : RtcpMode::kOff); -} - -void RtpVideoStreamReceiver::StartReceive() { - RTC_DCHECK_RUN_ON(&worker_task_checker_); - receiving_ = true; -} - -void RtpVideoStreamReceiver::StopReceive() { - RTC_DCHECK_RUN_ON(&worker_task_checker_); - receiving_ = false; -} - -void RtpVideoStreamReceiver::UpdateHistograms() { - FecPacketCounter counter = ulpfec_receiver_->GetPacketCounter(); - if (counter.first_packet_time_ms == -1) - return; - - int64_t elapsed_sec = - (clock_->TimeInMilliseconds() - counter.first_packet_time_ms) / 1000; - if (elapsed_sec < metrics::kMinRunTimeInSeconds) - return; - - if (counter.num_packets > 0) { - RTC_HISTOGRAM_PERCENTAGE( - "WebRTC.Video.ReceivedFecPacketsInPercent", - static_cast(counter.num_fec_packets * 100 / counter.num_packets)); - } - if (counter.num_fec_packets > 0) { - RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.RecoveredMediaPacketsInPercentOfFec", - static_cast(counter.num_recovered_packets * - 100 / counter.num_fec_packets)); - } - if (config_.rtp.ulpfec_payload_type != -1) { - RTC_HISTOGRAM_COUNTS_10000( - "WebRTC.Video.FecBitrateReceivedInKbps", - static_cast(counter.num_bytes * 8 / elapsed_sec / 1000)); - } -} - -void RtpVideoStreamReceiver::InsertSpsPpsIntoTracker(uint8_t payload_type) { - auto codec_params_it = pt_codec_params_.find(payload_type); - if (codec_params_it == pt_codec_params_.end()) - return; - - RTC_LOG(LS_INFO) << "Found out of band supplied codec parameters for" - " payload type: " - << static_cast(payload_type); - - H264SpropParameterSets sprop_decoder; - auto sprop_base64_it = - codec_params_it->second.find(cricket::kH264FmtpSpropParameterSets); - - if (sprop_base64_it == codec_params_it->second.end()) - return; - - if (!sprop_decoder.DecodeSprop(sprop_base64_it->second.c_str())) - return; - - tracker_.InsertSpsPpsNalus(sprop_decoder.sps_nalu(), - sprop_decoder.pps_nalu()); -} - -void RtpVideoStreamReceiver::UpdatePacketReceiveTimestamps( - const RtpPacketReceived& packet, - bool is_keyframe) { - Timestamp now = clock_->CurrentTime(); - { - MutexLock lock(&sync_info_lock_); - if (is_keyframe || - last_received_keyframe_rtp_timestamp_ == packet.Timestamp()) { - last_received_keyframe_rtp_timestamp_ = packet.Timestamp(); - last_received_keyframe_rtp_system_time_ = now; - } - last_received_rtp_system_time_ = now; - last_received_rtp_timestamp_ = packet.Timestamp(); - } - - // Periodically log the RTP header of incoming packets. - if (now.ms() - last_packet_log_ms_ > kPacketLogIntervalMs) { - rtc::StringBuilder ss; - ss << "Packet received on SSRC: " << packet.Ssrc() - << " with payload type: " << static_cast(packet.PayloadType()) - << ", timestamp: " << packet.Timestamp() - << ", sequence number: " << packet.SequenceNumber() - << ", arrival time: " << ToString(packet.arrival_time()); - int32_t time_offset; - if (packet.GetExtension(&time_offset)) { - ss << ", toffset: " << time_offset; - } - uint32_t send_time; - if (packet.GetExtension(&send_time)) { - ss << ", abs send time: " << send_time; - } - RTC_LOG(LS_INFO) << ss.str(); - last_packet_log_ms_ = now.ms(); - } -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h index d78b76a856..e69de29bb2 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h @@ -1,449 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 
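UpdateHistograms() above reports three derived FEC metrics, all of them plain integer ratios over the session counters. A standalone restatement (struct and function names are illustrative; the field meanings follow the histogram labels above):

#include <cstdint>

struct FecStats {  // Illustrative stand-in for the reported values.
  int received_fec_percent;      // FEC packets as a share of all packets.
  int recovered_percent_of_fec;  // Recovered media packets per FEC packet.
  int fec_bitrate_kbps;          // FEC byte count scaled to kbit/s.
};

FecStats ComputeFecStats(int64_t num_packets, int64_t num_fec_packets,
                         int64_t num_recovered_packets, int64_t num_bytes,
                         int64_t elapsed_sec) {
  FecStats s{0, 0, 0};
  if (num_packets > 0)
    s.received_fec_percent =
        static_cast<int>(num_fec_packets * 100 / num_packets);
  if (num_fec_packets > 0)
    s.recovered_percent_of_fec =
        static_cast<int>(num_recovered_packets * 100 / num_fec_packets);
  if (elapsed_sec > 0)
    s.fec_bitrate_kbps = static_cast<int>(num_bytes * 8 / elapsed_sec / 1000);
  return s;
}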
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef VIDEO_RTP_VIDEO_STREAM_RECEIVER_H_ -#define VIDEO_RTP_VIDEO_STREAM_RECEIVER_H_ - -#include -#include -#include -#include -#include -#include - -#include "absl/base/attributes.h" -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "api/crypto/frame_decryptor_interface.h" -#include "api/sequence_checker.h" -#include "api/units/timestamp.h" -#include "api/video/color_space.h" -#include "api/video/video_codec_type.h" -#include "api/video_codecs/video_codec.h" -#include "call/rtp_packet_sink_interface.h" -#include "call/syncable.h" -#include "call/video_receive_stream.h" -#include "modules/rtp_rtcp/include/receive_statistics.h" -#include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h" -#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h" -#include "modules/rtp_rtcp/source/capture_clock_offset_updater.h" -#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" -#include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "modules/rtp_rtcp/source/rtp_video_header.h" -#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" -#include "modules/video_coding/h264_sps_pps_tracker.h" -#include "modules/video_coding/loss_notification_controller.h" -#ifndef DISABLE_H265 -#include "modules/video_coding/h265_vps_sps_pps_tracker.h" -#endif -#include "modules/video_coding/packet_buffer.h" -#include "modules/video_coding/rtp_frame_reference_finder.h" -#include "modules/video_coding/unique_timestamp_counter.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/experiments/field_trial_parser.h" -#include "rtc_base/numerics/sequence_number_util.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/system/no_unique_address.h" -#include "rtc_base/thread_annotations.h" -#include "video/buffered_frame_decryptor.h" -#include "video/rtp_video_stream_receiver_frame_transformer_delegate.h" - -namespace webrtc { - -class DEPRECATED_NackModule; -class PacketRouter; -class ProcessThread; -class ReceiveStatistics; -class ReceiveStatisticsProxy; -class RtcpRttStats; -class RtpPacketReceived; -class Transport; -class UlpfecReceiver; - -class RtpVideoStreamReceiver : public LossNotificationSender, - public RecoveredPacketReceiver, - public RtpPacketSinkInterface, - public KeyFrameRequestSender, - public OnDecryptedFrameCallback, - public OnDecryptionStatusChangeCallback, - public RtpVideoFrameReceiver { - public: - // A complete frame is a frame which has received all its packets and all its - // references are known. - class OnCompleteFrameCallback { - public: - virtual ~OnCompleteFrameCallback() {} - virtual void OnCompleteFrame(std::unique_ptr frame) = 0; - }; - - // DEPRECATED due to dependency on ReceiveStatisticsProxy. - RtpVideoStreamReceiver( - Clock* clock, - Transport* transport, - RtcpRttStats* rtt_stats, - // The packet router is optional; if provided, the RtpRtcp module for this - // stream is registered as a candidate for sending REMB and transport - // feedback. 
- PacketRouter* packet_router, - const VideoReceiveStream::Config* config, - ReceiveStatistics* rtp_receive_statistics, - ReceiveStatisticsProxy* receive_stats_proxy, - ProcessThread* process_thread, - NackSender* nack_sender, - // The KeyFrameRequestSender is optional; if not provided, key frame - // requests are sent via the internal RtpRtcp module. - KeyFrameRequestSender* keyframe_request_sender, - OnCompleteFrameCallback* complete_frame_callback, - rtc::scoped_refptr frame_decryptor, - rtc::scoped_refptr frame_transformer); - - RtpVideoStreamReceiver( - Clock* clock, - Transport* transport, - RtcpRttStats* rtt_stats, - // The packet router is optional; if provided, the RtpRtcp module for this - // stream is registered as a candidate for sending REMB and transport - // feedback. - PacketRouter* packet_router, - const VideoReceiveStream::Config* config, - ReceiveStatistics* rtp_receive_statistics, - RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, - RtcpCnameCallback* rtcp_cname_callback, - ProcessThread* process_thread, - NackSender* nack_sender, - // The KeyFrameRequestSender is optional; if not provided, key frame - // requests are sent via the internal RtpRtcp module. - KeyFrameRequestSender* keyframe_request_sender, - OnCompleteFrameCallback* complete_frame_callback, - rtc::scoped_refptr frame_decryptor, - rtc::scoped_refptr frame_transformer); - ~RtpVideoStreamReceiver() override; - - void AddReceiveCodec(uint8_t payload_type, - VideoCodecType codec_type, - const std::map& codec_params, - bool raw_payload); - - ABSL_DEPRECATED("Use AddReceiveCodec above") - void AddReceiveCodec(uint8_t payload_type, - const VideoCodec& video_codec, - const std::map& codec_params, - bool raw_payload) { - AddReceiveCodec(payload_type, video_codec.codecType, codec_params, - raw_payload); - } - - void StartReceive(); - void StopReceive(); - - // Produces the transport-related timestamps; current_delay_ms is left unset. - absl::optional GetSyncInfo() const; - - bool DeliverRtcp(const uint8_t* rtcp_packet, size_t rtcp_packet_length); - - void FrameContinuous(int64_t seq_num); - - void FrameDecoded(int64_t seq_num); - - void SignalNetworkState(NetworkState state); - - // Returns number of different frames seen. - int GetUniqueFramesSeen() const { - RTC_DCHECK_RUN_ON(&worker_task_checker_); - return frame_counter_.GetUniqueSeen(); - } - - // Implements RtpPacketSinkInterface. - void OnRtpPacket(const RtpPacketReceived& packet) override; - - // Public only for tests. - void OnReceivedPayloadData(rtc::CopyOnWriteBuffer codec_payload, - const RtpPacketReceived& rtp_packet, - const RTPVideoHeader& video); - - // Implements RecoveredPacketReceiver. - void OnRecoveredPacket(const uint8_t* packet, size_t packet_length) override; - - // Send an RTCP keyframe request. - void RequestKeyFrame() override; - - // Implements LossNotificationSender. - void SendLossNotification(uint16_t last_decoded_seq_num, - uint16_t last_received_seq_num, - bool decodability_flag, - bool buffering_allowed) override; - - bool IsUlpfecEnabled() const; - bool IsRetransmissionsEnabled() const; - - // Returns true if a decryptor is attached and frames can be decrypted. - // Updated by OnDecryptionStatusChangeCallback. Note this refers to Frame - // Decryption not SRTP. - bool IsDecryptable() const; - - // Don't use, still experimental. - void RequestPacketRetransmit(const std::vector& sequence_numbers); - - void OnCompleteFrames(RtpFrameReferenceFinder::ReturnVector frames); - - // Implements OnDecryptedFrameCallback. 
- void OnDecryptedFrame(std::unique_ptr frame) override; - - // Implements OnDecryptionStatusChangeCallback. - void OnDecryptionStatusChange( - FrameDecryptorInterface::Status status) override; - - // Optionally set a frame decryptor after a stream has started. This will not - // reset the decoder state. - void SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor); - - // Sets a frame transformer after a stream has started, if no transformer - // has previously been set. Does not reset the decoder state. - void SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer); - - // Called by VideoReceiveStream when stats are updated. - void UpdateRtt(int64_t max_rtt_ms); - - absl::optional LastReceivedPacketMs() const; - absl::optional LastReceivedKeyframePacketMs() const; - - // RtpDemuxer only forwards a given RTP packet to one sink. However, some - // sinks, such as FlexFEC, might wish to be informed of all of the packets - // a given sink receives (or any set of sinks). They may do so by registering - // themselves as secondary sinks. - void AddSecondarySink(RtpPacketSinkInterface* sink); - void RemoveSecondarySink(const RtpPacketSinkInterface* sink); - - private: - // Implements RtpVideoFrameReceiver. - void ManageFrame(std::unique_ptr frame) override; - - // Used for buffering RTCP feedback messages and sending them all together. - // Note: - // 1. Key frame requests and NACKs are mutually exclusive, with the - // former taking precedence over the latter. - // 2. Loss notifications are orthogonal to either. (That is, may be sent - // alongside either.) - class RtcpFeedbackBuffer : public KeyFrameRequestSender, - public NackSender, - public LossNotificationSender { - public: - RtcpFeedbackBuffer(KeyFrameRequestSender* key_frame_request_sender, - NackSender* nack_sender, - LossNotificationSender* loss_notification_sender); - - ~RtcpFeedbackBuffer() override = default; - - // KeyFrameRequestSender implementation. - void RequestKeyFrame() RTC_LOCKS_EXCLUDED(mutex_) override; - - // NackSender implementation. - void SendNack(const std::vector& sequence_numbers, - bool buffering_allowed) RTC_LOCKS_EXCLUDED(mutex_) override; - - // LossNotificationSender implementation. - void SendLossNotification(uint16_t last_decoded_seq_num, - uint16_t last_received_seq_num, - bool decodability_flag, - bool buffering_allowed) - RTC_LOCKS_EXCLUDED(mutex_) override; - - // Send all RTCP feedback messages buffered thus far. - void SendBufferedRtcpFeedback() RTC_LOCKS_EXCLUDED(mutex_); - - private: - // LNTF-related state. - struct LossNotificationState { - LossNotificationState(uint16_t last_decoded_seq_num, - uint16_t last_received_seq_num, - bool decodability_flag) - : last_decoded_seq_num(last_decoded_seq_num), - last_received_seq_num(last_received_seq_num), - decodability_flag(decodability_flag) {} - - uint16_t last_decoded_seq_num; - uint16_t last_received_seq_num; - bool decodability_flag; - }; - struct ConsumedRtcpFeedback { - bool request_key_frame = false; - std::vector nack_sequence_numbers; - absl::optional lntf_state; - }; - - ConsumedRtcpFeedback ConsumeRtcpFeedback() RTC_LOCKS_EXCLUDED(mutex_); - ConsumedRtcpFeedback ConsumeRtcpFeedbackLocked() - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - // This method is called both with and without mutex_ held. 
- void SendRtcpFeedback(ConsumedRtcpFeedback feedback); - - KeyFrameRequestSender* const key_frame_request_sender_; - NackSender* const nack_sender_; - LossNotificationSender* const loss_notification_sender_; - - // NACKs are accessible from two threads due to nack_module_ being a module. - Mutex mutex_; - - // Key-frame-request-related state. - bool request_key_frame_ RTC_GUARDED_BY(mutex_); - - // NACK-related state. - std::vector nack_sequence_numbers_ RTC_GUARDED_BY(mutex_); - - absl::optional lntf_state_ RTC_GUARDED_BY(mutex_); - }; - enum ParseGenericDependenciesResult { - kDropPacket, - kHasGenericDescriptor, - kNoGenericDescriptor - }; - - // Entry point doing non-stats work for a received packet. Called - // for the same packet both before and after RED decapsulation. - void ReceivePacket(const RtpPacketReceived& packet); - // Parses and handles RED headers. - // This function assumes that it's being called from only one thread. - void ParseAndHandleEncapsulatingHeader(const RtpPacketReceived& packet); - void NotifyReceiverOfEmptyPacket(uint16_t seq_num); - void UpdateHistograms(); - bool IsRedEnabled() const; - void InsertSpsPpsIntoTracker(uint8_t payload_type); - void OnInsertedPacket(video_coding::PacketBuffer::InsertResult result); - ParseGenericDependenciesResult ParseGenericDependenciesExtension( - const RtpPacketReceived& rtp_packet, - RTPVideoHeader* video_header) RTC_RUN_ON(worker_task_checker_); - void OnAssembledFrame(std::unique_ptr frame) - RTC_LOCKS_EXCLUDED(packet_buffer_lock_); - void UpdatePacketReceiveTimestamps(const RtpPacketReceived& packet, - bool is_keyframe) - RTC_RUN_ON(worker_task_checker_); - - Clock* const clock_; - // Ownership of this object lies with VideoReceiveStream, which owns `this`. - const VideoReceiveStream::Config& config_; - PacketRouter* const packet_router_; - ProcessThread* const process_thread_; - - RemoteNtpTimeEstimator ntp_estimator_; - - RtpHeaderExtensionMap rtp_header_extensions_; - // Set by the field trial WebRTC-ForcePlayoutDelay to override any playout - // delay that is specified in the received packets. - FieldTrialOptional forced_playout_delay_max_ms_; - FieldTrialOptional forced_playout_delay_min_ms_; - ReceiveStatistics* const rtp_receive_statistics_; - std::unique_ptr ulpfec_receiver_; - - RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_task_checker_; - bool receiving_ RTC_GUARDED_BY(worker_task_checker_); - int64_t last_packet_log_ms_ RTC_GUARDED_BY(worker_task_checker_); - - const std::unique_ptr rtp_rtcp_; - - OnCompleteFrameCallback* complete_frame_callback_; - KeyFrameRequestSender* const keyframe_request_sender_; - - RtcpFeedbackBuffer rtcp_feedback_buffer_; - std::unique_ptr nack_module_; - std::unique_ptr loss_notification_controller_; - - mutable Mutex packet_buffer_lock_; - video_coding::PacketBuffer packet_buffer_ RTC_GUARDED_BY(packet_buffer_lock_); - UniqueTimestampCounter frame_counter_ RTC_GUARDED_BY(worker_task_checker_); - SeqNumUnwrapper frame_id_unwrapper_ - RTC_GUARDED_BY(worker_task_checker_); - - // Video structure provided in the dependency descriptor in a first packet - // of a key frame. It is required to parse dependency descriptor in the - // following delta packets. - std::unique_ptr video_structure_ - RTC_GUARDED_BY(worker_task_checker_); - // Frame id of the last frame with the attached video structure. 
- // absl::nullopt when `video_structure_ == nullptr`; - absl::optional video_structure_frame_id_ - RTC_GUARDED_BY(worker_task_checker_); - - Mutex reference_finder_lock_; - std::unique_ptr reference_finder_ - RTC_GUARDED_BY(reference_finder_lock_); - absl::optional current_codec_; - uint32_t last_assembled_frame_rtp_timestamp_; - - Mutex last_seq_num_mutex_; - std::map last_seq_num_for_pic_id_ - RTC_GUARDED_BY(last_seq_num_mutex_); - video_coding::H264SpsPpsTracker tracker_; - - // Maps payload id to the depacketizer. - std::map> payload_type_map_; - - -#ifndef DISABLE_H265 - video_coding::H265VpsSpsPpsTracker h265_tracker_; -#endif - - // TODO(johan): Remove pt_codec_params_ once - // https://bugs.chromium.org/p/webrtc/issues/detail?id=6883 is resolved. - // Maps a payload type to a map of out-of-band supplied codec parameters. - std::map> pt_codec_params_; - int16_t last_payload_type_ = -1; - - bool has_received_frame_; - - std::vector secondary_sinks_ - RTC_GUARDED_BY(worker_task_checker_); - - // Info for GetSyncInfo is updated on network or worker thread, and queried on - // the worker thread. - mutable Mutex sync_info_lock_; - absl::optional last_received_rtp_timestamp_ - RTC_GUARDED_BY(sync_info_lock_); - absl::optional last_received_keyframe_rtp_timestamp_ - RTC_GUARDED_BY(sync_info_lock_); - absl::optional last_received_rtp_system_time_ - RTC_GUARDED_BY(sync_info_lock_); - absl::optional last_received_keyframe_rtp_system_time_ - RTC_GUARDED_BY(sync_info_lock_); - - // Used to validate the buffered frame decryptor is always run on the correct - // thread. - SequenceChecker network_tc_; - // Handles incoming encrypted frames and forwards them to the - // rtp_reference_finder if they are decryptable. - std::unique_ptr buffered_frame_decryptor_ - RTC_PT_GUARDED_BY(network_tc_); - std::atomic frames_decryptable_; - absl::optional last_color_space_; - - AbsoluteCaptureTimeInterpolator absolute_capture_time_interpolator_ - RTC_GUARDED_BY(worker_task_checker_); - - CaptureClockOffsetUpdater capture_clock_offset_updater_ - RTC_GUARDED_BY(worker_task_checker_); - - int64_t last_completed_picture_id_ = 0; - - rtc::scoped_refptr - frame_transformer_delegate_; - - SeqNumUnwrapper rtp_seq_num_unwrapper_ - RTC_GUARDED_BY(packet_buffer_lock_); - std::map packet_infos_ - RTC_GUARDED_BY(packet_buffer_lock_); -}; - -} // namespace webrtc - -#endif // VIDEO_RTP_VIDEO_STREAM_RECEIVER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc index ce0b7a14f4..2df75ff630 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc @@ -17,7 +17,6 @@ #include #include "absl/algorithm/container.h" -#include "absl/base/macros.h" #include "absl/memory/memory.h" #include "absl/types/optional.h" #include "api/video/video_codec_type.h" @@ -26,7 +25,6 @@ #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/rtp_cvo.h" -#include "modules/rtp_rtcp/include/ulpfec_receiver.h" #include "modules/rtp_rtcp/source/create_video_rtp_depacketizer.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_format.h" @@ -35,6 +33,7 @@ #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include 
"modules/rtp_rtcp/source/rtp_rtcp_config.h" +#include "modules/rtp_rtcp/source/ulpfec_receiver.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h" #include "modules/video_coding/frame_object.h" @@ -43,10 +42,8 @@ #include "modules/video_coding/nack_requester.h" #include "modules/video_coding/packet_buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" #include "system_wrappers/include/ntp_time.h" @@ -58,11 +55,13 @@ namespace { constexpr int kPacketBufferStartSize = 512; constexpr int kPacketBufferMaxSize = 2048; -int PacketBufferMaxSize() { +constexpr int kMaxPacketAgeToNack = 450; + +int PacketBufferMaxSize(const FieldTrialsView& field_trials) { // The group here must be a positive power of 2, in which case that is used as // size. All other values shall result in the default value being used. const std::string group_name = - webrtc::field_trial::FindFullName("WebRTC-PacketBufferMaxSize"); + field_trials.Lookup("WebRTC-PacketBufferMaxSize"); int packet_buffer_max_size = kPacketBufferMaxSize; if (!group_name.empty() && (sscanf(group_name.c_str(), "%d", &packet_buffer_max_size) != 1 || @@ -83,7 +82,8 @@ std::unique_ptr CreateRtpRtcpModule( RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, RtcpCnameCallback* rtcp_cname_callback, bool non_sender_rtt_measurement, - uint32_t local_ssrc) { + uint32_t local_ssrc, + RtcEventLog* rtc_event_log) { RtpRtcpInterface::Configuration configuration; configuration.clock = clock; configuration.audio = false; @@ -96,6 +96,7 @@ std::unique_ptr CreateRtpRtcpModule( configuration.rtcp_cname_callback = rtcp_cname_callback; configuration.local_media_ssrc = local_ssrc; configuration.non_sender_rtt_measurement = non_sender_rtt_measurement; + configuration.event_log = rtc_event_log; std::unique_ptr rtp_rtcp = ModuleRtpRtcpImpl2::Create(configuration); @@ -107,17 +108,40 @@ std::unique_ptr CreateRtpRtcpModule( std::unique_ptr MaybeConstructNackModule( TaskQueueBase* current_queue, NackPeriodicProcessor* nack_periodic_processor, - const VideoReceiveStream::Config& config, + const NackConfig& nack, Clock* clock, NackSender* nack_sender, - KeyFrameRequestSender* keyframe_request_sender) { - if (config.rtp.nack.rtp_history_ms == 0) + KeyFrameRequestSender* keyframe_request_sender, + const FieldTrialsView& field_trials) { + if (nack.rtp_history_ms == 0) return nullptr; // TODO(bugs.webrtc.org/12420): pass rtp_history_ms to the nack module. return std::make_unique(current_queue, nack_periodic_processor, clock, nack_sender, - keyframe_request_sender); + keyframe_request_sender, field_trials); +} + +std::unique_ptr MaybeConstructUlpfecReceiver( + uint32_t remote_ssrc, + int red_payload_type, + int ulpfec_payload_type, + rtc::ArrayView extensions, + RecoveredPacketReceiver* callback, + Clock* clock) { + RTC_DCHECK_GE(red_payload_type, -1); + RTC_DCHECK_GE(ulpfec_payload_type, -1); + if (red_payload_type == -1) + return nullptr; + + // TODO(tommi, brandtr): Consider including this check too once + // `UlpfecReceiver` has been updated to not consider both red and ulpfec + // payload ids. 
+ // if (ulpfec_payload_type == -1) + // return nullptr; + + return std::make_unique(remote_ssrc, ulpfec_payload_type, + callback, extensions, clock); } static const int kPacketLogIntervalMs = 10000; @@ -203,23 +227,30 @@ void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendBufferedRtcpFeedback() { } } +void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::ClearLossNotificationState() { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + lntf_state_.reset(); +} + RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( TaskQueueBase* current_queue, Clock* clock, Transport* transport, RtcpRttStats* rtt_stats, PacketRouter* packet_router, - const VideoReceiveStream::Config* config, + const VideoReceiveStreamInterface::Config* config, ReceiveStatistics* rtp_receive_statistics, RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, RtcpCnameCallback* rtcp_cname_callback, NackPeriodicProcessor* nack_periodic_processor, - NackSender* nack_sender, - KeyFrameRequestSender* keyframe_request_sender, OnCompleteFrameCallback* complete_frame_callback, rtc::scoped_refptr frame_decryptor, - rtc::scoped_refptr frame_transformer) - : clock_(clock), + rtc::scoped_refptr frame_transformer, + const FieldTrialsView& field_trials, + RtcEventLog* event_log) + : field_trials_(field_trials), + worker_queue_(current_queue), + clock_(clock), config_(*config), packet_router_(packet_router), ntp_estimator_(clock), @@ -227,9 +258,15 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( forced_playout_delay_max_ms_("max_ms", absl::nullopt), forced_playout_delay_min_ms_("min_ms", absl::nullopt), rtp_receive_statistics_(rtp_receive_statistics), - ulpfec_receiver_(UlpfecReceiver::Create(config->rtp.remote_ssrc, - this, - config->rtp.extensions)), + ulpfec_receiver_( + MaybeConstructUlpfecReceiver(config->rtp.remote_ssrc, + config->rtp.red_payload_type, + config->rtp.ulpfec_payload_type, + config->rtp.extensions, + this, + clock_)), + red_payload_type_(config_.rtp.red_payload_type), + packet_sink_(config->rtp.packet_sink_), receiving_(false), last_packet_log_ms_(-1), rtp_rtcp_(CreateRtpRtcpModule( @@ -240,19 +277,23 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( rtcp_packet_type_counter_observer, rtcp_cname_callback, config_.rtp.rtcp_xr.receiver_reference_time_report, - config_.rtp.local_ssrc)), + config_.rtp.local_ssrc, + event_log)), + nack_periodic_processor_(nack_periodic_processor), complete_frame_callback_(complete_frame_callback), - keyframe_request_sender_(keyframe_request_sender), + keyframe_request_method_(config_.rtp.keyframe_method), // TODO(bugs.webrtc.org/10336): Let `rtcp_feedback_buffer_` communicate // directly with `rtp_rtcp_`. - rtcp_feedback_buffer_(this, nack_sender, this), + rtcp_feedback_buffer_(this, this, this), nack_module_(MaybeConstructNackModule(current_queue, nack_periodic_processor, - config_, + config_.rtp.nack, clock_, &rtcp_feedback_buffer_, - &rtcp_feedback_buffer_)), - packet_buffer_(kPacketBufferStartSize, PacketBufferMaxSize()), + &rtcp_feedback_buffer_, + field_trials_)), + packet_buffer_(kPacketBufferStartSize, + PacketBufferMaxSize(field_trials_)), reference_finder_(std::make_unique()), has_received_frame_(false), frames_decryptable_(false), @@ -272,24 +313,13 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( rtp_rtcp_->SetRTCPStatus(config_.rtp.rtcp_mode); rtp_rtcp_->SetRemoteSSRC(config_.rtp.remote_ssrc); - static const int kMaxPacketAgeToNack = 450; - const int max_reordering_threshold = (config_.rtp.nack.rtp_history_ms > 0) - ? 
kMaxPacketAgeToNack - : kDefaultMaxReorderingThreshold; - rtp_receive_statistics_->SetMaxReorderingThreshold(config_.rtp.remote_ssrc, - max_reordering_threshold); - // TODO(nisse): For historic reasons, we applied the above - // max_reordering_threshold also for RTX stats, which makes little sense since - // we don't NACK rtx packets. Consider deleting the below block, and rely on - // the default threshold. - if (config_.rtp.rtx_ssrc) { - rtp_receive_statistics_->SetMaxReorderingThreshold( - config_.rtp.rtx_ssrc, max_reordering_threshold); + if (config_.rtp.nack.rtp_history_ms > 0) { + rtp_receive_statistics_->SetMaxReorderingThreshold(config_.rtp.remote_ssrc, + kMaxPacketAgeToNack); } - ParseFieldTrial( {&forced_playout_delay_max_ms_, &forced_playout_delay_min_ms_}, - field_trial::FindFullName("WebRTC-ForcePlayoutDelay")); + field_trials_.Lookup("WebRTC-ForcePlayoutDelay")); if (config_.rtp.lntf.enabled) { loss_notification_controller_ = @@ -300,7 +330,7 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( // Only construct the encrypted receiver if frame encryption is enabled. if (config_.crypto_options.sframe.require_frame_encryption) { buffered_frame_decryptor_ = - std::make_unique(this, this); + std::make_unique(this, this, field_trials_); if (frame_decryptor != nullptr) { buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor)); } @@ -318,7 +348,7 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( RtpVideoStreamReceiver2::~RtpVideoStreamReceiver2() { if (packet_router_) packet_router_->RemoveReceiveRtpModule(rtp_rtcp_.get()); - UpdateHistograms(); + ulpfec_receiver_.reset(); if (frame_transformer_delegate_) frame_transformer_delegate_->Reset(); } @@ -329,8 +359,8 @@ void RtpVideoStreamReceiver2::AddReceiveCodec( const std::map& codec_params, bool raw_payload) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - if (codec_params.count(cricket::kH264FmtpSpsPpsIdrInKeyframe) || - field_trial::IsEnabled("WebRTC-SpsPpsIdrIsH264Keyframe")) { + if (codec_params.count(cricket::kH264FmtpSpsPpsIdrInKeyframe) > 0 || + field_trials_.IsEnabled("WebRTC-SpsPpsIdrIsH264Keyframe")) { packet_buffer_.ForceSpsPpsIdrIsH264Keyframe(); } payload_type_map_.emplace( @@ -339,6 +369,41 @@ void RtpVideoStreamReceiver2::AddReceiveCodec( pt_codec_params_.emplace(payload_type, codec_params); } +void RtpVideoStreamReceiver2::RemoveReceiveCodec(uint8_t payload_type) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + auto codec_params_it = pt_codec_params_.find(payload_type); + if (codec_params_it == pt_codec_params_.end()) + return; + + const bool sps_pps_idr_in_key_frame = + codec_params_it->second.count(cricket::kH264FmtpSpsPpsIdrInKeyframe) > 0; + + pt_codec_params_.erase(codec_params_it); + payload_type_map_.erase(payload_type); + + if (sps_pps_idr_in_key_frame) { + bool reset_setting = true; + for (auto& [unused, codec_params] : pt_codec_params_) { + if (codec_params.count(cricket::kH264FmtpSpsPpsIdrInKeyframe) > 0) { + reset_setting = false; + break; + } + } + + if (reset_setting) { + packet_buffer_.ResetSpsPpsIdrIsH264Keyframe(); + } + } +} + +void RtpVideoStreamReceiver2::RemoveReceiveCodecs() { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + + pt_codec_params_.clear(); + payload_type_map_.clear(); + packet_buffer_.ResetSpsPpsIdrIsH264Keyframe(); +} + absl::optional RtpVideoStreamReceiver2::GetSyncInfo() const { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); Syncable::Info info; @@ -360,11 +425,11 @@ absl::optional RtpVideoStreamReceiver2::GetSyncInfo() const { return info; } -// 
RTC_RUN_ON(packet_sequence_checker_) RtpVideoStreamReceiver2::ParseGenericDependenciesResult RtpVideoStreamReceiver2::ParseGenericDependenciesExtension( const RtpPacketReceived& rtp_packet, RTPVideoHeader* video_header) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); if (rtp_packet.HasExtension()) { webrtc::DependencyDescriptor dependency_descriptor; if (!rtp_packet.GetExtension( @@ -477,15 +542,13 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( int64_t unwrapped_rtp_seq_num = rtp_seq_num_unwrapper_.Unwrap(rtp_packet.SequenceNumber()); - auto& packet_info = + + RtpPacketInfo& packet_info = packet_infos_ - .emplace( - unwrapped_rtp_seq_num, - RtpPacketInfo( - rtp_packet.Ssrc(), rtp_packet.Csrcs(), rtp_packet.Timestamp(), - /*audio_level=*/absl::nullopt, - rtp_packet.GetExtension(), - /*receive_time_ms=*/clock_->CurrentTime())) + .emplace(unwrapped_rtp_seq_num, + RtpPacketInfo(rtp_packet.Ssrc(), rtp_packet.Csrcs(), + rtp_packet.Timestamp(), + /*receive_time_ms=*/clock_->CurrentTime())) .first->second; // Try to extrapolate absolute capture time if it is missing. @@ -495,7 +558,8 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( packet_info.csrcs()), packet_info.rtp_timestamp(), // Assume frequency is the same one for all video frames. - kVideoPayloadTypeFrequency, packet_info.absolute_capture_time())); + kVideoPayloadTypeFrequency, + rtp_packet.GetExtension())); RTPVideoHeader& video_header = packet->video_header; video_header.rotation = kVideoRotation_0; @@ -522,8 +586,18 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( rtp_packet, video_header.frame_type == VideoFrameType::kVideoFrameKey); } - if (generic_descriptor_state == kDropPacket) + if (generic_descriptor_state == kDropPacket) { + Timestamp now = clock_->CurrentTime(); + if (video_structure_ == nullptr && + next_keyframe_request_for_missing_video_structure_ < now) { + // No video structure received yet, most likely part of the initial + // keyframe was lost. + RequestKeyFrame(); + next_keyframe_request_for_missing_video_structure_ = + now + TimeDelta::Seconds(1); + } return; + } // Color space should only be transmitted in the last packet of a frame, // therefore, neglect it otherwise so that last_color_space_ is not reset by @@ -603,7 +677,7 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( case video_coding::H264SpsPpsTracker::kRequestKeyframe: rtcp_feedback_buffer_.RequestKeyFrame(); rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); - ABSL_FALLTHROUGH_INTENDED; + [[fallthrough]]; case video_coding::H264SpsPpsTracker::kDrop: return; case video_coding::H264SpsPpsTracker::kInsert: @@ -656,18 +730,18 @@ void RtpVideoStreamReceiver2::OnRecoveredPacket(const uint8_t* rtp_packet, RtpPacketReceived packet; if (!packet.Parse(rtp_packet, rtp_packet_length)) return; - if (packet.PayloadType() == config_.rtp.red_payload_type) { + if (packet.PayloadType() == red_payload_type_) { RTC_LOG(LS_WARNING) << "Discarding recovered packet with RED encapsulation"; return; } packet.IdentifyExtensions(rtp_header_extensions_); packet.set_payload_type_frequency(kVideoPayloadTypeFrequency); - // TODO(nisse): UlpfecReceiverImpl::ProcessReceivedFec passes both - // original (decapsulated) media packets and recovered packets to - // this callback. We need a way to distinguish, for setting - // packet.recovered() correctly. Ideally, move RED decapsulation out - // of the Ulpfec implementation. 
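// ---------------------------------------------------------------------------
// [Editor's illustration - not part of the patch text above or below.]
// The kDropPacket branch added in the hunk above re-requests a keyframe when
// the dependency descriptor cannot be parsed because the video structure from
// the keyframe was lost, and rate-limits that request to once per second via
// next_keyframe_request_for_missing_video_structure_. A minimal standalone
// sketch of the same throttle pattern, with hypothetical names and only the
// standard library:

#include <chrono>

class KeyFrameRequestThrottle {
 public:
  // Returns true when a request is allowed; the caller then sends the request.
  bool AllowRequest() {
    const auto now = std::chrono::steady_clock::now();
    if (now < next_allowed_)
      return false;
    // Back off for one second, mirroring TimeDelta::Seconds(1) in the patch.
    next_allowed_ = now + std::chrono::seconds(1);
    return true;
  }

 private:
  std::chrono::steady_clock::time_point next_allowed_ =
      std::chrono::steady_clock::time_point::min();
};
// ---------------------------------------------------------------------------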
+ // TODO(bugs.webrtc.org/7135): UlpfecReceiverImpl::ProcessReceivedFec passes + // both original (decapsulated) media packets and recovered packets to this + // callback. We need a way to distinguish, for setting packet.recovered() + // correctly. Ideally, move RED decapsulation out of the Ulpfec + // implementation. ReceivePacket(packet); } @@ -689,8 +763,8 @@ void RtpVideoStreamReceiver2::OnRtpPacket(const RtpPacketReceived& packet) { rtp_receive_statistics_->OnRtpPacket(packet); } - if (config_.rtp.packet_sink_) { - config_.rtp.packet_sink_->OnRtpPacket(packet); + if (packet_sink_) { + packet_sink_->OnRtpPacket(packet); } } @@ -699,13 +773,19 @@ void RtpVideoStreamReceiver2::RequestKeyFrame() { // TODO(bugs.webrtc.org/10336): Allow the sender to ignore key frame requests // issued by anything other than the LossNotificationController if it (the // sender) is relying on LNTF alone. - if (keyframe_request_sender_) { - keyframe_request_sender_->RequestKeyFrame(); - } else { + if (keyframe_request_method_ == KeyFrameReqMethod::kPliRtcp) { rtp_rtcp_->SendPictureLossIndication(); + } else if (keyframe_request_method_ == KeyFrameReqMethod::kFirRtcp) { + rtp_rtcp_->SendFullIntraRequest(); } } +void RtpVideoStreamReceiver2::SendNack( + const std::vector& sequence_numbers, + bool /*buffering_allowed*/) { + rtp_rtcp_->SendNack(sequence_numbers); +} + void RtpVideoStreamReceiver2::SendLossNotification( uint16_t last_decoded_seq_num, uint16_t last_received_seq_num, @@ -716,28 +796,14 @@ void RtpVideoStreamReceiver2::SendLossNotification( decodability_flag, buffering_allowed); } -bool RtpVideoStreamReceiver2::IsUlpfecEnabled() const { - return config_.rtp.ulpfec_payload_type != -1; -} - -bool RtpVideoStreamReceiver2::IsRetransmissionsEnabled() const { - return config_.rtp.nack.rtp_history_ms > 0; -} - -void RtpVideoStreamReceiver2::RequestPacketRetransmit( - const std::vector& sequence_numbers) { - RTC_DCHECK_RUN_ON(&worker_task_checker_); - rtp_rtcp_->SendNack(sequence_numbers); -} - bool RtpVideoStreamReceiver2::IsDecryptable() const { RTC_DCHECK_RUN_ON(&worker_task_checker_); return frames_decryptable_; } -// RTC_RUN_ON(packet_sequence_checker_) void RtpVideoStreamReceiver2::OnInsertedPacket( video_coding::PacketBuffer::InsertResult result) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); RTC_DCHECK_RUN_ON(&worker_task_checker_); video_coding::PacketBuffer::Packet* first_packet = nullptr; int max_nack_count; @@ -753,7 +819,7 @@ void RtpVideoStreamReceiver2::OnInsertedPacket( RTC_DCHECK_EQ(frame_boundary, packet->is_first_packet_in_frame()); int64_t unwrapped_rtp_seq_num = rtp_seq_num_unwrapper_.Unwrap(packet->seq_num); - RTC_DCHECK(packet_infos_.count(unwrapped_rtp_seq_num) > 0); + RTC_DCHECK_GT(packet_infos_.count(unwrapped_rtp_seq_num), 0); RtpPacketInfo& packet_info = packet_infos_[unwrapped_rtp_seq_num]; if (packet->is_first_packet_in_frame()) { first_packet = packet.get(); @@ -813,9 +879,9 @@ void RtpVideoStreamReceiver2::OnInsertedPacket( } } -// RTC_RUN_ON(packet_sequence_checker_) void RtpVideoStreamReceiver2::OnAssembledFrame( std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); RTC_DCHECK(frame); const absl::optional& descriptor = @@ -880,9 +946,9 @@ void RtpVideoStreamReceiver2::OnAssembledFrame( } } -// RTC_RUN_ON(packet_sequence_checker_) void RtpVideoStreamReceiver2::OnCompleteFrames( RtpFrameReferenceFinder::ReturnVector frames) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); for (auto& frame : frames) { last_seq_num_for_pic_id_[frame->Id()] = 
frame->last_seq_num(); @@ -914,7 +980,7 @@ void RtpVideoStreamReceiver2::SetFrameDecryptor( RTC_DCHECK_RUN_ON(&packet_sequence_checker_); if (buffered_frame_decryptor_ == nullptr) { buffered_frame_decryptor_ = - std::make_unique(this, this); + std::make_unique(this, this, field_trials_); } buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor)); } @@ -935,12 +1001,86 @@ void RtpVideoStreamReceiver2::SetRtpExtensions( rtp_header_extensions_.Reset(extensions); } +const RtpHeaderExtensionMap& RtpVideoStreamReceiver2::GetRtpExtensions() const { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + return rtp_header_extensions_; +} + void RtpVideoStreamReceiver2::UpdateRtt(int64_t max_rtt_ms) { - RTC_DCHECK_RUN_ON(&worker_task_checker_); + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); if (nack_module_) nack_module_->UpdateRtt(max_rtt_ms); } +void RtpVideoStreamReceiver2::OnLocalSsrcChange(uint32_t local_ssrc) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + rtp_rtcp_->SetLocalSsrc(local_ssrc); +} + +void RtpVideoStreamReceiver2::SetRtcpMode(RtcpMode mode) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + rtp_rtcp_->SetRTCPStatus(mode); +} + +void RtpVideoStreamReceiver2::SetReferenceTimeReport(bool enabled) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + rtp_rtcp_->SetNonSenderRttMeasurement(enabled); +} + +void RtpVideoStreamReceiver2::SetPacketSink( + RtpPacketSinkInterface* packet_sink) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + packet_sink_ = packet_sink; +} + +void RtpVideoStreamReceiver2::SetLossNotificationEnabled(bool enabled) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + if (enabled && !loss_notification_controller_) { + loss_notification_controller_ = + std::make_unique(&rtcp_feedback_buffer_, + &rtcp_feedback_buffer_); + } else if (!enabled && loss_notification_controller_) { + loss_notification_controller_.reset(); + rtcp_feedback_buffer_.ClearLossNotificationState(); + } +} + +void RtpVideoStreamReceiver2::SetNackHistory(TimeDelta history) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + if (history.ms() == 0) { + nack_module_.reset(); + } else if (!nack_module_) { + nack_module_ = std::make_unique( + worker_queue_, nack_periodic_processor_, clock_, &rtcp_feedback_buffer_, + &rtcp_feedback_buffer_, field_trials_); + } + + rtp_receive_statistics_->SetMaxReorderingThreshold( + config_.rtp.remote_ssrc, + history.ms() > 0 ? kMaxPacketAgeToNack : kDefaultMaxReorderingThreshold); +} + +int RtpVideoStreamReceiver2::ulpfec_payload_type() const { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + return ulpfec_receiver_ ? 
ulpfec_receiver_->ulpfec_payload_type() : -1; +} + +int RtpVideoStreamReceiver2::red_payload_type() const { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + return red_payload_type_; +} + +void RtpVideoStreamReceiver2::SetProtectionPayloadTypes( + int red_payload_type, + int ulpfec_payload_type) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + RTC_DCHECK(red_payload_type >= -1 && red_payload_type < 0x80); + RTC_DCHECK(ulpfec_payload_type >= -1 && ulpfec_payload_type < 0x80); + ulpfec_receiver_ = MaybeConstructUlpfecReceiver( + config_.rtp.remote_ssrc, red_payload_type, ulpfec_payload_type, + config_.rtp.extensions, this, clock_); +} + absl::optional RtpVideoStreamReceiver2::LastReceivedPacketMs() const { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); if (last_received_rtp_system_time_) { @@ -965,9 +1105,9 @@ void RtpVideoStreamReceiver2::ManageFrame( OnCompleteFrames(reference_finder_->ManageFrame(std::move(frame))); } -// RTC_RUN_ON(packet_sequence_checker_) void RtpVideoStreamReceiver2::ReceivePacket(const RtpPacketReceived& packet) { - RTC_DCHECK_RUN_ON(&worker_task_checker_); + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + if (packet.payload_size() == 0) { // Padding or keep-alive packet. // TODO(nisse): Could drop empty packets earlier, but need to figure out how @@ -975,7 +1115,7 @@ void RtpVideoStreamReceiver2::ReceivePacket(const RtpPacketReceived& packet) { NotifyReceiverOfEmptyPacket(packet.SequenceNumber()); return; } - if (packet.PayloadType() == config_.rtp.red_payload_type) { + if (packet.PayloadType() == red_payload_type_) { ParseAndHandleEncapsulatingHeader(packet); return; } @@ -995,20 +1135,20 @@ void RtpVideoStreamReceiver2::ReceivePacket(const RtpPacketReceived& packet) { parsed_payload->video_header); } -// RTC_RUN_ON(packet_sequence_checker_) void RtpVideoStreamReceiver2::ParseAndHandleEncapsulatingHeader( const RtpPacketReceived& packet) { - if (packet.PayloadType() == config_.rtp.red_payload_type && - packet.payload_size() > 0) { - if (packet.payload()[0] == config_.rtp.ulpfec_payload_type) { - // Notify video_receiver about received FEC packets to avoid NACKing these - // packets. - NotifyReceiverOfEmptyPacket(packet.SequenceNumber()); - } - if (!ulpfec_receiver_->AddReceivedRedPacket( - packet, config_.rtp.ulpfec_payload_type)) { - return; - } + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + RTC_DCHECK_EQ(packet.PayloadType(), red_payload_type_); + + if (!ulpfec_receiver_ || packet.payload_size() == 0U) + return; + + if (packet.payload()[0] == ulpfec_receiver_->ulpfec_payload_type()) { + // Notify video_receiver about received FEC packets to avoid NACKing these + // packets. + NotifyReceiverOfEmptyPacket(packet.SequenceNumber()); + } + if (ulpfec_receiver_->AddReceivedRedPacket(packet)) { ulpfec_receiver_->ProcessReceivedFec(); } } @@ -1016,8 +1156,8 @@ void RtpVideoStreamReceiver2::ParseAndHandleEncapsulatingHeader( // In the case of a video stream without picture ids and no rtx the // RtpFrameReferenceFinder will need to know about padding to // correctly calculate frame references. 
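// ---------------------------------------------------------------------------
// [Editor's illustration - not part of the patch text above or below.]
// SetProtectionPayloadTypes() in the hunk above accepts -1 to mean "disabled"
// and otherwise requires a 7-bit RTP payload type (0..127), which is what its
// RTC_DCHECKs encode. A standalone helper sketch with a hypothetical name:

// Returns true for -1 ("disabled") or a valid 7-bit RTP payload type.
constexpr bool IsValidProtectionPayloadType(int payload_type) {
  return payload_type >= -1 && payload_type < 0x80;
}

static_assert(IsValidProtectionPayloadType(-1), "disabled is allowed");
static_assert(IsValidProtectionPayloadType(96), "dynamic payload types pass");
static_assert(!IsValidProtectionPayloadType(128), "8-bit values are rejected");
// ---------------------------------------------------------------------------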
-// RTC_RUN_ON(packet_sequence_checker_) void RtpVideoStreamReceiver2::NotifyReceiverOfEmptyPacket(uint16_t seq_num) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); RTC_DCHECK_RUN_ON(&worker_task_checker_); OnCompleteFrames(reference_finder_->PaddingReceived(seq_num)); @@ -1053,24 +1193,25 @@ bool RtpVideoStreamReceiver2::DeliverRtcp(const uint8_t* rtcp_packet, uint32_t ntp_secs = 0; uint32_t ntp_frac = 0; uint32_t rtp_timestamp = 0; - uint32_t recieved_ntp_secs = 0; - uint32_t recieved_ntp_frac = 0; - if (rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, &recieved_ntp_secs, - &recieved_ntp_frac, &rtp_timestamp) != 0) { + uint32_t received_ntp_secs = 0; + uint32_t received_ntp_frac = 0; + if (rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, &received_ntp_secs, + &received_ntp_frac, &rtp_timestamp) != 0) { // Waiting for RTCP. return true; } - NtpTime recieved_ntp(recieved_ntp_secs, recieved_ntp_frac); - int64_t time_since_recieved = - clock_->CurrentNtpInMilliseconds() - recieved_ntp.ToMs(); + NtpTime received_ntp(received_ntp_secs, received_ntp_frac); + int64_t time_since_received = + clock_->CurrentNtpInMilliseconds() - received_ntp.ToMs(); // Don't use old SRs to estimate time. - if (time_since_recieved <= 1) { - ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); - absl::optional remote_to_local_clock_offset_ms = - ntp_estimator_.EstimateRemoteToLocalClockOffsetMs(); - if (remote_to_local_clock_offset_ms.has_value()) { + if (time_since_received <= 1) { + ntp_estimator_.UpdateRtcpTimestamp( + TimeDelta::Millis(rtt), NtpTime(ntp_secs, ntp_frac), rtp_timestamp); + absl::optional remote_to_local_clock_offset = + ntp_estimator_.EstimateRemoteToLocalClockOffset(); + if (remote_to_local_clock_offset.has_value()) { capture_clock_offset_updater_.SetRemoteToLocalClockOffset( - Int64MsToQ32x32(*remote_to_local_clock_offset_ms)); + *remote_to_local_clock_offset); } } @@ -1125,35 +1266,8 @@ void RtpVideoStreamReceiver2::StopReceive() { receiving_ = false; } -void RtpVideoStreamReceiver2::UpdateHistograms() { - FecPacketCounter counter = ulpfec_receiver_->GetPacketCounter(); - if (counter.first_packet_time_ms == -1) - return; - - int64_t elapsed_sec = - (clock_->TimeInMilliseconds() - counter.first_packet_time_ms) / 1000; - if (elapsed_sec < metrics::kMinRunTimeInSeconds) - return; - - if (counter.num_packets > 0) { - RTC_HISTOGRAM_PERCENTAGE( - "WebRTC.Video.ReceivedFecPacketsInPercent", - static_cast(counter.num_fec_packets * 100 / counter.num_packets)); - } - if (counter.num_fec_packets > 0) { - RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.RecoveredMediaPacketsInPercentOfFec", - static_cast(counter.num_recovered_packets * - 100 / counter.num_fec_packets)); - } - if (config_.rtp.ulpfec_payload_type != -1) { - RTC_HISTOGRAM_COUNTS_10000( - "WebRTC.Video.FecBitrateReceivedInKbps", - static_cast(counter.num_bytes * 8 / elapsed_sec / 1000)); - } -} - -// RTC_RUN_ON(packet_sequence_checker_) void RtpVideoStreamReceiver2::InsertSpsPpsIntoTracker(uint8_t payload_type) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); RTC_DCHECK_RUN_ON(&worker_task_checker_); auto codec_params_it = pt_codec_params_.find(payload_type); diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h index 54eb7502a1..7bc1a837e1 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h @@ -45,14 +45,13 @@ #include 
"modules/video_coding/nack_requester.h" #include "modules/video_coding/packet_buffer.h" #include "modules/video_coding/rtp_frame_reference_finder.h" -#include "modules/video_coding/unique_timestamp_counter.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/sequence_number_util.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" #include "video/buffered_frame_decryptor.h" #include "video/rtp_video_stream_receiver_frame_transformer_delegate.h" +#include "video/unique_timestamp_counter.h" namespace webrtc { @@ -68,6 +67,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, public RecoveredPacketReceiver, public RtpPacketSinkInterface, public KeyFrameRequestSender, + public NackSender, public OnDecryptedFrameCallback, public OnDecryptionStatusChangeCallback, public RtpVideoFrameReceiver { @@ -89,24 +89,28 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, // stream is registered as a candidate for sending REMB and transport // feedback. PacketRouter* packet_router, - const VideoReceiveStream::Config* config, + const VideoReceiveStreamInterface::Config* config, ReceiveStatistics* rtp_receive_statistics, RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, RtcpCnameCallback* rtcp_cname_callback, NackPeriodicProcessor* nack_periodic_processor, - NackSender* nack_sender, // The KeyFrameRequestSender is optional; if not provided, key frame // requests are sent via the internal RtpRtcp module. - KeyFrameRequestSender* keyframe_request_sender, OnCompleteFrameCallback* complete_frame_callback, rtc::scoped_refptr frame_decryptor, - rtc::scoped_refptr frame_transformer); + rtc::scoped_refptr frame_transformer, + const FieldTrialsView& field_trials, + RtcEventLog* event_log); ~RtpVideoStreamReceiver2() override; void AddReceiveCodec(uint8_t payload_type, VideoCodecType video_codec, const std::map& codec_params, bool raw_payload); + void RemoveReceiveCodec(uint8_t payload_type); + + // Clears state for all receive codecs added via `AddReceiveCodec`. + void RemoveReceiveCodecs(); void StartReceive(); void StopReceive(); @@ -142,26 +146,21 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, // Send an RTCP keyframe request. void RequestKeyFrame() override; + // Implements NackSender. + void SendNack(const std::vector& sequence_numbers, + bool buffering_allowed) override; + // Implements LossNotificationSender. void SendLossNotification(uint16_t last_decoded_seq_num, uint16_t last_received_seq_num, bool decodability_flag, bool buffering_allowed) override; - bool IsUlpfecEnabled() const; - bool IsRetransmissionsEnabled() const; - // Returns true if a decryptor is attached and frames can be decrypted. // Updated by OnDecryptionStatusChangeCallback. Note this refers to Frame // Decryption not SRTP. bool IsDecryptable() const; - // Request packet retransmits via NACK. Called via - // VideoReceiveStream2::SendNack, which gets called when - // RtpVideoStreamReceiver2::RtcpFeedbackBuffer's SendNack and - // SendBufferedRtcpFeedback methods (see `rtcp_feedback_buffer_` below). - void RequestPacketRetransmit(const std::vector& sequence_numbers); - // Implements OnDecryptedFrameCallback. void OnDecryptedFrame(std::unique_ptr frame) override; @@ -182,10 +181,36 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, // Updates the rtp header extensions at runtime. Must be called on the // `packet_sequence_checker_` thread. 
void SetRtpExtensions(const std::vector& extensions); + const RtpHeaderExtensionMap& GetRtpExtensions() const; - // Called by VideoReceiveStream when stats are updated. + // Called by VideoReceiveStreamInterface when stats are updated. void UpdateRtt(int64_t max_rtt_ms); + // Called when the local_ssrc is changed to match with a sender. + void OnLocalSsrcChange(uint32_t local_ssrc); + + // Forwards the call to set rtcp_sender_ to the RTCP mode of the rtcp sender. + void SetRtcpMode(RtcpMode mode); + + void SetReferenceTimeReport(bool enabled); + + // Sets or clears the callback sink that gets called for RTP packets. Used for + // packet handlers such as FlexFec. Must be called on the packet delivery + // thread (same context as `OnRtpPacket` is called on). + // TODO(bugs.webrtc.org/11993): Packet delivery thread today means `worker + // thread` but will be `network thread`. + void SetPacketSink(RtpPacketSinkInterface* packet_sink); + + // Turns on/off loss notifications. Must be called on the packet delivery + // thread. + void SetLossNotificationEnabled(bool enabled); + + void SetNackHistory(TimeDelta history); + + int ulpfec_payload_type() const; + int red_payload_type() const; + void SetProtectionPayloadTypes(int red_payload_type, int ulpfec_payload_type); + absl::optional LastReceivedPacketMs() const; absl::optional LastReceivedKeyframePacketMs() const; @@ -228,6 +253,8 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, // Send all RTCP feedback messages buffered thus far. void SendBufferedRtcpFeedback(); + void ClearLossNotificationState(); + private: // LNTF-related state. struct LossNotificationState { @@ -275,7 +302,6 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, RTC_RUN_ON(packet_sequence_checker_); void NotifyReceiverOfEmptyPacket(uint16_t seq_num) RTC_RUN_ON(packet_sequence_checker_); - void UpdateHistograms(); bool IsRedEnabled() const; void InsertSpsPpsIntoTracker(uint8_t payload_type) RTC_RUN_ON(packet_sequence_checker_); @@ -290,9 +316,12 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, bool is_keyframe) RTC_RUN_ON(packet_sequence_checker_); + const FieldTrialsView& field_trials_; + TaskQueueBase* const worker_queue_; Clock* const clock_; - // Ownership of this object lies with VideoReceiveStream, which owns `this`. - const VideoReceiveStream::Config& config_; + // Ownership of this object lies with VideoReceiveStreamInterface, which owns + // `this`. + const VideoReceiveStreamInterface::Config& config_; PacketRouter* const packet_router_; RemoteNtpTimeEstimator ntp_estimator_; @@ -304,7 +333,9 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, FieldTrialOptional forced_playout_delay_max_ms_; FieldTrialOptional forced_playout_delay_min_ms_; ReceiveStatistics* const rtp_receive_statistics_; - std::unique_ptr ulpfec_receiver_; + std::unique_ptr ulpfec_receiver_ + RTC_GUARDED_BY(packet_sequence_checker_); + int red_payload_type_ RTC_GUARDED_BY(packet_sequence_checker_); RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_task_checker_; // TODO(bugs.webrtc.org/11993): This checker conceptually represents @@ -315,17 +346,23 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, // that belong to the network thread. Once the packets are fully delivered // on the network thread, this comment will be deleted. 
RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_sequence_checker_; + RtpPacketSinkInterface* packet_sink_ RTC_GUARDED_BY(packet_sequence_checker_); bool receiving_ RTC_GUARDED_BY(packet_sequence_checker_); int64_t last_packet_log_ms_ RTC_GUARDED_BY(packet_sequence_checker_); const std::unique_ptr rtp_rtcp_; + NackPeriodicProcessor* const nack_periodic_processor_; OnCompleteFrameCallback* complete_frame_callback_; - KeyFrameRequestSender* const keyframe_request_sender_; + const KeyFrameReqMethod keyframe_request_method_; RtcpFeedbackBuffer rtcp_feedback_buffer_; - const std::unique_ptr nack_module_; - std::unique_ptr loss_notification_controller_; + // TODO(tommi): Consider absl::optional instead of unique_ptr + // since nack is usually configured. + std::unique_ptr nack_module_ + RTC_GUARDED_BY(packet_sequence_checker_); + std::unique_ptr loss_notification_controller_ + RTC_GUARDED_BY(packet_sequence_checker_); video_coding::PacketBuffer packet_buffer_ RTC_GUARDED_BY(packet_sequence_checker_); @@ -404,6 +441,9 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, RTC_GUARDED_BY(packet_sequence_checker_); std::map packet_infos_ RTC_GUARDED_BY(packet_sequence_checker_); + + Timestamp next_keyframe_request_for_missing_video_structure_ = + Timestamp::MinusInfinity(); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc index c54939fe5a..e6f33262b2 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc @@ -15,7 +15,6 @@ #include "absl/memory/memory.h" #include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" namespace webrtc { @@ -102,12 +101,12 @@ void RtpVideoStreamReceiverFrameTransformerDelegate::TransformFrame( void RtpVideoStreamReceiverFrameTransformerDelegate::OnTransformedFrame( std::unique_ptr frame) { - rtc::scoped_refptr delegate = - this; - network_thread_->PostTask(ToQueuedTask( + rtc::scoped_refptr delegate( + this); + network_thread_->PostTask( [delegate = std::move(delegate), frame = std::move(frame)]() mutable { delegate->ManageFrame(std::move(frame)); - })); + }); } void RtpVideoStreamReceiverFrameTransformerDelegate::ManageFrame( diff --git a/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.cc b/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.cc index 6b620acf8d..65b3abb802 100644 --- a/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.cc +++ b/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.cc @@ -25,7 +25,6 @@ #include "rtc_base/logging.h" #include "rtc_base/numerics/mod_ops.h" #include "rtc_base/strings/string_builder.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" namespace webrtc { @@ -47,8 +46,9 @@ enum HistogramCodecType { kVideoVp8 = 1, kVideoVp9 = 2, kVideoH264 = 3, + kVideoAv1 = 4, #ifndef DISABLE_H265 - kVideoH265 = 4, + kVideoH265 = 5, #endif kVideoMax = 64, }; @@ -77,6 +77,8 @@ HistogramCodecType PayloadNameToHistogramCodecType( return kVideoVp9; case kVideoCodecH264: return kVideoH264; + case kVideoCodecAV1: + return kVideoAv1; #ifndef DISABLE_H265 case kVideoCodecH265: return kVideoH265; @@ -117,17 +119,17 @@ absl::optional GetFallbackMaxPixels(const std::string& 
group) { return absl::optional(max_pixels); } -absl::optional GetFallbackMaxPixelsIfFieldTrialEnabled() { - std::string group = - webrtc::field_trial::FindFullName(kVp8ForcedFallbackEncoderFieldTrial); +absl::optional GetFallbackMaxPixelsIfFieldTrialEnabled( + const webrtc::FieldTrialsView& field_trials) { + std::string group = field_trials.Lookup(kVp8ForcedFallbackEncoderFieldTrial); return (absl::StartsWith(group, "Enabled")) ? GetFallbackMaxPixels(group.substr(7)) : absl::optional(); } -absl::optional GetFallbackMaxPixelsIfFieldTrialDisabled() { - std::string group = - webrtc::field_trial::FindFullName(kVp8ForcedFallbackEncoderFieldTrial); +absl::optional GetFallbackMaxPixelsIfFieldTrialDisabled( + const webrtc::FieldTrialsView& field_trials) { + std::string group = field_trials.Lookup(kVp8ForcedFallbackEncoderFieldTrial); return (absl::StartsWith(group, "Disabled")) ? GetFallbackMaxPixels(group.substr(8)) : absl::optional(); @@ -139,12 +141,15 @@ const int SendStatisticsProxy::kStatsTimeoutMs = 5000; SendStatisticsProxy::SendStatisticsProxy( Clock* clock, const VideoSendStream::Config& config, - VideoEncoderConfig::ContentType content_type) + VideoEncoderConfig::ContentType content_type, + const FieldTrialsView& field_trials) : clock_(clock), payload_name_(config.rtp.payload_name), rtp_config_(config.rtp), - fallback_max_pixels_(GetFallbackMaxPixelsIfFieldTrialEnabled()), - fallback_max_pixels_disabled_(GetFallbackMaxPixelsIfFieldTrialDisabled()), + fallback_max_pixels_( + GetFallbackMaxPixelsIfFieldTrialEnabled(field_trials)), + fallback_max_pixels_disabled_( + GetFallbackMaxPixelsIfFieldTrialDisabled(field_trials)), content_type_(content_type), start_ms_(clock->TimeInMilliseconds()), encode_time_(kEncodeTimeWeigthFactor), @@ -1055,11 +1060,12 @@ void SendStatisticsProxy::OnSendEncodedImage( } void SendStatisticsProxy::OnEncoderImplementationChanged( - const std::string& implementation_name) { + EncoderImplementation implementation) { MutexLock lock(&mutex_); encoder_changed_ = EncoderChangeEvent{stats_.encoder_implementation_name, - implementation_name}; - stats_.encoder_implementation_name = implementation_name; + implementation.name}; + stats_.encoder_implementation_name = implementation.name; + stats_.power_efficient_encoder = implementation.is_hardware_accelerated; } int SendStatisticsProxy::GetInputFrameRate() const { @@ -1387,7 +1393,6 @@ void SendStatisticsProxy::FrameCountUpdated(const FrameCounts& frame_counts, void SendStatisticsProxy::SendSideDelayUpdated(int avg_delay_ms, int max_delay_ms, - uint64_t total_delay_ms, uint32_t ssrc) { MutexLock lock(&mutex_); VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc); @@ -1395,7 +1400,6 @@ void SendStatisticsProxy::SendSideDelayUpdated(int avg_delay_ms, return; stats->avg_delay_ms = avg_delay_ms; stats->max_delay_ms = max_delay_ms; - stats->total_packet_send_delay_ms = total_delay_ms; uma_container_->delay_counter_.Add(avg_delay_ms); uma_container_->max_delay_counter_.Add(max_delay_ms); diff --git a/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.h b/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.h index c38488dd84..4203b1c873 100644 --- a/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.h @@ -17,9 +17,8 @@ #include #include +#include "api/field_trials_view.h" #include "api/video/video_codec_constants.h" -#include "api/video/video_stream_encoder_observer.h" -#include "api/video_codecs/video_encoder_config.h" #include 
"call/video_send_stream.h" #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/include/report_block_data.h" @@ -30,9 +29,11 @@ #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" +#include "video/config/video_encoder_config.h" #include "video/quality_limitation_reason_tracker.h" #include "video/report_block_stats.h" #include "video/stats_counter.h" +#include "video/video_stream_encoder_observer.h" namespace webrtc { @@ -51,7 +52,8 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, SendStatisticsProxy(Clock* clock, const VideoSendStream::Config& config, - VideoEncoderConfig::ContentType content_type); + VideoEncoderConfig::ContentType content_type, + const FieldTrialsView& field_trials); ~SendStatisticsProxy() override; virtual VideoSendStream::Stats GetStats(); @@ -60,7 +62,7 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, const CodecSpecificInfo* codec_info) override; void OnEncoderImplementationChanged( - const std::string& implementation_name) override; + EncoderImplementation implementation) override; // Used to update incoming frame rate. void OnIncomingFrame(int width, int height) override; @@ -124,9 +126,9 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, void FrameCountUpdated(const FrameCounts& frame_counts, uint32_t ssrc) override; + // From SendSideDelayObserver. void SendSideDelayUpdated(int avg_delay_ms, int max_delay_ms, - uint64_t total_delay_ms, uint32_t ssrc) override; private: diff --git a/TMessagesProj/jni/voip/webrtc/video/stats_counter.h b/TMessagesProj/jni/voip/webrtc/video/stats_counter.h index 9c3f6f8156..9e2b8702d6 100644 --- a/TMessagesProj/jni/voip/webrtc/video/stats_counter.h +++ b/TMessagesProj/jni/voip/webrtc/video/stats_counter.h @@ -14,8 +14,6 @@ #include #include -#include "rtc_base/constructor_magic.h" - namespace webrtc { class AggregatedCounter; @@ -156,6 +154,9 @@ class AvgCounter : public StatsCounter { bool include_empty_intervals); ~AvgCounter() override {} + AvgCounter(const AvgCounter&) = delete; + AvgCounter& operator=(const AvgCounter&) = delete; + void Add(int sample); private: @@ -163,8 +164,6 @@ class AvgCounter : public StatsCounter { // Returns the last computed metric (i.e. from GetMetric). 
   int GetValueForEmptyInterval() const override;
-
-  RTC_DISALLOW_COPY_AND_ASSIGN(AvgCounter);
 };
 
 // MaxCounter: maximum of samples
@@ -180,13 +179,14 @@ class MaxCounter : public StatsCounter {
              int64_t process_intervals_ms);
   ~MaxCounter() override {}
 
+  MaxCounter(const MaxCounter&) = delete;
+  MaxCounter& operator=(const MaxCounter&) = delete;
+
   void Add(int sample);
 
  private:
   bool GetMetric(int* metric) const override;
   int GetValueForEmptyInterval() const override;
-
-  RTC_DISALLOW_COPY_AND_ASSIGN(MaxCounter);
 };
 
 // PercentCounter: percentage of samples
@@ -200,13 +200,14 @@
   PercentCounter(Clock* clock, StatsCounterObserver* observer);
   ~PercentCounter() override {}
 
+  PercentCounter(const PercentCounter&) = delete;
+  PercentCounter& operator=(const PercentCounter&) = delete;
+
   void Add(bool sample);
 
  private:
   bool GetMetric(int* metric) const override;
   int GetValueForEmptyInterval() const override;
-
-  RTC_DISALLOW_COPY_AND_ASSIGN(PercentCounter);
 };
 
 // PermilleCounter: permille of samples
@@ -220,13 +221,14 @@
   PermilleCounter(Clock* clock, StatsCounterObserver* observer);
   ~PermilleCounter() override {}
 
+  PermilleCounter(const PermilleCounter&) = delete;
+  PermilleCounter& operator=(const PermilleCounter&) = delete;
+
   void Add(bool sample);
 
  private:
   bool GetMetric(int* metric) const override;
   int GetValueForEmptyInterval() const override;
-
-  RTC_DISALLOW_COPY_AND_ASSIGN(PermilleCounter);
 };
 
 // RateCounter: units per second
@@ -247,13 +249,14 @@
               bool include_empty_intervals);
   ~RateCounter() override {}
 
+  RateCounter(const RateCounter&) = delete;
+  RateCounter& operator=(const RateCounter&) = delete;
+
   void Add(int sample);
 
  private:
   bool GetMetric(int* metric) const override;
   int GetValueForEmptyInterval() const override;  // Returns zero.
-
-  RTC_DISALLOW_COPY_AND_ASSIGN(RateCounter);
 };
 
 // RateAccCounter: units per second (used for counters)
@@ -274,6 +277,9 @@
                  bool include_empty_intervals);
   ~RateAccCounter() override {}
 
+  RateAccCounter(const RateAccCounter&) = delete;
+  RateAccCounter& operator=(const RateAccCounter&) = delete;
+
   void Set(int64_t sample, uint32_t stream_id);
 
   // Sets the value for previous interval.
@@ -283,8 +289,6 @@
  private:
   bool GetMetric(int* metric) const override;
   int GetValueForEmptyInterval() const override;  // Returns zero.
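// ---------------------------------------------------------------------------
// [Editor's illustration - not part of the patch text above or below.]
// These stats_counter.h hunks drop the RTC_DISALLOW_COPY_AND_ASSIGN macro
// (removed together with the rtc_base/constructor_magic.h include) in favor of
// explicitly deleted copy operations, which is roughly what the macro expanded
// to. The plain-C++ idiom, with a hypothetical class name:

class NonCopyableCounter {
 public:
  NonCopyableCounter() = default;

  // Deleting the copy constructor and copy assignment makes the intent
  // visible at the use site instead of hiding it behind a macro.
  NonCopyableCounter(const NonCopyableCounter&) = delete;
  NonCopyableCounter& operator=(const NonCopyableCounter&) = delete;
};
// ---------------------------------------------------------------------------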
- - RTC_DISALLOW_COPY_AND_ASSIGN(RateAccCounter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/stream_synchronization.cc b/TMessagesProj/jni/voip/webrtc/video/stream_synchronization.cc index d5c77c1eca..d86cc79203 100644 --- a/TMessagesProj/jni/voip/webrtc/video/stream_synchronization.cc +++ b/TMessagesProj/jni/voip/webrtc/video/stream_synchronization.cc @@ -35,19 +35,19 @@ bool StreamSynchronization::ComputeRelativeDelay( const Measurements& audio_measurement, const Measurements& video_measurement, int* relative_delay_ms) { - int64_t audio_last_capture_time_ms; - if (!audio_measurement.rtp_to_ntp.Estimate(audio_measurement.latest_timestamp, - &audio_last_capture_time_ms)) { + NtpTime audio_last_capture_time = + audio_measurement.rtp_to_ntp.Estimate(audio_measurement.latest_timestamp); + if (!audio_last_capture_time.Valid()) { return false; } - int64_t video_last_capture_time_ms; - if (!video_measurement.rtp_to_ntp.Estimate(video_measurement.latest_timestamp, - &video_last_capture_time_ms)) { - return false; - } - if (video_last_capture_time_ms < 0) { + NtpTime video_last_capture_time = + video_measurement.rtp_to_ntp.Estimate(video_measurement.latest_timestamp); + if (!video_last_capture_time.Valid()) { return false; } + int64_t audio_last_capture_time_ms = audio_last_capture_time.ToMs(); + int64_t video_last_capture_time_ms = video_last_capture_time.ToMs(); + // Positive diff means that video_measurement is behind audio_measurement. *relative_delay_ms = video_measurement.latest_receive_time_ms - diff --git a/TMessagesProj/jni/voip/webrtc/video/task_queue_frame_decode_scheduler.cc b/TMessagesProj/jni/voip/webrtc/video/task_queue_frame_decode_scheduler.cc new file mode 100644 index 0000000000..cd109c2932 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/task_queue_frame_decode_scheduler.cc @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/task_queue_frame_decode_scheduler.h" + +#include +#include + +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +TaskQueueFrameDecodeScheduler::TaskQueueFrameDecodeScheduler( + Clock* clock, + TaskQueueBase* const bookkeeping_queue) + : clock_(clock), bookkeeping_queue_(bookkeeping_queue) { + RTC_DCHECK(clock_); + RTC_DCHECK(bookkeeping_queue_); +} + +TaskQueueFrameDecodeScheduler::~TaskQueueFrameDecodeScheduler() { + RTC_DCHECK(stopped_); + RTC_DCHECK(!scheduled_rtp_) << "Outstanding scheduled rtp=" << *scheduled_rtp_ + << ". 
Call CancelOutstanding before destruction."; +} + +void TaskQueueFrameDecodeScheduler::ScheduleFrame( + uint32_t rtp, + FrameDecodeTiming::FrameSchedule schedule, + FrameReleaseCallback cb) { + RTC_DCHECK(!stopped_) << "Can not schedule frames after stopped."; + RTC_DCHECK(!scheduled_rtp_.has_value()) + << "Can not schedule two frames for release at the same time."; + RTC_DCHECK(cb); + scheduled_rtp_ = rtp; + + TimeDelta wait = std::max( + TimeDelta::Zero(), schedule.latest_decode_time - clock_->CurrentTime()); + bookkeeping_queue_->PostDelayedHighPrecisionTask( + SafeTask(task_safety_.flag(), + [this, rtp, schedule, cb = std::move(cb)]() mutable { + RTC_DCHECK_RUN_ON(bookkeeping_queue_); + // If the next frame rtp has changed since this task was + // this scheduled release should be skipped. + if (scheduled_rtp_ != rtp) + return; + scheduled_rtp_ = absl::nullopt; + std::move(cb)(rtp, schedule.render_time); + }), + wait); +} + +void TaskQueueFrameDecodeScheduler::CancelOutstanding() { + scheduled_rtp_ = absl::nullopt; +} + +absl::optional +TaskQueueFrameDecodeScheduler::ScheduledRtpTimestamp() { + return scheduled_rtp_; +} + +void TaskQueueFrameDecodeScheduler::Stop() { + CancelOutstanding(); + stopped_ = true; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/task_queue_frame_decode_scheduler.h b/TMessagesProj/jni/voip/webrtc/video/task_queue_frame_decode_scheduler.h new file mode 100644 index 0000000000..69c6dae63d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/task_queue_frame_decode_scheduler.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_TASK_QUEUE_FRAME_DECODE_SCHEDULER_H_ +#define VIDEO_TASK_QUEUE_FRAME_DECODE_SCHEDULER_H_ + +#include "video/frame_decode_scheduler.h" + +namespace webrtc { + +// An implementation of FrameDecodeScheduler that is based on TaskQueues. This +// is the default implementation for general use. +class TaskQueueFrameDecodeScheduler : public FrameDecodeScheduler { + public: + TaskQueueFrameDecodeScheduler(Clock* clock, + TaskQueueBase* const bookkeeping_queue); + ~TaskQueueFrameDecodeScheduler() override; + TaskQueueFrameDecodeScheduler(const TaskQueueFrameDecodeScheduler&) = delete; + TaskQueueFrameDecodeScheduler& operator=( + const TaskQueueFrameDecodeScheduler&) = delete; + + // FrameDecodeScheduler implementation. 
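// ---------------------------------------------------------------------------
// [Editor's illustration - not part of the new file above or below.]
// TaskQueueFrameDecodeScheduler::ScheduleFrame() clamps the delay to be
// non-negative and, when the delayed task finally runs, skips the release if a
// different rtp timestamp has been scheduled in the meantime (the
// `scheduled_rtp_ != rtp` early return). A standalone sketch of that
// stale-token guard, with hypothetical names:

#include <cstdint>
#include <optional>

class SingleSlotScheduler {
 public:
  void Schedule(uint32_t rtp) { scheduled_rtp_ = rtp; }
  void Cancel() { scheduled_rtp_.reset(); }

  // Called when the delayed task fires; returns true only if `rtp` is still
  // the scheduled timestamp, so a superseded task becomes a no-op.
  bool ShouldRelease(uint32_t rtp) {
    if (scheduled_rtp_ != rtp)
      return false;
    scheduled_rtp_.reset();
    return true;
  }

 private:
  std::optional<uint32_t> scheduled_rtp_;
};
// ---------------------------------------------------------------------------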
+  absl::optional<uint32_t> ScheduledRtpTimestamp() override;
+  void ScheduleFrame(uint32_t rtp,
+                     FrameDecodeTiming::FrameSchedule schedule,
+                     FrameReleaseCallback cb) override;
+  void CancelOutstanding() override;
+  void Stop() override;
+
+ private:
+  Clock* const clock_;
+  TaskQueueBase* const bookkeeping_queue_;
+
+  absl::optional<uint32_t> scheduled_rtp_;
+  ScopedTaskSafetyDetached task_safety_;
+  bool stopped_ = false;
+};
+
+} // namespace webrtc
+
+#endif // VIDEO_TASK_QUEUE_FRAME_DECODE_SCHEDULER_H_
diff --git a/TMessagesProj/jni/voip/webrtc/video/test/mock_video_stream_encoder.h b/TMessagesProj/jni/voip/webrtc/video/test/mock_video_stream_encoder.h
index 8ea87acc0f..ff246df253 100644
--- a/TMessagesProj/jni/voip/webrtc/video/test/mock_video_stream_encoder.h
+++ b/TMessagesProj/jni/voip/webrtc/video/test/mock_video_stream_encoder.h
@@ -12,8 +12,8 @@
 #include
-#include "api/video/video_stream_encoder_interface.h"
 #include "test/gmock.h"
+#include "video/video_stream_encoder_interface.h"
 namespace webrtc {
diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/unique_timestamp_counter.cc b/TMessagesProj/jni/voip/webrtc/video/unique_timestamp_counter.cc
similarity index 94%
rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/unique_timestamp_counter.cc
rename to TMessagesProj/jni/voip/webrtc/video/unique_timestamp_counter.cc
index 8157994bb9..14cc039ec9 100644
--- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/unique_timestamp_counter.cc
+++ b/TMessagesProj/jni/voip/webrtc/video/unique_timestamp_counter.cc
@@ -8,7 +8,7 @@
 * be found in the AUTHORS file in the root of the source tree.
 */
-#include "modules/video_coding/unique_timestamp_counter.h"
+#include "video/unique_timestamp_counter.h"
 #include
 #include
diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/unique_timestamp_counter.h b/TMessagesProj/jni/voip/webrtc/video/unique_timestamp_counter.h
similarity index 83%
rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/unique_timestamp_counter.h
rename to TMessagesProj/jni/voip/webrtc/video/unique_timestamp_counter.h
index 23540c6f01..5dfb758bce 100644
--- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/unique_timestamp_counter.h
+++ b/TMessagesProj/jni/voip/webrtc/video/unique_timestamp_counter.h
@@ -7,8 +7,8 @@
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
-#ifndef MODULES_VIDEO_CODING_UNIQUE_TIMESTAMP_COUNTER_H_
-#define MODULES_VIDEO_CODING_UNIQUE_TIMESTAMP_COUNTER_H_
+#ifndef VIDEO_UNIQUE_TIMESTAMP_COUNTER_H_
+#define VIDEO_UNIQUE_TIMESTAMP_COUNTER_H_
 #include
 #include
@@ -16,7 +16,7 @@
 namespace webrtc {
-// Counts number of uniquly seen frames (aka pictures, aka temporal units)
+// Counts number of uniquely seen frames (aka pictures, aka temporal units)
 // identified by their rtp timestamp.
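// Annotation (not part of the patch): the rename above only moves this counter
// from modules/video_coding/ into video/. The idea it implements is counting
// distinct RTP timestamps, so duplicated or retransmitted packets of the same
// frame are counted once. A hypothetical sketch of that idea, not the class's
// actual interface:
//
//   std::set<uint32_t> seen_rtp_timestamps;
//   void OnPacket(uint32_t rtp_timestamp) {
//     seen_rtp_timestamps.insert(rtp_timestamp);
//   }
//   // seen_rtp_timestamps.size() == number of unique frames (temporal units)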
class UniqueTimestampCounter { public: @@ -41,4 +41,4 @@ class UniqueTimestampCounter { } // namespace webrtc -#endif // MODULES_VIDEO_CODING_UNIQUE_TIMESTAMP_COUNTER_H_ +#endif // VIDEO_UNIQUE_TIMESTAMP_COUNTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/video_analyzer.cc b/TMessagesProj/jni/voip/webrtc/video/video_analyzer.cc index 62ee7b4352..3077a77b2c 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_analyzer.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_analyzer.cc @@ -9,18 +9,22 @@ */ #include "video/video_analyzer.h" +#include + #include #include #include "absl/algorithm/container.h" #include "absl/flags/flag.h" #include "absl/flags/parse.h" +#include "absl/strings/string_view.h" +#include "api/test/metrics/global_metrics_logger_and_exporter.h" +#include "api/test/metrics/metric.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/rtp_rtcp/source/create_video_rtp_depacketizer.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/rtp_rtcp/source/rtp_util.h" #include "rtc_base/cpu_time.h" -#include "rtc_base/format_macros.h" #include "rtc_base/memory_usage.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/task_utils/repeating_task.h" @@ -29,7 +33,6 @@ #include "test/call_test.h" #include "test/testsupport/file_utils.h" #include "test/testsupport/frame_writer.h" -#include "test/testsupport/perf_test.h" #include "test/testsupport/test_artifacts.h" ABSL_FLAG(bool, @@ -40,18 +43,25 @@ ABSL_FLAG(bool, namespace webrtc { namespace { + +using ::webrtc::test::GetGlobalMetricsLogger; +using ::webrtc::test::ImprovementDirection; +using ::webrtc::test::Metric; +using ::webrtc::test::Unit; + constexpr TimeDelta kSendStatsPollingInterval = TimeDelta::Seconds(1); constexpr size_t kMaxComparisons = 10; // How often is keep alive message printed. -constexpr int kKeepAliveIntervalSeconds = 30; +constexpr TimeDelta kKeepAliveInterval = TimeDelta::Seconds(30); // Interval between checking that the test is over. 
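// Annotation (not part of the patch): with kKeepAliveInterval = 30 s above and
// kProbingInterval = 500 ms below, kKeepAliveIntervalIterations works out to
// 30000 / 500 = 60, i.e. the keep-alive message is still printed once every
// 60 polling iterations, roughly every 30 seconds, matching the old integer
// constants.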
-constexpr int kProbingIntervalMs = 500; +constexpr TimeDelta kProbingInterval = TimeDelta::Millis(500); constexpr int kKeepAliveIntervalIterations = - kKeepAliveIntervalSeconds * 1000 / kProbingIntervalMs; + kKeepAliveInterval.ms() / kProbingInterval.ms(); bool IsFlexfec(int payload_type) { return payload_type == test::CallTest::kFlexfecPayloadType; } + } // namespace VideoAnalyzer::VideoAnalyzer(test::LayerFilteringTransport* transport, @@ -187,13 +197,14 @@ void VideoAnalyzer::SetSendStream(VideoSendStream* stream) { send_stream_ = stream; } -void VideoAnalyzer::SetReceiveStream(VideoReceiveStream* stream) { +void VideoAnalyzer::SetReceiveStream(VideoReceiveStreamInterface* stream) { MutexLock lock(&lock_); RTC_DCHECK(!receive_stream_); receive_stream_ = stream; } -void VideoAnalyzer::SetAudioReceiveStream(AudioReceiveStream* recv_stream) { +void VideoAnalyzer::SetAudioReceiveStream( + AudioReceiveStreamInterface* recv_stream) { MutexLock lock(&lock_); RTC_CHECK(!audio_receive_stream_); audio_receive_stream_ = recv_stream; @@ -359,7 +370,7 @@ void VideoAnalyzer::Wait() { int last_frames_captured = -1; int iteration = 0; - while (!done_.Wait(kProbingIntervalMs)) { + while (!done_.Wait(kProbingInterval)) { int frames_processed; int frames_captured; { @@ -395,7 +406,7 @@ void VideoAnalyzer::Wait() { if (iteration > 0) printf("- Farewell, sweet Concorde!\n"); - SendTask(RTC_FROM_HERE, task_queue_, [&] { stats_polling_task.Stop(); }); + SendTask(task_queue_, [&] { stats_polling_task.Stop(); }); PrintResults(); if (graph_data_output_file_) @@ -489,13 +500,13 @@ void VideoAnalyzer::PollStats() { last_fec_bytes_ = fec_bytes; if (receive_stream_ != nullptr) { - VideoReceiveStream::Stats receive_stats = receive_stream_->GetStats(); + VideoReceiveStreamInterface::Stats receive_stats = + receive_stream_->GetStats(); // `total_decode_time_ms` gives a good estimate of the mean decode time, // `decode_ms` is used to keep track of the standard deviation. if (receive_stats.frames_decoded > 0) - mean_decode_time_ms_ = - static_cast(receive_stats.total_decode_time_ms) / - receive_stats.frames_decoded; + mean_decode_time_ms_ = receive_stats.total_decode_time.ms() / + receive_stats.frames_decoded; if (receive_stats.decode_ms > 0) decode_time_ms_.AddSample(receive_stats.decode_ms); if (receive_stats.max_decode_ms > 0) @@ -524,7 +535,7 @@ void VideoAnalyzer::PollStats() { } if (audio_receive_stream_ != nullptr) { - AudioReceiveStream::Stats receive_stats = + AudioReceiveStreamInterface::Stats receive_stats = audio_receive_stream_->GetStats(/*get_and_clear_legacy_stats=*/true); audio_expand_rate_.AddSample(receive_stats.expand_rate); audio_accelerate_rate_.AddSample(receive_stats.accelerate_rate); @@ -543,7 +554,7 @@ bool VideoAnalyzer::CompareFrames() { if (!PopComparison(&comparison)) { // Wait until new comparison task is available, or test is done. // If done, wake up remaining threads waiting. 
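// Annotation (not part of the patch): the change just below follows the same
// TimeDelta migration, replacing a raw millisecond count with an explicit
// duration, e.g.
//
//   comparison_available_event_.Wait(TimeDelta::Seconds(1));  // was Wait(1000)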
- comparison_available_event_.Wait(1000); + comparison_available_event_.Wait(TimeDelta::Seconds(1)); if (AllFramesRecorded()) { comparison_available_event_.Set(); return false; @@ -607,8 +618,6 @@ bool VideoAnalyzer::FrameProcessed() { } void VideoAnalyzer::PrintResults() { - using ::webrtc::test::ImproveDirection; - StopMeasuringCpuProcessTime(); int dropped_frames_diff; { @@ -617,36 +626,39 @@ void VideoAnalyzer::PrintResults() { dropped_frames_before_rendering_ + frames_.size(); } MutexLock lock(&comparison_lock_); - PrintResult("psnr", psnr_, "dB", ImproveDirection::kBiggerIsBetter); - PrintResult("ssim", ssim_, "unitless", ImproveDirection::kBiggerIsBetter); - PrintResult("sender_time", sender_time_, "ms", - ImproveDirection::kSmallerIsBetter); - PrintResult("receiver_time", receiver_time_, "ms", - ImproveDirection::kSmallerIsBetter); - PrintResult("network_time", network_time_, "ms", - ImproveDirection::kSmallerIsBetter); - PrintResult("total_delay_incl_network", end_to_end_, "ms", - ImproveDirection::kSmallerIsBetter); - PrintResult("time_between_rendered_frames", rendered_delta_, "ms", - ImproveDirection::kSmallerIsBetter); - PrintResult("encode_frame_rate", encode_frame_rate_, "fps", - ImproveDirection::kBiggerIsBetter); - PrintResult("encode_time", encode_time_ms_, "ms", - ImproveDirection::kSmallerIsBetter); - PrintResult("media_bitrate", media_bitrate_bps_, "bps", - ImproveDirection::kNone); - PrintResult("fec_bitrate", fec_bitrate_bps_, "bps", ImproveDirection::kNone); - PrintResult("send_bandwidth", send_bandwidth_bps_, "bps", - ImproveDirection::kNone); - PrintResult("pixels_per_frame", pixels_, "count", - ImproveDirection::kBiggerIsBetter); - - test::PrintResult("decode_frame_rate", "", test_label_.c_str(), - decode_frame_rate_, "fps", false, - ImproveDirection::kBiggerIsBetter); - test::PrintResult("render_frame_rate", "", test_label_.c_str(), - render_frame_rate_, "fps", false, - ImproveDirection::kBiggerIsBetter); + PrintResult("psnr_dB", psnr_, Unit::kUnitless, + ImprovementDirection::kBiggerIsBetter); + PrintResult("ssim", ssim_, Unit::kUnitless, + ImprovementDirection::kBiggerIsBetter); + PrintResult("sender_time", sender_time_, Unit::kMilliseconds, + ImprovementDirection::kSmallerIsBetter); + PrintResult("receiver_time", receiver_time_, Unit::kMilliseconds, + ImprovementDirection::kSmallerIsBetter); + PrintResult("network_time", network_time_, Unit::kMilliseconds, + ImprovementDirection::kSmallerIsBetter); + PrintResult("total_delay_incl_network", end_to_end_, Unit::kMilliseconds, + ImprovementDirection::kSmallerIsBetter); + PrintResult("time_between_rendered_frames", rendered_delta_, + Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter); + PrintResult("encode_frame_rate_fps", encode_frame_rate_, Unit::kHertz, + ImprovementDirection::kBiggerIsBetter); + PrintResult("encode_time", encode_time_ms_, Unit::kMilliseconds, + ImprovementDirection::kSmallerIsBetter); + PrintResult("media_bitrate", media_bitrate_bps_ / 1000.0, + Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter); + PrintResult("fec_bitrate", fec_bitrate_bps_ / 1000.0, + Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter); + PrintResult("send_bandwidth", send_bandwidth_bps_ / 1000.0, + Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter); + PrintResult("pixels_per_frame", pixels_, Unit::kCount, + ImprovementDirection::kBiggerIsBetter); + + GetGlobalMetricsLogger()->LogSingleValueMetric( + "decode_frame_rate_fps", test_label_, decode_frame_rate_, Unit::kHertz, 
+ ImprovementDirection::kBiggerIsBetter); + GetGlobalMetricsLogger()->LogSingleValueMetric( + "render_frame_rate_fps", test_label_, render_frame_rate_, Unit::kHertz, + ImprovementDirection::kBiggerIsBetter); // Record the time from the last freeze until the last rendered frame to // ensure we cover the full timespan of the session. Otherwise the metric @@ -654,8 +666,8 @@ void VideoAnalyzer::PrintResults() { time_between_freezes_.AddSample(last_render_time_ - last_unfreeze_time_ms_); // Freeze metrics. - PrintResult("time_between_freezes", time_between_freezes_, "ms", - ImproveDirection::kBiggerIsBetter); + PrintResult("time_between_freezes", time_between_freezes_, + Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter); const double freeze_count_double = static_cast(freeze_count_); const double total_freezes_duration_ms_double = @@ -664,10 +676,10 @@ void VideoAnalyzer::PrintResults() { static_cast(total_frames_duration_ms_); if (total_frames_duration_ms_double > 0) { - test::PrintResult( - "freeze_duration_ratio", "", test_label_.c_str(), + GetGlobalMetricsLogger()->LogSingleValueMetric( + "freeze_duration_ratio", test_label_, total_freezes_duration_ms_double / total_frames_duration_ms_double, - "unitless", false, ImproveDirection::kSmallerIsBetter); + Unit::kUnitless, ImprovementDirection::kSmallerIsBetter); RTC_DCHECK_LE(total_freezes_duration_ms_double, total_frames_duration_ms_double); @@ -675,47 +687,52 @@ void VideoAnalyzer::PrintResults() { const double total_frames_duration_min = total_frames_duration_ms_double / ms_per_minute; if (total_frames_duration_min > 0) { - test::PrintResult("freeze_count_per_minute", "", test_label_.c_str(), - freeze_count_double / total_frames_duration_min, - "unitless", false, ImproveDirection::kSmallerIsBetter); + GetGlobalMetricsLogger()->LogSingleValueMetric( + "freeze_count_per_minute", test_label_, + freeze_count_double / total_frames_duration_min, Unit::kUnitless, + ImprovementDirection::kSmallerIsBetter); } } - test::PrintResult("freeze_duration_average", "", test_label_.c_str(), - freeze_count_double > 0 - ? total_freezes_duration_ms_double / freeze_count_double - : 0, - "ms", false, ImproveDirection::kSmallerIsBetter); + GetGlobalMetricsLogger()->LogSingleValueMetric( + "freeze_duration_average", test_label_, + freeze_count_double > 0 + ? 
total_freezes_duration_ms_double / freeze_count_double + : 0, + Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter); if (1000 * sum_squared_frame_durations_ > 0) { - test::PrintResult( - "harmonic_frame_rate", "", test_label_.c_str(), + GetGlobalMetricsLogger()->LogSingleValueMetric( + "harmonic_frame_rate_fps", test_label_, total_frames_duration_ms_double / (1000 * sum_squared_frame_durations_), - "fps", false, ImproveDirection::kBiggerIsBetter); + Unit::kHertz, ImprovementDirection::kBiggerIsBetter); } if (worst_frame_) { - test::PrintResult("min_psnr", "", test_label_.c_str(), worst_frame_->psnr, - "dB", false, ImproveDirection::kBiggerIsBetter); + GetGlobalMetricsLogger()->LogSingleValueMetric( + "min_psnr_dB", test_label_, worst_frame_->psnr, Unit::kUnitless, + ImprovementDirection::kBiggerIsBetter); } if (receive_stream_ != nullptr) { PrintResultWithExternalMean("decode_time", mean_decode_time_ms_, - decode_time_ms_, "ms", - ImproveDirection::kSmallerIsBetter); + decode_time_ms_, Unit::kMilliseconds, + ImprovementDirection::kSmallerIsBetter); } dropped_frames_ += dropped_frames_diff; - test::PrintResult("dropped_frames", "", test_label_.c_str(), dropped_frames_, - "count", false, ImproveDirection::kSmallerIsBetter); - test::PrintResult("cpu_usage", "", test_label_.c_str(), GetCpuUsagePercent(), - "%", false, ImproveDirection::kSmallerIsBetter); + GetGlobalMetricsLogger()->LogSingleValueMetric( + "dropped_frames", test_label_, dropped_frames_, Unit::kCount, + ImprovementDirection::kSmallerIsBetter); + GetGlobalMetricsLogger()->LogSingleValueMetric( + "cpu_usage_%", test_label_, GetCpuUsagePercent(), Unit::kUnitless, + ImprovementDirection::kSmallerIsBetter); #if defined(WEBRTC_WIN) // On Linux and Mac in Resident Set some unused pages may be counted. // Therefore this metric will depend on order in which tests are run and // will be flaky. - PrintResult("memory_usage", memory_usage_, "sizeInBytes", - ImproveDirection::kSmallerIsBetter); + PrintResult("memory_usage", memory_usage_, Unit::kBytes, + ImprovementDirection::kSmallerIsBetter); #endif // Saving only the worst frame for manual analysis. Intention here is to @@ -733,19 +750,19 @@ void VideoAnalyzer::PrintResults() { } if (audio_receive_stream_ != nullptr) { - PrintResult("audio_expand_rate", audio_expand_rate_, "unitless", - ImproveDirection::kSmallerIsBetter); - PrintResult("audio_accelerate_rate", audio_accelerate_rate_, "unitless", - ImproveDirection::kSmallerIsBetter); - PrintResult("audio_jitter_buffer", audio_jitter_buffer_ms_, "ms", - ImproveDirection::kNone); + PrintResult("audio_expand_rate", audio_expand_rate_, Unit::kUnitless, + ImprovementDirection::kSmallerIsBetter); + PrintResult("audio_accelerate_rate", audio_accelerate_rate_, + Unit::kUnitless, ImprovementDirection::kSmallerIsBetter); + PrintResult("audio_jitter_buffer", audio_jitter_buffer_ms_, + Unit::kMilliseconds, ImprovementDirection::kNeitherIsBetter); } // Disable quality check for quick test, as quality checks may fail // because too few samples were collected. 
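// Annotation (not part of the patch): the assertions below reflect the switch
// from webrtc_impl::RunningStatistics to SamplesStatsCounter, whose accessors
// return plain numeric values instead of absl::optional, roughly:
//
//   SamplesStatsCounter psnr;
//   psnr.AddSample(38.5);
//   if (!psnr.IsEmpty()) {
//     double avg = psnr.GetAverage();  // was *stats.GetMean()
//   }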
if (!is_quick_test_enabled_) { - EXPECT_GT(*psnr_.GetMean(), avg_psnr_threshold_); - EXPECT_GT(*ssim_.GetMean(), avg_ssim_threshold_); + EXPECT_GT(psnr_.GetAverage(), avg_psnr_threshold_); + EXPECT_GT(ssim_.GetAverage(), avg_ssim_threshold_); } } @@ -818,32 +835,31 @@ void VideoAnalyzer::PerformFrameComparison( encoded_frame_size_.AddSample(comparison.encoded_frame_size); } -void VideoAnalyzer::PrintResult( - const char* result_type, - Statistics stats, - const char* unit, - webrtc::test::ImproveDirection improve_direction) { - test::PrintResultMeanAndError( - result_type, "", test_label_.c_str(), stats.GetMean().value_or(0), - stats.GetStandardDeviation().value_or(0), unit, false, improve_direction); +void VideoAnalyzer::PrintResult(absl::string_view result_type, + const SamplesStatsCounter& stats, + Unit unit, + ImprovementDirection improvement_direction) { + GetGlobalMetricsLogger()->LogMetric(result_type, test_label_, stats, unit, + improvement_direction); } void VideoAnalyzer::PrintResultWithExternalMean( - const char* result_type, + absl::string_view result_type, double mean, - Statistics stats, - const char* unit, - webrtc::test::ImproveDirection improve_direction) { + const SamplesStatsCounter& stats, + Unit unit, + ImprovementDirection improvement_direction) { // If the true mean is different than the sample mean, the sample variance is // too low. The sample variance given a known mean is obtained by adding the // squared error between the true mean and the sample mean. double compensated_variance = - stats.Size() > 0 - ? *stats.GetVariance() + pow(mean - *stats.GetMean(), 2.0) - : 0.0; - test::PrintResultMeanAndError(result_type, "", test_label_.c_str(), mean, - std::sqrt(compensated_variance), unit, false, - improve_direction); + stats.IsEmpty() + ? 
0.0 + : stats.GetVariance() + pow(mean - stats.GetAverage(), 2.0); + GetGlobalMetricsLogger()->LogMetric( + result_type, test_label_, + Metric::Stats{.mean = mean, .stddev = std::sqrt(compensated_variance)}, + unit, improvement_direction); } void VideoAnalyzer::PrintSamplesToFile() { @@ -854,7 +870,7 @@ void VideoAnalyzer::PrintSamplesToFile() { }); fprintf(out, "%s\n", graph_title_.c_str()); - fprintf(out, "%" RTC_PRIuS "\n", samples_.size()); + fprintf(out, "%zu\n", samples_.size()); fprintf(out, "dropped " "input_time_ms " @@ -867,8 +883,7 @@ void VideoAnalyzer::PrintSamplesToFile() { "encode_time_ms\n"); for (const Sample& sample : samples_) { fprintf(out, - "%d %" PRId64 " %" PRId64 " %" PRId64 " %" PRId64 " %" RTC_PRIuS - " %lf %lf\n", + "%d %" PRId64 " %" PRId64 " %" PRId64 " %" PRId64 " %zu %lf %lf\n", sample.dropped, sample.input_time_ms, sample.send_time_ms, sample.recv_time_ms, sample.render_time_ms, sample.encoded_frame_size, sample.psnr, sample.ssim); diff --git a/TMessagesProj/jni/voip/webrtc/video/video_analyzer.h b/TMessagesProj/jni/voip/webrtc/video/video_analyzer.h index c121370043..2cee5e1b92 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_analyzer.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_analyzer.h @@ -16,7 +16,10 @@ #include #include +#include "absl/strings/string_view.h" +#include "api/numerics/samples_stats_counter.h" #include "api/task_queue/task_queue_base.h" +#include "api/test/metrics/metric.h" #include "api/video/video_source_interface.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" @@ -27,7 +30,6 @@ #include "rtc_base/time_utils.h" #include "test/layer_filtering_transport.h" #include "test/rtp_file_writer.h" -#include "test/testsupport/perf_test.h" namespace webrtc { @@ -35,8 +37,6 @@ class VideoAnalyzer : public PacketReceiver, public Transport, public rtc::VideoSinkInterface { public: - using Statistics = webrtc_impl::RunningStatistics; - VideoAnalyzer(test::LayerFilteringTransport* transport, const std::string& test_label, double avg_psnr_threshold, @@ -61,8 +61,8 @@ class VideoAnalyzer : public PacketReceiver, bool respect_sink_wants); void SetCall(Call* call); void SetSendStream(VideoSendStream* stream); - void SetReceiveStream(VideoReceiveStream* stream); - void SetAudioReceiveStream(AudioReceiveStream* recv_stream); + void SetReceiveStream(VideoReceiveStreamInterface* stream); + void SetAudioReceiveStream(AudioReceiveStreamInterface* recv_stream); rtc::VideoSinkInterface* InputInterface(); rtc::VideoSourceInterface* OutputInterface(); @@ -205,24 +205,24 @@ class VideoAnalyzer : public PacketReceiver, void PrintResults() RTC_LOCKS_EXCLUDED(lock_, comparison_lock_); void PerformFrameComparison(const FrameComparison& comparison) RTC_LOCKS_EXCLUDED(comparison_lock_); - void PrintResult(const char* result_type, - Statistics stats, - const char* unit, - webrtc::test::ImproveDirection improve_direction); + void PrintResult(absl::string_view result_type, + const SamplesStatsCounter& stats, + webrtc::test::Unit unit, + webrtc::test::ImprovementDirection improvement_direction); void PrintResultWithExternalMean( - const char* result_type, + absl::string_view result_type, double mean, - Statistics stats, - const char* unit, - webrtc::test::ImproveDirection improve_direction); + const SamplesStatsCounter& stats, + webrtc::test::Unit unit, + webrtc::test::ImprovementDirection improvement_direction); void PrintSamplesToFile(void) RTC_LOCKS_EXCLUDED(comparison_lock_); void 
AddCapturedFrameForComparison(const VideoFrame& video_frame) RTC_LOCKS_EXCLUDED(lock_, comparison_lock_); Call* call_; VideoSendStream* send_stream_; - VideoReceiveStream* receive_stream_; - AudioReceiveStream* audio_receive_stream_; + VideoReceiveStreamInterface* receive_stream_; + AudioReceiveStreamInterface* audio_receive_stream_; CapturedFrameForwarder captured_frame_forwarder_; const std::string test_label_; FILE* const graph_data_output_file_; @@ -235,32 +235,32 @@ class VideoAnalyzer : public PacketReceiver, Mutex comparison_lock_; std::vector samples_ RTC_GUARDED_BY(comparison_lock_); - Statistics sender_time_ RTC_GUARDED_BY(comparison_lock_); - Statistics receiver_time_ RTC_GUARDED_BY(comparison_lock_); - Statistics network_time_ RTC_GUARDED_BY(comparison_lock_); - Statistics psnr_ RTC_GUARDED_BY(comparison_lock_); - Statistics ssim_ RTC_GUARDED_BY(comparison_lock_); - Statistics end_to_end_ RTC_GUARDED_BY(comparison_lock_); - Statistics rendered_delta_ RTC_GUARDED_BY(comparison_lock_); - Statistics encoded_frame_size_ RTC_GUARDED_BY(comparison_lock_); - Statistics encode_frame_rate_ RTC_GUARDED_BY(comparison_lock_); - Statistics encode_time_ms_ RTC_GUARDED_BY(comparison_lock_); - Statistics encode_usage_percent_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter sender_time_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter receiver_time_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter network_time_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter psnr_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter ssim_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter end_to_end_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter rendered_delta_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter encoded_frame_size_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter encode_frame_rate_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter encode_time_ms_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter encode_usage_percent_ RTC_GUARDED_BY(comparison_lock_); double mean_decode_time_ms_ RTC_GUARDED_BY(comparison_lock_); - Statistics decode_time_ms_ RTC_GUARDED_BY(comparison_lock_); - Statistics decode_time_max_ms_ RTC_GUARDED_BY(comparison_lock_); - Statistics media_bitrate_bps_ RTC_GUARDED_BY(comparison_lock_); - Statistics fec_bitrate_bps_ RTC_GUARDED_BY(comparison_lock_); - Statistics send_bandwidth_bps_ RTC_GUARDED_BY(comparison_lock_); - Statistics memory_usage_ RTC_GUARDED_BY(comparison_lock_); - Statistics audio_expand_rate_ RTC_GUARDED_BY(comparison_lock_); - Statistics audio_accelerate_rate_ RTC_GUARDED_BY(comparison_lock_); - Statistics audio_jitter_buffer_ms_ RTC_GUARDED_BY(comparison_lock_); - Statistics pixels_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter decode_time_ms_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter decode_time_max_ms_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter media_bitrate_bps_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter fec_bitrate_bps_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter send_bandwidth_bps_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter memory_usage_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter audio_expand_rate_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter audio_accelerate_rate_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter audio_jitter_buffer_ms_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter pixels_ RTC_GUARDED_BY(comparison_lock_); // Rendered frame with worst PSNR is saved for 
further analysis. absl::optional worst_frame_ RTC_GUARDED_BY(comparison_lock_); // Freeze metrics. - Statistics time_between_freezes_ RTC_GUARDED_BY(comparison_lock_); + SamplesStatsCounter time_between_freezes_ RTC_GUARDED_BY(comparison_lock_); uint32_t freeze_count_ RTC_GUARDED_BY(comparison_lock_); uint32_t total_freezes_duration_ms_ RTC_GUARDED_BY(comparison_lock_); uint32_t total_frames_duration_ms_ RTC_GUARDED_BY(comparison_lock_); diff --git a/TMessagesProj/jni/voip/webrtc/video/video_quality_observer.cc b/TMessagesProj/jni/voip/webrtc/video/video_quality_observer.cc deleted file mode 100644 index be7b08c887..0000000000 --- a/TMessagesProj/jni/voip/webrtc/video/video_quality_observer.cc +++ /dev/null @@ -1,286 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "video/video_quality_observer.h" - -#include -#include -#include -#include - -#include "rtc_base/logging.h" -#include "rtc_base/strings/string_builder.h" -#include "system_wrappers/include/metrics.h" - -namespace webrtc { -const uint32_t VideoQualityObserver::kMinFrameSamplesToDetectFreeze = 5; -const uint32_t VideoQualityObserver::kMinIncreaseForFreezeMs = 150; -const uint32_t VideoQualityObserver::kAvgInterframeDelaysWindowSizeFrames = 30; - -namespace { -constexpr int kMinVideoDurationMs = 3000; -constexpr int kMinRequiredSamples = 1; -constexpr int kPixelsInHighResolution = - 960 * 540; // CPU-adapted HD still counts. -constexpr int kPixelsInMediumResolution = 640 * 360; -constexpr int kBlockyQpThresholdVp8 = 70; -constexpr int kBlockyQpThresholdVp9 = 180; -constexpr int kMaxNumCachedBlockyFrames = 100; -// TODO(ilnik): Add H264/HEVC thresholds. -} // namespace - -VideoQualityObserver::VideoQualityObserver(VideoContentType content_type) - : last_frame_rendered_ms_(-1), - num_frames_rendered_(0), - first_frame_rendered_ms_(-1), - last_frame_pixels_(0), - is_last_frame_blocky_(false), - last_unfreeze_time_ms_(0), - render_interframe_delays_(kAvgInterframeDelaysWindowSizeFrames), - sum_squared_interframe_delays_secs_(0.0), - time_in_resolution_ms_(3, 0), - current_resolution_(Resolution::Low), - num_resolution_downgrades_(0), - time_in_blocky_video_ms_(0), - content_type_(content_type), - is_paused_(false) {} - -void VideoQualityObserver::UpdateHistograms() { - // Don't report anything on an empty video stream. - if (num_frames_rendered_ == 0) { - return; - } - - char log_stream_buf[2 * 1024]; - rtc::SimpleStringBuilder log_stream(log_stream_buf); - - if (last_frame_rendered_ms_ > last_unfreeze_time_ms_) { - smooth_playback_durations_.Add(last_frame_rendered_ms_ - - last_unfreeze_time_ms_); - } - - std::string uma_prefix = videocontenttypehelpers::IsScreenshare(content_type_) - ? 
"WebRTC.Video.Screenshare" - : "WebRTC.Video"; - - auto mean_time_between_freezes = - smooth_playback_durations_.Avg(kMinRequiredSamples); - if (mean_time_between_freezes) { - RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix + ".MeanTimeBetweenFreezesMs", - *mean_time_between_freezes); - log_stream << uma_prefix << ".MeanTimeBetweenFreezesMs " - << *mean_time_between_freezes << "\n"; - } - auto avg_freeze_length = freezes_durations_.Avg(kMinRequiredSamples); - if (avg_freeze_length) { - RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix + ".MeanFreezeDurationMs", - *avg_freeze_length); - log_stream << uma_prefix << ".MeanFreezeDurationMs " << *avg_freeze_length - << "\n"; - } - - int64_t video_duration_ms = - last_frame_rendered_ms_ - first_frame_rendered_ms_; - - if (video_duration_ms >= kMinVideoDurationMs) { - int time_spent_in_hd_percentage = static_cast( - time_in_resolution_ms_[Resolution::High] * 100 / video_duration_ms); - RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".TimeInHdPercentage", - time_spent_in_hd_percentage); - log_stream << uma_prefix << ".TimeInHdPercentage " - << time_spent_in_hd_percentage << "\n"; - - int time_with_blocky_video_percentage = - static_cast(time_in_blocky_video_ms_ * 100 / video_duration_ms); - RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".TimeInBlockyVideoPercentage", - time_with_blocky_video_percentage); - log_stream << uma_prefix << ".TimeInBlockyVideoPercentage " - << time_with_blocky_video_percentage << "\n"; - - int num_resolution_downgrades_per_minute = - num_resolution_downgrades_ * 60000 / video_duration_ms; - RTC_HISTOGRAM_COUNTS_SPARSE_100( - uma_prefix + ".NumberResolutionDownswitchesPerMinute", - num_resolution_downgrades_per_minute); - log_stream << uma_prefix << ".NumberResolutionDownswitchesPerMinute " - << num_resolution_downgrades_per_minute << "\n"; - - int num_freezes_per_minute = - freezes_durations_.NumSamples() * 60000 / video_duration_ms; - RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".NumberFreezesPerMinute", - num_freezes_per_minute); - log_stream << uma_prefix << ".NumberFreezesPerMinute " - << num_freezes_per_minute << "\n"; - - if (sum_squared_interframe_delays_secs_ > 0.0) { - int harmonic_framerate_fps = std::round( - video_duration_ms / (1000 * sum_squared_interframe_delays_secs_)); - RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".HarmonicFrameRate", - harmonic_framerate_fps); - log_stream << uma_prefix << ".HarmonicFrameRate " - << harmonic_framerate_fps << "\n"; - } - } - RTC_LOG(LS_INFO) << log_stream.str(); -} - -void VideoQualityObserver::OnRenderedFrame(const VideoFrame& frame, - int64_t now_ms) { - RTC_DCHECK_LE(last_frame_rendered_ms_, now_ms); - RTC_DCHECK_LE(last_unfreeze_time_ms_, now_ms); - - if (num_frames_rendered_ == 0) { - first_frame_rendered_ms_ = last_unfreeze_time_ms_ = now_ms; - } - - auto blocky_frame_it = blocky_frames_.find(frame.timestamp()); - - if (num_frames_rendered_ > 0) { - // Process inter-frame delay. - const int64_t interframe_delay_ms = now_ms - last_frame_rendered_ms_; - const double interframe_delays_secs = interframe_delay_ms / 1000.0; - - // Sum of squared inter frame intervals is used to calculate the harmonic - // frame rate metric. The metric aims to reflect overall experience related - // to smoothness of video playback and includes both freezes and pauses. 
- sum_squared_interframe_delays_secs_ += - interframe_delays_secs * interframe_delays_secs; - - if (!is_paused_) { - render_interframe_delays_.AddSample(interframe_delay_ms); - - bool was_freeze = false; - if (render_interframe_delays_.Size() >= kMinFrameSamplesToDetectFreeze) { - const absl::optional avg_interframe_delay = - render_interframe_delays_.GetAverageRoundedDown(); - RTC_DCHECK(avg_interframe_delay); - was_freeze = interframe_delay_ms >= - std::max(3 * *avg_interframe_delay, - *avg_interframe_delay + kMinIncreaseForFreezeMs); - } - - if (was_freeze) { - freezes_durations_.Add(interframe_delay_ms); - smooth_playback_durations_.Add(last_frame_rendered_ms_ - - last_unfreeze_time_ms_); - last_unfreeze_time_ms_ = now_ms; - } else { - // Count spatial metrics if there were no freeze. - time_in_resolution_ms_[current_resolution_] += interframe_delay_ms; - - if (is_last_frame_blocky_) { - time_in_blocky_video_ms_ += interframe_delay_ms; - } - } - } - } - - if (is_paused_) { - // If the stream was paused since the previous frame, do not count the - // pause toward smooth playback. Explicitly count the part before it and - // start the new smooth playback interval from this frame. - is_paused_ = false; - if (last_frame_rendered_ms_ > last_unfreeze_time_ms_) { - smooth_playback_durations_.Add(last_frame_rendered_ms_ - - last_unfreeze_time_ms_); - } - last_unfreeze_time_ms_ = now_ms; - - if (num_frames_rendered_ > 0) { - pauses_durations_.Add(now_ms - last_frame_rendered_ms_); - } - } - - int64_t pixels = frame.width() * frame.height(); - if (pixels >= kPixelsInHighResolution) { - current_resolution_ = Resolution::High; - } else if (pixels >= kPixelsInMediumResolution) { - current_resolution_ = Resolution::Medium; - } else { - current_resolution_ = Resolution::Low; - } - - if (pixels < last_frame_pixels_) { - ++num_resolution_downgrades_; - } - - last_frame_pixels_ = pixels; - last_frame_rendered_ms_ = now_ms; - - is_last_frame_blocky_ = blocky_frame_it != blocky_frames_.end(); - if (is_last_frame_blocky_) { - blocky_frames_.erase(blocky_frames_.begin(), ++blocky_frame_it); - } - - ++num_frames_rendered_; -} - -void VideoQualityObserver::OnDecodedFrame(const VideoFrame& frame, - absl::optional qp, - VideoCodecType codec) { - if (qp) { - absl::optional qp_blocky_threshold; - // TODO(ilnik): add other codec types when we have QP for them. - switch (codec) { - case kVideoCodecVP8: - qp_blocky_threshold = kBlockyQpThresholdVp8; - break; - case kVideoCodecVP9: - qp_blocky_threshold = kBlockyQpThresholdVp9; - break; - default: - qp_blocky_threshold = absl::nullopt; - } - - RTC_DCHECK(blocky_frames_.find(frame.timestamp()) == blocky_frames_.end()); - - if (qp_blocky_threshold && *qp > *qp_blocky_threshold) { - // Cache blocky frame. Its duration will be calculated in render callback. 
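// Annotation on the removed logic above (not new code): a decoded frame was
// flagged as "blocky" when its QP exceeded the per-codec threshold declared
// earlier in this file (70 for VP8, 180 for VP9); the render path then counted
// the following inter-frame interval into time_in_blocky_video_ms_, which fed
// the TimeInBlockyVideoPercentage histogram.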
- if (blocky_frames_.size() > kMaxNumCachedBlockyFrames) { - RTC_LOG(LS_WARNING) << "Overflow of blocky frames cache."; - blocky_frames_.erase( - blocky_frames_.begin(), - std::next(blocky_frames_.begin(), kMaxNumCachedBlockyFrames / 2)); - } - - blocky_frames_.insert(frame.timestamp()); - } - } -} - -void VideoQualityObserver::OnStreamInactive() { - is_paused_ = true; -} - -uint32_t VideoQualityObserver::NumFreezes() const { - return freezes_durations_.NumSamples(); -} - -uint32_t VideoQualityObserver::NumPauses() const { - return pauses_durations_.NumSamples(); -} - -uint32_t VideoQualityObserver::TotalFreezesDurationMs() const { - return freezes_durations_.Sum(kMinRequiredSamples).value_or(0); -} - -uint32_t VideoQualityObserver::TotalPausesDurationMs() const { - return pauses_durations_.Sum(kMinRequiredSamples).value_or(0); -} - -uint32_t VideoQualityObserver::TotalFramesDurationMs() const { - return last_frame_rendered_ms_ - first_frame_rendered_ms_; -} - -double VideoQualityObserver::SumSquaredFrameDurationsSec() const { - return sum_squared_interframe_delays_secs_; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/video_quality_observer.h b/TMessagesProj/jni/voip/webrtc/video/video_quality_observer.h deleted file mode 100644 index 6494a6f43c..0000000000 --- a/TMessagesProj/jni/voip/webrtc/video/video_quality_observer.h +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef VIDEO_VIDEO_QUALITY_OBSERVER_H_ -#define VIDEO_VIDEO_QUALITY_OBSERVER_H_ - -#include - -#include -#include - -#include "absl/types/optional.h" -#include "api/video/video_codec_type.h" -#include "api/video/video_content_type.h" -#include "api/video/video_frame.h" -#include "rtc_base/numerics/moving_average.h" -#include "rtc_base/numerics/sample_counter.h" - -namespace webrtc { - -// Calculates spatial and temporal quality metrics and reports them to UMA -// stats. -class VideoQualityObserver { - public: - // Use either VideoQualityObserver::kBlockyQpThresholdVp8 or - // VideoQualityObserver::kBlockyQpThresholdVp9. - explicit VideoQualityObserver(VideoContentType content_type); - ~VideoQualityObserver() = default; - - void OnDecodedFrame(const VideoFrame& frame, - absl::optional qp, - VideoCodecType codec); - - void OnRenderedFrame(const VideoFrame& frame, int64_t now_ms); - - void OnStreamInactive(); - - uint32_t NumFreezes() const; - uint32_t NumPauses() const; - uint32_t TotalFreezesDurationMs() const; - uint32_t TotalPausesDurationMs() const; - uint32_t TotalFramesDurationMs() const; - double SumSquaredFrameDurationsSec() const; - - void UpdateHistograms(); - - static const uint32_t kMinFrameSamplesToDetectFreeze; - static const uint32_t kMinIncreaseForFreezeMs; - static const uint32_t kAvgInterframeDelaysWindowSizeFrames; - - private: - enum Resolution { - Low = 0, - Medium = 1, - High = 2, - }; - - int64_t last_frame_rendered_ms_; - int64_t num_frames_rendered_; - int64_t first_frame_rendered_ms_; - int64_t last_frame_pixels_; - bool is_last_frame_blocky_; - // Decoded timestamp of the last delayed frame. 
- int64_t last_unfreeze_time_ms_; - rtc::MovingAverage render_interframe_delays_; - double sum_squared_interframe_delays_secs_; - // An inter-frame delay is counted as a freeze if it's significantly longer - // than average inter-frame delay. - rtc::SampleCounter freezes_durations_; - rtc::SampleCounter pauses_durations_; - // Time between freezes. - rtc::SampleCounter smooth_playback_durations_; - // Counters for time spent in different resolutions. Time between each two - // Consecutive frames is counted to bin corresponding to the first frame - // resolution. - std::vector time_in_resolution_ms_; - // Resolution of the last decoded frame. Resolution enum is used as an index. - Resolution current_resolution_; - int num_resolution_downgrades_; - // Similar to resolution, time spent in high-QP video. - int64_t time_in_blocky_video_ms_; - // Content type of the last decoded frame. - VideoContentType content_type_; - bool is_paused_; - - // Set of decoded frames with high QP value. - std::set blocky_frames_; -}; - -} // namespace webrtc - -#endif // VIDEO_VIDEO_QUALITY_OBSERVER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/video_quality_test.cc b/TMessagesProj/jni/voip/webrtc/video/video_quality_test.cc new file mode 100644 index 0000000000..21dfaa8ae8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/video_quality_test.cc @@ -0,0 +1,1569 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "video/video_quality_test.h" + +#include + +#if defined(WEBRTC_WIN) +#include +#endif + +#include +#include +#include +#include +#include +#include + +#include "api/fec_controller_override.h" +#include "api/rtc_event_log_output_file.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "api/task_queue/task_queue_base.h" +#include "api/test/create_frame_generator.h" +#include "api/video/builtin_video_bitrate_allocator_factory.h" +#include "api/video_codecs/video_encoder.h" +#include "call/fake_network_pipe.h" +#include "call/simulated_network.h" +#include "media/base/media_constants.h" +#include "media/engine/adm_helpers.h" +#include "media/engine/encoder_simulcast_proxy.h" +#include "media/engine/fake_video_codec_factory.h" +#include "media/engine/internal_encoder_factory.h" +#include "media/engine/webrtc_video_engine.h" +#include "modules/audio_device/include/audio_device.h" +#include "modules/audio_mixer/audio_mixer_impl.h" +#include "modules/video_coding/codecs/h264/include/h264.h" +#include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h" +#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h" +#include "modules/video_coding/codecs/vp8/include/vp8.h" +#include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "modules/video_coding/utility/ivf_file_writer.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/task_queue_for_test.h" +#include "test/platform_video_capturer.h" +#include "test/testsupport/file_utils.h" +#include "test/video_renderer.h" +#include "video/frame_dumping_decoder.h" +#ifdef WEBRTC_WIN +#include "modules/audio_device/include/audio_device_factory.h" +#endif +#include "video/config/encoder_stream_factory.h" + +namespace webrtc { + +namespace { +enum : int { // The first valid value is 1. 
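// Annotation (not part of the patch): only the first enumerator below is given
// an explicit value; the rest auto-increment, so
// kGenericFrameDescriptorExtensionId00 is 2, kGenericFrameDescriptorExtensionId01
// is 3, and so on, giving every RTP header extension used by the test a
// distinct, nonzero id.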
+ kAbsSendTimeExtensionId = 1, + kGenericFrameDescriptorExtensionId00, + kGenericFrameDescriptorExtensionId01, + kTransportSequenceNumberExtensionId, + kVideoContentTypeExtensionId, + kVideoTimingExtensionId, +}; + +constexpr char kSyncGroup[] = "av_sync"; +constexpr int kOpusMinBitrateBps = 6000; +constexpr int kOpusBitrateFbBps = 32000; +constexpr int kFramesSentInQuickTest = 1; +constexpr uint32_t kThumbnailSendSsrcStart = 0xE0000; +constexpr uint32_t kThumbnailRtxSsrcStart = 0xF0000; + +constexpr int kDefaultMaxQp = cricket::WebRtcVideoChannel::kDefaultQpMax; + +const VideoEncoder::Capabilities kCapabilities(false); + +std::pair GetMinMaxBitratesBps(const VideoCodec& codec, + size_t spatial_idx) { + uint32_t min_bitrate = codec.minBitrate; + uint32_t max_bitrate = codec.maxBitrate; + if (spatial_idx < codec.numberOfSimulcastStreams) { + min_bitrate = + std::max(min_bitrate, codec.simulcastStream[spatial_idx].minBitrate); + max_bitrate = + std::min(max_bitrate, codec.simulcastStream[spatial_idx].maxBitrate); + } + if (codec.codecType == VideoCodecType::kVideoCodecVP9 && + spatial_idx < codec.VP9().numberOfSpatialLayers) { + min_bitrate = + std::max(min_bitrate, codec.spatialLayers[spatial_idx].minBitrate); + max_bitrate = + std::min(max_bitrate, codec.spatialLayers[spatial_idx].maxBitrate); + } + max_bitrate = std::max(max_bitrate, min_bitrate); + return {min_bitrate * 1000, max_bitrate * 1000}; +} + +class VideoStreamFactory + : public VideoEncoderConfig::VideoStreamFactoryInterface { + public: + explicit VideoStreamFactory(const std::vector& streams) + : streams_(streams) {} + + private: + std::vector CreateEncoderStreams( + int frame_width, + int frame_height, + const VideoEncoderConfig& encoder_config) override { + // The highest layer must match the incoming resolution. + std::vector streams = streams_; + streams[streams_.size() - 1].height = frame_height; + streams[streams_.size() - 1].width = frame_width; + + streams[0].bitrate_priority = encoder_config.bitrate_priority; + return streams; + } + + std::vector streams_; +}; + +// This wrapper provides two features needed by the video quality tests: +// 1. Invoke VideoAnalyzer callbacks before and after encoding each frame. +// 2. Write the encoded frames to file, one file per simulcast layer. +class QualityTestVideoEncoder : public VideoEncoder, + private EncodedImageCallback { + public: + QualityTestVideoEncoder(std::unique_ptr encoder, + VideoAnalyzer* analyzer, + std::vector files, + double overshoot_factor) + : encoder_(std::move(encoder)), + overshoot_factor_(overshoot_factor), + analyzer_(analyzer) { + for (FileWrapper& file : files) { + writers_.push_back( + IvfFileWriter::Wrap(std::move(file), /* byte_limit= */ 100000000)); + } + } + + // Implement VideoEncoder + void SetFecControllerOverride( + FecControllerOverride* fec_controller_override) { + // Ignored. 
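// Annotation (not part of the patch): GetMinMaxBitratesBps() defined near the
// top of this file tightens the codec-wide limits to the selected layer's own
// limits and converts to bps. For example, with codec.minBitrate = 30 and
// codec.maxBitrate = 2500 (kbps) and simulcastStream[0] = {minBitrate 150,
// maxBitrate 600}, GetMinMaxBitratesBps(codec, 0) yields {150'000, 600'000}.
// The numbers are illustrative only.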
+ } + + int32_t InitEncode(const VideoCodec* codec_settings, + const Settings& settings) override { + codec_settings_ = *codec_settings; + return encoder_->InitEncode(codec_settings, settings); + } + + int32_t RegisterEncodeCompleteCallback( + EncodedImageCallback* callback) override { + callback_ = callback; + return encoder_->RegisterEncodeCompleteCallback(this); + } + + int32_t Release() override { return encoder_->Release(); } + + int32_t Encode(const VideoFrame& frame, + const std::vector* frame_types) { + if (analyzer_) { + analyzer_->PreEncodeOnFrame(frame); + } + return encoder_->Encode(frame, frame_types); + } + + void SetRates(const RateControlParameters& parameters) override { + RTC_DCHECK_GT(overshoot_factor_, 0.0); + if (overshoot_factor_ == 1.0) { + encoder_->SetRates(parameters); + return; + } + + // Simulating encoder overshooting target bitrate, by configuring actual + // encoder too high. Take care not to adjust past limits of config, + // otherwise encoders may crash on DCHECK. + VideoBitrateAllocation overshot_allocation; + for (size_t si = 0; si < kMaxSpatialLayers; ++si) { + const uint32_t spatial_layer_bitrate_bps = + parameters.bitrate.GetSpatialLayerSum(si); + if (spatial_layer_bitrate_bps == 0) { + continue; + } + + uint32_t min_bitrate_bps; + uint32_t max_bitrate_bps; + std::tie(min_bitrate_bps, max_bitrate_bps) = + GetMinMaxBitratesBps(codec_settings_, si); + double overshoot_factor = overshoot_factor_; + const uint32_t corrected_bitrate = rtc::checked_cast( + overshoot_factor * spatial_layer_bitrate_bps); + if (corrected_bitrate < min_bitrate_bps) { + overshoot_factor = min_bitrate_bps / spatial_layer_bitrate_bps; + } else if (corrected_bitrate > max_bitrate_bps) { + overshoot_factor = max_bitrate_bps / spatial_layer_bitrate_bps; + } + + for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) { + if (parameters.bitrate.HasBitrate(si, ti)) { + overshot_allocation.SetBitrate( + si, ti, + rtc::checked_cast( + overshoot_factor * parameters.bitrate.GetBitrate(si, ti))); + } + } + } + + return encoder_->SetRates( + RateControlParameters(overshot_allocation, parameters.framerate_fps, + parameters.bandwidth_allocation)); + } + + void OnPacketLossRateUpdate(float packet_loss_rate) override { + encoder_->OnPacketLossRateUpdate(packet_loss_rate); + } + + void OnRttUpdate(int64_t rtt_ms) override { encoder_->OnRttUpdate(rtt_ms); } + + void OnLossNotification(const LossNotification& loss_notification) override { + encoder_->OnLossNotification(loss_notification); + } + + EncoderInfo GetEncoderInfo() const override { + EncoderInfo info = encoder_->GetEncoderInfo(); + if (overshoot_factor_ != 1.0) { + // We're simulating bad encoder, don't forward trusted setting + // from eg libvpx. 
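// Annotation on the SetRates() override above (not part of the patch): each
// spatial layer's requested bitrate is multiplied by overshoot_factor_ and
// then clamped back into that layer's [min, max] range. As an illustrative
// example, with a 1.2x overshoot a layer requested at 500 kbps but capped at
// 550 kbps ends up configured at 550 kbps rather than 600 kbps.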
+ info.has_trusted_rate_controller = false; + } + return info; + } + + private: + // Implement EncodedImageCallback + Result OnEncodedImage(const EncodedImage& encoded_image, + const CodecSpecificInfo* codec_specific_info) override { + if (codec_specific_info) { + int simulcast_index; + if (codec_specific_info->codecType == kVideoCodecVP9) { + simulcast_index = 0; + } else { + simulcast_index = encoded_image.SpatialIndex().value_or(0); + } + RTC_DCHECK_GE(simulcast_index, 0); + if (analyzer_) { + analyzer_->PostEncodeOnFrame(simulcast_index, + encoded_image.Timestamp()); + } + if (static_cast(simulcast_index) < writers_.size()) { + writers_[simulcast_index]->WriteFrame(encoded_image, + codec_specific_info->codecType); + } + } + + return callback_->OnEncodedImage(encoded_image, codec_specific_info); + } + + void OnDroppedFrame(DropReason reason) override { + callback_->OnDroppedFrame(reason); + } + + const std::unique_ptr encoder_; + const double overshoot_factor_; + VideoAnalyzer* const analyzer_; + std::vector> writers_; + EncodedImageCallback* callback_ = nullptr; + VideoCodec codec_settings_; +}; + +#if defined(WEBRTC_WIN) && !defined(WINUWP) +void PressEnterToContinue(TaskQueueBase* task_queue) { + puts(">> Press ENTER to continue..."); + + while (!_kbhit() || _getch() != '\r') { + // Drive the message loop for the thread running the task_queue + SendTask(task_queue, [&]() { + MSG msg; + if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + }); + } +} +#else +void PressEnterToContinue(TaskQueueBase* /*task_queue*/) { + puts(">> Press ENTER to continue..."); + while (getc(stdin) != '\n' && !feof(stdin)) + ; // NOLINT +} +#endif + +} // namespace + +std::unique_ptr VideoQualityTest::CreateVideoDecoder( + const SdpVideoFormat& format) { + std::unique_ptr decoder; + if (format.name == "multiplex") { + decoder = std::make_unique( + decoder_factory_.get(), SdpVideoFormat(cricket::kVp9CodecName)); + } else if (format.name == "FakeCodec") { + decoder = webrtc::FakeVideoDecoderFactory::CreateVideoDecoder(); + } else { + decoder = decoder_factory_->CreateVideoDecoder(format); + } + if (!params_.logging.encoded_frame_base_path.empty()) { + rtc::StringBuilder str; + str << receive_logs_++; + std::string path = + params_.logging.encoded_frame_base_path + "." + str.str() + ".recv.ivf"; + decoder = CreateFrameDumpingDecoderWrapper( + std::move(decoder), FileWrapper::OpenWriteOnly(path)); + } + return decoder; +} + +std::unique_ptr VideoQualityTest::CreateVideoEncoder( + const SdpVideoFormat& format, + VideoAnalyzer* analyzer) { + std::unique_ptr encoder; + if (format.name == "VP8") { + encoder = + std::make_unique(encoder_factory_.get(), format); + } else if (format.name == "multiplex") { + encoder = std::make_unique( + encoder_factory_.get(), SdpVideoFormat(cricket::kVp9CodecName)); + } else if (format.name == "FakeCodec") { + encoder = webrtc::FakeVideoEncoderFactory::CreateVideoEncoder(); + } else { + encoder = encoder_factory_->CreateVideoEncoder(format); + } + + std::vector encoded_frame_dump_files; + if (!params_.logging.encoded_frame_base_path.empty()) { + char ss_buf[100]; + rtc::SimpleStringBuilder sb(ss_buf); + sb << send_logs_++; + std::string prefix = + params_.logging.encoded_frame_base_path + "." 
+ sb.str() + ".send."; + encoded_frame_dump_files.push_back( + FileWrapper::OpenWriteOnly(prefix + "1.ivf")); + encoded_frame_dump_files.push_back( + FileWrapper::OpenWriteOnly(prefix + "2.ivf")); + encoded_frame_dump_files.push_back( + FileWrapper::OpenWriteOnly(prefix + "3.ivf")); + } + + double overshoot_factor = 1.0; + // Match format to either of the streams in dual-stream mode in order to get + // the overshoot factor. This is not very robust but we can't know for sure + // which stream this encoder is meant for, from within the factory. + if (format == + SdpVideoFormat(params_.video[0].codec, params_.video[0].sdp_params)) { + overshoot_factor = params_.video[0].encoder_overshoot_factor; + } else if (format == SdpVideoFormat(params_.video[1].codec, + params_.video[1].sdp_params)) { + overshoot_factor = params_.video[1].encoder_overshoot_factor; + } + if (overshoot_factor == 0.0) { + // If params were zero-initialized, set to 1.0 instead. + overshoot_factor = 1.0; + } + + if (analyzer || !encoded_frame_dump_files.empty() || overshoot_factor > 1.0) { + encoder = std::make_unique( + std::move(encoder), analyzer, std::move(encoded_frame_dump_files), + overshoot_factor); + } + + return encoder; +} + +VideoQualityTest::VideoQualityTest( + std::unique_ptr injection_components) + : clock_(Clock::GetRealTimeClock()), + task_queue_factory_(CreateDefaultTaskQueueFactory()), + rtc_event_log_factory_(task_queue_factory_.get()), + video_decoder_factory_([this](const SdpVideoFormat& format) { + return this->CreateVideoDecoder(format); + }), + video_encoder_factory_([this](const SdpVideoFormat& format) { + return this->CreateVideoEncoder(format, nullptr); + }), + video_encoder_factory_with_analyzer_( + [this](const SdpVideoFormat& format) { + return this->CreateVideoEncoder(format, analyzer_.get()); + }), + video_bitrate_allocator_factory_( + CreateBuiltinVideoBitrateAllocatorFactory()), + receive_logs_(0), + send_logs_(0), + injection_components_(std::move(injection_components)), + num_video_streams_(0) { + if (injection_components_ == nullptr) { + injection_components_ = std::make_unique(); + } + if (injection_components_->video_decoder_factory != nullptr) { + decoder_factory_ = std::move(injection_components_->video_decoder_factory); + } else { + decoder_factory_ = std::make_unique(); + } + if (injection_components_->video_encoder_factory != nullptr) { + encoder_factory_ = std::move(injection_components_->video_encoder_factory); + } else { + encoder_factory_ = std::make_unique(); + } + + payload_type_map_ = test::CallTest::payload_type_map_; + RTC_DCHECK(payload_type_map_.find(kPayloadTypeH264) == + payload_type_map_.end()); + RTC_DCHECK(payload_type_map_.find(kPayloadTypeVP8) == + payload_type_map_.end()); + RTC_DCHECK(payload_type_map_.find(kPayloadTypeVP9) == + payload_type_map_.end()); + RTC_DCHECK(payload_type_map_.find(kPayloadTypeGeneric) == + payload_type_map_.end()); + payload_type_map_[kPayloadTypeH264] = webrtc::MediaType::VIDEO; + payload_type_map_[kPayloadTypeVP8] = webrtc::MediaType::VIDEO; + payload_type_map_[kPayloadTypeVP9] = webrtc::MediaType::VIDEO; + payload_type_map_[kPayloadTypeGeneric] = webrtc::MediaType::VIDEO; + + fec_controller_factory_ = + std::move(injection_components_->fec_controller_factory); + network_state_predictor_factory_ = + std::move(injection_components_->network_state_predictor_factory); + network_controller_factory_ = + std::move(injection_components_->network_controller_factory); +} + +VideoQualityTest::InjectionComponents::InjectionComponents() = 
default; + +VideoQualityTest::InjectionComponents::~InjectionComponents() = default; + +void VideoQualityTest::TestBody() {} + +std::string VideoQualityTest::GenerateGraphTitle() const { + rtc::StringBuilder ss; + ss << params_.video[0].codec; + ss << " (" << params_.video[0].target_bitrate_bps / 1000 << "kbps"; + ss << ", " << params_.video[0].fps << " FPS"; + if (params_.screenshare[0].scroll_duration) + ss << ", " << params_.screenshare[0].scroll_duration << "s scroll"; + if (params_.ss[0].streams.size() > 1) + ss << ", Stream #" << params_.ss[0].selected_stream; + if (params_.ss[0].num_spatial_layers > 1) + ss << ", Layer #" << params_.ss[0].selected_sl; + ss << ")"; + return ss.Release(); +} + +void VideoQualityTest::CheckParamsAndInjectionComponents() { + if (injection_components_ == nullptr) { + injection_components_ = std::make_unique(); + } + if (!params_.config && injection_components_->sender_network == nullptr && + injection_components_->receiver_network == nullptr) { + params_.config = BuiltInNetworkBehaviorConfig(); + } + RTC_CHECK( + (params_.config && injection_components_->sender_network == nullptr && + injection_components_->receiver_network == nullptr) || + (!params_.config && injection_components_->sender_network != nullptr && + injection_components_->receiver_network != nullptr)); + for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) { + // Iterate over primary and secondary video streams. + if (!params_.video[video_idx].enabled) + return; + // Add a default stream in none specified. + if (params_.ss[video_idx].streams.empty()) + params_.ss[video_idx].streams.push_back( + VideoQualityTest::DefaultVideoStream(params_, video_idx)); + if (params_.ss[video_idx].num_spatial_layers == 0) + params_.ss[video_idx].num_spatial_layers = 1; + + if (params_.config) { + if (params_.config->loss_percent != 0 || + params_.config->queue_length_packets != 0) { + // Since LayerFilteringTransport changes the sequence numbers, we can't + // use that feature with pack loss, since the NACK request would end up + // retransmitting the wrong packets. + RTC_CHECK(params_.ss[video_idx].selected_sl == -1 || + params_.ss[video_idx].selected_sl == + params_.ss[video_idx].num_spatial_layers - 1); + RTC_CHECK(params_.video[video_idx].selected_tl == -1 || + params_.video[video_idx].selected_tl == + params_.video[video_idx].num_temporal_layers - 1); + } + } + + // TODO(ivica): Should max_bitrate_bps == -1 represent inf max bitrate, as + // it does in some parts of the code? 
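// Annotation (not part of the patch): taken together, the checks below enforce
// min <= target <= max both for the top-level video settings and for every
// configured stream; e.g. a stream with {min 300 kbps, target 800 kbps,
// max 600 kbps} fails the RTC_CHECK_GE(max_bitrate_bps, target_bitrate_bps)
// check.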
+    RTC_CHECK_GE(params_.video[video_idx].max_bitrate_bps,
+                 params_.video[video_idx].target_bitrate_bps);
+    RTC_CHECK_GE(params_.video[video_idx].target_bitrate_bps,
+                 params_.video[video_idx].min_bitrate_bps);
+    int selected_stream = params_.ss[video_idx].selected_stream;
+    if (params_.video[video_idx].selected_tl > -1) {
+      RTC_CHECK_LT(selected_stream, params_.ss[video_idx].streams.size())
+          << "Can not use --selected_tl when --selected_stream is all streams";
+      int stream_tl = params_.ss[video_idx]
+                          .streams[selected_stream]
+                          .num_temporal_layers.value_or(1);
+      RTC_CHECK_LT(params_.video[video_idx].selected_tl, stream_tl);
+    }
+    RTC_CHECK_LE(params_.ss[video_idx].selected_stream,
+                 params_.ss[video_idx].streams.size());
+    for (const VideoStream& stream : params_.ss[video_idx].streams) {
+      RTC_CHECK_GE(stream.min_bitrate_bps, 0);
+      RTC_CHECK_GE(stream.target_bitrate_bps, stream.min_bitrate_bps);
+      RTC_CHECK_GE(stream.max_bitrate_bps, stream.target_bitrate_bps);
+    }
+    // TODO(ivica): Should we check if the sum of all streams/layers is equal to
+    // the total bitrate? We anyway have to update them in the case bitrate
+    // estimator changes the total bitrates.
+    RTC_CHECK_GE(params_.ss[video_idx].num_spatial_layers, 1);
+    RTC_CHECK_LE(params_.ss[video_idx].selected_sl,
+                 params_.ss[video_idx].num_spatial_layers);
+    RTC_CHECK(
+        params_.ss[video_idx].spatial_layers.empty() ||
+        params_.ss[video_idx].spatial_layers.size() ==
+            static_cast<size_t>(params_.ss[video_idx].num_spatial_layers));
+    if (params_.video[video_idx].codec == "VP8") {
+      RTC_CHECK_EQ(params_.ss[video_idx].num_spatial_layers, 1);
+    } else if (params_.video[video_idx].codec == "VP9") {
+      RTC_CHECK_EQ(params_.ss[video_idx].streams.size(), 1);
+    }
+    RTC_CHECK_GE(params_.call.num_thumbnails, 0);
+    if (params_.call.num_thumbnails > 0) {
+      RTC_CHECK_EQ(params_.ss[video_idx].num_spatial_layers, 1);
+      RTC_CHECK_EQ(params_.ss[video_idx].streams.size(), 3);
+      RTC_CHECK_EQ(params_.video[video_idx].num_temporal_layers, 3);
+      RTC_CHECK_EQ(params_.video[video_idx].codec, "VP8");
+    }
+    // Dual streams with FEC are not supported in tests yet.
+    RTC_CHECK(!params_.video[video_idx].flexfec || num_video_streams_ == 1);
+    RTC_CHECK(!params_.video[video_idx].ulpfec || num_video_streams_ == 1);
+  }
+}
+
+// Static.
+std::vector<int> VideoQualityTest::ParseCSV(const std::string& str) {
+  // Parse comma separated integers, where some elements may be empty. Empty
+  // values are replaced with -1.
+  // E.g. "10,-20,,30,40" --> {10, -20, -1, 30, 40}
+  // E.g. ",,10,,20," --> {-1, -1, 10, -1, 20, -1}
+  std::vector<int> result;
+  if (str.empty())
+    return result;
+
+  const char* p = str.c_str();
+  int value = -1;
+  int pos;
+  while (*p) {
+    if (*p == ',') {
+      result.push_back(value);
+      value = -1;
+      ++p;
+      continue;
+    }
+    RTC_CHECK_EQ(sscanf(p, "%d%n", &value, &pos), 1)
+        << "Unexpected non-number value.";
+    p += pos;
+  }
+  result.push_back(value);
+  return result;
+}
+
+// Static.
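The ParseCSV helper above maps empty CSV fields to -1 so callers can mean "use the default" for individual stream settings. A simplified standalone sketch of the same convention (std::stoi instead of sscanf, hypothetical function name, not the WebRTC helper itself):

    #include <iostream>
    #include <string>
    #include <vector>

    // Hypothetical standalone helper mirroring the "-1 for empty fields" rule.
    std::vector<int> ParseCsvSketch(const std::string& str) {
      std::vector<int> result;
      if (str.empty())
        return result;
      std::string field;
      for (char c : str) {
        if (c == ',') {
          result.push_back(field.empty() ? -1 : std::stoi(field));
          field.clear();
        } else {
          field += c;
        }
      }
      // The final field (possibly empty) is always appended, like the original.
      result.push_back(field.empty() ? -1 : std::stoi(field));
      return result;
    }

    int main() {
      for (int v : ParseCsvSketch("10,-20,,30,40"))
        std::cout << v << ' ';  // 10 -20 -1 30 40
      std::cout << '\n';
      for (int v : ParseCsvSketch(",,10,,20,"))
        std::cout << v << ' ';  // -1 -1 10 -1 20 -1
      std::cout << '\n';
    }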
+VideoStream VideoQualityTest::DefaultVideoStream(const Params& params, + size_t video_idx) { + VideoStream stream; + stream.width = params.video[video_idx].width; + stream.height = params.video[video_idx].height; + stream.max_framerate = params.video[video_idx].fps; + stream.min_bitrate_bps = params.video[video_idx].min_bitrate_bps; + stream.target_bitrate_bps = params.video[video_idx].target_bitrate_bps; + stream.max_bitrate_bps = params.video[video_idx].max_bitrate_bps; + stream.max_qp = kDefaultMaxQp; + stream.num_temporal_layers = params.video[video_idx].num_temporal_layers; + stream.active = true; + return stream; +} + +// Static. +VideoStream VideoQualityTest::DefaultThumbnailStream() { + VideoStream stream; + stream.width = 320; + stream.height = 180; + stream.max_framerate = 7; + stream.min_bitrate_bps = 7500; + stream.target_bitrate_bps = 37500; + stream.max_bitrate_bps = 50000; + stream.max_qp = kDefaultMaxQp; + return stream; +} + +// Static. +void VideoQualityTest::FillScalabilitySettings( + Params* params, + size_t video_idx, + const std::vector& stream_descriptors, + int num_streams, + size_t selected_stream, + int num_spatial_layers, + int selected_sl, + InterLayerPredMode inter_layer_pred, + const std::vector& sl_descriptors) { + if (params->ss[video_idx].streams.empty() && + params->ss[video_idx].infer_streams) { + webrtc::VideoEncoder::EncoderInfo encoder_info; + webrtc::VideoEncoderConfig encoder_config; + encoder_config.codec_type = + PayloadStringToCodecType(params->video[video_idx].codec); + encoder_config.content_type = + params->screenshare[video_idx].enabled + ? webrtc::VideoEncoderConfig::ContentType::kScreen + : webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo; + encoder_config.max_bitrate_bps = params->video[video_idx].max_bitrate_bps; + encoder_config.min_transmit_bitrate_bps = + params->video[video_idx].min_transmit_bps; + encoder_config.number_of_streams = num_streams; + encoder_config.spatial_layers = params->ss[video_idx].spatial_layers; + encoder_config.simulcast_layers = std::vector(num_streams); + encoder_config.video_stream_factory = + rtc::make_ref_counted( + params->video[video_idx].codec, kDefaultMaxQp, + params->screenshare[video_idx].enabled, true, encoder_info); + params->ss[video_idx].streams = + encoder_config.video_stream_factory->CreateEncoderStreams( + params->video[video_idx].width, params->video[video_idx].height, + encoder_config); + } else { + // Read VideoStream and SpatialLayer elements from a list of comma separated + // lists. To use a default value for an element, use -1 or leave empty. + // Validity checks performed in CheckParamsAndInjectionComponents. + RTC_CHECK(params->ss[video_idx].streams.empty()); + for (const auto& descriptor : stream_descriptors) { + if (descriptor.empty()) + continue; + VideoStream stream = + VideoQualityTest::DefaultVideoStream(*params, video_idx); + std::vector v = VideoQualityTest::ParseCSV(descriptor); + if (v[0] != -1) + stream.width = static_cast(v[0]); + if (v[1] != -1) + stream.height = static_cast(v[1]); + if (v[2] != -1) + stream.max_framerate = v[2]; + if (v[3] != -1) + stream.min_bitrate_bps = v[3]; + if (v[4] != -1) + stream.target_bitrate_bps = v[4]; + if (v[5] != -1) + stream.max_bitrate_bps = v[5]; + if (v.size() > 6 && v[6] != -1) + stream.max_qp = v[6]; + if (v.size() > 7 && v[7] != -1) { + stream.num_temporal_layers = v[7]; + } else { + // Automatic TL thresholds for more than two layers not supported. 
+ RTC_CHECK_LE(params->video[video_idx].num_temporal_layers, 2); + } + params->ss[video_idx].streams.push_back(stream); + } + } + + params->ss[video_idx].num_spatial_layers = std::max(1, num_spatial_layers); + params->ss[video_idx].selected_stream = selected_stream; + + params->ss[video_idx].selected_sl = selected_sl; + params->ss[video_idx].inter_layer_pred = inter_layer_pred; + RTC_CHECK(params->ss[video_idx].spatial_layers.empty()); + for (const auto& descriptor : sl_descriptors) { + if (descriptor.empty()) + continue; + std::vector v = VideoQualityTest::ParseCSV(descriptor); + RTC_CHECK_EQ(v.size(), 8); + + SpatialLayer layer = {0}; + layer.width = v[0]; + layer.height = v[1]; + layer.maxFramerate = v[2]; + layer.numberOfTemporalLayers = v[3]; + layer.maxBitrate = v[4]; + layer.minBitrate = v[5]; + layer.targetBitrate = v[6]; + layer.qpMax = v[7]; + layer.active = true; + + params->ss[video_idx].spatial_layers.push_back(layer); + } +} + +void VideoQualityTest::SetupVideo(Transport* send_transport, + Transport* recv_transport) { + size_t total_streams_used = 0; + video_receive_configs_.clear(); + video_send_configs_.clear(); + video_encoder_configs_.clear(); + bool decode_all_receive_streams = true; + size_t num_video_substreams = params_.ss[0].streams.size(); + RTC_CHECK(num_video_streams_ > 0); + video_encoder_configs_.resize(num_video_streams_); + std::string generic_codec_name; + webrtc::VideoEncoder::EncoderInfo encoder_info; + for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) { + video_send_configs_.push_back(VideoSendStream::Config(send_transport)); + video_encoder_configs_.push_back(VideoEncoderConfig()); + num_video_substreams = params_.ss[video_idx].streams.size(); + RTC_CHECK_GT(num_video_substreams, 0); + for (size_t i = 0; i < num_video_substreams; ++i) + video_send_configs_[video_idx].rtp.ssrcs.push_back( + kVideoSendSsrcs[total_streams_used + i]); + + int payload_type; + if (params_.video[video_idx].codec == "H264") { + payload_type = kPayloadTypeH264; + } else if (params_.video[video_idx].codec == "VP8") { + payload_type = kPayloadTypeVP8; + } else if (params_.video[video_idx].codec == "VP9") { + payload_type = kPayloadTypeVP9; + } else if (params_.video[video_idx].codec == "multiplex") { + payload_type = kPayloadTypeVP9; + } else if (params_.video[video_idx].codec == "FakeCodec") { + payload_type = kFakeVideoSendPayloadType; + } else { + RTC_CHECK(generic_codec_name.empty() || + generic_codec_name == params_.video[video_idx].codec) + << "Supplying multiple generic codecs is unsupported."; + RTC_LOG(LS_INFO) << "Treating codec " << params_.video[video_idx].codec + << " as generic."; + payload_type = kPayloadTypeGeneric; + generic_codec_name = params_.video[video_idx].codec; + } + video_send_configs_[video_idx].encoder_settings.encoder_factory = + (video_idx == 0) ? 
&video_encoder_factory_with_analyzer_ + : &video_encoder_factory_; + video_send_configs_[video_idx].encoder_settings.bitrate_allocator_factory = + video_bitrate_allocator_factory_.get(); + + video_send_configs_[video_idx].rtp.payload_name = + params_.video[video_idx].codec; + video_send_configs_[video_idx].rtp.payload_type = payload_type; + video_send_configs_[video_idx].rtp.nack.rtp_history_ms = kNackRtpHistoryMs; + video_send_configs_[video_idx].rtp.rtx.payload_type = kSendRtxPayloadType; + for (size_t i = 0; i < num_video_substreams; ++i) { + video_send_configs_[video_idx].rtp.rtx.ssrcs.push_back( + kSendRtxSsrcs[i + total_streams_used]); + } + video_send_configs_[video_idx].rtp.extensions.clear(); + if (params_.call.send_side_bwe) { + video_send_configs_[video_idx].rtp.extensions.emplace_back( + RtpExtension::kTransportSequenceNumberUri, + kTransportSequenceNumberExtensionId); + } else { + video_send_configs_[video_idx].rtp.extensions.emplace_back( + RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId); + } + + if (params_.call.generic_descriptor) { + video_send_configs_[video_idx].rtp.extensions.emplace_back( + RtpExtension::kGenericFrameDescriptorUri00, + kGenericFrameDescriptorExtensionId00); + } + + video_send_configs_[video_idx].rtp.extensions.emplace_back( + RtpExtension::kVideoContentTypeUri, kVideoContentTypeExtensionId); + video_send_configs_[video_idx].rtp.extensions.emplace_back( + RtpExtension::kVideoTimingUri, kVideoTimingExtensionId); + + video_encoder_configs_[video_idx].video_format.name = + params_.video[video_idx].codec; + + video_encoder_configs_[video_idx].video_format.parameters = + params_.video[video_idx].sdp_params; + + video_encoder_configs_[video_idx].codec_type = + PayloadStringToCodecType(params_.video[video_idx].codec); + + video_encoder_configs_[video_idx].min_transmit_bitrate_bps = + params_.video[video_idx].min_transmit_bps; + + video_send_configs_[video_idx].suspend_below_min_bitrate = + params_.video[video_idx].suspend_below_min_bitrate; + + video_encoder_configs_[video_idx].number_of_streams = + params_.ss[video_idx].streams.size(); + video_encoder_configs_[video_idx].max_bitrate_bps = 0; + for (size_t i = 0; i < params_.ss[video_idx].streams.size(); ++i) { + video_encoder_configs_[video_idx].max_bitrate_bps += + params_.ss[video_idx].streams[i].max_bitrate_bps; + } + video_encoder_configs_[video_idx].simulcast_layers = + std::vector(params_.ss[video_idx].streams.size()); + if (!params_.ss[video_idx].infer_streams) { + video_encoder_configs_[video_idx].simulcast_layers = + params_.ss[video_idx].streams; + } + video_encoder_configs_[video_idx].video_stream_factory = + rtc::make_ref_counted( + params_.video[video_idx].codec, + params_.ss[video_idx].streams[0].max_qp, + params_.screenshare[video_idx].enabled, true, encoder_info); + + video_encoder_configs_[video_idx].spatial_layers = + params_.ss[video_idx].spatial_layers; + + video_encoder_configs_[video_idx].frame_drop_enabled = true; + + decode_all_receive_streams = params_.ss[video_idx].selected_stream == + params_.ss[video_idx].streams.size(); + absl::optional decode_sub_stream; + if (!decode_all_receive_streams) + decode_sub_stream = params_.ss[video_idx].selected_stream; + CreateMatchingVideoReceiveConfigs( + video_send_configs_[video_idx], recv_transport, + params_.call.send_side_bwe, &video_decoder_factory_, decode_sub_stream, + true, kNackRtpHistoryMs); + + if (params_.screenshare[video_idx].enabled) { + // Fill out codec settings. 
+ video_encoder_configs_[video_idx].content_type = + VideoEncoderConfig::ContentType::kScreen; + degradation_preference_ = DegradationPreference::MAINTAIN_RESOLUTION; + if (params_.video[video_idx].codec == "VP8") { + VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); + vp8_settings.denoisingOn = false; + vp8_settings.numberOfTemporalLayers = static_cast( + params_.video[video_idx].num_temporal_layers); + video_encoder_configs_[video_idx].encoder_specific_settings = + rtc::make_ref_counted< + VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); + } else if (params_.video[video_idx].codec == "VP9") { + VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); + vp9_settings.denoisingOn = false; + vp9_settings.automaticResizeOn = false; + vp9_settings.numberOfTemporalLayers = static_cast( + params_.video[video_idx].num_temporal_layers); + vp9_settings.numberOfSpatialLayers = static_cast( + params_.ss[video_idx].num_spatial_layers); + vp9_settings.interLayerPred = params_.ss[video_idx].inter_layer_pred; + // High FPS vp9 screenshare requires flexible mode. + if (params_.ss[video_idx].num_spatial_layers > 1) { + vp9_settings.flexibleMode = true; + } + video_encoder_configs_[video_idx].encoder_specific_settings = + rtc::make_ref_counted< + VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); + } + } else if (params_.ss[video_idx].num_spatial_layers > 1) { + // If SVC mode without screenshare, still need to set codec specifics. + RTC_CHECK(params_.video[video_idx].codec == "VP9"); + VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); + vp9_settings.numberOfTemporalLayers = static_cast( + params_.video[video_idx].num_temporal_layers); + vp9_settings.numberOfSpatialLayers = + static_cast(params_.ss[video_idx].num_spatial_layers); + vp9_settings.interLayerPred = params_.ss[video_idx].inter_layer_pred; + vp9_settings.automaticResizeOn = false; + video_encoder_configs_[video_idx].encoder_specific_settings = + rtc::make_ref_counted( + vp9_settings); + RTC_DCHECK_EQ(video_encoder_configs_[video_idx].simulcast_layers.size(), + 1); + // Min bitrate will be enforced by spatial layer config instead. + video_encoder_configs_[video_idx].simulcast_layers[0].min_bitrate_bps = 0; + } else if (params_.video[video_idx].automatic_scaling) { + if (params_.video[video_idx].codec == "VP8") { + VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); + vp8_settings.automaticResizeOn = true; + video_encoder_configs_[video_idx].encoder_specific_settings = + rtc::make_ref_counted< + VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); + } else if (params_.video[video_idx].codec == "VP9") { + VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); + // Only enable quality scaler for single spatial layer. + vp9_settings.automaticResizeOn = + params_.ss[video_idx].num_spatial_layers == 1; + video_encoder_configs_[video_idx].encoder_specific_settings = + rtc::make_ref_counted< + VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); + } else if (params_.video[video_idx].codec == "H264") { + // Quality scaling is always on for H.264. + } else if (params_.video[video_idx].codec == cricket::kAv1CodecName) { + // TODO(bugs.webrtc.org/11404): Propagate the flag to + // aom_codec_enc_cfg_t::rc_resize_mode in Av1 encoder wrapper. + // Until then do nothing, specially do not crash. 
+      } else {
+        RTC_DCHECK_NOTREACHED()
+            << "Automatic scaling not supported for codec "
+            << params_.video[video_idx].codec << ", stream " << video_idx;
+      }
+    } else {
+      // Default mode. Single SL, no automatic_scaling.
+      if (params_.video[video_idx].codec == "VP8") {
+        VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
+        vp8_settings.automaticResizeOn = false;
+        video_encoder_configs_[video_idx].encoder_specific_settings =
+            rtc::make_ref_counted<
+                VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
+      } else if (params_.video[video_idx].codec == "VP9") {
+        VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
+        vp9_settings.automaticResizeOn = false;
+        video_encoder_configs_[video_idx].encoder_specific_settings =
+            rtc::make_ref_counted<
+                VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
+      } else if (params_.video[video_idx].codec == "H264") {
+        video_encoder_configs_[video_idx].encoder_specific_settings = nullptr;
+      }
+    }
+    total_streams_used += num_video_substreams;
+  }
+
+  // FEC is only supported in single-video-stream mode so far.
+  if (params_.video[0].flexfec) {
+    if (decode_all_receive_streams) {
+      SetSendFecConfig(GetVideoSendConfig()->rtp.ssrcs);
+    } else {
+      SetSendFecConfig({kVideoSendSsrcs[params_.ss[0].selected_stream]});
+    }
+
+    CreateMatchingFecConfig(recv_transport, *GetVideoSendConfig());
+    GetFlexFecConfig()->rtp.transport_cc = params_.call.send_side_bwe;
+    if (params_.call.send_side_bwe) {
+      GetFlexFecConfig()->rtp.extensions.push_back(
+          RtpExtension(RtpExtension::kTransportSequenceNumberUri,
+                       kTransportSequenceNumberExtensionId));
+    } else {
+      GetFlexFecConfig()->rtp.extensions.push_back(
+          RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId));
+    }
+  }
+
+  if (params_.video[0].ulpfec) {
+    SetSendUlpFecConfig(GetVideoSendConfig());
+    if (decode_all_receive_streams) {
+      for (auto& receive_config : video_receive_configs_) {
+        SetReceiveUlpFecConfig(&receive_config);
+      }
+    } else {
+      SetReceiveUlpFecConfig(
+          &video_receive_configs_[params_.ss[0].selected_stream]);
+    }
+  }
+}
+
+void VideoQualityTest::SetupThumbnails(Transport* send_transport,
+                                       Transport* recv_transport) {
+  for (int i = 0; i < params_.call.num_thumbnails; ++i) {
+    // Thumbnails will be sent in the other direction: from receiver_call to
+    // sender_call.
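Each thumbnail stream configured below reuses DefaultThumbnailStream() from earlier in this file: a fixed 320x180 stream at 7 fps with 7500/37500/50000 bps min/target/max bitrate. A minimal standalone sketch of just those constants, with a hypothetical struct name (not the WebRTC VideoStream type):

    #include <cstdio>

    // Hypothetical plain struct mirroring the fields DefaultThumbnailStream() fills in.
    struct ThumbnailStreamSketch {
      int width = 320;
      int height = 180;
      int max_framerate = 7;
      int min_bitrate_bps = 7500;
      int target_bitrate_bps = 37500;
      int max_bitrate_bps = 50000;
    };

    int main() {
      ThumbnailStreamSketch s;
      std::printf("%dx%d @ %d fps, %d..%d..%d bps\n", s.width, s.height,
                  s.max_framerate, s.min_bitrate_bps, s.target_bitrate_bps,
                  s.max_bitrate_bps);
    }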
+ VideoSendStream::Config thumbnail_send_config(recv_transport); + thumbnail_send_config.rtp.ssrcs.push_back(kThumbnailSendSsrcStart + i); + thumbnail_send_config.encoder_settings.encoder_factory = + &video_encoder_factory_; + thumbnail_send_config.encoder_settings.bitrate_allocator_factory = + video_bitrate_allocator_factory_.get(); + thumbnail_send_config.rtp.payload_name = params_.video[0].codec; + thumbnail_send_config.rtp.payload_type = kPayloadTypeVP8; + thumbnail_send_config.rtp.nack.rtp_history_ms = kNackRtpHistoryMs; + thumbnail_send_config.rtp.rtx.payload_type = kSendRtxPayloadType; + thumbnail_send_config.rtp.rtx.ssrcs.push_back(kThumbnailRtxSsrcStart + i); + thumbnail_send_config.rtp.extensions.clear(); + if (params_.call.send_side_bwe) { + thumbnail_send_config.rtp.extensions.push_back( + RtpExtension(RtpExtension::kTransportSequenceNumberUri, + kTransportSequenceNumberExtensionId)); + } else { + thumbnail_send_config.rtp.extensions.push_back( + RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId)); + } + + VideoEncoderConfig thumbnail_encoder_config; + thumbnail_encoder_config.codec_type = kVideoCodecVP8; + thumbnail_encoder_config.video_format.name = "VP8"; + thumbnail_encoder_config.min_transmit_bitrate_bps = 7500; + thumbnail_send_config.suspend_below_min_bitrate = + params_.video[0].suspend_below_min_bitrate; + thumbnail_encoder_config.number_of_streams = 1; + thumbnail_encoder_config.max_bitrate_bps = 50000; + std::vector streams{params_.ss[0].streams[0]}; + thumbnail_encoder_config.video_stream_factory = + rtc::make_ref_counted(streams); + thumbnail_encoder_config.spatial_layers = params_.ss[0].spatial_layers; + + thumbnail_encoder_configs_.push_back(thumbnail_encoder_config.Copy()); + thumbnail_send_configs_.push_back(thumbnail_send_config.Copy()); + + AddMatchingVideoReceiveConfigs( + &thumbnail_receive_configs_, thumbnail_send_config, send_transport, + params_.call.send_side_bwe, &video_decoder_factory_, absl::nullopt, + false, kNackRtpHistoryMs); + } + for (size_t i = 0; i < thumbnail_send_configs_.size(); ++i) { + thumbnail_send_streams_.push_back(receiver_call_->CreateVideoSendStream( + thumbnail_send_configs_[i].Copy(), + thumbnail_encoder_configs_[i].Copy())); + } + for (size_t i = 0; i < thumbnail_receive_configs_.size(); ++i) { + thumbnail_receive_streams_.push_back(sender_call_->CreateVideoReceiveStream( + thumbnail_receive_configs_[i].Copy())); + } +} + +void VideoQualityTest::DestroyThumbnailStreams() { + for (VideoSendStream* thumbnail_send_stream : thumbnail_send_streams_) { + receiver_call_->DestroyVideoSendStream(thumbnail_send_stream); + } + thumbnail_send_streams_.clear(); + for (VideoReceiveStreamInterface* thumbnail_receive_stream : + thumbnail_receive_streams_) { + sender_call_->DestroyVideoReceiveStream(thumbnail_receive_stream); + } + thumbnail_send_streams_.clear(); + thumbnail_receive_streams_.clear(); + for (std::unique_ptr>& video_capturer : + thumbnail_capturers_) { + video_capturer.reset(); + } +} + +void VideoQualityTest::SetupThumbnailCapturers(size_t num_thumbnail_streams) { + VideoStream thumbnail = DefaultThumbnailStream(); + for (size_t i = 0; i < num_thumbnail_streams; ++i) { + auto frame_generator_capturer = + std::make_unique( + clock_, + test::CreateSquareFrameGenerator(static_cast(thumbnail.width), + static_cast(thumbnail.height), + absl::nullopt, absl::nullopt), + thumbnail.max_framerate, *task_queue_factory_); + EXPECT_TRUE(frame_generator_capturer->Init()); + 
thumbnail_capturers_.push_back(std::move(frame_generator_capturer)); + } +} + +std::unique_ptr +VideoQualityTest::CreateFrameGenerator(size_t video_idx) { + // Setup frame generator. + const size_t kWidth = 1850; + const size_t kHeight = 1110; + std::unique_ptr frame_generator; + if (params_.screenshare[video_idx].generate_slides) { + frame_generator = test::CreateSlideFrameGenerator( + kWidth, kHeight, + params_.screenshare[video_idx].slide_change_interval * + params_.video[video_idx].fps); + } else { + std::vector slides = params_.screenshare[video_idx].slides; + if (slides.empty()) { + slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv")); + slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv")); + slides.push_back(test::ResourcePath("photo_1850_1110", "yuv")); + slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv")); + } + if (params_.screenshare[video_idx].scroll_duration == 0) { + // Cycle image every slide_change_interval seconds. + frame_generator = test::CreateFromYuvFileFrameGenerator( + slides, kWidth, kHeight, + params_.screenshare[video_idx].slide_change_interval * + params_.video[video_idx].fps); + } else { + RTC_CHECK_LE(params_.video[video_idx].width, kWidth); + RTC_CHECK_LE(params_.video[video_idx].height, kHeight); + RTC_CHECK_GT(params_.screenshare[video_idx].slide_change_interval, 0); + const int kPauseDurationMs = + (params_.screenshare[video_idx].slide_change_interval - + params_.screenshare[video_idx].scroll_duration) * + 1000; + RTC_CHECK_LE(params_.screenshare[video_idx].scroll_duration, + params_.screenshare[video_idx].slide_change_interval); + + frame_generator = test::CreateScrollingInputFromYuvFilesFrameGenerator( + clock_, slides, kWidth, kHeight, params_.video[video_idx].width, + params_.video[video_idx].height, + params_.screenshare[video_idx].scroll_duration * 1000, + kPauseDurationMs); + } + } + return frame_generator; +} + +void VideoQualityTest::CreateCapturers() { + RTC_DCHECK(video_sources_.empty()); + video_sources_.resize(num_video_streams_); + for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) { + std::unique_ptr frame_generator; + if (params_.screenshare[video_idx].enabled) { + frame_generator = CreateFrameGenerator(video_idx); + } else if (params_.video[video_idx].clip_path == "Generator") { + frame_generator = test::CreateSquareFrameGenerator( + static_cast(params_.video[video_idx].width), + static_cast(params_.video[video_idx].height), absl::nullopt, + absl::nullopt); + } else if (params_.video[video_idx].clip_path == "GeneratorI420A") { + frame_generator = test::CreateSquareFrameGenerator( + static_cast(params_.video[video_idx].width), + static_cast(params_.video[video_idx].height), + test::FrameGeneratorInterface::OutputType::kI420A, absl::nullopt); + } else if (params_.video[video_idx].clip_path == "GeneratorI010") { + frame_generator = test::CreateSquareFrameGenerator( + static_cast(params_.video[video_idx].width), + static_cast(params_.video[video_idx].height), + test::FrameGeneratorInterface::OutputType::kI010, absl::nullopt); + } else if (params_.video[video_idx].clip_path == "GeneratorNV12") { + frame_generator = test::CreateSquareFrameGenerator( + static_cast(params_.video[video_idx].width), + static_cast(params_.video[video_idx].height), + test::FrameGeneratorInterface::OutputType::kNV12, absl::nullopt); + } else if (params_.video[video_idx].clip_path.empty()) { + video_sources_[video_idx] = test::CreateVideoCapturer( + params_.video[video_idx].width, 
params_.video[video_idx].height, + params_.video[video_idx].fps, + params_.video[video_idx].capture_device_index); + if (video_sources_[video_idx]) { + continue; + } else { + // Failed to get actual camera, use chroma generator as backup. + frame_generator = test::CreateSquareFrameGenerator( + static_cast(params_.video[video_idx].width), + static_cast(params_.video[video_idx].height), absl::nullopt, + absl::nullopt); + } + } else { + frame_generator = test::CreateFromYuvFileFrameGenerator( + {params_.video[video_idx].clip_path}, params_.video[video_idx].width, + params_.video[video_idx].height, 1); + ASSERT_TRUE(frame_generator) << "Could not create capturer for " + << params_.video[video_idx].clip_path + << ".yuv. Is this file present?"; + } + ASSERT_TRUE(frame_generator); + auto frame_generator_capturer = + std::make_unique( + clock_, std::move(frame_generator), params_.video[video_idx].fps, + *task_queue_factory_); + EXPECT_TRUE(frame_generator_capturer->Init()); + video_sources_[video_idx] = std::move(frame_generator_capturer); + } +} + +void VideoQualityTest::StartAudioStreams() { + audio_send_stream_->Start(); + for (AudioReceiveStreamInterface* audio_recv_stream : audio_receive_streams_) + audio_recv_stream->Start(); +} + +void VideoQualityTest::StartThumbnails() { + for (VideoSendStream* send_stream : thumbnail_send_streams_) + send_stream->Start(); + for (VideoReceiveStreamInterface* receive_stream : thumbnail_receive_streams_) + receive_stream->Start(); +} + +void VideoQualityTest::StopThumbnails() { + for (VideoReceiveStreamInterface* receive_stream : thumbnail_receive_streams_) + receive_stream->Stop(); + for (VideoSendStream* send_stream : thumbnail_send_streams_) + send_stream->Stop(); +} + +std::unique_ptr +VideoQualityTest::CreateSendTransport() { + std::unique_ptr network_behavior = nullptr; + if (injection_components_->sender_network == nullptr) { + network_behavior = std::make_unique(*params_.config); + } else { + network_behavior = std::move(injection_components_->sender_network); + } + return std::make_unique( + task_queue(), + std::make_unique(clock_, std::move(network_behavior)), + sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9, + params_.video[0].selected_tl, params_.ss[0].selected_sl, + payload_type_map_, kVideoSendSsrcs[0], + static_cast(kVideoSendSsrcs[0] + params_.ss[0].streams.size() - + 1)); +} + +std::unique_ptr +VideoQualityTest::CreateReceiveTransport() { + std::unique_ptr network_behavior = nullptr; + if (injection_components_->receiver_network == nullptr) { + network_behavior = std::make_unique(*params_.config); + } else { + network_behavior = std::move(injection_components_->receiver_network); + } + return std::make_unique( + task_queue(), + std::make_unique(clock_, std::move(network_behavior)), + receiver_call_.get(), payload_type_map_); +} + +void VideoQualityTest::RunWithAnalyzer(const Params& params) { + num_video_streams_ = params.call.dual_video ? 2 : 1; + std::unique_ptr send_transport; + std::unique_ptr recv_transport; + FILE* graph_data_output_file = nullptr; + + params_ = params; + // TODO(ivica): Merge with RunWithRenderer and use a flag / argument to + // differentiate between the analyzer and the renderer case. 
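CreateSendTransport and CreateReceiveTransport above follow the same pattern as the constructor: prefer an injected component when the caller supplied one, otherwise fall back to a built-in simulated-network default built from params_.config. A minimal standalone sketch of that selection, with hypothetical stand-in types (not the WebRTC interfaces):

    #include <iostream>
    #include <memory>

    // Hypothetical stand-ins for an injectable component and its default.
    struct NetworkBehavior {
      virtual ~NetworkBehavior() = default;
      virtual const char* Name() const { return "SimulatedDefault"; }
    };
    struct InjectedBehavior : NetworkBehavior {
      const char* Name() const override { return "Injected"; }
    };

    std::unique_ptr<NetworkBehavior> PickBehavior(
        std::unique_ptr<NetworkBehavior> injected) {
      if (injected != nullptr)
        return injected;                           // prefer the injected component
      return std::make_unique<NetworkBehavior>();  // otherwise build the default
    }

    int main() {
      std::cout << PickBehavior(nullptr)->Name() << '\n';  // SimulatedDefault
      std::cout << PickBehavior(std::make_unique<InjectedBehavior>())->Name()
                << '\n';                                   // Injected
    }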
+ CheckParamsAndInjectionComponents(); + + if (!params_.analyzer.graph_data_output_filename.empty()) { + graph_data_output_file = + fopen(params_.analyzer.graph_data_output_filename.c_str(), "w"); + RTC_CHECK(graph_data_output_file) + << "Can't open the file " << params_.analyzer.graph_data_output_filename + << "!"; + } + + if (!params.logging.rtc_event_log_name.empty()) { + send_event_log_ = rtc_event_log_factory_.CreateRtcEventLog( + RtcEventLog::EncodingType::Legacy); + recv_event_log_ = rtc_event_log_factory_.CreateRtcEventLog( + RtcEventLog::EncodingType::Legacy); + std::unique_ptr send_output( + std::make_unique( + params.logging.rtc_event_log_name + "_send", + RtcEventLog::kUnlimitedOutput)); + std::unique_ptr recv_output( + std::make_unique( + params.logging.rtc_event_log_name + "_recv", + RtcEventLog::kUnlimitedOutput)); + bool event_log_started = + send_event_log_->StartLogging(std::move(send_output), + RtcEventLog::kImmediateOutput) && + recv_event_log_->StartLogging(std::move(recv_output), + RtcEventLog::kImmediateOutput); + RTC_DCHECK(event_log_started); + } else { + send_event_log_ = std::make_unique(); + recv_event_log_ = std::make_unique(); + } + + SendTask(task_queue(), [this, ¶ms, &send_transport, &recv_transport]() { + Call::Config send_call_config(send_event_log_.get()); + Call::Config recv_call_config(recv_event_log_.get()); + send_call_config.bitrate_config = params.call.call_bitrate_config; + recv_call_config.bitrate_config = params.call.call_bitrate_config; + if (params_.audio.enabled) + InitializeAudioDevice(&send_call_config, &recv_call_config, + params_.audio.use_real_adm); + + CreateCalls(send_call_config, recv_call_config); + send_transport = CreateSendTransport(); + recv_transport = CreateReceiveTransport(); + }); + + std::string graph_title = params_.analyzer.graph_title; + if (graph_title.empty()) + graph_title = VideoQualityTest::GenerateGraphTitle(); + bool is_quick_test_enabled = field_trial::IsEnabled("WebRTC-QuickPerfTest"); + analyzer_ = std::make_unique( + send_transport.get(), params_.analyzer.test_label, + params_.analyzer.avg_psnr_threshold, params_.analyzer.avg_ssim_threshold, + is_quick_test_enabled + ? kFramesSentInQuickTest + : params_.analyzer.test_durations_secs * params_.video[0].fps, + is_quick_test_enabled + ? 
TimeDelta::Millis(1) + : TimeDelta::Seconds(params_.analyzer.test_durations_secs), + graph_data_output_file, graph_title, + kVideoSendSsrcs[params_.ss[0].selected_stream], + kSendRtxSsrcs[params_.ss[0].selected_stream], + static_cast(params_.ss[0].selected_stream), + params.ss[0].selected_sl, params_.video[0].selected_tl, + is_quick_test_enabled, clock_, params_.logging.rtp_dump_name, + task_queue()); + + SendTask(task_queue(), [&]() { + analyzer_->SetCall(sender_call_.get()); + analyzer_->SetReceiver(receiver_call_->Receiver()); + send_transport->SetReceiver(analyzer_.get()); + recv_transport->SetReceiver(sender_call_->Receiver()); + + SetupVideo(analyzer_.get(), recv_transport.get()); + SetupThumbnails(analyzer_.get(), recv_transport.get()); + video_receive_configs_[params_.ss[0].selected_stream].renderer = + analyzer_.get(); + + CreateFlexfecStreams(); + CreateVideoStreams(); + analyzer_->SetSendStream(video_send_streams_[0]); + analyzer_->SetReceiveStream( + video_receive_streams_[params_.ss[0].selected_stream]); + + GetVideoSendStream()->SetSource(analyzer_->OutputInterface(), + degradation_preference_); + SetupThumbnailCapturers(params_.call.num_thumbnails); + for (size_t i = 0; i < thumbnail_send_streams_.size(); ++i) { + thumbnail_send_streams_[i]->SetSource(thumbnail_capturers_[i].get(), + degradation_preference_); + } + + CreateCapturers(); + + analyzer_->SetSource(video_sources_[0].get(), true); + + for (size_t video_idx = 1; video_idx < num_video_streams_; ++video_idx) { + video_send_streams_[video_idx]->SetSource(video_sources_[video_idx].get(), + degradation_preference_); + } + + if (params_.audio.enabled) { + SetupAudio(send_transport.get()); + StartAudioStreams(); + analyzer_->SetAudioReceiveStream(audio_receive_streams_[0]); + } + StartVideoStreams(); + StartThumbnails(); + analyzer_->StartMeasuringCpuProcessTime(); + }); + + analyzer_->Wait(); + + SendTask(task_queue(), [&]() { + StopThumbnails(); + Stop(); + + DestroyStreams(); + DestroyThumbnailStreams(); + + if (graph_data_output_file) + fclose(graph_data_output_file); + + send_transport.reset(); + recv_transport.reset(); + + DestroyCalls(); + }); + analyzer_ = nullptr; +} + +rtc::scoped_refptr VideoQualityTest::CreateAudioDevice() { +#ifdef WEBRTC_WIN + RTC_LOG(LS_INFO) << "Using latest version of ADM on Windows"; + // We must initialize the COM library on a thread before we calling any of + // the library functions. All COM functions in the ADM will return + // CO_E_NOTINITIALIZED otherwise. The legacy ADM for Windows used internal + // COM initialization but the new ADM requires COM to be initialized + // externally. + com_initializer_ = + std::make_unique(ScopedCOMInitializer::kMTA); + RTC_CHECK(com_initializer_->Succeeded()); + RTC_CHECK(webrtc_win::core_audio_utility::IsSupported()); + RTC_CHECK(webrtc_win::core_audio_utility::IsMMCSSSupported()); + return CreateWindowsCoreAudioAudioDeviceModule(task_queue_factory_.get()); +#else + // Use legacy factory method on all platforms except Windows. + return AudioDeviceModule::Create(AudioDeviceModule::kPlatformDefaultAudio, + task_queue_factory_.get()); +#endif +} + +void VideoQualityTest::InitializeAudioDevice(Call::Config* send_call_config, + Call::Config* recv_call_config, + bool use_real_adm) { + rtc::scoped_refptr audio_device; + if (use_real_adm) { + // Run test with real ADM (using default audio devices) if user has + // explicitly set the --audio and --use_real_adm command-line flags. 
+    audio_device = CreateAudioDevice();
+  } else {
+    // By default, create a test ADM which fakes audio.
+    audio_device = TestAudioDeviceModule::Create(
+        task_queue_factory_.get(),
+        TestAudioDeviceModule::CreatePulsedNoiseCapturer(32000, 48000),
+        TestAudioDeviceModule::CreateDiscardRenderer(48000), 1.f);
+  }
+  RTC_CHECK(audio_device);
+
+  AudioState::Config audio_state_config;
+  audio_state_config.audio_mixer = AudioMixerImpl::Create();
+  audio_state_config.audio_processing = AudioProcessingBuilder().Create();
+  audio_state_config.audio_device_module = audio_device;
+  send_call_config->audio_state = AudioState::Create(audio_state_config);
+  recv_call_config->audio_state = AudioState::Create(audio_state_config);
+  if (use_real_adm) {
+    // The real ADM requires extra initialization: setting default devices,
+    // setting up number of channels etc. Helper class also calls
+    // AudioDeviceModule::Init().
+    webrtc::adm_helpers::Init(audio_device.get());
+  } else {
+    audio_device->Init();
+  }
+  // Always initialize the ADM before injecting a valid audio transport.
+  RTC_CHECK(audio_device->RegisterAudioCallback(
+                send_call_config->audio_state->audio_transport()) == 0);
+}
+
+void VideoQualityTest::SetupAudio(Transport* transport) {
+  AudioSendStream::Config audio_send_config(transport);
+  audio_send_config.rtp.ssrc = kAudioSendSsrc;
+
+  // Add extension to enable audio send side BWE, and allow audio bit rate
+  // adaptation.
+  audio_send_config.rtp.extensions.clear();
+  audio_send_config.send_codec_spec = AudioSendStream::Config::SendCodecSpec(
+      kAudioSendPayloadType,
+      {"OPUS",
+       48000,
+       2,
+       {{"usedtx", (params_.audio.dtx ? "1" : "0")}, {"stereo", "1"}}});
+
+  if (params_.call.send_side_bwe) {
+    audio_send_config.rtp.extensions.push_back(
+        webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri,
+                             kTransportSequenceNumberExtensionId));
+    audio_send_config.min_bitrate_bps = kOpusMinBitrateBps;
+    audio_send_config.max_bitrate_bps = kOpusBitrateFbBps;
+    audio_send_config.send_codec_spec->transport_cc_enabled = true;
+    // Only allow ANA when send-side BWE is enabled.
+    audio_send_config.audio_network_adaptor_config = params_.audio.ana_config;
+  }
+  audio_send_config.encoder_factory = audio_encoder_factory_;
+  SetAudioConfig(audio_send_config);
+
+  std::string sync_group;
+  if (params_.video[0].enabled && params_.audio.sync_video)
+    sync_group = kSyncGroup;
+
+  CreateMatchingAudioConfigs(transport, sync_group);
+  CreateAudioStreams();
+}
+
+void VideoQualityTest::RunWithRenderers(const Params& params) {
+  RTC_LOG(LS_INFO) << __FUNCTION__;
+  num_video_streams_ = params.call.dual_video ?
2 : 1; + std::unique_ptr send_transport; + std::unique_ptr recv_transport; + std::unique_ptr local_preview; + std::vector> loopback_renderers; + + if (!params.logging.rtc_event_log_name.empty()) { + send_event_log_ = rtc_event_log_factory_.CreateRtcEventLog( + RtcEventLog::EncodingType::Legacy); + recv_event_log_ = rtc_event_log_factory_.CreateRtcEventLog( + RtcEventLog::EncodingType::Legacy); + std::unique_ptr send_output( + std::make_unique( + params.logging.rtc_event_log_name + "_send", + RtcEventLog::kUnlimitedOutput)); + std::unique_ptr recv_output( + std::make_unique( + params.logging.rtc_event_log_name + "_recv", + RtcEventLog::kUnlimitedOutput)); + bool event_log_started = + send_event_log_->StartLogging(std::move(send_output), + /*output_period_ms=*/5000) && + recv_event_log_->StartLogging(std::move(recv_output), + /*output_period_ms=*/5000); + RTC_DCHECK(event_log_started); + } else { + send_event_log_ = std::make_unique(); + recv_event_log_ = std::make_unique(); + } + + SendTask(task_queue(), [&]() { + params_ = params; + CheckParamsAndInjectionComponents(); + + // TODO(ivica): Remove bitrate_config and use the default Call::Config(), to + // match the full stack tests. + Call::Config send_call_config(send_event_log_.get()); + send_call_config.bitrate_config = params_.call.call_bitrate_config; + Call::Config recv_call_config(recv_event_log_.get()); + + if (params_.audio.enabled) + InitializeAudioDevice(&send_call_config, &recv_call_config, + params_.audio.use_real_adm); + + CreateCalls(send_call_config, recv_call_config); + + // TODO(minyue): consider if this is a good transport even for audio only + // calls. + send_transport = CreateSendTransport(); + + recv_transport = CreateReceiveTransport(); + + // TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at + // least share as much code as possible. That way this test would also match + // the full stack tests better. + send_transport->SetReceiver(receiver_call_->Receiver()); + recv_transport->SetReceiver(sender_call_->Receiver()); + + if (params_.video[0].enabled) { + // Create video renderers. 
+ SetupVideo(send_transport.get(), recv_transport.get()); + size_t num_streams_processed = 0; + for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) { + const size_t selected_stream_id = params_.ss[video_idx].selected_stream; + const size_t num_streams = params_.ss[video_idx].streams.size(); + if (selected_stream_id == num_streams) { + for (size_t stream_id = 0; stream_id < num_streams; ++stream_id) { + rtc::StringBuilder oss; + oss << "Loopback Video #" << video_idx << " - Stream #" + << static_cast(stream_id); + loopback_renderers.emplace_back(test::VideoRenderer::Create( + oss.str().c_str(), + params_.ss[video_idx].streams[stream_id].width, + params_.ss[video_idx].streams[stream_id].height)); + video_receive_configs_[stream_id + num_streams_processed].renderer = + loopback_renderers.back().get(); + if (params_.audio.enabled && params_.audio.sync_video) + video_receive_configs_[stream_id + num_streams_processed] + .sync_group = kSyncGroup; + } + } else { + rtc::StringBuilder oss; + oss << "Loopback Video #" << video_idx; + loopback_renderers.emplace_back(test::VideoRenderer::Create( + oss.str().c_str(), + params_.ss[video_idx].streams[selected_stream_id].width, + params_.ss[video_idx].streams[selected_stream_id].height)); + video_receive_configs_[selected_stream_id + num_streams_processed] + .renderer = loopback_renderers.back().get(); + if (params_.audio.enabled && params_.audio.sync_video) + video_receive_configs_[num_streams_processed + selected_stream_id] + .sync_group = kSyncGroup; + } + num_streams_processed += num_streams; + } + CreateFlexfecStreams(); + CreateVideoStreams(); + + CreateCapturers(); + if (params_.video[0].enabled) { + // Create local preview + local_preview.reset(test::VideoRenderer::Create( + "Local Preview", params_.video[0].width, params_.video[0].height)); + + video_sources_[0]->AddOrUpdateSink(local_preview.get(), + rtc::VideoSinkWants()); + } + ConnectVideoSourcesToStreams(); + } + + if (params_.audio.enabled) { + SetupAudio(send_transport.get()); + } + + Start(); + }); + + PressEnterToContinue(task_queue()); + + SendTask(task_queue(), [&]() { + Stop(); + DestroyStreams(); + + send_transport.reset(); + recv_transport.reset(); + + local_preview.reset(); + loopback_renderers.clear(); + + DestroyCalls(); + }); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/video_quality_test.h b/TMessagesProj/jni/voip/webrtc/video/video_quality_test.h new file mode 100644 index 0000000000..d1f630557e --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/video_quality_test.h @@ -0,0 +1,147 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef VIDEO_VIDEO_QUALITY_TEST_H_ +#define VIDEO_VIDEO_QUALITY_TEST_H_ + +#include +#include +#include +#include + +#include "api/fec_controller.h" +#include "api/rtc_event_log/rtc_event_log_factory.h" +#include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/test/frame_generator_interface.h" +#include "api/test/video_quality_test_fixture.h" +#include "api/video/video_bitrate_allocator_factory.h" +#include "call/fake_network_pipe.h" +#include "media/engine/internal_decoder_factory.h" +#include "media/engine/internal_encoder_factory.h" +#include "test/call_test.h" +#include "test/layer_filtering_transport.h" +#include "video/video_analyzer.h" +#ifdef WEBRTC_WIN +#include "modules/audio_device/win/core_audio_utility_win.h" +#include "rtc_base/win/scoped_com_initializer.h" +#endif + +namespace webrtc { + +class VideoQualityTest : public test::CallTest, + public VideoQualityTestFixtureInterface { + public: + explicit VideoQualityTest( + std::unique_ptr injection_components); + + void RunWithAnalyzer(const Params& params) override; + void RunWithRenderers(const Params& params) override; + + const std::map& payload_type_map() override { + return payload_type_map_; + } + + static void FillScalabilitySettings( + Params* params, + size_t video_idx, + const std::vector& stream_descriptors, + int num_streams, + size_t selected_stream, + int num_spatial_layers, + int selected_sl, + InterLayerPredMode inter_layer_pred, + const std::vector& sl_descriptors); + + // Helper static methods. + static VideoStream DefaultVideoStream(const Params& params, size_t video_idx); + static VideoStream DefaultThumbnailStream(); + static std::vector ParseCSV(const std::string& str); + + protected: + std::map payload_type_map_; + + // No-op implementation to be able to instantiate this class from non-TEST_F + // locations. + void TestBody() override; + + // Helper methods accessing only params_. + std::string GenerateGraphTitle() const; + void CheckParamsAndInjectionComponents(); + + // Helper methods for setting up the call. + void CreateCapturers(); + std::unique_ptr CreateFrameGenerator( + size_t video_idx); + void SetupThumbnailCapturers(size_t num_thumbnail_streams); + std::unique_ptr CreateVideoDecoder( + const SdpVideoFormat& format); + std::unique_ptr CreateVideoEncoder(const SdpVideoFormat& format, + VideoAnalyzer* analyzer); + void SetupVideo(Transport* send_transport, Transport* recv_transport); + void SetupThumbnails(Transport* send_transport, Transport* recv_transport); + void StartAudioStreams(); + void StartThumbnails(); + void StopThumbnails(); + void DestroyThumbnailStreams(); + // Helper method for creating a real ADM (using hardware) for all platforms. 
+ rtc::scoped_refptr CreateAudioDevice(); + void InitializeAudioDevice(Call::Config* send_call_config, + Call::Config* recv_call_config, + bool use_real_adm); + void SetupAudio(Transport* transport); + + void StartEncodedFrameLogs(VideoReceiveStreamInterface* stream); + + virtual std::unique_ptr CreateSendTransport(); + virtual std::unique_ptr CreateReceiveTransport(); + + std::vector>> + thumbnail_capturers_; + Clock* const clock_; + const std::unique_ptr task_queue_factory_; + RtcEventLogFactory rtc_event_log_factory_; + + test::FunctionVideoDecoderFactory video_decoder_factory_; + std::unique_ptr decoder_factory_; + test::FunctionVideoEncoderFactory video_encoder_factory_; + test::FunctionVideoEncoderFactory video_encoder_factory_with_analyzer_; + std::unique_ptr + video_bitrate_allocator_factory_; + std::unique_ptr encoder_factory_; + std::vector thumbnail_send_configs_; + std::vector thumbnail_encoder_configs_; + std::vector thumbnail_send_streams_; + std::vector thumbnail_receive_configs_; + std::vector thumbnail_receive_streams_; + + int receive_logs_; + int send_logs_; + + Params params_; + std::unique_ptr injection_components_; + + // Set non-null when running with analyzer. + std::unique_ptr analyzer_; + + // Note: not same as similarly named member in CallTest. This is the number of + // separate send streams, the one in CallTest is the number of substreams for + // a single send stream. + size_t num_video_streams_; + +#ifdef WEBRTC_WIN + // Windows Core Audio based ADM needs to run on a COM initialized thread. + // Only referenced in combination with --audio --use_real_adm flags. + std::unique_ptr com_initializer_; +#endif +}; + +} // namespace webrtc + +#endif // VIDEO_VIDEO_QUALITY_TEST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.cc b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.cc deleted file mode 100644 index 27f86cfaa8..0000000000 --- a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.cc +++ /dev/null @@ -1,766 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "video/video_receive_stream.h" - -#include -#include - -#include -#include -#include -#include -#include - -#include "absl/algorithm/container.h" -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "api/crypto/frame_decryptor_interface.h" -#include "api/video/encoded_image.h" -#include "api/video_codecs/h264_profile_level_id.h" -#include "api/video_codecs/sdp_video_format.h" -#include "api/video_codecs/video_codec.h" -#include "api/video_codecs/video_decoder_factory.h" -#include "api/video_codecs/video_encoder.h" -#include "call/rtp_stream_receiver_controller_interface.h" -#include "call/rtx_receive_stream.h" -#include "common_video/include/incoming_video_stream.h" -#include "modules/utility/include/process_thread.h" -#include "modules/video_coding/include/video_codec_interface.h" -#include "modules/video_coding/include/video_coding_defines.h" -#include "modules/video_coding/include/video_error_codes.h" -#include "modules/video_coding/timing.h" -#include "modules/video_coding/utility/vp8_header_parser.h" -#include "rtc_base/checks.h" -#include "rtc_base/location.h" -#include "rtc_base/logging.h" -#include "rtc_base/strings/string_builder.h" -#include "rtc_base/system/thread_registry.h" -#include "rtc_base/time_utils.h" -#include "rtc_base/trace_event.h" -#include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" -#include "video/call_stats.h" -#include "video/frame_dumping_decoder.h" -#include "video/receive_statistics_proxy.h" - -namespace webrtc { - -namespace internal { -constexpr int VideoReceiveStream::kMaxWaitForKeyFrameMs; -} // namespace internal - -namespace { - -constexpr int kMinBaseMinimumDelayMs = 0; -constexpr int kMaxBaseMinimumDelayMs = 10000; - -constexpr int kMaxWaitForFrameMs = 3000; - -// Concrete instance of RecordableEncodedFrame wrapping needed content -// from EncodedFrame. -class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame { - public: - explicit WebRtcRecordableEncodedFrame(const EncodedFrame& frame) - : buffer_(frame.GetEncodedData()), - render_time_ms_(frame.RenderTime()), - codec_(frame.CodecSpecific()->codecType), - is_key_frame_(frame.FrameType() == VideoFrameType::kVideoFrameKey), - resolution_{frame.EncodedImage()._encodedWidth, - frame.EncodedImage()._encodedHeight} { - if (frame.ColorSpace()) { - color_space_ = *frame.ColorSpace(); - } - } - - // VideoEncodedSinkInterface::FrameBuffer - rtc::scoped_refptr encoded_buffer() - const override { - return buffer_; - } - - absl::optional color_space() const override { - return color_space_; - } - - VideoCodecType codec() const override { return codec_; } - - bool is_key_frame() const override { return is_key_frame_; } - - EncodedResolution resolution() const override { return resolution_; } - - Timestamp render_time() const override { - return Timestamp::Millis(render_time_ms_); - } - - private: - rtc::scoped_refptr buffer_; - int64_t render_time_ms_; - VideoCodecType codec_; - bool is_key_frame_; - EncodedResolution resolution_; - absl::optional color_space_; -}; - -// Video decoder class to be used for unknown codecs. Doesn't support decoding -// but logs messages to LS_ERROR. 
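The NullVideoDecoder being removed below is a plain null object: it satisfies the decoder interface but only logs and never decodes, so unknown codecs do not crash the pipeline. A minimal standalone sketch of that pattern, with a hypothetical interface (not WebRTC's VideoDecoder):

    #include <cstddef>
    #include <iostream>

    // Hypothetical minimal decoder interface.
    class DecoderInterface {
     public:
      virtual ~DecoderInterface() = default;
      virtual bool Decode(const unsigned char* data, std::size_t size) = 0;
    };

    // Null object: implements the interface, logs instead of decoding.
    class NullDecoderSketch : public DecoderInterface {
     public:
      bool Decode(const unsigned char*, std::size_t) override {
        std::cerr << "NullDecoderSketch: decoding not supported\n";
        return false;
      }
    };

    int main() {
      NullDecoderSketch decoder;
      decoder.Decode(nullptr, 0);  // safe no-op for an unknown codec
    }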
-class NullVideoDecoder : public webrtc::VideoDecoder { - public: - bool Configure(const Settings& settings) override { - RTC_LOG(LS_ERROR) << "Can't initialize NullVideoDecoder."; - return true; - } - - int32_t Decode(const webrtc::EncodedImage& input_image, - bool missing_frames, - int64_t render_time_ms) override { - RTC_LOG(LS_ERROR) << "The NullVideoDecoder doesn't support decoding."; - return WEBRTC_VIDEO_CODEC_OK; - } - - int32_t RegisterDecodeCompleteCallback( - webrtc::DecodedImageCallback* callback) override { - RTC_LOG(LS_ERROR) - << "Can't register decode complete callback on NullVideoDecoder."; - return WEBRTC_VIDEO_CODEC_OK; - } - - int32_t Release() override { return WEBRTC_VIDEO_CODEC_OK; } - - DecoderInfo GetDecoderInfo() const override { - DecoderInfo info; - info.implementation_name = "NullVideoDecoder"; - return info; - } - const char* ImplementationName() const override { return "NullVideoDecoder"; } -}; - -// TODO(https://bugs.webrtc.org/9974): Consider removing this workaround. -// Maximum time between frames before resetting the FrameBuffer to avoid RTP -// timestamps wraparound to affect FrameBuffer. -constexpr int kInactiveStreamThresholdMs = 600000; // 10 minutes. - -} // namespace - -namespace internal { - -VideoReceiveStream::VideoReceiveStream( - TaskQueueFactory* task_queue_factory, - RtpStreamReceiverControllerInterface* receiver_controller, - int num_cpu_cores, - PacketRouter* packet_router, - VideoReceiveStream::Config config, - ProcessThread* process_thread, - CallStats* call_stats, - Clock* clock, - VCMTiming* timing) - : task_queue_factory_(task_queue_factory), - transport_adapter_(config.rtcp_send_transport), - config_(std::move(config)), - num_cpu_cores_(num_cpu_cores), - process_thread_(process_thread), - clock_(clock), - call_stats_(call_stats), - source_tracker_(clock_), - stats_proxy_(config_.rtp.remote_ssrc, clock_), - rtp_receive_statistics_(ReceiveStatistics::Create(clock_)), - timing_(timing), - video_receiver_(clock_, timing_.get()), - rtp_video_stream_receiver_(clock_, - &transport_adapter_, - call_stats, - packet_router, - &config_, - rtp_receive_statistics_.get(), - &stats_proxy_, - &stats_proxy_, - process_thread_, - this, // NackSender - nullptr, // Use default KeyFrameRequestSender - this, // OnCompleteFrameCallback - config_.frame_decryptor, - config_.frame_transformer), - rtp_stream_sync_(this), - max_wait_for_keyframe_ms_(kMaxWaitForKeyFrameMs), - max_wait_for_frame_ms_(kMaxWaitForFrameMs), - decode_queue_(task_queue_factory_->CreateTaskQueue( - "DecodingQueue", - TaskQueueFactory::Priority::HIGH)) { - RTC_LOG(LS_INFO) << "VideoReceiveStream: " << config_.ToString(); - - RTC_DCHECK(config_.renderer); - RTC_DCHECK(process_thread_); - RTC_DCHECK(call_stats_); - - module_process_sequence_checker_.Detach(); - network_sequence_checker_.Detach(); - - RTC_DCHECK(!config_.decoders.empty()); - RTC_CHECK(config_.decoder_factory); - std::set decoder_payload_types; - for (const Decoder& decoder : config_.decoders) { - RTC_CHECK(decoder_payload_types.find(decoder.payload_type) == - decoder_payload_types.end()) - << "Duplicate payload type (" << decoder.payload_type - << ") for different decoders."; - decoder_payload_types.insert(decoder.payload_type); - } - - timing_->set_render_delay(config_.render_delay_ms); - - frame_buffer_.reset( - new video_coding::FrameBuffer(clock_, timing_.get(), &stats_proxy_)); - - process_thread_->RegisterModule(&rtp_stream_sync_, RTC_FROM_HERE); - // Register with RtpStreamReceiverController. 
- media_receiver_ = receiver_controller->CreateReceiver( - config_.rtp.remote_ssrc, &rtp_video_stream_receiver_); - if (config_.rtp.rtx_ssrc) { - rtx_receive_stream_ = std::make_unique( - &rtp_video_stream_receiver_, config.rtp.rtx_associated_payload_types, - config_.rtp.remote_ssrc, rtp_receive_statistics_.get()); - rtx_receiver_ = receiver_controller->CreateReceiver( - config_.rtp.rtx_ssrc, rtx_receive_stream_.get()); - } else { - rtp_receive_statistics_->EnableRetransmitDetection(config.rtp.remote_ssrc, - true); - } -} - -VideoReceiveStream::VideoReceiveStream( - TaskQueueFactory* task_queue_factory, - RtpStreamReceiverControllerInterface* receiver_controller, - int num_cpu_cores, - PacketRouter* packet_router, - VideoReceiveStream::Config config, - ProcessThread* process_thread, - CallStats* call_stats, - Clock* clock) - : VideoReceiveStream(task_queue_factory, - receiver_controller, - num_cpu_cores, - packet_router, - std::move(config), - process_thread, - call_stats, - clock, - new VCMTiming(clock)) {} - -VideoReceiveStream::~VideoReceiveStream() { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - RTC_LOG(LS_INFO) << "~VideoReceiveStream: " << config_.ToString(); - Stop(); - process_thread_->DeRegisterModule(&rtp_stream_sync_); -} - -void VideoReceiveStream::SignalNetworkState(NetworkState state) { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - rtp_video_stream_receiver_.SignalNetworkState(state); -} - -bool VideoReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) { - return rtp_video_stream_receiver_.DeliverRtcp(packet, length); -} - -void VideoReceiveStream::SetSync(Syncable* audio_syncable) { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - rtp_stream_sync_.ConfigureSync(audio_syncable); -} - -void VideoReceiveStream::Start() { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - - if (decoder_running_) { - return; - } - - const bool protected_by_fec = config_.rtp.protected_by_flexfec || - rtp_video_stream_receiver_.IsUlpfecEnabled(); - - if (rtp_video_stream_receiver_.IsRetransmissionsEnabled() && - protected_by_fec) { - frame_buffer_->SetProtectionMode(kProtectionNackFEC); - } - - transport_adapter_.Enable(); - rtc::VideoSinkInterface* renderer = nullptr; - if (config_.enable_prerenderer_smoothing) { - incoming_video_stream_.reset(new IncomingVideoStream( - task_queue_factory_, config_.render_delay_ms, this)); - renderer = incoming_video_stream_.get(); - } else { - renderer = this; - } - - for (const Decoder& decoder : config_.decoders) { - std::unique_ptr video_decoder = - config_.decoder_factory->CreateVideoDecoder(decoder.video_format); - // If we still have no valid decoder, we have to create a "Null" decoder - // that ignores all calls. The reason we can get into this state is that the - // old decoder factory interface doesn't have a way to query supported - // codecs. - if (!video_decoder) { - video_decoder = std::make_unique(); - } - - std::string decoded_output_file = - field_trial::FindFullName("WebRTC-DecoderDataDumpDirectory"); - // Because '/' can't be used inside a field trial parameter, we use ';' - // instead. - // This is only relevant to WebRTC-DecoderDataDumpDirectory - // field trial. ';' is chosen arbitrary. Even though it's a legal character - // in some file systems, we can sacrifice ability to use it in the path to - // dumped video, since it's developers-only feature for debugging. 
- absl::c_replace(decoded_output_file, ';', '/'); - if (!decoded_output_file.empty()) { - char filename_buffer[256]; - rtc::SimpleStringBuilder ssb(filename_buffer); - ssb << decoded_output_file << "/webrtc_receive_stream_" - << this->config_.rtp.remote_ssrc << "-" << rtc::TimeMicros() - << ".ivf"; - video_decoder = CreateFrameDumpingDecoderWrapper( - std::move(video_decoder), FileWrapper::OpenWriteOnly(ssb.str())); - } - - video_decoders_.push_back(std::move(video_decoder)); - - video_receiver_.RegisterExternalDecoder(video_decoders_.back().get(), - decoder.payload_type); - VideoDecoder::Settings settings; - settings.set_codec_type( - PayloadStringToCodecType(decoder.video_format.name)); - settings.set_max_render_resolution({320, 180}); - settings.set_number_of_cores(num_cpu_cores_); - - const bool raw_payload = - config_.rtp.raw_payload_types.count(decoder.payload_type) > 0; - rtp_video_stream_receiver_.AddReceiveCodec( - decoder.payload_type, settings.codec_type(), - decoder.video_format.parameters, raw_payload); - video_receiver_.RegisterReceiveCodec(decoder.payload_type, settings); - } - - RTC_DCHECK(renderer != nullptr); - video_stream_decoder_.reset( - new VideoStreamDecoder(&video_receiver_, &stats_proxy_, renderer)); - - // Make sure we register as a stats observer *after* we've prepared the - // `video_stream_decoder_`. - call_stats_->RegisterStatsObserver(this); - - // Start decoding on task queue. - video_receiver_.DecoderThreadStarting(); - stats_proxy_.DecoderThreadStarting(); - decode_queue_.PostTask([this] { - RTC_DCHECK_RUN_ON(&decode_queue_); - decoder_stopped_ = false; - StartNextDecode(); - }); - decoder_running_ = true; - rtp_video_stream_receiver_.StartReceive(); -} - -void VideoReceiveStream::Stop() { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - rtp_video_stream_receiver_.StopReceive(); - - stats_proxy_.OnUniqueFramesCounted( - rtp_video_stream_receiver_.GetUniqueFramesSeen()); - - decode_queue_.PostTask([this] { frame_buffer_->Stop(); }); - - call_stats_->DeregisterStatsObserver(this); - - if (decoder_running_) { - rtc::Event done; - decode_queue_.PostTask([this, &done] { - RTC_DCHECK_RUN_ON(&decode_queue_); - decoder_stopped_ = true; - done.Set(); - }); - done.Wait(rtc::Event::kForever); - - decoder_running_ = false; - video_receiver_.DecoderThreadStopped(); - stats_proxy_.DecoderThreadStopped(); - // Deregister external decoders so they are no longer running during - // destruction. This effectively stops the VCM since the decoder thread is - // stopped, the VCM is deregistered and no asynchronous decoder threads are - // running. 
- for (const Decoder& decoder : config_.decoders) - video_receiver_.RegisterExternalDecoder(nullptr, decoder.payload_type); - - UpdateHistograms(); - } - - video_stream_decoder_.reset(); - incoming_video_stream_.reset(); - transport_adapter_.Disable(); -} - -VideoReceiveStream::Stats VideoReceiveStream::GetStats() const { - VideoReceiveStream::Stats stats = stats_proxy_.GetStats(); - stats.total_bitrate_bps = 0; - StreamStatistician* statistician = - rtp_receive_statistics_->GetStatistician(stats.ssrc); - if (statistician) { - stats.rtp_stats = statistician->GetStats(); - stats.total_bitrate_bps = statistician->BitrateReceived(); - } - if (config_.rtp.rtx_ssrc) { - StreamStatistician* rtx_statistician = - rtp_receive_statistics_->GetStatistician(config_.rtp.rtx_ssrc); - if (rtx_statistician) - stats.total_bitrate_bps += rtx_statistician->BitrateReceived(); - } - return stats; -} - -void VideoReceiveStream::UpdateHistograms() { - absl::optional fraction_lost; - StreamDataCounters rtp_stats; - StreamStatistician* statistician = - rtp_receive_statistics_->GetStatistician(config_.rtp.remote_ssrc); - if (statistician) { - fraction_lost = statistician->GetFractionLostInPercent(); - rtp_stats = statistician->GetReceiveStreamDataCounters(); - } - if (config_.rtp.rtx_ssrc) { - StreamStatistician* rtx_statistician = - rtp_receive_statistics_->GetStatistician(config_.rtp.rtx_ssrc); - if (rtx_statistician) { - StreamDataCounters rtx_stats = - rtx_statistician->GetReceiveStreamDataCounters(); - stats_proxy_.UpdateHistograms(fraction_lost, rtp_stats, &rtx_stats); - return; - } - } - stats_proxy_.UpdateHistograms(fraction_lost, rtp_stats, nullptr); -} - -void VideoReceiveStream::AddSecondarySink(RtpPacketSinkInterface* sink) { - rtp_video_stream_receiver_.AddSecondarySink(sink); -} - -void VideoReceiveStream::RemoveSecondarySink( - const RtpPacketSinkInterface* sink) { - rtp_video_stream_receiver_.RemoveSecondarySink(sink); -} - -void VideoReceiveStream::SetRtpExtensions( - std::vector extensions) { - // VideoReceiveStream is deprecated and this function not supported. - RTC_DCHECK_NOTREACHED(); -} - -bool VideoReceiveStream::SetBaseMinimumPlayoutDelayMs(int delay_ms) { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - if (delay_ms < kMinBaseMinimumDelayMs || delay_ms > kMaxBaseMinimumDelayMs) { - return false; - } - - MutexLock lock(&playout_delay_lock_); - base_minimum_playout_delay_ms_ = delay_ms; - UpdatePlayoutDelays(); - return true; -} - -int VideoReceiveStream::GetBaseMinimumPlayoutDelayMs() const { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - - MutexLock lock(&playout_delay_lock_); - return base_minimum_playout_delay_ms_; -} - -// TODO(tommi): This method grabs a lock 6 times. -void VideoReceiveStream::OnFrame(const VideoFrame& video_frame) { - int64_t video_playout_ntp_ms; - int64_t sync_offset_ms; - double estimated_freq_khz; - - // TODO(bugs.webrtc.org/10739): we should set local capture clock offset for - // `video_frame.packet_infos`. But VideoFrame is const qualified here. - - // TODO(tommi): GetStreamSyncOffsetInMs grabs three locks. One inside the - // function itself, another in GetChannel() and a third in - // GetPlayoutTimestamp. Seems excessive. Anyhow, I'm assuming the function - // succeeds most of the time, which leads to grabbing a fourth lock. - if (rtp_stream_sync_.GetStreamSyncOffsetInMs( - video_frame.timestamp(), video_frame.render_time_ms(), - &video_playout_ntp_ms, &sync_offset_ms, &estimated_freq_khz)) { - // TODO(tommi): OnSyncOffsetUpdated grabs a lock. 
- stats_proxy_.OnSyncOffsetUpdated(video_playout_ntp_ms, sync_offset_ms, - estimated_freq_khz); - } - source_tracker_.OnFrameDelivered(video_frame.packet_infos()); - - config_.renderer->OnFrame(video_frame); - - // TODO(tommi): OnRenderFrame grabs a lock too. - stats_proxy_.OnRenderedFrame(video_frame); -} - -void VideoReceiveStream::SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) { - rtp_video_stream_receiver_.SetFrameDecryptor(std::move(frame_decryptor)); -} - -void VideoReceiveStream::SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) { - rtp_video_stream_receiver_.SetDepacketizerToDecoderFrameTransformer( - std::move(frame_transformer)); -} - -void VideoReceiveStream::SendNack(const std::vector& sequence_numbers, - bool buffering_allowed) { - RTC_DCHECK(buffering_allowed); - rtp_video_stream_receiver_.RequestPacketRetransmit(sequence_numbers); -} - -void VideoReceiveStream::RequestKeyFrame(int64_t timestamp_ms) { - rtp_video_stream_receiver_.RequestKeyFrame(); - last_keyframe_request_ms_ = timestamp_ms; -} - -void VideoReceiveStream::OnCompleteFrame(std::unique_ptr frame) { - RTC_DCHECK_RUN_ON(&network_sequence_checker_); - // TODO(https://bugs.webrtc.org/9974): Consider removing this workaround. - int64_t time_now_ms = clock_->TimeInMilliseconds(); - if (last_complete_frame_time_ms_ > 0 && - time_now_ms - last_complete_frame_time_ms_ > kInactiveStreamThresholdMs) { - frame_buffer_->Clear(); - } - last_complete_frame_time_ms_ = time_now_ms; - - const VideoPlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_; - if (playout_delay.min_ms >= 0) { - MutexLock lock(&playout_delay_lock_); - frame_minimum_playout_delay_ms_ = playout_delay.min_ms; - UpdatePlayoutDelays(); - } - - if (playout_delay.max_ms >= 0) { - MutexLock lock(&playout_delay_lock_); - frame_maximum_playout_delay_ms_ = playout_delay.max_ms; - UpdatePlayoutDelays(); - } - - int64_t last_continuous_pid = frame_buffer_->InsertFrame(std::move(frame)); - if (last_continuous_pid != -1) - rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid); -} - -void VideoReceiveStream::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) { - RTC_DCHECK_RUN_ON(&module_process_sequence_checker_); - frame_buffer_->UpdateRtt(max_rtt_ms); - rtp_video_stream_receiver_.UpdateRtt(max_rtt_ms); -} - -uint32_t VideoReceiveStream::id() const { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - return config_.rtp.remote_ssrc; -} - -absl::optional VideoReceiveStream::GetInfo() const { - RTC_DCHECK_RUN_ON(&module_process_sequence_checker_); - absl::optional info = - rtp_video_stream_receiver_.GetSyncInfo(); - - if (!info) - return absl::nullopt; - - info->current_delay_ms = timing_->TargetVideoDelay(); - return info; -} - -bool VideoReceiveStream::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, - int64_t* time_ms) const { - RTC_DCHECK_NOTREACHED(); - return 0; -} - -void VideoReceiveStream::SetEstimatedPlayoutNtpTimestampMs( - int64_t ntp_timestamp_ms, - int64_t time_ms) { - RTC_DCHECK_NOTREACHED(); -} - -bool VideoReceiveStream::SetMinimumPlayoutDelay(int delay_ms) { - RTC_DCHECK_RUN_ON(&module_process_sequence_checker_); - MutexLock lock(&playout_delay_lock_); - syncable_minimum_playout_delay_ms_ = delay_ms; - UpdatePlayoutDelays(); - return true; -} - -int64_t VideoReceiveStream::GetWaitMs() const { - return keyframe_required_ ? 
max_wait_for_keyframe_ms_ - : max_wait_for_frame_ms_; -} - -void VideoReceiveStream::StartNextDecode() { - TRACE_EVENT0("webrtc", "VideoReceiveStream::StartNextDecode"); - frame_buffer_->NextFrame(GetWaitMs(), keyframe_required_, &decode_queue_, - /* encoded frame handler */ - [this](std::unique_ptr frame) { - RTC_DCHECK_RUN_ON(&decode_queue_); - if (decoder_stopped_) - return; - if (frame) { - HandleEncodedFrame(std::move(frame)); - } else { - HandleFrameBufferTimeout(); - } - StartNextDecode(); - }); -} - -void VideoReceiveStream::HandleEncodedFrame( - std::unique_ptr frame) { - int64_t now_ms = clock_->TimeInMilliseconds(); - - // Current OnPreDecode only cares about QP for VP8. - int qp = -1; - if (frame->CodecSpecific()->codecType == kVideoCodecVP8) { - if (!vp8::GetQp(frame->data(), frame->size(), &qp)) { - RTC_LOG(LS_WARNING) << "Failed to extract QP from VP8 video frame"; - } - } - stats_proxy_.OnPreDecode(frame->CodecSpecific()->codecType, qp); - HandleKeyFrameGeneration(frame->FrameType() == VideoFrameType::kVideoFrameKey, - now_ms); - int decode_result = video_receiver_.Decode(frame.get()); - if (decode_result == WEBRTC_VIDEO_CODEC_OK || - decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) { - keyframe_required_ = false; - frame_decoded_ = true; - rtp_video_stream_receiver_.FrameDecoded(frame->Id()); - - if (decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) - RequestKeyFrame(now_ms); - } else if (!frame_decoded_ || !keyframe_required_ || - (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_ < now_ms)) { - keyframe_required_ = true; - // TODO(philipel): Remove this keyframe request when downstream project - // has been fixed. - RequestKeyFrame(now_ms); - } - - if (encoded_frame_buffer_function_) { - encoded_frame_buffer_function_(WebRtcRecordableEncodedFrame(*frame)); - } -} - -void VideoReceiveStream::HandleKeyFrameGeneration( - bool received_frame_is_keyframe, - int64_t now_ms) { - // Repeat sending keyframe requests if we've requested a keyframe. - if (!keyframe_generation_requested_) { - return; - } - if (received_frame_is_keyframe) { - keyframe_generation_requested_ = false; - } else if (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_ <= now_ms) { - if (!IsReceivingKeyFrame(now_ms)) { - RequestKeyFrame(now_ms); - } - } else { - // It hasn't been long enough since the last keyframe request, do nothing. - } -} - -void VideoReceiveStream::HandleFrameBufferTimeout() { - int64_t now_ms = clock_->TimeInMilliseconds(); - absl::optional last_packet_ms = - rtp_video_stream_receiver_.LastReceivedPacketMs(); - - // To avoid spamming keyframe requests for a stream that is not active we - // check if we have received a packet within the last 5 seconds. - bool stream_is_active = last_packet_ms && now_ms - *last_packet_ms < 5000; - if (!stream_is_active) - stats_proxy_.OnStreamInactive(); - - if (stream_is_active && !IsReceivingKeyFrame(now_ms) && - (!config_.crypto_options.sframe.require_frame_encryption || - rtp_video_stream_receiver_.IsDecryptable())) { - RTC_LOG(LS_WARNING) << "No decodable frame in " << GetWaitMs() - << " ms, requesting keyframe."; - RequestKeyFrame(now_ms); - } -} - -bool VideoReceiveStream::IsReceivingKeyFrame(int64_t timestamp_ms) const { - absl::optional last_keyframe_packet_ms = - rtp_video_stream_receiver_.LastReceivedKeyframePacketMs(); - - // If we recently have been receiving packets belonging to a keyframe then - // we assume a keyframe is currently being received. 
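The timeout handling removed in the hunk above (and re-added later in video_receive_stream2.cc) boils down to: re-request a keyframe only if the stream has seen a packet within the last five seconds, no keyframe already appears to be in flight, and incoming frames are decryptable. Below is a standalone sketch of that decision with simplified names, plain millisecond integers instead of the WebRTC clock types, and the decryption condition folded into one flag; the class, defaults and values are illustrative, not the actual webrtc API.

// Standalone sketch of the frame-buffer timeout policy shown in this hunk.
// All names are simplified stand-ins; this is not webrtc::internal::VideoReceiveStream.
#include <cstdint>
#include <iostream>
#include <optional>

struct TimeoutPolicy {
  int64_t max_wait_for_keyframe_ms = 200;  // assumed default
  int64_t inactive_threshold_ms = 5000;    // "within the last 5 seconds"

  bool IsReceivingKeyFrame(std::optional<int64_t> last_keyframe_packet_ms,
                           int64_t now_ms) const {
    // Recently received packets of a keyframe => assume one is in flight.
    return last_keyframe_packet_ms &&
           now_ms - *last_keyframe_packet_ms < max_wait_for_keyframe_ms;
  }

  // True if a keyframe should be (re)requested after a decodable-frame timeout.
  bool ShouldRequestKeyFrame(std::optional<int64_t> last_packet_ms,
                             std::optional<int64_t> last_keyframe_packet_ms,
                             bool frames_decryptable,
                             int64_t now_ms) const {
    const bool stream_is_active =
        last_packet_ms && now_ms - *last_packet_ms < inactive_threshold_ms;
    // An inactive stream is not spammed with keyframe requests.
    return stream_is_active &&
           !IsReceivingKeyFrame(last_keyframe_packet_ms, now_ms) &&
           frames_decryptable;
  }
};

int main() {
  TimeoutPolicy policy;
  // Packets seen 1 s ago, no recent keyframe packets, frames decryptable.
  std::cout << policy.ShouldRequestKeyFrame(/*last_packet_ms=*/9000,
                                            /*last_keyframe_packet_ms=*/std::nullopt,
                                            /*frames_decryptable=*/true,
                                            /*now_ms=*/10000)
            << "\n";  // prints 1
}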
- bool receiving_keyframe = - last_keyframe_packet_ms && - timestamp_ms - *last_keyframe_packet_ms < max_wait_for_keyframe_ms_; - return receiving_keyframe; -} - -void VideoReceiveStream::UpdatePlayoutDelays() const { - const int minimum_delay_ms = - std::max({frame_minimum_playout_delay_ms_, base_minimum_playout_delay_ms_, - syncable_minimum_playout_delay_ms_}); - if (minimum_delay_ms >= 0) { - timing_->set_min_playout_delay(minimum_delay_ms); - } - - const int maximum_delay_ms = frame_maximum_playout_delay_ms_; - if (maximum_delay_ms >= 0) { - timing_->set_max_playout_delay(maximum_delay_ms); - } -} - -std::vector VideoReceiveStream::GetSources() const { - return source_tracker_.GetSources(); -} - -VideoReceiveStream::RecordingState VideoReceiveStream::SetAndGetRecordingState( - RecordingState state, - bool generate_key_frame) { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - rtc::Event event; - RecordingState old_state; - decode_queue_.PostTask([this, &event, &old_state, generate_key_frame, - state = std::move(state)] { - RTC_DCHECK_RUN_ON(&decode_queue_); - // Save old state. - old_state.callback = std::move(encoded_frame_buffer_function_); - old_state.last_keyframe_request_ms = last_keyframe_request_ms_; - - // Set new state. - encoded_frame_buffer_function_ = std::move(state.callback); - if (generate_key_frame) { - RequestKeyFrame(clock_->TimeInMilliseconds()); - keyframe_generation_requested_ = true; - } else { - keyframe_generation_requested_ = false; - last_keyframe_request_ms_ = state.last_keyframe_request_ms.value_or(0); - } - event.Set(); - }); - event.Wait(rtc::Event::kForever); - return old_state; -} - -void VideoReceiveStream::GenerateKeyFrame() { - decode_queue_.PostTask([this]() { - RTC_DCHECK_RUN_ON(&decode_queue_); - RequestKeyFrame(clock_->TimeInMilliseconds()); - keyframe_generation_requested_ = true; - }); -} - -} // namespace internal -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc index 528b2998e9..1ad51ad612 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc @@ -23,49 +23,52 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "api/crypto/frame_decryptor_interface.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/encoded_image.h" -#include "api/video_codecs/h264_profile_level_id.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_decoder_factory.h" -#include "api/video_codecs/video_encoder.h" #include "call/rtp_stream_receiver_controller_interface.h" #include "call/rtx_receive_stream.h" -#include "common_video/include/incoming_video_stream.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/include/video_error_codes.h" -#include "modules/video_coding/timing.h" +#include "modules/video_coding/timing/timing.h" #include "modules/video_coding/utility/vp8_header_parser.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" +#include "rtc_base/event.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" 
#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/system/thread_registry.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" #include "video/call_stats2.h" #include "video/frame_dumping_decoder.h" #include "video/receive_statistics_proxy2.h" +#include "video/render/incoming_video_stream.h" +#include "video/task_queue_frame_decode_scheduler.h" namespace webrtc { namespace internal { -constexpr int VideoReceiveStream2::kMaxWaitForKeyFrameMs; namespace { -constexpr int kMinBaseMinimumDelayMs = 0; -constexpr int kMaxBaseMinimumDelayMs = 10000; +// The default delay before re-requesting a key frame to be sent. +constexpr TimeDelta kMinBaseMinimumDelay = TimeDelta::Zero(); +constexpr TimeDelta kMaxBaseMinimumDelay = TimeDelta::Seconds(10); -constexpr int kMaxWaitForFrameMs = 3000; - -// Create a decoder for the preferred codec before the stream starts and any -// other decoder lazily on demand. -constexpr int kDefaultMaximumPreStreamDecoders = 1; +// Create no decoders before the stream starts. All decoders are created on +// demand when we receive payload data of the corresponding type. +constexpr int kDefaultMaximumPreStreamDecoders = 0; // Concrete instance of RecordableEncodedFrame wrapping needed content // from EncodedFrame. @@ -113,12 +116,11 @@ class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame { absl::optional color_space_; }; -RenderResolution InitialDecoderResolution() { +RenderResolution InitialDecoderResolution(const FieldTrialsView& field_trials) { FieldTrialOptional width("w"); FieldTrialOptional height("h"); - ParseFieldTrial( - {&width, &height}, - field_trial::FindFullName("WebRTC-Video-InitialDecoderResolution")); + ParseFieldTrial({&width, &height}, + field_trials.Lookup("WebRTC-Video-InitialDecoderResolution")); if (width && height) { return RenderResolution(width.Value(), height.Value()); } @@ -160,28 +162,23 @@ bool IsKeyFrameAndUnspecifiedResolution(const EncodedFrame& frame) { frame.EncodedImage()._encodedHeight == 0; } -// TODO(https://bugs.webrtc.org/9974): Consider removing this workaround. -// Maximum time between frames before resetting the FrameBuffer to avoid RTP -// timestamps wraparound to affect FrameBuffer. -constexpr int kInactiveStreamThresholdMs = 600000; // 10 minutes. +std::string OptionalDelayToLogString(const absl::optional opt) { + return opt.has_value() ? ToLogString(*opt) : ""; +} } // namespace -int DetermineMaxWaitForFrame(const VideoReceiveStream::Config& config, - bool is_keyframe) { +TimeDelta DetermineMaxWaitForFrame(TimeDelta rtp_history, bool is_keyframe) { // A (arbitrary) conversion factor between the remotely signalled NACK buffer // time (if not present defaults to 1000ms) and the maximum time we wait for a // remote frame. Chosen to not change existing defaults when using not // rtx-time. const int conversion_factor = 3; - - if (config.rtp.nack.rtp_history_ms > 0 && - conversion_factor * config.rtp.nack.rtp_history_ms < kMaxWaitForFrameMs) { - return is_keyframe ? config.rtp.nack.rtp_history_ms - : conversion_factor * config.rtp.nack.rtp_history_ms; + if (rtp_history > TimeDelta::Zero() && + conversion_factor * rtp_history < kMaxWaitForFrame) { + return is_keyframe ? rtp_history : conversion_factor * rtp_history; } - return is_keyframe ? VideoReceiveStream2::kMaxWaitForKeyFrameMs - : kMaxWaitForFrameMs; + return is_keyframe ? 
kMaxWaitForKeyFrame : kMaxWaitForFrame; } VideoReceiveStream2::VideoReceiveStream2( @@ -189,11 +186,13 @@ VideoReceiveStream2::VideoReceiveStream2( Call* call, int num_cpu_cores, PacketRouter* packet_router, - VideoReceiveStream::Config config, + VideoReceiveStreamInterface::Config config, CallStats* call_stats, Clock* clock, - VCMTiming* timing, - NackPeriodicProcessor* nack_periodic_processor) + std::unique_ptr timing, + NackPeriodicProcessor* nack_periodic_processor, + DecodeSynchronizer* decode_sync, + RtcEventLog* event_log) : task_queue_factory_(task_queue_factory), transport_adapter_(config.rtcp_send_transport), config_(std::move(config)), @@ -202,10 +201,10 @@ VideoReceiveStream2::VideoReceiveStream2( clock_(clock), call_stats_(call_stats), source_tracker_(clock_), - stats_proxy_(config_.rtp.remote_ssrc, clock_, call->worker_thread()), + stats_proxy_(remote_ssrc(), clock_, call->worker_thread()), rtp_receive_statistics_(ReceiveStatistics::Create(clock_)), - timing_(timing), - video_receiver_(clock_, timing_.get()), + timing_(std::move(timing)), + video_receiver_(clock_, timing_.get(), call->trials()), rtp_video_stream_receiver_(call->worker_thread(), clock_, &transport_adapter_, @@ -216,17 +215,18 @@ VideoReceiveStream2::VideoReceiveStream2( &stats_proxy_, &stats_proxy_, nack_periodic_processor, - this, // NackSender - nullptr, // Use default KeyFrameRequestSender - this, // OnCompleteFrameCallback + this, // OnCompleteFrameCallback std::move(config_.frame_decryptor), - std::move(config_.frame_transformer)), + std::move(config_.frame_transformer), + call->trials(), + event_log), rtp_stream_sync_(call->worker_thread(), this), - max_wait_for_keyframe_ms_(DetermineMaxWaitForFrame(config, true)), - max_wait_for_frame_ms_(DetermineMaxWaitForFrame(config, false)), - low_latency_renderer_enabled_("enabled", true), - low_latency_renderer_include_predecode_buffer_("include_predecode_buffer", - true), + max_wait_for_keyframe_(DetermineMaxWaitForFrame( + TimeDelta::Millis(config_.rtp.nack.rtp_history_ms), + true)), + max_wait_for_frame_(DetermineMaxWaitForFrame( + TimeDelta::Millis(config_.rtp.nack.rtp_history_ms), + false)), maximum_pre_stream_decoders_("max", kDefaultMaximumPreStreamDecoders), decode_queue_(task_queue_factory_->CreateTaskQueue( "DecodingQueue", @@ -249,28 +249,31 @@ VideoReceiveStream2::VideoReceiveStream2( decoder_payload_types.insert(decoder.payload_type); } - timing_->set_render_delay(config_.render_delay_ms); + timing_->set_render_delay(TimeDelta::Millis(config_.render_delay_ms)); - frame_buffer_.reset( - new video_coding::FrameBuffer(clock_, timing_.get(), &stats_proxy_)); + std::unique_ptr scheduler = + decode_sync ? 
decode_sync->CreateSynchronizedFrameScheduler() + : std::make_unique( + clock, call_->worker_thread()); + buffer_ = std::make_unique( + clock_, call_->worker_thread(), timing_.get(), &stats_proxy_, this, + max_wait_for_keyframe_, max_wait_for_frame_, std::move(scheduler), + call_->trials()); - if (config_.rtp.rtx_ssrc) { + if (rtx_ssrc()) { rtx_receive_stream_ = std::make_unique( - &rtp_video_stream_receiver_, config.rtp.rtx_associated_payload_types, - config_.rtp.remote_ssrc, rtp_receive_statistics_.get()); + &rtp_video_stream_receiver_, + std::move(config_.rtp.rtx_associated_payload_types), remote_ssrc(), + rtp_receive_statistics_.get()); } else { - rtp_receive_statistics_->EnableRetransmitDetection(config.rtp.remote_ssrc, - true); + rtp_receive_statistics_->EnableRetransmitDetection(remote_ssrc(), true); } - ParseFieldTrial({&low_latency_renderer_enabled_, - &low_latency_renderer_include_predecode_buffer_}, - field_trial::FindFullName("WebRTC-LowLatencyRenderer")); ParseFieldTrial( { &maximum_pre_stream_decoders_, }, - field_trial::FindFullName("WebRTC-PreStreamDecoders")); + call_->trials().Lookup("WebRTC-PreStreamDecoders")); } VideoReceiveStream2::~VideoReceiveStream2() { @@ -289,11 +292,11 @@ void VideoReceiveStream2::RegisterWithTransport( // Register with RtpStreamReceiverController. media_receiver_ = receiver_controller->CreateReceiver( - config_.rtp.remote_ssrc, &rtp_video_stream_receiver_); - if (config_.rtp.rtx_ssrc) { + remote_ssrc(), &rtp_video_stream_receiver_); + if (rtx_ssrc()) { RTC_DCHECK(rtx_receive_stream_); rtx_receiver_ = receiver_controller->CreateReceiver( - config_.rtp.rtx_ssrc, rtx_receive_stream_.get()); + rtx_ssrc(), rtx_receive_stream_.get()); } } @@ -303,11 +306,6 @@ void VideoReceiveStream2::UnregisterFromTransport() { rtx_receiver_.reset(); } -const VideoReceiveStream2::Config::Rtp& VideoReceiveStream2::rtp() const { - RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - return config_.rtp; -} - const std::string& VideoReceiveStream2::sync_group() const { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); return config_.sync_group; @@ -328,6 +326,16 @@ void VideoReceiveStream2::SetSync(Syncable* audio_syncable) { rtp_stream_sync_.ConfigureSync(audio_syncable); } +void VideoReceiveStream2::SetLocalSsrc(uint32_t local_ssrc) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + if (config_.rtp.local_ssrc == local_ssrc) + return; + + // TODO(tommi): Make sure we don't rely on local_ssrc via the config struct. 
+ const_cast(config_.rtp.local_ssrc) = local_ssrc; + rtp_video_stream_receiver_.OnLocalSsrcChange(local_ssrc); +} + void VideoReceiveStream2::Start() { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); @@ -335,12 +343,12 @@ void VideoReceiveStream2::Start() { return; } - const bool protected_by_fec = config_.rtp.protected_by_flexfec || - rtp_video_stream_receiver_.IsUlpfecEnabled(); + const bool protected_by_fec = + config_.rtp.protected_by_flexfec || + rtp_video_stream_receiver_.ulpfec_payload_type() != -1; - if (rtp_video_stream_receiver_.IsRetransmissionsEnabled() && - protected_by_fec) { - frame_buffer_->SetProtectionMode(kProtectionNackFEC); + if (config_.rtp.nack.rtp_history_ms > 0 && protected_by_fec) { + buffer_->SetProtectionMode(kProtectionNackFEC); } transport_adapter_.Enable(); @@ -353,20 +361,12 @@ void VideoReceiveStream2::Start() { renderer = this; } - int decoders_count = 0; for (const Decoder& decoder : config_.decoders) { - // Create up to maximum_pre_stream_decoders_ up front, wait the the other - // decoders until they are requested (i.e., we receive the corresponding - // payload). - if (decoders_count < maximum_pre_stream_decoders_) { - CreateAndRegisterExternalDecoder(decoder); - ++decoders_count; - } - VideoDecoder::Settings settings; settings.set_codec_type( PayloadStringToCodecType(decoder.video_format.name)); - settings.set_max_render_resolution(InitialDecoderResolution()); + settings.set_max_render_resolution( + InitialDecoderResolution(call_->trials())); settings.set_number_of_cores(num_cpu_cores_); const bool raw_payload = @@ -390,13 +390,24 @@ void VideoReceiveStream2::Start() { call_stats_->RegisterStatsObserver(this); // Start decoding on task queue. - video_receiver_.DecoderThreadStarting(); stats_proxy_.DecoderThreadStarting(); decode_queue_.PostTask([this] { RTC_DCHECK_RUN_ON(&decode_queue_); + // Create up to maximum_pre_stream_decoders_ up front, wait the the other + // decoders until they are requested (i.e., we receive the corresponding + // payload). + int decoders_count = 0; + for (const Decoder& decoder : config_.decoders) { + if (decoders_count >= maximum_pre_stream_decoders_) { + break; + } + CreateAndRegisterExternalDecoder(decoder); + ++decoders_count; + } + decoder_stopped_ = false; - StartNextDecode(); }); + buffer_->StartNextDecode(true); decoder_running_ = true; { @@ -408,43 +419,45 @@ void VideoReceiveStream2::Start() { void VideoReceiveStream2::Stop() { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - { - // TODO(bugs.webrtc.org/11993): Make this call on the network thread. - // Also call `GetUniqueFramesSeen()` at the same time (since it's a counter - // that's updated on the network thread). - RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - rtp_video_stream_receiver_.StopReceive(); - } + + // TODO(bugs.webrtc.org/11993): Make this call on the network thread. + // Also call `GetUniqueFramesSeen()` at the same time (since it's a counter + // that's updated on the network thread). + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + rtp_video_stream_receiver_.StopReceive(); stats_proxy_.OnUniqueFramesCounted( rtp_video_stream_receiver_.GetUniqueFramesSeen()); - decode_queue_.PostTask([this] { frame_buffer_->Stop(); }); - + buffer_->Stop(); call_stats_->DeregisterStatsObserver(this); if (decoder_running_) { rtc::Event done; decode_queue_.PostTask([this, &done] { RTC_DCHECK_RUN_ON(&decode_queue_); + // Set `decoder_stopped_` before deregistering all decoders. 
This means + // that any pending encoded frame will return early without trying to + // access the decoder database. decoder_stopped_ = true; + for (const Decoder& decoder : config_.decoders) { + video_receiver_.RegisterExternalDecoder(nullptr, decoder.payload_type); + } done.Set(); }); done.Wait(rtc::Event::kForever); decoder_running_ = false; - video_receiver_.DecoderThreadStopped(); stats_proxy_.DecoderThreadStopped(); - // Deregister external decoders so they are no longer running during - // destruction. This effectively stops the VCM since the decoder thread is - // stopped, the VCM is deregistered and no asynchronous decoder threads are - // running. - for (const Decoder& decoder : config_.decoders) - video_receiver_.RegisterExternalDecoder(nullptr, decoder.payload_type); UpdateHistograms(); } + // TODO(bugs.webrtc.org/11993): Make these calls on the network thread. + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + rtp_video_stream_receiver_.RemoveReceiveCodecs(); + video_receiver_.DeregisterReceiveCodecs(); + video_stream_decoder_.reset(); incoming_video_stream_.reset(); transport_adapter_.Disable(); @@ -464,9 +477,106 @@ void VideoReceiveStream2::SetRtpExtensions( // and guarded by `packet_sequence_checker_`. However the scope of that state // is huge (the whole Config struct), and would require all methods that touch // the struct to abide the needs of the `extensions` member. - VideoReceiveStream::Config& c = - const_cast(config_); - c.rtp.extensions = std::move(extensions); + const_cast&>(config_.rtp.extensions) = + std::move(extensions); +} + +RtpHeaderExtensionMap VideoReceiveStream2::GetRtpExtensionMap() const { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + return rtp_video_stream_receiver_.GetRtpExtensions(); +} + +bool VideoReceiveStream2::transport_cc() const { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + return config_.rtp.transport_cc; +} + +void VideoReceiveStream2::SetTransportCc(bool transport_cc) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + // TODO(tommi): Stop using the config struct for the internal state. + const_cast(config_.rtp.transport_cc) = transport_cc; +} + +void VideoReceiveStream2::SetRtcpMode(RtcpMode mode) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + // TODO(tommi): Stop using the config struct for the internal state. + const_cast(config_.rtp.rtcp_mode) = mode; + rtp_video_stream_receiver_.SetRtcpMode(mode); +} + +void VideoReceiveStream2::SetFlexFecProtection( + RtpPacketSinkInterface* flexfec_sink) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + rtp_video_stream_receiver_.SetPacketSink(flexfec_sink); + // TODO(tommi): Stop using the config struct for the internal state. + const_cast(config_.rtp.packet_sink_) = flexfec_sink; + const_cast(config_.rtp.protected_by_flexfec) = + (flexfec_sink != nullptr); +} + +void VideoReceiveStream2::SetLossNotificationEnabled(bool enabled) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + // TODO(tommi): Stop using the config struct for the internal state. + const_cast(config_.rtp.lntf.enabled) = enabled; + rtp_video_stream_receiver_.SetLossNotificationEnabled(enabled); +} + +void VideoReceiveStream2::SetNackHistory(TimeDelta history) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + RTC_DCHECK_GE(history.ms(), 0); + + if (config_.rtp.nack.rtp_history_ms == history.ms()) + return; + + // TODO(tommi): Stop using the config struct for the internal state. 
+ const_cast(config_.rtp.nack.rtp_history_ms) = history.ms(); + + const bool protected_by_fec = + config_.rtp.protected_by_flexfec || + rtp_video_stream_receiver_.ulpfec_payload_type() != -1; + + buffer_->SetProtectionMode(history.ms() > 0 && protected_by_fec + ? kProtectionNackFEC + : kProtectionNack); + + rtp_video_stream_receiver_.SetNackHistory(history); + TimeDelta max_wait_for_keyframe = DetermineMaxWaitForFrame(history, true); + TimeDelta max_wait_for_frame = DetermineMaxWaitForFrame(history, false); + + max_wait_for_keyframe_ = max_wait_for_keyframe; + max_wait_for_frame_ = max_wait_for_frame; + + buffer_->SetMaxWaits(max_wait_for_keyframe, max_wait_for_frame); +} + +void VideoReceiveStream2::SetProtectionPayloadTypes(int red_payload_type, + int ulpfec_payload_type) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + rtp_video_stream_receiver_.SetProtectionPayloadTypes(red_payload_type, + ulpfec_payload_type); +} + +void VideoReceiveStream2::SetRtcpXr(Config::Rtp::RtcpXr rtcp_xr) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + rtp_video_stream_receiver_.SetReferenceTimeReport( + rtcp_xr.receiver_reference_time_report); +} + +void VideoReceiveStream2::SetAssociatedPayloadTypes( + std::map associated_payload_types) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + + // For setting the associated payload types after construction, we currently + // assume that the rtx_ssrc cannot change. In such a case we can know that + // if the ssrc is non-0, a `rtx_receive_stream_` instance has previously been + // created and configured (and is referenced by `rtx_receiver_`) and we can + // simply reconfigure it. + // If rtx_ssrc is 0 however, we ignore this call. + if (!rtx_ssrc()) + return; + + rtx_receive_stream_->SetAssociatedPayloadTypes( + std::move(associated_payload_types)); } void VideoReceiveStream2::CreateAndRegisterExternalDecoder( @@ -484,7 +594,7 @@ void VideoReceiveStream2::CreateAndRegisterExternalDecoder( } std::string decoded_output_file = - field_trial::FindFullName("WebRTC-DecoderDataDumpDirectory"); + call_->trials().Lookup("WebRTC-DecoderDataDumpDirectory"); // Because '/' can't be used inside a field trial parameter, we use ';' // instead. 
// This is only relevant to WebRTC-DecoderDataDumpDirectory @@ -495,18 +605,17 @@ void VideoReceiveStream2::CreateAndRegisterExternalDecoder( if (!decoded_output_file.empty()) { char filename_buffer[256]; rtc::SimpleStringBuilder ssb(filename_buffer); - ssb << decoded_output_file << "/webrtc_receive_stream_" - << config_.rtp.remote_ssrc << "-" << rtc::TimeMicros() << ".ivf"; + ssb << decoded_output_file << "/webrtc_receive_stream_" << remote_ssrc() + << "-" << rtc::TimeMicros() << ".ivf"; video_decoder = CreateFrameDumpingDecoderWrapper( std::move(video_decoder), FileWrapper::OpenWriteOnly(ssb.str())); } - video_decoders_.push_back(std::move(video_decoder)); - video_receiver_.RegisterExternalDecoder(video_decoders_.back().get(), + video_receiver_.RegisterExternalDecoder(std::move(video_decoder), decoder.payload_type); } -VideoReceiveStream::Stats VideoReceiveStream2::GetStats() const { +VideoReceiveStreamInterface::Stats VideoReceiveStream2::GetStats() const { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); VideoReceiveStream2::Stats stats = stats_proxy_.GetStats(); stats.total_bitrate_bps = 0; @@ -516,9 +625,9 @@ VideoReceiveStream::Stats VideoReceiveStream2::GetStats() const { stats.rtp_stats = statistician->GetStats(); stats.total_bitrate_bps = statistician->BitrateReceived(); } - if (config_.rtp.rtx_ssrc) { + if (rtx_ssrc()) { StreamStatistician* rtx_statistician = - rtp_receive_statistics_->GetStatistician(config_.rtp.rtx_ssrc); + rtp_receive_statistics_->GetStatistician(rtx_ssrc()); if (rtx_statistician) stats.total_bitrate_bps += rtx_statistician->BitrateReceived(); } @@ -530,14 +639,14 @@ void VideoReceiveStream2::UpdateHistograms() { absl::optional fraction_lost; StreamDataCounters rtp_stats; StreamStatistician* statistician = - rtp_receive_statistics_->GetStatistician(config_.rtp.remote_ssrc); + rtp_receive_statistics_->GetStatistician(remote_ssrc()); if (statistician) { fraction_lost = statistician->GetFractionLostInPercent(); rtp_stats = statistician->GetReceiveStreamDataCounters(); } - if (config_.rtp.rtx_ssrc) { + if (rtx_ssrc()) { StreamStatistician* rtx_statistician = - rtp_receive_statistics_->GetStatistician(config_.rtp.rtx_ssrc); + rtp_receive_statistics_->GetStatistician(rtx_ssrc()); if (rtx_statistician) { StreamDataCounters rtx_stats = rtx_statistician->GetReceiveStreamDataCounters(); @@ -550,18 +659,22 @@ void VideoReceiveStream2::UpdateHistograms() { bool VideoReceiveStream2::SetBaseMinimumPlayoutDelayMs(int delay_ms) { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - if (delay_ms < kMinBaseMinimumDelayMs || delay_ms > kMaxBaseMinimumDelayMs) { + TimeDelta delay = TimeDelta::Millis(delay_ms); + if (delay < kMinBaseMinimumDelay || delay > kMaxBaseMinimumDelay) { return false; } - base_minimum_playout_delay_ms_ = delay_ms; + base_minimum_playout_delay_ = delay; UpdatePlayoutDelays(); return true; } int VideoReceiveStream2::GetBaseMinimumPlayoutDelayMs() const { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - return base_minimum_playout_delay_ms_; + constexpr TimeDelta kDefaultBaseMinPlayoutDelay = TimeDelta::Millis(-1); + // Unset must be -1. + static_assert(-1 == kDefaultBaseMinPlayoutDelay.ms(), ""); + return base_minimum_playout_delay_.value_or(kDefaultBaseMinPlayoutDelay).ms(); } void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) { @@ -571,7 +684,7 @@ void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) { // `video_frame.packet_infos`. But VideoFrame is const qualified here. 
call_->worker_thread()->PostTask( - ToQueuedTask(task_safety_, [frame_meta, this]() { + SafeTask(task_safety_.flag(), [frame_meta, this]() { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); int64_t video_playout_ntp_ms; int64_t sync_offset_ms; @@ -616,67 +729,48 @@ void VideoReceiveStream2::SetDepacketizerToDecoderFrameTransformer( std::move(frame_transformer)); } -void VideoReceiveStream2::SendNack( - const std::vector& sequence_numbers, - bool buffering_allowed) { +void VideoReceiveStream2::RequestKeyFrame(Timestamp now) { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - RTC_DCHECK(buffering_allowed); - rtp_video_stream_receiver_.RequestPacketRetransmit(sequence_numbers); -} - -void VideoReceiveStream2::RequestKeyFrame(int64_t timestamp_ms) { - // Running on worker_sequence_checker_. // Called from RtpVideoStreamReceiver (rtp_video_stream_receiver_ is // ultimately responsible). rtp_video_stream_receiver_.RequestKeyFrame(); - decode_queue_.PostTask([this, timestamp_ms]() { - RTC_DCHECK_RUN_ON(&decode_queue_); - last_keyframe_request_ms_ = timestamp_ms; - }); + last_keyframe_request_ = now; } void VideoReceiveStream2::OnCompleteFrame(std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - // TODO(https://bugs.webrtc.org/9974): Consider removing this workaround. - int64_t time_now_ms = clock_->TimeInMilliseconds(); - if (last_complete_frame_time_ms_ > 0 && - time_now_ms - last_complete_frame_time_ms_ > kInactiveStreamThresholdMs) { - frame_buffer_->Clear(); - } - last_complete_frame_time_ms_ = time_now_ms; - const VideoPlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_; if (playout_delay.min_ms >= 0) { - frame_minimum_playout_delay_ms_ = playout_delay.min_ms; + frame_minimum_playout_delay_ = TimeDelta::Millis(playout_delay.min_ms); UpdatePlayoutDelays(); } - if (playout_delay.max_ms >= 0) { - frame_maximum_playout_delay_ms_ = playout_delay.max_ms; + frame_maximum_playout_delay_ = TimeDelta::Millis(playout_delay.max_ms); UpdatePlayoutDelays(); } - int64_t last_continuous_pid = frame_buffer_->InsertFrame(std::move(frame)); - if (last_continuous_pid != -1) { + auto last_continuous_pid = buffer_->InsertFrame(std::move(frame)); + if (last_continuous_pid.has_value()) { { // TODO(bugs.webrtc.org/11993): Call on the network thread. RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid); + rtp_video_stream_receiver_.FrameContinuous(*last_continuous_pid); } } } void VideoReceiveStream2::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - frame_buffer_->UpdateRtt(max_rtt_ms); + // TODO(bugs.webrtc.org/13757): Replace with TimeDelta. 
+ buffer_->UpdateRtt(max_rtt_ms); rtp_video_stream_receiver_.UpdateRtt(max_rtt_ms); stats_proxy_.OnRttUpdate(avg_rtt_ms); } uint32_t VideoReceiveStream2::id() const { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - return config_.rtp.remote_ssrc; + return remote_ssrc(); } absl::optional VideoReceiveStream2::GetInfo() const { @@ -687,14 +781,14 @@ absl::optional VideoReceiveStream2::GetInfo() const { if (!info) return absl::nullopt; - info->current_delay_ms = timing_->TargetVideoDelay(); + info->current_delay_ms = timing_->TargetVideoDelay().ms(); return info; } bool VideoReceiveStream2::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const { RTC_DCHECK_NOTREACHED(); - return 0; + return false; } void VideoReceiveStream2::SetEstimatedPlayoutNtpTimestampMs( @@ -705,45 +799,19 @@ void VideoReceiveStream2::SetEstimatedPlayoutNtpTimestampMs( bool VideoReceiveStream2::SetMinimumPlayoutDelay(int delay_ms) { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - syncable_minimum_playout_delay_ms_ = delay_ms; + syncable_minimum_playout_delay_ = TimeDelta::Millis(delay_ms); UpdatePlayoutDelays(); return true; } -int64_t VideoReceiveStream2::GetMaxWaitMs() const { - return keyframe_required_ ? max_wait_for_keyframe_ms_ - : max_wait_for_frame_ms_; -} - -void VideoReceiveStream2::StartNextDecode() { - // Running on the decode thread. - TRACE_EVENT0("webrtc", "VideoReceiveStream2::StartNextDecode"); - frame_buffer_->NextFrame( - GetMaxWaitMs(), keyframe_required_, &decode_queue_, - /* encoded frame handler */ - [this](std::unique_ptr frame) { - RTC_DCHECK_RUN_ON(&decode_queue_); - if (decoder_stopped_) - return; - if (frame) { - HandleEncodedFrame(std::move(frame)); - } else { - int64_t now_ms = clock_->TimeInMilliseconds(); - // TODO(bugs.webrtc.org/11993): PostTask to the network thread. - call_->worker_thread()->PostTask(ToQueuedTask( - task_safety_, [this, now_ms, wait_ms = GetMaxWaitMs()]() { - RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - HandleFrameBufferTimeout(now_ms, wait_ms); - })); - } - StartNextDecode(); - }); -} - -void VideoReceiveStream2::HandleEncodedFrame( - std::unique_ptr frame) { - // Running on `decode_queue_`. - int64_t now_ms = clock_->TimeInMilliseconds(); +void VideoReceiveStream2::OnEncodedFrame(std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + Timestamp now = clock_->CurrentTime(); + const bool keyframe_request_is_due = + !last_keyframe_request_ || + now >= (*last_keyframe_request_ + max_wait_for_keyframe_); + const bool received_frame_is_keyframe = + frame->FrameType() == VideoFrameType::kVideoFrameKey; // Current OnPreDecode only cares about QP for VP8. int qp = -1; @@ -754,11 +822,72 @@ void VideoReceiveStream2::HandleEncodedFrame( } stats_proxy_.OnPreDecode(frame->CodecSpecific()->codecType, qp); - bool force_request_key_frame = false; - int64_t decoded_frame_picture_id = -1; + decode_queue_.PostTask([this, now, keyframe_request_is_due, + received_frame_is_keyframe, frame = std::move(frame), + keyframe_required = keyframe_required_]() mutable { + RTC_DCHECK_RUN_ON(&decode_queue_); + if (decoder_stopped_) + return; + DecodeFrameResult result = HandleEncodedFrameOnDecodeQueue( + std::move(frame), keyframe_request_is_due, keyframe_required); - const bool keyframe_request_is_due = - now_ms >= (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_); + // TODO(bugs.webrtc.org/11993): Make this PostTask to the network thread. 
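The new OnEncodedFrame path above throttles keyframe requests: a request is only "due" when none has been sent yet or the last one is older than max_wait_for_keyframe_. A minimal sketch of that throttle follows, using std::chrono in place of webrtc::Timestamp/TimeDelta; the class and member names are illustrative only.

// Minimal sketch of the "keyframe request is due" throttling used above.
#include <chrono>
#include <iostream>
#include <optional>

class KeyFrameRequestThrottle {
 public:
  explicit KeyFrameRequestThrottle(std::chrono::milliseconds min_interval)
      : min_interval_(min_interval) {}

  // A request is "due" if none was ever sent, or the last one is older than
  // the minimum interval (max_wait_for_keyframe_ in the patch).
  bool IsDue(std::chrono::steady_clock::time_point now) const {
    return !last_request_ || now >= *last_request_ + min_interval_;
  }

  // Record that a request was sent.
  void OnRequested(std::chrono::steady_clock::time_point now) {
    last_request_ = now;
  }

 private:
  const std::chrono::milliseconds min_interval_;
  std::optional<std::chrono::steady_clock::time_point> last_request_;
};

int main() {
  KeyFrameRequestThrottle throttle(std::chrono::milliseconds(200));
  const auto t0 = std::chrono::steady_clock::now();
  std::cout << throttle.IsDue(t0) << "\n";  // 1: never requested
  throttle.OnRequested(t0);
  std::cout << throttle.IsDue(t0 + std::chrono::milliseconds(50)) << "\n";   // 0: too soon
  std::cout << throttle.IsDue(t0 + std::chrono::milliseconds(250)) << "\n";  // 1: interval elapsed
}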
+ call_->worker_thread()->PostTask( + SafeTask(task_safety_.flag(), + [this, now, result = std::move(result), + received_frame_is_keyframe, keyframe_request_is_due]() { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + keyframe_required_ = result.keyframe_required; + + if (result.decoded_frame_picture_id) { + rtp_video_stream_receiver_.FrameDecoded( + *result.decoded_frame_picture_id); + } + + HandleKeyFrameGeneration(received_frame_is_keyframe, now, + result.force_request_key_frame, + keyframe_request_is_due); + buffer_->StartNextDecode(keyframe_required_); + })); + }); +} + +void VideoReceiveStream2::OnDecodableFrameTimeout(TimeDelta wait) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + Timestamp now = clock_->CurrentTime(); + + absl::optional last_packet_ms = + rtp_video_stream_receiver_.LastReceivedPacketMs(); + + // To avoid spamming keyframe requests for a stream that is not active we + // check if we have received a packet within the last 5 seconds. + constexpr TimeDelta kInactiveDuration = TimeDelta::Seconds(5); + const bool stream_is_active = + last_packet_ms && + now - Timestamp::Millis(*last_packet_ms) < kInactiveDuration; + if (!stream_is_active) + stats_proxy_.OnStreamInactive(); + + if (stream_is_active && !IsReceivingKeyFrame(now) && + (!config_.crypto_options.sframe.require_frame_encryption || + rtp_video_stream_receiver_.IsDecryptable())) { + RTC_LOG(LS_WARNING) << "No decodable frame in " << wait + << ", requesting keyframe."; + RequestKeyFrame(now); + } + + buffer_->StartNextDecode(keyframe_required_); +} + +VideoReceiveStream2::DecodeFrameResult +VideoReceiveStream2::HandleEncodedFrameOnDecodeQueue( + std::unique_ptr frame, + bool keyframe_request_is_due, + bool keyframe_required) { + RTC_DCHECK_RUN_ON(&decode_queue_); + + bool force_request_key_frame = false; + absl::optional decoded_frame_picture_id; if (!video_receiver_.IsExternalDecoderRegistered(frame->PayloadType())) { // Look for the decoder with this payload type. @@ -771,47 +900,33 @@ void VideoReceiveStream2::HandleEncodedFrame( } int64_t frame_id = frame->Id(); - bool received_frame_is_keyframe = - frame->FrameType() == VideoFrameType::kVideoFrameKey; int decode_result = DecodeAndMaybeDispatchEncodedFrame(std::move(frame)); if (decode_result == WEBRTC_VIDEO_CODEC_OK || decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) { - keyframe_required_ = false; + keyframe_required = false; frame_decoded_ = true; decoded_frame_picture_id = frame_id; if (decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) force_request_key_frame = true; - } else if (!frame_decoded_ || !keyframe_required_ || - keyframe_request_is_due) { - keyframe_required_ = true; + } else if (!frame_decoded_ || !keyframe_required || keyframe_request_is_due) { + keyframe_required = true; // TODO(philipel): Remove this keyframe request when downstream project // has been fixed. force_request_key_frame = true; } - { - // TODO(bugs.webrtc.org/11993): Make this PostTask to the network thread. 
- call_->worker_thread()->PostTask(ToQueuedTask( - task_safety_, - [this, now_ms, received_frame_is_keyframe, force_request_key_frame, - decoded_frame_picture_id, keyframe_request_is_due]() { - RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - - if (decoded_frame_picture_id != -1) - rtp_video_stream_receiver_.FrameDecoded(decoded_frame_picture_id); - - HandleKeyFrameGeneration(received_frame_is_keyframe, now_ms, - force_request_key_frame, - keyframe_request_is_due); - })); - } + return DecodeFrameResult{ + .force_request_key_frame = force_request_key_frame, + .decoded_frame_picture_id = std::move(decoded_frame_picture_id), + .keyframe_required = keyframe_required, + }; } int VideoReceiveStream2::DecodeAndMaybeDispatchEncodedFrame( std::unique_ptr frame) { - // Running on decode_queue_. + RTC_DCHECK_RUN_ON(&decode_queue_); // If `buffered_encoded_frames_` grows out of control (=60 queued frames), // maybe due to a stuck decoder, we just halt the process here and log the @@ -869,12 +984,12 @@ int VideoReceiveStream2::DecodeAndMaybeDispatchEncodedFrame( return decode_result; } -// RTC_RUN_ON(packet_sequence_checker_) void VideoReceiveStream2::HandleKeyFrameGeneration( bool received_frame_is_keyframe, - int64_t now_ms, + Timestamp now, bool always_request_key_frame, bool keyframe_request_is_due) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); bool request_key_frame = always_request_key_frame; // Repeat sending keyframe requests if we've requested a keyframe. @@ -882,7 +997,7 @@ void VideoReceiveStream2::HandleKeyFrameGeneration( if (received_frame_is_keyframe) { keyframe_generation_requested_ = false; } else if (keyframe_request_is_due) { - if (!IsReceivingKeyFrame(now_ms)) { + if (!IsReceivingKeyFrame(now)) { request_key_frame = true; } } else { @@ -891,76 +1006,66 @@ void VideoReceiveStream2::HandleKeyFrameGeneration( } if (request_key_frame) { - // HandleKeyFrameGeneration is initated from the decode thread - + // HandleKeyFrameGeneration is initiated from the decode thread - // RequestKeyFrame() triggers a call back to the decode thread. // Perhaps there's a way to avoid that. - RequestKeyFrame(now_ms); - } -} - -// RTC_RUN_ON(packet_sequence_checker_) -void VideoReceiveStream2::HandleFrameBufferTimeout(int64_t now_ms, - int64_t wait_ms) { - absl::optional last_packet_ms = - rtp_video_stream_receiver_.LastReceivedPacketMs(); - - // To avoid spamming keyframe requests for a stream that is not active we - // check if we have received a packet within the last 5 seconds. - const bool stream_is_active = - last_packet_ms && now_ms - *last_packet_ms < 5000; - if (!stream_is_active) - stats_proxy_.OnStreamInactive(); - - if (stream_is_active && !IsReceivingKeyFrame(now_ms) && - (!config_.crypto_options.sframe.require_frame_encryption || - rtp_video_stream_receiver_.IsDecryptable())) { - RTC_LOG(LS_WARNING) << "No decodable frame in " << wait_ms - << " ms, requesting keyframe."; - RequestKeyFrame(now_ms); + RequestKeyFrame(now); } } -// RTC_RUN_ON(packet_sequence_checker_) -bool VideoReceiveStream2::IsReceivingKeyFrame(int64_t timestamp_ms) const { +bool VideoReceiveStream2::IsReceivingKeyFrame(Timestamp now) const { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); absl::optional last_keyframe_packet_ms = rtp_video_stream_receiver_.LastReceivedKeyframePacketMs(); // If we recently have been receiving packets belonging to a keyframe then // we assume a keyframe is currently being received. 
- bool receiving_keyframe = - last_keyframe_packet_ms && - timestamp_ms - *last_keyframe_packet_ms < max_wait_for_keyframe_ms_; + bool receiving_keyframe = last_keyframe_packet_ms && + now - Timestamp::Millis(*last_keyframe_packet_ms) < + max_wait_for_keyframe_; return receiving_keyframe; } void VideoReceiveStream2::UpdatePlayoutDelays() const { - // Running on worker_sequence_checker_. - const int minimum_delay_ms = - std::max({frame_minimum_playout_delay_ms_, base_minimum_playout_delay_ms_, - syncable_minimum_playout_delay_ms_}); - if (minimum_delay_ms >= 0) { - timing_->set_min_playout_delay(minimum_delay_ms); - if (frame_minimum_playout_delay_ms_ == 0 && - frame_maximum_playout_delay_ms_ > 0 && low_latency_renderer_enabled_) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + const std::initializer_list> min_delays = { + frame_minimum_playout_delay_, base_minimum_playout_delay_, + syncable_minimum_playout_delay_}; + + // Since nullopt < anything, this will return the largest of the minumum + // delays, or nullopt if all are nullopt. + absl::optional minimum_delay = std::max(min_delays); + if (minimum_delay) { + auto num_playout_delays_set = + absl::c_count_if(min_delays, [](auto opt) { return opt.has_value(); }); + if (num_playout_delays_set > 1 && + timing_->min_playout_delay() != minimum_delay) { + RTC_LOG(LS_WARNING) + << "Multiple playout delays set. Actual delay value set to " + << *minimum_delay << " frame min delay=" + << OptionalDelayToLogString(frame_maximum_playout_delay_) + << " base min delay=" + << OptionalDelayToLogString(base_minimum_playout_delay_) + << " sync min delay=" + << OptionalDelayToLogString(syncable_minimum_playout_delay_); + } + timing_->set_min_playout_delay(*minimum_delay); + if (frame_minimum_playout_delay_ == TimeDelta::Zero() && + frame_maximum_playout_delay_ > TimeDelta::Zero()) { // TODO(kron): Estimate frame rate from video stream. - constexpr double kFrameRate = 60.0; + constexpr Frequency kFrameRate = Frequency::Hertz(60); // Convert playout delay in ms to number of frames. - int max_composition_delay_in_frames = std::lrint( - static_cast(frame_maximum_playout_delay_ms_ * kFrameRate) / - rtc::kNumMillisecsPerSec); - if (low_latency_renderer_include_predecode_buffer_) { - // Subtract frames in buffer. - max_composition_delay_in_frames = std::max( - max_composition_delay_in_frames - frame_buffer_->Size(), 0); - } - timing_->SetMaxCompositionDelayInFrames( - absl::make_optional(max_composition_delay_in_frames)); + int max_composition_delay_in_frames = + std::lrint(*frame_maximum_playout_delay_ * kFrameRate); + // Subtract frames in buffer. + max_composition_delay_in_frames = + std::max(max_composition_delay_in_frames - buffer_->Size(), 0); + timing_->SetMaxCompositionDelayInFrames(max_composition_delay_in_frames); } } - const int maximum_delay_ms = frame_maximum_playout_delay_ms_; - if (maximum_delay_ms >= 0) { - timing_->set_max_playout_delay(maximum_delay_ms); + if (frame_maximum_playout_delay_) { + timing_->set_max_playout_delay(*frame_maximum_playout_delay_); } } @@ -977,18 +1082,26 @@ VideoReceiveStream2::SetAndGetRecordingState(RecordingState state, // Save old state, set the new state. RecordingState old_state; + absl::optional last_keyframe_request; + { + // TODO(bugs.webrtc.org/11993): Post this to the network thread. + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + last_keyframe_request = last_keyframe_request_; + last_keyframe_request_ = + generate_key_frame + ? 
clock_->CurrentTime() + : Timestamp::Millis(state.last_keyframe_request_ms.value_or(0)); + } + decode_queue_.PostTask( [this, &event, &old_state, callback = std::move(state.callback), - generate_key_frame, - last_keyframe_request = state.last_keyframe_request_ms.value_or(0)] { + last_keyframe_request = std::move(last_keyframe_request)] { RTC_DCHECK_RUN_ON(&decode_queue_); old_state.callback = std::move(encoded_frame_buffer_function_); encoded_frame_buffer_function_ = std::move(callback); - old_state.last_keyframe_request_ms = last_keyframe_request_ms_; - last_keyframe_request_ms_ = generate_key_frame - ? clock_->TimeInMilliseconds() - : last_keyframe_request; + old_state.last_keyframe_request_ms = + last_keyframe_request.value_or(Timestamp::Zero()).ms(); event.Set(); }); @@ -1008,7 +1121,7 @@ VideoReceiveStream2::SetAndGetRecordingState(RecordingState state, void VideoReceiveStream2::GenerateKeyFrame() { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - RequestKeyFrame(clock_->TimeInMilliseconds()); + RequestKeyFrame(clock_->CurrentTime()); keyframe_generation_requested_ = true; } diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h index cf637f8c0e..6b3d1ce439 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h @@ -11,32 +11,33 @@ #ifndef VIDEO_VIDEO_RECEIVE_STREAM2_H_ #define VIDEO_VIDEO_RECEIVE_STREAM2_H_ +#include #include #include #include #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_factory.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "api/video/recordable_encoded_frame.h" #include "call/call.h" #include "call/rtp_packet_sink_interface.h" #include "call/syncable.h" #include "call/video_receive_stream.h" -#include "modules/rtp_rtcp/include/flexfec_receiver.h" #include "modules/rtp_rtcp/source/source_tracker.h" -#include "modules/video_coding/frame_buffer2.h" #include "modules/video_coding/nack_requester.h" #include "modules/video_coding/video_receiver2.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" #include "video/receive_statistics_proxy2.h" #include "video/rtp_streams_synchronizer2.h" #include "video/rtp_video_stream_receiver2.h" #include "video/transport_adapter.h" +#include "video/video_stream_buffer_controller.h" #include "video/video_stream_decoder2.h" namespace webrtc { @@ -46,6 +47,9 @@ class RtpStreamReceiverControllerInterface; class RtxReceiveStream; class VCMTiming; +constexpr TimeDelta kMaxWaitForKeyFrame = TimeDelta::Millis(200); +constexpr TimeDelta kMaxWaitForFrame = TimeDelta::Seconds(3); + namespace internal { class CallStats; @@ -53,7 +57,7 @@ class CallStats; // Utility struct for grabbing metadata from a VideoFrame and processing it // asynchronously without needing the actual frame data. // Additionally the caller can bundle information from the current clock -// when the metadata is captured, for accurate reporting and not needeing +// when the metadata is captured, for accurate reporting and not needing // multiple calls to clock->Now(). 
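The UpdatePlayoutDelays() change a little earlier in this hunk keeps the three minimum-delay sources as optionals and takes std::max over them, relying on an empty optional comparing less than any engaged value. A small self-contained demonstration of that trick, with std::optional and std::chrono standing in for absl::optional and webrtc::TimeDelta:

// Demonstration of the max-of-optional-delays idiom used in UpdatePlayoutDelays().
#include <algorithm>
#include <chrono>
#include <iostream>
#include <optional>

int main() {
  using Delay = std::optional<std::chrono::milliseconds>;

  Delay frame_min;                                  // not set by any frame
  Delay base_min = std::chrono::milliseconds(100);  // set via the API
  Delay sync_min = std::chrono::milliseconds(40);   // set by a/v sync

  // An empty optional compares less than any engaged optional, so std::max
  // over the list yields the largest set delay, or nullopt if none is set.
  Delay minimum_delay = std::max({frame_min, base_min, sync_min});

  if (minimum_delay) {
    std::cout << "min playout delay: " << minimum_delay->count() << " ms\n";  // 100 ms
  } else {
    std::cout << "no minimum playout delay set\n";
  }
}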
struct VideoFrameMetaData { VideoFrameMetaData(const webrtc::VideoFrame& frame, Timestamp now) @@ -78,16 +82,13 @@ struct VideoFrameMetaData { }; class VideoReceiveStream2 - : public webrtc::VideoReceiveStream, + : public webrtc::VideoReceiveStreamInterface, public rtc::VideoSinkInterface, - public NackSender, public RtpVideoStreamReceiver2::OnCompleteFrameCallback, public Syncable, - public CallStatsObserver { + public CallStatsObserver, + public FrameSchedulingReceiver { public: - // The default number of milliseconds to pass before re-requesting a key frame - // to be sent. - static constexpr int kMaxWaitForKeyFrameMs = 200; // The maximum number of buffered encoded frames when encoded output is // configured. static constexpr size_t kBufferedEncodedFramesMaxSize = 60; @@ -96,11 +97,13 @@ class VideoReceiveStream2 Call* call, int num_cpu_cores, PacketRouter* packet_router, - VideoReceiveStream::Config config, + VideoReceiveStreamInterface::Config config, CallStats* call_stats, Clock* clock, - VCMTiming* timing, - NackPeriodicProcessor* nack_periodic_processor); + std::unique_ptr timing, + NackPeriodicProcessor* nack_periodic_processor, + DecodeSynchronizer* decode_sync, + RtcEventLog* event_log); // Destruction happens on the worker thread. Prior to destruction the caller // must ensure that a registration with the transport has been cleared. See // `RegisterWithTransport` for details. @@ -117,28 +120,43 @@ class VideoReceiveStream2 // network thread. void UnregisterFromTransport(); - // Convenience getters for parts of the receive stream's config. - // The accessors must be called on the packet delivery thread in accordance - // to documentation for RtpConfig (see receive_stream.h), the returned - // values should not be cached and should just be used within the calling - // context as some values might change. - const Config::Rtp& rtp() const; + // Accessor for the a/v sync group. This value may change and the caller + // must be on the packet delivery thread. const std::string& sync_group() const; + // Getters for const remote SSRC values that won't change throughout the + // object's lifetime. + uint32_t remote_ssrc() const { return config_.rtp.remote_ssrc; } + uint32_t rtx_ssrc() const { return config_.rtp.rtx_ssrc; } + void SignalNetworkState(NetworkState state); bool DeliverRtcp(const uint8_t* packet, size_t length); void SetSync(Syncable* audio_syncable); - // Implements webrtc::VideoReceiveStream. + // Updates the `rtp_video_stream_receiver_`'s `local_ssrc` when the default + // sender has been created, changed or removed. + void SetLocalSsrc(uint32_t local_ssrc); + + // Implements webrtc::VideoReceiveStreamInterface. 
void Start() override; void Stop() override; void SetRtpExtensions(std::vector extensions) override; - - const RtpConfig& rtp_config() const override { return rtp(); } - - webrtc::VideoReceiveStream::Stats GetStats() const override; + RtpHeaderExtensionMap GetRtpExtensionMap() const override; + bool transport_cc() const override; + void SetTransportCc(bool transport_cc) override; + void SetRtcpMode(RtcpMode mode) override; + void SetFlexFecProtection(RtpPacketSinkInterface* flexfec_sink) override; + void SetLossNotificationEnabled(bool enabled) override; + void SetNackHistory(TimeDelta history) override; + void SetProtectionPayloadTypes(int red_payload_type, + int ulpfec_payload_type) override; + void SetRtcpXr(Config::Rtp::RtcpXr rtcp_xr) override; + void SetAssociatedPayloadTypes( + std::map associated_payload_types) override; + + webrtc::VideoReceiveStreamInterface::Stats GetStats() const override; // SetBaseMinimumPlayoutDelayMs and GetBaseMinimumPlayoutDelayMs are called // from webrtc/api level and requested by user code. For e.g. blink/js layer @@ -154,12 +172,6 @@ class VideoReceiveStream2 // Implements rtc::VideoSinkInterface. void OnFrame(const VideoFrame& video_frame) override; - // Implements NackSender. - // For this particular override of the interface, - // only (buffering_allowed == true) is acceptable. - void SendNack(const std::vector& sequence_numbers, - bool buffering_allowed) override; - // Implements RtpVideoStreamReceiver2::OnCompleteFrameCallback. void OnCompleteFrame(std::unique_ptr frame) override; @@ -184,23 +196,44 @@ class VideoReceiveStream2 void GenerateKeyFrame() override; private: + // FrameSchedulingReceiver implementation. + // Called on packet sequence. + void OnEncodedFrame(std::unique_ptr frame) override; + // Called on packet sequence. + void OnDecodableFrameTimeout(TimeDelta wait) override; + void CreateAndRegisterExternalDecoder(const Decoder& decoder); - int64_t GetMaxWaitMs() const RTC_RUN_ON(decode_queue_); - void StartNextDecode() RTC_RUN_ON(decode_queue_); - void HandleEncodedFrame(std::unique_ptr frame) - RTC_RUN_ON(decode_queue_); - void HandleFrameBufferTimeout(int64_t now_ms, int64_t wait_ms) - RTC_RUN_ON(packet_sequence_checker_); + + struct DecodeFrameResult { + // True if the decoder returned code WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME, + // or if the decoder failed and a keyframe is required. When true, a + // keyframe request should be sent even if a keyframe request was sent + // recently. + bool force_request_key_frame; + + // The picture id of the frame that was decoded, or nullopt if the frame was + // not decoded. + absl::optional decoded_frame_picture_id; + + // True if the next frame decoded must be a keyframe. This value will set + // the value of `keyframe_required_`, which will force the frame buffer to + // drop all frames that are not keyframes. 
+ bool keyframe_required; + }; + + DecodeFrameResult HandleEncodedFrameOnDecodeQueue( + std::unique_ptr frame, + bool keyframe_request_is_due, + bool keyframe_required) RTC_RUN_ON(decode_queue_); void UpdatePlayoutDelays() const RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_sequence_checker_); - void RequestKeyFrame(int64_t timestamp_ms) - RTC_RUN_ON(packet_sequence_checker_); + void RequestKeyFrame(Timestamp now) RTC_RUN_ON(packet_sequence_checker_); void HandleKeyFrameGeneration(bool received_frame_is_keyframe, - int64_t now_ms, + Timestamp now, bool always_request_key_frame, bool keyframe_request_is_due) RTC_RUN_ON(packet_sequence_checker_); - bool IsReceivingKeyFrame(int64_t timestamp_ms) const + bool IsReceivingKeyFrame(Timestamp timestamp) const RTC_RUN_ON(packet_sequence_checker_); int DecodeAndMaybeDispatchEncodedFrame(std::unique_ptr frame) RTC_RUN_ON(decode_queue_); @@ -220,7 +253,7 @@ class VideoReceiveStream2 TaskQueueFactory* const task_queue_factory_; TransportAdapter transport_adapter_; - const VideoReceiveStream::Config config_; + const VideoReceiveStreamInterface::Config config_; const int num_cpu_cores_; Call* const call_; Clock* const clock_; @@ -243,12 +276,7 @@ class VideoReceiveStream2 std::unique_ptr video_stream_decoder_; RtpStreamsSynchronizer rtp_stream_sync_; - // TODO(nisse, philipel): Creation and ownership of video encoders should be - // moved to the new VideoStreamDecoder. - std::vector> video_decoders_; - - // Members for the new jitter buffer experiment. - std::unique_ptr frame_buffer_; + std::unique_ptr buffer_; std::unique_ptr media_receiver_ RTC_GUARDED_BY(packet_sequence_checker_); @@ -259,36 +287,35 @@ class VideoReceiveStream2 // Whenever we are in an undecodable state (stream has just started or due to // a decoding error) we require a keyframe to restart the stream. - bool keyframe_required_ RTC_GUARDED_BY(decode_queue_) = true; + bool keyframe_required_ RTC_GUARDED_BY(packet_sequence_checker_) = true; // If we have successfully decoded any frame. bool frame_decoded_ RTC_GUARDED_BY(decode_queue_) = false; - int64_t last_keyframe_request_ms_ RTC_GUARDED_BY(decode_queue_) = 0; - int64_t last_complete_frame_time_ms_ - RTC_GUARDED_BY(worker_sequence_checker_) = 0; + absl::optional last_keyframe_request_ + RTC_GUARDED_BY(packet_sequence_checker_); // Keyframe request intervals are configurable through field trials. - const int max_wait_for_keyframe_ms_; - const int max_wait_for_frame_ms_; + TimeDelta max_wait_for_keyframe_ RTC_GUARDED_BY(packet_sequence_checker_); + TimeDelta max_wait_for_frame_ RTC_GUARDED_BY(packet_sequence_checker_); // All of them tries to change current min_playout_delay on `timing_` but // source of the change request is different in each case. Among them the // biggest delay is used. -1 means use default value from the `timing_`. // // Minimum delay as decided by the RTP playout delay extension. - int frame_minimum_playout_delay_ms_ RTC_GUARDED_BY(worker_sequence_checker_) = - -1; + absl::optional frame_minimum_playout_delay_ + RTC_GUARDED_BY(worker_sequence_checker_); // Minimum delay as decided by the setLatency function in "webrtc/api". - int base_minimum_playout_delay_ms_ RTC_GUARDED_BY(worker_sequence_checker_) = - -1; + absl::optional base_minimum_playout_delay_ + RTC_GUARDED_BY(worker_sequence_checker_); // Minimum delay as decided by the A/V synchronization feature. 
- int syncable_minimum_playout_delay_ms_ - RTC_GUARDED_BY(worker_sequence_checker_) = -1; + absl::optional syncable_minimum_playout_delay_ + RTC_GUARDED_BY(worker_sequence_checker_); // Maximum delay as decided by the RTP playout delay extension. - int frame_maximum_playout_delay_ms_ RTC_GUARDED_BY(worker_sequence_checker_) = - -1; + absl::optional frame_maximum_playout_delay_ + RTC_GUARDED_BY(worker_sequence_checker_); // Function that is triggered with encoded frames, if not empty. std::function @@ -307,16 +334,6 @@ class VideoReceiveStream2 std::vector> buffered_encoded_frames_ RTC_GUARDED_BY(decode_queue_); - // Set by the field trial WebRTC-LowLatencyRenderer. The parameter `enabled` - // determines if the low-latency renderer algorithm should be used for the - // case min playout delay=0 and max playout delay>0. - FieldTrialParameter low_latency_renderer_enabled_; - // Set by the field trial WebRTC-LowLatencyRenderer. The parameter - // `include_predecode_buffer` determines if the predecode buffer should be - // taken into account when calculating maximum number of frames in composition - // queue. - FieldTrialParameter low_latency_renderer_include_predecode_buffer_; - // Set by the field trial WebRTC-PreStreamDecoders. The parameter `max` // determines the maximum number of decoders that are created up front before // any video frame has been received. @@ -328,6 +345,7 @@ class VideoReceiveStream2 // Used to signal destruction to potentially pending tasks. ScopedTaskSafety task_safety_; }; + } // namespace internal } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream_timeout_tracker.cc b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream_timeout_tracker.cc new file mode 100644 index 0000000000..0409f26560 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream_timeout_tracker.cc @@ -0,0 +1,98 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/video_receive_stream_timeout_tracker.h" + +#include +#include + +#include "rtc_base/logging.h" + +namespace webrtc { + +VideoReceiveStreamTimeoutTracker::VideoReceiveStreamTimeoutTracker( + Clock* clock, + TaskQueueBase* const bookkeeping_queue, + const Timeouts& timeouts, + TimeoutCallback callback) + : clock_(clock), + bookkeeping_queue_(bookkeeping_queue), + timeouts_(timeouts), + timeout_cb_(std::move(callback)) {} + +VideoReceiveStreamTimeoutTracker::~VideoReceiveStreamTimeoutTracker() { + RTC_DCHECK(!timeout_task_.Running()); +} + +bool VideoReceiveStreamTimeoutTracker::Running() const { + return timeout_task_.Running(); +} + +TimeDelta VideoReceiveStreamTimeoutTracker::TimeUntilTimeout() const { + return std::max(timeout_ - clock_->CurrentTime(), TimeDelta::Zero()); +} + +void VideoReceiveStreamTimeoutTracker::Start(bool waiting_for_keyframe) { + RTC_DCHECK_RUN_ON(bookkeeping_queue_); + RTC_DCHECK(!timeout_task_.Running()); + waiting_for_keyframe_ = waiting_for_keyframe; + TimeDelta timeout_delay = TimeoutForNextFrame(); + last_frame_ = clock_->CurrentTime(); + timeout_ = last_frame_ + timeout_delay; + timeout_task_ = + RepeatingTaskHandle::DelayedStart(bookkeeping_queue_, timeout_delay, + [this] { return HandleTimeoutTask(); }); +} + +void VideoReceiveStreamTimeoutTracker::Stop() { + timeout_task_.Stop(); +} + +void VideoReceiveStreamTimeoutTracker::SetWaitingForKeyframe() { + RTC_DCHECK_RUN_ON(bookkeeping_queue_); + waiting_for_keyframe_ = true; + TimeDelta timeout_delay = TimeoutForNextFrame(); + if (clock_->CurrentTime() + timeout_delay < timeout_) { + Stop(); + Start(waiting_for_keyframe_); + } +} + +void VideoReceiveStreamTimeoutTracker::OnEncodedFrameReleased() { + RTC_DCHECK_RUN_ON(bookkeeping_queue_); + // If we were waiting for a keyframe, then it has just been released. + waiting_for_keyframe_ = false; + last_frame_ = clock_->CurrentTime(); + timeout_ = last_frame_ + TimeoutForNextFrame(); +} + +TimeDelta VideoReceiveStreamTimeoutTracker::HandleTimeoutTask() { + RTC_DCHECK_RUN_ON(bookkeeping_queue_); + Timestamp now = clock_->CurrentTime(); + // `timeout_` is hit and we have timed out. Schedule the next timeout at + // the timeout delay. + if (now >= timeout_) { + RTC_DLOG(LS_VERBOSE) << "Stream timeout at " << now; + TimeDelta timeout_delay = TimeoutForNextFrame(); + timeout_ = now + timeout_delay; + timeout_cb_(now - last_frame_); + return timeout_delay; + } + // Otherwise, `timeout_` changed since we scheduled a timeout. Reschedule + // a timeout check. + return timeout_ - now; +} + +void VideoReceiveStreamTimeoutTracker::SetTimeouts(Timeouts timeouts) { + RTC_DCHECK_RUN_ON(bookkeeping_queue_); + timeouts_ = timeouts; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream_timeout_tracker.h b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream_timeout_tracker.h new file mode 100644 index 0000000000..c15aa70e92 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream_timeout_tracker.h @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_VIDEO_RECEIVE_STREAM_TIMEOUT_TRACKER_H_ +#define VIDEO_VIDEO_RECEIVE_STREAM_TIMEOUT_TRACKER_H_ + +#include + +#include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/task_utils/repeating_task.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { + +class VideoReceiveStreamTimeoutTracker { + public: + struct Timeouts { + TimeDelta max_wait_for_keyframe; + TimeDelta max_wait_for_frame; + }; + + using TimeoutCallback = std::function; + VideoReceiveStreamTimeoutTracker(Clock* clock, + TaskQueueBase* const bookkeeping_queue, + const Timeouts& timeouts, + TimeoutCallback callback); + ~VideoReceiveStreamTimeoutTracker(); + VideoReceiveStreamTimeoutTracker(const VideoReceiveStreamTimeoutTracker&) = + delete; + VideoReceiveStreamTimeoutTracker& operator=( + const VideoReceiveStreamTimeoutTracker&) = delete; + + bool Running() const; + void Start(bool waiting_for_keyframe); + void Stop(); + void SetWaitingForKeyframe(); + void OnEncodedFrameReleased(); + TimeDelta TimeUntilTimeout() const; + + void SetTimeouts(Timeouts timeouts); + + private: + TimeDelta TimeoutForNextFrame() const RTC_RUN_ON(bookkeeping_queue_) { + return waiting_for_keyframe_ ? timeouts_.max_wait_for_keyframe + : timeouts_.max_wait_for_frame; + } + TimeDelta HandleTimeoutTask(); + + Clock* const clock_; + TaskQueueBase* const bookkeeping_queue_; + Timeouts timeouts_ RTC_GUARDED_BY(bookkeeping_queue_); + const TimeoutCallback timeout_cb_; + RepeatingTaskHandle timeout_task_; + + Timestamp last_frame_ = Timestamp::MinusInfinity(); + Timestamp timeout_ = Timestamp::MinusInfinity(); + bool waiting_for_keyframe_; +}; +} // namespace webrtc + +#endif // VIDEO_VIDEO_RECEIVE_STREAM_TIMEOUT_TRACKER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream.cc b/TMessagesProj/jni/voip/webrtc/video/video_send_stream.cc index e78211be36..f245332753 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream.cc @@ -20,9 +20,7 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" #include "video/adaptation/overuse_frame_detector.h" #include "video/frame_cadence_adapter.h" #include "video/video_stream_encoder.h" @@ -63,7 +61,8 @@ size_t CalculateMaxHeaderSize(const RtpConfig& config) { } VideoStreamEncoder::BitrateAllocationCallbackType -GetBitrateAllocationCallbackType(const VideoSendStream::Config& config) { +GetBitrateAllocationCallbackType(const VideoSendStream::Config& config, + const FieldTrialsView& field_trials) { if (webrtc::RtpExtension::FindHeaderExtensionByUri( config.rtp.extensions, webrtc::RtpExtension::kVideoLayersAllocationUri, @@ -73,7 +72,7 @@ GetBitrateAllocationCallbackType(const VideoSendStream::Config& config) { return VideoStreamEncoder::BitrateAllocationCallbackType:: kVideoLayersAllocation; } - if (field_trial::IsEnabled("WebRTC-Target-Bitrate-Rtcp")) { + if (field_trials.IsEnabled("WebRTC-Target-Bitrate-Rtcp")) { return VideoStreamEncoder::BitrateAllocationCallbackType:: kVideoBitrateAllocation; } @@ -84,7 +83,7 @@ GetBitrateAllocationCallbackType(const VideoSendStream::Config& config) { RtpSenderFrameEncryptionConfig CreateFrameEncryptionConfig( const VideoSendStream::Config* config) { RtpSenderFrameEncryptionConfig 
frame_encryption_config; - frame_encryption_config.frame_encryptor = config->frame_encryptor; + frame_encryption_config.frame_encryptor = config->frame_encryptor.get(); frame_encryption_config.crypto_options = config->crypto_options; return frame_encryption_config; } @@ -114,16 +113,20 @@ std::unique_ptr CreateVideoStreamEncoder( SendStatisticsProxy* stats_proxy, const VideoStreamEncoderSettings& encoder_settings, VideoStreamEncoder::BitrateAllocationCallbackType - bitrate_allocation_callback_type) { + bitrate_allocation_callback_type, + const FieldTrialsView& field_trials, + webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { std::unique_ptr encoder_queue = task_queue_factory->CreateTaskQueue("EncoderQueue", TaskQueueFactory::Priority::NORMAL); TaskQueueBase* encoder_queue_ptr = encoder_queue.get(); return std::make_unique( clock, num_cpu_cores, stats_proxy, encoder_settings, - std::make_unique(stats_proxy), - FrameCadenceAdapterInterface::Create(clock, encoder_queue_ptr), - std::move(encoder_queue), bitrate_allocation_callback_type); + std::make_unique(stats_proxy, field_trials), + FrameCadenceAdapterInterface::Create(clock, encoder_queue_ptr, + field_trials), + std::move(encoder_queue), bitrate_allocation_callback_type, field_trials, + encoder_selector); } } // namespace @@ -144,19 +147,22 @@ VideoSendStream::VideoSendStream( VideoEncoderConfig encoder_config, const std::map& suspended_ssrcs, const std::map& suspended_payload_states, - std::unique_ptr fec_controller) + std::unique_ptr fec_controller, + const FieldTrialsView& field_trials) : rtp_transport_queue_(transport->GetWorkerQueue()), transport_(transport), - stats_proxy_(clock, config, encoder_config.content_type), + stats_proxy_(clock, config, encoder_config.content_type, field_trials), config_(std::move(config)), content_type_(encoder_config.content_type), - video_stream_encoder_( - CreateVideoStreamEncoder(clock, - num_cpu_cores, - task_queue_factory, - &stats_proxy_, - config_.encoder_settings, - GetBitrateAllocationCallbackType(config_))), + video_stream_encoder_(CreateVideoStreamEncoder( + clock, + num_cpu_cores, + task_queue_factory, + &stats_proxy_, + config_.encoder_settings, + GetBitrateAllocationCallbackType(config_, field_trials), + field_trials, + config_.encoder_selector)), encoder_feedback_( clock, config_.rtp.ssrcs, @@ -180,7 +186,6 @@ VideoSendStream::VideoSendStream( config_.frame_transformer)), send_stream_(clock, &stats_proxy_, - rtp_transport_queue_, transport, bitrate_allocator, video_stream_encoder_.get(), @@ -188,7 +193,8 @@ VideoSendStream::VideoSendStream( encoder_config.max_bitrate_bps, encoder_config.bitrate_priority, encoder_config.content_type, - rtp_video_sender_) { + rtp_video_sender_, + field_trials) { RTC_DCHECK(config_.encoder_settings.encoder_factory); RTC_DCHECK(config_.encoder_settings.bitrate_allocator_factory); @@ -229,8 +235,8 @@ void VideoSendStream::UpdateActiveSimulcastLayers( RTC_LOG(LS_INFO) << "UpdateActiveSimulcastLayers: " << active_layers_string.str(); - rtp_transport_queue_->PostTask( - ToQueuedTask(transport_queue_safety_, [this, active_layers] { + rtp_transport_queue_->RunOrPost( + SafeTask(transport_queue_safety_, [this, active_layers] { send_stream_.UpdateActiveSimulcastLayers(active_layers); })); @@ -245,17 +251,14 @@ void VideoSendStream::Start() { running_ = true; - rtp_transport_queue_->PostTask(ToQueuedTask([this] { - transport_queue_safety_->SetAlive(); - send_stream_.Start(); - thread_sync_event_.Set(); - })); - // It is expected that after 
VideoSendStream::Start has been called, incoming // frames are not dropped in VideoStreamEncoder. To ensure this, Start has to // be synchronized. // TODO(tommi): ^^^ Validate if this still holds. - thread_sync_event_.Wait(rtc::Event::kForever); + rtp_transport_queue_->RunSynchronous([this] { + transport_queue_safety_->SetAlive(); + send_stream_.Start(); + }); } void VideoSendStream::Stop() { @@ -264,7 +267,7 @@ void VideoSendStream::Stop() { return; RTC_DLOG(LS_INFO) << "VideoSendStream::Stop"; running_ = false; - rtp_transport_queue_->PostTask(ToQueuedTask(transport_queue_safety_, [this] { + rtp_transport_queue_->RunOrPost(SafeTask(transport_queue_safety_, [this] { // As the stream can get re-used and implicitly restarted via changing // the state of the active layers, we do not mark the // `transport_queue_safety_` flag with `SetNotAlive()` here. That's only @@ -326,20 +329,25 @@ void VideoSendStream::StopPermanentlyAndGetRtpStates( // Always run these cleanup steps regardless of whether running_ was set // or not. This will unregister callbacks before destruction. // See `VideoSendStreamImpl::StopVideoSendStream` for more. - rtp_transport_queue_->PostTask([this, rtp_state_map, payload_state_map]() { - transport_queue_safety_->SetNotAlive(); - send_stream_.Stop(); - *rtp_state_map = send_stream_.GetRtpStates(); - *payload_state_map = send_stream_.GetRtpPayloadStates(); - thread_sync_event_.Set(); - }); - thread_sync_event_.Wait(rtc::Event::kForever); + rtp_transport_queue_->RunSynchronous( + [this, rtp_state_map, payload_state_map]() { + transport_queue_safety_->SetNotAlive(); + send_stream_.Stop(); + *rtp_state_map = send_stream_.GetRtpStates(); + *payload_state_map = send_stream_.GetRtpPayloadStates(); + }); } void VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) { - // Called on a network thread. 
+ RTC_DCHECK_RUN_ON(&thread_checker_); send_stream_.DeliverRtcp(packet, length); } +void VideoSendStream::GenerateKeyFrame() { + if (video_stream_encoder_) { + video_stream_encoder_->SendKeyFrame(); + } +} + } // namespace internal } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream.h b/TMessagesProj/jni/voip/webrtc/video/video_send_stream.h index 58a0f989b2..a7763731b7 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream.h @@ -16,19 +16,20 @@ #include #include "api/fec_controller.h" +#include "api/field_trials_view.h" #include "api/sequence_checker.h" -#include "api/video/video_stream_encoder_interface.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "call/bitrate_allocator.h" #include "call/video_receive_stream.h" #include "call/video_send_stream.h" +#include "modules/utility/maybe_worker_thread.h" #include "rtc_base/event.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "video/encoder_rtcp_feedback.h" #include "video/send_delay_stats.h" #include "video/send_statistics_proxy.h" #include "video/video_send_stream_impl.h" +#include "video/video_stream_encoder_interface.h" namespace webrtc { namespace test { @@ -68,15 +69,15 @@ class VideoSendStream : public webrtc::VideoSendStream { VideoEncoderConfig encoder_config, const std::map& suspended_ssrcs, const std::map& suspended_payload_states, - std::unique_ptr fec_controller); + std::unique_ptr fec_controller, + const FieldTrialsView& field_trials); ~VideoSendStream() override; void DeliverRtcp(const uint8_t* packet, size_t length); // webrtc::VideoSendStream implementation. 
- void UpdateActiveSimulcastLayers( - const std::vector active_layers) override; + void UpdateActiveSimulcastLayers(std::vector active_layers) override; void Start() override; void Stop() override; bool started() override; @@ -92,6 +93,7 @@ class VideoSendStream : public webrtc::VideoSendStream { void StopPermanentlyAndGetRtpStates(RtpStateMap* rtp_state_map, RtpPayloadStateMap* payload_state_map); + void GenerateKeyFrame() override; private: friend class test::VideoSendStreamPeer; @@ -99,7 +101,7 @@ class VideoSendStream : public webrtc::VideoSendStream { absl::optional GetPacingFactorOverride() const; RTC_NO_UNIQUE_ADDRESS SequenceChecker thread_checker_; - rtc::TaskQueue* const rtp_transport_queue_; + MaybeWorkerThread* const rtp_transport_queue_; RtpTransportControllerSendInterface* const transport_; rtc::Event thread_sync_event_; rtc::scoped_refptr transport_queue_safety_ = diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.cc b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.cc index 862dfde14c..3fdbcb8ad1 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.cc @@ -21,11 +21,12 @@ #include "api/rtp_parameters.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" #include "api/video_codecs/video_codec.h" #include "call/rtp_transport_controller_send_interface.h" #include "call/video_send_stream.h" -#include "modules/pacing/paced_sender.h" -#include "rtc_base/atomic_ops.h" +#include "modules/pacing/pacing_controller.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/alr_experiment.h" #include "rtc_base/experiments/field_trial_parser.h" @@ -33,7 +34,6 @@ #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" #include "system_wrappers/include/field_trial.h" @@ -49,6 +49,9 @@ static constexpr int64_t kMaxVbaThrottleTimeMs = 500; constexpr TimeDelta kEncoderTimeOut = TimeDelta::Seconds(2); +constexpr double kVideoHysteresis = 1.2; +constexpr double kScreenshareHysteresis = 1.35; + // When send-side BWE is used a stricter 1.1x pacing factor is used, rather than // the 2.5x which is used with receive-side BWE. Provides a more careful // bandwidth rampup with less risk of overshoots causing adverse effects like @@ -97,8 +100,9 @@ int CalculateMaxPadBitrateBps(const std::vector& streams, // Without alr probing, pad up to start bitrate of the // highest active stream. const double hysteresis_factor = - RateControlSettings::ParseFromFieldTrials() - .GetSimulcastHysteresisFactor(content_type); + content_type == VideoEncoderConfig::ContentType::kScreen + ? 
kScreenshareHysteresis + : kVideoHysteresis; if (is_svc) { // For SVC, since there is only one "stream", the padding bitrate // needed to enable the top spatial layer is stored in the @@ -192,12 +196,11 @@ uint32_t GetInitialEncoderMaxBitrate(int initial_encoder_max_bitrate) { } // namespace -PacingConfig::PacingConfig() +PacingConfig::PacingConfig(const FieldTrialsView& field_trials) : pacing_factor("factor", kStrictPacingMultiplier), - max_pacing_delay("max_delay", - TimeDelta::Millis(PacedSender::kMaxQueueLengthMs)) { + max_pacing_delay("max_delay", PacingController::kMaxExpectedQueueLength) { ParseFieldTrial({&pacing_factor, &max_pacing_delay}, - field_trial::FindFullName("WebRTC-Video-Pacing")); + field_trials.Lookup("WebRTC-Video-Pacing")); } PacingConfig::PacingConfig(const PacingConfig&) = default; PacingConfig::~PacingConfig() = default; @@ -205,7 +208,6 @@ PacingConfig::~PacingConfig() = default; VideoSendStreamImpl::VideoSendStreamImpl( Clock* clock, SendStatisticsProxy* stats_proxy, - rtc::TaskQueue* rtp_transport_queue, RtpTransportControllerSendInterface* transport, BitrateAllocatorInterface* bitrate_allocator, VideoStreamEncoderInterface* video_stream_encoder, @@ -213,14 +215,15 @@ VideoSendStreamImpl::VideoSendStreamImpl( int initial_encoder_max_bitrate, double initial_encoder_bitrate_priority, VideoEncoderConfig::ContentType content_type, - RtpVideoSenderInterface* rtp_video_sender) + RtpVideoSenderInterface* rtp_video_sender, + const FieldTrialsView& field_trials) : clock_(clock), has_alr_probing_(config->periodic_alr_bandwidth_probing || GetAlrSettings(content_type)), - pacing_config_(PacingConfig()), + pacing_config_(PacingConfig(field_trials)), stats_proxy_(stats_proxy), config_(config), - rtp_transport_queue_(rtp_transport_queue), + rtp_transport_queue_(transport->GetWorkerQueue()), timed_out_(false), transport_(transport), bitrate_allocator_(bitrate_allocator), @@ -285,7 +288,7 @@ VideoSendStreamImpl::VideoSendStreamImpl( transport->EnablePeriodicAlrProbing(*enable_alr_bw_probing); } - rtp_transport_queue_->PostTask(ToQueuedTask(transport_queue_safety_, [this] { + rtp_transport_queue_->RunOrPost(SafeTask(transport_queue_safety_, [this] { if (configured_pacing_factor_) transport_->SetPacingFactor(*configured_pacing_factor_); @@ -300,8 +303,7 @@ VideoSendStreamImpl::~VideoSendStreamImpl() { } void VideoSendStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) { - // Runs on a network thread. - RTC_DCHECK(!rtp_transport_queue_->IsCurrent()); + // Runs on a worker thread. 
rtp_video_sender_->DeliverRtcp(packet, length); } @@ -343,7 +345,8 @@ void VideoSendStreamImpl::StartupVideoSendStream() { activity_ = false; timed_out_ = false; check_encoder_activity_task_ = RepeatingTaskHandle::DelayedStart( - rtp_transport_queue_->Get(), kEncoderTimeOut, [this] { + rtp_transport_queue_->TaskQueueForDelayedTasks(), kEncoderTimeOut, + [this] { RTC_DCHECK_RUN_ON(rtp_transport_queue_); if (!activity_) { if (!timed_out_) { @@ -375,8 +378,8 @@ void VideoSendStreamImpl::Stop() { StopVideoSendStream(); } -// RTC_RUN_ON(rtp_transport_queue_) void VideoSendStreamImpl::StopVideoSendStream() { + RTC_DCHECK_RUN_ON(rtp_transport_queue_); bitrate_allocator_->RemoveObserver(this); check_encoder_activity_task_.Stop(); video_stream_encoder_->OnBitrateUpdated(DataRate::Zero(), DataRate::Zero(), @@ -398,22 +401,19 @@ void VideoSendStreamImpl::SignalEncoderTimedOut() { void VideoSendStreamImpl::OnBitrateAllocationUpdated( const VideoBitrateAllocation& allocation) { - if (!rtp_transport_queue_->IsCurrent()) { - rtp_transport_queue_->PostTask(ToQueuedTask(transport_queue_safety_, [=] { - OnBitrateAllocationUpdated(allocation); - })); - return; - } - - RTC_DCHECK_RUN_ON(rtp_transport_queue_); - - int64_t now_ms = clock_->TimeInMilliseconds(); - if (encoder_target_rate_bps_ != 0) { + // OnBitrateAllocationUpdated is invoked from the encoder task queue or + // the rtp_transport_queue_. + auto task = [=] { + RTC_DCHECK_RUN_ON(rtp_transport_queue_); + if (encoder_target_rate_bps_ == 0) { + return; + } + int64_t now_ms = clock_->TimeInMilliseconds(); if (video_bitrate_allocation_context_) { - // If new allocation is within kMaxVbaSizeDifferencePercent larger than - // the previously sent allocation and the same streams are still enabled, - // it is considered "similar". We do not want send similar allocations - // more once per kMaxVbaThrottleTimeMs. + // If new allocation is within kMaxVbaSizeDifferencePercent larger + // than the previously sent allocation and the same streams are still + // enabled, it is considered "similar". We do not want send similar + // allocations more once per kMaxVbaThrottleTimeMs. const VideoBitrateAllocation& last = video_bitrate_allocation_context_->last_sent_allocation; const bool is_similar = @@ -439,6 +439,12 @@ void VideoSendStreamImpl::OnBitrateAllocationUpdated( // Send bitrate allocation metadata only if encoder is not paused. 
rtp_video_sender_->OnBitrateAllocationUpdated(allocation); + }; + if (!rtp_transport_queue_->IsCurrent()) { + rtp_transport_queue_->TaskQueueForPost()->PostTask( + SafeTask(transport_queue_safety_, std::move(task))); + } else { + task(); } } @@ -472,68 +478,65 @@ void VideoSendStreamImpl::OnEncoderConfigurationChanged( bool is_svc, VideoEncoderConfig::ContentType content_type, int min_transmit_bitrate_bps) { - if (!rtp_transport_queue_->IsCurrent()) { - rtp_transport_queue_->PostTask(ToQueuedTask( - transport_queue_safety_, - [this, streams = std::move(streams), is_svc, content_type, - min_transmit_bitrate_bps]() mutable { - OnEncoderConfigurationChanged(std::move(streams), is_svc, - content_type, min_transmit_bitrate_bps); - })); - return; - } - - RTC_DCHECK_GE(config_->rtp.ssrcs.size(), streams.size()); - TRACE_EVENT0("webrtc", "VideoSendStream::OnEncoderConfigurationChanged"); - RTC_DCHECK_RUN_ON(rtp_transport_queue_); + // Currently called on the encoder TQ + RTC_DCHECK(!rtp_transport_queue_->IsCurrent()); + auto closure = [this, streams = std::move(streams), is_svc, content_type, + min_transmit_bitrate_bps]() mutable { + RTC_DCHECK_GE(config_->rtp.ssrcs.size(), streams.size()); + TRACE_EVENT0("webrtc", "VideoSendStream::OnEncoderConfigurationChanged"); + RTC_DCHECK_RUN_ON(rtp_transport_queue_); - const VideoCodecType codec_type = - PayloadStringToCodecType(config_->rtp.payload_name); - - const absl::optional experimental_min_bitrate = - GetExperimentalMinVideoBitrate(codec_type); - encoder_min_bitrate_bps_ = - experimental_min_bitrate - ? experimental_min_bitrate->bps() - : std::max(streams[0].min_bitrate_bps, kDefaultMinVideoBitrateBps); - - encoder_max_bitrate_bps_ = 0; - double stream_bitrate_priority_sum = 0; - for (const auto& stream : streams) { - // We don't want to allocate more bitrate than needed to inactive streams. - encoder_max_bitrate_bps_ += stream.active ? stream.max_bitrate_bps : 0; - if (stream.bitrate_priority) { - RTC_DCHECK_GT(*stream.bitrate_priority, 0); - stream_bitrate_priority_sum += *stream.bitrate_priority; + const VideoCodecType codec_type = + PayloadStringToCodecType(config_->rtp.payload_name); + + const absl::optional experimental_min_bitrate = + GetExperimentalMinVideoBitrate(codec_type); + encoder_min_bitrate_bps_ = + experimental_min_bitrate + ? experimental_min_bitrate->bps() + : std::max(streams[0].min_bitrate_bps, kDefaultMinVideoBitrateBps); + + encoder_max_bitrate_bps_ = 0; + double stream_bitrate_priority_sum = 0; + for (const auto& stream : streams) { + // We don't want to allocate more bitrate than needed to inactive streams. + encoder_max_bitrate_bps_ += stream.active ? stream.max_bitrate_bps : 0; + if (stream.bitrate_priority) { + RTC_DCHECK_GT(*stream.bitrate_priority, 0); + stream_bitrate_priority_sum += *stream.bitrate_priority; + } + } + RTC_DCHECK_GT(stream_bitrate_priority_sum, 0); + encoder_bitrate_priority_ = stream_bitrate_priority_sum; + encoder_max_bitrate_bps_ = + std::max(static_cast(encoder_min_bitrate_bps_), + encoder_max_bitrate_bps_); + + // TODO(bugs.webrtc.org/10266): Query the VideoBitrateAllocator instead. + max_padding_bitrate_ = CalculateMaxPadBitrateBps( + streams, is_svc, content_type, min_transmit_bitrate_bps, + config_->suspend_below_min_bitrate, has_alr_probing_); + + // Clear stats for disabled layers. 
+ for (size_t i = streams.size(); i < config_->rtp.ssrcs.size(); ++i) { + stats_proxy_->OnInactiveSsrc(config_->rtp.ssrcs[i]); } - } - RTC_DCHECK_GT(stream_bitrate_priority_sum, 0); - encoder_bitrate_priority_ = stream_bitrate_priority_sum; - encoder_max_bitrate_bps_ = - std::max(static_cast(encoder_min_bitrate_bps_), - encoder_max_bitrate_bps_); - - // TODO(bugs.webrtc.org/10266): Query the VideoBitrateAllocator instead. - max_padding_bitrate_ = CalculateMaxPadBitrateBps( - streams, is_svc, content_type, min_transmit_bitrate_bps, - config_->suspend_below_min_bitrate, has_alr_probing_); - - // Clear stats for disabled layers. - for (size_t i = streams.size(); i < config_->rtp.ssrcs.size(); ++i) { - stats_proxy_->OnInactiveSsrc(config_->rtp.ssrcs[i]); - } - const size_t num_temporal_layers = - streams.back().num_temporal_layers.value_or(1); + const size_t num_temporal_layers = + streams.back().num_temporal_layers.value_or(1); - rtp_video_sender_->SetEncodingData(streams[0].width, streams[0].height, - num_temporal_layers); + rtp_video_sender_->SetEncodingData(streams[0].width, streams[0].height, + num_temporal_layers); - if (rtp_video_sender_->IsActive()) { - // The send stream is started already. Update the allocator with new bitrate - // limits. - bitrate_allocator_->AddObserver(this, GetAllocationConfig()); - } + if (rtp_video_sender_->IsActive()) { + // The send stream is started already. Update the allocator with new + // bitrate limits. + bitrate_allocator_->AddObserver(this, GetAllocationConfig()); + } + }; + + rtp_transport_queue_->TaskQueueForPost()->PostTask( + SafeTask(transport_queue_safety_, std::move(closure))); } EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage( @@ -545,42 +548,26 @@ EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage( // Indicate that there still is activity going on. activity_ = true; + RTC_DCHECK(!rtp_transport_queue_->IsCurrent()); - auto enable_padding_task = [this]() { + auto task_to_run_on_worker = [this]() { + RTC_DCHECK_RUN_ON(rtp_transport_queue_); if (disable_padding_) { - RTC_DCHECK_RUN_ON(rtp_transport_queue_); disable_padding_ = false; // To ensure that padding bitrate is propagated to the bitrate allocator. SignalEncoderActive(); } - }; - if (!rtp_transport_queue_->IsCurrent()) { - rtp_transport_queue_->PostTask( - ToQueuedTask(transport_queue_safety_, std::move(enable_padding_task))); - } else { - enable_padding_task(); - } - - EncodedImageCallback::Result result(EncodedImageCallback::Result::OK); - result = - rtp_video_sender_->OnEncodedImage(encoded_image, codec_specific_info); - // Check if there's a throttled VideoBitrateAllocation that we should try - // sending. - auto update_task = [this]() { - RTC_DCHECK_RUN_ON(rtp_transport_queue_); + // Check if there's a throttled VideoBitrateAllocation that we should try + // sending. 
auto& context = video_bitrate_allocation_context_; if (context && context->throttled_allocation) { OnBitrateAllocationUpdated(*context->throttled_allocation); } }; - if (!rtp_transport_queue_->IsCurrent()) { - rtp_transport_queue_->PostTask( - ToQueuedTask(transport_queue_safety_, std::move(update_task))); - } else { - update_task(); - } + rtp_transport_queue_->TaskQueueForPost()->PostTask( + SafeTask(transport_queue_safety_, std::move(task_to_run_on_worker))); - return result; + return rtp_video_sender_->OnEncodedImage(encoded_image, codec_specific_info); } void VideoSendStreamImpl::OnDroppedFrame( diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.h b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.h index a29f186af2..d444eabc21 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.h @@ -19,33 +19,35 @@ #include #include "absl/types/optional.h" +#include "api/field_trials_view.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" #include "api/video/encoded_image.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_bitrate_allocator.h" -#include "api/video/video_stream_encoder_interface.h" #include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/video_encoder_config.h" #include "call/bitrate_allocator.h" #include "call/rtp_config.h" #include "call/rtp_transport_controller_send_interface.h" #include "call/rtp_video_sender_interface.h" #include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/utility/maybe_worker_thread.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" +#include "video/config/video_encoder_config.h" #include "video/send_statistics_proxy.h" +#include "video/video_stream_encoder_interface.h" namespace webrtc { namespace internal { // Pacing buffer config; overridden by ALR config if provided. 
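// A minimal, self-contained sketch of the "run or post" pattern that
// OnBitrateAllocationUpdated() and OnEncodedImage() in the hunks above follow:
// if the caller is already on the transport queue the work runs inline,
// otherwise it is posted wrapped in a safety guard so it becomes a no-op once
// the stream is torn down. TinyQueue, SafetyFlag, MakeSafeTask and RunOrPost
// are simplified stand-ins for webrtc's MaybeWorkerThread, PendingTaskSafetyFlag
// and SafeTask(), not the real API.
#include <atomic>
#include <functional>
#include <memory>
#include <mutex>
#include <thread>
#include <utility>
#include <vector>

class SafetyFlag {
 public:
  void SetNotAlive() { alive_ = false; }
  bool alive() const { return alive_; }

 private:
  std::atomic<bool> alive_{true};
};

class TinyQueue {
 public:
  // True when called from the thread that owns this queue.
  bool IsCurrent() const { return std::this_thread::get_id() == owner_; }

  void PostTask(std::function<void()> task) {
    std::lock_guard<std::mutex> lock(mutex_);
    pending_.push_back(std::move(task));
  }

  // Runs and clears everything that was posted; called by the owning thread.
  void Drain() {
    std::vector<std::function<void()>> tasks;
    {
      std::lock_guard<std::mutex> lock(mutex_);
      tasks.swap(pending_);
    }
    for (auto& task : tasks) task();
  }

 private:
  std::mutex mutex_;
  std::vector<std::function<void()>> pending_;
  std::thread::id owner_ = std::this_thread::get_id();
};

// Wraps `task` so it only runs while `flag` is still alive.
std::function<void()> MakeSafeTask(std::shared_ptr<SafetyFlag> flag,
                                   std::function<void()> task) {
  return [flag, task = std::move(task)] {
    if (flag->alive()) task();
  };
}

void RunOrPost(TinyQueue& queue, std::shared_ptr<SafetyFlag> flag,
               std::function<void()> task) {
  if (queue.IsCurrent()) {
    task();  // Already on the queue: run synchronously, preserving ordering.
  } else {
    queue.PostTask(MakeSafeTask(std::move(flag), std::move(task)));
  }
}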
struct PacingConfig { - PacingConfig(); + explicit PacingConfig(const FieldTrialsView& field_trials); PacingConfig(const PacingConfig&); PacingConfig& operator=(const PacingConfig&) = default; ~PacingConfig(); @@ -65,7 +67,6 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, public: VideoSendStreamImpl(Clock* clock, SendStatisticsProxy* stats_proxy, - rtc::TaskQueue* rtp_transport_queue, RtpTransportControllerSendInterface* transport, BitrateAllocatorInterface* bitrate_allocator, VideoStreamEncoderInterface* video_stream_encoder, @@ -73,11 +74,12 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, int initial_encoder_max_bitrate, double initial_encoder_bitrate_priority, VideoEncoderConfig::ContentType content_type, - RtpVideoSenderInterface* rtp_video_sender); + RtpVideoSenderInterface* rtp_video_sender, + const FieldTrialsView& field_trials); ~VideoSendStreamImpl() override; void DeliverRtcp(const uint8_t* packet, size_t length); - void UpdateActiveSimulcastLayers(const std::vector active_layers); + void UpdateActiveSimulcastLayers(std::vector active_layers); void Start(); void Stop(); @@ -137,7 +139,7 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, SendStatisticsProxy* const stats_proxy_; const VideoSendStream::Config* const config_; - rtc::TaskQueue* const rtp_transport_queue_; + MaybeWorkerThread* const rtp_transport_queue_; RepeatingTaskHandle check_encoder_activity_task_ RTC_GUARDED_BY(rtp_transport_queue_); diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc index 23f9faa1fa..958d04e247 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc @@ -12,9 +12,12 @@ #include #include "absl/algorithm/container.h" +#include "absl/strings/match.h" #include "api/sequence_checker.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/task_queue/task_queue_base.h" +#include "api/test/metrics/global_metrics_logger_and_exporter.h" +#include "api/test/metrics/metric.h" #include "api/test/simulated_network.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "api/video/encoded_image.h" @@ -39,15 +42,18 @@ #include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/svc/scalability_mode_util.h" +#include "modules/video_coding/svc/scalable_video_controller.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/experiments/alr_experiment.h" #include "rtc_base/logging.h" #include "rtc_base/platform_thread.h" #include "rtc_base/rate_limiter.h" +#include "rtc_base/strings/string_builder.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/time_utils.h" #include "rtc_base/unique_id_generator.h" #include "system_wrappers/include/sleep.h" @@ -55,7 +61,6 @@ #include "test/configurable_frame_size_encoder.h" #include "test/fake_encoder.h" #include "test/fake_texture_frame.h" -#include "test/field_trial.h" #include "test/frame_forwarder.h" #include "test/frame_generator_capturer.h" #include "test/frame_utils.h" @@ -63,8 +68,8 @@ #include "test/gtest.h" #include "test/null_transport.h" 
#include "test/rtcp_packet_parser.h" -#include "test/testsupport/perf_test.h" #include "test/video_encoder_proxy_factory.h" +#include "video/config/encoder_stream_factory.h" #include "video/send_statistics_proxy.h" #include "video/transport_adapter.h" #include "video/video_send_stream.h" @@ -105,6 +110,23 @@ enum VideoFormat { kVP8, }; +struct Vp9TestParams { + std::string scalability_mode; + uint8_t num_spatial_layers; + uint8_t num_temporal_layers; + InterLayerPredMode inter_layer_pred; +}; + +using ParameterizationType = std::tuple; + +std::string ParamInfoToStr( + const testing::TestParamInfo& info) { + rtc::StringBuilder sb; + sb << std::get<0>(info.param).scalability_mode << "_" + << (std::get<1>(info.param) ? "WithIdentifier" : "WithoutIdentifier"); + return sb.str(); +} + } // namespace class VideoSendStreamTest : public test::CallTest { @@ -119,18 +141,19 @@ class VideoSendStreamTest : public test::CallTest { uint8_t retransmit_payload_type); void TestPacketFragmentationSize(VideoFormat format, bool with_fec); - void TestVp9NonFlexMode(uint8_t num_temporal_layers, - uint8_t num_spatial_layers); + void TestVp9NonFlexMode(const Vp9TestParams& params, + bool use_scalability_mode_identifier); void TestRequestSourceRotateVideo(bool support_orientation_ext); void TestTemporalLayers(VideoEncoderFactory* encoder_factory, const std::string& payload_name, - const std::vector& num_temporal_layers); + const std::vector& num_temporal_layers, + const std::vector& scalability_mode); }; TEST_F(VideoSendStreamTest, CanStartStartedStream) { - SendTask(RTC_FROM_HERE, task_queue(), [this]() { + SendTask(task_queue(), [this]() { CreateSenderCall(); test::NullTransport transport; @@ -144,7 +167,7 @@ TEST_F(VideoSendStreamTest, CanStartStartedStream) { } TEST_F(VideoSendStreamTest, CanStopStoppedStream) { - SendTask(RTC_FROM_HERE, task_queue(), [this]() { + SendTask(task_queue(), [this]() { CreateSenderCall(); test::NullTransport transport; @@ -161,7 +184,7 @@ TEST_F(VideoSendStreamTest, SupportsCName) { static std::string kCName = "PjQatC14dGfbVwGPUOA9IH7RlsFDbWl4AhXEiDsBizo="; class CNameObserver : public test::SendTest { public: - CNameObserver() : SendTest(kDefaultTimeoutMs) {} + CNameObserver() : SendTest(kDefaultTimeout) {} private: Action OnSendRtcp(const uint8_t* packet, size_t length) override { @@ -179,7 +202,7 @@ TEST_F(VideoSendStreamTest, SupportsCName) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->rtp.c_name = kCName; } @@ -195,7 +218,7 @@ TEST_F(VideoSendStreamTest, SupportsCName) { TEST_F(VideoSendStreamTest, SupportsAbsoluteSendTime) { class AbsoluteSendTimeObserver : public test::SendTest { public: - AbsoluteSendTimeObserver() : SendTest(kDefaultTimeoutMs) { + AbsoluteSendTimeObserver() : SendTest(kDefaultTimeout) { extensions_.Register(kAbsSendTimeExtensionId); } @@ -223,7 +246,7 @@ TEST_F(VideoSendStreamTest, SupportsAbsoluteSendTime) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->rtp.extensions.clear(); send_config->rtp.extensions.push_back( @@ -246,7 +269,7 @@ TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) { class TransmissionTimeOffsetObserver : public test::SendTest { public: TransmissionTimeOffsetObserver() - : SendTest(kDefaultTimeoutMs), encoder_factory_([]() { + : 
SendTest(kDefaultTimeout), encoder_factory_([]() { return std::make_unique( Clock::GetRealTimeClock(), kEncodeDelayMs); }) { @@ -269,7 +292,7 @@ TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->encoder_settings.encoder_factory = &encoder_factory_; send_config->rtp.extensions.clear(); @@ -293,7 +316,7 @@ TEST_F(VideoSendStreamTest, SupportsTransportWideSequenceNumbers) { class TransportWideSequenceNumberObserver : public test::SendTest { public: TransportWideSequenceNumberObserver() - : SendTest(kDefaultTimeoutMs), encoder_factory_([]() { + : SendTest(kDefaultTimeout), encoder_factory_([]() { return std::make_unique( Clock::GetRealTimeClock()); }) { @@ -316,7 +339,7 @@ TEST_F(VideoSendStreamTest, SupportsTransportWideSequenceNumbers) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->encoder_settings.encoder_factory = &encoder_factory_; } @@ -335,7 +358,7 @@ TEST_F(VideoSendStreamTest, SupportsTransportWideSequenceNumbers) { TEST_F(VideoSendStreamTest, SupportsVideoRotation) { class VideoRotationObserver : public test::SendTest { public: - VideoRotationObserver() : SendTest(kDefaultTimeoutMs) { + VideoRotationObserver() : SendTest(kDefaultTimeout) { extensions_.Register(kVideoRotationExtensionId); } @@ -352,7 +375,7 @@ TEST_F(VideoSendStreamTest, SupportsVideoRotation) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->rtp.extensions.clear(); send_config->rtp.extensions.push_back(RtpExtension( @@ -379,7 +402,7 @@ TEST_F(VideoSendStreamTest, SupportsVideoContentType) { class VideoContentTypeObserver : public test::SendTest { public: VideoContentTypeObserver() - : SendTest(kDefaultTimeoutMs), first_frame_sent_(false) { + : SendTest(kDefaultTimeout), first_frame_sent_(false) { extensions_.Register( kVideoContentTypeExtensionId); } @@ -401,7 +424,7 @@ TEST_F(VideoSendStreamTest, SupportsVideoContentType) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->rtp.extensions.clear(); send_config->rtp.extensions.push_back(RtpExtension( @@ -425,7 +448,7 @@ TEST_F(VideoSendStreamTest, SupportsVideoTimingFrames) { class VideoTimingObserver : public test::SendTest { public: VideoTimingObserver() - : SendTest(kDefaultTimeoutMs), first_frame_sent_(false) { + : SendTest(kDefaultTimeout), first_frame_sent_(false) { extensions_.Register(kVideoTimingExtensionId); } @@ -445,7 +468,7 @@ TEST_F(VideoSendStreamTest, SupportsVideoTimingFrames) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->rtp.extensions.clear(); send_config->rtp.extensions.push_back( @@ -489,7 +512,7 @@ class UlpfecObserver : public test::EndToEndTest { public: // Some of the test cases are expected to time out. // Use a shorter timeout window than the default one for those. 
- static constexpr int kReducedTimeoutMs = 10000; + static constexpr TimeDelta kReducedTimeout = TimeDelta::Seconds(10); UlpfecObserver(bool header_extensions_enabled, bool use_nack, @@ -497,8 +520,8 @@ class UlpfecObserver : public test::EndToEndTest { bool expect_ulpfec, const std::string& codec, VideoEncoderFactory* encoder_factory) - : EndToEndTest(expect_ulpfec ? VideoSendStreamTest::kDefaultTimeoutMs - : kReducedTimeoutMs), + : EndToEndTest(expect_ulpfec ? VideoSendStreamTest::kDefaultTimeout + : kReducedTimeout), encoder_factory_(encoder_factory), payload_name_(codec), use_nack_(use_nack), @@ -596,7 +619,7 @@ class UlpfecObserver : public test::EndToEndTest { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { if (use_nack_) { send_config->rtp.nack.rtp_history_ms = @@ -659,9 +682,10 @@ TEST_F(VideoSendStreamTest, SupportsUlpfecWithoutExtensions) { class VideoSendStreamWithoutUlpfecTest : public test::CallTest { protected: VideoSendStreamWithoutUlpfecTest() - : field_trial_("WebRTC-DisableUlpFecExperiment/Enabled/") {} + : field_trial_(field_trials_, "WebRTC-DisableUlpFecExperiment/Enabled/") { + } - test::ScopedFieldTrials field_trial_; + test::ScopedKeyValueConfig field_trial_; }; TEST_F(VideoSendStreamWithoutUlpfecTest, NoUlpfecIfDisabledThroughFieldTrial) { @@ -728,7 +752,7 @@ class FlexfecObserver : public test::EndToEndTest { const std::string& codec, VideoEncoderFactory* encoder_factory, size_t num_video_streams) - : EndToEndTest(VideoSendStreamTest::kDefaultTimeoutMs), + : EndToEndTest(VideoSendStreamTest::kDefaultTimeout), encoder_factory_(encoder_factory), payload_name_(codec), use_nack_(use_nack), @@ -809,7 +833,7 @@ class FlexfecObserver : public test::EndToEndTest { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { if (use_nack_) { send_config->rtp.nack.rtp_history_ms = @@ -924,7 +948,7 @@ void VideoSendStreamTest::TestNackRetransmission( public: explicit NackObserver(uint32_t retransmit_ssrc, uint8_t retransmit_payload_type) - : SendTest(kDefaultTimeoutMs), + : SendTest(kDefaultTimeout), send_count_(0), retransmit_count_(0), retransmit_ssrc_(retransmit_ssrc), @@ -947,12 +971,7 @@ void VideoSendStreamTest::TestNackRetransmission( ++send_count_; // NACK packets at arbitrary points. 
- if (send_count_ == 5 || send_count_ == 25) { - nacked_sequence_numbers_.insert( - nacked_sequence_numbers_.end(), - non_padding_sequence_numbers_.end() - kNackedPacketsAtOnceCount, - non_padding_sequence_numbers_.end()); - + if (send_count_ % 25 == 0) { RTCPSender::Configuration config; config.clock = Clock::GetRealTimeClock(); config.outgoing_transport = transport_adapter_.get(); @@ -964,11 +983,19 @@ void VideoSendStreamTest::TestNackRetransmission( rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]); RTCPSender::FeedbackState feedback_state; + uint16_t nack_sequence_numbers[kNackedPacketsAtOnceCount]; + int nack_count = 0; + for (uint16_t sequence_number : + sequence_numbers_pending_retransmission_) { + if (nack_count < kNackedPacketsAtOnceCount) { + nack_sequence_numbers[nack_count++] = sequence_number; + } else { + break; + } + } - EXPECT_EQ(0, rtcp_sender.SendRTCP( - feedback_state, kRtcpNack, - static_cast(nacked_sequence_numbers_.size()), - &nacked_sequence_numbers_.front())); + EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpNack, nack_count, + nack_sequence_numbers)); } uint16_t sequence_number = rtp_packet.SequenceNumber(); @@ -980,17 +1007,25 @@ void VideoSendStreamTest::TestNackRetransmission( sequence_number = (rtx_header[0] << 8) + rtx_header[1]; } - auto found = absl::c_find(nacked_sequence_numbers_, sequence_number); - if (found != nacked_sequence_numbers_.end()) { - nacked_sequence_numbers_.erase(found); - + auto it = sequence_numbers_pending_retransmission_.find(sequence_number); + if (it == sequence_numbers_pending_retransmission_.end()) { + // Not currently pending retransmission. Add it to retransmission queue + // if media and limit not reached. + if (rtp_packet.Ssrc() == kVideoSendSsrcs[0] && + rtp_packet.payload_size() > 0 && + retransmit_count_ + + sequence_numbers_pending_retransmission_.size() < + kRetransmitTarget) { + sequence_numbers_pending_retransmission_.insert(sequence_number); + } + } else { + // Packet is a retransmission, remove it from queue and check if done. 
+ sequence_numbers_pending_retransmission_.erase(it); if (++retransmit_count_ == kRetransmitTarget) { EXPECT_EQ(retransmit_ssrc_, rtp_packet.Ssrc()); EXPECT_EQ(retransmit_payload_type_, rtp_packet.PayloadType()); observation_complete_.Set(); } - } else { - non_padding_sequence_numbers_.push_back(sequence_number); } return SEND_PACKET; @@ -998,7 +1033,7 @@ void VideoSendStreamTest::TestNackRetransmission( void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { transport_adapter_.reset( new internal::TransportAdapter(send_config->send_transport)); @@ -1018,8 +1053,7 @@ void VideoSendStreamTest::TestNackRetransmission( int retransmit_count_; const uint32_t retransmit_ssrc_; const uint8_t retransmit_payload_type_; - std::vector nacked_sequence_numbers_; - std::vector non_padding_sequence_numbers_; + std::set sequence_numbers_pending_retransmission_; } test(retransmit_ssrc, retransmit_payload_type); RunBaseTest(&test); @@ -1053,7 +1087,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format, size_t stop_size, bool test_generic_packetization, bool use_fec) - : SendTest(kLongTimeoutMs), + : SendTest(kLongTimeout), encoder_(stop), encoder_factory_(&encoder_), max_packet_size_(max_packet_size), @@ -1203,7 +1237,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format, void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { transport_adapter_.reset( new internal::TransportAdapter(send_config->send_transport)); @@ -1287,7 +1321,7 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) { class NoPaddingWhenVideoIsMuted : public test::SendTest { public: NoPaddingWhenVideoIsMuted() - : SendTest(kDefaultTimeoutMs), + : SendTest(kDefaultTimeout), clock_(Clock::GetRealTimeClock()), capturer_(nullptr) {} @@ -1336,7 +1370,7 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { // Make sure padding is sent if encoder is not producing media. encoder_config->min_transmit_bitrate_bps = 50000; @@ -1376,7 +1410,7 @@ TEST_F(VideoSendStreamTest, PaddingIsPrimarilyRetransmissions) { class PaddingIsPrimarilyRetransmissions : public test::EndToEndTest { public: PaddingIsPrimarilyRetransmissions() - : EndToEndTest(kDefaultTimeoutMs), + : EndToEndTest(kDefaultTimeout), clock_(Clock::GetRealTimeClock()), padding_length_(0), total_length_(0), @@ -1415,7 +1449,7 @@ TEST_F(VideoSendStreamTest, PaddingIsPrimarilyRetransmissions) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { // Turn on RTX. 
send_config->rtp.rtx.payload_type = kFakeVideoSendPayloadType; @@ -1459,18 +1493,13 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) { class BitrateObserver : public test::SendTest { public: explicit BitrateObserver(TaskQueueBase* task_queue) - : SendTest(kDefaultTimeoutMs), + : SendTest(kDefaultTimeout), task_queue_(task_queue), retranmission_rate_limiter_(Clock::GetRealTimeClock(), 1000), stream_(nullptr), bitrate_capped_(false), task_safety_flag_(PendingTaskSafetyFlag::CreateDetached()) {} - ~BitrateObserver() override { - // Make sure we free `rtp_rtcp_` in the same context as we constructed it. - SendTask(RTC_FROM_HERE, task_queue_, [this]() { rtp_rtcp_ = nullptr; }); - } - private: Action OnSendRtp(const uint8_t* packet, size_t length) override { if (IsRtcpPacket(rtc::MakeArrayView(packet, length))) @@ -1481,15 +1510,17 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) { const uint32_t ssrc = rtp_packet.Ssrc(); RTC_DCHECK(stream_); - task_queue_->PostTask(ToQueuedTask(task_safety_flag_, [this, ssrc]() { + task_queue_->PostTask(SafeTask(task_safety_flag_, [this, ssrc]() { VideoSendStream::Stats stats = stream_->GetStats(); if (!stats.substreams.empty()) { EXPECT_EQ(1u, stats.substreams.size()); int total_bitrate_bps = stats.substreams.begin()->second.total_bitrate_bps; - test::PrintResult( - "bitrate_stats_", "min_transmit_bitrate_low_remb", "bitrate_bps", - static_cast(total_bitrate_bps), "bps", false); + test::GetGlobalMetricsLogger()->LogSingleValueMetric( + "bitrate_stats_min_transmit_bitrate_low_remb", "bitrate_bps", + static_cast(total_bitrate_bps) / 1000.0, + test::Unit::kKilobitsPerSecond, + test::ImprovementDirection::kNeitherIsBetter); if (total_bitrate_bps > kHighBitrateBps) { rtp_rtcp_->SetRemb(kRembBitrateBps, {ssrc}); bitrate_capped_ = true; @@ -1504,9 +1535,9 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) { return DROP_PACKET; } - void OnVideoStreamsCreated( - VideoSendStream* send_stream, - const std::vector& receive_streams) override { + void OnVideoStreamsCreated(VideoSendStream* send_stream, + const std::vector& + receive_streams) override { stream_ = send_stream; RtpRtcpInterface::Configuration config; config.clock = Clock::GetRealTimeClock(); @@ -1518,7 +1549,7 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { feedback_transport_.reset( new internal::TransportAdapter(send_config->send_transport)); @@ -1526,7 +1557,10 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) { encoder_config->min_transmit_bitrate_bps = kMinTransmitBitrateBps; } - void OnStreamsStopped() override { task_safety_flag_->SetNotAlive(); } + void OnStreamsStopped() override { + task_safety_flag_->SetNotAlive(); + rtp_rtcp_.reset(); + } void PerformTest() override { EXPECT_TRUE(Wait()) @@ -1552,7 +1586,7 @@ TEST_F(VideoSendStreamTest, ChangingNetworkRoute) { class ChangingNetworkRouteTest : public test::EndToEndTest { public: explicit ChangingNetworkRouteTest(TaskQueueBase* task_queue) - : EndToEndTest(test::CallTest::kDefaultTimeoutMs), + : EndToEndTest(test::CallTest::kDefaultTimeout), task_queue_(task_queue), call_(nullptr) { module_process_thread_.Detach(); @@ -1563,7 +1597,7 @@ TEST_F(VideoSendStreamTest, ChangingNetworkRoute) { ~ChangingNetworkRouteTest() { // Block until all already posted tasks run to avoid 'use after free' // when such task 
accesses `this`. - SendTask(RTC_FROM_HERE, task_queue_, [] {}); + SendTask(task_queue_, [] {}); } void OnCallsCreated(Call* sender_call, Call* receiver_call) override { @@ -1574,7 +1608,7 @@ TEST_F(VideoSendStreamTest, ChangingNetworkRoute) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { RTC_DCHECK_RUN_ON(&task_queue_thread_); send_config->rtp.extensions.clear(); @@ -1584,9 +1618,9 @@ TEST_F(VideoSendStreamTest, ChangingNetworkRoute) { (*receive_configs)[0].rtp.transport_cc = true; } - void ModifyAudioConfigs( - AudioSendStream::Config* send_config, - std::vector* receive_configs) override { + void ModifyAudioConfigs(AudioSendStream::Config* send_config, + std::vector* + receive_configs) override { RTC_DCHECK_RUN_ON(&task_queue_thread_); send_config->rtp.extensions.clear(); send_config->rtp.extensions.push_back(RtpExtension( @@ -1598,14 +1632,14 @@ TEST_F(VideoSendStreamTest, ChangingNetworkRoute) { Action OnSendRtp(const uint8_t* packet, size_t length) override { RTC_DCHECK_RUN_ON(&module_process_thread_); - task_queue_->PostTask(ToQueuedTask([this]() { + task_queue_->PostTask([this]() { RTC_DCHECK_RUN_ON(&task_queue_thread_); if (!call_) return; Call::Stats stats = call_->GetStats(); if (stats.send_bandwidth_bps > kStartBitrateBps) observation_complete_.Set(); - })); + }); return SEND_PACKET; } @@ -1621,7 +1655,7 @@ TEST_F(VideoSendStreamTest, ChangingNetworkRoute) { new_route.remote = rtc::RouteEndpoint::CreateWithNetworkId(20); BitrateConstraints bitrate_config; - SendTask(RTC_FROM_HERE, task_queue_, + SendTask(task_queue_, [this, &new_route, &bitrate_config]() { RTC_DCHECK_RUN_ON(&task_queue_thread_); call_->GetTransportControllerSend()->OnNetworkRouteChanged( @@ -1635,7 +1669,7 @@ TEST_F(VideoSendStreamTest, ChangingNetworkRoute) { << "Timed out while waiting for start bitrate to be exceeded."; SendTask( - RTC_FROM_HERE, task_queue_, [this, &new_route, &bitrate_config]() { + task_queue_, [this, &new_route, &bitrate_config]() { RTC_DCHECK_RUN_ON(&task_queue_thread_); bitrate_config.start_bitrate_bps = -1; bitrate_config.max_bitrate_bps = kNewMaxBitrateBps; @@ -1665,19 +1699,19 @@ TEST_F(VideoSendStreamTest, ChangingNetworkRoute) { // Test that if specified, relay cap is lifted on transition to direct // connection. -TEST_F(VideoSendStreamTest, RelayToDirectRoute) { +// TODO(https://bugs.webrtc.org/13353): Test disabled due to flakiness. 
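// --- Illustrative sketch (not part of the patch) ---
// Several hunks above move from PostTask(ToQueuedTask(flag, ...)) to
// PostTask(SafeTask(flag, ...)) and drop RTC_FROM_HERE from SendTask. The
// point of the safety flag is that a posted lambda can detect its owner was
// torn down. A minimal standalone analogue (not WebRTC's
// PendingTaskSafetyFlag API) could look like this:
#include <atomic>
#include <functional>
#include <iostream>
#include <memory>
#include <utility>

// Wraps a closure so it silently becomes a no-op once the flag is lowered.
std::function<void()> GuardedTask(std::shared_ptr<std::atomic<bool>> alive,
                                  std::function<void()> task) {
  return [alive = std::move(alive), task = std::move(task)]() {
    if (alive->load()) task();
  };
}

int main() {
  auto alive = std::make_shared<std::atomic<bool>>(true);
  auto task = GuardedTask(alive, [] { std::cout << "stats updated\n"; });

  task();               // Runs: the owner is still alive.
  alive->store(false);  // Analogous to task_safety_flag_->SetNotAlive().
  task();               // Skipped: the owner has been destroyed.
}
// --- end sketch ---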
+TEST_F(VideoSendStreamTest, DISABLED_RelayToDirectRoute) { static const int kStartBitrateBps = 300000; static const int kRelayBandwidthCapBps = 800000; static const int kMinPacketsToSend = 100; - webrtc::test::ScopedFieldTrials field_trials( - std::string(field_trial::GetFieldTrialString()) + - "WebRTC-Bwe-NetworkRouteConstraints/relay_cap:" + - std::to_string(kRelayBandwidthCapBps) + "bps/"); + webrtc::test::ScopedKeyValueConfig field_trials( + field_trials_, "WebRTC-Bwe-NetworkRouteConstraints/relay_cap:" + + std::to_string(kRelayBandwidthCapBps) + "bps/"); class RelayToDirectRouteTest : public test::EndToEndTest { public: explicit RelayToDirectRouteTest(TaskQueueBase* task_queue) - : EndToEndTest(test::CallTest::kDefaultTimeoutMs), + : EndToEndTest(test::CallTest::kDefaultTimeout), task_queue_(task_queue), call_(nullptr), packets_sent_(0), @@ -1689,7 +1723,7 @@ TEST_F(VideoSendStreamTest, RelayToDirectRoute) { ~RelayToDirectRouteTest() { // Block until all already posted tasks run to avoid 'use after free' // when such task accesses `this`. - SendTask(RTC_FROM_HERE, task_queue_, [] {}); + SendTask(task_queue_, [] {}); } void OnCallsCreated(Call* sender_call, Call* receiver_call) override { @@ -1700,7 +1734,7 @@ TEST_F(VideoSendStreamTest, RelayToDirectRoute) { Action OnSendRtp(const uint8_t* packet, size_t length) override { RTC_DCHECK_RUN_ON(&module_process_thread_); - task_queue_->PostTask(ToQueuedTask([this]() { + task_queue_->PostTask([this]() { RTC_DCHECK_RUN_ON(&task_queue_thread_); if (!call_) return; @@ -1710,7 +1744,7 @@ TEST_F(VideoSendStreamTest, RelayToDirectRoute) { call_->GetStats().send_bandwidth_bps > kRelayBandwidthCapBps; if (did_exceed_cap || had_time_to_exceed_cap_in_relayed_phase) observation_complete_.Set(); - })); + }); return SEND_PACKET; } @@ -1725,7 +1759,7 @@ TEST_F(VideoSendStreamTest, RelayToDirectRoute) { route.local = rtc::RouteEndpoint::CreateWithNetworkId(10); route.remote = rtc::RouteEndpoint::CreateWithNetworkId(20); - SendTask(RTC_FROM_HERE, task_queue_, [this, &route]() { + SendTask(task_queue_, [this, &route]() { RTC_DCHECK_RUN_ON(&task_queue_thread_); relayed_phase_ = true; route.remote = route.remote.CreateWithTurn(true); @@ -1741,7 +1775,7 @@ TEST_F(VideoSendStreamTest, RelayToDirectRoute) { EXPECT_TRUE(Wait()) << "Timeout waiting for sufficient packets sent count."; - SendTask(RTC_FROM_HERE, task_queue_, [this, &route]() { + SendTask(task_queue_, [this, &route]() { RTC_DCHECK_RUN_ON(&task_queue_thread_); EXPECT_LE(call_->GetStats().send_bandwidth_bps, kRelayBandwidthCapBps); @@ -1772,7 +1806,7 @@ TEST_F(VideoSendStreamTest, ChangingTransportOverhead) { class ChangingTransportOverheadTest : public test::EndToEndTest { public: explicit ChangingTransportOverheadTest(TaskQueueBase* task_queue) - : EndToEndTest(test::CallTest::kDefaultTimeoutMs), + : EndToEndTest(test::CallTest::kDefaultTimeout), task_queue_(task_queue), call_(nullptr), packets_sent_(0), @@ -1793,13 +1827,13 @@ TEST_F(VideoSendStreamTest, ChangingTransportOverhead) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->rtp.max_packet_size = kMaxRtpPacketSize; } void PerformTest() override { - SendTask(RTC_FROM_HERE, task_queue_, [this]() { + SendTask(task_queue_, [this]() { transport_overhead_ = 100; call_->GetTransportControllerSend()->OnTransportOverheadChanged( transport_overhead_); @@ -1812,7 +1846,7 @@ TEST_F(VideoSendStreamTest, 
ChangingTransportOverhead) { packets_sent_ = 0; } - SendTask(RTC_FROM_HERE, task_queue_, [this]() { + SendTask(task_queue_, [this]() { transport_overhead_ = 500; call_->GetTransportControllerSend()->OnTransportOverheadChanged( transport_overhead_); @@ -1848,7 +1882,7 @@ class MaxPaddingSetTest : public test::SendTest { MaxPaddingSetTest(bool test_switch_content_type, T* stream_reset_fun, TaskQueueBase* task_queue) - : SendTest(test::CallTest::kDefaultTimeoutMs), + : SendTest(test::CallTest::kDefaultTimeout), running_without_padding_(test_switch_content_type), stream_resetter_(stream_reset_fun), task_queue_(task_queue) { @@ -1860,12 +1894,12 @@ class MaxPaddingSetTest : public test::SendTest { ~MaxPaddingSetTest() { // Block until all already posted tasks run to avoid 'use after free' // when such task accesses `this`. - SendTask(RTC_FROM_HERE, task_queue_, [] {}); + SendTask(task_queue_, [] {}); } void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { RTC_DCHECK_RUN_ON(&task_queue_thread_); RTC_DCHECK_EQ(1, encoder_config->number_of_streams); @@ -1896,7 +1930,7 @@ class MaxPaddingSetTest : public test::SendTest { // Check the stats on the correct thread and signal the 'complete' flag // once we detect that we're done. - task_queue_->PostTask(ToQueuedTask([this]() { + task_queue_->PostTask([this]() { RTC_DCHECK_RUN_ON(&task_queue_thread_); // In case we get a callback during teardown. // When this happens, OnStreamsStopped() has been called already, @@ -1931,7 +1965,7 @@ class MaxPaddingSetTest : public test::SendTest { observation_complete_.Set(); } } - })); + }); return SEND_PACKET; } @@ -2006,7 +2040,7 @@ TEST_F(VideoSendStreamTest, } } EXPECT_TRUE( - init_encode_called_.Wait(VideoSendStreamTest::kDefaultTimeoutMs)); + init_encode_called_.Wait(VideoSendStreamTest::kDefaultTimeout)); { MutexLock lock(&mutex_); EXPECT_EQ(width, last_initialized_frame_width_); @@ -2041,7 +2075,7 @@ TEST_F(VideoSendStreamTest, EncoderObserver encoder; test::VideoEncoderProxyFactory encoder_factory(&encoder); - SendTask(RTC_FROM_HERE, task_queue(), [this, &transport, &encoder_factory]() { + SendTask(task_queue(), [this, &transport, &encoder_factory]() { CreateSenderCall(); CreateSendConfig(1, 0, 0, &transport); GetVideoSendConfig()->encoder_settings.encoder_factory = &encoder_factory; @@ -2053,14 +2087,14 @@ TEST_F(VideoSendStreamTest, encoder.WaitForResolution(kDefaultWidth, kDefaultHeight); - SendTask(RTC_FROM_HERE, task_queue(), [this]() { + SendTask(task_queue(), [this]() { frame_generator_capturer_->ChangeResolution(kDefaultWidth * 2, kDefaultHeight * 2); }); encoder.WaitForResolution(kDefaultWidth * 2, kDefaultHeight * 2); - SendTask(RTC_FROM_HERE, task_queue(), [this]() { + SendTask(task_queue(), [this]() { DestroyStreams(); DestroyCalls(); }); @@ -2092,8 +2126,7 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) { } bool WaitForStartBitrate() { - return start_bitrate_changed_.Wait( - VideoSendStreamTest::kDefaultTimeoutMs); + return start_bitrate_changed_.Wait(VideoSendStreamTest::kDefaultTimeout); } private: @@ -2171,7 +2204,7 @@ class StartStopBitrateObserver : public test::FakeEncoder { } bool WaitForEncoderInit() { - return encoder_init_.Wait(VideoSendStreamTest::kDefaultTimeoutMs); + return encoder_init_.Wait(VideoSendStreamTest::kDefaultTimeout); } bool WaitBitrateChanged(WaitUntil until) { @@ -2188,7 +2221,7 @@ class StartStopBitrateObserver : 
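// --- Illustrative sketch (not part of the patch) ---
// The timeout constants throughout these hunks change from raw millisecond
// integers (kDefaultTimeoutMs) to a typed duration (kDefaultTimeout), so the
// Wait() calls take a duration instead of an int64_t. A standard-library
// analogue of the "remaining time" computation used in the bitrate wait loop
// (names here are illustrative, not WebRTC's TimeDelta API):
#include <algorithm>
#include <chrono>
#include <iostream>

int main() {
  using namespace std::chrono;
  const milliseconds kDefaultTimeout = seconds(30);

  const auto start = steady_clock::now();
  // ... some work happens here ...
  const auto elapsed = duration_cast<milliseconds>(steady_clock::now() - start);

  // Never wait less than 1 ms and never longer than what is left of the
  // overall timeout; mirrors std::max(TimeDelta::Millis(1), timeout - elapsed).
  const milliseconds remaining =
      std::max(milliseconds(1), kDefaultTimeout - elapsed);
  std::cout << "would wait for " << remaining.count() << " ms\n";
}
// --- end sketch ---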
public test::FakeEncoder { (until == WaitUntil::kZero && *bitrate_kbps == 0)) { return true; } - } while (bitrate_changed_.Wait(VideoSendStreamTest::kDefaultTimeoutMs)); + } while (bitrate_changed_.Wait(VideoSendStreamTest::kDefaultTimeout)); return false; } @@ -2203,7 +2236,7 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) { class EncoderStateObserver : public test::SendTest, public VideoEncoder { public: explicit EncoderStateObserver(TaskQueueBase* task_queue) - : SendTest(kDefaultTimeoutMs), + : SendTest(kDefaultTimeout), task_queue_(task_queue), stream_(nullptr), initialized_(false), @@ -2278,15 +2311,15 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) { EXPECT_TRUE(IsReadyForEncode()); } - void OnVideoStreamsCreated( - VideoSendStream* send_stream, - const std::vector& receive_streams) override { + void OnVideoStreamsCreated(VideoSendStream* send_stream, + const std::vector& + receive_streams) override { stream_ = send_stream; } void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->encoder_settings.encoder_factory = &encoder_factory_; encoder_config_ = encoder_config->Copy(); @@ -2295,7 +2328,7 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) { void PerformTest() override { EXPECT_TRUE(Wait()) << "Timed out while waiting for Encode."; - SendTask(RTC_FROM_HERE, task_queue_, [this]() { + SendTask(task_queue_, [this]() { EXPECT_EQ(0u, num_releases()); stream_->ReconfigureVideoEncoder(std::move(encoder_config_)); EXPECT_EQ(0u, num_releases()); @@ -2334,7 +2367,7 @@ class VideoCodecConfigObserver : public test::SendTest, public: VideoCodecConfigObserver(VideoCodecType video_codec_type, TaskQueueBase* task_queue) - : SendTest(VideoSendStreamTest::kDefaultTimeoutMs), + : SendTest(VideoSendStreamTest::kDefaultTimeout), FakeEncoder(Clock::GetRealTimeClock()), video_codec_type_(video_codec_type), stream_(nullptr), @@ -2346,7 +2379,7 @@ class VideoCodecConfigObserver : public test::SendTest, private: void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->encoder_settings.encoder_factory = &encoder_factory_; send_config->rtp.payload_name = CodecTypeToPayloadString(video_codec_type_); @@ -2359,9 +2392,9 @@ class VideoCodecConfigObserver : public test::SendTest, encoder_config_ = encoder_config->Copy(); } - void OnVideoStreamsCreated( - VideoSendStream* send_stream, - const std::vector& receive_streams) override { + void OnVideoStreamsCreated(VideoSendStream* send_stream, + const std::vector& + receive_streams) override { stream_ = send_stream; } @@ -2380,19 +2413,17 @@ class VideoCodecConfigObserver : public test::SendTest, GetEncoderSpecificSettings() const; void PerformTest() override { - EXPECT_TRUE( - init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs)); + EXPECT_TRUE(init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeout)); ASSERT_EQ(1, FakeEncoder::GetNumInitializations()) << "VideoEncoder not initialized."; // Change encoder settings to actually trigger reconfiguration. 
- encoder_settings_.frameDroppingOn = !encoder_settings_.frameDroppingOn; + encoder_config_.frame_drop_enabled = !encoder_config_.frame_drop_enabled; encoder_config_.encoder_specific_settings = GetEncoderSpecificSettings(); - SendTask(RTC_FROM_HERE, task_queue_, [&]() { + SendTask(task_queue_, [&]() { stream_->ReconfigureVideoEncoder(std::move(encoder_config_)); }); - ASSERT_TRUE( - init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs)); + ASSERT_TRUE(init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeout)); EXPECT_EQ(2, FakeEncoder::GetNumInitializations()) << "ReconfigureVideoEncoder did not reinitialize the encoder with " "new encoder settings."; @@ -2414,9 +2445,7 @@ class VideoCodecConfigObserver : public test::SendTest, }; template <> -void VideoCodecConfigObserver::InitCodecSpecifics() { - encoder_settings_ = VideoEncoder::GetDefaultH264Settings(); -} +void VideoCodecConfigObserver::InitCodecSpecifics() {} template <> void VideoCodecConfigObserver::VerifyCodecSpecifics( @@ -2433,18 +2462,16 @@ void VideoCodecConfigObserver::VerifyCodecSpecifics( // Set expected temporal layers as they should have been set when // reconfiguring the encoder and not match the set config. - VideoCodecH264 encoder_settings = encoder_settings_; + VideoCodecH264 encoder_settings = VideoEncoder::GetDefaultH264Settings(); encoder_settings.numberOfTemporalLayers = kVideoCodecConfigObserverNumberOfTemporalLayers; - EXPECT_EQ( - 0, memcmp(&config.H264(), &encoder_settings, sizeof(encoder_settings_))); + EXPECT_EQ(config.H264(), encoder_settings); } template <> rtc::scoped_refptr VideoCodecConfigObserver::GetEncoderSpecificSettings() const { - return rtc::make_ref_counted( - encoder_settings_); + return nullptr; } template <> @@ -2541,7 +2568,7 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) { class RtcpSenderReportTest : public test::SendTest { public: RtcpSenderReportTest() - : SendTest(kDefaultTimeoutMs), + : SendTest(kDefaultTimeout), rtp_packets_sent_(0), media_bytes_sent_(0) {} @@ -2598,11 +2625,11 @@ TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) { private: std::vector CreateEncoderStreams( - int width, - int height, + int frame_width, + int frame_height, const VideoEncoderConfig& encoder_config) override { std::vector streams = - test::CreateVideoStreams(width, height, encoder_config); + test::CreateVideoStreams(frame_width, frame_height, encoder_config); RTC_CHECK_GT(streams[0].max_bitrate_bps, kScreencastMaxTargetBitrateDeltaKbps); streams[0].target_bitrate_bps = @@ -2616,7 +2643,7 @@ TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) { public test::FakeEncoder { public: ScreencastTargetBitrateTest() - : SendTest(kDefaultTimeoutMs), + : SendTest(kDefaultTimeout), test::FakeEncoder(Clock::GetRealTimeClock()), encoder_factory_(this) {} @@ -2633,7 +2660,7 @@ TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->encoder_settings.encoder_factory = &encoder_factory_; EXPECT_EQ(1u, encoder_config->number_of_streams); @@ -2666,8 +2693,8 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { // TODO(bugs.webrtc.org/12058): If these fields trial are on, we get lower // bitrates than expected by this test, due to encoder pushback and subtracted // overhead. 
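// --- Illustrative sketch (not part of the patch) ---
// The H264 codec-specifics check above switches from memcmp() on the settings
// struct to EXPECT_EQ with a value comparison. A byte-wise compare can
// disagree with field-wise equality whenever the struct contains padding
// bytes, as this standalone example suggests (PaddedSettings is illustrative,
// not a WebRTC type; the padding layout depends on the ABI).
#include <cstring>
#include <iostream>

struct PaddedSettings {
  char enabled;  // 1 byte, typically followed by 3 padding bytes.
  int layers;
  bool operator==(const PaddedSettings& other) const {
    return enabled == other.enabled && layers == other.layers;
  }
};

int main() {
  PaddedSettings a, b;
  std::memset(&a, 0xAA, sizeof(a));  // Fill (future) padding with garbage.
  std::memset(&b, 0x55, sizeof(b));
  a.enabled = b.enabled = 1;
  a.layers = b.layers = 2;

  std::cout << std::boolalpha;
  std::cout << "field-wise equal: " << (a == b) << '\n';
  std::cout << "byte-wise equal:  "
            << (std::memcmp(&a, &b, sizeof(a)) == 0)  // usually false: padding
            << '\n';
}
// --- end sketch ---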
- webrtc::test::ScopedFieldTrials field_trials( - std::string(field_trial::GetFieldTrialString()) + + webrtc::test::ScopedKeyValueConfig field_trials( + field_trials_, "WebRTC-VideoRateControl/bitrate_adjuster:false/" "WebRTC-SendSideBwe-WithOverhead/Disabled/"); @@ -2676,7 +2703,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { public test::FakeEncoder { public: explicit EncoderBitrateThresholdObserver(TaskQueueBase* task_queue) - : SendTest(kDefaultTimeoutMs), + : SendTest(kDefaultTimeout), FakeEncoder(Clock::GetRealTimeClock()), task_queue_(task_queue), target_bitrate_(0), @@ -2759,8 +2786,9 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { return; } } while (bitrate_changed_event_.Wait( - std::max(int64_t{1}, VideoSendStreamTest::kDefaultTimeoutMs - - (rtc::TimeMillis() - start_time)))); + std::max(TimeDelta::Millis(1), + VideoSendStreamTest::kDefaultTimeout - + TimeDelta::Millis(rtc::TimeMillis() - start_time)))); MutexLock lock(&mutex_); EXPECT_EQ(target_bitrate_, expected_bitrate) << "Timed out while waiting encoder rate to be set."; @@ -2775,7 +2803,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->encoder_settings.encoder_factory = &encoder_factory_; send_config->encoder_settings.bitrate_allocator_factory = this; @@ -2792,35 +2820,34 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { call_ = sender_call; } - void OnVideoStreamsCreated( - VideoSendStream* send_stream, - const std::vector& receive_streams) override { + void OnVideoStreamsCreated(VideoSendStream* send_stream, + const std::vector& + receive_streams) override { send_stream_ = send_stream; } void PerformTest() override { ASSERT_TRUE(create_rate_allocator_event_.Wait( - VideoSendStreamTest::kDefaultTimeoutMs)) + VideoSendStreamTest::kDefaultTimeout)) << "Timed out while waiting for rate allocator to be created."; - ASSERT_TRUE( - init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs)) + ASSERT_TRUE(init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeout)) << "Timed out while waiting for encoder to be configured."; WaitForSetRates(kStartBitrateKbps); BitrateConstraints bitrate_config; bitrate_config.start_bitrate_bps = kIncreasedStartBitrateKbps * 1000; bitrate_config.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000; - SendTask(RTC_FROM_HERE, task_queue_, [this, &bitrate_config]() { + SendTask(task_queue_, [this, &bitrate_config]() { call_->GetTransportControllerSend()->SetSdpBitrateParameters( bitrate_config); }); // Encoder rate is capped by EncoderConfig max_bitrate_bps. 
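// --- Illustrative sketch (not part of the patch) ---
// Both field-trial hunks replace ScopedFieldTrials (which appended to the
// global trial string) with ScopedKeyValueConfig scoped to the test's
// field_trials_ object. The trial string itself keeps the same
// "Trial-Name/group-or-params/" layout. A small standalone parser for that
// layout (assumption: well-formed input where every name is followed by a
// group, terminated by '/'):
#include <iostream>
#include <map>
#include <string>

std::map<std::string, std::string> ParseFieldTrialString(
    const std::string& trials) {
  std::map<std::string, std::string> result;
  size_t pos = 0;
  while (pos < trials.size()) {
    const size_t name_end = trials.find('/', pos);
    if (name_end == std::string::npos) break;
    const size_t group_end = trials.find('/', name_end + 1);
    if (group_end == std::string::npos) break;
    result[trials.substr(pos, name_end - pos)] =
        trials.substr(name_end + 1, group_end - name_end - 1);
    pos = group_end + 1;
  }
  return result;
}

int main() {
  const auto trials = ParseFieldTrialString(
      "WebRTC-VideoRateControl/bitrate_adjuster:false/"
      "WebRTC-SendSideBwe-WithOverhead/Disabled/");
  for (const auto& [name, group] : trials)
    std::cout << name << " => " << group << '\n';
}
// --- end sketch ---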
WaitForSetRates(kMaxBitrateKbps); encoder_config_.max_bitrate_bps = kLowerMaxBitrateKbps * 1000; - SendTask(RTC_FROM_HERE, task_queue_, [&]() { + SendTask(task_queue_, [&]() { send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy()); }); ASSERT_TRUE(create_rate_allocator_event_.Wait( - VideoSendStreamTest::kDefaultTimeoutMs)); + VideoSendStreamTest::kDefaultTimeout)); EXPECT_EQ(2, num_rate_allocator_creations_) << "Rate allocator should have been recreated."; @@ -2828,11 +2855,11 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { EXPECT_EQ(1, num_encoder_initializations_); encoder_config_.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000; - SendTask(RTC_FROM_HERE, task_queue_, [&]() { + SendTask(task_queue_, [&]() { send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy()); }); ASSERT_TRUE(create_rate_allocator_event_.Wait( - VideoSendStreamTest::kDefaultTimeoutMs)); + VideoSendStreamTest::kDefaultTimeout)); EXPECT_EQ(3, num_rate_allocator_creations_) << "Rate allocator should have been recreated."; @@ -2872,7 +2899,7 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { public test::FakeEncoder { public: explicit ScreencastTargetBitrateTest(TaskQueueBase* task_queue) - : SendTest(kDefaultTimeoutMs), + : SendTest(kDefaultTimeout), test::FakeEncoder(Clock::GetRealTimeClock()), send_stream_(nullptr), encoder_factory_(this), @@ -2913,7 +2940,7 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { } void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->encoder_settings.encoder_factory = &encoder_factory_; EXPECT_EQ(kNumStreams, encoder_config->number_of_streams); @@ -2925,8 +2952,7 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { EXPECT_TRUE(Wait()) << "Timed out while waiting for the encoder to send one frame."; VideoSendStream::Stats stats; - SendTask(RTC_FROM_HERE, task_queue_, - [&]() { stats = send_stream_->GetStats(); }); + SendTask(task_queue_, [&]() { stats = send_stream_->GetStats(); }); for (size_t i = 0; i < kNumStreams; ++i) { ASSERT_TRUE(stats.substreams.find(kVideoSendSsrcs[i]) != @@ -2940,9 +2966,9 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { } } - void OnVideoStreamsCreated( - VideoSendStream* send_stream, - const std::vector& receive_streams) override { + void OnVideoStreamsCreated(VideoSendStream* send_stream, + const std::vector& + receive_streams) override { send_stream_ = send_stream; } @@ -2957,18 +2983,15 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { #if defined(RTC_ENABLE_VP9) class Vp9HeaderObserver : public test::SendTest { public: - Vp9HeaderObserver() - : SendTest(VideoSendStreamTest::kLongTimeoutMs), + explicit Vp9HeaderObserver(const Vp9TestParams& params) + : SendTest(VideoSendStreamTest::kLongTimeout), encoder_factory_([]() { return VP9Encoder::Create(); }), - vp9_settings_(VideoEncoder::GetDefaultVp9Settings()), - packets_sent_(0), - frames_sent_(0), - expected_width_(0), - expected_height_(0) {} + params_(params), + vp9_settings_(VideoEncoder::GetDefaultVp9Settings()) {} virtual void ModifyVideoConfigsHook( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) {} virtual void InspectHeader(const RTPVideoHeaderVP9& vp9) = 0; @@ -2978,7 +3001,7 @@ class Vp9HeaderObserver : public test::SendTest { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* 
receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->encoder_settings.encoder_factory = &encoder_factory_; send_config->rtp.payload_name = "VP9"; @@ -2989,8 +3012,6 @@ class Vp9HeaderObserver : public test::SendTest { vp9_settings_); EXPECT_EQ(1u, encoder_config->number_of_streams); EXPECT_EQ(1u, encoder_config->simulcast_layers.size()); - encoder_config->simulcast_layers[0].num_temporal_layers = - vp9_settings_.numberOfTemporalLayers; encoder_config_ = encoder_config->Copy(); } @@ -3018,9 +3039,9 @@ class Vp9HeaderObserver : public test::SendTest { EXPECT_EQ(kVp9PayloadType, rtp_packet.PayloadType()); rtc::ArrayView rtp_payload = rtp_packet.payload(); - bool new_packet = packets_sent_ == 0 || + bool new_packet = !last_packet_sequence_number_.has_value() || IsNewerSequenceNumber(rtp_packet.SequenceNumber(), - last_packet_sequence_number_); + *last_packet_sequence_number_); if (!rtp_payload.empty() && new_packet) { RTPVideoHeader video_header; EXPECT_NE( @@ -3035,7 +3056,6 @@ class Vp9HeaderObserver : public test::SendTest { // Verify configuration specific settings. InspectHeader(vp9_header); - ++packets_sent_; if (rtp_packet.Marker()) { MutexLock lock(&mutex_); ++frames_sent_; @@ -3044,6 +3064,8 @@ class Vp9HeaderObserver : public test::SendTest { last_packet_sequence_number_ = rtp_packet.SequenceNumber(); last_packet_timestamp_ = rtp_packet.Timestamp(); last_vp9_ = vp9_header; + last_temporal_idx_by_spatial_idx_[vp9_header.spatial_idx] = + vp9_header.temporal_idx; } return SEND_PACKET; } @@ -3057,6 +3079,10 @@ class Vp9HeaderObserver : public test::SendTest { } } + bool IsTemporalShiftEnabled() const { + return params_.scalability_mode.find("_SHIFT") != std::string::npos; + } + void VerifySpatialIdxWithinFrame(const RTPVideoHeaderVP9& vp9) const { bool new_layer = vp9.spatial_idx != last_vp9_.spatial_idx; EXPECT_EQ(new_layer, vp9.beginning_of_frame); @@ -3065,6 +3091,28 @@ class Vp9HeaderObserver : public test::SendTest { vp9.spatial_idx); } + void VerifyTemporalIdxWithinFrame(const RTPVideoHeaderVP9& vp9) const { + if (!IsTemporalShiftEnabled()) { + EXPECT_EQ(vp9.temporal_idx, last_vp9_.temporal_idx); + return; + } + // Temporal shift. + EXPECT_EQ(params_.num_temporal_layers, 2); + if (vp9.spatial_idx == params_.num_spatial_layers - 1) { + // Lower spatial layers should be shifted. + int expected_tid = + (!vp9.inter_pic_predicted || vp9.temporal_idx == 1) ? 0 : 1; + for (int i = 0; i < vp9.spatial_idx; ++i) { + EXPECT_EQ(last_temporal_idx_by_spatial_idx_.at(i), expected_tid); + } + } + // Same within spatial layer. + bool new_layer = vp9.spatial_idx != last_vp9_.spatial_idx; + if (!new_layer) { + EXPECT_EQ(vp9.temporal_idx, last_vp9_.temporal_idx); + } + } + void VerifyFixedTemporalLayerStructure(const RTPVideoHeaderVP9& vp9, uint8_t num_layers) const { switch (num_layers) { @@ -3088,24 +3136,29 @@ class Vp9HeaderObserver : public test::SendTest { void VerifyTemporalLayerStructure0(const RTPVideoHeaderVP9& vp9) const { EXPECT_EQ(kNoTl0PicIdx, vp9.tl0_pic_idx); EXPECT_EQ(kNoTemporalIdx, vp9.temporal_idx); // no tid + // Technically true, but layer indices not available. EXPECT_FALSE(vp9.temporal_up_switch); } void VerifyTemporalLayerStructure1(const RTPVideoHeaderVP9& vp9) const { EXPECT_NE(kNoTl0PicIdx, vp9.tl0_pic_idx); EXPECT_EQ(0, vp9.temporal_idx); // 0,0,0,... 
- EXPECT_FALSE(vp9.temporal_up_switch); } void VerifyTemporalLayerStructure2(const RTPVideoHeaderVP9& vp9) const { EXPECT_NE(kNoTl0PicIdx, vp9.tl0_pic_idx); EXPECT_GE(vp9.temporal_idx, 0); // 0,1,0,1,... (tid reset on I-frames). EXPECT_LE(vp9.temporal_idx, 1); - EXPECT_EQ(vp9.temporal_idx > 0, vp9.temporal_up_switch); - if (IsNewPictureId(vp9)) { - uint8_t expected_tid = - (!vp9.inter_pic_predicted || last_vp9_.temporal_idx == 1) ? 0 : 1; - EXPECT_EQ(expected_tid, vp9.temporal_idx); + EXPECT_TRUE(vp9.temporal_up_switch); + // Verify temporal structure for the highest spatial layer (the structure + // may be shifted for lower spatial layer if temporal shift is configured). + if (IsHighestSpatialLayer(vp9) && vp9.beginning_of_frame) { + int expected_tid = + (!vp9.inter_pic_predicted || + last_temporal_idx_by_spatial_idx_.at(vp9.spatial_idx) == 1) + ? 0 + : 1; + EXPECT_EQ(vp9.temporal_idx, expected_tid); } } @@ -3115,18 +3168,16 @@ class Vp9HeaderObserver : public test::SendTest { EXPECT_LE(vp9.temporal_idx, 2); if (IsNewPictureId(vp9) && vp9.inter_pic_predicted) { EXPECT_NE(vp9.temporal_idx, last_vp9_.temporal_idx); + EXPECT_TRUE(vp9.temporal_up_switch); switch (vp9.temporal_idx) { case 0: - EXPECT_EQ(2, last_vp9_.temporal_idx); - EXPECT_FALSE(vp9.temporal_up_switch); + EXPECT_EQ(last_vp9_.temporal_idx, 2); break; case 1: - EXPECT_EQ(2, last_vp9_.temporal_idx); - EXPECT_TRUE(vp9.temporal_up_switch); + EXPECT_EQ(last_vp9_.temporal_idx, 2); break; case 2: EXPECT_LT(last_vp9_.temporal_idx, 2); - EXPECT_TRUE(vp9.temporal_up_switch); break; } } @@ -3146,6 +3197,11 @@ class Vp9HeaderObserver : public test::SendTest { return frames_sent_ > 0 && (vp9.picture_id != last_vp9_.picture_id); } + bool IsHighestSpatialLayer(const RTPVideoHeaderVP9& vp9) const { + return vp9.spatial_idx == params_.num_spatial_layers - 1 || + vp9.spatial_idx == kNoSpatialIdx; + } + // Flexible mode (F=1): Non-flexible mode (F=0): // // +-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+ @@ -3168,17 +3224,17 @@ class Vp9HeaderObserver : public test::SendTest { EXPECT_NE(kNoPictureId, vp9.picture_id); // I:1 EXPECT_EQ(vp9_settings_.flexibleMode, vp9.flexible_mode); // F - if (vp9_settings_.numberOfSpatialLayers > 1) { - EXPECT_LT(vp9.spatial_idx, vp9_settings_.numberOfSpatialLayers); - } else if (vp9_settings_.numberOfTemporalLayers > 1) { + if (params_.num_spatial_layers > 1) { + EXPECT_LT(vp9.spatial_idx, params_.num_spatial_layers); + } else if (params_.num_temporal_layers > 1) { EXPECT_EQ(vp9.spatial_idx, 0); } else { EXPECT_EQ(vp9.spatial_idx, kNoSpatialIdx); } - if (vp9_settings_.numberOfTemporalLayers > 1) { - EXPECT_LT(vp9.temporal_idx, vp9_settings_.numberOfTemporalLayers); - } else if (vp9_settings_.numberOfSpatialLayers > 1) { + if (params_.num_temporal_layers > 1) { + EXPECT_LT(vp9.temporal_idx, params_.num_temporal_layers); + } else if (params_.num_spatial_layers > 1) { EXPECT_EQ(vp9.temporal_idx, 0); } else { EXPECT_EQ(vp9.temporal_idx, kNoTemporalIdx); @@ -3191,8 +3247,12 @@ class Vp9HeaderObserver : public test::SendTest { EXPECT_FALSE(vp9.inter_pic_predicted); // P if (!vp9.inter_pic_predicted) { - EXPECT_TRUE(vp9.temporal_idx == 0 || vp9.temporal_idx == kNoTemporalIdx); - EXPECT_FALSE(vp9.temporal_up_switch); + if (vp9.temporal_idx == kNoTemporalIdx) { + EXPECT_FALSE(vp9.temporal_up_switch); + } else { + EXPECT_EQ(vp9.temporal_idx, 0); + EXPECT_TRUE(vp9.temporal_up_switch); + } } } @@ -3213,16 +3273,16 @@ class Vp9HeaderObserver : public test::SendTest { // +-+-+-+-+-+-+-+-+ void VerifySsData(const RTPVideoHeaderVP9& vp9) 
const { EXPECT_TRUE(vp9.ss_data_available); // V - EXPECT_EQ(vp9_settings_.numberOfSpatialLayers, // N_S + 1 + EXPECT_EQ(params_.num_spatial_layers, // N_S + 1 vp9.num_spatial_layers); EXPECT_TRUE(vp9.spatial_layer_resolution_present); // Y:1 - int expected_width = expected_width_; - int expected_height = expected_height_; - for (int i = static_cast(vp9.num_spatial_layers) - 1; i >= 0; --i) { - EXPECT_EQ(expected_width, vp9.width[i]); // WIDTH - EXPECT_EQ(expected_height, vp9.height[i]); // HEIGHT - expected_width /= 2; - expected_height /= 2; + + ScalableVideoController::StreamLayersConfig config = GetScalabilityConfig(); + for (int i = config.num_spatial_layers - 1; i >= 0; --i) { + double ratio = static_cast(config.scaling_factor_num[i]) / + config.scaling_factor_den[i]; + EXPECT_EQ(expected_width_ * ratio, vp9.width[i]); // WIDTH + EXPECT_EQ(expected_height_ * ratio, vp9.height[i]); // HEIGHT } } @@ -3232,19 +3292,19 @@ class Vp9HeaderObserver : public test::SendTest { absl::get(video.video_type_header); const bool new_temporal_unit = - packets_sent_ == 0 || - IsNewerTimestamp(rtp_packet.Timestamp(), last_packet_timestamp_); + !last_packet_timestamp_.has_value() || + IsNewerTimestamp(rtp_packet.Timestamp(), *last_packet_timestamp_); const bool new_frame = new_temporal_unit || last_vp9_.spatial_idx != vp9_header.spatial_idx; EXPECT_EQ(new_frame, video.is_first_packet_in_frame); if (!new_temporal_unit) { EXPECT_FALSE(last_packet_marker_); - EXPECT_EQ(last_packet_timestamp_, rtp_packet.Timestamp()); + EXPECT_EQ(*last_packet_timestamp_, rtp_packet.Timestamp()); EXPECT_EQ(last_vp9_.picture_id, vp9_header.picture_id); - EXPECT_EQ(last_vp9_.temporal_idx, vp9_header.temporal_idx); EXPECT_EQ(last_vp9_.tl0_pic_idx, vp9_header.tl0_pic_idx); VerifySpatialIdxWithinFrame(vp9_header); + VerifyTemporalIdxWithinFrame(vp9_header); return; } // New frame. 
@@ -3259,58 +3319,101 @@ class Vp9HeaderObserver : public test::SendTest { VerifyTl0Idx(vp9_header); } + ScalableVideoController::StreamLayersConfig GetScalabilityConfig() const { + absl::optional scalability_mode = + ScalabilityModeFromString(params_.scalability_mode); + EXPECT_TRUE(scalability_mode.has_value()); + absl::optional config = + ScalabilityStructureConfig(*scalability_mode); + EXPECT_TRUE(config.has_value()); + EXPECT_EQ(config->num_spatial_layers, params_.num_spatial_layers); + return *config; + } + test::FunctionVideoEncoderFactory encoder_factory_; + const Vp9TestParams params_; VideoCodecVP9 vp9_settings_; webrtc::VideoEncoderConfig encoder_config_; bool last_packet_marker_ = false; - uint16_t last_packet_sequence_number_ = 0; - uint32_t last_packet_timestamp_ = 0; + absl::optional last_packet_sequence_number_; + absl::optional last_packet_timestamp_; RTPVideoHeaderVP9 last_vp9_; - size_t packets_sent_; + std::map last_temporal_idx_by_spatial_idx_; Mutex mutex_; - size_t frames_sent_; - int expected_width_; - int expected_height_; + size_t frames_sent_ = 0; + int expected_width_ = 0; + int expected_height_ = 0; }; -TEST_F(VideoSendStreamTest, Vp9NonFlexMode_1Tl1SLayers) { - const uint8_t kNumTemporalLayers = 1; - const uint8_t kNumSpatialLayers = 1; - TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers); -} - -TEST_F(VideoSendStreamTest, Vp9NonFlexMode_2Tl1SLayers) { - const uint8_t kNumTemporalLayers = 2; - const uint8_t kNumSpatialLayers = 1; - TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers); -} - -TEST_F(VideoSendStreamTest, Vp9NonFlexMode_3Tl1SLayers) { - const uint8_t kNumTemporalLayers = 3; - const uint8_t kNumSpatialLayers = 1; - TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers); -} - -TEST_F(VideoSendStreamTest, Vp9NonFlexMode_1Tl2SLayers) { - const uint8_t kNumTemporalLayers = 1; - const uint8_t kNumSpatialLayers = 2; - TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers); -} +class Vp9Test : public VideoSendStreamTest, + public ::testing::WithParamInterface { + public: + Vp9Test() + : params_(::testing::get(GetParam())), + use_scalability_mode_identifier_(::testing::get(GetParam())) {} -TEST_F(VideoSendStreamTest, Vp9NonFlexMode_2Tl2SLayers) { - const uint8_t kNumTemporalLayers = 2; - const uint8_t kNumSpatialLayers = 2; - TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers); -} + protected: + const Vp9TestParams params_; + const bool use_scalability_mode_identifier_; +}; -TEST_F(VideoSendStreamTest, Vp9NonFlexMode_3Tl2SLayers) { - const uint8_t kNumTemporalLayers = 3; - const uint8_t kNumSpatialLayers = 2; - TestVp9NonFlexMode(kNumTemporalLayers, kNumSpatialLayers); +INSTANTIATE_TEST_SUITE_P( + ScalabilityMode, + Vp9Test, + ::testing::Combine( + ::testing::ValuesIn( + {{"L1T1", 1, 1, InterLayerPredMode::kOn}, + {"L1T2", 1, 2, InterLayerPredMode::kOn}, + {"L1T3", 1, 3, InterLayerPredMode::kOn}, + {"L2T1", 2, 1, InterLayerPredMode::kOn}, + {"L2T1_KEY", 2, 1, InterLayerPredMode::kOnKeyPic}, + {"L2T2", 2, 2, InterLayerPredMode::kOn}, + {"L2T2_KEY", 2, 2, InterLayerPredMode::kOnKeyPic}, + {"L2T3", 2, 3, InterLayerPredMode::kOn}, + {"L2T3_KEY", 2, 3, InterLayerPredMode::kOnKeyPic}, + {"L3T1", 3, 1, InterLayerPredMode::kOn}, + {"L3T1_KEY", 3, 1, InterLayerPredMode::kOnKeyPic}, + {"L3T2", 3, 2, InterLayerPredMode::kOn}, + {"L3T2_KEY", 3, 2, InterLayerPredMode::kOnKeyPic}, + {"L3T3", 3, 3, InterLayerPredMode::kOn}, + {"L3T3_KEY", 3, 3, InterLayerPredMode::kOnKeyPic}, + {"S2T1", 2, 1, InterLayerPredMode::kOff}, + {"S2T2", 2, 2, 
InterLayerPredMode::kOff}, + {"S2T3", 2, 3, InterLayerPredMode::kOff}, + {"S3T1", 3, 1, InterLayerPredMode::kOff}, + {"S3T2", 3, 2, InterLayerPredMode::kOff}, + {"S3T3", 3, 3, InterLayerPredMode::kOff}}), + ::testing::Values(false, true)), // use_scalability_mode_identifier + ParamInfoToStr); + +INSTANTIATE_TEST_SUITE_P( + ScalabilityModeOn, + Vp9Test, + ::testing::Combine( + ::testing::ValuesIn( + {{"L2T1h", 2, 1, InterLayerPredMode::kOn}, + {"L2T2h", 2, 2, InterLayerPredMode::kOn}, + {"L2T3h", 2, 3, InterLayerPredMode::kOn}, + {"L2T2_KEY_SHIFT", 2, 2, InterLayerPredMode::kOnKeyPic}, + {"L3T1h", 3, 1, InterLayerPredMode::kOn}, + {"L3T2h", 3, 2, InterLayerPredMode::kOn}, + {"L3T3h", 3, 3, InterLayerPredMode::kOn}, + {"S2T1h", 2, 1, InterLayerPredMode::kOff}, + {"S2T2h", 2, 2, InterLayerPredMode::kOff}, + {"S2T3h", 2, 3, InterLayerPredMode::kOff}, + {"S3T1h", 3, 1, InterLayerPredMode::kOff}, + {"S3T2h", 3, 2, InterLayerPredMode::kOff}, + {"S3T3h", 3, 3, InterLayerPredMode::kOff}}), + ::testing::Values(true)), // use_scalability_mode_identifier + ParamInfoToStr); + +TEST_P(Vp9Test, NonFlexMode) { + TestVp9NonFlexMode(params_, use_scalability_mode_identifier_); } -void VideoSendStreamTest::TestVp9NonFlexMode(uint8_t num_temporal_layers, - uint8_t num_spatial_layers) { +void VideoSendStreamTest::TestVp9NonFlexMode( + const Vp9TestParams& params, + bool use_scalability_mode_identifier) { static const size_t kNumFramesToSend = 100; // Set to < kNumFramesToSend and coprime to length of temporal layer // structures to verify temporal id reset on key frame. @@ -3321,18 +3424,20 @@ void VideoSendStreamTest::TestVp9NonFlexMode(uint8_t num_temporal_layers, static const float kGoodBitsPerPixel = 0.1f; class NonFlexibleMode : public Vp9HeaderObserver { public: - NonFlexibleMode(uint8_t num_temporal_layers, uint8_t num_spatial_layers) - : num_temporal_layers_(num_temporal_layers), - num_spatial_layers_(num_spatial_layers), - l_field_(num_temporal_layers > 1 || num_spatial_layers > 1) {} + NonFlexibleMode(const Vp9TestParams& params, + bool use_scalability_mode_identifier) + : Vp9HeaderObserver(params), + use_scalability_mode_identifier_(use_scalability_mode_identifier), + l_field_(params.num_temporal_layers > 1 || + params.num_spatial_layers > 1) {} void ModifyVideoConfigsHook( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { encoder_config->codec_type = kVideoCodecVP9; int bitrate_bps = 0; - for (int sl_idx = 0; sl_idx < num_spatial_layers_; ++sl_idx) { + for (int sl_idx = 0; sl_idx < params_.num_spatial_layers; ++sl_idx) { const int width = kWidth << sl_idx; const int height = kHeight << sl_idx; const float bpp = kGoodBitsPerPixel / (1 << sl_idx); @@ -3340,64 +3445,101 @@ void VideoSendStreamTest::TestVp9NonFlexMode(uint8_t num_temporal_layers, } encoder_config->max_bitrate_bps = bitrate_bps * 2; + encoder_config->frame_drop_enabled = false; + vp9_settings_.flexibleMode = false; - vp9_settings_.frameDroppingOn = false; vp9_settings_.automaticResizeOn = false; vp9_settings_.keyFrameInterval = kKeyFrameInterval; - vp9_settings_.numberOfTemporalLayers = num_temporal_layers_; - vp9_settings_.numberOfSpatialLayers = num_spatial_layers_; + if (!use_scalability_mode_identifier_) { + vp9_settings_.numberOfTemporalLayers = params_.num_temporal_layers; + vp9_settings_.numberOfSpatialLayers = params_.num_spatial_layers; + vp9_settings_.interLayerPred = params_.inter_layer_pred; + } else { + absl::optional mode = + 
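// --- Illustrative sketch (not part of the patch) ---
// The parameterized Vp9Test suites above are keyed by scalability-mode names
// such as "L3T2", "S2T1", or "L2T2_KEY_SHIFT": per the parameter tables, L/S
// selects whether inter-layer prediction is used, the digits give the spatial
// and temporal layer counts, and the suffixes mark key-picture-only
// prediction and temporal shift. A rough standalone decoder of that naming
// scheme (my reading of the test list, not WebRTC's
// ScalabilityModeFromString):
#include <cassert>
#include <string>

struct ParsedScalabilityMode {
  int num_spatial_layers = 1;
  int num_temporal_layers = 1;
  bool inter_layer_pred = false;   // false for the "S..." modes in the list.
  bool key_pic_only_pred = false;  // "_KEY" suffix.
  bool temporal_shift = false;     // "_SHIFT" suffix.
};

ParsedScalabilityMode ParseScalabilityModeName(const std::string& name) {
  ParsedScalabilityMode parsed;
  // Expect "<L|S><digit>T<digit>" at the start, e.g. "L3T2".
  assert(name.size() >= 4 && (name[0] == 'L' || name[0] == 'S') &&
         name[2] == 'T');
  parsed.inter_layer_pred = name[0] == 'L';
  parsed.num_spatial_layers = name[1] - '0';
  parsed.num_temporal_layers = name[3] - '0';
  parsed.key_pic_only_pred = name.find("_KEY") != std::string::npos;
  parsed.temporal_shift = name.find("_SHIFT") != std::string::npos;
  return parsed;
}

int main() {
  const ParsedScalabilityMode m = ParseScalabilityModeName("L2T2_KEY_SHIFT");
  assert(m.num_spatial_layers == 2 && m.num_temporal_layers == 2);
  assert(m.inter_layer_pred && m.key_pic_only_pred && m.temporal_shift);
}
// --- end sketch ---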
ScalabilityModeFromString(params_.scalability_mode); + encoder_config->simulcast_layers[0].scalability_mode = mode; + EXPECT_TRUE(mode.has_value()); + } + } + + int GetRequiredDivisibility() const { + ScalableVideoController::StreamLayersConfig config = + GetScalabilityConfig(); + int required_divisibility = 1; + for (int sl_idx = 0; sl_idx < config.num_spatial_layers; ++sl_idx) { + required_divisibility = cricket::LeastCommonMultiple( + required_divisibility, config.scaling_factor_den[sl_idx]); + } + return required_divisibility; } void ModifyVideoCaptureStartResolution(int* width, int* height, int* frame_rate) override { - expected_width_ = kWidth << (num_spatial_layers_ - 1); - expected_height_ = kHeight << (num_spatial_layers_ - 1); + expected_width_ = kWidth << (params_.num_spatial_layers - 1); + expected_height_ = kHeight << (params_.num_spatial_layers - 1); *width = expected_width_; *height = expected_height_; + // Top layer may be adjusted to ensure evenly divided layers. + int divisibility = GetRequiredDivisibility(); + expected_width_ -= (expected_width_ % divisibility); + expected_height_ -= (expected_height_ % divisibility); } void InspectHeader(const RTPVideoHeaderVP9& vp9) override { - bool ss_data_expected = - !vp9.inter_pic_predicted && vp9.beginning_of_frame && - (vp9.spatial_idx == 0 || vp9.spatial_idx == kNoSpatialIdx); + bool ss_data_expected = !vp9.inter_pic_predicted && + vp9.beginning_of_frame && + !vp9.inter_layer_predicted; EXPECT_EQ(ss_data_expected, vp9.ss_data_available); - if (num_spatial_layers_ > 1) { - EXPECT_EQ(vp9.spatial_idx > 0, vp9.inter_layer_predicted); + + bool is_key_frame = frames_sent_ % kKeyFrameInterval == 0; + if (params_.num_spatial_layers > 1) { + switch (params_.inter_layer_pred) { + case InterLayerPredMode::kOff: + EXPECT_FALSE(vp9.inter_layer_predicted); + break; + case InterLayerPredMode::kOn: + EXPECT_EQ(vp9.spatial_idx > 0, vp9.inter_layer_predicted); + break; + case InterLayerPredMode::kOnKeyPic: + EXPECT_EQ(is_key_frame && vp9.spatial_idx > 0, + vp9.inter_layer_predicted); + break; + } } else { EXPECT_FALSE(vp9.inter_layer_predicted); } - EXPECT_EQ(!vp9.inter_pic_predicted, - frames_sent_ % kKeyFrameInterval == 0); + EXPECT_EQ(is_key_frame, !vp9.inter_pic_predicted); if (IsNewPictureId(vp9)) { - if (num_temporal_layers_ == 1 && num_spatial_layers_ == 1) { + if (params_.num_temporal_layers == 1 && + params_.num_spatial_layers == 1) { EXPECT_EQ(kNoSpatialIdx, vp9.spatial_idx); } else { EXPECT_EQ(0, vp9.spatial_idx); } - if (num_spatial_layers_ > 1) - EXPECT_EQ(num_spatial_layers_ - 1, last_vp9_.spatial_idx); + if (params_.num_spatial_layers > 1) + EXPECT_EQ(params_.num_spatial_layers - 1, last_vp9_.spatial_idx); } - VerifyFixedTemporalLayerStructure(vp9, - l_field_ ? num_temporal_layers_ : 0); + VerifyFixedTemporalLayerStructure( + vp9, l_field_ ? 
params_.num_temporal_layers : 0); if (frames_sent_ > kNumFramesToSend) observation_complete_.Set(); } - const uint8_t num_temporal_layers_; - const uint8_t num_spatial_layers_; + const bool use_scalability_mode_identifier_; const bool l_field_; private: void ModifySenderBitrateConfig( BitrateConstraints* bitrate_config) override { - const int kMinBitrateBps = 300000; - bitrate_config->min_bitrate_bps = kMinBitrateBps; + const int kBitrateBps = 800000; + bitrate_config->min_bitrate_bps = kBitrateBps; + bitrate_config->start_bitrate_bps = kBitrateBps; } - } test(num_temporal_layers, num_spatial_layers); + } test(params, use_scalability_mode_identifier); RunBaseTest(&test); } @@ -3407,16 +3549,20 @@ TEST_F(VideoSendStreamTest, Vp9NonFlexModeSmallResolution) { static const int kWidth = 4; static const int kHeight = 4; class NonFlexibleModeResolution : public Vp9HeaderObserver { + public: + explicit NonFlexibleModeResolution(const Vp9TestParams& params) + : Vp9HeaderObserver(params) {} + + private: void ModifyVideoConfigsHook( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { encoder_config->codec_type = kVideoCodecVP9; vp9_settings_.flexibleMode = false; - vp9_settings_.numberOfTemporalLayers = 1; - vp9_settings_.numberOfSpatialLayers = 1; - - EXPECT_EQ(1u, encoder_config->number_of_streams); + vp9_settings_.numberOfTemporalLayers = params_.num_temporal_layers; + vp9_settings_.numberOfSpatialLayers = params_.num_spatial_layers; + vp9_settings_.interLayerPred = params_.inter_layer_pred; } void InspectHeader(const RTPVideoHeaderVP9& vp9_header) override { @@ -3432,7 +3578,10 @@ TEST_F(VideoSendStreamTest, Vp9NonFlexModeSmallResolution) { *width = kWidth; *height = kHeight; } - } test; + }; + + Vp9TestParams params{"L1T1", 1, 1, InterLayerPredMode::kOn}; + NonFlexibleModeResolution test(params); RunBaseTest(&test); } @@ -3448,15 +3597,21 @@ TEST_F(VideoSendStreamTest, Vp9NonFlexModeSmallResolution) { #endif TEST_F(VideoSendStreamTest, MAYBE_Vp9FlexModeRefCount) { class FlexibleMode : public Vp9HeaderObserver { + public: + explicit FlexibleMode(const Vp9TestParams& params) + : Vp9HeaderObserver(params) {} + + private: void ModifyVideoConfigsHook( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { encoder_config->codec_type = kVideoCodecVP9; encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen; vp9_settings_.flexibleMode = true; - vp9_settings_.numberOfTemporalLayers = 1; - vp9_settings_.numberOfSpatialLayers = 2; + vp9_settings_.numberOfTemporalLayers = params_.num_temporal_layers; + vp9_settings_.numberOfSpatialLayers = params_.num_spatial_layers; + vp9_settings_.interLayerPred = params_.inter_layer_pred; } void InspectHeader(const RTPVideoHeaderVP9& vp9_header) override { @@ -3467,7 +3622,10 @@ TEST_F(VideoSendStreamTest, MAYBE_Vp9FlexModeRefCount) { observation_complete_.Set(); } } - } test; + }; + + Vp9TestParams params{"L2T1", 2, 1, InterLayerPredMode::kOn}; + FlexibleMode test(params); RunBaseTest(&test); } @@ -3511,7 +3669,7 @@ TEST_F(VideoSendStreamTest, EncoderConfigMaxFramerateReportedToSource) { class FpsObserver : public test::SendTest, public test::FrameGeneratorCapturer::SinkWantsObserver { public: - FpsObserver() : SendTest(kDefaultTimeoutMs) {} + FpsObserver() : SendTest(kDefaultTimeout) {} void OnFrameGeneratorCapturerCreated( test::FrameGeneratorCapturer* 
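// --- Illustrative sketch (not part of the patch) ---
// The GetRequiredDivisibility() helper added above computes the least common
// multiple of the per-layer scaling-factor denominators so the top-layer
// resolution can be trimmed to a value every spatial layer divides evenly.
// The same computation with the standard library (assuming
// cricket::LeastCommonMultiple behaves like std::lcm; the denominators below
// are made up for illustration):
#include <cassert>
#include <numeric>
#include <vector>

int RequiredDivisibility(const std::vector<int>& scaling_factor_den) {
  int divisibility = 1;
  for (int den : scaling_factor_den)
    divisibility = std::lcm(divisibility, den);
  return divisibility;
}

int main() {
  const int divisibility = RequiredDivisibility({1, 3, 9});
  assert(divisibility == 9);

  // Trim a 1280x720 capture so each layer gets an integer resolution,
  // mirroring expected_width_ -= (expected_width_ % divisibility).
  int width = 1280, height = 720;
  width -= width % divisibility;    // 1278
  height -= height % divisibility;  // 720
  assert(width == 1278 && height == 720);
}
// --- end sketch ---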
frame_generator_capturer) override { @@ -3526,7 +3684,7 @@ TEST_F(VideoSendStreamTest, EncoderConfigMaxFramerateReportedToSource) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { encoder_config->simulcast_layers[0].max_framerate = kMaxFps; } @@ -3549,7 +3707,7 @@ TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) { public test::FakeEncoder { public: explicit RemoveOverheadFromBandwidthTest(TaskQueueBase* task_queue) - : EndToEndTest(test::CallTest::kDefaultTimeoutMs), + : EndToEndTest(test::CallTest::kDefaultTimeout), FakeEncoder(Clock::GetRealTimeClock()), task_queue_(task_queue), encoder_factory_(this), @@ -3574,7 +3732,7 @@ TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { send_config->rtp.max_packet_size = 1200; send_config->encoder_settings.encoder_factory = &encoder_factory_; @@ -3595,7 +3753,7 @@ TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) { bitrate_config.start_bitrate_bps = kStartBitrateBps; bitrate_config.max_bitrate_bps = kMaxBitrateBps; bitrate_config.min_bitrate_bps = kMinBitrateBps; - SendTask(RTC_FROM_HERE, task_queue_, [this, &bitrate_config]() { + SendTask(task_queue_, [this, &bitrate_config]() { call_->GetTransportControllerSend()->SetSdpBitrateParameters( bitrate_config); call_->GetTransportControllerSend()->OnTransportOverheadChanged(40); @@ -3605,7 +3763,7 @@ TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) { // overhead of 40B per packet video produces 2240bps overhead. // So the encoder BW should be set to 57760bps. EXPECT_TRUE( - bitrate_changed_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs)); + bitrate_changed_event_.Wait(VideoSendStreamTest::kDefaultTimeout)); { MutexLock lock(&mutex_); EXPECT_LE(max_bitrate_bps_, 57760u); @@ -3628,13 +3786,13 @@ class PacingFactorObserver : public test::SendTest { public: PacingFactorObserver(bool configure_send_side, absl::optional expected_pacing_factor) - : test::SendTest(VideoSendStreamTest::kDefaultTimeoutMs), + : test::SendTest(VideoSendStreamTest::kDefaultTimeout), configure_send_side_(configure_send_side), expected_pacing_factor_(expected_pacing_factor) {} void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { // Check if send-side bwe extension is already present, and remove it if // it is not desired. @@ -3666,9 +3824,9 @@ class PacingFactorObserver : public test::SendTest { encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen; } - void OnVideoStreamsCreated( - VideoSendStream* send_stream, - const std::vector& receive_streams) override { + void OnVideoStreamsCreated(VideoSendStream* send_stream, + const std::vector& + receive_streams) override { auto internal_send_peer = test::VideoSendStreamPeer(send_stream); // Video streams created, check that pacing factor is correctly configured. 
EXPECT_EQ(expected_pacing_factor_, @@ -3724,7 +3882,7 @@ class ContentSwitchTest : public test::SendTest { static const uint32_t kMinPacketsToSend = 50; explicit ContentSwitchTest(T* stream_reset_fun, TaskQueueBase* task_queue) - : SendTest(test::CallTest::kDefaultTimeoutMs), + : SendTest(test::CallTest::kDefaultTimeout), call_(nullptr), state_(StreamState::kBeforeSwitch), send_stream_(nullptr), @@ -3735,16 +3893,16 @@ class ContentSwitchTest : public test::SendTest { RTC_DCHECK(stream_resetter_); } - void OnVideoStreamsCreated( - VideoSendStream* send_stream, - const std::vector& receive_streams) override { + void OnVideoStreamsCreated(VideoSendStream* send_stream, + const std::vector& + receive_streams) override { MutexLock lock(&mutex_); send_stream_ = send_stream; } void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { RTC_DCHECK_EQ(1, encoder_config->number_of_streams); encoder_config->min_transmit_bitrate_bps = 0; @@ -3764,7 +3922,7 @@ class ContentSwitchTest : public test::SendTest { } Action OnSendRtp(const uint8_t* packet, size_t length) override { - task_queue_->PostTask(ToQueuedTask([this]() { + task_queue_->PostTask([this]() { MutexLock lock(&mutex_); if (done_) return; @@ -3813,15 +3971,14 @@ class ContentSwitchTest : public test::SendTest { return; } observation_complete_.Set(); - })); + }); return SEND_PACKET; } void PerformTest() override { while (GetStreamState() != StreamState::kAfterSwitchBack) { - ASSERT_TRUE( - content_switch_event_.Wait(test::CallTest::kDefaultTimeoutMs)); + ASSERT_TRUE(content_switch_event_.Wait(test::CallTest::kDefaultTimeout)); (*stream_resetter_)(send_stream_config_, encoder_config_, this); } @@ -3852,7 +4009,7 @@ TEST_F(VideoSendStreamTest, SwitchesToScreenshareAndBack) { auto reset_fun = [this](const VideoSendStream::Config& send_stream_config, const VideoEncoderConfig& encoder_config, test::BaseTest* test) { - SendTask(RTC_FROM_HERE, task_queue(), + SendTask(task_queue(), [this, &send_stream_config, &encoder_config, &test]() { Stop(); DestroyVideoSendStreams(); @@ -3872,7 +4029,8 @@ TEST_F(VideoSendStreamTest, SwitchesToScreenshareAndBack) { void VideoSendStreamTest::TestTemporalLayers( VideoEncoderFactory* encoder_factory, const std::string& payload_name, - const std::vector& num_temporal_layers) { + const std::vector& num_temporal_layers, + const std::vector& scalability_mode) { static constexpr int kMaxBitrateBps = 1000000; static constexpr int kMinFramesToObservePerStream = 8; @@ -3882,11 +4040,13 @@ void VideoSendStreamTest::TestTemporalLayers( public: TemporalLayerObserver(VideoEncoderFactory* encoder_factory, const std::string& payload_name, - const std::vector& num_temporal_layers) - : EndToEndTest(kDefaultTimeoutMs), + const std::vector& num_temporal_layers, + const std::vector& scalability_mode) + : EndToEndTest(kDefaultTimeout), encoder_factory_(encoder_factory), payload_name_(payload_name), num_temporal_layers_(num_temporal_layers), + scalability_mode_(scalability_mode), depacketizer_(CreateVideoRtpDepacketizer( PayloadStringToCodecType(payload_name))) {} @@ -3905,13 +4065,18 @@ void VideoSendStreamTest::TestTemporalLayers( } size_t GetNumVideoStreams() const override { - return num_temporal_layers_.size(); + if (scalability_mode_.empty()) { + return num_temporal_layers_.size(); + } else { + return scalability_mode_.size(); + } } void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* 
receive_configs, + std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { + webrtc::VideoEncoder::EncoderInfo encoder_info; send_config->encoder_settings.encoder_factory = encoder_factory_; send_config->rtp.payload_name = payload_name_; send_config->rtp.payload_type = test::CallTest::kVideoSendPayloadType; @@ -3920,14 +4085,28 @@ void VideoSendStreamTest::TestTemporalLayers( encoder_config->video_stream_factory = rtc::make_ref_counted( payload_name_, /*max_qp=*/56, /*is_screenshare=*/false, - /*conference_mode=*/false); + /*conference_mode=*/false, encoder_info); encoder_config->max_bitrate_bps = kMaxBitrateBps; + if (absl::EqualsIgnoreCase(payload_name_, "VP9")) { + encoder_config->encoder_specific_settings = rtc::make_ref_counted< + VideoEncoderConfig::Vp9EncoderSpecificSettings>( + VideoEncoder::GetDefaultVp9Settings()); + } + if (scalability_mode_.empty()) { + for (size_t i = 0; i < num_temporal_layers_.size(); ++i) { + VideoStream& stream = encoder_config->simulcast_layers[i]; + stream.num_temporal_layers = num_temporal_layers_[i]; + configured_num_temporal_layers_[send_config->rtp.ssrcs[i]] = + num_temporal_layers_[i]; + } + } else { + for (size_t i = 0; i < scalability_mode_.size(); ++i) { + VideoStream& stream = encoder_config->simulcast_layers[i]; + stream.scalability_mode = scalability_mode_[i]; - for (size_t i = 0; i < num_temporal_layers_.size(); ++i) { - VideoStream& stream = encoder_config->simulcast_layers[i]; - stream.num_temporal_layers = num_temporal_layers_[i]; - configured_num_temporal_layers_[send_config->rtp.ssrcs[i]] = - num_temporal_layers_[i]; + configured_num_temporal_layers_[send_config->rtp.ssrcs[i]] = + ScalabilityModeToNumTemporalLayers(scalability_mode_[i]); + } } } @@ -3956,6 +4135,9 @@ void VideoSendStreamTest::TestTemporalLayers( if (const auto* vp8_header = absl::get_if( &parsed_payload->video_header.video_type_header)) { parsed.temporal_idx = vp8_header->temporalIdx; + } else if (const auto* vp9_header = absl::get_if( + &parsed_payload->video_header.video_type_header)) { + parsed.temporal_idx = vp9_header->temporal_idx; } else { RTC_DCHECK_NOTREACHED(); } @@ -4014,13 +4196,14 @@ void VideoSendStreamTest::TestTemporalLayers( VideoEncoderFactory* const encoder_factory_; const std::string payload_name_; const std::vector num_temporal_layers_; + const std::vector scalability_mode_; const std::unique_ptr depacketizer_; // Mapped by SSRC. 
std::map configured_num_temporal_layers_; std::map max_observed_tl_idxs_; std::map num_observed_frames_; std::map last_observed_packet_; - } test(encoder_factory, payload_name, num_temporal_layers); + } test(encoder_factory, payload_name, num_temporal_layers, scalability_mode); RunBaseTest(&test); } @@ -4034,7 +4217,8 @@ TEST_F(VideoSendStreamTest, TestTemporalLayersVp8) { }); TestTemporalLayers(&encoder_factory, "VP8", - /*num_temporal_layers=*/{2}); + /*num_temporal_layers=*/{2}, + /*scalability_mode=*/{}); } TEST_F(VideoSendStreamTest, TestTemporalLayersVp8Simulcast) { @@ -4046,7 +4230,8 @@ TEST_F(VideoSendStreamTest, TestTemporalLayersVp8Simulcast) { }); TestTemporalLayers(&encoder_factory, "VP8", - /*num_temporal_layers=*/{2, 2}); + /*num_temporal_layers=*/{2, 2}, + /*scalability_mode=*/{}); } TEST_F(VideoSendStreamTest, TestTemporalLayersVp8SimulcastWithDifferentNumTls) { @@ -4058,7 +4243,8 @@ TEST_F(VideoSendStreamTest, TestTemporalLayersVp8SimulcastWithDifferentNumTls) { }); TestTemporalLayers(&encoder_factory, "VP8", - /*num_temporal_layers=*/{3, 1}); + /*num_temporal_layers=*/{3, 1}, + /*scalability_mode=*/{}); } TEST_F(VideoSendStreamTest, TestTemporalLayersVp8SimulcastWithoutSimAdapter) { @@ -4066,7 +4252,64 @@ TEST_F(VideoSendStreamTest, TestTemporalLayersVp8SimulcastWithoutSimAdapter) { []() { return VP8Encoder::Create(); }); TestTemporalLayers(&encoder_factory, "VP8", - /*num_temporal_layers=*/{2, 2}); + /*num_temporal_layers=*/{2, 2}, + /*scalability_mode=*/{}); +} + +TEST_F(VideoSendStreamTest, TestScalabilityModeVp8L1T2) { + InternalEncoderFactory internal_encoder_factory; + test::FunctionVideoEncoderFactory encoder_factory( + [&internal_encoder_factory]() { + return std::make_unique( + &internal_encoder_factory, SdpVideoFormat("VP8")); + }); + + TestTemporalLayers(&encoder_factory, "VP8", + /*num_temporal_layers=*/{}, {ScalabilityMode::kL1T2}); +} + +TEST_F(VideoSendStreamTest, TestScalabilityModeVp8Simulcast) { + InternalEncoderFactory internal_encoder_factory; + test::FunctionVideoEncoderFactory encoder_factory( + [&internal_encoder_factory]() { + return std::make_unique( + &internal_encoder_factory, SdpVideoFormat("VP8")); + }); + + TestTemporalLayers(&encoder_factory, "VP8", + /*num_temporal_layers=*/{}, + {ScalabilityMode::kL1T2, ScalabilityMode::kL1T2}); +} + +TEST_F(VideoSendStreamTest, TestScalabilityModeVp8SimulcastWithDifferentMode) { + InternalEncoderFactory internal_encoder_factory; + test::FunctionVideoEncoderFactory encoder_factory( + [&internal_encoder_factory]() { + return std::make_unique( + &internal_encoder_factory, SdpVideoFormat("VP8")); + }); + + TestTemporalLayers(&encoder_factory, "VP8", + /*num_temporal_layers=*/{}, + {ScalabilityMode::kL1T3, ScalabilityMode::kL1T1}); +} + +TEST_F(VideoSendStreamTest, TestScalabilityModeVp8SimulcastWithoutSimAdapter) { + test::FunctionVideoEncoderFactory encoder_factory( + []() { return VP8Encoder::Create(); }); + + TestTemporalLayers(&encoder_factory, "VP8", + /*num_temporal_layers=*/{}, + {ScalabilityMode::kL1T2, ScalabilityMode::kL1T2}); +} + +TEST_F(VideoSendStreamTest, TestTemporalLayersVp9) { + test::FunctionVideoEncoderFactory encoder_factory( + []() { return VP9Encoder::Create(); }); + + TestTemporalLayers(&encoder_factory, "VP9", + /*num_temporal_layers=*/{2}, + /*scalability_mode=*/{}); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc index 810a4ff1f5..2f7b37585d 100644 --- 
a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc @@ -20,29 +20,6 @@ namespace webrtc { -namespace { - -std::string WantsToString(const rtc::VideoSinkWants& wants) { - rtc::StringBuilder ss; - - ss << "max_fps=" << wants.max_framerate_fps - << " max_pixel_count=" << wants.max_pixel_count << " target_pixel_count=" - << (wants.target_pixel_count.has_value() - ? std::to_string(wants.target_pixel_count.value()) - : "null") - << " resolutions={"; - for (size_t i = 0; i < wants.resolutions.size(); ++i) { - if (i != 0) - ss << ","; - ss << wants.resolutions[i].width << "x" << wants.resolutions[i].height; - } - ss << "}"; - - return ss.Release(); -} - -} // namespace - VideoSourceSinkController::VideoSourceSinkController( rtc::VideoSinkInterface* sink, rtc::VideoSourceInterface* source) @@ -75,13 +52,17 @@ bool VideoSourceSinkController::HasSource() const { return source_ != nullptr; } +void VideoSourceSinkController::RequestRefreshFrame() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (source_) + source_->RequestRefreshFrame(); +} + void VideoSourceSinkController::PushSourceSinkSettings() { RTC_DCHECK_RUN_ON(&sequence_checker_); if (!source_) return; rtc::VideoSinkWants wants = CurrentSettingsToSinkWants(); - RTC_LOG(LS_INFO) << "Pushing SourceSink restrictions: " - << WantsToString(wants); source_->AddOrUpdateSink(sink_, wants); } @@ -118,6 +99,17 @@ VideoSourceSinkController::resolutions() const { return resolutions_; } +bool VideoSourceSinkController::active() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return active_; +} + +absl::optional +VideoSourceSinkController::requested_resolution() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return requested_resolution_; +} + void VideoSourceSinkController::SetRestrictions( VideoSourceRestrictions restrictions) { RTC_DCHECK_RUN_ON(&sequence_checker_); @@ -153,6 +145,17 @@ void VideoSourceSinkController::SetResolutions( resolutions_ = std::move(resolutions); } +void VideoSourceSinkController::SetActive(bool active) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + active_ = active; +} + +void VideoSourceSinkController::SetRequestedResolution( + absl::optional requested_resolution) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + requested_resolution_ = std::move(requested_resolution); +} + // RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_) rtc::VideoSinkWants VideoSourceSinkController::CurrentSettingsToSinkWants() const { @@ -182,6 +185,8 @@ rtc::VideoSinkWants VideoSourceSinkController::CurrentSettingsToSinkWants() ? static_cast(frame_rate_upper_limit_.value()) : std::numeric_limits::max()); wants.resolutions = resolutions_; + wants.is_active = active_; + wants.requested_resolution = requested_resolution_; return wants; } diff --git a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h index d2e3267a89..1bb6ef61bf 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h @@ -38,6 +38,9 @@ class VideoSourceSinkController { void SetSource(rtc::VideoSourceInterface* source); bool HasSource() const; + // Requests a refresh frame from the current source, if set. + void RequestRefreshFrame(); + // Must be called in order for changes to settings to have an effect. This // allows you to modify multiple properties in a single push to the sink. 
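As the comment above notes, the controller follows a batch-then-push pattern: the setters (including the new SetActive() and SetRequestedResolution()) only record state, and a single push composes everything into one sink-wants object for the source. Below is a minimal standalone sketch of that pattern, assuming simplified stand-in types rather than rtc::VideoSinkWants; the real PushSourceSinkSettings() declaration follows right after this aside.

#include <cstdio>
#include <optional>

struct Resolution { int width = 0; int height = 0; };

// Simplified stand-in for rtc::VideoSinkWants.
struct SinkWants {
  int max_framerate_fps = 0;
  bool is_active = true;
  std::optional<Resolution> requested_resolution;
};

class SourceSinkControllerSketch {
 public:
  // Setters only record state; nothing reaches the source yet.
  void SetFrameRateUpperLimit(int fps) { max_fps_ = fps; }
  void SetActive(bool active) { active_ = active; }
  void SetRequestedResolution(std::optional<Resolution> r) { requested_ = r; }

  // One push applies every pending change at once, mirroring how
  // PushSourceSinkSettings() builds wants from the stored fields.
  SinkWants Push() const {
    SinkWants wants;
    wants.max_framerate_fps = max_fps_;
    wants.is_active = active_;
    wants.requested_resolution = requested_;
    return wants;  // The real code forwards this via AddOrUpdateSink().
  }

 private:
  int max_fps_ = 30;
  bool active_ = true;
  std::optional<Resolution> requested_;
};

int main() {
  SourceSinkControllerSketch c;
  c.SetActive(false);
  c.SetRequestedResolution(Resolution{1280, 720});
  SinkWants w = c.Push();
  std::printf("active=%d res=%dx%d\n", w.is_active,
              w.requested_resolution->width, w.requested_resolution->height);
}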
void PushSourceSinkSettings(); @@ -48,6 +51,8 @@ class VideoSourceSinkController { bool rotation_applied() const; int resolution_alignment() const; const std::vector& resolutions() const; + bool active() const; + absl::optional requested_resolution() const; // Updates the settings stored internally. In order for these settings to be // applied to the sink, PushSourceSinkSettings() must subsequently be called. @@ -58,6 +63,9 @@ class VideoSourceSinkController { void SetRotationApplied(bool rotation_applied); void SetResolutionAlignment(int resolution_alignment); void SetResolutions(std::vector resolutions); + void SetActive(bool active); + void SetRequestedResolution( + absl::optional requested_resolution); private: rtc::VideoSinkWants CurrentSettingsToSinkWants() const @@ -84,6 +92,9 @@ class VideoSourceSinkController { int resolution_alignment_ RTC_GUARDED_BY(&sequence_checker_) = 1; std::vector resolutions_ RTC_GUARDED_BY(&sequence_checker_); + bool active_ RTC_GUARDED_BY(&sequence_checker_) = true; + absl::optional requested_resolution_ + RTC_GUARDED_BY(&sequence_checker_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_buffer_controller.cc b/TMessagesProj/jni/voip/webrtc/video/video_stream_buffer_controller.cc new file mode 100644 index 0000000000..f7d3acdaf6 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_buffer_controller.cc @@ -0,0 +1,403 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/video_stream_buffer_controller.h" + +#include +#include +#include + +#include "absl/base/attributes.h" +#include "absl/functional/bind_front.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/units/data_size.h" +#include "api/video/encoded_frame.h" +#include "api/video/frame_buffer.h" +#include "api/video/video_content_type.h" +#include "modules/video_coding/frame_helpers.h" +#include "modules/video_coding/timing/inter_frame_delay.h" +#include "modules/video_coding/timing/jitter_estimator.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/thread_annotations.h" +#include "video/frame_decode_scheduler.h" +#include "video/frame_decode_timing.h" +#include "video/task_queue_frame_decode_scheduler.h" +#include "video/video_receive_stream_timeout_tracker.h" + +namespace webrtc { + +namespace { + +// Max number of frames the buffer will hold. +static constexpr size_t kMaxFramesBuffered = 800; +// Max number of decoded frame info that will be saved. +static constexpr int kMaxFramesHistory = 1 << 13; + +// Default value for the maximum decode queue size that is used when the +// low-latency renderer is used. 
+static constexpr size_t kZeroPlayoutDelayDefaultMaxDecodeQueueSize = 8; + +struct FrameMetadata { + explicit FrameMetadata(const EncodedFrame& frame) + : is_last_spatial_layer(frame.is_last_spatial_layer), + is_keyframe(frame.is_keyframe()), + size(frame.size()), + contentType(frame.contentType()), + delayed_by_retransmission(frame.delayed_by_retransmission()), + rtp_timestamp(frame.Timestamp()), + receive_time(frame.ReceivedTimestamp()) {} + + const bool is_last_spatial_layer; + const bool is_keyframe; + const size_t size; + const VideoContentType contentType; + const bool delayed_by_retransmission; + const uint32_t rtp_timestamp; + const absl::optional receive_time; +}; + +Timestamp ReceiveTime(const EncodedFrame& frame) { + absl::optional ts = frame.ReceivedTimestamp(); + RTC_DCHECK(ts.has_value()) << "Received frame must have a timestamp set!"; + return *ts; +} + +} // namespace + +VideoStreamBufferController::VideoStreamBufferController( + Clock* clock, + TaskQueueBase* worker_queue, + VCMTiming* timing, + VCMReceiveStatisticsCallback* stats_proxy, + FrameSchedulingReceiver* receiver, + TimeDelta max_wait_for_keyframe, + TimeDelta max_wait_for_frame, + std::unique_ptr frame_decode_scheduler, + const FieldTrialsView& field_trials) + : field_trials_(field_trials), + clock_(clock), + stats_proxy_(stats_proxy), + receiver_(receiver), + timing_(timing), + frame_decode_scheduler_(std::move(frame_decode_scheduler)), + jitter_estimator_(clock_, field_trials), + buffer_(std::make_unique(kMaxFramesBuffered, + kMaxFramesHistory, + field_trials)), + decode_timing_(clock_, timing_), + timeout_tracker_( + clock_, + worker_queue, + VideoReceiveStreamTimeoutTracker::Timeouts{ + .max_wait_for_keyframe = max_wait_for_keyframe, + .max_wait_for_frame = max_wait_for_frame}, + absl::bind_front(&VideoStreamBufferController::OnTimeout, this)), + zero_playout_delay_max_decode_queue_size_( + "max_decode_queue_size", + kZeroPlayoutDelayDefaultMaxDecodeQueueSize) { + RTC_DCHECK(stats_proxy_); + RTC_DCHECK(receiver_); + RTC_DCHECK(timing_); + RTC_DCHECK(clock_); + RTC_DCHECK(frame_decode_scheduler_); + + ParseFieldTrial({&zero_playout_delay_max_decode_queue_size_}, + field_trials.Lookup("WebRTC-ZeroPlayoutDelay")); +} + +void VideoStreamBufferController::Stop() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + frame_decode_scheduler_->Stop(); + timeout_tracker_.Stop(); + decoder_ready_for_new_frame_ = false; +} + +void VideoStreamBufferController::SetProtectionMode( + VCMVideoProtection protection_mode) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + protection_mode_ = protection_mode; +} + +void VideoStreamBufferController::Clear() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + stats_proxy_->OnDroppedFrames(buffer_->CurrentSize()); + buffer_ = std::make_unique(kMaxFramesBuffered, kMaxFramesHistory, + field_trials_); + frame_decode_scheduler_->CancelOutstanding(); +} + +absl::optional VideoStreamBufferController::InsertFrame( + std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + FrameMetadata metadata(*frame); + int complete_units = buffer_->GetTotalNumberOfContinuousTemporalUnits(); + if (buffer_->InsertFrame(std::move(frame))) { + RTC_DCHECK(metadata.receive_time) << "Frame receive time must be set!"; + if (!metadata.delayed_by_retransmission && metadata.receive_time && + (field_trials_.IsDisabled("WebRTC-IncomingTimestampOnMarkerBitOnly") || + metadata.is_last_spatial_layer)) { + timing_->IncomingTimestamp(metadata.rtp_timestamp, + *metadata.receive_time); + } + if 
(complete_units < buffer_->GetTotalNumberOfContinuousTemporalUnits()) { + stats_proxy_->OnCompleteFrame(metadata.is_keyframe, metadata.size, + metadata.contentType); + MaybeScheduleFrameForRelease(); + } + } + + return buffer_->LastContinuousFrameId(); +} + +void VideoStreamBufferController::UpdateRtt(int64_t max_rtt_ms) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + jitter_estimator_.UpdateRtt(TimeDelta::Millis(max_rtt_ms)); +} + +void VideoStreamBufferController::SetMaxWaits(TimeDelta max_wait_for_keyframe, + TimeDelta max_wait_for_frame) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + timeout_tracker_.SetTimeouts({.max_wait_for_keyframe = max_wait_for_keyframe, + .max_wait_for_frame = max_wait_for_frame}); +} + +void VideoStreamBufferController::StartNextDecode(bool keyframe_required) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + if (!timeout_tracker_.Running()) + timeout_tracker_.Start(keyframe_required); + keyframe_required_ = keyframe_required; + if (keyframe_required_) { + timeout_tracker_.SetWaitingForKeyframe(); + } + decoder_ready_for_new_frame_ = true; + MaybeScheduleFrameForRelease(); +} + +int VideoStreamBufferController::Size() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + return buffer_->CurrentSize(); +} + +void VideoStreamBufferController::OnFrameReady( + absl::InlinedVector, 4> frames, + Timestamp render_time) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + RTC_CHECK(!frames.empty()) + << "Callers must ensure there is at least one frame to decode."; + + timeout_tracker_.OnEncodedFrameReleased(); + + Timestamp now = clock_->CurrentTime(); + bool superframe_delayed_by_retransmission = false; + DataSize superframe_size = DataSize::Zero(); + const EncodedFrame& first_frame = *frames.front(); + Timestamp receive_time = ReceiveTime(first_frame); + + if (first_frame.is_keyframe()) + keyframe_required_ = false; + + // Gracefully handle bad RTP timestamps and render time issues. + if (FrameHasBadRenderTiming(render_time, now) || + TargetVideoDelayIsTooLarge(timing_->TargetVideoDelay())) { + RTC_LOG(LS_WARNING) << "Resetting jitter estimator and timing module due " + "to bad render timing for rtp_timestamp=" + << first_frame.Timestamp(); + jitter_estimator_.Reset(); + timing_->Reset(); + render_time = timing_->RenderTime(first_frame.Timestamp(), now); + } + + for (std::unique_ptr& frame : frames) { + frame->SetRenderTime(render_time.ms()); + + superframe_delayed_by_retransmission |= frame->delayed_by_retransmission(); + receive_time = std::max(receive_time, ReceiveTime(*frame)); + superframe_size += DataSize::Bytes(frame->size()); + } + + if (!superframe_delayed_by_retransmission) { + auto frame_delay = inter_frame_delay_.CalculateDelay( + first_frame.Timestamp(), receive_time); + if (frame_delay) { + jitter_estimator_.UpdateEstimate(*frame_delay, superframe_size); + } + + float rtt_mult = protection_mode_ == kProtectionNackFEC ? 0.0 : 1.0; + absl::optional rtt_mult_add_cap_ms = absl::nullopt; + if (rtt_mult_settings_.has_value()) { + rtt_mult = rtt_mult_settings_->rtt_mult_setting; + rtt_mult_add_cap_ms = + TimeDelta::Millis(rtt_mult_settings_->rtt_mult_add_cap_ms); + } + timing_->SetJitterDelay( + jitter_estimator_.GetJitterEstimate(rtt_mult, rtt_mult_add_cap_ms)); + timing_->UpdateCurrentDelay(render_time, now); + } else if (RttMultExperiment::RttMultEnabled()) { + jitter_estimator_.FrameNacked(); + } + + // Update stats. 
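One detail of InsertFrame() above worth highlighting before the stats updates that follow: the frame's metadata is copied into a plain value struct while the frame is still owned locally, because ownership is then moved into the buffer and the frame pointer must not be touched again. A hedged standalone sketch of that capture-before-move pattern, with illustrative types:

#include <cstdio>
#include <memory>
#include <vector>

// Illustrative stand-in for an encoded frame.
struct Frame {
  bool keyframe = false;
  size_t size_bytes = 0;
};

// Plain-value snapshot taken while the frame is still owned here, mirroring
// the role of the FrameMetadata struct in the buffer controller.
struct Metadata {
  explicit Metadata(const Frame& f)
      : is_keyframe(f.keyframe), size(f.size_bytes) {}
  const bool is_keyframe;
  const size_t size;
};

void Insert(std::vector<std::unique_ptr<Frame>>& buffer,
            std::unique_ptr<Frame> frame) {
  Metadata metadata(*frame);           // 1. Snapshot first...
  buffer.push_back(std::move(frame));  // 2. ...then give up ownership.
  // 3. `frame` must not be used anymore; rely on the snapshot instead.
  std::printf("inserted %zu-byte %s frame\n", metadata.size,
              metadata.is_keyframe ? "key" : "delta");
}

int main() {
  std::vector<std::unique_ptr<Frame>> buffer;
  Insert(buffer, std::make_unique<Frame>(Frame{true, 1200}));
}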
+ UpdateDroppedFrames(); + UpdateJitterDelay(); + UpdateTimingFrameInfo(); + + std::unique_ptr frame = + CombineAndDeleteFrames(std::move(frames)); + + timing_->SetLastDecodeScheduledTimestamp(now); + + decoder_ready_for_new_frame_ = false; + receiver_->OnEncodedFrame(std::move(frame)); +} + +void VideoStreamBufferController::OnTimeout(TimeDelta delay) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + + // Stop sending timeouts until receiver starts waiting for a new frame. + timeout_tracker_.Stop(); + + // If the stream is paused then ignore the timeout. + if (!decoder_ready_for_new_frame_) { + return; + } + decoder_ready_for_new_frame_ = false; + receiver_->OnDecodableFrameTimeout(delay); +} + +void VideoStreamBufferController::FrameReadyForDecode(uint32_t rtp_timestamp, + Timestamp render_time) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + // Check that the frame to decode is still valid before passing the frame for + // decoding. + auto decodable_tu_info = buffer_->DecodableTemporalUnitsInfo(); + if (!decodable_tu_info) { + RTC_LOG(LS_ERROR) + << "The frame buffer became undecodable during the wait " + "to decode frame with rtp-timestamp " + << rtp_timestamp + << ". Cancelling the decode of this frame, decoding " + "will resume when the frame buffers become decodable again."; + return; + } + RTC_DCHECK_EQ(rtp_timestamp, decodable_tu_info->next_rtp_timestamp) + << "Frame buffer's next decodable frame was not the one sent for " + "extraction."; + auto frames = buffer_->ExtractNextDecodableTemporalUnit(); + if (frames.empty()) { + RTC_LOG(LS_ERROR) + << "The frame buffer should never return an empty temporal until list " + "when there is a decodable temporal unit."; + RTC_DCHECK_NOTREACHED(); + return; + } + OnFrameReady(std::move(frames), render_time); +} + +void VideoStreamBufferController::UpdateDroppedFrames() + RTC_RUN_ON(&worker_sequence_checker_) { + const int dropped_frames = buffer_->GetTotalNumberOfDroppedFrames() - + frames_dropped_before_last_new_frame_; + if (dropped_frames > 0) + stats_proxy_->OnDroppedFrames(dropped_frames); + frames_dropped_before_last_new_frame_ = + buffer_->GetTotalNumberOfDroppedFrames(); +} + +void VideoStreamBufferController::UpdateJitterDelay() { + auto timings = timing_->GetTimings(); + if (timings.num_decoded_frames) { + stats_proxy_->OnFrameBufferTimingsUpdated( + timings.max_decode_duration.ms(), timings.current_delay.ms(), + timings.target_delay.ms(), timings.jitter_buffer_delay.ms(), + timings.min_playout_delay.ms(), timings.render_delay.ms()); + } +} + +void VideoStreamBufferController::UpdateTimingFrameInfo() { + absl::optional info = timing_->GetTimingFrameInfo(); + if (info) + stats_proxy_->OnTimingFrameInfoUpdated(*info); +} + +bool VideoStreamBufferController::IsTooManyFramesQueued() const + RTC_RUN_ON(&worker_sequence_checker_) { + return buffer_->CurrentSize() > zero_playout_delay_max_decode_queue_size_; +} + +void VideoStreamBufferController::ForceKeyFrameReleaseImmediately() + RTC_RUN_ON(&worker_sequence_checker_) { + RTC_DCHECK(keyframe_required_); + // Iterate through the frame buffer until there is a complete keyframe and + // release this right away. + while (buffer_->DecodableTemporalUnitsInfo()) { + auto next_frame = buffer_->ExtractNextDecodableTemporalUnit(); + if (next_frame.empty()) { + RTC_DCHECK_NOTREACHED() + << "Frame buffer should always return at least 1 frame."; + continue; + } + // Found keyframe - decode right away. 
+ if (next_frame.front()->is_keyframe()) { + auto render_time = timing_->RenderTime(next_frame.front()->Timestamp(), + clock_->CurrentTime()); + OnFrameReady(std::move(next_frame), render_time); + return; + } + } +} + +void VideoStreamBufferController::MaybeScheduleFrameForRelease() + RTC_RUN_ON(&worker_sequence_checker_) { + auto decodable_tu_info = buffer_->DecodableTemporalUnitsInfo(); + if (!decoder_ready_for_new_frame_ || !decodable_tu_info) { + return; + } + + if (keyframe_required_) { + return ForceKeyFrameReleaseImmediately(); + } + + // If already scheduled then abort. + if (frame_decode_scheduler_->ScheduledRtpTimestamp() == + decodable_tu_info->next_rtp_timestamp) { + return; + } + + TimeDelta max_wait = timeout_tracker_.TimeUntilTimeout(); + // Ensures the frame is scheduled for decode before the stream times out. + // This is otherwise a race condition. + max_wait = std::max(max_wait - TimeDelta::Millis(1), TimeDelta::Zero()); + absl::optional schedule; + while (decodable_tu_info) { + schedule = decode_timing_.OnFrameBufferUpdated( + decodable_tu_info->next_rtp_timestamp, + decodable_tu_info->last_rtp_timestamp, max_wait, + IsTooManyFramesQueued()); + if (schedule) { + // Don't schedule if already waiting for the same frame. + if (frame_decode_scheduler_->ScheduledRtpTimestamp() != + decodable_tu_info->next_rtp_timestamp) { + frame_decode_scheduler_->CancelOutstanding(); + frame_decode_scheduler_->ScheduleFrame( + decodable_tu_info->next_rtp_timestamp, *schedule, + absl::bind_front(&VideoStreamBufferController::FrameReadyForDecode, + this)); + } + return; + } + // If no schedule for current rtp, drop and try again. + buffer_->DropNextDecodableTemporalUnit(); + decodable_tu_info = buffer_->DecodableTemporalUnitsInfo(); + } +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_buffer_controller.h b/TMessagesProj/jni/voip/webrtc/video/video_stream_buffer_controller.h new file mode 100644 index 0000000000..ed79b0fa1f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_buffer_controller.h @@ -0,0 +1,119 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_VIDEO_STREAM_BUFFER_CONTROLLER_H_ +#define VIDEO_VIDEO_STREAM_BUFFER_CONTROLLER_H_ + +#include + +#include "api/field_trials_view.h" +#include "api/task_queue/task_queue_base.h" +#include "api/video/encoded_frame.h" +#include "api/video/frame_buffer.h" +#include "modules/video_coding/include/video_coding_defines.h" +#include "modules/video_coding/timing/inter_frame_delay.h" +#include "modules/video_coding/timing/jitter_estimator.h" +#include "modules/video_coding/timing/timing.h" +#include "rtc_base/experiments/rtt_mult_experiment.h" +#include "system_wrappers/include/clock.h" +#include "video/decode_synchronizer.h" +#include "video/video_receive_stream_timeout_tracker.h" + +namespace webrtc { + +class FrameSchedulingReceiver { + public: + virtual ~FrameSchedulingReceiver() = default; + + virtual void OnEncodedFrame(std::unique_ptr frame) = 0; + virtual void OnDecodableFrameTimeout(TimeDelta wait_time) = 0; +}; + +class VideoStreamBufferController { + public: + VideoStreamBufferController( + Clock* clock, + TaskQueueBase* worker_queue, + VCMTiming* timing, + VCMReceiveStatisticsCallback* stats_proxy, + FrameSchedulingReceiver* receiver, + TimeDelta max_wait_for_keyframe, + TimeDelta max_wait_for_frame, + std::unique_ptr frame_decode_scheduler, + const FieldTrialsView& field_trials); + virtual ~VideoStreamBufferController() = default; + + void Stop(); + void SetProtectionMode(VCMVideoProtection protection_mode); + void Clear(); + absl::optional InsertFrame(std::unique_ptr frame); + void UpdateRtt(int64_t max_rtt_ms); + void SetMaxWaits(TimeDelta max_wait_for_keyframe, + TimeDelta max_wait_for_frame); + void StartNextDecode(bool keyframe_required); + int Size(); + + private: + void OnFrameReady( + absl::InlinedVector, 4> frames, + Timestamp render_time); + void OnTimeout(TimeDelta delay); + void FrameReadyForDecode(uint32_t rtp_timestamp, Timestamp render_time); + void UpdateDroppedFrames() RTC_RUN_ON(&worker_sequence_checker_); + void UpdateJitterDelay(); + void UpdateTimingFrameInfo(); + bool IsTooManyFramesQueued() const RTC_RUN_ON(&worker_sequence_checker_); + void ForceKeyFrameReleaseImmediately() RTC_RUN_ON(&worker_sequence_checker_); + void MaybeScheduleFrameForRelease() RTC_RUN_ON(&worker_sequence_checker_); + + RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_sequence_checker_; + const FieldTrialsView& field_trials_; + const absl::optional rtt_mult_settings_ = + RttMultExperiment::GetRttMultValue(); + Clock* const clock_; + VCMReceiveStatisticsCallback* const stats_proxy_; + FrameSchedulingReceiver* const receiver_; + VCMTiming* const timing_; + const std::unique_ptr frame_decode_scheduler_ + RTC_GUARDED_BY(&worker_sequence_checker_); + + JitterEstimator jitter_estimator_ RTC_GUARDED_BY(&worker_sequence_checker_); + InterFrameDelay inter_frame_delay_ RTC_GUARDED_BY(&worker_sequence_checker_); + bool keyframe_required_ RTC_GUARDED_BY(&worker_sequence_checker_) = false; + std::unique_ptr buffer_ + RTC_GUARDED_BY(&worker_sequence_checker_); + FrameDecodeTiming decode_timing_ RTC_GUARDED_BY(&worker_sequence_checker_); + VideoReceiveStreamTimeoutTracker timeout_tracker_ + RTC_GUARDED_BY(&worker_sequence_checker_); + int frames_dropped_before_last_new_frame_ + RTC_GUARDED_BY(&worker_sequence_checker_) = 0; + VCMVideoProtection protection_mode_ + RTC_GUARDED_BY(&worker_sequence_checker_) = kProtectionNack; + + // This flag guards frames from queuing in front of the decoder. 
Without this + // guard, encoded frames will not wait for the decoder to finish decoding a + // frame and just queue up, meaning frames will not be dropped or + // fast-forwarded when the decoder is slow or hangs. + bool decoder_ready_for_new_frame_ RTC_GUARDED_BY(&worker_sequence_checker_) = + false; + + // Maximum number of frames in the decode queue to allow pacing. If the + // queue grows beyond the max limit, pacing will be disabled and frames will + // be pushed to the decoder as soon as possible. This only has an effect + // when the low-latency rendering path is active, which is indicated by + // the frame's render time == 0. + FieldTrialParameter zero_playout_delay_max_decode_queue_size_; + + ScopedTaskSafety worker_safety_; +}; + +} // namespace webrtc + +#endif // VIDEO_VIDEO_STREAM_BUFFER_CONTROLLER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder.cc b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder.cc deleted file mode 100644 index 7fe0adef10..0000000000 --- a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder.cc +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "video/video_stream_decoder.h" - -#include "modules/video_coding/video_receiver2.h" -#include "rtc_base/checks.h" -#include "video/receive_statistics_proxy.h" - -namespace webrtc { - -VideoStreamDecoder::VideoStreamDecoder( - VideoReceiver2* video_receiver, - ReceiveStatisticsProxy* receive_statistics_proxy, - rtc::VideoSinkInterface* incoming_video_stream) - : video_receiver_(video_receiver), - receive_stats_callback_(receive_statistics_proxy), - incoming_video_stream_(incoming_video_stream) { - RTC_DCHECK(video_receiver_); - - video_receiver_->RegisterReceiveCallback(this); -} - -VideoStreamDecoder::~VideoStreamDecoder() { - // Note: There's an assumption at this point that the decoder thread is - // *not* running. If it was, then there could be a race for each of these - // callbacks. - - // Unset all the callback pointers that we set in the ctor. - video_receiver_->RegisterReceiveCallback(nullptr); -} - -// Do not acquire the lock of `video_receiver_` in this function. Decode -// callback won't necessarily be called from the decoding thread. The decoding -// thread may have held the lock when calling VideoDecoder::Decode, Reset, or -// Release. Acquiring the same lock in the path of decode callback can deadlock. 
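Returning to the decoder_ready_for_new_frame_ flag documented above: a frame is only handed out after the decoder has explicitly asked for the next one, which is what keeps frames from piling up in front of a slow or hung decoder. A hedged, self-contained sketch of that request-gated release; names and types here are illustrative only.

#include <cstdio>
#include <queue>

// Minimal sketch of a "one frame per request" gate.
class BufferGate {
 public:
  void InsertFrame(int frame_id) {
    frames_.push(frame_id);
    MaybeRelease();
  }

  // Called when the decoder finished the previous frame and wants another,
  // mirroring StartNextDecode() setting the ready flag.
  void StartNextDecode() {
    decoder_ready_ = true;
    MaybeRelease();
  }

 private:
  void MaybeRelease() {
    if (!decoder_ready_ || frames_.empty())
      return;                // Either no request yet or nothing to deliver.
    int frame = frames_.front();
    frames_.pop();
    decoder_ready_ = false;  // Cleared until the next explicit request.
    std::printf("releasing frame %d to decoder\n", frame);
  }

  bool decoder_ready_ = false;
  std::queue<int> frames_;
};

int main() {
  BufferGate gate;
  gate.InsertFrame(1);     // Held back: decoder has not asked yet.
  gate.StartNextDecode();  // Releases frame 1.
  gate.InsertFrame(2);     // Held back again until the next request.
  gate.StartNextDecode();  // Releases frame 2.
}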
-int32_t VideoStreamDecoder::FrameToRender(VideoFrame& video_frame, - absl::optional qp, - int32_t decode_time_ms, - VideoContentType content_type) { - receive_stats_callback_->OnDecodedFrame(video_frame, qp, decode_time_ms, - content_type); - incoming_video_stream_->OnFrame(video_frame); - return 0; -} - -void VideoStreamDecoder::OnDroppedFrames(uint32_t frames_dropped) { - receive_stats_callback_->OnDroppedFrames(frames_dropped); -} - -void VideoStreamDecoder::OnIncomingPayloadType(int payload_type) { - receive_stats_callback_->OnIncomingPayloadType(payload_type); -} - -void VideoStreamDecoder::OnDecoderImplementationName( - const char* implementation_name) { - receive_stats_callback_->OnDecoderImplementationName(implementation_name); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder.h b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder.h deleted file mode 100644 index bfe9252976..0000000000 --- a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder.h +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef VIDEO_VIDEO_STREAM_DECODER_H_ -#define VIDEO_VIDEO_STREAM_DECODER_H_ - -#include -#include -#include -#include - -#include "api/scoped_refptr.h" -#include "api/video/video_sink_interface.h" -#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" -#include "modules/video_coding/include/video_coding_defines.h" -#include "rtc_base/platform_thread.h" -#include "rtc_base/synchronization/mutex.h" - -namespace webrtc { - -class ReceiveStatisticsProxy; -class VideoReceiver2; - -class VideoStreamDecoder : public VCMReceiveCallback { - public: - VideoStreamDecoder( - VideoReceiver2* video_receiver, - ReceiveStatisticsProxy* receive_statistics_proxy, - rtc::VideoSinkInterface* incoming_video_stream); - ~VideoStreamDecoder() override; - - // Implements VCMReceiveCallback. - int32_t FrameToRender(VideoFrame& video_frame, - absl::optional qp, - int32_t decode_time_ms, - VideoContentType content_type) override; - void OnDroppedFrames(uint32_t frames_dropped) override; - void OnIncomingPayloadType(int payload_type) override; - void OnDecoderImplementationName(const char* implementation_name) override; - - void RegisterReceiveStatisticsProxy( - ReceiveStatisticsProxy* receive_statistics_proxy); - - private: - // Used for all registered callbacks except rendering. 
-  Mutex mutex_;
-
-  VideoReceiver2* const video_receiver_;
-
-  ReceiveStatisticsProxy* const receive_stats_callback_;
-  rtc::VideoSinkInterface* const incoming_video_stream_;
-};
-
-}  // namespace webrtc
-
-#endif  // VIDEO_VIDEO_STREAM_DECODER_H_
diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder2.cc b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder2.cc
index 6ef60024a9..1ef2d0ecd0 100644
--- a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder2.cc
+++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder2.cc
@@ -10,6 +10,7 @@
 #include "video/video_stream_decoder2.h"
+#include "api/video_codecs/video_decoder.h"
 #include "modules/video_coding/video_receiver2.h"
 #include "rtc_base/checks.h"
 #include "video/receive_statistics_proxy2.h"
@@ -44,9 +45,9 @@ VideoStreamDecoder::~VideoStreamDecoder() {
 // Release. Acquiring the same lock in the path of decode callback can deadlock.
 int32_t VideoStreamDecoder::FrameToRender(VideoFrame& video_frame,
                                           absl::optional qp,
-                                          int32_t decode_time_ms,
+                                          TimeDelta decode_time,
                                           VideoContentType content_type) {
-  receive_stats_callback_->OnDecodedFrame(video_frame, qp, decode_time_ms,
+  receive_stats_callback_->OnDecodedFrame(video_frame, qp, decode_time,
                                           content_type);
   incoming_video_stream_->OnFrame(video_frame);
   return 0;
@@ -60,9 +61,9 @@ void VideoStreamDecoder::OnIncomingPayloadType(int payload_type) {
   receive_stats_callback_->OnIncomingPayloadType(payload_type);
 }
-void VideoStreamDecoder::OnDecoderImplementationName(
-    const char* implementation_name) {
-  receive_stats_callback_->OnDecoderImplementationName(implementation_name);
+void VideoStreamDecoder::OnDecoderInfoChanged(
+    const VideoDecoder::DecoderInfo& decoder_info) {
+  receive_stats_callback_->OnDecoderInfo(decoder_info);
 }
 }  // namespace internal
diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder2.h b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder2.h
index a301d32107..473d463186 100644
--- a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder2.h
+++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder2.h
@@ -18,6 +18,7 @@
 #include "api/scoped_refptr.h"
 #include "api/video/video_sink_interface.h"
+#include "api/video_codecs/video_decoder.h"
 #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
 #include "modules/video_coding/include/video_coding_defines.h"
 #include "rtc_base/platform_thread.h"
@@ -41,11 +42,12 @@ class VideoStreamDecoder : public VCMReceiveCallback {
   // Implements VCMReceiveCallback.
int32_t FrameToRender(VideoFrame& video_frame, absl::optional qp, - int32_t decode_time_ms, + TimeDelta decode_time, VideoContentType content_type) override; void OnDroppedFrames(uint32_t frames_dropped) override; void OnIncomingPayloadType(int payload_type) override; - void OnDecoderImplementationName(const char* implementation_name) override; + void OnDecoderInfoChanged( + const VideoDecoder::DecoderInfo& decoder_info) override; private: VideoReceiver2* const video_receiver_; diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.cc b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.cc index bbd67ee257..516aceb680 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.cc @@ -12,7 +12,6 @@ #include -#include "api/task_queue/queued_task.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/mod_ops.h" #include "rtc_base/time_utils.h" @@ -23,8 +22,10 @@ VideoStreamDecoderImpl::VideoStreamDecoderImpl( VideoStreamDecoderInterface::Callbacks* callbacks, VideoDecoderFactory* decoder_factory, TaskQueueFactory* task_queue_factory, - std::map> decoder_settings) - : timing_(Clock::GetRealTimeClock()), + std::map> decoder_settings, + const FieldTrialsView* field_trials) + : field_trials_(field_trials), + timing_(Clock::GetRealTimeClock(), *field_trials_), decode_callbacks_(this), next_frame_info_index_(0), callbacks_(callbacks), @@ -32,7 +33,7 @@ VideoStreamDecoderImpl::VideoStreamDecoderImpl( decoder_factory_(decoder_factory), decoder_settings_(std::move(decoder_settings)), shut_down_(false), - frame_buffer_(Clock::GetRealTimeClock(), &timing_, nullptr), + frame_buffer_(Clock::GetRealTimeClock(), &timing_, *field_trials_), bookkeeping_queue_(task_queue_factory->CreateTaskQueue( "video_stream_decoder_bookkeeping_queue", TaskQueueFactory::Priority::NORMAL)), @@ -70,11 +71,11 @@ void VideoStreamDecoderImpl::OnFrame(std::unique_ptr frame) { } void VideoStreamDecoderImpl::SetMinPlayoutDelay(TimeDelta min_delay) { - timing_.set_min_playout_delay(min_delay.ms()); + timing_.set_min_playout_delay(min_delay); } void VideoStreamDecoderImpl::SetMaxPlayoutDelay(TimeDelta max_delay) { - timing_.set_max_playout_delay(max_delay.ms()); + timing_.set_max_playout_delay(max_delay); } VideoDecoder* VideoStreamDecoderImpl::GetDecoder(int payload_type) { @@ -136,7 +137,7 @@ void VideoStreamDecoderImpl::StartNextDecode() { int64_t max_wait_time = keyframe_required_ ? 
200 : 3000; frame_buffer_.NextFrame(max_wait_time, keyframe_required_, - &bookkeeping_queue_, + bookkeeping_queue_.Get(), [this](std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&bookkeeping_queue_); OnNextFrameCallback(std::move(frame)); @@ -258,7 +259,8 @@ void VideoStreamDecoderImpl::OnDecodedFrameCallback( Timestamp::Millis(frame_info->decode_start_time_ms + *decode_time_ms)}); decoded_image.set_timestamp_us(frame_info->render_time_us); - timing_.StopDecodeTimer(*decode_time_ms, decode_stop_time_ms); + timing_.StopDecodeTimer(TimeDelta::Millis(*decode_time_ms), + Timestamp::Millis(decode_stop_time_ms)); callbacks_->OnDecodedFrame(decoded_image, callback_info); }); diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.h b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.h index 9d028a2d6e..fcd7158391 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.h @@ -16,10 +16,13 @@ #include #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/sequence_checker.h" +#include "api/transport/field_trial_based_config.h" #include "api/video/video_stream_decoder.h" #include "modules/video_coding/frame_buffer2.h" -#include "modules/video_coding/timing.h" +#include "modules/video_coding/timing/timing.h" +#include "rtc_base/memory/always_valid_pointer.h" #include "rtc_base/platform_thread.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" @@ -33,7 +36,8 @@ class VideoStreamDecoderImpl : public VideoStreamDecoderInterface { VideoStreamDecoderInterface::Callbacks* callbacks, VideoDecoderFactory* decoder_factory, TaskQueueFactory* task_queue_factory, - std::map> decoder_settings); + std::map> decoder_settings, + const FieldTrialsView* field_trials); ~VideoStreamDecoderImpl() override; @@ -82,6 +86,8 @@ class VideoStreamDecoderImpl : public VideoStreamDecoderInterface { VideoStreamDecoderImpl::DecodeResult DecodeFrame( std::unique_ptr frame) RTC_RUN_ON(decode_queue_); + AlwaysValidPointer + field_trials_; VCMTiming timing_; DecodeCallbacks decode_callbacks_; diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc index 4f1d8c2603..068b60eb4b 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc @@ -18,39 +18,42 @@ #include #include "absl/algorithm/container.h" +#include "absl/cleanup/cleanup.h" #include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/sequence_checker.h" -#include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" #include "api/video/encoded_image.h" #include "api/video/i420_buffer.h" +#include "api/video/render_resolution.h" #include "api/video/video_adaptation_reason.h" #include "api/video/video_bitrate_allocator_factory.h" #include "api/video/video_codec_constants.h" #include "api/video/video_layers_allocation.h" +#include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_encoder.h" #include "call/adaptation/resource_adaptation_processor.h" +#include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_adapter.h" #include "modules/video_coding/include/video_codec_initializer.h" #include "modules/video_coding/svc/svc_rate_allocator.h" +#include "modules/video_coding/utility/vp8_constants.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" 
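A little further down, this file gains a MergeRestrictions() helper that folds a list of optional restriction sets into a single one by keeping the tightest limits via UpdateMin(). The following standalone sketch shows the same folding idea, assuming a simplified struct in place of webrtc::VideoSourceRestrictions:

#include <algorithm>
#include <cstdio>
#include <optional>
#include <vector>

// Simplified stand-in for VideoSourceRestrictions.
struct Restrictions {
  int max_pixels;
  int max_fps;

  // Keep the stricter (smaller) of each limit, like UpdateMin().
  void UpdateMin(const Restrictions& other) {
    max_pixels = std::min(max_pixels, other.max_pixels);
    max_fps = std::min(max_fps, other.max_fps);
  }
};

std::optional<Restrictions> Merge(
    const std::vector<std::optional<Restrictions>>& list) {
  std::optional<Restrictions> result;
  for (const auto& r : list) {
    if (!r) continue;                 // Unset entries contribute nothing.
    if (!result) { result = *r; continue; }
    result->UpdateMin(*r);            // Otherwise tighten the accumulated limits.
  }
  return result;                      // nullopt if every entry was unset.
}

int main() {
  auto merged = Merge({std::nullopt,
                       Restrictions{1280 * 720, 30},
                       Restrictions{640 * 360, 60}});
  if (merged)
    std::printf("max_pixels=%d max_fps=%d\n", merged->max_pixels,
                merged->max_fps);
}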
-#include "rtc_base/constructor_magic.h" #include "rtc_base/event.h" #include "rtc_base/experiments/alr_experiment.h" #include "rtc_base/experiments/encoder_info_settings.h" #include "rtc_base/experiments/rate_control_settings.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" #include "video/adaptation/video_stream_encoder_resource_manager.h" #include "video/alignment_adjuster.h" +#include "video/config/encoder_stream_factory.h" +#include "video/frame_cadence_adapter.h" namespace webrtc { @@ -64,6 +67,11 @@ const int64_t kPendingFrameTimeoutMs = 1000; constexpr char kFrameDropperFieldTrial[] = "WebRTC-FrameDropper"; +// TODO(bugs.webrtc.org/13572): Remove this kill switch after deploying the +// feature. +constexpr char kSwitchEncoderOnInitializationFailuresFieldTrial[] = + "WebRTC-SwitchEncoderOnInitializationFailures"; + const size_t kDefaultPayloadSize = 1440; const int64_t kParameterUpdateIntervalMs = 1000; @@ -83,7 +91,9 @@ bool RequiresEncoderReset(const VideoCodec& prev_send_codec, new_send_codec.qpMax != prev_send_codec.qpMax || new_send_codec.numberOfSimulcastStreams != prev_send_codec.numberOfSimulcastStreams || - new_send_codec.mode != prev_send_codec.mode) { + new_send_codec.mode != prev_send_codec.mode || + new_send_codec.GetFrameDropEnabled() != + prev_send_codec.GetFrameDropEnabled()) { return true; } @@ -159,7 +169,8 @@ bool RequiresEncoderReset(const VideoCodec& prev_send_codec, } } - if (new_send_codec.ScalabilityMode() != prev_send_codec.ScalabilityMode()) { + if (new_send_codec.GetScalabilityMode() != + prev_send_codec.GetScalabilityMode()) { return true; } @@ -378,7 +389,8 @@ void ApplyVp9BitrateLimits(const VideoEncoder::EncoderInfo& encoder_info, VideoCodec* codec) { if (codec->codecType != VideoCodecType::kVideoCodecVP9 || encoder_config.simulcast_layers.size() <= 1 || - VideoStreamEncoderResourceManager::IsSimulcast(encoder_config)) { + VideoStreamEncoderResourceManager::IsSimulcastOrMultipleSpatialLayers( + encoder_config)) { // Resolution bitrate limits usage is restricted to singlecast. 
return; } @@ -503,6 +515,34 @@ void ApplyEncoderBitrateLimitsIfSingleActiveStream( encoder_bitrate_limits->max_bitrate_bps); } +absl::optional ParseVp9LowTierCoreCountThreshold( + const FieldTrialsView& trials) { + FieldTrialFlag disable_low_tier("Disabled"); + FieldTrialParameter max_core_count("max_core_count", 2); + ParseFieldTrial({&disable_low_tier, &max_core_count}, + trials.Lookup("WebRTC-VP9-LowTierOptimizations")); + if (disable_low_tier.Get()) { + return absl::nullopt; + } + return max_core_count.Get(); +} + +absl::optional MergeRestrictions( + const std::vector>& list) { + absl::optional return_value; + for (const auto& res : list) { + if (!res) { + continue; + } + if (!return_value) { + return_value = *res; + continue; + } + return_value->UpdateMin(*res); + } + return return_value; +} + } // namespace VideoStreamEncoder::EncoderRateSettings::EncoderRateSettings() @@ -598,14 +638,24 @@ VideoStreamEncoder::VideoStreamEncoder( std::unique_ptr frame_cadence_adapter, std::unique_ptr encoder_queue, - BitrateAllocationCallbackType allocation_cb_type) - : worker_queue_(TaskQueueBase::Current()), + BitrateAllocationCallbackType allocation_cb_type, + const FieldTrialsView& field_trials, + webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) + : field_trials_(field_trials), + worker_queue_(TaskQueueBase::Current()), number_of_cores_(number_of_cores), sink_(nullptr), settings_(settings), allocation_cb_type_(allocation_cb_type), rate_control_settings_(RateControlSettings::ParseFromFieldTrials()), - encoder_selector_(settings.encoder_factory->GetEncoderSelector()), + encoder_selector_from_constructor_(encoder_selector), + encoder_selector_from_factory_( + encoder_selector_from_constructor_ + ? nullptr + : settings.encoder_factory->GetEncoderSelector()), + encoder_selector_(encoder_selector_from_constructor_ + ? 
encoder_selector_from_constructor_ + : encoder_selector_from_factory_.get()), encoder_stats_observer_(encoder_stats_observer), cadence_callback_(*this), frame_cadence_adapter_(std::move(frame_cadence_adapter)), @@ -646,10 +696,8 @@ VideoStreamEncoder::VideoStreamEncoder( input_state_provider_(encoder_stats_observer), video_stream_adapter_( std::make_unique(&input_state_provider_, - encoder_stats_observer)), - resource_adaptation_processor_( - std::make_unique( - video_stream_adapter_.get())), + encoder_stats_observer, + field_trials)), degradation_preference_manager_( std::make_unique( video_stream_adapter_.get())), @@ -659,13 +707,18 @@ VideoStreamEncoder::VideoStreamEncoder( clock_, settings_.experiment_cpu_load_estimator, std::move(overuse_detector), - degradation_preference_manager_.get()), + degradation_preference_manager_.get(), + field_trials), video_source_sink_controller_(/*sink=*/frame_cadence_adapter_.get(), /*source=*/nullptr), default_limits_allowed_( - !field_trial::IsEnabled("WebRTC-DefaultBitrateLimitsKillSwitch")), + !field_trials.IsEnabled("WebRTC-DefaultBitrateLimitsKillSwitch")), qp_parsing_allowed_( - !field_trial::IsEnabled("WebRTC-QpParsingKillSwitch")), + !field_trials.IsEnabled("WebRTC-QpParsingKillSwitch")), + switch_encoder_on_init_failures_(!field_trials.IsDisabled( + kSwitchEncoderOnInitializationFailuresFieldTrial)), + vp9_low_tier_core_threshold_( + ParseVp9LowTierCoreCountThreshold(field_trials)), encoder_queue_(std::move(encoder_queue)) { TRACE_EVENT0("webrtc", "VideoStreamEncoder::VideoStreamEncoder"); RTC_DCHECK_RUN_ON(worker_queue_); @@ -673,12 +726,15 @@ VideoStreamEncoder::VideoStreamEncoder( RTC_DCHECK_GE(number_of_cores, 1); frame_cadence_adapter_->Initialize(&cadence_callback_); - stream_resource_manager_.Initialize(&encoder_queue_); + stream_resource_manager_.Initialize(encoder_queue_.Get()); - rtc::Event initialize_processor_event; - encoder_queue_.PostTask([this, &initialize_processor_event] { + encoder_queue_.PostTask([this] { RTC_DCHECK_RUN_ON(&encoder_queue_); - resource_adaptation_processor_->SetTaskQueue(encoder_queue_.Get()); + + resource_adaptation_processor_ = + std::make_unique( + video_stream_adapter_.get()); + stream_resource_manager_.SetAdaptationProcessor( resource_adaptation_processor_.get(), video_stream_adapter_.get()); resource_adaptation_processor_->AddResourceLimitationsListener( @@ -692,9 +748,7 @@ VideoStreamEncoder::VideoStreamEncoder( for (auto* constraint : adaptation_constraints_) { video_stream_adapter_->AddAdaptationConstraint(constraint); } - initialize_processor_event.Set(); }); - initialize_processor_event.Wait(rtc::Event::kForever); } VideoStreamEncoder::~VideoStreamEncoder() { @@ -708,32 +762,32 @@ void VideoStreamEncoder::Stop() { video_source_sink_controller_.SetSource(nullptr); rtc::Event shutdown_event; - - encoder_queue_.PostTask([this, &shutdown_event] { - RTC_DCHECK_RUN_ON(&encoder_queue_); - if (resource_adaptation_processor_) { - stream_resource_manager_.StopManagedResources(); - for (auto* constraint : adaptation_constraints_) { - video_stream_adapter_->RemoveAdaptationConstraint(constraint); - } - for (auto& resource : additional_resources_) { - stream_resource_manager_.RemoveResource(resource); - } - additional_resources_.clear(); - video_stream_adapter_->RemoveRestrictionsListener(this); - video_stream_adapter_->RemoveRestrictionsListener( - &stream_resource_manager_); - resource_adaptation_processor_->RemoveResourceLimitationsListener( - &stream_resource_manager_); - 
stream_resource_manager_.SetAdaptationProcessor(nullptr, nullptr); - resource_adaptation_processor_.reset(); - } - rate_allocator_ = nullptr; - ReleaseEncoder(); - encoder_ = nullptr; - frame_cadence_adapter_ = nullptr; - shutdown_event.Set(); - }); + absl::Cleanup shutdown = [&shutdown_event] { shutdown_event.Set(); }; + encoder_queue_.PostTask( + [this, shutdown = std::move(shutdown)] { + RTC_DCHECK_RUN_ON(&encoder_queue_); + if (resource_adaptation_processor_) { + stream_resource_manager_.StopManagedResources(); + for (auto* constraint : adaptation_constraints_) { + video_stream_adapter_->RemoveAdaptationConstraint(constraint); + } + for (auto& resource : additional_resources_) { + stream_resource_manager_.RemoveResource(resource); + } + additional_resources_.clear(); + video_stream_adapter_->RemoveRestrictionsListener(this); + video_stream_adapter_->RemoveRestrictionsListener( + &stream_resource_manager_); + resource_adaptation_processor_->RemoveResourceLimitationsListener( + &stream_resource_manager_); + stream_resource_manager_.SetAdaptationProcessor(nullptr, nullptr); + resource_adaptation_processor_.reset(); + } + rate_allocator_ = nullptr; + ReleaseEncoder(); + encoder_ = nullptr; + frame_cadence_adapter_ = nullptr; + }); shutdown_event.Wait(rtc::Event::kForever); } @@ -758,22 +812,31 @@ void VideoStreamEncoder::AddAdaptationResource( // of this MapResourceToReason() call. TRACE_EVENT_ASYNC_BEGIN0( "webrtc", "VideoStreamEncoder::AddAdaptationResource(latency)", this); - rtc::Event map_resource_event; - encoder_queue_.PostTask([this, resource, &map_resource_event] { + encoder_queue_.PostTask([this, resource = std::move(resource)] { TRACE_EVENT_ASYNC_END0( "webrtc", "VideoStreamEncoder::AddAdaptationResource(latency)", this); RTC_DCHECK_RUN_ON(&encoder_queue_); additional_resources_.push_back(resource); stream_resource_manager_.AddResource(resource, VideoAdaptationReason::kCpu); - map_resource_event.Set(); }); - map_resource_event.Wait(rtc::Event::kForever); } std::vector> VideoStreamEncoder::GetAdaptationResources() { RTC_DCHECK_RUN_ON(worker_queue_); - return resource_adaptation_processor_->GetResources(); + // In practice, this method is only called by tests to verify operations that + // run on the encoder queue. So rather than force PostTask() operations to + // be accompanied by an event and a `Wait()`, we'll use PostTask + Wait() + // here. + rtc::Event event; + std::vector> resources; + encoder_queue_.PostTask([&] { + RTC_DCHECK_RUN_ON(&encoder_queue_); + resources = resource_adaptation_processor_->GetResources(); + event.Set(); + }); + event.Wait(rtc::Event::kForever); + return resources; } void VideoStreamEncoder::SetSource( @@ -830,8 +893,20 @@ void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, RTC_DCHECK(sink_); RTC_LOG(LS_INFO) << "ConfigureEncoder requested."; - frame_cadence_adapter_->SetZeroHertzModeEnabled( - config.content_type == VideoEncoderConfig::ContentType::kScreen); + // Set up the frame cadence adapter according to if we're going to do + // screencast. The final number of spatial layers is based on info + // in `send_codec_`, which is computed based on incoming frame + // dimensions which can only be determined later. + // + // Note: zero-hertz mode isn't enabled by this alone. Constraints also + // have to be set up with min_fps = 0 and max_fps > 0. 
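As the note above says, passing ZeroHertzModeParams for screen content is only half of enabling zero-hertz operation; the source constraints must also allow it (min_fps == 0 and max_fps > 0). The sketch below folds both conditions into one illustrative helper to make the decision explicit; in the real code the constraint check lives elsewhere, so treat this purely as a hedged summary with stand-in types.

#include <cstdio>
#include <optional>

enum class ContentType { kRealtimeVideo, kScreen };

struct Constraints { int min_fps = 0; int max_fps = 0; };

// Stand-in for FrameCadenceAdapterInterface::ZeroHertzModeParams.
struct ZeroHertzParams { int num_simulcast_layers = 1; };

// Returns the params to hand to SetZeroHertzModeEnabled(); nullopt disables.
std::optional<ZeroHertzParams> ZeroHertzParamsFor(ContentType content,
                                                  const Constraints& c) {
  if (content != ContentType::kScreen)
    return std::nullopt;  // Only screenshare opts in.
  if (c.min_fps != 0 || c.max_fps <= 0)
    return std::nullopt;  // Constraints must allow idling at 0 Hz.
  return ZeroHertzParams{};
}

int main() {
  auto params = ZeroHertzParamsFor(ContentType::kScreen, {0, 30});
  std::printf("zero-hertz %s\n", params ? "enabled" : "disabled");
}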
+ if (config.content_type == VideoEncoderConfig::ContentType::kScreen) { + frame_cadence_adapter_->SetZeroHertzModeEnabled( + FrameCadenceAdapterInterface::ZeroHertzModeParams{}); + } else { + frame_cadence_adapter_->SetZeroHertzModeEnabled(absl::nullopt); + } + pending_encoder_creation_ = (!encoder_ || encoder_config_.video_format != config.video_format || max_data_payload_length_ != max_data_payload_length); @@ -865,9 +940,12 @@ void VideoStreamEncoder::ReconfigureEncoder() { encoder_ = settings_.encoder_factory->CreateVideoEncoder( encoder_config_.video_format); - // TODO(nisse): What to do if creating the encoder fails? Crash, - // or just discard incoming frames? - RTC_CHECK(encoder_); + if (!encoder_) { + RTC_LOG(LS_ERROR) << "CreateVideoEncoder failed, failing encoder format: " + << encoder_config_.video_format.ToString(); + RequestEncoderSwitch(); + return; + } if (encoder_selector_) { encoder_selector_->OnCurrentEncoder(encoder_config_.video_format); @@ -878,15 +956,32 @@ void VideoStreamEncoder::ReconfigureEncoder() { encoder_reset_required = true; } + // TODO(webrtc:14451) : Move AlignmentAdjuster into EncoderStreamFactory // Possibly adjusts scale_resolution_down_by in `encoder_config_` to limit the // alignment value. AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( encoder_->GetEncoderInfo(), &encoder_config_, absl::nullopt); - std::vector streams = - encoder_config_.video_stream_factory->CreateEncoderStreams( - last_frame_info_->width, last_frame_info_->height, encoder_config_); + std::vector streams; + if (encoder_config_.video_stream_factory) { + // Note: only tests set their own EncoderStreamFactory... + streams = encoder_config_.video_stream_factory->CreateEncoderStreams( + last_frame_info_->width, last_frame_info_->height, encoder_config_); + } else { + rtc::scoped_refptr + factory = rtc::make_ref_counted( + encoder_config_.video_format.name, encoder_config_.max_qp, + encoder_config_.content_type == + webrtc::VideoEncoderConfig::ContentType::kScreen, + encoder_config_.legacy_conference_mode, encoder_->GetEncoderInfo(), + MergeRestrictions({latest_restrictions_, animate_restrictions_}), + &field_trials_); + + streams = factory->CreateEncoderStreams( + last_frame_info_->width, last_frame_info_->height, encoder_config_); + } + // TODO(webrtc:14451) : Move AlignmentAdjuster into EncoderStreamFactory // Get alignment when actual number of layers are known. int alignment = AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( encoder_->GetEncoderInfo(), &encoder_config_, streams.size()); @@ -1077,10 +1172,30 @@ void VideoStreamEncoder::ReconfigureEncoder() { RTC_DCHECK_LE(codec.startBitrate, 1000000); max_framerate_ = codec.maxFramerate; - // Inform source about max configured framerate. + // Inform source about max configured framerate, + // requested_resolution and which layers are active. int max_framerate = 0; + // Is any layer active. + bool active = false; + // The max requested_resolution. + absl::optional requested_resolution; for (const auto& stream : streams) { max_framerate = std::max(stream.max_framerate, max_framerate); + active |= stream.active; + // Note: we propagate the highest requested_resolution regardless + // if layer is active or not. 
+ if (stream.requested_resolution) { + if (!requested_resolution) { + requested_resolution.emplace(stream.requested_resolution->width, + stream.requested_resolution->height); + } else { + requested_resolution.emplace( + std::max(stream.requested_resolution->width, + requested_resolution->width), + std::max(stream.requested_resolution->height, + requested_resolution->height)); + } + } } // The resolutions that we're actually encoding with. @@ -1093,19 +1208,28 @@ void VideoStreamEncoder::ReconfigureEncoder() { encoder_resolutions.emplace_back(simulcastStream.width, simulcastStream.height); } - worker_queue_->PostTask(ToQueuedTask( - task_safety_, [this, max_framerate, alignment, - encoder_resolutions = std::move(encoder_resolutions)]() { + + worker_queue_->PostTask(SafeTask( + task_safety_.flag(), + [this, max_framerate, alignment, + encoder_resolutions = std::move(encoder_resolutions), + requested_resolution = std::move(requested_resolution), active]() { RTC_DCHECK_RUN_ON(worker_queue_); if (max_framerate != video_source_sink_controller_.frame_rate_upper_limit() || alignment != video_source_sink_controller_.resolution_alignment() || encoder_resolutions != - video_source_sink_controller_.resolutions()) { + video_source_sink_controller_.resolutions() || + (video_source_sink_controller_.requested_resolution() != + requested_resolution) || + (video_source_sink_controller_.active() != active)) { video_source_sink_controller_.SetFrameRateUpperLimit(max_framerate); video_source_sink_controller_.SetResolutionAlignment(alignment); video_source_sink_controller_.SetResolutions( std::move(encoder_resolutions)); + video_source_sink_controller_.SetRequestedResolution( + requested_resolution); + video_source_sink_controller_.SetActive(active); video_source_sink_controller_.PushSourceSinkSettings(); } })); @@ -1121,13 +1245,18 @@ void VideoStreamEncoder::ReconfigureEncoder() { encoder_reset_required = RequiresEncoderReset( send_codec_, codec, was_encode_called_since_last_initialization_); } + + if (codec.codecType == VideoCodecType::kVideoCodecVP9 && + number_of_cores_ <= vp9_low_tier_core_threshold_.value_or(0)) { + codec.SetVideoEncoderComplexity(VideoCodecComplexity::kComplexityLow); + } + send_codec_ = codec; // Keep the same encoder, as long as the video_format is unchanged. // Encoder creation block is split in two since EncoderInfo needed to start // CPU adaptation with the correct settings should be polled after // encoder_->InitEncode(). - bool success = true; if (encoder_reset_required) { ReleaseEncoder(); const size_t max_data_payload_length = max_data_payload_length_ > 0 @@ -1142,7 +1271,6 @@ void VideoStreamEncoder::ReconfigureEncoder() { << CodecTypeToPayloadString(send_codec_.codecType) << " (" << send_codec_.codecType << ")"; ReleaseEncoder(); - success = false; } else { encoder_initialized_ = true; encoder_->RegisterEncodeCompleteCallback(this); @@ -1161,7 +1289,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { // Inform dependents of updated encoder settings. OnEncoderSettingsChanged(); - if (success) { + if (encoder_initialized_) { RTC_LOG(LS_VERBOSE) << " max bitrate " << codec.maxBitrate << " start bitrate " << codec.startBitrate << " max frame rate " << codec.maxFramerate @@ -1198,7 +1326,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { // * We have screensharing with layers. // * "WebRTC-FrameDropper" field trial is "Disabled". 
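The loop above reduces the per-layer stream settings to two values for the source: whether any simulcast layer is active, and the largest requested_resolution, taken as an element-wise maximum of width and height regardless of each layer's active flag. A hedged standalone sketch of that reduction, with simplified stand-in types:

#include <algorithm>
#include <cstdio>
#include <optional>
#include <vector>

struct Resolution { int width = 0; int height = 0; };

struct Layer {
  bool active = false;
  std::optional<Resolution> requested_resolution;
};

struct SourceSettings {
  bool any_active = false;
  std::optional<Resolution> requested_resolution;  // Element-wise max.
};

SourceSettings Fold(const std::vector<Layer>& layers) {
  SourceSettings out;
  for (const Layer& layer : layers) {
    out.any_active |= layer.active;
    if (!layer.requested_resolution)
      continue;
    if (!out.requested_resolution) {
      out.requested_resolution = layer.requested_resolution;
    } else {
      // Grow to cover the largest width and height seen so far.
      out.requested_resolution->width = std::max(
          out.requested_resolution->width, layer.requested_resolution->width);
      out.requested_resolution->height = std::max(
          out.requested_resolution->height, layer.requested_resolution->height);
    }
  }
  return out;
}

int main() {
  SourceSettings s = Fold({{false, Resolution{640, 360}},
                           {true, Resolution{1280, 720}}});
  std::printf("active=%d max=%dx%d\n", s.any_active,
              s.requested_resolution->width, s.requested_resolution->height);
}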
force_disable_frame_dropper_ = - field_trial::IsDisabled(kFrameDropperFieldTrial) || + field_trials_.IsDisabled(kFrameDropperFieldTrial) || (num_layers > 1 && codec.mode == VideoCodecMode::kScreensharing); VideoEncoder::EncoderInfo info = encoder_->GetEncoderInfo(); @@ -1247,6 +1375,49 @@ void VideoStreamEncoder::ReconfigureEncoder() { stream_resource_manager_.ConfigureQualityScaler(info); stream_resource_manager_.ConfigureBandwidthQualityScaler(info); + + if (!encoder_initialized_) { + RTC_LOG(LS_WARNING) << "Failed to initialize " + << CodecTypeToPayloadString(codec.codecType) + << " encoder." + << "switch_encoder_on_init_failures: " + << switch_encoder_on_init_failures_; + + if (switch_encoder_on_init_failures_) { + RequestEncoderSwitch(); + } + } +} + +void VideoStreamEncoder::RequestEncoderSwitch() { + bool is_encoder_switching_supported = + settings_.encoder_switch_request_callback != nullptr; + bool is_encoder_selector_available = encoder_selector_ != nullptr; + + RTC_LOG(LS_INFO) << "RequestEncoderSwitch." + << " is_encoder_selector_available: " + << is_encoder_selector_available + << " is_encoder_switching_supported: " + << is_encoder_switching_supported; + + if (!is_encoder_switching_supported) { + return; + } + + // If encoder selector is available, switch to the encoder it prefers. + // Otherwise try switching to VP8 (default WebRTC codec). + absl::optional preferred_fallback_encoder; + if (is_encoder_selector_available) { + preferred_fallback_encoder = encoder_selector_->OnEncoderBroken(); + } + + if (!preferred_fallback_encoder) { + preferred_fallback_encoder = + SdpVideoFormat(CodecTypeToPayloadString(kVideoCodecVP8)); + } + + settings_.encoder_switch_request_callback->RequestEncoderSwitch( + *preferred_fallback_encoder, /*allow_default_fallback=*/true); } void VideoStreamEncoder::OnEncoderSettingsChanged() { @@ -1259,6 +1430,11 @@ void VideoStreamEncoder::OnEncoderSettingsChanged() { bool is_screenshare = encoder_settings.encoder_config().content_type == VideoEncoderConfig::ContentType::kScreen; degradation_preference_manager_->SetIsScreenshare(is_screenshare); + if (is_screenshare) { + frame_cadence_adapter_->SetZeroHertzModeEnabled( + FrameCadenceAdapterInterface::ZeroHertzModeParams{ + send_codec_.numberOfSimulcastStreams}); + } } void VideoStreamEncoder::OnFrame(Timestamp post_time, @@ -1456,8 +1632,16 @@ void VideoStreamEncoder::SetEncoderRates( last_encoder_rate_settings_ = rate_settings; } - if (!encoder_) { + if (!encoder_) return; + + // Make the cadence adapter know if streams were disabled. + for (int spatial_index = 0; + spatial_index != send_codec_.numberOfSimulcastStreams; ++spatial_index) { + frame_cadence_adapter_->UpdateLayerStatus( + spatial_index, + /*enabled=*/rate_settings.rate_control.target_bitrate + .GetSpatialLayerSum(spatial_index) > 0); } // `bitrate_allocation` is 0 it means that the network is down or the send @@ -1466,9 +1650,8 @@ void VideoStreamEncoder::SetEncoderRates( // bitrate. // TODO(perkj): Make sure all known encoder implementations handle zero // target bitrate and remove this check. 
- if (rate_settings.rate_control.bitrate.get_sum_bps() == 0) { + if (rate_settings.rate_control.bitrate.get_sum_bps() == 0) return; - } if (rate_control_changed) { encoder_->SetRates(rate_settings.rate_control); @@ -1509,6 +1692,16 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, if (!last_frame_info_ || video_frame.width() != last_frame_info_->width || video_frame.height() != last_frame_info_->height || video_frame.is_texture() != last_frame_info_->is_texture) { + if ((!last_frame_info_ || video_frame.width() != last_frame_info_->width || + video_frame.height() != last_frame_info_->height) && + settings_.encoder_switch_request_callback && encoder_selector_) { + if (auto encoder = encoder_selector_->OnResolutionChange( + {video_frame.width(), video_frame.height()})) { + settings_.encoder_switch_request_callback->RequestEncoderSwitch( + *encoder, /*allow_default_fallback=*/false); + } + } + pending_encoder_reconfiguration_ = true; last_frame_info_ = VideoFrameInfo(video_frame.width(), video_frame.height(), video_frame.is_texture()); @@ -1576,6 +1769,8 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, pending_frame_.reset(); accumulated_update_rect_.Union(video_frame.update_rect()); accumulated_update_rect_is_valid_ &= video_frame.has_update_rect(); + encoder_stats_observer_->OnFrameDropped( + VideoStreamEncoderObserver::DropReason::kEncoderQueue); } return; } @@ -1595,6 +1790,8 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, TraceFrameDropStart(); accumulated_update_rect_.Union(video_frame.update_rect()); accumulated_update_rect_is_valid_ &= video_frame.has_update_rect(); + encoder_stats_observer_->OnFrameDropped( + VideoStreamEncoderObserver::DropReason::kEncoderQueue); } return; } @@ -1629,11 +1826,13 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, int64_t time_when_posted_us) { RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_LOG(LS_VERBOSE) << __func__ << " posted " << time_when_posted_us + << " ntp time " << video_frame.ntp_time_ms(); // If the encoder fail we can't continue to encode frames. When this happens // the WebrtcVideoSender is notified and the whole VideoSendStream is // recreated. - if (encoder_failed_) + if (encoder_failed_ || !encoder_initialized_) return; // It's possible that EncodeVideoFrame can be called after we've completed @@ -1646,9 +1845,12 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, // Encoder metadata needs to be updated before encode complete callback. VideoEncoder::EncoderInfo info = encoder_->GetEncoderInfo(); - if (info.implementation_name != encoder_info_.implementation_name) { - encoder_stats_observer_->OnEncoderImplementationChanged( - info.implementation_name); + if (info.implementation_name != encoder_info_.implementation_name || + info.is_hardware_accelerated != encoder_info_.is_hardware_accelerated) { + encoder_stats_observer_->OnEncoderImplementationChanged({ + .name = info.implementation_name, + .is_hardware_accelerated = info.is_hardware_accelerated, + }); if (bitrate_adjuster_) { // Encoder implementation changed, reset overshoot detector states. 
bitrate_adjuster_->Reset(); @@ -1768,29 +1970,9 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, was_encode_called_since_last_initialization_ = true; if (encode_status < 0) { - if (encode_status == WEBRTC_VIDEO_CODEC_ENCODER_FAILURE) { - RTC_LOG(LS_ERROR) << "Encoder failed, failing encoder format: " - << encoder_config_.video_format.ToString(); - - if (settings_.encoder_switch_request_callback) { - if (encoder_selector_) { - if (auto encoder = encoder_selector_->OnEncoderBroken()) { - settings_.encoder_switch_request_callback->RequestEncoderSwitch( - *encoder); - } - } else { - encoder_failed_ = true; - settings_.encoder_switch_request_callback->RequestEncoderFallback(); - } - } else { - RTC_LOG(LS_ERROR) - << "Encoder failed but no encoder fallback callback is registered"; - } - } else { - RTC_LOG(LS_ERROR) << "Failed to encode frame. Error code: " - << encode_status; - } - + RTC_LOG(LS_ERROR) << "Encoder failed, failing encoder format: " + << encoder_config_.video_format.ToString(); + RequestEncoderSwitch(); return; } @@ -1799,6 +1981,13 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, } } +void VideoStreamEncoder::RequestRefreshFrame() { + worker_queue_->PostTask(SafeTask(task_safety_.flag(), [this] { + RTC_DCHECK_RUN_ON(worker_queue_); + video_source_sink_controller_.RequestRefreshFrame(); + })); +} + void VideoStreamEncoder::SendKeyFrame() { if (!encoder_queue_.IsCurrent()) { encoder_queue_.PostTask([this] { SendKeyFrame(); }); @@ -1808,8 +1997,13 @@ void VideoStreamEncoder::SendKeyFrame() { TRACE_EVENT0("webrtc", "OnKeyFrameRequest"); RTC_DCHECK(!next_frame_types_.empty()); - if (!encoder_) - return; // Shutting down. + if (frame_cadence_adapter_) + frame_cadence_adapter_->ProcessKeyFrameRequest(); + + if (!encoder_) { + RTC_DLOG(LS_INFO) << __func__ << " no encoder."; + return; // Shutting down, or not configured yet. + } // TODO(webrtc:10615): Map keyframe request to spatial layer. std::fill(next_frame_types_.begin(), next_frame_types_.end(), @@ -1830,26 +2024,17 @@ void VideoStreamEncoder::OnLossNotification( } } -EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( +EncodedImage VideoStreamEncoder::AugmentEncodedImage( const EncodedImage& encoded_image, const CodecSpecificInfo* codec_specific_info) { - TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded", - "timestamp", encoded_image.Timestamp()); - - // TODO(bugs.webrtc.org/10520): Signal the simulcast id explicitly. - - const size_t spatial_idx = encoded_image.SpatialIndex().value_or(0); EncodedImage image_copy(encoded_image); - + const size_t spatial_idx = encoded_image.SpatialIndex().value_or(0); frame_encode_metadata_writer_.FillTimingInfo(spatial_idx, &image_copy); - frame_encode_metadata_writer_.UpdateBitstream(codec_specific_info, &image_copy); - VideoCodecType codec_type = codec_specific_info ? codec_specific_info->codecType : VideoCodecType::kVideoCodecGeneric; - if (image_copy.qp_ < 0 && qp_parsing_allowed_) { // Parse encoded frame QP if that was not provided by encoder. image_copy.qp_ = qp_parser_ @@ -1857,6 +2042,10 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( image_copy.size()) .value_or(-1); } + RTC_LOG(LS_VERBOSE) << __func__ << " spatial_idx " << spatial_idx << " qp " + << image_copy.qp_; + image_copy.SetAtTargetQuality(codec_type == kVideoCodecVP8 && + image_copy.qp_ <= kVp8SteadyStateQpThreshold); // Piggyback ALR experiment group id and simulcast id into the content type. 
const uint8_t experiment_id = @@ -1874,14 +2063,42 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( RTC_CHECK(videocontenttypehelpers::SetSimulcastId( &image_copy.content_type_, static_cast(spatial_idx + 1))); - // Currently internal quality scaler is used for VP9 instead of webrtc qp - // scaler (in no-svc case or if only a single spatial layer is encoded). - // It has to be explicitly detected and reported to adaptation metrics. - // Post a task because `send_codec_` requires `encoder_queue_` lock. + return image_copy; +} + +EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( + const EncodedImage& encoded_image, + const CodecSpecificInfo* codec_specific_info) { + TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded", + "timestamp", encoded_image.Timestamp()); + + // TODO(bugs.webrtc.org/10520): Signal the simulcast id explicitly. + + const size_t spatial_idx = encoded_image.SpatialIndex().value_or(0); + const VideoCodecType codec_type = codec_specific_info + ? codec_specific_info->codecType + : VideoCodecType::kVideoCodecGeneric; + EncodedImage image_copy = + AugmentEncodedImage(encoded_image, codec_specific_info); + + // Post a task because `send_codec_` requires `encoder_queue_` lock and we + // need to update on quality convergence. unsigned int image_width = image_copy._encodedWidth; unsigned int image_height = image_copy._encodedHeight; - encoder_queue_.PostTask([this, codec_type, image_width, image_height] { + encoder_queue_.PostTask([this, codec_type, image_width, image_height, + spatial_idx, + at_target_quality = image_copy.IsAtTargetQuality()] { RTC_DCHECK_RUN_ON(&encoder_queue_); + + // Let the frame cadence adapter know about quality convergence. + if (frame_cadence_adapter_) + frame_cadence_adapter_->UpdateLayerQualityConvergence(spatial_idx, + at_target_quality); + + // Currently, the internal quality scaler is used for VP9 instead of the + // webrtc qp scaler (in the no-svc case or if only a single spatial layer is + // encoded). It has to be explicitly detected and reported to adaptation + // metrics. if (codec_type == VideoCodecType::kVideoCodecVP9 && send_codec_.VP9()->automaticResizeOn) { unsigned int expected_width = send_codec_.width; @@ -2019,7 +2236,8 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, if (!video_is_suspended && settings_.encoder_switch_request_callback && encoder_selector_) { if (auto encoder = encoder_selector_->OnAvailableBitrate(link_allocation)) { - settings_.encoder_switch_request_callback->RequestEncoderSwitch(*encoder); + settings_.encoder_switch_request_callback->RequestEncoderSwitch( + *encoder, /*allow_default_fallback=*/false); } } @@ -2053,19 +2271,27 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, RTC_LOG(LS_INFO) << "Video suspend state changed to: " << (video_is_suspended ? "suspended" : "not suspended"); encoder_stats_observer_->OnSuspendChange(video_is_suspended); - } - if (video_suspension_changed && !video_is_suspended && pending_frame_ && - !DropDueToSize(pending_frame_->size())) { - int64_t pending_time_us = - clock_->CurrentTime().us() - pending_frame_post_time_us_; - if (pending_time_us < kPendingFrameTimeoutMs * 1000) - EncodeVideoFrame(*pending_frame_, pending_frame_post_time_us_); - pending_frame_.reset(); + + if (!video_is_suspended && pending_frame_ && + !DropDueToSize(pending_frame_->size())) { + // A pending stored frame can be processed. 
+ int64_t pending_time_us = + clock_->CurrentTime().us() - pending_frame_post_time_us_; + if (pending_time_us < kPendingFrameTimeoutMs * 1000) + EncodeVideoFrame(*pending_frame_, pending_frame_post_time_us_); + pending_frame_.reset(); + } else if (!video_is_suspended && !pending_frame_ && + encoder_paused_and_dropped_frame_) { + // A frame was enqueued during pause-state, but since it was a native + // frame we could not store it in `pending_frame_` so request a + // refresh-frame instead. + RequestRefreshFrame(); + } } } bool VideoStreamEncoder::DropDueToSize(uint32_t pixel_count) const { - if (!stream_resource_manager_.DropInitialFrames() || + if (!encoder_ || !stream_resource_manager_.DropInitialFrames() || !encoder_target_bitrate_bps_.has_value()) { return false; } @@ -2116,8 +2342,13 @@ void VideoStreamEncoder::OnVideoSourceRestrictionsUpdated( RTC_LOG(LS_INFO) << "Updating sink restrictions from " << (reason ? reason->Name() : std::string("")) << " to " << restrictions.ToString(); - worker_queue_->PostTask(ToQueuedTask( - task_safety_, [this, restrictions = std::move(restrictions)]() { + + // TODO(webrtc:14451) Split video_source_sink_controller_ + // so that ownership on restrictions/wants is kept on &encoder_queue_ + latest_restrictions_ = restrictions; + + worker_queue_->PostTask(SafeTask( + task_safety_.flag(), [this, restrictions = std::move(restrictions)]() { RTC_DCHECK_RUN_ON(worker_queue_); video_source_sink_controller_.SetRestrictions(std::move(restrictions)); video_source_sink_controller_.PushSourceSinkSettings(); @@ -2141,7 +2372,6 @@ void VideoStreamEncoder::RunPostEncode(const EncodedImage& encoded_image, absl::optional encode_duration_us; if (encoded_image.timing_.flags != VideoSendTiming::kInvalid) { encode_duration_us = - // TODO(nisse): Maybe use capture_time_ms_ rather than encode_start_ms_? 
TimeDelta::Millis(encoded_image.timing_.encode_finish_ms - encoded_image.timing_.encode_start_ms) .us(); @@ -2177,8 +2407,8 @@ VideoStreamEncoder::AutomaticAnimationDetectionExperiment VideoStreamEncoder::ParseAutomatincAnimationDetectionFieldTrial() const { AutomaticAnimationDetectionExperiment result; - result.Parser()->Parse(webrtc::field_trial::FindFullName( - "WebRTC-AutomaticAnimationDetectionScreenshare")); + result.Parser()->Parse( + field_trials_.Lookup("WebRTC-AutomaticAnimationDetectionScreenshare")); if (!result.enabled) { RTC_LOG(LS_INFO) << "Automatic animation detection experiment is disabled."; @@ -2258,8 +2488,19 @@ void VideoStreamEncoder::CheckForAnimatedContent( RTC_LOG(LS_INFO) << "Removing resolution cap due to no consistent " "animation detection."; } + // TODO(webrtc:14451) Split video_source_sink_controller_ + // so that ownership on restrictions/wants is kept on &encoder_queue_ + if (should_cap_resolution) { + animate_restrictions_ = + VideoSourceRestrictions(kMaxAnimationPixels, + /* target_pixels_per_frame= */ absl::nullopt, + /* max_frame_rate= */ absl::nullopt); + } else { + animate_restrictions_.reset(); + } + worker_queue_->PostTask( - ToQueuedTask(task_safety_, [this, should_cap_resolution]() { + SafeTask(task_safety_.flag(), [this, should_cap_resolution]() { RTC_DCHECK_RUN_ON(worker_queue_); video_source_sink_controller_.SetPixelsPerFrameUpperLimit( should_cap_resolution @@ -2273,14 +2514,11 @@ void VideoStreamEncoder::CheckForAnimatedContent( void VideoStreamEncoder::InjectAdaptationResource( rtc::scoped_refptr resource, VideoAdaptationReason reason) { - rtc::Event map_resource_event; - encoder_queue_.PostTask([this, resource, reason, &map_resource_event] { + encoder_queue_.PostTask([this, resource = std::move(resource), reason] { RTC_DCHECK_RUN_ON(&encoder_queue_); additional_resources_.push_back(resource); stream_resource_manager_.AddResource(resource, reason); - map_resource_event.Set(); }); - map_resource_event.Wait(rtc::Event::kForever); } void VideoStreamEncoder::InjectAdaptationConstraint( diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h index cd181fc6fa..9af2e0bcff 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h @@ -18,13 +18,14 @@ #include #include "api/adaptation/resource.h" +#include "api/field_trials_view.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/units/data_rate.h" +#include "api/video/encoded_image.h" #include "api/video/video_bitrate_allocator.h" #include "api/video/video_rotation.h" #include "api/video/video_sink_interface.h" -#include "api/video/video_stream_encoder_interface.h" -#include "api/video/video_stream_encoder_observer.h" #include "api/video/video_stream_encoder_settings.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" @@ -40,7 +41,6 @@ #include "rtc_base/race_checker.h" #include "rtc_base/rate_statistics.h" #include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" #include "video/adaptation/video_stream_encoder_resource_manager.h" @@ -48,6 +48,8 @@ #include "video/frame_cadence_adapter.h" #include "video/frame_encode_metadata_writer.h" #include "video/video_source_sink_controller.h" +#include "video/video_stream_encoder_interface.h" +#include 
"video/video_stream_encoder_observer.h" namespace webrtc { @@ -79,9 +81,15 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, std::unique_ptr frame_cadence_adapter, std::unique_ptr encoder_queue, - BitrateAllocationCallbackType allocation_cb_type); + BitrateAllocationCallbackType allocation_cb_type, + const FieldTrialsView& field_trials, + webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector = + nullptr); ~VideoStreamEncoder() override; + VideoStreamEncoder(const VideoStreamEncoder&) = delete; + VideoStreamEncoder& operator=(const VideoStreamEncoder&) = delete; + void AddAdaptationResource(rtc::scoped_refptr resource) override; std::vector> GetAdaptationResources() override; @@ -121,7 +129,7 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, protected: // Used for testing. For example the `ScalingObserverInterface` methods must // be called on `encoder_queue_`. - rtc::TaskQueue* encoder_queue() { return &encoder_queue_; } + TaskQueueBase* encoder_queue() { return encoder_queue_.Get(); } void OnVideoSourceRestrictionsUpdated( VideoSourceRestrictions restrictions, @@ -155,6 +163,9 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, void OnDiscardedFrame() override { video_stream_encoder_.OnDiscardedFrame(); } + void RequestRefreshFrame() override { + video_stream_encoder_.RequestRefreshFrame(); + } private: VideoStreamEncoder& video_stream_encoder_; @@ -199,6 +210,7 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, int frames_scheduled_for_processing, const VideoFrame& video_frame); void OnDiscardedFrame(); + void RequestRefreshFrame(); void MaybeEncodeVideoFrame(const VideoFrame& frame, int64_t time_when_posted_in_ms); @@ -241,17 +253,32 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, int64_t time_when_posted_in_ms) RTC_RUN_ON(&encoder_queue_); + void RequestEncoderSwitch() RTC_RUN_ON(&encoder_queue_); + + // Augments an EncodedImage received from an encoder with parsable + // information. + EncodedImage AugmentEncodedImage( + const EncodedImage& encoded_image, + const CodecSpecificInfo* codec_specific_info); + + const FieldTrialsView& field_trials_; TaskQueueBase* const worker_queue_; - const uint32_t number_of_cores_; + const int number_of_cores_; EncoderSink* sink_; const VideoStreamEncoderSettings settings_; const BitrateAllocationCallbackType allocation_cb_type_; const RateControlSettings rate_control_settings_; + webrtc::VideoEncoderFactory::EncoderSelectorInterface* const + encoder_selector_from_constructor_; std::unique_ptr const - encoder_selector_; + encoder_selector_from_factory_; + // Pointing to either encoder_selector_from_constructor_ or + // encoder_selector_from_factory_ but can be nullptr. + VideoEncoderFactory::EncoderSelectorInterface* const encoder_selector_; + VideoStreamEncoderObserver* const encoder_stats_observer_; // Adapter that avoids public inheritance of the cadence adapter's callback // interface. @@ -388,13 +415,13 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // Provides video stream input states: current resolution and frame rate. VideoStreamInputStateProvider input_state_provider_; - std::unique_ptr video_stream_adapter_ + const std::unique_ptr video_stream_adapter_ RTC_GUARDED_BY(&encoder_queue_); // Responsible for adapting input resolution or frame rate to ensure resources // (e.g. CPU or bandwidth) are not overused. Adding resources can occur on any // thread. 
std::unique_ptr<ResourceAdaptationProcessorInterface> - resource_adaptation_processor_; + resource_adaptation_processor_ RTC_GUARDED_BY(&encoder_queue_); std::unique_ptr<DegradationPreferenceManager> degradation_preference_manager_ RTC_GUARDED_BY(&encoder_queue_); std::vector<AdaptationConstraint*> adaptation_constraints_ @@ -425,14 +452,34 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, QpParser qp_parser_; const bool qp_parsing_allowed_; - // Public methods are proxied to the task queues. The queues must be destroyed - // first to make sure no tasks run that use other members. - rtc::TaskQueue encoder_queue_; + // Enables encoder switching on initialization failures. + bool switch_encoder_on_init_failures_; + + const absl::optional<int> vp9_low_tier_core_threshold_; + + // These are copies of restrictions (glorified max_pixel_count) set by + // a) OnVideoSourceRestrictionsUpdated + // b) CheckForAnimatedContent + // They are used to scale down encoding resolution if needed when using + // requested_resolution. + // + // TODO(webrtc:14451) Split video_source_sink_controller_ + // so that ownership on restrictions/wants is kept on &encoder_queue_, that + // these extra copies would not be needed. + absl::optional<VideoSourceRestrictions> latest_restrictions_ + RTC_GUARDED_BY(&encoder_queue_); + absl::optional<VideoSourceRestrictions> animate_restrictions_ + RTC_GUARDED_BY(&encoder_queue_); // Used to cancel any potentially pending tasks to the worker thread. + // Referenced by tasks running on `encoder_queue_` so need to be destroyed + // after stopping that queue. Must be created and destroyed on + // `worker_queue_`. ScopedTaskSafety task_safety_; - RTC_DISALLOW_COPY_AND_ASSIGN(VideoStreamEncoder); + // Public methods are proxied to the task queues. The queues must be destroyed + // first to make sure no tasks run that use other members. + rtc::TaskQueue encoder_queue_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_interface.h b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder_interface.h similarity index 94% rename from TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_interface.h rename to TMessagesProj/jni/voip/webrtc/video/video_stream_encoder_interface.h index f2d7e131e6..38f180d121 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_interface.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder_interface.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef API_VIDEO_VIDEO_STREAM_ENCODER_INTERFACE_H_ -#define API_VIDEO_VIDEO_STREAM_ENCODER_INTERFACE_H_ +#ifndef VIDEO_VIDEO_STREAM_ENCODER_INTERFACE_H_ +#define VIDEO_VIDEO_STREAM_ENCODER_INTERFACE_H_ #include <vector> @@ -23,7 +23,7 @@ #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/video_encoder_config.h" +#include "video/config/video_encoder_config.h" namespace webrtc { @@ -74,8 +74,8 @@ class VideoStreamEncoderInterface { // or frame rate may be reduced. The VideoStreamEncoder registers itself with // `source`, and signals adaptation decisions to the source in the form of // VideoSinkWants. - // TODO(nisse): When adaptation logic is extracted from this class, - // it no longer needs to know the source. + // TODO(bugs.webrtc.org/14246): When adaptation logic is extracted from this + // class, it no longer needs to know the source.
virtual void SetSource( rtc::VideoSourceInterface<VideoFrame>* source, const DegradationPreference& degradation_preference) = 0; @@ -137,4 +137,4 @@ } // namespace webrtc -#endif // API_VIDEO_VIDEO_STREAM_ENCODER_INTERFACE_H_ +#endif // VIDEO_VIDEO_STREAM_ENCODER_INTERFACE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_observer.h b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder_observer.h similarity index 83% rename from TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_observer.h rename to TMessagesProj/jni/voip/webrtc/video/video_stream_encoder_observer.h index ea8196ce6d..c10412181d 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_observer.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder_observer.h @@ -8,19 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef API_VIDEO_VIDEO_STREAM_ENCODER_OBSERVER_H_ -#define API_VIDEO_VIDEO_STREAM_ENCODER_OBSERVER_H_ +#ifndef VIDEO_VIDEO_STREAM_ENCODER_OBSERVER_H_ +#define VIDEO_VIDEO_STREAM_ENCODER_OBSERVER_H_ #include <string> #include <vector> -#include "absl/types/optional.h" #include "api/video/video_adaptation_counters.h" #include "api/video/video_adaptation_reason.h" #include "api/video/video_bitrate_allocation.h" -#include "api/video/video_codec_constants.h" #include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/video_encoder_config.h" +#include "video/config/video_encoder_config.h" namespace webrtc { @@ -29,6 +27,11 @@ namespace webrtc { // encoded data. So use some other type to represent that. class EncodedImage; +struct EncoderImplementation { + const std::string& name; + bool is_hardware_accelerated; +}; + // Broken out into a base class, with public inheritance below, only to ease // unit testing of the internal class OveruseFrameDetector. class CpuOveruseMetricsObserver { @@ -53,7 +56,6 @@ class VideoStreamEncoderObserver : public CpuOveruseMetricsObserver { bool framerate_scaling_enabled; }; - // TODO(nisse): Duplicates enum EncodedImageCallback::DropReason. enum class DropReason { kSource, kEncoderQueue, @@ -66,13 +68,13 @@ virtual void OnIncomingFrame(int width, int height) = 0; - // TODO(nisse): Merge into one callback per encoded frame. + // TODO(bugs.webrtc.org/8504): Merge into one callback per encoded frame. using CpuOveruseMetricsObserver::OnEncodedFrameTimeMeasured; virtual void OnSendEncodedImage(const EncodedImage& encoded_image, const CodecSpecificInfo* codec_info) = 0; virtual void OnEncoderImplementationChanged( - const std::string& implementation_name) = 0; + EncoderImplementation implementation) = 0; virtual void OnFrameDropped(DropReason reason) = 0; @@ -105,12 +107,13 @@ class VideoStreamEncoderObserver : public CpuOveruseMetricsObserver { // down. virtual void OnEncoderInternalScalerUpdate(bool is_scaled) {} - // TODO(nisse): VideoStreamEncoder wants to query the stats, which makes this - // not a pure observer. GetInputFrameRate is needed for the cpu adaptation, so - // can be deleted if that responsibility is moved out to a VideoStreamAdaptor - // class. + // TODO(bugs.webrtc.org/14246): VideoStreamEncoder wants to query the stats, + // which makes this not a pure observer. GetInputFrameRate is needed for the + // cpu adaptation, so can be deleted if that responsibility is moved out to a + // VideoStreamAdaptor class.
virtual int GetInputFrameRate() const = 0; }; } // namespace webrtc -#endif // API_VIDEO_VIDEO_STREAM_ENCODER_OBSERVER_H_ + +#endif // VIDEO_VIDEO_STREAM_ENCODER_OBSERVER_H_ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_0.png b/TMessagesProj/src/emojis/apple/emoji/0_0.png index f44a85f627..3977ec0b15 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_0.png and b/TMessagesProj/src/emojis/apple/emoji/0_0.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1.png b/TMessagesProj/src/emojis/apple/emoji/0_1.png index 21f6659229..8dbe7b2031 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1.png and b/TMessagesProj/src/emojis/apple/emoji/0_1.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_10.png b/TMessagesProj/src/emojis/apple/emoji/0_10.png index cb6128b360..bdf97aff10 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_10.png and b/TMessagesProj/src/emojis/apple/emoji/0_10.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_100.png b/TMessagesProj/src/emojis/apple/emoji/0_100.png index 81644b3423..c2ab8c199a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_100.png and b/TMessagesProj/src/emojis/apple/emoji/0_100.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1000.png b/TMessagesProj/src/emojis/apple/emoji/0_1000.png index 9156e39715..0194ecac21 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1000.png and b/TMessagesProj/src/emojis/apple/emoji/0_1000.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1001.png b/TMessagesProj/src/emojis/apple/emoji/0_1001.png index f5f37c5e4c..58164e5ba9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1001.png and b/TMessagesProj/src/emojis/apple/emoji/0_1001.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1002.png b/TMessagesProj/src/emojis/apple/emoji/0_1002.png index 341e400c29..98fdf8ad8a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1002.png and b/TMessagesProj/src/emojis/apple/emoji/0_1002.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1003.png b/TMessagesProj/src/emojis/apple/emoji/0_1003.png index 2014d39671..80dc0dd324 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1003.png and b/TMessagesProj/src/emojis/apple/emoji/0_1003.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1004.png b/TMessagesProj/src/emojis/apple/emoji/0_1004.png index f2dc0dfcd4..e342ae1f53 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1004.png and b/TMessagesProj/src/emojis/apple/emoji/0_1004.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1005.png b/TMessagesProj/src/emojis/apple/emoji/0_1005.png index e16aeaa7b9..90f174b604 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1005.png and b/TMessagesProj/src/emojis/apple/emoji/0_1005.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1006.png b/TMessagesProj/src/emojis/apple/emoji/0_1006.png index 7a9d2b4cce..930424571e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1006.png and b/TMessagesProj/src/emojis/apple/emoji/0_1006.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1007.png b/TMessagesProj/src/emojis/apple/emoji/0_1007.png index e0b66c78fd..f1ccf1b675 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1007.png and b/TMessagesProj/src/emojis/apple/emoji/0_1007.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1008.png b/TMessagesProj/src/emojis/apple/emoji/0_1008.png index 00d9b8ae50..c225fe0f77 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1008.png and b/TMessagesProj/src/emojis/apple/emoji/0_1008.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1009.png b/TMessagesProj/src/emojis/apple/emoji/0_1009.png index 9158f8ed6b..84341a6146 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1009.png and b/TMessagesProj/src/emojis/apple/emoji/0_1009.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_101.png b/TMessagesProj/src/emojis/apple/emoji/0_101.png index 3a5ca21e9d..d26e60ae32 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_101.png and b/TMessagesProj/src/emojis/apple/emoji/0_101.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1010.png b/TMessagesProj/src/emojis/apple/emoji/0_1010.png index 6ed3da5193..5f5dcfcd87 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1010.png and b/TMessagesProj/src/emojis/apple/emoji/0_1010.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1011.png b/TMessagesProj/src/emojis/apple/emoji/0_1011.png index ed9b61adfb..1323993c71 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1011.png and b/TMessagesProj/src/emojis/apple/emoji/0_1011.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1012.png b/TMessagesProj/src/emojis/apple/emoji/0_1012.png index 13dafaf789..92773cb058 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1012.png and b/TMessagesProj/src/emojis/apple/emoji/0_1012.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1013.png b/TMessagesProj/src/emojis/apple/emoji/0_1013.png index 9245a05667..b3f7012cb1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1013.png and b/TMessagesProj/src/emojis/apple/emoji/0_1013.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1014.png b/TMessagesProj/src/emojis/apple/emoji/0_1014.png index 06de35b3d6..df045504e9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1014.png and b/TMessagesProj/src/emojis/apple/emoji/0_1014.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1015.png b/TMessagesProj/src/emojis/apple/emoji/0_1015.png index f2cdf9be33..692649aa75 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1015.png and b/TMessagesProj/src/emojis/apple/emoji/0_1015.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1016.png b/TMessagesProj/src/emojis/apple/emoji/0_1016.png index 2a4e9d1364..1a1a530750 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1016.png and b/TMessagesProj/src/emojis/apple/emoji/0_1016.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1017.png b/TMessagesProj/src/emojis/apple/emoji/0_1017.png index 8978fd0e3a..4dada7c139 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1017.png and b/TMessagesProj/src/emojis/apple/emoji/0_1017.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1018.png b/TMessagesProj/src/emojis/apple/emoji/0_1018.png index 2677fb5b6e..316c80ef89 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1018.png and b/TMessagesProj/src/emojis/apple/emoji/0_1018.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1019.png b/TMessagesProj/src/emojis/apple/emoji/0_1019.png index faf435b422..992dbfb080 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1019.png and b/TMessagesProj/src/emojis/apple/emoji/0_1019.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_102.png b/TMessagesProj/src/emojis/apple/emoji/0_102.png index 3a0fcfbf24..15a751f5b4 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_102.png and b/TMessagesProj/src/emojis/apple/emoji/0_102.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1020.png b/TMessagesProj/src/emojis/apple/emoji/0_1020.png index e8c0388159..9229e0ce7a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1020.png and b/TMessagesProj/src/emojis/apple/emoji/0_1020.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1021.png b/TMessagesProj/src/emojis/apple/emoji/0_1021.png index 529a78435f..5919371192 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1021.png and b/TMessagesProj/src/emojis/apple/emoji/0_1021.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1022.png b/TMessagesProj/src/emojis/apple/emoji/0_1022.png index 37c0643690..906d249b72 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1022.png and b/TMessagesProj/src/emojis/apple/emoji/0_1022.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1023.png b/TMessagesProj/src/emojis/apple/emoji/0_1023.png index d03a5daa8c..3c330156fd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1023.png and b/TMessagesProj/src/emojis/apple/emoji/0_1023.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1024.png b/TMessagesProj/src/emojis/apple/emoji/0_1024.png index a6eec46f20..1482c83dc8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1024.png and b/TMessagesProj/src/emojis/apple/emoji/0_1024.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1025.png b/TMessagesProj/src/emojis/apple/emoji/0_1025.png index 81886dad54..2020f05a7b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1025.png and b/TMessagesProj/src/emojis/apple/emoji/0_1025.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1026.png b/TMessagesProj/src/emojis/apple/emoji/0_1026.png index 57b770f8a5..b18703b449 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1026.png and b/TMessagesProj/src/emojis/apple/emoji/0_1026.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1027.png b/TMessagesProj/src/emojis/apple/emoji/0_1027.png index d86eb12d50..79349c327a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1027.png and b/TMessagesProj/src/emojis/apple/emoji/0_1027.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1028.png b/TMessagesProj/src/emojis/apple/emoji/0_1028.png index 2b6ec44d01..d4e6e86dde 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1028.png and b/TMessagesProj/src/emojis/apple/emoji/0_1028.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1029.png b/TMessagesProj/src/emojis/apple/emoji/0_1029.png index 82e71662ab..ec2aea024e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1029.png and b/TMessagesProj/src/emojis/apple/emoji/0_1029.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_103.png b/TMessagesProj/src/emojis/apple/emoji/0_103.png index dc19be0c22..94f1015010 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_103.png and b/TMessagesProj/src/emojis/apple/emoji/0_103.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1030.png b/TMessagesProj/src/emojis/apple/emoji/0_1030.png index bdc931074b..a336013cd2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1030.png and b/TMessagesProj/src/emojis/apple/emoji/0_1030.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1031.png b/TMessagesProj/src/emojis/apple/emoji/0_1031.png index 589046fc16..48825e09ff 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1031.png and b/TMessagesProj/src/emojis/apple/emoji/0_1031.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1032.png b/TMessagesProj/src/emojis/apple/emoji/0_1032.png index 61865e804a..d8419ddd4b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1032.png and b/TMessagesProj/src/emojis/apple/emoji/0_1032.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1033.png b/TMessagesProj/src/emojis/apple/emoji/0_1033.png index 59e7b55d1a..f35ef8e252 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1033.png and b/TMessagesProj/src/emojis/apple/emoji/0_1033.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1034.png b/TMessagesProj/src/emojis/apple/emoji/0_1034.png index 37cff176d8..1fb589906f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1034.png and b/TMessagesProj/src/emojis/apple/emoji/0_1034.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1035.png b/TMessagesProj/src/emojis/apple/emoji/0_1035.png index 537879ea35..1248aa87dc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1035.png and b/TMessagesProj/src/emojis/apple/emoji/0_1035.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1036.png b/TMessagesProj/src/emojis/apple/emoji/0_1036.png index c6ee7df23b..599412049a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1036.png and b/TMessagesProj/src/emojis/apple/emoji/0_1036.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1037.png b/TMessagesProj/src/emojis/apple/emoji/0_1037.png index 78b57c6495..822b4d3b5d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1037.png and b/TMessagesProj/src/emojis/apple/emoji/0_1037.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1038.png b/TMessagesProj/src/emojis/apple/emoji/0_1038.png index 540c7d1662..167e6809fb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1038.png and b/TMessagesProj/src/emojis/apple/emoji/0_1038.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1039.png b/TMessagesProj/src/emojis/apple/emoji/0_1039.png index 44df3f2d29..43da8e18d8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1039.png and b/TMessagesProj/src/emojis/apple/emoji/0_1039.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_104.png b/TMessagesProj/src/emojis/apple/emoji/0_104.png index 2cf06cfe5c..c60382dfc4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_104.png and b/TMessagesProj/src/emojis/apple/emoji/0_104.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1040.png b/TMessagesProj/src/emojis/apple/emoji/0_1040.png index 365993560f..52caadbab4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1040.png and b/TMessagesProj/src/emojis/apple/emoji/0_1040.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1041.png b/TMessagesProj/src/emojis/apple/emoji/0_1041.png index 82f34be543..22f4678e1a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1041.png and b/TMessagesProj/src/emojis/apple/emoji/0_1041.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1042.png b/TMessagesProj/src/emojis/apple/emoji/0_1042.png index 870bc68d6c..f57009fedb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1042.png and b/TMessagesProj/src/emojis/apple/emoji/0_1042.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1043.png b/TMessagesProj/src/emojis/apple/emoji/0_1043.png index f6e72648d3..3bd836903f 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1043.png and b/TMessagesProj/src/emojis/apple/emoji/0_1043.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1044.png b/TMessagesProj/src/emojis/apple/emoji/0_1044.png index 4f9f2915bd..d776b8796a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1044.png and b/TMessagesProj/src/emojis/apple/emoji/0_1044.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1045.png b/TMessagesProj/src/emojis/apple/emoji/0_1045.png index 635f45d3d2..a66e443b0b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1045.png and b/TMessagesProj/src/emojis/apple/emoji/0_1045.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1046.png b/TMessagesProj/src/emojis/apple/emoji/0_1046.png index 3ff89e627d..3f0b79df16 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1046.png and b/TMessagesProj/src/emojis/apple/emoji/0_1046.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1047.png b/TMessagesProj/src/emojis/apple/emoji/0_1047.png index 558a274aaf..a3a599d08e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1047.png and b/TMessagesProj/src/emojis/apple/emoji/0_1047.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1048.png b/TMessagesProj/src/emojis/apple/emoji/0_1048.png index a204f5f41d..5b99bdf916 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1048.png and b/TMessagesProj/src/emojis/apple/emoji/0_1048.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1049.png b/TMessagesProj/src/emojis/apple/emoji/0_1049.png index 04bbc965bc..5d08f8a1b7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1049.png and b/TMessagesProj/src/emojis/apple/emoji/0_1049.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_105.png b/TMessagesProj/src/emojis/apple/emoji/0_105.png index 912b02fe94..5aed64bc6a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_105.png and b/TMessagesProj/src/emojis/apple/emoji/0_105.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1050.png b/TMessagesProj/src/emojis/apple/emoji/0_1050.png index 4f3a67ef9f..fa045231d7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1050.png and b/TMessagesProj/src/emojis/apple/emoji/0_1050.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1051.png b/TMessagesProj/src/emojis/apple/emoji/0_1051.png index bf81da1e42..9c03876d6f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1051.png and b/TMessagesProj/src/emojis/apple/emoji/0_1051.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1052.png b/TMessagesProj/src/emojis/apple/emoji/0_1052.png index 52c5683484..74ccbddb33 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1052.png and b/TMessagesProj/src/emojis/apple/emoji/0_1052.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1053.png b/TMessagesProj/src/emojis/apple/emoji/0_1053.png index cdc973db94..8d9c3d4112 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1053.png and b/TMessagesProj/src/emojis/apple/emoji/0_1053.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1054.png b/TMessagesProj/src/emojis/apple/emoji/0_1054.png index fa8bc00584..b0b29b83ff 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1054.png and b/TMessagesProj/src/emojis/apple/emoji/0_1054.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1055.png b/TMessagesProj/src/emojis/apple/emoji/0_1055.png index 4961f37e3f..4fb8cb32e5 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1055.png and b/TMessagesProj/src/emojis/apple/emoji/0_1055.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1056.png b/TMessagesProj/src/emojis/apple/emoji/0_1056.png index fed74d9d4a..336f745160 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1056.png and b/TMessagesProj/src/emojis/apple/emoji/0_1056.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1057.png b/TMessagesProj/src/emojis/apple/emoji/0_1057.png index 8baa34f767..a1e6044256 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1057.png and b/TMessagesProj/src/emojis/apple/emoji/0_1057.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1058.png b/TMessagesProj/src/emojis/apple/emoji/0_1058.png index 1fc80d86ed..83aa32c165 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1058.png and b/TMessagesProj/src/emojis/apple/emoji/0_1058.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1059.png b/TMessagesProj/src/emojis/apple/emoji/0_1059.png index e4fec506a7..3ad8904510 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1059.png and b/TMessagesProj/src/emojis/apple/emoji/0_1059.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_106.png b/TMessagesProj/src/emojis/apple/emoji/0_106.png index 4c25e30301..25661d2283 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_106.png and b/TMessagesProj/src/emojis/apple/emoji/0_106.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1060.png b/TMessagesProj/src/emojis/apple/emoji/0_1060.png index 89916b7884..a600c6d642 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1060.png and b/TMessagesProj/src/emojis/apple/emoji/0_1060.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1061.png b/TMessagesProj/src/emojis/apple/emoji/0_1061.png index 7ed30da819..b9d77231c3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1061.png and b/TMessagesProj/src/emojis/apple/emoji/0_1061.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1062.png b/TMessagesProj/src/emojis/apple/emoji/0_1062.png index 58bf2cc837..432fabc9e2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1062.png and b/TMessagesProj/src/emojis/apple/emoji/0_1062.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1063.png b/TMessagesProj/src/emojis/apple/emoji/0_1063.png index 71f8025610..f31f82640e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1063.png and b/TMessagesProj/src/emojis/apple/emoji/0_1063.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1064.png b/TMessagesProj/src/emojis/apple/emoji/0_1064.png index 5bbf74ba7b..eda08953b6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1064.png and b/TMessagesProj/src/emojis/apple/emoji/0_1064.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1065.png b/TMessagesProj/src/emojis/apple/emoji/0_1065.png index f8f3a95d3f..c5a0a28c13 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1065.png and b/TMessagesProj/src/emojis/apple/emoji/0_1065.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1066.png b/TMessagesProj/src/emojis/apple/emoji/0_1066.png index cd42f13fd5..f9e5531a7c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1066.png and b/TMessagesProj/src/emojis/apple/emoji/0_1066.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1067.png b/TMessagesProj/src/emojis/apple/emoji/0_1067.png index 33cad504c5..3e9867b9e3 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1067.png and b/TMessagesProj/src/emojis/apple/emoji/0_1067.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1068.png b/TMessagesProj/src/emojis/apple/emoji/0_1068.png index 2e68d95833..cfe979fdd0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1068.png and b/TMessagesProj/src/emojis/apple/emoji/0_1068.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1069.png b/TMessagesProj/src/emojis/apple/emoji/0_1069.png index 3e2c26edfb..92b944cd0c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1069.png and b/TMessagesProj/src/emojis/apple/emoji/0_1069.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_107.png b/TMessagesProj/src/emojis/apple/emoji/0_107.png index 0697a98b6b..61ba275873 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_107.png and b/TMessagesProj/src/emojis/apple/emoji/0_107.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1070.png b/TMessagesProj/src/emojis/apple/emoji/0_1070.png index f6893fff81..5e5343ddfa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1070.png and b/TMessagesProj/src/emojis/apple/emoji/0_1070.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1071.png b/TMessagesProj/src/emojis/apple/emoji/0_1071.png index d98ea71690..a986cb9d4a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1071.png and b/TMessagesProj/src/emojis/apple/emoji/0_1071.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1072.png b/TMessagesProj/src/emojis/apple/emoji/0_1072.png index 573854cdf7..094fe64a28 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1072.png and b/TMessagesProj/src/emojis/apple/emoji/0_1072.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1073.png b/TMessagesProj/src/emojis/apple/emoji/0_1073.png index 60d09751cd..6c078b7c44 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1073.png and b/TMessagesProj/src/emojis/apple/emoji/0_1073.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1074.png b/TMessagesProj/src/emojis/apple/emoji/0_1074.png index 939851d555..fe27c23f00 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1074.png and b/TMessagesProj/src/emojis/apple/emoji/0_1074.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1075.png b/TMessagesProj/src/emojis/apple/emoji/0_1075.png index dcaa59fd51..898fafb471 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1075.png and b/TMessagesProj/src/emojis/apple/emoji/0_1075.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1076.png b/TMessagesProj/src/emojis/apple/emoji/0_1076.png index 3cd1f025f5..d597207f21 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1076.png and b/TMessagesProj/src/emojis/apple/emoji/0_1076.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1077.png b/TMessagesProj/src/emojis/apple/emoji/0_1077.png index 7150ad6123..3d2921d929 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1077.png and b/TMessagesProj/src/emojis/apple/emoji/0_1077.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1078.png b/TMessagesProj/src/emojis/apple/emoji/0_1078.png index 3d2b41c290..cd840c27f1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1078.png and b/TMessagesProj/src/emojis/apple/emoji/0_1078.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1079.png b/TMessagesProj/src/emojis/apple/emoji/0_1079.png index fc04b25bb8..d5209e1b21 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1079.png and b/TMessagesProj/src/emojis/apple/emoji/0_1079.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_108.png b/TMessagesProj/src/emojis/apple/emoji/0_108.png index 2194349823..c69ef93bfd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_108.png and b/TMessagesProj/src/emojis/apple/emoji/0_108.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1080.png b/TMessagesProj/src/emojis/apple/emoji/0_1080.png index 84de03b227..5a53533eb4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1080.png and b/TMessagesProj/src/emojis/apple/emoji/0_1080.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1081.png b/TMessagesProj/src/emojis/apple/emoji/0_1081.png index d65650d90f..d2f23824f6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1081.png and b/TMessagesProj/src/emojis/apple/emoji/0_1081.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1082.png b/TMessagesProj/src/emojis/apple/emoji/0_1082.png index 2a9c8855f3..fcf663e29a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1082.png and b/TMessagesProj/src/emojis/apple/emoji/0_1082.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1083.png b/TMessagesProj/src/emojis/apple/emoji/0_1083.png index f70908c985..a841bd07f3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1083.png and b/TMessagesProj/src/emojis/apple/emoji/0_1083.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1084.png b/TMessagesProj/src/emojis/apple/emoji/0_1084.png index d9fc49ebfb..8e2044ddcb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1084.png and b/TMessagesProj/src/emojis/apple/emoji/0_1084.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1085.png b/TMessagesProj/src/emojis/apple/emoji/0_1085.png index b849889674..328a2e062d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1085.png and b/TMessagesProj/src/emojis/apple/emoji/0_1085.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1086.png b/TMessagesProj/src/emojis/apple/emoji/0_1086.png index 8da826bef8..65507998e9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1086.png and b/TMessagesProj/src/emojis/apple/emoji/0_1086.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1087.png b/TMessagesProj/src/emojis/apple/emoji/0_1087.png index a52b9ffc90..2c6ab9bcfe 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1087.png and b/TMessagesProj/src/emojis/apple/emoji/0_1087.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1088.png b/TMessagesProj/src/emojis/apple/emoji/0_1088.png index cf64fdedeb..ccf484fe58 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1088.png and b/TMessagesProj/src/emojis/apple/emoji/0_1088.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1089.png b/TMessagesProj/src/emojis/apple/emoji/0_1089.png index a053b56461..8728b83ba9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1089.png and b/TMessagesProj/src/emojis/apple/emoji/0_1089.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_109.png b/TMessagesProj/src/emojis/apple/emoji/0_109.png index eb3c5d54a9..95f6c170f7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_109.png and b/TMessagesProj/src/emojis/apple/emoji/0_109.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1090.png b/TMessagesProj/src/emojis/apple/emoji/0_1090.png index f4c167e0ea..4a20531b6b 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1090.png and b/TMessagesProj/src/emojis/apple/emoji/0_1090.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1091.png b/TMessagesProj/src/emojis/apple/emoji/0_1091.png index 338d9e5201..e7f15d4ff0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1091.png and b/TMessagesProj/src/emojis/apple/emoji/0_1091.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1092.png b/TMessagesProj/src/emojis/apple/emoji/0_1092.png index 566f449818..0b17f0126c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1092.png and b/TMessagesProj/src/emojis/apple/emoji/0_1092.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1093.png b/TMessagesProj/src/emojis/apple/emoji/0_1093.png index d1a7125f23..6c1052712e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1093.png and b/TMessagesProj/src/emojis/apple/emoji/0_1093.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1094.png b/TMessagesProj/src/emojis/apple/emoji/0_1094.png index 1092900d4c..5811a9fc9a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1094.png and b/TMessagesProj/src/emojis/apple/emoji/0_1094.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1095.png b/TMessagesProj/src/emojis/apple/emoji/0_1095.png index 4acdc084be..446bfaad22 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1095.png and b/TMessagesProj/src/emojis/apple/emoji/0_1095.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1096.png b/TMessagesProj/src/emojis/apple/emoji/0_1096.png index 9a968667f4..ed5be6f4c6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1096.png and b/TMessagesProj/src/emojis/apple/emoji/0_1096.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1097.png b/TMessagesProj/src/emojis/apple/emoji/0_1097.png index 04f6f5cf20..17ba634817 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1097.png and b/TMessagesProj/src/emojis/apple/emoji/0_1097.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1098.png b/TMessagesProj/src/emojis/apple/emoji/0_1098.png index fb8aa7d453..ac435e3d7c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1098.png and b/TMessagesProj/src/emojis/apple/emoji/0_1098.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1099.png b/TMessagesProj/src/emojis/apple/emoji/0_1099.png index 615de04305..3f2cc656db 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1099.png and b/TMessagesProj/src/emojis/apple/emoji/0_1099.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_11.png b/TMessagesProj/src/emojis/apple/emoji/0_11.png index f20e41147a..275f5c7a92 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_11.png and b/TMessagesProj/src/emojis/apple/emoji/0_11.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_110.png b/TMessagesProj/src/emojis/apple/emoji/0_110.png index 2f40468c3e..33db597795 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_110.png and b/TMessagesProj/src/emojis/apple/emoji/0_110.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1100.png b/TMessagesProj/src/emojis/apple/emoji/0_1100.png index f311d6fe40..f8fc5576c7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1100.png and b/TMessagesProj/src/emojis/apple/emoji/0_1100.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1101.png b/TMessagesProj/src/emojis/apple/emoji/0_1101.png index c1a656ce49..98a9d790cb 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1101.png and b/TMessagesProj/src/emojis/apple/emoji/0_1101.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1102.png b/TMessagesProj/src/emojis/apple/emoji/0_1102.png index 496a59ef40..882cdd7232 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1102.png and b/TMessagesProj/src/emojis/apple/emoji/0_1102.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1103.png b/TMessagesProj/src/emojis/apple/emoji/0_1103.png index 0d5f58fe29..6e6243ea94 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1103.png and b/TMessagesProj/src/emojis/apple/emoji/0_1103.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1104.png b/TMessagesProj/src/emojis/apple/emoji/0_1104.png index 4166372418..74764e741f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1104.png and b/TMessagesProj/src/emojis/apple/emoji/0_1104.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1105.png b/TMessagesProj/src/emojis/apple/emoji/0_1105.png index 2fba191925..6b5417857e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1105.png and b/TMessagesProj/src/emojis/apple/emoji/0_1105.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1106.png b/TMessagesProj/src/emojis/apple/emoji/0_1106.png index ba01c32e0f..02d459870b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1106.png and b/TMessagesProj/src/emojis/apple/emoji/0_1106.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1107.png b/TMessagesProj/src/emojis/apple/emoji/0_1107.png index e483c7722b..a602003551 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1107.png and b/TMessagesProj/src/emojis/apple/emoji/0_1107.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1108.png b/TMessagesProj/src/emojis/apple/emoji/0_1108.png index 04429f115f..dd87cdaba5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1108.png and b/TMessagesProj/src/emojis/apple/emoji/0_1108.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1109.png b/TMessagesProj/src/emojis/apple/emoji/0_1109.png index 1bba3a6816..366da32c0e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1109.png and b/TMessagesProj/src/emojis/apple/emoji/0_1109.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_111.png b/TMessagesProj/src/emojis/apple/emoji/0_111.png index c3e4a76cec..30838f4b2b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_111.png and b/TMessagesProj/src/emojis/apple/emoji/0_111.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1110.png b/TMessagesProj/src/emojis/apple/emoji/0_1110.png index 127b2496a1..4a8841fe05 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1110.png and b/TMessagesProj/src/emojis/apple/emoji/0_1110.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1111.png b/TMessagesProj/src/emojis/apple/emoji/0_1111.png index 8d6f62deec..7b19e6103b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1111.png and b/TMessagesProj/src/emojis/apple/emoji/0_1111.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1112.png b/TMessagesProj/src/emojis/apple/emoji/0_1112.png index cd8a3b5c8d..059451b32e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1112.png and b/TMessagesProj/src/emojis/apple/emoji/0_1112.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1113.png b/TMessagesProj/src/emojis/apple/emoji/0_1113.png index 12a764ba85..bfb4ee7cf6 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1113.png and b/TMessagesProj/src/emojis/apple/emoji/0_1113.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1114.png b/TMessagesProj/src/emojis/apple/emoji/0_1114.png index b207262df7..033effa12a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1114.png and b/TMessagesProj/src/emojis/apple/emoji/0_1114.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1115.png b/TMessagesProj/src/emojis/apple/emoji/0_1115.png index bfacaf65e8..9441ff8b30 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1115.png and b/TMessagesProj/src/emojis/apple/emoji/0_1115.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1116.png b/TMessagesProj/src/emojis/apple/emoji/0_1116.png index 4fd9b9c8b5..5bec8cbaaa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1116.png and b/TMessagesProj/src/emojis/apple/emoji/0_1116.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1117.png b/TMessagesProj/src/emojis/apple/emoji/0_1117.png index 60f0fde59e..52a611d0aa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1117.png and b/TMessagesProj/src/emojis/apple/emoji/0_1117.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1118.png b/TMessagesProj/src/emojis/apple/emoji/0_1118.png index 617b648370..90ccec49a6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1118.png and b/TMessagesProj/src/emojis/apple/emoji/0_1118.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1119.png b/TMessagesProj/src/emojis/apple/emoji/0_1119.png index cc590f070d..526d789ec1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1119.png and b/TMessagesProj/src/emojis/apple/emoji/0_1119.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_112.png b/TMessagesProj/src/emojis/apple/emoji/0_112.png index c07feae0b6..2620a740cd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_112.png and b/TMessagesProj/src/emojis/apple/emoji/0_112.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1120.png b/TMessagesProj/src/emojis/apple/emoji/0_1120.png index 4d0893debc..92bd178544 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1120.png and b/TMessagesProj/src/emojis/apple/emoji/0_1120.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1121.png b/TMessagesProj/src/emojis/apple/emoji/0_1121.png index 8f14dd5d4b..6a9a2d72f9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1121.png and b/TMessagesProj/src/emojis/apple/emoji/0_1121.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1122.png b/TMessagesProj/src/emojis/apple/emoji/0_1122.png index 4361ce1cc1..3e216b2753 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1122.png and b/TMessagesProj/src/emojis/apple/emoji/0_1122.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1123.png b/TMessagesProj/src/emojis/apple/emoji/0_1123.png index fad568fc7b..655b4b4a27 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1123.png and b/TMessagesProj/src/emojis/apple/emoji/0_1123.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1124.png b/TMessagesProj/src/emojis/apple/emoji/0_1124.png index 4f4074b675..0917f2cab6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1124.png and b/TMessagesProj/src/emojis/apple/emoji/0_1124.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1125.png b/TMessagesProj/src/emojis/apple/emoji/0_1125.png index 61f6a9a721..80781406be 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1125.png and b/TMessagesProj/src/emojis/apple/emoji/0_1125.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1126.png b/TMessagesProj/src/emojis/apple/emoji/0_1126.png index 5b0e31c74f..964cccb6f5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1126.png and b/TMessagesProj/src/emojis/apple/emoji/0_1126.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1127.png b/TMessagesProj/src/emojis/apple/emoji/0_1127.png index 15d6c7ca38..f31c90f1dc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1127.png and b/TMessagesProj/src/emojis/apple/emoji/0_1127.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1128.png b/TMessagesProj/src/emojis/apple/emoji/0_1128.png index 51ff8aec07..0573075fb7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1128.png and b/TMessagesProj/src/emojis/apple/emoji/0_1128.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1129.png b/TMessagesProj/src/emojis/apple/emoji/0_1129.png index de830f9c3f..fa68d3f538 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1129.png and b/TMessagesProj/src/emojis/apple/emoji/0_1129.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_113.png b/TMessagesProj/src/emojis/apple/emoji/0_113.png index 86d94c4fd1..0355ae99e6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_113.png and b/TMessagesProj/src/emojis/apple/emoji/0_113.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1130.png b/TMessagesProj/src/emojis/apple/emoji/0_1130.png index 7670ce0ef4..d3caaab554 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1130.png and b/TMessagesProj/src/emojis/apple/emoji/0_1130.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1131.png b/TMessagesProj/src/emojis/apple/emoji/0_1131.png index 70fa8c4c6d..99a7f077c7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1131.png and b/TMessagesProj/src/emojis/apple/emoji/0_1131.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1132.png b/TMessagesProj/src/emojis/apple/emoji/0_1132.png index 7deb146cb0..d5291ed831 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1132.png and b/TMessagesProj/src/emojis/apple/emoji/0_1132.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1133.png b/TMessagesProj/src/emojis/apple/emoji/0_1133.png index 9aedb64e43..c4a54ff2b4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1133.png and b/TMessagesProj/src/emojis/apple/emoji/0_1133.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1134.png b/TMessagesProj/src/emojis/apple/emoji/0_1134.png index 0967106469..d3a03b6d0f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1134.png and b/TMessagesProj/src/emojis/apple/emoji/0_1134.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1135.png b/TMessagesProj/src/emojis/apple/emoji/0_1135.png index 40f9c1a71e..3031c0b449 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1135.png and b/TMessagesProj/src/emojis/apple/emoji/0_1135.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1136.png b/TMessagesProj/src/emojis/apple/emoji/0_1136.png index 440f7e929e..e3a79db5fc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1136.png and b/TMessagesProj/src/emojis/apple/emoji/0_1136.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1137.png b/TMessagesProj/src/emojis/apple/emoji/0_1137.png index ee04fa1f2d..891a4da08c 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1137.png and b/TMessagesProj/src/emojis/apple/emoji/0_1137.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1138.png b/TMessagesProj/src/emojis/apple/emoji/0_1138.png index 2149878a62..4e82b73b91 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1138.png and b/TMessagesProj/src/emojis/apple/emoji/0_1138.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1139.png b/TMessagesProj/src/emojis/apple/emoji/0_1139.png index b6b768644a..55db5bd6f1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1139.png and b/TMessagesProj/src/emojis/apple/emoji/0_1139.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_114.png b/TMessagesProj/src/emojis/apple/emoji/0_114.png index ca3174df42..7d7551b20c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_114.png and b/TMessagesProj/src/emojis/apple/emoji/0_114.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1140.png b/TMessagesProj/src/emojis/apple/emoji/0_1140.png index 1faac35048..68d15d6d49 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1140.png and b/TMessagesProj/src/emojis/apple/emoji/0_1140.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1141.png b/TMessagesProj/src/emojis/apple/emoji/0_1141.png index a61d9298ce..d897f924fa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1141.png and b/TMessagesProj/src/emojis/apple/emoji/0_1141.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1142.png b/TMessagesProj/src/emojis/apple/emoji/0_1142.png index 4b2a4d4e3f..dadaf8fead 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1142.png and b/TMessagesProj/src/emojis/apple/emoji/0_1142.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1143.png b/TMessagesProj/src/emojis/apple/emoji/0_1143.png index c306dd1bb5..e086ea4e23 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1143.png and b/TMessagesProj/src/emojis/apple/emoji/0_1143.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1144.png b/TMessagesProj/src/emojis/apple/emoji/0_1144.png index 426a471cc6..754a54c7a1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1144.png and b/TMessagesProj/src/emojis/apple/emoji/0_1144.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1145.png b/TMessagesProj/src/emojis/apple/emoji/0_1145.png index 3c25258c80..348e71cbc8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1145.png and b/TMessagesProj/src/emojis/apple/emoji/0_1145.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1146.png b/TMessagesProj/src/emojis/apple/emoji/0_1146.png index a96e9a413d..5edfe9e6a9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1146.png and b/TMessagesProj/src/emojis/apple/emoji/0_1146.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1147.png b/TMessagesProj/src/emojis/apple/emoji/0_1147.png index ef13ac0913..ecfdae8cbd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1147.png and b/TMessagesProj/src/emojis/apple/emoji/0_1147.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1148.png b/TMessagesProj/src/emojis/apple/emoji/0_1148.png index efe36d2816..7e600fcd30 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1148.png and b/TMessagesProj/src/emojis/apple/emoji/0_1148.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1149.png b/TMessagesProj/src/emojis/apple/emoji/0_1149.png index dfc9bc503d..9d0d05181b 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1149.png and b/TMessagesProj/src/emojis/apple/emoji/0_1149.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_115.png b/TMessagesProj/src/emojis/apple/emoji/0_115.png index 07472b916b..1c61e8c891 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_115.png and b/TMessagesProj/src/emojis/apple/emoji/0_115.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1150.png b/TMessagesProj/src/emojis/apple/emoji/0_1150.png index be9a8807e3..0dcea1fd13 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1150.png and b/TMessagesProj/src/emojis/apple/emoji/0_1150.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1151.png b/TMessagesProj/src/emojis/apple/emoji/0_1151.png index a3ffcfaf48..d5febd3aaf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1151.png and b/TMessagesProj/src/emojis/apple/emoji/0_1151.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1152.png b/TMessagesProj/src/emojis/apple/emoji/0_1152.png index 48ba7c6cc7..438b1c471a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1152.png and b/TMessagesProj/src/emojis/apple/emoji/0_1152.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1153.png b/TMessagesProj/src/emojis/apple/emoji/0_1153.png index d7368b6f39..b05a8b2672 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1153.png and b/TMessagesProj/src/emojis/apple/emoji/0_1153.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1154.png b/TMessagesProj/src/emojis/apple/emoji/0_1154.png index 3748b4cc00..1b4e64e343 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1154.png and b/TMessagesProj/src/emojis/apple/emoji/0_1154.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1155.png b/TMessagesProj/src/emojis/apple/emoji/0_1155.png index dea5faaea1..9b92501f1b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1155.png and b/TMessagesProj/src/emojis/apple/emoji/0_1155.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1156.png b/TMessagesProj/src/emojis/apple/emoji/0_1156.png index 5bd60b11ae..746a036ff6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1156.png and b/TMessagesProj/src/emojis/apple/emoji/0_1156.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1157.png b/TMessagesProj/src/emojis/apple/emoji/0_1157.png index 6a29ee3a48..fe4d0f2582 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1157.png and b/TMessagesProj/src/emojis/apple/emoji/0_1157.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1158.png b/TMessagesProj/src/emojis/apple/emoji/0_1158.png index a4a573985c..278b448152 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1158.png and b/TMessagesProj/src/emojis/apple/emoji/0_1158.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1159.png b/TMessagesProj/src/emojis/apple/emoji/0_1159.png index 2fe3475371..ded03640a3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1159.png and b/TMessagesProj/src/emojis/apple/emoji/0_1159.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_116.png b/TMessagesProj/src/emojis/apple/emoji/0_116.png index d7b8b73ca8..5661cea414 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_116.png and b/TMessagesProj/src/emojis/apple/emoji/0_116.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1160.png b/TMessagesProj/src/emojis/apple/emoji/0_1160.png index 304d7fa1bb..c34b3deeb7 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1160.png and b/TMessagesProj/src/emojis/apple/emoji/0_1160.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1161.png b/TMessagesProj/src/emojis/apple/emoji/0_1161.png index ba561b99e1..46d20f606c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1161.png and b/TMessagesProj/src/emojis/apple/emoji/0_1161.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1162.png b/TMessagesProj/src/emojis/apple/emoji/0_1162.png index ccdcbfa029..8e13e958c6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1162.png and b/TMessagesProj/src/emojis/apple/emoji/0_1162.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1163.png b/TMessagesProj/src/emojis/apple/emoji/0_1163.png index 929e4abb15..836d73c58a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1163.png and b/TMessagesProj/src/emojis/apple/emoji/0_1163.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1164.png b/TMessagesProj/src/emojis/apple/emoji/0_1164.png index 6cb6f7c287..6798996f1f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1164.png and b/TMessagesProj/src/emojis/apple/emoji/0_1164.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1165.png b/TMessagesProj/src/emojis/apple/emoji/0_1165.png index 44ea67288c..4ec3deccdb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1165.png and b/TMessagesProj/src/emojis/apple/emoji/0_1165.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1166.png b/TMessagesProj/src/emojis/apple/emoji/0_1166.png index b63862fa83..6ee256687d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1166.png and b/TMessagesProj/src/emojis/apple/emoji/0_1166.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1167.png b/TMessagesProj/src/emojis/apple/emoji/0_1167.png index 8e2d81219a..123fea5f4c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1167.png and b/TMessagesProj/src/emojis/apple/emoji/0_1167.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1168.png b/TMessagesProj/src/emojis/apple/emoji/0_1168.png index 48607fd80b..b3c45e48ec 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1168.png and b/TMessagesProj/src/emojis/apple/emoji/0_1168.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1169.png b/TMessagesProj/src/emojis/apple/emoji/0_1169.png index fc0d4ccf28..9cd05edcbd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1169.png and b/TMessagesProj/src/emojis/apple/emoji/0_1169.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_117.png b/TMessagesProj/src/emojis/apple/emoji/0_117.png index 63eed1a774..d507526470 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_117.png and b/TMessagesProj/src/emojis/apple/emoji/0_117.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1170.png b/TMessagesProj/src/emojis/apple/emoji/0_1170.png index a4ba238ede..dc10f69e17 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1170.png and b/TMessagesProj/src/emojis/apple/emoji/0_1170.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1171.png b/TMessagesProj/src/emojis/apple/emoji/0_1171.png index 8c65ed75bc..7f7f20469a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1171.png and b/TMessagesProj/src/emojis/apple/emoji/0_1171.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1172.png b/TMessagesProj/src/emojis/apple/emoji/0_1172.png index 4e163e5185..54e2a9d443 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1172.png and b/TMessagesProj/src/emojis/apple/emoji/0_1172.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1173.png b/TMessagesProj/src/emojis/apple/emoji/0_1173.png index 1a647ef1cb..422a1d3937 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1173.png and b/TMessagesProj/src/emojis/apple/emoji/0_1173.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1174.png b/TMessagesProj/src/emojis/apple/emoji/0_1174.png index 19d8c2396e..8b17945150 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1174.png and b/TMessagesProj/src/emojis/apple/emoji/0_1174.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1175.png b/TMessagesProj/src/emojis/apple/emoji/0_1175.png index 6bfdf7b4ca..7a1bb4ac02 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1175.png and b/TMessagesProj/src/emojis/apple/emoji/0_1175.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1176.png b/TMessagesProj/src/emojis/apple/emoji/0_1176.png index 4cb111f366..127ddc27bb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1176.png and b/TMessagesProj/src/emojis/apple/emoji/0_1176.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1177.png b/TMessagesProj/src/emojis/apple/emoji/0_1177.png index d878384609..07e722e798 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1177.png and b/TMessagesProj/src/emojis/apple/emoji/0_1177.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1178.png b/TMessagesProj/src/emojis/apple/emoji/0_1178.png index 4fc3193c6e..d58bb67e38 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1178.png and b/TMessagesProj/src/emojis/apple/emoji/0_1178.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1179.png b/TMessagesProj/src/emojis/apple/emoji/0_1179.png index 20919a69aa..b7277fedab 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1179.png and b/TMessagesProj/src/emojis/apple/emoji/0_1179.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_118.png b/TMessagesProj/src/emojis/apple/emoji/0_118.png index 8cf1ebb5cc..efd3d1ec1f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_118.png and b/TMessagesProj/src/emojis/apple/emoji/0_118.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1180.png b/TMessagesProj/src/emojis/apple/emoji/0_1180.png index 8419a903f3..7d6cbf08c5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1180.png and b/TMessagesProj/src/emojis/apple/emoji/0_1180.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1181.png b/TMessagesProj/src/emojis/apple/emoji/0_1181.png index d564dcf65d..f2eac83682 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1181.png and b/TMessagesProj/src/emojis/apple/emoji/0_1181.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1182.png b/TMessagesProj/src/emojis/apple/emoji/0_1182.png index 4720afadcf..a9b6eeb5fd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1182.png and b/TMessagesProj/src/emojis/apple/emoji/0_1182.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1183.png b/TMessagesProj/src/emojis/apple/emoji/0_1183.png index 4242106517..f9ffc15d25 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1183.png and b/TMessagesProj/src/emojis/apple/emoji/0_1183.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1184.png b/TMessagesProj/src/emojis/apple/emoji/0_1184.png index 3947849727..bdebfd0adf 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1184.png and b/TMessagesProj/src/emojis/apple/emoji/0_1184.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1185.png b/TMessagesProj/src/emojis/apple/emoji/0_1185.png index ddca52adaf..be6a5e6a40 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1185.png and b/TMessagesProj/src/emojis/apple/emoji/0_1185.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1186.png b/TMessagesProj/src/emojis/apple/emoji/0_1186.png index 14041f9931..19c2802fe4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1186.png and b/TMessagesProj/src/emojis/apple/emoji/0_1186.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1187.png b/TMessagesProj/src/emojis/apple/emoji/0_1187.png index 79d5978528..59acd587d7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1187.png and b/TMessagesProj/src/emojis/apple/emoji/0_1187.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1188.png b/TMessagesProj/src/emojis/apple/emoji/0_1188.png index 28c69a9c09..bda3f1f607 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1188.png and b/TMessagesProj/src/emojis/apple/emoji/0_1188.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1189.png b/TMessagesProj/src/emojis/apple/emoji/0_1189.png index 243d012933..070628f76d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1189.png and b/TMessagesProj/src/emojis/apple/emoji/0_1189.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_119.png b/TMessagesProj/src/emojis/apple/emoji/0_119.png index a14e4e7ae3..880f93f608 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_119.png and b/TMessagesProj/src/emojis/apple/emoji/0_119.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1190.png b/TMessagesProj/src/emojis/apple/emoji/0_1190.png index 7831a9674e..3cfc376e44 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1190.png and b/TMessagesProj/src/emojis/apple/emoji/0_1190.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1191.png b/TMessagesProj/src/emojis/apple/emoji/0_1191.png index 4a7f5bcb45..377c7ab542 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1191.png and b/TMessagesProj/src/emojis/apple/emoji/0_1191.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1192.png b/TMessagesProj/src/emojis/apple/emoji/0_1192.png index 228f651061..77b5b73189 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1192.png and b/TMessagesProj/src/emojis/apple/emoji/0_1192.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1193.png b/TMessagesProj/src/emojis/apple/emoji/0_1193.png index 7286cf4ac2..ecf94f100c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1193.png and b/TMessagesProj/src/emojis/apple/emoji/0_1193.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1194.png b/TMessagesProj/src/emojis/apple/emoji/0_1194.png index 77c24d09c0..7c3235bff8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1194.png and b/TMessagesProj/src/emojis/apple/emoji/0_1194.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1195.png b/TMessagesProj/src/emojis/apple/emoji/0_1195.png index 437a1628fa..eebee24a79 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1195.png and b/TMessagesProj/src/emojis/apple/emoji/0_1195.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1196.png b/TMessagesProj/src/emojis/apple/emoji/0_1196.png index b2dedd78fb..5907f0a04b 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1196.png and b/TMessagesProj/src/emojis/apple/emoji/0_1196.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1197.png b/TMessagesProj/src/emojis/apple/emoji/0_1197.png index 3c37ae4429..34b9911dbe 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1197.png and b/TMessagesProj/src/emojis/apple/emoji/0_1197.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1198.png b/TMessagesProj/src/emojis/apple/emoji/0_1198.png index b7a20c8ab7..d8108897e0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1198.png and b/TMessagesProj/src/emojis/apple/emoji/0_1198.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1199.png b/TMessagesProj/src/emojis/apple/emoji/0_1199.png index 25280ff00d..51ad2311db 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1199.png and b/TMessagesProj/src/emojis/apple/emoji/0_1199.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_12.png b/TMessagesProj/src/emojis/apple/emoji/0_12.png index e9f79b81f4..08675733e8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_12.png and b/TMessagesProj/src/emojis/apple/emoji/0_12.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_120.png b/TMessagesProj/src/emojis/apple/emoji/0_120.png index 0141029ba1..c07d5d1e2d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_120.png and b/TMessagesProj/src/emojis/apple/emoji/0_120.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1200.png b/TMessagesProj/src/emojis/apple/emoji/0_1200.png index f396244475..0d2ba7b238 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1200.png and b/TMessagesProj/src/emojis/apple/emoji/0_1200.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1201.png b/TMessagesProj/src/emojis/apple/emoji/0_1201.png index efdc43438f..420a14a363 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1201.png and b/TMessagesProj/src/emojis/apple/emoji/0_1201.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1202.png b/TMessagesProj/src/emojis/apple/emoji/0_1202.png index 8a6b68b969..09fd601dce 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1202.png and b/TMessagesProj/src/emojis/apple/emoji/0_1202.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1203.png b/TMessagesProj/src/emojis/apple/emoji/0_1203.png index d05a9e60bc..0df86446f5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1203.png and b/TMessagesProj/src/emojis/apple/emoji/0_1203.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1204.png b/TMessagesProj/src/emojis/apple/emoji/0_1204.png index f6f3c27a7e..496840d5be 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1204.png and b/TMessagesProj/src/emojis/apple/emoji/0_1204.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1205.png b/TMessagesProj/src/emojis/apple/emoji/0_1205.png index 293c1153bc..f2b951d0c9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1205.png and b/TMessagesProj/src/emojis/apple/emoji/0_1205.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1206.png b/TMessagesProj/src/emojis/apple/emoji/0_1206.png index 1ac5cc4233..0f28d769fe 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1206.png and b/TMessagesProj/src/emojis/apple/emoji/0_1206.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1207.png b/TMessagesProj/src/emojis/apple/emoji/0_1207.png index 4f00e1895c..4df6c2a21a 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1207.png and b/TMessagesProj/src/emojis/apple/emoji/0_1207.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1208.png b/TMessagesProj/src/emojis/apple/emoji/0_1208.png index 471c34f43a..35a675dce9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1208.png and b/TMessagesProj/src/emojis/apple/emoji/0_1208.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1209.png b/TMessagesProj/src/emojis/apple/emoji/0_1209.png index f62e254d09..31bcbf81b0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1209.png and b/TMessagesProj/src/emojis/apple/emoji/0_1209.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_121.png b/TMessagesProj/src/emojis/apple/emoji/0_121.png index ab654fb4cb..0ec4e1794d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_121.png and b/TMessagesProj/src/emojis/apple/emoji/0_121.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1210.png b/TMessagesProj/src/emojis/apple/emoji/0_1210.png index ea02e0a390..55bcfa35cd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1210.png and b/TMessagesProj/src/emojis/apple/emoji/0_1210.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1211.png b/TMessagesProj/src/emojis/apple/emoji/0_1211.png index 912d3f5d95..aac2a9ab4f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1211.png and b/TMessagesProj/src/emojis/apple/emoji/0_1211.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1212.png b/TMessagesProj/src/emojis/apple/emoji/0_1212.png index 310b8629f4..46da51e6b0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1212.png and b/TMessagesProj/src/emojis/apple/emoji/0_1212.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1213.png b/TMessagesProj/src/emojis/apple/emoji/0_1213.png index 291cb6a154..b064b3813a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1213.png and b/TMessagesProj/src/emojis/apple/emoji/0_1213.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1214.png b/TMessagesProj/src/emojis/apple/emoji/0_1214.png index b4a5281f62..1f5ff39381 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1214.png and b/TMessagesProj/src/emojis/apple/emoji/0_1214.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1215.png b/TMessagesProj/src/emojis/apple/emoji/0_1215.png index 97a53b6a3f..e42d33d84b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1215.png and b/TMessagesProj/src/emojis/apple/emoji/0_1215.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1216.png b/TMessagesProj/src/emojis/apple/emoji/0_1216.png index cfbae787ea..b3d9e7a88e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1216.png and b/TMessagesProj/src/emojis/apple/emoji/0_1216.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1217.png b/TMessagesProj/src/emojis/apple/emoji/0_1217.png index 5c0d443c92..a133cc9f7c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1217.png and b/TMessagesProj/src/emojis/apple/emoji/0_1217.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1218.png b/TMessagesProj/src/emojis/apple/emoji/0_1218.png index f13f4fc100..4409d57fd6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1218.png and b/TMessagesProj/src/emojis/apple/emoji/0_1218.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1219.png b/TMessagesProj/src/emojis/apple/emoji/0_1219.png index 1c76319efc..3268986860 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1219.png and b/TMessagesProj/src/emojis/apple/emoji/0_1219.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_122.png b/TMessagesProj/src/emojis/apple/emoji/0_122.png index 891980ce73..d421583694 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_122.png and b/TMessagesProj/src/emojis/apple/emoji/0_122.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1220.png b/TMessagesProj/src/emojis/apple/emoji/0_1220.png index 4667f614d6..3c81d5da59 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1220.png and b/TMessagesProj/src/emojis/apple/emoji/0_1220.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1221.png b/TMessagesProj/src/emojis/apple/emoji/0_1221.png index 88b2c46b30..092be145da 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1221.png and b/TMessagesProj/src/emojis/apple/emoji/0_1221.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1222.png b/TMessagesProj/src/emojis/apple/emoji/0_1222.png index bd2d138325..706efef5f2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1222.png and b/TMessagesProj/src/emojis/apple/emoji/0_1222.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1223.png b/TMessagesProj/src/emojis/apple/emoji/0_1223.png index df4d408b4e..8895229478 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1223.png and b/TMessagesProj/src/emojis/apple/emoji/0_1223.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1224.png b/TMessagesProj/src/emojis/apple/emoji/0_1224.png index 29d2e71a08..a75abcac79 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1224.png and b/TMessagesProj/src/emojis/apple/emoji/0_1224.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1225.png b/TMessagesProj/src/emojis/apple/emoji/0_1225.png index 9141eb42dd..d9dc1aefab 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1225.png and b/TMessagesProj/src/emojis/apple/emoji/0_1225.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1226.png b/TMessagesProj/src/emojis/apple/emoji/0_1226.png index 308721233b..0fb5a4e183 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1226.png and b/TMessagesProj/src/emojis/apple/emoji/0_1226.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1227.png b/TMessagesProj/src/emojis/apple/emoji/0_1227.png index a8a9270d9a..653cf9b9a6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1227.png and b/TMessagesProj/src/emojis/apple/emoji/0_1227.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1228.png b/TMessagesProj/src/emojis/apple/emoji/0_1228.png index e59e852677..133f37f23a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1228.png and b/TMessagesProj/src/emojis/apple/emoji/0_1228.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1229.png b/TMessagesProj/src/emojis/apple/emoji/0_1229.png index 601315b74e..873771537e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1229.png and b/TMessagesProj/src/emojis/apple/emoji/0_1229.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_123.png b/TMessagesProj/src/emojis/apple/emoji/0_123.png index 02c515736e..de5ea75d99 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_123.png and b/TMessagesProj/src/emojis/apple/emoji/0_123.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1230.png b/TMessagesProj/src/emojis/apple/emoji/0_1230.png index e496e8416f..909f977c73 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1230.png and b/TMessagesProj/src/emojis/apple/emoji/0_1230.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1231.png b/TMessagesProj/src/emojis/apple/emoji/0_1231.png index d26e02093c..e155aded64 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1231.png and b/TMessagesProj/src/emojis/apple/emoji/0_1231.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1232.png b/TMessagesProj/src/emojis/apple/emoji/0_1232.png index 41e0810864..c0117ede22 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1232.png and b/TMessagesProj/src/emojis/apple/emoji/0_1232.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1233.png b/TMessagesProj/src/emojis/apple/emoji/0_1233.png index e189f43d3a..a65b94ba6f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1233.png and b/TMessagesProj/src/emojis/apple/emoji/0_1233.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1234.png b/TMessagesProj/src/emojis/apple/emoji/0_1234.png index 9c09de94eb..c9577f7e5d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1234.png and b/TMessagesProj/src/emojis/apple/emoji/0_1234.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1235.png b/TMessagesProj/src/emojis/apple/emoji/0_1235.png index 792cbe1237..0b2a50e187 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1235.png and b/TMessagesProj/src/emojis/apple/emoji/0_1235.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1236.png b/TMessagesProj/src/emojis/apple/emoji/0_1236.png index 19bd1a850d..d846d11a11 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1236.png and b/TMessagesProj/src/emojis/apple/emoji/0_1236.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1237.png b/TMessagesProj/src/emojis/apple/emoji/0_1237.png index 693be2a6a9..30c6e8a1bc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1237.png and b/TMessagesProj/src/emojis/apple/emoji/0_1237.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1238.png b/TMessagesProj/src/emojis/apple/emoji/0_1238.png index 0a8db9d42f..6767727c68 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1238.png and b/TMessagesProj/src/emojis/apple/emoji/0_1238.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1239.png b/TMessagesProj/src/emojis/apple/emoji/0_1239.png index e6c7ec64de..26b82f5323 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1239.png and b/TMessagesProj/src/emojis/apple/emoji/0_1239.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_124.png b/TMessagesProj/src/emojis/apple/emoji/0_124.png index 2da71f75ee..f17ab5ca85 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_124.png and b/TMessagesProj/src/emojis/apple/emoji/0_124.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1240.png b/TMessagesProj/src/emojis/apple/emoji/0_1240.png index 056443d97e..84446dc3a2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1240.png and b/TMessagesProj/src/emojis/apple/emoji/0_1240.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1241.png b/TMessagesProj/src/emojis/apple/emoji/0_1241.png index ab3d7cd4d3..a83416df48 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1241.png and b/TMessagesProj/src/emojis/apple/emoji/0_1241.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1242.png b/TMessagesProj/src/emojis/apple/emoji/0_1242.png index 64d16e8e45..d704252bcb 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1242.png and b/TMessagesProj/src/emojis/apple/emoji/0_1242.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1243.png b/TMessagesProj/src/emojis/apple/emoji/0_1243.png index 47668d4e8f..3724491dec 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1243.png and b/TMessagesProj/src/emojis/apple/emoji/0_1243.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1244.png b/TMessagesProj/src/emojis/apple/emoji/0_1244.png index 429c1847a1..43bb243682 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1244.png and b/TMessagesProj/src/emojis/apple/emoji/0_1244.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1245.png b/TMessagesProj/src/emojis/apple/emoji/0_1245.png index 13a2a3bba3..3434af5d24 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1245.png and b/TMessagesProj/src/emojis/apple/emoji/0_1245.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1246.png b/TMessagesProj/src/emojis/apple/emoji/0_1246.png index 2fcdce4df3..5d77c0c592 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1246.png and b/TMessagesProj/src/emojis/apple/emoji/0_1246.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1247.png b/TMessagesProj/src/emojis/apple/emoji/0_1247.png index 5a158db2cb..b7c61b0bc8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1247.png and b/TMessagesProj/src/emojis/apple/emoji/0_1247.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1248.png b/TMessagesProj/src/emojis/apple/emoji/0_1248.png index 680379d4f6..0e360ff6ad 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1248.png and b/TMessagesProj/src/emojis/apple/emoji/0_1248.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1249.png b/TMessagesProj/src/emojis/apple/emoji/0_1249.png index 94ab81e86e..6f06cfa70c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1249.png and b/TMessagesProj/src/emojis/apple/emoji/0_1249.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_125.png b/TMessagesProj/src/emojis/apple/emoji/0_125.png index d755b18d79..c0d0bf7f75 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_125.png and b/TMessagesProj/src/emojis/apple/emoji/0_125.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1250.png b/TMessagesProj/src/emojis/apple/emoji/0_1250.png index 3bd743f429..5c9502af69 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1250.png and b/TMessagesProj/src/emojis/apple/emoji/0_1250.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1251.png b/TMessagesProj/src/emojis/apple/emoji/0_1251.png index 72ef0c375a..8c8cdec58b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1251.png and b/TMessagesProj/src/emojis/apple/emoji/0_1251.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1252.png b/TMessagesProj/src/emojis/apple/emoji/0_1252.png index ddc82829a6..3df7473ce2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1252.png and b/TMessagesProj/src/emojis/apple/emoji/0_1252.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1253.png b/TMessagesProj/src/emojis/apple/emoji/0_1253.png index 3bb475b780..24abff0efb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1253.png and b/TMessagesProj/src/emojis/apple/emoji/0_1253.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1254.png b/TMessagesProj/src/emojis/apple/emoji/0_1254.png index 211269398a..be04a6be11 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1254.png and b/TMessagesProj/src/emojis/apple/emoji/0_1254.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1255.png b/TMessagesProj/src/emojis/apple/emoji/0_1255.png index 72d06cc443..52431d1199 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1255.png and b/TMessagesProj/src/emojis/apple/emoji/0_1255.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1256.png b/TMessagesProj/src/emojis/apple/emoji/0_1256.png index fbe8135f6d..f0d32b5598 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1256.png and b/TMessagesProj/src/emojis/apple/emoji/0_1256.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1257.png b/TMessagesProj/src/emojis/apple/emoji/0_1257.png index 4ef0101d5c..490d3c4b83 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1257.png and b/TMessagesProj/src/emojis/apple/emoji/0_1257.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1258.png b/TMessagesProj/src/emojis/apple/emoji/0_1258.png index 6499f46282..55d4784ea5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1258.png and b/TMessagesProj/src/emojis/apple/emoji/0_1258.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1259.png b/TMessagesProj/src/emojis/apple/emoji/0_1259.png index 5f6a9a34f4..f81c462314 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1259.png and b/TMessagesProj/src/emojis/apple/emoji/0_1259.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_126.png b/TMessagesProj/src/emojis/apple/emoji/0_126.png index 3c9ac1cca9..94a8d56fba 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_126.png and b/TMessagesProj/src/emojis/apple/emoji/0_126.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1260.png b/TMessagesProj/src/emojis/apple/emoji/0_1260.png index dae8873308..5a64860e63 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1260.png and b/TMessagesProj/src/emojis/apple/emoji/0_1260.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1261.png b/TMessagesProj/src/emojis/apple/emoji/0_1261.png index ab3561a8c3..7e8e8c1fee 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1261.png and b/TMessagesProj/src/emojis/apple/emoji/0_1261.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1262.png b/TMessagesProj/src/emojis/apple/emoji/0_1262.png index 5aaf3c8649..8547a4d2ea 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1262.png and b/TMessagesProj/src/emojis/apple/emoji/0_1262.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1263.png b/TMessagesProj/src/emojis/apple/emoji/0_1263.png index 58cc6f95c4..ab87d147e3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1263.png and b/TMessagesProj/src/emojis/apple/emoji/0_1263.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1264.png b/TMessagesProj/src/emojis/apple/emoji/0_1264.png index 5915188a41..489cf19c71 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1264.png and b/TMessagesProj/src/emojis/apple/emoji/0_1264.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1265.png b/TMessagesProj/src/emojis/apple/emoji/0_1265.png index e7ec871553..e0f5555bb5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1265.png and b/TMessagesProj/src/emojis/apple/emoji/0_1265.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1266.png b/TMessagesProj/src/emojis/apple/emoji/0_1266.png index 40edbf4135..d6221b04f5 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1266.png and b/TMessagesProj/src/emojis/apple/emoji/0_1266.png differ
[Binary-only asset updates: every remaining Apple emoji sprite in this range under TMessagesProj/src/emojis/apple/emoji/ (0_1267.png–0_1510.png, plus the interleaved 0_127.png–0_151.png and 0_13.png–0_15.png entries) is replaced with a new blob. Each hunk follows the same three-line pattern: "diff --git a/<sprite> b/<sprite>", "index <old blob>..<new blob> 100644", "Binary files a/<sprite> and b/<sprite> differ".]
diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1511.png b/TMessagesProj/src/emojis/apple/emoji/0_1511.png
index a24e738ea5..07b90efec3 100644
Binary files
a/TMessagesProj/src/emojis/apple/emoji/0_1511.png and b/TMessagesProj/src/emojis/apple/emoji/0_1511.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1512.png b/TMessagesProj/src/emojis/apple/emoji/0_1512.png index c42f88e99b..33ad6c1bb6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1512.png and b/TMessagesProj/src/emojis/apple/emoji/0_1512.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1513.png b/TMessagesProj/src/emojis/apple/emoji/0_1513.png index c9fdd8cb72..4d6940a973 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1513.png and b/TMessagesProj/src/emojis/apple/emoji/0_1513.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1514.png b/TMessagesProj/src/emojis/apple/emoji/0_1514.png index 9d9da9c377..3db82e9f03 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1514.png and b/TMessagesProj/src/emojis/apple/emoji/0_1514.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1515.png b/TMessagesProj/src/emojis/apple/emoji/0_1515.png index 1a1f807ccf..530e874fce 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1515.png and b/TMessagesProj/src/emojis/apple/emoji/0_1515.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1516.png b/TMessagesProj/src/emojis/apple/emoji/0_1516.png index 55ede00ad8..5f9457bcde 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1516.png and b/TMessagesProj/src/emojis/apple/emoji/0_1516.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1517.png b/TMessagesProj/src/emojis/apple/emoji/0_1517.png index 07ad02c3b4..3d1ddf5063 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1517.png and b/TMessagesProj/src/emojis/apple/emoji/0_1517.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1518.png b/TMessagesProj/src/emojis/apple/emoji/0_1518.png index da031cb4b5..4c3f81631d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1518.png and b/TMessagesProj/src/emojis/apple/emoji/0_1518.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1519.png b/TMessagesProj/src/emojis/apple/emoji/0_1519.png index ec2d21c137..2ea9d777ba 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1519.png and b/TMessagesProj/src/emojis/apple/emoji/0_1519.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_152.png b/TMessagesProj/src/emojis/apple/emoji/0_152.png index f119d4c396..c0073aec1b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_152.png and b/TMessagesProj/src/emojis/apple/emoji/0_152.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1520.png b/TMessagesProj/src/emojis/apple/emoji/0_1520.png index 5a45dbb26a..df18727d65 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1520.png and b/TMessagesProj/src/emojis/apple/emoji/0_1520.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1521.png b/TMessagesProj/src/emojis/apple/emoji/0_1521.png index b617abe9a9..e3b98dfa7a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1521.png and b/TMessagesProj/src/emojis/apple/emoji/0_1521.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1522.png b/TMessagesProj/src/emojis/apple/emoji/0_1522.png index 7a65039cbf..465ef319d7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1522.png and b/TMessagesProj/src/emojis/apple/emoji/0_1522.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1523.png b/TMessagesProj/src/emojis/apple/emoji/0_1523.png index 80c749bb9c..4d6c09acf3 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1523.png and b/TMessagesProj/src/emojis/apple/emoji/0_1523.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1524.png b/TMessagesProj/src/emojis/apple/emoji/0_1524.png index 0a58190455..6168965bf3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1524.png and b/TMessagesProj/src/emojis/apple/emoji/0_1524.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1525.png b/TMessagesProj/src/emojis/apple/emoji/0_1525.png index a749ebd1a3..a4f955adf0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1525.png and b/TMessagesProj/src/emojis/apple/emoji/0_1525.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1526.png b/TMessagesProj/src/emojis/apple/emoji/0_1526.png index cdc154c086..103f6d4191 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1526.png and b/TMessagesProj/src/emojis/apple/emoji/0_1526.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1527.png b/TMessagesProj/src/emojis/apple/emoji/0_1527.png index 86f235ece5..624bc997b6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1527.png and b/TMessagesProj/src/emojis/apple/emoji/0_1527.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1528.png b/TMessagesProj/src/emojis/apple/emoji/0_1528.png index 16dddf5a9c..8e961255e4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1528.png and b/TMessagesProj/src/emojis/apple/emoji/0_1528.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1529.png b/TMessagesProj/src/emojis/apple/emoji/0_1529.png index a46e563672..cbe9d48001 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1529.png and b/TMessagesProj/src/emojis/apple/emoji/0_1529.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_153.png b/TMessagesProj/src/emojis/apple/emoji/0_153.png index d293d37f34..76d3daf15c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_153.png and b/TMessagesProj/src/emojis/apple/emoji/0_153.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1530.png b/TMessagesProj/src/emojis/apple/emoji/0_1530.png index 2f9915e430..83d3b62197 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1530.png and b/TMessagesProj/src/emojis/apple/emoji/0_1530.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1531.png b/TMessagesProj/src/emojis/apple/emoji/0_1531.png index c877bd389e..4fc96cbfec 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1531.png and b/TMessagesProj/src/emojis/apple/emoji/0_1531.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1532.png b/TMessagesProj/src/emojis/apple/emoji/0_1532.png index 321ed133d6..461b30e624 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1532.png and b/TMessagesProj/src/emojis/apple/emoji/0_1532.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1533.png b/TMessagesProj/src/emojis/apple/emoji/0_1533.png index 6d01894064..8342fa5881 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1533.png and b/TMessagesProj/src/emojis/apple/emoji/0_1533.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1534.png b/TMessagesProj/src/emojis/apple/emoji/0_1534.png index 273c7e0769..181abcde72 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1534.png and b/TMessagesProj/src/emojis/apple/emoji/0_1534.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1535.png b/TMessagesProj/src/emojis/apple/emoji/0_1535.png index fdceedb267..b75363f2ca 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1535.png and b/TMessagesProj/src/emojis/apple/emoji/0_1535.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1536.png b/TMessagesProj/src/emojis/apple/emoji/0_1536.png index 7b793a4857..4c954b4ec3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1536.png and b/TMessagesProj/src/emojis/apple/emoji/0_1536.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1537.png b/TMessagesProj/src/emojis/apple/emoji/0_1537.png index e3bf8903f1..1ce2032ff6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1537.png and b/TMessagesProj/src/emojis/apple/emoji/0_1537.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1538.png b/TMessagesProj/src/emojis/apple/emoji/0_1538.png index 1e9f849878..61aa3df0ff 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1538.png and b/TMessagesProj/src/emojis/apple/emoji/0_1538.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1539.png b/TMessagesProj/src/emojis/apple/emoji/0_1539.png index 6febba9cff..0de8109b01 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1539.png and b/TMessagesProj/src/emojis/apple/emoji/0_1539.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_154.png b/TMessagesProj/src/emojis/apple/emoji/0_154.png index bcfdadcff3..2add17a922 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_154.png and b/TMessagesProj/src/emojis/apple/emoji/0_154.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1540.png b/TMessagesProj/src/emojis/apple/emoji/0_1540.png index a54bb3f4a6..c55831058b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1540.png and b/TMessagesProj/src/emojis/apple/emoji/0_1540.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1541.png b/TMessagesProj/src/emojis/apple/emoji/0_1541.png index b958255e52..83deed7653 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1541.png and b/TMessagesProj/src/emojis/apple/emoji/0_1541.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1542.png b/TMessagesProj/src/emojis/apple/emoji/0_1542.png index 9361e86933..6a60c8cff6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1542.png and b/TMessagesProj/src/emojis/apple/emoji/0_1542.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1543.png b/TMessagesProj/src/emojis/apple/emoji/0_1543.png index 0964496235..d85f2a78e7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1543.png and b/TMessagesProj/src/emojis/apple/emoji/0_1543.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1544.png b/TMessagesProj/src/emojis/apple/emoji/0_1544.png index 181025bc17..3a7dd4f04b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1544.png and b/TMessagesProj/src/emojis/apple/emoji/0_1544.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1545.png b/TMessagesProj/src/emojis/apple/emoji/0_1545.png index ae077bf1c6..52070f1e42 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1545.png and b/TMessagesProj/src/emojis/apple/emoji/0_1545.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1546.png b/TMessagesProj/src/emojis/apple/emoji/0_1546.png index 6f29195311..1e09dbf1a8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1546.png and b/TMessagesProj/src/emojis/apple/emoji/0_1546.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1547.png b/TMessagesProj/src/emojis/apple/emoji/0_1547.png index 47fe885259..bffa1596b4 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1547.png and b/TMessagesProj/src/emojis/apple/emoji/0_1547.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1548.png b/TMessagesProj/src/emojis/apple/emoji/0_1548.png index 2b5bfa2b11..cea9ef16fb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1548.png and b/TMessagesProj/src/emojis/apple/emoji/0_1548.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1549.png b/TMessagesProj/src/emojis/apple/emoji/0_1549.png index 63a5591033..606611bf98 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1549.png and b/TMessagesProj/src/emojis/apple/emoji/0_1549.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_155.png b/TMessagesProj/src/emojis/apple/emoji/0_155.png index 9667181b59..4f10344562 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_155.png and b/TMessagesProj/src/emojis/apple/emoji/0_155.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1550.png b/TMessagesProj/src/emojis/apple/emoji/0_1550.png index 9c8e6ea582..8aa4555a5c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1550.png and b/TMessagesProj/src/emojis/apple/emoji/0_1550.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1551.png b/TMessagesProj/src/emojis/apple/emoji/0_1551.png index 4c4d7b4c1a..ca2883347c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1551.png and b/TMessagesProj/src/emojis/apple/emoji/0_1551.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1552.png b/TMessagesProj/src/emojis/apple/emoji/0_1552.png index 4b17a83acc..805ba35d2e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1552.png and b/TMessagesProj/src/emojis/apple/emoji/0_1552.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1553.png b/TMessagesProj/src/emojis/apple/emoji/0_1553.png index ce6e9a467e..2c8defb452 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1553.png and b/TMessagesProj/src/emojis/apple/emoji/0_1553.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1554.png b/TMessagesProj/src/emojis/apple/emoji/0_1554.png index a2a63b7fb8..43d67c172c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1554.png and b/TMessagesProj/src/emojis/apple/emoji/0_1554.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1555.png b/TMessagesProj/src/emojis/apple/emoji/0_1555.png index 50f6c68526..cb02b0681b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1555.png and b/TMessagesProj/src/emojis/apple/emoji/0_1555.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1556.png b/TMessagesProj/src/emojis/apple/emoji/0_1556.png index 526b0bf137..4be52e7542 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1556.png and b/TMessagesProj/src/emojis/apple/emoji/0_1556.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1557.png b/TMessagesProj/src/emojis/apple/emoji/0_1557.png index 67b356b163..db73b42067 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1557.png and b/TMessagesProj/src/emojis/apple/emoji/0_1557.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1558.png b/TMessagesProj/src/emojis/apple/emoji/0_1558.png index 7df0b544f4..2d17783dba 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1558.png and b/TMessagesProj/src/emojis/apple/emoji/0_1558.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1559.png b/TMessagesProj/src/emojis/apple/emoji/0_1559.png index 5ae2336e4f..1986f9c74b 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1559.png and b/TMessagesProj/src/emojis/apple/emoji/0_1559.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_156.png b/TMessagesProj/src/emojis/apple/emoji/0_156.png index 6936d29d8a..6670d897ae 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_156.png and b/TMessagesProj/src/emojis/apple/emoji/0_156.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1560.png b/TMessagesProj/src/emojis/apple/emoji/0_1560.png index 01c45ea1fb..c1da317f0d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1560.png and b/TMessagesProj/src/emojis/apple/emoji/0_1560.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1561.png b/TMessagesProj/src/emojis/apple/emoji/0_1561.png index 87c60fbbbf..5feaba92bb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1561.png and b/TMessagesProj/src/emojis/apple/emoji/0_1561.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1562.png b/TMessagesProj/src/emojis/apple/emoji/0_1562.png index 323cf871cc..fd3f3c8aa2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1562.png and b/TMessagesProj/src/emojis/apple/emoji/0_1562.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1563.png b/TMessagesProj/src/emojis/apple/emoji/0_1563.png index 1a04559c7a..80679df624 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1563.png and b/TMessagesProj/src/emojis/apple/emoji/0_1563.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1564.png b/TMessagesProj/src/emojis/apple/emoji/0_1564.png index 89cb876a6c..8e0866ff11 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1564.png and b/TMessagesProj/src/emojis/apple/emoji/0_1564.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1565.png b/TMessagesProj/src/emojis/apple/emoji/0_1565.png index f51ba9cfc0..5085ba3974 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1565.png and b/TMessagesProj/src/emojis/apple/emoji/0_1565.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1566.png b/TMessagesProj/src/emojis/apple/emoji/0_1566.png index f57520265f..99a5538591 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1566.png and b/TMessagesProj/src/emojis/apple/emoji/0_1566.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1567.png b/TMessagesProj/src/emojis/apple/emoji/0_1567.png index 35a5658ca5..fb6a3623d3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1567.png and b/TMessagesProj/src/emojis/apple/emoji/0_1567.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1568.png b/TMessagesProj/src/emojis/apple/emoji/0_1568.png index 4ce8ffbb74..83201171a2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1568.png and b/TMessagesProj/src/emojis/apple/emoji/0_1568.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1569.png b/TMessagesProj/src/emojis/apple/emoji/0_1569.png index 9da3bca8b8..d18f5bfa13 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1569.png and b/TMessagesProj/src/emojis/apple/emoji/0_1569.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_157.png b/TMessagesProj/src/emojis/apple/emoji/0_157.png index 0b56de8c12..96149b097c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_157.png and b/TMessagesProj/src/emojis/apple/emoji/0_157.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1570.png b/TMessagesProj/src/emojis/apple/emoji/0_1570.png index e999e39bf6..e78a045e18 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1570.png and b/TMessagesProj/src/emojis/apple/emoji/0_1570.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1571.png b/TMessagesProj/src/emojis/apple/emoji/0_1571.png index a606c6e53d..0018629c7f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1571.png and b/TMessagesProj/src/emojis/apple/emoji/0_1571.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1572.png b/TMessagesProj/src/emojis/apple/emoji/0_1572.png index b892c0e76a..8a19ec682b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1572.png and b/TMessagesProj/src/emojis/apple/emoji/0_1572.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1573.png b/TMessagesProj/src/emojis/apple/emoji/0_1573.png index 5c62e76da5..737f02ddc4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1573.png and b/TMessagesProj/src/emojis/apple/emoji/0_1573.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1574.png b/TMessagesProj/src/emojis/apple/emoji/0_1574.png index ded8247b82..ac83e23474 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1574.png and b/TMessagesProj/src/emojis/apple/emoji/0_1574.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1575.png b/TMessagesProj/src/emojis/apple/emoji/0_1575.png index 9e99c85c7d..91dd33c5f6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1575.png and b/TMessagesProj/src/emojis/apple/emoji/0_1575.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1576.png b/TMessagesProj/src/emojis/apple/emoji/0_1576.png index 58ac6d58aa..1854f6b95c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1576.png and b/TMessagesProj/src/emojis/apple/emoji/0_1576.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1577.png b/TMessagesProj/src/emojis/apple/emoji/0_1577.png index a78510d165..6e03c500dd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1577.png and b/TMessagesProj/src/emojis/apple/emoji/0_1577.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1578.png b/TMessagesProj/src/emojis/apple/emoji/0_1578.png index 3ad0e17b23..091eb70d3e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1578.png and b/TMessagesProj/src/emojis/apple/emoji/0_1578.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1579.png b/TMessagesProj/src/emojis/apple/emoji/0_1579.png index a85750f9f1..342f95ae76 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1579.png and b/TMessagesProj/src/emojis/apple/emoji/0_1579.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_158.png b/TMessagesProj/src/emojis/apple/emoji/0_158.png index 6d450aa1d0..a358f619c1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_158.png and b/TMessagesProj/src/emojis/apple/emoji/0_158.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1580.png b/TMessagesProj/src/emojis/apple/emoji/0_1580.png index eb4efae5a7..407a808481 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1580.png and b/TMessagesProj/src/emojis/apple/emoji/0_1580.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1581.png b/TMessagesProj/src/emojis/apple/emoji/0_1581.png index b20ea900be..126374db1d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1581.png and b/TMessagesProj/src/emojis/apple/emoji/0_1581.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1582.png b/TMessagesProj/src/emojis/apple/emoji/0_1582.png index a2263adf9b..223f5c64fa 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1582.png and b/TMessagesProj/src/emojis/apple/emoji/0_1582.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1583.png b/TMessagesProj/src/emojis/apple/emoji/0_1583.png index db2dd29cc0..722a2e3ec3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1583.png and b/TMessagesProj/src/emojis/apple/emoji/0_1583.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1584.png b/TMessagesProj/src/emojis/apple/emoji/0_1584.png index 397e775667..35083d0827 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1584.png and b/TMessagesProj/src/emojis/apple/emoji/0_1584.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1585.png b/TMessagesProj/src/emojis/apple/emoji/0_1585.png index c33df9fafa..eabe8a6f15 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1585.png and b/TMessagesProj/src/emojis/apple/emoji/0_1585.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1586.png b/TMessagesProj/src/emojis/apple/emoji/0_1586.png index a2456c0461..e0da28fb7b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1586.png and b/TMessagesProj/src/emojis/apple/emoji/0_1586.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1587.png b/TMessagesProj/src/emojis/apple/emoji/0_1587.png index 9d90261b37..56e189f0f5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1587.png and b/TMessagesProj/src/emojis/apple/emoji/0_1587.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1588.png b/TMessagesProj/src/emojis/apple/emoji/0_1588.png index 7ab6b8bd6a..5cadab1a59 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1588.png and b/TMessagesProj/src/emojis/apple/emoji/0_1588.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1589.png b/TMessagesProj/src/emojis/apple/emoji/0_1589.png index 7bc078d46a..f276af0362 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1589.png and b/TMessagesProj/src/emojis/apple/emoji/0_1589.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_159.png b/TMessagesProj/src/emojis/apple/emoji/0_159.png index 49d3d45e6a..1193ad79c1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_159.png and b/TMessagesProj/src/emojis/apple/emoji/0_159.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1590.png b/TMessagesProj/src/emojis/apple/emoji/0_1590.png index d8663311bc..43033615d4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1590.png and b/TMessagesProj/src/emojis/apple/emoji/0_1590.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1591.png b/TMessagesProj/src/emojis/apple/emoji/0_1591.png index 4a4703caae..660c74eb19 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1591.png and b/TMessagesProj/src/emojis/apple/emoji/0_1591.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1592.png b/TMessagesProj/src/emojis/apple/emoji/0_1592.png index 4930284938..60f32a123d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1592.png and b/TMessagesProj/src/emojis/apple/emoji/0_1592.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1593.png b/TMessagesProj/src/emojis/apple/emoji/0_1593.png index 52ba3ef8d1..544dd9d566 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1593.png and b/TMessagesProj/src/emojis/apple/emoji/0_1593.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1594.png b/TMessagesProj/src/emojis/apple/emoji/0_1594.png index d269b07521..84eeda9dbf 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1594.png and b/TMessagesProj/src/emojis/apple/emoji/0_1594.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1595.png b/TMessagesProj/src/emojis/apple/emoji/0_1595.png index 78c1b85f60..f0f57b3767 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1595.png and b/TMessagesProj/src/emojis/apple/emoji/0_1595.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1596.png b/TMessagesProj/src/emojis/apple/emoji/0_1596.png index b055f0266a..fe26c220b8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1596.png and b/TMessagesProj/src/emojis/apple/emoji/0_1596.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1597.png b/TMessagesProj/src/emojis/apple/emoji/0_1597.png index 7fcb580cf4..1ff38e1312 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1597.png and b/TMessagesProj/src/emojis/apple/emoji/0_1597.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1598.png b/TMessagesProj/src/emojis/apple/emoji/0_1598.png index 916c53497a..5102347fff 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1598.png and b/TMessagesProj/src/emojis/apple/emoji/0_1598.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1599.png b/TMessagesProj/src/emojis/apple/emoji/0_1599.png index 939b96ec33..d97b138e79 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1599.png and b/TMessagesProj/src/emojis/apple/emoji/0_1599.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_16.png b/TMessagesProj/src/emojis/apple/emoji/0_16.png index bda1f25cba..bb115dc5f3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_16.png and b/TMessagesProj/src/emojis/apple/emoji/0_16.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_160.png b/TMessagesProj/src/emojis/apple/emoji/0_160.png index ce4e3c2aeb..2721d7632a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_160.png and b/TMessagesProj/src/emojis/apple/emoji/0_160.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1600.png b/TMessagesProj/src/emojis/apple/emoji/0_1600.png index 23582618a4..68cf1cab7c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1600.png and b/TMessagesProj/src/emojis/apple/emoji/0_1600.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1601.png b/TMessagesProj/src/emojis/apple/emoji/0_1601.png index e666238030..6fca268099 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1601.png and b/TMessagesProj/src/emojis/apple/emoji/0_1601.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1602.png b/TMessagesProj/src/emojis/apple/emoji/0_1602.png index 95fadf2bda..76f542e594 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1602.png and b/TMessagesProj/src/emojis/apple/emoji/0_1602.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1603.png b/TMessagesProj/src/emojis/apple/emoji/0_1603.png index 0219f1fa70..3ad59f3260 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1603.png and b/TMessagesProj/src/emojis/apple/emoji/0_1603.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1604.png b/TMessagesProj/src/emojis/apple/emoji/0_1604.png index fe87bae9b6..15898fc171 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1604.png and b/TMessagesProj/src/emojis/apple/emoji/0_1604.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1605.png b/TMessagesProj/src/emojis/apple/emoji/0_1605.png index 4ef65c57d8..15588ce487 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1605.png and b/TMessagesProj/src/emojis/apple/emoji/0_1605.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1606.png b/TMessagesProj/src/emojis/apple/emoji/0_1606.png index 4412aa3b5c..b2a9cd3239 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1606.png and b/TMessagesProj/src/emojis/apple/emoji/0_1606.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1607.png b/TMessagesProj/src/emojis/apple/emoji/0_1607.png index 955e87e889..6ce988b15b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1607.png and b/TMessagesProj/src/emojis/apple/emoji/0_1607.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1608.png b/TMessagesProj/src/emojis/apple/emoji/0_1608.png index 5e87850c57..a6d637263a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1608.png and b/TMessagesProj/src/emojis/apple/emoji/0_1608.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1609.png b/TMessagesProj/src/emojis/apple/emoji/0_1609.png index 441e819af7..d2fda5230f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1609.png and b/TMessagesProj/src/emojis/apple/emoji/0_1609.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_161.png b/TMessagesProj/src/emojis/apple/emoji/0_161.png index a46b3789a7..96e2e961c7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_161.png and b/TMessagesProj/src/emojis/apple/emoji/0_161.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1610.png b/TMessagesProj/src/emojis/apple/emoji/0_1610.png index 10afb93c80..179a3093b0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1610.png and b/TMessagesProj/src/emojis/apple/emoji/0_1610.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1611.png b/TMessagesProj/src/emojis/apple/emoji/0_1611.png index b44da97922..177f52a839 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1611.png and b/TMessagesProj/src/emojis/apple/emoji/0_1611.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1612.png b/TMessagesProj/src/emojis/apple/emoji/0_1612.png index cab3b387b8..c473c1ea37 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1612.png and b/TMessagesProj/src/emojis/apple/emoji/0_1612.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1613.png b/TMessagesProj/src/emojis/apple/emoji/0_1613.png index a5c218f8d5..af2e2ba53b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1613.png and b/TMessagesProj/src/emojis/apple/emoji/0_1613.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1614.png b/TMessagesProj/src/emojis/apple/emoji/0_1614.png index 514f1d96bb..3e91e24efb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1614.png and b/TMessagesProj/src/emojis/apple/emoji/0_1614.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1615.png b/TMessagesProj/src/emojis/apple/emoji/0_1615.png index 7ee9299c01..0254c49bd8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1615.png and b/TMessagesProj/src/emojis/apple/emoji/0_1615.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1616.png b/TMessagesProj/src/emojis/apple/emoji/0_1616.png index db4e998065..114c27b655 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1616.png and b/TMessagesProj/src/emojis/apple/emoji/0_1616.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1617.png b/TMessagesProj/src/emojis/apple/emoji/0_1617.png index 29c4a37f83..50d5b10cc3 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1617.png and b/TMessagesProj/src/emojis/apple/emoji/0_1617.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1618.png b/TMessagesProj/src/emojis/apple/emoji/0_1618.png index b4c6db0e1d..6a87cb1312 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1618.png and b/TMessagesProj/src/emojis/apple/emoji/0_1618.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1619.png b/TMessagesProj/src/emojis/apple/emoji/0_1619.png index 866ed93216..b0e64deb16 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1619.png and b/TMessagesProj/src/emojis/apple/emoji/0_1619.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_162.png b/TMessagesProj/src/emojis/apple/emoji/0_162.png index f68a811a8e..cb7ef678c0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_162.png and b/TMessagesProj/src/emojis/apple/emoji/0_162.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1620.png b/TMessagesProj/src/emojis/apple/emoji/0_1620.png index c1cf98effc..382e87f2b8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1620.png and b/TMessagesProj/src/emojis/apple/emoji/0_1620.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1621.png b/TMessagesProj/src/emojis/apple/emoji/0_1621.png index c9a4abfa57..ba80f9427c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1621.png and b/TMessagesProj/src/emojis/apple/emoji/0_1621.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1622.png b/TMessagesProj/src/emojis/apple/emoji/0_1622.png index b95097a812..73a71ab7f7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1622.png and b/TMessagesProj/src/emojis/apple/emoji/0_1622.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1623.png b/TMessagesProj/src/emojis/apple/emoji/0_1623.png index aaf4d2e913..b558dcf978 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1623.png and b/TMessagesProj/src/emojis/apple/emoji/0_1623.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1624.png b/TMessagesProj/src/emojis/apple/emoji/0_1624.png index 971daa6ecf..728a20d1cb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1624.png and b/TMessagesProj/src/emojis/apple/emoji/0_1624.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1625.png b/TMessagesProj/src/emojis/apple/emoji/0_1625.png index ed62dfa35f..173d444a06 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1625.png and b/TMessagesProj/src/emojis/apple/emoji/0_1625.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1626.png b/TMessagesProj/src/emojis/apple/emoji/0_1626.png index 23218ed469..6d55a4d343 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1626.png and b/TMessagesProj/src/emojis/apple/emoji/0_1626.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1627.png b/TMessagesProj/src/emojis/apple/emoji/0_1627.png index ae8a9c159b..4b2bd68292 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1627.png and b/TMessagesProj/src/emojis/apple/emoji/0_1627.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1628.png b/TMessagesProj/src/emojis/apple/emoji/0_1628.png index 4b8d3357ea..76b929dfee 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1628.png and b/TMessagesProj/src/emojis/apple/emoji/0_1628.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1629.png b/TMessagesProj/src/emojis/apple/emoji/0_1629.png index eb7ec0c830..9170719df8 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1629.png and b/TMessagesProj/src/emojis/apple/emoji/0_1629.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_163.png b/TMessagesProj/src/emojis/apple/emoji/0_163.png index c74d94d024..711ee3900d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_163.png and b/TMessagesProj/src/emojis/apple/emoji/0_163.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1630.png b/TMessagesProj/src/emojis/apple/emoji/0_1630.png index b724a1cdf6..d11268ee6c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1630.png and b/TMessagesProj/src/emojis/apple/emoji/0_1630.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1631.png b/TMessagesProj/src/emojis/apple/emoji/0_1631.png index 3325be2387..402626117c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1631.png and b/TMessagesProj/src/emojis/apple/emoji/0_1631.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1632.png b/TMessagesProj/src/emojis/apple/emoji/0_1632.png index 52bd67e171..1e1c9dfa0e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1632.png and b/TMessagesProj/src/emojis/apple/emoji/0_1632.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1633.png b/TMessagesProj/src/emojis/apple/emoji/0_1633.png index eaa5663043..aa51b0e6eb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1633.png and b/TMessagesProj/src/emojis/apple/emoji/0_1633.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1634.png b/TMessagesProj/src/emojis/apple/emoji/0_1634.png index 45d084f532..d0069aafb3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1634.png and b/TMessagesProj/src/emojis/apple/emoji/0_1634.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1635.png b/TMessagesProj/src/emojis/apple/emoji/0_1635.png index 106087c635..5af05883e8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1635.png and b/TMessagesProj/src/emojis/apple/emoji/0_1635.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1636.png b/TMessagesProj/src/emojis/apple/emoji/0_1636.png index 6e858c9aec..be58167772 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1636.png and b/TMessagesProj/src/emojis/apple/emoji/0_1636.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1637.png b/TMessagesProj/src/emojis/apple/emoji/0_1637.png index a2879beeb4..fc9bd2d7bf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1637.png and b/TMessagesProj/src/emojis/apple/emoji/0_1637.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1638.png b/TMessagesProj/src/emojis/apple/emoji/0_1638.png index 5126d88dcc..803e5260a2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1638.png and b/TMessagesProj/src/emojis/apple/emoji/0_1638.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1639.png b/TMessagesProj/src/emojis/apple/emoji/0_1639.png index 4e20648cfb..599f243e93 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1639.png and b/TMessagesProj/src/emojis/apple/emoji/0_1639.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_164.png b/TMessagesProj/src/emojis/apple/emoji/0_164.png index e569ad3a6c..08fc03e438 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_164.png and b/TMessagesProj/src/emojis/apple/emoji/0_164.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1640.png b/TMessagesProj/src/emojis/apple/emoji/0_1640.png index ed9e8e1b70..16ac67364e 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1640.png and b/TMessagesProj/src/emojis/apple/emoji/0_1640.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1641.png b/TMessagesProj/src/emojis/apple/emoji/0_1641.png index 39e25d5935..4b4830cf1e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1641.png and b/TMessagesProj/src/emojis/apple/emoji/0_1641.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1642.png b/TMessagesProj/src/emojis/apple/emoji/0_1642.png index fbfbbf77f0..de4fcb08b9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1642.png and b/TMessagesProj/src/emojis/apple/emoji/0_1642.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1643.png b/TMessagesProj/src/emojis/apple/emoji/0_1643.png index 4b2e6881a1..e22d5b84c3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1643.png and b/TMessagesProj/src/emojis/apple/emoji/0_1643.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1644.png b/TMessagesProj/src/emojis/apple/emoji/0_1644.png index 0d0dce79fd..7f61ef1e44 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1644.png and b/TMessagesProj/src/emojis/apple/emoji/0_1644.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1645.png b/TMessagesProj/src/emojis/apple/emoji/0_1645.png index 6b0903fd6a..17ce64d4d4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1645.png and b/TMessagesProj/src/emojis/apple/emoji/0_1645.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1646.png b/TMessagesProj/src/emojis/apple/emoji/0_1646.png index ae217eafeb..737ed5eca2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1646.png and b/TMessagesProj/src/emojis/apple/emoji/0_1646.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1647.png b/TMessagesProj/src/emojis/apple/emoji/0_1647.png index f6a6aa61e8..df47d4a6fc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1647.png and b/TMessagesProj/src/emojis/apple/emoji/0_1647.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1648.png b/TMessagesProj/src/emojis/apple/emoji/0_1648.png index c135e864a5..49493d26e9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1648.png and b/TMessagesProj/src/emojis/apple/emoji/0_1648.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1649.png b/TMessagesProj/src/emojis/apple/emoji/0_1649.png index 8a60b0efa0..597ab94d81 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1649.png and b/TMessagesProj/src/emojis/apple/emoji/0_1649.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_165.png b/TMessagesProj/src/emojis/apple/emoji/0_165.png index f76d3b4199..bf70f9687c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_165.png and b/TMessagesProj/src/emojis/apple/emoji/0_165.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1650.png b/TMessagesProj/src/emojis/apple/emoji/0_1650.png index 8199b77401..3eda7d2db6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1650.png and b/TMessagesProj/src/emojis/apple/emoji/0_1650.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1651.png b/TMessagesProj/src/emojis/apple/emoji/0_1651.png index a7a3f5bc26..70ad3cd570 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1651.png and b/TMessagesProj/src/emojis/apple/emoji/0_1651.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1652.png b/TMessagesProj/src/emojis/apple/emoji/0_1652.png index 7389bb063e..f297c2ba55 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1652.png and b/TMessagesProj/src/emojis/apple/emoji/0_1652.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1653.png b/TMessagesProj/src/emojis/apple/emoji/0_1653.png index d4ea372085..3e7af12245 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1653.png and b/TMessagesProj/src/emojis/apple/emoji/0_1653.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1654.png b/TMessagesProj/src/emojis/apple/emoji/0_1654.png index df3ba26256..d5ea027b62 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1654.png and b/TMessagesProj/src/emojis/apple/emoji/0_1654.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1655.png b/TMessagesProj/src/emojis/apple/emoji/0_1655.png index 1f1b445de0..c90d1ce2b7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1655.png and b/TMessagesProj/src/emojis/apple/emoji/0_1655.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1656.png b/TMessagesProj/src/emojis/apple/emoji/0_1656.png index 0dde4045d7..f0e252b9a0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1656.png and b/TMessagesProj/src/emojis/apple/emoji/0_1656.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1657.png b/TMessagesProj/src/emojis/apple/emoji/0_1657.png index 1ddfab1601..3382ee2237 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1657.png and b/TMessagesProj/src/emojis/apple/emoji/0_1657.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1658.png b/TMessagesProj/src/emojis/apple/emoji/0_1658.png index 31d0741ea6..00ae42cb30 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1658.png and b/TMessagesProj/src/emojis/apple/emoji/0_1658.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1659.png b/TMessagesProj/src/emojis/apple/emoji/0_1659.png index 46b609f1db..fa78241e3c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1659.png and b/TMessagesProj/src/emojis/apple/emoji/0_1659.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_166.png b/TMessagesProj/src/emojis/apple/emoji/0_166.png index 91a2aeb13c..06c17cc34c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_166.png and b/TMessagesProj/src/emojis/apple/emoji/0_166.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1660.png b/TMessagesProj/src/emojis/apple/emoji/0_1660.png index 08dada1230..b885e2f011 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1660.png and b/TMessagesProj/src/emojis/apple/emoji/0_1660.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1661.png b/TMessagesProj/src/emojis/apple/emoji/0_1661.png index a241b952cd..a5b1d95f84 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1661.png and b/TMessagesProj/src/emojis/apple/emoji/0_1661.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1662.png b/TMessagesProj/src/emojis/apple/emoji/0_1662.png index 2fe87097eb..bc6846cfcf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1662.png and b/TMessagesProj/src/emojis/apple/emoji/0_1662.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1663.png b/TMessagesProj/src/emojis/apple/emoji/0_1663.png index 26383bf4f6..ffefaf9d40 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1663.png and b/TMessagesProj/src/emojis/apple/emoji/0_1663.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1664.png b/TMessagesProj/src/emojis/apple/emoji/0_1664.png index 076d88903c..e3277bfcce 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1664.png and b/TMessagesProj/src/emojis/apple/emoji/0_1664.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1665.png b/TMessagesProj/src/emojis/apple/emoji/0_1665.png index 8ae6c7cb88..eb2c63aaf4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1665.png and b/TMessagesProj/src/emojis/apple/emoji/0_1665.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1666.png b/TMessagesProj/src/emojis/apple/emoji/0_1666.png index 0cef9c49bf..47da60fb0a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1666.png and b/TMessagesProj/src/emojis/apple/emoji/0_1666.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1667.png b/TMessagesProj/src/emojis/apple/emoji/0_1667.png index c9f3271b0b..781fae0a32 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1667.png and b/TMessagesProj/src/emojis/apple/emoji/0_1667.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1668.png b/TMessagesProj/src/emojis/apple/emoji/0_1668.png index e81f2f285c..b0bc54a254 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1668.png and b/TMessagesProj/src/emojis/apple/emoji/0_1668.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1669.png b/TMessagesProj/src/emojis/apple/emoji/0_1669.png index 703ec84321..bea82981b5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1669.png and b/TMessagesProj/src/emojis/apple/emoji/0_1669.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_167.png b/TMessagesProj/src/emojis/apple/emoji/0_167.png index 5fd59f472f..6b33759d57 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_167.png and b/TMessagesProj/src/emojis/apple/emoji/0_167.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1670.png b/TMessagesProj/src/emojis/apple/emoji/0_1670.png index 9348f44fba..682f58d2f6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1670.png and b/TMessagesProj/src/emojis/apple/emoji/0_1670.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1671.png b/TMessagesProj/src/emojis/apple/emoji/0_1671.png index b161486386..1351eb679e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1671.png and b/TMessagesProj/src/emojis/apple/emoji/0_1671.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1672.png b/TMessagesProj/src/emojis/apple/emoji/0_1672.png index d62c385be4..e657132a5e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1672.png and b/TMessagesProj/src/emojis/apple/emoji/0_1672.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1673.png b/TMessagesProj/src/emojis/apple/emoji/0_1673.png index 6728948acf..a4954dfadd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1673.png and b/TMessagesProj/src/emojis/apple/emoji/0_1673.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1674.png b/TMessagesProj/src/emojis/apple/emoji/0_1674.png index cb7f97d00b..d76507f0bd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1674.png and b/TMessagesProj/src/emojis/apple/emoji/0_1674.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1675.png b/TMessagesProj/src/emojis/apple/emoji/0_1675.png index 4dff39ed05..721f49a996 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1675.png and b/TMessagesProj/src/emojis/apple/emoji/0_1675.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1676.png b/TMessagesProj/src/emojis/apple/emoji/0_1676.png index 016dc55bfe..70717f4874 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1676.png and b/TMessagesProj/src/emojis/apple/emoji/0_1676.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1677.png b/TMessagesProj/src/emojis/apple/emoji/0_1677.png index 8e5e43b150..9ea42a017a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1677.png and b/TMessagesProj/src/emojis/apple/emoji/0_1677.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1678.png b/TMessagesProj/src/emojis/apple/emoji/0_1678.png index 7883ea1786..07f7dbfcb1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1678.png and b/TMessagesProj/src/emojis/apple/emoji/0_1678.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1679.png b/TMessagesProj/src/emojis/apple/emoji/0_1679.png index 4ba1d85314..d6c549e97f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1679.png and b/TMessagesProj/src/emojis/apple/emoji/0_1679.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_168.png b/TMessagesProj/src/emojis/apple/emoji/0_168.png index 2b6cd6cb7a..829d93d4e6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_168.png and b/TMessagesProj/src/emojis/apple/emoji/0_168.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1680.png b/TMessagesProj/src/emojis/apple/emoji/0_1680.png index 243feb1395..491a1d8953 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1680.png and b/TMessagesProj/src/emojis/apple/emoji/0_1680.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1681.png b/TMessagesProj/src/emojis/apple/emoji/0_1681.png index 36f979751e..80357978ad 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1681.png and b/TMessagesProj/src/emojis/apple/emoji/0_1681.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1682.png b/TMessagesProj/src/emojis/apple/emoji/0_1682.png index 2d96b8305d..631f1f8a34 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1682.png and b/TMessagesProj/src/emojis/apple/emoji/0_1682.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1683.png b/TMessagesProj/src/emojis/apple/emoji/0_1683.png index 67349b23a8..61153e94d9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1683.png and b/TMessagesProj/src/emojis/apple/emoji/0_1683.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1684.png b/TMessagesProj/src/emojis/apple/emoji/0_1684.png index 66678480b8..883e348ff7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1684.png and b/TMessagesProj/src/emojis/apple/emoji/0_1684.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1685.png b/TMessagesProj/src/emojis/apple/emoji/0_1685.png index db18ad26b3..495eece85c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1685.png and b/TMessagesProj/src/emojis/apple/emoji/0_1685.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1686.png b/TMessagesProj/src/emojis/apple/emoji/0_1686.png index f4996ac140..7f2e2439fc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1686.png and b/TMessagesProj/src/emojis/apple/emoji/0_1686.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1687.png b/TMessagesProj/src/emojis/apple/emoji/0_1687.png index 883f64bc15..b29f3858cf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1687.png and b/TMessagesProj/src/emojis/apple/emoji/0_1687.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1688.png b/TMessagesProj/src/emojis/apple/emoji/0_1688.png index 8d4a7010f4..ceda121c4d 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1688.png and b/TMessagesProj/src/emojis/apple/emoji/0_1688.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1689.png b/TMessagesProj/src/emojis/apple/emoji/0_1689.png index 78b0663cc9..602ae5b6bd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1689.png and b/TMessagesProj/src/emojis/apple/emoji/0_1689.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_169.png b/TMessagesProj/src/emojis/apple/emoji/0_169.png index a01f16bb2c..ae19ad413f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_169.png and b/TMessagesProj/src/emojis/apple/emoji/0_169.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1690.png b/TMessagesProj/src/emojis/apple/emoji/0_1690.png index 6da1077d78..dd2c8be177 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1690.png and b/TMessagesProj/src/emojis/apple/emoji/0_1690.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1691.png b/TMessagesProj/src/emojis/apple/emoji/0_1691.png index 0e77ef2ed4..c0f7983b78 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1691.png and b/TMessagesProj/src/emojis/apple/emoji/0_1691.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1692.png b/TMessagesProj/src/emojis/apple/emoji/0_1692.png index 68714213ec..5dc916b124 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1692.png and b/TMessagesProj/src/emojis/apple/emoji/0_1692.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1693.png b/TMessagesProj/src/emojis/apple/emoji/0_1693.png index e3a2f83e74..973b273d23 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1693.png and b/TMessagesProj/src/emojis/apple/emoji/0_1693.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1694.png b/TMessagesProj/src/emojis/apple/emoji/0_1694.png index 23211a0a40..1f6feaaa9c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1694.png and b/TMessagesProj/src/emojis/apple/emoji/0_1694.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1695.png b/TMessagesProj/src/emojis/apple/emoji/0_1695.png index 6442fd1feb..0a1e6f1f72 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1695.png and b/TMessagesProj/src/emojis/apple/emoji/0_1695.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1696.png b/TMessagesProj/src/emojis/apple/emoji/0_1696.png index e945f98ce8..0395803f20 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1696.png and b/TMessagesProj/src/emojis/apple/emoji/0_1696.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1697.png b/TMessagesProj/src/emojis/apple/emoji/0_1697.png index f03c52d113..5803995ae6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1697.png and b/TMessagesProj/src/emojis/apple/emoji/0_1697.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1698.png b/TMessagesProj/src/emojis/apple/emoji/0_1698.png index dfdbac6b14..0abf0e5c99 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1698.png and b/TMessagesProj/src/emojis/apple/emoji/0_1698.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1699.png b/TMessagesProj/src/emojis/apple/emoji/0_1699.png index f3053c6a91..3248ffb3c2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1699.png and b/TMessagesProj/src/emojis/apple/emoji/0_1699.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_17.png b/TMessagesProj/src/emojis/apple/emoji/0_17.png index f13c6882e3..ff79cece2a 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_17.png and b/TMessagesProj/src/emojis/apple/emoji/0_17.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_170.png b/TMessagesProj/src/emojis/apple/emoji/0_170.png index 329e24b1a2..a113d52367 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_170.png and b/TMessagesProj/src/emojis/apple/emoji/0_170.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1700.png b/TMessagesProj/src/emojis/apple/emoji/0_1700.png index 95eea85dc3..83e09cb96d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1700.png and b/TMessagesProj/src/emojis/apple/emoji/0_1700.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1701.png b/TMessagesProj/src/emojis/apple/emoji/0_1701.png index a2f731b64e..548be52316 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1701.png and b/TMessagesProj/src/emojis/apple/emoji/0_1701.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1702.png b/TMessagesProj/src/emojis/apple/emoji/0_1702.png index c7fc2eb622..a657b6fd1e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1702.png and b/TMessagesProj/src/emojis/apple/emoji/0_1702.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1703.png b/TMessagesProj/src/emojis/apple/emoji/0_1703.png index 877f1930dd..4002c5bf2f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1703.png and b/TMessagesProj/src/emojis/apple/emoji/0_1703.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1704.png b/TMessagesProj/src/emojis/apple/emoji/0_1704.png index 0d4aa8c28c..cae24c9469 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1704.png and b/TMessagesProj/src/emojis/apple/emoji/0_1704.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1705.png b/TMessagesProj/src/emojis/apple/emoji/0_1705.png index 19982d2a98..812e25790d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1705.png and b/TMessagesProj/src/emojis/apple/emoji/0_1705.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1706.png b/TMessagesProj/src/emojis/apple/emoji/0_1706.png index 25839a7b6b..305f8012f1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1706.png and b/TMessagesProj/src/emojis/apple/emoji/0_1706.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1707.png b/TMessagesProj/src/emojis/apple/emoji/0_1707.png index c9769d19ec..236bcedbf1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1707.png and b/TMessagesProj/src/emojis/apple/emoji/0_1707.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1708.png b/TMessagesProj/src/emojis/apple/emoji/0_1708.png index 4bb8898aba..419966b4b9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1708.png and b/TMessagesProj/src/emojis/apple/emoji/0_1708.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1709.png b/TMessagesProj/src/emojis/apple/emoji/0_1709.png index 9a238f97b6..42a0f6086d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1709.png and b/TMessagesProj/src/emojis/apple/emoji/0_1709.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_171.png b/TMessagesProj/src/emojis/apple/emoji/0_171.png index 0fd19f676d..dcc1bcac58 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_171.png and b/TMessagesProj/src/emojis/apple/emoji/0_171.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1710.png b/TMessagesProj/src/emojis/apple/emoji/0_1710.png index 53a308729b..a8cd255b41 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1710.png and b/TMessagesProj/src/emojis/apple/emoji/0_1710.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1711.png b/TMessagesProj/src/emojis/apple/emoji/0_1711.png index 3c2cf32040..0c21ec7099 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1711.png and b/TMessagesProj/src/emojis/apple/emoji/0_1711.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1712.png b/TMessagesProj/src/emojis/apple/emoji/0_1712.png index 8f81283830..0dc7e56da7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1712.png and b/TMessagesProj/src/emojis/apple/emoji/0_1712.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1713.png b/TMessagesProj/src/emojis/apple/emoji/0_1713.png index ea92094be0..45fdd69e33 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1713.png and b/TMessagesProj/src/emojis/apple/emoji/0_1713.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1714.png b/TMessagesProj/src/emojis/apple/emoji/0_1714.png index b9eb1cd5ad..ba73c8d6ce 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1714.png and b/TMessagesProj/src/emojis/apple/emoji/0_1714.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1715.png b/TMessagesProj/src/emojis/apple/emoji/0_1715.png index a6ddc0118e..742f18bdf4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1715.png and b/TMessagesProj/src/emojis/apple/emoji/0_1715.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1716.png b/TMessagesProj/src/emojis/apple/emoji/0_1716.png index c91f67168f..a5d38d4b68 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1716.png and b/TMessagesProj/src/emojis/apple/emoji/0_1716.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1717.png b/TMessagesProj/src/emojis/apple/emoji/0_1717.png index 010581318b..c690c61302 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1717.png and b/TMessagesProj/src/emojis/apple/emoji/0_1717.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1718.png b/TMessagesProj/src/emojis/apple/emoji/0_1718.png index 5d9db8f536..92f8deb599 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1718.png and b/TMessagesProj/src/emojis/apple/emoji/0_1718.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1719.png b/TMessagesProj/src/emojis/apple/emoji/0_1719.png index 4ec27e19d9..801ea0815c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1719.png and b/TMessagesProj/src/emojis/apple/emoji/0_1719.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_172.png b/TMessagesProj/src/emojis/apple/emoji/0_172.png index 9de14baafe..ce3be27082 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_172.png and b/TMessagesProj/src/emojis/apple/emoji/0_172.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1720.png b/TMessagesProj/src/emojis/apple/emoji/0_1720.png index 00e5a49916..25340b03ff 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1720.png and b/TMessagesProj/src/emojis/apple/emoji/0_1720.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1721.png b/TMessagesProj/src/emojis/apple/emoji/0_1721.png index 6868cab65e..b5dcc4e480 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1721.png and b/TMessagesProj/src/emojis/apple/emoji/0_1721.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1722.png b/TMessagesProj/src/emojis/apple/emoji/0_1722.png index c3ecad76fb..ff2b62d0b3 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1722.png and b/TMessagesProj/src/emojis/apple/emoji/0_1722.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1723.png b/TMessagesProj/src/emojis/apple/emoji/0_1723.png index 1a96865666..a99f3348ef 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1723.png and b/TMessagesProj/src/emojis/apple/emoji/0_1723.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1724.png b/TMessagesProj/src/emojis/apple/emoji/0_1724.png index 878c61262b..b5ef905356 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1724.png and b/TMessagesProj/src/emojis/apple/emoji/0_1724.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1725.png b/TMessagesProj/src/emojis/apple/emoji/0_1725.png index c2d7ca61a8..361c818925 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1725.png and b/TMessagesProj/src/emojis/apple/emoji/0_1725.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1726.png b/TMessagesProj/src/emojis/apple/emoji/0_1726.png index 7a78b557cd..5430e8ec71 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1726.png and b/TMessagesProj/src/emojis/apple/emoji/0_1726.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1727.png b/TMessagesProj/src/emojis/apple/emoji/0_1727.png index 4b3cb9bd02..f6e4a40061 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1727.png and b/TMessagesProj/src/emojis/apple/emoji/0_1727.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1728.png b/TMessagesProj/src/emojis/apple/emoji/0_1728.png index f141f316a7..cc78ec26c7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1728.png and b/TMessagesProj/src/emojis/apple/emoji/0_1728.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1729.png b/TMessagesProj/src/emojis/apple/emoji/0_1729.png index 0e42b6704b..42e0594d0f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1729.png and b/TMessagesProj/src/emojis/apple/emoji/0_1729.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_173.png b/TMessagesProj/src/emojis/apple/emoji/0_173.png index 835a74a23d..03dbde408c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_173.png and b/TMessagesProj/src/emojis/apple/emoji/0_173.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1730.png b/TMessagesProj/src/emojis/apple/emoji/0_1730.png index ab9bf8c379..3bb83835f7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1730.png and b/TMessagesProj/src/emojis/apple/emoji/0_1730.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1731.png b/TMessagesProj/src/emojis/apple/emoji/0_1731.png index 2696addd4e..4f73b9c34f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1731.png and b/TMessagesProj/src/emojis/apple/emoji/0_1731.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1732.png b/TMessagesProj/src/emojis/apple/emoji/0_1732.png index e123b250ec..11c957b1c3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1732.png and b/TMessagesProj/src/emojis/apple/emoji/0_1732.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1733.png b/TMessagesProj/src/emojis/apple/emoji/0_1733.png index c9423df6b6..bbf27d13ec 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1733.png and b/TMessagesProj/src/emojis/apple/emoji/0_1733.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1734.png b/TMessagesProj/src/emojis/apple/emoji/0_1734.png index 8e51fa37ef..d35108116c 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1734.png and b/TMessagesProj/src/emojis/apple/emoji/0_1734.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1735.png b/TMessagesProj/src/emojis/apple/emoji/0_1735.png index f940db7b1d..8c4df540c7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1735.png and b/TMessagesProj/src/emojis/apple/emoji/0_1735.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1736.png b/TMessagesProj/src/emojis/apple/emoji/0_1736.png index b7be4345aa..ba119aa609 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1736.png and b/TMessagesProj/src/emojis/apple/emoji/0_1736.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1737.png b/TMessagesProj/src/emojis/apple/emoji/0_1737.png index 2205847a52..07082dd063 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1737.png and b/TMessagesProj/src/emojis/apple/emoji/0_1737.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1738.png b/TMessagesProj/src/emojis/apple/emoji/0_1738.png index 7e7dff0daa..366551f4ad 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1738.png and b/TMessagesProj/src/emojis/apple/emoji/0_1738.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1739.png b/TMessagesProj/src/emojis/apple/emoji/0_1739.png index cd0f0bc1be..3bcf8e73cb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1739.png and b/TMessagesProj/src/emojis/apple/emoji/0_1739.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_174.png b/TMessagesProj/src/emojis/apple/emoji/0_174.png index a7284b7285..688a7bec54 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_174.png and b/TMessagesProj/src/emojis/apple/emoji/0_174.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1740.png b/TMessagesProj/src/emojis/apple/emoji/0_1740.png index 978ac826a3..201ed26e61 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1740.png and b/TMessagesProj/src/emojis/apple/emoji/0_1740.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1741.png b/TMessagesProj/src/emojis/apple/emoji/0_1741.png index 243ba95567..edb6112782 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1741.png and b/TMessagesProj/src/emojis/apple/emoji/0_1741.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1742.png b/TMessagesProj/src/emojis/apple/emoji/0_1742.png index 0d9c78abff..11f48721ed 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1742.png and b/TMessagesProj/src/emojis/apple/emoji/0_1742.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1743.png b/TMessagesProj/src/emojis/apple/emoji/0_1743.png index a55abec381..9c56d2ff07 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1743.png and b/TMessagesProj/src/emojis/apple/emoji/0_1743.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1744.png b/TMessagesProj/src/emojis/apple/emoji/0_1744.png index d52ce7d826..e0ff80928b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1744.png and b/TMessagesProj/src/emojis/apple/emoji/0_1744.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1745.png b/TMessagesProj/src/emojis/apple/emoji/0_1745.png index de3fa68efb..da0b08faf9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1745.png and b/TMessagesProj/src/emojis/apple/emoji/0_1745.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1746.png b/TMessagesProj/src/emojis/apple/emoji/0_1746.png index 1ce843ceb1..5a88598411 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1746.png and b/TMessagesProj/src/emojis/apple/emoji/0_1746.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1747.png b/TMessagesProj/src/emojis/apple/emoji/0_1747.png index 965989e4dd..3fe7f0ae37 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1747.png and b/TMessagesProj/src/emojis/apple/emoji/0_1747.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1748.png b/TMessagesProj/src/emojis/apple/emoji/0_1748.png index aef8e4df58..8538076dac 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1748.png and b/TMessagesProj/src/emojis/apple/emoji/0_1748.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1749.png b/TMessagesProj/src/emojis/apple/emoji/0_1749.png index 7a99c3cc51..8f88a85604 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1749.png and b/TMessagesProj/src/emojis/apple/emoji/0_1749.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_175.png b/TMessagesProj/src/emojis/apple/emoji/0_175.png index 2249a10116..fc3255b9e9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_175.png and b/TMessagesProj/src/emojis/apple/emoji/0_175.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1750.png b/TMessagesProj/src/emojis/apple/emoji/0_1750.png index 682a0e37ce..48686b421b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1750.png and b/TMessagesProj/src/emojis/apple/emoji/0_1750.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1751.png b/TMessagesProj/src/emojis/apple/emoji/0_1751.png index 3d652710ce..fb970c2a79 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1751.png and b/TMessagesProj/src/emojis/apple/emoji/0_1751.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1752.png b/TMessagesProj/src/emojis/apple/emoji/0_1752.png index ef2754cf58..1a0c869681 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1752.png and b/TMessagesProj/src/emojis/apple/emoji/0_1752.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1753.png b/TMessagesProj/src/emojis/apple/emoji/0_1753.png index fb78e39726..625d10cf1b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1753.png and b/TMessagesProj/src/emojis/apple/emoji/0_1753.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1754.png b/TMessagesProj/src/emojis/apple/emoji/0_1754.png index e03b1f445f..9e87fc1627 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1754.png and b/TMessagesProj/src/emojis/apple/emoji/0_1754.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1755.png b/TMessagesProj/src/emojis/apple/emoji/0_1755.png index a7cfad900c..7b2c474778 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1755.png and b/TMessagesProj/src/emojis/apple/emoji/0_1755.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1756.png b/TMessagesProj/src/emojis/apple/emoji/0_1756.png index dd2b895035..49694edf11 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1756.png and b/TMessagesProj/src/emojis/apple/emoji/0_1756.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1757.png b/TMessagesProj/src/emojis/apple/emoji/0_1757.png index 62110ef5e2..e76d6be654 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1757.png and b/TMessagesProj/src/emojis/apple/emoji/0_1757.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1758.png b/TMessagesProj/src/emojis/apple/emoji/0_1758.png index 4db9a7ec01..8db8bc2d64 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1758.png and b/TMessagesProj/src/emojis/apple/emoji/0_1758.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1759.png b/TMessagesProj/src/emojis/apple/emoji/0_1759.png index f3f80040c0..784c5590f8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1759.png and b/TMessagesProj/src/emojis/apple/emoji/0_1759.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_176.png b/TMessagesProj/src/emojis/apple/emoji/0_176.png index c4c2c3b33e..267db2b8a3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_176.png and b/TMessagesProj/src/emojis/apple/emoji/0_176.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1760.png b/TMessagesProj/src/emojis/apple/emoji/0_1760.png index eb71ef4533..85ba04736e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1760.png and b/TMessagesProj/src/emojis/apple/emoji/0_1760.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1761.png b/TMessagesProj/src/emojis/apple/emoji/0_1761.png index c32732ba54..2efa9c50a6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1761.png and b/TMessagesProj/src/emojis/apple/emoji/0_1761.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1762.png b/TMessagesProj/src/emojis/apple/emoji/0_1762.png index 3cfdbd2152..8227ec2758 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1762.png and b/TMessagesProj/src/emojis/apple/emoji/0_1762.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1763.png b/TMessagesProj/src/emojis/apple/emoji/0_1763.png index 65870334cc..915d3f4c26 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1763.png and b/TMessagesProj/src/emojis/apple/emoji/0_1763.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1764.png b/TMessagesProj/src/emojis/apple/emoji/0_1764.png index 5ae567b7a8..f6b3f190d5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1764.png and b/TMessagesProj/src/emojis/apple/emoji/0_1764.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1765.png b/TMessagesProj/src/emojis/apple/emoji/0_1765.png index 034bff1692..8f6a321b7d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1765.png and b/TMessagesProj/src/emojis/apple/emoji/0_1765.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1766.png b/TMessagesProj/src/emojis/apple/emoji/0_1766.png index dd621db42f..6826f27ccb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1766.png and b/TMessagesProj/src/emojis/apple/emoji/0_1766.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1767.png b/TMessagesProj/src/emojis/apple/emoji/0_1767.png index 2efcc07c15..462b133540 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1767.png and b/TMessagesProj/src/emojis/apple/emoji/0_1767.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1768.png b/TMessagesProj/src/emojis/apple/emoji/0_1768.png index cc7d91da24..07fc648659 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1768.png and b/TMessagesProj/src/emojis/apple/emoji/0_1768.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1769.png b/TMessagesProj/src/emojis/apple/emoji/0_1769.png index 34f5806c71..72dc7b00fd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1769.png and b/TMessagesProj/src/emojis/apple/emoji/0_1769.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_177.png b/TMessagesProj/src/emojis/apple/emoji/0_177.png index 069a921c70..55f4d1a302 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_177.png and b/TMessagesProj/src/emojis/apple/emoji/0_177.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1770.png b/TMessagesProj/src/emojis/apple/emoji/0_1770.png index 97b361c468..e4217c47fb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1770.png and b/TMessagesProj/src/emojis/apple/emoji/0_1770.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1771.png b/TMessagesProj/src/emojis/apple/emoji/0_1771.png index 509f71efd0..c6032b8360 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1771.png and b/TMessagesProj/src/emojis/apple/emoji/0_1771.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1772.png b/TMessagesProj/src/emojis/apple/emoji/0_1772.png index 9323fc7e07..f476a178b3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1772.png and b/TMessagesProj/src/emojis/apple/emoji/0_1772.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1773.png b/TMessagesProj/src/emojis/apple/emoji/0_1773.png index 913591ad9c..21c464829a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1773.png and b/TMessagesProj/src/emojis/apple/emoji/0_1773.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1774.png b/TMessagesProj/src/emojis/apple/emoji/0_1774.png index 2fd7780672..946409f931 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1774.png and b/TMessagesProj/src/emojis/apple/emoji/0_1774.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1775.png b/TMessagesProj/src/emojis/apple/emoji/0_1775.png index 022154ac09..db7eee6c80 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1775.png and b/TMessagesProj/src/emojis/apple/emoji/0_1775.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1776.png b/TMessagesProj/src/emojis/apple/emoji/0_1776.png index 2a1e75a689..ccd9f501d3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1776.png and b/TMessagesProj/src/emojis/apple/emoji/0_1776.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1777.png b/TMessagesProj/src/emojis/apple/emoji/0_1777.png index bca1c039a9..bd4b2d7964 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1777.png and b/TMessagesProj/src/emojis/apple/emoji/0_1777.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1778.png b/TMessagesProj/src/emojis/apple/emoji/0_1778.png index 82eba1f7a4..5e2d511d1c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1778.png and b/TMessagesProj/src/emojis/apple/emoji/0_1778.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1779.png b/TMessagesProj/src/emojis/apple/emoji/0_1779.png index 282dad8e42..472b775ae0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1779.png and b/TMessagesProj/src/emojis/apple/emoji/0_1779.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_178.png b/TMessagesProj/src/emojis/apple/emoji/0_178.png index a6ce9cc653..463d9428ac 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_178.png and b/TMessagesProj/src/emojis/apple/emoji/0_178.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1780.png b/TMessagesProj/src/emojis/apple/emoji/0_1780.png index ad150ef947..9ca6c68538 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1780.png and b/TMessagesProj/src/emojis/apple/emoji/0_1780.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1781.png b/TMessagesProj/src/emojis/apple/emoji/0_1781.png index cea63a630e..2abca50217 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1781.png and b/TMessagesProj/src/emojis/apple/emoji/0_1781.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1782.png b/TMessagesProj/src/emojis/apple/emoji/0_1782.png index f878d4f9fc..569aaa7a3a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1782.png and b/TMessagesProj/src/emojis/apple/emoji/0_1782.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1783.png b/TMessagesProj/src/emojis/apple/emoji/0_1783.png index 58c9803568..51c5787806 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1783.png and b/TMessagesProj/src/emojis/apple/emoji/0_1783.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1784.png b/TMessagesProj/src/emojis/apple/emoji/0_1784.png index 5fce3dcc39..926aa2b7b1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1784.png and b/TMessagesProj/src/emojis/apple/emoji/0_1784.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1785.png b/TMessagesProj/src/emojis/apple/emoji/0_1785.png index 23ae8c0c7f..52c967113d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1785.png and b/TMessagesProj/src/emojis/apple/emoji/0_1785.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1786.png b/TMessagesProj/src/emojis/apple/emoji/0_1786.png index 8732bc78ab..18e82e5ddc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1786.png and b/TMessagesProj/src/emojis/apple/emoji/0_1786.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1787.png b/TMessagesProj/src/emojis/apple/emoji/0_1787.png index 77419ccf0f..773fa8f28c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1787.png and b/TMessagesProj/src/emojis/apple/emoji/0_1787.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1788.png b/TMessagesProj/src/emojis/apple/emoji/0_1788.png index 8b7aa1e6dd..3645f8d9b1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1788.png and b/TMessagesProj/src/emojis/apple/emoji/0_1788.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1789.png b/TMessagesProj/src/emojis/apple/emoji/0_1789.png index cec78ff4cf..4ef37cd7ed 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1789.png and b/TMessagesProj/src/emojis/apple/emoji/0_1789.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_179.png b/TMessagesProj/src/emojis/apple/emoji/0_179.png index 6fe15d8075..47ab79dd8c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_179.png and b/TMessagesProj/src/emojis/apple/emoji/0_179.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1790.png b/TMessagesProj/src/emojis/apple/emoji/0_1790.png index 9285a4b62e..7c9747892e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1790.png and b/TMessagesProj/src/emojis/apple/emoji/0_1790.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1791.png b/TMessagesProj/src/emojis/apple/emoji/0_1791.png index ea075b6d19..9196717b2d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1791.png and b/TMessagesProj/src/emojis/apple/emoji/0_1791.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1792.png b/TMessagesProj/src/emojis/apple/emoji/0_1792.png index 9cce10e839..e1ddbd6f98 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1792.png and b/TMessagesProj/src/emojis/apple/emoji/0_1792.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1793.png b/TMessagesProj/src/emojis/apple/emoji/0_1793.png index 7c45634cbb..3ff6849152 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1793.png and b/TMessagesProj/src/emojis/apple/emoji/0_1793.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1794.png b/TMessagesProj/src/emojis/apple/emoji/0_1794.png index 4d0440d020..04e3ad210b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1794.png and b/TMessagesProj/src/emojis/apple/emoji/0_1794.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1795.png b/TMessagesProj/src/emojis/apple/emoji/0_1795.png index bce929a505..004b8090da 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1795.png and b/TMessagesProj/src/emojis/apple/emoji/0_1795.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1796.png b/TMessagesProj/src/emojis/apple/emoji/0_1796.png index e31a63c7bd..9e80d3a39f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1796.png and b/TMessagesProj/src/emojis/apple/emoji/0_1796.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1797.png b/TMessagesProj/src/emojis/apple/emoji/0_1797.png index 6d84b1cd2c..5f3080769d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1797.png and b/TMessagesProj/src/emojis/apple/emoji/0_1797.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1798.png b/TMessagesProj/src/emojis/apple/emoji/0_1798.png index e74eea232e..fb26343eb0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1798.png and b/TMessagesProj/src/emojis/apple/emoji/0_1798.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1799.png b/TMessagesProj/src/emojis/apple/emoji/0_1799.png index a5526e6163..fb065be00f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1799.png and b/TMessagesProj/src/emojis/apple/emoji/0_1799.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_18.png b/TMessagesProj/src/emojis/apple/emoji/0_18.png index a822b52a94..978736fd89 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_18.png and b/TMessagesProj/src/emojis/apple/emoji/0_18.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_180.png b/TMessagesProj/src/emojis/apple/emoji/0_180.png index 8dbb2713fc..680a659c3e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_180.png and b/TMessagesProj/src/emojis/apple/emoji/0_180.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1800.png b/TMessagesProj/src/emojis/apple/emoji/0_1800.png index e379c19ad4..23d4312999 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1800.png and b/TMessagesProj/src/emojis/apple/emoji/0_1800.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1801.png b/TMessagesProj/src/emojis/apple/emoji/0_1801.png index 0fdfbea642..28ccba2710 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1801.png and b/TMessagesProj/src/emojis/apple/emoji/0_1801.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1802.png b/TMessagesProj/src/emojis/apple/emoji/0_1802.png index 6412cb0431..40af53d65a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1802.png and b/TMessagesProj/src/emojis/apple/emoji/0_1802.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1803.png b/TMessagesProj/src/emojis/apple/emoji/0_1803.png index 9ade78197c..011bf647bd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1803.png and b/TMessagesProj/src/emojis/apple/emoji/0_1803.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1804.png b/TMessagesProj/src/emojis/apple/emoji/0_1804.png index 37f820aa30..ce4c07941a 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1804.png and b/TMessagesProj/src/emojis/apple/emoji/0_1804.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1805.png b/TMessagesProj/src/emojis/apple/emoji/0_1805.png index ed3dfdea1e..75fcc1b249 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1805.png and b/TMessagesProj/src/emojis/apple/emoji/0_1805.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1806.png b/TMessagesProj/src/emojis/apple/emoji/0_1806.png index fb23e08fa2..d64a58a9a0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1806.png and b/TMessagesProj/src/emojis/apple/emoji/0_1806.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1807.png b/TMessagesProj/src/emojis/apple/emoji/0_1807.png index 70956856f5..d0f70c9362 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1807.png and b/TMessagesProj/src/emojis/apple/emoji/0_1807.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1808.png b/TMessagesProj/src/emojis/apple/emoji/0_1808.png index b04a2c53cd..3c7d7aef02 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1808.png and b/TMessagesProj/src/emojis/apple/emoji/0_1808.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1809.png b/TMessagesProj/src/emojis/apple/emoji/0_1809.png index 99c6b18838..aaa293b028 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1809.png and b/TMessagesProj/src/emojis/apple/emoji/0_1809.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_181.png b/TMessagesProj/src/emojis/apple/emoji/0_181.png index a815917fc4..609277b69c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_181.png and b/TMessagesProj/src/emojis/apple/emoji/0_181.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1810.png b/TMessagesProj/src/emojis/apple/emoji/0_1810.png index 7be95e8e58..bb24d714e4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1810.png and b/TMessagesProj/src/emojis/apple/emoji/0_1810.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1811.png b/TMessagesProj/src/emojis/apple/emoji/0_1811.png index a955783725..1192999d07 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1811.png and b/TMessagesProj/src/emojis/apple/emoji/0_1811.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1812.png b/TMessagesProj/src/emojis/apple/emoji/0_1812.png index a5561eaf2c..67b31f402e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1812.png and b/TMessagesProj/src/emojis/apple/emoji/0_1812.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1813.png b/TMessagesProj/src/emojis/apple/emoji/0_1813.png index 98bea54469..cf55b06b9a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1813.png and b/TMessagesProj/src/emojis/apple/emoji/0_1813.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1814.png b/TMessagesProj/src/emojis/apple/emoji/0_1814.png index a7349afb17..738e105d14 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1814.png and b/TMessagesProj/src/emojis/apple/emoji/0_1814.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1815.png b/TMessagesProj/src/emojis/apple/emoji/0_1815.png index 9b286a2434..6dbfbf9daa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1815.png and b/TMessagesProj/src/emojis/apple/emoji/0_1815.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1816.png b/TMessagesProj/src/emojis/apple/emoji/0_1816.png index 75b5d34e3a..1aaf4867b1 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1816.png and b/TMessagesProj/src/emojis/apple/emoji/0_1816.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1817.png b/TMessagesProj/src/emojis/apple/emoji/0_1817.png index 7ccf8d7bf2..8cddcef7c5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1817.png and b/TMessagesProj/src/emojis/apple/emoji/0_1817.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1818.png b/TMessagesProj/src/emojis/apple/emoji/0_1818.png index 5de6265aed..ec7a77ec6d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1818.png and b/TMessagesProj/src/emojis/apple/emoji/0_1818.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1819.png b/TMessagesProj/src/emojis/apple/emoji/0_1819.png index d237ed39b8..373c092847 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1819.png and b/TMessagesProj/src/emojis/apple/emoji/0_1819.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_182.png b/TMessagesProj/src/emojis/apple/emoji/0_182.png index 30d78f9a9c..035cf5e92d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_182.png and b/TMessagesProj/src/emojis/apple/emoji/0_182.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1820.png b/TMessagesProj/src/emojis/apple/emoji/0_1820.png index 8c01f51fff..4442df1c14 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1820.png and b/TMessagesProj/src/emojis/apple/emoji/0_1820.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1821.png b/TMessagesProj/src/emojis/apple/emoji/0_1821.png index 4b652f4e62..e4932d66f6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1821.png and b/TMessagesProj/src/emojis/apple/emoji/0_1821.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1822.png b/TMessagesProj/src/emojis/apple/emoji/0_1822.png index e16e939f74..38c85ce2e7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1822.png and b/TMessagesProj/src/emojis/apple/emoji/0_1822.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1823.png b/TMessagesProj/src/emojis/apple/emoji/0_1823.png index 4388f73d4d..cd00e3c219 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1823.png and b/TMessagesProj/src/emojis/apple/emoji/0_1823.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1824.png b/TMessagesProj/src/emojis/apple/emoji/0_1824.png index 22bcc86e4b..ef8f920050 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1824.png and b/TMessagesProj/src/emojis/apple/emoji/0_1824.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1825.png b/TMessagesProj/src/emojis/apple/emoji/0_1825.png index c7c0d1b7bc..5d3d552b67 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1825.png and b/TMessagesProj/src/emojis/apple/emoji/0_1825.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1826.png b/TMessagesProj/src/emojis/apple/emoji/0_1826.png index 47d1169274..faa638ca0c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1826.png and b/TMessagesProj/src/emojis/apple/emoji/0_1826.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1827.png b/TMessagesProj/src/emojis/apple/emoji/0_1827.png index de01b68dcc..5ec81f45e3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1827.png and b/TMessagesProj/src/emojis/apple/emoji/0_1827.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1828.png b/TMessagesProj/src/emojis/apple/emoji/0_1828.png index 1da64d0098..878223c47a 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1828.png and b/TMessagesProj/src/emojis/apple/emoji/0_1828.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1829.png b/TMessagesProj/src/emojis/apple/emoji/0_1829.png index d43551db1f..8c1d8a3259 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1829.png and b/TMessagesProj/src/emojis/apple/emoji/0_1829.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_183.png b/TMessagesProj/src/emojis/apple/emoji/0_183.png index b4fc9c20ee..fcf111010e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_183.png and b/TMessagesProj/src/emojis/apple/emoji/0_183.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1830.png b/TMessagesProj/src/emojis/apple/emoji/0_1830.png index 30d07c881d..026b6034c6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1830.png and b/TMessagesProj/src/emojis/apple/emoji/0_1830.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1831.png b/TMessagesProj/src/emojis/apple/emoji/0_1831.png index c6b9cb5af0..031997259f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1831.png and b/TMessagesProj/src/emojis/apple/emoji/0_1831.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1832.png b/TMessagesProj/src/emojis/apple/emoji/0_1832.png index 235d2d4736..36b5669c68 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1832.png and b/TMessagesProj/src/emojis/apple/emoji/0_1832.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1833.png b/TMessagesProj/src/emojis/apple/emoji/0_1833.png index 8782525ecd..8ca93a59a6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1833.png and b/TMessagesProj/src/emojis/apple/emoji/0_1833.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1834.png b/TMessagesProj/src/emojis/apple/emoji/0_1834.png index 5fd46a37be..e73b80e4db 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1834.png and b/TMessagesProj/src/emojis/apple/emoji/0_1834.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1835.png b/TMessagesProj/src/emojis/apple/emoji/0_1835.png index 411c39f480..f9c472c16f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1835.png and b/TMessagesProj/src/emojis/apple/emoji/0_1835.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1836.png b/TMessagesProj/src/emojis/apple/emoji/0_1836.png index ee4fc1835e..200c15b60f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1836.png and b/TMessagesProj/src/emojis/apple/emoji/0_1836.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1837.png b/TMessagesProj/src/emojis/apple/emoji/0_1837.png index c38bc8cfa9..7e7ab5a150 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1837.png and b/TMessagesProj/src/emojis/apple/emoji/0_1837.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1838.png b/TMessagesProj/src/emojis/apple/emoji/0_1838.png index 142ed42cb7..18db0a26ac 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1838.png and b/TMessagesProj/src/emojis/apple/emoji/0_1838.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1839.png b/TMessagesProj/src/emojis/apple/emoji/0_1839.png index db89254c25..313f16ce63 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1839.png and b/TMessagesProj/src/emojis/apple/emoji/0_1839.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_184.png b/TMessagesProj/src/emojis/apple/emoji/0_184.png index f2c5eef02d..08ac775e38 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_184.png and b/TMessagesProj/src/emojis/apple/emoji/0_184.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1840.png b/TMessagesProj/src/emojis/apple/emoji/0_1840.png index 51347d7d0c..a4e2797df7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1840.png and b/TMessagesProj/src/emojis/apple/emoji/0_1840.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1841.png b/TMessagesProj/src/emojis/apple/emoji/0_1841.png index 6d010bc7fa..723fe25ef3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1841.png and b/TMessagesProj/src/emojis/apple/emoji/0_1841.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1842.png b/TMessagesProj/src/emojis/apple/emoji/0_1842.png index 79b9e74637..9e130b219f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1842.png and b/TMessagesProj/src/emojis/apple/emoji/0_1842.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1843.png b/TMessagesProj/src/emojis/apple/emoji/0_1843.png index 6c0be6a7e8..feae3968de 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1843.png and b/TMessagesProj/src/emojis/apple/emoji/0_1843.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1844.png b/TMessagesProj/src/emojis/apple/emoji/0_1844.png index d3d2f05dc3..9374d1f20e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1844.png and b/TMessagesProj/src/emojis/apple/emoji/0_1844.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1845.png b/TMessagesProj/src/emojis/apple/emoji/0_1845.png index 20172d1406..c98ca7d4b7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1845.png and b/TMessagesProj/src/emojis/apple/emoji/0_1845.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1846.png b/TMessagesProj/src/emojis/apple/emoji/0_1846.png index 60ae7c5b30..f92a9c533e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1846.png and b/TMessagesProj/src/emojis/apple/emoji/0_1846.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1847.png b/TMessagesProj/src/emojis/apple/emoji/0_1847.png index 9d82e6cb79..349ce80d38 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1847.png and b/TMessagesProj/src/emojis/apple/emoji/0_1847.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1848.png b/TMessagesProj/src/emojis/apple/emoji/0_1848.png index c055320afa..c6d650692f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1848.png and b/TMessagesProj/src/emojis/apple/emoji/0_1848.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1849.png b/TMessagesProj/src/emojis/apple/emoji/0_1849.png index a79f3cd08f..c2d03bb812 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1849.png and b/TMessagesProj/src/emojis/apple/emoji/0_1849.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_185.png b/TMessagesProj/src/emojis/apple/emoji/0_185.png index 2b4c4292be..6c846c7706 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_185.png and b/TMessagesProj/src/emojis/apple/emoji/0_185.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1850.png b/TMessagesProj/src/emojis/apple/emoji/0_1850.png index e9c3d7bce4..caf01b84ec 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1850.png and b/TMessagesProj/src/emojis/apple/emoji/0_1850.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1851.png b/TMessagesProj/src/emojis/apple/emoji/0_1851.png index d7657e2151..b40794472b 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1851.png and b/TMessagesProj/src/emojis/apple/emoji/0_1851.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1852.png b/TMessagesProj/src/emojis/apple/emoji/0_1852.png index f7755f0442..9ecc0b1d4b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1852.png and b/TMessagesProj/src/emojis/apple/emoji/0_1852.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1853.png b/TMessagesProj/src/emojis/apple/emoji/0_1853.png index 45267aa01d..9742c0e48f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1853.png and b/TMessagesProj/src/emojis/apple/emoji/0_1853.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1854.png b/TMessagesProj/src/emojis/apple/emoji/0_1854.png index 06a817412d..6a045ee168 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1854.png and b/TMessagesProj/src/emojis/apple/emoji/0_1854.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1855.png b/TMessagesProj/src/emojis/apple/emoji/0_1855.png index 39469f31e2..363debe95c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1855.png and b/TMessagesProj/src/emojis/apple/emoji/0_1855.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1856.png b/TMessagesProj/src/emojis/apple/emoji/0_1856.png index 750c1c48b2..deeb38927d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1856.png and b/TMessagesProj/src/emojis/apple/emoji/0_1856.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1857.png b/TMessagesProj/src/emojis/apple/emoji/0_1857.png index 8ce57482c0..484e19797c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1857.png and b/TMessagesProj/src/emojis/apple/emoji/0_1857.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1858.png b/TMessagesProj/src/emojis/apple/emoji/0_1858.png index a5b61d3da1..fe34ec8356 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1858.png and b/TMessagesProj/src/emojis/apple/emoji/0_1858.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1859.png b/TMessagesProj/src/emojis/apple/emoji/0_1859.png index b0877e2189..4b353dcaa2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1859.png and b/TMessagesProj/src/emojis/apple/emoji/0_1859.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_186.png b/TMessagesProj/src/emojis/apple/emoji/0_186.png index eb53b1efec..0f234bdcfa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_186.png and b/TMessagesProj/src/emojis/apple/emoji/0_186.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1860.png b/TMessagesProj/src/emojis/apple/emoji/0_1860.png index 5988e3866b..98bf9e64dc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1860.png and b/TMessagesProj/src/emojis/apple/emoji/0_1860.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1861.png b/TMessagesProj/src/emojis/apple/emoji/0_1861.png index 3db025cbdd..fc9ec5fc12 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1861.png and b/TMessagesProj/src/emojis/apple/emoji/0_1861.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1862.png b/TMessagesProj/src/emojis/apple/emoji/0_1862.png index d17465d753..458e9828e2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1862.png and b/TMessagesProj/src/emojis/apple/emoji/0_1862.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1863.png b/TMessagesProj/src/emojis/apple/emoji/0_1863.png index af478204d9..4c7d96ec41 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1863.png and b/TMessagesProj/src/emojis/apple/emoji/0_1863.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1864.png b/TMessagesProj/src/emojis/apple/emoji/0_1864.png index 97ac3c41e2..464333a8de 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1864.png and b/TMessagesProj/src/emojis/apple/emoji/0_1864.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1865.png b/TMessagesProj/src/emojis/apple/emoji/0_1865.png index 754445cb6c..917df7f6d0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1865.png and b/TMessagesProj/src/emojis/apple/emoji/0_1865.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1866.png b/TMessagesProj/src/emojis/apple/emoji/0_1866.png index b2c2dc766d..ad719be70c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1866.png and b/TMessagesProj/src/emojis/apple/emoji/0_1866.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1867.png b/TMessagesProj/src/emojis/apple/emoji/0_1867.png index 3fbc773813..268dbfca16 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1867.png and b/TMessagesProj/src/emojis/apple/emoji/0_1867.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1868.png b/TMessagesProj/src/emojis/apple/emoji/0_1868.png index 4ec85e3298..0d7740dbc6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1868.png and b/TMessagesProj/src/emojis/apple/emoji/0_1868.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1869.png b/TMessagesProj/src/emojis/apple/emoji/0_1869.png index 93ce5a08b1..1580dd7657 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1869.png and b/TMessagesProj/src/emojis/apple/emoji/0_1869.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_187.png b/TMessagesProj/src/emojis/apple/emoji/0_187.png index 4296bc834e..955b0e9b9e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_187.png and b/TMessagesProj/src/emojis/apple/emoji/0_187.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1870.png b/TMessagesProj/src/emojis/apple/emoji/0_1870.png index eefb5511f7..99afae6231 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1870.png and b/TMessagesProj/src/emojis/apple/emoji/0_1870.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1871.png b/TMessagesProj/src/emojis/apple/emoji/0_1871.png index df8adc6859..8f76738fe3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1871.png and b/TMessagesProj/src/emojis/apple/emoji/0_1871.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1872.png b/TMessagesProj/src/emojis/apple/emoji/0_1872.png index fcc74ba36a..2450e3f852 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1872.png and b/TMessagesProj/src/emojis/apple/emoji/0_1872.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1873.png b/TMessagesProj/src/emojis/apple/emoji/0_1873.png index 3849eaa47e..33d3ff1360 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1873.png and b/TMessagesProj/src/emojis/apple/emoji/0_1873.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1874.png b/TMessagesProj/src/emojis/apple/emoji/0_1874.png index 609fb32a88..514b33ab7c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1874.png and b/TMessagesProj/src/emojis/apple/emoji/0_1874.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1875.png b/TMessagesProj/src/emojis/apple/emoji/0_1875.png index 8a8cb35c81..554d7977a3 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1875.png and b/TMessagesProj/src/emojis/apple/emoji/0_1875.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1876.png b/TMessagesProj/src/emojis/apple/emoji/0_1876.png index e68400408f..659d231e52 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1876.png and b/TMessagesProj/src/emojis/apple/emoji/0_1876.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1877.png b/TMessagesProj/src/emojis/apple/emoji/0_1877.png index de9555bea7..0a3e3507c0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1877.png and b/TMessagesProj/src/emojis/apple/emoji/0_1877.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1878.png b/TMessagesProj/src/emojis/apple/emoji/0_1878.png index 2a697ddc86..447610e129 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1878.png and b/TMessagesProj/src/emojis/apple/emoji/0_1878.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1879.png b/TMessagesProj/src/emojis/apple/emoji/0_1879.png index e81d9a897b..518ab9cec8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1879.png and b/TMessagesProj/src/emojis/apple/emoji/0_1879.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_188.png b/TMessagesProj/src/emojis/apple/emoji/0_188.png index 98e6beaa7c..1b65ce93a8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_188.png and b/TMessagesProj/src/emojis/apple/emoji/0_188.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1880.png b/TMessagesProj/src/emojis/apple/emoji/0_1880.png index 5265615fc5..3a22cc3778 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1880.png and b/TMessagesProj/src/emojis/apple/emoji/0_1880.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1881.png b/TMessagesProj/src/emojis/apple/emoji/0_1881.png index 1655fcdeff..5c5b882450 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1881.png and b/TMessagesProj/src/emojis/apple/emoji/0_1881.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1882.png b/TMessagesProj/src/emojis/apple/emoji/0_1882.png index ab47694bb1..5b3b281f01 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1882.png and b/TMessagesProj/src/emojis/apple/emoji/0_1882.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1883.png b/TMessagesProj/src/emojis/apple/emoji/0_1883.png index a5e2ec41a1..73ee12b4d7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1883.png and b/TMessagesProj/src/emojis/apple/emoji/0_1883.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1884.png b/TMessagesProj/src/emojis/apple/emoji/0_1884.png index 7ba94cdeeb..00bfdcb1f4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1884.png and b/TMessagesProj/src/emojis/apple/emoji/0_1884.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1885.png b/TMessagesProj/src/emojis/apple/emoji/0_1885.png index 5b6ae61a88..48c7f4020c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1885.png and b/TMessagesProj/src/emojis/apple/emoji/0_1885.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1886.png b/TMessagesProj/src/emojis/apple/emoji/0_1886.png index 6143d23b6a..f8ee4631e1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1886.png and b/TMessagesProj/src/emojis/apple/emoji/0_1886.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1887.png b/TMessagesProj/src/emojis/apple/emoji/0_1887.png index 4c6db4c568..586dd2b43c 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1887.png and b/TMessagesProj/src/emojis/apple/emoji/0_1887.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1888.png b/TMessagesProj/src/emojis/apple/emoji/0_1888.png index f0f6873ac8..8d4d22d554 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1888.png and b/TMessagesProj/src/emojis/apple/emoji/0_1888.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1889.png b/TMessagesProj/src/emojis/apple/emoji/0_1889.png index fbc42fecea..736bbb0878 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1889.png and b/TMessagesProj/src/emojis/apple/emoji/0_1889.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_189.png b/TMessagesProj/src/emojis/apple/emoji/0_189.png index 879c434da7..a4bb3a9922 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_189.png and b/TMessagesProj/src/emojis/apple/emoji/0_189.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1890.png b/TMessagesProj/src/emojis/apple/emoji/0_1890.png index 04c54aea35..6b618b5753 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1890.png and b/TMessagesProj/src/emojis/apple/emoji/0_1890.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1891.png b/TMessagesProj/src/emojis/apple/emoji/0_1891.png index 78ccd7e7e8..7b52ff016f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1891.png and b/TMessagesProj/src/emojis/apple/emoji/0_1891.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1892.png b/TMessagesProj/src/emojis/apple/emoji/0_1892.png index 14bdd14112..167db99ea7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1892.png and b/TMessagesProj/src/emojis/apple/emoji/0_1892.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1893.png b/TMessagesProj/src/emojis/apple/emoji/0_1893.png index 4498e0fbf6..8d04e6934c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1893.png and b/TMessagesProj/src/emojis/apple/emoji/0_1893.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1894.png b/TMessagesProj/src/emojis/apple/emoji/0_1894.png index 052eaff767..1d67afc5e1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1894.png and b/TMessagesProj/src/emojis/apple/emoji/0_1894.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1895.png b/TMessagesProj/src/emojis/apple/emoji/0_1895.png index 898a1f3c37..2e038ece6e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1895.png and b/TMessagesProj/src/emojis/apple/emoji/0_1895.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1896.png b/TMessagesProj/src/emojis/apple/emoji/0_1896.png index 8a58f9e8f2..fee075b40a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1896.png and b/TMessagesProj/src/emojis/apple/emoji/0_1896.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1897.png b/TMessagesProj/src/emojis/apple/emoji/0_1897.png index 29842a4cf0..d433258be5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1897.png and b/TMessagesProj/src/emojis/apple/emoji/0_1897.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1898.png b/TMessagesProj/src/emojis/apple/emoji/0_1898.png index 74d473fbd3..6d14d46659 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1898.png and b/TMessagesProj/src/emojis/apple/emoji/0_1898.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1899.png b/TMessagesProj/src/emojis/apple/emoji/0_1899.png index 18c8d324f2..c5aa51a049 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1899.png and b/TMessagesProj/src/emojis/apple/emoji/0_1899.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_19.png b/TMessagesProj/src/emojis/apple/emoji/0_19.png index 4a7ab143f1..a9958864d0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_19.png and b/TMessagesProj/src/emojis/apple/emoji/0_19.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_190.png b/TMessagesProj/src/emojis/apple/emoji/0_190.png index 620325b1aa..c26c6be8e6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_190.png and b/TMessagesProj/src/emojis/apple/emoji/0_190.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1900.png b/TMessagesProj/src/emojis/apple/emoji/0_1900.png index 509580b348..7523629d45 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1900.png and b/TMessagesProj/src/emojis/apple/emoji/0_1900.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1901.png b/TMessagesProj/src/emojis/apple/emoji/0_1901.png index 5d448ad8db..e96f3d1fa3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1901.png and b/TMessagesProj/src/emojis/apple/emoji/0_1901.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1902.png b/TMessagesProj/src/emojis/apple/emoji/0_1902.png index c64f8cdeab..42e3dd4eb5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1902.png and b/TMessagesProj/src/emojis/apple/emoji/0_1902.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1903.png b/TMessagesProj/src/emojis/apple/emoji/0_1903.png index db10a63050..950a83acf2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1903.png and b/TMessagesProj/src/emojis/apple/emoji/0_1903.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1904.png b/TMessagesProj/src/emojis/apple/emoji/0_1904.png index e5417b69ef..a66b5ca2de 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1904.png and b/TMessagesProj/src/emojis/apple/emoji/0_1904.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1905.png b/TMessagesProj/src/emojis/apple/emoji/0_1905.png index 7a045517c0..414b0dfdc7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1905.png and b/TMessagesProj/src/emojis/apple/emoji/0_1905.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1906.png b/TMessagesProj/src/emojis/apple/emoji/0_1906.png index df1aed3059..ee82c6653d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1906.png and b/TMessagesProj/src/emojis/apple/emoji/0_1906.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1907.png b/TMessagesProj/src/emojis/apple/emoji/0_1907.png index 633ba9b3b6..8031f01256 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1907.png and b/TMessagesProj/src/emojis/apple/emoji/0_1907.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1908.png b/TMessagesProj/src/emojis/apple/emoji/0_1908.png index 2af1b2ee1e..35510cf1a3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1908.png and b/TMessagesProj/src/emojis/apple/emoji/0_1908.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1909.png b/TMessagesProj/src/emojis/apple/emoji/0_1909.png index 3c60ebe9cd..3786be0f21 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1909.png and b/TMessagesProj/src/emojis/apple/emoji/0_1909.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_191.png b/TMessagesProj/src/emojis/apple/emoji/0_191.png index 1967853c37..a90ce0c040 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_191.png and b/TMessagesProj/src/emojis/apple/emoji/0_191.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1910.png b/TMessagesProj/src/emojis/apple/emoji/0_1910.png index d08a8bdd44..79b22df25c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1910.png and b/TMessagesProj/src/emojis/apple/emoji/0_1910.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1911.png b/TMessagesProj/src/emojis/apple/emoji/0_1911.png index 31702ac6db..b3fde0d61d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1911.png and b/TMessagesProj/src/emojis/apple/emoji/0_1911.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1912.png b/TMessagesProj/src/emojis/apple/emoji/0_1912.png index 04ef514be1..9389110f4b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1912.png and b/TMessagesProj/src/emojis/apple/emoji/0_1912.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1913.png b/TMessagesProj/src/emojis/apple/emoji/0_1913.png index 08225d6584..7a72c88f08 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1913.png and b/TMessagesProj/src/emojis/apple/emoji/0_1913.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1914.png b/TMessagesProj/src/emojis/apple/emoji/0_1914.png index 6f1f240ef6..82d34d3bca 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1914.png and b/TMessagesProj/src/emojis/apple/emoji/0_1914.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1915.png b/TMessagesProj/src/emojis/apple/emoji/0_1915.png index fba46d9529..cbc8094c23 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1915.png and b/TMessagesProj/src/emojis/apple/emoji/0_1915.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1916.png b/TMessagesProj/src/emojis/apple/emoji/0_1916.png index 612a69843f..1413b513e3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1916.png and b/TMessagesProj/src/emojis/apple/emoji/0_1916.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1917.png b/TMessagesProj/src/emojis/apple/emoji/0_1917.png index 903c2556c3..50912886ab 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1917.png and b/TMessagesProj/src/emojis/apple/emoji/0_1917.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1918.png b/TMessagesProj/src/emojis/apple/emoji/0_1918.png index 1d7116b95d..f77ede85db 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1918.png and b/TMessagesProj/src/emojis/apple/emoji/0_1918.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1919.png b/TMessagesProj/src/emojis/apple/emoji/0_1919.png index f6103c88f8..4b0382731b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1919.png and b/TMessagesProj/src/emojis/apple/emoji/0_1919.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_192.png b/TMessagesProj/src/emojis/apple/emoji/0_192.png index d033ae9619..8cb759eedd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_192.png and b/TMessagesProj/src/emojis/apple/emoji/0_192.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1920.png b/TMessagesProj/src/emojis/apple/emoji/0_1920.png index 1aca271682..de8810dc16 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1920.png and b/TMessagesProj/src/emojis/apple/emoji/0_1920.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1921.png b/TMessagesProj/src/emojis/apple/emoji/0_1921.png index bf92d286d6..5f3ca38423 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1921.png and b/TMessagesProj/src/emojis/apple/emoji/0_1921.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1922.png b/TMessagesProj/src/emojis/apple/emoji/0_1922.png index cf06b6b445..42764476d6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1922.png and b/TMessagesProj/src/emojis/apple/emoji/0_1922.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1923.png b/TMessagesProj/src/emojis/apple/emoji/0_1923.png index 33f7da5cad..1c2bca65ab 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1923.png and b/TMessagesProj/src/emojis/apple/emoji/0_1923.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1924.png b/TMessagesProj/src/emojis/apple/emoji/0_1924.png index e2cd1dbcc4..3550e7244d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1924.png and b/TMessagesProj/src/emojis/apple/emoji/0_1924.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1925.png b/TMessagesProj/src/emojis/apple/emoji/0_1925.png index 40a1c37d75..828b2870dc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1925.png and b/TMessagesProj/src/emojis/apple/emoji/0_1925.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1926.png b/TMessagesProj/src/emojis/apple/emoji/0_1926.png index 6fd13e0112..76d7f30c0d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1926.png and b/TMessagesProj/src/emojis/apple/emoji/0_1926.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1927.png b/TMessagesProj/src/emojis/apple/emoji/0_1927.png index d3755e359d..1456998353 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1927.png and b/TMessagesProj/src/emojis/apple/emoji/0_1927.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1928.png b/TMessagesProj/src/emojis/apple/emoji/0_1928.png index 59be233e86..ccfbd30388 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1928.png and b/TMessagesProj/src/emojis/apple/emoji/0_1928.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1929.png b/TMessagesProj/src/emojis/apple/emoji/0_1929.png index 6fc2846274..ed2de51ca6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1929.png and b/TMessagesProj/src/emojis/apple/emoji/0_1929.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_193.png b/TMessagesProj/src/emojis/apple/emoji/0_193.png index b66edd9f97..59cf31e74c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_193.png and b/TMessagesProj/src/emojis/apple/emoji/0_193.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1930.png b/TMessagesProj/src/emojis/apple/emoji/0_1930.png index a0c03607cd..4eefbdb785 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1930.png and b/TMessagesProj/src/emojis/apple/emoji/0_1930.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1931.png b/TMessagesProj/src/emojis/apple/emoji/0_1931.png index 08e952fed5..d447139e71 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1931.png and b/TMessagesProj/src/emojis/apple/emoji/0_1931.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1932.png b/TMessagesProj/src/emojis/apple/emoji/0_1932.png index 1c64e57926..ae3f784cb0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1932.png and b/TMessagesProj/src/emojis/apple/emoji/0_1932.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1933.png b/TMessagesProj/src/emojis/apple/emoji/0_1933.png index ba954c1665..3dc5773d8e 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1933.png and b/TMessagesProj/src/emojis/apple/emoji/0_1933.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1934.png b/TMessagesProj/src/emojis/apple/emoji/0_1934.png index f17c2d1ecb..f72824cfad 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1934.png and b/TMessagesProj/src/emojis/apple/emoji/0_1934.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1935.png b/TMessagesProj/src/emojis/apple/emoji/0_1935.png index 3e5669c1cc..757c539aef 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1935.png and b/TMessagesProj/src/emojis/apple/emoji/0_1935.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1936.png b/TMessagesProj/src/emojis/apple/emoji/0_1936.png index 2322026c39..530924c341 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1936.png and b/TMessagesProj/src/emojis/apple/emoji/0_1936.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1937.png b/TMessagesProj/src/emojis/apple/emoji/0_1937.png index 031601c4a9..c7378ba578 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1937.png and b/TMessagesProj/src/emojis/apple/emoji/0_1937.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1938.png b/TMessagesProj/src/emojis/apple/emoji/0_1938.png index 4bb790ff69..38d3ec2b23 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1938.png and b/TMessagesProj/src/emojis/apple/emoji/0_1938.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1939.png b/TMessagesProj/src/emojis/apple/emoji/0_1939.png index 48a83b13c3..9a9778f5d8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1939.png and b/TMessagesProj/src/emojis/apple/emoji/0_1939.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_194.png b/TMessagesProj/src/emojis/apple/emoji/0_194.png index 7aee1806b6..dfe8c23473 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_194.png and b/TMessagesProj/src/emojis/apple/emoji/0_194.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1940.png b/TMessagesProj/src/emojis/apple/emoji/0_1940.png index 8770819649..933d99d852 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1940.png and b/TMessagesProj/src/emojis/apple/emoji/0_1940.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1941.png b/TMessagesProj/src/emojis/apple/emoji/0_1941.png index e21fa21738..fbc4e4763c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1941.png and b/TMessagesProj/src/emojis/apple/emoji/0_1941.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1942.png b/TMessagesProj/src/emojis/apple/emoji/0_1942.png index 5dce658d1b..ec5da6f3f3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1942.png and b/TMessagesProj/src/emojis/apple/emoji/0_1942.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1943.png b/TMessagesProj/src/emojis/apple/emoji/0_1943.png index 05280ba623..4615b22a8d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1943.png and b/TMessagesProj/src/emojis/apple/emoji/0_1943.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1944.png b/TMessagesProj/src/emojis/apple/emoji/0_1944.png index 4746de15b9..644aa82171 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1944.png and b/TMessagesProj/src/emojis/apple/emoji/0_1944.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1945.png b/TMessagesProj/src/emojis/apple/emoji/0_1945.png index 38f2ce0b2a..0d90fa1fbd 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1945.png and b/TMessagesProj/src/emojis/apple/emoji/0_1945.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1946.png b/TMessagesProj/src/emojis/apple/emoji/0_1946.png index 306f088059..aff1308eeb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1946.png and b/TMessagesProj/src/emojis/apple/emoji/0_1946.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1947.png b/TMessagesProj/src/emojis/apple/emoji/0_1947.png index 05448ff438..c35d52b418 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1947.png and b/TMessagesProj/src/emojis/apple/emoji/0_1947.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1948.png b/TMessagesProj/src/emojis/apple/emoji/0_1948.png index d4a98c9b0b..edaad77467 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1948.png and b/TMessagesProj/src/emojis/apple/emoji/0_1948.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1949.png b/TMessagesProj/src/emojis/apple/emoji/0_1949.png index 20ea894e10..d9e1767a77 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1949.png and b/TMessagesProj/src/emojis/apple/emoji/0_1949.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_195.png b/TMessagesProj/src/emojis/apple/emoji/0_195.png index 10dff63538..f0f2a8d4ba 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_195.png and b/TMessagesProj/src/emojis/apple/emoji/0_195.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1950.png b/TMessagesProj/src/emojis/apple/emoji/0_1950.png index 3c9aad88a9..a86198738a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1950.png and b/TMessagesProj/src/emojis/apple/emoji/0_1950.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1951.png b/TMessagesProj/src/emojis/apple/emoji/0_1951.png index 3d6c4cb3c4..a4e1be9ad5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1951.png and b/TMessagesProj/src/emojis/apple/emoji/0_1951.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1952.png b/TMessagesProj/src/emojis/apple/emoji/0_1952.png index 68f06b683c..415d6b94ad 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1952.png and b/TMessagesProj/src/emojis/apple/emoji/0_1952.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1953.png b/TMessagesProj/src/emojis/apple/emoji/0_1953.png index 97c529579b..16c0c20dcc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1953.png and b/TMessagesProj/src/emojis/apple/emoji/0_1953.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1954.png b/TMessagesProj/src/emojis/apple/emoji/0_1954.png index e973bc375b..ee21a09a07 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1954.png and b/TMessagesProj/src/emojis/apple/emoji/0_1954.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1955.png b/TMessagesProj/src/emojis/apple/emoji/0_1955.png index c96f35ecc6..0107ad9241 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1955.png and b/TMessagesProj/src/emojis/apple/emoji/0_1955.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1956.png b/TMessagesProj/src/emojis/apple/emoji/0_1956.png index 6e5b3f9603..2992b67746 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1956.png and b/TMessagesProj/src/emojis/apple/emoji/0_1956.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1957.png b/TMessagesProj/src/emojis/apple/emoji/0_1957.png index 5523be65f0..6d9585eba8 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1957.png and b/TMessagesProj/src/emojis/apple/emoji/0_1957.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1958.png b/TMessagesProj/src/emojis/apple/emoji/0_1958.png index b0105d2fcf..a20f01a662 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1958.png and b/TMessagesProj/src/emojis/apple/emoji/0_1958.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1959.png b/TMessagesProj/src/emojis/apple/emoji/0_1959.png index 5ca4513e63..1cdc8b07b0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1959.png and b/TMessagesProj/src/emojis/apple/emoji/0_1959.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_196.png b/TMessagesProj/src/emojis/apple/emoji/0_196.png index 3c0a5ec314..dfeadd55b7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_196.png and b/TMessagesProj/src/emojis/apple/emoji/0_196.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1960.png b/TMessagesProj/src/emojis/apple/emoji/0_1960.png index 11810dbfaa..7caad1dae8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1960.png and b/TMessagesProj/src/emojis/apple/emoji/0_1960.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1961.png b/TMessagesProj/src/emojis/apple/emoji/0_1961.png index a96da3d404..f0908725da 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1961.png and b/TMessagesProj/src/emojis/apple/emoji/0_1961.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1962.png b/TMessagesProj/src/emojis/apple/emoji/0_1962.png index 159ae0d4a0..c44b74bc36 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1962.png and b/TMessagesProj/src/emojis/apple/emoji/0_1962.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1963.png b/TMessagesProj/src/emojis/apple/emoji/0_1963.png index ae430912f7..bced729fa7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1963.png and b/TMessagesProj/src/emojis/apple/emoji/0_1963.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1964.png b/TMessagesProj/src/emojis/apple/emoji/0_1964.png index c82ca5a9af..aedb47834e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1964.png and b/TMessagesProj/src/emojis/apple/emoji/0_1964.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1965.png b/TMessagesProj/src/emojis/apple/emoji/0_1965.png index ce4dd8896c..3f7ebc108c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1965.png and b/TMessagesProj/src/emojis/apple/emoji/0_1965.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1966.png b/TMessagesProj/src/emojis/apple/emoji/0_1966.png index 16dcb3d1e9..fff9c6300b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1966.png and b/TMessagesProj/src/emojis/apple/emoji/0_1966.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1967.png b/TMessagesProj/src/emojis/apple/emoji/0_1967.png index ac6f14b18f..65a3e61f37 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1967.png and b/TMessagesProj/src/emojis/apple/emoji/0_1967.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1968.png b/TMessagesProj/src/emojis/apple/emoji/0_1968.png index 3e3f2cfeb1..e26892d158 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1968.png and b/TMessagesProj/src/emojis/apple/emoji/0_1968.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1969.png b/TMessagesProj/src/emojis/apple/emoji/0_1969.png index e3653fe006..cf3d2fac70 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_1969.png and b/TMessagesProj/src/emojis/apple/emoji/0_1969.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_197.png b/TMessagesProj/src/emojis/apple/emoji/0_197.png index d75a1afee4..71d9de9bb8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_197.png and b/TMessagesProj/src/emojis/apple/emoji/0_197.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1970.png b/TMessagesProj/src/emojis/apple/emoji/0_1970.png index 8c5d01795b..5e23d05f4e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1970.png and b/TMessagesProj/src/emojis/apple/emoji/0_1970.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1971.png b/TMessagesProj/src/emojis/apple/emoji/0_1971.png index c8e72f5b35..1cba603554 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1971.png and b/TMessagesProj/src/emojis/apple/emoji/0_1971.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1972.png b/TMessagesProj/src/emojis/apple/emoji/0_1972.png index 604c9721b4..a3e5191722 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1972.png and b/TMessagesProj/src/emojis/apple/emoji/0_1972.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1973.png b/TMessagesProj/src/emojis/apple/emoji/0_1973.png index 9b28186aa9..0a4fc41a10 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1973.png and b/TMessagesProj/src/emojis/apple/emoji/0_1973.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1974.png b/TMessagesProj/src/emojis/apple/emoji/0_1974.png index 6b0ae6d96d..9b352feecb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_1974.png and b/TMessagesProj/src/emojis/apple/emoji/0_1974.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1975.png b/TMessagesProj/src/emojis/apple/emoji/0_1975.png new file mode 100644 index 0000000000..60f4cb0de2 Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/0_1975.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1976.png b/TMessagesProj/src/emojis/apple/emoji/0_1976.png new file mode 100644 index 0000000000..8761dac47b Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/0_1976.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1977.png b/TMessagesProj/src/emojis/apple/emoji/0_1977.png new file mode 100644 index 0000000000..c4780b3878 Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/0_1977.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1978.png b/TMessagesProj/src/emojis/apple/emoji/0_1978.png new file mode 100644 index 0000000000..e62fe3cefc Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/0_1978.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1979.png b/TMessagesProj/src/emojis/apple/emoji/0_1979.png new file mode 100644 index 0000000000..225924e6bd Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/0_1979.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_198.png b/TMessagesProj/src/emojis/apple/emoji/0_198.png index 3b37f6bea7..34768a2a09 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_198.png and b/TMessagesProj/src/emojis/apple/emoji/0_198.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1980.png b/TMessagesProj/src/emojis/apple/emoji/0_1980.png new file mode 100644 index 0000000000..76f4631bae Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/0_1980.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1981.png 
b/TMessagesProj/src/emojis/apple/emoji/0_1981.png new file mode 100644 index 0000000000..ab5975bca1 Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/0_1981.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1982.png b/TMessagesProj/src/emojis/apple/emoji/0_1982.png new file mode 100644 index 0000000000..c741559834 Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/0_1982.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1983.png b/TMessagesProj/src/emojis/apple/emoji/0_1983.png new file mode 100644 index 0000000000..7e6cda2e0c Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/0_1983.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1984.png b/TMessagesProj/src/emojis/apple/emoji/0_1984.png new file mode 100644 index 0000000000..47e6b5d3c0 Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/0_1984.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1985.png b/TMessagesProj/src/emojis/apple/emoji/0_1985.png new file mode 100644 index 0000000000..547b384a67 Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/0_1985.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1986.png b/TMessagesProj/src/emojis/apple/emoji/0_1986.png new file mode 100644 index 0000000000..befcf27cb4 Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/0_1986.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_1987.png b/TMessagesProj/src/emojis/apple/emoji/0_1987.png new file mode 100644 index 0000000000..076b4ebf44 Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/0_1987.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_199.png b/TMessagesProj/src/emojis/apple/emoji/0_199.png index da5b919b0a..cc4d929867 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_199.png and b/TMessagesProj/src/emojis/apple/emoji/0_199.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_2.png b/TMessagesProj/src/emojis/apple/emoji/0_2.png index f8039f4563..4e874d0879 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_2.png and b/TMessagesProj/src/emojis/apple/emoji/0_2.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_20.png b/TMessagesProj/src/emojis/apple/emoji/0_20.png index ff15a772f5..f90c8885c6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_20.png and b/TMessagesProj/src/emojis/apple/emoji/0_20.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_200.png b/TMessagesProj/src/emojis/apple/emoji/0_200.png index 33e6f7dbea..2f02e3370a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_200.png and b/TMessagesProj/src/emojis/apple/emoji/0_200.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_201.png b/TMessagesProj/src/emojis/apple/emoji/0_201.png index 7de6200906..81056c4648 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_201.png and b/TMessagesProj/src/emojis/apple/emoji/0_201.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_202.png b/TMessagesProj/src/emojis/apple/emoji/0_202.png index 152d264080..e5cb5eafe8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_202.png and b/TMessagesProj/src/emojis/apple/emoji/0_202.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_203.png b/TMessagesProj/src/emojis/apple/emoji/0_203.png index d5eb12c3f3..96d733c249 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_203.png and b/TMessagesProj/src/emojis/apple/emoji/0_203.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/0_204.png b/TMessagesProj/src/emojis/apple/emoji/0_204.png index 5ff05840d9..f5692890e7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_204.png and b/TMessagesProj/src/emojis/apple/emoji/0_204.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_205.png b/TMessagesProj/src/emojis/apple/emoji/0_205.png index 223156ccc5..40a2b479d6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_205.png and b/TMessagesProj/src/emojis/apple/emoji/0_205.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_206.png b/TMessagesProj/src/emojis/apple/emoji/0_206.png index 62661f1559..24eef66270 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_206.png and b/TMessagesProj/src/emojis/apple/emoji/0_206.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_207.png b/TMessagesProj/src/emojis/apple/emoji/0_207.png index 4bf471097d..0d76e880c2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_207.png and b/TMessagesProj/src/emojis/apple/emoji/0_207.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_208.png b/TMessagesProj/src/emojis/apple/emoji/0_208.png index 5f0407344b..61d399a95b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_208.png and b/TMessagesProj/src/emojis/apple/emoji/0_208.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_209.png b/TMessagesProj/src/emojis/apple/emoji/0_209.png index 8b799b91b4..84236d0e59 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_209.png and b/TMessagesProj/src/emojis/apple/emoji/0_209.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_21.png b/TMessagesProj/src/emojis/apple/emoji/0_21.png index 8e089ac356..2fc6dbc42a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_21.png and b/TMessagesProj/src/emojis/apple/emoji/0_21.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_210.png b/TMessagesProj/src/emojis/apple/emoji/0_210.png index 988e579f58..29d3b11fff 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_210.png and b/TMessagesProj/src/emojis/apple/emoji/0_210.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_211.png b/TMessagesProj/src/emojis/apple/emoji/0_211.png index eae75e5b64..e670b00342 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_211.png and b/TMessagesProj/src/emojis/apple/emoji/0_211.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_212.png b/TMessagesProj/src/emojis/apple/emoji/0_212.png index c9ed411c2c..621f71f3a5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_212.png and b/TMessagesProj/src/emojis/apple/emoji/0_212.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_213.png b/TMessagesProj/src/emojis/apple/emoji/0_213.png index d27e982e3b..c386f83cb2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_213.png and b/TMessagesProj/src/emojis/apple/emoji/0_213.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_214.png b/TMessagesProj/src/emojis/apple/emoji/0_214.png index bbcb0a8c8e..c402088bc3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_214.png and b/TMessagesProj/src/emojis/apple/emoji/0_214.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_215.png b/TMessagesProj/src/emojis/apple/emoji/0_215.png index f25cc60ece..1e66909d45 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_215.png and b/TMessagesProj/src/emojis/apple/emoji/0_215.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_216.png 
b/TMessagesProj/src/emojis/apple/emoji/0_216.png index 217ba3f39b..bdcc2d689d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_216.png and b/TMessagesProj/src/emojis/apple/emoji/0_216.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_217.png b/TMessagesProj/src/emojis/apple/emoji/0_217.png index b0fa41516d..8d6d9f1dfb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_217.png and b/TMessagesProj/src/emojis/apple/emoji/0_217.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_218.png b/TMessagesProj/src/emojis/apple/emoji/0_218.png index bf6af74e84..068ec49c37 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_218.png and b/TMessagesProj/src/emojis/apple/emoji/0_218.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_219.png b/TMessagesProj/src/emojis/apple/emoji/0_219.png index f3bd999682..43c6527f33 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_219.png and b/TMessagesProj/src/emojis/apple/emoji/0_219.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_22.png b/TMessagesProj/src/emojis/apple/emoji/0_22.png index 0211c1ebbc..e7ccf70c6c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_22.png and b/TMessagesProj/src/emojis/apple/emoji/0_22.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_220.png b/TMessagesProj/src/emojis/apple/emoji/0_220.png index f2a5909129..bfb068eb64 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_220.png and b/TMessagesProj/src/emojis/apple/emoji/0_220.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_221.png b/TMessagesProj/src/emojis/apple/emoji/0_221.png index 976c301c95..c2c8ee6a5d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_221.png and b/TMessagesProj/src/emojis/apple/emoji/0_221.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_222.png b/TMessagesProj/src/emojis/apple/emoji/0_222.png index f5df2bbb17..75c053ac11 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_222.png and b/TMessagesProj/src/emojis/apple/emoji/0_222.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_223.png b/TMessagesProj/src/emojis/apple/emoji/0_223.png index 5c2b8ba240..6f9670cf85 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_223.png and b/TMessagesProj/src/emojis/apple/emoji/0_223.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_224.png b/TMessagesProj/src/emojis/apple/emoji/0_224.png index 3ad94f8aa9..df00ae63c8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_224.png and b/TMessagesProj/src/emojis/apple/emoji/0_224.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_225.png b/TMessagesProj/src/emojis/apple/emoji/0_225.png index d2bab6e657..78018f1bc1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_225.png and b/TMessagesProj/src/emojis/apple/emoji/0_225.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_226.png b/TMessagesProj/src/emojis/apple/emoji/0_226.png index d3fc68e364..1f208a53f9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_226.png and b/TMessagesProj/src/emojis/apple/emoji/0_226.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_227.png b/TMessagesProj/src/emojis/apple/emoji/0_227.png index 2937673290..b3b3e9978e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_227.png and b/TMessagesProj/src/emojis/apple/emoji/0_227.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_228.png b/TMessagesProj/src/emojis/apple/emoji/0_228.png index 86577b0ae5..0bfade75a0 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/0_228.png and b/TMessagesProj/src/emojis/apple/emoji/0_228.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_229.png b/TMessagesProj/src/emojis/apple/emoji/0_229.png index f94c475807..07afce04bd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_229.png and b/TMessagesProj/src/emojis/apple/emoji/0_229.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_23.png b/TMessagesProj/src/emojis/apple/emoji/0_23.png index 4c3acb09ae..f0e3abcc1d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_23.png and b/TMessagesProj/src/emojis/apple/emoji/0_23.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_230.png b/TMessagesProj/src/emojis/apple/emoji/0_230.png index 356c72806d..dae027141c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_230.png and b/TMessagesProj/src/emojis/apple/emoji/0_230.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_231.png b/TMessagesProj/src/emojis/apple/emoji/0_231.png index 4b3df05ae9..311da664c7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_231.png and b/TMessagesProj/src/emojis/apple/emoji/0_231.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_232.png b/TMessagesProj/src/emojis/apple/emoji/0_232.png index 79f171c8e5..04370b5354 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_232.png and b/TMessagesProj/src/emojis/apple/emoji/0_232.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_233.png b/TMessagesProj/src/emojis/apple/emoji/0_233.png index 93bba3ab2c..16d15e9356 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_233.png and b/TMessagesProj/src/emojis/apple/emoji/0_233.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_234.png b/TMessagesProj/src/emojis/apple/emoji/0_234.png index df02fa7261..f08e3a663e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_234.png and b/TMessagesProj/src/emojis/apple/emoji/0_234.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_235.png b/TMessagesProj/src/emojis/apple/emoji/0_235.png index 7391386c12..ea4f12f3be 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_235.png and b/TMessagesProj/src/emojis/apple/emoji/0_235.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_236.png b/TMessagesProj/src/emojis/apple/emoji/0_236.png index e2b25cc606..c744d08242 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_236.png and b/TMessagesProj/src/emojis/apple/emoji/0_236.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_237.png b/TMessagesProj/src/emojis/apple/emoji/0_237.png index b891eafb2b..3920dc8685 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_237.png and b/TMessagesProj/src/emojis/apple/emoji/0_237.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_238.png b/TMessagesProj/src/emojis/apple/emoji/0_238.png index f4fba1912d..f148796c33 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_238.png and b/TMessagesProj/src/emojis/apple/emoji/0_238.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_239.png b/TMessagesProj/src/emojis/apple/emoji/0_239.png index 79fb4f3e26..f7a1b85637 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_239.png and b/TMessagesProj/src/emojis/apple/emoji/0_239.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_24.png b/TMessagesProj/src/emojis/apple/emoji/0_24.png index ff55e65e6e..2a2f7a9dab 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_24.png and 
b/TMessagesProj/src/emojis/apple/emoji/0_24.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_240.png b/TMessagesProj/src/emojis/apple/emoji/0_240.png index b892b12c7e..9f4aadeba5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_240.png and b/TMessagesProj/src/emojis/apple/emoji/0_240.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_241.png b/TMessagesProj/src/emojis/apple/emoji/0_241.png index d7cfb14c18..350f2f3a10 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_241.png and b/TMessagesProj/src/emojis/apple/emoji/0_241.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_242.png b/TMessagesProj/src/emojis/apple/emoji/0_242.png index 49b85ffa3d..584faa6cdd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_242.png and b/TMessagesProj/src/emojis/apple/emoji/0_242.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_243.png b/TMessagesProj/src/emojis/apple/emoji/0_243.png index 6f7163a3b3..14a5a81c47 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_243.png and b/TMessagesProj/src/emojis/apple/emoji/0_243.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_244.png b/TMessagesProj/src/emojis/apple/emoji/0_244.png index b689992f07..5d12e33fde 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_244.png and b/TMessagesProj/src/emojis/apple/emoji/0_244.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_245.png b/TMessagesProj/src/emojis/apple/emoji/0_245.png index f3e8d8f110..752123afb1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_245.png and b/TMessagesProj/src/emojis/apple/emoji/0_245.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_246.png b/TMessagesProj/src/emojis/apple/emoji/0_246.png index afdbdb56cf..2ac006df01 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_246.png and b/TMessagesProj/src/emojis/apple/emoji/0_246.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_247.png b/TMessagesProj/src/emojis/apple/emoji/0_247.png index f3dedb5f7c..5828ed9dce 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_247.png and b/TMessagesProj/src/emojis/apple/emoji/0_247.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_248.png b/TMessagesProj/src/emojis/apple/emoji/0_248.png index 54183f6abe..96d2960073 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_248.png and b/TMessagesProj/src/emojis/apple/emoji/0_248.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_249.png b/TMessagesProj/src/emojis/apple/emoji/0_249.png index edb6575b79..38ad775108 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_249.png and b/TMessagesProj/src/emojis/apple/emoji/0_249.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_25.png b/TMessagesProj/src/emojis/apple/emoji/0_25.png index b10440500d..c3005a9c49 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_25.png and b/TMessagesProj/src/emojis/apple/emoji/0_25.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_250.png b/TMessagesProj/src/emojis/apple/emoji/0_250.png index 5b0f119752..396ff7bfff 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_250.png and b/TMessagesProj/src/emojis/apple/emoji/0_250.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_251.png b/TMessagesProj/src/emojis/apple/emoji/0_251.png index 762239eccd..85adaebce9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_251.png and b/TMessagesProj/src/emojis/apple/emoji/0_251.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/0_252.png b/TMessagesProj/src/emojis/apple/emoji/0_252.png index 1c6aecc2d1..ea08c0ee7d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_252.png and b/TMessagesProj/src/emojis/apple/emoji/0_252.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_253.png b/TMessagesProj/src/emojis/apple/emoji/0_253.png index 3384cf3bf9..b44bd804d4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_253.png and b/TMessagesProj/src/emojis/apple/emoji/0_253.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_254.png b/TMessagesProj/src/emojis/apple/emoji/0_254.png index 4e3f87014e..3142f7e698 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_254.png and b/TMessagesProj/src/emojis/apple/emoji/0_254.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_255.png b/TMessagesProj/src/emojis/apple/emoji/0_255.png index c9bfe9aadf..fa467e0719 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_255.png and b/TMessagesProj/src/emojis/apple/emoji/0_255.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_256.png b/TMessagesProj/src/emojis/apple/emoji/0_256.png index 757422392d..2a756e9634 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_256.png and b/TMessagesProj/src/emojis/apple/emoji/0_256.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_257.png b/TMessagesProj/src/emojis/apple/emoji/0_257.png index 7184b99e66..6e2d5f28f4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_257.png and b/TMessagesProj/src/emojis/apple/emoji/0_257.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_258.png b/TMessagesProj/src/emojis/apple/emoji/0_258.png index 7a8085e275..57a5d3dc4c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_258.png and b/TMessagesProj/src/emojis/apple/emoji/0_258.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_259.png b/TMessagesProj/src/emojis/apple/emoji/0_259.png index 130294bd4b..1dcdcf3663 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_259.png and b/TMessagesProj/src/emojis/apple/emoji/0_259.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_26.png b/TMessagesProj/src/emojis/apple/emoji/0_26.png index 320f92ef6c..f0c4c47ab2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_26.png and b/TMessagesProj/src/emojis/apple/emoji/0_26.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_260.png b/TMessagesProj/src/emojis/apple/emoji/0_260.png index ca537d118c..cc5220c4d7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_260.png and b/TMessagesProj/src/emojis/apple/emoji/0_260.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_261.png b/TMessagesProj/src/emojis/apple/emoji/0_261.png index c40e35752f..ae4e9ba4d4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_261.png and b/TMessagesProj/src/emojis/apple/emoji/0_261.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_262.png b/TMessagesProj/src/emojis/apple/emoji/0_262.png index f664bb734c..92681146fc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_262.png and b/TMessagesProj/src/emojis/apple/emoji/0_262.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_263.png b/TMessagesProj/src/emojis/apple/emoji/0_263.png index 5fa3311918..fc3a283689 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_263.png and b/TMessagesProj/src/emojis/apple/emoji/0_263.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_264.png 
b/TMessagesProj/src/emojis/apple/emoji/0_264.png index fa8e77b76a..58aa43ec9d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_264.png and b/TMessagesProj/src/emojis/apple/emoji/0_264.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_265.png b/TMessagesProj/src/emojis/apple/emoji/0_265.png index 80af07132c..5a33892042 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_265.png and b/TMessagesProj/src/emojis/apple/emoji/0_265.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_266.png b/TMessagesProj/src/emojis/apple/emoji/0_266.png index 7ae590a98b..a6dded035e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_266.png and b/TMessagesProj/src/emojis/apple/emoji/0_266.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_267.png b/TMessagesProj/src/emojis/apple/emoji/0_267.png index 6a7130c6c3..937a86e71a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_267.png and b/TMessagesProj/src/emojis/apple/emoji/0_267.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_268.png b/TMessagesProj/src/emojis/apple/emoji/0_268.png index 41a897f1f3..fd86eed8c5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_268.png and b/TMessagesProj/src/emojis/apple/emoji/0_268.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_269.png b/TMessagesProj/src/emojis/apple/emoji/0_269.png index 6333a3b347..a61b986300 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_269.png and b/TMessagesProj/src/emojis/apple/emoji/0_269.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_27.png b/TMessagesProj/src/emojis/apple/emoji/0_27.png index ed78a8fa09..6ab709ba81 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_27.png and b/TMessagesProj/src/emojis/apple/emoji/0_27.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_270.png b/TMessagesProj/src/emojis/apple/emoji/0_270.png index 89b3041329..c1fc7d9d00 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_270.png and b/TMessagesProj/src/emojis/apple/emoji/0_270.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_271.png b/TMessagesProj/src/emojis/apple/emoji/0_271.png index dfc38c35d6..90169a521f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_271.png and b/TMessagesProj/src/emojis/apple/emoji/0_271.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_272.png b/TMessagesProj/src/emojis/apple/emoji/0_272.png index 42536aed12..dca3bb8e03 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_272.png and b/TMessagesProj/src/emojis/apple/emoji/0_272.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_273.png b/TMessagesProj/src/emojis/apple/emoji/0_273.png index 2a09020341..3c71f883e1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_273.png and b/TMessagesProj/src/emojis/apple/emoji/0_273.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_274.png b/TMessagesProj/src/emojis/apple/emoji/0_274.png index 7aad56ff56..6d0435c09e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_274.png and b/TMessagesProj/src/emojis/apple/emoji/0_274.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_275.png b/TMessagesProj/src/emojis/apple/emoji/0_275.png index 7fbc299974..3374c81b0d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_275.png and b/TMessagesProj/src/emojis/apple/emoji/0_275.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_276.png b/TMessagesProj/src/emojis/apple/emoji/0_276.png index 05d6912eb6..c07ab92fc4 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/0_276.png and b/TMessagesProj/src/emojis/apple/emoji/0_276.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_277.png b/TMessagesProj/src/emojis/apple/emoji/0_277.png index 06502c4ab2..c280580d88 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_277.png and b/TMessagesProj/src/emojis/apple/emoji/0_277.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_278.png b/TMessagesProj/src/emojis/apple/emoji/0_278.png index 379f720f35..78ef3677b8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_278.png and b/TMessagesProj/src/emojis/apple/emoji/0_278.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_279.png b/TMessagesProj/src/emojis/apple/emoji/0_279.png index 4dcb502f0c..956cbfe674 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_279.png and b/TMessagesProj/src/emojis/apple/emoji/0_279.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_28.png b/TMessagesProj/src/emojis/apple/emoji/0_28.png index 027e22a8a2..6293fc1489 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_28.png and b/TMessagesProj/src/emojis/apple/emoji/0_28.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_280.png b/TMessagesProj/src/emojis/apple/emoji/0_280.png index 7efdc4e4ec..6103a88608 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_280.png and b/TMessagesProj/src/emojis/apple/emoji/0_280.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_281.png b/TMessagesProj/src/emojis/apple/emoji/0_281.png index 187eb95491..964c66e176 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_281.png and b/TMessagesProj/src/emojis/apple/emoji/0_281.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_282.png b/TMessagesProj/src/emojis/apple/emoji/0_282.png index e3c1d5de1f..6faa9406ab 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_282.png and b/TMessagesProj/src/emojis/apple/emoji/0_282.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_283.png b/TMessagesProj/src/emojis/apple/emoji/0_283.png index dfe043d076..8f28816baa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_283.png and b/TMessagesProj/src/emojis/apple/emoji/0_283.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_284.png b/TMessagesProj/src/emojis/apple/emoji/0_284.png index 06a4a0897f..7c23766f68 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_284.png and b/TMessagesProj/src/emojis/apple/emoji/0_284.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_285.png b/TMessagesProj/src/emojis/apple/emoji/0_285.png index 0987ea33af..32a3bd7cb1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_285.png and b/TMessagesProj/src/emojis/apple/emoji/0_285.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_286.png b/TMessagesProj/src/emojis/apple/emoji/0_286.png index d0dcc23895..5f09a3102c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_286.png and b/TMessagesProj/src/emojis/apple/emoji/0_286.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_287.png b/TMessagesProj/src/emojis/apple/emoji/0_287.png index f735dd9410..b64f5b28ae 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_287.png and b/TMessagesProj/src/emojis/apple/emoji/0_287.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_288.png b/TMessagesProj/src/emojis/apple/emoji/0_288.png index 812c4b2104..46c0f74533 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_288.png and 
b/TMessagesProj/src/emojis/apple/emoji/0_288.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_289.png b/TMessagesProj/src/emojis/apple/emoji/0_289.png index beea1086a6..3b0490afa1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_289.png and b/TMessagesProj/src/emojis/apple/emoji/0_289.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_29.png b/TMessagesProj/src/emojis/apple/emoji/0_29.png index 153165541f..668789ec57 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_29.png and b/TMessagesProj/src/emojis/apple/emoji/0_29.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_290.png b/TMessagesProj/src/emojis/apple/emoji/0_290.png index 00dc878b83..7cb6088c1d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_290.png and b/TMessagesProj/src/emojis/apple/emoji/0_290.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_291.png b/TMessagesProj/src/emojis/apple/emoji/0_291.png index f11b6ef447..31b4e0af04 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_291.png and b/TMessagesProj/src/emojis/apple/emoji/0_291.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_292.png b/TMessagesProj/src/emojis/apple/emoji/0_292.png index dba063563b..f9ed4afb82 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_292.png and b/TMessagesProj/src/emojis/apple/emoji/0_292.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_293.png b/TMessagesProj/src/emojis/apple/emoji/0_293.png index d7576be2f1..f699b2a053 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_293.png and b/TMessagesProj/src/emojis/apple/emoji/0_293.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_294.png b/TMessagesProj/src/emojis/apple/emoji/0_294.png index f8120e6839..94cc0aa8a4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_294.png and b/TMessagesProj/src/emojis/apple/emoji/0_294.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_295.png b/TMessagesProj/src/emojis/apple/emoji/0_295.png index 82ce5363e2..2acd36be89 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_295.png and b/TMessagesProj/src/emojis/apple/emoji/0_295.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_296.png b/TMessagesProj/src/emojis/apple/emoji/0_296.png index e4c95ce426..c81144bd15 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_296.png and b/TMessagesProj/src/emojis/apple/emoji/0_296.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_297.png b/TMessagesProj/src/emojis/apple/emoji/0_297.png index 37a6dbe59c..6141b8ccd4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_297.png and b/TMessagesProj/src/emojis/apple/emoji/0_297.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_298.png b/TMessagesProj/src/emojis/apple/emoji/0_298.png index 4d5d49b583..d098140075 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_298.png and b/TMessagesProj/src/emojis/apple/emoji/0_298.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_299.png b/TMessagesProj/src/emojis/apple/emoji/0_299.png index 41bb51aa89..191bf57764 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_299.png and b/TMessagesProj/src/emojis/apple/emoji/0_299.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_3.png b/TMessagesProj/src/emojis/apple/emoji/0_3.png index e15c6229e0..198e087c06 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_3.png and b/TMessagesProj/src/emojis/apple/emoji/0_3.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/0_30.png b/TMessagesProj/src/emojis/apple/emoji/0_30.png index 569797aad3..58ce887986 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_30.png and b/TMessagesProj/src/emojis/apple/emoji/0_30.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_300.png b/TMessagesProj/src/emojis/apple/emoji/0_300.png index 3470ace927..f260a87432 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_300.png and b/TMessagesProj/src/emojis/apple/emoji/0_300.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_301.png b/TMessagesProj/src/emojis/apple/emoji/0_301.png index dcdfd2f506..6cea54ee62 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_301.png and b/TMessagesProj/src/emojis/apple/emoji/0_301.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_302.png b/TMessagesProj/src/emojis/apple/emoji/0_302.png index be56c4a3fd..50ad8f1382 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_302.png and b/TMessagesProj/src/emojis/apple/emoji/0_302.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_303.png b/TMessagesProj/src/emojis/apple/emoji/0_303.png index 5b9205ebac..a80bd7fe22 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_303.png and b/TMessagesProj/src/emojis/apple/emoji/0_303.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_304.png b/TMessagesProj/src/emojis/apple/emoji/0_304.png index 091a9c4a25..b05a46d30b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_304.png and b/TMessagesProj/src/emojis/apple/emoji/0_304.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_305.png b/TMessagesProj/src/emojis/apple/emoji/0_305.png index 1a5be9d2ba..12d64c5168 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_305.png and b/TMessagesProj/src/emojis/apple/emoji/0_305.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_306.png b/TMessagesProj/src/emojis/apple/emoji/0_306.png index 744c010e63..d087c5a00a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_306.png and b/TMessagesProj/src/emojis/apple/emoji/0_306.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_307.png b/TMessagesProj/src/emojis/apple/emoji/0_307.png index 1808dcb947..cdc876abf4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_307.png and b/TMessagesProj/src/emojis/apple/emoji/0_307.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_308.png b/TMessagesProj/src/emojis/apple/emoji/0_308.png index 7c2ae4611f..f06f6dfbe8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_308.png and b/TMessagesProj/src/emojis/apple/emoji/0_308.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_309.png b/TMessagesProj/src/emojis/apple/emoji/0_309.png index 81b84b9708..7e2c219142 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_309.png and b/TMessagesProj/src/emojis/apple/emoji/0_309.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_31.png b/TMessagesProj/src/emojis/apple/emoji/0_31.png index 5509c848ea..3eb7749f8f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_31.png and b/TMessagesProj/src/emojis/apple/emoji/0_31.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_310.png b/TMessagesProj/src/emojis/apple/emoji/0_310.png index 3b6d1e7620..bc21f8ce1f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_310.png and b/TMessagesProj/src/emojis/apple/emoji/0_310.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_311.png 
b/TMessagesProj/src/emojis/apple/emoji/0_311.png index 7935d53fba..e95c41a3e4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_311.png and b/TMessagesProj/src/emojis/apple/emoji/0_311.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_312.png b/TMessagesProj/src/emojis/apple/emoji/0_312.png index 337f66524b..d50a210339 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_312.png and b/TMessagesProj/src/emojis/apple/emoji/0_312.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_313.png b/TMessagesProj/src/emojis/apple/emoji/0_313.png index c6b2944bbf..cfb129399c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_313.png and b/TMessagesProj/src/emojis/apple/emoji/0_313.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_314.png b/TMessagesProj/src/emojis/apple/emoji/0_314.png index bdff56d81a..de5b4420a2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_314.png and b/TMessagesProj/src/emojis/apple/emoji/0_314.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_315.png b/TMessagesProj/src/emojis/apple/emoji/0_315.png index de0b768995..59f44d7f0c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_315.png and b/TMessagesProj/src/emojis/apple/emoji/0_315.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_316.png b/TMessagesProj/src/emojis/apple/emoji/0_316.png index fee504f73e..936a669476 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_316.png and b/TMessagesProj/src/emojis/apple/emoji/0_316.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_317.png b/TMessagesProj/src/emojis/apple/emoji/0_317.png index 62add4baf9..e19208a4b1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_317.png and b/TMessagesProj/src/emojis/apple/emoji/0_317.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_318.png b/TMessagesProj/src/emojis/apple/emoji/0_318.png index 7f17aaeceb..debb78ec32 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_318.png and b/TMessagesProj/src/emojis/apple/emoji/0_318.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_319.png b/TMessagesProj/src/emojis/apple/emoji/0_319.png index c799874066..01762ba21d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_319.png and b/TMessagesProj/src/emojis/apple/emoji/0_319.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_32.png b/TMessagesProj/src/emojis/apple/emoji/0_32.png index 223c443e9d..49082569b9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_32.png and b/TMessagesProj/src/emojis/apple/emoji/0_32.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_320.png b/TMessagesProj/src/emojis/apple/emoji/0_320.png index d430438979..fdb4e47faf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_320.png and b/TMessagesProj/src/emojis/apple/emoji/0_320.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_321.png b/TMessagesProj/src/emojis/apple/emoji/0_321.png index 925ec012ab..0c6e9b7340 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_321.png and b/TMessagesProj/src/emojis/apple/emoji/0_321.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_322.png b/TMessagesProj/src/emojis/apple/emoji/0_322.png index 2e1c9afe43..a15db317a0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_322.png and b/TMessagesProj/src/emojis/apple/emoji/0_322.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_323.png b/TMessagesProj/src/emojis/apple/emoji/0_323.png index 7a4d975b6e..e29e95ec23 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/0_323.png and b/TMessagesProj/src/emojis/apple/emoji/0_323.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_324.png b/TMessagesProj/src/emojis/apple/emoji/0_324.png index 030f99e8ff..6cb2c2a878 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_324.png and b/TMessagesProj/src/emojis/apple/emoji/0_324.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_325.png b/TMessagesProj/src/emojis/apple/emoji/0_325.png index d6e368a9e9..ee0312b102 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_325.png and b/TMessagesProj/src/emojis/apple/emoji/0_325.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_326.png b/TMessagesProj/src/emojis/apple/emoji/0_326.png index c0f6726c1c..dadf8881b2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_326.png and b/TMessagesProj/src/emojis/apple/emoji/0_326.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_327.png b/TMessagesProj/src/emojis/apple/emoji/0_327.png index 174a24ceb7..ec985d045c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_327.png and b/TMessagesProj/src/emojis/apple/emoji/0_327.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_328.png b/TMessagesProj/src/emojis/apple/emoji/0_328.png index 68e37e7a55..9949a510b8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_328.png and b/TMessagesProj/src/emojis/apple/emoji/0_328.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_329.png b/TMessagesProj/src/emojis/apple/emoji/0_329.png index 0330d52aeb..676f516ff1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_329.png and b/TMessagesProj/src/emojis/apple/emoji/0_329.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_33.png b/TMessagesProj/src/emojis/apple/emoji/0_33.png index a7a8e9bb67..3176692689 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_33.png and b/TMessagesProj/src/emojis/apple/emoji/0_33.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_330.png b/TMessagesProj/src/emojis/apple/emoji/0_330.png index 59d7653e1b..4ff8baf14d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_330.png and b/TMessagesProj/src/emojis/apple/emoji/0_330.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_331.png b/TMessagesProj/src/emojis/apple/emoji/0_331.png index bbb280993b..598c2dcf55 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_331.png and b/TMessagesProj/src/emojis/apple/emoji/0_331.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_332.png b/TMessagesProj/src/emojis/apple/emoji/0_332.png index 5f85325e33..b6ba44a6b4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_332.png and b/TMessagesProj/src/emojis/apple/emoji/0_332.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_333.png b/TMessagesProj/src/emojis/apple/emoji/0_333.png index 2df30f3beb..49c5f9ed65 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_333.png and b/TMessagesProj/src/emojis/apple/emoji/0_333.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_334.png b/TMessagesProj/src/emojis/apple/emoji/0_334.png index 6ae26574a2..1a55ddcddb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_334.png and b/TMessagesProj/src/emojis/apple/emoji/0_334.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_335.png b/TMessagesProj/src/emojis/apple/emoji/0_335.png index 50d777a9e2..81b7363d23 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_335.png and 
b/TMessagesProj/src/emojis/apple/emoji/0_335.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_336.png b/TMessagesProj/src/emojis/apple/emoji/0_336.png index 7cb4647ab9..5546441757 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_336.png and b/TMessagesProj/src/emojis/apple/emoji/0_336.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_337.png b/TMessagesProj/src/emojis/apple/emoji/0_337.png index 82fed0b09a..7462e38b66 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_337.png and b/TMessagesProj/src/emojis/apple/emoji/0_337.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_338.png b/TMessagesProj/src/emojis/apple/emoji/0_338.png index 6b00e06caf..e781a26e2a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_338.png and b/TMessagesProj/src/emojis/apple/emoji/0_338.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_339.png b/TMessagesProj/src/emojis/apple/emoji/0_339.png index 08cbfbd94d..c9f3e17acb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_339.png and b/TMessagesProj/src/emojis/apple/emoji/0_339.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_34.png b/TMessagesProj/src/emojis/apple/emoji/0_34.png index b2f14575aa..7e7040660c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_34.png and b/TMessagesProj/src/emojis/apple/emoji/0_34.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_340.png b/TMessagesProj/src/emojis/apple/emoji/0_340.png index c56589fbbe..6a877a187d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_340.png and b/TMessagesProj/src/emojis/apple/emoji/0_340.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_341.png b/TMessagesProj/src/emojis/apple/emoji/0_341.png index f72b33a81b..350e741efd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_341.png and b/TMessagesProj/src/emojis/apple/emoji/0_341.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_342.png b/TMessagesProj/src/emojis/apple/emoji/0_342.png index 417a9d6aab..c555102807 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_342.png and b/TMessagesProj/src/emojis/apple/emoji/0_342.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_343.png b/TMessagesProj/src/emojis/apple/emoji/0_343.png index 577de46941..e1dcd59572 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_343.png and b/TMessagesProj/src/emojis/apple/emoji/0_343.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_344.png b/TMessagesProj/src/emojis/apple/emoji/0_344.png index 082db9b582..ca365ebd9a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_344.png and b/TMessagesProj/src/emojis/apple/emoji/0_344.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_345.png b/TMessagesProj/src/emojis/apple/emoji/0_345.png index f21fba5a2b..5cce87fa50 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_345.png and b/TMessagesProj/src/emojis/apple/emoji/0_345.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_346.png b/TMessagesProj/src/emojis/apple/emoji/0_346.png index 56f8a8abcd..fd89a3b0e0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_346.png and b/TMessagesProj/src/emojis/apple/emoji/0_346.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_347.png b/TMessagesProj/src/emojis/apple/emoji/0_347.png index 0047f8c5c6..5e68f866a6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_347.png and b/TMessagesProj/src/emojis/apple/emoji/0_347.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/0_348.png b/TMessagesProj/src/emojis/apple/emoji/0_348.png index 28c649f4f6..ad261bf48d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_348.png and b/TMessagesProj/src/emojis/apple/emoji/0_348.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_349.png b/TMessagesProj/src/emojis/apple/emoji/0_349.png index 18140399e7..5077baf341 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_349.png and b/TMessagesProj/src/emojis/apple/emoji/0_349.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_35.png b/TMessagesProj/src/emojis/apple/emoji/0_35.png index 4dd81986a9..6891d13e36 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_35.png and b/TMessagesProj/src/emojis/apple/emoji/0_35.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_350.png b/TMessagesProj/src/emojis/apple/emoji/0_350.png index aff83f0d75..d9958cada7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_350.png and b/TMessagesProj/src/emojis/apple/emoji/0_350.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_351.png b/TMessagesProj/src/emojis/apple/emoji/0_351.png index 160d0080bb..7e717f4b5b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_351.png and b/TMessagesProj/src/emojis/apple/emoji/0_351.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_352.png b/TMessagesProj/src/emojis/apple/emoji/0_352.png index 593e7fd74f..bafb8aec7f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_352.png and b/TMessagesProj/src/emojis/apple/emoji/0_352.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_353.png b/TMessagesProj/src/emojis/apple/emoji/0_353.png index 0c2f71814a..156bffc6a1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_353.png and b/TMessagesProj/src/emojis/apple/emoji/0_353.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_354.png b/TMessagesProj/src/emojis/apple/emoji/0_354.png index ca364260d5..907be0cfbb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_354.png and b/TMessagesProj/src/emojis/apple/emoji/0_354.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_355.png b/TMessagesProj/src/emojis/apple/emoji/0_355.png index c572bcdbd5..d03c72f80e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_355.png and b/TMessagesProj/src/emojis/apple/emoji/0_355.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_356.png b/TMessagesProj/src/emojis/apple/emoji/0_356.png index 3604f5c793..bd54554fcb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_356.png and b/TMessagesProj/src/emojis/apple/emoji/0_356.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_357.png b/TMessagesProj/src/emojis/apple/emoji/0_357.png index 72080f9269..8dae314e4f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_357.png and b/TMessagesProj/src/emojis/apple/emoji/0_357.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_358.png b/TMessagesProj/src/emojis/apple/emoji/0_358.png index b40368cf52..57effe4211 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_358.png and b/TMessagesProj/src/emojis/apple/emoji/0_358.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_359.png b/TMessagesProj/src/emojis/apple/emoji/0_359.png index 9800aa8665..c74a800552 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_359.png and b/TMessagesProj/src/emojis/apple/emoji/0_359.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_36.png 
b/TMessagesProj/src/emojis/apple/emoji/0_36.png index 733897ec70..93af66f263 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_36.png and b/TMessagesProj/src/emojis/apple/emoji/0_36.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_360.png b/TMessagesProj/src/emojis/apple/emoji/0_360.png index 078f1d8721..499b733c16 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_360.png and b/TMessagesProj/src/emojis/apple/emoji/0_360.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_361.png b/TMessagesProj/src/emojis/apple/emoji/0_361.png index 12f134ab61..5bc37c9c28 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_361.png and b/TMessagesProj/src/emojis/apple/emoji/0_361.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_362.png b/TMessagesProj/src/emojis/apple/emoji/0_362.png index 2a6f8a4b30..294ead3949 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_362.png and b/TMessagesProj/src/emojis/apple/emoji/0_362.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_363.png b/TMessagesProj/src/emojis/apple/emoji/0_363.png index 19a5990d37..02679b5377 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_363.png and b/TMessagesProj/src/emojis/apple/emoji/0_363.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_364.png b/TMessagesProj/src/emojis/apple/emoji/0_364.png index 2a250870e1..b82551ffd1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_364.png and b/TMessagesProj/src/emojis/apple/emoji/0_364.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_365.png b/TMessagesProj/src/emojis/apple/emoji/0_365.png index 1e304bcecc..f861800124 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_365.png and b/TMessagesProj/src/emojis/apple/emoji/0_365.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_366.png b/TMessagesProj/src/emojis/apple/emoji/0_366.png index edcc183b25..74af3651d0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_366.png and b/TMessagesProj/src/emojis/apple/emoji/0_366.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_367.png b/TMessagesProj/src/emojis/apple/emoji/0_367.png index f3ef753819..5832dece8f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_367.png and b/TMessagesProj/src/emojis/apple/emoji/0_367.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_368.png b/TMessagesProj/src/emojis/apple/emoji/0_368.png index b7ad29ef3c..4512d8d7b9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_368.png and b/TMessagesProj/src/emojis/apple/emoji/0_368.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_369.png b/TMessagesProj/src/emojis/apple/emoji/0_369.png index a6930e379f..9582f1f57a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_369.png and b/TMessagesProj/src/emojis/apple/emoji/0_369.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_37.png b/TMessagesProj/src/emojis/apple/emoji/0_37.png index 6890fb2621..3512a9a5f0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_37.png and b/TMessagesProj/src/emojis/apple/emoji/0_37.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_370.png b/TMessagesProj/src/emojis/apple/emoji/0_370.png index d49556271b..c084e3da8c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_370.png and b/TMessagesProj/src/emojis/apple/emoji/0_370.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_371.png b/TMessagesProj/src/emojis/apple/emoji/0_371.png index 25cc204394..fe6fcdfb7d 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/0_371.png and b/TMessagesProj/src/emojis/apple/emoji/0_371.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_372.png b/TMessagesProj/src/emojis/apple/emoji/0_372.png index a3908b673a..51a8e350a6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_372.png and b/TMessagesProj/src/emojis/apple/emoji/0_372.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_373.png b/TMessagesProj/src/emojis/apple/emoji/0_373.png index 4304f53b49..1b5b5e809c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_373.png and b/TMessagesProj/src/emojis/apple/emoji/0_373.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_374.png b/TMessagesProj/src/emojis/apple/emoji/0_374.png index 17e581a8c0..1dcc60dfb1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_374.png and b/TMessagesProj/src/emojis/apple/emoji/0_374.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_375.png b/TMessagesProj/src/emojis/apple/emoji/0_375.png index 1975ea6b71..89af235703 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_375.png and b/TMessagesProj/src/emojis/apple/emoji/0_375.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_376.png b/TMessagesProj/src/emojis/apple/emoji/0_376.png index ad1367ff18..4192cf2e7c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_376.png and b/TMessagesProj/src/emojis/apple/emoji/0_376.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_377.png b/TMessagesProj/src/emojis/apple/emoji/0_377.png index 8f200e14da..2aecfda183 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_377.png and b/TMessagesProj/src/emojis/apple/emoji/0_377.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_378.png b/TMessagesProj/src/emojis/apple/emoji/0_378.png index 3fbf3b337e..fff541983e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_378.png and b/TMessagesProj/src/emojis/apple/emoji/0_378.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_379.png b/TMessagesProj/src/emojis/apple/emoji/0_379.png index 868efe398b..a95c0f2166 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_379.png and b/TMessagesProj/src/emojis/apple/emoji/0_379.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_38.png b/TMessagesProj/src/emojis/apple/emoji/0_38.png index 859fa83b26..5194e232c0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_38.png and b/TMessagesProj/src/emojis/apple/emoji/0_38.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_380.png b/TMessagesProj/src/emojis/apple/emoji/0_380.png index ca1379b76d..0df9e502ea 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_380.png and b/TMessagesProj/src/emojis/apple/emoji/0_380.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_381.png b/TMessagesProj/src/emojis/apple/emoji/0_381.png index 0bf3c4ccb6..4dd1519d8a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_381.png and b/TMessagesProj/src/emojis/apple/emoji/0_381.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_382.png b/TMessagesProj/src/emojis/apple/emoji/0_382.png index b8af0709c5..d747d7d578 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_382.png and b/TMessagesProj/src/emojis/apple/emoji/0_382.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_383.png b/TMessagesProj/src/emojis/apple/emoji/0_383.png index f843a8d58c..26361e9eaa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_383.png and 
b/TMessagesProj/src/emojis/apple/emoji/0_383.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_384.png b/TMessagesProj/src/emojis/apple/emoji/0_384.png index 2d6b6ff574..4e686108a9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_384.png and b/TMessagesProj/src/emojis/apple/emoji/0_384.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_385.png b/TMessagesProj/src/emojis/apple/emoji/0_385.png index 506eabcf7b..f470bdf292 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_385.png and b/TMessagesProj/src/emojis/apple/emoji/0_385.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_386.png b/TMessagesProj/src/emojis/apple/emoji/0_386.png index 620d2b01c4..1473964f6a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_386.png and b/TMessagesProj/src/emojis/apple/emoji/0_386.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_387.png b/TMessagesProj/src/emojis/apple/emoji/0_387.png index b09ead6044..919088f033 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_387.png and b/TMessagesProj/src/emojis/apple/emoji/0_387.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_388.png b/TMessagesProj/src/emojis/apple/emoji/0_388.png index 0de711fe33..f7b5b7fc13 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_388.png and b/TMessagesProj/src/emojis/apple/emoji/0_388.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_389.png b/TMessagesProj/src/emojis/apple/emoji/0_389.png index 0ee8086b7d..b7b55726e0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_389.png and b/TMessagesProj/src/emojis/apple/emoji/0_389.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_39.png b/TMessagesProj/src/emojis/apple/emoji/0_39.png index f8d39a06d3..1ae53e2ad9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_39.png and b/TMessagesProj/src/emojis/apple/emoji/0_39.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_390.png b/TMessagesProj/src/emojis/apple/emoji/0_390.png index edbbcf17b4..72c7948800 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_390.png and b/TMessagesProj/src/emojis/apple/emoji/0_390.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_391.png b/TMessagesProj/src/emojis/apple/emoji/0_391.png index d49a579128..05bdde3d7c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_391.png and b/TMessagesProj/src/emojis/apple/emoji/0_391.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_392.png b/TMessagesProj/src/emojis/apple/emoji/0_392.png index 5bd81f2bb6..9d68f23dfd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_392.png and b/TMessagesProj/src/emojis/apple/emoji/0_392.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_393.png b/TMessagesProj/src/emojis/apple/emoji/0_393.png index af44c4175a..e4571ee884 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_393.png and b/TMessagesProj/src/emojis/apple/emoji/0_393.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_394.png b/TMessagesProj/src/emojis/apple/emoji/0_394.png index 88dc29ff0b..7827387fe4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_394.png and b/TMessagesProj/src/emojis/apple/emoji/0_394.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_395.png b/TMessagesProj/src/emojis/apple/emoji/0_395.png index f8768eae8f..76725ab82c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_395.png and b/TMessagesProj/src/emojis/apple/emoji/0_395.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/0_396.png b/TMessagesProj/src/emojis/apple/emoji/0_396.png index faf39f90f7..5b66a130ff 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_396.png and b/TMessagesProj/src/emojis/apple/emoji/0_396.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_397.png b/TMessagesProj/src/emojis/apple/emoji/0_397.png index a1e956dcbc..afcb84d8db 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_397.png and b/TMessagesProj/src/emojis/apple/emoji/0_397.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_398.png b/TMessagesProj/src/emojis/apple/emoji/0_398.png index 7c6de403d0..a2dcf47ae0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_398.png and b/TMessagesProj/src/emojis/apple/emoji/0_398.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_399.png b/TMessagesProj/src/emojis/apple/emoji/0_399.png index 90f4bc87af..3d285b99d3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_399.png and b/TMessagesProj/src/emojis/apple/emoji/0_399.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_4.png b/TMessagesProj/src/emojis/apple/emoji/0_4.png index a40f44e66e..995b06ecf1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_4.png and b/TMessagesProj/src/emojis/apple/emoji/0_4.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_40.png b/TMessagesProj/src/emojis/apple/emoji/0_40.png index d11b60c930..96e077f5cb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_40.png and b/TMessagesProj/src/emojis/apple/emoji/0_40.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_400.png b/TMessagesProj/src/emojis/apple/emoji/0_400.png index f5d5e8e611..0eae6f7da9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_400.png and b/TMessagesProj/src/emojis/apple/emoji/0_400.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_401.png b/TMessagesProj/src/emojis/apple/emoji/0_401.png index 0e665f122a..b27f1001c6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_401.png and b/TMessagesProj/src/emojis/apple/emoji/0_401.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_402.png b/TMessagesProj/src/emojis/apple/emoji/0_402.png index 002c0a7de2..44d039e10c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_402.png and b/TMessagesProj/src/emojis/apple/emoji/0_402.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_403.png b/TMessagesProj/src/emojis/apple/emoji/0_403.png index 44e075d6a4..9a56e86fef 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_403.png and b/TMessagesProj/src/emojis/apple/emoji/0_403.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_404.png b/TMessagesProj/src/emojis/apple/emoji/0_404.png index 738cc840bc..c672d17c8e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_404.png and b/TMessagesProj/src/emojis/apple/emoji/0_404.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_405.png b/TMessagesProj/src/emojis/apple/emoji/0_405.png index 466a219db8..a2d25d856a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_405.png and b/TMessagesProj/src/emojis/apple/emoji/0_405.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_406.png b/TMessagesProj/src/emojis/apple/emoji/0_406.png index 692c455e55..d913774868 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_406.png and b/TMessagesProj/src/emojis/apple/emoji/0_406.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_407.png 
b/TMessagesProj/src/emojis/apple/emoji/0_407.png index 5e2aa520d8..66de001c26 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_407.png and b/TMessagesProj/src/emojis/apple/emoji/0_407.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_408.png b/TMessagesProj/src/emojis/apple/emoji/0_408.png index 5c5133ec25..758319646b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_408.png and b/TMessagesProj/src/emojis/apple/emoji/0_408.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_409.png b/TMessagesProj/src/emojis/apple/emoji/0_409.png index c3d2345701..643926bbce 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_409.png and b/TMessagesProj/src/emojis/apple/emoji/0_409.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_41.png b/TMessagesProj/src/emojis/apple/emoji/0_41.png index 530af3ee21..ec3735fe10 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_41.png and b/TMessagesProj/src/emojis/apple/emoji/0_41.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_410.png b/TMessagesProj/src/emojis/apple/emoji/0_410.png index 8989c798ac..43f2f04478 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_410.png and b/TMessagesProj/src/emojis/apple/emoji/0_410.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_411.png b/TMessagesProj/src/emojis/apple/emoji/0_411.png index 1491a5d85b..fdca965cb9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_411.png and b/TMessagesProj/src/emojis/apple/emoji/0_411.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_412.png b/TMessagesProj/src/emojis/apple/emoji/0_412.png index 7336ffbdb4..f30384b07f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_412.png and b/TMessagesProj/src/emojis/apple/emoji/0_412.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_413.png b/TMessagesProj/src/emojis/apple/emoji/0_413.png index 1380547be0..cfa1247f25 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_413.png and b/TMessagesProj/src/emojis/apple/emoji/0_413.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_414.png b/TMessagesProj/src/emojis/apple/emoji/0_414.png index 80bbdd1530..364c2bfafe 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_414.png and b/TMessagesProj/src/emojis/apple/emoji/0_414.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_415.png b/TMessagesProj/src/emojis/apple/emoji/0_415.png index fb7def59ba..68dbff3ef1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_415.png and b/TMessagesProj/src/emojis/apple/emoji/0_415.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_416.png b/TMessagesProj/src/emojis/apple/emoji/0_416.png index b1b4ca6f17..61b8247ff8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_416.png and b/TMessagesProj/src/emojis/apple/emoji/0_416.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_417.png b/TMessagesProj/src/emojis/apple/emoji/0_417.png index aaac5ae13e..d094eb1c57 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_417.png and b/TMessagesProj/src/emojis/apple/emoji/0_417.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_418.png b/TMessagesProj/src/emojis/apple/emoji/0_418.png index 7a54b3b58e..5779c1abf3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_418.png and b/TMessagesProj/src/emojis/apple/emoji/0_418.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_419.png b/TMessagesProj/src/emojis/apple/emoji/0_419.png index 2794ecc5b3..96424cfc38 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/0_419.png and b/TMessagesProj/src/emojis/apple/emoji/0_419.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_42.png b/TMessagesProj/src/emojis/apple/emoji/0_42.png index d6815f7cfa..fc107db775 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_42.png and b/TMessagesProj/src/emojis/apple/emoji/0_42.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_420.png b/TMessagesProj/src/emojis/apple/emoji/0_420.png index a9e67ff98c..d3f741745d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_420.png and b/TMessagesProj/src/emojis/apple/emoji/0_420.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_421.png b/TMessagesProj/src/emojis/apple/emoji/0_421.png index 663ecf941d..b9f35ad1b5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_421.png and b/TMessagesProj/src/emojis/apple/emoji/0_421.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_422.png b/TMessagesProj/src/emojis/apple/emoji/0_422.png index 3899f0d75b..307d46204b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_422.png and b/TMessagesProj/src/emojis/apple/emoji/0_422.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_423.png b/TMessagesProj/src/emojis/apple/emoji/0_423.png index 45383d66d3..859f231958 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_423.png and b/TMessagesProj/src/emojis/apple/emoji/0_423.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_424.png b/TMessagesProj/src/emojis/apple/emoji/0_424.png index 935e11197a..5c30df8318 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_424.png and b/TMessagesProj/src/emojis/apple/emoji/0_424.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_425.png b/TMessagesProj/src/emojis/apple/emoji/0_425.png index 7366cc934b..eab2d25d75 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_425.png and b/TMessagesProj/src/emojis/apple/emoji/0_425.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_426.png b/TMessagesProj/src/emojis/apple/emoji/0_426.png index 47c151861a..414441ab1d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_426.png and b/TMessagesProj/src/emojis/apple/emoji/0_426.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_427.png b/TMessagesProj/src/emojis/apple/emoji/0_427.png index e3b27be992..9b5ffec1b1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_427.png and b/TMessagesProj/src/emojis/apple/emoji/0_427.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_428.png b/TMessagesProj/src/emojis/apple/emoji/0_428.png index 3f803e64b1..5c05a81e47 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_428.png and b/TMessagesProj/src/emojis/apple/emoji/0_428.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_429.png b/TMessagesProj/src/emojis/apple/emoji/0_429.png index f977218b5b..1e412ca88a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_429.png and b/TMessagesProj/src/emojis/apple/emoji/0_429.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_43.png b/TMessagesProj/src/emojis/apple/emoji/0_43.png index adbf204e54..aa618bd383 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_43.png and b/TMessagesProj/src/emojis/apple/emoji/0_43.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_430.png b/TMessagesProj/src/emojis/apple/emoji/0_430.png index b42972807e..0065552867 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_430.png and 
b/TMessagesProj/src/emojis/apple/emoji/0_430.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_431.png b/TMessagesProj/src/emojis/apple/emoji/0_431.png index 4acfb00892..1bd0147cf0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_431.png and b/TMessagesProj/src/emojis/apple/emoji/0_431.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_432.png b/TMessagesProj/src/emojis/apple/emoji/0_432.png index 86637bded5..5eea315669 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_432.png and b/TMessagesProj/src/emojis/apple/emoji/0_432.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_433.png b/TMessagesProj/src/emojis/apple/emoji/0_433.png index 3274742a2f..bf8287cf32 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_433.png and b/TMessagesProj/src/emojis/apple/emoji/0_433.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_434.png b/TMessagesProj/src/emojis/apple/emoji/0_434.png index 46c02928e6..3cfbd1a09e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_434.png and b/TMessagesProj/src/emojis/apple/emoji/0_434.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_435.png b/TMessagesProj/src/emojis/apple/emoji/0_435.png index b535116ce8..ad4d9cbb18 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_435.png and b/TMessagesProj/src/emojis/apple/emoji/0_435.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_436.png b/TMessagesProj/src/emojis/apple/emoji/0_436.png index de154092df..1fcef74ae3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_436.png and b/TMessagesProj/src/emojis/apple/emoji/0_436.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_437.png b/TMessagesProj/src/emojis/apple/emoji/0_437.png index f907859546..be32290883 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_437.png and b/TMessagesProj/src/emojis/apple/emoji/0_437.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_438.png b/TMessagesProj/src/emojis/apple/emoji/0_438.png index 0de7aa124b..a4c2875ebc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_438.png and b/TMessagesProj/src/emojis/apple/emoji/0_438.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_439.png b/TMessagesProj/src/emojis/apple/emoji/0_439.png index d865bc358e..fa2ad532aa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_439.png and b/TMessagesProj/src/emojis/apple/emoji/0_439.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_44.png b/TMessagesProj/src/emojis/apple/emoji/0_44.png index 8805ef207d..84b8eac12b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_44.png and b/TMessagesProj/src/emojis/apple/emoji/0_44.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_440.png b/TMessagesProj/src/emojis/apple/emoji/0_440.png index 9ef585eb4d..dd3da75e0f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_440.png and b/TMessagesProj/src/emojis/apple/emoji/0_440.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_441.png b/TMessagesProj/src/emojis/apple/emoji/0_441.png index 00f17920d2..a7000f82c6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_441.png and b/TMessagesProj/src/emojis/apple/emoji/0_441.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_442.png b/TMessagesProj/src/emojis/apple/emoji/0_442.png index d96d4db7f4..93bb2384a8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_442.png and b/TMessagesProj/src/emojis/apple/emoji/0_442.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/0_443.png b/TMessagesProj/src/emojis/apple/emoji/0_443.png index 6242f64e9a..df5e7d0784 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_443.png and b/TMessagesProj/src/emojis/apple/emoji/0_443.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_444.png b/TMessagesProj/src/emojis/apple/emoji/0_444.png index bac6573910..96faf4687f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_444.png and b/TMessagesProj/src/emojis/apple/emoji/0_444.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_445.png b/TMessagesProj/src/emojis/apple/emoji/0_445.png index 93254aa92d..e816ff2f0f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_445.png and b/TMessagesProj/src/emojis/apple/emoji/0_445.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_446.png b/TMessagesProj/src/emojis/apple/emoji/0_446.png index 39fbab19d2..4a6d75b787 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_446.png and b/TMessagesProj/src/emojis/apple/emoji/0_446.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_447.png b/TMessagesProj/src/emojis/apple/emoji/0_447.png index 0f907c9002..13ee655655 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_447.png and b/TMessagesProj/src/emojis/apple/emoji/0_447.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_448.png b/TMessagesProj/src/emojis/apple/emoji/0_448.png index d2c5c532b2..c40ab75f29 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_448.png and b/TMessagesProj/src/emojis/apple/emoji/0_448.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_449.png b/TMessagesProj/src/emojis/apple/emoji/0_449.png index 7023f6e611..c56d2735b9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_449.png and b/TMessagesProj/src/emojis/apple/emoji/0_449.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_45.png b/TMessagesProj/src/emojis/apple/emoji/0_45.png index 654fe7aae5..c833101bec 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_45.png and b/TMessagesProj/src/emojis/apple/emoji/0_45.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_450.png b/TMessagesProj/src/emojis/apple/emoji/0_450.png index c6d9307833..decc462804 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_450.png and b/TMessagesProj/src/emojis/apple/emoji/0_450.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_451.png b/TMessagesProj/src/emojis/apple/emoji/0_451.png index f2289a87ab..376817df77 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_451.png and b/TMessagesProj/src/emojis/apple/emoji/0_451.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_452.png b/TMessagesProj/src/emojis/apple/emoji/0_452.png index 0910269fc0..2c02d9215f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_452.png and b/TMessagesProj/src/emojis/apple/emoji/0_452.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_453.png b/TMessagesProj/src/emojis/apple/emoji/0_453.png index 820092e010..07dc1d8794 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_453.png and b/TMessagesProj/src/emojis/apple/emoji/0_453.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_454.png b/TMessagesProj/src/emojis/apple/emoji/0_454.png index 295bc68f97..75ffd54b3b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_454.png and b/TMessagesProj/src/emojis/apple/emoji/0_454.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_455.png 
b/TMessagesProj/src/emojis/apple/emoji/0_455.png index d617d65393..e1216e8aa6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_455.png and b/TMessagesProj/src/emojis/apple/emoji/0_455.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_456.png b/TMessagesProj/src/emojis/apple/emoji/0_456.png index 3be61d40d4..d24f2804a5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_456.png and b/TMessagesProj/src/emojis/apple/emoji/0_456.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_457.png b/TMessagesProj/src/emojis/apple/emoji/0_457.png index 58fac2a2a9..2705869bf5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_457.png and b/TMessagesProj/src/emojis/apple/emoji/0_457.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_458.png b/TMessagesProj/src/emojis/apple/emoji/0_458.png index 8c4430b4aa..a9e24505cd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_458.png and b/TMessagesProj/src/emojis/apple/emoji/0_458.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_459.png b/TMessagesProj/src/emojis/apple/emoji/0_459.png index b57cfa8ce2..fd5f65243b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_459.png and b/TMessagesProj/src/emojis/apple/emoji/0_459.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_46.png b/TMessagesProj/src/emojis/apple/emoji/0_46.png index f866bed759..2bf1c95df2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_46.png and b/TMessagesProj/src/emojis/apple/emoji/0_46.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_460.png b/TMessagesProj/src/emojis/apple/emoji/0_460.png index 17d2229cd3..7c989dac94 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_460.png and b/TMessagesProj/src/emojis/apple/emoji/0_460.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_461.png b/TMessagesProj/src/emojis/apple/emoji/0_461.png index 4745511ed3..ddbac822a6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_461.png and b/TMessagesProj/src/emojis/apple/emoji/0_461.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_462.png b/TMessagesProj/src/emojis/apple/emoji/0_462.png index 8884ded56d..952d54a9a9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_462.png and b/TMessagesProj/src/emojis/apple/emoji/0_462.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_463.png b/TMessagesProj/src/emojis/apple/emoji/0_463.png index 8ead5dd10e..9edfbb7fca 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_463.png and b/TMessagesProj/src/emojis/apple/emoji/0_463.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_464.png b/TMessagesProj/src/emojis/apple/emoji/0_464.png index e421a118fd..ac91c23fd1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_464.png and b/TMessagesProj/src/emojis/apple/emoji/0_464.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_465.png b/TMessagesProj/src/emojis/apple/emoji/0_465.png index 2185f6df9a..aa4e3ec204 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_465.png and b/TMessagesProj/src/emojis/apple/emoji/0_465.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_466.png b/TMessagesProj/src/emojis/apple/emoji/0_466.png index 05950b540c..a193ff0d7d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_466.png and b/TMessagesProj/src/emojis/apple/emoji/0_466.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_467.png b/TMessagesProj/src/emojis/apple/emoji/0_467.png index 2930d0ebc6..faf746782f 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/0_467.png and b/TMessagesProj/src/emojis/apple/emoji/0_467.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_468.png b/TMessagesProj/src/emojis/apple/emoji/0_468.png index 02f5b297f8..27a588385e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_468.png and b/TMessagesProj/src/emojis/apple/emoji/0_468.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_469.png b/TMessagesProj/src/emojis/apple/emoji/0_469.png index 2642ee3c3e..d512be9c32 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_469.png and b/TMessagesProj/src/emojis/apple/emoji/0_469.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_47.png b/TMessagesProj/src/emojis/apple/emoji/0_47.png index fefd0f2b65..b9aff6a76a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_47.png and b/TMessagesProj/src/emojis/apple/emoji/0_47.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_470.png b/TMessagesProj/src/emojis/apple/emoji/0_470.png index 9e40465692..d90a9ea177 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_470.png and b/TMessagesProj/src/emojis/apple/emoji/0_470.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_471.png b/TMessagesProj/src/emojis/apple/emoji/0_471.png index 752f4fd85d..206ca1e34b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_471.png and b/TMessagesProj/src/emojis/apple/emoji/0_471.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_472.png b/TMessagesProj/src/emojis/apple/emoji/0_472.png index f69bc67f65..972e4c477c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_472.png and b/TMessagesProj/src/emojis/apple/emoji/0_472.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_473.png b/TMessagesProj/src/emojis/apple/emoji/0_473.png index e79d146958..b5f571c9a2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_473.png and b/TMessagesProj/src/emojis/apple/emoji/0_473.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_474.png b/TMessagesProj/src/emojis/apple/emoji/0_474.png index bd19b3547d..1d458525c7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_474.png and b/TMessagesProj/src/emojis/apple/emoji/0_474.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_475.png b/TMessagesProj/src/emojis/apple/emoji/0_475.png index 6781c6e7f2..6b343ff689 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_475.png and b/TMessagesProj/src/emojis/apple/emoji/0_475.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_476.png b/TMessagesProj/src/emojis/apple/emoji/0_476.png index c33f644ce5..d23b49396c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_476.png and b/TMessagesProj/src/emojis/apple/emoji/0_476.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_477.png b/TMessagesProj/src/emojis/apple/emoji/0_477.png index f363cdc99f..cb1fe3fe68 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_477.png and b/TMessagesProj/src/emojis/apple/emoji/0_477.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_478.png b/TMessagesProj/src/emojis/apple/emoji/0_478.png index 5f0b80f282..c5fbfcf2fb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_478.png and b/TMessagesProj/src/emojis/apple/emoji/0_478.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_479.png b/TMessagesProj/src/emojis/apple/emoji/0_479.png index abe4706150..501d98a40a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_479.png and 
b/TMessagesProj/src/emojis/apple/emoji/0_479.png differ
[Binary-only changes: every Apple emoji sprite under TMessagesProj/src/emojis/apple/emoji/ in this stretch of the diff (indices 0_479.png through 0_729.png, including the interleaved 0_48–0_72, 0_5, 0_6, and 0_7 entries) was regenerated. Git reports each one as "diff --git … index … Binary files … differ" with no textual content to review.]
diff --git
a/TMessagesProj/src/emojis/apple/emoji/0_73.png b/TMessagesProj/src/emojis/apple/emoji/0_73.png index 4d42272a23..38b01d07a0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_73.png and b/TMessagesProj/src/emojis/apple/emoji/0_73.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_730.png b/TMessagesProj/src/emojis/apple/emoji/0_730.png index 76cec5a91b..cde6ecdd82 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_730.png and b/TMessagesProj/src/emojis/apple/emoji/0_730.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_731.png b/TMessagesProj/src/emojis/apple/emoji/0_731.png index b13e4c90cb..afad29c04c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_731.png and b/TMessagesProj/src/emojis/apple/emoji/0_731.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_732.png b/TMessagesProj/src/emojis/apple/emoji/0_732.png index d332190590..3d8afa85fb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_732.png and b/TMessagesProj/src/emojis/apple/emoji/0_732.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_733.png b/TMessagesProj/src/emojis/apple/emoji/0_733.png index 6d1417b7f1..40e0b9c114 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_733.png and b/TMessagesProj/src/emojis/apple/emoji/0_733.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_734.png b/TMessagesProj/src/emojis/apple/emoji/0_734.png index d485e1d630..26396f7252 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_734.png and b/TMessagesProj/src/emojis/apple/emoji/0_734.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_735.png b/TMessagesProj/src/emojis/apple/emoji/0_735.png index dcf24369e9..edf787510c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_735.png and b/TMessagesProj/src/emojis/apple/emoji/0_735.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_736.png b/TMessagesProj/src/emojis/apple/emoji/0_736.png index 5e90da9b9f..f16d789e59 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_736.png and b/TMessagesProj/src/emojis/apple/emoji/0_736.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_737.png b/TMessagesProj/src/emojis/apple/emoji/0_737.png index 6d6072a740..efc5f1076a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_737.png and b/TMessagesProj/src/emojis/apple/emoji/0_737.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_738.png b/TMessagesProj/src/emojis/apple/emoji/0_738.png index 165903ab59..3f30675e68 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_738.png and b/TMessagesProj/src/emojis/apple/emoji/0_738.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_739.png b/TMessagesProj/src/emojis/apple/emoji/0_739.png index d08ff0ce81..3abae7fd5c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_739.png and b/TMessagesProj/src/emojis/apple/emoji/0_739.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_74.png b/TMessagesProj/src/emojis/apple/emoji/0_74.png index 6a39d58af8..99b075b103 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_74.png and b/TMessagesProj/src/emojis/apple/emoji/0_74.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_740.png b/TMessagesProj/src/emojis/apple/emoji/0_740.png index 4747da085e..6a106def2b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_740.png and b/TMessagesProj/src/emojis/apple/emoji/0_740.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_741.png 
b/TMessagesProj/src/emojis/apple/emoji/0_741.png index 9b3eb3ada5..c79532ec09 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_741.png and b/TMessagesProj/src/emojis/apple/emoji/0_741.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_742.png b/TMessagesProj/src/emojis/apple/emoji/0_742.png index edd12086a1..f2777500a0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_742.png and b/TMessagesProj/src/emojis/apple/emoji/0_742.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_743.png b/TMessagesProj/src/emojis/apple/emoji/0_743.png index 4a97aec982..8a54f70ffe 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_743.png and b/TMessagesProj/src/emojis/apple/emoji/0_743.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_744.png b/TMessagesProj/src/emojis/apple/emoji/0_744.png index fd747790f4..5df6ef602e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_744.png and b/TMessagesProj/src/emojis/apple/emoji/0_744.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_745.png b/TMessagesProj/src/emojis/apple/emoji/0_745.png index ca2dcc4360..76c9bd5ac7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_745.png and b/TMessagesProj/src/emojis/apple/emoji/0_745.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_746.png b/TMessagesProj/src/emojis/apple/emoji/0_746.png index ae0a6cce9c..70d4d02236 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_746.png and b/TMessagesProj/src/emojis/apple/emoji/0_746.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_747.png b/TMessagesProj/src/emojis/apple/emoji/0_747.png index 94cd0d0240..dee6c42e95 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_747.png and b/TMessagesProj/src/emojis/apple/emoji/0_747.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_748.png b/TMessagesProj/src/emojis/apple/emoji/0_748.png index f8681e99aa..4e5afd33be 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_748.png and b/TMessagesProj/src/emojis/apple/emoji/0_748.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_749.png b/TMessagesProj/src/emojis/apple/emoji/0_749.png index cd380ed4df..88c9f5326d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_749.png and b/TMessagesProj/src/emojis/apple/emoji/0_749.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_75.png b/TMessagesProj/src/emojis/apple/emoji/0_75.png index 8e618abf4e..96cf5b361c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_75.png and b/TMessagesProj/src/emojis/apple/emoji/0_75.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_750.png b/TMessagesProj/src/emojis/apple/emoji/0_750.png index a17f94020a..33ce2888d6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_750.png and b/TMessagesProj/src/emojis/apple/emoji/0_750.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_751.png b/TMessagesProj/src/emojis/apple/emoji/0_751.png index 4156c42d63..8cdd3d8044 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_751.png and b/TMessagesProj/src/emojis/apple/emoji/0_751.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_752.png b/TMessagesProj/src/emojis/apple/emoji/0_752.png index dc37fedd5c..017cfdae0d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_752.png and b/TMessagesProj/src/emojis/apple/emoji/0_752.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_753.png b/TMessagesProj/src/emojis/apple/emoji/0_753.png index 3ce83de072..b95d42b5de 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/0_753.png and b/TMessagesProj/src/emojis/apple/emoji/0_753.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_754.png b/TMessagesProj/src/emojis/apple/emoji/0_754.png index be395a3cd8..7cdecd8766 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_754.png and b/TMessagesProj/src/emojis/apple/emoji/0_754.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_755.png b/TMessagesProj/src/emojis/apple/emoji/0_755.png index 02f980f8ae..987db4a2c6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_755.png and b/TMessagesProj/src/emojis/apple/emoji/0_755.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_756.png b/TMessagesProj/src/emojis/apple/emoji/0_756.png index 0530507581..551bb0abd7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_756.png and b/TMessagesProj/src/emojis/apple/emoji/0_756.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_757.png b/TMessagesProj/src/emojis/apple/emoji/0_757.png index fa2aa790e3..f1cdb54488 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_757.png and b/TMessagesProj/src/emojis/apple/emoji/0_757.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_758.png b/TMessagesProj/src/emojis/apple/emoji/0_758.png index 9bda3cbde1..928ce6fbc4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_758.png and b/TMessagesProj/src/emojis/apple/emoji/0_758.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_759.png b/TMessagesProj/src/emojis/apple/emoji/0_759.png index d04c3dae77..225e860d19 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_759.png and b/TMessagesProj/src/emojis/apple/emoji/0_759.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_76.png b/TMessagesProj/src/emojis/apple/emoji/0_76.png index d4fd5c4602..a65a7860fa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_76.png and b/TMessagesProj/src/emojis/apple/emoji/0_76.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_760.png b/TMessagesProj/src/emojis/apple/emoji/0_760.png index 6b6bcfaced..1c201d3e38 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_760.png and b/TMessagesProj/src/emojis/apple/emoji/0_760.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_761.png b/TMessagesProj/src/emojis/apple/emoji/0_761.png index 4eef6bc8f6..e0b5b654aa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_761.png and b/TMessagesProj/src/emojis/apple/emoji/0_761.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_762.png b/TMessagesProj/src/emojis/apple/emoji/0_762.png index e5fb1e4d6f..3c8d8a20b6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_762.png and b/TMessagesProj/src/emojis/apple/emoji/0_762.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_763.png b/TMessagesProj/src/emojis/apple/emoji/0_763.png index b7c495cda2..6339cfa54d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_763.png and b/TMessagesProj/src/emojis/apple/emoji/0_763.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_764.png b/TMessagesProj/src/emojis/apple/emoji/0_764.png index fcdfcaac80..93aaae908b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_764.png and b/TMessagesProj/src/emojis/apple/emoji/0_764.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_765.png b/TMessagesProj/src/emojis/apple/emoji/0_765.png index 62edb8563e..3a4f5fc04f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_765.png and 
b/TMessagesProj/src/emojis/apple/emoji/0_765.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_766.png b/TMessagesProj/src/emojis/apple/emoji/0_766.png index 4afb47d7a9..aaa57343a6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_766.png and b/TMessagesProj/src/emojis/apple/emoji/0_766.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_767.png b/TMessagesProj/src/emojis/apple/emoji/0_767.png index 33c2d0cad3..1a415a882b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_767.png and b/TMessagesProj/src/emojis/apple/emoji/0_767.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_768.png b/TMessagesProj/src/emojis/apple/emoji/0_768.png index 46aad5d9fb..6f2577051c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_768.png and b/TMessagesProj/src/emojis/apple/emoji/0_768.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_769.png b/TMessagesProj/src/emojis/apple/emoji/0_769.png index 7c92495029..3bc3ec80cf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_769.png and b/TMessagesProj/src/emojis/apple/emoji/0_769.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_77.png b/TMessagesProj/src/emojis/apple/emoji/0_77.png index 12fe972b8e..8cbaedff9d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_77.png and b/TMessagesProj/src/emojis/apple/emoji/0_77.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_770.png b/TMessagesProj/src/emojis/apple/emoji/0_770.png index 2da49a2959..4b9643879b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_770.png and b/TMessagesProj/src/emojis/apple/emoji/0_770.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_771.png b/TMessagesProj/src/emojis/apple/emoji/0_771.png index b849e4a222..65fe61d129 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_771.png and b/TMessagesProj/src/emojis/apple/emoji/0_771.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_772.png b/TMessagesProj/src/emojis/apple/emoji/0_772.png index 2b63d9b574..4edf9bea90 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_772.png and b/TMessagesProj/src/emojis/apple/emoji/0_772.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_773.png b/TMessagesProj/src/emojis/apple/emoji/0_773.png index 2d90f0b410..79185009d0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_773.png and b/TMessagesProj/src/emojis/apple/emoji/0_773.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_774.png b/TMessagesProj/src/emojis/apple/emoji/0_774.png index 8f9aa64754..da6af89407 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_774.png and b/TMessagesProj/src/emojis/apple/emoji/0_774.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_775.png b/TMessagesProj/src/emojis/apple/emoji/0_775.png index c53bdea35a..53459b52f3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_775.png and b/TMessagesProj/src/emojis/apple/emoji/0_775.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_776.png b/TMessagesProj/src/emojis/apple/emoji/0_776.png index b97adc8335..3ae39e7bf0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_776.png and b/TMessagesProj/src/emojis/apple/emoji/0_776.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_777.png b/TMessagesProj/src/emojis/apple/emoji/0_777.png index e50b304477..c088df06ae 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_777.png and b/TMessagesProj/src/emojis/apple/emoji/0_777.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/0_778.png b/TMessagesProj/src/emojis/apple/emoji/0_778.png index d5b021c2b1..7bfb17b314 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_778.png and b/TMessagesProj/src/emojis/apple/emoji/0_778.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_779.png b/TMessagesProj/src/emojis/apple/emoji/0_779.png index 6de1487641..0b3f70eda5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_779.png and b/TMessagesProj/src/emojis/apple/emoji/0_779.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_78.png b/TMessagesProj/src/emojis/apple/emoji/0_78.png index cb2d7a32e0..13cedac889 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_78.png and b/TMessagesProj/src/emojis/apple/emoji/0_78.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_780.png b/TMessagesProj/src/emojis/apple/emoji/0_780.png index 70ad2993e4..053e8292cb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_780.png and b/TMessagesProj/src/emojis/apple/emoji/0_780.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_781.png b/TMessagesProj/src/emojis/apple/emoji/0_781.png index f1b374d75b..a77d43b430 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_781.png and b/TMessagesProj/src/emojis/apple/emoji/0_781.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_782.png b/TMessagesProj/src/emojis/apple/emoji/0_782.png index 2e10ffcd60..f8f106d1fd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_782.png and b/TMessagesProj/src/emojis/apple/emoji/0_782.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_783.png b/TMessagesProj/src/emojis/apple/emoji/0_783.png index 796a5b3054..c516f327dc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_783.png and b/TMessagesProj/src/emojis/apple/emoji/0_783.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_784.png b/TMessagesProj/src/emojis/apple/emoji/0_784.png index 073afd3f86..3a3f105cf6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_784.png and b/TMessagesProj/src/emojis/apple/emoji/0_784.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_785.png b/TMessagesProj/src/emojis/apple/emoji/0_785.png index d35539be28..2b6d1173fd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_785.png and b/TMessagesProj/src/emojis/apple/emoji/0_785.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_786.png b/TMessagesProj/src/emojis/apple/emoji/0_786.png index 6bee92fa0a..b3d5ef6dc3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_786.png and b/TMessagesProj/src/emojis/apple/emoji/0_786.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_787.png b/TMessagesProj/src/emojis/apple/emoji/0_787.png index 3e016d98b1..b6b3e66ddc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_787.png and b/TMessagesProj/src/emojis/apple/emoji/0_787.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_788.png b/TMessagesProj/src/emojis/apple/emoji/0_788.png index d192d526cb..fbcbf3cfbf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_788.png and b/TMessagesProj/src/emojis/apple/emoji/0_788.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_789.png b/TMessagesProj/src/emojis/apple/emoji/0_789.png index dbc5b4ce11..61804fbda2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_789.png and b/TMessagesProj/src/emojis/apple/emoji/0_789.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_79.png 
b/TMessagesProj/src/emojis/apple/emoji/0_79.png index e1558a5c76..ed359b9893 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_79.png and b/TMessagesProj/src/emojis/apple/emoji/0_79.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_790.png b/TMessagesProj/src/emojis/apple/emoji/0_790.png index fa3173f9d7..67b068df28 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_790.png and b/TMessagesProj/src/emojis/apple/emoji/0_790.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_791.png b/TMessagesProj/src/emojis/apple/emoji/0_791.png index 4803a44766..ad1121cfd9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_791.png and b/TMessagesProj/src/emojis/apple/emoji/0_791.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_792.png b/TMessagesProj/src/emojis/apple/emoji/0_792.png index 9dee9cc653..4a3a604b24 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_792.png and b/TMessagesProj/src/emojis/apple/emoji/0_792.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_793.png b/TMessagesProj/src/emojis/apple/emoji/0_793.png index 81a2f16fa1..b11a9afc16 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_793.png and b/TMessagesProj/src/emojis/apple/emoji/0_793.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_794.png b/TMessagesProj/src/emojis/apple/emoji/0_794.png index d7c436a5d6..d92d029c12 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_794.png and b/TMessagesProj/src/emojis/apple/emoji/0_794.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_795.png b/TMessagesProj/src/emojis/apple/emoji/0_795.png index 9331884d8c..b3343ca22a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_795.png and b/TMessagesProj/src/emojis/apple/emoji/0_795.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_796.png b/TMessagesProj/src/emojis/apple/emoji/0_796.png index 1b551258ce..7f3231aaa3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_796.png and b/TMessagesProj/src/emojis/apple/emoji/0_796.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_797.png b/TMessagesProj/src/emojis/apple/emoji/0_797.png index 6c2819903b..66a389e0fb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_797.png and b/TMessagesProj/src/emojis/apple/emoji/0_797.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_798.png b/TMessagesProj/src/emojis/apple/emoji/0_798.png index 203e2a512a..a26cf616ef 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_798.png and b/TMessagesProj/src/emojis/apple/emoji/0_798.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_799.png b/TMessagesProj/src/emojis/apple/emoji/0_799.png index 81c85bd2c8..031605faa4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_799.png and b/TMessagesProj/src/emojis/apple/emoji/0_799.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_8.png b/TMessagesProj/src/emojis/apple/emoji/0_8.png index a744dca038..7ef0402126 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_8.png and b/TMessagesProj/src/emojis/apple/emoji/0_8.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_80.png b/TMessagesProj/src/emojis/apple/emoji/0_80.png index a350b36eec..d8e3ec5b42 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_80.png and b/TMessagesProj/src/emojis/apple/emoji/0_80.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_800.png b/TMessagesProj/src/emojis/apple/emoji/0_800.png index c2afcfedbb..85bafaf20c 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/0_800.png and b/TMessagesProj/src/emojis/apple/emoji/0_800.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_801.png b/TMessagesProj/src/emojis/apple/emoji/0_801.png index 037db00ca8..67d9b32cc3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_801.png and b/TMessagesProj/src/emojis/apple/emoji/0_801.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_802.png b/TMessagesProj/src/emojis/apple/emoji/0_802.png index 8427e92cdf..17cbcde8c4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_802.png and b/TMessagesProj/src/emojis/apple/emoji/0_802.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_803.png b/TMessagesProj/src/emojis/apple/emoji/0_803.png index 50778832c4..d7efdaa4f0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_803.png and b/TMessagesProj/src/emojis/apple/emoji/0_803.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_804.png b/TMessagesProj/src/emojis/apple/emoji/0_804.png index 9e598cc409..98ea2a3359 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_804.png and b/TMessagesProj/src/emojis/apple/emoji/0_804.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_805.png b/TMessagesProj/src/emojis/apple/emoji/0_805.png index de2c712b66..ef7d844b57 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_805.png and b/TMessagesProj/src/emojis/apple/emoji/0_805.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_806.png b/TMessagesProj/src/emojis/apple/emoji/0_806.png index f7ddf2fa85..a15d44e086 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_806.png and b/TMessagesProj/src/emojis/apple/emoji/0_806.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_807.png b/TMessagesProj/src/emojis/apple/emoji/0_807.png index 923eb1cb93..8fdaeab0aa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_807.png and b/TMessagesProj/src/emojis/apple/emoji/0_807.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_808.png b/TMessagesProj/src/emojis/apple/emoji/0_808.png index d4447ab4cf..30d939fb2e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_808.png and b/TMessagesProj/src/emojis/apple/emoji/0_808.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_809.png b/TMessagesProj/src/emojis/apple/emoji/0_809.png index c2aa6df07b..7b12748a3b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_809.png and b/TMessagesProj/src/emojis/apple/emoji/0_809.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_81.png b/TMessagesProj/src/emojis/apple/emoji/0_81.png index ba55620221..de1b70b595 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_81.png and b/TMessagesProj/src/emojis/apple/emoji/0_81.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_810.png b/TMessagesProj/src/emojis/apple/emoji/0_810.png index 91c2d122c3..5a2ece30c8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_810.png and b/TMessagesProj/src/emojis/apple/emoji/0_810.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_811.png b/TMessagesProj/src/emojis/apple/emoji/0_811.png index e6768c1119..b569adb2f3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_811.png and b/TMessagesProj/src/emojis/apple/emoji/0_811.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_812.png b/TMessagesProj/src/emojis/apple/emoji/0_812.png index 5cd5367237..23e0daece0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_812.png and 
b/TMessagesProj/src/emojis/apple/emoji/0_812.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_813.png b/TMessagesProj/src/emojis/apple/emoji/0_813.png index 9dede7373b..20bc5084ff 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_813.png and b/TMessagesProj/src/emojis/apple/emoji/0_813.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_814.png b/TMessagesProj/src/emojis/apple/emoji/0_814.png index df64a1dd86..3a0dcf3bae 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_814.png and b/TMessagesProj/src/emojis/apple/emoji/0_814.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_815.png b/TMessagesProj/src/emojis/apple/emoji/0_815.png index 87b60ddefc..5d46066e1c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_815.png and b/TMessagesProj/src/emojis/apple/emoji/0_815.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_816.png b/TMessagesProj/src/emojis/apple/emoji/0_816.png index d733917b29..69b89b956d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_816.png and b/TMessagesProj/src/emojis/apple/emoji/0_816.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_817.png b/TMessagesProj/src/emojis/apple/emoji/0_817.png index 443ebabe32..65b5acda1c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_817.png and b/TMessagesProj/src/emojis/apple/emoji/0_817.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_818.png b/TMessagesProj/src/emojis/apple/emoji/0_818.png index e092e5f7b6..7b4f5b6e4f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_818.png and b/TMessagesProj/src/emojis/apple/emoji/0_818.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_819.png b/TMessagesProj/src/emojis/apple/emoji/0_819.png index c22039ab29..7ed489277a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_819.png and b/TMessagesProj/src/emojis/apple/emoji/0_819.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_82.png b/TMessagesProj/src/emojis/apple/emoji/0_82.png index 601125532a..72cde4f980 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_82.png and b/TMessagesProj/src/emojis/apple/emoji/0_82.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_820.png b/TMessagesProj/src/emojis/apple/emoji/0_820.png index 0ce436913f..77de7291ce 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_820.png and b/TMessagesProj/src/emojis/apple/emoji/0_820.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_821.png b/TMessagesProj/src/emojis/apple/emoji/0_821.png index 898221afcc..cffa27600d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_821.png and b/TMessagesProj/src/emojis/apple/emoji/0_821.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_822.png b/TMessagesProj/src/emojis/apple/emoji/0_822.png index ab72c53348..539d3c56fa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_822.png and b/TMessagesProj/src/emojis/apple/emoji/0_822.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_823.png b/TMessagesProj/src/emojis/apple/emoji/0_823.png index 4489594ca6..04c9d22b28 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_823.png and b/TMessagesProj/src/emojis/apple/emoji/0_823.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_824.png b/TMessagesProj/src/emojis/apple/emoji/0_824.png index d912c65385..e664ddf6ab 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_824.png and b/TMessagesProj/src/emojis/apple/emoji/0_824.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/0_825.png b/TMessagesProj/src/emojis/apple/emoji/0_825.png index 8e101b6565..11fcb6cddd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_825.png and b/TMessagesProj/src/emojis/apple/emoji/0_825.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_826.png b/TMessagesProj/src/emojis/apple/emoji/0_826.png index 823c441ceb..1d0e04be23 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_826.png and b/TMessagesProj/src/emojis/apple/emoji/0_826.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_827.png b/TMessagesProj/src/emojis/apple/emoji/0_827.png index c3ce1987db..acca4c997a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_827.png and b/TMessagesProj/src/emojis/apple/emoji/0_827.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_828.png b/TMessagesProj/src/emojis/apple/emoji/0_828.png index 8ff4f0a9c6..3a8b52c6ff 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_828.png and b/TMessagesProj/src/emojis/apple/emoji/0_828.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_829.png b/TMessagesProj/src/emojis/apple/emoji/0_829.png index 25a3b65084..b4d9389bba 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_829.png and b/TMessagesProj/src/emojis/apple/emoji/0_829.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_83.png b/TMessagesProj/src/emojis/apple/emoji/0_83.png index 6cd858a8ac..ea52d5156d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_83.png and b/TMessagesProj/src/emojis/apple/emoji/0_83.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_830.png b/TMessagesProj/src/emojis/apple/emoji/0_830.png index 5b9da559fe..e90f9ac7c2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_830.png and b/TMessagesProj/src/emojis/apple/emoji/0_830.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_831.png b/TMessagesProj/src/emojis/apple/emoji/0_831.png index c32c3d0cea..c4f30a1124 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_831.png and b/TMessagesProj/src/emojis/apple/emoji/0_831.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_832.png b/TMessagesProj/src/emojis/apple/emoji/0_832.png index 31763f7226..2e187a356e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_832.png and b/TMessagesProj/src/emojis/apple/emoji/0_832.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_833.png b/TMessagesProj/src/emojis/apple/emoji/0_833.png index 57e5c2c97e..1cab85bb7f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_833.png and b/TMessagesProj/src/emojis/apple/emoji/0_833.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_834.png b/TMessagesProj/src/emojis/apple/emoji/0_834.png index 378fcd43e6..3f2b6c1874 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_834.png and b/TMessagesProj/src/emojis/apple/emoji/0_834.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_835.png b/TMessagesProj/src/emojis/apple/emoji/0_835.png index 5f52d41efd..94aebf38fc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_835.png and b/TMessagesProj/src/emojis/apple/emoji/0_835.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_836.png b/TMessagesProj/src/emojis/apple/emoji/0_836.png index de748681dd..238a73ed93 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_836.png and b/TMessagesProj/src/emojis/apple/emoji/0_836.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_837.png 
b/TMessagesProj/src/emojis/apple/emoji/0_837.png index d6b0fada7d..b41da590a7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_837.png and b/TMessagesProj/src/emojis/apple/emoji/0_837.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_838.png b/TMessagesProj/src/emojis/apple/emoji/0_838.png index d0295fa784..29042b51e9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_838.png and b/TMessagesProj/src/emojis/apple/emoji/0_838.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_839.png b/TMessagesProj/src/emojis/apple/emoji/0_839.png index cc87520075..5936daf009 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_839.png and b/TMessagesProj/src/emojis/apple/emoji/0_839.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_84.png b/TMessagesProj/src/emojis/apple/emoji/0_84.png index 9d5c12bb1c..eaa290cd74 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_84.png and b/TMessagesProj/src/emojis/apple/emoji/0_84.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_840.png b/TMessagesProj/src/emojis/apple/emoji/0_840.png index e83764210f..26d687de0f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_840.png and b/TMessagesProj/src/emojis/apple/emoji/0_840.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_841.png b/TMessagesProj/src/emojis/apple/emoji/0_841.png index b83d80d8a2..547e4dea36 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_841.png and b/TMessagesProj/src/emojis/apple/emoji/0_841.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_842.png b/TMessagesProj/src/emojis/apple/emoji/0_842.png index 0cf00bf61a..18bf7c28ce 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_842.png and b/TMessagesProj/src/emojis/apple/emoji/0_842.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_843.png b/TMessagesProj/src/emojis/apple/emoji/0_843.png index 22040595b7..83277044fd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_843.png and b/TMessagesProj/src/emojis/apple/emoji/0_843.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_844.png b/TMessagesProj/src/emojis/apple/emoji/0_844.png index 251e478c74..6d1241dfed 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_844.png and b/TMessagesProj/src/emojis/apple/emoji/0_844.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_845.png b/TMessagesProj/src/emojis/apple/emoji/0_845.png index c01e8da570..4c43352473 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_845.png and b/TMessagesProj/src/emojis/apple/emoji/0_845.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_846.png b/TMessagesProj/src/emojis/apple/emoji/0_846.png index ea9d76b8cf..b4d835459a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_846.png and b/TMessagesProj/src/emojis/apple/emoji/0_846.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_847.png b/TMessagesProj/src/emojis/apple/emoji/0_847.png index 46a3ad99d1..8af8fdb2d9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_847.png and b/TMessagesProj/src/emojis/apple/emoji/0_847.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_848.png b/TMessagesProj/src/emojis/apple/emoji/0_848.png index 14c9d86d55..8e0359df95 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_848.png and b/TMessagesProj/src/emojis/apple/emoji/0_848.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_849.png b/TMessagesProj/src/emojis/apple/emoji/0_849.png index 118e37f72c..704e2476de 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/0_849.png and b/TMessagesProj/src/emojis/apple/emoji/0_849.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_85.png b/TMessagesProj/src/emojis/apple/emoji/0_85.png index 6bfcb18c7f..5713640268 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_85.png and b/TMessagesProj/src/emojis/apple/emoji/0_85.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_850.png b/TMessagesProj/src/emojis/apple/emoji/0_850.png index 189355876e..4ad822d6ff 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_850.png and b/TMessagesProj/src/emojis/apple/emoji/0_850.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_851.png b/TMessagesProj/src/emojis/apple/emoji/0_851.png index 06fae95e36..8b20851807 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_851.png and b/TMessagesProj/src/emojis/apple/emoji/0_851.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_852.png b/TMessagesProj/src/emojis/apple/emoji/0_852.png index 4fe6b025b8..ed4893af36 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_852.png and b/TMessagesProj/src/emojis/apple/emoji/0_852.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_853.png b/TMessagesProj/src/emojis/apple/emoji/0_853.png index 9ad5f8fb95..036e1284c3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_853.png and b/TMessagesProj/src/emojis/apple/emoji/0_853.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_854.png b/TMessagesProj/src/emojis/apple/emoji/0_854.png index 81c4cf021b..de8332fd68 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_854.png and b/TMessagesProj/src/emojis/apple/emoji/0_854.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_855.png b/TMessagesProj/src/emojis/apple/emoji/0_855.png index 17ccfa3bca..6bc3ee5e9d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_855.png and b/TMessagesProj/src/emojis/apple/emoji/0_855.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_856.png b/TMessagesProj/src/emojis/apple/emoji/0_856.png index 6f1c5ad32b..4f1d7ead93 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_856.png and b/TMessagesProj/src/emojis/apple/emoji/0_856.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_857.png b/TMessagesProj/src/emojis/apple/emoji/0_857.png index 5bbda8d803..9843dd3261 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_857.png and b/TMessagesProj/src/emojis/apple/emoji/0_857.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_858.png b/TMessagesProj/src/emojis/apple/emoji/0_858.png index d5d22982e6..63173561c4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_858.png and b/TMessagesProj/src/emojis/apple/emoji/0_858.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_859.png b/TMessagesProj/src/emojis/apple/emoji/0_859.png index dae46da070..d5c20aa0d7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_859.png and b/TMessagesProj/src/emojis/apple/emoji/0_859.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_86.png b/TMessagesProj/src/emojis/apple/emoji/0_86.png index 0a2bf1ca6c..5ca02ffd1b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_86.png and b/TMessagesProj/src/emojis/apple/emoji/0_86.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_860.png b/TMessagesProj/src/emojis/apple/emoji/0_860.png index 6db24ad16a..c038c51224 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_860.png and 
b/TMessagesProj/src/emojis/apple/emoji/0_860.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_861.png b/TMessagesProj/src/emojis/apple/emoji/0_861.png index fcf130db9e..92e135e349 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_861.png and b/TMessagesProj/src/emojis/apple/emoji/0_861.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_862.png b/TMessagesProj/src/emojis/apple/emoji/0_862.png index 951d953e21..8f9fa73d15 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_862.png and b/TMessagesProj/src/emojis/apple/emoji/0_862.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_863.png b/TMessagesProj/src/emojis/apple/emoji/0_863.png index 497b17b5e2..60e7e78cee 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_863.png and b/TMessagesProj/src/emojis/apple/emoji/0_863.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_864.png b/TMessagesProj/src/emojis/apple/emoji/0_864.png index 603901dba8..29e1dae04c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_864.png and b/TMessagesProj/src/emojis/apple/emoji/0_864.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_865.png b/TMessagesProj/src/emojis/apple/emoji/0_865.png index 784d75820d..a186174c51 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_865.png and b/TMessagesProj/src/emojis/apple/emoji/0_865.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_866.png b/TMessagesProj/src/emojis/apple/emoji/0_866.png index fa293d1ed0..0183b292c9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_866.png and b/TMessagesProj/src/emojis/apple/emoji/0_866.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_867.png b/TMessagesProj/src/emojis/apple/emoji/0_867.png index 530421554e..2ab42a75a1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_867.png and b/TMessagesProj/src/emojis/apple/emoji/0_867.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_868.png b/TMessagesProj/src/emojis/apple/emoji/0_868.png index bbccc03dcc..8d67add65e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_868.png and b/TMessagesProj/src/emojis/apple/emoji/0_868.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_869.png b/TMessagesProj/src/emojis/apple/emoji/0_869.png index 56f9c172e4..e68dc2febc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_869.png and b/TMessagesProj/src/emojis/apple/emoji/0_869.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_87.png b/TMessagesProj/src/emojis/apple/emoji/0_87.png index cfb2849a30..adab8bd21e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_87.png and b/TMessagesProj/src/emojis/apple/emoji/0_87.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_870.png b/TMessagesProj/src/emojis/apple/emoji/0_870.png index e4fe642b24..5d2ca23226 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_870.png and b/TMessagesProj/src/emojis/apple/emoji/0_870.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_871.png b/TMessagesProj/src/emojis/apple/emoji/0_871.png index fafeb31c29..62aed36f71 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_871.png and b/TMessagesProj/src/emojis/apple/emoji/0_871.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_872.png b/TMessagesProj/src/emojis/apple/emoji/0_872.png index bf0e6844e2..f3ca118b8f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_872.png and b/TMessagesProj/src/emojis/apple/emoji/0_872.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/0_873.png b/TMessagesProj/src/emojis/apple/emoji/0_873.png index 5adab90685..f95977634e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_873.png and b/TMessagesProj/src/emojis/apple/emoji/0_873.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_874.png b/TMessagesProj/src/emojis/apple/emoji/0_874.png index 11a5b74d26..b0cd68e9ee 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_874.png and b/TMessagesProj/src/emojis/apple/emoji/0_874.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_875.png b/TMessagesProj/src/emojis/apple/emoji/0_875.png index c8d3ee5b4c..97e68ad2e7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_875.png and b/TMessagesProj/src/emojis/apple/emoji/0_875.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_876.png b/TMessagesProj/src/emojis/apple/emoji/0_876.png index a8cb271c40..46a825e13e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_876.png and b/TMessagesProj/src/emojis/apple/emoji/0_876.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_877.png b/TMessagesProj/src/emojis/apple/emoji/0_877.png index 5978c22f27..e6e902f626 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_877.png and b/TMessagesProj/src/emojis/apple/emoji/0_877.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_878.png b/TMessagesProj/src/emojis/apple/emoji/0_878.png index c735220aae..0dbb789825 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_878.png and b/TMessagesProj/src/emojis/apple/emoji/0_878.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_879.png b/TMessagesProj/src/emojis/apple/emoji/0_879.png index 2e9821bc62..0ba50ba0bd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_879.png and b/TMessagesProj/src/emojis/apple/emoji/0_879.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_88.png b/TMessagesProj/src/emojis/apple/emoji/0_88.png index 8dc8726086..dfaeb10407 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_88.png and b/TMessagesProj/src/emojis/apple/emoji/0_88.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_880.png b/TMessagesProj/src/emojis/apple/emoji/0_880.png index 3a4f52a0f2..175e349cf2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_880.png and b/TMessagesProj/src/emojis/apple/emoji/0_880.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_881.png b/TMessagesProj/src/emojis/apple/emoji/0_881.png index d7573e16c7..c8aeca8112 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_881.png and b/TMessagesProj/src/emojis/apple/emoji/0_881.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_882.png b/TMessagesProj/src/emojis/apple/emoji/0_882.png index 52c72813cc..5f2ee4794a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_882.png and b/TMessagesProj/src/emojis/apple/emoji/0_882.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_883.png b/TMessagesProj/src/emojis/apple/emoji/0_883.png index 364442d5f6..1b0fa6ee9e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_883.png and b/TMessagesProj/src/emojis/apple/emoji/0_883.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_884.png b/TMessagesProj/src/emojis/apple/emoji/0_884.png index 7c43c6cd6a..0f6e820161 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_884.png and b/TMessagesProj/src/emojis/apple/emoji/0_884.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_885.png 
b/TMessagesProj/src/emojis/apple/emoji/0_885.png index 90632f28c7..3b744838e3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_885.png and b/TMessagesProj/src/emojis/apple/emoji/0_885.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_886.png b/TMessagesProj/src/emojis/apple/emoji/0_886.png index 01b26a358e..b96c766663 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_886.png and b/TMessagesProj/src/emojis/apple/emoji/0_886.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_887.png b/TMessagesProj/src/emojis/apple/emoji/0_887.png index fadeac9038..d573ac3238 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_887.png and b/TMessagesProj/src/emojis/apple/emoji/0_887.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_888.png b/TMessagesProj/src/emojis/apple/emoji/0_888.png index 962a4498b9..03e3c89620 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_888.png and b/TMessagesProj/src/emojis/apple/emoji/0_888.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_889.png b/TMessagesProj/src/emojis/apple/emoji/0_889.png index 18ed0d3045..cbf6afb7ff 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_889.png and b/TMessagesProj/src/emojis/apple/emoji/0_889.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_89.png b/TMessagesProj/src/emojis/apple/emoji/0_89.png index 665df93b4a..2dfcea6169 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_89.png and b/TMessagesProj/src/emojis/apple/emoji/0_89.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_890.png b/TMessagesProj/src/emojis/apple/emoji/0_890.png index 92334db96e..57cb670703 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_890.png and b/TMessagesProj/src/emojis/apple/emoji/0_890.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_891.png b/TMessagesProj/src/emojis/apple/emoji/0_891.png index 3facc23c3f..ad06e2df0c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_891.png and b/TMessagesProj/src/emojis/apple/emoji/0_891.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_892.png b/TMessagesProj/src/emojis/apple/emoji/0_892.png index 75fa438e5d..c6f1e8f04e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_892.png and b/TMessagesProj/src/emojis/apple/emoji/0_892.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_893.png b/TMessagesProj/src/emojis/apple/emoji/0_893.png index 1ab1ba8f23..3218e6741c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_893.png and b/TMessagesProj/src/emojis/apple/emoji/0_893.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_894.png b/TMessagesProj/src/emojis/apple/emoji/0_894.png index 263914241b..6f3d024e45 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_894.png and b/TMessagesProj/src/emojis/apple/emoji/0_894.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_895.png b/TMessagesProj/src/emojis/apple/emoji/0_895.png index b2be617c9f..92cff2e7f3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_895.png and b/TMessagesProj/src/emojis/apple/emoji/0_895.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_896.png b/TMessagesProj/src/emojis/apple/emoji/0_896.png index 3f13cff0ec..3f0b89b0c2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_896.png and b/TMessagesProj/src/emojis/apple/emoji/0_896.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_897.png b/TMessagesProj/src/emojis/apple/emoji/0_897.png index c44a0fae4e..c5c71fa6e0 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/0_897.png and b/TMessagesProj/src/emojis/apple/emoji/0_897.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_898.png b/TMessagesProj/src/emojis/apple/emoji/0_898.png index 8efef6f0a7..a5492ac7e9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_898.png and b/TMessagesProj/src/emojis/apple/emoji/0_898.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_899.png b/TMessagesProj/src/emojis/apple/emoji/0_899.png index 66399fd86f..06b94e57c5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_899.png and b/TMessagesProj/src/emojis/apple/emoji/0_899.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_9.png b/TMessagesProj/src/emojis/apple/emoji/0_9.png index ffe1b9afdc..aaa18d47f6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_9.png and b/TMessagesProj/src/emojis/apple/emoji/0_9.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_90.png b/TMessagesProj/src/emojis/apple/emoji/0_90.png index 8470734b87..aa5f62703e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_90.png and b/TMessagesProj/src/emojis/apple/emoji/0_90.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_900.png b/TMessagesProj/src/emojis/apple/emoji/0_900.png index e5c9fc305d..a6ea34c679 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_900.png and b/TMessagesProj/src/emojis/apple/emoji/0_900.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_901.png b/TMessagesProj/src/emojis/apple/emoji/0_901.png index 3705866c7e..627afed6a8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_901.png and b/TMessagesProj/src/emojis/apple/emoji/0_901.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_902.png b/TMessagesProj/src/emojis/apple/emoji/0_902.png index 661af7be9a..c91a22e07a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_902.png and b/TMessagesProj/src/emojis/apple/emoji/0_902.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_903.png b/TMessagesProj/src/emojis/apple/emoji/0_903.png index e060ccbb7a..6c5c545073 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_903.png and b/TMessagesProj/src/emojis/apple/emoji/0_903.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_904.png b/TMessagesProj/src/emojis/apple/emoji/0_904.png index 5a77651828..56a649e005 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_904.png and b/TMessagesProj/src/emojis/apple/emoji/0_904.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_905.png b/TMessagesProj/src/emojis/apple/emoji/0_905.png index 9f1de8a18b..c4af08f046 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_905.png and b/TMessagesProj/src/emojis/apple/emoji/0_905.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_906.png b/TMessagesProj/src/emojis/apple/emoji/0_906.png index 75f0923493..ff5c513630 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_906.png and b/TMessagesProj/src/emojis/apple/emoji/0_906.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_907.png b/TMessagesProj/src/emojis/apple/emoji/0_907.png index 6d4707f75c..57fcd7bc34 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_907.png and b/TMessagesProj/src/emojis/apple/emoji/0_907.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_908.png b/TMessagesProj/src/emojis/apple/emoji/0_908.png index 905be52a42..29105f22de 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_908.png and 
b/TMessagesProj/src/emojis/apple/emoji/0_908.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_909.png b/TMessagesProj/src/emojis/apple/emoji/0_909.png index 4d3323caa6..aa7ab2d4a0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_909.png and b/TMessagesProj/src/emojis/apple/emoji/0_909.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_91.png b/TMessagesProj/src/emojis/apple/emoji/0_91.png index f9c64e80a3..f899ac9248 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_91.png and b/TMessagesProj/src/emojis/apple/emoji/0_91.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_910.png b/TMessagesProj/src/emojis/apple/emoji/0_910.png index b4e7ca9fcf..82c6ac962f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_910.png and b/TMessagesProj/src/emojis/apple/emoji/0_910.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_911.png b/TMessagesProj/src/emojis/apple/emoji/0_911.png index b7fa76586a..217f8ce02d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_911.png and b/TMessagesProj/src/emojis/apple/emoji/0_911.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_912.png b/TMessagesProj/src/emojis/apple/emoji/0_912.png index 769c9e6dec..f703f3dd3d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_912.png and b/TMessagesProj/src/emojis/apple/emoji/0_912.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_913.png b/TMessagesProj/src/emojis/apple/emoji/0_913.png index f3373d74e9..1bce9710b9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_913.png and b/TMessagesProj/src/emojis/apple/emoji/0_913.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_914.png b/TMessagesProj/src/emojis/apple/emoji/0_914.png index df2901a25e..9cc10de837 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_914.png and b/TMessagesProj/src/emojis/apple/emoji/0_914.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_915.png b/TMessagesProj/src/emojis/apple/emoji/0_915.png index 2a4aaac926..809cc611c9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_915.png and b/TMessagesProj/src/emojis/apple/emoji/0_915.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_916.png b/TMessagesProj/src/emojis/apple/emoji/0_916.png index 63111083b2..ba28449e10 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_916.png and b/TMessagesProj/src/emojis/apple/emoji/0_916.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_917.png b/TMessagesProj/src/emojis/apple/emoji/0_917.png index 2cb226b6d2..a6424c0c81 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_917.png and b/TMessagesProj/src/emojis/apple/emoji/0_917.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_918.png b/TMessagesProj/src/emojis/apple/emoji/0_918.png index 97ccfac245..480af9c2f4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_918.png and b/TMessagesProj/src/emojis/apple/emoji/0_918.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_919.png b/TMessagesProj/src/emojis/apple/emoji/0_919.png index ca8170eea5..d00a9878c0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_919.png and b/TMessagesProj/src/emojis/apple/emoji/0_919.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_92.png b/TMessagesProj/src/emojis/apple/emoji/0_92.png index 5002b08d84..4f7836cc2a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_92.png and b/TMessagesProj/src/emojis/apple/emoji/0_92.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/0_920.png b/TMessagesProj/src/emojis/apple/emoji/0_920.png index 6dc4af8e58..7d6901afbc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_920.png and b/TMessagesProj/src/emojis/apple/emoji/0_920.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_921.png b/TMessagesProj/src/emojis/apple/emoji/0_921.png index 5daac79841..119ac42319 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_921.png and b/TMessagesProj/src/emojis/apple/emoji/0_921.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_922.png b/TMessagesProj/src/emojis/apple/emoji/0_922.png index faec4d5989..301c283ff2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_922.png and b/TMessagesProj/src/emojis/apple/emoji/0_922.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_923.png b/TMessagesProj/src/emojis/apple/emoji/0_923.png index 3ef1ba1c04..fb940c716e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_923.png and b/TMessagesProj/src/emojis/apple/emoji/0_923.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_924.png b/TMessagesProj/src/emojis/apple/emoji/0_924.png index 2f266475a3..1c1e37d13c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_924.png and b/TMessagesProj/src/emojis/apple/emoji/0_924.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_925.png b/TMessagesProj/src/emojis/apple/emoji/0_925.png index 1cb1d598a0..b8cd74e6f6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_925.png and b/TMessagesProj/src/emojis/apple/emoji/0_925.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_926.png b/TMessagesProj/src/emojis/apple/emoji/0_926.png index 1215e46216..71dddf64a8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_926.png and b/TMessagesProj/src/emojis/apple/emoji/0_926.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_927.png b/TMessagesProj/src/emojis/apple/emoji/0_927.png index 43f44370e6..982ae4110c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_927.png and b/TMessagesProj/src/emojis/apple/emoji/0_927.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_928.png b/TMessagesProj/src/emojis/apple/emoji/0_928.png index 0ad16a9bcb..eff103d684 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_928.png and b/TMessagesProj/src/emojis/apple/emoji/0_928.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_929.png b/TMessagesProj/src/emojis/apple/emoji/0_929.png index 01d41626e6..687c5b7425 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_929.png and b/TMessagesProj/src/emojis/apple/emoji/0_929.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_93.png b/TMessagesProj/src/emojis/apple/emoji/0_93.png index d04f157167..15a241c488 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_93.png and b/TMessagesProj/src/emojis/apple/emoji/0_93.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_930.png b/TMessagesProj/src/emojis/apple/emoji/0_930.png index e4248c85a7..3a1082732a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_930.png and b/TMessagesProj/src/emojis/apple/emoji/0_930.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_931.png b/TMessagesProj/src/emojis/apple/emoji/0_931.png index 16a37d6a55..eb797d0f50 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_931.png and b/TMessagesProj/src/emojis/apple/emoji/0_931.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_932.png 
b/TMessagesProj/src/emojis/apple/emoji/0_932.png index 21d1d27b3d..ba7d2ab8a1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_932.png and b/TMessagesProj/src/emojis/apple/emoji/0_932.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_933.png b/TMessagesProj/src/emojis/apple/emoji/0_933.png index 28aa786a46..182743edd6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_933.png and b/TMessagesProj/src/emojis/apple/emoji/0_933.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_934.png b/TMessagesProj/src/emojis/apple/emoji/0_934.png index 0f4ba55f67..e37002b9d9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_934.png and b/TMessagesProj/src/emojis/apple/emoji/0_934.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_935.png b/TMessagesProj/src/emojis/apple/emoji/0_935.png index add11149a1..191a70209f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_935.png and b/TMessagesProj/src/emojis/apple/emoji/0_935.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_936.png b/TMessagesProj/src/emojis/apple/emoji/0_936.png index f23cab9d25..af00bb38ba 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_936.png and b/TMessagesProj/src/emojis/apple/emoji/0_936.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_937.png b/TMessagesProj/src/emojis/apple/emoji/0_937.png index cd11f81c0e..76b7595f71 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_937.png and b/TMessagesProj/src/emojis/apple/emoji/0_937.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_938.png b/TMessagesProj/src/emojis/apple/emoji/0_938.png index 8701ba3db1..7f991d63ed 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_938.png and b/TMessagesProj/src/emojis/apple/emoji/0_938.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_939.png b/TMessagesProj/src/emojis/apple/emoji/0_939.png index df51ac42f2..7d7cdf99e3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_939.png and b/TMessagesProj/src/emojis/apple/emoji/0_939.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_94.png b/TMessagesProj/src/emojis/apple/emoji/0_94.png index c7277d63c3..927369a9e6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_94.png and b/TMessagesProj/src/emojis/apple/emoji/0_94.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_940.png b/TMessagesProj/src/emojis/apple/emoji/0_940.png index 9ab7043eb2..39d6a2c15e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_940.png and b/TMessagesProj/src/emojis/apple/emoji/0_940.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_941.png b/TMessagesProj/src/emojis/apple/emoji/0_941.png index 5af1112056..ddac83ce69 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_941.png and b/TMessagesProj/src/emojis/apple/emoji/0_941.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_942.png b/TMessagesProj/src/emojis/apple/emoji/0_942.png index 3eefb8a1ca..1d275daa51 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_942.png and b/TMessagesProj/src/emojis/apple/emoji/0_942.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_943.png b/TMessagesProj/src/emojis/apple/emoji/0_943.png index 7bc2e42bea..f240af2bb4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_943.png and b/TMessagesProj/src/emojis/apple/emoji/0_943.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_944.png b/TMessagesProj/src/emojis/apple/emoji/0_944.png index c3a64a75c0..a61ebd30a9 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/0_944.png and b/TMessagesProj/src/emojis/apple/emoji/0_944.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_945.png b/TMessagesProj/src/emojis/apple/emoji/0_945.png index a4e6ea7d84..7aa6d5d0e1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_945.png and b/TMessagesProj/src/emojis/apple/emoji/0_945.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_946.png b/TMessagesProj/src/emojis/apple/emoji/0_946.png index 50d10080d9..4c9683d91f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_946.png and b/TMessagesProj/src/emojis/apple/emoji/0_946.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_947.png b/TMessagesProj/src/emojis/apple/emoji/0_947.png index 472a857162..c091df6eca 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_947.png and b/TMessagesProj/src/emojis/apple/emoji/0_947.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_948.png b/TMessagesProj/src/emojis/apple/emoji/0_948.png index 30d056f6ee..b369951f0d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_948.png and b/TMessagesProj/src/emojis/apple/emoji/0_948.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_949.png b/TMessagesProj/src/emojis/apple/emoji/0_949.png index 565103141f..a33699f8bf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_949.png and b/TMessagesProj/src/emojis/apple/emoji/0_949.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_95.png b/TMessagesProj/src/emojis/apple/emoji/0_95.png index 8c45ee4a2e..feb9573eb6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_95.png and b/TMessagesProj/src/emojis/apple/emoji/0_95.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_950.png b/TMessagesProj/src/emojis/apple/emoji/0_950.png index 1b0265ce4a..ec700c63b2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_950.png and b/TMessagesProj/src/emojis/apple/emoji/0_950.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_951.png b/TMessagesProj/src/emojis/apple/emoji/0_951.png index eae5117a4c..6a1bc71d3a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_951.png and b/TMessagesProj/src/emojis/apple/emoji/0_951.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_952.png b/TMessagesProj/src/emojis/apple/emoji/0_952.png index 6c7393c3de..8041f0a3ef 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_952.png and b/TMessagesProj/src/emojis/apple/emoji/0_952.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_953.png b/TMessagesProj/src/emojis/apple/emoji/0_953.png index 4b75a36325..1e4bbe4b72 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_953.png and b/TMessagesProj/src/emojis/apple/emoji/0_953.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_954.png b/TMessagesProj/src/emojis/apple/emoji/0_954.png index 814b109426..155ccf14b4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_954.png and b/TMessagesProj/src/emojis/apple/emoji/0_954.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_955.png b/TMessagesProj/src/emojis/apple/emoji/0_955.png index a452d138fd..22c4b25430 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_955.png and b/TMessagesProj/src/emojis/apple/emoji/0_955.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_956.png b/TMessagesProj/src/emojis/apple/emoji/0_956.png index 1d091a3541..49c64af121 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_956.png and 
b/TMessagesProj/src/emojis/apple/emoji/0_956.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_957.png b/TMessagesProj/src/emojis/apple/emoji/0_957.png index d500371746..c9181120e8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_957.png and b/TMessagesProj/src/emojis/apple/emoji/0_957.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_958.png b/TMessagesProj/src/emojis/apple/emoji/0_958.png index 43301d9e4c..c7860dfd59 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_958.png and b/TMessagesProj/src/emojis/apple/emoji/0_958.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_959.png b/TMessagesProj/src/emojis/apple/emoji/0_959.png index fa17310487..68f68d83e4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_959.png and b/TMessagesProj/src/emojis/apple/emoji/0_959.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_96.png b/TMessagesProj/src/emojis/apple/emoji/0_96.png index fd4cd4e828..6165b2f6cd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_96.png and b/TMessagesProj/src/emojis/apple/emoji/0_96.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_960.png b/TMessagesProj/src/emojis/apple/emoji/0_960.png index fc6517b4ac..9274b9af8c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_960.png and b/TMessagesProj/src/emojis/apple/emoji/0_960.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_961.png b/TMessagesProj/src/emojis/apple/emoji/0_961.png index e9244cbe8d..c8c14fbc91 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_961.png and b/TMessagesProj/src/emojis/apple/emoji/0_961.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_962.png b/TMessagesProj/src/emojis/apple/emoji/0_962.png index ce6411447f..c7adba3d16 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_962.png and b/TMessagesProj/src/emojis/apple/emoji/0_962.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_963.png b/TMessagesProj/src/emojis/apple/emoji/0_963.png index 8d70d13d9d..d37f62f732 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_963.png and b/TMessagesProj/src/emojis/apple/emoji/0_963.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_964.png b/TMessagesProj/src/emojis/apple/emoji/0_964.png index f9665647b2..3a926a5b78 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_964.png and b/TMessagesProj/src/emojis/apple/emoji/0_964.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_965.png b/TMessagesProj/src/emojis/apple/emoji/0_965.png index a9d13e0cfb..ba2f276641 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_965.png and b/TMessagesProj/src/emojis/apple/emoji/0_965.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_966.png b/TMessagesProj/src/emojis/apple/emoji/0_966.png index 5bfb9a6d71..da309a91a5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_966.png and b/TMessagesProj/src/emojis/apple/emoji/0_966.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_967.png b/TMessagesProj/src/emojis/apple/emoji/0_967.png index 1273e3e401..02ff5e4670 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_967.png and b/TMessagesProj/src/emojis/apple/emoji/0_967.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_968.png b/TMessagesProj/src/emojis/apple/emoji/0_968.png index f782368380..b7d34c25bb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_968.png and b/TMessagesProj/src/emojis/apple/emoji/0_968.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/0_969.png b/TMessagesProj/src/emojis/apple/emoji/0_969.png index a21fcf4f05..d1dca0634b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_969.png and b/TMessagesProj/src/emojis/apple/emoji/0_969.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_97.png b/TMessagesProj/src/emojis/apple/emoji/0_97.png index cfa3430835..0216572b63 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_97.png and b/TMessagesProj/src/emojis/apple/emoji/0_97.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_970.png b/TMessagesProj/src/emojis/apple/emoji/0_970.png index b4a7814d47..c23647a442 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_970.png and b/TMessagesProj/src/emojis/apple/emoji/0_970.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_971.png b/TMessagesProj/src/emojis/apple/emoji/0_971.png index 556090faf3..794805d52f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_971.png and b/TMessagesProj/src/emojis/apple/emoji/0_971.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_972.png b/TMessagesProj/src/emojis/apple/emoji/0_972.png index 820057a574..09f6041f9b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_972.png and b/TMessagesProj/src/emojis/apple/emoji/0_972.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_973.png b/TMessagesProj/src/emojis/apple/emoji/0_973.png index f738c478d0..8bf1ec5f64 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_973.png and b/TMessagesProj/src/emojis/apple/emoji/0_973.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_974.png b/TMessagesProj/src/emojis/apple/emoji/0_974.png index 078c0ee7d3..732ffb1e73 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_974.png and b/TMessagesProj/src/emojis/apple/emoji/0_974.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_975.png b/TMessagesProj/src/emojis/apple/emoji/0_975.png index 63fab38bed..7dfce6e294 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_975.png and b/TMessagesProj/src/emojis/apple/emoji/0_975.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_976.png b/TMessagesProj/src/emojis/apple/emoji/0_976.png index 21970038ad..2fe0c4b996 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_976.png and b/TMessagesProj/src/emojis/apple/emoji/0_976.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_977.png b/TMessagesProj/src/emojis/apple/emoji/0_977.png index a8c41f10c8..5ba006e4cd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_977.png and b/TMessagesProj/src/emojis/apple/emoji/0_977.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_978.png b/TMessagesProj/src/emojis/apple/emoji/0_978.png index aa7ff84e8c..cc71aee135 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_978.png and b/TMessagesProj/src/emojis/apple/emoji/0_978.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_979.png b/TMessagesProj/src/emojis/apple/emoji/0_979.png index 0cfeb3d13f..e40398a91c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_979.png and b/TMessagesProj/src/emojis/apple/emoji/0_979.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_98.png b/TMessagesProj/src/emojis/apple/emoji/0_98.png index fc69896ccc..5c8a55d39d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_98.png and b/TMessagesProj/src/emojis/apple/emoji/0_98.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_980.png 
b/TMessagesProj/src/emojis/apple/emoji/0_980.png index 58abb2ac5b..ddabb9b6a8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_980.png and b/TMessagesProj/src/emojis/apple/emoji/0_980.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_981.png b/TMessagesProj/src/emojis/apple/emoji/0_981.png index 26de777ec0..5922a70caf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_981.png and b/TMessagesProj/src/emojis/apple/emoji/0_981.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_982.png b/TMessagesProj/src/emojis/apple/emoji/0_982.png index e22a5b5776..f16740f491 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_982.png and b/TMessagesProj/src/emojis/apple/emoji/0_982.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_983.png b/TMessagesProj/src/emojis/apple/emoji/0_983.png index ec95617b5e..d14a7ae404 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_983.png and b/TMessagesProj/src/emojis/apple/emoji/0_983.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_984.png b/TMessagesProj/src/emojis/apple/emoji/0_984.png index e58828b68c..a9370560fc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_984.png and b/TMessagesProj/src/emojis/apple/emoji/0_984.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_985.png b/TMessagesProj/src/emojis/apple/emoji/0_985.png index 3b2b41673a..b8c2355708 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_985.png and b/TMessagesProj/src/emojis/apple/emoji/0_985.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_986.png b/TMessagesProj/src/emojis/apple/emoji/0_986.png index 5d46f36d48..f1dc933f87 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_986.png and b/TMessagesProj/src/emojis/apple/emoji/0_986.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_987.png b/TMessagesProj/src/emojis/apple/emoji/0_987.png index 9ff6b43bb6..a6ecea5507 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_987.png and b/TMessagesProj/src/emojis/apple/emoji/0_987.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_988.png b/TMessagesProj/src/emojis/apple/emoji/0_988.png index 303fe33c79..0fb6cb86ba 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_988.png and b/TMessagesProj/src/emojis/apple/emoji/0_988.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_989.png b/TMessagesProj/src/emojis/apple/emoji/0_989.png index c9dd50afe7..cdf31e226e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_989.png and b/TMessagesProj/src/emojis/apple/emoji/0_989.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_99.png b/TMessagesProj/src/emojis/apple/emoji/0_99.png index 194139351a..a33b3c320a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_99.png and b/TMessagesProj/src/emojis/apple/emoji/0_99.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_990.png b/TMessagesProj/src/emojis/apple/emoji/0_990.png index 86526a8b95..f7da903cb1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_990.png and b/TMessagesProj/src/emojis/apple/emoji/0_990.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_991.png b/TMessagesProj/src/emojis/apple/emoji/0_991.png index 23b22b3bb2..fbb0128a87 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_991.png and b/TMessagesProj/src/emojis/apple/emoji/0_991.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_992.png b/TMessagesProj/src/emojis/apple/emoji/0_992.png index f89ca7b932..87a9490ba0 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/0_992.png and b/TMessagesProj/src/emojis/apple/emoji/0_992.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_993.png b/TMessagesProj/src/emojis/apple/emoji/0_993.png index f7f1b0b2f9..e7d01bf953 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_993.png and b/TMessagesProj/src/emojis/apple/emoji/0_993.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_994.png b/TMessagesProj/src/emojis/apple/emoji/0_994.png index 1a3f1f477b..ae0f2786fe 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_994.png and b/TMessagesProj/src/emojis/apple/emoji/0_994.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_995.png b/TMessagesProj/src/emojis/apple/emoji/0_995.png index 060e56ebe1..273754648c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_995.png and b/TMessagesProj/src/emojis/apple/emoji/0_995.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_996.png b/TMessagesProj/src/emojis/apple/emoji/0_996.png index a90fc95ec5..3fd647cbf4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_996.png and b/TMessagesProj/src/emojis/apple/emoji/0_996.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_997.png b/TMessagesProj/src/emojis/apple/emoji/0_997.png index ba820064e2..bb58a495a5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_997.png and b/TMessagesProj/src/emojis/apple/emoji/0_997.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_998.png b/TMessagesProj/src/emojis/apple/emoji/0_998.png index c6fd85bddd..e446b46981 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_998.png and b/TMessagesProj/src/emojis/apple/emoji/0_998.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/0_999.png b/TMessagesProj/src/emojis/apple/emoji/0_999.png index 5591cb4af7..fe05b2abd2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/0_999.png and b/TMessagesProj/src/emojis/apple/emoji/0_999.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_0.png b/TMessagesProj/src/emojis/apple/emoji/1_0.png index 1adcf2898b..5f7a1fdbb3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_0.png and b/TMessagesProj/src/emojis/apple/emoji/1_0.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_1.png b/TMessagesProj/src/emojis/apple/emoji/1_1.png index 99641710c5..02d8349749 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_1.png and b/TMessagesProj/src/emojis/apple/emoji/1_1.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_10.png b/TMessagesProj/src/emojis/apple/emoji/1_10.png index e03595398c..342f75c17f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_10.png and b/TMessagesProj/src/emojis/apple/emoji/1_10.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_100.png b/TMessagesProj/src/emojis/apple/emoji/1_100.png index 4e0bfbf3c9..b9e2ca4fa4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_100.png and b/TMessagesProj/src/emojis/apple/emoji/1_100.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_101.png b/TMessagesProj/src/emojis/apple/emoji/1_101.png index 03d1cfbc3f..74354f73ba 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_101.png and b/TMessagesProj/src/emojis/apple/emoji/1_101.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_102.png b/TMessagesProj/src/emojis/apple/emoji/1_102.png index 1746faf8de..b034a16c01 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_102.png and 
b/TMessagesProj/src/emojis/apple/emoji/1_102.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_103.png b/TMessagesProj/src/emojis/apple/emoji/1_103.png index ec60f55420..9e21b07f60 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_103.png and b/TMessagesProj/src/emojis/apple/emoji/1_103.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_104.png b/TMessagesProj/src/emojis/apple/emoji/1_104.png index c363ecbb56..9cc268b184 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_104.png and b/TMessagesProj/src/emojis/apple/emoji/1_104.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_105.png b/TMessagesProj/src/emojis/apple/emoji/1_105.png index 954a568861..052efe3b5f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_105.png and b/TMessagesProj/src/emojis/apple/emoji/1_105.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_106.png b/TMessagesProj/src/emojis/apple/emoji/1_106.png index 8d8d1fb1e0..b5fffcc901 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_106.png and b/TMessagesProj/src/emojis/apple/emoji/1_106.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_107.png b/TMessagesProj/src/emojis/apple/emoji/1_107.png index f66b951046..8bb3588805 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_107.png and b/TMessagesProj/src/emojis/apple/emoji/1_107.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_108.png b/TMessagesProj/src/emojis/apple/emoji/1_108.png index bd643c2ae9..20c56068fc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_108.png and b/TMessagesProj/src/emojis/apple/emoji/1_108.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_109.png b/TMessagesProj/src/emojis/apple/emoji/1_109.png index d0641669bf..f60c4ce9a9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_109.png and b/TMessagesProj/src/emojis/apple/emoji/1_109.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_11.png b/TMessagesProj/src/emojis/apple/emoji/1_11.png index 144df39319..6b2106c7a2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_11.png and b/TMessagesProj/src/emojis/apple/emoji/1_11.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_110.png b/TMessagesProj/src/emojis/apple/emoji/1_110.png index 0f1fc50502..271bebdfef 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_110.png and b/TMessagesProj/src/emojis/apple/emoji/1_110.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_111.png b/TMessagesProj/src/emojis/apple/emoji/1_111.png index 183d085c3c..eaca0fe1eb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_111.png and b/TMessagesProj/src/emojis/apple/emoji/1_111.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_112.png b/TMessagesProj/src/emojis/apple/emoji/1_112.png index 3bb8514bc3..a7a2fe4d57 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_112.png and b/TMessagesProj/src/emojis/apple/emoji/1_112.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_113.png b/TMessagesProj/src/emojis/apple/emoji/1_113.png index 73ebcfbcaf..845212cf89 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_113.png and b/TMessagesProj/src/emojis/apple/emoji/1_113.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_114.png b/TMessagesProj/src/emojis/apple/emoji/1_114.png index 758b4a9d26..268c3b1c14 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_114.png and b/TMessagesProj/src/emojis/apple/emoji/1_114.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/1_115.png b/TMessagesProj/src/emojis/apple/emoji/1_115.png index e44c419f30..7a3e1433bc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_115.png and b/TMessagesProj/src/emojis/apple/emoji/1_115.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_116.png b/TMessagesProj/src/emojis/apple/emoji/1_116.png index 76f7a4f193..b64a54fcb6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_116.png and b/TMessagesProj/src/emojis/apple/emoji/1_116.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_117.png b/TMessagesProj/src/emojis/apple/emoji/1_117.png index dfc394fcd0..6ee27e5cab 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_117.png and b/TMessagesProj/src/emojis/apple/emoji/1_117.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_118.png b/TMessagesProj/src/emojis/apple/emoji/1_118.png index a436805f67..ed10379553 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_118.png and b/TMessagesProj/src/emojis/apple/emoji/1_118.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_119.png b/TMessagesProj/src/emojis/apple/emoji/1_119.png index 1b7da5ea5a..550ab82ee8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_119.png and b/TMessagesProj/src/emojis/apple/emoji/1_119.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_12.png b/TMessagesProj/src/emojis/apple/emoji/1_12.png index d11e1f4de1..d644afa0e8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_12.png and b/TMessagesProj/src/emojis/apple/emoji/1_12.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_120.png b/TMessagesProj/src/emojis/apple/emoji/1_120.png index 62cf3d8ff2..cf97bdd81f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_120.png and b/TMessagesProj/src/emojis/apple/emoji/1_120.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_121.png b/TMessagesProj/src/emojis/apple/emoji/1_121.png index 8ec5818411..01a246753a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_121.png and b/TMessagesProj/src/emojis/apple/emoji/1_121.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_122.png b/TMessagesProj/src/emojis/apple/emoji/1_122.png index befafd2c30..4c8062d36e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_122.png and b/TMessagesProj/src/emojis/apple/emoji/1_122.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_123.png b/TMessagesProj/src/emojis/apple/emoji/1_123.png index bf2e89371e..3438fbcb6d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_123.png and b/TMessagesProj/src/emojis/apple/emoji/1_123.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_124.png b/TMessagesProj/src/emojis/apple/emoji/1_124.png index 12a685aa22..08bdecf3b6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_124.png and b/TMessagesProj/src/emojis/apple/emoji/1_124.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_125.png b/TMessagesProj/src/emojis/apple/emoji/1_125.png index 894dffed6d..b4edbb054b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_125.png and b/TMessagesProj/src/emojis/apple/emoji/1_125.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_126.png b/TMessagesProj/src/emojis/apple/emoji/1_126.png index afb4987756..bd67c3f1c3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_126.png and b/TMessagesProj/src/emojis/apple/emoji/1_126.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_127.png 
b/TMessagesProj/src/emojis/apple/emoji/1_127.png index cbfe7a5ba5..d6827294c0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_127.png and b/TMessagesProj/src/emojis/apple/emoji/1_127.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_128.png b/TMessagesProj/src/emojis/apple/emoji/1_128.png index 7211bc0d29..6f611a2cb9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_128.png and b/TMessagesProj/src/emojis/apple/emoji/1_128.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_129.png b/TMessagesProj/src/emojis/apple/emoji/1_129.png index d6524f43de..04789dcccf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_129.png and b/TMessagesProj/src/emojis/apple/emoji/1_129.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_13.png b/TMessagesProj/src/emojis/apple/emoji/1_13.png index 3b2a335623..759c142fa0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_13.png and b/TMessagesProj/src/emojis/apple/emoji/1_13.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_130.png b/TMessagesProj/src/emojis/apple/emoji/1_130.png index bf1d7e7ffd..54319f9fef 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_130.png and b/TMessagesProj/src/emojis/apple/emoji/1_130.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_131.png b/TMessagesProj/src/emojis/apple/emoji/1_131.png index 054a743849..87107d0e40 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_131.png and b/TMessagesProj/src/emojis/apple/emoji/1_131.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_132.png b/TMessagesProj/src/emojis/apple/emoji/1_132.png index f55aa44900..5d9617a36a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_132.png and b/TMessagesProj/src/emojis/apple/emoji/1_132.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_133.png b/TMessagesProj/src/emojis/apple/emoji/1_133.png index 0a4df6796e..bbf17758e5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_133.png and b/TMessagesProj/src/emojis/apple/emoji/1_133.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_134.png b/TMessagesProj/src/emojis/apple/emoji/1_134.png index da3a059b24..882e0c11ec 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_134.png and b/TMessagesProj/src/emojis/apple/emoji/1_134.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_135.png b/TMessagesProj/src/emojis/apple/emoji/1_135.png index 692d97f3f2..07c4582395 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_135.png and b/TMessagesProj/src/emojis/apple/emoji/1_135.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_136.png b/TMessagesProj/src/emojis/apple/emoji/1_136.png index 6d0ad6ebee..d45d2e8beb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_136.png and b/TMessagesProj/src/emojis/apple/emoji/1_136.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_137.png b/TMessagesProj/src/emojis/apple/emoji/1_137.png index 1a55cd2775..aa5c49bcc3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_137.png and b/TMessagesProj/src/emojis/apple/emoji/1_137.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_138.png b/TMessagesProj/src/emojis/apple/emoji/1_138.png index b494d75d0e..9a6a344db4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_138.png and b/TMessagesProj/src/emojis/apple/emoji/1_138.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_139.png b/TMessagesProj/src/emojis/apple/emoji/1_139.png index 9ad5749301..59701285ce 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/1_139.png and b/TMessagesProj/src/emojis/apple/emoji/1_139.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_14.png b/TMessagesProj/src/emojis/apple/emoji/1_14.png index 3753124bda..7dc6460f1f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_14.png and b/TMessagesProj/src/emojis/apple/emoji/1_14.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_140.png b/TMessagesProj/src/emojis/apple/emoji/1_140.png index 42eb2292da..1b7ea1afb9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_140.png and b/TMessagesProj/src/emojis/apple/emoji/1_140.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_141.png b/TMessagesProj/src/emojis/apple/emoji/1_141.png index ad13f607ed..129e184da6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_141.png and b/TMessagesProj/src/emojis/apple/emoji/1_141.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_142.png b/TMessagesProj/src/emojis/apple/emoji/1_142.png index 5f1d0addcc..e6647d9a33 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_142.png and b/TMessagesProj/src/emojis/apple/emoji/1_142.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_143.png b/TMessagesProj/src/emojis/apple/emoji/1_143.png index 708d6f04de..a572d22a9b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_143.png and b/TMessagesProj/src/emojis/apple/emoji/1_143.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_144.png b/TMessagesProj/src/emojis/apple/emoji/1_144.png index a5d7624a6f..6295b6d8e3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_144.png and b/TMessagesProj/src/emojis/apple/emoji/1_144.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_145.png b/TMessagesProj/src/emojis/apple/emoji/1_145.png index 4ba60981a0..d8f540b9e8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_145.png and b/TMessagesProj/src/emojis/apple/emoji/1_145.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_146.png b/TMessagesProj/src/emojis/apple/emoji/1_146.png index cb8210ba1e..2af23c0d52 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_146.png and b/TMessagesProj/src/emojis/apple/emoji/1_146.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_147.png b/TMessagesProj/src/emojis/apple/emoji/1_147.png index 90d61162df..342aabb1cf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_147.png and b/TMessagesProj/src/emojis/apple/emoji/1_147.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_148.png b/TMessagesProj/src/emojis/apple/emoji/1_148.png index 3f37c4466e..52f0abb2f0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_148.png and b/TMessagesProj/src/emojis/apple/emoji/1_148.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_149.png b/TMessagesProj/src/emojis/apple/emoji/1_149.png index d8e5f0816a..4cda31af5a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_149.png and b/TMessagesProj/src/emojis/apple/emoji/1_149.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_15.png b/TMessagesProj/src/emojis/apple/emoji/1_15.png index 5c02f39b5a..0f42c5424c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_15.png and b/TMessagesProj/src/emojis/apple/emoji/1_15.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_150.png b/TMessagesProj/src/emojis/apple/emoji/1_150.png index f903f2322d..6f06a37612 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_150.png and 
b/TMessagesProj/src/emojis/apple/emoji/1_150.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_151.png b/TMessagesProj/src/emojis/apple/emoji/1_151.png index 24f3489207..ea25b61eb2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_151.png and b/TMessagesProj/src/emojis/apple/emoji/1_151.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_152.png b/TMessagesProj/src/emojis/apple/emoji/1_152.png index ac826a8659..ef8772e22d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_152.png and b/TMessagesProj/src/emojis/apple/emoji/1_152.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_153.png b/TMessagesProj/src/emojis/apple/emoji/1_153.png index f9ab05cfbc..06e5097366 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_153.png and b/TMessagesProj/src/emojis/apple/emoji/1_153.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_154.png b/TMessagesProj/src/emojis/apple/emoji/1_154.png index 8294db7374..c391815e4c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_154.png and b/TMessagesProj/src/emojis/apple/emoji/1_154.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_155.png b/TMessagesProj/src/emojis/apple/emoji/1_155.png index 384e7aea60..f4a981d8d8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_155.png and b/TMessagesProj/src/emojis/apple/emoji/1_155.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_156.png b/TMessagesProj/src/emojis/apple/emoji/1_156.png index fa6418d3e0..af082213b5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_156.png and b/TMessagesProj/src/emojis/apple/emoji/1_156.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_157.png b/TMessagesProj/src/emojis/apple/emoji/1_157.png index a12be24024..b3bf10ed5b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_157.png and b/TMessagesProj/src/emojis/apple/emoji/1_157.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_158.png b/TMessagesProj/src/emojis/apple/emoji/1_158.png index d712b1b644..b874a256f8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_158.png and b/TMessagesProj/src/emojis/apple/emoji/1_158.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_159.png b/TMessagesProj/src/emojis/apple/emoji/1_159.png index cd1b3f0138..f951cfe5ed 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_159.png and b/TMessagesProj/src/emojis/apple/emoji/1_159.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_16.png b/TMessagesProj/src/emojis/apple/emoji/1_16.png index 42fdd34a02..170c15ceea 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_16.png and b/TMessagesProj/src/emojis/apple/emoji/1_16.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_160.png b/TMessagesProj/src/emojis/apple/emoji/1_160.png index 71cabd4fbc..e4f26603b8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_160.png and b/TMessagesProj/src/emojis/apple/emoji/1_160.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_161.png b/TMessagesProj/src/emojis/apple/emoji/1_161.png index bc1e8c0b73..06c58b3d9c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_161.png and b/TMessagesProj/src/emojis/apple/emoji/1_161.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_162.png b/TMessagesProj/src/emojis/apple/emoji/1_162.png index 903288ed85..8f18e937a7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_162.png and b/TMessagesProj/src/emojis/apple/emoji/1_162.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/1_163.png b/TMessagesProj/src/emojis/apple/emoji/1_163.png index 4626d3a19b..c5fa1930f5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_163.png and b/TMessagesProj/src/emojis/apple/emoji/1_163.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_164.png b/TMessagesProj/src/emojis/apple/emoji/1_164.png index bcee190547..375eaf2e25 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_164.png and b/TMessagesProj/src/emojis/apple/emoji/1_164.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_165.png b/TMessagesProj/src/emojis/apple/emoji/1_165.png index 44b56f6353..5740612c02 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_165.png and b/TMessagesProj/src/emojis/apple/emoji/1_165.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_166.png b/TMessagesProj/src/emojis/apple/emoji/1_166.png index 891cce68a7..4b9cdfdf8f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_166.png and b/TMessagesProj/src/emojis/apple/emoji/1_166.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_167.png b/TMessagesProj/src/emojis/apple/emoji/1_167.png index 208938ec53..98b7ff73f2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_167.png and b/TMessagesProj/src/emojis/apple/emoji/1_167.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_168.png b/TMessagesProj/src/emojis/apple/emoji/1_168.png index 245d4742c2..8f1128e1dd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_168.png and b/TMessagesProj/src/emojis/apple/emoji/1_168.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_169.png b/TMessagesProj/src/emojis/apple/emoji/1_169.png index 1df5a9e1a3..7875404513 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_169.png and b/TMessagesProj/src/emojis/apple/emoji/1_169.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_17.png b/TMessagesProj/src/emojis/apple/emoji/1_17.png index 5cfaee7332..d1f5446cd6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_17.png and b/TMessagesProj/src/emojis/apple/emoji/1_17.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_170.png b/TMessagesProj/src/emojis/apple/emoji/1_170.png index 9fe38ff374..df8f1db38e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_170.png and b/TMessagesProj/src/emojis/apple/emoji/1_170.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_171.png b/TMessagesProj/src/emojis/apple/emoji/1_171.png index ca27ca4e55..c749094de5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_171.png and b/TMessagesProj/src/emojis/apple/emoji/1_171.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_172.png b/TMessagesProj/src/emojis/apple/emoji/1_172.png index c1a45651c9..cd37e27cfa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_172.png and b/TMessagesProj/src/emojis/apple/emoji/1_172.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_173.png b/TMessagesProj/src/emojis/apple/emoji/1_173.png index 1afaff3656..55d296cf88 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_173.png and b/TMessagesProj/src/emojis/apple/emoji/1_173.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_174.png b/TMessagesProj/src/emojis/apple/emoji/1_174.png index 89a299d3cc..f786c26287 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_174.png and b/TMessagesProj/src/emojis/apple/emoji/1_174.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_175.png 
b/TMessagesProj/src/emojis/apple/emoji/1_175.png index c08654fbd9..b1ab0cf74f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_175.png and b/TMessagesProj/src/emojis/apple/emoji/1_175.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_176.png b/TMessagesProj/src/emojis/apple/emoji/1_176.png index e26a340b05..440dc9379d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_176.png and b/TMessagesProj/src/emojis/apple/emoji/1_176.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_177.png b/TMessagesProj/src/emojis/apple/emoji/1_177.png index fd3fe380eb..d8a7dca72e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_177.png and b/TMessagesProj/src/emojis/apple/emoji/1_177.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_178.png b/TMessagesProj/src/emojis/apple/emoji/1_178.png index 2cf015a45c..5e63ccab2b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_178.png and b/TMessagesProj/src/emojis/apple/emoji/1_178.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_179.png b/TMessagesProj/src/emojis/apple/emoji/1_179.png index cc093d0d8e..f1725d4515 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_179.png and b/TMessagesProj/src/emojis/apple/emoji/1_179.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_18.png b/TMessagesProj/src/emojis/apple/emoji/1_18.png index 24a15d72fb..618aef35af 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_18.png and b/TMessagesProj/src/emojis/apple/emoji/1_18.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_180.png b/TMessagesProj/src/emojis/apple/emoji/1_180.png index 36a6e3a646..c86124dfd9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_180.png and b/TMessagesProj/src/emojis/apple/emoji/1_180.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_181.png b/TMessagesProj/src/emojis/apple/emoji/1_181.png index cb6b0a0b4f..a8ee3574c6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_181.png and b/TMessagesProj/src/emojis/apple/emoji/1_181.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_182.png b/TMessagesProj/src/emojis/apple/emoji/1_182.png index 4c7c7de152..df48e72b62 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_182.png and b/TMessagesProj/src/emojis/apple/emoji/1_182.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_183.png b/TMessagesProj/src/emojis/apple/emoji/1_183.png index 4d59482467..cbf05eae12 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_183.png and b/TMessagesProj/src/emojis/apple/emoji/1_183.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_184.png b/TMessagesProj/src/emojis/apple/emoji/1_184.png index eaa7103e06..ff09d0b2d6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_184.png and b/TMessagesProj/src/emojis/apple/emoji/1_184.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_185.png b/TMessagesProj/src/emojis/apple/emoji/1_185.png index 8e959e7465..617a3b77a3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_185.png and b/TMessagesProj/src/emojis/apple/emoji/1_185.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_186.png b/TMessagesProj/src/emojis/apple/emoji/1_186.png index f773d93e96..4bc74d1550 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/1_186.png and b/TMessagesProj/src/emojis/apple/emoji/1_186.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/1_187.png b/TMessagesProj/src/emojis/apple/emoji/1_187.png index 6131a83ea3..9530d191df 100644 
[Binary emoji asset changes: the Apple emoji atlas tiles under TMessagesProj/src/emojis/apple/emoji/ (the 1_*.png, 2_*.png and 3_*.png sheets in this range, 1_187.png through 3_135.png) differ as binary files; 1_204.png–1_210.png, 2_126.png and 2_127.png are added as new files. Git records each of these changes only as "Binary files a/… and b/… differ", with no textual content to review.]
b/TMessagesProj/src/emojis/apple/emoji/3_135.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_136.png b/TMessagesProj/src/emojis/apple/emoji/3_136.png index fd66323735..18cb62ba4b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_136.png and b/TMessagesProj/src/emojis/apple/emoji/3_136.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_137.png b/TMessagesProj/src/emojis/apple/emoji/3_137.png index bf44014df6..d3caef3807 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_137.png and b/TMessagesProj/src/emojis/apple/emoji/3_137.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_138.png b/TMessagesProj/src/emojis/apple/emoji/3_138.png index b68c903c83..582cd45eac 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_138.png and b/TMessagesProj/src/emojis/apple/emoji/3_138.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_139.png b/TMessagesProj/src/emojis/apple/emoji/3_139.png index 91570fd93a..faf287c15b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_139.png and b/TMessagesProj/src/emojis/apple/emoji/3_139.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_14.png b/TMessagesProj/src/emojis/apple/emoji/3_14.png index 5816ddacd1..fb97531f4b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_14.png and b/TMessagesProj/src/emojis/apple/emoji/3_14.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_140.png b/TMessagesProj/src/emojis/apple/emoji/3_140.png index 54cc2f35d4..53dc904079 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_140.png and b/TMessagesProj/src/emojis/apple/emoji/3_140.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_141.png b/TMessagesProj/src/emojis/apple/emoji/3_141.png index c5850eea1b..c2a37620a2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_141.png and b/TMessagesProj/src/emojis/apple/emoji/3_141.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_142.png b/TMessagesProj/src/emojis/apple/emoji/3_142.png index fe61695928..44f3f69650 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_142.png and b/TMessagesProj/src/emojis/apple/emoji/3_142.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_143.png b/TMessagesProj/src/emojis/apple/emoji/3_143.png index d1a6fe525b..ffcabb815e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_143.png and b/TMessagesProj/src/emojis/apple/emoji/3_143.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_144.png b/TMessagesProj/src/emojis/apple/emoji/3_144.png index 5f7b599632..4168d3c560 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_144.png and b/TMessagesProj/src/emojis/apple/emoji/3_144.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_145.png b/TMessagesProj/src/emojis/apple/emoji/3_145.png index 1f27d108ce..914fa5f43c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_145.png and b/TMessagesProj/src/emojis/apple/emoji/3_145.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_146.png b/TMessagesProj/src/emojis/apple/emoji/3_146.png index 463d2cceb6..9a558a1a59 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_146.png and b/TMessagesProj/src/emojis/apple/emoji/3_146.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_147.png b/TMessagesProj/src/emojis/apple/emoji/3_147.png index 2097ecc152..bf54d0903c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_147.png and b/TMessagesProj/src/emojis/apple/emoji/3_147.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/3_148.png b/TMessagesProj/src/emojis/apple/emoji/3_148.png index 25f2b7b0dd..b2d4e5d632 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_148.png and b/TMessagesProj/src/emojis/apple/emoji/3_148.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_149.png b/TMessagesProj/src/emojis/apple/emoji/3_149.png index f4a635ff31..2e73e70b23 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_149.png and b/TMessagesProj/src/emojis/apple/emoji/3_149.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_15.png b/TMessagesProj/src/emojis/apple/emoji/3_15.png index 5ab133a19b..5b187b9077 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_15.png and b/TMessagesProj/src/emojis/apple/emoji/3_15.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_150.png b/TMessagesProj/src/emojis/apple/emoji/3_150.png index 90d9d1a75d..494d95a1ad 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_150.png and b/TMessagesProj/src/emojis/apple/emoji/3_150.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_151.png b/TMessagesProj/src/emojis/apple/emoji/3_151.png index a8004040fc..4c6518a5dc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_151.png and b/TMessagesProj/src/emojis/apple/emoji/3_151.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_152.png b/TMessagesProj/src/emojis/apple/emoji/3_152.png index 15a8286329..0ec1f57792 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_152.png and b/TMessagesProj/src/emojis/apple/emoji/3_152.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_153.png b/TMessagesProj/src/emojis/apple/emoji/3_153.png index 032afc7631..6f32677275 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_153.png and b/TMessagesProj/src/emojis/apple/emoji/3_153.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_154.png b/TMessagesProj/src/emojis/apple/emoji/3_154.png index accde2823e..2bd47270dd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_154.png and b/TMessagesProj/src/emojis/apple/emoji/3_154.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_155.png b/TMessagesProj/src/emojis/apple/emoji/3_155.png index c27c79817e..40c616d018 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_155.png and b/TMessagesProj/src/emojis/apple/emoji/3_155.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_156.png b/TMessagesProj/src/emojis/apple/emoji/3_156.png index e507b8e7f0..e20355d59e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_156.png and b/TMessagesProj/src/emojis/apple/emoji/3_156.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_157.png b/TMessagesProj/src/emojis/apple/emoji/3_157.png index 441031b2cb..c189b6e17d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_157.png and b/TMessagesProj/src/emojis/apple/emoji/3_157.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_158.png b/TMessagesProj/src/emojis/apple/emoji/3_158.png index e47c4c8b40..2607411271 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_158.png and b/TMessagesProj/src/emojis/apple/emoji/3_158.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_159.png b/TMessagesProj/src/emojis/apple/emoji/3_159.png index b41812fce6..fc747a72bb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_159.png and b/TMessagesProj/src/emojis/apple/emoji/3_159.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_16.png 
b/TMessagesProj/src/emojis/apple/emoji/3_16.png index 6c2f128cf2..2e7df1d222 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_16.png and b/TMessagesProj/src/emojis/apple/emoji/3_16.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_160.png b/TMessagesProj/src/emojis/apple/emoji/3_160.png index 5df2428b3f..a885e12fe6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_160.png and b/TMessagesProj/src/emojis/apple/emoji/3_160.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_161.png b/TMessagesProj/src/emojis/apple/emoji/3_161.png index 5dec3c7f2d..c0a5c5fb20 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_161.png and b/TMessagesProj/src/emojis/apple/emoji/3_161.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_162.png b/TMessagesProj/src/emojis/apple/emoji/3_162.png index b4c37a2ada..0143afbfd3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_162.png and b/TMessagesProj/src/emojis/apple/emoji/3_162.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_163.png b/TMessagesProj/src/emojis/apple/emoji/3_163.png index 805803a501..027ae3358e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_163.png and b/TMessagesProj/src/emojis/apple/emoji/3_163.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_164.png b/TMessagesProj/src/emojis/apple/emoji/3_164.png index c47b8faaa9..6e715a71f8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_164.png and b/TMessagesProj/src/emojis/apple/emoji/3_164.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_165.png b/TMessagesProj/src/emojis/apple/emoji/3_165.png index b0798cffe3..b5fb4f9c82 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_165.png and b/TMessagesProj/src/emojis/apple/emoji/3_165.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_166.png b/TMessagesProj/src/emojis/apple/emoji/3_166.png index 80c0573530..67779d83bf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_166.png and b/TMessagesProj/src/emojis/apple/emoji/3_166.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_167.png b/TMessagesProj/src/emojis/apple/emoji/3_167.png index aee0337d96..3673111f12 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_167.png and b/TMessagesProj/src/emojis/apple/emoji/3_167.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_168.png b/TMessagesProj/src/emojis/apple/emoji/3_168.png index 0f76c20b97..6c2c1bd417 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_168.png and b/TMessagesProj/src/emojis/apple/emoji/3_168.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_169.png b/TMessagesProj/src/emojis/apple/emoji/3_169.png index 3abaddef90..9540e9a6fa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_169.png and b/TMessagesProj/src/emojis/apple/emoji/3_169.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_17.png b/TMessagesProj/src/emojis/apple/emoji/3_17.png index f1cf7ca90d..d35aa510e1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_17.png and b/TMessagesProj/src/emojis/apple/emoji/3_17.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_170.png b/TMessagesProj/src/emojis/apple/emoji/3_170.png index 611173bfa4..0794f3166e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_170.png and b/TMessagesProj/src/emojis/apple/emoji/3_170.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_171.png b/TMessagesProj/src/emojis/apple/emoji/3_171.png index fbaf1f4a11..8692cef385 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/3_171.png and b/TMessagesProj/src/emojis/apple/emoji/3_171.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_172.png b/TMessagesProj/src/emojis/apple/emoji/3_172.png index 155b49494f..4ef248fc85 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_172.png and b/TMessagesProj/src/emojis/apple/emoji/3_172.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_173.png b/TMessagesProj/src/emojis/apple/emoji/3_173.png index 1cc4671c9c..4eda81975c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_173.png and b/TMessagesProj/src/emojis/apple/emoji/3_173.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_174.png b/TMessagesProj/src/emojis/apple/emoji/3_174.png index d35411172e..d93833e532 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_174.png and b/TMessagesProj/src/emojis/apple/emoji/3_174.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_175.png b/TMessagesProj/src/emojis/apple/emoji/3_175.png index a97027775c..aeaa607bdd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_175.png and b/TMessagesProj/src/emojis/apple/emoji/3_175.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_176.png b/TMessagesProj/src/emojis/apple/emoji/3_176.png index 27b45e7e69..81c4b7fb94 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_176.png and b/TMessagesProj/src/emojis/apple/emoji/3_176.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_177.png b/TMessagesProj/src/emojis/apple/emoji/3_177.png index f3f5d84adb..412b92f49b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_177.png and b/TMessagesProj/src/emojis/apple/emoji/3_177.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_178.png b/TMessagesProj/src/emojis/apple/emoji/3_178.png index 6abbe44895..ff8fff41a7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_178.png and b/TMessagesProj/src/emojis/apple/emoji/3_178.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_179.png b/TMessagesProj/src/emojis/apple/emoji/3_179.png index 8548ca81dd..89ee352033 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_179.png and b/TMessagesProj/src/emojis/apple/emoji/3_179.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_18.png b/TMessagesProj/src/emojis/apple/emoji/3_18.png index b0e1bb1ef6..2a243ffb18 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_18.png and b/TMessagesProj/src/emojis/apple/emoji/3_18.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_180.png b/TMessagesProj/src/emojis/apple/emoji/3_180.png index d09d9da98b..077704bd5c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_180.png and b/TMessagesProj/src/emojis/apple/emoji/3_180.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_181.png b/TMessagesProj/src/emojis/apple/emoji/3_181.png index 779f889a38..43e08666c7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_181.png and b/TMessagesProj/src/emojis/apple/emoji/3_181.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_182.png b/TMessagesProj/src/emojis/apple/emoji/3_182.png index 3c33ea200b..e7a7ef2bf5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_182.png and b/TMessagesProj/src/emojis/apple/emoji/3_182.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_183.png b/TMessagesProj/src/emojis/apple/emoji/3_183.png index ab3544fecc..1aaa2f9d27 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_183.png and 
b/TMessagesProj/src/emojis/apple/emoji/3_183.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_184.png b/TMessagesProj/src/emojis/apple/emoji/3_184.png index 13df2c4190..dd0587dfa3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_184.png and b/TMessagesProj/src/emojis/apple/emoji/3_184.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_185.png b/TMessagesProj/src/emojis/apple/emoji/3_185.png index 903c0d43db..d14dbea90f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_185.png and b/TMessagesProj/src/emojis/apple/emoji/3_185.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_186.png b/TMessagesProj/src/emojis/apple/emoji/3_186.png index 1e3bb6fd45..73b8c97662 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_186.png and b/TMessagesProj/src/emojis/apple/emoji/3_186.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_187.png b/TMessagesProj/src/emojis/apple/emoji/3_187.png index 10f9638b9f..a087ebf0d4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_187.png and b/TMessagesProj/src/emojis/apple/emoji/3_187.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_188.png b/TMessagesProj/src/emojis/apple/emoji/3_188.png index e3bb6b30ac..f0d3f3de03 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_188.png and b/TMessagesProj/src/emojis/apple/emoji/3_188.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_189.png b/TMessagesProj/src/emojis/apple/emoji/3_189.png index b42e354c3a..49ce71f449 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_189.png and b/TMessagesProj/src/emojis/apple/emoji/3_189.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_19.png b/TMessagesProj/src/emojis/apple/emoji/3_19.png index 3fedb05ddb..c8f93cafc8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_19.png and b/TMessagesProj/src/emojis/apple/emoji/3_19.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_190.png b/TMessagesProj/src/emojis/apple/emoji/3_190.png index 3b36509539..28db50c489 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_190.png and b/TMessagesProj/src/emojis/apple/emoji/3_190.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_191.png b/TMessagesProj/src/emojis/apple/emoji/3_191.png index 67ccb38667..054e59d962 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_191.png and b/TMessagesProj/src/emojis/apple/emoji/3_191.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_192.png b/TMessagesProj/src/emojis/apple/emoji/3_192.png index 93253ab965..d74917c6f4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_192.png and b/TMessagesProj/src/emojis/apple/emoji/3_192.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_193.png b/TMessagesProj/src/emojis/apple/emoji/3_193.png index 1cbe96a487..b1f49efccd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_193.png and b/TMessagesProj/src/emojis/apple/emoji/3_193.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_194.png b/TMessagesProj/src/emojis/apple/emoji/3_194.png index 1f2f5583c1..c281d2309f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_194.png and b/TMessagesProj/src/emojis/apple/emoji/3_194.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_195.png b/TMessagesProj/src/emojis/apple/emoji/3_195.png index 455bda829e..8a9d79b368 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_195.png and b/TMessagesProj/src/emojis/apple/emoji/3_195.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/3_196.png b/TMessagesProj/src/emojis/apple/emoji/3_196.png index add6e51af6..8894cc872e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_196.png and b/TMessagesProj/src/emojis/apple/emoji/3_196.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_197.png b/TMessagesProj/src/emojis/apple/emoji/3_197.png index bb319136e7..ab3a2a04ea 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_197.png and b/TMessagesProj/src/emojis/apple/emoji/3_197.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_198.png b/TMessagesProj/src/emojis/apple/emoji/3_198.png index a3fb1993a0..91857161c5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_198.png and b/TMessagesProj/src/emojis/apple/emoji/3_198.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_199.png b/TMessagesProj/src/emojis/apple/emoji/3_199.png index c883149215..41df032daf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_199.png and b/TMessagesProj/src/emojis/apple/emoji/3_199.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_2.png b/TMessagesProj/src/emojis/apple/emoji/3_2.png index fb0c27658c..e12fcd129d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_2.png and b/TMessagesProj/src/emojis/apple/emoji/3_2.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_20.png b/TMessagesProj/src/emojis/apple/emoji/3_20.png index c032b6b1b4..30c59d3b79 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_20.png and b/TMessagesProj/src/emojis/apple/emoji/3_20.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_200.png b/TMessagesProj/src/emojis/apple/emoji/3_200.png index 642c4bbcf0..b1588bea8a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_200.png and b/TMessagesProj/src/emojis/apple/emoji/3_200.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_201.png b/TMessagesProj/src/emojis/apple/emoji/3_201.png index cd4b144606..4277db54e1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_201.png and b/TMessagesProj/src/emojis/apple/emoji/3_201.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_202.png b/TMessagesProj/src/emojis/apple/emoji/3_202.png index bf7b158502..dc222cec6b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_202.png and b/TMessagesProj/src/emojis/apple/emoji/3_202.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_203.png b/TMessagesProj/src/emojis/apple/emoji/3_203.png index efac9837d8..3c2ff277e5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_203.png and b/TMessagesProj/src/emojis/apple/emoji/3_203.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_204.png b/TMessagesProj/src/emojis/apple/emoji/3_204.png index 97e4451e62..36d423753f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_204.png and b/TMessagesProj/src/emojis/apple/emoji/3_204.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_205.png b/TMessagesProj/src/emojis/apple/emoji/3_205.png index ed813c1679..7b8d6adf26 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_205.png and b/TMessagesProj/src/emojis/apple/emoji/3_205.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_206.png b/TMessagesProj/src/emojis/apple/emoji/3_206.png index a68ef4c457..8259f5da57 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_206.png and b/TMessagesProj/src/emojis/apple/emoji/3_206.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_207.png 
b/TMessagesProj/src/emojis/apple/emoji/3_207.png index 728f580785..0ef796c308 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_207.png and b/TMessagesProj/src/emojis/apple/emoji/3_207.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_208.png b/TMessagesProj/src/emojis/apple/emoji/3_208.png index 188cabd68e..aa4f558907 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_208.png and b/TMessagesProj/src/emojis/apple/emoji/3_208.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_209.png b/TMessagesProj/src/emojis/apple/emoji/3_209.png index db6a012666..3cdade7900 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_209.png and b/TMessagesProj/src/emojis/apple/emoji/3_209.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_21.png b/TMessagesProj/src/emojis/apple/emoji/3_21.png index a3ce17c754..0b1764f99c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_21.png and b/TMessagesProj/src/emojis/apple/emoji/3_21.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_210.png b/TMessagesProj/src/emojis/apple/emoji/3_210.png index 0e0c836910..500a7dd7f2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_210.png and b/TMessagesProj/src/emojis/apple/emoji/3_210.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_211.png b/TMessagesProj/src/emojis/apple/emoji/3_211.png index bc4c931761..cb0312f559 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_211.png and b/TMessagesProj/src/emojis/apple/emoji/3_211.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_212.png b/TMessagesProj/src/emojis/apple/emoji/3_212.png index 6fa4dae84a..f43d4b7a44 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_212.png and b/TMessagesProj/src/emojis/apple/emoji/3_212.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_213.png b/TMessagesProj/src/emojis/apple/emoji/3_213.png index c93d52b0e0..7808c66ed6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_213.png and b/TMessagesProj/src/emojis/apple/emoji/3_213.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_214.png b/TMessagesProj/src/emojis/apple/emoji/3_214.png index 80a0a875bf..af30e1f0c6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_214.png and b/TMessagesProj/src/emojis/apple/emoji/3_214.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_215.png b/TMessagesProj/src/emojis/apple/emoji/3_215.png index 7d410b8789..c281bac59f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_215.png and b/TMessagesProj/src/emojis/apple/emoji/3_215.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_216.png b/TMessagesProj/src/emojis/apple/emoji/3_216.png index b91237ac7a..8390adb562 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_216.png and b/TMessagesProj/src/emojis/apple/emoji/3_216.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_217.png b/TMessagesProj/src/emojis/apple/emoji/3_217.png index 063bb124cc..7ad3279a84 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_217.png and b/TMessagesProj/src/emojis/apple/emoji/3_217.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_218.png b/TMessagesProj/src/emojis/apple/emoji/3_218.png index 2eb94f072f..935f6e7ca8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_218.png and b/TMessagesProj/src/emojis/apple/emoji/3_218.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_219.png b/TMessagesProj/src/emojis/apple/emoji/3_219.png index 13c33095c3..44108bb939 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/3_219.png and b/TMessagesProj/src/emojis/apple/emoji/3_219.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_22.png b/TMessagesProj/src/emojis/apple/emoji/3_22.png index 923398e8a6..44162e76ac 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_22.png and b/TMessagesProj/src/emojis/apple/emoji/3_22.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_220.png b/TMessagesProj/src/emojis/apple/emoji/3_220.png index cca6c65a88..4f1b9c7f0a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_220.png and b/TMessagesProj/src/emojis/apple/emoji/3_220.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_221.png b/TMessagesProj/src/emojis/apple/emoji/3_221.png index 330885e1a7..44b40cf528 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_221.png and b/TMessagesProj/src/emojis/apple/emoji/3_221.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_222.png b/TMessagesProj/src/emojis/apple/emoji/3_222.png index 73bd2dc19b..4422960109 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_222.png and b/TMessagesProj/src/emojis/apple/emoji/3_222.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_223.png b/TMessagesProj/src/emojis/apple/emoji/3_223.png index f0c7750f1b..05712cb46e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_223.png and b/TMessagesProj/src/emojis/apple/emoji/3_223.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_224.png b/TMessagesProj/src/emojis/apple/emoji/3_224.png index f002d7ca5c..87b41094d2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_224.png and b/TMessagesProj/src/emojis/apple/emoji/3_224.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_225.png b/TMessagesProj/src/emojis/apple/emoji/3_225.png index c155761364..2df1c6b613 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_225.png and b/TMessagesProj/src/emojis/apple/emoji/3_225.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_226.png b/TMessagesProj/src/emojis/apple/emoji/3_226.png index 35e4cc6aa2..a2226da60d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_226.png and b/TMessagesProj/src/emojis/apple/emoji/3_226.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_227.png b/TMessagesProj/src/emojis/apple/emoji/3_227.png index 964dd310aa..77ccc3a06e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_227.png and b/TMessagesProj/src/emojis/apple/emoji/3_227.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_228.png b/TMessagesProj/src/emojis/apple/emoji/3_228.png index 136bd759f3..3c1b43bd70 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_228.png and b/TMessagesProj/src/emojis/apple/emoji/3_228.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_229.png b/TMessagesProj/src/emojis/apple/emoji/3_229.png index b36900efbf..9dbaf4754b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_229.png and b/TMessagesProj/src/emojis/apple/emoji/3_229.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_23.png b/TMessagesProj/src/emojis/apple/emoji/3_23.png index bab7e2f6ab..c173ebca93 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_23.png and b/TMessagesProj/src/emojis/apple/emoji/3_23.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_230.png b/TMessagesProj/src/emojis/apple/emoji/3_230.png index 048ca61b2c..9c01d9ba24 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_230.png and 
b/TMessagesProj/src/emojis/apple/emoji/3_230.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_231.png b/TMessagesProj/src/emojis/apple/emoji/3_231.png index ae49e3f4ff..48bd5d94a4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_231.png and b/TMessagesProj/src/emojis/apple/emoji/3_231.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_232.png b/TMessagesProj/src/emojis/apple/emoji/3_232.png index c423bc88ac..a52b693ffd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_232.png and b/TMessagesProj/src/emojis/apple/emoji/3_232.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_233.png b/TMessagesProj/src/emojis/apple/emoji/3_233.png index e070a98cf5..1e970ba499 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_233.png and b/TMessagesProj/src/emojis/apple/emoji/3_233.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_234.png b/TMessagesProj/src/emojis/apple/emoji/3_234.png index a06afe84dd..68e87a3782 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_234.png and b/TMessagesProj/src/emojis/apple/emoji/3_234.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_235.png b/TMessagesProj/src/emojis/apple/emoji/3_235.png index 58d7beaead..c52aa3c22b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_235.png and b/TMessagesProj/src/emojis/apple/emoji/3_235.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_236.png b/TMessagesProj/src/emojis/apple/emoji/3_236.png index 2a7b623113..1d435948b3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_236.png and b/TMessagesProj/src/emojis/apple/emoji/3_236.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_237.png b/TMessagesProj/src/emojis/apple/emoji/3_237.png index 98321c75c2..448bd4b58b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_237.png and b/TMessagesProj/src/emojis/apple/emoji/3_237.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_238.png b/TMessagesProj/src/emojis/apple/emoji/3_238.png index 2f5900cf42..f16dbe8a49 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_238.png and b/TMessagesProj/src/emojis/apple/emoji/3_238.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_239.png b/TMessagesProj/src/emojis/apple/emoji/3_239.png index 043017a165..335aca10ca 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_239.png and b/TMessagesProj/src/emojis/apple/emoji/3_239.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_24.png b/TMessagesProj/src/emojis/apple/emoji/3_24.png index eb8a0bb2b3..a01d5f71ba 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_24.png and b/TMessagesProj/src/emojis/apple/emoji/3_24.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_240.png b/TMessagesProj/src/emojis/apple/emoji/3_240.png index 7e167415e1..169132eadb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_240.png and b/TMessagesProj/src/emojis/apple/emoji/3_240.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_241.png b/TMessagesProj/src/emojis/apple/emoji/3_241.png index 33f451e4cc..7cbdd0ca32 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_241.png and b/TMessagesProj/src/emojis/apple/emoji/3_241.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_242.png b/TMessagesProj/src/emojis/apple/emoji/3_242.png index 9471abad04..2f6b7b5b8c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_242.png and b/TMessagesProj/src/emojis/apple/emoji/3_242.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/3_243.png b/TMessagesProj/src/emojis/apple/emoji/3_243.png index fec71e8760..d526f196bb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_243.png and b/TMessagesProj/src/emojis/apple/emoji/3_243.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_244.png b/TMessagesProj/src/emojis/apple/emoji/3_244.png index 3fb86ae957..0e31dd433d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_244.png and b/TMessagesProj/src/emojis/apple/emoji/3_244.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_245.png b/TMessagesProj/src/emojis/apple/emoji/3_245.png index 9ce7bfbbdb..0272dc183f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_245.png and b/TMessagesProj/src/emojis/apple/emoji/3_245.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_246.png b/TMessagesProj/src/emojis/apple/emoji/3_246.png index bdc4087530..721b4af67e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_246.png and b/TMessagesProj/src/emojis/apple/emoji/3_246.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_247.png b/TMessagesProj/src/emojis/apple/emoji/3_247.png index 1ad5a562cc..409b1d4341 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_247.png and b/TMessagesProj/src/emojis/apple/emoji/3_247.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_248.png b/TMessagesProj/src/emojis/apple/emoji/3_248.png index a36fb7f827..58020aab9c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_248.png and b/TMessagesProj/src/emojis/apple/emoji/3_248.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_249.png b/TMessagesProj/src/emojis/apple/emoji/3_249.png index 18b8270225..21b59368b5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_249.png and b/TMessagesProj/src/emojis/apple/emoji/3_249.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_25.png b/TMessagesProj/src/emojis/apple/emoji/3_25.png index 31cd69f71e..f3f715b87b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_25.png and b/TMessagesProj/src/emojis/apple/emoji/3_25.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_250.png b/TMessagesProj/src/emojis/apple/emoji/3_250.png index ea0d9addec..316d1a6905 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_250.png and b/TMessagesProj/src/emojis/apple/emoji/3_250.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_251.png b/TMessagesProj/src/emojis/apple/emoji/3_251.png index 649eb89a36..340117b9e3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_251.png and b/TMessagesProj/src/emojis/apple/emoji/3_251.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_252.png b/TMessagesProj/src/emojis/apple/emoji/3_252.png index b6bf9c8e3d..4996cd1d5c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_252.png and b/TMessagesProj/src/emojis/apple/emoji/3_252.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_253.png b/TMessagesProj/src/emojis/apple/emoji/3_253.png index 1ad66dfee4..1c274dac48 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_253.png and b/TMessagesProj/src/emojis/apple/emoji/3_253.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_254.png b/TMessagesProj/src/emojis/apple/emoji/3_254.png index aa62256afc..876615ad0c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_254.png and b/TMessagesProj/src/emojis/apple/emoji/3_254.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_255.png 
b/TMessagesProj/src/emojis/apple/emoji/3_255.png index f6e2c743dd..2a4831925a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_255.png and b/TMessagesProj/src/emojis/apple/emoji/3_255.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_256.png b/TMessagesProj/src/emojis/apple/emoji/3_256.png index a798809b74..bb81607ff6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_256.png and b/TMessagesProj/src/emojis/apple/emoji/3_256.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_257.png b/TMessagesProj/src/emojis/apple/emoji/3_257.png index 51096853d9..99d81ca494 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_257.png and b/TMessagesProj/src/emojis/apple/emoji/3_257.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_258.png b/TMessagesProj/src/emojis/apple/emoji/3_258.png index 63489f2185..4dd962d733 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_258.png and b/TMessagesProj/src/emojis/apple/emoji/3_258.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_259.png b/TMessagesProj/src/emojis/apple/emoji/3_259.png index df340e4997..ddadd3a838 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_259.png and b/TMessagesProj/src/emojis/apple/emoji/3_259.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_26.png b/TMessagesProj/src/emojis/apple/emoji/3_26.png index 4c90dbebe8..0fae544fc5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_26.png and b/TMessagesProj/src/emojis/apple/emoji/3_26.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_260.png b/TMessagesProj/src/emojis/apple/emoji/3_260.png index f985e7353f..96a85deb57 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_260.png and b/TMessagesProj/src/emojis/apple/emoji/3_260.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_261.png b/TMessagesProj/src/emojis/apple/emoji/3_261.png index 7f845ad762..8865b598d1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_261.png and b/TMessagesProj/src/emojis/apple/emoji/3_261.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_262.png b/TMessagesProj/src/emojis/apple/emoji/3_262.png index 7bfe905d4b..14d29939a0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_262.png and b/TMessagesProj/src/emojis/apple/emoji/3_262.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_263.png b/TMessagesProj/src/emojis/apple/emoji/3_263.png index d3ae48b77b..e1737a6a9b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_263.png and b/TMessagesProj/src/emojis/apple/emoji/3_263.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_264.png b/TMessagesProj/src/emojis/apple/emoji/3_264.png index ef0562a6f9..a156407c5d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_264.png and b/TMessagesProj/src/emojis/apple/emoji/3_264.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_265.png b/TMessagesProj/src/emojis/apple/emoji/3_265.png index a38762291a..d83659f48c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_265.png and b/TMessagesProj/src/emojis/apple/emoji/3_265.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_266.png b/TMessagesProj/src/emojis/apple/emoji/3_266.png index 00013704f1..4976537632 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_266.png and b/TMessagesProj/src/emojis/apple/emoji/3_266.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_267.png b/TMessagesProj/src/emojis/apple/emoji/3_267.png index 8861088d23..26cf364156 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/3_267.png and b/TMessagesProj/src/emojis/apple/emoji/3_267.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_268.png b/TMessagesProj/src/emojis/apple/emoji/3_268.png index b60c943b9c..5232830ecd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_268.png and b/TMessagesProj/src/emojis/apple/emoji/3_268.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_269.png b/TMessagesProj/src/emojis/apple/emoji/3_269.png index 286b2968cd..c1078de139 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_269.png and b/TMessagesProj/src/emojis/apple/emoji/3_269.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_27.png b/TMessagesProj/src/emojis/apple/emoji/3_27.png index f4ce6615be..7239ee84eb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_27.png and b/TMessagesProj/src/emojis/apple/emoji/3_27.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_270.png b/TMessagesProj/src/emojis/apple/emoji/3_270.png index 7d1e12e690..f5a8ea4410 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_270.png and b/TMessagesProj/src/emojis/apple/emoji/3_270.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_271.png b/TMessagesProj/src/emojis/apple/emoji/3_271.png index 7c5391b47a..cef7b7b0c9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_271.png and b/TMessagesProj/src/emojis/apple/emoji/3_271.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_272.png b/TMessagesProj/src/emojis/apple/emoji/3_272.png index ec3078ab7f..b2a740c253 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_272.png and b/TMessagesProj/src/emojis/apple/emoji/3_272.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_273.png b/TMessagesProj/src/emojis/apple/emoji/3_273.png index 9a2411e8ed..5925fa6645 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_273.png and b/TMessagesProj/src/emojis/apple/emoji/3_273.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_274.png b/TMessagesProj/src/emojis/apple/emoji/3_274.png index e8e84d8b31..78686a6023 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_274.png and b/TMessagesProj/src/emojis/apple/emoji/3_274.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_275.png b/TMessagesProj/src/emojis/apple/emoji/3_275.png index 507065ce09..7bf811d488 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_275.png and b/TMessagesProj/src/emojis/apple/emoji/3_275.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_276.png b/TMessagesProj/src/emojis/apple/emoji/3_276.png index cfc602f013..2a0ad37468 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_276.png and b/TMessagesProj/src/emojis/apple/emoji/3_276.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_277.png b/TMessagesProj/src/emojis/apple/emoji/3_277.png index 4430440836..d14cd28ee1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_277.png and b/TMessagesProj/src/emojis/apple/emoji/3_277.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_278.png b/TMessagesProj/src/emojis/apple/emoji/3_278.png index 4d1f661d41..26052e50b3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_278.png and b/TMessagesProj/src/emojis/apple/emoji/3_278.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_279.png b/TMessagesProj/src/emojis/apple/emoji/3_279.png index 3ef8024c84..63aba56903 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_279.png and 
b/TMessagesProj/src/emojis/apple/emoji/3_279.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_28.png b/TMessagesProj/src/emojis/apple/emoji/3_28.png index 91811276bd..88a31b62d2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_28.png and b/TMessagesProj/src/emojis/apple/emoji/3_28.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_280.png b/TMessagesProj/src/emojis/apple/emoji/3_280.png index 8a706f0e71..d1f082320a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_280.png and b/TMessagesProj/src/emojis/apple/emoji/3_280.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_281.png b/TMessagesProj/src/emojis/apple/emoji/3_281.png index 534189ad4e..491971f0b6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_281.png and b/TMessagesProj/src/emojis/apple/emoji/3_281.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_282.png b/TMessagesProj/src/emojis/apple/emoji/3_282.png index d81471a843..39488beaed 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_282.png and b/TMessagesProj/src/emojis/apple/emoji/3_282.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_283.png b/TMessagesProj/src/emojis/apple/emoji/3_283.png index a812d1517d..d63dbcafe7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_283.png and b/TMessagesProj/src/emojis/apple/emoji/3_283.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_284.png b/TMessagesProj/src/emojis/apple/emoji/3_284.png index 65475fa38d..2ba7e3e4e0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_284.png and b/TMessagesProj/src/emojis/apple/emoji/3_284.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_285.png b/TMessagesProj/src/emojis/apple/emoji/3_285.png index 2e3a33b8b1..56d79d7251 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_285.png and b/TMessagesProj/src/emojis/apple/emoji/3_285.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_286.png b/TMessagesProj/src/emojis/apple/emoji/3_286.png index 28f57fec84..bf7eb2e92f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_286.png and b/TMessagesProj/src/emojis/apple/emoji/3_286.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_287.png b/TMessagesProj/src/emojis/apple/emoji/3_287.png index 87c104727a..81135c49eb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_287.png and b/TMessagesProj/src/emojis/apple/emoji/3_287.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_288.png b/TMessagesProj/src/emojis/apple/emoji/3_288.png index 13af211c9a..aa34f842e4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_288.png and b/TMessagesProj/src/emojis/apple/emoji/3_288.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_289.png b/TMessagesProj/src/emojis/apple/emoji/3_289.png index a21768ddcf..703a5ee99d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_289.png and b/TMessagesProj/src/emojis/apple/emoji/3_289.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_29.png b/TMessagesProj/src/emojis/apple/emoji/3_29.png index 15ff960747..866aec1656 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_29.png and b/TMessagesProj/src/emojis/apple/emoji/3_29.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_290.png b/TMessagesProj/src/emojis/apple/emoji/3_290.png index 161c576c5d..b911848229 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_290.png and b/TMessagesProj/src/emojis/apple/emoji/3_290.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/3_291.png b/TMessagesProj/src/emojis/apple/emoji/3_291.png index 0ac88592d1..0821968fa6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_291.png and b/TMessagesProj/src/emojis/apple/emoji/3_291.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_292.png b/TMessagesProj/src/emojis/apple/emoji/3_292.png index ab3f58b1fc..fea80204b6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_292.png and b/TMessagesProj/src/emojis/apple/emoji/3_292.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_293.png b/TMessagesProj/src/emojis/apple/emoji/3_293.png index b464a958bd..6773409f3a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_293.png and b/TMessagesProj/src/emojis/apple/emoji/3_293.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_294.png b/TMessagesProj/src/emojis/apple/emoji/3_294.png index 8d7099e035..ebed28d940 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_294.png and b/TMessagesProj/src/emojis/apple/emoji/3_294.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_295.png b/TMessagesProj/src/emojis/apple/emoji/3_295.png index f97cc61a81..fb21dbcc50 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_295.png and b/TMessagesProj/src/emojis/apple/emoji/3_295.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_296.png b/TMessagesProj/src/emojis/apple/emoji/3_296.png index d25c05b22a..9c8740e6aa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_296.png and b/TMessagesProj/src/emojis/apple/emoji/3_296.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_297.png b/TMessagesProj/src/emojis/apple/emoji/3_297.png index 8135839479..5c38907054 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_297.png and b/TMessagesProj/src/emojis/apple/emoji/3_297.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_298.png b/TMessagesProj/src/emojis/apple/emoji/3_298.png index 95210699b3..f0dff9bb93 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_298.png and b/TMessagesProj/src/emojis/apple/emoji/3_298.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_299.png b/TMessagesProj/src/emojis/apple/emoji/3_299.png index af44c79306..0e5bb5052e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_299.png and b/TMessagesProj/src/emojis/apple/emoji/3_299.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_3.png b/TMessagesProj/src/emojis/apple/emoji/3_3.png index 471862e0dd..768046dcc5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_3.png and b/TMessagesProj/src/emojis/apple/emoji/3_3.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_30.png b/TMessagesProj/src/emojis/apple/emoji/3_30.png index baf0c4be8a..f44ff9d744 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_30.png and b/TMessagesProj/src/emojis/apple/emoji/3_30.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_300.png b/TMessagesProj/src/emojis/apple/emoji/3_300.png index feabae170f..779f341ca7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_300.png and b/TMessagesProj/src/emojis/apple/emoji/3_300.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_301.png b/TMessagesProj/src/emojis/apple/emoji/3_301.png index 9cabfae4b5..bda1578c50 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_301.png and b/TMessagesProj/src/emojis/apple/emoji/3_301.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_302.png 
b/TMessagesProj/src/emojis/apple/emoji/3_302.png index a994f95d61..091df73372 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_302.png and b/TMessagesProj/src/emojis/apple/emoji/3_302.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_303.png b/TMessagesProj/src/emojis/apple/emoji/3_303.png index de0375d04e..08194ca3cc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_303.png and b/TMessagesProj/src/emojis/apple/emoji/3_303.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_304.png b/TMessagesProj/src/emojis/apple/emoji/3_304.png index d33c68457c..3f6a5c6bea 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_304.png and b/TMessagesProj/src/emojis/apple/emoji/3_304.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_305.png b/TMessagesProj/src/emojis/apple/emoji/3_305.png index 888edcb7e3..41904dfaa5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_305.png and b/TMessagesProj/src/emojis/apple/emoji/3_305.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_306.png b/TMessagesProj/src/emojis/apple/emoji/3_306.png index 12faf03ace..759af0559e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_306.png and b/TMessagesProj/src/emojis/apple/emoji/3_306.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_307.png b/TMessagesProj/src/emojis/apple/emoji/3_307.png index 1378500c86..c8de6b6747 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_307.png and b/TMessagesProj/src/emojis/apple/emoji/3_307.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_308.png b/TMessagesProj/src/emojis/apple/emoji/3_308.png index 10e17c25e5..f5c84dd8ce 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_308.png and b/TMessagesProj/src/emojis/apple/emoji/3_308.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_309.png b/TMessagesProj/src/emojis/apple/emoji/3_309.png index f120c0edef..4aa0718a45 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_309.png and b/TMessagesProj/src/emojis/apple/emoji/3_309.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_31.png b/TMessagesProj/src/emojis/apple/emoji/3_31.png index ef17b85a42..c115c18b24 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_31.png and b/TMessagesProj/src/emojis/apple/emoji/3_31.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_310.png b/TMessagesProj/src/emojis/apple/emoji/3_310.png index 358301a992..77291d499d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_310.png and b/TMessagesProj/src/emojis/apple/emoji/3_310.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_311.png b/TMessagesProj/src/emojis/apple/emoji/3_311.png index f4a61c18df..8db595bda1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_311.png and b/TMessagesProj/src/emojis/apple/emoji/3_311.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_312.png b/TMessagesProj/src/emojis/apple/emoji/3_312.png index 0327a28021..66ee7a2f42 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_312.png and b/TMessagesProj/src/emojis/apple/emoji/3_312.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_313.png b/TMessagesProj/src/emojis/apple/emoji/3_313.png index a3ba2239ed..351041fbde 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_313.png and b/TMessagesProj/src/emojis/apple/emoji/3_313.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_314.png b/TMessagesProj/src/emojis/apple/emoji/3_314.png index 0883a1a165..34d60b2121 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/3_314.png and b/TMessagesProj/src/emojis/apple/emoji/3_314.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_315.png b/TMessagesProj/src/emojis/apple/emoji/3_315.png index 48395631ea..29ee8c3ac8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_315.png and b/TMessagesProj/src/emojis/apple/emoji/3_315.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_316.png b/TMessagesProj/src/emojis/apple/emoji/3_316.png index 823d91cf5c..80c34cf1b1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_316.png and b/TMessagesProj/src/emojis/apple/emoji/3_316.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_317.png b/TMessagesProj/src/emojis/apple/emoji/3_317.png index 79ddc96440..6a61a0615c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_317.png and b/TMessagesProj/src/emojis/apple/emoji/3_317.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_318.png b/TMessagesProj/src/emojis/apple/emoji/3_318.png index c59df51526..5a8247c54f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_318.png and b/TMessagesProj/src/emojis/apple/emoji/3_318.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_319.png b/TMessagesProj/src/emojis/apple/emoji/3_319.png index 666be664c7..83aa840d64 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_319.png and b/TMessagesProj/src/emojis/apple/emoji/3_319.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_32.png b/TMessagesProj/src/emojis/apple/emoji/3_32.png index dd2e7e41a9..e433eb9096 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_32.png and b/TMessagesProj/src/emojis/apple/emoji/3_32.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_320.png b/TMessagesProj/src/emojis/apple/emoji/3_320.png index 9fbe8b79af..83b3c08007 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_320.png and b/TMessagesProj/src/emojis/apple/emoji/3_320.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_321.png b/TMessagesProj/src/emojis/apple/emoji/3_321.png index e7e6b079a1..e7407e9ad6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_321.png and b/TMessagesProj/src/emojis/apple/emoji/3_321.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_322.png b/TMessagesProj/src/emojis/apple/emoji/3_322.png index 30ebf5618f..1f01901567 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_322.png and b/TMessagesProj/src/emojis/apple/emoji/3_322.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_323.png b/TMessagesProj/src/emojis/apple/emoji/3_323.png index 04690f57cf..a786f10527 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_323.png and b/TMessagesProj/src/emojis/apple/emoji/3_323.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_324.png b/TMessagesProj/src/emojis/apple/emoji/3_324.png index 4c3b865a10..6a856f472e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_324.png and b/TMessagesProj/src/emojis/apple/emoji/3_324.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_325.png b/TMessagesProj/src/emojis/apple/emoji/3_325.png index bcf450f108..34390d9daa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_325.png and b/TMessagesProj/src/emojis/apple/emoji/3_325.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_326.png b/TMessagesProj/src/emojis/apple/emoji/3_326.png index f5051ca72d..28e98d9581 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_326.png and 
b/TMessagesProj/src/emojis/apple/emoji/3_326.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_327.png b/TMessagesProj/src/emojis/apple/emoji/3_327.png index 92c579887d..b87b506242 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_327.png and b/TMessagesProj/src/emojis/apple/emoji/3_327.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_328.png b/TMessagesProj/src/emojis/apple/emoji/3_328.png index 51391a6676..f9c8e1b1d1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_328.png and b/TMessagesProj/src/emojis/apple/emoji/3_328.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_329.png b/TMessagesProj/src/emojis/apple/emoji/3_329.png index a7615aa084..0156f6c905 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_329.png and b/TMessagesProj/src/emojis/apple/emoji/3_329.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_33.png b/TMessagesProj/src/emojis/apple/emoji/3_33.png index 55599b93de..b18419fd5f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_33.png and b/TMessagesProj/src/emojis/apple/emoji/3_33.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_330.png b/TMessagesProj/src/emojis/apple/emoji/3_330.png index c6fb6852df..4795f24c13 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_330.png and b/TMessagesProj/src/emojis/apple/emoji/3_330.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_331.png b/TMessagesProj/src/emojis/apple/emoji/3_331.png index 4ce676d7e9..0c98622322 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_331.png and b/TMessagesProj/src/emojis/apple/emoji/3_331.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_332.png b/TMessagesProj/src/emojis/apple/emoji/3_332.png index bdaf5e2caf..647f12598f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_332.png and b/TMessagesProj/src/emojis/apple/emoji/3_332.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_333.png b/TMessagesProj/src/emojis/apple/emoji/3_333.png new file mode 100644 index 0000000000..33b65dbf3f Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/3_333.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_334.png b/TMessagesProj/src/emojis/apple/emoji/3_334.png new file mode 100644 index 0000000000..2787438c07 Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/3_334.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_34.png b/TMessagesProj/src/emojis/apple/emoji/3_34.png index 79522c6dab..80c9b80cc7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_34.png and b/TMessagesProj/src/emojis/apple/emoji/3_34.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_35.png b/TMessagesProj/src/emojis/apple/emoji/3_35.png index 5cab6fad41..cff04e682f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_35.png and b/TMessagesProj/src/emojis/apple/emoji/3_35.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_36.png b/TMessagesProj/src/emojis/apple/emoji/3_36.png index 7015da5fb9..6236e55418 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_36.png and b/TMessagesProj/src/emojis/apple/emoji/3_36.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_37.png b/TMessagesProj/src/emojis/apple/emoji/3_37.png index 4bc914ff41..49e94e12ef 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_37.png and b/TMessagesProj/src/emojis/apple/emoji/3_37.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_38.png 
b/TMessagesProj/src/emojis/apple/emoji/3_38.png index dae5ed3de8..751c9b228b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_38.png and b/TMessagesProj/src/emojis/apple/emoji/3_38.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_39.png b/TMessagesProj/src/emojis/apple/emoji/3_39.png index 25e179fc14..0be19d16ab 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_39.png and b/TMessagesProj/src/emojis/apple/emoji/3_39.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_4.png b/TMessagesProj/src/emojis/apple/emoji/3_4.png index 615dadeb86..3628c98655 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_4.png and b/TMessagesProj/src/emojis/apple/emoji/3_4.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_40.png b/TMessagesProj/src/emojis/apple/emoji/3_40.png index 10a67a9c16..b1fa8592ff 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_40.png and b/TMessagesProj/src/emojis/apple/emoji/3_40.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_41.png b/TMessagesProj/src/emojis/apple/emoji/3_41.png index c7db3ed342..9b1c1f2fc3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_41.png and b/TMessagesProj/src/emojis/apple/emoji/3_41.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_42.png b/TMessagesProj/src/emojis/apple/emoji/3_42.png index 978858ff08..2c4ac78979 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_42.png and b/TMessagesProj/src/emojis/apple/emoji/3_42.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_43.png b/TMessagesProj/src/emojis/apple/emoji/3_43.png index 569889fa01..2fca47b91d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_43.png and b/TMessagesProj/src/emojis/apple/emoji/3_43.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_44.png b/TMessagesProj/src/emojis/apple/emoji/3_44.png index 37b34a98ad..e0f63dc034 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_44.png and b/TMessagesProj/src/emojis/apple/emoji/3_44.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_45.png b/TMessagesProj/src/emojis/apple/emoji/3_45.png index fc98d32c21..b632ef625f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_45.png and b/TMessagesProj/src/emojis/apple/emoji/3_45.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_46.png b/TMessagesProj/src/emojis/apple/emoji/3_46.png index 7532e8888c..0e9283b3fe 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_46.png and b/TMessagesProj/src/emojis/apple/emoji/3_46.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_47.png b/TMessagesProj/src/emojis/apple/emoji/3_47.png index 5588c1b89e..3d5577bc2a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_47.png and b/TMessagesProj/src/emojis/apple/emoji/3_47.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_48.png b/TMessagesProj/src/emojis/apple/emoji/3_48.png index c723821449..eaa1b94900 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_48.png and b/TMessagesProj/src/emojis/apple/emoji/3_48.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_49.png b/TMessagesProj/src/emojis/apple/emoji/3_49.png index c68c4b78b1..454c1cccb0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_49.png and b/TMessagesProj/src/emojis/apple/emoji/3_49.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_5.png b/TMessagesProj/src/emojis/apple/emoji/3_5.png index 33013f23ad..00b29432ed 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/3_5.png and b/TMessagesProj/src/emojis/apple/emoji/3_5.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_50.png b/TMessagesProj/src/emojis/apple/emoji/3_50.png index 88b4494b01..6349b53a0f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_50.png and b/TMessagesProj/src/emojis/apple/emoji/3_50.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_51.png b/TMessagesProj/src/emojis/apple/emoji/3_51.png index 626dcaa4fe..f7818df724 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_51.png and b/TMessagesProj/src/emojis/apple/emoji/3_51.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_52.png b/TMessagesProj/src/emojis/apple/emoji/3_52.png index 2bfcaf60a2..553f001bdd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_52.png and b/TMessagesProj/src/emojis/apple/emoji/3_52.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_53.png b/TMessagesProj/src/emojis/apple/emoji/3_53.png index 71eb880f78..077bfa629d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_53.png and b/TMessagesProj/src/emojis/apple/emoji/3_53.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_54.png b/TMessagesProj/src/emojis/apple/emoji/3_54.png index 5b08bbeb7a..f17618a837 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_54.png and b/TMessagesProj/src/emojis/apple/emoji/3_54.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_55.png b/TMessagesProj/src/emojis/apple/emoji/3_55.png index 3fc1c460d6..a9aa58893e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_55.png and b/TMessagesProj/src/emojis/apple/emoji/3_55.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_56.png b/TMessagesProj/src/emojis/apple/emoji/3_56.png index 44e4fd3f8b..4e3a73a7af 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_56.png and b/TMessagesProj/src/emojis/apple/emoji/3_56.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_57.png b/TMessagesProj/src/emojis/apple/emoji/3_57.png index 55ede5c3ef..08b6233f7f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_57.png and b/TMessagesProj/src/emojis/apple/emoji/3_57.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_58.png b/TMessagesProj/src/emojis/apple/emoji/3_58.png index f898f527c7..9e62f3e405 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_58.png and b/TMessagesProj/src/emojis/apple/emoji/3_58.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_59.png b/TMessagesProj/src/emojis/apple/emoji/3_59.png index 82fc795391..e044212be5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_59.png and b/TMessagesProj/src/emojis/apple/emoji/3_59.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_6.png b/TMessagesProj/src/emojis/apple/emoji/3_6.png index 791e0b88fa..71745a11cf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_6.png and b/TMessagesProj/src/emojis/apple/emoji/3_6.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_60.png b/TMessagesProj/src/emojis/apple/emoji/3_60.png index b70d9cc788..33c3d7ff7f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_60.png and b/TMessagesProj/src/emojis/apple/emoji/3_60.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_61.png b/TMessagesProj/src/emojis/apple/emoji/3_61.png index 8a3dd3115c..e521a0e124 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_61.png and b/TMessagesProj/src/emojis/apple/emoji/3_61.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/3_62.png b/TMessagesProj/src/emojis/apple/emoji/3_62.png index 139adcc389..7ff38f2b67 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_62.png and b/TMessagesProj/src/emojis/apple/emoji/3_62.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_63.png b/TMessagesProj/src/emojis/apple/emoji/3_63.png index 45ed72ec8f..3b3676d173 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_63.png and b/TMessagesProj/src/emojis/apple/emoji/3_63.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_64.png b/TMessagesProj/src/emojis/apple/emoji/3_64.png index d3b7a86a89..ce94db7e58 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_64.png and b/TMessagesProj/src/emojis/apple/emoji/3_64.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_65.png b/TMessagesProj/src/emojis/apple/emoji/3_65.png index b75985620b..0f1e39ef42 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_65.png and b/TMessagesProj/src/emojis/apple/emoji/3_65.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_66.png b/TMessagesProj/src/emojis/apple/emoji/3_66.png index 83697c3e48..4e0ed910af 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_66.png and b/TMessagesProj/src/emojis/apple/emoji/3_66.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_67.png b/TMessagesProj/src/emojis/apple/emoji/3_67.png index 79d9765be3..6572fd1b3a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_67.png and b/TMessagesProj/src/emojis/apple/emoji/3_67.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_68.png b/TMessagesProj/src/emojis/apple/emoji/3_68.png index f708831f2b..d647651ed7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_68.png and b/TMessagesProj/src/emojis/apple/emoji/3_68.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_69.png b/TMessagesProj/src/emojis/apple/emoji/3_69.png index e5b601973e..7f1c949a98 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_69.png and b/TMessagesProj/src/emojis/apple/emoji/3_69.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_7.png b/TMessagesProj/src/emojis/apple/emoji/3_7.png index a998797269..1e8a4be3d1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_7.png and b/TMessagesProj/src/emojis/apple/emoji/3_7.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_70.png b/TMessagesProj/src/emojis/apple/emoji/3_70.png index 1f9bf9167e..0af7b0d411 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_70.png and b/TMessagesProj/src/emojis/apple/emoji/3_70.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_71.png b/TMessagesProj/src/emojis/apple/emoji/3_71.png index 38ffed7cd9..5ab7db7518 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_71.png and b/TMessagesProj/src/emojis/apple/emoji/3_71.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_72.png b/TMessagesProj/src/emojis/apple/emoji/3_72.png index a549f3f75b..ddf350f5e3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_72.png and b/TMessagesProj/src/emojis/apple/emoji/3_72.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_73.png b/TMessagesProj/src/emojis/apple/emoji/3_73.png index 5daa31f301..195a47d8b0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_73.png and b/TMessagesProj/src/emojis/apple/emoji/3_73.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_74.png b/TMessagesProj/src/emojis/apple/emoji/3_74.png index d1860b7457..d26357bfe1 100644 Binary 
files a/TMessagesProj/src/emojis/apple/emoji/3_74.png and b/TMessagesProj/src/emojis/apple/emoji/3_74.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_75.png b/TMessagesProj/src/emojis/apple/emoji/3_75.png index 5e371433eb..b04aba70f9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_75.png and b/TMessagesProj/src/emojis/apple/emoji/3_75.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_76.png b/TMessagesProj/src/emojis/apple/emoji/3_76.png index e93f8c7a67..798860b123 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_76.png and b/TMessagesProj/src/emojis/apple/emoji/3_76.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_77.png b/TMessagesProj/src/emojis/apple/emoji/3_77.png index fcbc13a5a7..57ea25a4d4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_77.png and b/TMessagesProj/src/emojis/apple/emoji/3_77.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_78.png b/TMessagesProj/src/emojis/apple/emoji/3_78.png index 5ec1164f4b..c1fdb60c2d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_78.png and b/TMessagesProj/src/emojis/apple/emoji/3_78.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_79.png b/TMessagesProj/src/emojis/apple/emoji/3_79.png index 0b5dfe4571..e68f6514d5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_79.png and b/TMessagesProj/src/emojis/apple/emoji/3_79.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_8.png b/TMessagesProj/src/emojis/apple/emoji/3_8.png index 0c56d9a050..43e7cd8485 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_8.png and b/TMessagesProj/src/emojis/apple/emoji/3_8.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_80.png b/TMessagesProj/src/emojis/apple/emoji/3_80.png index d74f8b2cc8..5d738626e6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_80.png and b/TMessagesProj/src/emojis/apple/emoji/3_80.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_81.png b/TMessagesProj/src/emojis/apple/emoji/3_81.png index 8acb1a7d10..e22e6c6312 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_81.png and b/TMessagesProj/src/emojis/apple/emoji/3_81.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_82.png b/TMessagesProj/src/emojis/apple/emoji/3_82.png index e0818583fd..be4000cac3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_82.png and b/TMessagesProj/src/emojis/apple/emoji/3_82.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_83.png b/TMessagesProj/src/emojis/apple/emoji/3_83.png index 87c930e12a..2d79f50976 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_83.png and b/TMessagesProj/src/emojis/apple/emoji/3_83.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_84.png b/TMessagesProj/src/emojis/apple/emoji/3_84.png index 7d442fd753..c7cf7ef153 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_84.png and b/TMessagesProj/src/emojis/apple/emoji/3_84.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_85.png b/TMessagesProj/src/emojis/apple/emoji/3_85.png index 84c3ba157a..39e8ecbfea 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_85.png and b/TMessagesProj/src/emojis/apple/emoji/3_85.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_86.png b/TMessagesProj/src/emojis/apple/emoji/3_86.png index 46cdedbcd0..b7edc79b23 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_86.png and b/TMessagesProj/src/emojis/apple/emoji/3_86.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/3_87.png b/TMessagesProj/src/emojis/apple/emoji/3_87.png index 1d6fdbc862..fab5832cef 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_87.png and b/TMessagesProj/src/emojis/apple/emoji/3_87.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_88.png b/TMessagesProj/src/emojis/apple/emoji/3_88.png index 2fe9c99dec..f8e81b15f6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_88.png and b/TMessagesProj/src/emojis/apple/emoji/3_88.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_89.png b/TMessagesProj/src/emojis/apple/emoji/3_89.png index aa7f1d3643..36bf47bb09 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_89.png and b/TMessagesProj/src/emojis/apple/emoji/3_89.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_9.png b/TMessagesProj/src/emojis/apple/emoji/3_9.png index c68ca41129..098d3df72c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_9.png and b/TMessagesProj/src/emojis/apple/emoji/3_9.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_90.png b/TMessagesProj/src/emojis/apple/emoji/3_90.png index 652cbde3f7..947cfd5dd9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_90.png and b/TMessagesProj/src/emojis/apple/emoji/3_90.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_91.png b/TMessagesProj/src/emojis/apple/emoji/3_91.png index 1a8066a9df..80ee88b6c0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_91.png and b/TMessagesProj/src/emojis/apple/emoji/3_91.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_92.png b/TMessagesProj/src/emojis/apple/emoji/3_92.png index 19ef18f5aa..a8fae14084 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_92.png and b/TMessagesProj/src/emojis/apple/emoji/3_92.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_93.png b/TMessagesProj/src/emojis/apple/emoji/3_93.png index 6e2f237a7a..8124403ed0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_93.png and b/TMessagesProj/src/emojis/apple/emoji/3_93.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_94.png b/TMessagesProj/src/emojis/apple/emoji/3_94.png index 3fce65ddf9..bbd5c825e9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_94.png and b/TMessagesProj/src/emojis/apple/emoji/3_94.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_95.png b/TMessagesProj/src/emojis/apple/emoji/3_95.png index 2c223bda9f..aa99b7a222 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_95.png and b/TMessagesProj/src/emojis/apple/emoji/3_95.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_96.png b/TMessagesProj/src/emojis/apple/emoji/3_96.png index 6a57e2af1c..87f4db552e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_96.png and b/TMessagesProj/src/emojis/apple/emoji/3_96.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_97.png b/TMessagesProj/src/emojis/apple/emoji/3_97.png index b6895ea105..49b75273ba 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_97.png and b/TMessagesProj/src/emojis/apple/emoji/3_97.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_98.png b/TMessagesProj/src/emojis/apple/emoji/3_98.png index d7022e7046..12916b7823 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/3_98.png and b/TMessagesProj/src/emojis/apple/emoji/3_98.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/3_99.png b/TMessagesProj/src/emojis/apple/emoji/3_99.png index 4860bfd064..14e1c90f88 100644 Binary 
files a/TMessagesProj/src/emojis/apple/emoji/3_99.png and b/TMessagesProj/src/emojis/apple/emoji/3_99.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_0.png b/TMessagesProj/src/emojis/apple/emoji/4_0.png index 8841fac2d5..b456d0ff63 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_0.png and b/TMessagesProj/src/emojis/apple/emoji/4_0.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_1.png b/TMessagesProj/src/emojis/apple/emoji/4_1.png index d216ac1808..65847b0fd4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_1.png and b/TMessagesProj/src/emojis/apple/emoji/4_1.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_10.png b/TMessagesProj/src/emojis/apple/emoji/4_10.png index 20db138a40..146085cc8e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_10.png and b/TMessagesProj/src/emojis/apple/emoji/4_10.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_100.png b/TMessagesProj/src/emojis/apple/emoji/4_100.png index b13a82e945..bf91cfcb1c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_100.png and b/TMessagesProj/src/emojis/apple/emoji/4_100.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_101.png b/TMessagesProj/src/emojis/apple/emoji/4_101.png index 5b3da22c20..8a4998ec91 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_101.png and b/TMessagesProj/src/emojis/apple/emoji/4_101.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_102.png b/TMessagesProj/src/emojis/apple/emoji/4_102.png index b62c86fbf0..901a9ccc3c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_102.png and b/TMessagesProj/src/emojis/apple/emoji/4_102.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_103.png b/TMessagesProj/src/emojis/apple/emoji/4_103.png index 42dc37918f..36953e3add 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_103.png and b/TMessagesProj/src/emojis/apple/emoji/4_103.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_104.png b/TMessagesProj/src/emojis/apple/emoji/4_104.png index 92ec11dc6e..37bf48f0f0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_104.png and b/TMessagesProj/src/emojis/apple/emoji/4_104.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_105.png b/TMessagesProj/src/emojis/apple/emoji/4_105.png index 4a4ba593c5..87cf13140b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_105.png and b/TMessagesProj/src/emojis/apple/emoji/4_105.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_106.png b/TMessagesProj/src/emojis/apple/emoji/4_106.png index 42c36f7e29..82ad413c31 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_106.png and b/TMessagesProj/src/emojis/apple/emoji/4_106.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_107.png b/TMessagesProj/src/emojis/apple/emoji/4_107.png index d15f3216fa..bbdbb81db4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_107.png and b/TMessagesProj/src/emojis/apple/emoji/4_107.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_108.png b/TMessagesProj/src/emojis/apple/emoji/4_108.png index c7553d168c..db9f1daad8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_108.png and b/TMessagesProj/src/emojis/apple/emoji/4_108.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_109.png b/TMessagesProj/src/emojis/apple/emoji/4_109.png index 169dabff61..05fa253983 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_109.png and 
b/TMessagesProj/src/emojis/apple/emoji/4_109.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_11.png b/TMessagesProj/src/emojis/apple/emoji/4_11.png index 442d9ebab9..81bbadff79 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_11.png and b/TMessagesProj/src/emojis/apple/emoji/4_11.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_110.png b/TMessagesProj/src/emojis/apple/emoji/4_110.png index a063abdea1..7bf5171866 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_110.png and b/TMessagesProj/src/emojis/apple/emoji/4_110.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_111.png b/TMessagesProj/src/emojis/apple/emoji/4_111.png index 8ad538cbbe..e6b12c4d1e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_111.png and b/TMessagesProj/src/emojis/apple/emoji/4_111.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_112.png b/TMessagesProj/src/emojis/apple/emoji/4_112.png index 46521e135f..df96216700 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_112.png and b/TMessagesProj/src/emojis/apple/emoji/4_112.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_113.png b/TMessagesProj/src/emojis/apple/emoji/4_113.png index 0abe5fda7e..88e8358e1d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_113.png and b/TMessagesProj/src/emojis/apple/emoji/4_113.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_114.png b/TMessagesProj/src/emojis/apple/emoji/4_114.png index 43115aa24b..c3a5c5cbf1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_114.png and b/TMessagesProj/src/emojis/apple/emoji/4_114.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_115.png b/TMessagesProj/src/emojis/apple/emoji/4_115.png index ba71f433b7..c9a911f49a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_115.png and b/TMessagesProj/src/emojis/apple/emoji/4_115.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_116.png b/TMessagesProj/src/emojis/apple/emoji/4_116.png index 9de3c75166..f79ce5abf3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_116.png and b/TMessagesProj/src/emojis/apple/emoji/4_116.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_117.png b/TMessagesProj/src/emojis/apple/emoji/4_117.png index fc07577333..89369867a6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_117.png and b/TMessagesProj/src/emojis/apple/emoji/4_117.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_118.png b/TMessagesProj/src/emojis/apple/emoji/4_118.png index 440b09f55a..05960a4795 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_118.png and b/TMessagesProj/src/emojis/apple/emoji/4_118.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_119.png b/TMessagesProj/src/emojis/apple/emoji/4_119.png index 9c05090b90..07adfc1787 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_119.png and b/TMessagesProj/src/emojis/apple/emoji/4_119.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_12.png b/TMessagesProj/src/emojis/apple/emoji/4_12.png index 7c6310a666..8eb37aaeb6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_12.png and b/TMessagesProj/src/emojis/apple/emoji/4_12.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_120.png b/TMessagesProj/src/emojis/apple/emoji/4_120.png index 3c1aa14d79..d11b5727b7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_120.png and b/TMessagesProj/src/emojis/apple/emoji/4_120.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/4_121.png b/TMessagesProj/src/emojis/apple/emoji/4_121.png index 18fdf7ed25..29d12d33e1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_121.png and b/TMessagesProj/src/emojis/apple/emoji/4_121.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_122.png b/TMessagesProj/src/emojis/apple/emoji/4_122.png index c6fb635d4e..3f0b2fb195 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_122.png and b/TMessagesProj/src/emojis/apple/emoji/4_122.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_123.png b/TMessagesProj/src/emojis/apple/emoji/4_123.png index 59efd45dfb..934a114046 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_123.png and b/TMessagesProj/src/emojis/apple/emoji/4_123.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_124.png b/TMessagesProj/src/emojis/apple/emoji/4_124.png index 244ab9fc86..70693fb4c9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_124.png and b/TMessagesProj/src/emojis/apple/emoji/4_124.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_125.png b/TMessagesProj/src/emojis/apple/emoji/4_125.png index 4ae5a199b1..1d8aefa080 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_125.png and b/TMessagesProj/src/emojis/apple/emoji/4_125.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_126.png b/TMessagesProj/src/emojis/apple/emoji/4_126.png index b002a4053c..19589cd714 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_126.png and b/TMessagesProj/src/emojis/apple/emoji/4_126.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_127.png b/TMessagesProj/src/emojis/apple/emoji/4_127.png index 4723db21dd..74a62996bd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_127.png and b/TMessagesProj/src/emojis/apple/emoji/4_127.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_128.png b/TMessagesProj/src/emojis/apple/emoji/4_128.png index 7cfb2ad847..d1a204e3ce 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_128.png and b/TMessagesProj/src/emojis/apple/emoji/4_128.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_129.png b/TMessagesProj/src/emojis/apple/emoji/4_129.png index 847f7ea598..2e5bc98b64 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_129.png and b/TMessagesProj/src/emojis/apple/emoji/4_129.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_13.png b/TMessagesProj/src/emojis/apple/emoji/4_13.png index c99a5fb192..9f1f5b4a01 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_13.png and b/TMessagesProj/src/emojis/apple/emoji/4_13.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_130.png b/TMessagesProj/src/emojis/apple/emoji/4_130.png index a2babfb77c..b0120242ca 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_130.png and b/TMessagesProj/src/emojis/apple/emoji/4_130.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_14.png b/TMessagesProj/src/emojis/apple/emoji/4_14.png index ffed5c67f4..39b2dca5c3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_14.png and b/TMessagesProj/src/emojis/apple/emoji/4_14.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_15.png b/TMessagesProj/src/emojis/apple/emoji/4_15.png index 36a3532fdf..ebc92cd43c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_15.png and b/TMessagesProj/src/emojis/apple/emoji/4_15.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_16.png b/TMessagesProj/src/emojis/apple/emoji/4_16.png 
index ed1e3b5226..a6beedfb76 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_16.png and b/TMessagesProj/src/emojis/apple/emoji/4_16.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_17.png b/TMessagesProj/src/emojis/apple/emoji/4_17.png index df24889bcf..542989853c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_17.png and b/TMessagesProj/src/emojis/apple/emoji/4_17.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_18.png b/TMessagesProj/src/emojis/apple/emoji/4_18.png index 6dbc7d5ecf..9fb2cd928f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_18.png and b/TMessagesProj/src/emojis/apple/emoji/4_18.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_19.png b/TMessagesProj/src/emojis/apple/emoji/4_19.png index 0eadca53db..398eb11075 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_19.png and b/TMessagesProj/src/emojis/apple/emoji/4_19.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_2.png b/TMessagesProj/src/emojis/apple/emoji/4_2.png index 8b71778d7a..994050b5c1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_2.png and b/TMessagesProj/src/emojis/apple/emoji/4_2.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_20.png b/TMessagesProj/src/emojis/apple/emoji/4_20.png index bc9b124b67..c1b8f6050f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_20.png and b/TMessagesProj/src/emojis/apple/emoji/4_20.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_21.png b/TMessagesProj/src/emojis/apple/emoji/4_21.png index 6effbc0d2c..5767bb25df 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_21.png and b/TMessagesProj/src/emojis/apple/emoji/4_21.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_22.png b/TMessagesProj/src/emojis/apple/emoji/4_22.png index f2b8da632a..c93e24c475 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_22.png and b/TMessagesProj/src/emojis/apple/emoji/4_22.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_23.png b/TMessagesProj/src/emojis/apple/emoji/4_23.png index 22485e71d4..1475777df3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_23.png and b/TMessagesProj/src/emojis/apple/emoji/4_23.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_24.png b/TMessagesProj/src/emojis/apple/emoji/4_24.png index ae706b81ee..575002a8c7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_24.png and b/TMessagesProj/src/emojis/apple/emoji/4_24.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_25.png b/TMessagesProj/src/emojis/apple/emoji/4_25.png index 531431ea06..5e272a9616 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_25.png and b/TMessagesProj/src/emojis/apple/emoji/4_25.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_26.png b/TMessagesProj/src/emojis/apple/emoji/4_26.png index 13f07496d1..989cdbeb9f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_26.png and b/TMessagesProj/src/emojis/apple/emoji/4_26.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_27.png b/TMessagesProj/src/emojis/apple/emoji/4_27.png index 1bb2cf7e41..31d92f16b3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_27.png and b/TMessagesProj/src/emojis/apple/emoji/4_27.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_28.png b/TMessagesProj/src/emojis/apple/emoji/4_28.png index 031b5ad90e..300d3037c4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_28.png and 
b/TMessagesProj/src/emojis/apple/emoji/4_28.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_29.png b/TMessagesProj/src/emojis/apple/emoji/4_29.png index 0eaaeb5dda..e2f92c3112 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_29.png and b/TMessagesProj/src/emojis/apple/emoji/4_29.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_3.png b/TMessagesProj/src/emojis/apple/emoji/4_3.png index ddc722b7c8..182c9f49c6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_3.png and b/TMessagesProj/src/emojis/apple/emoji/4_3.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_30.png b/TMessagesProj/src/emojis/apple/emoji/4_30.png index e54d808c48..ba75f84b94 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_30.png and b/TMessagesProj/src/emojis/apple/emoji/4_30.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_31.png b/TMessagesProj/src/emojis/apple/emoji/4_31.png index 65b66f8c1b..523b8625ee 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_31.png and b/TMessagesProj/src/emojis/apple/emoji/4_31.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_32.png b/TMessagesProj/src/emojis/apple/emoji/4_32.png index 1bf2dfde17..d153d90d61 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_32.png and b/TMessagesProj/src/emojis/apple/emoji/4_32.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_33.png b/TMessagesProj/src/emojis/apple/emoji/4_33.png index 5b1a7aaa24..6b8672dbc5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_33.png and b/TMessagesProj/src/emojis/apple/emoji/4_33.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_34.png b/TMessagesProj/src/emojis/apple/emoji/4_34.png index 95d6959c56..dc540d11b3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_34.png and b/TMessagesProj/src/emojis/apple/emoji/4_34.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_35.png b/TMessagesProj/src/emojis/apple/emoji/4_35.png index 7fa6bd2545..2cca9eb2b1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_35.png and b/TMessagesProj/src/emojis/apple/emoji/4_35.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_36.png b/TMessagesProj/src/emojis/apple/emoji/4_36.png index a742d6963c..e451b1bcd2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_36.png and b/TMessagesProj/src/emojis/apple/emoji/4_36.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_37.png b/TMessagesProj/src/emojis/apple/emoji/4_37.png index 37f9221a8e..bfc190fb7a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_37.png and b/TMessagesProj/src/emojis/apple/emoji/4_37.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_38.png b/TMessagesProj/src/emojis/apple/emoji/4_38.png index 6643a445c7..7421cc765c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_38.png and b/TMessagesProj/src/emojis/apple/emoji/4_38.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_39.png b/TMessagesProj/src/emojis/apple/emoji/4_39.png index df9d8c94e9..c6ed9b9aa2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_39.png and b/TMessagesProj/src/emojis/apple/emoji/4_39.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_4.png b/TMessagesProj/src/emojis/apple/emoji/4_4.png index 4e1f965ec5..13c890342a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_4.png and b/TMessagesProj/src/emojis/apple/emoji/4_4.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_40.png 
b/TMessagesProj/src/emojis/apple/emoji/4_40.png index f2a1dc21c6..53256c5f63 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_40.png and b/TMessagesProj/src/emojis/apple/emoji/4_40.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_41.png b/TMessagesProj/src/emojis/apple/emoji/4_41.png index e2e8ad930e..c3865134c9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_41.png and b/TMessagesProj/src/emojis/apple/emoji/4_41.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_42.png b/TMessagesProj/src/emojis/apple/emoji/4_42.png index b4ac6a25cc..4f5ec469d7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_42.png and b/TMessagesProj/src/emojis/apple/emoji/4_42.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_43.png b/TMessagesProj/src/emojis/apple/emoji/4_43.png index f2064deec2..1e3a0d1f7b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_43.png and b/TMessagesProj/src/emojis/apple/emoji/4_43.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_44.png b/TMessagesProj/src/emojis/apple/emoji/4_44.png index 3d8cd97c84..e8b4eed695 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_44.png and b/TMessagesProj/src/emojis/apple/emoji/4_44.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_45.png b/TMessagesProj/src/emojis/apple/emoji/4_45.png index a8f82adeb5..d5d0fe7d1d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_45.png and b/TMessagesProj/src/emojis/apple/emoji/4_45.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_46.png b/TMessagesProj/src/emojis/apple/emoji/4_46.png index 01ac54832b..38c7b7e937 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_46.png and b/TMessagesProj/src/emojis/apple/emoji/4_46.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_47.png b/TMessagesProj/src/emojis/apple/emoji/4_47.png index eb5c693f71..216c3c17c2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_47.png and b/TMessagesProj/src/emojis/apple/emoji/4_47.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_48.png b/TMessagesProj/src/emojis/apple/emoji/4_48.png index 477522b1e9..eebe5332c5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_48.png and b/TMessagesProj/src/emojis/apple/emoji/4_48.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_49.png b/TMessagesProj/src/emojis/apple/emoji/4_49.png index 9b79031063..910f55c3df 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_49.png and b/TMessagesProj/src/emojis/apple/emoji/4_49.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_5.png b/TMessagesProj/src/emojis/apple/emoji/4_5.png index dfe642ebe4..1b1927c00a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_5.png and b/TMessagesProj/src/emojis/apple/emoji/4_5.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_50.png b/TMessagesProj/src/emojis/apple/emoji/4_50.png index 5dbe120160..cf5481fc5d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_50.png and b/TMessagesProj/src/emojis/apple/emoji/4_50.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_51.png b/TMessagesProj/src/emojis/apple/emoji/4_51.png index 20d4241376..b0cc84afc6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_51.png and b/TMessagesProj/src/emojis/apple/emoji/4_51.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_52.png b/TMessagesProj/src/emojis/apple/emoji/4_52.png index 8906425b36..313704f1b9 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/4_52.png and b/TMessagesProj/src/emojis/apple/emoji/4_52.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_53.png b/TMessagesProj/src/emojis/apple/emoji/4_53.png index d67bf399f5..08e7bb7437 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_53.png and b/TMessagesProj/src/emojis/apple/emoji/4_53.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_54.png b/TMessagesProj/src/emojis/apple/emoji/4_54.png index 45a4f9f9af..e04f75041f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_54.png and b/TMessagesProj/src/emojis/apple/emoji/4_54.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_55.png b/TMessagesProj/src/emojis/apple/emoji/4_55.png index c1ce55d876..c3494d1426 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_55.png and b/TMessagesProj/src/emojis/apple/emoji/4_55.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_56.png b/TMessagesProj/src/emojis/apple/emoji/4_56.png index bf8089162a..08928f07ba 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_56.png and b/TMessagesProj/src/emojis/apple/emoji/4_56.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_57.png b/TMessagesProj/src/emojis/apple/emoji/4_57.png index c59eb863da..437f539930 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_57.png and b/TMessagesProj/src/emojis/apple/emoji/4_57.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_58.png b/TMessagesProj/src/emojis/apple/emoji/4_58.png index 6db4cb93d3..f01267041a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_58.png and b/TMessagesProj/src/emojis/apple/emoji/4_58.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_59.png b/TMessagesProj/src/emojis/apple/emoji/4_59.png index 427c5d0068..f6d660235c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_59.png and b/TMessagesProj/src/emojis/apple/emoji/4_59.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_6.png b/TMessagesProj/src/emojis/apple/emoji/4_6.png index 68d72ecb34..f385da2c89 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_6.png and b/TMessagesProj/src/emojis/apple/emoji/4_6.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_60.png b/TMessagesProj/src/emojis/apple/emoji/4_60.png index 40cc1bd40f..36722d5f81 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_60.png and b/TMessagesProj/src/emojis/apple/emoji/4_60.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_61.png b/TMessagesProj/src/emojis/apple/emoji/4_61.png index e33f1cee80..4a77fef120 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_61.png and b/TMessagesProj/src/emojis/apple/emoji/4_61.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_62.png b/TMessagesProj/src/emojis/apple/emoji/4_62.png index 25461344ed..97893f55cb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_62.png and b/TMessagesProj/src/emojis/apple/emoji/4_62.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_63.png b/TMessagesProj/src/emojis/apple/emoji/4_63.png index f35fcc3824..684510fbb1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_63.png and b/TMessagesProj/src/emojis/apple/emoji/4_63.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_64.png b/TMessagesProj/src/emojis/apple/emoji/4_64.png index 99b1c944e0..296cf49844 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_64.png and b/TMessagesProj/src/emojis/apple/emoji/4_64.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/4_65.png b/TMessagesProj/src/emojis/apple/emoji/4_65.png index 6f4387f696..c2caf78b84 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_65.png and b/TMessagesProj/src/emojis/apple/emoji/4_65.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_66.png b/TMessagesProj/src/emojis/apple/emoji/4_66.png index 6178a9e5ca..6628549db8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_66.png and b/TMessagesProj/src/emojis/apple/emoji/4_66.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_67.png b/TMessagesProj/src/emojis/apple/emoji/4_67.png index 6d747ba035..4c6a781794 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_67.png and b/TMessagesProj/src/emojis/apple/emoji/4_67.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_68.png b/TMessagesProj/src/emojis/apple/emoji/4_68.png index 47c540e151..29a145a00e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_68.png and b/TMessagesProj/src/emojis/apple/emoji/4_68.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_69.png b/TMessagesProj/src/emojis/apple/emoji/4_69.png index 08191c7496..f11ad9b4ab 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_69.png and b/TMessagesProj/src/emojis/apple/emoji/4_69.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_7.png b/TMessagesProj/src/emojis/apple/emoji/4_7.png index 917e65b3c7..21203ce173 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_7.png and b/TMessagesProj/src/emojis/apple/emoji/4_7.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_70.png b/TMessagesProj/src/emojis/apple/emoji/4_70.png index f2f60304e4..ffe66691b4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_70.png and b/TMessagesProj/src/emojis/apple/emoji/4_70.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_71.png b/TMessagesProj/src/emojis/apple/emoji/4_71.png index c528946aa0..7236f13c14 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_71.png and b/TMessagesProj/src/emojis/apple/emoji/4_71.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_72.png b/TMessagesProj/src/emojis/apple/emoji/4_72.png index 127602f3a4..f073237375 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_72.png and b/TMessagesProj/src/emojis/apple/emoji/4_72.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_73.png b/TMessagesProj/src/emojis/apple/emoji/4_73.png index 4e176eea10..82e24d56ec 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_73.png and b/TMessagesProj/src/emojis/apple/emoji/4_73.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_74.png b/TMessagesProj/src/emojis/apple/emoji/4_74.png index 40afd24f66..ad0a189796 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_74.png and b/TMessagesProj/src/emojis/apple/emoji/4_74.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_75.png b/TMessagesProj/src/emojis/apple/emoji/4_75.png index 4388dede6a..4943e8c26f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_75.png and b/TMessagesProj/src/emojis/apple/emoji/4_75.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_76.png b/TMessagesProj/src/emojis/apple/emoji/4_76.png index d6f1d63e99..b3d95f344f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_76.png and b/TMessagesProj/src/emojis/apple/emoji/4_76.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_77.png b/TMessagesProj/src/emojis/apple/emoji/4_77.png index abe3755f44..b263ed1d87 100644 Binary 
files a/TMessagesProj/src/emojis/apple/emoji/4_77.png and b/TMessagesProj/src/emojis/apple/emoji/4_77.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_78.png b/TMessagesProj/src/emojis/apple/emoji/4_78.png index 709d6d28e3..094c447d7e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_78.png and b/TMessagesProj/src/emojis/apple/emoji/4_78.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_79.png b/TMessagesProj/src/emojis/apple/emoji/4_79.png index 891825f80c..5cf23e8e67 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_79.png and b/TMessagesProj/src/emojis/apple/emoji/4_79.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_8.png b/TMessagesProj/src/emojis/apple/emoji/4_8.png index 3be3979f40..a79de7e1bf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_8.png and b/TMessagesProj/src/emojis/apple/emoji/4_8.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_80.png b/TMessagesProj/src/emojis/apple/emoji/4_80.png index 3d6da1beae..8653166f32 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_80.png and b/TMessagesProj/src/emojis/apple/emoji/4_80.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_81.png b/TMessagesProj/src/emojis/apple/emoji/4_81.png index 521bc106b5..297e518a61 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_81.png and b/TMessagesProj/src/emojis/apple/emoji/4_81.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_82.png b/TMessagesProj/src/emojis/apple/emoji/4_82.png index abd96ed2fd..bc750ac462 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_82.png and b/TMessagesProj/src/emojis/apple/emoji/4_82.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_83.png b/TMessagesProj/src/emojis/apple/emoji/4_83.png index c99965210d..24c2eb8e38 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_83.png and b/TMessagesProj/src/emojis/apple/emoji/4_83.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_84.png b/TMessagesProj/src/emojis/apple/emoji/4_84.png index 3c508f72ec..271a66bdc1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_84.png and b/TMessagesProj/src/emojis/apple/emoji/4_84.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_85.png b/TMessagesProj/src/emojis/apple/emoji/4_85.png index d73efa944c..3be1ef63d6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_85.png and b/TMessagesProj/src/emojis/apple/emoji/4_85.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_86.png b/TMessagesProj/src/emojis/apple/emoji/4_86.png index d2d0eacef9..15f180cc8b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_86.png and b/TMessagesProj/src/emojis/apple/emoji/4_86.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_87.png b/TMessagesProj/src/emojis/apple/emoji/4_87.png index 669d0d3643..4d77510c3e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_87.png and b/TMessagesProj/src/emojis/apple/emoji/4_87.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_88.png b/TMessagesProj/src/emojis/apple/emoji/4_88.png index 8a23ec1658..4ed75e345f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_88.png and b/TMessagesProj/src/emojis/apple/emoji/4_88.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_89.png b/TMessagesProj/src/emojis/apple/emoji/4_89.png index 8d77611b3a..9d65c33f5d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_89.png and b/TMessagesProj/src/emojis/apple/emoji/4_89.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/4_9.png b/TMessagesProj/src/emojis/apple/emoji/4_9.png index 1bcd0c5be0..eb20e19d85 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_9.png and b/TMessagesProj/src/emojis/apple/emoji/4_9.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_90.png b/TMessagesProj/src/emojis/apple/emoji/4_90.png index 6d0c194f56..8ddf4e8fb5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_90.png and b/TMessagesProj/src/emojis/apple/emoji/4_90.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_91.png b/TMessagesProj/src/emojis/apple/emoji/4_91.png index d70102c747..2d1b58d35e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_91.png and b/TMessagesProj/src/emojis/apple/emoji/4_91.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_92.png b/TMessagesProj/src/emojis/apple/emoji/4_92.png index 08d9a78563..a07cfd7c02 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_92.png and b/TMessagesProj/src/emojis/apple/emoji/4_92.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_93.png b/TMessagesProj/src/emojis/apple/emoji/4_93.png index d1bcd172fd..63ac6b909a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_93.png and b/TMessagesProj/src/emojis/apple/emoji/4_93.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_94.png b/TMessagesProj/src/emojis/apple/emoji/4_94.png index d6a6fd773c..11e25a6e7f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_94.png and b/TMessagesProj/src/emojis/apple/emoji/4_94.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_95.png b/TMessagesProj/src/emojis/apple/emoji/4_95.png index a557a049e8..33c7cfd697 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_95.png and b/TMessagesProj/src/emojis/apple/emoji/4_95.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_96.png b/TMessagesProj/src/emojis/apple/emoji/4_96.png index c239dba5bb..6569214324 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_96.png and b/TMessagesProj/src/emojis/apple/emoji/4_96.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_97.png b/TMessagesProj/src/emojis/apple/emoji/4_97.png index 891da7a72c..d1df82cd6e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_97.png and b/TMessagesProj/src/emojis/apple/emoji/4_97.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_98.png b/TMessagesProj/src/emojis/apple/emoji/4_98.png index 6c1de12dbb..fa8b416d01 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_98.png and b/TMessagesProj/src/emojis/apple/emoji/4_98.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/4_99.png b/TMessagesProj/src/emojis/apple/emoji/4_99.png index c3652e5fc0..36bd9be685 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/4_99.png and b/TMessagesProj/src/emojis/apple/emoji/4_99.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_0.png b/TMessagesProj/src/emojis/apple/emoji/5_0.png index e9656b0891..795101aacc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_0.png and b/TMessagesProj/src/emojis/apple/emoji/5_0.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_1.png b/TMessagesProj/src/emojis/apple/emoji/5_1.png index 0dc63a9579..5a718a68a2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_1.png and b/TMessagesProj/src/emojis/apple/emoji/5_1.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_10.png b/TMessagesProj/src/emojis/apple/emoji/5_10.png index 1462c6aecc..0d4e9d8314 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/5_10.png and b/TMessagesProj/src/emojis/apple/emoji/5_10.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_100.png b/TMessagesProj/src/emojis/apple/emoji/5_100.png index 2c9ba42e59..ba02acfa43 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_100.png and b/TMessagesProj/src/emojis/apple/emoji/5_100.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_101.png b/TMessagesProj/src/emojis/apple/emoji/5_101.png index ce08169a66..95c537885d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_101.png and b/TMessagesProj/src/emojis/apple/emoji/5_101.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_102.png b/TMessagesProj/src/emojis/apple/emoji/5_102.png index 53313301d4..b54b9dbf0b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_102.png and b/TMessagesProj/src/emojis/apple/emoji/5_102.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_103.png b/TMessagesProj/src/emojis/apple/emoji/5_103.png index a639683d9b..3e9edafd6f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_103.png and b/TMessagesProj/src/emojis/apple/emoji/5_103.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_104.png b/TMessagesProj/src/emojis/apple/emoji/5_104.png index 77e5d9896a..80b7d65b8d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_104.png and b/TMessagesProj/src/emojis/apple/emoji/5_104.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_105.png b/TMessagesProj/src/emojis/apple/emoji/5_105.png index c30e4f3cb5..1e7fb7bbc6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_105.png and b/TMessagesProj/src/emojis/apple/emoji/5_105.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_106.png b/TMessagesProj/src/emojis/apple/emoji/5_106.png index 731b562503..4181e2d07f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_106.png and b/TMessagesProj/src/emojis/apple/emoji/5_106.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_107.png b/TMessagesProj/src/emojis/apple/emoji/5_107.png index 6c9591535f..a859115b12 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_107.png and b/TMessagesProj/src/emojis/apple/emoji/5_107.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_108.png b/TMessagesProj/src/emojis/apple/emoji/5_108.png index d6fe126667..6d59221f55 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_108.png and b/TMessagesProj/src/emojis/apple/emoji/5_108.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_109.png b/TMessagesProj/src/emojis/apple/emoji/5_109.png index d8f70fd4e7..17b732fd2d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_109.png and b/TMessagesProj/src/emojis/apple/emoji/5_109.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_11.png b/TMessagesProj/src/emojis/apple/emoji/5_11.png index 12408ccd63..e1697ff68d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_11.png and b/TMessagesProj/src/emojis/apple/emoji/5_11.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_110.png b/TMessagesProj/src/emojis/apple/emoji/5_110.png index 043ccb3744..7b5fe6576d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_110.png and b/TMessagesProj/src/emojis/apple/emoji/5_110.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_111.png b/TMessagesProj/src/emojis/apple/emoji/5_111.png index 55e90b09a8..a6d2a55554 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_111.png and 
b/TMessagesProj/src/emojis/apple/emoji/5_111.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_112.png b/TMessagesProj/src/emojis/apple/emoji/5_112.png index 6f66997aba..c92293bed0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_112.png and b/TMessagesProj/src/emojis/apple/emoji/5_112.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_113.png b/TMessagesProj/src/emojis/apple/emoji/5_113.png index e6e4a05b35..c0d09b11ad 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_113.png and b/TMessagesProj/src/emojis/apple/emoji/5_113.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_114.png b/TMessagesProj/src/emojis/apple/emoji/5_114.png index 05ee35d717..237beff27f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_114.png and b/TMessagesProj/src/emojis/apple/emoji/5_114.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_115.png b/TMessagesProj/src/emojis/apple/emoji/5_115.png index 6721be966b..787cb5a428 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_115.png and b/TMessagesProj/src/emojis/apple/emoji/5_115.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_116.png b/TMessagesProj/src/emojis/apple/emoji/5_116.png index dcfb50c9f4..18fbb190a7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_116.png and b/TMessagesProj/src/emojis/apple/emoji/5_116.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_117.png b/TMessagesProj/src/emojis/apple/emoji/5_117.png index 7ed6d8e025..34db62ca6b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_117.png and b/TMessagesProj/src/emojis/apple/emoji/5_117.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_118.png b/TMessagesProj/src/emojis/apple/emoji/5_118.png index 57690853ab..67987c3c71 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_118.png and b/TMessagesProj/src/emojis/apple/emoji/5_118.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_119.png b/TMessagesProj/src/emojis/apple/emoji/5_119.png index a9bbc54e2a..1c59a1edd2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_119.png and b/TMessagesProj/src/emojis/apple/emoji/5_119.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_12.png b/TMessagesProj/src/emojis/apple/emoji/5_12.png index c59bab3473..28b6d56688 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_12.png and b/TMessagesProj/src/emojis/apple/emoji/5_12.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_120.png b/TMessagesProj/src/emojis/apple/emoji/5_120.png index 7d3f23c624..bec63ad895 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_120.png and b/TMessagesProj/src/emojis/apple/emoji/5_120.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_121.png b/TMessagesProj/src/emojis/apple/emoji/5_121.png index 954dc668e1..95ac13f9eb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_121.png and b/TMessagesProj/src/emojis/apple/emoji/5_121.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_122.png b/TMessagesProj/src/emojis/apple/emoji/5_122.png index f7b36044be..3e08f7b89a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_122.png and b/TMessagesProj/src/emojis/apple/emoji/5_122.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_123.png b/TMessagesProj/src/emojis/apple/emoji/5_123.png index 2640a94381..da21a69371 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_123.png and b/TMessagesProj/src/emojis/apple/emoji/5_123.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/5_124.png b/TMessagesProj/src/emojis/apple/emoji/5_124.png index 506a4450cd..700d41c410 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_124.png and b/TMessagesProj/src/emojis/apple/emoji/5_124.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_125.png b/TMessagesProj/src/emojis/apple/emoji/5_125.png index beff34f12c..9aad305e5b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_125.png and b/TMessagesProj/src/emojis/apple/emoji/5_125.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_126.png b/TMessagesProj/src/emojis/apple/emoji/5_126.png index 03ef743026..c91b5d8da8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_126.png and b/TMessagesProj/src/emojis/apple/emoji/5_126.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_127.png b/TMessagesProj/src/emojis/apple/emoji/5_127.png index 78f0b87c8b..3841735766 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_127.png and b/TMessagesProj/src/emojis/apple/emoji/5_127.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_128.png b/TMessagesProj/src/emojis/apple/emoji/5_128.png index 4564656d06..461073a465 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_128.png and b/TMessagesProj/src/emojis/apple/emoji/5_128.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_129.png b/TMessagesProj/src/emojis/apple/emoji/5_129.png index 7e5858ce4b..3bf75851ae 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_129.png and b/TMessagesProj/src/emojis/apple/emoji/5_129.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_13.png b/TMessagesProj/src/emojis/apple/emoji/5_13.png index dc36268175..d079a8cd9e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_13.png and b/TMessagesProj/src/emojis/apple/emoji/5_13.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_130.png b/TMessagesProj/src/emojis/apple/emoji/5_130.png index 7abc910938..c128358311 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_130.png and b/TMessagesProj/src/emojis/apple/emoji/5_130.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_131.png b/TMessagesProj/src/emojis/apple/emoji/5_131.png index 236c1f0bcb..90adb5dd7b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_131.png and b/TMessagesProj/src/emojis/apple/emoji/5_131.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_132.png b/TMessagesProj/src/emojis/apple/emoji/5_132.png index 9403396546..7c5197306b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_132.png and b/TMessagesProj/src/emojis/apple/emoji/5_132.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_133.png b/TMessagesProj/src/emojis/apple/emoji/5_133.png index fdc61eb460..a025b04a82 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_133.png and b/TMessagesProj/src/emojis/apple/emoji/5_133.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_134.png b/TMessagesProj/src/emojis/apple/emoji/5_134.png index 23d87fc993..3342dd6fab 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_134.png and b/TMessagesProj/src/emojis/apple/emoji/5_134.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_135.png b/TMessagesProj/src/emojis/apple/emoji/5_135.png index 3b6828fdec..561d61f4b7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_135.png and b/TMessagesProj/src/emojis/apple/emoji/5_135.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_136.png 
b/TMessagesProj/src/emojis/apple/emoji/5_136.png index 7dc2f7aa56..c4ec12bac5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_136.png and b/TMessagesProj/src/emojis/apple/emoji/5_136.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_137.png b/TMessagesProj/src/emojis/apple/emoji/5_137.png index 3eceec7cb2..ceaff88179 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_137.png and b/TMessagesProj/src/emojis/apple/emoji/5_137.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_138.png b/TMessagesProj/src/emojis/apple/emoji/5_138.png index fb4f2eb6ee..ab0e6ab698 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_138.png and b/TMessagesProj/src/emojis/apple/emoji/5_138.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_139.png b/TMessagesProj/src/emojis/apple/emoji/5_139.png index edd198c4a3..a978851015 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_139.png and b/TMessagesProj/src/emojis/apple/emoji/5_139.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_14.png b/TMessagesProj/src/emojis/apple/emoji/5_14.png index a369ab00ad..fe368bc964 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_14.png and b/TMessagesProj/src/emojis/apple/emoji/5_14.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_140.png b/TMessagesProj/src/emojis/apple/emoji/5_140.png index b7393eb541..d983deb87d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_140.png and b/TMessagesProj/src/emojis/apple/emoji/5_140.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_141.png b/TMessagesProj/src/emojis/apple/emoji/5_141.png index 823991622c..8945f2f258 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_141.png and b/TMessagesProj/src/emojis/apple/emoji/5_141.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_142.png b/TMessagesProj/src/emojis/apple/emoji/5_142.png index 449b9b22c1..0ccfe8f552 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_142.png and b/TMessagesProj/src/emojis/apple/emoji/5_142.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_143.png b/TMessagesProj/src/emojis/apple/emoji/5_143.png index 66ac62f3fc..40654c3d8b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_143.png and b/TMessagesProj/src/emojis/apple/emoji/5_143.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_144.png b/TMessagesProj/src/emojis/apple/emoji/5_144.png index e4e37383fd..f306e147e8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_144.png and b/TMessagesProj/src/emojis/apple/emoji/5_144.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_145.png b/TMessagesProj/src/emojis/apple/emoji/5_145.png index fdd5ae488e..5bf485ff1f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_145.png and b/TMessagesProj/src/emojis/apple/emoji/5_145.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_146.png b/TMessagesProj/src/emojis/apple/emoji/5_146.png index f103d67583..e56fbabceb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_146.png and b/TMessagesProj/src/emojis/apple/emoji/5_146.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_147.png b/TMessagesProj/src/emojis/apple/emoji/5_147.png index 76a41935e9..0a36dd07fa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_147.png and b/TMessagesProj/src/emojis/apple/emoji/5_147.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_148.png b/TMessagesProj/src/emojis/apple/emoji/5_148.png index 792072486f..0d409c682b 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/5_148.png and b/TMessagesProj/src/emojis/apple/emoji/5_148.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_149.png b/TMessagesProj/src/emojis/apple/emoji/5_149.png index 1fa51ae0e4..241201189a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_149.png and b/TMessagesProj/src/emojis/apple/emoji/5_149.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_15.png b/TMessagesProj/src/emojis/apple/emoji/5_15.png index 4f7bd140f5..709b54a34a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_15.png and b/TMessagesProj/src/emojis/apple/emoji/5_15.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_150.png b/TMessagesProj/src/emojis/apple/emoji/5_150.png index 9843e879a0..5c8d2afb93 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_150.png and b/TMessagesProj/src/emojis/apple/emoji/5_150.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_151.png b/TMessagesProj/src/emojis/apple/emoji/5_151.png index 17fcc1cd21..3ca3d271bd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_151.png and b/TMessagesProj/src/emojis/apple/emoji/5_151.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_152.png b/TMessagesProj/src/emojis/apple/emoji/5_152.png index 7e038addec..7d5f8f3c96 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_152.png and b/TMessagesProj/src/emojis/apple/emoji/5_152.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_153.png b/TMessagesProj/src/emojis/apple/emoji/5_153.png index 2406e4ce6f..846e5dfba0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_153.png and b/TMessagesProj/src/emojis/apple/emoji/5_153.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_154.png b/TMessagesProj/src/emojis/apple/emoji/5_154.png index da8f972dda..ff43d71d96 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_154.png and b/TMessagesProj/src/emojis/apple/emoji/5_154.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_155.png b/TMessagesProj/src/emojis/apple/emoji/5_155.png index f9ac3b1365..ab114737ae 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_155.png and b/TMessagesProj/src/emojis/apple/emoji/5_155.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_156.png b/TMessagesProj/src/emojis/apple/emoji/5_156.png index 9a7815ffbe..d2d5c1b188 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_156.png and b/TMessagesProj/src/emojis/apple/emoji/5_156.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_157.png b/TMessagesProj/src/emojis/apple/emoji/5_157.png index ce064ddbf0..da231500ad 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_157.png and b/TMessagesProj/src/emojis/apple/emoji/5_157.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_158.png b/TMessagesProj/src/emojis/apple/emoji/5_158.png index 396f9d637a..c9a62bd8b8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_158.png and b/TMessagesProj/src/emojis/apple/emoji/5_158.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_159.png b/TMessagesProj/src/emojis/apple/emoji/5_159.png index a6f22b503c..a92ca805e5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_159.png and b/TMessagesProj/src/emojis/apple/emoji/5_159.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_16.png b/TMessagesProj/src/emojis/apple/emoji/5_16.png index 2137f2acdc..65f80715c4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_16.png and 
b/TMessagesProj/src/emojis/apple/emoji/5_16.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_160.png b/TMessagesProj/src/emojis/apple/emoji/5_160.png index 1dd2a4dcf4..45a461f31c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_160.png and b/TMessagesProj/src/emojis/apple/emoji/5_160.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_161.png b/TMessagesProj/src/emojis/apple/emoji/5_161.png index 2f2fc2c4d1..20fb48028d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_161.png and b/TMessagesProj/src/emojis/apple/emoji/5_161.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_162.png b/TMessagesProj/src/emojis/apple/emoji/5_162.png index c410c1c879..63be27e172 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_162.png and b/TMessagesProj/src/emojis/apple/emoji/5_162.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_163.png b/TMessagesProj/src/emojis/apple/emoji/5_163.png index 39669e002e..71956b772f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_163.png and b/TMessagesProj/src/emojis/apple/emoji/5_163.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_164.png b/TMessagesProj/src/emojis/apple/emoji/5_164.png index e0331036b1..5b6bfccb33 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_164.png and b/TMessagesProj/src/emojis/apple/emoji/5_164.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_165.png b/TMessagesProj/src/emojis/apple/emoji/5_165.png index a341edb98c..b43615c984 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_165.png and b/TMessagesProj/src/emojis/apple/emoji/5_165.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_166.png b/TMessagesProj/src/emojis/apple/emoji/5_166.png index 23d886ce1f..bec248eeda 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_166.png and b/TMessagesProj/src/emojis/apple/emoji/5_166.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_167.png b/TMessagesProj/src/emojis/apple/emoji/5_167.png index 2ad2fee931..c3bc0b452f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_167.png and b/TMessagesProj/src/emojis/apple/emoji/5_167.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_168.png b/TMessagesProj/src/emojis/apple/emoji/5_168.png index bd7c61cd51..7acb2c7d53 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_168.png and b/TMessagesProj/src/emojis/apple/emoji/5_168.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_169.png b/TMessagesProj/src/emojis/apple/emoji/5_169.png index 5d607398e8..d93e40b0a0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_169.png and b/TMessagesProj/src/emojis/apple/emoji/5_169.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_17.png b/TMessagesProj/src/emojis/apple/emoji/5_17.png index f35f465f26..00f16e3516 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_17.png and b/TMessagesProj/src/emojis/apple/emoji/5_17.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_170.png b/TMessagesProj/src/emojis/apple/emoji/5_170.png index aafe854713..f7f8a176a2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_170.png and b/TMessagesProj/src/emojis/apple/emoji/5_170.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_171.png b/TMessagesProj/src/emojis/apple/emoji/5_171.png index 2c4d5100bd..d68b089ce9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_171.png and b/TMessagesProj/src/emojis/apple/emoji/5_171.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/5_172.png b/TMessagesProj/src/emojis/apple/emoji/5_172.png index 68a0628d95..16c100a0bc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_172.png and b/TMessagesProj/src/emojis/apple/emoji/5_172.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_173.png b/TMessagesProj/src/emojis/apple/emoji/5_173.png index 57a13e6f9b..219b92bc07 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_173.png and b/TMessagesProj/src/emojis/apple/emoji/5_173.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_174.png b/TMessagesProj/src/emojis/apple/emoji/5_174.png index dfc7f1896b..7314f19966 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_174.png and b/TMessagesProj/src/emojis/apple/emoji/5_174.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_175.png b/TMessagesProj/src/emojis/apple/emoji/5_175.png index d7b27d43a5..4a5cd3b075 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_175.png and b/TMessagesProj/src/emojis/apple/emoji/5_175.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_176.png b/TMessagesProj/src/emojis/apple/emoji/5_176.png index 7e679ec75b..e835290420 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_176.png and b/TMessagesProj/src/emojis/apple/emoji/5_176.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_177.png b/TMessagesProj/src/emojis/apple/emoji/5_177.png index eb98c38d05..c594443a2f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_177.png and b/TMessagesProj/src/emojis/apple/emoji/5_177.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_178.png b/TMessagesProj/src/emojis/apple/emoji/5_178.png index b975a392cf..4a3b53a032 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_178.png and b/TMessagesProj/src/emojis/apple/emoji/5_178.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_179.png b/TMessagesProj/src/emojis/apple/emoji/5_179.png index 48713ba534..550dd89542 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_179.png and b/TMessagesProj/src/emojis/apple/emoji/5_179.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_18.png b/TMessagesProj/src/emojis/apple/emoji/5_18.png index 7c4fde54fa..ea916e38cb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_18.png and b/TMessagesProj/src/emojis/apple/emoji/5_18.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_180.png b/TMessagesProj/src/emojis/apple/emoji/5_180.png index c435dd2afd..ac63f62e19 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_180.png and b/TMessagesProj/src/emojis/apple/emoji/5_180.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_181.png b/TMessagesProj/src/emojis/apple/emoji/5_181.png index 77ed4cfd4b..e0bbdb9784 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_181.png and b/TMessagesProj/src/emojis/apple/emoji/5_181.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_182.png b/TMessagesProj/src/emojis/apple/emoji/5_182.png index 53d96f1a6e..50738c2efd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_182.png and b/TMessagesProj/src/emojis/apple/emoji/5_182.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_183.png b/TMessagesProj/src/emojis/apple/emoji/5_183.png index d09426de18..c599b509a0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_183.png and b/TMessagesProj/src/emojis/apple/emoji/5_183.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_184.png 
b/TMessagesProj/src/emojis/apple/emoji/5_184.png index 207e7eb18d..256d911d8a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_184.png and b/TMessagesProj/src/emojis/apple/emoji/5_184.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_185.png b/TMessagesProj/src/emojis/apple/emoji/5_185.png index 95fbe6353d..d9eed28515 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_185.png and b/TMessagesProj/src/emojis/apple/emoji/5_185.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_186.png b/TMessagesProj/src/emojis/apple/emoji/5_186.png index 70635d768f..0d5077a1f8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_186.png and b/TMessagesProj/src/emojis/apple/emoji/5_186.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_187.png b/TMessagesProj/src/emojis/apple/emoji/5_187.png index ebaca94be4..562930c36c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_187.png and b/TMessagesProj/src/emojis/apple/emoji/5_187.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_188.png b/TMessagesProj/src/emojis/apple/emoji/5_188.png index b67d2806d9..dbc40308ac 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_188.png and b/TMessagesProj/src/emojis/apple/emoji/5_188.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_189.png b/TMessagesProj/src/emojis/apple/emoji/5_189.png index 93b8a0926f..5d6d4bee68 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_189.png and b/TMessagesProj/src/emojis/apple/emoji/5_189.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_19.png b/TMessagesProj/src/emojis/apple/emoji/5_19.png index a2fe254da4..3934218524 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_19.png and b/TMessagesProj/src/emojis/apple/emoji/5_19.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_190.png b/TMessagesProj/src/emojis/apple/emoji/5_190.png index 0638061bf3..34744a4e45 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_190.png and b/TMessagesProj/src/emojis/apple/emoji/5_190.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_191.png b/TMessagesProj/src/emojis/apple/emoji/5_191.png index 2af90a0540..be50fcdf32 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_191.png and b/TMessagesProj/src/emojis/apple/emoji/5_191.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_192.png b/TMessagesProj/src/emojis/apple/emoji/5_192.png index c6da06c75a..143ceef01f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_192.png and b/TMessagesProj/src/emojis/apple/emoji/5_192.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_193.png b/TMessagesProj/src/emojis/apple/emoji/5_193.png index 3efd655895..4b50e79a0d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_193.png and b/TMessagesProj/src/emojis/apple/emoji/5_193.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_194.png b/TMessagesProj/src/emojis/apple/emoji/5_194.png index 7dadb993c3..20d0f1c536 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_194.png and b/TMessagesProj/src/emojis/apple/emoji/5_194.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_195.png b/TMessagesProj/src/emojis/apple/emoji/5_195.png index 7dd9269762..fcfe318974 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_195.png and b/TMessagesProj/src/emojis/apple/emoji/5_195.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_196.png b/TMessagesProj/src/emojis/apple/emoji/5_196.png index 84996d5dcc..5ec781ecff 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/5_196.png and b/TMessagesProj/src/emojis/apple/emoji/5_196.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_197.png b/TMessagesProj/src/emojis/apple/emoji/5_197.png index f959355e09..08fb154da6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_197.png and b/TMessagesProj/src/emojis/apple/emoji/5_197.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_198.png b/TMessagesProj/src/emojis/apple/emoji/5_198.png index ef44ceb17d..ec4f4f4ccd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_198.png and b/TMessagesProj/src/emojis/apple/emoji/5_198.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_199.png b/TMessagesProj/src/emojis/apple/emoji/5_199.png index c7029a2fce..9c0d246aeb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_199.png and b/TMessagesProj/src/emojis/apple/emoji/5_199.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_2.png b/TMessagesProj/src/emojis/apple/emoji/5_2.png index b16a2f7044..c2da129d46 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_2.png and b/TMessagesProj/src/emojis/apple/emoji/5_2.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_20.png b/TMessagesProj/src/emojis/apple/emoji/5_20.png index 7f1d4973f0..1228b81db2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_20.png and b/TMessagesProj/src/emojis/apple/emoji/5_20.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_200.png b/TMessagesProj/src/emojis/apple/emoji/5_200.png index bb774e928b..41d4fbf0bb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_200.png and b/TMessagesProj/src/emojis/apple/emoji/5_200.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_201.png b/TMessagesProj/src/emojis/apple/emoji/5_201.png index 17074d1ab1..503fdc8de4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_201.png and b/TMessagesProj/src/emojis/apple/emoji/5_201.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_202.png b/TMessagesProj/src/emojis/apple/emoji/5_202.png index e09cef8224..579f436eea 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_202.png and b/TMessagesProj/src/emojis/apple/emoji/5_202.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_203.png b/TMessagesProj/src/emojis/apple/emoji/5_203.png index 59c5b7d573..d816088a4a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_203.png and b/TMessagesProj/src/emojis/apple/emoji/5_203.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_204.png b/TMessagesProj/src/emojis/apple/emoji/5_204.png index 6f8cf5298d..b9183215a4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_204.png and b/TMessagesProj/src/emojis/apple/emoji/5_204.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_205.png b/TMessagesProj/src/emojis/apple/emoji/5_205.png index 9fadb55036..756fa3662c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_205.png and b/TMessagesProj/src/emojis/apple/emoji/5_205.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_206.png b/TMessagesProj/src/emojis/apple/emoji/5_206.png index 0076d494eb..0dc505ae01 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_206.png and b/TMessagesProj/src/emojis/apple/emoji/5_206.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_207.png b/TMessagesProj/src/emojis/apple/emoji/5_207.png index 8d8220c8e9..80d6b7cb95 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_207.png and 
b/TMessagesProj/src/emojis/apple/emoji/5_207.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_208.png b/TMessagesProj/src/emojis/apple/emoji/5_208.png index 06711e3051..c382da1f6f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_208.png and b/TMessagesProj/src/emojis/apple/emoji/5_208.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_209.png b/TMessagesProj/src/emojis/apple/emoji/5_209.png index 317be1822c..82a9b2ff4e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_209.png and b/TMessagesProj/src/emojis/apple/emoji/5_209.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_21.png b/TMessagesProj/src/emojis/apple/emoji/5_21.png index 89eb850bee..c39d2afe5c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_21.png and b/TMessagesProj/src/emojis/apple/emoji/5_21.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_210.png b/TMessagesProj/src/emojis/apple/emoji/5_210.png index b5aba0db84..b4989463fb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_210.png and b/TMessagesProj/src/emojis/apple/emoji/5_210.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_211.png b/TMessagesProj/src/emojis/apple/emoji/5_211.png index fe0895513b..4609beb1c9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_211.png and b/TMessagesProj/src/emojis/apple/emoji/5_211.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_212.png b/TMessagesProj/src/emojis/apple/emoji/5_212.png index e1d6fc8bc8..8df82be8cc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_212.png and b/TMessagesProj/src/emojis/apple/emoji/5_212.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_213.png b/TMessagesProj/src/emojis/apple/emoji/5_213.png index 09ac6e7ebb..fc4afb04e4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_213.png and b/TMessagesProj/src/emojis/apple/emoji/5_213.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_214.png b/TMessagesProj/src/emojis/apple/emoji/5_214.png index ae84c6d9e6..ba6ed2fb6e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_214.png and b/TMessagesProj/src/emojis/apple/emoji/5_214.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_215.png b/TMessagesProj/src/emojis/apple/emoji/5_215.png index 3c54ad729d..28cf22402b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_215.png and b/TMessagesProj/src/emojis/apple/emoji/5_215.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_216.png b/TMessagesProj/src/emojis/apple/emoji/5_216.png index 47c3f5e55d..70d6639e4b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_216.png and b/TMessagesProj/src/emojis/apple/emoji/5_216.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_217.png b/TMessagesProj/src/emojis/apple/emoji/5_217.png index eae67af941..29f2ff4042 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_217.png and b/TMessagesProj/src/emojis/apple/emoji/5_217.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_218.png b/TMessagesProj/src/emojis/apple/emoji/5_218.png index ca588dac4d..e1df59b1f7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_218.png and b/TMessagesProj/src/emojis/apple/emoji/5_218.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_219.png b/TMessagesProj/src/emojis/apple/emoji/5_219.png index 9576268b33..91aea6d248 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_219.png and b/TMessagesProj/src/emojis/apple/emoji/5_219.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/5_22.png b/TMessagesProj/src/emojis/apple/emoji/5_22.png index bc3c5e38b0..65d2baaf9a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_22.png and b/TMessagesProj/src/emojis/apple/emoji/5_22.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_220.png b/TMessagesProj/src/emojis/apple/emoji/5_220.png index 3778a4adc0..fd98714d99 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_220.png and b/TMessagesProj/src/emojis/apple/emoji/5_220.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_221.png b/TMessagesProj/src/emojis/apple/emoji/5_221.png index 514bbf38d9..1254779db1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_221.png and b/TMessagesProj/src/emojis/apple/emoji/5_221.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_222.png b/TMessagesProj/src/emojis/apple/emoji/5_222.png index 6dacd05ff1..ba10bfcbf1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_222.png and b/TMessagesProj/src/emojis/apple/emoji/5_222.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_223.png b/TMessagesProj/src/emojis/apple/emoji/5_223.png index 92aeecec3e..ff1a6e69c7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_223.png and b/TMessagesProj/src/emojis/apple/emoji/5_223.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_224.png b/TMessagesProj/src/emojis/apple/emoji/5_224.png index 0ad36a7d6d..a30802bd09 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_224.png and b/TMessagesProj/src/emojis/apple/emoji/5_224.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_225.png b/TMessagesProj/src/emojis/apple/emoji/5_225.png index 306d7c8724..ce4f1a1081 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_225.png and b/TMessagesProj/src/emojis/apple/emoji/5_225.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_226.png b/TMessagesProj/src/emojis/apple/emoji/5_226.png index 892fd2fdd3..0767322590 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_226.png and b/TMessagesProj/src/emojis/apple/emoji/5_226.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_227.png b/TMessagesProj/src/emojis/apple/emoji/5_227.png new file mode 100644 index 0000000000..da83456209 Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/5_227.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_228.png b/TMessagesProj/src/emojis/apple/emoji/5_228.png new file mode 100644 index 0000000000..e09081e71e Binary files /dev/null and b/TMessagesProj/src/emojis/apple/emoji/5_228.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_23.png b/TMessagesProj/src/emojis/apple/emoji/5_23.png index 560c5854ad..b8a42a17a1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_23.png and b/TMessagesProj/src/emojis/apple/emoji/5_23.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_24.png b/TMessagesProj/src/emojis/apple/emoji/5_24.png index 16af102cde..669c5ec989 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_24.png and b/TMessagesProj/src/emojis/apple/emoji/5_24.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_25.png b/TMessagesProj/src/emojis/apple/emoji/5_25.png index 060fea4f85..2fe736d064 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_25.png and b/TMessagesProj/src/emojis/apple/emoji/5_25.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_26.png b/TMessagesProj/src/emojis/apple/emoji/5_26.png index 9464b214a5..2f4f04c542 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/5_26.png and b/TMessagesProj/src/emojis/apple/emoji/5_26.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_27.png b/TMessagesProj/src/emojis/apple/emoji/5_27.png index b4c70adcc5..12992b26bb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_27.png and b/TMessagesProj/src/emojis/apple/emoji/5_27.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_28.png b/TMessagesProj/src/emojis/apple/emoji/5_28.png index 2c54d10de8..5617809eeb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_28.png and b/TMessagesProj/src/emojis/apple/emoji/5_28.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_29.png b/TMessagesProj/src/emojis/apple/emoji/5_29.png index abe32edd15..6bc98e0ed0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_29.png and b/TMessagesProj/src/emojis/apple/emoji/5_29.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_3.png b/TMessagesProj/src/emojis/apple/emoji/5_3.png index 63b1c677ac..037b8e1fcb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_3.png and b/TMessagesProj/src/emojis/apple/emoji/5_3.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_30.png b/TMessagesProj/src/emojis/apple/emoji/5_30.png index 88fa2cff3e..88fa99002c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_30.png and b/TMessagesProj/src/emojis/apple/emoji/5_30.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_31.png b/TMessagesProj/src/emojis/apple/emoji/5_31.png index 1f1f41f0e5..85124f911d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_31.png and b/TMessagesProj/src/emojis/apple/emoji/5_31.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_32.png b/TMessagesProj/src/emojis/apple/emoji/5_32.png index 2bad8eed38..ad388de6b1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_32.png and b/TMessagesProj/src/emojis/apple/emoji/5_32.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_33.png b/TMessagesProj/src/emojis/apple/emoji/5_33.png index eb879295bf..1b258a4929 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_33.png and b/TMessagesProj/src/emojis/apple/emoji/5_33.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_34.png b/TMessagesProj/src/emojis/apple/emoji/5_34.png index 483656e7a1..11479a07a6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_34.png and b/TMessagesProj/src/emojis/apple/emoji/5_34.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_35.png b/TMessagesProj/src/emojis/apple/emoji/5_35.png index 9bb48b4a3f..5169fa33cc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_35.png and b/TMessagesProj/src/emojis/apple/emoji/5_35.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_36.png b/TMessagesProj/src/emojis/apple/emoji/5_36.png index 57918432e6..ddb0162567 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_36.png and b/TMessagesProj/src/emojis/apple/emoji/5_36.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_37.png b/TMessagesProj/src/emojis/apple/emoji/5_37.png index e2ac7b1523..8e0dcf288e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_37.png and b/TMessagesProj/src/emojis/apple/emoji/5_37.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_38.png b/TMessagesProj/src/emojis/apple/emoji/5_38.png index 7b6d0af499..668618a3f5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_38.png and b/TMessagesProj/src/emojis/apple/emoji/5_38.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/5_39.png b/TMessagesProj/src/emojis/apple/emoji/5_39.png index b43bd8dd7a..898355d0eb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_39.png and b/TMessagesProj/src/emojis/apple/emoji/5_39.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_4.png b/TMessagesProj/src/emojis/apple/emoji/5_4.png index fab4c6ec21..6916a2c4ae 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_4.png and b/TMessagesProj/src/emojis/apple/emoji/5_4.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_40.png b/TMessagesProj/src/emojis/apple/emoji/5_40.png index 1e10d0e2a1..ef1916c66f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_40.png and b/TMessagesProj/src/emojis/apple/emoji/5_40.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_41.png b/TMessagesProj/src/emojis/apple/emoji/5_41.png index 472a1a2b19..366905e65a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_41.png and b/TMessagesProj/src/emojis/apple/emoji/5_41.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_42.png b/TMessagesProj/src/emojis/apple/emoji/5_42.png index 6f8b7c779a..de2ce03a0e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_42.png and b/TMessagesProj/src/emojis/apple/emoji/5_42.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_43.png b/TMessagesProj/src/emojis/apple/emoji/5_43.png index c76f44c264..bf5daca6a5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_43.png and b/TMessagesProj/src/emojis/apple/emoji/5_43.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_44.png b/TMessagesProj/src/emojis/apple/emoji/5_44.png index 1a3b29f729..36e67c983b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_44.png and b/TMessagesProj/src/emojis/apple/emoji/5_44.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_45.png b/TMessagesProj/src/emojis/apple/emoji/5_45.png index 31d1fe6781..c17f53fd28 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_45.png and b/TMessagesProj/src/emojis/apple/emoji/5_45.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_46.png b/TMessagesProj/src/emojis/apple/emoji/5_46.png index 48ea4c2b63..10354c30ad 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_46.png and b/TMessagesProj/src/emojis/apple/emoji/5_46.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_47.png b/TMessagesProj/src/emojis/apple/emoji/5_47.png index 4654eae354..e463b9ff3c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_47.png and b/TMessagesProj/src/emojis/apple/emoji/5_47.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_48.png b/TMessagesProj/src/emojis/apple/emoji/5_48.png index d8459a6158..654aff39b3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_48.png and b/TMessagesProj/src/emojis/apple/emoji/5_48.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_49.png b/TMessagesProj/src/emojis/apple/emoji/5_49.png index 589686a8af..76c4de9cad 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_49.png and b/TMessagesProj/src/emojis/apple/emoji/5_49.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_5.png b/TMessagesProj/src/emojis/apple/emoji/5_5.png index 0b73cd0188..b603dad642 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_5.png and b/TMessagesProj/src/emojis/apple/emoji/5_5.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_50.png b/TMessagesProj/src/emojis/apple/emoji/5_50.png index 421d3a8d5e..432da2a659 100644 Binary 
files a/TMessagesProj/src/emojis/apple/emoji/5_50.png and b/TMessagesProj/src/emojis/apple/emoji/5_50.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_51.png b/TMessagesProj/src/emojis/apple/emoji/5_51.png index 43a838b7bd..6df8de6b35 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_51.png and b/TMessagesProj/src/emojis/apple/emoji/5_51.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_52.png b/TMessagesProj/src/emojis/apple/emoji/5_52.png index 968ff6a054..79094ccb9e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_52.png and b/TMessagesProj/src/emojis/apple/emoji/5_52.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_53.png b/TMessagesProj/src/emojis/apple/emoji/5_53.png index 45c95b5d8a..96eb12abfa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_53.png and b/TMessagesProj/src/emojis/apple/emoji/5_53.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_54.png b/TMessagesProj/src/emojis/apple/emoji/5_54.png index 010a0699dd..fc9b6a9cb3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_54.png and b/TMessagesProj/src/emojis/apple/emoji/5_54.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_55.png b/TMessagesProj/src/emojis/apple/emoji/5_55.png index 0a5b8b0f47..9aafb67710 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_55.png and b/TMessagesProj/src/emojis/apple/emoji/5_55.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_56.png b/TMessagesProj/src/emojis/apple/emoji/5_56.png index dd91f181b5..0c99845bf1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_56.png and b/TMessagesProj/src/emojis/apple/emoji/5_56.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_57.png b/TMessagesProj/src/emojis/apple/emoji/5_57.png index f6e63bc645..176ecde8f3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_57.png and b/TMessagesProj/src/emojis/apple/emoji/5_57.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_58.png b/TMessagesProj/src/emojis/apple/emoji/5_58.png index 64a71f0723..03e5f1b45a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_58.png and b/TMessagesProj/src/emojis/apple/emoji/5_58.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_59.png b/TMessagesProj/src/emojis/apple/emoji/5_59.png index 4f19246b3e..4d4317a0d0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_59.png and b/TMessagesProj/src/emojis/apple/emoji/5_59.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_6.png b/TMessagesProj/src/emojis/apple/emoji/5_6.png index 5162349814..e4cebac2fd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_6.png and b/TMessagesProj/src/emojis/apple/emoji/5_6.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_60.png b/TMessagesProj/src/emojis/apple/emoji/5_60.png index 97c2702120..9c92c315a0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_60.png and b/TMessagesProj/src/emojis/apple/emoji/5_60.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_61.png b/TMessagesProj/src/emojis/apple/emoji/5_61.png index c98a9f216e..e8a50eb63f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_61.png and b/TMessagesProj/src/emojis/apple/emoji/5_61.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_62.png b/TMessagesProj/src/emojis/apple/emoji/5_62.png index 96465ca4a6..d04fe9611a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_62.png and b/TMessagesProj/src/emojis/apple/emoji/5_62.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/5_63.png b/TMessagesProj/src/emojis/apple/emoji/5_63.png index 98dfeb6264..df0ccfa689 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_63.png and b/TMessagesProj/src/emojis/apple/emoji/5_63.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_64.png b/TMessagesProj/src/emojis/apple/emoji/5_64.png index 9462f87041..276dd51111 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_64.png and b/TMessagesProj/src/emojis/apple/emoji/5_64.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_65.png b/TMessagesProj/src/emojis/apple/emoji/5_65.png index 29f24073c8..2e70f15173 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_65.png and b/TMessagesProj/src/emojis/apple/emoji/5_65.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_66.png b/TMessagesProj/src/emojis/apple/emoji/5_66.png index b8352be5cd..3419991486 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_66.png and b/TMessagesProj/src/emojis/apple/emoji/5_66.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_67.png b/TMessagesProj/src/emojis/apple/emoji/5_67.png index 1c39cb400c..8c280522a4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_67.png and b/TMessagesProj/src/emojis/apple/emoji/5_67.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_68.png b/TMessagesProj/src/emojis/apple/emoji/5_68.png index 1aa66cb940..1ca621ed2c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_68.png and b/TMessagesProj/src/emojis/apple/emoji/5_68.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_69.png b/TMessagesProj/src/emojis/apple/emoji/5_69.png index 65c5610576..5ef529f567 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_69.png and b/TMessagesProj/src/emojis/apple/emoji/5_69.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_7.png b/TMessagesProj/src/emojis/apple/emoji/5_7.png index d8a1bd0469..e0bbe725e8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_7.png and b/TMessagesProj/src/emojis/apple/emoji/5_7.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_70.png b/TMessagesProj/src/emojis/apple/emoji/5_70.png index 6fa47576c9..277691bc85 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_70.png and b/TMessagesProj/src/emojis/apple/emoji/5_70.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_71.png b/TMessagesProj/src/emojis/apple/emoji/5_71.png index 2174aa0d4e..6584427ed9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_71.png and b/TMessagesProj/src/emojis/apple/emoji/5_71.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_72.png b/TMessagesProj/src/emojis/apple/emoji/5_72.png index f28157deb0..3a3fc71e90 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_72.png and b/TMessagesProj/src/emojis/apple/emoji/5_72.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_73.png b/TMessagesProj/src/emojis/apple/emoji/5_73.png index ea30c0a8e9..5dfe24ae49 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_73.png and b/TMessagesProj/src/emojis/apple/emoji/5_73.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_74.png b/TMessagesProj/src/emojis/apple/emoji/5_74.png index 6351c71168..69bd4d7025 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/5_74.png and b/TMessagesProj/src/emojis/apple/emoji/5_74.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/5_75.png b/TMessagesProj/src/emojis/apple/emoji/5_75.png index 35c5a0c920..398bf4f3a7 100644 Binary 
files a/TMessagesProj/src/emojis/apple/emoji/5_75.png and b/TMessagesProj/src/emojis/apple/emoji/5_75.png differ
[Binary emoji assets updated: the Apple emoji sprite PNGs under TMessagesProj/src/emojis/apple/emoji/ are regenerated; most files are modified in place, and 6_293.png through 6_297.png are added as new files.]
diff --git a/TMessagesProj/src/emojis/apple/emoji/6_59.png
b/TMessagesProj/src/emojis/apple/emoji/6_59.png index 6f181a20f5..3b76efebe6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_59.png and b/TMessagesProj/src/emojis/apple/emoji/6_59.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_6.png b/TMessagesProj/src/emojis/apple/emoji/6_6.png index a9fd3ca1aa..d22c92294e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_6.png and b/TMessagesProj/src/emojis/apple/emoji/6_6.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_60.png b/TMessagesProj/src/emojis/apple/emoji/6_60.png index 7a003db939..ea383101c5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_60.png and b/TMessagesProj/src/emojis/apple/emoji/6_60.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_61.png b/TMessagesProj/src/emojis/apple/emoji/6_61.png index 5012b8e5a5..669022ff3f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_61.png and b/TMessagesProj/src/emojis/apple/emoji/6_61.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_62.png b/TMessagesProj/src/emojis/apple/emoji/6_62.png index 12457e5b4f..d8a87fc571 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_62.png and b/TMessagesProj/src/emojis/apple/emoji/6_62.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_63.png b/TMessagesProj/src/emojis/apple/emoji/6_63.png index 9b6b3991de..17e4d039ed 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_63.png and b/TMessagesProj/src/emojis/apple/emoji/6_63.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_64.png b/TMessagesProj/src/emojis/apple/emoji/6_64.png index 868730c123..6a211cd43f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_64.png and b/TMessagesProj/src/emojis/apple/emoji/6_64.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_65.png b/TMessagesProj/src/emojis/apple/emoji/6_65.png index f0494d483c..f6d91cfc33 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_65.png and b/TMessagesProj/src/emojis/apple/emoji/6_65.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_66.png b/TMessagesProj/src/emojis/apple/emoji/6_66.png index e1fa3450b3..8c11714438 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_66.png and b/TMessagesProj/src/emojis/apple/emoji/6_66.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_67.png b/TMessagesProj/src/emojis/apple/emoji/6_67.png index b27ff81307..106a5afb16 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_67.png and b/TMessagesProj/src/emojis/apple/emoji/6_67.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_68.png b/TMessagesProj/src/emojis/apple/emoji/6_68.png index d2cec6004c..f3541fc1cc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_68.png and b/TMessagesProj/src/emojis/apple/emoji/6_68.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_69.png b/TMessagesProj/src/emojis/apple/emoji/6_69.png index 6a84e32b35..47a9fb2f61 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_69.png and b/TMessagesProj/src/emojis/apple/emoji/6_69.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_7.png b/TMessagesProj/src/emojis/apple/emoji/6_7.png index 3623001dc8..ebd9850591 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_7.png and b/TMessagesProj/src/emojis/apple/emoji/6_7.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_70.png b/TMessagesProj/src/emojis/apple/emoji/6_70.png index efd5fc561b..fbf98c9e8f 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/6_70.png and b/TMessagesProj/src/emojis/apple/emoji/6_70.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_71.png b/TMessagesProj/src/emojis/apple/emoji/6_71.png index dc6bec03e2..2adc4ec6e0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_71.png and b/TMessagesProj/src/emojis/apple/emoji/6_71.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_72.png b/TMessagesProj/src/emojis/apple/emoji/6_72.png index d7c1dd58f7..45598903b8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_72.png and b/TMessagesProj/src/emojis/apple/emoji/6_72.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_73.png b/TMessagesProj/src/emojis/apple/emoji/6_73.png index 97a2fba622..2c61ff864f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_73.png and b/TMessagesProj/src/emojis/apple/emoji/6_73.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_74.png b/TMessagesProj/src/emojis/apple/emoji/6_74.png index 4be7676762..ae296009a5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_74.png and b/TMessagesProj/src/emojis/apple/emoji/6_74.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_75.png b/TMessagesProj/src/emojis/apple/emoji/6_75.png index f64af9798d..0e7880f7d3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_75.png and b/TMessagesProj/src/emojis/apple/emoji/6_75.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_76.png b/TMessagesProj/src/emojis/apple/emoji/6_76.png index 803a4e6c87..51ac50b6ab 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_76.png and b/TMessagesProj/src/emojis/apple/emoji/6_76.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_77.png b/TMessagesProj/src/emojis/apple/emoji/6_77.png index f05705566c..d21c5c2819 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_77.png and b/TMessagesProj/src/emojis/apple/emoji/6_77.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_78.png b/TMessagesProj/src/emojis/apple/emoji/6_78.png index 93d9728e52..693821160d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_78.png and b/TMessagesProj/src/emojis/apple/emoji/6_78.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_79.png b/TMessagesProj/src/emojis/apple/emoji/6_79.png index 2c96345100..1a8dfc93cf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_79.png and b/TMessagesProj/src/emojis/apple/emoji/6_79.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_8.png b/TMessagesProj/src/emojis/apple/emoji/6_8.png index b66f1c85fd..cb2a61795a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_8.png and b/TMessagesProj/src/emojis/apple/emoji/6_8.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_80.png b/TMessagesProj/src/emojis/apple/emoji/6_80.png index 8aeda39225..8ef2de5075 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_80.png and b/TMessagesProj/src/emojis/apple/emoji/6_80.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_81.png b/TMessagesProj/src/emojis/apple/emoji/6_81.png index c868b936eb..42e0febf1f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_81.png and b/TMessagesProj/src/emojis/apple/emoji/6_81.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_82.png b/TMessagesProj/src/emojis/apple/emoji/6_82.png index 45d19af81b..00d8ac6b40 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_82.png and b/TMessagesProj/src/emojis/apple/emoji/6_82.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/6_83.png b/TMessagesProj/src/emojis/apple/emoji/6_83.png index 00a77c7391..b85bf10b67 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_83.png and b/TMessagesProj/src/emojis/apple/emoji/6_83.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_84.png b/TMessagesProj/src/emojis/apple/emoji/6_84.png index 5f45d7269d..002bbb3bfa 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_84.png and b/TMessagesProj/src/emojis/apple/emoji/6_84.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_85.png b/TMessagesProj/src/emojis/apple/emoji/6_85.png index e13c435378..035f5a97be 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_85.png and b/TMessagesProj/src/emojis/apple/emoji/6_85.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_86.png b/TMessagesProj/src/emojis/apple/emoji/6_86.png index 6d068ffd3a..5f1fa3afcc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_86.png and b/TMessagesProj/src/emojis/apple/emoji/6_86.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_87.png b/TMessagesProj/src/emojis/apple/emoji/6_87.png index da8d4aad6f..48b2d2950d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_87.png and b/TMessagesProj/src/emojis/apple/emoji/6_87.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_88.png b/TMessagesProj/src/emojis/apple/emoji/6_88.png index e02217cc3d..3b5129a95c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_88.png and b/TMessagesProj/src/emojis/apple/emoji/6_88.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_89.png b/TMessagesProj/src/emojis/apple/emoji/6_89.png index ed74d9dd0d..4724953142 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_89.png and b/TMessagesProj/src/emojis/apple/emoji/6_89.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_9.png b/TMessagesProj/src/emojis/apple/emoji/6_9.png index f8f21bbc5b..54cead2ef9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_9.png and b/TMessagesProj/src/emojis/apple/emoji/6_9.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_90.png b/TMessagesProj/src/emojis/apple/emoji/6_90.png index 3ec5c75752..bb62b720e9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_90.png and b/TMessagesProj/src/emojis/apple/emoji/6_90.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_91.png b/TMessagesProj/src/emojis/apple/emoji/6_91.png index 8be7e8f2a5..245964eaa3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_91.png and b/TMessagesProj/src/emojis/apple/emoji/6_91.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_92.png b/TMessagesProj/src/emojis/apple/emoji/6_92.png index c61f5773c2..7394e08d0e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_92.png and b/TMessagesProj/src/emojis/apple/emoji/6_92.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_93.png b/TMessagesProj/src/emojis/apple/emoji/6_93.png index 50021e82f4..946c2856e8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_93.png and b/TMessagesProj/src/emojis/apple/emoji/6_93.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_94.png b/TMessagesProj/src/emojis/apple/emoji/6_94.png index d38fad6a52..a9754c2915 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_94.png and b/TMessagesProj/src/emojis/apple/emoji/6_94.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_95.png b/TMessagesProj/src/emojis/apple/emoji/6_95.png index f3ab8f688e..f900af9999 100644 Binary 
files a/TMessagesProj/src/emojis/apple/emoji/6_95.png and b/TMessagesProj/src/emojis/apple/emoji/6_95.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_96.png b/TMessagesProj/src/emojis/apple/emoji/6_96.png index 18ae9c1941..f6be17df0d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_96.png and b/TMessagesProj/src/emojis/apple/emoji/6_96.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_97.png b/TMessagesProj/src/emojis/apple/emoji/6_97.png index b4ad88a2d9..eb23f44388 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_97.png and b/TMessagesProj/src/emojis/apple/emoji/6_97.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_98.png b/TMessagesProj/src/emojis/apple/emoji/6_98.png index 91e86aff33..898bda06e3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_98.png and b/TMessagesProj/src/emojis/apple/emoji/6_98.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/6_99.png b/TMessagesProj/src/emojis/apple/emoji/6_99.png index c88b8ed683..864029d3ee 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/6_99.png and b/TMessagesProj/src/emojis/apple/emoji/6_99.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_0.png b/TMessagesProj/src/emojis/apple/emoji/7_0.png index a48390fc2a..1c9fddac49 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_0.png and b/TMessagesProj/src/emojis/apple/emoji/7_0.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_1.png b/TMessagesProj/src/emojis/apple/emoji/7_1.png index dbcb52a638..be2410d861 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_1.png and b/TMessagesProj/src/emojis/apple/emoji/7_1.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_10.png b/TMessagesProj/src/emojis/apple/emoji/7_10.png index 8600d3b3f1..2a73368ca6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_10.png and b/TMessagesProj/src/emojis/apple/emoji/7_10.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_100.png b/TMessagesProj/src/emojis/apple/emoji/7_100.png index f2e352d0f9..ad429eca7b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_100.png and b/TMessagesProj/src/emojis/apple/emoji/7_100.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_101.png b/TMessagesProj/src/emojis/apple/emoji/7_101.png index 9af2f176a2..3c80ea7cf1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_101.png and b/TMessagesProj/src/emojis/apple/emoji/7_101.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_102.png b/TMessagesProj/src/emojis/apple/emoji/7_102.png index a6f1614662..cd5bf0efc3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_102.png and b/TMessagesProj/src/emojis/apple/emoji/7_102.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_103.png b/TMessagesProj/src/emojis/apple/emoji/7_103.png index 5359be529b..0a2074da0a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_103.png and b/TMessagesProj/src/emojis/apple/emoji/7_103.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_104.png b/TMessagesProj/src/emojis/apple/emoji/7_104.png index a6ce04e076..36cffaa2e9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_104.png and b/TMessagesProj/src/emojis/apple/emoji/7_104.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_105.png b/TMessagesProj/src/emojis/apple/emoji/7_105.png index b0b5d0ef4b..c7bd003027 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_105.png and b/TMessagesProj/src/emojis/apple/emoji/7_105.png differ diff 
--git a/TMessagesProj/src/emojis/apple/emoji/7_106.png b/TMessagesProj/src/emojis/apple/emoji/7_106.png index 55c5a1169b..37afa3dcb3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_106.png and b/TMessagesProj/src/emojis/apple/emoji/7_106.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_107.png b/TMessagesProj/src/emojis/apple/emoji/7_107.png index 73e8f44a51..c9e14a490f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_107.png and b/TMessagesProj/src/emojis/apple/emoji/7_107.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_108.png b/TMessagesProj/src/emojis/apple/emoji/7_108.png index d8b58bf54e..9bb4d8eade 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_108.png and b/TMessagesProj/src/emojis/apple/emoji/7_108.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_109.png b/TMessagesProj/src/emojis/apple/emoji/7_109.png index 520abba14c..911ee88087 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_109.png and b/TMessagesProj/src/emojis/apple/emoji/7_109.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_11.png b/TMessagesProj/src/emojis/apple/emoji/7_11.png index 2a8766f645..a90c40817a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_11.png and b/TMessagesProj/src/emojis/apple/emoji/7_11.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_110.png b/TMessagesProj/src/emojis/apple/emoji/7_110.png index 3c3581e6f3..ad31295a6f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_110.png and b/TMessagesProj/src/emojis/apple/emoji/7_110.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_111.png b/TMessagesProj/src/emojis/apple/emoji/7_111.png index 9b7e5740c9..9af66311d8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_111.png and b/TMessagesProj/src/emojis/apple/emoji/7_111.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_112.png b/TMessagesProj/src/emojis/apple/emoji/7_112.png index 2d911cfa95..b5426e85b1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_112.png and b/TMessagesProj/src/emojis/apple/emoji/7_112.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_113.png b/TMessagesProj/src/emojis/apple/emoji/7_113.png index 5270bbe040..c7781a3677 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_113.png and b/TMessagesProj/src/emojis/apple/emoji/7_113.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_114.png b/TMessagesProj/src/emojis/apple/emoji/7_114.png index eba349619e..2e4bac122f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_114.png and b/TMessagesProj/src/emojis/apple/emoji/7_114.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_115.png b/TMessagesProj/src/emojis/apple/emoji/7_115.png index f4ad246f5f..c03d19af01 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_115.png and b/TMessagesProj/src/emojis/apple/emoji/7_115.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_116.png b/TMessagesProj/src/emojis/apple/emoji/7_116.png index 76ebb11572..b693f1c66b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_116.png and b/TMessagesProj/src/emojis/apple/emoji/7_116.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_117.png b/TMessagesProj/src/emojis/apple/emoji/7_117.png index 438dd1b1fd..d7551fda5f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_117.png and b/TMessagesProj/src/emojis/apple/emoji/7_117.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_118.png 
b/TMessagesProj/src/emojis/apple/emoji/7_118.png index 7b10a7a853..482bbaf668 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_118.png and b/TMessagesProj/src/emojis/apple/emoji/7_118.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_119.png b/TMessagesProj/src/emojis/apple/emoji/7_119.png index 444d766f29..c5b746c6c7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_119.png and b/TMessagesProj/src/emojis/apple/emoji/7_119.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_12.png b/TMessagesProj/src/emojis/apple/emoji/7_12.png index 97214e379b..7e18e6d5c9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_12.png and b/TMessagesProj/src/emojis/apple/emoji/7_12.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_120.png b/TMessagesProj/src/emojis/apple/emoji/7_120.png index 6f1cbb30a7..fb035fab59 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_120.png and b/TMessagesProj/src/emojis/apple/emoji/7_120.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_121.png b/TMessagesProj/src/emojis/apple/emoji/7_121.png index 1a96343570..465a96f892 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_121.png and b/TMessagesProj/src/emojis/apple/emoji/7_121.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_122.png b/TMessagesProj/src/emojis/apple/emoji/7_122.png index db3803b512..ef76579a92 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_122.png and b/TMessagesProj/src/emojis/apple/emoji/7_122.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_123.png b/TMessagesProj/src/emojis/apple/emoji/7_123.png index d63d9077c5..4bef8e53c1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_123.png and b/TMessagesProj/src/emojis/apple/emoji/7_123.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_124.png b/TMessagesProj/src/emojis/apple/emoji/7_124.png index b4fa427ff9..fd0c5245bd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_124.png and b/TMessagesProj/src/emojis/apple/emoji/7_124.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_125.png b/TMessagesProj/src/emojis/apple/emoji/7_125.png index 3308fc912e..a672967f07 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_125.png and b/TMessagesProj/src/emojis/apple/emoji/7_125.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_126.png b/TMessagesProj/src/emojis/apple/emoji/7_126.png index 7f3fa8356a..966a1ac106 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_126.png and b/TMessagesProj/src/emojis/apple/emoji/7_126.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_127.png b/TMessagesProj/src/emojis/apple/emoji/7_127.png index 484047ee08..e6ef14a00f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_127.png and b/TMessagesProj/src/emojis/apple/emoji/7_127.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_128.png b/TMessagesProj/src/emojis/apple/emoji/7_128.png index 29e7aa6553..bd50f0b079 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_128.png and b/TMessagesProj/src/emojis/apple/emoji/7_128.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_129.png b/TMessagesProj/src/emojis/apple/emoji/7_129.png index f8289fad94..93457942ef 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_129.png and b/TMessagesProj/src/emojis/apple/emoji/7_129.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_13.png b/TMessagesProj/src/emojis/apple/emoji/7_13.png index 062cf729d4..392ebaa991 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/7_13.png and b/TMessagesProj/src/emojis/apple/emoji/7_13.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_130.png b/TMessagesProj/src/emojis/apple/emoji/7_130.png index e4f317ef6f..c9887c0e21 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_130.png and b/TMessagesProj/src/emojis/apple/emoji/7_130.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_131.png b/TMessagesProj/src/emojis/apple/emoji/7_131.png index cbb63e5ef8..8296d902b6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_131.png and b/TMessagesProj/src/emojis/apple/emoji/7_131.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_132.png b/TMessagesProj/src/emojis/apple/emoji/7_132.png index 43f4b3a814..3b85c0a37d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_132.png and b/TMessagesProj/src/emojis/apple/emoji/7_132.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_133.png b/TMessagesProj/src/emojis/apple/emoji/7_133.png index 5f3c5e9793..d84f49104e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_133.png and b/TMessagesProj/src/emojis/apple/emoji/7_133.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_134.png b/TMessagesProj/src/emojis/apple/emoji/7_134.png index 603194a9c6..46877d0ea3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_134.png and b/TMessagesProj/src/emojis/apple/emoji/7_134.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_135.png b/TMessagesProj/src/emojis/apple/emoji/7_135.png index 250be09909..55d7df1157 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_135.png and b/TMessagesProj/src/emojis/apple/emoji/7_135.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_136.png b/TMessagesProj/src/emojis/apple/emoji/7_136.png index dd5c8a05c0..b6cbe88ed8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_136.png and b/TMessagesProj/src/emojis/apple/emoji/7_136.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_137.png b/TMessagesProj/src/emojis/apple/emoji/7_137.png index fceef0f678..d978085b2b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_137.png and b/TMessagesProj/src/emojis/apple/emoji/7_137.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_138.png b/TMessagesProj/src/emojis/apple/emoji/7_138.png index 4b241980b7..d40b045766 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_138.png and b/TMessagesProj/src/emojis/apple/emoji/7_138.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_139.png b/TMessagesProj/src/emojis/apple/emoji/7_139.png index 323e079d28..4835078b2c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_139.png and b/TMessagesProj/src/emojis/apple/emoji/7_139.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_14.png b/TMessagesProj/src/emojis/apple/emoji/7_14.png index 5df1111315..2125603abb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_14.png and b/TMessagesProj/src/emojis/apple/emoji/7_14.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_140.png b/TMessagesProj/src/emojis/apple/emoji/7_140.png index 84ecaaca0a..79c48666e7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_140.png and b/TMessagesProj/src/emojis/apple/emoji/7_140.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_141.png b/TMessagesProj/src/emojis/apple/emoji/7_141.png index 11601dc84e..5bbd717744 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_141.png and 
b/TMessagesProj/src/emojis/apple/emoji/7_141.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_142.png b/TMessagesProj/src/emojis/apple/emoji/7_142.png index 2d5792de5f..3267afb9e0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_142.png and b/TMessagesProj/src/emojis/apple/emoji/7_142.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_143.png b/TMessagesProj/src/emojis/apple/emoji/7_143.png index b2c07bc51a..17d0c58d55 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_143.png and b/TMessagesProj/src/emojis/apple/emoji/7_143.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_144.png b/TMessagesProj/src/emojis/apple/emoji/7_144.png index 00b91cbc20..9919357ebf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_144.png and b/TMessagesProj/src/emojis/apple/emoji/7_144.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_145.png b/TMessagesProj/src/emojis/apple/emoji/7_145.png index 492dcb696c..af5b8bd848 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_145.png and b/TMessagesProj/src/emojis/apple/emoji/7_145.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_146.png b/TMessagesProj/src/emojis/apple/emoji/7_146.png index 0958c22edc..2444a431ac 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_146.png and b/TMessagesProj/src/emojis/apple/emoji/7_146.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_147.png b/TMessagesProj/src/emojis/apple/emoji/7_147.png index f0449356d3..91d62d98eb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_147.png and b/TMessagesProj/src/emojis/apple/emoji/7_147.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_148.png b/TMessagesProj/src/emojis/apple/emoji/7_148.png index a9e70800d9..30c484908a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_148.png and b/TMessagesProj/src/emojis/apple/emoji/7_148.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_149.png b/TMessagesProj/src/emojis/apple/emoji/7_149.png index 3009e2d287..30b74067ca 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_149.png and b/TMessagesProj/src/emojis/apple/emoji/7_149.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_15.png b/TMessagesProj/src/emojis/apple/emoji/7_15.png index 4295eb3cb2..1cd0eabf0e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_15.png and b/TMessagesProj/src/emojis/apple/emoji/7_15.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_150.png b/TMessagesProj/src/emojis/apple/emoji/7_150.png index 49cbf5444e..872c5d97cb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_150.png and b/TMessagesProj/src/emojis/apple/emoji/7_150.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_151.png b/TMessagesProj/src/emojis/apple/emoji/7_151.png index 07a5de82e8..77708c2b96 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_151.png and b/TMessagesProj/src/emojis/apple/emoji/7_151.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_152.png b/TMessagesProj/src/emojis/apple/emoji/7_152.png index b771d9cd10..c096ca3204 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_152.png and b/TMessagesProj/src/emojis/apple/emoji/7_152.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_153.png b/TMessagesProj/src/emojis/apple/emoji/7_153.png index 7f54540688..049781a178 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_153.png and b/TMessagesProj/src/emojis/apple/emoji/7_153.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/7_154.png b/TMessagesProj/src/emojis/apple/emoji/7_154.png index fb8eaffdc7..161aa5bd75 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_154.png and b/TMessagesProj/src/emojis/apple/emoji/7_154.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_155.png b/TMessagesProj/src/emojis/apple/emoji/7_155.png index 9db91a36e4..d26db47bdd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_155.png and b/TMessagesProj/src/emojis/apple/emoji/7_155.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_156.png b/TMessagesProj/src/emojis/apple/emoji/7_156.png index 368ac96328..ec7f2dfb38 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_156.png and b/TMessagesProj/src/emojis/apple/emoji/7_156.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_157.png b/TMessagesProj/src/emojis/apple/emoji/7_157.png index 137edebbe0..fec0fea2cf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_157.png and b/TMessagesProj/src/emojis/apple/emoji/7_157.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_158.png b/TMessagesProj/src/emojis/apple/emoji/7_158.png index a9640f8b3f..2628e4d361 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_158.png and b/TMessagesProj/src/emojis/apple/emoji/7_158.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_159.png b/TMessagesProj/src/emojis/apple/emoji/7_159.png index d9d4104135..7d9f32cef8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_159.png and b/TMessagesProj/src/emojis/apple/emoji/7_159.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_16.png b/TMessagesProj/src/emojis/apple/emoji/7_16.png index 3347d9bb40..9b84a77975 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_16.png and b/TMessagesProj/src/emojis/apple/emoji/7_16.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_160.png b/TMessagesProj/src/emojis/apple/emoji/7_160.png index 71b59fe046..d445d6c6c8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_160.png and b/TMessagesProj/src/emojis/apple/emoji/7_160.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_161.png b/TMessagesProj/src/emojis/apple/emoji/7_161.png index 7576fd3bfd..cdc699d430 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_161.png and b/TMessagesProj/src/emojis/apple/emoji/7_161.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_162.png b/TMessagesProj/src/emojis/apple/emoji/7_162.png index cff727b1da..4268fcc7dd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_162.png and b/TMessagesProj/src/emojis/apple/emoji/7_162.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_163.png b/TMessagesProj/src/emojis/apple/emoji/7_163.png index f571349249..07fdb18336 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_163.png and b/TMessagesProj/src/emojis/apple/emoji/7_163.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_164.png b/TMessagesProj/src/emojis/apple/emoji/7_164.png index 14524bb8bf..4983e4655d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_164.png and b/TMessagesProj/src/emojis/apple/emoji/7_164.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_165.png b/TMessagesProj/src/emojis/apple/emoji/7_165.png index 28e765fd5e..ed07beed59 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_165.png and b/TMessagesProj/src/emojis/apple/emoji/7_165.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_166.png 
b/TMessagesProj/src/emojis/apple/emoji/7_166.png index 98f5c462cd..a6cfedd3db 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_166.png and b/TMessagesProj/src/emojis/apple/emoji/7_166.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_167.png b/TMessagesProj/src/emojis/apple/emoji/7_167.png index 524e971633..aaa7ffad01 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_167.png and b/TMessagesProj/src/emojis/apple/emoji/7_167.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_168.png b/TMessagesProj/src/emojis/apple/emoji/7_168.png index 8affad0922..52776c2049 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_168.png and b/TMessagesProj/src/emojis/apple/emoji/7_168.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_169.png b/TMessagesProj/src/emojis/apple/emoji/7_169.png index 0c71c0a307..4b2bb564f9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_169.png and b/TMessagesProj/src/emojis/apple/emoji/7_169.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_17.png b/TMessagesProj/src/emojis/apple/emoji/7_17.png index 8a98991a31..de7826cc35 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_17.png and b/TMessagesProj/src/emojis/apple/emoji/7_17.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_170.png b/TMessagesProj/src/emojis/apple/emoji/7_170.png index a77eba2d41..66d2528390 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_170.png and b/TMessagesProj/src/emojis/apple/emoji/7_170.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_171.png b/TMessagesProj/src/emojis/apple/emoji/7_171.png index de0d3484ce..e4ba306564 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_171.png and b/TMessagesProj/src/emojis/apple/emoji/7_171.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_172.png b/TMessagesProj/src/emojis/apple/emoji/7_172.png index 9c3ff1d3e4..3629642153 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_172.png and b/TMessagesProj/src/emojis/apple/emoji/7_172.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_173.png b/TMessagesProj/src/emojis/apple/emoji/7_173.png index 51dcf8e5fc..0aa06180e7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_173.png and b/TMessagesProj/src/emojis/apple/emoji/7_173.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_174.png b/TMessagesProj/src/emojis/apple/emoji/7_174.png index 4d09102e0e..a08fd2d1c9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_174.png and b/TMessagesProj/src/emojis/apple/emoji/7_174.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_175.png b/TMessagesProj/src/emojis/apple/emoji/7_175.png index 64a313d5fb..290cd60b9e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_175.png and b/TMessagesProj/src/emojis/apple/emoji/7_175.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_176.png b/TMessagesProj/src/emojis/apple/emoji/7_176.png index c73dd831b7..9126fe9242 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_176.png and b/TMessagesProj/src/emojis/apple/emoji/7_176.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_177.png b/TMessagesProj/src/emojis/apple/emoji/7_177.png index 164d7fbff6..7546d58983 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_177.png and b/TMessagesProj/src/emojis/apple/emoji/7_177.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_178.png b/TMessagesProj/src/emojis/apple/emoji/7_178.png index 8b8b74b8c4..8bffae27c0 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/7_178.png and b/TMessagesProj/src/emojis/apple/emoji/7_178.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_179.png b/TMessagesProj/src/emojis/apple/emoji/7_179.png index f5cd8627c1..f1a26b44e2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_179.png and b/TMessagesProj/src/emojis/apple/emoji/7_179.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_18.png b/TMessagesProj/src/emojis/apple/emoji/7_18.png index a542f603e5..2b2c0b0970 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_18.png and b/TMessagesProj/src/emojis/apple/emoji/7_18.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_180.png b/TMessagesProj/src/emojis/apple/emoji/7_180.png index e3e820dda1..b37e6c0fae 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_180.png and b/TMessagesProj/src/emojis/apple/emoji/7_180.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_181.png b/TMessagesProj/src/emojis/apple/emoji/7_181.png index bbc4be279f..bec6f39ce0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_181.png and b/TMessagesProj/src/emojis/apple/emoji/7_181.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_182.png b/TMessagesProj/src/emojis/apple/emoji/7_182.png index 9053714e51..b1a537cb43 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_182.png and b/TMessagesProj/src/emojis/apple/emoji/7_182.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_183.png b/TMessagesProj/src/emojis/apple/emoji/7_183.png index c1dffdda84..b71a7310a1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_183.png and b/TMessagesProj/src/emojis/apple/emoji/7_183.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_184.png b/TMessagesProj/src/emojis/apple/emoji/7_184.png index ac0592a631..d93771142e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_184.png and b/TMessagesProj/src/emojis/apple/emoji/7_184.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_185.png b/TMessagesProj/src/emojis/apple/emoji/7_185.png index 797d2951af..a4ce3d571d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_185.png and b/TMessagesProj/src/emojis/apple/emoji/7_185.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_186.png b/TMessagesProj/src/emojis/apple/emoji/7_186.png index 078ec62842..7640d12e9d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_186.png and b/TMessagesProj/src/emojis/apple/emoji/7_186.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_187.png b/TMessagesProj/src/emojis/apple/emoji/7_187.png index 553edb7aeb..16e67b2641 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_187.png and b/TMessagesProj/src/emojis/apple/emoji/7_187.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_188.png b/TMessagesProj/src/emojis/apple/emoji/7_188.png index a906827902..da72c877b2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_188.png and b/TMessagesProj/src/emojis/apple/emoji/7_188.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_189.png b/TMessagesProj/src/emojis/apple/emoji/7_189.png index f9e2b6f012..0a236fb6d0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_189.png and b/TMessagesProj/src/emojis/apple/emoji/7_189.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_19.png b/TMessagesProj/src/emojis/apple/emoji/7_19.png index 4a0f248f3e..1898e45379 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_19.png and 
b/TMessagesProj/src/emojis/apple/emoji/7_19.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_190.png b/TMessagesProj/src/emojis/apple/emoji/7_190.png index 94f7ae2226..14bd7518fc 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_190.png and b/TMessagesProj/src/emojis/apple/emoji/7_190.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_191.png b/TMessagesProj/src/emojis/apple/emoji/7_191.png index e7b7e3f61e..858884aafb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_191.png and b/TMessagesProj/src/emojis/apple/emoji/7_191.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_192.png b/TMessagesProj/src/emojis/apple/emoji/7_192.png index 1bb46d43c7..819b1b37b0 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_192.png and b/TMessagesProj/src/emojis/apple/emoji/7_192.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_193.png b/TMessagesProj/src/emojis/apple/emoji/7_193.png index b5526ad61c..306e8c3713 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_193.png and b/TMessagesProj/src/emojis/apple/emoji/7_193.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_194.png b/TMessagesProj/src/emojis/apple/emoji/7_194.png index 430ced2beb..7c3a56cc12 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_194.png and b/TMessagesProj/src/emojis/apple/emoji/7_194.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_195.png b/TMessagesProj/src/emojis/apple/emoji/7_195.png index a05d2dde6c..0bcf3f0543 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_195.png and b/TMessagesProj/src/emojis/apple/emoji/7_195.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_196.png b/TMessagesProj/src/emojis/apple/emoji/7_196.png index b0a7f7dc02..9cbea67781 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_196.png and b/TMessagesProj/src/emojis/apple/emoji/7_196.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_197.png b/TMessagesProj/src/emojis/apple/emoji/7_197.png index f1690a5030..cc477362db 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_197.png and b/TMessagesProj/src/emojis/apple/emoji/7_197.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_198.png b/TMessagesProj/src/emojis/apple/emoji/7_198.png index d51c738855..33cbbf2056 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_198.png and b/TMessagesProj/src/emojis/apple/emoji/7_198.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_199.png b/TMessagesProj/src/emojis/apple/emoji/7_199.png index ab785a0d20..340a928558 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_199.png and b/TMessagesProj/src/emojis/apple/emoji/7_199.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_2.png b/TMessagesProj/src/emojis/apple/emoji/7_2.png index 256f1712f5..4295efaa69 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_2.png and b/TMessagesProj/src/emojis/apple/emoji/7_2.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_20.png b/TMessagesProj/src/emojis/apple/emoji/7_20.png index 49ab12f93d..28f84f4de4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_20.png and b/TMessagesProj/src/emojis/apple/emoji/7_20.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_200.png b/TMessagesProj/src/emojis/apple/emoji/7_200.png index 291ab0806d..48cdf7ba6d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_200.png and b/TMessagesProj/src/emojis/apple/emoji/7_200.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/7_201.png b/TMessagesProj/src/emojis/apple/emoji/7_201.png index a27e239f34..498414569d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_201.png and b/TMessagesProj/src/emojis/apple/emoji/7_201.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_202.png b/TMessagesProj/src/emojis/apple/emoji/7_202.png index 7e1371f6eb..b5d7231a66 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_202.png and b/TMessagesProj/src/emojis/apple/emoji/7_202.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_203.png b/TMessagesProj/src/emojis/apple/emoji/7_203.png index 46e1ef853a..51e738d00d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_203.png and b/TMessagesProj/src/emojis/apple/emoji/7_203.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_204.png b/TMessagesProj/src/emojis/apple/emoji/7_204.png index 2446b2d547..98a18903e4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_204.png and b/TMessagesProj/src/emojis/apple/emoji/7_204.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_205.png b/TMessagesProj/src/emojis/apple/emoji/7_205.png index 7eb89bbd69..072f69648b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_205.png and b/TMessagesProj/src/emojis/apple/emoji/7_205.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_206.png b/TMessagesProj/src/emojis/apple/emoji/7_206.png index 1ddf0049da..a751282620 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_206.png and b/TMessagesProj/src/emojis/apple/emoji/7_206.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_207.png b/TMessagesProj/src/emojis/apple/emoji/7_207.png index bad1e3774c..f19d6c987e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_207.png and b/TMessagesProj/src/emojis/apple/emoji/7_207.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_208.png b/TMessagesProj/src/emojis/apple/emoji/7_208.png index 539b3b99b9..92b9347ef1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_208.png and b/TMessagesProj/src/emojis/apple/emoji/7_208.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_209.png b/TMessagesProj/src/emojis/apple/emoji/7_209.png index e0c46ef556..79d949f2ae 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_209.png and b/TMessagesProj/src/emojis/apple/emoji/7_209.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_21.png b/TMessagesProj/src/emojis/apple/emoji/7_21.png index 3c028be5a2..8067895c78 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_21.png and b/TMessagesProj/src/emojis/apple/emoji/7_21.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_210.png b/TMessagesProj/src/emojis/apple/emoji/7_210.png index 8ffb6367fc..6ef769c9ad 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_210.png and b/TMessagesProj/src/emojis/apple/emoji/7_210.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_211.png b/TMessagesProj/src/emojis/apple/emoji/7_211.png index 6ffd88171c..823d1b22f8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_211.png and b/TMessagesProj/src/emojis/apple/emoji/7_211.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_212.png b/TMessagesProj/src/emojis/apple/emoji/7_212.png index 14a2b42156..81b61b311b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_212.png and b/TMessagesProj/src/emojis/apple/emoji/7_212.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_213.png 
b/TMessagesProj/src/emojis/apple/emoji/7_213.png index f47c047d12..0ad245076b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_213.png and b/TMessagesProj/src/emojis/apple/emoji/7_213.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_214.png b/TMessagesProj/src/emojis/apple/emoji/7_214.png index f3fecb6126..48e6c5edf8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_214.png and b/TMessagesProj/src/emojis/apple/emoji/7_214.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_215.png b/TMessagesProj/src/emojis/apple/emoji/7_215.png index 8166887225..3ced8b9d4c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_215.png and b/TMessagesProj/src/emojis/apple/emoji/7_215.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_216.png b/TMessagesProj/src/emojis/apple/emoji/7_216.png index cacee36e9b..5ad9cc2a90 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_216.png and b/TMessagesProj/src/emojis/apple/emoji/7_216.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_217.png b/TMessagesProj/src/emojis/apple/emoji/7_217.png index 5adb35c550..f6c6526171 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_217.png and b/TMessagesProj/src/emojis/apple/emoji/7_217.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_218.png b/TMessagesProj/src/emojis/apple/emoji/7_218.png index f06d6ec15c..e11f309e61 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_218.png and b/TMessagesProj/src/emojis/apple/emoji/7_218.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_219.png b/TMessagesProj/src/emojis/apple/emoji/7_219.png index 4716667e20..569030be0f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_219.png and b/TMessagesProj/src/emojis/apple/emoji/7_219.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_22.png b/TMessagesProj/src/emojis/apple/emoji/7_22.png index ab68d2c76d..f7bdb50101 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_22.png and b/TMessagesProj/src/emojis/apple/emoji/7_22.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_220.png b/TMessagesProj/src/emojis/apple/emoji/7_220.png index 2f710f147c..eac75c1be8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_220.png and b/TMessagesProj/src/emojis/apple/emoji/7_220.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_221.png b/TMessagesProj/src/emojis/apple/emoji/7_221.png index 260fd3e6c4..17f1f94016 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_221.png and b/TMessagesProj/src/emojis/apple/emoji/7_221.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_222.png b/TMessagesProj/src/emojis/apple/emoji/7_222.png index c514425af6..8f8f6930d4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_222.png and b/TMessagesProj/src/emojis/apple/emoji/7_222.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_223.png b/TMessagesProj/src/emojis/apple/emoji/7_223.png index 613846852a..14f032299c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_223.png and b/TMessagesProj/src/emojis/apple/emoji/7_223.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_224.png b/TMessagesProj/src/emojis/apple/emoji/7_224.png index a867f4d652..2e1ff69bc7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_224.png and b/TMessagesProj/src/emojis/apple/emoji/7_224.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_225.png b/TMessagesProj/src/emojis/apple/emoji/7_225.png index c69c736fea..f29b8f7e30 100644 
Binary files a/TMessagesProj/src/emojis/apple/emoji/7_225.png and b/TMessagesProj/src/emojis/apple/emoji/7_225.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_226.png b/TMessagesProj/src/emojis/apple/emoji/7_226.png index 635036ffed..6dcbd03c8e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_226.png and b/TMessagesProj/src/emojis/apple/emoji/7_226.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_227.png b/TMessagesProj/src/emojis/apple/emoji/7_227.png index f3dfe551d7..26b6d9ae2f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_227.png and b/TMessagesProj/src/emojis/apple/emoji/7_227.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_228.png b/TMessagesProj/src/emojis/apple/emoji/7_228.png index 963ed0a634..22a2949065 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_228.png and b/TMessagesProj/src/emojis/apple/emoji/7_228.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_229.png b/TMessagesProj/src/emojis/apple/emoji/7_229.png index 2de722d1bd..ddb796107b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_229.png and b/TMessagesProj/src/emojis/apple/emoji/7_229.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_23.png b/TMessagesProj/src/emojis/apple/emoji/7_23.png index ccb1ce0644..53cbbad973 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_23.png and b/TMessagesProj/src/emojis/apple/emoji/7_23.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_230.png b/TMessagesProj/src/emojis/apple/emoji/7_230.png index 4197792765..10c6e41684 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_230.png and b/TMessagesProj/src/emojis/apple/emoji/7_230.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_231.png b/TMessagesProj/src/emojis/apple/emoji/7_231.png index ec13e836de..614b151e9c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_231.png and b/TMessagesProj/src/emojis/apple/emoji/7_231.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_232.png b/TMessagesProj/src/emojis/apple/emoji/7_232.png index da9eebf540..1e279264a9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_232.png and b/TMessagesProj/src/emojis/apple/emoji/7_232.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_233.png b/TMessagesProj/src/emojis/apple/emoji/7_233.png index f32bd48211..5945403da9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_233.png and b/TMessagesProj/src/emojis/apple/emoji/7_233.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_234.png b/TMessagesProj/src/emojis/apple/emoji/7_234.png index d4dd941aa5..62f7c73582 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_234.png and b/TMessagesProj/src/emojis/apple/emoji/7_234.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_235.png b/TMessagesProj/src/emojis/apple/emoji/7_235.png index 7c19691056..1866a8f74b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_235.png and b/TMessagesProj/src/emojis/apple/emoji/7_235.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_236.png b/TMessagesProj/src/emojis/apple/emoji/7_236.png index 1297a29c5b..23466b4d49 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_236.png and b/TMessagesProj/src/emojis/apple/emoji/7_236.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_237.png b/TMessagesProj/src/emojis/apple/emoji/7_237.png index b2b23d46d1..1485154e6b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_237.png and 
b/TMessagesProj/src/emojis/apple/emoji/7_237.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_238.png b/TMessagesProj/src/emojis/apple/emoji/7_238.png index 74c8fd05ca..1fceb12768 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_238.png and b/TMessagesProj/src/emojis/apple/emoji/7_238.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_239.png b/TMessagesProj/src/emojis/apple/emoji/7_239.png index 9bf93a4e23..156283fe0b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_239.png and b/TMessagesProj/src/emojis/apple/emoji/7_239.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_24.png b/TMessagesProj/src/emojis/apple/emoji/7_24.png index da6f2ec5a1..481468ec8b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_24.png and b/TMessagesProj/src/emojis/apple/emoji/7_24.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_240.png b/TMessagesProj/src/emojis/apple/emoji/7_240.png index aeb1283351..a58db21b2a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_240.png and b/TMessagesProj/src/emojis/apple/emoji/7_240.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_241.png b/TMessagesProj/src/emojis/apple/emoji/7_241.png index 998b015538..c36111bdf9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_241.png and b/TMessagesProj/src/emojis/apple/emoji/7_241.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_242.png b/TMessagesProj/src/emojis/apple/emoji/7_242.png index c5cd05a364..f178d8cfbe 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_242.png and b/TMessagesProj/src/emojis/apple/emoji/7_242.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_243.png b/TMessagesProj/src/emojis/apple/emoji/7_243.png index e0aee11e38..1b453f204e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_243.png and b/TMessagesProj/src/emojis/apple/emoji/7_243.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_244.png b/TMessagesProj/src/emojis/apple/emoji/7_244.png index b2824e4925..e4b172bccb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_244.png and b/TMessagesProj/src/emojis/apple/emoji/7_244.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_245.png b/TMessagesProj/src/emojis/apple/emoji/7_245.png index f80de6e618..4b5ca52bd4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_245.png and b/TMessagesProj/src/emojis/apple/emoji/7_245.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_246.png b/TMessagesProj/src/emojis/apple/emoji/7_246.png index 477140d6cc..ce44511d77 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_246.png and b/TMessagesProj/src/emojis/apple/emoji/7_246.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_247.png b/TMessagesProj/src/emojis/apple/emoji/7_247.png index f173b383a9..a5aa7f50e2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_247.png and b/TMessagesProj/src/emojis/apple/emoji/7_247.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_248.png b/TMessagesProj/src/emojis/apple/emoji/7_248.png index 4b3043801d..65e7ef59a8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_248.png and b/TMessagesProj/src/emojis/apple/emoji/7_248.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_249.png b/TMessagesProj/src/emojis/apple/emoji/7_249.png index 033e4aaf4b..ce3b032f64 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_249.png and b/TMessagesProj/src/emojis/apple/emoji/7_249.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/7_25.png b/TMessagesProj/src/emojis/apple/emoji/7_25.png index d83d7f0c71..4c9656bc3e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_25.png and b/TMessagesProj/src/emojis/apple/emoji/7_25.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_250.png b/TMessagesProj/src/emojis/apple/emoji/7_250.png index b18ed451e4..348c8ef5f3 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_250.png and b/TMessagesProj/src/emojis/apple/emoji/7_250.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_251.png b/TMessagesProj/src/emojis/apple/emoji/7_251.png index 56bb8140e2..d28aa8048e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_251.png and b/TMessagesProj/src/emojis/apple/emoji/7_251.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_252.png b/TMessagesProj/src/emojis/apple/emoji/7_252.png index 4e7cf9f57c..b2d8c5682e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_252.png and b/TMessagesProj/src/emojis/apple/emoji/7_252.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_253.png b/TMessagesProj/src/emojis/apple/emoji/7_253.png index 3c81aad5cd..ae6aa06cd1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_253.png and b/TMessagesProj/src/emojis/apple/emoji/7_253.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_254.png b/TMessagesProj/src/emojis/apple/emoji/7_254.png index 16384aedb1..f31fece7c1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_254.png and b/TMessagesProj/src/emojis/apple/emoji/7_254.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_255.png b/TMessagesProj/src/emojis/apple/emoji/7_255.png index 669e22a67e..b7db906579 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_255.png and b/TMessagesProj/src/emojis/apple/emoji/7_255.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_256.png b/TMessagesProj/src/emojis/apple/emoji/7_256.png index 30fae24dce..daf2d5221a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_256.png and b/TMessagesProj/src/emojis/apple/emoji/7_256.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_257.png b/TMessagesProj/src/emojis/apple/emoji/7_257.png index f6ca945cab..819b949bf7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_257.png and b/TMessagesProj/src/emojis/apple/emoji/7_257.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_258.png b/TMessagesProj/src/emojis/apple/emoji/7_258.png index 52e42486af..91255a3d2b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_258.png and b/TMessagesProj/src/emojis/apple/emoji/7_258.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_26.png b/TMessagesProj/src/emojis/apple/emoji/7_26.png index 31e703513a..3e507adf6f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_26.png and b/TMessagesProj/src/emojis/apple/emoji/7_26.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_27.png b/TMessagesProj/src/emojis/apple/emoji/7_27.png index 949554e445..e6ee274d22 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_27.png and b/TMessagesProj/src/emojis/apple/emoji/7_27.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_28.png b/TMessagesProj/src/emojis/apple/emoji/7_28.png index c9dc269408..502e0ace43 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_28.png and b/TMessagesProj/src/emojis/apple/emoji/7_28.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_29.png b/TMessagesProj/src/emojis/apple/emoji/7_29.png 
index 9a8a0a6e6f..88428eb03d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_29.png and b/TMessagesProj/src/emojis/apple/emoji/7_29.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_3.png b/TMessagesProj/src/emojis/apple/emoji/7_3.png index f711a9a0b1..65e6f4634e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_3.png and b/TMessagesProj/src/emojis/apple/emoji/7_3.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_30.png b/TMessagesProj/src/emojis/apple/emoji/7_30.png index 6e7e7c459e..37ebfbb525 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_30.png and b/TMessagesProj/src/emojis/apple/emoji/7_30.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_31.png b/TMessagesProj/src/emojis/apple/emoji/7_31.png index 489ba6b4c9..02a9ba5122 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_31.png and b/TMessagesProj/src/emojis/apple/emoji/7_31.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_32.png b/TMessagesProj/src/emojis/apple/emoji/7_32.png index ec379195bf..66ae3dae3f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_32.png and b/TMessagesProj/src/emojis/apple/emoji/7_32.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_33.png b/TMessagesProj/src/emojis/apple/emoji/7_33.png index 83ba6fba6e..55edf34135 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_33.png and b/TMessagesProj/src/emojis/apple/emoji/7_33.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_34.png b/TMessagesProj/src/emojis/apple/emoji/7_34.png index 9834b97fe2..3844480c93 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_34.png and b/TMessagesProj/src/emojis/apple/emoji/7_34.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_35.png b/TMessagesProj/src/emojis/apple/emoji/7_35.png index fc782e8999..1c7852afe5 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_35.png and b/TMessagesProj/src/emojis/apple/emoji/7_35.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_36.png b/TMessagesProj/src/emojis/apple/emoji/7_36.png index a576370f08..daf93f8fad 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_36.png and b/TMessagesProj/src/emojis/apple/emoji/7_36.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_37.png b/TMessagesProj/src/emojis/apple/emoji/7_37.png index b15ac1de88..2488703a83 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_37.png and b/TMessagesProj/src/emojis/apple/emoji/7_37.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_38.png b/TMessagesProj/src/emojis/apple/emoji/7_38.png index ae4d506a00..06c7e6c190 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_38.png and b/TMessagesProj/src/emojis/apple/emoji/7_38.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_39.png b/TMessagesProj/src/emojis/apple/emoji/7_39.png index fdb9e41d6f..cff01d961c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_39.png and b/TMessagesProj/src/emojis/apple/emoji/7_39.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_4.png b/TMessagesProj/src/emojis/apple/emoji/7_4.png index df40b9a8e9..9803c910fb 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_4.png and b/TMessagesProj/src/emojis/apple/emoji/7_4.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_40.png b/TMessagesProj/src/emojis/apple/emoji/7_40.png index cb394146b3..8d6bdb6779 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_40.png and 
b/TMessagesProj/src/emojis/apple/emoji/7_40.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_41.png b/TMessagesProj/src/emojis/apple/emoji/7_41.png index cca34bcb4c..2fb92b565e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_41.png and b/TMessagesProj/src/emojis/apple/emoji/7_41.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_42.png b/TMessagesProj/src/emojis/apple/emoji/7_42.png index 708dc82e6c..90bf21b5e8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_42.png and b/TMessagesProj/src/emojis/apple/emoji/7_42.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_43.png b/TMessagesProj/src/emojis/apple/emoji/7_43.png index 7ca22918ac..21ff41484a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_43.png and b/TMessagesProj/src/emojis/apple/emoji/7_43.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_44.png b/TMessagesProj/src/emojis/apple/emoji/7_44.png index df90b9d958..a7dc3027a7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_44.png and b/TMessagesProj/src/emojis/apple/emoji/7_44.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_45.png b/TMessagesProj/src/emojis/apple/emoji/7_45.png index 915700213e..46f9afc75c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_45.png and b/TMessagesProj/src/emojis/apple/emoji/7_45.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_46.png b/TMessagesProj/src/emojis/apple/emoji/7_46.png index bdf1cf1a6c..2452abee57 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_46.png and b/TMessagesProj/src/emojis/apple/emoji/7_46.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_47.png b/TMessagesProj/src/emojis/apple/emoji/7_47.png index 2b98461b0b..1b9245dacf 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_47.png and b/TMessagesProj/src/emojis/apple/emoji/7_47.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_48.png b/TMessagesProj/src/emojis/apple/emoji/7_48.png index 3e758e42e0..13fcbf6062 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_48.png and b/TMessagesProj/src/emojis/apple/emoji/7_48.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_49.png b/TMessagesProj/src/emojis/apple/emoji/7_49.png index b6681b8ee9..ee9000eb7c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_49.png and b/TMessagesProj/src/emojis/apple/emoji/7_49.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_5.png b/TMessagesProj/src/emojis/apple/emoji/7_5.png index 256ac36a63..223533b941 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_5.png and b/TMessagesProj/src/emojis/apple/emoji/7_5.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_50.png b/TMessagesProj/src/emojis/apple/emoji/7_50.png index 91e331f1c7..5d9820a00a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_50.png and b/TMessagesProj/src/emojis/apple/emoji/7_50.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_51.png b/TMessagesProj/src/emojis/apple/emoji/7_51.png index a8a50c31eb..334424a0f7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_51.png and b/TMessagesProj/src/emojis/apple/emoji/7_51.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_52.png b/TMessagesProj/src/emojis/apple/emoji/7_52.png index 43aacdd3ca..96e7530713 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_52.png and b/TMessagesProj/src/emojis/apple/emoji/7_52.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_53.png 
b/TMessagesProj/src/emojis/apple/emoji/7_53.png index 76d85b2998..56ab1ecfb7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_53.png and b/TMessagesProj/src/emojis/apple/emoji/7_53.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_54.png b/TMessagesProj/src/emojis/apple/emoji/7_54.png index 9a46ffdff8..092ac5db43 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_54.png and b/TMessagesProj/src/emojis/apple/emoji/7_54.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_55.png b/TMessagesProj/src/emojis/apple/emoji/7_55.png index 3eba2b5ac4..a0de6ebda6 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_55.png and b/TMessagesProj/src/emojis/apple/emoji/7_55.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_56.png b/TMessagesProj/src/emojis/apple/emoji/7_56.png index 572f908c32..056a19222c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_56.png and b/TMessagesProj/src/emojis/apple/emoji/7_56.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_57.png b/TMessagesProj/src/emojis/apple/emoji/7_57.png index 1718eaa617..75a9255ce2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_57.png and b/TMessagesProj/src/emojis/apple/emoji/7_57.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_58.png b/TMessagesProj/src/emojis/apple/emoji/7_58.png index 0d9e33e709..f33cd77f4c 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_58.png and b/TMessagesProj/src/emojis/apple/emoji/7_58.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_59.png b/TMessagesProj/src/emojis/apple/emoji/7_59.png index 358bbe0f4b..2ddb773787 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_59.png and b/TMessagesProj/src/emojis/apple/emoji/7_59.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_6.png b/TMessagesProj/src/emojis/apple/emoji/7_6.png index 7c46e45b0c..c96bdb3986 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_6.png and b/TMessagesProj/src/emojis/apple/emoji/7_6.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_60.png b/TMessagesProj/src/emojis/apple/emoji/7_60.png index 6194822651..50351b6400 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_60.png and b/TMessagesProj/src/emojis/apple/emoji/7_60.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_61.png b/TMessagesProj/src/emojis/apple/emoji/7_61.png index a59818017e..610faf5411 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_61.png and b/TMessagesProj/src/emojis/apple/emoji/7_61.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_62.png b/TMessagesProj/src/emojis/apple/emoji/7_62.png index 214001b51a..eb6a407a16 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_62.png and b/TMessagesProj/src/emojis/apple/emoji/7_62.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_63.png b/TMessagesProj/src/emojis/apple/emoji/7_63.png index 5d73a08a70..28b4d21543 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_63.png and b/TMessagesProj/src/emojis/apple/emoji/7_63.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_64.png b/TMessagesProj/src/emojis/apple/emoji/7_64.png index 343a5ce119..de9c04a623 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_64.png and b/TMessagesProj/src/emojis/apple/emoji/7_64.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_65.png b/TMessagesProj/src/emojis/apple/emoji/7_65.png index 35e3cd662f..e196e73840 100644 Binary files 
a/TMessagesProj/src/emojis/apple/emoji/7_65.png and b/TMessagesProj/src/emojis/apple/emoji/7_65.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_66.png b/TMessagesProj/src/emojis/apple/emoji/7_66.png index 2aecf487b6..c204483446 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_66.png and b/TMessagesProj/src/emojis/apple/emoji/7_66.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_67.png b/TMessagesProj/src/emojis/apple/emoji/7_67.png index 400f8ff73b..cd681478e8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_67.png and b/TMessagesProj/src/emojis/apple/emoji/7_67.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_68.png b/TMessagesProj/src/emojis/apple/emoji/7_68.png index 7b1ddcbc0b..623d196920 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_68.png and b/TMessagesProj/src/emojis/apple/emoji/7_68.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_69.png b/TMessagesProj/src/emojis/apple/emoji/7_69.png index c274e67250..97ba2c087e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_69.png and b/TMessagesProj/src/emojis/apple/emoji/7_69.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_7.png b/TMessagesProj/src/emojis/apple/emoji/7_7.png index fdb7b42211..a2f7e5e1ab 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_7.png and b/TMessagesProj/src/emojis/apple/emoji/7_7.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_70.png b/TMessagesProj/src/emojis/apple/emoji/7_70.png index f1f4553391..d8a2978622 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_70.png and b/TMessagesProj/src/emojis/apple/emoji/7_70.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_71.png b/TMessagesProj/src/emojis/apple/emoji/7_71.png index e7058bea1e..3709e83a50 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_71.png and b/TMessagesProj/src/emojis/apple/emoji/7_71.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_72.png b/TMessagesProj/src/emojis/apple/emoji/7_72.png index 316872b300..93ec29501e 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_72.png and b/TMessagesProj/src/emojis/apple/emoji/7_72.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_73.png b/TMessagesProj/src/emojis/apple/emoji/7_73.png index a7577114cd..3ca170d763 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_73.png and b/TMessagesProj/src/emojis/apple/emoji/7_73.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_74.png b/TMessagesProj/src/emojis/apple/emoji/7_74.png index c0043d7aa8..7a681f2a24 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_74.png and b/TMessagesProj/src/emojis/apple/emoji/7_74.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_75.png b/TMessagesProj/src/emojis/apple/emoji/7_75.png index c00325c408..feec8f7458 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_75.png and b/TMessagesProj/src/emojis/apple/emoji/7_75.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_76.png b/TMessagesProj/src/emojis/apple/emoji/7_76.png index 384059b637..8605c631d8 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_76.png and b/TMessagesProj/src/emojis/apple/emoji/7_76.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_77.png b/TMessagesProj/src/emojis/apple/emoji/7_77.png index 3cdd89516f..16555baf5b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_77.png and b/TMessagesProj/src/emojis/apple/emoji/7_77.png differ diff --git 
a/TMessagesProj/src/emojis/apple/emoji/7_78.png b/TMessagesProj/src/emojis/apple/emoji/7_78.png index 91015f3fbe..40bd16d832 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_78.png and b/TMessagesProj/src/emojis/apple/emoji/7_78.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_79.png b/TMessagesProj/src/emojis/apple/emoji/7_79.png index edf0c25e52..f23666131d 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_79.png and b/TMessagesProj/src/emojis/apple/emoji/7_79.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_8.png b/TMessagesProj/src/emojis/apple/emoji/7_8.png index 8ac5017b88..34682fb0f2 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_8.png and b/TMessagesProj/src/emojis/apple/emoji/7_8.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_80.png b/TMessagesProj/src/emojis/apple/emoji/7_80.png index 1feed4bffd..c8feed7b9b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_80.png and b/TMessagesProj/src/emojis/apple/emoji/7_80.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_81.png b/TMessagesProj/src/emojis/apple/emoji/7_81.png index e3c19282a2..ccf51c21bd 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_81.png and b/TMessagesProj/src/emojis/apple/emoji/7_81.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_82.png b/TMessagesProj/src/emojis/apple/emoji/7_82.png index dcfa3f70f8..e4551f8497 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_82.png and b/TMessagesProj/src/emojis/apple/emoji/7_82.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_83.png b/TMessagesProj/src/emojis/apple/emoji/7_83.png index bae98bda63..907e8953f9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_83.png and b/TMessagesProj/src/emojis/apple/emoji/7_83.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_84.png b/TMessagesProj/src/emojis/apple/emoji/7_84.png index d561b3b84d..6c02844c9b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_84.png and b/TMessagesProj/src/emojis/apple/emoji/7_84.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_85.png b/TMessagesProj/src/emojis/apple/emoji/7_85.png index 7d33cc6499..a1da7d795f 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_85.png and b/TMessagesProj/src/emojis/apple/emoji/7_85.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_86.png b/TMessagesProj/src/emojis/apple/emoji/7_86.png index 51029c1a39..36fd781b54 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_86.png and b/TMessagesProj/src/emojis/apple/emoji/7_86.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_87.png b/TMessagesProj/src/emojis/apple/emoji/7_87.png index c3658dac5a..1826cdec1b 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_87.png and b/TMessagesProj/src/emojis/apple/emoji/7_87.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_88.png b/TMessagesProj/src/emojis/apple/emoji/7_88.png index 91ce89e965..b436088ca4 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_88.png and b/TMessagesProj/src/emojis/apple/emoji/7_88.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_89.png b/TMessagesProj/src/emojis/apple/emoji/7_89.png index b1f526354c..c51aad2a3a 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_89.png and b/TMessagesProj/src/emojis/apple/emoji/7_89.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_9.png b/TMessagesProj/src/emojis/apple/emoji/7_9.png index ad08e03f8d..760ad712d1 100644 Binary 
files a/TMessagesProj/src/emojis/apple/emoji/7_9.png and b/TMessagesProj/src/emojis/apple/emoji/7_9.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_90.png b/TMessagesProj/src/emojis/apple/emoji/7_90.png index 530ac04429..7abd004d21 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_90.png and b/TMessagesProj/src/emojis/apple/emoji/7_90.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_91.png b/TMessagesProj/src/emojis/apple/emoji/7_91.png index f1c1a9ce97..ac6720b303 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_91.png and b/TMessagesProj/src/emojis/apple/emoji/7_91.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_92.png b/TMessagesProj/src/emojis/apple/emoji/7_92.png index e676c99b65..2d0a148950 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_92.png and b/TMessagesProj/src/emojis/apple/emoji/7_92.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_93.png b/TMessagesProj/src/emojis/apple/emoji/7_93.png index bdc89449ab..68b1d8a061 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_93.png and b/TMessagesProj/src/emojis/apple/emoji/7_93.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_94.png b/TMessagesProj/src/emojis/apple/emoji/7_94.png index 5ab1bda3ff..c871f8efad 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_94.png and b/TMessagesProj/src/emojis/apple/emoji/7_94.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_95.png b/TMessagesProj/src/emojis/apple/emoji/7_95.png index eb4c9a3046..5a81db0514 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_95.png and b/TMessagesProj/src/emojis/apple/emoji/7_95.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_96.png b/TMessagesProj/src/emojis/apple/emoji/7_96.png index 504f814562..766373b8d1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_96.png and b/TMessagesProj/src/emojis/apple/emoji/7_96.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_97.png b/TMessagesProj/src/emojis/apple/emoji/7_97.png index 68c3e3d62f..199842b0f1 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_97.png and b/TMessagesProj/src/emojis/apple/emoji/7_97.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_98.png b/TMessagesProj/src/emojis/apple/emoji/7_98.png index 0e55cbd47d..ce4bbc85b9 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_98.png and b/TMessagesProj/src/emojis/apple/emoji/7_98.png differ diff --git a/TMessagesProj/src/emojis/apple/emoji/7_99.png b/TMessagesProj/src/emojis/apple/emoji/7_99.png index a3aea6af68..a8f0ff8cd7 100644 Binary files a/TMessagesProj/src/emojis/apple/emoji/7_99.png and b/TMessagesProj/src/emojis/apple/emoji/7_99.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_0.png b/TMessagesProj/src/emojis/twitter/emoji/0_0.png index 6a43065ef9..cbbb154463 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_0.png and b/TMessagesProj/src/emojis/twitter/emoji/0_0.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1.png b/TMessagesProj/src/emojis/twitter/emoji/0_1.png index 87e2e65b35..bab1558ab1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_10.png b/TMessagesProj/src/emojis/twitter/emoji/0_10.png index 9aab83b768..7a32d98a28 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_10.png and b/TMessagesProj/src/emojis/twitter/emoji/0_10.png differ diff 
--git a/TMessagesProj/src/emojis/twitter/emoji/0_100.png b/TMessagesProj/src/emojis/twitter/emoji/0_100.png index 5f8e9d5510..3b0a8dcc55 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_100.png and b/TMessagesProj/src/emojis/twitter/emoji/0_100.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1000.png b/TMessagesProj/src/emojis/twitter/emoji/0_1000.png index 991f810a6c..0b9fcb552a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1000.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1000.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1001.png b/TMessagesProj/src/emojis/twitter/emoji/0_1001.png index 66871d4c9e..2768a04a81 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1001.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1001.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1002.png b/TMessagesProj/src/emojis/twitter/emoji/0_1002.png index 07d1692471..3fc94f249f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1002.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1002.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1003.png b/TMessagesProj/src/emojis/twitter/emoji/0_1003.png index 7fd260fc50..f48f75f590 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1003.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1003.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1004.png b/TMessagesProj/src/emojis/twitter/emoji/0_1004.png index 7418d064cb..8fd384afb3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1004.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1004.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1005.png b/TMessagesProj/src/emojis/twitter/emoji/0_1005.png index 2149dca17d..f8788bbeaf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1005.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1005.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1006.png b/TMessagesProj/src/emojis/twitter/emoji/0_1006.png index 242bd2f72f..224f15e5a3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1006.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1006.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1007.png b/TMessagesProj/src/emojis/twitter/emoji/0_1007.png index f14307e875..e1e94f3894 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1007.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1007.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1008.png b/TMessagesProj/src/emojis/twitter/emoji/0_1008.png index 8868e74556..f99758018b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1008.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1008.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1009.png b/TMessagesProj/src/emojis/twitter/emoji/0_1009.png index 3c5cbf7fb9..e3b1994d8b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1009.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1009.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_101.png b/TMessagesProj/src/emojis/twitter/emoji/0_101.png index 0199029bab..c78d936d45 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_101.png and b/TMessagesProj/src/emojis/twitter/emoji/0_101.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1010.png b/TMessagesProj/src/emojis/twitter/emoji/0_1010.png index 5347dbe494..3dc0501943 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1010.png and 
b/TMessagesProj/src/emojis/twitter/emoji/0_1010.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1011.png b/TMessagesProj/src/emojis/twitter/emoji/0_1011.png index 4828f19fa0..b32fb203f7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1011.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1011.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1012.png b/TMessagesProj/src/emojis/twitter/emoji/0_1012.png index ca7f4ad6b4..7ef10a83f7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1012.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1012.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1013.png b/TMessagesProj/src/emojis/twitter/emoji/0_1013.png index 81139d230b..0c6da9b0eb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1013.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1013.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1014.png b/TMessagesProj/src/emojis/twitter/emoji/0_1014.png index 1506bfeb8d..f8c8de51d6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1014.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1014.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1015.png b/TMessagesProj/src/emojis/twitter/emoji/0_1015.png index 244f785ea9..7df45ab458 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1015.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1015.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1016.png b/TMessagesProj/src/emojis/twitter/emoji/0_1016.png index d1f2cfeea5..476dd7dd05 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1016.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1016.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1017.png b/TMessagesProj/src/emojis/twitter/emoji/0_1017.png index fd45d976f8..d30dc7b6e7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1017.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1017.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1018.png b/TMessagesProj/src/emojis/twitter/emoji/0_1018.png index 9df305691c..f0fbab7a52 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1018.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1018.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1019.png b/TMessagesProj/src/emojis/twitter/emoji/0_1019.png index fa9ab4591e..17bb084703 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1019.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1019.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_102.png b/TMessagesProj/src/emojis/twitter/emoji/0_102.png index 7f83be1dbe..20b65070e5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_102.png and b/TMessagesProj/src/emojis/twitter/emoji/0_102.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1020.png b/TMessagesProj/src/emojis/twitter/emoji/0_1020.png index 5fa741e2db..6fe955c166 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1020.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1020.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1021.png b/TMessagesProj/src/emojis/twitter/emoji/0_1021.png index 274b45bfd0..8df3011a99 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1021.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1021.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1022.png b/TMessagesProj/src/emojis/twitter/emoji/0_1022.png index 5d05cd00a6..e0d11d4014 100644 
Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1022.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1022.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1023.png b/TMessagesProj/src/emojis/twitter/emoji/0_1023.png index 9c81ac7858..e27a9e61aa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1023.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1023.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1024.png b/TMessagesProj/src/emojis/twitter/emoji/0_1024.png index f8087cf2e8..2ae2552fed 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1024.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1024.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1025.png b/TMessagesProj/src/emojis/twitter/emoji/0_1025.png index 9825d05930..beab115a83 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1025.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1025.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1026.png b/TMessagesProj/src/emojis/twitter/emoji/0_1026.png index 4fa08e8a83..2895d0bb8c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1026.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1026.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1027.png b/TMessagesProj/src/emojis/twitter/emoji/0_1027.png index 777c18fa20..d46e17d710 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1027.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1027.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1028.png b/TMessagesProj/src/emojis/twitter/emoji/0_1028.png index 2af464b5d7..cdae297c6e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1028.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1028.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1029.png b/TMessagesProj/src/emojis/twitter/emoji/0_1029.png index f03c7e6aa8..04e504203c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1029.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1029.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_103.png b/TMessagesProj/src/emojis/twitter/emoji/0_103.png index e873ad55ed..0b0fc66c95 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_103.png and b/TMessagesProj/src/emojis/twitter/emoji/0_103.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1030.png b/TMessagesProj/src/emojis/twitter/emoji/0_1030.png index b6eeb07106..19ce880871 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1030.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1030.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1031.png b/TMessagesProj/src/emojis/twitter/emoji/0_1031.png index 978c5cb0b8..29d4f4e8d4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1031.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1031.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1032.png b/TMessagesProj/src/emojis/twitter/emoji/0_1032.png index b15db60b4e..7da6ba8fc4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1032.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1032.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1033.png b/TMessagesProj/src/emojis/twitter/emoji/0_1033.png index 5643aade53..b2dec14b81 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1033.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1033.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1034.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1034.png index e135c9a257..c37d3a6f87 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1034.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1034.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1035.png b/TMessagesProj/src/emojis/twitter/emoji/0_1035.png index 57efabfdf6..cf2eb882e0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1035.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1035.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1036.png b/TMessagesProj/src/emojis/twitter/emoji/0_1036.png index 82dd72416b..4162a78c13 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1036.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1036.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1037.png b/TMessagesProj/src/emojis/twitter/emoji/0_1037.png index 4ca82b1fd2..347af7df9c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1037.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1037.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1038.png b/TMessagesProj/src/emojis/twitter/emoji/0_1038.png index 241f5fc103..e4c16a93eb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1038.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1038.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1039.png b/TMessagesProj/src/emojis/twitter/emoji/0_1039.png index 8ea2005055..f84bd99b2a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1039.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1039.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_104.png b/TMessagesProj/src/emojis/twitter/emoji/0_104.png index b0b44c4abe..e8c17a26fa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_104.png and b/TMessagesProj/src/emojis/twitter/emoji/0_104.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1040.png b/TMessagesProj/src/emojis/twitter/emoji/0_1040.png index 50975dbb1c..4308c5fef8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1040.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1040.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1041.png b/TMessagesProj/src/emojis/twitter/emoji/0_1041.png index 0c9f72f3e6..e4d708611a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1041.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1041.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1042.png b/TMessagesProj/src/emojis/twitter/emoji/0_1042.png index f4dcb0e5a2..7a05da1ce9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1042.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1042.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1043.png b/TMessagesProj/src/emojis/twitter/emoji/0_1043.png index 8ad3417fd3..f244cd0fbc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1043.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1043.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1044.png b/TMessagesProj/src/emojis/twitter/emoji/0_1044.png index b26ac46df7..25e50ffc09 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1044.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1044.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1045.png b/TMessagesProj/src/emojis/twitter/emoji/0_1045.png index de753e408b..f3898d3c12 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1045.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1045.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1046.png b/TMessagesProj/src/emojis/twitter/emoji/0_1046.png index 9c4c2000ef..c6ba8e48ac 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1046.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1046.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1047.png b/TMessagesProj/src/emojis/twitter/emoji/0_1047.png index c6184061a3..88054d2a3d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1047.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1047.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1048.png b/TMessagesProj/src/emojis/twitter/emoji/0_1048.png index c3685c2a43..46ffa3bb0d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1048.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1048.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1049.png b/TMessagesProj/src/emojis/twitter/emoji/0_1049.png index e26e74e391..50aa73d88a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1049.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1049.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_105.png b/TMessagesProj/src/emojis/twitter/emoji/0_105.png index ca86f2b128..c5043a3f76 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_105.png and b/TMessagesProj/src/emojis/twitter/emoji/0_105.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1050.png b/TMessagesProj/src/emojis/twitter/emoji/0_1050.png index 2db06bda04..a263ee397f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1050.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1050.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1051.png b/TMessagesProj/src/emojis/twitter/emoji/0_1051.png index 8fa648da73..8769ed08a6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1051.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1051.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1052.png b/TMessagesProj/src/emojis/twitter/emoji/0_1052.png index 2671a8840a..72456d304c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1052.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1052.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1053.png b/TMessagesProj/src/emojis/twitter/emoji/0_1053.png index af263bb934..6740c71e75 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1053.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1053.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1054.png b/TMessagesProj/src/emojis/twitter/emoji/0_1054.png index a205b301c9..8e009a18f4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1054.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1054.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1055.png b/TMessagesProj/src/emojis/twitter/emoji/0_1055.png index 02362382dd..64c5336afd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1055.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1055.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1056.png b/TMessagesProj/src/emojis/twitter/emoji/0_1056.png index 17143bf084..8101cc3f23 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1056.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1056.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1057.png b/TMessagesProj/src/emojis/twitter/emoji/0_1057.png index 614ab049ea..458b97c136 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1057.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1057.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1058.png b/TMessagesProj/src/emojis/twitter/emoji/0_1058.png index 9d4e7cea60..726ef3f3cf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1058.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1058.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1059.png b/TMessagesProj/src/emojis/twitter/emoji/0_1059.png index 3185bb8b9f..bfb02d9baa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1059.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1059.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_106.png b/TMessagesProj/src/emojis/twitter/emoji/0_106.png index b1b2cf75fb..69c2889be4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_106.png and b/TMessagesProj/src/emojis/twitter/emoji/0_106.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1060.png b/TMessagesProj/src/emojis/twitter/emoji/0_1060.png index 403c649eff..7f75a652a4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1060.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1060.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1061.png b/TMessagesProj/src/emojis/twitter/emoji/0_1061.png index 125ac4aed3..70f0fb9a4d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1061.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1061.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1062.png b/TMessagesProj/src/emojis/twitter/emoji/0_1062.png index e5f4ad6557..7574325ee0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1062.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1062.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1063.png b/TMessagesProj/src/emojis/twitter/emoji/0_1063.png index d1f24bfa83..3ca9f92658 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1063.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1063.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1064.png b/TMessagesProj/src/emojis/twitter/emoji/0_1064.png index 85990c0e05..097d6bb1eb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1064.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1064.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1065.png b/TMessagesProj/src/emojis/twitter/emoji/0_1065.png index 0b7081b1da..73d01cbd5c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1065.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1065.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1066.png b/TMessagesProj/src/emojis/twitter/emoji/0_1066.png index c3de74204b..9e0902f451 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1066.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1066.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1067.png b/TMessagesProj/src/emojis/twitter/emoji/0_1067.png index 8ed1a222c5..aea068647d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1067.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1067.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1068.png b/TMessagesProj/src/emojis/twitter/emoji/0_1068.png index 0634dede28..e6b43d15da 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1068.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1068.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1069.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1069.png index ffd62baecb..f8d0efdb55 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1069.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1069.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_107.png b/TMessagesProj/src/emojis/twitter/emoji/0_107.png index c5ef067eb0..c2dcc1a366 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_107.png and b/TMessagesProj/src/emojis/twitter/emoji/0_107.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1070.png b/TMessagesProj/src/emojis/twitter/emoji/0_1070.png index 2bc0c110b6..0aa733da7d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1070.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1070.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1071.png b/TMessagesProj/src/emojis/twitter/emoji/0_1071.png index 4b60e8f8df..cb231af327 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1071.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1071.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1072.png b/TMessagesProj/src/emojis/twitter/emoji/0_1072.png index 966a640d61..7369c5cfe6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1072.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1072.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1073.png b/TMessagesProj/src/emojis/twitter/emoji/0_1073.png index 88c87594d9..72ee0d5192 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1073.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1073.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1074.png b/TMessagesProj/src/emojis/twitter/emoji/0_1074.png index 4d11a79bca..9fcd317ba2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1074.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1074.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1075.png b/TMessagesProj/src/emojis/twitter/emoji/0_1075.png index 86dd55bd38..ff839c3c09 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1075.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1075.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1076.png b/TMessagesProj/src/emojis/twitter/emoji/0_1076.png index 1bfb0e1428..7c886d956b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1076.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1076.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1077.png b/TMessagesProj/src/emojis/twitter/emoji/0_1077.png index e0b31d7266..426612628b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1077.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1077.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1078.png b/TMessagesProj/src/emojis/twitter/emoji/0_1078.png index 5161fa6eb5..8be29233dc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1078.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1078.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1079.png b/TMessagesProj/src/emojis/twitter/emoji/0_1079.png index 84fe1c4037..2b4d2e86cc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1079.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1079.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_108.png b/TMessagesProj/src/emojis/twitter/emoji/0_108.png index 820872e475..9b4c56552f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_108.png and b/TMessagesProj/src/emojis/twitter/emoji/0_108.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1080.png b/TMessagesProj/src/emojis/twitter/emoji/0_1080.png index 8cf932e4f8..a8ae844e22 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1080.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1080.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1081.png b/TMessagesProj/src/emojis/twitter/emoji/0_1081.png index c6840a98a5..a9fb023d79 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1081.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1081.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1082.png b/TMessagesProj/src/emojis/twitter/emoji/0_1082.png index ca1ca19d36..963badd183 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1082.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1082.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1083.png b/TMessagesProj/src/emojis/twitter/emoji/0_1083.png index 624dec16e8..15fba972ce 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1083.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1083.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1084.png b/TMessagesProj/src/emojis/twitter/emoji/0_1084.png index 1ca2367e40..3dace477e9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1084.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1084.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1085.png b/TMessagesProj/src/emojis/twitter/emoji/0_1085.png index a7fad6d86a..f550eed34d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1085.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1085.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1086.png b/TMessagesProj/src/emojis/twitter/emoji/0_1086.png index 5e0c2912fa..60228b5e86 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1086.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1086.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1087.png b/TMessagesProj/src/emojis/twitter/emoji/0_1087.png index ef30348ff9..737bac4685 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1087.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1087.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1088.png b/TMessagesProj/src/emojis/twitter/emoji/0_1088.png index 6dc16d3d8b..88959d061d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1088.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1088.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1089.png b/TMessagesProj/src/emojis/twitter/emoji/0_1089.png index dbf4865000..01ffdf1956 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1089.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1089.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_109.png b/TMessagesProj/src/emojis/twitter/emoji/0_109.png index a29be92457..2af39eb33c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_109.png and b/TMessagesProj/src/emojis/twitter/emoji/0_109.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1090.png b/TMessagesProj/src/emojis/twitter/emoji/0_1090.png index a4e5808f31..0c5de29453 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1090.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1090.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1091.png b/TMessagesProj/src/emojis/twitter/emoji/0_1091.png index a5fafa23f1..a8d8dc3a00 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1091.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1091.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1092.png b/TMessagesProj/src/emojis/twitter/emoji/0_1092.png index c357ad261a..b1f67b2802 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1092.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1092.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1093.png b/TMessagesProj/src/emojis/twitter/emoji/0_1093.png index 57a2d3bb79..31ca351fd5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1093.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1093.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1094.png b/TMessagesProj/src/emojis/twitter/emoji/0_1094.png index 2a262c836c..af1ebcbd68 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1094.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1094.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1095.png b/TMessagesProj/src/emojis/twitter/emoji/0_1095.png index a5bf07652c..3fb05fb4f8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1095.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1095.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1096.png b/TMessagesProj/src/emojis/twitter/emoji/0_1096.png index 564013b997..3b0e25afbc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1096.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1096.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1097.png b/TMessagesProj/src/emojis/twitter/emoji/0_1097.png index e9997e2724..5fafc845f7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1097.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1097.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1098.png b/TMessagesProj/src/emojis/twitter/emoji/0_1098.png index 8c94838de6..98c5d5aa1b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1098.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1098.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1099.png b/TMessagesProj/src/emojis/twitter/emoji/0_1099.png index 87f51023cf..7e92c36984 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1099.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1099.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_11.png b/TMessagesProj/src/emojis/twitter/emoji/0_11.png index 6bf85de7d9..dee6168d41 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_11.png and b/TMessagesProj/src/emojis/twitter/emoji/0_11.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_110.png b/TMessagesProj/src/emojis/twitter/emoji/0_110.png index 8633655177..465f4e0b79 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_110.png and b/TMessagesProj/src/emojis/twitter/emoji/0_110.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1100.png b/TMessagesProj/src/emojis/twitter/emoji/0_1100.png index e7257bb2ed..d5858952ac 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1100.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1100.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1101.png b/TMessagesProj/src/emojis/twitter/emoji/0_1101.png index a8178869d6..762fecded3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1101.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1101.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1102.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1102.png index f74fab6c10..79c02b167e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1102.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1102.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1103.png b/TMessagesProj/src/emojis/twitter/emoji/0_1103.png index a5e5653b61..568b19b735 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1103.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1103.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1104.png b/TMessagesProj/src/emojis/twitter/emoji/0_1104.png index ce56ba1fa2..1a5275a76f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1104.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1104.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1105.png b/TMessagesProj/src/emojis/twitter/emoji/0_1105.png index 12e1904b7d..8c71f329fc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1105.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1105.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1106.png b/TMessagesProj/src/emojis/twitter/emoji/0_1106.png index 31f861ca5b..1c0d35bc68 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1106.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1106.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1107.png b/TMessagesProj/src/emojis/twitter/emoji/0_1107.png index c5808deb8d..950719e743 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1107.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1107.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1108.png b/TMessagesProj/src/emojis/twitter/emoji/0_1108.png index 3b5d41cede..33b7094608 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1108.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1108.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1109.png b/TMessagesProj/src/emojis/twitter/emoji/0_1109.png index 9d697058db..40c9d8239d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1109.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1109.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_111.png b/TMessagesProj/src/emojis/twitter/emoji/0_111.png index 4c4ed771a4..1979583f30 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_111.png and b/TMessagesProj/src/emojis/twitter/emoji/0_111.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1110.png b/TMessagesProj/src/emojis/twitter/emoji/0_1110.png index eeb9f0c594..8cc8e0ef2b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1110.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1110.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1111.png b/TMessagesProj/src/emojis/twitter/emoji/0_1111.png index 2293a1bde0..9e606528e7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1111.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1111.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1112.png b/TMessagesProj/src/emojis/twitter/emoji/0_1112.png index 45eb39064f..eac697e5bb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1112.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1112.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1113.png b/TMessagesProj/src/emojis/twitter/emoji/0_1113.png index 6ade33a098..5fc26f7cbe 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1113.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1113.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1114.png b/TMessagesProj/src/emojis/twitter/emoji/0_1114.png index 52ca6234dd..0b606bac03 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1114.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1114.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1115.png b/TMessagesProj/src/emojis/twitter/emoji/0_1115.png index c0ae2a7abb..87aa087d0a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1115.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1115.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1116.png b/TMessagesProj/src/emojis/twitter/emoji/0_1116.png index 8db4e52b92..11d80b5904 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1116.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1116.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1117.png b/TMessagesProj/src/emojis/twitter/emoji/0_1117.png index 3ecb5254c9..7ba96d15b9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1117.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1117.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1118.png b/TMessagesProj/src/emojis/twitter/emoji/0_1118.png index 5dce57103a..4a2196a558 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1118.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1118.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1119.png b/TMessagesProj/src/emojis/twitter/emoji/0_1119.png index d090b312a3..3f5383b618 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1119.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1119.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_112.png b/TMessagesProj/src/emojis/twitter/emoji/0_112.png index 5698ab40f4..ebb31228f7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_112.png and b/TMessagesProj/src/emojis/twitter/emoji/0_112.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1120.png b/TMessagesProj/src/emojis/twitter/emoji/0_1120.png index 2ee66d6805..9ddaf2431c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1120.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1120.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1121.png b/TMessagesProj/src/emojis/twitter/emoji/0_1121.png index fec9de634c..1e2efe7459 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1121.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1121.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1122.png b/TMessagesProj/src/emojis/twitter/emoji/0_1122.png index 5cd08b22d5..e8ba442a4d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1122.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1122.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1123.png b/TMessagesProj/src/emojis/twitter/emoji/0_1123.png index 7b8aaacc17..c2c1a8a937 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1123.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1123.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1124.png b/TMessagesProj/src/emojis/twitter/emoji/0_1124.png index 3071cb38c2..1faa4bed06 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1124.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1124.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1125.png b/TMessagesProj/src/emojis/twitter/emoji/0_1125.png index ecbd0ba294..f5b2c24db4 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1125.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1125.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1126.png b/TMessagesProj/src/emojis/twitter/emoji/0_1126.png index eadd6b9465..4525eb8770 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1126.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1126.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1127.png b/TMessagesProj/src/emojis/twitter/emoji/0_1127.png index 69d5f64548..1a22757f47 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1127.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1127.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1128.png b/TMessagesProj/src/emojis/twitter/emoji/0_1128.png index 176fb7e950..f1ab97f9f2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1128.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1128.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1129.png b/TMessagesProj/src/emojis/twitter/emoji/0_1129.png index b984a61ae7..3624490c98 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1129.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1129.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_113.png b/TMessagesProj/src/emojis/twitter/emoji/0_113.png index 0ee00d3eb5..a6985a7e50 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_113.png and b/TMessagesProj/src/emojis/twitter/emoji/0_113.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1130.png b/TMessagesProj/src/emojis/twitter/emoji/0_1130.png index 5ac6076f64..c12ec16752 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1130.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1130.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1131.png b/TMessagesProj/src/emojis/twitter/emoji/0_1131.png index 5c003313c8..fbf35107f4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1131.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1131.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1132.png b/TMessagesProj/src/emojis/twitter/emoji/0_1132.png index dcbce3fbb6..e677247a6e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1132.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1132.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1133.png b/TMessagesProj/src/emojis/twitter/emoji/0_1133.png index 3b2bebb465..b3006a52d3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1133.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1133.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1134.png b/TMessagesProj/src/emojis/twitter/emoji/0_1134.png index 9004e8d08c..082b217aaf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1134.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1134.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1135.png b/TMessagesProj/src/emojis/twitter/emoji/0_1135.png index 6352cbbab6..51d2b33e0a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1135.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1135.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1136.png b/TMessagesProj/src/emojis/twitter/emoji/0_1136.png index f19dfe3c44..53110ba15e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1136.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1136.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1137.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1137.png index aa8f7d5e73..93c3529dba 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1137.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1137.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1138.png b/TMessagesProj/src/emojis/twitter/emoji/0_1138.png index 46af78ec45..b9c743103a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1138.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1138.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1139.png b/TMessagesProj/src/emojis/twitter/emoji/0_1139.png index b3b34e9b9e..6f2b6ce2e4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1139.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1139.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_114.png b/TMessagesProj/src/emojis/twitter/emoji/0_114.png index bb05277f3b..194bfca30d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_114.png and b/TMessagesProj/src/emojis/twitter/emoji/0_114.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1140.png b/TMessagesProj/src/emojis/twitter/emoji/0_1140.png index 98b007f0b0..90aa51c9e7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1140.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1140.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1141.png b/TMessagesProj/src/emojis/twitter/emoji/0_1141.png index 29ce6531f6..2a012f3c98 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1141.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1141.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1142.png b/TMessagesProj/src/emojis/twitter/emoji/0_1142.png index 8a579ace65..c97450d0e4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1142.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1142.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1143.png b/TMessagesProj/src/emojis/twitter/emoji/0_1143.png index 265fc0629f..8361ebf563 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1143.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1143.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1144.png b/TMessagesProj/src/emojis/twitter/emoji/0_1144.png index c78cdfa6ef..eb238d6354 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1144.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1144.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1145.png b/TMessagesProj/src/emojis/twitter/emoji/0_1145.png index b850562cca..7f6d309265 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1145.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1145.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1146.png b/TMessagesProj/src/emojis/twitter/emoji/0_1146.png index 66ebf6116a..0c53343e9f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1146.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1146.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1147.png b/TMessagesProj/src/emojis/twitter/emoji/0_1147.png index fdd46b9761..41527f475b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1147.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1147.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1148.png b/TMessagesProj/src/emojis/twitter/emoji/0_1148.png index 4e9b3c2e72..ce7a09a1e6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1148.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1148.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1149.png b/TMessagesProj/src/emojis/twitter/emoji/0_1149.png index d6019c7707..fc6b36e031 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1149.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1149.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_115.png b/TMessagesProj/src/emojis/twitter/emoji/0_115.png index bb7d6b3886..2bae114e2e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_115.png and b/TMessagesProj/src/emojis/twitter/emoji/0_115.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1150.png b/TMessagesProj/src/emojis/twitter/emoji/0_1150.png index 655c520334..041baaf76f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1150.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1150.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1151.png b/TMessagesProj/src/emojis/twitter/emoji/0_1151.png index 31fe252f93..e0b0677b94 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1151.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1151.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1152.png b/TMessagesProj/src/emojis/twitter/emoji/0_1152.png index 940343298e..e4bb61ed2d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1152.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1152.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1153.png b/TMessagesProj/src/emojis/twitter/emoji/0_1153.png index 2275bcae72..7ab65ab9e4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1153.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1153.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1154.png b/TMessagesProj/src/emojis/twitter/emoji/0_1154.png index 33a80ff532..c50bf4c968 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1154.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1154.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1155.png b/TMessagesProj/src/emojis/twitter/emoji/0_1155.png index be5656fa83..764a623871 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1155.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1155.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1156.png b/TMessagesProj/src/emojis/twitter/emoji/0_1156.png index 247651a794..14c7f4da01 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1156.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1156.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1157.png b/TMessagesProj/src/emojis/twitter/emoji/0_1157.png index c27c5e1951..9cec3eb41f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1157.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1157.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1158.png b/TMessagesProj/src/emojis/twitter/emoji/0_1158.png index 3459aef660..1dbe08926a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1158.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1158.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1159.png b/TMessagesProj/src/emojis/twitter/emoji/0_1159.png index 13268aa125..a63f2b21bc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1159.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1159.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_116.png b/TMessagesProj/src/emojis/twitter/emoji/0_116.png index cbb10ac1c1..57d8f6f698 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_116.png and b/TMessagesProj/src/emojis/twitter/emoji/0_116.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1160.png b/TMessagesProj/src/emojis/twitter/emoji/0_1160.png index f7a5438b0f..3dd323b61d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1160.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1160.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1161.png b/TMessagesProj/src/emojis/twitter/emoji/0_1161.png index 5a5c827aab..6ee6d8e957 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1161.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1161.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1162.png b/TMessagesProj/src/emojis/twitter/emoji/0_1162.png index ce38bf9580..d222d4093e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1162.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1162.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1163.png b/TMessagesProj/src/emojis/twitter/emoji/0_1163.png index b3177cff9c..93998b240c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1163.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1163.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1164.png b/TMessagesProj/src/emojis/twitter/emoji/0_1164.png index ed814e285f..ac5f5c8c22 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1164.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1164.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1165.png b/TMessagesProj/src/emojis/twitter/emoji/0_1165.png index 2ada6bb324..cb07f64723 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1165.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1165.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1166.png b/TMessagesProj/src/emojis/twitter/emoji/0_1166.png index 9e4cbc0f96..4c8cc404e5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1166.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1166.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1167.png b/TMessagesProj/src/emojis/twitter/emoji/0_1167.png index e776af9263..b7ee76e131 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1167.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1167.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1168.png b/TMessagesProj/src/emojis/twitter/emoji/0_1168.png index 3fa963b56d..ead1f52c8d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1168.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1168.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1169.png b/TMessagesProj/src/emojis/twitter/emoji/0_1169.png index d35faa1eac..241a7deff7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1169.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1169.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_117.png b/TMessagesProj/src/emojis/twitter/emoji/0_117.png index 6134eab1b2..f05d0c9e7d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_117.png and b/TMessagesProj/src/emojis/twitter/emoji/0_117.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1170.png b/TMessagesProj/src/emojis/twitter/emoji/0_1170.png index 8177a13c14..c97498b62c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1170.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1170.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1171.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1171.png index 6f8df0627b..35d3c53ce1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1171.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1171.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1172.png b/TMessagesProj/src/emojis/twitter/emoji/0_1172.png index 91cbde4808..e1b30aa017 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1172.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1172.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1173.png b/TMessagesProj/src/emojis/twitter/emoji/0_1173.png index 4d3b7b21a1..fd471d0600 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1173.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1173.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1174.png b/TMessagesProj/src/emojis/twitter/emoji/0_1174.png index de0bbe19a6..60b948c96d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1174.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1174.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1175.png b/TMessagesProj/src/emojis/twitter/emoji/0_1175.png index b6a83ad8f1..5ec3ce32bd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1175.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1175.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1176.png b/TMessagesProj/src/emojis/twitter/emoji/0_1176.png index b45ced74d5..f0dfccb739 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1176.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1176.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1177.png b/TMessagesProj/src/emojis/twitter/emoji/0_1177.png index 8a7b3ecc77..9a7879574e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1177.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1177.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1178.png b/TMessagesProj/src/emojis/twitter/emoji/0_1178.png index 331f5d8c18..21e91c9dfd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1178.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1178.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1179.png b/TMessagesProj/src/emojis/twitter/emoji/0_1179.png index b4872615b2..3c5d7634b5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1179.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1179.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_118.png b/TMessagesProj/src/emojis/twitter/emoji/0_118.png index 90acb11e30..b625840322 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_118.png and b/TMessagesProj/src/emojis/twitter/emoji/0_118.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1180.png b/TMessagesProj/src/emojis/twitter/emoji/0_1180.png index 0d6b2981d3..de275c5c09 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1180.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1180.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1181.png b/TMessagesProj/src/emojis/twitter/emoji/0_1181.png index 82c7f96f3f..03997ac194 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1181.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1181.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1182.png b/TMessagesProj/src/emojis/twitter/emoji/0_1182.png index 50a22725c2..bfdc00020c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1182.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1182.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1183.png b/TMessagesProj/src/emojis/twitter/emoji/0_1183.png index ff972304dd..1d46853ab7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1183.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1183.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1184.png b/TMessagesProj/src/emojis/twitter/emoji/0_1184.png index bff6c801ac..4aa7061b7a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1184.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1184.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1185.png b/TMessagesProj/src/emojis/twitter/emoji/0_1185.png index 5b1705333a..bd39d14a40 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1185.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1185.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1186.png b/TMessagesProj/src/emojis/twitter/emoji/0_1186.png index 925b0127d9..a3bc638d25 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1186.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1186.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1187.png b/TMessagesProj/src/emojis/twitter/emoji/0_1187.png index 222a1a2123..bf74d833e9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1187.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1187.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1188.png b/TMessagesProj/src/emojis/twitter/emoji/0_1188.png index 38fc7756ca..d6d2f4cec9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1188.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1188.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1189.png b/TMessagesProj/src/emojis/twitter/emoji/0_1189.png index 43ea904a0a..3b13c8bd82 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1189.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1189.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_119.png b/TMessagesProj/src/emojis/twitter/emoji/0_119.png index 69a84af379..5c03799b21 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_119.png and b/TMessagesProj/src/emojis/twitter/emoji/0_119.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1190.png b/TMessagesProj/src/emojis/twitter/emoji/0_1190.png index 22977a4c95..7691a857eb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1190.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1190.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1191.png b/TMessagesProj/src/emojis/twitter/emoji/0_1191.png index 2a2f3e66e9..aab6736493 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1191.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1191.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1192.png b/TMessagesProj/src/emojis/twitter/emoji/0_1192.png index a45eb43e53..1d76e48cab 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1192.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1192.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1193.png b/TMessagesProj/src/emojis/twitter/emoji/0_1193.png index e9e6e5ea4a..98f599fa66 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1193.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1193.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1194.png b/TMessagesProj/src/emojis/twitter/emoji/0_1194.png index 720322242f..68f7e25b03 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1194.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1194.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1195.png b/TMessagesProj/src/emojis/twitter/emoji/0_1195.png index e29c34702b..b377e0c1e8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1195.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1195.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1196.png b/TMessagesProj/src/emojis/twitter/emoji/0_1196.png index 5872c7da80..19ab5f0061 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1196.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1196.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1197.png b/TMessagesProj/src/emojis/twitter/emoji/0_1197.png index 3289eba2c7..d677cb31f8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1197.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1197.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1198.png b/TMessagesProj/src/emojis/twitter/emoji/0_1198.png index 51bf251548..e05f09e1c3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1198.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1198.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1199.png b/TMessagesProj/src/emojis/twitter/emoji/0_1199.png index 6a13b324ec..63153e42f6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1199.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1199.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_12.png b/TMessagesProj/src/emojis/twitter/emoji/0_12.png index ba7b6e0de1..5e59f2f3a1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_12.png and b/TMessagesProj/src/emojis/twitter/emoji/0_12.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_120.png b/TMessagesProj/src/emojis/twitter/emoji/0_120.png index 8fbc827a75..0b495f5e59 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_120.png and b/TMessagesProj/src/emojis/twitter/emoji/0_120.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1200.png b/TMessagesProj/src/emojis/twitter/emoji/0_1200.png index 43a3b1dd88..6ace39b6ab 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1200.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1200.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1201.png b/TMessagesProj/src/emojis/twitter/emoji/0_1201.png index cd801b3821..48b2fa0499 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1201.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1201.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1202.png b/TMessagesProj/src/emojis/twitter/emoji/0_1202.png index 7bcd8e5c2d..cc4090e63d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1202.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1202.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1203.png b/TMessagesProj/src/emojis/twitter/emoji/0_1203.png index 937da22df9..b666a9a973 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1203.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1203.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1204.png b/TMessagesProj/src/emojis/twitter/emoji/0_1204.png index 34383ab220..55477096df 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1204.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1204.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1205.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1205.png index af636702f8..a093bd034e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1205.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1205.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1206.png b/TMessagesProj/src/emojis/twitter/emoji/0_1206.png index 00280958ad..5895c2a521 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1206.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1206.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1207.png b/TMessagesProj/src/emojis/twitter/emoji/0_1207.png index 76c29429fc..d4efa64485 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1207.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1207.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1208.png b/TMessagesProj/src/emojis/twitter/emoji/0_1208.png index 4bfa26e3e4..ae1438f139 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1208.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1208.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1209.png b/TMessagesProj/src/emojis/twitter/emoji/0_1209.png index 539173aaf6..72483f699a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1209.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1209.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_121.png b/TMessagesProj/src/emojis/twitter/emoji/0_121.png index 120092bb22..55aa31a6e7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_121.png and b/TMessagesProj/src/emojis/twitter/emoji/0_121.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1210.png b/TMessagesProj/src/emojis/twitter/emoji/0_1210.png index e321df6190..76f5b99457 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1210.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1210.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1211.png b/TMessagesProj/src/emojis/twitter/emoji/0_1211.png index f802aeb340..949c6fe38d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1211.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1211.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1212.png b/TMessagesProj/src/emojis/twitter/emoji/0_1212.png index 79b45dc723..80b7f1a669 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1212.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1212.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1213.png b/TMessagesProj/src/emojis/twitter/emoji/0_1213.png index d4a8805b69..8b05471a71 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1213.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1213.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1214.png b/TMessagesProj/src/emojis/twitter/emoji/0_1214.png index fd2ec10d24..5cb3b02103 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1214.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1214.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1215.png b/TMessagesProj/src/emojis/twitter/emoji/0_1215.png index 35f929f849..30c8d4371e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1215.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1215.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1216.png b/TMessagesProj/src/emojis/twitter/emoji/0_1216.png index 4b1733518d..eb16de292a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1216.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1216.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1217.png b/TMessagesProj/src/emojis/twitter/emoji/0_1217.png index 022e4d1dd5..26b389ffbb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1217.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1217.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1218.png b/TMessagesProj/src/emojis/twitter/emoji/0_1218.png index a11ead674c..da4317b240 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1218.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1218.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1219.png b/TMessagesProj/src/emojis/twitter/emoji/0_1219.png index 03705291ce..348712a495 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1219.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1219.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_122.png b/TMessagesProj/src/emojis/twitter/emoji/0_122.png index 2ce52c18b4..eb428085cd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_122.png and b/TMessagesProj/src/emojis/twitter/emoji/0_122.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1220.png b/TMessagesProj/src/emojis/twitter/emoji/0_1220.png index b1ff2b04c2..1045fbe61d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1220.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1220.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1221.png b/TMessagesProj/src/emojis/twitter/emoji/0_1221.png index b9e8b66154..8cc37d52fa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1221.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1221.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1222.png b/TMessagesProj/src/emojis/twitter/emoji/0_1222.png index 841b07b472..2a241fdcb9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1222.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1222.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1223.png b/TMessagesProj/src/emojis/twitter/emoji/0_1223.png index 410aeb3c63..bcc0d59186 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1223.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1223.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1224.png b/TMessagesProj/src/emojis/twitter/emoji/0_1224.png index 48e02c2d5c..6cd3d750f2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1224.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1224.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1225.png b/TMessagesProj/src/emojis/twitter/emoji/0_1225.png index 73f98d0a56..3c79c4ca55 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1225.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1225.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1226.png b/TMessagesProj/src/emojis/twitter/emoji/0_1226.png index b643a73481..e385719e87 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1226.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1226.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1227.png b/TMessagesProj/src/emojis/twitter/emoji/0_1227.png index 5986b742ed..331bf42035 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1227.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1227.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1228.png b/TMessagesProj/src/emojis/twitter/emoji/0_1228.png index 6bc1653c5a..c6220c48a9 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1228.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1228.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1229.png b/TMessagesProj/src/emojis/twitter/emoji/0_1229.png index 73177ec687..a036faa348 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1229.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1229.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_123.png b/TMessagesProj/src/emojis/twitter/emoji/0_123.png index 2ace9753bd..ea17cc81f1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_123.png and b/TMessagesProj/src/emojis/twitter/emoji/0_123.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1230.png b/TMessagesProj/src/emojis/twitter/emoji/0_1230.png index cb63bbd073..81e3685170 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1230.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1230.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1231.png b/TMessagesProj/src/emojis/twitter/emoji/0_1231.png index aab20c4f5e..09d7730558 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1231.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1231.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1232.png b/TMessagesProj/src/emojis/twitter/emoji/0_1232.png index f602aa8c04..c92340f684 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1232.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1232.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1233.png b/TMessagesProj/src/emojis/twitter/emoji/0_1233.png index ee7b034cbc..f070bd7de0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1233.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1233.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1234.png b/TMessagesProj/src/emojis/twitter/emoji/0_1234.png index 80b832df19..c7f9460af1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1234.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1234.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1235.png b/TMessagesProj/src/emojis/twitter/emoji/0_1235.png index ffc6039fee..5b7490c115 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1235.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1235.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1236.png b/TMessagesProj/src/emojis/twitter/emoji/0_1236.png index b275615be7..5973d41aa0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1236.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1236.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1237.png b/TMessagesProj/src/emojis/twitter/emoji/0_1237.png index e7d69492ef..bfa6cfb4c2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1237.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1237.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1238.png b/TMessagesProj/src/emojis/twitter/emoji/0_1238.png index a3400426de..16d7554c6b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1238.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1238.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1239.png b/TMessagesProj/src/emojis/twitter/emoji/0_1239.png index cda9a75471..a485d3c71e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1239.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1239.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_124.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_124.png index e27418547c..e334f0ee2d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_124.png and b/TMessagesProj/src/emojis/twitter/emoji/0_124.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1240.png b/TMessagesProj/src/emojis/twitter/emoji/0_1240.png index 7ece624ac6..3ce2061dd9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1240.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1240.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1241.png b/TMessagesProj/src/emojis/twitter/emoji/0_1241.png index 1b2bc242c2..316878eeee 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1241.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1241.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1242.png b/TMessagesProj/src/emojis/twitter/emoji/0_1242.png index e46edc7151..dc5509852f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1242.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1242.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1243.png b/TMessagesProj/src/emojis/twitter/emoji/0_1243.png index a089793399..e57ee89aec 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1243.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1243.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1244.png b/TMessagesProj/src/emojis/twitter/emoji/0_1244.png index 174bd579b1..f041bb95b8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1244.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1244.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1245.png b/TMessagesProj/src/emojis/twitter/emoji/0_1245.png index bcb70e5d97..2ed64f72ae 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1245.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1245.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1246.png b/TMessagesProj/src/emojis/twitter/emoji/0_1246.png index 13c13c9fc1..67f4004a6d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1246.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1246.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1247.png b/TMessagesProj/src/emojis/twitter/emoji/0_1247.png index ded3194d94..c7f3b1622e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1247.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1247.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1248.png b/TMessagesProj/src/emojis/twitter/emoji/0_1248.png index 353cde70e0..023a1ea741 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1248.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1248.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1249.png b/TMessagesProj/src/emojis/twitter/emoji/0_1249.png index 00bc628d1a..b002c42ab3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1249.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1249.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_125.png b/TMessagesProj/src/emojis/twitter/emoji/0_125.png index 88f019ab44..6b9a4e2352 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_125.png and b/TMessagesProj/src/emojis/twitter/emoji/0_125.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1250.png b/TMessagesProj/src/emojis/twitter/emoji/0_1250.png index 6fbcb97207..175277f610 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1250.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1250.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1251.png b/TMessagesProj/src/emojis/twitter/emoji/0_1251.png index 4cb96706c4..c84f8192a5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1251.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1251.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1252.png b/TMessagesProj/src/emojis/twitter/emoji/0_1252.png index 9da1ccfd81..ccaf81cd83 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1252.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1252.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1253.png b/TMessagesProj/src/emojis/twitter/emoji/0_1253.png index 34741e2e6c..5500cbc057 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1253.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1253.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1254.png b/TMessagesProj/src/emojis/twitter/emoji/0_1254.png index 1bd6d08c06..f9d734541d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1254.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1254.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1255.png b/TMessagesProj/src/emojis/twitter/emoji/0_1255.png index 558b87b745..4a93ac121b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1255.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1255.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1256.png b/TMessagesProj/src/emojis/twitter/emoji/0_1256.png index d8631079c3..e33d40ea3c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1256.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1256.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1257.png b/TMessagesProj/src/emojis/twitter/emoji/0_1257.png index 65892f7683..7da8ad6a0b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1257.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1257.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1258.png b/TMessagesProj/src/emojis/twitter/emoji/0_1258.png index 03d09e2b6f..897fafc140 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1258.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1258.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1259.png b/TMessagesProj/src/emojis/twitter/emoji/0_1259.png index 523eff9d00..2dfefc35da 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1259.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1259.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_126.png b/TMessagesProj/src/emojis/twitter/emoji/0_126.png index f63000658c..104f132285 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_126.png and b/TMessagesProj/src/emojis/twitter/emoji/0_126.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1260.png b/TMessagesProj/src/emojis/twitter/emoji/0_1260.png index dd9e310275..6de28aa841 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1260.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1260.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1261.png b/TMessagesProj/src/emojis/twitter/emoji/0_1261.png index e0805406c7..2f740fe663 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1261.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1261.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1262.png b/TMessagesProj/src/emojis/twitter/emoji/0_1262.png index 93f91cacb2..148745e773 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1262.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1262.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1263.png b/TMessagesProj/src/emojis/twitter/emoji/0_1263.png index caec4e188a..2fbc473b74 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1263.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1263.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1264.png b/TMessagesProj/src/emojis/twitter/emoji/0_1264.png index b5d7209466..4b62f3ed8d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1264.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1264.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1265.png b/TMessagesProj/src/emojis/twitter/emoji/0_1265.png index f5ddada17c..82eaf5b7a8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1265.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1265.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1266.png b/TMessagesProj/src/emojis/twitter/emoji/0_1266.png index 78f3b61b26..84f9171f19 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1266.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1266.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1267.png b/TMessagesProj/src/emojis/twitter/emoji/0_1267.png index 2b1152bf87..0fc784ac41 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1267.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1267.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1268.png b/TMessagesProj/src/emojis/twitter/emoji/0_1268.png index 11d0223d1b..5ec5644291 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1268.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1268.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1269.png b/TMessagesProj/src/emojis/twitter/emoji/0_1269.png index c16bd1738c..05122068a1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1269.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1269.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_127.png b/TMessagesProj/src/emojis/twitter/emoji/0_127.png index 97e22298f1..8731cf5003 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_127.png and b/TMessagesProj/src/emojis/twitter/emoji/0_127.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1270.png b/TMessagesProj/src/emojis/twitter/emoji/0_1270.png index bc030af07e..789c0f069e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1270.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1270.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1271.png b/TMessagesProj/src/emojis/twitter/emoji/0_1271.png index 3d1b9e1ddc..41461f2f16 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1271.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1271.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1272.png b/TMessagesProj/src/emojis/twitter/emoji/0_1272.png index 8f0ec057fb..9b72a7dc31 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1272.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1272.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1273.png b/TMessagesProj/src/emojis/twitter/emoji/0_1273.png index ceab0373d5..2569a1ef6a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1273.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1273.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1274.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1274.png index ef7701317f..c884775f17 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1274.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1274.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1275.png b/TMessagesProj/src/emojis/twitter/emoji/0_1275.png index 5677246016..ce4bceda13 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1275.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1275.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1276.png b/TMessagesProj/src/emojis/twitter/emoji/0_1276.png index 4e0770e7d6..b72b8d5af7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1276.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1276.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1277.png b/TMessagesProj/src/emojis/twitter/emoji/0_1277.png index 50420e1cec..fc7c1cf7ca 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1277.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1277.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1278.png b/TMessagesProj/src/emojis/twitter/emoji/0_1278.png index 0804816796..28bcbda054 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1278.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1278.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1279.png b/TMessagesProj/src/emojis/twitter/emoji/0_1279.png index 7f2a8f5053..36115ab5dd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1279.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1279.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_128.png b/TMessagesProj/src/emojis/twitter/emoji/0_128.png index 92ac2c804e..6d76d517be 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_128.png and b/TMessagesProj/src/emojis/twitter/emoji/0_128.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1280.png b/TMessagesProj/src/emojis/twitter/emoji/0_1280.png index 0ee7906fa0..482ae8f689 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1280.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1280.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1281.png b/TMessagesProj/src/emojis/twitter/emoji/0_1281.png index 417f4eca61..d5f51fb3df 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1281.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1281.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1282.png b/TMessagesProj/src/emojis/twitter/emoji/0_1282.png index db4a3b2eb8..9a2843d545 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1282.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1282.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1283.png b/TMessagesProj/src/emojis/twitter/emoji/0_1283.png index 3ac99c2700..5ceab54388 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1283.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1283.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1284.png b/TMessagesProj/src/emojis/twitter/emoji/0_1284.png index 42f258be80..a6dbf9388e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1284.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1284.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1285.png b/TMessagesProj/src/emojis/twitter/emoji/0_1285.png index fb46df9d6b..f8cd3af492 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1285.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1285.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1286.png b/TMessagesProj/src/emojis/twitter/emoji/0_1286.png index 137df32752..858a78e7fa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1286.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1286.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1287.png b/TMessagesProj/src/emojis/twitter/emoji/0_1287.png index 91c046f754..6e3848bf23 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1287.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1287.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1288.png b/TMessagesProj/src/emojis/twitter/emoji/0_1288.png index 9c1fff5357..cf8b58baca 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1288.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1288.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1289.png b/TMessagesProj/src/emojis/twitter/emoji/0_1289.png index 7a5f90f774..c4c1d3191d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1289.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1289.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_129.png b/TMessagesProj/src/emojis/twitter/emoji/0_129.png index dd66c9dcb4..48265d8868 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_129.png and b/TMessagesProj/src/emojis/twitter/emoji/0_129.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1290.png b/TMessagesProj/src/emojis/twitter/emoji/0_1290.png index 9d842b89e9..8f98f533d2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1290.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1290.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1291.png b/TMessagesProj/src/emojis/twitter/emoji/0_1291.png index 585fa5867d..0c31a4bb00 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1291.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1291.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1292.png b/TMessagesProj/src/emojis/twitter/emoji/0_1292.png index a69c2380cf..e54e1be632 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1292.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1292.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1293.png b/TMessagesProj/src/emojis/twitter/emoji/0_1293.png index cb456a81b7..b69b824e68 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1293.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1293.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1294.png b/TMessagesProj/src/emojis/twitter/emoji/0_1294.png index 4743de6f18..9b21e0d409 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1294.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1294.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1295.png b/TMessagesProj/src/emojis/twitter/emoji/0_1295.png index 7aa1ef8da5..2e752c12bf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1295.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1295.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1296.png b/TMessagesProj/src/emojis/twitter/emoji/0_1296.png index e5c08493a5..a887ca4e8c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1296.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1296.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1297.png b/TMessagesProj/src/emojis/twitter/emoji/0_1297.png index 01b0a632db..d453f335d5 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1297.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1297.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1298.png b/TMessagesProj/src/emojis/twitter/emoji/0_1298.png index 8a16657906..254765f0e1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1298.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1298.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1299.png b/TMessagesProj/src/emojis/twitter/emoji/0_1299.png index 1a5c55f1ae..9da03f6983 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1299.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1299.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_13.png b/TMessagesProj/src/emojis/twitter/emoji/0_13.png index a30263c37c..79c632561d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_13.png and b/TMessagesProj/src/emojis/twitter/emoji/0_13.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_130.png b/TMessagesProj/src/emojis/twitter/emoji/0_130.png index 38ee7c3a9d..8f6798ff7b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_130.png and b/TMessagesProj/src/emojis/twitter/emoji/0_130.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1300.png b/TMessagesProj/src/emojis/twitter/emoji/0_1300.png index b306738dab..1feabc5e19 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1300.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1300.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1301.png b/TMessagesProj/src/emojis/twitter/emoji/0_1301.png index fa1b345954..909c580391 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1301.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1301.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1302.png b/TMessagesProj/src/emojis/twitter/emoji/0_1302.png index 860caf6c5d..fb1624722a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1302.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1302.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1303.png b/TMessagesProj/src/emojis/twitter/emoji/0_1303.png index d242ea93c6..256b4de894 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1303.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1303.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1304.png b/TMessagesProj/src/emojis/twitter/emoji/0_1304.png index c2ea70a92e..e164ec4ee9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1304.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1304.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1305.png b/TMessagesProj/src/emojis/twitter/emoji/0_1305.png index d75e94cfc0..ed6e2a9ac0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1305.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1305.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1306.png b/TMessagesProj/src/emojis/twitter/emoji/0_1306.png index 933d8a4d46..1cae91b864 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1306.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1306.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1307.png b/TMessagesProj/src/emojis/twitter/emoji/0_1307.png index 97f09084a1..8738a0554d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1307.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1307.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1308.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1308.png index 0adcd3468b..477df5a600 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1308.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1308.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1309.png b/TMessagesProj/src/emojis/twitter/emoji/0_1309.png index 218abb289a..13a0dac203 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1309.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1309.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_131.png b/TMessagesProj/src/emojis/twitter/emoji/0_131.png index 8cdae9ce10..3c30952f87 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_131.png and b/TMessagesProj/src/emojis/twitter/emoji/0_131.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1310.png b/TMessagesProj/src/emojis/twitter/emoji/0_1310.png index a63747ca5c..038da5597a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1310.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1310.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1311.png b/TMessagesProj/src/emojis/twitter/emoji/0_1311.png index 345d228618..d9439b906c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1311.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1311.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1312.png b/TMessagesProj/src/emojis/twitter/emoji/0_1312.png index 4ffcff7dd1..6099dcfb38 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1312.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1312.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1313.png b/TMessagesProj/src/emojis/twitter/emoji/0_1313.png index d7b87c10d6..48e04d12cb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1313.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1313.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1314.png b/TMessagesProj/src/emojis/twitter/emoji/0_1314.png index d3c482a659..74ed34af09 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1314.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1314.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1315.png b/TMessagesProj/src/emojis/twitter/emoji/0_1315.png index 899e3d59a9..d5c98c4e89 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1315.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1315.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1316.png b/TMessagesProj/src/emojis/twitter/emoji/0_1316.png index ce58a77e24..b65d0d3d35 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1316.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1316.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1317.png b/TMessagesProj/src/emojis/twitter/emoji/0_1317.png index 5881b13d8e..b1ae916c34 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1317.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1317.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1318.png b/TMessagesProj/src/emojis/twitter/emoji/0_1318.png index bf43321942..5fbff1c230 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1318.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1318.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1319.png b/TMessagesProj/src/emojis/twitter/emoji/0_1319.png index c1dd3c931c..cedeaa3f38 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1319.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1319.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_132.png b/TMessagesProj/src/emojis/twitter/emoji/0_132.png index 6559e83ded..c1105a584c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_132.png and b/TMessagesProj/src/emojis/twitter/emoji/0_132.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1320.png b/TMessagesProj/src/emojis/twitter/emoji/0_1320.png index 06ff3a72db..c516fcfc2b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1320.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1320.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1321.png b/TMessagesProj/src/emojis/twitter/emoji/0_1321.png index 1d68e2af8b..f8c647b2d1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1321.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1321.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1322.png b/TMessagesProj/src/emojis/twitter/emoji/0_1322.png index 5b57f6f930..399e2ec10d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1322.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1322.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1323.png b/TMessagesProj/src/emojis/twitter/emoji/0_1323.png index 528052e9e2..a6a5ebfe40 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1323.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1323.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1324.png b/TMessagesProj/src/emojis/twitter/emoji/0_1324.png index 43f5f454a8..daac3d063c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1324.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1324.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1325.png b/TMessagesProj/src/emojis/twitter/emoji/0_1325.png index 561b87f05b..b7222f4407 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1325.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1325.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1326.png b/TMessagesProj/src/emojis/twitter/emoji/0_1326.png index 616a32daac..657dd43744 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1326.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1326.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1327.png b/TMessagesProj/src/emojis/twitter/emoji/0_1327.png index 0dbabb694e..ea405d7d13 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1327.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1327.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1328.png b/TMessagesProj/src/emojis/twitter/emoji/0_1328.png index 4edc48fbae..15019d1ced 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1328.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1328.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1329.png b/TMessagesProj/src/emojis/twitter/emoji/0_1329.png index de9e0ce1fc..1fbec5f2c4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1329.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1329.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_133.png b/TMessagesProj/src/emojis/twitter/emoji/0_133.png index e0543a333d..07066e38d7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_133.png and b/TMessagesProj/src/emojis/twitter/emoji/0_133.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1330.png b/TMessagesProj/src/emojis/twitter/emoji/0_1330.png index 4932d984ec..c848ca1f11 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1330.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1330.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1331.png b/TMessagesProj/src/emojis/twitter/emoji/0_1331.png index b9128e20ae..5ef77d9f09 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1331.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1331.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1332.png b/TMessagesProj/src/emojis/twitter/emoji/0_1332.png index 463ba08d25..29ac4570d8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1332.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1332.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1333.png b/TMessagesProj/src/emojis/twitter/emoji/0_1333.png index ce4e9f9634..44cdbbd7c4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1333.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1333.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1334.png b/TMessagesProj/src/emojis/twitter/emoji/0_1334.png index 30be736d55..fa3ed6e3ce 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1334.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1334.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1335.png b/TMessagesProj/src/emojis/twitter/emoji/0_1335.png index d3ec2cc09d..90e4eb9956 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1335.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1335.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1336.png b/TMessagesProj/src/emojis/twitter/emoji/0_1336.png index f06edfc5ac..f316ccdf65 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1336.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1336.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1337.png b/TMessagesProj/src/emojis/twitter/emoji/0_1337.png index 3da602fd37..02ebd47864 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1337.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1337.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1338.png b/TMessagesProj/src/emojis/twitter/emoji/0_1338.png index b1fb7df919..2b534d7d85 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1338.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1338.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1339.png b/TMessagesProj/src/emojis/twitter/emoji/0_1339.png index 35bb49893e..c0ac56585e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1339.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1339.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_134.png b/TMessagesProj/src/emojis/twitter/emoji/0_134.png index e8531f081d..1c680109fb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_134.png and b/TMessagesProj/src/emojis/twitter/emoji/0_134.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1340.png b/TMessagesProj/src/emojis/twitter/emoji/0_1340.png index 19613614c8..0f00cbf436 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1340.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1340.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1341.png b/TMessagesProj/src/emojis/twitter/emoji/0_1341.png index 8394a21748..f977fb7b5d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1341.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1341.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1342.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1342.png index 8e82e3a132..e60e49eb70 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1342.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1342.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1343.png b/TMessagesProj/src/emojis/twitter/emoji/0_1343.png index c5f52567e5..1527c19260 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1343.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1343.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1344.png b/TMessagesProj/src/emojis/twitter/emoji/0_1344.png index 2ccc583f6a..4f275bc9c6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1344.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1344.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1345.png b/TMessagesProj/src/emojis/twitter/emoji/0_1345.png index c21057fff5..641d085f7d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1345.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1345.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1346.png b/TMessagesProj/src/emojis/twitter/emoji/0_1346.png index 02b0460a23..1cf5bac0df 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1346.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1346.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1347.png b/TMessagesProj/src/emojis/twitter/emoji/0_1347.png index ef43fc4ecf..da0effa819 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1347.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1347.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1348.png b/TMessagesProj/src/emojis/twitter/emoji/0_1348.png index 4262a06bc3..b724294c83 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1348.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1348.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1349.png b/TMessagesProj/src/emojis/twitter/emoji/0_1349.png index 2d7f1b28f3..d4dc180582 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1349.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1349.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_135.png b/TMessagesProj/src/emojis/twitter/emoji/0_135.png index 4f742a1ec8..a5ab81255d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_135.png and b/TMessagesProj/src/emojis/twitter/emoji/0_135.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1350.png b/TMessagesProj/src/emojis/twitter/emoji/0_1350.png index eae0c988a9..8a407fcb6c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1350.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1350.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1351.png b/TMessagesProj/src/emojis/twitter/emoji/0_1351.png index a5f274aa16..bb9083952b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1351.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1351.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1352.png b/TMessagesProj/src/emojis/twitter/emoji/0_1352.png index 2baf489922..b31419b970 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1352.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1352.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1353.png b/TMessagesProj/src/emojis/twitter/emoji/0_1353.png index a653f05754..90b77d7c57 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1353.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1353.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1354.png b/TMessagesProj/src/emojis/twitter/emoji/0_1354.png index 6b7a5f0459..479063203b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1354.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1354.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1355.png b/TMessagesProj/src/emojis/twitter/emoji/0_1355.png index 8392d8698d..3f382e159b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1355.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1355.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1356.png b/TMessagesProj/src/emojis/twitter/emoji/0_1356.png index 3036fd5ee3..00a1e3122c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1356.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1356.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1357.png b/TMessagesProj/src/emojis/twitter/emoji/0_1357.png index 1137fda018..8a26b2437b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1357.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1357.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1358.png b/TMessagesProj/src/emojis/twitter/emoji/0_1358.png index 0596f8c15c..0c45f3c728 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1358.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1358.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1359.png b/TMessagesProj/src/emojis/twitter/emoji/0_1359.png index c0ae69c4e4..c9bfc1b5fd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1359.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1359.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_136.png b/TMessagesProj/src/emojis/twitter/emoji/0_136.png index 1efc891568..85c5669125 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_136.png and b/TMessagesProj/src/emojis/twitter/emoji/0_136.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1360.png b/TMessagesProj/src/emojis/twitter/emoji/0_1360.png index aca84bb874..6a950a2ff0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1360.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1360.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1361.png b/TMessagesProj/src/emojis/twitter/emoji/0_1361.png index a5d445a763..b32dc7d6f8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1361.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1361.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1362.png b/TMessagesProj/src/emojis/twitter/emoji/0_1362.png index 347c971d94..a0108ad9a8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1362.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1362.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1363.png b/TMessagesProj/src/emojis/twitter/emoji/0_1363.png index cde87949aa..8735cc0994 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1363.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1363.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1364.png b/TMessagesProj/src/emojis/twitter/emoji/0_1364.png index ffc6c94759..199c4c0f24 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1364.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1364.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1365.png b/TMessagesProj/src/emojis/twitter/emoji/0_1365.png index 42d3c9968a..cc1dbab42a 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1365.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1365.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1366.png b/TMessagesProj/src/emojis/twitter/emoji/0_1366.png index 7520ac5e37..6278cb30c6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1366.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1366.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1367.png b/TMessagesProj/src/emojis/twitter/emoji/0_1367.png index ed3c903ac5..6e0e0ffab5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1367.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1367.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1368.png b/TMessagesProj/src/emojis/twitter/emoji/0_1368.png index 0e4d77b3b6..2b07765eac 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1368.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1368.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1369.png b/TMessagesProj/src/emojis/twitter/emoji/0_1369.png index e81ec01ba2..b9ce24a228 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1369.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1369.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_137.png b/TMessagesProj/src/emojis/twitter/emoji/0_137.png index 5139635db3..32e58a5f1c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_137.png and b/TMessagesProj/src/emojis/twitter/emoji/0_137.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1370.png b/TMessagesProj/src/emojis/twitter/emoji/0_1370.png index 44fafb4638..254501ba2e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1370.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1370.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1371.png b/TMessagesProj/src/emojis/twitter/emoji/0_1371.png index 8f1b026ef0..3cc95b8e78 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1371.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1371.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1372.png b/TMessagesProj/src/emojis/twitter/emoji/0_1372.png index 1debaa7373..ff5c268edb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1372.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1372.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1373.png b/TMessagesProj/src/emojis/twitter/emoji/0_1373.png index 6f7b5d4208..e58e58b77b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1373.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1373.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1374.png b/TMessagesProj/src/emojis/twitter/emoji/0_1374.png index d4e484a70c..67fb430ba1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1374.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1374.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1375.png b/TMessagesProj/src/emojis/twitter/emoji/0_1375.png index 4f3be0f361..58e8d95a64 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1375.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1375.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1376.png b/TMessagesProj/src/emojis/twitter/emoji/0_1376.png index 8ff41f8a2a..9816f2c1f8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1376.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1376.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1377.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1377.png index 83c3acb578..0c63c22c31 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1377.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1377.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1378.png b/TMessagesProj/src/emojis/twitter/emoji/0_1378.png index a9a4dd126e..66b5a61a1e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1378.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1378.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1379.png b/TMessagesProj/src/emojis/twitter/emoji/0_1379.png index 76498d19d9..0e8b320285 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1379.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1379.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_138.png b/TMessagesProj/src/emojis/twitter/emoji/0_138.png index bfeb9c8f84..4294f4823c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_138.png and b/TMessagesProj/src/emojis/twitter/emoji/0_138.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1380.png b/TMessagesProj/src/emojis/twitter/emoji/0_1380.png index c1a3cbb5ad..1b97d93ed5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1380.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1380.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1381.png b/TMessagesProj/src/emojis/twitter/emoji/0_1381.png index 0c1b73d1dd..fd499b5dc8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1381.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1381.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1382.png b/TMessagesProj/src/emojis/twitter/emoji/0_1382.png index 07c8c3d968..cd610b816e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1382.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1382.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1383.png b/TMessagesProj/src/emojis/twitter/emoji/0_1383.png index d357485063..e9e6371a6b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1383.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1383.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1384.png b/TMessagesProj/src/emojis/twitter/emoji/0_1384.png index 1cd862b9a3..96faf3dcb0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1384.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1384.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1385.png b/TMessagesProj/src/emojis/twitter/emoji/0_1385.png index 565de105dc..c096c00ed7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1385.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1385.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1386.png b/TMessagesProj/src/emojis/twitter/emoji/0_1386.png index 16fa11f94b..9dfa1efbe0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1386.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1386.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1387.png b/TMessagesProj/src/emojis/twitter/emoji/0_1387.png index 84adbc906b..a531499baa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1387.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1387.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1388.png b/TMessagesProj/src/emojis/twitter/emoji/0_1388.png index 343c4fbcb7..b23c0a1a3f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1388.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1388.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1389.png b/TMessagesProj/src/emojis/twitter/emoji/0_1389.png index ac6a3f3fda..7c5253f0d8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1389.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1389.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_139.png b/TMessagesProj/src/emojis/twitter/emoji/0_139.png index 5f3e736308..3466a85142 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_139.png and b/TMessagesProj/src/emojis/twitter/emoji/0_139.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1390.png b/TMessagesProj/src/emojis/twitter/emoji/0_1390.png index 6179176bab..05d99bafd1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1390.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1390.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1391.png b/TMessagesProj/src/emojis/twitter/emoji/0_1391.png index c284878cc1..c0dffb59b2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1391.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1391.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1392.png b/TMessagesProj/src/emojis/twitter/emoji/0_1392.png index 1aed9e617a..9271af6b95 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1392.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1392.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1393.png b/TMessagesProj/src/emojis/twitter/emoji/0_1393.png index e96b1d39f6..f5c2e60b22 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1393.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1393.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1394.png b/TMessagesProj/src/emojis/twitter/emoji/0_1394.png index c2e8094d31..145088596a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1394.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1394.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1395.png b/TMessagesProj/src/emojis/twitter/emoji/0_1395.png index c01d8bfc93..07af7f2774 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1395.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1395.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1396.png b/TMessagesProj/src/emojis/twitter/emoji/0_1396.png index 6cd1fd7061..d8e2eacad2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1396.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1396.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1397.png b/TMessagesProj/src/emojis/twitter/emoji/0_1397.png index 42944fdc3a..e35f637c75 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1397.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1397.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1398.png b/TMessagesProj/src/emojis/twitter/emoji/0_1398.png index 364d1a75e2..254e0a44d1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1398.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1398.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1399.png b/TMessagesProj/src/emojis/twitter/emoji/0_1399.png index 19c4fbacb2..a07a12a2ce 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1399.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1399.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_14.png b/TMessagesProj/src/emojis/twitter/emoji/0_14.png index 276570cade..1a88f84ae8 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_14.png and b/TMessagesProj/src/emojis/twitter/emoji/0_14.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_140.png b/TMessagesProj/src/emojis/twitter/emoji/0_140.png index affeea96b7..c1d961b25a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_140.png and b/TMessagesProj/src/emojis/twitter/emoji/0_140.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1400.png b/TMessagesProj/src/emojis/twitter/emoji/0_1400.png index 8e6f3f1935..2e4dcd674f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1400.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1400.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1401.png b/TMessagesProj/src/emojis/twitter/emoji/0_1401.png index c4ccba6613..9bf92bd9d5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1401.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1401.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1402.png b/TMessagesProj/src/emojis/twitter/emoji/0_1402.png index de9b27d478..18a29fe5ea 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1402.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1402.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1403.png b/TMessagesProj/src/emojis/twitter/emoji/0_1403.png index 4080261c91..d00617d5ca 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1403.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1403.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1404.png b/TMessagesProj/src/emojis/twitter/emoji/0_1404.png index 8ddefb6ace..8ff2feae19 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1404.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1404.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1405.png b/TMessagesProj/src/emojis/twitter/emoji/0_1405.png index a81a70af73..5f03a5053a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1405.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1405.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1406.png b/TMessagesProj/src/emojis/twitter/emoji/0_1406.png index 4ef36ba07b..1350a71a09 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1406.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1406.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1407.png b/TMessagesProj/src/emojis/twitter/emoji/0_1407.png index b113a40b08..a13c63f67e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1407.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1407.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1408.png b/TMessagesProj/src/emojis/twitter/emoji/0_1408.png index e7c4227987..f6301b77c8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1408.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1408.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1409.png b/TMessagesProj/src/emojis/twitter/emoji/0_1409.png index 8f2da8a901..cf2b4ab498 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1409.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1409.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_141.png b/TMessagesProj/src/emojis/twitter/emoji/0_141.png index 8564aaae8c..ecff6446a1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_141.png and b/TMessagesProj/src/emojis/twitter/emoji/0_141.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1410.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1410.png index c043a6a940..58b0d524de 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1410.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1410.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1411.png b/TMessagesProj/src/emojis/twitter/emoji/0_1411.png index b2b3ad4151..d7de52e773 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1411.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1411.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1412.png b/TMessagesProj/src/emojis/twitter/emoji/0_1412.png index e6c398fe51..d4a9a4f9e9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1412.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1412.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1413.png b/TMessagesProj/src/emojis/twitter/emoji/0_1413.png index 99ecdb15c7..b711e88e33 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1413.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1413.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1414.png b/TMessagesProj/src/emojis/twitter/emoji/0_1414.png index 42a07a8f44..074c016498 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1414.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1414.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1415.png b/TMessagesProj/src/emojis/twitter/emoji/0_1415.png index 4edd0eae2e..dafa8e6b21 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1415.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1415.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1416.png b/TMessagesProj/src/emojis/twitter/emoji/0_1416.png index b2f280ce6c..fe1571f70b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1416.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1416.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1417.png b/TMessagesProj/src/emojis/twitter/emoji/0_1417.png index bf2f0b78ec..d31d38c54a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1417.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1417.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1418.png b/TMessagesProj/src/emojis/twitter/emoji/0_1418.png index 112346a59d..c8bdedc8ae 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1418.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1418.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1419.png b/TMessagesProj/src/emojis/twitter/emoji/0_1419.png index e7d2f002c9..7c3be3892b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1419.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1419.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_142.png b/TMessagesProj/src/emojis/twitter/emoji/0_142.png index 1d8a825cbe..b9b5af2b7d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_142.png and b/TMessagesProj/src/emojis/twitter/emoji/0_142.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1420.png b/TMessagesProj/src/emojis/twitter/emoji/0_1420.png index 44d3037f22..93cc22f931 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1420.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1420.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1421.png b/TMessagesProj/src/emojis/twitter/emoji/0_1421.png index 3f06e27580..24728d76a6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1421.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1421.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1422.png b/TMessagesProj/src/emojis/twitter/emoji/0_1422.png index 4323b12beb..71adb55611 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1422.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1422.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1423.png b/TMessagesProj/src/emojis/twitter/emoji/0_1423.png index 4125d734f2..bc48d87141 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1423.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1423.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1424.png b/TMessagesProj/src/emojis/twitter/emoji/0_1424.png index a098b0c6b9..8ddb2e73ba 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1424.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1424.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1425.png b/TMessagesProj/src/emojis/twitter/emoji/0_1425.png index d29211663a..f6f8d64404 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1425.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1425.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1426.png b/TMessagesProj/src/emojis/twitter/emoji/0_1426.png index bf89f4bc2b..d87ebaa6fb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1426.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1426.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1427.png b/TMessagesProj/src/emojis/twitter/emoji/0_1427.png index 538d954aa4..0f46374e58 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1427.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1427.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1428.png b/TMessagesProj/src/emojis/twitter/emoji/0_1428.png index 09ef02b6f1..1de39ce809 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1428.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1428.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1429.png b/TMessagesProj/src/emojis/twitter/emoji/0_1429.png index 480a350ace..33467d4414 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1429.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1429.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_143.png b/TMessagesProj/src/emojis/twitter/emoji/0_143.png index d5e1509103..72d181a4ad 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_143.png and b/TMessagesProj/src/emojis/twitter/emoji/0_143.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1430.png b/TMessagesProj/src/emojis/twitter/emoji/0_1430.png index 7d8c2ac045..a499f09980 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1430.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1430.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1431.png b/TMessagesProj/src/emojis/twitter/emoji/0_1431.png index fdb9d44e4d..0f3518ae0b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1431.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1431.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1432.png b/TMessagesProj/src/emojis/twitter/emoji/0_1432.png index 1ae75307ce..b2a9d79754 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1432.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1432.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1433.png b/TMessagesProj/src/emojis/twitter/emoji/0_1433.png index facffe17ad..9f9e934fad 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1433.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1433.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1434.png b/TMessagesProj/src/emojis/twitter/emoji/0_1434.png index 8368037366..9f84b798c0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1434.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1434.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1435.png b/TMessagesProj/src/emojis/twitter/emoji/0_1435.png index 9cab339fc9..c1909fb089 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1435.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1435.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1436.png b/TMessagesProj/src/emojis/twitter/emoji/0_1436.png index 78c23e0149..1422f15f1b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1436.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1436.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1437.png b/TMessagesProj/src/emojis/twitter/emoji/0_1437.png index 77d0607ff3..8bc3aa150b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1437.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1437.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1438.png b/TMessagesProj/src/emojis/twitter/emoji/0_1438.png index b9dd06eb59..dd0d464a85 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1438.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1438.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1439.png b/TMessagesProj/src/emojis/twitter/emoji/0_1439.png index 6ed717dcbd..0962c07df1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1439.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1439.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_144.png b/TMessagesProj/src/emojis/twitter/emoji/0_144.png index ef0188012d..ad68e1c57d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_144.png and b/TMessagesProj/src/emojis/twitter/emoji/0_144.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1440.png b/TMessagesProj/src/emojis/twitter/emoji/0_1440.png index e4224f7d7b..add3284589 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1440.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1440.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1441.png b/TMessagesProj/src/emojis/twitter/emoji/0_1441.png index 5bbe1ae671..281d746ce0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1441.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1441.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1442.png b/TMessagesProj/src/emojis/twitter/emoji/0_1442.png index 2cc650bc1e..c25e1ea533 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1442.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1442.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1443.png b/TMessagesProj/src/emojis/twitter/emoji/0_1443.png index 490e11a65a..63311d13e2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1443.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1443.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1444.png b/TMessagesProj/src/emojis/twitter/emoji/0_1444.png index 2d2ef081c4..bcaeacac95 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1444.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1444.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1445.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1445.png index 252ec3e330..3110aa5450 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1445.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1445.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1446.png b/TMessagesProj/src/emojis/twitter/emoji/0_1446.png index 19e35eb74f..8cb94745d1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1446.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1446.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1447.png b/TMessagesProj/src/emojis/twitter/emoji/0_1447.png index 4e226e0a25..82be7eda29 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1447.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1447.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1448.png b/TMessagesProj/src/emojis/twitter/emoji/0_1448.png index 561fcfceda..9492d24c0d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1448.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1448.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1449.png b/TMessagesProj/src/emojis/twitter/emoji/0_1449.png index 1d2d873dad..2ef8d36de4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1449.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1449.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_145.png b/TMessagesProj/src/emojis/twitter/emoji/0_145.png index 87e257d063..94e4d5273e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_145.png and b/TMessagesProj/src/emojis/twitter/emoji/0_145.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1450.png b/TMessagesProj/src/emojis/twitter/emoji/0_1450.png index 9b91343222..da8dc3761f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1450.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1450.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1451.png b/TMessagesProj/src/emojis/twitter/emoji/0_1451.png index d413be56c7..1b901b552a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1451.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1451.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1452.png b/TMessagesProj/src/emojis/twitter/emoji/0_1452.png index 436adda282..d419085af7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1452.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1452.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1453.png b/TMessagesProj/src/emojis/twitter/emoji/0_1453.png index 516c3cb8d0..71a3b8d962 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1453.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1453.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1454.png b/TMessagesProj/src/emojis/twitter/emoji/0_1454.png index a647baef56..390d5d3bdd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1454.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1454.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1455.png b/TMessagesProj/src/emojis/twitter/emoji/0_1455.png index 55c77d4bd6..3bcf7a11b5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1455.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1455.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1456.png b/TMessagesProj/src/emojis/twitter/emoji/0_1456.png index 266b38682e..d9603fbb2f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1456.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1456.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1457.png b/TMessagesProj/src/emojis/twitter/emoji/0_1457.png index 93a16c44ae..8309f59b94 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1457.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1457.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1458.png b/TMessagesProj/src/emojis/twitter/emoji/0_1458.png index 66c39ccea6..9cd946b9e7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1458.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1458.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1459.png b/TMessagesProj/src/emojis/twitter/emoji/0_1459.png index 8064522400..d6084c31f1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1459.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1459.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_146.png b/TMessagesProj/src/emojis/twitter/emoji/0_146.png index c62a020672..55f9696a27 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_146.png and b/TMessagesProj/src/emojis/twitter/emoji/0_146.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1460.png b/TMessagesProj/src/emojis/twitter/emoji/0_1460.png index 7b1629c483..633d1ec9f1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1460.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1460.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1461.png b/TMessagesProj/src/emojis/twitter/emoji/0_1461.png index 58eb46f08f..620f350693 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1461.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1461.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1462.png b/TMessagesProj/src/emojis/twitter/emoji/0_1462.png index f66e5a9b2d..e853ff05c2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1462.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1462.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1463.png b/TMessagesProj/src/emojis/twitter/emoji/0_1463.png index 686b6864cb..3735315725 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1463.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1463.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1464.png b/TMessagesProj/src/emojis/twitter/emoji/0_1464.png index fc0eacabce..bff22c74f8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1464.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1464.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1465.png b/TMessagesProj/src/emojis/twitter/emoji/0_1465.png index a47ee16b90..6197f74497 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1465.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1465.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1466.png b/TMessagesProj/src/emojis/twitter/emoji/0_1466.png index 8fae3d11be..1ea694d9d4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1466.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1466.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1467.png b/TMessagesProj/src/emojis/twitter/emoji/0_1467.png index 5da90a2cfa..e636c0c8fb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1467.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1467.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1468.png b/TMessagesProj/src/emojis/twitter/emoji/0_1468.png index 34265ce39b..dbfe30640c 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1468.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1468.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1469.png b/TMessagesProj/src/emojis/twitter/emoji/0_1469.png index 6d6a62234b..2d2e17ebc3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1469.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1469.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_147.png b/TMessagesProj/src/emojis/twitter/emoji/0_147.png index e110b27187..ae93280505 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_147.png and b/TMessagesProj/src/emojis/twitter/emoji/0_147.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1470.png b/TMessagesProj/src/emojis/twitter/emoji/0_1470.png index feb35fe0e9..6f6137542b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1470.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1470.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1471.png b/TMessagesProj/src/emojis/twitter/emoji/0_1471.png index 60ae9f8565..53b1f14b28 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1471.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1471.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1472.png b/TMessagesProj/src/emojis/twitter/emoji/0_1472.png index 582d20f47b..fa8d8d654a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1472.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1472.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1473.png b/TMessagesProj/src/emojis/twitter/emoji/0_1473.png index 5b18a7f96e..4c50182a8d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1473.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1473.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1474.png b/TMessagesProj/src/emojis/twitter/emoji/0_1474.png index a92d2ea888..0b1c024ca2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1474.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1474.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1475.png b/TMessagesProj/src/emojis/twitter/emoji/0_1475.png index cc39d7aa5b..f64a1975f1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1475.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1475.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1476.png b/TMessagesProj/src/emojis/twitter/emoji/0_1476.png index 4b88995a24..d8f05497b8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1476.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1476.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1477.png b/TMessagesProj/src/emojis/twitter/emoji/0_1477.png index 77fd1cc234..663fedf437 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1477.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1477.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1478.png b/TMessagesProj/src/emojis/twitter/emoji/0_1478.png index dbc6a79328..7ef48a51e3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1478.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1478.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1479.png b/TMessagesProj/src/emojis/twitter/emoji/0_1479.png index 21d29eb06e..b247a70f32 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1479.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1479.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_148.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_148.png index 1274acf7f9..77fc490ac9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_148.png and b/TMessagesProj/src/emojis/twitter/emoji/0_148.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1480.png b/TMessagesProj/src/emojis/twitter/emoji/0_1480.png index 9921605f41..0aa6acbfcf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1480.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1480.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1481.png b/TMessagesProj/src/emojis/twitter/emoji/0_1481.png index 3f8fac1e22..054e91cc0c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1481.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1481.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1482.png b/TMessagesProj/src/emojis/twitter/emoji/0_1482.png index 52b1ced752..dd51ccb64b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1482.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1482.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1483.png b/TMessagesProj/src/emojis/twitter/emoji/0_1483.png index 2c9253467d..2a3da1361e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1483.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1483.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1484.png b/TMessagesProj/src/emojis/twitter/emoji/0_1484.png index 5e02dfe512..06fc028ee3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1484.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1484.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1485.png b/TMessagesProj/src/emojis/twitter/emoji/0_1485.png index 27f219f6f8..f302aca28d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1485.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1485.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1486.png b/TMessagesProj/src/emojis/twitter/emoji/0_1486.png index 91b6de5349..5f67c80896 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1486.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1486.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1487.png b/TMessagesProj/src/emojis/twitter/emoji/0_1487.png index cba1e5038a..797c53d558 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1487.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1487.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1488.png b/TMessagesProj/src/emojis/twitter/emoji/0_1488.png index 751e4ecf09..eb58cd0cb2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1488.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1488.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1489.png b/TMessagesProj/src/emojis/twitter/emoji/0_1489.png index e6c4489ad7..5986831b70 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1489.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1489.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_149.png b/TMessagesProj/src/emojis/twitter/emoji/0_149.png index 99ec162ca8..9b47842296 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_149.png and b/TMessagesProj/src/emojis/twitter/emoji/0_149.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1490.png b/TMessagesProj/src/emojis/twitter/emoji/0_1490.png index 79d98316f2..add3239fdd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1490.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1490.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1491.png b/TMessagesProj/src/emojis/twitter/emoji/0_1491.png index 9a413561eb..5900293f9a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1491.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1491.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1492.png b/TMessagesProj/src/emojis/twitter/emoji/0_1492.png index 253a1653ca..8c11123e11 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1492.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1492.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1493.png b/TMessagesProj/src/emojis/twitter/emoji/0_1493.png index 08be567585..c83325087e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1493.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1493.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1494.png b/TMessagesProj/src/emojis/twitter/emoji/0_1494.png index 34009c2789..7aacc088fe 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1494.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1494.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1495.png b/TMessagesProj/src/emojis/twitter/emoji/0_1495.png index 91a175236f..7f8e1ef162 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1495.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1495.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1496.png b/TMessagesProj/src/emojis/twitter/emoji/0_1496.png index 99faf16b37..8a8517a04b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1496.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1496.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1497.png b/TMessagesProj/src/emojis/twitter/emoji/0_1497.png index 12c10af43b..d541075344 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1497.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1497.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1498.png b/TMessagesProj/src/emojis/twitter/emoji/0_1498.png index f1621b0cc4..b8c2d6dd30 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1498.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1498.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1499.png b/TMessagesProj/src/emojis/twitter/emoji/0_1499.png index 24d2466f80..419f5e62ba 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1499.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1499.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_15.png b/TMessagesProj/src/emojis/twitter/emoji/0_15.png index 05c13a9408..7a782f0e3a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_15.png and b/TMessagesProj/src/emojis/twitter/emoji/0_15.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_150.png b/TMessagesProj/src/emojis/twitter/emoji/0_150.png index 5869cff0a0..c368356e01 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_150.png and b/TMessagesProj/src/emojis/twitter/emoji/0_150.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1500.png b/TMessagesProj/src/emojis/twitter/emoji/0_1500.png index 0c022f59e0..30d7ef80f0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1500.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1500.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1501.png b/TMessagesProj/src/emojis/twitter/emoji/0_1501.png index 9a86797c69..0ad74a2218 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1501.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1501.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1502.png b/TMessagesProj/src/emojis/twitter/emoji/0_1502.png index c9080a5674..3df32dda3e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1502.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1502.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1503.png b/TMessagesProj/src/emojis/twitter/emoji/0_1503.png index 009021f1eb..385d43da53 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1503.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1503.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1504.png b/TMessagesProj/src/emojis/twitter/emoji/0_1504.png index 10b7f145d2..423664504e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1504.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1504.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1505.png b/TMessagesProj/src/emojis/twitter/emoji/0_1505.png index 78c0cf5f6d..bfa90adc7e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1505.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1505.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1506.png b/TMessagesProj/src/emojis/twitter/emoji/0_1506.png index 6194346fa3..54debe01e4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1506.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1506.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1507.png b/TMessagesProj/src/emojis/twitter/emoji/0_1507.png index 17bb94e387..8cf949e813 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1507.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1507.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1508.png b/TMessagesProj/src/emojis/twitter/emoji/0_1508.png index 60440ba794..9f20c43b51 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1508.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1508.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1509.png b/TMessagesProj/src/emojis/twitter/emoji/0_1509.png index 5a4f7cab1a..2ee44e74c6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1509.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1509.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_151.png b/TMessagesProj/src/emojis/twitter/emoji/0_151.png index 15f4eb32d4..5b726361fe 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_151.png and b/TMessagesProj/src/emojis/twitter/emoji/0_151.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1510.png b/TMessagesProj/src/emojis/twitter/emoji/0_1510.png index 108c4c170e..8c7108fcdb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1510.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1510.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1511.png b/TMessagesProj/src/emojis/twitter/emoji/0_1511.png index ab049746e6..81a3f809cb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1511.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1511.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1512.png b/TMessagesProj/src/emojis/twitter/emoji/0_1512.png index 99665feff2..d39350ca8c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1512.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1512.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1513.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1513.png index 254f559936..487046175a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1513.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1513.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1514.png b/TMessagesProj/src/emojis/twitter/emoji/0_1514.png index 5903f2edf8..8a59d33b17 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1514.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1514.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1515.png b/TMessagesProj/src/emojis/twitter/emoji/0_1515.png index 3c6d0b7d62..51980f4667 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1515.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1515.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1516.png b/TMessagesProj/src/emojis/twitter/emoji/0_1516.png index 2e4826b9fe..82644fa878 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1516.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1516.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1517.png b/TMessagesProj/src/emojis/twitter/emoji/0_1517.png index 595c673826..2294a7beae 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1517.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1517.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1518.png b/TMessagesProj/src/emojis/twitter/emoji/0_1518.png index f5b2f75d90..715e49fac9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1518.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1518.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1519.png b/TMessagesProj/src/emojis/twitter/emoji/0_1519.png index f64027aa12..be3a75ad4e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1519.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1519.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_152.png b/TMessagesProj/src/emojis/twitter/emoji/0_152.png index 152069cedf..384d32d532 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_152.png and b/TMessagesProj/src/emojis/twitter/emoji/0_152.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1520.png b/TMessagesProj/src/emojis/twitter/emoji/0_1520.png index 50ea47bf6f..16993e6d98 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1520.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1520.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1521.png b/TMessagesProj/src/emojis/twitter/emoji/0_1521.png index 218ffa5c3d..4ed279a0df 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1521.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1521.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1522.png b/TMessagesProj/src/emojis/twitter/emoji/0_1522.png index c2ab7a465a..37b8e2f583 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1522.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1522.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1523.png b/TMessagesProj/src/emojis/twitter/emoji/0_1523.png index 2a5574bbdb..b4761994ef 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1523.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1523.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1524.png b/TMessagesProj/src/emojis/twitter/emoji/0_1524.png index c4c27f92c5..822229a1c1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1524.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1524.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1525.png b/TMessagesProj/src/emojis/twitter/emoji/0_1525.png index 97e2fa7bfc..9cc9f42a2d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1525.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1525.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1526.png b/TMessagesProj/src/emojis/twitter/emoji/0_1526.png index 123deb4ab7..55a6821fb9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1526.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1526.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1527.png b/TMessagesProj/src/emojis/twitter/emoji/0_1527.png index 08bba40670..63aa7efd55 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1527.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1527.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1528.png b/TMessagesProj/src/emojis/twitter/emoji/0_1528.png index 71c087e34f..55e8a35f77 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1528.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1528.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1529.png b/TMessagesProj/src/emojis/twitter/emoji/0_1529.png index df716a101f..a344911826 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1529.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1529.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_153.png b/TMessagesProj/src/emojis/twitter/emoji/0_153.png index 16e6a00434..205a07916e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_153.png and b/TMessagesProj/src/emojis/twitter/emoji/0_153.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1530.png b/TMessagesProj/src/emojis/twitter/emoji/0_1530.png index 5572ebed39..855167c119 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1530.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1530.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1531.png b/TMessagesProj/src/emojis/twitter/emoji/0_1531.png index fa74359d68..b7d7181eb6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1531.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1531.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1532.png b/TMessagesProj/src/emojis/twitter/emoji/0_1532.png index bf2f8f8389..29bbb50c8b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1532.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1532.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1533.png b/TMessagesProj/src/emojis/twitter/emoji/0_1533.png index 10a3a23b8e..084510e690 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1533.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1533.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1534.png b/TMessagesProj/src/emojis/twitter/emoji/0_1534.png index d8d893bed6..04298ba57c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1534.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1534.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1535.png b/TMessagesProj/src/emojis/twitter/emoji/0_1535.png index 7af1d290ad..4ae9a3c02f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1535.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1535.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1536.png b/TMessagesProj/src/emojis/twitter/emoji/0_1536.png index 7a13b90bbd..53716c8f19 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1536.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1536.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1537.png b/TMessagesProj/src/emojis/twitter/emoji/0_1537.png index a78bf02964..744dd86a71 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1537.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1537.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1538.png b/TMessagesProj/src/emojis/twitter/emoji/0_1538.png index 776ad5b3f3..b951b15792 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1538.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1538.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1539.png b/TMessagesProj/src/emojis/twitter/emoji/0_1539.png index 1940d02c9d..c2c8fafa02 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1539.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1539.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_154.png b/TMessagesProj/src/emojis/twitter/emoji/0_154.png index 0939b8b70d..0eb1b03e54 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_154.png and b/TMessagesProj/src/emojis/twitter/emoji/0_154.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1540.png b/TMessagesProj/src/emojis/twitter/emoji/0_1540.png index 41f2d9fa6f..b39bef38ea 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1540.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1540.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1541.png b/TMessagesProj/src/emojis/twitter/emoji/0_1541.png index 39c3698d0b..e83aedda1a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1541.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1541.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1542.png b/TMessagesProj/src/emojis/twitter/emoji/0_1542.png index cf5e3e3e52..2f0616487f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1542.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1542.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1543.png b/TMessagesProj/src/emojis/twitter/emoji/0_1543.png index aedb264df5..21b60a12eb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1543.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1543.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1544.png b/TMessagesProj/src/emojis/twitter/emoji/0_1544.png index 3f0e6c7280..c80909d589 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1544.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1544.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1545.png b/TMessagesProj/src/emojis/twitter/emoji/0_1545.png index 9a1e62e232..34930f074c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1545.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1545.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1546.png b/TMessagesProj/src/emojis/twitter/emoji/0_1546.png index 6923fa262a..74ce75cd96 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1546.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1546.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1547.png b/TMessagesProj/src/emojis/twitter/emoji/0_1547.png index e707b0f8dc..22547ff7a9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1547.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1547.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1548.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1548.png index 0193416fb7..3ba8da972c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1548.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1548.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1549.png b/TMessagesProj/src/emojis/twitter/emoji/0_1549.png index 429221512d..bce1d531d8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1549.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1549.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_155.png b/TMessagesProj/src/emojis/twitter/emoji/0_155.png index 0bdade6f7f..5df55e1c2f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_155.png and b/TMessagesProj/src/emojis/twitter/emoji/0_155.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1550.png b/TMessagesProj/src/emojis/twitter/emoji/0_1550.png index 3fdf1897b0..fbaf8abf4f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1550.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1550.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1551.png b/TMessagesProj/src/emojis/twitter/emoji/0_1551.png index f9e27193c1..e7be13de97 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1551.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1551.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1552.png b/TMessagesProj/src/emojis/twitter/emoji/0_1552.png index 5644b3a3d2..8c5e167f66 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1552.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1552.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1553.png b/TMessagesProj/src/emojis/twitter/emoji/0_1553.png index c2beae017d..4008447b89 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1553.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1553.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1554.png b/TMessagesProj/src/emojis/twitter/emoji/0_1554.png index 991d8f79d7..a257e36f94 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1554.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1554.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1555.png b/TMessagesProj/src/emojis/twitter/emoji/0_1555.png index 3138b0adb7..9db2dc7a2e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1555.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1555.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1556.png b/TMessagesProj/src/emojis/twitter/emoji/0_1556.png index 33b9afe451..71c182aa76 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1556.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1556.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1557.png b/TMessagesProj/src/emojis/twitter/emoji/0_1557.png index 18716f03c6..13a8f01a2e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1557.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1557.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1558.png b/TMessagesProj/src/emojis/twitter/emoji/0_1558.png index 78a76a2c86..eec5af3311 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1558.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1558.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1559.png b/TMessagesProj/src/emojis/twitter/emoji/0_1559.png index a730e285bf..dc9f2c3260 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1559.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1559.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_156.png b/TMessagesProj/src/emojis/twitter/emoji/0_156.png index 5e303385dd..2e103013e5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_156.png and b/TMessagesProj/src/emojis/twitter/emoji/0_156.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1560.png b/TMessagesProj/src/emojis/twitter/emoji/0_1560.png index a9fa69065b..cc0074949a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1560.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1560.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1561.png b/TMessagesProj/src/emojis/twitter/emoji/0_1561.png index cff64d8373..4f4c15ab54 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1561.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1561.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1562.png b/TMessagesProj/src/emojis/twitter/emoji/0_1562.png index 07822828df..7d434ae4fa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1562.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1562.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1563.png b/TMessagesProj/src/emojis/twitter/emoji/0_1563.png index d13a0d4a73..94c8740fe9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1563.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1563.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1564.png b/TMessagesProj/src/emojis/twitter/emoji/0_1564.png index 97d3cf2d56..ea2d9082c2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1564.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1564.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1565.png b/TMessagesProj/src/emojis/twitter/emoji/0_1565.png index 94623eb3b6..14f9b4f047 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1565.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1565.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1566.png b/TMessagesProj/src/emojis/twitter/emoji/0_1566.png index dba61f81e6..da9f2560dc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1566.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1566.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1567.png b/TMessagesProj/src/emojis/twitter/emoji/0_1567.png index b90ab583e8..c3f6a5e0b6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1567.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1567.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1568.png b/TMessagesProj/src/emojis/twitter/emoji/0_1568.png index 3aad6e325d..f485948891 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1568.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1568.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1569.png b/TMessagesProj/src/emojis/twitter/emoji/0_1569.png index 411b41e3f0..a9b82ada26 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1569.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1569.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_157.png b/TMessagesProj/src/emojis/twitter/emoji/0_157.png index c41ec09e4c..ae103a3607 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_157.png and b/TMessagesProj/src/emojis/twitter/emoji/0_157.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1570.png b/TMessagesProj/src/emojis/twitter/emoji/0_1570.png index 324c0070e5..de8dd1a00f 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1570.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1570.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1571.png b/TMessagesProj/src/emojis/twitter/emoji/0_1571.png index 7f35246887..3957a3469a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1571.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1571.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1572.png b/TMessagesProj/src/emojis/twitter/emoji/0_1572.png index 0a4fde5e42..4e2cc7b73a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1572.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1572.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1573.png b/TMessagesProj/src/emojis/twitter/emoji/0_1573.png index 3611fe129e..008a9ded6f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1573.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1573.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1574.png b/TMessagesProj/src/emojis/twitter/emoji/0_1574.png index aabaaf11bb..bca24bc7d6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1574.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1574.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1575.png b/TMessagesProj/src/emojis/twitter/emoji/0_1575.png index 7234b8e71b..ea8d835838 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1575.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1575.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1576.png b/TMessagesProj/src/emojis/twitter/emoji/0_1576.png index 0c57bf2dc4..f88a6728b4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1576.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1576.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1577.png b/TMessagesProj/src/emojis/twitter/emoji/0_1577.png index 1f03b4dd98..a7ed38740a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1577.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1577.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1578.png b/TMessagesProj/src/emojis/twitter/emoji/0_1578.png index 5111971b25..cc20a4dce5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1578.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1578.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1579.png b/TMessagesProj/src/emojis/twitter/emoji/0_1579.png index ea8a14c70f..d65272df20 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1579.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1579.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_158.png b/TMessagesProj/src/emojis/twitter/emoji/0_158.png index 78d83af66d..ebb0bb4b6e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_158.png and b/TMessagesProj/src/emojis/twitter/emoji/0_158.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1580.png b/TMessagesProj/src/emojis/twitter/emoji/0_1580.png index ca9b8855f5..fc3a0b26db 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1580.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1580.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1581.png b/TMessagesProj/src/emojis/twitter/emoji/0_1581.png index 5423690f78..ae5f9a123f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1581.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1581.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1582.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1582.png index ac90a1bf63..d15bf8cd10 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1582.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1582.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1583.png b/TMessagesProj/src/emojis/twitter/emoji/0_1583.png index d49d57b78d..6c8374b670 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1583.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1583.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1584.png b/TMessagesProj/src/emojis/twitter/emoji/0_1584.png index 3d709a50c3..169a9c1ad9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1584.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1584.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1585.png b/TMessagesProj/src/emojis/twitter/emoji/0_1585.png index 4a1d0006b9..98ec87a0ed 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1585.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1585.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1586.png b/TMessagesProj/src/emojis/twitter/emoji/0_1586.png index 32d8d46b4c..095736dd9a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1586.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1586.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1587.png b/TMessagesProj/src/emojis/twitter/emoji/0_1587.png index dc629b3c47..1594155dc5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1587.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1587.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1588.png b/TMessagesProj/src/emojis/twitter/emoji/0_1588.png index aa1d8dc73e..8aaa0b2233 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1588.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1588.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1589.png b/TMessagesProj/src/emojis/twitter/emoji/0_1589.png index 3c194c5b3e..548123dc2d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1589.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1589.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_159.png b/TMessagesProj/src/emojis/twitter/emoji/0_159.png index 7da14896cd..39cc344cbe 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_159.png and b/TMessagesProj/src/emojis/twitter/emoji/0_159.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1590.png b/TMessagesProj/src/emojis/twitter/emoji/0_1590.png index f704516240..423fb21fec 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1590.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1590.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1591.png b/TMessagesProj/src/emojis/twitter/emoji/0_1591.png index ba098d018b..bf5d717f37 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1591.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1591.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1592.png b/TMessagesProj/src/emojis/twitter/emoji/0_1592.png index 331b8502a7..f81463ef47 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1592.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1592.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1593.png b/TMessagesProj/src/emojis/twitter/emoji/0_1593.png index dd0b857a48..1243af6abd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1593.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1593.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1594.png b/TMessagesProj/src/emojis/twitter/emoji/0_1594.png index 585f68050e..4bacdb72c6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1594.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1594.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1595.png b/TMessagesProj/src/emojis/twitter/emoji/0_1595.png index 0bf8fdf9c7..596a8692ab 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1595.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1595.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1596.png b/TMessagesProj/src/emojis/twitter/emoji/0_1596.png index 2cb8f3e6b4..acdca703db 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1596.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1596.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1597.png b/TMessagesProj/src/emojis/twitter/emoji/0_1597.png index c907ea6034..975102e78c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1597.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1597.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1598.png b/TMessagesProj/src/emojis/twitter/emoji/0_1598.png index d8042a7e9f..b8169f4df2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1598.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1598.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1599.png b/TMessagesProj/src/emojis/twitter/emoji/0_1599.png index 94d55e9a4b..1dbb043321 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1599.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1599.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_16.png b/TMessagesProj/src/emojis/twitter/emoji/0_16.png index e63edd2e48..faa629641c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_16.png and b/TMessagesProj/src/emojis/twitter/emoji/0_16.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_160.png b/TMessagesProj/src/emojis/twitter/emoji/0_160.png index cee83a6cd2..6ae1b49990 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_160.png and b/TMessagesProj/src/emojis/twitter/emoji/0_160.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1600.png b/TMessagesProj/src/emojis/twitter/emoji/0_1600.png index 5625b643a2..10e050f191 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1600.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1600.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1601.png b/TMessagesProj/src/emojis/twitter/emoji/0_1601.png index 431dfd141f..e4657a0e4a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1601.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1601.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1602.png b/TMessagesProj/src/emojis/twitter/emoji/0_1602.png index 2a11a4f113..183bdf9d05 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1602.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1602.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1603.png b/TMessagesProj/src/emojis/twitter/emoji/0_1603.png index aab3da4e8d..0c0c326818 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1603.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1603.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1604.png b/TMessagesProj/src/emojis/twitter/emoji/0_1604.png index 1d872242f3..c1b5af2359 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1604.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1604.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1605.png b/TMessagesProj/src/emojis/twitter/emoji/0_1605.png index f59c821f75..0fc363f2cb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1605.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1605.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1606.png b/TMessagesProj/src/emojis/twitter/emoji/0_1606.png index 9ab6424565..f537970579 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1606.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1606.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1607.png b/TMessagesProj/src/emojis/twitter/emoji/0_1607.png index 5906dbba8a..47e865fddb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1607.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1607.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1608.png b/TMessagesProj/src/emojis/twitter/emoji/0_1608.png index a7436a1d85..8008902fe5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1608.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1608.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1609.png b/TMessagesProj/src/emojis/twitter/emoji/0_1609.png index 2487d663c8..33da41f0a8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1609.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1609.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_161.png b/TMessagesProj/src/emojis/twitter/emoji/0_161.png index 9889b85759..51ea7bfdbf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_161.png and b/TMessagesProj/src/emojis/twitter/emoji/0_161.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1610.png b/TMessagesProj/src/emojis/twitter/emoji/0_1610.png index 2f4f6400b2..e6e90d34ec 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1610.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1610.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1611.png b/TMessagesProj/src/emojis/twitter/emoji/0_1611.png index a215dfbc33..685124e791 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1611.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1611.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1612.png b/TMessagesProj/src/emojis/twitter/emoji/0_1612.png index 88e392aaaa..b6419596ac 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1612.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1612.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1613.png b/TMessagesProj/src/emojis/twitter/emoji/0_1613.png index 3b842589a5..cb6e34993d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1613.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1613.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1614.png b/TMessagesProj/src/emojis/twitter/emoji/0_1614.png index 8a23299921..564613f8cc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1614.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1614.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1615.png b/TMessagesProj/src/emojis/twitter/emoji/0_1615.png index e47929b9f1..4e81e82b19 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1615.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1615.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1616.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1616.png index 1ab7b49528..eb8c31b6b6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1616.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1616.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1617.png b/TMessagesProj/src/emojis/twitter/emoji/0_1617.png index 7094e0cc18..155c7e365f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1617.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1617.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1618.png b/TMessagesProj/src/emojis/twitter/emoji/0_1618.png index 462619825f..0cc23fde50 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1618.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1618.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1619.png b/TMessagesProj/src/emojis/twitter/emoji/0_1619.png index 8d2d7e335b..d9c7c03152 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1619.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1619.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_162.png b/TMessagesProj/src/emojis/twitter/emoji/0_162.png index 9f2e3f209f..9fd8e7d73b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_162.png and b/TMessagesProj/src/emojis/twitter/emoji/0_162.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1620.png b/TMessagesProj/src/emojis/twitter/emoji/0_1620.png index b3ab098200..763af9c6de 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1620.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1620.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1621.png b/TMessagesProj/src/emojis/twitter/emoji/0_1621.png index 4bc300f7cd..49eaf62200 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1621.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1621.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1622.png b/TMessagesProj/src/emojis/twitter/emoji/0_1622.png index f42e72ea45..1935171c5a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1622.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1622.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1623.png b/TMessagesProj/src/emojis/twitter/emoji/0_1623.png index 0b2109f248..f323fdaa33 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1623.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1623.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1624.png b/TMessagesProj/src/emojis/twitter/emoji/0_1624.png index 8d88ecc9b0..3aaf500601 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1624.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1624.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1625.png b/TMessagesProj/src/emojis/twitter/emoji/0_1625.png index 0c566a794e..d7c0317b52 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1625.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1625.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1626.png b/TMessagesProj/src/emojis/twitter/emoji/0_1626.png index 5d3a85f605..301a90e543 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1626.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1626.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1627.png b/TMessagesProj/src/emojis/twitter/emoji/0_1627.png index 532face0c0..894ec1fe97 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1627.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1627.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1628.png b/TMessagesProj/src/emojis/twitter/emoji/0_1628.png index 74ffe9028c..d90d980abc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1628.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1628.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1629.png b/TMessagesProj/src/emojis/twitter/emoji/0_1629.png index 5881486953..036c981224 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1629.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1629.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_163.png b/TMessagesProj/src/emojis/twitter/emoji/0_163.png index 57a7f4e3f3..36653e7481 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_163.png and b/TMessagesProj/src/emojis/twitter/emoji/0_163.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1630.png b/TMessagesProj/src/emojis/twitter/emoji/0_1630.png index 898e4d26c9..bf8884f57e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1630.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1630.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1631.png b/TMessagesProj/src/emojis/twitter/emoji/0_1631.png index 73c4df5bfd..e4023fff8a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1631.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1631.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1632.png b/TMessagesProj/src/emojis/twitter/emoji/0_1632.png index 000fcd8eb1..7cd108478a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1632.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1632.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1633.png b/TMessagesProj/src/emojis/twitter/emoji/0_1633.png index ee76b091d2..042151c35f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1633.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1633.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1634.png b/TMessagesProj/src/emojis/twitter/emoji/0_1634.png index ba05f39991..12a7150ff4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1634.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1634.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1635.png b/TMessagesProj/src/emojis/twitter/emoji/0_1635.png index 3656922a56..5248657b2a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1635.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1635.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1636.png b/TMessagesProj/src/emojis/twitter/emoji/0_1636.png index 802446407b..b57198ffa3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1636.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1636.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1637.png b/TMessagesProj/src/emojis/twitter/emoji/0_1637.png index ee86472482..045cd21a62 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1637.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1637.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1638.png b/TMessagesProj/src/emojis/twitter/emoji/0_1638.png index dcd8c64c92..8a4bb211b6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1638.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1638.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1639.png b/TMessagesProj/src/emojis/twitter/emoji/0_1639.png index ccd996caef..80b046ddd1 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1639.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1639.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_164.png b/TMessagesProj/src/emojis/twitter/emoji/0_164.png index de6aa76742..90c2599d22 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_164.png and b/TMessagesProj/src/emojis/twitter/emoji/0_164.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1640.png b/TMessagesProj/src/emojis/twitter/emoji/0_1640.png index d7f193617b..bcd3634881 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1640.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1640.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1641.png b/TMessagesProj/src/emojis/twitter/emoji/0_1641.png index 658314a04d..c54b53365f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1641.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1641.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1642.png b/TMessagesProj/src/emojis/twitter/emoji/0_1642.png index ec11df264a..090bf2b970 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1642.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1642.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1643.png b/TMessagesProj/src/emojis/twitter/emoji/0_1643.png index a92f517cf0..bac4ac059e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1643.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1643.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1644.png b/TMessagesProj/src/emojis/twitter/emoji/0_1644.png index 8c86cef9ba..ce23fb10db 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1644.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1644.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1645.png b/TMessagesProj/src/emojis/twitter/emoji/0_1645.png index 9255aa567c..cc8a937dbd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1645.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1645.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1646.png b/TMessagesProj/src/emojis/twitter/emoji/0_1646.png index fab511ce7d..8f68be62ad 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1646.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1646.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1647.png b/TMessagesProj/src/emojis/twitter/emoji/0_1647.png index 36293b1643..168f3006e2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1647.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1647.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1648.png b/TMessagesProj/src/emojis/twitter/emoji/0_1648.png index 150b05d993..71a2c72b6b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1648.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1648.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1649.png b/TMessagesProj/src/emojis/twitter/emoji/0_1649.png index 8c1a0b6d73..76594d8988 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1649.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1649.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_165.png b/TMessagesProj/src/emojis/twitter/emoji/0_165.png index 5c12da251d..44909fdbeb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_165.png and b/TMessagesProj/src/emojis/twitter/emoji/0_165.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1650.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1650.png index 04d55477d5..5e33ba6139 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1650.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1650.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1651.png b/TMessagesProj/src/emojis/twitter/emoji/0_1651.png index 2cae692cc8..101b84bb3e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1651.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1651.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1652.png b/TMessagesProj/src/emojis/twitter/emoji/0_1652.png index d95f8317f9..d395065604 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1652.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1652.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1653.png b/TMessagesProj/src/emojis/twitter/emoji/0_1653.png index 1f930e7217..62abe1548b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1653.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1653.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1654.png b/TMessagesProj/src/emojis/twitter/emoji/0_1654.png index abfe7201b1..35caa7fa2a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1654.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1654.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1655.png b/TMessagesProj/src/emojis/twitter/emoji/0_1655.png index e473e74264..c8fb87b331 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1655.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1655.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1656.png b/TMessagesProj/src/emojis/twitter/emoji/0_1656.png index 0a55b77e43..565c5973cf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1656.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1656.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1657.png b/TMessagesProj/src/emojis/twitter/emoji/0_1657.png index b93f0f5365..56a64f05fe 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1657.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1657.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1658.png b/TMessagesProj/src/emojis/twitter/emoji/0_1658.png index d6aaa32fa3..e2d710cf15 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1658.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1658.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1659.png b/TMessagesProj/src/emojis/twitter/emoji/0_1659.png index 0969fd4745..f7c3e24369 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1659.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1659.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_166.png b/TMessagesProj/src/emojis/twitter/emoji/0_166.png index 8b0243ff5b..a4b2caecec 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_166.png and b/TMessagesProj/src/emojis/twitter/emoji/0_166.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1660.png b/TMessagesProj/src/emojis/twitter/emoji/0_1660.png index 61d05bdce3..0ae867be51 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1660.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1660.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1661.png b/TMessagesProj/src/emojis/twitter/emoji/0_1661.png index 48e624f168..6045437459 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1661.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1661.png 
differ
[Binary asset update: the Twitter emoji sprites under TMessagesProj/src/emojis/twitter/emoji/ (0_17.png, 0_18.png, 0_19.png, 0_167.png through 0_190.png, and 0_1662.png through 0_1900.png in this span) were each replaced. For every file in this range git records only "Binary files a/<path> and b/<path> differ"; there is no textual content to review.]
diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1901.png b/TMessagesProj/src/emojis/twitter/emoji/0_1901.png index df7e2d9a73..19c4e99eb1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1901.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1901.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1902.png b/TMessagesProj/src/emojis/twitter/emoji/0_1902.png index 8b92df710e..4e2f86f189 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1902.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1902.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1903.png b/TMessagesProj/src/emojis/twitter/emoji/0_1903.png index 9e252da746..47d109750a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1903.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1903.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1904.png b/TMessagesProj/src/emojis/twitter/emoji/0_1904.png index e7172bdba3..2c7ba847ed 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1904.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1904.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1905.png b/TMessagesProj/src/emojis/twitter/emoji/0_1905.png index deb59f89d6..b0f40e935e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1905.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1905.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1906.png b/TMessagesProj/src/emojis/twitter/emoji/0_1906.png index a480bbade9..7d56b3f2ad 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1906.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1906.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1907.png b/TMessagesProj/src/emojis/twitter/emoji/0_1907.png index 5fb0e82d2c..0cf4308774 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1907.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1907.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1908.png b/TMessagesProj/src/emojis/twitter/emoji/0_1908.png index 7851a3dd06..21be47daba 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1908.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1908.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1909.png b/TMessagesProj/src/emojis/twitter/emoji/0_1909.png index 7ef520c0fc..ab03e73cbf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1909.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1909.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_191.png b/TMessagesProj/src/emojis/twitter/emoji/0_191.png index 976cc3e684..69ba512adb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_191.png and b/TMessagesProj/src/emojis/twitter/emoji/0_191.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1910.png b/TMessagesProj/src/emojis/twitter/emoji/0_1910.png index 05babf9024..f63997aa0f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1910.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1910.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1911.png b/TMessagesProj/src/emojis/twitter/emoji/0_1911.png index 9bcb3c020a..c73b7cbad3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1911.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1911.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1912.png b/TMessagesProj/src/emojis/twitter/emoji/0_1912.png index 5318e40703..a58f2de0e6 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1912.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1912.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1913.png b/TMessagesProj/src/emojis/twitter/emoji/0_1913.png index ab76371e59..d418fde4b8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1913.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1913.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1914.png b/TMessagesProj/src/emojis/twitter/emoji/0_1914.png index 0358b6b7aa..13ba3f124d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1914.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1914.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1915.png b/TMessagesProj/src/emojis/twitter/emoji/0_1915.png index 2b32337754..49cbe7f6ae 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1915.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1915.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1916.png b/TMessagesProj/src/emojis/twitter/emoji/0_1916.png index 667d83d9b5..47868dcdb9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1916.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1916.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1917.png b/TMessagesProj/src/emojis/twitter/emoji/0_1917.png index 2bd0e8fede..097a5833d2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1917.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1917.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1918.png b/TMessagesProj/src/emojis/twitter/emoji/0_1918.png index efa4e9da33..795208a56d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1918.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1918.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1919.png b/TMessagesProj/src/emojis/twitter/emoji/0_1919.png index 24b2ee1b8d..6090652960 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1919.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1919.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_192.png b/TMessagesProj/src/emojis/twitter/emoji/0_192.png index 0cb47f8720..621d6cc599 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_192.png and b/TMessagesProj/src/emojis/twitter/emoji/0_192.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1920.png b/TMessagesProj/src/emojis/twitter/emoji/0_1920.png index ca4ba3b41c..ee4934656f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1920.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1920.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1921.png b/TMessagesProj/src/emojis/twitter/emoji/0_1921.png index 9a1367d0ed..96e6ef4d42 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1921.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1921.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1922.png b/TMessagesProj/src/emojis/twitter/emoji/0_1922.png index 3e9d6ce2d4..00b77241ce 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1922.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1922.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1923.png b/TMessagesProj/src/emojis/twitter/emoji/0_1923.png index c400d967f2..729de046e2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1923.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1923.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1924.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1924.png index 8ea23df576..caa5964b69 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1924.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1924.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1925.png b/TMessagesProj/src/emojis/twitter/emoji/0_1925.png index 043eb1b89c..bd20b95f48 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1925.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1925.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1926.png b/TMessagesProj/src/emojis/twitter/emoji/0_1926.png index bc27237393..d7872c85e8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1926.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1926.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1927.png b/TMessagesProj/src/emojis/twitter/emoji/0_1927.png index f3a6219914..5c8d8972cf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1927.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1927.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1928.png b/TMessagesProj/src/emojis/twitter/emoji/0_1928.png index 8c56a3893c..3bb5fc6c0f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1928.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1928.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1929.png b/TMessagesProj/src/emojis/twitter/emoji/0_1929.png index b5d04a9edb..259afc381f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1929.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1929.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_193.png b/TMessagesProj/src/emojis/twitter/emoji/0_193.png index 52463e6e26..0927c4843c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_193.png and b/TMessagesProj/src/emojis/twitter/emoji/0_193.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1930.png b/TMessagesProj/src/emojis/twitter/emoji/0_1930.png index 37b6bce434..8a40076aa1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1930.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1930.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1931.png b/TMessagesProj/src/emojis/twitter/emoji/0_1931.png index 24732af5d0..855bc7fd8d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1931.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1931.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1932.png b/TMessagesProj/src/emojis/twitter/emoji/0_1932.png index 819bdcce68..3de1e3e8bf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1932.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1932.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1933.png b/TMessagesProj/src/emojis/twitter/emoji/0_1933.png index 0e778f11c7..6525d10982 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1933.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1933.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1934.png b/TMessagesProj/src/emojis/twitter/emoji/0_1934.png index 60149e4228..83b9f8ce1e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1934.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1934.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1935.png b/TMessagesProj/src/emojis/twitter/emoji/0_1935.png index 78bf06d0e9..960a9e635a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1935.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1935.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1936.png b/TMessagesProj/src/emojis/twitter/emoji/0_1936.png index c3334603ac..05d04b176f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1936.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1936.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1937.png b/TMessagesProj/src/emojis/twitter/emoji/0_1937.png index f84cf9eedc..eeb4896e05 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1937.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1937.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1938.png b/TMessagesProj/src/emojis/twitter/emoji/0_1938.png index 5941af01d6..ff652858f6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1938.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1938.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1939.png b/TMessagesProj/src/emojis/twitter/emoji/0_1939.png index 94750a4bdc..9f9721f6c4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1939.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1939.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_194.png b/TMessagesProj/src/emojis/twitter/emoji/0_194.png index 2bbbf94a7d..8e5d3d17f4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_194.png and b/TMessagesProj/src/emojis/twitter/emoji/0_194.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1940.png b/TMessagesProj/src/emojis/twitter/emoji/0_1940.png index 1dafac43e1..1b596db4f5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1940.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1940.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1941.png b/TMessagesProj/src/emojis/twitter/emoji/0_1941.png index 55c7658537..cc4d8c01d7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1941.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1941.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1942.png b/TMessagesProj/src/emojis/twitter/emoji/0_1942.png index 613aa24690..77a3ba9eda 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1942.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1942.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1943.png b/TMessagesProj/src/emojis/twitter/emoji/0_1943.png index 2c4c75232a..b7d5173202 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1943.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1943.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1944.png b/TMessagesProj/src/emojis/twitter/emoji/0_1944.png index a8f5ce4b7b..6cf8246392 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1944.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1944.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1945.png b/TMessagesProj/src/emojis/twitter/emoji/0_1945.png index 4f54f3356b..2a82ac61e3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1945.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1945.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1946.png b/TMessagesProj/src/emojis/twitter/emoji/0_1946.png index 1843c88fb9..532c2722b5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1946.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1946.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1947.png b/TMessagesProj/src/emojis/twitter/emoji/0_1947.png index 5894123674..fb59a7b70c 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_1947.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1947.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1948.png b/TMessagesProj/src/emojis/twitter/emoji/0_1948.png index 6b5aff9f9d..f65691b8a5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1948.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1948.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1949.png b/TMessagesProj/src/emojis/twitter/emoji/0_1949.png index bfa1163683..59872cd0b4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1949.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1949.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_195.png b/TMessagesProj/src/emojis/twitter/emoji/0_195.png index 3b7d63eb61..2e8b72154f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_195.png and b/TMessagesProj/src/emojis/twitter/emoji/0_195.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1950.png b/TMessagesProj/src/emojis/twitter/emoji/0_1950.png index 57d90725ba..b00026c5f1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1950.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1950.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1951.png b/TMessagesProj/src/emojis/twitter/emoji/0_1951.png index 315851ed71..4a54528486 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1951.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1951.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1952.png b/TMessagesProj/src/emojis/twitter/emoji/0_1952.png index f83c62cfce..cc17e435b7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1952.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1952.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1953.png b/TMessagesProj/src/emojis/twitter/emoji/0_1953.png index 3d665dfa76..13af1edd27 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1953.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1953.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1954.png b/TMessagesProj/src/emojis/twitter/emoji/0_1954.png index 42517295d6..dfb39e2cf0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1954.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1954.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1955.png b/TMessagesProj/src/emojis/twitter/emoji/0_1955.png index 9046b4d157..8b340fa13c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1955.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1955.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1956.png b/TMessagesProj/src/emojis/twitter/emoji/0_1956.png index 4e15e16228..3d1d8fb71a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1956.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1956.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1957.png b/TMessagesProj/src/emojis/twitter/emoji/0_1957.png index 00386984a3..e24cac2458 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1957.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1957.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1958.png b/TMessagesProj/src/emojis/twitter/emoji/0_1958.png index c0b660f0ae..aaf60b524e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1958.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1958.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1959.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_1959.png index eabeb6a03a..b783d92c4c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1959.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1959.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_196.png b/TMessagesProj/src/emojis/twitter/emoji/0_196.png index 1907c173ef..8fece95bec 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_196.png and b/TMessagesProj/src/emojis/twitter/emoji/0_196.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1960.png b/TMessagesProj/src/emojis/twitter/emoji/0_1960.png index 3f605ecd9f..01263e5ca5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1960.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1960.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1961.png b/TMessagesProj/src/emojis/twitter/emoji/0_1961.png index 206fd3e312..44648cb392 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1961.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1961.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1962.png b/TMessagesProj/src/emojis/twitter/emoji/0_1962.png index 752c5b6b0e..ec03ed68df 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1962.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1962.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1963.png b/TMessagesProj/src/emojis/twitter/emoji/0_1963.png index b0ec365b0b..9de746391d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1963.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1963.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1964.png b/TMessagesProj/src/emojis/twitter/emoji/0_1964.png index ddd97677aa..7ee06194f9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1964.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1964.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1965.png b/TMessagesProj/src/emojis/twitter/emoji/0_1965.png index e03e4fdfe1..a3b50d9dc4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1965.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1965.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1966.png b/TMessagesProj/src/emojis/twitter/emoji/0_1966.png index 4c58e60fc7..5a76f62d7c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1966.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1966.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1967.png b/TMessagesProj/src/emojis/twitter/emoji/0_1967.png index 9f0c15c344..45254b1050 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1967.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1967.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1968.png b/TMessagesProj/src/emojis/twitter/emoji/0_1968.png index 2bf953ea20..375e75e450 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1968.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1968.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1969.png b/TMessagesProj/src/emojis/twitter/emoji/0_1969.png index d6995ffd7e..5e0e61b218 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1969.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1969.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_197.png b/TMessagesProj/src/emojis/twitter/emoji/0_197.png index d1932421f3..1d74284b38 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_197.png and b/TMessagesProj/src/emojis/twitter/emoji/0_197.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1970.png b/TMessagesProj/src/emojis/twitter/emoji/0_1970.png index 4f9ea6ad2e..6888b5953b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1970.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1970.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1971.png b/TMessagesProj/src/emojis/twitter/emoji/0_1971.png index 363b4fe9bd..dfd3847f6d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1971.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1971.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1972.png b/TMessagesProj/src/emojis/twitter/emoji/0_1972.png index 5f56c70351..aa84dfebc3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1972.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1972.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1973.png b/TMessagesProj/src/emojis/twitter/emoji/0_1973.png index bd99cfd88f..ba6be64397 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1973.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1973.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1974.png b/TMessagesProj/src/emojis/twitter/emoji/0_1974.png index 8ed62cdb7c..692fa49af3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_1974.png and b/TMessagesProj/src/emojis/twitter/emoji/0_1974.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1975.png b/TMessagesProj/src/emojis/twitter/emoji/0_1975.png new file mode 100644 index 0000000000..cc2be8b028 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/0_1975.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1976.png b/TMessagesProj/src/emojis/twitter/emoji/0_1976.png new file mode 100644 index 0000000000..c371090a84 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/0_1976.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1977.png b/TMessagesProj/src/emojis/twitter/emoji/0_1977.png new file mode 100644 index 0000000000..06f1c91d29 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/0_1977.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1978.png b/TMessagesProj/src/emojis/twitter/emoji/0_1978.png new file mode 100644 index 0000000000..8edf30a253 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/0_1978.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1979.png b/TMessagesProj/src/emojis/twitter/emoji/0_1979.png new file mode 100644 index 0000000000..179d4a5fe1 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/0_1979.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_198.png b/TMessagesProj/src/emojis/twitter/emoji/0_198.png index eb3f8fd593..3a7f6c6f6c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_198.png and b/TMessagesProj/src/emojis/twitter/emoji/0_198.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1980.png b/TMessagesProj/src/emojis/twitter/emoji/0_1980.png new file mode 100644 index 0000000000..314cafb021 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/0_1980.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1981.png b/TMessagesProj/src/emojis/twitter/emoji/0_1981.png new file mode 100644 index 0000000000..fa30952e0a Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/0_1981.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1982.png b/TMessagesProj/src/emojis/twitter/emoji/0_1982.png new 
file mode 100644 index 0000000000..1d6a8b1e77 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/0_1982.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1983.png b/TMessagesProj/src/emojis/twitter/emoji/0_1983.png new file mode 100644 index 0000000000..228a863b55 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/0_1983.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1984.png b/TMessagesProj/src/emojis/twitter/emoji/0_1984.png new file mode 100644 index 0000000000..439a2799ee Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/0_1984.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1985.png b/TMessagesProj/src/emojis/twitter/emoji/0_1985.png new file mode 100644 index 0000000000..36551fd83f Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/0_1985.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1986.png b/TMessagesProj/src/emojis/twitter/emoji/0_1986.png new file mode 100644 index 0000000000..d9f405f778 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/0_1986.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_1987.png b/TMessagesProj/src/emojis/twitter/emoji/0_1987.png new file mode 100644 index 0000000000..ffc78528c5 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/0_1987.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_199.png b/TMessagesProj/src/emojis/twitter/emoji/0_199.png index e189aedeb4..a9ced8ec8f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_199.png and b/TMessagesProj/src/emojis/twitter/emoji/0_199.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_2.png b/TMessagesProj/src/emojis/twitter/emoji/0_2.png index cf7bb9289d..8c169faf32 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_2.png and b/TMessagesProj/src/emojis/twitter/emoji/0_2.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_20.png b/TMessagesProj/src/emojis/twitter/emoji/0_20.png index 657ffc9cfd..874302af85 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_20.png and b/TMessagesProj/src/emojis/twitter/emoji/0_20.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_200.png b/TMessagesProj/src/emojis/twitter/emoji/0_200.png index 5525a36840..8a7c2c8c68 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_200.png and b/TMessagesProj/src/emojis/twitter/emoji/0_200.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_201.png b/TMessagesProj/src/emojis/twitter/emoji/0_201.png index c05111d835..8dba75a986 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_201.png and b/TMessagesProj/src/emojis/twitter/emoji/0_201.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_202.png b/TMessagesProj/src/emojis/twitter/emoji/0_202.png index 91f38554bb..13b1a7b9fd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_202.png and b/TMessagesProj/src/emojis/twitter/emoji/0_202.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_203.png b/TMessagesProj/src/emojis/twitter/emoji/0_203.png index 2143ce8b90..7951da7d03 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_203.png and b/TMessagesProj/src/emojis/twitter/emoji/0_203.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_204.png b/TMessagesProj/src/emojis/twitter/emoji/0_204.png index c68529b6c8..d4c1b0055d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_204.png and 
b/TMessagesProj/src/emojis/twitter/emoji/0_204.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_205.png b/TMessagesProj/src/emojis/twitter/emoji/0_205.png index 9a6e4ceec9..25d8e5f2f4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_205.png and b/TMessagesProj/src/emojis/twitter/emoji/0_205.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_206.png b/TMessagesProj/src/emojis/twitter/emoji/0_206.png index 6bdd192116..91186a9ae2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_206.png and b/TMessagesProj/src/emojis/twitter/emoji/0_206.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_207.png b/TMessagesProj/src/emojis/twitter/emoji/0_207.png index f86b0968ca..5042fc1145 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_207.png and b/TMessagesProj/src/emojis/twitter/emoji/0_207.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_208.png b/TMessagesProj/src/emojis/twitter/emoji/0_208.png index 046c60f9f5..c2e1519144 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_208.png and b/TMessagesProj/src/emojis/twitter/emoji/0_208.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_209.png b/TMessagesProj/src/emojis/twitter/emoji/0_209.png index 257da61738..c92598286f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_209.png and b/TMessagesProj/src/emojis/twitter/emoji/0_209.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_21.png b/TMessagesProj/src/emojis/twitter/emoji/0_21.png index 9d90f5bf3b..173e43923a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_21.png and b/TMessagesProj/src/emojis/twitter/emoji/0_21.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_210.png b/TMessagesProj/src/emojis/twitter/emoji/0_210.png index 133b5a49b3..08be5da892 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_210.png and b/TMessagesProj/src/emojis/twitter/emoji/0_210.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_211.png b/TMessagesProj/src/emojis/twitter/emoji/0_211.png index 4566c4545f..79745a51da 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_211.png and b/TMessagesProj/src/emojis/twitter/emoji/0_211.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_212.png b/TMessagesProj/src/emojis/twitter/emoji/0_212.png index 302c925345..69a134baef 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_212.png and b/TMessagesProj/src/emojis/twitter/emoji/0_212.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_213.png b/TMessagesProj/src/emojis/twitter/emoji/0_213.png index 8167e237da..4a0612c564 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_213.png and b/TMessagesProj/src/emojis/twitter/emoji/0_213.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_214.png b/TMessagesProj/src/emojis/twitter/emoji/0_214.png index e5c43174b4..354f44bdf7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_214.png and b/TMessagesProj/src/emojis/twitter/emoji/0_214.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_215.png b/TMessagesProj/src/emojis/twitter/emoji/0_215.png index c1929b81fa..955abc1a9a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_215.png and b/TMessagesProj/src/emojis/twitter/emoji/0_215.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_216.png b/TMessagesProj/src/emojis/twitter/emoji/0_216.png index 45d79fa167..9b310a568a 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_216.png and b/TMessagesProj/src/emojis/twitter/emoji/0_216.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_217.png b/TMessagesProj/src/emojis/twitter/emoji/0_217.png index 0fc8458634..7f19905dff 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_217.png and b/TMessagesProj/src/emojis/twitter/emoji/0_217.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_218.png b/TMessagesProj/src/emojis/twitter/emoji/0_218.png index 1e820a03df..81f34a0cdc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_218.png and b/TMessagesProj/src/emojis/twitter/emoji/0_218.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_219.png b/TMessagesProj/src/emojis/twitter/emoji/0_219.png index c846c30a29..8a8864af1d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_219.png and b/TMessagesProj/src/emojis/twitter/emoji/0_219.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_22.png b/TMessagesProj/src/emojis/twitter/emoji/0_22.png index 45720ba721..926d1a710c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_22.png and b/TMessagesProj/src/emojis/twitter/emoji/0_22.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_220.png b/TMessagesProj/src/emojis/twitter/emoji/0_220.png index b93f848d0f..852bea517a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_220.png and b/TMessagesProj/src/emojis/twitter/emoji/0_220.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_221.png b/TMessagesProj/src/emojis/twitter/emoji/0_221.png index 619685eb63..cebe03dd73 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_221.png and b/TMessagesProj/src/emojis/twitter/emoji/0_221.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_222.png b/TMessagesProj/src/emojis/twitter/emoji/0_222.png index d0fb5cb174..37bf4c5b46 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_222.png and b/TMessagesProj/src/emojis/twitter/emoji/0_222.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_223.png b/TMessagesProj/src/emojis/twitter/emoji/0_223.png index f9638b1213..4ce9f13ac9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_223.png and b/TMessagesProj/src/emojis/twitter/emoji/0_223.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_224.png b/TMessagesProj/src/emojis/twitter/emoji/0_224.png index e55542f07f..73e19bbf1c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_224.png and b/TMessagesProj/src/emojis/twitter/emoji/0_224.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_225.png b/TMessagesProj/src/emojis/twitter/emoji/0_225.png index 577ec984a7..b4f8d17407 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_225.png and b/TMessagesProj/src/emojis/twitter/emoji/0_225.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_226.png b/TMessagesProj/src/emojis/twitter/emoji/0_226.png index 6241952754..0c8024de78 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_226.png and b/TMessagesProj/src/emojis/twitter/emoji/0_226.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_227.png b/TMessagesProj/src/emojis/twitter/emoji/0_227.png index 61e715d5d1..e74abdbc9b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_227.png and b/TMessagesProj/src/emojis/twitter/emoji/0_227.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_228.png b/TMessagesProj/src/emojis/twitter/emoji/0_228.png index 7b93179849..79b263259d 
100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_228.png and b/TMessagesProj/src/emojis/twitter/emoji/0_228.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_229.png b/TMessagesProj/src/emojis/twitter/emoji/0_229.png index 5cedfb4332..66586e4015 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_229.png and b/TMessagesProj/src/emojis/twitter/emoji/0_229.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_23.png b/TMessagesProj/src/emojis/twitter/emoji/0_23.png index e065b06ca8..acec85cb78 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_23.png and b/TMessagesProj/src/emojis/twitter/emoji/0_23.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_230.png b/TMessagesProj/src/emojis/twitter/emoji/0_230.png index 4acbfa9fb0..552fbc098a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_230.png and b/TMessagesProj/src/emojis/twitter/emoji/0_230.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_231.png b/TMessagesProj/src/emojis/twitter/emoji/0_231.png index 31fefc4554..222c1e21aa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_231.png and b/TMessagesProj/src/emojis/twitter/emoji/0_231.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_232.png b/TMessagesProj/src/emojis/twitter/emoji/0_232.png index 66c7f77336..1d473e711a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_232.png and b/TMessagesProj/src/emojis/twitter/emoji/0_232.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_233.png b/TMessagesProj/src/emojis/twitter/emoji/0_233.png index 687d161179..5d437137ac 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_233.png and b/TMessagesProj/src/emojis/twitter/emoji/0_233.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_234.png b/TMessagesProj/src/emojis/twitter/emoji/0_234.png index 20ac2e1d6a..2ba21b4866 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_234.png and b/TMessagesProj/src/emojis/twitter/emoji/0_234.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_235.png b/TMessagesProj/src/emojis/twitter/emoji/0_235.png index a6afc46485..c5037a7d76 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_235.png and b/TMessagesProj/src/emojis/twitter/emoji/0_235.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_236.png b/TMessagesProj/src/emojis/twitter/emoji/0_236.png index d83111cbdd..a9f2ea4894 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_236.png and b/TMessagesProj/src/emojis/twitter/emoji/0_236.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_237.png b/TMessagesProj/src/emojis/twitter/emoji/0_237.png index 3e3d0659f9..e639c6d888 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_237.png and b/TMessagesProj/src/emojis/twitter/emoji/0_237.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_238.png b/TMessagesProj/src/emojis/twitter/emoji/0_238.png index a3543292d5..0a43a6fbac 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_238.png and b/TMessagesProj/src/emojis/twitter/emoji/0_238.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_239.png b/TMessagesProj/src/emojis/twitter/emoji/0_239.png index 9b6d950350..ba8d4b9b41 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_239.png and b/TMessagesProj/src/emojis/twitter/emoji/0_239.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_24.png b/TMessagesProj/src/emojis/twitter/emoji/0_24.png index 
bc928c1098..3ef3b10b1b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_24.png and b/TMessagesProj/src/emojis/twitter/emoji/0_24.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_240.png b/TMessagesProj/src/emojis/twitter/emoji/0_240.png index 5a1f0f324d..ed2532938d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_240.png and b/TMessagesProj/src/emojis/twitter/emoji/0_240.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_241.png b/TMessagesProj/src/emojis/twitter/emoji/0_241.png index f01c87ea8e..c0eb32b8bf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_241.png and b/TMessagesProj/src/emojis/twitter/emoji/0_241.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_242.png b/TMessagesProj/src/emojis/twitter/emoji/0_242.png index d3ef38c2f0..52bd3e11b3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_242.png and b/TMessagesProj/src/emojis/twitter/emoji/0_242.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_243.png b/TMessagesProj/src/emojis/twitter/emoji/0_243.png index 285fd0d4ac..ab62ec7e7d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_243.png and b/TMessagesProj/src/emojis/twitter/emoji/0_243.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_244.png b/TMessagesProj/src/emojis/twitter/emoji/0_244.png index 07d0835c73..6b0034d4b0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_244.png and b/TMessagesProj/src/emojis/twitter/emoji/0_244.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_245.png b/TMessagesProj/src/emojis/twitter/emoji/0_245.png index 7660a72fba..29cf0dd10f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_245.png and b/TMessagesProj/src/emojis/twitter/emoji/0_245.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_246.png b/TMessagesProj/src/emojis/twitter/emoji/0_246.png index a9e49eb3e8..0b90de4324 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_246.png and b/TMessagesProj/src/emojis/twitter/emoji/0_246.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_247.png b/TMessagesProj/src/emojis/twitter/emoji/0_247.png index eeb4b88a31..a8e8a25710 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_247.png and b/TMessagesProj/src/emojis/twitter/emoji/0_247.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_248.png b/TMessagesProj/src/emojis/twitter/emoji/0_248.png index 8441e628fd..c93ab2f5b7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_248.png and b/TMessagesProj/src/emojis/twitter/emoji/0_248.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_249.png b/TMessagesProj/src/emojis/twitter/emoji/0_249.png index b6e430512e..56a4e06ca9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_249.png and b/TMessagesProj/src/emojis/twitter/emoji/0_249.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_25.png b/TMessagesProj/src/emojis/twitter/emoji/0_25.png index 766b78d6b6..d744611105 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_25.png and b/TMessagesProj/src/emojis/twitter/emoji/0_25.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_250.png b/TMessagesProj/src/emojis/twitter/emoji/0_250.png index 11e19e7747..2b79da8d72 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_250.png and b/TMessagesProj/src/emojis/twitter/emoji/0_250.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_251.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_251.png index 43c44b43dc..443a7db87c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_251.png and b/TMessagesProj/src/emojis/twitter/emoji/0_251.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_252.png b/TMessagesProj/src/emojis/twitter/emoji/0_252.png index c7f9aabe42..8755d295ca 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_252.png and b/TMessagesProj/src/emojis/twitter/emoji/0_252.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_253.png b/TMessagesProj/src/emojis/twitter/emoji/0_253.png index 65b41d6611..501727436d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_253.png and b/TMessagesProj/src/emojis/twitter/emoji/0_253.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_254.png b/TMessagesProj/src/emojis/twitter/emoji/0_254.png index ee751051e5..461920887d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_254.png and b/TMessagesProj/src/emojis/twitter/emoji/0_254.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_255.png b/TMessagesProj/src/emojis/twitter/emoji/0_255.png index f13873222b..556245bda4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_255.png and b/TMessagesProj/src/emojis/twitter/emoji/0_255.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_256.png b/TMessagesProj/src/emojis/twitter/emoji/0_256.png index 57011a67c1..6aeaccf6db 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_256.png and b/TMessagesProj/src/emojis/twitter/emoji/0_256.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_257.png b/TMessagesProj/src/emojis/twitter/emoji/0_257.png index 43175c1441..c9ec8ca0ec 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_257.png and b/TMessagesProj/src/emojis/twitter/emoji/0_257.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_258.png b/TMessagesProj/src/emojis/twitter/emoji/0_258.png index edaa91ff76..a222ea230b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_258.png and b/TMessagesProj/src/emojis/twitter/emoji/0_258.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_259.png b/TMessagesProj/src/emojis/twitter/emoji/0_259.png index 6f27e04464..f912a4496d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_259.png and b/TMessagesProj/src/emojis/twitter/emoji/0_259.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_26.png b/TMessagesProj/src/emojis/twitter/emoji/0_26.png index 83aea0b598..84e175391b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_26.png and b/TMessagesProj/src/emojis/twitter/emoji/0_26.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_260.png b/TMessagesProj/src/emojis/twitter/emoji/0_260.png index d02cdbd01c..3cefd29787 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_260.png and b/TMessagesProj/src/emojis/twitter/emoji/0_260.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_261.png b/TMessagesProj/src/emojis/twitter/emoji/0_261.png index f83f6aa929..5804b1694b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_261.png and b/TMessagesProj/src/emojis/twitter/emoji/0_261.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_262.png b/TMessagesProj/src/emojis/twitter/emoji/0_262.png index 1556e17c29..b9e7ac8104 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_262.png and b/TMessagesProj/src/emojis/twitter/emoji/0_262.png differ diff --git 
a/TMessagesProj/src/emojis/twitter/emoji/0_263.png b/TMessagesProj/src/emojis/twitter/emoji/0_263.png index 2a9b53241c..594a46df87 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_263.png and b/TMessagesProj/src/emojis/twitter/emoji/0_263.png differ
[Binary emoji asset updates continue in the same pattern for TMessagesProj/src/emojis/twitter/emoji/0_264.png through 0_505.png, including the lexicographically interleaved 0_27.png–0_50.png and 0_3.png–0_5.png tiles: each PNG gets a new blob hash ("index <old>..<new> 100644") followed by "Binary files a/<path> and b/<path> differ", with no textual diff content.]
diff --git
a/TMessagesProj/src/emojis/twitter/emoji/0_506.png b/TMessagesProj/src/emojis/twitter/emoji/0_506.png index f413654ed1..360fb8e759 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_506.png and b/TMessagesProj/src/emojis/twitter/emoji/0_506.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_507.png b/TMessagesProj/src/emojis/twitter/emoji/0_507.png index 32959f3ca1..9f528aa5e9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_507.png and b/TMessagesProj/src/emojis/twitter/emoji/0_507.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_508.png b/TMessagesProj/src/emojis/twitter/emoji/0_508.png index 9cacdff317..66eb985620 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_508.png and b/TMessagesProj/src/emojis/twitter/emoji/0_508.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_509.png b/TMessagesProj/src/emojis/twitter/emoji/0_509.png index 46de10913b..398b0ccacd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_509.png and b/TMessagesProj/src/emojis/twitter/emoji/0_509.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_51.png b/TMessagesProj/src/emojis/twitter/emoji/0_51.png index a168f02c7b..03a831e3fe 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_51.png and b/TMessagesProj/src/emojis/twitter/emoji/0_51.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_510.png b/TMessagesProj/src/emojis/twitter/emoji/0_510.png index 7a638db964..df5cfdaeaa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_510.png and b/TMessagesProj/src/emojis/twitter/emoji/0_510.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_511.png b/TMessagesProj/src/emojis/twitter/emoji/0_511.png index fcc8f264c7..7906f16980 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_511.png and b/TMessagesProj/src/emojis/twitter/emoji/0_511.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_512.png b/TMessagesProj/src/emojis/twitter/emoji/0_512.png index b94a8c40ca..cfe82910bf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_512.png and b/TMessagesProj/src/emojis/twitter/emoji/0_512.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_513.png b/TMessagesProj/src/emojis/twitter/emoji/0_513.png index 7d183bad1c..cffcda6b36 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_513.png and b/TMessagesProj/src/emojis/twitter/emoji/0_513.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_514.png b/TMessagesProj/src/emojis/twitter/emoji/0_514.png index 277ed38914..c0610c595f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_514.png and b/TMessagesProj/src/emojis/twitter/emoji/0_514.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_515.png b/TMessagesProj/src/emojis/twitter/emoji/0_515.png index 950a94527d..b25145eb07 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_515.png and b/TMessagesProj/src/emojis/twitter/emoji/0_515.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_516.png b/TMessagesProj/src/emojis/twitter/emoji/0_516.png index b39e33614c..92daf40a20 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_516.png and b/TMessagesProj/src/emojis/twitter/emoji/0_516.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_517.png b/TMessagesProj/src/emojis/twitter/emoji/0_517.png index 644071e905..1da7108d98 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_517.png and b/TMessagesProj/src/emojis/twitter/emoji/0_517.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_518.png b/TMessagesProj/src/emojis/twitter/emoji/0_518.png index e706fd1d6a..973216ab75 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_518.png and b/TMessagesProj/src/emojis/twitter/emoji/0_518.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_519.png b/TMessagesProj/src/emojis/twitter/emoji/0_519.png index 46cb5d2a21..2db5ea9474 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_519.png and b/TMessagesProj/src/emojis/twitter/emoji/0_519.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_52.png b/TMessagesProj/src/emojis/twitter/emoji/0_52.png index b2c455bddb..4499a36dff 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_52.png and b/TMessagesProj/src/emojis/twitter/emoji/0_52.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_520.png b/TMessagesProj/src/emojis/twitter/emoji/0_520.png index 64072b418f..68c8dc3e7d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_520.png and b/TMessagesProj/src/emojis/twitter/emoji/0_520.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_521.png b/TMessagesProj/src/emojis/twitter/emoji/0_521.png index 6e7158cba3..e2ecac74ef 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_521.png and b/TMessagesProj/src/emojis/twitter/emoji/0_521.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_522.png b/TMessagesProj/src/emojis/twitter/emoji/0_522.png index a66a89b9ed..9ad7458ea5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_522.png and b/TMessagesProj/src/emojis/twitter/emoji/0_522.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_523.png b/TMessagesProj/src/emojis/twitter/emoji/0_523.png index 670f8cf477..9b82ac1d77 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_523.png and b/TMessagesProj/src/emojis/twitter/emoji/0_523.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_524.png b/TMessagesProj/src/emojis/twitter/emoji/0_524.png index 964673825c..f90aa4b0ad 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_524.png and b/TMessagesProj/src/emojis/twitter/emoji/0_524.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_525.png b/TMessagesProj/src/emojis/twitter/emoji/0_525.png index 880ef11cf0..61734637b8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_525.png and b/TMessagesProj/src/emojis/twitter/emoji/0_525.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_526.png b/TMessagesProj/src/emojis/twitter/emoji/0_526.png index 3192629366..8f9a40dbe3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_526.png and b/TMessagesProj/src/emojis/twitter/emoji/0_526.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_527.png b/TMessagesProj/src/emojis/twitter/emoji/0_527.png index 3808daff11..dab06a0507 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_527.png and b/TMessagesProj/src/emojis/twitter/emoji/0_527.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_528.png b/TMessagesProj/src/emojis/twitter/emoji/0_528.png index 3c75298483..c893621565 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_528.png and b/TMessagesProj/src/emojis/twitter/emoji/0_528.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_529.png b/TMessagesProj/src/emojis/twitter/emoji/0_529.png index fa1ac65504..29104128f8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_529.png and 
b/TMessagesProj/src/emojis/twitter/emoji/0_529.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_53.png b/TMessagesProj/src/emojis/twitter/emoji/0_53.png index eb1b52c0db..088d84828e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_53.png and b/TMessagesProj/src/emojis/twitter/emoji/0_53.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_530.png b/TMessagesProj/src/emojis/twitter/emoji/0_530.png index a69a0607fb..6211db9c7c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_530.png and b/TMessagesProj/src/emojis/twitter/emoji/0_530.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_531.png b/TMessagesProj/src/emojis/twitter/emoji/0_531.png index 71623ebda0..795dbbc86d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_531.png and b/TMessagesProj/src/emojis/twitter/emoji/0_531.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_532.png b/TMessagesProj/src/emojis/twitter/emoji/0_532.png index 361bba11b8..1e534c9667 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_532.png and b/TMessagesProj/src/emojis/twitter/emoji/0_532.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_533.png b/TMessagesProj/src/emojis/twitter/emoji/0_533.png index fb57badc43..4770d9d165 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_533.png and b/TMessagesProj/src/emojis/twitter/emoji/0_533.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_534.png b/TMessagesProj/src/emojis/twitter/emoji/0_534.png index 32e25ecb67..d7a69c67ff 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_534.png and b/TMessagesProj/src/emojis/twitter/emoji/0_534.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_535.png b/TMessagesProj/src/emojis/twitter/emoji/0_535.png index e6899adcfb..d85063124d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_535.png and b/TMessagesProj/src/emojis/twitter/emoji/0_535.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_536.png b/TMessagesProj/src/emojis/twitter/emoji/0_536.png index c1b7692f46..cc8c084ba7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_536.png and b/TMessagesProj/src/emojis/twitter/emoji/0_536.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_537.png b/TMessagesProj/src/emojis/twitter/emoji/0_537.png index 4e347ca78e..1f5c73fb2e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_537.png and b/TMessagesProj/src/emojis/twitter/emoji/0_537.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_538.png b/TMessagesProj/src/emojis/twitter/emoji/0_538.png index fbc364b1fc..df38ed0d0a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_538.png and b/TMessagesProj/src/emojis/twitter/emoji/0_538.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_539.png b/TMessagesProj/src/emojis/twitter/emoji/0_539.png index aa1e870589..f1e3b04204 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_539.png and b/TMessagesProj/src/emojis/twitter/emoji/0_539.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_54.png b/TMessagesProj/src/emojis/twitter/emoji/0_54.png index b8395f0e43..15ebc2b80a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_54.png and b/TMessagesProj/src/emojis/twitter/emoji/0_54.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_540.png b/TMessagesProj/src/emojis/twitter/emoji/0_540.png index e52f2e2740..3e09147dd5 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_540.png and b/TMessagesProj/src/emojis/twitter/emoji/0_540.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_541.png b/TMessagesProj/src/emojis/twitter/emoji/0_541.png index 02bd64f26b..d10071a130 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_541.png and b/TMessagesProj/src/emojis/twitter/emoji/0_541.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_542.png b/TMessagesProj/src/emojis/twitter/emoji/0_542.png index 61268c7f2a..a7174aa361 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_542.png and b/TMessagesProj/src/emojis/twitter/emoji/0_542.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_543.png b/TMessagesProj/src/emojis/twitter/emoji/0_543.png index 4773c7eb14..8bf8b30972 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_543.png and b/TMessagesProj/src/emojis/twitter/emoji/0_543.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_544.png b/TMessagesProj/src/emojis/twitter/emoji/0_544.png index c1f262aa3d..1f9c8bf676 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_544.png and b/TMessagesProj/src/emojis/twitter/emoji/0_544.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_545.png b/TMessagesProj/src/emojis/twitter/emoji/0_545.png index 17e8dabced..459ca920a6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_545.png and b/TMessagesProj/src/emojis/twitter/emoji/0_545.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_546.png b/TMessagesProj/src/emojis/twitter/emoji/0_546.png index b58e85725b..fe95398a73 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_546.png and b/TMessagesProj/src/emojis/twitter/emoji/0_546.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_547.png b/TMessagesProj/src/emojis/twitter/emoji/0_547.png index ee76e44922..936ee33567 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_547.png and b/TMessagesProj/src/emojis/twitter/emoji/0_547.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_548.png b/TMessagesProj/src/emojis/twitter/emoji/0_548.png index 4c99a75687..cede00b8d4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_548.png and b/TMessagesProj/src/emojis/twitter/emoji/0_548.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_549.png b/TMessagesProj/src/emojis/twitter/emoji/0_549.png index c529a0f9d4..b57cc050b4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_549.png and b/TMessagesProj/src/emojis/twitter/emoji/0_549.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_55.png b/TMessagesProj/src/emojis/twitter/emoji/0_55.png index e87bd69f09..7ee3e3dcbf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_55.png and b/TMessagesProj/src/emojis/twitter/emoji/0_55.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_550.png b/TMessagesProj/src/emojis/twitter/emoji/0_550.png index f95fdded38..05ba313beb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_550.png and b/TMessagesProj/src/emojis/twitter/emoji/0_550.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_551.png b/TMessagesProj/src/emojis/twitter/emoji/0_551.png index 313648f18c..fb392f0b23 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_551.png and b/TMessagesProj/src/emojis/twitter/emoji/0_551.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_552.png b/TMessagesProj/src/emojis/twitter/emoji/0_552.png index 571ba964b7..ce1565fd6e 
100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_552.png and b/TMessagesProj/src/emojis/twitter/emoji/0_552.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_553.png b/TMessagesProj/src/emojis/twitter/emoji/0_553.png index 328da6b4f3..615d65ac3d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_553.png and b/TMessagesProj/src/emojis/twitter/emoji/0_553.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_554.png b/TMessagesProj/src/emojis/twitter/emoji/0_554.png index 5278d2426f..96beceb6fb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_554.png and b/TMessagesProj/src/emojis/twitter/emoji/0_554.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_555.png b/TMessagesProj/src/emojis/twitter/emoji/0_555.png index 088f7d8abe..ba8e96e38b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_555.png and b/TMessagesProj/src/emojis/twitter/emoji/0_555.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_556.png b/TMessagesProj/src/emojis/twitter/emoji/0_556.png index 7aa197f253..503633853e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_556.png and b/TMessagesProj/src/emojis/twitter/emoji/0_556.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_557.png b/TMessagesProj/src/emojis/twitter/emoji/0_557.png index 4d16bdfb4c..dfefb57964 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_557.png and b/TMessagesProj/src/emojis/twitter/emoji/0_557.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_558.png b/TMessagesProj/src/emojis/twitter/emoji/0_558.png index 65b4497aea..1ee3174ae4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_558.png and b/TMessagesProj/src/emojis/twitter/emoji/0_558.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_559.png b/TMessagesProj/src/emojis/twitter/emoji/0_559.png index 05c69b10a4..8644ab8784 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_559.png and b/TMessagesProj/src/emojis/twitter/emoji/0_559.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_56.png b/TMessagesProj/src/emojis/twitter/emoji/0_56.png index 2b7552f3a5..4c236020cb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_56.png and b/TMessagesProj/src/emojis/twitter/emoji/0_56.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_560.png b/TMessagesProj/src/emojis/twitter/emoji/0_560.png index bac8690cbb..ebb28e9290 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_560.png and b/TMessagesProj/src/emojis/twitter/emoji/0_560.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_561.png b/TMessagesProj/src/emojis/twitter/emoji/0_561.png index 7350ced552..225d8cfc83 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_561.png and b/TMessagesProj/src/emojis/twitter/emoji/0_561.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_562.png b/TMessagesProj/src/emojis/twitter/emoji/0_562.png index 4b16d9e902..ff91215a2f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_562.png and b/TMessagesProj/src/emojis/twitter/emoji/0_562.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_563.png b/TMessagesProj/src/emojis/twitter/emoji/0_563.png index c9a1568c57..ca3ded16ce 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_563.png and b/TMessagesProj/src/emojis/twitter/emoji/0_563.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_564.png b/TMessagesProj/src/emojis/twitter/emoji/0_564.png index 
12ef098b55..6bf6cf9278 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_564.png and b/TMessagesProj/src/emojis/twitter/emoji/0_564.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_565.png b/TMessagesProj/src/emojis/twitter/emoji/0_565.png index 0f12b5abdf..b8719a83e7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_565.png and b/TMessagesProj/src/emojis/twitter/emoji/0_565.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_566.png b/TMessagesProj/src/emojis/twitter/emoji/0_566.png index 2693244b04..4a599649ed 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_566.png and b/TMessagesProj/src/emojis/twitter/emoji/0_566.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_567.png b/TMessagesProj/src/emojis/twitter/emoji/0_567.png index 603a1505f5..0508a0ec1d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_567.png and b/TMessagesProj/src/emojis/twitter/emoji/0_567.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_568.png b/TMessagesProj/src/emojis/twitter/emoji/0_568.png index 03ba05d152..76d3131e49 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_568.png and b/TMessagesProj/src/emojis/twitter/emoji/0_568.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_569.png b/TMessagesProj/src/emojis/twitter/emoji/0_569.png index c1c47b9720..c34856d361 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_569.png and b/TMessagesProj/src/emojis/twitter/emoji/0_569.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_57.png b/TMessagesProj/src/emojis/twitter/emoji/0_57.png index dd1932a70e..500cbb5279 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_57.png and b/TMessagesProj/src/emojis/twitter/emoji/0_57.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_570.png b/TMessagesProj/src/emojis/twitter/emoji/0_570.png index 7ab7a51450..6b0cffb269 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_570.png and b/TMessagesProj/src/emojis/twitter/emoji/0_570.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_571.png b/TMessagesProj/src/emojis/twitter/emoji/0_571.png index 4e8495c13e..6b2e4670eb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_571.png and b/TMessagesProj/src/emojis/twitter/emoji/0_571.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_572.png b/TMessagesProj/src/emojis/twitter/emoji/0_572.png index 3f57b8f487..d3c36c33ac 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_572.png and b/TMessagesProj/src/emojis/twitter/emoji/0_572.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_573.png b/TMessagesProj/src/emojis/twitter/emoji/0_573.png index bb32cad601..0cf14f49c3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_573.png and b/TMessagesProj/src/emojis/twitter/emoji/0_573.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_574.png b/TMessagesProj/src/emojis/twitter/emoji/0_574.png index 8011f6a763..97230cb4d4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_574.png and b/TMessagesProj/src/emojis/twitter/emoji/0_574.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_575.png b/TMessagesProj/src/emojis/twitter/emoji/0_575.png index 68c2ba0503..8aff89efbc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_575.png and b/TMessagesProj/src/emojis/twitter/emoji/0_575.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_576.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_576.png index dae0407e76..fc71793fb5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_576.png and b/TMessagesProj/src/emojis/twitter/emoji/0_576.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_577.png b/TMessagesProj/src/emojis/twitter/emoji/0_577.png index 3759de4408..e4ba9bd129 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_577.png and b/TMessagesProj/src/emojis/twitter/emoji/0_577.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_578.png b/TMessagesProj/src/emojis/twitter/emoji/0_578.png index 123b98f820..1f14f7af7b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_578.png and b/TMessagesProj/src/emojis/twitter/emoji/0_578.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_579.png b/TMessagesProj/src/emojis/twitter/emoji/0_579.png index 34dc7b4383..23b79b80b3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_579.png and b/TMessagesProj/src/emojis/twitter/emoji/0_579.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_58.png b/TMessagesProj/src/emojis/twitter/emoji/0_58.png index cb3b3f9a72..4753b0b2cd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_58.png and b/TMessagesProj/src/emojis/twitter/emoji/0_58.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_580.png b/TMessagesProj/src/emojis/twitter/emoji/0_580.png index d273bf8504..4b04385593 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_580.png and b/TMessagesProj/src/emojis/twitter/emoji/0_580.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_581.png b/TMessagesProj/src/emojis/twitter/emoji/0_581.png index e9b96f3e85..c568aee405 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_581.png and b/TMessagesProj/src/emojis/twitter/emoji/0_581.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_582.png b/TMessagesProj/src/emojis/twitter/emoji/0_582.png index a3ecf26e61..7aaa0ab547 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_582.png and b/TMessagesProj/src/emojis/twitter/emoji/0_582.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_583.png b/TMessagesProj/src/emojis/twitter/emoji/0_583.png index 2fbc0249ef..69ab857b2f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_583.png and b/TMessagesProj/src/emojis/twitter/emoji/0_583.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_584.png b/TMessagesProj/src/emojis/twitter/emoji/0_584.png index baf06c864e..0843714e16 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_584.png and b/TMessagesProj/src/emojis/twitter/emoji/0_584.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_585.png b/TMessagesProj/src/emojis/twitter/emoji/0_585.png index 0cd74626ec..73863d3118 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_585.png and b/TMessagesProj/src/emojis/twitter/emoji/0_585.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_586.png b/TMessagesProj/src/emojis/twitter/emoji/0_586.png index bac927dc88..a12bd57a93 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_586.png and b/TMessagesProj/src/emojis/twitter/emoji/0_586.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_587.png b/TMessagesProj/src/emojis/twitter/emoji/0_587.png index 3b4c67f14d..7db5ac0e7d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_587.png and b/TMessagesProj/src/emojis/twitter/emoji/0_587.png differ diff --git 
a/TMessagesProj/src/emojis/twitter/emoji/0_588.png b/TMessagesProj/src/emojis/twitter/emoji/0_588.png index 87d7d19d7a..9fe3f48492 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_588.png and b/TMessagesProj/src/emojis/twitter/emoji/0_588.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_589.png b/TMessagesProj/src/emojis/twitter/emoji/0_589.png index 25b1b62cda..950537ad0c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_589.png and b/TMessagesProj/src/emojis/twitter/emoji/0_589.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_59.png b/TMessagesProj/src/emojis/twitter/emoji/0_59.png index 0e5eace7e9..44604d0f9d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_59.png and b/TMessagesProj/src/emojis/twitter/emoji/0_59.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_590.png b/TMessagesProj/src/emojis/twitter/emoji/0_590.png index 2f74354e05..265ed5bf08 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_590.png and b/TMessagesProj/src/emojis/twitter/emoji/0_590.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_591.png b/TMessagesProj/src/emojis/twitter/emoji/0_591.png index 5a97660c50..81667dee55 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_591.png and b/TMessagesProj/src/emojis/twitter/emoji/0_591.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_592.png b/TMessagesProj/src/emojis/twitter/emoji/0_592.png index badbc99731..96dfc9a78e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_592.png and b/TMessagesProj/src/emojis/twitter/emoji/0_592.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_593.png b/TMessagesProj/src/emojis/twitter/emoji/0_593.png index 7dd07ebd58..986bbacc83 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_593.png and b/TMessagesProj/src/emojis/twitter/emoji/0_593.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_594.png b/TMessagesProj/src/emojis/twitter/emoji/0_594.png index 2f6d1bbc7c..cf132a4595 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_594.png and b/TMessagesProj/src/emojis/twitter/emoji/0_594.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_595.png b/TMessagesProj/src/emojis/twitter/emoji/0_595.png index 1f6d97fccf..df26da23b3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_595.png and b/TMessagesProj/src/emojis/twitter/emoji/0_595.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_596.png b/TMessagesProj/src/emojis/twitter/emoji/0_596.png index b1f9f232d2..b242465d43 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_596.png and b/TMessagesProj/src/emojis/twitter/emoji/0_596.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_597.png b/TMessagesProj/src/emojis/twitter/emoji/0_597.png index 77cebf9862..460a3f392c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_597.png and b/TMessagesProj/src/emojis/twitter/emoji/0_597.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_598.png b/TMessagesProj/src/emojis/twitter/emoji/0_598.png index b98810821f..449a2d0ae6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_598.png and b/TMessagesProj/src/emojis/twitter/emoji/0_598.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_599.png b/TMessagesProj/src/emojis/twitter/emoji/0_599.png index 5fc0e1356a..2fdf3852bc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_599.png and b/TMessagesProj/src/emojis/twitter/emoji/0_599.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_6.png b/TMessagesProj/src/emojis/twitter/emoji/0_6.png index c5b2db58be..fcf8ebcb5f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_6.png and b/TMessagesProj/src/emojis/twitter/emoji/0_6.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_60.png b/TMessagesProj/src/emojis/twitter/emoji/0_60.png index c767607933..c7bfa2bbd4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_60.png and b/TMessagesProj/src/emojis/twitter/emoji/0_60.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_600.png b/TMessagesProj/src/emojis/twitter/emoji/0_600.png index c8f099d212..36d0f23bda 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_600.png and b/TMessagesProj/src/emojis/twitter/emoji/0_600.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_601.png b/TMessagesProj/src/emojis/twitter/emoji/0_601.png index aaa7db207a..abbe7ac231 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_601.png and b/TMessagesProj/src/emojis/twitter/emoji/0_601.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_602.png b/TMessagesProj/src/emojis/twitter/emoji/0_602.png index ac32f91fa4..59e882fcbc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_602.png and b/TMessagesProj/src/emojis/twitter/emoji/0_602.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_603.png b/TMessagesProj/src/emojis/twitter/emoji/0_603.png index 7655d84a07..8b0acfe621 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_603.png and b/TMessagesProj/src/emojis/twitter/emoji/0_603.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_604.png b/TMessagesProj/src/emojis/twitter/emoji/0_604.png index 52b76c39cc..aaf04f6238 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_604.png and b/TMessagesProj/src/emojis/twitter/emoji/0_604.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_605.png b/TMessagesProj/src/emojis/twitter/emoji/0_605.png index ce07d2d2cb..8045ffbd6c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_605.png and b/TMessagesProj/src/emojis/twitter/emoji/0_605.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_606.png b/TMessagesProj/src/emojis/twitter/emoji/0_606.png index 102b883b74..797452ee41 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_606.png and b/TMessagesProj/src/emojis/twitter/emoji/0_606.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_607.png b/TMessagesProj/src/emojis/twitter/emoji/0_607.png index 46cfbb62fe..a3a911f099 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_607.png and b/TMessagesProj/src/emojis/twitter/emoji/0_607.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_608.png b/TMessagesProj/src/emojis/twitter/emoji/0_608.png index cea9dc77bf..2972bb6aad 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_608.png and b/TMessagesProj/src/emojis/twitter/emoji/0_608.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_609.png b/TMessagesProj/src/emojis/twitter/emoji/0_609.png index f7473a80c8..e9f5bf8fd9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_609.png and b/TMessagesProj/src/emojis/twitter/emoji/0_609.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_61.png b/TMessagesProj/src/emojis/twitter/emoji/0_61.png index decb344052..4ddc79fcd7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_61.png and 
b/TMessagesProj/src/emojis/twitter/emoji/0_61.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_610.png b/TMessagesProj/src/emojis/twitter/emoji/0_610.png index 999da63b5a..0ba583cf7d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_610.png and b/TMessagesProj/src/emojis/twitter/emoji/0_610.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_611.png b/TMessagesProj/src/emojis/twitter/emoji/0_611.png index 54e1b5527f..6403596aa9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_611.png and b/TMessagesProj/src/emojis/twitter/emoji/0_611.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_612.png b/TMessagesProj/src/emojis/twitter/emoji/0_612.png index 81ae572cb3..4ba6153582 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_612.png and b/TMessagesProj/src/emojis/twitter/emoji/0_612.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_613.png b/TMessagesProj/src/emojis/twitter/emoji/0_613.png index f9747aa97b..cf156e7764 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_613.png and b/TMessagesProj/src/emojis/twitter/emoji/0_613.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_614.png b/TMessagesProj/src/emojis/twitter/emoji/0_614.png index ed5cc21bb1..2efe05ea57 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_614.png and b/TMessagesProj/src/emojis/twitter/emoji/0_614.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_615.png b/TMessagesProj/src/emojis/twitter/emoji/0_615.png index c9a2ec8a19..f5f5ca0043 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_615.png and b/TMessagesProj/src/emojis/twitter/emoji/0_615.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_616.png b/TMessagesProj/src/emojis/twitter/emoji/0_616.png index c822dbd368..fae8bab6b0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_616.png and b/TMessagesProj/src/emojis/twitter/emoji/0_616.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_617.png b/TMessagesProj/src/emojis/twitter/emoji/0_617.png index 42b05f246c..568b29dd3d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_617.png and b/TMessagesProj/src/emojis/twitter/emoji/0_617.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_618.png b/TMessagesProj/src/emojis/twitter/emoji/0_618.png index a11aa4c6e9..d9cd99e449 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_618.png and b/TMessagesProj/src/emojis/twitter/emoji/0_618.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_619.png b/TMessagesProj/src/emojis/twitter/emoji/0_619.png index 70e4efea0c..c0fc2b4952 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_619.png and b/TMessagesProj/src/emojis/twitter/emoji/0_619.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_62.png b/TMessagesProj/src/emojis/twitter/emoji/0_62.png index f34bec1fb7..8c15670331 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_62.png and b/TMessagesProj/src/emojis/twitter/emoji/0_62.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_620.png b/TMessagesProj/src/emojis/twitter/emoji/0_620.png index fd2ac10dcb..392ab48734 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_620.png and b/TMessagesProj/src/emojis/twitter/emoji/0_620.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_621.png b/TMessagesProj/src/emojis/twitter/emoji/0_621.png index a3beac5e03..8297dea8bd 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_621.png and b/TMessagesProj/src/emojis/twitter/emoji/0_621.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_622.png b/TMessagesProj/src/emojis/twitter/emoji/0_622.png index a6825366ce..3337386359 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_622.png and b/TMessagesProj/src/emojis/twitter/emoji/0_622.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_623.png b/TMessagesProj/src/emojis/twitter/emoji/0_623.png index 3e98ea7fcb..7beee609ac 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_623.png and b/TMessagesProj/src/emojis/twitter/emoji/0_623.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_624.png b/TMessagesProj/src/emojis/twitter/emoji/0_624.png index 651e62e076..356d7d3d82 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_624.png and b/TMessagesProj/src/emojis/twitter/emoji/0_624.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_625.png b/TMessagesProj/src/emojis/twitter/emoji/0_625.png index 9aafc78489..030c206677 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_625.png and b/TMessagesProj/src/emojis/twitter/emoji/0_625.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_626.png b/TMessagesProj/src/emojis/twitter/emoji/0_626.png index 88240ae793..09c34ed704 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_626.png and b/TMessagesProj/src/emojis/twitter/emoji/0_626.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_627.png b/TMessagesProj/src/emojis/twitter/emoji/0_627.png index 8db373b39c..06a00711ad 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_627.png and b/TMessagesProj/src/emojis/twitter/emoji/0_627.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_628.png b/TMessagesProj/src/emojis/twitter/emoji/0_628.png index 9022380ebd..9062fb6c08 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_628.png and b/TMessagesProj/src/emojis/twitter/emoji/0_628.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_629.png b/TMessagesProj/src/emojis/twitter/emoji/0_629.png index 6cef64192d..0587103ef2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_629.png and b/TMessagesProj/src/emojis/twitter/emoji/0_629.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_63.png b/TMessagesProj/src/emojis/twitter/emoji/0_63.png index b41db895b1..2d75cf3f46 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_63.png and b/TMessagesProj/src/emojis/twitter/emoji/0_63.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_630.png b/TMessagesProj/src/emojis/twitter/emoji/0_630.png index 5146894342..3397fa5c7f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_630.png and b/TMessagesProj/src/emojis/twitter/emoji/0_630.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_631.png b/TMessagesProj/src/emojis/twitter/emoji/0_631.png index d4a663eea7..6cf85e1aa8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_631.png and b/TMessagesProj/src/emojis/twitter/emoji/0_631.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_632.png b/TMessagesProj/src/emojis/twitter/emoji/0_632.png index 66c2dd80d3..29a73f0cc2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_632.png and b/TMessagesProj/src/emojis/twitter/emoji/0_632.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_633.png b/TMessagesProj/src/emojis/twitter/emoji/0_633.png index 7c6b2d9da6..0e4e06857d 
100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_633.png and b/TMessagesProj/src/emojis/twitter/emoji/0_633.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_634.png b/TMessagesProj/src/emojis/twitter/emoji/0_634.png index 9557418bd8..eb1d8bc8eb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_634.png and b/TMessagesProj/src/emojis/twitter/emoji/0_634.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_635.png b/TMessagesProj/src/emojis/twitter/emoji/0_635.png index cf1dcf3853..6c60666548 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_635.png and b/TMessagesProj/src/emojis/twitter/emoji/0_635.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_636.png b/TMessagesProj/src/emojis/twitter/emoji/0_636.png index 0a4e60251b..503cf22b08 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_636.png and b/TMessagesProj/src/emojis/twitter/emoji/0_636.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_637.png b/TMessagesProj/src/emojis/twitter/emoji/0_637.png index 5d73eb3604..bc8321b061 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_637.png and b/TMessagesProj/src/emojis/twitter/emoji/0_637.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_638.png b/TMessagesProj/src/emojis/twitter/emoji/0_638.png index a376e7c017..f1333c128e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_638.png and b/TMessagesProj/src/emojis/twitter/emoji/0_638.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_639.png b/TMessagesProj/src/emojis/twitter/emoji/0_639.png index 98341b50e8..6eaffa7b5a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_639.png and b/TMessagesProj/src/emojis/twitter/emoji/0_639.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_64.png b/TMessagesProj/src/emojis/twitter/emoji/0_64.png index 5f6595f2cd..53c54dd314 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_64.png and b/TMessagesProj/src/emojis/twitter/emoji/0_64.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_640.png b/TMessagesProj/src/emojis/twitter/emoji/0_640.png index 7939165eb7..a3e08c76e6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_640.png and b/TMessagesProj/src/emojis/twitter/emoji/0_640.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_641.png b/TMessagesProj/src/emojis/twitter/emoji/0_641.png index 8aa1d7b771..908528acf1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_641.png and b/TMessagesProj/src/emojis/twitter/emoji/0_641.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_642.png b/TMessagesProj/src/emojis/twitter/emoji/0_642.png index e534066280..09463b0f8e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_642.png and b/TMessagesProj/src/emojis/twitter/emoji/0_642.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_643.png b/TMessagesProj/src/emojis/twitter/emoji/0_643.png index 7d4516a245..05b765c9bf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_643.png and b/TMessagesProj/src/emojis/twitter/emoji/0_643.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_644.png b/TMessagesProj/src/emojis/twitter/emoji/0_644.png index a75d4337f8..0ac9f14379 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_644.png and b/TMessagesProj/src/emojis/twitter/emoji/0_644.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_645.png b/TMessagesProj/src/emojis/twitter/emoji/0_645.png index 
3ce738cbe7..8ecef22fd7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_645.png and b/TMessagesProj/src/emojis/twitter/emoji/0_645.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_646.png b/TMessagesProj/src/emojis/twitter/emoji/0_646.png index 4f61584a49..709b34ec1c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_646.png and b/TMessagesProj/src/emojis/twitter/emoji/0_646.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_647.png b/TMessagesProj/src/emojis/twitter/emoji/0_647.png index dd6095d37b..5b3ed7c3dd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_647.png and b/TMessagesProj/src/emojis/twitter/emoji/0_647.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_648.png b/TMessagesProj/src/emojis/twitter/emoji/0_648.png index 2343ae66c9..2fa80e638d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_648.png and b/TMessagesProj/src/emojis/twitter/emoji/0_648.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_649.png b/TMessagesProj/src/emojis/twitter/emoji/0_649.png index 0be224b977..48c798bbdc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_649.png and b/TMessagesProj/src/emojis/twitter/emoji/0_649.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_65.png b/TMessagesProj/src/emojis/twitter/emoji/0_65.png index 54a1f6b718..259c3d0d3e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_65.png and b/TMessagesProj/src/emojis/twitter/emoji/0_65.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_650.png b/TMessagesProj/src/emojis/twitter/emoji/0_650.png index 4f00c9f827..0db2daef7f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_650.png and b/TMessagesProj/src/emojis/twitter/emoji/0_650.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_651.png b/TMessagesProj/src/emojis/twitter/emoji/0_651.png index ff6b99cf83..57824eddc4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_651.png and b/TMessagesProj/src/emojis/twitter/emoji/0_651.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_652.png b/TMessagesProj/src/emojis/twitter/emoji/0_652.png index 7f3350c506..da7a5a84d2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_652.png and b/TMessagesProj/src/emojis/twitter/emoji/0_652.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_653.png b/TMessagesProj/src/emojis/twitter/emoji/0_653.png index 883f627f42..6382ae48a8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_653.png and b/TMessagesProj/src/emojis/twitter/emoji/0_653.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_654.png b/TMessagesProj/src/emojis/twitter/emoji/0_654.png index 9c47d68f6d..5bf4606738 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_654.png and b/TMessagesProj/src/emojis/twitter/emoji/0_654.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_655.png b/TMessagesProj/src/emojis/twitter/emoji/0_655.png index b1c6ff256c..89295a8e53 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_655.png and b/TMessagesProj/src/emojis/twitter/emoji/0_655.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_656.png b/TMessagesProj/src/emojis/twitter/emoji/0_656.png index 12881929c5..24c8a8592b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_656.png and b/TMessagesProj/src/emojis/twitter/emoji/0_656.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_657.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_657.png
index a6fe3c4ad2..9977e7a804 100644
Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_657.png and b/TMessagesProj/src/emojis/twitter/emoji/0_657.png differ
[... further binary-only stanzas of the same form ("diff --git", "index <old>..<new> 100644", "Binary files a/<path> and b/<path> differ") for the updated Twitter emoji sprites under TMessagesProj/src/emojis/twitter/emoji/: 0_658.png through 0_899.png, together with 0_66.png–0_89.png and 0_7.png–0_9.png ...]
diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_90.png b/TMessagesProj/src/emojis/twitter/emoji/0_90.png
index da3a02abba..79edc24c74 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_90.png and b/TMessagesProj/src/emojis/twitter/emoji/0_90.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_900.png b/TMessagesProj/src/emojis/twitter/emoji/0_900.png index 640c560f6a..2c1dea1933 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_900.png and b/TMessagesProj/src/emojis/twitter/emoji/0_900.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_901.png b/TMessagesProj/src/emojis/twitter/emoji/0_901.png index 6b74338d22..4103892c40 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_901.png and b/TMessagesProj/src/emojis/twitter/emoji/0_901.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_902.png b/TMessagesProj/src/emojis/twitter/emoji/0_902.png index 0fc87b51b9..87c6a000ac 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_902.png and b/TMessagesProj/src/emojis/twitter/emoji/0_902.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_903.png b/TMessagesProj/src/emojis/twitter/emoji/0_903.png index 467edb3d86..efeb17765a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_903.png and b/TMessagesProj/src/emojis/twitter/emoji/0_903.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_904.png b/TMessagesProj/src/emojis/twitter/emoji/0_904.png index a6d597865b..d0efe6aec2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_904.png and b/TMessagesProj/src/emojis/twitter/emoji/0_904.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_905.png b/TMessagesProj/src/emojis/twitter/emoji/0_905.png index 8d05a65d22..ff26e5e282 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_905.png and b/TMessagesProj/src/emojis/twitter/emoji/0_905.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_906.png b/TMessagesProj/src/emojis/twitter/emoji/0_906.png index 05698acbf7..88faf0b0be 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_906.png and b/TMessagesProj/src/emojis/twitter/emoji/0_906.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_907.png b/TMessagesProj/src/emojis/twitter/emoji/0_907.png index 24ba0eea5d..b8e262a12c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_907.png and b/TMessagesProj/src/emojis/twitter/emoji/0_907.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_908.png b/TMessagesProj/src/emojis/twitter/emoji/0_908.png index 721b011501..6de8944c96 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_908.png and b/TMessagesProj/src/emojis/twitter/emoji/0_908.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_909.png b/TMessagesProj/src/emojis/twitter/emoji/0_909.png index d3590ecbe7..061aab9d83 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_909.png and b/TMessagesProj/src/emojis/twitter/emoji/0_909.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_91.png b/TMessagesProj/src/emojis/twitter/emoji/0_91.png index d8b66798a5..e2fd59ff12 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_91.png and b/TMessagesProj/src/emojis/twitter/emoji/0_91.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_910.png b/TMessagesProj/src/emojis/twitter/emoji/0_910.png index 6e95e4ccc2..176cc235e8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_910.png and b/TMessagesProj/src/emojis/twitter/emoji/0_910.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_911.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_911.png index 3c109ed95d..d78d065aa3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_911.png and b/TMessagesProj/src/emojis/twitter/emoji/0_911.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_912.png b/TMessagesProj/src/emojis/twitter/emoji/0_912.png index 9f0d28b1c8..efc7ba8894 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_912.png and b/TMessagesProj/src/emojis/twitter/emoji/0_912.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_913.png b/TMessagesProj/src/emojis/twitter/emoji/0_913.png index f4fd03d333..226c3ac831 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_913.png and b/TMessagesProj/src/emojis/twitter/emoji/0_913.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_914.png b/TMessagesProj/src/emojis/twitter/emoji/0_914.png index 4d023bf3ed..79ecedbdf5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_914.png and b/TMessagesProj/src/emojis/twitter/emoji/0_914.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_915.png b/TMessagesProj/src/emojis/twitter/emoji/0_915.png index f85bbd5628..590dc46ddd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_915.png and b/TMessagesProj/src/emojis/twitter/emoji/0_915.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_916.png b/TMessagesProj/src/emojis/twitter/emoji/0_916.png index fd80ab1ece..7fb3ffd949 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_916.png and b/TMessagesProj/src/emojis/twitter/emoji/0_916.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_917.png b/TMessagesProj/src/emojis/twitter/emoji/0_917.png index 2efa02a8b7..88f3e4ae61 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_917.png and b/TMessagesProj/src/emojis/twitter/emoji/0_917.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_918.png b/TMessagesProj/src/emojis/twitter/emoji/0_918.png index 6c24d9dd98..ad864a4ec0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_918.png and b/TMessagesProj/src/emojis/twitter/emoji/0_918.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_919.png b/TMessagesProj/src/emojis/twitter/emoji/0_919.png index 4e200a3b95..5f7e194ee6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_919.png and b/TMessagesProj/src/emojis/twitter/emoji/0_919.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_92.png b/TMessagesProj/src/emojis/twitter/emoji/0_92.png index 098ba563fc..25914aee80 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_92.png and b/TMessagesProj/src/emojis/twitter/emoji/0_92.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_920.png b/TMessagesProj/src/emojis/twitter/emoji/0_920.png index da38f211c9..3cf21093d7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_920.png and b/TMessagesProj/src/emojis/twitter/emoji/0_920.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_921.png b/TMessagesProj/src/emojis/twitter/emoji/0_921.png index f20883b33a..51f2e63dba 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_921.png and b/TMessagesProj/src/emojis/twitter/emoji/0_921.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_922.png b/TMessagesProj/src/emojis/twitter/emoji/0_922.png index ede5472f97..efb97455d2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_922.png and b/TMessagesProj/src/emojis/twitter/emoji/0_922.png differ diff --git 
a/TMessagesProj/src/emojis/twitter/emoji/0_923.png b/TMessagesProj/src/emojis/twitter/emoji/0_923.png index 19acd97dad..94f83232e2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_923.png and b/TMessagesProj/src/emojis/twitter/emoji/0_923.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_924.png b/TMessagesProj/src/emojis/twitter/emoji/0_924.png index 02f150d20f..ed3738d5b1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_924.png and b/TMessagesProj/src/emojis/twitter/emoji/0_924.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_925.png b/TMessagesProj/src/emojis/twitter/emoji/0_925.png index 0465d3f378..facaa7054b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_925.png and b/TMessagesProj/src/emojis/twitter/emoji/0_925.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_926.png b/TMessagesProj/src/emojis/twitter/emoji/0_926.png index ffbc4509af..ac6c0624fd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_926.png and b/TMessagesProj/src/emojis/twitter/emoji/0_926.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_927.png b/TMessagesProj/src/emojis/twitter/emoji/0_927.png index 42d48b9fdc..d35650903d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_927.png and b/TMessagesProj/src/emojis/twitter/emoji/0_927.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_928.png b/TMessagesProj/src/emojis/twitter/emoji/0_928.png index 30af9697fb..fe26a2f862 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_928.png and b/TMessagesProj/src/emojis/twitter/emoji/0_928.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_929.png b/TMessagesProj/src/emojis/twitter/emoji/0_929.png index 59b5a9960d..cf3b4df9c2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_929.png and b/TMessagesProj/src/emojis/twitter/emoji/0_929.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_93.png b/TMessagesProj/src/emojis/twitter/emoji/0_93.png index 55b0eb5967..979f15e973 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_93.png and b/TMessagesProj/src/emojis/twitter/emoji/0_93.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_930.png b/TMessagesProj/src/emojis/twitter/emoji/0_930.png index 9e08f08f8f..dfc34e3f8f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_930.png and b/TMessagesProj/src/emojis/twitter/emoji/0_930.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_931.png b/TMessagesProj/src/emojis/twitter/emoji/0_931.png index 7e84f62ce4..ccfb5e813f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_931.png and b/TMessagesProj/src/emojis/twitter/emoji/0_931.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_932.png b/TMessagesProj/src/emojis/twitter/emoji/0_932.png index 117a0f3508..7ace453a71 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_932.png and b/TMessagesProj/src/emojis/twitter/emoji/0_932.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_933.png b/TMessagesProj/src/emojis/twitter/emoji/0_933.png index 80fed1dadb..8fe68032f9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_933.png and b/TMessagesProj/src/emojis/twitter/emoji/0_933.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_934.png b/TMessagesProj/src/emojis/twitter/emoji/0_934.png index 79f270a314..f0eafb9489 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_934.png and b/TMessagesProj/src/emojis/twitter/emoji/0_934.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_935.png b/TMessagesProj/src/emojis/twitter/emoji/0_935.png index 1d289b78cd..38c0c8fc07 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_935.png and b/TMessagesProj/src/emojis/twitter/emoji/0_935.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_936.png b/TMessagesProj/src/emojis/twitter/emoji/0_936.png index 0a39703a1b..f44587d11e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_936.png and b/TMessagesProj/src/emojis/twitter/emoji/0_936.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_937.png b/TMessagesProj/src/emojis/twitter/emoji/0_937.png index 3a8e310cfc..d622bb5cbc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_937.png and b/TMessagesProj/src/emojis/twitter/emoji/0_937.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_938.png b/TMessagesProj/src/emojis/twitter/emoji/0_938.png index 5b810f2e4e..de9bba315c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_938.png and b/TMessagesProj/src/emojis/twitter/emoji/0_938.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_939.png b/TMessagesProj/src/emojis/twitter/emoji/0_939.png index 53e6704a7b..49a755dadd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_939.png and b/TMessagesProj/src/emojis/twitter/emoji/0_939.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_94.png b/TMessagesProj/src/emojis/twitter/emoji/0_94.png index 77db24174f..8e5dd41bf6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_94.png and b/TMessagesProj/src/emojis/twitter/emoji/0_94.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_940.png b/TMessagesProj/src/emojis/twitter/emoji/0_940.png index f2b7e546c6..79875c781f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_940.png and b/TMessagesProj/src/emojis/twitter/emoji/0_940.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_941.png b/TMessagesProj/src/emojis/twitter/emoji/0_941.png index 6a08bc6a36..4adea01743 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_941.png and b/TMessagesProj/src/emojis/twitter/emoji/0_941.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_942.png b/TMessagesProj/src/emojis/twitter/emoji/0_942.png index 854c2e3e7e..bf5695462c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_942.png and b/TMessagesProj/src/emojis/twitter/emoji/0_942.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_943.png b/TMessagesProj/src/emojis/twitter/emoji/0_943.png index 4c7a87d0f6..58f9d54b50 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_943.png and b/TMessagesProj/src/emojis/twitter/emoji/0_943.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_944.png b/TMessagesProj/src/emojis/twitter/emoji/0_944.png index 07b8dd3892..47904ef036 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_944.png and b/TMessagesProj/src/emojis/twitter/emoji/0_944.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_945.png b/TMessagesProj/src/emojis/twitter/emoji/0_945.png index b9d06335d3..cb69044c1c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_945.png and b/TMessagesProj/src/emojis/twitter/emoji/0_945.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_946.png b/TMessagesProj/src/emojis/twitter/emoji/0_946.png index 72de4c151c..8441f1a10a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_946.png and 
b/TMessagesProj/src/emojis/twitter/emoji/0_946.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_947.png b/TMessagesProj/src/emojis/twitter/emoji/0_947.png index 0d3827c279..647335074e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_947.png and b/TMessagesProj/src/emojis/twitter/emoji/0_947.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_948.png b/TMessagesProj/src/emojis/twitter/emoji/0_948.png index 5bcb519fbc..4e71842172 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_948.png and b/TMessagesProj/src/emojis/twitter/emoji/0_948.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_949.png b/TMessagesProj/src/emojis/twitter/emoji/0_949.png index 1ae2dedd65..aa325dce3e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_949.png and b/TMessagesProj/src/emojis/twitter/emoji/0_949.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_95.png b/TMessagesProj/src/emojis/twitter/emoji/0_95.png index 06236c20d1..f55611f5f1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_95.png and b/TMessagesProj/src/emojis/twitter/emoji/0_95.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_950.png b/TMessagesProj/src/emojis/twitter/emoji/0_950.png index db359e60e2..d22275821e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_950.png and b/TMessagesProj/src/emojis/twitter/emoji/0_950.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_951.png b/TMessagesProj/src/emojis/twitter/emoji/0_951.png index 8f9d9d7a89..6e5be1124a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_951.png and b/TMessagesProj/src/emojis/twitter/emoji/0_951.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_952.png b/TMessagesProj/src/emojis/twitter/emoji/0_952.png index 0ffda784c7..15a340958e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_952.png and b/TMessagesProj/src/emojis/twitter/emoji/0_952.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_953.png b/TMessagesProj/src/emojis/twitter/emoji/0_953.png index 5b46a0f315..de8bf966c7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_953.png and b/TMessagesProj/src/emojis/twitter/emoji/0_953.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_954.png b/TMessagesProj/src/emojis/twitter/emoji/0_954.png index da816e70b9..201e0fc479 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_954.png and b/TMessagesProj/src/emojis/twitter/emoji/0_954.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_955.png b/TMessagesProj/src/emojis/twitter/emoji/0_955.png index a908f1e95e..15a15feaf7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_955.png and b/TMessagesProj/src/emojis/twitter/emoji/0_955.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_956.png b/TMessagesProj/src/emojis/twitter/emoji/0_956.png index 5e87b1ef36..73519e77e3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_956.png and b/TMessagesProj/src/emojis/twitter/emoji/0_956.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_957.png b/TMessagesProj/src/emojis/twitter/emoji/0_957.png index d49bc499fd..0e471287c4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_957.png and b/TMessagesProj/src/emojis/twitter/emoji/0_957.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_958.png b/TMessagesProj/src/emojis/twitter/emoji/0_958.png index 28257a4313..2ecdaae814 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/0_958.png and b/TMessagesProj/src/emojis/twitter/emoji/0_958.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_959.png b/TMessagesProj/src/emojis/twitter/emoji/0_959.png index d6f66a9f3f..b588166545 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_959.png and b/TMessagesProj/src/emojis/twitter/emoji/0_959.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_96.png b/TMessagesProj/src/emojis/twitter/emoji/0_96.png index 6e9dd231c5..675b9e64b7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_96.png and b/TMessagesProj/src/emojis/twitter/emoji/0_96.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_960.png b/TMessagesProj/src/emojis/twitter/emoji/0_960.png index 169aa69948..261a662742 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_960.png and b/TMessagesProj/src/emojis/twitter/emoji/0_960.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_961.png b/TMessagesProj/src/emojis/twitter/emoji/0_961.png index c476e30c2d..ee19b3cefb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_961.png and b/TMessagesProj/src/emojis/twitter/emoji/0_961.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_962.png b/TMessagesProj/src/emojis/twitter/emoji/0_962.png index 93503897d9..c079cc802a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_962.png and b/TMessagesProj/src/emojis/twitter/emoji/0_962.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_963.png b/TMessagesProj/src/emojis/twitter/emoji/0_963.png index 37b7d8b90a..613e01a5c7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_963.png and b/TMessagesProj/src/emojis/twitter/emoji/0_963.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_964.png b/TMessagesProj/src/emojis/twitter/emoji/0_964.png index 2fda103777..1d889c29da 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_964.png and b/TMessagesProj/src/emojis/twitter/emoji/0_964.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_965.png b/TMessagesProj/src/emojis/twitter/emoji/0_965.png index 55dc6b5a26..952f461e45 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_965.png and b/TMessagesProj/src/emojis/twitter/emoji/0_965.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_966.png b/TMessagesProj/src/emojis/twitter/emoji/0_966.png index 9e027401a8..d4484af79b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_966.png and b/TMessagesProj/src/emojis/twitter/emoji/0_966.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_967.png b/TMessagesProj/src/emojis/twitter/emoji/0_967.png index 2528285835..6eb31dd5e7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_967.png and b/TMessagesProj/src/emojis/twitter/emoji/0_967.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_968.png b/TMessagesProj/src/emojis/twitter/emoji/0_968.png index 1b79d42eca..1e6fb8ae69 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_968.png and b/TMessagesProj/src/emojis/twitter/emoji/0_968.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_969.png b/TMessagesProj/src/emojis/twitter/emoji/0_969.png index 7a4aad46fd..8cbecb04f4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_969.png and b/TMessagesProj/src/emojis/twitter/emoji/0_969.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_97.png b/TMessagesProj/src/emojis/twitter/emoji/0_97.png index e1d947366e..d6a3df89e1 
100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_97.png and b/TMessagesProj/src/emojis/twitter/emoji/0_97.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_970.png b/TMessagesProj/src/emojis/twitter/emoji/0_970.png index 8c7e7283ef..eeb3316f42 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_970.png and b/TMessagesProj/src/emojis/twitter/emoji/0_970.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_971.png b/TMessagesProj/src/emojis/twitter/emoji/0_971.png index 7281849e4e..92479fa383 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_971.png and b/TMessagesProj/src/emojis/twitter/emoji/0_971.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_972.png b/TMessagesProj/src/emojis/twitter/emoji/0_972.png index b73686aa29..40b2f42563 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_972.png and b/TMessagesProj/src/emojis/twitter/emoji/0_972.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_973.png b/TMessagesProj/src/emojis/twitter/emoji/0_973.png index c9f53e81ae..36ac653096 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_973.png and b/TMessagesProj/src/emojis/twitter/emoji/0_973.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_974.png b/TMessagesProj/src/emojis/twitter/emoji/0_974.png index 67ada283c5..0521f23611 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_974.png and b/TMessagesProj/src/emojis/twitter/emoji/0_974.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_975.png b/TMessagesProj/src/emojis/twitter/emoji/0_975.png index 0385b675ef..3754a2a43e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_975.png and b/TMessagesProj/src/emojis/twitter/emoji/0_975.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_976.png b/TMessagesProj/src/emojis/twitter/emoji/0_976.png index b1b166afae..a2ef921e64 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_976.png and b/TMessagesProj/src/emojis/twitter/emoji/0_976.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_977.png b/TMessagesProj/src/emojis/twitter/emoji/0_977.png index acc10d57d1..eb2085cbb2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_977.png and b/TMessagesProj/src/emojis/twitter/emoji/0_977.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_978.png b/TMessagesProj/src/emojis/twitter/emoji/0_978.png index 0e81ecf9be..f774d66c2d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_978.png and b/TMessagesProj/src/emojis/twitter/emoji/0_978.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_979.png b/TMessagesProj/src/emojis/twitter/emoji/0_979.png index adc7c69bff..21c53ed9c6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_979.png and b/TMessagesProj/src/emojis/twitter/emoji/0_979.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_98.png b/TMessagesProj/src/emojis/twitter/emoji/0_98.png index 864bf87fdc..54e460267c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_98.png and b/TMessagesProj/src/emojis/twitter/emoji/0_98.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_980.png b/TMessagesProj/src/emojis/twitter/emoji/0_980.png index bd93b62ab9..7b26932935 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_980.png and b/TMessagesProj/src/emojis/twitter/emoji/0_980.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_981.png b/TMessagesProj/src/emojis/twitter/emoji/0_981.png index 
3da1ea2ee8..7ffb3ebb92 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_981.png and b/TMessagesProj/src/emojis/twitter/emoji/0_981.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_982.png b/TMessagesProj/src/emojis/twitter/emoji/0_982.png index d2f34f3c2a..5579209080 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_982.png and b/TMessagesProj/src/emojis/twitter/emoji/0_982.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_983.png b/TMessagesProj/src/emojis/twitter/emoji/0_983.png index 292db0499d..1ecef01220 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_983.png and b/TMessagesProj/src/emojis/twitter/emoji/0_983.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_984.png b/TMessagesProj/src/emojis/twitter/emoji/0_984.png index 3c058390d4..0bedd3055a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_984.png and b/TMessagesProj/src/emojis/twitter/emoji/0_984.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_985.png b/TMessagesProj/src/emojis/twitter/emoji/0_985.png index 36c41f6eeb..ad208fd15b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_985.png and b/TMessagesProj/src/emojis/twitter/emoji/0_985.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_986.png b/TMessagesProj/src/emojis/twitter/emoji/0_986.png index 412f7aeb82..530ddb57b4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_986.png and b/TMessagesProj/src/emojis/twitter/emoji/0_986.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_987.png b/TMessagesProj/src/emojis/twitter/emoji/0_987.png index 6e8af269b2..4fd982e207 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_987.png and b/TMessagesProj/src/emojis/twitter/emoji/0_987.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_988.png b/TMessagesProj/src/emojis/twitter/emoji/0_988.png index aaf6551baa..8b1a87c3d3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_988.png and b/TMessagesProj/src/emojis/twitter/emoji/0_988.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_989.png b/TMessagesProj/src/emojis/twitter/emoji/0_989.png index d572c63da8..841d6b251b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_989.png and b/TMessagesProj/src/emojis/twitter/emoji/0_989.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_99.png b/TMessagesProj/src/emojis/twitter/emoji/0_99.png index 7bd58ba140..f5338fa0fb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_99.png and b/TMessagesProj/src/emojis/twitter/emoji/0_99.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_990.png b/TMessagesProj/src/emojis/twitter/emoji/0_990.png index 566cb5810f..15970da7f9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_990.png and b/TMessagesProj/src/emojis/twitter/emoji/0_990.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_991.png b/TMessagesProj/src/emojis/twitter/emoji/0_991.png index 0cb264d3ea..9c70c3e5e5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_991.png and b/TMessagesProj/src/emojis/twitter/emoji/0_991.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_992.png b/TMessagesProj/src/emojis/twitter/emoji/0_992.png index 631f333eb5..eef568a2ad 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_992.png and b/TMessagesProj/src/emojis/twitter/emoji/0_992.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_993.png 
b/TMessagesProj/src/emojis/twitter/emoji/0_993.png index ee7f43cc7f..2a0fc841c6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_993.png and b/TMessagesProj/src/emojis/twitter/emoji/0_993.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_994.png b/TMessagesProj/src/emojis/twitter/emoji/0_994.png index fc16be44ff..4cf6422302 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_994.png and b/TMessagesProj/src/emojis/twitter/emoji/0_994.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_995.png b/TMessagesProj/src/emojis/twitter/emoji/0_995.png index 6a497422d7..48c7cd7074 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_995.png and b/TMessagesProj/src/emojis/twitter/emoji/0_995.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_996.png b/TMessagesProj/src/emojis/twitter/emoji/0_996.png index 51f32c677b..67b76abd26 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_996.png and b/TMessagesProj/src/emojis/twitter/emoji/0_996.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_997.png b/TMessagesProj/src/emojis/twitter/emoji/0_997.png index eb486eec50..f01154d5e6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_997.png and b/TMessagesProj/src/emojis/twitter/emoji/0_997.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_998.png b/TMessagesProj/src/emojis/twitter/emoji/0_998.png index 2c904846c5..96cee8a4b1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_998.png and b/TMessagesProj/src/emojis/twitter/emoji/0_998.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/0_999.png b/TMessagesProj/src/emojis/twitter/emoji/0_999.png index 3b96165b8f..def1df4028 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/0_999.png and b/TMessagesProj/src/emojis/twitter/emoji/0_999.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_0.png b/TMessagesProj/src/emojis/twitter/emoji/1_0.png index 353ae519f6..59cf04e37a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_0.png and b/TMessagesProj/src/emojis/twitter/emoji/1_0.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_1.png b/TMessagesProj/src/emojis/twitter/emoji/1_1.png index c15b2699ae..acb3dfd00c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_1.png and b/TMessagesProj/src/emojis/twitter/emoji/1_1.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_10.png b/TMessagesProj/src/emojis/twitter/emoji/1_10.png index 3526231ac7..a5340cf65a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_10.png and b/TMessagesProj/src/emojis/twitter/emoji/1_10.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_100.png b/TMessagesProj/src/emojis/twitter/emoji/1_100.png index f96eadb175..ba029b09f7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_100.png and b/TMessagesProj/src/emojis/twitter/emoji/1_100.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_101.png b/TMessagesProj/src/emojis/twitter/emoji/1_101.png index 7e65c140df..d1ead6f9c9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_101.png and b/TMessagesProj/src/emojis/twitter/emoji/1_101.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_102.png b/TMessagesProj/src/emojis/twitter/emoji/1_102.png index 1e7705e6b1..4b7293ac9f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_102.png and b/TMessagesProj/src/emojis/twitter/emoji/1_102.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_103.png 
b/TMessagesProj/src/emojis/twitter/emoji/1_103.png index 0c714dc5a2..ae4e2d55f0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_103.png and b/TMessagesProj/src/emojis/twitter/emoji/1_103.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_104.png b/TMessagesProj/src/emojis/twitter/emoji/1_104.png index 2914a94700..7ea905a9e7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_104.png and b/TMessagesProj/src/emojis/twitter/emoji/1_104.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_105.png b/TMessagesProj/src/emojis/twitter/emoji/1_105.png index 9f3b84dba5..55ef2343f0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_105.png and b/TMessagesProj/src/emojis/twitter/emoji/1_105.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_106.png b/TMessagesProj/src/emojis/twitter/emoji/1_106.png index 80c5407a71..cd8ee65b1e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_106.png and b/TMessagesProj/src/emojis/twitter/emoji/1_106.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_107.png b/TMessagesProj/src/emojis/twitter/emoji/1_107.png index e94c855f45..ef6a759f7b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_107.png and b/TMessagesProj/src/emojis/twitter/emoji/1_107.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_108.png b/TMessagesProj/src/emojis/twitter/emoji/1_108.png index fcaedc0d85..f9bce5bcf2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_108.png and b/TMessagesProj/src/emojis/twitter/emoji/1_108.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_109.png b/TMessagesProj/src/emojis/twitter/emoji/1_109.png index 192c5a1467..6c44332a8a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_109.png and b/TMessagesProj/src/emojis/twitter/emoji/1_109.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_11.png b/TMessagesProj/src/emojis/twitter/emoji/1_11.png index b1a1827f8d..3ac5fc6231 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_11.png and b/TMessagesProj/src/emojis/twitter/emoji/1_11.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_110.png b/TMessagesProj/src/emojis/twitter/emoji/1_110.png index e7d6d28536..2860b6e219 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_110.png and b/TMessagesProj/src/emojis/twitter/emoji/1_110.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_111.png b/TMessagesProj/src/emojis/twitter/emoji/1_111.png index cc0158e3a9..6d53f53b9b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_111.png and b/TMessagesProj/src/emojis/twitter/emoji/1_111.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_112.png b/TMessagesProj/src/emojis/twitter/emoji/1_112.png index 4e5d4d8a43..89aec9826d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_112.png and b/TMessagesProj/src/emojis/twitter/emoji/1_112.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_113.png b/TMessagesProj/src/emojis/twitter/emoji/1_113.png index ec994710e6..ce557c4c3f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_113.png and b/TMessagesProj/src/emojis/twitter/emoji/1_113.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_114.png b/TMessagesProj/src/emojis/twitter/emoji/1_114.png index 17f4ddf865..533f8d2ac0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_114.png and b/TMessagesProj/src/emojis/twitter/emoji/1_114.png differ diff --git 
a/TMessagesProj/src/emojis/twitter/emoji/1_115.png b/TMessagesProj/src/emojis/twitter/emoji/1_115.png index 7bbfce5102..568031f881 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_115.png and b/TMessagesProj/src/emojis/twitter/emoji/1_115.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_116.png b/TMessagesProj/src/emojis/twitter/emoji/1_116.png index cc17b7ef66..0ea306ad9e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_116.png and b/TMessagesProj/src/emojis/twitter/emoji/1_116.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_117.png b/TMessagesProj/src/emojis/twitter/emoji/1_117.png index ee668f97fa..ba63d40484 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_117.png and b/TMessagesProj/src/emojis/twitter/emoji/1_117.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_118.png b/TMessagesProj/src/emojis/twitter/emoji/1_118.png index 449afbba29..e5af6bbf48 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_118.png and b/TMessagesProj/src/emojis/twitter/emoji/1_118.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_119.png b/TMessagesProj/src/emojis/twitter/emoji/1_119.png index b7864a482a..0572730f68 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_119.png and b/TMessagesProj/src/emojis/twitter/emoji/1_119.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_12.png b/TMessagesProj/src/emojis/twitter/emoji/1_12.png index dc60c6348f..1e022a9b49 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_12.png and b/TMessagesProj/src/emojis/twitter/emoji/1_12.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_120.png b/TMessagesProj/src/emojis/twitter/emoji/1_120.png index 8bf96f56c7..5bfc2ddbfc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_120.png and b/TMessagesProj/src/emojis/twitter/emoji/1_120.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_121.png b/TMessagesProj/src/emojis/twitter/emoji/1_121.png index f9c550b25c..9f101bad95 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_121.png and b/TMessagesProj/src/emojis/twitter/emoji/1_121.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_122.png b/TMessagesProj/src/emojis/twitter/emoji/1_122.png index 5f28e999a0..5cd20083eb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_122.png and b/TMessagesProj/src/emojis/twitter/emoji/1_122.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_123.png b/TMessagesProj/src/emojis/twitter/emoji/1_123.png index b6d61270c6..6041fb8143 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_123.png and b/TMessagesProj/src/emojis/twitter/emoji/1_123.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_124.png b/TMessagesProj/src/emojis/twitter/emoji/1_124.png index 5cf7b8bf5f..caf200265a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_124.png and b/TMessagesProj/src/emojis/twitter/emoji/1_124.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_125.png b/TMessagesProj/src/emojis/twitter/emoji/1_125.png index e36fd9e6ac..e06c4c24c4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_125.png and b/TMessagesProj/src/emojis/twitter/emoji/1_125.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_126.png b/TMessagesProj/src/emojis/twitter/emoji/1_126.png index e9be9eb0d3..ff23b4b30c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_126.png and b/TMessagesProj/src/emojis/twitter/emoji/1_126.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_127.png b/TMessagesProj/src/emojis/twitter/emoji/1_127.png index 2f48884ec1..73efad602c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_127.png and b/TMessagesProj/src/emojis/twitter/emoji/1_127.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_128.png b/TMessagesProj/src/emojis/twitter/emoji/1_128.png index 3211298cf1..26adab20ae 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_128.png and b/TMessagesProj/src/emojis/twitter/emoji/1_128.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_129.png b/TMessagesProj/src/emojis/twitter/emoji/1_129.png index 331158dd41..c26ace5978 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_129.png and b/TMessagesProj/src/emojis/twitter/emoji/1_129.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_13.png b/TMessagesProj/src/emojis/twitter/emoji/1_13.png index d460d0ec75..9ba1ef0860 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_13.png and b/TMessagesProj/src/emojis/twitter/emoji/1_13.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_130.png b/TMessagesProj/src/emojis/twitter/emoji/1_130.png index af5df69e09..84eeba6d05 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_130.png and b/TMessagesProj/src/emojis/twitter/emoji/1_130.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_131.png b/TMessagesProj/src/emojis/twitter/emoji/1_131.png index 1a61e054fe..c05991db1f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_131.png and b/TMessagesProj/src/emojis/twitter/emoji/1_131.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_132.png b/TMessagesProj/src/emojis/twitter/emoji/1_132.png index 12906ab67b..85cbaeeeb4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_132.png and b/TMessagesProj/src/emojis/twitter/emoji/1_132.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_133.png b/TMessagesProj/src/emojis/twitter/emoji/1_133.png index 7c1672cab4..6fe8d1eaaa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_133.png and b/TMessagesProj/src/emojis/twitter/emoji/1_133.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_134.png b/TMessagesProj/src/emojis/twitter/emoji/1_134.png index d8cf3863bb..dc7e0d88e9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_134.png and b/TMessagesProj/src/emojis/twitter/emoji/1_134.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_135.png b/TMessagesProj/src/emojis/twitter/emoji/1_135.png index 70c13481e9..a76589d2f7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_135.png and b/TMessagesProj/src/emojis/twitter/emoji/1_135.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_136.png b/TMessagesProj/src/emojis/twitter/emoji/1_136.png index 2a8a25e8e4..ea52fd2a53 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_136.png and b/TMessagesProj/src/emojis/twitter/emoji/1_136.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_137.png b/TMessagesProj/src/emojis/twitter/emoji/1_137.png index 17dd84ea3d..8528e176f8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_137.png and b/TMessagesProj/src/emojis/twitter/emoji/1_137.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_138.png b/TMessagesProj/src/emojis/twitter/emoji/1_138.png index 008cefbc83..74574de7ae 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_138.png and 
b/TMessagesProj/src/emojis/twitter/emoji/1_138.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_139.png b/TMessagesProj/src/emojis/twitter/emoji/1_139.png index b91851dbd4..818eb64d3c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_139.png and b/TMessagesProj/src/emojis/twitter/emoji/1_139.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_14.png b/TMessagesProj/src/emojis/twitter/emoji/1_14.png index d466db42e4..a1e4af5221 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_14.png and b/TMessagesProj/src/emojis/twitter/emoji/1_14.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_140.png b/TMessagesProj/src/emojis/twitter/emoji/1_140.png index 299923a55b..c3be4595b1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_140.png and b/TMessagesProj/src/emojis/twitter/emoji/1_140.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_141.png b/TMessagesProj/src/emojis/twitter/emoji/1_141.png index 20d69dc268..111711d4bc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_141.png and b/TMessagesProj/src/emojis/twitter/emoji/1_141.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_142.png b/TMessagesProj/src/emojis/twitter/emoji/1_142.png index 91b8e8533e..508001de7e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_142.png and b/TMessagesProj/src/emojis/twitter/emoji/1_142.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_143.png b/TMessagesProj/src/emojis/twitter/emoji/1_143.png index f1cd047829..e246e8cc84 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_143.png and b/TMessagesProj/src/emojis/twitter/emoji/1_143.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_144.png b/TMessagesProj/src/emojis/twitter/emoji/1_144.png index 657793583d..af19e1da39 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_144.png and b/TMessagesProj/src/emojis/twitter/emoji/1_144.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_145.png b/TMessagesProj/src/emojis/twitter/emoji/1_145.png index e9d7bd42c6..709e5516c8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_145.png and b/TMessagesProj/src/emojis/twitter/emoji/1_145.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_146.png b/TMessagesProj/src/emojis/twitter/emoji/1_146.png index f346d5863d..10bb5a8c21 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_146.png and b/TMessagesProj/src/emojis/twitter/emoji/1_146.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_147.png b/TMessagesProj/src/emojis/twitter/emoji/1_147.png index 2eccde6f82..2d101ff669 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_147.png and b/TMessagesProj/src/emojis/twitter/emoji/1_147.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_148.png b/TMessagesProj/src/emojis/twitter/emoji/1_148.png index f8a688e4dc..a560096fa8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_148.png and b/TMessagesProj/src/emojis/twitter/emoji/1_148.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_149.png b/TMessagesProj/src/emojis/twitter/emoji/1_149.png index 9c76d52bd1..c5f7a3ccc0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_149.png and b/TMessagesProj/src/emojis/twitter/emoji/1_149.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_15.png b/TMessagesProj/src/emojis/twitter/emoji/1_15.png index 5bcb58edc2..f1648589a9 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/1_15.png and b/TMessagesProj/src/emojis/twitter/emoji/1_15.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_150.png b/TMessagesProj/src/emojis/twitter/emoji/1_150.png index 7b499b7cc5..eac133f6c1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_150.png and b/TMessagesProj/src/emojis/twitter/emoji/1_150.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_151.png b/TMessagesProj/src/emojis/twitter/emoji/1_151.png index 5f1614316a..81a39d2908 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_151.png and b/TMessagesProj/src/emojis/twitter/emoji/1_151.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_152.png b/TMessagesProj/src/emojis/twitter/emoji/1_152.png index a683387616..57be921498 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_152.png and b/TMessagesProj/src/emojis/twitter/emoji/1_152.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_153.png b/TMessagesProj/src/emojis/twitter/emoji/1_153.png index 300d0180ef..60149deeaa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_153.png and b/TMessagesProj/src/emojis/twitter/emoji/1_153.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_154.png b/TMessagesProj/src/emojis/twitter/emoji/1_154.png index 2b8c41727f..04bb204b68 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_154.png and b/TMessagesProj/src/emojis/twitter/emoji/1_154.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_155.png b/TMessagesProj/src/emojis/twitter/emoji/1_155.png index f3fce1eebb..d360a525bc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_155.png and b/TMessagesProj/src/emojis/twitter/emoji/1_155.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_156.png b/TMessagesProj/src/emojis/twitter/emoji/1_156.png index f6944a9422..1e27d4e76f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_156.png and b/TMessagesProj/src/emojis/twitter/emoji/1_156.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_157.png b/TMessagesProj/src/emojis/twitter/emoji/1_157.png index b0ed1ade91..38c7a73786 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_157.png and b/TMessagesProj/src/emojis/twitter/emoji/1_157.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_158.png b/TMessagesProj/src/emojis/twitter/emoji/1_158.png index df0e7197d1..beccb13a4a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_158.png and b/TMessagesProj/src/emojis/twitter/emoji/1_158.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_159.png b/TMessagesProj/src/emojis/twitter/emoji/1_159.png index 9ab985b0fc..bbe40e3d45 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_159.png and b/TMessagesProj/src/emojis/twitter/emoji/1_159.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_16.png b/TMessagesProj/src/emojis/twitter/emoji/1_16.png index 6b3ec3c60b..ab9cd7f1e7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_16.png and b/TMessagesProj/src/emojis/twitter/emoji/1_16.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_160.png b/TMessagesProj/src/emojis/twitter/emoji/1_160.png index 9c4989fa20..2f6157fdaa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_160.png and b/TMessagesProj/src/emojis/twitter/emoji/1_160.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_161.png b/TMessagesProj/src/emojis/twitter/emoji/1_161.png index 174bbae4d5..1f9a265450 
100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_161.png and b/TMessagesProj/src/emojis/twitter/emoji/1_161.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_162.png b/TMessagesProj/src/emojis/twitter/emoji/1_162.png index e2557c661b..f38e5ab8b6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_162.png and b/TMessagesProj/src/emojis/twitter/emoji/1_162.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_163.png b/TMessagesProj/src/emojis/twitter/emoji/1_163.png index 57a90a0c6a..d2676ff0b1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_163.png and b/TMessagesProj/src/emojis/twitter/emoji/1_163.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_164.png b/TMessagesProj/src/emojis/twitter/emoji/1_164.png index d9f29c6741..c9457ae36a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_164.png and b/TMessagesProj/src/emojis/twitter/emoji/1_164.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_165.png b/TMessagesProj/src/emojis/twitter/emoji/1_165.png index 7f231f9288..50e53311d6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_165.png and b/TMessagesProj/src/emojis/twitter/emoji/1_165.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_166.png b/TMessagesProj/src/emojis/twitter/emoji/1_166.png index 2f11b85ef4..98dc46615c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_166.png and b/TMessagesProj/src/emojis/twitter/emoji/1_166.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_167.png b/TMessagesProj/src/emojis/twitter/emoji/1_167.png index e4bbdde219..67c97e7b01 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_167.png and b/TMessagesProj/src/emojis/twitter/emoji/1_167.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_168.png b/TMessagesProj/src/emojis/twitter/emoji/1_168.png index 3fc1fe32e2..f85719d6e7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_168.png and b/TMessagesProj/src/emojis/twitter/emoji/1_168.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_169.png b/TMessagesProj/src/emojis/twitter/emoji/1_169.png index 728f187b41..2aa36cf6b7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_169.png and b/TMessagesProj/src/emojis/twitter/emoji/1_169.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_17.png b/TMessagesProj/src/emojis/twitter/emoji/1_17.png index bb3aed985d..a00c813c47 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_17.png and b/TMessagesProj/src/emojis/twitter/emoji/1_17.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_170.png b/TMessagesProj/src/emojis/twitter/emoji/1_170.png index a9451ddece..f95ada2f78 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_170.png and b/TMessagesProj/src/emojis/twitter/emoji/1_170.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_171.png b/TMessagesProj/src/emojis/twitter/emoji/1_171.png index f85e12d619..adbb90e1d7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_171.png and b/TMessagesProj/src/emojis/twitter/emoji/1_171.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_172.png b/TMessagesProj/src/emojis/twitter/emoji/1_172.png index f8cd4babdc..eb1ded7785 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_172.png and b/TMessagesProj/src/emojis/twitter/emoji/1_172.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_173.png b/TMessagesProj/src/emojis/twitter/emoji/1_173.png index 
a20fc5ca04..d5002a39df 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_173.png and b/TMessagesProj/src/emojis/twitter/emoji/1_173.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_174.png b/TMessagesProj/src/emojis/twitter/emoji/1_174.png index 9f01ead4c3..9920726248 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_174.png and b/TMessagesProj/src/emojis/twitter/emoji/1_174.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_175.png b/TMessagesProj/src/emojis/twitter/emoji/1_175.png index d4f04c53d1..3219c231f3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_175.png and b/TMessagesProj/src/emojis/twitter/emoji/1_175.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_176.png b/TMessagesProj/src/emojis/twitter/emoji/1_176.png index a820f97925..df7c6430fa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_176.png and b/TMessagesProj/src/emojis/twitter/emoji/1_176.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_177.png b/TMessagesProj/src/emojis/twitter/emoji/1_177.png index 49d758b75d..4d8219cbf3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_177.png and b/TMessagesProj/src/emojis/twitter/emoji/1_177.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_178.png b/TMessagesProj/src/emojis/twitter/emoji/1_178.png index f20cf497a8..9af2067ea2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_178.png and b/TMessagesProj/src/emojis/twitter/emoji/1_178.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_179.png b/TMessagesProj/src/emojis/twitter/emoji/1_179.png index beeef20ebe..e919099a56 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_179.png and b/TMessagesProj/src/emojis/twitter/emoji/1_179.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_18.png b/TMessagesProj/src/emojis/twitter/emoji/1_18.png index 15c9eb75f2..fc43cc392f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_18.png and b/TMessagesProj/src/emojis/twitter/emoji/1_18.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_180.png b/TMessagesProj/src/emojis/twitter/emoji/1_180.png index 046f0d1f31..2a944c0a85 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_180.png and b/TMessagesProj/src/emojis/twitter/emoji/1_180.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_181.png b/TMessagesProj/src/emojis/twitter/emoji/1_181.png index c2bb0f6b81..5456d71588 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_181.png and b/TMessagesProj/src/emojis/twitter/emoji/1_181.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_182.png b/TMessagesProj/src/emojis/twitter/emoji/1_182.png index e48d773b87..a749377480 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_182.png and b/TMessagesProj/src/emojis/twitter/emoji/1_182.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_183.png b/TMessagesProj/src/emojis/twitter/emoji/1_183.png index 02b00c875b..6d63990d52 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_183.png and b/TMessagesProj/src/emojis/twitter/emoji/1_183.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_184.png b/TMessagesProj/src/emojis/twitter/emoji/1_184.png index ab22a02640..cddba27cfb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_184.png and b/TMessagesProj/src/emojis/twitter/emoji/1_184.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_185.png 
b/TMessagesProj/src/emojis/twitter/emoji/1_185.png index 4f2c6dace2..2a3a0ecd35 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_185.png and b/TMessagesProj/src/emojis/twitter/emoji/1_185.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_186.png b/TMessagesProj/src/emojis/twitter/emoji/1_186.png index 0351ea51d2..d5a68c2585 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_186.png and b/TMessagesProj/src/emojis/twitter/emoji/1_186.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_187.png b/TMessagesProj/src/emojis/twitter/emoji/1_187.png index d24aa33448..538e2b83ee 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_187.png and b/TMessagesProj/src/emojis/twitter/emoji/1_187.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_188.png b/TMessagesProj/src/emojis/twitter/emoji/1_188.png index 00513f2320..3d8f451a07 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_188.png and b/TMessagesProj/src/emojis/twitter/emoji/1_188.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_189.png b/TMessagesProj/src/emojis/twitter/emoji/1_189.png index e82e9e1e70..0df91162dc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_189.png and b/TMessagesProj/src/emojis/twitter/emoji/1_189.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_19.png b/TMessagesProj/src/emojis/twitter/emoji/1_19.png index 9bc3bc06ec..24b7eb9acd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_19.png and b/TMessagesProj/src/emojis/twitter/emoji/1_19.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_190.png b/TMessagesProj/src/emojis/twitter/emoji/1_190.png index 991d910af9..fe75d7d729 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_190.png and b/TMessagesProj/src/emojis/twitter/emoji/1_190.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_191.png b/TMessagesProj/src/emojis/twitter/emoji/1_191.png index 6b56413a36..999baf45bd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_191.png and b/TMessagesProj/src/emojis/twitter/emoji/1_191.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_192.png b/TMessagesProj/src/emojis/twitter/emoji/1_192.png index 7ce5884656..d5a57f1ea6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_192.png and b/TMessagesProj/src/emojis/twitter/emoji/1_192.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_193.png b/TMessagesProj/src/emojis/twitter/emoji/1_193.png index e6d9180b79..ab87201282 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_193.png and b/TMessagesProj/src/emojis/twitter/emoji/1_193.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_194.png b/TMessagesProj/src/emojis/twitter/emoji/1_194.png index eb7a5c1027..7486f36470 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_194.png and b/TMessagesProj/src/emojis/twitter/emoji/1_194.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_195.png b/TMessagesProj/src/emojis/twitter/emoji/1_195.png index ab8b07d919..a8f3d0101b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_195.png and b/TMessagesProj/src/emojis/twitter/emoji/1_195.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_196.png b/TMessagesProj/src/emojis/twitter/emoji/1_196.png index d50bebbdc4..b5b49d8abc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_196.png and b/TMessagesProj/src/emojis/twitter/emoji/1_196.png differ diff --git 
a/TMessagesProj/src/emojis/twitter/emoji/1_197.png b/TMessagesProj/src/emojis/twitter/emoji/1_197.png index 85e437a6fc..b36673e3e4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_197.png and b/TMessagesProj/src/emojis/twitter/emoji/1_197.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_198.png b/TMessagesProj/src/emojis/twitter/emoji/1_198.png index 67db70ffbe..5133f9e491 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_198.png and b/TMessagesProj/src/emojis/twitter/emoji/1_198.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_199.png b/TMessagesProj/src/emojis/twitter/emoji/1_199.png index a58714f730..589c5cbdaf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_199.png and b/TMessagesProj/src/emojis/twitter/emoji/1_199.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_2.png b/TMessagesProj/src/emojis/twitter/emoji/1_2.png index f3a26a4577..99a7e47648 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_2.png and b/TMessagesProj/src/emojis/twitter/emoji/1_2.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_20.png b/TMessagesProj/src/emojis/twitter/emoji/1_20.png index 6767775f3b..413f37af2e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_20.png and b/TMessagesProj/src/emojis/twitter/emoji/1_20.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_200.png b/TMessagesProj/src/emojis/twitter/emoji/1_200.png index dc8b09485d..b0b4a1fa8d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_200.png and b/TMessagesProj/src/emojis/twitter/emoji/1_200.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_201.png b/TMessagesProj/src/emojis/twitter/emoji/1_201.png index 382a202c5f..e23c12cefc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_201.png and b/TMessagesProj/src/emojis/twitter/emoji/1_201.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_202.png b/TMessagesProj/src/emojis/twitter/emoji/1_202.png index 28924d3f51..f769329ed3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_202.png and b/TMessagesProj/src/emojis/twitter/emoji/1_202.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_203.png b/TMessagesProj/src/emojis/twitter/emoji/1_203.png index 91d7616c8b..4559f864d5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_203.png and b/TMessagesProj/src/emojis/twitter/emoji/1_203.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_204.png b/TMessagesProj/src/emojis/twitter/emoji/1_204.png new file mode 100644 index 0000000000..c46c3f7714 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/1_204.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_205.png b/TMessagesProj/src/emojis/twitter/emoji/1_205.png new file mode 100644 index 0000000000..87cc483b1c Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/1_205.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_206.png b/TMessagesProj/src/emojis/twitter/emoji/1_206.png new file mode 100644 index 0000000000..a7f2b770f8 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/1_206.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_207.png b/TMessagesProj/src/emojis/twitter/emoji/1_207.png new file mode 100644 index 0000000000..23f77ebac6 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/1_207.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_208.png 
b/TMessagesProj/src/emojis/twitter/emoji/1_208.png new file mode 100644 index 0000000000..bc03845850 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/1_208.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_209.png b/TMessagesProj/src/emojis/twitter/emoji/1_209.png new file mode 100644 index 0000000000..bfd470a9af Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/1_209.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_21.png b/TMessagesProj/src/emojis/twitter/emoji/1_21.png index 5ed2abff48..8d9cd7e3a2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_21.png and b/TMessagesProj/src/emojis/twitter/emoji/1_21.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_210.png b/TMessagesProj/src/emojis/twitter/emoji/1_210.png new file mode 100644 index 0000000000..bb2cae30a1 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/1_210.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_22.png b/TMessagesProj/src/emojis/twitter/emoji/1_22.png index 50275a13c0..8929181c97 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_22.png and b/TMessagesProj/src/emojis/twitter/emoji/1_22.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_23.png b/TMessagesProj/src/emojis/twitter/emoji/1_23.png index 8d5f973bc6..c908ef70c5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_23.png and b/TMessagesProj/src/emojis/twitter/emoji/1_23.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_24.png b/TMessagesProj/src/emojis/twitter/emoji/1_24.png index 4a79ddd9cc..d9de0b8b10 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_24.png and b/TMessagesProj/src/emojis/twitter/emoji/1_24.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_25.png b/TMessagesProj/src/emojis/twitter/emoji/1_25.png index 3757571867..352f34194e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_25.png and b/TMessagesProj/src/emojis/twitter/emoji/1_25.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_26.png b/TMessagesProj/src/emojis/twitter/emoji/1_26.png index 04b1450c1e..bafb6abd4a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_26.png and b/TMessagesProj/src/emojis/twitter/emoji/1_26.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_27.png b/TMessagesProj/src/emojis/twitter/emoji/1_27.png index 5fb24ca43f..10eb79b39a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_27.png and b/TMessagesProj/src/emojis/twitter/emoji/1_27.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_28.png b/TMessagesProj/src/emojis/twitter/emoji/1_28.png index b1ca45c7bb..e567b76f46 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_28.png and b/TMessagesProj/src/emojis/twitter/emoji/1_28.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_29.png b/TMessagesProj/src/emojis/twitter/emoji/1_29.png index be244d9e66..2ea59ea55b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_29.png and b/TMessagesProj/src/emojis/twitter/emoji/1_29.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_3.png b/TMessagesProj/src/emojis/twitter/emoji/1_3.png index 9b255a689a..9b88892d66 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_3.png and b/TMessagesProj/src/emojis/twitter/emoji/1_3.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_30.png b/TMessagesProj/src/emojis/twitter/emoji/1_30.png index edcbcfe864..56ec4060f4 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/1_30.png and b/TMessagesProj/src/emojis/twitter/emoji/1_30.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_31.png b/TMessagesProj/src/emojis/twitter/emoji/1_31.png index 32b82e5893..67b6d6153a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_31.png and b/TMessagesProj/src/emojis/twitter/emoji/1_31.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_32.png b/TMessagesProj/src/emojis/twitter/emoji/1_32.png index f5e11fa70b..5b68cc9fd8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_32.png and b/TMessagesProj/src/emojis/twitter/emoji/1_32.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_33.png b/TMessagesProj/src/emojis/twitter/emoji/1_33.png index bd932076cc..17e0b9e24a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_33.png and b/TMessagesProj/src/emojis/twitter/emoji/1_33.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_34.png b/TMessagesProj/src/emojis/twitter/emoji/1_34.png index 95d9aa5c6b..6d6ebe9fb7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_34.png and b/TMessagesProj/src/emojis/twitter/emoji/1_34.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_35.png b/TMessagesProj/src/emojis/twitter/emoji/1_35.png index 7bcbba5027..2ebedd6742 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_35.png and b/TMessagesProj/src/emojis/twitter/emoji/1_35.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_36.png b/TMessagesProj/src/emojis/twitter/emoji/1_36.png index 5529c6cae0..7769bd1cd5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_36.png and b/TMessagesProj/src/emojis/twitter/emoji/1_36.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_37.png b/TMessagesProj/src/emojis/twitter/emoji/1_37.png index 9762d93e59..50fc01b816 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_37.png and b/TMessagesProj/src/emojis/twitter/emoji/1_37.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_38.png b/TMessagesProj/src/emojis/twitter/emoji/1_38.png index 9f595daeba..e68f7df45b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_38.png and b/TMessagesProj/src/emojis/twitter/emoji/1_38.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_39.png b/TMessagesProj/src/emojis/twitter/emoji/1_39.png index 99b13137b7..f99048306d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_39.png and b/TMessagesProj/src/emojis/twitter/emoji/1_39.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_4.png b/TMessagesProj/src/emojis/twitter/emoji/1_4.png index b9e1954721..7fc3322193 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_4.png and b/TMessagesProj/src/emojis/twitter/emoji/1_4.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_40.png b/TMessagesProj/src/emojis/twitter/emoji/1_40.png index f966bf5401..17761cbf19 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_40.png and b/TMessagesProj/src/emojis/twitter/emoji/1_40.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_41.png b/TMessagesProj/src/emojis/twitter/emoji/1_41.png index 1f0aeccbf8..f667bf3770 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_41.png and b/TMessagesProj/src/emojis/twitter/emoji/1_41.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_42.png b/TMessagesProj/src/emojis/twitter/emoji/1_42.png index b48b3448f5..b39cd89857 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/1_42.png and b/TMessagesProj/src/emojis/twitter/emoji/1_42.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_43.png b/TMessagesProj/src/emojis/twitter/emoji/1_43.png index 923043501f..a80975b5d8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_43.png and b/TMessagesProj/src/emojis/twitter/emoji/1_43.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_44.png b/TMessagesProj/src/emojis/twitter/emoji/1_44.png index 9219671206..bc7bd1039d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_44.png and b/TMessagesProj/src/emojis/twitter/emoji/1_44.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_45.png b/TMessagesProj/src/emojis/twitter/emoji/1_45.png index a514be0eb0..6b7988032f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_45.png and b/TMessagesProj/src/emojis/twitter/emoji/1_45.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_46.png b/TMessagesProj/src/emojis/twitter/emoji/1_46.png index c1b408061d..df2e3e728c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_46.png and b/TMessagesProj/src/emojis/twitter/emoji/1_46.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_47.png b/TMessagesProj/src/emojis/twitter/emoji/1_47.png index d5b41301f4..8a4d2f42ef 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_47.png and b/TMessagesProj/src/emojis/twitter/emoji/1_47.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_48.png b/TMessagesProj/src/emojis/twitter/emoji/1_48.png index 70606eabce..278f511e0e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_48.png and b/TMessagesProj/src/emojis/twitter/emoji/1_48.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_49.png b/TMessagesProj/src/emojis/twitter/emoji/1_49.png index 00757beb94..36fd49fb53 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_49.png and b/TMessagesProj/src/emojis/twitter/emoji/1_49.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_5.png b/TMessagesProj/src/emojis/twitter/emoji/1_5.png index 16496b68d3..aba2064767 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_5.png and b/TMessagesProj/src/emojis/twitter/emoji/1_5.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_50.png b/TMessagesProj/src/emojis/twitter/emoji/1_50.png index 1fe6aa2221..c73efc0d6d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_50.png and b/TMessagesProj/src/emojis/twitter/emoji/1_50.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_51.png b/TMessagesProj/src/emojis/twitter/emoji/1_51.png index fb8f878810..99de15d23c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_51.png and b/TMessagesProj/src/emojis/twitter/emoji/1_51.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_52.png b/TMessagesProj/src/emojis/twitter/emoji/1_52.png index 62403992c4..15359be343 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_52.png and b/TMessagesProj/src/emojis/twitter/emoji/1_52.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_53.png b/TMessagesProj/src/emojis/twitter/emoji/1_53.png index d76541386c..99bdbc9531 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_53.png and b/TMessagesProj/src/emojis/twitter/emoji/1_53.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_54.png b/TMessagesProj/src/emojis/twitter/emoji/1_54.png index b9ee236fb6..44ebdf38ef 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/1_54.png and b/TMessagesProj/src/emojis/twitter/emoji/1_54.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_55.png b/TMessagesProj/src/emojis/twitter/emoji/1_55.png index 8bff5769b1..8dd72b2ebb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_55.png and b/TMessagesProj/src/emojis/twitter/emoji/1_55.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_56.png b/TMessagesProj/src/emojis/twitter/emoji/1_56.png index 6353c6e02e..77c623246c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_56.png and b/TMessagesProj/src/emojis/twitter/emoji/1_56.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_57.png b/TMessagesProj/src/emojis/twitter/emoji/1_57.png index dd1242d22f..be607bc9d8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_57.png and b/TMessagesProj/src/emojis/twitter/emoji/1_57.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_58.png b/TMessagesProj/src/emojis/twitter/emoji/1_58.png index 0d22289db9..85b7406d7c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_58.png and b/TMessagesProj/src/emojis/twitter/emoji/1_58.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_59.png b/TMessagesProj/src/emojis/twitter/emoji/1_59.png index 41176f5502..6a129cf618 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_59.png and b/TMessagesProj/src/emojis/twitter/emoji/1_59.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_6.png b/TMessagesProj/src/emojis/twitter/emoji/1_6.png index 10aa540356..8ff3eb587e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_6.png and b/TMessagesProj/src/emojis/twitter/emoji/1_6.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_60.png b/TMessagesProj/src/emojis/twitter/emoji/1_60.png index 07089da394..f712b153bd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_60.png and b/TMessagesProj/src/emojis/twitter/emoji/1_60.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_61.png b/TMessagesProj/src/emojis/twitter/emoji/1_61.png index 4b1211f907..805b1ad1b2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_61.png and b/TMessagesProj/src/emojis/twitter/emoji/1_61.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_62.png b/TMessagesProj/src/emojis/twitter/emoji/1_62.png index 033556d798..341e4b0dc1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_62.png and b/TMessagesProj/src/emojis/twitter/emoji/1_62.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_63.png b/TMessagesProj/src/emojis/twitter/emoji/1_63.png index 6abdc8cb0d..4cb8c362c7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_63.png and b/TMessagesProj/src/emojis/twitter/emoji/1_63.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_64.png b/TMessagesProj/src/emojis/twitter/emoji/1_64.png index 38ef088d16..a50407932b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_64.png and b/TMessagesProj/src/emojis/twitter/emoji/1_64.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_65.png b/TMessagesProj/src/emojis/twitter/emoji/1_65.png index ccb1e0a9b5..a5b1d6032d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_65.png and b/TMessagesProj/src/emojis/twitter/emoji/1_65.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_66.png b/TMessagesProj/src/emojis/twitter/emoji/1_66.png index 89494a85ee..a8630ec8f5 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/1_66.png and b/TMessagesProj/src/emojis/twitter/emoji/1_66.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_67.png b/TMessagesProj/src/emojis/twitter/emoji/1_67.png index 55be9da628..5de61868a5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_67.png and b/TMessagesProj/src/emojis/twitter/emoji/1_67.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_68.png b/TMessagesProj/src/emojis/twitter/emoji/1_68.png index f3faee9741..11049055ac 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_68.png and b/TMessagesProj/src/emojis/twitter/emoji/1_68.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_69.png b/TMessagesProj/src/emojis/twitter/emoji/1_69.png index daf3a66a57..0f1bce852c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_69.png and b/TMessagesProj/src/emojis/twitter/emoji/1_69.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_7.png b/TMessagesProj/src/emojis/twitter/emoji/1_7.png index 5f2a053da5..efddb778e7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_7.png and b/TMessagesProj/src/emojis/twitter/emoji/1_7.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_70.png b/TMessagesProj/src/emojis/twitter/emoji/1_70.png index 405a9fa401..1b930156ea 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_70.png and b/TMessagesProj/src/emojis/twitter/emoji/1_70.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_71.png b/TMessagesProj/src/emojis/twitter/emoji/1_71.png index 4ed85b1bb6..43607e3686 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_71.png and b/TMessagesProj/src/emojis/twitter/emoji/1_71.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_72.png b/TMessagesProj/src/emojis/twitter/emoji/1_72.png index 76a9f4456a..51fd75580b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_72.png and b/TMessagesProj/src/emojis/twitter/emoji/1_72.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_73.png b/TMessagesProj/src/emojis/twitter/emoji/1_73.png index 94813d2a0e..8caabf3a0a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_73.png and b/TMessagesProj/src/emojis/twitter/emoji/1_73.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_74.png b/TMessagesProj/src/emojis/twitter/emoji/1_74.png index e6fe3e5982..31edef5302 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_74.png and b/TMessagesProj/src/emojis/twitter/emoji/1_74.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_75.png b/TMessagesProj/src/emojis/twitter/emoji/1_75.png index a08ba61812..b1acb24ffc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_75.png and b/TMessagesProj/src/emojis/twitter/emoji/1_75.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_76.png b/TMessagesProj/src/emojis/twitter/emoji/1_76.png index ee7dd6da2d..99d91dd30d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_76.png and b/TMessagesProj/src/emojis/twitter/emoji/1_76.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_77.png b/TMessagesProj/src/emojis/twitter/emoji/1_77.png index 6686175175..c66227eb36 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_77.png and b/TMessagesProj/src/emojis/twitter/emoji/1_77.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_78.png b/TMessagesProj/src/emojis/twitter/emoji/1_78.png index f5465994de..a1eb02825b 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/1_78.png and b/TMessagesProj/src/emojis/twitter/emoji/1_78.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_79.png b/TMessagesProj/src/emojis/twitter/emoji/1_79.png index e568328042..17687d90f7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_79.png and b/TMessagesProj/src/emojis/twitter/emoji/1_79.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_8.png b/TMessagesProj/src/emojis/twitter/emoji/1_8.png index 35cecf6dd3..020ae5ef6e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_8.png and b/TMessagesProj/src/emojis/twitter/emoji/1_8.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_80.png b/TMessagesProj/src/emojis/twitter/emoji/1_80.png index 39cb61c6d3..ce16ca4422 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_80.png and b/TMessagesProj/src/emojis/twitter/emoji/1_80.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_81.png b/TMessagesProj/src/emojis/twitter/emoji/1_81.png index 5cf1bc1b1d..1de971fbb6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_81.png and b/TMessagesProj/src/emojis/twitter/emoji/1_81.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_82.png b/TMessagesProj/src/emojis/twitter/emoji/1_82.png index 31ecb89cc8..71ee00a4ec 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_82.png and b/TMessagesProj/src/emojis/twitter/emoji/1_82.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_83.png b/TMessagesProj/src/emojis/twitter/emoji/1_83.png index d96a78d7c0..04054340a4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_83.png and b/TMessagesProj/src/emojis/twitter/emoji/1_83.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_84.png b/TMessagesProj/src/emojis/twitter/emoji/1_84.png index 6a65189126..7c471fe5f6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_84.png and b/TMessagesProj/src/emojis/twitter/emoji/1_84.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_85.png b/TMessagesProj/src/emojis/twitter/emoji/1_85.png index 53e42a1788..c8f592ab4b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_85.png and b/TMessagesProj/src/emojis/twitter/emoji/1_85.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_86.png b/TMessagesProj/src/emojis/twitter/emoji/1_86.png index 811f879841..9e38099880 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_86.png and b/TMessagesProj/src/emojis/twitter/emoji/1_86.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_87.png b/TMessagesProj/src/emojis/twitter/emoji/1_87.png index 56c9f31542..3ae1030c5e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_87.png and b/TMessagesProj/src/emojis/twitter/emoji/1_87.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_88.png b/TMessagesProj/src/emojis/twitter/emoji/1_88.png index d7b82cfe06..c6fed18ff7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_88.png and b/TMessagesProj/src/emojis/twitter/emoji/1_88.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_89.png b/TMessagesProj/src/emojis/twitter/emoji/1_89.png index f4edf78d05..e95d1fbcb8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_89.png and b/TMessagesProj/src/emojis/twitter/emoji/1_89.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_9.png b/TMessagesProj/src/emojis/twitter/emoji/1_9.png index 9bacea4320..06d38b4cdb 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/1_9.png and b/TMessagesProj/src/emojis/twitter/emoji/1_9.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_90.png b/TMessagesProj/src/emojis/twitter/emoji/1_90.png index 113774147a..0d46322141 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_90.png and b/TMessagesProj/src/emojis/twitter/emoji/1_90.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_91.png b/TMessagesProj/src/emojis/twitter/emoji/1_91.png index ee84f47e62..ba1a309431 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_91.png and b/TMessagesProj/src/emojis/twitter/emoji/1_91.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_92.png b/TMessagesProj/src/emojis/twitter/emoji/1_92.png index ff35ce709f..e0e2475bc7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_92.png and b/TMessagesProj/src/emojis/twitter/emoji/1_92.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_93.png b/TMessagesProj/src/emojis/twitter/emoji/1_93.png index cb3df30525..4c1182419f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_93.png and b/TMessagesProj/src/emojis/twitter/emoji/1_93.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_94.png b/TMessagesProj/src/emojis/twitter/emoji/1_94.png index 813846d7e9..b859007ce1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_94.png and b/TMessagesProj/src/emojis/twitter/emoji/1_94.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_95.png b/TMessagesProj/src/emojis/twitter/emoji/1_95.png index 63abfaf898..4370c93017 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_95.png and b/TMessagesProj/src/emojis/twitter/emoji/1_95.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_96.png b/TMessagesProj/src/emojis/twitter/emoji/1_96.png index 32f9334faf..31e25270ad 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_96.png and b/TMessagesProj/src/emojis/twitter/emoji/1_96.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_97.png b/TMessagesProj/src/emojis/twitter/emoji/1_97.png index 512ae7070a..cef72a5d99 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_97.png and b/TMessagesProj/src/emojis/twitter/emoji/1_97.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_98.png b/TMessagesProj/src/emojis/twitter/emoji/1_98.png index 8936a0fa60..d87c28d49e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_98.png and b/TMessagesProj/src/emojis/twitter/emoji/1_98.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/1_99.png b/TMessagesProj/src/emojis/twitter/emoji/1_99.png index b31469c607..be4e169a10 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/1_99.png and b/TMessagesProj/src/emojis/twitter/emoji/1_99.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_0.png b/TMessagesProj/src/emojis/twitter/emoji/2_0.png index 71925bb5d8..12009d622e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_0.png and b/TMessagesProj/src/emojis/twitter/emoji/2_0.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_1.png b/TMessagesProj/src/emojis/twitter/emoji/2_1.png index 3245b1ad6e..ff31ae4c2a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_1.png and b/TMessagesProj/src/emojis/twitter/emoji/2_1.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_10.png b/TMessagesProj/src/emojis/twitter/emoji/2_10.png index f9e38df4ec..74999af8dd 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/2_10.png and b/TMessagesProj/src/emojis/twitter/emoji/2_10.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_100.png b/TMessagesProj/src/emojis/twitter/emoji/2_100.png index c237d88dc1..fb81984598 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_100.png and b/TMessagesProj/src/emojis/twitter/emoji/2_100.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_101.png b/TMessagesProj/src/emojis/twitter/emoji/2_101.png index 58eea42ebc..8ae015215c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_101.png and b/TMessagesProj/src/emojis/twitter/emoji/2_101.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_102.png b/TMessagesProj/src/emojis/twitter/emoji/2_102.png index 6739c249ee..6c3a823a08 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_102.png and b/TMessagesProj/src/emojis/twitter/emoji/2_102.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_103.png b/TMessagesProj/src/emojis/twitter/emoji/2_103.png index 45073fc0e8..5c89e562e0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_103.png and b/TMessagesProj/src/emojis/twitter/emoji/2_103.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_104.png b/TMessagesProj/src/emojis/twitter/emoji/2_104.png index 1be2882ea8..149209334f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_104.png and b/TMessagesProj/src/emojis/twitter/emoji/2_104.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_105.png b/TMessagesProj/src/emojis/twitter/emoji/2_105.png index bfd8a2029e..99f90fdadc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_105.png and b/TMessagesProj/src/emojis/twitter/emoji/2_105.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_106.png b/TMessagesProj/src/emojis/twitter/emoji/2_106.png index 1fb8deac62..3e13803bbb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_106.png and b/TMessagesProj/src/emojis/twitter/emoji/2_106.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_107.png b/TMessagesProj/src/emojis/twitter/emoji/2_107.png index b99653f2d0..33aeda7962 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_107.png and b/TMessagesProj/src/emojis/twitter/emoji/2_107.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_108.png b/TMessagesProj/src/emojis/twitter/emoji/2_108.png index ba3a0be640..4d4735dcbe 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_108.png and b/TMessagesProj/src/emojis/twitter/emoji/2_108.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_109.png b/TMessagesProj/src/emojis/twitter/emoji/2_109.png index 54741d67f1..b154d6dee6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_109.png and b/TMessagesProj/src/emojis/twitter/emoji/2_109.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_11.png b/TMessagesProj/src/emojis/twitter/emoji/2_11.png index b93790d90f..8eb80337f0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_11.png and b/TMessagesProj/src/emojis/twitter/emoji/2_11.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_110.png b/TMessagesProj/src/emojis/twitter/emoji/2_110.png index 50a1096ba7..1e12c5d4fd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_110.png and b/TMessagesProj/src/emojis/twitter/emoji/2_110.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_111.png b/TMessagesProj/src/emojis/twitter/emoji/2_111.png index ab582bc840..54c5ed80b4 
100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_111.png and b/TMessagesProj/src/emojis/twitter/emoji/2_111.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_112.png b/TMessagesProj/src/emojis/twitter/emoji/2_112.png index 73e126b4be..744b047a20 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_112.png and b/TMessagesProj/src/emojis/twitter/emoji/2_112.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_113.png b/TMessagesProj/src/emojis/twitter/emoji/2_113.png index 42dd7eaa0d..d75bc12517 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_113.png and b/TMessagesProj/src/emojis/twitter/emoji/2_113.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_114.png b/TMessagesProj/src/emojis/twitter/emoji/2_114.png index 5c1f902887..9aa6110262 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_114.png and b/TMessagesProj/src/emojis/twitter/emoji/2_114.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_115.png b/TMessagesProj/src/emojis/twitter/emoji/2_115.png index 8af9be471d..e551acb682 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_115.png and b/TMessagesProj/src/emojis/twitter/emoji/2_115.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_116.png b/TMessagesProj/src/emojis/twitter/emoji/2_116.png index 3ed6ab376d..ff00219f5e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_116.png and b/TMessagesProj/src/emojis/twitter/emoji/2_116.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_117.png b/TMessagesProj/src/emojis/twitter/emoji/2_117.png index aaba768d96..92b6cf689c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_117.png and b/TMessagesProj/src/emojis/twitter/emoji/2_117.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_118.png b/TMessagesProj/src/emojis/twitter/emoji/2_118.png index 5fd7337f38..e31a3074f7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_118.png and b/TMessagesProj/src/emojis/twitter/emoji/2_118.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_119.png b/TMessagesProj/src/emojis/twitter/emoji/2_119.png index 3ee8e3c0bf..595461771d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_119.png and b/TMessagesProj/src/emojis/twitter/emoji/2_119.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_12.png b/TMessagesProj/src/emojis/twitter/emoji/2_12.png index ac5b7a101a..276f29f7fa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_12.png and b/TMessagesProj/src/emojis/twitter/emoji/2_12.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_120.png b/TMessagesProj/src/emojis/twitter/emoji/2_120.png index 663362034b..145bd0cce1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_120.png and b/TMessagesProj/src/emojis/twitter/emoji/2_120.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_121.png b/TMessagesProj/src/emojis/twitter/emoji/2_121.png index a28b4ac86d..8e09440f22 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_121.png and b/TMessagesProj/src/emojis/twitter/emoji/2_121.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_122.png b/TMessagesProj/src/emojis/twitter/emoji/2_122.png index 06d01af242..e815f6901f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_122.png and b/TMessagesProj/src/emojis/twitter/emoji/2_122.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_123.png b/TMessagesProj/src/emojis/twitter/emoji/2_123.png index 
df93f23f3a..57bdb120ec 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_123.png and b/TMessagesProj/src/emojis/twitter/emoji/2_123.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_124.png b/TMessagesProj/src/emojis/twitter/emoji/2_124.png index 39232e6d40..7141b7cfae 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_124.png and b/TMessagesProj/src/emojis/twitter/emoji/2_124.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_125.png b/TMessagesProj/src/emojis/twitter/emoji/2_125.png index 949fd0c7cd..f819f56ef7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_125.png and b/TMessagesProj/src/emojis/twitter/emoji/2_125.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_126.png b/TMessagesProj/src/emojis/twitter/emoji/2_126.png new file mode 100644 index 0000000000..64dc278440 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/2_126.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_127.png b/TMessagesProj/src/emojis/twitter/emoji/2_127.png new file mode 100644 index 0000000000..abe676fe29 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/2_127.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_13.png b/TMessagesProj/src/emojis/twitter/emoji/2_13.png index 05927768e9..e4175a5cc3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_13.png and b/TMessagesProj/src/emojis/twitter/emoji/2_13.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_14.png b/TMessagesProj/src/emojis/twitter/emoji/2_14.png index 01c61403da..a97c7320a5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_14.png and b/TMessagesProj/src/emojis/twitter/emoji/2_14.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_15.png b/TMessagesProj/src/emojis/twitter/emoji/2_15.png index 3fe558509a..e9ccf38a01 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_15.png and b/TMessagesProj/src/emojis/twitter/emoji/2_15.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_16.png b/TMessagesProj/src/emojis/twitter/emoji/2_16.png index 8f09015cdd..4d10ff3522 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_16.png and b/TMessagesProj/src/emojis/twitter/emoji/2_16.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_17.png b/TMessagesProj/src/emojis/twitter/emoji/2_17.png index a77201ff1d..610758084d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_17.png and b/TMessagesProj/src/emojis/twitter/emoji/2_17.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_18.png b/TMessagesProj/src/emojis/twitter/emoji/2_18.png index a13ac98d7d..83dcb5f0fe 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_18.png and b/TMessagesProj/src/emojis/twitter/emoji/2_18.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_19.png b/TMessagesProj/src/emojis/twitter/emoji/2_19.png index 32204ae6c7..589ceac934 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_19.png and b/TMessagesProj/src/emojis/twitter/emoji/2_19.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_2.png b/TMessagesProj/src/emojis/twitter/emoji/2_2.png index 064d52209d..40c1007e59 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_2.png and b/TMessagesProj/src/emojis/twitter/emoji/2_2.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_20.png b/TMessagesProj/src/emojis/twitter/emoji/2_20.png index 22e1ff2cdb..7083d34f36 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/2_20.png and b/TMessagesProj/src/emojis/twitter/emoji/2_20.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_21.png b/TMessagesProj/src/emojis/twitter/emoji/2_21.png index 0151097403..72655d3d3c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_21.png and b/TMessagesProj/src/emojis/twitter/emoji/2_21.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_22.png b/TMessagesProj/src/emojis/twitter/emoji/2_22.png index 07304c6c0c..1a2daa941f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_22.png and b/TMessagesProj/src/emojis/twitter/emoji/2_22.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_23.png b/TMessagesProj/src/emojis/twitter/emoji/2_23.png index 567b20cf8b..6cd0c6829b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_23.png and b/TMessagesProj/src/emojis/twitter/emoji/2_23.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_24.png b/TMessagesProj/src/emojis/twitter/emoji/2_24.png index 404e2b9541..63190353cb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_24.png and b/TMessagesProj/src/emojis/twitter/emoji/2_24.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_25.png b/TMessagesProj/src/emojis/twitter/emoji/2_25.png index 15d21fe781..777ca10227 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_25.png and b/TMessagesProj/src/emojis/twitter/emoji/2_25.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_26.png b/TMessagesProj/src/emojis/twitter/emoji/2_26.png index 436a7e04cb..e2e817e666 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_26.png and b/TMessagesProj/src/emojis/twitter/emoji/2_26.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_27.png b/TMessagesProj/src/emojis/twitter/emoji/2_27.png index 9b62f97af0..5612fc84a4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_27.png and b/TMessagesProj/src/emojis/twitter/emoji/2_27.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_28.png b/TMessagesProj/src/emojis/twitter/emoji/2_28.png index 0d664dc2f8..a96f3045e0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_28.png and b/TMessagesProj/src/emojis/twitter/emoji/2_28.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_29.png b/TMessagesProj/src/emojis/twitter/emoji/2_29.png index 4bc4877f99..a59328129c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_29.png and b/TMessagesProj/src/emojis/twitter/emoji/2_29.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_3.png b/TMessagesProj/src/emojis/twitter/emoji/2_3.png index 02a67f4c87..f49470772b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_3.png and b/TMessagesProj/src/emojis/twitter/emoji/2_3.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_30.png b/TMessagesProj/src/emojis/twitter/emoji/2_30.png index 17283cc78d..7098a0b77f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_30.png and b/TMessagesProj/src/emojis/twitter/emoji/2_30.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_31.png b/TMessagesProj/src/emojis/twitter/emoji/2_31.png index 720fb62869..6af0717072 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_31.png and b/TMessagesProj/src/emojis/twitter/emoji/2_31.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_32.png b/TMessagesProj/src/emojis/twitter/emoji/2_32.png index ec3c789157..9a58994b99 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/2_32.png and b/TMessagesProj/src/emojis/twitter/emoji/2_32.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_33.png b/TMessagesProj/src/emojis/twitter/emoji/2_33.png index 50ce3810c4..208ca0a682 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_33.png and b/TMessagesProj/src/emojis/twitter/emoji/2_33.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_34.png b/TMessagesProj/src/emojis/twitter/emoji/2_34.png index 64d44b4d05..e933897df4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_34.png and b/TMessagesProj/src/emojis/twitter/emoji/2_34.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_35.png b/TMessagesProj/src/emojis/twitter/emoji/2_35.png index 4a2662a6a4..8c714bb779 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_35.png and b/TMessagesProj/src/emojis/twitter/emoji/2_35.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_36.png b/TMessagesProj/src/emojis/twitter/emoji/2_36.png index d86cc0fb65..29b3331192 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_36.png and b/TMessagesProj/src/emojis/twitter/emoji/2_36.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_37.png b/TMessagesProj/src/emojis/twitter/emoji/2_37.png index 4c3eaffc4b..192364d653 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_37.png and b/TMessagesProj/src/emojis/twitter/emoji/2_37.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_38.png b/TMessagesProj/src/emojis/twitter/emoji/2_38.png index 0e9fa283be..ce15c18832 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_38.png and b/TMessagesProj/src/emojis/twitter/emoji/2_38.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_39.png b/TMessagesProj/src/emojis/twitter/emoji/2_39.png index 6087fec8c0..4b34c7c48c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_39.png and b/TMessagesProj/src/emojis/twitter/emoji/2_39.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_4.png b/TMessagesProj/src/emojis/twitter/emoji/2_4.png index 75f33aaf01..ab223ba660 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_4.png and b/TMessagesProj/src/emojis/twitter/emoji/2_4.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_40.png b/TMessagesProj/src/emojis/twitter/emoji/2_40.png index db7740ece6..31ce1beb90 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_40.png and b/TMessagesProj/src/emojis/twitter/emoji/2_40.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_41.png b/TMessagesProj/src/emojis/twitter/emoji/2_41.png index 448ddedc90..db07456ead 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_41.png and b/TMessagesProj/src/emojis/twitter/emoji/2_41.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_42.png b/TMessagesProj/src/emojis/twitter/emoji/2_42.png index 75fa6102b8..f773193965 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_42.png and b/TMessagesProj/src/emojis/twitter/emoji/2_42.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_43.png b/TMessagesProj/src/emojis/twitter/emoji/2_43.png index 2162fa85b7..2d4312b600 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/2_43.png and b/TMessagesProj/src/emojis/twitter/emoji/2_43.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/2_44.png b/TMessagesProj/src/emojis/twitter/emoji/2_44.png index d817dd4488..0bb82f6507 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/2_44.png and b/TMessagesProj/src/emojis/twitter/emoji/2_44.png differ
[Binary-only changes: the remaining twitter emoji sprite PNGs under TMessagesProj/src/emojis/twitter/emoji/, listed in lexicographic order from 2_45.png through 3_287.png, differ between a/ and b/; their individual "diff --git ... index ... Binary files ... differ" records are elided here.]
diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_288.png b/TMessagesProj/src/emojis/twitter/emoji/3_288.png index 
73e8557215..5492839dd1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_288.png and b/TMessagesProj/src/emojis/twitter/emoji/3_288.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_289.png b/TMessagesProj/src/emojis/twitter/emoji/3_289.png index 89101dee69..d0de1bb009 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_289.png and b/TMessagesProj/src/emojis/twitter/emoji/3_289.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_29.png b/TMessagesProj/src/emojis/twitter/emoji/3_29.png index c8758da35d..1c86a80d9e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_29.png and b/TMessagesProj/src/emojis/twitter/emoji/3_29.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_290.png b/TMessagesProj/src/emojis/twitter/emoji/3_290.png index 47f18cd4cb..320b184d46 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_290.png and b/TMessagesProj/src/emojis/twitter/emoji/3_290.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_291.png b/TMessagesProj/src/emojis/twitter/emoji/3_291.png index b71abbc7ad..216df787db 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_291.png and b/TMessagesProj/src/emojis/twitter/emoji/3_291.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_292.png b/TMessagesProj/src/emojis/twitter/emoji/3_292.png index 89b1389296..3093416d67 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_292.png and b/TMessagesProj/src/emojis/twitter/emoji/3_292.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_293.png b/TMessagesProj/src/emojis/twitter/emoji/3_293.png index 0d87b0423e..d9ac89a0cc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_293.png and b/TMessagesProj/src/emojis/twitter/emoji/3_293.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_294.png b/TMessagesProj/src/emojis/twitter/emoji/3_294.png index 15d1878321..c351d61341 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_294.png and b/TMessagesProj/src/emojis/twitter/emoji/3_294.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_295.png b/TMessagesProj/src/emojis/twitter/emoji/3_295.png index b6c07971cc..aab60584e4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_295.png and b/TMessagesProj/src/emojis/twitter/emoji/3_295.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_296.png b/TMessagesProj/src/emojis/twitter/emoji/3_296.png index 508a982140..60a0282379 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_296.png and b/TMessagesProj/src/emojis/twitter/emoji/3_296.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_297.png b/TMessagesProj/src/emojis/twitter/emoji/3_297.png index 3f32974136..857aa82a1c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_297.png and b/TMessagesProj/src/emojis/twitter/emoji/3_297.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_298.png b/TMessagesProj/src/emojis/twitter/emoji/3_298.png index 541622bd81..5dabcb7ddb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_298.png and b/TMessagesProj/src/emojis/twitter/emoji/3_298.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_299.png b/TMessagesProj/src/emojis/twitter/emoji/3_299.png index 91f21f4ffd..91da0b84c7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_299.png and b/TMessagesProj/src/emojis/twitter/emoji/3_299.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_3.png 
b/TMessagesProj/src/emojis/twitter/emoji/3_3.png index 1a188a06fe..da85ebcf99 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_3.png and b/TMessagesProj/src/emojis/twitter/emoji/3_3.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_30.png b/TMessagesProj/src/emojis/twitter/emoji/3_30.png index 9556af902d..2cc3a03eff 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_30.png and b/TMessagesProj/src/emojis/twitter/emoji/3_30.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_300.png b/TMessagesProj/src/emojis/twitter/emoji/3_300.png index 671f87365f..b480c5ec15 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_300.png and b/TMessagesProj/src/emojis/twitter/emoji/3_300.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_301.png b/TMessagesProj/src/emojis/twitter/emoji/3_301.png index b2dfa8a348..07c01e4641 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_301.png and b/TMessagesProj/src/emojis/twitter/emoji/3_301.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_302.png b/TMessagesProj/src/emojis/twitter/emoji/3_302.png index 318ec7a069..b6c53999b6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_302.png and b/TMessagesProj/src/emojis/twitter/emoji/3_302.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_303.png b/TMessagesProj/src/emojis/twitter/emoji/3_303.png index b817f2b6f3..c10002d989 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_303.png and b/TMessagesProj/src/emojis/twitter/emoji/3_303.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_304.png b/TMessagesProj/src/emojis/twitter/emoji/3_304.png index f001f03664..0c924278ae 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_304.png and b/TMessagesProj/src/emojis/twitter/emoji/3_304.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_305.png b/TMessagesProj/src/emojis/twitter/emoji/3_305.png index 133237158c..7d51373b7b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_305.png and b/TMessagesProj/src/emojis/twitter/emoji/3_305.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_306.png b/TMessagesProj/src/emojis/twitter/emoji/3_306.png index bc0bb9e8bf..c901f122e8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_306.png and b/TMessagesProj/src/emojis/twitter/emoji/3_306.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_307.png b/TMessagesProj/src/emojis/twitter/emoji/3_307.png index 58f9355e76..e5e23f870f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_307.png and b/TMessagesProj/src/emojis/twitter/emoji/3_307.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_308.png b/TMessagesProj/src/emojis/twitter/emoji/3_308.png index 52a46087bb..1b7eef86fe 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_308.png and b/TMessagesProj/src/emojis/twitter/emoji/3_308.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_309.png b/TMessagesProj/src/emojis/twitter/emoji/3_309.png index 2cbff117a4..8c5aa9c94d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_309.png and b/TMessagesProj/src/emojis/twitter/emoji/3_309.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_31.png b/TMessagesProj/src/emojis/twitter/emoji/3_31.png index 2bd9b3c9ff..5ece318a2c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_31.png and b/TMessagesProj/src/emojis/twitter/emoji/3_31.png differ diff --git 
a/TMessagesProj/src/emojis/twitter/emoji/3_310.png b/TMessagesProj/src/emojis/twitter/emoji/3_310.png index 1ed2c37224..b178d611f5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_310.png and b/TMessagesProj/src/emojis/twitter/emoji/3_310.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_311.png b/TMessagesProj/src/emojis/twitter/emoji/3_311.png index a0cf5cd36d..6da5cc300b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_311.png and b/TMessagesProj/src/emojis/twitter/emoji/3_311.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_312.png b/TMessagesProj/src/emojis/twitter/emoji/3_312.png index ea83215a5f..0219ed8e36 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_312.png and b/TMessagesProj/src/emojis/twitter/emoji/3_312.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_313.png b/TMessagesProj/src/emojis/twitter/emoji/3_313.png index 5ed5536f41..383065037c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_313.png and b/TMessagesProj/src/emojis/twitter/emoji/3_313.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_314.png b/TMessagesProj/src/emojis/twitter/emoji/3_314.png index aabf20f158..c01d3a94bd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_314.png and b/TMessagesProj/src/emojis/twitter/emoji/3_314.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_315.png b/TMessagesProj/src/emojis/twitter/emoji/3_315.png index 448bd2bc29..7aab831ea7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_315.png and b/TMessagesProj/src/emojis/twitter/emoji/3_315.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_316.png b/TMessagesProj/src/emojis/twitter/emoji/3_316.png index 87dea1ac46..e52c67ecc2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_316.png and b/TMessagesProj/src/emojis/twitter/emoji/3_316.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_317.png b/TMessagesProj/src/emojis/twitter/emoji/3_317.png index 71200e91e6..04b77399fa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_317.png and b/TMessagesProj/src/emojis/twitter/emoji/3_317.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_318.png b/TMessagesProj/src/emojis/twitter/emoji/3_318.png index e80bb55e59..c99c33df85 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_318.png and b/TMessagesProj/src/emojis/twitter/emoji/3_318.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_319.png b/TMessagesProj/src/emojis/twitter/emoji/3_319.png index 647e25b577..25ee554afc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_319.png and b/TMessagesProj/src/emojis/twitter/emoji/3_319.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_32.png b/TMessagesProj/src/emojis/twitter/emoji/3_32.png index 452ecdb528..50f73bf17d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_32.png and b/TMessagesProj/src/emojis/twitter/emoji/3_32.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_320.png b/TMessagesProj/src/emojis/twitter/emoji/3_320.png index 3922096b66..bea5ad038d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_320.png and b/TMessagesProj/src/emojis/twitter/emoji/3_320.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_321.png b/TMessagesProj/src/emojis/twitter/emoji/3_321.png index 8ab5ed6471..e02c740172 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_321.png and b/TMessagesProj/src/emojis/twitter/emoji/3_321.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_322.png b/TMessagesProj/src/emojis/twitter/emoji/3_322.png index 16d24a437b..2751cdff84 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_322.png and b/TMessagesProj/src/emojis/twitter/emoji/3_322.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_323.png b/TMessagesProj/src/emojis/twitter/emoji/3_323.png index 79e649fba1..795ead6839 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_323.png and b/TMessagesProj/src/emojis/twitter/emoji/3_323.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_324.png b/TMessagesProj/src/emojis/twitter/emoji/3_324.png index c5eb9b6715..5507d74218 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_324.png and b/TMessagesProj/src/emojis/twitter/emoji/3_324.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_325.png b/TMessagesProj/src/emojis/twitter/emoji/3_325.png index 8a269406c0..60007384cb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_325.png and b/TMessagesProj/src/emojis/twitter/emoji/3_325.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_326.png b/TMessagesProj/src/emojis/twitter/emoji/3_326.png index c8563f9e46..d39a8b433f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_326.png and b/TMessagesProj/src/emojis/twitter/emoji/3_326.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_327.png b/TMessagesProj/src/emojis/twitter/emoji/3_327.png index d9ea4d4f22..98869b6da4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_327.png and b/TMessagesProj/src/emojis/twitter/emoji/3_327.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_328.png b/TMessagesProj/src/emojis/twitter/emoji/3_328.png index 1661aed9e4..4d96f5b537 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_328.png and b/TMessagesProj/src/emojis/twitter/emoji/3_328.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_329.png b/TMessagesProj/src/emojis/twitter/emoji/3_329.png index 4733cf176c..a5ead06199 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_329.png and b/TMessagesProj/src/emojis/twitter/emoji/3_329.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_33.png b/TMessagesProj/src/emojis/twitter/emoji/3_33.png index 6cc0ff5e85..8ee340bba3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_33.png and b/TMessagesProj/src/emojis/twitter/emoji/3_33.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_330.png b/TMessagesProj/src/emojis/twitter/emoji/3_330.png index 42d1f90d6f..b64e0881a5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_330.png and b/TMessagesProj/src/emojis/twitter/emoji/3_330.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_331.png b/TMessagesProj/src/emojis/twitter/emoji/3_331.png index e3561b6a09..87b6874e2a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_331.png and b/TMessagesProj/src/emojis/twitter/emoji/3_331.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_332.png b/TMessagesProj/src/emojis/twitter/emoji/3_332.png index 731a6b3367..865de7659d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_332.png and b/TMessagesProj/src/emojis/twitter/emoji/3_332.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_333.png b/TMessagesProj/src/emojis/twitter/emoji/3_333.png new file mode 100644 index 0000000000..790da0cd0f Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/3_333.png differ 
diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_334.png b/TMessagesProj/src/emojis/twitter/emoji/3_334.png new file mode 100644 index 0000000000..221935f961 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/3_334.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_34.png b/TMessagesProj/src/emojis/twitter/emoji/3_34.png index d4b753fe1f..7a38231858 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_34.png and b/TMessagesProj/src/emojis/twitter/emoji/3_34.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_35.png b/TMessagesProj/src/emojis/twitter/emoji/3_35.png index 4a415fbd38..3c9ef5e44e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_35.png and b/TMessagesProj/src/emojis/twitter/emoji/3_35.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_36.png b/TMessagesProj/src/emojis/twitter/emoji/3_36.png index c825412337..dadea90c57 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_36.png and b/TMessagesProj/src/emojis/twitter/emoji/3_36.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_37.png b/TMessagesProj/src/emojis/twitter/emoji/3_37.png index 649cceb41f..1b4295670c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_37.png and b/TMessagesProj/src/emojis/twitter/emoji/3_37.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_38.png b/TMessagesProj/src/emojis/twitter/emoji/3_38.png index ba9845b0e1..397484a713 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_38.png and b/TMessagesProj/src/emojis/twitter/emoji/3_38.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_39.png b/TMessagesProj/src/emojis/twitter/emoji/3_39.png index a9a3423916..88d4c4ff6f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_39.png and b/TMessagesProj/src/emojis/twitter/emoji/3_39.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_4.png b/TMessagesProj/src/emojis/twitter/emoji/3_4.png index ce17e3cf92..e70253931e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_4.png and b/TMessagesProj/src/emojis/twitter/emoji/3_4.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_40.png b/TMessagesProj/src/emojis/twitter/emoji/3_40.png index ca33535f5e..5c9e9b0b93 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_40.png and b/TMessagesProj/src/emojis/twitter/emoji/3_40.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_41.png b/TMessagesProj/src/emojis/twitter/emoji/3_41.png index 401dde9af4..a9270eb79d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_41.png and b/TMessagesProj/src/emojis/twitter/emoji/3_41.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_42.png b/TMessagesProj/src/emojis/twitter/emoji/3_42.png index 12c18fc975..277a4df5d6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_42.png and b/TMessagesProj/src/emojis/twitter/emoji/3_42.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_43.png b/TMessagesProj/src/emojis/twitter/emoji/3_43.png index 5272a60cc5..370914480b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_43.png and b/TMessagesProj/src/emojis/twitter/emoji/3_43.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_44.png b/TMessagesProj/src/emojis/twitter/emoji/3_44.png index a06a85c184..6e1f9d4a5f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_44.png and b/TMessagesProj/src/emojis/twitter/emoji/3_44.png differ diff --git 
a/TMessagesProj/src/emojis/twitter/emoji/3_45.png b/TMessagesProj/src/emojis/twitter/emoji/3_45.png index 346f95069e..d0363b6d96 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_45.png and b/TMessagesProj/src/emojis/twitter/emoji/3_45.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_46.png b/TMessagesProj/src/emojis/twitter/emoji/3_46.png index 44c61bb45d..16112a9c59 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_46.png and b/TMessagesProj/src/emojis/twitter/emoji/3_46.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_47.png b/TMessagesProj/src/emojis/twitter/emoji/3_47.png index 9c8d8c6dad..feb72066c1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_47.png and b/TMessagesProj/src/emojis/twitter/emoji/3_47.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_48.png b/TMessagesProj/src/emojis/twitter/emoji/3_48.png index 9dc4cb2011..2b22f62042 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_48.png and b/TMessagesProj/src/emojis/twitter/emoji/3_48.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_49.png b/TMessagesProj/src/emojis/twitter/emoji/3_49.png index 4c70d95acc..63c98cff47 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_49.png and b/TMessagesProj/src/emojis/twitter/emoji/3_49.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_5.png b/TMessagesProj/src/emojis/twitter/emoji/3_5.png index d0a7c0051b..71bd3cdf16 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_5.png and b/TMessagesProj/src/emojis/twitter/emoji/3_5.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_50.png b/TMessagesProj/src/emojis/twitter/emoji/3_50.png index c2e5c82926..a106429cfa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_50.png and b/TMessagesProj/src/emojis/twitter/emoji/3_50.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_51.png b/TMessagesProj/src/emojis/twitter/emoji/3_51.png index 1857c77b8b..786d491794 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_51.png and b/TMessagesProj/src/emojis/twitter/emoji/3_51.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_52.png b/TMessagesProj/src/emojis/twitter/emoji/3_52.png index 6ec1b93fbe..1a66c608a8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_52.png and b/TMessagesProj/src/emojis/twitter/emoji/3_52.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_53.png b/TMessagesProj/src/emojis/twitter/emoji/3_53.png index 40ef7aca4e..baadc247e2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_53.png and b/TMessagesProj/src/emojis/twitter/emoji/3_53.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_54.png b/TMessagesProj/src/emojis/twitter/emoji/3_54.png index 9f71d47220..de0bc71816 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_54.png and b/TMessagesProj/src/emojis/twitter/emoji/3_54.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_55.png b/TMessagesProj/src/emojis/twitter/emoji/3_55.png index 771a3499e3..95810a7998 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_55.png and b/TMessagesProj/src/emojis/twitter/emoji/3_55.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_56.png b/TMessagesProj/src/emojis/twitter/emoji/3_56.png index c88375dd51..653a1be782 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_56.png and b/TMessagesProj/src/emojis/twitter/emoji/3_56.png differ diff --git 
a/TMessagesProj/src/emojis/twitter/emoji/3_57.png b/TMessagesProj/src/emojis/twitter/emoji/3_57.png index ed12a0c5b9..9e72deea90 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_57.png and b/TMessagesProj/src/emojis/twitter/emoji/3_57.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_58.png b/TMessagesProj/src/emojis/twitter/emoji/3_58.png index a7b1595ea4..8b3f85ccd5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_58.png and b/TMessagesProj/src/emojis/twitter/emoji/3_58.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_59.png b/TMessagesProj/src/emojis/twitter/emoji/3_59.png index b3a21d61e4..be1efafaa7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_59.png and b/TMessagesProj/src/emojis/twitter/emoji/3_59.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_6.png b/TMessagesProj/src/emojis/twitter/emoji/3_6.png index 48e24c931b..e410c6479c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_6.png and b/TMessagesProj/src/emojis/twitter/emoji/3_6.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_60.png b/TMessagesProj/src/emojis/twitter/emoji/3_60.png index c83f6f889a..03f1898bd4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_60.png and b/TMessagesProj/src/emojis/twitter/emoji/3_60.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_61.png b/TMessagesProj/src/emojis/twitter/emoji/3_61.png index 740685fc4d..9cf81ff22b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_61.png and b/TMessagesProj/src/emojis/twitter/emoji/3_61.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_62.png b/TMessagesProj/src/emojis/twitter/emoji/3_62.png index fe01736d77..c6cca126cb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_62.png and b/TMessagesProj/src/emojis/twitter/emoji/3_62.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_63.png b/TMessagesProj/src/emojis/twitter/emoji/3_63.png index f73b4c9258..5099ccc32a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_63.png and b/TMessagesProj/src/emojis/twitter/emoji/3_63.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_64.png b/TMessagesProj/src/emojis/twitter/emoji/3_64.png index 3a3d923e77..56554bbcf7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_64.png and b/TMessagesProj/src/emojis/twitter/emoji/3_64.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_65.png b/TMessagesProj/src/emojis/twitter/emoji/3_65.png index a07c9e5061..e5755107b9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_65.png and b/TMessagesProj/src/emojis/twitter/emoji/3_65.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_66.png b/TMessagesProj/src/emojis/twitter/emoji/3_66.png index 462fc935dd..22d33be57d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_66.png and b/TMessagesProj/src/emojis/twitter/emoji/3_66.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_67.png b/TMessagesProj/src/emojis/twitter/emoji/3_67.png index 4b335bfd1f..d67e09a8f9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_67.png and b/TMessagesProj/src/emojis/twitter/emoji/3_67.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_68.png b/TMessagesProj/src/emojis/twitter/emoji/3_68.png index 3ec713b570..c92a1c4961 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_68.png and b/TMessagesProj/src/emojis/twitter/emoji/3_68.png differ diff --git 
a/TMessagesProj/src/emojis/twitter/emoji/3_69.png b/TMessagesProj/src/emojis/twitter/emoji/3_69.png index 19f47fcea3..4285fec982 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_69.png and b/TMessagesProj/src/emojis/twitter/emoji/3_69.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_7.png b/TMessagesProj/src/emojis/twitter/emoji/3_7.png index 1cfa83299a..bfe778512e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_7.png and b/TMessagesProj/src/emojis/twitter/emoji/3_7.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_70.png b/TMessagesProj/src/emojis/twitter/emoji/3_70.png index cd211dff96..bc406c5a43 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_70.png and b/TMessagesProj/src/emojis/twitter/emoji/3_70.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_71.png b/TMessagesProj/src/emojis/twitter/emoji/3_71.png index df5bf0b2c0..475eb77507 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_71.png and b/TMessagesProj/src/emojis/twitter/emoji/3_71.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_72.png b/TMessagesProj/src/emojis/twitter/emoji/3_72.png index 8bb42f54b0..3672305b1b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_72.png and b/TMessagesProj/src/emojis/twitter/emoji/3_72.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_73.png b/TMessagesProj/src/emojis/twitter/emoji/3_73.png index d6fe65d0dc..76c260d336 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_73.png and b/TMessagesProj/src/emojis/twitter/emoji/3_73.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_74.png b/TMessagesProj/src/emojis/twitter/emoji/3_74.png index 56fab3a2a6..2de4fbbd09 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_74.png and b/TMessagesProj/src/emojis/twitter/emoji/3_74.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_75.png b/TMessagesProj/src/emojis/twitter/emoji/3_75.png index bd268db2fb..fb814c113e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_75.png and b/TMessagesProj/src/emojis/twitter/emoji/3_75.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_76.png b/TMessagesProj/src/emojis/twitter/emoji/3_76.png index 8f3b678dec..c518d3dcab 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_76.png and b/TMessagesProj/src/emojis/twitter/emoji/3_76.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_77.png b/TMessagesProj/src/emojis/twitter/emoji/3_77.png index 2f9424e288..95f1bcf060 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_77.png and b/TMessagesProj/src/emojis/twitter/emoji/3_77.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_78.png b/TMessagesProj/src/emojis/twitter/emoji/3_78.png index 788cf3e0cf..827e5bbe89 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_78.png and b/TMessagesProj/src/emojis/twitter/emoji/3_78.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_79.png b/TMessagesProj/src/emojis/twitter/emoji/3_79.png index 4a6776e1d7..a9f9bd1d4c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_79.png and b/TMessagesProj/src/emojis/twitter/emoji/3_79.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_8.png b/TMessagesProj/src/emojis/twitter/emoji/3_8.png index cd25515ead..200d38a875 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_8.png and b/TMessagesProj/src/emojis/twitter/emoji/3_8.png differ diff --git 
a/TMessagesProj/src/emojis/twitter/emoji/3_80.png b/TMessagesProj/src/emojis/twitter/emoji/3_80.png index 15d5711f02..aca93be715 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_80.png and b/TMessagesProj/src/emojis/twitter/emoji/3_80.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_81.png b/TMessagesProj/src/emojis/twitter/emoji/3_81.png index 34745d70d9..9875ee27b0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_81.png and b/TMessagesProj/src/emojis/twitter/emoji/3_81.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_82.png b/TMessagesProj/src/emojis/twitter/emoji/3_82.png index 34745d70d9..39c6436528 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_82.png and b/TMessagesProj/src/emojis/twitter/emoji/3_82.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_83.png b/TMessagesProj/src/emojis/twitter/emoji/3_83.png index 656e0a549c..5fed85fb18 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_83.png and b/TMessagesProj/src/emojis/twitter/emoji/3_83.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_84.png b/TMessagesProj/src/emojis/twitter/emoji/3_84.png index b36e260aa7..93e7f23982 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_84.png and b/TMessagesProj/src/emojis/twitter/emoji/3_84.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_85.png b/TMessagesProj/src/emojis/twitter/emoji/3_85.png index 81fe026247..7ab4de3ec2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_85.png and b/TMessagesProj/src/emojis/twitter/emoji/3_85.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_86.png b/TMessagesProj/src/emojis/twitter/emoji/3_86.png index 4f225f2d05..3490479e44 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_86.png and b/TMessagesProj/src/emojis/twitter/emoji/3_86.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_87.png b/TMessagesProj/src/emojis/twitter/emoji/3_87.png index 7677146065..a2aa799866 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_87.png and b/TMessagesProj/src/emojis/twitter/emoji/3_87.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_88.png b/TMessagesProj/src/emojis/twitter/emoji/3_88.png index 40d1594abd..32c3bdcc2f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_88.png and b/TMessagesProj/src/emojis/twitter/emoji/3_88.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_89.png b/TMessagesProj/src/emojis/twitter/emoji/3_89.png index 9fad16694f..9434781b15 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_89.png and b/TMessagesProj/src/emojis/twitter/emoji/3_89.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_9.png b/TMessagesProj/src/emojis/twitter/emoji/3_9.png index 5071aee9e3..fc47c8ef86 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_9.png and b/TMessagesProj/src/emojis/twitter/emoji/3_9.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_90.png b/TMessagesProj/src/emojis/twitter/emoji/3_90.png index 0ae87a8779..1a815efae3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_90.png and b/TMessagesProj/src/emojis/twitter/emoji/3_90.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_91.png b/TMessagesProj/src/emojis/twitter/emoji/3_91.png index c3b392add5..cbc989c75b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_91.png and b/TMessagesProj/src/emojis/twitter/emoji/3_91.png differ diff --git 
a/TMessagesProj/src/emojis/twitter/emoji/3_92.png b/TMessagesProj/src/emojis/twitter/emoji/3_92.png index fcc5abe922..3970ad10e1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_92.png and b/TMessagesProj/src/emojis/twitter/emoji/3_92.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_93.png b/TMessagesProj/src/emojis/twitter/emoji/3_93.png index a2918e70d8..dc3f1aa76d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_93.png and b/TMessagesProj/src/emojis/twitter/emoji/3_93.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_94.png b/TMessagesProj/src/emojis/twitter/emoji/3_94.png index a2918e70d8..cf2dd9a8d8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_94.png and b/TMessagesProj/src/emojis/twitter/emoji/3_94.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_95.png b/TMessagesProj/src/emojis/twitter/emoji/3_95.png index b53275105b..cf9c4716e7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_95.png and b/TMessagesProj/src/emojis/twitter/emoji/3_95.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_96.png b/TMessagesProj/src/emojis/twitter/emoji/3_96.png index 9804acbc58..2c3a22c88e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_96.png and b/TMessagesProj/src/emojis/twitter/emoji/3_96.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_97.png b/TMessagesProj/src/emojis/twitter/emoji/3_97.png index 49c07ced7d..92fc39df53 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_97.png and b/TMessagesProj/src/emojis/twitter/emoji/3_97.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_98.png b/TMessagesProj/src/emojis/twitter/emoji/3_98.png index cc8003c609..372ab68ab7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_98.png and b/TMessagesProj/src/emojis/twitter/emoji/3_98.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/3_99.png b/TMessagesProj/src/emojis/twitter/emoji/3_99.png index 43a29ee958..e7d5dda5d3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/3_99.png and b/TMessagesProj/src/emojis/twitter/emoji/3_99.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_0.png b/TMessagesProj/src/emojis/twitter/emoji/4_0.png index 0861eab51a..18cfcd5806 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_0.png and b/TMessagesProj/src/emojis/twitter/emoji/4_0.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_1.png b/TMessagesProj/src/emojis/twitter/emoji/4_1.png index 8f30e3b831..d7787b07c5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_1.png and b/TMessagesProj/src/emojis/twitter/emoji/4_1.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_10.png b/TMessagesProj/src/emojis/twitter/emoji/4_10.png index f0ec41e3ab..0f01cce0bf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_10.png and b/TMessagesProj/src/emojis/twitter/emoji/4_10.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_100.png b/TMessagesProj/src/emojis/twitter/emoji/4_100.png index 0843678cf1..a185eac6be 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_100.png and b/TMessagesProj/src/emojis/twitter/emoji/4_100.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_101.png b/TMessagesProj/src/emojis/twitter/emoji/4_101.png index 7723e3c869..8ec7cef1cb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_101.png and b/TMessagesProj/src/emojis/twitter/emoji/4_101.png differ diff --git 
a/TMessagesProj/src/emojis/twitter/emoji/4_102.png b/TMessagesProj/src/emojis/twitter/emoji/4_102.png index 402b9a54ac..3e27835cb3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_102.png and b/TMessagesProj/src/emojis/twitter/emoji/4_102.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_103.png b/TMessagesProj/src/emojis/twitter/emoji/4_103.png index 8f5a171287..fb6c741008 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_103.png and b/TMessagesProj/src/emojis/twitter/emoji/4_103.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_104.png b/TMessagesProj/src/emojis/twitter/emoji/4_104.png index 6ca07a73d5..cdbe2f729c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_104.png and b/TMessagesProj/src/emojis/twitter/emoji/4_104.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_105.png b/TMessagesProj/src/emojis/twitter/emoji/4_105.png index 875426d06e..22dac4649d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_105.png and b/TMessagesProj/src/emojis/twitter/emoji/4_105.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_106.png b/TMessagesProj/src/emojis/twitter/emoji/4_106.png index 0440b72bf3..caa24659a2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_106.png and b/TMessagesProj/src/emojis/twitter/emoji/4_106.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_107.png b/TMessagesProj/src/emojis/twitter/emoji/4_107.png index 774f74db74..3c1e522ce0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_107.png and b/TMessagesProj/src/emojis/twitter/emoji/4_107.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_108.png b/TMessagesProj/src/emojis/twitter/emoji/4_108.png index 048b7d2fb0..027f89bd59 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_108.png and b/TMessagesProj/src/emojis/twitter/emoji/4_108.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_109.png b/TMessagesProj/src/emojis/twitter/emoji/4_109.png index 5a70b5b59c..e41a385390 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_109.png and b/TMessagesProj/src/emojis/twitter/emoji/4_109.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_11.png b/TMessagesProj/src/emojis/twitter/emoji/4_11.png index ef58b6bd01..fefa872d95 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_11.png and b/TMessagesProj/src/emojis/twitter/emoji/4_11.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_110.png b/TMessagesProj/src/emojis/twitter/emoji/4_110.png index 61ccbcf70e..29ce488581 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_110.png and b/TMessagesProj/src/emojis/twitter/emoji/4_110.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_111.png b/TMessagesProj/src/emojis/twitter/emoji/4_111.png index c909c96d53..a6a6cc4d99 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_111.png and b/TMessagesProj/src/emojis/twitter/emoji/4_111.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_112.png b/TMessagesProj/src/emojis/twitter/emoji/4_112.png index 4e11ce7372..55f90f3e82 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_112.png and b/TMessagesProj/src/emojis/twitter/emoji/4_112.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_113.png b/TMessagesProj/src/emojis/twitter/emoji/4_113.png index 21851449b0..c83e277db6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_113.png and b/TMessagesProj/src/emojis/twitter/emoji/4_113.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_114.png b/TMessagesProj/src/emojis/twitter/emoji/4_114.png index 1d53f789d4..95152c653e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_114.png and b/TMessagesProj/src/emojis/twitter/emoji/4_114.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_115.png b/TMessagesProj/src/emojis/twitter/emoji/4_115.png index 5e2649eb5b..448fb0ebf2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_115.png and b/TMessagesProj/src/emojis/twitter/emoji/4_115.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_116.png b/TMessagesProj/src/emojis/twitter/emoji/4_116.png index 934fb237b0..4db49ebdf6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_116.png and b/TMessagesProj/src/emojis/twitter/emoji/4_116.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_117.png b/TMessagesProj/src/emojis/twitter/emoji/4_117.png index 86a80cb8c6..5973d2d847 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_117.png and b/TMessagesProj/src/emojis/twitter/emoji/4_117.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_118.png b/TMessagesProj/src/emojis/twitter/emoji/4_118.png index 63013c367d..a5eb5493e4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_118.png and b/TMessagesProj/src/emojis/twitter/emoji/4_118.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_119.png b/TMessagesProj/src/emojis/twitter/emoji/4_119.png index 5e76472a23..54d8a8bd91 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_119.png and b/TMessagesProj/src/emojis/twitter/emoji/4_119.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_12.png b/TMessagesProj/src/emojis/twitter/emoji/4_12.png index 8d26c3b7c1..5c8fba4d18 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_12.png and b/TMessagesProj/src/emojis/twitter/emoji/4_12.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_120.png b/TMessagesProj/src/emojis/twitter/emoji/4_120.png index 24a72dc475..1eb62a7946 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_120.png and b/TMessagesProj/src/emojis/twitter/emoji/4_120.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_121.png b/TMessagesProj/src/emojis/twitter/emoji/4_121.png index a043982da2..a62bb937f8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_121.png and b/TMessagesProj/src/emojis/twitter/emoji/4_121.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_122.png b/TMessagesProj/src/emojis/twitter/emoji/4_122.png index 888773a5ca..767ea4e8a7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_122.png and b/TMessagesProj/src/emojis/twitter/emoji/4_122.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_123.png b/TMessagesProj/src/emojis/twitter/emoji/4_123.png index ca5ca5f4e3..3f5e490bce 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_123.png and b/TMessagesProj/src/emojis/twitter/emoji/4_123.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_124.png b/TMessagesProj/src/emojis/twitter/emoji/4_124.png index 95b61cb1e1..31b542591d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_124.png and b/TMessagesProj/src/emojis/twitter/emoji/4_124.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_125.png b/TMessagesProj/src/emojis/twitter/emoji/4_125.png index 815445555e..2f74e3d478 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_125.png and 
b/TMessagesProj/src/emojis/twitter/emoji/4_125.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_126.png b/TMessagesProj/src/emojis/twitter/emoji/4_126.png index 046175ee58..f9531d1f4f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_126.png and b/TMessagesProj/src/emojis/twitter/emoji/4_126.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_127.png b/TMessagesProj/src/emojis/twitter/emoji/4_127.png index 55f225e1ba..5dc2ee719d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_127.png and b/TMessagesProj/src/emojis/twitter/emoji/4_127.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_128.png b/TMessagesProj/src/emojis/twitter/emoji/4_128.png index ca0cdfcd62..511b2f7067 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_128.png and b/TMessagesProj/src/emojis/twitter/emoji/4_128.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_129.png b/TMessagesProj/src/emojis/twitter/emoji/4_129.png index 69277d52b2..0a748dd09b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_129.png and b/TMessagesProj/src/emojis/twitter/emoji/4_129.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_13.png b/TMessagesProj/src/emojis/twitter/emoji/4_13.png index 7727c997c6..9fac557569 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_13.png and b/TMessagesProj/src/emojis/twitter/emoji/4_13.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_130.png b/TMessagesProj/src/emojis/twitter/emoji/4_130.png index 30ed561291..84e2c34e9c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_130.png and b/TMessagesProj/src/emojis/twitter/emoji/4_130.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_14.png b/TMessagesProj/src/emojis/twitter/emoji/4_14.png index b26ed41695..8cdd1310d7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_14.png and b/TMessagesProj/src/emojis/twitter/emoji/4_14.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_15.png b/TMessagesProj/src/emojis/twitter/emoji/4_15.png index 2f9575a6a6..d09f624f6a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_15.png and b/TMessagesProj/src/emojis/twitter/emoji/4_15.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_16.png b/TMessagesProj/src/emojis/twitter/emoji/4_16.png index 3329573a43..5c6c9be613 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_16.png and b/TMessagesProj/src/emojis/twitter/emoji/4_16.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_17.png b/TMessagesProj/src/emojis/twitter/emoji/4_17.png index c7543714f8..271c82a025 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_17.png and b/TMessagesProj/src/emojis/twitter/emoji/4_17.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_18.png b/TMessagesProj/src/emojis/twitter/emoji/4_18.png index 96f32ce95d..64f32ab50b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_18.png and b/TMessagesProj/src/emojis/twitter/emoji/4_18.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_19.png b/TMessagesProj/src/emojis/twitter/emoji/4_19.png index 44a337e7f1..6abb34e7e2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_19.png and b/TMessagesProj/src/emojis/twitter/emoji/4_19.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/4_2.png b/TMessagesProj/src/emojis/twitter/emoji/4_2.png index 53ae648e9f..ccea13a5c7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/4_2.png and 
[Binary diffs for the Twitter emoji sprite tiles under TMessagesProj/src/emojis/twitter/emoji/: the existing tiles 4_2.png through 4_99.png and 5_0.png through 5_59.png are replaced with updated images, and 5_227.png and 5_228.png are added as new files.]
a/TMessagesProj/src/emojis/twitter/emoji/5_59.png and b/TMessagesProj/src/emojis/twitter/emoji/5_59.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_6.png b/TMessagesProj/src/emojis/twitter/emoji/5_6.png index ae45c697ee..564d205fe1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_6.png and b/TMessagesProj/src/emojis/twitter/emoji/5_6.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_60.png b/TMessagesProj/src/emojis/twitter/emoji/5_60.png index d0c193dbb6..2e5e5a6d9a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_60.png and b/TMessagesProj/src/emojis/twitter/emoji/5_60.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_61.png b/TMessagesProj/src/emojis/twitter/emoji/5_61.png index 75b7ad666c..b089ac14e7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_61.png and b/TMessagesProj/src/emojis/twitter/emoji/5_61.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_62.png b/TMessagesProj/src/emojis/twitter/emoji/5_62.png index 03cb467fc3..43be455042 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_62.png and b/TMessagesProj/src/emojis/twitter/emoji/5_62.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_63.png b/TMessagesProj/src/emojis/twitter/emoji/5_63.png index 5c51fc110c..6c2932ac4e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_63.png and b/TMessagesProj/src/emojis/twitter/emoji/5_63.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_64.png b/TMessagesProj/src/emojis/twitter/emoji/5_64.png index 03f0693239..9bf20a51cf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_64.png and b/TMessagesProj/src/emojis/twitter/emoji/5_64.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_65.png b/TMessagesProj/src/emojis/twitter/emoji/5_65.png index e3d5798b91..6c154630ba 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_65.png and b/TMessagesProj/src/emojis/twitter/emoji/5_65.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_66.png b/TMessagesProj/src/emojis/twitter/emoji/5_66.png index db0cb012f8..5297e92ac4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_66.png and b/TMessagesProj/src/emojis/twitter/emoji/5_66.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_67.png b/TMessagesProj/src/emojis/twitter/emoji/5_67.png index d4058e7ac7..cbf897094c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_67.png and b/TMessagesProj/src/emojis/twitter/emoji/5_67.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_68.png b/TMessagesProj/src/emojis/twitter/emoji/5_68.png index 1b38c95321..d90520eb5d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_68.png and b/TMessagesProj/src/emojis/twitter/emoji/5_68.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_69.png b/TMessagesProj/src/emojis/twitter/emoji/5_69.png index 7432d700c5..db19de4af7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_69.png and b/TMessagesProj/src/emojis/twitter/emoji/5_69.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_7.png b/TMessagesProj/src/emojis/twitter/emoji/5_7.png index 4fb916ce91..58f5ee8cee 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_7.png and b/TMessagesProj/src/emojis/twitter/emoji/5_7.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_70.png b/TMessagesProj/src/emojis/twitter/emoji/5_70.png index 05a5649777..23bf1ba5fc 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/5_70.png and b/TMessagesProj/src/emojis/twitter/emoji/5_70.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_71.png b/TMessagesProj/src/emojis/twitter/emoji/5_71.png index e21e34a5f4..543f28f7b5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_71.png and b/TMessagesProj/src/emojis/twitter/emoji/5_71.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_72.png b/TMessagesProj/src/emojis/twitter/emoji/5_72.png index 867eb1fbd7..bf4d572ba0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_72.png and b/TMessagesProj/src/emojis/twitter/emoji/5_72.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_73.png b/TMessagesProj/src/emojis/twitter/emoji/5_73.png index 7f6eaf85b1..25cafceec2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_73.png and b/TMessagesProj/src/emojis/twitter/emoji/5_73.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_74.png b/TMessagesProj/src/emojis/twitter/emoji/5_74.png index 11a00f40e1..74f6d7e758 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_74.png and b/TMessagesProj/src/emojis/twitter/emoji/5_74.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_75.png b/TMessagesProj/src/emojis/twitter/emoji/5_75.png index b7a33119a8..e14120e153 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_75.png and b/TMessagesProj/src/emojis/twitter/emoji/5_75.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_76.png b/TMessagesProj/src/emojis/twitter/emoji/5_76.png index 937e07856b..90fd0d55a7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_76.png and b/TMessagesProj/src/emojis/twitter/emoji/5_76.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_77.png b/TMessagesProj/src/emojis/twitter/emoji/5_77.png index 6798278102..d24641cd91 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_77.png and b/TMessagesProj/src/emojis/twitter/emoji/5_77.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_78.png b/TMessagesProj/src/emojis/twitter/emoji/5_78.png index 96a8ba8d48..effa1c0cc9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_78.png and b/TMessagesProj/src/emojis/twitter/emoji/5_78.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_79.png b/TMessagesProj/src/emojis/twitter/emoji/5_79.png index dd78c7b844..1ae655d9e8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_79.png and b/TMessagesProj/src/emojis/twitter/emoji/5_79.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_8.png b/TMessagesProj/src/emojis/twitter/emoji/5_8.png index c6179f3067..b70bfc9f9d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_8.png and b/TMessagesProj/src/emojis/twitter/emoji/5_8.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_80.png b/TMessagesProj/src/emojis/twitter/emoji/5_80.png index 06a9cc44ae..2fd6d96bb8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_80.png and b/TMessagesProj/src/emojis/twitter/emoji/5_80.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_81.png b/TMessagesProj/src/emojis/twitter/emoji/5_81.png index 8e5ce7e743..0638e572a6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_81.png and b/TMessagesProj/src/emojis/twitter/emoji/5_81.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_82.png b/TMessagesProj/src/emojis/twitter/emoji/5_82.png index 5d3b07cebe..965f2da3f1 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/5_82.png and b/TMessagesProj/src/emojis/twitter/emoji/5_82.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_83.png b/TMessagesProj/src/emojis/twitter/emoji/5_83.png index 38aac2c2af..a6c66e1aa1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_83.png and b/TMessagesProj/src/emojis/twitter/emoji/5_83.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_84.png b/TMessagesProj/src/emojis/twitter/emoji/5_84.png index 771ad0db33..4e748b6b95 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_84.png and b/TMessagesProj/src/emojis/twitter/emoji/5_84.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_85.png b/TMessagesProj/src/emojis/twitter/emoji/5_85.png index 30d40b7c4a..fcfc34a0fa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_85.png and b/TMessagesProj/src/emojis/twitter/emoji/5_85.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_86.png b/TMessagesProj/src/emojis/twitter/emoji/5_86.png index 6221d843fc..9fef921e3f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_86.png and b/TMessagesProj/src/emojis/twitter/emoji/5_86.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_87.png b/TMessagesProj/src/emojis/twitter/emoji/5_87.png index 4fc2fc9f51..03b284f325 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_87.png and b/TMessagesProj/src/emojis/twitter/emoji/5_87.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_88.png b/TMessagesProj/src/emojis/twitter/emoji/5_88.png index 896e1de661..7455fd3c39 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_88.png and b/TMessagesProj/src/emojis/twitter/emoji/5_88.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_89.png b/TMessagesProj/src/emojis/twitter/emoji/5_89.png index 7829b0da8a..c0d00a1bcf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_89.png and b/TMessagesProj/src/emojis/twitter/emoji/5_89.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_9.png b/TMessagesProj/src/emojis/twitter/emoji/5_9.png index 8162a18173..e91440a266 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_9.png and b/TMessagesProj/src/emojis/twitter/emoji/5_9.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_90.png b/TMessagesProj/src/emojis/twitter/emoji/5_90.png index d792833660..275949f6fb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_90.png and b/TMessagesProj/src/emojis/twitter/emoji/5_90.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_91.png b/TMessagesProj/src/emojis/twitter/emoji/5_91.png index 3e42873cca..ad31366a2c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_91.png and b/TMessagesProj/src/emojis/twitter/emoji/5_91.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_92.png b/TMessagesProj/src/emojis/twitter/emoji/5_92.png index 88a9d644eb..e56b41bfe7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_92.png and b/TMessagesProj/src/emojis/twitter/emoji/5_92.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_93.png b/TMessagesProj/src/emojis/twitter/emoji/5_93.png index 4430338ce9..de98dbffe3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_93.png and b/TMessagesProj/src/emojis/twitter/emoji/5_93.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_94.png b/TMessagesProj/src/emojis/twitter/emoji/5_94.png index 6b817f2c51..4b055c9711 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/5_94.png and b/TMessagesProj/src/emojis/twitter/emoji/5_94.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_95.png b/TMessagesProj/src/emojis/twitter/emoji/5_95.png index 1ef0f807fc..df8e066f82 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_95.png and b/TMessagesProj/src/emojis/twitter/emoji/5_95.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_96.png b/TMessagesProj/src/emojis/twitter/emoji/5_96.png index 540656b0aa..d09ac30a5a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_96.png and b/TMessagesProj/src/emojis/twitter/emoji/5_96.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_97.png b/TMessagesProj/src/emojis/twitter/emoji/5_97.png index 45a33c9c26..bb484ae456 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_97.png and b/TMessagesProj/src/emojis/twitter/emoji/5_97.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_98.png b/TMessagesProj/src/emojis/twitter/emoji/5_98.png index 6c7de9fab4..4f2f7983c0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_98.png and b/TMessagesProj/src/emojis/twitter/emoji/5_98.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/5_99.png b/TMessagesProj/src/emojis/twitter/emoji/5_99.png index cf6819ca0c..f348008790 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/5_99.png and b/TMessagesProj/src/emojis/twitter/emoji/5_99.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_0.png b/TMessagesProj/src/emojis/twitter/emoji/6_0.png index 07434fc566..d8dc83882b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_0.png and b/TMessagesProj/src/emojis/twitter/emoji/6_0.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_1.png b/TMessagesProj/src/emojis/twitter/emoji/6_1.png index 98197b7e64..ba8e715ed1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_1.png and b/TMessagesProj/src/emojis/twitter/emoji/6_1.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_10.png b/TMessagesProj/src/emojis/twitter/emoji/6_10.png index 7a8d089044..64f416f23a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_10.png and b/TMessagesProj/src/emojis/twitter/emoji/6_10.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_100.png b/TMessagesProj/src/emojis/twitter/emoji/6_100.png index 16cb48aba4..9b8d541313 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_100.png and b/TMessagesProj/src/emojis/twitter/emoji/6_100.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_101.png b/TMessagesProj/src/emojis/twitter/emoji/6_101.png index fbbf121419..7aa3e89617 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_101.png and b/TMessagesProj/src/emojis/twitter/emoji/6_101.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_102.png b/TMessagesProj/src/emojis/twitter/emoji/6_102.png index fb9baaca9d..1c664e4fe5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_102.png and b/TMessagesProj/src/emojis/twitter/emoji/6_102.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_103.png b/TMessagesProj/src/emojis/twitter/emoji/6_103.png index 8f91541fef..1170d826c8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_103.png and b/TMessagesProj/src/emojis/twitter/emoji/6_103.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_104.png b/TMessagesProj/src/emojis/twitter/emoji/6_104.png index d356e0e42a..629010bef9 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/6_104.png and b/TMessagesProj/src/emojis/twitter/emoji/6_104.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_105.png b/TMessagesProj/src/emojis/twitter/emoji/6_105.png index a6c6553532..e8b9b60ed9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_105.png and b/TMessagesProj/src/emojis/twitter/emoji/6_105.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_106.png b/TMessagesProj/src/emojis/twitter/emoji/6_106.png index 7324e5afe4..4a031b96a0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_106.png and b/TMessagesProj/src/emojis/twitter/emoji/6_106.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_107.png b/TMessagesProj/src/emojis/twitter/emoji/6_107.png index 099c5040b6..203ee09330 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_107.png and b/TMessagesProj/src/emojis/twitter/emoji/6_107.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_108.png b/TMessagesProj/src/emojis/twitter/emoji/6_108.png index ffb87d6ee0..6dba9f7cf4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_108.png and b/TMessagesProj/src/emojis/twitter/emoji/6_108.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_109.png b/TMessagesProj/src/emojis/twitter/emoji/6_109.png index d6f52bf6f0..6f9ba26f11 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_109.png and b/TMessagesProj/src/emojis/twitter/emoji/6_109.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_11.png b/TMessagesProj/src/emojis/twitter/emoji/6_11.png index 47c3c09255..9e9dc5596e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_11.png and b/TMessagesProj/src/emojis/twitter/emoji/6_11.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_110.png b/TMessagesProj/src/emojis/twitter/emoji/6_110.png index 846677bb4d..a71809e254 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_110.png and b/TMessagesProj/src/emojis/twitter/emoji/6_110.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_111.png b/TMessagesProj/src/emojis/twitter/emoji/6_111.png index 32a1aa6b78..3f0dd1e1f5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_111.png and b/TMessagesProj/src/emojis/twitter/emoji/6_111.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_112.png b/TMessagesProj/src/emojis/twitter/emoji/6_112.png index 6927aae272..e1f3b7ba4e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_112.png and b/TMessagesProj/src/emojis/twitter/emoji/6_112.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_113.png b/TMessagesProj/src/emojis/twitter/emoji/6_113.png index 2b2bd17648..07e33c07cd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_113.png and b/TMessagesProj/src/emojis/twitter/emoji/6_113.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_114.png b/TMessagesProj/src/emojis/twitter/emoji/6_114.png index 7c1ad49c8a..c84766e739 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_114.png and b/TMessagesProj/src/emojis/twitter/emoji/6_114.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_115.png b/TMessagesProj/src/emojis/twitter/emoji/6_115.png index 5aab852c4c..454a0e4dfe 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_115.png and b/TMessagesProj/src/emojis/twitter/emoji/6_115.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_116.png b/TMessagesProj/src/emojis/twitter/emoji/6_116.png index 38497a5569..8c2e033a94 
100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_116.png and b/TMessagesProj/src/emojis/twitter/emoji/6_116.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_117.png b/TMessagesProj/src/emojis/twitter/emoji/6_117.png index 82ce85acb0..07b645ad00 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_117.png and b/TMessagesProj/src/emojis/twitter/emoji/6_117.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_118.png b/TMessagesProj/src/emojis/twitter/emoji/6_118.png index 751746d1a6..110d35471e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_118.png and b/TMessagesProj/src/emojis/twitter/emoji/6_118.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_119.png b/TMessagesProj/src/emojis/twitter/emoji/6_119.png index 2aca42a9aa..7b145b7c52 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_119.png and b/TMessagesProj/src/emojis/twitter/emoji/6_119.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_12.png b/TMessagesProj/src/emojis/twitter/emoji/6_12.png index 96b98d3d28..26ca29dc09 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_12.png and b/TMessagesProj/src/emojis/twitter/emoji/6_12.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_120.png b/TMessagesProj/src/emojis/twitter/emoji/6_120.png index f08823ee67..828e8b45cc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_120.png and b/TMessagesProj/src/emojis/twitter/emoji/6_120.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_121.png b/TMessagesProj/src/emojis/twitter/emoji/6_121.png index a154f497a3..5801510140 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_121.png and b/TMessagesProj/src/emojis/twitter/emoji/6_121.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_122.png b/TMessagesProj/src/emojis/twitter/emoji/6_122.png index c78b8cdd37..e6fa5cc06d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_122.png and b/TMessagesProj/src/emojis/twitter/emoji/6_122.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_123.png b/TMessagesProj/src/emojis/twitter/emoji/6_123.png index db65a20add..87968ac971 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_123.png and b/TMessagesProj/src/emojis/twitter/emoji/6_123.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_124.png b/TMessagesProj/src/emojis/twitter/emoji/6_124.png index 1b80efce43..d050b82f61 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_124.png and b/TMessagesProj/src/emojis/twitter/emoji/6_124.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_125.png b/TMessagesProj/src/emojis/twitter/emoji/6_125.png index 87d1e7a654..86c5bc4812 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_125.png and b/TMessagesProj/src/emojis/twitter/emoji/6_125.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_126.png b/TMessagesProj/src/emojis/twitter/emoji/6_126.png index 25aa73672d..6aaad1acf1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_126.png and b/TMessagesProj/src/emojis/twitter/emoji/6_126.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_127.png b/TMessagesProj/src/emojis/twitter/emoji/6_127.png index 3f43d99b0a..2b1451254e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_127.png and b/TMessagesProj/src/emojis/twitter/emoji/6_127.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_128.png b/TMessagesProj/src/emojis/twitter/emoji/6_128.png index 
249f60c84f..eaae4ccb1b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_128.png and b/TMessagesProj/src/emojis/twitter/emoji/6_128.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_129.png b/TMessagesProj/src/emojis/twitter/emoji/6_129.png index 65a854e306..b2d2f6750f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_129.png and b/TMessagesProj/src/emojis/twitter/emoji/6_129.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_13.png b/TMessagesProj/src/emojis/twitter/emoji/6_13.png index 8f0c82e856..a4731811ce 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_13.png and b/TMessagesProj/src/emojis/twitter/emoji/6_13.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_130.png b/TMessagesProj/src/emojis/twitter/emoji/6_130.png index 8963afe7df..a25143cfbf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_130.png and b/TMessagesProj/src/emojis/twitter/emoji/6_130.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_131.png b/TMessagesProj/src/emojis/twitter/emoji/6_131.png index 43f111579d..24016ae671 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_131.png and b/TMessagesProj/src/emojis/twitter/emoji/6_131.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_132.png b/TMessagesProj/src/emojis/twitter/emoji/6_132.png index 8c19efd4b2..9a8fe188f5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_132.png and b/TMessagesProj/src/emojis/twitter/emoji/6_132.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_133.png b/TMessagesProj/src/emojis/twitter/emoji/6_133.png index 936b7920e9..d237a7db10 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_133.png and b/TMessagesProj/src/emojis/twitter/emoji/6_133.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_134.png b/TMessagesProj/src/emojis/twitter/emoji/6_134.png index e92db08fee..f660153201 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_134.png and b/TMessagesProj/src/emojis/twitter/emoji/6_134.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_135.png b/TMessagesProj/src/emojis/twitter/emoji/6_135.png index 5fa0017310..5a3750ccd5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_135.png and b/TMessagesProj/src/emojis/twitter/emoji/6_135.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_136.png b/TMessagesProj/src/emojis/twitter/emoji/6_136.png index adb30e33ec..a068b25a28 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_136.png and b/TMessagesProj/src/emojis/twitter/emoji/6_136.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_137.png b/TMessagesProj/src/emojis/twitter/emoji/6_137.png index 7a7f7e5b37..85ec51d2cd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_137.png and b/TMessagesProj/src/emojis/twitter/emoji/6_137.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_138.png b/TMessagesProj/src/emojis/twitter/emoji/6_138.png index e439e5a639..fc69a059ba 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_138.png and b/TMessagesProj/src/emojis/twitter/emoji/6_138.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_139.png b/TMessagesProj/src/emojis/twitter/emoji/6_139.png index 13f021ef1b..28bceb51b5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_139.png and b/TMessagesProj/src/emojis/twitter/emoji/6_139.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_14.png 
b/TMessagesProj/src/emojis/twitter/emoji/6_14.png index 58d741b131..ca6eca7d59 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_14.png and b/TMessagesProj/src/emojis/twitter/emoji/6_14.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_140.png b/TMessagesProj/src/emojis/twitter/emoji/6_140.png index 5c1d205287..0f164f455d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_140.png and b/TMessagesProj/src/emojis/twitter/emoji/6_140.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_141.png b/TMessagesProj/src/emojis/twitter/emoji/6_141.png index 1c71a71f1d..430d6244ff 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_141.png and b/TMessagesProj/src/emojis/twitter/emoji/6_141.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_142.png b/TMessagesProj/src/emojis/twitter/emoji/6_142.png index fb94a6fe29..0d160160bf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_142.png and b/TMessagesProj/src/emojis/twitter/emoji/6_142.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_143.png b/TMessagesProj/src/emojis/twitter/emoji/6_143.png index d64f29864c..0b272b2e15 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_143.png and b/TMessagesProj/src/emojis/twitter/emoji/6_143.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_144.png b/TMessagesProj/src/emojis/twitter/emoji/6_144.png index b566656f5b..8e6aa70e44 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_144.png and b/TMessagesProj/src/emojis/twitter/emoji/6_144.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_145.png b/TMessagesProj/src/emojis/twitter/emoji/6_145.png index 96744c8601..3c0de8c4ec 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_145.png and b/TMessagesProj/src/emojis/twitter/emoji/6_145.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_146.png b/TMessagesProj/src/emojis/twitter/emoji/6_146.png index 2fe2eece2d..bfe330004f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_146.png and b/TMessagesProj/src/emojis/twitter/emoji/6_146.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_147.png b/TMessagesProj/src/emojis/twitter/emoji/6_147.png index c557477353..bb6f49d4be 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_147.png and b/TMessagesProj/src/emojis/twitter/emoji/6_147.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_148.png b/TMessagesProj/src/emojis/twitter/emoji/6_148.png index a1af34f1e6..121d0fca51 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_148.png and b/TMessagesProj/src/emojis/twitter/emoji/6_148.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_149.png b/TMessagesProj/src/emojis/twitter/emoji/6_149.png index fb5cd9cb36..82bce5dc4c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_149.png and b/TMessagesProj/src/emojis/twitter/emoji/6_149.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_15.png b/TMessagesProj/src/emojis/twitter/emoji/6_15.png index 26eda58d44..83a0cfcb53 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_15.png and b/TMessagesProj/src/emojis/twitter/emoji/6_15.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_150.png b/TMessagesProj/src/emojis/twitter/emoji/6_150.png index 42f321eca8..879c5e56ff 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_150.png and b/TMessagesProj/src/emojis/twitter/emoji/6_150.png differ diff --git 
a/TMessagesProj/src/emojis/twitter/emoji/6_151.png b/TMessagesProj/src/emojis/twitter/emoji/6_151.png index f8ac9630ca..b33bc6e783 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_151.png and b/TMessagesProj/src/emojis/twitter/emoji/6_151.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_152.png b/TMessagesProj/src/emojis/twitter/emoji/6_152.png index 63a9478640..1e43945670 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_152.png and b/TMessagesProj/src/emojis/twitter/emoji/6_152.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_153.png b/TMessagesProj/src/emojis/twitter/emoji/6_153.png index aab0a5e682..77d1a68bb2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_153.png and b/TMessagesProj/src/emojis/twitter/emoji/6_153.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_154.png b/TMessagesProj/src/emojis/twitter/emoji/6_154.png index 7bb98cb296..95bb1e032f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_154.png and b/TMessagesProj/src/emojis/twitter/emoji/6_154.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_155.png b/TMessagesProj/src/emojis/twitter/emoji/6_155.png index bfc664e1d3..9c94baad4f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_155.png and b/TMessagesProj/src/emojis/twitter/emoji/6_155.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_156.png b/TMessagesProj/src/emojis/twitter/emoji/6_156.png index 4805d57fbd..2fe0abc2cf 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_156.png and b/TMessagesProj/src/emojis/twitter/emoji/6_156.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_157.png b/TMessagesProj/src/emojis/twitter/emoji/6_157.png index 67643d5f88..53eaf10c71 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_157.png and b/TMessagesProj/src/emojis/twitter/emoji/6_157.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_158.png b/TMessagesProj/src/emojis/twitter/emoji/6_158.png index b5ac93049b..677b0d4df1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_158.png and b/TMessagesProj/src/emojis/twitter/emoji/6_158.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_159.png b/TMessagesProj/src/emojis/twitter/emoji/6_159.png index e16bc38d89..c5f0082aff 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_159.png and b/TMessagesProj/src/emojis/twitter/emoji/6_159.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_16.png b/TMessagesProj/src/emojis/twitter/emoji/6_16.png index 0ac7ac96d7..4c024dca8e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_16.png and b/TMessagesProj/src/emojis/twitter/emoji/6_16.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_160.png b/TMessagesProj/src/emojis/twitter/emoji/6_160.png index 9d280b086e..f60c20523e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_160.png and b/TMessagesProj/src/emojis/twitter/emoji/6_160.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_161.png b/TMessagesProj/src/emojis/twitter/emoji/6_161.png index 3db47d1cdf..e802c504e1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_161.png and b/TMessagesProj/src/emojis/twitter/emoji/6_161.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_162.png b/TMessagesProj/src/emojis/twitter/emoji/6_162.png index aa631790d2..88dbf6d734 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_162.png and b/TMessagesProj/src/emojis/twitter/emoji/6_162.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_163.png b/TMessagesProj/src/emojis/twitter/emoji/6_163.png index ec06b2f707..b6ef0d533b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_163.png and b/TMessagesProj/src/emojis/twitter/emoji/6_163.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_164.png b/TMessagesProj/src/emojis/twitter/emoji/6_164.png index 593750c107..48d8ae9cc4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_164.png and b/TMessagesProj/src/emojis/twitter/emoji/6_164.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_165.png b/TMessagesProj/src/emojis/twitter/emoji/6_165.png index 3f2590d82b..13b69f56af 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_165.png and b/TMessagesProj/src/emojis/twitter/emoji/6_165.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_166.png b/TMessagesProj/src/emojis/twitter/emoji/6_166.png index 56f80654ec..5f718f0dbe 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_166.png and b/TMessagesProj/src/emojis/twitter/emoji/6_166.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_167.png b/TMessagesProj/src/emojis/twitter/emoji/6_167.png index db5e58acf8..72eb3f1169 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_167.png and b/TMessagesProj/src/emojis/twitter/emoji/6_167.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_168.png b/TMessagesProj/src/emojis/twitter/emoji/6_168.png index f64b768bd2..1191451f7e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_168.png and b/TMessagesProj/src/emojis/twitter/emoji/6_168.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_169.png b/TMessagesProj/src/emojis/twitter/emoji/6_169.png index cb4721b4b3..f4ad0ac525 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_169.png and b/TMessagesProj/src/emojis/twitter/emoji/6_169.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_17.png b/TMessagesProj/src/emojis/twitter/emoji/6_17.png index 51fd0cfc32..cc83985d16 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_17.png and b/TMessagesProj/src/emojis/twitter/emoji/6_17.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_170.png b/TMessagesProj/src/emojis/twitter/emoji/6_170.png index 4705ad6f75..f3d4d202a7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_170.png and b/TMessagesProj/src/emojis/twitter/emoji/6_170.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_171.png b/TMessagesProj/src/emojis/twitter/emoji/6_171.png index d4cf41be18..6d9b7e14fd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_171.png and b/TMessagesProj/src/emojis/twitter/emoji/6_171.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_172.png b/TMessagesProj/src/emojis/twitter/emoji/6_172.png index d873b78989..7d7caa3993 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_172.png and b/TMessagesProj/src/emojis/twitter/emoji/6_172.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_173.png b/TMessagesProj/src/emojis/twitter/emoji/6_173.png index e14bd90afc..b325e8acb9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_173.png and b/TMessagesProj/src/emojis/twitter/emoji/6_173.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_174.png b/TMessagesProj/src/emojis/twitter/emoji/6_174.png index 5d64e2188d..fc7a4ee699 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_174.png and 
b/TMessagesProj/src/emojis/twitter/emoji/6_174.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_175.png b/TMessagesProj/src/emojis/twitter/emoji/6_175.png index b26e44d410..14c31ccfb6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_175.png and b/TMessagesProj/src/emojis/twitter/emoji/6_175.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_176.png b/TMessagesProj/src/emojis/twitter/emoji/6_176.png index a270b4d1e7..43055abac5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_176.png and b/TMessagesProj/src/emojis/twitter/emoji/6_176.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_177.png b/TMessagesProj/src/emojis/twitter/emoji/6_177.png index 04f4046c3b..e9922f5af8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_177.png and b/TMessagesProj/src/emojis/twitter/emoji/6_177.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_178.png b/TMessagesProj/src/emojis/twitter/emoji/6_178.png index 18f0e99c46..3b74b725b4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_178.png and b/TMessagesProj/src/emojis/twitter/emoji/6_178.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_179.png b/TMessagesProj/src/emojis/twitter/emoji/6_179.png index b0a5ce05d1..0a6fe278df 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_179.png and b/TMessagesProj/src/emojis/twitter/emoji/6_179.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_18.png b/TMessagesProj/src/emojis/twitter/emoji/6_18.png index 1a3d785018..1594d928ed 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_18.png and b/TMessagesProj/src/emojis/twitter/emoji/6_18.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_180.png b/TMessagesProj/src/emojis/twitter/emoji/6_180.png index 9fec8599a7..b1199ba47d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_180.png and b/TMessagesProj/src/emojis/twitter/emoji/6_180.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_181.png b/TMessagesProj/src/emojis/twitter/emoji/6_181.png index 1f3216f2b7..e0dba7b23f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_181.png and b/TMessagesProj/src/emojis/twitter/emoji/6_181.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_182.png b/TMessagesProj/src/emojis/twitter/emoji/6_182.png index de048adf3d..98057b12f8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_182.png and b/TMessagesProj/src/emojis/twitter/emoji/6_182.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_183.png b/TMessagesProj/src/emojis/twitter/emoji/6_183.png index f317f668d3..f907c4cccd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_183.png and b/TMessagesProj/src/emojis/twitter/emoji/6_183.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_184.png b/TMessagesProj/src/emojis/twitter/emoji/6_184.png index 0374fdf596..43d2088e30 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_184.png and b/TMessagesProj/src/emojis/twitter/emoji/6_184.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_185.png b/TMessagesProj/src/emojis/twitter/emoji/6_185.png index 45a0d78f00..df97e96591 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_185.png and b/TMessagesProj/src/emojis/twitter/emoji/6_185.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_186.png b/TMessagesProj/src/emojis/twitter/emoji/6_186.png index 4d23e9b120..472e474f91 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/6_186.png and b/TMessagesProj/src/emojis/twitter/emoji/6_186.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_187.png b/TMessagesProj/src/emojis/twitter/emoji/6_187.png index d76d2751c7..3a0a82f373 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_187.png and b/TMessagesProj/src/emojis/twitter/emoji/6_187.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_188.png b/TMessagesProj/src/emojis/twitter/emoji/6_188.png index 0e94c36f07..2deecf0711 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_188.png and b/TMessagesProj/src/emojis/twitter/emoji/6_188.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_189.png b/TMessagesProj/src/emojis/twitter/emoji/6_189.png index 5361e3773c..b38f0a03fb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_189.png and b/TMessagesProj/src/emojis/twitter/emoji/6_189.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_19.png b/TMessagesProj/src/emojis/twitter/emoji/6_19.png index 227ac73ea1..1b7c31ad96 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_19.png and b/TMessagesProj/src/emojis/twitter/emoji/6_19.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_190.png b/TMessagesProj/src/emojis/twitter/emoji/6_190.png index ee653949ad..93b695cfcc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_190.png and b/TMessagesProj/src/emojis/twitter/emoji/6_190.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_191.png b/TMessagesProj/src/emojis/twitter/emoji/6_191.png index 150cd629fe..afee5830ab 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_191.png and b/TMessagesProj/src/emojis/twitter/emoji/6_191.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_192.png b/TMessagesProj/src/emojis/twitter/emoji/6_192.png index b315805386..2724f463ac 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_192.png and b/TMessagesProj/src/emojis/twitter/emoji/6_192.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_193.png b/TMessagesProj/src/emojis/twitter/emoji/6_193.png index b3b66b2518..6b0ee755db 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_193.png and b/TMessagesProj/src/emojis/twitter/emoji/6_193.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_194.png b/TMessagesProj/src/emojis/twitter/emoji/6_194.png index c9cba28cec..c89df2f924 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_194.png and b/TMessagesProj/src/emojis/twitter/emoji/6_194.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_195.png b/TMessagesProj/src/emojis/twitter/emoji/6_195.png index b5be1fb60f..9ab7c9c907 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_195.png and b/TMessagesProj/src/emojis/twitter/emoji/6_195.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_196.png b/TMessagesProj/src/emojis/twitter/emoji/6_196.png index fcf338349e..44c4411252 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_196.png and b/TMessagesProj/src/emojis/twitter/emoji/6_196.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_197.png b/TMessagesProj/src/emojis/twitter/emoji/6_197.png index 0bb0db2820..b613dacb79 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_197.png and b/TMessagesProj/src/emojis/twitter/emoji/6_197.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_198.png b/TMessagesProj/src/emojis/twitter/emoji/6_198.png index 7382543570..c55191bca0 
100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_198.png and b/TMessagesProj/src/emojis/twitter/emoji/6_198.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_199.png b/TMessagesProj/src/emojis/twitter/emoji/6_199.png index f936670b04..e04284343c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_199.png and b/TMessagesProj/src/emojis/twitter/emoji/6_199.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_2.png b/TMessagesProj/src/emojis/twitter/emoji/6_2.png index 7ce2e124a7..c345bc3515 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_2.png and b/TMessagesProj/src/emojis/twitter/emoji/6_2.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_20.png b/TMessagesProj/src/emojis/twitter/emoji/6_20.png index 27cd4a522f..7ec1099987 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_20.png and b/TMessagesProj/src/emojis/twitter/emoji/6_20.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_200.png b/TMessagesProj/src/emojis/twitter/emoji/6_200.png index c5bf155220..206037de69 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_200.png and b/TMessagesProj/src/emojis/twitter/emoji/6_200.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_201.png b/TMessagesProj/src/emojis/twitter/emoji/6_201.png index ef181399b8..bdfd879fde 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_201.png and b/TMessagesProj/src/emojis/twitter/emoji/6_201.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_202.png b/TMessagesProj/src/emojis/twitter/emoji/6_202.png index 4b4014769a..b008856c1d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_202.png and b/TMessagesProj/src/emojis/twitter/emoji/6_202.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_203.png b/TMessagesProj/src/emojis/twitter/emoji/6_203.png index b4e0dd1ebb..0d3cba815f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_203.png and b/TMessagesProj/src/emojis/twitter/emoji/6_203.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_204.png b/TMessagesProj/src/emojis/twitter/emoji/6_204.png index 359129a3e9..e5e0961f5d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_204.png and b/TMessagesProj/src/emojis/twitter/emoji/6_204.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_205.png b/TMessagesProj/src/emojis/twitter/emoji/6_205.png index f0b525c82d..f05ae71429 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_205.png and b/TMessagesProj/src/emojis/twitter/emoji/6_205.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_206.png b/TMessagesProj/src/emojis/twitter/emoji/6_206.png index c312dad670..1539c1e262 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_206.png and b/TMessagesProj/src/emojis/twitter/emoji/6_206.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_207.png b/TMessagesProj/src/emojis/twitter/emoji/6_207.png index 9cd3cfbfe0..0469068292 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_207.png and b/TMessagesProj/src/emojis/twitter/emoji/6_207.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_208.png b/TMessagesProj/src/emojis/twitter/emoji/6_208.png index eb0d8bad1d..576232dd3a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_208.png and b/TMessagesProj/src/emojis/twitter/emoji/6_208.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_209.png b/TMessagesProj/src/emojis/twitter/emoji/6_209.png index 
274fe55c09..bcd4975211 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_209.png and b/TMessagesProj/src/emojis/twitter/emoji/6_209.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_21.png b/TMessagesProj/src/emojis/twitter/emoji/6_21.png index 2c02f0add7..083c0f07bd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_21.png and b/TMessagesProj/src/emojis/twitter/emoji/6_21.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_210.png b/TMessagesProj/src/emojis/twitter/emoji/6_210.png index 4a31a91bb0..d9985213f1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_210.png and b/TMessagesProj/src/emojis/twitter/emoji/6_210.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_211.png b/TMessagesProj/src/emojis/twitter/emoji/6_211.png index e6a8691767..8ff331a256 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_211.png and b/TMessagesProj/src/emojis/twitter/emoji/6_211.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_212.png b/TMessagesProj/src/emojis/twitter/emoji/6_212.png index 4db56c59b4..6648c510ee 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_212.png and b/TMessagesProj/src/emojis/twitter/emoji/6_212.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_213.png b/TMessagesProj/src/emojis/twitter/emoji/6_213.png index b1ba9d4ef1..7a4d3f7497 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_213.png and b/TMessagesProj/src/emojis/twitter/emoji/6_213.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_214.png b/TMessagesProj/src/emojis/twitter/emoji/6_214.png index dcaee78659..f57d32bfb9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_214.png and b/TMessagesProj/src/emojis/twitter/emoji/6_214.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_215.png b/TMessagesProj/src/emojis/twitter/emoji/6_215.png index 747f43b852..efceb02a55 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_215.png and b/TMessagesProj/src/emojis/twitter/emoji/6_215.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_216.png b/TMessagesProj/src/emojis/twitter/emoji/6_216.png index 4f66071975..906e51d366 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_216.png and b/TMessagesProj/src/emojis/twitter/emoji/6_216.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_217.png b/TMessagesProj/src/emojis/twitter/emoji/6_217.png index 9515854f36..bab4d7708c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_217.png and b/TMessagesProj/src/emojis/twitter/emoji/6_217.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_218.png b/TMessagesProj/src/emojis/twitter/emoji/6_218.png index a627c11f15..54b40c9852 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_218.png and b/TMessagesProj/src/emojis/twitter/emoji/6_218.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_219.png b/TMessagesProj/src/emojis/twitter/emoji/6_219.png index b8d201915e..8d4468e2a5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_219.png and b/TMessagesProj/src/emojis/twitter/emoji/6_219.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_22.png b/TMessagesProj/src/emojis/twitter/emoji/6_22.png index 4f70e99169..3089d8c744 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_22.png and b/TMessagesProj/src/emojis/twitter/emoji/6_22.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_220.png 
b/TMessagesProj/src/emojis/twitter/emoji/6_220.png index 887683ca8c..2ee874e4d9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_220.png and b/TMessagesProj/src/emojis/twitter/emoji/6_220.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_221.png b/TMessagesProj/src/emojis/twitter/emoji/6_221.png index a76d311641..cb83822c9d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_221.png and b/TMessagesProj/src/emojis/twitter/emoji/6_221.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_222.png b/TMessagesProj/src/emojis/twitter/emoji/6_222.png index 4ac095ee82..994116f9fd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_222.png and b/TMessagesProj/src/emojis/twitter/emoji/6_222.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_223.png b/TMessagesProj/src/emojis/twitter/emoji/6_223.png index 9ba9439791..1488c5834c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_223.png and b/TMessagesProj/src/emojis/twitter/emoji/6_223.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_224.png b/TMessagesProj/src/emojis/twitter/emoji/6_224.png index f2a7eb9779..58441168a9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_224.png and b/TMessagesProj/src/emojis/twitter/emoji/6_224.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_225.png b/TMessagesProj/src/emojis/twitter/emoji/6_225.png index 5d1d8e5229..d0fb618ecc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_225.png and b/TMessagesProj/src/emojis/twitter/emoji/6_225.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_226.png b/TMessagesProj/src/emojis/twitter/emoji/6_226.png index 46d9589904..f5b32379e4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_226.png and b/TMessagesProj/src/emojis/twitter/emoji/6_226.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_227.png b/TMessagesProj/src/emojis/twitter/emoji/6_227.png index 4ebefaca98..8eb11df79b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_227.png and b/TMessagesProj/src/emojis/twitter/emoji/6_227.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_228.png b/TMessagesProj/src/emojis/twitter/emoji/6_228.png index 565d16230f..0e5881c21d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_228.png and b/TMessagesProj/src/emojis/twitter/emoji/6_228.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_229.png b/TMessagesProj/src/emojis/twitter/emoji/6_229.png index 23192434d7..0ea3c5ee05 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_229.png and b/TMessagesProj/src/emojis/twitter/emoji/6_229.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_23.png b/TMessagesProj/src/emojis/twitter/emoji/6_23.png index 97cb5f7564..06f9cbfc64 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_23.png and b/TMessagesProj/src/emojis/twitter/emoji/6_23.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_230.png b/TMessagesProj/src/emojis/twitter/emoji/6_230.png index 7edd76d34c..a22b2757ab 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_230.png and b/TMessagesProj/src/emojis/twitter/emoji/6_230.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_231.png b/TMessagesProj/src/emojis/twitter/emoji/6_231.png index 9168c81fe2..9f9c95ba5c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_231.png and b/TMessagesProj/src/emojis/twitter/emoji/6_231.png differ diff --git 
a/TMessagesProj/src/emojis/twitter/emoji/6_232.png b/TMessagesProj/src/emojis/twitter/emoji/6_232.png index 543b9f8d80..0a00af72e6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_232.png and b/TMessagesProj/src/emojis/twitter/emoji/6_232.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_233.png b/TMessagesProj/src/emojis/twitter/emoji/6_233.png index 0cf0b657de..96655e33cd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_233.png and b/TMessagesProj/src/emojis/twitter/emoji/6_233.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_234.png b/TMessagesProj/src/emojis/twitter/emoji/6_234.png index 45812c190e..99f815ddb5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_234.png and b/TMessagesProj/src/emojis/twitter/emoji/6_234.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_235.png b/TMessagesProj/src/emojis/twitter/emoji/6_235.png index 8394b443af..60b272c1c4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_235.png and b/TMessagesProj/src/emojis/twitter/emoji/6_235.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_236.png b/TMessagesProj/src/emojis/twitter/emoji/6_236.png index 0b1260738b..666eb8b366 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_236.png and b/TMessagesProj/src/emojis/twitter/emoji/6_236.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_237.png b/TMessagesProj/src/emojis/twitter/emoji/6_237.png index c7a7f572a0..c231c739f3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_237.png and b/TMessagesProj/src/emojis/twitter/emoji/6_237.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_238.png b/TMessagesProj/src/emojis/twitter/emoji/6_238.png index 08f9df6037..719005af38 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_238.png and b/TMessagesProj/src/emojis/twitter/emoji/6_238.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_239.png b/TMessagesProj/src/emojis/twitter/emoji/6_239.png index e2bad3f1f7..a5fac885e3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_239.png and b/TMessagesProj/src/emojis/twitter/emoji/6_239.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_24.png b/TMessagesProj/src/emojis/twitter/emoji/6_24.png index bb5640b0a3..37cb1c1f27 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_24.png and b/TMessagesProj/src/emojis/twitter/emoji/6_24.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_240.png b/TMessagesProj/src/emojis/twitter/emoji/6_240.png index 6c43962d14..5304666f2e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_240.png and b/TMessagesProj/src/emojis/twitter/emoji/6_240.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_241.png b/TMessagesProj/src/emojis/twitter/emoji/6_241.png index 67f77ce4c2..4778925527 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_241.png and b/TMessagesProj/src/emojis/twitter/emoji/6_241.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_242.png b/TMessagesProj/src/emojis/twitter/emoji/6_242.png index 21111687f4..19b31415f7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_242.png and b/TMessagesProj/src/emojis/twitter/emoji/6_242.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_243.png b/TMessagesProj/src/emojis/twitter/emoji/6_243.png index 9889dfdb64..2392486e13 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_243.png and b/TMessagesProj/src/emojis/twitter/emoji/6_243.png 
differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_244.png b/TMessagesProj/src/emojis/twitter/emoji/6_244.png index f5ba30c93c..828b37fe09 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_244.png and b/TMessagesProj/src/emojis/twitter/emoji/6_244.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_245.png b/TMessagesProj/src/emojis/twitter/emoji/6_245.png index cc94981c86..eae3ae6b8e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_245.png and b/TMessagesProj/src/emojis/twitter/emoji/6_245.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_246.png b/TMessagesProj/src/emojis/twitter/emoji/6_246.png index 5122003d9a..9296d7ae1e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_246.png and b/TMessagesProj/src/emojis/twitter/emoji/6_246.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_247.png b/TMessagesProj/src/emojis/twitter/emoji/6_247.png index e50373498f..a9a9526a3f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_247.png and b/TMessagesProj/src/emojis/twitter/emoji/6_247.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_248.png b/TMessagesProj/src/emojis/twitter/emoji/6_248.png index 24d083ec84..e6340745a7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_248.png and b/TMessagesProj/src/emojis/twitter/emoji/6_248.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_249.png b/TMessagesProj/src/emojis/twitter/emoji/6_249.png index 82bc92402d..73a8677832 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_249.png and b/TMessagesProj/src/emojis/twitter/emoji/6_249.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_25.png b/TMessagesProj/src/emojis/twitter/emoji/6_25.png index ab156e4a16..8cf93751ea 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_25.png and b/TMessagesProj/src/emojis/twitter/emoji/6_25.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_250.png b/TMessagesProj/src/emojis/twitter/emoji/6_250.png index 36b3c242bb..e81b51390c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_250.png and b/TMessagesProj/src/emojis/twitter/emoji/6_250.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_251.png b/TMessagesProj/src/emojis/twitter/emoji/6_251.png index 9ba31fb027..69c6fc7f78 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_251.png and b/TMessagesProj/src/emojis/twitter/emoji/6_251.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_252.png b/TMessagesProj/src/emojis/twitter/emoji/6_252.png index 3bf8229046..0a3418cfd6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_252.png and b/TMessagesProj/src/emojis/twitter/emoji/6_252.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_253.png b/TMessagesProj/src/emojis/twitter/emoji/6_253.png index 3c9e8a7adf..fbcd33eb3e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_253.png and b/TMessagesProj/src/emojis/twitter/emoji/6_253.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_254.png b/TMessagesProj/src/emojis/twitter/emoji/6_254.png index dde1ff7fa2..091f47672d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_254.png and b/TMessagesProj/src/emojis/twitter/emoji/6_254.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_255.png b/TMessagesProj/src/emojis/twitter/emoji/6_255.png index 097e2ccaf4..623d978412 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_255.png and 
b/TMessagesProj/src/emojis/twitter/emoji/6_255.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_256.png b/TMessagesProj/src/emojis/twitter/emoji/6_256.png index 516ad1a00e..f24dbd2019 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_256.png and b/TMessagesProj/src/emojis/twitter/emoji/6_256.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_257.png b/TMessagesProj/src/emojis/twitter/emoji/6_257.png index 16c1522dc1..358e466e7c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_257.png and b/TMessagesProj/src/emojis/twitter/emoji/6_257.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_258.png b/TMessagesProj/src/emojis/twitter/emoji/6_258.png index cf88061a60..84fd5ff302 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_258.png and b/TMessagesProj/src/emojis/twitter/emoji/6_258.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_259.png b/TMessagesProj/src/emojis/twitter/emoji/6_259.png index d847db23f0..cc2ff81850 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_259.png and b/TMessagesProj/src/emojis/twitter/emoji/6_259.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_26.png b/TMessagesProj/src/emojis/twitter/emoji/6_26.png index 93ff319655..211306a8b1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_26.png and b/TMessagesProj/src/emojis/twitter/emoji/6_26.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_260.png b/TMessagesProj/src/emojis/twitter/emoji/6_260.png index 002c0bf7aa..a8a0d94b8e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_260.png and b/TMessagesProj/src/emojis/twitter/emoji/6_260.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_261.png b/TMessagesProj/src/emojis/twitter/emoji/6_261.png index bcf4b30cdd..f1c01da9e6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_261.png and b/TMessagesProj/src/emojis/twitter/emoji/6_261.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_262.png b/TMessagesProj/src/emojis/twitter/emoji/6_262.png index 40826fcf1c..a8c7a88e99 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_262.png and b/TMessagesProj/src/emojis/twitter/emoji/6_262.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_263.png b/TMessagesProj/src/emojis/twitter/emoji/6_263.png index 279dbc324f..b6df5aa703 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_263.png and b/TMessagesProj/src/emojis/twitter/emoji/6_263.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_264.png b/TMessagesProj/src/emojis/twitter/emoji/6_264.png index 26f7020104..0f813e5a2b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_264.png and b/TMessagesProj/src/emojis/twitter/emoji/6_264.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_265.png b/TMessagesProj/src/emojis/twitter/emoji/6_265.png index 1427454743..211b8d5fc6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_265.png and b/TMessagesProj/src/emojis/twitter/emoji/6_265.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_266.png b/TMessagesProj/src/emojis/twitter/emoji/6_266.png index fc60f2c1f2..3c51349f28 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_266.png and b/TMessagesProj/src/emojis/twitter/emoji/6_266.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_267.png b/TMessagesProj/src/emojis/twitter/emoji/6_267.png index b9e0647920..25319844c2 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/6_267.png and b/TMessagesProj/src/emojis/twitter/emoji/6_267.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_268.png b/TMessagesProj/src/emojis/twitter/emoji/6_268.png index d3240f962d..864bd9e5fb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_268.png and b/TMessagesProj/src/emojis/twitter/emoji/6_268.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_269.png b/TMessagesProj/src/emojis/twitter/emoji/6_269.png index b176bcde1a..887d7aac16 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_269.png and b/TMessagesProj/src/emojis/twitter/emoji/6_269.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_27.png b/TMessagesProj/src/emojis/twitter/emoji/6_27.png index 3c5d45d1e7..0de85ca8bd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_27.png and b/TMessagesProj/src/emojis/twitter/emoji/6_27.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_270.png b/TMessagesProj/src/emojis/twitter/emoji/6_270.png index e59ab80489..3294055758 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_270.png and b/TMessagesProj/src/emojis/twitter/emoji/6_270.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_271.png b/TMessagesProj/src/emojis/twitter/emoji/6_271.png index f7549e063e..95ea1e5219 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_271.png and b/TMessagesProj/src/emojis/twitter/emoji/6_271.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_272.png b/TMessagesProj/src/emojis/twitter/emoji/6_272.png index 1b1d7209ae..e6c087f16a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_272.png and b/TMessagesProj/src/emojis/twitter/emoji/6_272.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_273.png b/TMessagesProj/src/emojis/twitter/emoji/6_273.png index 78bbee3ba0..3e913167d3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_273.png and b/TMessagesProj/src/emojis/twitter/emoji/6_273.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_274.png b/TMessagesProj/src/emojis/twitter/emoji/6_274.png index 374197551f..252323e97d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_274.png and b/TMessagesProj/src/emojis/twitter/emoji/6_274.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_275.png b/TMessagesProj/src/emojis/twitter/emoji/6_275.png index 15ce85f733..877597bb73 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_275.png and b/TMessagesProj/src/emojis/twitter/emoji/6_275.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_276.png b/TMessagesProj/src/emojis/twitter/emoji/6_276.png index fd200c36aa..0c862b037c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_276.png and b/TMessagesProj/src/emojis/twitter/emoji/6_276.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_277.png b/TMessagesProj/src/emojis/twitter/emoji/6_277.png index 28679acca6..0d9fe4d8da 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_277.png and b/TMessagesProj/src/emojis/twitter/emoji/6_277.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_278.png b/TMessagesProj/src/emojis/twitter/emoji/6_278.png index 7241177f94..3036e9ee8e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_278.png and b/TMessagesProj/src/emojis/twitter/emoji/6_278.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_279.png b/TMessagesProj/src/emojis/twitter/emoji/6_279.png index a987999767..cce9520e39 
100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_279.png and b/TMessagesProj/src/emojis/twitter/emoji/6_279.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_28.png b/TMessagesProj/src/emojis/twitter/emoji/6_28.png index 786a293a52..19c93e79e2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_28.png and b/TMessagesProj/src/emojis/twitter/emoji/6_28.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_280.png b/TMessagesProj/src/emojis/twitter/emoji/6_280.png index 8330e8c36c..1843888985 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_280.png and b/TMessagesProj/src/emojis/twitter/emoji/6_280.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_281.png b/TMessagesProj/src/emojis/twitter/emoji/6_281.png index 3aa389f094..3fa452c04c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_281.png and b/TMessagesProj/src/emojis/twitter/emoji/6_281.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_282.png b/TMessagesProj/src/emojis/twitter/emoji/6_282.png index 455833e94c..c89c692636 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_282.png and b/TMessagesProj/src/emojis/twitter/emoji/6_282.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_283.png b/TMessagesProj/src/emojis/twitter/emoji/6_283.png index deca500191..7581659f31 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_283.png and b/TMessagesProj/src/emojis/twitter/emoji/6_283.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_284.png b/TMessagesProj/src/emojis/twitter/emoji/6_284.png index 3d33005439..b397a67445 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_284.png and b/TMessagesProj/src/emojis/twitter/emoji/6_284.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_285.png b/TMessagesProj/src/emojis/twitter/emoji/6_285.png index d89a79a8ed..5a1fc3ecc7 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_285.png and b/TMessagesProj/src/emojis/twitter/emoji/6_285.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_286.png b/TMessagesProj/src/emojis/twitter/emoji/6_286.png index 074f501aa8..607d95cc6d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_286.png and b/TMessagesProj/src/emojis/twitter/emoji/6_286.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_287.png b/TMessagesProj/src/emojis/twitter/emoji/6_287.png index 2ae92797e3..72ae11554a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_287.png and b/TMessagesProj/src/emojis/twitter/emoji/6_287.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_288.png b/TMessagesProj/src/emojis/twitter/emoji/6_288.png index b7117ba5de..4c35fab013 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_288.png and b/TMessagesProj/src/emojis/twitter/emoji/6_288.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_289.png b/TMessagesProj/src/emojis/twitter/emoji/6_289.png index 9f97c3b615..0c3a77833f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_289.png and b/TMessagesProj/src/emojis/twitter/emoji/6_289.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_29.png b/TMessagesProj/src/emojis/twitter/emoji/6_29.png index 7ba5fdd6b0..7a58129f0a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_29.png and b/TMessagesProj/src/emojis/twitter/emoji/6_29.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_290.png b/TMessagesProj/src/emojis/twitter/emoji/6_290.png index 
24156f69bb..5f44f43e56 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_290.png and b/TMessagesProj/src/emojis/twitter/emoji/6_290.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_291.png b/TMessagesProj/src/emojis/twitter/emoji/6_291.png index 3a3bdb1d21..a04e4a7411 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_291.png and b/TMessagesProj/src/emojis/twitter/emoji/6_291.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_292.png b/TMessagesProj/src/emojis/twitter/emoji/6_292.png index 0502d27570..cee77e0dc1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_292.png and b/TMessagesProj/src/emojis/twitter/emoji/6_292.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_293.png b/TMessagesProj/src/emojis/twitter/emoji/6_293.png new file mode 100644 index 0000000000..6f6277e5e7 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/6_293.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_294.png b/TMessagesProj/src/emojis/twitter/emoji/6_294.png new file mode 100644 index 0000000000..787a076c20 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/6_294.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_295.png b/TMessagesProj/src/emojis/twitter/emoji/6_295.png new file mode 100644 index 0000000000..d36eba9148 Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/6_295.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_296.png b/TMessagesProj/src/emojis/twitter/emoji/6_296.png new file mode 100644 index 0000000000..ba4f7aae8b Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/6_296.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_297.png b/TMessagesProj/src/emojis/twitter/emoji/6_297.png new file mode 100644 index 0000000000..4df93bbeda Binary files /dev/null and b/TMessagesProj/src/emojis/twitter/emoji/6_297.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_3.png b/TMessagesProj/src/emojis/twitter/emoji/6_3.png index be85ddbb7c..9b10d1a71f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_3.png and b/TMessagesProj/src/emojis/twitter/emoji/6_3.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_30.png b/TMessagesProj/src/emojis/twitter/emoji/6_30.png index 13592a00ba..a47af2f047 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_30.png and b/TMessagesProj/src/emojis/twitter/emoji/6_30.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_31.png b/TMessagesProj/src/emojis/twitter/emoji/6_31.png index 36f7ad17f4..03d138e793 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_31.png and b/TMessagesProj/src/emojis/twitter/emoji/6_31.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_32.png b/TMessagesProj/src/emojis/twitter/emoji/6_32.png index 94c739bbc8..1d0d7c36c3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_32.png and b/TMessagesProj/src/emojis/twitter/emoji/6_32.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_33.png b/TMessagesProj/src/emojis/twitter/emoji/6_33.png index 10d508996b..770280a4e9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_33.png and b/TMessagesProj/src/emojis/twitter/emoji/6_33.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_34.png b/TMessagesProj/src/emojis/twitter/emoji/6_34.png index f079567805..60bad2352d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_34.png and 
b/TMessagesProj/src/emojis/twitter/emoji/6_34.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_35.png b/TMessagesProj/src/emojis/twitter/emoji/6_35.png index 3807e59223..97b909ca5e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_35.png and b/TMessagesProj/src/emojis/twitter/emoji/6_35.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_36.png b/TMessagesProj/src/emojis/twitter/emoji/6_36.png index 857fa3f064..8b6c88584c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_36.png and b/TMessagesProj/src/emojis/twitter/emoji/6_36.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_37.png b/TMessagesProj/src/emojis/twitter/emoji/6_37.png index 6de27149f7..0ba0706679 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_37.png and b/TMessagesProj/src/emojis/twitter/emoji/6_37.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_38.png b/TMessagesProj/src/emojis/twitter/emoji/6_38.png index 188abd8704..e7b175986f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_38.png and b/TMessagesProj/src/emojis/twitter/emoji/6_38.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_39.png b/TMessagesProj/src/emojis/twitter/emoji/6_39.png index 9e8970698c..95cf1431f2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_39.png and b/TMessagesProj/src/emojis/twitter/emoji/6_39.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_4.png b/TMessagesProj/src/emojis/twitter/emoji/6_4.png index 3a468d859d..1ba8fe14c5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_4.png and b/TMessagesProj/src/emojis/twitter/emoji/6_4.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_40.png b/TMessagesProj/src/emojis/twitter/emoji/6_40.png index 59faf95ef6..b3e4102e7f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_40.png and b/TMessagesProj/src/emojis/twitter/emoji/6_40.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_41.png b/TMessagesProj/src/emojis/twitter/emoji/6_41.png index 24e066054f..223750a984 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_41.png and b/TMessagesProj/src/emojis/twitter/emoji/6_41.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_42.png b/TMessagesProj/src/emojis/twitter/emoji/6_42.png index 4173a31f6d..179238ef93 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_42.png and b/TMessagesProj/src/emojis/twitter/emoji/6_42.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_43.png b/TMessagesProj/src/emojis/twitter/emoji/6_43.png index 45f1f08693..cd0d577c61 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_43.png and b/TMessagesProj/src/emojis/twitter/emoji/6_43.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_44.png b/TMessagesProj/src/emojis/twitter/emoji/6_44.png index 74f20e33f0..ded4663e55 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_44.png and b/TMessagesProj/src/emojis/twitter/emoji/6_44.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_45.png b/TMessagesProj/src/emojis/twitter/emoji/6_45.png index ce18652d25..3874e72190 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_45.png and b/TMessagesProj/src/emojis/twitter/emoji/6_45.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_46.png b/TMessagesProj/src/emojis/twitter/emoji/6_46.png index 755aba26e2..9b974af459 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_46.png and 
b/TMessagesProj/src/emojis/twitter/emoji/6_46.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_47.png b/TMessagesProj/src/emojis/twitter/emoji/6_47.png index 1091406ad9..328e705f1e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_47.png and b/TMessagesProj/src/emojis/twitter/emoji/6_47.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_48.png b/TMessagesProj/src/emojis/twitter/emoji/6_48.png index bc6f8afc93..60258b5e35 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_48.png and b/TMessagesProj/src/emojis/twitter/emoji/6_48.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_49.png b/TMessagesProj/src/emojis/twitter/emoji/6_49.png index 808e2b7597..4bc918e4b6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_49.png and b/TMessagesProj/src/emojis/twitter/emoji/6_49.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_5.png b/TMessagesProj/src/emojis/twitter/emoji/6_5.png index 408332d0be..7d36f18a20 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_5.png and b/TMessagesProj/src/emojis/twitter/emoji/6_5.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_50.png b/TMessagesProj/src/emojis/twitter/emoji/6_50.png index ea8db6308f..24557981e1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_50.png and b/TMessagesProj/src/emojis/twitter/emoji/6_50.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_51.png b/TMessagesProj/src/emojis/twitter/emoji/6_51.png index 7188d0236d..b4f3806818 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_51.png and b/TMessagesProj/src/emojis/twitter/emoji/6_51.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_52.png b/TMessagesProj/src/emojis/twitter/emoji/6_52.png index 45a65b8cc3..1f10ee1771 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_52.png and b/TMessagesProj/src/emojis/twitter/emoji/6_52.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_53.png b/TMessagesProj/src/emojis/twitter/emoji/6_53.png index 22e0f3ca71..46daf76ba0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_53.png and b/TMessagesProj/src/emojis/twitter/emoji/6_53.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_54.png b/TMessagesProj/src/emojis/twitter/emoji/6_54.png index d5a6a53540..52fe5c1865 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_54.png and b/TMessagesProj/src/emojis/twitter/emoji/6_54.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_55.png b/TMessagesProj/src/emojis/twitter/emoji/6_55.png index 8b6aecdc6c..e410e06ad6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_55.png and b/TMessagesProj/src/emojis/twitter/emoji/6_55.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_56.png b/TMessagesProj/src/emojis/twitter/emoji/6_56.png index f5daf22264..66baaa37a5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_56.png and b/TMessagesProj/src/emojis/twitter/emoji/6_56.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_57.png b/TMessagesProj/src/emojis/twitter/emoji/6_57.png index aee497099d..6090e9d5be 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_57.png and b/TMessagesProj/src/emojis/twitter/emoji/6_57.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_58.png b/TMessagesProj/src/emojis/twitter/emoji/6_58.png index f309a9de3a..d6f85f77cb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_58.png and 
b/TMessagesProj/src/emojis/twitter/emoji/6_58.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_59.png b/TMessagesProj/src/emojis/twitter/emoji/6_59.png index f999d1ecab..3b99456a3c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_59.png and b/TMessagesProj/src/emojis/twitter/emoji/6_59.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_6.png b/TMessagesProj/src/emojis/twitter/emoji/6_6.png index 67e5498fc9..11a3d85fc4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_6.png and b/TMessagesProj/src/emojis/twitter/emoji/6_6.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_60.png b/TMessagesProj/src/emojis/twitter/emoji/6_60.png index 12a95fdf1c..177f782f7b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_60.png and b/TMessagesProj/src/emojis/twitter/emoji/6_60.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_61.png b/TMessagesProj/src/emojis/twitter/emoji/6_61.png index 13b564cf01..a2d2b9733c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_61.png and b/TMessagesProj/src/emojis/twitter/emoji/6_61.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_62.png b/TMessagesProj/src/emojis/twitter/emoji/6_62.png index d8abaaca3b..01b1393c19 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_62.png and b/TMessagesProj/src/emojis/twitter/emoji/6_62.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_63.png b/TMessagesProj/src/emojis/twitter/emoji/6_63.png index 688d0a4b42..9e9eb61bd8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_63.png and b/TMessagesProj/src/emojis/twitter/emoji/6_63.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_64.png b/TMessagesProj/src/emojis/twitter/emoji/6_64.png index 3774caf073..0f03d01412 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_64.png and b/TMessagesProj/src/emojis/twitter/emoji/6_64.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_65.png b/TMessagesProj/src/emojis/twitter/emoji/6_65.png index d4db273ad4..f00f40ad0b 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_65.png and b/TMessagesProj/src/emojis/twitter/emoji/6_65.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_66.png b/TMessagesProj/src/emojis/twitter/emoji/6_66.png index 126cc0872e..c5e24637b6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_66.png and b/TMessagesProj/src/emojis/twitter/emoji/6_66.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_67.png b/TMessagesProj/src/emojis/twitter/emoji/6_67.png index 3eff47d3a7..4b3e0393d1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_67.png and b/TMessagesProj/src/emojis/twitter/emoji/6_67.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_68.png b/TMessagesProj/src/emojis/twitter/emoji/6_68.png index 196b47c29c..e449feded1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_68.png and b/TMessagesProj/src/emojis/twitter/emoji/6_68.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_69.png b/TMessagesProj/src/emojis/twitter/emoji/6_69.png index aa4c254b24..7df96279e8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_69.png and b/TMessagesProj/src/emojis/twitter/emoji/6_69.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_7.png b/TMessagesProj/src/emojis/twitter/emoji/6_7.png index 95cc1550d8..5e4889b0fe 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_7.png and 
b/TMessagesProj/src/emojis/twitter/emoji/6_7.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_70.png b/TMessagesProj/src/emojis/twitter/emoji/6_70.png index 62df4ce9c8..cccedae570 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_70.png and b/TMessagesProj/src/emojis/twitter/emoji/6_70.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_71.png b/TMessagesProj/src/emojis/twitter/emoji/6_71.png index 0594c9e071..566e8780cd 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_71.png and b/TMessagesProj/src/emojis/twitter/emoji/6_71.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_72.png b/TMessagesProj/src/emojis/twitter/emoji/6_72.png index 9885281dcf..66b5055c2c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_72.png and b/TMessagesProj/src/emojis/twitter/emoji/6_72.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_73.png b/TMessagesProj/src/emojis/twitter/emoji/6_73.png index f8a24c2a66..bdfd2297c2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_73.png and b/TMessagesProj/src/emojis/twitter/emoji/6_73.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_74.png b/TMessagesProj/src/emojis/twitter/emoji/6_74.png index 75ce8d9a17..17f1c289fa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_74.png and b/TMessagesProj/src/emojis/twitter/emoji/6_74.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_75.png b/TMessagesProj/src/emojis/twitter/emoji/6_75.png index 235a9365a1..d12c06485f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_75.png and b/TMessagesProj/src/emojis/twitter/emoji/6_75.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_76.png b/TMessagesProj/src/emojis/twitter/emoji/6_76.png index 007b03808a..0066c777fa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_76.png and b/TMessagesProj/src/emojis/twitter/emoji/6_76.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_77.png b/TMessagesProj/src/emojis/twitter/emoji/6_77.png index 503f207e24..8a7d706e90 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_77.png and b/TMessagesProj/src/emojis/twitter/emoji/6_77.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_78.png b/TMessagesProj/src/emojis/twitter/emoji/6_78.png index 7a4b3cae39..d0c058c4ef 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_78.png and b/TMessagesProj/src/emojis/twitter/emoji/6_78.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_79.png b/TMessagesProj/src/emojis/twitter/emoji/6_79.png index 62e9480dc4..21f722d467 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_79.png and b/TMessagesProj/src/emojis/twitter/emoji/6_79.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_8.png b/TMessagesProj/src/emojis/twitter/emoji/6_8.png index 4a64a2a730..9a6cb8a680 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_8.png and b/TMessagesProj/src/emojis/twitter/emoji/6_8.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_80.png b/TMessagesProj/src/emojis/twitter/emoji/6_80.png index 498f022574..530d200c97 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_80.png and b/TMessagesProj/src/emojis/twitter/emoji/6_80.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_81.png b/TMessagesProj/src/emojis/twitter/emoji/6_81.png index 6669132976..183060e6bb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_81.png and 
b/TMessagesProj/src/emojis/twitter/emoji/6_81.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_82.png b/TMessagesProj/src/emojis/twitter/emoji/6_82.png index 36feaa9f7a..b694478238 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_82.png and b/TMessagesProj/src/emojis/twitter/emoji/6_82.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_83.png b/TMessagesProj/src/emojis/twitter/emoji/6_83.png index 29b2df2e69..257119f637 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_83.png and b/TMessagesProj/src/emojis/twitter/emoji/6_83.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_84.png b/TMessagesProj/src/emojis/twitter/emoji/6_84.png index 43da659f65..77aef374b1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_84.png and b/TMessagesProj/src/emojis/twitter/emoji/6_84.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_85.png b/TMessagesProj/src/emojis/twitter/emoji/6_85.png index 5d4e689303..fef6f5e00c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_85.png and b/TMessagesProj/src/emojis/twitter/emoji/6_85.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_86.png b/TMessagesProj/src/emojis/twitter/emoji/6_86.png index 473ca2ed85..1fd8c8a148 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_86.png and b/TMessagesProj/src/emojis/twitter/emoji/6_86.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_87.png b/TMessagesProj/src/emojis/twitter/emoji/6_87.png index 8911c74dff..c954414cd0 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_87.png and b/TMessagesProj/src/emojis/twitter/emoji/6_87.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_88.png b/TMessagesProj/src/emojis/twitter/emoji/6_88.png index 9a049d7739..73d98ff0a1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_88.png and b/TMessagesProj/src/emojis/twitter/emoji/6_88.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_89.png b/TMessagesProj/src/emojis/twitter/emoji/6_89.png index d1226ad7f1..6e6bcebce3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_89.png and b/TMessagesProj/src/emojis/twitter/emoji/6_89.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_9.png b/TMessagesProj/src/emojis/twitter/emoji/6_9.png index 7153c1d362..27dad82d9d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_9.png and b/TMessagesProj/src/emojis/twitter/emoji/6_9.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_90.png b/TMessagesProj/src/emojis/twitter/emoji/6_90.png index 7b382db7d1..3a78bf2915 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_90.png and b/TMessagesProj/src/emojis/twitter/emoji/6_90.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_91.png b/TMessagesProj/src/emojis/twitter/emoji/6_91.png index aea49ed6e9..920c1d0d62 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_91.png and b/TMessagesProj/src/emojis/twitter/emoji/6_91.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_92.png b/TMessagesProj/src/emojis/twitter/emoji/6_92.png index 0868c3d7f6..331a83e536 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_92.png and b/TMessagesProj/src/emojis/twitter/emoji/6_92.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_93.png b/TMessagesProj/src/emojis/twitter/emoji/6_93.png index 26de4b627e..0324756547 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_93.png and 
b/TMessagesProj/src/emojis/twitter/emoji/6_93.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_94.png b/TMessagesProj/src/emojis/twitter/emoji/6_94.png index 5043899b82..24310ccda2 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_94.png and b/TMessagesProj/src/emojis/twitter/emoji/6_94.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_95.png b/TMessagesProj/src/emojis/twitter/emoji/6_95.png index 65af813181..2ae86c5cd4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_95.png and b/TMessagesProj/src/emojis/twitter/emoji/6_95.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_96.png b/TMessagesProj/src/emojis/twitter/emoji/6_96.png index 7a3c4f48e3..45a515ab16 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_96.png and b/TMessagesProj/src/emojis/twitter/emoji/6_96.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_97.png b/TMessagesProj/src/emojis/twitter/emoji/6_97.png index 00be4d5e2f..8c8aa5e906 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_97.png and b/TMessagesProj/src/emojis/twitter/emoji/6_97.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_98.png b/TMessagesProj/src/emojis/twitter/emoji/6_98.png index 8a85237c39..c93dd54d79 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_98.png and b/TMessagesProj/src/emojis/twitter/emoji/6_98.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/6_99.png b/TMessagesProj/src/emojis/twitter/emoji/6_99.png index 56ee7c3002..ed04dd290d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/6_99.png and b/TMessagesProj/src/emojis/twitter/emoji/6_99.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_0.png b/TMessagesProj/src/emojis/twitter/emoji/7_0.png index cfba8c6839..28287eb28d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_0.png and b/TMessagesProj/src/emojis/twitter/emoji/7_0.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_1.png b/TMessagesProj/src/emojis/twitter/emoji/7_1.png index 094fc53db1..0c80081104 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_1.png and b/TMessagesProj/src/emojis/twitter/emoji/7_1.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_10.png b/TMessagesProj/src/emojis/twitter/emoji/7_10.png index 7f9019d6bf..975d5fb8b4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_10.png and b/TMessagesProj/src/emojis/twitter/emoji/7_10.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_100.png b/TMessagesProj/src/emojis/twitter/emoji/7_100.png index bb3b014748..43aeaf7483 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_100.png and b/TMessagesProj/src/emojis/twitter/emoji/7_100.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_101.png b/TMessagesProj/src/emojis/twitter/emoji/7_101.png index c552920bf8..6ea5223eef 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_101.png and b/TMessagesProj/src/emojis/twitter/emoji/7_101.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_102.png b/TMessagesProj/src/emojis/twitter/emoji/7_102.png index 267a59e41a..54e7fd135d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_102.png and b/TMessagesProj/src/emojis/twitter/emoji/7_102.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_103.png b/TMessagesProj/src/emojis/twitter/emoji/7_103.png index cb66acc52e..652c33fe10 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_103.png and 
b/TMessagesProj/src/emojis/twitter/emoji/7_103.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_104.png b/TMessagesProj/src/emojis/twitter/emoji/7_104.png index 5292e6d5ab..027da4f770 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_104.png and b/TMessagesProj/src/emojis/twitter/emoji/7_104.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_105.png b/TMessagesProj/src/emojis/twitter/emoji/7_105.png index 9792759863..c2572e8200 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_105.png and b/TMessagesProj/src/emojis/twitter/emoji/7_105.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_106.png b/TMessagesProj/src/emojis/twitter/emoji/7_106.png index a1ef38abcf..753e5b7474 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_106.png and b/TMessagesProj/src/emojis/twitter/emoji/7_106.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_107.png b/TMessagesProj/src/emojis/twitter/emoji/7_107.png index 7c2da4d201..a7165776fa 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_107.png and b/TMessagesProj/src/emojis/twitter/emoji/7_107.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_108.png b/TMessagesProj/src/emojis/twitter/emoji/7_108.png index 3380aa0114..06ddb4090f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_108.png and b/TMessagesProj/src/emojis/twitter/emoji/7_108.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_109.png b/TMessagesProj/src/emojis/twitter/emoji/7_109.png index afaf0b0a86..f12db37b0d 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_109.png and b/TMessagesProj/src/emojis/twitter/emoji/7_109.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_11.png b/TMessagesProj/src/emojis/twitter/emoji/7_11.png index 075ec6a466..7bfb8c5645 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_11.png and b/TMessagesProj/src/emojis/twitter/emoji/7_11.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_110.png b/TMessagesProj/src/emojis/twitter/emoji/7_110.png index 0705b26c2d..623f7cc7a6 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_110.png and b/TMessagesProj/src/emojis/twitter/emoji/7_110.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_111.png b/TMessagesProj/src/emojis/twitter/emoji/7_111.png index 76d87db216..0dbdd81b6c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_111.png and b/TMessagesProj/src/emojis/twitter/emoji/7_111.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_112.png b/TMessagesProj/src/emojis/twitter/emoji/7_112.png index 88149df0ab..e26d5f68ba 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_112.png and b/TMessagesProj/src/emojis/twitter/emoji/7_112.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_113.png b/TMessagesProj/src/emojis/twitter/emoji/7_113.png index 064b0fb96c..474a599538 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_113.png and b/TMessagesProj/src/emojis/twitter/emoji/7_113.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_114.png b/TMessagesProj/src/emojis/twitter/emoji/7_114.png index 68db4e32e8..93551f7829 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_114.png and b/TMessagesProj/src/emojis/twitter/emoji/7_114.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_115.png b/TMessagesProj/src/emojis/twitter/emoji/7_115.png index d8490b9f54..f4ab0ebdea 100644 Binary files 
a/TMessagesProj/src/emojis/twitter/emoji/7_115.png and b/TMessagesProj/src/emojis/twitter/emoji/7_115.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_116.png b/TMessagesProj/src/emojis/twitter/emoji/7_116.png index 188f9276a4..13883b6d79 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_116.png and b/TMessagesProj/src/emojis/twitter/emoji/7_116.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_117.png b/TMessagesProj/src/emojis/twitter/emoji/7_117.png index 0b429b1c2b..10dd5edc2e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_117.png and b/TMessagesProj/src/emojis/twitter/emoji/7_117.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_118.png b/TMessagesProj/src/emojis/twitter/emoji/7_118.png index 60e409bba3..cc73738d3a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_118.png and b/TMessagesProj/src/emojis/twitter/emoji/7_118.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_119.png b/TMessagesProj/src/emojis/twitter/emoji/7_119.png index bb3c885ec0..c017d78966 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_119.png and b/TMessagesProj/src/emojis/twitter/emoji/7_119.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_12.png b/TMessagesProj/src/emojis/twitter/emoji/7_12.png index 82afefd9e5..ef9524a6c5 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_12.png and b/TMessagesProj/src/emojis/twitter/emoji/7_12.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_120.png b/TMessagesProj/src/emojis/twitter/emoji/7_120.png index 93bbdbb0fa..43b43e68c9 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_120.png and b/TMessagesProj/src/emojis/twitter/emoji/7_120.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_121.png b/TMessagesProj/src/emojis/twitter/emoji/7_121.png index 5723c76285..7ced7db66e 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_121.png and b/TMessagesProj/src/emojis/twitter/emoji/7_121.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_122.png b/TMessagesProj/src/emojis/twitter/emoji/7_122.png index 07b2a8f3d1..86e8545114 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_122.png and b/TMessagesProj/src/emojis/twitter/emoji/7_122.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_123.png b/TMessagesProj/src/emojis/twitter/emoji/7_123.png index 1fcfc4ea93..690b23f1c3 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_123.png and b/TMessagesProj/src/emojis/twitter/emoji/7_123.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_124.png b/TMessagesProj/src/emojis/twitter/emoji/7_124.png index 110832b711..f82a9b4de8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_124.png and b/TMessagesProj/src/emojis/twitter/emoji/7_124.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_125.png b/TMessagesProj/src/emojis/twitter/emoji/7_125.png index d0c367eeab..6ea5de4476 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_125.png and b/TMessagesProj/src/emojis/twitter/emoji/7_125.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_126.png b/TMessagesProj/src/emojis/twitter/emoji/7_126.png index 6627c3e7db..853680d08c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_126.png and b/TMessagesProj/src/emojis/twitter/emoji/7_126.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_127.png b/TMessagesProj/src/emojis/twitter/emoji/7_127.png index 609152c1bb..57be020a93 
100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_127.png and b/TMessagesProj/src/emojis/twitter/emoji/7_127.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_128.png b/TMessagesProj/src/emojis/twitter/emoji/7_128.png index 469bf95cb8..92de8850eb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_128.png and b/TMessagesProj/src/emojis/twitter/emoji/7_128.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_129.png b/TMessagesProj/src/emojis/twitter/emoji/7_129.png index 1205acb366..084c73d8c4 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_129.png and b/TMessagesProj/src/emojis/twitter/emoji/7_129.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_13.png b/TMessagesProj/src/emojis/twitter/emoji/7_13.png index 3f5a0722fe..91e1087ccc 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_13.png and b/TMessagesProj/src/emojis/twitter/emoji/7_13.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_130.png b/TMessagesProj/src/emojis/twitter/emoji/7_130.png index 11be93a7b2..28e37aaf2a 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_130.png and b/TMessagesProj/src/emojis/twitter/emoji/7_130.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_131.png b/TMessagesProj/src/emojis/twitter/emoji/7_131.png index 866aeb1794..6d7345bd8f 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_131.png and b/TMessagesProj/src/emojis/twitter/emoji/7_131.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_132.png b/TMessagesProj/src/emojis/twitter/emoji/7_132.png index d2a5ef6568..8d7c850a9c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_132.png and b/TMessagesProj/src/emojis/twitter/emoji/7_132.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_133.png b/TMessagesProj/src/emojis/twitter/emoji/7_133.png index 92d8da1338..861f4ca5c8 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_133.png and b/TMessagesProj/src/emojis/twitter/emoji/7_133.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_134.png b/TMessagesProj/src/emojis/twitter/emoji/7_134.png index 80fc6ce811..b740478062 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_134.png and b/TMessagesProj/src/emojis/twitter/emoji/7_134.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_135.png b/TMessagesProj/src/emojis/twitter/emoji/7_135.png index 3cf40050ca..15dfaba2cb 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_135.png and b/TMessagesProj/src/emojis/twitter/emoji/7_135.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_136.png b/TMessagesProj/src/emojis/twitter/emoji/7_136.png index ea6d52860e..e0f9c96cc1 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_136.png and b/TMessagesProj/src/emojis/twitter/emoji/7_136.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_137.png b/TMessagesProj/src/emojis/twitter/emoji/7_137.png index 2267d5fbf2..8e1b3a2696 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_137.png and b/TMessagesProj/src/emojis/twitter/emoji/7_137.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_138.png b/TMessagesProj/src/emojis/twitter/emoji/7_138.png index 009ad19716..ab2c2fc91c 100644 Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_138.png and b/TMessagesProj/src/emojis/twitter/emoji/7_138.png differ diff --git a/TMessagesProj/src/emojis/twitter/emoji/7_139.png b/TMessagesProj/src/emojis/twitter/emoji/7_139.png index 
87b90b4441..ab0f6572f4 100644
Binary files a/TMessagesProj/src/emojis/twitter/emoji/7_139.png and b/TMessagesProj/src/emojis/twitter/emoji/7_139.png differ
[The remaining Twitter emoji sprites under TMessagesProj/src/emojis/twitter/emoji/ (7_14.png through 7_99.png in the diff's lexical order) carry identical binary-only updates; their repeated "diff --git ... index ... Binary files ... differ" stanzas are condensed here.]
diff --git a/TMessagesProj/src/main/AndroidManifest.xml b/TMessagesProj/src/main/AndroidManifest.xml
index 820e606732..e3ec28348b 100644
--- a/TMessagesProj/src/main/AndroidManifest.xml
+++ b/TMessagesProj/src/main/AndroidManifest.xml
@@ -49,6 +49,7 @@
@@ -116,7 +117,9 @@
[The XML bodies of these manifest hunks, any further manifest hunks, and the header of the file diff that follows were lost in extraction; only bare "+" markers survived. The fragment below therefore resumes mid-hunk in a file whose diff header is missing.]
     anim = new ArrayList<>(animators.values());
     animators.clear();
     for (Animator animator : anim) {
@@ -1067,9 +1041,6 @@ public void endAnimation(RecyclerView.ViewHolder item) {
         }
         super.endAnimation(item);
         restoreTransitionParams(item.itemView);
-        if (BuildVars.LOGS_ENABLED) {
-            FileLog.d("end animation");
-        }
     }

    private void restoreTransitionParams(View view) {
diff --git a/TMessagesProj/src/main/java/androidx/recyclerview/widget/DefaultItemAnimator.java b/TMessagesProj/src/main/java/androidx/recyclerview/widget/DefaultItemAnimator.java
index cc171b104d..cc458d8d8f 100644
--- a/TMessagesProj/src/main/java/androidx/recyclerview/widget/DefaultItemAnimator.java
+++ b/TMessagesProj/src/main/java/androidx/recyclerview/widget/DefaultItemAnimator.java
@@ -338,6 +338,10 @@ protected void onMoveAnimationUpdate(RecyclerView.ViewHolder holder) {

    }

+    protected void onChangeAnimationUpdate(RecyclerView.ViewHolder holder) {
+
+    }
+
    protected void beforeAnimateMoveImpl(final RecyclerView.ViewHolder holder) {

    }

@@ -346,6 +350,14 @@ protected void afterAnimateMoveImpl(final RecyclerView.ViewHolder holder) {

    }

+    protected void beforeAnimateChangeImpl(final RecyclerView.ViewHolder oldHolder, final RecyclerView.ViewHolder newHolder) {
+
+    }
+
+    protected void afterAnimateChangeImpl(final RecyclerView.ViewHolder oldHolder, final RecyclerView.ViewHolder newHolder) {
+
+    }
+
    protected void animateMoveImpl(final RecyclerView.ViewHolder holder, MoveInfo moveInfo) {
        int fromX = moveInfo.fromX;
        int fromY = moveInfo.fromY;
@@ -445,6 +457,7 @@ void animateChangeImpl(final ChangeInfo changeInfo) {
        final View view = holder == null ? null : holder.itemView;
        final RecyclerView.ViewHolder newHolder = changeInfo.newHolder;
        final View newView = newHolder != null ? newHolder.itemView : null;
+        beforeAnimateChangeImpl(changeInfo.oldHolder, changeInfo.newHolder);
        if (view != null) {
            final ViewPropertyAnimator oldViewAnim = view.animate().setDuration(getChangeRemoveDuration()).setStartDelay(getChangeDelay());
            mChangeAnimations.add(changeInfo.oldHolder);
@@ -457,6 +470,9 @@ void animateChangeImpl(final ChangeInfo changeInfo) {
                    .scaleX(1f - animateByScale(view))
                    .scaleY(1f - animateByScale(view));
            }
+            if (Build.VERSION.SDK_INT >= 19) {
+                oldViewAnim.setUpdateListener(animation1 -> onChangeAnimationUpdate(changeInfo.oldHolder));
+            }
            oldViewAnim
                    .setInterpolator(getChangeInterpolator())
                    .setListener(new AnimatorListenerAdapter() {
@@ -493,6 +509,9 @@ public void onAnimationEnd(Animator animator) {
            if (animateByScale(newView) > 0) {
                newViewAnimation.scaleX(1f).scaleY(1f);
            }
+            if (Build.VERSION.SDK_INT >= 19) {
+                newViewAnimation.setUpdateListener(animation1 -> onChangeAnimationUpdate(changeInfo.newHolder));
+            }
            newViewAnimation
                    .setListener(new AnimatorListenerAdapter() {
                        @Override
@@ -512,6 +531,8 @@ public void onAnimationEnd(Animator animator) {
                            dispatchChangeFinished(changeInfo.newHolder, false);
                            mChangeAnimations.remove(changeInfo.newHolder);
                            dispatchFinishedWhenDone();
+
+                            afterAnimateChangeImpl(changeInfo.oldHolder, changeInfo.newHolder);
                        }
                    }).start();
    }
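The DefaultItemAnimator diff above adds three protected no-op hooks, onChangeAnimationUpdate, beforeAnimateChangeImpl and afterAnimateChangeImpl, and wires them into animateChangeImpl so that subclasses can observe change animations. The sketch below is illustrative only: the subclass name and the bookkeeping are assumptions, not code from this patch; only the overridden hook signatures are taken from the hunks above.

import java.util.HashSet;
import java.util.Set;

import androidx.recyclerview.widget.DefaultItemAnimator;
import androidx.recyclerview.widget.RecyclerView;

// Hypothetical subclass of the patched DefaultItemAnimator shown above.
public class TrackingItemAnimator extends DefaultItemAnimator {

    // Holders whose change animation is currently running (illustrative bookkeeping only).
    private final Set<RecyclerView.ViewHolder> changing = new HashSet<>();

    @Override
    protected void beforeAnimateChangeImpl(RecyclerView.ViewHolder oldHolder, RecyclerView.ViewHolder newHolder) {
        // Called once per change, before either view starts animating.
        if (oldHolder != null) changing.add(oldHolder);
        if (newHolder != null) changing.add(newHolder);
    }

    @Override
    protected void onChangeAnimationUpdate(RecyclerView.ViewHolder holder) {
        // Called on every animation frame; a subclass could keep decorations or
        // dependent views in sync with the animated item here.
    }

    @Override
    protected void afterAnimateChangeImpl(RecyclerView.ViewHolder oldHolder, RecyclerView.ViewHolder newHolder) {
        // Called when the incoming holder's animation ends.
        changing.remove(oldHolder);
        changing.remove(newHolder);
    }
}

Note that the patch only attaches the per-frame update listener when Build.VERSION.SDK_INT >= 19 (ViewPropertyAnimator.setUpdateListener is unavailable earlier), so overrides of onChangeAnimationUpdate should not rely on it on older devices.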
diff --git a/TMessagesProj/src/main/java/androidx/recyclerview/widget/LinearLayoutManager.java b/TMessagesProj/src/main/java/androidx/recyclerview/widget/LinearLayoutManager.java
index ac4d357c50..f668d696fb 100644
--- a/TMessagesProj/src/main/java/androidx/recyclerview/widget/LinearLayoutManager.java
+++ b/TMessagesProj/src/main/java/androidx/recyclerview/widget/LinearLayoutManager.java
@@ -802,6 +802,10 @@ private void layoutForPredictiveAnimations(RecyclerView.Recycler recycler,
        mLayoutState.mScrapList = null;
    }

+    protected int firstPosition() {
+        return 0;
+    }
+
    private void updateAnchorInfoForLayout(RecyclerView.Recycler recycler, RecyclerView.State state,
            AnchorInfo anchorInfo) {
        if (updateAnchorFromPendingData(state, anchorInfo)) {
@@ -821,7 +825,7 @@ private void updateAnchorInfoForLayout(RecyclerView.Recycler recycler, RecyclerV
            Log.d(TAG, "deciding anchor info for fresh state");
        }
        anchorInfo.assignCoordinateFromPadding();
-        anchorInfo.mPosition = mStackFromEnd ? state.getItemCount() - 1 : 0;
+        anchorInfo.mPosition = mStackFromEnd ? state.getItemCount() - 1 : firstPosition();
    }

    /**
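The LinearLayoutManager hunks above introduce a protected firstPosition() hook (returning 0 by default) and use it as the anchor position whenever the layout anchor is computed from scratch and mStackFromEnd is false. A minimal sketch of a subclass that takes advantage of it; the class and field names are invented for illustration and do not appear in this repository:

import android.content.Context;

import androidx.recyclerview.widget.LinearLayoutManager;

// Hypothetical layout manager that starts its first ("fresh") layout at a chosen position.
public class OffsetLinearLayoutManager extends LinearLayoutManager {

    private final int initialPosition; // assumed field; 0 reproduces the stock behaviour

    public OffsetLinearLayoutManager(Context context, int initialPosition) {
        super(context);
        this.initialPosition = initialPosition;
    }

    @Override
    protected int firstPosition() {
        // Only consulted for the fresh-state anchor, per the hunk above.
        return initialPosition;
    }
}

Because the default implementation still returns 0, existing users of the stock LinearLayoutManager behave exactly as before.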
diff --git a/TMessagesProj/src/main/java/com/google/android/README.md b/TMessagesProj/src/main/java/com/google/android/README.md
index e5bc440021..8bf197bf5b 100644
--- a/TMessagesProj/src/main/java/com/google/android/README.md
+++ b/TMessagesProj/src/main/java/com/google/android/README.md
@@ -1,8 +1,19 @@
-change SimpleExoPlayer.java
+last exo player commit:
+f72ca833
+
+change ExoPlayerImpl.java
 change Player.java
-change VideoListener.java
-change AspectRatioFrameLayout.java
-change DefaultExtractorsFactory.java
+add VideoListener.java
+    add support VideoListener
+
+
+custom AspectRatioFrameLayout.java
+
+
 change MediaCodecVideoRenderer.java
 add SurfaceNotValidException.java
-change MP4Extractor.java
-    MAXIMUM_READ_AHEAD_BYTES_STREAM to 1MB
+    add hack with SurfaceNotValidException
+
+
+change MP4Extractor.java
+    MAXIMUM_READ_AHEAD_BYTES_STREAM to 1MB
diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/AbstractConcatenatedTimeline.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/AbstractConcatenatedTimeline.java
similarity index 94%
rename from TMessagesProj/src/main/java/com/google/android/exoplayer2/source/AbstractConcatenatedTimeline.java
rename to TMessagesProj/src/main/java/com/google/android/exoplayer2/AbstractConcatenatedTimeline.java
index 29ef1faa80..99fb0a4112 100644
--- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/AbstractConcatenatedTimeline.java
+++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/AbstractConcatenatedTimeline.java
@@ -13,16 +13,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package com.google.android.exoplayer2.source;
+package com.google.android.exoplayer2;

 import android.util.Pair;
-import com.google.android.exoplayer2.C;
-import com.google.android.exoplayer2.Player;
-import com.google.android.exoplayer2.Timeline;
+import com.google.android.exoplayer2.source.ShuffleOrder;
 import com.google.android.exoplayer2.util.Assertions;

 /** Abstract base class for the concatenation of one or more {@link Timeline}s. */
-/* package */ abstract class AbstractConcatenatedTimeline extends Timeline {
+public abstract class AbstractConcatenatedTimeline extends Timeline {

  private final int childCount;
  private final ShuffleOrder shuffleOrder;
@@ -34,7 +32,7 @@
   * @param concatenatedUid UID of a period in a concatenated timeline.
   * @return UID of the child timeline this period belongs to.
   */
-  @SuppressWarnings("nullness:return.type.incompatible")
+  @SuppressWarnings("nullness:return")
  public static Object getChildTimelineUidFromConcatenatedUid(Object concatenatedUid) {
    return ((Pair) concatenatedUid).first;
  }
@@ -45,7 +43,7 @@ public static Object getChildTimelineUidFromConcatenatedU
   * @param concatenatedUid UID of a period in a concatenated timeline.
   * @return UID of the period in the child timeline.
*/ - @SuppressWarnings("nullness:return.type.incompatible") + @SuppressWarnings("nullness:return") public static Object getChildPeriodUidFromConcatenatedUid(Object concatenatedUid) { return ((Pair) concatenatedUid).second; } @@ -209,14 +207,14 @@ public final Window getWindow(int windowIndex, Window window, long defaultPositi } @Override - public final Period getPeriodByUid(Object uid, Period period) { - Object childUid = getChildTimelineUidFromConcatenatedUid(uid); - Object periodUid = getChildPeriodUidFromConcatenatedUid(uid); + public final Period getPeriodByUid(Object periodUid, Period period) { + Object childUid = getChildTimelineUidFromConcatenatedUid(periodUid); + Object childPeriodUid = getChildPeriodUidFromConcatenatedUid(periodUid); int childIndex = getChildIndexByChildUid(childUid); int firstWindowIndexInChild = getFirstWindowIndexByChildIndex(childIndex); - getTimelineByChildIndex(childIndex).getPeriodByUid(periodUid, period); + getTimelineByChildIndex(childIndex).getPeriodByUid(childPeriodUid, period); period.windowIndex += firstWindowIndexInChild; - period.uid = uid; + period.uid = periodUid; return period; } @@ -242,12 +240,12 @@ public final int getIndexOfPeriod(Object uid) { return C.INDEX_UNSET; } Object childUid = getChildTimelineUidFromConcatenatedUid(uid); - Object periodUid = getChildPeriodUidFromConcatenatedUid(uid); + Object childPeriodUid = getChildPeriodUidFromConcatenatedUid(uid); int childIndex = getChildIndexByChildUid(childUid); if (childIndex == C.INDEX_UNSET) { return C.INDEX_UNSET; } - int periodIndexInChild = getTimelineByChildIndex(childIndex).getIndexOfPeriod(periodUid); + int periodIndexInChild = getTimelineByChildIndex(childIndex).getIndexOfPeriod(childPeriodUid); return periodIndexInChild == C.INDEX_UNSET ? C.INDEX_UNSET : getFirstPeriodIndexByChildIndex(childIndex) + periodIndexInChild; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/AudioBecomingNoisyManager.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/AudioBecomingNoisyManager.java index 2a52a039d6..f8be0198c8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/AudioBecomingNoisyManager.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/AudioBecomingNoisyManager.java @@ -21,6 +21,7 @@ import android.content.IntentFilter; import android.media.AudioManager; import android.os.Handler; +import com.google.android.exoplayer2.util.Util; /* package */ final class AudioBecomingNoisyManager { @@ -46,8 +47,8 @@ public AudioBecomingNoisyManager(Context context, Handler eventHandler, EventLis */ public void setEnabled(boolean enabled) { if (enabled && !receiverRegistered) { - context.registerReceiver( - receiver, new IntentFilter(AudioManager.ACTION_AUDIO_BECOMING_NOISY)); + Util.registerReceiverNotExported( + context, receiver, new IntentFilter(AudioManager.ACTION_AUDIO_BECOMING_NOISY)); receiverRegistered = true; } else if (!enabled && receiverRegistered) { context.unregisterReceiver(receiver); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/AudioFocusManager.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/AudioFocusManager.java index 5aeca440ff..3615167744 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/AudioFocusManager.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/AudioFocusManager.java @@ -15,6 +15,9 @@ */ package com.google.android.exoplayer2; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static 
java.lang.annotation.ElementType.TYPE_USE; + import android.content.Context; import android.media.AudioFocusRequest; import android.media.AudioManager; @@ -30,6 +33,7 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; /** Manages requesting and responding to changes in audio focus. */ @@ -58,6 +62,7 @@ public interface PlayerControl { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ PLAYER_COMMAND_DO_NOT_PLAY, PLAYER_COMMAND_WAIT_FOR_CALLBACK, @@ -74,6 +79,7 @@ public interface PlayerControl { /** Audio focus state. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ AUDIO_FOCUS_STATE_NO_FOCUS, AUDIO_FOCUS_STATE_HAVE_FOCUS, @@ -90,6 +96,47 @@ public interface PlayerControl { /** Audio focus has been temporarily lost, but playback may continue with reduced volume. */ private static final int AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK = 3; + /** + * Audio focus types. One of {@link #AUDIOFOCUS_NONE}, {@link #AUDIOFOCUS_GAIN}, {@link + * #AUDIOFOCUS_GAIN_TRANSIENT}, {@link #AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK} or {@link + * #AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE}. + */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + AUDIOFOCUS_NONE, + AUDIOFOCUS_GAIN, + AUDIOFOCUS_GAIN_TRANSIENT, + AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK, + AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE + }) + private @interface AudioFocusGain {} + /** + * @see AudioManager#AUDIOFOCUS_NONE + */ + @SuppressWarnings("InlinedApi") + private static final int AUDIOFOCUS_NONE = AudioManager.AUDIOFOCUS_NONE; + /** + * @see AudioManager#AUDIOFOCUS_GAIN + */ + private static final int AUDIOFOCUS_GAIN = AudioManager.AUDIOFOCUS_GAIN; + /** + * @see AudioManager#AUDIOFOCUS_GAIN_TRANSIENT + */ + private static final int AUDIOFOCUS_GAIN_TRANSIENT = AudioManager.AUDIOFOCUS_GAIN_TRANSIENT; + /** + * @see AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK + */ + private static final int AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK = + AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK; + /** + * @see AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE + */ + @SuppressWarnings("InlinedApi") + private static final int AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE = + AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE; + private static final String TAG = "AudioFocusManager"; private static final float VOLUME_MULTIPLIER_DUCK = 0.2f; @@ -100,8 +147,8 @@ public interface PlayerControl { @Nullable private PlayerControl playerControl; @Nullable private AudioAttributes audioAttributes; - @AudioFocusState private int audioFocusState; - @C.AudioFocusGain private int focusGain; + private @AudioFocusState int audioFocusState; + private @AudioFocusGain int focusGainToRequest; private float volumeMultiplier = VOLUME_MULTIPLIER_DEFAULT; private @MonotonicNonNull AudioFocusRequest audioFocusRequest; @@ -116,7 +163,8 @@ public interface PlayerControl { */ public AudioFocusManager(Context context, Handler eventHandler, PlayerControl playerControl) { this.audioManager = - (AudioManager) context.getApplicationContext().getSystemService(Context.AUDIO_SERVICE); + checkNotNull( + (AudioManager) context.getApplicationContext().getSystemService(Context.AUDIO_SERVICE)); this.playerControl = playerControl; this.focusListener = new AudioFocusListener(eventHandler); this.audioFocusState = AUDIO_FOCUS_STATE_NO_FOCUS; @@ -139,9 +187,9 @@ public float 
getVolumeMultiplier() { public void setAudioAttributes(@Nullable AudioAttributes audioAttributes) { if (!Util.areEqual(this.audioAttributes, audioAttributes)) { this.audioAttributes = audioAttributes; - focusGain = convertAudioAttributesToFocusGain(audioAttributes); + focusGainToRequest = convertAudioAttributesToFocusGain(audioAttributes); Assertions.checkArgument( - focusGain == C.AUDIOFOCUS_GAIN || focusGain == C.AUDIOFOCUS_NONE, + focusGainToRequest == AUDIOFOCUS_GAIN || focusGainToRequest == AUDIOFOCUS_NONE, "Automatic handling of audio focus is only available for USAGE_MEDIA and USAGE_GAME."); } } @@ -153,10 +201,10 @@ public void setAudioAttributes(@Nullable AudioAttributes audioAttributes) { * @param playbackState The desired playback state. * @return A {@link PlayerCommand} to execute on the player. */ - @PlayerCommand - public int updateAudioFocus(boolean playWhenReady, @Player.State int playbackState) { - if (shouldAbandonAudioFocus(playbackState)) { - abandonAudioFocus(); + public @PlayerCommand int updateAudioFocus( + boolean playWhenReady, @Player.State int playbackState) { + if (shouldAbandonAudioFocusIfHeld(playbackState)) { + abandonAudioFocusIfHeld(); return playWhenReady ? PLAYER_COMMAND_PLAY_WHEN_READY : PLAYER_COMMAND_DO_NOT_PLAY; } return playWhenReady ? requestAudioFocus() : PLAYER_COMMAND_DO_NOT_PLAY; @@ -168,7 +216,7 @@ public int updateAudioFocus(boolean playWhenReady, @Player.State int playbackSta */ public void release() { playerControl = null; - abandonAudioFocus(); + abandonAudioFocusIfHeld(); } // Internal methods. @@ -178,12 +226,11 @@ public void release() { return focusListener; } - private boolean shouldAbandonAudioFocus(@Player.State int playbackState) { - return playbackState == Player.STATE_IDLE || focusGain != C.AUDIOFOCUS_GAIN; + private boolean shouldAbandonAudioFocusIfHeld(@Player.State int playbackState) { + return playbackState == Player.STATE_IDLE || focusGainToRequest != AUDIOFOCUS_GAIN; } - @PlayerCommand - private int requestAudioFocus() { + private @PlayerCommand int requestAudioFocus() { if (audioFocusState == AUDIO_FOCUS_STATE_HAVE_FOCUS) { return PLAYER_COMMAND_PLAY_WHEN_READY; } @@ -197,7 +244,7 @@ private int requestAudioFocus() { } } - private void abandonAudioFocus() { + private void abandonAudioFocusIfHeld() { if (audioFocusState == AUDIO_FOCUS_STATE_NO_FOCUS) { return; } @@ -212,8 +259,8 @@ private void abandonAudioFocus() { private int requestAudioFocusDefault() { return audioManager.requestAudioFocus( focusListener, - Util.getStreamTypeForAudioUsage(Assertions.checkNotNull(audioAttributes).usage), - focusGain); + Util.getStreamTypeForAudioUsage(checkNotNull(audioAttributes).usage), + focusGainToRequest); } @RequiresApi(26) @@ -221,13 +268,14 @@ private int requestAudioFocusV26() { if (audioFocusRequest == null || rebuildAudioFocusRequest) { AudioFocusRequest.Builder builder = audioFocusRequest == null - ? new AudioFocusRequest.Builder(focusGain) + ? 
new AudioFocusRequest.Builder(focusGainToRequest) : new AudioFocusRequest.Builder(audioFocusRequest); boolean willPauseWhenDucked = willPauseWhenDucked(); audioFocusRequest = builder - .setAudioAttributes(Assertions.checkNotNull(audioAttributes).getAudioAttributesV21()) + .setAudioAttributes( + checkNotNull(audioAttributes).getAudioAttributesV21().audioAttributes) .setWillPauseWhenDucked(willPauseWhenDucked) .setOnAudioFocusChangeListener(focusListener) .build(); @@ -249,7 +297,7 @@ private void abandonAudioFocusV26() { } private boolean willPauseWhenDucked() { - return audioAttributes != null && audioAttributes.contentType == C.CONTENT_TYPE_SPEECH; + return audioAttributes != null && audioAttributes.contentType == C.AUDIO_CONTENT_TYPE_SPEECH; } /** @@ -260,12 +308,12 @@ private boolean willPauseWhenDucked() { * @param audioAttributes The audio attributes associated with this focus request. * @return The type of audio focus gain that should be requested. */ - @C.AudioFocusGain - private static int convertAudioAttributesToFocusGain(@Nullable AudioAttributes audioAttributes) { + private static @AudioFocusGain int convertAudioAttributesToFocusGain( + @Nullable AudioAttributes audioAttributes) { if (audioAttributes == null) { // Don't handle audio focus. It may be either video only contents or developers // want to have more finer grained control. (e.g. adding audio focus listener) - return C.AUDIOFOCUS_NONE; + return AUDIOFOCUS_NONE; } switch (audioAttributes.usage) { @@ -273,13 +321,13 @@ private static int convertAudioAttributesToFocusGain(@Nullable AudioAttributes a // during the phone call when AUDIOFOCUS_GAIN_TRANSIENT is requested for that. // Don't request audio focus here. case C.USAGE_VOICE_COMMUNICATION_SIGNALLING: - return C.AUDIOFOCUS_NONE; + return AUDIOFOCUS_NONE; // Javadoc says 'AUDIOFOCUS_GAIN: Examples of uses of this focus gain are for music // playback, for a game or a video player' case C.USAGE_GAME: case C.USAGE_MEDIA: - return C.AUDIOFOCUS_GAIN; + return AUDIOFOCUS_GAIN; // Special usages: USAGE_UNKNOWN shouldn't be used. Request audio focus to prevent // multiple media playback happen at the same time. @@ -288,13 +336,13 @@ private static int convertAudioAttributesToFocusGain(@Nullable AudioAttributes a TAG, "Specify a proper usage in the audio attributes for audio focus" + " handling. Using AUDIOFOCUS_GAIN by default."); - return C.AUDIOFOCUS_GAIN; + return AUDIOFOCUS_GAIN; // Javadoc says 'AUDIOFOCUS_GAIN_TRANSIENT: An example is for playing an alarm, or // during a VoIP call' case C.USAGE_ALARM: case C.USAGE_VOICE_COMMUNICATION: - return C.AUDIOFOCUS_GAIN_TRANSIENT; + return AUDIOFOCUS_GAIN_TRANSIENT; // Javadoc says 'AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK: Examples are when playing // driving directions or notifications' @@ -306,28 +354,28 @@ private static int convertAudioAttributesToFocusGain(@Nullable AudioAttributes a case C.USAGE_NOTIFICATION_COMMUNICATION_REQUEST: case C.USAGE_NOTIFICATION_EVENT: case C.USAGE_NOTIFICATION_RINGTONE: - return C.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK; + return AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK; // Javadoc says 'AUDIOFOCUS_GAIN_EXCLUSIVE: This is typically used if you are doing // audio recording or speech recognition'. 
// Assistant is considered as both recording and notifying developer case C.USAGE_ASSISTANT: if (Util.SDK_INT >= 19) { - return C.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE; + return AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE; } else { - return C.AUDIOFOCUS_GAIN_TRANSIENT; + return AUDIOFOCUS_GAIN_TRANSIENT; } // Special usages: case C.USAGE_ASSISTANCE_ACCESSIBILITY: - if (audioAttributes.contentType == C.CONTENT_TYPE_SPEECH) { + if (audioAttributes.contentType == C.AUDIO_CONTENT_TYPE_SPEECH) { // Voice shouldn't be interrupted by other playback. - return C.AUDIOFOCUS_GAIN_TRANSIENT; + return AUDIOFOCUS_GAIN_TRANSIENT; } - return C.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK; + return AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK; default: Log.w(TAG, "Unidentified audio usage: " + audioAttributes.usage); - return C.AUDIOFOCUS_NONE; + return AUDIOFOCUS_NONE; } } @@ -358,7 +406,7 @@ private void handlePlatformAudioFocusChange(int focusChange) { return; case AudioManager.AUDIOFOCUS_LOSS: executePlayerCommand(PLAYER_COMMAND_DO_NOT_PLAY); - abandonAudioFocus(); + abandonAudioFocusIfHeld(); return; case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT: case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK: diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/BasePlayer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/BasePlayer.java index 2646cbc035..0632dff53f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/BasePlayer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/BasePlayer.java @@ -15,18 +15,102 @@ */ package com.google.android.exoplayer2; +import static androidx.annotation.VisibleForTesting.PROTECTED; +import static java.lang.Math.max; +import static java.lang.Math.min; + import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import java.util.List; /** Abstract base {@link Player} which implements common implementation independent methods. 
*/ public abstract class BasePlayer implements Player { protected final Timeline.Window window; - public BasePlayer() { + protected BasePlayer() { window = new Timeline.Window(); } + @Override + public final void setMediaItem(MediaItem mediaItem) { + setMediaItems(ImmutableList.of(mediaItem)); + } + + @Override + public final void setMediaItem(MediaItem mediaItem, long startPositionMs) { + setMediaItems(ImmutableList.of(mediaItem), /* startIndex= */ 0, startPositionMs); + } + + @Override + public final void setMediaItem(MediaItem mediaItem, boolean resetPosition) { + setMediaItems(ImmutableList.of(mediaItem), resetPosition); + } + + @Override + public final void setMediaItems(List mediaItems) { + setMediaItems(mediaItems, /* resetPosition= */ true); + } + + @Override + public final void addMediaItem(int index, MediaItem mediaItem) { + addMediaItems(index, ImmutableList.of(mediaItem)); + } + + @Override + public final void addMediaItem(MediaItem mediaItem) { + addMediaItems(ImmutableList.of(mediaItem)); + } + + @Override + public final void addMediaItems(List mediaItems) { + addMediaItems(/* index= */ Integer.MAX_VALUE, mediaItems); + } + + @Override + public final void moveMediaItem(int currentIndex, int newIndex) { + if (currentIndex != newIndex) { + moveMediaItems(/* fromIndex= */ currentIndex, /* toIndex= */ currentIndex + 1, newIndex); + } + } + + @Override + public final void removeMediaItem(int index) { + removeMediaItems(/* fromIndex= */ index, /* toIndex= */ index + 1); + } + + @Override + public final void clearMediaItems() { + removeMediaItems(/* fromIndex= */ 0, /* toIndex= */ Integer.MAX_VALUE); + } + + @Override + public final boolean isCommandAvailable(@Command int command) { + return getAvailableCommands().contains(command); + } + + /** + * {@inheritDoc} + * + *
<p>
BasePlayer and its descendants will return {@code true}. + */ + @Override + public final boolean canAdvertiseSession() { + return true; + } + + @Override + public final void play() { + setPlayWhenReady(true); + } + + @Override + public final void pause() { + setPlayWhenReady(false); + } + @Override public final boolean isPlaying() { return getPlaybackState() == Player.STATE_READY @@ -36,80 +120,254 @@ && getPlayWhenReady() @Override public final void seekToDefaultPosition() { - seekToDefaultPosition(getCurrentWindowIndex()); + seekToDefaultPositionInternal( + getCurrentMediaItemIndex(), Player.COMMAND_SEEK_TO_DEFAULT_POSITION); } @Override - public final void seekToDefaultPosition(int windowIndex) { - seekTo(windowIndex, /* positionMs= */ C.TIME_UNSET); + public final void seekToDefaultPosition(int mediaItemIndex) { + seekToDefaultPositionInternal(mediaItemIndex, Player.COMMAND_SEEK_TO_MEDIA_ITEM); } @Override - public final void seekTo(long positionMs) { - seekTo(getCurrentWindowIndex(), positionMs); + public final void seekBack() { + seekToOffset(-getSeekBackIncrement(), Player.COMMAND_SEEK_BACK); + } + + @Override + public final void seekForward() { + seekToOffset(getSeekForwardIncrement(), Player.COMMAND_SEEK_FORWARD); } + /** + * @deprecated Use {@link #hasPreviousMediaItem()} instead. + */ + @Deprecated @Override public final boolean hasPrevious() { - return getPreviousWindowIndex() != C.INDEX_UNSET; + return hasPreviousMediaItem(); + } + + /** + * @deprecated Use {@link #hasPreviousMediaItem()} instead. + */ + @Deprecated + @Override + public final boolean hasPreviousWindow() { + return hasPreviousMediaItem(); } + @Override + public final boolean hasPreviousMediaItem() { + return getPreviousMediaItemIndex() != C.INDEX_UNSET; + } + + /** + * @deprecated Use {@link #seekToPreviousMediaItem()} instead. + */ + @Deprecated @Override public final void previous() { - int previousWindowIndex = getPreviousWindowIndex(); - if (previousWindowIndex != C.INDEX_UNSET) { - seekToDefaultPosition(previousWindowIndex); + seekToPreviousMediaItem(); + } + + /** + * @deprecated Use {@link #seekToPreviousMediaItem()} instead. + */ + @Deprecated + @Override + public final void seekToPreviousWindow() { + seekToPreviousMediaItem(); + } + + @Override + public final void seekToPreviousMediaItem() { + seekToPreviousMediaItemInternal(Player.COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM); + } + + @Override + public final void seekToPrevious() { + Timeline timeline = getCurrentTimeline(); + if (timeline.isEmpty() || isPlayingAd()) { + return; + } + boolean hasPreviousMediaItem = hasPreviousMediaItem(); + if (isCurrentMediaItemLive() && !isCurrentMediaItemSeekable()) { + if (hasPreviousMediaItem) { + seekToPreviousMediaItemInternal(Player.COMMAND_SEEK_TO_PREVIOUS); + } + } else if (hasPreviousMediaItem && getCurrentPosition() <= getMaxSeekToPreviousPosition()) { + seekToPreviousMediaItemInternal(Player.COMMAND_SEEK_TO_PREVIOUS); + } else { + seekToCurrentItem(/* positionMs= */ 0, Player.COMMAND_SEEK_TO_PREVIOUS); } } + /** + * @deprecated Use {@link #hasNextMediaItem()} instead. + */ + @Deprecated @Override public final boolean hasNext() { - return getNextWindowIndex() != C.INDEX_UNSET; + return hasNextMediaItem(); + } + + /** + * @deprecated Use {@link #hasNextMediaItem()} instead. 
+ */ + @Deprecated + @Override + public final boolean hasNextWindow() { + return hasNextMediaItem(); } + @Override + public final boolean hasNextMediaItem() { + return getNextMediaItemIndex() != C.INDEX_UNSET; + } + + /** + * @deprecated Use {@link #seekToNextMediaItem()} instead. + */ + @Deprecated @Override public final void next() { - int nextWindowIndex = getNextWindowIndex(); - if (nextWindowIndex != C.INDEX_UNSET) { - seekToDefaultPosition(nextWindowIndex); + seekToNextMediaItem(); + } + + /** + * @deprecated Use {@link #seekToNextMediaItem()} instead. + */ + @Deprecated + @Override + public final void seekToNextWindow() { + seekToNextMediaItem(); + } + + @Override + public final void seekToNextMediaItem() { + seekToNextMediaItemInternal(Player.COMMAND_SEEK_TO_NEXT_MEDIA_ITEM); + } + + @Override + public final void seekToNext() { + Timeline timeline = getCurrentTimeline(); + if (timeline.isEmpty() || isPlayingAd()) { + return; + } + if (hasNextMediaItem()) { + seekToNextMediaItemInternal(Player.COMMAND_SEEK_TO_NEXT); + } else if (isCurrentMediaItemLive() && isCurrentMediaItemDynamic()) { + seekToDefaultPositionInternal(getCurrentMediaItemIndex(), Player.COMMAND_SEEK_TO_NEXT); } } @Override - public final void stop() { - stop(/* reset= */ false); + public final void seekTo(long positionMs) { + seekToCurrentItem(positionMs, Player.COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM); + } + + @Override + public final void seekTo(int mediaItemIndex, long positionMs) { + seekTo( + mediaItemIndex, + positionMs, + Player.COMMAND_SEEK_TO_MEDIA_ITEM, + /* isRepeatingCurrentItem= */ false); + } + + /** + * Seeks to a position in the specified {@link MediaItem}. + * + * @param mediaItemIndex The index of the {@link MediaItem}. + * @param positionMs The seek position in the specified {@link MediaItem} in milliseconds, or + * {@link C#TIME_UNSET} to seek to the media item's default position. + * @param seekCommand The {@link Player.Command} used to trigger the seek. + * @param isRepeatingCurrentItem Whether this seeks repeats the current item. + */ + @VisibleForTesting(otherwise = PROTECTED) + public abstract void seekTo( + int mediaItemIndex, + long positionMs, + @Player.Command int seekCommand, + boolean isRepeatingCurrentItem); + + @Override + public final void setPlaybackSpeed(float speed) { + setPlaybackParameters(getPlaybackParameters().withSpeed(speed)); + } + + /** + * @deprecated Use {@link #getCurrentMediaItemIndex()} instead. + */ + @Deprecated + @Override + public final int getCurrentWindowIndex() { + return getCurrentMediaItemIndex(); } + /** + * @deprecated Use {@link #getNextMediaItemIndex()} instead. + */ + @Deprecated @Override public final int getNextWindowIndex() { + return getNextMediaItemIndex(); + } + + @Override + public final int getNextMediaItemIndex() { Timeline timeline = getCurrentTimeline(); return timeline.isEmpty() ? C.INDEX_UNSET : timeline.getNextWindowIndex( - getCurrentWindowIndex(), getRepeatModeForNavigation(), getShuffleModeEnabled()); + getCurrentMediaItemIndex(), getRepeatModeForNavigation(), getShuffleModeEnabled()); } + /** + * @deprecated Use {@link #getPreviousMediaItemIndex()} instead. + */ + @Deprecated @Override public final int getPreviousWindowIndex() { + return getPreviousMediaItemIndex(); + } + + @Override + public final int getPreviousMediaItemIndex() { Timeline timeline = getCurrentTimeline(); return timeline.isEmpty() ? 
C.INDEX_UNSET : timeline.getPreviousWindowIndex( - getCurrentWindowIndex(), getRepeatModeForNavigation(), getShuffleModeEnabled()); + getCurrentMediaItemIndex(), getRepeatModeForNavigation(), getShuffleModeEnabled()); } @Override @Nullable - public final Object getCurrentTag() { + public final MediaItem getCurrentMediaItem() { Timeline timeline = getCurrentTimeline(); - return timeline.isEmpty() ? null : timeline.getWindow(getCurrentWindowIndex(), window).tag; + return timeline.isEmpty() + ? null + : timeline.getWindow(getCurrentMediaItemIndex(), window).mediaItem; + } + + @Override + public final int getMediaItemCount() { + return getCurrentTimeline().getWindowCount(); + } + + @Override + public final MediaItem getMediaItemAt(int index) { + return getCurrentTimeline().getWindow(index, window).mediaItem; } @Override @Nullable public final Object getCurrentManifest() { Timeline timeline = getCurrentTimeline(); - return timeline.isEmpty() ? null : timeline.getWindow(getCurrentWindowIndex(), window).manifest; + return timeline.isEmpty() + ? null + : timeline.getWindow(getCurrentMediaItemIndex(), window).manifest; } @Override @@ -121,22 +379,63 @@ public final int getBufferedPercentage() { : duration == 0 ? 100 : Util.constrainValue((int) ((position * 100) / duration), 0, 100); } + /** + * @deprecated Use {@link #isCurrentMediaItemDynamic()} instead. + */ + @Deprecated @Override public final boolean isCurrentWindowDynamic() { + return isCurrentMediaItemDynamic(); + } + + @Override + public final boolean isCurrentMediaItemDynamic() { Timeline timeline = getCurrentTimeline(); - return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isDynamic; + return !timeline.isEmpty() && timeline.getWindow(getCurrentMediaItemIndex(), window).isDynamic; } + /** + * @deprecated Use {@link #isCurrentMediaItemLive()} instead. + */ + @Deprecated @Override public final boolean isCurrentWindowLive() { + return isCurrentMediaItemLive(); + } + + @Override + public final boolean isCurrentMediaItemLive() { + Timeline timeline = getCurrentTimeline(); + return !timeline.isEmpty() && timeline.getWindow(getCurrentMediaItemIndex(), window).isLive(); + } + + @Override + public final long getCurrentLiveOffset() { Timeline timeline = getCurrentTimeline(); - return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isLive; + if (timeline.isEmpty()) { + return C.TIME_UNSET; + } + long windowStartTimeMs = + timeline.getWindow(getCurrentMediaItemIndex(), window).windowStartTimeMs; + if (windowStartTimeMs == C.TIME_UNSET) { + return C.TIME_UNSET; + } + return window.getCurrentUnixTimeMs() - window.windowStartTimeMs - getContentPosition(); } + /** + * @deprecated Use {@link #isCurrentMediaItemSeekable()} instead. + */ + @Deprecated @Override public final boolean isCurrentWindowSeekable() { + return isCurrentMediaItemSeekable(); + } + + @Override + public final boolean isCurrentMediaItemSeekable() { Timeline timeline = getCurrentTimeline(); - return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isSeekable; + return !timeline.isEmpty() && timeline.getWindow(getCurrentMediaItemIndex(), window).isSeekable; } @Override @@ -144,66 +443,66 @@ public final long getContentDuration() { Timeline timeline = getCurrentTimeline(); return timeline.isEmpty() ? 
C.TIME_UNSET - : timeline.getWindow(getCurrentWindowIndex(), window).getDurationMs(); + : timeline.getWindow(getCurrentMediaItemIndex(), window).getDurationMs(); } - @RepeatMode - private int getRepeatModeForNavigation() { + private @RepeatMode int getRepeatModeForNavigation() { @RepeatMode int repeatMode = getRepeatMode(); return repeatMode == REPEAT_MODE_ONE ? REPEAT_MODE_OFF : repeatMode; } - /** Holds a listener reference. */ - protected static final class ListenerHolder { - - /** - * The listener on which {link #invoke} will execute {@link ListenerInvocation listener - * invocations}. - */ - public final Player.EventListener listener; - - private boolean released; + private void seekToCurrentItem(long positionMs, @Player.Command int seekCommand) { + seekTo( + getCurrentMediaItemIndex(), positionMs, seekCommand, /* isRepeatingCurrentItem= */ false); + } - public ListenerHolder(Player.EventListener listener) { - this.listener = listener; + private void seekToOffset(long offsetMs, @Player.Command int seekCommand) { + long positionMs = getCurrentPosition() + offsetMs; + long durationMs = getDuration(); + if (durationMs != C.TIME_UNSET) { + positionMs = min(positionMs, durationMs); } + positionMs = max(positionMs, 0); + seekToCurrentItem(positionMs, seekCommand); + } - /** Prevents any further {@link ListenerInvocation} to be executed on {@link #listener}. */ - public void release() { - released = true; - } + private void seekToDefaultPositionInternal(int mediaItemIndex, @Player.Command int seekCommand) { + seekTo( + mediaItemIndex, + /* positionMs= */ C.TIME_UNSET, + seekCommand, + /* isRepeatingCurrentItem= */ false); + } - /** - * Executes the given {@link ListenerInvocation} on {@link #listener}. Does nothing if {@link - * #release} has been called on this instance. - */ - public void invoke(ListenerInvocation listenerInvocation) { - if (!released) { - listenerInvocation.invokeListener(listener); - } + private void seekToNextMediaItemInternal(@Player.Command int seekCommand) { + int nextMediaItemIndex = getNextMediaItemIndex(); + if (nextMediaItemIndex == C.INDEX_UNSET) { + return; } - - @Override - public boolean equals(@Nullable Object other) { - if (this == other) { - return true; - } - if (other == null || getClass() != other.getClass()) { - return false; - } - return listener.equals(((ListenerHolder) other).listener); + if (nextMediaItemIndex == getCurrentMediaItemIndex()) { + repeatCurrentMediaItem(seekCommand); + } else { + seekToDefaultPositionInternal(nextMediaItemIndex, seekCommand); } + } - @Override - public int hashCode() { - return listener.hashCode(); + private void seekToPreviousMediaItemInternal(@Player.Command int seekCommand) { + int previousMediaItemIndex = getPreviousMediaItemIndex(); + if (previousMediaItemIndex == C.INDEX_UNSET) { + return; + } + if (previousMediaItemIndex == getCurrentMediaItemIndex()) { + repeatCurrentMediaItem(seekCommand); + } else { + seekToDefaultPositionInternal(previousMediaItemIndex, seekCommand); } } - /** Parameterized invocation of a {@link Player.EventListener} method. */ - protected interface ListenerInvocation { - - /** Executes the invocation on the given {@link Player.EventListener}. 
*/ - void invokeListener(Player.EventListener listener); + private void repeatCurrentMediaItem(@Player.Command int seekCommand) { + seekTo( + getCurrentMediaItemIndex(), + /* positionMs= */ C.TIME_UNSET, + seekCommand, + /* isRepeatingCurrentItem= */ true); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/BaseRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/BaseRenderer.java index ee44681e55..8d0379e65b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/BaseRenderer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/BaseRenderer.java @@ -15,52 +15,53 @@ */ package com.google.android.exoplayer2; -import android.os.Looper; - +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.Math.max; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.analytics.PlayerId; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; -import com.google.android.exoplayer2.drm.DrmInitData; -import com.google.android.exoplayer2.drm.DrmSession; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.drm.ExoMediaCrypto; +import com.google.android.exoplayer2.decoder.DecoderInputBuffer.InsufficientCapacityException; import com.google.android.exoplayer2.source.SampleStream; +import com.google.android.exoplayer2.source.SampleStream.ReadDataResult; +import com.google.android.exoplayer2.source.SampleStream.ReadFlags; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.MediaClock; import com.google.android.exoplayer2.util.Util; import java.io.IOException; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * An abstract base class suitable for most {@link Renderer} implementations. - */ +/** An abstract base class suitable for most {@link Renderer} implementations. */ public abstract class BaseRenderer implements Renderer, RendererCapabilities { - private final int trackType; + private final @C.TrackType int trackType; private final FormatHolder formatHolder; - private RendererConfiguration configuration; + @Nullable private RendererConfiguration configuration; private int index; + private @MonotonicNonNull PlayerId playerId; private int state; - private SampleStream stream; - private Format[] streamFormats; + @Nullable private SampleStream stream; + @Nullable private Format[] streamFormats; private long streamOffsetUs; + private long lastResetPositionUs; private long readingPositionUs; private boolean streamIsFinal; private boolean throwRendererExceptionIsExecuting; /** - * @param trackType The track type that the renderer handles. One of the {@link C} - * {@code TRACK_TYPE_*} constants. + * @param trackType The track type that the renderer handles. One of the {@link C} {@code + * TRACK_TYPE_*} constants. 
*/ - public BaseRenderer(int trackType) { + public BaseRenderer(@C.TrackType int trackType) { this.trackType = trackType; formatHolder = new FormatHolder(); readingPositionUs = C.TIME_END_OF_SOURCE; } @Override - public final int getTrackType() { + public final @C.TrackType int getTrackType() { return trackType; } @@ -70,8 +71,9 @@ public final RendererCapabilities getCapabilities() { } @Override - public final void setIndex(int index) { + public final void init(int index, PlayerId playerId) { this.index = index; + this.playerId = playerId; } @Override @@ -86,15 +88,22 @@ public final int getState() { } @Override - public final void enable(RendererConfiguration configuration, Format[] formats, - SampleStream stream, long positionUs, boolean joining, long offsetUs) + public final void enable( + RendererConfiguration configuration, + Format[] formats, + SampleStream stream, + long positionUs, + boolean joining, + boolean mayRenderStartOfStream, + long startPositionUs, + long offsetUs) throws ExoPlaybackException { Assertions.checkState(state == STATE_DISABLED); this.configuration = configuration; state = STATE_ENABLED; - onEnabled(joining); - replaceStream(formats, stream, offsetUs); - onPositionReset(positionUs, joining); + onEnabled(joining, mayRenderStartOfStream); + replaceStream(formats, stream, startPositionUs, offsetUs); + resetPosition(positionUs, joining); } @Override @@ -105,14 +114,17 @@ public final void start() throws ExoPlaybackException { } @Override - public final void replaceStream(Format[] formats, SampleStream stream, long offsetUs) + public final void replaceStream( + Format[] formats, SampleStream stream, long startPositionUs, long offsetUs) throws ExoPlaybackException { Assertions.checkState(!streamIsFinal); this.stream = stream; - readingPositionUs = offsetUs; + if (readingPositionUs == C.TIME_END_OF_SOURCE) { + readingPositionUs = startPositionUs; + } streamFormats = formats; streamOffsetUs = offsetUs; - onStreamChanged(formats, offsetUs); + onStreamChanged(formats, startPositionUs, offsetUs); } @Override @@ -143,18 +155,23 @@ public final boolean isCurrentStreamFinal() { @Override public final void maybeThrowStreamError() throws IOException { - stream.maybeThrowError(); + Assertions.checkNotNull(stream).maybeThrowError(); } @Override public final void resetPosition(long positionUs) throws ExoPlaybackException { + resetPosition(positionUs, /* joining= */ false); + } + + private void resetPosition(long positionUs, boolean joining) throws ExoPlaybackException { streamIsFinal = false; + lastResetPositionUs = positionUs; readingPositionUs = positionUs; - onPositionReset(positionUs, false); + onPositionReset(positionUs, joining); } @Override - public final void stop() throws ExoPlaybackException { + public final void stop() { Assertions.checkState(state == STATE_STARTED); state = STATE_ENABLED; onStopped(); @@ -181,15 +198,15 @@ public final void reset() { // RendererCapabilities implementation. @Override - @AdaptiveSupport - public int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException { + public @AdaptiveSupport int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException { return ADAPTIVE_NOT_SUPPORTED; } // PlayerMessage.CCTarget implementation. @Override - public void handleMessage(int what, @Nullable Object object) throws ExoPlaybackException { + public void handleMessage(@MessageType int messageType, @Nullable Object message) + throws ExoPlaybackException { // Do nothing. 
} @@ -197,42 +214,46 @@ public void handleMessage(int what, @Nullable Object object) throws ExoPlaybackE /** * Called when the renderer is enabled. - *
<p>
- * The default implementation is a no-op. + * + *
<p>
The default implementation is a no-op. * * @param joining Whether this renderer is being enabled to join an ongoing playback. + * @param mayRenderStartOfStream Whether this renderer is allowed to render the start of the + * stream even if the state is not {@link #STATE_STARTED} yet. * @throws ExoPlaybackException If an error occurs. */ - protected void onEnabled(boolean joining) throws ExoPlaybackException { + protected void onEnabled(boolean joining, boolean mayRenderStartOfStream) + throws ExoPlaybackException { // Do nothing. } /** * Called when the renderer's stream has changed. This occurs when the renderer is enabled after - * {@link #onEnabled(boolean)} has been called, and also when the stream has been replaced whilst - * the renderer is enabled or started. - *
<p>
- * The default implementation is a no-op. + * {@link #onEnabled(boolean, boolean)} has been called, and also when the stream has been + * replaced whilst the renderer is enabled or started. + * + *
<p>
The default implementation is a no-op. * * @param formats The enabled formats. - * @param offsetUs The offset that will be added to the timestamps of buffers read via - * {@link #readSource(FormatHolder, DecoderInputBuffer, boolean)} so that decoder input - * buffers have monotonically increasing timestamps. + * @param startPositionUs The start position of the new stream in renderer time (microseconds). + * @param offsetUs The offset that will be added to the timestamps of buffers read via {@link + * #readSource} so that decoder input buffers have monotonically increasing timestamps. * @throws ExoPlaybackException If an error occurs. */ - protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException { + protected void onStreamChanged(Format[] formats, long startPositionUs, long offsetUs) + throws ExoPlaybackException { // Do nothing. } /** - * Called when the position is reset. This occurs when the renderer is enabled after - * {@link #onStreamChanged(Format[], long)} has been called, and also when a position - * discontinuity is encountered. - *
<p>
- * After a position reset, the renderer's {@link SampleStream} is guaranteed to provide samples + * Called when the position is reset. This occurs when the renderer is enabled after {@link + * #onStreamChanged(Format[], long, long)} has been called, and also when a position discontinuity + * is encountered. + * + *
<p>
After a position reset, the renderer's {@link SampleStream} is guaranteed to provide samples * starting from a key frame. - *
<p>
- * The default implementation is a no-op. + * + *
<p>
The default implementation is a no-op. * * @param positionUs The new playback position in microseconds. * @param joining Whether this renderer is being enabled to join an ongoing playback. @@ -244,8 +265,8 @@ protected void onPositionReset(long positionUs, boolean joining) throws ExoPlayb /** * Called when the renderer is started. - *
<p>
- * The default implementation is a no-op. + * + *
<p>
The default implementation is a no-op. * * @throws ExoPlaybackException If an error occurs. */ @@ -255,19 +276,17 @@ protected void onStarted() throws ExoPlaybackException { /** * Called when the renderer is stopped. - *
<p>
- * The default implementation is a no-op. * - * @throws ExoPlaybackException If an error occurs. + *
<p>
The default implementation is a no-op. */ - protected void onStopped() throws ExoPlaybackException { + protected void onStopped() { // Do nothing. } /** * Called when the renderer is disabled. - *
<p>
- * The default implementation is a no-op. + * + *
<p>
The default implementation is a no-op. */ protected void onDisabled() { // Do nothing. @@ -284,70 +303,91 @@ protected void onReset() { // Methods to be called by subclasses. + /** + * Returns the position passed to the most recent call to {@link #enable} or {@link + * #resetPosition}. + */ + protected final long getLastResetPositionUs() { + return lastResetPositionUs; + } + /** Returns a clear {@link FormatHolder}. */ protected final FormatHolder getFormatHolder() { formatHolder.clear(); return formatHolder; } - /** Returns the formats of the currently enabled stream. */ + /** + * Returns the formats of the currently enabled stream. + * + *
<p>
This method may be called when the renderer is in the following states: {@link + * #STATE_ENABLED}, {@link #STATE_STARTED}. + */ protected final Format[] getStreamFormats() { - return streamFormats; + return Assertions.checkNotNull(streamFormats); } /** * Returns the configuration set when the renderer was most recently enabled. + * + *
<p>
This method may be called when the renderer is in the following states: {@link + * #STATE_ENABLED}, {@link #STATE_STARTED}. */ protected final RendererConfiguration getConfiguration() { - return configuration; - } - - /** Returns a {@link DrmSession} ready for assignment, handling resource management. */ - @Nullable - protected final DrmSession getUpdatedSourceDrmSession( - @Nullable Format oldFormat, - Format newFormat, - @Nullable DrmSessionManager drmSessionManager, - @Nullable DrmSession existingSourceSession) - throws ExoPlaybackException { - boolean drmInitDataChanged = - !Util.areEqual(newFormat.drmInitData, oldFormat == null ? null : oldFormat.drmInitData); - if (!drmInitDataChanged) { - return existingSourceSession; - } - @Nullable DrmSession newSourceDrmSession = null; - if (newFormat.drmInitData != null) { - if (drmSessionManager == null) { - throw createRendererException( - new IllegalStateException("Media requires a DrmSessionManager"), newFormat); - } - newSourceDrmSession = - drmSessionManager.acquireSession( - Assertions.checkNotNull(Looper.myLooper()), newFormat.drmInitData); - } - if (existingSourceSession != null) { - existingSourceSession.release(); - } - return newSourceDrmSession; + return Assertions.checkNotNull(configuration); } /** * Returns the index of the renderer within the player. + * + *
<p>
Must only be used after the renderer has been initialized by the player. */ protected final int getIndex() { return index; } + /** + * Returns the {@link PlayerId} of the player using this renderer. + * + *
<p>
Must only be used after the renderer has been initialized by the player. + */ + protected final PlayerId getPlayerId() { + return checkNotNull(playerId); + } + + /** + * Creates an {@link ExoPlaybackException} of type {@link ExoPlaybackException#TYPE_RENDERER} for + * this renderer. + * + * @param cause The cause of the exception. + * @param format The current format used by the renderer. May be null. + * @param errorCode A {@link PlaybackException.ErrorCode} to identify the cause of the playback + * failure. + * @return The created instance, in which {@link ExoPlaybackException#isRecoverable} is {@code + * false}. + */ + protected final ExoPlaybackException createRendererException( + Throwable cause, @Nullable Format format, @PlaybackException.ErrorCode int errorCode) { + return createRendererException(cause, format, /* isRecoverable= */ false, errorCode); + } + /** * Creates an {@link ExoPlaybackException} of type {@link ExoPlaybackException#TYPE_RENDERER} for * this renderer. * * @param cause The cause of the exception. * @param format The current format used by the renderer. May be null. + * @param isRecoverable If the error is recoverable by disabling and re-enabling the renderer. + * @param errorCode A {@link PlaybackException.ErrorCode} to identify the cause of the playback + * failure. + * @return The created instance. */ protected final ExoPlaybackException createRendererException( - Exception cause, @Nullable Format format) { - @FormatSupport int formatSupport = RendererCapabilities.FORMAT_HANDLED; + Throwable cause, + @Nullable Format format, + boolean isRecoverable, + @PlaybackException.ErrorCode int errorCode) { + @C.FormatSupport int formatSupport = C.FORMAT_HANDLED; if (format != null && !throwRendererExceptionIsExecuting) { // Prevent recursive re-entry from subclass supportsFormat implementations. throwRendererExceptionIsExecuting = true; @@ -359,7 +399,8 @@ protected final ExoPlaybackException createRendererException( throwRendererExceptionIsExecuting = false; } } - return ExoPlaybackException.createForRenderer(cause, getIndex(), format, formatSupport); + return ExoPlaybackException.createForRenderer( + cause, getName(), getIndex(), format, formatSupport, isRecoverable, errorCode); } /** @@ -367,30 +408,38 @@ protected final ExoPlaybackException createRendererException( * {@link C#RESULT_BUFFER_READ} is only returned if {@link #setCurrentStreamFinal()} has been * called. {@link C#RESULT_NOTHING_READ} is returned otherwise. * + *
<p>
This method may be called when the renderer is in the following states: {@link + * #STATE_ENABLED}, {@link #STATE_STARTED}. + * * @param formatHolder A {@link FormatHolder} to populate in the case of reading a format. * @param buffer A {@link DecoderInputBuffer} to populate in the case of reading a sample or the * end of the stream. If the end of the stream has been reached, the {@link * C#BUFFER_FLAG_END_OF_STREAM} flag will be set on the buffer. - * @param formatRequired Whether the caller requires that the format of the stream be read even if - * it's not changing. A sample will never be read if set to true, however it is still possible - * for the end of stream or nothing to be read. - * @return The result, which can be {@link C#RESULT_NOTHING_READ}, {@link C#RESULT_FORMAT_READ} or - * {@link C#RESULT_BUFFER_READ}. + * @param readFlags Flags controlling the behavior of this read operation. + * @return The {@link ReadDataResult result} of the read operation. + * @throws InsufficientCapacityException If the {@code buffer} has insufficient capacity to hold + * the data of a sample being read. The buffer {@link DecoderInputBuffer#timeUs timestamp} and + * flags are populated if this exception is thrown, but the read position is not advanced. */ - protected final int readSource( - FormatHolder formatHolder, DecoderInputBuffer buffer, boolean formatRequired) { - int result = stream.readData(formatHolder, buffer, formatRequired); + protected final @ReadDataResult int readSource( + FormatHolder formatHolder, DecoderInputBuffer buffer, @ReadFlags int readFlags) { + @ReadDataResult + int result = Assertions.checkNotNull(stream).readData(formatHolder, buffer, readFlags); if (result == C.RESULT_BUFFER_READ) { if (buffer.isEndOfStream()) { readingPositionUs = C.TIME_END_OF_SOURCE; return streamIsFinal ? C.RESULT_BUFFER_READ : C.RESULT_NOTHING_READ; } buffer.timeUs += streamOffsetUs; - readingPositionUs = Math.max(readingPositionUs, buffer.timeUs); + readingPositionUs = max(readingPositionUs, buffer.timeUs); } else if (result == C.RESULT_FORMAT_READ) { - Format format = formatHolder.format; + Format format = Assertions.checkNotNull(formatHolder.format); if (format.subsampleOffsetUs != Format.OFFSET_SAMPLE_RELATIVE) { - format = format.copyWithSubsampleOffsetUs(format.subsampleOffsetUs + streamOffsetUs); + format = + format + .buildUpon() + .setSubsampleOffsetUs(format.subsampleOffsetUs + streamOffsetUs) + .build(); formatHolder.format = format; } } @@ -401,39 +450,23 @@ protected final int readSource( * Attempts to skip to the keyframe before the specified position, or to the end of the stream if * {@code positionUs} is beyond it. * + *
<p>
This method may be called when the renderer is in the following states: {@link + * #STATE_ENABLED}, {@link #STATE_STARTED}. + * * @param positionUs The position in microseconds. * @return The number of samples that were skipped. */ protected int skipSource(long positionUs) { - return stream.skipData(positionUs - streamOffsetUs); + return Assertions.checkNotNull(stream).skipData(positionUs - streamOffsetUs); } /** * Returns whether the upstream source is ready. - */ - protected final boolean isSourceReady() { - return hasReadStreamToEnd() ? streamIsFinal : stream.isReady(); - } - - /** - * Returns whether {@code drmSessionManager} supports the specified {@code drmInitData}, or true - * if {@code drmInitData} is null. * - * @param drmSessionManager The drm session manager. - * @param drmInitData {@link DrmInitData} of the format to check for support. - * @return Whether {@code drmSessionManager} supports the specified {@code drmInitData}, or - * true if {@code drmInitData} is null. + *
<p>
This method may be called when the renderer is in the following states: {@link + * #STATE_ENABLED}, {@link #STATE_STARTED}. */ - protected static boolean supportsFormatDrm(@Nullable DrmSessionManager drmSessionManager, - @Nullable DrmInitData drmInitData) { - if (drmInitData == null) { - // Content is unencrypted. - return true; - } else if (drmSessionManager == null) { - // Content is encrypted, but no drm session manager is available. - return false; - } - return drmSessionManager.canAcquireSession(drmInitData); + protected final boolean isSourceReady() { + return hasReadStreamToEnd() ? streamIsFinal : Assertions.checkNotNull(stream).isReady(); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/BundleListRetriever.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/BundleListRetriever.java new file mode 100644 index 0000000000..4deaf43a8f --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/BundleListRetriever.java @@ -0,0 +1,125 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import android.os.Binder; +import android.os.Bundle; +import android.os.IBinder; +import android.os.Parcel; +import android.os.RemoteException; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import java.util.List; + +/** + * A {@link Binder} to transfer a list of {@link Bundle Bundles} across processes by splitting the + * list into multiple transactions. + * + *
<p>
Note: Using this class causes synchronous binder calls in the opposite direction regardless of + * the "oneway" property. + * + *
<p>
Example usage: + * + *
<pre>{@code
+ * // Sender
+ * List<Bundle> list = ...;
+ * IBinder binder = new BundleListRetriever(list);
+ * Bundle bundle = new Bundle();
+ * bundle.putBinder("list", binder);
+ *
+ * // Receiver
+ * Bundle bundle = ...; // Received from the sender
+ * IBinder binder = bundle.getBinder("list");
+ * List<Bundle> list = BundleListRetriever.getList(binder);
+ * }</pre>
+ */ +public final class BundleListRetriever extends Binder { + + // Soft limit of an IPC buffer size + private static final int SUGGESTED_MAX_IPC_SIZE = + Util.SDK_INT >= 30 ? IBinder.getSuggestedMaxIpcSizeBytes() : 64 * 1024; + + private static final int REPLY_END_OF_LIST = 0; + private static final int REPLY_CONTINUE = 1; + private static final int REPLY_BREAK = 2; + + private final ImmutableList list; + + /** Creates a {@link Binder} to send a list of {@link Bundle Bundles} to another process. */ + public BundleListRetriever(List list) { + this.list = ImmutableList.copyOf(list); + } + + @Override + protected boolean onTransact(int code, Parcel data, @Nullable Parcel reply, int flags) + throws RemoteException { + if (code != FIRST_CALL_TRANSACTION) { + return super.onTransact(code, data, reply, flags); + } + + if (reply == null) { + return false; + } + + int count = list.size(); + int index = data.readInt(); + while (index < count && reply.dataSize() < SUGGESTED_MAX_IPC_SIZE) { + reply.writeInt(REPLY_CONTINUE); + reply.writeBundle(list.get(index)); + index++; + } + reply.writeInt(index < count ? REPLY_BREAK : REPLY_END_OF_LIST); + return true; + } + + /** + * Gets a list of {@link Bundle Bundles} from a {@link BundleListRetriever}. + * + * @param binder A binder interface backed by {@link BundleListRetriever}. + * @return The list of {@link Bundle Bundles}. + */ + public static ImmutableList getList(IBinder binder) { + ImmutableList.Builder builder = ImmutableList.builder(); + + int index = 0; + int replyCode = REPLY_CONTINUE; + + while (replyCode != REPLY_END_OF_LIST) { + Parcel data = Parcel.obtain(); + Parcel reply = Parcel.obtain(); + try { + data.writeInt(index); + try { + binder.transact(FIRST_CALL_TRANSACTION, data, reply, /* flags= */ 0); + } catch (RemoteException e) { + throw new RuntimeException(e); + } + while ((replyCode = reply.readInt()) == REPLY_CONTINUE) { + builder.add(checkNotNull(reply.readBundle())); + index++; + } + } finally { + reply.recycle(); + data.recycle(); + } + } + + return builder.build(); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/Bundleable.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/Bundleable.java new file mode 100644 index 0000000000..29dae2e50e --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/Bundleable.java @@ -0,0 +1,52 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import android.os.Bundle; + +/** + * Interface for classes whose instance can be stored in a {@link Bundle} by {@link #toBundle()} and + * can be restored from the {@link Bundle} by using the static {@code CREATOR} field that implements + * {@link Bundleable.Creator}. + * + *
<p>
For example, a {@link Bundleable} class {@code Foo} supports the following: + * + *
<pre>{@code
+ * Foo foo = ...;
+ * Bundle fooBundle = foo.toBundle();
+ * Foo restoredFoo = Foo.CREATOR.fromBundle(fooBundle);
+ * assertThat(restoredFoo).isEqualTo(foo);
+ * }</pre>
+ */ +public interface Bundleable { + + /** Returns a {@link Bundle} representing the information stored in this object. */ + Bundle toBundle(); + + /** Interface for the static {@code CREATOR} field of {@link Bundleable} classes. */ + interface Creator<T extends Bundleable> { + + /** + * Restores a {@link Bundleable} instance from a {@link Bundle} produced by {@link + * Bundleable#toBundle()}. + * + *
<p>
It guarantees the compatibility of {@link Bundle} representations produced by different + * versions of {@link Bundleable#toBundle()} by providing best default values for missing + * fields. It throws an exception if any essential fields are missing. + */ + T fromBundle(Bundle bundle); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/C.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/C.java index 3eee0a1891..30e645cca7 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/C.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/C.java @@ -15,30 +15,34 @@ */ package com.google.android.exoplayer2; -import android.annotation.TargetApi; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.annotation.SuppressLint; import android.content.Context; import android.media.AudioAttributes; import android.media.AudioFormat; import android.media.AudioManager; import android.media.MediaCodec; +import android.media.MediaCrypto; import android.media.MediaFormat; +import android.net.Uri; import android.view.Surface; import androidx.annotation.IntDef; -import com.google.android.exoplayer2.PlayerMessage.Target; -import com.google.android.exoplayer2.audio.AuxEffectInfo; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; -import com.google.android.exoplayer2.video.SimpleDecoderVideoRenderer; -import com.google.android.exoplayer2.video.VideoDecoderOutputBufferRenderer; -import com.google.android.exoplayer2.video.VideoFrameMetadataListener; -import com.google.android.exoplayer2.video.spherical.CameraMotionListener; +import com.google.errorprone.annotations.InlineMe; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.UUID; -/** - * Defines constants used by the library. - */ +/** Defines constants used by the library. */ @SuppressWarnings("InlinedApi") public final class C { @@ -56,19 +60,19 @@ private C() {} */ public static final long TIME_UNSET = Long.MIN_VALUE + 1; - /** - * Represents an unset or unknown index. - */ + /** Represents an unset or unknown index. */ public static final int INDEX_UNSET = -1; - /** - * Represents an unset or unknown position. - */ + /** Represents an unset or unknown position. */ public static final int POSITION_UNSET = -1; - /** - * Represents an unset or unknown length. - */ + /** Represents an unset or unknown rate. */ + public static final float RATE_UNSET = -Float.MAX_VALUE; + + /** Represents an unset or unknown integer rate. */ + public static final int RATE_UNSET_INT = Integer.MIN_VALUE + 1; + + /** Represents an unset or unknown length. */ public static final int LENGTH_UNSET = -1; /** Represents an unset or unknown percentage. */ @@ -80,9 +84,7 @@ private C() {} /** The number of microseconds in one second. */ public static final long MICROS_PER_SECOND = 1000000L; - /** - * The number of nanoseconds in one second. - */ + /** The number of nanoseconds in one second. */ public static final long NANOS_PER_SECOND = 1000000000L; /** The number of bits per byte. 
*/ @@ -92,33 +94,71 @@ private C() {} public static final int BYTES_PER_FLOAT = 4; /** - * The name of the ASCII charset. + * @deprecated Use {@link java.nio.charset.StandardCharsets} or {@link + * com.google.common.base.Charsets} instead. */ - public static final String ASCII_NAME = "US-ASCII"; + @Deprecated public static final String ASCII_NAME = "US-ASCII"; /** - * The name of the UTF-8 charset. + * @deprecated Use {@link java.nio.charset.StandardCharsets} or {@link + * com.google.common.base.Charsets} instead. */ - public static final String UTF8_NAME = "UTF-8"; + @Deprecated public static final String UTF8_NAME = "UTF-8"; - /** The name of the ISO-8859-1 charset. */ - public static final String ISO88591_NAME = "ISO-8859-1"; - - /** The name of the UTF-16 charset. */ - public static final String UTF16_NAME = "UTF-16"; + /** + * @deprecated Use {@link java.nio.charset.StandardCharsets} or {@link + * com.google.common.base.Charsets} instead. + */ + @Deprecated public static final String ISO88591_NAME = "ISO-8859-1"; - /** The name of the UTF-16 little-endian charset. */ - public static final String UTF16LE_NAME = "UTF-16LE"; + /** + * @deprecated Use {@link java.nio.charset.StandardCharsets} or {@link + * com.google.common.base.Charsets} instead. + */ + @Deprecated public static final String UTF16_NAME = "UTF-16"; /** - * The name of the serif font family. + * @deprecated Use {@link java.nio.charset.StandardCharsets} or {@link + * com.google.common.base.Charsets} instead. */ + @Deprecated public static final String UTF16LE_NAME = "UTF-16LE"; + + /** The name of the serif font family. */ public static final String SERIF_NAME = "serif"; + /** The name of the sans-serif font family. */ + public static final String SANS_SERIF_NAME = "sans-serif"; + + /** The {@link Uri#getScheme() URI scheme} used for content with server side ad insertion. */ + public static final String SSAI_SCHEME = "ssai"; + /** - * The name of the sans-serif font family. + * Types of crypto implementation. May be one of {@link #CRYPTO_TYPE_NONE}, {@link + * #CRYPTO_TYPE_UNSUPPORTED} or {@link #CRYPTO_TYPE_FRAMEWORK}. May also be an app-defined value + * (see {@link #CRYPTO_TYPE_CUSTOM_BASE}). */ - public static final String SANS_SERIF_NAME = "sans-serif"; + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef( + open = true, + value = { + CRYPTO_TYPE_UNSUPPORTED, + CRYPTO_TYPE_NONE, + CRYPTO_TYPE_FRAMEWORK, + }) + public @interface CryptoType {} + /** No crypto. */ + public static final int CRYPTO_TYPE_NONE = 0; + /** An unsupported crypto type. */ + public static final int CRYPTO_TYPE_UNSUPPORTED = 1; + /** Framework crypto in which a {@link MediaCodec} is configured with a {@link MediaCrypto}. */ + public static final int CRYPTO_TYPE_FRAMEWORK = 2; + /** + * Applications or extensions may define custom {@code CRYPTO_TYPE_*} constants greater than or + * equal to this value. + */ + public static final int CRYPTO_TYPE_CUSTOM_BASE = 10000; /** * Crypto modes for a codec. One of {@link #CRYPTO_MODE_UNENCRYPTED}, {@link #CRYPTO_MODE_AES_CTR} @@ -126,6 +166,7 @@ private C() {} */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({CRYPTO_MODE_UNENCRYPTED, CRYPTO_MODE_AES_CTR, CRYPTO_MODE_AES_CBC}) public @interface CryptoMode {} /** @@ -142,8 +183,8 @@ private C() {} public static final int CRYPTO_MODE_AES_CBC = MediaCodec.CRYPTO_MODE_AES_CBC; /** - * Represents an unset {@link android.media.AudioTrack} session identifier. 
Equal to - * {@link AudioManager#AUDIO_SESSION_ID_GENERATE}. + * Represents an unset {@link android.media.AudioTrack} session identifier. Equal to {@link + * AudioManager#AUDIO_SESSION_ID_GENERATE}. */ public static final int AUDIO_SESSION_ID_UNSET = AudioManager.AUDIO_SESSION_ID_GENERATE; @@ -153,10 +194,11 @@ private C() {} * #ENCODING_PCM_16BIT_BIG_ENDIAN}, {@link #ENCODING_PCM_24BIT}, {@link #ENCODING_PCM_32BIT}, * {@link #ENCODING_PCM_FLOAT}, {@link #ENCODING_MP3}, {@link #ENCODING_AC3}, {@link * #ENCODING_E_AC3}, {@link #ENCODING_E_AC3_JOC}, {@link #ENCODING_AC4}, {@link #ENCODING_DTS}, - * {@link #ENCODING_DTS_HD} or {@link #ENCODING_DOLBY_TRUEHD}. + * {@link #ENCODING_DTS_HD}, {@link #ENCODING_DOLBY_TRUEHD} or {@link #ENCODING_OPUS}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ Format.NO_VALUE, ENCODING_INVALID, @@ -167,13 +209,20 @@ private C() {} ENCODING_PCM_32BIT, ENCODING_PCM_FLOAT, ENCODING_MP3, + ENCODING_AAC_LC, + ENCODING_AAC_HE_V1, + ENCODING_AAC_HE_V2, + ENCODING_AAC_XHE, + ENCODING_AAC_ELD, + ENCODING_AAC_ER_BSAC, ENCODING_AC3, ENCODING_E_AC3, ENCODING_E_AC3_JOC, ENCODING_AC4, ENCODING_DTS, ENCODING_DTS_HD, - ENCODING_DOLBY_TRUEHD + ENCODING_DOLBY_TRUEHD, + ENCODING_OPUS, }) public @interface Encoding {} @@ -185,6 +234,7 @@ private C() {} */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ Format.NO_VALUE, ENCODING_INVALID, @@ -196,11 +246,17 @@ private C() {} ENCODING_PCM_FLOAT }) public @interface PcmEncoding {} - /** @see AudioFormat#ENCODING_INVALID */ + /** + * @see AudioFormat#ENCODING_INVALID + */ public static final int ENCODING_INVALID = AudioFormat.ENCODING_INVALID; - /** @see AudioFormat#ENCODING_PCM_8BIT */ + /** + * @see AudioFormat#ENCODING_PCM_8BIT + */ public static final int ENCODING_PCM_8BIT = AudioFormat.ENCODING_PCM_8BIT; - /** @see AudioFormat#ENCODING_PCM_16BIT */ + /** + * @see AudioFormat#ENCODING_PCM_16BIT + */ public static final int ENCODING_PCM_16BIT = AudioFormat.ENCODING_PCM_16BIT; /** Like {@link #ENCODING_PCM_16BIT}, but with the bytes in big endian order. */ public static final int ENCODING_PCM_16BIT_BIG_ENDIAN = 0x10000000; @@ -208,33 +264,99 @@ private C() {} public static final int ENCODING_PCM_24BIT = 0x20000000; /** PCM encoding with 32 bits per sample. */ public static final int ENCODING_PCM_32BIT = 0x30000000; - /** @see AudioFormat#ENCODING_PCM_FLOAT */ + /** + * @see AudioFormat#ENCODING_PCM_FLOAT + */ public static final int ENCODING_PCM_FLOAT = AudioFormat.ENCODING_PCM_FLOAT; - /** @see AudioFormat#ENCODING_MP3 */ + /** + * @see AudioFormat#ENCODING_MP3 + */ public static final int ENCODING_MP3 = AudioFormat.ENCODING_MP3; - /** @see AudioFormat#ENCODING_AC3 */ + /** + * @see AudioFormat#ENCODING_AAC_LC + */ + public static final int ENCODING_AAC_LC = AudioFormat.ENCODING_AAC_LC; + /** + * @see AudioFormat#ENCODING_AAC_HE_V1 + */ + public static final int ENCODING_AAC_HE_V1 = AudioFormat.ENCODING_AAC_HE_V1; + /** + * @see AudioFormat#ENCODING_AAC_HE_V2 + */ + public static final int ENCODING_AAC_HE_V2 = AudioFormat.ENCODING_AAC_HE_V2; + /** + * @see AudioFormat#ENCODING_AAC_XHE + */ + public static final int ENCODING_AAC_XHE = AudioFormat.ENCODING_AAC_XHE; + /** + * @see AudioFormat#ENCODING_AAC_ELD + */ + public static final int ENCODING_AAC_ELD = AudioFormat.ENCODING_AAC_ELD; + /** AAC Error Resilient Bit-Sliced Arithmetic Coding. 
*/ + public static final int ENCODING_AAC_ER_BSAC = 0x40000000; + /** + * @see AudioFormat#ENCODING_AC3 + */ public static final int ENCODING_AC3 = AudioFormat.ENCODING_AC3; - /** @see AudioFormat#ENCODING_E_AC3 */ + /** + * @see AudioFormat#ENCODING_E_AC3 + */ public static final int ENCODING_E_AC3 = AudioFormat.ENCODING_E_AC3; - /** @see AudioFormat#ENCODING_E_AC3_JOC */ + /** + * @see AudioFormat#ENCODING_E_AC3_JOC + */ public static final int ENCODING_E_AC3_JOC = AudioFormat.ENCODING_E_AC3_JOC; - /** @see AudioFormat#ENCODING_AC4 */ + /** + * @see AudioFormat#ENCODING_AC4 + */ public static final int ENCODING_AC4 = AudioFormat.ENCODING_AC4; - /** @see AudioFormat#ENCODING_DTS */ + /** + * @see AudioFormat#ENCODING_DTS + */ public static final int ENCODING_DTS = AudioFormat.ENCODING_DTS; - /** @see AudioFormat#ENCODING_DTS_HD */ + /** + * @see AudioFormat#ENCODING_DTS_HD + */ public static final int ENCODING_DTS_HD = AudioFormat.ENCODING_DTS_HD; - /** @see AudioFormat#ENCODING_DOLBY_TRUEHD */ + /** + * @see AudioFormat#ENCODING_DOLBY_TRUEHD + */ public static final int ENCODING_DOLBY_TRUEHD = AudioFormat.ENCODING_DOLBY_TRUEHD; + /** + * @see AudioFormat#ENCODING_OPUS + */ + public static final int ENCODING_OPUS = AudioFormat.ENCODING_OPUS; + + /** Represents the behavior affecting whether spatialization will be used. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({SPATIALIZATION_BEHAVIOR_AUTO, SPATIALIZATION_BEHAVIOR_NEVER}) + public @interface SpatializationBehavior {} + + /** + * @see AudioAttributes#SPATIALIZATION_BEHAVIOR_AUTO + */ + public static final int SPATIALIZATION_BEHAVIOR_AUTO = + AudioAttributes.SPATIALIZATION_BEHAVIOR_AUTO; + /** + * @see AudioAttributes#SPATIALIZATION_BEHAVIOR_NEVER + */ + public static final int SPATIALIZATION_BEHAVIOR_NEVER = + AudioAttributes.SPATIALIZATION_BEHAVIOR_NEVER; /** * Stream types for an {@link android.media.AudioTrack}. One of {@link #STREAM_TYPE_ALARM}, {@link * #STREAM_TYPE_DTMF}, {@link #STREAM_TYPE_MUSIC}, {@link #STREAM_TYPE_NOTIFICATION}, {@link * #STREAM_TYPE_RING}, {@link #STREAM_TYPE_SYSTEM}, {@link #STREAM_TYPE_VOICE_CALL} or {@link - * #STREAM_TYPE_USE_DEFAULT}. + * #STREAM_TYPE_DEFAULT}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. + @SuppressLint("UniqueConstants") // Intentional duplication to set STREAM_TYPE_DEFAULT. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({ STREAM_TYPE_ALARM, STREAM_TYPE_DTMF, @@ -243,7 +365,7 @@ private C() {} STREAM_TYPE_RING, STREAM_TYPE_SYSTEM, STREAM_TYPE_VOICE_CALL, - STREAM_TYPE_USE_DEFAULT + STREAM_TYPE_DEFAULT }) public @interface StreamType {} /** @@ -274,63 +396,76 @@ private C() {} * @see AudioManager#STREAM_VOICE_CALL */ public static final int STREAM_TYPE_VOICE_CALL = AudioManager.STREAM_VOICE_CALL; - /** - * @see AudioManager#USE_DEFAULT_STREAM_TYPE - */ - public static final int STREAM_TYPE_USE_DEFAULT = AudioManager.USE_DEFAULT_STREAM_TYPE; - /** - * The default stream type used by audio renderers. - */ + /** The default stream type used by audio renderers. Equal to {@link #STREAM_TYPE_MUSIC}. */ public static final int STREAM_TYPE_DEFAULT = STREAM_TYPE_MUSIC; /** - * Content types for {@link com.google.android.exoplayer2.audio.AudioAttributes}. 
One of {@link - * #CONTENT_TYPE_MOVIE}, {@link #CONTENT_TYPE_MUSIC}, {@link #CONTENT_TYPE_SONIFICATION}, {@link - * #CONTENT_TYPE_SPEECH} or {@link #CONTENT_TYPE_UNKNOWN}. - */ + * Content types for audio attributes. One of: + * + *

+ * <ul>
+ *   <li>{@link #AUDIO_CONTENT_TYPE_MOVIE}
+ *   <li>{@link #AUDIO_CONTENT_TYPE_MUSIC}
+ *   <li>{@link #AUDIO_CONTENT_TYPE_SONIFICATION}
+ *   <li>{@link #AUDIO_CONTENT_TYPE_SPEECH}
+ *   <li>{@link #AUDIO_CONTENT_TYPE_UNKNOWN}
+ * </ul>
+ */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({ - CONTENT_TYPE_MOVIE, - CONTENT_TYPE_MUSIC, - CONTENT_TYPE_SONIFICATION, - CONTENT_TYPE_SPEECH, - CONTENT_TYPE_UNKNOWN + AUDIO_CONTENT_TYPE_MOVIE, + AUDIO_CONTENT_TYPE_MUSIC, + AUDIO_CONTENT_TYPE_SONIFICATION, + AUDIO_CONTENT_TYPE_SPEECH, + AUDIO_CONTENT_TYPE_UNKNOWN }) public @interface AudioContentType {} + /** See {@link AudioAttributes#CONTENT_TYPE_MOVIE}. */ + public static final int AUDIO_CONTENT_TYPE_MOVIE = AudioAttributes.CONTENT_TYPE_MOVIE; /** - * @see android.media.AudioAttributes#CONTENT_TYPE_MOVIE + * @deprecated Use {@link #AUDIO_CONTENT_TYPE_MOVIE} instead. */ - public static final int CONTENT_TYPE_MOVIE = android.media.AudioAttributes.CONTENT_TYPE_MOVIE; + @Deprecated public static final int CONTENT_TYPE_MOVIE = AUDIO_CONTENT_TYPE_MOVIE; + /** See {@link AudioAttributes#CONTENT_TYPE_MUSIC}. */ + public static final int AUDIO_CONTENT_TYPE_MUSIC = AudioAttributes.CONTENT_TYPE_MUSIC; /** - * @see android.media.AudioAttributes#CONTENT_TYPE_MUSIC + * @deprecated Use {@link #AUDIO_CONTENT_TYPE_MUSIC} instead. */ - public static final int CONTENT_TYPE_MUSIC = android.media.AudioAttributes.CONTENT_TYPE_MUSIC; + @Deprecated public static final int CONTENT_TYPE_MUSIC = AUDIO_CONTENT_TYPE_MUSIC; + /** See {@link AudioAttributes#CONTENT_TYPE_SONIFICATION}. */ + public static final int AUDIO_CONTENT_TYPE_SONIFICATION = + AudioAttributes.CONTENT_TYPE_SONIFICATION; /** - * @see android.media.AudioAttributes#CONTENT_TYPE_SONIFICATION + * @deprecated Use {@link #AUDIO_CONTENT_TYPE_SONIFICATION} instead. */ - public static final int CONTENT_TYPE_SONIFICATION = - android.media.AudioAttributes.CONTENT_TYPE_SONIFICATION; + @Deprecated public static final int CONTENT_TYPE_SONIFICATION = AUDIO_CONTENT_TYPE_SONIFICATION; + /** See {@link AudioAttributes#CONTENT_TYPE_SPEECH}. */ + public static final int AUDIO_CONTENT_TYPE_SPEECH = AudioAttributes.CONTENT_TYPE_SPEECH; /** - * @see android.media.AudioAttributes#CONTENT_TYPE_SPEECH + * @deprecated Use {@link #AUDIO_CONTENT_TYPE_SPEECH} instead. */ - public static final int CONTENT_TYPE_SPEECH = - android.media.AudioAttributes.CONTENT_TYPE_SPEECH; + @Deprecated public static final int CONTENT_TYPE_SPEECH = AUDIO_CONTENT_TYPE_SPEECH; + /** See {@link AudioAttributes#CONTENT_TYPE_UNKNOWN}. */ + public static final int AUDIO_CONTENT_TYPE_UNKNOWN = AudioAttributes.CONTENT_TYPE_UNKNOWN; /** - * @see android.media.AudioAttributes#CONTENT_TYPE_UNKNOWN + * @deprecated Use {@link #AUDIO_CONTENT_TYPE_UNKNOWN} instead. */ - public static final int CONTENT_TYPE_UNKNOWN = - android.media.AudioAttributes.CONTENT_TYPE_UNKNOWN; + @Deprecated public static final int CONTENT_TYPE_UNKNOWN = AUDIO_CONTENT_TYPE_UNKNOWN; /** - * Flags for {@link com.google.android.exoplayer2.audio.AudioAttributes}. Possible flag value is - * {@link #FLAG_AUDIBILITY_ENFORCED}. + * Flags for audio attributes. Possible flag value is {@link #FLAG_AUDIBILITY_ENFORCED}. * *
<p>
Note that {@code FLAG_HW_AV_SYNC} is not available because the player takes care of setting * the flag when tunneling is enabled via a track selector. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef( flag = true, value = {FLAG_AUDIBILITY_ENFORCED}) @@ -342,18 +477,20 @@ private C() {} android.media.AudioAttributes.FLAG_AUDIBILITY_ENFORCED; /** - * Usage types for {@link com.google.android.exoplayer2.audio.AudioAttributes}. One of {@link - * #USAGE_ALARM}, {@link #USAGE_ASSISTANCE_ACCESSIBILITY}, {@link - * #USAGE_ASSISTANCE_NAVIGATION_GUIDANCE}, {@link #USAGE_ASSISTANCE_SONIFICATION}, {@link - * #USAGE_ASSISTANT}, {@link #USAGE_GAME}, {@link #USAGE_MEDIA}, {@link #USAGE_NOTIFICATION}, - * {@link #USAGE_NOTIFICATION_COMMUNICATION_DELAYED}, {@link - * #USAGE_NOTIFICATION_COMMUNICATION_INSTANT}, {@link #USAGE_NOTIFICATION_COMMUNICATION_REQUEST}, - * {@link #USAGE_NOTIFICATION_EVENT}, {@link #USAGE_NOTIFICATION_RINGTONE}, {@link - * #USAGE_UNKNOWN}, {@link #USAGE_VOICE_COMMUNICATION} or {@link - * #USAGE_VOICE_COMMUNICATION_SIGNALLING}. + * Usage types for audio attributes. One of {@link #USAGE_ALARM}, {@link + * #USAGE_ASSISTANCE_ACCESSIBILITY}, {@link #USAGE_ASSISTANCE_NAVIGATION_GUIDANCE}, {@link + * #USAGE_ASSISTANCE_SONIFICATION}, {@link #USAGE_ASSISTANT}, {@link #USAGE_GAME}, {@link + * #USAGE_MEDIA}, {@link #USAGE_NOTIFICATION}, {@link #USAGE_NOTIFICATION_COMMUNICATION_DELAYED}, + * {@link #USAGE_NOTIFICATION_COMMUNICATION_INSTANT}, {@link + * #USAGE_NOTIFICATION_COMMUNICATION_REQUEST}, {@link #USAGE_NOTIFICATION_EVENT}, {@link + * #USAGE_NOTIFICATION_RINGTONE}, {@link #USAGE_UNKNOWN}, {@link #USAGE_VOICE_COMMUNICATION} or + * {@link #USAGE_VOICE_COMMUNICATION_SIGNALLING}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({ USAGE_ALARM, USAGE_ASSISTANCE_ACCESSIBILITY, @@ -377,7 +514,9 @@ private C() {} * @see android.media.AudioAttributes#USAGE_ALARM */ public static final int USAGE_ALARM = android.media.AudioAttributes.USAGE_ALARM; - /** @see android.media.AudioAttributes#USAGE_ASSISTANCE_ACCESSIBILITY */ + /** + * @see android.media.AudioAttributes#USAGE_ASSISTANCE_ACCESSIBILITY + */ public static final int USAGE_ASSISTANCE_ACCESSIBILITY = android.media.AudioAttributes.USAGE_ASSISTANCE_ACCESSIBILITY; /** @@ -390,7 +529,9 @@ private C() {} */ public static final int USAGE_ASSISTANCE_SONIFICATION = android.media.AudioAttributes.USAGE_ASSISTANCE_SONIFICATION; - /** @see android.media.AudioAttributes#USAGE_ASSISTANT */ + /** + * @see android.media.AudioAttributes#USAGE_ASSISTANT + */ public static final int USAGE_ASSISTANT = android.media.AudioAttributes.USAGE_ASSISTANT; /** * @see android.media.AudioAttributes#USAGE_GAME @@ -445,11 +586,14 @@ private C() {} android.media.AudioAttributes.USAGE_VOICE_COMMUNICATION_SIGNALLING; /** - * Capture policies for {@link com.google.android.exoplayer2.audio.AudioAttributes}. One of {@link - * #ALLOW_CAPTURE_BY_ALL}, {@link #ALLOW_CAPTURE_BY_NONE} or {@link #ALLOW_CAPTURE_BY_SYSTEM}. + * Capture policies for audio attributes. 
One of {@link #ALLOW_CAPTURE_BY_ALL}, {@link + * #ALLOW_CAPTURE_BY_NONE} or {@link #ALLOW_CAPTURE_BY_SYSTEM}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({ALLOW_CAPTURE_BY_ALL, ALLOW_CAPTURE_BY_NONE, ALLOW_CAPTURE_BY_SYSTEM}) public @interface AudioAllowedCapturePolicy {} /** See {@link android.media.AudioAttributes#ALLOW_CAPTURE_BY_ALL}. */ @@ -459,60 +603,33 @@ private C() {} /** See {@link android.media.AudioAttributes#ALLOW_CAPTURE_BY_SYSTEM}. */ public static final int ALLOW_CAPTURE_BY_SYSTEM = AudioAttributes.ALLOW_CAPTURE_BY_SYSTEM; - /** - * Audio focus types. One of {@link #AUDIOFOCUS_NONE}, {@link #AUDIOFOCUS_GAIN}, {@link - * #AUDIOFOCUS_GAIN_TRANSIENT}, {@link #AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK} or {@link - * #AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE}. - */ - @Documented - @Retention(RetentionPolicy.SOURCE) - @IntDef({ - AUDIOFOCUS_NONE, - AUDIOFOCUS_GAIN, - AUDIOFOCUS_GAIN_TRANSIENT, - AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK, - AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE - }) - public @interface AudioFocusGain {} - /** @see AudioManager#AUDIOFOCUS_NONE */ - public static final int AUDIOFOCUS_NONE = AudioManager.AUDIOFOCUS_NONE; - /** @see AudioManager#AUDIOFOCUS_GAIN */ - public static final int AUDIOFOCUS_GAIN = AudioManager.AUDIOFOCUS_GAIN; - /** @see AudioManager#AUDIOFOCUS_GAIN_TRANSIENT */ - public static final int AUDIOFOCUS_GAIN_TRANSIENT = AudioManager.AUDIOFOCUS_GAIN_TRANSIENT; - /** @see AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK */ - public static final int AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK = - AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK; - /** @see AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE */ - public static final int AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE = - AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE; - /** * Flags which can apply to a buffer containing a media sample. Possible flag values are {@link - * #BUFFER_FLAG_KEY_FRAME}, {@link #BUFFER_FLAG_END_OF_STREAM}, {@link #BUFFER_FLAG_LAST_SAMPLE}, - * {@link #BUFFER_FLAG_ENCRYPTED} and {@link #BUFFER_FLAG_DECODE_ONLY}. + * #BUFFER_FLAG_KEY_FRAME}, {@link #BUFFER_FLAG_END_OF_STREAM}, {@link #BUFFER_FLAG_FIRST_SAMPLE}, + * {@link #BUFFER_FLAG_LAST_SAMPLE}, {@link #BUFFER_FLAG_ENCRYPTED} and {@link + * #BUFFER_FLAG_DECODE_ONLY}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef( flag = true, value = { BUFFER_FLAG_KEY_FRAME, BUFFER_FLAG_END_OF_STREAM, + BUFFER_FLAG_FIRST_SAMPLE, BUFFER_FLAG_HAS_SUPPLEMENTAL_DATA, BUFFER_FLAG_LAST_SAMPLE, BUFFER_FLAG_ENCRYPTED, BUFFER_FLAG_DECODE_ONLY }) public @interface BufferFlags {} - /** - * Indicates that a buffer holds a synchronization sample. - */ + /** Indicates that a buffer holds a synchronization sample. */ public static final int BUFFER_FLAG_KEY_FRAME = MediaCodec.BUFFER_FLAG_KEY_FRAME; - /** - * Flag for empty buffers that signal that the end of the stream was reached. - */ + /** Flag for empty buffers that signal that the end of the stream was reached. */ public static final int BUFFER_FLAG_END_OF_STREAM = MediaCodec.BUFFER_FLAG_END_OF_STREAM; + /** Indicates that a buffer is known to contain the first media sample of the stream. */ + public static final int BUFFER_FLAG_FIRST_SAMPLE = 1 << 27; // 0x08000000 /** Indicates that a buffer has supplemental data. 
*/ public static final int BUFFER_FLAG_HAS_SUPPLEMENTAL_DATA = 1 << 28; // 0x10000000 /** Indicates that a buffer is known to contain the last media sample of the stream. */ @@ -522,13 +639,13 @@ private C() {} /** Indicates that a buffer should be decoded but not rendered. */ public static final int BUFFER_FLAG_DECODE_ONLY = 1 << 31; // 0x80000000 - // LINT.IfChange /** * Video decoder output modes. Possible modes are {@link #VIDEO_OUTPUT_MODE_NONE}, {@link * #VIDEO_OUTPUT_MODE_YUV} and {@link #VIDEO_OUTPUT_MODE_SURFACE_YUV}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef(value = {VIDEO_OUTPUT_MODE_NONE, VIDEO_OUTPUT_MODE_YUV, VIDEO_OUTPUT_MODE_SURFACE_YUV}) public @interface VideoOutputMode {} /** Video decoder output mode is not set. */ @@ -537,49 +654,78 @@ private C() {} public static final int VIDEO_OUTPUT_MODE_YUV = 0; /** Video decoder output mode that renders 4:2:0 YUV planes directly to a surface. */ public static final int VIDEO_OUTPUT_MODE_SURFACE_YUV = 1; - // LINT.ThenChange( - // ../../../../../../../../../extensions/av1/src/main/jni/gav1_jni.cc, - // ../../../../../../../../../extensions/vp9/src/main/jni/vpx_jni.cc - // ) /** - * Video scaling modes for {@link MediaCodec}-based {@link Renderer}s. One of {@link - * #VIDEO_SCALING_MODE_SCALE_TO_FIT} or {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING}. + * Video scaling modes for {@link MediaCodec}-based renderers. One of {@link + * #VIDEO_SCALING_MODE_SCALE_TO_FIT}, {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING} or + * {@link #VIDEO_SCALING_MODE_DEFAULT}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. + @SuppressLint("UniqueConstants") // Intentional duplication to set VIDEO_SCALING_MODE_DEFAULT. @Documented @Retention(RetentionPolicy.SOURCE) - @IntDef(value = {VIDEO_SCALING_MODE_SCALE_TO_FIT, VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING}) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef({ + VIDEO_SCALING_MODE_SCALE_TO_FIT, + VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING, + VIDEO_SCALING_MODE_DEFAULT + }) public @interface VideoScalingMode {} - /** - * @see MediaCodec#VIDEO_SCALING_MODE_SCALE_TO_FIT - */ + /** See {@link MediaCodec#VIDEO_SCALING_MODE_SCALE_TO_FIT}. */ public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT = MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT; - /** - * @see MediaCodec#VIDEO_SCALING_MODE_SCALE_TO_FIT - */ + /** See {@link MediaCodec#VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING}. */ public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING = MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING; + /** A default video scaling mode for {@link MediaCodec}-based renderers. */ + public static final int VIDEO_SCALING_MODE_DEFAULT = VIDEO_SCALING_MODE_SCALE_TO_FIT; + + /** Strategies for calling {@link Surface#setFrameRate}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef({VIDEO_CHANGE_FRAME_RATE_STRATEGY_OFF, VIDEO_CHANGE_FRAME_RATE_STRATEGY_ONLY_IF_SEAMLESS}) + public @interface VideoChangeFrameRateStrategy {} /** - * A default video scaling mode for {@link MediaCodec}-based {@link Renderer}s. + * Strategy to never call {@link Surface#setFrameRate}. 
Use this strategy if you prefer to call + * {@link Surface#setFrameRate} directly from application code. */ - public static final int VIDEO_SCALING_MODE_DEFAULT = VIDEO_SCALING_MODE_SCALE_TO_FIT; + public static final int VIDEO_CHANGE_FRAME_RATE_STRATEGY_OFF = Integer.MIN_VALUE; + /** + * Strategy to call {@link Surface#setFrameRate} with {@link + * Surface#CHANGE_FRAME_RATE_ONLY_IF_SEAMLESS} when the output frame rate is known. + */ + public static final int VIDEO_CHANGE_FRAME_RATE_STRATEGY_ONLY_IF_SEAMLESS = + Surface.CHANGE_FRAME_RATE_ONLY_IF_SEAMLESS; /** * Track selection flags. Possible flag values are {@link #SELECTION_FLAG_DEFAULT}, {@link * #SELECTION_FLAG_FORCED} and {@link #SELECTION_FLAG_AUTOSELECT}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef( flag = true, value = {SELECTION_FLAG_DEFAULT, SELECTION_FLAG_FORCED, SELECTION_FLAG_AUTOSELECT}) public @interface SelectionFlags {} + // LINT.IfChange(selection_flags) + /** Indicates that the track should be selected if user preferences do not state otherwise. */ + public static final int SELECTION_FLAG_DEFAULT = 1; /** - * Indicates that the track should be selected if user preferences do not state otherwise. + * Indicates that the track should be selected if its language matches the language of the + * selected audio track and user preferences do not state otherwise. Only applies to text tracks. + * + *

+ * <ul>
+ *   <li>{@link #CONTENT_TYPE_DASH}
+ *   <li>{@link #CONTENT_TYPE_SS}
+ *   <li>{@link #CONTENT_TYPE_HLS}
+ *   <li>{@link #CONTENT_TYPE_RTSP}
+ *   <li>{@link #CONTENT_TYPE_OTHER}
+ * </ul>
+ */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) - @IntDef({TYPE_DASH, TYPE_SS, TYPE_HLS, TYPE_OTHER}) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef({ + CONTENT_TYPE_DASH, + CONTENT_TYPE_SS, + CONTENT_TYPE_HLS, + CONTENT_TYPE_RTSP, + CONTENT_TYPE_OTHER + }) public @interface ContentType {} + /** Value representing a DASH manifest. */ + public static final int CONTENT_TYPE_DASH = 0; /** - * Value returned by {@link Util#inferContentType(String)} for DASH manifests. + * @deprecated Use {@link #CONTENT_TYPE_DASH} instead. */ - public static final int TYPE_DASH = 0; + @Deprecated public static final int TYPE_DASH = CONTENT_TYPE_DASH; + /** Value representing a Smooth Streaming manifest. */ + public static final int CONTENT_TYPE_SS = 1; /** - * Value returned by {@link Util#inferContentType(String)} for Smooth Streaming manifests. + * @deprecated Use {@link #CONTENT_TYPE_SS} instead. */ - public static final int TYPE_SS = 1; + @Deprecated public static final int TYPE_SS = CONTENT_TYPE_SS; + /** Value representing an HLS manifest. */ + public static final int CONTENT_TYPE_HLS = 2; /** - * Value returned by {@link Util#inferContentType(String)} for HLS manifests. + * @deprecated Use {@link #CONTENT_TYPE_HLS} instead. */ - public static final int TYPE_HLS = 2; + @Deprecated public static final int TYPE_HLS = CONTENT_TYPE_HLS; + /** Value representing an RTSP stream. */ + public static final int CONTENT_TYPE_RTSP = 3; /** - * Value returned by {@link Util#inferContentType(String)} for files other than DASH, HLS or - * Smooth Streaming manifests. + * @deprecated Use {@link #CONTENT_TYPE_RTSP} instead. */ - public static final int TYPE_OTHER = 3; - + @Deprecated public static final int TYPE_RTSP = CONTENT_TYPE_RTSP; + /** Value representing files other than DASH, HLS or Smooth Streaming manifests, or RTSP URIs. */ + public static final int CONTENT_TYPE_OTHER = 4; /** - * A return value for methods where the end of an input was encountered. + * @deprecated Use {@link #CONTENT_TYPE_OTHER} instead. */ + @Deprecated public static final int TYPE_OTHER = CONTENT_TYPE_OTHER; + + /** A return value for methods where the end of an input was encountered. */ public static final int RESULT_END_OF_INPUT = -1; /** * A return value for methods where the length of parsed data exceeds the maximum length allowed. */ public static final int RESULT_MAX_LENGTH_EXCEEDED = -2; - /** - * A return value for methods where nothing was read. - */ + /** A return value for methods where nothing was read. */ public static final int RESULT_NOTHING_READ = -3; - /** - * A return value for methods where a buffer was read. - */ + /** A return value for methods where a buffer was read. */ public static final int RESULT_BUFFER_READ = -4; - /** - * A return value for methods where a format was read. - */ + /** A return value for methods where a format was read. */ public static final int RESULT_FORMAT_READ = -5; + /** + * Represents a type of data. May be one of {@link #DATA_TYPE_UNKNOWN}, {@link #DATA_TYPE_MEDIA}, + * {@link #DATA_TYPE_MEDIA_INITIALIZATION}, {@link #DATA_TYPE_DRM}, {@link #DATA_TYPE_MANIFEST}, + * {@link #DATA_TYPE_TIME_SYNCHRONIZATION}, {@link #DATA_TYPE_AD}, or {@link + * #DATA_TYPE_MEDIA_PROGRESSIVE_LIVE}. May also be an app-defined value (see {@link + * #DATA_TYPE_CUSTOM_BASE}). 
+ */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef( + open = true, + value = { + DATA_TYPE_UNKNOWN, + DATA_TYPE_MEDIA, + DATA_TYPE_MEDIA_INITIALIZATION, + DATA_TYPE_DRM, + DATA_TYPE_MANIFEST, + DATA_TYPE_TIME_SYNCHRONIZATION, + DATA_TYPE_AD, + DATA_TYPE_MEDIA_PROGRESSIVE_LIVE + }) + public @interface DataType {} /** A data type constant for data of unknown or unspecified type. */ public static final int DATA_TYPE_UNKNOWN = 0; /** A data type constant for media, typically containing media samples. */ @@ -661,6 +851,32 @@ private C() {} */ public static final int DATA_TYPE_CUSTOM_BASE = 10000; + /** + * Represents a type of media track. May be one of {@link #TRACK_TYPE_UNKNOWN}, {@link + * #TRACK_TYPE_DEFAULT}, {@link #TRACK_TYPE_AUDIO}, {@link #TRACK_TYPE_VIDEO}, {@link + * #TRACK_TYPE_TEXT}, {@link #TRACK_TYPE_IMAGE}, {@link #TRACK_TYPE_METADATA}, {@link + * #TRACK_TYPE_CAMERA_MOTION} or {@link #TRACK_TYPE_NONE}. May also be an app-defined value (see + * {@link #TRACK_TYPE_CUSTOM_BASE}). + */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef( + open = true, + value = { + TRACK_TYPE_UNKNOWN, + TRACK_TYPE_DEFAULT, + TRACK_TYPE_AUDIO, + TRACK_TYPE_VIDEO, + TRACK_TYPE_TEXT, + TRACK_TYPE_IMAGE, + TRACK_TYPE_METADATA, + TRACK_TYPE_CAMERA_MOTION, + TRACK_TYPE_NONE, + }) + public @interface TrackType {} + /** A type constant for a fake or empty track. */ + public static final int TRACK_TYPE_NONE = -2; /** A type constant for tracks of unknown type. */ public static final int TRACK_TYPE_UNKNOWN = -1; /** A type constant for tracks of some default type, where the type itself is unknown. */ @@ -671,12 +887,12 @@ private C() {} public static final int TRACK_TYPE_VIDEO = 2; /** A type constant for text tracks. */ public static final int TRACK_TYPE_TEXT = 3; + /** A type constant for image tracks. */ + public static final int TRACK_TYPE_IMAGE = 4; /** A type constant for metadata tracks. */ - public static final int TRACK_TYPE_METADATA = 4; + public static final int TRACK_TYPE_METADATA = 5; /** A type constant for camera motion tracks. */ - public static final int TRACK_TYPE_CAMERA_MOTION = 5; - /** A type constant for a dummy or empty track. */ - public static final int TRACK_TYPE_NONE = 6; + public static final int TRACK_TYPE_CAMERA_MOTION = 6; /** * Applications or extensions may define custom {@code TRACK_TYPE_*} constants greater than or * equal to this value. @@ -684,24 +900,33 @@ private C() {} public static final int TRACK_TYPE_CUSTOM_BASE = 10000; /** - * A selection reason constant for selections whose reasons are unknown or unspecified. + * Represents a reason for selection. May be one of {@link #SELECTION_REASON_UNKNOWN}, {@link + * #SELECTION_REASON_INITIAL}, {@link #SELECTION_REASON_MANUAL}, {@link + * #SELECTION_REASON_ADAPTIVE} or {@link #SELECTION_REASON_TRICK_PLAY}. May also be an app-defined + * value (see {@link #SELECTION_REASON_CUSTOM_BASE}). */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef( + open = true, + value = { + SELECTION_REASON_UNKNOWN, + SELECTION_REASON_INITIAL, + SELECTION_REASON_MANUAL, + SELECTION_REASON_ADAPTIVE, + SELECTION_REASON_TRICK_PLAY + }) + public @interface SelectionReason {} + /** A selection reason constant for selections whose reasons are unknown or unspecified. */ public static final int SELECTION_REASON_UNKNOWN = 0; - /** - * A selection reason constant for an initial track selection. 
- */ + /** A selection reason constant for an initial track selection. */ public static final int SELECTION_REASON_INITIAL = 1; - /** - * A selection reason constant for an manual (i.e. user initiated) track selection. - */ + /** A selection reason constant for an manual (i.e. user initiated) track selection. */ public static final int SELECTION_REASON_MANUAL = 2; - /** - * A selection reason constant for an adaptive track selection. - */ + /** A selection reason constant for an adaptive track selection. */ public static final int SELECTION_REASON_ADAPTIVE = 3; - /** - * A selection reason constant for a trick play track selection. - */ + /** A selection reason constant for a trick play track selection. */ public static final int SELECTION_REASON_TRICK_PLAY = 4; /** * Applications or extensions may define custom {@code SELECTION_REASON_*} constants greater than @@ -712,6 +937,17 @@ private C() {} /** A default size in bytes for an individual allocation that forms part of a larger buffer. */ public static final int DEFAULT_BUFFER_SEGMENT_SIZE = 64 * 1024; + /** A default seek back increment, in milliseconds. */ + public static final long DEFAULT_SEEK_BACK_INCREMENT_MS = 5_000; + /** A default seek forward increment, in milliseconds. */ + public static final long DEFAULT_SEEK_FORWARD_INCREMENT_MS = 15_000; + + /** + * A default maximum position for which a seek to previous will seek to the previous window, in + * milliseconds. + */ + public static final long DEFAULT_MAX_SEEK_TO_PREVIOUS_POSITION_MS = 3_000; + /** "cenc" scheme type name as defined in ISO/IEC 23001-7:2016. */ @SuppressWarnings("ConstantField") public static final String CENC_TYPE_cenc = "cenc"; @@ -729,125 +965,40 @@ private C() {} public static final String CENC_TYPE_cbcs = "cbcs"; /** - * The Nil UUID as defined by - * RFC4122. + * The Nil UUID as defined by RFC4122. */ public static final UUID UUID_NIL = new UUID(0L, 0L); /** - * UUID for the W3C - * Common PSSH + * UUID for the W3C Common PSSH * box. */ public static final UUID COMMON_PSSH_UUID = new UUID(0x1077EFECC0B24D02L, 0xACE33C1E52E2FB4BL); /** * UUID for the ClearKey DRM scheme. - *
<p>
- * ClearKey is supported on Android devices running Android 5.0 (API Level 21) and up. + * + *
<p>
ClearKey is supported on Android devices running Android 5.0 (API Level 21) and up. */ public static final UUID CLEARKEY_UUID = new UUID(0xE2719D58A985B3C9L, 0x781AB030AF78D30EL); /** * UUID for the Widevine DRM scheme. - *
<p>
- * Widevine is supported on Android devices running Android 4.3 (API Level 18) and up. + * + *
<p>
Widevine is supported on Android devices running Android 4.3 (API Level 18) and up. */ public static final UUID WIDEVINE_UUID = new UUID(0xEDEF8BA979D64ACEL, 0xA3C827DCD51D21EDL); /** * UUID for the PlayReady DRM scheme. - *
<p>
- * PlayReady is supported on all AndroidTV devices. Note that most other Android devices do not + * + *
<p>
PlayReady is supported on all AndroidTV devices. Note that most other Android devices do not * provide PlayReady support. */ public static final UUID PLAYREADY_UUID = new UUID(0x9A04F07998404286L, 0xAB92E65BE0885F95L); - /** - * The type of a message that can be passed to a video {@link Renderer} via {@link - * ExoPlayer#createMessage(Target)}. The message payload should be the target {@link Surface}, or - * null. - */ - public static final int MSG_SET_SURFACE = 1; - - /** - * A type of a message that can be passed to an audio {@link Renderer} via {@link - * ExoPlayer#createMessage(Target)}. The message payload should be a {@link Float} with 0 being - * silence and 1 being unity gain. - */ - public static final int MSG_SET_VOLUME = 2; - - /** - * A type of a message that can be passed to an audio {@link Renderer} via {@link - * ExoPlayer#createMessage(Target)}. The message payload should be an {@link - * com.google.android.exoplayer2.audio.AudioAttributes} instance that will configure the - * underlying audio track. If not set, the default audio attributes will be used. They are - * suitable for general media playback. - * - *
<p>
Setting the audio attributes during playback may introduce a short gap in audio output as - * the audio track is recreated. A new audio session id will also be generated. - * - *
<p>
If tunneling is enabled by the track selector, the specified audio attributes will be - * ignored, but they will take effect if audio is later played without tunneling. - * - *
<p>
If the device is running a build before platform API version 21, audio attributes cannot be - * set directly on the underlying audio track. In this case, the usage will be mapped onto an - * equivalent stream type using {@link Util#getStreamTypeForAudioUsage(int)}. - * - *
<p>
To get audio attributes that are equivalent to a legacy stream type, pass the stream type to - * {@link Util#getAudioUsageForStreamType(int)} and use the returned {@link C.AudioUsage} to build - * an audio attributes instance. - */ - public static final int MSG_SET_AUDIO_ATTRIBUTES = 3; - - /** - * The type of a message that can be passed to a {@link MediaCodec}-based video {@link Renderer} - * via {@link ExoPlayer#createMessage(Target)}. The message payload should be one of the integer - * scaling modes in {@link C.VideoScalingMode}. - * - *
<p>
Note that the scaling mode only applies if the {@link Surface} targeted by the renderer is - * owned by a {@link android.view.SurfaceView}. - */ - public static final int MSG_SET_SCALING_MODE = 4; - - /** - * A type of a message that can be passed to an audio {@link Renderer} via {@link - * ExoPlayer#createMessage(Target)}. The message payload should be an {@link AuxEffectInfo} - * instance representing an auxiliary audio effect for the underlying audio track. - */ - public static final int MSG_SET_AUX_EFFECT_INFO = 5; - - /** - * The type of a message that can be passed to a video {@link Renderer} via {@link - * ExoPlayer#createMessage(Target)}. The message payload should be a {@link - * VideoFrameMetadataListener} instance, or null. - */ - public static final int MSG_SET_VIDEO_FRAME_METADATA_LISTENER = 6; - - /** - * The type of a message that can be passed to a camera motion {@link Renderer} via {@link - * ExoPlayer#createMessage(Target)}. The message payload should be a {@link CameraMotionListener} - * instance, or null. - */ - public static final int MSG_SET_CAMERA_MOTION_LISTENER = 7; - - /** - * The type of a message that can be passed to a {@link SimpleDecoderVideoRenderer} via {@link - * ExoPlayer#createMessage(Target)}. The message payload should be the target {@link - * VideoDecoderOutputBufferRenderer}, or null. - * - *
<p>
This message is intended only for use with extension renderers that expect a {@link - * VideoDecoderOutputBufferRenderer}. For other use cases, an output surface should be passed via - * {@link #MSG_SET_SURFACE} instead. - */ - public static final int MSG_SET_VIDEO_DECODER_OUTPUT_BUFFER_RENDERER = 8; - - /** - * Applications or extensions may define custom {@code MSG_*} constants that can be passed to - * {@link Renderer}s. These custom constants must be greater than or equal to this value. - */ - public static final int MSG_CUSTOM_BASE = 10000; - /** * The stereo mode for 360/3D/VR videos. One of {@link Format#NO_VALUE}, {@link * #STEREO_MODE_MONO}, {@link #STEREO_MODE_TOP_BOTTOM}, {@link #STEREO_MODE_LEFT_RIGHT} or {@link @@ -855,6 +1006,7 @@ private C() {} */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ Format.NO_VALUE, STEREO_MODE_MONO, @@ -863,51 +1015,49 @@ private C() {} STEREO_MODE_STEREO_MESH }) public @interface StereoMode {} - /** - * Indicates Monoscopic stereo layout, used with 360/3D/VR videos. - */ + /** Indicates Monoscopic stereo layout, used with 360/3D/VR videos. */ public static final int STEREO_MODE_MONO = 0; - /** - * Indicates Top-Bottom stereo layout, used with 360/3D/VR videos. - */ + /** Indicates Top-Bottom stereo layout, used with 360/3D/VR videos. */ public static final int STEREO_MODE_TOP_BOTTOM = 1; - /** - * Indicates Left-Right stereo layout, used with 360/3D/VR videos. - */ + /** Indicates Left-Right stereo layout, used with 360/3D/VR videos. */ public static final int STEREO_MODE_LEFT_RIGHT = 2; /** - * Indicates a stereo layout where the left and right eyes have separate meshes, - * used with 360/3D/VR videos. + * Indicates a stereo layout where the left and right eyes have separate meshes, used with + * 360/3D/VR videos. */ public static final int STEREO_MODE_STEREO_MESH = 3; + // LINT.IfChange(color_space) /** - * Video colorspaces. One of {@link Format#NO_VALUE}, {@link #COLOR_SPACE_BT709}, {@link - * #COLOR_SPACE_BT601} or {@link #COLOR_SPACE_BT2020}. + * Video colorspaces. One of {@link Format#NO_VALUE}, {@link #COLOR_SPACE_BT601}, {@link + * #COLOR_SPACE_BT709} or {@link #COLOR_SPACE_BT2020}. */ @Documented @Retention(RetentionPolicy.SOURCE) - @IntDef({Format.NO_VALUE, COLOR_SPACE_BT709, COLOR_SPACE_BT601, COLOR_SPACE_BT2020}) + @Target(TYPE_USE) + @IntDef({Format.NO_VALUE, COLOR_SPACE_BT601, COLOR_SPACE_BT709, COLOR_SPACE_BT2020}) public @interface ColorSpace {} - /** - * @see MediaFormat#COLOR_STANDARD_BT709 - */ - public static final int COLOR_SPACE_BT709 = MediaFormat.COLOR_STANDARD_BT709; /** * @see MediaFormat#COLOR_STANDARD_BT601_PAL */ public static final int COLOR_SPACE_BT601 = MediaFormat.COLOR_STANDARD_BT601_PAL; + /** + * @see MediaFormat#COLOR_STANDARD_BT709 + */ + public static final int COLOR_SPACE_BT709 = MediaFormat.COLOR_STANDARD_BT709; /** * @see MediaFormat#COLOR_STANDARD_BT2020 */ public static final int COLOR_SPACE_BT2020 = MediaFormat.COLOR_STANDARD_BT2020; + // LINT.IfChange(color_transfer) /** * Video color transfer characteristics. One of {@link Format#NO_VALUE}, {@link * #COLOR_TRANSFER_SDR}, {@link #COLOR_TRANSFER_ST2084} or {@link #COLOR_TRANSFER_HLG}. 
*/ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({Format.NO_VALUE, COLOR_TRANSFER_SDR, COLOR_TRANSFER_ST2084, COLOR_TRANSFER_HLG}) public @interface ColorTransfer {} /** @@ -923,12 +1073,14 @@ private C() {} */ public static final int COLOR_TRANSFER_HLG = MediaFormat.COLOR_TRANSFER_HLG; + // LINT.IfChange(color_range) /** * Video color range. One of {@link Format#NO_VALUE}, {@link #COLOR_RANGE_LIMITED} or {@link * #COLOR_RANGE_FULL}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({Format.NO_VALUE, COLOR_RANGE_LIMITED, COLOR_RANGE_FULL}) public @interface ColorRange {} /** @@ -943,6 +1095,7 @@ private C() {} /** Video projection types. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ Format.NO_VALUE, PROJECTION_RECTANGULAR, @@ -977,11 +1130,14 @@ private C() {} /** * Network connection type. One of {@link #NETWORK_TYPE_UNKNOWN}, {@link #NETWORK_TYPE_OFFLINE}, * {@link #NETWORK_TYPE_WIFI}, {@link #NETWORK_TYPE_2G}, {@link #NETWORK_TYPE_3G}, {@link - * #NETWORK_TYPE_4G}, {@link #NETWORK_TYPE_5G}, {@link #NETWORK_TYPE_CELLULAR_UNKNOWN}, {@link - * #NETWORK_TYPE_ETHERNET} or {@link #NETWORK_TYPE_OTHER}. + * #NETWORK_TYPE_4G}, {@link #NETWORK_TYPE_5G_SA}, {@link #NETWORK_TYPE_5G_NSA}, {@link + * #NETWORK_TYPE_CELLULAR_UNKNOWN}, {@link #NETWORK_TYPE_ETHERNET} or {@link #NETWORK_TYPE_OTHER}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({ NETWORK_TYPE_UNKNOWN, NETWORK_TYPE_OFFLINE, @@ -989,7 +1145,8 @@ private C() {} NETWORK_TYPE_2G, NETWORK_TYPE_3G, NETWORK_TYPE_4G, - NETWORK_TYPE_5G, + NETWORK_TYPE_5G_SA, + NETWORK_TYPE_5G_NSA, NETWORK_TYPE_CELLULAR_UNKNOWN, NETWORK_TYPE_ETHERNET, NETWORK_TYPE_OTHER @@ -1007,8 +1164,10 @@ private C() {} public static final int NETWORK_TYPE_3G = 4; /** Network type for a 4G cellular connection. */ public static final int NETWORK_TYPE_4G = 5; - /** Network type for a 5G cellular connection. */ - public static final int NETWORK_TYPE_5G = 9; + /** Network type for a 5G stand-alone (SA) cellular connection. */ + public static final int NETWORK_TYPE_5G_SA = 9; + /** Network type for a 5G non-stand-alone (NSA) cellular connection. */ + public static final int NETWORK_TYPE_5G_NSA = 10; /** * Network type for cellular connections which cannot be mapped to one of {@link * #NETWORK_TYPE_2G}, {@link #NETWORK_TYPE_3G}, or {@link #NETWORK_TYPE_4G}. @@ -1021,10 +1180,13 @@ private C() {} /** * Mode specifying whether the player should hold a WakeLock and a WifiLock. One of {@link - * #WAKE_MODE_NONE}, {@link #WAKE_MODE_LOCAL} and {@link #WAKE_MODE_NETWORK}. + * #WAKE_MODE_NONE}, {@link #WAKE_MODE_LOCAL} or {@link #WAKE_MODE_NETWORK}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({WAKE_MODE_NONE, WAKE_MODE_LOCAL, WAKE_MODE_NETWORK}) public @interface WakeMode {} /** @@ -1059,8 +1221,11 @@ private C() {} * {@link #ROLE_FLAG_TRANSCRIBES_DIALOG}, {@link #ROLE_FLAG_EASY_TO_READ} and {@link * #ROLE_FLAG_TRICK_PLAY}. 
*/ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef( flag = true, value = { @@ -1081,6 +1246,7 @@ private C() {} ROLE_FLAG_TRICK_PLAY }) public @interface RoleFlags {} + // LINT.IfChange(role_flags) /** Indicates a main track. */ public static final int ROLE_FLAG_MAIN = 1; /** @@ -1128,37 +1294,118 @@ private C() {} public static final int ROLE_FLAG_TRICK_PLAY = 1 << 14; /** - * Converts a time in microseconds to the corresponding time in milliseconds, preserving - * {@link #TIME_UNSET} and {@link #TIME_END_OF_SOURCE} values. + * Level of renderer support for a format. One of {@link #FORMAT_HANDLED}, {@link + * #FORMAT_EXCEEDS_CAPABILITIES}, {@link #FORMAT_UNSUPPORTED_DRM}, {@link + * #FORMAT_UNSUPPORTED_SUBTYPE} or {@link #FORMAT_UNSUPPORTED_TYPE}. + */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef({ + FORMAT_HANDLED, + FORMAT_EXCEEDS_CAPABILITIES, + FORMAT_UNSUPPORTED_DRM, + FORMAT_UNSUPPORTED_SUBTYPE, + FORMAT_UNSUPPORTED_TYPE + }) + public @interface FormatSupport {} + // TODO(b/172315872) Renderer was a link. Link to equivalent concept or remove @code. + /** The {@code Renderer} is capable of rendering the format. */ + public static final int FORMAT_HANDLED = 0b100; + /** + * The {@code Renderer} is capable of rendering formats with the same MIME type, but the + * properties of the format exceed the renderer's capabilities. There is a chance the renderer + * will be able to play the format in practice because some renderers report their capabilities + * conservatively, but the expected outcome is that playback will fail. + * + *
<p>
Example: The {@code Renderer} is capable of rendering H264 and the format's MIME type is + * {@code MimeTypes#VIDEO_H264}, but the format's resolution exceeds the maximum limit supported + * by the underlying H264 decoder. + */ + public static final int FORMAT_EXCEEDS_CAPABILITIES = 0b011; + /** + * The {@code Renderer} is capable of rendering formats with the same MIME type, but is not + * capable of rendering the format because the format's drm protection is not supported. + * + *
<p>
Example: The {@code Renderer} is capable of rendering H264 and the format's MIME type is + * {@link MimeTypes#VIDEO_H264}, but the format indicates PlayReady drm protection whereas the + * renderer only supports Widevine. + */ + public static final int FORMAT_UNSUPPORTED_DRM = 0b010; + /** + * The {@code Renderer} is a general purpose renderer for formats of the same top-level type, but + * is not capable of rendering the format or any other format with the same MIME type because the + * sub-type is not supported. + * + *
<p>
Example: The {@code Renderer} is a general purpose audio renderer and the format's MIME type + * matches audio/[subtype], but there does not exist a suitable decoder for [subtype]. + */ + public static final int FORMAT_UNSUPPORTED_SUBTYPE = 0b001; + /** + * The {@code Renderer} is not capable of rendering the format, either because it does not support + * the format's top-level type, or because it's a specialized renderer for a different MIME type. * - * @param timeUs The time in microseconds. - * @return The corresponding time in milliseconds. + *
<p>
Example: The {@code Renderer} is a general purpose video renderer, but the format has an + * audio MIME type. */ + public static final int FORMAT_UNSUPPORTED_TYPE = 0b000; + + /** + * @deprecated Use {@link Util#usToMs(long)}. + */ + @InlineMe( + replacement = "Util.usToMs(timeUs)", + imports = {"com.google.android.exoplayer2.util.Util"}) + @Deprecated public static long usToMs(long timeUs) { - return (timeUs == TIME_UNSET || timeUs == TIME_END_OF_SOURCE) ? timeUs : (timeUs / 1000); + return Util.usToMs(timeUs); } /** - * Converts a time in milliseconds to the corresponding time in microseconds, preserving - * {@link #TIME_UNSET} values and {@link #TIME_END_OF_SOURCE} values. - * - * @param timeMs The time in milliseconds. - * @return The corresponding time in microseconds. + * @deprecated Use {@link Util#msToUs(long)}. */ + @InlineMe( + replacement = "Util.msToUs(timeMs)", + imports = {"com.google.android.exoplayer2.util.Util"}) + @Deprecated public static long msToUs(long timeMs) { - return (timeMs == TIME_UNSET || timeMs == TIME_END_OF_SOURCE) ? timeMs : (timeMs * 1000); + return Util.msToUs(timeMs); } /** - * Returns a newly generated audio session identifier, or {@link AudioManager#ERROR} if an error - * occurred in which case audio playback may fail. - * - * @see AudioManager#generateAudioSessionId() + * @deprecated Use {@link Util#generateAudioSessionIdV21(Context)}. */ - @TargetApi(21) + @InlineMe( + replacement = "Util.generateAudioSessionIdV21(context)", + imports = {"com.google.android.exoplayer2.util.Util"}) + @Deprecated + @RequiresApi(21) public static int generateAudioSessionIdV21(Context context) { - return ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE)) - .generateAudioSessionId(); + return Util.generateAudioSessionIdV21(context); } + /** + * @deprecated Use {@link Util#getFormatSupportString(int)}. + */ + @InlineMe( + replacement = "Util.getFormatSupportString(formatSupport)", + imports = {"com.google.android.exoplayer2.util.Util"}) + @Deprecated + public static String getFormatSupportString(@FormatSupport int formatSupport) { + return Util.getFormatSupportString(formatSupport); + } + + /** + * @deprecated Use {@link Util#getErrorCodeForMediaDrmErrorCode(int)}. + */ + @InlineMe( + replacement = "Util.getErrorCodeForMediaDrmErrorCode(mediaDrmErrorCode)", + imports = {"com.google.android.exoplayer2.util.Util"}) + @Deprecated + public static @PlaybackException.ErrorCode int getErrorCodeForMediaDrmErrorCode( + int mediaDrmErrorCode) { + return Util.getErrorCodeForMediaDrmErrorCode(mediaDrmErrorCode); + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ControlDispatcher.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ControlDispatcher.java deleted file mode 100644 index f8749fc1a8..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ControlDispatcher.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (C) 2017 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2; - -import com.google.android.exoplayer2.Player.RepeatMode; - -/** - * Dispatches operations to the {@link Player}. - *
<p>
- * Implementations may choose to suppress (e.g. prevent playback from resuming if audio focus is - * denied) or modify (e.g. change the seek position to prevent a user from seeking past a - * non-skippable advert) operations. - */ -public interface ControlDispatcher { - - /** - * Dispatches a {@link Player#setPlayWhenReady(boolean)} operation. - * - * @param player The {@link Player} to which the operation should be dispatched. - * @param playWhenReady Whether playback should proceed when ready. - * @return True if the operation was dispatched. False if suppressed. - */ - boolean dispatchSetPlayWhenReady(Player player, boolean playWhenReady); - - /** - * Dispatches a {@link Player#seekTo(int, long)} operation. - * - * @param player The {@link Player} to which the operation should be dispatched. - * @param windowIndex The index of the window. - * @param positionMs The seek position in the specified window, or {@link C#TIME_UNSET} to seek to - * the window's default position. - * @return True if the operation was dispatched. False if suppressed. - */ - boolean dispatchSeekTo(Player player, int windowIndex, long positionMs); - - /** - * Dispatches a {@link Player#setRepeatMode(int)} operation. - * - * @param player The {@link Player} to which the operation should be dispatched. - * @param repeatMode The repeat mode. - * @return True if the operation was dispatched. False if suppressed. - */ - boolean dispatchSetRepeatMode(Player player, @RepeatMode int repeatMode); - - /** - * Dispatches a {@link Player#setShuffleModeEnabled(boolean)} operation. - * - * @param player The {@link Player} to which the operation should be dispatched. - * @param shuffleModeEnabled Whether shuffling is enabled. - * @return True if the operation was dispatched. False if suppressed. - */ - boolean dispatchSetShuffleModeEnabled(Player player, boolean shuffleModeEnabled); - - /** - * Dispatches a {@link Player#stop()} operation. - * - * @param player The {@link Player} to which the operation should be dispatched. - * @param reset Whether the player should be reset. - * @return True if the operation was dispatched. False if suppressed. - */ - boolean dispatchStop(Player player, boolean reset); -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultControlDispatcher.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultControlDispatcher.java deleted file mode 100644 index df3ef36b88..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultControlDispatcher.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (C) 2017 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2; - -import com.google.android.exoplayer2.Player.RepeatMode; - -/** - * Default {@link ControlDispatcher} that dispatches all operations to the player without - * modification. 
- */ -public class DefaultControlDispatcher implements ControlDispatcher { - - @Override - public boolean dispatchSetPlayWhenReady(Player player, boolean playWhenReady) { - player.setPlayWhenReady(playWhenReady); - return true; - } - - @Override - public boolean dispatchSeekTo(Player player, int windowIndex, long positionMs) { - player.seekTo(windowIndex, positionMs); - return true; - } - - @Override - public boolean dispatchSetRepeatMode(Player player, @RepeatMode int repeatMode) { - player.setRepeatMode(repeatMode); - return true; - } - - @Override - public boolean dispatchSetShuffleModeEnabled(Player player, boolean shuffleModeEnabled) { - player.setShuffleModeEnabled(shuffleModeEnabled); - return true; - } - - @Override - public boolean dispatchStop(Player player, boolean reset) { - player.stop(reset); - return true; - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultLivePlaybackSpeedControl.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultLivePlaybackSpeedControl.java new file mode 100644 index 0000000000..6fec5d2450 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultLivePlaybackSpeedControl.java @@ -0,0 +1,458 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import static com.google.common.primitives.Longs.max; +import static java.lang.Math.abs; +import static java.lang.Math.max; + +import android.os.SystemClock; +import com.google.android.exoplayer2.MediaItem.LiveConfiguration; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.CanIgnoreReturnValue; + +/** + * A {@link LivePlaybackSpeedControl} that adjusts the playback speed using a proportional + * controller. + * + *
The control mechanism calculates the adjusted speed as {@code 1.0 + proportionalControlFactor + * x (currentLiveOffsetSec - targetLiveOffsetSec)}. Unit speed (1.0f) is used, if the {@code + * currentLiveOffsetSec} is closer to {@code targetLiveOffsetSec} than the value set with {@link + * Builder#setMaxLiveOffsetErrorMsForUnitSpeed(long)}. + * + *
The resulting speed is clamped to a minimum and maximum speed defined by the media, the + * fallback values set with {@link Builder#setFallbackMinPlaybackSpeed(float)} and {@link + * Builder#setFallbackMaxPlaybackSpeed(float)} or the {@link #DEFAULT_FALLBACK_MIN_PLAYBACK_SPEED + * minimum} and {@link #DEFAULT_FALLBACK_MAX_PLAYBACK_SPEED maximum} fallback default values. + * + *
When the player rebuffers, the target live offset {@link + * Builder#setTargetLiveOffsetIncrementOnRebufferMs(long) is increased} to adjust to the reduced + * network capabilities. The live playback speed control also {@link + * Builder#setMinPossibleLiveOffsetSmoothingFactor(float) keeps track} of the minimum possible live + * offset to decrease the target live offset again if conditions improve. The minimum possible live + * offset is derived from the current offset and the duration of buffered media. + */ +public final class DefaultLivePlaybackSpeedControl implements LivePlaybackSpeedControl { + + /** + * The default minimum factor by which playback can be sped up that should be used if no minimum + * playback speed is defined by the media. + */ + public static final float DEFAULT_FALLBACK_MIN_PLAYBACK_SPEED = 0.97f; + + /** + * The default maximum factor by which playback can be sped up that should be used if no maximum + * playback speed is defined by the media. + */ + public static final float DEFAULT_FALLBACK_MAX_PLAYBACK_SPEED = 1.03f; + + /** + * The default {@link Builder#setMinUpdateIntervalMs(long) minimum interval} between playback + * speed changes, in milliseconds. + */ + public static final long DEFAULT_MIN_UPDATE_INTERVAL_MS = 1_000; + + /** + * The default {@link Builder#setProportionalControlFactor(float) proportional control factor} + * used to adjust the playback speed. + */ + public static final float DEFAULT_PROPORTIONAL_CONTROL_FACTOR = 0.1f; + + /** + * The default increment applied to the target live offset each time the player is rebuffering, in + * milliseconds + */ + public static final long DEFAULT_TARGET_LIVE_OFFSET_INCREMENT_ON_REBUFFER_MS = 500; + + /** + * The default smoothing factor when smoothing the minimum possible live offset that can be + * achieved during playback. + */ + public static final float DEFAULT_MIN_POSSIBLE_LIVE_OFFSET_SMOOTHING_FACTOR = 0.999f; + + /** + * The default maximum difference between the current live offset and the target live offset, in + * milliseconds, for which unit speed (1.0f) is used. + */ + public static final long DEFAULT_MAX_LIVE_OFFSET_ERROR_MS_FOR_UNIT_SPEED = 20; + + /** Builder for a {@link DefaultLivePlaybackSpeedControl}. */ + public static final class Builder { + + private float fallbackMinPlaybackSpeed; + private float fallbackMaxPlaybackSpeed; + private long minUpdateIntervalMs; + private float proportionalControlFactorUs; + private long maxLiveOffsetErrorUsForUnitSpeed; + private long targetLiveOffsetIncrementOnRebufferUs; + private float minPossibleLiveOffsetSmoothingFactor; + + /** Creates a builder. */ + public Builder() { + fallbackMinPlaybackSpeed = DEFAULT_FALLBACK_MIN_PLAYBACK_SPEED; + fallbackMaxPlaybackSpeed = DEFAULT_FALLBACK_MAX_PLAYBACK_SPEED; + minUpdateIntervalMs = DEFAULT_MIN_UPDATE_INTERVAL_MS; + proportionalControlFactorUs = DEFAULT_PROPORTIONAL_CONTROL_FACTOR / C.MICROS_PER_SECOND; + maxLiveOffsetErrorUsForUnitSpeed = + Util.msToUs(DEFAULT_MAX_LIVE_OFFSET_ERROR_MS_FOR_UNIT_SPEED); + targetLiveOffsetIncrementOnRebufferUs = + Util.msToUs(DEFAULT_TARGET_LIVE_OFFSET_INCREMENT_ON_REBUFFER_MS); + minPossibleLiveOffsetSmoothingFactor = DEFAULT_MIN_POSSIBLE_LIVE_OFFSET_SMOOTHING_FACTOR; + } + + /** + * Sets the minimum playback speed that should be used if no minimum playback speed is defined + * by the media. + * + *
The default is {@link #DEFAULT_FALLBACK_MIN_PLAYBACK_SPEED}. + * + * @param fallbackMinPlaybackSpeed The fallback minimum factor by which playback can be sped up. + * @return This builder, for convenience. + */ + @CanIgnoreReturnValue + public Builder setFallbackMinPlaybackSpeed(float fallbackMinPlaybackSpeed) { + Assertions.checkArgument(0 < fallbackMinPlaybackSpeed && fallbackMinPlaybackSpeed <= 1f); + this.fallbackMinPlaybackSpeed = fallbackMinPlaybackSpeed; + return this; + } + + /** + * Sets the maximum playback speed that should be used if no maximum playback speed is defined + * by the media. + * + *
The default is {@link #DEFAULT_FALLBACK_MAX_PLAYBACK_SPEED}. + * + * @param fallbackMaxPlaybackSpeed The fallback maximum factor by which playback can be sped up. + * @return This builder, for convenience. + */ + @CanIgnoreReturnValue + public Builder setFallbackMaxPlaybackSpeed(float fallbackMaxPlaybackSpeed) { + Assertions.checkArgument(fallbackMaxPlaybackSpeed >= 1f); + this.fallbackMaxPlaybackSpeed = fallbackMaxPlaybackSpeed; + return this; + } + + /** + * Sets the minimum interval between playback speed changes, in milliseconds. + * + *
The default is {@link #DEFAULT_MIN_UPDATE_INTERVAL_MS}. + * + * @param minUpdateIntervalMs The minimum interval between playback speed changes, in + * milliseconds. + * @return This builder, for convenience. + */ + @CanIgnoreReturnValue + public Builder setMinUpdateIntervalMs(long minUpdateIntervalMs) { + Assertions.checkArgument(minUpdateIntervalMs > 0); + this.minUpdateIntervalMs = minUpdateIntervalMs; + return this; + } + + /** + * Sets the proportional control factor used to adjust the playback speed. + * + *
The factor by which playback will be sped up is calculated as {@code 1.0 + + * proportionalControlFactor x (currentLiveOffsetSec - targetLiveOffsetSec)}. + * + *
The default is {@link #DEFAULT_PROPORTIONAL_CONTROL_FACTOR}. + * + * @param proportionalControlFactor The proportional control factor used to adjust the playback + * speed. + * @return This builder, for convenience. + */ + @CanIgnoreReturnValue + public Builder setProportionalControlFactor(float proportionalControlFactor) { + Assertions.checkArgument(proportionalControlFactor > 0); + this.proportionalControlFactorUs = proportionalControlFactor / C.MICROS_PER_SECOND; + return this; + } + + /** + * Sets the maximum difference between the current live offset and the target live offset, in + * milliseconds, for which unit speed (1.0f) is used. + * + *
The default is {@link #DEFAULT_MAX_LIVE_OFFSET_ERROR_MS_FOR_UNIT_SPEED}. + * + * @param maxLiveOffsetErrorMsForUnitSpeed The maximum live offset error for which unit speed is + * used, in milliseconds. + * @return This builder, for convenience. + */ + @CanIgnoreReturnValue + public Builder setMaxLiveOffsetErrorMsForUnitSpeed(long maxLiveOffsetErrorMsForUnitSpeed) { + Assertions.checkArgument(maxLiveOffsetErrorMsForUnitSpeed > 0); + this.maxLiveOffsetErrorUsForUnitSpeed = Util.msToUs(maxLiveOffsetErrorMsForUnitSpeed); + return this; + } + + /** + * Sets the increment applied to the target live offset each time the player is rebuffering, in + * milliseconds. + * + * @param targetLiveOffsetIncrementOnRebufferMs The increment applied to the target live offset + * when the player is rebuffering, in milliseconds + * @return This builder, for convenience. + */ + @CanIgnoreReturnValue + public Builder setTargetLiveOffsetIncrementOnRebufferMs( + long targetLiveOffsetIncrementOnRebufferMs) { + Assertions.checkArgument(targetLiveOffsetIncrementOnRebufferMs >= 0); + this.targetLiveOffsetIncrementOnRebufferUs = + Util.msToUs(targetLiveOffsetIncrementOnRebufferMs); + return this; + } + + /** + * Sets the smoothing factor when smoothing the minimum possible live offset that can be + * achieved during playback. + * + *
The live playback speed control keeps track of the minimum possible live offset achievable + * during playback to know whether it can reduce the current target live offset. The minimum + * possible live offset is defined as {@code currentLiveOffset - bufferedDuration}. As the + * minimum possible live offset is constantly changing, it is smoothed over recent samples by + * applying exponential smoothing: {@code smoothedMinPossibleOffset = smoothingFactor x + * smoothedMinPossibleOffset + (1-smoothingFactor) x currentMinPossibleOffset}. + * + * @param minPossibleLiveOffsetSmoothingFactor The smoothing factor. Must be ≥ 0 and < 1. + * @return This builder, for convenience. + */ + @CanIgnoreReturnValue + public Builder setMinPossibleLiveOffsetSmoothingFactor( + float minPossibleLiveOffsetSmoothingFactor) { + Assertions.checkArgument( + minPossibleLiveOffsetSmoothingFactor >= 0 && minPossibleLiveOffsetSmoothingFactor < 1f); + this.minPossibleLiveOffsetSmoothingFactor = minPossibleLiveOffsetSmoothingFactor; + return this; + } + + /** Builds an instance. */ + public DefaultLivePlaybackSpeedControl build() { + return new DefaultLivePlaybackSpeedControl( + fallbackMinPlaybackSpeed, + fallbackMaxPlaybackSpeed, + minUpdateIntervalMs, + proportionalControlFactorUs, + maxLiveOffsetErrorUsForUnitSpeed, + targetLiveOffsetIncrementOnRebufferUs, + minPossibleLiveOffsetSmoothingFactor); + } + } + + private final float fallbackMinPlaybackSpeed; + private final float fallbackMaxPlaybackSpeed; + private final long minUpdateIntervalMs; + private final float proportionalControlFactor; + private final long maxLiveOffsetErrorUsForUnitSpeed; + private final long targetLiveOffsetRebufferDeltaUs; + private final float minPossibleLiveOffsetSmoothingFactor; + + private long mediaConfigurationTargetLiveOffsetUs; + private long targetLiveOffsetOverrideUs; + private long idealTargetLiveOffsetUs; + private long minTargetLiveOffsetUs; + private long maxTargetLiveOffsetUs; + private long currentTargetLiveOffsetUs; + + private float maxPlaybackSpeed; + private float minPlaybackSpeed; + private float adjustedPlaybackSpeed; + private long lastPlaybackSpeedUpdateMs; + + private long smoothedMinPossibleLiveOffsetUs; + private long smoothedMinPossibleLiveOffsetDeviationUs; + + private DefaultLivePlaybackSpeedControl( + float fallbackMinPlaybackSpeed, + float fallbackMaxPlaybackSpeed, + long minUpdateIntervalMs, + float proportionalControlFactor, + long maxLiveOffsetErrorUsForUnitSpeed, + long targetLiveOffsetRebufferDeltaUs, + float minPossibleLiveOffsetSmoothingFactor) { + this.fallbackMinPlaybackSpeed = fallbackMinPlaybackSpeed; + this.fallbackMaxPlaybackSpeed = fallbackMaxPlaybackSpeed; + this.minUpdateIntervalMs = minUpdateIntervalMs; + this.proportionalControlFactor = proportionalControlFactor; + this.maxLiveOffsetErrorUsForUnitSpeed = maxLiveOffsetErrorUsForUnitSpeed; + this.targetLiveOffsetRebufferDeltaUs = targetLiveOffsetRebufferDeltaUs; + this.minPossibleLiveOffsetSmoothingFactor = minPossibleLiveOffsetSmoothingFactor; + mediaConfigurationTargetLiveOffsetUs = C.TIME_UNSET; + targetLiveOffsetOverrideUs = C.TIME_UNSET; + minTargetLiveOffsetUs = C.TIME_UNSET; + maxTargetLiveOffsetUs = C.TIME_UNSET; + minPlaybackSpeed = fallbackMinPlaybackSpeed; + maxPlaybackSpeed = fallbackMaxPlaybackSpeed; + adjustedPlaybackSpeed = 1.0f; + lastPlaybackSpeedUpdateMs = C.TIME_UNSET; + idealTargetLiveOffsetUs = C.TIME_UNSET; + currentTargetLiveOffsetUs = C.TIME_UNSET; + smoothedMinPossibleLiveOffsetUs = C.TIME_UNSET; + 
smoothedMinPossibleLiveOffsetDeviationUs = C.TIME_UNSET; + } + + @Override + public void setLiveConfiguration(LiveConfiguration liveConfiguration) { + mediaConfigurationTargetLiveOffsetUs = Util.msToUs(liveConfiguration.targetOffsetMs); + minTargetLiveOffsetUs = Util.msToUs(liveConfiguration.minOffsetMs); + maxTargetLiveOffsetUs = Util.msToUs(liveConfiguration.maxOffsetMs); + minPlaybackSpeed = + liveConfiguration.minPlaybackSpeed != C.RATE_UNSET + ? liveConfiguration.minPlaybackSpeed + : fallbackMinPlaybackSpeed; + maxPlaybackSpeed = + liveConfiguration.maxPlaybackSpeed != C.RATE_UNSET + ? liveConfiguration.maxPlaybackSpeed + : fallbackMaxPlaybackSpeed; + if (minPlaybackSpeed == 1f && maxPlaybackSpeed == 1f) { + // Don't bother calculating adjustments if it's not possible to change the speed. + mediaConfigurationTargetLiveOffsetUs = C.TIME_UNSET; + } + maybeResetTargetLiveOffsetUs(); + } + + @Override + public void setTargetLiveOffsetOverrideUs(long liveOffsetUs) { + targetLiveOffsetOverrideUs = liveOffsetUs; + maybeResetTargetLiveOffsetUs(); + } + + @Override + public void notifyRebuffer() { + if (currentTargetLiveOffsetUs == C.TIME_UNSET) { + return; + } + currentTargetLiveOffsetUs += targetLiveOffsetRebufferDeltaUs; + if (maxTargetLiveOffsetUs != C.TIME_UNSET + && currentTargetLiveOffsetUs > maxTargetLiveOffsetUs) { + currentTargetLiveOffsetUs = maxTargetLiveOffsetUs; + } + lastPlaybackSpeedUpdateMs = C.TIME_UNSET; + } + + @Override + public float getAdjustedPlaybackSpeed(long liveOffsetUs, long bufferedDurationUs) { + if (mediaConfigurationTargetLiveOffsetUs == C.TIME_UNSET) { + return 1f; + } + + updateSmoothedMinPossibleLiveOffsetUs(liveOffsetUs, bufferedDurationUs); + + if (lastPlaybackSpeedUpdateMs != C.TIME_UNSET + && SystemClock.elapsedRealtime() - lastPlaybackSpeedUpdateMs < minUpdateIntervalMs) { + return adjustedPlaybackSpeed; + } + lastPlaybackSpeedUpdateMs = SystemClock.elapsedRealtime(); + + adjustTargetLiveOffsetUs(liveOffsetUs); + long liveOffsetErrorUs = liveOffsetUs - currentTargetLiveOffsetUs; + if (Math.abs(liveOffsetErrorUs) < maxLiveOffsetErrorUsForUnitSpeed) { + adjustedPlaybackSpeed = 1f; + } else { + float calculatedSpeed = 1f + proportionalControlFactor * liveOffsetErrorUs; + adjustedPlaybackSpeed = + Util.constrainValue(calculatedSpeed, minPlaybackSpeed, maxPlaybackSpeed); + } + return adjustedPlaybackSpeed; + } + + @Override + public long getTargetLiveOffsetUs() { + return currentTargetLiveOffsetUs; + } + + private void maybeResetTargetLiveOffsetUs() { + long idealOffsetUs = C.TIME_UNSET; + if (mediaConfigurationTargetLiveOffsetUs != C.TIME_UNSET) { + idealOffsetUs = + targetLiveOffsetOverrideUs != C.TIME_UNSET + ? 
targetLiveOffsetOverrideUs + : mediaConfigurationTargetLiveOffsetUs; + if (minTargetLiveOffsetUs != C.TIME_UNSET && idealOffsetUs < minTargetLiveOffsetUs) { + idealOffsetUs = minTargetLiveOffsetUs; + } + if (maxTargetLiveOffsetUs != C.TIME_UNSET && idealOffsetUs > maxTargetLiveOffsetUs) { + idealOffsetUs = maxTargetLiveOffsetUs; + } + } + if (idealTargetLiveOffsetUs == idealOffsetUs) { + return; + } + idealTargetLiveOffsetUs = idealOffsetUs; + currentTargetLiveOffsetUs = idealOffsetUs; + smoothedMinPossibleLiveOffsetUs = C.TIME_UNSET; + smoothedMinPossibleLiveOffsetDeviationUs = C.TIME_UNSET; + lastPlaybackSpeedUpdateMs = C.TIME_UNSET; + } + + private void updateSmoothedMinPossibleLiveOffsetUs(long liveOffsetUs, long bufferedDurationUs) { + long minPossibleLiveOffsetUs = liveOffsetUs - bufferedDurationUs; + if (smoothedMinPossibleLiveOffsetUs == C.TIME_UNSET) { + smoothedMinPossibleLiveOffsetUs = minPossibleLiveOffsetUs; + smoothedMinPossibleLiveOffsetDeviationUs = 0; + } else { + // Use the maximum here to ensure we keep track of the upper bound of what is safely possible, + // not the average. + smoothedMinPossibleLiveOffsetUs = + max( + minPossibleLiveOffsetUs, + smooth( + smoothedMinPossibleLiveOffsetUs, + minPossibleLiveOffsetUs, + minPossibleLiveOffsetSmoothingFactor)); + long minPossibleLiveOffsetDeviationUs = + abs(minPossibleLiveOffsetUs - smoothedMinPossibleLiveOffsetUs); + smoothedMinPossibleLiveOffsetDeviationUs = + smooth( + smoothedMinPossibleLiveOffsetDeviationUs, + minPossibleLiveOffsetDeviationUs, + minPossibleLiveOffsetSmoothingFactor); + } + } + + private void adjustTargetLiveOffsetUs(long liveOffsetUs) { + // Stay in a safe distance (3 standard deviations = >99%) to the minimum possible live offset. + long safeOffsetUs = + smoothedMinPossibleLiveOffsetUs + 3 * smoothedMinPossibleLiveOffsetDeviationUs; + if (currentTargetLiveOffsetUs > safeOffsetUs) { + // There is room for decreasing the target offset towards the ideal or safe offset (whichever + // is larger). We want to limit the decrease so that the playback speed delta we achieve is + // the same as the maximum delta when slowing down towards the target. + long minUpdateIntervalUs = Util.msToUs(minUpdateIntervalMs); + long decrementToOffsetCurrentSpeedUs = + (long) ((adjustedPlaybackSpeed - 1f) * minUpdateIntervalUs); + long decrementToIncreaseSpeedUs = (long) ((maxPlaybackSpeed - 1f) * minUpdateIntervalUs); + long maxDecrementUs = decrementToOffsetCurrentSpeedUs + decrementToIncreaseSpeedUs; + currentTargetLiveOffsetUs = + max(safeOffsetUs, idealTargetLiveOffsetUs, currentTargetLiveOffsetUs - maxDecrementUs); + } else { + // We'd like to reach a stable condition where the current live offset stays just below the + // safe offset. But don't increase the target offset to more than what would allow us to slow + // down gradually from the current offset. 
+ long offsetWhenSlowingDownNowUs = + liveOffsetUs - (long) (max(0f, adjustedPlaybackSpeed - 1f) / proportionalControlFactor); + currentTargetLiveOffsetUs = + Util.constrainValue(offsetWhenSlowingDownNowUs, currentTargetLiveOffsetUs, safeOffsetUs); + if (maxTargetLiveOffsetUs != C.TIME_UNSET + && currentTargetLiveOffsetUs > maxTargetLiveOffsetUs) { + currentTargetLiveOffsetUs = maxTargetLiveOffsetUs; + } + } + } + + private static long smooth(long smoothedValue, long newValue, float smoothingFactor) { + return (long) (smoothingFactor * smoothedValue + (1f - smoothingFactor) * newValue); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultLoadControl.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultLoadControl.java index 1244b96d94..078d70745a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultLoadControl.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultLoadControl.java @@ -15,30 +15,33 @@ */ package com.google.android.exoplayer2; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static java.lang.Math.max; +import static java.lang.Math.min; + +import androidx.annotation.Nullable; import com.google.android.exoplayer2.source.TrackGroupArray; -import com.google.android.exoplayer2.trackselection.TrackSelectionArray; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.upstream.Allocator; import com.google.android.exoplayer2.upstream.DefaultAllocator; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.CanIgnoreReturnValue; -/** - * The default {@link LoadControl} implementation. - */ +/** The default {@link LoadControl} implementation. */ public class DefaultLoadControl implements LoadControl { /** * The default minimum duration of media that the player will attempt to ensure is buffered at all - * times, in milliseconds. This value is only applied to playbacks without video. + * times, in milliseconds. */ - public static final int DEFAULT_MIN_BUFFER_MS = 15000; + public static final int DEFAULT_MIN_BUFFER_MS = 50_000; /** * The default maximum duration of media that the player will attempt to buffer, in milliseconds. - * For playbacks with video, this is also the default minimum duration of media that the player - * will attempt to ensure is buffered. */ - public static final int DEFAULT_MAX_BUFFER_MS = 50000; + public static final int DEFAULT_MAX_BUFFER_MS = 50_000; /** * The default duration of media that must be buffered for playback to start or resume following a @@ -59,7 +62,7 @@ public class DefaultLoadControl implements LoadControl { public static final int DEFAULT_TARGET_BUFFER_BYTES = C.LENGTH_UNSET; /** The default prioritization of buffer time constraints over size constraints. */ - public static final boolean DEFAULT_PRIORITIZE_TIME_OVER_SIZE_THRESHOLDS = true; + public static final boolean DEFAULT_PRIORITIZE_TIME_OVER_SIZE_THRESHOLDS = false; /** The default back buffer duration in milliseconds. */ public static final int DEFAULT_BACK_BUFFER_DURATION_MS = 0; @@ -68,10 +71,10 @@ public class DefaultLoadControl implements LoadControl { public static final boolean DEFAULT_RETAIN_BACK_BUFFER_FROM_KEYFRAME = false; /** A default size in bytes for a video buffer. 
*/ - public static final int DEFAULT_VIDEO_BUFFER_SIZE = 500 * C.DEFAULT_BUFFER_SEGMENT_SIZE; + public static final int DEFAULT_VIDEO_BUFFER_SIZE = 2000 * C.DEFAULT_BUFFER_SEGMENT_SIZE; /** A default size in bytes for an audio buffer. */ - public static final int DEFAULT_AUDIO_BUFFER_SIZE = 54 * C.DEFAULT_BUFFER_SEGMENT_SIZE; + public static final int DEFAULT_AUDIO_BUFFER_SIZE = 200 * C.DEFAULT_BUFFER_SEGMENT_SIZE; /** A default size in bytes for a text buffer. */ public static final int DEFAULT_TEXT_BUFFER_SIZE = 2 * C.DEFAULT_BUFFER_SEGMENT_SIZE; @@ -82,16 +85,24 @@ public class DefaultLoadControl implements LoadControl { /** A default size in bytes for a camera motion buffer. */ public static final int DEFAULT_CAMERA_MOTION_BUFFER_SIZE = 2 * C.DEFAULT_BUFFER_SEGMENT_SIZE; + /** A default size in bytes for an image buffer. */ + public static final int DEFAULT_IMAGE_BUFFER_SIZE = 2 * C.DEFAULT_BUFFER_SEGMENT_SIZE; + /** A default size in bytes for a muxed buffer (e.g. containing video, audio and text). */ public static final int DEFAULT_MUXED_BUFFER_SIZE = DEFAULT_VIDEO_BUFFER_SIZE + DEFAULT_AUDIO_BUFFER_SIZE + DEFAULT_TEXT_BUFFER_SIZE; + /** + * The buffer size in bytes that will be used as a minimum target buffer in all cases. This is + * also the default target buffer before tracks are selected. + */ + public static final int DEFAULT_MIN_BUFFER_SIZE = 200 * C.DEFAULT_BUFFER_SEGMENT_SIZE; + /** Builder for {@link DefaultLoadControl}. */ public static final class Builder { - private DefaultAllocator allocator; - private int minBufferAudioMs; - private int minBufferVideoMs; + @Nullable private DefaultAllocator allocator; + private int minBufferMs; private int maxBufferMs; private int bufferForPlaybackMs; private int bufferForPlaybackAfterRebufferMs; @@ -99,12 +110,11 @@ public static final class Builder { private boolean prioritizeTimeOverSizeThresholds; private int backBufferDurationMs; private boolean retainBackBufferFromKeyframe; - private boolean createDefaultLoadControlCalled; + private boolean buildCalled; /** Constructs a new instance. */ public Builder() { - minBufferAudioMs = DEFAULT_MIN_BUFFER_MS; - minBufferVideoMs = DEFAULT_MAX_BUFFER_MS; + minBufferMs = DEFAULT_MIN_BUFFER_MS; maxBufferMs = DEFAULT_MAX_BUFFER_MS; bufferForPlaybackMs = DEFAULT_BUFFER_FOR_PLAYBACK_MS; bufferForPlaybackAfterRebufferMs = DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS; @@ -119,10 +129,11 @@ public Builder() { * * @param allocator The {@link DefaultAllocator}. * @return This builder, for convenience. - * @throws IllegalStateException If {@link #createDefaultLoadControl()} has already been called. + * @throws IllegalStateException If {@link #build()} has already been called. */ + @CanIgnoreReturnValue public Builder setAllocator(DefaultAllocator allocator) { - Assertions.checkState(!createDefaultLoadControlCalled); + checkState(!buildCalled); this.allocator = allocator; return this; } @@ -140,14 +151,15 @@ public Builder setAllocator(DefaultAllocator allocator) { * for playback to resume after a rebuffer, in milliseconds. A rebuffer is defined to be * caused by buffer depletion rather than a user action. * @return This builder, for convenience. - * @throws IllegalStateException If {@link #createDefaultLoadControl()} has already been called. + * @throws IllegalStateException If {@link #build()} has already been called. 
*/ + @CanIgnoreReturnValue public Builder setBufferDurationsMs( int minBufferMs, int maxBufferMs, int bufferForPlaybackMs, int bufferForPlaybackAfterRebufferMs) { - Assertions.checkState(!createDefaultLoadControlCalled); + checkState(!buildCalled); assertGreaterOrEqual(bufferForPlaybackMs, 0, "bufferForPlaybackMs", "0"); assertGreaterOrEqual( bufferForPlaybackAfterRebufferMs, 0, "bufferForPlaybackAfterRebufferMs", "0"); @@ -158,8 +170,7 @@ public Builder setBufferDurationsMs( "minBufferMs", "bufferForPlaybackAfterRebufferMs"); assertGreaterOrEqual(maxBufferMs, minBufferMs, "maxBufferMs", "minBufferMs"); - this.minBufferAudioMs = minBufferMs; - this.minBufferVideoMs = minBufferMs; + this.minBufferMs = minBufferMs; this.maxBufferMs = maxBufferMs; this.bufferForPlaybackMs = bufferForPlaybackMs; this.bufferForPlaybackAfterRebufferMs = bufferForPlaybackAfterRebufferMs; @@ -172,10 +183,11 @@ public Builder setBufferDurationsMs( * * @param targetBufferBytes The target buffer size in bytes. * @return This builder, for convenience. - * @throws IllegalStateException If {@link #createDefaultLoadControl()} has already been called. + * @throws IllegalStateException If {@link #build()} has already been called. */ + @CanIgnoreReturnValue public Builder setTargetBufferBytes(int targetBufferBytes) { - Assertions.checkState(!createDefaultLoadControlCalled); + checkState(!buildCalled); this.targetBufferBytes = targetBufferBytes; return this; } @@ -187,10 +199,11 @@ public Builder setTargetBufferBytes(int targetBufferBytes) { * @param prioritizeTimeOverSizeThresholds Whether the load control prioritizes buffer time * constraints over buffer size constraints. * @return This builder, for convenience. - * @throws IllegalStateException If {@link #createDefaultLoadControl()} has already been called. + * @throws IllegalStateException If {@link #build()} has already been called. */ + @CanIgnoreReturnValue public Builder setPrioritizeTimeOverSizeThresholds(boolean prioritizeTimeOverSizeThresholds) { - Assertions.checkState(!createDefaultLoadControlCalled); + checkState(!buildCalled); this.prioritizeTimeOverSizeThresholds = prioritizeTimeOverSizeThresholds; return this; } @@ -203,27 +216,35 @@ public Builder setPrioritizeTimeOverSizeThresholds(boolean prioritizeTimeOverSiz * @param retainBackBufferFromKeyframe Whether the back buffer is retained from the previous * keyframe. * @return This builder, for convenience. - * @throws IllegalStateException If {@link #createDefaultLoadControl()} has already been called. + * @throws IllegalStateException If {@link #build()} has already been called. */ + @CanIgnoreReturnValue public Builder setBackBuffer(int backBufferDurationMs, boolean retainBackBufferFromKeyframe) { - Assertions.checkState(!createDefaultLoadControlCalled); + checkState(!buildCalled); assertGreaterOrEqual(backBufferDurationMs, 0, "backBufferDurationMs", "0"); this.backBufferDurationMs = backBufferDurationMs; this.retainBackBufferFromKeyframe = retainBackBufferFromKeyframe; return this; } - /** Creates a {@link DefaultLoadControl}. */ + /** + * @deprecated use {@link #build} instead. + */ + @Deprecated public DefaultLoadControl createDefaultLoadControl() { - Assertions.checkState(!createDefaultLoadControlCalled); - createDefaultLoadControlCalled = true; + return build(); + } + + /** Creates a {@link DefaultLoadControl}. 
*/ + public DefaultLoadControl build() { + checkState(!buildCalled); + buildCalled = true; if (allocator == null) { allocator = new DefaultAllocator(/* trimOnReset= */ true, C.DEFAULT_BUFFER_SEGMENT_SIZE); } return new DefaultLoadControl( allocator, - minBufferAudioMs, - minBufferVideoMs, + minBufferMs, maxBufferMs, bufferForPlaybackMs, bufferForPlaybackAfterRebufferMs, @@ -236,8 +257,7 @@ public DefaultLoadControl createDefaultLoadControl() { private final DefaultAllocator allocator; - private final long minBufferAudioUs; - private final long minBufferVideoUs; + private final long minBufferUs; private final long maxBufferUs; private final long bufferForPlaybackUs; private final long bufferForPlaybackAfterRebufferUs; @@ -246,23 +266,14 @@ public DefaultLoadControl createDefaultLoadControl() { private final long backBufferDurationUs; private final boolean retainBackBufferFromKeyframe; - private int targetBufferSize; - private boolean isBuffering; - private boolean hasVideo; + private int targetBufferBytes; + private boolean isLoading; /** Constructs a new instance, using the {@code DEFAULT_*} constants defined in this class. */ - @SuppressWarnings("deprecation") public DefaultLoadControl() { - this(new DefaultAllocator(true, C.DEFAULT_BUFFER_SEGMENT_SIZE)); - } - - /** @deprecated Use {@link Builder} instead. */ - @Deprecated - public DefaultLoadControl(DefaultAllocator allocator) { this( - allocator, - /* minBufferAudioMs= */ DEFAULT_MIN_BUFFER_MS, - /* minBufferVideoMs= */ DEFAULT_MAX_BUFFER_MS, + new DefaultAllocator(true, C.DEFAULT_BUFFER_SEGMENT_SIZE), + DEFAULT_MIN_BUFFER_MS, DEFAULT_MAX_BUFFER_MS, DEFAULT_BUFFER_FOR_PLAYBACK_MS, DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS, @@ -272,8 +283,6 @@ public DefaultLoadControl(DefaultAllocator allocator) { DEFAULT_RETAIN_BACK_BUFFER_FROM_KEYFRAME); } - /** @deprecated Use {@link Builder} instead. 
*/ - @Deprecated public DefaultLoadControl( DefaultAllocator allocator, int minBufferMs, @@ -281,61 +290,33 @@ public DefaultLoadControl( int bufferForPlaybackMs, int bufferForPlaybackAfterRebufferMs, int targetBufferBytes, - boolean prioritizeTimeOverSizeThresholds) { - this( - allocator, - /* minBufferAudioMs= */ minBufferMs, - /* minBufferVideoMs= */ minBufferMs, - maxBufferMs, - bufferForPlaybackMs, - bufferForPlaybackAfterRebufferMs, - targetBufferBytes, - prioritizeTimeOverSizeThresholds, - DEFAULT_BACK_BUFFER_DURATION_MS, - DEFAULT_RETAIN_BACK_BUFFER_FROM_KEYFRAME); - } - - protected DefaultLoadControl( - DefaultAllocator allocator, - int minBufferAudioMs, - int minBufferVideoMs, - int maxBufferMs, - int bufferForPlaybackMs, - int bufferForPlaybackAfterRebufferMs, - int targetBufferBytes, boolean prioritizeTimeOverSizeThresholds, int backBufferDurationMs, boolean retainBackBufferFromKeyframe) { assertGreaterOrEqual(bufferForPlaybackMs, 0, "bufferForPlaybackMs", "0"); assertGreaterOrEqual( bufferForPlaybackAfterRebufferMs, 0, "bufferForPlaybackAfterRebufferMs", "0"); + assertGreaterOrEqual(minBufferMs, bufferForPlaybackMs, "minBufferMs", "bufferForPlaybackMs"); assertGreaterOrEqual( - minBufferAudioMs, bufferForPlaybackMs, "minBufferAudioMs", "bufferForPlaybackMs"); - assertGreaterOrEqual( - minBufferVideoMs, bufferForPlaybackMs, "minBufferVideoMs", "bufferForPlaybackMs"); - assertGreaterOrEqual( - minBufferAudioMs, - bufferForPlaybackAfterRebufferMs, - "minBufferAudioMs", - "bufferForPlaybackAfterRebufferMs"); - assertGreaterOrEqual( - minBufferVideoMs, + minBufferMs, bufferForPlaybackAfterRebufferMs, - "minBufferVideoMs", + "minBufferMs", "bufferForPlaybackAfterRebufferMs"); - assertGreaterOrEqual(maxBufferMs, minBufferAudioMs, "maxBufferMs", "minBufferAudioMs"); - assertGreaterOrEqual(maxBufferMs, minBufferVideoMs, "maxBufferMs", "minBufferVideoMs"); + assertGreaterOrEqual(maxBufferMs, minBufferMs, "maxBufferMs", "minBufferMs"); assertGreaterOrEqual(backBufferDurationMs, 0, "backBufferDurationMs", "0"); this.allocator = allocator; - this.minBufferAudioUs = C.msToUs(minBufferAudioMs); - this.minBufferVideoUs = C.msToUs(minBufferVideoMs); - this.maxBufferUs = C.msToUs(maxBufferMs); - this.bufferForPlaybackUs = C.msToUs(bufferForPlaybackMs); - this.bufferForPlaybackAfterRebufferUs = C.msToUs(bufferForPlaybackAfterRebufferMs); + this.minBufferUs = Util.msToUs(minBufferMs); + this.maxBufferUs = Util.msToUs(maxBufferMs); + this.bufferForPlaybackUs = Util.msToUs(bufferForPlaybackMs); + this.bufferForPlaybackAfterRebufferUs = Util.msToUs(bufferForPlaybackAfterRebufferMs); this.targetBufferBytesOverwrite = targetBufferBytes; + this.targetBufferBytes = + targetBufferBytesOverwrite != C.LENGTH_UNSET + ? targetBufferBytesOverwrite + : DEFAULT_MIN_BUFFER_SIZE; this.prioritizeTimeOverSizeThresholds = prioritizeTimeOverSizeThresholds; - this.backBufferDurationUs = C.msToUs(backBufferDurationMs); + this.backBufferDurationUs = Util.msToUs(backBufferDurationMs); this.retainBackBufferFromKeyframe = retainBackBufferFromKeyframe; } @@ -345,14 +326,13 @@ public void onPrepared() { } @Override - public void onTracksSelected(Renderer[] renderers, TrackGroupArray trackGroups, - TrackSelectionArray trackSelections) { - hasVideo = hasVideo(renderers, trackSelections); - targetBufferSize = + public void onTracksSelected( + Renderer[] renderers, TrackGroupArray trackGroups, ExoTrackSelection[] trackSelections) { + targetBufferBytes = targetBufferBytesOverwrite == C.LENGTH_UNSET - ? 
calculateTargetBufferSize(renderers, trackSelections) + ? calculateTargetBufferBytes(renderers, trackSelections) : targetBufferBytesOverwrite; - allocator.setTargetBufferSize(targetBufferSize); + allocator.setTargetBufferSize(targetBufferBytes); } @Override @@ -381,33 +361,44 @@ public boolean retainBackBufferFromKeyframe() { } @Override - public boolean shouldContinueLoading(long bufferedDurationUs, float playbackSpeed) { - boolean targetBufferSizeReached = allocator.getTotalBytesAllocated() >= targetBufferSize; - long minBufferUs = hasVideo ? minBufferVideoUs : minBufferAudioUs; + public boolean shouldContinueLoading( + long playbackPositionUs, long bufferedDurationUs, float playbackSpeed) { + boolean targetBufferSizeReached = allocator.getTotalBytesAllocated() >= targetBufferBytes; + long minBufferUs = this.minBufferUs; if (playbackSpeed > 1) { // The playback speed is faster than real time, so scale up the minimum required media // duration to keep enough media buffered for a playout duration of minBufferUs. long mediaDurationMinBufferUs = Util.getMediaDurationForPlayoutDuration(minBufferUs, playbackSpeed); - minBufferUs = Math.min(mediaDurationMinBufferUs, maxBufferUs); + minBufferUs = min(mediaDurationMinBufferUs, maxBufferUs); } + // Prevent playback from getting stuck if minBufferUs is too small. + minBufferUs = max(minBufferUs, 500_000); if (bufferedDurationUs < minBufferUs) { - isBuffering = prioritizeTimeOverSizeThresholds || !targetBufferSizeReached; + isLoading = prioritizeTimeOverSizeThresholds || !targetBufferSizeReached; + if (!isLoading && bufferedDurationUs < 500_000) { + Log.w( + "DefaultLoadControl", + "Target buffer size reached with less than 500ms of buffered media data."); + } } else if (bufferedDurationUs >= maxBufferUs || targetBufferSizeReached) { - isBuffering = false; - } // Else don't change the buffering state - return isBuffering; + isLoading = false; + } // Else don't change the loading state. + return isLoading; } @Override public boolean shouldStartPlayback( - long bufferedDurationUs, float playbackSpeed, boolean rebuffering) { + long bufferedDurationUs, float playbackSpeed, boolean rebuffering, long targetLiveOffsetUs) { bufferedDurationUs = Util.getPlayoutDurationForMediaDuration(bufferedDurationUs, playbackSpeed); long minBufferDurationUs = rebuffering ? bufferForPlaybackAfterRebufferUs : bufferForPlaybackUs; + if (targetLiveOffsetUs != C.TIME_UNSET) { + minBufferDurationUs = min(targetLiveOffsetUs / 2, minBufferDurationUs); + } return minBufferDurationUs <= 0 || bufferedDurationUs >= minBufferDurationUs || (!prioritizeTimeOverSizeThresholds - && allocator.getTotalBytesAllocated() >= targetBufferSize); + && allocator.getTotalBytesAllocated() >= targetBufferBytes); } /** @@ -418,26 +409,29 @@ public boolean shouldStartPlayback( * @param trackSelectionArray The selected tracks. * @return The target buffer size in bytes. 
*/ - protected int calculateTargetBufferSize( - Renderer[] renderers, TrackSelectionArray trackSelectionArray) { + protected int calculateTargetBufferBytes( + Renderer[] renderers, ExoTrackSelection[] trackSelectionArray) { int targetBufferSize = 0; for (int i = 0; i < renderers.length; i++) { - if (trackSelectionArray.get(i) != null) { + if (trackSelectionArray[i] != null) { targetBufferSize += getDefaultBufferSize(renderers[i].getTrackType()); } } - return targetBufferSize; + return max(DEFAULT_MIN_BUFFER_SIZE, targetBufferSize); } private void reset(boolean resetAllocator) { - targetBufferSize = 0; - isBuffering = false; + targetBufferBytes = + targetBufferBytesOverwrite == C.LENGTH_UNSET + ? DEFAULT_MIN_BUFFER_SIZE + : targetBufferBytesOverwrite; + isLoading = false; if (resetAllocator) { allocator.reset(); } } - private static int getDefaultBufferSize(int trackType) { + private static int getDefaultBufferSize(@C.TrackType int trackType) { switch (trackType) { case C.TRACK_TYPE_DEFAULT: return DEFAULT_MUXED_BUFFER_SIZE; @@ -451,22 +445,16 @@ private static int getDefaultBufferSize(int trackType) { return DEFAULT_METADATA_BUFFER_SIZE; case C.TRACK_TYPE_CAMERA_MOTION: return DEFAULT_CAMERA_MOTION_BUFFER_SIZE; + case C.TRACK_TYPE_IMAGE: + return DEFAULT_IMAGE_BUFFER_SIZE; case C.TRACK_TYPE_NONE: return 0; + case C.TRACK_TYPE_UNKNOWN: default: throw new IllegalArgumentException(); } } - private static boolean hasVideo(Renderer[] renderers, TrackSelectionArray trackSelectionArray) { - for (int i = 0; i < renderers.length; i++) { - if (renderers[i].getTrackType() == C.TRACK_TYPE_VIDEO && trackSelectionArray.get(i) != null) { - return true; - } - } - return false; - } - private static void assertGreaterOrEqual(int value1, int value2, String name1, String name2) { Assertions.checkArgument(value1 >= value2, name1 + " cannot be less than " + name2); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultMediaClock.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultMediaClock.java index 1971a4cefc..412a6fdbff 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultMediaClock.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultMediaClock.java @@ -16,32 +16,31 @@ package com.google.android.exoplayer2; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Clock; import com.google.android.exoplayer2.util.MediaClock; import com.google.android.exoplayer2.util.StandaloneMediaClock; /** - * Default {@link MediaClock} which uses a renderer media clock and falls back to a - * {@link StandaloneMediaClock} if necessary. + * Default {@link MediaClock} which uses a renderer media clock and falls back to a {@link + * StandaloneMediaClock} if necessary. */ /* package */ final class DefaultMediaClock implements MediaClock { - /** - * Listener interface to be notified of changes to the active playback parameters. - */ - public interface PlaybackParameterListener { + /** Listener interface to be notified of changes to the active playback parameters. */ + public interface PlaybackParametersListener { /** * Called when the active playback parameters changed. Will not be called for {@link * #setPlaybackParameters(PlaybackParameters)}. * - * @param newPlaybackParameters The newly active {@link PlaybackParameters}. + * @param newPlaybackParameters The newly active playback parameters. 
*/ void onPlaybackParametersChanged(PlaybackParameters newPlaybackParameters); } private final StandaloneMediaClock standaloneClock; - private final PlaybackParameterListener listener; + private final PlaybackParametersListener listener; @Nullable private Renderer rendererClockSource; @Nullable private MediaClock rendererClock; @@ -49,30 +48,25 @@ public interface PlaybackParameterListener { private boolean standaloneClockIsStarted; /** - * Creates a new instance with listener for playback parameter changes and a {@link Clock} to use - * for the standalone clock implementation. + * Creates a new instance with a listener for playback parameters changes and a {@link Clock} to + * use for the standalone clock implementation. * - * @param listener A {@link PlaybackParameterListener} to listen for playback parameter - * changes. + * @param listener A {@link PlaybackParametersListener} to listen for playback parameters changes. * @param clock A {@link Clock}. */ - public DefaultMediaClock(PlaybackParameterListener listener, Clock clock) { + public DefaultMediaClock(PlaybackParametersListener listener, Clock clock) { this.listener = listener; this.standaloneClock = new StandaloneMediaClock(clock); isUsingStandaloneClock = true; } - /** - * Starts the standalone fallback clock. - */ + /** Starts the standalone fallback clock. */ public void start() { standaloneClockIsStarted = true; standaloneClock.start(); } - /** - * Stops the standalone fallback clock. - */ + /** Stops the standalone fallback clock. */ public void stop() { standaloneClockIsStarted = false; standaloneClock.stop(); @@ -96,7 +90,7 @@ public void resetPosition(long positionUs) { * clock is already provided. */ public void onRendererEnabled(Renderer renderer) throws ExoPlaybackException { - MediaClock rendererMediaClock = renderer.getMediaClock(); + @Nullable MediaClock rendererMediaClock = renderer.getMediaClock(); if (rendererMediaClock != null && rendererMediaClock != rendererClock) { if (rendererClock != null) { throw ExoPlaybackException.createForUnexpected( @@ -136,7 +130,9 @@ public long syncAndGetPositionUs(boolean isReadingAhead) { @Override public long getPositionUs() { - return isUsingStandaloneClock ? standaloneClock.getPositionUs() : rendererClock.getPositionUs(); + return isUsingStandaloneClock + ? standaloneClock.getPositionUs() + : Assertions.checkNotNull(rendererClock).getPositionUs(); } @Override @@ -163,6 +159,9 @@ private void syncClocks(boolean isReadingAhead) { } return; } + // We are either already using the renderer clock or switching from the standalone to the + // renderer clock, so it must be non-null. + MediaClock rendererClock = Assertions.checkNotNull(this.rendererClock); long rendererClockPositionUs = rendererClock.getPositionUs(); if (isUsingStandaloneClock) { // Ensure enabling the renderer clock doesn't jump backwards in time. 
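In the DefaultLoadControl hunks above, Builder.createDefaultLoadControl() is deprecated in favor of build(), the separate audio/video minimum buffers collapse into a single minBufferMs, and DEFAULT_PRIORITIZE_TIME_OVER_SIZE_THRESHOLDS flips to false. A minimal call-site sketch of the Builder path, using only methods and constants visible in those hunks; the wrapper class and method name below are illustrative and not part of this diff:

    import com.google.android.exoplayer2.DefaultLoadControl;

    final class LoadControlSetup {
      // Illustrative helper (not part of this diff): builds a load control via the
      // Builder API instead of the deprecated createDefaultLoadControl().
      static DefaultLoadControl newLoadControl() {
        return new DefaultLoadControl.Builder()
            .setBufferDurationsMs(
                DefaultLoadControl.DEFAULT_MIN_BUFFER_MS,   // now 50_000 ms in this revision
                DefaultLoadControl.DEFAULT_MAX_BUFFER_MS,   // 50_000 ms
                DefaultLoadControl.DEFAULT_BUFFER_FOR_PLAYBACK_MS,
                DefaultLoadControl.DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS)
            .setPrioritizeTimeOverSizeThresholds(false)     // matches the new default above
            .build();                                       // replaces createDefaultLoadControl()
      }
    }
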
diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultRenderersFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultRenderersFactory.java index f53d72f598..5baba45950 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultRenderersFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/DefaultRenderersFactory.java @@ -15,19 +15,22 @@ */ package com.google.android.exoplayer2; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.content.Context; import android.media.MediaCodec; +import android.media.PlaybackParams; import android.os.Handler; import android.os.Looper; import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.audio.AudioCapabilities; -import com.google.android.exoplayer2.audio.AudioProcessor; import com.google.android.exoplayer2.audio.AudioRendererEventListener; +import com.google.android.exoplayer2.audio.AudioSink; import com.google.android.exoplayer2.audio.DefaultAudioSink; import com.google.android.exoplayer2.audio.MediaCodecAudioRenderer; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.drm.FrameworkMediaCrypto; +import com.google.android.exoplayer2.mediacodec.DefaultMediaCodecAdapterFactory; +import com.google.android.exoplayer2.mediacodec.MediaCodecAdapter; import com.google.android.exoplayer2.mediacodec.MediaCodecSelector; import com.google.android.exoplayer2.metadata.MetadataOutput; import com.google.android.exoplayer2.metadata.MetadataRenderer; @@ -38,15 +41,15 @@ import com.google.android.exoplayer2.video.MediaCodecVideoRenderer; import com.google.android.exoplayer2.video.VideoRendererEventListener; import com.google.android.exoplayer2.video.spherical.CameraMotionRenderer; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.lang.reflect.Constructor; import java.util.ArrayList; -/** - * Default {@link RenderersFactory} implementation. - */ +/** Default {@link RenderersFactory} implementation. */ public class DefaultRenderersFactory implements RenderersFactory { /** @@ -61,172 +64,206 @@ public class DefaultRenderersFactory implements RenderersFactory { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({EXTENSION_RENDERER_MODE_OFF, EXTENSION_RENDERER_MODE_ON, EXTENSION_RENDERER_MODE_PREFER}) public @interface ExtensionRendererMode {} - /** - * Do not allow use of extension renderers. - */ + /** Do not allow use of extension renderers. */ public static final int EXTENSION_RENDERER_MODE_OFF = 0; /** * Allow use of extension renderers. Extension renderers are indexed after core renderers of the * same type. A {@link TrackSelector} that prefers the first suitable renderer will therefore - * prefer to use a core renderer to an extension renderer in the case that both are able to play - * a given track. + * prefer to use a core renderer to an extension renderer in the case that both are able to play a + * given track. */ public static final int EXTENSION_RENDERER_MODE_ON = 1; /** * Allow use of extension renderers. Extension renderers are indexed before core renderers of the * same type. 
A {@link TrackSelector} that prefers the first suitable renderer will therefore - * prefer to use an extension renderer to a core renderer in the case that both are able to play - * a given track. + * prefer to use an extension renderer to a core renderer in the case that both are able to play a + * given track. */ public static final int EXTENSION_RENDERER_MODE_PREFER = 2; - private static final String TAG = "DefaultRenderersFactory"; + /** + * The maximum number of frames that can be dropped between invocations of {@link + * VideoRendererEventListener#onDroppedFrames(int, long)}. + */ + public static final int MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY = 50; - protected static final int MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY = 50; + private static final String TAG = "DefaultRenderersFactory"; private final Context context; - @Nullable private DrmSessionManager drmSessionManager; - @ExtensionRendererMode private int extensionRendererMode; + private final DefaultMediaCodecAdapterFactory codecAdapterFactory; + private @ExtensionRendererMode int extensionRendererMode; private long allowedVideoJoiningTimeMs; - private boolean playClearSamplesWithoutKeys; private boolean enableDecoderFallback; private MediaCodecSelector mediaCodecSelector; + private boolean enableFloatOutput; + private boolean enableAudioTrackPlaybackParams; + private boolean enableOffload; - /** @param context A {@link Context}. */ + /** + * @param context A {@link Context}. + */ public DefaultRenderersFactory(Context context) { this.context = context; + codecAdapterFactory = new DefaultMediaCodecAdapterFactory(); extensionRendererMode = EXTENSION_RENDERER_MODE_OFF; allowedVideoJoiningTimeMs = DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS; mediaCodecSelector = MediaCodecSelector.DEFAULT; } /** - * @deprecated Use {@link #DefaultRenderersFactory(Context)} and pass {@link DrmSessionManager} - * directly to {@link SimpleExoPlayer.Builder}. + * Sets the extension renderer mode, which determines if and how available extension renderers are + * used. Note that extensions must be included in the application build for them to be considered + * available. + * + *
The default value is {@link #EXTENSION_RENDERER_MODE_OFF}. + * + * @param extensionRendererMode The extension renderer mode. + * @return This factory, for convenience. */ - @Deprecated - @SuppressWarnings("deprecation") - public DefaultRenderersFactory( - Context context, @Nullable DrmSessionManager drmSessionManager) { - this(context, drmSessionManager, EXTENSION_RENDERER_MODE_OFF); + @CanIgnoreReturnValue + public DefaultRenderersFactory setExtensionRendererMode( + @ExtensionRendererMode int extensionRendererMode) { + this.extensionRendererMode = extensionRendererMode; + return this; } /** - * @deprecated Use {@link #DefaultRenderersFactory(Context)} and {@link - * #setExtensionRendererMode(int)}. + * Enables {@link com.google.android.exoplayer2.mediacodec.MediaCodecRenderer} instances to + * operate their {@link MediaCodec} in asynchronous mode and perform asynchronous queueing. + * + *
This feature can be enabled only on devices with API versions >= 23. For devices with + * older API versions, this method is a no-op. + * + * @return This factory, for convenience. */ - @Deprecated - @SuppressWarnings("deprecation") - public DefaultRenderersFactory( - Context context, @ExtensionRendererMode int extensionRendererMode) { - this(context, extensionRendererMode, DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS); + @CanIgnoreReturnValue + public DefaultRenderersFactory forceEnableMediaCodecAsynchronousQueueing() { + codecAdapterFactory.forceEnableAsynchronous(); + return this; } /** - * @deprecated Use {@link #DefaultRenderersFactory(Context)} and {@link - * #setExtensionRendererMode(int)}, and pass {@link DrmSessionManager} directly to {@link - * SimpleExoPlayer.Builder}. + * Disables {@link com.google.android.exoplayer2.mediacodec.MediaCodecRenderer} instances from + * operating their {@link MediaCodec} in asynchronous mode and perform asynchronous queueing. + * {@link MediaCodec} instances will be operated synchronous mode. + * + * @return This factory, for convenience. */ - @Deprecated - @SuppressWarnings("deprecation") - public DefaultRenderersFactory( - Context context, - @Nullable DrmSessionManager drmSessionManager, - @ExtensionRendererMode int extensionRendererMode) { - this(context, drmSessionManager, extensionRendererMode, DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS); + @CanIgnoreReturnValue + public DefaultRenderersFactory forceDisableMediaCodecAsynchronousQueueing() { + codecAdapterFactory.forceDisableAsynchronous(); + return this; } /** - * @deprecated Use {@link #DefaultRenderersFactory(Context)}, {@link - * #setExtensionRendererMode(int)} and {@link #setAllowedVideoJoiningTimeMs(long)}. + * Enable synchronizing codec interactions with asynchronous buffer queueing. + * + *
This method is experimental, and will be renamed or removed in a future release. + * + * @param enabled Whether codec interactions will be synchronized with asynchronous buffer + * queueing. + * @return This factory, for convenience. */ - @Deprecated - @SuppressWarnings("deprecation") - public DefaultRenderersFactory( - Context context, - @ExtensionRendererMode int extensionRendererMode, - long allowedVideoJoiningTimeMs) { - this(context, null, extensionRendererMode, allowedVideoJoiningTimeMs); + @CanIgnoreReturnValue + public DefaultRenderersFactory experimentalSetSynchronizeCodecInteractionsWithQueueingEnabled( + boolean enabled) { + codecAdapterFactory.experimentalSetSynchronizeCodecInteractionsWithQueueingEnabled(enabled); + return this; } /** - * @deprecated Use {@link #DefaultRenderersFactory(Context)}, {@link - * #setExtensionRendererMode(int)} and {@link #setAllowedVideoJoiningTimeMs(long)}, and pass - * {@link DrmSessionManager} directly to {@link SimpleExoPlayer.Builder}. + * Sets whether to enable fallback to lower-priority decoders if decoder initialization fails. + * This may result in using a decoder that is less efficient or slower than the primary decoder. + * + * @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder + * initialization fails. + * @return This factory, for convenience. */ - @Deprecated - public DefaultRenderersFactory( - Context context, - @Nullable DrmSessionManager drmSessionManager, - @ExtensionRendererMode int extensionRendererMode, - long allowedVideoJoiningTimeMs) { - this.context = context; - this.extensionRendererMode = extensionRendererMode; - this.allowedVideoJoiningTimeMs = allowedVideoJoiningTimeMs; - this.drmSessionManager = drmSessionManager; - mediaCodecSelector = MediaCodecSelector.DEFAULT; + @CanIgnoreReturnValue + public DefaultRenderersFactory setEnableDecoderFallback(boolean enableDecoderFallback) { + this.enableDecoderFallback = enableDecoderFallback; + return this; } /** - * Sets the extension renderer mode, which determines if and how available extension renderers are - * used. Note that extensions must be included in the application build for them to be considered - * available. + * Sets a {@link MediaCodecSelector} for use by {@link MediaCodec} based renderers. * - *
The default value is {@link #EXTENSION_RENDERER_MODE_OFF}. + *
The default value is {@link MediaCodecSelector#DEFAULT}. * - * @param extensionRendererMode The extension renderer mode. + * @param mediaCodecSelector The {@link MediaCodecSelector}. * @return This factory, for convenience. */ - public DefaultRenderersFactory setExtensionRendererMode( - @ExtensionRendererMode int extensionRendererMode) { - this.extensionRendererMode = extensionRendererMode; + @CanIgnoreReturnValue + public DefaultRenderersFactory setMediaCodecSelector(MediaCodecSelector mediaCodecSelector) { + this.mediaCodecSelector = mediaCodecSelector; return this; } /** - * Sets whether renderers are permitted to play clear regions of encrypted media prior to having - * obtained the keys necessary to decrypt encrypted regions of the media. For encrypted media that - * starts with a short clear region, this allows playback to begin in parallel with key - * acquisition, which can reduce startup latency. + * Sets whether floating point audio should be output when possible. + * + *
Enabling floating point output disables audio processing, but may allow for higher quality + * audio output. * *
The default value is {@code false}. * - * @param playClearSamplesWithoutKeys Whether renderers are permitted to play clear regions of - * encrypted media prior to having obtained the keys necessary to decrypt encrypted regions of - * the media. + * @param enableFloatOutput Whether to enable use of floating point audio output, if available. * @return This factory, for convenience. */ - public DefaultRenderersFactory setPlayClearSamplesWithoutKeys( - boolean playClearSamplesWithoutKeys) { - this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys; + @CanIgnoreReturnValue + public DefaultRenderersFactory setEnableAudioFloatOutput(boolean enableFloatOutput) { + this.enableFloatOutput = enableFloatOutput; return this; } /** - * Sets whether to enable fallback to lower-priority decoders if decoder initialization fails. - * This may result in using a decoder that is less efficient or slower than the primary decoder. + * Sets whether audio should be played using the offload path. * - * @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder - * initialization fails. + *

Audio offload disables ExoPlayer audio processing, but significantly reduces the energy + * consumption of the playback when {@link + * ExoPlayer#experimentalSetOffloadSchedulingEnabled(boolean) offload scheduling} is enabled. + * + *

Most Android devices can only support one offload {@link android.media.AudioTrack} at a time + * and can invalidate it at any time. Thus an app can never be guaranteed that it will be able to + * play in offload. + * + *

The default value is {@code false}. + * + * @param enableOffload Whether to enable use of audio offload for supported formats, if + * available. * @return This factory, for convenience. */ - public DefaultRenderersFactory setEnableDecoderFallback(boolean enableDecoderFallback) { - this.enableDecoderFallback = enableDecoderFallback; + @CanIgnoreReturnValue + public DefaultRenderersFactory setEnableAudioOffload(boolean enableOffload) { + this.enableOffload = enableOffload; return this; } /** - * Sets a {@link MediaCodecSelector} for use by {@link MediaCodec} based renderers. + * Sets whether to enable setting playback speed using {@link + * android.media.AudioTrack#setPlaybackParams(PlaybackParams)}, which is supported from API level + * 23, rather than using application-level audio speed adjustment. This setting has no effect on + * builds before API level 23 (application-level speed adjustment will be used in all cases). * - *

The default value is {@link MediaCodecSelector#DEFAULT}. + *

If enabled and supported, new playback speed settings will take effect more quickly because + * they are applied at the audio mixer, rather than at the point of writing data to the track. * - * @param mediaCodecSelector The {@link MediaCodecSelector}. + *

When using this mode, the maximum supported playback speed is limited by the size of the + * audio track's buffer. If the requested speed is not supported the player's event listener will + * be notified twice on setting playback speed, once with the requested speed, then again with the + * old playback speed reflecting the fact that the requested speed was not supported. + * + * @param enableAudioTrackPlaybackParams Whether to enable setting playback speed using {@link + * android.media.AudioTrack#setPlaybackParams(PlaybackParams)}. * @return This factory, for convenience. */ - public DefaultRenderersFactory setMediaCodecSelector(MediaCodecSelector mediaCodecSelector) { - this.mediaCodecSelector = mediaCodecSelector; + @CanIgnoreReturnValue + public DefaultRenderersFactory setEnableAudioTrackPlaybackParams( + boolean enableAudioTrackPlaybackParams) { + this.enableAudioTrackPlaybackParams = enableAudioTrackPlaybackParams; return this; } @@ -240,6 +277,7 @@ public DefaultRenderersFactory setMediaCodecSelector(MediaCodecSelector mediaCod * seamlessly join an ongoing playback, in milliseconds. * @return This factory, for convenience. */ + @CanIgnoreReturnValue public DefaultRenderersFactory setAllowedVideoJoiningTimeMs(long allowedVideoJoiningTimeMs) { this.allowedVideoJoiningTimeMs = allowedVideoJoiningTimeMs; return this; @@ -251,38 +289,43 @@ public Renderer[] createRenderers( VideoRendererEventListener videoRendererEventListener, AudioRendererEventListener audioRendererEventListener, TextOutput textRendererOutput, - MetadataOutput metadataRendererOutput, - @Nullable DrmSessionManager drmSessionManager) { - if (drmSessionManager == null) { - drmSessionManager = this.drmSessionManager; - } + MetadataOutput metadataRendererOutput) { ArrayList renderersList = new ArrayList<>(); buildVideoRenderers( context, extensionRendererMode, mediaCodecSelector, - drmSessionManager, - playClearSamplesWithoutKeys, enableDecoderFallback, eventHandler, videoRendererEventListener, allowedVideoJoiningTimeMs, renderersList); - buildAudioRenderers( + @Nullable + AudioSink audioSink = + buildAudioSink(context, enableFloatOutput, enableAudioTrackPlaybackParams, enableOffload); + if (audioSink != null) { + buildAudioRenderers( + context, + extensionRendererMode, + mediaCodecSelector, + enableDecoderFallback, + audioSink, + eventHandler, + audioRendererEventListener, + renderersList); + } + buildTextRenderers( context, + textRendererOutput, + eventHandler.getLooper(), + extensionRendererMode, + renderersList); + buildMetadataRenderers( + context, + metadataRendererOutput, + eventHandler.getLooper(), extensionRendererMode, - mediaCodecSelector, - drmSessionManager, - playClearSamplesWithoutKeys, - enableDecoderFallback, - buildAudioProcessors(), - eventHandler, - audioRendererEventListener, renderersList); - buildTextRenderers(context, textRendererOutput, eventHandler.getLooper(), - extensionRendererMode, renderersList); - buildMetadataRenderers(context, metadataRendererOutput, eventHandler.getLooper(), - extensionRendererMode, renderersList); buildCameraMotionRenderers(context, extensionRendererMode, renderersList); buildMiscellaneousRenderers(context, eventHandler, extensionRendererMode, renderersList); return renderersList.toArray(new Renderer[0]); @@ -294,11 +337,6 @@ public Renderer[] createRenderers( * @param context The {@link Context} associated with the player. * @param extensionRendererMode The extension renderer mode. * @param mediaCodecSelector A decoder selector. 
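The setters collected above are designed to be chained on a single DefaultRenderersFactory before it is handed to a player. Below is a minimal sketch of that wiring, assuming the ExoPlayer.Builder(Context, RenderersFactory) constructor added later in this same patch; the PlayerSetup class and the chosen values are illustrative only.

```java
import android.content.Context;
import com.google.android.exoplayer2.DefaultRenderersFactory;
import com.google.android.exoplayer2.ExoPlayer;

final class PlayerSetup {
  private PlayerSetup() {}

  // Illustrative helper that chains the options touched by this hunk.
  static ExoPlayer buildPlayer(Context context) {
    DefaultRenderersFactory renderersFactory =
        new DefaultRenderersFactory(context)
            // Try a lower-priority decoder if the primary decoder fails to initialize.
            .setEnableDecoderFallback(true)
            // Output float PCM where the device supports it; this disables audio processing.
            .setEnableAudioFloatOutput(true)
            // Apply speed changes via AudioTrack#setPlaybackParams on API 23+.
            .setEnableAudioTrackPlaybackParams(true)
            // Give a video renderer up to 5 seconds to join an ongoing playback seamlessly.
            .setAllowedVideoJoiningTimeMs(5_000);
    return new ExoPlayer.Builder(context, renderersFactory).build();
  }
}
```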
- * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the player will - * not be used for DRM protected playbacks. - * @param playClearSamplesWithoutKeys Whether renderers are permitted to play clear regions of - * encrypted media prior to having obtained the keys necessary to decrypt encrypted regions of - * the media. * @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder * initialization fails. This may result in using a decoder that is slower/less efficient than * the primary decoder. @@ -312,24 +350,22 @@ protected void buildVideoRenderers( Context context, @ExtensionRendererMode int extensionRendererMode, MediaCodecSelector mediaCodecSelector, - @Nullable DrmSessionManager drmSessionManager, - boolean playClearSamplesWithoutKeys, boolean enableDecoderFallback, Handler eventHandler, VideoRendererEventListener eventListener, long allowedVideoJoiningTimeMs, ArrayList out) { - out.add( + MediaCodecVideoRenderer videoRenderer = new MediaCodecVideoRenderer( context, + getCodecAdapterFactory(), mediaCodecSelector, allowedVideoJoiningTimeMs, - drmSessionManager, - playClearSamplesWithoutKeys, enableDecoderFallback, eventHandler, eventListener, - MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY)); + MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY); + out.add(videoRenderer); if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) { return; @@ -341,7 +377,6 @@ protected void buildVideoRenderers( try { // Full class names used for constructor args so the LINT rule triggers if any of them move. - // LINT.IfChange Class clazz = Class.forName("com.google.android.exoplayer2.ext.vp9.LibvpxVideoRenderer"); Constructor constructor = clazz.getConstructor( @@ -349,7 +384,6 @@ protected void buildVideoRenderers( android.os.Handler.class, com.google.android.exoplayer2.video.VideoRendererEventListener.class, int.class); - // LINT.ThenChange(../../../../../../../proguard-rules.txt) Renderer renderer = (Renderer) constructor.newInstance( @@ -368,7 +402,6 @@ protected void buildVideoRenderers( try { // Full class names used for constructor args so the LINT rule triggers if any of them move. - // LINT.IfChange Class clazz = Class.forName("com.google.android.exoplayer2.ext.av1.Libgav1VideoRenderer"); Constructor constructor = clazz.getConstructor( @@ -376,7 +409,6 @@ protected void buildVideoRenderers( android.os.Handler.class, com.google.android.exoplayer2.video.VideoRendererEventListener.class, int.class); - // LINT.ThenChange(../../../../../../../proguard-rules.txt) Renderer renderer = (Renderer) constructor.newInstance( @@ -400,16 +432,10 @@ protected void buildVideoRenderers( * @param context The {@link Context} associated with the player. * @param extensionRendererMode The extension renderer mode. * @param mediaCodecSelector A decoder selector. - * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the player will - * not be used for DRM protected playbacks. - * @param playClearSamplesWithoutKeys Whether renderers are permitted to play clear regions of - * encrypted media prior to having obtained the keys necessary to decrypt encrypted regions of - * the media. * @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder * initialization fails. This may result in using a decoder that is slower/less efficient than * the primary decoder. - * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio buffers - * before output. May be empty. 
+ * @param audioSink A sink to which the renderers will output. * @param eventHandler A handler to use when invoking event listeners and outputs. * @param eventListener An event listener. * @param out An array to which the built renderers should be appended. @@ -418,23 +444,21 @@ protected void buildAudioRenderers( Context context, @ExtensionRendererMode int extensionRendererMode, MediaCodecSelector mediaCodecSelector, - @Nullable DrmSessionManager drmSessionManager, - boolean playClearSamplesWithoutKeys, boolean enableDecoderFallback, - AudioProcessor[] audioProcessors, + AudioSink audioSink, Handler eventHandler, AudioRendererEventListener eventListener, ArrayList out) { - out.add( + MediaCodecAudioRenderer audioRenderer = new MediaCodecAudioRenderer( context, + getCodecAdapterFactory(), mediaCodecSelector, - drmSessionManager, - playClearSamplesWithoutKeys, enableDecoderFallback, eventHandler, eventListener, - new DefaultAudioSink(AudioCapabilities.getCapabilities(context), audioProcessors))); + audioSink); + out.add(audioRenderer); if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) { return; @@ -444,18 +468,29 @@ protected void buildAudioRenderers( extensionRendererIndex--; } + try { + Class clazz = Class.forName("com.google.android.exoplayer2.decoder.midi.MidiRenderer"); + Constructor constructor = clazz.getConstructor(); + Renderer renderer = (Renderer) constructor.newInstance(); + out.add(extensionRendererIndex++, renderer); + Log.i(TAG, "Loaded MidiRenderer."); + } catch (ClassNotFoundException e) { + // Expected if the app was built without the extension. + } catch (Exception e) { + // The extension is present, but instantiation failed. + throw new RuntimeException("Error instantiating MIDI extension", e); + } + try { // Full class names used for constructor args so the LINT rule triggers if any of them move. - // LINT.IfChange Class clazz = Class.forName("com.google.android.exoplayer2.ext.opus.LibopusAudioRenderer"); Constructor constructor = clazz.getConstructor( android.os.Handler.class, com.google.android.exoplayer2.audio.AudioRendererEventListener.class, - com.google.android.exoplayer2.audio.AudioProcessor[].class); - // LINT.ThenChange(../../../../../../../proguard-rules.txt) + com.google.android.exoplayer2.audio.AudioSink.class); Renderer renderer = - (Renderer) constructor.newInstance(eventHandler, eventListener, audioProcessors); + (Renderer) constructor.newInstance(eventHandler, eventListener, audioSink); out.add(extensionRendererIndex++, renderer); Log.i(TAG, "Loaded LibopusAudioRenderer."); } catch (ClassNotFoundException e) { @@ -467,16 +502,14 @@ protected void buildAudioRenderers( try { // Full class names used for constructor args so the LINT rule triggers if any of them move. 
- // LINT.IfChange Class clazz = Class.forName("com.google.android.exoplayer2.ext.flac.LibflacAudioRenderer"); Constructor constructor = clazz.getConstructor( android.os.Handler.class, com.google.android.exoplayer2.audio.AudioRendererEventListener.class, - com.google.android.exoplayer2.audio.AudioProcessor[].class); - // LINT.ThenChange(../../../../../../../proguard-rules.txt) + com.google.android.exoplayer2.audio.AudioSink.class); Renderer renderer = - (Renderer) constructor.newInstance(eventHandler, eventListener, audioProcessors); + (Renderer) constructor.newInstance(eventHandler, eventListener, audioSink); out.add(extensionRendererIndex++, renderer); Log.i(TAG, "Loaded LibflacAudioRenderer."); } catch (ClassNotFoundException e) { @@ -488,17 +521,15 @@ protected void buildAudioRenderers( try { // Full class names used for constructor args so the LINT rule triggers if any of them move. - // LINT.IfChange Class clazz = Class.forName("com.google.android.exoplayer2.ext.ffmpeg.FfmpegAudioRenderer"); Constructor constructor = clazz.getConstructor( android.os.Handler.class, com.google.android.exoplayer2.audio.AudioRendererEventListener.class, - com.google.android.exoplayer2.audio.AudioProcessor[].class); - // LINT.ThenChange(../../../../../../../proguard-rules.txt) + com.google.android.exoplayer2.audio.AudioSink.class); Renderer renderer = - (Renderer) constructor.newInstance(eventHandler, eventListener, audioProcessors); + (Renderer) constructor.newInstance(eventHandler, eventListener, audioSink); out.add(extensionRendererIndex++, renderer); Log.i(TAG, "Loaded FfmpegAudioRenderer."); } catch (ClassNotFoundException e) { @@ -565,16 +596,49 @@ protected void buildCameraMotionRenderers( * @param extensionRendererMode The extension renderer mode. * @param out An array to which the built renderers should be appended. */ - protected void buildMiscellaneousRenderers(Context context, Handler eventHandler, - @ExtensionRendererMode int extensionRendererMode, ArrayList out) { + protected void buildMiscellaneousRenderers( + Context context, + Handler eventHandler, + @ExtensionRendererMode int extensionRendererMode, + ArrayList out) { // Do nothing. } /** - * Builds an array of {@link AudioProcessor}s that will process PCM audio before output. + * Builds an {@link AudioSink} to which the audio renderers will output. + * + * @param context The {@link Context} associated with the player. + * @param enableFloatOutput Whether to enable use of floating point audio output, if available. + * @param enableAudioTrackPlaybackParams Whether to enable setting playback speed using {@link + * android.media.AudioTrack#setPlaybackParams(PlaybackParams)}, if supported. + * @param enableOffload Whether to enable use of audio offload for supported formats, if + * available. + * @return The {@link AudioSink} to which the audio renderers will output. May be {@code null} if + * no audio renderers are required. If {@code null} is returned then {@link + * #buildAudioRenderers} will not be called. */ - protected AudioProcessor[] buildAudioProcessors() { - return new AudioProcessor[0]; + @Nullable + protected AudioSink buildAudioSink( + Context context, + boolean enableFloatOutput, + boolean enableAudioTrackPlaybackParams, + boolean enableOffload) { + return new DefaultAudioSink.Builder() + .setAudioCapabilities(AudioCapabilities.getCapabilities(context)) + .setEnableFloatOutput(enableFloatOutput) + .setEnableAudioTrackPlaybackParams(enableAudioTrackPlaybackParams) + .setOffloadMode( + enableOffload + ? 
DefaultAudioSink.OFFLOAD_MODE_ENABLED_GAPLESS_REQUIRED + : DefaultAudioSink.OFFLOAD_MODE_DISABLED) + .build(); } + /** + * Returns the {@link MediaCodecAdapter.Factory} that will be used when creating {@link + * com.google.android.exoplayer2.mediacodec.MediaCodecRenderer} instances. + */ + protected MediaCodecAdapter.Factory getCodecAdapterFactory() { + return codecAdapterFactory; + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/DeviceInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/DeviceInfo.java new file mode 100644 index 0000000000..4ffc59adbd --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/DeviceInfo.java @@ -0,0 +1,111 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.os.Bundle; +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.util.Util; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** Information about the playback device. */ +public final class DeviceInfo implements Bundleable { + + /** Types of playback. One of {@link #PLAYBACK_TYPE_LOCAL} or {@link #PLAYBACK_TYPE_REMOTE}. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + PLAYBACK_TYPE_LOCAL, + PLAYBACK_TYPE_REMOTE, + }) + public @interface PlaybackType {} + /** Playback happens on the local device (e.g. phone). */ + public static final int PLAYBACK_TYPE_LOCAL = 0; + /** Playback happens outside of the device (e.g. a cast device). */ + public static final int PLAYBACK_TYPE_REMOTE = 1; + + /** Unknown DeviceInfo. */ + public static final DeviceInfo UNKNOWN = + new DeviceInfo(PLAYBACK_TYPE_LOCAL, /* minVolume= */ 0, /* maxVolume= */ 0); + + /** The type of playback. */ + public final @PlaybackType int playbackType; + /** The minimum volume that the device supports. */ + public final int minVolume; + /** The maximum volume that the device supports. */ + public final int maxVolume; + + /** Creates device information. */ + public DeviceInfo(@PlaybackType int playbackType, int minVolume, int maxVolume) { + this.playbackType = playbackType; + this.minVolume = minVolume; + this.maxVolume = maxVolume; + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof DeviceInfo)) { + return false; + } + DeviceInfo other = (DeviceInfo) obj; + return playbackType == other.playbackType + && minVolume == other.minVolume + && maxVolume == other.maxVolume; + } + + @Override + public int hashCode() { + int result = 17; + result = 31 * result + playbackType; + result = 31 * result + minVolume; + result = 31 * result + maxVolume; + return result; + } + + // Bundleable implementation. 
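For reference, buildAudioSink above is now the one hook through which every audio renderer receives its AudioSink, and returning null suppresses the audio renderers entirely. A rough sketch of overriding it in a subclass, reusing only the DefaultAudioSink.Builder calls visible in the hunk; TunedRenderersFactory is an illustrative name, not part of the patch.

```java
import android.content.Context;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.DefaultRenderersFactory;
import com.google.android.exoplayer2.audio.AudioCapabilities;
import com.google.android.exoplayer2.audio.AudioSink;
import com.google.android.exoplayer2.audio.DefaultAudioSink;

// Illustrative subclass: customizes the sink while keeping the rest of the factory intact.
final class TunedRenderersFactory extends DefaultRenderersFactory {

  TunedRenderersFactory(Context context) {
    super(context);
  }

  @Override
  @Nullable
  protected AudioSink buildAudioSink(
      Context context,
      boolean enableFloatOutput,
      boolean enableAudioTrackPlaybackParams,
      boolean enableOffload) {
    // Same wiring as the default implementation above, except offload is never requested.
    return new DefaultAudioSink.Builder()
        .setAudioCapabilities(AudioCapabilities.getCapabilities(context))
        .setEnableFloatOutput(enableFloatOutput)
        .setEnableAudioTrackPlaybackParams(enableAudioTrackPlaybackParams)
        .setOffloadMode(DefaultAudioSink.OFFLOAD_MODE_DISABLED)
        .build();
  }
}
```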
+ + private static final String FIELD_PLAYBACK_TYPE = Util.intToStringMaxRadix(0); + private static final String FIELD_MIN_VOLUME = Util.intToStringMaxRadix(1); + private static final String FIELD_MAX_VOLUME = Util.intToStringMaxRadix(2); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putInt(FIELD_PLAYBACK_TYPE, playbackType); + bundle.putInt(FIELD_MIN_VOLUME, minVolume); + bundle.putInt(FIELD_MAX_VOLUME, maxVolume); + return bundle; + } + + /** Object that can restore {@link DeviceInfo} from a {@link Bundle}. */ + public static final Creator CREATOR = + bundle -> { + int playbackType = + bundle.getInt(FIELD_PLAYBACK_TYPE, /* defaultValue= */ PLAYBACK_TYPE_LOCAL); + int minVolume = bundle.getInt(FIELD_MIN_VOLUME, /* defaultValue= */ 0); + int maxVolume = bundle.getInt(FIELD_MAX_VOLUME, /* defaultValue= */ 0); + return new DeviceInfo(playbackType, minVolume, maxVolume); + }; +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlaybackException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlaybackException.java index 653b6002d9..1fd94479b8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlaybackException.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlaybackException.java @@ -15,47 +15,60 @@ */ package com.google.android.exoplayer2; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.os.Bundle; import android.os.SystemClock; +import android.text.TextUtils; +import androidx.annotation.CheckResult; import androidx.annotation.IntDef; import androidx.annotation.Nullable; -import com.google.android.exoplayer2.RendererCapabilities.FormatSupport; +import com.google.android.exoplayer2.C.FormatSupport; +import com.google.android.exoplayer2.source.MediaPeriodId; import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; import java.io.IOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; -/** - * Thrown when a non-recoverable playback failure occurs. - */ -public final class ExoPlaybackException extends Exception { +/** Thrown when a non locally recoverable playback failure occurs. */ +public final class ExoPlaybackException extends PlaybackException { /** * The type of source that produced the error. One of {@link #TYPE_SOURCE}, {@link #TYPE_RENDERER} - * {@link #TYPE_UNEXPECTED}, {@link #TYPE_REMOTE} or {@link #TYPE_OUT_OF_MEMORY}. Note that new - * types may be added in the future and error handling should handle unknown type values. + * {@link #TYPE_UNEXPECTED} or {@link #TYPE_REMOTE}. Note that new types may be added in the + * future and error handling should handle unknown type values. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. 
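Because the new DeviceInfo class implements Bundleable, an instance can be flattened with toBundle() and restored through the CREATOR shown above, for example when crossing a process boundary. A small illustrative round trip; the volume range is made up.

```java
import android.os.Bundle;
import com.google.android.exoplayer2.DeviceInfo;

final class DeviceInfoBundling {
  private DeviceInfoBundling() {}

  static void roundTrip() {
    // A local playback device with a 0..25 volume range (illustrative values).
    DeviceInfo info =
        new DeviceInfo(DeviceInfo.PLAYBACK_TYPE_LOCAL, /* minVolume= */ 0, /* maxVolume= */ 25);

    // Serialize to a Bundle, e.g. to hand across Binder, then restore it.
    Bundle bundle = info.toBundle();
    DeviceInfo restored = DeviceInfo.CREATOR.fromBundle(bundle);

    // equals() compares playbackType, minVolume and maxVolume, so this holds.
    if (!info.equals(restored)) {
      throw new AssertionError("DeviceInfo did not survive the Bundle round trip");
    }
  }
}
```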
  @Documented
  @Retention(RetentionPolicy.SOURCE)
- @IntDef({TYPE_SOURCE, TYPE_RENDERER, TYPE_UNEXPECTED, TYPE_REMOTE, TYPE_OUT_OF_MEMORY})
+ @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE})
+ @IntDef({TYPE_SOURCE, TYPE_RENDERER, TYPE_UNEXPECTED, TYPE_REMOTE})
  public @interface Type {}
  /**
   * The error occurred loading data from a {@link MediaSource}.
-  * <p>
-  * Call {@link #getSourceException()} to retrieve the underlying cause.
+  *
+  * <p>Call {@link #getSourceException()} to retrieve the underlying cause.
   */
  public static final int TYPE_SOURCE = 0;
  /**
   * The error occurred in a {@link Renderer}.
-  * <p>
-  * Call {@link #getRendererException()} to retrieve the underlying cause.
+  *
+  * <p>Call {@link #getRendererException()} to retrieve the underlying cause.
   */
  public static final int TYPE_RENDERER = 1;
  /**
   * The error was an unexpected {@link RuntimeException}.
-  * <p>
-  * Call {@link #getUnexpectedException()} to retrieve the underlying cause.
+  *
+  * <p>Call {@link #getUnexpectedException()} to retrieve the underlying cause.
   */
  public static final int TYPE_UNEXPECTED = 2;
  /**
@@ -64,15 +77,14 @@ public final class ExoPlaybackException extends Exception {
   * <p>
Call {@link #getMessage()} to retrieve the message associated with the error. */ public static final int TYPE_REMOTE = 3; - /** The error was an {@link OutOfMemoryError}. */ - public static final int TYPE_OUT_OF_MEMORY = 4; /** The {@link Type} of the playback failure. */ - @Type public final int type; + public final @Type int type; - /** - * If {@link #type} is {@link #TYPE_RENDERER}, this is the index of the renderer. - */ + /** If {@link #type} is {@link #TYPE_RENDERER}, this is the name of the renderer. */ + @Nullable public final String rendererName; + + /** If {@link #type} is {@link #TYPE_RENDERER}, this is the index of the renderer. */ public final int rendererIndex; /** @@ -84,23 +96,29 @@ public final class ExoPlaybackException extends Exception { /** * If {@link #type} is {@link #TYPE_RENDERER}, this is the level of {@link FormatSupport} of the * renderer for {@link #rendererFormat}. If {@link #rendererFormat} is null, this is {@link - * RendererCapabilities#FORMAT_HANDLED}. + * C#FORMAT_HANDLED}. */ - @FormatSupport public final int rendererFormatSupport; + public final @FormatSupport int rendererFormatSupport; - /** The value of {@link SystemClock#elapsedRealtime()} when this exception was created. */ - public final long timestampMs; + /** The {@link MediaPeriodId} of the media associated with this error, or null if undetermined. */ + @Nullable public final MediaPeriodId mediaPeriodId; - @Nullable private final Throwable cause; + /** + * If {@link #type} is {@link #TYPE_RENDERER}, this field indicates whether the error may be + * recoverable by disabling and re-enabling (but not resetting) the renderers. For other + * {@link Type types} this field will always be {@code false}. + */ + /* package */ final boolean isRecoverable; /** * Creates an instance of type {@link #TYPE_SOURCE}. * * @param cause The cause of the failure. + * @param errorCode See {@link #errorCode}. * @return The created instance. */ - public static ExoPlaybackException createForSource(IOException cause) { - return new ExoPlaybackException(TYPE_SOURCE, cause); + public static ExoPlaybackException createForSource(IOException cause, int errorCode) { + return new ExoPlaybackException(TYPE_SOURCE, cause, errorCode); } /** @@ -112,83 +130,150 @@ public static ExoPlaybackException createForSource(IOException cause) { * or null if the renderer wasn't using a {@link Format}. * @param rendererFormatSupport The {@link FormatSupport} of the renderer for {@code * rendererFormat}. Ignored if {@code rendererFormat} is null. + * @param isRecoverable If the failure can be recovered by disabling and re-enabling the renderer. + * @param errorCode See {@link #errorCode}. * @return The created instance. */ public static ExoPlaybackException createForRenderer( - Exception cause, + Throwable cause, + String rendererName, int rendererIndex, @Nullable Format rendererFormat, - @FormatSupport int rendererFormatSupport) { + @FormatSupport int rendererFormatSupport, + boolean isRecoverable, + @ErrorCode int errorCode) { + return new ExoPlaybackException( TYPE_RENDERER, cause, + /* customMessage= */ null, + errorCode, + rendererName, rendererIndex, rendererFormat, - rendererFormat == null ? RendererCapabilities.FORMAT_HANDLED : rendererFormatSupport); + rendererFormat == null ? C.FORMAT_HANDLED : rendererFormatSupport, + isRecoverable); } /** - * Creates an instance of type {@link #TYPE_UNEXPECTED}. - * - * @param cause The cause of the failure. - * @return The created instance. 
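Applications typically observe these failures via Player.Listener#onPlayerError, downcasting to ExoPlaybackException to reach the renderer and source detail added above. A sketch under that assumption; ErrorLoggingListener is an illustrative name, and Player.Listener and the library's Log utility are not part of this hunk.

```java
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.PlaybackException;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.util.Log;

// Illustrative listener: logs the renderer/source specific detail carried by ExoPlaybackException.
final class ErrorLoggingListener implements Player.Listener {

  private static final String TAG = "ErrorLoggingListener";

  @Override
  public void onPlayerError(PlaybackException error) {
    if (error instanceof ExoPlaybackException) {
      ExoPlaybackException exoError = (ExoPlaybackException) error;
      switch (exoError.type) {
        case ExoPlaybackException.TYPE_SOURCE:
          Log.e(TAG, "Loading failed", exoError.getSourceException());
          break;
        case ExoPlaybackException.TYPE_RENDERER:
          Log.e(TAG, "Renderer " + exoError.rendererName + " failed", exoError.getRendererException());
          break;
        case ExoPlaybackException.TYPE_UNEXPECTED:
        case ExoPlaybackException.TYPE_REMOTE:
        default:
          Log.e(TAG, "Playback failed: " + error.getErrorCodeName(), error);
          break;
      }
    }
  }
}
```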
+ * @deprecated Use {@link #createForUnexpected(RuntimeException, int) + * createForUnexpected(RuntimeException, ERROR_CODE_UNSPECIFIED)} instead. */ + @Deprecated public static ExoPlaybackException createForUnexpected(RuntimeException cause) { - return new ExoPlaybackException(TYPE_UNEXPECTED, cause); + return createForUnexpected(cause, ERROR_CODE_UNSPECIFIED); } /** - * Creates an instance of type {@link #TYPE_REMOTE}. + * Creates an instance of type {@link #TYPE_UNEXPECTED}. * - * @param message The message associated with the error. + * @param cause The cause of the failure. + * @param errorCode See {@link #errorCode}. * @return The created instance. */ - public static ExoPlaybackException createForRemote(String message) { - return new ExoPlaybackException(TYPE_REMOTE, message); + public static ExoPlaybackException createForUnexpected( + RuntimeException cause, @ErrorCode int errorCode) { + return new ExoPlaybackException(TYPE_UNEXPECTED, cause, errorCode); } /** - * Creates an instance of type {@link #TYPE_OUT_OF_MEMORY}. + * Creates an instance of type {@link #TYPE_REMOTE}. * - * @param cause The cause of the failure. + * @param message The message associated with the error. * @return The created instance. */ - public static ExoPlaybackException createForOutOfMemoryError(OutOfMemoryError cause) { - return new ExoPlaybackException(TYPE_OUT_OF_MEMORY, cause); + public static ExoPlaybackException createForRemote(String message) { + return new ExoPlaybackException( + TYPE_REMOTE, + /* cause= */ null, + /* customMessage= */ message, + ERROR_CODE_REMOTE_ERROR, + /* rendererName= */ null, + /* rendererIndex= */ C.INDEX_UNSET, + /* rendererFormat= */ null, + /* rendererFormatSupport= */ C.FORMAT_HANDLED, + /* isRecoverable= */ false); } - private ExoPlaybackException(@Type int type, Throwable cause) { + private ExoPlaybackException(@Type int type, Throwable cause, @ErrorCode int errorCode) { this( type, cause, + /* customMessage= */ null, + errorCode, + /* rendererName= */ null, /* rendererIndex= */ C.INDEX_UNSET, /* rendererFormat= */ null, - /* rendererFormatSupport= */ RendererCapabilities.FORMAT_HANDLED); + /* rendererFormatSupport= */ C.FORMAT_HANDLED, + /* isRecoverable= */ false); } private ExoPlaybackException( @Type int type, - Throwable cause, + @Nullable Throwable cause, + @Nullable String customMessage, + @ErrorCode int errorCode, + @Nullable String rendererName, int rendererIndex, @Nullable Format rendererFormat, - @FormatSupport int rendererFormatSupport) { - super(cause); + @FormatSupport int rendererFormatSupport, + boolean isRecoverable) { + this( + deriveMessage( + type, + customMessage, + rendererName, + rendererIndex, + rendererFormat, + rendererFormatSupport), + cause, + errorCode, + type, + rendererName, + rendererIndex, + rendererFormat, + rendererFormatSupport, + /* mediaPeriodId= */ null, + /* timestampMs= */ SystemClock.elapsedRealtime(), + isRecoverable); + } + + private ExoPlaybackException(Bundle bundle) { + super(bundle); + type = bundle.getInt(FIELD_TYPE, /* defaultValue= */ TYPE_UNEXPECTED); + rendererName = bundle.getString(FIELD_RENDERER_NAME); + rendererIndex = bundle.getInt(FIELD_RENDERER_INDEX, /* defaultValue= */ C.INDEX_UNSET); + @Nullable Bundle rendererFormatBundle = bundle.getBundle(FIELD_RENDERER_FORMAT); + rendererFormat = + rendererFormatBundle == null ? 
null : Format.CREATOR.fromBundle(rendererFormatBundle); + rendererFormatSupport = + bundle.getInt(FIELD_RENDERER_FORMAT_SUPPORT, /* defaultValue= */ C.FORMAT_HANDLED); + isRecoverable = bundle.getBoolean(FIELD_IS_RECOVERABLE, /* defaultValue= */ false); + mediaPeriodId = null; + } + + private ExoPlaybackException( + String message, + @Nullable Throwable cause, + @ErrorCode int errorCode, + @Type int type, + @Nullable String rendererName, + int rendererIndex, + @Nullable Format rendererFormat, + @FormatSupport int rendererFormatSupport, + @Nullable MediaPeriodId mediaPeriodId, + long timestampMs, + boolean isRecoverable) { + super(message, cause, errorCode, timestampMs); + Assertions.checkArgument(!isRecoverable || type == TYPE_RENDERER); + Assertions.checkArgument(cause != null || type == TYPE_REMOTE); this.type = type; - this.cause = cause; + this.rendererName = rendererName; this.rendererIndex = rendererIndex; this.rendererFormat = rendererFormat; this.rendererFormatSupport = rendererFormatSupport; - timestampMs = SystemClock.elapsedRealtime(); - } - - private ExoPlaybackException(@Type int type, String message) { - super(message); - this.type = type; - rendererIndex = C.INDEX_UNSET; - rendererFormat = null; - rendererFormatSupport = RendererCapabilities.FORMAT_UNSUPPORTED_TYPE; - cause = null; - timestampMs = SystemClock.elapsedRealtime(); + this.mediaPeriodId = mediaPeriodId; + this.isRecoverable = isRecoverable; } /** @@ -198,7 +283,7 @@ private ExoPlaybackException(@Type int type, String message) { */ public IOException getSourceException() { Assertions.checkState(type == TYPE_SOURCE); - return (IOException) Assertions.checkNotNull(cause); + return (IOException) Assertions.checkNotNull(getCause()); } /** @@ -208,7 +293,7 @@ public IOException getSourceException() { */ public Exception getRendererException() { Assertions.checkState(type == TYPE_RENDERER); - return (Exception) Assertions.checkNotNull(cause); + return (Exception) Assertions.checkNotNull(getCause()); } /** @@ -218,16 +303,119 @@ public Exception getRendererException() { */ public RuntimeException getUnexpectedException() { Assertions.checkState(type == TYPE_UNEXPECTED); - return (RuntimeException) Assertions.checkNotNull(cause); + return (RuntimeException) Assertions.checkNotNull(getCause()); } + @Override + public boolean errorInfoEquals(@Nullable PlaybackException that) { + if (!super.errorInfoEquals(that)) { + return false; + } + // We know that is not null and is an ExoPlaybackException because of the super call returning + // true. + ExoPlaybackException other = (ExoPlaybackException) Util.castNonNull(that); + return type == other.type + && Util.areEqual(rendererName, other.rendererName) + && rendererIndex == other.rendererIndex + && Util.areEqual(rendererFormat, other.rendererFormat) + && rendererFormatSupport == other.rendererFormatSupport + && Util.areEqual(mediaPeriodId, other.mediaPeriodId) + && isRecoverable == other.isRecoverable; + } + + /** + * Returns a copy of this exception with the provided {@link MediaPeriodId}. + * + * @param mediaPeriodId The {@link MediaPeriodId}. + * @return The copied exception. 
+ */ + @CheckResult + /* package */ ExoPlaybackException copyWithMediaPeriodId(@Nullable MediaPeriodId mediaPeriodId) { + return new ExoPlaybackException( + Util.castNonNull(getMessage()), + getCause(), + errorCode, + type, + rendererName, + rendererIndex, + rendererFormat, + rendererFormatSupport, + mediaPeriodId, + timestampMs, + isRecoverable); + } + + private static String deriveMessage( + @Type int type, + @Nullable String customMessage, + @Nullable String rendererName, + int rendererIndex, + @Nullable Format rendererFormat, + @FormatSupport int rendererFormatSupport) { + String message; + switch (type) { + case TYPE_SOURCE: + message = "Source error"; + break; + case TYPE_RENDERER: + message = + rendererName + + " error" + + ", index=" + + rendererIndex + + ", format=" + + rendererFormat + + ", format_supported=" + + Util.getFormatSupportString(rendererFormatSupport); + break; + case TYPE_REMOTE: + message = "Remote error"; + break; + case TYPE_UNEXPECTED: + default: + message = "Unexpected runtime error"; + break; + } + if (!TextUtils.isEmpty(customMessage)) { + message += ": " + customMessage; + } + return message; + } + + // Bundleable implementation. + + /** Object that can restore {@link ExoPlaybackException} from a {@link Bundle}. */ + public static final Creator CREATOR = ExoPlaybackException::new; + + private static final String FIELD_TYPE = Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 1); + private static final String FIELD_RENDERER_NAME = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 2); + private static final String FIELD_RENDERER_INDEX = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 3); + private static final String FIELD_RENDERER_FORMAT = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 4); + private static final String FIELD_RENDERER_FORMAT_SUPPORT = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 5); + private static final String FIELD_IS_RECOVERABLE = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 6); + /** - * Retrieves the underlying error when {@link #type} is {@link #TYPE_OUT_OF_MEMORY}. + * {@inheritDoc} * - * @throws IllegalStateException If {@link #type} is not {@link #TYPE_OUT_OF_MEMORY}. + *

It omits the {@link #mediaPeriodId} field. The {@link #mediaPeriodId} of an instance + * restored by {@link #CREATOR} will always be {@code null}. */ - public OutOfMemoryError getOutOfMemoryError() { - Assertions.checkState(type == TYPE_OUT_OF_MEMORY); - return (OutOfMemoryError) Assertions.checkNotNull(cause); + @Override + public Bundle toBundle() { + Bundle bundle = super.toBundle(); + bundle.putInt(FIELD_TYPE, type); + bundle.putString(FIELD_RENDERER_NAME, rendererName); + bundle.putInt(FIELD_RENDERER_INDEX, rendererIndex); + if (rendererFormat != null) { + bundle.putBundle(FIELD_RENDERER_FORMAT, rendererFormat.toBundle()); + } + bundle.putInt(FIELD_RENDERER_FORMAT_SUPPORT, rendererFormatSupport); + bundle.putBoolean(FIELD_IS_RECOVERABLE, isRecoverable); + return bundle; } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayer.java index c2e5c7170f..367e835dd0 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayer.java @@ -15,36 +15,65 @@ */ package com.google.android.exoplayer2; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; + import android.content.Context; +import android.media.AudioDeviceInfo; +import android.media.AudioTrack; +import android.media.MediaCodec; import android.os.Looper; +import android.os.Process; +import android.view.Surface; +import android.view.SurfaceHolder; +import android.view.SurfaceView; +import android.view.TextureView; +import androidx.annotation.IntRange; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.analytics.AnalyticsCollector; +import com.google.android.exoplayer2.analytics.AnalyticsListener; +import com.google.android.exoplayer2.analytics.DefaultAnalyticsCollector; +import com.google.android.exoplayer2.audio.AudioAttributes; +import com.google.android.exoplayer2.audio.AudioSink; +import com.google.android.exoplayer2.audio.AuxEffectInfo; +import com.google.android.exoplayer2.audio.DefaultAudioSink; import com.google.android.exoplayer2.audio.MediaCodecAudioRenderer; +import com.google.android.exoplayer2.decoder.DecoderCounters; +import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory; +import com.google.android.exoplayer2.extractor.ExtractorsFactory; import com.google.android.exoplayer2.metadata.MetadataRenderer; -import com.google.android.exoplayer2.source.ClippingMediaSource; -import com.google.android.exoplayer2.source.ConcatenatingMediaSource; -import com.google.android.exoplayer2.source.LoopingMediaSource; +import com.google.android.exoplayer2.source.DefaultMediaSourceFactory; import com.google.android.exoplayer2.source.MediaSource; -import com.google.android.exoplayer2.source.MergingMediaSource; -import com.google.android.exoplayer2.source.ProgressiveMediaSource; -import com.google.android.exoplayer2.source.SingleSampleMediaSource; +import com.google.android.exoplayer2.source.ShuffleOrder; +import com.google.android.exoplayer2.source.TrackGroupArray; +import com.google.android.exoplayer2.text.CueGroup; import com.google.android.exoplayer2.text.TextRenderer; import com.google.android.exoplayer2.trackselection.DefaultTrackSelector; +import 
com.google.android.exoplayer2.trackselection.TrackSelectionArray; import com.google.android.exoplayer2.trackselection.TrackSelector; import com.google.android.exoplayer2.upstream.BandwidthMeter; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter; -import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Clock; +import com.google.android.exoplayer2.util.PriorityTaskManager; import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.video.MediaCodecVideoRenderer; +import com.google.android.exoplayer2.video.VideoFrameMetadataListener; +import com.google.android.exoplayer2.video.VideoSize; +import com.google.android.exoplayer2.video.spherical.CameraMotionListener; +import com.google.common.base.Function; +import com.google.common.base.Supplier; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.util.List; /** * An extensible media player that plays {@link MediaSource}s. Instances can be obtained from {@link - * SimpleExoPlayer.Builder} or {@link ExoPlayer.Builder}. + * Builder}. * - *

Player components

+ *

Player components

* *

ExoPlayer is designed to make few assumptions about (and hence impose few restrictions on) the * type of the media being played, how and where it is stored, and how it is rendered. Rather than @@ -53,20 +82,19 @@ * Components common to all ExoPlayer implementations are: * *

    - *
  • A {@link MediaSource} that defines the media to be played, loads the media, and from - * which the loaded media can be read. A MediaSource is injected via {@link - * #prepare(MediaSource)} at the start of playback. The library modules provide default - * implementations for progressive media files ({@link ProgressiveMediaSource}), DASH - * (DashMediaSource), SmoothStreaming (SsMediaSource) and HLS (HlsMediaSource), an - * implementation for loading single media samples ({@link SingleSampleMediaSource}) that's - * most often used for side-loaded subtitle files, and implementations for building more - * complex MediaSources from simpler ones ({@link MergingMediaSource}, {@link - * ConcatenatingMediaSource}, {@link LoopingMediaSource} and {@link ClippingMediaSource}). + *
  • {@link MediaSource MediaSources} that define the media to be played, load the media, + * and from which the loaded media can be read. MediaSources are created from {@link MediaItem + * MediaItems} by the {@link MediaSource.Factory} injected into the player {@link + * Builder#setMediaSourceFactory Builder}, or can be added directly by methods like {@link + * #setMediaSource(MediaSource)}. The library provides a {@link DefaultMediaSourceFactory} for + * progressive media files, DASH, SmoothStreaming and HLS, which also includes functionality + * for side-loading subtitle files and clipping media. *
  • {@link Renderer}s that render individual components of the media. The library * provides default implementations for common media types ({@link MediaCodecVideoRenderer}, * {@link MediaCodecAudioRenderer}, {@link TextRenderer} and {@link MetadataRenderer}). A * Renderer consumes media from the MediaSource being played. Renderers are injected when the - * player is created. + * player is created. The number of renderers and their respective track types can be obtained + * by calling {@link #getRendererCount()} and {@link #getRendererType(int)}. *
  • A {@link TrackSelector} that selects tracks provided by the MediaSource to be * consumed by each of the available Renderers. The library provides a default implementation * ({@link DefaultTrackSelector}) suitable for most use cases. A TrackSelector is injected @@ -89,7 +117,7 @@ * {@link DataSource} factories to be injected via their constructors. By providing a custom factory * it's possible to load data from a non-standard source, or through a different network stack. * - *
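Taken together, the components described above mean a caller only has to supply a MediaItem; the Builder wires the default MediaSource.Factory, renderers and track selector behind the scenes. An illustrative sketch, with SimplePlayback and the mediaUrl parameter as placeholders.

```java
import android.content.Context;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.MediaItem;

final class SimplePlayback {
  private SimplePlayback() {}

  // Illustrative: the Builder supplies DefaultRenderersFactory, DefaultTrackSelector and
  // DefaultMediaSourceFactory, so a MediaItem is all the caller needs to provide.
  static ExoPlayer play(Context context, String mediaUrl) {
    ExoPlayer player = new ExoPlayer.Builder(context).build();
    player.setMediaItem(MediaItem.fromUri(mediaUrl));
    player.prepare();
    player.play();
    return player;
  }
}
```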

    Threading model

    + *

    Threading model

    * *

    The figure below shows ExoPlayer's threading model. * @@ -97,15 +125,15 @@ * threading model"> * *

      - *
    • ExoPlayer instances must be accessed from a single application thread. For the vast - * majority of cases this should be the application's main thread. Using the application's - * main thread is also a requirement when using ExoPlayer's UI components or the IMA - * extension. The thread on which an ExoPlayer instance must be accessed can be explicitly - * specified by passing a `Looper` when creating the player. If no `Looper` is specified, then - * the `Looper` of the thread that the player is created on is used, or if that thread does - * not have a `Looper`, the `Looper` of the application's main thread is used. In all cases - * the `Looper` of the thread from which the player must be accessed can be queried using - * {@link #getApplicationLooper()}. + *
    • ExoPlayer instances must be accessed from a single application thread unless indicated + * otherwise. For the vast majority of cases this should be the application's main thread. + * Using the application's main thread is also a requirement when using ExoPlayer's UI + * components or the IMA extension. The thread on which an ExoPlayer instance must be accessed + * can be explicitly specified by passing a `Looper` when creating the player. If no `Looper` + * is specified, then the `Looper` of the thread that the player is created on is used, or if + * that thread does not have a `Looper`, the `Looper` of the application's main thread is + * used. In all cases the `Looper` of the thread from which the player must be accessed can be + * queried using {@link #getApplicationLooper()}. *
    • Registered listeners are called on the thread associated with {@link * #getApplicationLooper()}. Note that this means registered listeners are called on the same * thread which must be used to access the player. @@ -125,90 +153,571 @@ */ public interface ExoPlayer extends Player { + /** + * @deprecated Use {@link ExoPlayer}, as the {@link AudioComponent} methods are defined by that + * interface. + */ + @Deprecated + interface AudioComponent { + + /** + * @deprecated Use {@link ExoPlayer#setAudioAttributes(AudioAttributes, boolean)} instead. + */ + @Deprecated + void setAudioAttributes(AudioAttributes audioAttributes, boolean handleAudioFocus); + + /** + * @deprecated Use {@link Player#getAudioAttributes()} instead. + */ + @Deprecated + AudioAttributes getAudioAttributes(); + + /** + * @deprecated Use {@link ExoPlayer#setAudioSessionId(int)} instead. + */ + @Deprecated + void setAudioSessionId(int audioSessionId); + + /** + * @deprecated Use {@link ExoPlayer#getAudioSessionId()} instead. + */ + @Deprecated + int getAudioSessionId(); + + /** + * @deprecated Use {@link ExoPlayer#setAuxEffectInfo(AuxEffectInfo)} instead. + */ + @Deprecated + void setAuxEffectInfo(AuxEffectInfo auxEffectInfo); + + /** + * @deprecated Use {@link ExoPlayer#clearAuxEffectInfo()} instead. + */ + @Deprecated + void clearAuxEffectInfo(); + + /** + * @deprecated Use {@link Player#setVolume(float)} instead. + */ + @Deprecated + void setVolume(float audioVolume); + + /** + * @deprecated Use {@link Player#getVolume()} instead. + */ + @Deprecated + float getVolume(); + + /** + * @deprecated Use {@link ExoPlayer#setSkipSilenceEnabled(boolean)} instead. + */ + @Deprecated + void setSkipSilenceEnabled(boolean skipSilenceEnabled); + + /** + * @deprecated Use {@link ExoPlayer#getSkipSilenceEnabled()} instead. + */ + @Deprecated + boolean getSkipSilenceEnabled(); + } + + /** + * @deprecated Use {@link ExoPlayer}, as the {@link VideoComponent} methods are defined by that + * interface. + */ + @Deprecated + interface VideoComponent { + + /** + * @deprecated Use {@link ExoPlayer#setVideoScalingMode(int)} instead. + */ + @Deprecated + void setVideoScalingMode(@C.VideoScalingMode int videoScalingMode); + + /** + * @deprecated Use {@link ExoPlayer#getVideoScalingMode()} instead. + */ + @Deprecated + @C.VideoScalingMode + int getVideoScalingMode(); + + /** + * @deprecated Use {@link ExoPlayer#setVideoChangeFrameRateStrategy(int)} instead. + */ + @Deprecated + void setVideoChangeFrameRateStrategy( + @C.VideoChangeFrameRateStrategy int videoChangeFrameRateStrategy); + + /** + * @deprecated Use {@link ExoPlayer#getVideoChangeFrameRateStrategy()} instead. + */ + @Deprecated + @C.VideoChangeFrameRateStrategy + int getVideoChangeFrameRateStrategy(); + + /** + * @deprecated Use {@link ExoPlayer#setVideoFrameMetadataListener(VideoFrameMetadataListener)} + * instead. + */ + @Deprecated + void setVideoFrameMetadataListener(VideoFrameMetadataListener listener); + + /** + * @deprecated Use {@link ExoPlayer#clearVideoFrameMetadataListener(VideoFrameMetadataListener)} + * instead. + */ + @Deprecated + void clearVideoFrameMetadataListener(VideoFrameMetadataListener listener); + + /** + * @deprecated Use {@link ExoPlayer#setCameraMotionListener(CameraMotionListener)} instead. + */ + @Deprecated + void setCameraMotionListener(CameraMotionListener listener); + + /** + * @deprecated Use {@link ExoPlayer#clearCameraMotionListener(CameraMotionListener)} instead. 
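Each @deprecated note above names the replacement method that now lives directly on ExoPlayer or Player. A short sketch of the direct calls; AudioConfig is an illustrative name and the chosen values are arbitrary.

```java
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.audio.AudioAttributes;

final class AudioConfig {
  private AudioConfig() {}

  // Illustrative: these calls replace the deprecated component-based access shown above.
  static void configureAudio(ExoPlayer player) {
    // Request audio focus handling with the default audio attributes.
    player.setAudioAttributes(AudioAttributes.DEFAULT, /* handleAudioFocus= */ true);
    // Pause when headphones are unplugged.
    player.setHandleAudioBecomingNoisy(true);
    // Skip silent stretches in the audio stream.
    player.setSkipSilenceEnabled(true);
  }
}
```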
+ */ + @Deprecated + void clearCameraMotionListener(CameraMotionListener listener); + + /** + * @deprecated Use {@link Player#clearVideoSurface()} instead. + */ + @Deprecated + void clearVideoSurface(); + + /** + * @deprecated Use {@link Player#clearVideoSurface(Surface)} instead. + */ + @Deprecated + void clearVideoSurface(@Nullable Surface surface); + + /** + * @deprecated Use {@link Player#setVideoSurface(Surface)} instead. + */ + @Deprecated + void setVideoSurface(@Nullable Surface surface); + + /** + * @deprecated Use {@link Player#setVideoSurfaceHolder(SurfaceHolder)} instead. + */ + @Deprecated + void setVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder); + + /** + * @deprecated Use {@link Player#clearVideoSurfaceHolder(SurfaceHolder)} instead. + */ + @Deprecated + void clearVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder); + + /** + * @deprecated Use {@link Player#setVideoSurfaceView(SurfaceView)} instead. + */ + @Deprecated + void setVideoSurfaceView(@Nullable SurfaceView surfaceView); + + /** + * @deprecated Use {@link Player#clearVideoSurfaceView(SurfaceView)} instead. + */ + @Deprecated + void clearVideoSurfaceView(@Nullable SurfaceView surfaceView); + + /** + * @deprecated Use {@link Player#setVideoTextureView(TextureView)} instead. + */ + @Deprecated + void setVideoTextureView(@Nullable TextureView textureView); + + /** + * @deprecated Use {@link Player#clearVideoTextureView(TextureView)} instead. + */ + @Deprecated + void clearVideoTextureView(@Nullable TextureView textureView); + + /** + * @deprecated Use {@link Player#getVideoSize()} instead. + */ + @Deprecated + VideoSize getVideoSize(); + } + + /** + * @deprecated Use {@link Player}, as the {@link TextComponent} methods are defined by that + * interface. + */ + @Deprecated + interface TextComponent { + + /** + * @deprecated Use {@link Player#getCurrentCues()} instead. + */ + @Deprecated + CueGroup getCurrentCues(); + } + + /** + * @deprecated Use {@link Player}, as the {@link DeviceComponent} methods are defined by that + * interface. + */ + @Deprecated + interface DeviceComponent { + + /** + * @deprecated Use {@link Player#getDeviceInfo()} instead. + */ + @Deprecated + DeviceInfo getDeviceInfo(); + + /** + * @deprecated Use {@link Player#getDeviceVolume()} instead. + */ + @Deprecated + int getDeviceVolume(); + + /** + * @deprecated Use {@link Player#isDeviceMuted()} instead. + */ + @Deprecated + boolean isDeviceMuted(); + + /** + * @deprecated Use {@link Player#setDeviceVolume(int)} instead. + */ + @Deprecated + void setDeviceVolume(int volume); + + /** + * @deprecated Use {@link Player#increaseDeviceVolume()} instead. + */ + @Deprecated + void increaseDeviceVolume(); + + /** + * @deprecated Use {@link Player#decreaseDeviceVolume()} instead. + */ + @Deprecated + void decreaseDeviceVolume(); + + /** + * @deprecated Use {@link Player#setDeviceMuted(boolean)} instead. + */ + @Deprecated + void setDeviceMuted(boolean muted); + } + + /** + * A listener for audio offload events. + * + *

<p>This class is experimental, and might be renamed, moved or removed in a future release.
+   */
+  interface AudioOffloadListener {
+    /**
+     * Called when the player has started or stopped offload scheduling using {@link
+     * #experimentalSetOffloadSchedulingEnabled(boolean)}.
+     *
+     * <p>This method is experimental, and will be renamed or removed in a future release.
+     */
+    default void onExperimentalOffloadSchedulingEnabledChanged(boolean offloadSchedulingEnabled) {}
+
+    /**
+     * Called when the player has started or finished sleeping for offload.
+     *
+     * <p>This method is experimental, and will be renamed or removed in a future release.
+     */
+    default void onExperimentalSleepingForOffloadChanged(boolean sleepingForOffload) {}
+
+    /**
+     * Called when the value of {@link AudioTrack#isOffloadedPlayback} changes.
+     *
+     * <p>This should not be generally required to be acted upon. But when offload is critical for
+     * efficiency, or audio features (gapless, playback speed), this will let the app know.
+     *
+     * <p>This method is experimental, and will be renamed or removed in a future release.
+     */
+    default void onExperimentalOffloadedPlayback(boolean offloadedPlayback) {}
+  }
+
  /**
   * A builder for {@link ExoPlayer} instances.
   *
-  * <p>See {@link #Builder(Context, Renderer...)} for the list of default values.
+  * <p>
      See {@link #Builder(Context)} for the list of default values. */ + @SuppressWarnings("deprecation") final class Builder { - private final Renderer[] renderers; + /* package */ final Context context; - private Clock clock; - private TrackSelector trackSelector; - private LoadControl loadControl; - private BandwidthMeter bandwidthMeter; - private Looper looper; - private AnalyticsCollector analyticsCollector; - private boolean useLazyPreparation; - private boolean buildCalled; + /* package */ Clock clock; + /* package */ long foregroundModeTimeoutMs; + /* package */ Supplier renderersFactorySupplier; + /* package */ Supplier mediaSourceFactorySupplier; + /* package */ Supplier trackSelectorSupplier; + /* package */ Supplier loadControlSupplier; + /* package */ Supplier bandwidthMeterSupplier; + /* package */ Function analyticsCollectorFunction; + /* package */ Looper looper; + @Nullable /* package */ PriorityTaskManager priorityTaskManager; + /* package */ AudioAttributes audioAttributes; + /* package */ boolean handleAudioFocus; + @C.WakeMode /* package */ int wakeMode; + /* package */ boolean handleAudioBecomingNoisy; + /* package */ boolean skipSilenceEnabled; + @C.VideoScalingMode /* package */ int videoScalingMode; + @C.VideoChangeFrameRateStrategy /* package */ int videoChangeFrameRateStrategy; + /* package */ boolean useLazyPreparation; + /* package */ SeekParameters seekParameters; + /* package */ long seekBackIncrementMs; + /* package */ long seekForwardIncrementMs; + /* package */ LivePlaybackSpeedControl livePlaybackSpeedControl; + /* package */ long releaseTimeoutMs; + /* package */ long detachSurfaceTimeoutMs; + /* package */ boolean pauseAtEndOfMediaItems; + /* package */ boolean usePlatformDiagnostics; + @Nullable /* package */ Looper playbackLooper; + /* package */ boolean buildCalled; /** - * Creates a builder with a list of {@link Renderer Renderers}. + * Creates a builder. + * + *

      Use {@link #Builder(Context, RenderersFactory)}, {@link #Builder(Context, + * MediaSource.Factory)} or {@link #Builder(Context, RenderersFactory, MediaSource.Factory)} + * instead, if you intend to provide a custom {@link RenderersFactory}, {@link + * ExtractorsFactory} or {@link DefaultMediaSourceFactory}. This is to ensure that ProGuard or + * R8 can remove ExoPlayer's {@link DefaultRenderersFactory}, {@link DefaultExtractorsFactory} + * and {@link DefaultMediaSourceFactory} from the APK. * *

      The builder uses the following default values: * *

        + *
+   *   <li>{@link RenderersFactory}: {@link DefaultRenderersFactory}
    *   <li>{@link TrackSelector}: {@link DefaultTrackSelector}
+   *   <li>{@link MediaSource.Factory}: {@link DefaultMediaSourceFactory}
    *   <li>{@link LoadControl}: {@link DefaultLoadControl}
    *   <li>{@link BandwidthMeter}: {@link DefaultBandwidthMeter#getSingletonInstance(Context)}
+   *   <li>{@link LivePlaybackSpeedControl}: {@link DefaultLivePlaybackSpeedControl}
    *   <li>{@link Looper}: The {@link Looper} associated with the current thread, or the {@link
    *       Looper} of the application's main thread if the current thread doesn't have a {@link
    *       Looper}
    *   <li>{@link AnalyticsCollector}: {@link AnalyticsCollector} with {@link Clock#DEFAULT}
+   *   <li>{@link PriorityTaskManager}: {@code null} (not used)
+   *   <li>{@link AudioAttributes}: {@link AudioAttributes#DEFAULT}, not handling audio focus
+   *   <li>{@link C.WakeMode}: {@link C#WAKE_MODE_NONE}
+   *   <li>{@code handleAudioBecomingNoisy}: {@code false}
+   *   <li>{@code skipSilenceEnabled}: {@code false}
+   *   <li>{@link C.VideoScalingMode}: {@link C#VIDEO_SCALING_MODE_DEFAULT}
+   *   <li>{@link C.VideoChangeFrameRateStrategy}: {@link
+   *       C#VIDEO_CHANGE_FRAME_RATE_STRATEGY_ONLY_IF_SEAMLESS}
    *   <li>{@code useLazyPreparation}: {@code true}
+   *   <li>{@link SeekParameters}: {@link SeekParameters#DEFAULT}
+   *   <li>{@code seekBackIncrementMs}: {@link C#DEFAULT_SEEK_BACK_INCREMENT_MS}
+   *   <li>{@code seekForwardIncrementMs}: {@link C#DEFAULT_SEEK_FORWARD_INCREMENT_MS}
+   *   <li>{@code releaseTimeoutMs}: {@link #DEFAULT_RELEASE_TIMEOUT_MS}
+   *   <li>{@code detachSurfaceTimeoutMs}: {@link #DEFAULT_DETACH_SURFACE_TIMEOUT_MS}
+   *   <li>{@code pauseAtEndOfMediaItems}: {@code false}
+   *   <li>{@code usePlatformDiagnostics}: {@code true}
    *   <li>{@link Clock}: {@link Clock#DEFAULT}
+   *   <li>{@code playbackLooper}: {@code null} (create new thread)
    * </ul>
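Most of the defaults listed above can be overridden on the Builder before build() is called. The sketch below assumes the corresponding Builder setters (setAudioAttributes, setHandleAudioBecomingNoisy, setWakeMode, setSeekBackIncrementMs, setSeekForwardIncrementMs) are defined further down in this class, outside the visible excerpt; BuilderDefaults is an illustrative name.

```java
import android.content.Context;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.audio.AudioAttributes;

final class BuilderDefaults {
  private BuilderDefaults() {}

  // Illustrative: overrides a few of the defaults listed in the javadoc above.
  static ExoPlayer build(Context context) {
    return new ExoPlayer.Builder(context)
        // The default is AudioAttributes#DEFAULT without focus handling; opt in to focus handling.
        .setAudioAttributes(AudioAttributes.DEFAULT, /* handleAudioFocus= */ true)
        // handleAudioBecomingNoisy defaults to false.
        .setHandleAudioBecomingNoisy(true)
        // Hold a wake lock during playback; the default is C.WAKE_MODE_NONE.
        .setWakeMode(C.WAKE_MODE_LOCAL)
        // Replace the default seek increments taken from C.DEFAULT_SEEK_*_INCREMENT_MS.
        .setSeekBackIncrementMs(10_000)
        .setSeekForwardIncrementMs(10_000)
        .build();
  }
}
```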
      * * @param context A {@link Context}. - * @param renderers The {@link Renderer Renderers} to be used by the player. */ - public Builder(Context context, Renderer... renderers) { + public Builder(Context context) { + this( + context, + () -> new DefaultRenderersFactory(context), + () -> new DefaultMediaSourceFactory(context, new DefaultExtractorsFactory())); + } + + /** + * Creates a builder with a custom {@link RenderersFactory}. + * + *

      See {@link #Builder(Context)} for a list of default values. + * + *

      Note that this constructor is only useful to try and ensure that ExoPlayer's {@link + * DefaultRenderersFactory} can be removed by ProGuard or R8. + * + * @param context A {@link Context}. + * @param renderersFactory A factory for creating {@link Renderer Renderers} to be used by the + * player. + */ + public Builder(Context context, RenderersFactory renderersFactory) { this( - renderers, - new DefaultTrackSelector(context), - new DefaultLoadControl(), - DefaultBandwidthMeter.getSingletonInstance(context), - Util.getLooper(), - new AnalyticsCollector(Clock.DEFAULT), - /* useLazyPreparation= */ true, - Clock.DEFAULT); + context, + () -> renderersFactory, + () -> new DefaultMediaSourceFactory(context, new DefaultExtractorsFactory())); + checkNotNull(renderersFactory); + } + + /** + * Creates a builder with a custom {@link MediaSource.Factory}. + * + *

      See {@link #Builder(Context)} for a list of default values. + * + *

      Note that this constructor is only useful to try and ensure that ExoPlayer's {@link + * DefaultMediaSourceFactory} (and therefore {@link DefaultExtractorsFactory}) can be removed by + * ProGuard or R8. + * + * @param context A {@link Context}. + * @param mediaSourceFactory A factory for creating a {@link MediaSource} from a {@link + * MediaItem}. + */ + public Builder(Context context, MediaSource.Factory mediaSourceFactory) { + this(context, () -> new DefaultRenderersFactory(context), () -> mediaSourceFactory); + checkNotNull(mediaSourceFactory); + } + + /** + * Creates a builder with a custom {@link RenderersFactory} and {@link MediaSource.Factory}. + * + *

      See {@link #Builder(Context)} for a list of default values. + * + *

      Note that this constructor is only useful to try and ensure that ExoPlayer's {@link + * DefaultRenderersFactory}, {@link DefaultMediaSourceFactory} (and therefore {@link + * DefaultExtractorsFactory}) can be removed by ProGuard or R8. + * + * @param context A {@link Context}. + * @param renderersFactory A factory for creating {@link Renderer Renderers} to be used by the + * player. + * @param mediaSourceFactory A factory for creating a {@link MediaSource} from a {@link + * MediaItem}. + */ + public Builder( + Context context, + RenderersFactory renderersFactory, + MediaSource.Factory mediaSourceFactory) { + this(context, () -> renderersFactory, () -> mediaSourceFactory); + checkNotNull(renderersFactory); + checkNotNull(mediaSourceFactory); } /** * Creates a builder with the specified custom components. * - *
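// Editor's note: illustrative sketch (not part of this patch) of the shrink-friendly
// Builder(Context, MediaSource.Factory) constructor described above. Wiring a progressive-only
// MediaSource.Factory means DefaultMediaSourceFactory is never referenced from app code.
ExoPlayer player =
    new ExoPlayer.Builder(
            context, new ProgressiveMediaSource.Factory(new DefaultDataSource.Factory(context)))
        .build();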

      Note that this constructor is only useful if you try to ensure that ExoPlayer's default - * components can be removed by ProGuard or R8. For most components except renderers, there is - * only a marginal benefit of doing that. + *

      Note that this constructor is only useful to try and ensure that ExoPlayer's default + * components can be removed by ProGuard or R8. * - * @param renderers The {@link Renderer Renderers} to be used by the player. + * @param context A {@link Context}. + * @param renderersFactory A factory for creating {@link Renderer Renderers} to be used by the + * player. + * @param mediaSourceFactory A {@link MediaSource.Factory}. * @param trackSelector A {@link TrackSelector}. * @param loadControl A {@link LoadControl}. * @param bandwidthMeter A {@link BandwidthMeter}. - * @param looper A {@link Looper} that must be used for all calls to the player. * @param analyticsCollector An {@link AnalyticsCollector}. - * @param useLazyPreparation Whether media sources should be initialized lazily. - * @param clock A {@link Clock}. Should always be {@link Clock#DEFAULT}. */ public Builder( - Renderer[] renderers, + Context context, + RenderersFactory renderersFactory, + MediaSource.Factory mediaSourceFactory, TrackSelector trackSelector, LoadControl loadControl, BandwidthMeter bandwidthMeter, - Looper looper, - AnalyticsCollector analyticsCollector, - boolean useLazyPreparation, - Clock clock) { - Assertions.checkArgument(renderers.length > 0); - this.renderers = renderers; - this.trackSelector = trackSelector; - this.loadControl = loadControl; - this.bandwidthMeter = bandwidthMeter; - this.looper = looper; - this.analyticsCollector = analyticsCollector; - this.useLazyPreparation = useLazyPreparation; - this.clock = clock; + AnalyticsCollector analyticsCollector) { + this( + context, + () -> renderersFactory, + () -> mediaSourceFactory, + () -> trackSelector, + () -> loadControl, + () -> bandwidthMeter, + (clock) -> analyticsCollector); + checkNotNull(renderersFactory); + checkNotNull(mediaSourceFactory); + checkNotNull(trackSelector); + checkNotNull(bandwidthMeter); + checkNotNull(analyticsCollector); + } + + private Builder( + Context context, + Supplier renderersFactorySupplier, + Supplier mediaSourceFactorySupplier) { + this( + context, + renderersFactorySupplier, + mediaSourceFactorySupplier, + () -> new DefaultTrackSelector(context), + DefaultLoadControl::new, + () -> DefaultBandwidthMeter.getSingletonInstance(context), + DefaultAnalyticsCollector::new); + } + + private Builder( + Context context, + Supplier renderersFactorySupplier, + Supplier mediaSourceFactorySupplier, + Supplier trackSelectorSupplier, + Supplier loadControlSupplier, + Supplier bandwidthMeterSupplier, + Function analyticsCollectorFunction) { + this.context = checkNotNull(context); + this.renderersFactorySupplier = renderersFactorySupplier; + this.mediaSourceFactorySupplier = mediaSourceFactorySupplier; + this.trackSelectorSupplier = trackSelectorSupplier; + this.loadControlSupplier = loadControlSupplier; + this.bandwidthMeterSupplier = bandwidthMeterSupplier; + this.analyticsCollectorFunction = analyticsCollectorFunction; + looper = Util.getCurrentOrMainLooper(); + audioAttributes = AudioAttributes.DEFAULT; + wakeMode = C.WAKE_MODE_NONE; + videoScalingMode = C.VIDEO_SCALING_MODE_DEFAULT; + videoChangeFrameRateStrategy = C.VIDEO_CHANGE_FRAME_RATE_STRATEGY_ONLY_IF_SEAMLESS; + useLazyPreparation = true; + seekParameters = SeekParameters.DEFAULT; + seekBackIncrementMs = C.DEFAULT_SEEK_BACK_INCREMENT_MS; + seekForwardIncrementMs = C.DEFAULT_SEEK_FORWARD_INCREMENT_MS; + livePlaybackSpeedControl = new DefaultLivePlaybackSpeedControl.Builder().build(); + clock = Clock.DEFAULT; + releaseTimeoutMs = DEFAULT_RELEASE_TIMEOUT_MS; + 
detachSurfaceTimeoutMs = DEFAULT_DETACH_SURFACE_TIMEOUT_MS; + usePlatformDiagnostics = true; + } + + /** + * Sets a limit on the time a call to {@link #setForegroundMode} can spend. If a call to {@link + * #setForegroundMode} takes more than {@code timeoutMs} milliseconds to complete, the player + * will raise an error via {@link Player.Listener#onPlayerError}. + * + *

      This method is experimental, and will be renamed or removed in a future release. + * + * @param timeoutMs The time limit in milliseconds. + */ + @CanIgnoreReturnValue + public Builder experimentalSetForegroundModeTimeoutMs(long timeoutMs) { + checkState(!buildCalled); + foregroundModeTimeoutMs = timeoutMs; + return this; + } + + /** + * Sets the {@link RenderersFactory} that will be used by the player. + * + * @param renderersFactory A {@link RenderersFactory}. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setRenderersFactory(RenderersFactory renderersFactory) { + checkState(!buildCalled); + checkNotNull(renderersFactory); + this.renderersFactorySupplier = () -> renderersFactory; + return this; + } + + /** + * Sets the {@link MediaSource.Factory} that will be used by the player. + * + * @param mediaSourceFactory A {@link MediaSource.Factory}. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setMediaSourceFactory(MediaSource.Factory mediaSourceFactory) { + checkState(!buildCalled); + checkNotNull(mediaSourceFactory); + this.mediaSourceFactorySupplier = () -> mediaSourceFactory; + return this; } /** @@ -218,9 +727,11 @@ public Builder( * @return This builder. * @throws IllegalStateException If {@link #build()} has already been called. */ + @CanIgnoreReturnValue public Builder setTrackSelector(TrackSelector trackSelector) { - Assertions.checkState(!buildCalled); - this.trackSelector = trackSelector; + checkState(!buildCalled); + checkNotNull(trackSelector); + this.trackSelectorSupplier = () -> trackSelector; return this; } @@ -231,9 +742,11 @@ public Builder setTrackSelector(TrackSelector trackSelector) { * @return This builder. * @throws IllegalStateException If {@link #build()} has already been called. */ + @CanIgnoreReturnValue public Builder setLoadControl(LoadControl loadControl) { - Assertions.checkState(!buildCalled); - this.loadControl = loadControl; + checkState(!buildCalled); + checkNotNull(loadControl); + this.loadControlSupplier = () -> loadControl; return this; } @@ -244,9 +757,11 @@ public Builder setLoadControl(LoadControl loadControl) { * @return This builder. * @throws IllegalStateException If {@link #build()} has already been called. */ + @CanIgnoreReturnValue public Builder setBandwidthMeter(BandwidthMeter bandwidthMeter) { - Assertions.checkState(!buildCalled); - this.bandwidthMeter = bandwidthMeter; + checkState(!buildCalled); + checkNotNull(bandwidthMeter); + this.bandwidthMeterSupplier = () -> bandwidthMeter; return this; } @@ -258,8 +773,10 @@ public Builder setBandwidthMeter(BandwidthMeter bandwidthMeter) { * @return This builder. * @throws IllegalStateException If {@link #build()} has already been called. */ + @CanIgnoreReturnValue public Builder setLooper(Looper looper) { - Assertions.checkState(!buildCalled); + checkState(!buildCalled); + checkNotNull(looper); this.looper = looper; return this; } @@ -271,94 +788,749 @@ public Builder setLooper(Looper looper) { * @return This builder. * @throws IllegalStateException If {@link #build()} has already been called. 
*/ + @CanIgnoreReturnValue public Builder setAnalyticsCollector(AnalyticsCollector analyticsCollector) { - Assertions.checkState(!buildCalled); - this.analyticsCollector = analyticsCollector; + checkState(!buildCalled); + checkNotNull(analyticsCollector); + this.analyticsCollectorFunction = (clock) -> analyticsCollector; return this; } /** - * Sets whether media sources should be initialized lazily. + * Sets a {@link PriorityTaskManager} that will be used by the player. * - *

      If false, all initial preparation steps (e.g., manifest loads) happen immediately. If - * true, these initial preparations are triggered only when the player starts buffering the - * media. + *

      The priority {@link C#PRIORITY_PLAYBACK} will be set while the player is loading. * - * @param useLazyPreparation Whether to use lazy preparation. + * @param priorityTaskManager A {@link PriorityTaskManager}, or null to not use one. * @return This builder. * @throws IllegalStateException If {@link #build()} has already been called. */ - public Builder setUseLazyPreparation(boolean useLazyPreparation) { - Assertions.checkState(!buildCalled); - this.useLazyPreparation = useLazyPreparation; + @CanIgnoreReturnValue + public Builder setPriorityTaskManager(@Nullable PriorityTaskManager priorityTaskManager) { + checkState(!buildCalled); + this.priorityTaskManager = priorityTaskManager; return this; } /** - * Sets the {@link Clock} that will be used by the player. Should only be set for testing - * purposes. + * Sets {@link AudioAttributes} that will be used by the player and whether to handle audio + * focus. * - * @param clock A {@link Clock}. + *

      If audio focus should be handled, the {@link AudioAttributes#usage} must be {@link + * C#USAGE_MEDIA} or {@link C#USAGE_GAME}. Other usages will throw an {@link + * IllegalArgumentException}. + * + * @param audioAttributes {@link AudioAttributes}. + * @param handleAudioFocus Whether the player should handle audio focus. * @return This builder. * @throws IllegalStateException If {@link #build()} has already been called. */ - @VisibleForTesting - public Builder setClock(Clock clock) { - Assertions.checkState(!buildCalled); - this.clock = clock; + @CanIgnoreReturnValue + public Builder setAudioAttributes(AudioAttributes audioAttributes, boolean handleAudioFocus) { + checkState(!buildCalled); + this.audioAttributes = checkNotNull(audioAttributes); + this.handleAudioFocus = handleAudioFocus; return this; } /** - * Builds an {@link ExoPlayer} instance. + * Sets the {@link C.WakeMode} that will be used by the player. + * + *
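// Editor's note: sketch (not part of this patch) of the audio-focus handling described above;
// assumes a Context named `context`. The usage must be USAGE_MEDIA or USAGE_GAME, otherwise
// setAudioAttributes(..., /* handleAudioFocus= */ true) throws IllegalArgumentException.
AudioAttributes musicAttributes = new AudioAttributes.Builder().setUsage(C.USAGE_MEDIA).build();
ExoPlayer player =
    new ExoPlayer.Builder(context)
        .setAudioAttributes(musicAttributes, /* handleAudioFocus= */ true)
        .build();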

      Enabling this feature requires the {@link android.Manifest.permission#WAKE_LOCK} + * permission. It should be used together with a foreground {@link android.app.Service} for use + * cases where playback occurs and the screen is off (e.g. background audio playback). It is not + * useful when the screen will be kept on during playback (e.g. foreground video playback). * + *

When enabled, the locks ({@link android.os.PowerManager.WakeLock} / {@link + * android.net.wifi.WifiManager.WifiLock}) will be held whenever the player is in the {@link + * #STATE_READY} or {@link #STATE_BUFFERING} states with {@code playWhenReady = true}. The locks + * held depend on the specified {@link C.WakeMode}. + * + * @param wakeMode A {@link C.WakeMode}. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. */ - public ExoPlayer build() { - Assertions.checkState(!buildCalled); - buildCalled = true; - return new ExoPlayerImpl( - renderers, trackSelector, loadControl, bandwidthMeter, clock, looper); + @CanIgnoreReturnValue + public Builder setWakeMode(@C.WakeMode int wakeMode) { + checkState(!buildCalled); + this.wakeMode = wakeMode; + return this; } - } - /** Returns the {@link Looper} associated with the playback thread. */ - Looper getPlaybackLooper(); + /** + * Sets whether the player should pause automatically when audio is rerouted from a headset to + * device speakers. See the + * audio becoming noisy documentation for more information. + * + * @param handleAudioBecomingNoisy Whether the player should pause automatically when audio is + * rerouted from a headset to device speakers. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setHandleAudioBecomingNoisy(boolean handleAudioBecomingNoisy) { + checkState(!buildCalled); + this.handleAudioBecomingNoisy = handleAudioBecomingNoisy; + return this; + } - /** - * Retries a failed or stopped playback. Does nothing if the player has been reset, or if playback - * has not failed or been stopped. - */ - void retry(); + /** + * Sets whether skipping silences in the audio stream is enabled. + * + * @param skipSilenceEnabled Whether skipping silences is enabled. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setSkipSilenceEnabled(boolean skipSilenceEnabled) { + checkState(!buildCalled); + this.skipSilenceEnabled = skipSilenceEnabled; + return this; + } - /** - * Prepares the player to play the provided {@link MediaSource}. Equivalent to {@code - * prepare(mediaSource, true, true)}. - */ - void prepare(MediaSource mediaSource); + /** + * Sets the {@link C.VideoScalingMode} that will be used by the player. + * + *
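// Editor's note: sketch (not part of this patch) combining the settings above into a typical
// background-audio configuration; assumes a Context named `context`, and the WAKE_LOCK
// permission is declared in the manifest for the wake mode to take effect.
ExoPlayer player =
    new ExoPlayer.Builder(context)
        .setWakeMode(C.WAKE_MODE_NETWORK)          // hold CPU and Wi-Fi locks while playing
        .setHandleAudioBecomingNoisy(true)         // pause when headphones are unplugged
        .setSkipSilenceEnabled(false)
        .build();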

      The scaling mode only applies if a {@link MediaCodec}-based video {@link Renderer} is + * enabled and if the output surface is owned by a {@link SurfaceView}. + * + * @param videoScalingMode A {@link C.VideoScalingMode}. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setVideoScalingMode(@C.VideoScalingMode int videoScalingMode) { + checkState(!buildCalled); + this.videoScalingMode = videoScalingMode; + return this; + } - /** - * Prepares the player to play the provided {@link MediaSource}, optionally resetting the playback - * position the default position in the first {@link Timeline.Window}. - * - * @param mediaSource The {@link MediaSource} to play. - * @param resetPosition Whether the playback position should be reset to the default position in - * the first {@link Timeline.Window}. If false, playback will start from the position defined - * by {@link #getCurrentWindowIndex()} and {@link #getCurrentPosition()}. - * @param resetState Whether the timeline, manifest, tracks and track selections should be reset. - * Should be true unless the player is being prepared to play the same media as it was playing - * previously (e.g. if playback failed and is being retried). + /** + * Sets a {@link C.VideoChangeFrameRateStrategy} that will be used by the player when provided + * with a video output {@link Surface}. + * + *

      The strategy only applies if a {@link MediaCodec}-based video {@link Renderer} is enabled. + * Applications wishing to use {@link Surface#CHANGE_FRAME_RATE_ALWAYS} should set the mode to + * {@link C#VIDEO_CHANGE_FRAME_RATE_STRATEGY_OFF} to disable calls to {@link + * Surface#setFrameRate} from ExoPlayer, and should then call {@link Surface#setFrameRate} + * directly from application code. + * + * @param videoChangeFrameRateStrategy A {@link C.VideoChangeFrameRateStrategy}. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setVideoChangeFrameRateStrategy( + @C.VideoChangeFrameRateStrategy int videoChangeFrameRateStrategy) { + checkState(!buildCalled); + this.videoChangeFrameRateStrategy = videoChangeFrameRateStrategy; + return this; + } + + /** + * Sets whether media sources should be initialized lazily. + * + *
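// Editor's note: sketch (not part of this patch) of taking over frame-rate switching as
// described above: disable ExoPlayer's own Surface.setFrameRate calls, then drive them from
// application code; assumes a Context named `context`.
ExoPlayer player =
    new ExoPlayer.Builder(context)
        .setVideoChangeFrameRateStrategy(C.VIDEO_CHANGE_FRAME_RATE_STRATEGY_OFF)
        .build();
// On API 30+ the app may then call, on its own output surface:
//   surface.setFrameRate(/* frameRate= */ 23.976f, Surface.FRAME_RATE_COMPATIBILITY_FIXED_SOURCE);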

      If false, all initial preparation steps (e.g., manifest loads) happen immediately. If + * true, these initial preparations are triggered only when the player starts buffering the + * media. + * + * @param useLazyPreparation Whether to use lazy preparation. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setUseLazyPreparation(boolean useLazyPreparation) { + checkState(!buildCalled); + this.useLazyPreparation = useLazyPreparation; + return this; + } + + /** + * Sets the parameters that control how seek operations are performed. + * + * @param seekParameters The {@link SeekParameters}. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setSeekParameters(SeekParameters seekParameters) { + checkState(!buildCalled); + this.seekParameters = checkNotNull(seekParameters); + return this; + } + + /** + * Sets the {@link #seekBack()} increment. + * + * @param seekBackIncrementMs The seek back increment, in milliseconds. + * @return This builder. + * @throws IllegalArgumentException If {@code seekBackIncrementMs} is non-positive. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setSeekBackIncrementMs(@IntRange(from = 1) long seekBackIncrementMs) { + checkArgument(seekBackIncrementMs > 0); + checkState(!buildCalled); + this.seekBackIncrementMs = seekBackIncrementMs; + return this; + } + + /** + * Sets the {@link #seekForward()} increment. + * + * @param seekForwardIncrementMs The seek forward increment, in milliseconds. + * @return This builder. + * @throws IllegalArgumentException If {@code seekForwardIncrementMs} is non-positive. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setSeekForwardIncrementMs(@IntRange(from = 1) long seekForwardIncrementMs) { + checkArgument(seekForwardIncrementMs > 0); + checkState(!buildCalled); + this.seekForwardIncrementMs = seekForwardIncrementMs; + return this; + } + + /** + * Sets a timeout for calls to {@link #release} and {@link #setForegroundMode}. + * + *
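// Editor's note: sketch (not part of this patch) of the seek-related options above; the
// increment values are arbitrary examples and `context` is assumed to be in scope.
ExoPlayer player =
    new ExoPlayer.Builder(context)
        .setSeekParameters(SeekParameters.CLOSEST_SYNC)
        .setSeekBackIncrementMs(5_000)
        .setSeekForwardIncrementMs(15_000)
        .build();
player.seekForward();  // jumps ~15 s; CLOSEST_SYNC lets the seek snap to the nearest sync point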

      If a call to {@link #release} or {@link #setForegroundMode} takes more than {@code + * timeoutMs} to complete, the player will report an error via {@link + * Player.Listener#onPlayerError}. + * + * @param releaseTimeoutMs The release timeout, in milliseconds. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setReleaseTimeoutMs(long releaseTimeoutMs) { + checkState(!buildCalled); + this.releaseTimeoutMs = releaseTimeoutMs; + return this; + } + + /** + * Sets a timeout for detaching a surface from the player. + * + *

      If detaching a surface or replacing a surface takes more than {@code + * detachSurfaceTimeoutMs} to complete, the player will report an error via {@link + * Player.Listener#onPlayerError}. + * + * @param detachSurfaceTimeoutMs The timeout for detaching a surface, in milliseconds. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setDetachSurfaceTimeoutMs(long detachSurfaceTimeoutMs) { + checkState(!buildCalled); + this.detachSurfaceTimeoutMs = detachSurfaceTimeoutMs; + return this; + } + + /** + * Sets whether to pause playback at the end of each media item. + * + *

      This means the player will pause at the end of each window in the current {@link + * #getCurrentTimeline() timeline}. Listeners will be informed by a call to {@link + * Player.Listener#onPlayWhenReadyChanged(boolean, int)} with the reason {@link + * Player#PLAY_WHEN_READY_CHANGE_REASON_END_OF_MEDIA_ITEM} when this happens. + * + * @param pauseAtEndOfMediaItems Whether to pause playback at the end of each media item. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setPauseAtEndOfMediaItems(boolean pauseAtEndOfMediaItems) { + checkState(!buildCalled); + this.pauseAtEndOfMediaItems = pauseAtEndOfMediaItems; + return this; + } + + /** + * Sets the {@link LivePlaybackSpeedControl} that will control the playback speed when playing + * live streams, in order to maintain a steady target offset from the live stream edge. + * + * @param livePlaybackSpeedControl The {@link LivePlaybackSpeedControl}. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setLivePlaybackSpeedControl(LivePlaybackSpeedControl livePlaybackSpeedControl) { + checkState(!buildCalled); + this.livePlaybackSpeedControl = checkNotNull(livePlaybackSpeedControl); + return this; + } + + /** + * Sets whether the player reports diagnostics data to the Android platform. + * + *
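// Editor's note: sketch (not part of this patch) showing the playlist and live-playback options
// above; the fallback speed values are illustrative only and `context` is assumed in scope.
ExoPlayer player =
    new ExoPlayer.Builder(context)
        .setPauseAtEndOfMediaItems(true)
        .setLivePlaybackSpeedControl(
            new DefaultLivePlaybackSpeedControl.Builder()
                .setFallbackMinPlaybackSpeed(0.97f)
                .setFallbackMaxPlaybackSpeed(1.03f)
                .build())
        .build();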

      If enabled, the player will use the {@link android.media.metrics.MediaMetricsManager} to + * create a {@link android.media.metrics.PlaybackSession} and forward playback events and + * performance data to this session. This helps to provide system performance and debugging + * information for media playback on the device. This data may also be collected by Google if sharing usage and diagnostics + * data is enabled by the user of the device. + * + * @param usePlatformDiagnostics Whether the player reports diagnostics data to the Android + * platform. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setUsePlatformDiagnostics(boolean usePlatformDiagnostics) { + checkState(!buildCalled); + this.usePlatformDiagnostics = usePlatformDiagnostics; + return this; + } + + /** + * Sets the {@link Clock} that will be used by the player. Should only be set for testing + * purposes. + * + * @param clock A {@link Clock}. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + @VisibleForTesting + public Builder setClock(Clock clock) { + checkState(!buildCalled); + this.clock = clock; + return this; + } + + /** + * Sets the {@link Looper} that will be used for playback. + * + *

The backing thread should run with priority {@link Process#THREAD_PRIORITY_AUDIO} and + * should handle messages within 10ms. + * + * @param playbackLooper A {@link Looper}. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder setPlaybackLooper(Looper playbackLooper) { + checkState(!buildCalled); + this.playbackLooper = playbackLooper; + return this; + } + + /** + * Builds an {@link ExoPlayer} instance. + * + * @throws IllegalStateException If this method has already been called. + */ + public ExoPlayer build() { + checkState(!buildCalled); + buildCalled = true; + return new ExoPlayerImpl(/* builder= */ this, /* wrappingPlayer= */ null); + } + + public SimpleExoPlayer buildSimpleExoPlayer() { + checkState(!buildCalled); + buildCalled = true; + return new SimpleExoPlayer(/* builder= */ this); + } + } + + /** + * The default timeout for calls to {@link #release} and {@link #setForegroundMode}, in + * milliseconds. + */ + long DEFAULT_RELEASE_TIMEOUT_MS = 500; + + /** The default timeout for detaching a surface from the player, in milliseconds. */ + long DEFAULT_DETACH_SURFACE_TIMEOUT_MS = 2_000; + + /** + * Equivalent to {@link Player#getPlayerError()}, except the exception is guaranteed to be an + * {@link ExoPlaybackException}. + */ + @Override + @Nullable + ExoPlaybackException getPlayerError(); + + /** + * @deprecated Use {@link ExoPlayer}, as the {@link AudioComponent} methods are defined by that + * interface. + */ + @Nullable + @Deprecated + AudioComponent getAudioComponent(); + + /** + * @deprecated Use {@link ExoPlayer}, as the {@link VideoComponent} methods are defined by that + * interface. + */ + @Nullable + @Deprecated + VideoComponent getVideoComponent(); + + /** + * @deprecated Use {@link Player}, as the {@link TextComponent} methods are defined by that + * interface. + */ + @Nullable + @Deprecated + TextComponent getTextComponent(); + + /** + * @deprecated Use {@link Player}, as the {@link DeviceComponent} methods are defined by that + * interface. + */ + @Nullable + @Deprecated + DeviceComponent getDeviceComponent(); + + /** + * Adds a listener to receive audio offload events. + * + *
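// Editor's note: sketch (not part of this patch) of supplying a dedicated playback Looper as
// described above and then building the player; the thread name is arbitrary, the priority
// follows the javadoc recommendation, and `context` is assumed in scope.
HandlerThread playbackThread =
    new HandlerThread("ExoPlayer:Playback", Process.THREAD_PRIORITY_AUDIO);
playbackThread.start();
ExoPlayer player =
    new ExoPlayer.Builder(context).setPlaybackLooper(playbackThread.getLooper()).build();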

      This method can be called from any thread. + * + * @param listener The listener to register. + */ + void addAudioOffloadListener(AudioOffloadListener listener); + + /** + * Removes a listener of audio offload events. + * + * @param listener The listener to unregister. + */ + void removeAudioOffloadListener(AudioOffloadListener listener); + + /** Returns the {@link AnalyticsCollector} used for collecting analytics events. */ + AnalyticsCollector getAnalyticsCollector(); + + /** + * Adds an {@link AnalyticsListener} to receive analytics events. + * + *

      This method can be called from any thread. + * + * @param listener The listener to be added. + */ + void addAnalyticsListener(AnalyticsListener listener); + + /** + * Removes an {@link AnalyticsListener}. + * + * @param listener The listener to be removed. + */ + void removeAnalyticsListener(AnalyticsListener listener); + + /** Returns the number of renderers. */ + int getRendererCount(); + + /** + * Returns the track type that the renderer at a given index handles. + * + *

      For example, a video renderer will return {@link C#TRACK_TYPE_VIDEO}, an audio renderer will + * return {@link C#TRACK_TYPE_AUDIO} and a text renderer will return {@link C#TRACK_TYPE_TEXT}. + * + * @param index The index of the renderer. + * @return The {@link C.TrackType track type} that the renderer handles. + */ + @C.TrackType + int getRendererType(int index); + + /** + * Returns the renderer at the given index. + * + * @param index The index of the renderer. + * @return The renderer at this index. + */ + Renderer getRenderer(int index); + + /** + * Returns the track selector that this player uses, or null if track selection is not supported. + */ + @Nullable + TrackSelector getTrackSelector(); + + /** + * Returns the available track groups. + * + * @see Listener#onTracksChanged(Tracks) + * @deprecated Use {@link #getCurrentTracks()}. + */ + @Deprecated + TrackGroupArray getCurrentTrackGroups(); + + /** + * Returns the current track selections for each renderer, which may include {@code null} elements + * if some renderers do not have any selected tracks. + * + * @see Listener#onTracksChanged(Tracks) + * @deprecated Use {@link #getCurrentTracks()}. + */ + @Deprecated + TrackSelectionArray getCurrentTrackSelections(); + + /** + * Returns the {@link Looper} associated with the playback thread. + * + *

      This method may be called from any thread. + */ + Looper getPlaybackLooper(); + + /** + * Returns the {@link Clock} used for playback. + * + *

      This method can be called from any thread. + */ + Clock getClock(); + + /** + * @deprecated Use {@link #prepare()} instead. + */ + @Deprecated + void retry(); + + /** + * @deprecated Use {@link #setMediaSource(MediaSource)} and {@link #prepare()} instead. */ + @Deprecated + void prepare(MediaSource mediaSource); + + /** + * @deprecated Use {@link #setMediaSource(MediaSource, boolean)} and {@link #prepare()} instead. + */ + @Deprecated void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetState); + /** + * Clears the playlist, adds the specified {@link MediaSource MediaSources} and resets the + * position to the default position. + * + * @param mediaSources The new {@link MediaSource MediaSources}. + */ + void setMediaSources(List mediaSources); + + /** + * Clears the playlist and adds the specified {@link MediaSource MediaSources}. + * + * @param mediaSources The new {@link MediaSource MediaSources}. + * @param resetPosition Whether the playback position should be reset to the default position in + * the first {@link Timeline.Window}. If false, playback will start from the position defined + * by {@link #getCurrentMediaItemIndex()} and {@link #getCurrentPosition()}. + */ + void setMediaSources(List mediaSources, boolean resetPosition); + + /** + * Clears the playlist and adds the specified {@link MediaSource MediaSources}. + * + * @param mediaSources The new {@link MediaSource MediaSources}. + * @param startMediaItemIndex The media item index to start playback from. If {@link + * C#INDEX_UNSET} is passed, the current position is not reset. + * @param startPositionMs The position in milliseconds to start playback from. If {@link + * C#TIME_UNSET} is passed, the default position of the given media item is used. In any case, + * if {@code startMediaItemIndex} is set to {@link C#INDEX_UNSET}, this parameter is ignored + * and the position is not reset at all. + */ + void setMediaSources( + List mediaSources, int startMediaItemIndex, long startPositionMs); + + /** + * Clears the playlist, adds the specified {@link MediaSource} and resets the position to the + * default position. + * + * @param mediaSource The new {@link MediaSource}. + */ + void setMediaSource(MediaSource mediaSource); + + /** + * Clears the playlist and adds the specified {@link MediaSource}. + * + * @param mediaSource The new {@link MediaSource}. + * @param startPositionMs The position in milliseconds to start playback from. + */ + void setMediaSource(MediaSource mediaSource, long startPositionMs); + + /** + * Clears the playlist and adds the specified {@link MediaSource}. + * + * @param mediaSource The new {@link MediaSource}. + * @param resetPosition Whether the playback position should be reset to the default position. If + * false, playback will start from the position defined by {@link #getCurrentMediaItemIndex()} + * and {@link #getCurrentPosition()}. + */ + void setMediaSource(MediaSource mediaSource, boolean resetPosition); + + /** + * Adds a media source to the end of the playlist. + * + * @param mediaSource The {@link MediaSource} to add. + */ + void addMediaSource(MediaSource mediaSource); + + /** + * Adds a media source at the given index of the playlist. + * + * @param index The index at which to add the source. + * @param mediaSource The {@link MediaSource} to add. + */ + void addMediaSource(int index, MediaSource mediaSource); + + /** + * Adds a list of media sources to the end of the playlist. + * + * @param mediaSources The {@link MediaSource MediaSources} to add. 
+ */ + void addMediaSources(List mediaSources); + + /** + * Adds a list of media sources at the given index of the playlist. + * + * @param index The index at which to add the media sources. + * @param mediaSources The {@link MediaSource MediaSources} to add. + */ + void addMediaSources(int index, List mediaSources); + + /** + * Sets the shuffle order. + * + * @param shuffleOrder The shuffle order. + */ + void setShuffleOrder(ShuffleOrder shuffleOrder); + + /** + * Sets the attributes for audio playback, used by the underlying audio track. If not set, the + * default audio attributes will be used. They are suitable for general media playback. + * + *
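// Editor's note: sketch (not part of this patch) of the replacement for the deprecated
// prepare(MediaSource) overloads shown above; assumes an existing ExoPlayer named `player`,
// a Context named `context`, and a placeholder progressive URL.
MediaSource source =
    new ProgressiveMediaSource.Factory(new DefaultDataSource.Factory(context))
        .createMediaSource(MediaItem.fromUri("https://example.com/media.mp4"));
player.setMediaSource(source, /* resetPosition= */ true);
player.prepare();
player.play();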

      Setting the audio attributes during playback may introduce a short gap in audio output as + * the audio track is recreated. A new audio session id will also be generated. + * + *

      If tunneling is enabled by the track selector, the specified audio attributes will be + * ignored, but they will take effect if audio is later played without tunneling. + * + *

      If the device is running a build before platform API version 21, audio attributes cannot be + * set directly on the underlying audio track. In this case, the usage will be mapped onto an + * equivalent stream type using {@link Util#getStreamTypeForAudioUsage(int)}. + * + *

      If audio focus should be handled, the {@link AudioAttributes#usage} must be {@link + * C#USAGE_MEDIA} or {@link C#USAGE_GAME}. Other usages will throw an {@link + * IllegalArgumentException}. + * + * @param audioAttributes The attributes to use for audio playback. + * @param handleAudioFocus True if the player should handle audio focus, false otherwise. + */ + void setAudioAttributes(AudioAttributes audioAttributes, boolean handleAudioFocus); + + /** + * Sets the ID of the audio session to attach to the underlying {@link android.media.AudioTrack}. + * + *

      The audio session ID can be generated using {@link Util#generateAudioSessionIdV21(Context)} + * for API 21+. + * + * @param audioSessionId The audio session ID, or {@link C#AUDIO_SESSION_ID_UNSET} if it should be + * generated by the framework. + */ + void setAudioSessionId(int audioSessionId); + + /** Returns the audio session identifier, or {@link C#AUDIO_SESSION_ID_UNSET} if not set. */ + int getAudioSessionId(); + + /** Sets information on an auxiliary audio effect to attach to the underlying audio track. */ + void setAuxEffectInfo(AuxEffectInfo auxEffectInfo); + + /** Detaches any previously attached auxiliary audio effect from the underlying audio track. */ + void clearAuxEffectInfo(); + + /** + * Sets the preferred audio device. + * + * @param audioDeviceInfo The preferred {@linkplain AudioDeviceInfo audio device}, or null to + * restore the default. + */ + @RequiresApi(23) + void setPreferredAudioDevice(@Nullable AudioDeviceInfo audioDeviceInfo); + + /** + * Sets whether skipping silences in the audio stream is enabled. + * + * @param skipSilenceEnabled Whether skipping silences in the audio stream is enabled. + */ + void setSkipSilenceEnabled(boolean skipSilenceEnabled); + + /** Returns whether skipping silences in the audio stream is enabled. */ + boolean getSkipSilenceEnabled(); + + /** + * Sets the {@link C.VideoScalingMode}. + * + *
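// Editor's note: sketch (not part of this patch) of attaching an auxiliary audio effect as
// described above. Assumes an existing ExoPlayer named `player`; auxiliary effects are created
// on audio session 0 (the output mix) and referenced by id, and the send level is arbitrary.
PresetReverb reverb = new PresetReverb(/* priority= */ 0, /* audioSession= */ 0);
reverb.setPreset(PresetReverb.PRESET_MEDIUMHALL);
reverb.setEnabled(true);
player.setAuxEffectInfo(new AuxEffectInfo(reverb.getId(), /* sendLevel= */ 1f));
// Later, detach the effect and release it:
player.clearAuxEffectInfo();
reverb.release();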

      The scaling mode only applies if a {@link MediaCodec}-based video {@link Renderer} is + * enabled and if the output surface is owned by a {@link SurfaceView}. + * + * @param videoScalingMode The {@link C.VideoScalingMode}. + */ + void setVideoScalingMode(@C.VideoScalingMode int videoScalingMode); + + /** Returns the {@link C.VideoScalingMode}. */ + @C.VideoScalingMode + int getVideoScalingMode(); + + /** + * Sets a {@link C.VideoChangeFrameRateStrategy} that will be used by the player when provided + * with a video output {@link Surface}. + * + *

      The strategy only applies if a {@link MediaCodec}-based video {@link Renderer} is enabled. + * Applications wishing to use {@link Surface#CHANGE_FRAME_RATE_ALWAYS} should set the mode to + * {@link C#VIDEO_CHANGE_FRAME_RATE_STRATEGY_OFF} to disable calls to {@link Surface#setFrameRate} + * from ExoPlayer, and should then call {@link Surface#setFrameRate} directly from application + * code. + * + * @param videoChangeFrameRateStrategy A {@link C.VideoChangeFrameRateStrategy}. + */ + void setVideoChangeFrameRateStrategy( + @C.VideoChangeFrameRateStrategy int videoChangeFrameRateStrategy); + + /** Returns the {@link C.VideoChangeFrameRateStrategy}. */ + @C.VideoChangeFrameRateStrategy + int getVideoChangeFrameRateStrategy(); + + /** + * Sets a listener to receive video frame metadata events. + * + *

      This method is intended to be called by the same component that sets the {@link Surface} + * onto which video will be rendered. If using ExoPlayer's standard UI components, this method + * should not be called directly from application code. + * + * @param listener The listener. + */ + void setVideoFrameMetadataListener(VideoFrameMetadataListener listener); + + /** + * Clears the listener which receives video frame metadata events if it matches the one passed. + * Else does nothing. + * + * @param listener The listener to clear. + */ + void clearVideoFrameMetadataListener(VideoFrameMetadataListener listener); + + /** + * Sets a listener of camera motion events. + * + * @param listener The listener. + */ + void setCameraMotionListener(CameraMotionListener listener); + + /** + * Clears the listener which receives camera motion events if it matches the one passed. Else does + * nothing. + * + * @param listener The listener to clear. + */ + void clearCameraMotionListener(CameraMotionListener listener); + /** * Creates a message that can be sent to a {@link PlayerMessage.Target}. By default, the message * will be delivered immediately without blocking on the playback thread. The default {@link * PlayerMessage#getType()} is 0 and the default {@link PlayerMessage#getPayload()} is null. If a * position is specified with {@link PlayerMessage#setPosition(long)}, the message will be - * delivered at this position in the current window defined by {@link #getCurrentWindowIndex()}. - * Alternatively, the message can be sent at a specific window using {@link - * PlayerMessage#setPosition(int, long)}. + * delivered at this position in the current media item defined by {@link + * #getCurrentMediaItemIndex()}. Alternatively, the message can be sent at a specific mediaItem + * using {@link PlayerMessage#setPosition(int, long)}. */ PlayerMessage createMessage(PlayerMessage.Target target); @@ -401,4 +1573,138 @@ public ExoPlayer build() { * idle state. */ void setForegroundMode(boolean foregroundMode); + + /** + * Sets whether to pause playback at the end of each media item. + * + *
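// Editor's note: sketch (not part of this patch) of the createMessage API summarized above,
// delivering a callback two minutes into the first media item; assumes an existing ExoPlayer
// named `player`, and the payload, position and log tag are arbitrary examples.
player
    .createMessage((messageType, payload) -> Log.d("Cue", "reached " + payload))
    .setLooper(Looper.getMainLooper())
    .setPosition(/* mediaItemIndex= */ 0, /* positionMs= */ 120_000)
    .setPayload("chapter-2")
    .setDeleteAfterDelivery(false)
    .send();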

      This means the player will pause at the end of each window in the current {@link + * #getCurrentTimeline() timeline}. Listeners will be informed by a call to {@link + * Player.Listener#onPlayWhenReadyChanged(boolean, int)} with the reason {@link + * Player#PLAY_WHEN_READY_CHANGE_REASON_END_OF_MEDIA_ITEM} when this happens. + * + * @param pauseAtEndOfMediaItems Whether to pause playback at the end of each media item. + */ + void setPauseAtEndOfMediaItems(boolean pauseAtEndOfMediaItems); + + /** + * Returns whether the player pauses playback at the end of each media item. + * + * @see #setPauseAtEndOfMediaItems(boolean) + */ + boolean getPauseAtEndOfMediaItems(); + + /** Returns the audio format currently being played, or null if no audio is being played. */ + @Nullable + Format getAudioFormat(); + + /** Returns the video format currently being played, or null if no video is being played. */ + @Nullable + Format getVideoFormat(); + + /** Returns {@link DecoderCounters} for audio, or null if no audio is being played. */ + @Nullable + DecoderCounters getAudioDecoderCounters(); + + /** Returns {@link DecoderCounters} for video, or null if no video is being played. */ + @Nullable + DecoderCounters getVideoDecoderCounters(); + + /** + * Sets whether the player should pause automatically when audio is rerouted from a headset to + * device speakers. See the audio + * becoming noisy documentation for more information. + * + * @param handleAudioBecomingNoisy Whether the player should pause automatically when audio is + * rerouted from a headset to device speakers. + */ + void setHandleAudioBecomingNoisy(boolean handleAudioBecomingNoisy); + + /** + * @deprecated Use {@link #setWakeMode(int)} instead. + */ + @Deprecated + void setHandleWakeLock(boolean handleWakeLock); + + /** + * Sets how the player should keep the device awake for playback when the screen is off. + * + *

      Enabling this feature requires the {@link android.Manifest.permission#WAKE_LOCK} permission. + * It should be used together with a foreground {@link android.app.Service} for use cases where + * playback occurs and the screen is off (e.g. background audio playback). It is not useful when + * the screen will be kept on during playback (e.g. foreground video playback). + * + *

When enabled, the locks ({@link android.os.PowerManager.WakeLock} / {@link + * android.net.wifi.WifiManager.WifiLock}) will be held whenever the player is in the {@link + * #STATE_READY} or {@link #STATE_BUFFERING} states with {@code playWhenReady = true}. The locks + * held depend on the specified {@link C.WakeMode}. + * + * @param wakeMode The {@link C.WakeMode} option to keep the device awake during playback. + */ + void setWakeMode(@C.WakeMode int wakeMode); + + /** + * Sets a {@link PriorityTaskManager}, or null to clear a previously set priority task manager. + * + *

      The priority {@link C#PRIORITY_PLAYBACK} will be set while the player is loading. + * + * @param priorityTaskManager The {@link PriorityTaskManager}, or null to clear a previously set + * priority task manager. + */ + void setPriorityTaskManager(@Nullable PriorityTaskManager priorityTaskManager); + + /** + * Sets whether audio offload scheduling is enabled. If enabled, ExoPlayer's main loop will run as + * rarely as possible when playing an audio stream using audio offload. + * + *

      Only use this scheduling mode if the player is not displaying anything to the user. For + * example when the application is in the background, or the screen is off. The player state + * (including position) is rarely updated (roughly between every 10 seconds and 1 minute). + * + *

      While offload scheduling is enabled, player events may be delivered severely delayed and + * apps should not interact with the player. When returning to the foreground, disable offload + * scheduling and wait for {@link + * AudioOffloadListener#onExperimentalOffloadSchedulingEnabledChanged(boolean)} to be called with + * {@code offloadSchedulingEnabled = false} before interacting with the player. + * + *

      This mode should save significant power when the phone is playing offload audio with the + * screen off. + * + *

      This mode only has an effect when playing an audio track in offload mode, which requires all + * the following: + * + *

        + *
      • Audio offload rendering is enabled in {@link + * DefaultRenderersFactory#setEnableAudioOffload} or the equivalent option passed to {@link + * DefaultAudioSink.Builder#setOffloadMode}. + *
      • An audio track is playing in a format that the device supports offloading (for example, + * MP3 or AAC). + *
      • The {@link AudioSink} is playing with an offload {@link AudioTrack}. + *
      + * + *

      The state where ExoPlayer main loop has been paused to save power during offload playback + * can be queried with {@link #experimentalIsSleepingForOffload()}. + * + *
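// Editor's note: sketch (not part of this patch) of the offload flow described above: enable
// offload rendering when building the player, then toggle offload scheduling while the app is
// backgrounded; assumes a Context named `context`.
DefaultRenderersFactory offloadFactory =
    new DefaultRenderersFactory(context).setEnableAudioOffload(true);
ExoPlayer player = new ExoPlayer.Builder(context, offloadFactory).build();
// When the UI is no longer visible and only audio is playing:
player.experimentalSetOffloadSchedulingEnabled(true);
// Before returning to the foreground and interacting with the player again:
player.experimentalSetOffloadSchedulingEnabled(false);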

      This method is experimental, and will be renamed or removed in a future release. + * + * @param offloadSchedulingEnabled Whether to enable offload scheduling. + */ + void experimentalSetOffloadSchedulingEnabled(boolean offloadSchedulingEnabled); + + /** + * Returns whether the player has paused its main loop to save power in offload scheduling mode. + * + * @see #experimentalSetOffloadSchedulingEnabled(boolean) + * @see AudioOffloadListener#onExperimentalSleepingForOffloadChanged(boolean) + */ + boolean experimentalIsSleepingForOffload(); + + /** + * Returns whether tunneling is enabled for + * the currently selected tracks. + * + * @see Player.Listener#onTracksChanged(Tracks) + */ + boolean isTunnelingEnabled(); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayerFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayerFactory.java deleted file mode 100644 index e4f239df77..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayerFactory.java +++ /dev/null @@ -1,350 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2; - -import android.content.Context; -import android.os.Looper; -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.analytics.AnalyticsCollector; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.drm.FrameworkMediaCrypto; -import com.google.android.exoplayer2.source.MediaSource; -import com.google.android.exoplayer2.trackselection.DefaultTrackSelector; -import com.google.android.exoplayer2.trackselection.TrackSelector; -import com.google.android.exoplayer2.upstream.BandwidthMeter; -import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter; -import com.google.android.exoplayer2.util.Clock; -import com.google.android.exoplayer2.util.Util; - -/** @deprecated Use {@link SimpleExoPlayer.Builder} or {@link ExoPlayer.Builder} instead. */ -@Deprecated -public final class ExoPlayerFactory { - - private ExoPlayerFactory() {} - - /** - * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot - * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link - * MediaSource} factories. - */ - @Deprecated - @SuppressWarnings("deprecation") - public static SimpleExoPlayer newSimpleInstance( - Context context, - TrackSelector trackSelector, - LoadControl loadControl, - @Nullable DrmSessionManager drmSessionManager, - @DefaultRenderersFactory.ExtensionRendererMode int extensionRendererMode) { - RenderersFactory renderersFactory = - new DefaultRenderersFactory(context).setExtensionRendererMode(extensionRendererMode); - return newSimpleInstance( - context, renderersFactory, trackSelector, loadControl, drmSessionManager); - } - - /** - * @deprecated Use {@link SimpleExoPlayer.Builder} instead. 
The {@link DrmSessionManager} cannot - * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link - * MediaSource} factories. - */ - @Deprecated - @SuppressWarnings("deprecation") - public static SimpleExoPlayer newSimpleInstance( - Context context, - TrackSelector trackSelector, - LoadControl loadControl, - @Nullable DrmSessionManager drmSessionManager, - @DefaultRenderersFactory.ExtensionRendererMode int extensionRendererMode, - long allowedVideoJoiningTimeMs) { - RenderersFactory renderersFactory = - new DefaultRenderersFactory(context) - .setExtensionRendererMode(extensionRendererMode) - .setAllowedVideoJoiningTimeMs(allowedVideoJoiningTimeMs); - return newSimpleInstance( - context, renderersFactory, trackSelector, loadControl, drmSessionManager); - } - - /** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */ - @Deprecated - @SuppressWarnings("deprecation") - public static SimpleExoPlayer newSimpleInstance(Context context) { - return newSimpleInstance(context, new DefaultTrackSelector(context)); - } - - /** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */ - @Deprecated - @SuppressWarnings("deprecation") - public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector) { - return newSimpleInstance(context, new DefaultRenderersFactory(context), trackSelector); - } - - /** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */ - @Deprecated - @SuppressWarnings("deprecation") - public static SimpleExoPlayer newSimpleInstance( - Context context, RenderersFactory renderersFactory, TrackSelector trackSelector) { - return newSimpleInstance(context, renderersFactory, trackSelector, new DefaultLoadControl()); - } - - /** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */ - @Deprecated - @SuppressWarnings("deprecation") - public static SimpleExoPlayer newSimpleInstance( - Context context, TrackSelector trackSelector, LoadControl loadControl) { - RenderersFactory renderersFactory = new DefaultRenderersFactory(context); - return newSimpleInstance(context, renderersFactory, trackSelector, loadControl); - } - - /** - * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot - * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link - * MediaSource} factories. - */ - @Deprecated - @SuppressWarnings("deprecation") - public static SimpleExoPlayer newSimpleInstance( - Context context, - TrackSelector trackSelector, - LoadControl loadControl, - @Nullable DrmSessionManager drmSessionManager) { - RenderersFactory renderersFactory = new DefaultRenderersFactory(context); - return newSimpleInstance( - context, renderersFactory, trackSelector, loadControl, drmSessionManager); - } - - /** - * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot - * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link - * MediaSource} factories. - */ - @Deprecated - @SuppressWarnings("deprecation") - public static SimpleExoPlayer newSimpleInstance( - Context context, - RenderersFactory renderersFactory, - TrackSelector trackSelector, - @Nullable DrmSessionManager drmSessionManager) { - return newSimpleInstance( - context, renderersFactory, trackSelector, new DefaultLoadControl(), drmSessionManager); - } - - /** @deprecated Use {@link SimpleExoPlayer.Builder} instead. 
*/ - @Deprecated - @SuppressWarnings("deprecation") - public static SimpleExoPlayer newSimpleInstance( - Context context, - RenderersFactory renderersFactory, - TrackSelector trackSelector, - LoadControl loadControl) { - return newSimpleInstance( - context, - renderersFactory, - trackSelector, - loadControl, - /* drmSessionManager= */ null, - Util.getLooper()); - } - - /** - * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot - * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link - * MediaSource} factories. - */ - @Deprecated - @SuppressWarnings("deprecation") - public static SimpleExoPlayer newSimpleInstance( - Context context, - RenderersFactory renderersFactory, - TrackSelector trackSelector, - LoadControl loadControl, - @Nullable DrmSessionManager drmSessionManager) { - return newSimpleInstance( - context, renderersFactory, trackSelector, loadControl, drmSessionManager, Util.getLooper()); - } - - /** - * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot - * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link - * MediaSource} factories. - */ - @Deprecated - @SuppressWarnings("deprecation") - public static SimpleExoPlayer newSimpleInstance( - Context context, - RenderersFactory renderersFactory, - TrackSelector trackSelector, - LoadControl loadControl, - @Nullable DrmSessionManager drmSessionManager, - BandwidthMeter bandwidthMeter) { - return newSimpleInstance( - context, - renderersFactory, - trackSelector, - loadControl, - drmSessionManager, - bandwidthMeter, - new AnalyticsCollector(Clock.DEFAULT), - Util.getLooper()); - } - - /** - * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot - * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link - * MediaSource} factories. - */ - @Deprecated - @SuppressWarnings("deprecation") - public static SimpleExoPlayer newSimpleInstance( - Context context, - RenderersFactory renderersFactory, - TrackSelector trackSelector, - LoadControl loadControl, - @Nullable DrmSessionManager drmSessionManager, - AnalyticsCollector analyticsCollector) { - return newSimpleInstance( - context, - renderersFactory, - trackSelector, - loadControl, - drmSessionManager, - analyticsCollector, - Util.getLooper()); - } - - /** - * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot - * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link - * MediaSource} factories. - */ - @Deprecated - @SuppressWarnings("deprecation") - public static SimpleExoPlayer newSimpleInstance( - Context context, - RenderersFactory renderersFactory, - TrackSelector trackSelector, - LoadControl loadControl, - @Nullable DrmSessionManager drmSessionManager, - Looper looper) { - return newSimpleInstance( - context, - renderersFactory, - trackSelector, - loadControl, - drmSessionManager, - new AnalyticsCollector(Clock.DEFAULT), - looper); - } - - /** - * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot - * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link - * MediaSource} factories. 
- */ - @Deprecated - @SuppressWarnings("deprecation") - public static SimpleExoPlayer newSimpleInstance( - Context context, - RenderersFactory renderersFactory, - TrackSelector trackSelector, - LoadControl loadControl, - @Nullable DrmSessionManager drmSessionManager, - AnalyticsCollector analyticsCollector, - Looper looper) { - return newSimpleInstance( - context, - renderersFactory, - trackSelector, - loadControl, - drmSessionManager, - DefaultBandwidthMeter.getSingletonInstance(context), - analyticsCollector, - looper); - } - - /** - * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot - * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link - * MediaSource} factories. - */ - @SuppressWarnings("deprecation") - @Deprecated - public static SimpleExoPlayer newSimpleInstance( - Context context, - RenderersFactory renderersFactory, - TrackSelector trackSelector, - LoadControl loadControl, - @Nullable DrmSessionManager drmSessionManager, - BandwidthMeter bandwidthMeter, - AnalyticsCollector analyticsCollector, - Looper looper) { - return new SimpleExoPlayer( - context, - renderersFactory, - trackSelector, - loadControl, - drmSessionManager, - bandwidthMeter, - analyticsCollector, - Clock.DEFAULT, - looper); - } - - /** @deprecated Use {@link ExoPlayer.Builder} instead. */ - @Deprecated - @SuppressWarnings("deprecation") - public static ExoPlayer newInstance( - Context context, Renderer[] renderers, TrackSelector trackSelector) { - return newInstance(context, renderers, trackSelector, new DefaultLoadControl()); - } - - /** @deprecated Use {@link ExoPlayer.Builder} instead. */ - @Deprecated - @SuppressWarnings("deprecation") - public static ExoPlayer newInstance( - Context context, Renderer[] renderers, TrackSelector trackSelector, LoadControl loadControl) { - return newInstance(context, renderers, trackSelector, loadControl, Util.getLooper()); - } - - /** @deprecated Use {@link ExoPlayer.Builder} instead. */ - @Deprecated - @SuppressWarnings("deprecation") - public static ExoPlayer newInstance( - Context context, - Renderer[] renderers, - TrackSelector trackSelector, - LoadControl loadControl, - Looper looper) { - return newInstance( - context, - renderers, - trackSelector, - loadControl, - DefaultBandwidthMeter.getSingletonInstance(context), - looper); - } - - /** @deprecated Use {@link ExoPlayer.Builder} instead. 
*/ - @Deprecated - public static ExoPlayer newInstance( - Context context, - Renderer[] renderers, - TrackSelector trackSelector, - LoadControl loadControl, - BandwidthMeter bandwidthMeter, - Looper looper) { - return new ExoPlayerImpl( - renderers, trackSelector, loadControl, bandwidthMeter, Clock.DEFAULT, looper); - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayerImpl.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayerImpl.java index ca46e3138c..a704030075 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayerImpl.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayerImpl.java @@ -15,32 +15,108 @@ */ package com.google.android.exoplayer2; +import static com.google.android.exoplayer2.C.TRACK_TYPE_AUDIO; +import static com.google.android.exoplayer2.C.TRACK_TYPE_CAMERA_MOTION; +import static com.google.android.exoplayer2.C.TRACK_TYPE_VIDEO; +import static com.google.android.exoplayer2.Renderer.MSG_SET_AUDIO_ATTRIBUTES; +import static com.google.android.exoplayer2.Renderer.MSG_SET_AUDIO_SESSION_ID; +import static com.google.android.exoplayer2.Renderer.MSG_SET_AUX_EFFECT_INFO; +import static com.google.android.exoplayer2.Renderer.MSG_SET_CAMERA_MOTION_LISTENER; +import static com.google.android.exoplayer2.Renderer.MSG_SET_CHANGE_FRAME_RATE_STRATEGY; +import static com.google.android.exoplayer2.Renderer.MSG_SET_PREFERRED_AUDIO_DEVICE; +import static com.google.android.exoplayer2.Renderer.MSG_SET_SCALING_MODE; +import static com.google.android.exoplayer2.Renderer.MSG_SET_SKIP_SILENCE_ENABLED; +import static com.google.android.exoplayer2.Renderer.MSG_SET_VIDEO_FRAME_METADATA_LISTENER; +import static com.google.android.exoplayer2.Renderer.MSG_SET_VIDEO_OUTPUT; +import static com.google.android.exoplayer2.Renderer.MSG_SET_VOLUME; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.max; +import static java.lang.Math.min; + import android.annotation.SuppressLint; +import android.content.Context; +import android.graphics.Rect; +import android.graphics.SurfaceTexture; +import android.media.AudioDeviceInfo; +import android.media.AudioFormat; +import android.media.AudioTrack; +import android.media.MediaFormat; +import android.media.metrics.LogSessionId; import android.os.Handler; import android.os.Looper; -import android.os.Message; import android.util.Pair; +import android.view.Surface; +import android.view.SurfaceHolder; +import android.view.SurfaceView; +import android.view.TextureView; +import androidx.annotation.DoNotInline; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.PlayerMessage.Target; +import com.google.android.exoplayer2.Renderer.MessageType; +import com.google.android.exoplayer2.analytics.AnalyticsCollector; +import com.google.android.exoplayer2.analytics.AnalyticsListener; +import com.google.android.exoplayer2.analytics.DefaultAnalyticsCollector; +import com.google.android.exoplayer2.analytics.MediaMetricsListener; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.audio.AudioAttributes; +import com.google.android.exoplayer2.audio.AudioRendererEventListener; +import 
com.google.android.exoplayer2.audio.AuxEffectInfo; +import com.google.android.exoplayer2.decoder.DecoderCounters; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.metadata.MetadataOutput; import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; +import com.google.android.exoplayer2.source.ShuffleOrder; +import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.TrackGroupArray; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.text.Cue; +import com.google.android.exoplayer2.text.CueGroup; +import com.google.android.exoplayer2.text.TextOutput; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.trackselection.TrackSelectionArray; +import com.google.android.exoplayer2.trackselection.TrackSelectionParameters; import com.google.android.exoplayer2.trackselection.TrackSelector; import com.google.android.exoplayer2.trackselection.TrackSelectorResult; import com.google.android.exoplayer2.upstream.BandwidthMeter; -import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Clock; +import com.google.android.exoplayer2.util.ConditionVariable; +import com.google.android.exoplayer2.util.HandlerWrapper; +import com.google.android.exoplayer2.util.ListenerSet; import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.PriorityTaskManager; +import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Util; -import java.util.ArrayDeque; -import java.util.concurrent.CopyOnWriteArrayList; - -/** - * An {@link ExoPlayer} implementation. Instances can be obtained from {@link ExoPlayer.Builder}. - */ -/* package */ final class ExoPlayerImpl extends BasePlayer implements ExoPlayer { +import com.google.android.exoplayer2.video.VideoDecoderOutputBufferRenderer; +import com.google.android.exoplayer2.video.VideoFrameMetadataListener; +import com.google.android.exoplayer2.video.VideoListener; +import com.google.android.exoplayer2.video.VideoRendererEventListener; +import com.google.android.exoplayer2.video.VideoSize; +import com.google.android.exoplayer2.video.spherical.CameraMotionListener; +import com.google.android.exoplayer2.video.spherical.SphericalGLSurfaceView; +import com.google.common.collect.ImmutableList; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CopyOnWriteArraySet; +import java.util.concurrent.TimeoutException; + +/** The default implementation of {@link ExoPlayer}. */ +/* package */ final class ExoPlayerImpl extends BasePlayer + implements ExoPlayer, + ExoPlayer.AudioComponent, + ExoPlayer.VideoComponent, + ExoPlayer.TextComponent, + ExoPlayer.DeviceComponent { + + static { + ExoPlayerLibraryInfo.registerModule("goog.exo.exoplayer"); + } private static final String TAG = "ExoPlayerImpl"; @@ -52,28 +128,83 @@ * operation. 
*/ /* package */ final TrackSelectorResult emptyTrackSelectorResult; + /* package */ final Commands permanentAvailableCommands; + private final ConditionVariable constructorFinished; + private final Context applicationContext; + private final Player wrappingPlayer; private final Renderer[] renderers; private final TrackSelector trackSelector; - private final Handler eventHandler; + private final HandlerWrapper playbackInfoUpdateHandler; + private final ExoPlayerImplInternal.PlaybackInfoUpdateListener playbackInfoUpdateListener; private final ExoPlayerImplInternal internalPlayer; - private final Handler internalPlayerHandler; - private final CopyOnWriteArrayList listeners; - private final Timeline.Period period; - private final ArrayDeque pendingListenerNotifications; - private MediaSource mediaSource; - private boolean playWhenReady; - @PlaybackSuppressionReason private int playbackSuppressionReason; - @RepeatMode private int repeatMode; + private final ListenerSet listeners; + private final CopyOnWriteArraySet audioOffloadListeners; + private final Timeline.Period period; + private final List mediaSourceHolderSnapshots; + private final boolean useLazyPreparation; + private final MediaSource.Factory mediaSourceFactory; + private final AnalyticsCollector analyticsCollector; + private final Looper applicationLooper; + private final BandwidthMeter bandwidthMeter; + private final long seekBackIncrementMs; + private final long seekForwardIncrementMs; + private final Clock clock; + private final ComponentListener componentListener; + private final FrameMetadataListener frameMetadataListener; + private final AudioBecomingNoisyManager audioBecomingNoisyManager; + private final AudioFocusManager audioFocusManager; + private final StreamVolumeManager streamVolumeManager; + private final WakeLockManager wakeLockManager; + private final WifiLockManager wifiLockManager; + private final long detachSurfaceTimeoutMs; + + private @RepeatMode int repeatMode; private boolean shuffleModeEnabled; private int pendingOperationAcks; - private boolean hasPendingPrepare; - private boolean hasPendingSeek; + private @DiscontinuityReason int pendingDiscontinuityReason; + private boolean pendingDiscontinuity; + private @PlayWhenReadyChangeReason int pendingPlayWhenReadyChangeReason; private boolean foregroundMode; - private int pendingSetPlaybackParametersAcks; - private PlaybackParameters playbackParameters; private SeekParameters seekParameters; + private ShuffleOrder shuffleOrder; + private boolean pauseAtEndOfMediaItems; + private Commands availableCommands; + private MediaMetadata mediaMetadata; + private MediaMetadata playlistMetadata; + @Nullable private Format videoFormat; + @Nullable private Format audioFormat; + @Nullable private AudioTrack keepSessionIdAudioTrack; + @Nullable private Object videoOutput; + @Nullable private Surface ownedSurface; + @Nullable private SurfaceHolder surfaceHolder; + @Nullable private SphericalGLSurfaceView sphericalGLSurfaceView; + private boolean surfaceHolderSurfaceIsVideoOutput; + @Nullable private TextureView textureView; + private @C.VideoScalingMode int videoScalingMode; + private @C.VideoChangeFrameRateStrategy int videoChangeFrameRateStrategy; + private Size surfaceSize; + @Nullable private DecoderCounters videoDecoderCounters; + @Nullable private DecoderCounters audioDecoderCounters; + private int audioSessionId; + private AudioAttributes audioAttributes; + private float volume; + private boolean skipSilenceEnabled; + private CueGroup currentCueGroup; + @Nullable private 
VideoFrameMetadataListener videoFrameMetadataListener; + @Nullable private CameraMotionListener cameraMotionListener; + private boolean throwsWhenUsingWrongThread; + private boolean hasNotifiedFullWrongThreadWarning; + @Nullable private PriorityTaskManager priorityTaskManager; + private boolean isPriorityTaskManagerRegistered; + private boolean playerReleased; + private DeviceInfo deviceInfo; + private VideoSize videoSize; + + // MediaMetadata built from static (TrackGroup Format) and dynamic (onMetadata(Metadata)) metadata + // sources. + private MediaMetadata staticAndDynamicMediaMetadata; // Playback information when there is no pending seek/set source operation. private PlaybackInfo playbackInfo; @@ -83,307 +214,711 @@ private int maskingPeriodIndex; private long maskingWindowPositionMs; - /** - * Constructs an instance. Must be called from a thread that has an associated {@link Looper}. - * - * @param renderers The {@link Renderer}s that will be used by the instance. - * @param trackSelector The {@link TrackSelector} that will be used by the instance. - * @param loadControl The {@link LoadControl} that will be used by the instance. - * @param bandwidthMeter The {@link BandwidthMeter} that will be used by the instance. - * @param clock The {@link Clock} that will be used by the instance. - * @param looper The {@link Looper} which must be used for all calls to the player and which is - * used to call listeners on. - */ @SuppressLint("HandlerLeak") - public ExoPlayerImpl( - Renderer[] renderers, - TrackSelector trackSelector, - LoadControl loadControl, - BandwidthMeter bandwidthMeter, - Clock clock, - Looper looper) { - Log.i(TAG, "Init " + Integer.toHexString(System.identityHashCode(this)) + " [" - + ExoPlayerLibraryInfo.VERSION_SLASHY + "] [" + Util.DEVICE_DEBUG_INFO + "]"); - Assertions.checkState(renderers.length > 0); - this.renderers = Assertions.checkNotNull(renderers); - this.trackSelector = Assertions.checkNotNull(trackSelector); - this.playWhenReady = false; - this.repeatMode = Player.REPEAT_MODE_OFF; - this.shuffleModeEnabled = false; - this.listeners = new CopyOnWriteArrayList<>(); - emptyTrackSelectorResult = - new TrackSelectorResult( - new RendererConfiguration[renderers.length], - new TrackSelection[renderers.length], - null); - period = new Timeline.Period(); - playbackParameters = PlaybackParameters.DEFAULT; - seekParameters = SeekParameters.DEFAULT; - playbackSuppressionReason = PLAYBACK_SUPPRESSION_REASON_NONE; - eventHandler = - new Handler(looper) { - @Override - public void handleMessage(Message msg) { - ExoPlayerImpl.this.handleEvent(msg); - } - }; - playbackInfo = PlaybackInfo.createDummy(/* startPositionUs= */ 0, emptyTrackSelectorResult); - pendingListenerNotifications = new ArrayDeque<>(); - internalPlayer = - new ExoPlayerImplInternal( - renderers, - trackSelector, - emptyTrackSelectorResult, - loadControl, - bandwidthMeter, - playWhenReady, - repeatMode, - shuffleModeEnabled, - eventHandler, - clock); - internalPlayerHandler = new Handler(internalPlayer.getPlaybackLooper()); + public ExoPlayerImpl(ExoPlayer.Builder builder, @Nullable Player wrappingPlayer) { + constructorFinished = new ConditionVariable(); + try { + Log.i( + TAG, + "Init " + + Integer.toHexString(System.identityHashCode(this)) + + " [" + + ExoPlayerLibraryInfo.VERSION_SLASHY + + "] [" + + Util.DEVICE_DEBUG_INFO + + "]"); + applicationContext = builder.context.getApplicationContext(); + analyticsCollector = builder.analyticsCollectorFunction.apply(builder.clock); + priorityTaskManager = 
builder.priorityTaskManager; + audioAttributes = builder.audioAttributes; + videoScalingMode = builder.videoScalingMode; + videoChangeFrameRateStrategy = builder.videoChangeFrameRateStrategy; + skipSilenceEnabled = builder.skipSilenceEnabled; + detachSurfaceTimeoutMs = builder.detachSurfaceTimeoutMs; + componentListener = new ComponentListener(); + frameMetadataListener = new FrameMetadataListener(); + Handler eventHandler = new Handler(builder.looper); + renderers = + builder + .renderersFactorySupplier + .get() + .createRenderers( + eventHandler, + componentListener, + componentListener, + componentListener, + componentListener); + checkState(renderers.length > 0); + this.trackSelector = builder.trackSelectorSupplier.get(); + this.mediaSourceFactory = builder.mediaSourceFactorySupplier.get(); + this.bandwidthMeter = builder.bandwidthMeterSupplier.get(); + this.useLazyPreparation = builder.useLazyPreparation; + this.seekParameters = builder.seekParameters; + this.seekBackIncrementMs = builder.seekBackIncrementMs; + this.seekForwardIncrementMs = builder.seekForwardIncrementMs; + this.pauseAtEndOfMediaItems = builder.pauseAtEndOfMediaItems; + this.applicationLooper = builder.looper; + this.clock = builder.clock; + this.wrappingPlayer = wrappingPlayer == null ? this : wrappingPlayer; + listeners = + new ListenerSet<>( + applicationLooper, + clock, + (listener, flags) -> listener.onEvents(this.wrappingPlayer, new Events(flags))); + audioOffloadListeners = new CopyOnWriteArraySet<>(); + mediaSourceHolderSnapshots = new ArrayList<>(); + shuffleOrder = new ShuffleOrder.DefaultShuffleOrder(/* length= */ 0); + emptyTrackSelectorResult = + new TrackSelectorResult( + new RendererConfiguration[renderers.length], + new ExoTrackSelection[renderers.length], + Tracks.EMPTY, + /* info= */ null); + period = new Timeline.Period(); + permanentAvailableCommands = + new Commands.Builder() + .addAll( + COMMAND_PLAY_PAUSE, + COMMAND_PREPARE, + COMMAND_STOP, + COMMAND_SET_SPEED_AND_PITCH, + COMMAND_SET_SHUFFLE_MODE, + COMMAND_SET_REPEAT_MODE, + COMMAND_GET_CURRENT_MEDIA_ITEM, + COMMAND_GET_TIMELINE, + COMMAND_GET_MEDIA_ITEMS_METADATA, + COMMAND_SET_MEDIA_ITEMS_METADATA, + COMMAND_SET_MEDIA_ITEM, + COMMAND_CHANGE_MEDIA_ITEMS, + COMMAND_GET_TRACKS, + COMMAND_GET_AUDIO_ATTRIBUTES, + COMMAND_GET_VOLUME, + COMMAND_GET_DEVICE_VOLUME, + COMMAND_SET_VOLUME, + COMMAND_SET_DEVICE_VOLUME, + COMMAND_ADJUST_DEVICE_VOLUME, + COMMAND_SET_VIDEO_SURFACE, + COMMAND_GET_TEXT) + .addIf( + COMMAND_SET_TRACK_SELECTION_PARAMETERS, trackSelector.isSetParametersSupported()) + .build(); + availableCommands = + new Commands.Builder() + .addAll(permanentAvailableCommands) + .add(COMMAND_SEEK_TO_DEFAULT_POSITION) + .add(COMMAND_SEEK_TO_MEDIA_ITEM) + .build(); + playbackInfoUpdateHandler = clock.createHandler(applicationLooper, /* callback= */ null); + playbackInfoUpdateListener = + playbackInfoUpdate -> + playbackInfoUpdateHandler.post(() -> handlePlaybackInfo(playbackInfoUpdate)); + playbackInfo = PlaybackInfo.createDummy(emptyTrackSelectorResult); + analyticsCollector.setPlayer(this.wrappingPlayer, applicationLooper); + PlayerId playerId = + Util.SDK_INT < 31 + ? 
new PlayerId() + : Api31.registerMediaMetricsListener( + applicationContext, /* player= */ this, builder.usePlatformDiagnostics); + internalPlayer = + new ExoPlayerImplInternal( + renderers, + trackSelector, + emptyTrackSelectorResult, + builder.loadControlSupplier.get(), + bandwidthMeter, + repeatMode, + shuffleModeEnabled, + analyticsCollector, + seekParameters, + builder.livePlaybackSpeedControl, + builder.releaseTimeoutMs, + pauseAtEndOfMediaItems, + applicationLooper, + clock, + playbackInfoUpdateListener, + playerId, + builder.playbackLooper); + + volume = 1; + repeatMode = Player.REPEAT_MODE_OFF; + mediaMetadata = MediaMetadata.EMPTY; + playlistMetadata = MediaMetadata.EMPTY; + staticAndDynamicMediaMetadata = MediaMetadata.EMPTY; + maskingWindowIndex = C.INDEX_UNSET; + if (Util.SDK_INT < 21) { + audioSessionId = initializeKeepSessionIdAudioTrack(C.AUDIO_SESSION_ID_UNSET); + } else { + audioSessionId = Util.generateAudioSessionIdV21(applicationContext); + } + currentCueGroup = CueGroup.EMPTY_TIME_ZERO; + throwsWhenUsingWrongThread = true; + + addListener(analyticsCollector); + bandwidthMeter.addEventListener(new Handler(applicationLooper), analyticsCollector); + addAudioOffloadListener(componentListener); + if (builder.foregroundModeTimeoutMs > 0) { + internalPlayer.experimentalSetForegroundModeTimeoutMs(builder.foregroundModeTimeoutMs); + } + + audioBecomingNoisyManager = + new AudioBecomingNoisyManager(builder.context, eventHandler, componentListener); + audioBecomingNoisyManager.setEnabled(builder.handleAudioBecomingNoisy); + audioFocusManager = new AudioFocusManager(builder.context, eventHandler, componentListener); + audioFocusManager.setAudioAttributes(builder.handleAudioFocus ? audioAttributes : null); + streamVolumeManager = + new StreamVolumeManager(builder.context, eventHandler, componentListener); + streamVolumeManager.setStreamType(Util.getStreamTypeForAudioUsage(audioAttributes.usage)); + wakeLockManager = new WakeLockManager(builder.context); + wakeLockManager.setEnabled(builder.wakeMode != C.WAKE_MODE_NONE); + wifiLockManager = new WifiLockManager(builder.context); + wifiLockManager.setEnabled(builder.wakeMode == C.WAKE_MODE_NETWORK); + deviceInfo = createDeviceInfo(streamVolumeManager); + videoSize = VideoSize.UNKNOWN; + surfaceSize = Size.UNKNOWN; + + trackSelector.setAudioAttributes(audioAttributes); + sendRendererMessage(TRACK_TYPE_AUDIO, MSG_SET_AUDIO_SESSION_ID, audioSessionId); + sendRendererMessage(TRACK_TYPE_VIDEO, MSG_SET_AUDIO_SESSION_ID, audioSessionId); + sendRendererMessage(TRACK_TYPE_AUDIO, MSG_SET_AUDIO_ATTRIBUTES, audioAttributes); + sendRendererMessage(TRACK_TYPE_VIDEO, MSG_SET_SCALING_MODE, videoScalingMode); + sendRendererMessage( + TRACK_TYPE_VIDEO, MSG_SET_CHANGE_FRAME_RATE_STRATEGY, videoChangeFrameRateStrategy); + sendRendererMessage(TRACK_TYPE_AUDIO, MSG_SET_SKIP_SILENCE_ENABLED, skipSilenceEnabled); + sendRendererMessage( + TRACK_TYPE_VIDEO, MSG_SET_VIDEO_FRAME_METADATA_LISTENER, frameMetadataListener); + sendRendererMessage( + TRACK_TYPE_CAMERA_MOTION, MSG_SET_CAMERA_MOTION_LISTENER, frameMetadataListener); + } finally { + constructorFinished.open(); + } } + @CanIgnoreReturnValue + @SuppressWarnings("deprecation") // Returning deprecated class. @Override - @Nullable + @Deprecated public AudioComponent getAudioComponent() { - return null; + verifyApplicationThread(); + return this; } + @CanIgnoreReturnValue + @SuppressWarnings("deprecation") // Returning deprecated class. 
@Override - @Nullable + @Deprecated public VideoComponent getVideoComponent() { - return null; + verifyApplicationThread(); + return this; } + @CanIgnoreReturnValue + @SuppressWarnings("deprecation") // Returning deprecated class. @Override - @Nullable + @Deprecated public TextComponent getTextComponent() { - return null; + verifyApplicationThread(); + return this; } + @CanIgnoreReturnValue + @SuppressWarnings("deprecation") // Returning deprecated class. @Override - @Nullable - public MetadataComponent getMetadataComponent() { - return null; + @Deprecated + public DeviceComponent getDeviceComponent() { + verifyApplicationThread(); + return this; + } + + @Override + public void experimentalSetOffloadSchedulingEnabled(boolean offloadSchedulingEnabled) { + verifyApplicationThread(); + internalPlayer.experimentalSetOffloadSchedulingEnabled(offloadSchedulingEnabled); + for (AudioOffloadListener listener : audioOffloadListeners) { + listener.onExperimentalOffloadSchedulingEnabledChanged(offloadSchedulingEnabled); + } + } + + @Override + public boolean experimentalIsSleepingForOffload() { + verifyApplicationThread(); + return playbackInfo.sleepingForOffload; } @Override public Looper getPlaybackLooper() { + // Don't verify application thread. We allow calls to this method from any thread. return internalPlayer.getPlaybackLooper(); } @Override public Looper getApplicationLooper() { - return eventHandler.getLooper(); + // Don't verify application thread. We allow calls to this method from any thread. + return applicationLooper; } @Override - public void addListener(Player.EventListener listener) { - listeners.addIfAbsent(new ListenerHolder(listener)); + public Clock getClock() { + // Don't verify application thread. We allow calls to this method from any thread. + return clock; } @Override - public void removeListener(Player.EventListener listener) { - for (ListenerHolder listenerHolder : listeners) { - if (listenerHolder.listener.equals(listener)) { - listenerHolder.release(); - listeners.remove(listenerHolder); - } - } + public void addAudioOffloadListener(AudioOffloadListener listener) { + // Don't verify application thread. We allow calls to this method from any thread. 
+ audioOffloadListeners.add(listener); + } + + @Override + public void removeAudioOffloadListener(AudioOffloadListener listener) { + verifyApplicationThread(); + audioOffloadListeners.remove(listener); + } + + @Override + public Commands getAvailableCommands() { + verifyApplicationThread(); + return availableCommands; } @Override - @State - public int getPlaybackState() { + public @State int getPlaybackState() { + verifyApplicationThread(); return playbackInfo.playbackState; } @Override - @PlaybackSuppressionReason - public int getPlaybackSuppressionReason() { - return playbackSuppressionReason; + public @PlaybackSuppressionReason int getPlaybackSuppressionReason() { + verifyApplicationThread(); + return playbackInfo.playbackSuppressionReason; } @Override @Nullable - public ExoPlaybackException getPlaybackError() { + public ExoPlaybackException getPlayerError() { + verifyApplicationThread(); return playbackInfo.playbackError; } @Override + @Deprecated public void retry() { - if (mediaSource != null && playbackInfo.playbackState == Player.STATE_IDLE) { - prepare(mediaSource, /* resetPosition= */ false, /* resetState= */ false); + verifyApplicationThread(); + prepare(); + } + + @Override + public void prepare() { + verifyApplicationThread(); + boolean playWhenReady = getPlayWhenReady(); + @AudioFocusManager.PlayerCommand + int playerCommand = audioFocusManager.updateAudioFocus(playWhenReady, Player.STATE_BUFFERING); + updatePlayWhenReady( + playWhenReady, playerCommand, getPlayWhenReadyChangeReason(playWhenReady, playerCommand)); + if (playbackInfo.playbackState != Player.STATE_IDLE) { + return; } + PlaybackInfo playbackInfo = this.playbackInfo.copyWithPlaybackError(null); + playbackInfo = + playbackInfo.copyWithPlaybackState( + playbackInfo.timeline.isEmpty() ? STATE_ENDED : STATE_BUFFERING); + // Trigger internal prepare first before updating the playback info and notifying external + // listeners to ensure that new operations issued in the listener notifications reach the + // player after this prepare. The internal player can't change the playback info immediately + // because it uses a callback. + pendingOperationAcks++; + internalPlayer.prepare(); + updatePlaybackInfo( + playbackInfo, + /* ignored */ TIMELINE_CHANGE_REASON_SOURCE_UPDATE, + /* ignored */ PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST, + /* seekProcessed= */ false, + /* positionDiscontinuity= */ false, + /* ignored */ DISCONTINUITY_REASON_INTERNAL, + /* ignored */ C.TIME_UNSET, + /* ignored */ C.INDEX_UNSET, + /* repeatCurrentMediaItem= */ false); } @Override + @Deprecated public void prepare(MediaSource mediaSource) { - prepare(mediaSource, /* resetPosition= */ true, /* resetState= */ true); + verifyApplicationThread(); + setMediaSource(mediaSource); + prepare(); } @Override + @Deprecated public void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetState) { - this.mediaSource = mediaSource; - PlaybackInfo playbackInfo = - getResetPlaybackInfo( - resetPosition, - resetState, - /* resetError= */ true, - /* playbackState= */ Player.STATE_BUFFERING); - // Trigger internal prepare first before updating the playback info and notifying external - // listeners to ensure that new operations issued in the listener notifications reach the - // player after this prepare. The internal player can't change the playback info immediately - // because it uses a callback. 
- hasPendingPrepare = true; + verifyApplicationThread(); + setMediaSource(mediaSource, resetPosition); + prepare(); + } + + @Override + public void setMediaItems(List mediaItems, boolean resetPosition) { + verifyApplicationThread(); + setMediaSources(createMediaSources(mediaItems), resetPosition); + } + + @Override + public void setMediaItems(List mediaItems, int startIndex, long startPositionMs) { + verifyApplicationThread(); + setMediaSources(createMediaSources(mediaItems), startIndex, startPositionMs); + } + + @Override + public void setMediaSource(MediaSource mediaSource) { + verifyApplicationThread(); + setMediaSources(Collections.singletonList(mediaSource)); + } + + @Override + public void setMediaSource(MediaSource mediaSource, long startPositionMs) { + verifyApplicationThread(); + setMediaSources( + Collections.singletonList(mediaSource), /* startWindowIndex= */ 0, startPositionMs); + } + + @Override + public void setMediaSource(MediaSource mediaSource, boolean resetPosition) { + verifyApplicationThread(); + setMediaSources(Collections.singletonList(mediaSource), resetPosition); + } + + @Override + public void setMediaSources(List mediaSources) { + verifyApplicationThread(); + setMediaSources(mediaSources, /* resetPosition= */ true); + } + + @Override + public void setMediaSources(List mediaSources, boolean resetPosition) { + verifyApplicationThread(); + setMediaSourcesInternal( + mediaSources, + /* startWindowIndex= */ C.INDEX_UNSET, + /* startPositionMs= */ C.TIME_UNSET, + /* resetToDefaultPosition= */ resetPosition); + } + + @Override + public void setMediaSources( + List mediaSources, int startWindowIndex, long startPositionMs) { + verifyApplicationThread(); + setMediaSourcesInternal( + mediaSources, startWindowIndex, startPositionMs, /* resetToDefaultPosition= */ false); + } + + @Override + public void addMediaItems(int index, List mediaItems) { + verifyApplicationThread(); + addMediaSources(index, createMediaSources(mediaItems)); + } + + @Override + public void addMediaSource(MediaSource mediaSource) { + verifyApplicationThread(); + addMediaSources(Collections.singletonList(mediaSource)); + } + + @Override + public void addMediaSource(int index, MediaSource mediaSource) { + verifyApplicationThread(); + addMediaSources(index, Collections.singletonList(mediaSource)); + } + + @Override + public void addMediaSources(List mediaSources) { + verifyApplicationThread(); + addMediaSources(/* index= */ mediaSourceHolderSnapshots.size(), mediaSources); + } + + @Override + public void addMediaSources(int index, List mediaSources) { + verifyApplicationThread(); + checkArgument(index >= 0); + index = min(index, mediaSourceHolderSnapshots.size()); + Timeline oldTimeline = getCurrentTimeline(); pendingOperationAcks++; - internalPlayer.prepare(mediaSource, resetPosition, resetState); + List holders = addMediaSourceHolders(index, mediaSources); + Timeline newTimeline = createMaskingTimeline(); + PlaybackInfo newPlaybackInfo = + maskTimelineAndPosition( + playbackInfo, + newTimeline, + getPeriodPositionUsAfterTimelineChanged(oldTimeline, newTimeline)); + internalPlayer.addMediaSources(index, holders, shuffleOrder); updatePlaybackInfo( - playbackInfo, + newPlaybackInfo, + /* timelineChangeReason= */ TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED, + /* ignored */ PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST, + /* seekProcessed= */ false, + /* positionDiscontinuity= */ false, + /* ignored */ DISCONTINUITY_REASON_INTERNAL, + /* ignored */ C.TIME_UNSET, + /* ignored */ C.INDEX_UNSET, + /* 
repeatCurrentMediaItem= */ false); + } + + @Override + public void removeMediaItems(int fromIndex, int toIndex) { + verifyApplicationThread(); + checkArgument(fromIndex >= 0 && toIndex >= fromIndex); + int playlistSize = mediaSourceHolderSnapshots.size(); + toIndex = min(toIndex, playlistSize); + if (fromIndex >= playlistSize || fromIndex == toIndex) { + // Do nothing. + return; + } + PlaybackInfo newPlaybackInfo = removeMediaItemsInternal(fromIndex, toIndex); + boolean positionDiscontinuity = + !newPlaybackInfo.periodId.periodUid.equals(playbackInfo.periodId.periodUid); + updatePlaybackInfo( + newPlaybackInfo, + /* timelineChangeReason= */ TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED, + /* ignored */ PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST, + /* seekProcessed= */ false, + positionDiscontinuity, + DISCONTINUITY_REASON_REMOVE, + /* discontinuityWindowStartPositionUs= */ getCurrentPositionUsInternal(newPlaybackInfo), + /* ignored */ C.INDEX_UNSET, + /* repeatCurrentMediaItem= */ false); + } + + @Override + public void moveMediaItems(int fromIndex, int toIndex, int newFromIndex) { + verifyApplicationThread(); + checkArgument(fromIndex >= 0 && fromIndex <= toIndex && newFromIndex >= 0); + int playlistSize = mediaSourceHolderSnapshots.size(); + toIndex = min(toIndex, playlistSize); + newFromIndex = min(newFromIndex, playlistSize - (toIndex - fromIndex)); + if (fromIndex >= playlistSize || fromIndex == toIndex || fromIndex == newFromIndex) { + // Do nothing. + return; + } + Timeline oldTimeline = getCurrentTimeline(); + pendingOperationAcks++; + Util.moveItems(mediaSourceHolderSnapshots, fromIndex, toIndex, newFromIndex); + Timeline newTimeline = createMaskingTimeline(); + PlaybackInfo newPlaybackInfo = + maskTimelineAndPosition( + playbackInfo, + newTimeline, + getPeriodPositionUsAfterTimelineChanged(oldTimeline, newTimeline)); + internalPlayer.moveMediaSources(fromIndex, toIndex, newFromIndex, shuffleOrder); + updatePlaybackInfo( + newPlaybackInfo, + /* timelineChangeReason= */ TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED, + /* ignored */ PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST, + /* seekProcessed= */ false, + /* positionDiscontinuity= */ false, + /* ignored */ DISCONTINUITY_REASON_INTERNAL, + /* ignored */ C.TIME_UNSET, + /* ignored */ C.INDEX_UNSET, + /* repeatCurrentMediaItem= */ false); + } + + @Override + public void setShuffleOrder(ShuffleOrder shuffleOrder) { + verifyApplicationThread(); + this.shuffleOrder = shuffleOrder; + Timeline timeline = createMaskingTimeline(); + PlaybackInfo newPlaybackInfo = + maskTimelineAndPosition( + playbackInfo, + timeline, + maskWindowPositionMsOrGetPeriodPositionUs( + timeline, getCurrentMediaItemIndex(), getCurrentPosition())); + pendingOperationAcks++; + internalPlayer.setShuffleOrder(shuffleOrder); + updatePlaybackInfo( + newPlaybackInfo, + /* timelineChangeReason= */ TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED, + /* ignored */ PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST, + /* seekProcessed= */ false, /* positionDiscontinuity= */ false, /* ignored */ DISCONTINUITY_REASON_INTERNAL, - TIMELINE_CHANGE_REASON_RESET, - /* seekProcessed= */ false); + /* ignored */ C.TIME_UNSET, + /* ignored */ C.INDEX_UNSET, + /* repeatCurrentMediaItem= */ false); + } + + @Override + public void setPauseAtEndOfMediaItems(boolean pauseAtEndOfMediaItems) { + verifyApplicationThread(); + if (this.pauseAtEndOfMediaItems == pauseAtEndOfMediaItems) { + return; + } + this.pauseAtEndOfMediaItems = pauseAtEndOfMediaItems; + internalPlayer.setPauseAtEndOfWindow(pauseAtEndOfMediaItems); } + 
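+  // A minimal caller-side sketch (not part of the upstream patch) of the migration this update
+  // implies: the removed ExoPlayerFactory-style newSimpleInstance/newInstance overloads are
+  // superseded by ExoPlayer.Builder, and the deprecated prepare(MediaSource) overloads by
+  // setMediaSource(...)/setMediaItem(...) followed by the argument-less prepare(), exactly as the
+  // deprecation javadoc above recommends. The Context and media URI below are illustrative
+  // placeholders, not values taken from this diff.
+  //
+  //   import android.content.Context;
+  //   import com.google.android.exoplayer2.ExoPlayer;
+  //   import com.google.android.exoplayer2.MediaItem;
+  //
+  //   static ExoPlayer startPlayback(Context context, String mediaUri) {
+  //     // Builder replaces the deprecated static factory methods.
+  //     ExoPlayer player = new ExoPlayer.Builder(context).build();
+  //     // setMediaItem(...) + prepare() replaces the deprecated prepare(MediaSource).
+  //     player.setMediaItem(MediaItem.fromUri(mediaUri));
+  //     player.prepare();
+  //     player.setPlayWhenReady(true);
+  //     return player; // callers remain responsible for calling player.release()
+  //   }
+  //
+  // If DRM is needed, the DrmSessionManager is now supplied through the MediaSource factory
+  // (e.g. DefaultMediaSourceFactory#setDrmSessionManagerProvider) rather than the player Builder.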
@Override + public boolean getPauseAtEndOfMediaItems() { + verifyApplicationThread(); + return pauseAtEndOfMediaItems; + } @Override public void setPlayWhenReady(boolean playWhenReady) { - setPlayWhenReady(playWhenReady, PLAYBACK_SUPPRESSION_REASON_NONE); - } - - public void setPlayWhenReady( - boolean playWhenReady, @PlaybackSuppressionReason int playbackSuppressionReason) { - boolean oldIsPlaying = isPlaying(); - boolean oldInternalPlayWhenReady = - this.playWhenReady && this.playbackSuppressionReason == PLAYBACK_SUPPRESSION_REASON_NONE; - boolean internalPlayWhenReady = - playWhenReady && playbackSuppressionReason == PLAYBACK_SUPPRESSION_REASON_NONE; - if (oldInternalPlayWhenReady != internalPlayWhenReady) { - internalPlayer.setPlayWhenReady(internalPlayWhenReady); - } - boolean playWhenReadyChanged = this.playWhenReady != playWhenReady; - boolean suppressionReasonChanged = this.playbackSuppressionReason != playbackSuppressionReason; - this.playWhenReady = playWhenReady; - this.playbackSuppressionReason = playbackSuppressionReason; - boolean isPlaying = isPlaying(); - boolean isPlayingChanged = oldIsPlaying != isPlaying; - if (playWhenReadyChanged || suppressionReasonChanged || isPlayingChanged) { - int playbackState = playbackInfo.playbackState; - notifyListeners( - listener -> { - if (playWhenReadyChanged) { - listener.onPlayerStateChanged(playWhenReady, playbackState); - } - if (suppressionReasonChanged) { - listener.onPlaybackSuppressionReasonChanged(playbackSuppressionReason); - } - if (isPlayingChanged) { - listener.onIsPlayingChanged(isPlaying); - } - }); - } + verifyApplicationThread(); + @AudioFocusManager.PlayerCommand + int playerCommand = audioFocusManager.updateAudioFocus(playWhenReady, getPlaybackState()); + updatePlayWhenReady( + playWhenReady, playerCommand, getPlayWhenReadyChangeReason(playWhenReady, playerCommand)); } @Override public boolean getPlayWhenReady() { - return playWhenReady; + verifyApplicationThread(); + return playbackInfo.playWhenReady; } @Override public void setRepeatMode(@RepeatMode int repeatMode) { + verifyApplicationThread(); if (this.repeatMode != repeatMode) { this.repeatMode = repeatMode; internalPlayer.setRepeatMode(repeatMode); - notifyListeners(listener -> listener.onRepeatModeChanged(repeatMode)); + listeners.queueEvent( + Player.EVENT_REPEAT_MODE_CHANGED, listener -> listener.onRepeatModeChanged(repeatMode)); + updateAvailableCommands(); + listeners.flushEvents(); } } @Override public @RepeatMode int getRepeatMode() { + verifyApplicationThread(); return repeatMode; } @Override public void setShuffleModeEnabled(boolean shuffleModeEnabled) { + verifyApplicationThread(); if (this.shuffleModeEnabled != shuffleModeEnabled) { this.shuffleModeEnabled = shuffleModeEnabled; internalPlayer.setShuffleModeEnabled(shuffleModeEnabled); - notifyListeners(listener -> listener.onShuffleModeEnabledChanged(shuffleModeEnabled)); + listeners.queueEvent( + Player.EVENT_SHUFFLE_MODE_ENABLED_CHANGED, + listener -> listener.onShuffleModeEnabledChanged(shuffleModeEnabled)); + updateAvailableCommands(); + listeners.flushEvents(); } } @Override public boolean getShuffleModeEnabled() { + verifyApplicationThread(); return shuffleModeEnabled; } @Override public boolean isLoading() { + verifyApplicationThread(); return playbackInfo.isLoading; } @Override - public void seekTo(int windowIndex, long positionMs) { + public void seekTo( + int mediaItemIndex, + long positionMs, + @Player.Command int seekCommand, + boolean isRepeatingCurrentItem) { + verifyApplicationThread(); + 
checkArgument(mediaItemIndex >= 0); + analyticsCollector.notifySeekStarted(); Timeline timeline = playbackInfo.timeline; - if (windowIndex < 0 || (!timeline.isEmpty() && windowIndex >= timeline.getWindowCount())) { - throw new IllegalSeekPositionException(timeline, windowIndex, positionMs); + if (!timeline.isEmpty() && mediaItemIndex >= timeline.getWindowCount()) { + return; } - hasPendingSeek = true; pendingOperationAcks++; if (isPlayingAd()) { // TODO: Investigate adding support for seeking during ads. This is complicated to do in // general because the midroll ad preceding the seek destination must be played before the // content position can be played, if a different ad is playing at the moment. Log.w(TAG, "seekTo ignored because an ad is playing"); - eventHandler - .obtainMessage( - ExoPlayerImplInternal.MSG_PLAYBACK_INFO_CHANGED, - /* operationAcks */ 1, - /* positionDiscontinuityReason */ C.INDEX_UNSET, - playbackInfo) - .sendToTarget(); + ExoPlayerImplInternal.PlaybackInfoUpdate playbackInfoUpdate = + new ExoPlayerImplInternal.PlaybackInfoUpdate(this.playbackInfo); + playbackInfoUpdate.incrementPendingOperationAcks(1); + playbackInfoUpdateListener.onPlaybackInfoUpdate(playbackInfoUpdate); return; } - maskingWindowIndex = windowIndex; - if (timeline.isEmpty()) { - maskingWindowPositionMs = positionMs == C.TIME_UNSET ? 0 : positionMs; - maskingPeriodIndex = 0; - } else { - long windowPositionUs = positionMs == C.TIME_UNSET - ? timeline.getWindow(windowIndex, window).getDefaultPositionUs() : C.msToUs(positionMs); - Pair periodUidAndPosition = - timeline.getPeriodPosition(window, period, windowIndex, windowPositionUs); - maskingWindowPositionMs = C.usToMs(windowPositionUs); - maskingPeriodIndex = timeline.getIndexOfPeriod(periodUidAndPosition.first); - } - internalPlayer.seekTo(timeline, windowIndex, C.msToUs(positionMs)); - notifyListeners(listener -> listener.onPositionDiscontinuity(DISCONTINUITY_REASON_SEEK)); + @Player.State + int newPlaybackState = + getPlaybackState() == Player.STATE_IDLE ? 
Player.STATE_IDLE : STATE_BUFFERING; + int oldMaskingMediaItemIndex = getCurrentMediaItemIndex(); + PlaybackInfo newPlaybackInfo = playbackInfo.copyWithPlaybackState(newPlaybackState); + newPlaybackInfo = + maskTimelineAndPosition( + newPlaybackInfo, + timeline, + maskWindowPositionMsOrGetPeriodPositionUs(timeline, mediaItemIndex, positionMs)); + internalPlayer.seekTo(timeline, mediaItemIndex, Util.msToUs(positionMs)); + updatePlaybackInfo( + newPlaybackInfo, + /* ignored */ TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED, + /* ignored */ PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST, + /* seekProcessed= */ true, + /* positionDiscontinuity= */ true, + /* positionDiscontinuityReason= */ DISCONTINUITY_REASON_SEEK, + /* discontinuityWindowStartPositionUs= */ getCurrentPositionUsInternal(newPlaybackInfo), + oldMaskingMediaItemIndex, + isRepeatingCurrentItem); + } + + @Override + public long getSeekBackIncrement() { + verifyApplicationThread(); + return seekBackIncrementMs; } @Override - public void setPlaybackParameters(@Nullable PlaybackParameters playbackParameters) { + public long getSeekForwardIncrement() { + verifyApplicationThread(); + return seekForwardIncrementMs; + } + + @Override + public long getMaxSeekToPreviousPosition() { + verifyApplicationThread(); + return C.DEFAULT_MAX_SEEK_TO_PREVIOUS_POSITION_MS; + } + + @Override + public void setPlaybackParameters(PlaybackParameters playbackParameters) { + verifyApplicationThread(); if (playbackParameters == null) { playbackParameters = PlaybackParameters.DEFAULT; } - if (this.playbackParameters.equals(playbackParameters)) { + if (playbackInfo.playbackParameters.equals(playbackParameters)) { return; } - pendingSetPlaybackParametersAcks++; - this.playbackParameters = playbackParameters; + PlaybackInfo newPlaybackInfo = playbackInfo.copyWithPlaybackParameters(playbackParameters); + pendingOperationAcks++; internalPlayer.setPlaybackParameters(playbackParameters); - PlaybackParameters playbackParametersToNotify = playbackParameters; - notifyListeners(listener -> listener.onPlaybackParametersChanged(playbackParametersToNotify)); + updatePlaybackInfo( + newPlaybackInfo, + /* ignored */ TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED, + /* ignored */ PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST, + /* seekProcessed= */ false, + /* positionDiscontinuity= */ false, + /* ignored */ DISCONTINUITY_REASON_INTERNAL, + /* ignored */ C.TIME_UNSET, + /* ignored */ C.INDEX_UNSET, + /* repeatCurrentMediaItem= */ false); } @Override public PlaybackParameters getPlaybackParameters() { - return playbackParameters; + verifyApplicationThread(); + return playbackInfo.playbackParameters; } @Override public void setSeekParameters(@Nullable SeekParameters seekParameters) { + verifyApplicationThread(); if (seekParameters == null) { seekParameters = SeekParameters.DEFAULT; } @@ -395,71 +930,105 @@ public void setSeekParameters(@Nullable SeekParameters seekParameters) { @Override public SeekParameters getSeekParameters() { + verifyApplicationThread(); return seekParameters; } @Override public void setForegroundMode(boolean foregroundMode) { + verifyApplicationThread(); if (this.foregroundMode != foregroundMode) { this.foregroundMode = foregroundMode; - internalPlayer.setForegroundMode(foregroundMode); + if (!internalPlayer.setForegroundMode(foregroundMode)) { + // One of the renderers timed out releasing its resources. 
+ stopInternal( + /* reset= */ false, + ExoPlaybackException.createForUnexpected( + new ExoTimeoutException(ExoTimeoutException.TIMEOUT_OPERATION_SET_FOREGROUND_MODE), + PlaybackException.ERROR_CODE_TIMEOUT)); + } } } + @Override + public void stop() { + verifyApplicationThread(); + stop(/* reset= */ false); + } + @Override public void stop(boolean reset) { - if (reset) { - mediaSource = null; - } - PlaybackInfo playbackInfo = - getResetPlaybackInfo( - /* resetPosition= */ reset, - /* resetState= */ reset, - /* resetError= */ reset, - /* playbackState= */ Player.STATE_IDLE); - // Trigger internal stop first before updating the playback info and notifying external - // listeners to ensure that new operations issued in the listener notifications reach the - // player after this stop. The internal player can't change the playback info immediately - // because it uses a callback. - pendingOperationAcks++; - internalPlayer.stop(reset); - updatePlaybackInfo( - playbackInfo, - /* positionDiscontinuity= */ false, - /* ignored */ DISCONTINUITY_REASON_INTERNAL, - TIMELINE_CHANGE_REASON_RESET, - /* seekProcessed= */ false); + verifyApplicationThread(); + audioFocusManager.updateAudioFocus(getPlayWhenReady(), Player.STATE_IDLE); + stopInternal(reset, /* error= */ null); + currentCueGroup = new CueGroup(ImmutableList.of(), playbackInfo.positionUs); } @Override - public void release(boolean async) { - Log.i(TAG, "Release " + Integer.toHexString(System.identityHashCode(this)) + " [" - + ExoPlayerLibraryInfo.VERSION_SLASHY + "] [" + Util.DEVICE_DEBUG_INFO + "] [" - + ExoPlayerLibraryInfo.registeredModules() + "]"); - mediaSource = null; - internalPlayer.release(); - eventHandler.removeCallbacksAndMessages(null); - playbackInfo = - getResetPlaybackInfo( - /* resetPosition= */ false, - /* resetState= */ false, - /* resetError= */ false, - /* playbackState= */ Player.STATE_IDLE); + public void release() { + Log.i( + TAG, + "Release " + + Integer.toHexString(System.identityHashCode(this)) + + " [" + + ExoPlayerLibraryInfo.VERSION_SLASHY + + "] [" + + Util.DEVICE_DEBUG_INFO + + "] [" + + ExoPlayerLibraryInfo.registeredModules() + + "]"); + verifyApplicationThread(); + if (Util.SDK_INT < 21 && keepSessionIdAudioTrack != null) { + keepSessionIdAudioTrack.release(); + keepSessionIdAudioTrack = null; + } + audioBecomingNoisyManager.setEnabled(false); + streamVolumeManager.release(); + wakeLockManager.setStayAwake(false); + wifiLockManager.setStayAwake(false); + audioFocusManager.release(); + if (!internalPlayer.release()) { + // One of the renderers timed out releasing its resources. 
+ listeners.sendEvent( + Player.EVENT_PLAYER_ERROR, + listener -> + listener.onPlayerError( + ExoPlaybackException.createForUnexpected( + new ExoTimeoutException(ExoTimeoutException.TIMEOUT_OPERATION_RELEASE), + PlaybackException.ERROR_CODE_TIMEOUT))); + } + listeners.release(); + playbackInfoUpdateHandler.removeCallbacksAndMessages(null); + bandwidthMeter.removeEventListener(analyticsCollector); + playbackInfo = playbackInfo.copyWithPlaybackState(Player.STATE_IDLE); + playbackInfo = playbackInfo.copyWithLoadingMediaPeriodId(playbackInfo.periodId); + playbackInfo.bufferedPositionUs = playbackInfo.positionUs; + playbackInfo.totalBufferedDurationUs = 0; + analyticsCollector.release(); + trackSelector.release(); + removeSurfaceCallbacks(); + if (ownedSurface != null) { + ownedSurface.release(); + ownedSurface = null; + } + if (isPriorityTaskManagerRegistered) { + checkNotNull(priorityTaskManager).remove(C.PRIORITY_PLAYBACK); + isPriorityTaskManagerRegistered = false; + } + currentCueGroup = CueGroup.EMPTY_TIME_ZERO; + playerReleased = true; } @Override public PlayerMessage createMessage(Target target) { - return new PlayerMessage( - internalPlayer, - target, - playbackInfo.timeline, - getCurrentWindowIndex(), - internalPlayerHandler); + verifyApplicationThread(); + return createMessageInternal(target); } @Override public int getCurrentPeriodIndex() { - if (shouldMaskPosition()) { + verifyApplicationThread(); + if (playbackInfo.timeline.isEmpty()) { return maskingPeriodIndex; } else { return playbackInfo.timeline.getIndexOfPeriod(playbackInfo.periodId.periodUid); @@ -467,42 +1036,36 @@ public int getCurrentPeriodIndex() { } @Override - public int getCurrentWindowIndex() { - if (shouldMaskPosition()) { - return maskingWindowIndex; - } else { - return playbackInfo.timeline.getPeriodByUid(playbackInfo.periodId.periodUid, period) - .windowIndex; - } + public int getCurrentMediaItemIndex() { + verifyApplicationThread(); + int currentWindowIndex = getCurrentWindowIndexInternal(); + return currentWindowIndex == C.INDEX_UNSET ? 0 : currentWindowIndex; } @Override public long getDuration() { + verifyApplicationThread(); if (isPlayingAd()) { MediaPeriodId periodId = playbackInfo.periodId; playbackInfo.timeline.getPeriodByUid(periodId.periodUid, period); long adDurationUs = period.getAdDurationUs(periodId.adGroupIndex, periodId.adIndexInAdGroup); - return C.usToMs(adDurationUs); + return Util.usToMs(adDurationUs); } return getContentDuration(); } @Override public long getCurrentPosition() { - if (shouldMaskPosition()) { - return maskingWindowPositionMs; - } else if (playbackInfo.periodId.isAd()) { - return C.usToMs(playbackInfo.positionUs); - } else { - return periodPositionUsToWindowPositionMs(playbackInfo.periodId, playbackInfo.positionUs); - } + verifyApplicationThread(); + return Util.usToMs(getCurrentPositionUsInternal(playbackInfo)); } @Override public long getBufferedPosition() { + verifyApplicationThread(); if (isPlayingAd()) { return playbackInfo.loadingMediaPeriodId.equals(playbackInfo.periodId) - ? C.usToMs(playbackInfo.bufferedPositionUs) + ? 
Util.usToMs(playbackInfo.bufferedPositionUs) : getDuration(); } return getContentBufferedPosition(); @@ -510,31 +1073,39 @@ public long getBufferedPosition() { @Override public long getTotalBufferedDuration() { - return C.usToMs(playbackInfo.totalBufferedDurationUs); + verifyApplicationThread(); + return Util.usToMs(playbackInfo.totalBufferedDurationUs); } @Override public boolean isPlayingAd() { - return !shouldMaskPosition() && playbackInfo.periodId.isAd(); + verifyApplicationThread(); + return playbackInfo.periodId.isAd(); } @Override public int getCurrentAdGroupIndex() { + verifyApplicationThread(); return isPlayingAd() ? playbackInfo.periodId.adGroupIndex : C.INDEX_UNSET; } @Override public int getCurrentAdIndexInAdGroup() { + verifyApplicationThread(); return isPlayingAd() ? playbackInfo.periodId.adIndexInAdGroup : C.INDEX_UNSET; } @Override public long getContentPosition() { + verifyApplicationThread(); if (isPlayingAd()) { playbackInfo.timeline.getPeriodByUid(playbackInfo.periodId.periodUid, period); - return playbackInfo.contentPositionUs == C.TIME_UNSET - ? playbackInfo.timeline.getWindow(getCurrentWindowIndex(), window).getDefaultPositionMs() - : period.getPositionInWindowMs() + C.usToMs(playbackInfo.contentPositionUs); + return playbackInfo.requestedContentPositionUs == C.TIME_UNSET + ? playbackInfo + .timeline + .getWindow(getCurrentMediaItemIndex(), window) + .getDefaultPositionMs() + : period.getPositionInWindowMs() + Util.usToMs(playbackInfo.requestedContentPositionUs); } else { return getCurrentPosition(); } @@ -542,12 +1113,13 @@ public long getContentPosition() { @Override public long getContentBufferedPosition() { - if (shouldMaskPosition()) { + verifyApplicationThread(); + if (playbackInfo.timeline.isEmpty()) { return maskingWindowPositionMs; } if (playbackInfo.loadingMediaPeriodId.windowSequenceNumber != playbackInfo.periodId.windowSequenceNumber) { - return playbackInfo.timeline.getWindow(getCurrentWindowIndex(), window).getDurationMs(); + return playbackInfo.timeline.getWindow(getCurrentMediaItemIndex(), window).getDurationMs(); } long contentBufferedPositionUs = playbackInfo.bufferedPositionUs; if (playbackInfo.loadingMediaPeriodId.isAd()) { @@ -559,290 +1131,2039 @@ public long getContentBufferedPosition() { contentBufferedPositionUs = loadingPeriod.durationUs; } } - return periodPositionUsToWindowPositionMs( - playbackInfo.loadingMediaPeriodId, contentBufferedPositionUs); + return Util.usToMs( + periodPositionUsToWindowPositionUs( + playbackInfo.timeline, playbackInfo.loadingMediaPeriodId, contentBufferedPositionUs)); } @Override public int getRendererCount() { + verifyApplicationThread(); return renderers.length; } @Override - public int getRendererType(int index) { + public @C.TrackType int getRendererType(int index) { + verifyApplicationThread(); return renderers[index].getTrackType(); } + @Override + public Renderer getRenderer(int index) { + verifyApplicationThread(); + return renderers[index]; + } + + @Override + public TrackSelector getTrackSelector() { + verifyApplicationThread(); + return trackSelector; + } + @Override public TrackGroupArray getCurrentTrackGroups() { + verifyApplicationThread(); return playbackInfo.trackGroups; } @Override public TrackSelectionArray getCurrentTrackSelections() { - return playbackInfo.trackSelectorResult.selections; + verifyApplicationThread(); + return new TrackSelectionArray(playbackInfo.trackSelectorResult.selections); } @Override - public Timeline getCurrentTimeline() { - return playbackInfo.timeline; + public Tracks 
getCurrentTracks() { + verifyApplicationThread(); + return playbackInfo.trackSelectorResult.tracks; } - // Not private so it can be called from an inner class without going through a thunk method. - /* package */ void handleEvent(Message msg) { - switch (msg.what) { - case ExoPlayerImplInternal.MSG_PLAYBACK_INFO_CHANGED: - handlePlaybackInfo( - (PlaybackInfo) msg.obj, - /* operationAcks= */ msg.arg1, - /* positionDiscontinuity= */ msg.arg2 != C.INDEX_UNSET, - /* positionDiscontinuityReason= */ msg.arg2); - break; - case ExoPlayerImplInternal.MSG_PLAYBACK_PARAMETERS_CHANGED: - handlePlaybackParameters((PlaybackParameters) msg.obj, /* operationAck= */ msg.arg1 != 0); - break; - default: - throw new IllegalStateException(); - } + @Override + public TrackSelectionParameters getTrackSelectionParameters() { + verifyApplicationThread(); + return trackSelector.getParameters(); } - private void handlePlaybackParameters( - PlaybackParameters playbackParameters, boolean operationAck) { - if (operationAck) { - pendingSetPlaybackParametersAcks--; - } - if (pendingSetPlaybackParametersAcks == 0) { - if (!this.playbackParameters.equals(playbackParameters)) { - this.playbackParameters = playbackParameters; - notifyListeners(listener -> listener.onPlaybackParametersChanged(playbackParameters)); - } + @Override + public void setTrackSelectionParameters(TrackSelectionParameters parameters) { + verifyApplicationThread(); + if (!trackSelector.isSetParametersSupported() + || parameters.equals(trackSelector.getParameters())) { + return; } + trackSelector.setParameters(parameters); + listeners.sendEvent( + EVENT_TRACK_SELECTION_PARAMETERS_CHANGED, + listener -> listener.onTrackSelectionParametersChanged(parameters)); } - private void handlePlaybackInfo( - PlaybackInfo playbackInfo, - int operationAcks, - boolean positionDiscontinuity, - @DiscontinuityReason int positionDiscontinuityReason) { - pendingOperationAcks -= operationAcks; - if (pendingOperationAcks == 0) { - if (playbackInfo.startPositionUs == C.TIME_UNSET) { - // Replace internal unset start position with externally visible start position of zero. - playbackInfo = - playbackInfo.copyWithNewPosition( - playbackInfo.periodId, - /* positionUs= */ 0, - playbackInfo.contentPositionUs, - playbackInfo.totalBufferedDurationUs); - } - if (!this.playbackInfo.timeline.isEmpty() && playbackInfo.timeline.isEmpty()) { - // Update the masking variables, which are used when the timeline becomes empty. - maskingPeriodIndex = 0; - maskingWindowIndex = 0; - maskingWindowPositionMs = 0; - } - @Player.TimelineChangeReason - int timelineChangeReason = - hasPendingPrepare - ? 
Player.TIMELINE_CHANGE_REASON_PREPARED - : Player.TIMELINE_CHANGE_REASON_DYNAMIC; - boolean seekProcessed = hasPendingSeek; - hasPendingPrepare = false; - hasPendingSeek = false; - updatePlaybackInfo( - playbackInfo, - positionDiscontinuity, - positionDiscontinuityReason, - timelineChangeReason, - seekProcessed); + @Override + public MediaMetadata getMediaMetadata() { + verifyApplicationThread(); + return mediaMetadata; + } + + @Override + public MediaMetadata getPlaylistMetadata() { + verifyApplicationThread(); + return playlistMetadata; + } + + @Override + public void setPlaylistMetadata(MediaMetadata playlistMetadata) { + verifyApplicationThread(); + checkNotNull(playlistMetadata); + if (playlistMetadata.equals(this.playlistMetadata)) { + return; } + this.playlistMetadata = playlistMetadata; + listeners.sendEvent( + EVENT_PLAYLIST_METADATA_CHANGED, + listener -> listener.onPlaylistMetadataChanged(this.playlistMetadata)); } - private PlaybackInfo getResetPlaybackInfo( - boolean resetPosition, - boolean resetState, - boolean resetError, - @Player.State int playbackState) { - if (resetPosition) { - maskingWindowIndex = 0; - maskingPeriodIndex = 0; - maskingWindowPositionMs = 0; - } else { - maskingWindowIndex = getCurrentWindowIndex(); - maskingPeriodIndex = getCurrentPeriodIndex(); - maskingWindowPositionMs = getCurrentPosition(); - } - // Also reset period-based PlaybackInfo positions if resetting the state. - resetPosition = resetPosition || resetState; - MediaPeriodId mediaPeriodId = - resetPosition - ? playbackInfo.getDummyFirstMediaPeriodId(shuffleModeEnabled, window, period) - : playbackInfo.periodId; - long startPositionUs = resetPosition ? 0 : playbackInfo.positionUs; - long contentPositionUs = resetPosition ? C.TIME_UNSET : playbackInfo.contentPositionUs; - return new PlaybackInfo( - resetState ? Timeline.EMPTY : playbackInfo.timeline, - mediaPeriodId, - startPositionUs, - contentPositionUs, - playbackState, - resetError ? null : playbackInfo.playbackError, - /* isLoading= */ false, - resetState ? TrackGroupArray.EMPTY : playbackInfo.trackGroups, - resetState ? emptyTrackSelectorResult : playbackInfo.trackSelectorResult, - mediaPeriodId, - startPositionUs, - /* totalBufferedDurationUs= */ 0, - startPositionUs); + @Override + public Timeline getCurrentTimeline() { + verifyApplicationThread(); + return playbackInfo.timeline; } - private void updatePlaybackInfo( - PlaybackInfo playbackInfo, - boolean positionDiscontinuity, - @Player.DiscontinuityReason int positionDiscontinuityReason, - @Player.TimelineChangeReason int timelineChangeReason, - boolean seekProcessed) { - boolean previousIsPlaying = isPlaying(); - // Assign playback info immediately such that all getters return the right values. 
- PlaybackInfo previousPlaybackInfo = this.playbackInfo; - this.playbackInfo = playbackInfo; - boolean isPlaying = isPlaying(); - notifyListeners( - new PlaybackInfoUpdate( - playbackInfo, - previousPlaybackInfo, - listeners, - trackSelector, - positionDiscontinuity, - positionDiscontinuityReason, - timelineChangeReason, - seekProcessed, - playWhenReady, - /* isPlayingChanged= */ previousIsPlaying != isPlaying)); + @Override + public void setVideoScalingMode(@C.VideoScalingMode int videoScalingMode) { + verifyApplicationThread(); + this.videoScalingMode = videoScalingMode; + sendRendererMessage(TRACK_TYPE_VIDEO, MSG_SET_SCALING_MODE, videoScalingMode); } - private void notifyListeners(ListenerInvocation listenerInvocation) { - CopyOnWriteArrayList listenerSnapshot = new CopyOnWriteArrayList<>(listeners); - notifyListeners(() -> invokeAll(listenerSnapshot, listenerInvocation)); + @Override + public @C.VideoScalingMode int getVideoScalingMode() { + verifyApplicationThread(); + return videoScalingMode; } - private void notifyListeners(Runnable listenerNotificationRunnable) { - boolean isRunningRecursiveListenerNotification = !pendingListenerNotifications.isEmpty(); - pendingListenerNotifications.addLast(listenerNotificationRunnable); - if (isRunningRecursiveListenerNotification) { + @Override + public void setVideoChangeFrameRateStrategy( + @C.VideoChangeFrameRateStrategy int videoChangeFrameRateStrategy) { + verifyApplicationThread(); + if (this.videoChangeFrameRateStrategy == videoChangeFrameRateStrategy) { return; } - while (!pendingListenerNotifications.isEmpty()) { - pendingListenerNotifications.peekFirst().run(); - pendingListenerNotifications.removeFirst(); - } - } - - private long periodPositionUsToWindowPositionMs(MediaPeriodId periodId, long positionUs) { - long positionMs = C.usToMs(positionUs); - playbackInfo.timeline.getPeriodByUid(periodId.periodUid, period); - positionMs += period.getPositionInWindowMs(); - return positionMs; - } - - private boolean shouldMaskPosition() { - return playbackInfo.timeline.isEmpty() || pendingOperationAcks > 0; - } - - private static final class PlaybackInfoUpdate implements Runnable { - - private final PlaybackInfo playbackInfo; - private final CopyOnWriteArrayList listenerSnapshot; - private final TrackSelector trackSelector; - private final boolean positionDiscontinuity; - private final @Player.DiscontinuityReason int positionDiscontinuityReason; - private final @Player.TimelineChangeReason int timelineChangeReason; - private final boolean seekProcessed; - private final boolean playbackStateChanged; - private final boolean playbackErrorChanged; - private final boolean timelineChanged; - private final boolean isLoadingChanged; - private final boolean trackSelectorResultChanged; - private final boolean playWhenReady; - private final boolean isPlayingChanged; - - public PlaybackInfoUpdate( - PlaybackInfo playbackInfo, - PlaybackInfo previousPlaybackInfo, - CopyOnWriteArrayList listeners, - TrackSelector trackSelector, - boolean positionDiscontinuity, - @DiscontinuityReason int positionDiscontinuityReason, - @TimelineChangeReason int timelineChangeReason, - boolean seekProcessed, - boolean playWhenReady, - boolean isPlayingChanged) { - this.playbackInfo = playbackInfo; - this.listenerSnapshot = new CopyOnWriteArrayList<>(listeners); - this.trackSelector = trackSelector; - this.positionDiscontinuity = positionDiscontinuity; - this.positionDiscontinuityReason = positionDiscontinuityReason; - this.timelineChangeReason = timelineChangeReason; - 
this.seekProcessed = seekProcessed; - this.playWhenReady = playWhenReady; - this.isPlayingChanged = isPlayingChanged; - playbackStateChanged = previousPlaybackInfo.playbackState != playbackInfo.playbackState; - playbackErrorChanged = - previousPlaybackInfo.playbackError != playbackInfo.playbackError - && playbackInfo.playbackError != null; - timelineChanged = previousPlaybackInfo.timeline != playbackInfo.timeline; - isLoadingChanged = previousPlaybackInfo.isLoading != playbackInfo.isLoading; - trackSelectorResultChanged = - previousPlaybackInfo.trackSelectorResult != playbackInfo.trackSelectorResult; - } - - @Override - public void run() { - if (timelineChanged || timelineChangeReason == TIMELINE_CHANGE_REASON_PREPARED) { - invokeAll( - listenerSnapshot, - listener -> listener.onTimelineChanged(playbackInfo.timeline, timelineChangeReason)); - } - if (positionDiscontinuity) { - invokeAll( - listenerSnapshot, - listener -> listener.onPositionDiscontinuity(positionDiscontinuityReason)); - } - if (playbackErrorChanged) { - invokeAll(listenerSnapshot, listener -> listener.onPlayerError(playbackInfo.playbackError)); - } - if (trackSelectorResultChanged) { - trackSelector.onSelectionActivated(playbackInfo.trackSelectorResult.info); - invokeAll( - listenerSnapshot, - listener -> - listener.onTracksChanged( - playbackInfo.trackGroups, playbackInfo.trackSelectorResult.selections)); - } - if (isLoadingChanged) { - invokeAll(listenerSnapshot, listener -> listener.onLoadingChanged(playbackInfo.isLoading)); - } - if (playbackStateChanged) { - invokeAll( - listenerSnapshot, - listener -> listener.onPlayerStateChanged(playWhenReady, playbackInfo.playbackState)); - } - if (isPlayingChanged) { - invokeAll( - listenerSnapshot, - listener -> - listener.onIsPlayingChanged(playbackInfo.playbackState == Player.STATE_READY)); - } - if (seekProcessed) { - invokeAll(listenerSnapshot, EventListener::onSeekProcessed); - } - } - } - - private static void invokeAll( - CopyOnWriteArrayList listeners, ListenerInvocation listenerInvocation) { - for (ListenerHolder listenerHolder : listeners) { - listenerHolder.invoke(listenerInvocation); + this.videoChangeFrameRateStrategy = videoChangeFrameRateStrategy; + sendRendererMessage( + TRACK_TYPE_VIDEO, MSG_SET_CHANGE_FRAME_RATE_STRATEGY, videoChangeFrameRateStrategy); + } + + @Override + public @C.VideoChangeFrameRateStrategy int getVideoChangeFrameRateStrategy() { + verifyApplicationThread(); + return videoChangeFrameRateStrategy; + } + + @Override + public VideoSize getVideoSize() { + verifyApplicationThread(); + return videoSize; + } + + @Override + public Size getSurfaceSize() { + verifyApplicationThread(); + return surfaceSize; + } + + @Override + public void clearVideoSurface() { + verifyApplicationThread(); + removeSurfaceCallbacks(); + setVideoOutputInternal(/* videoOutput= */ null); + maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); + } + + @Override + public void clearVideoSurface(@Nullable Surface surface) { + verifyApplicationThread(); + if (surface != null && surface == videoOutput) { + clearVideoSurface(); + } + } + + @Override + public void setVideoSurface(@Nullable Surface surface) { + verifyApplicationThread(); + removeSurfaceCallbacks(); + setVideoOutputInternal(surface); + int newSurfaceSize = surface == null ? 
0 : C.LENGTH_UNSET; + maybeNotifySurfaceSizeChanged(/* width= */ newSurfaceSize, /* height= */ newSurfaceSize); + } + + @Override + public void setVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder) { + verifyApplicationThread(); + if (surfaceHolder == null) { + clearVideoSurface(); + } else { + removeSurfaceCallbacks(); + this.surfaceHolderSurfaceIsVideoOutput = true; + this.surfaceHolder = surfaceHolder; + surfaceHolder.addCallback(componentListener); + Surface surface = surfaceHolder.getSurface(); + if (surface != null && surface.isValid()) { + setVideoOutputInternal(surface); + Rect surfaceSize = surfaceHolder.getSurfaceFrame(); + maybeNotifySurfaceSizeChanged(surfaceSize.width(), surfaceSize.height()); + } else { + setVideoOutputInternal(/* videoOutput= */ null); + maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); + } + } + } + + @Override + public void clearVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder) { + verifyApplicationThread(); + if (surfaceHolder != null && surfaceHolder == this.surfaceHolder) { + clearVideoSurface(); + } + } + + @Override + public void setVideoSurfaceView(@Nullable SurfaceView surfaceView) { + verifyApplicationThread(); + if (surfaceView instanceof VideoDecoderOutputBufferRenderer) { + removeSurfaceCallbacks(); + setVideoOutputInternal(surfaceView); + setNonVideoOutputSurfaceHolderInternal(surfaceView.getHolder()); + } else if (surfaceView instanceof SphericalGLSurfaceView) { + removeSurfaceCallbacks(); + sphericalGLSurfaceView = (SphericalGLSurfaceView) surfaceView; + createMessageInternal(frameMetadataListener) + .setType(FrameMetadataListener.MSG_SET_SPHERICAL_SURFACE_VIEW) + .setPayload(sphericalGLSurfaceView) + .send(); + sphericalGLSurfaceView.addVideoSurfaceListener(componentListener); + setVideoOutputInternal(sphericalGLSurfaceView.getVideoSurface()); + setNonVideoOutputSurfaceHolderInternal(surfaceView.getHolder()); + } else { + setVideoSurfaceHolder(surfaceView == null ? null : surfaceView.getHolder()); + } + } + + @Override + public void clearVideoSurfaceView(@Nullable SurfaceView surfaceView) { + verifyApplicationThread(); + clearVideoSurfaceHolder(surfaceView == null ? null : surfaceView.getHolder()); + } + + @Override + public void setVideoTextureView(@Nullable TextureView textureView) { + verifyApplicationThread(); + if (textureView == null) { + clearVideoSurface(); + } else { + removeSurfaceCallbacks(); + this.textureView = textureView; + if (textureView.getSurfaceTextureListener() != null) { + Log.w(TAG, "Replacing existing SurfaceTextureListener."); + } + textureView.setSurfaceTextureListener(componentListener); + @Nullable + SurfaceTexture surfaceTexture = + textureView.isAvailable() ? 
textureView.getSurfaceTexture() : null; + if (surfaceTexture == null) { + setVideoOutputInternal(/* videoOutput= */ null); + maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); + } else { + setSurfaceTextureInternal(surfaceTexture); + maybeNotifySurfaceSizeChanged(textureView.getWidth(), textureView.getHeight()); + } + } + } + + @Override + public void clearVideoTextureView(@Nullable TextureView textureView) { + verifyApplicationThread(); + if (textureView != null && textureView == this.textureView) { + clearVideoSurface(); + } + } + + @Override + public void setAudioAttributes(AudioAttributes newAudioAttributes, boolean handleAudioFocus) { + verifyApplicationThread(); + if (playerReleased) { + return; + } + if (!Util.areEqual(this.audioAttributes, newAudioAttributes)) { + this.audioAttributes = newAudioAttributes; + sendRendererMessage(TRACK_TYPE_AUDIO, MSG_SET_AUDIO_ATTRIBUTES, newAudioAttributes); + streamVolumeManager.setStreamType(Util.getStreamTypeForAudioUsage(newAudioAttributes.usage)); + // Queue event only and flush after updating playWhenReady in case both events are triggered. + listeners.queueEvent( + EVENT_AUDIO_ATTRIBUTES_CHANGED, + listener -> listener.onAudioAttributesChanged(newAudioAttributes)); + } + + audioFocusManager.setAudioAttributes(handleAudioFocus ? newAudioAttributes : null); + trackSelector.setAudioAttributes(newAudioAttributes); + boolean playWhenReady = getPlayWhenReady(); + @AudioFocusManager.PlayerCommand + int playerCommand = audioFocusManager.updateAudioFocus(playWhenReady, getPlaybackState()); + updatePlayWhenReady( + playWhenReady, playerCommand, getPlayWhenReadyChangeReason(playWhenReady, playerCommand)); + listeners.flushEvents(); + } + + @Override + public AudioAttributes getAudioAttributes() { + verifyApplicationThread(); + return audioAttributes; + } + + @Override + public void setAudioSessionId(int audioSessionId) { + verifyApplicationThread(); + if (this.audioSessionId == audioSessionId) { + return; + } + if (audioSessionId == C.AUDIO_SESSION_ID_UNSET) { + if (Util.SDK_INT < 21) { + audioSessionId = initializeKeepSessionIdAudioTrack(C.AUDIO_SESSION_ID_UNSET); + } else { + audioSessionId = Util.generateAudioSessionIdV21(applicationContext); + } + } else if (Util.SDK_INT < 21) { + // We need to re-initialize keepSessionIdAudioTrack to make sure the session is kept alive for + // as long as the player is using it. 
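+ // (Note, added for clarity: AudioManager.generateAudioSessionId() only exists from API 21, so on
+ // older API levels a small placeholder AudioTrack created with the explicit session id is what
+ // keeps that id registered and alive for the player.)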
+ initializeKeepSessionIdAudioTrack(audioSessionId); + } + this.audioSessionId = audioSessionId; + sendRendererMessage(TRACK_TYPE_AUDIO, MSG_SET_AUDIO_SESSION_ID, audioSessionId); + sendRendererMessage(TRACK_TYPE_VIDEO, MSG_SET_AUDIO_SESSION_ID, audioSessionId); + int finalAudioSessionId = audioSessionId; + listeners.sendEvent( + EVENT_AUDIO_SESSION_ID, listener -> listener.onAudioSessionIdChanged(finalAudioSessionId)); + } + + @Override + public int getAudioSessionId() { + verifyApplicationThread(); + return audioSessionId; + } + + @Override + public void setAuxEffectInfo(AuxEffectInfo auxEffectInfo) { + verifyApplicationThread(); + sendRendererMessage(TRACK_TYPE_AUDIO, MSG_SET_AUX_EFFECT_INFO, auxEffectInfo); + } + + @Override + public void clearAuxEffectInfo() { + verifyApplicationThread(); + setAuxEffectInfo(new AuxEffectInfo(AuxEffectInfo.NO_AUX_EFFECT_ID, /* sendLevel= */ 0f)); + } + + @RequiresApi(23) + @Override + public void setPreferredAudioDevice(@Nullable AudioDeviceInfo audioDeviceInfo) { + verifyApplicationThread(); + sendRendererMessage(TRACK_TYPE_AUDIO, MSG_SET_PREFERRED_AUDIO_DEVICE, audioDeviceInfo); + } + + @Override + public void setVolume(float volume) { + verifyApplicationThread(); + volume = Util.constrainValue(volume, /* min= */ 0, /* max= */ 1); + if (this.volume == volume) { + return; + } + this.volume = volume; + sendVolumeToRenderers(); + float finalVolume = volume; + listeners.sendEvent(EVENT_VOLUME_CHANGED, listener -> listener.onVolumeChanged(finalVolume)); + } + + @Override + public float getVolume() { + verifyApplicationThread(); + return volume; + } + + @Override + public boolean getSkipSilenceEnabled() { + verifyApplicationThread(); + return skipSilenceEnabled; + } + + @Override + public void setSkipSilenceEnabled(boolean newSkipSilenceEnabled) { + verifyApplicationThread(); + if (skipSilenceEnabled == newSkipSilenceEnabled) { + return; + } + skipSilenceEnabled = newSkipSilenceEnabled; + sendRendererMessage(TRACK_TYPE_AUDIO, MSG_SET_SKIP_SILENCE_ENABLED, newSkipSilenceEnabled); + listeners.sendEvent( + EVENT_SKIP_SILENCE_ENABLED_CHANGED, + listener -> listener.onSkipSilenceEnabledChanged(newSkipSilenceEnabled)); + } + + @Override + public AnalyticsCollector getAnalyticsCollector() { + verifyApplicationThread(); + return analyticsCollector; + } + + @Override + public void addAnalyticsListener(AnalyticsListener listener) { + // Don't verify application thread. We allow calls to this method from any thread. 
+ analyticsCollector.addListener(checkNotNull(listener)); + } + + @Override + public void removeAnalyticsListener(AnalyticsListener listener) { + verifyApplicationThread(); + analyticsCollector.removeListener(checkNotNull(listener)); + } + + @Override + public void setHandleAudioBecomingNoisy(boolean handleAudioBecomingNoisy) { + verifyApplicationThread(); + if (playerReleased) { + return; + } + audioBecomingNoisyManager.setEnabled(handleAudioBecomingNoisy); + } + + @Override + public void setPriorityTaskManager(@Nullable PriorityTaskManager priorityTaskManager) { + verifyApplicationThread(); + if (Util.areEqual(this.priorityTaskManager, priorityTaskManager)) { + return; + } + if (isPriorityTaskManagerRegistered) { + checkNotNull(this.priorityTaskManager).remove(C.PRIORITY_PLAYBACK); + } + if (priorityTaskManager != null && isLoading()) { + priorityTaskManager.add(C.PRIORITY_PLAYBACK); + isPriorityTaskManagerRegistered = true; + } else { + isPriorityTaskManagerRegistered = false; + } + this.priorityTaskManager = priorityTaskManager; + } + + @Override + @Nullable + public Format getVideoFormat() { + verifyApplicationThread(); + return videoFormat; + } + + @Override + @Nullable + public Format getAudioFormat() { + verifyApplicationThread(); + return audioFormat; + } + + @Override + @Nullable + public DecoderCounters getVideoDecoderCounters() { + verifyApplicationThread(); + return videoDecoderCounters; + } + + @Override + @Nullable + public DecoderCounters getAudioDecoderCounters() { + verifyApplicationThread(); + return audioDecoderCounters; + } + + @Override + public void setVideoFrameMetadataListener(VideoFrameMetadataListener listener) { + verifyApplicationThread(); + videoFrameMetadataListener = listener; + createMessageInternal(frameMetadataListener) + .setType(FrameMetadataListener.MSG_SET_VIDEO_FRAME_METADATA_LISTENER) + .setPayload(listener) + .send(); + } + + @Override + public void clearVideoFrameMetadataListener(VideoFrameMetadataListener listener) { + verifyApplicationThread(); + if (videoFrameMetadataListener != listener) { + return; + } + createMessageInternal(frameMetadataListener) + .setType(FrameMetadataListener.MSG_SET_VIDEO_FRAME_METADATA_LISTENER) + .setPayload(null) + .send(); + } + + @Override + public void setCameraMotionListener(CameraMotionListener listener) { + verifyApplicationThread(); + cameraMotionListener = listener; + createMessageInternal(frameMetadataListener) + .setType(FrameMetadataListener.MSG_SET_CAMERA_MOTION_LISTENER) + .setPayload(listener) + .send(); + } + + @Override + public void clearCameraMotionListener(CameraMotionListener listener) { + verifyApplicationThread(); + if (cameraMotionListener != listener) { + return; + } + createMessageInternal(frameMetadataListener) + .setType(FrameMetadataListener.MSG_SET_CAMERA_MOTION_LISTENER) + .setPayload(null) + .send(); + } + + @Override + public CueGroup getCurrentCues() { + verifyApplicationThread(); + return currentCueGroup; + } + + @Override + public void addListener(Listener listener) { + // Don't verify application thread. We allow calls to this method from any thread. + listeners.add(checkNotNull(listener)); + } + + @Override + public void removeListener(Listener listener) { + verifyApplicationThread(); + listeners.remove(checkNotNull(listener)); + } + + @Override + public void setHandleWakeLock(boolean handleWakeLock) { + verifyApplicationThread(); + setWakeMode(handleWakeLock ? 
C.WAKE_MODE_LOCAL : C.WAKE_MODE_NONE); + } + + @Override + public void setWakeMode(@C.WakeMode int wakeMode) { + verifyApplicationThread(); + switch (wakeMode) { + case C.WAKE_MODE_NONE: + wakeLockManager.setEnabled(false); + wifiLockManager.setEnabled(false); + break; + case C.WAKE_MODE_LOCAL: + wakeLockManager.setEnabled(true); + wifiLockManager.setEnabled(false); + break; + case C.WAKE_MODE_NETWORK: + wakeLockManager.setEnabled(true); + wifiLockManager.setEnabled(true); + break; + default: + break; + } + } + + @Override + public DeviceInfo getDeviceInfo() { + verifyApplicationThread(); + return deviceInfo; + } + + @Override + public int getDeviceVolume() { + verifyApplicationThread(); + return streamVolumeManager.getVolume(); + } + + @Override + public boolean isDeviceMuted() { + verifyApplicationThread(); + return streamVolumeManager.isMuted(); + } + + @Override + public void setDeviceVolume(int volume) { + verifyApplicationThread(); + streamVolumeManager.setVolume(volume); + } + + @Override + public void increaseDeviceVolume() { + verifyApplicationThread(); + streamVolumeManager.increaseVolume(); + } + + @Override + public void decreaseDeviceVolume() { + verifyApplicationThread(); + streamVolumeManager.decreaseVolume(); + } + + @Override + public void setDeviceMuted(boolean muted) { + verifyApplicationThread(); + streamVolumeManager.setMuted(muted); + } + + @Override + public boolean isTunnelingEnabled() { + verifyApplicationThread(); + for (RendererConfiguration config : playbackInfo.trackSelectorResult.rendererConfigurations) { + if (config.tunneling) { + return true; + } + } + return false; + } + + @SuppressWarnings("deprecation") // Calling deprecated methods. + /* package */ void setThrowsWhenUsingWrongThread(boolean throwsWhenUsingWrongThread) { + this.throwsWhenUsingWrongThread = throwsWhenUsingWrongThread; + listeners.setThrowsWhenUsingWrongThread(throwsWhenUsingWrongThread); + if (analyticsCollector instanceof DefaultAnalyticsCollector) { + ((DefaultAnalyticsCollector) analyticsCollector) + .setThrowsWhenUsingWrongThread(throwsWhenUsingWrongThread); + } + } + + /** + * Stops the player. + * + * @param reset Whether the playlist should be cleared and whether the playback position and + * playback error should be reset. + * @param error An optional {@link ExoPlaybackException} to set. 
+ */ + private void stopInternal(boolean reset, @Nullable ExoPlaybackException error) { + PlaybackInfo playbackInfo; + if (reset) { + playbackInfo = + removeMediaItemsInternal( + /* fromIndex= */ 0, /* toIndex= */ mediaSourceHolderSnapshots.size()); + playbackInfo = playbackInfo.copyWithPlaybackError(null); + } else { + playbackInfo = this.playbackInfo.copyWithLoadingMediaPeriodId(this.playbackInfo.periodId); + playbackInfo.bufferedPositionUs = playbackInfo.positionUs; + playbackInfo.totalBufferedDurationUs = 0; + } + playbackInfo = playbackInfo.copyWithPlaybackState(Player.STATE_IDLE); + if (error != null) { + playbackInfo = playbackInfo.copyWithPlaybackError(error); + } + pendingOperationAcks++; + internalPlayer.stop(); + boolean positionDiscontinuity = + playbackInfo.timeline.isEmpty() && !this.playbackInfo.timeline.isEmpty(); + updatePlaybackInfo( + playbackInfo, + TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED, + /* ignored */ PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST, + /* seekProcessed= */ false, + positionDiscontinuity, + DISCONTINUITY_REASON_REMOVE, + /* discontinuityWindowStartPositionUs= */ getCurrentPositionUsInternal(playbackInfo), + /* ignored */ C.INDEX_UNSET, + /* repeatCurrentMediaItem= */ false); + } + + private int getCurrentWindowIndexInternal() { + if (playbackInfo.timeline.isEmpty()) { + return maskingWindowIndex; + } else { + return playbackInfo.timeline.getPeriodByUid(playbackInfo.periodId.periodUid, period) + .windowIndex; + } + } + + private long getCurrentPositionUsInternal(PlaybackInfo playbackInfo) { + if (playbackInfo.timeline.isEmpty()) { + return Util.msToUs(maskingWindowPositionMs); + } else if (playbackInfo.periodId.isAd()) { + return playbackInfo.positionUs; + } else { + return periodPositionUsToWindowPositionUs( + playbackInfo.timeline, playbackInfo.periodId, playbackInfo.positionUs); + } + } + + private List createMediaSources(List mediaItems) { + List mediaSources = new ArrayList<>(); + for (int i = 0; i < mediaItems.size(); i++) { + mediaSources.add(mediaSourceFactory.createMediaSource(mediaItems.get(i))); + } + return mediaSources; + } + + private void handlePlaybackInfo(ExoPlayerImplInternal.PlaybackInfoUpdate playbackInfoUpdate) { + pendingOperationAcks -= playbackInfoUpdate.operationAcks; + if (playbackInfoUpdate.positionDiscontinuity) { + pendingDiscontinuityReason = playbackInfoUpdate.discontinuityReason; + pendingDiscontinuity = true; + } + if (playbackInfoUpdate.hasPlayWhenReadyChangeReason) { + pendingPlayWhenReadyChangeReason = playbackInfoUpdate.playWhenReadyChangeReason; + } + if (pendingOperationAcks == 0) { + Timeline newTimeline = playbackInfoUpdate.playbackInfo.timeline; + if (!this.playbackInfo.timeline.isEmpty() && newTimeline.isEmpty()) { + // Update the masking variables, which are used when the timeline becomes empty because a + // ConcatenatingMediaSource has been cleared. 
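+ // (Note, added for clarity: the "masking" variables keep a local window index and position so
+ // that getters such as getCurrentPosition() can answer immediately, e.g. right after a seekTo()
+ // that has not yet been acknowledged by the internal playback thread.)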
+ maskingWindowIndex = C.INDEX_UNSET; + maskingWindowPositionMs = 0; + maskingPeriodIndex = 0; + } + if (!newTimeline.isEmpty()) { + List timelines = ((PlaylistTimeline) newTimeline).getChildTimelines(); + checkState(timelines.size() == mediaSourceHolderSnapshots.size()); + for (int i = 0; i < timelines.size(); i++) { + mediaSourceHolderSnapshots.get(i).timeline = timelines.get(i); + } + } + boolean positionDiscontinuity = false; + long discontinuityWindowStartPositionUs = C.TIME_UNSET; + if (pendingDiscontinuity) { + positionDiscontinuity = + !playbackInfoUpdate.playbackInfo.periodId.equals(playbackInfo.periodId) + || playbackInfoUpdate.playbackInfo.discontinuityStartPositionUs + != playbackInfo.positionUs; + if (positionDiscontinuity) { + discontinuityWindowStartPositionUs = + newTimeline.isEmpty() || playbackInfoUpdate.playbackInfo.periodId.isAd() + ? playbackInfoUpdate.playbackInfo.discontinuityStartPositionUs + : periodPositionUsToWindowPositionUs( + newTimeline, + playbackInfoUpdate.playbackInfo.periodId, + playbackInfoUpdate.playbackInfo.discontinuityStartPositionUs); + } + } + pendingDiscontinuity = false; + updatePlaybackInfo( + playbackInfoUpdate.playbackInfo, + TIMELINE_CHANGE_REASON_SOURCE_UPDATE, + pendingPlayWhenReadyChangeReason, + /* seekProcessed= */ false, + positionDiscontinuity, + pendingDiscontinuityReason, + discontinuityWindowStartPositionUs, + /* ignored */ C.INDEX_UNSET, + /* repeatCurrentMediaItem= */ false); + } + } + + // Calling deprecated listeners. + @SuppressWarnings("deprecation") + private void updatePlaybackInfo( + PlaybackInfo playbackInfo, + @TimelineChangeReason int timelineChangeReason, + @PlayWhenReadyChangeReason int playWhenReadyChangeReason, + boolean seekProcessed, + boolean positionDiscontinuity, + @DiscontinuityReason int positionDiscontinuityReason, + long discontinuityWindowStartPositionUs, + int oldMaskingMediaItemIndex, + boolean repeatCurrentMediaItem) { + + // Assign playback info immediately such that all getters return the right values, but keep + // snapshot of previous and new state so that listener invocations are triggered correctly. 
+ PlaybackInfo previousPlaybackInfo = this.playbackInfo; + PlaybackInfo newPlaybackInfo = playbackInfo; + this.playbackInfo = playbackInfo; + + boolean timelineChanged = !previousPlaybackInfo.timeline.equals(newPlaybackInfo.timeline); + Pair mediaItemTransitionInfo = + evaluateMediaItemTransitionReason( + newPlaybackInfo, + previousPlaybackInfo, + positionDiscontinuity, + positionDiscontinuityReason, + timelineChanged, + repeatCurrentMediaItem); + boolean mediaItemTransitioned = mediaItemTransitionInfo.first; + int mediaItemTransitionReason = mediaItemTransitionInfo.second; + MediaMetadata newMediaMetadata = mediaMetadata; + @Nullable MediaItem mediaItem = null; + if (mediaItemTransitioned) { + if (!newPlaybackInfo.timeline.isEmpty()) { + int windowIndex = + newPlaybackInfo.timeline.getPeriodByUid(newPlaybackInfo.periodId.periodUid, period) + .windowIndex; + mediaItem = newPlaybackInfo.timeline.getWindow(windowIndex, window).mediaItem; + } + staticAndDynamicMediaMetadata = MediaMetadata.EMPTY; + } + if (mediaItemTransitioned + || !previousPlaybackInfo.staticMetadata.equals(newPlaybackInfo.staticMetadata)) { + staticAndDynamicMediaMetadata = + staticAndDynamicMediaMetadata + .buildUpon() + .populateFromMetadata(newPlaybackInfo.staticMetadata) + .build(); + newMediaMetadata = buildUpdatedMediaMetadata(); + } + boolean metadataChanged = !newMediaMetadata.equals(mediaMetadata); + mediaMetadata = newMediaMetadata; + boolean playWhenReadyChanged = + previousPlaybackInfo.playWhenReady != newPlaybackInfo.playWhenReady; + boolean playbackStateChanged = + previousPlaybackInfo.playbackState != newPlaybackInfo.playbackState; + if (playbackStateChanged || playWhenReadyChanged) { + updateWakeAndWifiLock(); + } + boolean isLoadingChanged = previousPlaybackInfo.isLoading != newPlaybackInfo.isLoading; + if (isLoadingChanged) { + updatePriorityTaskManagerForIsLoadingChange(newPlaybackInfo.isLoading); + } + + if (timelineChanged) { + listeners.queueEvent( + Player.EVENT_TIMELINE_CHANGED, + listener -> listener.onTimelineChanged(newPlaybackInfo.timeline, timelineChangeReason)); + } + if (positionDiscontinuity) { + PositionInfo previousPositionInfo = + getPreviousPositionInfo( + positionDiscontinuityReason, previousPlaybackInfo, oldMaskingMediaItemIndex); + PositionInfo positionInfo = getPositionInfo(discontinuityWindowStartPositionUs); + listeners.queueEvent( + Player.EVENT_POSITION_DISCONTINUITY, + listener -> { + listener.onPositionDiscontinuity(positionDiscontinuityReason); + listener.onPositionDiscontinuity( + previousPositionInfo, positionInfo, positionDiscontinuityReason); + }); + } + if (mediaItemTransitioned) { + @Nullable final MediaItem finalMediaItem = mediaItem; + listeners.queueEvent( + Player.EVENT_MEDIA_ITEM_TRANSITION, + listener -> listener.onMediaItemTransition(finalMediaItem, mediaItemTransitionReason)); + } + if (previousPlaybackInfo.playbackError != newPlaybackInfo.playbackError) { + listeners.queueEvent( + Player.EVENT_PLAYER_ERROR, + listener -> listener.onPlayerErrorChanged(newPlaybackInfo.playbackError)); + if (newPlaybackInfo.playbackError != null) { + listeners.queueEvent( + Player.EVENT_PLAYER_ERROR, + listener -> listener.onPlayerError(newPlaybackInfo.playbackError)); + } + } + if (previousPlaybackInfo.trackSelectorResult != newPlaybackInfo.trackSelectorResult) { + trackSelector.onSelectionActivated(newPlaybackInfo.trackSelectorResult.info); + listeners.queueEvent( + Player.EVENT_TRACKS_CHANGED, + listener -> listener.onTracksChanged(newPlaybackInfo.trackSelectorResult.tracks)); + 
} + if (metadataChanged) { + final MediaMetadata finalMediaMetadata = mediaMetadata; + listeners.queueEvent( + EVENT_MEDIA_METADATA_CHANGED, + listener -> listener.onMediaMetadataChanged(finalMediaMetadata)); + } + if (isLoadingChanged) { + listeners.queueEvent( + Player.EVENT_IS_LOADING_CHANGED, + listener -> { + listener.onLoadingChanged(newPlaybackInfo.isLoading); + listener.onIsLoadingChanged(newPlaybackInfo.isLoading); + }); + } + if (playbackStateChanged || playWhenReadyChanged) { + listeners.queueEvent( + /* eventFlag= */ C.INDEX_UNSET, + listener -> + listener.onPlayerStateChanged( + newPlaybackInfo.playWhenReady, newPlaybackInfo.playbackState)); + } + if (playbackStateChanged) { + listeners.queueEvent( + Player.EVENT_PLAYBACK_STATE_CHANGED, + listener -> listener.onPlaybackStateChanged(newPlaybackInfo.playbackState)); + } + if (playWhenReadyChanged) { + listeners.queueEvent( + Player.EVENT_PLAY_WHEN_READY_CHANGED, + listener -> + listener.onPlayWhenReadyChanged( + newPlaybackInfo.playWhenReady, playWhenReadyChangeReason)); + } + if (previousPlaybackInfo.playbackSuppressionReason + != newPlaybackInfo.playbackSuppressionReason) { + listeners.queueEvent( + Player.EVENT_PLAYBACK_SUPPRESSION_REASON_CHANGED, + listener -> + listener.onPlaybackSuppressionReasonChanged( + newPlaybackInfo.playbackSuppressionReason)); + } + if (isPlaying(previousPlaybackInfo) != isPlaying(newPlaybackInfo)) { + listeners.queueEvent( + Player.EVENT_IS_PLAYING_CHANGED, + listener -> listener.onIsPlayingChanged(isPlaying(newPlaybackInfo))); + } + if (!previousPlaybackInfo.playbackParameters.equals(newPlaybackInfo.playbackParameters)) { + listeners.queueEvent( + Player.EVENT_PLAYBACK_PARAMETERS_CHANGED, + listener -> listener.onPlaybackParametersChanged(newPlaybackInfo.playbackParameters)); + } + if (seekProcessed) { + listeners.queueEvent(/* eventFlag= */ C.INDEX_UNSET, Listener::onSeekProcessed); + } + updateAvailableCommands(); + listeners.flushEvents(); + + if (previousPlaybackInfo.sleepingForOffload != newPlaybackInfo.sleepingForOffload) { + for (AudioOffloadListener listener : audioOffloadListeners) { + listener.onExperimentalSleepingForOffloadChanged(newPlaybackInfo.sleepingForOffload); + } + } + } + + private PositionInfo getPreviousPositionInfo( + @DiscontinuityReason int positionDiscontinuityReason, + PlaybackInfo oldPlaybackInfo, + int oldMaskingMediaItemIndex) { + @Nullable Object oldWindowUid = null; + @Nullable Object oldPeriodUid = null; + int oldMediaItemIndex = oldMaskingMediaItemIndex; + int oldPeriodIndex = C.INDEX_UNSET; + @Nullable MediaItem oldMediaItem = null; + Timeline.Period oldPeriod = new Timeline.Period(); + if (!oldPlaybackInfo.timeline.isEmpty()) { + oldPeriodUid = oldPlaybackInfo.periodId.periodUid; + oldPlaybackInfo.timeline.getPeriodByUid(oldPeriodUid, oldPeriod); + oldMediaItemIndex = oldPeriod.windowIndex; + oldPeriodIndex = oldPlaybackInfo.timeline.getIndexOfPeriod(oldPeriodUid); + oldWindowUid = oldPlaybackInfo.timeline.getWindow(oldMediaItemIndex, window).uid; + oldMediaItem = window.mediaItem; + } + long oldPositionUs; + long oldContentPositionUs; + if (positionDiscontinuityReason == DISCONTINUITY_REASON_AUTO_TRANSITION) { + if (oldPlaybackInfo.periodId.isAd()) { + // The old position is the end of the previous ad. + oldPositionUs = + oldPeriod.getAdDurationUs( + oldPlaybackInfo.periodId.adGroupIndex, oldPlaybackInfo.periodId.adIndexInAdGroup); + // The ad cue point is stored in the old requested content position. 
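+ // (That is, the content position at which playback resumes once the ad has finished.)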
+ oldContentPositionUs = getRequestedContentPositionUs(oldPlaybackInfo); + } else if (oldPlaybackInfo.periodId.nextAdGroupIndex != C.INDEX_UNSET) { + // The old position is the end of a clipped content before an ad group. Use the exact ad + // cue point as the transition position. + oldPositionUs = getRequestedContentPositionUs(playbackInfo); + oldContentPositionUs = oldPositionUs; + } else { + // The old position is the end of a Timeline period. Use the exact duration. + oldPositionUs = oldPeriod.positionInWindowUs + oldPeriod.durationUs; + oldContentPositionUs = oldPositionUs; + } + } else if (oldPlaybackInfo.periodId.isAd()) { + oldPositionUs = oldPlaybackInfo.positionUs; + oldContentPositionUs = getRequestedContentPositionUs(oldPlaybackInfo); + } else { + oldPositionUs = oldPeriod.positionInWindowUs + oldPlaybackInfo.positionUs; + oldContentPositionUs = oldPositionUs; + } + return new PositionInfo( + oldWindowUid, + oldMediaItemIndex, + oldMediaItem, + oldPeriodUid, + oldPeriodIndex, + Util.usToMs(oldPositionUs), + Util.usToMs(oldContentPositionUs), + oldPlaybackInfo.periodId.adGroupIndex, + oldPlaybackInfo.periodId.adIndexInAdGroup); + } + + private PositionInfo getPositionInfo(long discontinuityWindowStartPositionUs) { + @Nullable Object newWindowUid = null; + @Nullable Object newPeriodUid = null; + int newMediaItemIndex = getCurrentMediaItemIndex(); + int newPeriodIndex = C.INDEX_UNSET; + @Nullable MediaItem newMediaItem = null; + if (!playbackInfo.timeline.isEmpty()) { + newPeriodUid = playbackInfo.periodId.periodUid; + playbackInfo.timeline.getPeriodByUid(newPeriodUid, period); + newPeriodIndex = playbackInfo.timeline.getIndexOfPeriod(newPeriodUid); + newWindowUid = playbackInfo.timeline.getWindow(newMediaItemIndex, window).uid; + newMediaItem = window.mediaItem; + } + long positionMs = Util.usToMs(discontinuityWindowStartPositionUs); + return new PositionInfo( + newWindowUid, + newMediaItemIndex, + newMediaItem, + newPeriodUid, + newPeriodIndex, + positionMs, + /* contentPositionMs= */ playbackInfo.periodId.isAd() + ? Util.usToMs(getRequestedContentPositionUs(playbackInfo)) + : positionMs, + playbackInfo.periodId.adGroupIndex, + playbackInfo.periodId.adIndexInAdGroup); + } + + private static long getRequestedContentPositionUs(PlaybackInfo playbackInfo) { + Timeline.Window window = new Timeline.Window(); + Timeline.Period period = new Timeline.Period(); + playbackInfo.timeline.getPeriodByUid(playbackInfo.periodId.periodUid, period); + return playbackInfo.requestedContentPositionUs == C.TIME_UNSET + ? 
playbackInfo.timeline.getWindow(period.windowIndex, window).getDefaultPositionUs() + : period.getPositionInWindowUs() + playbackInfo.requestedContentPositionUs; + } + + private Pair evaluateMediaItemTransitionReason( + PlaybackInfo playbackInfo, + PlaybackInfo oldPlaybackInfo, + boolean positionDiscontinuity, + @DiscontinuityReason int positionDiscontinuityReason, + boolean timelineChanged, + boolean repeatCurrentMediaItem) { + + Timeline oldTimeline = oldPlaybackInfo.timeline; + Timeline newTimeline = playbackInfo.timeline; + if (newTimeline.isEmpty() && oldTimeline.isEmpty()) { + return new Pair<>(/* isTransitioning */ false, /* mediaItemTransitionReason */ C.INDEX_UNSET); + } else if (newTimeline.isEmpty() != oldTimeline.isEmpty()) { + return new Pair<>(/* isTransitioning */ true, MEDIA_ITEM_TRANSITION_REASON_PLAYLIST_CHANGED); + } + + int oldWindowIndex = + oldTimeline.getPeriodByUid(oldPlaybackInfo.periodId.periodUid, period).windowIndex; + Object oldWindowUid = oldTimeline.getWindow(oldWindowIndex, window).uid; + int newWindowIndex = + newTimeline.getPeriodByUid(playbackInfo.periodId.periodUid, period).windowIndex; + Object newWindowUid = newTimeline.getWindow(newWindowIndex, window).uid; + if (!oldWindowUid.equals(newWindowUid)) { + @Player.MediaItemTransitionReason int transitionReason; + if (positionDiscontinuity + && positionDiscontinuityReason == DISCONTINUITY_REASON_AUTO_TRANSITION) { + transitionReason = MEDIA_ITEM_TRANSITION_REASON_AUTO; + } else if (positionDiscontinuity + && positionDiscontinuityReason == DISCONTINUITY_REASON_SEEK) { + transitionReason = MEDIA_ITEM_TRANSITION_REASON_SEEK; + } else if (timelineChanged) { + transitionReason = MEDIA_ITEM_TRANSITION_REASON_PLAYLIST_CHANGED; + } else { + // A change in window uid must be justified by one of the reasons above. + throw new IllegalStateException(); + } + return new Pair<>(/* isTransitioning */ true, transitionReason); + } else { + // Only mark changes within the current item as a transition if we are repeating automatically + // or via a seek to next/previous. 
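+ // (A higher windowSequenceNumber for the same window uid means the same item started over, which
+ // is how an automatic repeat is detected below.)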
+ if (positionDiscontinuity + && positionDiscontinuityReason == DISCONTINUITY_REASON_AUTO_TRANSITION + && oldPlaybackInfo.periodId.windowSequenceNumber + < playbackInfo.periodId.windowSequenceNumber) { + return new Pair<>(/* isTransitioning */ true, MEDIA_ITEM_TRANSITION_REASON_REPEAT); + } + if (positionDiscontinuity + && positionDiscontinuityReason == DISCONTINUITY_REASON_SEEK + && repeatCurrentMediaItem) { + return new Pair<>(/* isTransitioning */ true, MEDIA_ITEM_TRANSITION_REASON_SEEK); + } + } + return new Pair<>(/* isTransitioning */ false, /* mediaItemTransitionReason */ C.INDEX_UNSET); + } + + private void updateAvailableCommands() { + Commands previousAvailableCommands = availableCommands; + availableCommands = Util.getAvailableCommands(wrappingPlayer, permanentAvailableCommands); + if (!availableCommands.equals(previousAvailableCommands)) { + listeners.queueEvent( + Player.EVENT_AVAILABLE_COMMANDS_CHANGED, + listener -> listener.onAvailableCommandsChanged(availableCommands)); + } + } + + private void setMediaSourcesInternal( + List mediaSources, + int startWindowIndex, + long startPositionMs, + boolean resetToDefaultPosition) { + int currentWindowIndex = getCurrentWindowIndexInternal(); + long currentPositionMs = getCurrentPosition(); + pendingOperationAcks++; + if (!mediaSourceHolderSnapshots.isEmpty()) { + removeMediaSourceHolders( + /* fromIndex= */ 0, /* toIndexExclusive= */ mediaSourceHolderSnapshots.size()); + } + List holders = + addMediaSourceHolders(/* index= */ 0, mediaSources); + Timeline timeline = createMaskingTimeline(); + if (!timeline.isEmpty() && startWindowIndex >= timeline.getWindowCount()) { + throw new IllegalSeekPositionException(timeline, startWindowIndex, startPositionMs); + } + // Evaluate the actual start position. + if (resetToDefaultPosition) { + startWindowIndex = timeline.getFirstWindowIndex(shuffleModeEnabled); + startPositionMs = C.TIME_UNSET; + } else if (startWindowIndex == C.INDEX_UNSET) { + startWindowIndex = currentWindowIndex; + startPositionMs = currentPositionMs; + } + PlaybackInfo newPlaybackInfo = + maskTimelineAndPosition( + playbackInfo, + timeline, + maskWindowPositionMsOrGetPeriodPositionUs(timeline, startWindowIndex, startPositionMs)); + // Mask the playback state. + int maskingPlaybackState = newPlaybackInfo.playbackState; + if (startWindowIndex != C.INDEX_UNSET && newPlaybackInfo.playbackState != STATE_IDLE) { + // Position reset to startWindowIndex (results in pending initial seek). + if (timeline.isEmpty() || startWindowIndex >= timeline.getWindowCount()) { + // Setting an empty timeline or invalid seek transitions to ended. 
+ maskingPlaybackState = STATE_ENDED; + } else { + maskingPlaybackState = STATE_BUFFERING; + } + } + newPlaybackInfo = newPlaybackInfo.copyWithPlaybackState(maskingPlaybackState); + internalPlayer.setMediaSources( + holders, startWindowIndex, Util.msToUs(startPositionMs), shuffleOrder); + boolean positionDiscontinuity = + !playbackInfo.periodId.periodUid.equals(newPlaybackInfo.periodId.periodUid) + && !playbackInfo.timeline.isEmpty(); + updatePlaybackInfo( + newPlaybackInfo, + /* timelineChangeReason= */ TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED, + /* ignored */ PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST, + /* seekProcessed= */ false, + /* positionDiscontinuity= */ positionDiscontinuity, + DISCONTINUITY_REASON_REMOVE, + /* discontinuityWindowStartPositionUs= */ getCurrentPositionUsInternal(newPlaybackInfo), + /* ignored */ C.INDEX_UNSET, + /* repeatCurrentMediaItem= */ false); + } + + private List addMediaSourceHolders( + int index, List mediaSources) { + List holders = new ArrayList<>(); + for (int i = 0; i < mediaSources.size(); i++) { + MediaSourceList.MediaSourceHolder holder = + new MediaSourceList.MediaSourceHolder(mediaSources.get(i), useLazyPreparation); + holders.add(holder); + mediaSourceHolderSnapshots.add( + i + index, new MediaSourceHolderSnapshot(holder.uid, holder.mediaSource.getTimeline())); + } + shuffleOrder = + shuffleOrder.cloneAndInsert( + /* insertionIndex= */ index, /* insertionCount= */ holders.size()); + return holders; + } + + private PlaybackInfo removeMediaItemsInternal(int fromIndex, int toIndex) { + int currentIndex = getCurrentMediaItemIndex(); + Timeline oldTimeline = getCurrentTimeline(); + int currentMediaSourceCount = mediaSourceHolderSnapshots.size(); + pendingOperationAcks++; + removeMediaSourceHolders(fromIndex, /* toIndexExclusive= */ toIndex); + Timeline newTimeline = createMaskingTimeline(); + PlaybackInfo newPlaybackInfo = + maskTimelineAndPosition( + playbackInfo, + newTimeline, + getPeriodPositionUsAfterTimelineChanged(oldTimeline, newTimeline)); + // Player transitions to STATE_ENDED if the current index is part of the removed tail. + final boolean transitionsToEnded = + newPlaybackInfo.playbackState != STATE_IDLE + && newPlaybackInfo.playbackState != STATE_ENDED + && fromIndex < toIndex + && toIndex == currentMediaSourceCount + && currentIndex >= newPlaybackInfo.timeline.getWindowCount(); + if (transitionsToEnded) { + newPlaybackInfo = newPlaybackInfo.copyWithPlaybackState(STATE_ENDED); + } + internalPlayer.removeMediaSources(fromIndex, toIndex, shuffleOrder); + return newPlaybackInfo; + } + + private void removeMediaSourceHolders(int fromIndex, int toIndexExclusive) { + for (int i = toIndexExclusive - 1; i >= fromIndex; i--) { + mediaSourceHolderSnapshots.remove(i); + } + shuffleOrder = shuffleOrder.cloneAndRemove(fromIndex, toIndexExclusive); + } + + private Timeline createMaskingTimeline() { + return new PlaylistTimeline(mediaSourceHolderSnapshots, shuffleOrder); + } + + private PlaybackInfo maskTimelineAndPosition( + PlaybackInfo playbackInfo, Timeline timeline, @Nullable Pair periodPositionUs) { + checkArgument(timeline.isEmpty() || periodPositionUs != null); + Timeline oldTimeline = playbackInfo.timeline; + // Mask the timeline. + playbackInfo = playbackInfo.copyWithTimeline(timeline); + + if (timeline.isEmpty()) { + // Reset periodId and loadingPeriodId. 
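+ // (An empty timeline has no real periods, so a placeholder MediaPeriodId is used instead.)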
+ MediaPeriodId dummyMediaPeriodId = PlaybackInfo.getDummyPeriodForEmptyTimeline(); + long positionUs = Util.msToUs(maskingWindowPositionMs); + playbackInfo = + playbackInfo.copyWithNewPosition( + dummyMediaPeriodId, + positionUs, + /* requestedContentPositionUs= */ positionUs, + /* discontinuityStartPositionUs= */ positionUs, + /* totalBufferedDurationUs= */ 0, + TrackGroupArray.EMPTY, + emptyTrackSelectorResult, + /* staticMetadata= */ ImmutableList.of()); + playbackInfo = playbackInfo.copyWithLoadingMediaPeriodId(dummyMediaPeriodId); + playbackInfo.bufferedPositionUs = playbackInfo.positionUs; + return playbackInfo; + } + + Object oldPeriodUid = playbackInfo.periodId.periodUid; + boolean playingPeriodChanged = !oldPeriodUid.equals(castNonNull(periodPositionUs).first); + MediaPeriodId newPeriodId = + playingPeriodChanged ? new MediaPeriodId(periodPositionUs.first) : playbackInfo.periodId; + long newContentPositionUs = periodPositionUs.second; + long oldContentPositionUs = Util.msToUs(getContentPosition()); + if (!oldTimeline.isEmpty()) { + oldContentPositionUs -= + oldTimeline.getPeriodByUid(oldPeriodUid, period).getPositionInWindowUs(); + } + + if (playingPeriodChanged || newContentPositionUs < oldContentPositionUs) { + checkState(!newPeriodId.isAd()); + // The playing period changes or a backwards seek within the playing period occurs. + playbackInfo = + playbackInfo.copyWithNewPosition( + newPeriodId, + /* positionUs= */ newContentPositionUs, + /* requestedContentPositionUs= */ newContentPositionUs, + /* discontinuityStartPositionUs= */ newContentPositionUs, + /* totalBufferedDurationUs= */ 0, + playingPeriodChanged ? TrackGroupArray.EMPTY : playbackInfo.trackGroups, + playingPeriodChanged ? emptyTrackSelectorResult : playbackInfo.trackSelectorResult, + playingPeriodChanged ? ImmutableList.of() : playbackInfo.staticMetadata); + playbackInfo = playbackInfo.copyWithLoadingMediaPeriodId(newPeriodId); + playbackInfo.bufferedPositionUs = newContentPositionUs; + } else if (newContentPositionUs == oldContentPositionUs) { + // Period position remains unchanged. + int loadingPeriodIndex = + timeline.getIndexOfPeriod(playbackInfo.loadingMediaPeriodId.periodUid); + if (loadingPeriodIndex == C.INDEX_UNSET + || timeline.getPeriod(loadingPeriodIndex, period).windowIndex + != timeline.getPeriodByUid(newPeriodId.periodUid, period).windowIndex) { + // Discard periods after the playing period, if the loading period is discarded or the + // playing and loading period are not in the same window. + timeline.getPeriodByUid(newPeriodId.periodUid, period); + long maskedBufferedPositionUs = + newPeriodId.isAd() + ? period.getAdDurationUs(newPeriodId.adGroupIndex, newPeriodId.adIndexInAdGroup) + : period.durationUs; + playbackInfo = + playbackInfo.copyWithNewPosition( + newPeriodId, + /* positionUs= */ playbackInfo.positionUs, + /* requestedContentPositionUs= */ playbackInfo.positionUs, + playbackInfo.discontinuityStartPositionUs, + /* totalBufferedDurationUs= */ maskedBufferedPositionUs - playbackInfo.positionUs, + playbackInfo.trackGroups, + playbackInfo.trackSelectorResult, + playbackInfo.staticMetadata); + playbackInfo = playbackInfo.copyWithLoadingMediaPeriodId(newPeriodId); + playbackInfo.bufferedPositionUs = maskedBufferedPositionUs; + } + } else { + checkState(!newPeriodId.isAd()); + // A forward seek within the playing period (timeline did not change). 
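+ // The already-buffered duration shrinks by the distance seeked forward, clamped at zero.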
+ long maskedTotalBufferedDurationUs = + max( + 0, + playbackInfo.totalBufferedDurationUs - (newContentPositionUs - oldContentPositionUs)); + long maskedBufferedPositionUs = playbackInfo.bufferedPositionUs; + if (playbackInfo.loadingMediaPeriodId.equals(playbackInfo.periodId)) { + maskedBufferedPositionUs = newContentPositionUs + maskedTotalBufferedDurationUs; + } + playbackInfo = + playbackInfo.copyWithNewPosition( + newPeriodId, + /* positionUs= */ newContentPositionUs, + /* requestedContentPositionUs= */ newContentPositionUs, + /* discontinuityStartPositionUs= */ newContentPositionUs, + maskedTotalBufferedDurationUs, + playbackInfo.trackGroups, + playbackInfo.trackSelectorResult, + playbackInfo.staticMetadata); + playbackInfo.bufferedPositionUs = maskedBufferedPositionUs; + } + return playbackInfo; + } + + @Nullable + private Pair getPeriodPositionUsAfterTimelineChanged( + Timeline oldTimeline, Timeline newTimeline) { + long currentPositionMs = getContentPosition(); + if (oldTimeline.isEmpty() || newTimeline.isEmpty()) { + boolean isCleared = !oldTimeline.isEmpty() && newTimeline.isEmpty(); + return maskWindowPositionMsOrGetPeriodPositionUs( + newTimeline, + isCleared ? C.INDEX_UNSET : getCurrentWindowIndexInternal(), + isCleared ? C.TIME_UNSET : currentPositionMs); + } + int currentMediaItemIndex = getCurrentMediaItemIndex(); + @Nullable + Pair oldPeriodPositionUs = + oldTimeline.getPeriodPositionUs( + window, period, currentMediaItemIndex, Util.msToUs(currentPositionMs)); + Object periodUid = castNonNull(oldPeriodPositionUs).first; + if (newTimeline.getIndexOfPeriod(periodUid) != C.INDEX_UNSET) { + // The old period position is still available in the new timeline. + return oldPeriodPositionUs; + } + // Period uid not found in new timeline. Try to get subsequent period. + @Nullable + Object nextPeriodUid = + ExoPlayerImplInternal.resolveSubsequentPeriod( + window, period, repeatMode, shuffleModeEnabled, periodUid, oldTimeline, newTimeline); + if (nextPeriodUid != null) { + // Reset position to the default position of the window of the subsequent period. + newTimeline.getPeriodByUid(nextPeriodUid, period); + return maskWindowPositionMsOrGetPeriodPositionUs( + newTimeline, + period.windowIndex, + newTimeline.getWindow(period.windowIndex, window).getDefaultPositionMs()); + } else { + // No subsequent period found and the new timeline is not empty. Use the default position. + return maskWindowPositionMsOrGetPeriodPositionUs( + newTimeline, /* windowIndex= */ C.INDEX_UNSET, /* windowPositionMs= */ C.TIME_UNSET); + } + } + + @Nullable + private Pair maskWindowPositionMsOrGetPeriodPositionUs( + Timeline timeline, int windowIndex, long windowPositionMs) { + if (timeline.isEmpty()) { + // If empty we store the initial seek in the masking variables. + maskingWindowIndex = windowIndex; + maskingWindowPositionMs = windowPositionMs == C.TIME_UNSET ? 0 : windowPositionMs; + maskingPeriodIndex = 0; + return null; + } + if (windowIndex == C.INDEX_UNSET || windowIndex >= timeline.getWindowCount()) { + // Use default position of timeline if window index still unset or if a previous initial seek + // now turns out to be invalid. 
+ windowIndex = timeline.getFirstWindowIndex(shuffleModeEnabled); + windowPositionMs = timeline.getWindow(windowIndex, window).getDefaultPositionMs(); + } + return timeline.getPeriodPositionUs(window, period, windowIndex, Util.msToUs(windowPositionMs)); + } + + private long periodPositionUsToWindowPositionUs( + Timeline timeline, MediaPeriodId periodId, long positionUs) { + timeline.getPeriodByUid(periodId.periodUid, period); + positionUs += period.getPositionInWindowUs(); + return positionUs; + } + + private PlayerMessage createMessageInternal(Target target) { + int currentWindowIndex = getCurrentWindowIndexInternal(); + return new PlayerMessage( + internalPlayer, + target, + playbackInfo.timeline, + currentWindowIndex == C.INDEX_UNSET ? 0 : currentWindowIndex, + clock, + internalPlayer.getPlaybackLooper()); + } + + /** + * Builds a {@link MediaMetadata} from the main sources. + * + *
<p>
      {@link MediaItem} {@link MediaMetadata} is prioritized, with any gaps/missing fields + * populated by metadata from static ({@link TrackGroup} {@link Format}) and dynamic ({@link + * MetadataOutput#onMetadata(Metadata)}) sources. + */ + private MediaMetadata buildUpdatedMediaMetadata() { + Timeline timeline = getCurrentTimeline(); + if (timeline.isEmpty()) { + return staticAndDynamicMediaMetadata; + } + MediaItem mediaItem = timeline.getWindow(getCurrentMediaItemIndex(), window).mediaItem; + // MediaItem metadata is prioritized over metadata within the media. + return staticAndDynamicMediaMetadata.buildUpon().populate(mediaItem.mediaMetadata).build(); + } + + private void removeSurfaceCallbacks() { + if (sphericalGLSurfaceView != null) { + createMessageInternal(frameMetadataListener) + .setType(FrameMetadataListener.MSG_SET_SPHERICAL_SURFACE_VIEW) + .setPayload(null) + .send(); + sphericalGLSurfaceView.removeVideoSurfaceListener(componentListener); + sphericalGLSurfaceView = null; + } + if (textureView != null) { + if (textureView.getSurfaceTextureListener() != componentListener) { + Log.w(TAG, "SurfaceTextureListener already unset or replaced."); + } else { + textureView.setSurfaceTextureListener(null); + } + textureView = null; + } + if (surfaceHolder != null) { + surfaceHolder.removeCallback(componentListener); + surfaceHolder = null; + } + } + + private void setSurfaceTextureInternal(SurfaceTexture surfaceTexture) { + Surface surface = new Surface(surfaceTexture); + setVideoOutputInternal(surface); + ownedSurface = surface; + } + + private void setVideoOutputInternal(@Nullable Object videoOutput) { + // Note: We don't turn this method into a no-op if the output is being replaced with itself so + // as to ensure onRenderedFirstFrame callbacks are still called in this case. + List messages = new ArrayList<>(); + for (Renderer renderer : renderers) { + if (renderer.getTrackType() == TRACK_TYPE_VIDEO) { + messages.add( + createMessageInternal(renderer) + .setType(MSG_SET_VIDEO_OUTPUT) + .setPayload(videoOutput) + .send()); + } + } + boolean messageDeliveryTimedOut = false; + if (this.videoOutput != null && this.videoOutput != videoOutput) { + // We're replacing an output. Block to ensure that this output will not be accessed by the + // renderers after this method returns. + try { + for (PlayerMessage message : messages) { + message.blockUntilDelivered(detachSurfaceTimeoutMs); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } catch (TimeoutException e) { + messageDeliveryTimedOut = true; + } + if (this.videoOutput == ownedSurface) { + // We're replacing a surface that we are responsible for releasing. + ownedSurface.release(); + ownedSurface = null; + } + } + this.videoOutput = videoOutput; + if (messageDeliveryTimedOut) { + stopInternal( + /* reset= */ false, + ExoPlaybackException.createForUnexpected( + new ExoTimeoutException(ExoTimeoutException.TIMEOUT_OPERATION_DETACH_SURFACE), + PlaybackException.ERROR_CODE_TIMEOUT)); + } + } + + /** + * Sets the holder of the surface that will be displayed to the user, but which should + * not be the output for video renderers. This case occurs when video frames need to be + * rendered to an intermediate surface (which is not the one held by the provided holder). + * + * @param nonVideoOutputSurfaceHolder The holder of the surface that will eventually be displayed + * to the user. 
+ */ + private void setNonVideoOutputSurfaceHolderInternal(SurfaceHolder nonVideoOutputSurfaceHolder) { + // Although we won't use the view's surface directly as the video output, still use the holder + // to query the surface size, to be informed in changes to the size via componentListener, and + // for equality checking in clearVideoSurfaceHolder. + surfaceHolderSurfaceIsVideoOutput = false; + surfaceHolder = nonVideoOutputSurfaceHolder; + surfaceHolder.addCallback(componentListener); + Surface surface = surfaceHolder.getSurface(); + if (surface != null && surface.isValid()) { + Rect surfaceSize = surfaceHolder.getSurfaceFrame(); + maybeNotifySurfaceSizeChanged(surfaceSize.width(), surfaceSize.height()); + } else { + maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); + } + } + + private void maybeNotifySurfaceSizeChanged(int width, int height) { + if (width != surfaceSize.getWidth() || height != surfaceSize.getHeight()) { + surfaceSize = new Size(width, height); + listeners.sendEvent( + EVENT_SURFACE_SIZE_CHANGED, listener -> listener.onSurfaceSizeChanged(width, height)); + } + } + + private void sendVolumeToRenderers() { + float scaledVolume = volume * audioFocusManager.getVolumeMultiplier(); + sendRendererMessage(TRACK_TYPE_AUDIO, MSG_SET_VOLUME, scaledVolume); + } + + private void updatePlayWhenReady( + boolean playWhenReady, + @AudioFocusManager.PlayerCommand int playerCommand, + @Player.PlayWhenReadyChangeReason int playWhenReadyChangeReason) { + playWhenReady = playWhenReady && playerCommand != AudioFocusManager.PLAYER_COMMAND_DO_NOT_PLAY; + @PlaybackSuppressionReason + int playbackSuppressionReason = + playWhenReady && playerCommand != AudioFocusManager.PLAYER_COMMAND_PLAY_WHEN_READY + ? Player.PLAYBACK_SUPPRESSION_REASON_TRANSIENT_AUDIO_FOCUS_LOSS + : Player.PLAYBACK_SUPPRESSION_REASON_NONE; + if (playbackInfo.playWhenReady == playWhenReady + && playbackInfo.playbackSuppressionReason == playbackSuppressionReason) { + return; + } + pendingOperationAcks++; + PlaybackInfo playbackInfo = + this.playbackInfo.copyWithPlayWhenReady(playWhenReady, playbackSuppressionReason); + internalPlayer.setPlayWhenReady(playWhenReady, playbackSuppressionReason); + updatePlaybackInfo( + playbackInfo, + /* ignored */ TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED, + playWhenReadyChangeReason, + /* seekProcessed= */ false, + /* positionDiscontinuity= */ false, + /* ignored */ DISCONTINUITY_REASON_INTERNAL, + /* ignored */ C.TIME_UNSET, + /* ignored */ C.INDEX_UNSET, + /* repeatCurrentMediaItem= */ false); + } + + private void updateWakeAndWifiLock() { + @State int playbackState = getPlaybackState(); + switch (playbackState) { + case Player.STATE_READY: + case Player.STATE_BUFFERING: + boolean isSleeping = experimentalIsSleepingForOffload(); + wakeLockManager.setStayAwake(getPlayWhenReady() && !isSleeping); + // The wifi lock is not released while sleeping to avoid interrupting downloads. + wifiLockManager.setStayAwake(getPlayWhenReady()); + break; + case Player.STATE_ENDED: + case Player.STATE_IDLE: + wakeLockManager.setStayAwake(false); + wifiLockManager.setStayAwake(false); + break; + default: + throw new IllegalStateException(); + } + } + + private void verifyApplicationThread() { + // The constructor may be executed on a background thread. Wait with accessing the player from + // the app thread until the constructor finished executing. 
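+ // (constructorFinished is opened once the constructor has run; blockUninterruptible() waits for
+ // that without being interrupted, so callers never observe a half-constructed player.)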
+ constructorFinished.blockUninterruptible(); + if (Thread.currentThread() != getApplicationLooper().getThread()) { + String message = + Util.formatInvariant( + "Player is accessed on the wrong thread.\n" + + "Current thread: '%s'\n" + + "Expected thread: '%s'\n" + + "See https://exoplayer.dev/issues/player-accessed-on-wrong-thread", + Thread.currentThread().getName(), getApplicationLooper().getThread().getName()); + if (throwsWhenUsingWrongThread) { + throw new IllegalStateException(message); + } + Log.w(TAG, message, hasNotifiedFullWrongThreadWarning ? null : new IllegalStateException()); + hasNotifiedFullWrongThreadWarning = true; + } + } + + private void sendRendererMessage( + @C.TrackType int trackType, int messageType, @Nullable Object payload) { + for (Renderer renderer : renderers) { + if (renderer.getTrackType() == trackType) { + createMessageInternal(renderer).setType(messageType).setPayload(payload).send(); + } + } + } + + /** + * Initializes {@link #keepSessionIdAudioTrack} to keep an audio session ID alive. If the audio + * session ID is {@link C#AUDIO_SESSION_ID_UNSET} then a new audio session ID is generated. + * + *
<p>
      Use of this method is only required on API level 21 and earlier. + * + * @param audioSessionId The audio session ID, or {@link C#AUDIO_SESSION_ID_UNSET} to generate a + * new one. + * @return The audio session ID. + */ + private int initializeKeepSessionIdAudioTrack(int audioSessionId) { + if (keepSessionIdAudioTrack != null + && keepSessionIdAudioTrack.getAudioSessionId() != audioSessionId) { + keepSessionIdAudioTrack.release(); + keepSessionIdAudioTrack = null; + } + if (keepSessionIdAudioTrack == null) { + int sampleRate = 4000; // Minimum sample rate supported by the platform. + int channelConfig = AudioFormat.CHANNEL_OUT_MONO; + @C.PcmEncoding int encoding = C.ENCODING_PCM_16BIT; + int bufferSize = 2; // Use a two byte buffer, as it is not actually used for playback. + keepSessionIdAudioTrack = + new AudioTrack( + C.STREAM_TYPE_DEFAULT, + sampleRate, + channelConfig, + encoding, + bufferSize, + AudioTrack.MODE_STATIC, + audioSessionId); + } + return keepSessionIdAudioTrack.getAudioSessionId(); + } + + private void updatePriorityTaskManagerForIsLoadingChange(boolean isLoading) { + if (priorityTaskManager != null) { + if (isLoading && !isPriorityTaskManagerRegistered) { + priorityTaskManager.add(C.PRIORITY_PLAYBACK); + isPriorityTaskManagerRegistered = true; + } else if (!isLoading && isPriorityTaskManagerRegistered) { + priorityTaskManager.remove(C.PRIORITY_PLAYBACK); + isPriorityTaskManagerRegistered = false; + } + } + } + + private static DeviceInfo createDeviceInfo(StreamVolumeManager streamVolumeManager) { + return new DeviceInfo( + DeviceInfo.PLAYBACK_TYPE_LOCAL, + streamVolumeManager.getMinVolume(), + streamVolumeManager.getMaxVolume()); + } + + private static int getPlayWhenReadyChangeReason(boolean playWhenReady, int playerCommand) { + return playWhenReady && playerCommand != AudioFocusManager.PLAYER_COMMAND_PLAY_WHEN_READY + ? 
PLAY_WHEN_READY_CHANGE_REASON_AUDIO_FOCUS_LOSS + : PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST; + } + + private static boolean isPlaying(PlaybackInfo playbackInfo) { + return playbackInfo.playbackState == Player.STATE_READY + && playbackInfo.playWhenReady + && playbackInfo.playbackSuppressionReason == PLAYBACK_SUPPRESSION_REASON_NONE; + } + + private static final class MediaSourceHolderSnapshot implements MediaSourceInfoHolder { + + private final Object uid; + + private Timeline timeline; + + public MediaSourceHolderSnapshot(Object uid, Timeline timeline) { + this.uid = uid; + this.timeline = timeline; + } + + @Override + public Object getUid() { + return uid; + } + + @Override + public Timeline getTimeline() { + return timeline; + } + } + + private final class ComponentListener + implements VideoRendererEventListener, + AudioRendererEventListener, + TextOutput, + MetadataOutput, + SurfaceHolder.Callback, + TextureView.SurfaceTextureListener, + SphericalGLSurfaceView.VideoSurfaceListener, + AudioFocusManager.PlayerControl, + AudioBecomingNoisyManager.EventListener, + StreamVolumeManager.Listener, + AudioOffloadListener { + + // VideoRendererEventListener implementation + + @Override + public void onVideoEnabled(DecoderCounters counters) { + videoDecoderCounters = counters; + analyticsCollector.onVideoEnabled(counters); + } + + @Override + public void onVideoDecoderInitialized( + String decoderName, long initializedTimestampMs, long initializationDurationMs) { + analyticsCollector.onVideoDecoderInitialized( + decoderName, initializedTimestampMs, initializationDurationMs); + } + + @Override + public void onVideoInputFormatChanged( + Format format, @Nullable DecoderReuseEvaluation decoderReuseEvaluation) { + videoFormat = format; + analyticsCollector.onVideoInputFormatChanged(format, decoderReuseEvaluation); + } + + @Override + public void onDroppedFrames(int count, long elapsed) { + analyticsCollector.onDroppedFrames(count, elapsed); + } + + @Override + public void onVideoSizeChanged(VideoSize newVideoSize) { + videoSize = newVideoSize; + listeners.sendEvent( + EVENT_VIDEO_SIZE_CHANGED, listener -> listener.onVideoSizeChanged(newVideoSize)); + } + + @Override + public void onRenderedFirstFrame(Object output, long renderTimeMs) { + analyticsCollector.onRenderedFirstFrame(output, renderTimeMs); + if (videoOutput == output) { + listeners.sendEvent(EVENT_RENDERED_FIRST_FRAME, Listener::onRenderedFirstFrame); + } + } + + @Override + public void onVideoDecoderReleased(String decoderName) { + analyticsCollector.onVideoDecoderReleased(decoderName); + } + + @Override + public void onVideoDisabled(DecoderCounters counters) { + analyticsCollector.onVideoDisabled(counters); + videoFormat = null; + videoDecoderCounters = null; + } + + @Override + public void onVideoFrameProcessingOffset(long totalProcessingOffsetUs, int frameCount) { + analyticsCollector.onVideoFrameProcessingOffset(totalProcessingOffsetUs, frameCount); + } + + @Override + public void onVideoCodecError(Exception videoCodecError) { + analyticsCollector.onVideoCodecError(videoCodecError); + } + + // AudioRendererEventListener implementation + + @Override + public void onAudioEnabled(DecoderCounters counters) { + audioDecoderCounters = counters; + analyticsCollector.onAudioEnabled(counters); + } + + @Override + public void onAudioDecoderInitialized( + String decoderName, long initializedTimestampMs, long initializationDurationMs) { + analyticsCollector.onAudioDecoderInitialized( + decoderName, initializedTimestampMs, 
initializationDurationMs); + } + + @Override + public void onAudioInputFormatChanged( + Format format, @Nullable DecoderReuseEvaluation decoderReuseEvaluation) { + audioFormat = format; + analyticsCollector.onAudioInputFormatChanged(format, decoderReuseEvaluation); + } + + @Override + public void onAudioPositionAdvancing(long playoutStartSystemTimeMs) { + analyticsCollector.onAudioPositionAdvancing(playoutStartSystemTimeMs); + } + + @Override + public void onAudioUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) { + analyticsCollector.onAudioUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs); + } + + @Override + public void onAudioDecoderReleased(String decoderName) { + analyticsCollector.onAudioDecoderReleased(decoderName); + } + + @Override + public void onAudioDisabled(DecoderCounters counters) { + analyticsCollector.onAudioDisabled(counters); + audioFormat = null; + audioDecoderCounters = null; + } + + @Override + public void onSkipSilenceEnabledChanged(boolean newSkipSilenceEnabled) { + if (skipSilenceEnabled == newSkipSilenceEnabled) { + return; + } + skipSilenceEnabled = newSkipSilenceEnabled; + listeners.sendEvent( + EVENT_SKIP_SILENCE_ENABLED_CHANGED, + listener -> listener.onSkipSilenceEnabledChanged(newSkipSilenceEnabled)); + } + + @Override + public void onAudioSinkError(Exception audioSinkError) { + analyticsCollector.onAudioSinkError(audioSinkError); + } + + @Override + public void onAudioCodecError(Exception audioCodecError) { + analyticsCollector.onAudioCodecError(audioCodecError); + } + + // TextOutput implementation + @Override + public void onCues(List cues) { + listeners.sendEvent(EVENT_CUES, listener -> listener.onCues(cues)); + } + + @Override + public void onCues(CueGroup cueGroup) { + currentCueGroup = cueGroup; + listeners.sendEvent(EVENT_CUES, listener -> listener.onCues(cueGroup)); + } + + // MetadataOutput implementation + + @Override + public void onMetadata(Metadata metadata) { + staticAndDynamicMediaMetadata = + staticAndDynamicMediaMetadata.buildUpon().populateFromMetadata(metadata).build(); + MediaMetadata newMediaMetadata = buildUpdatedMediaMetadata(); + if (!newMediaMetadata.equals(mediaMetadata)) { + mediaMetadata = newMediaMetadata; + listeners.queueEvent( + EVENT_MEDIA_METADATA_CHANGED, + listener -> listener.onMediaMetadataChanged(mediaMetadata)); + } + listeners.queueEvent(EVENT_METADATA, listener -> listener.onMetadata(metadata)); + listeners.flushEvents(); + } + + // SurfaceHolder.Callback implementation + + @Override + public void surfaceCreated(SurfaceHolder holder) { + if (surfaceHolderSurfaceIsVideoOutput) { + setVideoOutputInternal(holder.getSurface()); + } + } + + @Override + public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { + maybeNotifySurfaceSizeChanged(width, height); + } + + @Override + public void surfaceDestroyed(SurfaceHolder holder) { + if (surfaceHolderSurfaceIsVideoOutput) { + setVideoOutputInternal(/* videoOutput= */ null); + } + maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); + } + + // TextureView.SurfaceTextureListener implementation + + @Override + public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) { + setSurfaceTextureInternal(surfaceTexture); + maybeNotifySurfaceSizeChanged(width, height); + } + + @Override + public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) { + maybeNotifySurfaceSizeChanged(width, height); + } + + @Override + public boolean 
onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) { + for (com.google.android.exoplayer2.video.VideoListener videoListener : videoListeners) { + if (videoListener.onSurfaceDestroyed(surfaceTexture)) { + return false; + } + } + setVideoOutputInternal(/* videoOutput= */ null); + maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); + return true; + } + + @Override + public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) { + for (com.google.android.exoplayer2.video.VideoListener videoListener : videoListeners) { + videoListener.onSurfaceTextureUpdated(surfaceTexture); + } + // Do nothing. + } + + // SphericalGLSurfaceView.VideoSurfaceListener + + @Override + public void onVideoSurfaceCreated(Surface surface) { + setVideoOutputInternal(surface); + } + + @Override + public void onVideoSurfaceDestroyed(Surface surface) { + setVideoOutputInternal(/* videoOutput= */ null); + } + + // AudioFocusManager.PlayerControl implementation + + @Override + public void setVolumeMultiplier(float volumeMultiplier) { + sendVolumeToRenderers(); + } + + @Override + public void executePlayerCommand(@AudioFocusManager.PlayerCommand int playerCommand) { + boolean playWhenReady = getPlayWhenReady(); + updatePlayWhenReady( + playWhenReady, playerCommand, getPlayWhenReadyChangeReason(playWhenReady, playerCommand)); + } + + // AudioBecomingNoisyManager.EventListener implementation. + + @Override + public void onAudioBecomingNoisy() { + updatePlayWhenReady( + /* playWhenReady= */ false, + AudioFocusManager.PLAYER_COMMAND_DO_NOT_PLAY, + Player.PLAY_WHEN_READY_CHANGE_REASON_AUDIO_BECOMING_NOISY); + } + + // StreamVolumeManager.Listener implementation. + + @Override + public void onStreamTypeChanged(@C.StreamType int streamType) { + DeviceInfo newDeviceInfo = createDeviceInfo(streamVolumeManager); + if (!newDeviceInfo.equals(deviceInfo)) { + deviceInfo = newDeviceInfo; + listeners.sendEvent( + EVENT_DEVICE_INFO_CHANGED, listener -> listener.onDeviceInfoChanged(newDeviceInfo)); + } + } + + @Override + public void onStreamVolumeChanged(int streamVolume, boolean streamMuted) { + listeners.sendEvent( + EVENT_DEVICE_VOLUME_CHANGED, + listener -> listener.onDeviceVolumeChanged(streamVolume, streamMuted)); + } + + // Player.AudioOffloadListener implementation. + + @Override + public void onExperimentalSleepingForOffloadChanged(boolean sleepingForOffload) { + updateWakeAndWifiLock(); + } + } + + /** Listeners that are called on the playback thread. 
*/ + private static final class FrameMetadataListener + implements VideoFrameMetadataListener, CameraMotionListener, PlayerMessage.Target { + + public static final @MessageType int MSG_SET_VIDEO_FRAME_METADATA_LISTENER = + Renderer.MSG_SET_VIDEO_FRAME_METADATA_LISTENER; + + public static final @MessageType int MSG_SET_CAMERA_MOTION_LISTENER = + Renderer.MSG_SET_CAMERA_MOTION_LISTENER; + + public static final @MessageType int MSG_SET_SPHERICAL_SURFACE_VIEW = Renderer.MSG_CUSTOM_BASE; + + @Nullable private VideoFrameMetadataListener videoFrameMetadataListener; + @Nullable private CameraMotionListener cameraMotionListener; + @Nullable private VideoFrameMetadataListener internalVideoFrameMetadataListener; + @Nullable private CameraMotionListener internalCameraMotionListener; + + @Override + public void handleMessage(@MessageType int messageType, @Nullable Object message) { + switch (messageType) { + case MSG_SET_VIDEO_FRAME_METADATA_LISTENER: + videoFrameMetadataListener = (VideoFrameMetadataListener) message; + break; + case MSG_SET_CAMERA_MOTION_LISTENER: + cameraMotionListener = (CameraMotionListener) message; + break; + case MSG_SET_SPHERICAL_SURFACE_VIEW: + @Nullable SphericalGLSurfaceView surfaceView = (SphericalGLSurfaceView) message; + if (surfaceView == null) { + internalVideoFrameMetadataListener = null; + internalCameraMotionListener = null; + } else { + internalVideoFrameMetadataListener = surfaceView.getVideoFrameMetadataListener(); + internalCameraMotionListener = surfaceView.getCameraMotionListener(); + } + break; + case Renderer.MSG_SET_AUDIO_ATTRIBUTES: + case Renderer.MSG_SET_AUDIO_SESSION_ID: + case Renderer.MSG_SET_AUX_EFFECT_INFO: + case Renderer.MSG_SET_CHANGE_FRAME_RATE_STRATEGY: + case Renderer.MSG_SET_SCALING_MODE: + case Renderer.MSG_SET_SKIP_SILENCE_ENABLED: + case Renderer.MSG_SET_VIDEO_OUTPUT: + case Renderer.MSG_SET_VOLUME: + case Renderer.MSG_SET_WAKEUP_LISTENER: + default: + break; + } + } + + // VideoFrameMetadataListener + + @Override + public void onVideoFrameAboutToBeRendered( + long presentationTimeUs, + long releaseTimeNs, + Format format, + @Nullable MediaFormat mediaFormat) { + if (internalVideoFrameMetadataListener != null) { + internalVideoFrameMetadataListener.onVideoFrameAboutToBeRendered( + presentationTimeUs, releaseTimeNs, format, mediaFormat); + } + if (videoFrameMetadataListener != null) { + videoFrameMetadataListener.onVideoFrameAboutToBeRendered( + presentationTimeUs, releaseTimeNs, format, mediaFormat); + } + } + + // CameraMotionListener + + @Override + public void onCameraMotion(long timeUs, float[] rotation) { + if (internalCameraMotionListener != null) { + internalCameraMotionListener.onCameraMotion(timeUs, rotation); + } + if (cameraMotionListener != null) { + cameraMotionListener.onCameraMotion(timeUs, rotation); + } + } + + @Override + public void onCameraMotionReset() { + if (internalCameraMotionListener != null) { + internalCameraMotionListener.onCameraMotionReset(); + } + if (cameraMotionListener != null) { + cameraMotionListener.onCameraMotionReset(); + } + } + } + + @RequiresApi(31) + private static final class Api31 { + private Api31() {} + + @DoNotInline + public static PlayerId registerMediaMetricsListener( + Context context, ExoPlayerImpl player, boolean usePlatformDiagnostics) { + @Nullable MediaMetricsListener listener = MediaMetricsListener.create(context); + if (listener == null) { + Log.w(TAG, "MediaMetricsService unavailable."); + return new PlayerId(LogSessionId.LOG_SESSION_ID_NONE); + } + if (usePlatformDiagnostics) { 
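            // Only attach the MediaMetricsListener to the player when the app opted into
            // platform diagnostics; the LogSessionId below is propagated in either case.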
+ player.addAnalyticsListener(listener); + } + return new PlayerId(listener.getLogSessionId()); } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayerImplInternal.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayerImplInternal.java index ddc54e9e6e..440ed8f660 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayerImplInternal.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayerImplInternal.java @@ -15,6 +15,11 @@ */ package com.google.android.exoplayer2; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.max; +import static java.lang.Math.min; + import android.os.Handler; import android.os.HandlerThread; import android.os.Looper; @@ -22,28 +27,46 @@ import android.os.Process; import android.os.SystemClock; import android.util.Pair; +import androidx.annotation.CheckResult; import androidx.annotation.Nullable; -import com.google.android.exoplayer2.DefaultMediaClock.PlaybackParameterListener; +import com.google.android.exoplayer2.DefaultMediaClock.PlaybackParametersListener; +import com.google.android.exoplayer2.PlaybackException.ErrorCode; import com.google.android.exoplayer2.Player.DiscontinuityReason; +import com.google.android.exoplayer2.Player.PlayWhenReadyChangeReason; +import com.google.android.exoplayer2.Player.PlaybackSuppressionReason; +import com.google.android.exoplayer2.Player.RepeatMode; +import com.google.android.exoplayer2.analytics.AnalyticsCollector; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.drm.DrmSession; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.metadata.MetadataRenderer; +import com.google.android.exoplayer2.source.BehindLiveWindowException; import com.google.android.exoplayer2.source.MediaPeriod; -import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; -import com.google.android.exoplayer2.source.MediaSource.MediaSourceCaller; import com.google.android.exoplayer2.source.SampleStream; +import com.google.android.exoplayer2.source.ShuffleOrder; import com.google.android.exoplayer2.source.TrackGroupArray; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.source.ads.AdPlaybackState; +import com.google.android.exoplayer2.text.TextRenderer; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.trackselection.TrackSelector; import com.google.android.exoplayer2.trackselection.TrackSelectorResult; import com.google.android.exoplayer2.upstream.BandwidthMeter; +import com.google.android.exoplayer2.upstream.DataSourceException; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Clock; import com.google.android.exoplayer2.util.HandlerWrapper; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.TraceUtil; import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Supplier; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Sets; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; +import java.util.List; +import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; /** Implements the internal 
behavior of {@link ExoPlayerImpl}. */ @@ -51,15 +74,61 @@ implements Handler.Callback, MediaPeriod.Callback, TrackSelector.InvalidationListener, - MediaSourceCaller, - PlaybackParameterListener, + MediaSourceList.MediaSourceListInfoRefreshListener, + PlaybackParametersListener, PlayerMessage.Sender { private static final String TAG = "ExoPlayerImplInternal"; - // External messages - public static final int MSG_PLAYBACK_INFO_CHANGED = 0; - public static final int MSG_PLAYBACK_PARAMETERS_CHANGED = 1; + public static final class PlaybackInfoUpdate { + + private boolean hasPendingChange; + + public PlaybackInfo playbackInfo; + public int operationAcks; + public boolean positionDiscontinuity; + public @DiscontinuityReason int discontinuityReason; + public boolean hasPlayWhenReadyChangeReason; + public @PlayWhenReadyChangeReason int playWhenReadyChangeReason; + + public PlaybackInfoUpdate(PlaybackInfo playbackInfo) { + this.playbackInfo = playbackInfo; + } + + public void incrementPendingOperationAcks(int operationAcks) { + hasPendingChange |= operationAcks > 0; + this.operationAcks += operationAcks; + } + + public void setPlaybackInfo(PlaybackInfo playbackInfo) { + hasPendingChange |= this.playbackInfo != playbackInfo; + this.playbackInfo = playbackInfo; + } + + public void setPositionDiscontinuity(@DiscontinuityReason int discontinuityReason) { + if (positionDiscontinuity + && this.discontinuityReason != Player.DISCONTINUITY_REASON_INTERNAL) { + // We always prefer non-internal discontinuity reasons. We also assume that we won't report + // more than one non-internal discontinuity per message iteration. + Assertions.checkArgument(discontinuityReason == Player.DISCONTINUITY_REASON_INTERNAL); + return; + } + hasPendingChange = true; + positionDiscontinuity = true; + this.discontinuityReason = discontinuityReason; + } + + public void setPlayWhenReadyChangeReason( + @PlayWhenReadyChangeReason int playWhenReadyChangeReason) { + hasPendingChange = true; + this.hasPlayWhenReadyChangeReason = true; + this.playWhenReadyChangeReason = playWhenReadyChangeReason; + } + } + + public interface PlaybackInfoUpdateListener { + void onPlaybackInfoUpdate(ExoPlayerImplInternal.PlaybackInfoUpdate playbackInfo); + } // Internal messages private static final int MSG_PREPARE = 0; @@ -70,58 +139,86 @@ private static final int MSG_SET_SEEK_PARAMETERS = 5; private static final int MSG_STOP = 6; private static final int MSG_RELEASE = 7; - private static final int MSG_REFRESH_SOURCE_INFO = 8; - private static final int MSG_PERIOD_PREPARED = 9; - private static final int MSG_SOURCE_CONTINUE_LOADING_REQUESTED = 10; - private static final int MSG_TRACK_SELECTION_INVALIDATED = 11; - private static final int MSG_SET_REPEAT_MODE = 12; - private static final int MSG_SET_SHUFFLE_ENABLED = 13; - private static final int MSG_SET_FOREGROUND_MODE = 14; - private static final int MSG_SEND_MESSAGE = 15; - private static final int MSG_SEND_MESSAGE_TO_TARGET_THREAD = 16; - private static final int MSG_PLAYBACK_PARAMETERS_CHANGED_INTERNAL = 17; + private static final int MSG_PERIOD_PREPARED = 8; + private static final int MSG_SOURCE_CONTINUE_LOADING_REQUESTED = 9; + private static final int MSG_TRACK_SELECTION_INVALIDATED = 10; + private static final int MSG_SET_REPEAT_MODE = 11; + private static final int MSG_SET_SHUFFLE_ENABLED = 12; + private static final int MSG_SET_FOREGROUND_MODE = 13; + private static final int MSG_SEND_MESSAGE = 14; + private static final int MSG_SEND_MESSAGE_TO_TARGET_THREAD = 15; + private static final int 
MSG_PLAYBACK_PARAMETERS_CHANGED_INTERNAL = 16; + private static final int MSG_SET_MEDIA_SOURCES = 17; + private static final int MSG_ADD_MEDIA_SOURCES = 18; + private static final int MSG_MOVE_MEDIA_SOURCES = 19; + private static final int MSG_REMOVE_MEDIA_SOURCES = 20; + private static final int MSG_SET_SHUFFLE_ORDER = 21; + private static final int MSG_PLAYLIST_UPDATE_REQUESTED = 22; + private static final int MSG_SET_PAUSE_AT_END_OF_WINDOW = 23; + private static final int MSG_SET_OFFLOAD_SCHEDULING_ENABLED = 24; + private static final int MSG_ATTEMPT_RENDERER_ERROR_RECOVERY = 25; private static final int ACTIVE_INTERVAL_MS = 10; private static final int IDLE_INTERVAL_MS = 1000; + /** + * Duration for which the player needs to appear stuck before the playback is failed on the + * assumption that no further progress will be made. To appear stuck, the player's renderers must + * not be ready, there must be more media available to load, and the LoadControl must be refusing + * to load it. + */ + private static final long PLAYBACK_STUCK_AFTER_MS = 4000; + /** + * Threshold under which a buffered duration is assumed to be empty. We cannot use zero to account + * for buffers currently hold but not played by the renderer. + */ + private static final long PLAYBACK_BUFFER_EMPTY_THRESHOLD_US = 500_000; private final Renderer[] renderers; + private final Set renderersToReset; private final RendererCapabilities[] rendererCapabilities; private final TrackSelector trackSelector; private final TrackSelectorResult emptyTrackSelectorResult; private final LoadControl loadControl; private final BandwidthMeter bandwidthMeter; private final HandlerWrapper handler; - private final HandlerThread internalPlaybackThread; - private final Handler eventHandler; + @Nullable private final HandlerThread internalPlaybackThread; + private final Looper playbackLooper; private final Timeline.Window window; private final Timeline.Period period; private final long backBufferDurationUs; private final boolean retainBackBufferFromKeyframe; private final DefaultMediaClock mediaClock; - private final PlaybackInfoUpdate playbackInfoUpdate; private final ArrayList pendingMessages; private final Clock clock; + private final PlaybackInfoUpdateListener playbackInfoUpdateListener; private final MediaPeriodQueue queue; + private final MediaSourceList mediaSourceList; + private final LivePlaybackSpeedControl livePlaybackSpeedControl; + private final long releaseTimeoutMs; @SuppressWarnings("unused") private SeekParameters seekParameters; private PlaybackInfo playbackInfo; - private MediaSource mediaSource; - private Renderer[] enabledRenderers; + private PlaybackInfoUpdate playbackInfoUpdate; private boolean released; - private boolean playWhenReady; - private boolean rebuffering; + private boolean pauseAtEndOfWindow; + private boolean pendingPauseAtEndOfPeriod; + private boolean isRebuffering; private boolean shouldContinueLoading; - @Player.RepeatMode private int repeatMode; + private @Player.RepeatMode int repeatMode; private boolean shuffleModeEnabled; private boolean foregroundMode; - - private int pendingPrepareCount; - private SeekPosition pendingInitialSeekPosition; + private boolean requestForRendererSleep; + private boolean offloadSchedulingEnabled; + private int enabledRendererCount; + @Nullable private SeekPosition pendingInitialSeekPosition; private long rendererPositionUs; private int nextPendingMessageIndexHint; private boolean deliverPendingMessageAtStartPositionRequired; + @Nullable private ExoPlaybackException 
pendingRecoverableRendererError; + private long setForegroundModeTimeoutMs; + private long playbackMaybeBecameStuckAtMs; public ExoPlayerImplInternal( Renderer[] renderers, @@ -129,59 +226,98 @@ public ExoPlayerImplInternal( TrackSelectorResult emptyTrackSelectorResult, LoadControl loadControl, BandwidthMeter bandwidthMeter, - boolean playWhenReady, @Player.RepeatMode int repeatMode, boolean shuffleModeEnabled, - Handler eventHandler, - Clock clock) { + AnalyticsCollector analyticsCollector, + SeekParameters seekParameters, + LivePlaybackSpeedControl livePlaybackSpeedControl, + long releaseTimeoutMs, + boolean pauseAtEndOfWindow, + Looper applicationLooper, + Clock clock, + PlaybackInfoUpdateListener playbackInfoUpdateListener, + PlayerId playerId, + Looper playbackLooper) { + this.playbackInfoUpdateListener = playbackInfoUpdateListener; this.renderers = renderers; this.trackSelector = trackSelector; this.emptyTrackSelectorResult = emptyTrackSelectorResult; this.loadControl = loadControl; this.bandwidthMeter = bandwidthMeter; - this.playWhenReady = playWhenReady; this.repeatMode = repeatMode; this.shuffleModeEnabled = shuffleModeEnabled; - this.eventHandler = eventHandler; + this.seekParameters = seekParameters; + this.livePlaybackSpeedControl = livePlaybackSpeedControl; + this.releaseTimeoutMs = releaseTimeoutMs; + this.setForegroundModeTimeoutMs = releaseTimeoutMs; + this.pauseAtEndOfWindow = pauseAtEndOfWindow; this.clock = clock; - this.queue = new MediaPeriodQueue(); + playbackMaybeBecameStuckAtMs = C.TIME_UNSET; backBufferDurationUs = loadControl.getBackBufferDurationUs(); retainBackBufferFromKeyframe = loadControl.retainBackBufferFromKeyframe(); - seekParameters = SeekParameters.DEFAULT; - playbackInfo = - PlaybackInfo.createDummy(/* startPositionUs= */ C.TIME_UNSET, emptyTrackSelectorResult); - playbackInfoUpdate = new PlaybackInfoUpdate(); + playbackInfo = PlaybackInfo.createDummy(emptyTrackSelectorResult); + playbackInfoUpdate = new PlaybackInfoUpdate(playbackInfo); rendererCapabilities = new RendererCapabilities[renderers.length]; for (int i = 0; i < renderers.length; i++) { - renderers[i].setIndex(i); + renderers[i].init(/* index= */ i, playerId); rendererCapabilities[i] = renderers[i].getCapabilities(); } mediaClock = new DefaultMediaClock(this, clock); pendingMessages = new ArrayList<>(); - enabledRenderers = new Renderer[0]; + renderersToReset = Sets.newIdentityHashSet(); window = new Timeline.Window(); period = new Timeline.Period(); trackSelector.init(/* listener= */ this, bandwidthMeter); - // Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can - // not normally change to this priority" is incorrect. - internalPlaybackThread = - new HandlerThread("ExoPlayerImplInternal:Handler", Process.THREAD_PRIORITY_AUDIO); - internalPlaybackThread.start(); - handler = clock.createHandler(internalPlaybackThread.getLooper(), this); deliverPendingMessageAtStartPositionRequired = true; + + HandlerWrapper eventHandler = clock.createHandler(applicationLooper, /* callback= */ null); + queue = new MediaPeriodQueue(analyticsCollector, eventHandler); + mediaSourceList = + new MediaSourceList(/* listener= */ this, analyticsCollector, eventHandler, playerId); + + if (playbackLooper != null) { + internalPlaybackThread = null; + this.playbackLooper = playbackLooper; + } else { + // Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can + // not normally change to this priority" is incorrect. 
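      // With no caller-supplied playback Looper, a dedicated "ExoPlayer:Playback" HandlerThread
      // is started at audio priority; otherwise the provided Looper is reused and
      // internalPlaybackThread stays null.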
+ internalPlaybackThread = + new HandlerThread("ExoPlayer:Playback", Process.THREAD_PRIORITY_AUDIO); + internalPlaybackThread.start(); + this.playbackLooper = internalPlaybackThread.getLooper(); + } + handler = clock.createHandler(this.playbackLooper, this); + } + + public void experimentalSetForegroundModeTimeoutMs(long setForegroundModeTimeoutMs) { + this.setForegroundModeTimeoutMs = setForegroundModeTimeoutMs; + } + + public void experimentalSetOffloadSchedulingEnabled(boolean offloadSchedulingEnabled) { + handler + .obtainMessage( + MSG_SET_OFFLOAD_SCHEDULING_ENABLED, offloadSchedulingEnabled ? 1 : 0, /* unused */ 0) + .sendToTarget(); + } + + public void prepare() { + handler.obtainMessage(MSG_PREPARE).sendToTarget(); } - public void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetState) { + public void setPlayWhenReady( + boolean playWhenReady, @PlaybackSuppressionReason int playbackSuppressionReason) { handler - .obtainMessage(MSG_PREPARE, resetPosition ? 1 : 0, resetState ? 1 : 0, mediaSource) + .obtainMessage(MSG_SET_PLAY_WHEN_READY, playWhenReady ? 1 : 0, playbackSuppressionReason) .sendToTarget(); } - public void setPlayWhenReady(boolean playWhenReady) { - handler.obtainMessage(MSG_SET_PLAY_WHEN_READY, playWhenReady ? 1 : 0, 0).sendToTarget(); + public void setPauseAtEndOfWindow(boolean pauseAtEndOfWindow) { + handler + .obtainMessage(MSG_SET_PAUSE_AT_END_OF_WINDOW, pauseAtEndOfWindow ? 1 : 0, /* ignored */ 0) + .sendToTarget(); } public void setRepeatMode(@Player.RepeatMode int repeatMode) { @@ -206,13 +342,57 @@ public void setSeekParameters(SeekParameters seekParameters) { handler.obtainMessage(MSG_SET_SEEK_PARAMETERS, seekParameters).sendToTarget(); } - public void stop(boolean reset) { - handler.obtainMessage(MSG_STOP, reset ? 
1 : 0, 0).sendToTarget(); + public void stop() { + handler.obtainMessage(MSG_STOP).sendToTarget(); + } + + public void setMediaSources( + List mediaSources, + int windowIndex, + long positionUs, + ShuffleOrder shuffleOrder) { + handler + .obtainMessage( + MSG_SET_MEDIA_SOURCES, + new MediaSourceListUpdateMessage(mediaSources, shuffleOrder, windowIndex, positionUs)) + .sendToTarget(); + } + + public void addMediaSources( + int index, List mediaSources, ShuffleOrder shuffleOrder) { + handler + .obtainMessage( + MSG_ADD_MEDIA_SOURCES, + index, + /* ignored */ 0, + new MediaSourceListUpdateMessage( + mediaSources, + shuffleOrder, + /* windowIndex= */ C.INDEX_UNSET, + /* positionUs= */ C.TIME_UNSET)) + .sendToTarget(); + } + + public void removeMediaSources(int fromIndex, int toIndex, ShuffleOrder shuffleOrder) { + handler + .obtainMessage(MSG_REMOVE_MEDIA_SOURCES, fromIndex, toIndex, shuffleOrder) + .sendToTarget(); + } + + public void moveMediaSources( + int fromIndex, int toIndex, int newFromIndex, ShuffleOrder shuffleOrder) { + MoveMediaItemsMessage moveMediaItemsMessage = + new MoveMediaItemsMessage(fromIndex, toIndex, newFromIndex, shuffleOrder); + handler.obtainMessage(MSG_MOVE_MEDIA_SOURCES, moveMediaItemsMessage).sendToTarget(); + } + + public void setShuffleOrder(ShuffleOrder shuffleOrder) { + handler.obtainMessage(MSG_SET_SHUFFLE_ORDER, shuffleOrder).sendToTarget(); } @Override public synchronized void sendMessage(PlayerMessage message) { - if (released || !internalPlaybackThread.isAlive()) { + if (released || !playbackLooper.getThread().isAlive()) { Log.w(TAG, "Ignoring messages sent after release."); message.markAsProcessed(/* isDelivered= */ false); return; @@ -220,62 +400,52 @@ public synchronized void sendMessage(PlayerMessage message) { handler.obtainMessage(MSG_SEND_MESSAGE, message).sendToTarget(); } - public synchronized void setForegroundMode(boolean foregroundMode) { - if (released || !internalPlaybackThread.isAlive()) { - return; + /** + * Sets the foreground mode. + * + * @param foregroundMode Whether foreground mode should be enabled. + * @return Whether the operations succeeded. If false, the operation timed out. + */ + public synchronized boolean setForegroundMode(boolean foregroundMode) { + if (released || !playbackLooper.getThread().isAlive()) { + return true; } if (foregroundMode) { handler.obtainMessage(MSG_SET_FOREGROUND_MODE, /* foregroundMode */ 1, 0).sendToTarget(); + return true; } else { AtomicBoolean processedFlag = new AtomicBoolean(); handler .obtainMessage(MSG_SET_FOREGROUND_MODE, /* foregroundMode */ 0, 0, processedFlag) .sendToTarget(); - boolean wasInterrupted = false; - while (!processedFlag.get()) { - try { - wait(); - } catch (InterruptedException e) { - wasInterrupted = true; - } - } - if (wasInterrupted) { - // Restore the interrupted status. - Thread.currentThread().interrupt(); - } + waitUninterruptibly(/* condition= */ processedFlag::get, setForegroundModeTimeoutMs); + return processedFlag.get(); } } - public synchronized void release() { - if (released || !internalPlaybackThread.isAlive()) { - return; + /** + * Releases the player. + * + * @return Whether the release succeeded. If false, the release timed out. 
+ */ + public synchronized boolean release() { + if (released || !playbackLooper.getThread().isAlive()) { + return true; } handler.sendEmptyMessage(MSG_RELEASE); - boolean wasInterrupted = false; - while (!released) { - try { - wait(); - } catch (InterruptedException e) { - wasInterrupted = true; - } - } - if (wasInterrupted) { - // Restore the interrupted status. - Thread.currentThread().interrupt(); - } + waitUninterruptibly(/* condition= */ () -> released, releaseTimeoutMs); + return released; } public Looper getPlaybackLooper() { - return internalPlaybackThread.getLooper(); + return playbackLooper; } - // MediaSource.MediaSourceCaller implementation. + // Playlist.PlaylistInfoRefreshListener implementation. @Override - public void onSourceInfoRefreshed(MediaSource source, Timeline timeline) { - handler - .obtainMessage(MSG_REFRESH_SOURCE_INFO, new MediaSourceRefreshInfo(source, timeline)) - .sendToTarget(); + public void onPlaylistUpdateRequested() { + handler.sendEmptyMessage(MSG_PLAYLIST_UPDATE_REQUESTED); } // MediaPeriod.Callback implementation. @@ -297,11 +467,13 @@ public void onTrackSelectionsInvalidated() { handler.sendEmptyMessage(MSG_TRACK_SELECTION_INVALIDATED); } - // DefaultMediaClock.PlaybackParameterListener implementation. + // DefaultMediaClock.PlaybackParametersListener implementation. @Override - public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) { - sendPlaybackParametersChangedInternal(playbackParameters, /* acknowledgeCommand= */ false); + public void onPlaybackParametersChanged(PlaybackParameters newPlaybackParameters) { + handler + .obtainMessage(MSG_PLAYBACK_PARAMETERS_CHANGED_INTERNAL, newPlaybackParameters) + .sendToTarget(); } // Handler.Callback implementation. @@ -311,13 +483,14 @@ public boolean handleMessage(Message msg) { try { switch (msg.what) { case MSG_PREPARE: - prepareInternal( - (MediaSource) msg.obj, - /* resetPosition= */ msg.arg1 != 0, - /* resetState= */ msg.arg2 != 0); + prepareInternal(); break; case MSG_SET_PLAY_WHEN_READY: - setPlayWhenReadyInternal(msg.arg1 != 0); + setPlayWhenReadyInternal( + /* playWhenReady= */ msg.arg1 != 0, + /* playbackSuppressionReason= */ msg.arg2, + /* operationAck= */ true, + Player.PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST); break; case MSG_SET_REPEAT_MODE: setRepeatModeInternal(msg.arg1); @@ -342,17 +515,11 @@ public boolean handleMessage(Message msg) { /* foregroundMode= */ msg.arg1 != 0, /* processedFlag= */ (AtomicBoolean) msg.obj); break; case MSG_STOP: - stopInternal( - /* forceResetRenderers= */ false, - /* resetPositionAndState= */ msg.arg1 != 0, - /* acknowledgeStop= */ true); + stopInternal(/* forceResetRenderers= */ false, /* acknowledgeStop= */ true); break; case MSG_PERIOD_PREPARED: handlePeriodPrepared((MediaPeriod) msg.obj); break; - case MSG_REFRESH_SOURCE_INFO: - handleSourceInfoRefreshed((MediaSourceRefreshInfo) msg.obj); - break; case MSG_SOURCE_CONTINUE_LOADING_REQUESTED: handleContinueLoadingRequested((MediaPeriod) msg.obj); break; @@ -360,8 +527,7 @@ public boolean handleMessage(Message msg) { reselectTracksInternal(); break; case MSG_PLAYBACK_PARAMETERS_CHANGED_INTERNAL: - handlePlaybackParameters( - (PlaybackParameters) msg.obj, /* acknowledgeCommand= */ msg.arg1 != 0); + handlePlaybackParameters((PlaybackParameters) msg.obj, /* acknowledgeCommand= */ false); break; case MSG_SEND_MESSAGE: sendMessageInternal((PlayerMessage) msg.obj); @@ -369,6 +535,33 @@ public boolean handleMessage(Message msg) { case MSG_SEND_MESSAGE_TO_TARGET_THREAD: 
sendMessageToTargetThread((PlayerMessage) msg.obj); break; + case MSG_SET_MEDIA_SOURCES: + setMediaItemsInternal((MediaSourceListUpdateMessage) msg.obj); + break; + case MSG_ADD_MEDIA_SOURCES: + addMediaItemsInternal((MediaSourceListUpdateMessage) msg.obj, msg.arg1); + break; + case MSG_MOVE_MEDIA_SOURCES: + moveMediaItemsInternal((MoveMediaItemsMessage) msg.obj); + break; + case MSG_REMOVE_MEDIA_SOURCES: + removeMediaItemsInternal(msg.arg1, msg.arg2, (ShuffleOrder) msg.obj); + break; + case MSG_SET_SHUFFLE_ORDER: + setShuffleOrderInternal((ShuffleOrder) msg.obj); + break; + case MSG_PLAYLIST_UPDATE_REQUESTED: + mediaSourceListUpdateRequestedInternal(); + break; + case MSG_SET_PAUSE_AT_END_OF_WINDOW: + setPauseAtEndOfWindowInternal(msg.arg1 != 0); + break; + case MSG_SET_OFFLOAD_SCHEDULING_ENABLED: + setOffloadSchedulingEnabledInternal(msg.arg1 == 1); + break; + case MSG_ATTEMPT_RENDERER_ERROR_RECOVERY: + attemptRendererErrorRecovery(); + break; case MSG_RELEASE: releaseInternal(); // Return immediately to not send playback info updates after release. @@ -376,95 +569,229 @@ public boolean handleMessage(Message msg) { default: return false; } - maybeNotifyPlaybackInfoChanged(); } catch (ExoPlaybackException e) { - Log.e(TAG, getExoPlaybackExceptionMessage(e), e); - stopInternal( - /* forceResetRenderers= */ true, - /* resetPositionAndState= */ false, - /* acknowledgeStop= */ false); - playbackInfo = playbackInfo.copyWithPlaybackError(e); - maybeNotifyPlaybackInfoChanged(); + if (e.type == ExoPlaybackException.TYPE_RENDERER) { + @Nullable MediaPeriodHolder readingPeriod = queue.getReadingPeriod(); + if (readingPeriod != null) { + // We can assume that all renderer errors happen in the context of the reading period. See + // [internal: b/150584930#comment4] for exceptions that aren't covered by this assumption. + e = e.copyWithMediaPeriodId(readingPeriod.info.id); + } + } + if (e.isRecoverable && pendingRecoverableRendererError == null) { + Log.w(TAG, "Recoverable renderer error", e); + pendingRecoverableRendererError = e; + // Given that the player is now in an unhandled exception state, the error needs to be + // recovered or the player stopped before any other message is handled. + handler.sendMessageAtFrontOfQueue( + handler.obtainMessage(MSG_ATTEMPT_RENDERER_ERROR_RECOVERY, e)); + } else { + if (pendingRecoverableRendererError != null) { + pendingRecoverableRendererError.addSuppressed(e); + e = pendingRecoverableRendererError; + } + Log.e(TAG, "Playback error", e); + stopInternal(/* forceResetRenderers= */ true, /* acknowledgeStop= */ false); + playbackInfo = playbackInfo.copyWithPlaybackError(e); + } + } catch (DrmSession.DrmSessionException e) { + handleIoException(e, e.errorCode); + } catch (ParserException e) { + @ErrorCode int errorCode; + if (e.dataType == C.DATA_TYPE_MEDIA) { + errorCode = + e.contentIsMalformed + ? PlaybackException.ERROR_CODE_PARSING_CONTAINER_MALFORMED + : PlaybackException.ERROR_CODE_PARSING_CONTAINER_UNSUPPORTED; + } else if (e.dataType == C.DATA_TYPE_MANIFEST) { + errorCode = + e.contentIsMalformed + ? 
PlaybackException.ERROR_CODE_PARSING_MANIFEST_MALFORMED + : PlaybackException.ERROR_CODE_PARSING_MANIFEST_UNSUPPORTED; + } else { + errorCode = PlaybackException.ERROR_CODE_UNSPECIFIED; + } + handleIoException(e, errorCode); + } catch (DataSourceException e) { + handleIoException(e, e.reason); + } catch (BehindLiveWindowException e) { + handleIoException(e, PlaybackException.ERROR_CODE_BEHIND_LIVE_WINDOW); } catch (IOException e) { - Log.e(TAG, "Source error", e); - stopInternal( - /* forceResetRenderers= */ false, - /* resetPositionAndState= */ false, - /* acknowledgeStop= */ false); - playbackInfo = playbackInfo.copyWithPlaybackError(ExoPlaybackException.createForSource(e)); - maybeNotifyPlaybackInfoChanged(); - } catch (RuntimeException | OutOfMemoryError e) { - Log.e(TAG, "Internal runtime error", e); - ExoPlaybackException error = - e instanceof OutOfMemoryError - ? ExoPlaybackException.createForOutOfMemoryError((OutOfMemoryError) e) - : ExoPlaybackException.createForUnexpected((RuntimeException) e); - stopInternal( - /* forceResetRenderers= */ true, - /* resetPositionAndState= */ false, - /* acknowledgeStop= */ false); + handleIoException(e, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); + } catch (RuntimeException e) { + @ErrorCode int errorCode; + if (e instanceof IllegalStateException || e instanceof IllegalArgumentException) { + errorCode = PlaybackException.ERROR_CODE_FAILED_RUNTIME_CHECK; + } else { + errorCode = PlaybackException.ERROR_CODE_UNSPECIFIED; + } + ExoPlaybackException error = ExoPlaybackException.createForUnexpected(e, errorCode); + Log.e(TAG, "Playback error", error); + stopInternal(/* forceResetRenderers= */ true, /* acknowledgeStop= */ false); playbackInfo = playbackInfo.copyWithPlaybackError(error); - maybeNotifyPlaybackInfoChanged(); } + maybeNotifyPlaybackInfoChanged(); return true; } // Private methods. - private String getExoPlaybackExceptionMessage(ExoPlaybackException e) { - if (e.type != ExoPlaybackException.TYPE_RENDERER) { - return "Playback error."; + private void handleIoException(IOException e, @ErrorCode int errorCode) { + ExoPlaybackException error = ExoPlaybackException.createForSource(e, errorCode); + @Nullable MediaPeriodHolder playingPeriod = queue.getPlayingPeriod(); + if (playingPeriod != null) { + // We ensure that all IOException throwing methods are only executed for the playing period. + error = error.copyWithMediaPeriodId(playingPeriod.info.id); + } + Log.e(TAG, "Playback error", error); + stopInternal(/* forceResetRenderers= */ false, /* acknowledgeStop= */ false); + playbackInfo = playbackInfo.copyWithPlaybackError(error); + } + + /** + * Blocks the current thread until a condition becomes true or the specified amount of time has + * elapsed. + * + *
<p>
      If the current thread is interrupted while waiting for the condition to become true, this + * method will restore the interrupt after the condition became true or the operation times + * out. + * + * @param condition The condition. + * @param timeoutMs The time in milliseconds to wait for the condition to become true. + */ + private synchronized void waitUninterruptibly(Supplier condition, long timeoutMs) { + long deadlineMs = clock.elapsedRealtime() + timeoutMs; + long remainingMs = timeoutMs; + boolean wasInterrupted = false; + while (!condition.get() && remainingMs > 0) { + try { + clock.onThreadBlocked(); + wait(remainingMs); + } catch (InterruptedException e) { + wasInterrupted = true; + } + remainingMs = deadlineMs - clock.elapsedRealtime(); + } + if (wasInterrupted) { + // Restore the interrupted status. + Thread.currentThread().interrupt(); } - return "Renderer error: index=" - + e.rendererIndex - + ", type=" - + Util.getTrackTypeString(renderers[e.rendererIndex].getTrackType()) - + ", format=" - + e.rendererFormat - + ", rendererSupport=" - + RendererCapabilities.getFormatSupportString(e.rendererFormatSupport); } private void setState(int state) { if (playbackInfo.playbackState != state) { + if (state != Player.STATE_BUFFERING) { + playbackMaybeBecameStuckAtMs = C.TIME_UNSET; + } playbackInfo = playbackInfo.copyWithPlaybackState(state); } } private void maybeNotifyPlaybackInfoChanged() { - if (playbackInfoUpdate.hasPendingUpdate(playbackInfo)) { - eventHandler - .obtainMessage( - MSG_PLAYBACK_INFO_CHANGED, - playbackInfoUpdate.operationAcks, - playbackInfoUpdate.positionDiscontinuity - ? playbackInfoUpdate.discontinuityReason - : C.INDEX_UNSET, - playbackInfo) - .sendToTarget(); - playbackInfoUpdate.reset(playbackInfo); + playbackInfoUpdate.setPlaybackInfo(playbackInfo); + if (playbackInfoUpdate.hasPendingChange) { + playbackInfoUpdateListener.onPlaybackInfoUpdate(playbackInfoUpdate); + playbackInfoUpdate = new PlaybackInfoUpdate(playbackInfo); } } - private void prepareInternal(MediaSource mediaSource, boolean resetPosition, boolean resetState) { - pendingPrepareCount++; + private void prepareInternal() { + playbackInfoUpdate.incrementPendingOperationAcks(/* operationAcks= */ 1); resetInternal( /* resetRenderers= */ false, - /* releaseMediaSource= */ true, - resetPosition, - resetState, + /* resetPosition= */ false, + /* releaseMediaSourceList= */ false, /* resetError= */ true); loadControl.onPrepared(); - this.mediaSource = mediaSource; - setState(Player.STATE_BUFFERING); - mediaSource.prepareSource(/* caller= */ this, bandwidthMeter.getTransferListener()); + setState(playbackInfo.timeline.isEmpty() ? 
Player.STATE_ENDED : Player.STATE_BUFFERING); + mediaSourceList.prepare(bandwidthMeter.getTransferListener()); handler.sendEmptyMessage(MSG_DO_SOME_WORK); } - private void setPlayWhenReadyInternal(boolean playWhenReady) throws ExoPlaybackException { - rebuffering = false; - this.playWhenReady = playWhenReady; - if (!playWhenReady) { + private void setMediaItemsInternal(MediaSourceListUpdateMessage mediaSourceListUpdateMessage) + throws ExoPlaybackException { + playbackInfoUpdate.incrementPendingOperationAcks(/* operationAcks= */ 1); + if (mediaSourceListUpdateMessage.windowIndex != C.INDEX_UNSET) { + pendingInitialSeekPosition = + new SeekPosition( + new PlaylistTimeline( + mediaSourceListUpdateMessage.mediaSourceHolders, + mediaSourceListUpdateMessage.shuffleOrder), + mediaSourceListUpdateMessage.windowIndex, + mediaSourceListUpdateMessage.positionUs); + } + Timeline timeline = + mediaSourceList.setMediaSources( + mediaSourceListUpdateMessage.mediaSourceHolders, + mediaSourceListUpdateMessage.shuffleOrder); + handleMediaSourceListInfoRefreshed(timeline, /* isSourceRefresh= */ false); + } + + private void addMediaItemsInternal(MediaSourceListUpdateMessage addMessage, int insertionIndex) + throws ExoPlaybackException { + playbackInfoUpdate.incrementPendingOperationAcks(/* operationAcks= */ 1); + Timeline timeline = + mediaSourceList.addMediaSources( + insertionIndex == C.INDEX_UNSET ? mediaSourceList.getSize() : insertionIndex, + addMessage.mediaSourceHolders, + addMessage.shuffleOrder); + handleMediaSourceListInfoRefreshed(timeline, /* isSourceRefresh= */ false); + } + + private void moveMediaItemsInternal(MoveMediaItemsMessage moveMediaItemsMessage) + throws ExoPlaybackException { + playbackInfoUpdate.incrementPendingOperationAcks(/* operationAcks= */ 1); + Timeline timeline = + mediaSourceList.moveMediaSourceRange( + moveMediaItemsMessage.fromIndex, + moveMediaItemsMessage.toIndex, + moveMediaItemsMessage.newFromIndex, + moveMediaItemsMessage.shuffleOrder); + handleMediaSourceListInfoRefreshed(timeline, /* isSourceRefresh= */ false); + } + + private void removeMediaItemsInternal(int fromIndex, int toIndex, ShuffleOrder shuffleOrder) + throws ExoPlaybackException { + playbackInfoUpdate.incrementPendingOperationAcks(/* operationAcks= */ 1); + Timeline timeline = mediaSourceList.removeMediaSourceRange(fromIndex, toIndex, shuffleOrder); + handleMediaSourceListInfoRefreshed(timeline, /* isSourceRefresh= */ false); + } + + private void mediaSourceListUpdateRequestedInternal() throws ExoPlaybackException { + handleMediaSourceListInfoRefreshed( + mediaSourceList.createTimeline(), /* isSourceRefresh= */ true); + } + + private void setShuffleOrderInternal(ShuffleOrder shuffleOrder) throws ExoPlaybackException { + playbackInfoUpdate.incrementPendingOperationAcks(/* operationAcks= */ 1); + Timeline timeline = mediaSourceList.setShuffleOrder(shuffleOrder); + handleMediaSourceListInfoRefreshed(timeline, /* isSourceRefresh= */ false); + } + + private void notifyTrackSelectionPlayWhenReadyChanged(boolean playWhenReady) { + MediaPeriodHolder periodHolder = queue.getPlayingPeriod(); + while (periodHolder != null) { + for (ExoTrackSelection trackSelection : periodHolder.getTrackSelectorResult().selections) { + if (trackSelection != null) { + trackSelection.onPlayWhenReadyChanged(playWhenReady); + } + } + periodHolder = periodHolder.getNext(); + } + } + + private void setPlayWhenReadyInternal( + boolean playWhenReady, + @PlaybackSuppressionReason int playbackSuppressionReason, + boolean operationAck, + 
@Player.PlayWhenReadyChangeReason int reason) + throws ExoPlaybackException { + playbackInfoUpdate.incrementPendingOperationAcks(operationAck ? 1 : 0); + playbackInfoUpdate.setPlayWhenReadyChangeReason(reason); + playbackInfo = playbackInfo.copyWithPlayWhenReady(playWhenReady, playbackSuppressionReason); + isRebuffering = false; + notifyTrackSelectionPlayWhenReadyChanged(playWhenReady); + if (!shouldPlayWhenReady()) { stopRenderers(); updatePlaybackPositions(); } else { @@ -477,10 +804,33 @@ private void setPlayWhenReadyInternal(boolean playWhenReady) throws ExoPlaybackE } } + private void setPauseAtEndOfWindowInternal(boolean pauseAtEndOfWindow) + throws ExoPlaybackException { + this.pauseAtEndOfWindow = pauseAtEndOfWindow; + resetPendingPauseAtEndOfPeriod(); + if (pendingPauseAtEndOfPeriod && queue.getReadingPeriod() != queue.getPlayingPeriod()) { + // When pausing is required, we need to set the streams of the playing period final. If we + // already started reading the next period, we need to flush the renderers. + seekToCurrentPosition(/* sendDiscontinuity= */ true); + handleLoadingMediaPeriodChanged(/* loadingTrackSelectionChanged= */ false); + } + } + + private void setOffloadSchedulingEnabledInternal(boolean offloadSchedulingEnabled) { + if (offloadSchedulingEnabled == this.offloadSchedulingEnabled) { + return; + } + this.offloadSchedulingEnabled = offloadSchedulingEnabled; + if (!offloadSchedulingEnabled && playbackInfo.sleepingForOffload) { + // We need to wake the player up if offload scheduling is disabled and we are sleeping. + handler.sendEmptyMessage(MSG_DO_SOME_WORK); + } + } + private void setRepeatModeInternal(@Player.RepeatMode int repeatMode) throws ExoPlaybackException { this.repeatMode = repeatMode; - if (!queue.updateRepeatMode(repeatMode)) { + if (!queue.updateRepeatMode(playbackInfo.timeline, repeatMode)) { seekToCurrentPosition(/* sendDiscontinuity= */ true); } handleLoadingMediaPeriodChanged(/* loadingTrackSelectionChanged= */ false); @@ -489,7 +839,7 @@ private void setRepeatModeInternal(@Player.RepeatMode int repeatMode) private void setShuffleModeEnabledInternal(boolean shuffleModeEnabled) throws ExoPlaybackException { this.shuffleModeEnabled = shuffleModeEnabled; - if (!queue.updateShuffleModeEnabled(shuffleModeEnabled)) { + if (!queue.updateShuffleModeEnabled(playbackInfo.timeline, shuffleModeEnabled)) { seekToCurrentPosition(/* sendDiscontinuity= */ true); } handleLoadingMediaPeriodChanged(/* loadingTrackSelectionChanged= */ false); @@ -500,30 +850,46 @@ private void seekToCurrentPosition(boolean sendDiscontinuity) throws ExoPlayback // position of the playing period to make sure none of the removed period is played. 
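    // Note: the position fix-up below now goes through handlePositionDiscontinuity, carrying the
    // requested content position and the discontinuity start position, instead of the removed
    // copyWithNewPosition + setPositionDiscontinuity pair.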
MediaPeriodId periodId = queue.getPlayingPeriod().info.id; long newPositionUs = - seekToPeriodPosition(periodId, playbackInfo.positionUs, /* forceDisableRenderers= */ true); + seekToPeriodPosition( + periodId, + playbackInfo.positionUs, + /* forceDisableRenderers= */ true, + /* forceBufferingState= */ false); if (newPositionUs != playbackInfo.positionUs) { - playbackInfo = copyWithNewPosition(periodId, newPositionUs, playbackInfo.contentPositionUs); - if (sendDiscontinuity) { - playbackInfoUpdate.setPositionDiscontinuity(Player.DISCONTINUITY_REASON_INTERNAL); - } + playbackInfo = + handlePositionDiscontinuity( + periodId, + newPositionUs, + playbackInfo.requestedContentPositionUs, + playbackInfo.discontinuityStartPositionUs, + sendDiscontinuity, + Player.DISCONTINUITY_REASON_INTERNAL); } } private void startRenderers() throws ExoPlaybackException { - rebuffering = false; + isRebuffering = false; mediaClock.start(); - for (Renderer renderer : enabledRenderers) { - renderer.start(); + for (Renderer renderer : renderers) { + if (isRendererEnabled(renderer)) { + renderer.start(); + } } } private void stopRenderers() throws ExoPlaybackException { mediaClock.stop(); - for (Renderer renderer : enabledRenderers) { - ensureStopped(renderer); + for (Renderer renderer : renderers) { + if (isRendererEnabled(renderer)) { + ensureStopped(renderer); + } } } + private void attemptRendererErrorRecovery() throws ExoPlaybackException { + seekToCurrentPosition(/* sendDiscontinuity= */ true); + } + private void updatePlaybackPositions() throws ExoPlaybackException { MediaPeriodHolder playingPeriodHolder = queue.getPlayingPeriod(); if (playingPeriodHolder == null) { @@ -541,9 +907,13 @@ private void updatePlaybackPositions() throws ExoPlaybackException { // renderers are flushed. Only report the discontinuity externally if the position changed. if (discontinuityPositionUs != playbackInfo.positionUs) { playbackInfo = - copyWithNewPosition( - playbackInfo.periodId, discontinuityPositionUs, playbackInfo.contentPositionUs); - playbackInfoUpdate.setPositionDiscontinuity(Player.DISCONTINUITY_REASON_INTERNAL); + handlePositionDiscontinuity( + playbackInfo.periodId, + /* positionUs= */ discontinuityPositionUs, + playbackInfo.requestedContentPositionUs, + /* discontinuityStartPositionUs= */ discontinuityPositionUs, + /* reportDiscontinuity= */ true, + Player.DISCONTINUITY_REASON_INTERNAL); } } else { rendererPositionUs = @@ -558,16 +928,48 @@ private void updatePlaybackPositions() throws ExoPlaybackException { MediaPeriodHolder loadingPeriod = queue.getLoadingPeriod(); playbackInfo.bufferedPositionUs = loadingPeriod.getBufferedPositionUs(); playbackInfo.totalBufferedDurationUs = getTotalBufferedDurationUs(); + + // Adjust live playback speed to new position. 
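    // Only applies while playing a live, dynamic window at unit speed: the
    // LivePlaybackSpeedControl derives an adjusted speed from the current live offset and the
    // total buffered duration, and the media clock is updated when that speed changes.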
+ if (playbackInfo.playWhenReady + && playbackInfo.playbackState == Player.STATE_READY + && shouldUseLivePlaybackSpeedControl(playbackInfo.timeline, playbackInfo.periodId) + && playbackInfo.playbackParameters.speed == 1f) { + float adjustedSpeed = + livePlaybackSpeedControl.getAdjustedPlaybackSpeed( + getCurrentLiveOffsetUs(), getTotalBufferedDurationUs()); + if (mediaClock.getPlaybackParameters().speed != adjustedSpeed) { + mediaClock.setPlaybackParameters(playbackInfo.playbackParameters.withSpeed(adjustedSpeed)); + handlePlaybackParameters( + playbackInfo.playbackParameters, + /* currentPlaybackSpeed= */ mediaClock.getPlaybackParameters().speed, + /* updatePlaybackInfo= */ false, + /* acknowledgeCommand= */ false); + } + } + } + + private void notifyTrackSelectionRebuffer() { + MediaPeriodHolder periodHolder = queue.getPlayingPeriod(); + while (periodHolder != null) { + for (ExoTrackSelection trackSelection : periodHolder.getTrackSelectorResult().selections) { + if (trackSelection != null) { + trackSelection.onRebuffer(); + } + } + periodHolder = periodHolder.getNext(); + } } private void doSomeWork() throws ExoPlaybackException, IOException { long operationStartTimeMs = clock.uptimeMillis(); + // Remove other pending DO_SOME_WORK requests that are handled by this invocation. + handler.removeMessages(MSG_DO_SOME_WORK); + updatePeriods(); if (playbackInfo.playbackState == Player.STATE_IDLE || playbackInfo.playbackState == Player.STATE_ENDED) { - // Remove all messages. Prepare (in case of IDLE) or seek (in case of ENDED) will resume. - handler.removeMessages(MSG_DO_SOME_WORK); + // Nothing to do. Prepare (in case of IDLE) or seek (in case of ENDED) will resume. return; } @@ -590,7 +992,7 @@ private void doSomeWork() throws ExoPlaybackException, IOException { playbackInfo.positionUs - backBufferDurationUs, retainBackBufferFromKeyframe); for (int i = 0; i < renderers.length; i++) { Renderer renderer = renderers[i]; - if (renderer.getState() == Renderer.STATE_DISABLED) { + if (!isRendererEnabled(renderer)) { continue; } // TODO: Each renderer should return the maximum delay before which it wishes to be called @@ -604,10 +1006,7 @@ private void doSomeWork() throws ExoPlaybackException, IOException { // tracks in the current period have uneven durations and are still being read by another // renderer. See: https://github.com/google/ExoPlayer/issues/1874. 
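      // The rewritten condition below no longer requires a prepared next period: a renderer that
      // has read its current stream to the end counts as allowing playback even before
      // playingPeriodHolder.getNext() exists.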
boolean isReadingAhead = playingPeriodHolder.sampleStreams[i] != renderer.getStream(); - boolean isWaitingForNextStream = - !isReadingAhead - && playingPeriodHolder.getNext() != null - && renderer.hasReadStreamToEnd(); + boolean isWaitingForNextStream = !isReadingAhead && renderer.hasReadStreamToEnd(); boolean allowsPlayback = isReadingAhead || isWaitingForNextStream || renderer.isReady() || renderer.isEnded(); renderersAllowPlayback = renderersAllowPlayback && allowsPlayback; @@ -620,46 +1019,115 @@ private void doSomeWork() throws ExoPlaybackException, IOException { } long playingPeriodDurationUs = playingPeriodHolder.info.durationUs; - if (renderersEnded - && playingPeriodHolder.prepared - && (playingPeriodDurationUs == C.TIME_UNSET - || playingPeriodDurationUs <= playbackInfo.positionUs) - && playingPeriodHolder.info.isFinal) { + boolean finishedRendering = + renderersEnded + && playingPeriodHolder.prepared + && (playingPeriodDurationUs == C.TIME_UNSET + || playingPeriodDurationUs <= playbackInfo.positionUs); + if (finishedRendering && pendingPauseAtEndOfPeriod) { + pendingPauseAtEndOfPeriod = false; + setPlayWhenReadyInternal( + /* playWhenReady= */ false, + playbackInfo.playbackSuppressionReason, + /* operationAck= */ false, + Player.PLAY_WHEN_READY_CHANGE_REASON_END_OF_MEDIA_ITEM); + } + if (finishedRendering && playingPeriodHolder.info.isFinal) { setState(Player.STATE_ENDED); stopRenderers(); } else if (playbackInfo.playbackState == Player.STATE_BUFFERING && shouldTransitionToReadyState(renderersAllowPlayback)) { setState(Player.STATE_READY); - if (playWhenReady) { + pendingRecoverableRendererError = null; // Any pending error was successfully recovered from. + if (shouldPlayWhenReady()) { startRenderers(); } } else if (playbackInfo.playbackState == Player.STATE_READY - && !(enabledRenderers.length == 0 ? isTimelineReady() : renderersAllowPlayback)) { - rebuffering = playWhenReady; + && !(enabledRendererCount == 0 ? isTimelineReady() : renderersAllowPlayback)) { + isRebuffering = shouldPlayWhenReady(); setState(Player.STATE_BUFFERING); + if (isRebuffering) { + notifyTrackSelectionRebuffer(); + livePlaybackSpeedControl.notifyRebuffer(); + } stopRenderers(); } + boolean playbackMaybeStuck = false; if (playbackInfo.playbackState == Player.STATE_BUFFERING) { - for (Renderer renderer : enabledRenderers) { - renderer.maybeThrowStreamError(); + for (int i = 0; i < renderers.length; i++) { + if (isRendererEnabled(renderers[i]) + && renderers[i].getStream() == playingPeriodHolder.sampleStreams[i]) { + renderers[i].maybeThrowStreamError(); + } + } + if (!playbackInfo.isLoading + && playbackInfo.totalBufferedDurationUs < PLAYBACK_BUFFER_EMPTY_THRESHOLD_US + && isLoadingPossible()) { + // The renderers are not ready, there is more media available to load, and the LoadControl + // is refusing to load it (indicated by !playbackInfo.isLoading). This could be because the + // renderers are still transitioning to their ready states, but it could also indicate a + // stuck playback. The playbackInfo.totalBufferedDurationUs check further isolates the + // cause to a lack of media for the renderers to consume, to avoid classifying playbacks as + // stuck when they're waiting for other reasons (in particular, loading DRM keys). 
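        // When this condition first holds, playbackMaybeBecameStuckAtMs is recorded below; if it
        // still holds PLAYBACK_STUCK_AFTER_MS (4000 ms) later, playback fails with
        // IllegalStateException("Playback stuck buffering and not loading") instead of buffering
        // indefinitely.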
+ playbackMaybeStuck = true; } } - if ((playWhenReady && playbackInfo.playbackState == Player.STATE_READY) - || playbackInfo.playbackState == Player.STATE_BUFFERING) { + if (!playbackMaybeStuck) { + playbackMaybeBecameStuckAtMs = C.TIME_UNSET; + } else if (playbackMaybeBecameStuckAtMs == C.TIME_UNSET) { + playbackMaybeBecameStuckAtMs = clock.elapsedRealtime(); + } else if (clock.elapsedRealtime() - playbackMaybeBecameStuckAtMs >= PLAYBACK_STUCK_AFTER_MS) { + throw new IllegalStateException("Playback stuck buffering and not loading"); + } + + boolean isPlaying = shouldPlayWhenReady() && playbackInfo.playbackState == Player.STATE_READY; + boolean sleepingForOffload = offloadSchedulingEnabled && requestForRendererSleep && isPlaying; + if (playbackInfo.sleepingForOffload != sleepingForOffload) { + playbackInfo = playbackInfo.copyWithSleepingForOffload(sleepingForOffload); + } + requestForRendererSleep = false; // A sleep request is only valid for the current doSomeWork. + + if (sleepingForOffload || playbackInfo.playbackState == Player.STATE_ENDED) { + // No need to schedule next work. + } else if (isPlaying || playbackInfo.playbackState == Player.STATE_BUFFERING) { + // We are actively playing or waiting for data to be ready. Schedule next work quickly. scheduleNextWork(operationStartTimeMs, ACTIVE_INTERVAL_MS); - } else if (enabledRenderers.length != 0 && playbackInfo.playbackState != Player.STATE_ENDED) { + } else if (playbackInfo.playbackState == Player.STATE_READY && enabledRendererCount != 0) { + // We are ready, but not playing. Schedule next work less often to handle non-urgent updates. scheduleNextWork(operationStartTimeMs, IDLE_INTERVAL_MS); - } else { - handler.removeMessages(MSG_DO_SOME_WORK); } TraceUtil.endSection(); } + private long getCurrentLiveOffsetUs() { + return getLiveOffsetUs( + playbackInfo.timeline, playbackInfo.periodId.periodUid, playbackInfo.positionUs); + } + + private long getLiveOffsetUs(Timeline timeline, Object periodUid, long periodPositionUs) { + int windowIndex = timeline.getPeriodByUid(periodUid, period).windowIndex; + timeline.getWindow(windowIndex, window); + if (window.windowStartTimeMs == C.TIME_UNSET || !window.isLive() || !window.isDynamic) { + return C.TIME_UNSET; + } + return Util.msToUs(window.getCurrentUnixTimeMs() - window.windowStartTimeMs) + - (periodPositionUs + period.getPositionInWindowUs()); + } + + private boolean shouldUseLivePlaybackSpeedControl( + Timeline timeline, MediaPeriodId mediaPeriodId) { + if (mediaPeriodId.isAd() || timeline.isEmpty()) { + return false; + } + int windowIndex = timeline.getPeriodByUid(mediaPeriodId.periodUid, period).windowIndex; + timeline.getWindow(windowIndex, window); + return window.isLive() && window.isDynamic && window.windowStartTimeMs != C.TIME_UNSET; + } + private void scheduleNextWork(long thisOperationStartTimeMs, long intervalMs) { - handler.removeMessages(MSG_DO_SOME_WORK); handler.sendEmptyMessageAtTime(MSG_DO_SOME_WORK, thisOperationStartTimeMs + intervalMs); } @@ -668,43 +1136,62 @@ private void seekToInternal(SeekPosition seekPosition) throws ExoPlaybackExcepti MediaPeriodId periodId; long periodPositionUs; - long contentPositionUs; + long requestedContentPositionUs; boolean seekPositionAdjusted; + @Nullable Pair resolvedSeekPosition = - resolveSeekPosition(seekPosition, /* trySubsequentPeriods= */ true); + resolveSeekPositionUs( + playbackInfo.timeline, + seekPosition, + /* trySubsequentPeriods= */ true, + repeatMode, + shuffleModeEnabled, + window, + period); if (resolvedSeekPosition == 
null) { // The seek position was valid for the timeline that it was performed into, but the // timeline has changed or is not ready and a suitable seek position could not be resolved. - periodId = playbackInfo.getDummyFirstMediaPeriodId(shuffleModeEnabled, window, period); - periodPositionUs = C.TIME_UNSET; - contentPositionUs = C.TIME_UNSET; - seekPositionAdjusted = true; + Pair firstPeriodAndPositionUs = + getPlaceholderFirstMediaPeriodPositionUs(playbackInfo.timeline); + periodId = firstPeriodAndPositionUs.first; + periodPositionUs = firstPeriodAndPositionUs.second; + requestedContentPositionUs = C.TIME_UNSET; + seekPositionAdjusted = !playbackInfo.timeline.isEmpty(); } else { // Update the resolved seek position to take ads into account. Object periodUid = resolvedSeekPosition.first; - contentPositionUs = resolvedSeekPosition.second; - periodId = queue.resolveMediaPeriodIdForAds(periodUid, contentPositionUs); + long resolvedContentPositionUs = resolvedSeekPosition.second; + requestedContentPositionUs = + seekPosition.windowPositionUs == C.TIME_UNSET ? C.TIME_UNSET : resolvedContentPositionUs; + periodId = + queue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange( + playbackInfo.timeline, periodUid, resolvedContentPositionUs); if (periodId.isAd()) { - periodPositionUs = 0; + playbackInfo.timeline.getPeriodByUid(periodId.periodUid, period); + periodPositionUs = + period.getFirstAdIndexToPlay(periodId.adGroupIndex) == periodId.adIndexInAdGroup + ? period.getAdResumePositionUs() + : 0; seekPositionAdjusted = true; } else { - periodPositionUs = resolvedSeekPosition.second; + periodPositionUs = resolvedContentPositionUs; seekPositionAdjusted = seekPosition.windowPositionUs == C.TIME_UNSET; } } try { - if (mediaSource == null || pendingPrepareCount > 0) { + if (playbackInfo.timeline.isEmpty()) { // Save seek position for later, as we are still waiting for a prepared source. pendingInitialSeekPosition = seekPosition; - } else if (periodPositionUs == C.TIME_UNSET) { + } else if (resolvedSeekPosition == null) { // End playback, as we didn't manage to find a valid seek position. - setState(Player.STATE_ENDED); + if (playbackInfo.playbackState != Player.STATE_IDLE) { + setState(Player.STATE_ENDED); + } resetInternal( /* resetRenderers= */ false, - /* releaseMediaSource= */ false, /* resetPosition= */ true, - /* resetState= */ false, + /* releaseMediaSourceList= */ false, /* resetError= */ true); } else { // Execute the seek in the current media periods. @@ -718,49 +1205,71 @@ private void seekToInternal(SeekPosition seekPosition) throws ExoPlaybackExcepti playingPeriodHolder.mediaPeriod.getAdjustedSeekPositionUs( newPeriodPositionUs, seekParameters); } - if (C.usToMs(newPeriodPositionUs) == C.usToMs(playbackInfo.positionUs)) { + if (Util.usToMs(newPeriodPositionUs) == Util.usToMs(playbackInfo.positionUs) + && (playbackInfo.playbackState == Player.STATE_BUFFERING + || playbackInfo.playbackState == Player.STATE_READY)) { // Seek will be performed to the current position. Do nothing. 
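// Illustrative sketch (not from the patch): the seek path above skips the seek when the
// adjusted target equals the current position at millisecond granularity (and the player is
// already buffering or ready), since the public Player API reports positions in ms. The helper
// below only restates the truncating comparison; it is not ExoPlayer API.
static boolean isNoOpSeekAtMsGranularity(long newPositionUs, long currentPositionUs) {
  // Integer division truncates, mirroring a microsecond-to-millisecond conversion.
  return newPositionUs / 1000 == currentPositionUs / 1000;
}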
periodPositionUs = playbackInfo.positionUs; return; } } - newPeriodPositionUs = seekToPeriodPosition(periodId, newPeriodPositionUs); + newPeriodPositionUs = + seekToPeriodPosition( + periodId, + newPeriodPositionUs, + /* forceBufferingState= */ playbackInfo.playbackState == Player.STATE_ENDED); seekPositionAdjusted |= periodPositionUs != newPeriodPositionUs; periodPositionUs = newPeriodPositionUs; + updatePlaybackSpeedSettingsForNewPeriod( + /* newTimeline= */ playbackInfo.timeline, + /* newPeriodId= */ periodId, + /* oldTimeline= */ playbackInfo.timeline, + /* oldPeriodId= */ playbackInfo.periodId, + /* positionForTargetOffsetOverrideUs= */ requestedContentPositionUs); } } finally { - playbackInfo = copyWithNewPosition(periodId, periodPositionUs, contentPositionUs); - if (seekPositionAdjusted) { - playbackInfoUpdate.setPositionDiscontinuity(Player.DISCONTINUITY_REASON_SEEK_ADJUSTMENT); - } + playbackInfo = + handlePositionDiscontinuity( + periodId, + periodPositionUs, + requestedContentPositionUs, + /* discontinuityStartPositionUs= */ periodPositionUs, + /* reportDiscontinuity= */ seekPositionAdjusted, + Player.DISCONTINUITY_REASON_SEEK_ADJUSTMENT); } } - private long seekToPeriodPosition(MediaPeriodId periodId, long periodPositionUs) + private long seekToPeriodPosition( + MediaPeriodId periodId, long periodPositionUs, boolean forceBufferingState) throws ExoPlaybackException { // Force disable renderers if they are reading from a period other than the one being played. return seekToPeriodPosition( - periodId, periodPositionUs, queue.getPlayingPeriod() != queue.getReadingPeriod()); + periodId, + periodPositionUs, + queue.getPlayingPeriod() != queue.getReadingPeriod(), + forceBufferingState); } private long seekToPeriodPosition( - MediaPeriodId periodId, long periodPositionUs, boolean forceDisableRenderers) + MediaPeriodId periodId, + long periodPositionUs, + boolean forceDisableRenderers, + boolean forceBufferingState) throws ExoPlaybackException { stopRenderers(); - rebuffering = false; - if (playbackInfo.playbackState != Player.STATE_IDLE && !playbackInfo.timeline.isEmpty()) { + isRebuffering = false; + if (forceBufferingState || playbackInfo.playbackState == Player.STATE_READY) { setState(Player.STATE_BUFFERING); } - // Clear the timeline, but keep the requested period if it is already prepared. - MediaPeriodHolder oldPlayingPeriodHolder = queue.getPlayingPeriod(); - MediaPeriodHolder newPlayingPeriodHolder = oldPlayingPeriodHolder; + // Find the requested period if it already exists. 
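// Illustrative sketch (not from the patch): instead of clearing the queue up front, the seek
// now first walks the chain of queued period holders looking for the requested period id so
// that already-buffered media can be reused. The generic walk below mirrors that loop with
// simplified stand-in types.
static <T> Node<T> findQueuedPeriod(Node<T> playingPeriod, T requestedId) {
  Node<T> holder = playingPeriod;
  while (holder != null && !requestedId.equals(holder.id)) {
    holder = holder.next; // Counterpart of MediaPeriodHolder.getNext() in the real code.
  }
  return holder; // Null if the requested period is not queued yet.
}

static final class Node<T> {
  final T id;
  Node<T> next;
  Node(T id) { this.id = id; }
}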
+ @Nullable MediaPeriodHolder oldPlayingPeriodHolder = queue.getPlayingPeriod(); + @Nullable MediaPeriodHolder newPlayingPeriodHolder = oldPlayingPeriodHolder; while (newPlayingPeriodHolder != null) { - if (periodId.equals(newPlayingPeriodHolder.info.id) && newPlayingPeriodHolder.prepared) { - queue.removeAfter(newPlayingPeriodHolder); + if (periodId.equals(newPlayingPeriodHolder.info.id)) { break; } - newPlayingPeriodHolder = queue.advancePlayingPeriod(); + newPlayingPeriodHolder = newPlayingPeriodHolder.getNext(); } // Disable all renderers if the period being played is changing, if the seek results in negative @@ -769,20 +1278,28 @@ private long seekToPeriodPosition( || oldPlayingPeriodHolder != newPlayingPeriodHolder || (newPlayingPeriodHolder != null && newPlayingPeriodHolder.toRendererTime(periodPositionUs) < 0)) { - for (Renderer renderer : enabledRenderers) { + for (Renderer renderer : renderers) { disableRenderer(renderer); } - enabledRenderers = new Renderer[0]; - oldPlayingPeriodHolder = null; if (newPlayingPeriodHolder != null) { - newPlayingPeriodHolder.setRendererOffset(/* rendererPositionOffsetUs= */ 0); + // Update the queue and reenable renderers if the requested media period already exists. + while (queue.getPlayingPeriod() != newPlayingPeriodHolder) { + queue.advancePlayingPeriod(); + } + queue.removeAfter(newPlayingPeriodHolder); + newPlayingPeriodHolder.setRendererOffset( + MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US); + enableRenderers(); } } - // Update the holders. + // Do the actual seeking. if (newPlayingPeriodHolder != null) { - updatePlayingPeriodRenderers(oldPlayingPeriodHolder); - if (newPlayingPeriodHolder.hasEnabledTracks) { + queue.removeAfter(newPlayingPeriodHolder); + if (!newPlayingPeriodHolder.prepared) { + newPlayingPeriodHolder.info = + newPlayingPeriodHolder.info.copyWithStartPositionUs(periodPositionUs); + } else if (newPlayingPeriodHolder.hasEnabledTracks) { periodPositionUs = newPlayingPeriodHolder.mediaPeriod.seekToUs(periodPositionUs); newPlayingPeriodHolder.mediaPeriod.discardBuffer( periodPositionUs - backBufferDurationUs, retainBackBufferFromKeyframe); @@ -790,10 +1307,8 @@ private long seekToPeriodPosition( resetRendererPosition(periodPositionUs); maybeContinueLoading(); } else { - queue.clear(/* keepFrontPeriodUid= */ true); // New period has not been prepared. - playbackInfo = - playbackInfo.copyWithTrackInfo(TrackGroupArray.EMPTY, emptyTrackSelectorResult); + queue.clear(); resetRendererPosition(periodPositionUs); } @@ -806,19 +1321,21 @@ private void resetRendererPosition(long periodPositionUs) throws ExoPlaybackExce MediaPeriodHolder playingMediaPeriod = queue.getPlayingPeriod(); rendererPositionUs = playingMediaPeriod == null - ? periodPositionUs + ? 
MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US + periodPositionUs : playingMediaPeriod.toRendererTime(periodPositionUs); mediaClock.resetPosition(rendererPositionUs); - for (Renderer renderer : enabledRenderers) { - renderer.resetPosition(rendererPositionUs); + for (Renderer renderer : renderers) { + if (isRendererEnabled(renderer)) { + renderer.resetPosition(rendererPositionUs); + } } notifyTrackSelectionDiscontinuity(); } - private void setPlaybackParametersInternal(PlaybackParameters playbackParameters) { + private void setPlaybackParametersInternal(PlaybackParameters playbackParameters) + throws ExoPlaybackException { mediaClock.setPlaybackParameters(playbackParameters); - sendPlaybackParametersChangedInternal( - mediaClock.getPlaybackParameters(), /* acknowledgeCommand= */ true); + handlePlaybackParameters(mediaClock.getPlaybackParameters(), /* acknowledgeCommand= */ true); } private void setSeekParametersInternal(SeekParameters seekParameters) { @@ -831,7 +1348,7 @@ private void setForegroundModeInternal( this.foregroundMode = foregroundMode; if (!foregroundMode) { for (Renderer renderer : renderers) { - if (renderer.getState() == Renderer.STATE_DISABLED) { + if (!isRendererEnabled(renderer) && renderersToReset.remove(renderer)) { renderer.reset(); } } @@ -845,17 +1362,13 @@ private void setForegroundModeInternal( } } - private void stopInternal( - boolean forceResetRenderers, boolean resetPositionAndState, boolean acknowledgeStop) { + private void stopInternal(boolean forceResetRenderers, boolean acknowledgeStop) { resetInternal( /* resetRenderers= */ forceResetRenderers || !foregroundMode, - /* releaseMediaSource= */ true, - /* resetPosition= */ resetPositionAndState, - /* resetState= */ resetPositionAndState, - /* resetError= */ resetPositionAndState); - playbackInfoUpdate.incrementPendingOperationAcks( - pendingPrepareCount + (acknowledgeStop ? 1 : 0)); - pendingPrepareCount = 0; + /* resetPosition= */ false, + /* releaseMediaSourceList= */ true, + /* resetError= */ false); + playbackInfoUpdate.incrementPendingOperationAcks(acknowledgeStop ? 
1 : 0); loadControl.onStopped(); setState(Player.STATE_IDLE); } @@ -863,13 +1376,14 @@ private void stopInternal( private void releaseInternal() { resetInternal( /* resetRenderers= */ true, - /* releaseMediaSource= */ true, - /* resetPosition= */ true, - /* resetState= */ true, + /* resetPosition= */ false, + /* releaseMediaSourceList= */ true, /* resetError= */ false); loadControl.onReleased(); setState(Player.STATE_IDLE); - internalPlaybackThread.quit(); + if (internalPlaybackThread != null) { + internalPlaybackThread.quit(); + } synchronized (this) { released = true; notifyAll(); @@ -878,15 +1392,15 @@ private void releaseInternal() { private void resetInternal( boolean resetRenderers, - boolean releaseMediaSource, boolean resetPosition, - boolean resetState, + boolean releaseMediaSourceList, boolean resetError) { handler.removeMessages(MSG_DO_SOME_WORK); - rebuffering = false; + pendingRecoverableRendererError = null; + isRebuffering = false; mediaClock.stop(); - rendererPositionUs = 0; - for (Renderer renderer : enabledRenderers) { + rendererPositionUs = MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US; + for (Renderer renderer : renderers) { try { disableRenderer(renderer); } catch (ExoPlaybackException | RuntimeException e) { @@ -896,84 +1410,105 @@ private void resetInternal( } if (resetRenderers) { for (Renderer renderer : renderers) { - try { - renderer.reset(); - } catch (RuntimeException e) { - // There's nothing we can do. - Log.e(TAG, "Reset failed.", e); + if (renderersToReset.remove(renderer)) { + try { + renderer.reset(); + } catch (RuntimeException e) { + // There's nothing we can do. + Log.e(TAG, "Reset failed.", e); + } } } } - enabledRenderers = new Renderer[0]; + enabledRendererCount = 0; + MediaPeriodId mediaPeriodId = playbackInfo.periodId; + long startPositionUs = playbackInfo.positionUs; + long requestedContentPositionUs = + playbackInfo.periodId.isAd() || isUsingPlaceholderPeriod(playbackInfo, period) + ? playbackInfo.requestedContentPositionUs + : playbackInfo.positionUs; + boolean resetTrackInfo = false; if (resetPosition) { pendingInitialSeekPosition = null; - } else if (resetState) { - // When resetting the state, also reset the period-based PlaybackInfo position and convert - // existing position to initial seek instead. - resetPosition = true; - if (pendingInitialSeekPosition == null && !playbackInfo.timeline.isEmpty()) { - playbackInfo.timeline.getPeriodByUid(playbackInfo.periodId.periodUid, period); - long windowPositionUs = playbackInfo.positionUs + period.getPositionInWindowUs(); - pendingInitialSeekPosition = - new SeekPosition(Timeline.EMPTY, period.windowIndex, windowPositionUs); + Pair firstPeriodAndPositionUs = + getPlaceholderFirstMediaPeriodPositionUs(playbackInfo.timeline); + mediaPeriodId = firstPeriodAndPositionUs.first; + startPositionUs = firstPeriodAndPositionUs.second; + requestedContentPositionUs = C.TIME_UNSET; + if (!mediaPeriodId.equals(playbackInfo.periodId)) { + resetTrackInfo = true; } } - queue.clear(/* keepFrontPeriodUid= */ !resetState); + queue.clear(); shouldContinueLoading = false; - if (resetState) { - queue.setTimeline(Timeline.EMPTY); - for (PendingMessageInfo pendingMessageInfo : pendingMessages) { - pendingMessageInfo.message.markAsProcessed(/* isDelivered= */ false); - } - pendingMessages.clear(); - } - MediaPeriodId mediaPeriodId = - resetPosition - ? 
playbackInfo.getDummyFirstMediaPeriodId(shuffleModeEnabled, window, period) - : playbackInfo.periodId; - // Set the start position to TIME_UNSET so that a subsequent seek to 0 isn't ignored. - long startPositionUs = resetPosition ? C.TIME_UNSET : playbackInfo.positionUs; - long contentPositionUs = resetPosition ? C.TIME_UNSET : playbackInfo.contentPositionUs; + playbackInfo = new PlaybackInfo( - resetState ? Timeline.EMPTY : playbackInfo.timeline, + playbackInfo.timeline, mediaPeriodId, - startPositionUs, - contentPositionUs, + requestedContentPositionUs, + /* discontinuityStartPositionUs= */ startPositionUs, playbackInfo.playbackState, resetError ? null : playbackInfo.playbackError, /* isLoading= */ false, - resetState ? TrackGroupArray.EMPTY : playbackInfo.trackGroups, - resetState ? emptyTrackSelectorResult : playbackInfo.trackSelectorResult, + resetTrackInfo ? TrackGroupArray.EMPTY : playbackInfo.trackGroups, + resetTrackInfo ? emptyTrackSelectorResult : playbackInfo.trackSelectorResult, + resetTrackInfo ? ImmutableList.of() : playbackInfo.staticMetadata, mediaPeriodId, - startPositionUs, + playbackInfo.playWhenReady, + playbackInfo.playbackSuppressionReason, + playbackInfo.playbackParameters, + /* bufferedPositionUs= */ startPositionUs, /* totalBufferedDurationUs= */ 0, - startPositionUs); - if (releaseMediaSource) { - if (mediaSource != null) { - try { - mediaSource.releaseSource(/* caller= */ this); - } catch (RuntimeException e) { - // There's nothing we can do. - Log.e(TAG, "Failed to release child source.", e); - } - mediaSource = null; - } + /* positionUs= */ startPositionUs, + /* sleepingForOffload= */ false); + if (releaseMediaSourceList) { + mediaSourceList.release(); } } + private Pair getPlaceholderFirstMediaPeriodPositionUs(Timeline timeline) { + if (timeline.isEmpty()) { + return Pair.create(PlaybackInfo.getDummyPeriodForEmptyTimeline(), 0L); + } + int firstWindowIndex = timeline.getFirstWindowIndex(shuffleModeEnabled); + Pair firstPeriodAndPositionUs = + timeline.getPeriodPositionUs( + window, period, firstWindowIndex, /* windowPositionUs= */ C.TIME_UNSET); + // Add ad metadata if any and propagate the window sequence number to new period id. + MediaPeriodId firstPeriodId = + queue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange( + timeline, firstPeriodAndPositionUs.first, /* positionUs= */ 0); + long positionUs = firstPeriodAndPositionUs.second; + if (firstPeriodId.isAd()) { + timeline.getPeriodByUid(firstPeriodId.periodUid, period); + positionUs = + firstPeriodId.adIndexInAdGroup == period.getFirstAdIndexToPlay(firstPeriodId.adGroupIndex) + ? period.getAdResumePositionUs() + : 0; + } + return Pair.create(firstPeriodId, positionUs); + } + private void sendMessageInternal(PlayerMessage message) throws ExoPlaybackException { if (message.getPositionMs() == C.TIME_UNSET) { // If no delivery time is specified, trigger immediate message delivery. sendMessageToTarget(message); - } else if (mediaSource == null || pendingPrepareCount > 0) { + } else if (playbackInfo.timeline.isEmpty()) { // Still waiting for initial timeline to resolve position. 
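// Illustrative sketch (not from the patch): position-based player messages cannot be resolved
// while the timeline is empty, so they are parked and re-resolved later, then kept sorted in
// playback order. The simplified item below shows that ordering (window index first, then
// position); the real PendingMessageInfo sorts by resolved period index and period time.
static final class PendingItem implements Comparable<PendingItem> {
  final int windowIndex;
  final long positionUs;

  PendingItem(int windowIndex, long positionUs) {
    this.windowIndex = windowIndex;
    this.positionUs = positionUs;
  }

  @Override
  public int compareTo(PendingItem other) {
    return windowIndex != other.windowIndex
        ? Integer.compare(windowIndex, other.windowIndex)
        : Long.compare(positionUs, other.positionUs);
  }
}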
pendingMessages.add(new PendingMessageInfo(message)); } else { PendingMessageInfo pendingMessageInfo = new PendingMessageInfo(message); - if (resolvePendingMessagePosition(pendingMessageInfo)) { + if (resolvePendingMessagePosition( + pendingMessageInfo, + /* newTimeline= */ playbackInfo.timeline, + /* previousTimeline= */ playbackInfo.timeline, + repeatMode, + shuffleModeEnabled, + window, + period)) { pendingMessages.add(pendingMessageInfo); // Ensure new message is inserted according to playback order. Collections.sort(pendingMessages); @@ -984,7 +1519,7 @@ private void sendMessageInternal(PlayerMessage message) throws ExoPlaybackExcept } private void sendMessageToTarget(PlayerMessage message) throws ExoPlaybackException { - if (message.getHandler().getLooper() == handler.getLooper()) { + if (message.getLooper() == playbackLooper) { deliverMessage(message); if (playbackInfo.playbackState == Player.STATE_READY || playbackInfo.playbackState == Player.STATE_BUFFERING) { @@ -997,21 +1532,23 @@ private void sendMessageToTarget(PlayerMessage message) throws ExoPlaybackExcept } private void sendMessageToTargetThread(final PlayerMessage message) { - Handler handler = message.getHandler(); - if (!handler.getLooper().getThread().isAlive()) { + Looper looper = message.getLooper(); + if (!looper.getThread().isAlive()) { Log.w("TAG", "Trying to send message on a dead thread."); message.markAsProcessed(/* isDelivered= */ false); return; } - handler.post( - () -> { - try { - deliverMessage(message); - } catch (ExoPlaybackException e) { - Log.e(TAG, "Unexpected error delivering message on external thread.", e); - throw new RuntimeException(e); - } - }); + clock + .createHandler(looper, /* callback= */ null) + .post( + () -> { + try { + deliverMessage(message); + } catch (ExoPlaybackException e) { + Log.e(TAG, "Unexpected error delivering message on external thread.", e); + throw new RuntimeException(e); + } + }); } private void deliverMessage(PlayerMessage message) throws ExoPlaybackException { @@ -1025,9 +1562,20 @@ private void deliverMessage(PlayerMessage message) throws ExoPlaybackException { } } - private void resolvePendingMessagePositions() { + private void resolvePendingMessagePositions(Timeline newTimeline, Timeline previousTimeline) { + if (newTimeline.isEmpty() && previousTimeline.isEmpty()) { + // Keep all messages unresolved until we have a non-empty timeline. + return; + } for (int i = pendingMessages.size() - 1; i >= 0; i--) { - if (!resolvePendingMessagePosition(pendingMessages.get(i))) { + if (!resolvePendingMessagePosition( + pendingMessages.get(i), + newTimeline, + previousTimeline, + repeatMode, + shuffleModeEnabled, + window, + period)) { // Unable to resolve a new position for the message. Remove it. pendingMessages.get(i).message.markAsProcessed(/* isDelivered= */ false); pendingMessages.remove(i); @@ -1037,51 +1585,22 @@ private void resolvePendingMessagePositions() { Collections.sort(pendingMessages); } - private boolean resolvePendingMessagePosition(PendingMessageInfo pendingMessageInfo) { - if (pendingMessageInfo.resolvedPeriodUid == null) { - // Position is still unresolved. Try to find window in current timeline. 
- Pair periodPosition = - resolveSeekPosition( - new SeekPosition( - pendingMessageInfo.message.getTimeline(), - pendingMessageInfo.message.getWindowIndex(), - C.msToUs(pendingMessageInfo.message.getPositionMs())), - /* trySubsequentPeriods= */ false); - if (periodPosition == null) { - return false; - } - pendingMessageInfo.setResolvedPosition( - playbackInfo.timeline.getIndexOfPeriod(periodPosition.first), - periodPosition.second, - periodPosition.first); - } else { - // Position has been resolved for a previous timeline. Try to find the updated period index. - int index = playbackInfo.timeline.getIndexOfPeriod(pendingMessageInfo.resolvedPeriodUid); - if (index == C.INDEX_UNSET) { - return false; - } - pendingMessageInfo.resolvedPeriodIndex = index; - } - return true; - } - private void maybeTriggerPendingMessages(long oldPeriodPositionUs, long newPeriodPositionUs) throws ExoPlaybackException { if (pendingMessages.isEmpty() || playbackInfo.periodId.isAd()) { return; } - // If this is the first call from the start position, include oldPeriodPositionUs in potential - // trigger positions, but make sure we deliver it only once. - if (playbackInfo.startPositionUs == oldPeriodPositionUs - && deliverPendingMessageAtStartPositionRequired) { + // If this is the first call after resetting the renderer position, include oldPeriodPositionUs + // in potential trigger positions, but make sure we deliver it only once. + if (deliverPendingMessageAtStartPositionRequired) { oldPeriodPositionUs--; + deliverPendingMessageAtStartPositionRequired = false; } - deliverPendingMessageAtStartPositionRequired = false; // Correct next index if necessary (e.g. after seeking, timeline changes, or new messages) int currentPeriodIndex = playbackInfo.timeline.getIndexOfPeriod(playbackInfo.periodId.periodUid); - int nextPendingMessageIndex = Math.min(nextPendingMessageIndexHint, pendingMessages.size()); + int nextPendingMessageIndex = min(nextPendingMessageIndexHint, pendingMessages.size()); PendingMessageInfo previousInfo = nextPendingMessageIndex > 0 ? 
pendingMessages.get(nextPendingMessageIndex - 1) : null; while (previousInfo != null @@ -1137,9 +1656,13 @@ private void ensureStopped(Renderer renderer) throws ExoPlaybackException { } private void disableRenderer(Renderer renderer) throws ExoPlaybackException { + if (!isRendererEnabled(renderer)) { + return; + } mediaClock.onRendererDisabled(renderer); ensureStopped(renderer); renderer.disable(); + enabledRendererCount--; } private void reselectTracksInternal() throws ExoPlaybackException { @@ -1175,24 +1698,26 @@ private void reselectTracksInternal() throws ExoPlaybackException { long periodPositionUs = playingPeriodHolder.applyTrackSelection( newTrackSelectorResult, playbackInfo.positionUs, recreateStreams, streamResetFlags); - if (playbackInfo.playbackState != Player.STATE_ENDED - && periodPositionUs != playbackInfo.positionUs) { - playbackInfo = - copyWithNewPosition( - playbackInfo.periodId, periodPositionUs, playbackInfo.contentPositionUs); - playbackInfoUpdate.setPositionDiscontinuity(Player.DISCONTINUITY_REASON_INTERNAL); + boolean hasDiscontinuity = + playbackInfo.playbackState != Player.STATE_ENDED + && periodPositionUs != playbackInfo.positionUs; + playbackInfo = + handlePositionDiscontinuity( + playbackInfo.periodId, + periodPositionUs, + playbackInfo.requestedContentPositionUs, + playbackInfo.discontinuityStartPositionUs, + hasDiscontinuity, + Player.DISCONTINUITY_REASON_INTERNAL); + if (hasDiscontinuity) { resetRendererPosition(periodPositionUs); } - int enabledRendererCount = 0; boolean[] rendererWasEnabledFlags = new boolean[renderers.length]; for (int i = 0; i < renderers.length; i++) { Renderer renderer = renderers[i]; - rendererWasEnabledFlags[i] = renderer.getState() != Renderer.STATE_DISABLED; + rendererWasEnabledFlags[i] = isRendererEnabled(renderer); SampleStream sampleStream = playingPeriodHolder.sampleStreams[i]; - if (sampleStream != null) { - enabledRendererCount++; - } if (rendererWasEnabledFlags[i]) { if (sampleStream != renderer.getStream()) { // We need to disable the renderer. @@ -1203,17 +1728,13 @@ private void reselectTracksInternal() throws ExoPlaybackException { } } } - playbackInfo = - playbackInfo.copyWithTrackInfo( - playingPeriodHolder.getTrackGroups(), playingPeriodHolder.getTrackSelectorResult()); - enableRenderers(rendererWasEnabledFlags, enabledRendererCount); + enableRenderers(rendererWasEnabledFlags); } else { // Release and re-prepare/buffer periods after the one whose selection changed. 
queue.removeAfter(periodHolder); if (periodHolder.prepared) { long loadingPeriodPositionUs = - Math.max( - periodHolder.info.startPositionUs, periodHolder.toPeriodTime(rendererPositionUs)); + max(periodHolder.info.startPositionUs, periodHolder.toPeriodTime(rendererPositionUs)); periodHolder.applyTrackSelection(newTrackSelectorResult, loadingPeriodPositionUs, false); } } @@ -1228,8 +1749,7 @@ private void reselectTracksInternal() throws ExoPlaybackException { private void updateTrackSelectionPlaybackSpeed(float playbackSpeed) { MediaPeriodHolder periodHolder = queue.getPlayingPeriod(); while (periodHolder != null) { - TrackSelection[] trackSelections = periodHolder.getTrackSelectorResult().selections.getAll(); - for (TrackSelection trackSelection : trackSelections) { + for (ExoTrackSelection trackSelection : periodHolder.getTrackSelectorResult().selections) { if (trackSelection != null) { trackSelection.onPlaybackSpeed(playbackSpeed); } @@ -1241,8 +1761,7 @@ private void updateTrackSelectionPlaybackSpeed(float playbackSpeed) { private void notifyTrackSelectionDiscontinuity() { MediaPeriodHolder periodHolder = queue.getPlayingPeriod(); while (periodHolder != null) { - TrackSelection[] trackSelections = periodHolder.getTrackSelectorResult().selections.getAll(); - for (TrackSelection trackSelection : trackSelections) { + for (ExoTrackSelection trackSelection : periodHolder.getTrackSelectorResult().selections) { if (trackSelection != null) { trackSelection.onDiscontinuity(); } @@ -1252,7 +1771,7 @@ private void notifyTrackSelectionDiscontinuity() { } private boolean shouldTransitionToReadyState(boolean renderersReadyOrEnded) { - if (enabledRenderers.length == 0) { + if (enabledRendererCount == 0) { // If there are no enabled renderers, determine whether we're ready based on the timeline. return isTimelineReady(); } @@ -1265,11 +1784,23 @@ private boolean shouldTransitionToReadyState(boolean renderersReadyOrEnded) { return true; } // Renderers are ready and we're loading. Ask the LoadControl whether to transition. + long targetLiveOffsetUs = + shouldUseLivePlaybackSpeedControl(playbackInfo.timeline, queue.getPlayingPeriod().info.id) + ? livePlaybackSpeedControl.getTargetLiveOffsetUs() + : C.TIME_UNSET; MediaPeriodHolder loadingHolder = queue.getLoadingPeriod(); - boolean bufferedToEnd = loadingHolder.isFullyBuffered() && loadingHolder.info.isFinal; - return bufferedToEnd + boolean isBufferedToEnd = loadingHolder.isFullyBuffered() && loadingHolder.info.isFinal; + // Ad loader implementations may only load ad media once playback has nearly reached the ad, but + // it is possible for playback to be stuck buffering waiting for this. Therefore, we start + // playback regardless of buffered duration if we are waiting for an ad media period to prepare. 
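// Illustrative sketch (not from the patch): the READY transition decided just below combines
// three signals, per the comment above: the final period is fully buffered, an ad period is
// still preparing (ad loaders may defer loading until playback nears the ad), or the
// LoadControl agrees to start. The interface and method below are simplified stand-ins, not
// the real LoadControl API.
interface StartDecider {
  boolean shouldStartPlayback(
      long bufferedDurationUs, float speed, boolean rebuffering, long targetLiveOffsetUs);
}

static boolean shouldTransitionToReady(
    boolean bufferedToEnd,
    boolean adPendingPreparation,
    StartDecider loadControl,
    long bufferedDurationUs,
    float speed,
    boolean rebuffering,
    long targetLiveOffsetUs) {
  return bufferedToEnd
      || adPendingPreparation
      || loadControl.shouldStartPlayback(bufferedDurationUs, speed, rebuffering, targetLiveOffsetUs);
}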
+ boolean isAdPendingPreparation = loadingHolder.info.id.isAd() && !loadingHolder.prepared; + return isBufferedToEnd + || isAdPendingPreparation || loadControl.shouldStartPlayback( - getTotalBufferedDurationUs(), mediaClock.getPlaybackParameters().speed, rebuffering); + getTotalBufferedDurationUs(), + mediaClock.getPlaybackParameters().speed, + isRebuffering, + targetLiveOffsetUs); } private boolean isTimelineReady() { @@ -1277,114 +1808,133 @@ private boolean isTimelineReady() { long playingPeriodDurationUs = playingPeriodHolder.info.durationUs; return playingPeriodHolder.prepared && (playingPeriodDurationUs == C.TIME_UNSET - || playbackInfo.positionUs < playingPeriodDurationUs); - } - - private void maybeThrowSourceInfoRefreshError() throws IOException { - MediaPeriodHolder loadingPeriodHolder = queue.getLoadingPeriod(); - if (loadingPeriodHolder != null) { - // Defer throwing until we read all available media periods. - for (Renderer renderer : enabledRenderers) { - if (!renderer.hasReadStreamToEnd()) { - return; - } - } - } - mediaSource.maybeThrowSourceInfoRefreshError(); + || playbackInfo.positionUs < playingPeriodDurationUs + || !shouldPlayWhenReady()); } - private void handleSourceInfoRefreshed(MediaSourceRefreshInfo sourceRefreshInfo) + private void handleMediaSourceListInfoRefreshed(Timeline timeline, boolean isSourceRefresh) throws ExoPlaybackException { - if (sourceRefreshInfo.source != mediaSource) { - // Stale event. - return; - } - playbackInfoUpdate.incrementPendingOperationAcks(pendingPrepareCount); - pendingPrepareCount = 0; - - Timeline oldTimeline = playbackInfo.timeline; - Timeline timeline = sourceRefreshInfo.timeline; - queue.setTimeline(timeline); - playbackInfo = playbackInfo.copyWithTimeline(timeline); - resolvePendingMessagePositions(); - - MediaPeriodId newPeriodId = playbackInfo.periodId; - long oldContentPositionUs = - playbackInfo.periodId.isAd() ? playbackInfo.contentPositionUs : playbackInfo.positionUs; - long newContentPositionUs = oldContentPositionUs; - if (pendingInitialSeekPosition != null) { - // Resolve initial seek position. - Pair periodPosition = - resolveSeekPosition(pendingInitialSeekPosition, /* trySubsequentPeriods= */ true); - pendingInitialSeekPosition = null; - if (periodPosition == null) { - // The seek position was valid for the timeline that it was performed into, but the - // timeline has changed and a suitable seek position could not be resolved in the new one. 
- handleSourceInfoRefreshEndedPlayback(); - return; + PositionUpdateForPlaylistChange positionUpdate = + resolvePositionForPlaylistChange( + timeline, + playbackInfo, + pendingInitialSeekPosition, + queue, + repeatMode, + shuffleModeEnabled, + window, + period); + MediaPeriodId newPeriodId = positionUpdate.periodId; + long newRequestedContentPositionUs = positionUpdate.requestedContentPositionUs; + boolean forceBufferingState = positionUpdate.forceBufferingState; + long newPositionUs = positionUpdate.periodPositionUs; + boolean periodPositionChanged = + !playbackInfo.periodId.equals(newPeriodId) || newPositionUs != playbackInfo.positionUs; + try { + if (positionUpdate.endPlayback) { + if (playbackInfo.playbackState != Player.STATE_IDLE) { + setState(Player.STATE_ENDED); + } + resetInternal( + /* resetRenderers= */ false, + /* resetPosition= */ false, + /* releaseMediaSourceList= */ false, + /* resetError= */ true); } - newContentPositionUs = periodPosition.second; - newPeriodId = queue.resolveMediaPeriodIdForAds(periodPosition.first, newContentPositionUs); - } else if (oldContentPositionUs == C.TIME_UNSET && !timeline.isEmpty()) { - // Resolve unset start position to default position. - Pair defaultPosition = - getPeriodPosition( - timeline, timeline.getFirstWindowIndex(shuffleModeEnabled), C.TIME_UNSET); - newPeriodId = queue.resolveMediaPeriodIdForAds(defaultPosition.first, defaultPosition.second); - if (!newPeriodId.isAd()) { - // Keep unset start position if we need to play an ad first. - newContentPositionUs = defaultPosition.second; - } - } else if (timeline.getIndexOfPeriod(newPeriodId.periodUid) == C.INDEX_UNSET) { - // The current period isn't in the new timeline. Attempt to resolve a subsequent period whose - // window we can restart from. - Object newPeriodUid = resolveSubsequentPeriod(newPeriodId.periodUid, oldTimeline, timeline); - if (newPeriodUid == null) { - // We failed to resolve a suitable restart position. - handleSourceInfoRefreshEndedPlayback(); - return; + if (!periodPositionChanged) { + // We can keep the current playing period. Update the rest of the queued periods. + if (!queue.updateQueuedPeriods( + timeline, rendererPositionUs, getMaxRendererReadPositionUs())) { + seekToCurrentPosition(/* sendDiscontinuity= */ false); + } + } else if (!timeline.isEmpty()) { + // Something changed. Seek to new start position. + @Nullable MediaPeriodHolder periodHolder = queue.getPlayingPeriod(); + while (periodHolder != null) { + // Update the new playing media period info if it already exists. + if (periodHolder.info.id.equals(newPeriodId)) { + periodHolder.info = queue.getUpdatedMediaPeriodInfo(timeline, periodHolder.info); + periodHolder.updateClipping(); + } + periodHolder = periodHolder.getNext(); + } + newPositionUs = seekToPeriodPosition(newPeriodId, newPositionUs, forceBufferingState); } - // We resolved a subsequent period. Start at the default position in the corresponding window. - Pair defaultPosition = - getPeriodPosition( - timeline, timeline.getPeriodByUid(newPeriodUid, period).windowIndex, C.TIME_UNSET); - newContentPositionUs = defaultPosition.second; - newPeriodId = queue.resolveMediaPeriodIdForAds(defaultPosition.first, newContentPositionUs); - } else { - // Recheck if the current ad still needs to be played or if we need to start playing an ad. 
- newPeriodId = - queue.resolveMediaPeriodIdForAds(playbackInfo.periodId.periodUid, newContentPositionUs); - if (!playbackInfo.periodId.isAd() && !newPeriodId.isAd()) { - // Drop update if we keep playing the same content (MediaPeriod.periodUid are identical) and - // only MediaPeriodId.nextAdGroupIndex may have changed. This postpones a potential - // discontinuity until we reach the former next ad group position. - newPeriodId = playbackInfo.periodId; + } finally { + updatePlaybackSpeedSettingsForNewPeriod( + /* newTimeline= */ timeline, + newPeriodId, + /* oldTimeline= */ playbackInfo.timeline, + /* oldPeriodId= */ playbackInfo.periodId, + /* positionForTargetOffsetOverrideUs */ positionUpdate.setTargetLiveOffset + ? newPositionUs + : C.TIME_UNSET); + if (periodPositionChanged + || newRequestedContentPositionUs != playbackInfo.requestedContentPositionUs) { + Object oldPeriodUid = playbackInfo.periodId.periodUid; + Timeline oldTimeline = playbackInfo.timeline; + boolean reportDiscontinuity = + periodPositionChanged + && isSourceRefresh + && !oldTimeline.isEmpty() + && !oldTimeline.getPeriodByUid(oldPeriodUid, period).isPlaceholder; + playbackInfo = + handlePositionDiscontinuity( + newPeriodId, + newPositionUs, + newRequestedContentPositionUs, + playbackInfo.discontinuityStartPositionUs, + reportDiscontinuity, + timeline.getIndexOfPeriod(oldPeriodUid) == C.INDEX_UNSET + ? Player.DISCONTINUITY_REASON_REMOVE + : Player.DISCONTINUITY_REASON_SKIP); + } + resetPendingPauseAtEndOfPeriod(); + resolvePendingMessagePositions( + /* newTimeline= */ timeline, /* previousTimeline= */ playbackInfo.timeline); + playbackInfo = playbackInfo.copyWithTimeline(timeline); + if (!timeline.isEmpty()) { + // Retain pending seek position only while the timeline is still empty. + pendingInitialSeekPosition = null; + } + handleLoadingMediaPeriodChanged(/* loadingTrackSelectionChanged= */ false); + } + } + + private void updatePlaybackSpeedSettingsForNewPeriod( + Timeline newTimeline, + MediaPeriodId newPeriodId, + Timeline oldTimeline, + MediaPeriodId oldPeriodId, + long positionForTargetOffsetOverrideUs) { + if (!shouldUseLivePlaybackSpeedControl(newTimeline, newPeriodId)) { + // Live playback speed control is unused for the current period, reset speed to user-defined + // playback parameters or 1.0 for ad playback. + PlaybackParameters targetPlaybackParameters = + newPeriodId.isAd() ? PlaybackParameters.DEFAULT : playbackInfo.playbackParameters; + if (!mediaClock.getPlaybackParameters().equals(targetPlaybackParameters)) { + mediaClock.setPlaybackParameters(targetPlaybackParameters); } + return; } - - if (playbackInfo.periodId.equals(newPeriodId) && oldContentPositionUs == newContentPositionUs) { - // We can keep the current playing period. Update the rest of the queued periods. - if (!queue.updateQueuedPeriods(rendererPositionUs, getMaxRendererReadPositionUs())) { - seekToCurrentPosition(/* sendDiscontinuity= */ false); - } + int windowIndex = newTimeline.getPeriodByUid(newPeriodId.periodUid, period).windowIndex; + newTimeline.getWindow(windowIndex, window); + livePlaybackSpeedControl.setLiveConfiguration(castNonNull(window.liveConfiguration)); + if (positionForTargetOffsetOverrideUs != C.TIME_UNSET) { + livePlaybackSpeedControl.setTargetLiveOffsetOverrideUs( + getLiveOffsetUs(newTimeline, newPeriodId.periodUid, positionForTargetOffsetOverrideUs)); } else { - // Something changed. Seek to new start position. 
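// Illustrative sketch (not from the patch): the target live offset handed to the live playback
// speed control above comes from getLiveOffsetUs earlier in this hunk: the wall-clock time
// elapsed since the live window started, minus the current position inside that window, both
// in microseconds. The helper below restates that formula with illustrative parameter names;
// it is not ExoPlayer API.
static long liveOffsetUs(long nowUnixTimeMs, long windowStartTimeMs, long positionInWindowUs) {
  long liveEdgeInWindowUs = (nowUnixTimeMs - windowStartTimeMs) * 1000; // ms -> us
  return liveEdgeInWindowUs - positionInWindowUs; // How far playback lags the live edge.
}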
- MediaPeriodHolder periodHolder = queue.getPlayingPeriod(); - if (periodHolder != null) { - // Update the new playing media period info if it already exists. - while (periodHolder.getNext() != null) { - periodHolder = periodHolder.getNext(); - if (periodHolder.info.id.equals(newPeriodId)) { - periodHolder.info = queue.getUpdatedMediaPeriodInfo(periodHolder.info); - } - } + Object windowUid = window.uid; + @Nullable Object oldWindowUid = null; + if (!oldTimeline.isEmpty()) { + int oldWindowIndex = oldTimeline.getPeriodByUid(oldPeriodId.periodUid, period).windowIndex; + oldWindowUid = oldTimeline.getWindow(oldWindowIndex, window).uid; + } + if (!Util.areEqual(oldWindowUid, windowUid)) { + // Reset overridden target live offset to media values if window changes. + livePlaybackSpeedControl.setTargetLiveOffsetOverrideUs(C.TIME_UNSET); } - // Actually do the seek. - long newPositionUs = newPeriodId.isAd() ? 0 : newContentPositionUs; - long seekedToPositionUs = seekToPeriodPosition(newPeriodId, newPositionUs); - playbackInfo = copyWithNewPosition(newPeriodId, seekedToPositionUs, newContentPositionUs); } - handleLoadingMediaPeriodChanged(/* loadingTrackSelectionChanged= */ false); } private long getMaxRendererReadPositionUs() { @@ -1397,7 +1947,7 @@ private long getMaxRendererReadPositionUs() { return maxReadPositionUs; } for (int i = 0; i < renderers.length; i++) { - if (renderers[i].getState() == Renderer.STATE_DISABLED + if (!isRendererEnabled(renderers[i]) || renderers[i].getStream() != readingHolder.sampleStreams[i]) { // Ignore disabled renderers and renderers with sample streams from previous periods. continue; @@ -1406,159 +1956,47 @@ private long getMaxRendererReadPositionUs() { if (readingPositionUs == C.TIME_END_OF_SOURCE) { return C.TIME_END_OF_SOURCE; } else { - maxReadPositionUs = Math.max(readingPositionUs, maxReadPositionUs); + maxReadPositionUs = max(readingPositionUs, maxReadPositionUs); } } return maxReadPositionUs; } - private void handleSourceInfoRefreshEndedPlayback() { - if (playbackInfo.playbackState != Player.STATE_IDLE) { - setState(Player.STATE_ENDED); - } - // Reset, but retain the source so that it can still be used should a seek occur. - resetInternal( - /* resetRenderers= */ false, - /* releaseMediaSource= */ false, - /* resetPosition= */ true, - /* resetState= */ false, - /* resetError= */ true); - } - - /** - * Given a period index into an old timeline, finds the first subsequent period that also exists - * in a new timeline. The uid of this period in the new timeline is returned. - * - * @param oldPeriodUid The index of the period in the old timeline. - * @param oldTimeline The old timeline. - * @param newTimeline The new timeline. - * @return The uid in the new timeline of the first subsequent period, or null if no such period - * was found. - */ - private @Nullable Object resolveSubsequentPeriod( - Object oldPeriodUid, Timeline oldTimeline, Timeline newTimeline) { - int oldPeriodIndex = oldTimeline.getIndexOfPeriod(oldPeriodUid); - int newPeriodIndex = C.INDEX_UNSET; - int maxIterations = oldTimeline.getPeriodCount(); - for (int i = 0; i < maxIterations && newPeriodIndex == C.INDEX_UNSET; i++) { - oldPeriodIndex = - oldTimeline.getNextPeriodIndex( - oldPeriodIndex, period, window, repeatMode, shuffleModeEnabled); - if (oldPeriodIndex == C.INDEX_UNSET) { - // We've reached the end of the old timeline. - break; - } - newPeriodIndex = newTimeline.getIndexOfPeriod(oldTimeline.getUidOfPeriod(oldPeriodIndex)); - } - return newPeriodIndex == C.INDEX_UNSET ? 
null : newTimeline.getUidOfPeriod(newPeriodIndex); - } - - /** - * Converts a {@link SeekPosition} into the corresponding (periodUid, periodPositionUs) for the - * internal timeline. - * - * @param seekPosition The position to resolve. - * @param trySubsequentPeriods Whether the position can be resolved to a subsequent matching - * period if the original period is no longer available. - * @return The resolved position, or null if resolution was not successful. - * @throws IllegalSeekPositionException If the window index of the seek position is outside the - * bounds of the timeline. - */ - @Nullable - private Pair resolveSeekPosition( - SeekPosition seekPosition, boolean trySubsequentPeriods) { - Timeline timeline = playbackInfo.timeline; - Timeline seekTimeline = seekPosition.timeline; - if (timeline.isEmpty()) { - // We don't have a valid timeline yet, so we can't resolve the position. - return null; - } - if (seekTimeline.isEmpty()) { - // The application performed a blind seek with an empty timeline (most likely based on - // knowledge of what the future timeline will be). Use the internal timeline. - seekTimeline = timeline; - } - // Map the SeekPosition to a position in the corresponding timeline. - Pair periodPosition; - try { - periodPosition = - seekTimeline.getPeriodPosition( - window, period, seekPosition.windowIndex, seekPosition.windowPositionUs); - } catch (IndexOutOfBoundsException e) { - // The window index of the seek position was outside the bounds of the timeline. - return null; - } - if (timeline == seekTimeline) { - // Our internal timeline is the seek timeline, so the mapped position is correct. - return periodPosition; - } - // Attempt to find the mapped period in the internal timeline. - int periodIndex = timeline.getIndexOfPeriod(periodPosition.first); - if (periodIndex != C.INDEX_UNSET) { - // We successfully located the period in the internal timeline. - return periodPosition; - } - if (trySubsequentPeriods) { - // Try and find a subsequent period from the seek timeline in the internal timeline. - @Nullable - Object periodUid = resolveSubsequentPeriod(periodPosition.first, seekTimeline, timeline); - if (periodUid != null) { - // We found one. Use the default position of the corresponding window. - return getPeriodPosition( - timeline, timeline.getPeriodByUid(periodUid, period).windowIndex, C.TIME_UNSET); - } - } - // We didn't find one. Give up. - return null; - } - - /** - * Calls {@link Timeline#getPeriodPosition(Timeline.Window, Timeline.Period, int, long)} using the - * current timeline. - */ - private Pair getPeriodPosition( - Timeline timeline, int windowIndex, long windowPositionUs) { - return timeline.getPeriodPosition(window, period, windowIndex, windowPositionUs); - } - private void updatePeriods() throws ExoPlaybackException, IOException { - if (mediaSource == null) { - // The player has no media source yet. - return; - } - if (pendingPrepareCount > 0) { - // We're waiting to get information about periods. - mediaSource.maybeThrowSourceInfoRefreshError(); + if (playbackInfo.timeline.isEmpty() || !mediaSourceList.isPrepared()) { + // No periods available. 
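// Illustrative sketch (not from the patch): the removed helpers above (and their static
// replacements) resolve a position against a changed timeline by first looking the period up
// in the new timeline and, if it is gone, walking forward through the old timeline until a
// period that still exists in the new one is found. The sketch models timelines as ordered
// lists of period uids, which is a simplification of the real Timeline API.
static Object resolveAcrossTimelines(
    Object periodUid, java.util.List<Object> oldTimeline, java.util.List<Object> newTimeline) {
  if (newTimeline.contains(periodUid)) {
    return periodUid; // Still present; keep playing the same period.
  }
  int oldIndex = oldTimeline.indexOf(periodUid);
  if (oldIndex == -1) {
    return null; // Unknown period; nothing to walk forward from.
  }
  for (int i = oldIndex + 1; i < oldTimeline.size(); i++) {
    Object candidate = oldTimeline.get(i);
    if (newTimeline.contains(candidate)) {
      return candidate; // First subsequent period that survived the timeline change.
    }
  }
  return null; // No suitable period; the caller ends playback or falls back to a placeholder.
}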
return; } maybeUpdateLoadingPeriod(); maybeUpdateReadingPeriod(); + maybeUpdateReadingRenderers(); maybeUpdatePlayingPeriod(); } - private void maybeUpdateLoadingPeriod() throws ExoPlaybackException, IOException { + private void maybeUpdateLoadingPeriod() throws ExoPlaybackException { queue.reevaluateBuffer(rendererPositionUs); if (queue.shouldLoadNextMediaPeriod()) { + @Nullable MediaPeriodInfo info = queue.getNextMediaPeriodInfo(rendererPositionUs, playbackInfo); - if (info == null) { - maybeThrowSourceInfoRefreshError(); - } else { + if (info != null) { MediaPeriodHolder mediaPeriodHolder = queue.enqueueNextMediaPeriodHolder( rendererCapabilities, trackSelector, loadControl.getAllocator(), - mediaSource, + mediaSourceList, info, emptyTrackSelectorResult); mediaPeriodHolder.mediaPeriod.prepare(this, info.startPositionUs); if (queue.getPlayingPeriod() == mediaPeriodHolder) { - resetRendererPosition(mediaPeriodHolder.getStartPositionRendererTime()); + resetRendererPosition(info.startPositionUs); } handleLoadingMediaPeriodChanged(/* loadingTrackSelectionChanged= */ false); } } if (shouldContinueLoading) { + // We should still be loading, except when there is nothing to load or we have fully loaded + // the current period. shouldContinueLoading = isLoadingPossible(); updateIsLoading(); } else { @@ -1566,15 +2004,16 @@ private void maybeUpdateLoadingPeriod() throws ExoPlaybackException, IOException } } - private void maybeUpdateReadingPeriod() throws ExoPlaybackException { - MediaPeriodHolder readingPeriodHolder = queue.getReadingPeriod(); + private void maybeUpdateReadingPeriod() { + @Nullable MediaPeriodHolder readingPeriodHolder = queue.getReadingPeriod(); if (readingPeriodHolder == null) { return; } - if (readingPeriodHolder.getNext() == null) { - // We don't have a successor to advance the reading period to. - if (readingPeriodHolder.info.isFinal) { + if (readingPeriodHolder.getNext() == null || pendingPauseAtEndOfPeriod) { + // We don't have a successor to advance the reading period to or we want to let them end + // intentionally to pause at the end of the period. + if (readingPeriodHolder.info.isFinal || pendingPauseAtEndOfPeriod) { for (int i = 0; i < renderers.length; i++) { Renderer renderer = renderers[i]; SampleStream sampleStream = readingPeriodHolder.sampleStreams[i]; @@ -1583,7 +2022,12 @@ private void maybeUpdateReadingPeriod() throws ExoPlaybackException { if (sampleStream != null && renderer.getStream() == sampleStream && renderer.hasReadStreamToEnd()) { - renderer.setCurrentStreamFinal(); + long streamEndPositionUs = + readingPeriodHolder.info.durationUs != C.TIME_UNSET + && readingPeriodHolder.info.durationUs != C.TIME_END_OF_SOURCE + ? readingPeriodHolder.getRendererOffset() + readingPeriodHolder.info.durationUs + : C.TIME_UNSET; + setCurrentStreamFinal(renderer, streamEndPositionUs); } } } @@ -1594,56 +2038,101 @@ private void maybeUpdateReadingPeriod() throws ExoPlaybackException { return; } - if (!readingPeriodHolder.getNext().prepared) { - // The successor is not prepared yet. + if (!readingPeriodHolder.getNext().prepared + && rendererPositionUs < readingPeriodHolder.getNext().getStartPositionRendererTime()) { + // The successor is not prepared yet and playback hasn't reached the transition point. 
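// Illustrative sketch (not from the patch): the stream-end position computed above is the
// period's renderer offset plus its duration, but only when the duration is actually known;
// otherwise it stays unset. The constants below are local stand-ins for C.TIME_UNSET and
// C.TIME_END_OF_SOURCE.
static final long TIME_UNSET_US = Long.MIN_VALUE + 1;
static final long TIME_END_OF_SOURCE_US = Long.MIN_VALUE;

static long streamEndPositionUs(long rendererOffsetUs, long periodDurationUs) {
  boolean durationKnown =
      periodDurationUs != TIME_UNSET_US && periodDurationUs != TIME_END_OF_SOURCE_US;
  return durationKnown ? rendererOffsetUs + periodDurationUs : TIME_UNSET_US;
}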
return; } + MediaPeriodHolder oldReadingPeriodHolder = readingPeriodHolder; TrackSelectorResult oldTrackSelectorResult = readingPeriodHolder.getTrackSelectorResult(); readingPeriodHolder = queue.advanceReadingPeriod(); TrackSelectorResult newTrackSelectorResult = readingPeriodHolder.getTrackSelectorResult(); - if (readingPeriodHolder.mediaPeriod.readDiscontinuity() != C.TIME_UNSET) { + updatePlaybackSpeedSettingsForNewPeriod( + /* newTimeline= */ playbackInfo.timeline, + /* newPeriodId= */ readingPeriodHolder.info.id, + /* oldTimeline= */ playbackInfo.timeline, + /* oldPeriodId= */ oldReadingPeriodHolder.info.id, + /* positionForTargetOffsetOverrideUs= */ C.TIME_UNSET); + + if (readingPeriodHolder.prepared + && readingPeriodHolder.mediaPeriod.readDiscontinuity() != C.TIME_UNSET) { // The new period starts with a discontinuity, so the renderers will play out all data, then // be disabled and re-enabled when they start playing the next period. - setAllRendererStreamsFinal(); + setAllRendererStreamsFinal( + /* streamEndPositionUs= */ readingPeriodHolder.getStartPositionRendererTime()); return; } for (int i = 0; i < renderers.length; i++) { - Renderer renderer = renderers[i]; - boolean rendererWasEnabled = oldTrackSelectorResult.isRendererEnabled(i); - if (rendererWasEnabled && !renderer.isCurrentStreamFinal()) { - // The renderer is enabled and its stream is not final, so we still have a chance to replace - // the sample streams. - TrackSelection newSelection = newTrackSelectorResult.selections.get(i); - boolean newRendererEnabled = newTrackSelectorResult.isRendererEnabled(i); + boolean oldRendererEnabled = oldTrackSelectorResult.isRendererEnabled(i); + boolean newRendererEnabled = newTrackSelectorResult.isRendererEnabled(i); + if (oldRendererEnabled && !renderers[i].isCurrentStreamFinal()) { boolean isNoSampleRenderer = rendererCapabilities[i].getTrackType() == C.TRACK_TYPE_NONE; RendererConfiguration oldConfig = oldTrackSelectorResult.rendererConfigurations[i]; RendererConfiguration newConfig = newTrackSelectorResult.rendererConfigurations[i]; - if (newRendererEnabled && newConfig.equals(oldConfig) && !isNoSampleRenderer) { - // Replace the renderer's SampleStream so the transition to playing the next period can - // be seamless. - // This should be avoided for no-sample renderer, because skipping ahead for such - // renderer doesn't have any benefit (the renderer does not consume the sample stream), - // and it will change the provided rendererOffsetUs while the renderer is still - // rendering from the playing media period. - Format[] formats = getFormats(newSelection); - renderer.replaceStream( - formats, - readingPeriodHolder.sampleStreams[i], - readingPeriodHolder.getRendererOffset()); - } else { + if (!newRendererEnabled || !newConfig.equals(oldConfig) || isNoSampleRenderer) { // The renderer will be disabled when transitioning to playing the next period, because // there's no new selection, or because a configuration change is required, or because // it's a no-sample renderer for which rendererOffsetUs should be updated only when // starting to play the next period. Mark the SampleStream as final to play out any // remaining data. 
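// Illustrative sketch (not from the patch): as the comment above explains, a renderer's
// SampleStream can only be swapped seamlessly at a period transition when the renderer stays
// enabled, its configuration is unchanged, and it is not a no-sample renderer; otherwise the
// current stream is marked final and the renderer is disabled and re-enabled later. The
// predicate below just names that condition; the parameters are illustrative.
static boolean canReplaceStreamSeamlessly(
    boolean enabledInNewSelection, boolean configUnchanged, boolean isNoSampleRenderer) {
  return enabledInNewSelection && configUnchanged && !isNoSampleRenderer;
}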
- renderer.setCurrentStreamFinal(); + setCurrentStreamFinal( + renderers[i], + /* streamEndPositionUs= */ readingPeriodHolder.getStartPositionRendererTime()); } } } } + private void maybeUpdateReadingRenderers() throws ExoPlaybackException { + @Nullable MediaPeriodHolder readingPeriod = queue.getReadingPeriod(); + if (readingPeriod == null + || queue.getPlayingPeriod() == readingPeriod + || readingPeriod.allRenderersInCorrectState) { + // Not reading ahead or all renderers updated. + return; + } + if (replaceStreamsOrDisableRendererForTransition()) { + enableRenderers(); + } + } + + private boolean replaceStreamsOrDisableRendererForTransition() throws ExoPlaybackException { + MediaPeriodHolder readingPeriodHolder = queue.getReadingPeriod(); + TrackSelectorResult newTrackSelectorResult = readingPeriodHolder.getTrackSelectorResult(); + boolean needsToWaitForRendererToEnd = false; + for (int i = 0; i < renderers.length; i++) { + Renderer renderer = renderers[i]; + if (!isRendererEnabled(renderer)) { + continue; + } + boolean rendererIsReadingOldStream = + renderer.getStream() != readingPeriodHolder.sampleStreams[i]; + boolean rendererShouldBeEnabled = newTrackSelectorResult.isRendererEnabled(i); + if (rendererShouldBeEnabled && !rendererIsReadingOldStream) { + // All done. + continue; + } + if (!renderer.isCurrentStreamFinal()) { + // The renderer stream is not final, so we can replace the sample streams immediately. + Format[] formats = getFormats(newTrackSelectorResult.selections[i]); + renderer.replaceStream( + formats, + readingPeriodHolder.sampleStreams[i], + readingPeriodHolder.getStartPositionRendererTime(), + readingPeriodHolder.getRendererOffset()); + } else if (renderer.isEnded()) { + // The renderer has finished playback, so we can disable it now. + disableRenderer(renderer); + } else { + // We need to wait until rendering finished before disabling the renderer. + needsToWaitForRendererToEnd = true; + } + } + return !needsToWaitForRendererToEnd; + } + private void maybeUpdatePlayingPeriod() throws ExoPlaybackException { boolean advancedPlayingPeriod = false; while (shouldAdvancePlayingPeriod()) { @@ -1651,31 +2140,38 @@ private void maybeUpdatePlayingPeriod() throws ExoPlaybackException { // If we advance more than one period at a time, notify listeners after each update. maybeNotifyPlaybackInfoChanged(); } - MediaPeriodHolder oldPlayingPeriodHolder = queue.getPlayingPeriod(); - if (oldPlayingPeriodHolder == queue.getReadingPeriod()) { - // The reading period hasn't advanced yet, so we can't seamlessly replace the SampleStreams - // anymore and need to re-enable the renderers. Set all current streams final to do that. - setAllRendererStreamsFinal(); - } - MediaPeriodHolder newPlayingPeriodHolder = queue.advancePlayingPeriod(); - updatePlayingPeriodRenderers(oldPlayingPeriodHolder); + MediaPeriodHolder newPlayingPeriodHolder = checkNotNull(queue.advancePlayingPeriod()); + boolean isCancelledSSAIAdTransition = + playbackInfo.periodId.periodUid.equals(newPlayingPeriodHolder.info.id.periodUid) + && playbackInfo.periodId.adGroupIndex == C.INDEX_UNSET + && newPlayingPeriodHolder.info.id.adGroupIndex == C.INDEX_UNSET + && playbackInfo.periodId.nextAdGroupIndex + != newPlayingPeriodHolder.info.id.nextAdGroupIndex; playbackInfo = - copyWithNewPosition( + handlePositionDiscontinuity( newPlayingPeriodHolder.info.id, newPlayingPeriodHolder.info.startPositionUs, - newPlayingPeriodHolder.info.contentPositionUs); - int discontinuityReason = - oldPlayingPeriodHolder.info.isLastInTimelinePeriod - ? 
Player.DISCONTINUITY_REASON_PERIOD_TRANSITION - : Player.DISCONTINUITY_REASON_AD_INSERTION; - playbackInfoUpdate.setPositionDiscontinuity(discontinuityReason); + newPlayingPeriodHolder.info.requestedContentPositionUs, + /* discontinuityStartPositionUs= */ newPlayingPeriodHolder.info.startPositionUs, + /* reportDiscontinuity= */ !isCancelledSSAIAdTransition, + Player.DISCONTINUITY_REASON_AUTO_TRANSITION); + resetPendingPauseAtEndOfPeriod(); updatePlaybackPositions(); advancedPlayingPeriod = true; } } + private void resetPendingPauseAtEndOfPeriod() { + @Nullable MediaPeriodHolder playingPeriod = queue.getPlayingPeriod(); + pendingPauseAtEndOfPeriod = + playingPeriod != null && playingPeriod.info.isLastInTimelineWindow && pauseAtEndOfWindow; + } + private boolean shouldAdvancePlayingPeriod() { - if (!playWhenReady) { + if (!shouldPlayWhenReady()) { + return false; + } + if (pendingPauseAtEndOfPeriod) { return false; } MediaPeriodHolder playingPeriodHolder = queue.getPlayingPeriod(); @@ -1683,14 +2179,9 @@ private boolean shouldAdvancePlayingPeriod() { return false; } MediaPeriodHolder nextPlayingPeriodHolder = playingPeriodHolder.getNext(); - if (nextPlayingPeriodHolder == null) { - return false; - } - MediaPeriodHolder readingPeriodHolder = queue.getReadingPeriod(); - if (playingPeriodHolder == readingPeriodHolder && !hasReadingPeriodFinishedReading()) { - return false; - } - return rendererPositionUs >= nextPlayingPeriodHolder.getStartPositionRendererTime(); + return nextPlayingPeriodHolder != null + && rendererPositionUs >= nextPlayingPeriodHolder.getStartPositionRendererTime() + && nextPlayingPeriodHolder.allRenderersInCorrectState; } private boolean hasReadingPeriodFinishedReading() { @@ -1702,7 +2193,9 @@ private boolean hasReadingPeriodFinishedReading() { Renderer renderer = renderers[i]; SampleStream sampleStream = readingPeriodHolder.sampleStreams[i]; if (renderer.getStream() != sampleStream - || (sampleStream != null && !renderer.hasReadStreamToEnd())) { + || (sampleStream != null + && !renderer.hasReadStreamToEnd() + && !hasReachedServerSideInsertedAdsTransition(renderer, readingPeriodHolder))) { // The current reading period is still being read by at least one renderer. return false; } @@ -1710,14 +2203,36 @@ private boolean hasReadingPeriodFinishedReading() { return true; } - private void setAllRendererStreamsFinal() { + private boolean hasReachedServerSideInsertedAdsTransition( + Renderer renderer, MediaPeriodHolder reading) { + MediaPeriodHolder nextPeriod = reading.getNext(); + // We can advance the reading period early once we read beyond the transition point in a + // server-side inserted ads stream because we know the samples are read from the same underlying + // stream. This shortcut is helpful in case the transition point moved and renderers already + // read beyond the new transition point. But wait until the next period is actually prepared to + // allow a seamless transition. 
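// Illustrative sketch (not from the patch): as described above, for server-side inserted ad
// streams the reading period may advance before every renderer has read its stream to the end,
// because the next period reads from the same underlying stream. The predicate below mirrors
// the shape of the check that follows, ignoring the text/metadata renderer special case;
// parameter names are illustrative.
static boolean reachedSameStreamTransition(
    boolean followedByTransitionToSameStream,
    boolean nextPeriodPrepared,
    long rendererReadingPositionUs,
    long nextPeriodStartRendererTimeUs) {
  return followedByTransitionToSameStream
      && nextPeriodPrepared
      && rendererReadingPositionUs >= nextPeriodStartRendererTimeUs;
}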
+ return reading.info.isFollowedByTransitionToSameStream + && nextPeriod.prepared + && (renderer instanceof TextRenderer // [internal: b/181312195] + || renderer instanceof MetadataRenderer + || renderer.getReadingPositionUs() >= nextPeriod.getStartPositionRendererTime()); + } + + private void setAllRendererStreamsFinal(long streamEndPositionUs) { for (Renderer renderer : renderers) { if (renderer.getStream() != null) { - renderer.setCurrentStreamFinal(); + setCurrentStreamFinal(renderer, streamEndPositionUs); } } } + private void setCurrentStreamFinal(Renderer renderer, long streamEndPositionUs) { + renderer.setCurrentStreamFinal(); + if (renderer instanceof TextRenderer) { + ((TextRenderer) renderer).setFinalStreamEndPositionUs(streamEndPositionUs); + } + } + private void handlePeriodPrepared(MediaPeriod mediaPeriod) throws ExoPlaybackException { if (!queue.isLoading(mediaPeriod)) { // Stale event. @@ -1731,7 +2246,15 @@ private void handlePeriodPrepared(MediaPeriod mediaPeriod) throws ExoPlaybackExc if (loadingPeriodHolder == queue.getPlayingPeriod()) { // This is the first prepared period, so update the position and the renderers. resetRendererPosition(loadingPeriodHolder.info.startPositionUs); - updatePlayingPeriodRenderers(/* oldPlayingPeriodHolder= */ null); + enableRenderers(); + playbackInfo = + handlePositionDiscontinuity( + playbackInfo.periodId, + loadingPeriodHolder.info.startPositionUs, + playbackInfo.requestedContentPositionUs, + loadingPeriodHolder.info.startPositionUs, + /* reportDiscontinuity= */ false, + /* ignored */ Player.DISCONTINUITY_REASON_INTERNAL); } maybeContinueLoading(); } @@ -1748,14 +2271,30 @@ private void handleContinueLoadingRequested(MediaPeriod mediaPeriod) { private void handlePlaybackParameters( PlaybackParameters playbackParameters, boolean acknowledgeCommand) throws ExoPlaybackException { - eventHandler - .obtainMessage( - MSG_PLAYBACK_PARAMETERS_CHANGED, acknowledgeCommand ? 1 : 0, 0, playbackParameters) - .sendToTarget(); + handlePlaybackParameters( + playbackParameters, + playbackParameters.speed, + /* updatePlaybackInfo= */ true, + acknowledgeCommand); + } + + private void handlePlaybackParameters( + PlaybackParameters playbackParameters, + float currentPlaybackSpeed, + boolean updatePlaybackInfo, + boolean acknowledgeCommand) + throws ExoPlaybackException { + if (updatePlaybackInfo) { + if (acknowledgeCommand) { + playbackInfoUpdate.incrementPendingOperationAcks(1); + } + playbackInfo = playbackInfo.copyWithPlaybackParameters(playbackParameters); + } updateTrackSelectionPlaybackSpeed(playbackParameters.speed); for (Renderer renderer : renderers) { if (renderer != null) { - renderer.setOperatingRate(playbackParameters.speed); + renderer.setPlaybackSpeed( + currentPlaybackSpeed, /* targetPlaybackSpeed= */ playbackParameters.speed); } } } @@ -1772,10 +2311,31 @@ private boolean shouldContinueLoading() { if (!isLoadingPossible()) { return false; } + MediaPeriodHolder loadingPeriodHolder = queue.getLoadingPeriod(); long bufferedDurationUs = - getTotalBufferedDurationUs(queue.getLoadingPeriod().getNextLoadPositionUs()); - float playbackSpeed = mediaClock.getPlaybackParameters().speed; - return loadControl.shouldContinueLoading(bufferedDurationUs, playbackSpeed); + getTotalBufferedDurationUs(loadingPeriodHolder.getNextLoadPositionUs()); + long playbackPositionUs = + loadingPeriodHolder == queue.getPlayingPeriod() + ? 
loadingPeriodHolder.toPeriodTime(rendererPositionUs) + : loadingPeriodHolder.toPeriodTime(rendererPositionUs) + - loadingPeriodHolder.info.startPositionUs; + boolean shouldContinueLoading = + loadControl.shouldContinueLoading( + playbackPositionUs, bufferedDurationUs, mediaClock.getPlaybackParameters().speed); + if (!shouldContinueLoading + && bufferedDurationUs < PLAYBACK_BUFFER_EMPTY_THRESHOLD_US + && (backBufferDurationUs > 0 || retainBackBufferFromKeyframe)) { + // LoadControl doesn't want to continue loading despite no buffered data. Clear back buffer + // and try again in case it's blocked on memory usage of the back buffer. + queue + .getPlayingPeriod() + .mediaPeriod + .discardBuffer(playbackInfo.positionUs, /* toKeyframe= */ false); + shouldContinueLoading = + loadControl.shouldContinueLoading( + playbackPositionUs, bufferedDurationUs, mediaClock.getPlaybackParameters().speed); + } + return shouldContinueLoading; } private boolean isLoadingPossible() { @@ -1799,94 +2359,147 @@ private void updateIsLoading() { } } - private PlaybackInfo copyWithNewPosition( - MediaPeriodId mediaPeriodId, long positionUs, long contentPositionUs) { - deliverPendingMessageAtStartPositionRequired = true; - return playbackInfo.copyWithNewPosition( - mediaPeriodId, positionUs, contentPositionUs, getTotalBufferedDurationUs()); - } - - @SuppressWarnings("ParameterNotNullable") - private void updatePlayingPeriodRenderers(@Nullable MediaPeriodHolder oldPlayingPeriodHolder) - throws ExoPlaybackException { - MediaPeriodHolder newPlayingPeriodHolder = queue.getPlayingPeriod(); - if (newPlayingPeriodHolder == null || oldPlayingPeriodHolder == newPlayingPeriodHolder) { - return; + @CheckResult + private PlaybackInfo handlePositionDiscontinuity( + MediaPeriodId mediaPeriodId, + long positionUs, + long requestedContentPositionUs, + long discontinuityStartPositionUs, + boolean reportDiscontinuity, + @DiscontinuityReason int discontinuityReason) { + deliverPendingMessageAtStartPositionRequired = + deliverPendingMessageAtStartPositionRequired + || positionUs != playbackInfo.positionUs + || !mediaPeriodId.equals(playbackInfo.periodId); + resetPendingPauseAtEndOfPeriod(); + TrackGroupArray trackGroupArray = playbackInfo.trackGroups; + TrackSelectorResult trackSelectorResult = playbackInfo.trackSelectorResult; + List staticMetadata = playbackInfo.staticMetadata; + if (mediaSourceList.isPrepared()) { + @Nullable MediaPeriodHolder playingPeriodHolder = queue.getPlayingPeriod(); + trackGroupArray = + playingPeriodHolder == null + ? TrackGroupArray.EMPTY + : playingPeriodHolder.getTrackGroups(); + trackSelectorResult = + playingPeriodHolder == null + ? emptyTrackSelectorResult + : playingPeriodHolder.getTrackSelectorResult(); + staticMetadata = extractMetadataFromTrackSelectionArray(trackSelectorResult.selections); + // Ensure the media period queue requested content position matches the new playback info. + if (playingPeriodHolder != null + && playingPeriodHolder.info.requestedContentPositionUs != requestedContentPositionUs) { + playingPeriodHolder.info = + playingPeriodHolder.info.copyWithRequestedContentPositionUs(requestedContentPositionUs); + } + } else if (!mediaPeriodId.equals(playbackInfo.periodId)) { + // Reset previously kept track info if unprepared and the period changes. 
+ trackGroupArray = TrackGroupArray.EMPTY; + trackSelectorResult = emptyTrackSelectorResult; + staticMetadata = ImmutableList.of(); + } + if (reportDiscontinuity) { + playbackInfoUpdate.setPositionDiscontinuity(discontinuityReason); } - int enabledRendererCount = 0; - boolean[] rendererWasEnabledFlags = new boolean[renderers.length]; - for (int i = 0; i < renderers.length; i++) { - Renderer renderer = renderers[i]; - rendererWasEnabledFlags[i] = renderer.getState() != Renderer.STATE_DISABLED; - if (newPlayingPeriodHolder.getTrackSelectorResult().isRendererEnabled(i)) { - enabledRendererCount++; - } - if (rendererWasEnabledFlags[i] - && (!newPlayingPeriodHolder.getTrackSelectorResult().isRendererEnabled(i) - || (renderer.isCurrentStreamFinal() - && renderer.getStream() == oldPlayingPeriodHolder.sampleStreams[i]))) { - // The renderer should be disabled before playing the next period, either because it's not - // needed to play the next period, or because we need to re-enable it as its current stream - // is final and it's not reading ahead. - disableRenderer(renderer); + return playbackInfo.copyWithNewPosition( + mediaPeriodId, + positionUs, + requestedContentPositionUs, + discontinuityStartPositionUs, + getTotalBufferedDurationUs(), + trackGroupArray, + trackSelectorResult, + staticMetadata); + } + + private ImmutableList extractMetadataFromTrackSelectionArray( + ExoTrackSelection[] trackSelections) { + ImmutableList.Builder result = new ImmutableList.Builder<>(); + boolean seenNonEmptyMetadata = false; + for (ExoTrackSelection trackSelection : trackSelections) { + if (trackSelection != null) { + Format format = trackSelection.getFormat(/* index= */ 0); + if (format.metadata == null) { + result.add(new Metadata()); + } else { + result.add(format.metadata); + seenNonEmptyMetadata = true; + } } } - playbackInfo = - playbackInfo.copyWithTrackInfo( - newPlayingPeriodHolder.getTrackGroups(), - newPlayingPeriodHolder.getTrackSelectorResult()); - enableRenderers(rendererWasEnabledFlags, enabledRendererCount); + return seenNonEmptyMetadata ? result.build() : ImmutableList.of(); } - private void enableRenderers(boolean[] rendererWasEnabledFlags, int totalEnabledRendererCount) - throws ExoPlaybackException { - enabledRenderers = new Renderer[totalEnabledRendererCount]; - int enabledRendererCount = 0; - TrackSelectorResult trackSelectorResult = queue.getPlayingPeriod().getTrackSelectorResult(); + private void enableRenderers() throws ExoPlaybackException { + enableRenderers(/* rendererWasEnabledFlags= */ new boolean[renderers.length]); + } + + private void enableRenderers(boolean[] rendererWasEnabledFlags) throws ExoPlaybackException { + MediaPeriodHolder readingMediaPeriod = queue.getReadingPeriod(); + TrackSelectorResult trackSelectorResult = readingMediaPeriod.getTrackSelectorResult(); // Reset all disabled renderers before enabling any new ones. This makes sure resources released // by the disabled renderers will be available to renderers that are being enabled. for (int i = 0; i < renderers.length; i++) { - if (!trackSelectorResult.isRendererEnabled(i)) { + if (!trackSelectorResult.isRendererEnabled(i) && renderersToReset.remove(renderers[i])) { renderers[i].reset(); } } // Enable the renderers. 
for (int i = 0; i < renderers.length; i++) { if (trackSelectorResult.isRendererEnabled(i)) { - enableRenderer(i, rendererWasEnabledFlags[i], enabledRendererCount++); + enableRenderer(i, rendererWasEnabledFlags[i]); } } + readingMediaPeriod.allRenderersInCorrectState = true; } - private void enableRenderer( - int rendererIndex, boolean wasRendererEnabled, int enabledRendererIndex) + private void enableRenderer(int rendererIndex, boolean wasRendererEnabled) throws ExoPlaybackException { - MediaPeriodHolder playingPeriodHolder = queue.getPlayingPeriod(); Renderer renderer = renderers[rendererIndex]; - enabledRenderers[enabledRendererIndex] = renderer; - if (renderer.getState() == Renderer.STATE_DISABLED) { - TrackSelectorResult trackSelectorResult = playingPeriodHolder.getTrackSelectorResult(); - RendererConfiguration rendererConfiguration = - trackSelectorResult.rendererConfigurations[rendererIndex]; - TrackSelection newSelection = trackSelectorResult.selections.get(rendererIndex); - Format[] formats = getFormats(newSelection); - // The renderer needs enabling with its new track selection. - boolean playing = playWhenReady && playbackInfo.playbackState == Player.STATE_READY; - // Consider as joining only if the renderer was previously disabled. - boolean joining = !wasRendererEnabled && playing; - // Enable the renderer. - renderer.enable( - rendererConfiguration, - formats, - playingPeriodHolder.sampleStreams[rendererIndex], - rendererPositionUs, - joining, - playingPeriodHolder.getRendererOffset()); - mediaClock.onRendererEnabled(renderer); - // Start the renderer if playing. - if (playing) { - renderer.start(); - } + if (isRendererEnabled(renderer)) { + return; + } + MediaPeriodHolder periodHolder = queue.getReadingPeriod(); + boolean mayRenderStartOfStream = periodHolder == queue.getPlayingPeriod(); + TrackSelectorResult trackSelectorResult = periodHolder.getTrackSelectorResult(); + RendererConfiguration rendererConfiguration = + trackSelectorResult.rendererConfigurations[rendererIndex]; + ExoTrackSelection newSelection = trackSelectorResult.selections[rendererIndex]; + Format[] formats = getFormats(newSelection); + // The renderer needs enabling with its new track selection. + boolean playing = shouldPlayWhenReady() && playbackInfo.playbackState == Player.STATE_READY; + // Consider as joining only if the renderer was previously disabled. + boolean joining = !wasRendererEnabled && playing; + // Enable the renderer. + enabledRendererCount++; + renderersToReset.add(renderer); + renderer.enable( + rendererConfiguration, + formats, + periodHolder.sampleStreams[rendererIndex], + rendererPositionUs, + joining, + mayRenderStartOfStream, + periodHolder.getStartPositionRendererTime(), + periodHolder.getRendererOffset()); + renderer.handleMessage( + Renderer.MSG_SET_WAKEUP_LISTENER, + new Renderer.WakeupListener() { + @Override + public void onSleep() { + requestForRendererSleep = true; + } + + @Override + public void onWakeup() { + handler.sendEmptyMessage(MSG_DO_SOME_WORK); + } + }); + + mediaClock.onRendererEnabled(renderer); + // Start the renderer if playing. 
+ if (playing) { + renderer.start(); } } @@ -1924,7 +2537,7 @@ private long getTotalBufferedDurationUs(long bufferedPositionInLoadingPeriodUs) } long totalBufferedDurationUs = bufferedPositionInLoadingPeriodUs - loadingPeriodHolder.toPeriodTime(rendererPositionUs); - return Math.max(0, totalBufferedDurationUs); + return max(0, totalBufferedDurationUs); } private void updateLoadControlTrackSelection( @@ -1932,18 +2545,437 @@ private void updateLoadControlTrackSelection( loadControl.onTracksSelected(renderers, trackGroups, trackSelectorResult.selections); } - private void sendPlaybackParametersChangedInternal( - PlaybackParameters playbackParameters, boolean acknowledgeCommand) { - handler - .obtainMessage( - MSG_PLAYBACK_PARAMETERS_CHANGED_INTERNAL, - acknowledgeCommand ? 1 : 0, - 0, - playbackParameters) - .sendToTarget(); + private boolean shouldPlayWhenReady() { + return playbackInfo.playWhenReady + && playbackInfo.playbackSuppressionReason == Player.PLAYBACK_SUPPRESSION_REASON_NONE; + } + + private static PositionUpdateForPlaylistChange resolvePositionForPlaylistChange( + Timeline timeline, + PlaybackInfo playbackInfo, + @Nullable SeekPosition pendingInitialSeekPosition, + MediaPeriodQueue queue, + @RepeatMode int repeatMode, + boolean shuffleModeEnabled, + Timeline.Window window, + Timeline.Period period) { + if (timeline.isEmpty()) { + return new PositionUpdateForPlaylistChange( + PlaybackInfo.getDummyPeriodForEmptyTimeline(), + /* periodPositionUs= */ 0, + /* requestedContentPositionUs= */ C.TIME_UNSET, + /* forceBufferingState= */ false, + /* endPlayback= */ true, + /* setTargetLiveOffset= */ false); + } + MediaPeriodId oldPeriodId = playbackInfo.periodId; + Object newPeriodUid = oldPeriodId.periodUid; + boolean isUsingPlaceholderPeriod = isUsingPlaceholderPeriod(playbackInfo, period); + long oldContentPositionUs = + playbackInfo.periodId.isAd() || isUsingPlaceholderPeriod + ? playbackInfo.requestedContentPositionUs + : playbackInfo.positionUs; + long newContentPositionUs = oldContentPositionUs; + int startAtDefaultPositionWindowIndex = C.INDEX_UNSET; + boolean forceBufferingState = false; + boolean endPlayback = false; + boolean setTargetLiveOffset = false; + if (pendingInitialSeekPosition != null) { + // Resolve initial seek position. + @Nullable + Pair periodPosition = + resolveSeekPositionUs( + timeline, + pendingInitialSeekPosition, + /* trySubsequentPeriods= */ true, + repeatMode, + shuffleModeEnabled, + window, + period); + if (periodPosition == null) { + // The initial seek in the empty old timeline is invalid in the new timeline. + endPlayback = true; + startAtDefaultPositionWindowIndex = timeline.getFirstWindowIndex(shuffleModeEnabled); + } else { + // The pending seek has been resolved successfully in the new timeline. + if (pendingInitialSeekPosition.windowPositionUs == C.TIME_UNSET) { + startAtDefaultPositionWindowIndex = + timeline.getPeriodByUid(periodPosition.first, period).windowIndex; + } else { + newPeriodUid = periodPosition.first; + newContentPositionUs = periodPosition.second; + // Use explicit initial seek as new target live offset. + setTargetLiveOffset = true; + } + forceBufferingState = playbackInfo.playbackState == Player.STATE_ENDED; + } + } else if (playbackInfo.timeline.isEmpty()) { + // Resolve to default position if the old timeline is empty and no seek is requested above. 
+ startAtDefaultPositionWindowIndex = timeline.getFirstWindowIndex(shuffleModeEnabled); + } else if (timeline.getIndexOfPeriod(newPeriodUid) == C.INDEX_UNSET) { + // The current period isn't in the new timeline. Attempt to resolve a subsequent period whose + // window we can restart from. + @Nullable + Object subsequentPeriodUid = + resolveSubsequentPeriod( + window, + period, + repeatMode, + shuffleModeEnabled, + newPeriodUid, + playbackInfo.timeline, + timeline); + if (subsequentPeriodUid == null) { + // We failed to resolve a suitable restart position but the timeline is not empty. + endPlayback = true; + startAtDefaultPositionWindowIndex = timeline.getFirstWindowIndex(shuffleModeEnabled); + } else { + // We resolved a subsequent period. Start at the default position in the corresponding + // window. + startAtDefaultPositionWindowIndex = + timeline.getPeriodByUid(subsequentPeriodUid, period).windowIndex; + } + } else if (oldContentPositionUs == C.TIME_UNSET) { + // The content was requested to start from its default position and we haven't used the + // resolved position yet. Re-resolve in case the default position changed. + startAtDefaultPositionWindowIndex = timeline.getPeriodByUid(newPeriodUid, period).windowIndex; + } else if (isUsingPlaceholderPeriod) { + // We previously requested a content position for a placeholder period, but haven't used it + // yet. Re-resolve the requested window position to the period position in case it changed. + playbackInfo.timeline.getPeriodByUid(oldPeriodId.periodUid, period); + if (playbackInfo.timeline.getWindow(period.windowIndex, window).firstPeriodIndex + == playbackInfo.timeline.getIndexOfPeriod(oldPeriodId.periodUid)) { + // Only need to resolve the first period in a window because subsequent periods must start + // at position 0 and don't need to be resolved. + long windowPositionUs = oldContentPositionUs + period.getPositionInWindowUs(); + int windowIndex = timeline.getPeriodByUid(newPeriodUid, period).windowIndex; + Pair periodPositionUs = + timeline.getPeriodPositionUs(window, period, windowIndex, windowPositionUs); + newPeriodUid = periodPositionUs.first; + newContentPositionUs = periodPositionUs.second; + } + // Use an explicitly requested content position as new target live offset. + setTargetLiveOffset = true; + } + + // Set period uid for default positions and resolve position for ad resolution. + long contentPositionForAdResolutionUs = newContentPositionUs; + if (startAtDefaultPositionWindowIndex != C.INDEX_UNSET) { + Pair defaultPositionUs = + timeline.getPeriodPositionUs( + window, + period, + startAtDefaultPositionWindowIndex, + /* windowPositionUs= */ C.TIME_UNSET); + newPeriodUid = defaultPositionUs.first; + contentPositionForAdResolutionUs = defaultPositionUs.second; + newContentPositionUs = C.TIME_UNSET; + } + + // Ensure ad insertion metadata is up to date. + MediaPeriodId periodIdWithAds = + queue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange( + timeline, newPeriodUid, contentPositionForAdResolutionUs); + boolean earliestCuePointIsUnchangedOrLater = + periodIdWithAds.nextAdGroupIndex == C.INDEX_UNSET + || (oldPeriodId.nextAdGroupIndex != C.INDEX_UNSET + && periodIdWithAds.nextAdGroupIndex >= oldPeriodId.nextAdGroupIndex); + // Drop update if we keep playing the same content (MediaPeriod.periodUid are identical) and + // the only change is that MediaPeriodId.nextAdGroupIndex increased. This postpones a potential + // discontinuity until we reach the former next ad group position. 
+ boolean sameOldAndNewPeriodUid = oldPeriodId.periodUid.equals(newPeriodUid); + boolean onlyNextAdGroupIndexIncreased = + sameOldAndNewPeriodUid + && !oldPeriodId.isAd() + && !periodIdWithAds.isAd() + && earliestCuePointIsUnchangedOrLater; + // Drop update if the change is from/to server-side inserted ads at the same content position to + // avoid any unintentional renderer reset. + boolean isInStreamAdChange = + isIgnorableServerSideAdInsertionPeriodChange( + isUsingPlaceholderPeriod, + oldPeriodId, + oldContentPositionUs, + periodIdWithAds, + timeline.getPeriodByUid(newPeriodUid, period), + newContentPositionUs); + MediaPeriodId newPeriodId = + onlyNextAdGroupIndexIncreased || isInStreamAdChange ? oldPeriodId : periodIdWithAds; + + long periodPositionUs = contentPositionForAdResolutionUs; + if (newPeriodId.isAd()) { + if (newPeriodId.equals(oldPeriodId)) { + periodPositionUs = playbackInfo.positionUs; + } else { + timeline.getPeriodByUid(newPeriodId.periodUid, period); + periodPositionUs = + newPeriodId.adIndexInAdGroup == period.getFirstAdIndexToPlay(newPeriodId.adGroupIndex) + ? period.getAdResumePositionUs() + : 0; + } + } + + return new PositionUpdateForPlaylistChange( + newPeriodId, + periodPositionUs, + newContentPositionUs, + forceBufferingState, + endPlayback, + setTargetLiveOffset); + } + + private static boolean isIgnorableServerSideAdInsertionPeriodChange( + boolean isUsingPlaceholderPeriod, + MediaPeriodId oldPeriodId, + long oldContentPositionUs, + MediaPeriodId newPeriodId, + Timeline.Period newPeriod, + long newContentPositionUs) { + if (isUsingPlaceholderPeriod + || oldContentPositionUs != newContentPositionUs + || !oldPeriodId.periodUid.equals(newPeriodId.periodUid)) { + // The period position changed. + return false; + } + if (oldPeriodId.isAd() && newPeriod.isServerSideInsertedAdGroup(oldPeriodId.adGroupIndex)) { + // Whether the old period was a server side ad that doesn't need skipping to the content. + return newPeriod.getAdState(oldPeriodId.adGroupIndex, oldPeriodId.adIndexInAdGroup) + != AdPlaybackState.AD_STATE_ERROR + && newPeriod.getAdState(oldPeriodId.adGroupIndex, oldPeriodId.adIndexInAdGroup) + != AdPlaybackState.AD_STATE_SKIPPED; + } + // If the new period is a server side inserted ad, we can just continue playing. + return newPeriodId.isAd() && newPeriod.isServerSideInsertedAdGroup(newPeriodId.adGroupIndex); + } + + private static boolean isUsingPlaceholderPeriod( + PlaybackInfo playbackInfo, Timeline.Period period) { + MediaPeriodId periodId = playbackInfo.periodId; + Timeline timeline = playbackInfo.timeline; + return timeline.isEmpty() || timeline.getPeriodByUid(periodId.periodUid, period).isPlaceholder; + } + + /** + * Updates pending message to a new timeline. + * + * @param pendingMessageInfo The pending message. + * @param newTimeline The new timeline. + * @param previousTimeline The previous timeline used to set the message positions. + * @param repeatMode The current repeat mode. + * @param shuffleModeEnabled The current shuffle mode. + * @param window A scratch window. + * @param period A scratch period. + * @return Whether the message position could be resolved to the current timeline. + */ + private static boolean resolvePendingMessagePosition( + PendingMessageInfo pendingMessageInfo, + Timeline newTimeline, + Timeline previousTimeline, + @Player.RepeatMode int repeatMode, + boolean shuffleModeEnabled, + Timeline.Window window, + Timeline.Period period) { + if (pendingMessageInfo.resolvedPeriodUid == null) { + // Position is still unresolved. 
Try to find window in new timeline. + long requestPositionUs = + pendingMessageInfo.message.getPositionMs() == C.TIME_END_OF_SOURCE + ? C.TIME_UNSET + : Util.msToUs(pendingMessageInfo.message.getPositionMs()); + @Nullable + Pair periodPosition = + resolveSeekPositionUs( + newTimeline, + new SeekPosition( + pendingMessageInfo.message.getTimeline(), + pendingMessageInfo.message.getMediaItemIndex(), + requestPositionUs), + /* trySubsequentPeriods= */ false, + repeatMode, + shuffleModeEnabled, + window, + period); + if (periodPosition == null) { + return false; + } + pendingMessageInfo.setResolvedPosition( + /* periodIndex= */ newTimeline.getIndexOfPeriod(periodPosition.first), + /* periodTimeUs= */ periodPosition.second, + /* periodUid= */ periodPosition.first); + if (pendingMessageInfo.message.getPositionMs() == C.TIME_END_OF_SOURCE) { + resolvePendingMessageEndOfStreamPosition(newTimeline, pendingMessageInfo, window, period); + } + return true; + } + // Position has been resolved for a previous timeline. Try to find the updated period index. + int index = newTimeline.getIndexOfPeriod(pendingMessageInfo.resolvedPeriodUid); + if (index == C.INDEX_UNSET) { + return false; + } + if (pendingMessageInfo.message.getPositionMs() == C.TIME_END_OF_SOURCE) { + // Re-resolve end of stream in case the duration changed. + resolvePendingMessageEndOfStreamPosition(newTimeline, pendingMessageInfo, window, period); + return true; + } + pendingMessageInfo.resolvedPeriodIndex = index; + previousTimeline.getPeriodByUid(pendingMessageInfo.resolvedPeriodUid, period); + if (period.isPlaceholder + && previousTimeline.getWindow(period.windowIndex, window).firstPeriodIndex + == previousTimeline.getIndexOfPeriod(pendingMessageInfo.resolvedPeriodUid)) { + // The position needs to be re-resolved because the window in the previous timeline wasn't + // fully prepared. Only resolve the first period in a window because subsequent periods must + // start at position 0 and don't need to be resolved. + long windowPositionUs = + pendingMessageInfo.resolvedPeriodTimeUs + period.getPositionInWindowUs(); + int windowIndex = + newTimeline.getPeriodByUid(pendingMessageInfo.resolvedPeriodUid, period).windowIndex; + Pair periodPositionUs = + newTimeline.getPeriodPositionUs(window, period, windowIndex, windowPositionUs); + pendingMessageInfo.setResolvedPosition( + /* periodIndex= */ newTimeline.getIndexOfPeriod(periodPositionUs.first), + /* periodTimeUs= */ periodPositionUs.second, + /* periodUid= */ periodPositionUs.first); + } + return true; + } + + private static void resolvePendingMessageEndOfStreamPosition( + Timeline timeline, + PendingMessageInfo messageInfo, + Timeline.Window window, + Timeline.Period period) { + int windowIndex = timeline.getPeriodByUid(messageInfo.resolvedPeriodUid, period).windowIndex; + int lastPeriodIndex = timeline.getWindow(windowIndex, window).lastPeriodIndex; + Object lastPeriodUid = timeline.getPeriod(lastPeriodIndex, period, /* setIds= */ true).uid; + long positionUs = period.durationUs != C.TIME_UNSET ? period.durationUs - 1 : Long.MAX_VALUE; + messageInfo.setResolvedPosition(lastPeriodIndex, positionUs, lastPeriodUid); + } + + /** + * Converts a {@link SeekPosition} into the corresponding (periodUid, periodPositionUs) for the + * internal timeline. + * + * @param seekPosition The position to resolve. + * @param trySubsequentPeriods Whether the position can be resolved to a subsequent matching + * period if the original period is no longer available. 
+ * @return The resolved position, or null if resolution was not successful. + * @throws IllegalSeekPositionException If the window index of the seek position is outside the + * bounds of the timeline. + */ + @Nullable + private static Pair resolveSeekPositionUs( + Timeline timeline, + SeekPosition seekPosition, + boolean trySubsequentPeriods, + @RepeatMode int repeatMode, + boolean shuffleModeEnabled, + Timeline.Window window, + Timeline.Period period) { + Timeline seekTimeline = seekPosition.timeline; + if (timeline.isEmpty()) { + // We don't have a valid timeline yet, so we can't resolve the position. + return null; + } + if (seekTimeline.isEmpty()) { + // The application performed a blind seek with an empty timeline (most likely based on + // knowledge of what the future timeline will be). Use the internal timeline. + seekTimeline = timeline; + } + // Map the SeekPosition to a position in the corresponding timeline. + Pair periodPositionUs; + try { + periodPositionUs = + seekTimeline.getPeriodPositionUs( + window, period, seekPosition.windowIndex, seekPosition.windowPositionUs); + } catch (IndexOutOfBoundsException e) { + // The window index of the seek position was outside the bounds of the timeline. + return null; + } + if (timeline.equals(seekTimeline)) { + // Our internal timeline is the seek timeline, so the mapped position is correct. + return periodPositionUs; + } + // Attempt to find the mapped period in the internal timeline. + int periodIndex = timeline.getIndexOfPeriod(periodPositionUs.first); + if (periodIndex != C.INDEX_UNSET) { + // We successfully located the period in the internal timeline. + if (seekTimeline.getPeriodByUid(periodPositionUs.first, period).isPlaceholder + && seekTimeline.getWindow(period.windowIndex, window).firstPeriodIndex + == seekTimeline.getIndexOfPeriod(periodPositionUs.first)) { + // The seek timeline was using a placeholder, so we need to re-resolve using the updated + // timeline in case the resolved position changed. Only resolve the first period in a window + // because subsequent periods must start at position 0 and don't need to be resolved. + int newWindowIndex = timeline.getPeriodByUid(periodPositionUs.first, period).windowIndex; + periodPositionUs = + timeline.getPeriodPositionUs( + window, period, newWindowIndex, seekPosition.windowPositionUs); + } + return periodPositionUs; + } + if (trySubsequentPeriods) { + // Try and find a subsequent period from the seek timeline in the internal timeline. + @Nullable + Object periodUid = + resolveSubsequentPeriod( + window, + period, + repeatMode, + shuffleModeEnabled, + periodPositionUs.first, + seekTimeline, + timeline); + if (periodUid != null) { + // We found one. Use the default position of the corresponding window. + return timeline.getPeriodPositionUs( + window, + period, + timeline.getPeriodByUid(periodUid, period).windowIndex, + /* windowPositionUs= */ C.TIME_UNSET); + } + } + // We didn't find one. Give up. + return null; + } + + /** + * Given a period index into an old timeline, finds the first subsequent period that also exists + * in a new timeline. The uid of this period in the new timeline is returned. + * + * @param window A {@link Timeline.Window} to be used internally. + * @param period A {@link Timeline.Period} to be used internally. + * @param repeatMode The repeat mode to use. + * @param shuffleModeEnabled Whether the shuffle mode is enabled. + * @param oldPeriodUid The index of the period in the old timeline. + * @param oldTimeline The old timeline. 
+ * @param newTimeline The new timeline. + * @return The uid in the new timeline of the first subsequent period, or null if no such period + * was found. + */ + /* package */ @Nullable + static Object resolveSubsequentPeriod( + Timeline.Window window, + Timeline.Period period, + @Player.RepeatMode int repeatMode, + boolean shuffleModeEnabled, + Object oldPeriodUid, + Timeline oldTimeline, + Timeline newTimeline) { + int oldPeriodIndex = oldTimeline.getIndexOfPeriod(oldPeriodUid); + int newPeriodIndex = C.INDEX_UNSET; + int maxIterations = oldTimeline.getPeriodCount(); + for (int i = 0; i < maxIterations && newPeriodIndex == C.INDEX_UNSET; i++) { + oldPeriodIndex = + oldTimeline.getNextPeriodIndex( + oldPeriodIndex, period, window, repeatMode, shuffleModeEnabled); + if (oldPeriodIndex == C.INDEX_UNSET) { + // We've reached the end of the old timeline. + break; + } + newPeriodIndex = newTimeline.getIndexOfPeriod(oldTimeline.getUidOfPeriod(oldPeriodIndex)); + } + return newPeriodIndex == C.INDEX_UNSET ? null : newTimeline.getUidOfPeriod(newPeriodIndex); } - private static Format[] getFormats(TrackSelection newSelection) { + private static Format[] getFormats(ExoTrackSelection newSelection) { // Build an array of formats contained by the selection. int length = newSelection != null ? newSelection.length() : 0; Format[] formats = new Format[length]; @@ -1953,6 +2985,10 @@ private static Format[] getFormats(TrackSelection newSelection) { return formats; } + private static boolean isRendererEnabled(Renderer renderer) { + return renderer.getState() != Renderer.STATE_DISABLED; + } + private static final class SeekPosition { public final Timeline timeline; @@ -1966,6 +3002,30 @@ public SeekPosition(Timeline timeline, int windowIndex, long windowPositionUs) { } } + private static final class PositionUpdateForPlaylistChange { + public final MediaPeriodId periodId; + public final long periodPositionUs; + public final long requestedContentPositionUs; + public final boolean forceBufferingState; + public final boolean endPlayback; + public final boolean setTargetLiveOffset; + + public PositionUpdateForPlaylistChange( + MediaPeriodId periodId, + long periodPositionUs, + long requestedContentPositionUs, + boolean forceBufferingState, + boolean endPlayback, + boolean setTargetLiveOffset) { + this.periodId = periodId; + this.periodPositionUs = periodPositionUs; + this.requestedContentPositionUs = requestedContentPositionUs; + this.forceBufferingState = forceBufferingState; + this.endPlayback = endPlayback; + this.setTargetLiveOffset = setTargetLiveOffset; + } + } + private static final class PendingMessageInfo implements Comparable { public final PlayerMessage message; @@ -2003,49 +3063,38 @@ public int compareTo(PendingMessageInfo other) { } } - private static final class MediaSourceRefreshInfo { + private static final class MediaSourceListUpdateMessage { - public final MediaSource source; - public final Timeline timeline; + private final List mediaSourceHolders; + private final ShuffleOrder shuffleOrder; + private final int windowIndex; + private final long positionUs; - public MediaSourceRefreshInfo(MediaSource source, Timeline timeline) { - this.source = source; - this.timeline = timeline; + private MediaSourceListUpdateMessage( + List mediaSourceHolders, + ShuffleOrder shuffleOrder, + int windowIndex, + long positionUs) { + this.mediaSourceHolders = mediaSourceHolders; + this.shuffleOrder = shuffleOrder; + this.windowIndex = windowIndex; + this.positionUs = positionUs; } } - private static final class 
PlaybackInfoUpdate { - - private PlaybackInfo lastPlaybackInfo; - private int operationAcks; - private boolean positionDiscontinuity; - private @DiscontinuityReason int discontinuityReason; + private static class MoveMediaItemsMessage { - public boolean hasPendingUpdate(PlaybackInfo playbackInfo) { - return playbackInfo != lastPlaybackInfo || operationAcks > 0 || positionDiscontinuity; - } - - public void reset(PlaybackInfo playbackInfo) { - lastPlaybackInfo = playbackInfo; - operationAcks = 0; - positionDiscontinuity = false; - } - - public void incrementPendingOperationAcks(int operationAcks) { - this.operationAcks += operationAcks; - } + public final int fromIndex; + public final int toIndex; + public final int newFromIndex; + public final ShuffleOrder shuffleOrder; - public void setPositionDiscontinuity(@DiscontinuityReason int discontinuityReason) { - if (positionDiscontinuity - && this.discontinuityReason != Player.DISCONTINUITY_REASON_INTERNAL) { - // We always prefer non-internal discontinuity reasons. We also assume that we won't report - // more than one non-internal discontinuity per message iteration. - Assertions.checkArgument(discontinuityReason == Player.DISCONTINUITY_REASON_INTERNAL); - return; - } - positionDiscontinuity = true; - this.discontinuityReason = discontinuityReason; + public MoveMediaItemsMessage( + int fromIndex, int toIndex, int newFromIndex, ShuffleOrder shuffleOrder) { + this.fromIndex = fromIndex; + this.toIndex = toIndex; + this.newFromIndex = newFromIndex; + this.shuffleOrder = shuffleOrder; } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayerLibraryInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayerLibraryInfo.java index 35b6199cd3..735838d253 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayerLibraryInfo.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoPlayerLibraryInfo.java @@ -15,25 +15,23 @@ */ package com.google.android.exoplayer2; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.TraceUtil; import java.util.HashSet; -/** - * Information about the ExoPlayer library. - */ +/** Information about the media libraries. */ public final class ExoPlayerLibraryInfo { - /** - * A tag to use when logging library information. - */ - public static final String TAG = "ExoPlayer"; + /** A tag to use when logging library information. */ + public static final String TAG = "ExoPlayerLib"; /** The version of the library expressed as a string, for example "1.2.3". */ // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION_INT) or vice versa. - public static final String VERSION = "2.11.7"; + public static final String VERSION = "2.18.3"; - /** The version of the library expressed as {@code "ExoPlayerLib/" + VERSION}. */ + /** The version of the library expressed as {@code TAG + "/" + VERSION}. */ // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa. - public static final String VERSION_SLASHY = "ExoPlayerLib/2.11.7"; + public static final String VERSION_SLASHY = "ExoPlayerLib/2.18.3"; /** * The version of the library expressed as an integer, for example 1002003. @@ -43,21 +41,12 @@ public final class ExoPlayerLibraryInfo { * integer version 123045006 (123-045-006). */ // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa. 
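// Illustrative sketch, not part of the upstream ExoPlayer source: the VERSION_INT scheme described
// above packs three decimal digits per version component, so the hardcoded values below can be
// cross-checked with a hypothetical helper such as
//
//   static int versionInt(int major, int minor, int bugfix) {
//     return major * 1_000_000 + minor * 1_000 + bugfix;
//   }
//
// versionInt(2, 11, 7) == 2_011_007 ("2.11.7", old), versionInt(2, 18, 3) == 2_018_003 ("2.18.3", new).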
- public static final int VERSION_INT = 2011007; + public static final int VERSION_INT = 2_018_003; - /** - * Whether the library was compiled with {@link com.google.android.exoplayer2.util.Assertions} - * checks enabled. - */ + /** Whether the library was compiled with {@link Assertions} checks enabled. */ public static final boolean ASSERTIONS_ENABLED = true; - /** Whether an exception should be thrown in case of an OpenGl error. */ - public static final boolean GL_ASSERTIONS_ENABLED = false; - - /** - * Whether the library was compiled with {@link com.google.android.exoplayer2.util.TraceUtil} - * trace enabled. - */ + /** Whether the library was compiled with {@link TraceUtil} trace enabled. */ public static final boolean TRACE_ENABLED = true; private static final HashSet registeredModules = new HashSet<>(); @@ -65,9 +54,7 @@ public final class ExoPlayerLibraryInfo { private ExoPlayerLibraryInfo() {} // Prevents instantiation. - /** - * Returns a string consisting of registered module names separated by ", ". - */ + /** Returns a string consisting of registered module names separated by ", ". */ public static synchronized String registeredModules() { return registeredModulesString; } @@ -82,5 +69,4 @@ public static synchronized void registerModule(String name) { registeredModulesString = registeredModulesString + ", " + name; } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoTimeoutException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoTimeoutException.java new file mode 100644 index 0000000000..c51a656793 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ExoTimeoutException.java @@ -0,0 +1,87 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + +import androidx.annotation.IntDef; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** A timeout of an operation on the ExoPlayer playback thread. */ +public final class ExoTimeoutException extends RuntimeException { + + /** + * The operation which produced the timeout error. One of {@link #TIMEOUT_OPERATION_RELEASE}, + * {@link #TIMEOUT_OPERATION_SET_FOREGROUND_MODE}, {@link #TIMEOUT_OPERATION_DETACH_SURFACE} or + * {@link #TIMEOUT_OPERATION_UNDEFINED}. Note that new operations may be added in the future and + * error handling should handle unknown operation values. + */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. 
+ @Documented + @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef({ + TIMEOUT_OPERATION_UNDEFINED, + TIMEOUT_OPERATION_RELEASE, + TIMEOUT_OPERATION_SET_FOREGROUND_MODE, + TIMEOUT_OPERATION_DETACH_SURFACE + }) + public @interface TimeoutOperation {} + + /** The operation where this error occurred is not defined. */ + public static final int TIMEOUT_OPERATION_UNDEFINED = 0; + /** The error occurred in {@link Player#release}. */ + public static final int TIMEOUT_OPERATION_RELEASE = 1; + /** The error occurred in {@link ExoPlayer#setForegroundMode}. */ + public static final int TIMEOUT_OPERATION_SET_FOREGROUND_MODE = 2; + /** The error occurred while detaching a surface from the player. */ + public static final int TIMEOUT_OPERATION_DETACH_SURFACE = 3; + + /** The operation on the ExoPlayer playback thread that timed out. */ + public final @TimeoutOperation int timeoutOperation; + + /** + * Creates the timeout exception. + * + * @param timeoutOperation The {@link TimeoutOperation operation} that produced the timeout. + */ + public ExoTimeoutException(@TimeoutOperation int timeoutOperation) { + super(getErrorMessage(timeoutOperation)); + this.timeoutOperation = timeoutOperation; + } + + private static String getErrorMessage(@TimeoutOperation int timeoutOperation) { + switch (timeoutOperation) { + case TIMEOUT_OPERATION_RELEASE: + return "Player release timed out."; + case TIMEOUT_OPERATION_SET_FOREGROUND_MODE: + return "Setting foreground mode timed out."; + case TIMEOUT_OPERATION_DETACH_SURFACE: + return "Detaching surface timed out."; + case TIMEOUT_OPERATION_UNDEFINED: + default: + return "Undefined timeout."; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/Format.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/Format.java index 19ed34405a..4e15771f9a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/Format.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/Format.java @@ -15,29 +15,666 @@ */ package com.google.android.exoplayer2; -import android.os.Parcel; -import android.os.Parcelable; +import android.os.Bundle; import androidx.annotation.Nullable; import com.google.android.exoplayer2.drm.DrmInitData; -import com.google.android.exoplayer2.drm.DrmSession; -import com.google.android.exoplayer2.drm.ExoMediaCrypto; import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.util.BundleableUtil; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.video.ColorInfo; +import com.google.common.base.Joiner; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Set; +import java.util.UUID; /** - * Representation of a media format. + * Represents a media format. + * + *

+ * <p>When building formats, populate all fields whose values are known and relevant to the type of
+ * format being constructed. For information about different types of format, see ExoPlayer's
+ * Supported formats page.
+ *
+ * <h2>Fields commonly relevant to all formats</h2>
+ *
+ * <ul>
+ *   <li>{@link #id}
+ *   <li>{@link #label}
+ *   <li>{@link #language}
+ *   <li>{@link #selectionFlags}
+ *   <li>{@link #roleFlags}
+ *   <li>{@link #averageBitrate}
+ *   <li>{@link #peakBitrate}
+ *   <li>{@link #codecs}
+ *   <li>{@link #metadata}
+ * </ul>
+ *

+ * <h2>Fields relevant to container formats</h2>
+ *
+ * <ul>
+ *   <li>{@link #containerMimeType}
+ *   <li>If the container only contains a single media track, fields relevant to sample formats are
+ *       also relevant and can be set to describe the sample format of that track.
+ *   <li>If the container only contains one track of a given type (possibly alongside tracks of
+ *       other types), then fields relevant to that track type can be set to describe the properties
+ *       of the track. See the sections below for video, audio and text formats.
+ * </ul>
+ *

+ * <h2>Fields relevant to sample formats</h2>
+ *
+ * <ul>
+ *   <li>{@link #sampleMimeType}
+ *   <li>{@link #maxInputSize}
+ *   <li>{@link #initializationData}
+ *   <li>{@link #drmInitData}
+ *   <li>{@link #subsampleOffsetUs}
+ *   <li>Fields relevant to the sample format's track type are also relevant. See the sections below
+ *       for video, audio and text formats.
+ * </ul>
+ *

+ * <h2>Fields relevant to video formats</h2>
+ *
+ * <ul>
+ *   <li>{@link #width}
+ *   <li>{@link #height}
+ *   <li>{@link #frameRate}
+ *   <li>{@link #rotationDegrees}
+ *   <li>{@link #pixelWidthHeightRatio}
+ *   <li>{@link #projectionData}
+ *   <li>{@link #stereoMode}
+ *   <li>{@link #colorInfo}
+ * </ul>
+ *

+ * <h2>Fields relevant to audio formats</h2>
+ *
+ * <ul>
+ *   <li>{@link #channelCount}
+ *   <li>{@link #sampleRate}
+ *   <li>{@link #pcmEncoding}
+ *   <li>{@link #encoderDelay}
+ *   <li>{@link #encoderPadding}
+ * </ul>
+ *

+ * <h2>Fields relevant to text formats</h2>
+ *
+ * <ul>
+ *   <li>{@link #accessibilityChannel}
+ * </ul>
+ *

+ * <h2>Fields relevant to image formats</h2>
+ *
+ * <ul>
+ *   <li>{@link #tileCountHorizontal}
+ *   <li>{@link #tileCountVertical}
+ * </ul>
  */
-public final class Format implements Parcelable {
+public final class Format implements Bundleable {
 
   /**
-   * A value for various fields to indicate that the field's value is unknown or not applicable.
+   * Builds {@link Format} instances.
+   *
+   * <p>Use {@link Format#buildUpon()} to obtain a builder representing an existing {@link Format}.
+   *
+   * <p>

      When building formats, populate all fields whose values are known and relevant to the type + * of format being constructed. See the {@link Format} Javadoc for information about which fields + * should be set for different types of format. */ + public static final class Builder { + + @Nullable private String id; + @Nullable private String label; + @Nullable private String language; + private @C.SelectionFlags int selectionFlags; + private @C.RoleFlags int roleFlags; + private int averageBitrate; + private int peakBitrate; + @Nullable private String codecs; + @Nullable private Metadata metadata; + + // Container specific. + + @Nullable private String containerMimeType; + + // Sample specific. + + @Nullable private String sampleMimeType; + private int maxInputSize; + @Nullable private List initializationData; + @Nullable private DrmInitData drmInitData; + private long subsampleOffsetUs; + + // Video specific. + + private int width; + private int height; + private float frameRate; + private int rotationDegrees; + private float pixelWidthHeightRatio; + @Nullable private byte[] projectionData; + private @C.StereoMode int stereoMode; + @Nullable private ColorInfo colorInfo; + + // Audio specific. + + private int channelCount; + private int sampleRate; + private @C.PcmEncoding int pcmEncoding; + private int encoderDelay; + private int encoderPadding; + + // Text specific. + + private int accessibilityChannel; + + // Image specific + + private int tileCountHorizontal; + private int tileCountVertical; + + // Provided by the source. + + private @C.CryptoType int cryptoType; + + /** Creates a new instance with default values. */ + public Builder() { + averageBitrate = NO_VALUE; + peakBitrate = NO_VALUE; + // Sample specific. + maxInputSize = NO_VALUE; + subsampleOffsetUs = OFFSET_SAMPLE_RELATIVE; + // Video specific. + width = NO_VALUE; + height = NO_VALUE; + frameRate = NO_VALUE; + pixelWidthHeightRatio = 1.0f; + stereoMode = NO_VALUE; + // Audio specific. + channelCount = NO_VALUE; + sampleRate = NO_VALUE; + pcmEncoding = NO_VALUE; + // Text specific. + accessibilityChannel = NO_VALUE; + // Image specific. + tileCountHorizontal = NO_VALUE; + tileCountVertical = NO_VALUE; + // Provided by the source. + cryptoType = C.CRYPTO_TYPE_NONE; + } + + /** + * Creates a new instance to build upon the provided {@link Format}. + * + * @param format The {@link Format} to build upon. + */ + private Builder(Format format) { + this.id = format.id; + this.label = format.label; + this.language = format.language; + this.selectionFlags = format.selectionFlags; + this.roleFlags = format.roleFlags; + this.averageBitrate = format.averageBitrate; + this.peakBitrate = format.peakBitrate; + this.codecs = format.codecs; + this.metadata = format.metadata; + // Container specific. + this.containerMimeType = format.containerMimeType; + // Sample specific. + this.sampleMimeType = format.sampleMimeType; + this.maxInputSize = format.maxInputSize; + this.initializationData = format.initializationData; + this.drmInitData = format.drmInitData; + this.subsampleOffsetUs = format.subsampleOffsetUs; + // Video specific. + this.width = format.width; + this.height = format.height; + this.frameRate = format.frameRate; + this.rotationDegrees = format.rotationDegrees; + this.pixelWidthHeightRatio = format.pixelWidthHeightRatio; + this.projectionData = format.projectionData; + this.stereoMode = format.stereoMode; + this.colorInfo = format.colorInfo; + // Audio specific. 
+ this.channelCount = format.channelCount; + this.sampleRate = format.sampleRate; + this.pcmEncoding = format.pcmEncoding; + this.encoderDelay = format.encoderDelay; + this.encoderPadding = format.encoderPadding; + // Text specific. + this.accessibilityChannel = format.accessibilityChannel; + // Image specific. + this.tileCountHorizontal = format.tileCountHorizontal; + this.tileCountVertical = format.tileCountVertical; + // Provided by the source. + this.cryptoType = format.cryptoType; + } + + /** + * Sets {@link Format#id}. The default value is {@code null}. + * + * @param id The {@link Format#id}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setId(@Nullable String id) { + this.id = id; + return this; + } + + /** + * Sets {@link Format#id} to {@link Integer#toString() Integer.toString(id)}. The default value + * is {@code null}. + * + * @param id The {@link Format#id}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setId(int id) { + this.id = Integer.toString(id); + return this; + } + + /** + * Sets {@link Format#label}. The default value is {@code null}. + * + * @param label The {@link Format#label}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setLabel(@Nullable String label) { + this.label = label; + return this; + } + + /** + * Sets {@link Format#language}. The default value is {@code null}. + * + * @param language The {@link Format#language}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setLanguage(@Nullable String language) { + this.language = language; + return this; + } + + /** + * Sets {@link Format#selectionFlags}. The default value is 0. + * + * @param selectionFlags The {@link Format#selectionFlags}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setSelectionFlags(@C.SelectionFlags int selectionFlags) { + this.selectionFlags = selectionFlags; + return this; + } + + /** + * Sets {@link Format#roleFlags}. The default value is 0. + * + * @param roleFlags The {@link Format#roleFlags}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setRoleFlags(@C.RoleFlags int roleFlags) { + this.roleFlags = roleFlags; + return this; + } + + /** + * Sets {@link Format#averageBitrate}. The default value is {@link #NO_VALUE}. + * + * @param averageBitrate The {@link Format#averageBitrate}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setAverageBitrate(int averageBitrate) { + this.averageBitrate = averageBitrate; + return this; + } + + /** + * Sets {@link Format#peakBitrate}. The default value is {@link #NO_VALUE}. + * + * @param peakBitrate The {@link Format#peakBitrate}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setPeakBitrate(int peakBitrate) { + this.peakBitrate = peakBitrate; + return this; + } + + /** + * Sets {@link Format#codecs}. The default value is {@code null}. + * + * @param codecs The {@link Format#codecs}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setCodecs(@Nullable String codecs) { + this.codecs = codecs; + return this; + } + + /** + * Sets {@link Format#metadata}. The default value is {@code null}. + * + * @param metadata The {@link Format#metadata}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setMetadata(@Nullable Metadata metadata) { + this.metadata = metadata; + return this; + } + + // Container specific. + + /** + * Sets {@link Format#containerMimeType}. The default value is {@code null}. 
+ * + * @param containerMimeType The {@link Format#containerMimeType}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setContainerMimeType(@Nullable String containerMimeType) { + this.containerMimeType = containerMimeType; + return this; + } + + // Sample specific. + + /** + * Sets {@link Format#sampleMimeType}. The default value is {@code null}. + * + * @param sampleMimeType {@link Format#sampleMimeType}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setSampleMimeType(@Nullable String sampleMimeType) { + this.sampleMimeType = sampleMimeType; + return this; + } + + /** + * Sets {@link Format#maxInputSize}. The default value is {@link #NO_VALUE}. + * + * @param maxInputSize The {@link Format#maxInputSize}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setMaxInputSize(int maxInputSize) { + this.maxInputSize = maxInputSize; + return this; + } + + /** + * Sets {@link Format#initializationData}. The default value is {@code null}. + * + * @param initializationData The {@link Format#initializationData}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setInitializationData(@Nullable List initializationData) { + this.initializationData = initializationData; + return this; + } + + /** + * Sets {@link Format#drmInitData}. The default value is {@code null}. + * + * @param drmInitData The {@link Format#drmInitData}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setDrmInitData(@Nullable DrmInitData drmInitData) { + this.drmInitData = drmInitData; + return this; + } + + /** + * Sets {@link Format#subsampleOffsetUs}. The default value is {@link #OFFSET_SAMPLE_RELATIVE}. + * + * @param subsampleOffsetUs The {@link Format#subsampleOffsetUs}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setSubsampleOffsetUs(long subsampleOffsetUs) { + this.subsampleOffsetUs = subsampleOffsetUs; + return this; + } + + // Video specific. + + /** + * Sets {@link Format#width}. The default value is {@link #NO_VALUE}. + * + * @param width The {@link Format#width}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setWidth(int width) { + this.width = width; + return this; + } + + /** + * Sets {@link Format#height}. The default value is {@link #NO_VALUE}. + * + * @param height The {@link Format#height}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setHeight(int height) { + this.height = height; + return this; + } + + /** + * Sets {@link Format#frameRate}. The default value is {@link #NO_VALUE}. + * + * @param frameRate The {@link Format#frameRate}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setFrameRate(float frameRate) { + this.frameRate = frameRate; + return this; + } + + /** + * Sets {@link Format#rotationDegrees}. The default value is 0. + * + * @param rotationDegrees The {@link Format#rotationDegrees}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setRotationDegrees(int rotationDegrees) { + this.rotationDegrees = rotationDegrees; + return this; + } + + /** + * Sets {@link Format#pixelWidthHeightRatio}. The default value is 1.0f. + * + * @param pixelWidthHeightRatio The {@link Format#pixelWidthHeightRatio}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setPixelWidthHeightRatio(float pixelWidthHeightRatio) { + this.pixelWidthHeightRatio = pixelWidthHeightRatio; + return this; + } + + /** + * Sets {@link Format#projectionData}. The default value is {@code null}. 
+ * + * @param projectionData The {@link Format#projectionData}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setProjectionData(@Nullable byte[] projectionData) { + this.projectionData = projectionData; + return this; + } + + /** + * Sets {@link Format#stereoMode}. The default value is {@link #NO_VALUE}. + * + * @param stereoMode The {@link Format#stereoMode}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setStereoMode(@C.StereoMode int stereoMode) { + this.stereoMode = stereoMode; + return this; + } + + /** + * Sets {@link Format#colorInfo}. The default value is {@code null}. + * + * @param colorInfo The {@link Format#colorInfo}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setColorInfo(@Nullable ColorInfo colorInfo) { + this.colorInfo = colorInfo; + return this; + } + + // Audio specific. + + /** + * Sets {@link Format#channelCount}. The default value is {@link #NO_VALUE}. + * + * @param channelCount The {@link Format#channelCount}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setChannelCount(int channelCount) { + this.channelCount = channelCount; + return this; + } + + /** + * Sets {@link Format#sampleRate}. The default value is {@link #NO_VALUE}. + * + * @param sampleRate The {@link Format#sampleRate}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setSampleRate(int sampleRate) { + this.sampleRate = sampleRate; + return this; + } + + /** + * Sets {@link Format#pcmEncoding}. The default value is {@link #NO_VALUE}. + * + * @param pcmEncoding The {@link Format#pcmEncoding}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setPcmEncoding(@C.PcmEncoding int pcmEncoding) { + this.pcmEncoding = pcmEncoding; + return this; + } + + /** + * Sets {@link Format#encoderDelay}. The default value is 0. + * + * @param encoderDelay The {@link Format#encoderDelay}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setEncoderDelay(int encoderDelay) { + this.encoderDelay = encoderDelay; + return this; + } + + /** + * Sets {@link Format#encoderPadding}. The default value is 0. + * + * @param encoderPadding The {@link Format#encoderPadding}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setEncoderPadding(int encoderPadding) { + this.encoderPadding = encoderPadding; + return this; + } + + // Text specific. + + /** + * Sets {@link Format#accessibilityChannel}. The default value is {@link #NO_VALUE}. + * + * @param accessibilityChannel The {@link Format#accessibilityChannel}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setAccessibilityChannel(int accessibilityChannel) { + this.accessibilityChannel = accessibilityChannel; + return this; + } + + // Image specific. + + /** + * Sets {@link Format#tileCountHorizontal}. The default value is {@link #NO_VALUE}. + * + * @param tileCountHorizontal The {@link Format#accessibilityChannel}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setTileCountHorizontal(int tileCountHorizontal) { + this.tileCountHorizontal = tileCountHorizontal; + return this; + } + + /** + * Sets {@link Format#tileCountVertical}. The default value is {@link #NO_VALUE}. + * + * @param tileCountVertical The {@link Format#accessibilityChannel}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setTileCountVertical(int tileCountVertical) { + this.tileCountVertical = tileCountVertical; + return this; + } + + // Provided by source. 
+ + /** + * Sets {@link Format#cryptoType}. The default value is {@link C#CRYPTO_TYPE_NONE}. + * + * @param cryptoType The {@link C.CryptoType}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setCryptoType(@C.CryptoType int cryptoType) { + this.cryptoType = cryptoType; + return this; + } + + // Build. + + public Format build() { + return new Format(/* builder= */ this); + } + } + + /** A value for various fields to indicate that the field's value is unknown or not applicable. */ public static final int NO_VALUE = -1; /** @@ -46,16 +683,63 @@ public final class Format implements Parcelable { */ public static final long OFFSET_SAMPLE_RELATIVE = Long.MAX_VALUE; + private static final Format DEFAULT = new Builder().build(); + /** An identifier for the format, or null if unknown or not applicable. */ @Nullable public final String id; /** The human readable label, or null if unknown or not applicable. */ @Nullable public final String label; + /** The language as an IETF BCP 47 conformant tag, or null if unknown or not applicable. */ + @Nullable public final String language; /** Track selection flags. */ - @C.SelectionFlags public final int selectionFlags; + public final @C.SelectionFlags int selectionFlags; /** Track role flags. */ - @C.RoleFlags public final int roleFlags; + public final @C.RoleFlags int roleFlags; /** - * The average bandwidth in bits per second, or {@link #NO_VALUE} if unknown or not applicable. + * The average bitrate in bits per second, or {@link #NO_VALUE} if unknown or not applicable. The + * way in which this field is populated depends on the type of media to which the format + * corresponds: + * + *

+ * <ul>
+ *   <li>DASH representations: Always {@link Format#NO_VALUE}.
+ *   <li>HLS variants: The {@code AVERAGE-BANDWIDTH} attribute defined on the corresponding {@code
+ *       EXT-X-STREAM-INF} tag in the multivariant playlist, or {@link Format#NO_VALUE} if not
+ *       present.
+ *   <li>SmoothStreaming track elements: The {@code Bitrate} attribute defined on the
+ *       corresponding {@code TrackElement} in the manifest, or {@link Format#NO_VALUE} if not
+ *       present.
+ *   <li>Progressive container formats: Often {@link Format#NO_VALUE}, but may be populated with
+ *       the average bitrate of the container if known.
+ *   <li>Sample formats: Often {@link Format#NO_VALUE}, but may be populated with the average
+ *       bitrate of the stream of samples with type {@link #sampleMimeType} if known. Note that if
+ *       {@link #sampleMimeType} is a compressed format (e.g., {@link MimeTypes#AUDIO_AAC}), then
+ *       this bitrate is for the stream of still compressed samples.
+ * </ul>
+ */
+ public final int averageBitrate;
+ /**
+ * The peak bitrate in bits per second, or {@link #NO_VALUE} if unknown or not applicable. The way
+ * in which this field is populated depends on the type of media to which the format corresponds:
+ *
+ * <ul>
+ *   <li>DASH representations: The {@code @bandwidth} attribute of the corresponding {@code
+ *       Representation} element in the manifest.
+ *   <li>HLS variants: The {@code BANDWIDTH} attribute defined on the corresponding {@code
+ *       EXT-X-STREAM-INF} tag.
+ *   <li>SmoothStreaming track elements: Always {@link Format#NO_VALUE}.
+ *   <li>Progressive container formats: Often {@link Format#NO_VALUE}, but may be populated with
+ *       the peak bitrate of the container if known.
+ *   <li>Sample formats: Often {@link Format#NO_VALUE}, but may be populated with the peak bitrate
+ *       of the stream of samples with type {@link #sampleMimeType} if known. Note that if {@link
+ *       #sampleMimeType} is a compressed format (e.g., {@link MimeTypes#AUDIO_AAC}), then this
+ *       bitrate is for the stream of still compressed samples.
+ * </ul>
      + */ + public final int peakBitrate; + /** + * The bitrate in bits per second. This is the peak bitrate if known, or else the average bitrate + * if known, or else {@link Format#NO_VALUE}. Equivalent to: {@code peakBitrate != NO_VALUE ? + * peakBitrate : averageBitrate}. */ public final int bitrate; /** Codecs of the format as described in RFC 6381, or null if unknown or not applicable. */ @@ -68,12 +752,9 @@ public final class Format implements Parcelable { /** The mime type of the container, or null if unknown or not applicable. */ @Nullable public final String containerMimeType; - // Elementary stream specific. + // Sample specific. - /** - * The mime type of the elementary stream (i.e. the individual samples), or null if unknown or not - * applicable. - */ + /** The sample mime type, or null if unknown or not applicable. */ @Nullable public final String sampleMimeType; /** * The maximum size of a buffer of data (typically one sample), or {@link #NO_VALUE} if unknown or @@ -81,8 +762,8 @@ public final class Format implements Parcelable { */ public final int maxInputSize; /** - * Initialization data that must be provided to the decoder. Will not be null, but may be empty - * if initialization data is not required. + * Initialization data that must be provided to the decoder. Will not be null, but may be empty if + * initialization data is not required. */ public final List initializationData; /** DRM initialization data if the stream is protected, or null otherwise. */ @@ -97,17 +778,11 @@ public final class Format implements Parcelable { // Video specific. - /** - * The width of the video in pixels, or {@link #NO_VALUE} if unknown or not applicable. - */ + /** The width of the video in pixels, or {@link #NO_VALUE} if unknown or not applicable. */ public final int width; - /** - * The height of the video in pixels, or {@link #NO_VALUE} if unknown or not applicable. - */ + /** The height of the video in pixels, or {@link #NO_VALUE} if unknown or not applicable. */ public final int height; - /** - * The frame rate in frames per second, or {@link #NO_VALUE} if unknown or not applicable. - */ + /** The frame rate in frames per second, or {@link #NO_VALUE} if unknown or not applicable. */ public final float frameRate; /** * The clockwise rotation that should be applied to the video for it to be rendered in the correct @@ -116,27 +791,22 @@ public final class Format implements Parcelable { public final int rotationDegrees; /** The width to height ratio of pixels in the video, or 1.0 if unknown or not applicable. */ public final float pixelWidthHeightRatio; + /** The projection data for 360/VR video, or null if not applicable. */ + @Nullable public final byte[] projectionData; /** * The stereo layout for 360/3D/VR video, or {@link #NO_VALUE} if not applicable. Valid stereo * modes are {@link C#STEREO_MODE_MONO}, {@link C#STEREO_MODE_TOP_BOTTOM}, {@link * C#STEREO_MODE_LEFT_RIGHT}, {@link C#STEREO_MODE_STEREO_MESH}. */ - @C.StereoMode - public final int stereoMode; - /** The projection data for 360/VR video, or null if not applicable. */ - @Nullable public final byte[] projectionData; - /** The color metadata associated with the video, helps with accurate color reproduction. */ + public final @C.StereoMode int stereoMode; + /** The color metadata associated with the video, or null if not applicable. */ @Nullable public final ColorInfo colorInfo; // Audio specific. - /** - * The number of audio channels, or {@link #NO_VALUE} if unknown or not applicable. 
- */ + /** The number of audio channels, or {@link #NO_VALUE} if unknown or not applicable. */ public final int channelCount; - /** - * The audio sampling rate in Hz, or {@link #NO_VALUE} if unknown or not applicable. - */ + /** The audio sampling rate in Hz, or {@link #NO_VALUE} if unknown or not applicable. */ public final int sampleRate; /** The {@link C.PcmEncoding} for PCM audio. Set to {@link #NO_VALUE} for other media types. */ public final @C.PcmEncoding int pcmEncoding; @@ -150,23 +820,29 @@ public final class Format implements Parcelable { */ public final int encoderPadding; - // Audio and text specific. + // Text specific. + + /** The Accessibility channel, or {@link #NO_VALUE} if not known or applicable. */ + public final int accessibilityChannel; + + // Image specific. - /** The language as an IETF BCP 47 conformant tag, or null if unknown or not applicable. */ - @Nullable public final String language; /** - * The Accessibility channel, or {@link #NO_VALUE} if not known or applicable. + * The number of horizontal tiles in an image, or {@link #NO_VALUE} if not known or applicable. */ - public final int accessibilityChannel; + public final int tileCountHorizontal; + /** The number of vertical tiles in an image, or {@link #NO_VALUE} if not known or applicable. */ + public final int tileCountVertical; // Provided by source. /** - * The type of the {@link ExoMediaCrypto} provided by the media source, if the media source can - * acquire a {@link DrmSession} for {@link #drmInitData}. Null if the media source cannot acquire - * a session for {@link #drmInitData}, or if not applicable. + * The type of crypto that must be used to decode samples associated with this format, or {@link + * C#CRYPTO_TYPE_NONE} if the content is not encrypted. Cannot be {@link C#CRYPTO_TYPE_NONE} if + * {@link #drmInitData} is non-null, but may be {@link C#CRYPTO_TYPE_UNSUPPORTED} to indicate that + * the samples are encrypted using an unsupported crypto type. */ - @Nullable public final Class exoMediaCryptoType; + public final @C.CryptoType int cryptoType; // Lazily initialized hashcode. private int hashCode; @@ -174,83 +850,9 @@ public final class Format implements Parcelable { // Video. /** - * @deprecated Use {@link #createVideoContainerFormat(String, String, String, String, String, - * Metadata, int, int, int, float, List, int, int)} instead. + * @deprecated Use {@link Format.Builder}. 
*/ @Deprecated - public static Format createVideoContainerFormat( - @Nullable String id, - @Nullable String containerMimeType, - @Nullable String sampleMimeType, - @Nullable String codecs, - int bitrate, - int width, - int height, - float frameRate, - @Nullable List initializationData, - @C.SelectionFlags int selectionFlags) { - return createVideoContainerFormat( - id, - /* label= */ null, - containerMimeType, - sampleMimeType, - codecs, - /* metadata= */ null, - bitrate, - width, - height, - frameRate, - initializationData, - selectionFlags, - /* roleFlags= */ 0); - } - - public static Format createVideoContainerFormat( - @Nullable String id, - @Nullable String label, - @Nullable String containerMimeType, - @Nullable String sampleMimeType, - @Nullable String codecs, - @Nullable Metadata metadata, - int bitrate, - int width, - int height, - float frameRate, - @Nullable List initializationData, - @C.SelectionFlags int selectionFlags, - @C.RoleFlags int roleFlags) { - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - metadata, - containerMimeType, - sampleMimeType, - /* maxInputSize= */ NO_VALUE, - initializationData, - /* drmInitData= */ null, - OFFSET_SAMPLE_RELATIVE, - width, - height, - frameRate, - /* rotationDegrees= */ NO_VALUE, - /* pixelWidthHeightRatio= */ NO_VALUE, - /* projectionData= */ null, - /* stereoMode= */ NO_VALUE, - /* colorInfo= */ null, - /* channelCount= */ NO_VALUE, - /* sampleRate= */ NO_VALUE, - /* pcmEncoding= */ NO_VALUE, - /* encoderDelay= */ NO_VALUE, - /* encoderPadding= */ NO_VALUE, - /* language= */ null, - /* accessibilityChannel= */ NO_VALUE, - /* exoMediaCryptoType= */ null); - } - public static Format createVideoSampleFormat( @Nullable String id, @Nullable String sampleMimeType, @@ -262,52 +864,25 @@ public static Format createVideoSampleFormat( float frameRate, @Nullable List initializationData, @Nullable DrmInitData drmInitData) { - return createVideoSampleFormat( - id, - sampleMimeType, - codecs, - bitrate, - maxInputSize, - width, - height, - frameRate, - initializationData, - /* rotationDegrees= */ NO_VALUE, - /* pixelWidthHeightRatio= */ NO_VALUE, - drmInitData); - } - - public static Format createVideoSampleFormat( - @Nullable String id, - @Nullable String sampleMimeType, - @Nullable String codecs, - int bitrate, - int maxInputSize, - int width, - int height, - float frameRate, - @Nullable List initializationData, - int rotationDegrees, - float pixelWidthHeightRatio, - @Nullable DrmInitData drmInitData) { - return createVideoSampleFormat( - id, - sampleMimeType, - codecs, - bitrate, - maxInputSize, - width, - height, - frameRate, - initializationData, - rotationDegrees, - pixelWidthHeightRatio, - /* projectionData= */ null, - /* stereoMode= */ NO_VALUE, - /* colorInfo= */ null, - drmInitData); + return new Builder() + .setId(id) + .setAverageBitrate(bitrate) + .setPeakBitrate(bitrate) + .setCodecs(codecs) + .setSampleMimeType(sampleMimeType) + .setMaxInputSize(maxInputSize) + .setInitializationData(initializationData) + .setDrmInitData(drmInitData) + .setWidth(width) + .setHeight(height) + .setFrameRate(frameRate) + .build(); } + /** + * @deprecated Use {@link Format.Builder}. 
+ */ + @Deprecated public static Format createVideoSampleFormat( @Nullable String id, @Nullable String sampleMimeType, @@ -320,122 +895,30 @@ public static Format createVideoSampleFormat( @Nullable List initializationData, int rotationDegrees, float pixelWidthHeightRatio, - @Nullable byte[] projectionData, - @C.StereoMode int stereoMode, - @Nullable ColorInfo colorInfo, @Nullable DrmInitData drmInitData) { - return new Format( - id, - /* label= */ null, - /* selectionFlags= */ 0, - /* roleFlags= */ 0, - bitrate, - codecs, - /* metadata= */ null, - /* containerMimeType= */ null, - sampleMimeType, - maxInputSize, - initializationData, - drmInitData, - OFFSET_SAMPLE_RELATIVE, - width, - height, - frameRate, - rotationDegrees, - pixelWidthHeightRatio, - projectionData, - stereoMode, - colorInfo, - /* channelCount= */ NO_VALUE, - /* sampleRate= */ NO_VALUE, - /* pcmEncoding= */ NO_VALUE, - /* encoderDelay= */ NO_VALUE, - /* encoderPadding= */ NO_VALUE, - /* language= */ null, - /* accessibilityChannel= */ NO_VALUE, - /* exoMediaCryptoType= */ null); + return new Builder() + .setId(id) + .setAverageBitrate(bitrate) + .setPeakBitrate(bitrate) + .setCodecs(codecs) + .setSampleMimeType(sampleMimeType) + .setMaxInputSize(maxInputSize) + .setInitializationData(initializationData) + .setDrmInitData(drmInitData) + .setWidth(width) + .setHeight(height) + .setFrameRate(frameRate) + .setRotationDegrees(rotationDegrees) + .setPixelWidthHeightRatio(pixelWidthHeightRatio) + .build(); } // Audio. /** - * @deprecated Use {@link #createAudioContainerFormat(String, String, String, String, String, - * Metadata, int, int, int, List, int, int, String)} instead. + * @deprecated Use {@link Format.Builder}. */ @Deprecated - public static Format createAudioContainerFormat( - @Nullable String id, - @Nullable String containerMimeType, - @Nullable String sampleMimeType, - @Nullable String codecs, - int bitrate, - int channelCount, - int sampleRate, - @Nullable List initializationData, - @C.SelectionFlags int selectionFlags, - @Nullable String language) { - return createAudioContainerFormat( - id, - /* label= */ null, - containerMimeType, - sampleMimeType, - codecs, - /* metadata= */ null, - bitrate, - channelCount, - sampleRate, - initializationData, - selectionFlags, - /* roleFlags= */ 0, - language); - } - - public static Format createAudioContainerFormat( - @Nullable String id, - @Nullable String label, - @Nullable String containerMimeType, - @Nullable String sampleMimeType, - @Nullable String codecs, - @Nullable Metadata metadata, - int bitrate, - int channelCount, - int sampleRate, - @Nullable List initializationData, - @C.SelectionFlags int selectionFlags, - @C.RoleFlags int roleFlags, - @Nullable String language) { - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - metadata, - containerMimeType, - sampleMimeType, - /* maxInputSize= */ NO_VALUE, - initializationData, - /* drmInitData= */ null, - OFFSET_SAMPLE_RELATIVE, - /* width= */ NO_VALUE, - /* height= */ NO_VALUE, - /* frameRate= */ NO_VALUE, - /* rotationDegrees= */ NO_VALUE, - /* pixelWidthHeightRatio= */ NO_VALUE, - /* projectionData= */ null, - /* stereoMode= */ NO_VALUE, - /* colorInfo= */ null, - channelCount, - sampleRate, - /* pcmEncoding= */ NO_VALUE, - /* encoderDelay= */ NO_VALUE, - /* encoderPadding= */ NO_VALUE, - language, - /* accessibilityChannel= */ NO_VALUE, - /* exoMediaCryptoType= */ null); - } - public static Format createAudioSampleFormat( @Nullable String id, @Nullable String sampleMimeType, @@ 
-448,52 +931,26 @@ public static Format createAudioSampleFormat( @Nullable DrmInitData drmInitData, @C.SelectionFlags int selectionFlags, @Nullable String language) { - return createAudioSampleFormat( - id, - sampleMimeType, - codecs, - bitrate, - maxInputSize, - channelCount, - sampleRate, - /* pcmEncoding= */ NO_VALUE, - initializationData, - drmInitData, - selectionFlags, - language); - } - - public static Format createAudioSampleFormat( - @Nullable String id, - @Nullable String sampleMimeType, - @Nullable String codecs, - int bitrate, - int maxInputSize, - int channelCount, - int sampleRate, - @C.PcmEncoding int pcmEncoding, - @Nullable List initializationData, - @Nullable DrmInitData drmInitData, - @C.SelectionFlags int selectionFlags, - @Nullable String language) { - return createAudioSampleFormat( - id, - sampleMimeType, - codecs, - bitrate, - maxInputSize, - channelCount, - sampleRate, - pcmEncoding, - /* encoderDelay= */ NO_VALUE, - /* encoderPadding= */ NO_VALUE, - initializationData, - drmInitData, - selectionFlags, - language, - /* metadata= */ null); + return new Builder() + .setId(id) + .setLanguage(language) + .setSelectionFlags(selectionFlags) + .setAverageBitrate(bitrate) + .setPeakBitrate(bitrate) + .setCodecs(codecs) + .setSampleMimeType(sampleMimeType) + .setMaxInputSize(maxInputSize) + .setInitializationData(initializationData) + .setDrmInitData(drmInitData) + .setChannelCount(channelCount) + .setSampleRate(sampleRate) + .build(); } + /** + * @deprecated Use {@link Format.Builder}. + */ + @Deprecated public static Format createAudioSampleFormat( @Nullable String id, @Nullable String sampleMimeType, @@ -503,297 +960,33 @@ public static Format createAudioSampleFormat( int channelCount, int sampleRate, @C.PcmEncoding int pcmEncoding, - int encoderDelay, - int encoderPadding, @Nullable List initializationData, @Nullable DrmInitData drmInitData, @C.SelectionFlags int selectionFlags, - @Nullable String language, - @Nullable Metadata metadata) { - return new Format( - id, - /* label= */ null, - selectionFlags, - /* roleFlags= */ 0, - bitrate, - codecs, - metadata, - /* containerMimeType= */ null, - sampleMimeType, - maxInputSize, - initializationData, - drmInitData, - OFFSET_SAMPLE_RELATIVE, - /* width= */ NO_VALUE, - /* height= */ NO_VALUE, - /* frameRate= */ NO_VALUE, - /* rotationDegrees= */ NO_VALUE, - /* pixelWidthHeightRatio= */ NO_VALUE, - /* projectionData= */ null, - /* stereoMode= */ NO_VALUE, - /* colorInfo= */ null, - channelCount, - sampleRate, - pcmEncoding, - encoderDelay, - encoderPadding, - language, - /* accessibilityChannel= */ NO_VALUE, - /* exoMediaCryptoType= */ null); - } - - // Text. 
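To make the factory-method removals above easier to follow, this is a rough sketch of the Builder chain that a call to the removed audio factory overload maps onto after this change, assuming the com.google.android.exoplayer2 package; the concrete values are illustrative only. Note that the old single bitrate argument now populates both averageBitrate and peakBitrate, matching the deprecated shims kept in this diff:

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.MimeTypes;

final class AudioFormatMigrationSketch {
  // Roughly what a call to the removed createAudioSampleFormat overload maps to
  // after this change (illustrative values, not taken from the patch).
  static Format stereoAacFormat() {
    return new Format.Builder()
        .setId("audio/1")                       // arbitrary track id
        .setLanguage("en")
        .setSelectionFlags(C.SELECTION_FLAG_DEFAULT)
        .setAverageBitrate(128_000)             // old single bitrate fills both fields
        .setPeakBitrate(128_000)
        .setSampleMimeType(MimeTypes.AUDIO_AAC) // "audio/mp4a-latm"
        .setChannelCount(2)
        .setSampleRate(44_100)
        .build();
  }
}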
- - public static Format createTextContainerFormat( - @Nullable String id, - @Nullable String label, - @Nullable String containerMimeType, - @Nullable String sampleMimeType, - @Nullable String codecs, - int bitrate, - @C.SelectionFlags int selectionFlags, - @C.RoleFlags int roleFlags, - @Nullable String language) { - return createTextContainerFormat( - id, - label, - containerMimeType, - sampleMimeType, - codecs, - bitrate, - selectionFlags, - roleFlags, - language, - /* accessibilityChannel= */ NO_VALUE); - } - - public static Format createTextContainerFormat( - @Nullable String id, - @Nullable String label, - @Nullable String containerMimeType, - @Nullable String sampleMimeType, - @Nullable String codecs, - int bitrate, - @C.SelectionFlags int selectionFlags, - @C.RoleFlags int roleFlags, - @Nullable String language, - int accessibilityChannel) { - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - /* metadata= */ null, - containerMimeType, - sampleMimeType, - /* maxInputSize= */ NO_VALUE, - /* initializationData= */ null, - /* drmInitData= */ null, - OFFSET_SAMPLE_RELATIVE, - /* width= */ NO_VALUE, - /* height= */ NO_VALUE, - /* frameRate= */ NO_VALUE, - /* rotationDegrees= */ NO_VALUE, - /* pixelWidthHeightRatio= */ NO_VALUE, - /* projectionData= */ null, - /* stereoMode= */ NO_VALUE, - /* colorInfo= */ null, - /* channelCount= */ NO_VALUE, - /* sampleRate= */ NO_VALUE, - /* pcmEncoding= */ NO_VALUE, - /* encoderDelay= */ NO_VALUE, - /* encoderPadding= */ NO_VALUE, - language, - accessibilityChannel, - /* exoMediaCryptoType= */ null); - } - - public static Format createTextSampleFormat( - @Nullable String id, - @Nullable String sampleMimeType, - @C.SelectionFlags int selectionFlags, @Nullable String language) { - return createTextSampleFormat(id, sampleMimeType, selectionFlags, language, null); - } - - public static Format createTextSampleFormat( - @Nullable String id, - @Nullable String sampleMimeType, - @C.SelectionFlags int selectionFlags, - @Nullable String language, - @Nullable DrmInitData drmInitData) { - return createTextSampleFormat( - id, - sampleMimeType, - /* codecs= */ null, - /* bitrate= */ NO_VALUE, - selectionFlags, - language, - NO_VALUE, - drmInitData, - OFFSET_SAMPLE_RELATIVE, - Collections.emptyList()); - } - - public static Format createTextSampleFormat( - @Nullable String id, - @Nullable String sampleMimeType, - @Nullable String codecs, - int bitrate, - @C.SelectionFlags int selectionFlags, - @Nullable String language, - int accessibilityChannel, - @Nullable DrmInitData drmInitData) { - return createTextSampleFormat( - id, - sampleMimeType, - codecs, - bitrate, - selectionFlags, - language, - accessibilityChannel, - drmInitData, - OFFSET_SAMPLE_RELATIVE, - Collections.emptyList()); - } - - public static Format createTextSampleFormat( - @Nullable String id, - @Nullable String sampleMimeType, - @Nullable String codecs, - int bitrate, - @C.SelectionFlags int selectionFlags, - @Nullable String language, - @Nullable DrmInitData drmInitData, - long subsampleOffsetUs) { - return createTextSampleFormat( - id, - sampleMimeType, - codecs, - bitrate, - selectionFlags, - language, - /* accessibilityChannel= */ NO_VALUE, - drmInitData, - subsampleOffsetUs, - Collections.emptyList()); - } - - public static Format createTextSampleFormat( - @Nullable String id, - @Nullable String sampleMimeType, - @Nullable String codecs, - int bitrate, - @C.SelectionFlags int selectionFlags, - @Nullable String language, - int accessibilityChannel, - @Nullable 
DrmInitData drmInitData, - long subsampleOffsetUs, - @Nullable List initializationData) { - return new Format( - id, - /* label= */ null, - selectionFlags, - /* roleFlags= */ 0, - bitrate, - codecs, - /* metadata= */ null, - /* containerMimeType= */ null, - sampleMimeType, - /* maxInputSize= */ NO_VALUE, - initializationData, - drmInitData, - subsampleOffsetUs, - /* width= */ NO_VALUE, - /* height= */ NO_VALUE, - /* frameRate= */ NO_VALUE, - /* rotationDegrees= */ NO_VALUE, - /* pixelWidthHeightRatio= */ NO_VALUE, - /* projectionData= */ null, - /* stereoMode= */ NO_VALUE, - /* colorInfo= */ null, - /* channelCount= */ NO_VALUE, - /* sampleRate= */ NO_VALUE, - /* pcmEncoding= */ NO_VALUE, - /* encoderDelay= */ NO_VALUE, - /* encoderPadding= */ NO_VALUE, - language, - accessibilityChannel, - /* exoMediaCryptoType= */ null); - } - - // Image. - - public static Format createImageSampleFormat( - @Nullable String id, - @Nullable String sampleMimeType, - @Nullable String codecs, - int bitrate, - @C.SelectionFlags int selectionFlags, - @Nullable List initializationData, - @Nullable String language, - @Nullable DrmInitData drmInitData) { - return new Format( - id, - /* label= */ null, - selectionFlags, - /* roleFlags= */ 0, - bitrate, - codecs, - /* metadata=*/ null, - /* containerMimeType= */ null, - sampleMimeType, - /* maxInputSize= */ NO_VALUE, - initializationData, - drmInitData, - OFFSET_SAMPLE_RELATIVE, - /* width= */ NO_VALUE, - /* height= */ NO_VALUE, - /* frameRate= */ NO_VALUE, - /* rotationDegrees= */ NO_VALUE, - /* pixelWidthHeightRatio= */ NO_VALUE, - /* projectionData= */ null, - /* stereoMode= */ NO_VALUE, - /* colorInfo= */ null, - /* channelCount= */ NO_VALUE, - /* sampleRate= */ NO_VALUE, - /* pcmEncoding= */ NO_VALUE, - /* encoderDelay= */ NO_VALUE, - /* encoderPadding= */ NO_VALUE, - language, - /* accessibilityChannel= */ NO_VALUE, - /* exoMediaCryptoType= */ null); + return new Builder() + .setId(id) + .setLanguage(language) + .setSelectionFlags(selectionFlags) + .setAverageBitrate(bitrate) + .setPeakBitrate(bitrate) + .setCodecs(codecs) + .setSampleMimeType(sampleMimeType) + .setMaxInputSize(maxInputSize) + .setInitializationData(initializationData) + .setDrmInitData(drmInitData) + .setChannelCount(channelCount) + .setSampleRate(sampleRate) + .setPcmEncoding(pcmEncoding) + .build(); } // Generic. /** - * @deprecated Use {@link #createContainerFormat(String, String, String, String, String, int, int, - * int, String)} instead. + * @deprecated Use {@link Format.Builder}. 
*/ @Deprecated - public static Format createContainerFormat( - @Nullable String id, - @Nullable String containerMimeType, - @Nullable String sampleMimeType, - @Nullable String codecs, - int bitrate, - @C.SelectionFlags int selectionFlags, - @Nullable String language) { - return createContainerFormat( - id, - /* label= */ null, - containerMimeType, - sampleMimeType, - codecs, - bitrate, - selectionFlags, - /* roleFlags= */ 0, - language); - } - public static Format createContainerFormat( @Nullable String id, @Nullable String label, @@ -804,409 +997,150 @@ public static Format createContainerFormat( @C.SelectionFlags int selectionFlags, @C.RoleFlags int roleFlags, @Nullable String language) { - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - /* metadata= */ null, - containerMimeType, - sampleMimeType, - /* maxInputSize= */ NO_VALUE, - /* initializationData= */ null, - /* drmInitData= */ null, - OFFSET_SAMPLE_RELATIVE, - /* width= */ NO_VALUE, - /* height= */ NO_VALUE, - /* frameRate= */ NO_VALUE, - /* rotationDegrees= */ NO_VALUE, - /* pixelWidthHeightRatio= */ NO_VALUE, - /* projectionData= */ null, - /* stereoMode= */ NO_VALUE, - /* colorInfo= */ null, - /* channelCount= */ NO_VALUE, - /* sampleRate= */ NO_VALUE, - /* pcmEncoding= */ NO_VALUE, - /* encoderDelay= */ NO_VALUE, - /* encoderPadding= */ NO_VALUE, - language, - /* accessibilityChannel= */ NO_VALUE, - /* exoMediaCryptoType= */ null); - } - - public static Format createSampleFormat( - @Nullable String id, @Nullable String sampleMimeType, long subsampleOffsetUs) { - return new Format( - id, - /* label= */ null, - /* selectionFlags= */ 0, - /* roleFlags= */ 0, - /* bitrate= */ NO_VALUE, - /* codecs= */ null, - /* metadata= */ null, - /* containerMimeType= */ null, - sampleMimeType, - /* maxInputSize= */ NO_VALUE, - /* initializationData= */ null, - /* drmInitData= */ null, - subsampleOffsetUs, - /* width= */ NO_VALUE, - /* height= */ NO_VALUE, - /* frameRate= */ NO_VALUE, - /* rotationDegrees= */ NO_VALUE, - /* pixelWidthHeightRatio= */ NO_VALUE, - /* projectionData= */ null, - /* stereoMode= */ NO_VALUE, - /* colorInfo= */ null, - /* channelCount= */ NO_VALUE, - /* sampleRate= */ NO_VALUE, - /* pcmEncoding= */ NO_VALUE, - /* encoderDelay= */ NO_VALUE, - /* encoderPadding= */ NO_VALUE, - /* language= */ null, - /* accessibilityChannel= */ NO_VALUE, - /* exoMediaCryptoType= */ null); + return new Builder() + .setId(id) + .setLabel(label) + .setLanguage(language) + .setSelectionFlags(selectionFlags) + .setRoleFlags(roleFlags) + .setAverageBitrate(bitrate) + .setPeakBitrate(bitrate) + .setCodecs(codecs) + .setContainerMimeType(containerMimeType) + .setSampleMimeType(sampleMimeType) + .build(); } - public static Format createSampleFormat( - @Nullable String id, - @Nullable String sampleMimeType, - @Nullable String codecs, - int bitrate, - @Nullable DrmInitData drmInitData) { - return new Format( - id, - /* label= */ null, - /* selectionFlags= */ 0, - /* roleFlags= */ 0, - bitrate, - codecs, - /* metadata= */ null, - /* containerMimeType= */ null, - sampleMimeType, - /* maxInputSize= */ NO_VALUE, - /* initializationData= */ null, - drmInitData, - OFFSET_SAMPLE_RELATIVE, - /* width= */ NO_VALUE, - /* height= */ NO_VALUE, - /* frameRate= */ NO_VALUE, - /* rotationDegrees= */ NO_VALUE, - /* pixelWidthHeightRatio= */ NO_VALUE, - /* projectionData= */ null, - /* stereoMode= */ NO_VALUE, - /* colorInfo= */ null, - /* channelCount= */ NO_VALUE, - /* sampleRate= */ NO_VALUE, - /* pcmEncoding= */ NO_VALUE, 
- /* encoderDelay= */ NO_VALUE, - /* encoderPadding= */ NO_VALUE, - /* language= */ null, - /* accessibilityChannel= */ NO_VALUE, - /* exoMediaCryptoType= */ null); + /** + * @deprecated Use {@link Format.Builder}. + */ + @Deprecated + public static Format createSampleFormat(@Nullable String id, @Nullable String sampleMimeType) { + return new Builder().setId(id).setSampleMimeType(sampleMimeType).build(); } - /* package */ Format( - @Nullable String id, - @Nullable String label, - @C.SelectionFlags int selectionFlags, - @C.RoleFlags int roleFlags, - int bitrate, - @Nullable String codecs, - @Nullable Metadata metadata, - // Container specific. - @Nullable String containerMimeType, - // Elementary stream specific. - @Nullable String sampleMimeType, - int maxInputSize, - @Nullable List initializationData, - @Nullable DrmInitData drmInitData, - long subsampleOffsetUs, - // Video specific. - int width, - int height, - float frameRate, - int rotationDegrees, - float pixelWidthHeightRatio, - @Nullable byte[] projectionData, - @C.StereoMode int stereoMode, - @Nullable ColorInfo colorInfo, - // Audio specific. - int channelCount, - int sampleRate, - @C.PcmEncoding int pcmEncoding, - int encoderDelay, - int encoderPadding, - // Audio and text specific. - @Nullable String language, - int accessibilityChannel, - // Provided by source. - @Nullable Class exoMediaCryptoType) { - this.id = id; - this.label = label; - this.selectionFlags = selectionFlags; - this.roleFlags = roleFlags; - this.bitrate = bitrate; - this.codecs = codecs; - this.metadata = metadata; + private Format(Builder builder) { + id = builder.id; + label = builder.label; + language = Util.normalizeLanguageCode(builder.language); + selectionFlags = builder.selectionFlags; + roleFlags = builder.roleFlags; + averageBitrate = builder.averageBitrate; + peakBitrate = builder.peakBitrate; + bitrate = peakBitrate != NO_VALUE ? peakBitrate : averageBitrate; + codecs = builder.codecs; + metadata = builder.metadata; // Container specific. - this.containerMimeType = containerMimeType; - // Elementary stream specific. - this.sampleMimeType = sampleMimeType; - this.maxInputSize = maxInputSize; - this.initializationData = - initializationData == null ? Collections.emptyList() : initializationData; - this.drmInitData = drmInitData; - this.subsampleOffsetUs = subsampleOffsetUs; + containerMimeType = builder.containerMimeType; + // Sample specific. + sampleMimeType = builder.sampleMimeType; + maxInputSize = builder.maxInputSize; + initializationData = + builder.initializationData == null ? Collections.emptyList() : builder.initializationData; + drmInitData = builder.drmInitData; + subsampleOffsetUs = builder.subsampleOffsetUs; // Video specific. - this.width = width; - this.height = height; - this.frameRate = frameRate; - this.rotationDegrees = rotationDegrees == Format.NO_VALUE ? 0 : rotationDegrees; - this.pixelWidthHeightRatio = - pixelWidthHeightRatio == Format.NO_VALUE ? 1 : pixelWidthHeightRatio; - this.projectionData = projectionData; - this.stereoMode = stereoMode; - this.colorInfo = colorInfo; + width = builder.width; + height = builder.height; + frameRate = builder.frameRate; + rotationDegrees = builder.rotationDegrees == NO_VALUE ? 0 : builder.rotationDegrees; + pixelWidthHeightRatio = + builder.pixelWidthHeightRatio == NO_VALUE ? 1 : builder.pixelWidthHeightRatio; + projectionData = builder.projectionData; + stereoMode = builder.stereoMode; + colorInfo = builder.colorInfo; // Audio specific. 
- this.channelCount = channelCount; - this.sampleRate = sampleRate; - this.pcmEncoding = pcmEncoding; - this.encoderDelay = encoderDelay == Format.NO_VALUE ? 0 : encoderDelay; - this.encoderPadding = encoderPadding == Format.NO_VALUE ? 0 : encoderPadding; - // Audio and text specific. - this.language = Util.normalizeLanguageCode(language); - this.accessibilityChannel = accessibilityChannel; + channelCount = builder.channelCount; + sampleRate = builder.sampleRate; + pcmEncoding = builder.pcmEncoding; + encoderDelay = builder.encoderDelay == NO_VALUE ? 0 : builder.encoderDelay; + encoderPadding = builder.encoderPadding == NO_VALUE ? 0 : builder.encoderPadding; + // Text specific. + accessibilityChannel = builder.accessibilityChannel; + // Image specific. + tileCountHorizontal = builder.tileCountHorizontal; + tileCountVertical = builder.tileCountVertical; // Provided by source. - this.exoMediaCryptoType = exoMediaCryptoType; + if (builder.cryptoType == C.CRYPTO_TYPE_NONE && drmInitData != null) { + // Encrypted content cannot use CRYPTO_TYPE_NONE. + cryptoType = C.CRYPTO_TYPE_UNSUPPORTED; + } else { + cryptoType = builder.cryptoType; + } } - @SuppressWarnings("ResourceType") - /* package */ Format(Parcel in) { - id = in.readString(); - label = in.readString(); - selectionFlags = in.readInt(); - roleFlags = in.readInt(); - bitrate = in.readInt(); - codecs = in.readString(); - metadata = in.readParcelable(Metadata.class.getClassLoader()); - // Container specific. - containerMimeType = in.readString(); - // Elementary stream specific. - sampleMimeType = in.readString(); - maxInputSize = in.readInt(); - int initializationDataSize = in.readInt(); - initializationData = new ArrayList<>(initializationDataSize); - for (int i = 0; i < initializationDataSize; i++) { - initializationData.add(in.createByteArray()); - } - drmInitData = in.readParcelable(DrmInitData.class.getClassLoader()); - subsampleOffsetUs = in.readLong(); - // Video specific. - width = in.readInt(); - height = in.readInt(); - frameRate = in.readFloat(); - rotationDegrees = in.readInt(); - pixelWidthHeightRatio = in.readFloat(); - boolean hasProjectionData = Util.readBoolean(in); - projectionData = hasProjectionData ? in.createByteArray() : null; - stereoMode = in.readInt(); - colorInfo = in.readParcelable(ColorInfo.class.getClassLoader()); - // Audio specific. - channelCount = in.readInt(); - sampleRate = in.readInt(); - pcmEncoding = in.readInt(); - encoderDelay = in.readInt(); - encoderPadding = in.readInt(); - // Audio and text specific. - language = in.readString(); - accessibilityChannel = in.readInt(); - // Provided by source. - exoMediaCryptoType = null; + /** Returns a {@link Format.Builder} initialized with the values of this instance. */ + public Builder buildUpon() { + return new Builder(this); } + /** + * @deprecated Use {@link #buildUpon()} and {@link Builder#setMaxInputSize(int)}. 
+ */ + @Deprecated public Format copyWithMaxInputSize(int maxInputSize) { - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - metadata, - containerMimeType, - sampleMimeType, - maxInputSize, - initializationData, - drmInitData, - subsampleOffsetUs, - width, - height, - frameRate, - rotationDegrees, - pixelWidthHeightRatio, - projectionData, - stereoMode, - colorInfo, - channelCount, - sampleRate, - pcmEncoding, - encoderDelay, - encoderPadding, - language, - accessibilityChannel, - exoMediaCryptoType); + return buildUpon().setMaxInputSize(maxInputSize).build(); } + /** + * @deprecated Use {@link #buildUpon()} and {@link Builder#setSubsampleOffsetUs(long)}. + */ + @Deprecated public Format copyWithSubsampleOffsetUs(long subsampleOffsetUs) { - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - metadata, - containerMimeType, - sampleMimeType, - maxInputSize, - initializationData, - drmInitData, - subsampleOffsetUs, - width, - height, - frameRate, - rotationDegrees, - pixelWidthHeightRatio, - projectionData, - stereoMode, - colorInfo, - channelCount, - sampleRate, - pcmEncoding, - encoderDelay, - encoderPadding, - language, - accessibilityChannel, - exoMediaCryptoType); + return buildUpon().setSubsampleOffsetUs(subsampleOffsetUs).build(); } + /** + * @deprecated Use {@link #buildUpon()} and {@link Builder#setLabel(String)} . + */ + @Deprecated public Format copyWithLabel(@Nullable String label) { - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - metadata, - containerMimeType, - sampleMimeType, - maxInputSize, - initializationData, - drmInitData, - subsampleOffsetUs, - width, - height, - frameRate, - rotationDegrees, - pixelWidthHeightRatio, - projectionData, - stereoMode, - colorInfo, - channelCount, - sampleRate, - pcmEncoding, - encoderDelay, - encoderPadding, - language, - accessibilityChannel, - exoMediaCryptoType); + return buildUpon().setLabel(label).build(); } - public Format copyWithContainerInfo( - @Nullable String id, - @Nullable String label, - @Nullable String sampleMimeType, - @Nullable String codecs, - @Nullable Metadata metadata, - int bitrate, - int width, - int height, - int channelCount, - @C.SelectionFlags int selectionFlags, - @Nullable String language) { - - if (this.metadata != null) { - metadata = this.metadata.copyWithAppendedEntriesFrom(metadata); - } - - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - metadata, - containerMimeType, - sampleMimeType, - maxInputSize, - initializationData, - drmInitData, - subsampleOffsetUs, - width, - height, - frameRate, - rotationDegrees, - pixelWidthHeightRatio, - projectionData, - stereoMode, - colorInfo, - channelCount, - sampleRate, - pcmEncoding, - encoderDelay, - encoderPadding, - language, - accessibilityChannel, - exoMediaCryptoType); + /** + * @deprecated Use {@link #withManifestFormatInfo(Format)}. + */ + @Deprecated + public Format copyWithManifestFormatInfo(Format manifestFormat) { + return withManifestFormatInfo(manifestFormat); } @SuppressWarnings("ReferenceEquality") - public Format copyWithManifestFormatInfo(Format manifestFormat) { + public Format withManifestFormatInfo(Format manifestFormat) { if (this == manifestFormat) { // No need to copy from ourselves. return this; } - int trackType = MimeTypes.getTrackType(sampleMimeType); + @C.TrackType int trackType = MimeTypes.getTrackType(sampleMimeType); // Use manifest value only. 
- String id = manifestFormat.id; + @Nullable String id = manifestFormat.id; // Prefer manifest values, but fill in from sample format if missing. - String label = manifestFormat.label != null ? manifestFormat.label : this.label; - String language = this.language; + @Nullable String label = manifestFormat.label != null ? manifestFormat.label : this.label; + @Nullable String language = this.language; if ((trackType == C.TRACK_TYPE_TEXT || trackType == C.TRACK_TYPE_AUDIO) && manifestFormat.language != null) { language = manifestFormat.language; } // Prefer sample format values, but fill in from manifest if missing. - int bitrate = this.bitrate == NO_VALUE ? manifestFormat.bitrate : this.bitrate; - String codecs = this.codecs; + int averageBitrate = + this.averageBitrate == NO_VALUE ? manifestFormat.averageBitrate : this.averageBitrate; + int peakBitrate = this.peakBitrate == NO_VALUE ? manifestFormat.peakBitrate : this.peakBitrate; + @Nullable String codecs = this.codecs; if (codecs == null) { // The manifest format may be muxed, so filter only codecs of this format's type. If we still // have more than one codec then we're unable to uniquely identify which codec to fill in. - String codecsOfType = Util.getCodecsOfType(manifestFormat.codecs, trackType); + @Nullable String codecsOfType = Util.getCodecsOfType(manifestFormat.codecs, trackType); if (Util.splitCodecs(codecsOfType).length == 1) { codecs = codecsOfType; } } + @Nullable Metadata metadata = this.metadata == null ? manifestFormat.metadata @@ -1220,284 +1154,79 @@ public Format copyWithManifestFormatInfo(Format manifestFormat) { // Merge manifest and sample format values. @C.SelectionFlags int selectionFlags = this.selectionFlags | manifestFormat.selectionFlags; @C.RoleFlags int roleFlags = this.roleFlags | manifestFormat.roleFlags; + @Nullable DrmInitData drmInitData = DrmInitData.createSessionCreationData(manifestFormat.drmInitData, this.drmInitData); - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - metadata, - containerMimeType, - sampleMimeType, - maxInputSize, - initializationData, - drmInitData, - subsampleOffsetUs, - width, - height, - frameRate, - rotationDegrees, - pixelWidthHeightRatio, - projectionData, - stereoMode, - colorInfo, - channelCount, - sampleRate, - pcmEncoding, - encoderDelay, - encoderPadding, - language, - accessibilityChannel, - exoMediaCryptoType); + return buildUpon() + .setId(id) + .setLabel(label) + .setLanguage(language) + .setSelectionFlags(selectionFlags) + .setRoleFlags(roleFlags) + .setAverageBitrate(averageBitrate) + .setPeakBitrate(peakBitrate) + .setCodecs(codecs) + .setMetadata(metadata) + .setDrmInitData(drmInitData) + .setFrameRate(frameRate) + .build(); } + /** + * @deprecated Use {@link #buildUpon()}, {@link Builder#setEncoderDelay(int)} and {@link + * Builder#setEncoderPadding(int)}. 
+ */ + @Deprecated public Format copyWithGaplessInfo(int encoderDelay, int encoderPadding) { - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - metadata, - containerMimeType, - sampleMimeType, - maxInputSize, - initializationData, - drmInitData, - subsampleOffsetUs, - width, - height, - frameRate, - rotationDegrees, - pixelWidthHeightRatio, - projectionData, - stereoMode, - colorInfo, - channelCount, - sampleRate, - pcmEncoding, - encoderDelay, - encoderPadding, - language, - accessibilityChannel, - exoMediaCryptoType); + return buildUpon().setEncoderDelay(encoderDelay).setEncoderPadding(encoderPadding).build(); } + /** + * @deprecated Use {@link #buildUpon()} and {@link Builder#setFrameRate(float)}. + */ + @Deprecated public Format copyWithFrameRate(float frameRate) { - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - metadata, - containerMimeType, - sampleMimeType, - maxInputSize, - initializationData, - drmInitData, - subsampleOffsetUs, - width, - height, - frameRate, - rotationDegrees, - pixelWidthHeightRatio, - projectionData, - stereoMode, - colorInfo, - channelCount, - sampleRate, - pcmEncoding, - encoderDelay, - encoderPadding, - language, - accessibilityChannel, - exoMediaCryptoType); + return buildUpon().setFrameRate(frameRate).build(); } + /** + * @deprecated Use {@link #buildUpon()} and {@link Builder#setDrmInitData(DrmInitData)}. + */ + @Deprecated public Format copyWithDrmInitData(@Nullable DrmInitData drmInitData) { - return copyWithAdjustments(drmInitData, metadata); + return buildUpon().setDrmInitData(drmInitData).build(); } + /** + * @deprecated Use {@link #buildUpon()} and {@link Builder#setMetadata(Metadata)}. + */ + @Deprecated public Format copyWithMetadata(@Nullable Metadata metadata) { - return copyWithAdjustments(drmInitData, metadata); - } - - @SuppressWarnings("ReferenceEquality") - public Format copyWithAdjustments( - @Nullable DrmInitData drmInitData, @Nullable Metadata metadata) { - if (drmInitData == this.drmInitData && metadata == this.metadata) { - return this; - } - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - metadata, - containerMimeType, - sampleMimeType, - maxInputSize, - initializationData, - drmInitData, - subsampleOffsetUs, - width, - height, - frameRate, - rotationDegrees, - pixelWidthHeightRatio, - projectionData, - stereoMode, - colorInfo, - channelCount, - sampleRate, - pcmEncoding, - encoderDelay, - encoderPadding, - language, - accessibilityChannel, - exoMediaCryptoType); - } - - public Format copyWithRotationDegrees(int rotationDegrees) { - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - metadata, - containerMimeType, - sampleMimeType, - maxInputSize, - initializationData, - drmInitData, - subsampleOffsetUs, - width, - height, - frameRate, - rotationDegrees, - pixelWidthHeightRatio, - projectionData, - stereoMode, - colorInfo, - channelCount, - sampleRate, - pcmEncoding, - encoderDelay, - encoderPadding, - language, - accessibilityChannel, - exoMediaCryptoType); + return buildUpon().setMetadata(metadata).build(); } + /** + * @deprecated Use {@link #buildUpon()} and {@link Builder#setAverageBitrate(int)} and {@link + * Builder#setPeakBitrate(int)}. 
+ */ + @Deprecated public Format copyWithBitrate(int bitrate) { - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - metadata, - containerMimeType, - sampleMimeType, - maxInputSize, - initializationData, - drmInitData, - subsampleOffsetUs, - width, - height, - frameRate, - rotationDegrees, - pixelWidthHeightRatio, - projectionData, - stereoMode, - colorInfo, - channelCount, - sampleRate, - pcmEncoding, - encoderDelay, - encoderPadding, - language, - accessibilityChannel, - exoMediaCryptoType); + return buildUpon().setAverageBitrate(bitrate).setPeakBitrate(bitrate).build(); } + /** + * @deprecated Use {@link #buildUpon()}, {@link Builder#setWidth(int)} and {@link + * Builder#setHeight(int)}. + */ + @Deprecated public Format copyWithVideoSize(int width, int height) { - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - metadata, - containerMimeType, - sampleMimeType, - maxInputSize, - initializationData, - drmInitData, - subsampleOffsetUs, - width, - height, - frameRate, - rotationDegrees, - pixelWidthHeightRatio, - projectionData, - stereoMode, - colorInfo, - channelCount, - sampleRate, - pcmEncoding, - encoderDelay, - encoderPadding, - language, - accessibilityChannel, - exoMediaCryptoType); + return buildUpon().setWidth(width).setHeight(height).build(); } - public Format copyWithExoMediaCryptoType( - @Nullable Class exoMediaCryptoType) { - return new Format( - id, - label, - selectionFlags, - roleFlags, - bitrate, - codecs, - metadata, - containerMimeType, - sampleMimeType, - maxInputSize, - initializationData, - drmInitData, - subsampleOffsetUs, - width, - height, - frameRate, - rotationDegrees, - pixelWidthHeightRatio, - projectionData, - stereoMode, - colorInfo, - channelCount, - sampleRate, - pcmEncoding, - encoderDelay, - encoderPadding, - language, - accessibilityChannel, - exoMediaCryptoType); + /** Returns a copy of this format with the specified {@link #cryptoType}. */ + public Format copyWithCryptoType(@C.CryptoType int cryptoType) { + return buildUpon().setCryptoType(cryptoType).build(); } /** @@ -1545,14 +1274,16 @@ public int hashCode() { int result = 17; result = 31 * result + (id == null ? 0 : id.hashCode()); result = 31 * result + (label != null ? label.hashCode() : 0); + result = 31 * result + (language == null ? 0 : language.hashCode()); result = 31 * result + selectionFlags; result = 31 * result + roleFlags; - result = 31 * result + bitrate; + result = 31 * result + averageBitrate; + result = 31 * result + peakBitrate; result = 31 * result + (codecs == null ? 0 : codecs.hashCode()); result = 31 * result + (metadata == null ? 0 : metadata.hashCode()); // Container specific. result = 31 * result + (containerMimeType == null ? 0 : containerMimeType.hashCode()); - // Elementary stream specific. + // Sample specific. result = 31 * result + (sampleMimeType == null ? 0 : sampleMimeType.hashCode()); result = 31 * result + maxInputSize; // [Omitted] initializationData. @@ -1573,11 +1304,13 @@ public int hashCode() { result = 31 * result + pcmEncoding; result = 31 * result + encoderDelay; result = 31 * result + encoderPadding; - // Audio and text specific. - result = 31 * result + (language == null ? 0 : language.hashCode()); + // Text specific. result = 31 * result + accessibilityChannel; - // Provided by source. - result = 31 * result + (exoMediaCryptoType == null ? 0 : exoMediaCryptoType.hashCode()); + // Image specific. 
+ result = 31 * result + tileCountHorizontal; + result = 31 * result + tileCountVertical; + // Provided by the source. + result = 31 * result + cryptoType; hashCode = result; } return hashCode; @@ -1598,7 +1331,8 @@ public boolean equals(@Nullable Object obj) { // Field equality checks ordered by type, with the cheapest checks first. return selectionFlags == other.selectionFlags && roleFlags == other.roleFlags - && bitrate == other.bitrate + && averageBitrate == other.averageBitrate + && peakBitrate == other.peakBitrate && maxInputSize == other.maxInputSize && subsampleOffsetUs == other.subsampleOffsetUs && width == other.width @@ -1611,9 +1345,11 @@ public boolean equals(@Nullable Object obj) { && encoderDelay == other.encoderDelay && encoderPadding == other.encoderPadding && accessibilityChannel == other.accessibilityChannel + && tileCountHorizontal == other.tileCountHorizontal + && tileCountVertical == other.tileCountVertical + && cryptoType == other.cryptoType && Float.compare(frameRate, other.frameRate) == 0 && Float.compare(pixelWidthHeightRatio, other.pixelWidthHeightRatio) == 0 - && Util.areEqual(exoMediaCryptoType, other.exoMediaCryptoType) && Util.areEqual(id, other.id) && Util.areEqual(label, other.label) && Util.areEqual(codecs, other.codecs) @@ -1656,22 +1392,44 @@ public static String toLogString(@Nullable Format format) { } StringBuilder builder = new StringBuilder(); builder.append("id=").append(format.id).append(", mimeType=").append(format.sampleMimeType); - if (format.bitrate != Format.NO_VALUE) { + if (format.bitrate != NO_VALUE) { builder.append(", bitrate=").append(format.bitrate); } if (format.codecs != null) { builder.append(", codecs=").append(format.codecs); } - if (format.width != Format.NO_VALUE && format.height != Format.NO_VALUE) { + if (format.drmInitData != null) { + Set schemes = new LinkedHashSet<>(); + for (int i = 0; i < format.drmInitData.schemeDataCount; i++) { + UUID schemeUuid = format.drmInitData.get(i).uuid; + if (schemeUuid.equals(C.COMMON_PSSH_UUID)) { + schemes.add("cenc"); + } else if (schemeUuid.equals(C.CLEARKEY_UUID)) { + schemes.add("clearkey"); + } else if (schemeUuid.equals(C.PLAYREADY_UUID)) { + schemes.add("playready"); + } else if (schemeUuid.equals(C.WIDEVINE_UUID)) { + schemes.add("widevine"); + } else if (schemeUuid.equals(C.UUID_NIL)) { + schemes.add("universal"); + } else { + schemes.add("unknown (" + schemeUuid + ")"); + } + } + builder.append(", drm=["); + Joiner.on(',').appendTo(builder, schemes); + builder.append(']'); + } + if (format.width != NO_VALUE && format.height != NO_VALUE) { builder.append(", res=").append(format.width).append("x").append(format.height); } - if (format.frameRate != Format.NO_VALUE) { + if (format.frameRate != NO_VALUE) { builder.append(", fps=").append(format.frameRate); } - if (format.channelCount != Format.NO_VALUE) { + if (format.channelCount != NO_VALUE) { builder.append(", channels=").append(format.channelCount); } - if (format.sampleRate != Format.NO_VALUE) { + if (format.sampleRate != NO_VALUE) { builder.append(", sample_rate=").append(format.sampleRate); } if (format.language != null) { @@ -1680,71 +1438,259 @@ public static String toLogString(@Nullable Format format) { if (format.label != null) { builder.append(", label=").append(format.label); } + if (format.selectionFlags != 0) { + List selectionFlags = new ArrayList<>(); + // LINT.IfChange(selection_flags) + if ((format.selectionFlags & C.SELECTION_FLAG_AUTOSELECT) != 0) { + selectionFlags.add("auto"); + } + if ((format.selectionFlags & 
C.SELECTION_FLAG_DEFAULT) != 0) { + selectionFlags.add("default"); + } + if ((format.selectionFlags & C.SELECTION_FLAG_FORCED) != 0) { + selectionFlags.add("forced"); + } + builder.append(", selectionFlags=["); + Joiner.on(',').appendTo(builder, selectionFlags); + builder.append("]"); + } + if (format.roleFlags != 0) { + // LINT.IfChange(role_flags) + List roleFlags = new ArrayList<>(); + if ((format.roleFlags & C.ROLE_FLAG_MAIN) != 0) { + roleFlags.add("main"); + } + if ((format.roleFlags & C.ROLE_FLAG_ALTERNATE) != 0) { + roleFlags.add("alt"); + } + if ((format.roleFlags & C.ROLE_FLAG_SUPPLEMENTARY) != 0) { + roleFlags.add("supplementary"); + } + if ((format.roleFlags & C.ROLE_FLAG_COMMENTARY) != 0) { + roleFlags.add("commentary"); + } + if ((format.roleFlags & C.ROLE_FLAG_DUB) != 0) { + roleFlags.add("dub"); + } + if ((format.roleFlags & C.ROLE_FLAG_EMERGENCY) != 0) { + roleFlags.add("emergency"); + } + if ((format.roleFlags & C.ROLE_FLAG_CAPTION) != 0) { + roleFlags.add("caption"); + } + if ((format.roleFlags & C.ROLE_FLAG_SUBTITLE) != 0) { + roleFlags.add("subtitle"); + } + if ((format.roleFlags & C.ROLE_FLAG_SIGN) != 0) { + roleFlags.add("sign"); + } + if ((format.roleFlags & C.ROLE_FLAG_DESCRIBES_VIDEO) != 0) { + roleFlags.add("describes-video"); + } + if ((format.roleFlags & C.ROLE_FLAG_DESCRIBES_MUSIC_AND_SOUND) != 0) { + roleFlags.add("describes-music"); + } + if ((format.roleFlags & C.ROLE_FLAG_ENHANCED_DIALOG_INTELLIGIBILITY) != 0) { + roleFlags.add("enhanced-intelligibility"); + } + if ((format.roleFlags & C.ROLE_FLAG_TRANSCRIBES_DIALOG) != 0) { + roleFlags.add("transcribes-dialog"); + } + if ((format.roleFlags & C.ROLE_FLAG_EASY_TO_READ) != 0) { + roleFlags.add("easy-read"); + } + if ((format.roleFlags & C.ROLE_FLAG_TRICK_PLAY) != 0) { + roleFlags.add("trick-play"); + } + builder.append(", roleFlags=["); + Joiner.on(',').appendTo(builder, roleFlags); + builder.append("]"); + } return builder.toString(); } - // Parcelable implementation. + // Bundleable implementation. 
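As a small usage note for the expanded toLogString above: the method now also appends the DRM schemes, selectionFlags, and roleFlags when they are set. A minimal sketch of how it might be called for debugging, assuming android.util.Log and a hypothetical log tag (neither is introduced by this diff):

import android.util.Log;

import com.google.android.exoplayer2.Format;

final class FormatLoggingSketch {
  private static final String TAG = "FormatLog"; // hypothetical log tag

  // Logs a one-line summary of the given format, e.g.
  // "id=1, mimeType=video/avc, bitrate=4000000, res=1920x1080, fps=30.0".
  static void logSelectedFormat(Format format) {
    Log.d(TAG, "Selected format: " + Format.toLogString(format));
  }
}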
+ + private static final String FIELD_ID = Util.intToStringMaxRadix(0); + private static final String FIELD_LABEL = Util.intToStringMaxRadix(1); + private static final String FIELD_LANGUAGE = Util.intToStringMaxRadix(2); + private static final String FIELD_SELECTION_FLAGS = Util.intToStringMaxRadix(3); + private static final String FIELD_ROLE_FLAGS = Util.intToStringMaxRadix(4); + private static final String FIELD_AVERAGE_BITRATE = Util.intToStringMaxRadix(5); + private static final String FIELD_PEAK_BITRATE = Util.intToStringMaxRadix(6); + private static final String FIELD_CODECS = Util.intToStringMaxRadix(7); + private static final String FIELD_METADATA = Util.intToStringMaxRadix(8); + private static final String FIELD_CONTAINER_MIME_TYPE = Util.intToStringMaxRadix(9); + private static final String FIELD_SAMPLE_MIME_TYPE = Util.intToStringMaxRadix(10); + private static final String FIELD_MAX_INPUT_SIZE = Util.intToStringMaxRadix(11); + private static final String FIELD_INITIALIZATION_DATA = Util.intToStringMaxRadix(12); + private static final String FIELD_DRM_INIT_DATA = Util.intToStringMaxRadix(13); + private static final String FIELD_SUBSAMPLE_OFFSET_US = Util.intToStringMaxRadix(14); + private static final String FIELD_WIDTH = Util.intToStringMaxRadix(15); + private static final String FIELD_HEIGHT = Util.intToStringMaxRadix(16); + private static final String FIELD_FRAME_RATE = Util.intToStringMaxRadix(17); + private static final String FIELD_ROTATION_DEGREES = Util.intToStringMaxRadix(18); + private static final String FIELD_PIXEL_WIDTH_HEIGHT_RATIO = Util.intToStringMaxRadix(19); + private static final String FIELD_PROJECTION_DATA = Util.intToStringMaxRadix(20); + private static final String FIELD_STEREO_MODE = Util.intToStringMaxRadix(21); + private static final String FIELD_COLOR_INFO = Util.intToStringMaxRadix(22); + private static final String FIELD_CHANNEL_COUNT = Util.intToStringMaxRadix(23); + private static final String FIELD_SAMPLE_RATE = Util.intToStringMaxRadix(24); + private static final String FIELD_PCM_ENCODING = Util.intToStringMaxRadix(25); + private static final String FIELD_ENCODER_DELAY = Util.intToStringMaxRadix(26); + private static final String FIELD_ENCODER_PADDING = Util.intToStringMaxRadix(27); + private static final String FIELD_ACCESSIBILITY_CHANNEL = Util.intToStringMaxRadix(28); + private static final String FIELD_CRYPTO_TYPE = Util.intToStringMaxRadix(29); + private static final String FIELD_TILE_COUNT_HORIZONTAL = Util.intToStringMaxRadix(30); + private static final String FIELD_TILE_COUNT_VERTICAL = Util.intToStringMaxRadix(31); @Override - public int describeContents() { - return 0; + public Bundle toBundle() { + return toBundle(/* excludeMetadata= */ false); } - @Override - public void writeToParcel(Parcel dest, int flags) { - dest.writeString(id); - dest.writeString(label); - dest.writeInt(selectionFlags); - dest.writeInt(roleFlags); - dest.writeInt(bitrate); - dest.writeString(codecs); - dest.writeParcelable(metadata, 0); + /** + * Returns a {@link Bundle} representing the information stored in this object. If {@code + * excludeMetadata} is true, {@linkplain Format#metadata metadata} is excluded. 
+ */ + public Bundle toBundle(boolean excludeMetadata) { + Bundle bundle = new Bundle(); + bundle.putString(FIELD_ID, id); + bundle.putString(FIELD_LABEL, label); + bundle.putString(FIELD_LANGUAGE, language); + bundle.putInt(FIELD_SELECTION_FLAGS, selectionFlags); + bundle.putInt(FIELD_ROLE_FLAGS, roleFlags); + bundle.putInt(FIELD_AVERAGE_BITRATE, averageBitrate); + bundle.putInt(FIELD_PEAK_BITRATE, peakBitrate); + bundle.putString(FIELD_CODECS, codecs); + if (!excludeMetadata) { + // TODO (internal ref: b/239701618) + bundle.putParcelable(FIELD_METADATA, metadata); + } // Container specific. - dest.writeString(containerMimeType); - // Elementary stream specific. - dest.writeString(sampleMimeType); - dest.writeInt(maxInputSize); - int initializationDataSize = initializationData.size(); - dest.writeInt(initializationDataSize); - for (int i = 0; i < initializationDataSize; i++) { - dest.writeByteArray(initializationData.get(i)); - } - dest.writeParcelable(drmInitData, 0); - dest.writeLong(subsampleOffsetUs); + bundle.putString(FIELD_CONTAINER_MIME_TYPE, containerMimeType); + // Sample specific. + bundle.putString(FIELD_SAMPLE_MIME_TYPE, sampleMimeType); + bundle.putInt(FIELD_MAX_INPUT_SIZE, maxInputSize); + for (int i = 0; i < initializationData.size(); i++) { + bundle.putByteArray(keyForInitializationData(i), initializationData.get(i)); + } + // DrmInitData doesn't need to be Bundleable as it's only used in the playing process to + // initialize the decoder. + bundle.putParcelable(FIELD_DRM_INIT_DATA, drmInitData); + bundle.putLong(FIELD_SUBSAMPLE_OFFSET_US, subsampleOffsetUs); // Video specific. - dest.writeInt(width); - dest.writeInt(height); - dest.writeFloat(frameRate); - dest.writeInt(rotationDegrees); - dest.writeFloat(pixelWidthHeightRatio); - Util.writeBoolean(dest, projectionData != null); - if (projectionData != null) { - dest.writeByteArray(projectionData); - } - dest.writeInt(stereoMode); - dest.writeParcelable(colorInfo, flags); + bundle.putInt(FIELD_WIDTH, width); + bundle.putInt(FIELD_HEIGHT, height); + bundle.putFloat(FIELD_FRAME_RATE, frameRate); + bundle.putInt(FIELD_ROTATION_DEGREES, rotationDegrees); + bundle.putFloat(FIELD_PIXEL_WIDTH_HEIGHT_RATIO, pixelWidthHeightRatio); + bundle.putByteArray(FIELD_PROJECTION_DATA, projectionData); + bundle.putInt(FIELD_STEREO_MODE, stereoMode); + if (colorInfo != null) { + bundle.putBundle(FIELD_COLOR_INFO, colorInfo.toBundle()); + } // Audio specific. - dest.writeInt(channelCount); - dest.writeInt(sampleRate); - dest.writeInt(pcmEncoding); - dest.writeInt(encoderDelay); - dest.writeInt(encoderPadding); - // Audio and text specific. - dest.writeString(language); - dest.writeInt(accessibilityChannel); + bundle.putInt(FIELD_CHANNEL_COUNT, channelCount); + bundle.putInt(FIELD_SAMPLE_RATE, sampleRate); + bundle.putInt(FIELD_PCM_ENCODING, pcmEncoding); + bundle.putInt(FIELD_ENCODER_DELAY, encoderDelay); + bundle.putInt(FIELD_ENCODER_PADDING, encoderPadding); + // Text specific. + bundle.putInt(FIELD_ACCESSIBILITY_CHANNEL, accessibilityChannel); + // Image specific. + bundle.putInt(FIELD_TILE_COUNT_HORIZONTAL, tileCountHorizontal); + bundle.putInt(FIELD_TILE_COUNT_VERTICAL, tileCountVertical); + // Source specific. + bundle.putInt(FIELD_CRYPTO_TYPE, cryptoType); + return bundle; } - public static final Creator CREATOR = new Creator() { - - @Override - public Format createFromParcel(Parcel in) { - return new Format(in); + /** Object that can restore {@code Format} from a {@link Bundle}. 
*/ + public static final Creator CREATOR = Format::fromBundle; + + private static Format fromBundle(Bundle bundle) { + Builder builder = new Builder(); + BundleableUtil.ensureClassLoader(bundle); + builder + .setId(defaultIfNull(bundle.getString(FIELD_ID), DEFAULT.id)) + .setLabel(defaultIfNull(bundle.getString(FIELD_LABEL), DEFAULT.label)) + .setLanguage(defaultIfNull(bundle.getString(FIELD_LANGUAGE), DEFAULT.language)) + .setSelectionFlags(bundle.getInt(FIELD_SELECTION_FLAGS, DEFAULT.selectionFlags)) + .setRoleFlags(bundle.getInt(FIELD_ROLE_FLAGS, DEFAULT.roleFlags)) + .setAverageBitrate(bundle.getInt(FIELD_AVERAGE_BITRATE, DEFAULT.averageBitrate)) + .setPeakBitrate(bundle.getInt(FIELD_PEAK_BITRATE, DEFAULT.peakBitrate)) + .setCodecs(defaultIfNull(bundle.getString(FIELD_CODECS), DEFAULT.codecs)) + .setMetadata(defaultIfNull(bundle.getParcelable(FIELD_METADATA), DEFAULT.metadata)) + // Container specific. + .setContainerMimeType( + defaultIfNull(bundle.getString(FIELD_CONTAINER_MIME_TYPE), DEFAULT.containerMimeType)) + // Sample specific. + .setSampleMimeType( + defaultIfNull(bundle.getString(FIELD_SAMPLE_MIME_TYPE), DEFAULT.sampleMimeType)) + .setMaxInputSize(bundle.getInt(FIELD_MAX_INPUT_SIZE, DEFAULT.maxInputSize)); + + List initializationData = new ArrayList<>(); + for (int i = 0; ; i++) { + @Nullable byte[] data = bundle.getByteArray(keyForInitializationData(i)); + if (data == null) { + break; + } + initializationData.add(data); } - - @Override - public Format[] newArray(int size) { - return new Format[size]; + builder + .setInitializationData(initializationData) + .setDrmInitData(bundle.getParcelable(FIELD_DRM_INIT_DATA)) + .setSubsampleOffsetUs(bundle.getLong(FIELD_SUBSAMPLE_OFFSET_US, DEFAULT.subsampleOffsetUs)) + // Video specific. + .setWidth(bundle.getInt(FIELD_WIDTH, DEFAULT.width)) + .setHeight(bundle.getInt(FIELD_HEIGHT, DEFAULT.height)) + .setFrameRate(bundle.getFloat(FIELD_FRAME_RATE, DEFAULT.frameRate)) + .setRotationDegrees(bundle.getInt(FIELD_ROTATION_DEGREES, DEFAULT.rotationDegrees)) + .setPixelWidthHeightRatio( + bundle.getFloat(FIELD_PIXEL_WIDTH_HEIGHT_RATIO, DEFAULT.pixelWidthHeightRatio)) + .setProjectionData(bundle.getByteArray(FIELD_PROJECTION_DATA)) + .setStereoMode(bundle.getInt(FIELD_STEREO_MODE, DEFAULT.stereoMode)); + Bundle colorInfoBundle = bundle.getBundle(FIELD_COLOR_INFO); + if (colorInfoBundle != null) { + builder.setColorInfo(ColorInfo.CREATOR.fromBundle(colorInfoBundle)); } + // Audio specific. + builder + .setChannelCount(bundle.getInt(FIELD_CHANNEL_COUNT, DEFAULT.channelCount)) + .setSampleRate(bundle.getInt(FIELD_SAMPLE_RATE, DEFAULT.sampleRate)) + .setPcmEncoding(bundle.getInt(FIELD_PCM_ENCODING, DEFAULT.pcmEncoding)) + .setEncoderDelay(bundle.getInt(FIELD_ENCODER_DELAY, DEFAULT.encoderDelay)) + .setEncoderPadding(bundle.getInt(FIELD_ENCODER_PADDING, DEFAULT.encoderPadding)) + // Text specific. + .setAccessibilityChannel( + bundle.getInt(FIELD_ACCESSIBILITY_CHANNEL, DEFAULT.accessibilityChannel)) + // Image specific. + .setTileCountHorizontal( + bundle.getInt(FIELD_TILE_COUNT_HORIZONTAL, DEFAULT.tileCountHorizontal)) + .setTileCountVertical(bundle.getInt(FIELD_TILE_COUNT_VERTICAL, DEFAULT.tileCountVertical)) + // Source specific. 
+ .setCryptoType(bundle.getInt(FIELD_CRYPTO_TYPE, DEFAULT.cryptoType)); + + return builder.build(); + } + + private static String keyForInitializationData(int initialisationDataIndex) { + return FIELD_INITIALIZATION_DATA + + "_" + + Integer.toString(initialisationDataIndex, Character.MAX_RADIX); + } - }; + /** + * Utility method to get {@code defaultValue} if {@code value} is {@code null}. {@code + * defaultValue} can be {@code null}. + * + *
<p>
      Note: Current implementations of getters in {@link Bundle}, for example {@link + * Bundle#getString(String, String)} does not allow the defaultValue to be {@code null}, hence the + * need for this method. + */ + @Nullable + private static T defaultIfNull(@Nullable T value, @Nullable T defaultValue) { + return value != null ? value : defaultValue; + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/FormatHolder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/FormatHolder.java index 7d21182de2..67f3cf6b44 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/FormatHolder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/FormatHolder.java @@ -18,25 +18,17 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.drm.DrmSession; -/** - * Holds a {@link Format}. - */ +/** Holds a {@link Format}. */ public final class FormatHolder { - /** Whether the {@link #format} setter also sets the {@link #drmSession} field. */ - // TODO: Remove once all Renderers and MediaSources have migrated to the new DRM model [Internal - // ref: b/129764794]. - public boolean includesDrmSession; - /** An accompanying context for decrypting samples in the format. */ - @Nullable public DrmSession drmSession; + @Nullable public DrmSession drmSession; /** The held {@link Format}. */ @Nullable public Format format; /** Clears the holder. */ public void clear() { - includesDrmSession = false; drmSession = null; format = null; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ForwardingPlayer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ForwardingPlayer.java new file mode 100644 index 0000000000..887ca7b082 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ForwardingPlayer.java @@ -0,0 +1,1109 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import android.os.Looper; +import android.view.Surface; +import android.view.SurfaceHolder; +import android.view.SurfaceView; +import android.view.TextureView; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.audio.AudioAttributes; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.text.Cue; +import com.google.android.exoplayer2.text.CueGroup; +import com.google.android.exoplayer2.trackselection.TrackSelectionParameters; +import com.google.android.exoplayer2.util.Size; +import com.google.android.exoplayer2.video.VideoSize; +import java.util.List; + +/** + * A {@link Player} that forwards operations to another {@link Player}. Applications can use this + * class to suppress or modify specific operations, by overriding the respective methods. + */ +public class ForwardingPlayer implements Player { + + private final Player player; + + /** Creates a new instance that forwards all operations to {@code player}. 
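* <p>Editor's illustrative sketch (not part of the upstream source): given some existing {@code
* player}, a caller might wrap it to alter a single operation while forwarding the rest, e.g.
*
* <pre>{@code
* Player wrapped =
*     new ForwardingPlayer(player) {
*       public void seekToNext() {
*         // Deliberately a no-op, so "seek to next" is suppressed; everything else forwards.
*       }
*     };
* }</pre>
*
* <p>A complete wrapper would typically also adjust {@link #getAvailableCommands()} so that UI
* components reflect the suppressed operation.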
*/ + public ForwardingPlayer(Player player) { + this.player = player; + } + + /** Calls {@link Player#getApplicationLooper()} on the delegate and returns the result. */ + @Override + public Looper getApplicationLooper() { + return player.getApplicationLooper(); + } + + /** Calls {@link Player#addListener(Listener)} on the delegate. */ + @Override + public void addListener(Listener listener) { + player.addListener(new ForwardingListener(this, listener)); + } + + /** Calls {@link Player#removeListener(Listener)} on the delegate. */ + @Override + public void removeListener(Listener listener) { + player.removeListener(new ForwardingListener(this, listener)); + } + + /** Calls {@link Player#setMediaItems(List)} on the delegate. */ + @Override + public void setMediaItems(List mediaItems) { + player.setMediaItems(mediaItems); + } + + /** Calls {@link Player#setMediaItems(List, boolean)} ()} on the delegate. */ + @Override + public void setMediaItems(List mediaItems, boolean resetPosition) { + player.setMediaItems(mediaItems, resetPosition); + } + + /** Calls {@link Player#setMediaItems(List, int, long)} on the delegate. */ + @Override + public void setMediaItems(List mediaItems, int startIndex, long startPositionMs) { + player.setMediaItems(mediaItems, startIndex, startPositionMs); + } + + /** Calls {@link Player#setMediaItem(MediaItem)} on the delegate. */ + @Override + public void setMediaItem(MediaItem mediaItem) { + player.setMediaItem(mediaItem); + } + + /** Calls {@link Player#setMediaItem(MediaItem, long)} on the delegate. */ + @Override + public void setMediaItem(MediaItem mediaItem, long startPositionMs) { + player.setMediaItem(mediaItem, startPositionMs); + } + + /** Calls {@link Player#setMediaItem(MediaItem, boolean)} on the delegate. */ + @Override + public void setMediaItem(MediaItem mediaItem, boolean resetPosition) { + player.setMediaItem(mediaItem, resetPosition); + } + + /** Calls {@link Player#addMediaItem(MediaItem)} on the delegate. */ + @Override + public void addMediaItem(MediaItem mediaItem) { + player.addMediaItem(mediaItem); + } + + /** Calls {@link Player#addMediaItem(int, MediaItem)} on the delegate. */ + @Override + public void addMediaItem(int index, MediaItem mediaItem) { + player.addMediaItem(index, mediaItem); + } + + /** Calls {@link Player#addMediaItems(List)} on the delegate. */ + @Override + public void addMediaItems(List mediaItems) { + player.addMediaItems(mediaItems); + } + + /** Calls {@link Player#addMediaItems(int, List)} on the delegate. */ + @Override + public void addMediaItems(int index, List mediaItems) { + player.addMediaItems(index, mediaItems); + } + + /** Calls {@link Player#moveMediaItem(int, int)} on the delegate. */ + @Override + public void moveMediaItem(int currentIndex, int newIndex) { + player.moveMediaItem(currentIndex, newIndex); + } + + /** Calls {@link Player#moveMediaItems(int, int, int)} on the delegate. */ + @Override + public void moveMediaItems(int fromIndex, int toIndex, int newIndex) { + player.moveMediaItems(fromIndex, toIndex, newIndex); + } + + /** Calls {@link Player#removeMediaItem(int)} on the delegate. */ + @Override + public void removeMediaItem(int index) { + player.removeMediaItem(index); + } + + /** Calls {@link Player#removeMediaItems(int, int)} on the delegate. */ + @Override + public void removeMediaItems(int fromIndex, int toIndex) { + player.removeMediaItems(fromIndex, toIndex); + } + + /** Calls {@link Player#clearMediaItems()} on the delegate. 
*/ + @Override + public void clearMediaItems() { + player.clearMediaItems(); + } + + /** Calls {@link Player#isCommandAvailable(int)} on the delegate and returns the result. */ + @Override + public boolean isCommandAvailable(@Command int command) { + return player.isCommandAvailable(command); + } + + /** Calls {@link Player#canAdvertiseSession()} on the delegate and returns the result. */ + @Override + public boolean canAdvertiseSession() { + return player.canAdvertiseSession(); + } + + /** Calls {@link Player#getAvailableCommands()} on the delegate and returns the result. */ + @Override + public Commands getAvailableCommands() { + return player.getAvailableCommands(); + } + + /** Calls {@link Player#prepare()} on the delegate. */ + @Override + public void prepare() { + player.prepare(); + } + + /** Calls {@link Player#getPlaybackState()} on the delegate and returns the result. */ + @Override + public int getPlaybackState() { + return player.getPlaybackState(); + } + + /** Calls {@link Player#getPlaybackSuppressionReason()} on the delegate and returns the result. */ + @Override + public int getPlaybackSuppressionReason() { + return player.getPlaybackSuppressionReason(); + } + + /** Calls {@link Player#isPlaying()} on the delegate and returns the result. */ + @Override + public boolean isPlaying() { + return player.isPlaying(); + } + + /** Calls {@link Player#getPlayerError()} on the delegate and returns the result. */ + @Nullable + @Override + public PlaybackException getPlayerError() { + return player.getPlayerError(); + } + + /** Calls {@link Player#play()} on the delegate. */ + @Override + public void play() { + player.play(); + } + + /** Calls {@link Player#pause()} on the delegate. */ + @Override + public void pause() { + player.pause(); + } + + /** Calls {@link Player#setPlayWhenReady(boolean)} on the delegate. */ + @Override + public void setPlayWhenReady(boolean playWhenReady) { + player.setPlayWhenReady(playWhenReady); + } + + /** Calls {@link Player#getPlayWhenReady()} on the delegate and returns the result. */ + @Override + public boolean getPlayWhenReady() { + return player.getPlayWhenReady(); + } + + /** Calls {@link Player#setRepeatMode(int)} on the delegate. */ + @Override + public void setRepeatMode(@RepeatMode int repeatMode) { + player.setRepeatMode(repeatMode); + } + + /** Calls {@link Player#getRepeatMode()} on the delegate and returns the result. */ + @Override + public int getRepeatMode() { + return player.getRepeatMode(); + } + + /** Calls {@link Player#setShuffleModeEnabled(boolean)} on the delegate. */ + @Override + public void setShuffleModeEnabled(boolean shuffleModeEnabled) { + player.setShuffleModeEnabled(shuffleModeEnabled); + } + + /** Calls {@link Player#getShuffleModeEnabled()} on the delegate and returns the result. */ + @Override + public boolean getShuffleModeEnabled() { + return player.getShuffleModeEnabled(); + } + + /** Calls {@link Player#isLoading()} on the delegate and returns the result. */ + @Override + public boolean isLoading() { + return player.isLoading(); + } + + /** Calls {@link Player#seekToDefaultPosition()} on the delegate. */ + @Override + public void seekToDefaultPosition() { + player.seekToDefaultPosition(); + } + + /** Calls {@link Player#seekToDefaultPosition(int)} on the delegate. */ + @Override + public void seekToDefaultPosition(int mediaItemIndex) { + player.seekToDefaultPosition(mediaItemIndex); + } + + /** Calls {@link Player#seekTo(long)} on the delegate. 
*/ + @Override + public void seekTo(long positionMs) { + player.seekTo(positionMs); + } + + /** Calls {@link Player#seekTo(int, long)} on the delegate. */ + @Override + public void seekTo(int mediaItemIndex, long positionMs) { + player.seekTo(mediaItemIndex, positionMs); + } + + /** Calls {@link Player#getSeekBackIncrement()} on the delegate and returns the result. */ + @Override + public long getSeekBackIncrement() { + return player.getSeekBackIncrement(); + } + + /** Calls {@link Player#seekBack()} on the delegate. */ + @Override + public void seekBack() { + player.seekBack(); + } + + /** Calls {@link Player#getSeekForwardIncrement()} on the delegate and returns the result. */ + @Override + public long getSeekForwardIncrement() { + return player.getSeekForwardIncrement(); + } + + /** Calls {@link Player#seekForward()} on the delegate. */ + @Override + public void seekForward() { + player.seekForward(); + } + + /** + * Calls {@link Player#hasPrevious()} on the delegate and returns the result. + * + * @deprecated Use {@link #hasPreviousMediaItem()} instead. + */ + @SuppressWarnings("deprecation") // Forwarding to deprecated method + @Deprecated + @Override + public boolean hasPrevious() { + return player.hasPrevious(); + } + + /** + * Calls {@link Player#hasPreviousWindow()} on the delegate and returns the result. + * + * @deprecated Use {@link #hasPreviousMediaItem()} instead. + */ + @SuppressWarnings("deprecation") // Forwarding to deprecated method + @Deprecated + @Override + public boolean hasPreviousWindow() { + return player.hasPreviousWindow(); + } + + /** Calls {@link Player#hasPreviousMediaItem()} on the delegate and returns the result. */ + @Override + public boolean hasPreviousMediaItem() { + return player.hasPreviousMediaItem(); + } + + /** + * Calls {@link Player#previous()} on the delegate. + * + * @deprecated Use {@link #seekToPreviousMediaItem()} instead. + */ + @SuppressWarnings("deprecation") // Forwarding to deprecated method + @Deprecated + @Override + public void previous() { + player.previous(); + } + + /** + * Calls {@link Player#seekToPreviousWindow()} on the delegate. + * + * @deprecated Use {@link #seekToPreviousMediaItem()} instead. + */ + @SuppressWarnings("deprecation") // Forwarding to deprecated method + @Deprecated + @Override + public void seekToPreviousWindow() { + player.seekToPreviousWindow(); + } + + /** Calls {@link Player#seekToPreviousMediaItem()} on the delegate. */ + @Override + public void seekToPreviousMediaItem() { + player.seekToPreviousMediaItem(); + } + + /** Calls {@link Player#seekToPrevious()} on the delegate. */ + @Override + public void seekToPrevious() { + player.seekToPrevious(); + } + + /** Calls {@link Player#getMaxSeekToPreviousPosition()} on the delegate and returns the result. */ + @Override + public long getMaxSeekToPreviousPosition() { + return player.getMaxSeekToPreviousPosition(); + } + + /** + * Calls {@link Player#hasNext()} on the delegate and returns the result. + * + * @deprecated Use {@link #hasNextMediaItem()} instead. + */ + @SuppressWarnings("deprecation") // Forwarding to deprecated method + @Deprecated + @Override + public boolean hasNext() { + return player.hasNext(); + } + + /** + * Calls {@link Player#hasNextWindow()} on the delegate and returns the result. + * + * @deprecated Use {@link #hasNextMediaItem()} instead. 
+ */ + @SuppressWarnings("deprecation") // Forwarding to deprecated method + @Deprecated + @Override + public boolean hasNextWindow() { + return player.hasNextWindow(); + } + + /** Calls {@link Player#hasNextMediaItem()} on the delegate and returns the result. */ + @Override + public boolean hasNextMediaItem() { + return player.hasNextMediaItem(); + } + + /** + * Calls {@link Player#next()} on the delegate. + * + * @deprecated Use {@link #seekToNextMediaItem()} instead. + */ + @SuppressWarnings("deprecation") // Forwarding to deprecated method + @Deprecated + @Override + public void next() { + player.next(); + } + + /** + * Calls {@link Player#seekToNextWindow()} on the delegate. + * + * @deprecated Use {@link #seekToNextMediaItem()} instead. + */ + @SuppressWarnings("deprecation") // Forwarding to deprecated method + @Deprecated + @Override + public void seekToNextWindow() { + player.seekToNextWindow(); + } + + /** Calls {@link Player#seekToNextMediaItem()} on the delegate. */ + @Override + public void seekToNextMediaItem() { + player.seekToNextMediaItem(); + } + + /** Calls {@link Player#seekToNext()} on the delegate. */ + @Override + public void seekToNext() { + player.seekToNext(); + } + + /** Calls {@link Player#setPlaybackParameters(PlaybackParameters)} on the delegate. */ + @Override + public void setPlaybackParameters(PlaybackParameters playbackParameters) { + player.setPlaybackParameters(playbackParameters); + } + + /** Calls {@link Player#setPlaybackSpeed(float)} on the delegate. */ + @Override + public void setPlaybackSpeed(float speed) { + player.setPlaybackSpeed(speed); + } + + /** Calls {@link Player#getPlaybackParameters()} on the delegate and returns the result. */ + @Override + public PlaybackParameters getPlaybackParameters() { + return player.getPlaybackParameters(); + } + + /** Calls {@link Player#stop()} on the delegate. */ + @Override + public void stop() { + player.stop(); + } + + /** + * Calls {@link Player#stop(boolean)} on the delegate. + * + * @deprecated Use {@link #stop()} and {@link #clearMediaItems()} (if {@code reset} is true) or + * just {@link #stop()} (if {@code reset} is false). Any player error will be cleared when + * {@link #prepare() re-preparing} the player. + */ + @SuppressWarnings("deprecation") // Forwarding to deprecated method + @Deprecated + @Override + public void stop(boolean reset) { + player.stop(reset); + } + + /** Calls {@link Player#release()} on the delegate. */ + @Override + public void release() { + player.release(); + } + + /** Calls {@link Player#getCurrentTracks()} on the delegate and returns the result. */ + @Override + public Tracks getCurrentTracks() { + return player.getCurrentTracks(); + } + + /** Calls {@link Player#getTrackSelectionParameters()} on the delegate and returns the result. */ + @Override + public TrackSelectionParameters getTrackSelectionParameters() { + return player.getTrackSelectionParameters(); + } + + /** Calls {@link Player#setTrackSelectionParameters(TrackSelectionParameters)} on the delegate. */ + @Override + public void setTrackSelectionParameters(TrackSelectionParameters parameters) { + player.setTrackSelectionParameters(parameters); + } + + /** Calls {@link Player#getMediaMetadata()} on the delegate and returns the result. */ + @Override + public MediaMetadata getMediaMetadata() { + return player.getMediaMetadata(); + } + + /** Calls {@link Player#getPlaylistMetadata()} on the delegate and returns the result. 
*/ + @Override + public MediaMetadata getPlaylistMetadata() { + return player.getPlaylistMetadata(); + } + + /** Calls {@link Player#setPlaylistMetadata(MediaMetadata)} on the delegate. */ + @Override + public void setPlaylistMetadata(MediaMetadata mediaMetadata) { + player.setPlaylistMetadata(mediaMetadata); + } + + /** Calls {@link Player#getCurrentManifest()} on the delegate and returns the result. */ + @Nullable + @Override + public Object getCurrentManifest() { + return player.getCurrentManifest(); + } + + /** Calls {@link Player#getCurrentTimeline()} on the delegate and returns the result. */ + @Override + public Timeline getCurrentTimeline() { + return player.getCurrentTimeline(); + } + + /** Calls {@link Player#getCurrentPeriodIndex()} on the delegate and returns the result. */ + @Override + public int getCurrentPeriodIndex() { + return player.getCurrentPeriodIndex(); + } + + /** + * Calls {@link Player#getCurrentWindowIndex()} on the delegate and returns the result. + * + * @deprecated Use {@link #getCurrentMediaItemIndex()} instead. + */ + @SuppressWarnings("deprecation") // Forwarding to deprecated method + @Deprecated + @Override + public int getCurrentWindowIndex() { + return player.getCurrentWindowIndex(); + } + + /** Calls {@link Player#getCurrentMediaItemIndex()} on the delegate and returns the result. */ + @Override + public int getCurrentMediaItemIndex() { + return player.getCurrentMediaItemIndex(); + } + + /** + * Calls {@link Player#getNextWindowIndex()} on the delegate and returns the result. + * + * @deprecated Use {@link #getNextMediaItemIndex()} instead. + */ + @SuppressWarnings("deprecation") // Forwarding to deprecated method + @Deprecated + @Override + public int getNextWindowIndex() { + return player.getNextWindowIndex(); + } + + /** Calls {@link Player#getNextMediaItemIndex()} on the delegate and returns the result. */ + @Override + public int getNextMediaItemIndex() { + return player.getNextMediaItemIndex(); + } + + /** + * Calls {@link Player#getPreviousWindowIndex()} on the delegate and returns the result. + * + * @deprecated Use {@link #getPreviousMediaItemIndex()} instead. + */ + @SuppressWarnings("deprecation") // Forwarding to deprecated method + @Deprecated + @Override + public int getPreviousWindowIndex() { + return player.getPreviousWindowIndex(); + } + + /** Calls {@link Player#getPreviousMediaItemIndex()} on the delegate and returns the result. */ + @Override + public int getPreviousMediaItemIndex() { + return player.getPreviousMediaItemIndex(); + } + + /** Calls {@link Player#getCurrentMediaItem()} on the delegate and returns the result. */ + @Nullable + @Override + public MediaItem getCurrentMediaItem() { + return player.getCurrentMediaItem(); + } + + /** Calls {@link Player#getMediaItemCount()} on the delegate and returns the result. */ + @Override + public int getMediaItemCount() { + return player.getMediaItemCount(); + } + + /** Calls {@link Player#getMediaItemAt(int)} on the delegate and returns the result. */ + @Override + public MediaItem getMediaItemAt(int index) { + return player.getMediaItemAt(index); + } + + /** Calls {@link Player#getDuration()} on the delegate and returns the result. */ + @Override + public long getDuration() { + return player.getDuration(); + } + + /** Calls {@link Player#getCurrentPosition()} on the delegate and returns the result. */ + @Override + public long getCurrentPosition() { + return player.getCurrentPosition(); + } + + /** Calls {@link Player#getBufferedPosition()} on the delegate and returns the result. 
*/ + @Override + public long getBufferedPosition() { + return player.getBufferedPosition(); + } + + /** Calls {@link Player#getBufferedPercentage()} on the delegate and returns the result. */ + @Override + public int getBufferedPercentage() { + return player.getBufferedPercentage(); + } + + /** Calls {@link Player#getTotalBufferedDuration()} on the delegate and returns the result. */ + @Override + public long getTotalBufferedDuration() { + return player.getTotalBufferedDuration(); + } + + /** + * Calls {@link Player#isCurrentWindowDynamic()} on the delegate and returns the result. + * + * @deprecated Use {@link #isCurrentMediaItemDynamic()} instead. + */ + @SuppressWarnings("deprecation") // Forwarding to deprecated method + @Deprecated + @Override + public boolean isCurrentWindowDynamic() { + return player.isCurrentWindowDynamic(); + } + + /** Calls {@link Player#isCurrentMediaItemDynamic()} on the delegate and returns the result. */ + @Override + public boolean isCurrentMediaItemDynamic() { + return player.isCurrentMediaItemDynamic(); + } + + /** + * Calls {@link Player#isCurrentWindowLive()} on the delegate and returns the result. + * + * @deprecated Use {@link #isCurrentMediaItemLive()} instead. + */ + @SuppressWarnings("deprecation") // Forwarding to deprecated method + @Deprecated + @Override + public boolean isCurrentWindowLive() { + return player.isCurrentWindowLive(); + } + + /** Calls {@link Player#isCurrentMediaItemLive()} on the delegate and returns the result. */ + @Override + public boolean isCurrentMediaItemLive() { + return player.isCurrentMediaItemLive(); + } + + /** Calls {@link Player#getCurrentLiveOffset()} on the delegate and returns the result. */ + @Override + public long getCurrentLiveOffset() { + return player.getCurrentLiveOffset(); + } + + /** + * Calls {@link Player#isCurrentWindowSeekable()} on the delegate and returns the result. + * + * @deprecated Use {@link #isCurrentMediaItemSeekable()} instead. + */ + @SuppressWarnings("deprecation") // Forwarding to deprecated method + @Deprecated + @Override + public boolean isCurrentWindowSeekable() { + return player.isCurrentWindowSeekable(); + } + + /** Calls {@link Player#isCurrentMediaItemSeekable()} on the delegate and returns the result. */ + @Override + public boolean isCurrentMediaItemSeekable() { + return player.isCurrentMediaItemSeekable(); + } + + /** Calls {@link Player#isPlayingAd()} on the delegate and returns the result. */ + @Override + public boolean isPlayingAd() { + return player.isPlayingAd(); + } + + /** Calls {@link Player#getCurrentAdGroupIndex()} on the delegate and returns the result. */ + @Override + public int getCurrentAdGroupIndex() { + return player.getCurrentAdGroupIndex(); + } + + /** Calls {@link Player#getCurrentAdIndexInAdGroup()} on the delegate and returns the result. */ + @Override + public int getCurrentAdIndexInAdGroup() { + return player.getCurrentAdIndexInAdGroup(); + } + + /** Calls {@link Player#getContentDuration()} on the delegate and returns the result. */ + @Override + public long getContentDuration() { + return player.getContentDuration(); + } + + /** Calls {@link Player#getContentPosition()} on the delegate and returns the result. */ + @Override + public long getContentPosition() { + return player.getContentPosition(); + } + + /** Calls {@link Player#getContentBufferedPosition()} on the delegate and returns the result. 
*/ + @Override + public long getContentBufferedPosition() { + return player.getContentBufferedPosition(); + } + + /** Calls {@link Player#getAudioAttributes()} on the delegate and returns the result. */ + @Override + public AudioAttributes getAudioAttributes() { + return player.getAudioAttributes(); + } + + /** Calls {@link Player#setVolume(float)} on the delegate. */ + @Override + public void setVolume(float volume) { + player.setVolume(volume); + } + + /** Calls {@link Player#getVolume()} on the delegate and returns the result. */ + @Override + public float getVolume() { + return player.getVolume(); + } + + /** Calls {@link Player#getVideoSize()} on the delegate and returns the result. */ + @Override + public VideoSize getVideoSize() { + return player.getVideoSize(); + } + + /** Calls {@link Player#getSurfaceSize()} on the delegate and returns the result. */ + @Override + public Size getSurfaceSize() { + return player.getSurfaceSize(); + } + + /** Calls {@link Player#clearVideoSurface()} on the delegate. */ + @Override + public void clearVideoSurface() { + player.clearVideoSurface(); + } + + /** Calls {@link Player#clearVideoSurface(Surface)} on the delegate. */ + @Override + public void clearVideoSurface(@Nullable Surface surface) { + player.clearVideoSurface(surface); + } + + /** Calls {@link Player#setVideoSurface(Surface)} on the delegate. */ + @Override + public void setVideoSurface(@Nullable Surface surface) { + player.setVideoSurface(surface); + } + + /** Calls {@link Player#setVideoSurfaceHolder(SurfaceHolder)} on the delegate. */ + @Override + public void setVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder) { + player.setVideoSurfaceHolder(surfaceHolder); + } + + /** Calls {@link Player#clearVideoSurfaceHolder(SurfaceHolder)} on the delegate. */ + @Override + public void clearVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder) { + player.clearVideoSurfaceHolder(surfaceHolder); + } + + /** Calls {@link Player#setVideoSurfaceView(SurfaceView)} on the delegate. */ + @Override + public void setVideoSurfaceView(@Nullable SurfaceView surfaceView) { + player.setVideoSurfaceView(surfaceView); + } + + /** Calls {@link Player#clearVideoSurfaceView(SurfaceView)} on the delegate. */ + @Override + public void clearVideoSurfaceView(@Nullable SurfaceView surfaceView) { + player.clearVideoSurfaceView(surfaceView); + } + + /** Calls {@link Player#setVideoTextureView(TextureView)} on the delegate. */ + @Override + public void setVideoTextureView(@Nullable TextureView textureView) { + player.setVideoTextureView(textureView); + } + + /** Calls {@link Player#clearVideoTextureView(TextureView)} on the delegate. */ + @Override + public void clearVideoTextureView(@Nullable TextureView textureView) { + player.clearVideoTextureView(textureView); + } + + /** Calls {@link Player#getCurrentCues()} on the delegate and returns the result. */ + @Override + public CueGroup getCurrentCues() { + return player.getCurrentCues(); + } + + /** Calls {@link Player#getDeviceInfo()} on the delegate and returns the result. */ + @Override + public DeviceInfo getDeviceInfo() { + return player.getDeviceInfo(); + } + + /** Calls {@link Player#getDeviceVolume()} on the delegate and returns the result. */ + @Override + public int getDeviceVolume() { + return player.getDeviceVolume(); + } + + /** Calls {@link Player#isDeviceMuted()} on the delegate and returns the result. 
*/ + @Override + public boolean isDeviceMuted() { + return player.isDeviceMuted(); + } + + /** Calls {@link Player#setDeviceVolume(int)} on the delegate. */ + @Override + public void setDeviceVolume(int volume) { + player.setDeviceVolume(volume); + } + + /** Calls {@link Player#increaseDeviceVolume()} on the delegate. */ + @Override + public void increaseDeviceVolume() { + player.increaseDeviceVolume(); + } + + /** Calls {@link Player#decreaseDeviceVolume()} on the delegate. */ + @Override + public void decreaseDeviceVolume() { + player.decreaseDeviceVolume(); + } + + /** Calls {@link Player#setDeviceMuted(boolean)} on the delegate. */ + @Override + public void setDeviceMuted(boolean muted) { + player.setDeviceMuted(muted); + } + + /** Returns the {@link Player} to which operations are forwarded. */ + public Player getWrappedPlayer() { + return player; + } + + private static final class ForwardingListener implements Listener { + + private final ForwardingPlayer forwardingPlayer; + private final Listener listener; + + public ForwardingListener(ForwardingPlayer forwardingPlayer, Listener listener) { + this.forwardingPlayer = forwardingPlayer; + this.listener = listener; + } + + @Override + public void onEvents(Player player, Events events) { + // Replace player with forwarding player. + listener.onEvents(forwardingPlayer, events); + } + + @Override + public void onTimelineChanged(Timeline timeline, @TimelineChangeReason int reason) { + listener.onTimelineChanged(timeline, reason); + } + + @Override + public void onMediaItemTransition( + @Nullable MediaItem mediaItem, @MediaItemTransitionReason int reason) { + listener.onMediaItemTransition(mediaItem, reason); + } + + @Override + public void onTracksChanged(Tracks tracks) { + listener.onTracksChanged(tracks); + } + + @Override + public void onMediaMetadataChanged(MediaMetadata mediaMetadata) { + listener.onMediaMetadataChanged(mediaMetadata); + } + + @Override + public void onPlaylistMetadataChanged(MediaMetadata mediaMetadata) { + listener.onPlaylistMetadataChanged(mediaMetadata); + } + + @Override + public void onIsLoadingChanged(boolean isLoading) { + listener.onIsLoadingChanged(isLoading); + } + + @Override + @SuppressWarnings("deprecation") + public void onLoadingChanged(boolean isLoading) { + listener.onIsLoadingChanged(isLoading); + } + + @Override + public void onAvailableCommandsChanged(Commands availableCommands) { + listener.onAvailableCommandsChanged(availableCommands); + } + + @Override + public void onTrackSelectionParametersChanged(TrackSelectionParameters parameters) { + listener.onTrackSelectionParametersChanged(parameters); + } + + @Override + @SuppressWarnings("deprecation") + public void onPlayerStateChanged(boolean playWhenReady, @State int playbackState) { + listener.onPlayerStateChanged(playWhenReady, playbackState); + } + + @Override + public void onPlaybackStateChanged(@State int playbackState) { + listener.onPlaybackStateChanged(playbackState); + } + + @Override + public void onPlayWhenReadyChanged( + boolean playWhenReady, @PlayWhenReadyChangeReason int reason) { + listener.onPlayWhenReadyChanged(playWhenReady, reason); + } + + @Override + public void onPlaybackSuppressionReasonChanged( + @PlayWhenReadyChangeReason int playbackSuppressionReason) { + listener.onPlaybackSuppressionReasonChanged(playbackSuppressionReason); + } + + @Override + public void onIsPlayingChanged(boolean isPlaying) { + listener.onIsPlayingChanged(isPlaying); + } + + @Override + public void onRepeatModeChanged(@RepeatMode int repeatMode) { + 
listener.onRepeatModeChanged(repeatMode); + } + + @Override + public void onShuffleModeEnabledChanged(boolean shuffleModeEnabled) { + listener.onShuffleModeEnabledChanged(shuffleModeEnabled); + } + + @Override + public void onPlayerError(PlaybackException error) { + listener.onPlayerError(error); + } + + @Override + public void onPlayerErrorChanged(@Nullable PlaybackException error) { + listener.onPlayerErrorChanged(error); + } + + @Override + @SuppressWarnings("deprecation") + public void onPositionDiscontinuity(@DiscontinuityReason int reason) { + listener.onPositionDiscontinuity(reason); + } + + @Override + public void onPositionDiscontinuity( + PositionInfo oldPosition, PositionInfo newPosition, @DiscontinuityReason int reason) { + listener.onPositionDiscontinuity(oldPosition, newPosition, reason); + } + + @Override + public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) { + listener.onPlaybackParametersChanged(playbackParameters); + } + + @Override + public void onSeekBackIncrementChanged(long seekBackIncrementMs) { + listener.onSeekBackIncrementChanged(seekBackIncrementMs); + } + + @Override + public void onSeekForwardIncrementChanged(long seekForwardIncrementMs) { + listener.onSeekForwardIncrementChanged(seekForwardIncrementMs); + } + + @Override + public void onMaxSeekToPreviousPositionChanged(long maxSeekToPreviousPositionMs) { + listener.onMaxSeekToPreviousPositionChanged(maxSeekToPreviousPositionMs); + } + + @Override + @SuppressWarnings("deprecation") + public void onSeekProcessed() { + listener.onSeekProcessed(); + } + + @Override + public void onVideoSizeChanged(VideoSize videoSize) { + listener.onVideoSizeChanged(videoSize); + } + + @Override + public void onSurfaceSizeChanged(int width, int height) { + listener.onSurfaceSizeChanged(width, height); + } + + @Override + public void onRenderedFirstFrame() { + listener.onRenderedFirstFrame(); + } + + @Override + public void onAudioSessionIdChanged(int audioSessionId) { + listener.onAudioSessionIdChanged(audioSessionId); + } + + @Override + public void onAudioAttributesChanged(AudioAttributes audioAttributes) { + listener.onAudioAttributesChanged(audioAttributes); + } + + @Override + public void onVolumeChanged(float volume) { + listener.onVolumeChanged(volume); + } + + @Override + public void onSkipSilenceEnabledChanged(boolean skipSilenceEnabled) { + listener.onSkipSilenceEnabledChanged(skipSilenceEnabled); + } + + @Override + public void onCues(List cues) { + listener.onCues(cues); + } + + @Override + public void onCues(CueGroup cueGroup) { + listener.onCues(cueGroup); + } + + @Override + public void onMetadata(Metadata metadata) { + listener.onMetadata(metadata); + } + + @Override + public void onDeviceInfoChanged(DeviceInfo deviceInfo) { + listener.onDeviceInfoChanged(deviceInfo); + } + + @Override + public void onDeviceVolumeChanged(int volume, boolean muted) { + listener.onDeviceVolumeChanged(volume, muted); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (!(o instanceof ForwardingListener)) { + return false; + } + ForwardingListener that = (ForwardingListener) o; + if (!forwardingPlayer.equals(that.forwardingPlayer)) { + return false; + } + return listener.equals(that.listener); + } + + @Override + public int hashCode() { + int result = forwardingPlayer.hashCode(); + result = 31 * result + listener.hashCode(); + return result; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/HeartRating.java 
b/TMessagesProj/src/main/java/com/google/android/exoplayer2/HeartRating.java new file mode 100644 index 0000000000..91ef693b97 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/HeartRating.java @@ -0,0 +1,100 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import android.os.Bundle; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Objects; + +/** + * A rating expressed as "heart" or "no heart". It can be used to indicate whether the content is a + * favorite. + */ +public final class HeartRating extends Rating { + + private final boolean rated; + private final boolean isHeart; + + /** Creates a unrated instance. */ + public HeartRating() { + rated = false; + isHeart = false; + } + + /** + * Creates a rated instance. + * + * @param isHeart {@code true} for "heart", {@code false} for "no heart". + */ + public HeartRating(boolean isHeart) { + rated = true; + this.isHeart = isHeart; + } + + @Override + public boolean isRated() { + return rated; + } + + /** Returns whether the rating is "heart". */ + public boolean isHeart() { + return isHeart; + } + + @Override + public int hashCode() { + return Objects.hashCode(rated, isHeart); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (!(obj instanceof HeartRating)) { + return false; + } + HeartRating other = (HeartRating) obj; + return isHeart == other.isHeart && rated == other.rated; + } + + // Bundleable implementation. + + private static final @RatingType int TYPE = RATING_TYPE_HEART; + + private static final String FIELD_RATED = Util.intToStringMaxRadix(1); + private static final String FIELD_IS_HEART = Util.intToStringMaxRadix(2); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putInt(FIELD_RATING_TYPE, TYPE); + bundle.putBoolean(FIELD_RATED, rated); + bundle.putBoolean(FIELD_IS_HEART, isHeart); + return bundle; + } + + /** Object that can restore a {@link HeartRating} from a {@link Bundle}. */ + public static final Creator CREATOR = HeartRating::fromBundle; + + private static HeartRating fromBundle(Bundle bundle) { + checkArgument(bundle.getInt(FIELD_RATING_TYPE, /* defaultValue= */ RATING_TYPE_UNSET) == TYPE); + boolean isRated = bundle.getBoolean(FIELD_RATED, /* defaultValue= */ false); + return isRated + ? 
new HeartRating(bundle.getBoolean(FIELD_IS_HEART, /* defaultValue= */ false)) + : new HeartRating(); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/IllegalSeekPositionException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/IllegalSeekPositionException.java index baa1cf3f79..71f57dec82 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/IllegalSeekPositionException.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/IllegalSeekPositionException.java @@ -16,22 +16,16 @@ package com.google.android.exoplayer2; /** - * Thrown when an attempt is made to seek to a position that does not exist in the player's - * {@link Timeline}. + * Thrown when an attempt is made to seek to a position that does not exist in the player's {@link + * Timeline}. */ public final class IllegalSeekPositionException extends IllegalStateException { - /** - * The {@link Timeline} in which the seek was attempted. - */ + /** The {@link Timeline} in which the seek was attempted. */ public final Timeline timeline; - /** - * The index of the window being seeked to. - */ + /** The index of the window being seeked to. */ public final int windowIndex; - /** - * The seek position in the specified window. - */ + /** The seek position in the specified window. */ public final long positionMs; /** @@ -44,5 +38,4 @@ public IllegalSeekPositionException(Timeline timeline, int windowIndex, long pos this.windowIndex = windowIndex; this.positionMs = positionMs; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/LegacyMediaPlayerWrapper.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/LegacyMediaPlayerWrapper.java new file mode 100644 index 0000000000..a2ff1d6346 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/LegacyMediaPlayerWrapper.java @@ -0,0 +1,59 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import android.media.MediaPlayer; +import android.os.Looper; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; + +/** A {@link Player} wrapper for the legacy Android platform {@link MediaPlayer}. */ +public final class LegacyMediaPlayerWrapper extends SimpleBasePlayer { + + private final MediaPlayer player; + + private boolean playWhenReady; + + /** + * Creates the {@link MediaPlayer} wrapper. + * + * @param looper The {@link Looper} used to call all methods on. 
+ */ + public LegacyMediaPlayerWrapper(Looper looper) { + super(looper); + this.player = new MediaPlayer(); + } + + @Override + protected State getState() { + return new State.Builder() + .setAvailableCommands(new Commands.Builder().addAll(Player.COMMAND_PLAY_PAUSE).build()) + .setPlayWhenReady(playWhenReady, Player.PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST) + .build(); + } + + @Override + protected ListenableFuture handleSetPlayWhenReady(boolean playWhenReady) { + this.playWhenReady = playWhenReady; + // TODO: Only call these methods if the player is in Started or Paused state. + if (playWhenReady) { + player.start(); + } else { + player.pause(); + } + return Futures.immediateVoidFuture(); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/LivePlaybackSpeedControl.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/LivePlaybackSpeedControl.java new file mode 100644 index 0000000000..57f85486ab --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/LivePlaybackSpeedControl.java @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import com.google.android.exoplayer2.MediaItem.LiveConfiguration; + +/** + * Controls the playback speed while playing live content in order to maintain a steady target live + * offset. + */ +public interface LivePlaybackSpeedControl { + + /** + * Sets the live configuration defined by the media. + * + * @param liveConfiguration The {@link LiveConfiguration} as defined by the media. + */ + void setLiveConfiguration(LiveConfiguration liveConfiguration); + + /** + * Sets the target live offset in microseconds that overrides the live offset {@link + * #setLiveConfiguration configured} by the media. Passing {@code C.TIME_UNSET} deletes a previous + * override. + * + *
<p>
      If no target live offset is configured by {@link #setLiveConfiguration}, this override has + * no effect. + */ + void setTargetLiveOffsetOverrideUs(long liveOffsetUs); + + /** + * Notifies the live playback speed control that a rebuffer occurred. + * + *
<p>
      A rebuffer is defined to be caused by buffer depletion rather than a user action. Hence this + * method is not called during initial buffering or when buffering as a result of a seek + * operation. + */ + void notifyRebuffer(); + + /** + * Returns the adjusted playback speed in order get closer towards the {@link + * #getTargetLiveOffsetUs() target live offset}. + * + * @param liveOffsetUs The current live offset, in microseconds. + * @param bufferedDurationUs The duration of media that's currently buffered, in microseconds. + * @return The adjusted factor by which playback should be sped up. + */ + float getAdjustedPlaybackSpeed(long liveOffsetUs, long bufferedDurationUs); + + /** + * Returns the current target live offset, in microseconds, or {@link C#TIME_UNSET} if no target + * live offset is defined for the current media. + */ + long getTargetLiveOffsetUs(); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/LoadControl.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/LoadControl.java index 80be0b9e71..66fa7a7f17 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/LoadControl.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/LoadControl.java @@ -17,17 +17,13 @@ import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.TrackGroupArray; -import com.google.android.exoplayer2.trackselection.TrackSelectionArray; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.upstream.Allocator; -/** - * Controls buffering of media. - */ +/** Controls buffering of media. */ public interface LoadControl { - /** - * Called by the player when prepared with a new source. - */ + /** Called by the player when prepared with a new source. */ void onPrepared(); /** @@ -37,33 +33,27 @@ public interface LoadControl { * @param trackGroups The {@link TrackGroup}s from which the selection was made. * @param trackSelections The track selections that were made. */ - void onTracksSelected(Renderer[] renderers, TrackGroupArray trackGroups, - TrackSelectionArray trackSelections); + void onTracksSelected( + Renderer[] renderers, TrackGroupArray trackGroups, ExoTrackSelection[] trackSelections); - /** - * Called by the player when stopped. - */ + /** Called by the player when stopped. */ void onStopped(); - /** - * Called by the player when released. - */ + /** Called by the player when released. */ void onReleased(); - /** - * Returns the {@link Allocator} that should be used to obtain media buffer allocations. - */ + /** Returns the {@link Allocator} that should be used to obtain media buffer allocations. */ Allocator getAllocator(); /** * Returns the duration of media to retain in the buffer prior to the current playback position, * for fast backward seeking. - *
<p>
      - * Note: If {@link #retainBackBufferFromKeyframe()} is false then seeking in the back-buffer will - * only be fast if the back-buffer contains a keyframe prior to the seek position. - *
<p>
      - * Note: Implementations should return a single value. Dynamic changes to the back-buffer are not - * currently supported. + * + *
<p>
      Note: If {@link #retainBackBufferFromKeyframe()} is false then seeking in the back-buffer + * will only be fast if the back-buffer contains a keyframe prior to the seek position. + * + *
<p>
      Note: Implementations should return a single value. Dynamic changes to the back-buffer are + * not currently supported. * * @return The duration of media to retain in the buffer prior to the current playback position, * in microseconds. @@ -73,28 +63,36 @@ void onTracksSelected(Renderer[] renderers, TrackGroupArray trackGroups, /** * Returns whether media should be retained from the keyframe before the current playback position * minus {@link #getBackBufferDurationUs()}, rather than any sample before or at that position. - *
<p>
      - * Warning: Returning true will cause the back-buffer size to depend on the spacing of keyframes - * in the media being played. Returning true is not recommended unless you control the media and - * are comfortable with the back-buffer size exceeding {@link #getBackBufferDurationUs()} by as - * much as the maximum duration between adjacent keyframes in the media. - *
<p>
      - * Note: Implementations should return a single value. Dynamic changes to the back-buffer are not - * currently supported. + * + *
<p>
      Warning: Returning true will cause the back-buffer size to depend on the spacing of + * keyframes in the media being played. Returning true is not recommended unless you control the + * media and are comfortable with the back-buffer size exceeding {@link + * #getBackBufferDurationUs()} by as much as the maximum duration between adjacent keyframes in + * the media. + * + *
<p>
      Note: Implementations should return a single value. Dynamic changes to the back-buffer are + * not currently supported. * * @return Whether media should be retained from the keyframe before the current playback position - * minus {@link #getBackBufferDurationUs()}, rather than any sample before or at that position. + * minus {@link #getBackBufferDurationUs()}, rather than any sample before or at that + * position. */ boolean retainBackBufferFromKeyframe(); /** * Called by the player to determine whether it should continue to load the source. * + * @param playbackPositionUs The current playback position in microseconds, relative to the start + * of the {@link Timeline.Period period} that will continue to be loaded if this method + * returns {@code true}. If playback of this period has not yet started, the value will be + * negative and equal in magnitude to the duration of any media in previous periods still to + * be played. * @param bufferedDurationUs The duration of media that's currently buffered. - * @param playbackSpeed The current playback speed. + * @param playbackSpeed The current factor by which playback is sped up. * @return Whether the loading should continue. */ - boolean shouldContinueLoading(long bufferedDurationUs, float playbackSpeed); + boolean shouldContinueLoading( + long playbackPositionUs, long bufferedDurationUs, float playbackSpeed); /** * Called repeatedly by the player when it's loading the source, has yet to start playback, and @@ -103,11 +101,15 @@ void onTracksSelected(Renderer[] renderers, TrackGroupArray trackGroups, * false} until some condition has been met (e.g. a certain amount of media is buffered). * * @param bufferedDurationUs The duration of media that's currently buffered. - * @param playbackSpeed The current playback speed. + * @param playbackSpeed The current factor by which playback is sped up. * @param rebuffering Whether the player is rebuffering. A rebuffer is defined to be caused by * buffer depletion rather than a user action. Hence this parameter is false during initial * buffering and when buffering as a result of a seek operation. + * @param targetLiveOffsetUs The desired playback position offset to the live edge in + * microseconds, or {@link C#TIME_UNSET} if the media is not a live stream or no offset is + * configured. * @return Whether playback should be allowed to start or resume. */ - boolean shouldStartPlayback(long bufferedDurationUs, float playbackSpeed, boolean rebuffering); + boolean shouldStartPlayback( + long bufferedDurationUs, float playbackSpeed, boolean rebuffering, long targetLiveOffsetUs); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaItem.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaItem.java new file mode 100644 index 0000000000..615fb2b3ae --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaItem.java @@ -0,0 +1,2062 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; + +import android.net.Uri; +import android.os.Bundle; +import androidx.annotation.IntRange; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.offline.StreamKey; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import com.google.errorprone.annotations.InlineMe; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.UUID; + +/** Representation of a media item. */ +public final class MediaItem implements Bundleable { + + /** + * Creates a {@link MediaItem} for the given URI. + * + * @param uri The URI. + * @return An {@link MediaItem} for the given URI. + */ + public static MediaItem fromUri(String uri) { + return new MediaItem.Builder().setUri(uri).build(); + } + + /** + * Creates a {@link MediaItem} for the given {@link Uri URI}. + * + * @param uri The {@link Uri uri}. + * @return An {@link MediaItem} for the given URI. + */ + public static MediaItem fromUri(Uri uri) { + return new MediaItem.Builder().setUri(uri).build(); + } + + /** A builder for {@link MediaItem} instances. */ + public static final class Builder { + + @Nullable private String mediaId; + @Nullable private Uri uri; + @Nullable private String mimeType; + // TODO: Change this to ClippingProperties once all the deprecated individual setters are + // removed. + private ClippingConfiguration.Builder clippingConfiguration; + // TODO: Change this to @Nullable DrmConfiguration once all the deprecated individual setters + // are removed. + private DrmConfiguration.Builder drmConfiguration; + private List streamKeys; + @Nullable private String customCacheKey; + private ImmutableList subtitleConfigurations; + @Nullable private AdsConfiguration adsConfiguration; + @Nullable private Object tag; + @Nullable private MediaMetadata mediaMetadata; + // TODO: Change this to LiveConfiguration once all the deprecated individual setters + // are removed. + private LiveConfiguration.Builder liveConfiguration; + private RequestMetadata requestMetadata; + + /** Creates a builder. */ + @SuppressWarnings("deprecation") // Temporarily uses DrmConfiguration.Builder() constructor. 
+ public Builder() { + clippingConfiguration = new ClippingConfiguration.Builder(); + drmConfiguration = new DrmConfiguration.Builder(); + streamKeys = Collections.emptyList(); + subtitleConfigurations = ImmutableList.of(); + liveConfiguration = new LiveConfiguration.Builder(); + requestMetadata = RequestMetadata.EMPTY; + } + + private Builder(MediaItem mediaItem) { + this(); + clippingConfiguration = mediaItem.clippingConfiguration.buildUpon(); + mediaId = mediaItem.mediaId; + mediaMetadata = mediaItem.mediaMetadata; + liveConfiguration = mediaItem.liveConfiguration.buildUpon(); + requestMetadata = mediaItem.requestMetadata; + @Nullable LocalConfiguration localConfiguration = mediaItem.localConfiguration; + if (localConfiguration != null) { + customCacheKey = localConfiguration.customCacheKey; + mimeType = localConfiguration.mimeType; + uri = localConfiguration.uri; + streamKeys = localConfiguration.streamKeys; + subtitleConfigurations = localConfiguration.subtitleConfigurations; + tag = localConfiguration.tag; + drmConfiguration = + localConfiguration.drmConfiguration != null + ? localConfiguration.drmConfiguration.buildUpon() + : new DrmConfiguration.Builder(); + adsConfiguration = localConfiguration.adsConfiguration; + } + } + + /** + * Sets the optional media ID which identifies the media item. + * + *

      By default {@link #DEFAULT_MEDIA_ID} is used. + */ + @CanIgnoreReturnValue + public Builder setMediaId(String mediaId) { + this.mediaId = checkNotNull(mediaId); + return this; + } + + /** + * Sets the optional URI. + * + *

      If {@code uri} is null or unset then no {@link LocalConfiguration} object is created + * during {@link #build()} and no other {@code Builder} methods that would populate {@link + * MediaItem#localConfiguration} should be called. + */ + @CanIgnoreReturnValue + public Builder setUri(@Nullable String uri) { + return setUri(uri == null ? null : Uri.parse(uri)); + } + + /** + * Sets the optional URI. + * + *

      If {@code uri} is null or unset then no {@link LocalConfiguration} object is created + * during {@link #build()} and no other {@code Builder} methods that would populate {@link + * MediaItem#localConfiguration} should be called. + */ + @CanIgnoreReturnValue + public Builder setUri(@Nullable Uri uri) { + this.uri = uri; + return this; + } + + /** + * Sets the optional MIME type. + * + *

      The MIME type may be used as a hint for inferring the type of the media item. + * + *

      This method should only be called if {@link #setUri} is passed a non-null value. + * + * @param mimeType The MIME type. + */ + @CanIgnoreReturnValue + public Builder setMimeType(@Nullable String mimeType) { + this.mimeType = mimeType; + return this; + } + + /** Sets the {@link ClippingConfiguration}, defaults to {@link ClippingConfiguration#UNSET}. */ + @CanIgnoreReturnValue + public Builder setClippingConfiguration(ClippingConfiguration clippingConfiguration) { + this.clippingConfiguration = clippingConfiguration.buildUpon(); + return this; + } + + /** + * @deprecated Use {@link #setClippingConfiguration(ClippingConfiguration)} and {@link + * ClippingConfiguration.Builder#setStartPositionMs(long)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setClipStartPositionMs(@IntRange(from = 0) long startPositionMs) { + clippingConfiguration.setStartPositionMs(startPositionMs); + return this; + } + + /** + * @deprecated Use {@link #setClippingConfiguration(ClippingConfiguration)} and {@link + * ClippingConfiguration.Builder#setEndPositionMs(long)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setClipEndPositionMs(long endPositionMs) { + clippingConfiguration.setEndPositionMs(endPositionMs); + return this; + } + + /** + * @deprecated Use {@link #setClippingConfiguration(ClippingConfiguration)} and {@link + * ClippingConfiguration.Builder#setRelativeToLiveWindow(boolean)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setClipRelativeToLiveWindow(boolean relativeToLiveWindow) { + clippingConfiguration.setRelativeToLiveWindow(relativeToLiveWindow); + return this; + } + + /** + * @deprecated Use {@link #setClippingConfiguration(ClippingConfiguration)} and {@link + * ClippingConfiguration.Builder#setRelativeToDefaultPosition(boolean)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setClipRelativeToDefaultPosition(boolean relativeToDefaultPosition) { + clippingConfiguration.setRelativeToDefaultPosition(relativeToDefaultPosition); + return this; + } + + /** + * @deprecated Use {@link #setClippingConfiguration(ClippingConfiguration)} and {@link + * ClippingConfiguration.Builder#setStartsAtKeyFrame(boolean)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setClipStartsAtKeyFrame(boolean startsAtKeyFrame) { + clippingConfiguration.setStartsAtKeyFrame(startsAtKeyFrame); + return this; + } + + /** Sets the optional DRM configuration. */ + @CanIgnoreReturnValue + public Builder setDrmConfiguration(@Nullable DrmConfiguration drmConfiguration) { + this.drmConfiguration = + drmConfiguration != null ? drmConfiguration.buildUpon() : new DrmConfiguration.Builder(); + return this; + } + + /** + * @deprecated Use {@link #setDrmConfiguration(DrmConfiguration)} and {@link + * DrmConfiguration.Builder#setLicenseUri(Uri)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setDrmLicenseUri(@Nullable Uri licenseUri) { + drmConfiguration.setLicenseUri(licenseUri); + return this; + } + + /** + * @deprecated Use {@link #setDrmConfiguration(DrmConfiguration)} and {@link + * DrmConfiguration.Builder#setLicenseUri(String)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setDrmLicenseUri(@Nullable String licenseUri) { + drmConfiguration.setLicenseUri(licenseUri); + return this; + } + + /** + * @deprecated Use {@link #setDrmConfiguration(DrmConfiguration)} and {@link + * DrmConfiguration.Builder#setLicenseRequestHeaders(Map)} instead. 
Note that {@link + * DrmConfiguration.Builder#setLicenseRequestHeaders(Map)} doesn't accept null, use an empty + * map to clear the headers. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setDrmLicenseRequestHeaders( + @Nullable Map licenseRequestHeaders) { + drmConfiguration.setLicenseRequestHeaders( + licenseRequestHeaders != null ? licenseRequestHeaders : ImmutableMap.of()); + return this; + } + + /** + * @deprecated Use {@link #setDrmConfiguration(DrmConfiguration)} and pass the {@code uuid} to + * {@link DrmConfiguration.Builder#Builder(UUID)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setDrmUuid(@Nullable UUID uuid) { + drmConfiguration.setNullableScheme(uuid); + return this; + } + + /** + * @deprecated Use {@link #setDrmConfiguration(DrmConfiguration)} and {@link + * DrmConfiguration.Builder#setMultiSession(boolean)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setDrmMultiSession(boolean multiSession) { + drmConfiguration.setMultiSession(multiSession); + return this; + } + + /** + * @deprecated Use {@link #setDrmConfiguration(DrmConfiguration)} and {@link + * DrmConfiguration.Builder#setForceDefaultLicenseUri(boolean)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setDrmForceDefaultLicenseUri(boolean forceDefaultLicenseUri) { + drmConfiguration.setForceDefaultLicenseUri(forceDefaultLicenseUri); + return this; + } + + /** + * @deprecated Use {@link #setDrmConfiguration(DrmConfiguration)} and {@link + * DrmConfiguration.Builder#setPlayClearContentWithoutKey(boolean)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setDrmPlayClearContentWithoutKey(boolean playClearContentWithoutKey) { + drmConfiguration.setPlayClearContentWithoutKey(playClearContentWithoutKey); + return this; + } + + /** + * @deprecated Use {@link #setDrmConfiguration(DrmConfiguration)} and {@link + * DrmConfiguration.Builder#setForceSessionsForAudioAndVideoTracks(boolean)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setDrmSessionForClearPeriods(boolean sessionForClearPeriods) { + drmConfiguration.setForceSessionsForAudioAndVideoTracks(sessionForClearPeriods); + return this; + } + + /** + * @deprecated Use {@link #setDrmConfiguration(DrmConfiguration)} and {@link + * DrmConfiguration.Builder#setForcedSessionTrackTypes(List)} instead. Note that {@link + * DrmConfiguration.Builder#setForcedSessionTrackTypes(List)} doesn't accept null, use an + * empty list to clear the contents. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setDrmSessionForClearTypes( + @Nullable List<@C.TrackType Integer> sessionForClearTypes) { + drmConfiguration.setForcedSessionTrackTypes( + sessionForClearTypes != null ? sessionForClearTypes : ImmutableList.of()); + return this; + } + + /** + * @deprecated Use {@link #setDrmConfiguration(DrmConfiguration)} and {@link + * DrmConfiguration.Builder#setKeySetId(byte[])} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setDrmKeySetId(@Nullable byte[] keySetId) { + drmConfiguration.setKeySetId(keySetId); + return this; + } + + /** + * Sets the optional stream keys by which the manifest is filtered (only used for adaptive + * streams). + * + *

      {@code null} or an empty {@link List} can be used for a reset. + * + *

      If {@link #setUri} is passed a non-null {@code uri}, the stream keys are used to create a + * {@link LocalConfiguration} object. Otherwise they will be ignored. + */ + @CanIgnoreReturnValue + public Builder setStreamKeys(@Nullable List streamKeys) { + this.streamKeys = + streamKeys != null && !streamKeys.isEmpty() + ? Collections.unmodifiableList(new ArrayList<>(streamKeys)) + : Collections.emptyList(); + return this; + } + + /** + * Sets the optional custom cache key (only used for progressive streams). + * + *

      This method should only be called if {@link #setUri} is passed a non-null value. + */ + @CanIgnoreReturnValue + public Builder setCustomCacheKey(@Nullable String customCacheKey) { + this.customCacheKey = customCacheKey; + return this; + } + + /** + * @deprecated Use {@link #setSubtitleConfigurations(List)} instead. Note that {@link + * #setSubtitleConfigurations(List)} doesn't accept null, use an empty list to clear the + * contents. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setSubtitles(@Nullable List subtitles) { + this.subtitleConfigurations = + subtitles != null ? ImmutableList.copyOf(subtitles) : ImmutableList.of(); + return this; + } + + /** + * Sets the optional subtitles. + * + *

      This method should only be called if {@link #setUri} is passed a non-null value. + */ + @CanIgnoreReturnValue + public Builder setSubtitleConfigurations(List subtitleConfigurations) { + this.subtitleConfigurations = ImmutableList.copyOf(subtitleConfigurations); + return this; + } + + /** + * Sets the optional {@link AdsConfiguration}. + * + *

      This method should only be called if {@link #setUri} is passed a non-null value. + */ + @CanIgnoreReturnValue + public Builder setAdsConfiguration(@Nullable AdsConfiguration adsConfiguration) { + this.adsConfiguration = adsConfiguration; + return this; + } + + /** + * @deprecated Use {@link #setAdsConfiguration(AdsConfiguration)}, parse the {@code adTagUri} + * with {@link Uri#parse(String)} and pass the result to {@link + * AdsConfiguration.Builder#Builder(Uri)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setAdTagUri(@Nullable String adTagUri) { + return setAdTagUri(adTagUri != null ? Uri.parse(adTagUri) : null); + } + + /** + * @deprecated Use {@link #setAdsConfiguration(AdsConfiguration)} and pass the {@code adTagUri} + * to {@link AdsConfiguration.Builder#Builder(Uri)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setAdTagUri(@Nullable Uri adTagUri) { + return setAdTagUri(adTagUri, /* adsId= */ null); + } + + /** + * @deprecated Use {@link #setAdsConfiguration(AdsConfiguration)}, pass the {@code adTagUri} to + * {@link AdsConfiguration.Builder#Builder(Uri)} and the {@code adsId} to {@link + * AdsConfiguration.Builder#setAdsId(Object)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setAdTagUri(@Nullable Uri adTagUri, @Nullable Object adsId) { + this.adsConfiguration = + adTagUri != null ? new AdsConfiguration.Builder(adTagUri).setAdsId(adsId).build() : null; + return this; + } + + /** Sets the {@link LiveConfiguration}. Defaults to {@link LiveConfiguration#UNSET}. */ + @CanIgnoreReturnValue + public Builder setLiveConfiguration(LiveConfiguration liveConfiguration) { + this.liveConfiguration = liveConfiguration.buildUpon(); + return this; + } + + /** + * @deprecated Use {@link #setLiveConfiguration(LiveConfiguration)} and {@link + * LiveConfiguration.Builder#setTargetOffsetMs(long)}. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setLiveTargetOffsetMs(long liveTargetOffsetMs) { + liveConfiguration.setTargetOffsetMs(liveTargetOffsetMs); + return this; + } + + /** + * @deprecated Use {@link #setLiveConfiguration(LiveConfiguration)} and {@link + * LiveConfiguration.Builder#setMinOffsetMs(long)}. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setLiveMinOffsetMs(long liveMinOffsetMs) { + liveConfiguration.setMinOffsetMs(liveMinOffsetMs); + return this; + } + + /** + * @deprecated Use {@link #setLiveConfiguration(LiveConfiguration)} and {@link + * LiveConfiguration.Builder#setMaxOffsetMs(long)}. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setLiveMaxOffsetMs(long liveMaxOffsetMs) { + liveConfiguration.setMaxOffsetMs(liveMaxOffsetMs); + return this; + } + + /** + * @deprecated Use {@link #setLiveConfiguration(LiveConfiguration)} and {@link + * LiveConfiguration.Builder#setMinPlaybackSpeed(float)}. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setLiveMinPlaybackSpeed(float minPlaybackSpeed) { + liveConfiguration.setMinPlaybackSpeed(minPlaybackSpeed); + return this; + } + + /** + * @deprecated Use {@link #setLiveConfiguration(LiveConfiguration)} and {@link + * LiveConfiguration.Builder#setMaxPlaybackSpeed(float)}. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setLiveMaxPlaybackSpeed(float maxPlaybackSpeed) { + liveConfiguration.setMaxPlaybackSpeed(maxPlaybackSpeed); + return this; + } + + /** + * Sets the optional tag for custom attributes. 
The tag for the media source which will be + * published in the {@code com.google.android.exoplayer2.Timeline} of the source as {@code + * com.google.android.exoplayer2.Timeline.Window#tag}. + * + *

      This method should only be called if {@link #setUri} is passed a non-null value. + */ + @CanIgnoreReturnValue + public Builder setTag(@Nullable Object tag) { + this.tag = tag; + return this; + } + + /** Sets the media metadata. */ + @CanIgnoreReturnValue + public Builder setMediaMetadata(MediaMetadata mediaMetadata) { + this.mediaMetadata = mediaMetadata; + return this; + } + + /** Sets the request metadata. */ + @CanIgnoreReturnValue + public Builder setRequestMetadata(RequestMetadata requestMetadata) { + this.requestMetadata = requestMetadata; + return this; + } + + /** Returns a new {@link MediaItem} instance with the current builder values. */ + @SuppressWarnings("deprecation") // Using PlaybackProperties while it exists. + public MediaItem build() { + // TODO: remove this check once all the deprecated individual DRM setters are removed. + checkState(drmConfiguration.licenseUri == null || drmConfiguration.scheme != null); + @Nullable PlaybackProperties localConfiguration = null; + @Nullable Uri uri = this.uri; + if (uri != null) { + localConfiguration = + new PlaybackProperties( + uri, + mimeType, + drmConfiguration.scheme != null ? drmConfiguration.build() : null, + adsConfiguration, + streamKeys, + customCacheKey, + subtitleConfigurations, + tag); + } + return new MediaItem( + mediaId != null ? mediaId : DEFAULT_MEDIA_ID, + clippingConfiguration.buildClippingProperties(), + localConfiguration, + liveConfiguration.build(), + mediaMetadata != null ? mediaMetadata : MediaMetadata.EMPTY, + requestMetadata); + } + } + + /** DRM configuration for a media item. */ + public static final class DrmConfiguration { + + /** Builder for {@link DrmConfiguration}. */ + public static final class Builder { + + // TODO remove @Nullable annotation when the deprecated zero-arg constructor is removed. + @Nullable private UUID scheme; + @Nullable private Uri licenseUri; + private ImmutableMap licenseRequestHeaders; + private boolean multiSession; + private boolean playClearContentWithoutKey; + private boolean forceDefaultLicenseUri; + private ImmutableList<@C.TrackType Integer> forcedSessionTrackTypes; + @Nullable private byte[] keySetId; + + /** + * Constructs an instance. + * + * @param scheme The {@link UUID} of the protection scheme. + */ + public Builder(UUID scheme) { + this.scheme = scheme; + this.licenseRequestHeaders = ImmutableMap.of(); + this.forcedSessionTrackTypes = ImmutableList.of(); + } + + /** + * @deprecated This only exists to support the deprecated setters for individual DRM + * properties on {@link MediaItem.Builder}. + */ + @Deprecated + private Builder() { + this.licenseRequestHeaders = ImmutableMap.of(); + this.forcedSessionTrackTypes = ImmutableList.of(); + } + + private Builder(DrmConfiguration drmConfiguration) { + this.scheme = drmConfiguration.scheme; + this.licenseUri = drmConfiguration.licenseUri; + this.licenseRequestHeaders = drmConfiguration.licenseRequestHeaders; + this.multiSession = drmConfiguration.multiSession; + this.playClearContentWithoutKey = drmConfiguration.playClearContentWithoutKey; + this.forceDefaultLicenseUri = drmConfiguration.forceDefaultLicenseUri; + this.forcedSessionTrackTypes = drmConfiguration.forcedSessionTrackTypes; + this.keySetId = drmConfiguration.keySetId; + } + + /** Sets the {@link UUID} of the protection scheme. */ + @CanIgnoreReturnValue + public Builder setScheme(UUID scheme) { + this.scheme = scheme; + return this; + } + + /** + * @deprecated This only exists to support the deprecated {@link + * MediaItem.Builder#setDrmUuid(UUID)}. 
+ */ + @CanIgnoreReturnValue + @Deprecated + private Builder setNullableScheme(@Nullable UUID scheme) { + this.scheme = scheme; + return this; + } + + /** Sets the optional default DRM license server URI. */ + @CanIgnoreReturnValue + public Builder setLicenseUri(@Nullable Uri licenseUri) { + this.licenseUri = licenseUri; + return this; + } + + /** Sets the optional default DRM license server URI. */ + @CanIgnoreReturnValue + public Builder setLicenseUri(@Nullable String licenseUri) { + this.licenseUri = licenseUri == null ? null : Uri.parse(licenseUri); + return this; + } + + /** Sets the optional request headers attached to DRM license requests. */ + @CanIgnoreReturnValue + public Builder setLicenseRequestHeaders(Map licenseRequestHeaders) { + this.licenseRequestHeaders = ImmutableMap.copyOf(licenseRequestHeaders); + return this; + } + + /** Sets whether multi session is enabled. */ + @CanIgnoreReturnValue + public Builder setMultiSession(boolean multiSession) { + this.multiSession = multiSession; + return this; + } + + /** + * Sets whether to always use the default DRM license server URI even if the media specifies + * its own DRM license server URI. + */ + @CanIgnoreReturnValue + public Builder setForceDefaultLicenseUri(boolean forceDefaultLicenseUri) { + this.forceDefaultLicenseUri = forceDefaultLicenseUri; + return this; + } + + /** + * Sets whether clear samples within protected content should be played when keys for the + * encrypted part of the content have yet to be loaded. + */ + @CanIgnoreReturnValue + public Builder setPlayClearContentWithoutKey(boolean playClearContentWithoutKey) { + this.playClearContentWithoutKey = playClearContentWithoutKey; + return this; + } + + /** + * @deprecated Use {@link #setForceSessionsForAudioAndVideoTracks(boolean)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + @InlineMe( + replacement = + "this.setForceSessionsForAudioAndVideoTracks(forceSessionsForAudioAndVideoTracks)") + public Builder forceSessionsForAudioAndVideoTracks( + boolean forceSessionsForAudioAndVideoTracks) { + return setForceSessionsForAudioAndVideoTracks(forceSessionsForAudioAndVideoTracks); + } + + /** + * Sets whether a DRM session should be used for clear tracks of type {@link + * C#TRACK_TYPE_VIDEO} and {@link C#TRACK_TYPE_AUDIO}. + * + *

      This method overrides what has been set by previously calling {@link + * #setForcedSessionTrackTypes(List)}. + */ + @CanIgnoreReturnValue + public Builder setForceSessionsForAudioAndVideoTracks( + boolean forceSessionsForAudioAndVideoTracks) { + this.setForcedSessionTrackTypes( + forceSessionsForAudioAndVideoTracks + ? ImmutableList.of(C.TRACK_TYPE_VIDEO, C.TRACK_TYPE_AUDIO) + : ImmutableList.of()); + return this; + } + + /** + * Sets a list of {@link C.TrackType track type} constants for which to use a DRM session even + * when the tracks are in the clear. + * + *

      For the common case of using a DRM session for {@link C#TRACK_TYPE_VIDEO} and {@link + * C#TRACK_TYPE_AUDIO}, {@link #setForceSessionsForAudioAndVideoTracks(boolean)} can be used. + * + *

      This method overrides what has been set by previously calling {@link + * #setForceSessionsForAudioAndVideoTracks(boolean)}. + */ + @CanIgnoreReturnValue + public Builder setForcedSessionTrackTypes( + List<@C.TrackType Integer> forcedSessionTrackTypes) { + this.forcedSessionTrackTypes = ImmutableList.copyOf(forcedSessionTrackTypes); + return this; + } + + /** + * Sets the key set ID of the offline license. + * + *

      The key set ID identifies an offline license. The ID is required to query, renew or + * release an existing offline license (see {@code DefaultDrmSessionManager#setMode(int + * mode,byte[] offlineLicenseKeySetId)}). + */ + @CanIgnoreReturnValue + public Builder setKeySetId(@Nullable byte[] keySetId) { + this.keySetId = keySetId != null ? Arrays.copyOf(keySetId, keySetId.length) : null; + return this; + } + + public DrmConfiguration build() { + + return new DrmConfiguration(this); + } + } + + /** The UUID of the protection scheme. */ + public final UUID scheme; + + /** + * @deprecated Use {@link #scheme} instead. + */ + @Deprecated public final UUID uuid; + + /** + * Optional default DRM license server {@link Uri}. If {@code null} then the DRM license server + * must be specified by the media. + */ + @Nullable public final Uri licenseUri; + + /** + * @deprecated Use {@link #licenseRequestHeaders} instead. + */ + @Deprecated public final ImmutableMap requestHeaders; + + /** The headers to attach to requests sent to the DRM license server. */ + public final ImmutableMap licenseRequestHeaders; + + /** Whether the DRM configuration is multi session enabled. */ + public final boolean multiSession; + + /** + * Whether clear samples within protected content should be played when keys for the encrypted + * part of the content have yet to be loaded. + */ + public final boolean playClearContentWithoutKey; + + /** + * Whether to force use of {@link #licenseUri} even if the media specifies its own DRM license + * server URI. + */ + public final boolean forceDefaultLicenseUri; + + /** + * @deprecated Use {@link #forcedSessionTrackTypes}. + */ + @Deprecated public final ImmutableList<@C.TrackType Integer> sessionForClearTypes; + /** + * The types of tracks for which to always use a DRM session even if the content is unencrypted. + */ + public final ImmutableList<@C.TrackType Integer> forcedSessionTrackTypes; + + @Nullable private final byte[] keySetId; + + @SuppressWarnings("deprecation") // Setting deprecated field + private DrmConfiguration(Builder builder) { + checkState(!(builder.forceDefaultLicenseUri && builder.licenseUri == null)); + this.scheme = checkNotNull(builder.scheme); + this.uuid = scheme; + this.licenseUri = builder.licenseUri; + this.requestHeaders = builder.licenseRequestHeaders; + this.licenseRequestHeaders = builder.licenseRequestHeaders; + this.multiSession = builder.multiSession; + this.forceDefaultLicenseUri = builder.forceDefaultLicenseUri; + this.playClearContentWithoutKey = builder.playClearContentWithoutKey; + this.sessionForClearTypes = builder.forcedSessionTrackTypes; + this.forcedSessionTrackTypes = builder.forcedSessionTrackTypes; + this.keySetId = + builder.keySetId != null + ? Arrays.copyOf(builder.keySetId, builder.keySetId.length) + : null; + } + + /** Returns the key set ID of the offline license. */ + @Nullable + public byte[] getKeySetId() { + return keySetId != null ? Arrays.copyOf(keySetId, keySetId.length) : null; + } + + /** Returns a {@link Builder} initialized with the values of this instance. 
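A minimal usage sketch for the DrmConfiguration builder above, combined with MediaItem.Builder. The DASH URL, license URL and the choice of Widevine are illustrative assumptions, not values from this patch.

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.util.MimeTypes;

final class DrmItemExample {
  // Builds a hypothetical Widevine-protected DASH item; both URLs are placeholders.
  static MediaItem buildProtectedItem() {
    return new MediaItem.Builder()
        .setUri("https://example.com/stream.mpd")
        .setMimeType(MimeTypes.APPLICATION_MPD)
        .setDrmConfiguration(
            new MediaItem.DrmConfiguration.Builder(C.WIDEVINE_UUID)
                .setLicenseUri("https://example.com/drm/license")
                .setMultiSession(true)
                .build())
        .build();
  }
}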
*/ + public Builder buildUpon() { + return new Builder(this); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof DrmConfiguration)) { + return false; + } + + DrmConfiguration other = (DrmConfiguration) obj; + return scheme.equals(other.scheme) + && Util.areEqual(licenseUri, other.licenseUri) + && Util.areEqual(licenseRequestHeaders, other.licenseRequestHeaders) + && multiSession == other.multiSession + && forceDefaultLicenseUri == other.forceDefaultLicenseUri + && playClearContentWithoutKey == other.playClearContentWithoutKey + && forcedSessionTrackTypes.equals(other.forcedSessionTrackTypes) + && Arrays.equals(keySetId, other.keySetId); + } + + @Override + public int hashCode() { + int result = scheme.hashCode(); + result = 31 * result + (licenseUri != null ? licenseUri.hashCode() : 0); + result = 31 * result + licenseRequestHeaders.hashCode(); + result = 31 * result + (multiSession ? 1 : 0); + result = 31 * result + (forceDefaultLicenseUri ? 1 : 0); + result = 31 * result + (playClearContentWithoutKey ? 1 : 0); + result = 31 * result + forcedSessionTrackTypes.hashCode(); + result = 31 * result + Arrays.hashCode(keySetId); + return result; + } + } + + /** Configuration for playing back linear ads with a media item. */ + public static final class AdsConfiguration { + + /** Builder for {@link AdsConfiguration} instances. */ + public static final class Builder { + + private Uri adTagUri; + @Nullable private Object adsId; + + /** + * Constructs a new instance. + * + * @param adTagUri The ad tag URI to load. + */ + public Builder(Uri adTagUri) { + this.adTagUri = adTagUri; + } + + /** Sets the ad tag URI to load. */ + @CanIgnoreReturnValue + public Builder setAdTagUri(Uri adTagUri) { + this.adTagUri = adTagUri; + return this; + } + + /** + * Sets the ads identifier. + * + *

      See details on {@link AdsConfiguration#adsId} for how the ads identifier is used and how + * it's calculated if not explicitly set. + */ + @CanIgnoreReturnValue + public Builder setAdsId(@Nullable Object adsId) { + this.adsId = adsId; + return this; + } + + public AdsConfiguration build() { + return new AdsConfiguration(this); + } + } + + /** The ad tag URI to load. */ + public final Uri adTagUri; + + /** + * An opaque identifier for ad playback state associated with this item, or {@code null} if the + * combination of the {@link MediaItem.Builder#setMediaId(String) media ID} and {@link #adTagUri + * ad tag URI} should be used as the ads identifier. + * + *

      Media items in the playlist that have the same ads identifier and ads loader share the + * same ad playback state. To resume ad playback when recreating the playlist on returning from + * the background, pass the same ads identifiers to the player. + */ + @Nullable public final Object adsId; + + private AdsConfiguration(Builder builder) { + this.adTagUri = builder.adTagUri; + this.adsId = builder.adsId; + } + + /** Returns a {@link Builder} initialized with the values of this instance. */ + public Builder buildUpon() { + return new Builder(adTagUri).setAdsId(adsId); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof AdsConfiguration)) { + return false; + } + + AdsConfiguration other = (AdsConfiguration) obj; + return adTagUri.equals(other.adTagUri) && Util.areEqual(adsId, other.adsId); + } + + @Override + public int hashCode() { + int result = adTagUri.hashCode(); + result = 31 * result + (adsId != null ? adsId.hashCode() : 0); + return result; + } + } + + /** Properties for local playback. */ + // TODO: Mark this final when PlaybackProperties is deleted. + public static class LocalConfiguration { + + /** The {@link Uri}. */ + public final Uri uri; + + /** + * The optional MIME type of the item, or {@code null} if unspecified. + * + *

      The MIME type can be used to disambiguate media items that have a URI which does not allow + * to infer the actual media type. + */ + @Nullable public final String mimeType; + + /** Optional {@link DrmConfiguration} for the media. */ + @Nullable public final DrmConfiguration drmConfiguration; + + /** Optional ads configuration. */ + @Nullable public final AdsConfiguration adsConfiguration; + + /** Optional stream keys by which the manifest is filtered. */ + public final List streamKeys; + + /** Optional custom cache key (only used for progressive streams). */ + @Nullable public final String customCacheKey; + + /** Optional subtitles to be sideloaded. */ + public final ImmutableList subtitleConfigurations; + /** + * @deprecated Use {@link #subtitleConfigurations} instead. + */ + @Deprecated public final List subtitles; + + /** + * Optional tag for custom attributes. The tag for the media source which will be published in + * the {@code com.google.android.exoplayer2.Timeline} of the source as {@code + * com.google.android.exoplayer2.Timeline.Window#tag}. + */ + @Nullable public final Object tag; + + @SuppressWarnings("deprecation") // Setting deprecated subtitles field. + private LocalConfiguration( + Uri uri, + @Nullable String mimeType, + @Nullable DrmConfiguration drmConfiguration, + @Nullable AdsConfiguration adsConfiguration, + List streamKeys, + @Nullable String customCacheKey, + ImmutableList subtitleConfigurations, + @Nullable Object tag) { + this.uri = uri; + this.mimeType = mimeType; + this.drmConfiguration = drmConfiguration; + this.adsConfiguration = adsConfiguration; + this.streamKeys = streamKeys; + this.customCacheKey = customCacheKey; + this.subtitleConfigurations = subtitleConfigurations; + ImmutableList.Builder subtitles = ImmutableList.builder(); + for (int i = 0; i < subtitleConfigurations.size(); i++) { + subtitles.add(subtitleConfigurations.get(i).buildUpon().buildSubtitle()); + } + this.subtitles = subtitles.build(); + this.tag = tag; + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof LocalConfiguration)) { + return false; + } + LocalConfiguration other = (LocalConfiguration) obj; + + return uri.equals(other.uri) + && Util.areEqual(mimeType, other.mimeType) + && Util.areEqual(drmConfiguration, other.drmConfiguration) + && Util.areEqual(adsConfiguration, other.adsConfiguration) + && streamKeys.equals(other.streamKeys) + && Util.areEqual(customCacheKey, other.customCacheKey) + && subtitleConfigurations.equals(other.subtitleConfigurations) + && Util.areEqual(tag, other.tag); + } + + @Override + public int hashCode() { + int result = uri.hashCode(); + result = 31 * result + (mimeType == null ? 0 : mimeType.hashCode()); + result = 31 * result + (drmConfiguration == null ? 0 : drmConfiguration.hashCode()); + result = 31 * result + (adsConfiguration == null ? 0 : adsConfiguration.hashCode()); + result = 31 * result + streamKeys.hashCode(); + result = 31 * result + (customCacheKey == null ? 0 : customCacheKey.hashCode()); + result = 31 * result + subtitleConfigurations.hashCode(); + result = 31 * result + (tag == null ? 0 : tag.hashCode()); + return result; + } + } + + /** + * @deprecated Use {@link LocalConfiguration}. 
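For callers migrating off the deprecated PlaybackProperties field, a small sketch of reading the same data through localConfiguration. The helper class and method names are made up; the null check matters because localConfiguration is absent when the item was built without a URI.

import androidx.annotation.Nullable;
import com.google.android.exoplayer2.MediaItem;

final class LocalConfigurationExample {
  // Returns the custom cache key of an item, or null if the item has no local
  // configuration (built without a URI) or no cache key was set.
  @Nullable
  static String customCacheKeyOf(MediaItem item) {
    @Nullable MediaItem.LocalConfiguration localConfiguration = item.localConfiguration;
    return localConfiguration != null ? localConfiguration.customCacheKey : null;
  }
}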
+ */ + @Deprecated + public static final class PlaybackProperties extends LocalConfiguration { + + private PlaybackProperties( + Uri uri, + @Nullable String mimeType, + @Nullable DrmConfiguration drmConfiguration, + @Nullable AdsConfiguration adsConfiguration, + List streamKeys, + @Nullable String customCacheKey, + ImmutableList subtitleConfigurations, + @Nullable Object tag) { + super( + uri, + mimeType, + drmConfiguration, + adsConfiguration, + streamKeys, + customCacheKey, + subtitleConfigurations, + tag); + } + } + + /** Live playback configuration. */ + public static final class LiveConfiguration implements Bundleable { + + /** Builder for {@link LiveConfiguration} instances. */ + public static final class Builder { + private long targetOffsetMs; + private long minOffsetMs; + private long maxOffsetMs; + private float minPlaybackSpeed; + private float maxPlaybackSpeed; + + /** Creates a new instance with default values. */ + public Builder() { + this.targetOffsetMs = C.TIME_UNSET; + this.minOffsetMs = C.TIME_UNSET; + this.maxOffsetMs = C.TIME_UNSET; + this.minPlaybackSpeed = C.RATE_UNSET; + this.maxPlaybackSpeed = C.RATE_UNSET; + } + + private Builder(LiveConfiguration liveConfiguration) { + this.targetOffsetMs = liveConfiguration.targetOffsetMs; + this.minOffsetMs = liveConfiguration.minOffsetMs; + this.maxOffsetMs = liveConfiguration.maxOffsetMs; + this.minPlaybackSpeed = liveConfiguration.minPlaybackSpeed; + this.maxPlaybackSpeed = liveConfiguration.maxPlaybackSpeed; + } + + /** + * Sets the target live offset, in milliseconds. + * + *

      See {@code Player#getCurrentLiveOffset()}. + * + *

      Defaults to {@link C#TIME_UNSET}, indicating the media-defined default will be used. + */ + @CanIgnoreReturnValue + public Builder setTargetOffsetMs(long targetOffsetMs) { + this.targetOffsetMs = targetOffsetMs; + return this; + } + + /** + * Sets the minimum allowed live offset, in milliseconds. + * + *

      See {@code Player#getCurrentLiveOffset()}. + * + *

      Defaults to {@link C#TIME_UNSET}, indicating the media-defined default will be used. + */ + @CanIgnoreReturnValue + public Builder setMinOffsetMs(long minOffsetMs) { + this.minOffsetMs = minOffsetMs; + return this; + } + + /** + * Sets the maximum allowed live offset, in milliseconds. + * + *

      See {@code Player#getCurrentLiveOffset()}. + * + *

      Defaults to {@link C#TIME_UNSET}, indicating the media-defined default will be used. + */ + @CanIgnoreReturnValue + public Builder setMaxOffsetMs(long maxOffsetMs) { + this.maxOffsetMs = maxOffsetMs; + return this; + } + + /** + * Sets the minimum playback speed. + * + *

      Defaults to {@link C#RATE_UNSET}, indicating the media-defined default will be used. + */ + @CanIgnoreReturnValue + public Builder setMinPlaybackSpeed(float minPlaybackSpeed) { + this.minPlaybackSpeed = minPlaybackSpeed; + return this; + } + + /** + * Sets the maximum playback speed. + * + *

      Defaults to {@link C#RATE_UNSET}, indicating the media-defined default will be used. + */ + @CanIgnoreReturnValue + public Builder setMaxPlaybackSpeed(float maxPlaybackSpeed) { + this.maxPlaybackSpeed = maxPlaybackSpeed; + return this; + } + + /** Creates a {@link LiveConfiguration} with the values from this builder. */ + public LiveConfiguration build() { + return new LiveConfiguration(this); + } + } + + /** + * A live playback configuration with unset values, meaning media-defined default values will be + * used. + */ + public static final LiveConfiguration UNSET = new LiveConfiguration.Builder().build(); + + /** + * Target offset from the live edge, in milliseconds, or {@link C#TIME_UNSET} to use the + * media-defined default. + */ + public final long targetOffsetMs; + + /** + * The minimum allowed offset from the live edge, in milliseconds, or {@link C#TIME_UNSET} to + * use the media-defined default. + */ + public final long minOffsetMs; + + /** + * The maximum allowed offset from the live edge, in milliseconds, or {@link C#TIME_UNSET} to + * use the media-defined default. + */ + public final long maxOffsetMs; + + /** + * Minimum factor by which playback can be sped up, or {@link C#RATE_UNSET} to use the + * media-defined default. + */ + public final float minPlaybackSpeed; + + /** + * Maximum factor by which playback can be sped up, or {@link C#RATE_UNSET} to use the + * media-defined default. + */ + public final float maxPlaybackSpeed; + + @SuppressWarnings("deprecation") // Using the deprecated constructor while it exists. + private LiveConfiguration(Builder builder) { + this( + builder.targetOffsetMs, + builder.minOffsetMs, + builder.maxOffsetMs, + builder.minPlaybackSpeed, + builder.maxPlaybackSpeed); + } + + /** + * @deprecated Use {@link Builder} instead. + */ + @Deprecated + public LiveConfiguration( + long targetOffsetMs, + long minOffsetMs, + long maxOffsetMs, + float minPlaybackSpeed, + float maxPlaybackSpeed) { + this.targetOffsetMs = targetOffsetMs; + this.minOffsetMs = minOffsetMs; + this.maxOffsetMs = maxOffsetMs; + this.minPlaybackSpeed = minPlaybackSpeed; + this.maxPlaybackSpeed = maxPlaybackSpeed; + } + + /** Returns a {@link Builder} initialized with the values of this instance. */ + public Builder buildUpon() { + return new Builder(this); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof LiveConfiguration)) { + return false; + } + LiveConfiguration other = (LiveConfiguration) obj; + + return targetOffsetMs == other.targetOffsetMs + && minOffsetMs == other.minOffsetMs + && maxOffsetMs == other.maxOffsetMs + && minPlaybackSpeed == other.minPlaybackSpeed + && maxPlaybackSpeed == other.maxPlaybackSpeed; + } + + @Override + public int hashCode() { + int result = (int) (targetOffsetMs ^ (targetOffsetMs >>> 32)); + result = 31 * result + (int) (minOffsetMs ^ (minOffsetMs >>> 32)); + result = 31 * result + (int) (maxOffsetMs ^ (maxOffsetMs >>> 32)); + result = 31 * result + (minPlaybackSpeed != 0 ? Float.floatToIntBits(minPlaybackSpeed) : 0); + result = 31 * result + (maxPlaybackSpeed != 0 ? Float.floatToIntBits(maxPlaybackSpeed) : 0); + return result; + } + + // Bundleable implementation. 
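A short usage sketch for the LiveConfiguration builder above; the offset and speed bounds are arbitrary example values, not defaults taken from this patch.

import com.google.android.exoplayer2.MediaItem;

final class LiveConfigurationExample {
  // Targets a 5 second live offset and lets playback speed drift slightly to hold it.
  static MediaItem buildLowLatencyItem(String liveStreamUrl) {
    return new MediaItem.Builder()
        .setUri(liveStreamUrl)
        .setLiveConfiguration(
            new MediaItem.LiveConfiguration.Builder()
                .setTargetOffsetMs(5_000)
                .setMinPlaybackSpeed(0.97f)
                .setMaxPlaybackSpeed(1.05f)
                .build())
        .build();
  }
}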
+ + private static final String FIELD_TARGET_OFFSET_MS = Util.intToStringMaxRadix(0); + private static final String FIELD_MIN_OFFSET_MS = Util.intToStringMaxRadix(1); + private static final String FIELD_MAX_OFFSET_MS = Util.intToStringMaxRadix(2); + private static final String FIELD_MIN_PLAYBACK_SPEED = Util.intToStringMaxRadix(3); + private static final String FIELD_MAX_PLAYBACK_SPEED = Util.intToStringMaxRadix(4); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + if (targetOffsetMs != UNSET.targetOffsetMs) { + bundle.putLong(FIELD_TARGET_OFFSET_MS, targetOffsetMs); + } + if (minOffsetMs != UNSET.minOffsetMs) { + bundle.putLong(FIELD_MIN_OFFSET_MS, minOffsetMs); + } + if (maxOffsetMs != UNSET.maxOffsetMs) { + bundle.putLong(FIELD_MAX_OFFSET_MS, maxOffsetMs); + } + if (minPlaybackSpeed != UNSET.minPlaybackSpeed) { + bundle.putFloat(FIELD_MIN_PLAYBACK_SPEED, minPlaybackSpeed); + } + if (maxPlaybackSpeed != UNSET.maxPlaybackSpeed) { + bundle.putFloat(FIELD_MAX_PLAYBACK_SPEED, maxPlaybackSpeed); + } + return bundle; + } + + /** Object that can restore {@link LiveConfiguration} from a {@link Bundle}. */ + public static final Creator CREATOR = + bundle -> + new LiveConfiguration( + bundle.getLong(FIELD_TARGET_OFFSET_MS, /* defaultValue= */ UNSET.targetOffsetMs), + bundle.getLong(FIELD_MIN_OFFSET_MS, /* defaultValue= */ UNSET.minOffsetMs), + bundle.getLong(FIELD_MAX_OFFSET_MS, /* defaultValue= */ UNSET.maxOffsetMs), + bundle.getFloat( + FIELD_MIN_PLAYBACK_SPEED, /* defaultValue= */ UNSET.minPlaybackSpeed), + bundle.getFloat( + FIELD_MAX_PLAYBACK_SPEED, /* defaultValue= */ UNSET.maxPlaybackSpeed)); + } + + /** Properties for a text track. */ + // TODO: Mark this final when Subtitle is deleted. + public static class SubtitleConfiguration { + + /** Builder for {@link SubtitleConfiguration} instances. */ + public static final class Builder { + private Uri uri; + @Nullable private String mimeType; + @Nullable private String language; + private @C.SelectionFlags int selectionFlags; + private @C.RoleFlags int roleFlags; + @Nullable private String label; + @Nullable private String id; + + /** + * Constructs an instance. + * + * @param uri The {@link Uri} to the subtitle file. + */ + public Builder(Uri uri) { + this.uri = uri; + } + + private Builder(SubtitleConfiguration subtitleConfiguration) { + this.uri = subtitleConfiguration.uri; + this.mimeType = subtitleConfiguration.mimeType; + this.language = subtitleConfiguration.language; + this.selectionFlags = subtitleConfiguration.selectionFlags; + this.roleFlags = subtitleConfiguration.roleFlags; + this.label = subtitleConfiguration.label; + this.id = subtitleConfiguration.id; + } + + /** Sets the {@link Uri} to the subtitle file. */ + @CanIgnoreReturnValue + public Builder setUri(Uri uri) { + this.uri = uri; + return this; + } + + /** Sets the MIME type. */ + @CanIgnoreReturnValue + public Builder setMimeType(@Nullable String mimeType) { + this.mimeType = mimeType; + return this; + } + + /** Sets the optional language of the subtitle file. */ + @CanIgnoreReturnValue + public Builder setLanguage(@Nullable String language) { + this.language = language; + return this; + } + + /** Sets the flags used for track selection. */ + @CanIgnoreReturnValue + public Builder setSelectionFlags(@C.SelectionFlags int selectionFlags) { + this.selectionFlags = selectionFlags; + return this; + } + + /** Sets the role flags. These are used for track selection. 
*/ + @CanIgnoreReturnValue + public Builder setRoleFlags(@C.RoleFlags int roleFlags) { + this.roleFlags = roleFlags; + return this; + } + + /** Sets the optional label for this subtitle track. */ + @CanIgnoreReturnValue + public Builder setLabel(@Nullable String label) { + this.label = label; + return this; + } + + /** Sets the optional ID for this subtitle track. */ + @CanIgnoreReturnValue + public Builder setId(@Nullable String id) { + this.id = id; + return this; + } + + /** Creates a {@link SubtitleConfiguration} from the values of this builder. */ + public SubtitleConfiguration build() { + return new SubtitleConfiguration(this); + } + + private Subtitle buildSubtitle() { + return new Subtitle(this); + } + } + + /** The {@link Uri} to the subtitle file. */ + public final Uri uri; + /** The optional MIME type of the subtitle file, or {@code null} if unspecified. */ + @Nullable public final String mimeType; + /** The language. */ + @Nullable public final String language; + /** The selection flags. */ + public final @C.SelectionFlags int selectionFlags; + /** The role flags. */ + public final @C.RoleFlags int roleFlags; + /** The label. */ + @Nullable public final String label; + /** + * The ID of the subtitles. This will be propagated to the {@link Format#id} of the subtitle + * track created from this configuration. + */ + @Nullable public final String id; + + private SubtitleConfiguration( + Uri uri, + String mimeType, + @Nullable String language, + int selectionFlags, + int roleFlags, + @Nullable String label, + @Nullable String id) { + this.uri = uri; + this.mimeType = mimeType; + this.language = language; + this.selectionFlags = selectionFlags; + this.roleFlags = roleFlags; + this.label = label; + this.id = id; + } + + private SubtitleConfiguration(Builder builder) { + this.uri = builder.uri; + this.mimeType = builder.mimeType; + this.language = builder.language; + this.selectionFlags = builder.selectionFlags; + this.roleFlags = builder.roleFlags; + this.label = builder.label; + this.id = builder.id; + } + + /** Returns a {@link Builder} initialized with the values of this instance. */ + public Builder buildUpon() { + return new Builder(this); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof SubtitleConfiguration)) { + return false; + } + + SubtitleConfiguration other = (SubtitleConfiguration) obj; + + return uri.equals(other.uri) + && Util.areEqual(mimeType, other.mimeType) + && Util.areEqual(language, other.language) + && selectionFlags == other.selectionFlags + && roleFlags == other.roleFlags + && Util.areEqual(label, other.label) + && Util.areEqual(id, other.id); + } + + @Override + public int hashCode() { + int result = uri.hashCode(); + result = 31 * result + (mimeType == null ? 0 : mimeType.hashCode()); + result = 31 * result + (language == null ? 0 : language.hashCode()); + result = 31 * result + selectionFlags; + result = 31 * result + roleFlags; + result = 31 * result + (label == null ? 0 : label.hashCode()); + result = 31 * result + (id == null ? 0 : id.hashCode()); + return result; + } + } + + /** + * @deprecated Use {@link MediaItem.SubtitleConfiguration} instead + */ + @Deprecated + public static final class Subtitle extends SubtitleConfiguration { + + /** + * @deprecated Use {@link Builder} instead. 
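A usage sketch for SubtitleConfiguration above; the WebVTT URL and the language are placeholder assumptions.

import android.net.Uri;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.common.collect.ImmutableList;

final class SubtitleExample {
  // Sideloads a single English WebVTT track next to the main stream.
  static MediaItem withSideloadedSubtitle(String videoUrl) {
    MediaItem.SubtitleConfiguration subtitle =
        new MediaItem.SubtitleConfiguration.Builder(Uri.parse("https://example.com/subs/en.vtt"))
            .setMimeType(MimeTypes.TEXT_VTT)
            .setLanguage("en")
            .setSelectionFlags(C.SELECTION_FLAG_DEFAULT)
            .build();
    return new MediaItem.Builder()
        .setUri(videoUrl)
        .setSubtitleConfigurations(ImmutableList.of(subtitle))
        .build();
  }
}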
+ */ + @Deprecated + public Subtitle(Uri uri, String mimeType, @Nullable String language) { + this(uri, mimeType, language, /* selectionFlags= */ 0); + } + + /** + * @deprecated Use {@link Builder} instead. + */ + @Deprecated + public Subtitle( + Uri uri, String mimeType, @Nullable String language, @C.SelectionFlags int selectionFlags) { + this(uri, mimeType, language, selectionFlags, /* roleFlags= */ 0, /* label= */ null); + } + + /** + * @deprecated Use {@link Builder} instead. + */ + @Deprecated + public Subtitle( + Uri uri, + String mimeType, + @Nullable String language, + @C.SelectionFlags int selectionFlags, + @C.RoleFlags int roleFlags, + @Nullable String label) { + super(uri, mimeType, language, selectionFlags, roleFlags, label, /* id= */ null); + } + + private Subtitle(Builder builder) { + super(builder); + } + } + + /** Optionally clips the media item to a custom start and end position. */ + // TODO: Mark this final when ClippingProperties is deleted. + public static class ClippingConfiguration implements Bundleable { + + /** A clipping configuration with default values. */ + public static final ClippingConfiguration UNSET = new ClippingConfiguration.Builder().build(); + + /** Builder for {@link ClippingConfiguration} instances. */ + public static final class Builder { + private long startPositionMs; + private long endPositionMs; + private boolean relativeToLiveWindow; + private boolean relativeToDefaultPosition; + private boolean startsAtKeyFrame; + + /** Creates a new instance with default values. */ + public Builder() { + endPositionMs = C.TIME_END_OF_SOURCE; + } + + private Builder(ClippingConfiguration clippingConfiguration) { + startPositionMs = clippingConfiguration.startPositionMs; + endPositionMs = clippingConfiguration.endPositionMs; + relativeToLiveWindow = clippingConfiguration.relativeToLiveWindow; + relativeToDefaultPosition = clippingConfiguration.relativeToDefaultPosition; + startsAtKeyFrame = clippingConfiguration.startsAtKeyFrame; + } + + /** + * Sets the optional start position in milliseconds which must be a value larger than or equal + * to zero (Default: 0). + */ + @CanIgnoreReturnValue + public Builder setStartPositionMs(@IntRange(from = 0) long startPositionMs) { + Assertions.checkArgument(startPositionMs >= 0); + this.startPositionMs = startPositionMs; + return this; + } + + /** + * Sets the optional end position in milliseconds which must be a value larger than or equal + * to zero, or {@link C#TIME_END_OF_SOURCE} to end when playback reaches the end of media + * (Default: {@link C#TIME_END_OF_SOURCE}). + */ + @CanIgnoreReturnValue + public Builder setEndPositionMs(long endPositionMs) { + Assertions.checkArgument(endPositionMs == C.TIME_END_OF_SOURCE || endPositionMs >= 0); + this.endPositionMs = endPositionMs; + return this; + } + + /** + * Sets whether the start/end positions should move with the live window for live streams. If + * {@code false}, live streams end when playback reaches the end position in live window seen + * when the media is first loaded (Default: {@code false}). + */ + @CanIgnoreReturnValue + public Builder setRelativeToLiveWindow(boolean relativeToLiveWindow) { + this.relativeToLiveWindow = relativeToLiveWindow; + return this; + } + + /** + * Sets whether the start position and the end position are relative to the default position + * in the window (Default: {@code false}). 
+ */ + @CanIgnoreReturnValue + public Builder setRelativeToDefaultPosition(boolean relativeToDefaultPosition) { + this.relativeToDefaultPosition = relativeToDefaultPosition; + return this; + } + + /** + * Sets whether the start point is guaranteed to be a key frame. If {@code false}, the + * playback transition into the clip may not be seamless (Default: {@code false}). + */ + @CanIgnoreReturnValue + public Builder setStartsAtKeyFrame(boolean startsAtKeyFrame) { + this.startsAtKeyFrame = startsAtKeyFrame; + return this; + } + + /** + * Returns a {@link ClippingConfiguration} instance initialized with the values of this + * builder. + */ + public ClippingConfiguration build() { + return buildClippingProperties(); + } + + /** + * @deprecated Use {@link #build()} instead. + */ + @Deprecated + public ClippingProperties buildClippingProperties() { + return new ClippingProperties(this); + } + } + + /** The start position in milliseconds. This is a value larger than or equal to zero. */ + @IntRange(from = 0) + public final long startPositionMs; + + /** + * The end position in milliseconds. This is a value larger than or equal to zero or {@link + * C#TIME_END_OF_SOURCE} to play to the end of the stream. + */ + public final long endPositionMs; + + /** + * Whether the clipping of active media periods moves with a live window. If {@code false}, + * playback ends when it reaches {@link #endPositionMs}. + */ + public final boolean relativeToLiveWindow; + + /** + * Whether {@link #startPositionMs} and {@link #endPositionMs} are relative to the default + * position. + */ + public final boolean relativeToDefaultPosition; + + /** Sets whether the start point is guaranteed to be a key frame. */ + public final boolean startsAtKeyFrame; + + private ClippingConfiguration(Builder builder) { + this.startPositionMs = builder.startPositionMs; + this.endPositionMs = builder.endPositionMs; + this.relativeToLiveWindow = builder.relativeToLiveWindow; + this.relativeToDefaultPosition = builder.relativeToDefaultPosition; + this.startsAtKeyFrame = builder.startsAtKeyFrame; + } + + /** Returns a {@link Builder} initialized with the values of this instance. */ + public Builder buildUpon() { + return new Builder(this); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof ClippingConfiguration)) { + return false; + } + + ClippingConfiguration other = (ClippingConfiguration) obj; + + return startPositionMs == other.startPositionMs + && endPositionMs == other.endPositionMs + && relativeToLiveWindow == other.relativeToLiveWindow + && relativeToDefaultPosition == other.relativeToDefaultPosition + && startsAtKeyFrame == other.startsAtKeyFrame; + } + + @Override + public int hashCode() { + int result = (int) (startPositionMs ^ (startPositionMs >>> 32)); + result = 31 * result + (int) (endPositionMs ^ (endPositionMs >>> 32)); + result = 31 * result + (relativeToLiveWindow ? 1 : 0); + result = 31 * result + (relativeToDefaultPosition ? 1 : 0); + result = 31 * result + (startsAtKeyFrame ? 1 : 0); + return result; + } + + // Bundleable implementation. 
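A usage sketch for ClippingConfiguration above; the 30 second window is an arbitrary example.

import com.google.android.exoplayer2.MediaItem;

final class ClippingExample {
  // Plays only the first 30 seconds of the media.
  static MediaItem firstThirtySeconds(String mediaUrl) {
    return new MediaItem.Builder()
        .setUri(mediaUrl)
        .setClippingConfiguration(
            new MediaItem.ClippingConfiguration.Builder()
                .setStartPositionMs(0)
                .setEndPositionMs(30_000)
                .build())
        .build();
  }
}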
+ + private static final String FIELD_START_POSITION_MS = Util.intToStringMaxRadix(0); + private static final String FIELD_END_POSITION_MS = Util.intToStringMaxRadix(1); + private static final String FIELD_RELATIVE_TO_LIVE_WINDOW = Util.intToStringMaxRadix(2); + private static final String FIELD_RELATIVE_TO_DEFAULT_POSITION = Util.intToStringMaxRadix(3); + private static final String FIELD_STARTS_AT_KEY_FRAME = Util.intToStringMaxRadix(4); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + if (startPositionMs != UNSET.startPositionMs) { + bundle.putLong(FIELD_START_POSITION_MS, startPositionMs); + } + if (endPositionMs != UNSET.endPositionMs) { + bundle.putLong(FIELD_END_POSITION_MS, endPositionMs); + } + if (relativeToLiveWindow != UNSET.relativeToLiveWindow) { + bundle.putBoolean(FIELD_RELATIVE_TO_LIVE_WINDOW, relativeToLiveWindow); + } + if (relativeToDefaultPosition != UNSET.relativeToDefaultPosition) { + bundle.putBoolean(FIELD_RELATIVE_TO_DEFAULT_POSITION, relativeToDefaultPosition); + } + if (startsAtKeyFrame != UNSET.startsAtKeyFrame) { + bundle.putBoolean(FIELD_STARTS_AT_KEY_FRAME, startsAtKeyFrame); + } + return bundle; + } + + /** Object that can restore {@link ClippingConfiguration} from a {@link Bundle}. */ + public static final Creator CREATOR = + bundle -> + new ClippingConfiguration.Builder() + .setStartPositionMs( + bundle.getLong( + FIELD_START_POSITION_MS, /* defaultValue= */ UNSET.startPositionMs)) + .setEndPositionMs( + bundle.getLong(FIELD_END_POSITION_MS, /* defaultValue= */ UNSET.endPositionMs)) + .setRelativeToLiveWindow( + bundle.getBoolean( + FIELD_RELATIVE_TO_LIVE_WINDOW, + /* defaultValue= */ UNSET.relativeToLiveWindow)) + .setRelativeToDefaultPosition( + bundle.getBoolean( + FIELD_RELATIVE_TO_DEFAULT_POSITION, + /* defaultValue= */ UNSET.relativeToDefaultPosition)) + .setStartsAtKeyFrame( + bundle.getBoolean( + FIELD_STARTS_AT_KEY_FRAME, /* defaultValue= */ UNSET.startsAtKeyFrame)) + .buildClippingProperties(); + } + + /** + * @deprecated Use {@link ClippingConfiguration} instead. + */ + @Deprecated + public static final class ClippingProperties extends ClippingConfiguration { + public static final ClippingProperties UNSET = + new ClippingConfiguration.Builder().buildClippingProperties(); + + private ClippingProperties(Builder builder) { + super(builder); + } + } + + /** + * Metadata that helps the player to understand a playback request represented by a {@link + * MediaItem}. + * + *

      This metadata is most useful for cases where playback requests are forwarded to other player + * instances (e.g. from a {@code androidx.media3.session.MediaController}) and the player creating + * the request doesn't know the required {@link LocalConfiguration} for playback. + */ + public static final class RequestMetadata implements Bundleable { + + /** Empty request metadata. */ + public static final RequestMetadata EMPTY = new Builder().build(); + + /** Builder for {@link RequestMetadata} instances. */ + public static final class Builder { + + @Nullable private Uri mediaUri; + @Nullable private String searchQuery; + @Nullable private Bundle extras; + + /** Constructs an instance. */ + public Builder() {} + + private Builder(RequestMetadata requestMetadata) { + this.mediaUri = requestMetadata.mediaUri; + this.searchQuery = requestMetadata.searchQuery; + this.extras = requestMetadata.extras; + } + + /** Sets the URI of the requested media, or null if not known or applicable. */ + @CanIgnoreReturnValue + public Builder setMediaUri(@Nullable Uri mediaUri) { + this.mediaUri = mediaUri; + return this; + } + + /** Sets the search query for the requested media, or null if not applicable. */ + @CanIgnoreReturnValue + public Builder setSearchQuery(@Nullable String searchQuery) { + this.searchQuery = searchQuery; + return this; + } + + /** Sets optional extras {@link Bundle}. */ + @CanIgnoreReturnValue + public Builder setExtras(@Nullable Bundle extras) { + this.extras = extras; + return this; + } + + /** Builds the request metadata. */ + public RequestMetadata build() { + return new RequestMetadata(this); + } + } + + /** The URI of the requested media, or null if not known or applicable. */ + @Nullable public final Uri mediaUri; + + /** The search query for the requested media, or null if not applicable. */ + @Nullable public final String searchQuery; + + /** + * Optional extras {@link Bundle}. + * + *
<p>
      Given the complexities of checking the equality of two {@link Bundle}s, this is not + * considered in the {@link #equals(Object)} or {@link #hashCode()}. + */ + @Nullable public final Bundle extras; + + private RequestMetadata(Builder builder) { + this.mediaUri = builder.mediaUri; + this.searchQuery = builder.searchQuery; + this.extras = builder.extras; + } + + /** Returns a {@link Builder} initialized with the values of this instance. */ + public Builder buildUpon() { + return new Builder(this); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (!(o instanceof RequestMetadata)) { + return false; + } + RequestMetadata that = (RequestMetadata) o; + return Util.areEqual(mediaUri, that.mediaUri) && Util.areEqual(searchQuery, that.searchQuery); + } + + @Override + public int hashCode() { + int result = mediaUri == null ? 0 : mediaUri.hashCode(); + result = 31 * result + (searchQuery == null ? 0 : searchQuery.hashCode()); + return result; + } + + // Bundleable implementation. + + private static final String FIELD_MEDIA_URI = Util.intToStringMaxRadix(0); + private static final String FIELD_SEARCH_QUERY = Util.intToStringMaxRadix(1); + private static final String FIELD_EXTRAS = Util.intToStringMaxRadix(2); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + if (mediaUri != null) { + bundle.putParcelable(FIELD_MEDIA_URI, mediaUri); + } + if (searchQuery != null) { + bundle.putString(FIELD_SEARCH_QUERY, searchQuery); + } + if (extras != null) { + bundle.putBundle(FIELD_EXTRAS, extras); + } + return bundle; + } + + /** Object that can restore {@link RequestMetadata} from a {@link Bundle}. */ + public static final Creator CREATOR = + bundle -> + new RequestMetadata.Builder() + .setMediaUri(bundle.getParcelable(FIELD_MEDIA_URI)) + .setSearchQuery(bundle.getString(FIELD_SEARCH_QUERY)) + .setExtras(bundle.getBundle(FIELD_EXTRAS)) + .build(); + } + + /** + * The default media ID that is used if the media ID is not explicitly set by {@link + * Builder#setMediaId(String)}. + */ + public static final String DEFAULT_MEDIA_ID = ""; + + /** Empty {@link MediaItem}. */ + public static final MediaItem EMPTY = new MediaItem.Builder().build(); + + /** Identifies the media item. */ + public final String mediaId; + + /** + * Optional configuration for local playback. May be {@code null} if shared over process + * boundaries. + */ + @Nullable public final LocalConfiguration localConfiguration; + /** + * @deprecated Use {@link #localConfiguration} instead. + */ + @Deprecated @Nullable public final PlaybackProperties playbackProperties; + + /** The live playback configuration. */ + public final LiveConfiguration liveConfiguration; + + /** The media metadata. */ + public final MediaMetadata mediaMetadata; + + /** The clipping properties. */ + public final ClippingConfiguration clippingConfiguration; + /** + * @deprecated Use {@link #clippingConfiguration} instead. + */ + @Deprecated public final ClippingProperties clippingProperties; + + /** The media {@link RequestMetadata}. */ + public final RequestMetadata requestMetadata; + + // Using PlaybackProperties and ClippingProperties until they're deleted. 
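// Illustrative sketch (assumption, not part of the upstream change): round-trips the
// RequestMetadata type defined above through its Bundle form, using only members declared in
// this file plus the android.net.Uri and android.os.Bundle imports at the top. The URI and
// query values are hypothetical.
private static RequestMetadata copyRequestMetadataViaBundle() {
  RequestMetadata requestMetadata =
      new RequestMetadata.Builder()
          .setMediaUri(Uri.parse("https://example.invalid/track"))
          .setSearchQuery("example query")
          .build();
  Bundle bundle = requestMetadata.toBundle();
  // equals() compares only mediaUri and searchQuery and deliberately ignores extras (see the
  // field documentation above), so the restored instance equals the original.
  return RequestMetadata.CREATOR.fromBundle(bundle);
}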
+ @SuppressWarnings("deprecation") + private MediaItem( + String mediaId, + ClippingProperties clippingConfiguration, + @Nullable PlaybackProperties localConfiguration, + LiveConfiguration liveConfiguration, + MediaMetadata mediaMetadata, + RequestMetadata requestMetadata) { + this.mediaId = mediaId; + this.localConfiguration = localConfiguration; + this.playbackProperties = localConfiguration; + this.liveConfiguration = liveConfiguration; + this.mediaMetadata = mediaMetadata; + this.clippingConfiguration = clippingConfiguration; + this.clippingProperties = clippingConfiguration; + this.requestMetadata = requestMetadata; + } + + /** Returns a {@link Builder} initialized with the values of this instance. */ + public Builder buildUpon() { + return new Builder(this); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof MediaItem)) { + return false; + } + + MediaItem other = (MediaItem) obj; + + return Util.areEqual(mediaId, other.mediaId) + && clippingConfiguration.equals(other.clippingConfiguration) + && Util.areEqual(localConfiguration, other.localConfiguration) + && Util.areEqual(liveConfiguration, other.liveConfiguration) + && Util.areEqual(mediaMetadata, other.mediaMetadata) + && Util.areEqual(requestMetadata, other.requestMetadata); + } + + @Override + public int hashCode() { + int result = mediaId.hashCode(); + result = 31 * result + (localConfiguration != null ? localConfiguration.hashCode() : 0); + result = 31 * result + liveConfiguration.hashCode(); + result = 31 * result + clippingConfiguration.hashCode(); + result = 31 * result + mediaMetadata.hashCode(); + result = 31 * result + requestMetadata.hashCode(); + return result; + } + + // Bundleable implementation. + private static final String FIELD_MEDIA_ID = Util.intToStringMaxRadix(0); + private static final String FIELD_LIVE_CONFIGURATION = Util.intToStringMaxRadix(1); + private static final String FIELD_MEDIA_METADATA = Util.intToStringMaxRadix(2); + private static final String FIELD_CLIPPING_PROPERTIES = Util.intToStringMaxRadix(3); + private static final String FIELD_REQUEST_METADATA = Util.intToStringMaxRadix(4); + + /** + * {@inheritDoc} + * + *
<p>
      It omits the {@link #localConfiguration} field. The {@link #localConfiguration} of an + * instance restored by {@link #CREATOR} will always be {@code null}. + */ + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + if (!mediaId.equals(DEFAULT_MEDIA_ID)) { + bundle.putString(FIELD_MEDIA_ID, mediaId); + } + if (!liveConfiguration.equals(LiveConfiguration.UNSET)) { + bundle.putBundle(FIELD_LIVE_CONFIGURATION, liveConfiguration.toBundle()); + } + if (!mediaMetadata.equals(MediaMetadata.EMPTY)) { + bundle.putBundle(FIELD_MEDIA_METADATA, mediaMetadata.toBundle()); + } + if (!clippingConfiguration.equals(ClippingConfiguration.UNSET)) { + bundle.putBundle(FIELD_CLIPPING_PROPERTIES, clippingConfiguration.toBundle()); + } + if (!requestMetadata.equals(RequestMetadata.EMPTY)) { + bundle.putBundle(FIELD_REQUEST_METADATA, requestMetadata.toBundle()); + } + return bundle; + } + + /** + * Object that can restore {@link MediaItem} from a {@link Bundle}. + * + *
<p>
      The {@link #localConfiguration} of a restored instance will always be {@code null}. + */ + public static final Creator CREATOR = MediaItem::fromBundle; + + @SuppressWarnings("deprecation") // Unbundling to ClippingProperties while it still exists. + private static MediaItem fromBundle(Bundle bundle) { + String mediaId = checkNotNull(bundle.getString(FIELD_MEDIA_ID, DEFAULT_MEDIA_ID)); + @Nullable Bundle liveConfigurationBundle = bundle.getBundle(FIELD_LIVE_CONFIGURATION); + LiveConfiguration liveConfiguration; + if (liveConfigurationBundle == null) { + liveConfiguration = LiveConfiguration.UNSET; + } else { + liveConfiguration = LiveConfiguration.CREATOR.fromBundle(liveConfigurationBundle); + } + @Nullable Bundle mediaMetadataBundle = bundle.getBundle(FIELD_MEDIA_METADATA); + MediaMetadata mediaMetadata; + if (mediaMetadataBundle == null) { + mediaMetadata = MediaMetadata.EMPTY; + } else { + mediaMetadata = MediaMetadata.CREATOR.fromBundle(mediaMetadataBundle); + } + @Nullable Bundle clippingConfigurationBundle = bundle.getBundle(FIELD_CLIPPING_PROPERTIES); + ClippingProperties clippingConfiguration; + if (clippingConfigurationBundle == null) { + clippingConfiguration = ClippingProperties.UNSET; + } else { + clippingConfiguration = ClippingConfiguration.CREATOR.fromBundle(clippingConfigurationBundle); + } + @Nullable Bundle requestMetadataBundle = bundle.getBundle(FIELD_REQUEST_METADATA); + RequestMetadata requestMetadata; + if (requestMetadataBundle == null) { + requestMetadata = RequestMetadata.EMPTY; + } else { + requestMetadata = RequestMetadata.CREATOR.fromBundle(requestMetadataBundle); + } + return new MediaItem( + mediaId, + clippingConfiguration, + /* localConfiguration= */ null, + liveConfiguration, + mediaMetadata, + requestMetadata); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaMetadata.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaMetadata.java new file mode 100644 index 0000000000..4b6900e8fd --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaMetadata.java @@ -0,0 +1,1392 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2; + +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.net.Uri; +import android.os.Bundle; +import androidx.annotation.IntDef; +import androidx.annotation.IntRange; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Objects; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.util.Arrays; +import java.util.List; + +/** + * Metadata of a {@link MediaItem}, playlist, or a combination of multiple sources of {@link + * Metadata}. + */ +public final class MediaMetadata implements Bundleable { + + /** A builder for {@link MediaMetadata} instances. */ + public static final class Builder { + + @Nullable private CharSequence title; + @Nullable private CharSequence artist; + @Nullable private CharSequence albumTitle; + @Nullable private CharSequence albumArtist; + @Nullable private CharSequence displayTitle; + @Nullable private CharSequence subtitle; + @Nullable private CharSequence description; + @Nullable private Rating userRating; + @Nullable private Rating overallRating; + @Nullable private byte[] artworkData; + @Nullable private @PictureType Integer artworkDataType; + @Nullable private Uri artworkUri; + @Nullable private Integer trackNumber; + @Nullable private Integer totalTrackCount; + @Nullable private @FolderType Integer folderType; + @Nullable private Boolean isBrowsable; + @Nullable private Boolean isPlayable; + @Nullable private Integer recordingYear; + @Nullable private Integer recordingMonth; + @Nullable private Integer recordingDay; + @Nullable private Integer releaseYear; + @Nullable private Integer releaseMonth; + @Nullable private Integer releaseDay; + @Nullable private CharSequence writer; + @Nullable private CharSequence composer; + @Nullable private CharSequence conductor; + @Nullable private Integer discNumber; + @Nullable private Integer totalDiscCount; + @Nullable private CharSequence genre; + @Nullable private CharSequence compilation; + @Nullable private CharSequence station; + @Nullable private @MediaType Integer mediaType; + @Nullable private Bundle extras; + + public Builder() {} + + private Builder(MediaMetadata mediaMetadata) { + this.title = mediaMetadata.title; + this.artist = mediaMetadata.artist; + this.albumTitle = mediaMetadata.albumTitle; + this.albumArtist = mediaMetadata.albumArtist; + this.displayTitle = mediaMetadata.displayTitle; + this.subtitle = mediaMetadata.subtitle; + this.description = mediaMetadata.description; + this.userRating = mediaMetadata.userRating; + this.overallRating = mediaMetadata.overallRating; + this.artworkData = mediaMetadata.artworkData; + this.artworkDataType = mediaMetadata.artworkDataType; + this.artworkUri = mediaMetadata.artworkUri; + this.trackNumber = mediaMetadata.trackNumber; + this.totalTrackCount = mediaMetadata.totalTrackCount; + this.folderType = mediaMetadata.folderType; + this.isBrowsable = mediaMetadata.isBrowsable; + this.isPlayable = mediaMetadata.isPlayable; + this.recordingYear = mediaMetadata.recordingYear; + 
this.recordingMonth = mediaMetadata.recordingMonth; + this.recordingDay = mediaMetadata.recordingDay; + this.releaseYear = mediaMetadata.releaseYear; + this.releaseMonth = mediaMetadata.releaseMonth; + this.releaseDay = mediaMetadata.releaseDay; + this.writer = mediaMetadata.writer; + this.composer = mediaMetadata.composer; + this.conductor = mediaMetadata.conductor; + this.discNumber = mediaMetadata.discNumber; + this.totalDiscCount = mediaMetadata.totalDiscCount; + this.genre = mediaMetadata.genre; + this.compilation = mediaMetadata.compilation; + this.station = mediaMetadata.station; + this.mediaType = mediaMetadata.mediaType; + this.extras = mediaMetadata.extras; + } + + /** Sets the title. */ + @CanIgnoreReturnValue + public Builder setTitle(@Nullable CharSequence title) { + this.title = title; + return this; + } + + /** Sets the artist. */ + @CanIgnoreReturnValue + public Builder setArtist(@Nullable CharSequence artist) { + this.artist = artist; + return this; + } + + /** Sets the album title. */ + @CanIgnoreReturnValue + public Builder setAlbumTitle(@Nullable CharSequence albumTitle) { + this.albumTitle = albumTitle; + return this; + } + + /** Sets the album artist. */ + @CanIgnoreReturnValue + public Builder setAlbumArtist(@Nullable CharSequence albumArtist) { + this.albumArtist = albumArtist; + return this; + } + + /** Sets the display title. */ + @CanIgnoreReturnValue + public Builder setDisplayTitle(@Nullable CharSequence displayTitle) { + this.displayTitle = displayTitle; + return this; + } + + /** + * Sets the subtitle. + * + *
<p>
      This is the secondary title of the media, unrelated to closed captions. + */ + @CanIgnoreReturnValue + public Builder setSubtitle(@Nullable CharSequence subtitle) { + this.subtitle = subtitle; + return this; + } + + /** Sets the description. */ + @CanIgnoreReturnValue + public Builder setDescription(@Nullable CharSequence description) { + this.description = description; + return this; + } + + /** Sets the user {@link Rating}. */ + @CanIgnoreReturnValue + public Builder setUserRating(@Nullable Rating userRating) { + this.userRating = userRating; + return this; + } + + /** Sets the overall {@link Rating}. */ + @CanIgnoreReturnValue + public Builder setOverallRating(@Nullable Rating overallRating) { + this.overallRating = overallRating; + return this; + } + + /** + * @deprecated Use {@link #setArtworkData(byte[] data, Integer pictureType)} or {@link + * #maybeSetArtworkData(byte[] data, int pictureType)}, providing a {@link PictureType}. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setArtworkData(@Nullable byte[] artworkData) { + return setArtworkData(artworkData, /* artworkDataType= */ null); + } + + /** + * Sets the artwork data as a compressed byte array with an associated {@link PictureType + * artworkDataType}. + */ + @CanIgnoreReturnValue + public Builder setArtworkData( + @Nullable byte[] artworkData, @Nullable @PictureType Integer artworkDataType) { + this.artworkData = artworkData == null ? null : artworkData.clone(); + this.artworkDataType = artworkDataType; + return this; + } + + /** + * Sets the artwork data as a compressed byte array in the event that the associated {@link + * PictureType} is {@link #PICTURE_TYPE_FRONT_COVER}, the existing {@link PictureType} is not + * {@link #PICTURE_TYPE_FRONT_COVER}, or the current artworkData is not set. + * + *
<p>
      Use {@link #setArtworkData(byte[], Integer)} to set the artwork data without checking the + * {@link PictureType}. + */ + @CanIgnoreReturnValue + public Builder maybeSetArtworkData(byte[] artworkData, @PictureType int artworkDataType) { + if (this.artworkData == null + || Util.areEqual(artworkDataType, PICTURE_TYPE_FRONT_COVER) + || !Util.areEqual(this.artworkDataType, PICTURE_TYPE_FRONT_COVER)) { + this.artworkData = artworkData.clone(); + this.artworkDataType = artworkDataType; + } + return this; + } + + /** Sets the artwork {@link Uri}. */ + @CanIgnoreReturnValue + public Builder setArtworkUri(@Nullable Uri artworkUri) { + this.artworkUri = artworkUri; + return this; + } + + /** Sets the track number. */ + @CanIgnoreReturnValue + public Builder setTrackNumber(@Nullable Integer trackNumber) { + this.trackNumber = trackNumber; + return this; + } + + /** Sets the total number of tracks. */ + @CanIgnoreReturnValue + public Builder setTotalTrackCount(@Nullable Integer totalTrackCount) { + this.totalTrackCount = totalTrackCount; + return this; + } + + /** + * Sets the {@link FolderType}. + * + *
<p>
      This method will be deprecated. Use {@link #setIsBrowsable} to indicate if an item is a + * browsable folder and use {@link #setMediaType} to indicate the type of the folder. + */ + @CanIgnoreReturnValue + public Builder setFolderType(@Nullable @FolderType Integer folderType) { + this.folderType = folderType; + return this; + } + + /** Sets whether the media is a browsable folder. */ + @CanIgnoreReturnValue + public Builder setIsBrowsable(@Nullable Boolean isBrowsable) { + this.isBrowsable = isBrowsable; + return this; + } + + /** Sets whether the media is playable. */ + @CanIgnoreReturnValue + public Builder setIsPlayable(@Nullable Boolean isPlayable) { + this.isPlayable = isPlayable; + return this; + } + + /** + * @deprecated Use {@link #setRecordingYear(Integer)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setYear(@Nullable Integer year) { + return setRecordingYear(year); + } + + /** Sets the year of the recording date. */ + @CanIgnoreReturnValue + public Builder setRecordingYear(@Nullable Integer recordingYear) { + this.recordingYear = recordingYear; + return this; + } + + /** + * Sets the month of the recording date. + * + *
<p>
      Value should be between 1 and 12. + */ + @CanIgnoreReturnValue + public Builder setRecordingMonth( + @Nullable @IntRange(from = 1, to = 12) Integer recordingMonth) { + this.recordingMonth = recordingMonth; + return this; + } + + /** + * Sets the day of the recording date. + * + *
<p>
      Value should be between 1 and 31. + */ + @CanIgnoreReturnValue + public Builder setRecordingDay(@Nullable @IntRange(from = 1, to = 31) Integer recordingDay) { + this.recordingDay = recordingDay; + return this; + } + + /** Sets the year of the release date. */ + @CanIgnoreReturnValue + public Builder setReleaseYear(@Nullable Integer releaseYear) { + this.releaseYear = releaseYear; + return this; + } + + /** + * Sets the month of the release date. + * + *
<p>
      Value should be between 1 and 12. + */ + @CanIgnoreReturnValue + public Builder setReleaseMonth(@Nullable @IntRange(from = 1, to = 12) Integer releaseMonth) { + this.releaseMonth = releaseMonth; + return this; + } + + /** + * Sets the day of the release date. + * + *
<p>
      Value should be between 1 and 31. + */ + @CanIgnoreReturnValue + public Builder setReleaseDay(@Nullable @IntRange(from = 1, to = 31) Integer releaseDay) { + this.releaseDay = releaseDay; + return this; + } + + /** Sets the writer. */ + @CanIgnoreReturnValue + public Builder setWriter(@Nullable CharSequence writer) { + this.writer = writer; + return this; + } + + /** Sets the composer. */ + @CanIgnoreReturnValue + public Builder setComposer(@Nullable CharSequence composer) { + this.composer = composer; + return this; + } + + /** Sets the conductor. */ + @CanIgnoreReturnValue + public Builder setConductor(@Nullable CharSequence conductor) { + this.conductor = conductor; + return this; + } + + /** Sets the disc number. */ + @CanIgnoreReturnValue + public Builder setDiscNumber(@Nullable Integer discNumber) { + this.discNumber = discNumber; + return this; + } + + /** Sets the total number of discs. */ + @CanIgnoreReturnValue + public Builder setTotalDiscCount(@Nullable Integer totalDiscCount) { + this.totalDiscCount = totalDiscCount; + return this; + } + + /** Sets the genre. */ + @CanIgnoreReturnValue + public Builder setGenre(@Nullable CharSequence genre) { + this.genre = genre; + return this; + } + + /** Sets the compilation. */ + @CanIgnoreReturnValue + public Builder setCompilation(@Nullable CharSequence compilation) { + this.compilation = compilation; + return this; + } + + /** Sets the name of the station streaming the media. */ + @CanIgnoreReturnValue + public Builder setStation(@Nullable CharSequence station) { + this.station = station; + return this; + } + + /** Sets the {@link MediaType}. */ + @CanIgnoreReturnValue + public Builder setMediaType(@Nullable @MediaType Integer mediaType) { + this.mediaType = mediaType; + return this; + } + + /** Sets the extras {@link Bundle}. */ + @CanIgnoreReturnValue + public Builder setExtras(@Nullable Bundle extras) { + this.extras = extras; + return this; + } + + /** + * Sets all fields supported by the {@link Metadata.Entry entries} within the {@link Metadata}. + * + *
<p>
      Fields are only set if the {@link Metadata.Entry} has an implementation for {@link + * Metadata.Entry#populateMediaMetadata(Builder)}. + * + *
<p>
      In the event that multiple {@link Metadata.Entry} objects within the {@link Metadata} + * relate to the same {@link MediaMetadata} field, then the last one will be used. + */ + @CanIgnoreReturnValue + public Builder populateFromMetadata(Metadata metadata) { + for (int i = 0; i < metadata.length(); i++) { + Metadata.Entry entry = metadata.get(i); + entry.populateMediaMetadata(this); + } + return this; + } + + /** + * Sets all fields supported by the {@link Metadata.Entry entries} within the list of {@link + * Metadata}. + * + *
<p>
      Fields are only set if the {@link Metadata.Entry} has an implementation for {@link + * Metadata.Entry#populateMediaMetadata(Builder)}. + * + *
<p>
      In the event that multiple {@link Metadata.Entry} objects within any of the {@link + * Metadata} relate to the same {@link MediaMetadata} field, then the last one will be used. + */ + @CanIgnoreReturnValue + public Builder populateFromMetadata(List metadataList) { + for (int i = 0; i < metadataList.size(); i++) { + Metadata metadata = metadataList.get(i); + for (int j = 0; j < metadata.length(); j++) { + Metadata.Entry entry = metadata.get(j); + entry.populateMediaMetadata(this); + } + } + return this; + } + + /** Populates all the fields from {@code mediaMetadata}, provided they are non-null. */ + @CanIgnoreReturnValue + public Builder populate(@Nullable MediaMetadata mediaMetadata) { + if (mediaMetadata == null) { + return this; + } + if (mediaMetadata.title != null) { + setTitle(mediaMetadata.title); + } + if (mediaMetadata.artist != null) { + setArtist(mediaMetadata.artist); + } + if (mediaMetadata.albumTitle != null) { + setAlbumTitle(mediaMetadata.albumTitle); + } + if (mediaMetadata.albumArtist != null) { + setAlbumArtist(mediaMetadata.albumArtist); + } + if (mediaMetadata.displayTitle != null) { + setDisplayTitle(mediaMetadata.displayTitle); + } + if (mediaMetadata.subtitle != null) { + setSubtitle(mediaMetadata.subtitle); + } + if (mediaMetadata.description != null) { + setDescription(mediaMetadata.description); + } + if (mediaMetadata.userRating != null) { + setUserRating(mediaMetadata.userRating); + } + if (mediaMetadata.overallRating != null) { + setOverallRating(mediaMetadata.overallRating); + } + if (mediaMetadata.artworkData != null) { + setArtworkData(mediaMetadata.artworkData, mediaMetadata.artworkDataType); + } + if (mediaMetadata.artworkUri != null) { + setArtworkUri(mediaMetadata.artworkUri); + } + if (mediaMetadata.trackNumber != null) { + setTrackNumber(mediaMetadata.trackNumber); + } + if (mediaMetadata.totalTrackCount != null) { + setTotalTrackCount(mediaMetadata.totalTrackCount); + } + if (mediaMetadata.folderType != null) { + setFolderType(mediaMetadata.folderType); + } + if (mediaMetadata.isBrowsable != null) { + setIsBrowsable(mediaMetadata.isBrowsable); + } + if (mediaMetadata.isPlayable != null) { + setIsPlayable(mediaMetadata.isPlayable); + } + if (mediaMetadata.year != null) { + setRecordingYear(mediaMetadata.year); + } + if (mediaMetadata.recordingYear != null) { + setRecordingYear(mediaMetadata.recordingYear); + } + if (mediaMetadata.recordingMonth != null) { + setRecordingMonth(mediaMetadata.recordingMonth); + } + if (mediaMetadata.recordingDay != null) { + setRecordingDay(mediaMetadata.recordingDay); + } + if (mediaMetadata.releaseYear != null) { + setReleaseYear(mediaMetadata.releaseYear); + } + if (mediaMetadata.releaseMonth != null) { + setReleaseMonth(mediaMetadata.releaseMonth); + } + if (mediaMetadata.releaseDay != null) { + setReleaseDay(mediaMetadata.releaseDay); + } + if (mediaMetadata.writer != null) { + setWriter(mediaMetadata.writer); + } + if (mediaMetadata.composer != null) { + setComposer(mediaMetadata.composer); + } + if (mediaMetadata.conductor != null) { + setConductor(mediaMetadata.conductor); + } + if (mediaMetadata.discNumber != null) { + setDiscNumber(mediaMetadata.discNumber); + } + if (mediaMetadata.totalDiscCount != null) { + setTotalDiscCount(mediaMetadata.totalDiscCount); + } + if (mediaMetadata.genre != null) { + setGenre(mediaMetadata.genre); + } + if (mediaMetadata.compilation != null) { + setCompilation(mediaMetadata.compilation); + } + if (mediaMetadata.station != null) { + setStation(mediaMetadata.station); + } + if 
(mediaMetadata.mediaType != null) { + setMediaType(mediaMetadata.mediaType); + } + if (mediaMetadata.extras != null) { + setExtras(mediaMetadata.extras); + } + + return this; + } + + /** Returns a new {@link MediaMetadata} instance with the current builder values. */ + public MediaMetadata build() { + return new MediaMetadata(/* builder= */ this); + } + } + + /** + * The type of content described by the media item. + * + *
<p>
      One of {@link #MEDIA_TYPE_MIXED}, {@link #MEDIA_TYPE_MUSIC}, {@link + * #MEDIA_TYPE_AUDIO_BOOK_CHAPTER}, {@link #MEDIA_TYPE_PODCAST_EPISODE}, {@link + * #MEDIA_TYPE_RADIO_STATION}, {@link #MEDIA_TYPE_NEWS}, {@link #MEDIA_TYPE_VIDEO}, {@link + * #MEDIA_TYPE_TRAILER}, {@link #MEDIA_TYPE_MOVIE}, {@link #MEDIA_TYPE_TV_SHOW}, {@link + * #MEDIA_TYPE_ALBUM}, {@link #MEDIA_TYPE_ARTIST}, {@link #MEDIA_TYPE_GENRE}, {@link + * #MEDIA_TYPE_PLAYLIST}, {@link #MEDIA_TYPE_YEAR}, {@link #MEDIA_TYPE_AUDIO_BOOK}, {@link + * #MEDIA_TYPE_PODCAST}, {@link #MEDIA_TYPE_TV_CHANNEL}, {@link #MEDIA_TYPE_TV_SERIES}, {@link + * #MEDIA_TYPE_TV_SEASON}, {@link #MEDIA_TYPE_FOLDER_MIXED}, {@link #MEDIA_TYPE_FOLDER_ALBUMS}, + * {@link #MEDIA_TYPE_FOLDER_ARTISTS}, {@link #MEDIA_TYPE_FOLDER_GENRES}, {@link + * #MEDIA_TYPE_FOLDER_PLAYLISTS}, {@link #MEDIA_TYPE_FOLDER_YEARS}, {@link + * #MEDIA_TYPE_FOLDER_AUDIO_BOOKS}, {@link #MEDIA_TYPE_FOLDER_PODCASTS}, {@link + * #MEDIA_TYPE_FOLDER_TV_CHANNELS}, {@link #MEDIA_TYPE_FOLDER_TV_SERIES}, {@link + * #MEDIA_TYPE_FOLDER_TV_SHOWS}, {@link #MEDIA_TYPE_FOLDER_RADIO_STATIONS}, {@link + * #MEDIA_TYPE_FOLDER_NEWS}, {@link #MEDIA_TYPE_FOLDER_VIDEOS}, {@link + * #MEDIA_TYPE_FOLDER_TRAILERS} or {@link #MEDIA_TYPE_FOLDER_MOVIES}. + */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + MEDIA_TYPE_MIXED, + MEDIA_TYPE_MUSIC, + MEDIA_TYPE_AUDIO_BOOK_CHAPTER, + MEDIA_TYPE_PODCAST_EPISODE, + MEDIA_TYPE_RADIO_STATION, + MEDIA_TYPE_NEWS, + MEDIA_TYPE_VIDEO, + MEDIA_TYPE_TRAILER, + MEDIA_TYPE_MOVIE, + MEDIA_TYPE_TV_SHOW, + MEDIA_TYPE_ALBUM, + MEDIA_TYPE_ARTIST, + MEDIA_TYPE_GENRE, + MEDIA_TYPE_PLAYLIST, + MEDIA_TYPE_YEAR, + MEDIA_TYPE_AUDIO_BOOK, + MEDIA_TYPE_PODCAST, + MEDIA_TYPE_TV_CHANNEL, + MEDIA_TYPE_TV_SERIES, + MEDIA_TYPE_TV_SEASON, + MEDIA_TYPE_FOLDER_MIXED, + MEDIA_TYPE_FOLDER_ALBUMS, + MEDIA_TYPE_FOLDER_ARTISTS, + MEDIA_TYPE_FOLDER_GENRES, + MEDIA_TYPE_FOLDER_PLAYLISTS, + MEDIA_TYPE_FOLDER_YEARS, + MEDIA_TYPE_FOLDER_AUDIO_BOOKS, + MEDIA_TYPE_FOLDER_PODCASTS, + MEDIA_TYPE_FOLDER_TV_CHANNELS, + MEDIA_TYPE_FOLDER_TV_SERIES, + MEDIA_TYPE_FOLDER_TV_SHOWS, + MEDIA_TYPE_FOLDER_RADIO_STATIONS, + MEDIA_TYPE_FOLDER_NEWS, + MEDIA_TYPE_FOLDER_VIDEOS, + MEDIA_TYPE_FOLDER_TRAILERS, + MEDIA_TYPE_FOLDER_MOVIES, + }) + public @interface MediaType {} + + /** Media of undetermined type or a mix of multiple {@linkplain MediaType media types}. */ + public static final int MEDIA_TYPE_MIXED = 0; + /** {@link MediaType} for music. */ + public static final int MEDIA_TYPE_MUSIC = 1; + /** {@link MediaType} for an audio book chapter. */ + public static final int MEDIA_TYPE_AUDIO_BOOK_CHAPTER = 2; + /** {@link MediaType} for a podcast episode. */ + public static final int MEDIA_TYPE_PODCAST_EPISODE = 3; + /** {@link MediaType} for a radio station. */ + public static final int MEDIA_TYPE_RADIO_STATION = 4; + /** {@link MediaType} for news. */ + public static final int MEDIA_TYPE_NEWS = 5; + /** {@link MediaType} for a video. */ + public static final int MEDIA_TYPE_VIDEO = 6; + /** {@link MediaType} for a movie trailer. */ + public static final int MEDIA_TYPE_TRAILER = 7; + /** {@link MediaType} for a movie. */ + public static final int MEDIA_TYPE_MOVIE = 8; + /** {@link MediaType} for a TV show. */ + public static final int MEDIA_TYPE_TV_SHOW = 9; + /** + * {@link MediaType} for a group of items (e.g., {@link #MEDIA_TYPE_MUSIC music}) belonging to an + * album. 
+ */ + public static final int MEDIA_TYPE_ALBUM = 10; + /** + * {@link MediaType} for a group of items (e.g., {@link #MEDIA_TYPE_MUSIC music}) from the same + * artist. + */ + public static final int MEDIA_TYPE_ARTIST = 11; + /** + * {@link MediaType} for a group of items (e.g., {@link #MEDIA_TYPE_MUSIC music}) of the same + * genre. + */ + public static final int MEDIA_TYPE_GENRE = 12; + /** + * {@link MediaType} for a group of items (e.g., {@link #MEDIA_TYPE_MUSIC music}) forming a + * playlist. + */ + public static final int MEDIA_TYPE_PLAYLIST = 13; + /** + * {@link MediaType} for a group of items (e.g., {@link #MEDIA_TYPE_MUSIC music}) from the same + * year. + */ + public static final int MEDIA_TYPE_YEAR = 14; + /** + * {@link MediaType} for a group of items forming an audio book. Items in this group are typically + * of type {@link #MEDIA_TYPE_AUDIO_BOOK_CHAPTER}. + */ + public static final int MEDIA_TYPE_AUDIO_BOOK = 15; + /** + * {@link MediaType} for a group of items belonging to a podcast. Items in this group are + * typically of type {@link #MEDIA_TYPE_PODCAST_EPISODE}. + */ + public static final int MEDIA_TYPE_PODCAST = 16; + /** + * {@link MediaType} for a group of items that are part of a TV channel. Items in this group are + * typically of type {@link #MEDIA_TYPE_TV_SHOW}, {@link #MEDIA_TYPE_TV_SERIES} or {@link + * #MEDIA_TYPE_MOVIE}. + */ + public static final int MEDIA_TYPE_TV_CHANNEL = 17; + /** + * {@link MediaType} for a group of items that are part of a TV series. Items in this group are + * typically of type {@link #MEDIA_TYPE_TV_SHOW} or {@link #MEDIA_TYPE_TV_SEASON}. + */ + public static final int MEDIA_TYPE_TV_SERIES = 18; + /** + * {@link MediaType} for a group of items that are part of a TV series. Items in this group are + * typically of type {@link #MEDIA_TYPE_TV_SHOW}. + */ + public static final int MEDIA_TYPE_TV_SEASON = 19; + /** {@link MediaType} for a folder with mixed or undetermined content. */ + public static final int MEDIA_TYPE_FOLDER_MIXED = 20; + /** {@link MediaType} for a folder containing {@linkplain #MEDIA_TYPE_ALBUM albums}. */ + public static final int MEDIA_TYPE_FOLDER_ALBUMS = 21; + /** {@link MediaType} for a folder containing {@linkplain #FIELD_ARTIST artists}. */ + public static final int MEDIA_TYPE_FOLDER_ARTISTS = 22; + /** {@link MediaType} for a folder containing {@linkplain #MEDIA_TYPE_GENRE genres}. */ + public static final int MEDIA_TYPE_FOLDER_GENRES = 23; + /** {@link MediaType} for a folder containing {@linkplain #MEDIA_TYPE_PLAYLIST playlists}. */ + public static final int MEDIA_TYPE_FOLDER_PLAYLISTS = 24; + /** {@link MediaType} for a folder containing {@linkplain #MEDIA_TYPE_YEAR years}. */ + public static final int MEDIA_TYPE_FOLDER_YEARS = 25; + /** {@link MediaType} for a folder containing {@linkplain #MEDIA_TYPE_AUDIO_BOOK audio books}. */ + public static final int MEDIA_TYPE_FOLDER_AUDIO_BOOKS = 26; + /** {@link MediaType} for a folder containing {@linkplain #MEDIA_TYPE_PODCAST podcasts}. */ + public static final int MEDIA_TYPE_FOLDER_PODCASTS = 27; + /** {@link MediaType} for a folder containing {@linkplain #MEDIA_TYPE_TV_CHANNEL TV channels}. */ + public static final int MEDIA_TYPE_FOLDER_TV_CHANNELS = 28; + /** {@link MediaType} for a folder containing {@linkplain #MEDIA_TYPE_TV_SERIES TV series}. */ + public static final int MEDIA_TYPE_FOLDER_TV_SERIES = 29; + /** {@link MediaType} for a folder containing {@linkplain #MEDIA_TYPE_TV_SHOW TV shows}. 
*/ + public static final int MEDIA_TYPE_FOLDER_TV_SHOWS = 30; + /** + * {@link MediaType} for a folder containing {@linkplain #MEDIA_TYPE_RADIO_STATION radio + * stations}. + */ + public static final int MEDIA_TYPE_FOLDER_RADIO_STATIONS = 31; + /** {@link MediaType} for a folder containing {@linkplain #MEDIA_TYPE_NEWS news}. */ + public static final int MEDIA_TYPE_FOLDER_NEWS = 32; + /** {@link MediaType} for a folder containing {@linkplain #MEDIA_TYPE_VIDEO videos}. */ + public static final int MEDIA_TYPE_FOLDER_VIDEOS = 33; + /** {@link MediaType} for a folder containing {@linkplain #MEDIA_TYPE_TRAILER movie trailers}. */ + public static final int MEDIA_TYPE_FOLDER_TRAILERS = 34; + /** {@link MediaType} for a folder containing {@linkplain #MEDIA_TYPE_MOVIE movies}. */ + public static final int MEDIA_TYPE_FOLDER_MOVIES = 35; + + /** + * The folder type of the media item. + * + *
<p>
      This can be used as the type of a browsable bluetooth folder (see section 6.10.2.2 of the Bluetooth + * AVRCP 1.6.2). + * + *
<p>
      One of {@link #FOLDER_TYPE_NONE}, {@link #FOLDER_TYPE_MIXED}, {@link #FOLDER_TYPE_TITLES}, + * {@link #FOLDER_TYPE_ALBUMS}, {@link #FOLDER_TYPE_ARTISTS}, {@link #FOLDER_TYPE_GENRES}, {@link + * #FOLDER_TYPE_PLAYLISTS} or {@link #FOLDER_TYPE_YEARS}. + */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef({ + FOLDER_TYPE_NONE, + FOLDER_TYPE_MIXED, + FOLDER_TYPE_TITLES, + FOLDER_TYPE_ALBUMS, + FOLDER_TYPE_ARTISTS, + FOLDER_TYPE_GENRES, + FOLDER_TYPE_PLAYLISTS, + FOLDER_TYPE_YEARS + }) + public @interface FolderType {} + + /** Type for an item that is not a folder. */ + public static final int FOLDER_TYPE_NONE = -1; + /** Type for a folder containing media of mixed types. */ + public static final int FOLDER_TYPE_MIXED = 0; + /** Type for a folder containing only playable media. */ + public static final int FOLDER_TYPE_TITLES = 1; + /** Type for a folder containing media categorized by album. */ + public static final int FOLDER_TYPE_ALBUMS = 2; + /** Type for a folder containing media categorized by artist. */ + public static final int FOLDER_TYPE_ARTISTS = 3; + /** Type for a folder containing media categorized by genre. */ + public static final int FOLDER_TYPE_GENRES = 4; + /** Type for a folder containing a playlist. */ + public static final int FOLDER_TYPE_PLAYLISTS = 5; + /** Type for a folder containing media categorized by year. */ + public static final int FOLDER_TYPE_YEARS = 6; + + /** + * The picture type of the artwork. + * + *
<p>
      Values sourced from the ID3 v2.4 specification (See section 4.14 of + * https://id3.org/id3v2.4.0-frames). + * + *
<p>
      One of {@link #PICTURE_TYPE_OTHER}, {@link #PICTURE_TYPE_FILE_ICON}, {@link + * #PICTURE_TYPE_FILE_ICON_OTHER}, {@link #PICTURE_TYPE_FRONT_COVER}, {@link + * #PICTURE_TYPE_BACK_COVER}, {@link #PICTURE_TYPE_LEAFLET_PAGE}, {@link #PICTURE_TYPE_MEDIA}, + * {@link #PICTURE_TYPE_LEAD_ARTIST_PERFORMER}, {@link #PICTURE_TYPE_ARTIST_PERFORMER}, {@link + * #PICTURE_TYPE_CONDUCTOR}, {@link #PICTURE_TYPE_BAND_ORCHESTRA}, {@link #PICTURE_TYPE_COMPOSER}, + * {@link #PICTURE_TYPE_LYRICIST}, {@link #PICTURE_TYPE_RECORDING_LOCATION}, {@link + * #PICTURE_TYPE_DURING_RECORDING}, {@link #PICTURE_TYPE_DURING_PERFORMANCE}, {@link + * #PICTURE_TYPE_MOVIE_VIDEO_SCREEN_CAPTURE}, {@link #PICTURE_TYPE_A_BRIGHT_COLORED_FISH}, {@link + * #PICTURE_TYPE_ILLUSTRATION}, {@link #PICTURE_TYPE_BAND_ARTIST_LOGO} or {@link + * #PICTURE_TYPE_PUBLISHER_STUDIO_LOGO}. + */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef({ + PICTURE_TYPE_OTHER, + PICTURE_TYPE_FILE_ICON, + PICTURE_TYPE_FILE_ICON_OTHER, + PICTURE_TYPE_FRONT_COVER, + PICTURE_TYPE_BACK_COVER, + PICTURE_TYPE_LEAFLET_PAGE, + PICTURE_TYPE_MEDIA, + PICTURE_TYPE_LEAD_ARTIST_PERFORMER, + PICTURE_TYPE_ARTIST_PERFORMER, + PICTURE_TYPE_CONDUCTOR, + PICTURE_TYPE_BAND_ORCHESTRA, + PICTURE_TYPE_COMPOSER, + PICTURE_TYPE_LYRICIST, + PICTURE_TYPE_RECORDING_LOCATION, + PICTURE_TYPE_DURING_RECORDING, + PICTURE_TYPE_DURING_PERFORMANCE, + PICTURE_TYPE_MOVIE_VIDEO_SCREEN_CAPTURE, + PICTURE_TYPE_A_BRIGHT_COLORED_FISH, + PICTURE_TYPE_ILLUSTRATION, + PICTURE_TYPE_BAND_ARTIST_LOGO, + PICTURE_TYPE_PUBLISHER_STUDIO_LOGO + }) + public @interface PictureType {} + + public static final int PICTURE_TYPE_OTHER = 0x00; + public static final int PICTURE_TYPE_FILE_ICON = 0x01; + public static final int PICTURE_TYPE_FILE_ICON_OTHER = 0x02; + public static final int PICTURE_TYPE_FRONT_COVER = 0x03; + public static final int PICTURE_TYPE_BACK_COVER = 0x04; + public static final int PICTURE_TYPE_LEAFLET_PAGE = 0x05; + public static final int PICTURE_TYPE_MEDIA = 0x06; + public static final int PICTURE_TYPE_LEAD_ARTIST_PERFORMER = 0x07; + public static final int PICTURE_TYPE_ARTIST_PERFORMER = 0x08; + public static final int PICTURE_TYPE_CONDUCTOR = 0x09; + public static final int PICTURE_TYPE_BAND_ORCHESTRA = 0x0A; + public static final int PICTURE_TYPE_COMPOSER = 0x0B; + public static final int PICTURE_TYPE_LYRICIST = 0x0C; + public static final int PICTURE_TYPE_RECORDING_LOCATION = 0x0D; + public static final int PICTURE_TYPE_DURING_RECORDING = 0x0E; + public static final int PICTURE_TYPE_DURING_PERFORMANCE = 0x0F; + public static final int PICTURE_TYPE_MOVIE_VIDEO_SCREEN_CAPTURE = 0x10; + public static final int PICTURE_TYPE_A_BRIGHT_COLORED_FISH = 0x11; + public static final int PICTURE_TYPE_ILLUSTRATION = 0x12; + public static final int PICTURE_TYPE_BAND_ARTIST_LOGO = 0x13; + public static final int PICTURE_TYPE_PUBLISHER_STUDIO_LOGO = 0x14; + + /** Empty {@link MediaMetadata}. */ + public static final MediaMetadata EMPTY = new MediaMetadata.Builder().build(); + + /** Optional title. */ + @Nullable public final CharSequence title; + /** Optional artist. */ + @Nullable public final CharSequence artist; + /** Optional album title. */ + @Nullable public final CharSequence albumTitle; + /** Optional album artist. 
*/ + @Nullable public final CharSequence albumArtist; + /** Optional display title. */ + @Nullable public final CharSequence displayTitle; + /** + * Optional subtitle. + * + *
<p>
      This is the secondary title of the media, unrelated to closed captions. + */ + @Nullable public final CharSequence subtitle; + /** Optional description. */ + @Nullable public final CharSequence description; + /** Optional user {@link Rating}. */ + @Nullable public final Rating userRating; + /** Optional overall {@link Rating}. */ + @Nullable public final Rating overallRating; + /** Optional artwork data as a compressed byte array. */ + @Nullable public final byte[] artworkData; + /** Optional {@link PictureType} of the artwork data. */ + @Nullable public final @PictureType Integer artworkDataType; + /** Optional artwork {@link Uri}. */ + @Nullable public final Uri artworkUri; + /** Optional track number. */ + @Nullable public final Integer trackNumber; + /** Optional total number of tracks. */ + @Nullable public final Integer totalTrackCount; + /** + * Optional {@link FolderType}. + * + *
<p>
      This field will be deprecated. Use {@link #isBrowsable} to indicate if an item is a + * browsable folder and use {@link #mediaType} to indicate the type of the folder. + */ + @Nullable public final @FolderType Integer folderType; + /** Optional boolean to indicate that the media is a browsable folder. */ + @Nullable public final Boolean isBrowsable; + /** Optional boolean to indicate that the media is playable. */ + @Nullable public final Boolean isPlayable; + /** + * @deprecated Use {@link #recordingYear} instead. + */ + @Deprecated @Nullable public final Integer year; + /** Optional year of the recording date. */ + @Nullable public final Integer recordingYear; + /** + * Optional month of the recording date. + * + *
<p>
      Note that there is no guarantee that the month and day are a valid combination. + */ + @Nullable public final Integer recordingMonth; + /** + * Optional day of the recording date. + * + *
<p>
      Note that there is no guarantee that the month and day are a valid combination. + */ + @Nullable public final Integer recordingDay; + + /** Optional year of the release date. */ + @Nullable public final Integer releaseYear; + /** + * Optional month of the release date. + * + *
<p>
      Note that there is no guarantee that the month and day are a valid combination. + */ + @Nullable public final Integer releaseMonth; + /** + * Optional day of the release date. + * + *
<p>
      Note that there is no guarantee that the month and day are a valid combination. + */ + @Nullable public final Integer releaseDay; + /** Optional writer. */ + @Nullable public final CharSequence writer; + /** Optional composer. */ + @Nullable public final CharSequence composer; + /** Optional conductor. */ + @Nullable public final CharSequence conductor; + /** Optional disc number. */ + @Nullable public final Integer discNumber; + /** Optional total number of discs. */ + @Nullable public final Integer totalDiscCount; + /** Optional genre. */ + @Nullable public final CharSequence genre; + /** Optional compilation. */ + @Nullable public final CharSequence compilation; + /** Optional name of the station streaming the media. */ + @Nullable public final CharSequence station; + /** Optional {@link MediaType}. */ + @Nullable public final @MediaType Integer mediaType; + + /** + * Optional extras {@link Bundle}. + * + *
<p>
      Given the complexities of checking the equality of two {@link Bundle}s, this is not + * considered in the {@link #equals(Object)} or {@link #hashCode()}. + */ + @Nullable public final Bundle extras; + + private MediaMetadata(Builder builder) { + // Handle compatibility for deprecated fields. + @Nullable Boolean isBrowsable = builder.isBrowsable; + @Nullable Integer folderType = builder.folderType; + @Nullable Integer mediaType = builder.mediaType; + if (isBrowsable != null) { + if (!isBrowsable) { + folderType = FOLDER_TYPE_NONE; + } else if (folderType == null || folderType == FOLDER_TYPE_NONE) { + folderType = mediaType != null ? getFolderTypeFromMediaType(mediaType) : FOLDER_TYPE_MIXED; + } + } else if (folderType != null) { + isBrowsable = folderType != FOLDER_TYPE_NONE; + if (isBrowsable && mediaType == null) { + mediaType = getMediaTypeFromFolderType(folderType); + } + } + this.title = builder.title; + this.artist = builder.artist; + this.albumTitle = builder.albumTitle; + this.albumArtist = builder.albumArtist; + this.displayTitle = builder.displayTitle; + this.subtitle = builder.subtitle; + this.description = builder.description; + this.userRating = builder.userRating; + this.overallRating = builder.overallRating; + this.artworkData = builder.artworkData; + this.artworkDataType = builder.artworkDataType; + this.artworkUri = builder.artworkUri; + this.trackNumber = builder.trackNumber; + this.totalTrackCount = builder.totalTrackCount; + this.folderType = folderType; + this.isBrowsable = isBrowsable; + this.isPlayable = builder.isPlayable; + this.year = builder.recordingYear; + this.recordingYear = builder.recordingYear; + this.recordingMonth = builder.recordingMonth; + this.recordingDay = builder.recordingDay; + this.releaseYear = builder.releaseYear; + this.releaseMonth = builder.releaseMonth; + this.releaseDay = builder.releaseDay; + this.writer = builder.writer; + this.composer = builder.composer; + this.conductor = builder.conductor; + this.discNumber = builder.discNumber; + this.totalDiscCount = builder.totalDiscCount; + this.genre = builder.genre; + this.compilation = builder.compilation; + this.station = builder.station; + this.mediaType = mediaType; + this.extras = builder.extras; + } + + /** Returns a new {@link Builder} instance with the current {@link MediaMetadata} fields. 
*/ + public Builder buildUpon() { + return new Builder(/* mediaMetadata= */ this); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + MediaMetadata that = (MediaMetadata) obj; + return Util.areEqual(title, that.title) + && Util.areEqual(artist, that.artist) + && Util.areEqual(albumTitle, that.albumTitle) + && Util.areEqual(albumArtist, that.albumArtist) + && Util.areEqual(displayTitle, that.displayTitle) + && Util.areEqual(subtitle, that.subtitle) + && Util.areEqual(description, that.description) + && Util.areEqual(userRating, that.userRating) + && Util.areEqual(overallRating, that.overallRating) + && Arrays.equals(artworkData, that.artworkData) + && Util.areEqual(artworkDataType, that.artworkDataType) + && Util.areEqual(artworkUri, that.artworkUri) + && Util.areEqual(trackNumber, that.trackNumber) + && Util.areEqual(totalTrackCount, that.totalTrackCount) + && Util.areEqual(folderType, that.folderType) + && Util.areEqual(isBrowsable, that.isBrowsable) + && Util.areEqual(isPlayable, that.isPlayable) + && Util.areEqual(recordingYear, that.recordingYear) + && Util.areEqual(recordingMonth, that.recordingMonth) + && Util.areEqual(recordingDay, that.recordingDay) + && Util.areEqual(releaseYear, that.releaseYear) + && Util.areEqual(releaseMonth, that.releaseMonth) + && Util.areEqual(releaseDay, that.releaseDay) + && Util.areEqual(writer, that.writer) + && Util.areEqual(composer, that.composer) + && Util.areEqual(conductor, that.conductor) + && Util.areEqual(discNumber, that.discNumber) + && Util.areEqual(totalDiscCount, that.totalDiscCount) + && Util.areEqual(genre, that.genre) + && Util.areEqual(compilation, that.compilation) + && Util.areEqual(station, that.station) + && Util.areEqual(mediaType, that.mediaType); + } + + @Override + public int hashCode() { + return Objects.hashCode( + title, + artist, + albumTitle, + albumArtist, + displayTitle, + subtitle, + description, + userRating, + overallRating, + Arrays.hashCode(artworkData), + artworkDataType, + artworkUri, + trackNumber, + totalTrackCount, + folderType, + isBrowsable, + isPlayable, + recordingYear, + recordingMonth, + recordingDay, + releaseYear, + releaseMonth, + releaseDay, + writer, + composer, + conductor, + discNumber, + totalDiscCount, + genre, + compilation, + station, + mediaType); + } + + // Bundleable implementation. + + private static final String FIELD_TITLE = Util.intToStringMaxRadix(0); + private static final String FIELD_ARTIST = Util.intToStringMaxRadix(1); + private static final String FIELD_ALBUM_TITLE = Util.intToStringMaxRadix(2); + private static final String FIELD_ALBUM_ARTIST = Util.intToStringMaxRadix(3); + private static final String FIELD_DISPLAY_TITLE = Util.intToStringMaxRadix(4); + private static final String FIELD_SUBTITLE = Util.intToStringMaxRadix(5); + private static final String FIELD_DESCRIPTION = Util.intToStringMaxRadix(6); + // 7 is reserved to maintain backward compatibility for a previously defined field. 
+ private static final String FIELD_USER_RATING = Util.intToStringMaxRadix(8); + private static final String FIELD_OVERALL_RATING = Util.intToStringMaxRadix(9); + private static final String FIELD_ARTWORK_DATA = Util.intToStringMaxRadix(10); + private static final String FIELD_ARTWORK_URI = Util.intToStringMaxRadix(11); + private static final String FIELD_TRACK_NUMBER = Util.intToStringMaxRadix(12); + private static final String FIELD_TOTAL_TRACK_COUNT = Util.intToStringMaxRadix(13); + private static final String FIELD_FOLDER_TYPE = Util.intToStringMaxRadix(14); + private static final String FIELD_IS_PLAYABLE = Util.intToStringMaxRadix(15); + private static final String FIELD_RECORDING_YEAR = Util.intToStringMaxRadix(16); + private static final String FIELD_RECORDING_MONTH = Util.intToStringMaxRadix(17); + private static final String FIELD_RECORDING_DAY = Util.intToStringMaxRadix(18); + private static final String FIELD_RELEASE_YEAR = Util.intToStringMaxRadix(19); + private static final String FIELD_RELEASE_MONTH = Util.intToStringMaxRadix(20); + private static final String FIELD_RELEASE_DAY = Util.intToStringMaxRadix(21); + private static final String FIELD_WRITER = Util.intToStringMaxRadix(22); + private static final String FIELD_COMPOSER = Util.intToStringMaxRadix(23); + private static final String FIELD_CONDUCTOR = Util.intToStringMaxRadix(24); + private static final String FIELD_DISC_NUMBER = Util.intToStringMaxRadix(25); + private static final String FIELD_TOTAL_DISC_COUNT = Util.intToStringMaxRadix(26); + private static final String FIELD_GENRE = Util.intToStringMaxRadix(27); + private static final String FIELD_COMPILATION = Util.intToStringMaxRadix(28); + private static final String FIELD_ARTWORK_DATA_TYPE = Util.intToStringMaxRadix(29); + private static final String FIELD_STATION = Util.intToStringMaxRadix(30); + private static final String FIELD_MEDIA_TYPE = Util.intToStringMaxRadix(31); + private static final String FIELD_IS_BROWSABLE = Util.intToStringMaxRadix(32); + private static final String FIELD_EXTRAS = Util.intToStringMaxRadix(1000); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + if (title != null) { + bundle.putCharSequence(FIELD_TITLE, title); + } + if (artist != null) { + bundle.putCharSequence(FIELD_ARTIST, artist); + } + if (albumTitle != null) { + bundle.putCharSequence(FIELD_ALBUM_TITLE, albumTitle); + } + if (albumArtist != null) { + bundle.putCharSequence(FIELD_ALBUM_ARTIST, albumArtist); + } + if (displayTitle != null) { + bundle.putCharSequence(FIELD_DISPLAY_TITLE, displayTitle); + } + if (subtitle != null) { + bundle.putCharSequence(FIELD_SUBTITLE, subtitle); + } + if (description != null) { + bundle.putCharSequence(FIELD_DESCRIPTION, description); + } + if (artworkData != null) { + bundle.putByteArray(FIELD_ARTWORK_DATA, artworkData); + } + if (artworkUri != null) { + bundle.putParcelable(FIELD_ARTWORK_URI, artworkUri); + } + if (writer != null) { + bundle.putCharSequence(FIELD_WRITER, writer); + } + if (composer != null) { + bundle.putCharSequence(FIELD_COMPOSER, composer); + } + if (conductor != null) { + bundle.putCharSequence(FIELD_CONDUCTOR, conductor); + } + if (genre != null) { + bundle.putCharSequence(FIELD_GENRE, genre); + } + if (compilation != null) { + bundle.putCharSequence(FIELD_COMPILATION, compilation); + } + if (station != null) { + bundle.putCharSequence(FIELD_STATION, station); + } + if (userRating != null) { + bundle.putBundle(FIELD_USER_RATING, userRating.toBundle()); + } + if (overallRating != null) { + 
bundle.putBundle(FIELD_OVERALL_RATING, overallRating.toBundle()); + } + if (trackNumber != null) { + bundle.putInt(FIELD_TRACK_NUMBER, trackNumber); + } + if (totalTrackCount != null) { + bundle.putInt(FIELD_TOTAL_TRACK_COUNT, totalTrackCount); + } + if (folderType != null) { + bundle.putInt(FIELD_FOLDER_TYPE, folderType); + } + if (isBrowsable != null) { + bundle.putBoolean(FIELD_IS_BROWSABLE, isBrowsable); + } + if (isPlayable != null) { + bundle.putBoolean(FIELD_IS_PLAYABLE, isPlayable); + } + if (recordingYear != null) { + bundle.putInt(FIELD_RECORDING_YEAR, recordingYear); + } + if (recordingMonth != null) { + bundle.putInt(FIELD_RECORDING_MONTH, recordingMonth); + } + if (recordingDay != null) { + bundle.putInt(FIELD_RECORDING_DAY, recordingDay); + } + if (releaseYear != null) { + bundle.putInt(FIELD_RELEASE_YEAR, releaseYear); + } + if (releaseMonth != null) { + bundle.putInt(FIELD_RELEASE_MONTH, releaseMonth); + } + if (releaseDay != null) { + bundle.putInt(FIELD_RELEASE_DAY, releaseDay); + } + if (discNumber != null) { + bundle.putInt(FIELD_DISC_NUMBER, discNumber); + } + if (totalDiscCount != null) { + bundle.putInt(FIELD_TOTAL_DISC_COUNT, totalDiscCount); + } + if (artworkDataType != null) { + bundle.putInt(FIELD_ARTWORK_DATA_TYPE, artworkDataType); + } + if (mediaType != null) { + bundle.putInt(FIELD_MEDIA_TYPE, mediaType); + } + if (extras != null) { + bundle.putBundle(FIELD_EXTRAS, extras); + } + return bundle; + } + + /** Object that can restore {@link MediaMetadata} from a {@link Bundle}. */ + public static final Creator CREATOR = MediaMetadata::fromBundle; + + private static MediaMetadata fromBundle(Bundle bundle) { + Builder builder = new Builder(); + builder + .setTitle(bundle.getCharSequence(FIELD_TITLE)) + .setArtist(bundle.getCharSequence(FIELD_ARTIST)) + .setAlbumTitle(bundle.getCharSequence(FIELD_ALBUM_TITLE)) + .setAlbumArtist(bundle.getCharSequence(FIELD_ALBUM_ARTIST)) + .setDisplayTitle(bundle.getCharSequence(FIELD_DISPLAY_TITLE)) + .setSubtitle(bundle.getCharSequence(FIELD_SUBTITLE)) + .setDescription(bundle.getCharSequence(FIELD_DESCRIPTION)) + .setArtworkData( + bundle.getByteArray(FIELD_ARTWORK_DATA), + bundle.containsKey(FIELD_ARTWORK_DATA_TYPE) + ? 
bundle.getInt(FIELD_ARTWORK_DATA_TYPE) + : null) + .setArtworkUri(bundle.getParcelable(FIELD_ARTWORK_URI)) + .setWriter(bundle.getCharSequence(FIELD_WRITER)) + .setComposer(bundle.getCharSequence(FIELD_COMPOSER)) + .setConductor(bundle.getCharSequence(FIELD_CONDUCTOR)) + .setGenre(bundle.getCharSequence(FIELD_GENRE)) + .setCompilation(bundle.getCharSequence(FIELD_COMPILATION)) + .setStation(bundle.getCharSequence(FIELD_STATION)) + .setExtras(bundle.getBundle(FIELD_EXTRAS)); + + if (bundle.containsKey(FIELD_USER_RATING)) { + @Nullable Bundle fieldBundle = bundle.getBundle(FIELD_USER_RATING); + if (fieldBundle != null) { + builder.setUserRating(Rating.CREATOR.fromBundle(fieldBundle)); + } + } + if (bundle.containsKey(FIELD_OVERALL_RATING)) { + @Nullable Bundle fieldBundle = bundle.getBundle(FIELD_OVERALL_RATING); + if (fieldBundle != null) { + builder.setOverallRating(Rating.CREATOR.fromBundle(fieldBundle)); + } + } + if (bundle.containsKey(FIELD_TRACK_NUMBER)) { + builder.setTrackNumber(bundle.getInt(FIELD_TRACK_NUMBER)); + } + if (bundle.containsKey(FIELD_TOTAL_TRACK_COUNT)) { + builder.setTotalTrackCount(bundle.getInt(FIELD_TOTAL_TRACK_COUNT)); + } + if (bundle.containsKey(FIELD_FOLDER_TYPE)) { + builder.setFolderType(bundle.getInt(FIELD_FOLDER_TYPE)); + } + if (bundle.containsKey(FIELD_IS_BROWSABLE)) { + builder.setIsBrowsable(bundle.getBoolean(FIELD_IS_BROWSABLE)); + } + if (bundle.containsKey(FIELD_IS_PLAYABLE)) { + builder.setIsPlayable(bundle.getBoolean(FIELD_IS_PLAYABLE)); + } + if (bundle.containsKey(FIELD_RECORDING_YEAR)) { + builder.setRecordingYear(bundle.getInt(FIELD_RECORDING_YEAR)); + } + if (bundle.containsKey(FIELD_RECORDING_MONTH)) { + builder.setRecordingMonth(bundle.getInt(FIELD_RECORDING_MONTH)); + } + if (bundle.containsKey(FIELD_RECORDING_DAY)) { + builder.setRecordingDay(bundle.getInt(FIELD_RECORDING_DAY)); + } + if (bundle.containsKey(FIELD_RELEASE_YEAR)) { + builder.setReleaseYear(bundle.getInt(FIELD_RELEASE_YEAR)); + } + if (bundle.containsKey(FIELD_RELEASE_MONTH)) { + builder.setReleaseMonth(bundle.getInt(FIELD_RELEASE_MONTH)); + } + if (bundle.containsKey(FIELD_RELEASE_DAY)) { + builder.setReleaseDay(bundle.getInt(FIELD_RELEASE_DAY)); + } + if (bundle.containsKey(FIELD_DISC_NUMBER)) { + builder.setDiscNumber(bundle.getInt(FIELD_DISC_NUMBER)); + } + if (bundle.containsKey(FIELD_TOTAL_DISC_COUNT)) { + builder.setTotalDiscCount(bundle.getInt(FIELD_TOTAL_DISC_COUNT)); + } + if (bundle.containsKey(FIELD_MEDIA_TYPE)) { + builder.setMediaType(bundle.getInt(FIELD_MEDIA_TYPE)); + } + + return builder.build(); + } + + private static @FolderType int getFolderTypeFromMediaType(@MediaType int mediaType) { + switch (mediaType) { + case MEDIA_TYPE_ALBUM: + case MEDIA_TYPE_ARTIST: + case MEDIA_TYPE_AUDIO_BOOK: + case MEDIA_TYPE_AUDIO_BOOK_CHAPTER: + case MEDIA_TYPE_FOLDER_MOVIES: + case MEDIA_TYPE_FOLDER_NEWS: + case MEDIA_TYPE_FOLDER_RADIO_STATIONS: + case MEDIA_TYPE_FOLDER_TRAILERS: + case MEDIA_TYPE_FOLDER_VIDEOS: + case MEDIA_TYPE_GENRE: + case MEDIA_TYPE_MOVIE: + case MEDIA_TYPE_MUSIC: + case MEDIA_TYPE_NEWS: + case MEDIA_TYPE_PLAYLIST: + case MEDIA_TYPE_PODCAST: + case MEDIA_TYPE_PODCAST_EPISODE: + case MEDIA_TYPE_RADIO_STATION: + case MEDIA_TYPE_TRAILER: + case MEDIA_TYPE_TV_CHANNEL: + case MEDIA_TYPE_TV_SEASON: + case MEDIA_TYPE_TV_SERIES: + case MEDIA_TYPE_TV_SHOW: + case MEDIA_TYPE_VIDEO: + case MEDIA_TYPE_YEAR: + return FOLDER_TYPE_TITLES; + case MEDIA_TYPE_FOLDER_ALBUMS: + return FOLDER_TYPE_ALBUMS; + case MEDIA_TYPE_FOLDER_ARTISTS: + return FOLDER_TYPE_ARTISTS; + 
case MEDIA_TYPE_FOLDER_GENRES: + return FOLDER_TYPE_GENRES; + case MEDIA_TYPE_FOLDER_PLAYLISTS: + return FOLDER_TYPE_PLAYLISTS; + case MEDIA_TYPE_FOLDER_YEARS: + return FOLDER_TYPE_YEARS; + case MEDIA_TYPE_FOLDER_AUDIO_BOOKS: + case MEDIA_TYPE_FOLDER_MIXED: + case MEDIA_TYPE_FOLDER_TV_CHANNELS: + case MEDIA_TYPE_FOLDER_TV_SERIES: + case MEDIA_TYPE_FOLDER_TV_SHOWS: + case MEDIA_TYPE_FOLDER_PODCASTS: + case MEDIA_TYPE_MIXED: + default: + return FOLDER_TYPE_MIXED; + } + } + + private static @MediaType int getMediaTypeFromFolderType(@FolderType int folderType) { + switch (folderType) { + case FOLDER_TYPE_ALBUMS: + return MEDIA_TYPE_FOLDER_ALBUMS; + case FOLDER_TYPE_ARTISTS: + return MEDIA_TYPE_FOLDER_ARTISTS; + case FOLDER_TYPE_GENRES: + return MEDIA_TYPE_FOLDER_GENRES; + case FOLDER_TYPE_PLAYLISTS: + return MEDIA_TYPE_FOLDER_PLAYLISTS; + case FOLDER_TYPE_TITLES: + return MEDIA_TYPE_MIXED; + case FOLDER_TYPE_YEARS: + return MEDIA_TYPE_FOLDER_YEARS; + case FOLDER_TYPE_MIXED: + case FOLDER_TYPE_NONE: + default: + return MEDIA_TYPE_FOLDER_MIXED; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaPeriodHolder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaPeriodHolder.java index 850d2b7d10..2e0d193ee3 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaPeriodHolder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaPeriodHolder.java @@ -15,16 +15,16 @@ */ package com.google.android.exoplayer2; +import static java.lang.Math.max; + import androidx.annotation.Nullable; import com.google.android.exoplayer2.source.ClippingMediaPeriod; import com.google.android.exoplayer2.source.EmptySampleStream; import com.google.android.exoplayer2.source.MediaPeriod; -import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; import com.google.android.exoplayer2.source.SampleStream; import com.google.android.exoplayer2.source.TrackGroupArray; -import com.google.android.exoplayer2.trackselection.TrackSelection; -import com.google.android.exoplayer2.trackselection.TrackSelectionArray; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.trackselection.TrackSelector; import com.google.android.exoplayer2.trackselection.TrackSelectorResult; import com.google.android.exoplayer2.upstream.Allocator; @@ -52,11 +52,22 @@ public boolean hasEnabledTracks; /** {@link MediaPeriodInfo} about this media period. */ public MediaPeriodInfo info; + /** + * Whether all renderers are in the correct state for this {@link #mediaPeriod}. + * + *
<p>Renderers that are needed must have been enabled with the {@link #sampleStreams} for this + * {@link #mediaPeriod}. This means either {@link Renderer#enable(RendererConfiguration, Format[], + * SampleStream, long, boolean, boolean, long, long)} or {@link Renderer#replaceStream(Format[], + * SampleStream, long, long)} has been called. + * + * <p>
      Renderers that are not needed must have been {@link Renderer#disable() disabled}. + */ + public boolean allRenderersInCorrectState; private final boolean[] mayRetainStreamFlags; private final RendererCapabilities[] rendererCapabilities; private final TrackSelector trackSelector; - private final MediaSource mediaSource; + private final MediaSourceList mediaSourceList; @Nullable private MediaPeriodHolder next; private TrackGroupArray trackGroups; @@ -70,7 +81,7 @@ * @param rendererPositionOffsetUs The renderer time of the start of the period, in microseconds. * @param trackSelector The track selector. * @param allocator The allocator. - * @param mediaSource The media source that produced the media period. + * @param mediaSourceList The playlist. * @param info Information used to identify this media period in its timeline period. * @param emptyTrackSelectorResult A {@link TrackSelectorResult} with empty selections for each * renderer. @@ -80,13 +91,13 @@ public MediaPeriodHolder( long rendererPositionOffsetUs, TrackSelector trackSelector, Allocator allocator, - MediaSource mediaSource, + MediaSourceList mediaSourceList, MediaPeriodInfo info, TrackSelectorResult emptyTrackSelectorResult) { this.rendererCapabilities = rendererCapabilities; this.rendererPositionOffsetUs = rendererPositionOffsetUs; this.trackSelector = trackSelector; - this.mediaSource = mediaSource; + this.mediaSourceList = mediaSourceList; this.uid = info.id.periodUid; this.info = info; this.trackGroups = TrackGroupArray.EMPTY; @@ -95,7 +106,7 @@ public MediaPeriodHolder( mayRetainStreamFlags = new boolean[rendererCapabilities.length]; mediaPeriod = createMediaPeriod( - info.id, mediaSource, allocator, info.startPositionUs, info.endPositionUs); + info.id, mediaSourceList, allocator, info.startPositionUs, info.endPositionUs); } /** @@ -165,7 +176,7 @@ public long getNextLoadPositionUs() { /** * Handles period preparation. * - * @param playbackSpeed The current playback speed. + * @param playbackSpeed The current factor by which playback is sped up. * @param timeline The current {@link Timeline}. * @throws ExoPlaybackException If an error occurs during track selection. */ @@ -173,9 +184,14 @@ public void handlePrepared(float playbackSpeed, Timeline timeline) throws ExoPla prepared = true; trackGroups = mediaPeriod.getTrackGroups(); TrackSelectorResult selectorResult = selectTracks(playbackSpeed, timeline); + long requestedStartPositionUs = info.startPositionUs; + if (info.durationUs != C.TIME_UNSET && requestedStartPositionUs >= info.durationUs) { + // Make sure start position doesn't exceed period duration. + requestedStartPositionUs = max(0, info.durationUs - 1); + } long newStartPositionUs = applyTrackSelection( - selectorResult, info.startPositionUs, /* forceRecreateStreams= */ false); + selectorResult, requestedStartPositionUs, /* forceRecreateStreams= */ false); rendererPositionOffsetUs += info.startPositionUs - newStartPositionUs; info = info.copyWithStartPositionUs(newStartPositionUs); } @@ -211,7 +227,7 @@ public void continueLoading(long rendererPositionUs) { *
<p>
      The new track selection needs to be applied with {@link * #applyTrackSelection(TrackSelectorResult, long, boolean)} before taking effect. * - * @param playbackSpeed The current playback speed. + * @param playbackSpeed The current factor by which playback is sped up. * @param timeline The current {@link Timeline}. * @return The {@link TrackSelectorResult}. * @throws ExoPlaybackException If an error occurs during track selection. @@ -220,7 +236,7 @@ public TrackSelectorResult selectTracks(float playbackSpeed, Timeline timeline) throws ExoPlaybackException { TrackSelectorResult selectorResult = trackSelector.selectTracks(rendererCapabilities, getTrackGroups(), info.id, timeline); - for (TrackSelection trackSelection : selectorResult.selections.getAll()) { + for (ExoTrackSelection trackSelection : selectorResult.selections) { if (trackSelection != null) { trackSelection.onPlaybackSpeed(playbackSpeed); } @@ -276,17 +292,16 @@ public long applyTrackSelection( trackSelectorResult = newTrackSelectorResult; enableTrackSelectionsInResult(); // Disable streams on the period and get new streams for updated/newly-enabled tracks. - TrackSelectionArray trackSelections = newTrackSelectorResult.selections; positionUs = mediaPeriod.selectTracks( - trackSelections.getAll(), + newTrackSelectorResult.selections, mayRetainStreamFlags, sampleStreams, streamResetFlags, positionUs); associateNoSampleRenderersWithEmptySampleStream(sampleStreams); - // Update whether we have enabled tracks and sanity check the expected streams are non-null. + // Update whether we have enabled tracks and check that the expected streams are non-null. hasEnabledTracks = false; for (int i = 0; i < sampleStreams.length; i++) { if (sampleStreams[i] != null) { @@ -296,7 +311,7 @@ public long applyTrackSelection( hasEnabledTracks = true; } } else { - Assertions.checkState(trackSelections.get(i) == null); + Assertions.checkState(newTrackSelectorResult.selections[i] == null); } } return positionUs; @@ -305,7 +320,7 @@ public long applyTrackSelection( /** Releases the media period. No other method should be called after the release. */ public void release() { disableTrackSelectionsInResult(); - releaseMediaPeriod(info.endPositionUs, mediaSource, mediaPeriod); + releaseMediaPeriod(mediaSourceList, mediaPeriod); } /** @@ -342,13 +357,22 @@ public TrackSelectorResult getTrackSelectorResult() { return trackSelectorResult; } + /** Updates the clipping to {@link MediaPeriodInfo#endPositionUs} if required. */ + public void updateClipping() { + if (mediaPeriod instanceof ClippingMediaPeriod) { + long endPositionUs = + info.endPositionUs == C.TIME_UNSET ? 
C.TIME_END_OF_SOURCE : info.endPositionUs; + ((ClippingMediaPeriod) mediaPeriod).updateClipping(/* startUs= */ 0, endPositionUs); + } + } + private void enableTrackSelectionsInResult() { if (!isLoadingMediaPeriod()) { return; } for (int i = 0; i < trackSelectorResult.length; i++) { boolean rendererEnabled = trackSelectorResult.isRendererEnabled(i); - TrackSelection trackSelection = trackSelectorResult.selections.get(i); + ExoTrackSelection trackSelection = trackSelectorResult.selections[i]; if (rendererEnabled && trackSelection != null) { trackSelection.enable(); } @@ -361,7 +385,7 @@ private void disableTrackSelectionsInResult() { } for (int i = 0; i < trackSelectorResult.length; i++) { boolean rendererEnabled = trackSelectorResult.isRendererEnabled(i); - TrackSelection trackSelection = trackSelectorResult.selections.get(i); + ExoTrackSelection trackSelection = trackSelectorResult.selections[i]; if (rendererEnabled && trackSelection != null) { trackSelection.disable(); } @@ -369,7 +393,7 @@ private void disableTrackSelectionsInResult() { } /** - * For each renderer of type {@link C#TRACK_TYPE_NONE}, we will remove the dummy {@link + * For each renderer of type {@link C#TRACK_TYPE_NONE}, we will remove the {@link * EmptySampleStream} that was associated with it. */ private void disassociateNoSampleRenderersWithEmptySampleStream( @@ -383,7 +407,7 @@ private void disassociateNoSampleRenderersWithEmptySampleStream( /** * For each renderer of type {@link C#TRACK_TYPE_NONE} that was enabled, we will associate it with - * a dummy {@link EmptySampleStream}. + * an {@link EmptySampleStream}. */ private void associateNoSampleRenderersWithEmptySampleStream( @NullableType SampleStream[] sampleStreams) { @@ -402,12 +426,12 @@ private boolean isLoadingMediaPeriod() { /** Returns a media period corresponding to the given {@code id}. */ private static MediaPeriod createMediaPeriod( MediaPeriodId id, - MediaSource mediaSource, + MediaSourceList mediaSourceList, Allocator allocator, long startPositionUs, long endPositionUs) { - MediaPeriod mediaPeriod = mediaSource.createPeriod(id, allocator, startPositionUs); - if (endPositionUs != C.TIME_UNSET && endPositionUs != C.TIME_END_OF_SOURCE) { + MediaPeriod mediaPeriod = mediaSourceList.createPeriod(id, allocator, startPositionUs); + if (endPositionUs != C.TIME_UNSET) { mediaPeriod = new ClippingMediaPeriod( mediaPeriod, /* enableInitialDiscontinuity= */ true, /* startUs= */ 0, endPositionUs); @@ -416,13 +440,12 @@ private static MediaPeriod createMediaPeriod( } /** Releases the given {@code mediaPeriod}, logging and suppressing any errors. */ - private static void releaseMediaPeriod( - long endPositionUs, MediaSource mediaSource, MediaPeriod mediaPeriod) { + private static void releaseMediaPeriod(MediaSourceList mediaSourceList, MediaPeriod mediaPeriod) { try { - if (endPositionUs != C.TIME_UNSET && endPositionUs != C.TIME_END_OF_SOURCE) { - mediaSource.releasePeriod(((ClippingMediaPeriod) mediaPeriod).mediaPeriod); + if (mediaPeriod instanceof ClippingMediaPeriod) { + mediaSourceList.releasePeriod(((ClippingMediaPeriod) mediaPeriod).mediaPeriod); } else { - mediaSource.releasePeriod(mediaPeriod); + mediaSourceList.releasePeriod(mediaPeriod); } } catch (RuntimeException e) { // There's nothing we can do. 
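Note on the handlePrepared() change above: the patch clamps the requested start position so it can never reach the period duration, keeping at least one microsecond of playable media. The following is a minimal, self-contained sketch of that clamping rule only; it is not part of the patch, and the class name, method name, and the TIME_UNSET stand-in value are hypothetical.

// Illustrative sketch of the start-position clamp applied in MediaPeriodHolder.handlePrepared().
// Not part of the patch; names and the TIME_UNSET stand-in are assumptions for this example.
public final class StartPositionClampExample {

  // Stand-in for C.TIME_UNSET (the real constant lives in com.google.android.exoplayer2.C).
  private static final long TIME_UNSET = Long.MIN_VALUE + 1;

  /** Returns a start position that never reaches the period duration. */
  static long clampStartPositionUs(long requestedStartPositionUs, long durationUs) {
    if (durationUs != TIME_UNSET && requestedStartPositionUs >= durationUs) {
      // Mirror the diff: make sure the start position doesn't exceed the period duration.
      return Math.max(0, durationUs - 1);
    }
    return requestedStartPositionUs;
  }

  public static void main(String[] args) {
    System.out.println(clampStartPositionUs(5_000_000L, 5_000_000L)); // 4999999
    System.out.println(clampStartPositionUs(1_000_000L, 5_000_000L)); // 1000000
  }
}

The same guard reappears later in this patch in MediaPeriodQueue.getMediaPeriodInfoForAd() and getMediaPeriodInfoForContent(), where the start position is likewise capped just below the duration.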
diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaPeriodInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaPeriodInfo.java index 2733df7ba6..5468760980 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaPeriodInfo.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaPeriodInfo.java @@ -18,6 +18,7 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.source.MediaPeriod; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; /** Stores the information required to load and play a {@link MediaPeriod}. */ @@ -28,16 +29,19 @@ /** The start position of the media to play within the media period, in microseconds. */ public final long startPositionUs; /** - * If this is an ad, the position to play in the next content media period. {@link C#TIME_UNSET} - * if this is not an ad or the next content media period should be played from its default - * position. + * The requested next start position for the current timeline period, in microseconds, or {@link + * C#TIME_UNSET} if the period was requested to start at its default position. + * + *
<p>
      Note that if {@link #id} refers to an ad, this is the requested start position for the + * suspended content. */ - public final long contentPositionUs; + public final long requestedContentPositionUs; /** * The end position to which the media period's content is clipped in order to play a following ad - * group, in microseconds, or {@link C#TIME_UNSET} if there is no following ad group or if this - * media period is an ad. The value {@link C#TIME_END_OF_SOURCE} indicates that a postroll ad - * follows at the end of this content media period. + * group or to terminate a server side ad inserted stream before a played postroll, in + * microseconds, or {@link C#TIME_UNSET} if the content is not clipped or if this media period is + * an ad. The value {@link C#TIME_END_OF_SOURCE} indicates that a postroll ad follows at the end + * of this content media period. */ public final long endPositionUs; /** @@ -46,11 +50,19 @@ * known. */ public final long durationUs; + /** + * Whether this media period is followed by a transition to another media period of the same + * server-side inserted ad stream. If true, {@link #isLastInTimelinePeriod}, {@link + * #isLastInTimelineWindow} and {@link #isFinal} will all be false. + */ + public final boolean isFollowedByTransitionToSameStream; /** * Whether this is the last media period in its timeline period (e.g., a postroll ad, or a media * period corresponding to a timeline period without ads). */ public final boolean isLastInTimelinePeriod; + /** Whether this is the last media period in its timeline window. */ + public final boolean isLastInTimelineWindow; /** * Whether this is the last media period in the entire timeline. If true, {@link * #isLastInTimelinePeriod} will also be true. @@ -60,17 +72,26 @@ MediaPeriodInfo( MediaPeriodId id, long startPositionUs, - long contentPositionUs, + long requestedContentPositionUs, long endPositionUs, long durationUs, + boolean isFollowedByTransitionToSameStream, boolean isLastInTimelinePeriod, + boolean isLastInTimelineWindow, boolean isFinal) { + Assertions.checkArgument(!isFinal || isLastInTimelinePeriod); + Assertions.checkArgument(!isLastInTimelineWindow || isLastInTimelinePeriod); + Assertions.checkArgument( + !isFollowedByTransitionToSameStream + || (!isLastInTimelinePeriod && !isLastInTimelineWindow && !isFinal)); this.id = id; this.startPositionUs = startPositionUs; - this.contentPositionUs = contentPositionUs; + this.requestedContentPositionUs = requestedContentPositionUs; this.endPositionUs = endPositionUs; this.durationUs = durationUs; + this.isFollowedByTransitionToSameStream = isFollowedByTransitionToSameStream; this.isLastInTimelinePeriod = isLastInTimelinePeriod; + this.isLastInTimelineWindow = isLastInTimelineWindow; this.isFinal = isFinal; } @@ -84,27 +105,31 @@ public MediaPeriodInfo copyWithStartPositionUs(long startPositionUs) { : new MediaPeriodInfo( id, startPositionUs, - contentPositionUs, + requestedContentPositionUs, endPositionUs, durationUs, + isFollowedByTransitionToSameStream, isLastInTimelinePeriod, + isLastInTimelineWindow, isFinal); } /** - * Returns a copy of this instance with the content position set to the specified value. May - * return the same instance if nothing changed. + * Returns a copy of this instance with the requested content position set to the specified value. + * May return the same instance if nothing changed. 
*/ - public MediaPeriodInfo copyWithContentPositionUs(long contentPositionUs) { - return contentPositionUs == this.contentPositionUs + public MediaPeriodInfo copyWithRequestedContentPositionUs(long requestedContentPositionUs) { + return requestedContentPositionUs == this.requestedContentPositionUs ? this : new MediaPeriodInfo( id, startPositionUs, - contentPositionUs, + requestedContentPositionUs, endPositionUs, durationUs, + isFollowedByTransitionToSameStream, isLastInTimelinePeriod, + isLastInTimelineWindow, isFinal); } @@ -118,10 +143,12 @@ public boolean equals(@Nullable Object o) { } MediaPeriodInfo that = (MediaPeriodInfo) o; return startPositionUs == that.startPositionUs - && contentPositionUs == that.contentPositionUs + && requestedContentPositionUs == that.requestedContentPositionUs && endPositionUs == that.endPositionUs && durationUs == that.durationUs + && isFollowedByTransitionToSameStream == that.isFollowedByTransitionToSameStream && isLastInTimelinePeriod == that.isLastInTimelinePeriod + && isLastInTimelineWindow == that.isLastInTimelineWindow && isFinal == that.isFinal && Util.areEqual(id, that.id); } @@ -131,10 +158,12 @@ public int hashCode() { int result = 17; result = 31 * result + id.hashCode(); result = 31 * result + (int) startPositionUs; - result = 31 * result + (int) contentPositionUs; + result = 31 * result + (int) requestedContentPositionUs; result = 31 * result + (int) endPositionUs; result = 31 * result + (int) durationUs; + result = 31 * result + (isFollowedByTransitionToSameStream ? 1 : 0); result = 31 * result + (isLastInTimelinePeriod ? 1 : 0); + result = 31 * result + (isLastInTimelineWindow ? 1 : 0); result = 31 * result + (isFinal ? 1 : 0); return result; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaPeriodQueue.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaPeriodQueue.java index 901b7b4d94..0651dd8bf8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaPeriodQueue.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaPeriodQueue.java @@ -15,16 +15,23 @@ */ package com.google.android.exoplayer2; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.Math.max; + +import android.os.Handler; import android.util.Pair; import androidx.annotation.Nullable; import com.google.android.exoplayer2.Player.RepeatMode; +import com.google.android.exoplayer2.analytics.AnalyticsCollector; import com.google.android.exoplayer2.source.MediaPeriod; -import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; +import com.google.android.exoplayer2.source.ads.AdPlaybackState; import com.google.android.exoplayer2.trackselection.TrackSelector; import com.google.android.exoplayer2.trackselection.TrackSelectorResult; import com.google.android.exoplayer2.upstream.Allocator; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.HandlerWrapper; +import com.google.common.collect.ImmutableList; /** * Holds a queue of media periods, from the currently playing media period at the front to the @@ -33,6 +40,26 @@ */ /* package */ final class MediaPeriodQueue { + /** + * Initial renderer position offset used for the first item in the queue, in microseconds. + * + *
<p>Choosing a positive value, larger than any reasonable single media duration, ensures three + * things: + * + * <ul> + *   <li>Media that accidentally or intentionally starts with small negative timestamps doesn't + *       send samples with negative timestamps to decoders. This makes rendering more robust as + *       many decoders are known to have problems with negative timestamps. + *   <li>Enqueueing media after the initial item with a non-zero start offset (e.g. content after + *       ad breaks or live streams) is virtually guaranteed to stay in the positive timestamp + *       range even when seeking back. This prevents renderer resets that are required if the + *       allowed timestamp range may become negative. + *   <li>Choosing a large value with zeros at all relevant digits simplifies debugging as the + *       original timestamp of the media is still visible. + * </ul>
      + */ + public static final long INITIAL_RENDERER_POSITION_OFFSET_US = 1_000_000_000_000L; + /** * Limits the maximum number of periods to buffer ahead of the current playing period. The * buffering policy normally prevents buffering too far ahead, but the policy could allow too many @@ -42,9 +69,10 @@ private final Timeline.Period period; private final Timeline.Window window; + private final AnalyticsCollector analyticsCollector; + private final HandlerWrapper analyticsCollectorHandler; private long nextWindowSequenceNumber; - private Timeline timeline; private @RepeatMode int repeatMode; private boolean shuffleModeEnabled; @Nullable private MediaPeriodHolder playing; @@ -54,37 +82,45 @@ @Nullable private Object oldFrontPeriodUid; private long oldFrontPeriodWindowSequenceNumber; - /** Creates a new media period queue. */ - public MediaPeriodQueue() { - period = new Timeline.Period(); - window = new Timeline.Window(); - timeline = Timeline.EMPTY; - } - /** - * Sets the {@link Timeline}. Call {@link #updateQueuedPeriods(long, long)} to update the queued - * media periods to take into account the new timeline. + * Creates a new media period queue. + * + * @param analyticsCollector An {@link AnalyticsCollector} to be informed of queue changes. + * @param analyticsCollectorHandler The {@link Handler} to call {@link AnalyticsCollector} methods + * on. */ - public void setTimeline(Timeline timeline) { - this.timeline = timeline; + public MediaPeriodQueue( + AnalyticsCollector analyticsCollector, HandlerWrapper analyticsCollectorHandler) { + this.analyticsCollector = analyticsCollector; + this.analyticsCollectorHandler = analyticsCollectorHandler; + period = new Timeline.Period(); + window = new Timeline.Window(); } /** * Sets the {@link RepeatMode} and returns whether the repeat mode change has been fully handled. * If not, it is necessary to seek to the current playback position. + * + * @param timeline The current timeline. + * @param repeatMode The new repeat mode. + * @return Whether the repeat mode change has been fully handled. */ - public boolean updateRepeatMode(@RepeatMode int repeatMode) { + public boolean updateRepeatMode(Timeline timeline, @RepeatMode int repeatMode) { this.repeatMode = repeatMode; - return updateForPlaybackModeChange(); + return updateForPlaybackModeChange(timeline); } /** * Sets whether shuffling is enabled and returns whether the shuffle mode change has been fully * handled. If not, it is necessary to seek to the current playback position. + * + * @param timeline The current timeline. + * @param shuffleModeEnabled Whether shuffling mode is enabled. + * @return Whether the shuffle mode change has been fully handled. */ - public boolean updateShuffleModeEnabled(boolean shuffleModeEnabled) { + public boolean updateShuffleModeEnabled(Timeline timeline, boolean shuffleModeEnabled) { this.shuffleModeEnabled = shuffleModeEnabled; - return updateForPlaybackModeChange(); + return updateForPlaybackModeChange(timeline); } /** Returns whether {@code mediaPeriod} is the current loading media period. */ @@ -120,11 +156,12 @@ public boolean shouldLoadNextMediaPeriod() { * @return The {@link MediaPeriodInfo} for the next media period to load, or {@code null} if not * yet known. */ - public @Nullable MediaPeriodInfo getNextMediaPeriodInfo( + @Nullable + public MediaPeriodInfo getNextMediaPeriodInfo( long rendererPositionUs, PlaybackInfo playbackInfo) { return loading == null ? 
getFirstMediaPeriodInfo(playbackInfo) - : getFollowingMediaPeriodInfo(loading, rendererPositionUs); + : getFollowingMediaPeriodInfo(playbackInfo.timeline, loading, rendererPositionUs); } /** @@ -134,7 +171,7 @@ public boolean shouldLoadNextMediaPeriod() { * @param rendererCapabilities The renderer capabilities. * @param trackSelector The track selector. * @param allocator The allocator. - * @param mediaSource The media source that produced the media period. + * @param mediaSourceList The list of media sources. * @param info Information used to identify this media period in its timeline period. * @param emptyTrackSelectorResult A {@link TrackSelectorResult} with empty selections for each * renderer. @@ -143,14 +180,12 @@ public MediaPeriodHolder enqueueNextMediaPeriodHolder( RendererCapabilities[] rendererCapabilities, TrackSelector trackSelector, Allocator allocator, - MediaSource mediaSource, + MediaSourceList mediaSourceList, MediaPeriodInfo info, TrackSelectorResult emptyTrackSelectorResult) { long rendererPositionOffsetUs = loading == null - ? (info.id.isAd() && info.contentPositionUs != C.TIME_UNSET - ? info.contentPositionUs - : 0) + ? INITIAL_RENDERER_POSITION_OFFSET_US : (loading.getRendererOffset() + loading.info.durationUs - info.startPositionUs); MediaPeriodHolder newPeriodHolder = new MediaPeriodHolder( @@ -158,7 +193,7 @@ public MediaPeriodHolder enqueueNextMediaPeriodHolder( rendererPositionOffsetUs, trackSelector, allocator, - mediaSource, + mediaSourceList, info, emptyTrackSelectorResult); if (loading != null) { @@ -170,6 +205,7 @@ public MediaPeriodHolder enqueueNextMediaPeriodHolder( oldFrontPeriodUid = null; loading = newPeriodHolder; length++; + notifyQueueUpdate(); return newPeriodHolder; } @@ -205,6 +241,7 @@ public MediaPeriodHolder getReadingPeriod() { public MediaPeriodHolder advanceReadingPeriod() { Assertions.checkState(reading != null && reading.getNext() != null); reading = reading.getNext(); + notifyQueueUpdate(); return reading; } @@ -230,6 +267,7 @@ public MediaPeriodHolder advancePlayingPeriod() { oldFrontPeriodWindowSequenceNumber = playing.info.id.windowSequenceNumber; } playing = playing.getNext(); + notifyQueueUpdate(); return playing; } @@ -243,6 +281,9 @@ public MediaPeriodHolder advancePlayingPeriod() { */ public boolean removeAfter(MediaPeriodHolder mediaPeriodHolder) { Assertions.checkState(mediaPeriodHolder != null); + if (mediaPeriodHolder.equals(loading)) { + return false; + } boolean removedReading = false; loading = mediaPeriodHolder; while (mediaPeriodHolder.getNext() != null) { @@ -255,29 +296,27 @@ public boolean removeAfter(MediaPeriodHolder mediaPeriodHolder) { length--; } loading.setNext(null); + notifyQueueUpdate(); return removedReading; } - /** - * Clears the queue. - * - * @param keepFrontPeriodUid Whether the queue should keep the id of the media period in the front - * of queue (typically the playing one) for later reuse. - */ - public void clear(boolean keepFrontPeriodUid) { - MediaPeriodHolder front = playing; - if (front != null) { - oldFrontPeriodUid = keepFrontPeriodUid ? front.uid : null; - oldFrontPeriodWindowSequenceNumber = front.info.id.windowSequenceNumber; - removeAfter(front); + /** Clears the queue. 
*/ + public void clear() { + if (length == 0) { + return; + } + MediaPeriodHolder front = Assertions.checkStateNotNull(playing); + oldFrontPeriodUid = front.uid; + oldFrontPeriodWindowSequenceNumber = front.info.id.windowSequenceNumber; + while (front != null) { front.release(); - } else if (!keepFrontPeriodUid) { - oldFrontPeriodUid = null; + front = front.getNext(); } playing = null; loading = null; reading = null; length = 0; + notifyQueueUpdate(); } /** @@ -286,16 +325,18 @@ public void clear(boolean keepFrontPeriodUid) { * current playback position. The method assumes that the first media period in the queue is still * consistent with the new timeline. * + * @param timeline The new timeline. * @param rendererPositionUs The current renderer position in microseconds. * @param maxRendererReadPositionUs The maximum renderer position up to which renderers have read * the current reading media period in microseconds, or {@link C#TIME_END_OF_SOURCE} if they * have read to the end. * @return Whether the timeline change has been handled completely. */ - public boolean updateQueuedPeriods(long rendererPositionUs, long maxRendererReadPositionUs) { + public boolean updateQueuedPeriods( + Timeline timeline, long rendererPositionUs, long maxRendererReadPositionUs) { // TODO: Merge this into setTimeline so that the queue gets updated as soon as the new timeline - // is set, once all cases handled by ExoPlayerImplInternal.handleSourceInfoRefreshed can be - // handled here. + // is set, once all cases handled by ExoPlayerImplInternal.handleMediaSourceListInfoRefreshed + // can be handled here. MediaPeriodHolder previousPeriodHolder = null; MediaPeriodHolder periodHolder = playing; while (periodHolder != null) { @@ -305,11 +346,12 @@ public boolean updateQueuedPeriods(long rendererPositionUs, long maxRendererRead MediaPeriodInfo newPeriodInfo; if (previousPeriodHolder == null) { // The id and start position of the first period have already been verified by - // ExoPlayerImplInternal.handleSourceInfoRefreshed. Just update duration, isLastInTimeline - // and isLastInPeriod flags. - newPeriodInfo = getUpdatedMediaPeriodInfo(oldPeriodInfo); + // ExoPlayerImplInternal.handleMediaSourceListInfoRefreshed. Just update duration, + // isLastInTimeline and isLastInPeriod flags. + newPeriodInfo = getUpdatedMediaPeriodInfo(timeline, oldPeriodInfo); } else { - newPeriodInfo = getFollowingMediaPeriodInfo(previousPeriodHolder, rendererPositionUs); + newPeriodInfo = + getFollowingMediaPeriodInfo(timeline, previousPeriodHolder, rendererPositionUs); if (newPeriodInfo == null) { // We've loaded a next media period that is not in the new timeline. return !removeAfter(previousPeriodHolder); @@ -320,18 +362,23 @@ public boolean updateQueuedPeriods(long rendererPositionUs, long maxRendererRead } } - // Use new period info, but keep old content position. - periodHolder.info = newPeriodInfo.copyWithContentPositionUs(oldPeriodInfo.contentPositionUs); + // Use the new period info, but keep the old requested content position to avoid overriding it + // by the default content position generated in getFollowingMediaPeriodInfo. + periodHolder.info = + newPeriodInfo.copyWithRequestedContentPositionUs( + oldPeriodInfo.requestedContentPositionUs); if (!areDurationsCompatible(oldPeriodInfo.durationUs, newPeriodInfo.durationUs)) { // The period duration changed. Remove all subsequent periods and check whether we read // beyond the new duration. 
+ periodHolder.updateClipping(); long newDurationInRendererTime = newPeriodInfo.durationUs == C.TIME_UNSET ? Long.MAX_VALUE : periodHolder.toRendererTime(newPeriodInfo.durationUs); boolean isReadingAndReadBeyondNewDuration = periodHolder == reading + && !periodHolder.info.isFollowedByTransitionToSameStream && (maxRendererReadPositionUs == C.TIME_END_OF_SOURCE || maxRendererReadPositionUs >= newDurationInRendererTime); boolean readingPeriodRemoved = removeAfter(periodHolder); @@ -349,27 +396,40 @@ public boolean updateQueuedPeriods(long rendererPositionUs, long maxRendererRead * account the current timeline. This method must only be called if the period is still part of * the current timeline. * + * @param timeline The current timeline used to update the media period. * @param info Media period info for a media period based on an old timeline. * @return The updated media period info for the current timeline. */ - public MediaPeriodInfo getUpdatedMediaPeriodInfo(MediaPeriodInfo info) { + public MediaPeriodInfo getUpdatedMediaPeriodInfo(Timeline timeline, MediaPeriodInfo info) { MediaPeriodId id = info.id; boolean isLastInPeriod = isLastInPeriod(id); - boolean isLastInTimeline = isLastInTimeline(id, isLastInPeriod); + boolean isLastInWindow = isLastInWindow(timeline, id); + boolean isLastInTimeline = isLastInTimeline(timeline, id, isLastInPeriod); timeline.getPeriodByUid(info.id.periodUid, period); + long endPositionUs = + id.isAd() || id.nextAdGroupIndex == C.INDEX_UNSET + ? C.TIME_UNSET + : period.getAdGroupTimeUs(id.nextAdGroupIndex); long durationUs = id.isAd() ? period.getAdDurationUs(id.adGroupIndex, id.adIndexInAdGroup) - : (info.endPositionUs == C.TIME_UNSET || info.endPositionUs == C.TIME_END_OF_SOURCE + : (endPositionUs == C.TIME_UNSET || endPositionUs == C.TIME_END_OF_SOURCE ? period.getDurationUs() - : info.endPositionUs); + : endPositionUs); + boolean isFollowedByTransitionToSameStream = + id.isAd() + ? period.isServerSideInsertedAdGroup(id.adGroupIndex) + : (id.nextAdGroupIndex != C.INDEX_UNSET + && period.isServerSideInsertedAdGroup(id.nextAdGroupIndex)); return new MediaPeriodInfo( id, info.startPositionUs, - info.contentPositionUs, - info.endPositionUs, + info.requestedContentPositionUs, + endPositionUs, durationUs, + isFollowedByTransitionToSameStream, isLastInPeriod, + isLastInWindow, isLastInTimeline); } @@ -378,30 +438,50 @@ public MediaPeriodInfo getUpdatedMediaPeriodInfo(MediaPeriodInfo info) { * played, returning an identifier for an ad group if one needs to be played before the specified * position, or an identifier for a content media period if not. * + * @param timeline The timeline the period is part of. * @param periodUid The uid of the timeline period to play. * @param positionUs The next content position in the period to play. * @return The identifier for the first media period to play, taking into account unplayed ads. */ - public MediaPeriodId resolveMediaPeriodIdForAds(Object periodUid, long positionUs) { - long windowSequenceNumber = resolvePeriodIndexToWindowSequenceNumber(periodUid); - return resolveMediaPeriodIdForAds(periodUid, positionUs, windowSequenceNumber); + public MediaPeriodId resolveMediaPeriodIdForAds( + Timeline timeline, Object periodUid, long positionUs) { + long windowSequenceNumber = resolvePeriodIndexToWindowSequenceNumber(timeline, periodUid); + return resolveMediaPeriodIdForAds( + timeline, periodUid, positionUs, windowSequenceNumber, window, period); } - // Internal methods. 
- /** * Resolves the specified timeline period and position to a {@link MediaPeriodId} that should be * played, returning an identifier for an ad group if one needs to be played before the specified * position, or an identifier for a content media period if not. * + * @param timeline The timeline the period is part of. * @param periodUid The uid of the timeline period to play. * @param positionUs The next content position in the period to play. * @param windowSequenceNumber The sequence number of the window in the buffered sequence of * windows this period is part of. + * @param period A scratch {@link Timeline.Period}. * @return The identifier for the first media period to play, taking into account unplayed ads. */ - private MediaPeriodId resolveMediaPeriodIdForAds( - Object periodUid, long positionUs, long windowSequenceNumber) { + private static MediaPeriodId resolveMediaPeriodIdForAds( + Timeline timeline, + Object periodUid, + long positionUs, + long windowSequenceNumber, + Timeline.Window window, + Timeline.Period period) { + timeline.getPeriodByUid(periodUid, period); + timeline.getWindow(period.windowIndex, window); + int periodIndex = timeline.getIndexOfPeriod(periodUid); + // Skip ignorable server side inserted ad periods. + while ((period.durationUs == 0 + && period.getAdGroupCount() > 0 + && period.isServerSideInsertedAdGroup(period.getRemovedAdGroupCount()) + && period.getAdGroupIndexForPositionUs(0) == C.INDEX_UNSET) + && periodIndex++ < window.lastPeriodIndex) { + timeline.getPeriod(periodIndex, period, /* setIds= */ true); + periodUid = checkNotNull(period.uid); + } timeline.getPeriodByUid(periodUid, period); int adGroupIndex = period.getAdGroupIndexForPositionUs(positionUs); if (adGroupIndex == C.INDEX_UNSET) { @@ -413,15 +493,65 @@ private MediaPeriodId resolveMediaPeriodIdForAds( } } + /** + * Resolves the specified timeline period and position to a {@link MediaPeriodId} that should be + * played after a period position change, returning an identifier for an ad group if one needs to + * be played before the specified position, or an identifier for a content media period if not. + * + * @param timeline The timeline the period is part of. + * @param periodUid The uid of the timeline period to play. + * @param positionUs The next content position in the period to play. + * @return The identifier for the first media period to play, taking into account unplayed ads. + */ + public MediaPeriodId resolveMediaPeriodIdForAdsAfterPeriodPositionChange( + Timeline timeline, Object periodUid, long positionUs) { + long windowSequenceNumber = resolvePeriodIndexToWindowSequenceNumber(timeline, periodUid); + // Check for preceding ad periods in multi-period window. + timeline.getPeriodByUid(periodUid, period); + timeline.getWindow(period.windowIndex, window); + Object periodUidToPlay = periodUid; + boolean seenAdPeriod = false; + for (int i = timeline.getIndexOfPeriod(periodUid); i >= window.firstPeriodIndex; i--) { + timeline.getPeriod(/* periodIndex= */ i, period, /* setIds= */ true); + boolean isAdPeriod = period.getAdGroupCount() > 0; + seenAdPeriod |= isAdPeriod; + if (period.getAdGroupIndexForPositionUs(period.durationUs) != C.INDEX_UNSET) { + // Roll forward to preceding un-played ad period. + periodUidToPlay = checkNotNull(period.uid); + } + if (seenAdPeriod && (!isAdPeriod || period.durationUs != 0)) { + // Stop for any periods except un-played ads with no content. 
+ break; + } + } + return resolveMediaPeriodIdForAds( + timeline, periodUidToPlay, positionUs, windowSequenceNumber, window, period); + } + + // Internal methods. + + private void notifyQueueUpdate() { + ImmutableList.Builder builder = ImmutableList.builder(); + @Nullable MediaPeriodHolder period = playing; + while (period != null) { + builder.add(period.info.id); + period = period.getNext(); + } + @Nullable MediaPeriodId readingPeriodId = reading == null ? null : reading.info.id; + analyticsCollectorHandler.post( + () -> analyticsCollector.updateMediaPeriodQueueInfo(builder.build(), readingPeriodId)); + } + /** * Resolves the specified period uid to a corresponding window sequence number. Either by reusing * the window sequence number of an existing matching media period or by creating a new window * sequence number. * + * @param timeline The timeline the period is part of. * @param periodUid The uid of the timeline period. * @return A window sequence number for a media period created for this timeline period. */ - private long resolvePeriodIndexToWindowSequenceNumber(Object periodUid) { + private long resolvePeriodIndexToWindowSequenceNumber(Timeline timeline, Object periodUid) { int windowIndex = timeline.getPeriodByUid(periodUid, period).windowIndex; if (oldFrontPeriodUid != null) { int oldFrontPeriodIndex = timeline.getIndexOfPeriod(oldFrontPeriodUid); @@ -481,8 +611,10 @@ private boolean areDurationsCompatible(long previousDurationUs, long newDuration /** * Updates the queue for any playback mode change, and returns whether the change was fully * handled. If not, it is necessary to seek to the current playback position. + * + * @param timeline The current timeline. */ - private boolean updateForPlaybackModeChange() { + private boolean updateForPlaybackModeChange(Timeline timeline) { // Find the last existing period holder that matches the new period order. MediaPeriodHolder lastValidPeriodHolder = playing; if (lastValidPeriodHolder == null) { @@ -514,7 +646,7 @@ private boolean updateForPlaybackModeChange() { boolean readingPeriodRemoved = removeAfter(lastValidPeriodHolder); // Update the period info for the last holder, as it may now be the last period in the timeline. - lastValidPeriodHolder.info = getUpdatedMediaPeriodInfo(lastValidPeriodHolder.info); + lastValidPeriodHolder.info = getUpdatedMediaPeriodInfo(timeline, lastValidPeriodHolder.info); // If renderers may have read from a period that's been removed, it is necessary to restart. return !readingPeriodRemoved; @@ -523,22 +655,28 @@ private boolean updateForPlaybackModeChange() { /** * Returns the first {@link MediaPeriodInfo} to play, based on the specified playback position. */ + @Nullable private MediaPeriodInfo getFirstMediaPeriodInfo(PlaybackInfo playbackInfo) { return getMediaPeriodInfo( - playbackInfo.periodId, playbackInfo.contentPositionUs, playbackInfo.startPositionUs); + playbackInfo.timeline, + playbackInfo.periodId, + playbackInfo.requestedContentPositionUs, + playbackInfo.positionUs); } /** * Returns the {@link MediaPeriodInfo} for the media period following {@code mediaPeriodHolder}'s * media period. * + * @param timeline The current timeline. * @param mediaPeriodHolder The media period holder. * @param rendererPositionUs The current renderer position in microseconds. * @return The following media period's info, or {@code null} if it is not yet possible to get the * next media period info. 
*/ - private @Nullable MediaPeriodInfo getFollowingMediaPeriodInfo( - MediaPeriodHolder mediaPeriodHolder, long rendererPositionUs) { + @Nullable + private MediaPeriodInfo getFollowingMediaPeriodInfo( + Timeline timeline, MediaPeriodHolder mediaPeriodHolder, long rendererPositionUs) { // TODO: This method is called repeatedly from ExoPlayerImplInternal.maybeUpdateLoadingPeriod // but if the timeline is not ready to provide the next period it can't return a non-null value // until the timeline is updated. Store whether the next timeline period is ready when the @@ -558,44 +696,58 @@ private MediaPeriodInfo getFirstMediaPeriodInfo(PlaybackInfo playbackInfo) { // We can't create a next period yet. return null; } - - long startPositionUs; - long contentPositionUs; + // We either start a new period in the same window or the first period in the next window. + long startPositionUs = 0; + long contentPositionUs = 0; int nextWindowIndex = timeline.getPeriod(nextPeriodIndex, period, /* setIds= */ true).windowIndex; - Object nextPeriodUid = period.uid; + Object nextPeriodUid = checkNotNull(period.uid); long windowSequenceNumber = mediaPeriodInfo.id.windowSequenceNumber; if (timeline.getWindow(nextWindowIndex, window).firstPeriodIndex == nextPeriodIndex) { // We're starting to buffer a new window. When playback transitions to this window we'll // want it to be from its default start position, so project the default start position // forward by the duration of the buffer, and start buffering from this point. contentPositionUs = C.TIME_UNSET; - Pair defaultPosition = - timeline.getPeriodPosition( + @Nullable + Pair defaultPositionUs = + timeline.getPeriodPositionUs( window, period, nextWindowIndex, /* windowPositionUs= */ C.TIME_UNSET, - /* defaultPositionProjectionUs= */ Math.max(0, bufferedDurationUs)); - if (defaultPosition == null) { + /* defaultPositionProjectionUs= */ max(0, bufferedDurationUs)); + if (defaultPositionUs == null) { return null; } - nextPeriodUid = defaultPosition.first; - startPositionUs = defaultPosition.second; - MediaPeriodHolder nextMediaPeriodHolder = mediaPeriodHolder.getNext(); + nextPeriodUid = defaultPositionUs.first; + startPositionUs = defaultPositionUs.second; + @Nullable MediaPeriodHolder nextMediaPeriodHolder = mediaPeriodHolder.getNext(); if (nextMediaPeriodHolder != null && nextMediaPeriodHolder.uid.equals(nextPeriodUid)) { windowSequenceNumber = nextMediaPeriodHolder.info.id.windowSequenceNumber; } else { windowSequenceNumber = nextWindowSequenceNumber++; } - } else { - // We're starting to buffer a new period within the same window. - startPositionUs = 0; - contentPositionUs = 0; } + + @Nullable MediaPeriodId periodId = - resolveMediaPeriodIdForAds(nextPeriodUid, startPositionUs, windowSequenceNumber); - return getMediaPeriodInfo(periodId, contentPositionUs, startPositionUs); + resolveMediaPeriodIdForAds( + timeline, nextPeriodUid, startPositionUs, windowSequenceNumber, window, period); + if (contentPositionUs != C.TIME_UNSET + && mediaPeriodInfo.requestedContentPositionUs != C.TIME_UNSET) { + boolean isPrecedingPeriodAnAd = + timeline.getPeriodByUid(mediaPeriodInfo.id.periodUid, period).getAdGroupCount() > 0 + && period.isServerSideInsertedAdGroup(period.getRemovedAdGroupCount()); + // Handle the requested content position for period transitions within the same window. + if (periodId.isAd() && isPrecedingPeriodAnAd) { + // Propagate the requested position to the following ad period in the same window. 
+ contentPositionUs = mediaPeriodInfo.requestedContentPositionUs; + } else if (isPrecedingPeriodAnAd) { + // Use the requested content position of the preceding ad period as the start position. + startPositionUs = mediaPeriodInfo.requestedContentPositionUs; + } + } + return getMediaPeriodInfo(timeline, periodId, contentPositionUs, startPositionUs); } MediaPeriodId currentPeriodId = mediaPeriodInfo.id; @@ -610,76 +762,97 @@ private MediaPeriodInfo getFirstMediaPeriodInfo(PlaybackInfo playbackInfo) { period.getNextAdIndexToPlay(adGroupIndex, currentPeriodId.adIndexInAdGroup); if (nextAdIndexInAdGroup < adCountInCurrentAdGroup) { // Play the next ad in the ad group if it's available. - return !period.isAdAvailable(adGroupIndex, nextAdIndexInAdGroup) - ? null - : getMediaPeriodInfoForAd( - currentPeriodId.periodUid, - adGroupIndex, - nextAdIndexInAdGroup, - mediaPeriodInfo.contentPositionUs, - currentPeriodId.windowSequenceNumber); + return getMediaPeriodInfoForAd( + timeline, + currentPeriodId.periodUid, + adGroupIndex, + nextAdIndexInAdGroup, + mediaPeriodInfo.requestedContentPositionUs, + currentPeriodId.windowSequenceNumber); } else { // Play content from the ad group position. - long startPositionUs = mediaPeriodInfo.contentPositionUs; + long startPositionUs = mediaPeriodInfo.requestedContentPositionUs; if (startPositionUs == C.TIME_UNSET) { // If we're transitioning from an ad group to content starting from its default position, // project the start position forward as if this were a transition to a new window. - Pair defaultPosition = - timeline.getPeriodPosition( + @Nullable + Pair defaultPositionUs = + timeline.getPeriodPositionUs( window, period, period.windowIndex, /* windowPositionUs= */ C.TIME_UNSET, - /* defaultPositionProjectionUs= */ Math.max(0, bufferedDurationUs)); - if (defaultPosition == null) { + /* defaultPositionProjectionUs= */ max(0, bufferedDurationUs)); + if (defaultPositionUs == null) { return null; } - startPositionUs = defaultPosition.second; + startPositionUs = defaultPositionUs.second; } + long minStartPositionUs = + getMinStartPositionAfterAdGroupUs( + timeline, currentPeriodId.periodUid, currentPeriodId.adGroupIndex); return getMediaPeriodInfoForContent( - currentPeriodId.periodUid, startPositionUs, currentPeriodId.windowSequenceNumber); + timeline, + currentPeriodId.periodUid, + max(minStartPositionUs, startPositionUs), + mediaPeriodInfo.requestedContentPositionUs, + currentPeriodId.windowSequenceNumber); } } else { - // Play the next ad group if it's available. - int nextAdGroupIndex = period.getAdGroupIndexForPositionUs(mediaPeriodInfo.endPositionUs); - if (nextAdGroupIndex == C.INDEX_UNSET) { - // The next ad group can't be played. Play content from the previous end position instead. + // Play the next ad group if it's still available. + int adIndexInAdGroup = period.getFirstAdIndexToPlay(currentPeriodId.nextAdGroupIndex); + boolean isPlayedServerSideInsertedAd = + period.isServerSideInsertedAdGroup(currentPeriodId.nextAdGroupIndex) + && period.getAdState(currentPeriodId.nextAdGroupIndex, adIndexInAdGroup) + == AdPlaybackState.AD_STATE_PLAYED; + if (adIndexInAdGroup == period.getAdCountInAdGroup(currentPeriodId.nextAdGroupIndex) + || isPlayedServerSideInsertedAd) { + // The next ad group has no ads left to play or is a played SSAI ad group. Play content from + // the end position instead. 
+ long startPositionUs = + getMinStartPositionAfterAdGroupUs( + timeline, currentPeriodId.periodUid, currentPeriodId.nextAdGroupIndex); return getMediaPeriodInfoForContent( + timeline, currentPeriodId.periodUid, - /* startPositionUs= */ mediaPeriodInfo.durationUs, + startPositionUs, + /* requestedContentPositionUs= */ mediaPeriodInfo.durationUs, currentPeriodId.windowSequenceNumber); } - int adIndexInAdGroup = period.getFirstAdIndexToPlay(nextAdGroupIndex); - return !period.isAdAvailable(nextAdGroupIndex, adIndexInAdGroup) - ? null - : getMediaPeriodInfoForAd( - currentPeriodId.periodUid, - nextAdGroupIndex, - adIndexInAdGroup, - /* contentPositionUs= */ mediaPeriodInfo.durationUs, - currentPeriodId.windowSequenceNumber); + return getMediaPeriodInfoForAd( + timeline, + currentPeriodId.periodUid, + currentPeriodId.nextAdGroupIndex, + adIndexInAdGroup, + /* contentPositionUs= */ mediaPeriodInfo.durationUs, + currentPeriodId.windowSequenceNumber); } } + @Nullable private MediaPeriodInfo getMediaPeriodInfo( - MediaPeriodId id, long contentPositionUs, long startPositionUs) { + Timeline timeline, MediaPeriodId id, long requestedContentPositionUs, long startPositionUs) { timeline.getPeriodByUid(id.periodUid, period); if (id.isAd()) { - if (!period.isAdAvailable(id.adGroupIndex, id.adIndexInAdGroup)) { - return null; - } return getMediaPeriodInfoForAd( + timeline, id.periodUid, id.adGroupIndex, id.adIndexInAdGroup, - contentPositionUs, + requestedContentPositionUs, id.windowSequenceNumber); } else { - return getMediaPeriodInfoForContent(id.periodUid, startPositionUs, id.windowSequenceNumber); + return getMediaPeriodInfoForContent( + timeline, + id.periodUid, + startPositionUs, + requestedContentPositionUs, + id.windowSequenceNumber); } } private MediaPeriodInfo getMediaPeriodInfoForAd( + Timeline timeline, Object periodUid, int adGroupIndex, int adIndexInAdGroup, @@ -695,37 +868,74 @@ private MediaPeriodInfo getMediaPeriodInfoForAd( adIndexInAdGroup == period.getFirstAdIndexToPlay(adGroupIndex) ? period.getAdResumePositionUs() : 0; + boolean isFollowedByTransitionToSameStream = + period.isServerSideInsertedAdGroup(id.adGroupIndex); + if (durationUs != C.TIME_UNSET && startPositionUs >= durationUs) { + // Ensure start position doesn't exceed duration. + startPositionUs = max(0, durationUs - 1); + } return new MediaPeriodInfo( id, startPositionUs, contentPositionUs, /* endPositionUs= */ C.TIME_UNSET, durationUs, + isFollowedByTransitionToSameStream, /* isLastInTimelinePeriod= */ false, + /* isLastInTimelineWindow= */ false, /* isFinal= */ false); } private MediaPeriodInfo getMediaPeriodInfoForContent( - Object periodUid, long startPositionUs, long windowSequenceNumber) { + Timeline timeline, + Object periodUid, + long startPositionUs, + long requestedContentPositionUs, + long windowSequenceNumber) { + timeline.getPeriodByUid(periodUid, period); int nextAdGroupIndex = period.getAdGroupIndexAfterPositionUs(startPositionUs); + boolean clipPeriodAtContentDuration = false; + if (nextAdGroupIndex == C.INDEX_UNSET) { + // Clip SSAI streams when at the end of the period. + clipPeriodAtContentDuration = + period.getAdGroupCount() > 0 + && period.isServerSideInsertedAdGroup(period.getRemovedAdGroupCount()); + } else if (period.isServerSideInsertedAdGroup(nextAdGroupIndex) + && period.getAdGroupTimeUs(nextAdGroupIndex) == period.durationUs) { + if (period.hasPlayedAdGroup(nextAdGroupIndex)) { + // Clip period before played SSAI post-rolls. 
+ nextAdGroupIndex = C.INDEX_UNSET; + clipPeriodAtContentDuration = true; + } + } MediaPeriodId id = new MediaPeriodId(periodUid, windowSequenceNumber, nextAdGroupIndex); boolean isLastInPeriod = isLastInPeriod(id); - boolean isLastInTimeline = isLastInTimeline(id, isLastInPeriod); + boolean isLastInWindow = isLastInWindow(timeline, id); + boolean isLastInTimeline = isLastInTimeline(timeline, id, isLastInPeriod); + boolean isFollowedByTransitionToSameStream = + nextAdGroupIndex != C.INDEX_UNSET && period.isServerSideInsertedAdGroup(nextAdGroupIndex); long endPositionUs = nextAdGroupIndex != C.INDEX_UNSET ? period.getAdGroupTimeUs(nextAdGroupIndex) - : C.TIME_UNSET; + : clipPeriodAtContentDuration ? period.durationUs : C.TIME_UNSET; long durationUs = endPositionUs == C.TIME_UNSET || endPositionUs == C.TIME_END_OF_SOURCE ? period.durationUs : endPositionUs; + if (durationUs != C.TIME_UNSET && startPositionUs >= durationUs) { + // Ensure start position doesn't exceed duration. + boolean endAtLastFrame = isLastInTimeline || !clipPeriodAtContentDuration; + startPositionUs = max(0, durationUs - (endAtLastFrame ? 1 : 0)); + } return new MediaPeriodInfo( id, startPositionUs, - /* contentPositionUs= */ C.TIME_UNSET, + requestedContentPositionUs, endPositionUs, durationUs, + isFollowedByTransitionToSameStream, isLastInPeriod, + isLastInWindow, isLastInTimeline); } @@ -733,11 +943,31 @@ private boolean isLastInPeriod(MediaPeriodId id) { return !id.isAd() && id.nextAdGroupIndex == C.INDEX_UNSET; } - private boolean isLastInTimeline(MediaPeriodId id, boolean isLastMediaPeriodInPeriod) { + private boolean isLastInWindow(Timeline timeline, MediaPeriodId id) { + if (!isLastInPeriod(id)) { + return false; + } + int windowIndex = timeline.getPeriodByUid(id.periodUid, period).windowIndex; + int periodIndex = timeline.getIndexOfPeriod(id.periodUid); + return timeline.getWindow(windowIndex, window).lastPeriodIndex == periodIndex; + } + + private boolean isLastInTimeline( + Timeline timeline, MediaPeriodId id, boolean isLastMediaPeriodInPeriod) { int periodIndex = timeline.getIndexOfPeriod(id.periodUid); int windowIndex = timeline.getPeriod(periodIndex, period).windowIndex; return !timeline.getWindow(windowIndex, window).isDynamic && timeline.isLastPeriod(periodIndex, period, window, repeatMode, shuffleModeEnabled) && isLastMediaPeriodInPeriod; } + + private long getMinStartPositionAfterAdGroupUs( + Timeline timeline, Object periodUid, int adGroupIndex) { + timeline.getPeriodByUid(periodUid, period); + long startPositionUs = period.getAdGroupTimeUs(adGroupIndex); + if (startPositionUs == C.TIME_END_OF_SOURCE) { + return period.durationUs; + } + return startPositionUs + period.getContentResumeOffsetUs(adGroupIndex); + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaSourceInfoHolder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaSourceInfoHolder.java new file mode 100644 index 0000000000..f8624995ad --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaSourceInfoHolder.java @@ -0,0 +1,28 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import com.google.android.exoplayer2.source.MediaSource; + +/** A holder of information about a {@link MediaSource}. */ +/* package */ interface MediaSourceInfoHolder { + + /** Returns the uid of the {@link MediaSourceList.MediaSourceHolder}. */ + Object getUid(); + + /** Returns the timeline. */ + Timeline getTimeline(); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaSourceList.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaSourceList.java new file mode 100644 index 0000000000..7c65a1540d --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MediaSourceList.java @@ -0,0 +1,738 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.Math.max; +import static java.lang.Math.min; + +import android.os.Handler; +import android.util.Pair; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.analytics.AnalyticsCollector; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.drm.DrmSession; +import com.google.android.exoplayer2.drm.DrmSessionEventListener; +import com.google.android.exoplayer2.source.LoadEventInfo; +import com.google.android.exoplayer2.source.MaskingMediaPeriod; +import com.google.android.exoplayer2.source.MaskingMediaSource; +import com.google.android.exoplayer2.source.MediaLoadData; +import com.google.android.exoplayer2.source.MediaPeriod; +import com.google.android.exoplayer2.source.MediaSource; +import com.google.android.exoplayer2.source.MediaSourceEventListener; +import com.google.android.exoplayer2.source.ShuffleOrder; +import com.google.android.exoplayer2.source.ShuffleOrder.DefaultShuffleOrder; +import com.google.android.exoplayer2.upstream.Allocator; +import com.google.android.exoplayer2.upstream.TransferListener; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.HandlerWrapper; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.Util; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.IdentityHashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.checkerframework.checker.nullness.compatqual.NullableType; + +/** + * Concatenates 
multiple {@link MediaSource}s. The list of {@link MediaSource}s can be modified + * during playback. It is valid for the same {@link MediaSource} instance to be present more than + * once in the playlist. + * + * <p>
      With the exception of the constructor, all methods are called on the playback thread. + */ +/* package */ final class MediaSourceList { + + /** Listener for source events. */ + public interface MediaSourceListInfoRefreshListener { + + /** + * Called when the timeline of a media item has changed and a new timeline that reflects the + * current playlist state needs to be created by calling {@link #createTimeline()}. + * + *

      <p>Called on the playback thread.
+ */
+ void onPlaylistUpdateRequested();
+ }
+
+ private static final String TAG = "MediaSourceList";
+
+ private final PlayerId playerId;
+ private final List<MediaSourceHolder> mediaSourceHolders;
+ private final IdentityHashMap<MediaPeriod, MediaSourceHolder> mediaSourceByMediaPeriod;
+ private final Map<Object, MediaSourceHolder> mediaSourceByUid;
+ private final MediaSourceListInfoRefreshListener mediaSourceListInfoListener;
+ private final HashMap<MediaSourceHolder, MediaSourceAndListener> childSources;
+ private final Set<MediaSourceHolder> enabledMediaSourceHolders;
+ private final AnalyticsCollector eventListener;
+ private final HandlerWrapper eventHandler;
+ private ShuffleOrder shuffleOrder;
+ private boolean isPrepared;
+
+ @Nullable private TransferListener mediaTransferListener;
+
+ /**
+ * Creates the media source list.
+ *
+ * @param listener The {@link MediaSourceListInfoRefreshListener} to be informed of timeline
+ * changes.
+ * @param analyticsCollector An {@link AnalyticsCollector} to be registered for media source
+ * events.
+ * @param analyticsCollectorHandler The {@link Handler} to call {@link AnalyticsCollector} methods
+ * on.
+ * @param playerId The {@link PlayerId} of the player using this list.
+ */
+ public MediaSourceList(
+ MediaSourceListInfoRefreshListener listener,
+ AnalyticsCollector analyticsCollector,
+ HandlerWrapper analyticsCollectorHandler,
+ PlayerId playerId) {
+ this.playerId = playerId;
+ mediaSourceListInfoListener = listener;
+ shuffleOrder = new DefaultShuffleOrder(0);
+ mediaSourceByMediaPeriod = new IdentityHashMap<>();
+ mediaSourceByUid = new HashMap<>();
+ mediaSourceHolders = new ArrayList<>();
+ eventListener = analyticsCollector;
+ eventHandler = analyticsCollectorHandler;
+ childSources = new HashMap<>();
+ enabledMediaSourceHolders = new HashSet<>();
+ }
+
+ /**
+ * Sets the media sources replacing any sources previously contained in the playlist.
+ *
+ * @param holders The list of {@link MediaSourceHolder}s to set.
+ * @param shuffleOrder The new shuffle order.
+ * @return The new {@link Timeline}.
+ */
+ public Timeline setMediaSources(List<MediaSourceHolder> holders, ShuffleOrder shuffleOrder) {
+ removeMediaSourcesInternal(/* fromIndex= */ 0, /* toIndex= */ mediaSourceHolders.size());
+ return addMediaSources(/* index= */ this.mediaSourceHolders.size(), holders, shuffleOrder);
+ }
+
+ /**
+ * Adds multiple {@link MediaSourceHolder}s to the playlist.
+ *
+ * @param index The index at which the new {@link MediaSourceHolder}s will be inserted. This index
+ * must be in the range of 0 <= index <= {@link #getSize()}.
+ * @param holders A list of {@link MediaSourceHolder}s to be added.
+ * @param shuffleOrder The new shuffle order.
+ * @return The new {@link Timeline}.
+ */ + public Timeline addMediaSources( + int index, List holders, ShuffleOrder shuffleOrder) { + if (!holders.isEmpty()) { + this.shuffleOrder = shuffleOrder; + for (int insertionIndex = index; insertionIndex < index + holders.size(); insertionIndex++) { + MediaSourceHolder holder = holders.get(insertionIndex - index); + if (insertionIndex > 0) { + MediaSourceHolder previousHolder = mediaSourceHolders.get(insertionIndex - 1); + Timeline previousTimeline = previousHolder.mediaSource.getTimeline(); + holder.reset( + /* firstWindowIndexInChild= */ previousHolder.firstWindowIndexInChild + + previousTimeline.getWindowCount()); + } else { + holder.reset(/* firstWindowIndexInChild= */ 0); + } + Timeline newTimeline = holder.mediaSource.getTimeline(); + correctOffsets( + /* startIndex= */ insertionIndex, + /* windowOffsetUpdate= */ newTimeline.getWindowCount()); + mediaSourceHolders.add(insertionIndex, holder); + mediaSourceByUid.put(holder.uid, holder); + if (isPrepared) { + prepareChildSource(holder); + if (mediaSourceByMediaPeriod.isEmpty()) { + enabledMediaSourceHolders.add(holder); + } else { + disableChildSource(holder); + } + } + } + } + return createTimeline(); + } + + /** + * Removes a range of {@link MediaSourceHolder}s from the playlist, by specifying an initial index + * (included) and a final index (excluded). + * + *
<p>
      Note: when specified range is empty, no actual media source is removed and no exception is + * thrown. + * + * @param fromIndex The initial range index, pointing to the first media source that will be + * removed. This index must be in the range of 0 <= index <= {@link #getSize()}. + * @param toIndex The final range index, pointing to the first media source that will be left + * untouched. This index must be in the range of 0 <= index <= {@link #getSize()}. + * @param shuffleOrder The new shuffle order. + * @return The new {@link Timeline}. + * @throws IllegalArgumentException When the range is malformed, i.e. {@code fromIndex} < 0, + * {@code toIndex} > {@link #getSize()}, {@code fromIndex} > {@code toIndex} + */ + public Timeline removeMediaSourceRange(int fromIndex, int toIndex, ShuffleOrder shuffleOrder) { + Assertions.checkArgument(fromIndex >= 0 && fromIndex <= toIndex && toIndex <= getSize()); + this.shuffleOrder = shuffleOrder; + removeMediaSourcesInternal(fromIndex, toIndex); + return createTimeline(); + } + + /** + * Moves an existing media source within the playlist. + * + * @param currentIndex The current index of the media source in the playlist. This index must be + * in the range of 0 <= index < {@link #getSize()}. + * @param newIndex The target index of the media source in the playlist. This index must be in the + * range of 0 <= index < {@link #getSize()}. + * @param shuffleOrder The new shuffle order. + * @return The new {@link Timeline}. + * @throws IllegalArgumentException When an index is invalid, i.e. {@code currentIndex} < 0, + * {@code currentIndex} >= {@link #getSize()}, {@code newIndex} < 0 + */ + public Timeline moveMediaSource(int currentIndex, int newIndex, ShuffleOrder shuffleOrder) { + return moveMediaSourceRange(currentIndex, currentIndex + 1, newIndex, shuffleOrder); + } + + /** + * Moves a range of media sources within the playlist. + * + *
<p>
      Note: when specified range is empty or the from index equals the new from index, no actual + * media source is moved and no exception is thrown. + * + * @param fromIndex The initial range index, pointing to the first media source of the range that + * will be moved. This index must be in the range of 0 <= index <= {@link #getSize()}. + * @param toIndex The final range index, pointing to the first media source that will be left + * untouched. This index must be larger or equals than {@code fromIndex}. + * @param newFromIndex The target index of the first media source of the range that will be moved. + * @param shuffleOrder The new shuffle order. + * @return The new {@link Timeline}. + * @throws IllegalArgumentException When the range is malformed, i.e. {@code fromIndex} < 0, + * {@code toIndex} < {@code fromIndex}, {@code fromIndex} > {@code toIndex}, {@code + * newFromIndex} < 0 + */ + public Timeline moveMediaSourceRange( + int fromIndex, int toIndex, int newFromIndex, ShuffleOrder shuffleOrder) { + Assertions.checkArgument( + fromIndex >= 0 && fromIndex <= toIndex && toIndex <= getSize() && newFromIndex >= 0); + this.shuffleOrder = shuffleOrder; + if (fromIndex == toIndex || fromIndex == newFromIndex) { + return createTimeline(); + } + int startIndex = min(fromIndex, newFromIndex); + int newEndIndex = newFromIndex + (toIndex - fromIndex) - 1; + int endIndex = max(newEndIndex, toIndex - 1); + int windowOffset = mediaSourceHolders.get(startIndex).firstWindowIndexInChild; + Util.moveItems(mediaSourceHolders, fromIndex, toIndex, newFromIndex); + for (int i = startIndex; i <= endIndex; i++) { + MediaSourceHolder holder = mediaSourceHolders.get(i); + holder.firstWindowIndexInChild = windowOffset; + windowOffset += holder.mediaSource.getTimeline().getWindowCount(); + } + return createTimeline(); + } + + /** Clears the playlist. */ + public Timeline clear(@Nullable ShuffleOrder shuffleOrder) { + this.shuffleOrder = shuffleOrder != null ? shuffleOrder : this.shuffleOrder.cloneAndClear(); + removeMediaSourcesInternal(/* fromIndex= */ 0, /* toIndex= */ getSize()); + return createTimeline(); + } + + /** Whether the playlist is prepared. */ + public boolean isPrepared() { + return isPrepared; + } + + /** Returns the number of media sources in the playlist. */ + public int getSize() { + return mediaSourceHolders.size(); + } + + /** + * Sets a new shuffle order to use when shuffling the child media sources. + * + * @param shuffleOrder A {@link ShuffleOrder}. + */ + public Timeline setShuffleOrder(ShuffleOrder shuffleOrder) { + int size = getSize(); + if (shuffleOrder.getLength() != size) { + shuffleOrder = + shuffleOrder + .cloneAndClear() + .cloneAndInsert(/* insertionIndex= */ 0, /* insertionCount= */ size); + } + this.shuffleOrder = shuffleOrder; + return createTimeline(); + } + + /** Prepares the playlist. */ + public void prepare(@Nullable TransferListener mediaTransferListener) { + Assertions.checkState(!isPrepared); + this.mediaTransferListener = mediaTransferListener; + for (int i = 0; i < mediaSourceHolders.size(); i++) { + MediaSourceHolder mediaSourceHolder = mediaSourceHolders.get(i); + prepareChildSource(mediaSourceHolder); + enabledMediaSourceHolders.add(mediaSourceHolder); + } + isPrepared = true; + } + + /** + * Returns a new {@link MediaPeriod} identified by {@code periodId}. + * + * @param id The identifier of the period. + * @param allocator An {@link Allocator} from which to obtain media buffer allocations. 
+ * @param startPositionUs The expected start position, in microseconds. + * @return A new {@link MediaPeriod}. + */ + public MediaPeriod createPeriod( + MediaSource.MediaPeriodId id, Allocator allocator, long startPositionUs) { + Object mediaSourceHolderUid = getMediaSourceHolderUid(id.periodUid); + MediaSource.MediaPeriodId childMediaPeriodId = + id.copyWithPeriodUid(getChildPeriodUid(id.periodUid)); + MediaSourceHolder holder = checkNotNull(mediaSourceByUid.get(mediaSourceHolderUid)); + enableMediaSource(holder); + holder.activeMediaPeriodIds.add(childMediaPeriodId); + MediaPeriod mediaPeriod = + holder.mediaSource.createPeriod(childMediaPeriodId, allocator, startPositionUs); + mediaSourceByMediaPeriod.put(mediaPeriod, holder); + disableUnusedMediaSources(); + return mediaPeriod; + } + + /** + * Releases the period. + * + * @param mediaPeriod The period to release. + */ + public void releasePeriod(MediaPeriod mediaPeriod) { + MediaSourceHolder holder = checkNotNull(mediaSourceByMediaPeriod.remove(mediaPeriod)); + holder.mediaSource.releasePeriod(mediaPeriod); + holder.activeMediaPeriodIds.remove(((MaskingMediaPeriod) mediaPeriod).id); + if (!mediaSourceByMediaPeriod.isEmpty()) { + disableUnusedMediaSources(); + } + maybeReleaseChildSource(holder); + } + + /** Releases the playlist. */ + public void release() { + for (MediaSourceAndListener childSource : childSources.values()) { + try { + childSource.mediaSource.releaseSource(childSource.caller); + } catch (RuntimeException e) { + // There's nothing we can do. + Log.e(TAG, "Failed to release child source.", e); + } + childSource.mediaSource.removeEventListener(childSource.eventListener); + childSource.mediaSource.removeDrmEventListener(childSource.eventListener); + } + childSources.clear(); + enabledMediaSourceHolders.clear(); + isPrepared = false; + } + + /** Creates a timeline reflecting the current state of the playlist. */ + public Timeline createTimeline() { + if (mediaSourceHolders.isEmpty()) { + return Timeline.EMPTY; + } + int windowOffset = 0; + for (int i = 0; i < mediaSourceHolders.size(); i++) { + MediaSourceHolder mediaSourceHolder = mediaSourceHolders.get(i); + mediaSourceHolder.firstWindowIndexInChild = windowOffset; + windowOffset += mediaSourceHolder.mediaSource.getTimeline().getWindowCount(); + } + return new PlaylistTimeline(mediaSourceHolders, shuffleOrder); + } + + // Internal methods. 
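  // ------------------------------------------------------------------------------------------
  // Illustrative usage sketch, not part of this patch. It only uses the methods declared above;
  // `listener`, `analyticsCollector`, `handler`, `playerId`, `holders`, `mediaPeriodId` and
  // `allocator` are assumed placeholder values:
  //
  //   MediaSourceList list = new MediaSourceList(listener, analyticsCollector, handler, playerId);
  //   Timeline timeline =
  //       list.setMediaSources(holders, new ShuffleOrder.DefaultShuffleOrder(holders.size()));
  //   list.prepare(/* mediaTransferListener= */ null);
  //   MediaPeriod period = list.createPeriod(mediaPeriodId, allocator, /* startPositionUs= */ 0);
  //   // ... playback consumes the period ...
  //   list.releasePeriod(period);
  //   list.release();
  // ------------------------------------------------------------------------------------------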
+ + private void enableMediaSource(MediaSourceHolder mediaSourceHolder) { + enabledMediaSourceHolders.add(mediaSourceHolder); + @Nullable MediaSourceAndListener enabledChild = childSources.get(mediaSourceHolder); + if (enabledChild != null) { + enabledChild.mediaSource.enable(enabledChild.caller); + } + } + + private void disableUnusedMediaSources() { + Iterator iterator = enabledMediaSourceHolders.iterator(); + while (iterator.hasNext()) { + MediaSourceHolder holder = iterator.next(); + if (holder.activeMediaPeriodIds.isEmpty()) { + disableChildSource(holder); + iterator.remove(); + } + } + } + + private void disableChildSource(MediaSourceHolder holder) { + @Nullable MediaSourceAndListener disabledChild = childSources.get(holder); + if (disabledChild != null) { + disabledChild.mediaSource.disable(disabledChild.caller); + } + } + + private void removeMediaSourcesInternal(int fromIndex, int toIndex) { + for (int index = toIndex - 1; index >= fromIndex; index--) { + MediaSourceHolder holder = mediaSourceHolders.remove(index); + mediaSourceByUid.remove(holder.uid); + Timeline oldTimeline = holder.mediaSource.getTimeline(); + correctOffsets( + /* startIndex= */ index, /* windowOffsetUpdate= */ -oldTimeline.getWindowCount()); + holder.isRemoved = true; + if (isPrepared) { + maybeReleaseChildSource(holder); + } + } + } + + private void correctOffsets(int startIndex, int windowOffsetUpdate) { + for (int i = startIndex; i < mediaSourceHolders.size(); i++) { + MediaSourceHolder mediaSourceHolder = mediaSourceHolders.get(i); + mediaSourceHolder.firstWindowIndexInChild += windowOffsetUpdate; + } + } + + // Internal methods to manage child sources. + + @Nullable + private static MediaSource.MediaPeriodId getMediaPeriodIdForChildMediaPeriodId( + MediaSourceHolder mediaSourceHolder, MediaSource.MediaPeriodId mediaPeriodId) { + for (int i = 0; i < mediaSourceHolder.activeMediaPeriodIds.size(); i++) { + // Ensure the reported media period id has the same window sequence number as the one created + // by this media source. Otherwise it does not belong to this child source. + if (mediaSourceHolder.activeMediaPeriodIds.get(i).windowSequenceNumber + == mediaPeriodId.windowSequenceNumber) { + Object periodUid = getPeriodUid(mediaSourceHolder, mediaPeriodId.periodUid); + return mediaPeriodId.copyWithPeriodUid(periodUid); + } + } + return null; + } + + private static int getWindowIndexForChildWindowIndex( + MediaSourceHolder mediaSourceHolder, int windowIndex) { + return windowIndex + mediaSourceHolder.firstWindowIndexInChild; + } + + private void prepareChildSource(MediaSourceHolder holder) { + MediaSource mediaSource = holder.mediaSource; + MediaSource.MediaSourceCaller caller = + (source, timeline) -> mediaSourceListInfoListener.onPlaylistUpdateRequested(); + ForwardingEventListener eventListener = new ForwardingEventListener(holder); + childSources.put(holder, new MediaSourceAndListener(mediaSource, caller, eventListener)); + mediaSource.addEventListener(Util.createHandlerForCurrentOrMainLooper(), eventListener); + mediaSource.addDrmEventListener(Util.createHandlerForCurrentOrMainLooper(), eventListener); + mediaSource.prepareSource(caller, mediaTransferListener, playerId); + } + + private void maybeReleaseChildSource(MediaSourceHolder mediaSourceHolder) { + // Release if the source has been removed from the playlist and no periods are still active. 
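      // Note: both checks are needed. releasePeriod() calls into this method for sources that are
      // still in the playlist, and removeMediaSourcesInternal() calls it for removed sources that
      // may still own active periods; the child source is only torn down once both conditions hold.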
+ if (mediaSourceHolder.isRemoved && mediaSourceHolder.activeMediaPeriodIds.isEmpty()) { + MediaSourceAndListener removedChild = checkNotNull(childSources.remove(mediaSourceHolder)); + removedChild.mediaSource.releaseSource(removedChild.caller); + removedChild.mediaSource.removeEventListener(removedChild.eventListener); + removedChild.mediaSource.removeDrmEventListener(removedChild.eventListener); + enabledMediaSourceHolders.remove(mediaSourceHolder); + } + } + + /** Return uid of media source holder from period uid of concatenated source. */ + private static Object getMediaSourceHolderUid(Object periodUid) { + return PlaylistTimeline.getChildTimelineUidFromConcatenatedUid(periodUid); + } + + /** Return uid of child period from period uid of concatenated source. */ + private static Object getChildPeriodUid(Object periodUid) { + return PlaylistTimeline.getChildPeriodUidFromConcatenatedUid(periodUid); + } + + private static Object getPeriodUid(MediaSourceHolder holder, Object childPeriodUid) { + return PlaylistTimeline.getConcatenatedUid(holder.uid, childPeriodUid); + } + + /** Data class to hold playlist media sources together with meta data needed to process them. */ + /* package */ static final class MediaSourceHolder implements MediaSourceInfoHolder { + + public final MaskingMediaSource mediaSource; + public final Object uid; + public final List activeMediaPeriodIds; + + public int firstWindowIndexInChild; + public boolean isRemoved; + + public MediaSourceHolder(MediaSource mediaSource, boolean useLazyPreparation) { + this.mediaSource = new MaskingMediaSource(mediaSource, useLazyPreparation); + this.activeMediaPeriodIds = new ArrayList<>(); + this.uid = new Object(); + } + + public void reset(int firstWindowIndexInChild) { + this.firstWindowIndexInChild = firstWindowIndexInChild; + this.isRemoved = false; + this.activeMediaPeriodIds.clear(); + } + + @Override + public Object getUid() { + return uid; + } + + @Override + public Timeline getTimeline() { + return mediaSource.getTimeline(); + } + } + + private static final class MediaSourceAndListener { + + public final MediaSource mediaSource; + public final MediaSource.MediaSourceCaller caller; + public final ForwardingEventListener eventListener; + + public MediaSourceAndListener( + MediaSource mediaSource, + MediaSource.MediaSourceCaller caller, + ForwardingEventListener eventListener) { + this.mediaSource = mediaSource; + this.caller = caller; + this.eventListener = eventListener; + } + } + + private final class ForwardingEventListener + implements MediaSourceEventListener, DrmSessionEventListener { + + private final MediaSourceList.MediaSourceHolder id; + + public ForwardingEventListener(MediaSourceList.MediaSourceHolder id) { + this.id = id; + } + + // MediaSourceEventListener implementation + + @Override + public void onLoadStarted( + int windowIndex, + @Nullable MediaSource.MediaPeriodId mediaPeriodId, + LoadEventInfo loadEventData, + MediaLoadData mediaLoadData) { + @Nullable + Pair eventParameters = + getEventParameters(windowIndex, mediaPeriodId); + if (eventParameters != null) { + eventHandler.post( + () -> + eventListener.onLoadStarted( + eventParameters.first, eventParameters.second, loadEventData, mediaLoadData)); + } + } + + @Override + public void onLoadCompleted( + int windowIndex, + @Nullable MediaSource.MediaPeriodId mediaPeriodId, + LoadEventInfo loadEventData, + MediaLoadData mediaLoadData) { + @Nullable + Pair eventParameters = + getEventParameters(windowIndex, mediaPeriodId); + if (eventParameters != null) { + 
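        // Dispatch on the analytics handler, with the child's window index and media period id
        // translated into the top-level playlist's coordinates.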
eventHandler.post( + () -> + eventListener.onLoadCompleted( + eventParameters.first, eventParameters.second, loadEventData, mediaLoadData)); + } + } + + @Override + public void onLoadCanceled( + int windowIndex, + @Nullable MediaSource.MediaPeriodId mediaPeriodId, + LoadEventInfo loadEventData, + MediaLoadData mediaLoadData) { + @Nullable + Pair eventParameters = + getEventParameters(windowIndex, mediaPeriodId); + if (eventParameters != null) { + eventHandler.post( + () -> + eventListener.onLoadCanceled( + eventParameters.first, eventParameters.second, loadEventData, mediaLoadData)); + } + } + + @Override + public void onLoadError( + int windowIndex, + @Nullable MediaSource.MediaPeriodId mediaPeriodId, + LoadEventInfo loadEventData, + MediaLoadData mediaLoadData, + IOException error, + boolean wasCanceled) { + @Nullable + Pair eventParameters = + getEventParameters(windowIndex, mediaPeriodId); + if (eventParameters != null) { + eventHandler.post( + () -> + eventListener.onLoadError( + eventParameters.first, + eventParameters.second, + loadEventData, + mediaLoadData, + error, + wasCanceled)); + } + } + + @Override + public void onUpstreamDiscarded( + int windowIndex, + @Nullable MediaSource.MediaPeriodId mediaPeriodId, + MediaLoadData mediaLoadData) { + @Nullable + Pair eventParameters = + getEventParameters(windowIndex, mediaPeriodId); + if (eventParameters != null) { + eventHandler.post( + () -> + eventListener.onUpstreamDiscarded( + eventParameters.first, checkNotNull(eventParameters.second), mediaLoadData)); + } + } + + @Override + public void onDownstreamFormatChanged( + int windowIndex, + @Nullable MediaSource.MediaPeriodId mediaPeriodId, + MediaLoadData mediaLoadData) { + @Nullable + Pair eventParameters = + getEventParameters(windowIndex, mediaPeriodId); + if (eventParameters != null) { + eventHandler.post( + () -> + eventListener.onDownstreamFormatChanged( + eventParameters.first, eventParameters.second, mediaLoadData)); + } + } + + // DrmSessionEventListener implementation + + @Override + public void onDrmSessionAcquired( + int windowIndex, + @Nullable MediaSource.MediaPeriodId mediaPeriodId, + @DrmSession.State int state) { + @Nullable + Pair eventParameters = + getEventParameters(windowIndex, mediaPeriodId); + if (eventParameters != null) { + eventHandler.post( + () -> + eventListener.onDrmSessionAcquired( + eventParameters.first, eventParameters.second, state)); + } + } + + @Override + public void onDrmKeysLoaded( + int windowIndex, @Nullable MediaSource.MediaPeriodId mediaPeriodId) { + @Nullable + Pair eventParameters = + getEventParameters(windowIndex, mediaPeriodId); + if (eventParameters != null) { + eventHandler.post( + () -> eventListener.onDrmKeysLoaded(eventParameters.first, eventParameters.second)); + } + } + + @Override + public void onDrmSessionManagerError( + int windowIndex, @Nullable MediaSource.MediaPeriodId mediaPeriodId, Exception error) { + @Nullable + Pair eventParameters = + getEventParameters(windowIndex, mediaPeriodId); + if (eventParameters != null) { + eventHandler.post( + () -> + eventListener.onDrmSessionManagerError( + eventParameters.first, eventParameters.second, error)); + } + } + + @Override + public void onDrmKeysRestored( + int windowIndex, @Nullable MediaSource.MediaPeriodId mediaPeriodId) { + @Nullable + Pair eventParameters = + getEventParameters(windowIndex, mediaPeriodId); + if (eventParameters != null) { + eventHandler.post( + () -> eventListener.onDrmKeysRestored(eventParameters.first, eventParameters.second)); + } + } + + @Override + 
public void onDrmKeysRemoved( + int windowIndex, @Nullable MediaSource.MediaPeriodId mediaPeriodId) { + @Nullable + Pair eventParameters = + getEventParameters(windowIndex, mediaPeriodId); + if (eventParameters != null) { + eventHandler.post( + () -> eventListener.onDrmKeysRemoved(eventParameters.first, eventParameters.second)); + } + } + + @Override + public void onDrmSessionReleased( + int windowIndex, @Nullable MediaSource.MediaPeriodId mediaPeriodId) { + @Nullable + Pair eventParameters = + getEventParameters(windowIndex, mediaPeriodId); + if (eventParameters != null) { + eventHandler.post( + () -> + eventListener.onDrmSessionReleased(eventParameters.first, eventParameters.second)); + } + } + + /** Updates the event parameters and returns whether the event should be dispatched. */ + @Nullable + private Pair getEventParameters( + int childWindowIndex, @Nullable MediaSource.MediaPeriodId childMediaPeriodId) { + @Nullable MediaSource.MediaPeriodId mediaPeriodId = null; + if (childMediaPeriodId != null) { + mediaPeriodId = getMediaPeriodIdForChildMediaPeriodId(id, childMediaPeriodId); + if (mediaPeriodId == null) { + // Media period not found. Ignore event. + return null; + } + } + int windowIndex = getWindowIndexForChildWindowIndex(id, childWindowIndex); + return Pair.create(windowIndex, mediaPeriodId); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/MetadataRetriever.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MetadataRetriever.java new file mode 100644 index 0000000000..90ba6ef720 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/MetadataRetriever.java @@ -0,0 +1,223 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.android.exoplayer2; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import android.content.Context; +import android.os.Handler; +import android.os.HandlerThread; +import android.os.Message; +import androidx.annotation.VisibleForTesting; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory; +import com.google.android.exoplayer2.extractor.ExtractorsFactory; +import com.google.android.exoplayer2.extractor.mp4.Mp4Extractor; +import com.google.android.exoplayer2.source.DefaultMediaSourceFactory; +import com.google.android.exoplayer2.source.MediaPeriod; +import com.google.android.exoplayer2.source.MediaSource; +import com.google.android.exoplayer2.source.TrackGroupArray; +import com.google.android.exoplayer2.upstream.Allocator; +import com.google.android.exoplayer2.upstream.DefaultAllocator; +import com.google.android.exoplayer2.util.Clock; +import com.google.android.exoplayer2.util.HandlerWrapper; +import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.util.concurrent.SettableFuture; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +// TODO(internal b/161127201): discard samples written to the sample queue. +/** Retrieves the static metadata of {@link MediaItem MediaItems}. */ +public final class MetadataRetriever { + + private MetadataRetriever() {} + + /** + * Retrieves the {@link TrackGroupArray} corresponding to a {@link MediaItem}. + * + *
<p>
      This is equivalent to using {@link #retrieveMetadata(MediaSource.Factory, MediaItem)} with a + * {@link DefaultMediaSourceFactory} and a {@link DefaultExtractorsFactory} with {@link + * Mp4Extractor#FLAG_READ_MOTION_PHOTO_METADATA} and {@link Mp4Extractor#FLAG_READ_SEF_DATA} set. + * + * @param context The {@link Context}. + * @param mediaItem The {@link MediaItem} whose metadata should be retrieved. + * @return A {@link ListenableFuture} of the result. + */ + public static ListenableFuture retrieveMetadata( + Context context, MediaItem mediaItem) { + return retrieveMetadata(context, mediaItem, Clock.DEFAULT); + } + + /** + * Retrieves the {@link TrackGroupArray} corresponding to a {@link MediaItem}. + * + *
<p>
      This method is thread-safe. + * + * @param mediaSourceFactory mediaSourceFactory The {@link MediaSource.Factory} to use to read the + * data. + * @param mediaItem The {@link MediaItem} whose metadata should be retrieved. + * @return A {@link ListenableFuture} of the result. + */ + public static ListenableFuture retrieveMetadata( + MediaSource.Factory mediaSourceFactory, MediaItem mediaItem) { + return retrieveMetadata(mediaSourceFactory, mediaItem, Clock.DEFAULT); + } + + @VisibleForTesting + /* package */ static ListenableFuture retrieveMetadata( + Context context, MediaItem mediaItem, Clock clock) { + ExtractorsFactory extractorsFactory = + new DefaultExtractorsFactory() + .setMp4ExtractorFlags( + Mp4Extractor.FLAG_READ_MOTION_PHOTO_METADATA | Mp4Extractor.FLAG_READ_SEF_DATA); + MediaSource.Factory mediaSourceFactory = + new DefaultMediaSourceFactory(context, extractorsFactory); + return retrieveMetadata(mediaSourceFactory, mediaItem, clock); + } + + private static ListenableFuture retrieveMetadata( + MediaSource.Factory mediaSourceFactory, MediaItem mediaItem, Clock clock) { + // Recreate thread and handler every time this method is called so that it can be used + // concurrently. + return new MetadataRetrieverInternal(mediaSourceFactory, clock).retrieveMetadata(mediaItem); + } + + private static final class MetadataRetrieverInternal { + + private static final int MESSAGE_PREPARE_SOURCE = 0; + private static final int MESSAGE_CHECK_FOR_FAILURE = 1; + private static final int MESSAGE_CONTINUE_LOADING = 2; + private static final int MESSAGE_RELEASE = 3; + + private final MediaSource.Factory mediaSourceFactory; + private final HandlerThread mediaSourceThread; + private final HandlerWrapper mediaSourceHandler; + private final SettableFuture trackGroupsFuture; + + public MetadataRetrieverInternal(MediaSource.Factory mediaSourceFactory, Clock clock) { + this.mediaSourceFactory = mediaSourceFactory; + mediaSourceThread = new HandlerThread("ExoPlayer:MetadataRetriever"); + mediaSourceThread.start(); + mediaSourceHandler = + clock.createHandler(mediaSourceThread.getLooper(), new MediaSourceHandlerCallback()); + trackGroupsFuture = SettableFuture.create(); + } + + public ListenableFuture retrieveMetadata(MediaItem mediaItem) { + mediaSourceHandler.obtainMessage(MESSAGE_PREPARE_SOURCE, mediaItem).sendToTarget(); + return trackGroupsFuture; + } + + private final class MediaSourceHandlerCallback implements Handler.Callback { + + private static final int ERROR_POLL_INTERVAL_MS = 100; + + private final MediaSourceCaller mediaSourceCaller; + + private @MonotonicNonNull MediaSource mediaSource; + private @MonotonicNonNull MediaPeriod mediaPeriod; + + public MediaSourceHandlerCallback() { + mediaSourceCaller = new MediaSourceCaller(); + } + + @Override + public boolean handleMessage(Message msg) { + switch (msg.what) { + case MESSAGE_PREPARE_SOURCE: + MediaItem mediaItem = (MediaItem) msg.obj; + mediaSource = mediaSourceFactory.createMediaSource(mediaItem); + mediaSource.prepareSource( + mediaSourceCaller, /* mediaTransferListener= */ null, PlayerId.UNSET); + mediaSourceHandler.sendEmptyMessage(MESSAGE_CHECK_FOR_FAILURE); + return true; + case MESSAGE_CHECK_FOR_FAILURE: + try { + if (mediaPeriod == null) { + checkNotNull(mediaSource).maybeThrowSourceInfoRefreshError(); + } else { + mediaPeriod.maybeThrowPrepareError(); + } + mediaSourceHandler.sendEmptyMessageDelayed( + MESSAGE_CHECK_FOR_FAILURE, /* delayMs= */ ERROR_POLL_INTERVAL_MS); + } catch (Exception e) { + trackGroupsFuture.setException(e); 
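                // The returned future completes exceptionally here; the release message sent next
                // tears down the media period, the source and the internal handler thread.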
+ mediaSourceHandler.obtainMessage(MESSAGE_RELEASE).sendToTarget(); + } + return true; + case MESSAGE_CONTINUE_LOADING: + checkNotNull(mediaPeriod).continueLoading(/* positionUs= */ 0); + return true; + case MESSAGE_RELEASE: + if (mediaPeriod != null) { + checkNotNull(mediaSource).releasePeriod(mediaPeriod); + } + checkNotNull(mediaSource).releaseSource(mediaSourceCaller); + mediaSourceHandler.removeCallbacksAndMessages(/* token= */ null); + mediaSourceThread.quit(); + return true; + default: + return false; + } + } + + private final class MediaSourceCaller implements MediaSource.MediaSourceCaller { + + private final MediaPeriodCallback mediaPeriodCallback; + private final Allocator allocator; + + private boolean mediaPeriodCreated; + + public MediaSourceCaller() { + mediaPeriodCallback = new MediaPeriodCallback(); + allocator = + new DefaultAllocator( + /* trimOnReset= */ true, + /* individualAllocationSize= */ C.DEFAULT_BUFFER_SEGMENT_SIZE); + } + + @Override + public void onSourceInfoRefreshed(MediaSource source, Timeline timeline) { + if (mediaPeriodCreated) { + // Ignore dynamic updates. + return; + } + mediaPeriodCreated = true; + mediaPeriod = + source.createPeriod( + new MediaSource.MediaPeriodId(timeline.getUidOfPeriod(/* periodIndex= */ 0)), + allocator, + /* startPositionUs= */ 0); + mediaPeriod.prepare(mediaPeriodCallback, /* positionUs= */ 0); + } + + private final class MediaPeriodCallback implements MediaPeriod.Callback { + + @Override + public void onPrepared(MediaPeriod mediaPeriod) { + trackGroupsFuture.set(mediaPeriod.getTrackGroups()); + mediaSourceHandler.obtainMessage(MESSAGE_RELEASE).sendToTarget(); + } + + @Override + public void onContinueLoadingRequested(MediaPeriod mediaPeriod) { + mediaSourceHandler.obtainMessage(MESSAGE_CONTINUE_LOADING).sendToTarget(); + } + } + } + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/NoSampleRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/NoSampleRenderer.java index f03e8ab25c..6d365723ef 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/NoSampleRenderer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/NoSampleRenderer.java @@ -16,7 +16,7 @@ package com.google.android.exoplayer2; import androidx.annotation.Nullable; - +import com.google.android.exoplayer2.analytics.PlayerId; import com.google.android.exoplayer2.source.SampleStream; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.MediaClock; @@ -31,15 +31,15 @@ */ public abstract class NoSampleRenderer implements Renderer, RendererCapabilities { - @MonotonicNonNull - private RendererConfiguration configuration; + + private @MonotonicNonNull RendererConfiguration configuration; private int index; private int state; @Nullable private SampleStream stream; private boolean streamIsFinal; @Override - public final int getTrackType() { + public final @C.TrackType int getTrackType() { return C.TRACK_TYPE_NONE; } @@ -49,7 +49,7 @@ public final RendererCapabilities getCapabilities() { } @Override - public final void setIndex(int index) { + public final void init(int index, PlayerId playerId) { this.index = index; } @@ -64,30 +64,22 @@ public final int getState() { return state; } - /** - * Replaces the {@link SampleStream} that will be associated with this renderer. - *
<p>
      - * This method may be called when the renderer is in the following states: - * {@link #STATE_DISABLED}. - * - * @param configuration The renderer configuration. - * @param formats The enabled formats. Should be empty. - * @param stream The {@link SampleStream} from which the renderer should consume. - * @param positionUs The player's current position. - * @param joining Whether this renderer is being enabled to join an ongoing playback. - * @param offsetUs The offset that should be subtracted from {@code positionUs} - * to get the playback position with respect to the media. - * @throws ExoPlaybackException If an error occurs. - */ @Override - public final void enable(RendererConfiguration configuration, Format[] formats, - SampleStream stream, long positionUs, boolean joining, long offsetUs) + public final void enable( + RendererConfiguration configuration, + Format[] formats, + SampleStream stream, + long positionUs, + boolean joining, + boolean mayRenderStartOfStream, + long startPositionUs, + long offsetUs) throws ExoPlaybackException { Assertions.checkState(state == STATE_DISABLED); this.configuration = configuration; state = STATE_ENABLED; onEnabled(joining); - replaceStream(formats, stream, offsetUs); + replaceStream(formats, stream, startPositionUs, offsetUs); onPositionReset(positionUs, joining); } @@ -98,20 +90,9 @@ public final void start() throws ExoPlaybackException { onStarted(); } - /** - * Replaces the {@link SampleStream} that will be associated with this renderer. - *
<p>
      - * This method may be called when the renderer is in the following states: - * {@link #STATE_ENABLED}, {@link #STATE_STARTED}. - * - * @param formats The enabled formats. Should be empty. - * @param stream The {@link SampleStream} to be associated with this renderer. - * @param offsetUs The offset that should be subtracted from {@code positionUs} in - * {@link #render(long, long)} to get the playback position with respect to the media. - * @throws ExoPlaybackException If an error occurs. - */ @Override - public final void replaceStream(Format[] formats, SampleStream stream, long offsetUs) + public final void replaceStream( + Format[] formats, SampleStream stream, long startPositionUs, long offsetUs) throws ExoPlaybackException { Assertions.checkState(!streamIsFinal); this.stream = stream; @@ -145,8 +126,7 @@ public final boolean isCurrentStreamFinal() { } @Override - public final void maybeThrowStreamError() throws IOException { - } + public final void maybeThrowStreamError() throws IOException {} @Override public final void resetPosition(long positionUs) throws ExoPlaybackException { @@ -155,7 +135,7 @@ public final void resetPosition(long positionUs) throws ExoPlaybackException { } @Override - public final void stop() throws ExoPlaybackException { + public final void stop() { Assertions.checkState(state == STATE_STARTED); state = STATE_ENABLED; onStopped(); @@ -189,21 +169,20 @@ public boolean isEnded() { // RendererCapabilities implementation. @Override - @Capabilities - public int supportsFormat(Format format) throws ExoPlaybackException { - return RendererCapabilities.create(FORMAT_UNSUPPORTED_TYPE); + public @Capabilities int supportsFormat(Format format) throws ExoPlaybackException { + return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE); } @Override - @AdaptiveSupport - public int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException { + public @AdaptiveSupport int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException { return ADAPTIVE_NOT_SUPPORTED; } // PlayerMessage.CCTarget implementation. @Override - public void handleMessage(int what, @Nullable Object object) throws ExoPlaybackException { + public void handleMessage(@MessageType int messageType, @Nullable Object message) + throws ExoPlaybackException { // Do nothing. } @@ -211,8 +190,8 @@ public void handleMessage(int what, @Nullable Object object) throws ExoPlaybackE /** * Called when the renderer is enabled. - *
<p>
      - * The default implementation is a no-op. + * + *
<p>
      The default implementation is a no-op. * * @param joining Whether this renderer is being enabled to join an ongoing playback. * @throws ExoPlaybackException If an error occurs. @@ -223,11 +202,11 @@ protected void onEnabled(boolean joining) throws ExoPlaybackException { /** * Called when the renderer's offset has been changed. - *
<p>
      - * The default implementation is a no-op. * - * @param offsetUs The offset that should be subtracted from {@code positionUs} in - * {@link #render(long, long)} to get the playback position with respect to the media. + *
<p>
      The default implementation is a no-op. + * + * @param offsetUs The offset that should be subtracted from {@code positionUs} in {@link + * #render(long, long)} to get the playback position with respect to the media. * @throws ExoPlaybackException If an error occurs. */ protected void onRendererOffsetChanged(long offsetUs) throws ExoPlaybackException { @@ -235,11 +214,11 @@ protected void onRendererOffsetChanged(long offsetUs) throws ExoPlaybackExceptio } /** - * Called when the position is reset. This occurs when the renderer is enabled after - * {@link #onRendererOffsetChanged(long)} has been called, and also when a position - * discontinuity is encountered. - *
<p>
      - * The default implementation is a no-op. + * Called when the position is reset. This occurs when the renderer is enabled after {@link + * #onRendererOffsetChanged(long)} has been called, and also when a position discontinuity is + * encountered. + * + *
<p>
      The default implementation is a no-op. * * @param positionUs The new playback position in microseconds. * @param joining Whether this renderer is being enabled to join an ongoing playback. @@ -251,8 +230,8 @@ protected void onPositionReset(long positionUs, boolean joining) throws ExoPlayb /** * Called when the renderer is started. - *
<p>
      - * The default implementation is a no-op. + * + *
<p>
      The default implementation is a no-op. * * @throws ExoPlaybackException If an error occurs. */ @@ -262,19 +241,17 @@ protected void onStarted() throws ExoPlaybackException { /** * Called when the renderer is stopped. - *
<p>
      - * The default implementation is a no-op. * - * @throws ExoPlaybackException If an error occurs. + *
<p>
      The default implementation is a no-op. */ - protected void onStopped() throws ExoPlaybackException { + protected void onStopped() { // Do nothing. } /** * Called when the renderer is disabled. - *
<p>
      - * The default implementation is a no-op. + * + *
<p>
      The default implementation is a no-op. */ protected void onDisabled() { // Do nothing. @@ -300,11 +277,8 @@ protected final RendererConfiguration getConfiguration() { return configuration; } - /** - * Returns the index of the renderer within the player. - */ + /** Returns the index of the renderer within the player. */ protected final int getIndex() { return index; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ParserException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ParserException.java index e0cae0cf3a..1e6c3a88f2 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ParserException.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ParserException.java @@ -15,37 +15,94 @@ */ package com.google.android.exoplayer2; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C.DataType; import java.io.IOException; -/** - * Thrown when an error occurs parsing media data and metadata. - */ +/** Thrown when an error occurs parsing media data and metadata. */ public class ParserException extends IOException { - public ParserException() { - super(); + /** + * Creates a new instance for which {@link #contentIsMalformed} is true and {@link #dataType} is + * {@link C#DATA_TYPE_UNKNOWN}. + * + * @param message See {@link #getMessage()}. + * @param cause See {@link #getCause()}. + * @return The created instance. + */ + public static ParserException createForMalformedDataOfUnknownType( + @Nullable String message, @Nullable Throwable cause) { + return new ParserException(message, cause, /* contentIsMalformed= */ true, C.DATA_TYPE_UNKNOWN); } /** - * @param message The detail message for the exception. + * Creates a new instance for which {@link #contentIsMalformed} is true and {@link #dataType} is + * {@link C#DATA_TYPE_MEDIA}. + * + * @param message See {@link #getMessage()}. + * @param cause See {@link #getCause()}. + * @return The created instance. */ - public ParserException(String message) { - super(message); + public static ParserException createForMalformedContainer( + @Nullable String message, @Nullable Throwable cause) { + return new ParserException(message, cause, /* contentIsMalformed= */ true, C.DATA_TYPE_MEDIA); } /** - * @param cause The cause for the exception. + * Creates a new instance for which {@link #contentIsMalformed} is true and {@link #dataType} is + * {@link C#DATA_TYPE_MANIFEST}. + * + * @param message See {@link #getMessage()}. + * @param cause See {@link #getCause()}. + * @return The created instance. */ - public ParserException(Throwable cause) { - super(cause); + public static ParserException createForMalformedManifest( + @Nullable String message, @Nullable Throwable cause) { + return new ParserException( + message, cause, /* contentIsMalformed= */ true, C.DATA_TYPE_MANIFEST); } /** - * @param message The detail message for the exception. - * @param cause The cause for the exception. + * Creates a new instance for which {@link #contentIsMalformed} is false and {@link #dataType} is + * {@link C#DATA_TYPE_MANIFEST}. + * + * @param message See {@link #getMessage()}. + * @param cause See {@link #getCause()}. + * @return The created instance. 
*/ - public ParserException(String message, Throwable cause) { - super(message, cause); + public static ParserException createForManifestWithUnsupportedFeature( + @Nullable String message, @Nullable Throwable cause) { + return new ParserException( + message, cause, /* contentIsMalformed= */ false, C.DATA_TYPE_MANIFEST); } + /** + * Creates a new instance for which {@link #contentIsMalformed} is false and {@link #dataType} is + * {@link C#DATA_TYPE_MEDIA}. + * + * @param message See {@link #getMessage()}. + * @return The created instance. + */ + public static ParserException createForUnsupportedContainerFeature(@Nullable String message) { + return new ParserException( + message, /* cause= */ null, /* contentIsMalformed= */ false, C.DATA_TYPE_MEDIA); + } + + /** + * Whether the parsing error was caused by a bitstream not following the expected format. May be + * false when a parser encounters a legal condition which it does not support. + */ + public final boolean contentIsMalformed; + /** The {@link DataType data type} of the parsed bitstream. */ + public final int dataType; + + protected ParserException( + @Nullable String message, + @Nullable Throwable cause, + boolean contentIsMalformed, + @DataType int dataType) { + super(message, cause); + this.contentIsMalformed = contentIsMalformed; + this.dataType = dataType; + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/PercentageRating.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/PercentageRating.java new file mode 100644 index 0000000000..3d30d493a2 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/PercentageRating.java @@ -0,0 +1,94 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import android.os.Bundle; +import androidx.annotation.FloatRange; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Objects; + +/** A rating expressed as a percentage. */ +public final class PercentageRating extends Rating { + + private final float percent; + + /** Creates a unrated instance. */ + public PercentageRating() { + percent = RATING_UNSET; + } + + /** + * Creates a rated instance with the given percentage. + * + * @param percent The percentage value of the rating. + */ + public PercentageRating(@FloatRange(from = 0, to = 100) float percent) { + checkArgument(percent >= 0.0f && percent <= 100.0f, "percent must be in the range of [0, 100]"); + this.percent = percent; + } + + @Override + public boolean isRated() { + return percent != RATING_UNSET; + } + + /** + * Returns the percent value of this rating. Will be within the range {@code [0f, 100f]}, or + * {@link #RATING_UNSET} if unrated. 
+ */ + public float getPercent() { + return percent; + } + + @Override + public int hashCode() { + return Objects.hashCode(percent); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (!(obj instanceof PercentageRating)) { + return false; + } + return percent == ((PercentageRating) obj).percent; + } + + // Bundleable implementation. + + private static final @RatingType int TYPE = RATING_TYPE_PERCENTAGE; + + private static final String FIELD_PERCENT = Util.intToStringMaxRadix(1); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putInt(FIELD_RATING_TYPE, TYPE); + bundle.putFloat(FIELD_PERCENT, percent); + return bundle; + } + + /** Object that can restore a {@link PercentageRating} from a {@link Bundle}. */ + public static final Creator CREATOR = PercentageRating::fromBundle; + + private static PercentageRating fromBundle(Bundle bundle) { + checkArgument(bundle.getInt(FIELD_RATING_TYPE, /* defaultValue= */ RATING_TYPE_UNSET) == TYPE); + float percent = bundle.getFloat(FIELD_PERCENT, /* defaultValue= */ RATING_UNSET); + return percent == RATING_UNSET ? new PercentageRating() : new PercentageRating(percent); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaybackException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaybackException.java new file mode 100644 index 0000000000..b2950639e0 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaybackException.java @@ -0,0 +1,471 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.net.ConnectivityManager; +import android.os.Bundle; +import android.os.RemoteException; +import android.os.SystemClock; +import android.text.TextUtils; +import androidx.annotation.CallSuper; +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.util.Clock; +import com.google.android.exoplayer2.util.Util; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** Thrown when a non locally recoverable playback failure occurs. */ +public class PlaybackException extends Exception implements Bundleable { + + /** + * Codes that identify causes of player errors. + * + *
<p>
      This list of errors may be extended in future versions, and {@link Player} implementations + * may define custom error codes. + */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef( + open = true, + value = { + ERROR_CODE_UNSPECIFIED, + ERROR_CODE_REMOTE_ERROR, + ERROR_CODE_BEHIND_LIVE_WINDOW, + ERROR_CODE_TIMEOUT, + ERROR_CODE_FAILED_RUNTIME_CHECK, + ERROR_CODE_IO_UNSPECIFIED, + ERROR_CODE_IO_NETWORK_CONNECTION_FAILED, + ERROR_CODE_IO_NETWORK_CONNECTION_TIMEOUT, + ERROR_CODE_IO_INVALID_HTTP_CONTENT_TYPE, + ERROR_CODE_IO_BAD_HTTP_STATUS, + ERROR_CODE_IO_FILE_NOT_FOUND, + ERROR_CODE_IO_NO_PERMISSION, + ERROR_CODE_IO_CLEARTEXT_NOT_PERMITTED, + ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE, + ERROR_CODE_PARSING_CONTAINER_MALFORMED, + ERROR_CODE_PARSING_MANIFEST_MALFORMED, + ERROR_CODE_PARSING_CONTAINER_UNSUPPORTED, + ERROR_CODE_PARSING_MANIFEST_UNSUPPORTED, + ERROR_CODE_DECODER_INIT_FAILED, + ERROR_CODE_DECODER_QUERY_FAILED, + ERROR_CODE_DECODING_FAILED, + ERROR_CODE_DECODING_FORMAT_EXCEEDS_CAPABILITIES, + ERROR_CODE_DECODING_FORMAT_UNSUPPORTED, + ERROR_CODE_AUDIO_TRACK_INIT_FAILED, + ERROR_CODE_AUDIO_TRACK_WRITE_FAILED, + ERROR_CODE_DRM_UNSPECIFIED, + ERROR_CODE_DRM_SCHEME_UNSUPPORTED, + ERROR_CODE_DRM_PROVISIONING_FAILED, + ERROR_CODE_DRM_CONTENT_ERROR, + ERROR_CODE_DRM_LICENSE_ACQUISITION_FAILED, + ERROR_CODE_DRM_DISALLOWED_OPERATION, + ERROR_CODE_DRM_SYSTEM_ERROR, + ERROR_CODE_DRM_DEVICE_REVOKED, + ERROR_CODE_DRM_LICENSE_EXPIRED + }) + public @interface ErrorCode {} + + // Miscellaneous errors (1xxx). + + /** Caused by an error whose cause could not be identified. */ + public static final int ERROR_CODE_UNSPECIFIED = 1000; + /** + * Caused by an unidentified error in a remote Player, which is a Player that runs on a different + * host or process. + */ + public static final int ERROR_CODE_REMOTE_ERROR = 1001; + /** Caused by the loading position falling behind the sliding window of available live content. */ + public static final int ERROR_CODE_BEHIND_LIVE_WINDOW = 1002; + /** Caused by a generic timeout. */ + public static final int ERROR_CODE_TIMEOUT = 1003; + /** + * Caused by a failed runtime check. + * + *
<p>
      This can happen when the application fails to comply with the player's API requirements (for + * example, by passing invalid arguments), or when the player reaches an invalid state. + */ + public static final int ERROR_CODE_FAILED_RUNTIME_CHECK = 1004; + + // Input/Output errors (2xxx). + + /** Caused by an Input/Output error which could not be identified. */ + public static final int ERROR_CODE_IO_UNSPECIFIED = 2000; + /** + * Caused by a network connection failure. + * + *
<p>
      The following is a non-exhaustive list of possible reasons: + * + *

  + * <ul>
      + *   <li>There is no network connectivity (you can check this by querying {@link
      + *       ConnectivityManager#getActiveNetwork}).
      + *   <li>The URL's domain is misspelled or does not exist.
      + *   <li>The target host is unreachable.
      + *   <li>The server unexpectedly closes the connection.
      + * </ul>
      + */ + public static final int ERROR_CODE_IO_NETWORK_CONNECTION_FAILED = 2001; + /** Caused by a network timeout, meaning the server is taking too long to fulfill a request. */ + public static final int ERROR_CODE_IO_NETWORK_CONNECTION_TIMEOUT = 2002; + /** + * Caused by a server returning a resource with an invalid "Content-Type" HTTP header value. + * + *
<p>
      For example, this can happen when the player is expecting a piece of media, but the server + * returns a paywall HTML page, with content type "text/html". + */ + public static final int ERROR_CODE_IO_INVALID_HTTP_CONTENT_TYPE = 2003; + /** Caused by an HTTP server returning an unexpected HTTP response status code. */ + public static final int ERROR_CODE_IO_BAD_HTTP_STATUS = 2004; + /** Caused by a non-existent file. */ + public static final int ERROR_CODE_IO_FILE_NOT_FOUND = 2005; + /** + * Caused by lack of permission to perform an IO operation. For example, lack of permission to + * access internet or external storage. + */ + public static final int ERROR_CODE_IO_NO_PERMISSION = 2006; + /** + * Caused by the player trying to access cleartext HTTP traffic (meaning http:// rather than + * https://) when the app's Network Security Configuration does not permit it. + * + *
<p>
      See this corresponding + * troubleshooting topic. + */ + public static final int ERROR_CODE_IO_CLEARTEXT_NOT_PERMITTED = 2007; + /** Caused by reading data out of the data bound. */ + public static final int ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE = 2008; + + // Content parsing errors (3xxx). + + /** Caused by a parsing error associated with a media container format bitstream. */ + public static final int ERROR_CODE_PARSING_CONTAINER_MALFORMED = 3001; + /** + * Caused by a parsing error associated with a media manifest. Examples of a media manifest are a + * DASH or a SmoothStreaming manifest, or an HLS playlist. + */ + public static final int ERROR_CODE_PARSING_MANIFEST_MALFORMED = 3002; + /** + * Caused by attempting to extract a file with an unsupported media container format, or an + * unsupported media container feature. + */ + public static final int ERROR_CODE_PARSING_CONTAINER_UNSUPPORTED = 3003; + /** + * Caused by an unsupported feature in a media manifest. Examples of a media manifest are a DASH + * or a SmoothStreaming manifest, or an HLS playlist. + */ + public static final int ERROR_CODE_PARSING_MANIFEST_UNSUPPORTED = 3004; + + // Decoding errors (4xxx). + + /** Caused by a decoder initialization failure. */ + public static final int ERROR_CODE_DECODER_INIT_FAILED = 4001; + /** Caused by a decoder query failure. */ + public static final int ERROR_CODE_DECODER_QUERY_FAILED = 4002; + /** Caused by a failure while trying to decode media samples. */ + public static final int ERROR_CODE_DECODING_FAILED = 4003; + /** Caused by trying to decode content whose format exceeds the capabilities of the device. */ + public static final int ERROR_CODE_DECODING_FORMAT_EXCEEDS_CAPABILITIES = 4004; + /** Caused by trying to decode content whose format is not supported. */ + public static final int ERROR_CODE_DECODING_FORMAT_UNSUPPORTED = 4005; + + // AudioTrack errors (5xxx). + + /** Caused by an AudioTrack initialization failure. */ + public static final int ERROR_CODE_AUDIO_TRACK_INIT_FAILED = 5001; + /** Caused by an AudioTrack write operation failure. */ + public static final int ERROR_CODE_AUDIO_TRACK_WRITE_FAILED = 5002; + + // DRM errors (6xxx). + + /** Caused by an unspecified error related to DRM protection. */ + public static final int ERROR_CODE_DRM_UNSPECIFIED = 6000; + /** + * Caused by a chosen DRM protection scheme not being supported by the device. Examples of DRM + * protection schemes are ClearKey and Widevine. + */ + public static final int ERROR_CODE_DRM_SCHEME_UNSUPPORTED = 6001; + /** Caused by a failure while provisioning the device. */ + public static final int ERROR_CODE_DRM_PROVISIONING_FAILED = 6002; + /** + * Caused by attempting to play incompatible DRM-protected content. + * + *

      For example, this can happen when attempting to play a DRM protected stream using a scheme + * (like Widevine) for which there is no corresponding license acquisition data (like a pssh box). + */ + public static final int ERROR_CODE_DRM_CONTENT_ERROR = 6003; + /** Caused by a failure while trying to obtain a license. */ + public static final int ERROR_CODE_DRM_LICENSE_ACQUISITION_FAILED = 6004; + /** Caused by an operation being disallowed by a license policy. */ + public static final int ERROR_CODE_DRM_DISALLOWED_OPERATION = 6005; + /** Caused by an error in the DRM system. */ + public static final int ERROR_CODE_DRM_SYSTEM_ERROR = 6006; + /** Caused by the device having revoked DRM privileges. */ + public static final int ERROR_CODE_DRM_DEVICE_REVOKED = 6007; + /** Caused by an expired DRM license being loaded into an open DRM session. */ + public static final int ERROR_CODE_DRM_LICENSE_EXPIRED = 6008; + + /** + * Player implementations that want to surface custom errors can use error codes greater than this + * value, so as to avoid collision with other error codes defined in this class. + */ + public static final int CUSTOM_ERROR_CODE_BASE = 1000000; + + /** Returns the name of a given {@code errorCode}. */ + public static String getErrorCodeName(@ErrorCode int errorCode) { + switch (errorCode) { + case ERROR_CODE_UNSPECIFIED: + return "ERROR_CODE_UNSPECIFIED"; + case ERROR_CODE_REMOTE_ERROR: + return "ERROR_CODE_REMOTE_ERROR"; + case ERROR_CODE_BEHIND_LIVE_WINDOW: + return "ERROR_CODE_BEHIND_LIVE_WINDOW"; + case ERROR_CODE_TIMEOUT: + return "ERROR_CODE_TIMEOUT"; + case ERROR_CODE_FAILED_RUNTIME_CHECK: + return "ERROR_CODE_FAILED_RUNTIME_CHECK"; + case ERROR_CODE_IO_UNSPECIFIED: + return "ERROR_CODE_IO_UNSPECIFIED"; + case ERROR_CODE_IO_NETWORK_CONNECTION_FAILED: + return "ERROR_CODE_IO_NETWORK_CONNECTION_FAILED"; + case ERROR_CODE_IO_NETWORK_CONNECTION_TIMEOUT: + return "ERROR_CODE_IO_NETWORK_CONNECTION_TIMEOUT"; + case ERROR_CODE_IO_INVALID_HTTP_CONTENT_TYPE: + return "ERROR_CODE_IO_INVALID_HTTP_CONTENT_TYPE"; + case ERROR_CODE_IO_BAD_HTTP_STATUS: + return "ERROR_CODE_IO_BAD_HTTP_STATUS"; + case ERROR_CODE_IO_FILE_NOT_FOUND: + return "ERROR_CODE_IO_FILE_NOT_FOUND"; + case ERROR_CODE_IO_NO_PERMISSION: + return "ERROR_CODE_IO_NO_PERMISSION"; + case ERROR_CODE_IO_CLEARTEXT_NOT_PERMITTED: + return "ERROR_CODE_IO_CLEARTEXT_NOT_PERMITTED"; + case ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE: + return "ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE"; + case ERROR_CODE_PARSING_CONTAINER_MALFORMED: + return "ERROR_CODE_PARSING_CONTAINER_MALFORMED"; + case ERROR_CODE_PARSING_MANIFEST_MALFORMED: + return "ERROR_CODE_PARSING_MANIFEST_MALFORMED"; + case ERROR_CODE_PARSING_CONTAINER_UNSUPPORTED: + return "ERROR_CODE_PARSING_CONTAINER_UNSUPPORTED"; + case ERROR_CODE_PARSING_MANIFEST_UNSUPPORTED: + return "ERROR_CODE_PARSING_MANIFEST_UNSUPPORTED"; + case ERROR_CODE_DECODER_INIT_FAILED: + return "ERROR_CODE_DECODER_INIT_FAILED"; + case ERROR_CODE_DECODER_QUERY_FAILED: + return "ERROR_CODE_DECODER_QUERY_FAILED"; + case ERROR_CODE_DECODING_FAILED: + return "ERROR_CODE_DECODING_FAILED"; + case ERROR_CODE_DECODING_FORMAT_EXCEEDS_CAPABILITIES: + return "ERROR_CODE_DECODING_FORMAT_EXCEEDS_CAPABILITIES"; + case ERROR_CODE_DECODING_FORMAT_UNSUPPORTED: + return "ERROR_CODE_DECODING_FORMAT_UNSUPPORTED"; + case ERROR_CODE_AUDIO_TRACK_INIT_FAILED: + return "ERROR_CODE_AUDIO_TRACK_INIT_FAILED"; + case ERROR_CODE_AUDIO_TRACK_WRITE_FAILED: + return "ERROR_CODE_AUDIO_TRACK_WRITE_FAILED"; + case 
ERROR_CODE_DRM_UNSPECIFIED: + return "ERROR_CODE_DRM_UNSPECIFIED"; + case ERROR_CODE_DRM_SCHEME_UNSUPPORTED: + return "ERROR_CODE_DRM_SCHEME_UNSUPPORTED"; + case ERROR_CODE_DRM_PROVISIONING_FAILED: + return "ERROR_CODE_DRM_PROVISIONING_FAILED"; + case ERROR_CODE_DRM_CONTENT_ERROR: + return "ERROR_CODE_DRM_CONTENT_ERROR"; + case ERROR_CODE_DRM_LICENSE_ACQUISITION_FAILED: + return "ERROR_CODE_DRM_LICENSE_ACQUISITION_FAILED"; + case ERROR_CODE_DRM_DISALLOWED_OPERATION: + return "ERROR_CODE_DRM_DISALLOWED_OPERATION"; + case ERROR_CODE_DRM_SYSTEM_ERROR: + return "ERROR_CODE_DRM_SYSTEM_ERROR"; + case ERROR_CODE_DRM_DEVICE_REVOKED: + return "ERROR_CODE_DRM_DEVICE_REVOKED"; + case ERROR_CODE_DRM_LICENSE_EXPIRED: + return "ERROR_CODE_DRM_LICENSE_EXPIRED"; + default: + if (errorCode >= CUSTOM_ERROR_CODE_BASE) { + return "custom error code"; + } else { + return "invalid error code"; + } + } + } + + /** + * Equivalent to {@link PlaybackException#getErrorCodeName(int) + * PlaybackException.getErrorCodeName(this.errorCode)}. + */ + public final String getErrorCodeName() { + return getErrorCodeName(errorCode); + } + + /** An error code which identifies the cause of the playback failure. */ + public final @ErrorCode int errorCode; + + /** The value of {@link SystemClock#elapsedRealtime()} when this exception was created. */ + public final long timestampMs; + + /** + * Creates an instance. + * + * @param errorCode A number which identifies the cause of the error. May be one of the {@link + * ErrorCode ErrorCodes}. + * @param cause See {@link #getCause()}. + * @param message See {@link #getMessage()}. + */ + public PlaybackException( + @Nullable String message, @Nullable Throwable cause, @ErrorCode int errorCode) { + this(message, cause, errorCode, Clock.DEFAULT.elapsedRealtime()); + } + + /** Creates a new instance using the fields obtained from the given {@link Bundle}. */ + protected PlaybackException(Bundle bundle) { + this( + /* message= */ bundle.getString(FIELD_STRING_MESSAGE), + /* cause= */ getCauseFromBundle(bundle), + /* errorCode= */ bundle.getInt( + FIELD_INT_ERROR_CODE, /* defaultValue= */ ERROR_CODE_UNSPECIFIED), + /* timestampMs= */ bundle.getLong( + FIELD_LONG_TIMESTAMP_MS, /* defaultValue= */ SystemClock.elapsedRealtime())); + } + + /** Creates a new instance using the given values. */ + protected PlaybackException( + @Nullable String message, + @Nullable Throwable cause, + @ErrorCode int errorCode, + long timestampMs) { + super(message, cause); + this.errorCode = errorCode; + this.timestampMs = timestampMs; + } + + /** + * Returns whether the error data associated to this exception equals the error data associated to + * {@code other}. + * + *
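A minimal sketch of how these error codes are typically consumed: a Player.Listener can branch on errorCode and log the stable name from getErrorCodeName(). The listener class, log tag and retry policy below are assumptions for illustration, not something this change prescribes.

```java
import com.google.android.exoplayer2.PlaybackException;
import com.google.android.exoplayer2.Player;

// Illustrative only: log the stable error-code name and retry the cases that
// are usually transient. The recovery policy here is an assumption.
final class ErrorHandlingListener implements Player.Listener {
  private final Player player;

  ErrorHandlingListener(Player player) {
    this.player = player;
  }

  @Override
  public void onPlayerError(PlaybackException error) {
    android.util.Log.w("Playback", error.getErrorCodeName(), error);
    switch (error.errorCode) {
      case PlaybackException.ERROR_CODE_BEHIND_LIVE_WINDOW:
        player.seekToDefaultPosition(); // Re-join the live window.
        player.prepare();
        break;
      case PlaybackException.ERROR_CODE_IO_NETWORK_CONNECTION_FAILED:
      case PlaybackException.ERROR_CODE_IO_NETWORK_CONNECTION_TIMEOUT:
        player.prepare(); // Naive retry; real code would back off.
        break;
      default:
        break; // Treat everything else as fatal and surface it to the UI.
    }
  }
}
```

Such a listener would be registered with player.addListener(new ErrorHandlingListener(player)).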

      Note that this method does not compare the exceptions' stacktraces. + */ + @CallSuper + public boolean errorInfoEquals(@Nullable PlaybackException other) { + if (this == other) { + return true; + } + if (other == null || getClass() != other.getClass()) { + return false; + } + + @Nullable Throwable thisCause = getCause(); + @Nullable Throwable thatCause = other.getCause(); + if (thisCause != null && thatCause != null) { + if (!Util.areEqual(thisCause.getMessage(), thatCause.getMessage())) { + return false; + } + if (!Util.areEqual(thisCause.getClass(), thatCause.getClass())) { + return false; + } + } else if (thisCause != null || thatCause != null) { + return false; + } + return errorCode == other.errorCode + && Util.areEqual(getMessage(), other.getMessage()) + && timestampMs == other.timestampMs; + } + + // Bundleable implementation. + + private static final String FIELD_INT_ERROR_CODE = Util.intToStringMaxRadix(0); + private static final String FIELD_LONG_TIMESTAMP_MS = Util.intToStringMaxRadix(1); + private static final String FIELD_STRING_MESSAGE = Util.intToStringMaxRadix(2); + private static final String FIELD_STRING_CAUSE_CLASS_NAME = Util.intToStringMaxRadix(3); + private static final String FIELD_STRING_CAUSE_MESSAGE = Util.intToStringMaxRadix(4); + + /** + * Defines a minimum field ID value for subclasses to use when implementing {@link #toBundle()} + * and {@link Bundleable.Creator}. + * + *
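A hypothetical round trip through the Bundle fields declared above, using the CREATOR defined further down in this file, for example when handing an error across a process boundary. The helper class, message and cause are made up for illustration.

```java
import android.os.Bundle;
import java.io.IOException;
import com.google.android.exoplayer2.PlaybackException;

final class PlaybackExceptionBundleDemo {
  // Sketch: errorCode, message, timestamp and the cause's class/message survive
  // the Bundle round trip; stack traces do not, which is why errorInfoEquals()
  // deliberately ignores them.
  static boolean roundTripKeepsErrorData() {
    PlaybackException original =
        new PlaybackException(
            "Manifest request failed",
            new IOException("HTTP 404"),
            PlaybackException.ERROR_CODE_IO_BAD_HTTP_STATUS);
    Bundle bundle = original.toBundle();
    PlaybackException restored = PlaybackException.CREATOR.fromBundle(bundle);
    return original.errorInfoEquals(restored); // Expected to be true.
  }
}
```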

      Subclasses should obtain their {@link Bundle Bundle's} field keys by applying a non-negative + * offset on this constant and passing the result to {@link Util#intToStringMaxRadix(int)}. + */ + protected static final int FIELD_CUSTOM_ID_BASE = 1000; + + /** Object that can create a {@link PlaybackException} from a {@link Bundle}. */ + public static final Creator CREATOR = PlaybackException::new; + + @CallSuper + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putInt(FIELD_INT_ERROR_CODE, errorCode); + bundle.putLong(FIELD_LONG_TIMESTAMP_MS, timestampMs); + bundle.putString(FIELD_STRING_MESSAGE, getMessage()); + @Nullable Throwable cause = getCause(); + if (cause != null) { + bundle.putString(FIELD_STRING_CAUSE_CLASS_NAME, cause.getClass().getName()); + bundle.putString(FIELD_STRING_CAUSE_MESSAGE, cause.getMessage()); + } + return bundle; + } + + // Creates a new {@link Throwable} with possibly {@code null} message. + @SuppressWarnings("nullness:argument") + private static Throwable createThrowable(Class clazz, @Nullable String message) + throws Exception { + return (Throwable) clazz.getConstructor(String.class).newInstance(message); + } + + // Creates a new {@link RemoteException} with possibly {@code null} message. + @SuppressWarnings("nullness:argument") + private static RemoteException createRemoteException(@Nullable String message) { + return new RemoteException(message); + } + + @Nullable + private static Throwable getCauseFromBundle(Bundle bundle) { + @Nullable String causeClassName = bundle.getString(FIELD_STRING_CAUSE_CLASS_NAME); + @Nullable String causeMessage = bundle.getString(FIELD_STRING_CAUSE_MESSAGE); + @Nullable Throwable cause = null; + if (!TextUtils.isEmpty(causeClassName)) { + try { + Class clazz = + Class.forName( + causeClassName, /* initialize= */ true, PlaybackException.class.getClassLoader()); + if (Throwable.class.isAssignableFrom(clazz)) { + cause = createThrowable(clazz, causeMessage); + } + } catch (Throwable e) { + // There was an error while creating the cause using reflection, do nothing here and let the + // finally block handle the issue. + } finally { + if (cause == null) { + // The bundle has fields to represent the cause, but we were unable to re-create the + // exception using reflection. We instantiate a RemoteException to reflect this problem. + cause = createRemoteException(causeMessage); + } + } + } + return cause; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaybackInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaybackInfo.java index 9d2a3b5459..5a985901d6 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaybackInfo.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaybackInfo.java @@ -17,20 +17,22 @@ import androidx.annotation.CheckResult; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Player.PlaybackSuppressionReason; +import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; import com.google.android.exoplayer2.source.TrackGroupArray; import com.google.android.exoplayer2.trackselection.TrackSelectorResult; +import com.google.common.collect.ImmutableList; +import java.util.List; -/** - * Information about an ongoing playback. - */ +/** Information about an ongoing playback. */ /* package */ final class PlaybackInfo { /** - * Dummy media period id used while the timeline is empty and no period id is specified. 
This id - * is used when playback infos are created with {@link #createDummy(long, TrackSelectorResult)}. + * Placeholder media period id used while the timeline is empty and no period id is specified. + * This id is used when playback infos are created with {@link #createDummy(TrackSelectorResult)}. */ - private static final MediaPeriodId DUMMY_MEDIA_PERIOD_ID = + private static final MediaPeriodId PLACEHOLDER_MEDIA_PERIOD_ID = new MediaPeriodId(/* periodUid= */ new Object()); /** The current {@link Timeline}. */ @@ -38,20 +40,18 @@ /** The {@link MediaPeriodId} of the currently playing media period in the {@link #timeline}. */ public final MediaPeriodId periodId; /** - * The start position at which playback started in {@link #periodId} relative to the start of the - * associated period in the {@link #timeline}, in microseconds. Note that this value changes for - * each position discontinuity. - */ - public final long startPositionUs; - /** - * If {@link #periodId} refers to an ad, the position of the suspended content relative to the - * start of the associated period in the {@link #timeline}, in microseconds. {@link C#TIME_UNSET} - * if {@link #periodId} does not refer to an ad or if the suspended content should be played from - * its default position. + * The requested next start position for the current period in the {@link #timeline}, in + * microseconds, or {@link C#TIME_UNSET} if the period was requested to start at its default + * position. + * + *

      Note that if {@link #periodId} refers to an ad, this is the requested start position for the + * suspended content. */ - public final long contentPositionUs; + public final long requestedContentPositionUs; + /** The start position after a reported position discontinuity, in microseconds. */ + public final long discontinuityStartPositionUs; /** The current playback state. One of the {@link Player}.STATE_ constants. */ - @Player.State public final int playbackState; + public final @Player.State int playbackState; /** The current playback error, or null if this is not an error state. */ @Nullable public final ExoPlaybackException playbackError; /** Whether the player is currently loading. */ @@ -60,8 +60,18 @@ public final TrackGroupArray trackGroups; /** The result of the current track selection. */ public final TrackSelectorResult trackSelectorResult; + /** The current static metadata of the track selections. */ + public final List staticMetadata; /** The {@link MediaPeriodId} of the currently loading media period in the {@link #timeline}. */ public final MediaPeriodId loadingMediaPeriodId; + /** Whether playback should proceed when {@link #playbackState} == {@link Player#STATE_READY}. */ + public final boolean playWhenReady; + /** Reason why playback is suppressed even though {@link #playWhenReady} is {@code true}. */ + public final @PlaybackSuppressionReason int playbackSuppressionReason; + /** The playback parameters. */ + public final PlaybackParameters playbackParameters; + /** Whether the main player loop is sleeping, while using offload scheduling. */ + public final boolean sleepingForOffload; /** * Position up to which media is buffered in {@link #loadingMediaPeriodId) relative to the start @@ -80,30 +90,33 @@ public volatile long positionUs; /** - * Creates empty dummy playback info which can be used for masking as long as no real playback - * info is available. + * Creates an empty placeholder playback info which can be used for masking as long as no real + * playback info is available. * - * @param startPositionUs The start position at which playback should start, in microseconds. * @param emptyTrackSelectorResult An empty track selector result with null entries for each * renderer. - * @return A dummy playback info. + * @return A placeholder playback info. */ - public static PlaybackInfo createDummy( - long startPositionUs, TrackSelectorResult emptyTrackSelectorResult) { + public static PlaybackInfo createDummy(TrackSelectorResult emptyTrackSelectorResult) { return new PlaybackInfo( Timeline.EMPTY, - DUMMY_MEDIA_PERIOD_ID, - startPositionUs, - /* contentPositionUs= */ C.TIME_UNSET, + PLACEHOLDER_MEDIA_PERIOD_ID, + /* requestedContentPositionUs= */ C.TIME_UNSET, + /* discontinuityStartPositionUs= */ 0, Player.STATE_IDLE, /* playbackError= */ null, /* isLoading= */ false, TrackGroupArray.EMPTY, emptyTrackSelectorResult, - DUMMY_MEDIA_PERIOD_ID, - startPositionUs, + /* staticMetadata= */ ImmutableList.of(), + PLACEHOLDER_MEDIA_PERIOD_ID, + /* playWhenReady= */ false, + Player.PLAYBACK_SUPPRESSION_REASON_NONE, + PlaybackParameters.DEFAULT, + /* bufferedPositionUs= */ 0, /* totalBufferedDurationUs= */ 0, - startPositionUs); + /* positionUs= */ 0, + /* sleepingForOffload= */ false); } /** @@ -111,71 +124,64 @@ public static PlaybackInfo createDummy( * * @param timeline See {@link #timeline}. * @param periodId See {@link #periodId}. - * @param startPositionUs See {@link #startPositionUs}. - * @param contentPositionUs See {@link #contentPositionUs}. 
+ * @param requestedContentPositionUs See {@link #requestedContentPositionUs}. * @param playbackState See {@link #playbackState}. + * @param playbackError See {@link #playbackError}. * @param isLoading See {@link #isLoading}. * @param trackGroups See {@link #trackGroups}. * @param trackSelectorResult See {@link #trackSelectorResult}. + * @param staticMetadata See {@link #staticMetadata}. * @param loadingMediaPeriodId See {@link #loadingMediaPeriodId}. + * @param playWhenReady See {@link #playWhenReady}. + * @param playbackSuppressionReason See {@link #playbackSuppressionReason}. + * @param playbackParameters See {@link #playbackParameters}. * @param bufferedPositionUs See {@link #bufferedPositionUs}. * @param totalBufferedDurationUs See {@link #totalBufferedDurationUs}. * @param positionUs See {@link #positionUs}. + * @param sleepingForOffload See {@link #sleepingForOffload}. */ public PlaybackInfo( Timeline timeline, MediaPeriodId periodId, - long startPositionUs, - long contentPositionUs, + long requestedContentPositionUs, + long discontinuityStartPositionUs, @Player.State int playbackState, @Nullable ExoPlaybackException playbackError, boolean isLoading, TrackGroupArray trackGroups, TrackSelectorResult trackSelectorResult, + List staticMetadata, MediaPeriodId loadingMediaPeriodId, + boolean playWhenReady, + @PlaybackSuppressionReason int playbackSuppressionReason, + PlaybackParameters playbackParameters, long bufferedPositionUs, long totalBufferedDurationUs, - long positionUs) { + long positionUs, + boolean sleepingForOffload) { this.timeline = timeline; this.periodId = periodId; - this.startPositionUs = startPositionUs; - this.contentPositionUs = contentPositionUs; + this.requestedContentPositionUs = requestedContentPositionUs; + this.discontinuityStartPositionUs = discontinuityStartPositionUs; this.playbackState = playbackState; this.playbackError = playbackError; this.isLoading = isLoading; this.trackGroups = trackGroups; this.trackSelectorResult = trackSelectorResult; + this.staticMetadata = staticMetadata; this.loadingMediaPeriodId = loadingMediaPeriodId; + this.playWhenReady = playWhenReady; + this.playbackSuppressionReason = playbackSuppressionReason; + this.playbackParameters = playbackParameters; this.bufferedPositionUs = bufferedPositionUs; this.totalBufferedDurationUs = totalBufferedDurationUs; this.positionUs = positionUs; + this.sleepingForOffload = sleepingForOffload; } - /** - * Returns dummy media period id for the first-to-be-played period of the current timeline. - * - * @param shuffleModeEnabled Whether shuffle mode is enabled. - * @param window A writable {@link Timeline.Window}. - * @param period A writable {@link Timeline.Period}. - * @return A dummy media period id for the first-to-be-played period of the current timeline. - */ - public MediaPeriodId getDummyFirstMediaPeriodId( - boolean shuffleModeEnabled, Timeline.Window window, Timeline.Period period) { - if (timeline.isEmpty()) { - return DUMMY_MEDIA_PERIOD_ID; - } - int firstWindowIndex = timeline.getFirstWindowIndex(shuffleModeEnabled); - int firstPeriodIndex = timeline.getWindow(firstWindowIndex, window).firstPeriodIndex; - int currentPeriodIndex = timeline.getIndexOfPeriod(periodId.periodUid); - long windowSequenceNumber = C.INDEX_UNSET; - if (currentPeriodIndex != C.INDEX_UNSET) { - int currentWindowIndex = timeline.getPeriod(currentPeriodIndex, period).windowIndex; - if (firstWindowIndex == currentWindowIndex) { - // Keep window sequence number if the new position is still in the same window. 
- windowSequenceNumber = periodId.windowSequenceNumber; - } - } - return new MediaPeriodId(timeline.getUidOfPeriod(firstPeriodIndex), windowSequenceNumber); + /** Returns a placeholder period id for an empty timeline. */ + public static MediaPeriodId getDummyPeriodForEmptyTimeline() { + return PLACEHOLDER_MEDIA_PERIOD_ID; } /** @@ -183,31 +189,45 @@ public MediaPeriodId getDummyFirstMediaPeriodId( * * @param periodId New playing media period. See {@link #periodId}. * @param positionUs New position. See {@link #positionUs}. - * @param contentPositionUs New content position. See {@link #contentPositionUs}. Value is ignored - * if {@code periodId.isAd()} is true. + * @param requestedContentPositionUs New requested content position. See {@link + * #requestedContentPositionUs}. * @param totalBufferedDurationUs New buffered duration. See {@link #totalBufferedDurationUs}. + * @param trackGroups The track groups for the new position. See {@link #trackGroups}. + * @param trackSelectorResult The track selector result for the new position. See {@link + * #trackSelectorResult}. + * @param staticMetadata The static metadata for the track selections. See {@link + * #staticMetadata}. * @return Copied playback info with new playing position. */ @CheckResult public PlaybackInfo copyWithNewPosition( MediaPeriodId periodId, long positionUs, - long contentPositionUs, - long totalBufferedDurationUs) { + long requestedContentPositionUs, + long discontinuityStartPositionUs, + long totalBufferedDurationUs, + TrackGroupArray trackGroups, + TrackSelectorResult trackSelectorResult, + List staticMetadata) { return new PlaybackInfo( timeline, periodId, - positionUs, - periodId.isAd() ? contentPositionUs : C.TIME_UNSET, + requestedContentPositionUs, + discontinuityStartPositionUs, playbackState, playbackError, isLoading, trackGroups, trackSelectorResult, + staticMetadata, loadingMediaPeriodId, + playWhenReady, + playbackSuppressionReason, + playbackParameters, bufferedPositionUs, totalBufferedDurationUs, - positionUs); + positionUs, + sleepingForOffload); } /** @@ -221,17 +241,22 @@ public PlaybackInfo copyWithTimeline(Timeline timeline) { return new PlaybackInfo( timeline, periodId, - startPositionUs, - contentPositionUs, + requestedContentPositionUs, + discontinuityStartPositionUs, playbackState, playbackError, isLoading, trackGroups, trackSelectorResult, + staticMetadata, loadingMediaPeriodId, + playWhenReady, + playbackSuppressionReason, + playbackParameters, bufferedPositionUs, totalBufferedDurationUs, - positionUs); + positionUs, + sleepingForOffload); } /** @@ -245,17 +270,22 @@ public PlaybackInfo copyWithPlaybackState(int playbackState) { return new PlaybackInfo( timeline, periodId, - startPositionUs, - contentPositionUs, + requestedContentPositionUs, + discontinuityStartPositionUs, playbackState, playbackError, isLoading, trackGroups, trackSelectorResult, + staticMetadata, loadingMediaPeriodId, + playWhenReady, + playbackSuppressionReason, + playbackParameters, bufferedPositionUs, totalBufferedDurationUs, - positionUs); + positionUs, + sleepingForOffload); } /** @@ -269,17 +299,22 @@ public PlaybackInfo copyWithPlaybackError(@Nullable ExoPlaybackException playbac return new PlaybackInfo( timeline, periodId, - startPositionUs, - contentPositionUs, + requestedContentPositionUs, + discontinuityStartPositionUs, playbackState, playbackError, isLoading, trackGroups, trackSelectorResult, + staticMetadata, loadingMediaPeriodId, + playWhenReady, + playbackSuppressionReason, + playbackParameters, bufferedPositionUs, 
totalBufferedDurationUs, - positionUs); + positionUs, + sleepingForOffload); } /** @@ -293,66 +328,141 @@ public PlaybackInfo copyWithIsLoading(boolean isLoading) { return new PlaybackInfo( timeline, periodId, - startPositionUs, - contentPositionUs, + requestedContentPositionUs, + discontinuityStartPositionUs, playbackState, playbackError, isLoading, trackGroups, trackSelectorResult, + staticMetadata, loadingMediaPeriodId, + playWhenReady, + playbackSuppressionReason, + playbackParameters, bufferedPositionUs, totalBufferedDurationUs, - positionUs); + positionUs, + sleepingForOffload); } /** - * Copies playback info with new track information. + * Copies playback info with new loading media period. * - * @param trackGroups New track groups. See {@link #trackGroups}. - * @param trackSelectorResult New track selector result. See {@link #trackSelectorResult}. - * @return Copied playback info with new track information. + * @param loadingMediaPeriodId New loading media period id. See {@link #loadingMediaPeriodId}. + * @return Copied playback info with new loading media period. */ @CheckResult - public PlaybackInfo copyWithTrackInfo( - TrackGroupArray trackGroups, TrackSelectorResult trackSelectorResult) { + public PlaybackInfo copyWithLoadingMediaPeriodId(MediaPeriodId loadingMediaPeriodId) { return new PlaybackInfo( timeline, periodId, - startPositionUs, - contentPositionUs, + requestedContentPositionUs, + discontinuityStartPositionUs, playbackState, playbackError, isLoading, trackGroups, trackSelectorResult, + staticMetadata, loadingMediaPeriodId, + playWhenReady, + playbackSuppressionReason, + playbackParameters, bufferedPositionUs, totalBufferedDurationUs, - positionUs); + positionUs, + sleepingForOffload); } /** - * Copies playback info with new loading media period. + * Copies playback info with new information about whether playback should proceed when ready. * - * @param loadingMediaPeriodId New loading media period id. See {@link #loadingMediaPeriodId}. - * @return Copied playback info with new loading media period. + * @param playWhenReady Whether playback should proceed when {@link #playbackState} == {@link + * Player#STATE_READY}. + * @param playbackSuppressionReason Reason why playback is suppressed even though {@link + * #playWhenReady} is {@code true}. + * @return Copied playback info with new information. */ @CheckResult - public PlaybackInfo copyWithLoadingMediaPeriodId(MediaPeriodId loadingMediaPeriodId) { + public PlaybackInfo copyWithPlayWhenReady( + boolean playWhenReady, @PlaybackSuppressionReason int playbackSuppressionReason) { return new PlaybackInfo( timeline, periodId, - startPositionUs, - contentPositionUs, + requestedContentPositionUs, + discontinuityStartPositionUs, playbackState, playbackError, isLoading, trackGroups, trackSelectorResult, + staticMetadata, loadingMediaPeriodId, + playWhenReady, + playbackSuppressionReason, + playbackParameters, bufferedPositionUs, totalBufferedDurationUs, - positionUs); + positionUs, + sleepingForOffload); + } + + /** + * Copies playback info with new playback parameters. + * + * @param playbackParameters New playback parameters. See {@link #playbackParameters}. + * @return Copied playback info with new playback parameters. 
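PlaybackInfo is package-private, so it is not part of the public API. Purely to illustrate the copy-on-write pattern these copyWith* methods follow (each call returns a new immutable snapshot with one field replaced), here is a toy class with invented fields, not ExoPlayer API.

```java
// Toy illustration only (not ExoPlayer API): an immutable snapshot whose
// copyWith-style methods thread every existing field into a new instance,
// mirroring the pattern used by PlaybackInfo above.
final class Snapshot {
  final int playbackState;
  final boolean playWhenReady;

  Snapshot(int playbackState, boolean playWhenReady) {
    this.playbackState = playbackState;
    this.playWhenReady = playWhenReady;
  }

  Snapshot copyWithPlaybackState(int playbackState) {
    return new Snapshot(playbackState, playWhenReady);
  }

  Snapshot copyWithPlayWhenReady(boolean playWhenReady) {
    return new Snapshot(playbackState, playWhenReady);
  }
}
```

Because every copy shares the untouched fields, callers can keep reading an old snapshot safely while the player thread publishes a new one.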
+ */ + @CheckResult + public PlaybackInfo copyWithPlaybackParameters(PlaybackParameters playbackParameters) { + return new PlaybackInfo( + timeline, + periodId, + requestedContentPositionUs, + discontinuityStartPositionUs, + playbackState, + playbackError, + isLoading, + trackGroups, + trackSelectorResult, + staticMetadata, + loadingMediaPeriodId, + playWhenReady, + playbackSuppressionReason, + playbackParameters, + bufferedPositionUs, + totalBufferedDurationUs, + positionUs, + sleepingForOffload); + } + + /** + * Copies playback info with new sleepingForOffload. + * + * @param sleepingForOffload New main player loop sleeping state. See {@link #sleepingForOffload}. + * @return Copied playback info with new main player loop sleeping state. + */ + @CheckResult + public PlaybackInfo copyWithSleepingForOffload(boolean sleepingForOffload) { + return new PlaybackInfo( + timeline, + periodId, + requestedContentPositionUs, + discontinuityStartPositionUs, + playbackState, + playbackError, + isLoading, + trackGroups, + trackSelectorResult, + staticMetadata, + loadingMediaPeriodId, + playWhenReady, + playbackSuppressionReason, + playbackParameters, + bufferedPositionUs, + totalBufferedDurationUs, + positionUs, + sleepingForOffload); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaybackParameters.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaybackParameters.java index 057cb371e5..185dfc0190 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaybackParameters.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaybackParameters.java @@ -15,64 +15,52 @@ */ package com.google.android.exoplayer2; +import android.os.Bundle; +import androidx.annotation.CheckResult; +import androidx.annotation.FloatRange; import androidx.annotation.Nullable; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; -/** - * The parameters that apply to playback. - */ -public final class PlaybackParameters { +/** Parameters that apply to playback, including speed setting. */ +public final class PlaybackParameters implements Bundleable { - /** - * The default playback parameters: real-time playback with no pitch modification or silence - * skipping. - */ + /** The default playback parameters: real-time playback with no silence skipping. */ public static final PlaybackParameters DEFAULT = new PlaybackParameters(/* speed= */ 1f); /** The factor by which playback will be sped up. */ public final float speed; - /** The factor by which the audio pitch will be scaled. */ + /** The factor by which pitch will be shifted. */ public final float pitch; - /** Whether to skip silence in the input. */ - public final boolean skipSilence; - private final int scaledUsPerMs; /** - * Creates new playback parameters that set the playback speed. + * Creates new playback parameters that set the playback speed. The pitch of audio will not be + * adjusted, so the effect is to time-stretch the audio. * * @param speed The factor by which playback will be sped up. Must be greater than zero. */ public PlaybackParameters(float speed) { - this(speed, /* pitch= */ 1f, /* skipSilence= */ false); - } - - /** - * Creates new playback parameters that set the playback speed and audio pitch scaling factor. - * - * @param speed The factor by which playback will be sped up. Must be greater than zero. - * @param pitch The factor by which the audio pitch will be scaled. Must be greater than zero. 
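A usage sketch for the trimmed-down speed/pitch parameters. The helper class is hypothetical and the player instance is assumed to exist; setPlaybackParameters/getPlaybackParameters are the standard Player methods for applying them.

```java
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.Player;

final class SpeedControls {
  // Sketch: speed 1.5f with pitch 1f time-stretches the audio; withSpeed()
  // copies the parameters while keeping the configured pitch.
  static void enableFastPlayback(Player player) {
    player.setPlaybackParameters(new PlaybackParameters(/* speed= */ 1.5f, /* pitch= */ 1f));
  }

  static void bumpSpeed(Player player, float newSpeed) {
    player.setPlaybackParameters(player.getPlaybackParameters().withSpeed(newSpeed));
  }
}
```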
- */ - public PlaybackParameters(float speed, float pitch) { - this(speed, pitch, /* skipSilence= */ false); + this(speed, /* pitch= */ 1f); } /** - * Creates new playback parameters that set the playback speed, audio pitch scaling factor and - * whether to skip silence in the audio stream. + * Creates new playback parameters that set the playback speed/pitch. * * @param speed The factor by which playback will be sped up. Must be greater than zero. - * @param pitch The factor by which the audio pitch will be scaled. Must be greater than zero. - * @param skipSilence Whether to skip silences in the audio stream. + * @param pitch The factor by which the pitch of audio will be adjusted. Must be greater than + * zero. Useful values are {@code 1} (to time-stretch audio) and the same value as passed in + * as the {@code speed} (to resample audio, which is useful for slow-motion videos). */ - public PlaybackParameters(float speed, float pitch, boolean skipSilence) { + public PlaybackParameters( + @FloatRange(from = 0, fromInclusive = false) float speed, + @FloatRange(from = 0, fromInclusive = false) float pitch) { Assertions.checkArgument(speed > 0); Assertions.checkArgument(pitch > 0); this.speed = speed; this.pitch = pitch; - this.skipSilence = skipSilence; scaledUsPerMs = Math.round(speed * 1000f); } @@ -87,6 +75,17 @@ public long getMediaTimeUsForPlayoutTimeMs(long timeMs) { return timeMs * scaledUsPerMs; } + /** + * Returns a copy with the given speed. + * + * @param speed The new speed. Must be greater than zero. + * @return The copied playback parameters. + */ + @CheckResult + public PlaybackParameters withSpeed(@FloatRange(from = 0, fromInclusive = false) float speed) { + return new PlaybackParameters(speed, pitch); + } + @Override public boolean equals(@Nullable Object obj) { if (this == obj) { @@ -96,9 +95,7 @@ public boolean equals(@Nullable Object obj) { return false; } PlaybackParameters other = (PlaybackParameters) obj; - return this.speed == other.speed - && this.pitch == other.pitch - && this.skipSilence == other.skipSilence; + return this.speed == other.speed && this.pitch == other.pitch; } @Override @@ -106,8 +103,32 @@ public int hashCode() { int result = 17; result = 31 * result + Float.floatToRawIntBits(speed); result = 31 * result + Float.floatToRawIntBits(pitch); - result = 31 * result + (skipSilence ? 1 : 0); return result; } + @Override + public String toString() { + return Util.formatInvariant("PlaybackParameters(speed=%.2f, pitch=%.2f)", speed, pitch); + } + + // Bundleable implementation. + + private static final String FIELD_SPEED = Util.intToStringMaxRadix(0); + private static final String FIELD_PITCH = Util.intToStringMaxRadix(1); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putFloat(FIELD_SPEED, speed); + bundle.putFloat(FIELD_PITCH, pitch); + return bundle; + } + + /** Object that can restore {@link PlaybackParameters} from a {@link Bundle}. 
*/ + public static final Creator CREATOR = + bundle -> { + float speed = bundle.getFloat(FIELD_SPEED, /* defaultValue= */ 1f); + float pitch = bundle.getFloat(FIELD_PITCH, /* defaultValue= */ 1f); + return new PlaybackParameters(speed, pitch); + }; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaybackPreparer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaybackPreparer.java deleted file mode 100644 index 8ff7f50402..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaybackPreparer.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (C) 2018 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2; - -/** Called to prepare a playback. */ -public interface PlaybackPreparer { - - /** Called to prepare a playback. */ - void preparePlayback(); -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/Player.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/Player.java index 3871730c9c..8c2aa6fc34 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/Player.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/Player.java @@ -15,527 +15,1057 @@ */ package com.google.android.exoplayer2; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.os.Bundle; import android.os.Looper; import android.view.Surface; import android.view.SurfaceHolder; import android.view.SurfaceView; import android.view.TextureView; +import androidx.annotation.FloatRange; import androidx.annotation.IntDef; +import androidx.annotation.IntRange; import androidx.annotation.Nullable; -import com.google.android.exoplayer2.C.VideoScalingMode; import com.google.android.exoplayer2.audio.AudioAttributes; -import com.google.android.exoplayer2.audio.AudioListener; -import com.google.android.exoplayer2.audio.AuxEffectInfo; -import com.google.android.exoplayer2.metadata.MetadataOutput; -import com.google.android.exoplayer2.source.TrackGroupArray; -import com.google.android.exoplayer2.text.TextOutput; -import com.google.android.exoplayer2.trackselection.TrackSelectionArray; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.text.Cue; +import com.google.android.exoplayer2.text.CueGroup; +import com.google.android.exoplayer2.trackselection.TrackSelectionParameters; +import com.google.android.exoplayer2.util.FlagSet; +import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Util; -import com.google.android.exoplayer2.video.VideoDecoderOutputBufferRenderer; -import com.google.android.exoplayer2.video.VideoFrameMetadataListener; import com.google.android.exoplayer2.video.VideoListener; -import 
com.google.android.exoplayer2.video.spherical.CameraMotionListener; +import com.google.android.exoplayer2.video.VideoSize; +import com.google.common.base.Objects; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.util.ArrayList; +import java.util.List; /** * A media player interface defining traditional high-level functionality, such as the ability to * play, pause, seek and query properties of the currently playing media. - *

      - * Some important properties of media players that implement this interface are: + * + *

      All methods must be called from a single {@linkplain #getApplicationLooper() application + * thread} unless indicated otherwise. Callbacks in registered listeners are called on the same + * thread. + * + *
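A small sketch of honoring that threading contract when a call may originate off the application thread; the helper class is an assumption, not ExoPlayer API.

```java
import android.os.Handler;
import android.os.Looper;
import com.google.android.exoplayer2.Player;

final class PlayerThreadUtil {
  // Sketch: Player methods must run on the player's application thread; if the
  // caller might be elsewhere, post the call onto the application Looper.
  static void pauseSafely(Player player) {
    Looper applicationLooper = player.getApplicationLooper();
    if (Looper.myLooper() == applicationLooper) {
      player.pause();
    } else {
      new Handler(applicationLooper).post(player::pause);
    }
  }
}
```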

      This interface includes some convenience methods that can be implemented by calling other + * methods in the interface. {@link BasePlayer} implements these convenience methods so inheriting + * {@link BasePlayer} is recommended when implementing the interface so that only the minimal set of + * required methods can be implemented. + * + *

      Some important properties of media players that implement this interface are: + * *

        - *
- *     <li>They can provide a {@link Timeline} representing the structure of the media being played,
- *     which can be obtained by calling {@link #getCurrentTimeline()}.</li>
- *     <li>They can provide a {@link TrackGroupArray} defining the currently available tracks,
- *     which can be obtained by calling {@link #getCurrentTrackGroups()}.</li>
- *     <li>They contain a number of renderers, each of which is able to render tracks of a single
- *     type (e.g. audio, video or text). The number of renderers and their respective track types
- *     can be obtained by calling {@link #getRendererCount()} and {@link #getRendererType(int)}.
- *     </li>
- *     <li>They can provide a {@link TrackSelectionArray} defining which of the currently available
- *     tracks are selected to be rendered by each renderer. This can be obtained by calling
- *     {@link #getCurrentTrackSelections()}}.</li>
+ * <ul>
+ *   <li>They can provide a {@link Timeline} representing the structure of the media being played,
+ *       which can be obtained by calling {@link #getCurrentTimeline()}.
+ *   <li>They can provide a {@link Tracks} defining the currently available tracks and which are
+ *       selected to be rendered, which can be obtained by calling {@link #getCurrentTracks()}.
 * </ul>
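A usage sketch of the two properties listed above, assuming a prepared player; the helper class and log tag are illustrative.

```java
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.Tracks;

final class PlayerIntrospection {
  // Sketch: read the media structure and the currently available/selected
  // tracks, the two properties described in the list above.
  static void dump(Player player) {
    Timeline timeline = player.getCurrentTimeline();
    android.util.Log.d("Player", "windows=" + timeline.getWindowCount());

    Tracks tracks = player.getCurrentTracks();
    for (Tracks.Group group : tracks.getGroups()) {
      if (group.getType() == C.TRACK_TYPE_AUDIO) {
        android.util.Log.d("Player", "audio group selected=" + group.isSelected());
      }
    }
  }
}
```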
      */ public interface Player { - /** The audio component of a {@link Player}. */ - interface AudioComponent { + /** A set of {@linkplain Event events}. */ + final class Events { + + private final FlagSet flags; /** - * Adds a listener to receive audio events. + * Creates an instance. * - * @param listener The listener to register. + * @param flags The {@link FlagSet} containing the {@linkplain Event events}. */ - void addAudioListener(AudioListener listener); + public Events(FlagSet flags) { + this.flags = flags; + } /** - * Removes a listener of audio events. + * Returns whether the given {@link Event} occurred. * - * @param listener The listener to unregister. + * @param event The {@link Event}. + * @return Whether the {@link Event} occurred. */ - void removeAudioListener(AudioListener listener); + public boolean contains(@Event int event) { + return flags.contains(event); + } /** - * Sets the attributes for audio playback, used by the underlying audio track. If not set, the - * default audio attributes will be used. They are suitable for general media playback. + * Returns whether any of the given {@linkplain Event events} occurred. * - *

      Setting the audio attributes during playback may introduce a short gap in audio output as - * the audio track is recreated. A new audio session id will also be generated. + * @param events The {@linkplain Event events}. + * @return Whether any of the {@linkplain Event events} occurred. + */ + public boolean containsAny(@Event int... events) { + return flags.containsAny(events); + } + + /** Returns the number of events in the set. */ + public int size() { + return flags.size(); + } + + /** + * Returns the {@link Event} at the given index. * - *

      If tunneling is enabled by the track selector, the specified audio attributes will be - * ignored, but they will take effect if audio is later played without tunneling. + *

      Although index-based access is possible, it doesn't imply a particular order of these + * events. * - *

      If the device is running a build before platform API version 21, audio attributes cannot - * be set directly on the underlying audio track. In this case, the usage will be mapped onto an - * equivalent stream type using {@link Util#getStreamTypeForAudioUsage(int)}. + * @param index The index. Must be between 0 (inclusive) and {@link #size()} (exclusive). + * @return The {@link Event} at the given index. + * @throws IndexOutOfBoundsException If index is outside the allowed range. + */ + public @Event int get(int index) { + return flags.get(index); + } + + @Override + public int hashCode() { + return flags.hashCode(); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof Events)) { + return false; + } + Events other = (Events) obj; + return flags.equals(other.flags); + } + } + + /** Position info describing a playback position involved in a discontinuity. */ + final class PositionInfo implements Bundleable { + + /** + * The UID of the window, or {@code null} if the timeline is {@link Timeline#isEmpty() empty}. + */ + @Nullable public final Object windowUid; + /** + * @deprecated Use {@link #mediaItemIndex} instead. + */ + @Deprecated public final int windowIndex; + /** The media item index. */ + public final int mediaItemIndex; + /** The media item, or {@code null} if the timeline is {@link Timeline#isEmpty() empty}. */ + @Nullable public final MediaItem mediaItem; + /** + * The UID of the period, or {@code null} if the timeline is {@link Timeline#isEmpty() empty}. + */ + @Nullable public final Object periodUid; + /** The period index. */ + public final int periodIndex; + /** The playback position, in milliseconds. */ + public final long positionMs; + /** + * The content position, in milliseconds. * - * @param audioAttributes The attributes to use for audio playback. - * @deprecated Use {@link AudioComponent#setAudioAttributes(AudioAttributes, boolean)}. + *

      If {@link #adGroupIndex} is {@link C#INDEX_UNSET}, this is the same as {@link + * #positionMs}. + */ + public final long contentPositionMs; + /** + * The ad group index if the playback position is within an ad, {@link C#INDEX_UNSET} otherwise. + */ + public final int adGroupIndex; + /** + * The index of the ad within the ad group if the playback position is within an ad, {@link + * C#INDEX_UNSET} otherwise. + */ + public final int adIndexInAdGroup; + + /** + * @deprecated Use {@link #PositionInfo(Object, int, MediaItem, Object, int, long, long, int, + * int)} instead. */ @Deprecated - void setAudioAttributes(AudioAttributes audioAttributes); + public PositionInfo( + @Nullable Object windowUid, + int mediaItemIndex, + @Nullable Object periodUid, + int periodIndex, + long positionMs, + long contentPositionMs, + int adGroupIndex, + int adIndexInAdGroup) { + this( + windowUid, + mediaItemIndex, + MediaItem.EMPTY, + periodUid, + periodIndex, + positionMs, + contentPositionMs, + adGroupIndex, + adIndexInAdGroup); + } + + /** Creates an instance. */ + @SuppressWarnings("deprecation") // Setting deprecated windowIndex field + public PositionInfo( + @Nullable Object windowUid, + int mediaItemIndex, + @Nullable MediaItem mediaItem, + @Nullable Object periodUid, + int periodIndex, + long positionMs, + long contentPositionMs, + int adGroupIndex, + int adIndexInAdGroup) { + this.windowUid = windowUid; + this.windowIndex = mediaItemIndex; + this.mediaItemIndex = mediaItemIndex; + this.mediaItem = mediaItem; + this.periodUid = periodUid; + this.periodIndex = periodIndex; + this.positionMs = positionMs; + this.contentPositionMs = contentPositionMs; + this.adGroupIndex = adGroupIndex; + this.adIndexInAdGroup = adIndexInAdGroup; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + PositionInfo that = (PositionInfo) o; + return mediaItemIndex == that.mediaItemIndex + && periodIndex == that.periodIndex + && positionMs == that.positionMs + && contentPositionMs == that.contentPositionMs + && adGroupIndex == that.adGroupIndex + && adIndexInAdGroup == that.adIndexInAdGroup + && Objects.equal(windowUid, that.windowUid) + && Objects.equal(periodUid, that.periodUid) + && Objects.equal(mediaItem, that.mediaItem); + } + + @Override + public int hashCode() { + return Objects.hashCode( + windowUid, + mediaItemIndex, + mediaItem, + periodUid, + periodIndex, + positionMs, + contentPositionMs, + adGroupIndex, + adIndexInAdGroup); + } + + // Bundleable implementation. + + private static final String FIELD_MEDIA_ITEM_INDEX = Util.intToStringMaxRadix(0); + private static final String FIELD_MEDIA_ITEM = Util.intToStringMaxRadix(1); + private static final String FIELD_PERIOD_INDEX = Util.intToStringMaxRadix(2); + private static final String FIELD_POSITION_MS = Util.intToStringMaxRadix(3); + private static final String FIELD_CONTENT_POSITION_MS = Util.intToStringMaxRadix(4); + private static final String FIELD_AD_GROUP_INDEX = Util.intToStringMaxRadix(5); + private static final String FIELD_AD_INDEX_IN_AD_GROUP = Util.intToStringMaxRadix(6); /** - * Sets the attributes for audio playback, used by the underlying audio track. If not set, the - * default audio attributes will be used. They are suitable for general media playback. - * - *
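These PositionInfo instances are what Player.Listener#onPositionDiscontinuity(PositionInfo, PositionInfo, int) delivers. A sketch that uses the ad fields to spot a content-to-ad transition; the listener class and log tag are assumptions.

```java
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Player;

final class DiscontinuityLogger implements Player.Listener {
  // Sketch: PositionInfo carries the media item index and, for ads, the ad
  // group/ad indices, so a listener can tell a content-to-ad transition apart
  // from an ordinary seek.
  @Override
  public void onPositionDiscontinuity(
      Player.PositionInfo oldPosition, Player.PositionInfo newPosition, int reason) {
    boolean enteredAd =
        oldPosition.adGroupIndex == C.INDEX_UNSET && newPosition.adGroupIndex != C.INDEX_UNSET;
    if (enteredAd) {
      android.util.Log.d(
          "Discontinuity",
          "Ad started in item " + newPosition.mediaItemIndex
              + " at " + newPosition.positionMs + " ms");
    }
  }
}
```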

      Setting the audio attributes during playback may introduce a short gap in audio output as - * the audio track is recreated. A new audio session id will also be generated. - * - *

      If tunneling is enabled by the track selector, the specified audio attributes will be - * ignored, but they will take effect if audio is later played without tunneling. + * {@inheritDoc} * - *

      If the device is running a build before platform API version 21, audio attributes cannot - * be set directly on the underlying audio track. In this case, the usage will be mapped onto an - * equivalent stream type using {@link Util#getStreamTypeForAudioUsage(int)}. - * - *

      If audio focus should be handled, the {@link AudioAttributes#usage} must be {@link - * C#USAGE_MEDIA} or {@link C#USAGE_GAME}. Other usages will throw an {@link - * IllegalArgumentException}. + *

      It omits the {@link #windowUid} and {@link #periodUid} fields. The {@link #windowUid} and + * {@link #periodUid} of an instance restored by {@link #CREATOR} will always be {@code null}. + */ + @Override + public Bundle toBundle() { + return toBundle(/* canAccessCurrentMediaItem= */ true, /* canAccessTimeline= */ true); + } + + /** + * Returns a {@link Bundle} representing the information stored in this object, filtered by + * available commands. * - * @param audioAttributes The attributes to use for audio playback. - * @param handleAudioFocus True if the player should handle audio focus, false otherwise. + * @param canAccessCurrentMediaItem Whether the {@link Bundle} should contain information + * accessbile with {@link #COMMAND_GET_CURRENT_MEDIA_ITEM}. + * @param canAccessTimeline Whether the {@link Bundle} should contain information accessbile + * with {@link #COMMAND_GET_TIMELINE}. */ - void setAudioAttributes(AudioAttributes audioAttributes, boolean handleAudioFocus); + public Bundle toBundle(boolean canAccessCurrentMediaItem, boolean canAccessTimeline) { + Bundle bundle = new Bundle(); + bundle.putInt(FIELD_MEDIA_ITEM_INDEX, canAccessTimeline ? mediaItemIndex : 0); + if (mediaItem != null && canAccessCurrentMediaItem) { + bundle.putBundle(FIELD_MEDIA_ITEM, mediaItem.toBundle()); + } + bundle.putInt(FIELD_PERIOD_INDEX, canAccessTimeline ? periodIndex : 0); + bundle.putLong(FIELD_POSITION_MS, canAccessCurrentMediaItem ? positionMs : 0); + bundle.putLong(FIELD_CONTENT_POSITION_MS, canAccessCurrentMediaItem ? contentPositionMs : 0); + bundle.putInt(FIELD_AD_GROUP_INDEX, canAccessCurrentMediaItem ? adGroupIndex : C.INDEX_UNSET); + bundle.putInt( + FIELD_AD_INDEX_IN_AD_GROUP, canAccessCurrentMediaItem ? adIndexInAdGroup : C.INDEX_UNSET); + return bundle; + } - /** Returns the attributes for audio playback. */ - AudioAttributes getAudioAttributes(); + /** Object that can restore {@link PositionInfo} from a {@link Bundle}. */ + public static final Creator CREATOR = PositionInfo::fromBundle; + + private static PositionInfo fromBundle(Bundle bundle) { + int mediaItemIndex = bundle.getInt(FIELD_MEDIA_ITEM_INDEX, /* defaultValue= */ 0); + @Nullable Bundle mediaItemBundle = bundle.getBundle(FIELD_MEDIA_ITEM); + @Nullable + MediaItem mediaItem = + mediaItemBundle == null ? null : MediaItem.CREATOR.fromBundle(mediaItemBundle); + int periodIndex = bundle.getInt(FIELD_PERIOD_INDEX, /* defaultValue= */ 0); + long positionMs = bundle.getLong(FIELD_POSITION_MS, /* defaultValue= */ 0); + long contentPositionMs = bundle.getLong(FIELD_CONTENT_POSITION_MS, /* defaultValue= */ 0); + int adGroupIndex = bundle.getInt(FIELD_AD_GROUP_INDEX, /* defaultValue= */ C.INDEX_UNSET); + int adIndexInAdGroup = + bundle.getInt(FIELD_AD_INDEX_IN_AD_GROUP, /* defaultValue= */ C.INDEX_UNSET); + return new PositionInfo( + /* windowUid= */ null, + mediaItemIndex, + mediaItem, + /* periodUid= */ null, + periodIndex, + positionMs, + contentPositionMs, + adGroupIndex, + adIndexInAdGroup); + } + } + + /** + * A set of {@linkplain Command commands}. + * + *

      Instances are immutable. + */ + final class Commands implements Bundleable { + + /** A builder for {@link Commands} instances. */ + public static final class Builder { + + private static final @Command int[] SUPPORTED_COMMANDS = { + COMMAND_PLAY_PAUSE, + COMMAND_PREPARE, + COMMAND_STOP, + COMMAND_SEEK_TO_DEFAULT_POSITION, + COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM, + COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM, + COMMAND_SEEK_TO_PREVIOUS, + COMMAND_SEEK_TO_NEXT_MEDIA_ITEM, + COMMAND_SEEK_TO_NEXT, + COMMAND_SEEK_TO_MEDIA_ITEM, + COMMAND_SEEK_BACK, + COMMAND_SEEK_FORWARD, + COMMAND_SET_SPEED_AND_PITCH, + COMMAND_SET_SHUFFLE_MODE, + COMMAND_SET_REPEAT_MODE, + COMMAND_GET_CURRENT_MEDIA_ITEM, + COMMAND_GET_TIMELINE, + COMMAND_GET_MEDIA_ITEMS_METADATA, + COMMAND_SET_MEDIA_ITEMS_METADATA, + COMMAND_SET_MEDIA_ITEM, + COMMAND_CHANGE_MEDIA_ITEMS, + COMMAND_GET_AUDIO_ATTRIBUTES, + COMMAND_GET_VOLUME, + COMMAND_GET_DEVICE_VOLUME, + COMMAND_SET_VOLUME, + COMMAND_SET_DEVICE_VOLUME, + COMMAND_ADJUST_DEVICE_VOLUME, + COMMAND_SET_VIDEO_SURFACE, + COMMAND_GET_TEXT, + COMMAND_SET_TRACK_SELECTION_PARAMETERS, + COMMAND_GET_TRACKS, + }; + + private final FlagSet.Builder flagsBuilder; + + /** Creates a builder. */ + public Builder() { + flagsBuilder = new FlagSet.Builder(); + } + + private Builder(Commands commands) { + flagsBuilder = new FlagSet.Builder(); + flagsBuilder.addAll(commands.flags); + } + + /** + * Adds a {@link Command}. + * + * @param command A {@link Command}. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder add(@Command int command) { + flagsBuilder.add(command); + return this; + } + + /** + * Adds a {@link Command} if the provided condition is true. Does nothing otherwise. + * + * @param command A {@link Command}. + * @param condition A condition. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder addIf(@Command int command, boolean condition) { + flagsBuilder.addIf(command, condition); + return this; + } + + /** + * Adds {@linkplain Command commands}. + * + * @param commands The {@linkplain Command commands} to add. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder addAll(@Command int... commands) { + flagsBuilder.addAll(commands); + return this; + } + + /** + * Adds {@link Commands}. + * + * @param commands The set of {@linkplain Command commands} to add. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder addAll(Commands commands) { + flagsBuilder.addAll(commands.flags); + return this; + } + + /** + * Adds all existing {@linkplain Command commands}. + * + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder addAllCommands() { + flagsBuilder.addAll(SUPPORTED_COMMANDS); + return this; + } + + /** + * Removes a {@link Command}. + * + * @param command A {@link Command}. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder remove(@Command int command) { + flagsBuilder.remove(command); + return this; + } + + /** + * Removes a {@link Command} if the provided condition is true. Does nothing otherwise. 
+ * + * @param command A {@link Command}. + * @param condition A condition. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder removeIf(@Command int command, boolean condition) { + flagsBuilder.removeIf(command, condition); + return this; + } + + /** + * Removes {@linkplain Command commands}. + * + * @param commands The {@linkplain Command commands} to remove. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder removeAll(@Command int... commands) { + flagsBuilder.removeAll(commands); + return this; + } + + /** + * Builds a {@link Commands} instance. + * + * @throws IllegalStateException If this method has already been called. + */ + public Commands build() { + return new Commands(flagsBuilder.build()); + } + } + + /** An empty set of commands. */ + public static final Commands EMPTY = new Builder().build(); + + private final FlagSet flags; + + private Commands(FlagSet flags) { + this.flags = flags; + } + + /** Returns a {@link Builder} initialized with the values of this instance. */ + public Builder buildUpon() { + return new Builder(this); + } - /** Returns the audio session identifier, or {@link C#AUDIO_SESSION_ID_UNSET} if not set. */ - int getAudioSessionId(); + /** Returns whether the set of commands contains the specified {@link Command}. */ + public boolean contains(@Command int command) { + return flags.contains(command); + } - /** Sets information on an auxiliary audio effect to attach to the underlying audio track. */ - void setAuxEffectInfo(AuxEffectInfo auxEffectInfo); + /** Returns whether the set of commands contains at least one of the given {@code commands}. */ + public boolean containsAny(@Command int... commands) { + return flags.containsAny(commands); + } - /** Detaches any previously attached auxiliary audio effect from the underlying audio track. */ - void clearAuxEffectInfo(); + /** Returns the number of commands in this set. */ + public int size() { + return flags.size(); + } /** - * Sets the audio volume, with 0 being silence and 1 being unity gain. + * Returns the {@link Command} at the given index. * - * @param audioVolume The audio volume. + * @param index The index. Must be between 0 (inclusive) and {@link #size()} (exclusive). + * @return The {@link Command} at the given index. + * @throws IndexOutOfBoundsException If index is outside the allowed range. */ - void setVolume(float audioVolume); + public @Command int get(int index) { + return flags.get(index); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof Commands)) { + return false; + } + Commands commands = (Commands) obj; + return flags.equals(commands.flags); + } + + @Override + public int hashCode() { + return flags.hashCode(); + } + + // Bundleable implementation. + + private static final String FIELD_COMMANDS = Util.intToStringMaxRadix(0); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + ArrayList commandsBundle = new ArrayList<>(); + for (int i = 0; i < flags.size(); i++) { + commandsBundle.add(flags.get(i)); + } + bundle.putIntegerArrayList(FIELD_COMMANDS, commandsBundle); + return bundle; + } + + /** Object that can restore {@link Commands} from a {@link Bundle}. 
*/ + public static final Creator CREATOR = Commands::fromBundle; - /** Returns the audio volume, with 0 being silence and 1 being unity gain. */ - float getVolume(); + private static Commands fromBundle(Bundle bundle) { + @Nullable ArrayList commands = bundle.getIntegerArrayList(FIELD_COMMANDS); + if (commands == null) { + return Commands.EMPTY; + } + Builder builder = new Builder(); + for (int i = 0; i < commands.size(); i++) { + builder.add(commands.get(i)); + } + return builder.build(); + } } - /** The video component of a {@link Player}. */ - interface VideoComponent { + /** + * Listener for changes in a {@link Player}. + * + *
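As a quick illustration of the Commands builder added above, a minimal sketch of how a caller might assemble and inspect a command set. This is not taken from the sources in this diff; the com.google.android.exoplayer2 package location and the class/variable names are assumptions based on the bundled ExoPlayer copy.

import com.google.android.exoplayer2.Player;

final class CommandsSketch {
  /** Builds an immutable command set and checks membership. */
  static boolean canTogglePlayback(boolean allowSeekBack) {
    Player.Commands commands =
        new Player.Commands.Builder()
            .addAll(Player.COMMAND_PLAY_PAUSE, Player.COMMAND_SEEK_TO_NEXT)
            .addIf(Player.COMMAND_SEEK_BACK, allowSeekBack)
            .build();
    // Commands is immutable; buildUpon() yields a Builder copy, so remove() leaves the original untouched.
    Player.Commands withoutNext =
        commands.buildUpon().remove(Player.COMMAND_SEEK_TO_NEXT).build();
    return commands.contains(Player.COMMAND_PLAY_PAUSE)
        && !withoutNext.contains(Player.COMMAND_SEEK_TO_NEXT);
  }
}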

      All methods have no-op default implementations to allow selective overrides. + * + *

      If the return value of a {@link Player} getter changes due to a change in {@linkplain + * #onAvailableCommandsChanged(Commands) command availability}, the corresponding listener + * method(s) will be invoked. If the return value of a {@link Player} getter does not change + * because the corresponding command is {@linkplain #onAvailableCommandsChanged(Commands) not + * available}, the corresponding listener method will not be invoked. + */ + interface Listener { /** - * Sets the {@link VideoScalingMode}. + * Called when one or more player states changed. + * + *

      State changes and events that happen within one {@link Looper} message queue iteration are + * reported together and only after all individual callbacks were triggered. * - * @param videoScalingMode The {@link VideoScalingMode}. + *

      Listeners should prefer this method over individual callbacks in the following cases: + * + *

        + *
      • They intend to trigger the same logic for multiple events (e.g. when updating a UI for + * both {@link #onPlaybackStateChanged(int)} and {@link #onPlayWhenReadyChanged(boolean, + * int)}). + *
      • They need access to the {@link Player} object to trigger further events (e.g. to call + * {@link Player#seekTo(long)} after a {@link #onMediaItemTransition(MediaItem, int)}). + *
      • They intend to use multiple state values together or in combination with {@link Player} + * getter methods. For example using {@link #getCurrentMediaItemIndex()} with the {@code + * timeline} provided in {@link #onTimelineChanged(Timeline, int)} is only safe from + * within this method. + *
      • They are interested in events that logically happened together (e.g. {@link + * #onPlaybackStateChanged(int)} to {@link #STATE_BUFFERING} because of {@link + * #onMediaItemTransition(MediaItem, int)}). + *
      + * + * @param player The {@link Player} whose state changed. Use the getters to obtain the latest + * states. + * @param events The {@link Events} that happened in this iteration, indicating which player + * states changed. */ - void setVideoScalingMode(@VideoScalingMode int videoScalingMode); - - /** Returns the {@link VideoScalingMode}. */ - @VideoScalingMode - int getVideoScalingMode(); + default void onEvents(Player player, Events events) {} /** - * Adds a listener to receive video events. + * Called when the value of {@link Player#getCurrentTimeline()} changes. + * + *
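A sketch of the listener pattern recommended in the list above, reacting once per message-queue iteration in onEvents rather than in each individual callback. Illustrative only: updatePlayPauseButton is a hypothetical UI helper, and the package name assumes the bundled ExoPlayer copy.

import com.google.android.exoplayer2.Player;

final class OnEventsSketch {
  static void register(Player player) {
    player.addListener(
        new Player.Listener() {
          @Override
          public void onEvents(Player player, Player.Events events) {
            if (events.containsAny(
                Player.EVENT_PLAYBACK_STATE_CHANGED, Player.EVENT_PLAY_WHEN_READY_CHANGED)) {
              // The player's getters already reflect the combined state of this iteration.
              boolean showPause =
                  player.getPlayWhenReady() && player.getPlaybackState() == Player.STATE_READY;
              updatePlayPauseButton(showPause);
            }
          }
        });
  }

  // Hypothetical UI hook, not part of the Player API.
  static void updatePlayPauseButton(boolean showPause) {}
}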

      Note that the current {@link MediaItem} or playback position may change as a result of a + * timeline change. If playback can't continue smoothly because of this timeline change, a + * separate {@link #onPositionDiscontinuity(PositionInfo, PositionInfo, int)} callback will be + * triggered. * - * @param listener The listener to register. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param timeline The latest timeline. Never null, but may be empty. + * @param reason The {@link TimelineChangeReason} responsible for this timeline change. */ - void addVideoListener(VideoListener listener); + default void onTimelineChanged(Timeline timeline, @TimelineChangeReason int reason) {} /** - * Removes a listener of video events. + * Called when playback transitions to a media item or starts repeating a media item according + * to the current {@link #getRepeatMode() repeat mode}. + * + *

      Note that this callback is also called when the value of {@link #getCurrentTimeline()} + * becomes non-empty or empty. * - * @param listener The listener to unregister. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param mediaItem The {@link MediaItem}. May be null if the playlist becomes empty. + * @param reason The reason for the transition. */ - void removeVideoListener(VideoListener listener); + default void onMediaItemTransition( + @Nullable MediaItem mediaItem, @MediaItemTransitionReason int reason) {} /** - * Sets a listener to receive video frame metadata events. + * Called when the value of {@link Player#getCurrentTracks()} changes. * - *

      This method is intended to be called by the same component that sets the {@link Surface} - * onto which video will be rendered. If using ExoPlayer's standard UI components, this method - * should not be called directly from application code. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. * - * @param listener The listener. + * @param tracks The available tracks information. Never null, but may be of length zero. */ - void setVideoFrameMetadataListener(VideoFrameMetadataListener listener); + default void onTracksChanged(Tracks tracks) {} /** - * Clears the listener which receives video frame metadata events if it matches the one passed. - * Else does nothing. + * Called when the value of {@link Player#getMediaMetadata()} changes. + * + *

      This method may be called multiple times in quick succession. * - * @param listener The listener to clear. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param mediaMetadata The combined {@link MediaMetadata}. */ - void clearVideoFrameMetadataListener(VideoFrameMetadataListener listener); + default void onMediaMetadataChanged(MediaMetadata mediaMetadata) {} /** - * Sets a listener of camera motion events. + * Called when the value of {@link Player#getPlaylistMetadata()} changes. * - * @param listener The listener. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. */ - void setCameraMotionListener(CameraMotionListener listener); + default void onPlaylistMetadataChanged(MediaMetadata mediaMetadata) {} /** - * Clears the listener which receives camera motion events if it matches the one passed. Else - * does nothing. + * Called when the player starts or stops loading the source. * - * @param listener The listener to clear. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param isLoading Whether the source is currently being loaded. */ - void clearCameraMotionListener(CameraMotionListener listener); + default void onIsLoadingChanged(boolean isLoading) {} /** - * Clears any {@link Surface}, {@link SurfaceHolder}, {@link SurfaceView} or {@link TextureView} - * currently set on the player. + * @deprecated Use {@link #onIsLoadingChanged(boolean)} instead. */ - void clearVideoSurface(); + @Deprecated + default void onLoadingChanged(boolean isLoading) {} /** - * Clears the {@link Surface} onto which video is being rendered if it matches the one passed. - * Else does nothing. + * Called when the value returned from {@link #isCommandAvailable(int)} changes for at least one + * {@link Command}. * - * @param surface The surface to clear. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param availableCommands The available {@link Commands}. */ - void clearVideoSurface(@Nullable Surface surface); + default void onAvailableCommandsChanged(Commands availableCommands) {} /** - * Sets the {@link Surface} onto which video will be rendered. The caller is responsible for - * tracking the lifecycle of the surface, and must clear the surface by calling {@code - * setVideoSurface(null)} if the surface is destroyed. + * Called when the value returned from {@link #getTrackSelectionParameters()} changes. * - *

      If the surface is held by a {@link SurfaceView}, {@link TextureView} or {@link - * SurfaceHolder} then it's recommended to use {@link #setVideoSurfaceView(SurfaceView)}, {@link - * #setVideoTextureView(TextureView)} or {@link #setVideoSurfaceHolder(SurfaceHolder)} rather - * than this method, since passing the holder allows the player to track the lifecycle of the - * surface automatically. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. * - * @param surface The {@link Surface}. + * @param parameters The new {@link TrackSelectionParameters}. */ - void setVideoSurface(@Nullable Surface surface); + default void onTrackSelectionParametersChanged(TrackSelectionParameters parameters) {} /** - * Sets the {@link SurfaceHolder} that holds the {@link Surface} onto which video will be - * rendered. The player will track the lifecycle of the surface automatically. - * - * @param surfaceHolder The surface holder. + * @deprecated Use {@link #onPlaybackStateChanged(int)} and {@link + * #onPlayWhenReadyChanged(boolean, int)} instead. */ - void setVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder); + @Deprecated + default void onPlayerStateChanged(boolean playWhenReady, @State int playbackState) {} /** - * Clears the {@link SurfaceHolder} that holds the {@link Surface} onto which video is being - * rendered if it matches the one passed. Else does nothing. + * Called when the value returned from {@link #getPlaybackState()} changes. * - * @param surfaceHolder The surface holder to clear. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param playbackState The new playback {@link State}. */ - void clearVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder); + default void onPlaybackStateChanged(@State int playbackState) {} /** - * Sets the {@link SurfaceView} onto which video will be rendered. The player will track the - * lifecycle of the surface automatically. + * Called when the value returned from {@link #getPlayWhenReady()} changes. + * + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. * - * @param surfaceView The surface view. + * @param playWhenReady Whether playback will proceed when ready. + * @param reason The {@link PlayWhenReadyChangeReason} for the change. */ - void setVideoSurfaceView(@Nullable SurfaceView surfaceView); + default void onPlayWhenReadyChanged( + boolean playWhenReady, @PlayWhenReadyChangeReason int reason) {} /** - * Clears the {@link SurfaceView} onto which video is being rendered if it matches the one - * passed. Else does nothing. + * Called when the value returned from {@link #getPlaybackSuppressionReason()} changes. * - * @param surfaceView The texture view to clear. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param playbackSuppressionReason The current {@link PlaybackSuppressionReason}. */ - void clearVideoSurfaceView(@Nullable SurfaceView surfaceView); + default void onPlaybackSuppressionReasonChanged( + @PlaybackSuppressionReason int playbackSuppressionReason) {} /** - * Sets the {@link TextureView} onto which video will be rendered. The player will track the - * lifecycle of the surface automatically. + * Called when the value of {@link #isPlaying()} changes. + * + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. * - * @param textureView The texture view. + * @param isPlaying Whether the player is playing. */ - void setVideoTextureView(@Nullable TextureView textureView); + default void onIsPlayingChanged(boolean isPlaying) {} /** - * Clears the {@link TextureView} onto which video is being rendered if it matches the one - * passed. Else does nothing. + * Called when the value of {@link #getRepeatMode()} changes. + * + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. * - * @param textureView The texture view to clear. + * @param repeatMode The {@link RepeatMode} used for playback. */ - void clearVideoTextureView(@Nullable TextureView textureView); + default void onRepeatModeChanged(@RepeatMode int repeatMode) {} /** - * Sets the video decoder output buffer renderer. This is intended for use only with extension - * renderers that accept {@link C#MSG_SET_VIDEO_DECODER_OUTPUT_BUFFER_RENDERER}. For most use - * cases, an output surface or view should be passed via {@link #setVideoSurface(Surface)} or - * {@link #setVideoSurfaceView(SurfaceView)} instead. + * Called when the value of {@link #getShuffleModeEnabled()} changes. + * + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. * - * @param videoDecoderOutputBufferRenderer The video decoder output buffer renderer, or {@code - * null} to clear the output buffer renderer. + * @param shuffleModeEnabled Whether shuffling of {@linkplain MediaItem media items} is enabled. */ - void setVideoDecoderOutputBufferRenderer( - @Nullable VideoDecoderOutputBufferRenderer videoDecoderOutputBufferRenderer); + default void onShuffleModeEnabledChanged(boolean shuffleModeEnabled) {} - /** Clears the video decoder output buffer renderer. */ - void clearVideoDecoderOutputBufferRenderer(); + /** + * Called when an error occurs. The playback state will transition to {@link #STATE_IDLE} + * immediately after this method is called. The player instance can still be used, and {@link + * #release()} must still be called on the player should it no longer be required. + * + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + *

      Implementations of Player may pass an instance of a subclass of {@link PlaybackException} + * to this method in order to include more information about the error. + * + * @param error The error. + */ + default void onPlayerError(PlaybackException error) {} /** - * Clears the video decoder output buffer renderer if it matches the one passed. Else does - * nothing. + * Called when the {@link PlaybackException} returned by {@link #getPlayerError()} changes. * - * @param videoDecoderOutputBufferRenderer The video decoder output buffer renderer to clear. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + *

      Implementations of Player may pass an instance of a subclass of {@link PlaybackException} + * to this method in order to include more information about the error. + * + * @param error The new error, or null if the error is being cleared. */ - void clearVideoDecoderOutputBufferRenderer( - @Nullable VideoDecoderOutputBufferRenderer videoDecoderOutputBufferRenderer); - } + default void onPlayerErrorChanged(@Nullable PlaybackException error) {} - /** The text component of a {@link Player}. */ - interface TextComponent { + /** + * @deprecated Use {@link #onPositionDiscontinuity(PositionInfo, PositionInfo, int)} instead. + */ + @Deprecated + default void onPositionDiscontinuity(@DiscontinuityReason int reason) {} /** - * Registers an output to receive text events. + * Called when a position discontinuity occurs. + * + *

      A position discontinuity occurs when the playing period changes, the playback position + * jumps within the period currently being played, or when the playing period has been skipped + * or removed. * - * @param listener The output to register. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param oldPosition The position before the discontinuity. + * @param newPosition The position after the discontinuity. + * @param reason The {@link DiscontinuityReason} responsible for the discontinuity. */ - void addTextOutput(TextOutput listener); + default void onPositionDiscontinuity( + PositionInfo oldPosition, PositionInfo newPosition, @DiscontinuityReason int reason) {} /** - * Removes a text output. + * Called when the value of {@link #getPlaybackParameters()} changes. The playback parameters + * may change due to a call to {@link #setPlaybackParameters(PlaybackParameters)}, or the player + * itself may change them (for example, if audio playback switches to passthrough or offload + * mode, where speed adjustment is no longer possible). * - * @param listener The output to remove. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param playbackParameters The playback parameters. */ - void removeTextOutput(TextOutput listener); - } - - /** The metadata component of a {@link Player}. */ - interface MetadataComponent { + default void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {} /** - * Adds a {@link MetadataOutput} to receive metadata. + * Called when the value of {@link #getSeekBackIncrement()} changes. + * + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. * - * @param output The output to register. + * @param seekBackIncrementMs The {@link #seekBack()} increment, in milliseconds. */ - void addMetadataOutput(MetadataOutput output); + default void onSeekBackIncrementChanged(long seekBackIncrementMs) {} /** - * Removes a {@link MetadataOutput}. + * Called when the value of {@link #getSeekForwardIncrement()} changes. * - * @param output The output to remove. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param seekForwardIncrementMs The {@link #seekForward()} increment, in milliseconds. */ - void removeMetadataOutput(MetadataOutput output); - } - - /** - * Listener of changes in player state. All methods have no-op default implementations to allow - * selective overrides. - */ - interface EventListener { + default void onSeekForwardIncrementChanged(long seekForwardIncrementMs) {} /** - * Called when the timeline has been refreshed. + * Called when the value of {@link #getMaxSeekToPreviousPosition()} changes. * - *

      Note that if the timeline has changed then a position discontinuity may also have - * occurred. For example, the current period index may have changed as a result of periods being - * added or removed from the timeline. This will not be reported via a separate call to - * {@link #onPositionDiscontinuity(int)}. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. * - * @param timeline The latest timeline. Never null, but may be empty. - * @param reason The {@link TimelineChangeReason} responsible for this timeline change. + * @param maxSeekToPreviousPositionMs The maximum position for which {@link #seekToPrevious()} + * seeks to the previous position, in milliseconds. */ - @SuppressWarnings("deprecation") - default void onTimelineChanged(Timeline timeline, @TimelineChangeReason int reason) { - Object manifest = null; - if (timeline.getWindowCount() == 1) { - // Legacy behavior was to report the manifest for single window timelines only. - Timeline.Window window = new Timeline.Window(); - manifest = timeline.getWindow(0, window).manifest; - } - // Call deprecated version. - onTimelineChanged(timeline, manifest, reason); - } + default void onMaxSeekToPreviousPositionChanged(long maxSeekToPreviousPositionMs) {} + + /** + * @deprecated Seeks are processed without delay. Listen to {@link + * #onPositionDiscontinuity(PositionInfo, PositionInfo, int)} with reason {@link + * #DISCONTINUITY_REASON_SEEK} instead. + */ + @Deprecated + default void onSeekProcessed() {} /** - * Called when the timeline and/or manifest has been refreshed. + * Called when the audio session ID changes. * - *

      Note that if the timeline has changed then a position discontinuity may also have - * occurred. For example, the current period index may have changed as a result of periods being - * added or removed from the timeline. This will not be reported via a separate call to - * {@link #onPositionDiscontinuity(int)}. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. * - * @param timeline The latest timeline. Never null, but may be empty. - * @param manifest The latest manifest. May be null. - * @param reason The {@link TimelineChangeReason} responsible for this timeline change. - * @deprecated Use {@link #onTimelineChanged(Timeline, int)} instead. The manifest can be - * accessed by using {@link #getCurrentManifest()} or {@code timeline.getWindow(windowIndex, - * window).manifest} for a given window index. + * @param audioSessionId The audio session ID. */ - @Deprecated - default void onTimelineChanged( - Timeline timeline, @Nullable Object manifest, @TimelineChangeReason int reason) {} + default void onAudioSessionIdChanged(int audioSessionId) {} /** - * Called when the available or selected tracks change. + * Called when the value of {@link #getAudioAttributes()} changes. + * + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. * - * @param trackGroups The available tracks. Never null, but may be of length zero. - * @param trackSelections The track selections for each renderer. Never null and always of - * length {@link #getRendererCount()}, but may contain null elements. + * @param audioAttributes The audio attributes. */ - default void onTracksChanged( - TrackGroupArray trackGroups, TrackSelectionArray trackSelections) {} + default void onAudioAttributesChanged(AudioAttributes audioAttributes) {} /** - * Called when the player starts or stops loading the source. + * Called when the value of {@link #getVolume()} changes. * - * @param isLoading Whether the source is currently being loaded. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param volume The new volume, with 0 being silence and 1 being unity gain. */ - default void onLoadingChanged(boolean isLoading) {} + default void onVolumeChanged(float volume) {} /** - * Called when the value returned from either {@link #getPlayWhenReady()} or {@link - * #getPlaybackState()} changes. + * Called when skipping silences is enabled or disabled in the audio stream. * - * @param playWhenReady Whether playback will proceed when ready. - * @param playbackState The new {@link State playback state}. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param skipSilenceEnabled Whether skipping silences in the audio stream is enabled. */ - default void onPlayerStateChanged(boolean playWhenReady, @State int playbackState) {} + default void onSkipSilenceEnabledChanged(boolean skipSilenceEnabled) {} /** - * Called when the value returned from {@link #getPlaybackSuppressionReason()} changes. + * Called when the device information changes * - * @param playbackSuppressionReason The current {@link PlaybackSuppressionReason}. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param deviceInfo The new {@link DeviceInfo}. */ - default void onPlaybackSuppressionReasonChanged( - @PlaybackSuppressionReason int playbackSuppressionReason) {} + default void onDeviceInfoChanged(DeviceInfo deviceInfo) {} /** - * Called when the value of {@link #isPlaying()} changes. + * Called when the value of {@link #getDeviceVolume()} or {@link #isDeviceMuted()} changes. * - * @param isPlaying Whether the player is playing. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param volume The new device volume, with 0 being silence and 1 being unity gain. + * @param muted Whether the device is muted. */ - default void onIsPlayingChanged(boolean isPlaying) {} + default void onDeviceVolumeChanged(int volume, boolean muted) {} /** - * Called when the value of {@link #getRepeatMode()} changes. + * Called each time there's a change in the size of the video being rendered. * - * @param repeatMode The {@link RepeatMode} used for playback. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param videoSize The new size of the video. */ - default void onRepeatModeChanged(@RepeatMode int repeatMode) {} + default void onVideoSizeChanged(VideoSize videoSize) {} /** - * Called when the value of {@link #getShuffleModeEnabled()} changes. + * Called each time there's a change in the size of the surface onto which the video is being + * rendered. + * + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. * - * @param shuffleModeEnabled Whether shuffling of windows is enabled. + * @param width The surface width in pixels. May be {@link C#LENGTH_UNSET} if unknown, or 0 if + * the video is not rendered onto a surface. + * @param height The surface height in pixels. May be {@link C#LENGTH_UNSET} if unknown, or 0 if + * the video is not rendered onto a surface. */ - default void onShuffleModeEnabledChanged(boolean shuffleModeEnabled) {} + default void onSurfaceSizeChanged(int width, int height) {} /** - * Called when an error occurs. The playback state will transition to {@link #STATE_IDLE} - * immediately after this method is called. The player instance can still be used, and {@link - * #release()} must still be called on the player should it no longer be required. + * Called when a frame is rendered for the first time since setting the surface, or since the + * renderer was reset, or since the stream being rendered was changed. * - * @param error The error. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. */ - default void onPlayerError(ExoPlaybackException error) {} + default void onRenderedFirstFrame() {} /** - * Called when a position discontinuity occurs without a change to the timeline. A position - * discontinuity occurs when the current window or period index changes (as a result of playback - * transitioning from one period in the timeline to the next), or when the playback position - * jumps within the period currently being played (as a result of a seek being performed, or - * when the source introduces a discontinuity internally). + * Called when the value of {@link #getCurrentCues()} changes. * - *

      When a position discontinuity occurs as a result of a change to the timeline this method - * is not called. {@link #onTimelineChanged(Timeline, int)} is called in this case. + *

      Both this method and {@link #onCues(CueGroup)} are called when there is a change in the + * cues. You should only implement one or the other. * - * @param reason The {@link DiscontinuityReason} responsible for the discontinuity. + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @deprecated Use {@link #onCues(CueGroup)} instead. */ - default void onPositionDiscontinuity(@DiscontinuityReason int reason) {} + @Deprecated + default void onCues(List cues) {} /** - * Called when the current playback parameters change. The playback parameters may change due to - * a call to {@link #setPlaybackParameters(PlaybackParameters)}, or the player itself may change - * them (for example, if audio playback switches to passthrough mode, where speed adjustment is - * no longer possible). + * Called when the value of {@link #getCurrentCues()} changes. * - * @param playbackParameters The playback parameters. + *

      Both this method and {@link #onCues(List)} are called when there is a change in the cues. + * You should only implement one or the other. + * + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. */ - default void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {} + default void onCues(CueGroup cueGroup) {} /** - * Called when all pending seek requests have been processed by the player. This is guaranteed - * to happen after any necessary changes to the player state were reported to {@link - * #onPlayerStateChanged(boolean, int)}. + * Called when there is metadata associated with the current playback time. + * + *

      {@link #onEvents(Player, Events)} will also be called to report this event along with + * other events that happen in the same {@link Looper} message queue iteration. + * + * @param metadata The metadata. */ - default void onSeekProcessed() {} + default void onMetadata(Metadata metadata) {} } /** - * @deprecated Use {@link EventListener} interface directly for selective overrides as all methods - * are implemented as no-op default methods. - */ - @Deprecated - abstract class DefaultEventListener implements EventListener { - - @Override - public void onTimelineChanged(Timeline timeline, @TimelineChangeReason int reason) { - Object manifest = null; - if (timeline.getWindowCount() == 1) { - // Legacy behavior was to report the manifest for single window timelines only. - Timeline.Window window = new Timeline.Window(); - manifest = timeline.getWindow(0, window).manifest; - } - // Call deprecated version. - onTimelineChanged(timeline, manifest, reason); - } - - @Override - @SuppressWarnings("deprecation") - public void onTimelineChanged( - Timeline timeline, @Nullable Object manifest, @TimelineChangeReason int reason) { - // Call deprecated version. Otherwise, do nothing. - onTimelineChanged(timeline, manifest); - } - - /** @deprecated Use {@link EventListener#onTimelineChanged(Timeline, int)} instead. */ - @Deprecated - public void onTimelineChanged(Timeline timeline, @Nullable Object manifest) { - // Do nothing. - } - } - - /** - * Playback state. One of {@link #STATE_IDLE}, {@link #STATE_BUFFERING}, {@link #STATE_READY} or - * {@link #STATE_ENDED}. + * Playback state. One of {@link #STATE_IDLE}, {@link #STATE_BUFFERING}, {@link #STATE_READY} or + * {@link #STATE_ENDED}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({STATE_IDLE, STATE_BUFFERING, STATE_READY, STATE_ENDED}) @interface State {} /** - * The player does not have any media to play. + * The player is idle, meaning it holds only limited resources. The player must be {@link + * #prepare() prepared} before it will play the media. */ int STATE_IDLE = 1; /** - * The player is not able to immediately play from its current position. This state typically - * occurs when more data needs to be loaded. + * The player is not able to immediately play the media, but is doing work toward being able to do + * so. This state typically occurs when the player needs to buffer more data before playback can + * start. */ int STATE_BUFFERING = 2; /** @@ -543,18 +1073,51 @@ public void onTimelineChanged(Timeline timeline, @Nullable Object manifest) { * {@link #getPlayWhenReady()} is true, and paused otherwise. */ int STATE_READY = 3; + /** The player has finished playing the media. */ + int STATE_ENDED = 4; + /** - * The player has finished playing the media. + * Reasons for {@link #getPlayWhenReady() playWhenReady} changes. One of {@link + * #PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST}, {@link + * #PLAY_WHEN_READY_CHANGE_REASON_AUDIO_FOCUS_LOSS}, {@link + * #PLAY_WHEN_READY_CHANGE_REASON_AUDIO_BECOMING_NOISY}, {@link + * #PLAY_WHEN_READY_CHANGE_REASON_REMOTE} or {@link + * #PLAY_WHEN_READY_CHANGE_REASON_END_OF_MEDIA_ITEM}. */ - int STATE_ENDED = 4; + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. 
+ @Documented + @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef({ + PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST, + PLAY_WHEN_READY_CHANGE_REASON_AUDIO_FOCUS_LOSS, + PLAY_WHEN_READY_CHANGE_REASON_AUDIO_BECOMING_NOISY, + PLAY_WHEN_READY_CHANGE_REASON_REMOTE, + PLAY_WHEN_READY_CHANGE_REASON_END_OF_MEDIA_ITEM + }) + @interface PlayWhenReadyChangeReason {} + /** Playback has been started or paused by a call to {@link #setPlayWhenReady(boolean)}. */ + int PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST = 1; + /** Playback has been paused because of a loss of audio focus. */ + int PLAY_WHEN_READY_CHANGE_REASON_AUDIO_FOCUS_LOSS = 2; + /** Playback has been paused to avoid becoming noisy. */ + int PLAY_WHEN_READY_CHANGE_REASON_AUDIO_BECOMING_NOISY = 3; + /** Playback has been started or paused because of a remote change. */ + int PLAY_WHEN_READY_CHANGE_REASON_REMOTE = 4; + /** Playback has been paused at the end of a media item. */ + int PLAY_WHEN_READY_CHANGE_REASON_END_OF_MEDIA_ITEM = 5; /** * Reason why playback is suppressed even though {@link #getPlayWhenReady()} is {@code true}. One * of {@link #PLAYBACK_SUPPRESSION_REASON_NONE} or {@link * #PLAYBACK_SUPPRESSION_REASON_TRANSIENT_AUDIO_FOCUS_LOSS}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({ PLAYBACK_SUPPRESSION_REASON_NONE, PLAYBACK_SUPPRESSION_REASON_TRANSIENT_AUDIO_FOCUS_LOSS @@ -569,43 +1132,63 @@ public void onTimelineChanged(Timeline timeline, @Nullable Object manifest) { * Repeat modes for playback. One of {@link #REPEAT_MODE_OFF}, {@link #REPEAT_MODE_ONE} or {@link * #REPEAT_MODE_ALL}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({REPEAT_MODE_OFF, REPEAT_MODE_ONE, REPEAT_MODE_ALL}) @interface RepeatMode {} /** - * Normal playback without repetition. + * Normal playback without repetition. "Previous" and "Next" actions move to the previous and next + * {@link MediaItem} respectively, and do nothing when there is no previous or next {@link + * MediaItem} to move to. */ int REPEAT_MODE_OFF = 0; /** - * "Repeat One" mode to repeat the currently playing window infinitely. + * Repeats the currently playing {@link MediaItem} infinitely during ongoing playback. "Previous" + * and "Next" actions behave as they do in {@link #REPEAT_MODE_OFF}, moving to the previous and + * next {@link MediaItem} respectively, and doing nothing when there is no previous or next {@link + * MediaItem} to move to. */ int REPEAT_MODE_ONE = 1; /** - * "Repeat All" mode to repeat the entire timeline infinitely. + * Repeats the entire timeline infinitely. "Previous" and "Next" actions behave as they do in + * {@link #REPEAT_MODE_OFF}, but with looping at the ends so that "Previous" when playing the + * first {@link MediaItem} will move to the last {@link MediaItem}, and "Next" when playing the + * last {@link MediaItem} will move to the first {@link MediaItem}. */ int REPEAT_MODE_ALL = 2; /** - * Reasons for position discontinuities. One of {@link #DISCONTINUITY_REASON_PERIOD_TRANSITION}, + * Reasons for position discontinuities. 
One of {@link #DISCONTINUITY_REASON_AUTO_TRANSITION}, * {@link #DISCONTINUITY_REASON_SEEK}, {@link #DISCONTINUITY_REASON_SEEK_ADJUSTMENT}, {@link - * #DISCONTINUITY_REASON_AD_INSERTION} or {@link #DISCONTINUITY_REASON_INTERNAL}. + * #DISCONTINUITY_REASON_SKIP}, {@link #DISCONTINUITY_REASON_REMOVE} or {@link + * #DISCONTINUITY_REASON_INTERNAL}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({ - DISCONTINUITY_REASON_PERIOD_TRANSITION, + DISCONTINUITY_REASON_AUTO_TRANSITION, DISCONTINUITY_REASON_SEEK, DISCONTINUITY_REASON_SEEK_ADJUSTMENT, - DISCONTINUITY_REASON_AD_INSERTION, + DISCONTINUITY_REASON_SKIP, + DISCONTINUITY_REASON_REMOVE, DISCONTINUITY_REASON_INTERNAL }) @interface DiscontinuityReason {} /** * Automatic playback transition from one period in the timeline to the next. The period index may * be the same as it was before the discontinuity in case the current period is repeated. + * + *

      This reason also indicates an automatic transition from the content period to an inserted ad + * period or vice versa. Or a transition caused by another player (e.g. multiple controllers can + * control the same playback on a remote device). */ - int DISCONTINUITY_REASON_PERIOD_TRANSITION = 0; + int DISCONTINUITY_REASON_AUTO_TRANSITION = 0; /** Seek within the current period or to another period. */ int DISCONTINUITY_REASON_SEEK = 1; /** @@ -613,428 +1196,1889 @@ public void onTimelineChanged(Timeline timeline, @Nullable Object manifest) { * permitted to be inexact. */ int DISCONTINUITY_REASON_SEEK_ADJUSTMENT = 2; - /** Discontinuity to or from an ad within one period in the timeline. */ - int DISCONTINUITY_REASON_AD_INSERTION = 3; - /** Discontinuity introduced internally by the source. */ - int DISCONTINUITY_REASON_INTERNAL = 4; + /** Discontinuity introduced by a skipped period (for instance a skipped ad). */ + int DISCONTINUITY_REASON_SKIP = 3; + /** Discontinuity caused by the removal of the current period from the {@link Timeline}. */ + int DISCONTINUITY_REASON_REMOVE = 4; + /** Discontinuity introduced internally (e.g. by the source). */ + int DISCONTINUITY_REASON_INTERNAL = 5; /** - * Reasons for timeline changes. One of {@link #TIMELINE_CHANGE_REASON_PREPARED}, {@link - * #TIMELINE_CHANGE_REASON_RESET} or {@link #TIMELINE_CHANGE_REASON_DYNAMIC}. + * Reasons for timeline changes. One of {@link #TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED} or {@link + * #TIMELINE_CHANGE_REASON_SOURCE_UPDATE}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) - @IntDef({ - TIMELINE_CHANGE_REASON_PREPARED, - TIMELINE_CHANGE_REASON_RESET, - TIMELINE_CHANGE_REASON_DYNAMIC - }) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef({TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED, TIMELINE_CHANGE_REASON_SOURCE_UPDATE}) @interface TimelineChangeReason {} - /** Timeline and manifest changed as a result of a player initialization with new media. */ - int TIMELINE_CHANGE_REASON_PREPARED = 0; - /** Timeline and manifest changed as a result of a player reset. */ - int TIMELINE_CHANGE_REASON_RESET = 1; + /** Timeline changed as a result of a change of the playlist items or the order of the items. */ + int TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED = 0; /** - * Timeline or manifest changed as a result of an dynamic update introduced by the played media. + * Timeline changed as a result of a source update (e.g. result of a dynamic update by the played + * media). + * + *
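To make the renamed and extended discontinuity reasons above easier to follow, a hedged sketch of how a listener might branch on them. It assumes the bundled ExoPlayer Player API; the branch bodies are placeholders.

import com.google.android.exoplayer2.Player;

final class DiscontinuitySketch implements Player.Listener {
  @Override
  public void onPositionDiscontinuity(
      Player.PositionInfo oldPosition, Player.PositionInfo newPosition, int reason) {
    switch (reason) {
      case Player.DISCONTINUITY_REASON_AUTO_TRANSITION:
        // Automatic transition between periods, including content <-> ad transitions.
        break;
      case Player.DISCONTINUITY_REASON_SEEK:
      case Player.DISCONTINUITY_REASON_SEEK_ADJUSTMENT:
        // A seek, possibly adjusted to the nearest seekable position.
        break;
      case Player.DISCONTINUITY_REASON_REMOVE:
        // The playing period was removed from the timeline.
        break;
      case Player.DISCONTINUITY_REASON_SKIP:
      case Player.DISCONTINUITY_REASON_INTERNAL:
      default:
        break;
    }
  }
}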

      This reason also indicates a change caused by another player (e.g. multiple controllers can + * control the same playback on the remote device). */ - int TIMELINE_CHANGE_REASON_DYNAMIC = 2; - - /** Returns the component of this player for audio output, or null if audio is not supported. */ - @Nullable - AudioComponent getAudioComponent(); - - /** Returns the component of this player for video output, or null if video is not supported. */ - @Nullable - VideoComponent getVideoComponent(); - - /** Returns the component of this player for text output, or null if text is not supported. */ - @Nullable - TextComponent getTextComponent(); + int TIMELINE_CHANGE_REASON_SOURCE_UPDATE = 1; /** - * Returns the component of this player for metadata output, or null if metadata is not supported. + * Reasons for media item transitions. One of {@link #MEDIA_ITEM_TRANSITION_REASON_REPEAT}, {@link + * #MEDIA_ITEM_TRANSITION_REASON_AUTO}, {@link #MEDIA_ITEM_TRANSITION_REASON_SEEK} or {@link + * #MEDIA_ITEM_TRANSITION_REASON_PLAYLIST_CHANGED}. */ - @Nullable - MetadataComponent getMetadataComponent(); - + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef({ + MEDIA_ITEM_TRANSITION_REASON_REPEAT, + MEDIA_ITEM_TRANSITION_REASON_AUTO, + MEDIA_ITEM_TRANSITION_REASON_SEEK, + MEDIA_ITEM_TRANSITION_REASON_PLAYLIST_CHANGED + }) + @interface MediaItemTransitionReason {} + /** The media item has been repeated. */ + int MEDIA_ITEM_TRANSITION_REASON_REPEAT = 0; /** - * Returns the {@link Looper} associated with the application thread that's used to access the - * player and on which player events are received. + * Playback has automatically transitioned to the next media item. + * + *

      This reason also indicates a transition caused by another player (e.g. multiple controllers + * can control the same playback on a remote device). */ - Looper getApplicationLooper(); + int MEDIA_ITEM_TRANSITION_REASON_AUTO = 1; + /** A seek to another media item has occurred. */ + int MEDIA_ITEM_TRANSITION_REASON_SEEK = 2; + /** + * The current media item has changed because of a change in the playlist. This can either be if + * the media item previously being played has been removed, or when the playlist becomes non-empty + * after being empty. + */ + int MEDIA_ITEM_TRANSITION_REASON_PLAYLIST_CHANGED = 3; /** - * Register a listener to receive events from the player. The listener's methods will be called on - * the thread that was used to construct the player. However, if the thread used to construct the - * player does not have a {@link Looper}, then the listener will be called on the main thread. + * Events that can be reported via {@link Listener#onEvents(Player, Events)}. * - * @param listener The listener to register. + *
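A short sketch of consuming the media item transition reasons defined above. Illustrative only; the package name is assumed from the bundled ExoPlayer copy and the branch bodies are placeholders.

import androidx.annotation.Nullable;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.Player;

final class TransitionSketch implements Player.Listener {
  @Override
  public void onMediaItemTransition(@Nullable MediaItem mediaItem, int reason) {
    if (mediaItem == null) {
      // The playlist became empty.
      return;
    }
    if (reason == Player.MEDIA_ITEM_TRANSITION_REASON_AUTO
        || reason == Player.MEDIA_ITEM_TRANSITION_REASON_REPEAT) {
      // Playback advanced (or repeated) without user interaction, e.g. refresh "now playing" UI.
    } else if (reason == Player.MEDIA_ITEM_TRANSITION_REASON_SEEK) {
      // The user or app explicitly jumped to another item.
    }
  }
}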

      One of the {@link Player}{@code .EVENT_*} values. */ - void addListener(EventListener listener); - + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef({ + EVENT_TIMELINE_CHANGED, + EVENT_MEDIA_ITEM_TRANSITION, + EVENT_TRACKS_CHANGED, + EVENT_IS_LOADING_CHANGED, + EVENT_PLAYBACK_STATE_CHANGED, + EVENT_PLAY_WHEN_READY_CHANGED, + EVENT_PLAYBACK_SUPPRESSION_REASON_CHANGED, + EVENT_IS_PLAYING_CHANGED, + EVENT_REPEAT_MODE_CHANGED, + EVENT_SHUFFLE_MODE_ENABLED_CHANGED, + EVENT_PLAYER_ERROR, + EVENT_POSITION_DISCONTINUITY, + EVENT_PLAYBACK_PARAMETERS_CHANGED, + EVENT_AVAILABLE_COMMANDS_CHANGED, + EVENT_MEDIA_METADATA_CHANGED, + EVENT_PLAYLIST_METADATA_CHANGED, + EVENT_SEEK_BACK_INCREMENT_CHANGED, + EVENT_SEEK_FORWARD_INCREMENT_CHANGED, + EVENT_MAX_SEEK_TO_PREVIOUS_POSITION_CHANGED, + EVENT_TRACK_SELECTION_PARAMETERS_CHANGED, + EVENT_AUDIO_ATTRIBUTES_CHANGED, + EVENT_AUDIO_SESSION_ID, + EVENT_VOLUME_CHANGED, + EVENT_SKIP_SILENCE_ENABLED_CHANGED, + EVENT_SURFACE_SIZE_CHANGED, + EVENT_VIDEO_SIZE_CHANGED, + EVENT_RENDERED_FIRST_FRAME, + EVENT_CUES, + EVENT_METADATA, + EVENT_DEVICE_INFO_CHANGED, + EVENT_DEVICE_VOLUME_CHANGED + }) + @interface Event {} + /** {@link #getCurrentTimeline()} changed. */ + int EVENT_TIMELINE_CHANGED = 0; + /** {@link #getCurrentMediaItem()} changed or the player started repeating the current item. */ + int EVENT_MEDIA_ITEM_TRANSITION = 1; + /** {@link #getCurrentTracks()} changed. */ + int EVENT_TRACKS_CHANGED = 2; + /** {@link #isLoading()} ()} changed. */ + int EVENT_IS_LOADING_CHANGED = 3; + /** {@link #getPlaybackState()} changed. */ + int EVENT_PLAYBACK_STATE_CHANGED = 4; + /** {@link #getPlayWhenReady()} changed. */ + int EVENT_PLAY_WHEN_READY_CHANGED = 5; + /** {@link #getPlaybackSuppressionReason()} changed. */ + int EVENT_PLAYBACK_SUPPRESSION_REASON_CHANGED = 6; + /** {@link #isPlaying()} changed. */ + int EVENT_IS_PLAYING_CHANGED = 7; + /** {@link #getRepeatMode()} changed. */ + int EVENT_REPEAT_MODE_CHANGED = 8; + /** {@link #getShuffleModeEnabled()} changed. */ + int EVENT_SHUFFLE_MODE_ENABLED_CHANGED = 9; + /** {@link #getPlayerError()} changed. */ + int EVENT_PLAYER_ERROR = 10; + /** + * A position discontinuity occurred. See {@link Listener#onPositionDiscontinuity(PositionInfo, + * PositionInfo, int)}. + */ + int EVENT_POSITION_DISCONTINUITY = 11; + /** {@link #getPlaybackParameters()} changed. */ + int EVENT_PLAYBACK_PARAMETERS_CHANGED = 12; + /** {@link #isCommandAvailable(int)} changed for at least one {@link Command}. */ + int EVENT_AVAILABLE_COMMANDS_CHANGED = 13; + /** {@link #getMediaMetadata()} changed. */ + int EVENT_MEDIA_METADATA_CHANGED = 14; + /** {@link #getPlaylistMetadata()} changed. */ + int EVENT_PLAYLIST_METADATA_CHANGED = 15; + /** {@link #getSeekBackIncrement()} changed. */ + int EVENT_SEEK_BACK_INCREMENT_CHANGED = 16; + /** {@link #getSeekForwardIncrement()} changed. */ + int EVENT_SEEK_FORWARD_INCREMENT_CHANGED = 17; + /** {@link #getMaxSeekToPreviousPosition()} changed. */ + int EVENT_MAX_SEEK_TO_PREVIOUS_POSITION_CHANGED = 18; + /** {@link #getTrackSelectionParameters()} changed. */ + int EVENT_TRACK_SELECTION_PARAMETERS_CHANGED = 19; + /** {@link #getAudioAttributes()} changed. */ + int EVENT_AUDIO_ATTRIBUTES_CHANGED = 20; + /** The audio session id was set. 
*/ + int EVENT_AUDIO_SESSION_ID = 21; + /** {@link #getVolume()} changed. */ + int EVENT_VOLUME_CHANGED = 22; + /** Skipping silences in the audio stream is enabled or disabled. */ + int EVENT_SKIP_SILENCE_ENABLED_CHANGED = 23; + /** The size of the surface onto which the video is being rendered changed. */ + int EVENT_SURFACE_SIZE_CHANGED = 24; + /** {@link #getVideoSize()} changed. */ + int EVENT_VIDEO_SIZE_CHANGED = 25; + /** + * A frame is rendered for the first time since setting the surface, or since the renderer was + * reset, or since the stream being rendered was changed. + */ + int EVENT_RENDERED_FIRST_FRAME = 26; + /** {@link #getCurrentCues()} changed. */ + int EVENT_CUES = 27; + /** Metadata associated with the current playback time changed. */ + int EVENT_METADATA = 28; + /** {@link #getDeviceInfo()} changed. */ + int EVENT_DEVICE_INFO_CHANGED = 29; + /** {@link #getDeviceVolume()} changed. */ + int EVENT_DEVICE_VOLUME_CHANGED = 30; + + /** + * Commands that indicate which method calls are currently permitted on a particular {@code + * Player} instance. + * + *
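Since command availability is the mechanism introduced here, a sketch of reacting to availability changes, for example to enable or disable UI controls. setButtonEnabled is a hypothetical helper and the package is assumed from the bundled ExoPlayer copy.

import com.google.android.exoplayer2.Player;

final class AvailabilitySketch implements Player.Listener {
  @Override
  public void onAvailableCommandsChanged(Player.Commands availableCommands) {
    setButtonEnabled("next", availableCommands.contains(Player.COMMAND_SEEK_TO_NEXT));
    setButtonEnabled("previous", availableCommands.contains(Player.COMMAND_SEEK_TO_PREVIOUS));
  }

  // Hypothetical UI hook, not part of the Player API.
  private void setButtonEnabled(String which, boolean enabled) {}
}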

      The currently available commands can be inspected with {@link #getAvailableCommands()} and + * {@link #isCommandAvailable(int)}. + * + *

      See the documentation of each command constant for the details of which methods it permits + * calling. + * + *

      One of the following values: + * + *

        + *
      • {@link #COMMAND_PLAY_PAUSE} + *
      • {@link #COMMAND_PREPARE} + *
      • {@link #COMMAND_STOP} + *
      • {@link #COMMAND_SEEK_TO_DEFAULT_POSITION} + *
      • {@link #COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM} + *
      • {@link #COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM} + *
      • {@link #COMMAND_SEEK_TO_PREVIOUS} + *
      • {@link #COMMAND_SEEK_TO_NEXT_MEDIA_ITEM} + *
      • {@link #COMMAND_SEEK_TO_NEXT} + *
      • {@link #COMMAND_SEEK_TO_MEDIA_ITEM} + *
      • {@link #COMMAND_SEEK_BACK} + *
      • {@link #COMMAND_SEEK_FORWARD} + *
      • {@link #COMMAND_SET_SPEED_AND_PITCH} + *
      • {@link #COMMAND_SET_SHUFFLE_MODE} + *
      • {@link #COMMAND_SET_REPEAT_MODE} + *
      • {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} + *
      • {@link #COMMAND_GET_TIMELINE} + *
      • {@link #COMMAND_GET_MEDIA_ITEMS_METADATA} + *
      • {@link #COMMAND_SET_MEDIA_ITEMS_METADATA} + *
      • {@link #COMMAND_SET_MEDIA_ITEM} + *
      • {@link #COMMAND_CHANGE_MEDIA_ITEMS} + *
      • {@link #COMMAND_GET_AUDIO_ATTRIBUTES} + *
      • {@link #COMMAND_GET_VOLUME} + *
      • {@link #COMMAND_GET_DEVICE_VOLUME} + *
      • {@link #COMMAND_SET_VOLUME} + *
      • {@link #COMMAND_SET_DEVICE_VOLUME} + *
      • {@link #COMMAND_ADJUST_DEVICE_VOLUME} + *
      • {@link #COMMAND_SET_VIDEO_SURFACE} + *
      • {@link #COMMAND_GET_TEXT} + *
      • {@link #COMMAND_SET_TRACK_SELECTION_PARAMETERS} + *
      • {@link #COMMAND_GET_TRACKS} + *
      + */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef({ + COMMAND_INVALID, + COMMAND_PLAY_PAUSE, + COMMAND_PREPARE, + COMMAND_STOP, + COMMAND_SEEK_TO_DEFAULT_POSITION, + COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM, + COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM, + COMMAND_SEEK_TO_PREVIOUS, + COMMAND_SEEK_TO_NEXT_MEDIA_ITEM, + COMMAND_SEEK_TO_NEXT, + COMMAND_SEEK_TO_MEDIA_ITEM, + COMMAND_SEEK_BACK, + COMMAND_SEEK_FORWARD, + COMMAND_SET_SPEED_AND_PITCH, + COMMAND_SET_SHUFFLE_MODE, + COMMAND_SET_REPEAT_MODE, + COMMAND_GET_CURRENT_MEDIA_ITEM, + COMMAND_GET_TIMELINE, + COMMAND_GET_MEDIA_ITEMS_METADATA, + COMMAND_SET_MEDIA_ITEMS_METADATA, + COMMAND_SET_MEDIA_ITEM, + COMMAND_CHANGE_MEDIA_ITEMS, + COMMAND_GET_AUDIO_ATTRIBUTES, + COMMAND_GET_VOLUME, + COMMAND_GET_DEVICE_VOLUME, + COMMAND_SET_VOLUME, + COMMAND_SET_DEVICE_VOLUME, + COMMAND_ADJUST_DEVICE_VOLUME, + COMMAND_SET_VIDEO_SURFACE, + COMMAND_GET_TEXT, + COMMAND_SET_TRACK_SELECTION_PARAMETERS, + COMMAND_GET_TRACKS, + }) + @interface Command {} /** - * Unregister a listener. The listener will no longer receive events from the player. + * Command to start, pause or resume playback. * - * @param listener The listener to unregister. + *

      The following methods must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}: + * + *

        + *
      • {@link #play()} + *
      • {@link #pause()} + *
      • {@link #setPlayWhenReady(boolean)} + *
      */ - void removeListener(EventListener listener); + int COMMAND_PLAY_PAUSE = 1; /** - * Returns the current {@link State playback state} of the player. + * Command to prepare the player. * - * @return The current {@link State playback state}. + *
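Following the contract above, callers are expected to check availability before invoking the corresponding methods. A minimal guarded toggle might look like this; a sketch only, assuming the bundled ExoPlayer Player.

import com.google.android.exoplayer2.Player;

final class TogglePlaybackSketch {
  static void togglePlayPause(Player player) {
    if (!player.isCommandAvailable(Player.COMMAND_PLAY_PAUSE)) {
      return; // COMMAND_PLAY_PAUSE is not currently available.
    }
    if (player.getPlayWhenReady()) {
      player.pause();
    } else {
      player.play();
    }
  }
}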

      The {@link #prepare()} method must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}. */ - @State - int getPlaybackState(); + int COMMAND_PREPARE = 2; /** - * Returns the reason why playback is suppressed even though {@link #getPlayWhenReady()} is {@code - * true}, or {@link #PLAYBACK_SUPPRESSION_REASON_NONE} if playback is not suppressed. + * Command to stop playback. * - * @return The current {@link PlaybackSuppressionReason playback suppression reason}. + *

      The {@link #stop()} method must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}. */ - @PlaybackSuppressionReason - int getPlaybackSuppressionReason(); + int COMMAND_STOP = 3; /** - * Returns whether the player is playing, i.e. {@link #getContentPosition()} is advancing. - * - *

      If {@code false}, then at least one of the following is true: + * Command to seek to the default position of the current {@link MediaItem}. * - *

        - *
      • The {@link #getPlaybackState() playback state} is not {@link #STATE_READY ready}. - *
      • There is no {@link #getPlayWhenReady() intention to play}. - *
      • Playback is {@link #getPlaybackSuppressionReason() suppressed for other reasons}. - *
      + *

      The {@link #seekToDefaultPosition()} method must only be called if this command is + * {@linkplain #isCommandAvailable(int) available}. + */ + int COMMAND_SEEK_TO_DEFAULT_POSITION = 4; + + /** + * Command to seek anywhere inside the current {@link MediaItem}. * - * @return Whether the player is playing. + *

      The {@link #seekTo(long)} method must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}. */ - boolean isPlaying(); + int COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM = 5; + /** + * @deprecated Use {@link #COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM} instead. + */ + @Deprecated int COMMAND_SEEK_IN_CURRENT_WINDOW = COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM; /** - * Returns the error that caused playback to fail. This is the same error that will have been - * reported via {@link Player.EventListener#onPlayerError(ExoPlaybackException)} at the time of - * failure. It can be queried using this method until {@code stop(true)} is called or the player - * is re-prepared. + * Command to seek to the default position of the previous {@link MediaItem}. * - *

      Note that this method will always return {@code null} if {@link #getPlaybackState()} is not - * {@link #STATE_IDLE}. + *

      The {@link #seekToPreviousMediaItem()} method must only be called if this command is + * {@linkplain #isCommandAvailable(int) available}. + */ + int COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM = 6; + /** + * @deprecated Use {@link #COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM} instead. + */ + @Deprecated int COMMAND_SEEK_TO_PREVIOUS_WINDOW = COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM; + /** + * Command to seek to an earlier position in the current {@link MediaItem} or the default position + * of the previous {@link MediaItem}. * - * @return The error, or {@code null}. + *

      The {@link #seekToPrevious()} method must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}. */ - @Nullable - ExoPlaybackException getPlaybackError(); - + int COMMAND_SEEK_TO_PREVIOUS = 7; /** - * Sets whether playback should proceed when {@link #getPlaybackState()} == {@link #STATE_READY}. - *

      - * If the player is already in the ready state then this method can be used to pause and resume - * playback. + * Command to seek to the default position of the next {@link MediaItem}. * - * @param playWhenReady Whether playback should proceed when ready. + *

      The {@link #seekToNextMediaItem()} method must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}. */ - void setPlayWhenReady(boolean playWhenReady); - + int COMMAND_SEEK_TO_NEXT_MEDIA_ITEM = 8; /** - * Whether playback will proceed when {@link #getPlaybackState()} == {@link #STATE_READY}. + * @deprecated Use {@link #COMMAND_SEEK_TO_NEXT_MEDIA_ITEM} instead. + */ + @Deprecated int COMMAND_SEEK_TO_NEXT_WINDOW = COMMAND_SEEK_TO_NEXT_MEDIA_ITEM; + /** + * Command to seek to a later position in the current {@link MediaItem} or the default position of + * the next {@link MediaItem}. * - * @return Whether playback will proceed when ready. + *

      The {@link #seekToNext()} method must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}. */ - boolean getPlayWhenReady(); + int COMMAND_SEEK_TO_NEXT = 9; /** - * Sets the {@link RepeatMode} to be used for playback. + * Command to seek anywhere in any {@link MediaItem}. * - * @param repeatMode The repeat mode. + *

      The following methods must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}: + * + *

+   * <ul>
+   *   <li>{@link #seekTo(int, long)}
+   *   <li>{@link #seekToDefaultPosition(int)}
+   * </ul>
+   */
-  void setRepeatMode(@RepeatMode int repeatMode);
-
+  int COMMAND_SEEK_TO_MEDIA_ITEM = 10;
  /**
-   * Returns the current {@link RepeatMode} used for playback.
+   * @deprecated Use {@link #COMMAND_SEEK_TO_MEDIA_ITEM} instead.
+   */
+  @Deprecated int COMMAND_SEEK_TO_WINDOW = COMMAND_SEEK_TO_MEDIA_ITEM;
+  /**
+   * Command to seek back by a fixed increment inside the current {@link MediaItem}.
    *
-   *
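
For illustration only, not part of the upstream change: a minimal sketch of the availability pattern these command constants describe, assuming an already-constructed `player` and the usual ExoPlayer imports.

  // Guard optional seek operations with isCommandAvailable(int) before calling them.
  if (player.isCommandAvailable(Player.COMMAND_SEEK_TO_MEDIA_ITEM)) {
    player.seekTo(/* mediaItemIndex= */ 2, /* positionMs= */ 30_000);
  } else if (player.isCommandAvailable(Player.COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM)) {
    player.seekTo(/* positionMs= */ 30_000);
  }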

      The {@link #seekBack()} method must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}. */ - @RepeatMode int getRepeatMode(); - + int COMMAND_SEEK_BACK = 11; /** - * Sets whether shuffling of windows is enabled. + * Command to seek forward by a fixed increment inside the current {@link MediaItem}. * - * @param shuffleModeEnabled Whether shuffling is enabled. + *

      The {@link #seekForward()} method must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}. */ - void setShuffleModeEnabled(boolean shuffleModeEnabled); + int COMMAND_SEEK_FORWARD = 12; /** - * Returns whether shuffling of windows is enabled. + * Command to set the playback speed and pitch. + * + *

      The following methods must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}: + * + *

        + *
      • {@link #setPlaybackParameters(PlaybackParameters)} + *
      • {@link #setPlaybackSpeed(float)} + *
      */ - boolean getShuffleModeEnabled(); + int COMMAND_SET_SPEED_AND_PITCH = 13; /** - * Whether the player is currently loading the source. + * Command to enable shuffling. * - * @return Whether the player is currently loading the source. + *

      The {@link #setShuffleModeEnabled(boolean)} method must only be called if this command is + * {@linkplain #isCommandAvailable(int) available}. */ - boolean isLoading(); + int COMMAND_SET_SHUFFLE_MODE = 14; /** - * Seeks to the default position associated with the current window. The position can depend on - * the type of media being played. For live streams it will typically be the live edge of the - * window. For other streams it will typically be the start of the window. + * Command to set the repeat mode. + * + *

      The {@link #setRepeatMode(int)} method must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}. */ - void seekToDefaultPosition(); + int COMMAND_SET_REPEAT_MODE = 15; /** - * Seeks to the default position associated with the specified window. The position can depend on - * the type of media being played. For live streams it will typically be the live edge of the - * window. For other streams it will typically be the start of the window. + * Command to get information about the currently playing {@link MediaItem}. + * + *

      The following methods must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}: * - * @param windowIndex The index of the window whose associated default position should be seeked - * to. + *

+   * <ul>
+   *   <li>{@link #getCurrentMediaItem()}
+   *   <li>{@link #isCurrentMediaItemDynamic()}
+   *   <li>{@link #isCurrentMediaItemLive()}
+   *   <li>{@link #isCurrentMediaItemSeekable()}
+   *   <li>{@link #getCurrentLiveOffset()}
+   *   <li>{@link #getDuration()}
+   *   <li>{@link #getCurrentPosition()}
+   *   <li>{@link #getBufferedPosition()}
+   *   <li>{@link #getContentDuration()}
+   *   <li>{@link #getContentPosition()}
+   *   <li>{@link #getContentBufferedPosition()}
+   *   <li>{@link #getTotalBufferedDuration()}
+   *   <li>{@link #isPlayingAd()}
+   *   <li>{@link #getCurrentAdGroupIndex()}
+   *   <li>{@link #getCurrentAdIndexInAdGroup()}
+   * </ul>
+   */
-  void seekToDefaultPosition(int windowIndex);
+  int COMMAND_GET_CURRENT_MEDIA_ITEM = 16;
  /**
-   * Seeks to a position specified in milliseconds in the current window.
+   * Command to get the information about the current timeline.
    *
-   * @param positionMs The seek position in the current window, or {@link C#TIME_UNSET} to seek to
-   *     the window's default position.
+   *
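
A short sketch of the getters gated by COMMAND_GET_CURRENT_MEDIA_ITEM listed above (illustrative only, assuming a prepared `player`):

  if (player.isCommandAvailable(Player.COMMAND_GET_CURRENT_MEDIA_ITEM)) {
    long durationMs = player.getDuration();        // C.TIME_UNSET while still unknown
    long positionMs = player.getCurrentPosition();
    if (!player.isPlayingAd() && durationMs != C.TIME_UNSET) {
      float progress = positionMs / (float) durationMs;  // e.g. drive a progress bar
    }
  }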

      The following methods must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}: + * + *

+   * <ul>
+   *   <li>{@link #getCurrentTimeline()}
+   *   <li>{@link #getCurrentMediaItemIndex()}
+   *   <li>{@link #getCurrentPeriodIndex()}
+   *   <li>{@link #getMediaItemCount()}
+   *   <li>{@link #getMediaItemAt(int)}
+   *   <li>{@link #getNextMediaItemIndex()}
+   *   <li>{@link #getPreviousMediaItemIndex()}
+   *   <li>{@link #hasPreviousMediaItem()}
+   *   <li>{@link #hasNextMediaItem()}
+   * </ul>
+   */
-  void seekTo(long positionMs);
+  int COMMAND_GET_TIMELINE = 17;
  /**
-   * Seeks to a position specified in milliseconds in the specified window.
+   * Command to get metadata related to the playlist and current {@link MediaItem}.
    *
-   * @param windowIndex The index of the window.
-   * @param positionMs The seek position in the specified window, or {@link C#TIME_UNSET} to seek to
-   *     the window's default position.
-   * @throws IllegalSeekPositionException If the player has a non-empty timeline and the provided
-   *     {@code windowIndex} is not within the bounds of the current timeline.
+   *
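
Likewise, a sketch of iterating the playlist through the COMMAND_GET_TIMELINE getters listed above (illustrative only):

  if (player.isCommandAvailable(Player.COMMAND_GET_TIMELINE)) {
    int currentIndex = player.getCurrentMediaItemIndex();
    for (int i = 0; i < player.getMediaItemCount(); i++) {
      MediaItem item = player.getMediaItemAt(i);
      boolean isCurrent = (i == currentIndex);
      // e.g. render a queue row for `item`, highlighting the current one
    }
  }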

      The following methods must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}: + * + *

        + *
      • {@link #getMediaMetadata()} + *
      • {@link #getPlaylistMetadata()} + *
      */ - void seekTo(int windowIndex, long positionMs); + // TODO(b/263132691): Rename this to COMMAND_GET_METADATA + int COMMAND_GET_MEDIA_ITEMS_METADATA = 18; /** - * Returns whether a previous window exists, which may depend on the current repeat mode and - * whether shuffle mode is enabled. + * Command to set the playlist metadata. + * + *

      The {@link #setPlaylistMetadata(MediaMetadata)} method must only be called if this command + * is {@linkplain #isCommandAvailable(int) available}. */ - boolean hasPrevious(); + // TODO(b/263132691): Rename this to COMMAND_SET_PLAYLIST_METADATA + int COMMAND_SET_MEDIA_ITEMS_METADATA = 19; /** - * Seeks to the default position of the previous window in the timeline, which may depend on the - * current repeat mode and whether shuffle mode is enabled. Does nothing if {@link #hasPrevious()} - * is {@code false}. + * Command to set a {@link MediaItem}. + * + *

      The following methods must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}: + * + *

        + *
      • {@link #setMediaItem(MediaItem)} + *
      • {@link #setMediaItem(MediaItem, boolean)} + *
      • {@link #setMediaItem(MediaItem, long)} + *
      + */ + int COMMAND_SET_MEDIA_ITEM = 31; + /** + * Command to change the {@linkplain MediaItem media items} in the playlist. + * + *

      The following methods must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}: + * + *

+   * <ul>
+   *   <li>{@link #addMediaItem(MediaItem)}
+   *   <li>{@link #addMediaItem(int, MediaItem)}
+   *   <li>{@link #addMediaItems(List)}
+   *   <li>{@link #addMediaItems(int, List)}
+   *   <li>{@link #clearMediaItems()}
+   *   <li>{@link #moveMediaItem(int, int)}
+   *   <li>{@link #moveMediaItems(int, int, int)}
+   *   <li>{@link #removeMediaItem(int)}
+   *   <li>{@link #removeMediaItems(int, int)}
+   *   <li>{@link #setMediaItems(List)}
+   *   <li>{@link #setMediaItems(List, boolean)}
+   *   <li>{@link #setMediaItems(List, int, long)}
+   * </ul>
+   */
-  void previous();
+  int COMMAND_CHANGE_MEDIA_ITEMS = 20;
  /**
-   * Returns whether a next window exists, which may depend on the current repeat mode and whether
-   * shuffle mode is enabled.
+   * Command to get the player's current {@link AudioAttributes}.
+   *
+   *
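
A hedged sketch of editing the playlist with the methods listed above (the URIs are placeholders; assumes `java.util.Arrays`/`List` plus the ExoPlayer `MediaItem` import):

  List<MediaItem> queue =
      Arrays.asList(
          MediaItem.fromUri("https://example.com/one.mp3"),
          MediaItem.fromUri("https://example.com/two.mp3"));
  if (player.isCommandAvailable(Player.COMMAND_CHANGE_MEDIA_ITEMS)) {
    // Replace the playlist and start from the second item at 10 seconds.
    player.setMediaItems(queue, /* startIndex= */ 1, /* startPositionMs= */ 10_000);
    // Append one more item to the end of the playlist.
    player.addMediaItem(MediaItem.fromUri("https://example.com/three.mp3"));
  }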

      The {@link #getAudioAttributes()} method must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}. */ - boolean hasNext(); + int COMMAND_GET_AUDIO_ATTRIBUTES = 21; /** - * Seeks to the default position of the next window in the timeline, which may depend on the - * current repeat mode and whether shuffle mode is enabled. Does nothing if {@link #hasNext()} is - * {@code false}. + * Command to get the player volume. + * + *

      The {@link #getVolume()} method must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}. */ - void next(); + int COMMAND_GET_VOLUME = 22; /** - * Attempts to set the playback parameters. Passing {@code null} sets the parameters to the - * default, {@link PlaybackParameters#DEFAULT}, which means there is no speed or pitch adjustment. + * Command to get the device volume and whether it is muted. * - *

      Playback parameters changes may cause the player to buffer. {@link - * EventListener#onPlaybackParametersChanged(PlaybackParameters)} will be called whenever the - * currently active playback parameters change. + *

      The following methods must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}: * - * @param playbackParameters The playback parameters, or {@code null} to use the defaults. + *

        + *
      • {@link #getDeviceVolume()} + *
      • {@link #isDeviceMuted()} + *
      */ - void setPlaybackParameters(@Nullable PlaybackParameters playbackParameters); + int COMMAND_GET_DEVICE_VOLUME = 23; /** - * Returns the currently active playback parameters. + * Command to set the player volume. * - * @see EventListener#onPlaybackParametersChanged(PlaybackParameters) + *

      The {@link #setVolume(float)} method must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}. */ - PlaybackParameters getPlaybackParameters(); + int COMMAND_SET_VOLUME = 24; + /** + * Command to set the device volume. + * + *

      The {@link #setDeviceVolume(int)} method must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}. + */ + int COMMAND_SET_DEVICE_VOLUME = 25; /** - * Stops playback without resetting the player. Use {@code setPlayWhenReady(false)} rather than - * this method if the intention is to pause playback. + * Command to increase and decrease the device volume and mute it. * - *

      Calling this method will cause the playback state to transition to {@link #STATE_IDLE}. The - * player instance can still be used, and {@link #release()} must still be called on the player if - * it's no longer required. + *

      The following methods must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}: * - *

      Calling this method does not reset the playback position. + *

+   * <ul>
+   *   <li>{@link #increaseDeviceVolume()}
+   *   <li>{@link #decreaseDeviceVolume()}
+   *   <li>{@link #setDeviceMuted(boolean)}
+   * </ul>
+   */
-  void stop();
+  int COMMAND_ADJUST_DEVICE_VOLUME = 26;
  /**
-   * Stops playback and optionally resets the player. Use {@code setPlayWhenReady(false)} rather
-   * than this method if the intention is to pause playback.
+   * Command to set and clear the surface on which to render the video.
    *
-   *
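
For example, a mute toggle could combine the device-volume commands above (illustrative only):

  if (player.isCommandAvailable(Player.COMMAND_GET_DEVICE_VOLUME)
      && player.isCommandAvailable(Player.COMMAND_ADJUST_DEVICE_VOLUME)) {
    player.setDeviceMuted(!player.isDeviceMuted());
  }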

      Calling this method will cause the playback state to transition to {@link #STATE_IDLE}. The - * player instance can still be used, and {@link #release()} must still be called on the player if - * it's no longer required. + *

      The following methods must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}: * - * @param reset Whether the player should be reset. + *

+   * <ul>
+   *   <li>{@link #setVideoSurface(Surface)}
+   *   <li>{@link #clearVideoSurface()}
+   *   <li>{@link #clearVideoSurface(Surface)}
+   *   <li>{@link #setVideoSurfaceHolder(SurfaceHolder)}
+   *   <li>{@link #clearVideoSurfaceHolder(SurfaceHolder)}
+   *   <li>{@link #setVideoSurfaceView(SurfaceView)}
+   *   <li>{@link #clearVideoSurfaceView(SurfaceView)}
+   * </ul>
+   */
-  void stop(boolean reset);
+  int COMMAND_SET_VIDEO_SURFACE = 27;
  /**
-   * Releases the player. This method must be called when the player is no longer required. The
-   * player must not be used after calling this method.
+   * Command to get the text that should currently be displayed by the player.
+   *
+   *
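
A sketch of wiring video output to a view (illustrative; `surfaceView` is assumed to come from the app's layout, e.g. a hypothetical findViewById call):

  if (player.isCommandAvailable(Player.COMMAND_SET_VIDEO_SURFACE)) {
    // Passing the view rather than a raw Surface lets the player track the surface lifecycle.
    player.setVideoSurfaceView(surfaceView);
  }
  // Later, when the UI is torn down:
  player.clearVideoSurfaceView(surfaceView);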

      The {@link #getCurrentCues()} method must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}. */ - void release(boolean async); + int COMMAND_GET_TEXT = 28; /** - * Returns the number of renderers. + * Command to set the player's track selection parameters. + * + *

      The {@link #setTrackSelectionParameters(TrackSelectionParameters)} method must only be + * called if this command is {@linkplain #isCommandAvailable(int) available}. */ - int getRendererCount(); + int COMMAND_SET_TRACK_SELECTION_PARAMETERS = 29; /** - * Returns the track type that the renderer at a given index handles. + * Command to get details of the current track selection. * - * @see Renderer#getTrackType() - * @param index The index of the renderer. - * @return One of the {@code TRACK_TYPE_*} constants defined in {@link C}. + *

      The {@link #getCurrentTracks()} method must only be called if this command is {@linkplain + * #isCommandAvailable(int) available}. */ - int getRendererType(int index); + int COMMAND_GET_TRACKS = 30; + + /** Represents an invalid {@link Command}. */ + int COMMAND_INVALID = -1; /** - * Returns the available track groups. + * Returns the {@link Looper} associated with the application thread that's used to access the + * player and on which player events are received. + * + *

      This method can be called from any thread. */ - TrackGroupArray getCurrentTrackGroups(); + Looper getApplicationLooper(); /** - * Returns the current track selections for each renderer. + * Registers a listener to receive all events from the player. + * + *

      The listener's methods will be called on the thread associated with {@link + * #getApplicationLooper()}. + * + *

      This method can be called from any thread. + * + * @param listener The listener to register. */ - TrackSelectionArray getCurrentTrackSelections(); + void addListener(Listener listener); /** - * Returns the current manifest. The type depends on the type of media being played. May be null. + * Unregister a listener registered through {@link #addListener(Listener)}. The listener will no + * longer receive events. + * + * @param listener The listener to unregister. */ - @Nullable Object getCurrentManifest(); + void removeListener(Listener listener); /** - * Returns the current {@link Timeline}. Never null, but may be empty. + * Clears the playlist, adds the specified {@linkplain MediaItem media items} and resets the + * position to the default position. + * + *

      This method must only be called if {@link #COMMAND_CHANGE_MEDIA_ITEMS} is {@linkplain + * #getAvailableCommands() available}. + * + * @param mediaItems The new {@linkplain MediaItem media items}. */ - Timeline getCurrentTimeline(); + void setMediaItems(List mediaItems); /** - * Returns the index of the period currently being played. + * Clears the playlist and adds the specified {@linkplain MediaItem media items}. + * + *

      This method must only be called if {@link #COMMAND_CHANGE_MEDIA_ITEMS} is {@linkplain + * #getAvailableCommands() available}. + * + * @param mediaItems The new {@linkplain MediaItem media items}. + * @param resetPosition Whether the playback position should be reset to the default position in + * the first {@link Timeline.Window}. If false, playback will start from the position defined + * by {@link #getCurrentMediaItemIndex()} and {@link #getCurrentPosition()}. */ - int getCurrentPeriodIndex(); + void setMediaItems(List mediaItems, boolean resetPosition); /** - * Returns the index of the window currently being played. + * Clears the playlist and adds the specified {@linkplain MediaItem media items}. + * + *

      This method must only be called if {@link #COMMAND_CHANGE_MEDIA_ITEMS} is {@linkplain + * #getAvailableCommands() available}. + * + * @param mediaItems The new {@linkplain MediaItem media items}. + * @param startIndex The {@link MediaItem} index to start playback from. If {@link C#INDEX_UNSET} + * is passed, the current position is not reset. + * @param startPositionMs The position in milliseconds to start playback from. If {@link + * C#TIME_UNSET} is passed, the default position of the given {@link MediaItem} is used. In + * any case, if {@code startIndex} is set to {@link C#INDEX_UNSET}, this parameter is ignored + * and the position is not reset at all. + * @throws IllegalSeekPositionException If the provided {@code startIndex} is not within the + * bounds of the list of media items. */ - int getCurrentWindowIndex(); + void setMediaItems(List mediaItems, int startIndex, long startPositionMs); /** - * Returns the index of the next timeline window to be played, which may depend on the current - * repeat mode and whether shuffle mode is enabled. Returns {@link C#INDEX_UNSET} if the window - * currently being played is the last window. + * Clears the playlist, adds the specified {@link MediaItem} and resets the position to the + * default position. + * + *

      This method must only be called if {@link #COMMAND_SET_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + * + * @param mediaItem The new {@link MediaItem}. */ - int getNextWindowIndex(); + void setMediaItem(MediaItem mediaItem); /** - * Returns the index of the previous timeline window to be played, which may depend on the current - * repeat mode and whether shuffle mode is enabled. Returns {@link C#INDEX_UNSET} if the window - * currently being played is the first window. + * Clears the playlist and adds the specified {@link MediaItem}. + * + *

      This method must only be called if {@link #COMMAND_SET_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + * + * @param mediaItem The new {@link MediaItem}. + * @param startPositionMs The position in milliseconds to start playback from. */ - int getPreviousWindowIndex(); + void setMediaItem(MediaItem mediaItem, long startPositionMs); /** - * Returns the tag of the currently playing window in the timeline. May be null if no tag is set - * or the timeline is not yet available. + * Clears the playlist and adds the specified {@link MediaItem}. + * + *

      This method must only be called if {@link #COMMAND_SET_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + * + * @param mediaItem The new {@link MediaItem}. + * @param resetPosition Whether the playback position should be reset to the default position. If + * false, playback will start from the position defined by {@link #getCurrentMediaItemIndex()} + * and {@link #getCurrentPosition()}. */ - @Nullable Object getCurrentTag(); + void setMediaItem(MediaItem mediaItem, boolean resetPosition); /** - * Returns the duration of the current content window or ad in milliseconds, or {@link - * C#TIME_UNSET} if the duration is not known. + * Adds a media item to the end of the playlist. + * + *

      This method must only be called if {@link #COMMAND_CHANGE_MEDIA_ITEMS} is {@linkplain + * #getAvailableCommands() available}. + * + * @param mediaItem The {@link MediaItem} to add. */ - long getDuration(); + void addMediaItem(MediaItem mediaItem); - /** Returns the playback position in the current content window or ad, in milliseconds. */ - long getCurrentPosition(); + /** + * Adds a media item at the given index of the playlist. + * + *

      This method must only be called if {@link #COMMAND_CHANGE_MEDIA_ITEMS} is {@linkplain + * #getAvailableCommands() available}. + * + * @param index The index at which to add the media item. If the index is larger than the size of + * the playlist, the media item is added to the end of the playlist. + * @param mediaItem The {@link MediaItem} to add. + */ + void addMediaItem(int index, MediaItem mediaItem); /** - * Returns an estimate of the position in the current content window or ad up to which data is - * buffered, in milliseconds. + * Adds a list of media items to the end of the playlist. + * + *

      This method must only be called if {@link #COMMAND_CHANGE_MEDIA_ITEMS} is {@linkplain + * #getAvailableCommands() available}. + * + * @param mediaItems The {@linkplain MediaItem media items} to add. */ - long getBufferedPosition(); + void addMediaItems(List mediaItems); /** - * Returns an estimate of the percentage in the current content window or ad up to which data is - * buffered, or 0 if no estimate is available. + * Adds a list of media items at the given index of the playlist. + * + *

      This method must only be called if {@link #COMMAND_CHANGE_MEDIA_ITEMS} is {@linkplain + * #getAvailableCommands() available}. + * + * @param index The index at which to add the media items. If the index is larger than the size of + * the playlist, the media items are added to the end of the playlist. + * @param mediaItems The {@linkplain MediaItem media items} to add. */ - int getBufferedPercentage(); + void addMediaItems(int index, List mediaItems); /** - * Returns an estimate of the total buffered duration from the current position, in milliseconds. - * This includes pre-buffered data for subsequent ads and windows. + * Moves the media item at the current index to the new index. + * + *

      This method must only be called if {@link #COMMAND_CHANGE_MEDIA_ITEMS} is {@linkplain + * #getAvailableCommands() available}. + * + * @param currentIndex The current index of the media item to move. If the index is larger than + * the size of the playlist, the request is ignored. + * @param newIndex The new index of the media item. If the new index is larger than the size of + * the playlist the item is moved to the end of the playlist. */ - long getTotalBufferedDuration(); + void moveMediaItem(int currentIndex, int newIndex); /** - * Returns whether the current window is dynamic, or {@code false} if the {@link Timeline} is - * empty. + * Moves the media item range to the new index. * - * @see Timeline.Window#isDynamic + *

      This method must only be called if {@link #COMMAND_CHANGE_MEDIA_ITEMS} is {@linkplain + * #getAvailableCommands() available}. + * + * @param fromIndex The start of the range to move. If the index is larger than the size of the + * playlist, the request is ignored. + * @param toIndex The first item not to be included in the range (exclusive). If the index is + * larger than the size of the playlist, items up to the end of the playlist are moved. + * @param newIndex The new index of the first media item of the range. If the new index is larger + * than the size of the remaining playlist after removing the range, the range is moved to the + * end of the playlist. */ - boolean isCurrentWindowDynamic(); + void moveMediaItems(int fromIndex, int toIndex, int newIndex); /** - * Returns whether the current window is live, or {@code false} if the {@link Timeline} is empty. + * Removes the media item at the given index of the playlist. * - * @see Timeline.Window#isLive + *

      This method must only be called if {@link #COMMAND_CHANGE_MEDIA_ITEMS} is {@linkplain + * #getAvailableCommands() available}. + * + * @param index The index at which to remove the media item. If the index is larger than the size + * of the playlist, the request is ignored. */ - boolean isCurrentWindowLive(); + void removeMediaItem(int index); /** - * Returns whether the current window is seekable, or {@code false} if the {@link Timeline} is - * empty. + * Removes a range of media items from the playlist. * - * @see Timeline.Window#isSeekable + *

      This method must only be called if {@link #COMMAND_CHANGE_MEDIA_ITEMS} is {@linkplain + * #getAvailableCommands() available}. + * + * @param fromIndex The index at which to start removing media items. If the index is larger than + * the size of the playlist, the request is ignored. + * @param toIndex The index of the first item to be kept (exclusive). If the index is larger than + * the size of the playlist, media items up to the end of the playlist are removed. */ - boolean isCurrentWindowSeekable(); + void removeMediaItems(int fromIndex, int toIndex); /** - * Returns whether the player is currently playing an ad. + * Clears the playlist. + * + *

      This method must only be called if {@link #COMMAND_CHANGE_MEDIA_ITEMS} is {@linkplain + * #getAvailableCommands() available}. */ - boolean isPlayingAd(); + void clearMediaItems(); /** - * If {@link #isPlayingAd()} returns true, returns the index of the ad group in the period - * currently being played. Returns {@link C#INDEX_UNSET} otherwise. + * Returns whether the provided {@link Command} is available. + * + *

      This method does not execute the command. + * + * @param command A {@link Command}. + * @return Whether the {@link Command} is available. + * @see Listener#onAvailableCommandsChanged(Commands) */ - int getCurrentAdGroupIndex(); + boolean isCommandAvailable(@Command int command); + + /** Returns whether the player can be used to advertise a media session. */ + boolean canAdvertiseSession(); /** - * If {@link #isPlayingAd()} returns true, returns the index of the ad in its ad group. Returns - * {@link C#INDEX_UNSET} otherwise. + * Returns the player's currently available {@link Commands}. + * + *

      The returned {@link Commands} are not updated when available commands change. Use {@link + * Listener#onAvailableCommandsChanged(Commands)} to get an update when the available commands + * change. + * + * @return The currently available {@link Commands}. + * @see Listener#onAvailableCommandsChanged */ - int getCurrentAdIndexInAdGroup(); + Commands getAvailableCommands(); /** - * If {@link #isPlayingAd()} returns {@code true}, returns the duration of the current content - * window in milliseconds, or {@link C#TIME_UNSET} if the duration is not known. If there is no ad - * playing, the returned duration is the same as that returned by {@link #getDuration()}. + * Prepares the player. + * + *

      This method must only be called if {@link #COMMAND_PREPARE} is {@linkplain + * #getAvailableCommands() available}. + * + *

      This will move the player out of {@link #STATE_IDLE idle state} and the player will start + * loading media and acquire resources needed for playback. */ - long getContentDuration(); + void prepare(); /** - * If {@link #isPlayingAd()} returns {@code true}, returns the content position that will be - * played once all ads in the ad group have finished playing, in milliseconds. If there is no ad - * playing, the returned position is the same as that returned by {@link #getCurrentPosition()}. + * Returns the current {@linkplain State playback state} of the player. + * + * @return The current {@linkplain State playback state}. + * @see Listener#onPlaybackStateChanged(int) */ - long getContentPosition(); + @State + int getPlaybackState(); /** - * If {@link #isPlayingAd()} returns {@code true}, returns an estimate of the content position in - * the current content window up to which data is buffered, in milliseconds. If there is no ad - * playing, the returned position is the same as that returned by {@link #getBufferedPosition()}. + * Returns the reason why playback is suppressed even though {@link #getPlayWhenReady()} is {@code + * true}, or {@link #PLAYBACK_SUPPRESSION_REASON_NONE} if playback is not suppressed. + * + * @return The current {@link PlaybackSuppressionReason}. + * @see Listener#onPlaybackSuppressionReasonChanged(int) */ - long getContentBufferedPosition(); + @PlaybackSuppressionReason + int getPlaybackSuppressionReason(); + + /** + * Returns whether the player is playing, i.e. {@link #getCurrentPosition()} is advancing. + * + *

      If {@code false}, then at least one of the following is true: + * + *

        + *
      • The {@link #getPlaybackState() playback state} is not {@link #STATE_READY ready}. + *
      • There is no {@link #getPlayWhenReady() intention to play}. + *
      • Playback is {@link #getPlaybackSuppressionReason() suppressed for other reasons}. + *
      + * + * @return Whether the player is playing. + * @see Listener#onIsPlayingChanged(boolean) + */ + boolean isPlaying(); + + /** + * Returns the error that caused playback to fail. This is the same error that will have been + * reported via {@link Listener#onPlayerError(PlaybackException)} at the time of failure. It can + * be queried using this method until the player is re-prepared. + * + *
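
The @see references above point at Listener callbacks; a minimal sketch of observing them (illustrative only):

  player.addListener(
      new Player.Listener() {
        @Override
        public void onIsPlayingChanged(boolean isPlaying) {
          // e.g. toggle a play/pause button, or keep the screen on while playing
        }

        @Override
        public void onPlayerError(PlaybackException error) {
          // Surface the failure; getPlayerError() keeps returning it until re-prepare.
        }
      });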

      Note that this method will always return {@code null} if {@link #getPlaybackState()} is not + * {@link #STATE_IDLE}. + * + * @return The error, or {@code null}. + * @see Listener#onPlayerError(PlaybackException) + */ + @Nullable + PlaybackException getPlayerError(); + + /** + * Resumes playback as soon as {@link #getPlaybackState()} == {@link #STATE_READY}. Equivalent to + * {@link #setPlayWhenReady(boolean) setPlayWhenReady(true)}. + * + *

      This method must only be called if {@link #COMMAND_PLAY_PAUSE} is {@linkplain + * #getAvailableCommands() available}. + */ + void play(); + + /** + * Pauses playback. Equivalent to {@link #setPlayWhenReady(boolean) setPlayWhenReady(false)}. + * + *

      This method must only be called if {@link #COMMAND_PLAY_PAUSE} is {@linkplain + * #getAvailableCommands() available}. + */ + void pause(); + + /** + * Sets whether playback should proceed when {@link #getPlaybackState()} == {@link #STATE_READY}. + * + *

      If the player is already in the ready state then this method pauses and resumes playback. + * + *

      This method must only be called if {@link #COMMAND_PLAY_PAUSE} is {@linkplain + * #getAvailableCommands() available}. + * + * @param playWhenReady Whether playback should proceed when ready. + */ + void setPlayWhenReady(boolean playWhenReady); + + /** + * Whether playback will proceed when {@link #getPlaybackState()} == {@link #STATE_READY}. + * + * @return Whether playback will proceed when ready. + * @see Listener#onPlayWhenReadyChanged(boolean, int) + */ + boolean getPlayWhenReady(); + + /** + * Sets the {@link RepeatMode} to be used for playback. + * + *

      This method must only be called if {@link #COMMAND_SET_REPEAT_MODE} is {@linkplain + * #getAvailableCommands() available}. + * + * @param repeatMode The repeat mode. + */ + void setRepeatMode(@RepeatMode int repeatMode); + + /** + * Returns the current {@link RepeatMode} used for playback. + * + * @return The current repeat mode. + * @see Listener#onRepeatModeChanged(int) + */ + @RepeatMode + int getRepeatMode(); + + /** + * Sets whether shuffling of media items is enabled. + * + *

      This method must only be called if {@link #COMMAND_SET_SHUFFLE_MODE} is {@linkplain + * #getAvailableCommands() available}. + * + * @param shuffleModeEnabled Whether shuffling is enabled. + */ + void setShuffleModeEnabled(boolean shuffleModeEnabled); + + /** + * Returns whether shuffling of media items is enabled. + * + * @see Listener#onShuffleModeEnabledChanged(boolean) + */ + boolean getShuffleModeEnabled(); + + /** + * Whether the player is currently loading the source. + * + * @return Whether the player is currently loading the source. + * @see Listener#onIsLoadingChanged(boolean) + */ + boolean isLoading(); + + /** + * Seeks to the default position associated with the current {@link MediaItem}. The position can + * depend on the type of media being played. For live streams it will typically be the live edge. + * For other streams it will typically be the start. + * + *

      This method must only be called if {@link #COMMAND_SEEK_TO_DEFAULT_POSITION} is {@linkplain + * #getAvailableCommands() available}. + */ + void seekToDefaultPosition(); + + /** + * Seeks to the default position associated with the specified {@link MediaItem}. The position can + * depend on the type of media being played. For live streams it will typically be the live edge. + * For other streams it will typically be the start. + * + *

      This method must only be called if {@link #COMMAND_SEEK_TO_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + * + * @param mediaItemIndex The index of the {@link MediaItem} whose associated default position + * should be seeked to. If the index is larger than the size of the playlist, the request is + * ignored. + */ + void seekToDefaultPosition(int mediaItemIndex); + + /** + * Seeks to a position specified in milliseconds in the current {@link MediaItem}. + * + *

      This method must only be called if {@link #COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM} is + * {@linkplain #getAvailableCommands() available}. + * + * @param positionMs The seek position in the current {@link MediaItem}, or {@link C#TIME_UNSET} + * to seek to the media item's default position. + */ + void seekTo(long positionMs); + + /** + * Seeks to a position specified in milliseconds in the specified {@link MediaItem}. + * + *

      This method must only be called if {@link #COMMAND_SEEK_TO_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + * + * @param mediaItemIndex The index of the {@link MediaItem}. If the index is larger than the size + * of the playlist, the request is ignored. + * @param positionMs The seek position in the specified {@link MediaItem}, or {@link C#TIME_UNSET} + * to seek to the media item's default position. + */ + void seekTo(int mediaItemIndex, long positionMs); + + /** + * Returns the {@link #seekBack()} increment. + * + * @return The seek back increment, in milliseconds. + * @see Listener#onSeekBackIncrementChanged(long) + */ + long getSeekBackIncrement(); + + /** + * Seeks back in the current {@link MediaItem} by {@link #getSeekBackIncrement()} milliseconds. + * + *

      This method must only be called if {@link #COMMAND_SEEK_BACK} is {@linkplain + * #getAvailableCommands() available}. + */ + void seekBack(); + + /** + * Returns the {@link #seekForward()} increment. + * + * @return The seek forward increment, in milliseconds. + * @see Listener#onSeekForwardIncrementChanged(long) + */ + long getSeekForwardIncrement(); + + /** + * Seeks forward in the current {@link MediaItem} by {@link #getSeekForwardIncrement()} + * milliseconds. + * + *

      This method must only be called if {@link #COMMAND_SEEK_FORWARD} is {@linkplain + * #getAvailableCommands() available}. + */ + void seekForward(); + + /** + * @deprecated Use {@link #hasPreviousMediaItem()} instead. + */ + @Deprecated + boolean hasPrevious(); + + /** + * @deprecated Use {@link #hasPreviousMediaItem()} instead. + */ + @Deprecated + boolean hasPreviousWindow(); + + /** + * Returns whether a previous media item exists, which may depend on the current repeat mode and + * whether shuffle mode is enabled. + * + *

      Note: When the repeat mode is {@link #REPEAT_MODE_ONE}, this method behaves the same as when + * the current repeat mode is {@link #REPEAT_MODE_OFF}. See {@link #REPEAT_MODE_ONE} for more + * details. + * + *

      This method must only be called if {@link #COMMAND_GET_TIMELINE} is {@linkplain + * #getAvailableCommands() available}. + */ + boolean hasPreviousMediaItem(); + + /** + * @deprecated Use {@link #seekToPreviousMediaItem()} instead. + */ + @Deprecated + void previous(); + + /** + * @deprecated Use {@link #seekToPreviousMediaItem()} instead. + */ + @Deprecated + void seekToPreviousWindow(); + + /** + * Seeks to the default position of the previous {@link MediaItem}, which may depend on the + * current repeat mode and whether shuffle mode is enabled. Does nothing if {@link + * #hasPreviousMediaItem()} is {@code false}. + * + *

      Note: When the repeat mode is {@link #REPEAT_MODE_ONE}, this method behaves the same as when + * the current repeat mode is {@link #REPEAT_MODE_OFF}. See {@link #REPEAT_MODE_ONE} for more + * details. + * + *

      This method must only be called if {@link #COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM} is + * {@linkplain #getAvailableCommands() available}. + */ + void seekToPreviousMediaItem(); + + /** + * Returns the maximum position for which {@link #seekToPrevious()} seeks to the previous {@link + * MediaItem}, in milliseconds. + * + * @return The maximum seek to previous position, in milliseconds. + * @see Listener#onMaxSeekToPreviousPositionChanged(long) + */ + long getMaxSeekToPreviousPosition(); + + /** + * Seeks to an earlier position in the current or previous {@link MediaItem} (if available). More + * precisely: + * + *

+   * <ul>
+   *   <li>If the timeline is empty or seeking is not possible, does nothing.
+   *   <li>Otherwise, if the current {@link MediaItem} is {@link #isCurrentMediaItemLive() live}
+   *       and {@link #isCurrentMediaItemSeekable() unseekable}, then:
+   *       <ul>
+   *         <li>If {@link #hasPreviousMediaItem() a previous media item exists}, seeks to the
+   *             default position of the previous media item.
+   *         <li>Otherwise, does nothing.
+   *       </ul>
+   *   <li>Otherwise, if {@link #hasPreviousMediaItem() a previous media item exists} and the {@link
+   *       #getCurrentPosition() current position} is less than {@link
+   *       #getMaxSeekToPreviousPosition()}, seeks to the default position of the previous {@link
+   *       MediaItem}.
+   *   <li>Otherwise, seeks to 0 in the current {@link MediaItem}.
+   * </ul>
+   *
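
Typical previous/next button handlers built on these seek methods might look like the following sketch (illustrative only):

  void onPreviousClicked(Player player) {
    if (player.isCommandAvailable(Player.COMMAND_SEEK_TO_PREVIOUS)) {
      player.seekToPrevious();
    }
  }

  void onNextClicked(Player player) {
    if (player.isCommandAvailable(Player.COMMAND_SEEK_TO_NEXT)) {
      player.seekToNext();
    }
  }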

      This method must only be called if {@link #COMMAND_SEEK_TO_PREVIOUS} is {@linkplain + * #getAvailableCommands() available}. + */ + void seekToPrevious(); + + /** + * @deprecated Use {@link #hasNextMediaItem()} instead. + */ + @Deprecated + boolean hasNext(); + + /** + * @deprecated Use {@link #hasNextMediaItem()} instead. + */ + @Deprecated + boolean hasNextWindow(); + + /** + * Returns whether a next {@link MediaItem} exists, which may depend on the current repeat mode + * and whether shuffle mode is enabled. + * + *

      Note: When the repeat mode is {@link #REPEAT_MODE_ONE}, this method behaves the same as when + * the current repeat mode is {@link #REPEAT_MODE_OFF}. See {@link #REPEAT_MODE_ONE} for more + * details. + * + *

      This method must only be called if {@link #COMMAND_GET_TIMELINE} is {@linkplain + * #getAvailableCommands() available}. + */ + boolean hasNextMediaItem(); + + /** + * @deprecated Use {@link #seekToNextMediaItem()} instead. + */ + @Deprecated + void next(); + + /** + * @deprecated Use {@link #seekToNextMediaItem()} instead. + */ + @Deprecated + void seekToNextWindow(); + + /** + * Seeks to the default position of the next {@link MediaItem}, which may depend on the current + * repeat mode and whether shuffle mode is enabled. Does nothing if {@link #hasNextMediaItem()} is + * {@code false}. + * + *

      Note: When the repeat mode is {@link #REPEAT_MODE_ONE}, this method behaves the same as when + * the current repeat mode is {@link #REPEAT_MODE_OFF}. See {@link #REPEAT_MODE_ONE} for more + * details. + * + *

      This method must only be called if {@link #COMMAND_SEEK_TO_NEXT_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + */ + void seekToNextMediaItem(); + + /** + * Seeks to a later position in the current or next {@link MediaItem} (if available). More + * precisely: + * + *

+   * <ul>
+   *   <li>If the timeline is empty or seeking is not possible, does nothing.
+   *   <li>Otherwise, if {@link #hasNextMediaItem() a next media item exists}, seeks to the default
+   *       position of the next {@link MediaItem}.
+   *   <li>Otherwise, if the current {@link MediaItem} is {@link #isCurrentMediaItemLive() live} and
+   *       has not ended, seeks to the live edge of the current {@link MediaItem}.
+   *   <li>Otherwise, does nothing.
+   * </ul>
+   *

      This method must only be called if {@link #COMMAND_SEEK_TO_NEXT} is {@linkplain + * #getAvailableCommands() available}. + */ + void seekToNext(); + + /** + * Attempts to set the playback parameters. Passing {@link PlaybackParameters#DEFAULT} resets the + * player to the default, which means there is no speed or pitch adjustment. + * + *

      Playback parameters changes may cause the player to buffer. {@link + * Listener#onPlaybackParametersChanged(PlaybackParameters)} will be called whenever the currently + * active playback parameters change. + * + *

      This method must only be called if {@link #COMMAND_SET_SPEED_AND_PITCH} is {@linkplain + * #getAvailableCommands() available}. + * + * @param playbackParameters The playback parameters. + */ + void setPlaybackParameters(PlaybackParameters playbackParameters); + + /** + * Changes the rate at which playback occurs. The pitch is not changed. + * + *

      This is equivalent to {@code + * setPlaybackParameters(getPlaybackParameters().withSpeed(speed))}. + * + *

      This method must only be called if {@link #COMMAND_SET_SPEED_AND_PITCH} is {@linkplain + * #getAvailableCommands() available}. + * + * @param speed The linear factor by which playback will be sped up. Must be higher than 0. 1 is + * normal speed, 2 is twice as fast, 0.5 is half normal speed. + */ + void setPlaybackSpeed(@FloatRange(from = 0, fromInclusive = false) float speed); + + /** + * Returns the currently active playback parameters. + * + * @see Listener#onPlaybackParametersChanged(PlaybackParameters) + */ + PlaybackParameters getPlaybackParameters(); + + /** + * Stops playback without resetting the playlist. Use {@link #pause()} rather than this method if + * the intention is to pause playback. + * + *

      Calling this method will cause the playback state to transition to {@link #STATE_IDLE} and + * the player will release the loaded media and resources required for playback. The player + * instance can still be used by calling {@link #prepare()} again, and {@link #release()} must + * still be called on the player if it's no longer required. + * + *

      Calling this method does not clear the playlist, reset the playback position or the playback + * error. + * + *

      This method must only be called if {@link #COMMAND_STOP} is {@linkplain + * #getAvailableCommands() available}. + */ + void stop(); + + /** + * @deprecated Use {@link #stop()} and {@link #clearMediaItems()} (if {@code reset} is true) or + * just {@link #stop()} (if {@code reset} is false). Any player error will be cleared when + * {@link #prepare() re-preparing} the player. + */ + @Deprecated + void stop(boolean reset); + + /** + * Releases the player. This method must be called when the player is no longer required. The + * player must not be used after calling this method. + */ + // TODO(b/261158047): Document that COMMAND_RELEASE must be available once it exists. + void release(); + + /** + * Returns the current tracks. + * + *

      This method must only be called if {@link #COMMAND_GET_TRACKS} is {@linkplain + * #getAvailableCommands() available}. + * + * @see Listener#onTracksChanged(Tracks) + */ + Tracks getCurrentTracks(); + + /** + * Returns the parameters constraining the track selection. + * + * @see Listener#onTrackSelectionParametersChanged} + */ + TrackSelectionParameters getTrackSelectionParameters(); + + /** + * Sets the parameters constraining the track selection. + * + *

      Unsupported parameters will be silently ignored. + * + *

+   * <p>Use {@link #getTrackSelectionParameters()} to retrieve the current parameters. For example,
+   * the following snippet restricts video to SD whilst keeping other track selection parameters
+   * unchanged:
+   *

+   * <pre>{@code
+   * player.setTrackSelectionParameters(
+   *   player.getTrackSelectionParameters()
+   *         .buildUpon()
+   *         .setMaxVideoSizeSd()
+   *         .build())
+   * }</pre>
+   *
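
The same builder round-trip can constrain other dimensions; for instance, a hedged sketch preferring an audio language (illustrative only):

  if (player.isCommandAvailable(Player.COMMAND_SET_TRACK_SELECTION_PARAMETERS)) {
    player.setTrackSelectionParameters(
        player.getTrackSelectionParameters()
            .buildUpon()
            .setPreferredAudioLanguage("en")
            .build());
  }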

      This method must only be called if {@link #COMMAND_SET_TRACK_SELECTION_PARAMETERS} is + * {@linkplain #getAvailableCommands() available}. + */ + void setTrackSelectionParameters(TrackSelectionParameters parameters); + + /** + * Returns the current combined {@link MediaMetadata}, or {@link MediaMetadata#EMPTY} if not + * supported. + * + *

      This {@link MediaMetadata} is a combination of the {@link MediaItem#mediaMetadata MediaItem + * metadata}, the static metadata in the media's {@link Format#metadata Format}, and any timed + * metadata that has been parsed from the media and output via {@link + * Listener#onMetadata(Metadata)}. If a field is populated in the {@link MediaItem#mediaMetadata}, + * it will be prioritised above the same field coming from static or timed metadata. + * + *

      This method must only be called if {@link #COMMAND_GET_MEDIA_ITEMS_METADATA} is {@linkplain + * #getAvailableCommands() available}. + */ + MediaMetadata getMediaMetadata(); + + /** + * Returns the playlist {@link MediaMetadata}, as set by {@link + * #setPlaylistMetadata(MediaMetadata)}, or {@link MediaMetadata#EMPTY} if not supported. + * + *

      This method must only be called if {@link #COMMAND_GET_MEDIA_ITEMS_METADATA} is {@linkplain + * #getAvailableCommands() available}. + */ + MediaMetadata getPlaylistMetadata(); + + /** + * Sets the playlist {@link MediaMetadata}. + * + *

      This method must only be called if {@link #COMMAND_SET_MEDIA_ITEMS_METADATA} is {@linkplain + * #getAvailableCommands() available}. + */ + void setPlaylistMetadata(MediaMetadata mediaMetadata); + + /** + * Returns the current manifest. The type depends on the type of media being played. May be null. + */ + @Nullable + Object getCurrentManifest(); + + /** + * Returns the current {@link Timeline}. Never null, but may be empty. + * + *

      This method must only be called if {@link #COMMAND_GET_TIMELINE} is {@linkplain + * #getAvailableCommands() available}. + * + * @see Listener#onTimelineChanged(Timeline, int) + */ + Timeline getCurrentTimeline(); + + /** + * Returns the index of the period currently being played. + * + *

      This method must only be called if {@link #COMMAND_GET_TIMELINE} is {@linkplain + * #getAvailableCommands() available}. + */ + int getCurrentPeriodIndex(); + + /** + * @deprecated Use {@link #getCurrentMediaItemIndex()} instead. + */ + @Deprecated + int getCurrentWindowIndex(); + + /** + * Returns the index of the current {@link MediaItem} in the {@link #getCurrentTimeline() + * timeline}, or the prospective index if the {@link #getCurrentTimeline() current timeline} is + * empty. + * + *

      This method must only be called if {@link #COMMAND_GET_TIMELINE} is {@linkplain + * #getAvailableCommands() available}. + */ + int getCurrentMediaItemIndex(); + + /** + * @deprecated Use {@link #getNextMediaItemIndex()} instead. + */ + @Deprecated + int getNextWindowIndex(); + + /** + * Returns the index of the {@link MediaItem} that will be played if {@link + * #seekToNextMediaItem()} is called, which may depend on the current repeat mode and whether + * shuffle mode is enabled. Returns {@link C#INDEX_UNSET} if {@link #hasNextMediaItem()} is {@code + * false}. + * + *

      Note: When the repeat mode is {@link #REPEAT_MODE_ONE}, this method behaves the same as when + * the current repeat mode is {@link #REPEAT_MODE_OFF}. See {@link #REPEAT_MODE_ONE} for more + * details. + * + *

      This method must only be called if {@link #COMMAND_GET_TIMELINE} is {@linkplain + * #getAvailableCommands() available}. + */ + int getNextMediaItemIndex(); + + /** + * @deprecated Use {@link #getPreviousMediaItemIndex()} instead. + */ + @Deprecated + int getPreviousWindowIndex(); + + /** + * Returns the index of the {@link MediaItem} that will be played if {@link + * #seekToPreviousMediaItem()} is called, which may depend on the current repeat mode and whether + * shuffle mode is enabled. Returns {@link C#INDEX_UNSET} if {@link #hasPreviousMediaItem()} is + * {@code false}. + * + *

      Note: When the repeat mode is {@link #REPEAT_MODE_ONE}, this method behaves the same as when + * the current repeat mode is {@link #REPEAT_MODE_OFF}. See {@link #REPEAT_MODE_ONE} for more + * details. + * + *

      This method must only be called if {@link #COMMAND_GET_TIMELINE} is {@linkplain + * #getAvailableCommands() available}. + */ + int getPreviousMediaItemIndex(); + + /** + * Returns the currently playing {@link MediaItem}. May be null if the timeline is empty. + * + *

      This method must only be called if {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + * + * @see Listener#onMediaItemTransition(MediaItem, int) + */ + @Nullable + MediaItem getCurrentMediaItem(); + + /** + * Returns the number of {@linkplain MediaItem media items} in the playlist. + * + *

      This method must only be called if {@link #COMMAND_GET_TIMELINE} is {@linkplain + * #getAvailableCommands() available}. + */ + int getMediaItemCount(); + + /** + * Returns the {@link MediaItem} at the given index. + * + *

      This method must only be called if {@link #COMMAND_GET_TIMELINE} is {@linkplain + * #getAvailableCommands() available}. + */ + MediaItem getMediaItemAt(int index); + + /** + * Returns the duration of the current content or ad in milliseconds, or {@link C#TIME_UNSET} if + * the duration is not known. + * + *

      This method must only be called if {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + */ + long getDuration(); + + /** + * Returns the playback position in the current content or ad, in milliseconds, or the prospective + * position in milliseconds if the {@link #getCurrentTimeline() current timeline} is empty. + * + *

      This method must only be called if {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + */ + long getCurrentPosition(); + + /** + * Returns an estimate of the position in the current content or ad up to which data is buffered, + * in milliseconds. + * + *

      This method must only be called if {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + */ + long getBufferedPosition(); + + /** + * Returns an estimate of the percentage in the current content or ad up to which data is + * buffered, or 0 if no estimate is available. + */ + @IntRange(from = 0, to = 100) + int getBufferedPercentage(); + + /** + * Returns an estimate of the total buffered duration from the current position, in milliseconds. + * This includes pre-buffered data for subsequent ads and {@linkplain MediaItem media items}. + * + *

      This method must only be called if {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + */ + long getTotalBufferedDuration(); + + /** + * @deprecated Use {@link #isCurrentMediaItemDynamic()} instead. + */ + @Deprecated + boolean isCurrentWindowDynamic(); + + /** + * Returns whether the current {@link MediaItem} is dynamic (may change when the {@link Timeline} + * is updated), or {@code false} if the {@link Timeline} is empty. + * + *

      This method must only be called if {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + * + * @see Timeline.Window#isDynamic + */ + boolean isCurrentMediaItemDynamic(); + + /** + * @deprecated Use {@link #isCurrentMediaItemLive()} instead. + */ + @Deprecated + boolean isCurrentWindowLive(); + + /** + * Returns whether the current {@link MediaItem} is live, or {@code false} if the {@link Timeline} + * is empty. + * + *

+   * <p>This method must only be called if {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} is {@linkplain
+   * #getAvailableCommands() available}.
+   *
+   * @see Timeline.Window#isLive()
+   */
+  boolean isCurrentMediaItemLive();
+
+  /**
+   * Returns the offset of the current playback position from the live edge in milliseconds, or
+   * {@link C#TIME_UNSET} if the current {@link MediaItem} {@link #isCurrentMediaItemLive() isn't
+   * live} or the offset is unknown.
+   *
+   *

      The offset is calculated as {@code currentTime - playbackPosition}, so should usually be + * positive. + * + *

      Note that this offset may rely on an accurate local time, so this method may return an + * incorrect value if the difference between system clock and server clock is unknown. + * + *

      This method must only be called if {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + */ + long getCurrentLiveOffset(); + + /** + * @deprecated Use {@link #isCurrentMediaItemSeekable()} instead. + */ + @Deprecated + boolean isCurrentWindowSeekable(); + + /** + * Returns whether the current {@link MediaItem} is seekable, or {@code false} if the {@link + * Timeline} is empty. + * + *

      This method must only be called if {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + * + * @see Timeline.Window#isSeekable + */ + boolean isCurrentMediaItemSeekable(); + + /** + * Returns whether the player is currently playing an ad. + * + *

      This method must only be called if {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + */ + boolean isPlayingAd(); + + /** + * If {@link #isPlayingAd()} returns true, returns the index of the ad group in the period + * currently being played. Returns {@link C#INDEX_UNSET} otherwise. + * + *

      This method must only be called if {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + */ + int getCurrentAdGroupIndex(); + + /** + * If {@link #isPlayingAd()} returns true, returns the index of the ad in its ad group. Returns + * {@link C#INDEX_UNSET} otherwise. + * + *

      This method must only be called if {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + */ + int getCurrentAdIndexInAdGroup(); + + /** + * If {@link #isPlayingAd()} returns {@code true}, returns the duration of the current content in + * milliseconds, or {@link C#TIME_UNSET} if the duration is not known. If there is no ad playing, + * the returned duration is the same as that returned by {@link #getDuration()}. + * + *

      This method must only be called if {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + */ + long getContentDuration(); + + /** + * If {@link #isPlayingAd()} returns {@code true}, returns the content position that will be + * played once all ads in the ad group have finished playing, in milliseconds. If there is no ad + * playing, the returned position is the same as that returned by {@link #getCurrentPosition()}. + * + *

      This method must only be called if {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + */ + long getContentPosition(); + + /** + * If {@link #isPlayingAd()} returns {@code true}, returns an estimate of the content position in + * the current content up to which data is buffered, in milliseconds. If there is no ad playing, + * the returned position is the same as that returned by {@link #getBufferedPosition()}. + * + *

      This method must only be called if {@link #COMMAND_GET_CURRENT_MEDIA_ITEM} is {@linkplain + * #getAvailableCommands() available}. + */ + long getContentBufferedPosition(); + + /** + * Returns the attributes for audio playback. + * + *

      This method must only be called if {@link #COMMAND_GET_AUDIO_ATTRIBUTES} is {@linkplain + * #getAvailableCommands() available}. + */ + AudioAttributes getAudioAttributes(); + + /** + * Sets the audio volume, valid values are between 0 (silence) and 1 (unity gain, signal + * unchanged), inclusive. + * + *

      This method must only be called if {@link #COMMAND_SET_VOLUME} is {@linkplain + * #getAvailableCommands() available}. + * + * @param volume Linear output gain to apply to all audio channels. + */ + void setVolume(@FloatRange(from = 0, to = 1.0) float volume); + + /** + * Returns the audio volume, with 0 being silence and 1 being unity gain (signal unchanged). + * + *

      This method must only be called if {@link #COMMAND_GET_VOLUME} is {@linkplain + * #getAvailableCommands() available}. + * + * @return The linear gain applied to all audio channels. + */ + @FloatRange(from = 0, to = 1.0) + float getVolume(); + + /** + * Clears any {@link Surface}, {@link SurfaceHolder}, {@link SurfaceView} or {@link TextureView} + * currently set on the player. + * + *

      This method must only be called if {@link #COMMAND_SET_VIDEO_SURFACE} is {@linkplain + * #getAvailableCommands() available}. + */ + void clearVideoSurface(); + + /** + * Clears the {@link Surface} onto which video is being rendered if it matches the one passed. + * Else does nothing. + * + *

      This method must only be called if {@link #COMMAND_SET_VIDEO_SURFACE} is {@linkplain + * #getAvailableCommands() available}. + * + * @param surface The surface to clear. + */ + void clearVideoSurface(@Nullable Surface surface); + + /** + * Sets the {@link Surface} onto which video will be rendered. The caller is responsible for + * tracking the lifecycle of the surface, and must clear the surface by calling {@code + * setVideoSurface(null)} if the surface is destroyed. + * + *

      If the surface is held by a {@link SurfaceView}, {@link TextureView} or {@link + * SurfaceHolder} then it's recommended to use {@link #setVideoSurfaceView(SurfaceView)}, {@link + * #setVideoTextureView(TextureView)} or {@link #setVideoSurfaceHolder(SurfaceHolder)} rather than + * this method, since passing the holder allows the player to track the lifecycle of the surface + * automatically. + * + *

      This method must only be called if {@link #COMMAND_SET_VIDEO_SURFACE} is {@linkplain + * #getAvailableCommands() available}. + * + * @param surface The {@link Surface}. + */ + void setVideoSurface(@Nullable Surface surface); + + /** + * Sets the {@link SurfaceHolder} that holds the {@link Surface} onto which video will be + * rendered. The player will track the lifecycle of the surface automatically. + * + *

      The thread that calls the {@link SurfaceHolder.Callback} methods must be the thread + * associated with {@link #getApplicationLooper()}. + * + *

      This method must only be called if {@link #COMMAND_SET_VIDEO_SURFACE} is {@linkplain + * #getAvailableCommands() available}. + * + * @param surfaceHolder The surface holder. + */ + void setVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder); + + /** + * Clears the {@link SurfaceHolder} that holds the {@link Surface} onto which video is being + * rendered if it matches the one passed. Else does nothing. + * + *
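// Illustrative sketch (editor's addition): using a raw Surface output as described above. The
// caller owns the Surface lifecycle, so it must clear it before releasing it. Assumes a Player
// named "player" and an app-created SurfaceTexture; names are placeholders.
import android.graphics.SurfaceTexture;
import android.view.Surface;
import com.google.android.exoplayer2.Player;

final class RawSurfaceOutput {
  private final Surface surface;

  RawSurfaceOutput(Player player, SurfaceTexture texture) {
    surface = new Surface(texture);
    player.setVideoSurface(surface);   // COMMAND_SET_VIDEO_SURFACE must be available.
  }

  void release(Player player) {
    player.clearVideoSurface(surface); // Or setVideoSurface(null), per the javadoc above.
    surface.release();
  }
}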

      This method must only be called if {@link #COMMAND_SET_VIDEO_SURFACE} is {@linkplain + * #getAvailableCommands() available}. + * + * @param surfaceHolder The surface holder to clear. + */ + void clearVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder); + + /** + * Sets the {@link SurfaceView} onto which video will be rendered. The player will track the + * lifecycle of the surface automatically. + * + *

      The thread that calls the {@link SurfaceHolder.Callback} methods must be the thread + * associated with {@link #getApplicationLooper()}. + * + *

      This method must only be called if {@link #COMMAND_SET_VIDEO_SURFACE} is {@linkplain + * #getAvailableCommands() available}. + * + * @param surfaceView The surface view. + */ + void setVideoSurfaceView(@Nullable SurfaceView surfaceView); + + /** + * Clears the {@link SurfaceView} onto which video is being rendered if it matches the one passed. + * Else does nothing. + * + *

      This method must only be called if {@link #COMMAND_SET_VIDEO_SURFACE} is {@linkplain + * #getAvailableCommands() available}. + * + * @param surfaceView The texture view to clear. + */ + void clearVideoSurfaceView(@Nullable SurfaceView surfaceView); + + /** + * Sets the {@link TextureView} onto which video will be rendered. The player will track the + * lifecycle of the surface automatically. + * + *

      The thread that calls the {@link TextureView.SurfaceTextureListener} methods must be the + * thread associated with {@link #getApplicationLooper()}. + * + *

      This method must only be called if {@link #COMMAND_SET_VIDEO_SURFACE} is {@linkplain + * #getAvailableCommands() available}. + * + * @param textureView The texture view. + */ + void setVideoTextureView(@Nullable TextureView textureView); + + /** + * Clears the {@link TextureView} onto which video is being rendered if it matches the one passed. + * Else does nothing. + * + *

      This method must only be called if {@link #COMMAND_SET_VIDEO_SURFACE} is {@linkplain + * #getAvailableCommands() available}. + * + * @param textureView The texture view to clear. + */ + void clearVideoTextureView(@Nullable TextureView textureView); + + /** + * Gets the size of the video. + * + *
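// Illustrative sketch (editor's addition): attaching and detaching view-based video outputs so
// the player tracks the surface lifecycle itself, as recommended above. Assumes a Player named
// "player"; the view parameters stand in for whatever the app's layout provides.
import android.view.SurfaceView;
import android.view.TextureView;
import com.google.android.exoplayer2.Player;

final class VideoOutputBinder {
  static void attach(Player player, SurfaceView surfaceView) {
    player.setVideoSurfaceView(surfaceView);   // SurfaceHolder callbacks run on the app looper.
  }

  static void attach(Player player, TextureView textureView) {
    player.setVideoTextureView(textureView);
  }

  static void detach(Player player, SurfaceView surfaceView) {
    player.clearVideoSurfaceView(surfaceView); // No-op if a different output is currently set.
  }
}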

      The video's width and height are {@code 0} if there is no video or its size has not been + * determined yet. + * + * @see Listener#onVideoSizeChanged(VideoSize) + */ + VideoSize getVideoSize(); + + /** + * Gets the size of the surface on which the video is rendered. + * + * @see Listener#onSurfaceSizeChanged(int, int) + */ + Size getSurfaceSize(); + + /** + * Returns the current {@link CueGroup}. + * + *

      This method must only be called if {@link #COMMAND_GET_TEXT} is {@linkplain + * #getAvailableCommands() available}. + */ + CueGroup getCurrentCues(); + + /** Gets the device information. */ + DeviceInfo getDeviceInfo(); + + /** + * Gets the current volume of the device. + * + *
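// Illustrative sketch (editor's addition): observing the values returned by getVideoSize(),
// getSurfaceSize() and getCurrentCues() through the corresponding Player.Listener callbacks.
// The listener below is hypothetical glue code; register it with player.addListener(...).
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.text.CueGroup;
import com.google.android.exoplayer2.video.VideoSize;

final class VideoInfoListener implements Player.Listener {
  @Override
  public void onVideoSizeChanged(VideoSize videoSize) {
    // width/height are 0 until the size is known, mirroring getVideoSize().
  }

  @Override
  public void onSurfaceSizeChanged(int width, int height) {
    // Mirrors getSurfaceSize().
  }

  @Override
  public void onCues(CueGroup cueGroup) {
    // Mirrors getCurrentCues(); requires COMMAND_GET_TEXT to be available.
  }
}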

      For devices with {@link DeviceInfo#PLAYBACK_TYPE_LOCAL local playback}, the volume returned + * by this method varies according to the current {@link C.StreamType stream type}. The stream + * type is determined by {@link AudioAttributes#usage} which can be converted to stream type with + * {@link Util#getStreamTypeForAudioUsage(int)}. + * + *

      For devices with {@link DeviceInfo#PLAYBACK_TYPE_REMOTE remote playback}, the volume of the + * remote device is returned. + * + *

      This method must only be called if {@link #COMMAND_GET_DEVICE_VOLUME} is {@linkplain + * #getAvailableCommands() available}. + */ + @IntRange(from = 0) + int getDeviceVolume(); + + /** + * Gets whether the device is muted or not. + * + *

      This method must only be called if {@link #COMMAND_GET_DEVICE_VOLUME} is {@linkplain + * #getAvailableCommands() available}. + */ + boolean isDeviceMuted(); + + /** + * Sets the volume of the device. + * + *

      This method must only be called if {@link #COMMAND_SET_DEVICE_VOLUME} is {@linkplain + * #getAvailableCommands() available}. + * + * @param volume The volume to set. + */ + void setDeviceVolume(@IntRange(from = 0) int volume); + + /** + * Increases the volume of the device. + * + *

      This method must only be called if {@link #COMMAND_ADJUST_DEVICE_VOLUME} is {@linkplain + * #getAvailableCommands() available}. + */ + void increaseDeviceVolume(); + + /** + * Decreases the volume of the device. + * + *

      This method must only be called if {@link #COMMAND_ADJUST_DEVICE_VOLUME} is {@linkplain + * #getAvailableCommands() available}. + */ + void decreaseDeviceVolume(); + + /** + * Sets the mute state of the device. + * + *
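// Illustrative sketch (editor's addition): adjusting the device (stream) volume rather than the
// player's own audio gain, guarded by the command checks required above. Assumes a Player named
// "player"; the helper name is hypothetical.
import com.google.android.exoplayer2.Player;

final class DeviceVolumeHelper {
  static void nudge(Player player, boolean up) {
    if (player.isCommandAvailable(Player.COMMAND_ADJUST_DEVICE_VOLUME)) {
      if (up) {
        player.increaseDeviceVolume();
      } else {
        player.decreaseDeviceVolume();
      }
    }
    if (player.isCommandAvailable(Player.COMMAND_GET_DEVICE_VOLUME)
        && player.isCommandAvailable(Player.COMMAND_ADJUST_DEVICE_VOLUME)
        && player.isDeviceMuted()) {
      player.setDeviceMuted(false);
    }
  }
}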

      This method must only be called if {@link #COMMAND_ADJUST_DEVICE_VOLUME} is {@linkplain + * #getAvailableCommands() available}. + */ + void setDeviceMuted(boolean muted); + + ArrayList videoListeners = new ArrayList<>(); + + default void addVideoListener(com.google.android.exoplayer2.video.VideoListener listener) { + videoListeners.add(listener); + } + + default void removeVideoListener(com.google.android.exoplayer2.video.VideoListener listener) { + videoListeners.remove(listener); + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlayerMessage.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlayerMessage.java index 49309181a0..a3a677b465 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlayerMessage.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlayerMessage.java @@ -16,8 +16,13 @@ package com.google.android.exoplayer2; import android.os.Handler; +import android.os.Looper; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Renderer.MessageType; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Clock; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.util.concurrent.TimeoutException; /** * Defines a player message which can be sent with a {@link Sender} and received by a {@link @@ -32,11 +37,12 @@ public interface Target { * Handles a message delivered to the target. * * @param messageType The message type. - * @param payload The message payload. + * @param message The message payload. * @throws ExoPlaybackException If an error occurred whilst handling the message. Should only be * thrown by targets that handle messages on the playback thread. */ - void handleMessage(int messageType, @Nullable Object payload) throws ExoPlaybackException; + void handleMessage(@MessageType int messageType, @Nullable Object message) + throws ExoPlaybackException; } /** A sender for messages. */ @@ -52,12 +58,13 @@ public interface Sender { private final Target target; private final Sender sender; + private final Clock clock; private final Timeline timeline; private int type; @Nullable private Object payload; - private Handler handler; - private int windowIndex; + private Looper looper; + private int mediaItemIndex; private long positionMs; private boolean deleteAfterDelivery; private boolean isSent; @@ -72,22 +79,25 @@ public interface Sender { * @param target The {@link Target} the message is sent to. * @param timeline The timeline used when setting the position with {@link #setPosition(long)}. If * set to {@link Timeline#EMPTY}, any position can be specified. - * @param defaultWindowIndex The default window index in the {@code timeline} when no other window - * index is specified. - * @param defaultHandler The default handler to send the message on when no other handler is + * @param defaultMediaItemIndex The default media item index in the {@code timeline} when no other + * media item index is specified. + * @param clock The {@link Clock}. + * @param defaultLooper The default {@link Looper} to send the message on when no other looper is * specified. 
*/ public PlayerMessage( Sender sender, Target target, Timeline timeline, - int defaultWindowIndex, - Handler defaultHandler) { + int defaultMediaItemIndex, + Clock clock, + Looper defaultLooper) { this.sender = sender; this.target = target; this.timeline = timeline; - this.handler = defaultHandler; - this.windowIndex = defaultWindowIndex; + this.looper = defaultLooper; + this.clock = clock; + this.mediaItemIndex = defaultMediaItemIndex; this.positionMs = C.TIME_UNSET; this.deleteAfterDelivery = true; } @@ -109,6 +119,7 @@ public Target getTarget() { * @return This message. * @throws IllegalStateException If {@link #send()} has already been called. */ + @CanIgnoreReturnValue public PlayerMessage setType(int messageType) { Assertions.checkState(!isSent); this.type = messageType; @@ -127,6 +138,7 @@ public int getType() { * @return This message. * @throws IllegalStateException If {@link #send()} has already been called. */ + @CanIgnoreReturnValue public PlayerMessage setPayload(@Nullable Object payload) { Assertions.checkState(!isSent); this.payload = payload; @@ -140,39 +152,53 @@ public Object getPayload() { } /** - * Sets the handler the message is delivered on. + * @deprecated Use {@link #setLooper(Looper)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public PlayerMessage setHandler(Handler handler) { + return setLooper(handler.getLooper()); + } + + /** + * Sets the {@link Looper} the message is delivered on. * - * @param handler A {@link Handler}. + * @param looper A {@link Looper}. * @return This message. * @throws IllegalStateException If {@link #send()} has already been called. */ - public PlayerMessage setHandler(Handler handler) { + @CanIgnoreReturnValue + public PlayerMessage setLooper(Looper looper) { Assertions.checkState(!isSent); - this.handler = handler; + this.looper = looper; return this; } - /** Returns the handler the message is delivered on. */ - public Handler getHandler() { - return handler; + /** Returns the {@link Looper} the message is delivered on. */ + public Looper getLooper() { + return looper; } /** - * Returns position in window at {@link #getWindowIndex()} at which the message will be delivered, - * in milliseconds. If {@link C#TIME_UNSET}, the message will be delivered immediately. + * Returns position in the media item at {@link #getMediaItemIndex()} at which the message will be + * delivered, in milliseconds. If {@link C#TIME_UNSET}, the message will be delivered immediately. + * If {@link C#TIME_END_OF_SOURCE}, the message will be delivered at the end of the media item at + * {@link #getMediaItemIndex()}. */ public long getPositionMs() { return positionMs; } /** - * Sets a position in the current window at which the message will be delivered. + * Sets a position in the current media item at which the message will be delivered. * - * @param positionMs The position in the current window at which the message will be sent, in - * milliseconds. + * @param positionMs The position in the current media item at which the message will be sent, in + * milliseconds, or {@link C#TIME_END_OF_SOURCE} to deliver the message at the end of the + * current media item. * @return This message. * @throws IllegalStateException If {@link #send()} has already been called. */ + @CanIgnoreReturnValue public PlayerMessage setPosition(long positionMs) { Assertions.checkState(!isSent); this.positionMs = positionMs; @@ -180,30 +206,33 @@ public PlayerMessage setPosition(long positionMs) { } /** - * Sets a position in a window at which the message will be delivered. 
+ * Sets a position in a media item at which the message will be delivered. * - * @param windowIndex The index of the window at which the message will be sent. - * @param positionMs The position in the window with index {@code windowIndex} at which the - * message will be sent, in milliseconds. + * @param mediaItemIndex The index of the media item at which the message will be sent. + * @param positionMs The position in the media item with index {@code mediaItemIndex} at which the + * message will be sent, in milliseconds, or {@link C#TIME_END_OF_SOURCE} to deliver the + * message at the end of the media item with index {@code mediaItemIndex}. * @return This message. * @throws IllegalSeekPositionException If the timeline returned by {@link #getTimeline()} is not - * empty and the provided window index is not within the bounds of the timeline. + * empty and the provided media item index is not within the bounds of the timeline. * @throws IllegalStateException If {@link #send()} has already been called. */ - public PlayerMessage setPosition(int windowIndex, long positionMs) { + @CanIgnoreReturnValue + public PlayerMessage setPosition(int mediaItemIndex, long positionMs) { Assertions.checkState(!isSent); Assertions.checkArgument(positionMs != C.TIME_UNSET); - if (windowIndex < 0 || (!timeline.isEmpty() && windowIndex >= timeline.getWindowCount())) { - throw new IllegalSeekPositionException(timeline, windowIndex, positionMs); + if (mediaItemIndex < 0 + || (!timeline.isEmpty() && mediaItemIndex >= timeline.getWindowCount())) { + throw new IllegalSeekPositionException(timeline, mediaItemIndex, positionMs); } - this.windowIndex = windowIndex; + this.mediaItemIndex = mediaItemIndex; this.positionMs = positionMs; return this; } - /** Returns window index at which the message will be delivered. */ - public int getWindowIndex() { - return windowIndex; + /** Returns media item index at which the message will be delivered. */ + public int getMediaItemIndex() { + return mediaItemIndex; } /** @@ -215,6 +244,7 @@ public int getWindowIndex() { * @return This message. * @throws IllegalStateException If {@link #send()} has already been called. */ + @CanIgnoreReturnValue public PlayerMessage setDeleteAfterDelivery(boolean deleteAfterDelivery) { Assertions.checkState(!isSent); this.deleteAfterDelivery = deleteAfterDelivery; @@ -228,12 +258,12 @@ public boolean getDeleteAfterDelivery() { /** * Sends the message. If the target throws an {@link ExoPlaybackException} then it is propagated - * out of the player as an error using {@link - * Player.EventListener#onPlayerError(ExoPlaybackException)}. + * out of the player as an error using {@link Player.Listener#onPlayerError(PlaybackException)}. * * @return This message. * @throws IllegalStateException If this message has already been sent. */ + @CanIgnoreReturnValue public PlayerMessage send() { Assertions.checkState(!isSent); if (positionMs == C.TIME_UNSET) { @@ -250,6 +280,7 @@ public PlayerMessage send() { * @return This message. * @throws IllegalStateException If this method is called before {@link #send()}. */ + @CanIgnoreReturnValue public synchronized PlayerMessage cancel() { Assertions.checkState(isSent); isCanceled = true; @@ -262,23 +293,37 @@ public synchronized boolean isCanceled() { return isCanceled; } + /** + * Marks the message as processed. Should only be called by a {@link Sender} and may be called + * multiple times. + * + * @param isDelivered Whether the message has been delivered to its target. 
The message is + * considered as being delivered when this method has been called with {@code isDelivered} set + * to true at least once. + */ + public synchronized void markAsProcessed(boolean isDelivered) { + this.isDelivered |= isDelivered; + isProcessed = true; + notifyAll(); + } + /** * Blocks until after the message has been delivered or the player is no longer able to deliver * the message. * - *

      Note that this method can't be called if the current thread is the same thread used by the - * message handler set with {@link #setHandler(Handler)} as it would cause a deadlock. + *

      Note that this method must not be called if the current thread is the same thread used by + * the message {@link #getLooper() looper} as it would cause a deadlock. * * @return Whether the message was delivered successfully. * @throws IllegalStateException If this method is called before {@link #send()}. * @throws IllegalStateException If this method is called on the same thread used by the message - * handler set with {@link #setHandler(Handler)}. + * {@link #getLooper() looper}. * @throws InterruptedException If the current thread is interrupted while waiting for the message * to be delivered. */ public synchronized boolean blockUntilDelivered() throws InterruptedException { Assertions.checkState(isSent); - Assertions.checkState(handler.getLooper().getThread() != Thread.currentThread()); + Assertions.checkState(looper.getThread() != Thread.currentThread()); while (!isProcessed) { wait(); } @@ -286,16 +331,37 @@ public synchronized boolean blockUntilDelivered() throws InterruptedException { } /** - * Marks the message as processed. Should only be called by a {@link Sender} and may be called - * multiple times. + * Blocks until after the message has been delivered or the player is no longer able to deliver + * the message or the specified timeout elapsed. * - * @param isDelivered Whether the message has been delivered to its target. The message is - * considered as being delivered when this method has been called with {@code isDelivered} set - * to true at least once. + *
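// Illustrative sketch (editor's addition): sending a PlayerMessage using the renamed
// media-item-based position setter and the timed blockUntilDelivered(long) introduced in this
// file. Assumes an ExoPlayer named "player" and an app-supplied PlayerMessage.Target named
// "target"; both names are placeholders.
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.PlayerMessage;
import java.util.concurrent.TimeoutException;

final class MessageExample {
  static void sendAtPosition(ExoPlayer player, PlayerMessage.Target target)
      throws InterruptedException, TimeoutException {
    PlayerMessage message =
        player
            .createMessage(target)
            .setType(/* messageType= */ 10_000) // An app-defined type (>= MSG_CUSTOM_BASE).
            .setPayload("payload")
            .setPosition(/* mediaItemIndex= */ 0, /* positionMs= */ 30_000)
            .send();
    // Must not be called on the message's looper thread, as that would deadlock.
    boolean delivered = message.blockUntilDelivered(/* timeoutMs= */ 2_000);
    if (!delivered) {
      // The player was no longer able to deliver the message.
    }
  }
}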

      Note that this method must not be called if the current thread is the same thread used by + * the message {@link #getLooper() looper} as it would cause a deadlock. + * + * @param timeoutMs The timeout in milliseconds. + * @return Whether the message was delivered successfully. + * @throws IllegalStateException If this method is called before {@link #send()}. + * @throws IllegalStateException If this method is called on the same thread used by the message + * {@link #getLooper() looper}. + * @throws TimeoutException If the {@code timeoutMs} elapsed and this message has not been + * delivered and the player is still able to deliver the message. + * @throws InterruptedException If the current thread is interrupted while waiting for the message + * to be delivered. */ - public synchronized void markAsProcessed(boolean isDelivered) { - this.isDelivered |= isDelivered; - isProcessed = true; - notifyAll(); + public synchronized boolean blockUntilDelivered(long timeoutMs) + throws InterruptedException, TimeoutException { + Assertions.checkState(isSent); + Assertions.checkState(looper.getThread() != Thread.currentThread()); + + long deadlineMs = clock.elapsedRealtime() + timeoutMs; + long remainingMs = timeoutMs; + while (!isProcessed && remainingMs > 0) { + clock.onThreadBlocked(); + wait(remainingMs); + remainingMs = deadlineMs - clock.elapsedRealtime(); + } + if (!isProcessed) { + throw new TimeoutException("Message delivery timed out."); + } + return isDelivered; } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaylistTimeline.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaylistTimeline.java new file mode 100644 index 0000000000..3b93041348 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/PlaylistTimeline.java @@ -0,0 +1,113 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import com.google.android.exoplayer2.source.ShuffleOrder; +import com.google.android.exoplayer2.util.Util; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; + +/** Timeline exposing concatenated timelines of playlist media sources. */ +/* package */ final class PlaylistTimeline extends AbstractConcatenatedTimeline { + + private final int windowCount; + private final int periodCount; + private final int[] firstPeriodInChildIndices; + private final int[] firstWindowInChildIndices; + private final Timeline[] timelines; + private final Object[] uids; + private final HashMap childIndexByUid; + + /** Creates an instance. 
*/ + public PlaylistTimeline( + Collection mediaSourceInfoHolders, + ShuffleOrder shuffleOrder) { + super(/* isAtomic= */ false, shuffleOrder); + int childCount = mediaSourceInfoHolders.size(); + firstPeriodInChildIndices = new int[childCount]; + firstWindowInChildIndices = new int[childCount]; + timelines = new Timeline[childCount]; + uids = new Object[childCount]; + childIndexByUid = new HashMap<>(); + int index = 0; + int windowCount = 0; + int periodCount = 0; + for (MediaSourceInfoHolder mediaSourceInfoHolder : mediaSourceInfoHolders) { + timelines[index] = mediaSourceInfoHolder.getTimeline(); + firstWindowInChildIndices[index] = windowCount; + firstPeriodInChildIndices[index] = periodCount; + windowCount += timelines[index].getWindowCount(); + periodCount += timelines[index].getPeriodCount(); + uids[index] = mediaSourceInfoHolder.getUid(); + childIndexByUid.put(uids[index], index++); + } + this.windowCount = windowCount; + this.periodCount = periodCount; + } + + /** Returns the child timelines. */ + /* package */ List getChildTimelines() { + return Arrays.asList(timelines); + } + + @Override + protected int getChildIndexByPeriodIndex(int periodIndex) { + return Util.binarySearchFloor(firstPeriodInChildIndices, periodIndex + 1, false, false); + } + + @Override + protected int getChildIndexByWindowIndex(int windowIndex) { + return Util.binarySearchFloor(firstWindowInChildIndices, windowIndex + 1, false, false); + } + + @Override + protected int getChildIndexByChildUid(Object childUid) { + Integer index = childIndexByUid.get(childUid); + return index == null ? C.INDEX_UNSET : index; + } + + @Override + protected Timeline getTimelineByChildIndex(int childIndex) { + return timelines[childIndex]; + } + + @Override + protected int getFirstPeriodIndexByChildIndex(int childIndex) { + return firstPeriodInChildIndices[childIndex]; + } + + @Override + protected int getFirstWindowIndexByChildIndex(int childIndex) { + return firstWindowInChildIndices[childIndex]; + } + + @Override + protected Object getChildUidByChildIndex(int childIndex) { + return uids[childIndex]; + } + + @Override + public int getWindowCount() { + return windowCount; + } + + @Override + public int getPeriodCount() { + return periodCount; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/Rating.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/Rating.java new file mode 100644 index 0000000000..1962db64c0 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/Rating.java @@ -0,0 +1,85 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2; + +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.os.Bundle; +import androidx.annotation.IntDef; +import com.google.android.exoplayer2.util.Util; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * A rating for media content. The style of a rating can be one of {@link HeartRating}, {@link + * PercentageRating}, {@link StarRating}, or {@link ThumbRating}. + */ +public abstract class Rating implements Bundleable { + + /** A float value that denotes the rating is unset. */ + /* package */ static final float RATING_UNSET = -1.0f; + + // Default package-private constructor to prevent extending Rating class outside this package. + /* package */ Rating() {} + + /** Whether the rating exists or not. */ + public abstract boolean isRated(); + + // Bundleable implementation. + + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + RATING_TYPE_UNSET, + RATING_TYPE_HEART, + RATING_TYPE_PERCENTAGE, + RATING_TYPE_STAR, + RATING_TYPE_THUMB + }) + /* package */ @interface RatingType {} + + /* package */ static final int RATING_TYPE_UNSET = -1; + /* package */ static final int RATING_TYPE_HEART = 0; + /* package */ static final int RATING_TYPE_PERCENTAGE = 1; + /* package */ static final int RATING_TYPE_STAR = 2; + /* package */ static final int RATING_TYPE_THUMB = 3; + + /* package */ static final String FIELD_RATING_TYPE = Util.intToStringMaxRadix(0); + + /** Object that can restore a {@link Rating} from a {@link Bundle}. */ + public static final Creator CREATOR = Rating::fromBundle; + + private static Rating fromBundle(Bundle bundle) { + @RatingType + int ratingType = bundle.getInt(FIELD_RATING_TYPE, /* defaultValue= */ RATING_TYPE_UNSET); + switch (ratingType) { + case RATING_TYPE_HEART: + return HeartRating.CREATOR.fromBundle(bundle); + case RATING_TYPE_PERCENTAGE: + return PercentageRating.CREATOR.fromBundle(bundle); + case RATING_TYPE_STAR: + return StarRating.CREATOR.fromBundle(bundle); + case RATING_TYPE_THUMB: + return ThumbRating.CREATOR.fromBundle(bundle); + case RATING_TYPE_UNSET: + default: + throw new IllegalArgumentException("Unknown RatingType: " + ratingType); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/Renderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/Renderer.java index b699162e2f..f8ffe47ef3 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/Renderer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/Renderer.java @@ -15,14 +15,26 @@ */ package com.google.android.exoplayer2; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.media.MediaCodec; +import android.view.Surface; import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.audio.AudioAttributes; +import com.google.android.exoplayer2.audio.AuxEffectInfo; import com.google.android.exoplayer2.source.SampleStream; import com.google.android.exoplayer2.util.MediaClock; +import com.google.android.exoplayer2.util.Util; +import com.google.android.exoplayer2.video.VideoDecoderOutputBufferRenderer; +import com.google.android.exoplayer2.video.VideoFrameMetadataListener; +import com.google.android.exoplayer2.video.spherical.CameraMotionListener; import java.io.IOException; import 
java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** * Renders media read from a {@link SampleStream}. @@ -37,18 +49,177 @@ */ public interface Renderer extends PlayerMessage.Target { + /** + * Some renderers can signal when {@link #render(long, long)} should be called. + * + *

      That allows the player to sleep until the next wakeup, instead of calling {@link + * #render(long, long)} in a tight loop. The aim of this interrupt based scheduling is to save + * power. + */ + interface WakeupListener { + + /** + * The renderer no longer needs to render until the next wakeup. + * + *

Must be called from the thread on which ExoPlayer invokes the renderer. + */ + void onSleep(); + + /** + * The renderer needs to render some frames. The client should call {@link #render(long, long)} + * at its earliest convenience. + * + *

      Can be called from any thread. + */ + void onWakeup(); + } + + /** + * Represents a type of message that can be passed to a renderer. May be one of {@link + * #MSG_SET_VIDEO_OUTPUT}, {@link #MSG_SET_VOLUME}, {@link #MSG_SET_AUDIO_ATTRIBUTES}, {@link + * #MSG_SET_SCALING_MODE}, {@link #MSG_SET_CHANGE_FRAME_RATE_STRATEGY}, {@link + * #MSG_SET_AUX_EFFECT_INFO}, {@link #MSG_SET_VIDEO_FRAME_METADATA_LISTENER}, {@link + * #MSG_SET_CAMERA_MOTION_LISTENER}, {@link #MSG_SET_SKIP_SILENCE_ENABLED}, {@link + * #MSG_SET_AUDIO_SESSION_ID} or {@link #MSG_SET_WAKEUP_LISTENER}. May also be an app-defined + * value (see {@link #MSG_CUSTOM_BASE}). + */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef( + open = true, + value = { + MSG_SET_VIDEO_OUTPUT, + MSG_SET_VOLUME, + MSG_SET_AUDIO_ATTRIBUTES, + MSG_SET_SCALING_MODE, + MSG_SET_CHANGE_FRAME_RATE_STRATEGY, + MSG_SET_AUX_EFFECT_INFO, + MSG_SET_VIDEO_FRAME_METADATA_LISTENER, + MSG_SET_CAMERA_MOTION_LISTENER, + MSG_SET_SKIP_SILENCE_ENABLED, + MSG_SET_AUDIO_SESSION_ID, + MSG_SET_WAKEUP_LISTENER + }) + public @interface MessageType {} + /** + * The type of a message that can be passed to a video renderer via {@link + * ExoPlayer#createMessage(PlayerMessage.Target)}. The message payload is normally a {@link + * Surface}, however some video renderers may accept other outputs (e.g., {@link + * VideoDecoderOutputBufferRenderer}). + * + *

      If the receiving renderer does not support the payload type as an output, then it will clear + * any existing output that it has. + */ + int MSG_SET_VIDEO_OUTPUT = 1; + /** + * A type of a message that can be passed to an audio renderer via {@link + * ExoPlayer#createMessage(PlayerMessage.Target)}. The message payload should be a {@link Float} + * with 0 being silence and 1 being unity gain. + */ + int MSG_SET_VOLUME = 2; + /** + * A type of a message that can be passed to an audio renderer via {@link + * ExoPlayer#createMessage(PlayerMessage.Target)}. The message payload should be an {@link + * AudioAttributes} instance that will configure the underlying audio track. If not set, the + * default audio attributes will be used. They are suitable for general media playback. + * + *

      Setting the audio attributes during playback may introduce a short gap in audio output as + * the audio track is recreated. A new audio session id will also be generated. + * + *

      If tunneling is enabled by the track selector, the specified audio attributes will be + * ignored, but they will take effect if audio is later played without tunneling. + * + *

      If the device is running a build before platform API version 21, audio attributes cannot be + * set directly on the underlying audio track. In this case, the usage will be mapped onto an + * equivalent stream type using {@link Util#getStreamTypeForAudioUsage(int)}. + * + *

      To get audio attributes that are equivalent to a legacy stream type, pass the stream type to + * {@link Util#getAudioUsageForStreamType(int)} and use the returned {@link C.AudioUsage} to build + * an audio attributes instance. + */ + int MSG_SET_AUDIO_ATTRIBUTES = 3; + /** + * The type of a message that can be passed to a {@link MediaCodec}-based video renderer via + * {@link ExoPlayer#createMessage(PlayerMessage.Target)}. The message payload should be one of the + * integer scaling modes in {@link C.VideoScalingMode}. + * + *

      Note that the scaling mode only applies if the {@link Surface} targeted by the renderer is + * owned by a {@link android.view.SurfaceView}. + */ + int MSG_SET_SCALING_MODE = 4; + /** + * The type of a message that can be passed to a video renderer via {@link + * ExoPlayer#createMessage(PlayerMessage.Target)}. The message payload should be one of the + * integer strategy constants in {@link C.VideoChangeFrameRateStrategy}. + */ + int MSG_SET_CHANGE_FRAME_RATE_STRATEGY = 5; + /** + * A type of a message that can be passed to an audio renderer via {@link + * ExoPlayer#createMessage(PlayerMessage.Target)}. The message payload should be an {@link + * AuxEffectInfo} instance representing an auxiliary audio effect for the underlying audio track. + */ + int MSG_SET_AUX_EFFECT_INFO = 6; + /** + * The type of a message that can be passed to a video renderer via {@link + * ExoPlayer#createMessage(PlayerMessage.Target)}. The message payload should be a {@link + * VideoFrameMetadataListener} instance, or null. + */ + int MSG_SET_VIDEO_FRAME_METADATA_LISTENER = 7; + /** + * The type of a message that can be passed to a camera motion renderer via {@link + * ExoPlayer#createMessage(PlayerMessage.Target)}. The message payload should be a {@link + * CameraMotionListener} instance, or null. + */ + int MSG_SET_CAMERA_MOTION_LISTENER = 8; + /** + * The type of a message that can be passed to an audio renderer via {@link + * ExoPlayer#createMessage(PlayerMessage.Target)}. The message payload should be a {@link Boolean} + * instance telling whether to enable or disable skipping silences in the audio stream. + */ + int MSG_SET_SKIP_SILENCE_ENABLED = 9; + /** + * The type of a message that can be passed to audio and video renderers via {@link + * ExoPlayer#createMessage(PlayerMessage.Target)}. The message payload should be an {@link + * Integer} instance representing the audio session ID that will be attached to the underlying + * audio track. Video renderers that support tunneling will use the audio session ID when + * tunneling is enabled. + */ + int MSG_SET_AUDIO_SESSION_ID = 10; + /** + * The type of a message that can be passed to a {@link Renderer} via {@link + * ExoPlayer#createMessage(PlayerMessage.Target)}, to inform the renderer that it can schedule + * waking up another component. + * + *

      The message payload must be a {@link WakeupListener} instance. + */ + int MSG_SET_WAKEUP_LISTENER = 11; + /** + * The type of a message that can be passed to audio renderers via {@link + * ExoPlayer#createMessage(PlayerMessage.Target)}. The message payload should be an {@link + * android.media.AudioDeviceInfo} instance representing the preferred audio device, or null to + * restore the default. + */ + int MSG_SET_PREFERRED_AUDIO_DEVICE = 12; + /** + * Applications or extensions may define custom {@code MSG_*} constants that can be passed to + * renderers. These custom constants must be greater than or equal to this value. + */ + int MSG_CUSTOM_BASE = 10000; + /** * The renderer states. One of {@link #STATE_DISABLED}, {@link #STATE_ENABLED} or {@link * #STATE_STARTED}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({STATE_DISABLED, STATE_ENABLED, STATE_STARTED}) @interface State {} /** - * The renderer is disabled. A renderer in this state may hold resources that it requires for - * rendering (e.g. media decoders), for use if it's subsequently enabled. {@link #reset()} can be - * called to force the renderer to release these resources. + * The renderer is disabled. A renderer in this state will not proactively acquire resources that + * it requires for rendering (e.g., media decoders), but may continue to hold any that it already + * has. {@link #reset()} can be called to force the renderer to release such resources. */ int STATE_DISABLED = 0; /** @@ -64,12 +235,20 @@ public interface Renderer extends PlayerMessage.Target { int STATE_STARTED = 2; /** - * Returns the track type that the {@link Renderer} handles. For example, a video renderer will - * return {@link C#TRACK_TYPE_VIDEO}, an audio renderer will return {@link C#TRACK_TYPE_AUDIO}, a - * text renderer will return {@link C#TRACK_TYPE_TEXT}, and so on. + * Returns the name of this renderer, for logging and debugging purposes. Should typically be the + * renderer's (un-obfuscated) class name. + * + * @return The name of this renderer. + */ + String getName(); + + /** + * Returns the track type that the renderer handles. * - * @return One of the {@code TRACK_TYPE_*} constants defined in {@link C}. + * @see ExoPlayer#getRendererType(int) + * @return The {@link C.TrackType track type}. */ + @C.TrackType int getTrackType(); /** @@ -80,11 +259,12 @@ public interface Renderer extends PlayerMessage.Target { RendererCapabilities getCapabilities(); /** - * Sets the index of this renderer within the player. + * Initializes the renderer for playback with a player. * - * @param index The renderer index. + * @param index The renderer index within the player. + * @param playerId The {@link PlayerId} of the player. */ - void setIndex(int index); + void init(int index, PlayerId playerId); /** * If the renderer advances its own playback position then this method returns a corresponding @@ -108,28 +288,39 @@ public interface Renderer extends PlayerMessage.Target { /** * Enables the renderer to consume from the specified {@link SampleStream}. - *
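// Illustrative sketch (editor's addition): the MSG_* constants above are used as PlayerMessage
// types targeted at a specific renderer. Assumes an ExoPlayer named "player" and a reference to
// one of its renderers named "videoRenderer" (e.g. kept when building the RenderersFactory);
// both names are placeholders.
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.Renderer;

final class RendererMessageExample {
  static void setScalingMode(ExoPlayer player, Renderer videoRenderer) {
    player
        .createMessage(videoRenderer)
        .setType(Renderer.MSG_SET_SCALING_MODE)
        .setPayload(C.VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING)
        .send();
  }
}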

      - * This method may be called when the renderer is in the following states: - * {@link #STATE_DISABLED}. + * + *

      This method may be called when the renderer is in the following states: {@link + * #STATE_DISABLED}. * * @param configuration The renderer configuration. * @param formats The enabled formats. * @param stream The {@link SampleStream} from which the renderer should consume. * @param positionUs The player's current position. * @param joining Whether this renderer is being enabled to join an ongoing playback. - * @param offsetUs The offset to be added to timestamps of buffers read from {@code stream} - * before they are rendered. + * @param mayRenderStartOfStream Whether this renderer is allowed to render the start of the + * stream even if the state is not {@link #STATE_STARTED} yet. + * @param startPositionUs The start position of the stream in renderer time (microseconds). + * @param offsetUs The offset to be added to timestamps of buffers read from {@code stream} before + * they are rendered. * @throws ExoPlaybackException If an error occurs. */ - void enable(RendererConfiguration configuration, Format[] formats, SampleStream stream, - long positionUs, boolean joining, long offsetUs) throws ExoPlaybackException; + void enable( + RendererConfiguration configuration, + Format[] formats, + SampleStream stream, + long positionUs, + boolean joining, + boolean mayRenderStartOfStream, + long startPositionUs, + long offsetUs) + throws ExoPlaybackException; /** * Starts the renderer, meaning that calls to {@link #render(long, long)} will cause media to be * rendered. - *

      - * This method may be called when the renderer is in the following states: - * {@link #STATE_ENABLED}. + * + *

      This method may be called when the renderer is in the following states: {@link + * #STATE_ENABLED}. * * @throws ExoPlaybackException If an error occurs. */ @@ -137,17 +328,18 @@ void enable(RendererConfiguration configuration, Format[] formats, SampleStream /** * Replaces the {@link SampleStream} from which samples will be consumed. - *

      - * This method may be called when the renderer is in the following states: - * {@link #STATE_ENABLED}, {@link #STATE_STARTED}. + * + *

      This method may be called when the renderer is in the following states: {@link + * #STATE_ENABLED}, {@link #STATE_STARTED}. * * @param formats The enabled formats. * @param stream The {@link SampleStream} from which the renderer should consume. + * @param startPositionUs The start position of the new stream in renderer time (microseconds). * @param offsetUs The offset to be added to timestamps of buffers read from {@code stream} before * they are rendered. * @throws ExoPlaybackException If an error occurs. */ - void replaceStream(Format[] formats, SampleStream stream, long offsetUs) + void replaceStream(Format[] formats, SampleStream stream, long startPositionUs, long offsetUs) throws ExoPlaybackException; /** Returns the {@link SampleStream} being consumed, or null if the renderer is disabled. */ @@ -156,16 +348,15 @@ void replaceStream(Format[] formats, SampleStream stream, long offsetUs) /** * Returns whether the renderer has read the current {@link SampleStream} to the end. - *

      - * This method may be called when the renderer is in the following states: - * {@link #STATE_ENABLED}, {@link #STATE_STARTED}. + * + *

      This method may be called when the renderer is in the following states: {@link + * #STATE_ENABLED}, {@link #STATE_STARTED}. */ boolean hasReadStreamToEnd(); /** - * Returns the playback position up to which the renderer has read samples from the current {@link - * SampleStream}, in microseconds, or {@link C#TIME_END_OF_SOURCE} if the renderer has read the - * current {@link SampleStream} to the end. + * Returns the renderer time up to which the renderer has read samples, in microseconds, or {@link + * C#TIME_END_OF_SOURCE} if the renderer has read the current {@link SampleStream} to the end. * *

      This method may be called when the renderer is in the following states: {@link * #STATE_ENABLED}, {@link #STATE_STARTED}. @@ -175,9 +366,9 @@ void replaceStream(Format[] formats, SampleStream stream, long offsetUs) /** * Signals to the renderer that the current {@link SampleStream} will be the final one supplied * before it is next disabled or reset. - *

      - * This method may be called when the renderer is in the following states: - * {@link #STATE_ENABLED}, {@link #STATE_STARTED}. + * + *

      This method may be called when the renderer is in the following states: {@link + * #STATE_ENABLED}, {@link #STATE_STARTED}. */ void setCurrentStreamFinal(); @@ -190,9 +381,9 @@ void replaceStream(Format[] formats, SampleStream stream, long offsetUs) /** * Throws an error that's preventing the renderer from reading from its {@link SampleStream}. Does * nothing if no such error exists. - *

      - * This method may be called when the renderer is in the following states: - * {@link #STATE_ENABLED}, {@link #STATE_STARTED}. + * + *

      This method may be called when the renderer is in the following states: {@link + * #STATE_ENABLED}, {@link #STATE_STARTED}. * * @throws IOException An error that's preventing the renderer from making progress or buffering * more data. @@ -201,12 +392,12 @@ void replaceStream(Format[] formats, SampleStream stream, long offsetUs) /** * Signals to the renderer that a position discontinuity has occurred. - *

      - * After a position discontinuity, the renderer's {@link SampleStream} is guaranteed to provide + * + *

      After a position discontinuity, the renderer's {@link SampleStream} is guaranteed to provide * samples starting from a key frame. - *

      - * This method may be called when the renderer is in the following states: - * {@link #STATE_ENABLED}, {@link #STATE_STARTED}. + * + *

      This method may be called when the renderer is in the following states: {@link + * #STATE_ENABLED}, {@link #STATE_STARTED}. * * @param positionUs The new playback position in microseconds. * @throws ExoPlaybackException If an error occurs handling the reset. @@ -214,34 +405,41 @@ void replaceStream(Format[] formats, SampleStream stream, long offsetUs) void resetPosition(long positionUs) throws ExoPlaybackException; /** - * Sets the operating rate of this renderer, where 1 is the default rate, 2 is twice the default - * rate, 0.5 is half the default rate and so on. The operating rate is a hint to the renderer of - * the speed at which playback will proceed, and may be used for resource planning. + * Indicates the playback speed to this renderer. * *

      The default implementation is a no-op. * - * @param operatingRate The operating rate. - * @throws ExoPlaybackException If an error occurs handling the operating rate. + * @param currentPlaybackSpeed The factor by which playback is currently sped up. + * @param targetPlaybackSpeed The target factor by which playback should be sped up. This may be + * different from {@code currentPlaybackSpeed}, for example, if the speed is temporarily + * adjusted for live playback. + * @throws ExoPlaybackException If an error occurs handling the playback speed. */ - default void setOperatingRate(float operatingRate) throws ExoPlaybackException {} + default void setPlaybackSpeed(float currentPlaybackSpeed, float targetPlaybackSpeed) + throws ExoPlaybackException {} /** * Incrementally renders the {@link SampleStream}. - *

      - * If the renderer is in the {@link #STATE_ENABLED} state then each call to this method will do - * work toward being ready to render the {@link SampleStream} when the renderer is started. It may - * also render the very start of the media, for example the first frame of a video stream. If the + * + *

      If the renderer is in the {@link #STATE_ENABLED} state then each call to this method will do + * work toward being ready to render the {@link SampleStream} when the renderer is started. If the * renderer is in the {@link #STATE_STARTED} state then calls to this method will render the * {@link SampleStream} in sync with the specified media positions. - *

      - * This method should return quickly, and should not block if the renderer is unable to make + * + *

      The renderer may also render the very start of the media at the current position (e.g. the + * first frame of a video stream) while still in the {@link #STATE_ENABLED} state, unless it's the + * initial start of the media after calling {@link #enable(RendererConfiguration, Format[], + * SampleStream, long, boolean, boolean, long, long)} with {@code mayRenderStartOfStream} set to + * {@code false}. + * + *

      This method should return quickly, and should not block if the renderer is unable to make * useful progress. - *

      - * This method may be called when the renderer is in the following states: - * {@link #STATE_ENABLED}, {@link #STATE_STARTED}. * - * @param positionUs The current media time in microseconds, measured at the start of the - * current iteration of the rendering loop. + *

      This method may be called when the renderer is in the following states: {@link + * #STATE_ENABLED}, {@link #STATE_STARTED}. + * + * @param positionUs The current media time in microseconds, measured at the start of the current + * iteration of the rendering loop. * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds, * measured at the start of the current iteration of the rendering loop. * @throws ExoPlaybackException If an error occurs. @@ -250,28 +448,28 @@ default void setOperatingRate(float operatingRate) throws ExoPlaybackException { /** * Whether the renderer is able to immediately render media from the current position. - *
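// Illustrative sketch (editor's addition): a schematic of how a player-side loop might drive a
// renderer through the enable/start/render sequence documented above, using the new enable()
// signature from this patch. This is not ExoPlayerImplInternal; all arguments are placeholders
// supplied by the caller.
import android.os.SystemClock;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.Renderer;
import com.google.android.exoplayer2.RendererConfiguration;
import com.google.android.exoplayer2.source.SampleStream;

final class RendererDriver {
  static void driveOnce(
      Renderer renderer,
      RendererConfiguration configuration,
      Format[] formats,
      SampleStream stream,
      long positionUs)
      throws ExoPlaybackException {
    // STATE_DISABLED -> STATE_ENABLED, allowing the start of the stream to be rendered early.
    renderer.enable(
        configuration,
        formats,
        stream,
        positionUs,
        /* joining= */ false,
        /* mayRenderStartOfStream= */ true,
        /* startPositionUs= */ positionUs,
        /* offsetUs= */ 0);
    // STATE_ENABLED -> STATE_STARTED; render() now advances playback in sync with positionUs.
    renderer.start();
    renderer.render(positionUs, /* elapsedRealtimeUs= */ SystemClock.elapsedRealtime() * 1000);
  }
}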

      - * If the renderer is in the {@link #STATE_STARTED} state then returning true indicates that the - * renderer has everything that it needs to continue playback. Returning false indicates that + * + *

      If the renderer is in the {@link #STATE_STARTED} state then returning true indicates that + * the renderer has everything that it needs to continue playback. Returning false indicates that * the player should pause until the renderer is ready. - *

      - * If the renderer is in the {@link #STATE_ENABLED} state then returning true indicates that the - * renderer is ready for playback to be started. Returning false indicates that it is not. - *

      - * This method may be called when the renderer is in the following states: - * {@link #STATE_ENABLED}, {@link #STATE_STARTED}. + * + *

      If the renderer is in the {@link #STATE_ENABLED} state then returning true indicates that + * the renderer is ready for playback to be started. Returning false indicates that it is not. + * + *

      This method may be called when the renderer is in the following states: {@link + * #STATE_ENABLED}, {@link #STATE_STARTED}. * * @return Whether the renderer is ready to render media. */ boolean isReady(); /** - * Whether the renderer is ready for the {@link ExoPlayer} instance to transition to - * {@link Player#STATE_ENDED}. The player will make this transition as soon as {@code true} is - * returned by all of its {@link Renderer}s. - *

      - * This method may be called when the renderer is in the following states: - * {@link #STATE_ENABLED}, {@link #STATE_STARTED}. + * Whether the renderer is ready for the {@link ExoPlayer} instance to transition to {@link + * Player#STATE_ENDED}. The player will make this transition as soon as {@code true} is returned + * by all of its renderers. + * + *

      This method may be called when the renderer is in the following states: {@link + * #STATE_ENABLED}, {@link #STATE_STARTED}. * * @return Whether the renderer is ready for the player to transition to the ended state. */ @@ -279,19 +477,17 @@ default void setOperatingRate(float operatingRate) throws ExoPlaybackException { /** * Stops the renderer, transitioning it to the {@link #STATE_ENABLED} state. - *

      - * This method may be called when the renderer is in the following states: - * {@link #STATE_STARTED}. * - * @throws ExoPlaybackException If an error occurs. + *

      This method may be called when the renderer is in the following states: {@link + * #STATE_STARTED}. */ - void stop() throws ExoPlaybackException; + void stop(); /** * Disable the renderer, transitioning it to the {@link #STATE_DISABLED} state. - *

      - * This method may be called when the renderer is in the following states: - * {@link #STATE_ENABLED}. + * + *

      This method may be called when the renderer is in the following states: {@link + * #STATE_ENABLED}. */ void disable(); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/RendererCapabilities.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/RendererCapabilities.java index a75765262b..f409cfbd07 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/RendererCapabilities.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/RendererCapabilities.java @@ -15,25 +15,25 @@ */ package com.google.android.exoplayer2; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.annotation.SuppressLint; import androidx.annotation.IntDef; -import com.google.android.exoplayer2.util.MimeTypes; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; -/** - * Defines the capabilities of a {@link Renderer}. - */ +/** Defines the capabilities of a {@link Renderer}. */ public interface RendererCapabilities { /** - * Level of renderer support for a format. One of {@link #FORMAT_HANDLED}, {@link - * #FORMAT_EXCEEDS_CAPABILITIES}, {@link #FORMAT_UNSUPPORTED_DRM}, {@link - * #FORMAT_UNSUPPORTED_SUBTYPE} or {@link #FORMAT_UNSUPPORTED_TYPE}. + * @deprecated Use {@link C.FormatSupport} instead. */ + @SuppressWarnings("deprecation") @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ FORMAT_HANDLED, FORMAT_EXCEEDS_CAPABILITIES, @@ -41,52 +41,30 @@ public interface RendererCapabilities { FORMAT_UNSUPPORTED_SUBTYPE, FORMAT_UNSUPPORTED_TYPE }) + @Deprecated @interface FormatSupport {} - - /** A mask to apply to {@link Capabilities} to obtain the {@link FormatSupport} only. */ + /** A mask to apply to {@link Capabilities} to obtain the {@link C.FormatSupport} only. */ int FORMAT_SUPPORT_MASK = 0b111; /** - * The {@link Renderer} is capable of rendering the format. + * @deprecated Use {@link C#FORMAT_HANDLED} instead. */ - int FORMAT_HANDLED = 0b100; + @Deprecated int FORMAT_HANDLED = C.FORMAT_HANDLED; /** - * The {@link Renderer} is capable of rendering formats with the same mime type, but the - * properties of the format exceed the renderer's capabilities. There is a chance the renderer - * will be able to play the format in practice because some renderers report their capabilities - * conservatively, but the expected outcome is that playback will fail. - *

      - * Example: The {@link Renderer} is capable of rendering H264 and the format's mime type is - * {@link MimeTypes#VIDEO_H264}, but the format's resolution exceeds the maximum limit supported - * by the underlying H264 decoder. + * @deprecated Use {@link C#FORMAT_EXCEEDS_CAPABILITIES} instead. */ - int FORMAT_EXCEEDS_CAPABILITIES = 0b011; + @Deprecated int FORMAT_EXCEEDS_CAPABILITIES = C.FORMAT_EXCEEDS_CAPABILITIES; /** - * The {@link Renderer} is capable of rendering formats with the same mime type, but is not - * capable of rendering the format because the format's drm protection is not supported. - *

      - * Example: The {@link Renderer} is capable of rendering H264 and the format's mime type is - * {@link MimeTypes#VIDEO_H264}, but the format indicates PlayReady drm protection where-as the - * renderer only supports Widevine. + * @deprecated Use {@link C#FORMAT_UNSUPPORTED_DRM} instead. */ - int FORMAT_UNSUPPORTED_DRM = 0b010; + @Deprecated int FORMAT_UNSUPPORTED_DRM = C.FORMAT_UNSUPPORTED_DRM; /** - * The {@link Renderer} is a general purpose renderer for formats of the same top-level type, - * but is not capable of rendering the format or any other format with the same mime type because - * the sub-type is not supported. - *

      - * Example: The {@link Renderer} is a general purpose audio renderer and the format's - * mime type matches audio/[subtype], but there does not exist a suitable decoder for [subtype]. + * @deprecated Use {@link C#FORMAT_UNSUPPORTED_SUBTYPE} instead. */ - int FORMAT_UNSUPPORTED_SUBTYPE = 0b001; + @Deprecated int FORMAT_UNSUPPORTED_SUBTYPE = C.FORMAT_UNSUPPORTED_SUBTYPE; /** - * The {@link Renderer} is not capable of rendering the format, either because it does not - * support the format's top-level type, or because it's a specialized renderer for a different - * mime type. - *

      - * Example: The {@link Renderer} is a general purpose video renderer, but the format has an - * audio mime type. + * @deprecated Use {@link C#FORMAT_UNSUPPORTED_TYPE} instead. */ - int FORMAT_UNSUPPORTED_TYPE = 0b000; + @Deprecated int FORMAT_UNSUPPORTED_TYPE = C.FORMAT_UNSUPPORTED_TYPE; /** * Level of renderer support for adaptive format switches. One of {@link #ADAPTIVE_SEAMLESS}, @@ -94,24 +72,21 @@ public interface RendererCapabilities { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ADAPTIVE_SEAMLESS, ADAPTIVE_NOT_SEAMLESS, ADAPTIVE_NOT_SUPPORTED}) @interface AdaptiveSupport {} /** A mask to apply to {@link Capabilities} to obtain the {@link AdaptiveSupport} only. */ - int ADAPTIVE_SUPPORT_MASK = 0b11000; - /** - * The {@link Renderer} can seamlessly adapt between formats. - */ - int ADAPTIVE_SEAMLESS = 0b10000; + int ADAPTIVE_SUPPORT_MASK = 0b11 << 3; + /** The {@link Renderer} can seamlessly adapt between formats. */ + int ADAPTIVE_SEAMLESS = 0b10 << 3; /** * The {@link Renderer} can adapt between formats, but may suffer a brief discontinuity * (~50-100ms) when adaptation occurs. */ - int ADAPTIVE_NOT_SEAMLESS = 0b01000; - /** - * The {@link Renderer} does not support adaptation between formats. - */ - int ADAPTIVE_NOT_SUPPORTED = 0b00000; + int ADAPTIVE_NOT_SEAMLESS = 0b01 << 3; + /** The {@link Renderer} does not support adaptation between formats. */ + int ADAPTIVE_NOT_SUPPORTED = 0; /** * Level of renderer support for tunneling. One of {@link #TUNNELING_SUPPORTED} or {@link @@ -119,95 +94,177 @@ public interface RendererCapabilities { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({TUNNELING_SUPPORTED, TUNNELING_NOT_SUPPORTED}) @interface TunnelingSupport {} - /** A mask to apply to {@link Capabilities} to obtain the {@link TunnelingSupport} only. */ - int TUNNELING_SUPPORT_MASK = 0b100000; + /** A mask to apply to {@link Capabilities} to obtain {@link TunnelingSupport} only. */ + int TUNNELING_SUPPORT_MASK = 0b1 << 5; + /** The {@link Renderer} supports tunneled output. */ + int TUNNELING_SUPPORTED = 0b1 << 5; + /** The {@link Renderer} does not support tunneled output. */ + int TUNNELING_NOT_SUPPORTED = 0; + /** - * The {@link Renderer} supports tunneled output. + * Level of renderer support for hardware acceleration. One of {@link + * #HARDWARE_ACCELERATION_SUPPORTED} and {@link #HARDWARE_ACCELERATION_NOT_SUPPORTED}. + * + *

      For video renderers, the level of support is indicated for non-tunneled output. */ - int TUNNELING_SUPPORTED = 0b100000; + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + HARDWARE_ACCELERATION_SUPPORTED, + HARDWARE_ACCELERATION_NOT_SUPPORTED, + }) + @interface HardwareAccelerationSupport {} + /** A mask to apply to {@link Capabilities} to obtain {@link HardwareAccelerationSupport} only. */ + int HARDWARE_ACCELERATION_SUPPORT_MASK = 0b1 << 6; + /** The renderer is able to use hardware acceleration. */ + int HARDWARE_ACCELERATION_SUPPORTED = 0b1 << 6; + /** The renderer is not able to use hardware acceleration. */ + int HARDWARE_ACCELERATION_NOT_SUPPORTED = 0; + /** - * The {@link Renderer} does not support tunneled output. + * Level of decoder support. One of {@link #DECODER_SUPPORT_FALLBACK_MIMETYPE}, {@link + * #DECODER_SUPPORT_FALLBACK}, and {@link #DECODER_SUPPORT_PRIMARY}. + * + *

      For video renderers, the level of support is indicated for non-tunneled output. */ - int TUNNELING_NOT_SUPPORTED = 0b000000; + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({DECODER_SUPPORT_FALLBACK_MIMETYPE, DECODER_SUPPORT_PRIMARY, DECODER_SUPPORT_FALLBACK}) + @interface DecoderSupport {} + /** A mask to apply to {@link Capabilities} to obtain {@link DecoderSupport} only. */ + int MODE_SUPPORT_MASK = 0b11 << 7; + /** + * The format's MIME type is unsupported and the renderer may use a decoder for a fallback MIME + * type. + */ + int DECODER_SUPPORT_FALLBACK_MIMETYPE = 0b10 << 7; + /** The renderer is able to use the primary decoder for the format's MIME type. */ + int DECODER_SUPPORT_PRIMARY = 0b1 << 7; + /** The format exceeds the primary decoder's capabilities but is supported by fallback decoder */ + int DECODER_SUPPORT_FALLBACK = 0; /** * Combined renderer capabilities. * - *

      This is a bitwise OR of {@link FormatSupport}, {@link AdaptiveSupport} and {@link - * TunnelingSupport}. Use {@link #getFormatSupport(int)}, {@link #getAdaptiveSupport(int)} or - * {@link #getTunnelingSupport(int)} to obtain the individual flags. And use {@link #create(int)} - * or {@link #create(int, int, int)} to create the combined capabilities. + *

      This is a bitwise OR of {@link C.FormatSupport}, {@link AdaptiveSupport}, {@link + * TunnelingSupport}, {@link HardwareAccelerationSupport} and {@link DecoderSupport}. Use {@link + * #getFormatSupport}, {@link #getAdaptiveSupport}, {@link #getTunnelingSupport}, {@link + * #getHardwareAccelerationSupport} and {@link #getDecoderSupport} to obtain individual + * components. Use {@link #create(int)}, {@link #create(int, int, int)} or {@link #create(int, + * int, int, int, int)} to create combined capabilities from individual components. * *

      Possible values: * *

        - *
      • {@link FormatSupport}: The level of support for the format itself. One of {@link - * #FORMAT_HANDLED}, {@link #FORMAT_EXCEEDS_CAPABILITIES}, {@link #FORMAT_UNSUPPORTED_DRM}, - * {@link #FORMAT_UNSUPPORTED_SUBTYPE} and {@link #FORMAT_UNSUPPORTED_TYPE}. + *
      • {@link C.FormatSupport}: The level of support for the format itself. One of {@link + * C#FORMAT_HANDLED}, {@link C#FORMAT_EXCEEDS_CAPABILITIES}, {@link + * C#FORMAT_UNSUPPORTED_DRM}, {@link C#FORMAT_UNSUPPORTED_SUBTYPE} and {@link + * C#FORMAT_UNSUPPORTED_TYPE}. *
      • {@link AdaptiveSupport}: The level of support for adapting from the format to another * format of the same mime type. One of {@link #ADAPTIVE_SEAMLESS}, {@link * #ADAPTIVE_NOT_SEAMLESS} and {@link #ADAPTIVE_NOT_SUPPORTED}. Only set if the level of - * support for the format itself is {@link #FORMAT_HANDLED} or {@link - * #FORMAT_EXCEEDS_CAPABILITIES}. + * support for the format itself is {@link C#FORMAT_HANDLED} or {@link + * C#FORMAT_EXCEEDS_CAPABILITIES}. *
      • {@link TunnelingSupport}: The level of support for tunneling. One of {@link * #TUNNELING_SUPPORTED} and {@link #TUNNELING_NOT_SUPPORTED}. Only set if the level of - * support for the format itself is {@link #FORMAT_HANDLED} or {@link - * #FORMAT_EXCEEDS_CAPABILITIES}. + * support for the format itself is {@link C#FORMAT_HANDLED} or {@link + * C#FORMAT_EXCEEDS_CAPABILITIES}. + *
      • {@link HardwareAccelerationSupport}: The level of support for hardware acceleration. One + * of {@link #HARDWARE_ACCELERATION_SUPPORTED} and {@link + * #HARDWARE_ACCELERATION_NOT_SUPPORTED}. + *
      • {@link DecoderSupport}: The level of decoder support. One of {@link + * #DECODER_SUPPORT_PRIMARY} and {@link #DECODER_SUPPORT_FALLBACK}. *
      */ @Documented @Retention(RetentionPolicy.SOURCE) // Intentionally empty to prevent assignment or comparison with individual flags without masking. + @Target(TYPE_USE) @IntDef({}) @interface Capabilities {} /** - * Returns {@link Capabilities} for the given {@link FormatSupport}. + * Returns {@link Capabilities} for the given {@link C.FormatSupport}. * - *

      The {@link AdaptiveSupport} is set to {@link #ADAPTIVE_NOT_SUPPORTED} and {{@link - * TunnelingSupport} is set to {@link #TUNNELING_NOT_SUPPORTED}. + *

      {@link AdaptiveSupport} is set to {@link #ADAPTIVE_NOT_SUPPORTED}, {@link TunnelingSupport} + * is set to {@link #TUNNELING_NOT_SUPPORTED}, {@link HardwareAccelerationSupport} is set to + * {@link #HARDWARE_ACCELERATION_NOT_SUPPORTED} and {@link DecoderSupport} is set to {@link + * #DECODER_SUPPORT_PRIMARY}. * - * @param formatSupport The {@link FormatSupport}. - * @return The combined {@link Capabilities} of the given {@link FormatSupport}, {@link + * @param formatSupport The {@link C.FormatSupport}. + * @return The combined {@link Capabilities} of the given {@link C.FormatSupport}, {@link * #ADAPTIVE_NOT_SUPPORTED} and {@link #TUNNELING_NOT_SUPPORTED}. */ - @Capabilities - static int create(@FormatSupport int formatSupport) { + static @Capabilities int create(@C.FormatSupport int formatSupport) { return create(formatSupport, ADAPTIVE_NOT_SUPPORTED, TUNNELING_NOT_SUPPORTED); } /** - * Returns {@link Capabilities} combining the given {@link FormatSupport}, {@link AdaptiveSupport} - * and {@link TunnelingSupport}. + * Returns {@link Capabilities} combining the given {@link C.FormatSupport}, {@link + * AdaptiveSupport} and {@link TunnelingSupport}. + * + *

      {@link HardwareAccelerationSupport} is set to {@link #HARDWARE_ACCELERATION_NOT_SUPPORTED} + * and {@link DecoderSupport} is set to {@link #DECODER_SUPPORT_PRIMARY}. + * + * @param formatSupport The {@link C.FormatSupport}. + * @param adaptiveSupport The {@link AdaptiveSupport}. + * @param tunnelingSupport The {@link TunnelingSupport}. + * @return The combined {@link Capabilities}. + */ + static @Capabilities int create( + @C.FormatSupport int formatSupport, + @AdaptiveSupport int adaptiveSupport, + @TunnelingSupport int tunnelingSupport) { + return create( + formatSupport, + adaptiveSupport, + tunnelingSupport, + HARDWARE_ACCELERATION_NOT_SUPPORTED, + DECODER_SUPPORT_PRIMARY); + } + + /** + * Returns {@link Capabilities} combining the given {@link C.FormatSupport}, {@link + * AdaptiveSupport}, {@link TunnelingSupport}, {@link HardwareAccelerationSupport} and {@link + * DecoderSupport}. * - * @param formatSupport The {@link FormatSupport}. + * @param formatSupport The {@link C.FormatSupport}. * @param adaptiveSupport The {@link AdaptiveSupport}. * @param tunnelingSupport The {@link TunnelingSupport}. + * @param hardwareAccelerationSupport The {@link HardwareAccelerationSupport}. + * @param decoderSupport The {@link DecoderSupport}. * @return The combined {@link Capabilities}. */ // Suppression needed for IntDef casting. @SuppressLint("WrongConstant") - @Capabilities - static int create( - @FormatSupport int formatSupport, + static @Capabilities int create( + @C.FormatSupport int formatSupport, @AdaptiveSupport int adaptiveSupport, - @TunnelingSupport int tunnelingSupport) { - return formatSupport | adaptiveSupport | tunnelingSupport; + @TunnelingSupport int tunnelingSupport, + @HardwareAccelerationSupport int hardwareAccelerationSupport, + @DecoderSupport int decoderSupport) { + return formatSupport + | adaptiveSupport + | tunnelingSupport + | hardwareAccelerationSupport + | decoderSupport; } /** - * Returns the {@link FormatSupport} from the combined {@link Capabilities}. + * Returns the {@link C.FormatSupport} from the combined {@link Capabilities}. * * @param supportFlags The combined {@link Capabilities}. - * @return The {@link FormatSupport} only. + * @return The {@link C.FormatSupport} only. */ // Suppression needed for IntDef casting. @SuppressLint("WrongConstant") - @FormatSupport - static int getFormatSupport(@Capabilities int supportFlags) { + static @C.FormatSupport int getFormatSupport(@Capabilities int supportFlags) { return supportFlags & FORMAT_SUPPORT_MASK; } @@ -219,8 +276,7 @@ static int getFormatSupport(@Capabilities int supportFlags) { */ // Suppression needed for IntDef casting. @SuppressLint("WrongConstant") - @AdaptiveSupport - static int getAdaptiveSupport(@Capabilities int supportFlags) { + static @AdaptiveSupport int getAdaptiveSupport(@Capabilities int supportFlags) { return supportFlags & ADAPTIVE_SUPPORT_MASK; } @@ -232,42 +288,47 @@ static int getAdaptiveSupport(@Capabilities int supportFlags) { */ // Suppression needed for IntDef casting. @SuppressLint("WrongConstant") - @TunnelingSupport - static int getTunnelingSupport(@Capabilities int supportFlags) { + static @TunnelingSupport int getTunnelingSupport(@Capabilities int supportFlags) { return supportFlags & TUNNELING_SUPPORT_MASK; } /** - * Returns string representation of a {@link FormatSupport} flag. + * Returns the {@link HardwareAccelerationSupport} from the combined {@link Capabilities}. * - * @param formatSupport A {@link FormatSupport} flag. - * @return A string representation of the flag. 
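A quick illustration, not part of the diff: combining and unpacking the widened Capabilities bit field, using only constants and static helpers that appear in this hunk (the five-argument create plus the masking getters). The exact mask of the low format-support bits is not shown here and is assumed to sit below the adaptive-support bits.

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.RendererCapabilities;

final class CapabilitiesSketch {
  // Illustrative only: build one combined Capabilities value and read each component back.
  static void demo() {
    int capabilities =
        RendererCapabilities.create(
            C.FORMAT_HANDLED,                                      // format support (low bits, assumed)
            RendererCapabilities.ADAPTIVE_SEAMLESS,                // bits 3-4 (0b11 << 3 mask)
            RendererCapabilities.TUNNELING_NOT_SUPPORTED,          // bit 5
            RendererCapabilities.HARDWARE_ACCELERATION_SUPPORTED,  // bit 6
            RendererCapabilities.DECODER_SUPPORT_PRIMARY);         // bits 7-8 (0b11 << 7 mask)

    // Components are compared only after masking; the raw value is a bitwise OR of them all.
    boolean formatHandled =
        RendererCapabilities.getFormatSupport(capabilities) == C.FORMAT_HANDLED;
    boolean adaptsSeamlessly =
        RendererCapabilities.getAdaptiveSupport(capabilities)
            == RendererCapabilities.ADAPTIVE_SEAMLESS;
    boolean supportsTunneling =
        RendererCapabilities.getTunnelingSupport(capabilities)
            == RendererCapabilities.TUNNELING_SUPPORTED;
  }
}

The deliberately empty @Capabilities IntDef above exists precisely so that such comparisons go through the masking getters rather than against the raw combined value.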
+ * @param supportFlags The combined {@link Capabilities}. + * @return The {@link HardwareAccelerationSupport} only. */ - static String getFormatSupportString(@FormatSupport int formatSupport) { - switch (formatSupport) { - case RendererCapabilities.FORMAT_HANDLED: - return "YES"; - case RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES: - return "NO_EXCEEDS_CAPABILITIES"; - case RendererCapabilities.FORMAT_UNSUPPORTED_DRM: - return "NO_UNSUPPORTED_DRM"; - case RendererCapabilities.FORMAT_UNSUPPORTED_SUBTYPE: - return "NO_UNSUPPORTED_TYPE"; - case RendererCapabilities.FORMAT_UNSUPPORTED_TYPE: - return "NO"; - default: - throw new IllegalStateException(); - } + // Suppression needed for IntDef casting. + @SuppressLint("WrongConstant") + static @HardwareAccelerationSupport int getHardwareAccelerationSupport( + @Capabilities int supportFlags) { + return supportFlags & HARDWARE_ACCELERATION_SUPPORT_MASK; } + /** + * Returns the {@link DecoderSupport} from the combined {@link Capabilities}. + * + * @param supportFlags The combined {@link Capabilities}. + * @return The {@link DecoderSupport} only. + */ + // Suppression needed for IntDef casting. + @SuppressLint("WrongConstant") + static @DecoderSupport int getDecoderSupport(@Capabilities int supportFlags) { + return supportFlags & MODE_SUPPORT_MASK; + } + + /** Returns the name of the {@link Renderer}. */ + String getName(); + /** * Returns the track type that the {@link Renderer} handles. For example, a video renderer will * return {@link C#TRACK_TYPE_VIDEO}, an audio renderer will return {@link C#TRACK_TYPE_AUDIO}, a * text renderer will return {@link C#TRACK_TYPE_TEXT}, and so on. * * @see Renderer#getTrackType() - * @return One of the {@code TRACK_TYPE_*} constants defined in {@link C}. + * @return The {@link C.TrackType track type}. */ + @C.TrackType int getTrackType(); /** diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/RendererConfiguration.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/RendererConfiguration.java index bc8c6ff633..333514eed7 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/RendererConfiguration.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/RendererConfiguration.java @@ -17,29 +17,21 @@ import androidx.annotation.Nullable; -/** - * The configuration of a {@link Renderer}. - */ +/** The configuration of a {@link Renderer}. */ public final class RendererConfiguration { - /** - * The default configuration. - */ + /** The default configuration. */ public static final RendererConfiguration DEFAULT = - new RendererConfiguration(C.AUDIO_SESSION_ID_UNSET); + new RendererConfiguration(/* tunneling= */ false); - /** - * The audio session id to use for tunneling, or {@link C#AUDIO_SESSION_ID_UNSET} if tunneling - * should not be enabled. - */ - public final int tunnelingAudioSessionId; + /** Whether to enable tunneling. */ + public final boolean tunneling; /** - * @param tunnelingAudioSessionId The audio session id to use for tunneling, or - * {@link C#AUDIO_SESSION_ID_UNSET} if tunneling should not be enabled. + * @param tunneling Whether to enable tunneling. 
*/ - public RendererConfiguration(int tunnelingAudioSessionId) { - this.tunnelingAudioSessionId = tunnelingAudioSessionId; + public RendererConfiguration(boolean tunneling) { + this.tunneling = tunneling; } @Override @@ -51,12 +43,11 @@ public boolean equals(@Nullable Object obj) { return false; } RendererConfiguration other = (RendererConfiguration) obj; - return tunnelingAudioSessionId == other.tunnelingAudioSessionId; + return tunneling == other.tunneling; } @Override public int hashCode() { - return tunnelingAudioSessionId; + return tunneling ? 0 : 1; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/RenderersFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/RenderersFactory.java index 6f0d125bcf..d012bfaaed 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/RenderersFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/RenderersFactory.java @@ -16,28 +16,22 @@ package com.google.android.exoplayer2; import android.os.Handler; -import androidx.annotation.Nullable; import com.google.android.exoplayer2.audio.AudioRendererEventListener; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.drm.FrameworkMediaCrypto; import com.google.android.exoplayer2.metadata.MetadataOutput; import com.google.android.exoplayer2.text.TextOutput; import com.google.android.exoplayer2.video.VideoRendererEventListener; -/** - * Builds {@link Renderer} instances for use by a {@link SimpleExoPlayer}. - */ +/** Builds {@link Renderer} instances for use by an {@link ExoPlayer}. */ public interface RenderersFactory { /** - * Builds the {@link Renderer} instances for a {@link SimpleExoPlayer}. + * Builds the {@link Renderer} instances for an {@link ExoPlayer}. * * @param eventHandler A handler to use when invoking event listeners and outputs. * @param videoRendererEventListener An event listener for video renderers. * @param audioRendererEventListener An event listener for audio renderers. * @param textRendererOutput An output for text renderers. * @param metadataRendererOutput An output for metadata renderers. - * @param drmSessionManager A drm session manager used by renderers. * @return The {@link Renderer instances}. */ Renderer[] createRenderers( @@ -45,6 +39,5 @@ Renderer[] createRenderers( VideoRendererEventListener videoRendererEventListener, AudioRendererEventListener audioRendererEventListener, TextOutput textRendererOutput, - MetadataOutput metadataRendererOutput, - @Nullable DrmSessionManager drmSessionManager); + MetadataOutput metadataRendererOutput); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/SeekParameters.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/SeekParameters.java index 7a0ad67a28..7b31896c2d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/SeekParameters.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/SeekParameters.java @@ -17,6 +17,7 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; /** * Parameters that apply to seeking. @@ -71,6 +72,41 @@ public SeekParameters(long toleranceBeforeUs, long toleranceAfterUs) { this.toleranceAfterUs = toleranceAfterUs; } + /** + * Resolves a seek based on the parameters, given the requested seek position and two candidate + * sync points. + * + * @param positionUs The requested seek position, in microseconds. 
+ * @param firstSyncUs The first candidate seek point, in microseconds. + * @param secondSyncUs The second candidate seek point, in microseconds. May equal {@code + * firstSyncUs} if there's only one candidate. + * @return The resolved seek position, in microseconds. + */ + public long resolveSeekPositionUs(long positionUs, long firstSyncUs, long secondSyncUs) { + if (toleranceBeforeUs == 0 && toleranceAfterUs == 0) { + return positionUs; + } + long minPositionUs = + Util.subtractWithOverflowDefault(positionUs, toleranceBeforeUs, Long.MIN_VALUE); + long maxPositionUs = Util.addWithOverflowDefault(positionUs, toleranceAfterUs, Long.MAX_VALUE); + boolean firstSyncPositionValid = minPositionUs <= firstSyncUs && firstSyncUs <= maxPositionUs; + boolean secondSyncPositionValid = + minPositionUs <= secondSyncUs && secondSyncUs <= maxPositionUs; + if (firstSyncPositionValid && secondSyncPositionValid) { + if (Math.abs(firstSyncUs - positionUs) <= Math.abs(secondSyncUs - positionUs)) { + return firstSyncUs; + } else { + return secondSyncUs; + } + } else if (firstSyncPositionValid) { + return firstSyncUs; + } else if (secondSyncPositionValid) { + return secondSyncUs; + } else { + return minPositionUs; + } + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/SimpleBasePlayer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/SimpleBasePlayer.java new file mode 100644 index 0000000000..ebb0f25061 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/SimpleBasePlayer.java @@ -0,0 +1,3803 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
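A quick usage sketch, not part of the diff, for the resolveSeekPositionUs method added to SeekParameters above: the tolerance window decides which of the two candidate sync points, if any, satisfies the seek.

import com.google.android.exoplayer2.SeekParameters;

final class ResolveSeekSketch {
  // Illustrative only: a +/-100ms tolerance around a 5s seek request.
  static void demo() {
    SeekParameters parameters =
        new SeekParameters(/* toleranceBeforeUs= */ 100_000, /* toleranceAfterUs= */ 100_000);

    // Candidate sync points at 4.95s and 5.40s: only the first lies inside [4.90s, 5.10s].
    long resolvedUs =
        parameters.resolveSeekPositionUs(
            /* positionUs= */ 5_000_000,
            /* firstSyncUs= */ 4_950_000,
            /* secondSyncUs= */ 5_400_000);
    // resolvedUs == 4_950_000. With both tolerances at zero the requested position is returned
    // unchanged, and if neither sync point is in range the lower edge of the window is returned.
  }
}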
+ */ +package com.google.android.exoplayer2; + +import static androidx.annotation.VisibleForTesting.PROTECTED; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; +import static com.google.android.exoplayer2.util.Util.msToUs; +import static com.google.android.exoplayer2.util.Util.usToMs; +import static java.lang.Math.max; +import static java.lang.Math.min; + +import android.graphics.Rect; +import android.os.Looper; +import android.os.SystemClock; +import android.util.Pair; +import android.view.Surface; +import android.view.SurfaceHolder; +import android.view.SurfaceView; +import android.view.TextureView; +import androidx.annotation.FloatRange; +import androidx.annotation.IntRange; +import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; +import com.google.android.exoplayer2.audio.AudioAttributes; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.source.ads.AdPlaybackState; +import com.google.android.exoplayer2.text.CueGroup; +import com.google.android.exoplayer2.trackselection.TrackSelectionParameters; +import com.google.android.exoplayer2.util.Clock; +import com.google.android.exoplayer2.util.HandlerWrapper; +import com.google.android.exoplayer2.util.ListenerSet; +import com.google.android.exoplayer2.util.Size; +import com.google.android.exoplayer2.util.Util; +import com.google.android.exoplayer2.video.VideoSize; +import com.google.common.base.Supplier; +import com.google.common.collect.ImmutableList; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import com.google.errorprone.annotations.ForOverride; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import org.checkerframework.checker.nullness.qual.EnsuresNonNull; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; + +/** + * A base implementation for {@link Player} that reduces the number of methods to implement to a + * minimum. + * + *

      Implementation notes: + * + *

        + *
      • Subclasses must override {@link #getState()} to populate the current player state on + * request. + *
      • The {@link State} should set the {@linkplain State.Builder#setAvailableCommands available + * commands} to indicate which {@link Player} methods are supported. + *
      • All setter-like player methods (for example, {@link #setPlayWhenReady}) forward to + * overridable methods (for example, {@link #handleSetPlayWhenReady}) that can be used to + * handle these requests. These methods return a {@link ListenableFuture} to indicate when the + * request has been handled and is fully reflected in the values returned from {@link + * #getState}. This class will automatically request a state update once the request is done. + * If the state changes can be handled synchronously, these methods can return Guava's {@link + * Futures#immediateVoidFuture()}. + *
      • Subclasses can manually trigger state updates with {@link #invalidateState}, for example if + * something changes independent of {@link Player} method calls. + *
      + * + * This base class handles various aspects of the player implementation to simplify the subclass: + * + *
        + *
      • The {@link State} can only be created with allowed combinations of state values, avoiding + * any invalid player states. + *
      • Only functionality that is declared as {@linkplain Player.Command available} needs to be + * implemented. Other methods are automatically ignored. + *
      • Listener handling and informing listeners of state changes is handled automatically. + *
      • The base class provides a framework for asynchronous handling of method calls. It changes + * the visible playback state immediately to the most likely outcome to ensure the + * user-visible state changes look like synchronous operations. The state is then updated + * again once the asynchronous method calls have been fully handled. + *
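A hedged sketch, not part of the diff, of the subclass contract these notes describe: a player that only advertises COMMAND_PLAY_PAUSE and handles it synchronously. The constructor and the exact override signatures (SimpleBasePlayer(Looper), getState(), handleSetPlayWhenReady(boolean)) are assumed from the Javadoc references rather than spelled out in this hunk.

import android.os.Looper;
import com.google.android.exoplayer2.SimpleBasePlayer;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;

final class PlayPauseOnlyPlayer extends SimpleBasePlayer {

  // Single source of truth for the player state, returned from getState() on every request.
  private State state =
      new State.Builder()
          .setAvailableCommands(new Commands.Builder().add(COMMAND_PLAY_PAUSE).build())
          .setPlaybackState(STATE_IDLE)
          .build();

  PlayPauseOnlyPlayer(Looper applicationLooper) {
    super(applicationLooper); // assumed constructor signature
  }

  @Override
  protected State getState() {
    return state;
  }

  @Override
  protected ListenableFuture<?> handleSetPlayWhenReady(boolean playWhenReady) {
    // Handled synchronously: update the state and return an immediate future so the base
    // class knows the new values are already reflected in getState().
    state =
        state
            .buildUpon()
            .setPlayWhenReady(playWhenReady, PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST)
            .build();
    return Futures.immediateVoidFuture();
  }
}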
      + */ +public abstract class SimpleBasePlayer extends BasePlayer { + + /** An immutable state description of the player. */ + protected static final class State { + + /** A builder for {@link State} objects. */ + public static final class Builder { + + private Commands availableCommands; + private boolean playWhenReady; + private @PlayWhenReadyChangeReason int playWhenReadyChangeReason; + private @Player.State int playbackState; + private @PlaybackSuppressionReason int playbackSuppressionReason; + @Nullable private PlaybackException playerError; + private @RepeatMode int repeatMode; + private boolean shuffleModeEnabled; + private boolean isLoading; + private long seekBackIncrementMs; + private long seekForwardIncrementMs; + private long maxSeekToPreviousPositionMs; + private PlaybackParameters playbackParameters; + private TrackSelectionParameters trackSelectionParameters; + private AudioAttributes audioAttributes; + private float volume; + private VideoSize videoSize; + private CueGroup currentCues; + private DeviceInfo deviceInfo; + private int deviceVolume; + private boolean isDeviceMuted; + private Size surfaceSize; + private boolean newlyRenderedFirstFrame; + private Metadata timedMetadata; + private ImmutableList playlist; + private Timeline timeline; + private MediaMetadata playlistMetadata; + private int currentMediaItemIndex; + private int currentAdGroupIndex; + private int currentAdIndexInAdGroup; + @Nullable private Long contentPositionMs; + private PositionSupplier contentPositionMsSupplier; + @Nullable private Long adPositionMs; + private PositionSupplier adPositionMsSupplier; + private PositionSupplier contentBufferedPositionMsSupplier; + private PositionSupplier adBufferedPositionMsSupplier; + private PositionSupplier totalBufferedDurationMsSupplier; + private boolean hasPositionDiscontinuity; + private @Player.DiscontinuityReason int positionDiscontinuityReason; + private long discontinuityPositionMs; + + /** Creates the builder. 
*/ + public Builder() { + availableCommands = Commands.EMPTY; + playWhenReady = false; + playWhenReadyChangeReason = Player.PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST; + playbackState = Player.STATE_IDLE; + playbackSuppressionReason = Player.PLAYBACK_SUPPRESSION_REASON_NONE; + playerError = null; + repeatMode = Player.REPEAT_MODE_OFF; + shuffleModeEnabled = false; + isLoading = false; + seekBackIncrementMs = C.DEFAULT_SEEK_BACK_INCREMENT_MS; + seekForwardIncrementMs = C.DEFAULT_SEEK_FORWARD_INCREMENT_MS; + maxSeekToPreviousPositionMs = C.DEFAULT_MAX_SEEK_TO_PREVIOUS_POSITION_MS; + playbackParameters = PlaybackParameters.DEFAULT; + trackSelectionParameters = TrackSelectionParameters.DEFAULT_WITHOUT_CONTEXT; + audioAttributes = AudioAttributes.DEFAULT; + volume = 1f; + videoSize = VideoSize.UNKNOWN; + currentCues = CueGroup.EMPTY_TIME_ZERO; + deviceInfo = DeviceInfo.UNKNOWN; + deviceVolume = 0; + isDeviceMuted = false; + surfaceSize = Size.UNKNOWN; + newlyRenderedFirstFrame = false; + timedMetadata = new Metadata(/* presentationTimeUs= */ C.TIME_UNSET); + playlist = ImmutableList.of(); + timeline = Timeline.EMPTY; + playlistMetadata = MediaMetadata.EMPTY; + currentMediaItemIndex = C.INDEX_UNSET; + currentAdGroupIndex = C.INDEX_UNSET; + currentAdIndexInAdGroup = C.INDEX_UNSET; + contentPositionMs = null; + contentPositionMsSupplier = PositionSupplier.getConstant(C.TIME_UNSET); + adPositionMs = null; + adPositionMsSupplier = PositionSupplier.ZERO; + contentBufferedPositionMsSupplier = PositionSupplier.getConstant(C.TIME_UNSET); + adBufferedPositionMsSupplier = PositionSupplier.ZERO; + totalBufferedDurationMsSupplier = PositionSupplier.ZERO; + hasPositionDiscontinuity = false; + positionDiscontinuityReason = Player.DISCONTINUITY_REASON_INTERNAL; + discontinuityPositionMs = 0; + } + + private Builder(State state) { + this.availableCommands = state.availableCommands; + this.playWhenReady = state.playWhenReady; + this.playWhenReadyChangeReason = state.playWhenReadyChangeReason; + this.playbackState = state.playbackState; + this.playbackSuppressionReason = state.playbackSuppressionReason; + this.playerError = state.playerError; + this.repeatMode = state.repeatMode; + this.shuffleModeEnabled = state.shuffleModeEnabled; + this.isLoading = state.isLoading; + this.seekBackIncrementMs = state.seekBackIncrementMs; + this.seekForwardIncrementMs = state.seekForwardIncrementMs; + this.maxSeekToPreviousPositionMs = state.maxSeekToPreviousPositionMs; + this.playbackParameters = state.playbackParameters; + this.trackSelectionParameters = state.trackSelectionParameters; + this.audioAttributes = state.audioAttributes; + this.volume = state.volume; + this.videoSize = state.videoSize; + this.currentCues = state.currentCues; + this.deviceInfo = state.deviceInfo; + this.deviceVolume = state.deviceVolume; + this.isDeviceMuted = state.isDeviceMuted; + this.surfaceSize = state.surfaceSize; + this.newlyRenderedFirstFrame = state.newlyRenderedFirstFrame; + this.timedMetadata = state.timedMetadata; + this.playlist = state.playlist; + this.timeline = state.timeline; + this.playlistMetadata = state.playlistMetadata; + this.currentMediaItemIndex = state.currentMediaItemIndex; + this.currentAdGroupIndex = state.currentAdGroupIndex; + this.currentAdIndexInAdGroup = state.currentAdIndexInAdGroup; + this.contentPositionMs = null; + this.contentPositionMsSupplier = state.contentPositionMsSupplier; + this.adPositionMs = null; + this.adPositionMsSupplier = state.adPositionMsSupplier; + this.contentBufferedPositionMsSupplier = 
state.contentBufferedPositionMsSupplier; + this.adBufferedPositionMsSupplier = state.adBufferedPositionMsSupplier; + this.totalBufferedDurationMsSupplier = state.totalBufferedDurationMsSupplier; + this.hasPositionDiscontinuity = state.hasPositionDiscontinuity; + this.positionDiscontinuityReason = state.positionDiscontinuityReason; + this.discontinuityPositionMs = state.discontinuityPositionMs; + } + + /** + * Sets the available {@link Commands}. + * + * @param availableCommands The available {@link Commands}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setAvailableCommands(Commands availableCommands) { + this.availableCommands = availableCommands; + return this; + } + + /** + * Sets whether playback should proceed when ready and not suppressed. + * + * @param playWhenReady Whether playback should proceed when ready and not suppressed. + * @param playWhenReadyChangeReason The {@linkplain PlayWhenReadyChangeReason reason} for + * changing the value. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPlayWhenReady( + boolean playWhenReady, @PlayWhenReadyChangeReason int playWhenReadyChangeReason) { + this.playWhenReady = playWhenReady; + this.playWhenReadyChangeReason = playWhenReadyChangeReason; + return this; + } + + /** + * Sets the {@linkplain Player.State state} of the player. + * + *

      If the {@linkplain #setPlaylist playlist} is empty, the state must be either {@link + * Player#STATE_IDLE} or {@link Player#STATE_ENDED}. + * + * @param playbackState The {@linkplain Player.State state} of the player. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPlaybackState(@Player.State int playbackState) { + this.playbackState = playbackState; + return this; + } + + /** + * Sets the reason why playback is suppressed even if {@link #getPlayWhenReady()} is true. + * + * @param playbackSuppressionReason The {@link Player.PlaybackSuppressionReason} why playback + * is suppressed even if {@link #getPlayWhenReady()} is true. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPlaybackSuppressionReason( + @Player.PlaybackSuppressionReason int playbackSuppressionReason) { + this.playbackSuppressionReason = playbackSuppressionReason; + return this; + } + + /** + * Sets last error that caused playback to fail, or null if there was no error. + * + *

      The {@linkplain #setPlaybackState playback state} must be set to {@link + * Player#STATE_IDLE} while an error is set. + * + * @param playerError The last error that caused playback to fail, or null if there was no + * error. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPlayerError(@Nullable PlaybackException playerError) { + this.playerError = playerError; + return this; + } + + /** + * Sets the {@link RepeatMode} used for playback. + * + * @param repeatMode The {@link RepeatMode} used for playback. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setRepeatMode(@Player.RepeatMode int repeatMode) { + this.repeatMode = repeatMode; + return this; + } + + /** + * Sets whether shuffling of media items is enabled. + * + * @param shuffleModeEnabled Whether shuffling of media items is enabled. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setShuffleModeEnabled(boolean shuffleModeEnabled) { + this.shuffleModeEnabled = shuffleModeEnabled; + return this; + } + + /** + * Sets whether the player is currently loading its source. + * + *

      The player can not be marked as loading if the {@linkplain #setPlaybackState state} is + * {@link Player#STATE_IDLE} or {@link Player#STATE_ENDED}. + * + * @param isLoading Whether the player is currently loading its source. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setIsLoading(boolean isLoading) { + this.isLoading = isLoading; + return this; + } + + /** + * Sets the {@link Player#seekBack()} increment in milliseconds. + * + * @param seekBackIncrementMs The {@link Player#seekBack()} increment in milliseconds. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setSeekBackIncrementMs(long seekBackIncrementMs) { + this.seekBackIncrementMs = seekBackIncrementMs; + return this; + } + + /** + * Sets the {@link Player#seekForward()} increment in milliseconds. + * + * @param seekForwardIncrementMs The {@link Player#seekForward()} increment in milliseconds. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setSeekForwardIncrementMs(long seekForwardIncrementMs) { + this.seekForwardIncrementMs = seekForwardIncrementMs; + return this; + } + + /** + * Sets the maximum position for which {@link #seekToPrevious()} seeks to the previous item, + * in milliseconds. + * + * @param maxSeekToPreviousPositionMs The maximum position for which {@link #seekToPrevious()} + * seeks to the previous item, in milliseconds. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setMaxSeekToPreviousPositionMs(long maxSeekToPreviousPositionMs) { + this.maxSeekToPreviousPositionMs = maxSeekToPreviousPositionMs; + return this; + } + + /** + * Sets the currently active {@link PlaybackParameters}. + * + * @param playbackParameters The currently active {@link PlaybackParameters}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPlaybackParameters(PlaybackParameters playbackParameters) { + this.playbackParameters = playbackParameters; + return this; + } + + /** + * Sets the currently active {@link TrackSelectionParameters}. + * + * @param trackSelectionParameters The currently active {@link TrackSelectionParameters}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setTrackSelectionParameters( + TrackSelectionParameters trackSelectionParameters) { + this.trackSelectionParameters = trackSelectionParameters; + return this; + } + + /** + * Sets the current {@link AudioAttributes}. + * + * @param audioAttributes The current {@link AudioAttributes}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setAudioAttributes(AudioAttributes audioAttributes) { + this.audioAttributes = audioAttributes; + return this; + } + + /** + * Sets the current audio volume, with 0 being silence and 1 being unity gain (signal + * unchanged). + * + * @param volume The current audio volume, with 0 being silence and 1 being unity gain (signal + * unchanged). + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setVolume(@FloatRange(from = 0, to = 1.0) float volume) { + checkArgument(volume >= 0.0f && volume <= 1.0f); + this.volume = volume; + return this; + } + + /** + * Sets the current video size. + * + * @param videoSize The current video size. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setVideoSize(VideoSize videoSize) { + this.videoSize = videoSize; + return this; + } + + /** + * Sets the current {@linkplain CueGroup cues}. + * + * @param currentCues The current {@linkplain CueGroup cues}. + * @return This builder. 
+ */ + @CanIgnoreReturnValue + public Builder setCurrentCues(CueGroup currentCues) { + this.currentCues = currentCues; + return this; + } + + /** + * Sets the {@link DeviceInfo}. + * + * @param deviceInfo The {@link DeviceInfo}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setDeviceInfo(DeviceInfo deviceInfo) { + this.deviceInfo = deviceInfo; + return this; + } + + /** + * Sets the current device volume. + * + * @param deviceVolume The current device volume. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setDeviceVolume(@IntRange(from = 0) int deviceVolume) { + checkArgument(deviceVolume >= 0); + this.deviceVolume = deviceVolume; + return this; + } + + /** + * Sets whether the device is muted. + * + * @param isDeviceMuted Whether the device is muted. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setIsDeviceMuted(boolean isDeviceMuted) { + this.isDeviceMuted = isDeviceMuted; + return this; + } + + /** + * Sets the size of the surface onto which the video is being rendered. + * + * @param surfaceSize The surface size. Dimensions may be {@link C#LENGTH_UNSET} if unknown, + * or 0 if the video is not rendered onto a surface. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setSurfaceSize(Size surfaceSize) { + this.surfaceSize = surfaceSize; + return this; + } + + /** + * Sets whether a frame has been rendered for the first time since setting the surface, a + * rendering reset, or since the stream being rendered was changed. + * + *

      Note: As this will trigger a {@link Listener#onRenderedFirstFrame()} event, the flag + * should only be set for the first {@link State} update after the first frame was rendered. + * + * @param newlyRenderedFirstFrame Whether the first frame was newly rendered. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setNewlyRenderedFirstFrame(boolean newlyRenderedFirstFrame) { + this.newlyRenderedFirstFrame = newlyRenderedFirstFrame; + return this; + } + + /** + * Sets the most recent timed {@link Metadata}. + * + *

      Metadata with a {@link Metadata#presentationTimeUs} of {@link C#TIME_UNSET} will not be + * forwarded to listeners. + * + * @param timedMetadata The most recent timed {@link Metadata}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setTimedMetadata(Metadata timedMetadata) { + this.timedMetadata = timedMetadata; + return this; + } + + /** + * Sets the list of {@link MediaItemData media items} in the playlist. + * + *

      All items must have unique {@linkplain MediaItemData.Builder#setUid UIDs}. + * + * @param playlist The list of {@link MediaItemData media items} in the playlist. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPlaylist(List playlist) { + HashSet uids = new HashSet<>(); + for (int i = 0; i < playlist.size(); i++) { + checkArgument(uids.add(playlist.get(i).uid), "Duplicate MediaItemData UID in playlist"); + } + this.playlist = ImmutableList.copyOf(playlist); + this.timeline = new PlaylistTimeline(this.playlist); + return this; + } + + /** + * Sets the playlist {@link MediaMetadata}. + * + * @param playlistMetadata The playlist {@link MediaMetadata}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPlaylistMetadata(MediaMetadata playlistMetadata) { + this.playlistMetadata = playlistMetadata; + return this; + } + + /** + * Sets the current media item index. + * + *

      The media item index must be less than the number of {@linkplain #setPlaylist media + * items in the playlist}, if set. + * + * @param currentMediaItemIndex The current media item index, or {@link C#INDEX_UNSET} to + * assume the default first item in the playlist. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setCurrentMediaItemIndex(int currentMediaItemIndex) { + this.currentMediaItemIndex = currentMediaItemIndex; + return this; + } + + /** + * Sets the current ad indices, or {@link C#INDEX_UNSET} if no ad is playing. + * + *

      Either both indices need to be {@link C#INDEX_UNSET} or both are not {@link + * C#INDEX_UNSET}. + * + *

      Ads indices can only be set if there is a corresponding {@link AdPlaybackState} defined + * in the current {@linkplain MediaItemData.Builder#setPeriods period}. + * + * @param adGroupIndex The current ad group index, or {@link C#INDEX_UNSET} if no ad is + * playing. + * @param adIndexInAdGroup The current ad index in the ad group, or {@link C#INDEX_UNSET} if + * no ad is playing. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setCurrentAd(int adGroupIndex, int adIndexInAdGroup) { + checkArgument((adGroupIndex == C.INDEX_UNSET) == (adIndexInAdGroup == C.INDEX_UNSET)); + this.currentAdGroupIndex = adGroupIndex; + this.currentAdIndexInAdGroup = adIndexInAdGroup; + return this; + } + + /** + * Sets the current content playback position in milliseconds. + * + *

      This position will be converted to an advancing {@link PositionSupplier} if the overall + * state indicates an advancing playback position. + * + *

      This method overrides any other {@link PositionSupplier} set via {@link + * #setContentPositionMs(PositionSupplier)}. + * + * @param positionMs The current content playback position in milliseconds, or {@link + * C#TIME_UNSET} to indicate the default start position. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setContentPositionMs(long positionMs) { + this.contentPositionMs = positionMs; + return this; + } + + /** + * Sets the {@link PositionSupplier} for the current content playback position in + * milliseconds. + * + *

      The supplier is expected to return the updated position on every call if the playback is + * advancing, for example by using {@link PositionSupplier#getExtrapolating}. + * + *

      This method overrides any other position set via {@link #setContentPositionMs(long)}. + * + * @param contentPositionMsSupplier The {@link PositionSupplier} for the current content + * playback position in milliseconds, or {@link C#TIME_UNSET} to indicate the default + * start position. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setContentPositionMs(PositionSupplier contentPositionMsSupplier) { + this.contentPositionMs = null; + this.contentPositionMsSupplier = contentPositionMsSupplier; + return this; + } + + /** + * Sets the current ad playback position in milliseconds. The value is unused if no ad is + * playing. + * + *

      This position will be converted to an advancing {@link PositionSupplier} if the overall + * state indicates an advancing ad playback position. + * + *

      This method overrides any other {@link PositionSupplier} set via {@link + * #setAdPositionMs(PositionSupplier)}. + * + * @param positionMs The current ad playback position in milliseconds. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setAdPositionMs(long positionMs) { + this.adPositionMs = positionMs; + return this; + } + + /** + * Sets the {@link PositionSupplier} for the current ad playback position in milliseconds. The + * value is unused if no ad is playing. + * + *

      The supplier is expected to return the updated position on every call if the playback is + * advancing, for example by using {@link PositionSupplier#getExtrapolating}. + * + *

      This method overrides any other position set via {@link #setAdPositionMs(long)}. + * + * @param adPositionMsSupplier The {@link PositionSupplier} for the current ad playback + * position in milliseconds. The value is unused if no ad is playing. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setAdPositionMs(PositionSupplier adPositionMsSupplier) { + this.adPositionMs = null; + this.adPositionMsSupplier = adPositionMsSupplier; + return this; + } + + /** + * Sets the {@link PositionSupplier} for the estimated position up to which the currently + * playing content is buffered, in milliseconds. + * + * @param contentBufferedPositionMsSupplier The {@link PositionSupplier} for the estimated + * position up to which the currently playing content is buffered, in milliseconds, or + * {@link C#TIME_UNSET} to indicate the default start position. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setContentBufferedPositionMs( + PositionSupplier contentBufferedPositionMsSupplier) { + this.contentBufferedPositionMsSupplier = contentBufferedPositionMsSupplier; + return this; + } + + /** + * Sets the {@link PositionSupplier} for the estimated position up to which the currently + * playing ad is buffered, in milliseconds. The value is unused if no ad is playing. + * + * @param adBufferedPositionMsSupplier The {@link PositionSupplier} for the estimated position + * up to which the currently playing ad is buffered, in milliseconds. The value is unused + * if no ad is playing. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setAdBufferedPositionMs(PositionSupplier adBufferedPositionMsSupplier) { + this.adBufferedPositionMsSupplier = adBufferedPositionMsSupplier; + return this; + } + + /** + * Sets the {@link PositionSupplier} for the estimated total buffered duration in + * milliseconds. + * + * @param totalBufferedDurationMsSupplier The {@link PositionSupplier} for the estimated total + * buffered duration in milliseconds. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setTotalBufferedDurationMs(PositionSupplier totalBufferedDurationMsSupplier) { + this.totalBufferedDurationMsSupplier = totalBufferedDurationMsSupplier; + return this; + } + + /** + * Signals that a position discontinuity happened since the last player update and sets the + * reason for it. + * + * @param positionDiscontinuityReason The {@linkplain Player.DiscontinuityReason reason} for + * the discontinuity. + * @param discontinuityPositionMs The position, in milliseconds, in the current content or ad + * from which playback continues after the discontinuity. + * @return This builder. + * @see #clearPositionDiscontinuity + */ + @CanIgnoreReturnValue + public Builder setPositionDiscontinuity( + @Player.DiscontinuityReason int positionDiscontinuityReason, + long discontinuityPositionMs) { + this.hasPositionDiscontinuity = true; + this.positionDiscontinuityReason = positionDiscontinuityReason; + this.discontinuityPositionMs = discontinuityPositionMs; + return this; + } + + /** + * Clears a previously set position discontinuity signal. + * + * @return This builder. + * @see #hasPositionDiscontinuity + */ + @CanIgnoreReturnValue + public Builder clearPositionDiscontinuity() { + this.hasPositionDiscontinuity = false; + return this; + } + + /** Builds the {@link State}. */ + public State build() { + return new State(this); + } + } + + /** The available {@link Commands}. 
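A brief sketch, not part of the diff, of how a subclass would typically use this builder (State, its Builder and PositionSupplier are protected, so this lives inside the subclass); the media item list and the 30-second position are made-up values.

// Illustrative only, inside a SimpleBasePlayer subclass.
private State exampleState(java.util.List<MediaItemData> mediaItemDatas) {
  // Smallest valid state: an empty playlist is only allowed in STATE_IDLE or STATE_ENDED.
  State idle =
      new State.Builder()
          .setAvailableCommands(Commands.EMPTY)
          .setPlaybackState(STATE_IDLE)
          .build();

  // A playing state: the content position is given as an extrapolating supplier, so the
  // position reported between invalidateState() calls keeps advancing on its own.
  return idle.buildUpon()
      .setPlaylist(mediaItemDatas) // assumed to be non-empty
      .setCurrentMediaItemIndex(0)
      .setPlaybackState(STATE_READY)
      .setPlayWhenReady(true, PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST)
      .setContentPositionMs(PositionSupplier.getExtrapolating(30_000, /* playbackSpeed= */ 1f))
      .build();
}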
*/ + public final Commands availableCommands; + /** Whether playback should proceed when ready and not suppressed. */ + public final boolean playWhenReady; + /** The last reason for changing {@link #playWhenReady}. */ + public final @PlayWhenReadyChangeReason int playWhenReadyChangeReason; + /** The {@linkplain Player.State state} of the player. */ + public final @Player.State int playbackState; + /** The reason why playback is suppressed even if {@link #getPlayWhenReady()} is true. */ + public final @PlaybackSuppressionReason int playbackSuppressionReason; + /** The last error that caused playback to fail, or null if there was no error. */ + @Nullable public final PlaybackException playerError; + /** The {@link RepeatMode} used for playback. */ + public final @RepeatMode int repeatMode; + /** Whether shuffling of media items is enabled. */ + public final boolean shuffleModeEnabled; + /** Whether the player is currently loading its source. */ + public final boolean isLoading; + /** The {@link Player#seekBack()} increment in milliseconds. */ + public final long seekBackIncrementMs; + /** The {@link Player#seekForward()} increment in milliseconds. */ + public final long seekForwardIncrementMs; + /** + * The maximum position for which {@link #seekToPrevious()} seeks to the previous item, in + * milliseconds. + */ + public final long maxSeekToPreviousPositionMs; + /** The currently active {@link PlaybackParameters}. */ + public final PlaybackParameters playbackParameters; + /** The currently active {@link TrackSelectionParameters}. */ + public final TrackSelectionParameters trackSelectionParameters; + /** The current {@link AudioAttributes}. */ + public final AudioAttributes audioAttributes; + /** The current audio volume, with 0 being silence and 1 being unity gain (signal unchanged). */ + @FloatRange(from = 0, to = 1.0) + public final float volume; + /** The current video size. */ + public final VideoSize videoSize; + /** The current {@linkplain CueGroup cues}. */ + public final CueGroup currentCues; + /** The {@link DeviceInfo}. */ + public final DeviceInfo deviceInfo; + /** The current device volume. */ + @IntRange(from = 0) + public final int deviceVolume; + /** Whether the device is muted. */ + public final boolean isDeviceMuted; + /** The size of the surface onto which the video is being rendered. */ + public final Size surfaceSize; + /** + * Whether a frame has been rendered for the first time since setting the surface, a rendering + * reset, or since the stream being rendered was changed. + */ + public final boolean newlyRenderedFirstFrame; + /** The most recent timed metadata. */ + public final Metadata timedMetadata; + /** The media items in the playlist. */ + public final ImmutableList playlist; + /** The {@link Timeline} derived from the {@link #playlist}. */ + public final Timeline timeline; + /** The playlist {@link MediaMetadata}. */ + public final MediaMetadata playlistMetadata; + /** + * The current media item index, or {@link C#INDEX_UNSET} to assume the default first item of + * the playlist is played. + */ + public final int currentMediaItemIndex; + /** The current ad group index, or {@link C#INDEX_UNSET} if no ad is playing. */ + public final int currentAdGroupIndex; + /** The current ad index in the ad group, or {@link C#INDEX_UNSET} if no ad is playing. */ + public final int currentAdIndexInAdGroup; + /** + * The {@link PositionSupplier} for the current content playback position in milliseconds, or + * {@link C#TIME_UNSET} to indicate the default start position. 
+ */ + public final PositionSupplier contentPositionMsSupplier; + /** + * The {@link PositionSupplier} for the current ad playback position in milliseconds. The value + * is unused if no ad is playing. + */ + public final PositionSupplier adPositionMsSupplier; + /** + * The {@link PositionSupplier} for the estimated position up to which the currently playing + * content is buffered, in milliseconds, or {@link C#TIME_UNSET} to indicate the default start + * position. + */ + public final PositionSupplier contentBufferedPositionMsSupplier; + /** + * The {@link PositionSupplier} for the estimated position up to which the currently playing ad + * is buffered, in milliseconds. The value is unused if no ad is playing. + */ + public final PositionSupplier adBufferedPositionMsSupplier; + /** The {@link PositionSupplier} for the estimated total buffered duration in milliseconds. */ + public final PositionSupplier totalBufferedDurationMsSupplier; + /** Signals that a position discontinuity happened since the last update to the player. */ + public final boolean hasPositionDiscontinuity; + /** + * The {@linkplain Player.DiscontinuityReason reason} for the last position discontinuity. The + * value is unused if {@link #hasPositionDiscontinuity} is {@code false}. + */ + public final @Player.DiscontinuityReason int positionDiscontinuityReason; + /** + * The position, in milliseconds, in the current content or ad from which playback continued + * after the discontinuity. The value is unused if {@link #hasPositionDiscontinuity} is {@code + * false}. + */ + public final long discontinuityPositionMs; + + private State(Builder builder) { + if (builder.timeline.isEmpty()) { + checkArgument( + builder.playbackState == Player.STATE_IDLE + || builder.playbackState == Player.STATE_ENDED, + "Empty playlist only allowed in STATE_IDLE or STATE_ENDED"); + checkArgument( + builder.currentAdGroupIndex == C.INDEX_UNSET + && builder.currentAdIndexInAdGroup == C.INDEX_UNSET, + "Ads not allowed if playlist is empty"); + } else { + int mediaItemIndex = builder.currentMediaItemIndex; + if (mediaItemIndex == C.INDEX_UNSET) { + mediaItemIndex = 0; // TODO: Use shuffle order to find first index. + } else { + checkArgument( + builder.currentMediaItemIndex < builder.timeline.getWindowCount(), + "currentMediaItemIndex must be less than playlist.size()"); + } + if (builder.currentAdGroupIndex != C.INDEX_UNSET) { + Timeline.Period period = new Timeline.Period(); + Timeline.Window window = new Timeline.Window(); + long contentPositionMs = + builder.contentPositionMs != null + ? 
builder.contentPositionMs + : builder.contentPositionMsSupplier.get(); + int periodIndex = + getPeriodIndexFromWindowPosition( + builder.timeline, mediaItemIndex, contentPositionMs, window, period); + builder.timeline.getPeriod(periodIndex, period); + checkArgument( + builder.currentAdGroupIndex < period.getAdGroupCount(), + "PeriodData has less ad groups than adGroupIndex"); + int adCountInGroup = period.getAdCountInAdGroup(builder.currentAdGroupIndex); + if (adCountInGroup != C.LENGTH_UNSET) { + checkArgument( + builder.currentAdIndexInAdGroup < adCountInGroup, + "Ad group has less ads than adIndexInGroupIndex"); + } + } + } + if (builder.playerError != null) { + checkArgument( + builder.playbackState == Player.STATE_IDLE, "Player error only allowed in STATE_IDLE"); + } + if (builder.playbackState == Player.STATE_IDLE + || builder.playbackState == Player.STATE_ENDED) { + checkArgument( + !builder.isLoading, "isLoading only allowed when not in STATE_IDLE or STATE_ENDED"); + } + PositionSupplier contentPositionMsSupplier = builder.contentPositionMsSupplier; + if (builder.contentPositionMs != null) { + if (builder.currentAdGroupIndex == C.INDEX_UNSET + && builder.playWhenReady + && builder.playbackState == Player.STATE_READY + && builder.playbackSuppressionReason == Player.PLAYBACK_SUPPRESSION_REASON_NONE + && builder.contentPositionMs != C.TIME_UNSET) { + contentPositionMsSupplier = + PositionSupplier.getExtrapolating( + builder.contentPositionMs, builder.playbackParameters.speed); + } else { + contentPositionMsSupplier = PositionSupplier.getConstant(builder.contentPositionMs); + } + } + PositionSupplier adPositionMsSupplier = builder.adPositionMsSupplier; + if (builder.adPositionMs != null) { + if (builder.currentAdGroupIndex != C.INDEX_UNSET + && builder.playWhenReady + && builder.playbackState == Player.STATE_READY + && builder.playbackSuppressionReason == Player.PLAYBACK_SUPPRESSION_REASON_NONE) { + adPositionMsSupplier = + PositionSupplier.getExtrapolating(builder.adPositionMs, /* playbackSpeed= */ 1f); + } else { + adPositionMsSupplier = PositionSupplier.getConstant(builder.adPositionMs); + } + } + this.availableCommands = builder.availableCommands; + this.playWhenReady = builder.playWhenReady; + this.playWhenReadyChangeReason = builder.playWhenReadyChangeReason; + this.playbackState = builder.playbackState; + this.playbackSuppressionReason = builder.playbackSuppressionReason; + this.playerError = builder.playerError; + this.repeatMode = builder.repeatMode; + this.shuffleModeEnabled = builder.shuffleModeEnabled; + this.isLoading = builder.isLoading; + this.seekBackIncrementMs = builder.seekBackIncrementMs; + this.seekForwardIncrementMs = builder.seekForwardIncrementMs; + this.maxSeekToPreviousPositionMs = builder.maxSeekToPreviousPositionMs; + this.playbackParameters = builder.playbackParameters; + this.trackSelectionParameters = builder.trackSelectionParameters; + this.audioAttributes = builder.audioAttributes; + this.volume = builder.volume; + this.videoSize = builder.videoSize; + this.currentCues = builder.currentCues; + this.deviceInfo = builder.deviceInfo; + this.deviceVolume = builder.deviceVolume; + this.isDeviceMuted = builder.isDeviceMuted; + this.surfaceSize = builder.surfaceSize; + this.newlyRenderedFirstFrame = builder.newlyRenderedFirstFrame; + this.timedMetadata = builder.timedMetadata; + this.playlist = builder.playlist; + this.timeline = builder.timeline; + this.playlistMetadata = builder.playlistMetadata; + this.currentMediaItemIndex = builder.currentMediaItemIndex; 
+ this.currentAdGroupIndex = builder.currentAdGroupIndex; + this.currentAdIndexInAdGroup = builder.currentAdIndexInAdGroup; + this.contentPositionMsSupplier = contentPositionMsSupplier; + this.adPositionMsSupplier = adPositionMsSupplier; + this.contentBufferedPositionMsSupplier = builder.contentBufferedPositionMsSupplier; + this.adBufferedPositionMsSupplier = builder.adBufferedPositionMsSupplier; + this.totalBufferedDurationMsSupplier = builder.totalBufferedDurationMsSupplier; + this.hasPositionDiscontinuity = builder.hasPositionDiscontinuity; + this.positionDiscontinuityReason = builder.positionDiscontinuityReason; + this.discontinuityPositionMs = builder.discontinuityPositionMs; + } + + /** Returns a {@link Builder} pre-populated with the current state values. */ + public Builder buildUpon() { + return new Builder(this); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (!(o instanceof State)) { + return false; + } + State state = (State) o; + return playWhenReady == state.playWhenReady + && playWhenReadyChangeReason == state.playWhenReadyChangeReason + && availableCommands.equals(state.availableCommands) + && playbackState == state.playbackState + && playbackSuppressionReason == state.playbackSuppressionReason + && Util.areEqual(playerError, state.playerError) + && repeatMode == state.repeatMode + && shuffleModeEnabled == state.shuffleModeEnabled + && isLoading == state.isLoading + && seekBackIncrementMs == state.seekBackIncrementMs + && seekForwardIncrementMs == state.seekForwardIncrementMs + && maxSeekToPreviousPositionMs == state.maxSeekToPreviousPositionMs + && playbackParameters.equals(state.playbackParameters) + && trackSelectionParameters.equals(state.trackSelectionParameters) + && audioAttributes.equals(state.audioAttributes) + && volume == state.volume + && videoSize.equals(state.videoSize) + && currentCues.equals(state.currentCues) + && deviceInfo.equals(state.deviceInfo) + && deviceVolume == state.deviceVolume + && isDeviceMuted == state.isDeviceMuted + && surfaceSize.equals(state.surfaceSize) + && newlyRenderedFirstFrame == state.newlyRenderedFirstFrame + && timedMetadata.equals(state.timedMetadata) + && playlist.equals(state.playlist) + && playlistMetadata.equals(state.playlistMetadata) + && currentMediaItemIndex == state.currentMediaItemIndex + && currentAdGroupIndex == state.currentAdGroupIndex + && currentAdIndexInAdGroup == state.currentAdIndexInAdGroup + && contentPositionMsSupplier.equals(state.contentPositionMsSupplier) + && adPositionMsSupplier.equals(state.adPositionMsSupplier) + && contentBufferedPositionMsSupplier.equals(state.contentBufferedPositionMsSupplier) + && adBufferedPositionMsSupplier.equals(state.adBufferedPositionMsSupplier) + && totalBufferedDurationMsSupplier.equals(state.totalBufferedDurationMsSupplier) + && hasPositionDiscontinuity == state.hasPositionDiscontinuity + && positionDiscontinuityReason == state.positionDiscontinuityReason + && discontinuityPositionMs == state.discontinuityPositionMs; + } + + @Override + public int hashCode() { + int result = 7; + result = 31 * result + availableCommands.hashCode(); + result = 31 * result + (playWhenReady ? 1 : 0); + result = 31 * result + playWhenReadyChangeReason; + result = 31 * result + playbackState; + result = 31 * result + playbackSuppressionReason; + result = 31 * result + (playerError == null ? 0 : playerError.hashCode()); + result = 31 * result + repeatMode; + result = 31 * result + (shuffleModeEnabled ? 
1 : 0); + result = 31 * result + (isLoading ? 1 : 0); + result = 31 * result + (int) (seekBackIncrementMs ^ (seekBackIncrementMs >>> 32)); + result = 31 * result + (int) (seekForwardIncrementMs ^ (seekForwardIncrementMs >>> 32)); + result = + 31 * result + (int) (maxSeekToPreviousPositionMs ^ (maxSeekToPreviousPositionMs >>> 32)); + result = 31 * result + playbackParameters.hashCode(); + result = 31 * result + trackSelectionParameters.hashCode(); + result = 31 * result + audioAttributes.hashCode(); + result = 31 * result + Float.floatToRawIntBits(volume); + result = 31 * result + videoSize.hashCode(); + result = 31 * result + currentCues.hashCode(); + result = 31 * result + deviceInfo.hashCode(); + result = 31 * result + deviceVolume; + result = 31 * result + (isDeviceMuted ? 1 : 0); + result = 31 * result + surfaceSize.hashCode(); + result = 31 * result + (newlyRenderedFirstFrame ? 1 : 0); + result = 31 * result + timedMetadata.hashCode(); + result = 31 * result + playlist.hashCode(); + result = 31 * result + playlistMetadata.hashCode(); + result = 31 * result + currentMediaItemIndex; + result = 31 * result + currentAdGroupIndex; + result = 31 * result + currentAdIndexInAdGroup; + result = 31 * result + contentPositionMsSupplier.hashCode(); + result = 31 * result + adPositionMsSupplier.hashCode(); + result = 31 * result + contentBufferedPositionMsSupplier.hashCode(); + result = 31 * result + adBufferedPositionMsSupplier.hashCode(); + result = 31 * result + totalBufferedDurationMsSupplier.hashCode(); + result = 31 * result + (hasPositionDiscontinuity ? 1 : 0); + result = 31 * result + positionDiscontinuityReason; + result = 31 * result + (int) (discontinuityPositionMs ^ (discontinuityPositionMs >>> 32)); + return result; + } + } + + private static final class PlaylistTimeline extends Timeline { + + private final ImmutableList playlist; + private final int[] firstPeriodIndexByWindowIndex; + private final int[] windowIndexByPeriodIndex; + private final HashMap periodIndexByUid; + + public PlaylistTimeline(ImmutableList playlist) { + int mediaItemCount = playlist.size(); + this.playlist = playlist; + this.firstPeriodIndexByWindowIndex = new int[mediaItemCount]; + int periodCount = 0; + for (int i = 0; i < mediaItemCount; i++) { + MediaItemData mediaItemData = playlist.get(i); + firstPeriodIndexByWindowIndex[i] = periodCount; + periodCount += getPeriodCountInMediaItem(mediaItemData); + } + this.windowIndexByPeriodIndex = new int[periodCount]; + this.periodIndexByUid = new HashMap<>(); + int periodIndex = 0; + for (int i = 0; i < mediaItemCount; i++) { + MediaItemData mediaItemData = playlist.get(i); + for (int j = 0; j < getPeriodCountInMediaItem(mediaItemData); j++) { + periodIndexByUid.put(mediaItemData.getPeriodUid(j), periodIndex); + windowIndexByPeriodIndex[periodIndex] = i; + periodIndex++; + } + } + } + + @Override + public int getWindowCount() { + return playlist.size(); + } + + @Override + public int getNextWindowIndex(int windowIndex, int repeatMode, boolean shuffleModeEnabled) { + // TODO: Support shuffle order. + return super.getNextWindowIndex(windowIndex, repeatMode, shuffleModeEnabled); + } + + @Override + public int getPreviousWindowIndex(int windowIndex, int repeatMode, boolean shuffleModeEnabled) { + // TODO: Support shuffle order. + return super.getPreviousWindowIndex(windowIndex, repeatMode, shuffleModeEnabled); + } + + @Override + public int getLastWindowIndex(boolean shuffleModeEnabled) { + // TODO: Support shuffle order. 
+ return super.getLastWindowIndex(shuffleModeEnabled); + } + + @Override + public int getFirstWindowIndex(boolean shuffleModeEnabled) { + // TODO: Support shuffle order. + return super.getFirstWindowIndex(shuffleModeEnabled); + } + + @Override + public Window getWindow(int windowIndex, Window window, long defaultPositionProjectionUs) { + return playlist + .get(windowIndex) + .getWindow(firstPeriodIndexByWindowIndex[windowIndex], window); + } + + @Override + public int getPeriodCount() { + return windowIndexByPeriodIndex.length; + } + + @Override + public Period getPeriodByUid(Object periodUid, Period period) { + int periodIndex = checkNotNull(periodIndexByUid.get(periodUid)); + return getPeriod(periodIndex, period, /* setIds= */ true); + } + + @Override + public Period getPeriod(int periodIndex, Period period, boolean setIds) { + int windowIndex = windowIndexByPeriodIndex[periodIndex]; + int periodIndexInWindow = periodIndex - firstPeriodIndexByWindowIndex[windowIndex]; + return playlist.get(windowIndex).getPeriod(windowIndex, periodIndexInWindow, period); + } + + @Override + public int getIndexOfPeriod(Object uid) { + @Nullable Integer index = periodIndexByUid.get(uid); + return index == null ? C.INDEX_UNSET : index; + } + + @Override + public Object getUidOfPeriod(int periodIndex) { + int windowIndex = windowIndexByPeriodIndex[periodIndex]; + int periodIndexInWindow = periodIndex - firstPeriodIndexByWindowIndex[windowIndex]; + return playlist.get(windowIndex).getPeriodUid(periodIndexInWindow); + } + + private static int getPeriodCountInMediaItem(MediaItemData mediaItemData) { + return mediaItemData.periods.isEmpty() ? 1 : mediaItemData.periods.size(); + } + } + + /** + * An immutable description of an item in the playlist, containing both static setup information + * like {@link MediaItem} and dynamic data that is generally read from the media like the + * duration. + */ + protected static final class MediaItemData { + + /** A builder for {@link MediaItemData} objects. */ + public static final class Builder { + + private Object uid; + private Tracks tracks; + private MediaItem mediaItem; + @Nullable private MediaMetadata mediaMetadata; + @Nullable private Object manifest; + @Nullable private MediaItem.LiveConfiguration liveConfiguration; + private long presentationStartTimeMs; + private long windowStartTimeMs; + private long elapsedRealtimeEpochOffsetMs; + private boolean isSeekable; + private boolean isDynamic; + private long defaultPositionUs; + private long durationUs; + private long positionInFirstPeriodUs; + private boolean isPlaceholder; + private ImmutableList periods; + + /** + * Creates the builder. + * + * @param uid The unique identifier of the media item within a playlist. This value will be + * set as {@link Timeline.Window#uid} for this item. 
+ */ + public Builder(Object uid) { + this.uid = uid; + tracks = Tracks.EMPTY; + mediaItem = MediaItem.EMPTY; + mediaMetadata = null; + manifest = null; + liveConfiguration = null; + presentationStartTimeMs = C.TIME_UNSET; + windowStartTimeMs = C.TIME_UNSET; + elapsedRealtimeEpochOffsetMs = C.TIME_UNSET; + isSeekable = false; + isDynamic = false; + defaultPositionUs = 0; + durationUs = C.TIME_UNSET; + positionInFirstPeriodUs = 0; + isPlaceholder = false; + periods = ImmutableList.of(); + } + + private Builder(MediaItemData mediaItemData) { + this.uid = mediaItemData.uid; + this.tracks = mediaItemData.tracks; + this.mediaItem = mediaItemData.mediaItem; + this.mediaMetadata = mediaItemData.mediaMetadata; + this.manifest = mediaItemData.manifest; + this.liveConfiguration = mediaItemData.liveConfiguration; + this.presentationStartTimeMs = mediaItemData.presentationStartTimeMs; + this.windowStartTimeMs = mediaItemData.windowStartTimeMs; + this.elapsedRealtimeEpochOffsetMs = mediaItemData.elapsedRealtimeEpochOffsetMs; + this.isSeekable = mediaItemData.isSeekable; + this.isDynamic = mediaItemData.isDynamic; + this.defaultPositionUs = mediaItemData.defaultPositionUs; + this.durationUs = mediaItemData.durationUs; + this.positionInFirstPeriodUs = mediaItemData.positionInFirstPeriodUs; + this.isPlaceholder = mediaItemData.isPlaceholder; + this.periods = mediaItemData.periods; + } + + /** + * Sets the unique identifier of this media item within a playlist. + * + *
<p>
      This value will be set as {@link Timeline.Window#uid} for this item. + * + * @param uid The unique identifier of this media item within a playlist. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setUid(Object uid) { + this.uid = uid; + return this; + } + + /** + * Sets the {@link Tracks} of this media item. + * + * @param tracks The {@link Tracks} of this media item. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setTracks(Tracks tracks) { + this.tracks = tracks; + return this; + } + + /** + * Sets the {@link MediaItem}. + * + * @param mediaItem The {@link MediaItem}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setMediaItem(MediaItem mediaItem) { + this.mediaItem = mediaItem; + return this; + } + + /** + * Sets the {@link MediaMetadata}. + * + *
<p>
      This data includes static data from the {@link MediaItem#mediaMetadata MediaItem} and + * the media's {@link Format#metadata Format}, as well any dynamic metadata that has been + * parsed from the media. If null, the metadata is assumed to be the simple combination of the + * {@link MediaItem#mediaMetadata MediaItem} metadata and the metadata of the selected {@link + * Format#metadata Formats}. + * + * @param mediaMetadata The {@link MediaMetadata}, or null to assume that the metadata is the + * simple combination of the {@link MediaItem#mediaMetadata MediaItem} metadata and the + * metadata of the selected {@link Format#metadata Formats}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setMediaMetadata(@Nullable MediaMetadata mediaMetadata) { + this.mediaMetadata = mediaMetadata; + return this; + } + + /** + * Sets the manifest of the media item. + * + * @param manifest The manifest of the media item, or null if not applicable. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setManifest(@Nullable Object manifest) { + this.manifest = manifest; + return this; + } + + /** + * Sets the active {@link MediaItem.LiveConfiguration}, or null if the media item is not live. + * + * @param liveConfiguration The active {@link MediaItem.LiveConfiguration}, or null if the + * media item is not live. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setLiveConfiguration(@Nullable MediaItem.LiveConfiguration liveConfiguration) { + this.liveConfiguration = liveConfiguration; + return this; + } + + /** + * Sets the start time of the live presentation. + * + *
<p>
      This value can only be set to anything other than {@link C#TIME_UNSET} if the stream is + * {@linkplain #setLiveConfiguration live}. + * + * @param presentationStartTimeMs The start time of the live presentation, in milliseconds + * since the Unix epoch, or {@link C#TIME_UNSET} if unknown or not applicable. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPresentationStartTimeMs(long presentationStartTimeMs) { + this.presentationStartTimeMs = presentationStartTimeMs; + return this; + } + + /** + * Sets the start time of the live window. + * + *
<p>
      This value can only be set to anything other than {@link C#TIME_UNSET} if the stream is + * {@linkplain #setLiveConfiguration live}. The value should also be greater than or equal to the + * {@linkplain #setPresentationStartTimeMs presentation start time}, if set. + * + * @param windowStartTimeMs The start time of the live window, in milliseconds since the Unix + * epoch, or {@link C#TIME_UNSET} if unknown or not applicable. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setWindowStartTimeMs(long windowStartTimeMs) { + this.windowStartTimeMs = windowStartTimeMs; + return this; + } + + /** + * Sets the offset between {@link SystemClock#elapsedRealtime()} and the time since the Unix + * epoch according to the clock of the media origin server. + * + *
<p>
      This value can only be set to anything other than {@link C#TIME_UNSET} if the stream is + * {@linkplain #setLiveConfiguration live}. + * + * @param elapsedRealtimeEpochOffsetMs The offset between {@link + * SystemClock#elapsedRealtime()} and the time since the Unix epoch according to the clock + * of the media origin server, or {@link C#TIME_UNSET} if unknown or not applicable. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setElapsedRealtimeEpochOffsetMs(long elapsedRealtimeEpochOffsetMs) { + this.elapsedRealtimeEpochOffsetMs = elapsedRealtimeEpochOffsetMs; + return this; + } + + /** + * Sets whether it's possible to seek within this media item. + * + * @param isSeekable Whether it's possible to seek within this media item. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setIsSeekable(boolean isSeekable) { + this.isSeekable = isSeekable; + return this; + } + + /** + * Sets whether this media item may change over time, for example a moving live window. + * + * @param isDynamic Whether this media item may change over time, for example a moving live + * window. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setIsDynamic(boolean isDynamic) { + this.isDynamic = isDynamic; + return this; + } + + /** + * Sets the default position relative to the start of the media item at which to begin + * playback, in microseconds. + * + *
<p>
      The default position must be less than or equal to the {@linkplain #setDurationUs duration}, if + * it is set. + * + * @param defaultPositionUs The default position relative to the start of the media item at + * which to begin playback, in microseconds. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setDefaultPositionUs(long defaultPositionUs) { + checkArgument(defaultPositionUs >= 0); + this.defaultPositionUs = defaultPositionUs; + return this; + } + + /** + * Sets the duration of the media item, in microseconds. + * + *
<p>
      If both this duration and all {@linkplain #setPeriods period} durations are set, the sum + * of this duration and the {@linkplain #setPositionInFirstPeriodUs offset in the first + * period} must match the total duration of all periods. + * + * @param durationUs The duration of the media item, in microseconds, or {@link C#TIME_UNSET} + * if unknown. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setDurationUs(long durationUs) { + checkArgument(durationUs == C.TIME_UNSET || durationUs >= 0); + this.durationUs = durationUs; + return this; + } + + /** + * Sets the position of the start of this media item relative to the start of the first period + * belonging to it, in microseconds. + * + * @param positionInFirstPeriodUs The position of the start of this media item relative to the + * start of the first period belonging to it, in microseconds. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPositionInFirstPeriodUs(long positionInFirstPeriodUs) { + checkArgument(positionInFirstPeriodUs >= 0); + this.positionInFirstPeriodUs = positionInFirstPeriodUs; + return this; + } + + /** + * Sets whether this media item contains placeholder information because the real information + * has yet to be loaded. + * + * @param isPlaceholder Whether this media item contains placeholder information because the + * real information has yet to be loaded. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setIsPlaceholder(boolean isPlaceholder) { + this.isPlaceholder = isPlaceholder; + return this; + } + + /** + * Sets the list of {@linkplain PeriodData periods} in this media item. + * + *
<p>
      All periods must have unique {@linkplain PeriodData.Builder#setUid UIDs} and only the + * last period is allowed to have an unset {@linkplain PeriodData.Builder#setDurationUs + * duration}. + * + * @param periods The list of {@linkplain PeriodData periods} in this media item, or an empty + * list to assume a single period without ads and the same duration as the media item. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPeriods(List periods) { + int periodCount = periods.size(); + for (int i = 0; i < periodCount - 1; i++) { + checkArgument( + periods.get(i).durationUs != C.TIME_UNSET, "Periods other than last need a duration"); + for (int j = i + 1; j < periodCount; j++) { + checkArgument( + !periods.get(i).uid.equals(periods.get(j).uid), + "Duplicate PeriodData UIDs in period list"); + } + } + this.periods = ImmutableList.copyOf(periods); + return this; + } + + /** Builds the {@link MediaItemData}. */ + public MediaItemData build() { + return new MediaItemData(this); + } + } + + /** The unique identifier of this media item. */ + public final Object uid; + /** The {@link Tracks} of this media item. */ + public final Tracks tracks; + /** The {@link MediaItem}. */ + public final MediaItem mediaItem; + /** + * The {@link MediaMetadata}, including static data from the {@link MediaItem#mediaMetadata + * MediaItem} and the media's {@link Format#metadata Format}, as well any dynamic metadata that + * has been parsed from the media. If null, the metadata is assumed to be the simple combination + * of the {@link MediaItem#mediaMetadata MediaItem} metadata and the metadata of the selected + * {@link Format#metadata Formats}. + */ + @Nullable public final MediaMetadata mediaMetadata; + /** The manifest of the media item, or null if not applicable. */ + @Nullable public final Object manifest; + /** The active {@link MediaItem.LiveConfiguration}, or null if the media item is not live. */ + @Nullable public final MediaItem.LiveConfiguration liveConfiguration; + /** + * The start time of the live presentation, in milliseconds since the Unix epoch, or {@link + * C#TIME_UNSET} if unknown or not applicable. + */ + public final long presentationStartTimeMs; + /** + * The start time of the live window, in milliseconds since the Unix epoch, or {@link + * C#TIME_UNSET} if unknown or not applicable. + */ + public final long windowStartTimeMs; + /** + * The offset between {@link SystemClock#elapsedRealtime()} and the time since the Unix epoch + * according to the clock of the media origin server, or {@link C#TIME_UNSET} if unknown or not + * applicable. + */ + public final long elapsedRealtimeEpochOffsetMs; + /** Whether it's possible to seek within this media item. */ + public final boolean isSeekable; + /** Whether this media item may change over time, for example a moving live window. */ + public final boolean isDynamic; + /** + * The default position relative to the start of the media item at which to begin playback, in + * microseconds. + */ + public final long defaultPositionUs; + /** The duration of the media item, in microseconds, or {@link C#TIME_UNSET} if unknown. */ + public final long durationUs; + /** + * The position of the start of this media item relative to the start of the first period + * belonging to it, in microseconds. + */ + public final long positionInFirstPeriodUs; + /** + * Whether this media item contains placeholder information because the real information has yet + * to be loaded. 
+ */ + public final boolean isPlaceholder; + /** + * The list of {@linkplain PeriodData periods} in this media item, or an empty list to assume a + * single period without ads and the same duration as the media item. + */ + public final ImmutableList periods; + + private final long[] periodPositionInWindowUs; + private final MediaMetadata combinedMediaMetadata; + + private MediaItemData(Builder builder) { + if (builder.liveConfiguration == null) { + checkArgument( + builder.presentationStartTimeMs == C.TIME_UNSET, + "presentationStartTimeMs can only be set if liveConfiguration != null"); + checkArgument( + builder.windowStartTimeMs == C.TIME_UNSET, + "windowStartTimeMs can only be set if liveConfiguration != null"); + checkArgument( + builder.elapsedRealtimeEpochOffsetMs == C.TIME_UNSET, + "elapsedRealtimeEpochOffsetMs can only be set if liveConfiguration != null"); + } else if (builder.presentationStartTimeMs != C.TIME_UNSET + && builder.windowStartTimeMs != C.TIME_UNSET) { + checkArgument( + builder.windowStartTimeMs >= builder.presentationStartTimeMs, + "windowStartTimeMs can't be less than presentationStartTimeMs"); + } + int periodCount = builder.periods.size(); + if (builder.durationUs != C.TIME_UNSET) { + checkArgument( + builder.defaultPositionUs <= builder.durationUs, + "defaultPositionUs can't be greater than durationUs"); + } + this.uid = builder.uid; + this.tracks = builder.tracks; + this.mediaItem = builder.mediaItem; + this.mediaMetadata = builder.mediaMetadata; + this.manifest = builder.manifest; + this.liveConfiguration = builder.liveConfiguration; + this.presentationStartTimeMs = builder.presentationStartTimeMs; + this.windowStartTimeMs = builder.windowStartTimeMs; + this.elapsedRealtimeEpochOffsetMs = builder.elapsedRealtimeEpochOffsetMs; + this.isSeekable = builder.isSeekable; + this.isDynamic = builder.isDynamic; + this.defaultPositionUs = builder.defaultPositionUs; + this.durationUs = builder.durationUs; + this.positionInFirstPeriodUs = builder.positionInFirstPeriodUs; + this.isPlaceholder = builder.isPlaceholder; + this.periods = builder.periods; + periodPositionInWindowUs = new long[periods.size()]; + if (!periods.isEmpty()) { + periodPositionInWindowUs[0] = -positionInFirstPeriodUs; + for (int i = 0; i < periodCount - 1; i++) { + periodPositionInWindowUs[i + 1] = periodPositionInWindowUs[i] + periods.get(i).durationUs; + } + } + combinedMediaMetadata = + mediaMetadata != null ? mediaMetadata : getCombinedMediaMetadata(mediaItem, tracks); + } + + /** Returns a {@link Builder} pre-populated with the current values. 
*/ + public Builder buildUpon() { + return new Builder(this); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (!(o instanceof MediaItemData)) { + return false; + } + MediaItemData mediaItemData = (MediaItemData) o; + return this.uid.equals(mediaItemData.uid) + && this.tracks.equals(mediaItemData.tracks) + && this.mediaItem.equals(mediaItemData.mediaItem) + && Util.areEqual(this.mediaMetadata, mediaItemData.mediaMetadata) + && Util.areEqual(this.manifest, mediaItemData.manifest) + && Util.areEqual(this.liveConfiguration, mediaItemData.liveConfiguration) + && this.presentationStartTimeMs == mediaItemData.presentationStartTimeMs + && this.windowStartTimeMs == mediaItemData.windowStartTimeMs + && this.elapsedRealtimeEpochOffsetMs == mediaItemData.elapsedRealtimeEpochOffsetMs + && this.isSeekable == mediaItemData.isSeekable + && this.isDynamic == mediaItemData.isDynamic + && this.defaultPositionUs == mediaItemData.defaultPositionUs + && this.durationUs == mediaItemData.durationUs + && this.positionInFirstPeriodUs == mediaItemData.positionInFirstPeriodUs + && this.isPlaceholder == mediaItemData.isPlaceholder + && this.periods.equals(mediaItemData.periods); + } + + @Override + public int hashCode() { + int result = 7; + result = 31 * result + uid.hashCode(); + result = 31 * result + tracks.hashCode(); + result = 31 * result + mediaItem.hashCode(); + result = 31 * result + (mediaMetadata == null ? 0 : mediaMetadata.hashCode()); + result = 31 * result + (manifest == null ? 0 : manifest.hashCode()); + result = 31 * result + (liveConfiguration == null ? 0 : liveConfiguration.hashCode()); + result = 31 * result + (int) (presentationStartTimeMs ^ (presentationStartTimeMs >>> 32)); + result = 31 * result + (int) (windowStartTimeMs ^ (windowStartTimeMs >>> 32)); + result = + 31 * result + + (int) (elapsedRealtimeEpochOffsetMs ^ (elapsedRealtimeEpochOffsetMs >>> 32)); + result = 31 * result + (isSeekable ? 1 : 0); + result = 31 * result + (isDynamic ? 1 : 0); + result = 31 * result + (int) (defaultPositionUs ^ (defaultPositionUs >>> 32)); + result = 31 * result + (int) (durationUs ^ (durationUs >>> 32)); + result = 31 * result + (int) (positionInFirstPeriodUs ^ (positionInFirstPeriodUs >>> 32)); + result = 31 * result + (isPlaceholder ? 1 : 0); + result = 31 * result + periods.hashCode(); + return result; + } + + private Timeline.Window getWindow(int firstPeriodIndex, Timeline.Window window) { + int periodCount = periods.isEmpty() ? 
1 : periods.size(); + window.set( + uid, + mediaItem, + manifest, + presentationStartTimeMs, + windowStartTimeMs, + elapsedRealtimeEpochOffsetMs, + isSeekable, + isDynamic, + liveConfiguration, + defaultPositionUs, + durationUs, + firstPeriodIndex, + /* lastPeriodIndex= */ firstPeriodIndex + periodCount - 1, + positionInFirstPeriodUs); + window.isPlaceholder = isPlaceholder; + return window; + } + + private Timeline.Period getPeriod( + int windowIndex, int periodIndexInMediaItem, Timeline.Period period) { + if (periods.isEmpty()) { + period.set( + /* id= */ uid, + uid, + windowIndex, + /* durationUs= */ positionInFirstPeriodUs + durationUs, + /* positionInWindowUs= */ 0, + AdPlaybackState.NONE, + isPlaceholder); + } else { + PeriodData periodData = periods.get(periodIndexInMediaItem); + Object periodId = periodData.uid; + Object periodUid = Pair.create(uid, periodId); + period.set( + periodId, + periodUid, + windowIndex, + periodData.durationUs, + periodPositionInWindowUs[periodIndexInMediaItem], + periodData.adPlaybackState, + periodData.isPlaceholder); + } + return period; + } + + private Object getPeriodUid(int periodIndexInMediaItem) { + if (periods.isEmpty()) { + return uid; + } + Object periodId = periods.get(periodIndexInMediaItem).uid; + return Pair.create(uid, periodId); + } + + private static MediaMetadata getCombinedMediaMetadata(MediaItem mediaItem, Tracks tracks) { + MediaMetadata.Builder metadataBuilder = new MediaMetadata.Builder(); + int trackGroupCount = tracks.getGroups().size(); + for (int i = 0; i < trackGroupCount; i++) { + Tracks.Group group = tracks.getGroups().get(i); + for (int j = 0; j < group.length; j++) { + if (group.isTrackSelected(j)) { + Format format = group.getTrackFormat(j); + if (format.metadata != null) { + for (int k = 0; k < format.metadata.length(); k++) { + format.metadata.get(k).populateMediaMetadata(metadataBuilder); + } + } + } + } + } + return metadataBuilder.populate(mediaItem.mediaMetadata).build(); + } + } + + /** Data describing the properties of a period inside a {@link MediaItemData}. */ + protected static final class PeriodData { + + /** A builder for {@link PeriodData} objects. */ + public static final class Builder { + + private Object uid; + private long durationUs; + private AdPlaybackState adPlaybackState; + private boolean isPlaceholder; + + /** + * Creates the builder. + * + * @param uid The unique identifier of the period within its media item. + */ + public Builder(Object uid) { + this.uid = uid; + this.durationUs = 0; + this.adPlaybackState = AdPlaybackState.NONE; + this.isPlaceholder = false; + } + + private Builder(PeriodData periodData) { + this.uid = periodData.uid; + this.durationUs = periodData.durationUs; + this.adPlaybackState = periodData.adPlaybackState; + this.isPlaceholder = periodData.isPlaceholder; + } + + /** + * Sets the unique identifier of the period within its media item. + * + * @param uid The unique identifier of the period within its media item. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setUid(Object uid) { + this.uid = uid; + return this; + } + + /** + * Sets the total duration of the period, in microseconds, or {@link C#TIME_UNSET} if unknown. + * + *
<p>
      Only the last period in a media item can have an unknown duration. + * + * @param durationUs The total duration of the period, in microseconds, or {@link + * C#TIME_UNSET} if unknown. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setDurationUs(long durationUs) { + checkArgument(durationUs == C.TIME_UNSET || durationUs >= 0); + this.durationUs = durationUs; + return this; + } + + /** + * Sets the {@link AdPlaybackState}. + * + * @param adPlaybackState The {@link AdPlaybackState}, or {@link AdPlaybackState#NONE} if + * there are no ads. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setAdPlaybackState(AdPlaybackState adPlaybackState) { + this.adPlaybackState = adPlaybackState; + return this; + } + + /** + * Sets whether this period contains placeholder information because the real information has + * yet to be loaded + * + * @param isPlaceholder Whether this period contains placeholder information because the real + * information has yet to be loaded. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setIsPlaceholder(boolean isPlaceholder) { + this.isPlaceholder = isPlaceholder; + return this; + } + + /** Builds the {@link PeriodData}. */ + public PeriodData build() { + return new PeriodData(this); + } + } + + /** The unique identifier of the period within its media item. */ + public final Object uid; + /** + * The total duration of the period, in microseconds, or {@link C#TIME_UNSET} if unknown. Only + * the last period in a media item can have an unknown duration. + */ + public final long durationUs; + /** + * The {@link AdPlaybackState} of the period, or {@link AdPlaybackState#NONE} if there are no + * ads. + */ + public final AdPlaybackState adPlaybackState; + /** + * Whether this period contains placeholder information because the real information has yet to + * be loaded. + */ + public final boolean isPlaceholder; + + private PeriodData(Builder builder) { + this.uid = builder.uid; + this.durationUs = builder.durationUs; + this.adPlaybackState = builder.adPlaybackState; + this.isPlaceholder = builder.isPlaceholder; + } + + /** Returns a {@link Builder} pre-populated with the current values. */ + public Builder buildUpon() { + return new Builder(this); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (!(o instanceof PeriodData)) { + return false; + } + PeriodData periodData = (PeriodData) o; + return this.uid.equals(periodData.uid) + && this.durationUs == periodData.durationUs + && this.adPlaybackState.equals(periodData.adPlaybackState) + && this.isPlaceholder == periodData.isPlaceholder; + } + + @Override + public int hashCode() { + int result = 7; + result = 31 * result + uid.hashCode(); + result = 31 * result + (int) (durationUs ^ (durationUs >>> 32)); + result = 31 * result + adPlaybackState.hashCode(); + result = 31 * result + (isPlaceholder ? 1 : 0); + return result; + } + } + + /** A supplier for a position. */ + protected interface PositionSupplier { + + /** An instance returning a constant position of zero. */ + PositionSupplier ZERO = getConstant(/* positionMs= */ 0); + + /** + * Returns an instance that returns a constant value. + * + * @param positionMs The constant position to return, in milliseconds. + */ + static PositionSupplier getConstant(long positionMs) { + return () -> positionMs; + } + + /** + * Returns an instance that extrapolates the provided position into the future. 
+ * + * @param currentPositionMs The current position in milliseconds. + * @param playbackSpeed The playback speed with which the position is assumed to increase. + */ + static PositionSupplier getExtrapolating(long currentPositionMs, float playbackSpeed) { + long startTimeMs = SystemClock.elapsedRealtime(); + return () -> { + long currentTimeMs = SystemClock.elapsedRealtime(); + return currentPositionMs + (long) ((currentTimeMs - startTimeMs) * playbackSpeed); + }; + } + + /** Returns the position. */ + long get(); + } + + /** + * Position difference threshold below which we do not automatically report a position + * discontinuity, in milliseconds. + */ + private static final long POSITION_DISCONTINUITY_THRESHOLD_MS = 1000; + + private final ListenerSet listeners; + private final Looper applicationLooper; + private final HandlerWrapper applicationHandler; + private final HashSet> pendingOperations; + private final Timeline.Period period; + + private @MonotonicNonNull State state; + private boolean released; + + /** + * Creates the base class. + * + * @param applicationLooper The {@link Looper} that must be used for all calls to the player and + * that is used to call listeners on. + */ + protected SimpleBasePlayer(Looper applicationLooper) { + this(applicationLooper, Clock.DEFAULT); + } + + /** + * Creates the base class. + * + * @param applicationLooper The {@link Looper} that must be used for all calls to the player and + * that is used to call listeners on. + * @param clock The {@link Clock} that will be used by the player. + */ + protected SimpleBasePlayer(Looper applicationLooper, Clock clock) { + this.applicationLooper = applicationLooper; + applicationHandler = clock.createHandler(applicationLooper, /* callback= */ null); + pendingOperations = new HashSet<>(); + period = new Timeline.Period(); + @SuppressWarnings("nullness:argument.type.incompatible") // Using this in constructor. + ListenerSet listenerSet = + new ListenerSet<>( + applicationLooper, + clock, + (listener, flags) -> listener.onEvents(/* player= */ this, new Events(flags))); + listeners = listenerSet; + } + + @Override + public final void addListener(Listener listener) { + // Don't verify application thread. We allow calls to this method from any thread. + listeners.add(checkNotNull(listener)); + } + + @Override + public final void removeListener(Listener listener) { + verifyApplicationThreadAndInitState(); + listeners.remove(listener); + } + + @Override + public final Looper getApplicationLooper() { + // Don't verify application thread. We allow calls to this method from any thread. + return applicationLooper; + } + + @Override + public final Commands getAvailableCommands() { + verifyApplicationThreadAndInitState(); + return state.availableCommands; + } + + @Override + public final void setPlayWhenReady(boolean playWhenReady) { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. 
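+ // The placeholder supplier below builds a State that already reflects the requested
+ // playWhenReady value with a user-request reason; listeners see this state immediately, and it
+ // is replaced by the real State once the future returned by handleSetPlayWhenReady() completes.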
+ State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_PLAY_PAUSE)) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleSetPlayWhenReady(playWhenReady), + /* placeholderStateSupplier= */ () -> + state + .buildUpon() + .setPlayWhenReady(playWhenReady, Player.PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST) + .build()); + } + + @Override + public final boolean getPlayWhenReady() { + verifyApplicationThreadAndInitState(); + return state.playWhenReady; + } + + @Override + public final void setMediaItems(List mediaItems, boolean resetPosition) { + verifyApplicationThreadAndInitState(); + int startIndex = resetPosition ? C.INDEX_UNSET : state.currentMediaItemIndex; + long startPositionMs = resetPosition ? C.TIME_UNSET : state.contentPositionMsSupplier.get(); + setMediaItemsInternal(mediaItems, startIndex, startPositionMs); + } + + @Override + public final void setMediaItems( + List mediaItems, int startIndex, long startPositionMs) { + verifyApplicationThreadAndInitState(); + if (startIndex == C.INDEX_UNSET) { + startIndex = state.currentMediaItemIndex; + startPositionMs = state.contentPositionMsSupplier.get(); + } + setMediaItemsInternal(mediaItems, startIndex, startPositionMs); + } + + @RequiresNonNull("state") + private void setMediaItemsInternal( + List mediaItems, int startIndex, long startPositionMs) { + checkArgument(startIndex == C.INDEX_UNSET || startIndex >= 0); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_CHANGE_MEDIA_ITEMS) + && (mediaItems.size() != 1 || !shouldHandleCommand(Player.COMMAND_SET_MEDIA_ITEM))) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleSetMediaItems(mediaItems, startIndex, startPositionMs), + /* placeholderStateSupplier= */ () -> { + ArrayList placeholderPlaylist = new ArrayList<>(); + for (int i = 0; i < mediaItems.size(); i++) { + placeholderPlaylist.add(getPlaceholderMediaItemData(mediaItems.get(i))); + } + return getStateWithNewPlaylistAndPosition( + state, placeholderPlaylist, startIndex, startPositionMs); + }); + } + + @Override + public final void addMediaItems(int index, List mediaItems) { + verifyApplicationThreadAndInitState(); + checkArgument(index >= 0); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + int playlistSize = state.playlist.size(); + if (!shouldHandleCommand(Player.COMMAND_CHANGE_MEDIA_ITEMS) || mediaItems.isEmpty()) { + return; + } + int correctedIndex = min(index, playlistSize); + updateStateForPendingOperation( + /* pendingOperation= */ handleAddMediaItems(correctedIndex, mediaItems), + /* placeholderStateSupplier= */ () -> { + ArrayList placeholderPlaylist = new ArrayList<>(state.playlist); + for (int i = 0; i < mediaItems.size(); i++) { + placeholderPlaylist.add( + i + correctedIndex, getPlaceholderMediaItemData(mediaItems.get(i))); + } + return getStateWithNewPlaylist(state, placeholderPlaylist, period); + }); + } + + @Override + public final void moveMediaItems(int fromIndex, int toIndex, int newIndex) { + verifyApplicationThreadAndInitState(); + checkArgument(fromIndex >= 0 && toIndex >= fromIndex && newIndex >= 0); + // Use a local copy to ensure the lambda below uses the current state value. 
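+ // toIndex and newIndex are clamped to the current playlist size before being passed to
+ // handleMoveMediaItems(), and the call becomes a no-op if the corrected range is empty or the
+ // items would end up at their current position anyway.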
+ State state = this.state; + int playlistSize = state.playlist.size(); + if (!shouldHandleCommand(Player.COMMAND_CHANGE_MEDIA_ITEMS) + || playlistSize == 0 + || fromIndex >= playlistSize) { + return; + } + int correctedToIndex = min(toIndex, playlistSize); + int correctedNewIndex = min(newIndex, state.playlist.size() - (correctedToIndex - fromIndex)); + if (fromIndex == correctedToIndex || correctedNewIndex == fromIndex) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleMoveMediaItems( + fromIndex, correctedToIndex, correctedNewIndex), + /* placeholderStateSupplier= */ () -> { + ArrayList placeholderPlaylist = new ArrayList<>(state.playlist); + Util.moveItems(placeholderPlaylist, fromIndex, correctedToIndex, correctedNewIndex); + return getStateWithNewPlaylist(state, placeholderPlaylist, period); + }); + } + + @Override + public final void removeMediaItems(int fromIndex, int toIndex) { + verifyApplicationThreadAndInitState(); + checkArgument(fromIndex >= 0 && toIndex >= fromIndex); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + int playlistSize = state.playlist.size(); + if (!shouldHandleCommand(Player.COMMAND_CHANGE_MEDIA_ITEMS) + || playlistSize == 0 + || fromIndex >= playlistSize) { + return; + } + int correctedToIndex = min(toIndex, playlistSize); + if (fromIndex == correctedToIndex) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleRemoveMediaItems(fromIndex, correctedToIndex), + /* placeholderStateSupplier= */ () -> { + ArrayList placeholderPlaylist = new ArrayList<>(state.playlist); + Util.removeRange(placeholderPlaylist, fromIndex, correctedToIndex); + return getStateWithNewPlaylist(state, placeholderPlaylist, period); + }); + } + + @Override + public final void prepare() { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_PREPARE)) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handlePrepare(), + /* placeholderStateSupplier= */ () -> + state + .buildUpon() + .setPlayerError(null) + .setPlaybackState(state.timeline.isEmpty() ? STATE_ENDED : STATE_BUFFERING) + .build()); + } + + @Override + @Player.State + public final int getPlaybackState() { + verifyApplicationThreadAndInitState(); + return state.playbackState; + } + + @Override + public final int getPlaybackSuppressionReason() { + verifyApplicationThreadAndInitState(); + return state.playbackSuppressionReason; + } + + @Nullable + @Override + public final PlaybackException getPlayerError() { + verifyApplicationThreadAndInitState(); + return state.playerError; + } + + @Override + public final void setRepeatMode(@Player.RepeatMode int repeatMode) { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. 
+ State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_SET_REPEAT_MODE)) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleSetRepeatMode(repeatMode), + /* placeholderStateSupplier= */ () -> state.buildUpon().setRepeatMode(repeatMode).build()); + } + + @Override + @Player.RepeatMode + public final int getRepeatMode() { + verifyApplicationThreadAndInitState(); + return state.repeatMode; + } + + @Override + public final void setShuffleModeEnabled(boolean shuffleModeEnabled) { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_SET_SHUFFLE_MODE)) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleSetShuffleModeEnabled(shuffleModeEnabled), + /* placeholderStateSupplier= */ () -> + state.buildUpon().setShuffleModeEnabled(shuffleModeEnabled).build()); + } + + @Override + public final boolean getShuffleModeEnabled() { + verifyApplicationThreadAndInitState(); + return state.shuffleModeEnabled; + } + + @Override + public final boolean isLoading() { + verifyApplicationThreadAndInitState(); + return state.isLoading; + } + + @Override + @VisibleForTesting(otherwise = PROTECTED) + public final void seekTo( + int mediaItemIndex, + long positionMs, + @Player.Command int seekCommand, + boolean isRepeatingCurrentItem) { + verifyApplicationThreadAndInitState(); + checkArgument(mediaItemIndex >= 0); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + if (!shouldHandleCommand(seekCommand) + || isPlayingAd() + || (!state.playlist.isEmpty() && mediaItemIndex >= state.playlist.size())) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleSeek(mediaItemIndex, positionMs, seekCommand), + /* placeholderStateSupplier= */ () -> + getStateWithNewPlaylistAndPosition(state, state.playlist, mediaItemIndex, positionMs), + /* seeked= */ true, + isRepeatingCurrentItem); + } + + @Override + public final long getSeekBackIncrement() { + verifyApplicationThreadAndInitState(); + return state.seekBackIncrementMs; + } + + @Override + public final long getSeekForwardIncrement() { + verifyApplicationThreadAndInitState(); + return state.seekForwardIncrementMs; + } + + @Override + public final long getMaxSeekToPreviousPosition() { + verifyApplicationThreadAndInitState(); + return state.maxSeekToPreviousPositionMs; + } + + @Override + public final void setPlaybackParameters(PlaybackParameters playbackParameters) { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_SET_SPEED_AND_PITCH)) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleSetPlaybackParameters(playbackParameters), + /* placeholderStateSupplier= */ () -> + state.buildUpon().setPlaybackParameters(playbackParameters).build()); + } + + @Override + public final PlaybackParameters getPlaybackParameters() { + verifyApplicationThreadAndInitState(); + return state.playbackParameters; + } + + @Override + public final void stop() { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. 
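+ // The placeholder state for stop() keeps the current content position but switches to
+ // STATE_IDLE, reports no buffered data beyond the current position and clears isLoading until
+ // handleStop() completes.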
+ State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_STOP)) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleStop(), + /* placeholderStateSupplier= */ () -> + state + .buildUpon() + .setPlaybackState(Player.STATE_IDLE) + .setTotalBufferedDurationMs(PositionSupplier.ZERO) + .setContentBufferedPositionMs( + PositionSupplier.getConstant(getContentPositionMsInternal(state))) + .setAdBufferedPositionMs(state.adPositionMsSupplier) + .setIsLoading(false) + .build()); + } + + @Override + public final void stop(boolean reset) { + stop(); + if (reset) { + clearMediaItems(); + } + } + + @Override + public final void release() { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + if (released) { // TODO(b/261158047): Replace by !shouldHandleCommand(Player.COMMAND_RELEASE) + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleRelease(), /* placeholderStateSupplier= */ () -> state); + released = true; + listeners.release(); + // Enforce some final state values in case getters are called after release. + this.state = + this.state + .buildUpon() + .setPlaybackState(Player.STATE_IDLE) + .setTotalBufferedDurationMs(PositionSupplier.ZERO) + .setContentBufferedPositionMs( + PositionSupplier.getConstant(getContentPositionMsInternal(state))) + .setAdBufferedPositionMs(state.adPositionMsSupplier) + .setIsLoading(false) + .build(); + } + + @Override + public final Tracks getCurrentTracks() { + verifyApplicationThreadAndInitState(); + return getCurrentTracksInternal(state); + } + + @Override + public final TrackSelectionParameters getTrackSelectionParameters() { + verifyApplicationThreadAndInitState(); + return state.trackSelectionParameters; + } + + @Override + public final void setTrackSelectionParameters(TrackSelectionParameters parameters) { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_SET_TRACK_SELECTION_PARAMETERS)) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleSetTrackSelectionParameters(parameters), + /* placeholderStateSupplier= */ () -> + state.buildUpon().setTrackSelectionParameters(parameters).build()); + } + + @Override + public final MediaMetadata getMediaMetadata() { + verifyApplicationThreadAndInitState(); + return getMediaMetadataInternal(state); + } + + @Override + public final MediaMetadata getPlaylistMetadata() { + verifyApplicationThreadAndInitState(); + return state.playlistMetadata; + } + + @Override + public final void setPlaylistMetadata(MediaMetadata mediaMetadata) { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. 
+ State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_SET_MEDIA_ITEMS_METADATA)) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleSetPlaylistMetadata(mediaMetadata), + /* placeholderStateSupplier= */ () -> + state.buildUpon().setPlaylistMetadata(mediaMetadata).build()); + } + + @Override + public final Timeline getCurrentTimeline() { + verifyApplicationThreadAndInitState(); + return state.timeline; + } + + @Override + public final int getCurrentPeriodIndex() { + verifyApplicationThreadAndInitState(); + return getCurrentPeriodIndexInternal(state, window, period); + } + + @Override + public final int getCurrentMediaItemIndex() { + verifyApplicationThreadAndInitState(); + return getCurrentMediaItemIndexInternal(state); + } + + @Override + public final long getDuration() { + verifyApplicationThreadAndInitState(); + if (isPlayingAd()) { + state.timeline.getPeriod(getCurrentPeriodIndex(), period); + long adDurationUs = + period.getAdDurationUs(state.currentAdGroupIndex, state.currentAdIndexInAdGroup); + return Util.usToMs(adDurationUs); + } + return getContentDuration(); + } + + @Override + public final long getCurrentPosition() { + verifyApplicationThreadAndInitState(); + return isPlayingAd() ? state.adPositionMsSupplier.get() : getContentPosition(); + } + + @Override + public final long getBufferedPosition() { + verifyApplicationThreadAndInitState(); + return isPlayingAd() + ? max(state.adBufferedPositionMsSupplier.get(), state.adPositionMsSupplier.get()) + : getContentBufferedPosition(); + } + + @Override + public final long getTotalBufferedDuration() { + verifyApplicationThreadAndInitState(); + return state.totalBufferedDurationMsSupplier.get(); + } + + @Override + public final boolean isPlayingAd() { + verifyApplicationThreadAndInitState(); + return state.currentAdGroupIndex != C.INDEX_UNSET; + } + + @Override + public final int getCurrentAdGroupIndex() { + verifyApplicationThreadAndInitState(); + return state.currentAdGroupIndex; + } + + @Override + public final int getCurrentAdIndexInAdGroup() { + verifyApplicationThreadAndInitState(); + return state.currentAdIndexInAdGroup; + } + + @Override + public final long getContentPosition() { + verifyApplicationThreadAndInitState(); + return getContentPositionMsInternal(state); + } + + @Override + public final long getContentBufferedPosition() { + verifyApplicationThreadAndInitState(); + return max(getContentBufferedPositionMsInternal(state), getContentPositionMsInternal(state)); + } + + @Override + public final AudioAttributes getAudioAttributes() { + verifyApplicationThreadAndInitState(); + return state.audioAttributes; + } + + @Override + public final void setVolume(float volume) { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_SET_VOLUME)) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleSetVolume(volume), + /* placeholderStateSupplier= */ () -> state.buildUpon().setVolume(volume).build()); + } + + @Override + public final float getVolume() { + verifyApplicationThreadAndInitState(); + return state.volume; + } + + @Override + public final void setVideoSurface(@Nullable Surface surface) { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. 
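+ // A null surface is forwarded to clearVideoSurface(); otherwise the placeholder state reports
+ // an unknown surface size until handleSetVideoOutput() has applied the new output.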
+ State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_SET_VIDEO_SURFACE)) { + return; + } + if (surface == null) { + clearVideoSurface(); + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleSetVideoOutput(surface), + /* placeholderStateSupplier= */ () -> + state.buildUpon().setSurfaceSize(Size.UNKNOWN).build()); + } + + @Override + public final void setVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder) { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_SET_VIDEO_SURFACE)) { + return; + } + if (surfaceHolder == null) { + clearVideoSurface(); + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleSetVideoOutput(surfaceHolder), + /* placeholderStateSupplier= */ () -> + state.buildUpon().setSurfaceSize(getSurfaceHolderSize(surfaceHolder)).build()); + } + + @Override + public final void setVideoSurfaceView(@Nullable SurfaceView surfaceView) { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_SET_VIDEO_SURFACE)) { + return; + } + if (surfaceView == null) { + clearVideoSurface(); + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleSetVideoOutput(surfaceView), + /* placeholderStateSupplier= */ () -> + state + .buildUpon() + .setSurfaceSize(getSurfaceHolderSize(surfaceView.getHolder())) + .build()); + } + + @Override + public final void setVideoTextureView(@Nullable TextureView textureView) { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_SET_VIDEO_SURFACE)) { + return; + } + if (textureView == null) { + clearVideoSurface(); + return; + } + Size surfaceSize; + if (textureView.isAvailable()) { + surfaceSize = new Size(textureView.getWidth(), textureView.getHeight()); + } else { + surfaceSize = Size.ZERO; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleSetVideoOutput(textureView), + /* placeholderStateSupplier= */ () -> + state.buildUpon().setSurfaceSize(surfaceSize).build()); + } + + @Override + public final void clearVideoSurface() { + clearVideoOutput(/* videoOutput= */ null); + } + + @Override + public final void clearVideoSurface(@Nullable Surface surface) { + clearVideoOutput(surface); + } + + @Override + public final void clearVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder) { + clearVideoOutput(surfaceHolder); + } + + @Override + public final void clearVideoSurfaceView(@Nullable SurfaceView surfaceView) { + clearVideoOutput(surfaceView); + } + + @Override + public final void clearVideoTextureView(@Nullable TextureView textureView) { + clearVideoOutput(textureView); + } + + private void clearVideoOutput(@Nullable Object videoOutput) { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. 
+ State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_SET_VIDEO_SURFACE)) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleClearVideoOutput(videoOutput), + /* placeholderStateSupplier= */ () -> state.buildUpon().setSurfaceSize(Size.ZERO).build()); + } + + @Override + public final VideoSize getVideoSize() { + verifyApplicationThreadAndInitState(); + return state.videoSize; + } + + @Override + public final Size getSurfaceSize() { + verifyApplicationThreadAndInitState(); + return state.surfaceSize; + } + + @Override + public final CueGroup getCurrentCues() { + verifyApplicationThreadAndInitState(); + return state.currentCues; + } + + @Override + public final DeviceInfo getDeviceInfo() { + verifyApplicationThreadAndInitState(); + return state.deviceInfo; + } + + @Override + public final int getDeviceVolume() { + verifyApplicationThreadAndInitState(); + return state.deviceVolume; + } + + @Override + public final boolean isDeviceMuted() { + verifyApplicationThreadAndInitState(); + return state.isDeviceMuted; + } + + @Override + public final void setDeviceVolume(int volume) { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_SET_DEVICE_VOLUME)) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleSetDeviceVolume(volume), + /* placeholderStateSupplier= */ () -> state.buildUpon().setDeviceVolume(volume).build()); + } + + @Override + public final void increaseDeviceVolume() { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_ADJUST_DEVICE_VOLUME)) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleIncreaseDeviceVolume(), + /* placeholderStateSupplier= */ () -> + state.buildUpon().setDeviceVolume(state.deviceVolume + 1).build()); + } + + @Override + public final void decreaseDeviceVolume() { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_ADJUST_DEVICE_VOLUME)) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleDecreaseDeviceVolume(), + /* placeholderStateSupplier= */ () -> + state.buildUpon().setDeviceVolume(max(0, state.deviceVolume - 1)).build()); + } + + @Override + public final void setDeviceMuted(boolean muted) { + verifyApplicationThreadAndInitState(); + // Use a local copy to ensure the lambda below uses the current state value. + State state = this.state; + if (!shouldHandleCommand(Player.COMMAND_ADJUST_DEVICE_VOLUME)) { + return; + } + updateStateForPendingOperation( + /* pendingOperation= */ handleSetDeviceMuted(muted), + /* placeholderStateSupplier= */ () -> state.buildUpon().setIsDeviceMuted(muted).build()); + } + + /** + * Invalidates the current state. + * + *
<p>
      Triggers a call to {@link #getState()} and informs listeners if the state changed. + * + *
<p>
      Note that this may not have an immediate effect while there are still player methods being + * handled asynchronously. The state will be invalidated automatically once these pending + * synchronous operations are finished and there is no need to call this method again. + */ + protected final void invalidateState() { + verifyApplicationThreadAndInitState(); + if (!pendingOperations.isEmpty() || released) { + return; + } + updateStateAndInformListeners( + getState(), /* seeked= */ false, /* isRepeatingCurrentItem= */ false); + } + + /** + * Returns the current {@link State} of the player. + * + *
<p>
      The {@link State} should include all {@linkplain + * State.Builder#setAvailableCommands(Commands) available commands} indicating which player + * methods are allowed to be called. + * + *
<p>
      Note that this method won't be called while asynchronous handling of player methods is in + * progress. This means that the implementation doesn't need to handle state changes caused by + * these asynchronous operations until they are done and can return the currently known state + * directly. The placeholder state used while these asynchronous operations are in progress can be + * customized by overriding {@link #getPlaceholderState(State)} if required. + */ + @ForOverride + protected abstract State getState(); + + /** + * Returns the placeholder state used while a player method is handled asynchronously. + * + *

      The {@code suggestedPlaceholderState} already contains the most likely state update, for + * example setting {@link State#playWhenReady} to true if {@code player.setPlayWhenReady(true)} is + * called, and an implementation only needs to override this method if it can determine a more + * accurate placeholder state. + * + * @param suggestedPlaceholderState The suggested placeholder {@link State}, including the most + * likely outcome of handling all pending asynchronous operations. + * @return The placeholder {@link State} to use while asynchronous operations are pending. + */ + @ForOverride + protected State getPlaceholderState(State suggestedPlaceholderState) { + return suggestedPlaceholderState; + } + + /** + * Returns the placeholder {@link MediaItemData} used for a new {@link MediaItem} added to the + * playlist. + * + *
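A possible override of the placeholder-item hook introduced above, shown as a sketch: it reuses the builder calls visible in the diff and additionally sets a duration when one is already known. `durationCache` (a `Map<String, Long>` keyed by media id) is illustrative, and `Builder#setDurationUs` is assumed; only `setMediaItem`, `setIsDynamic` and `setIsPlaceholder` are confirmed by the diff.

```java
// Hypothetical override: enrich the default placeholder when the duration is already cached.
@Override
protected MediaItemData getPlaceholderMediaItemData(MediaItem mediaItem) {
  MediaItemData.Builder builder =
      new MediaItemData.Builder(/* uid= */ "placeholder-" + mediaItem.mediaId)
          .setMediaItem(mediaItem)
          .setIsDynamic(true)
          .setIsPlaceholder(true);
  Long knownDurationUs = durationCache.get(mediaItem.mediaId); // illustrative cache lookup
  if (knownDurationUs != null) {
    builder.setDurationUs(knownDurationUs); // assumed setter, not shown in the diff
  }
  return builder.build();
}
```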

      An implementation only needs to override this method if it can determine a more accurate + * placeholder state than the default. + * + * @param mediaItem The {@link MediaItem} added to the playlist. + * @return The {@link MediaItemData} used as placeholder while adding the item to the playlist is + * in progress. + */ + @ForOverride + protected MediaItemData getPlaceholderMediaItemData(MediaItem mediaItem) { + return new MediaItemData.Builder(new PlaceholderUid()) + .setMediaItem(mediaItem) + .setIsDynamic(true) + .setIsPlaceholder(true) + .build(); + } + + /** + * Handles calls to {@link Player#setPlayWhenReady}, {@link Player#play} and {@link Player#pause}. + * + *
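The play/pause handler introduced above is the simplest of the handleXxx family. A synchronous sketch, assuming an illustrative `internalPlayer` backend and Guava's `Futures.immediateVoidFuture()` (Guava is added elsewhere in this diff); the `<?>` type parameter is assumed because the extraction appears to have stripped generics from the signatures shown here.

```java
// Hypothetical synchronous handler: mutate the fields read by getState(), finish immediately.
@Override
protected ListenableFuture<?> handleSetPlayWhenReady(boolean playWhenReady) {
  this.playWhenReady = playWhenReady;
  if (playWhenReady) {
    internalPlayer.resume(); // illustrative backend call
  } else {
    internalPlayer.pause();  // illustrative backend call
  }
  return Futures.immediateVoidFuture(); // nothing asynchronous left to wait for
}
```

The other single-setting handlers that follow (stop, repeat mode, shuffle, speed, volume, metadata) can use the same shape: update whatever backs getState() and return an already-completed future.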

      Will only be called if {@link Player#COMMAND_PLAY_PAUSE} is available. + * + * @param playWhenReady The requested {@link State#playWhenReady} + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleSetPlayWhenReady(boolean playWhenReady) { + throw new IllegalStateException("Missing implementation to handle COMMAND_PLAY_PAUSE"); + } + + /** + * Handles calls to {@link Player#prepare}. + * + *
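Preparation is the typical asynchronous case, which is why these handlers return a ListenableFuture. A sketch assuming a hypothetical `backend.prepareAsync` whose callback is delivered on the application thread:

```java
// Hypothetical asynchronous handler: the returned future completes when preparation ends,
// at which point the base class re-reads getState() (see updateStateForPendingOperation below).
@Override
protected ListenableFuture<?> handlePrepare() {
  SettableFuture<Void> prepared = SettableFuture.create();
  playbackState = Player.STATE_BUFFERING; // reflected in the placeholder state meanwhile
  backend.prepareAsync(
      () -> {
        playbackState = Player.STATE_READY; // callback assumed to run on the application thread
        prepared.set(null);
      });
  return prepared;
}
```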

      Will only be called if {@link Player#COMMAND_PREPARE} is available. + * + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handlePrepare() { + throw new IllegalStateException("Missing implementation to handle COMMAND_PREPARE"); + } + + /** + * Handles calls to {@link Player#stop}. + * + *

      Will only be called if {@link Player#COMMAND_STOP} is available. + * + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleStop() { + throw new IllegalStateException("Missing implementation to handle COMMAND_STOP"); + } + + /** + * Handles calls to {@link Player#release}. + * + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + // TODO(b/261158047): Add that this method will only be called if COMMAND_RELEASE is available. + @ForOverride + protected ListenableFuture handleRelease() { + throw new IllegalStateException("Missing implementation to handle COMMAND_RELEASE"); + } + + /** + * Handles calls to {@link Player#setRepeatMode}. + * + *

      Will only be called if {@link Player#COMMAND_SET_REPEAT_MODE} is available. + * + * @param repeatMode The requested {@link RepeatMode}. + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleSetRepeatMode(@RepeatMode int repeatMode) { + throw new IllegalStateException("Missing implementation to handle COMMAND_SET_REPEAT_MODE"); + } + + /** + * Handles calls to {@link Player#setShuffleModeEnabled}. + * + *

      Will only be called if {@link Player#COMMAND_SET_SHUFFLE_MODE} is available. + * + * @param shuffleModeEnabled Whether shuffle mode was requested to be enabled. + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleSetShuffleModeEnabled(boolean shuffleModeEnabled) { + throw new IllegalStateException("Missing implementation to handle COMMAND_SET_SHUFFLE_MODE"); + } + + /** + * Handles calls to {@link Player#setPlaybackParameters} or {@link Player#setPlaybackSpeed}. + * + *

      Will only be called if {@link Player#COMMAND_SET_SPEED_AND_PITCH} is available. + * + * @param playbackParameters The requested {@link PlaybackParameters}. + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleSetPlaybackParameters(PlaybackParameters playbackParameters) { + throw new IllegalStateException("Missing implementation to handle COMMAND_SET_SPEED_AND_PITCH"); + } + + /** + * Handles calls to {@link Player#setTrackSelectionParameters}. + * + *

      Will only be called if {@link Player#COMMAND_SET_TRACK_SELECTION_PARAMETERS} is available. + * + * @param trackSelectionParameters The requested {@link TrackSelectionParameters}. + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleSetTrackSelectionParameters( + TrackSelectionParameters trackSelectionParameters) { + throw new IllegalStateException( + "Missing implementation to handle COMMAND_SET_TRACK_SELECTION_PARAMETERS"); + } + + /** + * Handles calls to {@link Player#setPlaylistMetadata}. + * + *

      Will only be called if {@link Player#COMMAND_SET_MEDIA_ITEMS_METADATA} is available. + * + * @param playlistMetadata The requested {@linkplain MediaMetadata playlist metadata}. + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleSetPlaylistMetadata(MediaMetadata playlistMetadata) { + throw new IllegalStateException( + "Missing implementation to handle COMMAND_SET_MEDIA_ITEMS_METADATA"); + } + + /** + * Handles calls to {@link Player#setVolume}. + * + *

      Will only be called if {@link Player#COMMAND_SET_VOLUME} is available. + * + * @param volume The requested audio volume, with 0 being silence and 1 being unity gain (signal + * unchanged). + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleSetVolume(@FloatRange(from = 0, to = 1.0) float volume) { + throw new IllegalStateException("Missing implementation to handle COMMAND_SET_VOLUME"); + } + + /** + * Handles calls to {@link Player#setDeviceVolume}. + * + *
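For the device-volume handler introduced above, one plausible mapping is the platform music stream. This is a sketch, not the library's prescribed approach; `context` and `deviceVolume` are illustrative fields of the subclass.

```java
// Hypothetical handler: forward the requested device volume to the system music stream.
@Override
protected ListenableFuture<?> handleSetDeviceVolume(int deviceVolume) {
  AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
  audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, deviceVolume, /* flags= */ 0);
  this.deviceVolume = deviceVolume; // read back by getState()
  return Futures.immediateVoidFuture();
}
```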

      Will only be called if {@link Player#COMMAND_SET_DEVICE_VOLUME} is available. + * + * @param deviceVolume The requested device volume. + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleSetDeviceVolume(@IntRange(from = 0) int deviceVolume) { + throw new IllegalStateException("Missing implementation to handle COMMAND_SET_DEVICE_VOLUME"); + } + + /** + * Handles calls to {@link Player#increaseDeviceVolume()}. + * + *

      Will only be called if {@link Player#COMMAND_ADJUST_DEVICE_VOLUME} is available. + * + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleIncreaseDeviceVolume() { + throw new IllegalStateException( + "Missing implementation to handle COMMAND_ADJUST_DEVICE_VOLUME"); + } + + /** + * Handles calls to {@link Player#decreaseDeviceVolume()}. + * + *

      Will only be called if {@link Player#COMMAND_ADJUST_DEVICE_VOLUME} is available. + * + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleDecreaseDeviceVolume() { + throw new IllegalStateException( + "Missing implementation to handle COMMAND_ADJUST_DEVICE_VOLUME"); + } + + /** + * Handles calls to {@link Player#setDeviceMuted}. + * + *

      Will only be called if {@link Player#COMMAND_ADJUST_DEVICE_VOLUME} is available. + * + * @param muted Whether the device was requested to be muted. + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleSetDeviceMuted(boolean muted) { + throw new IllegalStateException( + "Missing implementation to handle COMMAND_ADJUST_DEVICE_VOLUME"); + } + + /** + * Handles calls to set the video output. + * + *
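A sketch of the video-output handler introduced above (its full contract follows below). It accepts the two most common output types and records the surface size so getState() can report it via setSurfaceSize, mirroring the Size handling visible elsewhere in this diff; `backend` and `surfaceSize` are illustrative.

```java
// Hypothetical handler for Surface and SurfaceHolder outputs.
@Override
protected ListenableFuture<?> handleSetVideoOutput(Object videoOutput) {
  if (videoOutput instanceof Surface) {
    backend.setOutputSurface((Surface) videoOutput); // illustrative backend call
    surfaceSize = Size.ZERO; // size of a bare Surface is not known here; kept simple
  } else if (videoOutput instanceof SurfaceHolder) {
    SurfaceHolder holder = (SurfaceHolder) videoOutput;
    backend.setOutputSurface(holder.getSurface());
    Rect frame = holder.getSurfaceFrame();
    surfaceSize = new Size(frame.width(), frame.height());
  } else {
    return Futures.immediateFailedFuture(
        new IllegalArgumentException("Unsupported video output: " + videoOutput));
  }
  return Futures.immediateVoidFuture();
}
```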

      Will only be called if {@link Player#COMMAND_SET_VIDEO_SURFACE} is available. + * + * @param videoOutput The requested video output. This is either a {@link Surface}, {@link + * SurfaceHolder}, {@link TextureView} or {@link SurfaceView}. + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleSetVideoOutput(Object videoOutput) { + throw new IllegalStateException("Missing implementation to handle COMMAND_SET_VIDEO_SURFACE"); + } + + /** + * Handles calls to clear the video output. + * + *

      Will only be called if {@link Player#COMMAND_SET_VIDEO_SURFACE} is available. + * + * @param videoOutput The video output to clear. If null any current output should be cleared. If + * non-null, the output should only be cleared if it matches the provided argument. This is + * either a {@link Surface}, {@link SurfaceHolder}, {@link TextureView} or {@link + * SurfaceView}. + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleClearVideoOutput(@Nullable Object videoOutput) { + throw new IllegalStateException("Missing implementation to handle COMMAND_SET_VIDEO_SURFACE"); + } + + /** + * Handles calls to {@link Player#setMediaItem} and {@link Player#setMediaItems}. + * + *
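For the playlist setter introduced above, a sketch that reuses the placeholder hook shown earlier; `playlist`, `currentIndex` and `currentPositionMs` are illustrative fields that getState() would expose through setPlaylist, setCurrentMediaItemIndex and setContentPositionMs.

```java
// Hypothetical handler: replace the backing playlist and reset the playback position.
@Override
protected ListenableFuture<?> handleSetMediaItems(
    List<MediaItem> mediaItems, int startIndex, long startPositionMs) {
  playlist.clear();
  for (MediaItem item : mediaItems) {
    playlist.add(getPlaceholderMediaItemData(item)); // resolved to real item data later
  }
  currentIndex = startIndex == C.INDEX_UNSET ? 0 : startIndex;
  currentPositionMs = startPositionMs == C.TIME_UNSET ? 0 : startPositionMs;
  return Futures.immediateVoidFuture();
}
```

handleAddMediaItems, handleMoveMediaItems and handleRemoveMediaItems below can mutate the same backing list within the index ranges their javadoc spells out.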

      Will only be called if {@link Player#COMMAND_SET_MEDIA_ITEM} or {@link + * Player#COMMAND_CHANGE_MEDIA_ITEMS} is available. If only {@link Player#COMMAND_SET_MEDIA_ITEM} + * is available, the list of media items will always contain exactly one item. + * + * @param mediaItems The media items to add. + * @param startIndex The index at which to start playback from, or {@link C#INDEX_UNSET} to start + * at the default item. + * @param startPositionMs The position in milliseconds to start playback from, or {@link + * C#TIME_UNSET} to start at the default position in the media item. + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleSetMediaItems( + List mediaItems, int startIndex, long startPositionMs) { + throw new IllegalStateException("Missing implementation to handle COMMAND_SET_MEDIA_ITEM(S)"); + } + + /** + * Handles calls to {@link Player#addMediaItem} and {@link Player#addMediaItems}. + * + *

      Will only be called if {@link Player#COMMAND_CHANGE_MEDIA_ITEMS} is available. + * + * @param index The index at which to add the items. The index is in the range 0 <= {@code + * index} <= {@link #getMediaItemCount()}. + * @param mediaItems The media items to add. + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleAddMediaItems(int index, List mediaItems) { + throw new IllegalStateException("Missing implementation to handle COMMAND_CHANGE_MEDIA_ITEMS"); + } + + /** + * Handles calls to {@link Player#moveMediaItem} and {@link Player#moveMediaItems}. + * + *

      Will only be called if {@link Player#COMMAND_CHANGE_MEDIA_ITEMS} is available. + * + * @param fromIndex The start index of the items to move. The index is in the range 0 <= {@code + * fromIndex} < {@link #getMediaItemCount()}. + * @param toIndex The index of the first item not to be included in the move (exclusive). The + * index is in the range {@code fromIndex} < {@code toIndex} <= {@link + * #getMediaItemCount()}. + * @param newIndex The new index of the first moved item. The index is in the range {@code 0} + * <= {@code newIndex} < {@link #getMediaItemCount() - (toIndex - fromIndex)}. + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleMoveMediaItems(int fromIndex, int toIndex, int newIndex) { + throw new IllegalStateException("Missing implementation to handle COMMAND_CHANGE_MEDIA_ITEMS"); + } + + /** + * Handles calls to {@link Player#removeMediaItem} and {@link Player#removeMediaItems}. + * + *

      Will only be called if {@link Player#COMMAND_CHANGE_MEDIA_ITEMS} is available. + * + * @param fromIndex The index at which to start removing media items. The index is in the range 0 + * <= {@code fromIndex} < {@link #getMediaItemCount()}. + * @param toIndex The index of the first item to be kept (exclusive). The index is in the range + * {@code fromIndex} < {@code toIndex} <= {@link #getMediaItemCount()}. + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleRemoveMediaItems(int fromIndex, int toIndex) { + throw new IllegalStateException("Missing implementation to handle COMMAND_CHANGE_MEDIA_ITEMS"); + } + + /** + * Handles calls to {@link Player#seekTo} and other seek operations (for example, {@link + * Player#seekToNext}). + * + *
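All seek variants funnel into the single handler introduced above (its parameters are documented just below). A sketch with an illustrative `backend`:

```java
// Hypothetical handler: every COMMAND_SEEK_* ends up here with a resolved index and position.
@Override
protected ListenableFuture<?> handleSeek(int mediaItemIndex, long positionMs, int seekCommand) {
  currentIndex = mediaItemIndex;
  currentPositionMs = positionMs == C.TIME_UNSET ? 0 : positionMs; // C.TIME_UNSET means default position
  backend.seekTo(currentIndex, currentPositionMs); // illustrative backend call
  return Futures.immediateVoidFuture();
}
```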

      Will only be called if the appropriate {@link Player.Command}, for example {@link + * Player#COMMAND_SEEK_TO_MEDIA_ITEM} or {@link Player#COMMAND_SEEK_TO_NEXT}, is available. + * + * @param mediaItemIndex The media item index to seek to. The index is in the range 0 <= {@code + * mediaItemIndex} < {@code mediaItems.size()}. + * @param positionMs The position in milliseconds to start playback from, or {@link C#TIME_UNSET} + * to start at the default position in the media item. + * @param seekCommand The {@link Player.Command} used to trigger the seek. + * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State} + * changes caused by this call. + */ + @ForOverride + protected ListenableFuture handleSeek( + int mediaItemIndex, long positionMs, @Player.Command int seekCommand) { + throw new IllegalStateException("Missing implementation to handle one of the COMMAND_SEEK_*"); + } + + @RequiresNonNull("state") + private boolean shouldHandleCommand(@Player.Command int commandCode) { + return !released && state.availableCommands.contains(commandCode); + } + + @SuppressWarnings("deprecation") // Calling deprecated listener methods. + @RequiresNonNull("state") + private void updateStateAndInformListeners( + State newState, boolean seeked, boolean isRepeatingCurrentItem) { + State previousState = state; + // Assign new state immediately such that all getters return the right values, but use a + // snapshot of the previous and new state so that listener invocations are triggered correctly. + this.state = newState; + if (newState.hasPositionDiscontinuity || newState.newlyRenderedFirstFrame) { + // Clear one-time events to avoid signalling them again later. + this.state = + this.state + .buildUpon() + .clearPositionDiscontinuity() + .setNewlyRenderedFirstFrame(false) + .build(); + } + + boolean playWhenReadyChanged = previousState.playWhenReady != newState.playWhenReady; + boolean playbackStateChanged = previousState.playbackState != newState.playbackState; + Tracks previousTracks = getCurrentTracksInternal(previousState); + Tracks newTracks = getCurrentTracksInternal(newState); + MediaMetadata previousMediaMetadata = getMediaMetadataInternal(previousState); + MediaMetadata newMediaMetadata = getMediaMetadataInternal(newState); + int positionDiscontinuityReason = + getPositionDiscontinuityReason(previousState, newState, seeked, window, period); + boolean timelineChanged = !previousState.timeline.equals(newState.timeline); + int mediaItemTransitionReason = + getMediaItemTransitionReason( + previousState, newState, positionDiscontinuityReason, isRepeatingCurrentItem, window); + + if (timelineChanged) { + @Player.TimelineChangeReason + int timelineChangeReason = getTimelineChangeReason(previousState.playlist, newState.playlist); + listeners.queueEvent( + Player.EVENT_TIMELINE_CHANGED, + listener -> listener.onTimelineChanged(newState.timeline, timelineChangeReason)); + } + if (positionDiscontinuityReason != C.INDEX_UNSET) { + PositionInfo previousPositionInfo = + getPositionInfo(previousState, /* useDiscontinuityPosition= */ false, window, period); + PositionInfo positionInfo = + getPositionInfo( + newState, + /* useDiscontinuityPosition= */ newState.hasPositionDiscontinuity, + window, + period); + listeners.queueEvent( + Player.EVENT_POSITION_DISCONTINUITY, + listener -> { + listener.onPositionDiscontinuity(positionDiscontinuityReason); + listener.onPositionDiscontinuity( + previousPositionInfo, positionInfo, positionDiscontinuityReason); + }); + } + if 
(mediaItemTransitionReason != C.INDEX_UNSET) { + @Nullable + MediaItem mediaItem = + newState.timeline.isEmpty() + ? null + : newState.playlist.get(getCurrentMediaItemIndexInternal(newState)).mediaItem; + listeners.queueEvent( + Player.EVENT_MEDIA_ITEM_TRANSITION, + listener -> listener.onMediaItemTransition(mediaItem, mediaItemTransitionReason)); + } + if (!Util.areEqual(previousState.playerError, newState.playerError)) { + listeners.queueEvent( + Player.EVENT_PLAYER_ERROR, + listener -> listener.onPlayerErrorChanged(newState.playerError)); + if (newState.playerError != null) { + listeners.queueEvent( + Player.EVENT_PLAYER_ERROR, + listener -> listener.onPlayerError(castNonNull(newState.playerError))); + } + } + if (!previousState.trackSelectionParameters.equals(newState.trackSelectionParameters)) { + listeners.queueEvent( + Player.EVENT_TRACK_SELECTION_PARAMETERS_CHANGED, + listener -> + listener.onTrackSelectionParametersChanged(newState.trackSelectionParameters)); + } + if (!previousTracks.equals(newTracks)) { + listeners.queueEvent( + Player.EVENT_TRACKS_CHANGED, listener -> listener.onTracksChanged(newTracks)); + } + if (!previousMediaMetadata.equals(newMediaMetadata)) { + listeners.queueEvent( + EVENT_MEDIA_METADATA_CHANGED, + listener -> listener.onMediaMetadataChanged(newMediaMetadata)); + } + if (previousState.isLoading != newState.isLoading) { + listeners.queueEvent( + Player.EVENT_IS_LOADING_CHANGED, + listener -> { + listener.onLoadingChanged(newState.isLoading); + listener.onIsLoadingChanged(newState.isLoading); + }); + } + if (playWhenReadyChanged || playbackStateChanged) { + listeners.queueEvent( + /* eventFlag= */ C.INDEX_UNSET, + listener -> + listener.onPlayerStateChanged(newState.playWhenReady, newState.playbackState)); + } + if (playbackStateChanged) { + listeners.queueEvent( + Player.EVENT_PLAYBACK_STATE_CHANGED, + listener -> listener.onPlaybackStateChanged(newState.playbackState)); + } + if (playWhenReadyChanged + || previousState.playWhenReadyChangeReason != newState.playWhenReadyChangeReason) { + listeners.queueEvent( + Player.EVENT_PLAY_WHEN_READY_CHANGED, + listener -> + listener.onPlayWhenReadyChanged( + newState.playWhenReady, newState.playWhenReadyChangeReason)); + } + if (previousState.playbackSuppressionReason != newState.playbackSuppressionReason) { + listeners.queueEvent( + Player.EVENT_PLAYBACK_SUPPRESSION_REASON_CHANGED, + listener -> + listener.onPlaybackSuppressionReasonChanged(newState.playbackSuppressionReason)); + } + if (isPlaying(previousState) != isPlaying(newState)) { + listeners.queueEvent( + Player.EVENT_IS_PLAYING_CHANGED, + listener -> listener.onIsPlayingChanged(isPlaying(newState))); + } + if (!previousState.playbackParameters.equals(newState.playbackParameters)) { + listeners.queueEvent( + Player.EVENT_PLAYBACK_PARAMETERS_CHANGED, + listener -> listener.onPlaybackParametersChanged(newState.playbackParameters)); + } + if (previousState.repeatMode != newState.repeatMode) { + listeners.queueEvent( + Player.EVENT_REPEAT_MODE_CHANGED, + listener -> listener.onRepeatModeChanged(newState.repeatMode)); + } + if (previousState.shuffleModeEnabled != newState.shuffleModeEnabled) { + listeners.queueEvent( + Player.EVENT_SHUFFLE_MODE_ENABLED_CHANGED, + listener -> listener.onShuffleModeEnabledChanged(newState.shuffleModeEnabled)); + } + if (previousState.seekBackIncrementMs != newState.seekBackIncrementMs) { + listeners.queueEvent( + Player.EVENT_SEEK_BACK_INCREMENT_CHANGED, + listener -> 
listener.onSeekBackIncrementChanged(newState.seekBackIncrementMs)); + } + if (previousState.seekForwardIncrementMs != newState.seekForwardIncrementMs) { + listeners.queueEvent( + Player.EVENT_SEEK_FORWARD_INCREMENT_CHANGED, + listener -> listener.onSeekForwardIncrementChanged(newState.seekForwardIncrementMs)); + } + if (previousState.maxSeekToPreviousPositionMs != newState.maxSeekToPreviousPositionMs) { + listeners.queueEvent( + Player.EVENT_MAX_SEEK_TO_PREVIOUS_POSITION_CHANGED, + listener -> + listener.onMaxSeekToPreviousPositionChanged(newState.maxSeekToPreviousPositionMs)); + } + if (!previousState.audioAttributes.equals(newState.audioAttributes)) { + listeners.queueEvent( + Player.EVENT_AUDIO_ATTRIBUTES_CHANGED, + listener -> listener.onAudioAttributesChanged(newState.audioAttributes)); + } + if (!previousState.videoSize.equals(newState.videoSize)) { + listeners.queueEvent( + Player.EVENT_VIDEO_SIZE_CHANGED, + listener -> listener.onVideoSizeChanged(newState.videoSize)); + } + if (!previousState.deviceInfo.equals(newState.deviceInfo)) { + listeners.queueEvent( + Player.EVENT_DEVICE_INFO_CHANGED, + listener -> listener.onDeviceInfoChanged(newState.deviceInfo)); + } + if (!previousState.playlistMetadata.equals(newState.playlistMetadata)) { + listeners.queueEvent( + Player.EVENT_PLAYLIST_METADATA_CHANGED, + listener -> listener.onPlaylistMetadataChanged(newState.playlistMetadata)); + } + if (newState.newlyRenderedFirstFrame) { + listeners.queueEvent(Player.EVENT_RENDERED_FIRST_FRAME, Listener::onRenderedFirstFrame); + } + if (!previousState.surfaceSize.equals(newState.surfaceSize)) { + listeners.queueEvent( + Player.EVENT_SURFACE_SIZE_CHANGED, + listener -> + listener.onSurfaceSizeChanged( + newState.surfaceSize.getWidth(), newState.surfaceSize.getHeight())); + } + if (previousState.volume != newState.volume) { + listeners.queueEvent( + Player.EVENT_VOLUME_CHANGED, listener -> listener.onVolumeChanged(newState.volume)); + } + if (previousState.deviceVolume != newState.deviceVolume + || previousState.isDeviceMuted != newState.isDeviceMuted) { + listeners.queueEvent( + Player.EVENT_DEVICE_VOLUME_CHANGED, + listener -> + listener.onDeviceVolumeChanged(newState.deviceVolume, newState.isDeviceMuted)); + } + if (!previousState.currentCues.equals(newState.currentCues)) { + listeners.queueEvent( + Player.EVENT_CUES, + listener -> { + listener.onCues(newState.currentCues.cues); + listener.onCues(newState.currentCues); + }); + } + if (!previousState.timedMetadata.equals(newState.timedMetadata) + && newState.timedMetadata.presentationTimeUs != C.TIME_UNSET) { + listeners.queueEvent( + Player.EVENT_METADATA, listener -> listener.onMetadata(newState.timedMetadata)); + } + if (positionDiscontinuityReason == Player.DISCONTINUITY_REASON_SEEK) { + listeners.queueEvent(/* eventFlag= */ C.INDEX_UNSET, Listener::onSeekProcessed); + } + if (!previousState.availableCommands.equals(newState.availableCommands)) { + listeners.queueEvent( + Player.EVENT_AVAILABLE_COMMANDS_CHANGED, + listener -> listener.onAvailableCommandsChanged(newState.availableCommands)); + } + listeners.flushEvents(); + } + + @EnsuresNonNull("state") + private void verifyApplicationThreadAndInitState() { + if (Thread.currentThread() != applicationLooper.getThread()) { + String message = + Util.formatInvariant( + "Player is accessed on the wrong thread.\n" + + "Current thread: '%s'\n" + + "Expected thread: '%s'\n" + + "See https://exoplayer.dev/issues/player-accessed-on-wrong-thread", + Thread.currentThread().getName(), 
applicationLooper.getThread().getName()); + throw new IllegalStateException(message); + } + if (state == null) { + // First time accessing state. + state = getState(); + } + } + + @RequiresNonNull("state") + private void updateStateForPendingOperation( + ListenableFuture pendingOperation, Supplier placeholderStateSupplier) { + updateStateForPendingOperation( + pendingOperation, + placeholderStateSupplier, + /* seeked= */ false, + /* isRepeatingCurrentItem= */ false); + } + + @RequiresNonNull("state") + private void updateStateForPendingOperation( + ListenableFuture pendingOperation, + Supplier placeholderStateSupplier, + boolean seeked, + boolean isRepeatingCurrentItem) { + if (pendingOperation.isDone() && pendingOperations.isEmpty()) { + updateStateAndInformListeners(getState(), seeked, isRepeatingCurrentItem); + } else { + pendingOperations.add(pendingOperation); + State suggestedPlaceholderState = placeholderStateSupplier.get(); + updateStateAndInformListeners( + getPlaceholderState(suggestedPlaceholderState), seeked, isRepeatingCurrentItem); + pendingOperation.addListener( + () -> { + castNonNull(state); // Already checked by method @RequiresNonNull pre-condition. + pendingOperations.remove(pendingOperation); + if (pendingOperations.isEmpty() && !released) { + updateStateAndInformListeners( + getState(), /* seeked= */ false, /* isRepeatingCurrentItem= */ false); + } + }, + this::postOrRunOnApplicationHandler); + } + } + + private void postOrRunOnApplicationHandler(Runnable runnable) { + if (applicationHandler.getLooper() == Looper.myLooper()) { + runnable.run(); + } else { + applicationHandler.post(runnable); + } + } + + private static boolean isPlaying(State state) { + return state.playWhenReady + && state.playbackState == Player.STATE_READY + && state.playbackSuppressionReason == PLAYBACK_SUPPRESSION_REASON_NONE; + } + + private static Tracks getCurrentTracksInternal(State state) { + return state.playlist.isEmpty() + ? Tracks.EMPTY + : state.playlist.get(getCurrentMediaItemIndexInternal(state)).tracks; + } + + private static MediaMetadata getMediaMetadataInternal(State state) { + return state.playlist.isEmpty() + ? MediaMetadata.EMPTY + : state.playlist.get(getCurrentMediaItemIndexInternal(state)).combinedMediaMetadata; + } + + private static int getCurrentMediaItemIndexInternal(State state) { + if (state.currentMediaItemIndex != C.INDEX_UNSET) { + return state.currentMediaItemIndex; + } + return 0; // TODO: Use shuffle order to get first item if playlist is not empty. 
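The helpers above pin all access to the application thread and refresh the state once pending operations complete. Changes that originate in the playback backend rather than in a player call can be pushed in the same way, using the invalidateState() method documented earlier; `applicationHandler` here is an illustrative Handler on the player's Looper.

```java
// Hypothetical backend callback: hop onto the application thread, then ask the base
// class to re-read getState(). While handleXxx operations are still pending this is a
// no-op, exactly as the invalidateState() javadoc above notes.
private void onBackendBufferingFinished() {
  applicationHandler.post(
      () -> {
        playbackState = Player.STATE_READY;
        invalidateState();
      });
}
```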
+ } + + private static long getContentPositionMsInternal(State state) { + return getPositionOrDefaultInMediaItem(state.contentPositionMsSupplier.get(), state); + } + + private static long getContentBufferedPositionMsInternal(State state) { + return getPositionOrDefaultInMediaItem(state.contentBufferedPositionMsSupplier.get(), state); + } + + private static long getPositionOrDefaultInMediaItem(long positionMs, State state) { + if (positionMs != C.TIME_UNSET) { + return positionMs; + } + if (state.playlist.isEmpty()) { + return 0; + } + return usToMs(state.playlist.get(getCurrentMediaItemIndexInternal(state)).defaultPositionUs); + } + + private static int getCurrentPeriodIndexInternal( + State state, Timeline.Window window, Timeline.Period period) { + int currentMediaItemIndex = getCurrentMediaItemIndexInternal(state); + if (state.timeline.isEmpty()) { + return currentMediaItemIndex; + } + return getPeriodIndexFromWindowPosition( + state.timeline, currentMediaItemIndex, getContentPositionMsInternal(state), window, period); + } + + private static int getPeriodIndexFromWindowPosition( + Timeline timeline, + int windowIndex, + long windowPositionMs, + Timeline.Window window, + Timeline.Period period) { + Object periodUid = + timeline.getPeriodPositionUs(window, period, windowIndex, msToUs(windowPositionMs)).first; + return timeline.getIndexOfPeriod(periodUid); + } + + private static @Player.TimelineChangeReason int getTimelineChangeReason( + List previousPlaylist, List newPlaylist) { + if (previousPlaylist.size() != newPlaylist.size()) { + return Player.TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED; + } + for (int i = 0; i < previousPlaylist.size(); i++) { + Object previousUid = previousPlaylist.get(i).uid; + Object newUid = newPlaylist.get(i).uid; + boolean resolvedAutoGeneratedPlaceholder = + previousUid instanceof PlaceholderUid && !(newUid instanceof PlaceholderUid); + if (!previousUid.equals(newUid) && !resolvedAutoGeneratedPlaceholder) { + return Player.TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED; + } + } + return Player.TIMELINE_CHANGE_REASON_SOURCE_UPDATE; + } + + private static int getPositionDiscontinuityReason( + State previousState, + State newState, + boolean seeked, + Timeline.Window window, + Timeline.Period period) { + if (newState.hasPositionDiscontinuity) { + // We were asked to report a discontinuity. + return newState.positionDiscontinuityReason; + } + if (seeked) { + return Player.DISCONTINUITY_REASON_SEEK; + } + if (previousState.playlist.isEmpty()) { + // First change from an empty playlist is not reported as a discontinuity. + return C.INDEX_UNSET; + } + if (newState.playlist.isEmpty()) { + // The playlist became empty. + return Player.DISCONTINUITY_REASON_REMOVE; + } + Object previousPeriodUid = + previousState.timeline.getUidOfPeriod( + getCurrentPeriodIndexInternal(previousState, window, period)); + Object newPeriodUid = + newState.timeline.getUidOfPeriod(getCurrentPeriodIndexInternal(newState, window, period)); + if (previousPeriodUid instanceof PlaceholderUid && !(newPeriodUid instanceof PlaceholderUid)) { + // An auto-generated placeholder was resolved to a real item. + return C.INDEX_UNSET; + } + if (!newPeriodUid.equals(previousPeriodUid) + || previousState.currentAdGroupIndex != newState.currentAdGroupIndex + || previousState.currentAdIndexInAdGroup != newState.currentAdIndexInAdGroup) { + // The current period or ad inside a period changed. + if (newState.timeline.getIndexOfPeriod(previousPeriodUid) == C.INDEX_UNSET) { + // The previous period no longer exists. 
+ return Player.DISCONTINUITY_REASON_REMOVE; + } + // Check if reached the previous period's or ad's duration to assume an auto-transition. + long previousPositionMs = + getCurrentPeriodOrAdPositionMs(previousState, previousPeriodUid, period); + long previousDurationMs = getPeriodOrAdDurationMs(previousState, previousPeriodUid, period); + return previousDurationMs != C.TIME_UNSET && previousPositionMs >= previousDurationMs + ? Player.DISCONTINUITY_REASON_AUTO_TRANSITION + : Player.DISCONTINUITY_REASON_SKIP; + } + // We are in the same content period or ad. Check if the position deviates more than a + // reasonable threshold from the previous one. + long previousPositionMs = + getCurrentPeriodOrAdPositionMs(previousState, previousPeriodUid, period); + long newPositionMs = getCurrentPeriodOrAdPositionMs(newState, newPeriodUid, period); + if (Math.abs(previousPositionMs - newPositionMs) < POSITION_DISCONTINUITY_THRESHOLD_MS) { + return C.INDEX_UNSET; + } + // Check if we previously reached the end of the item to assume an auto-repetition. + long previousDurationMs = getPeriodOrAdDurationMs(previousState, previousPeriodUid, period); + return previousDurationMs != C.TIME_UNSET && previousPositionMs >= previousDurationMs + ? Player.DISCONTINUITY_REASON_AUTO_TRANSITION + : Player.DISCONTINUITY_REASON_INTERNAL; + } + + private static long getCurrentPeriodOrAdPositionMs( + State state, Object currentPeriodUid, Timeline.Period period) { + return state.currentAdGroupIndex != C.INDEX_UNSET + ? state.adPositionMsSupplier.get() + : getContentPositionMsInternal(state) + - state.timeline.getPeriodByUid(currentPeriodUid, period).getPositionInWindowMs(); + } + + private static long getPeriodOrAdDurationMs( + State state, Object currentPeriodUid, Timeline.Period period) { + state.timeline.getPeriodByUid(currentPeriodUid, period); + long periodOrAdDurationUs = + state.currentAdGroupIndex == C.INDEX_UNSET + ? period.durationUs + : period.getAdDurationUs(state.currentAdGroupIndex, state.currentAdIndexInAdGroup); + return usToMs(periodOrAdDurationUs); + } + + private static PositionInfo getPositionInfo( + State state, + boolean useDiscontinuityPosition, + Timeline.Window window, + Timeline.Period period) { + @Nullable Object windowUid = null; + @Nullable Object periodUid = null; + int mediaItemIndex = getCurrentMediaItemIndexInternal(state); + int periodIndex = C.INDEX_UNSET; + @Nullable MediaItem mediaItem = null; + if (!state.timeline.isEmpty()) { + periodIndex = getCurrentPeriodIndexInternal(state, window, period); + periodUid = state.timeline.getPeriod(periodIndex, period, /* setIds= */ true).uid; + windowUid = state.timeline.getWindow(mediaItemIndex, window).uid; + mediaItem = window.mediaItem; + } + long contentPositionMs; + long positionMs; + if (useDiscontinuityPosition) { + positionMs = state.discontinuityPositionMs; + contentPositionMs = + state.currentAdGroupIndex == C.INDEX_UNSET + ? positionMs + : getContentPositionMsInternal(state); + } else { + contentPositionMs = getContentPositionMsInternal(state); + positionMs = + state.currentAdGroupIndex != C.INDEX_UNSET + ? 
state.adPositionMsSupplier.get() + : contentPositionMs; + } + return new PositionInfo( + windowUid, + mediaItemIndex, + mediaItem, + periodUid, + periodIndex, + positionMs, + contentPositionMs, + state.currentAdGroupIndex, + state.currentAdIndexInAdGroup); + } + + private static int getMediaItemTransitionReason( + State previousState, + State newState, + int positionDiscontinuityReason, + boolean isRepeatingCurrentItem, + Timeline.Window window) { + Timeline previousTimeline = previousState.timeline; + Timeline newTimeline = newState.timeline; + if (newTimeline.isEmpty() && previousTimeline.isEmpty()) { + return C.INDEX_UNSET; + } else if (newTimeline.isEmpty() != previousTimeline.isEmpty()) { + return MEDIA_ITEM_TRANSITION_REASON_PLAYLIST_CHANGED; + } + Object previousWindowUid = + previousState.timeline.getWindow(getCurrentMediaItemIndexInternal(previousState), window) + .uid; + Object newWindowUid = + newState.timeline.getWindow(getCurrentMediaItemIndexInternal(newState), window).uid; + if (previousWindowUid instanceof PlaceholderUid && !(newWindowUid instanceof PlaceholderUid)) { + // An auto-generated placeholder was resolved to a real item. + return C.INDEX_UNSET; + } + if (!previousWindowUid.equals(newWindowUid)) { + if (positionDiscontinuityReason == DISCONTINUITY_REASON_AUTO_TRANSITION) { + return MEDIA_ITEM_TRANSITION_REASON_AUTO; + } else if (positionDiscontinuityReason == DISCONTINUITY_REASON_SEEK) { + return MEDIA_ITEM_TRANSITION_REASON_SEEK; + } else { + return MEDIA_ITEM_TRANSITION_REASON_PLAYLIST_CHANGED; + } + } + // Only mark changes within the current item as a transition if we are repeating automatically + // or via a seek to next/previous. + if (positionDiscontinuityReason == DISCONTINUITY_REASON_AUTO_TRANSITION + && getContentPositionMsInternal(previousState) > getContentPositionMsInternal(newState)) { + return MEDIA_ITEM_TRANSITION_REASON_REPEAT; + } + if (positionDiscontinuityReason == DISCONTINUITY_REASON_SEEK && isRepeatingCurrentItem) { + return MEDIA_ITEM_TRANSITION_REASON_SEEK; + } + return C.INDEX_UNSET; + } + + private static Size getSurfaceHolderSize(SurfaceHolder surfaceHolder) { + if (!surfaceHolder.getSurface().isValid()) { + return Size.ZERO; + } + Rect surfaceFrame = surfaceHolder.getSurfaceFrame(); + return new Size(surfaceFrame.width(), surfaceFrame.height()); + } + + private static int getMediaItemIndexInNewPlaylist( + List oldPlaylist, + Timeline newPlaylistTimeline, + int oldMediaItemIndex, + Timeline.Period period) { + if (oldPlaylist.isEmpty()) { + return oldMediaItemIndex < newPlaylistTimeline.getWindowCount() + ? oldMediaItemIndex + : C.INDEX_UNSET; + } + Object oldFirstPeriodUid = + oldPlaylist.get(oldMediaItemIndex).getPeriodUid(/* periodIndexInMediaItem= */ 0); + if (newPlaylistTimeline.getIndexOfPeriod(oldFirstPeriodUid) == C.INDEX_UNSET) { + return C.INDEX_UNSET; + } + return newPlaylistTimeline.getPeriodByUid(oldFirstPeriodUid, period).windowIndex; + } + + private static State getStateWithNewPlaylist( + State oldState, List newPlaylist, Timeline.Period period) { + State.Builder stateBuilder = oldState.buildUpon(); + stateBuilder.setPlaylist(newPlaylist); + Timeline newTimeline = stateBuilder.timeline; + long oldPositionMs = oldState.contentPositionMsSupplier.get(); + int oldIndex = getCurrentMediaItemIndexInternal(oldState); + int newIndex = getMediaItemIndexInNewPlaylist(oldState.playlist, newTimeline, oldIndex, period); + long newPositionMs = newIndex == C.INDEX_UNSET ? 
C.TIME_UNSET : oldPositionMs; + // If the current item no longer exists, try to find a matching subsequent item. + for (int i = oldIndex + 1; newIndex == C.INDEX_UNSET && i < oldState.playlist.size(); i++) { + // TODO: Use shuffle order to iterate. + newIndex = + getMediaItemIndexInNewPlaylist( + oldState.playlist, newTimeline, /* oldMediaItemIndex= */ i, period); + } + // If this fails, transition to ENDED state. + if (oldState.playbackState != Player.STATE_IDLE && newIndex == C.INDEX_UNSET) { + stateBuilder.setPlaybackState(Player.STATE_ENDED).setIsLoading(false); + } + return buildStateForNewPosition( + stateBuilder, + oldState, + oldPositionMs, + newPlaylist, + newIndex, + newPositionMs, + /* keepAds= */ true); + } + + private static State getStateWithNewPlaylistAndPosition( + State oldState, List newPlaylist, int newIndex, long newPositionMs) { + State.Builder stateBuilder = oldState.buildUpon(); + stateBuilder.setPlaylist(newPlaylist); + if (oldState.playbackState != Player.STATE_IDLE) { + if (newPlaylist.isEmpty()) { + stateBuilder.setPlaybackState(Player.STATE_ENDED).setIsLoading(false); + } else { + stateBuilder.setPlaybackState(Player.STATE_BUFFERING); + } + } + long oldPositionMs = oldState.contentPositionMsSupplier.get(); + return buildStateForNewPosition( + stateBuilder, + oldState, + oldPositionMs, + newPlaylist, + newIndex, + newPositionMs, + /* keepAds= */ false); + } + + private static State buildStateForNewPosition( + State.Builder stateBuilder, + State oldState, + long oldPositionMs, + List newPlaylist, + int newIndex, + long newPositionMs, + boolean keepAds) { + // Resolve unset or invalid index and position. + oldPositionMs = getPositionOrDefaultInMediaItem(oldPositionMs, oldState); + if (!newPlaylist.isEmpty() && (newIndex == C.INDEX_UNSET || newIndex >= newPlaylist.size())) { + newIndex = 0; // TODO: Use shuffle order to get first index. + newPositionMs = C.TIME_UNSET; + } + if (!newPlaylist.isEmpty() && newPositionMs == C.TIME_UNSET) { + newPositionMs = usToMs(newPlaylist.get(newIndex).defaultPositionUs); + } + boolean oldOrNewPlaylistEmpty = oldState.playlist.isEmpty() || newPlaylist.isEmpty(); + boolean mediaItemChanged = + !oldOrNewPlaylistEmpty + && !oldState + .playlist + .get(getCurrentMediaItemIndexInternal(oldState)) + .uid + .equals(newPlaylist.get(newIndex).uid); + if (oldOrNewPlaylistEmpty || mediaItemChanged || newPositionMs < oldPositionMs) { + // New item or seeking back. Assume no buffer and no ad playback persists. + stateBuilder + .setCurrentMediaItemIndex(newIndex) + .setCurrentAd(C.INDEX_UNSET, C.INDEX_UNSET) + .setContentPositionMs(newPositionMs) + .setContentBufferedPositionMs(PositionSupplier.getConstant(newPositionMs)) + .setTotalBufferedDurationMs(PositionSupplier.ZERO); + } else if (newPositionMs == oldPositionMs) { + // Unchanged position. Assume ad playback and buffer in current item persists. + stateBuilder.setCurrentMediaItemIndex(newIndex); + if (oldState.currentAdGroupIndex != C.INDEX_UNSET && keepAds) { + stateBuilder.setTotalBufferedDurationMs( + PositionSupplier.getConstant( + oldState.adBufferedPositionMsSupplier.get() - oldState.adPositionMsSupplier.get())); + } else { + stateBuilder + .setCurrentAd(C.INDEX_UNSET, C.INDEX_UNSET) + .setTotalBufferedDurationMs( + PositionSupplier.getConstant( + getContentBufferedPositionMsInternal(oldState) - oldPositionMs)); + } + } else { + // Seeking forward. Assume remaining buffer in current item persist, but no ad playback. 
+ long contentBufferedDurationMs = + max(getContentBufferedPositionMsInternal(oldState), newPositionMs); + long totalBufferedDurationMs = + max(0, oldState.totalBufferedDurationMsSupplier.get() - (newPositionMs - oldPositionMs)); + stateBuilder + .setCurrentMediaItemIndex(newIndex) + .setCurrentAd(C.INDEX_UNSET, C.INDEX_UNSET) + .setContentPositionMs(newPositionMs) + .setContentBufferedPositionMs(PositionSupplier.getConstant(contentBufferedDurationMs)) + .setTotalBufferedDurationMs(PositionSupplier.getConstant(totalBufferedDurationMs)); + } + return stateBuilder.build(); + } + + private static final class PlaceholderUid {} +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/SimpleExoPlayer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/SimpleExoPlayer.java index 4fa7f58e2d..ac916eb781 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/SimpleExoPlayer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/SimpleExoPlayer.java @@ -15,1850 +15,1311 @@ */ package com.google.android.exoplayer2; -import android.annotation.TargetApi; +import static androidx.annotation.VisibleForTesting.PROTECTED; + import android.content.Context; -import android.graphics.Rect; -import android.graphics.SurfaceTexture; -import android.media.MediaCodec; -import android.media.PlaybackParams; -import android.os.Handler; +import android.media.AudioDeviceInfo; import android.os.Looper; import android.view.Surface; import android.view.SurfaceHolder; import android.view.SurfaceView; import android.view.TextureView; +import androidx.annotation.IntRange; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.analytics.AnalyticsCollector; import com.google.android.exoplayer2.analytics.AnalyticsListener; import com.google.android.exoplayer2.audio.AudioAttributes; -import com.google.android.exoplayer2.audio.AudioListener; -import com.google.android.exoplayer2.audio.AudioRendererEventListener; import com.google.android.exoplayer2.audio.AuxEffectInfo; import com.google.android.exoplayer2.decoder.DecoderCounters; -import com.google.android.exoplayer2.drm.DefaultDrmSessionManager; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.drm.FrameworkMediaCrypto; -import com.google.android.exoplayer2.metadata.Metadata; -import com.google.android.exoplayer2.metadata.MetadataOutput; +import com.google.android.exoplayer2.extractor.ExtractorsFactory; +import com.google.android.exoplayer2.source.DefaultMediaSourceFactory; import com.google.android.exoplayer2.source.MediaSource; +import com.google.android.exoplayer2.source.ShuffleOrder; import com.google.android.exoplayer2.source.TrackGroupArray; -import com.google.android.exoplayer2.text.Cue; -import com.google.android.exoplayer2.text.TextOutput; -import com.google.android.exoplayer2.trackselection.DefaultTrackSelector; +import com.google.android.exoplayer2.text.CueGroup; import com.google.android.exoplayer2.trackselection.TrackSelectionArray; +import com.google.android.exoplayer2.trackselection.TrackSelectionParameters; import com.google.android.exoplayer2.trackselection.TrackSelector; import com.google.android.exoplayer2.upstream.BandwidthMeter; -import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter; -import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Clock; -import com.google.android.exoplayer2.util.Log; 
+import com.google.android.exoplayer2.util.ConditionVariable; import com.google.android.exoplayer2.util.PriorityTaskManager; -import com.google.android.exoplayer2.util.Util; -import com.google.android.exoplayer2.video.VideoDecoderOutputBufferRenderer; +import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.video.VideoFrameMetadataListener; -import com.google.android.exoplayer2.video.VideoRendererEventListener; +import com.google.android.exoplayer2.video.VideoSize; import com.google.android.exoplayer2.video.spherical.CameraMotionListener; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import org.telegram.messenger.Utilities; -import java.util.ArrayList; -import java.util.Collections; import java.util.List; -import java.util.concurrent.CopyOnWriteArraySet; /** - * An {@link ExoPlayer} implementation that uses default {@link Renderer} components. Instances can - * be obtained from {@link SimpleExoPlayer.Builder}. + * @deprecated Use {@link ExoPlayer} instead. */ +@Deprecated public class SimpleExoPlayer extends BasePlayer implements ExoPlayer, - Player.AudioComponent, - Player.VideoComponent, - Player.TextComponent, - Player.MetadataComponent { - - /** @deprecated Use {@link com.google.android.exoplayer2.video.VideoListener}. */ - @Deprecated - public interface VideoListener extends com.google.android.exoplayer2.video.VideoListener {} + ExoPlayer.AudioComponent, + ExoPlayer.VideoComponent, + ExoPlayer.TextComponent, + ExoPlayer.DeviceComponent { /** - * A builder for {@link SimpleExoPlayer} instances. - * - *

      See {@link #Builder(Context)} for the list of default values. + * @deprecated Use {@link ExoPlayer.Builder} instead. */ + @Deprecated + @SuppressWarnings("deprecation") public static final class Builder { - private final Context context; - private final RenderersFactory renderersFactory; - - private Clock clock; - private TrackSelector trackSelector; - private LoadControl loadControl; - private BandwidthMeter bandwidthMeter; - private AnalyticsCollector analyticsCollector; - private Looper looper; - private boolean useLazyPreparation; - private boolean buildCalled; + private final ExoPlayer.Builder wrappedBuilder; /** - * Creates a builder. - * - *

      Use {@link #Builder(Context, RenderersFactory)} instead, if you intend to provide a custom - * {@link RenderersFactory}. This is to ensure that ProGuard or R8 can remove ExoPlayer's {@link - * DefaultRenderersFactory} from the APK. - * - *

      The builder uses the following default values: - * - *

      • {@link RenderersFactory}: {@link DefaultRenderersFactory} - *
      • {@link TrackSelector}: {@link DefaultTrackSelector} - *
      • {@link LoadControl}: {@link DefaultLoadControl} - *
      • {@link BandwidthMeter}: {@link DefaultBandwidthMeter#getSingletonInstance(Context)} - *
      • {@link Looper}: The {@link Looper} associated with the current thread, or the {@link - * Looper} of the application's main thread if the current thread doesn't have a {@link - * Looper} - *
      • {@link AnalyticsCollector}: {@link AnalyticsCollector} with {@link Clock#DEFAULT} - *
      • {@code useLazyPreparation}: {@code true} - *
      • {@link Clock}: {@link Clock#DEFAULT} - *
      - * - * @param context A {@link Context}. + * @deprecated Use {@link ExoPlayer.Builder#Builder(Context)} instead. */ + @Deprecated public Builder(Context context) { - this(context, new DefaultRenderersFactory(context)); + wrappedBuilder = new ExoPlayer.Builder(context); } /** - * Creates a builder with a custom {@link RenderersFactory}. - * - *

      See {@link #Builder(Context)} for a list of default values. - * - * @param context A {@link Context}. - * @param renderersFactory A factory for creating {@link Renderer Renderers} to be used by the - * player. + * @deprecated Use {@link ExoPlayer.Builder#Builder(Context, RenderersFactory)} instead. */ + @Deprecated public Builder(Context context, RenderersFactory renderersFactory) { - this( - context, - renderersFactory, - new DefaultTrackSelector(context), - new DefaultLoadControl(), - DefaultBandwidthMeter.getSingletonInstance(context), - Util.getLooper(), - new AnalyticsCollector(Clock.DEFAULT), - /* useLazyPreparation= */ true, - Clock.DEFAULT); + wrappedBuilder = new ExoPlayer.Builder(context, renderersFactory); + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#Builder(Context, MediaSource.Factory)} and {@link + * DefaultMediaSourceFactory#DefaultMediaSourceFactory(Context, ExtractorsFactory)} instead. + */ + @Deprecated + public Builder(Context context, ExtractorsFactory extractorsFactory) { + wrappedBuilder = + new ExoPlayer.Builder(context, new DefaultMediaSourceFactory(context, extractorsFactory)); + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#Builder(Context, RenderersFactory, + * MediaSource.Factory)} and {@link + * DefaultMediaSourceFactory#DefaultMediaSourceFactory(Context, ExtractorsFactory)} instead. + */ + @Deprecated + public Builder( + Context context, RenderersFactory renderersFactory, ExtractorsFactory extractorsFactory) { + wrappedBuilder = + new ExoPlayer.Builder( + context, renderersFactory, new DefaultMediaSourceFactory(context, extractorsFactory)); } /** - * Creates a builder with the specified custom components. - * - *

      Note that this constructor is only useful if you try to ensure that ExoPlayer's default - * components can be removed by ProGuard or R8. For most components except renderers, there is - * only a marginal benefit of doing that. - * - * @param context A {@link Context}. - * @param renderersFactory A factory for creating {@link Renderer Renderers} to be used by the - * player. - * @param trackSelector A {@link TrackSelector}. - * @param loadControl A {@link LoadControl}. - * @param bandwidthMeter A {@link BandwidthMeter}. - * @param looper A {@link Looper} that must be used for all calls to the player. - * @param analyticsCollector An {@link AnalyticsCollector}. - * @param useLazyPreparation Whether media sources should be initialized lazily. - * @param clock A {@link Clock}. Should always be {@link Clock#DEFAULT}. + * @deprecated Use {@link ExoPlayer.Builder#Builder(Context, RenderersFactory, + * MediaSource.Factory, TrackSelector, LoadControl, BandwidthMeter, AnalyticsCollector)} + * instead. */ + @Deprecated public Builder( Context context, RenderersFactory renderersFactory, TrackSelector trackSelector, + MediaSource.Factory mediaSourceFactory, LoadControl loadControl, BandwidthMeter bandwidthMeter, - Looper looper, - AnalyticsCollector analyticsCollector, - boolean useLazyPreparation, - Clock clock) { - this.context = context; - this.renderersFactory = renderersFactory; - this.trackSelector = trackSelector; - this.loadControl = loadControl; - this.bandwidthMeter = bandwidthMeter; - this.looper = looper; - this.analyticsCollector = analyticsCollector; - this.useLazyPreparation = useLazyPreparation; - this.clock = clock; + AnalyticsCollector analyticsCollector) { + wrappedBuilder = + new ExoPlayer.Builder( + context, + renderersFactory, + mediaSourceFactory, + trackSelector, + loadControl, + bandwidthMeter, + analyticsCollector); } /** - * Sets the {@link TrackSelector} that will be used by the player. - * - * @param trackSelector A {@link TrackSelector}. - * @return This builder. - * @throws IllegalStateException If {@link #build()} has already been called. + * @deprecated Use {@link ExoPlayer.Builder#experimentalSetForegroundModeTimeoutMs(long)} + * instead. */ + @CanIgnoreReturnValue + @Deprecated + public Builder experimentalSetForegroundModeTimeoutMs(long timeoutMs) { + wrappedBuilder.experimentalSetForegroundModeTimeoutMs(timeoutMs); + return this; + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#setTrackSelector(TrackSelector)} instead. + */ + @CanIgnoreReturnValue + @Deprecated public Builder setTrackSelector(TrackSelector trackSelector) { - Assertions.checkState(!buildCalled); - this.trackSelector = trackSelector; + wrappedBuilder.setTrackSelector(trackSelector); + return this; + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#setMediaSourceFactory(MediaSource.Factory)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setMediaSourceFactory(MediaSource.Factory mediaSourceFactory) { + wrappedBuilder.setMediaSourceFactory(mediaSourceFactory); return this; } /** - * Sets the {@link LoadControl} that will be used by the player. - * - * @param loadControl A {@link LoadControl}. - * @return This builder. - * @throws IllegalStateException If {@link #build()} has already been called. + * @deprecated Use {@link ExoPlayer.Builder#setLoadControl(LoadControl)} instead. 
*/ + @CanIgnoreReturnValue + @Deprecated public Builder setLoadControl(LoadControl loadControl) { - Assertions.checkState(!buildCalled); - this.loadControl = loadControl; + wrappedBuilder.setLoadControl(loadControl); return this; } /** - * Sets the {@link BandwidthMeter} that will be used by the player. - * - * @param bandwidthMeter A {@link BandwidthMeter}. - * @return This builder. - * @throws IllegalStateException If {@link #build()} has already been called. + * @deprecated Use {@link ExoPlayer.Builder#setBandwidthMeter(BandwidthMeter)} instead. */ + @CanIgnoreReturnValue + @Deprecated public Builder setBandwidthMeter(BandwidthMeter bandwidthMeter) { - Assertions.checkState(!buildCalled); - this.bandwidthMeter = bandwidthMeter; + wrappedBuilder.setBandwidthMeter(bandwidthMeter); return this; } /** - * Sets the {@link Looper} that must be used for all calls to the player and that is used to - * call listeners on. - * - * @param looper A {@link Looper}. - * @return This builder. - * @throws IllegalStateException If {@link #build()} has already been called. + * @deprecated Use {@link ExoPlayer.Builder#setLooper(Looper)} instead. */ + @CanIgnoreReturnValue + @Deprecated public Builder setLooper(Looper looper) { - Assertions.checkState(!buildCalled); - this.looper = looper; + wrappedBuilder.setLooper(looper); return this; } /** - * Sets the {@link AnalyticsCollector} that will collect and forward all player events. - * - * @param analyticsCollector An {@link AnalyticsCollector}. - * @return This builder. - * @throws IllegalStateException If {@link #build()} has already been called. + * @deprecated Use {@link ExoPlayer.Builder#setAnalyticsCollector(AnalyticsCollector)} instead. */ + @CanIgnoreReturnValue + @Deprecated public Builder setAnalyticsCollector(AnalyticsCollector analyticsCollector) { - Assertions.checkState(!buildCalled); - this.analyticsCollector = analyticsCollector; + wrappedBuilder.setAnalyticsCollector(analyticsCollector); return this; } /** - * Sets whether media sources should be initialized lazily. - * - *

      If false, all initial preparation steps (e.g., manifest loads) happen immediately. If - * true, these initial preparations are triggered only when the player starts buffering the - * media. - * - * @param useLazyPreparation Whether to use lazy preparation. - * @return This builder. - * @throws IllegalStateException If {@link #build()} has already been called. + * @deprecated Use {@link ExoPlayer.Builder#setPriorityTaskManager(PriorityTaskManager)} + * instead. */ + @CanIgnoreReturnValue + @Deprecated + public Builder setPriorityTaskManager(@Nullable PriorityTaskManager priorityTaskManager) { + wrappedBuilder.setPriorityTaskManager(priorityTaskManager); + return this; + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#setAudioAttributes(AudioAttributes, boolean)} + * instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setAudioAttributes(AudioAttributes audioAttributes, boolean handleAudioFocus) { + wrappedBuilder.setAudioAttributes(audioAttributes, handleAudioFocus); + return this; + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#setWakeMode(int)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setWakeMode(@C.WakeMode int wakeMode) { + wrappedBuilder.setWakeMode(wakeMode); + return this; + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#setHandleAudioBecomingNoisy(boolean)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setHandleAudioBecomingNoisy(boolean handleAudioBecomingNoisy) { + wrappedBuilder.setHandleAudioBecomingNoisy(handleAudioBecomingNoisy); + return this; + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#setSkipSilenceEnabled(boolean)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setSkipSilenceEnabled(boolean skipSilenceEnabled) { + wrappedBuilder.setSkipSilenceEnabled(skipSilenceEnabled); + return this; + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#setVideoScalingMode(int)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setVideoScalingMode(@C.VideoScalingMode int videoScalingMode) { + wrappedBuilder.setVideoScalingMode(videoScalingMode); + return this; + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#setVideoChangeFrameRateStrategy(int)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setVideoChangeFrameRateStrategy( + @C.VideoChangeFrameRateStrategy int videoChangeFrameRateStrategy) { + wrappedBuilder.setVideoChangeFrameRateStrategy(videoChangeFrameRateStrategy); + return this; + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#setUseLazyPreparation(boolean)} instead. + */ + @CanIgnoreReturnValue + @Deprecated public Builder setUseLazyPreparation(boolean useLazyPreparation) { - Assertions.checkState(!buildCalled); - this.useLazyPreparation = useLazyPreparation; + wrappedBuilder.setUseLazyPreparation(useLazyPreparation); + return this; + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#setSeekParameters(SeekParameters)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setSeekParameters(SeekParameters seekParameters) { + wrappedBuilder.setSeekParameters(seekParameters); + return this; + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#setSeekBackIncrementMs(long)} instead. 
+ */ + @CanIgnoreReturnValue + @Deprecated + public Builder setSeekBackIncrementMs(@IntRange(from = 1) long seekBackIncrementMs) { + wrappedBuilder.setSeekBackIncrementMs(seekBackIncrementMs); + return this; + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#setSeekForwardIncrementMs(long)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setSeekForwardIncrementMs(@IntRange(from = 1) long seekForwardIncrementMs) { + wrappedBuilder.setSeekForwardIncrementMs(seekForwardIncrementMs); + return this; + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#setReleaseTimeoutMs(long)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setReleaseTimeoutMs(long releaseTimeoutMs) { + wrappedBuilder.setReleaseTimeoutMs(releaseTimeoutMs); + return this; + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#setDetachSurfaceTimeoutMs(long)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setDetachSurfaceTimeoutMs(long detachSurfaceTimeoutMs) { + wrappedBuilder.setDetachSurfaceTimeoutMs(detachSurfaceTimeoutMs); return this; } /** - * Sets the {@link Clock} that will be used by the player. Should only be set for testing - * purposes. - * - * @param clock A {@link Clock}. - * @return This builder. - * @throws IllegalStateException If {@link #build()} has already been called. + * @deprecated Use {@link ExoPlayer.Builder#setPauseAtEndOfMediaItems(boolean)} instead. */ + @CanIgnoreReturnValue + @Deprecated + public Builder setPauseAtEndOfMediaItems(boolean pauseAtEndOfMediaItems) { + wrappedBuilder.setPauseAtEndOfMediaItems(pauseAtEndOfMediaItems); + return this; + } + + /** + * @deprecated Use {@link + * ExoPlayer.Builder#setLivePlaybackSpeedControl(LivePlaybackSpeedControl)} instead. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setLivePlaybackSpeedControl(LivePlaybackSpeedControl livePlaybackSpeedControl) { + wrappedBuilder.setLivePlaybackSpeedControl(livePlaybackSpeedControl); + return this; + } + + /** + * @deprecated Use {@link ExoPlayer.Builder#setClock(Clock)} instead. + */ + @CanIgnoreReturnValue + @Deprecated @VisibleForTesting public Builder setClock(Clock clock) { - Assertions.checkState(!buildCalled); - this.clock = clock; + wrappedBuilder.setClock(clock); return this; } /** - * Builds a {@link SimpleExoPlayer} instance. - * - * @throws IllegalStateException If {@link #build()} has already been called. + * @deprecated Use {@link ExoPlayer.Builder#build()} instead. 
*/ + @Deprecated public SimpleExoPlayer build() { - Assertions.checkState(!buildCalled); - buildCalled = true; - return new SimpleExoPlayer( - context, - renderersFactory, - trackSelector, - loadControl, - bandwidthMeter, - analyticsCollector, - clock, - looper); + return wrappedBuilder.buildSimpleExoPlayer(); } } - private static final String TAG = "SimpleExoPlayer"; - - protected final Renderer[] renderers; - private final ExoPlayerImpl player; - private final Handler eventHandler; - private final ComponentListener componentListener; - private final CopyOnWriteArraySet - videoListeners; - private final CopyOnWriteArraySet audioListeners; - private final CopyOnWriteArraySet textOutputs; - private final CopyOnWriteArraySet metadataOutputs; - private final CopyOnWriteArraySet videoDebugListeners; - private final CopyOnWriteArraySet audioDebugListeners; - private final BandwidthMeter bandwidthMeter; - private final AnalyticsCollector analyticsCollector; - - private final AudioBecomingNoisyManager audioBecomingNoisyManager; - private final AudioFocusManager audioFocusManager; - private final WakeLockManager wakeLockManager; - private final WifiLockManager wifiLockManager; - - - @Nullable private Format videoFormat; - @Nullable private Format audioFormat; - - @Nullable private VideoDecoderOutputBufferRenderer videoDecoderOutputBufferRenderer; - @Nullable private Surface surface; - private boolean ownsSurface; - private @C.VideoScalingMode int videoScalingMode; - @Nullable private SurfaceHolder surfaceHolder; - @Nullable private TextureView textureView; - private int surfaceWidth; - private int surfaceHeight; - @Nullable private DecoderCounters videoDecoderCounters; - @Nullable private DecoderCounters audioDecoderCounters; - private int audioSessionId; - private AudioAttributes audioAttributes; - private float audioVolume; - @Nullable private MediaSource mediaSource; - private List currentCues; - @Nullable private VideoFrameMetadataListener videoFrameMetadataListener; - @Nullable private CameraMotionListener cameraMotionListener; - private boolean hasNotifiedFullWrongThreadWarning; - @Nullable private PriorityTaskManager priorityTaskManager; - private boolean isPriorityTaskManagerRegistered; - private boolean playerReleased; + private final ConditionVariable constructorFinished; /** - * @param context A {@link Context}. - * @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance. - * @param trackSelector The {@link TrackSelector} that will be used by the instance. - * @param loadControl The {@link LoadControl} that will be used by the instance. - * @param bandwidthMeter The {@link BandwidthMeter} that will be used by the instance. - * @param analyticsCollector A factory for creating the {@link AnalyticsCollector} that will - * collect and forward all player events. - * @param clock The {@link Clock} that will be used by the instance. Should always be {@link - * Clock#DEFAULT}, unless the player is being used from a test. - * @param looper The {@link Looper} which must be used for all calls to the player and which is - * used to call listeners on. + * @deprecated Use the {@link ExoPlayer.Builder}. 
*/ - @SuppressWarnings("deprecation") + @Deprecated protected SimpleExoPlayer( Context context, RenderersFactory renderersFactory, TrackSelector trackSelector, + MediaSource.Factory mediaSourceFactory, LoadControl loadControl, BandwidthMeter bandwidthMeter, AnalyticsCollector analyticsCollector, + boolean useLazyPreparation, Clock clock, - Looper looper) { + Looper applicationLooper) { this( - context, - renderersFactory, - trackSelector, - loadControl, - DrmSessionManager.getDummyDrmSessionManager(), - bandwidthMeter, - analyticsCollector, - clock, - looper); + new ExoPlayer.Builder( + context, + renderersFactory, + mediaSourceFactory, + trackSelector, + loadControl, + bandwidthMeter, + analyticsCollector) + .setUseLazyPreparation(useLazyPreparation) + .setClock(clock) + .setLooper(applicationLooper)); } /** - * @param context A {@link Context}. - * @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance. - * @param trackSelector The {@link TrackSelector} that will be used by the instance. - * @param loadControl The {@link LoadControl} that will be used by the instance. - * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance - * will not be used for DRM protected playbacks. - * @param bandwidthMeter The {@link BandwidthMeter} that will be used by the instance. - * @param analyticsCollector The {@link AnalyticsCollector} that will collect and forward all - * player events. - * @param clock The {@link Clock} that will be used by the instance. Should always be {@link - * Clock#DEFAULT}, unless the player is being used from a test. - * @param looper The {@link Looper} which must be used for all calls to the player and which is - * used to call listeners on. - * @deprecated Use {@link #SimpleExoPlayer(Context, RenderersFactory, TrackSelector, LoadControl, - * BandwidthMeter, AnalyticsCollector, Clock, Looper)} instead, and pass the {@link - * DrmSessionManager} to the {@link MediaSource} factories. + * @param builder The {@link Builder} to obtain all construction parameters. */ - @Deprecated - protected SimpleExoPlayer( - Context context, - RenderersFactory renderersFactory, - TrackSelector trackSelector, - LoadControl loadControl, - @Nullable DrmSessionManager drmSessionManager, - BandwidthMeter bandwidthMeter, - AnalyticsCollector analyticsCollector, - Clock clock, - Looper looper) { - this.bandwidthMeter = bandwidthMeter; - this.analyticsCollector = analyticsCollector; - componentListener = new ComponentListener(); - videoListeners = new CopyOnWriteArraySet<>(); - audioListeners = new CopyOnWriteArraySet<>(); - textOutputs = new CopyOnWriteArraySet<>(); - metadataOutputs = new CopyOnWriteArraySet<>(); - videoDebugListeners = new CopyOnWriteArraySet<>(); - audioDebugListeners = new CopyOnWriteArraySet<>(); - eventHandler = new Handler(looper); - renderers = - renderersFactory.createRenderers( - eventHandler, - componentListener, - componentListener, - componentListener, - componentListener, - drmSessionManager); - - // Set initial values. - audioVolume = 1; - audioSessionId = C.AUDIO_SESSION_ID_UNSET; - audioAttributes = AudioAttributes.DEFAULT; - videoScalingMode = C.VIDEO_SCALING_MODE_DEFAULT; - currentCues = Collections.emptyList(); - - // Build the player and associated objects. 
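For reference: the options held by the deprecated SimpleExoPlayer.Builder above now live on ExoPlayer.Builder. A minimal migration sketch for an affected call site, assuming the ExoPlayer 2.18-era API this file delegates to (the class and method names below are illustrative only):

    import android.content.Context;
    import android.os.Looper;
    import com.google.android.exoplayer2.ExoPlayer;

    final class PlayerFactory {
      // Builds the player directly with ExoPlayer.Builder instead of the
      // deprecated SimpleExoPlayer.Builder shown in this hunk.
      static ExoPlayer buildPlayer(Context context) {
        return new ExoPlayer.Builder(context)
            .setLooper(Looper.getMainLooper())
            .setUseLazyPreparation(true)
            .build();
      }
    }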
- player = - new ExoPlayerImpl(renderers, trackSelector, loadControl, bandwidthMeter, clock, looper); - analyticsCollector.setPlayer(player); - player.addListener(analyticsCollector); - player.addListener(componentListener); - videoDebugListeners.add(analyticsCollector); - videoListeners.add(analyticsCollector); - audioDebugListeners.add(analyticsCollector); - audioListeners.add(analyticsCollector); - addMetadataOutput(analyticsCollector); - bandwidthMeter.addEventListener(eventHandler, analyticsCollector); - if (drmSessionManager instanceof DefaultDrmSessionManager) { - ((DefaultDrmSessionManager) drmSessionManager).addListener(eventHandler, analyticsCollector); + protected SimpleExoPlayer(Builder builder) { + this(builder.wrappedBuilder); + } + + /** + * @param builder The {@link ExoPlayer.Builder} to obtain all construction parameters. + */ + /* package */ SimpleExoPlayer(ExoPlayer.Builder builder) { + constructorFinished = new ConditionVariable(); + try { + player = new ExoPlayerImpl(builder, /* wrappingPlayer= */ this); + } finally { + constructorFinished.open(); } - audioBecomingNoisyManager = - new AudioBecomingNoisyManager(context, eventHandler, componentListener); - audioFocusManager = new AudioFocusManager(context, eventHandler, componentListener); - wakeLockManager = new WakeLockManager(context); - wifiLockManager = new WifiLockManager(context); } + @Override + public void experimentalSetOffloadSchedulingEnabled(boolean offloadSchedulingEnabled) { + blockUntilConstructorFinished(); + player.experimentalSetOffloadSchedulingEnabled(offloadSchedulingEnabled); + } + + @Override + public boolean experimentalIsSleepingForOffload() { + blockUntilConstructorFinished(); + return player.experimentalIsSleepingForOffload(); + } + + /** + * @deprecated Use {@link ExoPlayer}, as the {@link AudioComponent} methods are defined by that + * interface. + */ + @Deprecated @Override @Nullable public AudioComponent getAudioComponent() { return this; } + /** + * @deprecated Use {@link ExoPlayer}, as the {@link VideoComponent} methods are defined by that + * interface. + */ + @Deprecated @Override @Nullable public VideoComponent getVideoComponent() { return this; } + /** + * @deprecated Use {@link Player}, as the {@link TextComponent} methods are defined by that + * interface. + */ + @Deprecated @Override @Nullable public TextComponent getTextComponent() { return this; } + /** + * @deprecated Use {@link Player}, as the {@link DeviceComponent} methods are defined by that + * interface. + */ + @Deprecated @Override @Nullable - public MetadataComponent getMetadataComponent() { + public DeviceComponent getDeviceComponent() { return this; } - /** - * Sets the video scaling mode. - * - *
      Note that the scaling mode only applies if a {@link MediaCodec}-based video {@link Renderer} - * is enabled and if the output surface is owned by a {@link android.view.SurfaceView}. - * - * @param videoScalingMode The video scaling mode. - */ @Override public void setVideoScalingMode(@C.VideoScalingMode int videoScalingMode) { - verifyApplicationThread(); - this.videoScalingMode = videoScalingMode; - for (Renderer renderer : renderers) { - if (renderer.getTrackType() == C.TRACK_TYPE_VIDEO) { - player - .createMessage(renderer) - .setType(C.MSG_SET_SCALING_MODE) - .setPayload(videoScalingMode) - .send(); - } - } + blockUntilConstructorFinished(); + player.setVideoScalingMode(videoScalingMode); } @Override public @C.VideoScalingMode int getVideoScalingMode() { - return videoScalingMode; + blockUntilConstructorFinished(); + return player.getVideoScalingMode(); } @Override - public void clearVideoSurface() { - verifyApplicationThread(); - removeSurfaceCallbacks(); - setVideoSurfaceInternal(/* surface= */ null, /* ownsSurface= */ false); - maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); + public void setVideoChangeFrameRateStrategy( + @C.VideoChangeFrameRateStrategy int videoChangeFrameRateStrategy) { + blockUntilConstructorFinished(); + player.setVideoChangeFrameRateStrategy(videoChangeFrameRateStrategy); } @Override - public void clearVideoSurface(@Nullable Surface surface) { - verifyApplicationThread(); - if (surface != null && surface == this.surface) { - clearVideoSurface(); - } + public @C.VideoChangeFrameRateStrategy int getVideoChangeFrameRateStrategy() { + blockUntilConstructorFinished(); + return player.getVideoChangeFrameRateStrategy(); } @Override - public void setVideoSurface(@Nullable Surface surface) { - verifyApplicationThread(); - removeSurfaceCallbacks(); - if (surface != null) { - clearVideoDecoderOutputBufferRenderer(); - } - setVideoSurfaceInternal(surface, /* ownsSurface= */ false); - int newSurfaceSize = surface == null ? 
0 : C.LENGTH_UNSET; - maybeNotifySurfaceSizeChanged(/* width= */ newSurfaceSize, /* height= */ newSurfaceSize); + public VideoSize getVideoSize() { + blockUntilConstructorFinished(); + return player.getVideoSize(); } @Override - public void setVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder) { - verifyApplicationThread(); - removeSurfaceCallbacks(); - if (surfaceHolder != null) { - clearVideoDecoderOutputBufferRenderer(); - } - this.surfaceHolder = surfaceHolder; - if (surfaceHolder == null) { - setVideoSurfaceInternal(null, /* ownsSurface= */ false); - maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); - } else { - surfaceHolder.addCallback(componentListener); - Surface surface = surfaceHolder.getSurface(); - if (surface != null && surface.isValid()) { - setVideoSurfaceInternal(surface, /* ownsSurface= */ false); - Rect surfaceSize = surfaceHolder.getSurfaceFrame(); - maybeNotifySurfaceSizeChanged(surfaceSize.width(), surfaceSize.height()); - } else { - setVideoSurfaceInternal(/* surface= */ null, /* ownsSurface= */ false); - maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); - } - } + public Size getSurfaceSize() { + blockUntilConstructorFinished(); + return player.getSurfaceSize(); } @Override - public void clearVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder) { - verifyApplicationThread(); - if (surfaceHolder != null && surfaceHolder == this.surfaceHolder) { - setVideoSurfaceHolder(null); - } + public void clearVideoSurface() { + blockUntilConstructorFinished(); + player.clearVideoSurface(); } @Override - public void setVideoSurfaceView(@Nullable SurfaceView surfaceView) { - setVideoSurfaceHolder(surfaceView == null ? null : surfaceView.getHolder()); + public void clearVideoSurface(@Nullable Surface surface) { + blockUntilConstructorFinished(); + player.clearVideoSurface(surface); } @Override - public void clearVideoSurfaceView(@Nullable SurfaceView surfaceView) { - clearVideoSurfaceHolder(surfaceView == null ? null : surfaceView.getHolder()); + public void setVideoSurface(@Nullable Surface surface) { + blockUntilConstructorFinished(); + player.setVideoSurface(surface); } @Override - public void setVideoTextureView(@Nullable TextureView textureView) { - if (this.textureView == textureView) { - return; - } - verifyApplicationThread(); - removeSurfaceCallbacks(); - if (textureView != null) { - clearVideoDecoderOutputBufferRenderer(); - } - this.textureView = textureView; - if (textureView == null) { - setVideoSurfaceInternal(/* surface= */ null, /* ownsSurface= */ true); - maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); - } else { - if (textureView.getSurfaceTextureListener() != null) { - Log.w(TAG, "Replacing existing SurfaceTextureListener."); - } - textureView.setSurfaceTextureListener(componentListener); - SurfaceTexture surfaceTexture = - textureView.isAvailable() ? 
textureView.getSurfaceTexture() : null; - if (surfaceTexture == null) { - setVideoSurfaceInternal(/* surface= */ null, /* ownsSurface= */ true); - maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); - } else { - setVideoSurfaceInternal(new Surface(surfaceTexture), /* ownsSurface= */ true); - maybeNotifySurfaceSizeChanged(textureView.getWidth(), textureView.getHeight()); - } - } + public void setVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder) { + blockUntilConstructorFinished(); + player.setVideoSurfaceHolder(surfaceHolder); } @Override - public void clearVideoTextureView(@Nullable TextureView textureView) { - verifyApplicationThread(); - if (textureView != null && textureView == this.textureView) { - setVideoTextureView(null); - } + public void clearVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder) { + blockUntilConstructorFinished(); + player.clearVideoSurfaceHolder(surfaceHolder); } @Override - public void setVideoDecoderOutputBufferRenderer( - @Nullable VideoDecoderOutputBufferRenderer videoDecoderOutputBufferRenderer) { - verifyApplicationThread(); - if (videoDecoderOutputBufferRenderer != null) { - clearVideoSurface(); - } - setVideoDecoderOutputBufferRendererInternal(videoDecoderOutputBufferRenderer); + public void setVideoSurfaceView(@Nullable SurfaceView surfaceView) { + blockUntilConstructorFinished(); + player.setVideoSurfaceView(surfaceView); } @Override - public void clearVideoDecoderOutputBufferRenderer() { - verifyApplicationThread(); - setVideoDecoderOutputBufferRendererInternal(/* videoDecoderOutputBufferRenderer= */ null); + public void clearVideoSurfaceView(@Nullable SurfaceView surfaceView) { + blockUntilConstructorFinished(); + player.clearVideoSurfaceView(surfaceView); } @Override - public void clearVideoDecoderOutputBufferRenderer( - @Nullable VideoDecoderOutputBufferRenderer videoDecoderOutputBufferRenderer) { - verifyApplicationThread(); - if (videoDecoderOutputBufferRenderer != null - && videoDecoderOutputBufferRenderer == this.videoDecoderOutputBufferRenderer) { - clearVideoDecoderOutputBufferRenderer(); - } + public void setVideoTextureView(@Nullable TextureView textureView) { + blockUntilConstructorFinished(); + player.setVideoTextureView(textureView); } @Override - public void addAudioListener(AudioListener listener) { - audioListeners.add(listener); + public void clearVideoTextureView(@Nullable TextureView textureView) { + blockUntilConstructorFinished(); + player.clearVideoTextureView(textureView); } @Override - public void removeAudioListener(AudioListener listener) { - audioListeners.remove(listener); + public void addAudioOffloadListener(AudioOffloadListener listener) { + blockUntilConstructorFinished(); + player.addAudioOffloadListener(listener); } @Override - public void setAudioAttributes(AudioAttributes audioAttributes) { - setAudioAttributes(audioAttributes, /* handleAudioFocus= */ false); + public void removeAudioOffloadListener(AudioOffloadListener listener) { + blockUntilConstructorFinished(); + player.removeAudioOffloadListener(listener); } @Override public void setAudioAttributes(AudioAttributes audioAttributes, boolean handleAudioFocus) { - verifyApplicationThread(); - if (playerReleased) { - return; - } - if (!Util.areEqual(this.audioAttributes, audioAttributes)) { - this.audioAttributes = audioAttributes; - for (Renderer renderer : renderers) { - if (renderer.getTrackType() == C.TRACK_TYPE_AUDIO) { - player - .createMessage(renderer) - .setType(C.MSG_SET_AUDIO_ATTRIBUTES) - .setPayload(audioAttributes) - .send(); 
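For reference: setAudioAttributes in this hunk now forwards straight to the wrapped player, so audio-focus handling keeps the same public entry point. An illustrative sketch, assuming an already-built ExoPlayer and placeholder attribute values:

    import com.google.android.exoplayer2.C;
    import com.google.android.exoplayer2.ExoPlayer;
    import com.google.android.exoplayer2.audio.AudioAttributes;

    final class AudioFocusExample {
      // Asks the player to manage audio focus itself while media is playing.
      static void enableAudioFocus(ExoPlayer player) {
        AudioAttributes audioAttributes =
            new AudioAttributes.Builder()
                .setUsage(C.USAGE_MEDIA)
                .build();
        player.setAudioAttributes(audioAttributes, /* handleAudioFocus= */ true);
      }
    }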
- } - } - for (AudioListener audioListener : audioListeners) { - audioListener.onAudioAttributesChanged(audioAttributes); - } - } - - audioFocusManager.setAudioAttributes(handleAudioFocus ? audioAttributes : null); - boolean playWhenReady = getPlayWhenReady(); - @AudioFocusManager.PlayerCommand - int playerCommand = audioFocusManager.updateAudioFocus(playWhenReady, getPlaybackState()); - updatePlayWhenReady(playWhenReady, playerCommand); + blockUntilConstructorFinished(); + player.setAudioAttributes(audioAttributes, handleAudioFocus); } @Override public AudioAttributes getAudioAttributes() { - return audioAttributes; + blockUntilConstructorFinished(); + return player.getAudioAttributes(); + } + + @Override + public void setAudioSessionId(int audioSessionId) { + blockUntilConstructorFinished(); + player.setAudioSessionId(audioSessionId); } @Override public int getAudioSessionId() { - return audioSessionId; + blockUntilConstructorFinished(); + return player.getAudioSessionId(); } @Override public void setAuxEffectInfo(AuxEffectInfo auxEffectInfo) { - verifyApplicationThread(); - for (Renderer renderer : renderers) { - if (renderer.getTrackType() == C.TRACK_TYPE_AUDIO) { - player - .createMessage(renderer) - .setType(C.MSG_SET_AUX_EFFECT_INFO) - .setPayload(auxEffectInfo) - .send(); - } - } + blockUntilConstructorFinished(); + player.setAuxEffectInfo(auxEffectInfo); } @Override public void clearAuxEffectInfo() { - setAuxEffectInfo(new AuxEffectInfo(AuxEffectInfo.NO_AUX_EFFECT_ID, /* sendLevel= */ 0f)); + blockUntilConstructorFinished(); + player.clearAuxEffectInfo(); } + @RequiresApi(23) @Override - public void setVolume(float audioVolume) { - verifyApplicationThread(); - audioVolume = Util.constrainValue(audioVolume, /* min= */ 0, /* max= */ 1); - if (this.audioVolume == audioVolume) { - return; - } - this.audioVolume = audioVolume; - sendVolumeToRenderers(); - for (AudioListener audioListener : audioListeners) { - audioListener.onVolumeChanged(audioVolume); - } + public void setPreferredAudioDevice(@Nullable AudioDeviceInfo audioDeviceInfo) { + blockUntilConstructorFinished(); + player.setPreferredAudioDevice(audioDeviceInfo); + } + + @Override + public void setVolume(float volume) { + blockUntilConstructorFinished(); + player.setVolume(volume); } @Override public float getVolume() { - return audioVolume; + blockUntilConstructorFinished(); + return player.getVolume(); } - /** - * Sets the stream type for audio playback, used by the underlying audio track. - * - *
      Setting the stream type during playback may introduce a short gap in audio output as the - * audio track is recreated. A new audio session id will also be generated. - * - *
      Calling this method overwrites any attributes set previously by calling {@link - * #setAudioAttributes(AudioAttributes)}. - * - * @deprecated Use {@link #setAudioAttributes(AudioAttributes)}. - * @param streamType The stream type for audio playback. - */ - @Deprecated - public void setAudioStreamType(@C.StreamType int streamType) { - @C.AudioUsage int usage = Util.getAudioUsageForStreamType(streamType); - @C.AudioContentType int contentType = Util.getAudioContentTypeForStreamType(streamType); - AudioAttributes audioAttributes = - new AudioAttributes.Builder().setUsage(usage).setContentType(contentType).build(); - setAudioAttributes(audioAttributes); + @Override + public boolean getSkipSilenceEnabled() { + blockUntilConstructorFinished(); + return player.getSkipSilenceEnabled(); } - /** - * Returns the stream type for audio playback. - * - * @deprecated Use {@link #getAudioAttributes()}. - */ - @Deprecated - public @C.StreamType int getAudioStreamType() { - return Util.getStreamTypeForAudioUsage(audioAttributes.usage); + @Override + public void setSkipSilenceEnabled(boolean skipSilenceEnabled) { + blockUntilConstructorFinished(); + player.setSkipSilenceEnabled(skipSilenceEnabled); } - /** Returns the {@link AnalyticsCollector} used for collecting analytics events. */ + @Override public AnalyticsCollector getAnalyticsCollector() { - return analyticsCollector; + blockUntilConstructorFinished(); + return player.getAnalyticsCollector(); } - /** - * Adds an {@link AnalyticsListener} to receive analytics events. - * - * @param listener The listener to be added. - */ + @Override public void addAnalyticsListener(AnalyticsListener listener) { - verifyApplicationThread(); - analyticsCollector.addListener(listener); + blockUntilConstructorFinished(); + player.addAnalyticsListener(listener); } - /** - * Removes an {@link AnalyticsListener}. - * - * @param listener The listener to be removed. - */ + @Override public void removeAnalyticsListener(AnalyticsListener listener) { - verifyApplicationThread(); - analyticsCollector.removeListener(listener); + blockUntilConstructorFinished(); + player.removeAnalyticsListener(listener); } - /** - * Sets whether the player should pause automatically when audio is rerouted from a headset to - * device speakers. See the audio - * becoming noisy documentation for more information. - * - *
      This feature is not enabled by default. - * - * @param handleAudioBecomingNoisy Whether the player should pause automatically when audio is - * rerouted from a headset to device speakers. - */ + @Override public void setHandleAudioBecomingNoisy(boolean handleAudioBecomingNoisy) { - verifyApplicationThread(); - if (playerReleased) { - return; - } - audioBecomingNoisyManager.setEnabled(handleAudioBecomingNoisy); + blockUntilConstructorFinished(); + player.setHandleAudioBecomingNoisy(handleAudioBecomingNoisy); } - /** - * Sets a {@link PriorityTaskManager}, or null to clear a previously set priority task manager. - * - *
      The priority {@link C#PRIORITY_PLAYBACK} will be set while the player is loading. - * - * @param priorityTaskManager The {@link PriorityTaskManager}, or null to clear a previously set - * priority task manager. - */ + @Override public void setPriorityTaskManager(@Nullable PriorityTaskManager priorityTaskManager) { - verifyApplicationThread(); - if (Util.areEqual(this.priorityTaskManager, priorityTaskManager)) { - return; - } - if (isPriorityTaskManagerRegistered) { - Assertions.checkNotNull(this.priorityTaskManager).remove(C.PRIORITY_PLAYBACK); - } - if (priorityTaskManager != null && isLoading()) { - priorityTaskManager.add(C.PRIORITY_PLAYBACK); - isPriorityTaskManagerRegistered = true; - } else { - isPriorityTaskManagerRegistered = false; - } - this.priorityTaskManager = priorityTaskManager; - } - - /** - * Sets the {@link PlaybackParams} governing audio playback. - * - * @deprecated Use {@link #setPlaybackParameters(PlaybackParameters)}. - * @param params The {@link PlaybackParams}, or null to clear any previously set parameters. - */ - @Deprecated - @TargetApi(23) - public void setPlaybackParams(@Nullable PlaybackParams params) { - PlaybackParameters playbackParameters; - if (params != null) { - params.allowDefaults(); - playbackParameters = new PlaybackParameters(params.getSpeed(), params.getPitch()); - } else { - playbackParameters = null; - } - setPlaybackParameters(playbackParameters); + blockUntilConstructorFinished(); + player.setPriorityTaskManager(priorityTaskManager); } - /** Returns the video format currently being played, or null if no video is being played. */ + @Override @Nullable public Format getVideoFormat() { - return videoFormat; + blockUntilConstructorFinished(); + return player.getVideoFormat(); } - /** Returns the audio format currently being played, or null if no audio is being played. */ + @Override @Nullable public Format getAudioFormat() { - return audioFormat; + blockUntilConstructorFinished(); + return player.getAudioFormat(); } - /** Returns {@link DecoderCounters} for video, or null if no video is being played. */ + @Override @Nullable public DecoderCounters getVideoDecoderCounters() { - return videoDecoderCounters; + blockUntilConstructorFinished(); + return player.getVideoDecoderCounters(); } - /** Returns {@link DecoderCounters} for audio, or null if no audio is being played. 
*/ + @Override @Nullable public DecoderCounters getAudioDecoderCounters() { - return audioDecoderCounters; + blockUntilConstructorFinished(); + return player.getAudioDecoderCounters(); } @Override - public void addVideoListener(com.google.android.exoplayer2.video.VideoListener listener) { - videoListeners.add(listener); + public void setVideoFrameMetadataListener(VideoFrameMetadataListener listener) { + blockUntilConstructorFinished(); + player.setVideoFrameMetadataListener(listener); } @Override - public void removeVideoListener(com.google.android.exoplayer2.video.VideoListener listener) { - videoListeners.remove(listener); + public void clearVideoFrameMetadataListener(VideoFrameMetadataListener listener) { + blockUntilConstructorFinished(); + player.clearVideoFrameMetadataListener(listener); } @Override - public void setVideoFrameMetadataListener(VideoFrameMetadataListener listener) { - verifyApplicationThread(); - videoFrameMetadataListener = listener; - for (Renderer renderer : renderers) { - if (renderer.getTrackType() == C.TRACK_TYPE_VIDEO) { - player - .createMessage(renderer) - .setType(C.MSG_SET_VIDEO_FRAME_METADATA_LISTENER) - .setPayload(listener) - .send(); - } - } + public void setCameraMotionListener(CameraMotionListener listener) { + blockUntilConstructorFinished(); + player.setCameraMotionListener(listener); } @Override - public void clearVideoFrameMetadataListener(VideoFrameMetadataListener listener) { - verifyApplicationThread(); - if (videoFrameMetadataListener != listener) { - return; - } - for (Renderer renderer : renderers) { - if (renderer.getTrackType() == C.TRACK_TYPE_VIDEO) { - player - .createMessage(renderer) - .setType(C.MSG_SET_VIDEO_FRAME_METADATA_LISTENER) - .setPayload(null) - .send(); - } - } + public void clearCameraMotionListener(CameraMotionListener listener) { + blockUntilConstructorFinished(); + player.clearCameraMotionListener(listener); } @Override - public void setCameraMotionListener(CameraMotionListener listener) { - verifyApplicationThread(); - cameraMotionListener = listener; - for (Renderer renderer : renderers) { - if (renderer.getTrackType() == C.TRACK_TYPE_CAMERA_MOTION) { - player - .createMessage(renderer) - .setType(C.MSG_SET_CAMERA_MOTION_LISTENER) - .setPayload(listener) - .send(); - } - } + public CueGroup getCurrentCues() { + blockUntilConstructorFinished(); + return player.getCurrentCues(); } + // ExoPlayer implementation + @Override - public void clearCameraMotionListener(CameraMotionListener listener) { - verifyApplicationThread(); - if (cameraMotionListener != listener) { - return; - } - for (Renderer renderer : renderers) { - if (renderer.getTrackType() == C.TRACK_TYPE_CAMERA_MOTION) { - player - .createMessage(renderer) - .setType(C.MSG_SET_CAMERA_MOTION_LISTENER) - .setPayload(null) - .send(); - } - } + public Looper getPlaybackLooper() { + blockUntilConstructorFinished(); + return player.getPlaybackLooper(); } - /** - * Sets a listener to receive video events, removing all existing listeners. - * - * @param listener The listener. - * @deprecated Use {@link #addVideoListener(com.google.android.exoplayer2.video.VideoListener)}. 
- */ - @Deprecated - @SuppressWarnings("deprecation") - public void setVideoListener(VideoListener listener) { - videoListeners.clear(); - if (listener != null) { - addVideoListener(listener); - } + @Override + public Looper getApplicationLooper() { + blockUntilConstructorFinished(); + return player.getApplicationLooper(); } - /** - * Equivalent to {@link #removeVideoListener(com.google.android.exoplayer2.video.VideoListener)}. - * - * @param listener The listener to clear. - * @deprecated Use {@link - * #removeVideoListener(com.google.android.exoplayer2.video.VideoListener)}. - */ - @Deprecated - @SuppressWarnings("deprecation") - public void clearVideoListener(VideoListener listener) { - removeVideoListener(listener); + @Override + public Clock getClock() { + blockUntilConstructorFinished(); + return player.getClock(); } @Override - public void addTextOutput(TextOutput listener) { - if (!currentCues.isEmpty()) { - listener.onCues(currentCues); - } - textOutputs.add(listener); + public void addListener(Listener listener) { + blockUntilConstructorFinished(); + player.addListener(listener); } @Override - public void removeTextOutput(TextOutput listener) { - textOutputs.remove(listener); + public void removeListener(Listener listener) { + blockUntilConstructorFinished(); + player.removeListener(listener); } - /** - * Sets an output to receive text events, removing all existing outputs. - * - * @param output The output. - * @deprecated Use {@link #addTextOutput(TextOutput)}. - */ - @Deprecated - public void setTextOutput(TextOutput output) { - textOutputs.clear(); - if (output != null) { - addTextOutput(output); - } + @Override + public @State int getPlaybackState() { + blockUntilConstructorFinished(); + return player.getPlaybackState(); + } + + @Override + public @PlaybackSuppressionReason int getPlaybackSuppressionReason() { + blockUntilConstructorFinished(); + return player.getPlaybackSuppressionReason(); + } + + @Override + @Nullable + public ExoPlaybackException getPlayerError() { + blockUntilConstructorFinished(); + return player.getPlayerError(); } /** - * Equivalent to {@link #removeTextOutput(TextOutput)}. - * - * @param output The output to clear. - * @deprecated Use {@link #removeTextOutput(TextOutput)}. + * @deprecated Use {@link #prepare()} instead. */ @Deprecated - public void clearTextOutput(TextOutput output) { - removeTextOutput(output); + @Override + @SuppressWarnings("deprecation") // Calling deprecated method. + public void retry() { + blockUntilConstructorFinished(); + player.retry(); } @Override - public void addMetadataOutput(MetadataOutput listener) { - metadataOutputs.add(listener); + public Commands getAvailableCommands() { + blockUntilConstructorFinished(); + return player.getAvailableCommands(); } @Override - public void removeMetadataOutput(MetadataOutput listener) { - metadataOutputs.remove(listener); + public void prepare() { + blockUntilConstructorFinished(); + player.prepare(); } /** - * Sets an output to receive metadata events, removing all existing outputs. - * - * @param output The output. - * @deprecated Use {@link #addMetadataOutput(MetadataOutput)}. + * @deprecated Use {@link #setMediaSource(MediaSource)} and {@link ExoPlayer#prepare()} instead. */ @Deprecated - public void setMetadataOutput(MetadataOutput output) { - metadataOutputs.retainAll(Collections.singleton(analyticsCollector)); - if (output != null) { - addMetadataOutput(output); - } + @Override + @SuppressWarnings("deprecation") // Forwarding deprecated method. 
+ public void prepare(MediaSource mediaSource) { + blockUntilConstructorFinished(); + player.prepare(mediaSource); } /** - * Equivalent to {@link #removeMetadataOutput(MetadataOutput)}. - * - * @param output The output to clear. - * @deprecated Use {@link #removeMetadataOutput(MetadataOutput)}. + * @deprecated Use {@link #setMediaSource(MediaSource, boolean)} and {@link ExoPlayer#prepare()} + * instead. */ @Deprecated - public void clearMetadataOutput(MetadataOutput output) { - removeMetadataOutput(output); + @Override + @SuppressWarnings("deprecation") // Forwarding deprecated method. + public void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetState) { + blockUntilConstructorFinished(); + player.prepare(mediaSource, resetPosition, resetState); } - /** - * @deprecated Use {@link #addAnalyticsListener(AnalyticsListener)} to get more detailed debug - * information. - */ - @Deprecated - @SuppressWarnings("deprecation") - public void setVideoDebugListener(VideoRendererEventListener listener) { - videoDebugListeners.retainAll(Collections.singleton(analyticsCollector)); - if (listener != null) { - addVideoDebugListener(listener); - } + @Override + public void setMediaItems(List mediaItems, boolean resetPosition) { + blockUntilConstructorFinished(); + player.setMediaItems(mediaItems, resetPosition); } - /** - * @deprecated Use {@link #addAnalyticsListener(AnalyticsListener)} to get more detailed debug - * information. - */ - @Deprecated - public void addVideoDebugListener(VideoRendererEventListener listener) { - videoDebugListeners.add(listener); + @Override + public void setMediaItems(List mediaItems, int startIndex, long startPositionMs) { + blockUntilConstructorFinished(); + player.setMediaItems(mediaItems, startIndex, startPositionMs); } - /** - * @deprecated Use {@link #addAnalyticsListener(AnalyticsListener)} and {@link - * #removeAnalyticsListener(AnalyticsListener)} to get more detailed debug information. - */ - @Deprecated - public void removeVideoDebugListener(VideoRendererEventListener listener) { - videoDebugListeners.remove(listener); + @Override + public void setMediaSources(List mediaSources) { + blockUntilConstructorFinished(); + player.setMediaSources(mediaSources); } - /** - * @deprecated Use {@link #addAnalyticsListener(AnalyticsListener)} to get more detailed debug - * information. - */ - @Deprecated - @SuppressWarnings("deprecation") - public void setAudioDebugListener(AudioRendererEventListener listener) { - audioDebugListeners.retainAll(Collections.singleton(analyticsCollector)); - if (listener != null) { - addAudioDebugListener(listener); - } + @Override + public void setMediaSources(List mediaSources, boolean resetPosition) { + blockUntilConstructorFinished(); + player.setMediaSources(mediaSources, resetPosition); } - /** - * @deprecated Use {@link #addAnalyticsListener(AnalyticsListener)} to get more detailed debug - * information. - */ - @Deprecated - public void addAudioDebugListener(AudioRendererEventListener listener) { - audioDebugListeners.add(listener); + @Override + public void setMediaSources( + List mediaSources, int startMediaItemIndex, long startPositionMs) { + blockUntilConstructorFinished(); + player.setMediaSources(mediaSources, startMediaItemIndex, startPositionMs); } - /** - * @deprecated Use {@link #addAnalyticsListener(AnalyticsListener)} and {@link - * #removeAnalyticsListener(AnalyticsListener)} to get more detailed debug information. 
- */ - @Deprecated - public void removeAudioDebugListener(AudioRendererEventListener listener) { - audioDebugListeners.remove(listener); + @Override + public void setMediaSource(MediaSource mediaSource) { + blockUntilConstructorFinished(); + player.setMediaSource(mediaSource); } - // ExoPlayer implementation - @Override - public Looper getPlaybackLooper() { - return player.getPlaybackLooper(); + public void setMediaSource(MediaSource mediaSource, boolean resetPosition) { + blockUntilConstructorFinished(); + player.setMediaSource(mediaSource, resetPosition); } @Override - public Looper getApplicationLooper() { - return player.getApplicationLooper(); + public void setMediaSource(MediaSource mediaSource, long startPositionMs) { + blockUntilConstructorFinished(); + player.setMediaSource(mediaSource, startPositionMs); } @Override - public void addListener(Player.EventListener listener) { - verifyApplicationThread(); - player.addListener(listener); + public void addMediaItems(int index, List mediaItems) { + blockUntilConstructorFinished(); + player.addMediaItems(index, mediaItems); } @Override - public void removeListener(Player.EventListener listener) { - verifyApplicationThread(); - player.removeListener(listener); + public void addMediaSource(MediaSource mediaSource) { + blockUntilConstructorFinished(); + player.addMediaSource(mediaSource); } @Override - @State - public int getPlaybackState() { - verifyApplicationThread(); - return player.getPlaybackState(); + public void addMediaSource(int index, MediaSource mediaSource) { + blockUntilConstructorFinished(); + player.addMediaSource(index, mediaSource); } @Override - @PlaybackSuppressionReason - public int getPlaybackSuppressionReason() { - verifyApplicationThread(); - return player.getPlaybackSuppressionReason(); + public void addMediaSources(List mediaSources) { + blockUntilConstructorFinished(); + player.addMediaSources(mediaSources); } @Override - @Nullable - public ExoPlaybackException getPlaybackError() { - verifyApplicationThread(); - return player.getPlaybackError(); + public void addMediaSources(int index, List mediaSources) { + blockUntilConstructorFinished(); + player.addMediaSources(index, mediaSources); } @Override - public void retry() { - verifyApplicationThread(); - if (mediaSource != null - && (getPlaybackError() != null || getPlaybackState() == Player.STATE_IDLE)) { - prepare(mediaSource, /* resetPosition= */ false, /* resetState= */ false); - } + public void moveMediaItems(int fromIndex, int toIndex, int newIndex) { + blockUntilConstructorFinished(); + player.moveMediaItems(fromIndex, toIndex, newIndex); } @Override - public void prepare(MediaSource mediaSource) { - prepare(mediaSource, /* resetPosition= */ true, /* resetState= */ true); + public void removeMediaItems(int fromIndex, int toIndex) { + blockUntilConstructorFinished(); + player.removeMediaItems(fromIndex, toIndex); } @Override - public void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetState) { - verifyApplicationThread(); - if (this.mediaSource != null) { - this.mediaSource.removeEventListener(analyticsCollector); - analyticsCollector.resetForNewMediaSource(); - } - this.mediaSource = mediaSource; - mediaSource.addEventListener(eventHandler, analyticsCollector); - boolean playWhenReady = getPlayWhenReady(); - @AudioFocusManager.PlayerCommand - int playerCommand = audioFocusManager.updateAudioFocus(playWhenReady, Player.STATE_BUFFERING); - updatePlayWhenReady(playWhenReady, playerCommand); - player.prepare(mediaSource, resetPosition, 
resetState); + public void setShuffleOrder(ShuffleOrder shuffleOrder) { + blockUntilConstructorFinished(); + player.setShuffleOrder(shuffleOrder); } @Override public void setPlayWhenReady(boolean playWhenReady) { - verifyApplicationThread(); - @AudioFocusManager.PlayerCommand - int playerCommand = audioFocusManager.updateAudioFocus(playWhenReady, getPlaybackState()); - updatePlayWhenReady(playWhenReady, playerCommand); + blockUntilConstructorFinished(); + player.setPlayWhenReady(playWhenReady); } @Override public boolean getPlayWhenReady() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getPlayWhenReady(); } + @Override + public void setPauseAtEndOfMediaItems(boolean pauseAtEndOfMediaItems) { + blockUntilConstructorFinished(); + player.setPauseAtEndOfMediaItems(pauseAtEndOfMediaItems); + } + + @Override + public boolean getPauseAtEndOfMediaItems() { + blockUntilConstructorFinished(); + return player.getPauseAtEndOfMediaItems(); + } + @Override public @RepeatMode int getRepeatMode() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getRepeatMode(); } @Override public void setRepeatMode(@RepeatMode int repeatMode) { - verifyApplicationThread(); + blockUntilConstructorFinished(); player.setRepeatMode(repeatMode); } @Override public void setShuffleModeEnabled(boolean shuffleModeEnabled) { - verifyApplicationThread(); + blockUntilConstructorFinished(); player.setShuffleModeEnabled(shuffleModeEnabled); } @Override public boolean getShuffleModeEnabled() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getShuffleModeEnabled(); } @Override public boolean isLoading() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.isLoading(); } + @SuppressWarnings("ForOverride") // Forwarding to ForOverride method in ExoPlayerImpl. 
+ @Override + @VisibleForTesting(otherwise = PROTECTED) + public void seekTo( + int mediaItemIndex, + long positionMs, + @Player.Command int seekCommand, + boolean isRepeatingCurrentItem) { + blockUntilConstructorFinished(); + player.seekTo(mediaItemIndex, positionMs, seekCommand, isRepeatingCurrentItem); + } + + @Override + public long getSeekBackIncrement() { + blockUntilConstructorFinished(); + return player.getSeekBackIncrement(); + } + + @Override + public long getSeekForwardIncrement() { + blockUntilConstructorFinished(); + return player.getSeekForwardIncrement(); + } + @Override - public void seekTo(int windowIndex, long positionMs) { - verifyApplicationThread(); - analyticsCollector.notifySeekStarted(); - player.seekTo(windowIndex, positionMs); + public long getMaxSeekToPreviousPosition() { + blockUntilConstructorFinished(); + return player.getMaxSeekToPreviousPosition(); } @Override - public void setPlaybackParameters(@Nullable PlaybackParameters playbackParameters) { - verifyApplicationThread(); + public void setPlaybackParameters(PlaybackParameters playbackParameters) { + blockUntilConstructorFinished(); player.setPlaybackParameters(playbackParameters); } @Override public PlaybackParameters getPlaybackParameters() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getPlaybackParameters(); } @Override public void setSeekParameters(@Nullable SeekParameters seekParameters) { - verifyApplicationThread(); + blockUntilConstructorFinished(); player.setSeekParameters(seekParameters); } @Override public SeekParameters getSeekParameters() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getSeekParameters(); } @Override public void setForegroundMode(boolean foregroundMode) { + blockUntilConstructorFinished(); player.setForegroundMode(foregroundMode); } + @Override + public void stop() { + blockUntilConstructorFinished(); + player.stop(); + } + + /** + * @deprecated Use {@link #stop()} and {@link #clearMediaItems()} (if {@code reset} is true) or + * just {@link #stop()} (if {@code reset} is false). Any player error will be cleared when + * {@link #prepare() re-preparing} the player. 
+ */ + @Deprecated @Override public void stop(boolean reset) { - verifyApplicationThread(); - audioFocusManager.updateAudioFocus(getPlayWhenReady(), Player.STATE_IDLE); + blockUntilConstructorFinished(); player.stop(reset); - if (mediaSource != null) { - mediaSource.removeEventListener(analyticsCollector); - analyticsCollector.resetForNewMediaSource(); - if (reset) { - mediaSource = null; - } - } - currentCues = Collections.emptyList(); } @Override + public void release() { + this.release(false); + } + public void release(boolean async) { - verifyApplicationThread(); - audioBecomingNoisyManager.setEnabled(false); - wakeLockManager.setStayAwake(false); - wifiLockManager.setStayAwake(false); - audioFocusManager.release(); - if (async) { - Utilities.globalQueue.postRunnable(() -> player.release(async)); - } else { - player.release(async); - } - removeSurfaceCallbacks(); - if (surface != null) { - if (ownsSurface) { - surface.release(); - } - surface = null; - } - if (mediaSource != null) { - mediaSource.removeEventListener(analyticsCollector); - mediaSource = null; - } - if (isPriorityTaskManagerRegistered) { - Assertions.checkNotNull(priorityTaskManager).remove(C.PRIORITY_PLAYBACK); - isPriorityTaskManagerRegistered = false; - } - bandwidthMeter.removeEventListener(analyticsCollector); - currentCues = Collections.emptyList(); - playerReleased = true; + blockUntilConstructorFinished(); +// if (async) { +// Utilities.globalQueue.postRunnable(() -> player.release()); +// } else { + player.release(); + // } } @Override public PlayerMessage createMessage(PlayerMessage.Target target) { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.createMessage(target); } @Override public int getRendererCount() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getRendererCount(); } @Override - public int getRendererType(int index) { - verifyApplicationThread(); + public @C.TrackType int getRendererType(int index) { + blockUntilConstructorFinished(); return player.getRendererType(index); } + @Override + public Renderer getRenderer(int index) { + blockUntilConstructorFinished(); + return player.getRenderer(index); + } + + @Override + public TrackSelector getTrackSelector() { + blockUntilConstructorFinished(); + return player.getTrackSelector(); + } + + /** + * @deprecated Use {@link #getCurrentTracks()}. + */ + @Deprecated @Override public TrackGroupArray getCurrentTrackGroups() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getCurrentTrackGroups(); } + /** + * @deprecated Use {@link #getCurrentTracks()}. 
+ */ + @Deprecated @Override public TrackSelectionArray getCurrentTrackSelections() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getCurrentTrackSelections(); } + @Override + public Tracks getCurrentTracks() { + blockUntilConstructorFinished(); + return player.getCurrentTracks(); + } + + @Override + public TrackSelectionParameters getTrackSelectionParameters() { + blockUntilConstructorFinished(); + return player.getTrackSelectionParameters(); + } + + @Override + public void setTrackSelectionParameters(TrackSelectionParameters parameters) { + blockUntilConstructorFinished(); + player.setTrackSelectionParameters(parameters); + } + + @Override + public MediaMetadata getMediaMetadata() { + blockUntilConstructorFinished(); + return player.getMediaMetadata(); + } + + @Override + public MediaMetadata getPlaylistMetadata() { + blockUntilConstructorFinished(); + return player.getPlaylistMetadata(); + } + + @Override + public void setPlaylistMetadata(MediaMetadata mediaMetadata) { + blockUntilConstructorFinished(); + player.setPlaylistMetadata(mediaMetadata); + } + @Override public Timeline getCurrentTimeline() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getCurrentTimeline(); } @Override public int getCurrentPeriodIndex() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getCurrentPeriodIndex(); } @Override - public int getCurrentWindowIndex() { - verifyApplicationThread(); - return player.getCurrentWindowIndex(); + public int getCurrentMediaItemIndex() { + blockUntilConstructorFinished(); + return player.getCurrentMediaItemIndex(); } @Override public long getDuration() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getDuration(); } @Override public long getCurrentPosition() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getCurrentPosition(); } @Override public long getBufferedPosition() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getBufferedPosition(); } @Override public long getTotalBufferedDuration() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getTotalBufferedDuration(); } @Override public boolean isPlayingAd() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.isPlayingAd(); } @Override public int getCurrentAdGroupIndex() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getCurrentAdGroupIndex(); } @Override public int getCurrentAdIndexInAdGroup() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getCurrentAdIndexInAdGroup(); } @Override public long getContentPosition() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getContentPosition(); } @Override public long getContentBufferedPosition() { - verifyApplicationThread(); + blockUntilConstructorFinished(); return player.getContentBufferedPosition(); } /** - * Sets whether the player should use a {@link android.os.PowerManager.WakeLock} to ensure the - * device stays awake for playback, even when the screen is off. - * - *
      Enabling this feature requires the {@link android.Manifest.permission#WAKE_LOCK} permission. - * It should be used together with a foreground {@link android.app.Service} for use cases where - * playback can occur when the screen is off (e.g. background audio playback). It is not useful if - * the screen will always be on during playback (e.g. foreground video playback). - * - *
      This feature is not enabled by default. If enabled, a WakeLock is held whenever the player - * is in the {@link #STATE_READY READY} or {@link #STATE_BUFFERING BUFFERING} states with {@code - * playWhenReady = true}. - * - * @param handleWakeLock Whether the player should use a {@link android.os.PowerManager.WakeLock} - * to ensure the device stays awake for playback, even when the screen is off. * @deprecated Use {@link #setWakeMode(int)} instead. */ @Deprecated + @Override public void setHandleWakeLock(boolean handleWakeLock) { - setWakeMode(handleWakeLock ? C.WAKE_MODE_LOCAL : C.WAKE_MODE_NONE); + blockUntilConstructorFinished(); + player.setHandleWakeLock(handleWakeLock); } - /** - * Sets how the player should keep the device awake for playback when the screen is off. - * - *
      Enabling this feature requires the {@link android.Manifest.permission#WAKE_LOCK} permission. - * It should be used together with a foreground {@link android.app.Service} for use cases where - * playback occurs and the screen is off (e.g. background audio playback). It is not useful when - * the screen will be kept on during playback (e.g. foreground video playback). - * - *
      When enabled, the locks ({@link android.os.PowerManager.WakeLock} / {@link - * android.net.wifi.WifiManager.WifiLock}) will be held whenever the player is in the {@link - * #STATE_READY} or {@link #STATE_BUFFERING} states with {@code playWhenReady = true}. The locks - * held depends on the specified {@link C.WakeMode}. - * - * @param wakeMode The {@link C.WakeMode} option to keep the device awake during playback. - */ + @Override public void setWakeMode(@C.WakeMode int wakeMode) { - switch (wakeMode) { - case C.WAKE_MODE_NONE: - wakeLockManager.setEnabled(false); - wifiLockManager.setEnabled(false); - break; - case C.WAKE_MODE_LOCAL: - wakeLockManager.setEnabled(true); - wifiLockManager.setEnabled(false); - break; - case C.WAKE_MODE_NETWORK: - wakeLockManager.setEnabled(true); - wifiLockManager.setEnabled(true); - break; - default: - break; - } + blockUntilConstructorFinished(); + player.setWakeMode(wakeMode); } - // Internal methods. - - private void removeSurfaceCallbacks() { - if (textureView != null) { - if (textureView.getSurfaceTextureListener() != componentListener) { - Log.w(TAG, "SurfaceTextureListener already unset or replaced."); - } else { - textureView.setSurfaceTextureListener(null); - } - textureView = null; - } - if (surfaceHolder != null) { - surfaceHolder.removeCallback(componentListener); - surfaceHolder = null; - } + @Override + public DeviceInfo getDeviceInfo() { + blockUntilConstructorFinished(); + return player.getDeviceInfo(); } - private void setVideoSurfaceInternal(@Nullable Surface surface, boolean ownsSurface) { - // Note: We don't turn this method into a no-op if the surface is being replaced with itself - // so as to ensure onRenderedFirstFrame callbacks are still called in this case. - List messages = new ArrayList<>(); - for (Renderer renderer : renderers) { - if (renderer.getTrackType() == C.TRACK_TYPE_VIDEO) { - messages.add( - player.createMessage(renderer).setType(C.MSG_SET_SURFACE).setPayload(surface).send()); - } - } - if (this.surface != null && this.surface != surface) { - // We're replacing a surface. Block to ensure that it's not accessed after the method returns. - try { - for (PlayerMessage message : messages) { - message.blockUntilDelivered(); - } - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } - // If we created the previous surface, we are responsible for releasing it. 
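For reference: the wake-mode behaviour described in the Javadoc removed above is now implemented by the wrapped player. An illustrative sketch for streamed background audio, assuming the app declares android.permission.WAKE_LOCK and an already-built ExoPlayer:

    import com.google.android.exoplayer2.C;
    import com.google.android.exoplayer2.ExoPlayer;

    final class BackgroundAudioExample {
      static void configureForBackgroundStreaming(ExoPlayer player) {
        // WAKE_MODE_NETWORK holds wake and wifi locks while the player is in the
        // READY or BUFFERING state with playWhenReady == true.
        player.setWakeMode(C.WAKE_MODE_NETWORK);
        // Pause automatically when headphones are unplugged (audio becoming noisy).
        player.setHandleAudioBecomingNoisy(true);
      }
    }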
- if (this.ownsSurface) { - this.surface.release(); - } - } - this.surface = surface; - this.ownsSurface = ownsSurface; - } - - private void setVideoDecoderOutputBufferRendererInternal( - @Nullable VideoDecoderOutputBufferRenderer videoDecoderOutputBufferRenderer) { - for (Renderer renderer : renderers) { - if (renderer.getTrackType() == C.TRACK_TYPE_VIDEO) { - player - .createMessage(renderer) - .setType(C.MSG_SET_VIDEO_DECODER_OUTPUT_BUFFER_RENDERER) - .setPayload(videoDecoderOutputBufferRenderer) - .send(); - } - } - this.videoDecoderOutputBufferRenderer = videoDecoderOutputBufferRenderer; + @Override + public int getDeviceVolume() { + blockUntilConstructorFinished(); + return player.getDeviceVolume(); } - private void maybeNotifySurfaceSizeChanged(int width, int height) { - if (width != surfaceWidth || height != surfaceHeight) { - surfaceWidth = width; - surfaceHeight = height; - for (com.google.android.exoplayer2.video.VideoListener videoListener : videoListeners) { - videoListener.onSurfaceSizeChanged(width, height); - } - } + @Override + public boolean isDeviceMuted() { + blockUntilConstructorFinished(); + return player.isDeviceMuted(); } - private void sendVolumeToRenderers() { - float scaledVolume = audioVolume * audioFocusManager.getVolumeMultiplier(); - for (Renderer renderer : renderers) { - if (renderer.getTrackType() == C.TRACK_TYPE_AUDIO) { - player.createMessage(renderer).setType(C.MSG_SET_VOLUME).setPayload(scaledVolume).send(); - } - } + @Override + public void setDeviceVolume(int volume) { + blockUntilConstructorFinished(); + player.setDeviceVolume(volume); } - private void updatePlayWhenReady( - boolean playWhenReady, @AudioFocusManager.PlayerCommand int playerCommand) { - playWhenReady = playWhenReady && playerCommand != AudioFocusManager.PLAYER_COMMAND_DO_NOT_PLAY; - @PlaybackSuppressionReason - int playbackSuppressionReason = - playWhenReady && playerCommand != AudioFocusManager.PLAYER_COMMAND_PLAY_WHEN_READY - ? Player.PLAYBACK_SUPPRESSION_REASON_TRANSIENT_AUDIO_FOCUS_LOSS - : Player.PLAYBACK_SUPPRESSION_REASON_NONE; - player.setPlayWhenReady(playWhenReady, playbackSuppressionReason); - } - - private void verifyApplicationThread() { - if (Looper.myLooper() != getApplicationLooper()) { - Log.w( - TAG, - "Player is accessed on the wrong thread. See " - + "https://exoplayer.dev/issues/player-accessed-on-wrong-thread", - hasNotifiedFullWrongThreadWarning ? 
null : new IllegalStateException()); - hasNotifiedFullWrongThreadWarning = true; - } + @Override + public void increaseDeviceVolume() { + blockUntilConstructorFinished(); + player.increaseDeviceVolume(); } - private void updateWakeAndWifiLock() { - @State int playbackState = getPlaybackState(); - switch (playbackState) { - case Player.STATE_READY: - case Player.STATE_BUFFERING: - wakeLockManager.setStayAwake(getPlayWhenReady()); - wifiLockManager.setStayAwake(getPlayWhenReady()); - break; - case Player.STATE_ENDED: - case Player.STATE_IDLE: - wakeLockManager.setStayAwake(false); - wifiLockManager.setStayAwake(false); - break; - default: - throw new IllegalStateException(); - } + @Override + public void decreaseDeviceVolume() { + blockUntilConstructorFinished(); + player.decreaseDeviceVolume(); } - private final class ComponentListener - implements VideoRendererEventListener, - AudioRendererEventListener, - TextOutput, - MetadataOutput, - SurfaceHolder.Callback, - TextureView.SurfaceTextureListener, - AudioFocusManager.PlayerControl, - AudioBecomingNoisyManager.EventListener, - Player.EventListener { - - // VideoRendererEventListener implementation - - @Override - public void onVideoEnabled(DecoderCounters counters) { - videoDecoderCounters = counters; - for (VideoRendererEventListener videoDebugListener : videoDebugListeners) { - videoDebugListener.onVideoEnabled(counters); - } - } - - @Override - public void onVideoDecoderInitialized( - String decoderName, long initializedTimestampMs, long initializationDurationMs) { - for (VideoRendererEventListener videoDebugListener : videoDebugListeners) { - videoDebugListener.onVideoDecoderInitialized( - decoderName, initializedTimestampMs, initializationDurationMs); - } - } - - @Override - public void onVideoInputFormatChanged(Format format) { - videoFormat = format; - for (VideoRendererEventListener videoDebugListener : videoDebugListeners) { - videoDebugListener.onVideoInputFormatChanged(format); - } - } - - @Override - public void onDroppedFrames(int count, long elapsed) { - for (VideoRendererEventListener videoDebugListener : videoDebugListeners) { - videoDebugListener.onDroppedFrames(count, elapsed); - } - } - - @Override - public void onVideoSizeChanged( - int width, int height, int unappliedRotationDegrees, float pixelWidthHeightRatio) { - for (com.google.android.exoplayer2.video.VideoListener videoListener : videoListeners) { - // Prevent duplicate notification if a listener is both a VideoRendererEventListener and - // a VideoListener, as they have the same method signature. 
- if (!videoDebugListeners.contains(videoListener)) { - videoListener.onVideoSizeChanged( - width, height, unappliedRotationDegrees, pixelWidthHeightRatio); - } - } - for (VideoRendererEventListener videoDebugListener : videoDebugListeners) { - videoDebugListener.onVideoSizeChanged( - width, height, unappliedRotationDegrees, pixelWidthHeightRatio); - } - } - - @Override - public void onRenderedFirstFrame(Surface surface) { - if (SimpleExoPlayer.this.surface == surface) { - for (com.google.android.exoplayer2.video.VideoListener videoListener : videoListeners) { - videoListener.onRenderedFirstFrame(); - } - } - for (VideoRendererEventListener videoDebugListener : videoDebugListeners) { - videoDebugListener.onRenderedFirstFrame(surface); - } - } - - @Override - public void onVideoDisabled(DecoderCounters counters) { - for (VideoRendererEventListener videoDebugListener : videoDebugListeners) { - videoDebugListener.onVideoDisabled(counters); - } - videoFormat = null; - videoDecoderCounters = null; - } - - // AudioRendererEventListener implementation - - @Override - public void onAudioEnabled(DecoderCounters counters) { - audioDecoderCounters = counters; - for (AudioRendererEventListener audioDebugListener : audioDebugListeners) { - audioDebugListener.onAudioEnabled(counters); - } - } - - @Override - public void onAudioSessionId(int sessionId) { - if (audioSessionId == sessionId) { - return; - } - audioSessionId = sessionId; - for (AudioListener audioListener : audioListeners) { - // Prevent duplicate notification if a listener is both a AudioRendererEventListener and - // a AudioListener, as they have the same method signature. - if (!audioDebugListeners.contains(audioListener)) { - audioListener.onAudioSessionId(sessionId); - } - } - for (AudioRendererEventListener audioDebugListener : audioDebugListeners) { - audioDebugListener.onAudioSessionId(sessionId); - } - } - - @Override - public void onAudioDecoderInitialized( - String decoderName, long initializedTimestampMs, long initializationDurationMs) { - for (AudioRendererEventListener audioDebugListener : audioDebugListeners) { - audioDebugListener.onAudioDecoderInitialized( - decoderName, initializedTimestampMs, initializationDurationMs); - } - } - - @Override - public void onAudioInputFormatChanged(Format format) { - audioFormat = format; - for (AudioRendererEventListener audioDebugListener : audioDebugListeners) { - audioDebugListener.onAudioInputFormatChanged(format); - } - } - - @Override - public void onAudioSinkUnderrun( - int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) { - for (AudioRendererEventListener audioDebugListener : audioDebugListeners) { - audioDebugListener.onAudioSinkUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs); - } - } - - @Override - public void onAudioDisabled(DecoderCounters counters) { - for (AudioRendererEventListener audioDebugListener : audioDebugListeners) { - audioDebugListener.onAudioDisabled(counters); - } - audioFormat = null; - audioDecoderCounters = null; - audioSessionId = C.AUDIO_SESSION_ID_UNSET; - } - - // TextOutput implementation - - @Override - public void onCues(List cues) { - currentCues = cues; - for (TextOutput textOutput : textOutputs) { - textOutput.onCues(cues); - } - } - - // MetadataOutput implementation - - @Override - public void onMetadata(Metadata metadata) { - for (MetadataOutput metadataOutput : metadataOutputs) { - metadataOutput.onMetadata(metadata); - } - } - - // SurfaceHolder.Callback implementation - - @Override - public void 
surfaceCreated(SurfaceHolder holder) { - setVideoSurfaceInternal(holder.getSurface(), false); - } - - @Override - public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { - maybeNotifySurfaceSizeChanged(width, height); - } - - @Override - public void surfaceDestroyed(SurfaceHolder holder) { - setVideoSurfaceInternal(/* surface= */ null, /* ownsSurface= */ false); - maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); - } - - // TextureView.SurfaceTextureListener implementation - - @Override - public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) { - setVideoSurfaceInternal(new Surface(surfaceTexture), /* ownsSurface= */ true); - maybeNotifySurfaceSizeChanged(width, height); - } - - @Override - public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) { - maybeNotifySurfaceSizeChanged(width, height); - } - - @Override - public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) { - for (com.google.android.exoplayer2.video.VideoListener videoListener : videoListeners) { - if (videoListener.onSurfaceDestroyed(surfaceTexture)) { - return false; - } - } - setVideoSurfaceInternal(/* surface= */ null, /* ownsSurface= */ true); - maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); - return true; - } - - @Override - public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) { - for (com.google.android.exoplayer2.video.VideoListener videoListener : videoListeners) { - videoListener.onSurfaceTextureUpdated(surfaceTexture); - } - // Do nothing. - } - - // AudioFocusManager.PlayerControl implementation - - @Override - public void setVolumeMultiplier(float volumeMultiplier) { - sendVolumeToRenderers(); - } - - @Override - public void executePlayerCommand(@AudioFocusManager.PlayerCommand int playerCommand) { - updatePlayWhenReady(getPlayWhenReady(), playerCommand); - } - - // AudioBecomingNoisyManager.EventListener implementation. + @Override + public void setDeviceMuted(boolean muted) { + blockUntilConstructorFinished(); + player.setDeviceMuted(muted); + } - @Override - public void onAudioBecomingNoisy() { - setPlayWhenReady(false); - } + @Override + public boolean isTunnelingEnabled() { + blockUntilConstructorFinished(); + return player.isTunnelingEnabled(); + } - // Player.EventListener implementation. - - @Override - public void onLoadingChanged(boolean isLoading) { - if (priorityTaskManager != null) { - if (isLoading && !isPriorityTaskManagerRegistered) { - priorityTaskManager.add(C.PRIORITY_PLAYBACK); - isPriorityTaskManagerRegistered = true; - } else if (!isLoading && isPriorityTaskManagerRegistered) { - priorityTaskManager.remove(C.PRIORITY_PLAYBACK); - isPriorityTaskManagerRegistered = false; - } - } - } + /* package */ void setThrowsWhenUsingWrongThread(boolean throwsWhenUsingWrongThread) { + blockUntilConstructorFinished(); + player.setThrowsWhenUsingWrongThread(throwsWhenUsingWrongThread); + } - @Override - public void onPlayerStateChanged(boolean playWhenReady, @State int playbackState) { - updateWakeAndWifiLock(); - } + private void blockUntilConstructorFinished() { + // The constructor may be executed on a background thread. Wait with accessing the player from + // the app thread until the constructor finished executing. 
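All of the overrides above share one guarded-delegation shape: block until the asynchronously executed constructor has finished, then forward the call to the internal player. A self-contained sketch of that pattern, assuming ExoPlayer's ConditionVariable utility; the backend type and names are illustrative, not taken from the source:

    import com.google.android.exoplayer2.util.ConditionVariable;

    final class GuardedDelegate {
      private final ConditionVariable constructorFinished = new ConditionVariable();
      private volatile VolumeBackend backend;

      GuardedDelegate() {
        // Construction may happen off the caller's thread; open the gate only when it is done.
        new Thread(
                () -> {
                  backend = volume -> { /* no-op backend for the sketch */ };
                  constructorFinished.open();
                })
            .start();
      }

      public void setDeviceVolume(int volume) {
        // Wait without throwing InterruptedException, mirroring blockUntilConstructorFinished().
        constructorFinished.blockUninterruptible();
        backend.setDeviceVolume(volume);
      }

      interface VolumeBackend {
        void setDeviceVolume(int volume);
      }
    }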
+ constructorFinished.blockUninterruptible(); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/StarRating.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/StarRating.java new file mode 100644 index 0000000000..8233fed2e9 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/StarRating.java @@ -0,0 +1,126 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import android.os.Bundle; +import androidx.annotation.FloatRange; +import androidx.annotation.IntRange; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Objects; + +/** A rating expressed as a fractional number of stars. */ +public final class StarRating extends Rating { + + @IntRange(from = 1) + private final int maxStars; + + private final float starRating; + + /** + * Creates a unrated instance with {@code maxStars}. If {@code maxStars} is not a positive + * integer, it will throw an {@link IllegalArgumentException}. + * + * @param maxStars The maximum number of stars this rating can have. + */ + public StarRating(@IntRange(from = 1) int maxStars) { + checkArgument(maxStars > 0, "maxStars must be a positive integer"); + this.maxStars = maxStars; + starRating = RATING_UNSET; + } + + /** + * Creates a rated instance with {@code maxStars} and the given fractional number of stars. + * Non-integer values may be used to represent an average rating value. If {@code maxStars} is not + * a positive integer or {@code starRating} is out of range, it will throw an {@link + * IllegalArgumentException}. + * + * @param maxStars The maximum number of stars this rating can have. + * @param starRating A fractional number of stars of this rating from {@code 0f} to {@code + * maxStars}. + */ + public StarRating(@IntRange(from = 1) int maxStars, @FloatRange(from = 0.0) float starRating) { + checkArgument(maxStars > 0, "maxStars must be a positive integer"); + checkArgument( + starRating >= 0.0f && starRating <= maxStars, "starRating is out of range [0, maxStars]"); + this.maxStars = maxStars; + this.starRating = starRating; + } + + @Override + public boolean isRated() { + return starRating != RATING_UNSET; + } + + /** Returns the maximum number of stars. Must be a positive number. */ + @IntRange(from = 1) + public int getMaxStars() { + return maxStars; + } + + /** + * Returns the fractional number of stars of this rating. Will range from {@code 0f} to {@link + * #maxStars}, or {@link #RATING_UNSET} if unrated. 
+ */ + public float getStarRating() { + return starRating; + } + + @Override + public int hashCode() { + return Objects.hashCode(maxStars, starRating); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (!(obj instanceof StarRating)) { + return false; + } + StarRating other = (StarRating) obj; + return maxStars == other.maxStars && starRating == other.starRating; + } + + // Bundleable implementation. + + private static final @RatingType int TYPE = RATING_TYPE_STAR; + private static final int MAX_STARS_DEFAULT = 5; + + private static final String FIELD_MAX_STARS = Util.intToStringMaxRadix(1); + private static final String FIELD_STAR_RATING = Util.intToStringMaxRadix(2); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putInt(FIELD_RATING_TYPE, TYPE); + bundle.putInt(FIELD_MAX_STARS, maxStars); + bundle.putFloat(FIELD_STAR_RATING, starRating); + return bundle; + } + + /** Object that can restore a {@link StarRating} from a {@link Bundle}. */ + public static final Creator CREATOR = StarRating::fromBundle; + + private static StarRating fromBundle(Bundle bundle) { + checkArgument(bundle.getInt(FIELD_RATING_TYPE, /* defaultValue= */ RATING_TYPE_UNSET) == TYPE); + int maxStars = bundle.getInt(FIELD_MAX_STARS, /* defaultValue= */ MAX_STARS_DEFAULT); + float starRating = bundle.getFloat(FIELD_STAR_RATING, /* defaultValue= */ RATING_UNSET); + return starRating == RATING_UNSET + ? new StarRating(maxStars) + : new StarRating(maxStars, starRating); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/StreamVolumeManager.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/StreamVolumeManager.java new file mode 100644 index 0000000000..41e1c0f6ca --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/StreamVolumeManager.java @@ -0,0 +1,217 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import android.content.BroadcastReceiver; +import android.content.Context; +import android.content.Intent; +import android.content.IntentFilter; +import android.media.AudioManager; +import android.os.Handler; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.Util; + +/** A manager that wraps {@link AudioManager} to control/listen audio stream volume. */ +/* package */ final class StreamVolumeManager { + + /** A listener for changes in the manager. */ + public interface Listener { + + /** Called when the audio stream type is changed. */ + void onStreamTypeChanged(@C.StreamType int streamType); + + /** Called when the audio stream volume or mute state is changed. 
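StarRating, shown in full above, rejects invalid arguments in its constructors and round-trips through a Bundle via toBundle() and CREATOR. A short usage sketch; the wrapper class exists only for illustration:

    import android.os.Bundle;

    import com.google.android.exoplayer2.StarRating;

    final class StarRatingExample {
      static StarRating roundTrip() {
        // new StarRating(5, 7f) would throw: starRating must lie in [0, maxStars].
        StarRating rating = new StarRating(/* maxStars= */ 5, /* starRating= */ 3.5f);
        Bundle bundle = rating.toBundle();
        StarRating restored = StarRating.CREATOR.fromBundle(bundle);
        // restored.equals(rating) holds, and restored.isRated() is true.
        return restored;
      }
    }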
*/ + void onStreamVolumeChanged(int streamVolume, boolean streamMuted); + } + + private static final String TAG = "StreamVolumeManager"; + + // TODO(b/151280453): Replace the hidden intent action with an official one. + // Copied from AudioManager#VOLUME_CHANGED_ACTION + private static final String VOLUME_CHANGED_ACTION = "android.media.VOLUME_CHANGED_ACTION"; + + // TODO(b/153317944): Allow users to override these flags. + private static final int VOLUME_FLAGS = AudioManager.FLAG_SHOW_UI; + + private final Context applicationContext; + private final Handler eventHandler; + private final Listener listener; + private final AudioManager audioManager; + + @Nullable private VolumeChangeReceiver receiver; + private @C.StreamType int streamType; + private int volume; + private boolean muted; + + /** Creates a manager. */ + public StreamVolumeManager(Context context, Handler eventHandler, Listener listener) { + applicationContext = context.getApplicationContext(); + this.eventHandler = eventHandler; + this.listener = listener; + audioManager = + Assertions.checkStateNotNull( + (AudioManager) applicationContext.getSystemService(Context.AUDIO_SERVICE)); + + streamType = C.STREAM_TYPE_DEFAULT; + volume = getVolumeFromManager(audioManager, streamType); + muted = getMutedFromManager(audioManager, streamType); + + VolumeChangeReceiver receiver = new VolumeChangeReceiver(); + IntentFilter filter = new IntentFilter(VOLUME_CHANGED_ACTION); + try { + Util.registerReceiverNotExported(applicationContext, receiver, filter); + this.receiver = receiver; + } catch (RuntimeException e) { + Log.w(TAG, "Error registering stream volume receiver", e); + } + } + + /** Sets the audio stream type. */ + public void setStreamType(@C.StreamType int streamType) { + if (this.streamType == streamType) { + return; + } + this.streamType = streamType; + + updateVolumeAndNotifyIfChanged(); + listener.onStreamTypeChanged(streamType); + } + + /** + * Gets the minimum volume for the current audio stream. It can be changed if {@link + * #setStreamType(int)} is called. + */ + public int getMinVolume() { + return Util.SDK_INT >= 28 ? audioManager.getStreamMinVolume(streamType) : 0; + } + + /** + * Gets the maximum volume for the current audio stream. It can be changed if {@link + * #setStreamType(int)} is called. + */ + public int getMaxVolume() { + return audioManager.getStreamMaxVolume(streamType); + } + + /** Gets the current volume for the current audio stream. */ + public int getVolume() { + return volume; + } + + /** Gets whether the current audio stream is muted or not. */ + public boolean isMuted() { + return muted; + } + + /** + * Sets the volume with the given value for the current audio stream. The value should be between + * {@link #getMinVolume()} and {@link #getMaxVolume()}, otherwise it will be ignored. + */ + public void setVolume(int volume) { + if (volume < getMinVolume() || volume > getMaxVolume()) { + return; + } + audioManager.setStreamVolume(streamType, volume, VOLUME_FLAGS); + updateVolumeAndNotifyIfChanged(); + } + + /** + * Increases the volume by one for the current audio stream. It will be ignored if the current + * volume is equal to {@link #getMaxVolume()}. + */ + public void increaseVolume() { + if (volume >= getMaxVolume()) { + return; + } + audioManager.adjustStreamVolume(streamType, AudioManager.ADJUST_RAISE, VOLUME_FLAGS); + updateVolumeAndNotifyIfChanged(); + } + + /** + * Decreases the volume by one for the current audio stream. 
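StreamVolumeManager is declared package-private, so it can only be wired up from inside the com.google.android.exoplayer2 package, which is how the player itself uses it. A sketch of that wiring under the same assumption, with a purely illustrative listener:

    package com.google.android.exoplayer2;

    import android.content.Context;
    import android.os.Handler;
    import android.os.Looper;

    final class StreamVolumeManagerExample {
      static StreamVolumeManager create(Context context) {
        StreamVolumeManager.Listener listener =
            new StreamVolumeManager.Listener() {
              @Override
              public void onStreamTypeChanged(@C.StreamType int streamType) {}

              @Override
              public void onStreamVolumeChanged(int streamVolume, boolean streamMuted) {
                // Fires for volume-key presses too, via the VOLUME_CHANGED_ACTION receiver.
              }
            };
        StreamVolumeManager manager =
            new StreamVolumeManager(context, new Handler(Looper.getMainLooper()), listener);
        manager.increaseVolume(); // ignored if already at getMaxVolume()
        manager.setMuted(false);  // uses ADJUST_UNMUTE on API 23+
        return manager;           // call release() when no longer needed
      }
    }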
It will be ignored if the current + * volume is equal to {@link #getMinVolume()}. + */ + public void decreaseVolume() { + if (volume <= getMinVolume()) { + return; + } + audioManager.adjustStreamVolume(streamType, AudioManager.ADJUST_LOWER, VOLUME_FLAGS); + updateVolumeAndNotifyIfChanged(); + } + + /** Sets the mute state of the current audio stream. */ + public void setMuted(boolean muted) { + if (Util.SDK_INT >= 23) { + audioManager.adjustStreamVolume( + streamType, muted ? AudioManager.ADJUST_MUTE : AudioManager.ADJUST_UNMUTE, VOLUME_FLAGS); + } else { + audioManager.setStreamMute(streamType, muted); + } + updateVolumeAndNotifyIfChanged(); + } + + /** Releases the manager. It must be called when the manager is no longer required. */ + public void release() { + if (receiver != null) { + try { + applicationContext.unregisterReceiver(receiver); + } catch (RuntimeException e) { + Log.w(TAG, "Error unregistering stream volume receiver", e); + } + receiver = null; + } + } + + private void updateVolumeAndNotifyIfChanged() { + int newVolume = getVolumeFromManager(audioManager, streamType); + boolean newMuted = getMutedFromManager(audioManager, streamType); + if (volume != newVolume || muted != newMuted) { + volume = newVolume; + muted = newMuted; + listener.onStreamVolumeChanged(newVolume, newMuted); + } + } + + private static int getVolumeFromManager(AudioManager audioManager, @C.StreamType int streamType) { + // AudioManager#getStreamVolume(int) throws an exception on some devices. See + // https://github.com/google/ExoPlayer/issues/8191. + try { + return audioManager.getStreamVolume(streamType); + } catch (RuntimeException e) { + Log.w(TAG, "Could not retrieve stream volume for stream type " + streamType, e); + return audioManager.getStreamMaxVolume(streamType); + } + } + + private static boolean getMutedFromManager( + AudioManager audioManager, @C.StreamType int streamType) { + if (Util.SDK_INT >= 23) { + return audioManager.isStreamMute(streamType); + } else { + return getVolumeFromManager(audioManager, streamType) == 0; + } + } + + private final class VolumeChangeReceiver extends BroadcastReceiver { + + @Override + public void onReceive(Context context, Intent intent) { + eventHandler.post(StreamVolumeManager.this::updateVolumeAndNotifyIfChanged); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ThumbRating.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ThumbRating.java new file mode 100644 index 0000000000..3e38c79057 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ThumbRating.java @@ -0,0 +1,97 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import android.os.Bundle; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Objects; + +/** A rating expressed as "thumbs up" or "thumbs down". */ +public final class ThumbRating extends Rating { + + private final boolean rated; + private final boolean isThumbsUp; + + /** Creates a unrated instance. */ + public ThumbRating() { + rated = false; + isThumbsUp = false; + } + + /** + * Creates a rated instance. + * + * @param isThumbsUp {@code true} for "thumbs up", {@code false} for "thumbs down". + */ + public ThumbRating(boolean isThumbsUp) { + rated = true; + this.isThumbsUp = isThumbsUp; + } + + @Override + public boolean isRated() { + return rated; + } + + /** Returns whether the rating is "thumbs up". */ + public boolean isThumbsUp() { + return isThumbsUp; + } + + @Override + public int hashCode() { + return Objects.hashCode(rated, isThumbsUp); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (!(obj instanceof ThumbRating)) { + return false; + } + ThumbRating other = (ThumbRating) obj; + return isThumbsUp == other.isThumbsUp && rated == other.rated; + } + + // Bundleable implementation. + + private static final @RatingType int TYPE = RATING_TYPE_THUMB; + + private static final String FIELD_RATED = Util.intToStringMaxRadix(1); + private static final String FIELD_IS_THUMBS_UP = Util.intToStringMaxRadix(2); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putInt(FIELD_RATING_TYPE, TYPE); + bundle.putBoolean(FIELD_RATED, rated); + bundle.putBoolean(FIELD_IS_THUMBS_UP, isThumbsUp); + return bundle; + } + + /** Object that can restore a {@link ThumbRating} from a {@link Bundle}. */ + public static final Creator CREATOR = ThumbRating::fromBundle; + + private static ThumbRating fromBundle(Bundle bundle) { + checkArgument(bundle.getInt(FIELD_RATING_TYPE, /* defaultValue= */ RATING_TYPE_UNSET) == TYPE); + boolean rated = bundle.getBoolean(FIELD_RATED, /* defaultValue= */ false); + return rated + ? 
new ThumbRating(bundle.getBoolean(FIELD_IS_THUMBS_UP, /* defaultValue= */ false)) + : new ThumbRating(); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/Timeline.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/Timeline.java index 4dac71559a..6fb855cc25 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/Timeline.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/Timeline.java @@ -15,11 +15,27 @@ */ package com.google.android.exoplayer2; +import static com.google.android.exoplayer2.source.ads.AdPlaybackState.AD_STATE_UNAVAILABLE; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static java.lang.Math.max; +import static java.lang.Math.min; + +import android.net.Uri; +import android.os.Bundle; +import android.os.IBinder; +import android.os.SystemClock; import android.util.Pair; import androidx.annotation.Nullable; import com.google.android.exoplayer2.source.ads.AdPlaybackState; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.BundleUtil; import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import com.google.errorprone.annotations.InlineMe; +import java.util.ArrayList; +import java.util.List; /** * A flexible representation of the structure of media. A timeline is able to represent the @@ -43,67 +59,79 @@ * *

 * The following examples illustrate timelines for various use cases.
 *
 * Single media file or on-demand stream
 *
 * [figure: Example timeline for a single file]
 *
- * A timeline for a single media file or on-demand stream consists of a single period
- * and window. The window spans the whole period, indicating that all parts of the media are
- * available for playback. The window's default position is typically at the start of the period
- * (indicated by the black dot in the figure above).
+ * A timeline for a single media file or on-demand stream consists of a single period and window.
+ * The window spans the whole period, indicating that all parts of the media are available for
+ * playback. The window's default position is typically at the start of the period (indicated by the
+ * black dot in the figure above).
 *
 * Playlist of media files or on-demand streams
 *
 * [figure: Example timeline for a playlist of files]
 *
- * A timeline for a playlist of media files or on-demand streams consists of
- * multiple periods, each with its own window. Each window spans the whole of the corresponding
- * period, and typically has a default position at the start of the period. The properties of the
- * periods and windows (e.g. their durations and whether the window is seekable) will often only
- * become known when the player starts buffering the corresponding file or stream.
+ * A timeline for a playlist of media files or on-demand streams consists of multiple periods,
+ * each with its own window. Each window spans the whole of the corresponding period, and typically
+ * has a default position at the start of the period. The properties of the periods and windows
+ * (e.g. their durations and whether the window is seekable) will often only become known when the
+ * player starts buffering the corresponding file or stream.
 *
 * Live stream with limited availability
 *
 * [figure: Example timeline for a live stream with limited availability]
 *
- * A timeline for a live stream consists of a period whose
- * duration is unknown, since it's continually extending as more content is broadcast. If content
- * only remains available for a limited period of time then the window may start at a non-zero
- * position, defining the region of content that can still be played. The window will have {@link
- * Window#isLive} set to true to indicate it's a live stream and {@link Window#isDynamic} set to
- * true as long as we expect changes to the live window. Its default position is typically near to
- * the live edge (indicated by the black dot in the figure above).
+ * A timeline for a live stream consists of a period whose duration is unknown, since it's
+ * continually extending as more content is broadcast. If content only remains available for a
+ * limited period of time then the window may start at a non-zero position, defining the region of
+ * content that can still be played. The window will return true from {@link Window#isLive()} to
+ * indicate it's a live stream and {@link Window#isDynamic} will be set to true as long as we expect
+ * changes to the live window. Its default position is typically near to the live edge (indicated by
+ * the black dot in the figure above).
 *
 * Live stream with indefinite availability
 *
 * [figure: Example timeline for a live stream with indefinite availability]
 *
- * A timeline for a live stream with indefinite
- * availability is similar to the Live stream with limited availability
- * case, except that the window starts at the beginning of the period to indicate that all of the
- * previously broadcast content can still be played.
+ * A timeline for a live stream with indefinite availability is similar to the
+ * Live stream with limited availability case, except that the window
+ * starts at the beginning of the period to indicate that all of the previously broadcast content
+ * can still be played.
 *
 * Live stream with multiple periods
 *
 * [figure: Example timeline for a live stream with multiple periods]
 *
- * This case arises when a live stream is explicitly
- * divided into separate periods, for example at content boundaries. This case is similar to the
- * Live stream with limited availability case, except that the window may
- * span more than one period. Multiple periods are also possible in the indefinite availability
- * case.
+ * This case arises when a live stream is explicitly divided into separate periods, for example
+ * at content boundaries. This case is similar to the Live stream with
+ * limited availability case, except that the window may span more than one period. Multiple
+ * periods are also possible in the indefinite availability case.
 *
 * On-demand stream followed by live stream
 *
 * [figure: Example timeline for an on-demand stream followed by a live stream]
 *
- * This case is the concatenation of the Single media file or on-demand stream and Live
- * stream with multiple periods cases. When playback of the on-demand stream ends, playback of
- * the live stream will start from its default position near the live edge.
+ * This case is the concatenation of the Single media file or on-demand
+ * stream and Live stream with multiple periods cases. When playback
+ * of the on-demand stream ends, playback of the live stream will start from its default position
+ * near the live edge.
 *
 * On-demand stream with mid-roll ads
 *
 * [figure: Example timeline for an on-demand stream with mid-roll ad groups]
 *
- * This case includes mid-roll ad groups,
- * which are defined as part of the timeline's single period. The period can be queried for
- * information about the ad groups and the ads they contain.
      This case includes mid-roll ad groups, which are defined as part of the timeline's single + * period. The period can be queried for information about the ad groups and the ads they contain. */ -public abstract class Timeline { +public abstract class Timeline implements Bundleable { /** * Holds information about a window in a {@link Timeline}. A window usually corresponds to one @@ -115,40 +143,62 @@ public abstract class Timeline { *

 * [figure] Information defined by a
    * timeline window */ - public static final class Window { + public static final class Window implements Bundleable { /** * A {@link #uid} for a window that must be used for single-window {@link Timeline Timelines}. */ public static final Object SINGLE_WINDOW_UID = new Object(); + private static final Object FAKE_WINDOW_UID = new Object(); + + private static final MediaItem PLACEHOLDER_MEDIA_ITEM = + new MediaItem.Builder() + .setMediaId("com.google.android.exoplayer2.Timeline") + .setUri(Uri.EMPTY) + .build(); + /** * A unique identifier for the window. Single-window {@link Timeline Timelines} must use {@link * #SINGLE_WINDOW_UID}. */ public Object uid; - /** A tag for the window. Not necessarily unique. */ - @Nullable public Object tag; + /** + * @deprecated Use {@link #mediaItem} instead. + */ + @Deprecated @Nullable public Object tag; + + /** The {@link MediaItem} associated to the window. Not necessarily unique. */ + public MediaItem mediaItem; /** The manifest of the window. May be {@code null}. */ @Nullable public Object manifest; /** * The start time of the presentation to which this window belongs in milliseconds since the - * epoch, or {@link C#TIME_UNSET} if unknown or not applicable. For informational purposes only. + * Unix epoch, or {@link C#TIME_UNSET} if unknown or not applicable. For informational purposes + * only. */ public long presentationStartTimeMs; /** - * The window's start time in milliseconds since the epoch, or {@link C#TIME_UNSET} if unknown - * or not applicable. For informational purposes only. + * The window's start time in milliseconds since the Unix epoch, or {@link C#TIME_UNSET} if + * unknown or not applicable. */ public long windowStartTimeMs; /** - * Whether it's possible to seek within this window. + * The offset between {@link SystemClock#elapsedRealtime()} and the time since the Unix epoch + * according to the clock of the media origin server, or {@link C#TIME_UNSET} if unknown or not + * applicable. + * + *

      Note that the current Unix time can be retrieved using {@link #getCurrentUnixTimeMs()} and + * is calculated as {@code SystemClock.elapsedRealtime() + elapsedRealtimeEpochOffsetMs}. */ + public long elapsedRealtimeEpochOffsetMs; + + /** Whether it's possible to seek within this window. */ public boolean isSeekable; // TODO: Split this to better describe which parts of the window might change. For example it @@ -160,19 +210,21 @@ public static final class Window { public boolean isDynamic; /** - * Whether the media in this window is live. For informational purposes only. - * - *

      Check {@link #isDynamic} to know whether this window may still change. + * @deprecated Use {@link #isLive()} instead. */ - public boolean isLive; + @Deprecated public boolean isLive; - /** The index of the first period that belongs to this window. */ - public int firstPeriodIndex; + /** + * The {@link MediaItem.LiveConfiguration} that is used or null if {@link #isLive()} returns + * false. + */ + @Nullable public MediaItem.LiveConfiguration liveConfiguration; /** - * The index of the last period that belongs to this window. + * Whether this window contains placeholder information because the real information has yet to + * be loaded. */ - public int lastPeriodIndex; + public boolean isPlaceholder; /** * The default position relative to the start of the window at which to begin playback, in @@ -182,11 +234,15 @@ public static final class Window { */ public long defaultPositionUs; - /** - * The duration of this window in microseconds, or {@link C#TIME_UNSET} if unknown. - */ + /** The duration of this window in microseconds, or {@link C#TIME_UNSET} if unknown. */ public long durationUs; + /** The index of the first period that belongs to this window. */ + public int firstPeriodIndex; + + /** The index of the last period that belongs to this window. */ + public int lastPeriodIndex; + /** * The position of the start of this window relative to the start of the first period belonging * to it, in microseconds. @@ -196,36 +252,47 @@ public static final class Window { /** Creates window. */ public Window() { uid = SINGLE_WINDOW_UID; + mediaItem = PLACEHOLDER_MEDIA_ITEM; } /** Sets the data held by this window. */ + @CanIgnoreReturnValue + @SuppressWarnings("deprecation") public Window set( Object uid, - @Nullable Object tag, + @Nullable MediaItem mediaItem, @Nullable Object manifest, long presentationStartTimeMs, long windowStartTimeMs, + long elapsedRealtimeEpochOffsetMs, boolean isSeekable, boolean isDynamic, - boolean isLive, + @Nullable MediaItem.LiveConfiguration liveConfiguration, long defaultPositionUs, long durationUs, int firstPeriodIndex, int lastPeriodIndex, long positionInFirstPeriodUs) { this.uid = uid; - this.tag = tag; + this.mediaItem = mediaItem != null ? mediaItem : PLACEHOLDER_MEDIA_ITEM; + this.tag = + mediaItem != null && mediaItem.localConfiguration != null + ? mediaItem.localConfiguration.tag + : null; this.manifest = manifest; this.presentationStartTimeMs = presentationStartTimeMs; this.windowStartTimeMs = windowStartTimeMs; + this.elapsedRealtimeEpochOffsetMs = elapsedRealtimeEpochOffsetMs; this.isSeekable = isSeekable; this.isDynamic = isDynamic; - this.isLive = isLive; + this.isLive = liveConfiguration != null; + this.liveConfiguration = liveConfiguration; this.defaultPositionUs = defaultPositionUs; this.durationUs = durationUs; this.firstPeriodIndex = firstPeriodIndex; this.lastPeriodIndex = lastPeriodIndex; this.positionInFirstPeriodUs = positionInFirstPeriodUs; + this.isPlaceholder = false; return this; } @@ -236,7 +303,7 @@ public Window set( * whilst remaining within the bounds of the window. */ public long getDefaultPositionMs() { - return C.usToMs(defaultPositionUs); + return Util.usToMs(defaultPositionUs); } /** @@ -249,16 +316,12 @@ public long getDefaultPositionUs() { return defaultPositionUs; } - /** - * Returns the duration of the window in milliseconds, or {@link C#TIME_UNSET} if unknown. - */ + /** Returns the duration of the window in milliseconds, or {@link C#TIME_UNSET} if unknown. 
*/ public long getDurationMs() { - return C.usToMs(durationUs); + return Util.usToMs(durationUs); } - /** - * Returns the duration of this window in microseconds, or {@link C#TIME_UNSET} if unknown. - */ + /** Returns the duration of this window in microseconds, or {@link C#TIME_UNSET} if unknown. */ public long getDurationUs() { return durationUs; } @@ -268,7 +331,7 @@ public long getDurationUs() { * belonging to it, in milliseconds. */ public long getPositionInFirstPeriodMs() { - return C.usToMs(positionInFirstPeriodUs); + return Util.usToMs(positionInFirstPeriodUs); } /** @@ -279,6 +342,25 @@ public long getPositionInFirstPeriodUs() { return positionInFirstPeriodUs; } + /** + * Returns the current time in milliseconds since the Unix epoch. + * + *

      This method applies {@link #elapsedRealtimeEpochOffsetMs known corrections} made available + * by the media such that this time corresponds to the clock of the media origin server. + */ + public long getCurrentUnixTimeMs() { + return Util.getNowUnixTimeMs(elapsedRealtimeEpochOffsetMs); + } + + /** Returns whether this is a live stream. */ + // Verifies whether the deprecated isLive member field is in a correct state. + @SuppressWarnings("deprecation") + public boolean isLive() { + checkState(isLive == (liveConfiguration != null)); + return liveConfiguration != null; + } + + // Provide backward compatibility for tag. @Override public boolean equals(@Nullable Object obj) { if (this == obj) { @@ -289,13 +371,15 @@ public boolean equals(@Nullable Object obj) { } Window that = (Window) obj; return Util.areEqual(uid, that.uid) - && Util.areEqual(tag, that.tag) + && Util.areEqual(mediaItem, that.mediaItem) && Util.areEqual(manifest, that.manifest) + && Util.areEqual(liveConfiguration, that.liveConfiguration) && presentationStartTimeMs == that.presentationStartTimeMs && windowStartTimeMs == that.windowStartTimeMs + && elapsedRealtimeEpochOffsetMs == that.elapsedRealtimeEpochOffsetMs && isSeekable == that.isSeekable && isDynamic == that.isDynamic - && isLive == that.isLive + && isPlaceholder == that.isPlaceholder && defaultPositionUs == that.defaultPositionUs && durationUs == that.durationUs && firstPeriodIndex == that.firstPeriodIndex @@ -303,17 +387,22 @@ public boolean equals(@Nullable Object obj) { && positionInFirstPeriodUs == that.positionInFirstPeriodUs; } + // Provide backward compatibility for tag. @Override public int hashCode() { int result = 7; result = 31 * result + uid.hashCode(); - result = 31 * result + (tag == null ? 0 : tag.hashCode()); + result = 31 * result + mediaItem.hashCode(); result = 31 * result + (manifest == null ? 0 : manifest.hashCode()); + result = 31 * result + (liveConfiguration == null ? 0 : liveConfiguration.hashCode()); result = 31 * result + (int) (presentationStartTimeMs ^ (presentationStartTimeMs >>> 32)); result = 31 * result + (int) (windowStartTimeMs ^ (windowStartTimeMs >>> 32)); + result = + 31 * result + + (int) (elapsedRealtimeEpochOffsetMs ^ (elapsedRealtimeEpochOffsetMs >>> 32)); result = 31 * result + (isSeekable ? 1 : 0); result = 31 * result + (isDynamic ? 1 : 0); - result = 31 * result + (isLive ? 1 : 0); + result = 31 * result + (isPlaceholder ? 1 : 0); result = 31 * result + (int) (defaultPositionUs ^ (defaultPositionUs >>> 32)); result = 31 * result + (int) (durationUs ^ (durationUs >>> 32)); result = 31 * result + firstPeriodIndex; @@ -321,6 +410,132 @@ public int hashCode() { result = 31 * result + (int) (positionInFirstPeriodUs ^ (positionInFirstPeriodUs >>> 32)); return result; } + + // Bundleable implementation. 
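The live-window helpers just above pair naturally: isLive() reports whether a liveConfiguration is set, and getCurrentUnixTimeMs() applies the elapsedRealtimeEpochOffsetMs correction so the result follows the media origin server's clock. A small sketch that reads both off the current timeline, assuming the Player method getCurrentMediaItemIndex() from the same API generation:

    import com.google.android.exoplayer2.C;
    import com.google.android.exoplayer2.Player;
    import com.google.android.exoplayer2.Timeline;

    final class LiveWindowExample {
      /** Returns the origin server's notion of "now", or C.TIME_UNSET for non-live windows. */
      static long currentServerTimeMs(Player player) {
        Timeline timeline = player.getCurrentTimeline();
        if (timeline.isEmpty()) {
          return C.TIME_UNSET;
        }
        Timeline.Window window =
            timeline.getWindow(player.getCurrentMediaItemIndex(), new Timeline.Window());
        return window.isLive() ? window.getCurrentUnixTimeMs() : C.TIME_UNSET;
      }
    }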
+ + private static final String FIELD_MEDIA_ITEM = Util.intToStringMaxRadix(1); + private static final String FIELD_PRESENTATION_START_TIME_MS = Util.intToStringMaxRadix(2); + private static final String FIELD_WINDOW_START_TIME_MS = Util.intToStringMaxRadix(3); + private static final String FIELD_ELAPSED_REALTIME_EPOCH_OFFSET_MS = + Util.intToStringMaxRadix(4); + private static final String FIELD_IS_SEEKABLE = Util.intToStringMaxRadix(5); + private static final String FIELD_IS_DYNAMIC = Util.intToStringMaxRadix(6); + private static final String FIELD_LIVE_CONFIGURATION = Util.intToStringMaxRadix(7); + private static final String FIELD_IS_PLACEHOLDER = Util.intToStringMaxRadix(8); + private static final String FIELD_DEFAULT_POSITION_US = Util.intToStringMaxRadix(9); + private static final String FIELD_DURATION_US = Util.intToStringMaxRadix(10); + private static final String FIELD_FIRST_PERIOD_INDEX = Util.intToStringMaxRadix(11); + private static final String FIELD_LAST_PERIOD_INDEX = Util.intToStringMaxRadix(12); + private static final String FIELD_POSITION_IN_FIRST_PERIOD_US = Util.intToStringMaxRadix(13); + + /** + * {@inheritDoc} + * + *

      It omits the {@link #uid} and {@link #manifest} fields. The {@link #uid} of an instance + * restored by {@link #CREATOR} will be a fake {@link Object} and the {@link #manifest} of the + * instance will be {@code null}. + */ + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + if (!MediaItem.EMPTY.equals(mediaItem)) { + bundle.putBundle(FIELD_MEDIA_ITEM, mediaItem.toBundle()); + } + if (presentationStartTimeMs != C.TIME_UNSET) { + bundle.putLong(FIELD_PRESENTATION_START_TIME_MS, presentationStartTimeMs); + } + if (windowStartTimeMs != C.TIME_UNSET) { + bundle.putLong(FIELD_WINDOW_START_TIME_MS, windowStartTimeMs); + } + if (elapsedRealtimeEpochOffsetMs != C.TIME_UNSET) { + bundle.putLong(FIELD_ELAPSED_REALTIME_EPOCH_OFFSET_MS, elapsedRealtimeEpochOffsetMs); + } + if (isSeekable) { + bundle.putBoolean(FIELD_IS_SEEKABLE, isSeekable); + } + if (isDynamic) { + bundle.putBoolean(FIELD_IS_DYNAMIC, isDynamic); + } + + @Nullable MediaItem.LiveConfiguration liveConfiguration = this.liveConfiguration; + if (liveConfiguration != null) { + bundle.putBundle(FIELD_LIVE_CONFIGURATION, liveConfiguration.toBundle()); + } + if (isPlaceholder) { + bundle.putBoolean(FIELD_IS_PLACEHOLDER, isPlaceholder); + } + if (defaultPositionUs != 0) { + bundle.putLong(FIELD_DEFAULT_POSITION_US, defaultPositionUs); + } + if (durationUs != C.TIME_UNSET) { + bundle.putLong(FIELD_DURATION_US, durationUs); + } + if (firstPeriodIndex != 0) { + bundle.putInt(FIELD_FIRST_PERIOD_INDEX, firstPeriodIndex); + } + if (lastPeriodIndex != 0) { + bundle.putInt(FIELD_LAST_PERIOD_INDEX, lastPeriodIndex); + } + if (positionInFirstPeriodUs != 0) { + bundle.putLong(FIELD_POSITION_IN_FIRST_PERIOD_US, positionInFirstPeriodUs); + } + return bundle; + } + + /** + * Object that can restore {@link Period} from a {@link Bundle}. + * + *

      The {@link #uid} of a restored instance will be a fake {@link Object} and the {@link + * #manifest} of the instance will be {@code null}. + */ + public static final Creator CREATOR = Window::fromBundle; + + private static Window fromBundle(Bundle bundle) { + @Nullable Bundle mediaItemBundle = bundle.getBundle(FIELD_MEDIA_ITEM); + @Nullable + MediaItem mediaItem = + mediaItemBundle != null ? MediaItem.CREATOR.fromBundle(mediaItemBundle) : MediaItem.EMPTY; + long presentationStartTimeMs = + bundle.getLong(FIELD_PRESENTATION_START_TIME_MS, /* defaultValue= */ C.TIME_UNSET); + long windowStartTimeMs = + bundle.getLong(FIELD_WINDOW_START_TIME_MS, /* defaultValue= */ C.TIME_UNSET); + long elapsedRealtimeEpochOffsetMs = + bundle.getLong(FIELD_ELAPSED_REALTIME_EPOCH_OFFSET_MS, /* defaultValue= */ C.TIME_UNSET); + boolean isSeekable = bundle.getBoolean(FIELD_IS_SEEKABLE, /* defaultValue= */ false); + boolean isDynamic = bundle.getBoolean(FIELD_IS_DYNAMIC, /* defaultValue= */ false); + @Nullable Bundle liveConfigurationBundle = bundle.getBundle(FIELD_LIVE_CONFIGURATION); + @Nullable + MediaItem.LiveConfiguration liveConfiguration = + liveConfigurationBundle != null + ? MediaItem.LiveConfiguration.CREATOR.fromBundle(liveConfigurationBundle) + : null; + boolean isPlaceHolder = bundle.getBoolean(FIELD_IS_PLACEHOLDER, /* defaultValue= */ false); + long defaultPositionUs = bundle.getLong(FIELD_DEFAULT_POSITION_US, /* defaultValue= */ 0); + long durationUs = bundle.getLong(FIELD_DURATION_US, /* defaultValue= */ C.TIME_UNSET); + int firstPeriodIndex = bundle.getInt(FIELD_FIRST_PERIOD_INDEX, /* defaultValue= */ 0); + int lastPeriodIndex = bundle.getInt(FIELD_LAST_PERIOD_INDEX, /* defaultValue= */ 0); + long positionInFirstPeriodUs = + bundle.getLong(FIELD_POSITION_IN_FIRST_PERIOD_US, /* defaultValue= */ 0); + + Window window = new Window(); + window.set( + FAKE_WINDOW_UID, + mediaItem, + /* manifest= */ null, + presentationStartTimeMs, + windowStartTimeMs, + elapsedRealtimeEpochOffsetMs, + isSeekable, + isDynamic, + liveConfiguration, + defaultPositionUs, + durationUs, + firstPeriodIndex, + lastPeriodIndex, + positionInFirstPeriodUs); + window.isPlaceholder = isPlaceHolder; + return window; + } } /** @@ -334,7 +549,7 @@ public int hashCode() { *

 * [figure] Information defined by a
    * period */ - public static final class Period { + public static final class Period implements Bundleable { /** * An identifier for the period. Not necessarily unique. May be null if the ids of the period @@ -347,17 +562,25 @@ public static final class Period { */ @Nullable public Object uid; + /** The index of the window to which this period belongs. */ + public int windowIndex; + + /** The duration of this period in microseconds, or {@link C#TIME_UNSET} if unknown. */ + public long durationUs; + /** - * The index of the window to which this period belongs. + * The position of the start of this period relative to the start of the window to which it + * belongs, in microseconds. May be negative if the start of the period is not within the + * window. */ - public int windowIndex; + public long positionInWindowUs; /** - * The duration of this period in microseconds, or {@link C#TIME_UNSET} if unknown. + * Whether this period contains placeholder information because the real information has yet to + * be loaded. */ - public long durationUs; + public boolean isPlaceholder; - private long positionInWindowUs; private AdPlaybackState adPlaybackState; /** Creates a new instance with no ad playback state. */ @@ -380,13 +603,21 @@ public Period() { * period is not within the window. * @return This period, for convenience. */ + @CanIgnoreReturnValue public Period set( @Nullable Object id, @Nullable Object uid, int windowIndex, long durationUs, long positionInWindowUs) { - return set(id, uid, windowIndex, durationUs, positionInWindowUs, AdPlaybackState.NONE); + return set( + id, + uid, + windowIndex, + durationUs, + positionInWindowUs, + AdPlaybackState.NONE, + /* isPlaceholder= */ false); } /** @@ -404,34 +635,35 @@ public Period set( * period is not within the window. * @param adPlaybackState The state of the period's ads, or {@link AdPlaybackState#NONE} if * there are no ads. + * @param isPlaceholder Whether this period contains placeholder information because the real + * information has yet to be loaded. * @return This period, for convenience. */ + @CanIgnoreReturnValue public Period set( @Nullable Object id, @Nullable Object uid, int windowIndex, long durationUs, long positionInWindowUs, - AdPlaybackState adPlaybackState) { + AdPlaybackState adPlaybackState, + boolean isPlaceholder) { this.id = id; this.uid = uid; this.windowIndex = windowIndex; this.durationUs = durationUs; this.positionInWindowUs = positionInWindowUs; this.adPlaybackState = adPlaybackState; + this.isPlaceholder = isPlaceholder; return this; } - /** - * Returns the duration of the period in milliseconds, or {@link C#TIME_UNSET} if unknown. - */ + /** Returns the duration of the period in milliseconds, or {@link C#TIME_UNSET} if unknown. */ public long getDurationMs() { - return C.usToMs(durationUs); + return Util.usToMs(durationUs); } - /** - * Returns the duration of this period in microseconds, or {@link C#TIME_UNSET} if unknown. - */ + /** Returns the duration of this period in microseconds, or {@link C#TIME_UNSET} if unknown. */ public long getDurationUs() { return durationUs; } @@ -442,7 +674,7 @@ public long getDurationUs() { * window. */ public long getPositionInWindowMs() { - return C.usToMs(positionInWindowUs); + return Util.usToMs(positionInWindowUs); } /** @@ -454,13 +686,25 @@ public long getPositionInWindowUs() { return positionInWindowUs; } - /** - * Returns the number of ad groups in the period. - */ + /** Returns the opaque identifier for ads played with this period, or {@code null} if unset. 
*/ + @Nullable + public Object getAdsId() { + return adPlaybackState.adsId; + } + + /** Returns the number of ad groups in the period. */ public int getAdGroupCount() { return adPlaybackState.adGroupCount; } + /** + * Returns the number of removed ad groups in the period. Ad groups with indices between {@code + * 0} (inclusive) and {@code removedAdGroupCount} (exclusive) will be empty. + */ + public int getRemovedAdGroupCount() { + return adPlaybackState.removedAdGroupCount; + } + /** * Returns the time of the ad group at index {@code adGroupIndex} in the period, in * microseconds. @@ -470,7 +714,7 @@ public int getAdGroupCount() { * Period}, in microseconds, or {@link C#TIME_END_OF_SOURCE} for a post-roll ad group. */ public long getAdGroupTimeUs(int adGroupIndex) { - return adPlaybackState.adGroupTimesUs[adGroupIndex]; + return adPlaybackState.getAdGroup(adGroupIndex).timeUs; } /** @@ -482,7 +726,7 @@ public long getAdGroupTimeUs(int adGroupIndex) { * if no ads should be played. */ public int getFirstAdIndexToPlay(int adGroupIndex) { - return adPlaybackState.adGroups[adGroupIndex].getFirstAdIndexToPlay(); + return adPlaybackState.getAdGroup(adGroupIndex).getFirstAdIndexToPlay(); } /** @@ -496,23 +740,26 @@ public int getFirstAdIndexToPlay(int adGroupIndex) { * if the ad group does not have any ads remaining to play. */ public int getNextAdIndexToPlay(int adGroupIndex, int lastPlayedAdIndex) { - return adPlaybackState.adGroups[adGroupIndex].getNextAdIndexToPlay(lastPlayedAdIndex); + return adPlaybackState.getAdGroup(adGroupIndex).getNextAdIndexToPlay(lastPlayedAdIndex); } /** - * Returns whether the ad group at index {@code adGroupIndex} has been played. + * Returns whether all ads in the ad group at index {@code adGroupIndex} have been played, + * skipped or failed. * * @param adGroupIndex The ad group index. - * @return Whether the ad group at index {@code adGroupIndex} has been played. + * @return Whether all ads in the ad group at index {@code adGroupIndex} have been played, + * skipped or failed. */ public boolean hasPlayedAdGroup(int adGroupIndex) { - return !adPlaybackState.adGroups[adGroupIndex].hasUnplayedAds(); + return !adPlaybackState.getAdGroup(adGroupIndex).hasUnplayedAds(); } /** - * Returns the index of the ad group at or before {@code positionUs} in the period, if that ad - * group is unplayed. Returns {@link C#INDEX_UNSET} if the ad group at or before {@code - * positionUs} has no ads remaining to be played, or if there is no such ad group. + * Returns the index of the ad group at or before {@code positionUs} in the period that should + * be played before the content at {@code positionUs}. Returns {@link C#INDEX_UNSET} if the ad + * group at or before {@code positionUs} has no ads remaining to be played, or if there is no + * such ad group. * * @param positionUs The period position at or before which to find an ad group, in * microseconds. @@ -524,7 +771,7 @@ public int getAdGroupIndexForPositionUs(long positionUs) { /** * Returns the index of the next ad group after {@code positionUs} in the period that has ads - * remaining to be played. Returns {@link C#INDEX_UNSET} if there is no such ad group. + * that should be played. Returns {@link C#INDEX_UNSET} if there is no such ad group. * * @param positionUs The period position after which to find an ad group, in microseconds. * @return The index of the ad group, or {@link C#INDEX_UNSET}. 
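The ad-group accessors above are typically combined when deciding whether an ad break still has to be played before or after a given content position. A small sketch using only the accessors defined in this class:

    import com.google.android.exoplayer2.C;
    import com.google.android.exoplayer2.Timeline;

    final class AdGroupExample {
      /** Counts ad groups at or after positionUs that still have unplayed ads. */
      static int countUpcomingUnplayedAdGroups(Timeline timeline, int periodIndex, long positionUs) {
        Timeline.Period period = timeline.getPeriod(periodIndex, new Timeline.Period());
        int count = 0;
        // Indices below getRemovedAdGroupCount() are guaranteed to be empty, so skip them.
        for (int i = period.getRemovedAdGroupCount(); i < period.getAdGroupCount(); i++) {
          long adGroupTimeUs = period.getAdGroupTimeUs(i);
          boolean isUpcoming = adGroupTimeUs == C.TIME_END_OF_SOURCE || adGroupTimeUs >= positionUs;
          if (isUpcoming && !period.hasPlayedAdGroup(i)) {
            count++;
          }
        }
        return count;
      }
    }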
@@ -534,40 +781,42 @@ public int getAdGroupIndexAfterPositionUs(long positionUs) { } /** - * Returns the number of ads in the ad group at index {@code adGroupIndex}, or - * {@link C#LENGTH_UNSET} if not yet known. + * Returns the number of ads in the ad group at index {@code adGroupIndex}, or {@link + * C#LENGTH_UNSET} if not yet known. * * @param adGroupIndex The ad group index. * @return The number of ads in the ad group, or {@link C#LENGTH_UNSET} if not yet known. */ public int getAdCountInAdGroup(int adGroupIndex) { - return adPlaybackState.adGroups[adGroupIndex].count; + return adPlaybackState.getAdGroup(adGroupIndex).count; } /** - * Returns whether the URL for the specified ad is known. + * Returns the duration of the ad at index {@code adIndexInAdGroup} in the ad group at {@code + * adGroupIndex}, in microseconds, or {@link C#TIME_UNSET} if not yet known. * * @param adGroupIndex The ad group index. * @param adIndexInAdGroup The ad index in the ad group. - * @return Whether the URL for the specified ad is known. + * @return The duration of the ad, or {@link C#TIME_UNSET} if not yet known. */ - public boolean isAdAvailable(int adGroupIndex, int adIndexInAdGroup) { - AdPlaybackState.AdGroup adGroup = adPlaybackState.adGroups[adGroupIndex]; - return adGroup.count != C.LENGTH_UNSET - && adGroup.states[adIndexInAdGroup] != AdPlaybackState.AD_STATE_UNAVAILABLE; + public long getAdDurationUs(int adGroupIndex, int adIndexInAdGroup) { + AdPlaybackState.AdGroup adGroup = adPlaybackState.getAdGroup(adGroupIndex); + return adGroup.count != C.LENGTH_UNSET ? adGroup.durationsUs[adIndexInAdGroup] : C.TIME_UNSET; } /** - * Returns the duration of the ad at index {@code adIndexInAdGroup} in the ad group at - * {@code adGroupIndex}, in microseconds, or {@link C#TIME_UNSET} if not yet known. + * Returns the state of the ad at index {@code adIndexInAdGroup} in the ad group at {@code + * adGroupIndex}, or {@link AdPlaybackState#AD_STATE_UNAVAILABLE} if not yet known. * * @param adGroupIndex The ad group index. - * @param adIndexInAdGroup The ad index in the ad group. - * @return The duration of the ad, or {@link C#TIME_UNSET} if not yet known. + * @return The state of the ad, or {@link AdPlaybackState#AD_STATE_UNAVAILABLE} if not yet + * known. */ - public long getAdDurationUs(int adGroupIndex, int adIndexInAdGroup) { - AdPlaybackState.AdGroup adGroup = adPlaybackState.adGroups[adGroupIndex]; - return adGroup.count != C.LENGTH_UNSET ? adGroup.durationsUs[adIndexInAdGroup] : C.TIME_UNSET; + public int getAdState(int adGroupIndex, int adIndexInAdGroup) { + AdPlaybackState.AdGroup adGroup = adPlaybackState.getAdGroup(adGroupIndex); + return adGroup.count != C.LENGTH_UNSET + ? adGroup.states[adIndexInAdGroup] + : AD_STATE_UNAVAILABLE; } /** @@ -578,6 +827,28 @@ public long getAdResumePositionUs() { return adPlaybackState.adResumePositionUs; } + /** + * Returns whether the ad group at index {@code adGroupIndex} is server-side inserted and part + * of the content stream. + * + * @param adGroupIndex The ad group index. + * @return Whether this ad group is server-side inserted and part of the content stream. + */ + public boolean isServerSideInsertedAdGroup(int adGroupIndex) { + return adPlaybackState.getAdGroup(adGroupIndex).isServerSideInserted; + } + + /** + * Returns the offset in microseconds which should be added to the content stream when resuming + * playback after the specified ad group. + * + * @param adGroupIndex The ad group index. 
+ * @return The offset that should be added to the content stream, in microseconds. + */ + public long getContentResumeOffsetUs(int adGroupIndex) { + return adPlaybackState.getAdGroup(adGroupIndex).contentResumeOffsetUs; + } + @Override public boolean equals(@Nullable Object obj) { if (this == obj) { @@ -592,6 +863,7 @@ public boolean equals(@Nullable Object obj) { && windowIndex == that.windowIndex && durationUs == that.durationUs && positionInWindowUs == that.positionInWindowUs + && isPlaceholder == that.isPlaceholder && Util.areEqual(adPlaybackState, that.adPlaybackState); } @@ -603,9 +875,75 @@ public int hashCode() { result = 31 * result + windowIndex; result = 31 * result + (int) (durationUs ^ (durationUs >>> 32)); result = 31 * result + (int) (positionInWindowUs ^ (positionInWindowUs >>> 32)); - result = 31 * result + (adPlaybackState == null ? 0 : adPlaybackState.hashCode()); + result = 31 * result + (isPlaceholder ? 1 : 0); + result = 31 * result + adPlaybackState.hashCode(); return result; } + + // Bundleable implementation. + + private static final String FIELD_WINDOW_INDEX = Util.intToStringMaxRadix(0); + private static final String FIELD_DURATION_US = Util.intToStringMaxRadix(1); + private static final String FIELD_POSITION_IN_WINDOW_US = Util.intToStringMaxRadix(2); + private static final String FIELD_PLACEHOLDER = Util.intToStringMaxRadix(3); + private static final String FIELD_AD_PLAYBACK_STATE = Util.intToStringMaxRadix(4); + + /** + * {@inheritDoc} + * + *
      It omits the {@link #id} and {@link #uid} fields so these fields of an instance restored + * by {@link #CREATOR} will always be {@code null}. + */ + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + if (windowIndex != 0) { + bundle.putInt(FIELD_WINDOW_INDEX, windowIndex); + } + if (durationUs != C.TIME_UNSET) { + bundle.putLong(FIELD_DURATION_US, durationUs); + } + if (positionInWindowUs != 0) { + bundle.putLong(FIELD_POSITION_IN_WINDOW_US, positionInWindowUs); + } + if (isPlaceholder) { + bundle.putBoolean(FIELD_PLACEHOLDER, isPlaceholder); + } + if (!adPlaybackState.equals(AdPlaybackState.NONE)) { + bundle.putBundle(FIELD_AD_PLAYBACK_STATE, adPlaybackState.toBundle()); + } + return bundle; + } + + /** + * Object that can restore {@link Period} from a {@link Bundle}. + * + *
      The {@link #id} and {@link #uid} of restored instances will always be {@code null}. + */ + public static final Creator CREATOR = Period::fromBundle; + + private static Period fromBundle(Bundle bundle) { + int windowIndex = bundle.getInt(FIELD_WINDOW_INDEX, /* defaultValue= */ 0); + long durationUs = bundle.getLong(FIELD_DURATION_US, /* defaultValue= */ C.TIME_UNSET); + long positionInWindowUs = bundle.getLong(FIELD_POSITION_IN_WINDOW_US, /* defaultValue= */ 0); + boolean isPlaceholder = bundle.getBoolean(FIELD_PLACEHOLDER, /* defaultValue= */ false); + @Nullable Bundle adPlaybackStateBundle = bundle.getBundle(FIELD_AD_PLAYBACK_STATE); + AdPlaybackState adPlaybackState = + adPlaybackStateBundle != null + ? AdPlaybackState.CREATOR.fromBundle(adPlaybackStateBundle) + : AdPlaybackState.NONE; + + Period period = new Period(); + period.set( + /* id= */ null, + /* uid= */ null, + windowIndex, + durationUs, + positionInWindowUs, + adPlaybackState, + isPlaceholder); + return period; + } } /** An empty timeline. */ @@ -643,16 +981,14 @@ public Object getUidOfPeriod(int periodIndex) { } }; - /** - * Returns whether the timeline is empty. - */ + protected Timeline() {} + + /** Returns whether the timeline is empty. */ public final boolean isEmpty() { return getWindowCount() == 0; } - /** - * Returns the number of windows in the timeline. - */ + /** Returns the number of windows in the timeline. */ public abstract int getWindowCount(); /** @@ -664,17 +1000,19 @@ public final boolean isEmpty() { * @param shuffleModeEnabled Whether shuffling is enabled. * @return The index of the next window, or {@link C#INDEX_UNSET} if this is the last window. */ - public int getNextWindowIndex(int windowIndex, @Player.RepeatMode int repeatMode, - boolean shuffleModeEnabled) { + public int getNextWindowIndex( + int windowIndex, @Player.RepeatMode int repeatMode, boolean shuffleModeEnabled) { switch (repeatMode) { case Player.REPEAT_MODE_OFF: - return windowIndex == getLastWindowIndex(shuffleModeEnabled) ? C.INDEX_UNSET + return windowIndex == getLastWindowIndex(shuffleModeEnabled) + ? C.INDEX_UNSET : windowIndex + 1; case Player.REPEAT_MODE_ONE: return windowIndex; case Player.REPEAT_MODE_ALL: return windowIndex == getLastWindowIndex(shuffleModeEnabled) - ? getFirstWindowIndex(shuffleModeEnabled) : windowIndex + 1; + ? getFirstWindowIndex(shuffleModeEnabled) + : windowIndex + 1; default: throw new IllegalStateException(); } @@ -689,17 +1027,19 @@ public int getNextWindowIndex(int windowIndex, @Player.RepeatMode int repeatMode * @param shuffleModeEnabled Whether shuffling is enabled. * @return The index of the previous window, or {@link C#INDEX_UNSET} if this is the first window. */ - public int getPreviousWindowIndex(int windowIndex, @Player.RepeatMode int repeatMode, - boolean shuffleModeEnabled) { + public int getPreviousWindowIndex( + int windowIndex, @Player.RepeatMode int repeatMode, boolean shuffleModeEnabled) { switch (repeatMode) { case Player.REPEAT_MODE_OFF: - return windowIndex == getFirstWindowIndex(shuffleModeEnabled) ? C.INDEX_UNSET + return windowIndex == getFirstWindowIndex(shuffleModeEnabled) + ? C.INDEX_UNSET : windowIndex - 1; case Player.REPEAT_MODE_ONE: return windowIndex; case Player.REPEAT_MODE_ALL: return windowIndex == getFirstWindowIndex(shuffleModeEnabled) - ? getLastWindowIndex(shuffleModeEnabled) : windowIndex - 1; + ? 
getLastWindowIndex(shuffleModeEnabled) + : windowIndex - 1; default: throw new IllegalStateException(); } @@ -740,12 +1080,6 @@ public final Window getWindow(int windowIndex, Window window) { return getWindow(windowIndex, window, /* defaultPositionProjectionUs= */ 0); } - /** @deprecated Use {@link #getWindow(int, Window)} instead. Tags will always be set. */ - @Deprecated - public final Window getWindow(int windowIndex, Window window, boolean setTag) { - return getWindow(windowIndex, window, /* defaultPositionProjectionUs= */ 0); - } - /** * Populates a {@link Window} with data for the window at the specified index. * @@ -758,9 +1092,7 @@ public final Window getWindow(int windowIndex, Window window, boolean setTag) { public abstract Window getWindow( int windowIndex, Window window, long defaultPositionProjectionUs); - /** - * Returns the number of periods in the timeline. - */ + /** Returns the number of periods in the timeline. */ public abstract int getPeriodCount(); /** @@ -774,8 +1106,12 @@ public abstract Window getWindow( * @param shuffleModeEnabled Whether shuffling is enabled. * @return The index of the next period, or {@link C#INDEX_UNSET} if this is the last period. */ - public final int getNextPeriodIndex(int periodIndex, Period period, Window window, - @Player.RepeatMode int repeatMode, boolean shuffleModeEnabled) { + public final int getNextPeriodIndex( + int periodIndex, + Period period, + Window window, + @Player.RepeatMode int repeatMode, + boolean shuffleModeEnabled) { int windowIndex = getPeriod(periodIndex, period).windowIndex; if (getWindow(windowIndex, window).lastPeriodIndex == periodIndex) { int nextWindowIndex = getNextWindowIndex(windowIndex, repeatMode, shuffleModeEnabled); @@ -788,8 +1124,8 @@ public final int getNextPeriodIndex(int periodIndex, Period period, Window windo } /** - * Returns whether the given period is the last period of the timeline depending on the - * {@code repeatMode} and whether shuffling is enabled. + * Returns whether the given period is the last period of the timeline depending on the {@code + * repeatMode} and whether shuffling is enabled. * * @param periodIndex A period index. * @param period A {@link Period} to be used internally. Must not be null. @@ -798,25 +1134,59 @@ public final int getNextPeriodIndex(int periodIndex, Period period, Window windo * @param shuffleModeEnabled Whether shuffling is enabled. * @return Whether the period of the given index is the last period of the timeline. */ - public final boolean isLastPeriod(int periodIndex, Period period, Window window, - @Player.RepeatMode int repeatMode, boolean shuffleModeEnabled) { + public final boolean isLastPeriod( + int periodIndex, + Period period, + Window window, + @Player.RepeatMode int repeatMode, + boolean shuffleModeEnabled) { return getNextPeriodIndex(periodIndex, period, window, repeatMode, shuffleModeEnabled) == C.INDEX_UNSET; } /** - * Calls {@link #getPeriodPosition(Window, Period, int, long, long)} with a zero default position - * projection. + * @deprecated Use {@link #getPeriodPositionUs(Window, Period, int, long)} instead. + */ + @Deprecated + @InlineMe(replacement = "this.getPeriodPositionUs(window, period, windowIndex, windowPositionUs)") + public final Pair getPeriodPosition( + Window window, Period period, int windowIndex, long windowPositionUs) { + return getPeriodPositionUs(window, period, windowIndex, windowPositionUs); + } + /** + * @deprecated Use {@link #getPeriodPositionUs(Window, Period, int, long, long)} instead. 
*/ + @Deprecated + @Nullable + @InlineMe( + replacement = + "this.getPeriodPositionUs(" + + "window, period, windowIndex, windowPositionUs, defaultPositionProjectionUs)") public final Pair getPeriodPosition( + Window window, + Period period, + int windowIndex, + long windowPositionUs, + long defaultPositionProjectionUs) { + return getPeriodPositionUs( + window, period, windowIndex, windowPositionUs, defaultPositionProjectionUs); + } + + /** + * Calls {@link #getPeriodPositionUs(Window, Period, int, long)} with a zero default position + * projection. + */ + public final Pair getPeriodPositionUs( Window window, Period period, int windowIndex, long windowPositionUs) { return Assertions.checkNotNull( - getPeriodPosition( + getPeriodPositionUs( window, period, windowIndex, windowPositionUs, /* defaultPositionProjectionUs= */ 0)); } /** - * Converts (windowIndex, windowPositionUs) to the corresponding (periodUid, periodPositionUs). + * Converts {@code (windowIndex, windowPositionUs)} to the corresponding {@code (periodUid, + * periodPositionUs)}. The returned {@code periodPositionUs} is constrained to be non-negative, + * and to be less than the containing period's duration if it is known. * * @param window A {@link Window} that may be overwritten. * @param period A {@link Period} that may be overwritten. @@ -830,7 +1200,7 @@ public final Pair getPeriodPosition( * position could not be projected by {@code defaultPositionProjectionUs}. */ @Nullable - public final Pair getPeriodPosition( + public final Pair getPeriodPositionUs( Window window, Period period, int windowIndex, @@ -845,13 +1215,20 @@ public final Pair getPeriodPosition( } } int periodIndex = window.firstPeriodIndex; - long periodPositionUs = window.getPositionInFirstPeriodUs() + windowPositionUs; - long periodDurationUs = getPeriod(periodIndex, period, /* setIds= */ true).getDurationUs(); - while (periodDurationUs != C.TIME_UNSET && periodPositionUs >= periodDurationUs - && periodIndex < window.lastPeriodIndex) { - periodPositionUs -= periodDurationUs; - periodDurationUs = getPeriod(++periodIndex, period, /* setIds= */ true).getDurationUs(); + getPeriod(periodIndex, period); + while (periodIndex < window.lastPeriodIndex + && period.positionInWindowUs != windowPositionUs + && getPeriod(periodIndex + 1, period).positionInWindowUs <= windowPositionUs) { + periodIndex++; + } + getPeriod(periodIndex, period, /* setIds= */ true); + long periodPositionUs = windowPositionUs - period.positionInWindowUs; + // The period positions must be less than the period duration, if it is known. + if (period.durationUs != C.TIME_UNSET) { + periodPositionUs = min(periodPositionUs, period.durationUs - 1); } + // Period positions cannot be negative. 
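+    // For example, a zero-duration period makes the clamp above evaluate min(x, -1) == -1; the
+    // max() below brings such values back to 0 so callers never see a negative period position.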
+ periodPositionUs = max(0, periodPositionUs); return Pair.create(Assertions.checkNotNull(period.uid), periodPositionUs); } @@ -934,6 +1311,27 @@ public boolean equals(@Nullable Object obj) { return false; } } + + // Check shuffled order + int windowIndex = getFirstWindowIndex(/* shuffleModeEnabled= */ true); + if (windowIndex != other.getFirstWindowIndex(/* shuffleModeEnabled= */ true)) { + return false; + } + int lastWindowIndex = getLastWindowIndex(/* shuffleModeEnabled= */ true); + if (lastWindowIndex != other.getLastWindowIndex(/* shuffleModeEnabled= */ true)) { + return false; + } + while (windowIndex != lastWindowIndex) { + int nextWindowIndex = + getNextWindowIndex(windowIndex, Player.REPEAT_MODE_OFF, /* shuffleModeEnabled= */ true); + if (nextWindowIndex + != other.getNextWindowIndex( + windowIndex, Player.REPEAT_MODE_OFF, /* shuffleModeEnabled= */ true)) { + return false; + } + windowIndex = nextWindowIndex; + } + return true; } @@ -950,6 +1348,265 @@ public int hashCode() { for (int i = 0; i < getPeriodCount(); i++) { result = 31 * result + getPeriod(i, period, /* setIds= */ true).hashCode(); } + + for (int windowIndex = getFirstWindowIndex(true); + windowIndex != C.INDEX_UNSET; + windowIndex = getNextWindowIndex(windowIndex, Player.REPEAT_MODE_OFF, true)) { + result = 31 * result + windowIndex; + } + return result; } + + // Bundleable implementation. + + private static final String FIELD_WINDOWS = Util.intToStringMaxRadix(0); + private static final String FIELD_PERIODS = Util.intToStringMaxRadix(1); + private static final String FIELD_SHUFFLED_WINDOW_INDICES = Util.intToStringMaxRadix(2); + + /** + * {@inheritDoc} + * + *
      The {@link #getWindow(int, Window)} windows} and {@link #getPeriod(int, Period) periods} of + * an instance restored by {@link #CREATOR} may have missing fields as described in {@link + * Window#toBundle()} and {@link Period#toBundle()}. + */ + @Override + public final Bundle toBundle() { + List windowBundles = new ArrayList<>(); + int windowCount = getWindowCount(); + Window window = new Window(); + for (int i = 0; i < windowCount; i++) { + windowBundles.add(getWindow(i, window, /* defaultPositionProjectionUs= */ 0).toBundle()); + } + + List periodBundles = new ArrayList<>(); + int periodCount = getPeriodCount(); + Period period = new Period(); + for (int i = 0; i < periodCount; i++) { + periodBundles.add(getPeriod(i, period, /* setIds= */ false).toBundle()); + } + + int[] shuffledWindowIndices = new int[windowCount]; + if (windowCount > 0) { + shuffledWindowIndices[0] = getFirstWindowIndex(/* shuffleModeEnabled= */ true); + } + for (int i = 1; i < windowCount; i++) { + shuffledWindowIndices[i] = + getNextWindowIndex( + shuffledWindowIndices[i - 1], Player.REPEAT_MODE_OFF, /* shuffleModeEnabled= */ true); + } + + Bundle bundle = new Bundle(); + BundleUtil.putBinder(bundle, FIELD_WINDOWS, new BundleListRetriever(windowBundles)); + BundleUtil.putBinder(bundle, FIELD_PERIODS, new BundleListRetriever(periodBundles)); + bundle.putIntArray(FIELD_SHUFFLED_WINDOW_INDICES, shuffledWindowIndices); + return bundle; + } + + /** + * Returns a {@link Bundle} containing just the specified {@link Window}. + * + *
      The {@link #getWindow(int, Window)} windows} and {@link #getPeriod(int, Period) periods} of + * an instance restored by {@link #CREATOR} may have missing fields as described in {@link + * Window#toBundle()} and {@link Period#toBundle()}. + * + * @param windowIndex The index of the {@link Window} to include in the {@link Bundle}. + */ + public final Bundle toBundleWithOneWindowOnly(int windowIndex) { + Window window = getWindow(windowIndex, new Window(), /* defaultPositionProjectionUs= */ 0); + + List periodBundles = new ArrayList<>(); + Period period = new Period(); + for (int i = window.firstPeriodIndex; i <= window.lastPeriodIndex; i++) { + getPeriod(i, period, /* setIds= */ false); + period.windowIndex = 0; + periodBundles.add(period.toBundle()); + } + + window.lastPeriodIndex = window.lastPeriodIndex - window.firstPeriodIndex; + window.firstPeriodIndex = 0; + Bundle windowBundle = window.toBundle(); + + Bundle bundle = new Bundle(); + BundleUtil.putBinder( + bundle, FIELD_WINDOWS, new BundleListRetriever(ImmutableList.of(windowBundle))); + BundleUtil.putBinder(bundle, FIELD_PERIODS, new BundleListRetriever(periodBundles)); + bundle.putIntArray(FIELD_SHUFFLED_WINDOW_INDICES, new int[] {0}); + return bundle; + } + + /** + * Object that can restore a {@link Timeline} from a {@link Bundle}. + * + *
      The {@link #getWindow(int, Window)} windows} and {@link #getPeriod(int, Period) periods} of + * a restored instance may have missing fields as described in {@link Window#CREATOR} and {@link + * Period#CREATOR}. + */ + public static final Creator CREATOR = Timeline::fromBundle; + + private static Timeline fromBundle(Bundle bundle) { + ImmutableList windows = + fromBundleListRetriever(Window.CREATOR, BundleUtil.getBinder(bundle, FIELD_WINDOWS)); + ImmutableList periods = + fromBundleListRetriever(Period.CREATOR, BundleUtil.getBinder(bundle, FIELD_PERIODS)); + @Nullable int[] shuffledWindowIndices = bundle.getIntArray(FIELD_SHUFFLED_WINDOW_INDICES); + return new RemotableTimeline( + windows, + periods, + shuffledWindowIndices == null + ? generateUnshuffledIndices(windows.size()) + : shuffledWindowIndices); + } + + private static ImmutableList fromBundleListRetriever( + Creator creator, @Nullable IBinder binder) { + if (binder == null) { + return ImmutableList.of(); + } + ImmutableList.Builder builder = new ImmutableList.Builder<>(); + List bundleList = BundleListRetriever.getList(binder); + for (int i = 0; i < bundleList.size(); i++) { + builder.add(creator.fromBundle(bundleList.get(i))); + } + return builder.build(); + } + + private static int[] generateUnshuffledIndices(int n) { + int[] indices = new int[n]; + for (int i = 0; i < n; i++) { + indices[i] = i; + } + return indices; + } + + /** + * A concrete class of {@link Timeline} to restore a {@link Timeline} instance from a {@link + * Bundle} sent by another process via {@link IBinder}. + */ + public static final class RemotableTimeline extends Timeline { + + private final ImmutableList windows; + private final ImmutableList periods; + private final int[] shuffledWindowIndices; + private final int[] windowIndicesInShuffled; + + public RemotableTimeline( + ImmutableList windows, ImmutableList periods, int[] shuffledWindowIndices) { + checkArgument(windows.size() == shuffledWindowIndices.length); + this.windows = windows; + this.periods = periods; + this.shuffledWindowIndices = shuffledWindowIndices; + windowIndicesInShuffled = new int[shuffledWindowIndices.length]; + for (int i = 0; i < shuffledWindowIndices.length; i++) { + windowIndicesInShuffled[shuffledWindowIndices[i]] = i; + } + } + + @Override + public int getWindowCount() { + return windows.size(); + } + + @Override + public Window getWindow(int windowIndex, Window window, long defaultPositionProjectionUs) { + Window w = windows.get(windowIndex); + window.set( + w.uid, + w.mediaItem, + w.manifest, + w.presentationStartTimeMs, + w.windowStartTimeMs, + w.elapsedRealtimeEpochOffsetMs, + w.isSeekable, + w.isDynamic, + w.liveConfiguration, + w.defaultPositionUs, + w.durationUs, + w.firstPeriodIndex, + w.lastPeriodIndex, + w.positionInFirstPeriodUs); + window.isPlaceholder = w.isPlaceholder; + return window; + } + + @Override + public int getNextWindowIndex( + int windowIndex, @Player.RepeatMode int repeatMode, boolean shuffleModeEnabled) { + if (repeatMode == Player.REPEAT_MODE_ONE) { + return windowIndex; + } + if (windowIndex == getLastWindowIndex(shuffleModeEnabled)) { + return repeatMode == Player.REPEAT_MODE_ALL + ? getFirstWindowIndex(shuffleModeEnabled) + : C.INDEX_UNSET; + } + return shuffleModeEnabled + ? 
shuffledWindowIndices[windowIndicesInShuffled[windowIndex] + 1] + : windowIndex + 1; + } + + @Override + public int getPreviousWindowIndex( + int windowIndex, @Player.RepeatMode int repeatMode, boolean shuffleModeEnabled) { + if (repeatMode == Player.REPEAT_MODE_ONE) { + return windowIndex; + } + if (windowIndex == getFirstWindowIndex(shuffleModeEnabled)) { + return repeatMode == Player.REPEAT_MODE_ALL + ? getLastWindowIndex(shuffleModeEnabled) + : C.INDEX_UNSET; + } + return shuffleModeEnabled + ? shuffledWindowIndices[windowIndicesInShuffled[windowIndex] - 1] + : windowIndex - 1; + } + + @Override + public int getLastWindowIndex(boolean shuffleModeEnabled) { + if (isEmpty()) { + return C.INDEX_UNSET; + } + return shuffleModeEnabled + ? shuffledWindowIndices[getWindowCount() - 1] + : getWindowCount() - 1; + } + + @Override + public int getFirstWindowIndex(boolean shuffleModeEnabled) { + if (isEmpty()) { + return C.INDEX_UNSET; + } + return shuffleModeEnabled ? shuffledWindowIndices[0] : 0; + } + + @Override + public int getPeriodCount() { + return periods.size(); + } + + @Override + public Period getPeriod(int periodIndex, Period period, boolean setIds) { + Period p = periods.get(periodIndex); + period.set( + p.id, + p.uid, + p.windowIndex, + p.durationUs, + p.positionInWindowUs, + p.adPlaybackState, + p.isPlaceholder); + return period; + } + + @Override + public int getIndexOfPeriod(Object uid) { + throw new UnsupportedOperationException(); + } + + @Override + public Object getUidOfPeriod(int periodIndex) { + throw new UnsupportedOperationException(); + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/Tracks.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/Tracks.java new file mode 100644 index 0000000000..ed7ff22822 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/Tracks.java @@ -0,0 +1,379 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.BundleableUtil.toBundleArrayList; + +import android.os.Bundle; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.source.TrackGroup; +import com.google.android.exoplayer2.util.BundleableUtil; +import com.google.android.exoplayer2.util.Util; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.primitives.Booleans; +import java.util.Arrays; +import java.util.List; + +/** Information about groups of tracks. */ +public final class Tracks implements Bundleable { + + /** + * Information about a single group of tracks, including the underlying {@link TrackGroup}, the + * level to which each track is supported by the player, and whether any of the tracks are + * selected. 
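+   *
+   * <p>A minimal, illustrative read of this API (assuming a {@code Tracks} instance, e.g. from
+   * {@code Player.getCurrentTracks()}; the loop body is a placeholder):
+   *
+   * <pre>{@code
+   * for (Tracks.Group group : tracks.getGroups()) {
+   *   if (group.getType() == C.TRACK_TYPE_AUDIO && group.isSupported()) {
+   *     for (int i = 0; i < group.length; i++) {
+   *       Format format = group.getTrackFormat(i);
+   *       boolean selected = group.isTrackSelected(i);
+   *       // e.g. populate an audio-track selection menu entry.
+   *     }
+   *   }
+   * }
+   * }</pre>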
+ */ + public static final class Group implements Bundleable { + + /** The number of tracks in the group. */ + public final int length; + + private final TrackGroup mediaTrackGroup; + private final boolean adaptiveSupported; + private final @C.FormatSupport int[] trackSupport; + private final boolean[] trackSelected; + + /** + * Constructs an instance. + * + * @param mediaTrackGroup The underlying {@link TrackGroup} defined by the media. + * @param adaptiveSupported Whether the player supports adaptive selections containing more than + * one track in the group. + * @param trackSupport The {@link C.FormatSupport} of each track in the group. + * @param trackSelected Whether each track in the {@code trackGroup} is selected. + */ + public Group( + TrackGroup mediaTrackGroup, + boolean adaptiveSupported, + @C.FormatSupport int[] trackSupport, + boolean[] trackSelected) { + length = mediaTrackGroup.length; + checkArgument(length == trackSupport.length && length == trackSelected.length); + this.mediaTrackGroup = mediaTrackGroup; + this.adaptiveSupported = adaptiveSupported && length > 1; + this.trackSupport = trackSupport.clone(); + this.trackSelected = trackSelected.clone(); + } + + /** + * Returns the underlying {@link TrackGroup} defined by the media. + * + *
      Unlike this class, {@link TrackGroup} only contains information defined by the media + * itself, and does not contain runtime information such as which tracks are supported and + * currently selected. This makes it suitable for use as a {@code key} in certain {@code (key, + * value)} data structures. + */ + public TrackGroup getMediaTrackGroup() { + return mediaTrackGroup; + } + + /** + * Returns the {@link Format} for a specified track. + * + * @param trackIndex The index of the track in the group. + * @return The {@link Format} of the track. + */ + public Format getTrackFormat(int trackIndex) { + return mediaTrackGroup.getFormat(trackIndex); + } + + /** + * Returns the level of support for a specified track. + * + * @param trackIndex The index of the track in the group. + * @return The {@link C.FormatSupport} of the track. + */ + public @C.FormatSupport int getTrackSupport(int trackIndex) { + return trackSupport[trackIndex]; + } + + /** + * Returns whether a specified track is supported for playback, without exceeding the advertised + * capabilities of the device. Equivalent to {@code isTrackSupported(trackIndex, false)}. + * + * @param trackIndex The index of the track in the group. + * @return True if the track's format can be played, false otherwise. + */ + public boolean isTrackSupported(int trackIndex) { + return isTrackSupported(trackIndex, /* allowExceedsCapabilities= */ false); + } + + /** + * Returns whether a specified track is supported for playback. + * + * @param trackIndex The index of the track in the group. + * @param allowExceedsCapabilities Whether to consider the track as supported if it has a + * supported {@link Format#sampleMimeType MIME type}, but otherwise exceeds the advertised + * capabilities of the device. For example, a video track for which there's a corresponding + * decoder whose maximum advertised resolution is exceeded by the resolution of the track. + * Such tracks may be playable in some cases. + * @return True if the track's format can be played, false otherwise. + */ + public boolean isTrackSupported(int trackIndex, boolean allowExceedsCapabilities) { + return trackSupport[trackIndex] == C.FORMAT_HANDLED + || (allowExceedsCapabilities + && trackSupport[trackIndex] == C.FORMAT_EXCEEDS_CAPABILITIES); + } + + /** Returns whether at least one track in the group is selected for playback. */ + public boolean isSelected() { + return Booleans.contains(trackSelected, true); + } + + /** Returns whether adaptive selections containing more than one track are supported. */ + public boolean isAdaptiveSupported() { + return adaptiveSupported; + } + + /** + * Returns whether at least one track in the group is supported for playback, without exceeding + * the advertised capabilities of the device. Equivalent to {@code isSupported(false)}. + */ + public boolean isSupported() { + return isSupported(/* allowExceedsCapabilities= */ false); + } + + /** + * Returns whether at least one track in the group is supported for playback. + * + * @param allowExceedsCapabilities Whether to consider a track as supported if it has a + * supported {@link Format#sampleMimeType MIME type}, but otherwise exceeds the advertised + * capabilities of the device. For example, a video track for which there's a corresponding + * decoder whose maximum advertised resolution is exceeded by the resolution of the track. + * Such tracks may be playable in some cases. 
+ */ + public boolean isSupported(boolean allowExceedsCapabilities) { + for (int i = 0; i < trackSupport.length; i++) { + if (isTrackSupported(i, allowExceedsCapabilities)) { + return true; + } + } + return false; + } + + /** + * Returns whether a specified track is selected for playback. + * + *
      Note that multiple tracks in the group may be selected. This is common in adaptive + * streaming, where tracks of different qualities are selected and the player switches between + * them during playback (e.g., based on the available network bandwidth). + * + *
      This class doesn't provide a way to determine which of the selected tracks is currently + * playing, however some player implementations have ways of getting such information. For + * example, ExoPlayer provides this information via {@code ExoTrackSelection.getSelectedFormat}. + * + * @param trackIndex The index of the track in the group. + * @return True if the track is selected, false otherwise. + */ + public boolean isTrackSelected(int trackIndex) { + return trackSelected[trackIndex]; + } + + /** Returns the {@link C.TrackType} of the group. */ + public @C.TrackType int getType() { + return mediaTrackGroup.type; + } + + @Override + public boolean equals(@Nullable Object other) { + if (this == other) { + return true; + } + if (other == null || getClass() != other.getClass()) { + return false; + } + Group that = (Group) other; + return adaptiveSupported == that.adaptiveSupported + && mediaTrackGroup.equals(that.mediaTrackGroup) + && Arrays.equals(trackSupport, that.trackSupport) + && Arrays.equals(trackSelected, that.trackSelected); + } + + @Override + public int hashCode() { + int result = mediaTrackGroup.hashCode(); + result = 31 * result + (adaptiveSupported ? 1 : 0); + result = 31 * result + Arrays.hashCode(trackSupport); + result = 31 * result + Arrays.hashCode(trackSelected); + return result; + } + + // Bundleable implementation. + + private static final String FIELD_TRACK_GROUP = Util.intToStringMaxRadix(0); + private static final String FIELD_TRACK_SUPPORT = Util.intToStringMaxRadix(1); + private static final String FIELD_TRACK_SELECTED = Util.intToStringMaxRadix(3); + private static final String FIELD_ADAPTIVE_SUPPORTED = Util.intToStringMaxRadix(4); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putBundle(FIELD_TRACK_GROUP, mediaTrackGroup.toBundle()); + bundle.putIntArray(FIELD_TRACK_SUPPORT, trackSupport); + bundle.putBooleanArray(FIELD_TRACK_SELECTED, trackSelected); + bundle.putBoolean(FIELD_ADAPTIVE_SUPPORTED, adaptiveSupported); + return bundle; + } + + /** Object that can restore a group of tracks from a {@link Bundle}. */ + public static final Creator CREATOR = + bundle -> { + // Can't create a Tracks.Group without a TrackGroup + TrackGroup trackGroup = + TrackGroup.CREATOR.fromBundle(checkNotNull(bundle.getBundle(FIELD_TRACK_GROUP))); + final @C.FormatSupport int[] trackSupport = + MoreObjects.firstNonNull( + bundle.getIntArray(FIELD_TRACK_SUPPORT), new int[trackGroup.length]); + boolean[] selected = + MoreObjects.firstNonNull( + bundle.getBooleanArray(FIELD_TRACK_SELECTED), new boolean[trackGroup.length]); + boolean adaptiveSupported = bundle.getBoolean(FIELD_ADAPTIVE_SUPPORTED, false); + return new Group(trackGroup, adaptiveSupported, trackSupport, selected); + }; + } + + /** Empty tracks. */ + public static final Tracks EMPTY = new Tracks(ImmutableList.of()); + + private final ImmutableList groups; + + /** + * Constructs an instance. + * + * @param groups The {@link Group groups} of tracks. + */ + public Tracks(List groups) { + this.groups = ImmutableList.copyOf(groups); + } + + /** Returns the {@link Group groups} of tracks. */ + public ImmutableList getGroups() { + return groups; + } + + /** Returns {@code true} if there are no tracks, and {@code false} otherwise. */ + public boolean isEmpty() { + return groups.isEmpty(); + } + + /** Returns true if there are tracks of type {@code trackType}, and false otherwise. 
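+   *
+   * <p>Illustrative check, assuming a {@code Tracks} instance (e.g. from
+   * {@code Player.getCurrentTracks()}):
+   *
+   * <pre>{@code
+   * boolean hasTextTracks = tracks.containsType(C.TRACK_TYPE_TEXT);
+   * boolean textSelected = hasTextTracks && tracks.isTypeSelected(C.TRACK_TYPE_TEXT);
+   * }</pre>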
*/ + public boolean containsType(@C.TrackType int trackType) { + for (int i = 0; i < groups.size(); i++) { + if (groups.get(i).getType() == trackType) { + return true; + } + } + return false; + } + + /** + * Returns true if at least one track of type {@code trackType} is {@link + * Group#isTrackSupported(int) supported}. + */ + public boolean isTypeSupported(@C.TrackType int trackType) { + return isTypeSupported(trackType, /* allowExceedsCapabilities= */ false); + } + + /** + * Returns true if at least one track of type {@code trackType} is {@link + * Group#isTrackSupported(int, boolean) supported}. + * + * @param allowExceedsCapabilities Whether to consider the track as supported if it has a + * supported {@link Format#sampleMimeType MIME type}, but otherwise exceeds the advertised + * capabilities of the device. For example, a video track for which there's a corresponding + * decoder whose maximum advertised resolution is exceeded by the resolution of the track. + * Such tracks may be playable in some cases. + */ + public boolean isTypeSupported(@C.TrackType int trackType, boolean allowExceedsCapabilities) { + for (int i = 0; i < groups.size(); i++) { + if (groups.get(i).getType() == trackType) { + if (groups.get(i).isSupported(allowExceedsCapabilities)) { + return true; + } + } + } + return false; + } + + /** + * @deprecated Use {@link #containsType(int)} and {@link #isTypeSupported(int)}. + */ + @Deprecated + @SuppressWarnings("deprecation") + public boolean isTypeSupportedOrEmpty(@C.TrackType int trackType) { + return isTypeSupportedOrEmpty(trackType, /* allowExceedsCapabilities= */ false); + } + + /** + * @deprecated Use {@link #containsType(int)} and {@link #isTypeSupported(int, boolean)}. + */ + @Deprecated + public boolean isTypeSupportedOrEmpty( + @C.TrackType int trackType, boolean allowExceedsCapabilities) { + return !containsType(trackType) || isTypeSupported(trackType, allowExceedsCapabilities); + } + + /** Returns true if at least one track of the type {@code trackType} is selected for playback. */ + public boolean isTypeSelected(@C.TrackType int trackType) { + for (int i = 0; i < groups.size(); i++) { + Group group = groups.get(i); + if (group.isSelected() && group.getType() == trackType) { + return true; + } + } + return false; + } + + @Override + public boolean equals(@Nullable Object other) { + if (this == other) { + return true; + } + if (other == null || getClass() != other.getClass()) { + return false; + } + Tracks that = (Tracks) other; + return groups.equals(that.groups); + } + + @Override + public int hashCode() { + return groups.hashCode(); + } + // Bundleable implementation. + + private static final String FIELD_TRACK_GROUPS = Util.intToStringMaxRadix(0); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putParcelableArrayList(FIELD_TRACK_GROUPS, toBundleArrayList(groups)); + return bundle; + } + + /** Object that can restore tracks from a {@link Bundle}. */ + public static final Creator CREATOR = + bundle -> { + @Nullable List groupBundles = bundle.getParcelableArrayList(FIELD_TRACK_GROUPS); + List groups = + groupBundles == null + ? 
ImmutableList.of() + : BundleableUtil.fromBundleList(Group.CREATOR, groupBundles); + return new Tracks(groups); + }; +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/AnalyticsCollector.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/AnalyticsCollector.java index f3d2d903e5..efad6a1682 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/AnalyticsCollector.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/AnalyticsCollector.java @@ -15,878 +15,268 @@ */ package com.google.android.exoplayer2.analytics; -import android.graphics.SurfaceTexture; +import android.media.AudioTrack; +import android.media.MediaCodec; +import android.media.MediaCodec.CodecException; +import android.os.Looper; +import android.os.SystemClock; import android.view.Surface; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.PlaybackParameters; import com.google.android.exoplayer2.Player; -import com.google.android.exoplayer2.Player.PlaybackSuppressionReason; -import com.google.android.exoplayer2.Timeline; -import com.google.android.exoplayer2.Timeline.Period; -import com.google.android.exoplayer2.Timeline.Window; -import com.google.android.exoplayer2.analytics.AnalyticsListener.EventTime; -import com.google.android.exoplayer2.audio.AudioAttributes; -import com.google.android.exoplayer2.audio.AudioListener; -import com.google.android.exoplayer2.audio.AudioRendererEventListener; +import com.google.android.exoplayer2.audio.AudioSink; import com.google.android.exoplayer2.decoder.DecoderCounters; -import com.google.android.exoplayer2.drm.DefaultDrmSessionEventListener; -import com.google.android.exoplayer2.metadata.Metadata; -import com.google.android.exoplayer2.metadata.MetadataOutput; +import com.google.android.exoplayer2.decoder.DecoderException; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation; +import com.google.android.exoplayer2.drm.DrmSessionEventListener; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; import com.google.android.exoplayer2.source.MediaSourceEventListener; -import com.google.android.exoplayer2.source.TrackGroupArray; -import com.google.android.exoplayer2.trackselection.TrackSelectionArray; import com.google.android.exoplayer2.upstream.BandwidthMeter; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.Clock; -import com.google.android.exoplayer2.video.VideoListener; -import com.google.android.exoplayer2.video.VideoRendererEventListener; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; +import com.google.android.exoplayer2.video.VideoDecoderOutputBufferRenderer; import java.util.List; -import java.util.Set; -import java.util.concurrent.CopyOnWriteArraySet; -import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -import org.checkerframework.checker.nullness.qual.RequiresNonNull; /** - * Data collector which is able to forward analytics events to {@link AnalyticsListener}s by - * listening to all available ExoPlayer listeners. + * Interface for data collectors that forward analytics events to {@link AnalyticsListener + * AnalyticsListeners}. 
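+ *
+ * <p>A minimal wiring sketch (assuming an {@code ExoPlayer} instance; applications normally reach
+ * the collector indirectly via {@code ExoPlayer.addAnalyticsListener}, and the listener shown is
+ * just a placeholder):
+ *
+ * <pre>{@code
+ * player.addAnalyticsListener(
+ *     new AnalyticsListener() {
+ *       public void onDroppedVideoFrames(
+ *           AnalyticsListener.EventTime eventTime, int droppedFrames, long elapsedMs) {
+ *         // e.g. log the drop or lower the preferred video quality.
+ *       }
+ *     });
+ * }</pre>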
*/ -public class AnalyticsCollector - implements Player.EventListener, - MetadataOutput, - AudioRendererEventListener, - VideoRendererEventListener, +public interface AnalyticsCollector + extends Player.Listener, MediaSourceEventListener, BandwidthMeter.EventListener, - DefaultDrmSessionEventListener, - VideoListener, - AudioListener { - - private final CopyOnWriteArraySet listeners; - private final Clock clock; - private final Window window; - private final MediaPeriodQueueTracker mediaPeriodQueueTracker; - - private @MonotonicNonNull Player player; - - /** - * Creates an analytics collector. - * - * @param clock A {@link Clock} used to generate timestamps. - */ - public AnalyticsCollector(Clock clock) { - this.clock = Assertions.checkNotNull(clock); - listeners = new CopyOnWriteArraySet<>(); - mediaPeriodQueueTracker = new MediaPeriodQueueTracker(); - window = new Window(); - } + DrmSessionEventListener { /** * Adds a listener for analytics events. * * @param listener The listener to add. */ - public void addListener(AnalyticsListener listener) { - listeners.add(listener); - } + void addListener(AnalyticsListener listener); /** * Removes a previously added analytics event listener. * * @param listener The listener to remove. */ - public void removeListener(AnalyticsListener listener) { - listeners.remove(listener); - } + void removeListener(AnalyticsListener listener); /** * Sets the player for which data will be collected. Must only be called if no player has been set * yet or the current player is idle. * * @param player The {@link Player} for which data will be collected. + * @param looper The {@link Looper} used for listener callbacks. */ - public void setPlayer(Player player) { - Assertions.checkState( - this.player == null || mediaPeriodQueueTracker.mediaPeriodInfoQueue.isEmpty()); - this.player = Assertions.checkNotNull(player); - } - - // External events. + void setPlayer(Player player, Looper looper); /** - * Notify analytics collector that a seek operation will start. Should be called before the player - * adjusts its state and position to the seek. + * Releases the collector. Must be called after the player for which data is collected has been + * released. */ - public final void notifySeekStarted() { - if (!mediaPeriodQueueTracker.isSeeking()) { - EventTime eventTime = generatePlayingMediaPeriodEventTime(); - mediaPeriodQueueTracker.onSeekStarted(); - for (AnalyticsListener listener : listeners) { - listener.onSeekStarted(eventTime); - } - } - } + void release(); /** - * Resets the analytics collector for a new media source. Should be called before the player is - * prepared with a new media source. + * Updates the playback queue information used for event association. + * + *
      Should only be called by the player controlling the queue and not from app code. + * + * @param queue The playback queue of media periods identified by their {@link MediaPeriodId}. + * @param readingPeriod The media period in the queue that is currently being read by renderers, + * or null if the queue is empty. */ - public final void resetForNewMediaSource() { - // Copying the list is needed because onMediaPeriodReleased will modify the list. - List mediaPeriodInfos = - new ArrayList<>(mediaPeriodQueueTracker.mediaPeriodInfoQueue); - for (MediaPeriodInfo mediaPeriodInfo : mediaPeriodInfos) { - onMediaPeriodReleased(mediaPeriodInfo.windowIndex, mediaPeriodInfo.mediaPeriodId); - } - } - - // MetadataOutput implementation. - - @Override - public final void onMetadata(Metadata metadata) { - EventTime eventTime = generatePlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onMetadata(eventTime, metadata); - } - } - - // AudioRendererEventListener implementation. - - @Override - public final void onAudioEnabled(DecoderCounters counters) { - // The renderers are only enabled after we changed the playing media period. - EventTime eventTime = generatePlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onDecoderEnabled(eventTime, C.TRACK_TYPE_AUDIO, counters); - } - } - - @Override - public final void onAudioDecoderInitialized( - String decoderName, long initializedTimestampMs, long initializationDurationMs) { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onDecoderInitialized( - eventTime, C.TRACK_TYPE_AUDIO, decoderName, initializationDurationMs); - } - } - - @Override - public final void onAudioInputFormatChanged(Format format) { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onDecoderInputFormatChanged(eventTime, C.TRACK_TYPE_AUDIO, format); - } - } - - @Override - public final void onAudioSinkUnderrun( - int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onAudioUnderrun(eventTime, bufferSize, bufferSizeMs, elapsedSinceLastFeedMs); - } - } - - @Override - public final void onAudioDisabled(DecoderCounters counters) { - // The renderers are disabled after we changed the playing media period on the playback thread - // but before this change is reported to the app thread. - EventTime eventTime = generateLastReportedPlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onDecoderDisabled(eventTime, C.TRACK_TYPE_AUDIO, counters); - } - } - - // AudioListener implementation. 
- - @Override - public final void onAudioSessionId(int audioSessionId) { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onAudioSessionId(eventTime, audioSessionId); - } - } - - @Override - public void onAudioAttributesChanged(AudioAttributes audioAttributes) { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onAudioAttributesChanged(eventTime, audioAttributes); - } - } - - @Override - public void onVolumeChanged(float audioVolume) { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onVolumeChanged(eventTime, audioVolume); - } - } - - // VideoRendererEventListener implementation. - - @Override - public final void onVideoEnabled(DecoderCounters counters) { - // The renderers are only enabled after we changed the playing media period. - EventTime eventTime = generatePlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onDecoderEnabled(eventTime, C.TRACK_TYPE_VIDEO, counters); - } - } - - @Override - public final void onVideoDecoderInitialized( - String decoderName, long initializedTimestampMs, long initializationDurationMs) { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onDecoderInitialized( - eventTime, C.TRACK_TYPE_VIDEO, decoderName, initializationDurationMs); - } - } - - @Override - public final void onVideoInputFormatChanged(Format format) { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onDecoderInputFormatChanged(eventTime, C.TRACK_TYPE_VIDEO, format); - } - } - - @Override - public final void onDroppedFrames(int count, long elapsedMs) { - EventTime eventTime = generateLastReportedPlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onDroppedVideoFrames(eventTime, count, elapsedMs); - } - } - - @Override - public final void onVideoDisabled(DecoderCounters counters) { - // The renderers are disabled after we changed the playing media period on the playback thread - // but before this change is reported to the app thread. - EventTime eventTime = generateLastReportedPlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onDecoderDisabled(eventTime, C.TRACK_TYPE_VIDEO, counters); - } - } - - @Override - public final void onRenderedFirstFrame(@Nullable Surface surface) { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onRenderedFirstFrame(eventTime, surface); - } - } - - // VideoListener implementation. - - @Override - public final void onRenderedFirstFrame() { - // Do nothing. Already reported in VideoRendererEventListener.onRenderedFirstFrame. 
- } - - @Override - public final void onVideoSizeChanged( - int width, int height, int unappliedRotationDegrees, float pixelWidthHeightRatio) { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onVideoSizeChanged( - eventTime, width, height, unappliedRotationDegrees, pixelWidthHeightRatio); - } - } - - @Override - public void onSurfaceSizeChanged(int width, int height) { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onSurfaceSizeChanged(eventTime, width, height); - } - } - - @Override - public boolean onSurfaceDestroyed(SurfaceTexture surfaceTexture) { - return false; - } - - @Override - public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) { - - } - - // MediaSourceEventListener implementation. - - @Override - public final void onMediaPeriodCreated(int windowIndex, MediaPeriodId mediaPeriodId) { - mediaPeriodQueueTracker.onMediaPeriodCreated(windowIndex, mediaPeriodId); - EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); - for (AnalyticsListener listener : listeners) { - listener.onMediaPeriodCreated(eventTime); - } - } - - @Override - public final void onMediaPeriodReleased(int windowIndex, MediaPeriodId mediaPeriodId) { - EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); - if (mediaPeriodQueueTracker.onMediaPeriodReleased(mediaPeriodId)) { - for (AnalyticsListener listener : listeners) { - listener.onMediaPeriodReleased(eventTime); - } - } - } - - @Override - public final void onLoadStarted( - int windowIndex, - @Nullable MediaPeriodId mediaPeriodId, - LoadEventInfo loadEventInfo, - MediaLoadData mediaLoadData) { - EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); - for (AnalyticsListener listener : listeners) { - listener.onLoadStarted(eventTime, loadEventInfo, mediaLoadData); - } - } - - @Override - public final void onLoadCompleted( - int windowIndex, - @Nullable MediaPeriodId mediaPeriodId, - LoadEventInfo loadEventInfo, - MediaLoadData mediaLoadData) { - EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); - for (AnalyticsListener listener : listeners) { - listener.onLoadCompleted(eventTime, loadEventInfo, mediaLoadData); - } - } - - @Override - public final void onLoadCanceled( - int windowIndex, - @Nullable MediaPeriodId mediaPeriodId, - LoadEventInfo loadEventInfo, - MediaLoadData mediaLoadData) { - EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); - for (AnalyticsListener listener : listeners) { - listener.onLoadCanceled(eventTime, loadEventInfo, mediaLoadData); - } - } - - @Override - public final void onLoadError( - int windowIndex, - @Nullable MediaPeriodId mediaPeriodId, - LoadEventInfo loadEventInfo, - MediaLoadData mediaLoadData, - IOException error, - boolean wasCanceled) { - EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); - for (AnalyticsListener listener : listeners) { - listener.onLoadError(eventTime, loadEventInfo, mediaLoadData, error, wasCanceled); - } - } - - @Override - public final void onReadingStarted(int windowIndex, MediaPeriodId mediaPeriodId) { - mediaPeriodQueueTracker.onReadingStarted(mediaPeriodId); - EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); - for (AnalyticsListener listener : listeners) { - listener.onReadingStarted(eventTime); - } - } - - @Override - public final void 
onUpstreamDiscarded( - int windowIndex, @Nullable MediaPeriodId mediaPeriodId, MediaLoadData mediaLoadData) { - EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); - for (AnalyticsListener listener : listeners) { - listener.onUpstreamDiscarded(eventTime, mediaLoadData); - } - } - - @Override - public final void onDownstreamFormatChanged( - int windowIndex, @Nullable MediaPeriodId mediaPeriodId, MediaLoadData mediaLoadData) { - EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); - for (AnalyticsListener listener : listeners) { - listener.onDownstreamFormatChanged(eventTime, mediaLoadData); - } - } - - // Player.EventListener implementation. - - // TODO: Add onFinishedReportingChanges to Player.EventListener to know when a set of simultaneous - // callbacks finished. This helps to assign exactly the same EventTime to all of them instead of - // having slightly different real times. - - @Override - public final void onTimelineChanged(Timeline timeline, @Player.TimelineChangeReason int reason) { - mediaPeriodQueueTracker.onTimelineChanged(timeline); - EventTime eventTime = generatePlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onTimelineChanged(eventTime, reason); - } - } - - @Override - public final void onTracksChanged( - TrackGroupArray trackGroups, TrackSelectionArray trackSelections) { - EventTime eventTime = generatePlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onTracksChanged(eventTime, trackGroups, trackSelections); - } - } - - @Override - public final void onLoadingChanged(boolean isLoading) { - EventTime eventTime = generatePlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onLoadingChanged(eventTime, isLoading); - } - } - - @Override - public final void onPlayerStateChanged(boolean playWhenReady, @Player.State int playbackState) { - EventTime eventTime = generatePlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onPlayerStateChanged(eventTime, playWhenReady, playbackState); - } - } - - @Override - public void onPlaybackSuppressionReasonChanged( - @PlaybackSuppressionReason int playbackSuppressionReason) { - EventTime eventTime = generatePlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onPlaybackSuppressionReasonChanged(eventTime, playbackSuppressionReason); - } - } - - @Override - public void onIsPlayingChanged(boolean isPlaying) { - EventTime eventTime = generatePlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onIsPlayingChanged(eventTime, isPlaying); - } - } - - @Override - public final void onRepeatModeChanged(@Player.RepeatMode int repeatMode) { - EventTime eventTime = generatePlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onRepeatModeChanged(eventTime, repeatMode); - } - } - - @Override - public final void onShuffleModeEnabledChanged(boolean shuffleModeEnabled) { - EventTime eventTime = generatePlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onShuffleModeChanged(eventTime, shuffleModeEnabled); - } - } - - @Override - public final void onPlayerError(ExoPlaybackException error) { - EventTime eventTime = generateLastReportedPlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onPlayerError(eventTime, error); - } - } - - @Override - public final void 
onPositionDiscontinuity(@Player.DiscontinuityReason int reason) { - mediaPeriodQueueTracker.onPositionDiscontinuity(reason); - EventTime eventTime = generatePlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onPositionDiscontinuity(eventTime, reason); - } - } - - @Override - public final void onPlaybackParametersChanged(PlaybackParameters playbackParameters) { - EventTime eventTime = generatePlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onPlaybackParametersChanged(eventTime, playbackParameters); - } - } - - @Override - public final void onSeekProcessed() { - if (mediaPeriodQueueTracker.isSeeking()) { - mediaPeriodQueueTracker.onSeekProcessed(); - EventTime eventTime = generatePlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onSeekProcessed(eventTime); - } - } - } - - // BandwidthMeter.Listener implementation. - - @Override - public final void onBandwidthSample(int elapsedMs, long bytes, long bitrate) { - EventTime eventTime = generateLoadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onBandwidthEstimate(eventTime, elapsedMs, bytes, bitrate); - } - } - - // DefaultDrmSessionManager.EventListener implementation. - - @Override - public final void onDrmSessionAcquired() { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onDrmSessionAcquired(eventTime); - } - } - - @Override - public final void onDrmKeysLoaded() { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onDrmKeysLoaded(eventTime); - } - } - - @Override - public final void onDrmSessionManagerError(Exception error) { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onDrmSessionManagerError(eventTime, error); - } - } - - @Override - public final void onDrmKeysRestored() { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onDrmKeysRestored(eventTime); - } - } - - @Override - public final void onDrmKeysRemoved() { - EventTime eventTime = generateReadingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onDrmKeysRemoved(eventTime); - } - } - - @Override - public final void onDrmSessionReleased() { - EventTime eventTime = generateLastReportedPlayingMediaPeriodEventTime(); - for (AnalyticsListener listener : listeners) { - listener.onDrmSessionReleased(eventTime); - } - } - - // Internal methods. - - /** Returns read-only set of registered listeners. */ - protected Set getListeners() { - return Collections.unmodifiableSet(listeners); - } + void updateMediaPeriodQueueInfo(List queue, @Nullable MediaPeriodId readingPeriod); - /** Returns a new {@link EventTime} for the specified timeline, window and media period id. */ - @RequiresNonNull("player") - protected EventTime generateEventTime( - Timeline timeline, int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { - if (timeline.isEmpty()) { - // Ensure media period id is only reported together with a valid timeline. 
- mediaPeriodId = null; - } - long realtimeMs = clock.elapsedRealtime(); - long eventPositionMs; - boolean isInCurrentWindow = - timeline == player.getCurrentTimeline() && windowIndex == player.getCurrentWindowIndex(); - if (mediaPeriodId != null && mediaPeriodId.isAd()) { - boolean isCurrentAd = - isInCurrentWindow - && player.getCurrentAdGroupIndex() == mediaPeriodId.adGroupIndex - && player.getCurrentAdIndexInAdGroup() == mediaPeriodId.adIndexInAdGroup; - // Assume start position of 0 for future ads. - eventPositionMs = isCurrentAd ? player.getCurrentPosition() : 0; - } else if (isInCurrentWindow) { - eventPositionMs = player.getContentPosition(); - } else { - // Assume default start position for future content windows. If timeline is not available yet, - // assume start position of 0. - eventPositionMs = - timeline.isEmpty() ? 0 : timeline.getWindow(windowIndex, window).getDefaultPositionMs(); - } - return new EventTime( - realtimeMs, - timeline, - windowIndex, - mediaPeriodId, - eventPositionMs, - player.getCurrentPosition(), - player.getTotalBufferedDuration()); - } - - private EventTime generateEventTime(@Nullable MediaPeriodInfo mediaPeriodInfo) { - Assertions.checkNotNull(player); - if (mediaPeriodInfo == null) { - int windowIndex = player.getCurrentWindowIndex(); - mediaPeriodInfo = mediaPeriodQueueTracker.tryResolveWindowIndex(windowIndex); - if (mediaPeriodInfo == null) { - Timeline timeline = player.getCurrentTimeline(); - boolean windowIsInTimeline = windowIndex < timeline.getWindowCount(); - return generateEventTime( - windowIsInTimeline ? timeline : Timeline.EMPTY, windowIndex, /* mediaPeriodId= */ null); - } - } - return generateEventTime( - mediaPeriodInfo.timeline, mediaPeriodInfo.windowIndex, mediaPeriodInfo.mediaPeriodId); - } - - private EventTime generateLastReportedPlayingMediaPeriodEventTime() { - return generateEventTime(mediaPeriodQueueTracker.getLastReportedPlayingMediaPeriod()); - } - - private EventTime generatePlayingMediaPeriodEventTime() { - return generateEventTime(mediaPeriodQueueTracker.getPlayingMediaPeriod()); - } - - private EventTime generateReadingMediaPeriodEventTime() { - return generateEventTime(mediaPeriodQueueTracker.getReadingMediaPeriod()); - } - - private EventTime generateLoadingMediaPeriodEventTime() { - return generateEventTime(mediaPeriodQueueTracker.getLoadingMediaPeriod()); - } - - private EventTime generateMediaPeriodEventTime( - int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { - Assertions.checkNotNull(player); - if (mediaPeriodId != null) { - MediaPeriodInfo mediaPeriodInfo = mediaPeriodQueueTracker.getMediaPeriodInfo(mediaPeriodId); - return mediaPeriodInfo != null - ? generateEventTime(mediaPeriodInfo) - : generateEventTime(Timeline.EMPTY, windowIndex, mediaPeriodId); - } - Timeline timeline = player.getCurrentTimeline(); - boolean windowIsInTimeline = windowIndex < timeline.getWindowCount(); - return generateEventTime( - windowIsInTimeline ? timeline : Timeline.EMPTY, windowIndex, /* mediaPeriodId= */ null); - } - - /** Keeps track of the active media periods and currently playing and reading media period. */ - private static final class MediaPeriodQueueTracker { - - // TODO: Investigate reporting MediaPeriodId in renderer events and adding a listener of queue - // changes, which would hopefully remove the need to track the queue here. 
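// Aside (illustrative, not part of the patch): the generateEventTime implementation removed above
// resolves the event position with a small set of rules. A simplified restatement, assuming the
// ExoPlayer 2.x Player/Timeline/MediaPeriodId API already used in this file:
//
//   - event is for the ad that is currently playing  -> player.getCurrentPosition()
//   - event is for a future ad                       -> 0
//   - event is for the current content window        -> player.getContentPosition()
//   - otherwise                                      -> the window's default start position
//
private static long resolveEventPositionMs(
    Player player, Timeline timeline, int windowIndex, @Nullable MediaPeriodId mediaPeriodId) {
  boolean isInCurrentWindow =
      timeline == player.getCurrentTimeline() && windowIndex == player.getCurrentWindowIndex();
  if (mediaPeriodId != null && mediaPeriodId.isAd()) {
    boolean isCurrentAd =
        isInCurrentWindow
            && player.getCurrentAdGroupIndex() == mediaPeriodId.adGroupIndex
            && player.getCurrentAdIndexInAdGroup() == mediaPeriodId.adIndexInAdGroup;
    // Future ads are assumed to start at position 0, as in the removed code above.
    return isCurrentAd ? player.getCurrentPosition() : 0;
  }
  if (isInCurrentWindow) {
    return player.getContentPosition();
  }
  return timeline.isEmpty()
      ? 0
      : timeline.getWindow(windowIndex, new Timeline.Window()).getDefaultPositionMs();
}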
- - private final ArrayList mediaPeriodInfoQueue; - private final HashMap mediaPeriodIdToInfo; - private final Period period; + /** + * Notify analytics collector that a seek operation will start. Should be called before the player + * adjusts its state and position to the seek. + */ + void notifySeekStarted(); - @Nullable private MediaPeriodInfo lastPlayingMediaPeriod; - @Nullable private MediaPeriodInfo lastReportedPlayingMediaPeriod; - @Nullable private MediaPeriodInfo readingMediaPeriod; - private Timeline timeline; - private boolean isSeeking; + // Audio events. - public MediaPeriodQueueTracker() { - mediaPeriodInfoQueue = new ArrayList<>(); - mediaPeriodIdToInfo = new HashMap<>(); - period = new Period(); - timeline = Timeline.EMPTY; - } + /** + * Called when the audio renderer is enabled. + * + * @param counters {@link DecoderCounters} that will be updated by the audio renderer for as long + * as it remains enabled. + */ + void onAudioEnabled(DecoderCounters counters); - /** - * Returns the {@link MediaPeriodInfo} of the media period in the front of the queue. This is - * the playing media period unless the player hasn't started playing yet (in which case it is - * the loading media period or null). While the player is seeking or preparing, this method will - * always return null to reflect the uncertainty about the current playing period. May also be - * null, if the timeline is empty or no media period is active yet. - */ - @Nullable - public MediaPeriodInfo getPlayingMediaPeriod() { - return mediaPeriodInfoQueue.isEmpty() || timeline.isEmpty() || isSeeking - ? null - : mediaPeriodInfoQueue.get(0); - } + /** + * Called when a audio decoder is created. + * + * @param decoderName The audio decoder that was created. + * @param initializedTimestampMs {@link SystemClock#elapsedRealtime()} when initialization + * finished. + * @param initializationDurationMs The time taken to initialize the decoder in milliseconds. + */ + void onAudioDecoderInitialized( + String decoderName, long initializedTimestampMs, long initializationDurationMs); - /** - * Returns the {@link MediaPeriodInfo} of the currently playing media period. This is the - * publicly reported period which should always match {@link Player#getCurrentPeriodIndex()} - * unless the player is currently seeking or being prepared in which case the previous period is - * reported until the seek or preparation is processed. May be null, if no media period is - * active yet. - */ - @Nullable - public MediaPeriodInfo getLastReportedPlayingMediaPeriod() { - return lastReportedPlayingMediaPeriod; - } + /** + * Called when the format of the media being consumed by the audio renderer changes. + * + * @param format The new format. + * @param decoderReuseEvaluation The result of the evaluation to determine whether an existing + * decoder instance can be reused for the new format, or {@code null} if the renderer did not + * have a decoder. + */ + void onAudioInputFormatChanged( + Format format, @Nullable DecoderReuseEvaluation decoderReuseEvaluation); - /** - * Returns the {@link MediaPeriodInfo} of the media period currently being read by the player. - * May be null, if the player is not reading a media period. - */ - @Nullable - public MediaPeriodInfo getReadingMediaPeriod() { - return readingMediaPeriod; - } + /** + * Called when the audio position has increased for the first time since the last pause or + * position reset. 
+ * + * @param playoutStartSystemTimeMs The approximate derived {@link System#currentTimeMillis()} at + * which playout started. + */ + void onAudioPositionAdvancing(long playoutStartSystemTimeMs); - /** - * Returns the {@link MediaPeriodInfo} of the media period at the end of the queue which is - * currently loading or will be the next one loading. May be null, if no media period is active - * yet. - */ - @Nullable - public MediaPeriodInfo getLoadingMediaPeriod() { - return mediaPeriodInfoQueue.isEmpty() - ? null - : mediaPeriodInfoQueue.get(mediaPeriodInfoQueue.size() - 1); - } + /** + * Called when an audio underrun occurs. + * + * @param bufferSize The size of the audio output buffer, in bytes. + * @param bufferSizeMs The size of the audio output buffer, in milliseconds, if it contains PCM + * encoded audio. {@link C#TIME_UNSET} if the output buffer contains non-PCM encoded audio. + * @param elapsedSinceLastFeedMs The time since audio was last written to the output buffer. + */ + void onAudioUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs); - /** Returns the {@link MediaPeriodInfo} for the given {@link MediaPeriodId}. */ - @Nullable - public MediaPeriodInfo getMediaPeriodInfo(MediaPeriodId mediaPeriodId) { - return mediaPeriodIdToInfo.get(mediaPeriodId); - } + /** + * Called when a audio decoder is released. + * + * @param decoderName The audio decoder that was released. + */ + void onAudioDecoderReleased(String decoderName); - /** Returns whether the player is currently seeking. */ - public boolean isSeeking() { - return isSeeking; - } + /** + * Called when the audio renderer is disabled. + * + * @param counters {@link DecoderCounters} that were updated by the audio renderer. + */ + void onAudioDisabled(DecoderCounters counters); - /** - * Tries to find an existing media period info from the specified window index. Only returns a - * non-null media period info if there is a unique, unambiguous match. - */ - @Nullable - public MediaPeriodInfo tryResolveWindowIndex(int windowIndex) { - MediaPeriodInfo match = null; - for (int i = 0; i < mediaPeriodInfoQueue.size(); i++) { - MediaPeriodInfo info = mediaPeriodInfoQueue.get(i); - int periodIndex = timeline.getIndexOfPeriod(info.mediaPeriodId.periodUid); - if (periodIndex != C.INDEX_UNSET - && timeline.getPeriod(periodIndex, period).windowIndex == windowIndex) { - if (match != null) { - // Ambiguous match. - return null; - } - match = info; - } - } - return match; - } + /** + * Called when {@link AudioSink} has encountered an error. + * + *
<p>
      If the sink writes to a platform {@link AudioTrack}, this will be called for all {@link + * AudioTrack} errors. + * + * @param audioSinkError The error that occurred. Typically an {@link + * AudioSink.InitializationException}, a {@link AudioSink.WriteException}, or an {@link + * AudioSink.UnexpectedDiscontinuityException}. + */ + void onAudioSinkError(Exception audioSinkError); - /** Updates the queue with a reported position discontinuity . */ - public void onPositionDiscontinuity(@Player.DiscontinuityReason int reason) { - lastReportedPlayingMediaPeriod = lastPlayingMediaPeriod; - } + /** + * Called when an audio decoder encounters an error. + * + * @param audioCodecError The error. Typically a {@link CodecException} if the renderer uses + * {@link MediaCodec}, or a {@link DecoderException} if the renderer uses a software decoder. + */ + void onAudioCodecError(Exception audioCodecError); - /** Updates the queue with a reported timeline change. */ - public void onTimelineChanged(Timeline timeline) { - for (int i = 0; i < mediaPeriodInfoQueue.size(); i++) { - MediaPeriodInfo newMediaPeriodInfo = - updateMediaPeriodInfoToNewTimeline(mediaPeriodInfoQueue.get(i), timeline); - mediaPeriodInfoQueue.set(i, newMediaPeriodInfo); - mediaPeriodIdToInfo.put(newMediaPeriodInfo.mediaPeriodId, newMediaPeriodInfo); - } - if (readingMediaPeriod != null) { - readingMediaPeriod = updateMediaPeriodInfoToNewTimeline(readingMediaPeriod, timeline); - } - this.timeline = timeline; - lastReportedPlayingMediaPeriod = lastPlayingMediaPeriod; - } + // Video events. - /** Updates the queue with a reported start of seek. */ - public void onSeekStarted() { - isSeeking = true; - } + /** + * Called when the video renderer is enabled. + * + * @param counters {@link DecoderCounters} that will be updated by the video renderer for as long + * as it remains enabled. + */ + void onVideoEnabled(DecoderCounters counters); - /** Updates the queue with a reported processed seek. */ - public void onSeekProcessed() { - isSeeking = false; - lastReportedPlayingMediaPeriod = lastPlayingMediaPeriod; - } + /** + * Called when a video decoder is created. + * + * @param decoderName The decoder that was created. + * @param initializedTimestampMs {@link SystemClock#elapsedRealtime()} when initialization + * finished. + * @param initializationDurationMs The time taken to initialize the decoder in milliseconds. + */ + void onVideoDecoderInitialized( + String decoderName, long initializedTimestampMs, long initializationDurationMs); - /** Updates the queue with a newly created media period. */ - public void onMediaPeriodCreated(int windowIndex, MediaPeriodId mediaPeriodId) { - int periodIndex = timeline.getIndexOfPeriod(mediaPeriodId.periodUid); - boolean isInTimeline = periodIndex != C.INDEX_UNSET; - MediaPeriodInfo mediaPeriodInfo = - new MediaPeriodInfo( - mediaPeriodId, - isInTimeline ? timeline : Timeline.EMPTY, - isInTimeline ? timeline.getPeriod(periodIndex, period).windowIndex : windowIndex); - mediaPeriodInfoQueue.add(mediaPeriodInfo); - mediaPeriodIdToInfo.put(mediaPeriodId, mediaPeriodInfo); - lastPlayingMediaPeriod = mediaPeriodInfoQueue.get(0); - if (mediaPeriodInfoQueue.size() == 1 && !timeline.isEmpty()) { - lastReportedPlayingMediaPeriod = lastPlayingMediaPeriod; - } - } + /** + * Called when the format of the media being consumed by the video renderer changes. + * + * @param format The new format. 
+ * @param decoderReuseEvaluation The result of the evaluation to determine whether an existing + * decoder instance can be reused for the new format, or {@code null} if the renderer did not + * have a decoder. + */ + void onVideoInputFormatChanged( + Format format, @Nullable DecoderReuseEvaluation decoderReuseEvaluation); - /** - * Updates the queue with a released media period. Returns whether the media period was still in - * the queue. - */ - public boolean onMediaPeriodReleased(MediaPeriodId mediaPeriodId) { - MediaPeriodInfo mediaPeriodInfo = mediaPeriodIdToInfo.remove(mediaPeriodId); - if (mediaPeriodInfo == null) { - // The media period has already been removed from the queue in resetForNewMediaSource(). - return false; - } - mediaPeriodInfoQueue.remove(mediaPeriodInfo); - if (readingMediaPeriod != null && mediaPeriodId.equals(readingMediaPeriod.mediaPeriodId)) { - readingMediaPeriod = mediaPeriodInfoQueue.isEmpty() ? null : mediaPeriodInfoQueue.get(0); - } - if (!mediaPeriodInfoQueue.isEmpty()) { - lastPlayingMediaPeriod = mediaPeriodInfoQueue.get(0); - } - return true; - } + /** + * Called to report the number of frames dropped by the video renderer. Dropped frames are + * reported whenever the renderer is stopped having dropped frames, and optionally, whenever the + * count reaches a specified threshold whilst the renderer is started. + * + * @param count The number of dropped frames. + * @param elapsedMs The duration in milliseconds over which the frames were dropped. This duration + * is timed from when the renderer was started or from when dropped frames were last reported + * (whichever was more recent), and not from when the first of the reported drops occurred. + */ + void onDroppedFrames(int count, long elapsedMs); - /** Update the queue with a change in the reading media period. */ - public void onReadingStarted(MediaPeriodId mediaPeriodId) { - readingMediaPeriod = mediaPeriodIdToInfo.get(mediaPeriodId); - } + /** + * Called when a video decoder is released. + * + * @param decoderName The video decoder that was released. + */ + void onVideoDecoderReleased(String decoderName); - private MediaPeriodInfo updateMediaPeriodInfoToNewTimeline( - MediaPeriodInfo info, Timeline newTimeline) { - int newPeriodIndex = newTimeline.getIndexOfPeriod(info.mediaPeriodId.periodUid); - if (newPeriodIndex == C.INDEX_UNSET) { - // Media period is not yet or no longer available in the new timeline. Keep it as it is. - return info; - } - int newWindowIndex = newTimeline.getPeriod(newPeriodIndex, period).windowIndex; - return new MediaPeriodInfo(info.mediaPeriodId, newTimeline, newWindowIndex); - } - } + /** + * Called when the video renderer is disabled. + * + * @param counters {@link DecoderCounters} that were updated by the video renderer. + */ + void onVideoDisabled(DecoderCounters counters); - /** Information about a media period and its associated timeline. */ - private static final class MediaPeriodInfo { + /** + * Called when a frame is rendered for the first time since setting the output, or since the + * renderer was reset, or since the stream being rendered was changed. + * + * @param output The output of the video renderer. Normally a {@link Surface}, however some video + * renderers may have other output types (e.g., a {@link VideoDecoderOutputBufferRenderer}). + * @param renderTimeMs The {@link SystemClock#elapsedRealtime()} when the frame was rendered. + */ + void onRenderedFirstFrame(Object output, long renderTimeMs); - /** The {@link MediaPeriodId} of the media period. 
*/ - public final MediaPeriodId mediaPeriodId; - /** - * The {@link Timeline} in which the media period can be found. Or {@link Timeline#EMPTY} if the - * media period is not part of a known timeline yet. - */ - public final Timeline timeline; - /** - * The window index of the media period in the timeline. If the timeline is empty, this is the - * prospective window index. - */ - public final int windowIndex; + /** + * Called to report the video processing offset of video frames processed by the video renderer. + * + *
<p>
      Video processing offset represents how early a video frame is processed compared to the + * player's current position. For each video frame, the offset is calculated as Pvf + * - Ppl where Pvf is the presentation timestamp of the video + * frame and Ppl is the current position of the player. Positive values + * indicate the frame was processed early enough whereas negative values indicate that the + * player's position had progressed beyond the frame's timestamp when the frame was processed (and + * the frame was probably dropped). + * + *
<p>
      The renderer reports the sum of video processing offset samples (one sample per processed + * video frame: dropped, skipped or rendered) and the total number of samples. + * + * @param totalProcessingOffsetUs The sum of all video frame processing offset samples for the + * video frames processed by the renderer in microseconds. + * @param frameCount The number of samples included in the {@code totalProcessingOffsetUs}. + */ + void onVideoFrameProcessingOffset(long totalProcessingOffsetUs, int frameCount); - public MediaPeriodInfo(MediaPeriodId mediaPeriodId, Timeline timeline, int windowIndex) { - this.mediaPeriodId = mediaPeriodId; - this.timeline = timeline; - this.windowIndex = windowIndex; - } - } + /** + * Called when a video decoder encounters an error. + * + *
<p>
      This method being called does not indicate that playback has failed, or that it will fail. + * The player may be able to recover from the error. Hence applications should not + * implement this method to display a user visible error or initiate an application level retry. + * {@link Player.Listener#onPlayerError} is the appropriate place to implement such behavior. This + * method is called to provide the application with an opportunity to log the error if it wishes + * to do so. + * + * @param videoCodecError The error. Typically a {@link CodecException} if the renderer uses + * {@link MediaCodec}, or a {@link DecoderException} if the renderer uses a software decoder. + */ + void onVideoCodecError(Exception videoCodecError); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/AnalyticsListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/AnalyticsListener.java index e16d92df9e..5cce607660 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/AnalyticsListener.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/AnalyticsListener.java @@ -15,27 +15,59 @@ */ package com.google.android.exoplayer2.analytics; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.media.MediaCodec; +import android.media.MediaCodec.CodecException; +import android.os.Looper; +import android.os.SystemClock; +import android.util.SparseArray; import android.view.Surface; +import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.ExoPlaybackException; +import com.google.android.exoplayer2.DeviceInfo; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.MediaItem; +import com.google.android.exoplayer2.MediaMetadata; +import com.google.android.exoplayer2.PlaybackException; import com.google.android.exoplayer2.PlaybackParameters; import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.Player.DiscontinuityReason; import com.google.android.exoplayer2.Player.PlaybackSuppressionReason; import com.google.android.exoplayer2.Player.TimelineChangeReason; import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.Tracks; import com.google.android.exoplayer2.audio.AudioAttributes; import com.google.android.exoplayer2.audio.AudioSink; import com.google.android.exoplayer2.decoder.DecoderCounters; +import com.google.android.exoplayer2.decoder.DecoderException; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation; +import com.google.android.exoplayer2.drm.DrmSession; import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.metadata.MetadataOutput; +import com.google.android.exoplayer2.source.LoadEventInfo; +import com.google.android.exoplayer2.source.MediaLoadData; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; -import com.google.android.exoplayer2.source.MediaSourceEventListener.LoadEventInfo; -import com.google.android.exoplayer2.source.MediaSourceEventListener.MediaLoadData; -import com.google.android.exoplayer2.source.TrackGroupArray; -import 
com.google.android.exoplayer2.trackselection.TrackSelectionArray; +import com.google.android.exoplayer2.text.Cue; +import com.google.android.exoplayer2.text.CueGroup; +import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.trackselection.TrackSelectionParameters; +import com.google.android.exoplayer2.util.FlagSet; +import com.google.android.exoplayer2.video.VideoDecoderOutputBufferRenderer; +import com.google.android.exoplayer2.video.VideoSize; +import com.google.common.base.Objects; import java.io.IOException; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.util.List; /** * A listener for analytics events. @@ -44,9 +76,295 @@ * time at the time of the event. * *
<p>
      All methods have no-op default implementations to allow selective overrides. + * + *
<p>
      Listeners can choose to implement individual events (e.g. {@link + * #onIsPlayingChanged(EventTime, boolean)}) or {@link #onEvents(Player, Events)}, which is called + * after one or more events occurred together. */ public interface AnalyticsListener { + /** A set of {@link EventFlags}. */ + final class Events { + + private final FlagSet flags; + private final SparseArray eventTimes; + + /** + * Creates an instance. + * + * @param flags The {@link FlagSet} containing the {@link EventFlags} in the set. + * @param eventTimes A map from {@link EventFlags} to {@link EventTime}. Must at least contain + * all the events recorded in {@code flags}. Events that are not recorded in {@code flags} + * are ignored. + */ + public Events(FlagSet flags, SparseArray eventTimes) { + this.flags = flags; + SparseArray flagsToTimes = new SparseArray<>(/* initialCapacity= */ flags.size()); + for (int i = 0; i < flags.size(); i++) { + @EventFlags int eventFlag = flags.get(i); + flagsToTimes.append(eventFlag, checkNotNull(eventTimes.get(eventFlag))); + } + this.eventTimes = flagsToTimes; + } + + /** + * Returns the {@link EventTime} for the specified event. + * + * @param event The {@link EventFlags event}. + * @return The {@link EventTime} of this event. + */ + public EventTime getEventTime(@EventFlags int event) { + return checkNotNull(eventTimes.get(event)); + } + + /** + * Returns whether the given event occurred. + * + * @param event The {@link EventFlags event}. + * @return Whether the event occurred. + */ + public boolean contains(@EventFlags int event) { + return flags.contains(event); + } + + /** + * Returns whether any of the given events occurred. + * + * @param events The {@link EventFlags events}. + * @return Whether any of the events occurred. + */ + public boolean containsAny(@EventFlags int... events) { + return flags.containsAny(events); + } + + /** Returns the number of events in the set. */ + public int size() { + return flags.size(); + } + + /** + * Returns the {@link EventFlags event} at the given index. + * + *
<p>
      Although index-based access is possible, it doesn't imply a particular order of these + * events. + * + * @param index The index. Must be between 0 (inclusive) and {@link #size()} (exclusive). + * @return The {@link EventFlags event} at the given index. + */ + public @EventFlags int get(int index) { + return flags.get(index); + } + } + + /** + * Events that can be reported via {@link #onEvents(Player, Events)}. + * + *
<p>
      One of the {@link AnalyticsListener}{@code .EVENT_*} flags. + */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef({ + EVENT_TIMELINE_CHANGED, + EVENT_MEDIA_ITEM_TRANSITION, + EVENT_TRACKS_CHANGED, + EVENT_IS_LOADING_CHANGED, + EVENT_PLAYBACK_STATE_CHANGED, + EVENT_PLAY_WHEN_READY_CHANGED, + EVENT_PLAYBACK_SUPPRESSION_REASON_CHANGED, + EVENT_IS_PLAYING_CHANGED, + EVENT_REPEAT_MODE_CHANGED, + EVENT_SHUFFLE_MODE_ENABLED_CHANGED, + EVENT_PLAYER_ERROR, + EVENT_POSITION_DISCONTINUITY, + EVENT_PLAYBACK_PARAMETERS_CHANGED, + EVENT_AVAILABLE_COMMANDS_CHANGED, + EVENT_MEDIA_METADATA_CHANGED, + EVENT_PLAYLIST_METADATA_CHANGED, + EVENT_SEEK_BACK_INCREMENT_CHANGED, + EVENT_SEEK_FORWARD_INCREMENT_CHANGED, + EVENT_MAX_SEEK_TO_PREVIOUS_POSITION_CHANGED, + EVENT_TRACK_SELECTION_PARAMETERS_CHANGED, + EVENT_DEVICE_INFO_CHANGED, + EVENT_DEVICE_VOLUME_CHANGED, + EVENT_LOAD_STARTED, + EVENT_LOAD_COMPLETED, + EVENT_LOAD_CANCELED, + EVENT_LOAD_ERROR, + EVENT_DOWNSTREAM_FORMAT_CHANGED, + EVENT_UPSTREAM_DISCARDED, + EVENT_BANDWIDTH_ESTIMATE, + EVENT_METADATA, + EVENT_CUES, + EVENT_AUDIO_ENABLED, + EVENT_AUDIO_DECODER_INITIALIZED, + EVENT_AUDIO_INPUT_FORMAT_CHANGED, + EVENT_AUDIO_POSITION_ADVANCING, + EVENT_AUDIO_UNDERRUN, + EVENT_AUDIO_DECODER_RELEASED, + EVENT_AUDIO_DISABLED, + EVENT_AUDIO_SESSION_ID, + EVENT_AUDIO_ATTRIBUTES_CHANGED, + EVENT_SKIP_SILENCE_ENABLED_CHANGED, + EVENT_AUDIO_SINK_ERROR, + EVENT_VOLUME_CHANGED, + EVENT_VIDEO_ENABLED, + EVENT_VIDEO_DECODER_INITIALIZED, + EVENT_VIDEO_INPUT_FORMAT_CHANGED, + EVENT_DROPPED_VIDEO_FRAMES, + EVENT_VIDEO_DECODER_RELEASED, + EVENT_VIDEO_DISABLED, + EVENT_VIDEO_FRAME_PROCESSING_OFFSET, + EVENT_RENDERED_FIRST_FRAME, + EVENT_VIDEO_SIZE_CHANGED, + EVENT_SURFACE_SIZE_CHANGED, + EVENT_DRM_SESSION_ACQUIRED, + EVENT_DRM_KEYS_LOADED, + EVENT_DRM_SESSION_MANAGER_ERROR, + EVENT_DRM_KEYS_RESTORED, + EVENT_DRM_KEYS_REMOVED, + EVENT_DRM_SESSION_RELEASED, + EVENT_PLAYER_RELEASED, + EVENT_AUDIO_CODEC_ERROR, + EVENT_VIDEO_CODEC_ERROR, + }) + @interface EventFlags {} + /** {@link Player#getCurrentTimeline()} changed. */ + int EVENT_TIMELINE_CHANGED = Player.EVENT_TIMELINE_CHANGED; + /** + * {@link Player#getCurrentMediaItem()} changed or the player started repeating the current item. + */ + int EVENT_MEDIA_ITEM_TRANSITION = Player.EVENT_MEDIA_ITEM_TRANSITION; + /** {@link Player#getCurrentTracks()} changed. */ + int EVENT_TRACKS_CHANGED = Player.EVENT_TRACKS_CHANGED; + /** {@link Player#isLoading()} ()} changed. */ + int EVENT_IS_LOADING_CHANGED = Player.EVENT_IS_LOADING_CHANGED; + /** {@link Player#getPlaybackState()} changed. */ + int EVENT_PLAYBACK_STATE_CHANGED = Player.EVENT_PLAYBACK_STATE_CHANGED; + /** {@link Player#getPlayWhenReady()} changed. */ + int EVENT_PLAY_WHEN_READY_CHANGED = Player.EVENT_PLAY_WHEN_READY_CHANGED; + /** {@link Player#getPlaybackSuppressionReason()} changed. */ + int EVENT_PLAYBACK_SUPPRESSION_REASON_CHANGED = Player.EVENT_PLAYBACK_SUPPRESSION_REASON_CHANGED; + /** {@link Player#isPlaying()} changed. */ + int EVENT_IS_PLAYING_CHANGED = Player.EVENT_IS_PLAYING_CHANGED; + /** {@link Player#getRepeatMode()} changed. */ + int EVENT_REPEAT_MODE_CHANGED = Player.EVENT_REPEAT_MODE_CHANGED; + /** {@link Player#getShuffleModeEnabled()} changed. 
*/ + int EVENT_SHUFFLE_MODE_ENABLED_CHANGED = Player.EVENT_SHUFFLE_MODE_ENABLED_CHANGED; + /** {@link Player#getPlayerError()} changed. */ + int EVENT_PLAYER_ERROR = Player.EVENT_PLAYER_ERROR; + /** + * A position discontinuity occurred. See {@link + * Player.Listener#onPositionDiscontinuity(Player.PositionInfo, Player.PositionInfo, int)}. + */ + int EVENT_POSITION_DISCONTINUITY = Player.EVENT_POSITION_DISCONTINUITY; + /** {@link Player#getPlaybackParameters()} changed. */ + int EVENT_PLAYBACK_PARAMETERS_CHANGED = Player.EVENT_PLAYBACK_PARAMETERS_CHANGED; + /** {@link Player#getAvailableCommands()} changed. */ + int EVENT_AVAILABLE_COMMANDS_CHANGED = Player.EVENT_AVAILABLE_COMMANDS_CHANGED; + /** {@link Player#getMediaMetadata()} changed. */ + int EVENT_MEDIA_METADATA_CHANGED = Player.EVENT_MEDIA_METADATA_CHANGED; + /** {@link Player#getPlaylistMetadata()} changed. */ + int EVENT_PLAYLIST_METADATA_CHANGED = Player.EVENT_PLAYLIST_METADATA_CHANGED; + /** {@link Player#getSeekBackIncrement()} changed. */ + int EVENT_SEEK_BACK_INCREMENT_CHANGED = Player.EVENT_SEEK_BACK_INCREMENT_CHANGED; + /** {@link Player#getSeekForwardIncrement()} changed. */ + int EVENT_SEEK_FORWARD_INCREMENT_CHANGED = Player.EVENT_SEEK_FORWARD_INCREMENT_CHANGED; + /** {@link Player#getMaxSeekToPreviousPosition()} changed. */ + int EVENT_MAX_SEEK_TO_PREVIOUS_POSITION_CHANGED = + Player.EVENT_MAX_SEEK_TO_PREVIOUS_POSITION_CHANGED; + /** {@link Player#getTrackSelectionParameters()} changed. */ + int EVENT_TRACK_SELECTION_PARAMETERS_CHANGED = Player.EVENT_TRACK_SELECTION_PARAMETERS_CHANGED; + /** Audio attributes changed. */ + int EVENT_AUDIO_ATTRIBUTES_CHANGED = Player.EVENT_AUDIO_ATTRIBUTES_CHANGED; + /** An audio session id was set. */ + int EVENT_AUDIO_SESSION_ID = Player.EVENT_AUDIO_SESSION_ID; + /** The volume changed. */ + int EVENT_VOLUME_CHANGED = Player.EVENT_VOLUME_CHANGED; + /** Skipping silences was enabled or disabled in the audio stream. */ + int EVENT_SKIP_SILENCE_ENABLED_CHANGED = Player.EVENT_SKIP_SILENCE_ENABLED_CHANGED; + /** The surface size changed. */ + int EVENT_SURFACE_SIZE_CHANGED = Player.EVENT_SURFACE_SIZE_CHANGED; + /** The video size changed. */ + int EVENT_VIDEO_SIZE_CHANGED = Player.EVENT_VIDEO_SIZE_CHANGED; + /** + * The first frame has been rendered since setting the surface, since the renderer was reset or + * since the stream changed. + */ + int EVENT_RENDERED_FIRST_FRAME = Player.EVENT_RENDERED_FIRST_FRAME; + /** Metadata associated with the current playback time was reported. */ + int EVENT_METADATA = Player.EVENT_METADATA; + /** {@link Player#getCurrentCues()} changed. */ + int EVENT_CUES = Player.EVENT_CUES; + /** {@link Player#getDeviceInfo()} changed. */ + int EVENT_DEVICE_INFO_CHANGED = Player.EVENT_DEVICE_INFO_CHANGED; + /** {@link Player#getDeviceVolume()} changed. */ + int EVENT_DEVICE_VOLUME_CHANGED = Player.EVENT_DEVICE_VOLUME_CHANGED; + /** A source started loading data. */ + int EVENT_LOAD_STARTED = 1000; // Intentional gap to leave space for new Player events + /** A source started completed loading data. */ + int EVENT_LOAD_COMPLETED = 1001; + /** A source canceled loading data. */ + int EVENT_LOAD_CANCELED = 1002; + /** A source had a non-fatal error loading data. */ + int EVENT_LOAD_ERROR = 1003; + /** The downstream format sent to renderers changed. */ + int EVENT_DOWNSTREAM_FORMAT_CHANGED = 1004; + /** Data was removed from the end of the media buffer. */ + int EVENT_UPSTREAM_DISCARDED = 1005; + /** The bandwidth estimate has been updated. 
*/ + int EVENT_BANDWIDTH_ESTIMATE = 1006; + /** An audio renderer was enabled. */ + int EVENT_AUDIO_ENABLED = 1007; + /** An audio renderer created a decoder. */ + int EVENT_AUDIO_DECODER_INITIALIZED = 1008; + /** The format consumed by an audio renderer changed. */ + int EVENT_AUDIO_INPUT_FORMAT_CHANGED = 1009; + /** The audio position has increased for the first time since the last pause or position reset. */ + int EVENT_AUDIO_POSITION_ADVANCING = 1010; + /** An audio underrun occurred. */ + int EVENT_AUDIO_UNDERRUN = 1011; + /** An audio renderer released a decoder. */ + int EVENT_AUDIO_DECODER_RELEASED = 1012; + /** An audio renderer was disabled. */ + int EVENT_AUDIO_DISABLED = 1013; + /** The audio sink encountered a non-fatal error. */ + int EVENT_AUDIO_SINK_ERROR = 1014; + /** A video renderer was enabled. */ + int EVENT_VIDEO_ENABLED = 1015; + /** A video renderer created a decoder. */ + int EVENT_VIDEO_DECODER_INITIALIZED = 1016; + /** The format consumed by a video renderer changed. */ + int EVENT_VIDEO_INPUT_FORMAT_CHANGED = 1017; + /** Video frames have been dropped. */ + int EVENT_DROPPED_VIDEO_FRAMES = 1018; + /** A video renderer released a decoder. */ + int EVENT_VIDEO_DECODER_RELEASED = 1019; + /** A video renderer was disabled. */ + int EVENT_VIDEO_DISABLED = 1020; + /** Video frame processing offset data has been reported. */ + int EVENT_VIDEO_FRAME_PROCESSING_OFFSET = 1021; + /** A DRM session has been acquired. */ + int EVENT_DRM_SESSION_ACQUIRED = 1022; + /** DRM keys were loaded. */ + int EVENT_DRM_KEYS_LOADED = 1023; + /** A non-fatal DRM session manager error occurred. */ + int EVENT_DRM_SESSION_MANAGER_ERROR = 1024; + /** DRM keys were restored. */ + int EVENT_DRM_KEYS_RESTORED = 1025; + /** DRM keys were removed. */ + int EVENT_DRM_KEYS_REMOVED = 1026; + /** A DRM session has been released. */ + int EVENT_DRM_SESSION_RELEASED = 1027; + /** The player was released. */ + int EVENT_PLAYER_RELEASED = 1028; + /** The audio codec encountered an error. */ + int EVENT_AUDIO_CODEC_ERROR = 1029; + /** The video codec encountered an error. */ + int EVENT_VIDEO_CODEC_ERROR = 1030; + /** Time information of an event. */ final class EventTime { @@ -56,7 +374,7 @@ final class EventTime { */ public final long realtimeMs; - /** Timeline at the time of the event. */ + /** Most recent {@link Timeline} that contains the event position. */ public final Timeline timeline; /** @@ -66,8 +384,8 @@ final class EventTime { public final int windowIndex; /** - * Media period identifier for the media period this event belongs to, or {@code null} if the - * event is not associated with a specific media period. + * {@link MediaPeriodId Media period identifier} for the media period this event belongs to, or + * {@code null} if the event is not associated with a specific media period. */ @Nullable public final MediaPeriodId mediaPeriodId; @@ -77,8 +395,27 @@ final class EventTime { public final long eventPlaybackPositionMs; /** - * Position in the current timeline window ({@link Player#getCurrentWindowIndex()}) or the - * currently playing ad at the time of the event, in milliseconds. + * The current {@link Timeline} at the time of the event (equivalent to {@link + * Player#getCurrentTimeline()}). + */ + public final Timeline currentTimeline; + + /** + * The current window index in {@link #currentTimeline} at the time of the event, or the + * prospective window index if the timeline is not yet known and empty (equivalent to {@link + * Player#getCurrentMediaItemIndex()}). 
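// Usage sketch (illustrative, not part of the patch): instead of overriding individual callbacks,
// a listener can implement the aggregated onEvents(Player, Events) callback referenced above and
// query the Events set with the EVENT_* flags. Assumes an ExoPlayer 2.x player instance that
// exposes addAnalyticsListener(AnalyticsListener); android.util.Log is used only for demonstration.
exoPlayer.addAnalyticsListener(
    new AnalyticsListener() {
      @Override
      public void onEvents(Player player, AnalyticsListener.Events events) {
        if (events.containsAny(
            AnalyticsListener.EVENT_PLAYBACK_STATE_CHANGED,
            AnalyticsListener.EVENT_PLAY_WHEN_READY_CHANGED)) {
          // Both flags are reported in the same pass, so player state can be read consistently here.
          Log.d("Analytics",
              "state=" + player.getPlaybackState() + " playWhenReady=" + player.getPlayWhenReady());
        }
        if (events.contains(AnalyticsListener.EVENT_PLAYER_ERROR)) {
          AnalyticsListener.EventTime errorTime =
              events.getEventTime(AnalyticsListener.EVENT_PLAYER_ERROR);
          Log.d("Analytics", "player error at " + errorTime.currentPlaybackPositionMs + " ms");
        }
      }
    });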
+ */ + public final int currentWindowIndex; + + /** + * {@link MediaPeriodId Media period identifier} for the currently playing media period at the + * time of the event, or {@code null} if no current media period identifier is available. + */ + @Nullable public final MediaPeriodId currentMediaPeriodId; + + /** + * Position in the {@link #currentWindowIndex current timeline window} or the currently playing + * ad at the time of the event, in milliseconds. */ public final long currentPlaybackPositionMs; @@ -91,19 +428,27 @@ final class EventTime { /** * @param realtimeMs Elapsed real-time as returned by {@code SystemClock.elapsedRealtime()} at * the time of the event, in milliseconds. - * @param timeline Timeline at the time of the event. - * @param windowIndex Window index in the {@link #timeline} this event belongs to, or the + * @param timeline Most recent {@link Timeline} that contains the event position. + * @param windowIndex Window index in the {@code timeline} this event belongs to, or the * prospective window index if the timeline is not yet known and empty. - * @param mediaPeriodId Media period identifier for the media period this event belongs to, or - * {@code null} if the event is not associated with a specific media period. + * @param mediaPeriodId {@link MediaPeriodId Media period identifier} for the media period this + * event belongs to, or {@code null} if the event is not associated with a specific media + * period. * @param eventPlaybackPositionMs Position in the window or ad this event belongs to at the time * of the event, in milliseconds. - * @param currentPlaybackPositionMs Position in the current timeline window ({@link - * Player#getCurrentWindowIndex()}) or the currently playing ad at the time of the event, in - * milliseconds. - * @param totalBufferedDurationMs Total buffered duration from {@link - * #currentPlaybackPositionMs} at the time of the event, in milliseconds. This includes - * pre-buffered data for subsequent ads and windows. + * @param currentTimeline The current {@link Timeline} at the time of the event (equivalent to + * {@link Player#getCurrentTimeline()}). + * @param currentWindowIndex The current window index in {@code currentTimeline} at the time of + * the event, or the prospective window index if the timeline is not yet known and empty + * (equivalent to {@link Player#getCurrentMediaItemIndex()}). + * @param currentMediaPeriodId {@link MediaPeriodId Media period identifier} for the currently + * playing media period at the time of the event, or {@code null} if no current media period + * identifier is available. + * @param currentPlaybackPositionMs Position in the current timeline window or the currently + * playing ad at the time of the event, in milliseconds. + * @param totalBufferedDurationMs Total buffered duration from {@code currentPlaybackPositionMs} + * at the time of the event, in milliseconds. This includes pre-buffered data for subsequent + * ads and windows. 
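// Aside (illustrative, not part of the patch): the expanded EventTime distinguishes the position
// the event belongs to (timeline/windowIndex/eventPlaybackPositionMs) from the player's position
// at the time of the event (currentTimeline/currentWindowIndex/currentPlaybackPositionMs). A
// hypothetical logging helper using only fields declared above:
private static String describe(AnalyticsListener.EventTime eventTime) {
  return "event[window=" + eventTime.windowIndex
      + " pos=" + eventTime.eventPlaybackPositionMs + "ms]"
      + " player[window=" + eventTime.currentWindowIndex
      + " pos=" + eventTime.currentPlaybackPositionMs + "ms"
      + " buffered=" + eventTime.totalBufferedDurationMs + "ms]";
}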
*/ public EventTime( long realtimeMs, @@ -111,6 +456,9 @@ public EventTime( int windowIndex, @Nullable MediaPeriodId mediaPeriodId, long eventPlaybackPositionMs, + Timeline currentTimeline, + int currentWindowIndex, + @Nullable MediaPeriodId currentMediaPeriodId, long currentPlaybackPositionMs, long totalBufferedDurationMs) { this.realtimeMs = realtimeMs; @@ -118,21 +466,76 @@ public EventTime( this.windowIndex = windowIndex; this.mediaPeriodId = mediaPeriodId; this.eventPlaybackPositionMs = eventPlaybackPositionMs; + this.currentTimeline = currentTimeline; + this.currentWindowIndex = currentWindowIndex; + this.currentMediaPeriodId = currentMediaPeriodId; this.currentPlaybackPositionMs = currentPlaybackPositionMs; this.totalBufferedDurationMs = totalBufferedDurationMs; } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + EventTime eventTime = (EventTime) o; + return realtimeMs == eventTime.realtimeMs + && windowIndex == eventTime.windowIndex + && eventPlaybackPositionMs == eventTime.eventPlaybackPositionMs + && currentWindowIndex == eventTime.currentWindowIndex + && currentPlaybackPositionMs == eventTime.currentPlaybackPositionMs + && totalBufferedDurationMs == eventTime.totalBufferedDurationMs + && Objects.equal(timeline, eventTime.timeline) + && Objects.equal(mediaPeriodId, eventTime.mediaPeriodId) + && Objects.equal(currentTimeline, eventTime.currentTimeline) + && Objects.equal(currentMediaPeriodId, eventTime.currentMediaPeriodId); + } + + @Override + public int hashCode() { + return Objects.hashCode( + realtimeMs, + timeline, + windowIndex, + mediaPeriodId, + eventPlaybackPositionMs, + currentTimeline, + currentWindowIndex, + currentMediaPeriodId, + currentPlaybackPositionMs, + totalBufferedDurationMs); + } } /** - * Called when the player state changed. - * - * @param eventTime The event time. - * @param playWhenReady Whether the playback will proceed when ready. - * @param playbackState The new {@link Player.State playback state}. + * @deprecated Use {@link #onPlaybackStateChanged(EventTime, int)} and {@link + * #onPlayWhenReadyChanged(EventTime, boolean, int)} instead. */ + @Deprecated default void onPlayerStateChanged( EventTime eventTime, boolean playWhenReady, @Player.State int playbackState) {} + /** + * Called when the playback state changed. + * + * @param eventTime The event time. + * @param state The new {@link Player.State playback state}. + */ + default void onPlaybackStateChanged(EventTime eventTime, @Player.State int state) {} + + /** + * Called when the value changed that indicates whether playback will proceed when ready. + * + * @param eventTime The event time. + * @param playWhenReady Whether playback will proceed when ready. + * @param reason The {@link Player.PlayWhenReadyChangeReason reason} of the change. + */ + default void onPlayWhenReadyChanged( + EventTime eventTime, boolean playWhenReady, @Player.PlayWhenReadyChangeReason int reason) {} + /** * Called when playback suppression reason changed. * @@ -159,25 +562,51 @@ default void onIsPlayingChanged(EventTime eventTime, boolean isPlaying) {} default void onTimelineChanged(EventTime eventTime, @TimelineChangeReason int reason) {} /** - * Called when a position discontinuity occurred. + * Called when playback transitions to a different media item. * * @param eventTime The event time. - * @param reason The reason for the position discontinuity. + * @param mediaItem The media item. 
+ * @param reason The reason for the media item transition. + */ + default void onMediaItemTransition( + EventTime eventTime, + @Nullable MediaItem mediaItem, + @Player.MediaItemTransitionReason int reason) {} + + /** + * @deprecated Use {@link #onPositionDiscontinuity(EventTime, Player.PositionInfo, + * Player.PositionInfo, int)} instead. */ + @Deprecated default void onPositionDiscontinuity(EventTime eventTime, @DiscontinuityReason int reason) {} /** - * Called when a seek operation started. + * Called when a position discontinuity occurred. * * @param eventTime The event time. + * @param oldPosition The position before the discontinuity. + * @param newPosition The position after the discontinuity. + * @param reason The reason for the position discontinuity. + */ + default void onPositionDiscontinuity( + EventTime eventTime, + Player.PositionInfo oldPosition, + Player.PositionInfo newPosition, + @DiscontinuityReason int reason) {} + + /** + * @deprecated Use {@link #onPositionDiscontinuity(EventTime, Player.PositionInfo, + * Player.PositionInfo, int)} instead, listening to changes with {@link + * Player#DISCONTINUITY_REASON_SEEK}. */ + @Deprecated default void onSeekStarted(EventTime eventTime) {} /** - * Called when a seek operation was processed. - * - * @param eventTime The event time. + * @deprecated Seeks are processed without delay. Use {@link #onPositionDiscontinuity(EventTime, + * int)} with reason {@link Player#DISCONTINUITY_REASON_SEEK} instead. */ + @Deprecated default void onSeekProcessed(EventTime eventTime) {} /** @@ -189,6 +618,32 @@ default void onSeekProcessed(EventTime eventTime) {} default void onPlaybackParametersChanged( EventTime eventTime, PlaybackParameters playbackParameters) {} + /** + * Called when the seek back increment changed. + * + * @param eventTime The event time. + * @param seekBackIncrementMs The seek back increment, in milliseconds. + */ + default void onSeekBackIncrementChanged(EventTime eventTime, long seekBackIncrementMs) {} + + /** + * Called when the seek forward increment changed. + * + * @param eventTime The event time. + * @param seekForwardIncrementMs The seek forward increment, in milliseconds. + */ + default void onSeekForwardIncrementChanged(EventTime eventTime, long seekForwardIncrementMs) {} + + /** + * Called when the maximum position for which {@link Player#seekToPrevious()} seeks to the + * previous window changes. + * + * @param eventTime The event time. + * @param maxSeekToPreviousPositionMs The maximum seek to previous position, in milliseconds. + */ + default void onMaxSeekToPreviousPositionChanged( + EventTime eventTime, long maxSeekToPreviousPositionMs) {} + /** * Called when the repeat mode changed. * @@ -211,25 +666,80 @@ default void onShuffleModeChanged(EventTime eventTime, boolean shuffleModeEnable * @param eventTime The event time. * @param isLoading Whether the player is loading. */ + default void onIsLoadingChanged(EventTime eventTime, boolean isLoading) {} + + /** + * @deprecated Use {@link #onIsLoadingChanged(EventTime, boolean)} instead. + */ + @Deprecated default void onLoadingChanged(EventTime eventTime, boolean isLoading) {} + /** + * Called when the player's available commands changed. + * + * @param eventTime The event time. + * @param availableCommands The available commands. + */ + default void onAvailableCommandsChanged(EventTime eventTime, Player.Commands availableCommands) {} + /** * Called when a fatal player error occurred. * + *
<p>
      Implementations of {@link Player} may pass an instance of a subclass of {@link + * PlaybackException} to this method in order to include more information about the error. + * * @param eventTime The event time. * @param error The error. */ - default void onPlayerError(EventTime eventTime, ExoPlaybackException error) {} + default void onPlayerError(EventTime eventTime, PlaybackException error) {} + + /** + * Called when the {@link PlaybackException} returned by {@link Player#getPlayerError()} changes. + * + *
<p>
      Implementations of Player may pass an instance of a subclass of {@link PlaybackException} to + * this method in order to include more information about the error. + * + * @param eventTime The event time. + * @param error The new error, or null if the error is being cleared. + */ + default void onPlayerErrorChanged(EventTime eventTime, @Nullable PlaybackException error) {} + + /** + * Called when the tracks change. + * + * @param eventTime The event time. + * @param tracks The tracks. Never null, but may be of length zero. + */ + default void onTracksChanged(EventTime eventTime, Tracks tracks) {} /** - * Called when the available or selected tracks for the renderers changed. + * Called when track selection parameters change. * * @param eventTime The event time. - * @param trackGroups The available tracks. May be empty. - * @param trackSelections The track selections for each renderer. May contain null elements. + * @param trackSelectionParameters The new {@link TrackSelectionParameters}. */ - default void onTracksChanged( - EventTime eventTime, TrackGroupArray trackGroups, TrackSelectionArray trackSelections) {} + default void onTrackSelectionParametersChanged( + EventTime eventTime, TrackSelectionParameters trackSelectionParameters) {} + + /** + * Called when the combined {@link MediaMetadata} changes. + * + *
<p>
      The provided {@link MediaMetadata} is a combination of the {@link MediaItem#mediaMetadata} + * and the static and dynamic metadata from the {@link TrackSelection#getFormat(int) track + * selections' formats} and {@link MetadataOutput#onMetadata(Metadata)}. + * + * @param eventTime The event time. + * @param mediaMetadata The combined {@link MediaMetadata}. + */ + default void onMediaMetadataChanged(EventTime eventTime, MediaMetadata mediaMetadata) {} + + /** + * Called when the playlist {@link MediaMetadata} changes. + * + * @param eventTime The event time. + * @param playlistMetadata The playlist {@link MediaMetadata}. + */ + default void onPlaylistMetadataChanged(EventTime eventTime, MediaMetadata playlistMetadata) {} /** * Called when a media source started loading data. @@ -262,8 +772,14 @@ default void onLoadCanceled( EventTime eventTime, LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData) {} /** - * Called when a media source loading error occurred. These errors are just for informational - * purposes and the player may recover. + * Called when a media source loading error occurred. + * + *
<p>
      This method being called does not indicate that playback has failed, or that it will fail. + * The player may be able to recover from the error. Hence applications should not + * implement this method to display a user visible error or initiate an application level retry. + * {@link Player.Listener#onPlayerError} is the appropriate place to implement such behavior. This + * method is called to provide the application with an opportunity to log the error if it wishes + * to do so. * * @param eventTime The event time. * @param loadEventInfo The {@link LoadEventInfo} defining the load event. @@ -296,107 +812,173 @@ default void onDownstreamFormatChanged(EventTime eventTime, MediaLoadData mediaL default void onUpstreamDiscarded(EventTime eventTime, MediaLoadData mediaLoadData) {} /** - * Called when a media source created a media period. + * Called when the bandwidth estimate for the current data source has been updated. * * @param eventTime The event time. + * @param totalLoadTimeMs The total time spend loading this update is based on, in milliseconds. + * @param totalBytesLoaded The total bytes loaded this update is based on. + * @param bitrateEstimate The bandwidth estimate, in bits per second. */ - default void onMediaPeriodCreated(EventTime eventTime) {} + default void onBandwidthEstimate( + EventTime eventTime, int totalLoadTimeMs, long totalBytesLoaded, long bitrateEstimate) {} /** - * Called when a media source released a media period. + * Called when there is {@link Metadata} associated with the current playback time. * * @param eventTime The event time. + * @param metadata The metadata. */ - default void onMediaPeriodReleased(EventTime eventTime) {} + default void onMetadata(EventTime eventTime, Metadata metadata) {} /** - * Called when the player started reading a media period. + * Called when there is a change in the {@link Cue Cues}. + * + *
<p>
      Both {@link #onCues(EventTime, List)} and {@link #onCues(EventTime, CueGroup)} are called + * when there is a change in the cues. You should only implement one or the other. * * @param eventTime The event time. + * @param cues The {@link Cue Cues}. + * @deprecated Use {@link #onCues(EventTime, CueGroup)} instead. */ - default void onReadingStarted(EventTime eventTime) {} + @Deprecated + default void onCues(EventTime eventTime, List cues) {} /** - * Called when the bandwidth estimate for the current data source has been updated. + * Called when there is a change in the {@link CueGroup}. + * + *
<p>
      Both {@link #onCues(EventTime, List)} and {@link #onCues(EventTime, CueGroup)} are called + * when there is a change in the cues. You should only implement one or the other. * * @param eventTime The event time. - * @param totalLoadTimeMs The total time spend loading this update is based on, in milliseconds. - * @param totalBytesLoaded The total bytes loaded this update is based on. - * @param bitrateEstimate The bandwidth estimate, in bits per second. + * @param cueGroup The {@link CueGroup}. */ - default void onBandwidthEstimate( - EventTime eventTime, int totalLoadTimeMs, long totalBytesLoaded, long bitrateEstimate) {} + default void onCues(EventTime eventTime, CueGroup cueGroup) {} /** - * Called when the output surface size changed. + * @deprecated Use {@link #onAudioEnabled} and {@link #onVideoEnabled} instead. + */ + @Deprecated + default void onDecoderEnabled( + EventTime eventTime, int trackType, DecoderCounters decoderCounters) {} + + /** + * @deprecated Use {@link #onAudioDecoderInitialized} and {@link #onVideoDecoderInitialized} + * instead. + */ + @Deprecated + default void onDecoderInitialized( + EventTime eventTime, int trackType, String decoderName, long initializationDurationMs) {} + + /** + * @deprecated Use {@link #onAudioInputFormatChanged(EventTime, Format, DecoderReuseEvaluation)} + * and {@link #onVideoInputFormatChanged(EventTime, Format, DecoderReuseEvaluation)}. instead. + */ + @Deprecated + default void onDecoderInputFormatChanged(EventTime eventTime, int trackType, Format format) {} + + /** + * @deprecated Use {@link #onAudioDisabled} and {@link #onVideoDisabled} instead. + */ + @Deprecated + default void onDecoderDisabled( + EventTime eventTime, int trackType, DecoderCounters decoderCounters) {} + + /** + * Called when an audio renderer is enabled. * * @param eventTime The event time. - * @param width The surface width in pixels. May be {@link C#LENGTH_UNSET} if unknown, or 0 if the - * video is not rendered onto a surface. - * @param height The surface height in pixels. May be {@link C#LENGTH_UNSET} if unknown, or 0 if - * the video is not rendered onto a surface. + * @param decoderCounters {@link DecoderCounters} that will be updated by the renderer for as long + * as it remains enabled. */ - default void onSurfaceSizeChanged(EventTime eventTime, int width, int height) {} + default void onAudioEnabled(EventTime eventTime, DecoderCounters decoderCounters) {} /** - * Called when there is {@link Metadata} associated with the current playback time. + * Called when an audio renderer creates a decoder. * * @param eventTime The event time. - * @param metadata The metadata. + * @param decoderName The decoder that was created. + * @param initializedTimestampMs {@link SystemClock#elapsedRealtime()} when initialization + * finished. + * @param initializationDurationMs The time taken to initialize the decoder in milliseconds. */ - default void onMetadata(EventTime eventTime, Metadata metadata) {} + default void onAudioDecoderInitialized( + EventTime eventTime, + String decoderName, + long initializedTimestampMs, + long initializationDurationMs) {} + + /** + * @deprecated Use {@link #onAudioDecoderInitialized(EventTime, String, long, long)}. + */ + @Deprecated + default void onAudioDecoderInitialized( + EventTime eventTime, String decoderName, long initializationDurationMs) {} + + /** + * @deprecated Use {@link #onAudioInputFormatChanged(EventTime, Format, DecoderReuseEvaluation)}. 
+ */ + @Deprecated + default void onAudioInputFormatChanged(EventTime eventTime, Format format) {} /** - * Called when an audio or video decoder has been enabled. + * Called when the format of the media being consumed by an audio renderer changes. * * @param eventTime The event time. - * @param trackType The track type of the enabled decoder. Either {@link C#TRACK_TYPE_AUDIO} or - * {@link C#TRACK_TYPE_VIDEO}. - * @param decoderCounters The accumulated event counters associated with this decoder. + * @param format The new format. + * @param decoderReuseEvaluation The result of the evaluation to determine whether an existing + * decoder instance can be reused for the new format, or {@code null} if the renderer did not + * have a decoder. */ - default void onDecoderEnabled( - EventTime eventTime, int trackType, DecoderCounters decoderCounters) {} + default void onAudioInputFormatChanged( + EventTime eventTime, + Format format, + @Nullable DecoderReuseEvaluation decoderReuseEvaluation) {} /** - * Called when an audio or video decoder has been initialized. + * Called when the audio position has increased for the first time since the last pause or + * position reset. * * @param eventTime The event time. - * @param trackType The track type of the initialized decoder. Either {@link C#TRACK_TYPE_AUDIO} - * or {@link C#TRACK_TYPE_VIDEO}. - * @param decoderName The decoder that was created. - * @param initializationDurationMs Time taken to initialize the decoder, in milliseconds. + * @param playoutStartSystemTimeMs The approximate derived {@link System#currentTimeMillis()} at + * which playout started. */ - default void onDecoderInitialized( - EventTime eventTime, int trackType, String decoderName, long initializationDurationMs) {} + default void onAudioPositionAdvancing(EventTime eventTime, long playoutStartSystemTimeMs) {} /** - * Called when an audio or video decoder input format changed. + * Called when an audio underrun occurs. * * @param eventTime The event time. - * @param trackType The track type of the decoder whose format changed. Either {@link - * C#TRACK_TYPE_AUDIO} or {@link C#TRACK_TYPE_VIDEO}. - * @param format The new input format for the decoder. + * @param bufferSize The size of the audio output buffer, in bytes. + * @param bufferSizeMs The size of the audio output buffer, in milliseconds, if it contains PCM + * encoded audio. {@link C#TIME_UNSET} if the output buffer contains non-PCM encoded audio. + * @param elapsedSinceLastFeedMs The time since audio was last written to the output buffer. */ - default void onDecoderInputFormatChanged(EventTime eventTime, int trackType, Format format) {} + default void onAudioUnderrun( + EventTime eventTime, int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {} /** - * Called when an audio or video decoder has been disabled. + * Called when an audio renderer releases a decoder. * * @param eventTime The event time. - * @param trackType The track type of the disabled decoder. Either {@link C#TRACK_TYPE_AUDIO} or - * {@link C#TRACK_TYPE_VIDEO}. - * @param decoderCounters The accumulated event counters associated with this decoder. + * @param decoderName The decoder that was released. */ - default void onDecoderDisabled( - EventTime eventTime, int trackType, DecoderCounters decoderCounters) {} + default void onAudioDecoderReleased(EventTime eventTime, String decoderName) {} + + /** + * Called when an audio renderer is disabled. + * + * @param eventTime The event time. 
+ * @param decoderCounters {@link DecoderCounters} that were updated by the renderer. + */ + default void onAudioDisabled(EventTime eventTime, DecoderCounters decoderCounters) {} /** - * Called when the audio session id is set. + * Called when the audio session ID changes. * * @param eventTime The event time. - * @param audioSessionId The audio session id. + * @param audioSessionId The audio session ID. */ - default void onAudioSessionId(EventTime eventTime, int audioSessionId) {} + default void onAudioSessionIdChanged(EventTime eventTime, int audioSessionId) {} /** * Called when the audio attributes change. @@ -406,6 +988,47 @@ default void onAudioSessionId(EventTime eventTime, int audioSessionId) {} */ default void onAudioAttributesChanged(EventTime eventTime, AudioAttributes audioAttributes) {} + /** + * Called when skipping silences is enabled or disabled in the audio stream. + * + * @param eventTime The event time. + * @param skipSilenceEnabled Whether skipping silences in the audio stream is enabled. + */ + default void onSkipSilenceEnabledChanged(EventTime eventTime, boolean skipSilenceEnabled) {} + + /** + * Called when {@link AudioSink} has encountered an error. + * + *
<p>
      This method being called does not indicate that playback has failed, or that it will fail. + * The player may be able to recover from the error. Hence applications should not + * implement this method to display a user visible error or initiate an application level retry. + * {@link Player.Listener#onPlayerError} is the appropriate place to implement such behavior. This + * method is called to provide the application with an opportunity to log the error if it wishes + * to do so. + * + * @param eventTime The event time. + * @param audioSinkError The error that occurred. Typically an {@link + * AudioSink.InitializationException}, a {@link AudioSink.WriteException}, or an {@link + * AudioSink.UnexpectedDiscontinuityException}. + */ + default void onAudioSinkError(EventTime eventTime, Exception audioSinkError) {} + + /** + * Called when an audio decoder encounters an error. + * + *
<p>
      This method being called does not indicate that playback has failed, or that it will fail. + * The player may be able to recover from the error. Hence applications should not + * implement this method to display a user visible error or initiate an application level retry. + * {@link Player.Listener#onPlayerError} is the appropriate place to implement such behavior. This + * method is called to provide the application with an opportunity to log the error if it wishes + * to do so. + * + * @param eventTime The event time. + * @param audioCodecError The error. Typically a {@link CodecException} if the renderer uses + * {@link MediaCodec}, or a {@link DecoderException} if the renderer uses a software decoder. + */ + default void onAudioCodecError(EventTime eventTime, Exception audioCodecError) {} + /** * Called when the volume changes. * @@ -415,17 +1038,72 @@ default void onAudioAttributesChanged(EventTime eventTime, AudioAttributes audio default void onVolumeChanged(EventTime eventTime, float volume) {} /** - * Called when an audio underrun occurred. + * Called when the device information changes * * @param eventTime The event time. - * @param bufferSize The size of the {@link AudioSink}'s buffer, in bytes. - * @param bufferSizeMs The size of the {@link AudioSink}'s buffer, in milliseconds, if it is - * configured for PCM output. {@link C#TIME_UNSET} if it is configured for passthrough output, - * as the buffered media can have a variable bitrate so the duration may be unknown. - * @param elapsedSinceLastFeedMs The time since the {@link AudioSink} was last fed data. + * @param deviceInfo The new {@link DeviceInfo}. */ - default void onAudioUnderrun( - EventTime eventTime, int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {} + default void onDeviceInfoChanged(EventTime eventTime, DeviceInfo deviceInfo) {} + + /** + * Called when the device volume or mute state changes. + * + * @param eventTime The event time. + * @param volume The new device volume, with 0 being silence and 1 being unity gain. + * @param muted Whether the device is muted. + */ + default void onDeviceVolumeChanged(EventTime eventTime, int volume, boolean muted) {} + + /** + * Called when a video renderer is enabled. + * + * @param eventTime The event time. + * @param decoderCounters {@link DecoderCounters} that will be updated by the renderer for as long + * as it remains enabled. + */ + default void onVideoEnabled(EventTime eventTime, DecoderCounters decoderCounters) {} + + /** + * Called when a video renderer creates a decoder. + * + * @param eventTime The event time. + * @param decoderName The decoder that was created. + * @param initializedTimestampMs {@link SystemClock#elapsedRealtime()} when initialization + * finished. + * @param initializationDurationMs The time taken to initialize the decoder in milliseconds. + */ + default void onVideoDecoderInitialized( + EventTime eventTime, + String decoderName, + long initializedTimestampMs, + long initializationDurationMs) {} + + /** + * @deprecated Use {@link #onVideoDecoderInitialized(EventTime, String, long, long)}. + */ + @Deprecated + default void onVideoDecoderInitialized( + EventTime eventTime, String decoderName, long initializationDurationMs) {} + + /** + * @deprecated Use {@link #onVideoInputFormatChanged(EventTime, Format, DecoderReuseEvaluation)}. + */ + @Deprecated + default void onVideoInputFormatChanged(EventTime eventTime, Format format) {} + + /** + * Called when the format of the media being consumed by a video renderer changes. 
+ * + * @param eventTime The event time. + * @param format The new format. + * @param decoderReuseEvaluation The result of the evaluation to determine whether an existing + * decoder instance can be reused for the new format, or {@code null} if the renderer did not + * have a decoder. + */ + default void onVideoInputFormatChanged( + EventTime eventTime, + Format format, + @Nullable DecoderReuseEvaluation decoderReuseEvaluation) {} /** * Called after video frames have been dropped. @@ -438,19 +1116,80 @@ default void onAudioUnderrun( */ default void onDroppedVideoFrames(EventTime eventTime, int droppedFrames, long elapsedMs) {} + /** + * Called when a video renderer releases a decoder. + * + * @param eventTime The event time. + * @param decoderName The decoder that was released. + */ + default void onVideoDecoderReleased(EventTime eventTime, String decoderName) {} + + /** + * Called when a video renderer is disabled. + * + * @param eventTime The event time. + * @param decoderCounters {@link DecoderCounters} that were updated by the renderer. + */ + default void onVideoDisabled(EventTime eventTime, DecoderCounters decoderCounters) {} + + /** + * Called when there is an update to the video frame processing offset reported by a video + * renderer. + * + *
<p>
The processing offset for a video frame is the difference between the time at which the + * frame became available to render, and the time at which it was scheduled to be rendered. A + * positive value indicates the frame became available early enough, whereas a negative value + * indicates that the frame wasn't available until after the time at which it should have been + * rendered. + * + * @param eventTime The event time. + * @param totalProcessingOffsetUs The sum of the video frame processing offsets for frames + * rendered since the last call to this method. + * @param frameCount The number of samples included in {@code totalProcessingOffsetUs}. + */ + default void onVideoFrameProcessingOffset( + EventTime eventTime, long totalProcessingOffsetUs, int frameCount) {} + + /** + * Called when a video decoder encounters an error. + * + *
<p>
      This method being called does not indicate that playback has failed, or that it will fail. + * The player may be able to recover from the error. Hence applications should not + * implement this method to display a user visible error or initiate an application level retry. + * {@link Player.Listener#onPlayerError} is the appropriate place to implement such behavior. This + * method is called to provide the application with an opportunity to log the error if it wishes + * to do so. + * + * @param eventTime The event time. + * @param videoCodecError The error. Typically a {@link CodecException} if the renderer uses + * {@link MediaCodec}, or a {@link DecoderException} if the renderer uses a software decoder. + */ + default void onVideoCodecError(EventTime eventTime, Exception videoCodecError) {} + + /** + * Called when a frame is rendered for the first time since setting the surface, or since the + * renderer was reset, or since the stream being rendered was changed. + * + * @param eventTime The event time. + * @param output The output to which a frame has been rendered. Normally a {@link Surface}, + * however may also be other output types (e.g., a {@link VideoDecoderOutputBufferRenderer}). + * @param renderTimeMs {@link SystemClock#elapsedRealtime()} when the first frame was rendered. + */ + default void onRenderedFirstFrame(EventTime eventTime, Object output, long renderTimeMs) {} + /** * Called before a frame is rendered for the first time since setting the surface, and each time * there's a change in the size or pixel aspect ratio of the video being rendered. * * @param eventTime The event time. - * @param width The width of the video. - * @param height The height of the video. - * @param unappliedRotationDegrees For videos that require a rotation, this is the clockwise - * rotation in degrees that the application should apply for the video for it to be rendered - * in the correct orientation. This value will always be zero on API levels 21 and above, - * since the renderer will apply all necessary rotations internally. - * @param pixelWidthHeightRatio The width to height ratio of each pixel. + * @param videoSize The new size of the video. */ + default void onVideoSizeChanged(EventTime eventTime, VideoSize videoSize) {} + + /** + * @deprecated Implement {@link #onVideoSizeChanged(EventTime eventTime, VideoSize)} instead. + */ + @Deprecated default void onVideoSizeChanged( EventTime eventTime, int width, @@ -459,21 +1198,29 @@ default void onVideoSizeChanged( float pixelWidthHeightRatio) {} /** - * Called when a frame is rendered for the first time since setting the surface, and when a frame - * is rendered for the first time since the renderer was reset. + * Called when the output surface size changed. * * @param eventTime The event time. - * @param surface The {@link Surface} to which a first frame has been rendered, or {@code null} if - * the renderer renders to something that isn't a {@link Surface}. + * @param width The surface width in pixels. May be {@link C#LENGTH_UNSET} if unknown, or 0 if the + * video is not rendered onto a surface. + * @param height The surface height in pixels. May be {@link C#LENGTH_UNSET} if unknown, or 0 if + * the video is not rendered onto a surface. + */ + default void onSurfaceSizeChanged(EventTime eventTime, int width, int height) {} + + /** + * @deprecated Implement {@link #onDrmSessionAcquired(EventTime, int)} instead. 
*/ - default void onRenderedFirstFrame(EventTime eventTime, @Nullable Surface surface) {} + @Deprecated + default void onDrmSessionAcquired(EventTime eventTime) {} /** * Called each time a drm session is acquired. * * @param eventTime The event time. + * @param state The {@link DrmSession.State} of the session when the acquisition completed. */ - default void onDrmSessionAcquired(EventTime eventTime) {} + default void onDrmSessionAcquired(EventTime eventTime, @DrmSession.State int state) {} /** * Called each time drm keys are loaded. @@ -483,8 +1230,14 @@ default void onDrmSessionAcquired(EventTime eventTime) {} default void onDrmKeysLoaded(EventTime eventTime) {} /** - * Called when a drm error occurs. These errors are just for informational purposes and the player - * may recover. + * Called when a drm error occurs. + * + *
<p>
      This method being called does not indicate that playback has failed, or that it will fail. + * The player may be able to recover from the error. Hence applications should not + * implement this method to display a user visible error or initiate an application level retry. + * {@link Player.Listener#onPlayerError} is the appropriate place to implement such behavior. This + * method is called to provide the application with an opportunity to log the error if it wishes + * to do so. * * @param eventTime The event time. * @param error The error. @@ -511,4 +1264,40 @@ default void onDrmKeysRemoved(EventTime eventTime) {} * @param eventTime The event time. */ default void onDrmSessionReleased(EventTime eventTime) {} + + /** + * Called when the {@link Player} is released. + * + * @param eventTime The event time. + */ + default void onPlayerReleased(EventTime eventTime) {} + + /** + * Called after one or more events occurred. + * + *
<p>
      State changes and events that happen within one {@link Looper} message queue iteration are + * reported together and only after all individual callbacks were triggered. + * + *
<p>
      Listeners should prefer this method over individual callbacks in the following cases: + * + *
<ul>
        + *
      • They intend to trigger the same logic for multiple events (e.g. when updating a UI for + * both {@link #onPlaybackStateChanged(EventTime, int)} and {@link + * #onPlayWhenReadyChanged(EventTime, boolean, int)}). + *
      • They need access to the {@link Player} object to trigger further events (e.g. to call + * {@link Player#seekTo(long)} after a {@link + * AnalyticsListener#onMediaItemTransition(EventTime, MediaItem, int)}). + *
      • They intend to use multiple state values together or in combination with {@link Player} + * getter methods. For example using {@link Player#getCurrentMediaItemIndex()} with the + * {@code timeline} provided in {@link #onTimelineChanged(EventTime, int)} is only safe from + * within this method. + *
      • They are interested in events that logically happened together (e.g {@link + * #onPlaybackStateChanged(EventTime, int)} to {@link Player#STATE_BUFFERING} because of + * {@link #onMediaItemTransition(EventTime, MediaItem, int)}). + *
      + * + * @param player The {@link Player}. + * @param events The {@link Events} that occurred in this iteration. + */ + default void onEvents(Player player, Events events) {} } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/DefaultAnalyticsCollector.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/DefaultAnalyticsCollector.java new file mode 100644 index 0000000000..4d3eda9373 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/DefaultAnalyticsCollector.java @@ -0,0 +1,1223 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.analytics; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + +import android.os.Looper; +import android.util.SparseArray; +import androidx.annotation.CallSuper; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.DeviceInfo; +import com.google.android.exoplayer2.ExoPlaybackException; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.MediaItem; +import com.google.android.exoplayer2.MediaMetadata; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.PlaybackParameters; +import com.google.android.exoplayer2.Player; +import com.google.android.exoplayer2.Player.DiscontinuityReason; +import com.google.android.exoplayer2.Player.PlaybackSuppressionReason; +import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.Timeline.Period; +import com.google.android.exoplayer2.Timeline.Window; +import com.google.android.exoplayer2.Tracks; +import com.google.android.exoplayer2.analytics.AnalyticsListener.EventTime; +import com.google.android.exoplayer2.audio.AudioAttributes; +import com.google.android.exoplayer2.decoder.DecoderCounters; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation; +import com.google.android.exoplayer2.drm.DrmSession; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.source.LoadEventInfo; +import com.google.android.exoplayer2.source.MediaLoadData; +import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; +import com.google.android.exoplayer2.text.Cue; +import com.google.android.exoplayer2.text.CueGroup; +import com.google.android.exoplayer2.trackselection.TrackSelectionParameters; +import com.google.android.exoplayer2.util.Clock; +import com.google.android.exoplayer2.util.HandlerWrapper; +import com.google.android.exoplayer2.util.ListenerSet; +import com.google.android.exoplayer2.util.Util; +import com.google.android.exoplayer2.video.VideoSize; +import com.google.common.base.Objects; +import com.google.common.collect.ImmutableList; +import 
com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import java.io.IOException; +import java.util.List; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; + +/** + * Data collector that forwards analytics events to {@link AnalyticsListener AnalyticsListeners}. + */ +public class DefaultAnalyticsCollector implements AnalyticsCollector { + + private final Clock clock; + private final Period period; + private final Window window; + private final MediaPeriodQueueTracker mediaPeriodQueueTracker; + private final SparseArray eventTimes; + + private ListenerSet listeners; + private @MonotonicNonNull Player player; + private @MonotonicNonNull HandlerWrapper handler; + private boolean isSeeking; + + /** + * Creates an analytics collector. + * + * @param clock A {@link Clock} used to generate timestamps. + */ + public DefaultAnalyticsCollector(Clock clock) { + this.clock = checkNotNull(clock); + listeners = new ListenerSet<>(Util.getCurrentOrMainLooper(), clock, (listener, flags) -> {}); + period = new Period(); + window = new Window(); + mediaPeriodQueueTracker = new MediaPeriodQueueTracker(period); + eventTimes = new SparseArray<>(); + } + + /** + * Sets whether methods throw when using the wrong thread. + * + *
<p>
      Do not use this method unless to support legacy use cases. + * + * @param throwsWhenUsingWrongThread Whether to throw when using the wrong thread. + * @deprecated Do not use this method and ensure all calls are made from the correct thread. + */ + @SuppressWarnings("deprecation") // Calling deprecated method. + @Deprecated + public void setThrowsWhenUsingWrongThread(boolean throwsWhenUsingWrongThread) { + listeners.setThrowsWhenUsingWrongThread(throwsWhenUsingWrongThread); + } + + @Override + @CallSuper + public void addListener(AnalyticsListener listener) { + checkNotNull(listener); + listeners.add(listener); + } + + @Override + @CallSuper + public void removeListener(AnalyticsListener listener) { + listeners.remove(listener); + } + + @Override + @CallSuper + public void setPlayer(Player player, Looper looper) { + checkState(this.player == null || mediaPeriodQueueTracker.mediaPeriodQueue.isEmpty()); + this.player = checkNotNull(player); + handler = clock.createHandler(looper, null); + listeners = + listeners.copy( + looper, + (listener, flags) -> + listener.onEvents(player, new AnalyticsListener.Events(flags, eventTimes))); + } + + @Override + @CallSuper + public void release() { + // Release lazily so that all events that got triggered as part of player.release() + // are still delivered to all listeners and onPlayerReleased() is delivered last. + checkStateNotNull(handler).post(this::releaseInternal); + } + + @Override + public final void updateMediaPeriodQueueInfo( + List queue, @Nullable MediaPeriodId readingPeriod) { + mediaPeriodQueueTracker.onQueueUpdated(queue, readingPeriod, checkNotNull(player)); + } + + // External events. + + @Override + @SuppressWarnings("deprecation") // Calling deprecated listener method. + public final void notifySeekStarted() { + if (!isSeeking) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + isSeeking = true; + sendEvent( + eventTime, /* eventFlag= */ C.INDEX_UNSET, listener -> listener.onSeekStarted(eventTime)); + } + } + + // Audio events. + + @SuppressWarnings("deprecation") // Calling deprecated listener method. + @Override + public final void onAudioEnabled(DecoderCounters counters) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_AUDIO_ENABLED, + listener -> { + listener.onAudioEnabled(eventTime, counters); + listener.onDecoderEnabled(eventTime, C.TRACK_TYPE_AUDIO, counters); + }); + } + + @SuppressWarnings("deprecation") // Calling deprecated listener method. + @Override + public final void onAudioDecoderInitialized( + String decoderName, long initializedTimestampMs, long initializationDurationMs) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_AUDIO_DECODER_INITIALIZED, + listener -> { + listener.onAudioDecoderInitialized(eventTime, decoderName, initializationDurationMs); + listener.onAudioDecoderInitialized( + eventTime, decoderName, initializedTimestampMs, initializationDurationMs); + listener.onDecoderInitialized( + eventTime, C.TRACK_TYPE_AUDIO, decoderName, initializationDurationMs); + }); + } + + @SuppressWarnings("deprecation") // Calling deprecated listener method. 
+ @Override + public final void onAudioInputFormatChanged( + Format format, @Nullable DecoderReuseEvaluation decoderReuseEvaluation) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_AUDIO_INPUT_FORMAT_CHANGED, + listener -> { + listener.onAudioInputFormatChanged(eventTime, format); + listener.onAudioInputFormatChanged(eventTime, format, decoderReuseEvaluation); + listener.onDecoderInputFormatChanged(eventTime, C.TRACK_TYPE_AUDIO, format); + }); + } + + @Override + public final void onAudioPositionAdvancing(long playoutStartSystemTimeMs) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_AUDIO_POSITION_ADVANCING, + listener -> listener.onAudioPositionAdvancing(eventTime, playoutStartSystemTimeMs)); + } + + @Override + public final void onAudioUnderrun( + int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_AUDIO_UNDERRUN, + listener -> + listener.onAudioUnderrun(eventTime, bufferSize, bufferSizeMs, elapsedSinceLastFeedMs)); + } + + @Override + public final void onAudioDecoderReleased(String decoderName) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_AUDIO_DECODER_RELEASED, + listener -> listener.onAudioDecoderReleased(eventTime, decoderName)); + } + + @Override + @SuppressWarnings("deprecation") // Calling deprecated listener method. + public final void onAudioDisabled(DecoderCounters counters) { + EventTime eventTime = generatePlayingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_AUDIO_DISABLED, + listener -> { + listener.onAudioDisabled(eventTime, counters); + listener.onDecoderDisabled(eventTime, C.TRACK_TYPE_AUDIO, counters); + }); + } + + @Override + public final void onAudioSinkError(Exception audioSinkError) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_AUDIO_SINK_ERROR, + listener -> listener.onAudioSinkError(eventTime, audioSinkError)); + } + + @Override + public final void onAudioCodecError(Exception audioCodecError) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_AUDIO_CODEC_ERROR, + listener -> listener.onAudioCodecError(eventTime, audioCodecError)); + } + + @Override + public final void onVolumeChanged(float volume) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_VOLUME_CHANGED, + listener -> listener.onVolumeChanged(eventTime, volume)); + } + + // Video events. + + @Override + @SuppressWarnings("deprecation") // Calling deprecated listener method. + public final void onVideoEnabled(DecoderCounters counters) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_VIDEO_ENABLED, + listener -> { + listener.onVideoEnabled(eventTime, counters); + listener.onDecoderEnabled(eventTime, C.TRACK_TYPE_VIDEO, counters); + }); + } + + @Override + @SuppressWarnings("deprecation") // Calling deprecated listener method. 
+ public final void onVideoDecoderInitialized( + String decoderName, long initializedTimestampMs, long initializationDurationMs) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_VIDEO_DECODER_INITIALIZED, + listener -> { + listener.onVideoDecoderInitialized(eventTime, decoderName, initializationDurationMs); + listener.onVideoDecoderInitialized( + eventTime, decoderName, initializedTimestampMs, initializationDurationMs); + listener.onDecoderInitialized( + eventTime, C.TRACK_TYPE_VIDEO, decoderName, initializationDurationMs); + }); + } + + @Override + @SuppressWarnings("deprecation") // Calling deprecated listener method. + public final void onVideoInputFormatChanged( + Format format, @Nullable DecoderReuseEvaluation decoderReuseEvaluation) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_VIDEO_INPUT_FORMAT_CHANGED, + listener -> { + listener.onVideoInputFormatChanged(eventTime, format); + listener.onVideoInputFormatChanged(eventTime, format, decoderReuseEvaluation); + listener.onDecoderInputFormatChanged(eventTime, C.TRACK_TYPE_VIDEO, format); + }); + } + + @Override + public final void onDroppedFrames(int count, long elapsedMs) { + EventTime eventTime = generatePlayingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_DROPPED_VIDEO_FRAMES, + listener -> listener.onDroppedVideoFrames(eventTime, count, elapsedMs)); + } + + @Override + public final void onVideoDecoderReleased(String decoderName) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_VIDEO_DECODER_RELEASED, + listener -> listener.onVideoDecoderReleased(eventTime, decoderName)); + } + + @Override + @SuppressWarnings("deprecation") // Calling deprecated listener method. + public final void onVideoDisabled(DecoderCounters counters) { + EventTime eventTime = generatePlayingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_VIDEO_DISABLED, + listener -> { + listener.onVideoDisabled(eventTime, counters); + listener.onDecoderDisabled(eventTime, C.TRACK_TYPE_VIDEO, counters); + }); + } + + @Override + public final void onRenderedFirstFrame(Object output, long renderTimeMs) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_RENDERED_FIRST_FRAME, + listener -> listener.onRenderedFirstFrame(eventTime, output, renderTimeMs)); + } + + @Override + public final void onVideoFrameProcessingOffset(long totalProcessingOffsetUs, int frameCount) { + EventTime eventTime = generatePlayingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_VIDEO_FRAME_PROCESSING_OFFSET, + listener -> + listener.onVideoFrameProcessingOffset(eventTime, totalProcessingOffsetUs, frameCount)); + } + + @Override + public final void onVideoCodecError(Exception videoCodecError) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_VIDEO_CODEC_ERROR, + listener -> listener.onVideoCodecError(eventTime, videoCodecError)); + } + + @Override + public final void onSurfaceSizeChanged(int width, int height) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_SURFACE_SIZE_CHANGED, + listener -> listener.onSurfaceSizeChanged(eventTime, width, height)); + } + + // MediaSourceEventListener implementation. 
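For context on how the video events forwarded above reach application code, here is a minimal sketch of an app-side listener that overrides only the callbacks it needs. The class name, log tag, and the assumption of an existing ExoPlayer instance named player are illustrative and not part of this patch; the callback signatures match the AnalyticsListener methods shown earlier in this change.

import android.util.Log;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.analytics.AnalyticsListener;
import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation;

// Illustrative sketch only: track video format switches and dropped frames.
final class VideoQualityLogger implements AnalyticsListener {

  @Override
  public void onVideoInputFormatChanged(
      EventTime eventTime, Format format, @Nullable DecoderReuseEvaluation decoderReuseEvaluation) {
    Log.d("VideoQualityLogger", "video format is now " + format.width + "x" + format.height);
  }

  @Override
  public void onDroppedVideoFrames(EventTime eventTime, int droppedFrames, long elapsedMs) {
    Log.w("VideoQualityLogger", droppedFrames + " frames dropped over " + elapsedMs + " ms");
  }
}

// Usage, assuming an existing ExoPlayer instance named player:
// player.addAnalyticsListener(new VideoQualityLogger());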
+ + @Override + public final void onLoadStarted( + int windowIndex, + @Nullable MediaPeriodId mediaPeriodId, + LoadEventInfo loadEventInfo, + MediaLoadData mediaLoadData) { + EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); + sendEvent( + eventTime, + AnalyticsListener.EVENT_LOAD_STARTED, + listener -> listener.onLoadStarted(eventTime, loadEventInfo, mediaLoadData)); + } + + @Override + public final void onLoadCompleted( + int windowIndex, + @Nullable MediaPeriodId mediaPeriodId, + LoadEventInfo loadEventInfo, + MediaLoadData mediaLoadData) { + EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); + sendEvent( + eventTime, + AnalyticsListener.EVENT_LOAD_COMPLETED, + listener -> listener.onLoadCompleted(eventTime, loadEventInfo, mediaLoadData)); + } + + @Override + public final void onLoadCanceled( + int windowIndex, + @Nullable MediaPeriodId mediaPeriodId, + LoadEventInfo loadEventInfo, + MediaLoadData mediaLoadData) { + EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); + sendEvent( + eventTime, + AnalyticsListener.EVENT_LOAD_CANCELED, + listener -> listener.onLoadCanceled(eventTime, loadEventInfo, mediaLoadData)); + } + + @Override + public final void onLoadError( + int windowIndex, + @Nullable MediaPeriodId mediaPeriodId, + LoadEventInfo loadEventInfo, + MediaLoadData mediaLoadData, + IOException error, + boolean wasCanceled) { + EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); + sendEvent( + eventTime, + AnalyticsListener.EVENT_LOAD_ERROR, + listener -> + listener.onLoadError(eventTime, loadEventInfo, mediaLoadData, error, wasCanceled)); + } + + @Override + public final void onUpstreamDiscarded( + int windowIndex, @Nullable MediaPeriodId mediaPeriodId, MediaLoadData mediaLoadData) { + EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); + sendEvent( + eventTime, + AnalyticsListener.EVENT_UPSTREAM_DISCARDED, + listener -> listener.onUpstreamDiscarded(eventTime, mediaLoadData)); + } + + @Override + public final void onDownstreamFormatChanged( + int windowIndex, @Nullable MediaPeriodId mediaPeriodId, MediaLoadData mediaLoadData) { + EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); + sendEvent( + eventTime, + AnalyticsListener.EVENT_DOWNSTREAM_FORMAT_CHANGED, + listener -> listener.onDownstreamFormatChanged(eventTime, mediaLoadData)); + } + + // Player.Listener implementation. + + // TODO: Use Player.Listener.onEvents to know when a set of simultaneous callbacks finished. + // This helps to assign exactly the same EventTime to all of them instead of having slightly + // different real times. 
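The TODO above refers to the onEvents callback added to AnalyticsListener earlier in this change, which lets a listener react once per Looper message-queue iteration instead of in each individual callback. A minimal sketch of that pattern, with a hypothetical listener name and log tag, might look like this:

import android.util.Log;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.analytics.AnalyticsListener;

// Illustrative sketch only: react once per batch of events rather than per individual callback.
final class PlaybackStateWatcher implements AnalyticsListener {

  @Override
  public void onEvents(Player player, Events events) {
    if (events.containsAny(
        AnalyticsListener.EVENT_PLAYBACK_STATE_CHANGED,
        AnalyticsListener.EVENT_PLAY_WHEN_READY_CHANGED)) {
      // Reading combined player state here is safe because all callbacks of this iteration
      // have already been delivered.
      boolean bufferingWhileIntendingToPlay =
          player.getPlaybackState() == Player.STATE_BUFFERING && player.getPlayWhenReady();
      Log.d("PlaybackStateWatcher", "buffering while playWhenReady=" + bufferingWhileIntendingToPlay);
    }
  }
}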
+ + @Override + public final void onTimelineChanged(Timeline timeline, @Player.TimelineChangeReason int reason) { + mediaPeriodQueueTracker.onTimelineChanged(checkNotNull(player)); + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_TIMELINE_CHANGED, + listener -> listener.onTimelineChanged(eventTime, reason)); + } + + @Override + public final void onMediaItemTransition( + @Nullable MediaItem mediaItem, @Player.MediaItemTransitionReason int reason) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_MEDIA_ITEM_TRANSITION, + listener -> listener.onMediaItemTransition(eventTime, mediaItem, reason)); + } + + @Override + public void onTracksChanged(Tracks tracks) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_TRACKS_CHANGED, + listener -> listener.onTracksChanged(eventTime, tracks)); + } + + @SuppressWarnings("deprecation") // Implementing deprecated method. + @Override + public void onLoadingChanged(boolean isLoading) { + // Do nothing. Handled by non-deprecated onIsLoadingChanged. + } + + @SuppressWarnings("deprecation") // Calling deprecated listener method. + @Override + public final void onIsLoadingChanged(boolean isLoading) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_IS_LOADING_CHANGED, + listener -> { + listener.onLoadingChanged(eventTime, isLoading); + listener.onIsLoadingChanged(eventTime, isLoading); + }); + } + + @Override + public void onAvailableCommandsChanged(Player.Commands availableCommands) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_AVAILABLE_COMMANDS_CHANGED, + listener -> listener.onAvailableCommandsChanged(eventTime, availableCommands)); + } + + @SuppressWarnings("deprecation") // Implementing and calling deprecated listener method. 
+ @Override + public final void onPlayerStateChanged(boolean playWhenReady, @Player.State int playbackState) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + /* eventFlag= */ C.INDEX_UNSET, + listener -> listener.onPlayerStateChanged(eventTime, playWhenReady, playbackState)); + } + + @Override + public final void onPlaybackStateChanged(@Player.State int playbackState) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_PLAYBACK_STATE_CHANGED, + listener -> listener.onPlaybackStateChanged(eventTime, playbackState)); + } + + @Override + public final void onPlayWhenReadyChanged( + boolean playWhenReady, @Player.PlayWhenReadyChangeReason int reason) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_PLAY_WHEN_READY_CHANGED, + listener -> listener.onPlayWhenReadyChanged(eventTime, playWhenReady, reason)); + } + + @Override + public final void onPlaybackSuppressionReasonChanged( + @PlaybackSuppressionReason int playbackSuppressionReason) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_PLAYBACK_SUPPRESSION_REASON_CHANGED, + listener -> + listener.onPlaybackSuppressionReasonChanged(eventTime, playbackSuppressionReason)); + } + + @Override + public void onIsPlayingChanged(boolean isPlaying) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_IS_PLAYING_CHANGED, + listener -> listener.onIsPlayingChanged(eventTime, isPlaying)); + } + + @Override + public final void onRepeatModeChanged(@Player.RepeatMode int repeatMode) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_REPEAT_MODE_CHANGED, + listener -> listener.onRepeatModeChanged(eventTime, repeatMode)); + } + + @Override + public final void onShuffleModeEnabledChanged(boolean shuffleModeEnabled) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_SHUFFLE_MODE_ENABLED_CHANGED, + listener -> listener.onShuffleModeChanged(eventTime, shuffleModeEnabled)); + } + + @Override + public final void onPlayerError(PlaybackException error) { + EventTime eventTime = getEventTimeForErrorEvent(error); + sendEvent( + eventTime, + AnalyticsListener.EVENT_PLAYER_ERROR, + listener -> listener.onPlayerError(eventTime, error)); + } + + @Override + public void onPlayerErrorChanged(@Nullable PlaybackException error) { + EventTime eventTime = getEventTimeForErrorEvent(error); + sendEvent( + eventTime, + AnalyticsListener.EVENT_PLAYER_ERROR, + listener -> listener.onPlayerErrorChanged(eventTime, error)); + } + + @SuppressWarnings("deprecation") // Implementing deprecated method. + @Override + public void onPositionDiscontinuity(@DiscontinuityReason int reason) { + // Do nothing. Handled by non-deprecated onPositionDiscontinuity. + } + + // Calling deprecated callback. 
+ @SuppressWarnings("deprecation") + @Override + public final void onPositionDiscontinuity( + Player.PositionInfo oldPosition, + Player.PositionInfo newPosition, + @Player.DiscontinuityReason int reason) { + if (reason == Player.DISCONTINUITY_REASON_SEEK) { + isSeeking = false; + } + mediaPeriodQueueTracker.onPositionDiscontinuity(checkNotNull(player)); + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_POSITION_DISCONTINUITY, + listener -> { + listener.onPositionDiscontinuity(eventTime, reason); + listener.onPositionDiscontinuity(eventTime, oldPosition, newPosition, reason); + }); + } + + @Override + public final void onPlaybackParametersChanged(PlaybackParameters playbackParameters) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_PLAYBACK_PARAMETERS_CHANGED, + listener -> listener.onPlaybackParametersChanged(eventTime, playbackParameters)); + } + + @Override + public void onSeekBackIncrementChanged(long seekBackIncrementMs) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_SEEK_BACK_INCREMENT_CHANGED, + listener -> listener.onSeekBackIncrementChanged(eventTime, seekBackIncrementMs)); + } + + @Override + public void onSeekForwardIncrementChanged(long seekForwardIncrementMs) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_SEEK_FORWARD_INCREMENT_CHANGED, + listener -> listener.onSeekForwardIncrementChanged(eventTime, seekForwardIncrementMs)); + } + + @Override + public void onMaxSeekToPreviousPositionChanged(long maxSeekToPreviousPositionMs) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_MAX_SEEK_TO_PREVIOUS_POSITION_CHANGED, + listener -> + listener.onMaxSeekToPreviousPositionChanged(eventTime, maxSeekToPreviousPositionMs)); + } + + @Override + public void onMediaMetadataChanged(MediaMetadata mediaMetadata) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_MEDIA_METADATA_CHANGED, + listener -> listener.onMediaMetadataChanged(eventTime, mediaMetadata)); + } + + @Override + public void onPlaylistMetadataChanged(MediaMetadata playlistMetadata) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_PLAYLIST_METADATA_CHANGED, + listener -> listener.onPlaylistMetadataChanged(eventTime, playlistMetadata)); + } + + @Override + public final void onMetadata(Metadata metadata) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_METADATA, + listener -> listener.onMetadata(eventTime, metadata)); + } + + @SuppressWarnings("deprecation") // Implementing and calling deprecated listener method. + @Override + public void onCues(List cues) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, AnalyticsListener.EVENT_CUES, listener -> listener.onCues(eventTime, cues)); + } + + @Override + public void onCues(CueGroup cueGroup) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, AnalyticsListener.EVENT_CUES, listener -> listener.onCues(eventTime, cueGroup)); + } + + @SuppressWarnings("deprecation") // Implementing and calling deprecated listener method. 
+ @Override + public final void onSeekProcessed() { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, /* eventFlag= */ C.INDEX_UNSET, listener -> listener.onSeekProcessed(eventTime)); + } + + @Override + public final void onSkipSilenceEnabledChanged(boolean skipSilenceEnabled) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_SKIP_SILENCE_ENABLED_CHANGED, + listener -> listener.onSkipSilenceEnabledChanged(eventTime, skipSilenceEnabled)); + } + + @Override + public final void onAudioSessionIdChanged(int audioSessionId) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_AUDIO_SESSION_ID, + listener -> listener.onAudioSessionIdChanged(eventTime, audioSessionId)); + } + + @Override + public final void onAudioAttributesChanged(AudioAttributes audioAttributes) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_AUDIO_ATTRIBUTES_CHANGED, + listener -> listener.onAudioAttributesChanged(eventTime, audioAttributes)); + } + + @SuppressWarnings("deprecation") // Calling deprecated listener method. + @Override + public final void onVideoSizeChanged(VideoSize videoSize) { + EventTime eventTime = generateReadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_VIDEO_SIZE_CHANGED, + listener -> { + listener.onVideoSizeChanged(eventTime, videoSize); + listener.onVideoSizeChanged( + eventTime, + videoSize.width, + videoSize.height, + videoSize.unappliedRotationDegrees, + videoSize.pixelWidthHeightRatio); + }); + } + + @Override + public void onTrackSelectionParametersChanged(TrackSelectionParameters parameters) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_TRACK_SELECTION_PARAMETERS_CHANGED, + listener -> listener.onTrackSelectionParametersChanged(eventTime, parameters)); + } + + @Override + public void onDeviceInfoChanged(DeviceInfo deviceInfo) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_DEVICE_INFO_CHANGED, + listener -> listener.onDeviceInfoChanged(eventTime, deviceInfo)); + } + + @Override + public void onDeviceVolumeChanged(int volume, boolean muted) { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_DEVICE_VOLUME_CHANGED, + listener -> listener.onDeviceVolumeChanged(eventTime, volume, muted)); + } + + @SuppressWarnings("UngroupedOverloads") // Grouped by interface. + @Override + public void onRenderedFirstFrame() { + // Do nothing. Handled by onRenderedFirstFrame call with additional parameters. + } + + @Override + public void onEvents(Player player, Player.Events events) { + // Do nothing. AnalyticsCollector issues its own onEvents. + } + + // BandwidthMeter.EventListener implementation. + + @Override + public final void onBandwidthSample(int elapsedMs, long bytesTransferred, long bitrateEstimate) { + EventTime eventTime = generateLoadingMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_BANDWIDTH_ESTIMATE, + listener -> + listener.onBandwidthEstimate(eventTime, elapsedMs, bytesTransferred, bitrateEstimate)); + } + + // DrmSessionEventListener implementation. + + @Override + @SuppressWarnings("deprecation") // Calls deprecated listener method. 
+ public final void onDrmSessionAcquired( + int windowIndex, @Nullable MediaPeriodId mediaPeriodId, @DrmSession.State int state) { + EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); + sendEvent( + eventTime, + AnalyticsListener.EVENT_DRM_SESSION_ACQUIRED, + listener -> { + listener.onDrmSessionAcquired(eventTime); + listener.onDrmSessionAcquired(eventTime, state); + }); + } + + @Override + public final void onDrmKeysLoaded(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { + EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); + sendEvent( + eventTime, + AnalyticsListener.EVENT_DRM_KEYS_LOADED, + listener -> listener.onDrmKeysLoaded(eventTime)); + } + + @Override + public final void onDrmSessionManagerError( + int windowIndex, @Nullable MediaPeriodId mediaPeriodId, Exception error) { + EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); + sendEvent( + eventTime, + AnalyticsListener.EVENT_DRM_SESSION_MANAGER_ERROR, + listener -> listener.onDrmSessionManagerError(eventTime, error)); + } + + @Override + public final void onDrmKeysRestored(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { + EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); + sendEvent( + eventTime, + AnalyticsListener.EVENT_DRM_KEYS_RESTORED, + listener -> listener.onDrmKeysRestored(eventTime)); + } + + @Override + public final void onDrmKeysRemoved(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { + EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); + sendEvent( + eventTime, + AnalyticsListener.EVENT_DRM_KEYS_REMOVED, + listener -> listener.onDrmKeysRemoved(eventTime)); + } + + @Override + public final void onDrmSessionReleased(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { + EventTime eventTime = generateMediaPeriodEventTime(windowIndex, mediaPeriodId); + sendEvent( + eventTime, + AnalyticsListener.EVENT_DRM_SESSION_RELEASED, + listener -> listener.onDrmSessionReleased(eventTime)); + } + + // Internal methods. + + /** + * Sends an event to registered listeners. + * + * @param eventTime The {@link EventTime} to report. + * @param eventFlag An integer flag indicating the type of the event, or {@link C#INDEX_UNSET} to + * report this event without flag. + * @param eventInvocation The event. + */ + protected final void sendEvent( + EventTime eventTime, int eventFlag, ListenerSet.Event eventInvocation) { + eventTimes.put(eventFlag, eventTime); + listeners.sendEvent(eventFlag, eventInvocation); + } + + /** Generates an {@link EventTime} for the currently playing item in the player. */ + protected final EventTime generateCurrentPlayerMediaPeriodEventTime() { + return generateEventTime(mediaPeriodQueueTracker.getCurrentPlayerMediaPeriod()); + } + + /** Returns a new {@link EventTime} for the specified timeline, window and media period id. */ + @RequiresNonNull("player") + protected final EventTime generateEventTime( + Timeline timeline, int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { + if (timeline.isEmpty()) { + // Ensure media period id is only reported together with a valid timeline. 
+ mediaPeriodId = null; + } + long realtimeMs = clock.elapsedRealtime(); + long eventPositionMs; + boolean isInCurrentWindow = + timeline.equals(player.getCurrentTimeline()) + && windowIndex == player.getCurrentMediaItemIndex(); + if (mediaPeriodId != null && mediaPeriodId.isAd()) { + boolean isCurrentAd = + isInCurrentWindow + && player.getCurrentAdGroupIndex() == mediaPeriodId.adGroupIndex + && player.getCurrentAdIndexInAdGroup() == mediaPeriodId.adIndexInAdGroup; + // Assume start position of 0 for future ads. + eventPositionMs = isCurrentAd ? player.getCurrentPosition() : 0; + } else if (isInCurrentWindow) { + eventPositionMs = player.getContentPosition(); + } else { + // Assume default start position for future content windows. If timeline is not available yet, + // assume start position of 0. + eventPositionMs = + timeline.isEmpty() ? 0 : timeline.getWindow(windowIndex, window).getDefaultPositionMs(); + } + @Nullable + MediaPeriodId currentMediaPeriodId = mediaPeriodQueueTracker.getCurrentPlayerMediaPeriod(); + return new EventTime( + realtimeMs, + timeline, + windowIndex, + mediaPeriodId, + eventPositionMs, + player.getCurrentTimeline(), + player.getCurrentMediaItemIndex(), + currentMediaPeriodId, + player.getCurrentPosition(), + player.getTotalBufferedDuration()); + } + + private void releaseInternal() { + EventTime eventTime = generateCurrentPlayerMediaPeriodEventTime(); + sendEvent( + eventTime, + AnalyticsListener.EVENT_PLAYER_RELEASED, + listener -> listener.onPlayerReleased(eventTime)); + listeners.release(); + } + + private EventTime generateEventTime(@Nullable MediaPeriodId mediaPeriodId) { + checkNotNull(player); + @Nullable + Timeline knownTimeline = + mediaPeriodId == null + ? null + : mediaPeriodQueueTracker.getMediaPeriodIdTimeline(mediaPeriodId); + if (mediaPeriodId == null || knownTimeline == null) { + int windowIndex = player.getCurrentMediaItemIndex(); + Timeline timeline = player.getCurrentTimeline(); + boolean windowIsInTimeline = windowIndex < timeline.getWindowCount(); + return generateEventTime( + windowIsInTimeline ? timeline : Timeline.EMPTY, windowIndex, /* mediaPeriodId= */ null); + } + int windowIndex = knownTimeline.getPeriodByUid(mediaPeriodId.periodUid, period).windowIndex; + return generateEventTime(knownTimeline, windowIndex, mediaPeriodId); + } + + private EventTime generatePlayingMediaPeriodEventTime() { + return generateEventTime(mediaPeriodQueueTracker.getPlayingMediaPeriod()); + } + + private EventTime generateReadingMediaPeriodEventTime() { + return generateEventTime(mediaPeriodQueueTracker.getReadingMediaPeriod()); + } + + private EventTime generateLoadingMediaPeriodEventTime() { + return generateEventTime(mediaPeriodQueueTracker.getLoadingMediaPeriod()); + } + + private EventTime generateMediaPeriodEventTime( + int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { + checkNotNull(player); + if (mediaPeriodId != null) { + boolean isInKnownTimeline = + mediaPeriodQueueTracker.getMediaPeriodIdTimeline(mediaPeriodId) != null; + return isInKnownTimeline + ? generateEventTime(mediaPeriodId) + : generateEventTime(Timeline.EMPTY, windowIndex, mediaPeriodId); + } + Timeline timeline = player.getCurrentTimeline(); + boolean windowIsInTimeline = windowIndex < timeline.getWindowCount(); + return generateEventTime( + windowIsInTimeline ? 
timeline : Timeline.EMPTY, windowIndex, /* mediaPeriodId= */ null); + } + + private EventTime getEventTimeForErrorEvent(@Nullable PlaybackException error) { + if (error instanceof ExoPlaybackException) { + ExoPlaybackException exoError = (ExoPlaybackException) error; + if (exoError.mediaPeriodId != null) { + return generateEventTime(new MediaPeriodId(exoError.mediaPeriodId)); + } + } + return generateCurrentPlayerMediaPeriodEventTime(); + } + + /** Keeps track of the active media periods and currently playing and reading media period. */ + private static final class MediaPeriodQueueTracker { + + // TODO: Investigate reporting MediaPeriodId in renderer events. + + private final Period period; + + private ImmutableList mediaPeriodQueue; + private ImmutableMap mediaPeriodTimelines; + @Nullable private MediaPeriodId currentPlayerMediaPeriod; + private @MonotonicNonNull MediaPeriodId playingMediaPeriod; + private @MonotonicNonNull MediaPeriodId readingMediaPeriod; + + public MediaPeriodQueueTracker(Period period) { + this.period = period; + mediaPeriodQueue = ImmutableList.of(); + mediaPeriodTimelines = ImmutableMap.of(); + } + + /** + * Returns the {@link MediaPeriodId} of the media period corresponding the current position of + * the player. + * + *
<p>
      May be null if no matching media period has been created yet. + */ + @Nullable + public MediaPeriodId getCurrentPlayerMediaPeriod() { + return currentPlayerMediaPeriod; + } + + /** + * Returns the {@link MediaPeriodId} of the media period at the front of the queue. If the queue + * is empty, this is the last media period which was at the front of the queue. + * + *
<p>
      May be null, if no media period has been created yet. + */ + @Nullable + public MediaPeriodId getPlayingMediaPeriod() { + return playingMediaPeriod; + } + + /** + * Returns the {@link MediaPeriodId} of the media period currently being read by the player. If + * the queue is empty, this is the last media period which was read by the player. + * + *
<p>
      May be null, if no media period has been created yet. + */ + @Nullable + public MediaPeriodId getReadingMediaPeriod() { + return readingMediaPeriod; + } + + /** + * Returns the {@link MediaPeriodId} of the media period at the end of the queue which is + * currently loading or will be the next one loading. + * + *
<p>
      May be null, if no media period is active yet. + */ + @Nullable + public MediaPeriodId getLoadingMediaPeriod() { + return mediaPeriodQueue.isEmpty() ? null : Iterables.getLast(mediaPeriodQueue); + } + + /** + * Returns the most recent {@link Timeline} for the given {@link MediaPeriodId}, or null if no + * timeline is available. + */ + @Nullable + public Timeline getMediaPeriodIdTimeline(MediaPeriodId mediaPeriodId) { + return mediaPeriodTimelines.get(mediaPeriodId); + } + + /** Updates the queue tracker with a reported position discontinuity. */ + public void onPositionDiscontinuity(Player player) { + currentPlayerMediaPeriod = + findCurrentPlayerMediaPeriodInQueue(player, mediaPeriodQueue, playingMediaPeriod, period); + } + + /** Updates the queue tracker with a reported timeline change. */ + public void onTimelineChanged(Player player) { + currentPlayerMediaPeriod = + findCurrentPlayerMediaPeriodInQueue(player, mediaPeriodQueue, playingMediaPeriod, period); + updateMediaPeriodTimelines(/* preferredTimeline= */ player.getCurrentTimeline()); + } + + /** Updates the queue tracker to a new queue of media periods. */ + public void onQueueUpdated( + List queue, @Nullable MediaPeriodId readingPeriod, Player player) { + mediaPeriodQueue = ImmutableList.copyOf(queue); + if (!queue.isEmpty()) { + playingMediaPeriod = queue.get(0); + readingMediaPeriod = checkNotNull(readingPeriod); + } + if (currentPlayerMediaPeriod == null) { + currentPlayerMediaPeriod = + findCurrentPlayerMediaPeriodInQueue( + player, mediaPeriodQueue, playingMediaPeriod, period); + } + updateMediaPeriodTimelines(/* preferredTimeline= */ player.getCurrentTimeline()); + } + + private void updateMediaPeriodTimelines(Timeline preferredTimeline) { + ImmutableMap.Builder builder = ImmutableMap.builder(); + if (mediaPeriodQueue.isEmpty()) { + addTimelineForMediaPeriodId(builder, playingMediaPeriod, preferredTimeline); + if (!Objects.equal(readingMediaPeriod, playingMediaPeriod)) { + addTimelineForMediaPeriodId(builder, readingMediaPeriod, preferredTimeline); + } + if (!Objects.equal(currentPlayerMediaPeriod, playingMediaPeriod) + && !Objects.equal(currentPlayerMediaPeriod, readingMediaPeriod)) { + addTimelineForMediaPeriodId(builder, currentPlayerMediaPeriod, preferredTimeline); + } + } else { + for (int i = 0; i < mediaPeriodQueue.size(); i++) { + addTimelineForMediaPeriodId(builder, mediaPeriodQueue.get(i), preferredTimeline); + } + if (!mediaPeriodQueue.contains(currentPlayerMediaPeriod)) { + addTimelineForMediaPeriodId(builder, currentPlayerMediaPeriod, preferredTimeline); + } + } + mediaPeriodTimelines = builder.buildOrThrow(); + } + + private void addTimelineForMediaPeriodId( + ImmutableMap.Builder mediaPeriodTimelinesBuilder, + @Nullable MediaPeriodId mediaPeriodId, + Timeline preferredTimeline) { + if (mediaPeriodId == null) { + return; + } + if (preferredTimeline.getIndexOfPeriod(mediaPeriodId.periodUid) != C.INDEX_UNSET) { + mediaPeriodTimelinesBuilder.put(mediaPeriodId, preferredTimeline); + } else { + @Nullable Timeline existingTimeline = mediaPeriodTimelines.get(mediaPeriodId); + if (existingTimeline != null) { + mediaPeriodTimelinesBuilder.put(mediaPeriodId, existingTimeline); + } + } + } + + @Nullable + private static MediaPeriodId findCurrentPlayerMediaPeriodInQueue( + Player player, + ImmutableList mediaPeriodQueue, + @Nullable MediaPeriodId playingMediaPeriod, + Period period) { + Timeline playerTimeline = player.getCurrentTimeline(); + int playerPeriodIndex = player.getCurrentPeriodIndex(); + @Nullable + 
Object playerPeriodUid = + playerTimeline.isEmpty() ? null : playerTimeline.getUidOfPeriod(playerPeriodIndex); + int playerNextAdGroupIndex = + player.isPlayingAd() || playerTimeline.isEmpty() + ? C.INDEX_UNSET + : playerTimeline + .getPeriod(playerPeriodIndex, period) + .getAdGroupIndexAfterPositionUs( + Util.msToUs(player.getCurrentPosition()) - period.getPositionInWindowUs()); + for (int i = 0; i < mediaPeriodQueue.size(); i++) { + MediaPeriodId mediaPeriodId = mediaPeriodQueue.get(i); + if (isMatchingMediaPeriod( + mediaPeriodId, + playerPeriodUid, + player.isPlayingAd(), + player.getCurrentAdGroupIndex(), + player.getCurrentAdIndexInAdGroup(), + playerNextAdGroupIndex)) { + return mediaPeriodId; + } + } + if (mediaPeriodQueue.isEmpty() && playingMediaPeriod != null) { + if (isMatchingMediaPeriod( + playingMediaPeriod, + playerPeriodUid, + player.isPlayingAd(), + player.getCurrentAdGroupIndex(), + player.getCurrentAdIndexInAdGroup(), + playerNextAdGroupIndex)) { + return playingMediaPeriod; + } + } + return null; + } + + private static boolean isMatchingMediaPeriod( + MediaPeriodId mediaPeriodId, + @Nullable Object playerPeriodUid, + boolean isPlayingAd, + int playerAdGroupIndex, + int playerAdIndexInAdGroup, + int playerNextAdGroupIndex) { + if (!mediaPeriodId.periodUid.equals(playerPeriodUid)) { + return false; + } + // Timeline period matches. Still need to check ad information. + return (isPlayingAd + && mediaPeriodId.adGroupIndex == playerAdGroupIndex + && mediaPeriodId.adIndexInAdGroup == playerAdIndexInAdGroup) + || (!isPlayingAd + && mediaPeriodId.adGroupIndex == C.INDEX_UNSET + && mediaPeriodId.nextAdGroupIndex == playerNextAdGroupIndex); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/DefaultAnalyticsListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/DefaultAnalyticsListener.java deleted file mode 100644 index d487a8aa99..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/DefaultAnalyticsListener.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (C) 2018 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.analytics; - -/** - * @deprecated Use {@link AnalyticsListener} directly for selective overrides as all methods are - * implemented as no-op default methods. 
- */ -@Deprecated -public abstract class DefaultAnalyticsListener implements AnalyticsListener {} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/DefaultPlaybackSessionManager.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/DefaultPlaybackSessionManager.java index 04f3ba154a..0e3dd578d6 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/DefaultPlaybackSessionManager.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/DefaultPlaybackSessionManager.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.analytics; +import static java.lang.Math.max; + import android.util.Base64; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; @@ -25,32 +27,52 @@ import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Supplier; import java.util.HashMap; import java.util.Iterator; import java.util.Random; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; /** * Default {@link PlaybackSessionManager} which instantiates a new session for each window in the * timeline and also for each ad within the windows. * - *
<p>Sessions are identified by Base64-encoded, URL-safe, random strings. + * <p>
      By default, sessions are identified by Base64-encoded, URL-safe, random strings. */ public final class DefaultPlaybackSessionManager implements PlaybackSessionManager { + /** Default generator for unique session ids that are random, Based64-encoded and URL-safe. */ + public static final Supplier DEFAULT_SESSION_ID_GENERATOR = + DefaultPlaybackSessionManager::generateDefaultSessionId; + private static final Random RANDOM = new Random(); private static final int SESSION_ID_LENGTH = 12; private final Timeline.Window window; private final Timeline.Period period; private final HashMap sessions; + private final Supplier sessionIdGenerator; private @MonotonicNonNull Listener listener; private Timeline currentTimeline; @Nullable private String currentSessionId; - /** Creates session manager. */ + /** + * Creates session manager with a {@link #DEFAULT_SESSION_ID_GENERATOR} to generate session ids. + */ public DefaultPlaybackSessionManager() { + this(DEFAULT_SESSION_ID_GENERATOR); + } + + /** + * Creates session manager. + * + * @param sessionIdGenerator A generator for new session ids. All generated session ids must be + * unique. + */ + public DefaultPlaybackSessionManager(Supplier sessionIdGenerator) { + this.sessionIdGenerator = sessionIdGenerator; window = new Timeline.Window(); period = new Timeline.Period(); sessions = new HashMap<>(); @@ -82,6 +104,10 @@ public synchronized boolean belongsToSession(EventTime eventTime, String session @Override public synchronized void updateSessions(EventTime eventTime) { Assertions.checkNotNull(listener); + if (eventTime.timeline.isEmpty()) { + // Don't try to create new sessions for empty timelines. + return; + } @Nullable SessionDescriptor currentSession = sessions.get(currentSessionId); if (eventTime.mediaPeriodId != null && currentSession != null) { // If we receive an event associated with a media period, then it needs to be either part of @@ -101,6 +127,38 @@ public synchronized void updateSessions(EventTime eventTime) { if (currentSessionId == null) { currentSessionId = eventSession.sessionId; } + if (eventTime.mediaPeriodId != null && eventTime.mediaPeriodId.isAd()) { + // Ensure that the content session for an ad session is created first. + MediaPeriodId contentMediaPeriodId = + new MediaPeriodId( + eventTime.mediaPeriodId.periodUid, + eventTime.mediaPeriodId.windowSequenceNumber, + eventTime.mediaPeriodId.adGroupIndex); + SessionDescriptor contentSession = + getOrAddSession(eventTime.windowIndex, contentMediaPeriodId); + if (!contentSession.isCreated) { + contentSession.isCreated = true; + eventTime.timeline.getPeriodByUid(eventTime.mediaPeriodId.periodUid, period); + long adGroupPositionMs = + Util.usToMs(period.getAdGroupTimeUs(eventTime.mediaPeriodId.adGroupIndex)) + + period.getPositionInWindowMs(); + // getAdGroupTimeUs may return 0 for prerolls despite period offset. 
+ adGroupPositionMs = max(0, adGroupPositionMs); + EventTime eventTimeForContent = + new EventTime( + eventTime.realtimeMs, + eventTime.timeline, + eventTime.windowIndex, + contentMediaPeriodId, + /* eventPlaybackPositionMs= */ adGroupPositionMs, + eventTime.currentTimeline, + eventTime.currentWindowIndex, + eventTime.currentMediaPeriodId, + eventTime.currentPlaybackPositionMs, + eventTime.totalBufferedDurationMs); + listener.onSessionCreated(eventTimeForContent, contentSession.sessionId); + } + } if (!eventSession.isCreated) { eventSession.isCreated = true; listener.onSessionCreated(eventTime, eventSession.sessionId); @@ -112,14 +170,15 @@ public synchronized void updateSessions(EventTime eventTime) { } @Override - public synchronized void handleTimelineUpdate(EventTime eventTime) { + public synchronized void updateSessionsWithTimelineChange(EventTime eventTime) { Assertions.checkNotNull(listener); Timeline previousTimeline = currentTimeline; currentTimeline = eventTime.timeline; Iterator iterator = sessions.values().iterator(); while (iterator.hasNext()) { SessionDescriptor session = iterator.next(); - if (!session.tryResolvingToNewTimeline(previousTimeline, currentTimeline)) { + if (!session.tryResolvingToNewTimeline(previousTimeline, currentTimeline) + || session.isFinishedAtEventTime(eventTime)) { iterator.remove(); if (session.isCreated) { if (session.sessionId.equals(currentSessionId)) { @@ -130,16 +189,14 @@ public synchronized void handleTimelineUpdate(EventTime eventTime) { } } } - handlePositionDiscontinuity(eventTime, Player.DISCONTINUITY_REASON_INTERNAL); + updateCurrentSession(eventTime); } @Override - public synchronized void handlePositionDiscontinuity( + public synchronized void updateSessionsWithDiscontinuity( EventTime eventTime, @DiscontinuityReason int reason) { Assertions.checkNotNull(listener); - boolean hasAutomaticTransition = - reason == Player.DISCONTINUITY_REASON_PERIOD_TRANSITION - || reason == Player.DISCONTINUITY_REASON_AD_INSERTION; + boolean hasAutomaticTransition = reason == Player.DISCONTINUITY_REASON_AUTO_TRANSITION; Iterator iterator = sessions.values().iterator(); while (iterator.hasNext()) { SessionDescriptor session = iterator.next(); @@ -156,10 +213,41 @@ public synchronized void handlePositionDiscontinuity( } } } + updateCurrentSession(eventTime); + } + + @Override + @Nullable + public synchronized String getActiveSessionId() { + return currentSessionId; + } + + @Override + public synchronized void finishAllSessions(EventTime eventTime) { + currentSessionId = null; + Iterator iterator = sessions.values().iterator(); + while (iterator.hasNext()) { + SessionDescriptor session = iterator.next(); + iterator.remove(); + if (session.isCreated && listener != null) { + listener.onSessionFinished( + eventTime, session.sessionId, /* automaticTransitionToNextPlayback= */ false); + } + } + } + + @RequiresNonNull("listener") + private void updateCurrentSession(EventTime eventTime) { + if (eventTime.timeline.isEmpty()) { + // Clear current session if the Timeline is empty. 
+ currentSessionId = null; + return; + } @Nullable SessionDescriptor previousSessionDescriptor = sessions.get(currentSessionId); SessionDescriptor currentSessionDescriptor = getOrAddSession(eventTime.windowIndex, eventTime.mediaPeriodId); currentSessionId = currentSessionDescriptor.sessionId; + updateSessions(eventTime); if (eventTime.mediaPeriodId != null && eventTime.mediaPeriodId.isAd() && (previousSessionDescriptor == null @@ -176,24 +264,8 @@ public synchronized void handlePositionDiscontinuity( eventTime.mediaPeriodId.periodUid, eventTime.mediaPeriodId.windowSequenceNumber); SessionDescriptor contentSession = getOrAddSession(eventTime.windowIndex, contentMediaPeriodId); - if (contentSession.isCreated && currentSessionDescriptor.isCreated) { - listener.onAdPlaybackStarted( - eventTime, contentSession.sessionId, currentSessionDescriptor.sessionId); - } - } - } - - @Override - public void finishAllSessions(EventTime eventTime) { - currentSessionId = null; - Iterator iterator = sessions.values().iterator(); - while (iterator.hasNext()) { - SessionDescriptor session = iterator.next(); - iterator.remove(); - if (session.isCreated && listener != null) { - listener.onSessionFinished( - eventTime, session.sessionId, /* automaticTransitionToNextPlayback= */ false); - } + listener.onAdPlaybackStarted( + eventTime, contentSession.sessionId, currentSessionDescriptor.sessionId); } } @@ -221,14 +293,14 @@ private SessionDescriptor getOrAddSession( } } if (bestMatch == null) { - String sessionId = generateSessionId(); + String sessionId = sessionIdGenerator.get(); bestMatch = new SessionDescriptor(sessionId, windowIndex, mediaPeriodId); sessions.put(sessionId, bestMatch); } return bestMatch; } - private static String generateSessionId() { + private static String generateDefaultSessionId() { byte[] randomBytes = new byte[SESSION_ID_LENGTH]; RANDOM.nextBytes(randomBytes); return Base64.encodeToString(randomBytes, Base64.URL_SAFE | Base64.NO_WRAP); @@ -308,15 +380,15 @@ public void maybeSetWindowSequenceNumber( } public boolean isFinishedAtEventTime(EventTime eventTime) { - if (windowSequenceNumber == C.INDEX_UNSET) { - // Sessions with unspecified window sequence number are kept until we know more. - return false; - } if (eventTime.mediaPeriodId == null) { // For event times without media period id (e.g. after seek to new window), we only keep // sessions of this window. return windowIndex != eventTime.windowIndex; } + if (windowSequenceNumber == C.INDEX_UNSET) { + // Sessions with unspecified window sequence number are kept until we know more. + return false; + } if (eventTime.mediaPeriodId.windowSequenceNumber > windowSequenceNumber) { // All past window sequence numbers are finished. return true; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/MediaMetricsListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/MediaMetricsListener.java new file mode 100644 index 0000000000..46be1e14c0 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/MediaMetricsListener.java @@ -0,0 +1,894 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.analytics; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; + +import android.annotation.SuppressLint; +import android.content.Context; +import android.media.DeniedByServerException; +import android.media.MediaCodec; +import android.media.MediaDrm; +import android.media.MediaDrmResetException; +import android.media.NotProvisionedException; +import android.media.metrics.LogSessionId; +import android.media.metrics.MediaMetricsManager; +import android.media.metrics.NetworkEvent; +import android.media.metrics.PlaybackErrorEvent; +import android.media.metrics.PlaybackMetrics; +import android.media.metrics.PlaybackSession; +import android.media.metrics.PlaybackStateEvent; +import android.media.metrics.TrackChangeEvent; +import android.os.SystemClock; +import android.system.ErrnoException; +import android.system.OsConstants; +import android.util.Pair; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.C.ContentType; +import com.google.android.exoplayer2.ExoPlaybackException; +import com.google.android.exoplayer2.ExoPlayerLibraryInfo; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.MediaItem; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.Player; +import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.Tracks; +import com.google.android.exoplayer2.audio.AudioSink; +import com.google.android.exoplayer2.decoder.DecoderCounters; +import com.google.android.exoplayer2.drm.DefaultDrmSessionManager; +import com.google.android.exoplayer2.drm.DrmInitData; +import com.google.android.exoplayer2.drm.DrmSession; +import com.google.android.exoplayer2.drm.UnsupportedDrmException; +import com.google.android.exoplayer2.mediacodec.MediaCodecDecoderException; +import com.google.android.exoplayer2.mediacodec.MediaCodecRenderer; +import com.google.android.exoplayer2.source.LoadEventInfo; +import com.google.android.exoplayer2.source.MediaLoadData; +import com.google.android.exoplayer2.source.MediaSource; +import com.google.android.exoplayer2.upstream.FileDataSource; +import com.google.android.exoplayer2.upstream.HttpDataSource; +import com.google.android.exoplayer2.upstream.UdpDataSource; +import com.google.android.exoplayer2.util.NetworkTypeObserver; +import com.google.android.exoplayer2.util.Util; +import com.google.android.exoplayer2.video.VideoSize; +import com.google.common.collect.ImmutableList; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.net.SocketTimeoutException; +import java.net.UnknownHostException; +import java.util.HashMap; +import java.util.UUID; +import org.checkerframework.checker.nullness.compatqual.NullableType; +import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf; +import 
org.checkerframework.checker.nullness.qual.RequiresNonNull; + +/** + * An {@link AnalyticsListener} that interacts with the Android {@link MediaMetricsManager}. + * + *
<p>
      It listens to playback events and forwards them to a {@link PlaybackSession}. The {@link + * LogSessionId} of the playback session can be obtained with {@link #getLogSessionId()}. + */ +@RequiresApi(31) +public final class MediaMetricsListener + implements AnalyticsListener, PlaybackSessionManager.Listener { + + /** + * Creates a media metrics listener. + * + * @param context A context. + * @return The {@link MediaMetricsListener}, or null if the {@link Context#MEDIA_METRICS_SERVICE + * media metrics service} isn't available. + */ + @Nullable + public static MediaMetricsListener create(Context context) { + @Nullable + MediaMetricsManager mediaMetricsManager = + (MediaMetricsManager) context.getSystemService(Context.MEDIA_METRICS_SERVICE); + return mediaMetricsManager == null + ? null + : new MediaMetricsListener(context, mediaMetricsManager.createPlaybackSession()); + } + + private final Context context; + private final PlaybackSessionManager sessionManager; + private final PlaybackSession playbackSession; + private final long startTimeMs; + private final Timeline.Window window; + private final Timeline.Period period; + private final HashMap bandwidthTimeMs; + private final HashMap bandwidthBytes; + + @Nullable private String activeSessionId; + @Nullable private PlaybackMetrics.Builder metricsBuilder; + private @Player.DiscontinuityReason int discontinuityReason; + private int currentPlaybackState; + private int currentNetworkType; + @Nullable private PlaybackException pendingPlayerError; + @Nullable private PendingFormatUpdate pendingVideoFormat; + @Nullable private PendingFormatUpdate pendingAudioFormat; + @Nullable private PendingFormatUpdate pendingTextFormat; + @Nullable private Format currentVideoFormat; + @Nullable private Format currentAudioFormat; + @Nullable private Format currentTextFormat; + private boolean isSeeking; + private int ioErrorType; + private boolean hasFatalError; + private int droppedFrames; + private int playedFrames; + private int audioUnderruns; + private boolean reportedEventsForCurrentSession; + + /** + * Creates the listener. + * + * @param context A {@link Context}. + */ + private MediaMetricsListener(Context context, PlaybackSession playbackSession) { + context = context.getApplicationContext(); + this.context = context; + this.playbackSession = playbackSession; + window = new Timeline.Window(); + period = new Timeline.Period(); + bandwidthBytes = new HashMap<>(); + bandwidthTimeMs = new HashMap<>(); + startTimeMs = SystemClock.elapsedRealtime(); + currentPlaybackState = PlaybackStateEvent.STATE_NOT_STARTED; + currentNetworkType = NetworkEvent.NETWORK_TYPE_UNKNOWN; + sessionManager = new DefaultPlaybackSessionManager(); + sessionManager.setListener(this); + } + + /** Returns the {@link LogSessionId} used by this listener. */ + public LogSessionId getLogSessionId() { + return playbackSession.getSessionId(); + } + + // PlaybackSessionManager.Listener implementation. + + @Override + public void onSessionCreated(EventTime eventTime, String sessionId) {} + + @Override + public void onSessionActive(EventTime eventTime, String sessionId) { + if (eventTime.mediaPeriodId != null && eventTime.mediaPeriodId.isAd()) { + // Ignore ad sessions. 
+ return; + } + finishCurrentSession(); + activeSessionId = sessionId; + metricsBuilder = + new PlaybackMetrics.Builder() + .setPlayerName(ExoPlayerLibraryInfo.TAG) + .setPlayerVersion(ExoPlayerLibraryInfo.VERSION); + maybeUpdateTimelineMetadata(eventTime.timeline, eventTime.mediaPeriodId); + } + + @Override + public void onAdPlaybackStarted( + EventTime eventTime, String contentSessionId, String adSessionId) {} + + @Override + public void onSessionFinished( + EventTime eventTime, String sessionId, boolean automaticTransitionToNextPlayback) { + if ((eventTime.mediaPeriodId != null && eventTime.mediaPeriodId.isAd()) + || !sessionId.equals(activeSessionId)) { + // Ignore ad sessions and other sessions that are finished before becoming active. + } else { + finishCurrentSession(); + } + bandwidthTimeMs.remove(sessionId); + bandwidthBytes.remove(sessionId); + } + + // AnalyticsListener implementation. + + @Override + public void onPositionDiscontinuity( + EventTime eventTime, + Player.PositionInfo oldPosition, + Player.PositionInfo newPosition, + @Player.DiscontinuityReason int reason) { + if (reason == Player.DISCONTINUITY_REASON_SEEK) { + isSeeking = true; + } + discontinuityReason = reason; + } + + @Override + public void onVideoDisabled(EventTime eventTime, DecoderCounters decoderCounters) { + // TODO(b/181122234): DecoderCounters are not re-reported at period boundaries. + droppedFrames += decoderCounters.droppedBufferCount; + playedFrames += decoderCounters.renderedOutputBufferCount; + } + + @Override + public void onBandwidthEstimate( + EventTime eventTime, int totalLoadTimeMs, long totalBytesLoaded, long bitrateEstimate) { + if (eventTime.mediaPeriodId != null) { + String sessionId = + sessionManager.getSessionForMediaPeriodId( + eventTime.timeline, checkNotNull(eventTime.mediaPeriodId)); + @Nullable Long prevBandwidthBytes = bandwidthBytes.get(sessionId); + @Nullable Long prevBandwidthTimeMs = bandwidthTimeMs.get(sessionId); + bandwidthBytes.put( + sessionId, (prevBandwidthBytes == null ? 0 : prevBandwidthBytes) + totalBytesLoaded); + bandwidthTimeMs.put( + sessionId, (prevBandwidthTimeMs == null ? 0 : prevBandwidthTimeMs) + totalLoadTimeMs); + } + } + + @Override + public void onDownstreamFormatChanged(EventTime eventTime, MediaLoadData mediaLoadData) { + if (eventTime.mediaPeriodId == null) { + // This event arrived after the media has been removed from the playlist or a custom + // MediaSource forgot to set the right id. Ignore the track change in these cases. + return; + } + PendingFormatUpdate update = + new PendingFormatUpdate( + checkNotNull(mediaLoadData.trackFormat), + mediaLoadData.trackSelectionReason, + sessionManager.getSessionForMediaPeriodId( + eventTime.timeline, checkNotNull(eventTime.mediaPeriodId))); + switch (mediaLoadData.trackType) { + case C.TRACK_TYPE_VIDEO: + case C.TRACK_TYPE_DEFAULT: + pendingVideoFormat = update; + break; + case C.TRACK_TYPE_AUDIO: + pendingAudioFormat = update; + break; + case C.TRACK_TYPE_TEXT: + pendingTextFormat = update; + break; + default: + // Other track type. Ignore. 
+ } + } + + @Override + public void onVideoSizeChanged(EventTime eventTime, VideoSize videoSize) { + @Nullable PendingFormatUpdate pendingVideoFormat = this.pendingVideoFormat; + if (pendingVideoFormat != null && pendingVideoFormat.format.height == Format.NO_VALUE) { + Format formatWithHeightAndWidth = + pendingVideoFormat + .format + .buildUpon() + .setWidth(videoSize.width) + .setHeight(videoSize.height) + .build(); + this.pendingVideoFormat = + new PendingFormatUpdate( + formatWithHeightAndWidth, + pendingVideoFormat.selectionReason, + pendingVideoFormat.sessionId); + } + } + + @Override + public void onLoadError( + EventTime eventTime, + LoadEventInfo loadEventInfo, + MediaLoadData mediaLoadData, + IOException error, + boolean wasCanceled) { + ioErrorType = mediaLoadData.dataType; + } + + @Override + public void onPlayerError(EventTime eventTime, PlaybackException error) { + pendingPlayerError = error; + } + + @Override + public void onEvents(Player player, Events events) { + if (events.size() == 0) { + return; + } + maybeAddSessions(events); + + long realtimeMs = SystemClock.elapsedRealtime(); + maybeUpdateMetricsBuilderValues(player, events); + maybeReportPlaybackError(realtimeMs); + maybeReportTrackChanges(player, events, realtimeMs); + maybeReportNetworkChange(realtimeMs); + maybeReportPlaybackStateChange(player, events, realtimeMs); + + if (events.contains(AnalyticsListener.EVENT_PLAYER_RELEASED)) { + sessionManager.finishAllSessions(events.getEventTime(EVENT_PLAYER_RELEASED)); + } + } + + private void maybeAddSessions(Events events) { + for (int i = 0; i < events.size(); i++) { + @EventFlags int event = events.get(i); + EventTime eventTime = events.getEventTime(event); + if (event == EVENT_TIMELINE_CHANGED) { + sessionManager.updateSessionsWithTimelineChange(eventTime); + } else if (event == EVENT_POSITION_DISCONTINUITY) { + sessionManager.updateSessionsWithDiscontinuity(eventTime, discontinuityReason); + } else { + sessionManager.updateSessions(eventTime); + } + } + } + + private void maybeUpdateMetricsBuilderValues(Player player, Events events) { + if (events.contains(EVENT_TIMELINE_CHANGED)) { + EventTime eventTime = events.getEventTime(EVENT_TIMELINE_CHANGED); + if (metricsBuilder != null) { + maybeUpdateTimelineMetadata(eventTime.timeline, eventTime.mediaPeriodId); + } + } + if (events.contains(EVENT_TRACKS_CHANGED) && metricsBuilder != null) { + @Nullable DrmInitData drmInitData = getDrmInitData(player.getCurrentTracks().getGroups()); + if (drmInitData != null) { + castNonNull(metricsBuilder).setDrmType(getDrmType(drmInitData)); + } + } + if (events.contains(EVENT_AUDIO_UNDERRUN)) { + audioUnderruns++; + } + } + + private void maybeReportPlaybackError(long realtimeMs) { + @Nullable PlaybackException error = pendingPlayerError; + if (error == null) { + return; + } + ErrorInfo errorInfo = + getErrorInfo( + error, context, /* lastIoErrorForManifest= */ ioErrorType == C.DATA_TYPE_MANIFEST); + playbackSession.reportPlaybackErrorEvent( + new PlaybackErrorEvent.Builder() + .setTimeSinceCreatedMillis(realtimeMs - startTimeMs) + .setErrorCode(errorInfo.errorCode) + .setSubErrorCode(errorInfo.subErrorCode) + .setException(error) + .build()); + reportedEventsForCurrentSession = true; + pendingPlayerError = null; + } + + private void maybeReportTrackChanges(Player player, Events events, long realtimeMs) { + if (events.contains(EVENT_TRACKS_CHANGED)) { + Tracks tracks = player.getCurrentTracks(); + boolean isVideoSelected = tracks.isTypeSelected(C.TRACK_TYPE_VIDEO); + boolean 
isAudioSelected = tracks.isTypeSelected(C.TRACK_TYPE_AUDIO); + boolean isTextSelected = tracks.isTypeSelected(C.TRACK_TYPE_TEXT); + if (isVideoSelected || isAudioSelected || isTextSelected) { + // Ignore updates with insufficient information where no tracks are selected. + if (!isVideoSelected) { + maybeUpdateVideoFormat(realtimeMs, /* videoFormat= */ null, C.SELECTION_REASON_UNKNOWN); + } + if (!isAudioSelected) { + maybeUpdateAudioFormat(realtimeMs, /* audioFormat= */ null, C.SELECTION_REASON_UNKNOWN); + } + if (!isTextSelected) { + maybeUpdateTextFormat(realtimeMs, /* textFormat= */ null, C.SELECTION_REASON_UNKNOWN); + } + } + } + if (canReportPendingFormatUpdate(pendingVideoFormat) + && pendingVideoFormat.format.height != Format.NO_VALUE) { + maybeUpdateVideoFormat( + realtimeMs, pendingVideoFormat.format, pendingVideoFormat.selectionReason); + pendingVideoFormat = null; + } + if (canReportPendingFormatUpdate(pendingAudioFormat)) { + maybeUpdateAudioFormat( + realtimeMs, pendingAudioFormat.format, pendingAudioFormat.selectionReason); + pendingAudioFormat = null; + } + if (canReportPendingFormatUpdate(pendingTextFormat)) { + maybeUpdateTextFormat( + realtimeMs, pendingTextFormat.format, pendingTextFormat.selectionReason); + pendingTextFormat = null; + } + } + + @EnsuresNonNullIf(result = true, expression = "#1") + private boolean canReportPendingFormatUpdate(@Nullable PendingFormatUpdate pendingFormatUpdate) { + return pendingFormatUpdate != null + && pendingFormatUpdate.sessionId.equals(sessionManager.getActiveSessionId()); + } + + private void maybeReportNetworkChange(long realtimeMs) { + int networkType = getNetworkType(context); + if (networkType != currentNetworkType) { + currentNetworkType = networkType; + playbackSession.reportNetworkEvent( + new NetworkEvent.Builder() + .setNetworkType(networkType) + .setTimeSinceCreatedMillis(realtimeMs - startTimeMs) + .build()); + } + } + + private void maybeReportPlaybackStateChange(Player player, Events events, long realtimeMs) { + if (player.getPlaybackState() != Player.STATE_BUFFERING) { + isSeeking = false; + } + if (player.getPlayerError() == null) { + hasFatalError = false; + } else if (events.contains(EVENT_PLAYER_ERROR)) { + hasFatalError = true; + } + int newPlaybackState = resolveNewPlaybackState(player); + if (currentPlaybackState != newPlaybackState) { + currentPlaybackState = newPlaybackState; + reportedEventsForCurrentSession = true; + playbackSession.reportPlaybackStateEvent( + new PlaybackStateEvent.Builder() + .setState(currentPlaybackState) + .setTimeSinceCreatedMillis(realtimeMs - startTimeMs) + .build()); + } + } + + private int resolveNewPlaybackState(Player player) { + @Player.State int playerPlaybackState = player.getPlaybackState(); + if (isSeeking) { + // Seeking takes precedence over errors such that we report a seek while in error state. + return PlaybackStateEvent.STATE_SEEKING; + } else if (hasFatalError) { + return PlaybackStateEvent.STATE_FAILED; + } else if (playerPlaybackState == Player.STATE_ENDED) { + return PlaybackStateEvent.STATE_ENDED; + } else if (playerPlaybackState == Player.STATE_BUFFERING) { + if (currentPlaybackState == PlaybackStateEvent.STATE_NOT_STARTED + || currentPlaybackState == PlaybackStateEvent.STATE_JOINING_FOREGROUND) { + return PlaybackStateEvent.STATE_JOINING_FOREGROUND; + } + if (!player.getPlayWhenReady()) { + return PlaybackStateEvent.STATE_PAUSED_BUFFERING; + } + return player.getPlaybackSuppressionReason() != Player.PLAYBACK_SUPPRESSION_REASON_NONE + ? 
PlaybackStateEvent.STATE_SUPPRESSED_BUFFERING + : PlaybackStateEvent.STATE_BUFFERING; + } else if (playerPlaybackState == Player.STATE_READY) { + if (!player.getPlayWhenReady()) { + return PlaybackStateEvent.STATE_PAUSED; + } + return player.getPlaybackSuppressionReason() != Player.PLAYBACK_SUPPRESSION_REASON_NONE + ? PlaybackStateEvent.STATE_SUPPRESSED + : PlaybackStateEvent.STATE_PLAYING; + } else if (playerPlaybackState == Player.STATE_IDLE + && currentPlaybackState != PlaybackStateEvent.STATE_NOT_STARTED) { + // This case only applies for calls to player.stop(). All other IDLE cases are handled by + // !isForeground, hasFatalError or isSuspended. NOT_STARTED is deliberately ignored. + return PlaybackStateEvent.STATE_STOPPED; + } + return currentPlaybackState; + } + + private void maybeUpdateVideoFormat( + long realtimeMs, @Nullable Format videoFormat, @C.SelectionReason int trackSelectionReason) { + if (Util.areEqual(currentVideoFormat, videoFormat)) { + return; + } + if (currentVideoFormat == null && trackSelectionReason == C.SELECTION_REASON_UNKNOWN) { + trackSelectionReason = C.SELECTION_REASON_INITIAL; + } + currentVideoFormat = videoFormat; + reportTrackChangeEvent( + TrackChangeEvent.TRACK_TYPE_VIDEO, realtimeMs, videoFormat, trackSelectionReason); + } + + private void maybeUpdateAudioFormat( + long realtimeMs, @Nullable Format audioFormat, @C.SelectionReason int trackSelectionReason) { + if (Util.areEqual(currentAudioFormat, audioFormat)) { + return; + } + if (currentAudioFormat == null && trackSelectionReason == C.SELECTION_REASON_UNKNOWN) { + trackSelectionReason = C.SELECTION_REASON_INITIAL; + } + currentAudioFormat = audioFormat; + reportTrackChangeEvent( + TrackChangeEvent.TRACK_TYPE_AUDIO, realtimeMs, audioFormat, trackSelectionReason); + } + + private void maybeUpdateTextFormat( + long realtimeMs, @Nullable Format textFormat, @C.SelectionReason int trackSelectionReason) { + if (Util.areEqual(currentTextFormat, textFormat)) { + return; + } + if (currentTextFormat == null && trackSelectionReason == C.SELECTION_REASON_UNKNOWN) { + trackSelectionReason = C.SELECTION_REASON_INITIAL; + } + currentTextFormat = textFormat; + reportTrackChangeEvent( + TrackChangeEvent.TRACK_TYPE_TEXT, realtimeMs, textFormat, trackSelectionReason); + } + + private void reportTrackChangeEvent( + int type, + long realtimeMs, + @Nullable Format format, + @C.SelectionReason int trackSelectionReason) { + TrackChangeEvent.Builder builder = + new TrackChangeEvent.Builder(type).setTimeSinceCreatedMillis(realtimeMs - startTimeMs); + if (format != null) { + builder.setTrackState(TrackChangeEvent.TRACK_STATE_ON); + builder.setTrackChangeReason(getTrackChangeReason(trackSelectionReason)); + if (format.containerMimeType != null) { + // TODO(b/181121074): Progressive container mime type is not filled in by MediaSource. 
+ builder.setContainerMimeType(format.containerMimeType); + } + if (format.sampleMimeType != null) { + builder.setSampleMimeType(format.sampleMimeType); + } + if (format.codecs != null) { + builder.setCodecName(format.codecs); + } + if (format.bitrate != Format.NO_VALUE) { + builder.setBitrate(format.bitrate); + } + if (format.width != Format.NO_VALUE) { + builder.setWidth(format.width); + } + if (format.height != Format.NO_VALUE) { + builder.setHeight(format.height); + } + if (format.channelCount != Format.NO_VALUE) { + builder.setChannelCount(format.channelCount); + } + if (format.sampleRate != Format.NO_VALUE) { + builder.setAudioSampleRate(format.sampleRate); + } + if (format.language != null) { + Pair languageAndRegion = + getLanguageAndRegion(format.language); + builder.setLanguage(languageAndRegion.first); + if (languageAndRegion.second != null) { + builder.setLanguageRegion(languageAndRegion.second); + } + } + if (format.frameRate != Format.NO_VALUE) { + builder.setVideoFrameRate(format.frameRate); + } + } else { + builder.setTrackState(TrackChangeEvent.TRACK_STATE_OFF); + } + reportedEventsForCurrentSession = true; + playbackSession.reportTrackChangeEvent(builder.build()); + } + + @RequiresNonNull("metricsBuilder") + private void maybeUpdateTimelineMetadata( + Timeline timeline, @Nullable MediaSource.MediaPeriodId mediaPeriodId) { + PlaybackMetrics.Builder metricsBuilder = this.metricsBuilder; + if (mediaPeriodId == null) { + return; + } + int periodIndex = timeline.getIndexOfPeriod(mediaPeriodId.periodUid); + if (periodIndex == C.INDEX_UNSET) { + return; + } + timeline.getPeriod(periodIndex, period); + timeline.getWindow(period.windowIndex, window); + metricsBuilder.setStreamType(getStreamType(window.mediaItem)); + if (window.durationUs != C.TIME_UNSET + && !window.isPlaceholder + && !window.isDynamic + && !window.isLive()) { + metricsBuilder.setMediaDurationMillis(window.getDurationMs()); + } + metricsBuilder.setPlaybackType( + window.isLive() ? PlaybackMetrics.PLAYBACK_TYPE_LIVE : PlaybackMetrics.PLAYBACK_TYPE_VOD); + reportedEventsForCurrentSession = true; + } + + private void finishCurrentSession() { + if (metricsBuilder != null && reportedEventsForCurrentSession) { + metricsBuilder.setAudioUnderrunCount(audioUnderruns); + metricsBuilder.setVideoFramesDropped(droppedFrames); + metricsBuilder.setVideoFramesPlayed(playedFrames); + @Nullable Long networkTimeMs = bandwidthTimeMs.get(activeSessionId); + metricsBuilder.setNetworkTransferDurationMillis(networkTimeMs == null ? 0 : networkTimeMs); + // TODO(b/181121847): Report localBytesRead. This requires additional callbacks or plumbing. + @Nullable Long networkBytes = bandwidthBytes.get(activeSessionId); + metricsBuilder.setNetworkBytesRead(networkBytes == null ? 0 : networkBytes); + // TODO(b/181121847): Detect stream sources mixed and local depending on localBytesRead. + metricsBuilder.setStreamSource( + networkBytes != null && networkBytes > 0 + ? 
PlaybackMetrics.STREAM_SOURCE_NETWORK + : PlaybackMetrics.STREAM_SOURCE_UNKNOWN); + playbackSession.reportPlaybackMetrics(metricsBuilder.build()); + } + metricsBuilder = null; + activeSessionId = null; + audioUnderruns = 0; + droppedFrames = 0; + playedFrames = 0; + currentVideoFormat = null; + currentAudioFormat = null; + currentTextFormat = null; + reportedEventsForCurrentSession = false; + } + + private static int getTrackChangeReason(@C.SelectionReason int trackSelectionReason) { + switch (trackSelectionReason) { + case C.SELECTION_REASON_INITIAL: + return TrackChangeEvent.TRACK_CHANGE_REASON_INITIAL; + case C.SELECTION_REASON_ADAPTIVE: + return TrackChangeEvent.TRACK_CHANGE_REASON_ADAPTIVE; + case C.SELECTION_REASON_MANUAL: + return TrackChangeEvent.TRACK_CHANGE_REASON_MANUAL; + case C.SELECTION_REASON_TRICK_PLAY: + case C.SELECTION_REASON_UNKNOWN: + default: + return TrackChangeEvent.TRACK_CHANGE_REASON_OTHER; + } + } + + private static Pair getLanguageAndRegion(String languageCode) { + String[] parts = Util.split(languageCode, "-"); + return Pair.create(parts[0], parts.length >= 2 ? parts[1] : null); + } + + private static int getNetworkType(Context context) { + switch (NetworkTypeObserver.getInstance(context).getNetworkType()) { + case C.NETWORK_TYPE_WIFI: + return NetworkEvent.NETWORK_TYPE_WIFI; + case C.NETWORK_TYPE_2G: + return NetworkEvent.NETWORK_TYPE_2G; + case C.NETWORK_TYPE_3G: + return NetworkEvent.NETWORK_TYPE_3G; + case C.NETWORK_TYPE_4G: + return NetworkEvent.NETWORK_TYPE_4G; + case C.NETWORK_TYPE_5G_SA: + return NetworkEvent.NETWORK_TYPE_5G_SA; + case C.NETWORK_TYPE_5G_NSA: + return NetworkEvent.NETWORK_TYPE_5G_NSA; + case C.NETWORK_TYPE_ETHERNET: + return NetworkEvent.NETWORK_TYPE_ETHERNET; + case C.NETWORK_TYPE_OFFLINE: + return NetworkEvent.NETWORK_TYPE_OFFLINE; + case C.NETWORK_TYPE_UNKNOWN: + return NetworkEvent.NETWORK_TYPE_UNKNOWN; + default: + return NetworkEvent.NETWORK_TYPE_OTHER; + } + } + + private static int getStreamType(MediaItem mediaItem) { + if (mediaItem.localConfiguration == null) { + return PlaybackMetrics.STREAM_TYPE_UNKNOWN; + } + @ContentType + int contentType = + Util.inferContentTypeForUriAndMimeType( + mediaItem.localConfiguration.uri, mediaItem.localConfiguration.mimeType); + switch (contentType) { + case C.CONTENT_TYPE_HLS: + return PlaybackMetrics.STREAM_TYPE_HLS; + case C.CONTENT_TYPE_DASH: + return PlaybackMetrics.STREAM_TYPE_DASH; + case C.CONTENT_TYPE_SS: + return PlaybackMetrics.STREAM_TYPE_SS; + case C.CONTENT_TYPE_RTSP: + default: + return PlaybackMetrics.STREAM_TYPE_OTHER; + } + } + + private static ErrorInfo getErrorInfo( + PlaybackException error, Context context, boolean lastIoErrorForManifest) { + if (error.errorCode == PlaybackException.ERROR_CODE_REMOTE_ERROR) { + return new ErrorInfo(PlaybackErrorEvent.ERROR_PLAYER_REMOTE, /* subErrorCode= */ 0); + } + // Unpack the PlaybackException. + // TODO(b/190203080): Use error codes instead of the Exception's cause where possible. 
+ boolean isRendererExoPlaybackException = false; + int rendererFormatSupport = C.FORMAT_UNSUPPORTED_TYPE; + if (error instanceof ExoPlaybackException) { + ExoPlaybackException exoPlaybackException = (ExoPlaybackException) error; + isRendererExoPlaybackException = + exoPlaybackException.type == ExoPlaybackException.TYPE_RENDERER; + rendererFormatSupport = exoPlaybackException.rendererFormatSupport; + } + Throwable cause = checkNotNull(error.getCause()); + if (cause instanceof IOException) { + if (cause instanceof HttpDataSource.InvalidResponseCodeException) { + int responseCode = ((HttpDataSource.InvalidResponseCodeException) cause).responseCode; + return new ErrorInfo( + PlaybackErrorEvent.ERROR_IO_BAD_HTTP_STATUS, /* subErrorCode= */ responseCode); + } else if (cause instanceof HttpDataSource.InvalidContentTypeException + || cause instanceof ParserException) { + return new ErrorInfo( + lastIoErrorForManifest + ? PlaybackErrorEvent.ERROR_PARSING_MANIFEST_MALFORMED + : PlaybackErrorEvent.ERROR_PARSING_CONTAINER_MALFORMED, + /* subErrorCode= */ 0); + } else if (cause instanceof HttpDataSource.HttpDataSourceException + || cause instanceof UdpDataSource.UdpDataSourceException) { + if (NetworkTypeObserver.getInstance(context).getNetworkType() == C.NETWORK_TYPE_OFFLINE) { + return new ErrorInfo( + PlaybackErrorEvent.ERROR_IO_NETWORK_UNAVAILABLE, /* subErrorCode= */ 0); + } else { + @Nullable Throwable detailedCause = cause.getCause(); + if (detailedCause instanceof UnknownHostException) { + return new ErrorInfo(PlaybackErrorEvent.ERROR_IO_DNS_FAILED, /* subErrorCode= */ 0); + } else if (detailedCause instanceof SocketTimeoutException) { + return new ErrorInfo( + PlaybackErrorEvent.ERROR_IO_CONNECTION_TIMEOUT, /* subErrorCode= */ 0); + } else if (cause instanceof HttpDataSource.HttpDataSourceException + && ((HttpDataSource.HttpDataSourceException) cause).type + == HttpDataSource.HttpDataSourceException.TYPE_OPEN) { + return new ErrorInfo( + PlaybackErrorEvent.ERROR_IO_NETWORK_CONNECTION_FAILED, /* subErrorCode= */ 0); + } else { + return new ErrorInfo( + PlaybackErrorEvent.ERROR_IO_CONNECTION_CLOSED, /* subErrorCode= */ 0); + } + } + } else if (error.errorCode == PlaybackException.ERROR_CODE_BEHIND_LIVE_WINDOW) { + return new ErrorInfo( + PlaybackErrorEvent.ERROR_PLAYER_BEHIND_LIVE_WINDOW, /* subErrorCode= */ 0); + } else if (cause instanceof DrmSession.DrmSessionException) { + // Unpack DrmSessionException. 
+ cause = checkNotNull(cause.getCause()); + if (Util.SDK_INT >= 21 && cause instanceof MediaDrm.MediaDrmStateException) { + String diagnosticsInfo = ((MediaDrm.MediaDrmStateException) cause).getDiagnosticInfo(); + int subErrorCode = Util.getErrorCodeFromPlatformDiagnosticsInfo(diagnosticsInfo); + int errorCode = getDrmErrorCode(subErrorCode); + return new ErrorInfo(errorCode, subErrorCode); + } else if (Util.SDK_INT >= 23 && cause instanceof MediaDrmResetException) { + return new ErrorInfo(PlaybackErrorEvent.ERROR_DRM_SYSTEM_ERROR, /* subErrorCode= */ 0); + } else if (Util.SDK_INT >= 18 && cause instanceof NotProvisionedException) { + return new ErrorInfo( + PlaybackErrorEvent.ERROR_DRM_PROVISIONING_FAILED, /* subErrorCode= */ 0); + } else if (Util.SDK_INT >= 18 && cause instanceof DeniedByServerException) { + return new ErrorInfo(PlaybackErrorEvent.ERROR_DRM_DEVICE_REVOKED, /* subErrorCode= */ 0); + } else if (cause instanceof UnsupportedDrmException) { + return new ErrorInfo( + PlaybackErrorEvent.ERROR_DRM_SCHEME_UNSUPPORTED, /* subErrorCode= */ 0); + } else if (cause instanceof DefaultDrmSessionManager.MissingSchemeDataException) { + return new ErrorInfo(PlaybackErrorEvent.ERROR_DRM_CONTENT_ERROR, /* subErrorCode= */ 0); + } else { + return new ErrorInfo(PlaybackErrorEvent.ERROR_DRM_OTHER, /* subErrorCode= */ 0); + } + } else if (cause instanceof FileDataSource.FileDataSourceException + && cause.getCause() instanceof FileNotFoundException) { + @Nullable Throwable notFoundCause = checkNotNull(cause.getCause()).getCause(); + if (Util.SDK_INT >= 21 + && notFoundCause instanceof ErrnoException + && ((ErrnoException) notFoundCause).errno == OsConstants.EACCES) { + return new ErrorInfo(PlaybackErrorEvent.ERROR_IO_NO_PERMISSION, /* subErrorCode= */ 0); + } else { + return new ErrorInfo(PlaybackErrorEvent.ERROR_IO_FILE_NOT_FOUND, /* subErrorCode= */ 0); + } + } else { + return new ErrorInfo(PlaybackErrorEvent.ERROR_IO_OTHER, /* subErrorCode= */ 0); + } + } else if (isRendererExoPlaybackException + && (rendererFormatSupport == C.FORMAT_UNSUPPORTED_TYPE + || rendererFormatSupport == C.FORMAT_UNSUPPORTED_SUBTYPE)) { + return new ErrorInfo( + PlaybackErrorEvent.ERROR_DECODING_FORMAT_UNSUPPORTED, /* subErrorCode= */ 0); + } else if (isRendererExoPlaybackException + && rendererFormatSupport == C.FORMAT_EXCEEDS_CAPABILITIES) { + return new ErrorInfo( + PlaybackErrorEvent.ERROR_DECODING_FORMAT_EXCEEDS_CAPABILITIES, /* subErrorCode= */ 0); + } else if (isRendererExoPlaybackException + && rendererFormatSupport == C.FORMAT_UNSUPPORTED_DRM) { + return new ErrorInfo(PlaybackErrorEvent.ERROR_DRM_SCHEME_UNSUPPORTED, /* subErrorCode= */ 0); + } else if (cause instanceof MediaCodecRenderer.DecoderInitializationException) { + @Nullable + String diagnosticsInfo = + ((MediaCodecRenderer.DecoderInitializationException) cause).diagnosticInfo; + int subErrorCode = Util.getErrorCodeFromPlatformDiagnosticsInfo(diagnosticsInfo); + return new ErrorInfo(PlaybackErrorEvent.ERROR_DECODER_INIT_FAILED, subErrorCode); + } else if (cause instanceof MediaCodecDecoderException) { + @Nullable String diagnosticsInfo = ((MediaCodecDecoderException) cause).diagnosticInfo; + int subErrorCode = Util.getErrorCodeFromPlatformDiagnosticsInfo(diagnosticsInfo); + return new ErrorInfo(PlaybackErrorEvent.ERROR_DECODING_FAILED, subErrorCode); + } else if (cause instanceof OutOfMemoryError) { + return new ErrorInfo(PlaybackErrorEvent.ERROR_DECODING_FAILED, /* subErrorCode= */ 0); + } else if (cause instanceof AudioSink.InitializationException) 
{ + int subErrorCode = ((AudioSink.InitializationException) cause).audioTrackState; + return new ErrorInfo(PlaybackErrorEvent.ERROR_AUDIO_TRACK_INIT_FAILED, subErrorCode); + } else if (cause instanceof AudioSink.WriteException) { + int subErrorCode = ((AudioSink.WriteException) cause).errorCode; + return new ErrorInfo(PlaybackErrorEvent.ERROR_AUDIO_TRACK_WRITE_FAILED, subErrorCode); + } else if (Util.SDK_INT >= 16 && cause instanceof MediaCodec.CryptoException) { + int subErrorCode = ((MediaCodec.CryptoException) cause).getErrorCode(); + int errorCode = getDrmErrorCode(subErrorCode); + return new ErrorInfo(errorCode, subErrorCode); + } else { + return new ErrorInfo(PlaybackErrorEvent.ERROR_PLAYER_OTHER, /* subErrorCode= */ 0); + } + } + + @Nullable + private static DrmInitData getDrmInitData(ImmutableList trackGroups) { + for (Tracks.Group trackGroup : trackGroups) { + for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) { + if (trackGroup.isTrackSelected(trackIndex)) { + @Nullable DrmInitData drmInitData = trackGroup.getTrackFormat(trackIndex).drmInitData; + if (drmInitData != null) { + return drmInitData; + } + } + } + } + return null; + } + + private static int getDrmType(DrmInitData drmInitData) { + for (int i = 0; i < drmInitData.schemeDataCount; i++) { + UUID uuid = drmInitData.get(i).uuid; + if (uuid.equals(C.WIDEVINE_UUID)) { + // TODO(b/77625596): Forward MediaDrm metrics to distinguish between L1 and L3 and to set + // the drm session id. + return PlaybackMetrics.DRM_TYPE_WIDEVINE_L1; + } + if (uuid.equals(C.PLAYREADY_UUID)) { + return PlaybackMetrics.DRM_TYPE_PLAY_READY; + } + if (uuid.equals(C.CLEARKEY_UUID)) { + return PlaybackMetrics.DRM_TYPE_CLEARKEY; + } + } + return PlaybackMetrics.DRM_TYPE_OTHER; + } + + @SuppressLint("SwitchIntDef") // Only DRM error codes are relevant here. 
+ private static int getDrmErrorCode(int mediaDrmErrorCode) { + switch (Util.getErrorCodeForMediaDrmErrorCode(mediaDrmErrorCode)) { + case PlaybackException.ERROR_CODE_DRM_PROVISIONING_FAILED: + return PlaybackErrorEvent.ERROR_DRM_PROVISIONING_FAILED; + case PlaybackException.ERROR_CODE_DRM_LICENSE_ACQUISITION_FAILED: + return PlaybackErrorEvent.ERROR_DRM_LICENSE_ACQUISITION_FAILED; + case PlaybackException.ERROR_CODE_DRM_DISALLOWED_OPERATION: + return PlaybackErrorEvent.ERROR_DRM_DISALLOWED_OPERATION; + case PlaybackException.ERROR_CODE_DRM_CONTENT_ERROR: + return PlaybackErrorEvent.ERROR_DRM_CONTENT_ERROR; + case PlaybackException.ERROR_CODE_DRM_SYSTEM_ERROR: + default: + return PlaybackErrorEvent.ERROR_DRM_SYSTEM_ERROR; + } + } + + private static final class ErrorInfo { + + public final int errorCode; + public final int subErrorCode; + + public ErrorInfo(int errorCode, int subErrorCode) { + this.errorCode = errorCode; + this.subErrorCode = subErrorCode; + } + } + + private static final class PendingFormatUpdate { + + public final Format format; + public final @C.SelectionReason int selectionReason; + public final String sessionId; + + public PendingFormatUpdate( + Format format, @C.SelectionReason int selectionReason, String sessionId) { + this.format = format; + this.selectionReason = selectionReason; + this.sessionId = sessionId; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/PlaybackSessionManager.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/PlaybackSessionManager.java index 7045779125..786da8c2fc 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/PlaybackSessionManager.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/PlaybackSessionManager.java @@ -15,6 +15,7 @@ */ package com.google.android.exoplayer2.analytics; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.Player.DiscontinuityReason; import com.google.android.exoplayer2.Timeline; import com.google.android.exoplayer2.analytics.AnalyticsListener.EventTime; @@ -99,24 +100,41 @@ void onSessionFinished( /** * Updates or creates sessions based on a player {@link EventTime}. * + *
<p>
      Call {@link #updateSessionsWithTimelineChange(EventTime)} or {@link + * #updateSessionsWithDiscontinuity(EventTime, int)} if the event is a {@link Timeline} change or + * a position discontinuity respectively. + * * @param eventTime The {@link EventTime}. */ void updateSessions(EventTime eventTime); /** - * Updates the session associations to a new timeline. + * Updates or creates sessions based on a {@link Timeline} change at {@link EventTime}. + * + *
<p>
      Should be called instead of {@link #updateSessions(EventTime)} if a {@link Timeline} change + * occurred. * - * @param eventTime The event time with the timeline change. + * @param eventTime The {@link EventTime} with the timeline change. */ - void handleTimelineUpdate(EventTime eventTime); + void updateSessionsWithTimelineChange(EventTime eventTime); /** - * Handles a position discontinuity. + * Updates or creates sessions based on a position discontinuity at {@link EventTime}. + * + *
<p>
      Should be called instead of {@link #updateSessions(EventTime)} if a position discontinuity + * occurred. * - * @param eventTime The event time of the position discontinuity. + * @param eventTime The {@link EventTime} of the position discontinuity. * @param reason The {@link DiscontinuityReason}. */ - void handlePositionDiscontinuity(EventTime eventTime, @DiscontinuityReason int reason); + void updateSessionsWithDiscontinuity(EventTime eventTime, @DiscontinuityReason int reason); + + /** + * Returns the session identifier of the session that is currently actively playing, or {@code + * null} if there no such session. + */ + @Nullable + String getActiveSessionId(); /** * Finishes all existing sessions and calls their respective {@link diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/PlaybackStats.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/PlaybackStats.java index b370c893de..029d1ea0db 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/PlaybackStats.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/PlaybackStats.java @@ -15,37 +15,164 @@ */ package com.google.android.exoplayer2.analytics; +import static java.lang.Math.max; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.os.SystemClock; -import android.util.Pair; import androidx.annotation.IntDef; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.analytics.AnalyticsListener.EventTime; import java.lang.annotation.Documented; -import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.util.Collections; import java.util.List; -import org.checkerframework.checker.nullness.compatqual.NullableType; /** Statistics about playbacks. */ public final class PlaybackStats { + /** Stores a playback state with the event time at which it became active. */ + public static final class EventTimeAndPlaybackState { + /** The event time at which the playback state became active. */ + public final EventTime eventTime; + /** The playback state that became active. */ + public final @PlaybackState int playbackState; + + /** + * Creates a new timed playback state event. + * + * @param eventTime The event time at which the playback state became active. + * @param playbackState The playback state that became active. + */ + public EventTimeAndPlaybackState(EventTime eventTime, @PlaybackState int playbackState) { + this.eventTime = eventTime; + this.playbackState = playbackState; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + EventTimeAndPlaybackState that = (EventTimeAndPlaybackState) o; + if (playbackState != that.playbackState) { + return false; + } + return eventTime.equals(that.eventTime); + } + + @Override + public int hashCode() { + int result = eventTime.hashCode(); + result = 31 * result + playbackState; + return result; + } + } + + /** + * Stores a format with the event time at which it started being used, or {@code null} to indicate + * that no format was used. + */ + public static final class EventTimeAndFormat { + /** The event time associated with {@link #format}. 
*/ + public final EventTime eventTime; + /** The format that started being used, or {@code null} if no format was used. */ + @Nullable public final Format format; + + /** + * Creates a new timed format event. + * + * @param eventTime The event time associated with {@code format}. + * @param format The format that started being used, or {@code null} if no format was used. + */ + public EventTimeAndFormat(EventTime eventTime, @Nullable Format format) { + this.eventTime = eventTime; + this.format = format; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + EventTimeAndFormat that = (EventTimeAndFormat) o; + if (!eventTime.equals(that.eventTime)) { + return false; + } + return format != null ? format.equals(that.format) : that.format == null; + } + + @Override + public int hashCode() { + int result = eventTime.hashCode(); + result = 31 * result + (format != null ? format.hashCode() : 0); + return result; + } + } + + /** Stores an exception with the event time at which it occurred. */ + public static final class EventTimeAndException { + /** The event time at which the exception occurred. */ + public final EventTime eventTime; + /** The exception that was thrown. */ + public final Exception exception; + + /** + * Creates a new timed exception event. + * + * @param eventTime The event time at which the exception occurred. + * @param exception The exception that was thrown. + */ + public EventTimeAndException(EventTime eventTime, Exception exception) { + this.eventTime = eventTime; + this.exception = exception; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + EventTimeAndException that = (EventTimeAndException) o; + if (!eventTime.equals(that.eventTime)) { + return false; + } + return exception.equals(that.exception); + } + + @Override + public int hashCode() { + int result = eventTime.hashCode(); + result = 31 * result + exception.hashCode(); + return result; + } + } + /** * State of a playback. One of {@link #PLAYBACK_STATE_NOT_STARTED}, {@link * #PLAYBACK_STATE_JOINING_FOREGROUND}, {@link #PLAYBACK_STATE_JOINING_BACKGROUND}, {@link * #PLAYBACK_STATE_PLAYING}, {@link #PLAYBACK_STATE_PAUSED}, {@link #PLAYBACK_STATE_SEEKING}, * {@link #PLAYBACK_STATE_BUFFERING}, {@link #PLAYBACK_STATE_PAUSED_BUFFERING}, {@link - * #PLAYBACK_STATE_SEEK_BUFFERING}, {@link #PLAYBACK_STATE_SUPPRESSED}, {@link - * #PLAYBACK_STATE_SUPPRESSED_BUFFERING}, {@link #PLAYBACK_STATE_ENDED}, {@link - * #PLAYBACK_STATE_STOPPED}, {@link #PLAYBACK_STATE_FAILED}, {@link + * #PLAYBACK_STATE_SUPPRESSED}, {@link #PLAYBACK_STATE_SUPPRESSED_BUFFERING}, {@link + * #PLAYBACK_STATE_ENDED}, {@link #PLAYBACK_STATE_STOPPED}, {@link #PLAYBACK_STATE_FAILED}, {@link * #PLAYBACK_STATE_INTERRUPTED_BY_AD} or {@link #PLAYBACK_STATE_ABANDONED}. 
*/ @Documented @Retention(RetentionPolicy.SOURCE) - @Target({ElementType.TYPE_PARAMETER, ElementType.TYPE_USE}) + @Target(TYPE_USE) @IntDef({ PLAYBACK_STATE_NOT_STARTED, PLAYBACK_STATE_JOINING_BACKGROUND, @@ -55,7 +182,6 @@ public final class PlaybackStats { PLAYBACK_STATE_SEEKING, PLAYBACK_STATE_BUFFERING, PLAYBACK_STATE_PAUSED_BUFFERING, - PLAYBACK_STATE_SEEK_BUFFERING, PLAYBACK_STATE_SUPPRESSED, PLAYBACK_STATE_SUPPRESSED_BUFFERING, PLAYBACK_STATE_ENDED, @@ -81,8 +207,6 @@ public final class PlaybackStats { public static final int PLAYBACK_STATE_BUFFERING = 6; /** Playback is buffering while paused. */ public static final int PLAYBACK_STATE_PAUSED_BUFFERING = 7; - /** Playback is buffering after a seek. */ - public static final int PLAYBACK_STATE_SEEK_BUFFERING = 8; /** Playback is suppressed (e.g. due to audio focus loss). */ public static final int PLAYBACK_STATE_SUPPRESSED = 9; /** Playback is suppressed (e.g. due to audio focus loss) while buffering to resume a playback. */ @@ -155,7 +279,7 @@ public static PlaybackStats merge(PlaybackStats... playbackStats) { if (firstReportedTimeMs == C.TIME_UNSET) { firstReportedTimeMs = stats.firstReportedTimeMs; } else if (stats.firstReportedTimeMs != C.TIME_UNSET) { - firstReportedTimeMs = Math.min(firstReportedTimeMs, stats.firstReportedTimeMs); + firstReportedTimeMs = min(firstReportedTimeMs, stats.firstReportedTimeMs); } foregroundPlaybackCount += stats.foregroundPlaybackCount; abandonedBeforeReadyCount += stats.abandonedBeforeReadyCount; @@ -174,7 +298,7 @@ public static PlaybackStats merge(PlaybackStats... playbackStats) { if (maxRebufferTimeMs == C.TIME_UNSET) { maxRebufferTimeMs = stats.maxRebufferTimeMs; } else if (stats.maxRebufferTimeMs != C.TIME_UNSET) { - maxRebufferTimeMs = Math.max(maxRebufferTimeMs, stats.maxRebufferTimeMs); + maxRebufferTimeMs = max(maxRebufferTimeMs, stats.maxRebufferTimeMs); } adPlaybackCount += stats.adPlaybackCount; totalVideoFormatHeightTimeMs += stats.totalVideoFormatHeightTimeMs; @@ -258,10 +382,10 @@ public static PlaybackStats merge(PlaybackStats... playbackStats) { // Playback state stats. /** - * The playback state history as ordered pairs of the {@link EventTime} at which a state became - * active and the {@link PlaybackState}. + * The playback state history as {@link EventTimeAndPlaybackState EventTimeAndPlaybackStates} + * ordered by {@code EventTime.realTimeMs}. */ - public final List> playbackStateHistory; + public final List playbackStateHistory; /** * The media time history as an ordered list of long[2] arrays with [0] being the realtime as * returned by {@code SystemClock.elapsedRealtime()} and [1] being the media time at this @@ -319,15 +443,15 @@ public static PlaybackStats merge(PlaybackStats... playbackStats) { // Format stats. /** - * The video format history as ordered pairs of the {@link EventTime} at which a format started - * being used and the {@link Format}. The {@link Format} may be null if no video format was used. + * The video format history as {@link EventTimeAndFormat EventTimeAndFormats} ordered by {@code + * EventTime.realTimeMs}. The {@link Format} may be null if no video format was used. */ - public final List> videoFormatHistory; + public final List videoFormatHistory; /** - * The audio format history as ordered pairs of the {@link EventTime} at which a format started - * being used and the {@link Format}. The {@link Format} may be null if no audio format was used. 
+ * The audio format history as {@link EventTimeAndFormat EventTimeAndFormats} ordered by {@code + * EventTime.realTimeMs}. The {@link Format} may be null if no audio format was used. */ - public final List> audioFormatHistory; + public final List audioFormatHistory; /** The total media time for which video format height data is available, in milliseconds. */ public final long totalVideoFormatHeightTimeMs; /** @@ -400,23 +524,23 @@ public static PlaybackStats merge(PlaybackStats... playbackStats) { */ public final int nonFatalErrorCount; /** - * The history of fatal errors as ordered pairs of the {@link EventTime} at which an error - * occurred and the error. Errors are fatal if playback stopped due to this error. + * The history of fatal errors as {@link EventTimeAndException EventTimeAndExceptions} ordered by + * {@code EventTime.realTimeMs}. Errors are fatal if playback stopped due to this error. */ - public final List> fatalErrorHistory; + public final List fatalErrorHistory; /** - * The history of non-fatal errors as ordered pairs of the {@link EventTime} at which an error - * occurred and the error. Error are non-fatal if playback can recover from the error without - * stopping. + * The history of non-fatal errors as {@link EventTimeAndException EventTimeAndExceptions} ordered + * by {@code EventTime.realTimeMs}. Errors are non-fatal if playback can recover from the error + * without stopping. */ - public final List> nonFatalErrorHistory; + public final List nonFatalErrorHistory; private final long[] playbackStateDurationsMs; /* package */ PlaybackStats( int playbackCount, long[] playbackStateDurationsMs, - List> playbackStateHistory, + List playbackStateHistory, List mediaTimeHistory, long firstReportedTimeMs, int foregroundPlaybackCount, @@ -431,8 +555,8 @@ public static PlaybackStats merge(PlaybackStats... playbackStats) { int totalRebufferCount, long maxRebufferTimeMs, int adPlaybackCount, - List> videoFormatHistory, - List> audioFormatHistory, + List videoFormatHistory, + List audioFormatHistory, long totalVideoFormatHeightTimeMs, long totalVideoFormatHeightTimeProduct, long totalVideoFormatBitrateTimeMs, @@ -452,8 +576,8 @@ public static PlaybackStats merge(PlaybackStats... playbackStats) { int fatalErrorPlaybackCount, int fatalErrorCount, int nonFatalErrorCount, - List> fatalErrorHistory, - List> nonFatalErrorHistory) { + List fatalErrorHistory, + List nonFatalErrorHistory) { this.playbackCount = playbackCount; this.playbackStateDurationsMs = playbackStateDurationsMs; this.playbackStateHistory = Collections.unmodifiableList(playbackStateHistory); @@ -515,11 +639,11 @@ public long getPlaybackStateDurationMs(@PlaybackState int playbackState) { */ public @PlaybackState int getPlaybackStateAtTime(long realtimeMs) { @PlaybackState int state = PLAYBACK_STATE_NOT_STARTED; - for (Pair timeAndState : playbackStateHistory) { - if (timeAndState.first.realtimeMs > realtimeMs) { + for (EventTimeAndPlaybackState timeAndState : playbackStateHistory) { + if (timeAndState.eventTime.realtimeMs > realtimeMs) { break; } - state = timeAndState.second; + state = timeAndState.playbackState; } return state; } @@ -644,8 +768,7 @@ public long getMeanSingleRebufferTimeMs() { * milliseconds. 
*/ public long getTotalSeekTimeMs() { - return getPlaybackStateDurationMs(PLAYBACK_STATE_SEEKING) - + getPlaybackStateDurationMs(PLAYBACK_STATE_SEEK_BUFFERING); + return getPlaybackStateDurationMs(PLAYBACK_STATE_SEEKING); } /** @@ -674,8 +797,7 @@ public long getMeanSingleSeekTimeMs() { public long getTotalWaitTimeMs() { return getPlaybackStateDurationMs(PLAYBACK_STATE_JOINING_FOREGROUND) + getPlaybackStateDurationMs(PLAYBACK_STATE_BUFFERING) - + getPlaybackStateDurationMs(PLAYBACK_STATE_SEEKING) - + getPlaybackStateDurationMs(PLAYBACK_STATE_SEEK_BUFFERING); + + getPlaybackStateDurationMs(PLAYBACK_STATE_SEEKING); } /** diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/PlaybackStatsListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/PlaybackStatsListener.java index 46c0a05342..e9550c136b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/PlaybackStatsListener.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/PlaybackStatsListener.java @@ -15,26 +15,29 @@ */ package com.google.android.exoplayer2.analytics; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.Math.max; + import android.os.SystemClock; import android.util.Pair; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.PlaybackParameters; +import com.google.android.exoplayer2.PlaybackException; import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.Timeline; import com.google.android.exoplayer2.Timeline.Period; +import com.google.android.exoplayer2.Tracks; +import com.google.android.exoplayer2.analytics.PlaybackStats.EventTimeAndException; +import com.google.android.exoplayer2.analytics.PlaybackStats.EventTimeAndFormat; +import com.google.android.exoplayer2.analytics.PlaybackStats.EventTimeAndPlaybackState; import com.google.android.exoplayer2.analytics.PlaybackStats.PlaybackState; +import com.google.android.exoplayer2.source.LoadEventInfo; +import com.google.android.exoplayer2.source.MediaLoadData; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; -import com.google.android.exoplayer2.source.MediaSourceEventListener.LoadEventInfo; -import com.google.android.exoplayer2.source.MediaSourceEventListener.MediaLoadData; -import com.google.android.exoplayer2.source.TrackGroupArray; -import com.google.android.exoplayer2.trackselection.TrackSelection; -import com.google.android.exoplayer2.trackselection.TrackSelectionArray; import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; +import com.google.android.exoplayer2.video.VideoSize; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -42,7 +45,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.checkerframework.checker.nullness.compatqual.NullableType; /** * {@link AnalyticsListener} to gather {@link PlaybackStats} from the player. 
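For context, a minimal usage sketch of the listener being reworked here (illustrative only, not taken from this patch; it assumes the standard ExoPlayer 2 ExoPlayer/addAnalyticsListener API and only calls the constructor and getters that are visible in this diff):

    import com.google.android.exoplayer2.ExoPlayer;
    import com.google.android.exoplayer2.analytics.PlaybackStats;
    import com.google.android.exoplayer2.analytics.PlaybackStatsListener;

    final class PlaybackStatsExample {

      // Attach a stats listener to an existing player. keepHistory=true also records the
      // per-event histories (playbackStateHistory, videoFormatHistory, fatalErrorHistory, ...).
      static PlaybackStatsListener attach(ExoPlayer player) {
        PlaybackStatsListener statsListener =
            new PlaybackStatsListener(/* keepHistory= */ true, /* callback= */ null);
        player.addAnalyticsListener(statsListener);
        return statsListener;
      }

      // Snapshot of the currently active session; null when no playback session is active.
      static void logSnapshot(PlaybackStatsListener statsListener) {
        PlaybackStats stats = statsListener.getPlaybackStats();
        if (stats != null) {
          long playingMs = stats.getPlaybackStateDurationMs(PlaybackStats.PLAYBACK_STATE_PLAYING);
          long waitMs = stats.getTotalWaitTimeMs(); // joining + buffering + seeking, as defined above
          long seekMs = stats.getTotalSeekTimeMs();
          android.util.Log.d("PlaybackStats", "playing=" + playingMs + " wait=" + waitMs + " seek=" + seekMs);
        }
      }
    }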
@@ -77,12 +79,17 @@ public interface Callback { private final Period period; private PlaybackStats finishedPlaybackStats; - @Nullable private String activeContentPlayback; - @Nullable private String activeAdPlayback; - private boolean playWhenReady; - @Player.State private int playbackState; - private boolean isSuppressed; - private float playbackSpeed; + + @Nullable private String discontinuityFromSession; + private long discontinuityFromPositionMs; + private @Player.DiscontinuityReason int discontinuityReason; + private int droppedFrames; + @Nullable private Exception nonFatalException; + private long bandwidthTimeMs; + private long bandwidthBytes; + @Nullable private Format videoFormat; + @Nullable private Format audioFormat; + private VideoSize videoSize; /** * Creates listener for playback stats. @@ -98,10 +105,8 @@ public PlaybackStatsListener(boolean keepHistory, @Nullable Callback callback) { playbackStatsTrackers = new HashMap<>(); sessionStartEventTimes = new HashMap<>(); finishedPlaybackStats = PlaybackStats.EMPTY; - playWhenReady = false; - playbackState = Player.STATE_IDLE; - playbackSpeed = 1f; period = new Period(); + videoSize = VideoSize.UNKNOWN; sessionManager.setListener(this); } @@ -131,101 +136,43 @@ public PlaybackStats getCombinedPlaybackStats() { */ @Nullable public PlaybackStats getPlaybackStats() { + @Nullable String activeSessionId = sessionManager.getActiveSessionId(); + @Nullable PlaybackStatsTracker activeStatsTracker = - activeAdPlayback != null - ? playbackStatsTrackers.get(activeAdPlayback) - : activeContentPlayback != null - ? playbackStatsTrackers.get(activeContentPlayback) - : null; + activeSessionId == null ? null : playbackStatsTrackers.get(activeSessionId); return activeStatsTracker == null ? null : activeStatsTracker.build(/* isFinal= */ false); } - /** - * Finishes all pending playback sessions. Should be called when the listener is removed from the - * player or when the player is released. - */ - public void finishAllSessions() { - // TODO: Add AnalyticsListener.onAttachedToPlayer and onDetachedFromPlayer to auto-release with - // an actual EventTime. Should also simplify other cases where the listener needs to be released - // separately from the player. - EventTime dummyEventTime = - new EventTime( - SystemClock.elapsedRealtime(), - Timeline.EMPTY, - /* windowIndex= */ 0, - /* mediaPeriodId= */ null, - /* eventPlaybackPositionMs= */ 0, - /* currentPlaybackPositionMs= */ 0, - /* totalBufferedDurationMs= */ 0); - sessionManager.finishAllSessions(dummyEventTime); - } - // PlaybackSessionManager.Listener implementation. 
@Override - public void onSessionCreated(EventTime eventTime, String session) { + public void onSessionCreated(EventTime eventTime, String sessionId) { PlaybackStatsTracker tracker = new PlaybackStatsTracker(keepHistory, eventTime); - tracker.onPlayerStateChanged( - eventTime, playWhenReady, playbackState, /* belongsToPlayback= */ true); - tracker.onIsSuppressedChanged(eventTime, isSuppressed, /* belongsToPlayback= */ true); - tracker.onPlaybackSpeedChanged(eventTime, playbackSpeed); - playbackStatsTrackers.put(session, tracker); - sessionStartEventTimes.put(session, eventTime); + playbackStatsTrackers.put(sessionId, tracker); + sessionStartEventTimes.put(sessionId, eventTime); } @Override - public void onSessionActive(EventTime eventTime, String session) { - Assertions.checkNotNull(playbackStatsTrackers.get(session)).onForeground(eventTime); - if (eventTime.mediaPeriodId != null && eventTime.mediaPeriodId.isAd()) { - activeAdPlayback = session; - } else { - activeContentPlayback = session; - } + public void onSessionActive(EventTime eventTime, String sessionId) { + checkNotNull(playbackStatsTrackers.get(sessionId)).onForeground(); } @Override - public void onAdPlaybackStarted(EventTime eventTime, String contentSession, String adSession) { - Assertions.checkState(Assertions.checkNotNull(eventTime.mediaPeriodId).isAd()); - long contentPeriodPositionUs = - eventTime - .timeline - .getPeriodByUid(eventTime.mediaPeriodId.periodUid, period) - .getAdGroupTimeUs(eventTime.mediaPeriodId.adGroupIndex); - long contentWindowPositionUs = - contentPeriodPositionUs == C.TIME_END_OF_SOURCE - ? C.TIME_END_OF_SOURCE - : contentPeriodPositionUs + period.getPositionInWindowUs(); - EventTime contentEventTime = - new EventTime( - eventTime.realtimeMs, - eventTime.timeline, - eventTime.windowIndex, - new MediaPeriodId( - eventTime.mediaPeriodId.periodUid, - eventTime.mediaPeriodId.windowSequenceNumber, - eventTime.mediaPeriodId.adGroupIndex), - /* eventPlaybackPositionMs= */ C.usToMs(contentWindowPositionUs), - eventTime.currentPlaybackPositionMs, - eventTime.totalBufferedDurationMs); - Assertions.checkNotNull(playbackStatsTrackers.get(contentSession)) - .onInterruptedByAd(contentEventTime); + public void onAdPlaybackStarted( + EventTime eventTime, String contentSessionId, String adSessionId) { + checkNotNull(playbackStatsTrackers.get(contentSessionId)).onInterruptedByAd(); } @Override - public void onSessionFinished(EventTime eventTime, String session, boolean automaticTransition) { - if (session.equals(activeAdPlayback)) { - activeAdPlayback = null; - } else if (session.equals(activeContentPlayback)) { - activeContentPlayback = null; - } - PlaybackStatsTracker tracker = Assertions.checkNotNull(playbackStatsTrackers.remove(session)); - EventTime startEventTime = Assertions.checkNotNull(sessionStartEventTimes.remove(session)); - if (automaticTransition) { - // Simulate ENDED state to record natural ending of playback. - tracker.onPlayerStateChanged( - eventTime, /* playWhenReady= */ true, Player.STATE_ENDED, /* belongsToPlayback= */ false); - } - tracker.onFinished(eventTime); + public void onSessionFinished( + EventTime eventTime, String sessionId, boolean automaticTransitionToNextPlayback) { + PlaybackStatsTracker tracker = checkNotNull(playbackStatsTrackers.remove(sessionId)); + EventTime startEventTime = checkNotNull(sessionStartEventTimes.remove(sessionId)); + long discontinuityFromPositionMs = + sessionId.equals(discontinuityFromSession) + ? 
this.discontinuityFromPositionMs + : C.TIME_UNSET; + tracker.onFinished(eventTime, automaticTransitionToNextPlayback, discontinuityFromPositionMs); PlaybackStats playbackStats = tracker.build(/* isFinal= */ true); finishedPlaybackStats = PlaybackStats.merge(finishedPlaybackStats, playbackStats); if (callback != null) { @@ -236,203 +183,170 @@ public void onSessionFinished(EventTime eventTime, String session, boolean autom // AnalyticsListener implementation. @Override - public void onPlayerStateChanged( - EventTime eventTime, boolean playWhenReady, @Player.State int playbackState) { - this.playWhenReady = playWhenReady; - this.playbackState = playbackState; - maybeAddSession(eventTime); - for (String session : playbackStatsTrackers.keySet()) { - boolean belongsToPlayback = sessionManager.belongsToSession(eventTime, session); - playbackStatsTrackers - .get(session) - .onPlayerStateChanged(eventTime, playWhenReady, playbackState, belongsToPlayback); - } - } - - @Override - public void onPlaybackSuppressionReasonChanged( - EventTime eventTime, int playbackSuppressionReason) { - isSuppressed = playbackSuppressionReason != Player.PLAYBACK_SUPPRESSION_REASON_NONE; - maybeAddSession(eventTime); - for (String session : playbackStatsTrackers.keySet()) { - boolean belongsToPlayback = sessionManager.belongsToSession(eventTime, session); - playbackStatsTrackers - .get(session) - .onIsSuppressedChanged(eventTime, isSuppressed, belongsToPlayback); - } - } - - @Override - public void onTimelineChanged(EventTime eventTime, int reason) { - sessionManager.handleTimelineUpdate(eventTime); - maybeAddSession(eventTime); - for (String session : playbackStatsTrackers.keySet()) { - if (sessionManager.belongsToSession(eventTime, session)) { - playbackStatsTrackers.get(session).onPositionDiscontinuity(eventTime); - } - } - } - - @Override - public void onPositionDiscontinuity(EventTime eventTime, int reason) { - sessionManager.handlePositionDiscontinuity(eventTime, reason); - maybeAddSession(eventTime); - for (String session : playbackStatsTrackers.keySet()) { - if (sessionManager.belongsToSession(eventTime, session)) { - playbackStatsTrackers.get(session).onPositionDiscontinuity(eventTime); - } - } - } - - @Override - public void onSeekStarted(EventTime eventTime) { - maybeAddSession(eventTime); - for (String session : playbackStatsTrackers.keySet()) { - if (sessionManager.belongsToSession(eventTime, session)) { - playbackStatsTrackers.get(session).onSeekStarted(eventTime); - } - } - } - - @Override - public void onSeekProcessed(EventTime eventTime) { - maybeAddSession(eventTime); - for (String session : playbackStatsTrackers.keySet()) { - if (sessionManager.belongsToSession(eventTime, session)) { - playbackStatsTrackers.get(session).onSeekProcessed(eventTime); - } - } + public void onPositionDiscontinuity( + EventTime eventTime, + Player.PositionInfo oldPosition, + Player.PositionInfo newPosition, + @Player.DiscontinuityReason int reason) { + if (discontinuityFromSession == null) { + discontinuityFromSession = sessionManager.getActiveSessionId(); + discontinuityFromPositionMs = oldPosition.positionMs; + } + discontinuityReason = reason; } @Override - public void onPlayerError(EventTime eventTime, ExoPlaybackException error) { - maybeAddSession(eventTime); - for (String session : playbackStatsTrackers.keySet()) { - if (sessionManager.belongsToSession(eventTime, session)) { - playbackStatsTrackers.get(session).onFatalError(eventTime, error); - } - } + public void onDroppedVideoFrames(EventTime eventTime, int 
droppedFrames, long elapsedMs) { + this.droppedFrames = droppedFrames; } @Override - public void onPlaybackParametersChanged( - EventTime eventTime, PlaybackParameters playbackParameters) { - playbackSpeed = playbackParameters.speed; - maybeAddSession(eventTime); - for (PlaybackStatsTracker tracker : playbackStatsTrackers.values()) { - tracker.onPlaybackSpeedChanged(eventTime, playbackSpeed); - } + public void onLoadError( + EventTime eventTime, + LoadEventInfo loadEventInfo, + MediaLoadData mediaLoadData, + IOException error, + boolean wasCanceled) { + nonFatalException = error; } @Override - public void onTracksChanged( - EventTime eventTime, TrackGroupArray trackGroups, TrackSelectionArray trackSelections) { - maybeAddSession(eventTime); - for (String session : playbackStatsTrackers.keySet()) { - if (sessionManager.belongsToSession(eventTime, session)) { - playbackStatsTrackers.get(session).onTracksChanged(eventTime, trackSelections); - } - } + public void onDrmSessionManagerError(EventTime eventTime, Exception error) { + nonFatalException = error; } @Override - public void onLoadStarted( - EventTime eventTime, LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData) { - maybeAddSession(eventTime); - for (String session : playbackStatsTrackers.keySet()) { - if (sessionManager.belongsToSession(eventTime, session)) { - playbackStatsTrackers.get(session).onLoadStarted(eventTime); - } - } + public void onBandwidthEstimate( + EventTime eventTime, int totalLoadTimeMs, long totalBytesLoaded, long bitrateEstimate) { + bandwidthTimeMs = totalLoadTimeMs; + bandwidthBytes = totalBytesLoaded; } @Override public void onDownstreamFormatChanged(EventTime eventTime, MediaLoadData mediaLoadData) { - maybeAddSession(eventTime); - for (String session : playbackStatsTrackers.keySet()) { - if (sessionManager.belongsToSession(eventTime, session)) { - playbackStatsTrackers.get(session).onDownstreamFormatChanged(eventTime, mediaLoadData); - } + if (mediaLoadData.trackType == C.TRACK_TYPE_VIDEO + || mediaLoadData.trackType == C.TRACK_TYPE_DEFAULT) { + videoFormat = mediaLoadData.trackFormat; + } else if (mediaLoadData.trackType == C.TRACK_TYPE_AUDIO) { + audioFormat = mediaLoadData.trackFormat; } } @Override - public void onVideoSizeChanged( - EventTime eventTime, - int width, - int height, - int unappliedRotationDegrees, - float pixelWidthHeightRatio) { - maybeAddSession(eventTime); - for (String session : playbackStatsTrackers.keySet()) { - if (sessionManager.belongsToSession(eventTime, session)) { - playbackStatsTrackers.get(session).onVideoSizeChanged(eventTime, width, height); - } - } + public void onVideoSizeChanged(EventTime eventTime, VideoSize videoSize) { + this.videoSize = videoSize; } @Override - public void onBandwidthEstimate( - EventTime eventTime, int totalLoadTimeMs, long totalBytesLoaded, long bitrateEstimate) { - maybeAddSession(eventTime); - for (String session : playbackStatsTrackers.keySet()) { - if (sessionManager.belongsToSession(eventTime, session)) { - playbackStatsTrackers.get(session).onBandwidthData(totalLoadTimeMs, totalBytesLoaded); - } + public void onEvents(Player player, Events events) { + if (events.size() == 0) { + return; } - } - - @Override - public void onAudioUnderrun( - EventTime eventTime, int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) { - maybeAddSession(eventTime); + maybeAddSessions(events); for (String session : playbackStatsTrackers.keySet()) { - if (sessionManager.belongsToSession(eventTime, session)) { - 
playbackStatsTrackers.get(session).onAudioUnderrun(); - } + Pair eventTimeAndBelongsToPlayback = findBestEventTime(events, session); + PlaybackStatsTracker tracker = playbackStatsTrackers.get(session); + boolean hasDiscontinuityToPlayback = hasEvent(events, session, EVENT_POSITION_DISCONTINUITY); + boolean hasDroppedFrames = hasEvent(events, session, EVENT_DROPPED_VIDEO_FRAMES); + boolean hasAudioUnderrun = hasEvent(events, session, EVENT_AUDIO_UNDERRUN); + boolean startedLoading = hasEvent(events, session, EVENT_LOAD_STARTED); + boolean hasFatalError = hasEvent(events, session, EVENT_PLAYER_ERROR); + boolean hasNonFatalException = + hasEvent(events, session, EVENT_LOAD_ERROR) + || hasEvent(events, session, EVENT_DRM_SESSION_MANAGER_ERROR); + boolean hasBandwidthData = hasEvent(events, session, EVENT_BANDWIDTH_ESTIMATE); + boolean hasFormatData = hasEvent(events, session, EVENT_DOWNSTREAM_FORMAT_CHANGED); + boolean hasVideoSize = hasEvent(events, session, EVENT_VIDEO_SIZE_CHANGED); + tracker.onEvents( + player, + /* eventTime= */ eventTimeAndBelongsToPlayback.first, + /* belongsToPlayback= */ eventTimeAndBelongsToPlayback.second, + session.equals(discontinuityFromSession) ? discontinuityFromPositionMs : C.TIME_UNSET, + hasDiscontinuityToPlayback, + hasDroppedFrames ? droppedFrames : 0, + hasAudioUnderrun, + startedLoading, + hasFatalError ? player.getPlayerError() : null, + hasNonFatalException ? nonFatalException : null, + hasBandwidthData ? bandwidthTimeMs : 0, + hasBandwidthData ? bandwidthBytes : 0, + hasFormatData ? videoFormat : null, + hasFormatData ? audioFormat : null, + hasVideoSize ? videoSize : null); + } + videoFormat = null; + audioFormat = null; + discontinuityFromSession = null; + if (events.contains(AnalyticsListener.EVENT_PLAYER_RELEASED)) { + sessionManager.finishAllSessions(events.getEventTime(EVENT_PLAYER_RELEASED)); } } - @Override - public void onDroppedVideoFrames(EventTime eventTime, int droppedFrames, long elapsedMs) { - maybeAddSession(eventTime); - for (String session : playbackStatsTrackers.keySet()) { - if (sessionManager.belongsToSession(eventTime, session)) { - playbackStatsTrackers.get(session).onDroppedVideoFrames(droppedFrames); + private void maybeAddSessions(Events events) { + for (int i = 0; i < events.size(); i++) { + @EventFlags int event = events.get(i); + EventTime eventTime = events.getEventTime(event); + if (event == EVENT_TIMELINE_CHANGED) { + sessionManager.updateSessionsWithTimelineChange(eventTime); + } else if (event == EVENT_POSITION_DISCONTINUITY) { + sessionManager.updateSessionsWithDiscontinuity(eventTime, discontinuityReason); + } else { + sessionManager.updateSessions(eventTime); } } } - @Override - public void onLoadError( - EventTime eventTime, - LoadEventInfo loadEventInfo, - MediaLoadData mediaLoadData, - IOException error, - boolean wasCanceled) { - maybeAddSession(eventTime); - for (String session : playbackStatsTrackers.keySet()) { - if (sessionManager.belongsToSession(eventTime, session)) { - playbackStatsTrackers.get(session).onNonFatalError(eventTime, error); - } - } + private Pair findBestEventTime(Events events, String session) { + @Nullable EventTime eventTime = null; + boolean belongsToPlayback = false; + for (int i = 0; i < events.size(); i++) { + @EventFlags int event = events.get(i); + EventTime newEventTime = events.getEventTime(event); + boolean newBelongsToPlayback = sessionManager.belongsToSession(newEventTime, session); + if (eventTime == null + || (newBelongsToPlayback && !belongsToPlayback) + || 
(newBelongsToPlayback == belongsToPlayback + && newEventTime.realtimeMs > eventTime.realtimeMs)) { + // Prefer event times for the current playback and prefer later timestamps. + eventTime = newEventTime; + belongsToPlayback = newBelongsToPlayback; + } + } + checkNotNull(eventTime); + if (!belongsToPlayback && eventTime.mediaPeriodId != null && eventTime.mediaPeriodId.isAd()) { + // Replace ad event time with content event time unless it's for the ad playback itself. + long contentPeriodPositionUs = + eventTime + .timeline + .getPeriodByUid(eventTime.mediaPeriodId.periodUid, period) + .getAdGroupTimeUs(eventTime.mediaPeriodId.adGroupIndex); + if (contentPeriodPositionUs == C.TIME_END_OF_SOURCE) { + contentPeriodPositionUs = period.durationUs; + } + long contentWindowPositionUs = contentPeriodPositionUs + period.getPositionInWindowUs(); + eventTime = + new EventTime( + eventTime.realtimeMs, + eventTime.timeline, + eventTime.windowIndex, + new MediaPeriodId( + eventTime.mediaPeriodId.periodUid, + eventTime.mediaPeriodId.windowSequenceNumber, + eventTime.mediaPeriodId.adGroupIndex), + /* eventPlaybackPositionMs= */ Util.usToMs(contentWindowPositionUs), + eventTime.timeline, + eventTime.currentWindowIndex, + eventTime.currentMediaPeriodId, + eventTime.currentPlaybackPositionMs, + eventTime.totalBufferedDurationMs); + belongsToPlayback = sessionManager.belongsToSession(eventTime, session); + } + return Pair.create(eventTime, belongsToPlayback); } - @Override - public void onDrmSessionManagerError(EventTime eventTime, Exception error) { - maybeAddSession(eventTime); - for (String session : playbackStatsTrackers.keySet()) { - if (sessionManager.belongsToSession(eventTime, session)) { - playbackStatsTrackers.get(session).onNonFatalError(eventTime, error); - } - } - } - - private void maybeAddSession(EventTime eventTime) { - boolean isCompletelyIdle = eventTime.timeline.isEmpty() && playbackState == Player.STATE_IDLE; - if (!isCompletelyIdle) { - sessionManager.updateSessions(eventTime); - } + private boolean hasEvent(Events events, String session, @EventFlags int event) { + return events.contains(event) + && sessionManager.belongsToSession(events.getEventTime(event), session); } /** Tracker for playback stats of a single playback. */ @@ -441,12 +355,12 @@ private static final class PlaybackStatsTracker { // Final stats. private final boolean keepHistory; private final long[] playbackStateDurationsMs; - private final List> playbackStateHistory; + private final List playbackStateHistory; private final List mediaTimeHistory; - private final List> videoFormatHistory; - private final List> audioFormatHistory; - private final List> fatalErrorHistory; - private final List> nonFatalErrorHistory; + private final List videoFormatHistory; + private final List audioFormatHistory; + private final List fatalErrorHistory; + private final List nonFatalErrorHistory; private final boolean isAd; private long firstReportedTimeMs; @@ -480,10 +394,6 @@ private static final class PlaybackStatsTracker { private boolean isSeeking; private boolean isForeground; private boolean isInterruptedByAd; - private boolean isFinished; - private boolean playWhenReady; - @Player.State private int playerPlaybackState; - private boolean isSuppressed; private boolean hasFatalError; private boolean startedLoading; private long lastRebufferStartTimeMs; @@ -510,7 +420,6 @@ public PlaybackStatsTracker(boolean keepHistory, EventTime startTime) { nonFatalErrorHistory = keepHistory ? 
new ArrayList<>() : Collections.emptyList(); currentPlaybackState = PlaybackStats.PLAYBACK_STATE_NOT_STARTED; currentPlaybackStateStartTimeMs = startTime.realtimeMs; - playerPlaybackState = Player.STATE_IDLE; firstReportedTimeMs = C.TIME_UNSET; maxRebufferTimeMs = C.TIME_UNSET; isAd = startTime.mediaPeriodId != null && startTime.mediaPeriodId.isAd(); @@ -520,238 +429,152 @@ public PlaybackStatsTracker(boolean keepHistory, EventTime startTime) { currentPlaybackSpeed = 1f; } - /** - * Notifies the tracker of a player state change event, including all player state changes while - * the playback is not in the foreground. - * - * @param eventTime The {@link EventTime}. - * @param playWhenReady Whether the playback will proceed when ready. - * @param playbackState The current {@link Player.State}. - * @param belongsToPlayback Whether the {@code eventTime} belongs to the current playback. - */ - public void onPlayerStateChanged( - EventTime eventTime, - boolean playWhenReady, - @Player.State int playbackState, - boolean belongsToPlayback) { - this.playWhenReady = playWhenReady; - playerPlaybackState = playbackState; - if (playbackState != Player.STATE_IDLE) { - hasFatalError = false; - } - if (playbackState == Player.STATE_IDLE || playbackState == Player.STATE_ENDED) { - isInterruptedByAd = false; - } - maybeUpdatePlaybackState(eventTime, belongsToPlayback); - } - - /** - * Notifies the tracker of a change to the playback suppression (e.g. due to audio focus loss), - * including all updates while the playback is not in the foreground. - * - * @param eventTime The {@link EventTime}. - * @param isSuppressed Whether playback is suppressed. - * @param belongsToPlayback Whether the {@code eventTime} belongs to the current playback. - */ - public void onIsSuppressedChanged( - EventTime eventTime, boolean isSuppressed, boolean belongsToPlayback) { - this.isSuppressed = isSuppressed; - maybeUpdatePlaybackState(eventTime, belongsToPlayback); - } - - /** - * Notifies the tracker of a position discontinuity or timeline update for the current playback. - * - * @param eventTime The {@link EventTime}. - */ - public void onPositionDiscontinuity(EventTime eventTime) { - isInterruptedByAd = false; - maybeUpdatePlaybackState(eventTime, /* belongsToPlayback= */ true); - } - - /** - * Notifies the tracker of the start of a seek in the current playback. - * - * @param eventTime The {@link EventTime}. - */ - public void onSeekStarted(EventTime eventTime) { - isSeeking = true; - maybeUpdatePlaybackState(eventTime, /* belongsToPlayback= */ true); - } - - /** - * Notifies the tracker of a seek has been processed in the current playback. - * - * @param eventTime The {@link EventTime}. - */ - public void onSeekProcessed(EventTime eventTime) { - isSeeking = false; - maybeUpdatePlaybackState(eventTime, /* belongsToPlayback= */ true); - } - - /** - * Notifies the tracker of fatal player error in the current playback. - * - * @param eventTime The {@link EventTime}. - */ - public void onFatalError(EventTime eventTime, Exception error) { - fatalErrorCount++; - if (keepHistory) { - fatalErrorHistory.add(Pair.create(eventTime, error)); - } - hasFatalError = true; - isInterruptedByAd = false; - isSeeking = false; - maybeUpdatePlaybackState(eventTime, /* belongsToPlayback= */ true); - } - - /** - * Notifies the tracker that a load for the current playback has started. - * - * @param eventTime The {@link EventTime}. 
- */ - public void onLoadStarted(EventTime eventTime) { - startedLoading = true; - maybeUpdatePlaybackState(eventTime, /* belongsToPlayback= */ true); - } - - /** - * Notifies the tracker that the current playback became the active foreground playback. - * - * @param eventTime The {@link EventTime}. - */ - public void onForeground(EventTime eventTime) { + /** Notifies the tracker that the current playback became the active foreground playback. */ + public void onForeground() { isForeground = true; - maybeUpdatePlaybackState(eventTime, /* belongsToPlayback= */ true); } - /** - * Notifies the tracker that the current playback has been interrupted for ad playback. - * - * @param eventTime The {@link EventTime}. - */ - public void onInterruptedByAd(EventTime eventTime) { + /** Notifies the tracker that the current playback is interrupted by an ad. */ + public void onInterruptedByAd() { isInterruptedByAd = true; isSeeking = false; - maybeUpdatePlaybackState(eventTime, /* belongsToPlayback= */ true); } /** * Notifies the tracker that the current playback has finished. * - * @param eventTime The {@link EventTime}. Not guaranteed to belong to the current playback. + * @param eventTime The {@link EventTime}. Does not belong to this playback. + * @param automaticTransition Whether the playback finished because of an automatic transition + * to the next playback item. + * @param discontinuityFromPositionMs The position before the discontinuity from this playback, + * {@link C#TIME_UNSET} if no discontinuity started from this playback. */ - public void onFinished(EventTime eventTime) { - isFinished = true; - maybeUpdatePlaybackState(eventTime, /* belongsToPlayback= */ false); + public void onFinished( + EventTime eventTime, boolean automaticTransition, long discontinuityFromPositionMs) { + // Simulate state change to ENDED to record natural ending of playback. + @PlaybackState + int finalPlaybackState = + currentPlaybackState == PlaybackStats.PLAYBACK_STATE_ENDED || automaticTransition + ? PlaybackStats.PLAYBACK_STATE_ENDED + : PlaybackStats.PLAYBACK_STATE_ABANDONED; + maybeUpdateMediaTimeHistory(eventTime.realtimeMs, discontinuityFromPositionMs); + maybeRecordVideoFormatTime(eventTime.realtimeMs); + maybeRecordAudioFormatTime(eventTime.realtimeMs); + updatePlaybackState(finalPlaybackState, eventTime); } /** - * Notifies the tracker that the track selection for the current playback changed. + * Notifies the tracker of new events. * - * @param eventTime The {@link EventTime}. - * @param trackSelections The new {@link TrackSelectionArray}. + * @param player The {@link Player}. + * @param eventTime The {@link EventTime} of the events. + * @param belongsToPlayback Whether the {@code eventTime} belongs to this playback. + * @param discontinuityFromPositionMs The position before the discontinuity from this playback, + * or {@link C#TIME_UNSET} if no discontinuity started from this playback. + * @param hasDiscontinuity Whether a discontinuity to this playback occurred. + * @param droppedFrameCount The number of newly dropped frames for this playback. + * @param hasAudioUnderun Whether a new audio underrun occurred for this playback. + * @param startedLoading Whether this playback started loading. + * @param fatalError A fatal error for this playback, or null. + * @param nonFatalException A non-fatal exception for this playback, or null. + * @param bandwidthTimeMs The time in milliseconds spent loading for this playback. + * @param bandwidthBytes The number of bytes loaded for this playback. 
+ * @param videoFormat A reported downstream video format for this playback, or null. + * @param audioFormat A reported downstream audio format for this playback, or null. + * @param videoSize The reported video size for this playback, or null. */ - public void onTracksChanged(EventTime eventTime, TrackSelectionArray trackSelections) { - boolean videoEnabled = false; - boolean audioEnabled = false; - for (TrackSelection trackSelection : trackSelections.getAll()) { - if (trackSelection != null && trackSelection.length() > 0) { - int trackType = MimeTypes.getTrackType(trackSelection.getFormat(0).sampleMimeType); - if (trackType == C.TRACK_TYPE_VIDEO) { - videoEnabled = true; - } else if (trackType == C.TRACK_TYPE_AUDIO) { - audioEnabled = true; - } - } + public void onEvents( + Player player, + EventTime eventTime, + boolean belongsToPlayback, + long discontinuityFromPositionMs, + boolean hasDiscontinuity, + int droppedFrameCount, + boolean hasAudioUnderun, + boolean startedLoading, + @Nullable PlaybackException fatalError, + @Nullable Exception nonFatalException, + long bandwidthTimeMs, + long bandwidthBytes, + @Nullable Format videoFormat, + @Nullable Format audioFormat, + @Nullable VideoSize videoSize) { + if (discontinuityFromPositionMs != C.TIME_UNSET) { + maybeUpdateMediaTimeHistory(eventTime.realtimeMs, discontinuityFromPositionMs); + isSeeking = true; + } + if (player.getPlaybackState() != Player.STATE_BUFFERING) { + isSeeking = false; + } + int playerPlaybackState = player.getPlaybackState(); + if (playerPlaybackState == Player.STATE_IDLE + || playerPlaybackState == Player.STATE_ENDED + || hasDiscontinuity) { + isInterruptedByAd = false; } - if (!videoEnabled) { - maybeUpdateVideoFormat(eventTime, /* newFormat= */ null); + if (fatalError != null) { + hasFatalError = true; + fatalErrorCount++; + if (keepHistory) { + fatalErrorHistory.add(new EventTimeAndException(eventTime, fatalError)); + } + } else if (player.getPlayerError() == null) { + hasFatalError = false; } - if (!audioEnabled) { - maybeUpdateAudioFormat(eventTime, /* newFormat= */ null); + if (isForeground && !isInterruptedByAd) { + Tracks currentTracks = player.getCurrentTracks(); + if (!currentTracks.isTypeSelected(C.TRACK_TYPE_VIDEO)) { + maybeUpdateVideoFormat(eventTime, /* newFormat= */ null); + } + if (!currentTracks.isTypeSelected(C.TRACK_TYPE_AUDIO)) { + maybeUpdateAudioFormat(eventTime, /* newFormat= */ null); + } } - } - - /** - * Notifies the tracker that a format being read by the renderers for the current playback - * changed. - * - * @param eventTime The {@link EventTime}. - * @param mediaLoadData The {@link MediaLoadData} describing the format change. 
- */ - public void onDownstreamFormatChanged(EventTime eventTime, MediaLoadData mediaLoadData) { - if (mediaLoadData.trackType == C.TRACK_TYPE_VIDEO - || mediaLoadData.trackType == C.TRACK_TYPE_DEFAULT) { - maybeUpdateVideoFormat(eventTime, mediaLoadData.trackFormat); - } else if (mediaLoadData.trackType == C.TRACK_TYPE_AUDIO) { - maybeUpdateAudioFormat(eventTime, mediaLoadData.trackFormat); + if (videoFormat != null) { + maybeUpdateVideoFormat(eventTime, videoFormat); + } + if (audioFormat != null) { + maybeUpdateAudioFormat(eventTime, audioFormat); + } + if (currentVideoFormat != null + && currentVideoFormat.height == Format.NO_VALUE + && videoSize != null) { + Format formatWithHeightAndWidth = + currentVideoFormat + .buildUpon() + .setWidth(videoSize.width) + .setHeight(videoSize.height) + .build(); + maybeUpdateVideoFormat(eventTime, formatWithHeightAndWidth); + } + if (startedLoading) { + this.startedLoading = true; + } + if (hasAudioUnderun) { + audioUnderruns++; + } + this.droppedFrames += droppedFrameCount; + this.bandwidthTimeMs += bandwidthTimeMs; + this.bandwidthBytes += bandwidthBytes; + if (nonFatalException != null) { + nonFatalErrorCount++; + if (keepHistory) { + nonFatalErrorHistory.add(new EventTimeAndException(eventTime, nonFatalException)); + } } - } - /** - * Notifies the tracker that the video size for the current playback changed. - * - * @param eventTime The {@link EventTime}. - * @param width The video width in pixels. - * @param height The video height in pixels. - */ - public void onVideoSizeChanged(EventTime eventTime, int width, int height) { - if (currentVideoFormat != null && currentVideoFormat.height == Format.NO_VALUE) { - Format formatWithHeight = currentVideoFormat.copyWithVideoSize(width, height); - maybeUpdateVideoFormat(eventTime, formatWithHeight); + @PlaybackState int newPlaybackState = resolveNewPlaybackState(player); + float newPlaybackSpeed = player.getPlaybackParameters().speed; + if (currentPlaybackState != newPlaybackState || currentPlaybackSpeed != newPlaybackSpeed) { + maybeUpdateMediaTimeHistory( + eventTime.realtimeMs, + belongsToPlayback ? eventTime.eventPlaybackPositionMs : C.TIME_UNSET); + maybeRecordVideoFormatTime(eventTime.realtimeMs); + maybeRecordAudioFormatTime(eventTime.realtimeMs); } - } - - /** - * Notifies the tracker of a playback speed change, including all playback speed changes while - * the playback is not in the foreground. - * - * @param eventTime The {@link EventTime}. - * @param playbackSpeed The new playback speed. - */ - public void onPlaybackSpeedChanged(EventTime eventTime, float playbackSpeed) { - maybeUpdateMediaTimeHistory(eventTime.realtimeMs, eventTime.eventPlaybackPositionMs); - maybeRecordVideoFormatTime(eventTime.realtimeMs); - maybeRecordAudioFormatTime(eventTime.realtimeMs); - currentPlaybackSpeed = playbackSpeed; - } - - /** Notifies the builder of an audio underrun for the current playback. */ - public void onAudioUnderrun() { - audioUnderruns++; - } - - /** - * Notifies the tracker of dropped video frames for the current playback. - * - * @param droppedFrames The number of dropped video frames. - */ - public void onDroppedVideoFrames(int droppedFrames) { - this.droppedFrames += droppedFrames; - } - - /** - * Notifies the tracker of bandwidth measurement data for the current playback. - * - * @param timeMs The time for which bandwidth measurement data is available, in milliseconds. - * @param bytes The bytes transferred during {@code timeMs}. 
- */ - public void onBandwidthData(long timeMs, long bytes) { - bandwidthTimeMs += timeMs; - bandwidthBytes += bytes; - } - - /** - * Notifies the tracker of a non-fatal error in the current playback. - * - * @param eventTime The {@link EventTime}. - * @param error The error. - */ - public void onNonFatalError(EventTime eventTime, Exception error) { - nonFatalErrorCount++; - if (keepHistory) { - nonFatalErrorHistory.add(Pair.create(eventTime, error)); + currentPlaybackSpeed = newPlaybackSpeed; + if (currentPlaybackState != newPlaybackState) { + updatePlaybackState(newPlaybackState, eventTime); } } @@ -767,7 +590,7 @@ public PlaybackStats build(boolean isFinal) { long buildTimeMs = SystemClock.elapsedRealtime(); playbackStateDurationsMs = Arrays.copyOf(this.playbackStateDurationsMs, PlaybackStats.PLAYBACK_STATE_COUNT); - long lastStateDurationMs = Math.max(0, buildTimeMs - currentPlaybackStateStartTimeMs); + long lastStateDurationMs = max(0, buildTimeMs - currentPlaybackStateStartTimeMs); playbackStateDurationsMs[currentPlaybackState] += lastStateDurationMs; maybeUpdateMaxRebufferTimeMs(buildTimeMs); maybeRecordVideoFormatTime(buildTimeMs); @@ -784,9 +607,9 @@ public PlaybackStats build(boolean isFinal) { : playbackStateDurationsMs[PlaybackStats.PLAYBACK_STATE_JOINING_FOREGROUND]; boolean hasBackgroundJoin = playbackStateDurationsMs[PlaybackStats.PLAYBACK_STATE_JOINING_BACKGROUND] > 0; - List> videoHistory = + List videoHistory = isFinal ? videoFormatHistory : new ArrayList<>(videoFormatHistory); - List> audioHistory = + List audioHistory = isFinal ? audioFormatHistory : new ArrayList<>(audioFormatHistory); return new PlaybackStats( /* playbackCount= */ 1, @@ -831,13 +654,8 @@ public PlaybackStats build(boolean isFinal) { nonFatalErrorHistory); } - private void maybeUpdatePlaybackState(EventTime eventTime, boolean belongsToPlayback) { - @PlaybackState int newPlaybackState = resolveNewPlaybackState(); - if (newPlaybackState == currentPlaybackState) { - return; - } + private void updatePlaybackState(@PlaybackState int newPlaybackState, EventTime eventTime) { Assertions.checkArgument(eventTime.realtimeMs >= currentPlaybackStateStartTimeMs); - long stateDurationMs = eventTime.realtimeMs - currentPlaybackStateStartTimeMs; playbackStateDurationsMs[currentPlaybackState] += stateDurationMs; if (firstReportedTimeMs == C.TIME_UNSET) { @@ -861,28 +679,18 @@ private void maybeUpdatePlaybackState(EventTime eventTime, boolean belongsToPlay && newPlaybackState == PlaybackStats.PLAYBACK_STATE_PAUSED_BUFFERING) { pauseBufferCount++; } - - maybeUpdateMediaTimeHistory( - eventTime.realtimeMs, - /* mediaTimeMs= */ belongsToPlayback ? eventTime.eventPlaybackPositionMs : C.TIME_UNSET); maybeUpdateMaxRebufferTimeMs(eventTime.realtimeMs); - maybeRecordVideoFormatTime(eventTime.realtimeMs); - maybeRecordAudioFormatTime(eventTime.realtimeMs); currentPlaybackState = newPlaybackState; currentPlaybackStateStartTimeMs = eventTime.realtimeMs; if (keepHistory) { - playbackStateHistory.add(Pair.create(eventTime, currentPlaybackState)); + playbackStateHistory.add(new EventTimeAndPlaybackState(eventTime, currentPlaybackState)); } } - private @PlaybackState int resolveNewPlaybackState() { - if (isFinished) { - // Keep VIDEO_STATE_ENDED if playback naturally ended (or progressed to next item). - return currentPlaybackState == PlaybackStats.PLAYBACK_STATE_ENDED - ? 
PlaybackStats.PLAYBACK_STATE_ENDED - : PlaybackStats.PLAYBACK_STATE_ABANDONED; - } else if (isSeeking) { + private @PlaybackState int resolveNewPlaybackState(Player player) { + @Player.State int playerPlaybackState = player.getPlaybackState(); + if (isSeeking && isForeground) { // Seeking takes precedence over errors such that we report a seek while in error state. return PlaybackStats.PLAYBACK_STATE_SEEKING; } else if (hasFatalError) { @@ -903,21 +711,17 @@ private void maybeUpdatePlaybackState(EventTime eventTime, boolean belongsToPlay || currentPlaybackState == PlaybackStats.PLAYBACK_STATE_INTERRUPTED_BY_AD) { return PlaybackStats.PLAYBACK_STATE_JOINING_FOREGROUND; } - if (currentPlaybackState == PlaybackStats.PLAYBACK_STATE_SEEKING - || currentPlaybackState == PlaybackStats.PLAYBACK_STATE_SEEK_BUFFERING) { - return PlaybackStats.PLAYBACK_STATE_SEEK_BUFFERING; - } - if (!playWhenReady) { + if (!player.getPlayWhenReady()) { return PlaybackStats.PLAYBACK_STATE_PAUSED_BUFFERING; } - return isSuppressed + return player.getPlaybackSuppressionReason() != Player.PLAYBACK_SUPPRESSION_REASON_NONE ? PlaybackStats.PLAYBACK_STATE_SUPPRESSED_BUFFERING : PlaybackStats.PLAYBACK_STATE_BUFFERING; } else if (playerPlaybackState == Player.STATE_READY) { - if (!playWhenReady) { + if (!player.getPlayWhenReady()) { return PlaybackStats.PLAYBACK_STATE_PAUSED; } - return isSuppressed + return player.getPlaybackSuppressionReason() != Player.PLAYBACK_SUPPRESSION_REASON_NONE ? PlaybackStats.PLAYBACK_STATE_SUPPRESSED : PlaybackStats.PLAYBACK_STATE_PLAYING; } else if (playerPlaybackState == Player.STATE_IDLE @@ -953,10 +757,12 @@ private void maybeUpdateMediaTimeHistory(long realtimeMs, long mediaTimeMs) { } } } - mediaTimeHistory.add( - mediaTimeMs == C.TIME_UNSET - ? guessMediaTimeBasedOnElapsedRealtime(realtimeMs) - : new long[] {realtimeMs, mediaTimeMs}); + + if (mediaTimeMs != C.TIME_UNSET) { + mediaTimeHistory.add(new long[] {realtimeMs, mediaTimeMs}); + } else if (!mediaTimeHistory.isEmpty()) { + mediaTimeHistory.add(guessMediaTimeBasedOnElapsedRealtime(realtimeMs)); + } } private long[] guessMediaTimeBasedOnElapsedRealtime(long realtimeMs) { @@ -984,7 +790,7 @@ private void maybeUpdateVideoFormat(EventTime eventTime, @Nullable Format newFor } currentVideoFormat = newFormat; if (keepHistory) { - videoFormatHistory.add(Pair.create(eventTime, currentVideoFormat)); + videoFormatHistory.add(new EventTimeAndFormat(eventTime, currentVideoFormat)); } } @@ -1000,7 +806,7 @@ private void maybeUpdateAudioFormat(EventTime eventTime, @Nullable Format newFor } currentAudioFormat = newFormat; if (keepHistory) { - audioFormatHistory.add(Pair.create(eventTime, currentAudioFormat)); + audioFormatHistory.add(new EventTimeAndFormat(eventTime, currentAudioFormat)); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/PlayerId.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/PlayerId.java new file mode 100644 index 0000000000..0e72e7453a --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/analytics/PlayerId.java @@ -0,0 +1,75 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.analytics; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; + +import android.media.metrics.LogSessionId; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.util.Util; + +/** Identifier for a player instance. */ +public final class PlayerId { + + /** + * A player identifier with unset default values that can be used as a placeholder or for testing. + */ + public static final PlayerId UNSET = + Util.SDK_INT < 31 ? new PlayerId() : new PlayerId(LogSessionIdApi31.UNSET); + + @Nullable private final LogSessionIdApi31 logSessionIdApi31; + + /** Creates an instance for API < 31. */ + public PlayerId() { + this(/* logSessionIdApi31= */ (LogSessionIdApi31) null); + checkState(Util.SDK_INT < 31); + } + + /** + * Creates an instance for API ≥ 31. + * + * @param logSessionId The {@link LogSessionId} used for this player. + */ + @RequiresApi(31) + public PlayerId(LogSessionId logSessionId) { + this(new LogSessionIdApi31(logSessionId)); + } + + private PlayerId(@Nullable LogSessionIdApi31 logSessionIdApi31) { + this.logSessionIdApi31 = logSessionIdApi31; + } + + /** Returns the {@link LogSessionId} for this player instance. */ + @RequiresApi(31) + public LogSessionId getLogSessionId() { + return checkNotNull(logSessionIdApi31).logSessionId; + } + + @RequiresApi(31) + private static final class LogSessionIdApi31 { + + public static final LogSessionIdApi31 UNSET = + new LogSessionIdApi31(LogSessionId.LOG_SESSION_ID_NONE); + + public final LogSessionId logSessionId; + + public LogSessionIdApi31(LogSessionId logSessionId) { + this.logSessionId = logSessionId; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AacUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AacUtil.java new file mode 100644 index 0000000000..f787a3959d --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AacUtil.java @@ -0,0 +1,381 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.audio; + +import static java.lang.annotation.ElementType.TYPE_USE; + +import androidx.annotation.IntDef; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.ParsableBitArray; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** Utility methods for handling AAC audio streams. */ +public final class AacUtil { + + private static final String TAG = "AacUtil"; + + /** Holds sample format information for AAC audio. */ + public static final class Config { + + /** The sample rate in Hertz. */ + public final int sampleRateHz; + /** The number of channels. */ + public final int channelCount; + /** The RFC 6381 codecs string. */ + public final String codecs; + + private Config(int sampleRateHz, int channelCount, String codecs) { + this.sampleRateHz = sampleRateHz; + this.channelCount = channelCount; + this.codecs = codecs; + } + } + + // Audio sample count constants assume the frameLengthFlag in the access unit is 0. + /** + * Number of raw audio samples that are produced per channel when decoding an AAC LC access unit. + */ + public static final int AAC_LC_AUDIO_SAMPLE_COUNT = 1024; + /** + * Number of raw audio samples that are produced per channel when decoding an AAC XHE access unit. + */ + public static final int AAC_XHE_AUDIO_SAMPLE_COUNT = AAC_LC_AUDIO_SAMPLE_COUNT; + /** + * Number of raw audio samples that are produced per channel when decoding an AAC HE access unit. + */ + public static final int AAC_HE_AUDIO_SAMPLE_COUNT = 2048; + /** + * Number of raw audio samples that are produced per channel when decoding an AAC LD access unit. + */ + public static final int AAC_LD_AUDIO_SAMPLE_COUNT = 512; + + // Maximum bitrates for AAC profiles from the Fraunhofer FDK AAC encoder documentation: + // https://cs.android.com/android/platform/superproject/+/android-9.0.0_r8:external/aac/libAACenc/include/aacenc_lib.h;l=718 + /** Maximum rate for an AAC LC audio stream, in bytes per second. */ + public static final int AAC_LC_MAX_RATE_BYTES_PER_SECOND = 800 * 1000 / 8; + /** Maximum rate for an AAC HE V1 audio stream, in bytes per second. */ + public static final int AAC_HE_V1_MAX_RATE_BYTES_PER_SECOND = 128 * 1000 / 8; + /** Maximum rate for an AAC HE V2 audio stream, in bytes per second. */ + public static final int AAC_HE_V2_MAX_RATE_BYTES_PER_SECOND = 56 * 1000 / 8; + /** + * Maximum rate for an AAC XHE audio stream, in bytes per second. + * + *
<p>Fraunhofer documentation says "500 kbit/s and above" for stereo, so we use a rate generously + * above the 500 kbit/s level. + */ + public static final int AAC_XHE_MAX_RATE_BYTES_PER_SECOND = 2048 * 1000 / 8;
+ /** + * Maximum rate for an AAC ELD audio stream, in bytes per second. + * + * <p>Fraunhofer documentation shows AAC-ELD as useful for up to ~ 64 kbit/s so we use this value. + */ + public static final int AAC_ELD_MAX_RATE_BYTES_PER_SECOND = 64 * 1000 / 8;
+ + private static final int AUDIO_SPECIFIC_CONFIG_FREQUENCY_INDEX_ARBITRARY = 0xF; + private static final int[] AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE = + new int[] { + 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350 + }; + private static final int AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID = -1;
+ /** + * In the channel configurations below, <A> indicates a single channel element; (A, B) + * indicates a channel pair element; and [A] indicates a low-frequency effects element. The + * speaker mapping short forms used are: + * + * <ul>
+ *   <li>FC: front center + *   <li>BC: back center + *   <li>FL/FR: front left/right + *   <li>FCL/FCR: front center left/right + *   <li>FTL/FTR: front top left/right + *   <li>SL/SR: back surround left/right + *   <li>BL/BR: back left/right + *   <li>LFE: low frequency effects + * </ul>
+ */ + private static final int[] AUDIO_SPECIFIC_CONFIG_CHANNEL_COUNT_TABLE = + new int[] { + 0, + 1, /* mono: <FC> */ + 2, /* stereo: (FL, FR) */ + 3, /* 3.0: <FC>, (FL, FR) */ + 4, /* 4.0: <FC>, (FL, FR), <BC> */ + 5, /* 5.0 back: <FC>, (FL, FR), (SL, SR) */ + 6, /* 5.1 back: <FC>, (FL, FR), (SL, SR), <BC>, [LFE] */ + 8, /* 7.1 wide back: <FC>, (FCL, FCR), (FL, FR), (SL, SR), [LFE] */ + AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID, + AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID, + AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID, + 7, /* 6.1: <FC>, (FL, FR), (SL, SR), <BC>, [LFE] */ + 8, /* 7.1: <FC>, (FL, FR), (SL, SR), (BL, BR), [LFE] */ + AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID, + 8, /* 7.1 top: <FC>, (FL, FR), (SL, SR), [LFE], (FTL, FTR) */ + AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID + };
+ + /** + * Prefix for the RFC 6381 codecs string for AAC formats. To form a full codecs string, suffix the + * decimal AudioObjectType. + */ + private static final String CODECS_STRING_PREFIX = "mp4a.40.";
+ + // Advanced Audio Coding Low-Complexity profile. + public static final int AUDIO_OBJECT_TYPE_AAC_LC = 2; + // Spectral Band Replication. + public static final int AUDIO_OBJECT_TYPE_AAC_SBR = 5; + // Error Resilient Bit-Sliced Arithmetic Coding. + public static final int AUDIO_OBJECT_TYPE_AAC_ER_BSAC = 22; + // Enhanced low delay. + public static final int AUDIO_OBJECT_TYPE_AAC_ELD = 23; + // Parametric Stereo. + public static final int AUDIO_OBJECT_TYPE_AAC_PS = 29; + // Escape code for extended audio object types. + private static final int AUDIO_OBJECT_TYPE_ESCAPE = 31; + // Extended high efficiency. + public static final int AUDIO_OBJECT_TYPE_AAC_XHE = 42;
+ + /** + * Valid AAC Audio object types. One of {@link #AUDIO_OBJECT_TYPE_AAC_LC}, {@link + * #AUDIO_OBJECT_TYPE_AAC_SBR}, {@link #AUDIO_OBJECT_TYPE_AAC_ER_BSAC}, {@link + * #AUDIO_OBJECT_TYPE_AAC_ELD}, {@link #AUDIO_OBJECT_TYPE_AAC_PS} or {@link + * #AUDIO_OBJECT_TYPE_AAC_XHE}. + */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + AUDIO_OBJECT_TYPE_AAC_LC, + AUDIO_OBJECT_TYPE_AAC_SBR, + AUDIO_OBJECT_TYPE_AAC_ER_BSAC, + AUDIO_OBJECT_TYPE_AAC_ELD, + AUDIO_OBJECT_TYPE_AAC_PS, + AUDIO_OBJECT_TYPE_AAC_XHE + }) + public @interface AacAudioObjectType {}
+ + /** + * Parses an AAC AudioSpecificConfig, as defined in ISO 14496-3 1.6.2.1 + * + * @param audioSpecificConfig A byte array containing the AudioSpecificConfig to parse. + * @return The parsed configuration. + * @throws ParserException If the AudioSpecificConfig cannot be parsed because it is invalid or + * unsupported. + */ + public static Config parseAudioSpecificConfig(byte[] audioSpecificConfig) throws ParserException { + return parseAudioSpecificConfig( + new ParsableBitArray(audioSpecificConfig), /* forceReadToEnd= */ false); + }
+ + /** + * Parses an AAC AudioSpecificConfig, as defined in ISO 14496-3 1.6.2.1 + * + * @param bitArray A {@link ParsableBitArray} containing the AudioSpecificConfig to parse. The + * position is advanced to the end of the AudioSpecificConfig. + * @param forceReadToEnd Whether the entire AudioSpecificConfig should be read. Required for + * knowing the length of the configuration payload. + * @return The parsed configuration. + * @throws ParserException If the AudioSpecificConfig cannot be parsed because it is invalid or + * unsupported.
+ */ + public static Config parseAudioSpecificConfig(ParsableBitArray bitArray, boolean forceReadToEnd) + throws ParserException { + int audioObjectType = getAudioObjectType(bitArray); + int sampleRateHz = getSamplingFrequency(bitArray); + int channelConfiguration = bitArray.readBits(4); + String codecs = CODECS_STRING_PREFIX + audioObjectType; + if (audioObjectType == AUDIO_OBJECT_TYPE_AAC_SBR + || audioObjectType == AUDIO_OBJECT_TYPE_AAC_PS) { + // For an AAC bitstream using spectral band replication (SBR) or parametric stereo (PS) with + // explicit signaling, we return the extension sampling frequency as the sample rate of the + // content; this is identical to the sample rate of the decoded output but may differ from + // the sample rate set above. + // Use the extensionSamplingFrequencyIndex. + sampleRateHz = getSamplingFrequency(bitArray); + audioObjectType = getAudioObjectType(bitArray); + if (audioObjectType == AUDIO_OBJECT_TYPE_AAC_ER_BSAC) { + // Use the extensionChannelConfiguration. + channelConfiguration = bitArray.readBits(4); + } + } + + if (forceReadToEnd) { + switch (audioObjectType) { + case 1: + case 2: + case 3: + case 4: + case 6: + case 7: + case 17: + case 19: + case 20: + case 21: + case 22: + case 23: + parseGaSpecificConfig(bitArray, audioObjectType, channelConfiguration); + break; + default: + throw ParserException.createForUnsupportedContainerFeature( + "Unsupported audio object type: " + audioObjectType); + } + switch (audioObjectType) { + case 17: + case 19: + case 20: + case 21: + case 22: + case 23: + int epConfig = bitArray.readBits(2); + if (epConfig == 2 || epConfig == 3) { + throw ParserException.createForUnsupportedContainerFeature( + "Unsupported epConfig: " + epConfig); + } + break; + default: + break; + } + } + // For supported containers, bits_to_decode() is always 0. + int channelCount = AUDIO_SPECIFIC_CONFIG_CHANNEL_COUNT_TABLE[channelConfiguration]; + if (channelCount == AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID) { + throw ParserException.createForMalformedContainer(/* message= */ null, /* cause= */ null); + } + return new Config(sampleRateHz, channelCount, codecs); + } + + /** + * Builds a simple AAC LC AudioSpecificConfig, as defined in ISO 14496-3 1.6.2.1 + * + * @param sampleRate The sample rate in Hz. + * @param channelCount The channel count. + * @return The AudioSpecificConfig. + */ + public static byte[] buildAacLcAudioSpecificConfig(int sampleRate, int channelCount) { + int sampleRateIndex = C.INDEX_UNSET; + for (int i = 0; i < AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE.length; ++i) { + if (sampleRate == AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE[i]) { + sampleRateIndex = i; + } + } + int channelConfig = C.INDEX_UNSET; + for (int i = 0; i < AUDIO_SPECIFIC_CONFIG_CHANNEL_COUNT_TABLE.length; ++i) { + if (channelCount == AUDIO_SPECIFIC_CONFIG_CHANNEL_COUNT_TABLE[i]) { + channelConfig = i; + } + } + if (sampleRate == C.INDEX_UNSET || channelConfig == C.INDEX_UNSET) { + throw new IllegalArgumentException( + "Invalid sample rate or number of channels: " + sampleRate + ", " + channelCount); + } + return buildAudioSpecificConfig(AUDIO_OBJECT_TYPE_AAC_LC, sampleRateIndex, channelConfig); + } + + /** + * Builds a simple AudioSpecificConfig, as defined in ISO 14496-3 1.6.2.1 + * + * @param audioObjectType The audio object type. + * @param sampleRateIndex The sample rate index. + * @param channelConfig The channel configuration. + * @return The AudioSpecificConfig. 
+ */ + public static byte[] buildAudioSpecificConfig( + int audioObjectType, int sampleRateIndex, int channelConfig) { + byte[] specificConfig = new byte[2]; + specificConfig[0] = (byte) (((audioObjectType << 3) & 0xF8) | ((sampleRateIndex >> 1) & 0x07)); + specificConfig[1] = (byte) (((sampleRateIndex << 7) & 0x80) | ((channelConfig << 3) & 0x78)); + return specificConfig; + } + + /** + * Returns the AAC audio object type as specified in 14496-3 (2005) Table 1.14. + * + * @param bitArray The bit array containing the audio specific configuration. + * @return The audio object type. + */ + private static int getAudioObjectType(ParsableBitArray bitArray) { + int audioObjectType = bitArray.readBits(5); + if (audioObjectType == AUDIO_OBJECT_TYPE_ESCAPE) { + audioObjectType = 32 + bitArray.readBits(6); + } + return audioObjectType; + } + + /** + * Returns the AAC sampling frequency (or extension sampling frequency) as specified in 14496-3 + * (2005) Table 1.13. + * + * @param bitArray The bit array containing the audio specific configuration. + * @return The sampling frequency. + * @throws ParserException If the audio specific configuration is invalid. + */ + private static int getSamplingFrequency(ParsableBitArray bitArray) throws ParserException { + int samplingFrequency; + int frequencyIndex = bitArray.readBits(4); + if (frequencyIndex == AUDIO_SPECIFIC_CONFIG_FREQUENCY_INDEX_ARBITRARY) { + if (bitArray.bitsLeft() < 24) { + throw ParserException.createForMalformedContainer( + /* message= */ "AAC header insufficient data", /* cause= */ null); + } + samplingFrequency = bitArray.readBits(24); + } else if (frequencyIndex < 13) { + samplingFrequency = AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE[frequencyIndex]; + } else { + throw ParserException.createForMalformedContainer( + /* message= */ "AAC header wrong Sampling Frequency Index", /* cause= */ null); + } + return samplingFrequency; + } + + private static void parseGaSpecificConfig( + ParsableBitArray bitArray, int audioObjectType, int channelConfiguration) { + boolean frameLengthFlag = bitArray.readBit(); + if (frameLengthFlag) { + Log.w(TAG, "Unexpected frameLengthFlag = 1"); + } + boolean dependsOnCoreDecoder = bitArray.readBit(); + if (dependsOnCoreDecoder) { + bitArray.skipBits(14); // coreCoderDelay. + } + boolean extensionFlag = bitArray.readBit(); + if (channelConfiguration == 0) { + throw new UnsupportedOperationException(); // TODO: Implement programConfigElement(); + } + if (audioObjectType == 6 || audioObjectType == 20) { + bitArray.skipBits(3); // layerNr. + } + if (extensionFlag) { + if (audioObjectType == 22) { + bitArray.skipBits(16); // numOfSubFrame (5), layer_length(11). + } + if (audioObjectType == 17 + || audioObjectType == 19 + || audioObjectType == 20 + || audioObjectType == 23) { + // aacSectionDataResilienceFlag, aacScalefactorDataResilienceFlag, + // aacSpectralDataResilienceFlag. + bitArray.skipBits(3); + } + bitArray.skipBits(1); // extensionFlag3. 
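As a quick orientation for the AacUtil helpers above (a sketch, not part of the patch; the Config field names are assumed from the constructor used above), the most common case is AAC-LC at 44.1 kHz stereo:

    // samplingFrequencyIndex 4 = 44.1 kHz, channelConfiguration 2 = stereo, audioObjectType 2 = AAC-LC.
    byte[] asc =
        AacUtil.buildAacLcAudioSpecificConfig(/* sampleRate= */ 44_100, /* channelCount= */ 2);
    // Packed as 00010 0100 0010 000 -> {0x12, 0x10}, the well-known AAC-LC 44.1 kHz stereo config.
    AacUtil.Config config = AacUtil.parseAudioSpecificConfig(asc); // may throw ParserException
    // Yields sampleRateHz == 44_100, channelCount == 2 and the RFC 6381 codecs string "mp4a.40.2".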
+ } + } + + private AacUtil() {} +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/Ac3Util.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/Ac3Util.java index 53803ada4e..4ef68bf345 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/Ac3Util.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/Ac3Util.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.audio; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; @@ -24,9 +26,11 @@ import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableBitArray; import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.nio.ByteBuffer; /** @@ -44,6 +48,7 @@ public static final class SyncFrameInfo { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({STREAM_TYPE_UNDEFINED, STREAM_TYPE_TYPE0, STREAM_TYPE_TYPE1, STREAM_TYPE_TYPE2}) public @interface StreamType {} /** Undefined AC3 stream type. */ @@ -65,22 +70,16 @@ public static final class SyncFrameInfo { * #STREAM_TYPE_UNDEFINED} otherwise. */ public final @StreamType int streamType; - /** - * The audio sampling rate in Hz. - */ + /** The audio sampling rate in Hz. */ public final int sampleRate; - /** - * The number of audio channels - */ + /** The number of audio channels */ public final int channelCount; - /** - * The size of the frame. - */ + /** The size of the frame. */ public final int frameSize; - /** - * Number of audio samples in the frame. - */ + /** Number of audio samples in the frame. */ public final int sampleCount; + /** The bitrate of audio samples. */ + public final int bitrate; private SyncFrameInfo( @Nullable String mimeType, @@ -88,17 +87,25 @@ private SyncFrameInfo( int channelCount, int sampleRate, int frameSize, - int sampleCount) { + int sampleCount, + int bitrate) { this.mimeType = mimeType; this.streamType = streamType; this.channelCount = channelCount; this.sampleRate = sampleRate; this.frameSize = frameSize; this.sampleCount = sampleCount; + this.bitrate = bitrate; } - } + /** Maximum rate for an AC-3 audio stream, in bytes per second. */ + public static final int AC3_MAX_RATE_BYTES_PER_SECOND = 640 * 1000 / 8; + /** Maximum rate for an E-AC-3 audio stream, in bytes per second. */ + public static final int E_AC3_MAX_RATE_BYTES_PER_SECOND = 6144 * 1000 / 8; + /** Maximum rate for a TrueHD audio stream, in bytes per second. */ + public static final int TRUEHD_MAX_RATE_BYTES_PER_SECOND = 24500 * 1000 / 8; + /** * The number of samples to store in each output chunk when rechunking TrueHD streams. The number * of samples extracted from the container corresponding to one syncframe must be an integer @@ -110,27 +117,17 @@ private SyncFrameInfo( */ public static final int TRUEHD_SYNCFRAME_PREFIX_LENGTH = 10; - /** - * The number of new samples per (E-)AC-3 audio block. - */ + /** The number of new samples per (E-)AC-3 audio block. */ private static final int AUDIO_SAMPLES_PER_AUDIO_BLOCK = 256; /** Each syncframe has 6 blocks that provide 256 new audio samples. See subsection 4.1. 
*/ private static final int AC3_SYNCFRAME_AUDIO_SAMPLE_COUNT = 6 * AUDIO_SAMPLES_PER_AUDIO_BLOCK; - /** - * Number of audio blocks per E-AC-3 syncframe, indexed by numblkscod. - */ + /** Number of audio blocks per E-AC-3 syncframe, indexed by numblkscod. */ private static final int[] BLOCKS_PER_SYNCFRAME_BY_NUMBLKSCOD = new int[] {1, 2, 3, 6}; - /** - * Sample rates, indexed by fscod. - */ + /** Sample rates, indexed by fscod. */ private static final int[] SAMPLE_RATE_BY_FSCOD = new int[] {48000, 44100, 32000}; - /** - * Sample rates, indexed by fscod2 (E-AC-3). - */ + /** Sample rates, indexed by fscod2 (E-AC-3). */ private static final int[] SAMPLE_RATE_BY_FSCOD2 = new int[] {24000, 22050, 16000}; - /** - * Channel counts, indexed by acmod. - */ + /** Channel counts, indexed by acmod. */ private static final int[] CHANNEL_COUNT_BY_ACMOD = new int[] {2, 1, 2, 3, 3, 4, 4, 5}; /** Nominal bitrates in kbps, indexed by frmsizecod / 2. (See table 4.13.) */ private static final int[] BITRATE_BY_HALF_FRMSIZECOD = @@ -156,25 +153,31 @@ private SyncFrameInfo( */ public static Format parseAc3AnnexFFormat( ParsableByteArray data, String trackId, String language, @Nullable DrmInitData drmInitData) { - int fscod = (data.readUnsignedByte() & 0xC0) >> 6; + ParsableBitArray dataBitArray = new ParsableBitArray(); + dataBitArray.reset(data); + + int fscod = dataBitArray.readBits(2); int sampleRate = SAMPLE_RATE_BY_FSCOD[fscod]; - int nextByte = data.readUnsignedByte(); - int channelCount = CHANNEL_COUNT_BY_ACMOD[(nextByte & 0x38) >> 3]; - if ((nextByte & 0x04) != 0) { // lfeon + dataBitArray.skipBits(8); // bsid, bsmod + int channelCount = CHANNEL_COUNT_BY_ACMOD[dataBitArray.readBits(3)]; // acmod + if (dataBitArray.readBits(1) != 0) { // lfeon channelCount++; } - return Format.createAudioSampleFormat( - trackId, - MimeTypes.AUDIO_AC3, - /* codecs= */ null, - Format.NO_VALUE, - Format.NO_VALUE, - channelCount, - sampleRate, - /* initializationData= */ null, - drmInitData, - /* selectionFlags= */ 0, - language); + int halfFrmsizecod = dataBitArray.readBits(5); // bit_rate_code + int constantBitrate = BITRATE_BY_HALF_FRMSIZECOD[halfFrmsizecod] * 1000; + // Update data position + dataBitArray.byteAlign(); + data.setPosition(dataBitArray.getBytePosition()); + return new Format.Builder() + .setId(trackId) + .setSampleMimeType(MimeTypes.AUDIO_AC3) + .setChannelCount(channelCount) + .setSampleRate(sampleRate) + .setDrmInitData(drmInitData) + .setLanguage(language) + .setAverageBitrate(constantBitrate) + .setPeakBitrate(constantBitrate) + .build(); } /** @@ -189,47 +192,54 @@ public static Format parseAc3AnnexFFormat( */ public static Format parseEAc3AnnexFFormat( ParsableByteArray data, String trackId, String language, @Nullable DrmInitData drmInitData) { - data.skipBytes(2); // data_rate, num_ind_sub + ParsableBitArray dataBitArray = new ParsableBitArray(); + dataBitArray.reset(data); + + int peakBitrate = dataBitArray.readBits(13) * 1000; // data_rate + dataBitArray.skipBits(3); // num_ind_sub // Read the first independent substream. 
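For orientation (not part of the patch): the bitrate fields added in this hunk come straight from the Annex F headers, roughly as follows.

    // AC-3 ("dac3" box): the 5-bit bit_rate_code indexes the nominal bitrate table, e.g. a
    // bit_rate_code of 14 selects 384 kbps, stored as both average and peak bitrate:
    int constantBitrate = BITRATE_BY_HALF_FRMSIZECOD[14] * 1000; // 384_000 bps
    // E-AC-3 ("dec3" box): the 13-bit data_rate field is already in kbps, so e.g. a data_rate of
    // 640 becomes a peak bitrate of 640_000 bps; no constant/average bitrate is signalled.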
- int fscod = (data.readUnsignedByte() & 0xC0) >> 6; + int fscod = dataBitArray.readBits(2); int sampleRate = SAMPLE_RATE_BY_FSCOD[fscod]; - int nextByte = data.readUnsignedByte(); - int channelCount = CHANNEL_COUNT_BY_ACMOD[(nextByte & 0x0E) >> 1]; - if ((nextByte & 0x01) != 0) { // lfeon + dataBitArray.skipBits(10); // bsid, reserved, asvc, bsmod + int channelCount = CHANNEL_COUNT_BY_ACMOD[dataBitArray.readBits(3)]; // acmod + if (dataBitArray.readBits(1) != 0) { // lfeon channelCount++; } // Read the first dependent substream. - nextByte = data.readUnsignedByte(); - int numDepSub = ((nextByte & 0x1E) >> 1); + dataBitArray.skipBits(3); // reserved + int numDepSub = dataBitArray.readBits(4); // num_dep_sub + dataBitArray.skipBits(1); // numDepSub > 0 ? LFE2 : reserved if (numDepSub > 0) { - int lowByteChanLoc = data.readUnsignedByte(); + dataBitArray.skipBytes(6); // other channel configurations // Read Lrs/Rrs pair // TODO: Read other channel configuration - if ((lowByteChanLoc & 0x02) != 0) { + if (dataBitArray.readBits(1) != 0) { channelCount += 2; } + dataBitArray.skipBits(1); // Lc/Rc pair } + String mimeType = MimeTypes.AUDIO_E_AC3; - if (data.bytesLeft() > 0) { - nextByte = data.readUnsignedByte(); - if ((nextByte & 0x01) != 0) { // flag_ec3_extension_type_a + if (dataBitArray.bitsLeft() > 7) { + dataBitArray.skipBits(7); // reserved + if (dataBitArray.readBits(1) != 0) { // flag_ec3_extension_type_a mimeType = MimeTypes.AUDIO_E_AC3_JOC; } } - return Format.createAudioSampleFormat( - trackId, - mimeType, - /* codecs= */ null, - Format.NO_VALUE, - Format.NO_VALUE, - channelCount, - sampleRate, - /* initializationData= */ null, - drmInitData, - /* selectionFlags= */ 0, - language); + // Update data position + dataBitArray.byteAlign(); + data.setPosition(dataBitArray.getBytePosition()); + return new Format.Builder() + .setId(trackId) + .setSampleMimeType(mimeType) + .setChannelCount(channelCount) + .setSampleRate(sampleRate) + .setDrmInitData(drmInitData) + .setLanguage(language) + .setPeakBitrate(peakBitrate) + .build(); } /** @@ -253,6 +263,7 @@ public static SyncFrameInfo parseAc3SyncframeInfo(ParsableBitArray data) { int sampleCount; boolean lfeon; int channelCount; + int bitrate; if (isEac3) { // Subsection E.1.2. data.skipBits(16); // syncword @@ -285,6 +296,7 @@ public static SyncFrameInfo parseAc3SyncframeInfo(ParsableBitArray data) { sampleRate = SAMPLE_RATE_BY_FSCOD[fscod]; } sampleCount = AUDIO_SAMPLES_PER_AUDIO_BLOCK * audioBlocks; + bitrate = calculateEac3Bitrate(frameSize, sampleRate, audioBlocks); acmod = data.readBits(3); lfeon = data.readBit(); channelCount = CHANNEL_COUNT_BY_ACMOD[acmod] + (lfeon ? 1 : 0); @@ -316,7 +328,7 @@ public static SyncFrameInfo parseAc3SyncframeInfo(ParsableBitArray data) { } if (streamType == SyncFrameInfo.STREAM_TYPE_TYPE0) { if (data.readBit()) { // pgmscle - data.skipBits(6); //pgmscl + data.skipBits(6); // pgmscl } if (acmod == 0 && data.readBit()) { // pgmscl2e data.skipBits(6); // pgmscl2 @@ -440,6 +452,7 @@ public static SyncFrameInfo parseAc3SyncframeInfo(ParsableBitArray data) { mimeType = null; } int frmsizecod = data.readBits(6); + bitrate = BITRATE_BY_HALF_FRMSIZECOD[frmsizecod / 2] * 1000; frameSize = getAc3SyncframeSize(fscod, frmsizecod); data.skipBits(5 + 3); // bsid, bsmod acmod = data.readBits(3); @@ -459,7 +472,7 @@ public static SyncFrameInfo parseAc3SyncframeInfo(ParsableBitArray data) { channelCount = CHANNEL_COUNT_BY_ACMOD[acmod] + (lfeon ? 
1 : 0); } return new SyncFrameInfo( - mimeType, streamType, channelCount, sampleRate, frameSize, sampleCount); + mimeType, streamType, channelCount, sampleRate, frameSize, sampleCount, bitrate); } /** @@ -517,7 +530,7 @@ public static int findTrueHdSyncframeOffset(ByteBuffer buffer) { int endIndex = buffer.limit() - TRUEHD_SYNCFRAME_PREFIX_LENGTH; for (int i = startIndex; i <= endIndex; i++) { // The syncword ends 0xBA for TrueHD or 0xBB for MLP. - if ((buffer.getInt(i + 4) & 0xFEFFFFFF) == 0xBA6F72F8) { + if ((Util.getBigEndianInt(buffer, i + 4) & 0xFFFFFFFE) == 0xF8726FBA) { return i - startIndex; } } @@ -562,7 +575,9 @@ public static int parseTrueHdSyncframeAudioSampleCount(ByteBuffer buffer, int of private static int getAc3SyncframeSize(int fscod, int frmsizecod) { int halfFrmsizecod = frmsizecod / 2; - if (fscod < 0 || fscod >= SAMPLE_RATE_BY_FSCOD.length || frmsizecod < 0 + if (fscod < 0 + || fscod >= SAMPLE_RATE_BY_FSCOD.length + || frmsizecod < 0 || halfFrmsizecod >= SYNCFRAME_SIZE_WORDS_BY_HALF_FRMSIZECOD_44_1.length) { // Invalid values provided. return C.LENGTH_UNSET; @@ -579,6 +594,15 @@ private static int getAc3SyncframeSize(int fscod, int frmsizecod) { } } - private Ac3Util() {} + /** + * Derived from the formula defined in F.6.2.2 to calculate data_rate for the (E-)AC3 bitstream. + * Note: The formula is based on frmsiz read from the spec. We already do some modifications to it + * when deriving frameSize from the read value. The formula used here is adapted to accommodate + * that modification. + */ + private static int calculateEac3Bitrate(int frameSize, int sampleRate, int audioBlocks) { + return (frameSize * sampleRate) / (audioBlocks * 32); + } + private Ac3Util() {} } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/Ac4Util.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/Ac4Util.java index b9f1dc5460..96712f04cb 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/Ac4Util.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/Ac4Util.java @@ -54,6 +54,9 @@ private SyncFrameInfo( public static final int AC40_SYNCWORD = 0xAC40; public static final int AC41_SYNCWORD = 0xAC41; + /** Maximum rate for an AC-4 audio stream, in bytes per second. */ + public static final int MAX_RATE_BYTES_PER_SECOND = 2688 * 1000 / 8; + /** The channel count of AC-4 stream. */ // TODO: Parse AC-4 stream channel count. private static final int CHANNEL_COUNT_2 = 2; @@ -104,18 +107,14 @@ public static Format parseAc4AnnexEFormat( ParsableByteArray data, String trackId, String language, @Nullable DrmInitData drmInitData) { data.skipBytes(1); // ac4_dsi_version, bitstream_version[0:5] int sampleRate = ((data.readUnsignedByte() & 0x20) >> 5 == 1) ? 48000 : 44100; - return Format.createAudioSampleFormat( - trackId, - MimeTypes.AUDIO_AC4, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - /* maxInputSize= */ Format.NO_VALUE, - CHANNEL_COUNT_2, - sampleRate, - /* initializationData= */ null, - drmInitData, - /* selectionFlags= */ 0, - language); + return new Format.Builder() + .setId(trackId) + .setSampleMimeType(MimeTypes.AUDIO_AC4) + .setChannelCount(CHANNEL_COUNT_2) + .setSampleRate(sampleRate) + .setDrmInitData(drmInitData) + .setLanguage(language) + .build(); } /** @@ -224,13 +223,14 @@ public static int parseAc4SyncframeAudioSampleCount(ByteBuffer buffer) { public static void getAc4SampleHeader(int size, ParsableByteArray buffer) { // See ETSI TS 103 190-1 V1.3.1, Annex G. 
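A worked check of the calculateEac3Bitrate formula introduced above (not part of the patch): a 6-block, 48 kHz E-AC-3 syncframe carries 6 * 256 = 1536 samples, i.e. 32 ms of audio, so a 1536-byte syncframe gives

    // (frameSize * sampleRate) / (audioBlocks * 32)
    //   = (1536 * 48000) / (6 * 32)
    //   = 384_000 bps, matching 1536 bytes * 8 bits / 0.032 s.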
buffer.reset(SAMPLE_HEADER_SIZE); - buffer.data[0] = (byte) 0xAC; - buffer.data[1] = 0x40; - buffer.data[2] = (byte) 0xFF; - buffer.data[3] = (byte) 0xFF; - buffer.data[4] = (byte) ((size >> 16) & 0xFF); - buffer.data[5] = (byte) ((size >> 8) & 0xFF); - buffer.data[6] = (byte) (size & 0xFF); + byte[] data = buffer.getData(); + data[0] = (byte) 0xAC; + data[1] = 0x40; + data[2] = (byte) 0xFF; + data[3] = (byte) 0xFF; + data[4] = (byte) ((size >> 16) & 0xFF); + data[5] = (byte) ((size >> 8) & 0xFF); + data[6] = (byte) (size & 0xFF); } private static int readVariableBits(ParsableBitArray data, int bitsPerRead) { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioAttributes.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioAttributes.java index 516df8147c..aa181119bd 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioAttributes.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioAttributes.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2017 The Android Open Source Project + * Copyright 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,121 +15,157 @@ */ package com.google.android.exoplayer2.audio; -import android.annotation.TargetApi; +import android.os.Bundle; +import androidx.annotation.DoNotInline; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.Bundleable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.CanIgnoreReturnValue; /** - * Attributes for audio playback, which configure the underlying platform - * {@link android.media.AudioTrack}. - *

- * To set the audio attributes, create an instance using the {@link Builder} and either pass it to
- * {@link com.google.android.exoplayer2.SimpleExoPlayer#setAudioAttributes(AudioAttributes)} or
- * send a message of type {@link C#MSG_SET_AUDIO_ATTRIBUTES} to the audio renderers.
- *

      - * This class is based on {@link android.media.AudioAttributes}, but can be used on all supported + * Attributes for audio playback, which configure the underlying platform {@link + * android.media.AudioTrack}. + * + *

To set the audio attributes, create an instance using the {@link Builder} and either pass it
+ * to the player or send a message of type {@code Renderer#MSG_SET_AUDIO_ATTRIBUTES} to the audio
+ * renderers.
+ *
+ *

      This class is based on {@link android.media.AudioAttributes}, but can be used on all supported * API versions. */ -public final class AudioAttributes { +public final class AudioAttributes implements Bundleable { - public static final AudioAttributes DEFAULT = new Builder().build(); + /** A direct wrapper around {@link android.media.AudioAttributes}. */ + @RequiresApi(21) + public static final class AudioAttributesV21 { + public final android.media.AudioAttributes audioAttributes; + + private AudioAttributesV21(AudioAttributes audioAttributes) { + android.media.AudioAttributes.Builder builder = + new android.media.AudioAttributes.Builder() + .setContentType(audioAttributes.contentType) + .setFlags(audioAttributes.flags) + .setUsage(audioAttributes.usage); + if (Util.SDK_INT >= 29) { + Api29.setAllowedCapturePolicy(builder, audioAttributes.allowedCapturePolicy); + } + if (Util.SDK_INT >= 32) { + Api32.setSpatializationBehavior(builder, audioAttributes.spatializationBehavior); + } + this.audioAttributes = builder.build(); + } + } /** - * Builder for {@link AudioAttributes}. + * The default audio attributes, where the content type is {@link C#AUDIO_CONTENT_TYPE_UNKNOWN}, + * usage is {@link C#USAGE_MEDIA}, capture policy is {@link C#ALLOW_CAPTURE_BY_ALL} and no flags + * are set. */ + public static final AudioAttributes DEFAULT = new Builder().build(); + + /** Builder for {@link AudioAttributes}. */ public static final class Builder { private @C.AudioContentType int contentType; private @C.AudioFlags int flags; private @C.AudioUsage int usage; private @C.AudioAllowedCapturePolicy int allowedCapturePolicy; + private @C.SpatializationBehavior int spatializationBehavior; /** * Creates a new builder for {@link AudioAttributes}. * - *

      By default the content type is {@link C#CONTENT_TYPE_UNKNOWN}, usage is {@link + *

      By default the content type is {@link C#AUDIO_CONTENT_TYPE_UNKNOWN}, usage is {@link * C#USAGE_MEDIA}, capture policy is {@link C#ALLOW_CAPTURE_BY_ALL} and no flags are set. */ public Builder() { - contentType = C.CONTENT_TYPE_UNKNOWN; + contentType = C.AUDIO_CONTENT_TYPE_UNKNOWN; flags = 0; usage = C.USAGE_MEDIA; allowedCapturePolicy = C.ALLOW_CAPTURE_BY_ALL; + spatializationBehavior = C.SPATIALIZATION_BEHAVIOR_AUTO; } - /** - * @see android.media.AudioAttributes.Builder#setContentType(int) - */ + /** See {@link android.media.AudioAttributes.Builder#setContentType(int)} */ + @CanIgnoreReturnValue public Builder setContentType(@C.AudioContentType int contentType) { this.contentType = contentType; return this; } - /** - * @see android.media.AudioAttributes.Builder#setFlags(int) - */ + /** See {@link android.media.AudioAttributes.Builder#setFlags(int)} */ + @CanIgnoreReturnValue public Builder setFlags(@C.AudioFlags int flags) { this.flags = flags; return this; } - /** - * @see android.media.AudioAttributes.Builder#setUsage(int) - */ + /** See {@link android.media.AudioAttributes.Builder#setUsage(int)} */ + @CanIgnoreReturnValue public Builder setUsage(@C.AudioUsage int usage) { this.usage = usage; return this; } /** See {@link android.media.AudioAttributes.Builder#setAllowedCapturePolicy(int)}. */ + @CanIgnoreReturnValue public Builder setAllowedCapturePolicy(@C.AudioAllowedCapturePolicy int allowedCapturePolicy) { this.allowedCapturePolicy = allowedCapturePolicy; return this; } + /** See {@link android.media.AudioAttributes.Builder#setSpatializationBehavior(int)}. */ + @CanIgnoreReturnValue + public Builder setSpatializationBehavior(@C.SpatializationBehavior int spatializationBehavior) { + this.spatializationBehavior = spatializationBehavior; + return this; + } + /** Creates an {@link AudioAttributes} instance from this builder. */ public AudioAttributes build() { - return new AudioAttributes(contentType, flags, usage, allowedCapturePolicy); + return new AudioAttributes( + contentType, flags, usage, allowedCapturePolicy, spatializationBehavior); } - } + /** The {@link C.AudioContentType}. */ public final @C.AudioContentType int contentType; + /** The {@link C.AudioFlags}. */ public final @C.AudioFlags int flags; + /** The {@link C.AudioUsage}. */ public final @C.AudioUsage int usage; + /** The {@link C.AudioAllowedCapturePolicy}. */ public final @C.AudioAllowedCapturePolicy int allowedCapturePolicy; + /** The {@link C.SpatializationBehavior}. */ + public final @C.SpatializationBehavior int spatializationBehavior; - @Nullable private android.media.AudioAttributes audioAttributesV21; + @Nullable private AudioAttributesV21 audioAttributesV21; private AudioAttributes( @C.AudioContentType int contentType, @C.AudioFlags int flags, @C.AudioUsage int usage, - @C.AudioAllowedCapturePolicy int allowedCapturePolicy) { + @C.AudioAllowedCapturePolicy int allowedCapturePolicy, + @C.SpatializationBehavior int spatializationBehavior) { this.contentType = contentType; this.flags = flags; this.usage = usage; this.allowedCapturePolicy = allowedCapturePolicy; + this.spatializationBehavior = spatializationBehavior; } /** - * Returns a {@link android.media.AudioAttributes} from this instance. + * Returns a {@link AudioAttributesV21} from this instance. * - *

      Field {@link AudioAttributes#allowedCapturePolicy} is ignored for API levels prior to 29. + *

      Some fields are ignored if the corresponding {@link android.media.AudioAttributes.Builder} + * setter is not available on the current API level. */ - @TargetApi(21) - public android.media.AudioAttributes getAudioAttributesV21() { + @RequiresApi(21) + public AudioAttributesV21 getAudioAttributesV21() { if (audioAttributesV21 == null) { - android.media.AudioAttributes.Builder builder = - new android.media.AudioAttributes.Builder() - .setContentType(contentType) - .setFlags(flags) - .setUsage(usage); - if (Util.SDK_INT >= 29) { - builder.setAllowedCapturePolicy(allowedCapturePolicy); - } - audioAttributesV21 = builder.build(); + audioAttributesV21 = new AudioAttributesV21(this); } return audioAttributesV21; } @@ -146,7 +182,8 @@ public boolean equals(@Nullable Object obj) { return this.contentType == other.contentType && this.flags == other.flags && this.usage == other.usage - && this.allowedCapturePolicy == other.allowedCapturePolicy; + && this.allowedCapturePolicy == other.allowedCapturePolicy + && this.spatializationBehavior == other.spatializationBehavior; } @Override @@ -156,7 +193,68 @@ public int hashCode() { result = 31 * result + flags; result = 31 * result + usage; result = 31 * result + allowedCapturePolicy; + result = 31 * result + spatializationBehavior; return result; } + // Bundleable implementation. + + private static final String FIELD_CONTENT_TYPE = Util.intToStringMaxRadix(0); + private static final String FIELD_FLAGS = Util.intToStringMaxRadix(1); + private static final String FIELD_USAGE = Util.intToStringMaxRadix(2); + private static final String FIELD_ALLOWED_CAPTURE_POLICY = Util.intToStringMaxRadix(3); + private static final String FIELD_SPATIALIZATION_BEHAVIOR = Util.intToStringMaxRadix(4); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putInt(FIELD_CONTENT_TYPE, contentType); + bundle.putInt(FIELD_FLAGS, flags); + bundle.putInt(FIELD_USAGE, usage); + bundle.putInt(FIELD_ALLOWED_CAPTURE_POLICY, allowedCapturePolicy); + bundle.putInt(FIELD_SPATIALIZATION_BEHAVIOR, spatializationBehavior); + return bundle; + } + + /** Object that can restore {@link AudioAttributes} from a {@link Bundle}. 
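A minimal round trip through the new Bundleable implementation (a sketch, not part of the patch; C.AUDIO_CONTENT_TYPE_MUSIC is assumed to exist after the C.CONTENT_TYPE_* rename visible above):

    AudioAttributes attributes =
        new AudioAttributes.Builder()
            .setUsage(C.USAGE_MEDIA)
            .setContentType(C.AUDIO_CONTENT_TYPE_MUSIC) // assumed constant name
            .build();
    Bundle bundle = attributes.toBundle();
    AudioAttributes restored = AudioAttributes.CREATOR.fromBundle(bundle);
    // restored.equals(attributes) is true; keys missing from the bundle fall back to Builder defaults.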
*/ + public static final Creator CREATOR = + bundle -> { + Builder builder = new Builder(); + if (bundle.containsKey(FIELD_CONTENT_TYPE)) { + builder.setContentType(bundle.getInt(FIELD_CONTENT_TYPE)); + } + if (bundle.containsKey(FIELD_FLAGS)) { + builder.setFlags(bundle.getInt(FIELD_FLAGS)); + } + if (bundle.containsKey(FIELD_USAGE)) { + builder.setUsage(bundle.getInt(FIELD_USAGE)); + } + if (bundle.containsKey(FIELD_ALLOWED_CAPTURE_POLICY)) { + builder.setAllowedCapturePolicy(bundle.getInt(FIELD_ALLOWED_CAPTURE_POLICY)); + } + if (bundle.containsKey(FIELD_SPATIALIZATION_BEHAVIOR)) { + builder.setSpatializationBehavior(bundle.getInt(FIELD_SPATIALIZATION_BEHAVIOR)); + } + return builder.build(); + }; + + @RequiresApi(29) + private static final class Api29 { + @DoNotInline + public static void setAllowedCapturePolicy( + android.media.AudioAttributes.Builder builder, + @C.AudioAllowedCapturePolicy int allowedCapturePolicy) { + builder.setAllowedCapturePolicy(allowedCapturePolicy); + } + } + + @RequiresApi(32) + private static final class Api32 { + @DoNotInline + public static void setSpatializationBehavior( + android.media.AudioAttributes.Builder builder, + @C.SpatializationBehavior int spatializationBehavior) { + builder.setSpatializationBehavior(spatializationBehavior); + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioCapabilities.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioCapabilities.java index 25c0e70ae5..066786bd60 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioCapabilities.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioCapabilities.java @@ -15,30 +15,43 @@ */ package com.google.android.exoplayer2.audio; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + import android.annotation.SuppressLint; -import android.annotation.TargetApi; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; +import android.media.AudioAttributes; import android.media.AudioFormat; import android.media.AudioManager; +import android.media.AudioTrack; import android.net.Uri; import android.provider.Settings.Global; +import android.util.Pair; +import androidx.annotation.DoNotInline; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.primitives.Ints; import java.util.Arrays; /** Represents the set of audio formats that a device is capable of playing. */ -@TargetApi(21) public final class AudioCapabilities { private static final int DEFAULT_MAX_CHANNEL_COUNT = 8; + private static final int DEFAULT_SAMPLE_RATE_HZ = 48_000; /** The minimum audio capabilities supported by all devices. */ public static final AudioCapabilities DEFAULT_AUDIO_CAPABILITIES = new AudioCapabilities(new int[] {AudioFormat.ENCODING_PCM_16BIT}, DEFAULT_MAX_CHANNEL_COUNT); /** Audio capabilities when the device specifies external surround sound. 
*/ + @SuppressWarnings("InlinedApi") private static final AudioCapabilities EXTERNAL_SURROUND_SOUND_CAPABILITIES = new AudioCapabilities( new int[] { @@ -46,6 +59,21 @@ public final class AudioCapabilities { }, DEFAULT_MAX_CHANNEL_COUNT); + /** + * All surround sound encodings that a device may be capable of playing mapped to a maximum + * channel count. + */ + private static final ImmutableMap ALL_SURROUND_ENCODINGS_AND_MAX_CHANNELS = + new ImmutableMap.Builder() + .put(C.ENCODING_AC3, 6) + .put(C.ENCODING_AC4, 6) + .put(C.ENCODING_DTS, 6) + .put(C.ENCODING_E_AC3_JOC, 6) + .put(C.ENCODING_E_AC3, 8) + .put(C.ENCODING_DTS_HD, 8) + .put(C.ENCODING_DOLBY_TRUEHD, 8) + .buildOrThrow(); + /** Global settings key for devices that can specify external surround sound. */ private static final String EXTERNAL_SURROUND_SOUND_KEY = "external_surround_sound_enabled"; @@ -58,8 +86,8 @@ public final class AudioCapabilities { @SuppressWarnings("InlinedApi") public static AudioCapabilities getCapabilities(Context context) { Intent intent = - context.registerReceiver( - /* receiver= */ null, new IntentFilter(AudioManager.ACTION_HDMI_AUDIO_PLUG)); + Util.registerReceiverNotExported( + context, /* receiver= */ null, new IntentFilter(AudioManager.ACTION_HDMI_AUDIO_PLUG)); return getCapabilities(context, intent); } @@ -69,6 +97,14 @@ public static AudioCapabilities getCapabilities(Context context) { && Global.getInt(context.getContentResolver(), EXTERNAL_SURROUND_SOUND_KEY, 0) == 1) { return EXTERNAL_SURROUND_SOUND_CAPABILITIES; } + // AudioTrack.isDirectPlaybackSupported returns true for encodings that are supported for audio + // offload, as well as for encodings we want to list for passthrough mode. Therefore we only use + // it on TV and automotive devices, which generally shouldn't support audio offload for surround + // encodings. + if (Util.SDK_INT >= 29 && (Util.isTv(context) || Util.isAutomotive(context))) { + return new AudioCapabilities( + Api29.getDirectPlaybackSupportedEncodings(), DEFAULT_MAX_CHANNEL_COUNT); + } if (intent == null || intent.getIntExtra(AudioManager.EXTRA_AUDIO_PLUG_STATE, 0) == 0) { return DEFAULT_AUDIO_CAPABILITIES; } @@ -117,20 +153,74 @@ public AudioCapabilities(@Nullable int[] supportedEncodings, int maxChannelCount /** * Returns whether this device supports playback of the specified audio {@code encoding}. * - * @param encoding One of {@link android.media.AudioFormat}'s {@code ENCODING_*} constants. + * @param encoding One of {@link C.Encoding}'s {@code ENCODING_*} constants. * @return Whether this device supports playback the specified audio {@code encoding}. */ - public boolean supportsEncoding(int encoding) { + public boolean supportsEncoding(@C.Encoding int encoding) { return Arrays.binarySearch(supportedEncodings, encoding) >= 0; } - /** - * Returns the maximum number of channels the device can play at the same time. - */ + /** Returns the maximum number of channels the device can play at the same time. */ public int getMaxChannelCount() { return maxChannelCount; } + /** Returns whether the device can do passthrough playback for {@code format}. */ + public boolean isPassthroughPlaybackSupported(Format format) { + return getEncodingAndChannelConfigForPassthrough(format) != null; + } + + /** + * Returns the encoding and channel config to use when configuring an {@link AudioTrack} in + * passthrough mode for the specified {@link Format}. Returns {@code null} if passthrough of the + * format is unsupported. + * + * @param format The {@link Format}. 
+ * @return The encoding and channel config to use, or {@code null} if passthrough of the format is + * unsupported. + */ + @Nullable + public Pair getEncodingAndChannelConfigForPassthrough(Format format) { + @C.Encoding + int encoding = MimeTypes.getEncoding(checkNotNull(format.sampleMimeType), format.codecs); + // Check that this is an encoding known to work for passthrough. This avoids trying to use + // passthrough with an encoding where the device/app reports it's capable but it is untested or + // known to be broken (for example AAC-LC). + if (!ALL_SURROUND_ENCODINGS_AND_MAX_CHANNELS.containsKey(encoding)) { + return null; + } + + if (encoding == C.ENCODING_E_AC3_JOC && !supportsEncoding(C.ENCODING_E_AC3_JOC)) { + // E-AC3 receivers support E-AC3 JOC streams (but decode only the base layer). + encoding = C.ENCODING_E_AC3; + } else if (encoding == C.ENCODING_DTS_HD && !supportsEncoding(C.ENCODING_DTS_HD)) { + // DTS receivers support DTS-HD streams (but decode only the core layer). + encoding = C.ENCODING_DTS; + } + if (!supportsEncoding(encoding)) { + return null; + } + int channelCount; + if (format.channelCount == Format.NO_VALUE || encoding == C.ENCODING_E_AC3_JOC) { + // In HLS chunkless preparation, the format channel count and sample rate may be unset. See + // https://github.com/google/ExoPlayer/issues/10204 and b/222127949 for more details. + // For E-AC3 JOC, the format is object based so the format channel count is arbitrary. + int sampleRate = + format.sampleRate != Format.NO_VALUE ? format.sampleRate : DEFAULT_SAMPLE_RATE_HZ; + channelCount = getMaxSupportedChannelCountForPassthrough(encoding, sampleRate); + } else { + channelCount = format.channelCount; + if (channelCount > maxChannelCount) { + return null; + } + } + int channelConfig = getChannelConfigForPassthrough(channelCount); + if (channelConfig == AudioFormat.CHANNEL_INVALID) { + return null; + } + return Pair.create(encoding, channelConfig); + } + @Override public boolean equals(@Nullable Object other) { if (this == other) { @@ -151,11 +241,105 @@ public int hashCode() { @Override public String toString() { - return "AudioCapabilities[maxChannelCount=" + maxChannelCount - + ", supportedEncodings=" + Arrays.toString(supportedEncodings) + "]"; + return "AudioCapabilities[maxChannelCount=" + + maxChannelCount + + ", supportedEncodings=" + + Arrays.toString(supportedEncodings) + + "]"; } private static boolean deviceMaySetExternalSurroundSoundGlobalSetting() { - return Util.SDK_INT >= 17 && "Amazon".equals(Util.MANUFACTURER); + return Util.SDK_INT >= 17 + && ("Amazon".equals(Util.MANUFACTURER) || "Xiaomi".equals(Util.MANUFACTURER)); + } + + /** + * Returns the maximum number of channels supported for passthrough playback of audio in the given + * encoding, or {@code 0} if the format is unsupported. + */ + private static int getMaxSupportedChannelCountForPassthrough( + @C.Encoding int encoding, int sampleRate) { + // From API 29 we can get the channel count from the platform, but before then there is no way + // to query the platform so we assume the channel count matches the maximum channel count per + // audio encoding spec. 
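A usage sketch of the new passthrough helpers (not part of the patch; assumes a Context named context is available):

    AudioCapabilities capabilities = AudioCapabilities.getCapabilities(context);
    Format eac3Format =
        new Format.Builder()
            .setSampleMimeType(MimeTypes.AUDIO_E_AC3)
            .setChannelCount(6)
            .setSampleRate(48_000)
            .build();
    if (capabilities.isPassthroughPlaybackSupported(eac3Format)) {
      Pair<Integer, Integer> encodingAndChannelConfig =
          capabilities.getEncodingAndChannelConfigForPassthrough(eac3Format);
      // first is a C.Encoding value, second an AudioFormat channel mask for the AudioTrack.
    }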
+ if (Util.SDK_INT >= 29) { + return Api29.getMaxSupportedChannelCountForPassthrough(encoding, sampleRate); + } + return checkNotNull(ALL_SURROUND_ENCODINGS_AND_MAX_CHANNELS.getOrDefault(encoding, 0)); + } + + private static int getChannelConfigForPassthrough(int channelCount) { + if (Util.SDK_INT <= 28) { + // In passthrough mode the channel count used to configure the audio track doesn't affect how + // the stream is handled, except that some devices do overly-strict channel configuration + // checks. Therefore we override the channel count so that a known-working channel + // configuration is chosen in all cases. See [Internal: b/29116190]. + if (channelCount == 7) { + channelCount = 8; + } else if (channelCount == 3 || channelCount == 4 || channelCount == 5) { + channelCount = 6; + } + } + + // Workaround for Nexus Player not reporting support for mono passthrough. See + // [Internal: b/34268671]. + if (Util.SDK_INT <= 26 && "fugu".equals(Util.DEVICE) && channelCount == 1) { + channelCount = 2; + } + + return Util.getAudioTrackChannelConfig(channelCount); + } + + @RequiresApi(29) + private static final class Api29 { + private static final AudioAttributes DEFAULT_AUDIO_ATTRIBUTES = + new AudioAttributes.Builder() + .setUsage(AudioAttributes.USAGE_MEDIA) + .setContentType(AudioAttributes.CONTENT_TYPE_MOVIE) + .setFlags(0) + .build(); + + private Api29() {} + + @DoNotInline + public static int[] getDirectPlaybackSupportedEncodings() { + ImmutableList.Builder supportedEncodingsListBuilder = ImmutableList.builder(); + for (int encoding : ALL_SURROUND_ENCODINGS_AND_MAX_CHANNELS.keySet()) { + if (AudioTrack.isDirectPlaybackSupported( + new AudioFormat.Builder() + .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO) + .setEncoding(encoding) + .setSampleRate(DEFAULT_SAMPLE_RATE_HZ) + .build(), + DEFAULT_AUDIO_ATTRIBUTES)) { + supportedEncodingsListBuilder.add(encoding); + } + } + supportedEncodingsListBuilder.add(AudioFormat.ENCODING_PCM_16BIT); + return Ints.toArray(supportedEncodingsListBuilder.build()); + } + + /** + * Returns the maximum number of channels supported for passthrough playback of audio in the + * given format, or {@code 0} if the format is unsupported. + */ + @DoNotInline + public static int getMaxSupportedChannelCountForPassthrough( + @C.Encoding int encoding, int sampleRate) { + // TODO(internal b/234351617): Query supported channel masks directly once it's supported, + // see also b/25994457. + for (int channelCount = DEFAULT_MAX_CHANNEL_COUNT; channelCount > 0; channelCount--) { + AudioFormat audioFormat = + new AudioFormat.Builder() + .setEncoding(encoding) + .setSampleRate(sampleRate) + .setChannelMask(Util.getAudioTrackChannelConfig(channelCount)) + .build(); + if (AudioTrack.isDirectPlaybackSupported(audioFormat, DEFAULT_AUDIO_ATTRIBUTES)) { + return channelCount; + } + } + return 0; + } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioCapabilitiesReceiver.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioCapabilitiesReceiver.java index fe84c49656..0707824296 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioCapabilitiesReceiver.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioCapabilitiesReceiver.java @@ -34,9 +34,7 @@ */ public final class AudioCapabilitiesReceiver { - /** - * Listener notified when audio capabilities change. - */ + /** Listener notified when audio capabilities change. 
*/ public interface Listener { /** @@ -45,7 +43,6 @@ public interface Listener { * @param audioCapabilities The current audio capabilities for the device. */ void onAudioCapabilitiesChanged(AudioCapabilities audioCapabilities); - } private final Context context; @@ -54,7 +51,7 @@ public interface Listener { @Nullable private final BroadcastReceiver receiver; @Nullable private final ExternalSurroundSoundSettingObserver externalSurroundSoundSettingObserver; - /* package */ @Nullable AudioCapabilities audioCapabilities; + @Nullable /* package */ AudioCapabilities audioCapabilities; private boolean registered; /** @@ -65,7 +62,7 @@ public AudioCapabilitiesReceiver(Context context, Listener listener) { context = context.getApplicationContext(); this.context = context; this.listener = Assertions.checkNotNull(listener); - handler = new Handler(Util.getLooper()); + handler = Util.createHandlerForCurrentOrMainLooper(); receiver = Util.SDK_INT >= 21 ? new HdmiAudioPlugBroadcastReceiver() : null; Uri externalSurroundSoundUri = AudioCapabilities.getExternalSurroundSoundGlobalSettingUri(); externalSurroundSoundSettingObserver = @@ -77,8 +74,8 @@ public AudioCapabilitiesReceiver(Context context, Listener listener) { /** * Registers the receiver, meaning it will notify the listener when audio capability changes - * occur. The current audio capabilities will be returned. It is important to call - * {@link #unregister} when the receiver is no longer required. + * occur. The current audio capabilities will be returned. It is important to call {@link + * #unregister} when the receiver is no longer required. * * @return The current audio capabilities for the device. */ @@ -91,12 +88,10 @@ public AudioCapabilities register() { if (externalSurroundSoundSettingObserver != null) { externalSurroundSoundSettingObserver.register(); } - Intent stickyIntent = null; + @Nullable Intent stickyIntent = null; if (receiver != null) { IntentFilter intentFilter = new IntentFilter(AudioManager.ACTION_HDMI_AUDIO_PLUG); - stickyIntent = - context.registerReceiver( - receiver, intentFilter, /* broadcastPermission= */ null, handler); + stickyIntent = Util.registerReceiverNotExported(context, receiver, intentFilter, handler); } audioCapabilities = AudioCapabilities.getCapabilities(context, stickyIntent); return audioCapabilities; @@ -162,5 +157,4 @@ public void onChange(boolean selfChange) { onNewAudioCapabilities(AudioCapabilities.getCapabilities(context)); } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioDecoderException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioDecoderException.java deleted file mode 100644 index ac4f632d62..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioDecoderException.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
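A usage sketch for the reworked AudioCapabilitiesReceiver above (not part of the patch):

    AudioCapabilitiesReceiver receiver =
        new AudioCapabilitiesReceiver(
            context, audioCapabilities -> {
              // React to HDMI plug events or surround-sound setting changes here.
            });
    AudioCapabilities initialCapabilities = receiver.register();
    // ...
    receiver.unregister(); // when the player is released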
- */ -package com.google.android.exoplayer2.audio; - -/** Thrown when an audio decoder error occurs. */ -public class AudioDecoderException extends Exception { - - /** @param message The detail message for this exception. */ - public AudioDecoderException(String message) { - super(message); - } - - /** - * @param message The detail message for this exception. - * @param cause the cause (which is saved for later retrieval by the {@link #getCause()} method). - * A null value is permitted, and indicates that the cause is nonexistent or unknown. - */ - public AudioDecoderException(String message, Throwable cause) { - super(message, cause); - } - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioListener.java deleted file mode 100644 index 8ce365b283..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioListener.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (C) 2018 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.audio; - -/** A listener for changes in audio configuration. */ -public interface AudioListener { - - /** - * Called when the audio session is set. - * - * @param audioSessionId The audio session id. - */ - default void onAudioSessionId(int audioSessionId) {} - - /** - * Called when the audio attributes change. - * - * @param audioAttributes The audio attributes. - */ - default void onAudioAttributesChanged(AudioAttributes audioAttributes) {} - - /** - * Called when the volume changes. - * - * @param volume The new volume, with 0 being silence and 1 being unity gain. - */ - default void onVolumeChanged(float volume) {} -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioProcessor.java index f75b2cd317..791f9aa050 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioProcessor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioProcessor.java @@ -15,9 +15,12 @@ */ package com.google.android.exoplayer2.audio; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Objects; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.nio.ByteBuffer; import java.nio.ByteOrder; @@ -43,7 +46,7 @@ final class AudioFormat { /** The number of interleaved channels. */ public final int channelCount; /** The type of linear PCM encoding. */ - @C.PcmEncoding public final int encoding; + public final @C.PcmEncoding int encoding; /** The number of bytes used to represent one audio frame. 
*/ public final int bytesPerFrame; @@ -68,6 +71,25 @@ public String toString() { + encoding + ']'; } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (!(o instanceof AudioFormat)) { + return false; + } + AudioFormat that = (AudioFormat) o; + return sampleRate == that.sampleRate + && channelCount == that.channelCount + && encoding == that.encoding; + } + + @Override + public int hashCode() { + return Objects.hashCode(sampleRate, channelCount, encoding); + } } /** Exception thrown when a processor can't be configured for a given input audio format. */ @@ -76,7 +98,6 @@ final class UnhandledAudioFormatException extends Exception { public UnhandledAudioFormatException(AudioFormat inputAudioFormat) { super("Unhandled format: " + inputAudioFormat); } - } /** An empty, direct {@link ByteBuffer}. */ @@ -97,28 +118,30 @@ public UnhandledAudioFormatException(AudioFormat inputAudioFormat) { * @return The configured output audio format if this instance is {@link #isActive() active}. * @throws UnhandledAudioFormatException Thrown if the specified format can't be handled as input. */ + @CanIgnoreReturnValue AudioFormat configure(AudioFormat inputAudioFormat) throws UnhandledAudioFormatException; /** Returns whether the processor is configured and will process input buffers. */ boolean isActive(); /** - * Queues audio data between the position and limit of the input {@code buffer} for processing. - * {@code buffer} must be a direct byte buffer with native byte order. Its contents are treated as - * read-only. Its position will be advanced by the number of bytes consumed (which may be zero). - * The caller retains ownership of the provided buffer. Calling this method invalidates any - * previous buffer returned by {@link #getOutput()}. + * Queues audio data between the position and limit of the {@code inputBuffer} for processing. + * After calling this method, processed output may be available via {@link #getOutput()}. Calling + * {@code queueInput(ByteBuffer)} again invalidates any pending output. * - * @param buffer The input buffer to process. + * @param inputBuffer The input buffer to process. It must be a direct byte buffer with native + * byte order. Its contents are treated as read-only. Its position will be advanced by the + * number of bytes consumed (which may be zero). The caller retains ownership of the provided + * buffer. */ - void queueInput(ByteBuffer buffer); + void queueInput(ByteBuffer inputBuffer); /** - * Queues an end of stream signal. After this method has been called, - * {@link #queueInput(ByteBuffer)} may not be called until after the next call to - * {@link #flush()}. Calling {@link #getOutput()} will return any remaining output data. Multiple - * calls may be required to read all of the remaining output data. {@link #isEnded()} will return - * {@code true} once all remaining output data has been read. + * Queues an end of stream signal. After this method has been called, {@link + * #queueInput(ByteBuffer)} may not be called until after the next call to {@link #flush()}. + * Calling {@link #getOutput()} will return any remaining output data. Multiple calls may be + * required to read all of the remaining output data. {@link #isEnded()} will return {@code true} + * once all remaining output data has been read. 
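A sketch of the queueing/draining contract described above (not part of the patch; assumes an active AudioProcessor named processor and a direct, native-order inputBuffer):

    processor.queueInput(inputBuffer);         // position is advanced by the bytes consumed
    ByteBuffer output = processor.getOutput(); // may be empty until more input is queued
    // ... at end of stream:
    processor.queueEndOfStream();
    while (!processor.isEnded()) {
      ByteBuffer remaining = processor.getOutput();
      // Consume "remaining"; several calls may be needed before isEnded() returns true.
    }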
*/ void queueEndOfStream(); @@ -132,8 +155,8 @@ public UnhandledAudioFormatException(AudioFormat inputAudioFormat) { ByteBuffer getOutput(); /** - * Returns whether this processor will return no more output from {@link #getOutput()} until it - * has been {@link #flush()}ed and more input has been queued. + * Returns whether this processor will return no more output from {@link #getOutput()} until + * {@link #flush()} has been called and more input has been queued. */ boolean isEnded(); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioProcessorChain.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioProcessorChain.java new file mode 100644 index 0000000000..0c28ef6d6a --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioProcessorChain.java @@ -0,0 +1,72 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.audio; + +import com.google.android.exoplayer2.PlaybackParameters; + +/** + * Provides a chain of audio processors, which are used for any user-defined processing and applying + * playback parameters (if supported). Because applying playback parameters can skip and + * stretch/compress audio, the sink will query the chain for information on how to transform its + * output position to map it onto a media position, via {@link #getMediaDuration(long)} and {@link + * #getSkippedOutputFrameCount()}. + */ +public interface AudioProcessorChain { + + /** + * Returns the fixed chain of audio processors that will process audio. This method is called once + * during initialization, but audio processors may change state to become active/inactive during + * playback. + */ + AudioProcessor[] getAudioProcessors(); + + /** + * Configures audio processors to apply the specified playback parameters immediately, returning + * the new playback parameters, which may differ from those passed in. Only called when processors + * have no input pending. + * + * @param playbackParameters The playback parameters to try to apply. + * @return The playback parameters that were actually applied. + */ + PlaybackParameters applyPlaybackParameters(PlaybackParameters playbackParameters); + + /** + * Configures audio processors to apply whether to skip silences immediately, returning the new + * value. Only called when processors have no input pending. + * + * @param skipSilenceEnabled Whether silences should be skipped in the audio stream. + * @return The new value. + */ + boolean applySkipSilenceEnabled(boolean skipSilenceEnabled); + + /** + * Returns the media duration corresponding to the specified playout duration, taking speed + * adjustment due to audio processing into account. + * + *
<p>
      The scaling performed by this method will use the actual playback speed achieved by the + * audio processor chain, on average, since it was last flushed. This may differ very slightly + * from the target playback speed. + * + * @param playoutDuration The playout duration to scale. + * @return The corresponding media duration, in the same units as {@code duration}. + */ + long getMediaDuration(long playoutDuration); + + /** + * Returns the number of output audio frames skipped since the audio processors were last flushed. + */ + long getSkippedOutputFrameCount(); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioRendererEventListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioRendererEventListener.java index bf5822caf6..7cba4ec06c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioRendererEventListener.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioRendererEventListener.java @@ -17,13 +17,19 @@ import static com.google.android.exoplayer2.util.Util.castNonNull; +import android.media.AudioTrack; +import android.media.MediaCodec; +import android.media.MediaCodec.CodecException; import android.os.Handler; import android.os.SystemClock; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.Renderer; import com.google.android.exoplayer2.decoder.DecoderCounters; +import com.google.android.exoplayer2.decoder.DecoderException; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation; import com.google.android.exoplayer2.util.Assertions; /** @@ -40,13 +46,6 @@ public interface AudioRendererEventListener { */ default void onAudioEnabled(DecoderCounters counters) {} - /** - * Called when the audio session is set. - * - * @param audioSessionId The audio session id. - */ - default void onAudioSessionId(int audioSessionId) {} - /** * Called when a decoder is created. * @@ -58,24 +57,48 @@ default void onAudioSessionId(int audioSessionId) {} default void onAudioDecoderInitialized( String decoderName, long initializedTimestampMs, long initializationDurationMs) {} + /** + * @deprecated Use {@link #onAudioInputFormatChanged(Format, DecoderReuseEvaluation)}. + */ + @Deprecated + default void onAudioInputFormatChanged(Format format) {} + /** * Called when the format of the media being consumed by the renderer changes. * * @param format The new format. + * @param decoderReuseEvaluation The result of the evaluation to determine whether an existing + * decoder instance can be reused for the new format, or {@code null} if the renderer did not + * have a decoder. */ - default void onAudioInputFormatChanged(Format format) {} + default void onAudioInputFormatChanged( + Format format, @Nullable DecoderReuseEvaluation decoderReuseEvaluation) {} + + /** + * Called when the audio position has increased for the first time since the last pause or + * position reset. + * + * @param playoutStartSystemTimeMs The approximate derived {@link System#currentTimeMillis()} at + * which playout started. + */ + default void onAudioPositionAdvancing(long playoutStartSystemTimeMs) {} /** - * Called when an {@link AudioSink} underrun occurs. + * Called when an audio underrun occurs. * - * @param bufferSize The size of the {@link AudioSink}'s buffer, in bytes. 
- * @param bufferSizeMs The size of the {@link AudioSink}'s buffer, in milliseconds, if it is - * configured for PCM output. {@link C#TIME_UNSET} if it is configured for passthrough output, - * as the buffered media can have a variable bitrate so the duration may be unknown. - * @param elapsedSinceLastFeedMs The time since the {@link AudioSink} was last fed data. + * @param bufferSize The size of the audio output buffer, in bytes. + * @param bufferSizeMs The size of the audio output buffer, in milliseconds, if it contains PCM + * encoded audio. {@link C#TIME_UNSET} if the output buffer contains non-PCM encoded audio. + * @param elapsedSinceLastFeedMs The time since audio was last written to the output buffer. */ - default void onAudioSinkUnderrun( - int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {} + default void onAudioUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {} + + /** + * Called when a decoder is released. + * + * @param decoderName The decoder that was released. + */ + default void onAudioDecoderReleased(String decoderName) {} /** * Called when the renderer is disabled. @@ -85,38 +108,73 @@ default void onAudioSinkUnderrun( default void onAudioDisabled(DecoderCounters counters) {} /** - * Dispatches events to a {@link AudioRendererEventListener}. + * Called when skipping silences is enabled or disabled in the audio stream. + * + * @param skipSilenceEnabled Whether skipping silences in the audio stream is enabled. + */ + default void onSkipSilenceEnabledChanged(boolean skipSilenceEnabled) {} + + /** + * Called when an audio decoder encounters an error. + * + *
<p>
      This method being called does not indicate that playback has failed, or that it will fail. + * The player may be able to recover from the error. Hence applications should not + * implement this method to display a user visible error or initiate an application level retry. + * {@link Player.Listener#onPlayerError} is the appropriate place to implement such behavior. This + * method is called to provide the application with an opportunity to log the error if it wishes + * to do so. + * + * @param audioCodecError The error. Typically a {@link CodecException} if the renderer uses + * {@link MediaCodec}, or a {@link DecoderException} if the renderer uses a software decoder. */ + default void onAudioCodecError(Exception audioCodecError) {} + + /** + * Called when {@link AudioSink} has encountered an error. + * + *
<p>
      If the sink writes to a platform {@link AudioTrack}, this will be called for all {@link + * AudioTrack} errors. + * + *
<p>
      This method being called does not indicate that playback has failed, or that it will fail. + * The player may be able to recover from the error. Hence applications should not + * implement this method to display a user visible error or initiate an application level retry. + * {@link Player.Listener#onPlayerError} is the appropriate place to implement such behavior. This + * method is called to provide the application with an opportunity to log the error if it wishes + * to do so. + * + * @param audioSinkError The error that occurred. Typically an {@link + * AudioSink.InitializationException}, a {@link AudioSink.WriteException}, or an {@link + * AudioSink.UnexpectedDiscontinuityException}. + */ + default void onAudioSinkError(Exception audioSinkError) {} + + /** Dispatches events to an {@link AudioRendererEventListener}. */ final class EventDispatcher { @Nullable private final Handler handler; @Nullable private final AudioRendererEventListener listener; /** - * @param handler A handler for dispatching events, or null if creating a dummy instance. - * @param listener The listener to which events should be dispatched, or null if creating a - * dummy instance. + * @param handler A handler for dispatching events, or null if events should not be dispatched. + * @param listener The listener to which events should be dispatched, or null if events should + * not be dispatched. */ - public EventDispatcher(@Nullable Handler handler, - @Nullable AudioRendererEventListener listener) { + public EventDispatcher( + @Nullable Handler handler, @Nullable AudioRendererEventListener listener) { this.handler = listener != null ? Assertions.checkNotNull(handler) : null; this.listener = listener; } - /** - * Invokes {@link AudioRendererEventListener#onAudioEnabled(DecoderCounters)}. - */ - public void enabled(final DecoderCounters decoderCounters) { + /** Invokes {@link AudioRendererEventListener#onAudioEnabled(DecoderCounters)}. */ + public void enabled(DecoderCounters decoderCounters) { if (handler != null) { handler.post(() -> castNonNull(listener).onAudioEnabled(decoderCounters)); } } - /** - * Invokes {@link AudioRendererEventListener#onAudioDecoderInitialized(String, long, long)}. - */ - public void decoderInitialized(final String decoderName, - final long initializedTimestampMs, final long initializationDurationMs) { + /** Invokes {@link AudioRendererEventListener#onAudioDecoderInitialized(String, long, long)}. */ + public void decoderInitialized( + String decoderName, long initializedTimestampMs, long initializationDurationMs) { if (handler != null) { handler.post( () -> @@ -126,32 +184,46 @@ public void decoderInitialized(final String decoderName, } } - /** - * Invokes {@link AudioRendererEventListener#onAudioInputFormatChanged(Format)}. - */ - public void inputFormatChanged(final Format format) { + /** Invokes {@link AudioRendererEventListener#onAudioInputFormatChanged(Format)}. */ + @SuppressWarnings("deprecation") // Calling deprecated listener method. + public void inputFormatChanged( + Format format, @Nullable DecoderReuseEvaluation decoderReuseEvaluation) { + if (handler != null) { + handler.post( + () -> { + castNonNull(listener).onAudioInputFormatChanged(format); + castNonNull(listener).onAudioInputFormatChanged(format, decoderReuseEvaluation); + }); + } + } + + /** Invokes {@link AudioRendererEventListener#onAudioPositionAdvancing(long)}. 
*/ + public void positionAdvancing(long playoutStartSystemTimeMs) { if (handler != null) { - handler.post(() -> castNonNull(listener).onAudioInputFormatChanged(format)); + handler.post( + () -> castNonNull(listener).onAudioPositionAdvancing(playoutStartSystemTimeMs)); } } - /** - * Invokes {@link AudioRendererEventListener#onAudioSinkUnderrun(int, long, long)}. - */ - public void audioTrackUnderrun(final int bufferSize, final long bufferSizeMs, - final long elapsedSinceLastFeedMs) { + /** Invokes {@link AudioRendererEventListener#onAudioUnderrun(int, long, long)}. */ + public void underrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) { if (handler != null) { handler.post( () -> castNonNull(listener) - .onAudioSinkUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs)); + .onAudioUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs)); } } - /** - * Invokes {@link AudioRendererEventListener#onAudioDisabled(DecoderCounters)}. - */ - public void disabled(final DecoderCounters counters) { + /** Invokes {@link AudioRendererEventListener#onAudioDecoderReleased(String)}. */ + public void decoderReleased(String decoderName) { + if (handler != null) { + handler.post(() -> castNonNull(listener).onAudioDecoderReleased(decoderName)); + } + } + + /** Invokes {@link AudioRendererEventListener#onAudioDisabled(DecoderCounters)}. */ + public void disabled(DecoderCounters counters) { counters.ensureUpdated(); if (handler != null) { handler.post( @@ -162,12 +234,24 @@ public void disabled(final DecoderCounters counters) { } } - /** - * Invokes {@link AudioRendererEventListener#onAudioSessionId(int)}. - */ - public void audioSessionId(final int audioSessionId) { + /** Invokes {@link AudioRendererEventListener#onSkipSilenceEnabledChanged(boolean)}. */ + public void skipSilenceEnabledChanged(boolean skipSilenceEnabled) { + if (handler != null) { + handler.post(() -> castNonNull(listener).onSkipSilenceEnabledChanged(skipSilenceEnabled)); + } + } + + /** Invokes {@link AudioRendererEventListener#onAudioSinkError(Exception)}. */ + public void audioSinkError(Exception audioSinkError) { + if (handler != null) { + handler.post(() -> castNonNull(listener).onAudioSinkError(audioSinkError)); + } + } + + /** Invokes {@link AudioRendererEventListener#onAudioCodecError(Exception)}. 
*/ + public void audioCodecError(Exception audioCodecError) { if (handler != null) { - handler.post(() -> castNonNull(listener).onAudioSessionId(audioSessionId)); + handler.post(() -> castNonNull(listener).onAudioCodecError(audioCodecError)); } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioSink.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioSink.java index f2458a7471..c51a3ddd8e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioSink.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioSink.java @@ -15,64 +15,77 @@ */ package com.google.android.exoplayer2.audio; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.media.AudioDeviceInfo; import android.media.AudioTrack; +import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.PlaybackException; import com.google.android.exoplayer2.PlaybackParameters; +import com.google.android.exoplayer2.Player; +import com.google.android.exoplayer2.analytics.PlayerId; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.nio.ByteBuffer; /** * A sink that consumes audio data. * - *
<p>
      Before starting playback, specify the input audio format by calling {@link #configure(int, - * int, int, int, int[], int, int)}. + *
<p>
      Before starting playback, specify the input audio format by calling {@link #configure(Format, + * int, int[])}. * - *
<p>
      Call {@link #handleBuffer(ByteBuffer, long)} to write data, and {@link #handleDiscontinuity()} - * when the data being fed is discontinuous. Call {@link #play()} to start playing the written data. + *
<p>
      Call {@link #handleBuffer(ByteBuffer, long, int)} to write data, and {@link + * #handleDiscontinuity()} when the data being fed is discontinuous. Call {@link #play()} to start + * playing the written data. * - *
<p>
      Call {@link #configure(int, int, int, int, int[], int, int)} whenever the input format - * changes. The sink will be reinitialized on the next call to {@link #handleBuffer(ByteBuffer, - * long)}. + *
<p>
      Call {@link #configure(Format, int, int[])} whenever the input format changes. The sink will + * be reinitialized on the next call to {@link #handleBuffer(ByteBuffer, long, int)}. * *
<p>
      Call {@link #flush()} to prepare the sink to receive audio data from a new playback position. * *
<p>
      Call {@link #playToEndOfStream()} repeatedly to play out all data when no more input buffers - * will be provided via {@link #handleBuffer(ByteBuffer, long)} until the next {@link #flush()}. - * Call {@link #reset()} when the instance is no longer required. + * will be provided via {@link #handleBuffer(ByteBuffer, long, int)} until the next {@link + * #flush()}. Call {@link #reset()} when the instance is no longer required. * *
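A minimal caller-side sketch of that lifecycle, using the updated configure and handleBuffer signatures (hasMoreAudio, nextBuffer, and nextPresentationTimeUs are illustrative placeholders for the caller's own audio source):

    void feedSink(AudioSink sink, Format inputFormat)
        throws AudioSink.ConfigurationException, AudioSink.InitializationException,
            AudioSink.WriteException {
      sink.configure(inputFormat, /* specifiedBufferSize= */ 0, /* outputChannels= */ null);
      sink.play();
      while (hasMoreAudio()) {
        ByteBuffer buffer = nextBuffer();
        long presentationTimeUs = nextPresentationTimeUs();
        // handleBuffer returns false if the buffer was not fully consumed; the same buffer must
        // be offered again on a later call.
        while (!sink.handleBuffer(buffer, presentationTimeUs, /* encodedAccessUnitCount= */ 1)) {
          // Typically the caller yields here and retries on the next render pass.
        }
      }
      sink.playToEndOfStream();
    }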
<p>
      The implementation may be backed by a platform {@link AudioTrack}. In this case, {@link * #setAudioSessionId(int)}, {@link #setAudioAttributes(AudioAttributes)}, {@link - * #enableTunnelingV21(int)} and/or {@link #disableTunneling()} may be called before writing data to - * the sink. These methods may also be called after writing data to the sink, in which case it will - * be reinitialized as required. For implementations that are not based on platform {@link + * #enableTunnelingV21()} and {@link #disableTunneling()} may be called before writing data to the + * sink. These methods may also be called after writing data to the sink, in which case it will be + * reinitialized as required. For implementations that are not based on platform {@link * AudioTrack}s, calling methods relating to audio sessions, audio attributes, and tunneling may * have no effect. */ public interface AudioSink { - /** - * Listener for audio sink events. - */ + /** Listener for audio sink events. */ interface Listener { - /** - * Called if the audio sink has started rendering audio to a new platform audio session. - * - * @param audioSessionId The newly generated audio session's identifier. - */ - void onAudioSessionId(int audioSessionId); - /** * Called when the audio sink handles a buffer whose timestamp is discontinuous with the last * buffer handled since it was reset. */ void onPositionDiscontinuity(); + /** + * Called when the audio sink's position has increased for the first time since it was last + * paused or flushed. + * + * @param playoutStartSystemTimeMs The approximate derived {@link System#currentTimeMillis()} at + * which playout started. Only valid if the audio track has not underrun. + */ + default void onPositionAdvancing(long playoutStartSystemTimeMs) {} + /** * Called when the audio sink runs out of data. - *
<p>
      - * An audio sink implementation may never call this method (for example, if audio data is + * + *
<p>
      An audio sink implementation may never call this method (for example, if audio data is * consumed in batches rather than based on the sink's own clock). * * @param bufferSize The size of the sink's buffer, in bytes. @@ -83,80 +96,184 @@ interface Listener { */ void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs); - } + /** + * Called when skipping silences is enabled or disabled. + * + * @param skipSilenceEnabled Whether skipping silences is enabled. + */ + void onSkipSilenceEnabledChanged(boolean skipSilenceEnabled); - /** - * Thrown when a failure occurs configuring the sink. - */ - final class ConfigurationException extends Exception { + /** Called when the offload buffer has been partially emptied. */ + default void onOffloadBufferEmptying() {} + + /** Called when the offload buffer has been filled completely. */ + default void onOffloadBufferFull() {} /** - * Creates a new configuration exception with the specified {@code cause} and no message. + * Called when {@link AudioSink} has encountered an error. + * + *
<p>
      If the sink writes to a platform {@link AudioTrack}, this will be called for all {@link + * AudioTrack} errors. + * + *
<p>
      This method being called does not indicate that playback has failed, or that it will fail. + * The player may be able to recover from the error (for example by recreating the AudioTrack, + * possibly with different settings) and continue. Hence applications should not + * implement this method to display a user visible error or initiate an application level retry + * ({@link Player.Listener#onPlayerError} is the appropriate place to implement such behavior). + * This method is called to provide the application with an opportunity to log the error if it + * wishes to do so. + * + *
<p>
      Fatal errors that cannot be recovered will be reported wrapped in a {@link + * ExoPlaybackException} by {@link Player.Listener#onPlayerError(PlaybackException)}. + * + * @param audioSinkError The error that occurred. Typically an {@link InitializationException}, + * a {@link WriteException}, or an {@link UnexpectedDiscontinuityException}. */ - public ConfigurationException(Throwable cause) { + default void onAudioSinkError(Exception audioSinkError) {} + } + + /** Thrown when a failure occurs configuring the sink. */ + final class ConfigurationException extends Exception { + + /** Input {@link Format} of the sink when the configuration failure occurs. */ + public final Format format; + + /** Creates a new configuration exception with the specified {@code cause} and no message. */ + public ConfigurationException(Throwable cause, Format format) { super(cause); + this.format = format; } - /** - * Creates a new configuration exception with the specified {@code message} and no cause. - */ - public ConfigurationException(String message) { + /** Creates a new configuration exception with the specified {@code message} and no cause. */ + public ConfigurationException(String message, Format format) { super(message); + this.format = format; } - } - /** - * Thrown when a failure occurs initializing the sink. - */ + /** Thrown when a failure occurs initializing the sink. */ final class InitializationException extends Exception { - /** - * The underlying {@link AudioTrack}'s state, if applicable. - */ + /** The underlying {@link AudioTrack}'s state. */ public final int audioTrackState; + /** If the exception can be recovered by recreating the sink. */ + public final boolean isRecoverable; + /** The input {@link Format} of the sink when the error occurs. */ + public final Format format; /** - * @param audioTrackState The underlying {@link AudioTrack}'s state, if applicable. + * Creates a new instance. + * + * @param audioTrackState The underlying {@link AudioTrack}'s state. * @param sampleRate The requested sample rate in Hz. * @param channelConfig The requested channel configuration. * @param bufferSize The requested buffer size in bytes. + * @param format The input format of the sink when the error occurs. + * @param isRecoverable Whether the exception can be recovered by recreating the sink. + * @param audioTrackException Exception thrown during the creation of the {@link AudioTrack}. */ - public InitializationException(int audioTrackState, int sampleRate, int channelConfig, - int bufferSize) { - super("AudioTrack init failed: " + audioTrackState + ", Config(" + sampleRate + ", " - + channelConfig + ", " + bufferSize + ")"); + public InitializationException( + int audioTrackState, + int sampleRate, + int channelConfig, + int bufferSize, + Format format, + boolean isRecoverable, + @Nullable Exception audioTrackException) { + super( + "AudioTrack init failed " + + audioTrackState + + " " + + ("Config(" + sampleRate + ", " + channelConfig + ", " + bufferSize + ")") + + (isRecoverable ? " (recoverable)" : ""), + audioTrackException); this.audioTrackState = audioTrackState; + this.isRecoverable = isRecoverable; + this.format = format; } - } - /** - * Thrown when a failure occurs writing to the sink. - */ + /** Thrown when a failure occurs writing to the sink. */ final class WriteException extends Exception { /** * The error value returned from the sink implementation. 
If the sink writes to a platform - * {@link AudioTrack}, this will be the error value returned from - * {@link AudioTrack#write(byte[], int, int)} or {@link AudioTrack#write(ByteBuffer, int, int)}. + * {@link AudioTrack}, this will be the error value returned from {@link + * AudioTrack#write(byte[], int, int)} or {@link AudioTrack#write(ByteBuffer, int, int)}. * Otherwise, the meaning of the error code depends on the sink implementation. */ public final int errorCode; + /** If the exception can be recovered by recreating the sink. */ + public final boolean isRecoverable; + /** The input {@link Format} of the sink when the error occurs. */ + public final Format format; /** + * Creates an instance. + * * @param errorCode The error value returned from the sink implementation. + * @param format The input format of the sink when the error occurs. + * @param isRecoverable Whether the exception can be recovered by recreating the sink. */ - public WriteException(int errorCode) { + public WriteException(int errorCode, Format format, boolean isRecoverable) { super("AudioTrack write failed: " + errorCode); + this.isRecoverable = isRecoverable; this.errorCode = errorCode; + this.format = format; } + } + /** Thrown when the sink encounters an unexpected timestamp discontinuity. */ + final class UnexpectedDiscontinuityException extends Exception { + /** The actual presentation time of a sample, in microseconds. */ + public final long actualPresentationTimeUs; + /** The expected presentation time of a sample, in microseconds. */ + public final long expectedPresentationTimeUs; + + /** + * Creates an instance. + * + * @param actualPresentationTimeUs The actual presentation time of a sample, in microseconds. + * @param expectedPresentationTimeUs The expected presentation time of a sample, in + * microseconds. + */ + public UnexpectedDiscontinuityException( + long actualPresentationTimeUs, long expectedPresentationTimeUs) { + super( + "Unexpected audio track timestamp discontinuity: expected " + + expectedPresentationTimeUs + + ", got " + + actualPresentationTimeUs); + this.actualPresentationTimeUs = actualPresentationTimeUs; + this.expectedPresentationTimeUs = expectedPresentationTimeUs; + } } /** - * Returned by {@link #getCurrentPositionUs(boolean)} when the position is not set. + * The level of support the sink provides for a format. One of {@link + * #SINK_FORMAT_SUPPORTED_DIRECTLY}, {@link #SINK_FORMAT_SUPPORTED_WITH_TRANSCODING} or {@link + * #SINK_FORMAT_UNSUPPORTED}. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + SINK_FORMAT_SUPPORTED_DIRECTLY, + SINK_FORMAT_SUPPORTED_WITH_TRANSCODING, + SINK_FORMAT_UNSUPPORTED + }) + @interface SinkFormatSupport {} + /** The sink supports the format directly, without the need for internal transcoding. */ + int SINK_FORMAT_SUPPORTED_DIRECTLY = 2; + /** + * The sink supports the format, but needs to transcode it internally to do so. Internal + * transcoding may result in lower quality and higher CPU load in some cases. + */ + int SINK_FORMAT_SUPPORTED_WITH_TRANSCODING = 1; + /** The sink does not support the format. */ + int SINK_FORMAT_UNSUPPORTED = 0; + + /** Returned by {@link #getCurrentPositionUs(boolean)} when the position is not set. */ long CURRENT_POSITION_NOT_SET = Long.MIN_VALUE; /** @@ -167,17 +284,32 @@ public WriteException(int errorCode) { void setListener(Listener listener); /** - * Returns whether the sink supports the audio format. + * Sets the {@link PlayerId} of the player using this audio sink. 
+ * + * @param playerId The {@link PlayerId}, or null to clear a previously set id. + */ + default void setPlayerId(@Nullable PlayerId playerId) {} + + /** + * Returns whether the sink supports a given {@link Format}. + * + * @param format The format. + * @return Whether the sink supports the format. + */ + boolean supportsFormat(Format format); + + /** + * Returns the level of support that the sink provides for a given {@link Format}. * - * @param channelCount The number of channels, or {@link Format#NO_VALUE} if not known. - * @param encoding The audio encoding, or {@link Format#NO_VALUE} if not known. - * @return Whether the sink supports the audio format. + * @param format The format. + * @return The level of support provided. */ - boolean supportsOutput(int channelCount, @C.Encoding int encoding); + @SinkFormatSupport + int getFormatSupport(Format format); /** - * Returns the playback position in the stream starting at zero, in microseconds, or - * {@link #CURRENT_POSITION_NOT_SET} if it is not yet available. + * Returns the playback position in the stream starting at zero, in microseconds, or {@link + * #CURRENT_POSITION_NOT_SET} if it is not yet available. * * @param sourceEnded Specify {@code true} if no more input buffers will be provided. * @return The playback position relative to the start of playback, in microseconds. @@ -187,9 +319,7 @@ public WriteException(int errorCode) { /** * Configures (or reconfigures) the sink. * - * @param inputEncoding The encoding of audio data provided in the input buffers. - * @param inputChannelCount The number of channels. - * @param inputSampleRate The sample rate in Hz. + * @param inputFormat The format of audio data provided in the input buffers. * @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to infer a * suitable buffer size. * @param outputChannels A mapping from input to output channels that is applied to this sink's @@ -197,25 +327,12 @@ public WriteException(int errorCode) { * input unchanged. Otherwise, the element at index {@code i} specifies index of the input * channel to map to output channel {@code i} when preprocessing input buffers. After the map * is applied the audio data will have {@code outputChannels.length} channels. - * @param trimStartFrames The number of audio frames to trim from the start of data written to the - * sink after this call. - * @param trimEndFrames The number of audio frames to trim from data written to the sink - * immediately preceding the next call to {@link #flush()} or this method. * @throws ConfigurationException If an error occurs configuring the sink. */ - void configure( - @C.Encoding int inputEncoding, - int inputChannelCount, - int inputSampleRate, - int specifiedBufferSize, - @Nullable int[] outputChannels, - int trimStartFrames, - int trimEndFrames) + void configure(Format inputFormat, int specifiedBufferSize, @Nullable int[] outputChannels) throws ConfigurationException; - /** - * Starts or resumes consuming audio if initialized. - */ + /** Starts or resumes consuming audio if initialized. */ void play(); /** Signals to the sink that the next buffer may be discontinuous with the previous buffer. */ @@ -229,16 +346,19 @@ void configure( * *
<p>
      Returns whether the data was handled in full. If the data was not handled in full then the * same {@link ByteBuffer} must be provided to subsequent calls until it has been fully consumed, - * except in the case of an intervening call to {@link #flush()} (or to {@link #configure(int, - * int, int, int, int[], int, int)} that causes the sink to be flushed). + * except in the case of an intervening call to {@link #flush()} (or to {@link #configure(Format, + * int, int[])} that causes the sink to be flushed). * * @param buffer The buffer containing audio data. * @param presentationTimeUs The presentation timestamp of the buffer in microseconds. + * @param encodedAccessUnitCount The number of encoded access units in the buffer, or 1 if the + * buffer contains PCM audio. This allows batching multiple encoded access units in one + * buffer. * @return Whether the buffer was handled fully. * @throws InitializationException If an error occurs initializing the sink. * @throws WriteException If an error occurs writing the audio data. */ - boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) + boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs, int encodedAccessUnitCount) throws InitializationException, WriteException; /** @@ -253,9 +373,7 @@ boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) */ boolean isEnded(); - /** - * Returns whether the sink has data pending that has not been consumed yet. - */ + /** Returns whether the sink has data pending that has not been consumed yet. */ boolean hasPendingData(); /** @@ -266,16 +384,20 @@ boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) */ void setPlaybackParameters(PlaybackParameters playbackParameters); - /** - * Gets the active {@link PlaybackParameters}. - */ + /** Returns the active {@link PlaybackParameters}. */ PlaybackParameters getPlaybackParameters(); + /** Sets whether silences should be skipped in the audio stream. */ + void setSkipSilenceEnabled(boolean skipSilenceEnabled); + + /** Returns whether silences are skipped in the audio stream. */ + boolean getSkipSilenceEnabled(); + /** * Sets attributes for audio playback. If the attributes have changed and if the sink is not * configured for use with tunneling, then it is reset and the audio session id is cleared. - *
<p>
      - * If the sink is configured for use with tunneling then the audio attributes are ignored. The + * + *
<p>
      If the sink is configured for use with tunneling then the audio attributes are ignored. The * sink is not reset and the audio session id is not cleared. The passed attributes will be used * if the sink is later re-configured into non-tunneled mode. * @@ -283,6 +405,13 @@ boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) */ void setAudioAttributes(AudioAttributes audioAttributes); + /** + * Returns the audio attributes used for audio playback, or {@code null} if the sink does not use + * audio attributes. + */ + @Nullable + AudioAttributes getAudioAttributes(); + /** Sets the audio session id. */ void setAudioSessionId(int audioSessionId); @@ -290,14 +419,30 @@ boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) void setAuxEffectInfo(AuxEffectInfo auxEffectInfo); /** - * Enables tunneling, if possible. The sink is reset if tunneling was previously disabled or if - * the audio session id has changed. Enabling tunneling is only possible if the sink is based on a - * platform {@link AudioTrack}, and requires platform API version 21 onwards. + * Sets the preferred audio device. + * + * @param audioDeviceInfo The preferred {@linkplain AudioDeviceInfo audio device}, or null to + * restore the default. + */ + @RequiresApi(23) + default void setPreferredDevice(@Nullable AudioDeviceInfo audioDeviceInfo) {} + + /** + * Sets the offset that is added to the media timestamp before it is passed as {@code + * presentationTimeUs} in {@link #handleBuffer(ByteBuffer, long, int)}. + * + * @param outputStreamOffsetUs The output stream offset in microseconds. + */ + default void setOutputStreamOffsetUs(long outputStreamOffsetUs) {} + + /** + * Enables tunneling, if possible. The sink is reset if tunneling was previously disabled. + * Enabling tunneling is only possible if the sink is based on a platform {@link AudioTrack}, and + * requires platform API version 21 onwards. * - * @param tunnelingAudioSessionId The audio session id to use. * @throws IllegalStateException Thrown if enabling tunneling on platform API version < 21. */ - void enableTunnelingV21(int tunnelingAudioSessionId); + void enableTunnelingV21(); /** * Disables tunneling. If tunneling was previously enabled then the sink is reset and any audio @@ -308,13 +453,11 @@ boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) /** * Sets the playback volume. * - * @param volume A volume in the range [0.0, 1.0]. + * @param volume Linear output gain to apply to all channels. Should be in the range [0.0, 1.0]. */ void setVolume(float volume); - /** - * Pauses playback. - */ + /** Pauses playback. */ void pause(); /** @@ -324,6 +467,18 @@ boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) */ void flush(); - /** Resets the renderer, releasing any resources that it currently holds. */ + /** + * Flushes the sink, after which it is ready to receive buffers from a new playback position. + * + *
<p>
      Does not release the {@link AudioTrack} held by the sink. + * + *
<p>
      This method is experimental, and will be renamed or removed in a future release. + * + *
<p>
      Only for experimental use as part of {@link + * MediaCodecAudioRenderer#experimentalSetEnableKeepAudioTrackOnSeek(boolean)}. + */ + void experimentalFlushWithoutAudioTrackRelease(); + + /** Resets the sink, releasing any resources that it currently holds. */ void reset(); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioTimestampPoller.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioTimestampPoller.java index 200c917954..537cd0791c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioTimestampPoller.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioTimestampPoller.java @@ -15,16 +15,20 @@ */ package com.google.android.exoplayer2.audio; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.annotation.TargetApi; import android.media.AudioTimestamp; import android.media.AudioTrack; import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.util.Util; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** * Polls the {@link AudioTrack} timestamp, if the platform supports it, taking care of polling at @@ -48,6 +52,7 @@ /** Timestamp polling states. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ STATE_INITIALIZING, STATE_TIMESTAMP, @@ -115,6 +120,7 @@ public AudioTimestampPoller(AudioTrack audioTrack) { * @param systemTimeUs The current system time, in microseconds. * @return Whether the timestamp was updated. */ + @TargetApi(19) // audioTimestamp will be null if Util.SDK_INT < 19. public boolean maybePollTimestamp(long systemTimeUs) { if (audioTimestamp == null || (systemTimeUs - lastTimestampSampleTimeUs) < sampleIntervalUs) { return false; @@ -220,6 +226,7 @@ public void reset() { * If {@link #maybePollTimestamp(long)} or {@link #hasTimestamp()} returned {@code true}, returns * the system time at which the latest timestamp was sampled, in microseconds. */ + @TargetApi(19) // audioTimestamp will be null if Util.SDK_INT < 19. public long getTimestampSystemTimeUs() { return audioTimestamp != null ? audioTimestamp.getTimestampSystemTimeUs() : C.TIME_UNSET; } @@ -228,6 +235,7 @@ public long getTimestampSystemTimeUs() { * If {@link #maybePollTimestamp(long)} or {@link #hasTimestamp()} returned {@code true}, returns * the latest timestamp's position in frames. */ + @TargetApi(19) // audioTimestamp will be null if Util.SDK_INT < 19. public long getTimestampPositionFrames() { return audioTimestamp != null ? 
audioTimestamp.getTimestampPositionFrames() : C.POSITION_UNSET; } @@ -257,7 +265,7 @@ private void updateState(@State int state) { } } - @TargetApi(19) + @RequiresApi(19) private static final class AudioTimestampV19 { private final AudioTrack audioTrack; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioTrackPositionTracker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioTrackPositionTracker.java index d944edc197..5122f78395 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioTrackPositionTracker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AudioTrackPositionTracker.java @@ -16,6 +16,9 @@ package com.google.android.exoplayer2.audio; import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.max; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; import android.media.AudioTimestamp; import android.media.AudioTrack; @@ -28,24 +31,34 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.lang.reflect.Method; /** * Wraps an {@link AudioTrack}, exposing a position based on {@link * AudioTrack#getPlaybackHeadPosition()} and {@link AudioTrack#getTimestamp(AudioTimestamp)}. * - *
<p>
      Call {@link #setAudioTrack(AudioTrack, int, int, int)} to set the audio track to wrap. Call - * {@link #mayHandleBuffer(long)} if there is input data to write to the track. If it returns false, - * the audio track position is stabilizing and no data may be written. Call {@link #start()} - * immediately before calling {@link AudioTrack#play()}. Call {@link #pause()} when pausing the - * track. Call {@link #handleEndOfStream(long)} when no more data will be written to the track. When - * the audio track will no longer be used, call {@link #reset()}. + *
<p>
      Call {@link #setAudioTrack(AudioTrack, boolean, int, int, int)} to set the audio track to + * wrap. Call {@link #mayHandleBuffer(long)} if there is input data to write to the track. If it + * returns false, the audio track position is stabilizing and no data may be written. Call {@link + * #start()} immediately before calling {@link AudioTrack#play()}. Call {@link #pause()} when + * pausing the track. Call {@link #handleEndOfStream(long)} when no more data will be written to the + * track. When the audio track will no longer be used, call {@link #reset()}. */ /* package */ final class AudioTrackPositionTracker { /** Listener for position tracker events. */ public interface Listener { + /** + * Called when the position tracker's position has increased for the first time since it was + * last paused or reset. + * + * @param playoutStartSystemTimeMs The approximate derived {@link System#currentTimeMillis()} at + * which playout started. + */ + void onPositionAdvancing(long playoutStartSystemTimeMs); + /** * Called when the frame position is too far from the expected frame position. * @@ -100,13 +113,20 @@ void onSystemTimeUsMismatch( /** {@link AudioTrack} playback states. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({PLAYSTATE_STOPPED, PLAYSTATE_PAUSED, PLAYSTATE_PLAYING}) private @interface PlayState {} - /** @see AudioTrack#PLAYSTATE_STOPPED */ + /** + * @see AudioTrack#PLAYSTATE_STOPPED + */ private static final int PLAYSTATE_STOPPED = AudioTrack.PLAYSTATE_STOPPED; - /** @see AudioTrack#PLAYSTATE_PAUSED */ + /** + * @see AudioTrack#PLAYSTATE_PAUSED + */ private static final int PLAYSTATE_PAUSED = AudioTrack.PLAYSTATE_PAUSED; - /** @see AudioTrack#PLAYSTATE_PLAYING */ + /** + * @see AudioTrack#PLAYSTATE_PLAYING + */ private static final int PLAYSTATE_PLAYING = AudioTrack.PLAYSTATE_PLAYING; /** @@ -129,8 +149,8 @@ void onSystemTimeUsMismatch( private static final long FORCE_RESET_WORKAROUND_TIMEOUT_MS = 200; private static final int MAX_PLAYHEAD_OFFSET_COUNT = 10; - private static final int MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US = 30000; - private static final int MIN_LATENCY_SAMPLE_INTERVAL_US = 500000; + private static final int MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US = 30_000; + private static final int MIN_LATENCY_SAMPLE_INTERVAL_US = 50_0000; private final Listener listener; private final long[] playheadOffsets; @@ -142,6 +162,8 @@ void onSystemTimeUsMismatch( private int outputSampleRate; private boolean needsPassthroughWorkarounds; private long bufferSizeUs; + private float audioTrackPlaybackSpeed; + private boolean notifiedPositionIncreasing; private long smoothedPlayheadOffsetUs; private long lastPlayheadSampleTimeUs; @@ -193,6 +215,7 @@ public AudioTrackPositionTracker(Listener listener) { * track's position, until the next call to {@link #reset()}. * * @param audioTrack The audio track to wrap. + * @param isPassthrough Whether passthrough mode is being used. * @param outputEncoding The encoding of the audio track. * @param outputPcmFrameSize For PCM output encodings, the frame size. The value is ignored * otherwise. 
@@ -200,6 +223,7 @@ public AudioTrackPositionTracker(Listener listener) { */ public void setAudioTrack( AudioTrack audioTrack, + boolean isPassthrough, @C.Encoding int outputEncoding, int outputPcmFrameSize, int bufferSize) { @@ -208,7 +232,7 @@ public void setAudioTrack( this.bufferSize = bufferSize; audioTimestampPoller = new AudioTimestampPoller(audioTrack); outputSampleRate = audioTrack.getSampleRate(); - needsPassthroughWorkarounds = needsPassthroughWorkarounds(outputEncoding); + needsPassthroughWorkarounds = isPassthrough && needsPassthroughWorkarounds(outputEncoding); isOutputPcm = Util.isEncodingLinearPcm(outputEncoding); bufferSizeUs = isOutputPcm ? framesToDurationUs(bufferSize / outputPcmFrameSize) : C.TIME_UNSET; lastRawPlaybackHeadPosition = 0; @@ -219,6 +243,16 @@ public void setAudioTrack( forceResetWorkaroundTimeMs = C.TIME_UNSET; lastLatencySampleTimeUs = 0; latencyUs = 0; + audioTrackPlaybackSpeed = 1f; + } + + public void setAudioTrackPlaybackSpeed(float audioTrackPlaybackSpeed) { + this.audioTrackPlaybackSpeed = audioTrackPlaybackSpeed; + // Extrapolation from the last audio timestamp relies on the audio rate being constant, so we + // reset audio timestamp tracking and wait for a new timestamp. + if (audioTimestampPoller != null) { + audioTimestampPoller.reset(); + } } public long getCurrentPositionUs(boolean sourceEnded) { @@ -237,6 +271,8 @@ public long getCurrentPositionUs(boolean sourceEnded) { long timestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames(); long timestampPositionUs = framesToDurationUs(timestampPositionFrames); long elapsedSinceTimestampUs = systemTimeUs - audioTimestampPoller.getTimestampSystemTimeUs(); + elapsedSinceTimestampUs = + Util.getMediaDurationForPlayoutDuration(elapsedSinceTimestampUs, audioTrackPlaybackSpeed); positionUs = timestampPositionUs + elapsedSinceTimestampUs; } else { if (playheadOffsetCount == 0) { @@ -249,7 +285,7 @@ public long getCurrentPositionUs(boolean sourceEnded) { positionUs = systemTimeUs + smoothedPlayheadOffsetUs; } if (!sourceEnded) { - positionUs = Math.max(0, positionUs - latencyUs); + positionUs = max(0, positionUs - latencyUs); } } @@ -262,7 +298,10 @@ public long getCurrentPositionUs(boolean sourceEnded) { if (elapsedSincePreviousModeUs < MODE_SWITCH_SMOOTHING_DURATION_US) { // Use a ramp to smooth between the old mode and the new one to avoid introducing a sudden // jump if the two modes disagree. - long previousModeProjectedPositionUs = previousModePositionUs + elapsedSincePreviousModeUs; + long previousModeProjectedPositionUs = + previousModePositionUs + + Util.getMediaDurationForPlayoutDuration( + elapsedSincePreviousModeUs, audioTrackPlaybackSpeed); // A ramp consisting of 1000 points distributed over MODE_SWITCH_SMOOTHING_DURATION_US. 
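      // The weight rampPoint runs linearly from 0 to 1000 over MODE_SWITCH_SMOOTHING_DURATION_US,
      // so the reported position blends from previousModeProjectedPositionUs to the position
      // computed for the new mode.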
long rampPoint = (elapsedSincePreviousModeUs * 1000) / MODE_SWITCH_SMOOTHING_DURATION_US; positionUs *= rampPoint; @@ -270,9 +309,21 @@ public long getCurrentPositionUs(boolean sourceEnded) { positionUs /= 1000; } + if (!notifiedPositionIncreasing && positionUs > lastPositionUs) { + notifiedPositionIncreasing = true; + long mediaDurationSinceLastPositionUs = Util.usToMs(positionUs - lastPositionUs); + long playoutDurationSinceLastPositionUs = + Util.getPlayoutDurationForMediaDuration( + mediaDurationSinceLastPositionUs, audioTrackPlaybackSpeed); + long playoutStartSystemTimeMs = + System.currentTimeMillis() - Util.usToMs(playoutDurationSinceLastPositionUs); + listener.onPositionAdvancing(playoutStartSystemTimeMs); + } + lastSystemTimeUs = systemTimeUs; lastPositionUs = positionUs; lastSampleUsedGetTimestampMode = useGetTimestampMode; + return positionUs; } @@ -314,8 +365,8 @@ public boolean mayHandleBuffer(long writtenFrames) { boolean hadData = hasData; hasData = hasPendingData(writtenFrames); - if (hadData && !hasData && playState != PLAYSTATE_STOPPED && listener != null) { - listener.onUnderrun(bufferSize, C.usToMs(bufferSizeUs)); + if (hadData && !hasData && playState != PLAYSTATE_STOPPED) { + listener.onUnderrun(bufferSize, Util.usToMs(bufferSizeUs)); } return true; @@ -362,8 +413,7 @@ public void handleEndOfStream(long writtenFrames) { * @return Whether the audio track has any pending data to play out. */ public boolean hasPendingData(long writtenFrames) { - return writtenFrames > getPlaybackHeadPosition() - || forceHasPendingData(); + return writtenFrames > getPlaybackHeadPosition() || forceHasPendingData(); } /** @@ -385,7 +435,7 @@ public boolean pause() { /** * Resets the position tracker. Should be called when the audio track previously passed to {@link - * #setAudioTrack(AudioTrack, int, int, int)} is no longer in use. + * #setAudioTrack(AudioTrack, boolean, int, int, int)} is no longer in use. */ public void reset() { resetSyncParams(); @@ -430,7 +480,7 @@ private void maybePollAndCheckTimestamp(long systemTimeUs, long playbackPosition return; } - // Perform sanity checks on the timestamp and accept/reject it. + // Check the timestamp and accept/reject it. long audioTimestampSystemTimeUs = audioTimestampPoller.getTimestampSystemTimeUs(); long audioTimestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames(); if (Math.abs(audioTimestampSystemTimeUs - systemTimeUs) > MAX_AUDIO_TIMESTAMP_OFFSET_US) { @@ -464,9 +514,9 @@ private void maybeUpdateLatency(long systemTimeUs) { castNonNull((Integer) getLatencyMethod.invoke(Assertions.checkNotNull(audioTrack))) * 1000L - bufferSizeUs; - // Sanity check that the latency is non-negative. - latencyUs = Math.max(latencyUs, 0); - // Sanity check that the latency isn't too large. + // Check that the latency is non-negative. + latencyUs = max(latencyUs, 0); + // Check that the latency isn't too large. if (latencyUs > MAX_LATENCY_US) { listener.onInvalidLatency(latencyUs); latencyUs = 0; @@ -490,6 +540,7 @@ private void resetSyncParams() { lastPlayheadSampleTimeUs = 0; lastSystemTimeUs = 0; previousModeSystemTimeUs = 0; + notifiedPositionIncreasing = false; } /** @@ -530,7 +581,7 @@ private long getPlaybackHeadPosition() { // Simulate the playback head position up to the total number of frames submitted. 
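      // AudioTrack.stop() lets audio that was already written play out, so progress is
      // extrapolated from elapsed real time at the output sample rate rather than queried
      // from the stopped track.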
long elapsedTimeSinceStopUs = (SystemClock.elapsedRealtime() * 1000) - stopTimestampUs; long framesSinceStop = (elapsedTimeSinceStopUs * outputSampleRate) / C.MICROS_PER_SECOND; - return Math.min(endPlaybackHeadPosition, stopPlaybackHeadPosition + framesSinceStop); + return min(endPlaybackHeadPosition, stopPlaybackHeadPosition + framesSinceStop); } int state = audioTrack.getPlayState(); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AuxEffectInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AuxEffectInfo.java index 968d8acebd..f3ea686210 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AuxEffectInfo.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/AuxEffectInfo.java @@ -50,7 +50,7 @@ public final class AuxEffectInfo { * Creates an instance with the given effect identifier and send level. * * @param effectId The effect identifier. This is the value returned by {@link - * AudioEffect#getId()} on the effect, or {@value NO_AUX_EFFECT_ID} which represents no + * AudioEffect#getId()} on the effect, or {@value #NO_AUX_EFFECT_ID} which represents no * effect. This value is passed to {@link AudioTrack#attachAuxEffect(int)} on the underlying * audio track. * @param sendLevel The send level for the effect, where 0 represents no effect and a value of 1 diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/BaseAudioProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/BaseAudioProcessor.java index 41cb436504..b42d5f0767 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/BaseAudioProcessor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/BaseAudioProcessor.java @@ -16,6 +16,7 @@ package com.google.android.exoplayer2.audio; import androidx.annotation.CallSuper; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.nio.ByteBuffer; import java.nio.ByteOrder; @@ -47,6 +48,7 @@ public BaseAudioProcessor() { } @Override + @CanIgnoreReturnValue public final AudioFormat configure(AudioFormat inputAudioFormat) throws UnhandledAudioFormatException { pendingInputAudioFormat = inputAudioFormat; @@ -101,13 +103,13 @@ public final void reset() { } /** - * Replaces the current output buffer with a buffer of at least {@code count} bytes and returns - * it. Callers should write to the returned buffer then {@link ByteBuffer#flip()} it so it can be - * read via {@link #getOutput()}. + * Replaces the current output buffer with a buffer of at least {@code size} bytes and returns it. + * Callers should write to the returned buffer then {@link ByteBuffer#flip()} it so it can be read + * via {@link #getOutput()}. */ - protected final ByteBuffer replaceOutputBuffer(int count) { - if (buffer.capacity() < count) { - buffer = ByteBuffer.allocateDirect(count).order(ByteOrder.nativeOrder()); + protected final ByteBuffer replaceOutputBuffer(int size) { + if (buffer.capacity() < size) { + buffer = ByteBuffer.allocateDirect(size).order(ByteOrder.nativeOrder()); } else { buffer.clear(); } @@ -121,6 +123,7 @@ protected final boolean hasPendingOutput() { } /** Called when the processor is configured for a new input format. 
*/ + @CanIgnoreReturnValue protected AudioFormat onConfigure(AudioFormat inputAudioFormat) throws UnhandledAudioFormatException { return AudioFormat.NOT_SET; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/ChannelMappingAudioProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/ChannelMappingAudioProcessor.java index 4fb6af1af4..503f07a2c3 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/ChannelMappingAudioProcessor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/ChannelMappingAudioProcessor.java @@ -17,14 +17,15 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.util.Assertions; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.nio.ByteBuffer; /** * An {@link AudioProcessor} that applies a mapping from input channels onto specified output * channels. This can be used to reorder, duplicate or discard channels. */ -@SuppressWarnings("nullness:initialization.fields.uninitialized") /* package */ final class ChannelMappingAudioProcessor extends BaseAudioProcessor { @Nullable private int[] pendingOutputChannels; @@ -34,15 +35,17 @@ * Resets the channel mapping. After calling this method, call {@link #configure(AudioFormat)} to * start using the new channel map. * + *
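For example, a stereo left/right swap could look like the following sketch (the processor instance and input format are assumed, and the UnhandledAudioFormatException thrown by configure is left unhandled here):

    // Output channel 0 takes input channel 1, and output channel 1 takes input channel 0.
    channelMappingAudioProcessor.setChannelMap(new int[] {1, 0});
    // The new map only takes effect once the processor is configured again.
    AudioProcessor.AudioFormat outputFormat =
        channelMappingAudioProcessor.configure(stereoInputFormat);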
<p>
      See {@link AudioSink#configure(Format, int, int[])}. + * * @param outputChannels The mapping from input to output channel indices, or {@code null} to * leave the input unchanged. - * @see AudioSink#configure(int, int, int, int, int[], int, int) */ public void setChannelMap(@Nullable int[] outputChannels) { pendingOutputChannels = outputChannels; } @Override + @CanIgnoreReturnValue public AudioFormat onConfigure(AudioFormat inputAudioFormat) throws UnhandledAudioFormatException { @Nullable int[] outputChannels = pendingOutputChannels; @@ -95,5 +98,4 @@ protected void onReset() { outputChannels = null; pendingOutputChannels = null; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/DecoderAudioRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/DecoderAudioRenderer.java new file mode 100644 index 0000000000..cb1d3d5030 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/DecoderAudioRenderer.java @@ -0,0 +1,865 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.audio; + +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_DRM_SESSION_CHANGED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_REUSE_NOT_IMPLEMENTED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_NO; +import static com.google.android.exoplayer2.source.SampleStream.FLAG_REQUIRE_FORMAT; +import static com.google.common.base.MoreObjects.firstNonNull; +import static java.lang.Math.max; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.media.AudioDeviceInfo; +import android.os.Handler; +import android.os.SystemClock; +import androidx.annotation.CallSuper; +import androidx.annotation.DoNotInline; +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.BaseRenderer; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ExoPlaybackException; +import com.google.android.exoplayer2.ExoPlayer; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.FormatHolder; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.PlaybackParameters; +import com.google.android.exoplayer2.PlayerMessage.Target; +import com.google.android.exoplayer2.RendererCapabilities; +import com.google.android.exoplayer2.audio.AudioRendererEventListener.EventDispatcher; +import com.google.android.exoplayer2.audio.AudioSink.SinkFormatSupport; +import com.google.android.exoplayer2.decoder.CryptoConfig; +import com.google.android.exoplayer2.decoder.Decoder; +import com.google.android.exoplayer2.decoder.DecoderCounters; +import com.google.android.exoplayer2.decoder.DecoderException; +import 
com.google.android.exoplayer2.decoder.DecoderInputBuffer; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation; +import com.google.android.exoplayer2.decoder.SimpleDecoderOutputBuffer; +import com.google.android.exoplayer2.drm.DrmSession; +import com.google.android.exoplayer2.drm.DrmSession.DrmSessionException; +import com.google.android.exoplayer2.source.SampleStream.ReadDataResult; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MediaClock; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.TraceUtil; +import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.ForOverride; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +/** + * Decodes and renders audio using a {@link Decoder}. + * + *

+ * <p>This renderer accepts the following messages sent via {@link ExoPlayer#createMessage(Target)}
+ * on the playback thread:
+ *
+ * <ul>
+ *   <li>Message with type {@link #MSG_SET_VOLUME} to set the volume. The message payload should be
+ *       a {@link Float} with 0 being silence and 1 being unity gain.
+ *   <li>Message with type {@link #MSG_SET_AUDIO_ATTRIBUTES} to set the audio attributes. The
+ *       message payload should be an {@link AudioAttributes} instance that will configure the
+ *       underlying audio track.
+ *   <li>Message with type {@link #MSG_SET_AUX_EFFECT_INFO} to set the auxiliary effect. The message
+ *       payload should be an {@link AuxEffectInfo} instance that will configure the underlying
+ *       audio track.
+ *   <li>Message with type {@link #MSG_SET_SKIP_SILENCE_ENABLED} to enable or disable skipping
+ *       silences. The message payload should be a {@link Boolean}.
+ *   <li>Message with type {@link #MSG_SET_AUDIO_SESSION_ID} to set the audio session ID. The
+ *       message payload should be a session ID {@link Integer} that will be attached to the
+ *       underlying audio track.
+ * </ul>
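+ *
+ * <p>For example, assuming {@code player} is the {@link ExoPlayer} instance and {@code
+ * audioRenderer} is an instance of this renderer, the volume could be set roughly as follows:
+ *
+ * <pre>{@code
+ * player
+ *     .createMessage(audioRenderer)
+ *     .setType(Renderer.MSG_SET_VOLUME)
+ *     .setPayload(0.5f)
+ *     .send();
+ * }</pre>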
      + */ +public abstract class DecoderAudioRenderer< + T extends + Decoder< + DecoderInputBuffer, + ? extends SimpleDecoderOutputBuffer, + ? extends DecoderException>> + extends BaseRenderer implements MediaClock { + + private static final String TAG = "DecoderAudioRenderer"; + + @Documented + @Retention(RetentionPolicy.SOURCE) + @java.lang.annotation.Target(TYPE_USE) + @IntDef({ + REINITIALIZATION_STATE_NONE, + REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM, + REINITIALIZATION_STATE_WAIT_END_OF_STREAM + }) + private @interface ReinitializationState {} + /** The decoder does not need to be re-initialized. */ + private static final int REINITIALIZATION_STATE_NONE = 0; + /** + * The input format has changed in a way that requires the decoder to be re-initialized, but we + * haven't yet signaled an end of stream to the existing decoder. We need to do so in order to + * ensure that it outputs any remaining buffers before we release it. + */ + private static final int REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM = 1; + /** + * The input format has changed in a way that requires the decoder to be re-initialized, and we've + * signaled an end of stream to the existing decoder. We're waiting for the decoder to output an + * end of stream signal to indicate that it has output any remaining buffers before we release it. + */ + private static final int REINITIALIZATION_STATE_WAIT_END_OF_STREAM = 2; + /** + * Generally there is zero or one pending output stream offset. We track more offsets to allow for + * pending output streams that have fewer frames than the codec latency. + */ + private static final int MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT = 10; + + private final EventDispatcher eventDispatcher; + private final AudioSink audioSink; + private final DecoderInputBuffer flagsOnlyBuffer; + + private DecoderCounters decoderCounters; + private Format inputFormat; + private int encoderDelay; + private int encoderPadding; + + private boolean experimentalKeepAudioTrackOnSeek; + private boolean firstStreamSampleRead; + + @Nullable private T decoder; + + @Nullable private DecoderInputBuffer inputBuffer; + @Nullable private SimpleDecoderOutputBuffer outputBuffer; + @Nullable private DrmSession decoderDrmSession; + @Nullable private DrmSession sourceDrmSession; + + private @ReinitializationState int decoderReinitializationState; + private boolean decoderReceivedBuffers; + private boolean audioTrackNeedsConfigure; + + private long currentPositionUs; + private boolean allowFirstBufferPositionDiscontinuity; + private boolean allowPositionDiscontinuity; + private boolean inputStreamEnded; + private boolean outputStreamEnded; + private long outputStreamOffsetUs; + private final long[] pendingOutputStreamOffsetsUs; + private int pendingOutputStreamOffsetCount; + + public DecoderAudioRenderer() { + this(/* eventHandler= */ null, /* eventListener= */ null); + } + + /** + * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be + * null if delivery of events is not required. + * @param eventListener A listener of events. May be null if delivery of events is not required. + * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output. + */ + public DecoderAudioRenderer( + @Nullable Handler eventHandler, + @Nullable AudioRendererEventListener eventListener, + AudioProcessor... 
audioProcessors) { + this(eventHandler, eventListener, /* audioCapabilities= */ null, audioProcessors); + } + + /** + * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be + * null if delivery of events is not required. + * @param eventListener A listener of events. May be null if delivery of events is not required. + * @param audioCapabilities The audio capabilities for playback on this device. Use {@link + * AudioCapabilities#DEFAULT_AUDIO_CAPABILITIES} if default capabilities (no encoded audio + * passthrough support) should be assumed. + * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output. + */ + public DecoderAudioRenderer( + @Nullable Handler eventHandler, + @Nullable AudioRendererEventListener eventListener, + AudioCapabilities audioCapabilities, + AudioProcessor... audioProcessors) { + this( + eventHandler, + eventListener, + new DefaultAudioSink.Builder() + .setAudioCapabilities( // For backward compatibility, null == default. + firstNonNull(audioCapabilities, AudioCapabilities.DEFAULT_AUDIO_CAPABILITIES)) + .setAudioProcessors(audioProcessors) + .build()); + } + + /** + * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be + * null if delivery of events is not required. + * @param eventListener A listener of events. May be null if delivery of events is not required. + * @param audioSink The sink to which audio will be output. + */ + public DecoderAudioRenderer( + @Nullable Handler eventHandler, + @Nullable AudioRendererEventListener eventListener, + AudioSink audioSink) { + super(C.TRACK_TYPE_AUDIO); + eventDispatcher = new EventDispatcher(eventHandler, eventListener); + this.audioSink = audioSink; + audioSink.setListener(new AudioSinkListener()); + flagsOnlyBuffer = DecoderInputBuffer.newNoDataInstance(); + decoderReinitializationState = REINITIALIZATION_STATE_NONE; + audioTrackNeedsConfigure = true; + setOutputStreamOffsetUs(C.TIME_UNSET); + pendingOutputStreamOffsetsUs = new long[MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT]; + } + + /** + * Sets whether to enable the experimental feature that keeps and flushes the {@link + * android.media.AudioTrack} when a seek occurs, as opposed to releasing and reinitialising. Off + * by default. + * + *

      This method is experimental, and will be renamed or removed in a future release. + * + * @param enableKeepAudioTrackOnSeek Whether to keep the {@link android.media.AudioTrack} on seek. + */ + public void experimentalSetEnableKeepAudioTrackOnSeek(boolean enableKeepAudioTrackOnSeek) { + this.experimentalKeepAudioTrackOnSeek = enableKeepAudioTrackOnSeek; + } + + @Override + @Nullable + public MediaClock getMediaClock() { + return this; + } + + @Override + public final @Capabilities int supportsFormat(Format format) { + if (!MimeTypes.isAudio(format.sampleMimeType)) { + return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE); + } + @C.FormatSupport int formatSupport = supportsFormatInternal(format); + if (formatSupport <= C.FORMAT_UNSUPPORTED_DRM) { + return RendererCapabilities.create(formatSupport); + } + @TunnelingSupport + int tunnelingSupport = Util.SDK_INT >= 21 ? TUNNELING_SUPPORTED : TUNNELING_NOT_SUPPORTED; + return RendererCapabilities.create(formatSupport, ADAPTIVE_NOT_SEAMLESS, tunnelingSupport); + } + + /** + * Returns the {@link C.FormatSupport} for the given {@link Format}. + * + * @param format The format, which has an audio {@link Format#sampleMimeType}. + * @return The {@link C.FormatSupport} for this {@link Format}. + */ + @ForOverride + protected abstract @C.FormatSupport int supportsFormatInternal(Format format); + + /** + * Returns whether the renderer's {@link AudioSink} supports a given {@link Format}. + * + * @see AudioSink#supportsFormat(Format) + */ + protected final boolean sinkSupportsFormat(Format format) { + return audioSink.supportsFormat(format); + } + + /** + * Returns the level of support that the renderer's {@link AudioSink} provides for a given {@link + * Format}. + * + * @see AudioSink#getFormatSupport(Format) (Format) + */ + protected final @SinkFormatSupport int getSinkFormatSupport(Format format) { + return audioSink.getFormatSupport(format); + } + + @Override + public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException { + if (outputStreamEnded) { + try { + audioSink.playToEndOfStream(); + } catch (AudioSink.WriteException e) { + throw createRendererException( + e, e.format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED); + } + return; + } + + // Try and read a format if we don't have one already. + if (inputFormat == null) { + // We don't have a format yet, so try and read one. + FormatHolder formatHolder = getFormatHolder(); + flagsOnlyBuffer.clear(); + @ReadDataResult int result = readSource(formatHolder, flagsOnlyBuffer, FLAG_REQUIRE_FORMAT); + if (result == C.RESULT_FORMAT_READ) { + onInputFormatChanged(formatHolder); + } else if (result == C.RESULT_BUFFER_READ) { + // End of stream read having not read a format. + Assertions.checkState(flagsOnlyBuffer.isEndOfStream()); + inputStreamEnded = true; + try { + processEndOfStream(); + } catch (AudioSink.WriteException e) { + throw createRendererException( + e, /* format= */ null, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED); + } + return; + } else { + // We still don't have a format and can't make progress without one. + return; + } + } + + // If we don't have a decoder yet, we need to instantiate one. + maybeInitDecoder(); + + if (decoder != null) { + try { + // Rendering loop. 
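+        // Drain any decoded output into the AudioSink first, then feed the decoder as much input
+        // as it will accept. render() is called repeatedly on the playback thread, so neither
+        // loop needs to block.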
+ TraceUtil.beginSection("drainAndFeed"); + while (drainOutputBuffer()) {} + while (feedInputBuffer()) {} + TraceUtil.endSection(); + } catch (DecoderException e) { + // Can happen with dequeueOutputBuffer, dequeueInputBuffer, queueInputBuffer + Log.e(TAG, "Audio codec error", e); + eventDispatcher.audioCodecError(e); + throw createRendererException(e, inputFormat, PlaybackException.ERROR_CODE_DECODING_FAILED); + } catch (AudioSink.ConfigurationException e) { + throw createRendererException( + e, e.format, PlaybackException.ERROR_CODE_AUDIO_TRACK_INIT_FAILED); + } catch (AudioSink.InitializationException e) { + throw createRendererException( + e, e.format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_INIT_FAILED); + } catch (AudioSink.WriteException e) { + throw createRendererException( + e, e.format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED); + } + decoderCounters.ensureUpdated(); + } + } + + /** See {@link AudioSink.Listener#onPositionDiscontinuity()}. */ + @CallSuper + @ForOverride + protected void onPositionDiscontinuity() { + // We are out of sync so allow currentPositionUs to jump backwards. + allowPositionDiscontinuity = true; + } + + /** + * Creates a decoder for the given format. + * + * @param format The format for which a decoder is required. + * @param cryptoConfig The {@link CryptoConfig} object required for decoding encrypted content. + * May be null and can be ignored if decoder does not handle encrypted content. + * @return The decoder. + * @throws DecoderException If an error occurred creating a suitable decoder. + */ + @ForOverride + protected abstract T createDecoder(Format format, @Nullable CryptoConfig cryptoConfig) + throws DecoderException; + + /** + * Returns the format of audio buffers output by the decoder. Will not be called until the first + * output buffer has been dequeued, so the decoder may use input data to determine the format. + * + * @param decoder The decoder. + */ + @ForOverride + protected abstract Format getOutputFormat(T decoder); + + /** + * Evaluates whether the existing decoder can be reused for a new {@link Format}. + * + *

      The default implementation does not allow decoder reuse. + * + * @param decoderName The name of the decoder. + * @param oldFormat The previous format. + * @param newFormat The new format. + * @return The result of the evaluation. + */ + @ForOverride + protected DecoderReuseEvaluation canReuseDecoder( + String decoderName, Format oldFormat, Format newFormat) { + return new DecoderReuseEvaluation( + decoderName, oldFormat, newFormat, REUSE_RESULT_NO, DISCARD_REASON_REUSE_NOT_IMPLEMENTED); + } + + private boolean drainOutputBuffer() + throws ExoPlaybackException, DecoderException, AudioSink.ConfigurationException, + AudioSink.InitializationException, AudioSink.WriteException { + if (outputBuffer == null) { + outputBuffer = decoder.dequeueOutputBuffer(); + if (outputBuffer == null) { + return false; + } + if (outputBuffer.skippedOutputBufferCount > 0) { + decoderCounters.skippedOutputBufferCount += outputBuffer.skippedOutputBufferCount; + audioSink.handleDiscontinuity(); + } + if (outputBuffer.isFirstSample()) { + processFirstSampleOfStream(); + } + } + + if (outputBuffer.isEndOfStream()) { + if (decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM) { + // We're waiting to re-initialize the decoder, and have now processed all final buffers. + releaseDecoder(); + maybeInitDecoder(); + // The audio track may need to be recreated once the new output format is known. + audioTrackNeedsConfigure = true; + } else { + outputBuffer.release(); + outputBuffer = null; + try { + processEndOfStream(); + } catch (AudioSink.WriteException e) { + throw createRendererException( + e, e.format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED); + } + } + return false; + } + + if (audioTrackNeedsConfigure) { + Format outputFormat = + getOutputFormat(decoder) + .buildUpon() + .setEncoderDelay(encoderDelay) + .setEncoderPadding(encoderPadding) + .build(); + audioSink.configure(outputFormat, /* specifiedBufferSize= */ 0, /* outputChannels= */ null); + audioTrackNeedsConfigure = false; + } + + if (audioSink.handleBuffer( + outputBuffer.data, outputBuffer.timeUs, /* encodedAccessUnitCount= */ 1)) { + decoderCounters.renderedOutputBufferCount++; + outputBuffer.release(); + outputBuffer = null; + return true; + } + + return false; + } + + private void processFirstSampleOfStream() { + audioSink.handleDiscontinuity(); + if (pendingOutputStreamOffsetCount != 0) { + setOutputStreamOffsetUs(pendingOutputStreamOffsetsUs[0]); + pendingOutputStreamOffsetCount--; + System.arraycopy( + pendingOutputStreamOffsetsUs, + /* srcPos= */ 1, + pendingOutputStreamOffsetsUs, + /* destPos= */ 0, + pendingOutputStreamOffsetCount); + } + } + + private void setOutputStreamOffsetUs(long outputStreamOffsetUs) { + this.outputStreamOffsetUs = outputStreamOffsetUs; + if (outputStreamOffsetUs != C.TIME_UNSET) { + audioSink.setOutputStreamOffsetUs(outputStreamOffsetUs); + } + } + + private boolean feedInputBuffer() throws DecoderException, ExoPlaybackException { + if (decoder == null + || decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM + || inputStreamEnded) { + // We need to reinitialize the decoder or the input stream has ended. 
+ return false; + } + + if (inputBuffer == null) { + inputBuffer = decoder.dequeueInputBuffer(); + if (inputBuffer == null) { + return false; + } + } + + if (decoderReinitializationState == REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM) { + inputBuffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM); + decoder.queueInputBuffer(inputBuffer); + inputBuffer = null; + decoderReinitializationState = REINITIALIZATION_STATE_WAIT_END_OF_STREAM; + return false; + } + + FormatHolder formatHolder = getFormatHolder(); + switch (readSource(formatHolder, inputBuffer, /* readFlags= */ 0)) { + case C.RESULT_NOTHING_READ: + return false; + case C.RESULT_FORMAT_READ: + onInputFormatChanged(formatHolder); + return true; + case C.RESULT_BUFFER_READ: + if (inputBuffer.isEndOfStream()) { + inputStreamEnded = true; + decoder.queueInputBuffer(inputBuffer); + inputBuffer = null; + return false; + } + if (!firstStreamSampleRead) { + firstStreamSampleRead = true; + inputBuffer.addFlag(C.BUFFER_FLAG_FIRST_SAMPLE); + } + inputBuffer.flip(); + inputBuffer.format = inputFormat; + onQueueInputBuffer(inputBuffer); + decoder.queueInputBuffer(inputBuffer); + decoderReceivedBuffers = true; + decoderCounters.queuedInputBufferCount++; + inputBuffer = null; + return true; + default: + throw new IllegalStateException(); + } + } + + private void processEndOfStream() throws AudioSink.WriteException { + outputStreamEnded = true; + audioSink.playToEndOfStream(); + } + + private void flushDecoder() throws ExoPlaybackException { + if (decoderReinitializationState != REINITIALIZATION_STATE_NONE) { + releaseDecoder(); + maybeInitDecoder(); + } else { + inputBuffer = null; + if (outputBuffer != null) { + outputBuffer.release(); + outputBuffer = null; + } + decoder.flush(); + decoderReceivedBuffers = false; + } + } + + @Override + public boolean isEnded() { + return outputStreamEnded && audioSink.isEnded(); + } + + @Override + public boolean isReady() { + return audioSink.hasPendingData() + || (inputFormat != null && (isSourceReady() || outputBuffer != null)); + } + + @Override + public long getPositionUs() { + if (getState() == STATE_STARTED) { + updateCurrentPosition(); + } + return currentPositionUs; + } + + @Override + public void setPlaybackParameters(PlaybackParameters playbackParameters) { + audioSink.setPlaybackParameters(playbackParameters); + } + + @Override + public PlaybackParameters getPlaybackParameters() { + return audioSink.getPlaybackParameters(); + } + + @Override + protected void onEnabled(boolean joining, boolean mayRenderStartOfStream) + throws ExoPlaybackException { + decoderCounters = new DecoderCounters(); + eventDispatcher.enabled(decoderCounters); + if (getConfiguration().tunneling) { + audioSink.enableTunnelingV21(); + } else { + audioSink.disableTunneling(); + } + audioSink.setPlayerId(getPlayerId()); + } + + @Override + protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException { + if (experimentalKeepAudioTrackOnSeek) { + audioSink.experimentalFlushWithoutAudioTrackRelease(); + } else { + audioSink.flush(); + } + + currentPositionUs = positionUs; + allowFirstBufferPositionDiscontinuity = true; + allowPositionDiscontinuity = true; + inputStreamEnded = false; + outputStreamEnded = false; + if (decoder != null) { + flushDecoder(); + } + } + + @Override + protected void onStarted() { + audioSink.play(); + } + + @Override + protected void onStopped() { + updateCurrentPosition(); + audioSink.pause(); + } + + @Override + protected void onDisabled() { + inputFormat = null; + 
audioTrackNeedsConfigure = true; + setOutputStreamOffsetUs(C.TIME_UNSET); + try { + setSourceDrmSession(null); + releaseDecoder(); + audioSink.reset(); + } finally { + eventDispatcher.disabled(decoderCounters); + } + } + + @Override + protected void onStreamChanged(Format[] formats, long startPositionUs, long offsetUs) + throws ExoPlaybackException { + super.onStreamChanged(formats, startPositionUs, offsetUs); + firstStreamSampleRead = false; + if (outputStreamOffsetUs == C.TIME_UNSET) { + setOutputStreamOffsetUs(offsetUs); + } else { + if (pendingOutputStreamOffsetCount == pendingOutputStreamOffsetsUs.length) { + Log.w( + TAG, + "Too many stream changes, so dropping offset: " + + pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1]); + } else { + pendingOutputStreamOffsetCount++; + } + pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1] = offsetUs; + } + } + + @Override + public void handleMessage(@MessageType int messageType, @Nullable Object message) + throws ExoPlaybackException { + switch (messageType) { + case MSG_SET_VOLUME: + audioSink.setVolume((Float) message); + break; + case MSG_SET_AUDIO_ATTRIBUTES: + AudioAttributes audioAttributes = (AudioAttributes) message; + audioSink.setAudioAttributes(audioAttributes); + break; + case MSG_SET_AUX_EFFECT_INFO: + AuxEffectInfo auxEffectInfo = (AuxEffectInfo) message; + audioSink.setAuxEffectInfo(auxEffectInfo); + break; + case MSG_SET_SKIP_SILENCE_ENABLED: + audioSink.setSkipSilenceEnabled((Boolean) message); + break; + case MSG_SET_AUDIO_SESSION_ID: + audioSink.setAudioSessionId((Integer) message); + break; + case MSG_SET_PREFERRED_AUDIO_DEVICE: + if (Util.SDK_INT >= 23) { + Api23.setAudioSinkPreferredDevice(audioSink, message); + } + break; + case MSG_SET_CAMERA_MOTION_LISTENER: + case MSG_SET_CHANGE_FRAME_RATE_STRATEGY: + case MSG_SET_SCALING_MODE: + case MSG_SET_VIDEO_FRAME_METADATA_LISTENER: + case MSG_SET_VIDEO_OUTPUT: + case MSG_SET_WAKEUP_LISTENER: + default: + super.handleMessage(messageType, message); + break; + } + } + + private void maybeInitDecoder() throws ExoPlaybackException { + if (decoder != null) { + return; + } + + setDecoderDrmSession(sourceDrmSession); + + CryptoConfig cryptoConfig = null; + if (decoderDrmSession != null) { + cryptoConfig = decoderDrmSession.getCryptoConfig(); + if (cryptoConfig == null) { + DrmSessionException drmError = decoderDrmSession.getError(); + if (drmError != null) { + // Continue for now. We may be able to avoid failure if a new input format causes the + // session to be replaced without it having been used. + } else { + // The drm session isn't open yet. 
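+          // Bail out for now; render() will call maybeInitDecoder() again once the session
+          // opens or reports an error.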
+ return; + } + } + } + + try { + long codecInitializingTimestamp = SystemClock.elapsedRealtime(); + TraceUtil.beginSection("createAudioDecoder"); + decoder = createDecoder(inputFormat, cryptoConfig); + TraceUtil.endSection(); + long codecInitializedTimestamp = SystemClock.elapsedRealtime(); + eventDispatcher.decoderInitialized( + decoder.getName(), + codecInitializedTimestamp, + codecInitializedTimestamp - codecInitializingTimestamp); + decoderCounters.decoderInitCount++; + } catch (DecoderException e) { + Log.e(TAG, "Audio codec error", e); + eventDispatcher.audioCodecError(e); + throw createRendererException( + e, inputFormat, PlaybackException.ERROR_CODE_DECODER_INIT_FAILED); + } catch (OutOfMemoryError e) { + throw createRendererException( + e, inputFormat, PlaybackException.ERROR_CODE_DECODER_INIT_FAILED); + } + } + + private void releaseDecoder() { + inputBuffer = null; + outputBuffer = null; + decoderReinitializationState = REINITIALIZATION_STATE_NONE; + decoderReceivedBuffers = false; + if (decoder != null) { + decoderCounters.decoderReleaseCount++; + decoder.release(); + eventDispatcher.decoderReleased(decoder.getName()); + decoder = null; + } + setDecoderDrmSession(null); + } + + private void setSourceDrmSession(@Nullable DrmSession session) { + DrmSession.replaceSession(sourceDrmSession, session); + sourceDrmSession = session; + } + + private void setDecoderDrmSession(@Nullable DrmSession session) { + DrmSession.replaceSession(decoderDrmSession, session); + decoderDrmSession = session; + } + + private void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException { + Format newFormat = Assertions.checkNotNull(formatHolder.format); + setSourceDrmSession(formatHolder.drmSession); + Format oldFormat = inputFormat; + inputFormat = newFormat; + encoderDelay = newFormat.encoderDelay; + encoderPadding = newFormat.encoderPadding; + + if (decoder == null) { + maybeInitDecoder(); + eventDispatcher.inputFormatChanged(inputFormat, /* decoderReuseEvaluation= */ null); + return; + } + + DecoderReuseEvaluation evaluation; + if (sourceDrmSession != decoderDrmSession) { + evaluation = + new DecoderReuseEvaluation( + decoder.getName(), + oldFormat, + newFormat, + REUSE_RESULT_NO, + DISCARD_REASON_DRM_SESSION_CHANGED); + } else { + evaluation = canReuseDecoder(decoder.getName(), oldFormat, newFormat); + } + + if (evaluation.result == REUSE_RESULT_NO) { + if (decoderReceivedBuffers) { + // Signal end of stream and wait for any final output buffers before re-initialization. + decoderReinitializationState = REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM; + } else { + // There aren't any final output buffers, so release the decoder immediately. + releaseDecoder(); + maybeInitDecoder(); + audioTrackNeedsConfigure = true; + } + } + eventDispatcher.inputFormatChanged(inputFormat, evaluation); + } + + protected void onQueueInputBuffer(DecoderInputBuffer buffer) { + if (allowFirstBufferPositionDiscontinuity && !buffer.isDecodeOnly()) { + // TODO: Remove this hack once we have a proper fix for [Internal: b/71876314]. + // Allow the position to jump if the first presentable input buffer has a timestamp that + // differs significantly from what was expected. 
+ if (Math.abs(buffer.timeUs - currentPositionUs) > 500000) { + currentPositionUs = buffer.timeUs; + } + allowFirstBufferPositionDiscontinuity = false; + } + } + + private void updateCurrentPosition() { + long newCurrentPositionUs = audioSink.getCurrentPositionUs(isEnded()); + if (newCurrentPositionUs != AudioSink.CURRENT_POSITION_NOT_SET) { + currentPositionUs = + allowPositionDiscontinuity + ? newCurrentPositionUs + : max(currentPositionUs, newCurrentPositionUs); + allowPositionDiscontinuity = false; + } + } + + private final class AudioSinkListener implements AudioSink.Listener { + + @Override + public void onPositionDiscontinuity() { + DecoderAudioRenderer.this.onPositionDiscontinuity(); + } + + @Override + public void onPositionAdvancing(long playoutStartSystemTimeMs) { + eventDispatcher.positionAdvancing(playoutStartSystemTimeMs); + } + + @Override + public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) { + eventDispatcher.underrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs); + } + + @Override + public void onSkipSilenceEnabledChanged(boolean skipSilenceEnabled) { + eventDispatcher.skipSilenceEnabledChanged(skipSilenceEnabled); + } + + @Override + public void onAudioSinkError(Exception audioSinkError) { + Log.e(TAG, "Audio sink error", audioSinkError); + eventDispatcher.audioSinkError(audioSinkError); + } + } + + @RequiresApi(23) + private static final class Api23 { + private Api23() {} + + @DoNotInline + public static void setAudioSinkPreferredDevice( + AudioSink audioSink, @Nullable Object messagePayload) { + @Nullable AudioDeviceInfo audioDeviceInfo = (AudioDeviceInfo) messagePayload; + audioSink.setPreferredDevice(audioDeviceInfo); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java index 32a819bf81..8016384f69 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java @@ -15,43 +15,75 @@ */ package com.google.android.exoplayer2.audio; +import static com.google.android.exoplayer2.audio.AudioCapabilities.DEFAULT_AUDIO_CAPABILITIES; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Util.constrainValue; +import static com.google.common.base.MoreObjects.firstNonNull; +import static java.lang.Math.max; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.annotation.SuppressLint; -import android.annotation.TargetApi; +import android.media.AudioDeviceInfo; import android.media.AudioFormat; import android.media.AudioManager; import android.media.AudioTrack; -import android.os.ConditionVariable; +import android.media.PlaybackParams; +import android.media.metrics.LogSessionId; +import android.os.Handler; +import android.os.Looper; import android.os.SystemClock; +import android.util.Pair; +import androidx.annotation.DoNotInline; +import androidx.annotation.GuardedBy; import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ExoPlayer.AudioOffloadListener; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.PlaybackParameters; +import com.google.android.exoplayer2.analytics.PlayerId; import 
com.google.android.exoplayer2.audio.AudioProcessor.UnhandledAudioFormatException; -import com.google.android.exoplayer2.extractor.MpegAudioHeader; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Clock; +import com.google.android.exoplayer2.util.ConditionVariable; import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import com.google.errorprone.annotations.InlineMe; +import com.google.errorprone.annotations.InlineMeValidationDisabled; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collections; +import java.util.concurrent.ExecutorService; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; /** * Plays audio data. The implementation delegates to an {@link AudioTrack} and handles playback * position smoothing, non-blocking writes and reconfiguration. - *

      - * If tunneling mode is enabled, care must be taken that audio processors do not output buffers with - * a different duration than their input, and buffer processors must produce output corresponding to - * their last input immediately after that input is queued. This means that, for example, speed - * adjustment is not possible while using tunneling. + * + *

      If tunneling mode is enabled, care must be taken that audio processors do not output buffers + * with a different duration than their input, and buffer processors must produce output + * corresponding to their last input immediately after that input is queued. This means that, for + * example, speed adjustment is not possible while using tunneling. */ public final class DefaultAudioSink implements AudioSink { + /** + * If an attempt to instantiate an AudioTrack with a buffer size larger than this value fails, a + * second attempt is made using this buffer size. + */ + private static final int AUDIO_TRACK_SMALLER_BUFFER_RETRY_SIZE = 1_000_000; + /** * Thrown when the audio track has provided a spurious timestamp, if {@link * #failOnSpuriousAudioTimestamp} is set. @@ -66,52 +98,20 @@ public static final class InvalidAudioTrackTimestampException extends RuntimeExc private InvalidAudioTrackTimestampException(String message) { super(message); } - } /** - * Provides a chain of audio processors, which are used for any user-defined processing and - * applying playback parameters (if supported). Because applying playback parameters can skip and - * stretch/compress audio, the sink will query the chain for information on how to transform its - * output position to map it onto a media position, via {@link #getMediaDuration(long)} and {@link - * #getSkippedOutputFrameCount()}. + * @deprecated Use {@link com.google.android.exoplayer2.audio.AudioProcessorChain}. */ - public interface AudioProcessorChain { - - /** - * Returns the fixed chain of audio processors that will process audio. This method is called - * once during initialization, but audio processors may change state to become active/inactive - * during playback. - */ - AudioProcessor[] getAudioProcessors(); - - /** - * Configures audio processors to apply the specified playback parameters immediately, returning - * the new parameters, which may differ from those passed in. Only called when processors have - * no input pending. - * - * @param playbackParameters The playback parameters to try to apply. - * @return The playback parameters that were actually applied. - */ - PlaybackParameters applyPlaybackParameters(PlaybackParameters playbackParameters); - - /** - * Scales the specified playout duration to take into account speedup due to audio processing, - * returning an input media duration, in arbitrary units. - */ - long getMediaDuration(long playoutDuration); - - /** - * Returns the number of output audio frames skipped since the audio processors were last - * flushed. - */ - long getSkippedOutputFrameCount(); - } + @Deprecated + public interface AudioProcessorChain + extends com.google.android.exoplayer2.audio.AudioProcessorChain {} /** * The default audio processor chain, which applies a (possibly empty) chain of user-defined audio * processors followed by {@link SilenceSkippingAudioProcessor} and {@link SonicAudioProcessor}. 
*/ + @SuppressWarnings("deprecation") public static class DefaultAudioProcessorChain implements AudioProcessorChain { private final AudioProcessor[] audioProcessors; @@ -156,16 +156,20 @@ public AudioProcessor[] getAudioProcessors() { @Override public PlaybackParameters applyPlaybackParameters(PlaybackParameters playbackParameters) { - silenceSkippingAudioProcessor.setEnabled(playbackParameters.skipSilence); - return new PlaybackParameters( - sonicAudioProcessor.setSpeed(playbackParameters.speed), - sonicAudioProcessor.setPitch(playbackParameters.pitch), - playbackParameters.skipSilence); + sonicAudioProcessor.setSpeed(playbackParameters.speed); + sonicAudioProcessor.setPitch(playbackParameters.pitch); + return playbackParameters; + } + + @Override + public boolean applySkipSilenceEnabled(boolean skipSilenceEnabled) { + silenceSkippingAudioProcessor.setEnabled(skipSilenceEnabled); + return skipSilenceEnabled; } @Override public long getMediaDuration(long playoutDuration) { - return sonicAudioProcessor.scaleDurationForSpeedup(playoutDuration); + return sonicAudioProcessor.getMediaDuration(playoutDuration); } @Override @@ -174,81 +178,286 @@ public long getSkippedOutputFrameCount() { } } - /** - * A minimum length for the {@link AudioTrack} buffer, in microseconds. - */ - private static final long MIN_BUFFER_DURATION_US = 250000; - /** - * A maximum length for the {@link AudioTrack} buffer, in microseconds. - */ - private static final long MAX_BUFFER_DURATION_US = 750000; - /** - * The length for passthrough {@link AudioTrack} buffers, in microseconds. - */ - private static final long PASSTHROUGH_BUFFER_DURATION_US = 250000; - /** - * A multiplication factor to apply to the minimum buffer size requested by the underlying - * {@link AudioTrack}. - */ - private static final int BUFFER_MULTIPLICATION_FACTOR = 4; + /** Provides the buffer size to use when creating an {@link AudioTrack}. */ + public interface AudioTrackBufferSizeProvider { + /** Default instance. */ + AudioTrackBufferSizeProvider DEFAULT = + new DefaultAudioTrackBufferSizeProvider.Builder().build(); + /** + * Returns the buffer size to use when creating an {@link AudioTrack} for a specific format and + * output mode. + * + * @param minBufferSizeInBytes The minimum buffer size in bytes required to play this format. + * See {@link AudioTrack#getMinBufferSize}. + * @param encoding The {@link C.Encoding} of the format. + * @param outputMode How the audio will be played. One of the {@link OutputMode output modes}. + * @param pcmFrameSize The size of the PCM frames if the {@code encoding} is PCM, 1 otherwise, + * in bytes. + * @param sampleRate The sample rate of the format, in Hz. + * @param bitrate The bitrate of the audio stream if the stream is compressed, or {@link + * Format#NO_VALUE} if {@code encoding} is PCM or the bitrate is not known. + * @param maxAudioTrackPlaybackSpeed The maximum speed the content will be played using {@link + * AudioTrack#setPlaybackParams}. 0.5 is 2x slow motion, 1 is real time, 2 is 2x fast + * forward, etc. This will be {@code 1} unless {@link + * Builder#setEnableAudioTrackPlaybackParams} is enabled. + * @return The computed buffer size in bytes. It should always be {@code >= + * minBufferSizeInBytes}. The computed buffer size must contain an integer number of frames: + * {@code bufferSizeInBytes % pcmFrameSize == 0}. 
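+ *     For example, with {@code minBufferSizeInBytes = 4010} and {@code pcmFrameSize = 4}, one
+ *     valid result would be {@code 4012}.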
+ */ + int getBufferSizeInBytes( + int minBufferSizeInBytes, + @C.Encoding int encoding, + @OutputMode int outputMode, + int pcmFrameSize, + int sampleRate, + int bitrate, + double maxAudioTrackPlaybackSpeed); + } - /** To avoid underruns on some devices (e.g., Broadcom 7271), scale up the AC3 buffer duration. */ - private static final int AC3_BUFFER_MULTIPLICATION_FACTOR = 2; + /** A builder to create {@link DefaultAudioSink} instances. */ + public static final class Builder { + private AudioCapabilities audioCapabilities; + @Nullable private com.google.android.exoplayer2.audio.AudioProcessorChain audioProcessorChain; + private boolean enableFloatOutput; + private boolean enableAudioTrackPlaybackParams; + private int offloadMode; + AudioTrackBufferSizeProvider audioTrackBufferSizeProvider; + @Nullable AudioOffloadListener audioOffloadListener; + + /** Creates a new builder. */ + public Builder() { + audioCapabilities = DEFAULT_AUDIO_CAPABILITIES; + offloadMode = OFFLOAD_MODE_DISABLED; + audioTrackBufferSizeProvider = AudioTrackBufferSizeProvider.DEFAULT; + } + + /** + * Sets audio capabilities for playback on this device. May be {@code null} if the default + * capabilities (no encoded audio passthrough support) should be assumed. + * + *

      Default is {@link AudioCapabilities#DEFAULT_AUDIO_CAPABILITIES}. + */ + @CanIgnoreReturnValue + public Builder setAudioCapabilities(AudioCapabilities audioCapabilities) { + checkNotNull(audioCapabilities); + this.audioCapabilities = audioCapabilities; + return this; + } + + /** + * Sets an array of {@link AudioProcessor AudioProcessors}s that will process PCM audio before + * output. May be empty. Equivalent of {@code setAudioProcessorChain(new + * DefaultAudioProcessorChain(audioProcessors)}. + * + *
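+ * <p>For example, a single custom processor (a hypothetical {@code myAudioProcessor}) could be
+ * installed as in this sketch:
+ *
+ * <pre>{@code
+ * new DefaultAudioSink.Builder()
+ *     .setAudioProcessors(new AudioProcessor[] {myAudioProcessor})
+ *     .build();
+ * }</pre>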

      The default value is an empty array. + */ + @CanIgnoreReturnValue + public Builder setAudioProcessors(AudioProcessor[] audioProcessors) { + checkNotNull(audioProcessors); + return setAudioProcessorChain(new DefaultAudioProcessorChain(audioProcessors)); + } + + /** + * Sets the {@link com.google.android.exoplayer2.audio.AudioProcessorChain} to process audio + * before playback. The instance passed in must not be reused in other sinks. Processing chains + * are only supported for PCM playback (not passthrough or offload). + * + *

      By default, no processing will be applied. + */ + @CanIgnoreReturnValue + public Builder setAudioProcessorChain( + com.google.android.exoplayer2.audio.AudioProcessorChain audioProcessorChain) { + checkNotNull(audioProcessorChain); + this.audioProcessorChain = audioProcessorChain; + return this; + } + + /** + * Sets whether to enable 32-bit float output or integer output. Where possible, 32-bit float + * output will be used if the input is 32-bit float, and also if the input is high resolution + * (24-bit or 32-bit) integer PCM. Float output is supported from API level 21. Audio processing + * (for example, speed adjustment) will not be available when float output is in use. + * + *

      The default value is {@code false}. + */ + @CanIgnoreReturnValue + public Builder setEnableFloatOutput(boolean enableFloatOutput) { + this.enableFloatOutput = enableFloatOutput; + return this; + } + + /** + * Sets whether to control the playback speed using the platform implementation (see {@link + * AudioTrack#setPlaybackParams(PlaybackParams)}), if supported. If set to {@code false}, speed + * up/down of the audio will be done by ExoPlayer (see {@link SonicAudioProcessor}). Platform + * speed adjustment is lower latency, but less reliable. + * + *

      The default value is {@code false}. + */ + @CanIgnoreReturnValue + public Builder setEnableAudioTrackPlaybackParams(boolean enableAudioTrackPlaybackParams) { + this.enableAudioTrackPlaybackParams = enableAudioTrackPlaybackParams; + return this; + } + + /** + * Sets the offload mode. If an audio format can be both played with offload and encoded audio + * passthrough, it will be played in offload. Audio offload is supported from API level 29. Most + * Android devices can only support one offload {@link AudioTrack} at a time and can invalidate + * it at any time. Thus an app can never be guaranteed that it will be able to play in offload. + * Audio processing (for example, speed adjustment) will not be available when offload is in + * use. + * + *
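+ * <p>As a sketch, offload could be requested when constructing the sink:
+ *
+ * <pre>{@code
+ * new DefaultAudioSink.Builder()
+ *     .setOffloadMode(DefaultAudioSink.OFFLOAD_MODE_ENABLED_GAPLESS_REQUIRED)
+ *     .build();
+ * }</pre>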

      The default value is {@link #OFFLOAD_MODE_DISABLED}. + */ + @CanIgnoreReturnValue + public Builder setOffloadMode(@OffloadMode int offloadMode) { + this.offloadMode = offloadMode; + return this; + } + + /** + * Sets an {@link AudioTrackBufferSizeProvider} to compute the buffer size when {@link + * #configure} is called with {@code specifiedBufferSize == 0}. + * + *
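+ * <p>For example, assuming a {@code Builder} named {@code builder}, a provider that doubles the
+ * platform minimum while keeping the result a whole number of PCM frames could be supplied as in
+ * this sketch:
+ *
+ * <pre>{@code
+ * builder.setAudioTrackBufferSizeProvider(
+ *     (minBufferSizeInBytes, encoding, outputMode, pcmFrameSize, sampleRate, bitrate, speed) -> {
+ *       int bufferSizeInBytes = 2 * minBufferSizeInBytes;
+ *       return bufferSizeInBytes - (bufferSizeInBytes % pcmFrameSize);
+ *     });
+ * }</pre>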

      The default value is {@link AudioTrackBufferSizeProvider#DEFAULT}. + */ + @CanIgnoreReturnValue + public Builder setAudioTrackBufferSizeProvider( + AudioTrackBufferSizeProvider audioTrackBufferSizeProvider) { + this.audioTrackBufferSizeProvider = audioTrackBufferSizeProvider; + return this; + } + + /** + * Sets an optional {@link AudioOffloadListener} to receive events relevant to offloaded + * playback. + * + *

      The default value is null. + */ + @CanIgnoreReturnValue + public Builder setExperimentalAudioOffloadListener( + @Nullable AudioOffloadListener audioOffloadListener) { + this.audioOffloadListener = audioOffloadListener; + return this; + } + + /** Builds the {@link DefaultAudioSink}. Must only be called once per Builder instance. */ + public DefaultAudioSink build() { + if (audioProcessorChain == null) { + audioProcessorChain = new DefaultAudioProcessorChain(); + } + return new DefaultAudioSink(this); + } + } + + /** The default playback speed. */ + public static final float DEFAULT_PLAYBACK_SPEED = 1f; + /** The minimum allowed playback speed. Lower values will be constrained to fall in range. */ + public static final float MIN_PLAYBACK_SPEED = 0.1f; + /** The maximum allowed playback speed. Higher values will be constrained to fall in range. */ + public static final float MAX_PLAYBACK_SPEED = 8f; + /** The minimum allowed pitch factor. Lower values will be constrained to fall in range. */ + public static final float MIN_PITCH = 0.1f; + /** The maximum allowed pitch factor. Higher values will be constrained to fall in range. */ + public static final float MAX_PITCH = 8f; + + /** The default skip silence flag. */ + private static final boolean DEFAULT_SKIP_SILENCE = false; + + /** Audio offload mode configuration. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + OFFLOAD_MODE_DISABLED, + OFFLOAD_MODE_ENABLED_GAPLESS_REQUIRED, + OFFLOAD_MODE_ENABLED_GAPLESS_NOT_REQUIRED, + OFFLOAD_MODE_ENABLED_GAPLESS_DISABLED + }) + public @interface OffloadMode {} + + /** The audio sink will never play in offload mode. */ + public static final int OFFLOAD_MODE_DISABLED = 0; /** - * @see AudioTrack#ERROR_BAD_VALUE - */ - private static final int ERROR_BAD_VALUE = AudioTrack.ERROR_BAD_VALUE; - /** - * @see AudioTrack#MODE_STATIC - */ - private static final int MODE_STATIC = AudioTrack.MODE_STATIC; - /** - * @see AudioTrack#MODE_STREAM + * The audio sink will prefer offload playback except if the track is gapless and the device does + * not advertise support for gapless playback in offload. + * + *

      Use this option to prioritize seamless transitions between tracks of the same album to power + * savings. */ - private static final int MODE_STREAM = AudioTrack.MODE_STREAM; + public static final int OFFLOAD_MODE_ENABLED_GAPLESS_REQUIRED = 1; /** - * @see AudioTrack#STATE_INITIALIZED + * The audio sink will prefer offload playback even if this might result in silence gaps between + * tracks. + * + *

      Use this option to prioritize battery saving at the cost of a possible non seamless + * transitions between tracks of the same album. */ - private static final int STATE_INITIALIZED = AudioTrack.STATE_INITIALIZED; + public static final int OFFLOAD_MODE_ENABLED_GAPLESS_NOT_REQUIRED = 2; /** - * @see AudioTrack#WRITE_NON_BLOCKING + * The audio sink will prefer offload playback, disabling gapless offload support. + * + *

      Use this option if gapless has undesirable side effects. For example if it introduces + * hardware issues. */ - @SuppressLint("InlinedApi") - private static final int WRITE_NON_BLOCKING = AudioTrack.WRITE_NON_BLOCKING; - - private static final String TAG = "AudioTrack"; + public static final int OFFLOAD_MODE_ENABLED_GAPLESS_DISABLED = 3; - /** Represents states of the {@link #startMediaTimeUs} value. */ + /** Output mode of the audio sink. */ @Documented @Retention(RetentionPolicy.SOURCE) - @IntDef({START_NOT_SET, START_IN_SYNC, START_NEED_SYNC}) - private @interface StartMediaTimeState {} + @Target(TYPE_USE) + @IntDef({OUTPUT_MODE_PCM, OUTPUT_MODE_OFFLOAD, OUTPUT_MODE_PASSTHROUGH}) + public @interface OutputMode {} + + /** The audio sink plays PCM audio. */ + public static final int OUTPUT_MODE_PCM = 0; + /** The audio sink plays encoded audio in offload. */ + public static final int OUTPUT_MODE_OFFLOAD = 1; + /** The audio sink plays encoded audio in passthrough. */ + public static final int OUTPUT_MODE_PASSTHROUGH = 2; - private static final int START_NOT_SET = 0; - private static final int START_IN_SYNC = 1; - private static final int START_NEED_SYNC = 2; + /** + * Native error code equivalent of {@link AudioTrack#ERROR_DEAD_OBJECT} to workaround missing + * error code translation on some devices. + * + *

+ * <p>On some devices, AudioTrack native error codes are not always converted to their SDK
+ * equivalent.
+ *

      For example: {@link AudioTrack#write(byte[], int, int)} can return -32 instead of {@link + * AudioTrack#ERROR_DEAD_OBJECT}. + */ + private static final int ERROR_NATIVE_DEAD_OBJECT = -32; /** - * Whether to enable a workaround for an issue where an audio effect does not keep its session - * active across releasing/initializing a new audio track, on platform builds where - * {@link Util#SDK_INT} < 21. - *

      - * The flag must be set before creating a player. + * The duration for which failed attempts to initialize or write to the audio track may be retried + * before throwing an exception, in milliseconds. */ - public static boolean enablePreV21AudioSessionWorkaround = false; + private static final int AUDIO_TRACK_RETRY_DURATION_MS = 100; + + private static final String TAG = "DefaultAudioSink"; /** * Whether to throw an {@link InvalidAudioTrackTimestampException} when a spurious timestamp is * reported from {@link AudioTrack#getTimestamp}. - *

      - * The flag must be set before creating a player. Should be set to {@code true} for testing and + * + *

      The flag must be set before creating a player. Should be set to {@code true} for testing and * debugging purposes only. */ public static boolean failOnSpuriousAudioTimestamp = false; - @Nullable private final AudioCapabilities audioCapabilities; - private final AudioProcessorChain audioProcessorChain; + private static final Object releaseExecutorLock = new Object(); + + @GuardedBy("releaseExecutorLock") + @Nullable + private static ExecutorService releaseExecutor; + + @GuardedBy("releaseExecutorLock") + private static int pendingReleaseCount; + + private final AudioCapabilities audioCapabilities; + private final com.google.android.exoplayer2.audio.AudioProcessorChain audioProcessorChain; private final boolean enableFloatOutput; private final ChannelMappingAudioProcessor channelMappingAudioProcessor; private final TrimmingAudioProcessor trimmingAudioProcessor; @@ -256,21 +465,26 @@ public long getSkippedOutputFrameCount() { private final AudioProcessor[] toFloatPcmAvailableAudioProcessors; private final ConditionVariable releasingConditionVariable; private final AudioTrackPositionTracker audioTrackPositionTracker; - private final ArrayDeque playbackParametersCheckpoints; - + private final ArrayDeque mediaPositionParametersCheckpoints; + private final boolean enableAudioTrackPlaybackParams; + private final @OffloadMode int offloadMode; + private @MonotonicNonNull StreamEventCallbackV29 offloadStreamEventCallbackV29; + private final PendingExceptionHolder + initializationExceptionPendingExceptionHolder; + private final PendingExceptionHolder writeExceptionPendingExceptionHolder; + private final AudioTrackBufferSizeProvider audioTrackBufferSizeProvider; + @Nullable private final AudioOffloadListener audioOffloadListener; + + @Nullable private PlayerId playerId; @Nullable private Listener listener; - /** Used to keep the audio session active on pre-V21 builds (see {@link #initialize(long)}). 
*/ - @Nullable private AudioTrack keepSessionIdAudioTrack; - @Nullable private Configuration pendingConfiguration; - private Configuration configuration; - private AudioTrack audioTrack; + private @MonotonicNonNull Configuration configuration; + @Nullable private AudioTrack audioTrack; private AudioAttributes audioAttributes; - @Nullable private PlaybackParameters afterDrainPlaybackParameters; - private PlaybackParameters playbackParameters; - private long playbackParametersOffsetUs; - private long playbackParametersPositionUs; + @Nullable private MediaPositionParameters afterDrainParameters; + private MediaPositionParameters mediaPositionParameters; + private PlaybackParameters audioTrackPlaybackParameters; @Nullable private ByteBuffer avSyncHeader; private int bytesUntilNextAvSync; @@ -280,79 +494,116 @@ public long getSkippedOutputFrameCount() { private long writtenPcmBytes; private long writtenEncodedFrames; private int framesPerEncodedSample; - private @StartMediaTimeState int startMediaTimeState; + private boolean startMediaTimeUsNeedsSync; + private boolean startMediaTimeUsNeedsInit; private long startMediaTimeUs; private float volume; private AudioProcessor[] activeAudioProcessors; private ByteBuffer[] outputBuffers; @Nullable private ByteBuffer inputBuffer; + private int inputBufferAccessUnitCount; @Nullable private ByteBuffer outputBuffer; - private byte[] preV21OutputBuffer; + private @MonotonicNonNull byte[] preV21OutputBuffer; private int preV21OutputBufferOffset; private int drainingAudioProcessorIndex; private boolean handledEndOfStream; private boolean stoppedAudioTrack; private boolean playing; + private boolean externalAudioSessionIdProvided; private int audioSessionId; private AuxEffectInfo auxEffectInfo; + @Nullable private AudioDeviceInfoApi23 preferredDevice; private boolean tunneling; private long lastFeedElapsedRealtimeMs; + private boolean offloadDisabledUntilNextConfiguration; + private boolean isWaitingForOffloadEndOfStreamHandled; /** - * Creates a new default audio sink. - * - * @param audioCapabilities The audio capabilities for playback on this device. May be null if the - * default capabilities (no encoded audio passthrough support) should be assumed. - * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before - * output. May be empty. + * @deprecated Use {@link Builder}. */ + @Deprecated + @InlineMeValidationDisabled("Migrate constructor to Builder") + @InlineMe( + replacement = + "new DefaultAudioSink.Builder()" + + ".setAudioCapabilities(audioCapabilities)" + + ".setAudioProcessors(audioProcessors)" + + ".build()", + imports = "com.google.android.exoplayer2.audio.DefaultAudioSink") public DefaultAudioSink( @Nullable AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessors) { - this(audioCapabilities, audioProcessors, /* enableFloatOutput= */ false); + this( + new Builder() + .setAudioCapabilities(firstNonNull(audioCapabilities, DEFAULT_AUDIO_CAPABILITIES)) + .setAudioProcessors(audioProcessors)); } /** - * Creates a new default audio sink, optionally using float output for high resolution PCM. - * - * @param audioCapabilities The audio capabilities for playback on this device. May be null if the - * default capabilities (no encoded audio passthrough support) should be assumed. - * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before - * output. May be empty. - * @param enableFloatOutput Whether to enable 32-bit float output. 
Where possible, 32-bit float - * output will be used if the input is 32-bit float, and also if the input is high resolution - * (24-bit or 32-bit) integer PCM. Audio processing (for example, speed adjustment) will not - * be available when float output is in use. + * @deprecated Use {@link Builder}. */ + @Deprecated + @InlineMeValidationDisabled("Migrate constructor to Builder") + @InlineMe( + replacement = + "new DefaultAudioSink.Builder()" + + ".setAudioCapabilities(audioCapabilities)" + + ".setAudioProcessors(audioProcessors)" + + ".setEnableFloatOutput(enableFloatOutput)" + + ".build()", + imports = "com.google.android.exoplayer2.audio.DefaultAudioSink") public DefaultAudioSink( @Nullable AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessors, boolean enableFloatOutput) { - this(audioCapabilities, new DefaultAudioProcessorChain(audioProcessors), enableFloatOutput); + this( + new Builder() + .setAudioCapabilities(firstNonNull(audioCapabilities, DEFAULT_AUDIO_CAPABILITIES)) + .setAudioProcessors(audioProcessors) + .setEnableFloatOutput(enableFloatOutput)); } /** - * Creates a new default audio sink, optionally using float output for high resolution PCM and - * with the specified {@code audioProcessorChain}. - * - * @param audioCapabilities The audio capabilities for playback on this device. May be null if the - * default capabilities (no encoded audio passthrough support) should be assumed. - * @param audioProcessorChain An {@link AudioProcessorChain} which is used to apply playback - * parameters adjustments. The instance passed in must not be reused in other sinks. - * @param enableFloatOutput Whether to enable 32-bit float output. Where possible, 32-bit float - * output will be used if the input is 32-bit float, and also if the input is high resolution - * (24-bit or 32-bit) integer PCM. Audio processing (for example, speed adjustment) will not - * be available when float output is in use. + * @deprecated Use {@link Builder}. 
*/ + @Deprecated + @InlineMeValidationDisabled("Migrate constructor to Builder") + @InlineMe( + replacement = + "new DefaultAudioSink.Builder()" + + ".setAudioCapabilities(audioCapabilities)" + + ".setAudioProcessorChain(audioProcessorChain)" + + ".setEnableFloatOutput(enableFloatOutput)" + + ".setEnableAudioTrackPlaybackParams(enableAudioTrackPlaybackParams)" + + ".setOffloadMode(offloadMode)" + + ".build()", + imports = "com.google.android.exoplayer2.audio.DefaultAudioSink") public DefaultAudioSink( @Nullable AudioCapabilities audioCapabilities, AudioProcessorChain audioProcessorChain, - boolean enableFloatOutput) { - this.audioCapabilities = audioCapabilities; - this.audioProcessorChain = Assertions.checkNotNull(audioProcessorChain); - this.enableFloatOutput = enableFloatOutput; - releasingConditionVariable = new ConditionVariable(true); + boolean enableFloatOutput, + boolean enableAudioTrackPlaybackParams, + @OffloadMode int offloadMode) { + this( + new Builder() + .setAudioCapabilities(firstNonNull(audioCapabilities, DEFAULT_AUDIO_CAPABILITIES)) + .setAudioProcessorChain(audioProcessorChain) + .setEnableFloatOutput(enableFloatOutput) + .setEnableAudioTrackPlaybackParams(enableAudioTrackPlaybackParams) + .setOffloadMode(offloadMode)); + } + + @RequiresNonNull("#1.audioProcessorChain") + private DefaultAudioSink(Builder builder) { + audioCapabilities = builder.audioCapabilities; + audioProcessorChain = builder.audioProcessorChain; + enableFloatOutput = Util.SDK_INT >= 21 && builder.enableFloatOutput; + enableAudioTrackPlaybackParams = Util.SDK_INT >= 23 && builder.enableAudioTrackPlaybackParams; + offloadMode = Util.SDK_INT >= 29 ? builder.offloadMode : OFFLOAD_MODE_DISABLED; + audioTrackBufferSizeProvider = builder.audioTrackBufferSizeProvider; + releasingConditionVariable = new ConditionVariable(Clock.DEFAULT); + releasingConditionVariable.open(); audioTrackPositionTracker = new AudioTrackPositionTracker(new PositionTrackerListener()); channelMappingAudioProcessor = new ChannelMappingAudioProcessor(); trimmingAudioProcessor = new TrimmingAudioProcessor(); @@ -365,16 +616,26 @@ public DefaultAudioSink( Collections.addAll(toIntPcmAudioProcessors, audioProcessorChain.getAudioProcessors()); toIntPcmAvailableAudioProcessors = toIntPcmAudioProcessors.toArray(new AudioProcessor[0]); toFloatPcmAvailableAudioProcessors = new AudioProcessor[] {new FloatResamplingAudioProcessor()}; - volume = 1.0f; - startMediaTimeState = START_NOT_SET; + volume = 1f; audioAttributes = AudioAttributes.DEFAULT; audioSessionId = C.AUDIO_SESSION_ID_UNSET; auxEffectInfo = new AuxEffectInfo(AuxEffectInfo.NO_AUX_EFFECT_ID, 0f); - playbackParameters = PlaybackParameters.DEFAULT; + mediaPositionParameters = + new MediaPositionParameters( + PlaybackParameters.DEFAULT, + DEFAULT_SKIP_SILENCE, + /* mediaTimeUs= */ 0, + /* audioTrackPositionUs= */ 0); + audioTrackPlaybackParameters = PlaybackParameters.DEFAULT; drainingAudioProcessorIndex = C.INDEX_UNSET; activeAudioProcessors = new AudioProcessor[0]; outputBuffers = new ByteBuffer[0]; - playbackParametersCheckpoints = new ArrayDeque<>(); + mediaPositionParametersCheckpoints = new ArrayDeque<>(); + initializationExceptionPendingExceptionHolder = + new PendingExceptionHolder<>(AUDIO_TRACK_RETRY_DURATION_MS); + writeExceptionPendingExceptionHolder = + new PendingExceptionHolder<>(AUDIO_TRACK_RETRY_DURATION_MS); + audioOffloadListener = builder.audioOffloadListener; } // AudioSink implementation. 
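The deprecated constructors above only forward to the new Builder, exactly as their @InlineMe replacements spell out. As a rough sketch, a call site that previously used the three-argument constructor could migrate like this (the capability and processor values shown are placeholders):

    AudioSink audioSink =
        new DefaultAudioSink.Builder()
            .setAudioCapabilities(AudioCapabilities.DEFAULT_AUDIO_CAPABILITIES)
            .setAudioProcessors(new AudioProcessor[0])
            .setEnableFloatOutput(false)
            .build();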
@@ -385,66 +646,86 @@ public void setListener(Listener listener) { } @Override - public boolean supportsOutput(int channelCount, @C.Encoding int encoding) { - if (Util.isEncodingLinearPcm(encoding)) { - // AudioTrack supports 16-bit integer PCM output in all platform API versions, and float - // output from platform API version 21 only. Other integer PCM encodings are resampled by this - // sink to 16-bit PCM. We assume that the audio framework will downsample any number of - // channels to the output device's required number of channels. - return encoding != C.ENCODING_PCM_FLOAT || Util.SDK_INT >= 21; - } else { - return audioCapabilities != null - && audioCapabilities.supportsEncoding(encoding) - && (channelCount == Format.NO_VALUE - || channelCount <= audioCapabilities.getMaxChannelCount()); + public void setPlayerId(@Nullable PlayerId playerId) { + this.playerId = playerId; + } + + @Override + public boolean supportsFormat(Format format) { + return getFormatSupport(format) != SINK_FORMAT_UNSUPPORTED; + } + + @Override + public @SinkFormatSupport int getFormatSupport(Format format) { + if (MimeTypes.AUDIO_RAW.equals(format.sampleMimeType)) { + if (!Util.isEncodingLinearPcm(format.pcmEncoding)) { + Log.w(TAG, "Invalid PCM encoding: " + format.pcmEncoding); + return SINK_FORMAT_UNSUPPORTED; + } + if (format.pcmEncoding == C.ENCODING_PCM_16BIT + || (enableFloatOutput && format.pcmEncoding == C.ENCODING_PCM_FLOAT)) { + return SINK_FORMAT_SUPPORTED_DIRECTLY; + } + // We can resample all linear PCM encodings to 16-bit integer PCM, which AudioTrack is + // guaranteed to support. + return SINK_FORMAT_SUPPORTED_WITH_TRANSCODING; + } + if (!offloadDisabledUntilNextConfiguration && useOffloadedPlayback(format, audioAttributes)) { + return SINK_FORMAT_SUPPORTED_DIRECTLY; + } + if (audioCapabilities.isPassthroughPlaybackSupported(format)) { + return SINK_FORMAT_SUPPORTED_DIRECTLY; } + return SINK_FORMAT_UNSUPPORTED; } @Override public long getCurrentPositionUs(boolean sourceEnded) { - if (!isInitialized() || startMediaTimeState == START_NOT_SET) { + if (!isAudioTrackInitialized() || startMediaTimeUsNeedsInit) { return CURRENT_POSITION_NOT_SET; } long positionUs = audioTrackPositionTracker.getCurrentPositionUs(sourceEnded); - positionUs = Math.min(positionUs, configuration.framesToDurationUs(getWrittenFrames())); - return startMediaTimeUs + applySkipping(applySpeedup(positionUs)); + positionUs = min(positionUs, configuration.framesToDurationUs(getWrittenFrames())); + return applySkipping(applyMediaPositionParameters(positionUs)); } @Override - public void configure( - @C.Encoding int inputEncoding, - int inputChannelCount, - int inputSampleRate, - int specifiedBufferSize, - @Nullable int[] outputChannels, - int trimStartFrames, - int trimEndFrames) + public void configure(Format inputFormat, int specifiedBufferSize, @Nullable int[] outputChannels) throws ConfigurationException { - if (Util.SDK_INT < 21 && inputChannelCount == 8 && outputChannels == null) { - // AudioTrack doesn't support 8 channel output before Android L. Discard the last two (side) - // channels to give a 6 channel stream that is supported. 
- outputChannels = new int[6]; - for (int i = 0; i < outputChannels.length; i++) { - outputChannels[i] = i; + int inputPcmFrameSize; + @Nullable AudioProcessor[] availableAudioProcessors; + + @OutputMode int outputMode; + @C.Encoding int outputEncoding; + int outputSampleRate; + int outputChannelConfig; + int outputPcmFrameSize; + + if (MimeTypes.AUDIO_RAW.equals(inputFormat.sampleMimeType)) { + Assertions.checkArgument(Util.isEncodingLinearPcm(inputFormat.pcmEncoding)); + + inputPcmFrameSize = Util.getPcmFrameSize(inputFormat.pcmEncoding, inputFormat.channelCount); + availableAudioProcessors = + shouldUseFloatOutput(inputFormat.pcmEncoding) + ? toFloatPcmAvailableAudioProcessors + : toIntPcmAvailableAudioProcessors; + + trimmingAudioProcessor.setTrimFrameCount( + inputFormat.encoderDelay, inputFormat.encoderPadding); + + if (Util.SDK_INT < 21 && inputFormat.channelCount == 8 && outputChannels == null) { + // AudioTrack doesn't support 8 channel output before Android L. Discard the last two (side) + // channels to give a 6 channel stream that is supported. + outputChannels = new int[6]; + for (int i = 0; i < outputChannels.length; i++) { + outputChannels[i] = i; + } } - } - - boolean isInputPcm = Util.isEncodingLinearPcm(inputEncoding); - boolean processingEnabled = isInputPcm; - int sampleRate = inputSampleRate; - int channelCount = inputChannelCount; - @C.Encoding int encoding = inputEncoding; - boolean useFloatOutput = - enableFloatOutput - && supportsOutput(inputChannelCount, C.ENCODING_PCM_FLOAT) - && Util.isEncodingHighResolutionPcm(inputEncoding); - AudioProcessor[] availableAudioProcessors = - useFloatOutput ? toFloatPcmAvailableAudioProcessors : toIntPcmAvailableAudioProcessors; - if (processingEnabled) { - trimmingAudioProcessor.setTrimFrameCount(trimStartFrames, trimEndFrames); channelMappingAudioProcessor.setChannelMap(outputChannels); + AudioProcessor.AudioFormat outputFormat = - new AudioProcessor.AudioFormat(sampleRate, channelCount, encoding); + new AudioProcessor.AudioFormat( + inputFormat.sampleRate, inputFormat.channelCount, inputFormat.pcmEncoding); for (AudioProcessor audioProcessor : availableAudioProcessors) { try { AudioProcessor.AudioFormat nextFormat = audioProcessor.configure(outputFormat); @@ -452,38 +733,73 @@ && supportsOutput(inputChannelCount, C.ENCODING_PCM_FLOAT) outputFormat = nextFormat; } } catch (UnhandledAudioFormatException e) { - throw new ConfigurationException(e); + throw new ConfigurationException(e, inputFormat); } } - sampleRate = outputFormat.sampleRate; - channelCount = outputFormat.channelCount; - encoding = outputFormat.encoding; - } - int outputChannelConfig = getChannelConfig(channelCount, isInputPcm); - if (outputChannelConfig == AudioFormat.CHANNEL_INVALID) { - throw new ConfigurationException("Unsupported channel count: " + channelCount); + outputMode = OUTPUT_MODE_PCM; + outputEncoding = outputFormat.encoding; + outputSampleRate = outputFormat.sampleRate; + outputChannelConfig = Util.getAudioTrackChannelConfig(outputFormat.channelCount); + outputPcmFrameSize = Util.getPcmFrameSize(outputEncoding, outputFormat.channelCount); + } else { + inputPcmFrameSize = C.LENGTH_UNSET; + availableAudioProcessors = new AudioProcessor[0]; + outputSampleRate = inputFormat.sampleRate; + outputPcmFrameSize = C.LENGTH_UNSET; + if (useOffloadedPlayback(inputFormat, audioAttributes)) { + outputMode = OUTPUT_MODE_OFFLOAD; + outputEncoding = + MimeTypes.getEncoding(checkNotNull(inputFormat.sampleMimeType), inputFormat.codecs); + outputChannelConfig = 
Util.getAudioTrackChannelConfig(inputFormat.channelCount); + } else { + outputMode = OUTPUT_MODE_PASSTHROUGH; + @Nullable + Pair encodingAndChannelConfig = + audioCapabilities.getEncodingAndChannelConfigForPassthrough(inputFormat); + if (encodingAndChannelConfig == null) { + throw new ConfigurationException( + "Unable to configure passthrough for: " + inputFormat, inputFormat); + } + outputEncoding = encodingAndChannelConfig.first; + outputChannelConfig = encodingAndChannelConfig.second; + } } - int inputPcmFrameSize = - isInputPcm ? Util.getPcmFrameSize(inputEncoding, inputChannelCount) : C.LENGTH_UNSET; - int outputPcmFrameSize = - isInputPcm ? Util.getPcmFrameSize(encoding, channelCount) : C.LENGTH_UNSET; - boolean canApplyPlaybackParameters = processingEnabled && !useFloatOutput; + if (outputEncoding == C.ENCODING_INVALID) { + throw new ConfigurationException( + "Invalid output encoding (mode=" + outputMode + ") for: " + inputFormat, inputFormat); + } + if (outputChannelConfig == AudioFormat.CHANNEL_INVALID) { + throw new ConfigurationException( + "Invalid output channel config (mode=" + outputMode + ") for: " + inputFormat, + inputFormat); + } + int bufferSize = + specifiedBufferSize != 0 + ? specifiedBufferSize + : audioTrackBufferSizeProvider.getBufferSizeInBytes( + getAudioTrackMinBufferSize(outputSampleRate, outputChannelConfig, outputEncoding), + outputEncoding, + outputMode, + outputPcmFrameSize != C.LENGTH_UNSET ? outputPcmFrameSize : 1, + outputSampleRate, + inputFormat.bitrate, + enableAudioTrackPlaybackParams ? MAX_PLAYBACK_SPEED : DEFAULT_PLAYBACK_SPEED); + + offloadDisabledUntilNextConfiguration = false; Configuration pendingConfiguration = new Configuration( - isInputPcm, + inputFormat, inputPcmFrameSize, - inputSampleRate, + outputMode, outputPcmFrameSize, - sampleRate, + outputSampleRate, outputChannelConfig, - encoding, - specifiedBufferSize, - processingEnabled, - canApplyPlaybackParameters, + outputEncoding, + bufferSize, availableAudioProcessors); - if (isInitialized()) { + if (isAudioTrackInitialized()) { this.pendingConfiguration = pendingConfiguration; } else { configuration = pendingConfiguration; @@ -514,42 +830,31 @@ private void flushAudioProcessors() { } } - private void initialize(long presentationTimeUs) throws InitializationException { - // If we're asynchronously releasing a previous audio track then we block until it has been + private boolean initializeAudioTrack() throws InitializationException { + // If we're asynchronously releasing a previous audio track then we wait until it has been // released. This guarantees that we cannot end up in a state where we have multiple audio // track instances. Without this guarantee it would be possible, in extreme cases, to exhaust // the shared memory that's available for audio track buffers. This would in turn cause the // initialization of the audio track to fail. - releasingConditionVariable.block(); + if (!releasingConditionVariable.isOpen()) { + return false; + } - audioTrack = - Assertions.checkNotNull(configuration) - .buildAudioTrack(tunneling, audioAttributes, audioSessionId); - int audioSessionId = audioTrack.getAudioSessionId(); - if (enablePreV21AudioSessionWorkaround) { - if (Util.SDK_INT < 21) { - // The workaround creates an audio track with a two byte buffer on the same session, and - // does not release it until this object is released, which keeps the session active. 
- if (keepSessionIdAudioTrack != null - && audioSessionId != keepSessionIdAudioTrack.getAudioSessionId()) { - releaseKeepSessionIdAudioTrack(); - } - if (keepSessionIdAudioTrack == null) { - keepSessionIdAudioTrack = initializeKeepSessionIdAudioTrack(audioSessionId); - } + audioTrack = buildAudioTrackWithRetry(); + if (isOffloadedPlayback(audioTrack)) { + registerStreamEventCallbackV29(audioTrack); + if (offloadMode != OFFLOAD_MODE_ENABLED_GAPLESS_DISABLED) { + audioTrack.setOffloadDelayPadding( + configuration.inputFormat.encoderDelay, configuration.inputFormat.encoderPadding); } } - if (this.audioSessionId != audioSessionId) { - this.audioSessionId = audioSessionId; - if (listener != null) { - listener.onAudioSessionId(audioSessionId); - } + if (Util.SDK_INT >= 31 && playerId != null) { + Api31.setLogSessionIdOnAudioTrack(audioTrack, playerId); } - - applyPlaybackParameters(playbackParameters, presentationTimeUs); - + audioSessionId = audioTrack.getAudioSessionId(); audioTrackPositionTracker.setAudioTrack( audioTrack, + /* isPassthrough= */ configuration.outputMode == OUTPUT_MODE_PASSTHROUGH, configuration.outputEncoding, configuration.outputPcmFrameSize, configuration.bufferSize); @@ -559,12 +864,18 @@ private void initialize(long presentationTimeUs) throws InitializationException audioTrack.attachAuxEffect(auxEffectInfo.effectId); audioTrack.setAuxEffectSendLevel(auxEffectInfo.sendLevel); } + if (preferredDevice != null && Util.SDK_INT >= 23) { + Api23.setPreferredDeviceOnAudioTrack(audioTrack, preferredDevice); + } + + startMediaTimeUsNeedsInit = true; + return true; } @Override public void play() { playing = true; - if (isInitialized()) { + if (isAudioTrackInitialized()) { audioTrackPositionTracker.start(); audioTrack.play(); } @@ -572,20 +883,18 @@ public void play() { @Override public void handleDiscontinuity() { - // Force resynchronization after a skipped buffer. - if (startMediaTimeState == START_IN_SYNC) { - startMediaTimeState = START_NEED_SYNC; - } + startMediaTimeUsNeedsSync = true; } @Override @SuppressWarnings("ReferenceEquality") - public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) + public boolean handleBuffer( + ByteBuffer buffer, long presentationTimeUs, int encodedAccessUnitCount) throws InitializationException, WriteException { Assertions.checkArgument(inputBuffer == null || buffer == inputBuffer); if (pendingConfiguration != null) { - if (!drainAudioProcessorsToEndOfStream()) { + if (!drainToEndOfStream()) { // There's still pending data in audio processors to write to the track. return false; } else if (!pendingConfiguration.canReuseAudioTrack(configuration)) { @@ -599,13 +908,48 @@ public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) // The current audio track can be reused for the new configuration. configuration = pendingConfiguration; pendingConfiguration = null; + if (isOffloadedPlayback(audioTrack) + && offloadMode != OFFLOAD_MODE_ENABLED_GAPLESS_DISABLED) { + // If the first track is very short (typically <1s), the offload AudioTrack might + // not have started yet. Do not call setOffloadEndOfStream as it would throw. + if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) { + audioTrack.setOffloadEndOfStream(); + } + audioTrack.setOffloadDelayPadding( + configuration.inputFormat.encoderDelay, configuration.inputFormat.encoderPadding); + isWaitingForOffloadEndOfStreamHandled = true; + } } // Re-apply playback parameters. 
- applyPlaybackParameters(playbackParameters, presentationTimeUs); + applyAudioProcessorPlaybackParametersAndSkipSilence(presentationTimeUs); } - if (!isInitialized()) { - initialize(presentationTimeUs); + if (!isAudioTrackInitialized()) { + try { + if (!initializeAudioTrack()) { + // Not yet ready for initialization of a new AudioTrack. + return false; + } + } catch (InitializationException e) { + if (e.isRecoverable) { + throw e; // Do not delay the exception if it can be recovered at higher level. + } + initializationExceptionPendingExceptionHolder.throwExceptionIfDeadlineIsReached(e); + return false; + } + } + initializationExceptionPendingExceptionHolder.clear(); + + if (startMediaTimeUsNeedsInit) { + startMediaTimeUs = max(0, presentationTimeUs); + startMediaTimeUsNeedsSync = false; + startMediaTimeUsNeedsInit = false; + + if (enableAudioTrackPlaybackParams && Util.SDK_INT >= 23) { + setAudioTrackPlaybackParametersV23(audioTrackPlaybackParameters); + } + applyAudioProcessorPlaybackParametersAndSkipSilence(presentationTimeUs); + if (playing) { play(); } @@ -617,12 +961,13 @@ public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) if (inputBuffer == null) { // We are seeing this buffer for the first time. + Assertions.checkArgument(buffer.order() == ByteOrder.LITTLE_ENDIAN); if (!buffer.hasRemaining()) { // The buffer is empty. return true; } - if (!configuration.isInputPcm && framesPerEncodedSample == 0) { + if (configuration.outputMode != OUTPUT_MODE_PCM && framesPerEncodedSample == 0) { // If this is the first encoded sample, calculate the sample size in frames. framesPerEncodedSample = getFramesPerEncodedSample(configuration.outputEncoding, buffer); if (framesPerEncodedSample == 0) { @@ -634,60 +979,61 @@ public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) } } - if (afterDrainPlaybackParameters != null) { - if (!drainAudioProcessorsToEndOfStream()) { + if (afterDrainParameters != null) { + if (!drainToEndOfStream()) { // Don't process any more input until draining completes. return false; } - PlaybackParameters newPlaybackParameters = afterDrainPlaybackParameters; - afterDrainPlaybackParameters = null; - applyPlaybackParameters(newPlaybackParameters, presentationTimeUs); + applyAudioProcessorPlaybackParametersAndSkipSilence(presentationTimeUs); + afterDrainParameters = null; } - if (startMediaTimeState == START_NOT_SET) { - startMediaTimeUs = Math.max(0, presentationTimeUs); - startMediaTimeState = START_IN_SYNC; - } else { - // Sanity check that presentationTimeUs is consistent with the expected value. - long expectedPresentationTimeUs = - startMediaTimeUs - + configuration.inputFramesToDurationUs( - getSubmittedFrames() - trimmingAudioProcessor.getTrimmedFrameCount()); - if (startMediaTimeState == START_IN_SYNC - && Math.abs(expectedPresentationTimeUs - presentationTimeUs) > 200000) { - Log.e(TAG, "Discontinuity detected [expected " + expectedPresentationTimeUs + ", got " - + presentationTimeUs + "]"); - startMediaTimeState = START_NEED_SYNC; + // Check that presentationTimeUs is consistent with the expected value. 
+ long expectedPresentationTimeUs = + startMediaTimeUs + + configuration.inputFramesToDurationUs( + getSubmittedFrames() - trimmingAudioProcessor.getTrimmedFrameCount()); + if (!startMediaTimeUsNeedsSync + && Math.abs(expectedPresentationTimeUs - presentationTimeUs) > 200000) { + if (listener != null) { + listener.onAudioSinkError( + new AudioSink.UnexpectedDiscontinuityException( + presentationTimeUs, expectedPresentationTimeUs)); } - if (startMediaTimeState == START_NEED_SYNC) { - // Adjust startMediaTimeUs to be consistent with the current buffer's start time and the - // number of bytes submitted. - long adjustmentUs = presentationTimeUs - expectedPresentationTimeUs; - startMediaTimeUs += adjustmentUs; - startMediaTimeState = START_IN_SYNC; - if (listener != null && adjustmentUs != 0) { - listener.onPositionDiscontinuity(); - } + startMediaTimeUsNeedsSync = true; + } + if (startMediaTimeUsNeedsSync) { + if (!drainToEndOfStream()) { + // Don't update timing until pending AudioProcessor buffers are completely drained. + return false; + } + // Adjust startMediaTimeUs to be consistent with the current buffer's start time and the + // number of bytes submitted. + long adjustmentUs = presentationTimeUs - expectedPresentationTimeUs; + startMediaTimeUs += adjustmentUs; + startMediaTimeUsNeedsSync = false; + // Re-apply playback parameters because the startMediaTimeUs changed. + applyAudioProcessorPlaybackParametersAndSkipSilence(presentationTimeUs); + if (listener != null && adjustmentUs != 0) { + listener.onPositionDiscontinuity(); } } - if (configuration.isInputPcm) { + if (configuration.outputMode == OUTPUT_MODE_PCM) { submittedPcmBytes += buffer.remaining(); } else { - submittedEncodedFrames += framesPerEncodedSample; + submittedEncodedFrames += (long) framesPerEncodedSample * encodedAccessUnitCount; } inputBuffer = buffer; + inputBufferAccessUnitCount = encodedAccessUnitCount; } - if (configuration.processingEnabled) { - processBuffers(presentationTimeUs); - } else { - writeBuffer(inputBuffer, presentationTimeUs); - } + processBuffers(presentationTimeUs); if (!inputBuffer.hasRemaining()) { inputBuffer = null; + inputBufferAccessUnitCount = 0; return true; } @@ -700,17 +1046,68 @@ public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) return false; } + private AudioTrack buildAudioTrackWithRetry() throws InitializationException { + try { + return buildAudioTrack(checkNotNull(configuration)); + } catch (InitializationException initialFailure) { + // Retry with a smaller buffer size. 
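
A small worked example, not from the patch: the resync path above tolerates up to 200000 microseconds of drift between the expected and actual presentation times before shifting startMediaTimeUs by the difference. The values below are made up.

final class ResyncExample {
  private ResyncExample() {}

  // Mirrors the adjustment in handleBuffer: once the drift exceeds the threshold,
  // startMediaTimeUs is shifted by the difference so later positions line up again.
  static long resyncStartMediaTimeUs(
      long startMediaTimeUs, long expectedPresentationTimeUs, long actualPresentationTimeUs) {
    long adjustmentUs = actualPresentationTimeUs - expectedPresentationTimeUs;
    return startMediaTimeUs + adjustmentUs;
  }

  public static void main(String[] args) {
    // Expected 10.0s based on submitted frames, but the buffer reports 10.35s: 350ms drift.
    long adjusted = resyncStartMediaTimeUs(0, 10_000_000L, 10_350_000L);
    System.out.println(adjusted); // 350000: subsequent media times are shifted by +350ms.
  }
}
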
+ if (configuration.bufferSize > AUDIO_TRACK_SMALLER_BUFFER_RETRY_SIZE) { + Configuration retryConfiguration = + configuration.copyWithBufferSize(AUDIO_TRACK_SMALLER_BUFFER_RETRY_SIZE); + try { + AudioTrack audioTrack = buildAudioTrack(retryConfiguration); + configuration = retryConfiguration; + return audioTrack; + } catch (InitializationException retryFailure) { + initialFailure.addSuppressed(retryFailure); + } + } + maybeDisableOffload(); + throw initialFailure; + } + } + + private AudioTrack buildAudioTrack(Configuration configuration) throws InitializationException { + try { + AudioTrack audioTrack = + configuration.buildAudioTrack(tunneling, audioAttributes, audioSessionId); + if (audioOffloadListener != null) { + audioOffloadListener.onExperimentalOffloadedPlayback(isOffloadedPlayback(audioTrack)); + } + return audioTrack; + } catch (InitializationException e) { + if (listener != null) { + listener.onAudioSinkError(e); + } + throw e; + } + } + + @RequiresApi(29) + private void registerStreamEventCallbackV29(AudioTrack audioTrack) { + if (offloadStreamEventCallbackV29 == null) { + // Must be lazily initialized to receive stream event callbacks on the current (playback) + // thread as the constructor is not called in the playback thread. + offloadStreamEventCallbackV29 = new StreamEventCallbackV29(); + } + offloadStreamEventCallbackV29.register(audioTrack); + } + private void processBuffers(long avSyncPresentationTimeUs) throws WriteException { int count = activeAudioProcessors.length; int index = count; while (index >= 0) { - ByteBuffer input = index > 0 ? outputBuffers[index - 1] - : (inputBuffer != null ? inputBuffer : AudioProcessor.EMPTY_BUFFER); + ByteBuffer input = + index > 0 + ? outputBuffers[index - 1] + : (inputBuffer != null ? inputBuffer : AudioProcessor.EMPTY_BUFFER); if (index == count) { writeBuffer(input, avSyncPresentationTimeUs); } else { AudioProcessor audioProcessor = activeAudioProcessors[index]; - audioProcessor.queueInput(input); + if (index > drainingAudioProcessorIndex) { + audioProcessor.queueInput(input); + } ByteBuffer output = audioProcessor.getOutput(); outputBuffers[index] = output; if (output.hasRemaining()) { @@ -751,38 +1148,81 @@ private void writeBuffer(ByteBuffer buffer, long avSyncPresentationTimeUs) throw } } int bytesRemaining = buffer.remaining(); - int bytesWritten = 0; - if (Util.SDK_INT < 21) { // isInputPcm == true + int bytesWrittenOrError = 0; // Error if negative + if (Util.SDK_INT < 21) { // outputMode == OUTPUT_MODE_PCM. // Work out how many bytes we can write without the risk of blocking. 
int bytesToWrite = audioTrackPositionTracker.getAvailableBufferSize(writtenPcmBytes); if (bytesToWrite > 0) { - bytesToWrite = Math.min(bytesRemaining, bytesToWrite); - bytesWritten = audioTrack.write(preV21OutputBuffer, preV21OutputBufferOffset, bytesToWrite); - if (bytesWritten > 0) { - preV21OutputBufferOffset += bytesWritten; - buffer.position(buffer.position() + bytesWritten); + bytesToWrite = min(bytesRemaining, bytesToWrite); + bytesWrittenOrError = + audioTrack.write(preV21OutputBuffer, preV21OutputBufferOffset, bytesToWrite); + if (bytesWrittenOrError > 0) { // No error + preV21OutputBufferOffset += bytesWrittenOrError; + buffer.position(buffer.position() + bytesWrittenOrError); } } } else if (tunneling) { Assertions.checkState(avSyncPresentationTimeUs != C.TIME_UNSET); - bytesWritten = writeNonBlockingWithAvSyncV21(audioTrack, buffer, bytesRemaining, - avSyncPresentationTimeUs); + bytesWrittenOrError = + writeNonBlockingWithAvSyncV21( + audioTrack, buffer, bytesRemaining, avSyncPresentationTimeUs); } else { - bytesWritten = writeNonBlockingV21(audioTrack, buffer, bytesRemaining); + bytesWrittenOrError = writeNonBlockingV21(audioTrack, buffer, bytesRemaining); } lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime(); - if (bytesWritten < 0) { - throw new WriteException(bytesWritten); + if (bytesWrittenOrError < 0) { + int error = bytesWrittenOrError; + + // Treat a write error on a previously successful offload channel as recoverable + // without disabling offload. Offload will be disabled when a new AudioTrack is created, + // if no longer supported. + boolean isRecoverable = isAudioTrackDeadObject(error) && writtenEncodedFrames > 0; + + WriteException e = new WriteException(error, configuration.inputFormat, isRecoverable); + if (listener != null) { + listener.onAudioSinkError(e); + } + if (e.isRecoverable) { + throw e; // Do not delay the exception if it can be recovered at higher level. + } + writeExceptionPendingExceptionHolder.throwExceptionIfDeadlineIsReached(e); + return; + } + writeExceptionPendingExceptionHolder.clear(); + + int bytesWritten = bytesWrittenOrError; + + if (isOffloadedPlayback(audioTrack)) { + // After calling AudioTrack.setOffloadEndOfStream, the AudioTrack internally stops and + // restarts during which AudioTrack.write will return 0. This situation must be detected to + // prevent reporting the buffer as full even though it is not which could lead ExoPlayer to + // sleep forever waiting for a onDataRequest that will never come. + if (writtenEncodedFrames > 0) { + isWaitingForOffloadEndOfStreamHandled = false; + } + + // Consider the offload buffer as full if the AudioTrack is playing and AudioTrack.write could + // not write all the data provided to it. This relies on the assumption that AudioTrack.write + // always writes as much as possible. + if (playing + && listener != null + && bytesWritten < bytesRemaining + && !isWaitingForOffloadEndOfStreamHandled) { + listener.onOffloadBufferFull(); + } } - if (configuration.isInputPcm) { + if (configuration.outputMode == OUTPUT_MODE_PCM) { writtenPcmBytes += bytesWritten; } if (bytesWritten == bytesRemaining) { - if (!configuration.isInputPcm) { - writtenEncodedFrames += framesPerEncodedSample; + if (configuration.outputMode != OUTPUT_MODE_PCM) { + // When playing non-PCM, the inputBuffer is never processed, thus the last inputBuffer + // must be the current input buffer. 
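
A simplified sketch of the deadline idea behind the PendingExceptionHolder usage above: remember the first transient failure, keep retrying until a window derived from AUDIO_TRACK_RETRY_DURATION_MS has passed, then rethrow it. The class below is hypothetical and is not the actual PendingExceptionHolder implementation.

final class DeadlineExceptionHolder<T extends Exception> {
  private final long retryDurationMs;
  private T pendingException;
  private long throwDeadlineMs;

  DeadlineExceptionHolder(long retryDurationMs) {
    this.retryDurationMs = retryDurationMs;
  }

  // Swallow repeated failures until the deadline passes, then surface the first one.
  void throwExceptionIfDeadlineIsReached(T exception) throws T {
    long nowMs = android.os.SystemClock.elapsedRealtime();
    if (pendingException == null) {
      pendingException = exception;
      throwDeadlineMs = nowMs + retryDurationMs;
    }
    if (nowMs >= throwDeadlineMs) {
      T failure = pendingException;
      pendingException = null;
      throw failure;
    }
  }

  void clear() {
    pendingException = null;
  }
}
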
+ Assertions.checkState(buffer == inputBuffer); + writtenEncodedFrames += (long) framesPerEncodedSample * inputBufferAccessUnitCount; } outputBuffer = null; } @@ -790,17 +1230,31 @@ private void writeBuffer(ByteBuffer buffer, long avSyncPresentationTimeUs) throw @Override public void playToEndOfStream() throws WriteException { - if (!handledEndOfStream && isInitialized() && drainAudioProcessorsToEndOfStream()) { + if (!handledEndOfStream && isAudioTrackInitialized() && drainToEndOfStream()) { playPendingData(); handledEndOfStream = true; } } - private boolean drainAudioProcessorsToEndOfStream() throws WriteException { + private void maybeDisableOffload() { + if (!configuration.outputModeIsOffload()) { + return; + } + // Offload was requested, but may not be available. There are cases when this can occur even if + // AudioManager.isOffloadedPlaybackSupported returned true. For example, due to use of an + // AudioPlaybackCaptureConfiguration. Disable offload until the sink is next configured. + offloadDisabledUntilNextConfiguration = true; + } + + private static boolean isAudioTrackDeadObject(int status) { + return (Util.SDK_INT >= 24 && status == AudioTrack.ERROR_DEAD_OBJECT) + || status == ERROR_NATIVE_DEAD_OBJECT; + } + + private boolean drainToEndOfStream() throws WriteException { boolean audioProcessorNeedsEndOfStream = false; if (drainingAudioProcessorIndex == C.INDEX_UNSET) { - drainingAudioProcessorIndex = - configuration.processingEnabled ? 0 : activeAudioProcessors.length; + drainingAudioProcessorIndex = 0; audioProcessorNeedsEndOfStream = true; } while (drainingAudioProcessorIndex < activeAudioProcessors.length) { @@ -829,42 +1283,45 @@ private boolean drainAudioProcessorsToEndOfStream() throws WriteException { @Override public boolean isEnded() { - return !isInitialized() || (handledEndOfStream && !hasPendingData()); + return !isAudioTrackInitialized() || (handledEndOfStream && !hasPendingData()); } @Override public boolean hasPendingData() { - return isInitialized() && audioTrackPositionTracker.hasPendingData(getWrittenFrames()); + return isAudioTrackInitialized() + && audioTrackPositionTracker.hasPendingData(getWrittenFrames()); } @Override public void setPlaybackParameters(PlaybackParameters playbackParameters) { - if (configuration != null && !configuration.canApplyPlaybackParameters) { - this.playbackParameters = PlaybackParameters.DEFAULT; - return; - } - PlaybackParameters lastSetPlaybackParameters = getPlaybackParameters(); - if (!playbackParameters.equals(lastSetPlaybackParameters)) { - if (isInitialized()) { - // Drain the audio processors so we can determine the frame position at which the new - // parameters apply. - afterDrainPlaybackParameters = playbackParameters; - } else { - // Update the playback parameters now. They will be applied to the audio processors during - // initialization. - this.playbackParameters = playbackParameters; - } + playbackParameters = + new PlaybackParameters( + constrainValue(playbackParameters.speed, MIN_PLAYBACK_SPEED, MAX_PLAYBACK_SPEED), + constrainValue(playbackParameters.pitch, MIN_PITCH, MAX_PITCH)); + if (enableAudioTrackPlaybackParams && Util.SDK_INT >= 23) { + setAudioTrackPlaybackParametersV23(playbackParameters); + } else { + setAudioProcessorPlaybackParametersAndSkipSilence( + playbackParameters, getSkipSilenceEnabled()); } } @Override public PlaybackParameters getPlaybackParameters() { - // Mask the already set parameters. - return afterDrainPlaybackParameters != null - ? 
afterDrainPlaybackParameters - : !playbackParametersCheckpoints.isEmpty() - ? playbackParametersCheckpoints.getLast().playbackParameters - : playbackParameters; + return enableAudioTrackPlaybackParams + ? audioTrackPlaybackParameters + : getAudioProcessorPlaybackParameters(); + } + + @Override + public void setSkipSilenceEnabled(boolean skipSilenceEnabled) { + setAudioProcessorPlaybackParametersAndSkipSilence( + getAudioProcessorPlaybackParameters(), skipSilenceEnabled); + } + + @Override + public boolean getSkipSilenceEnabled() { + return getMediaPositionParameters().skipSilence; } @Override @@ -878,13 +1335,18 @@ public void setAudioAttributes(AudioAttributes audioAttributes) { return; } flush(); - audioSessionId = C.AUDIO_SESSION_ID_UNSET; + } + + @Override + public AudioAttributes getAudioAttributes() { + return audioAttributes; } @Override public void setAudioSessionId(int audioSessionId) { if (this.audioSessionId != audioSessionId) { this.audioSessionId = audioSessionId; + externalAudioSessionIdProvided = audioSessionId != C.AUDIO_SESSION_ID_UNSET; flush(); } } @@ -907,12 +1369,22 @@ public void setAuxEffectInfo(AuxEffectInfo auxEffectInfo) { this.auxEffectInfo = auxEffectInfo; } + @RequiresApi(23) + @Override + public void setPreferredDevice(@Nullable AudioDeviceInfo audioDeviceInfo) { + this.preferredDevice = + audioDeviceInfo == null ? null : new AudioDeviceInfoApi23(audioDeviceInfo); + if (audioTrack != null) { + Api23.setPreferredDeviceOnAudioTrack(audioTrack, this.preferredDevice); + } + } + @Override - public void enableTunnelingV21(int tunnelingAudioSessionId) { + public void enableTunnelingV21() { Assertions.checkState(Util.SDK_INT >= 21); - if (!tunneling || audioSessionId != tunnelingAudioSessionId) { + Assertions.checkState(externalAudioSessionIdProvided); + if (!tunneling) { tunneling = true; - audioSessionId = tunnelingAudioSessionId; flush(); } } @@ -921,7 +1393,6 @@ public void enableTunnelingV21(int tunnelingAudioSessionId) { public void disableTunneling() { if (tunneling) { tunneling = false; - audioSessionId = C.AUDIO_SESSION_ID_UNSET; flush(); } } @@ -935,7 +1406,7 @@ public void setVolume(float volume) { } private void setVolumeInternal() { - if (!isInitialized()) { + if (!isAudioTrackInitialized()) { // Do nothing. 
} else if (Util.SDK_INT >= 21) { setVolumeInternalV21(audioTrack, volume); @@ -947,138 +1418,263 @@ private void setVolumeInternal() { @Override public void pause() { playing = false; - if (isInitialized() && audioTrackPositionTracker.pause()) { + if (isAudioTrackInitialized() && audioTrackPositionTracker.pause()) { audioTrack.pause(); } } @Override public void flush() { - if (isInitialized()) { - submittedPcmBytes = 0; - submittedEncodedFrames = 0; - writtenPcmBytes = 0; - writtenEncodedFrames = 0; - framesPerEncodedSample = 0; - if (afterDrainPlaybackParameters != null) { - playbackParameters = afterDrainPlaybackParameters; - afterDrainPlaybackParameters = null; - } else if (!playbackParametersCheckpoints.isEmpty()) { - playbackParameters = playbackParametersCheckpoints.getLast().playbackParameters; - } - playbackParametersCheckpoints.clear(); - playbackParametersOffsetUs = 0; - playbackParametersPositionUs = 0; - trimmingAudioProcessor.resetTrimmedFrameCount(); - flushAudioProcessors(); - inputBuffer = null; - outputBuffer = null; - stoppedAudioTrack = false; - handledEndOfStream = false; - drainingAudioProcessorIndex = C.INDEX_UNSET; - avSyncHeader = null; - bytesUntilNextAvSync = 0; - startMediaTimeState = START_NOT_SET; + if (isAudioTrackInitialized()) { + resetSinkStateForFlush(); + if (audioTrackPositionTracker.isPlaying()) { audioTrack.pause(); } - // AudioTrack.release can take some time, so we call it on a background thread. - final AudioTrack toRelease = audioTrack; - audioTrack = null; + if (isOffloadedPlayback(audioTrack)) { + checkNotNull(offloadStreamEventCallbackV29).unregister(audioTrack); + } + if (Util.SDK_INT < 21 && !externalAudioSessionIdProvided) { + // Prior to API level 21, audio sessions are not kept alive once there are no components + // associated with them. If we generated the session ID internally, the only component + // associated with the session is the audio track that's being released, and therefore + // the session will not be kept alive. As a result, we need to generate a new session when + // we next create an audio track. + audioSessionId = C.AUDIO_SESSION_ID_UNSET; + } if (pendingConfiguration != null) { configuration = pendingConfiguration; pendingConfiguration = null; } audioTrackPositionTracker.reset(); - releasingConditionVariable.close(); - new Thread() { - @Override - public void run() { - try { - toRelease.flush(); - toRelease.release(); - } finally { - releasingConditionVariable.open(); - } - } - }.start(); + releaseAudioTrackAsync(audioTrack, releasingConditionVariable); + audioTrack = null; + } + writeExceptionPendingExceptionHolder.clear(); + initializationExceptionPendingExceptionHolder.clear(); + } + + @Override + public void experimentalFlushWithoutAudioTrackRelease() { + // Prior to SDK 25, AudioTrack flush does not work as intended, and therefore it must be + // released and reinitialized. 
(Internal reference: b/143500232) + if (Util.SDK_INT < 25) { + flush(); + return; + } + + writeExceptionPendingExceptionHolder.clear(); + initializationExceptionPendingExceptionHolder.clear(); + + if (!isAudioTrackInitialized()) { + return; + } + + resetSinkStateForFlush(); + if (audioTrackPositionTracker.isPlaying()) { + audioTrack.pause(); } + audioTrack.flush(); + + audioTrackPositionTracker.reset(); + audioTrackPositionTracker.setAudioTrack( + audioTrack, + /* isPassthrough= */ configuration.outputMode == OUTPUT_MODE_PASSTHROUGH, + configuration.outputEncoding, + configuration.outputPcmFrameSize, + configuration.bufferSize); + + startMediaTimeUsNeedsInit = true; } @Override public void reset() { flush(); - releaseKeepSessionIdAudioTrack(); for (AudioProcessor audioProcessor : toIntPcmAvailableAudioProcessors) { audioProcessor.reset(); } for (AudioProcessor audioProcessor : toFloatPcmAvailableAudioProcessors) { audioProcessor.reset(); } - audioSessionId = C.AUDIO_SESSION_ID_UNSET; playing = false; + offloadDisabledUntilNextConfiguration = false; } - /** - * Releases {@link #keepSessionIdAudioTrack} asynchronously, if it is non-{@code null}. - */ - private void releaseKeepSessionIdAudioTrack() { - if (keepSessionIdAudioTrack == null) { - return; + // Internal methods. + + private void resetSinkStateForFlush() { + submittedPcmBytes = 0; + submittedEncodedFrames = 0; + writtenPcmBytes = 0; + writtenEncodedFrames = 0; + isWaitingForOffloadEndOfStreamHandled = false; + framesPerEncodedSample = 0; + mediaPositionParameters = + new MediaPositionParameters( + getAudioProcessorPlaybackParameters(), + getSkipSilenceEnabled(), + /* mediaTimeUs= */ 0, + /* audioTrackPositionUs= */ 0); + startMediaTimeUs = 0; + afterDrainParameters = null; + mediaPositionParametersCheckpoints.clear(); + inputBuffer = null; + inputBufferAccessUnitCount = 0; + outputBuffer = null; + stoppedAudioTrack = false; + handledEndOfStream = false; + drainingAudioProcessorIndex = C.INDEX_UNSET; + avSyncHeader = null; + bytesUntilNextAvSync = 0; + trimmingAudioProcessor.resetTrimmedFrameCount(); + flushAudioProcessors(); + } + + @RequiresApi(23) + private void setAudioTrackPlaybackParametersV23(PlaybackParameters audioTrackPlaybackParameters) { + if (isAudioTrackInitialized()) { + PlaybackParams playbackParams = + new PlaybackParams() + .allowDefaults() + .setSpeed(audioTrackPlaybackParameters.speed) + .setPitch(audioTrackPlaybackParameters.pitch) + .setAudioFallbackMode(PlaybackParams.AUDIO_FALLBACK_MODE_FAIL); + try { + audioTrack.setPlaybackParams(playbackParams); + } catch (IllegalArgumentException e) { + Log.w(TAG, "Failed to set playback params", e); + } + // Update the speed using the actual effective speed from the audio track. + audioTrackPlaybackParameters = + new PlaybackParameters( + audioTrack.getPlaybackParams().getSpeed(), audioTrack.getPlaybackParams().getPitch()); + audioTrackPositionTracker.setAudioTrackPlaybackSpeed(audioTrackPlaybackParameters.speed); } + this.audioTrackPlaybackParameters = audioTrackPlaybackParameters; + } - // AudioTrack.release can take some time, so we call it on a background thread. 
- final AudioTrack toRelease = keepSessionIdAudioTrack; - keepSessionIdAudioTrack = null; - new Thread() { - @Override - public void run() { - toRelease.release(); + private void setAudioProcessorPlaybackParametersAndSkipSilence( + PlaybackParameters playbackParameters, boolean skipSilence) { + MediaPositionParameters currentMediaPositionParameters = getMediaPositionParameters(); + if (!playbackParameters.equals(currentMediaPositionParameters.playbackParameters) + || skipSilence != currentMediaPositionParameters.skipSilence) { + MediaPositionParameters mediaPositionParameters = + new MediaPositionParameters( + playbackParameters, + skipSilence, + /* mediaTimeUs= */ C.TIME_UNSET, + /* audioTrackPositionUs= */ C.TIME_UNSET); + if (isAudioTrackInitialized()) { + // Drain the audio processors so we can determine the frame position at which the new + // parameters apply. + this.afterDrainParameters = mediaPositionParameters; + } else { + // Update the audio processor chain parameters now. They will be applied to the audio + // processors during initialization. + this.mediaPositionParameters = mediaPositionParameters; } - }.start(); + } + } + + private PlaybackParameters getAudioProcessorPlaybackParameters() { + return getMediaPositionParameters().playbackParameters; + } + + private MediaPositionParameters getMediaPositionParameters() { + // Mask the already set parameters. + return afterDrainParameters != null + ? afterDrainParameters + : !mediaPositionParametersCheckpoints.isEmpty() + ? mediaPositionParametersCheckpoints.getLast() + : mediaPositionParameters; } - private void applyPlaybackParameters( - PlaybackParameters playbackParameters, long presentationTimeUs) { - PlaybackParameters newPlaybackParameters = - configuration.canApplyPlaybackParameters - ? audioProcessorChain.applyPlaybackParameters(playbackParameters) + private void applyAudioProcessorPlaybackParametersAndSkipSilence(long presentationTimeUs) { + PlaybackParameters playbackParameters = + shouldApplyAudioProcessorPlaybackParameters() + ? audioProcessorChain.applyPlaybackParameters(getAudioProcessorPlaybackParameters()) : PlaybackParameters.DEFAULT; - // Store the position and corresponding media time from which the parameters will apply. - playbackParametersCheckpoints.add( - new PlaybackParametersCheckpoint( - newPlaybackParameters, - /* mediaTimeUs= */ Math.max(0, presentationTimeUs), - /* positionUs= */ configuration.framesToDurationUs(getWrittenFrames()))); + boolean skipSilenceEnabled = + shouldApplyAudioProcessorPlaybackParameters() + ? audioProcessorChain.applySkipSilenceEnabled(getSkipSilenceEnabled()) + : DEFAULT_SKIP_SILENCE; + mediaPositionParametersCheckpoints.add( + new MediaPositionParameters( + playbackParameters, + skipSilenceEnabled, + /* mediaTimeUs= */ max(0, presentationTimeUs), + /* audioTrackPositionUs= */ configuration.framesToDurationUs(getWrittenFrames()))); setupAudioProcessors(); + if (listener != null) { + listener.onSkipSilenceEnabledChanged(skipSilenceEnabled); + } } - private long applySpeedup(long positionUs) { - @Nullable PlaybackParametersCheckpoint checkpoint = null; - while (!playbackParametersCheckpoints.isEmpty() - && positionUs >= playbackParametersCheckpoints.getFirst().positionUs) { - checkpoint = playbackParametersCheckpoints.remove(); - } - if (checkpoint != null) { - // We are playing (or about to play) media with the new playback parameters, so update them. 
- playbackParameters = checkpoint.playbackParameters; - playbackParametersPositionUs = checkpoint.positionUs; - playbackParametersOffsetUs = checkpoint.mediaTimeUs - startMediaTimeUs; - } + /** + * Returns whether audio processor playback parameters should be applied in the current + * configuration. + */ + private boolean shouldApplyAudioProcessorPlaybackParameters() { + // We don't apply speed/pitch adjustment using an audio processor in the following cases: + // - in tunneling mode, because audio processing can change the duration of audio yet the video + // frame presentation times are currently not modified (see also + // https://github.com/google/ExoPlayer/issues/4803); + // - when playing encoded audio via passthrough/offload, because modifying the audio stream + // would require decoding/re-encoding; and + // - when outputting float PCM audio, because SonicAudioProcessor outputs 16-bit integer PCM. + return !tunneling + && MimeTypes.AUDIO_RAW.equals(configuration.inputFormat.sampleMimeType) + && !shouldUseFloatOutput(configuration.inputFormat.pcmEncoding); + } - if (playbackParameters.speed == 1f) { - return positionUs + playbackParametersOffsetUs - playbackParametersPositionUs; - } + /** + * Returns whether audio in the specified PCM encoding should be written to the audio track as + * float PCM. + */ + private boolean shouldUseFloatOutput(@C.PcmEncoding int pcmEncoding) { + return enableFloatOutput && Util.isEncodingHighResolutionPcm(pcmEncoding); + } - if (playbackParametersCheckpoints.isEmpty()) { - return playbackParametersOffsetUs - + audioProcessorChain.getMediaDuration(positionUs - playbackParametersPositionUs); + /** + * Applies and updates media position parameters. + * + * @param positionUs The current audio track position, in microseconds. + * @return The current media time, in microseconds. + */ + private long applyMediaPositionParameters(long positionUs) { + while (!mediaPositionParametersCheckpoints.isEmpty() + && positionUs >= mediaPositionParametersCheckpoints.getFirst().audioTrackPositionUs) { + // We are playing (or about to play) media with the new parameters, so update them. + mediaPositionParameters = mediaPositionParametersCheckpoints.remove(); + } + + long playoutDurationSinceLastCheckpointUs = + positionUs - mediaPositionParameters.audioTrackPositionUs; + if (mediaPositionParameters.playbackParameters.equals(PlaybackParameters.DEFAULT)) { + return mediaPositionParameters.mediaTimeUs + playoutDurationSinceLastCheckpointUs; + } else if (mediaPositionParametersCheckpoints.isEmpty()) { + long mediaDurationSinceLastCheckpointUs = + audioProcessorChain.getMediaDuration(playoutDurationSinceLastCheckpointUs); + return mediaPositionParameters.mediaTimeUs + mediaDurationSinceLastCheckpointUs; + } else { + // The processor chain has been configured with new parameters, but we're still playing audio + // that was processed using previous parameters. We can't scale the playout duration using the + // processor chain in this case, so we fall back to scaling using the previous parameters' + // target speed instead. Since the processor chain may not have achieved the target speed + // precisely, we scale the duration to the next checkpoint (which will always be small) rather + // than the duration from the previous checkpoint (which may be arbitrarily large). This + // limits the amount of error that can be introduced due to a difference between the target + // and actual speeds. 
+ MediaPositionParameters nextMediaPositionParameters = + mediaPositionParametersCheckpoints.getFirst(); + long playoutDurationUntilNextCheckpointUs = + nextMediaPositionParameters.audioTrackPositionUs - positionUs; + long mediaDurationUntilNextCheckpointUs = + Util.getMediaDurationForPlayoutDuration( + playoutDurationUntilNextCheckpointUs, + mediaPositionParameters.playbackParameters.speed); + return nextMediaPositionParameters.mediaTimeUs - mediaDurationUntilNextCheckpointUs; } - - // We are playing data at a previous playback speed, so fall back to multiplying by the speed. - return playbackParametersOffsetUs - + Util.getMediaDurationForPlayoutDuration( - positionUs - playbackParametersPositionUs, playbackParameters.speed); } private long applySkipping(long positionUs) { @@ -1086,85 +1682,91 @@ private long applySkipping(long positionUs) { + configuration.framesToDurationUs(audioProcessorChain.getSkippedOutputFrameCount()); } - private boolean isInitialized() { + private boolean isAudioTrackInitialized() { return audioTrack != null; } private long getSubmittedFrames() { - return configuration.isInputPcm + return configuration.outputMode == OUTPUT_MODE_PCM ? (submittedPcmBytes / configuration.inputPcmFrameSize) : submittedEncodedFrames; } private long getWrittenFrames() { - return configuration.isInputPcm + return configuration.outputMode == OUTPUT_MODE_PCM ? (writtenPcmBytes / configuration.outputPcmFrameSize) : writtenEncodedFrames; } - private static AudioTrack initializeKeepSessionIdAudioTrack(int audioSessionId) { - int sampleRate = 4000; // Equal to private AudioTrack.MIN_SAMPLE_RATE. - int channelConfig = AudioFormat.CHANNEL_OUT_MONO; - @C.PcmEncoding int encoding = C.ENCODING_PCM_16BIT; - int bufferSize = 2; // Use a two byte buffer, as it is not actually used for playback. - return new AudioTrack(C.STREAM_TYPE_DEFAULT, sampleRate, channelConfig, encoding, bufferSize, - MODE_STATIC, audioSessionId); - } - - private static int getChannelConfig(int channelCount, boolean isInputPcm) { - if (Util.SDK_INT <= 28 && !isInputPcm) { - // In passthrough mode the channel count used to configure the audio track doesn't affect how - // the stream is handled, except that some devices do overly-strict channel configuration - // checks. Therefore we override the channel count so that a known-working channel - // configuration is chosen in all cases. See [Internal: b/29116190]. - if (channelCount == 7) { - channelCount = 8; - } else if (channelCount == 3 || channelCount == 4 || channelCount == 5) { - channelCount = 6; - } + private boolean useOffloadedPlayback(Format format, AudioAttributes audioAttributes) { + if (Util.SDK_INT < 29 || offloadMode == OFFLOAD_MODE_DISABLED) { + return false; } - - // Workaround for Nexus Player not reporting support for mono passthrough. - // (See [Internal: b/34268671].) 
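
A made-up numeric example of the speed scaling that the checkpoint interpolation above relies on, in the spirit of Util.getMediaDurationForPlayoutDuration:

final class SpeedScalingExample {
  private SpeedScalingExample() {}

  public static void main(String[] args) {
    // At 2x speed, half a second of audio written to the AudioTrack (playout time)
    // corresponds to a full second of consumed media time.
    long playoutDurationUs = 500_000;
    float speed = 2.0f;
    long mediaDurationUs = (long) (playoutDurationUs * (double) speed);
    System.out.println(mediaDurationUs); // 1000000
  }
}
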
- if (Util.SDK_INT <= 26 && "fugu".equals(Util.DEVICE) && !isInputPcm && channelCount == 1) { - channelCount = 2; + @C.Encoding + int encoding = MimeTypes.getEncoding(checkNotNull(format.sampleMimeType), format.codecs); + if (encoding == C.ENCODING_INVALID) { + return false; + } + int channelConfig = Util.getAudioTrackChannelConfig(format.channelCount); + if (channelConfig == AudioFormat.CHANNEL_INVALID) { + return false; } + AudioFormat audioFormat = getAudioFormat(format.sampleRate, channelConfig, encoding); - return Util.getAudioTrackChannelConfig(channelCount); + switch (getOffloadedPlaybackSupport( + audioFormat, audioAttributes.getAudioAttributesV21().audioAttributes)) { + case AudioManager.PLAYBACK_OFFLOAD_NOT_SUPPORTED: + return false; + case AudioManager.PLAYBACK_OFFLOAD_SUPPORTED: + boolean isGapless = format.encoderDelay != 0 || format.encoderPadding != 0; + boolean gaplessSupportRequired = offloadMode == OFFLOAD_MODE_ENABLED_GAPLESS_REQUIRED; + return !isGapless || !gaplessSupportRequired; + case AudioManager.PLAYBACK_OFFLOAD_GAPLESS_SUPPORTED: + return true; + default: + throw new IllegalStateException(); + } } - private static int getMaximumEncodedRateBytesPerSecond(@C.Encoding int encoding) { - switch (encoding) { - case C.ENCODING_AC3: - return 640 * 1000 / 8; - case C.ENCODING_E_AC3: - case C.ENCODING_E_AC3_JOC: - return 6144 * 1000 / 8; - case C.ENCODING_AC4: - return 2688 * 1000 / 8; - case C.ENCODING_DTS: - // DTS allows an 'open' bitrate, but we assume the maximum listed value: 1536 kbit/s. - return 1536 * 1000 / 8; - case C.ENCODING_DTS_HD: - return 18000 * 1000 / 8; - case C.ENCODING_DOLBY_TRUEHD: - return 24500 * 1000 / 8; - case C.ENCODING_INVALID: - case C.ENCODING_PCM_16BIT: - case C.ENCODING_PCM_24BIT: - case C.ENCODING_PCM_32BIT: - case C.ENCODING_PCM_8BIT: - case C.ENCODING_PCM_FLOAT: - case Format.NO_VALUE: - default: - throw new IllegalArgumentException(); + @RequiresApi(29) + @SuppressLint("InlinedApi") + private int getOffloadedPlaybackSupport( + AudioFormat audioFormat, android.media.AudioAttributes audioAttributes) { + if (Util.SDK_INT >= 31) { + return AudioManager.getPlaybackOffloadSupport(audioFormat, audioAttributes); } + if (!AudioManager.isOffloadedPlaybackSupported(audioFormat, audioAttributes)) { + return AudioManager.PLAYBACK_OFFLOAD_NOT_SUPPORTED; + } + // Manual testing has shown that Pixels on Android 11 support gapless offload. 
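
A minimal sketch, not from the patch, of the platform query that useOffloadedPlayback above builds on for API 29 devices; the sample rate, channel mask and encoding below are placeholder values.

import android.media.AudioFormat;
import android.media.AudioManager;
import androidx.annotation.RequiresApi;

final class OffloadSupportCheck {
  private OffloadSupportCheck() {}

  @RequiresApi(29)
  static boolean isAacLcOffloadSupported() {
    AudioFormat audioFormat =
        new AudioFormat.Builder()
            .setSampleRate(48_000)
            .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
            .setEncoding(AudioFormat.ENCODING_AAC_LC)
            .build();
    android.media.AudioAttributes audioAttributes =
        new android.media.AudioAttributes.Builder()
            .setUsage(android.media.AudioAttributes.USAGE_MEDIA)
            .setContentType(android.media.AudioAttributes.CONTENT_TYPE_MUSIC)
            .build();
    // Asks the platform whether this encoded format can be played in offload mode.
    return AudioManager.isOffloadedPlaybackSupported(audioFormat, audioAttributes);
  }
}
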
+ if (Util.SDK_INT == 30 && Util.MODEL.startsWith("Pixel")) { + return AudioManager.PLAYBACK_OFFLOAD_GAPLESS_SUPPORTED; + } + return AudioManager.PLAYBACK_OFFLOAD_SUPPORTED; + } + + private static boolean isOffloadedPlayback(AudioTrack audioTrack) { + return Util.SDK_INT >= 29 && audioTrack.isOffloadedPlayback(); } private static int getFramesPerEncodedSample(@C.Encoding int encoding, ByteBuffer buffer) { switch (encoding) { case C.ENCODING_MP3: - return MpegAudioHeader.getFrameSampleCount(buffer.get(buffer.position())); + int headerDataInBigEndian = Util.getBigEndianInt(buffer, buffer.position()); + int frameCount = MpegAudioUtil.parseMpegAudioFrameSampleCount(headerDataInBigEndian); + if (frameCount == C.LENGTH_UNSET) { + throw new IllegalArgumentException(); + } + return frameCount; + case C.ENCODING_AAC_LC: + return AacUtil.AAC_LC_AUDIO_SAMPLE_COUNT; + case C.ENCODING_AAC_HE_V1: + case C.ENCODING_AAC_HE_V2: + return AacUtil.AAC_HE_AUDIO_SAMPLE_COUNT; + case C.ENCODING_AAC_XHE: + return AacUtil.AAC_XHE_AUDIO_SAMPLE_COUNT; + case C.ENCODING_AAC_ELD: + return AacUtil.AAC_LD_AUDIO_SAMPLE_COUNT; case C.ENCODING_DTS: case C.ENCODING_DTS_HD: return DtsUtil.parseDtsAudioSampleCount(buffer); @@ -1180,22 +1782,34 @@ private static int getFramesPerEncodedSample(@C.Encoding int encoding, ByteBuffe ? 0 : (Ac3Util.parseTrueHdSyncframeAudioSampleCount(buffer, syncframeOffset) * Ac3Util.TRUEHD_RECHUNK_SAMPLE_COUNT); + case C.ENCODING_OPUS: + return OpusUtil.parsePacketAudioSampleCount(buffer); + case C.ENCODING_PCM_16BIT: + case C.ENCODING_PCM_16BIT_BIG_ENDIAN: + case C.ENCODING_PCM_24BIT: + case C.ENCODING_PCM_32BIT: + case C.ENCODING_PCM_8BIT: + case C.ENCODING_PCM_FLOAT: + case C.ENCODING_AAC_ER_BSAC: + case C.ENCODING_INVALID: + case Format.NO_VALUE: default: throw new IllegalStateException("Unexpected audio encoding: " + encoding); } } - @TargetApi(21) + @RequiresApi(21) private static int writeNonBlockingV21(AudioTrack audioTrack, ByteBuffer buffer, int size) { - return audioTrack.write(buffer, size, WRITE_NON_BLOCKING); + return audioTrack.write(buffer, size, AudioTrack.WRITE_NON_BLOCKING); } - @TargetApi(21) - private int writeNonBlockingWithAvSyncV21(AudioTrack audioTrack, ByteBuffer buffer, int size, - long presentationTimeUs) { + @RequiresApi(21) + private int writeNonBlockingWithAvSyncV21( + AudioTrack audioTrack, ByteBuffer buffer, int size, long presentationTimeUs) { if (Util.SDK_INT >= 26) { // The underlying platform AudioTrack writes AV sync headers directly. 
- return audioTrack.write(buffer, size, WRITE_NON_BLOCKING, presentationTimeUs * 1000); + return audioTrack.write( + buffer, size, AudioTrack.WRITE_NON_BLOCKING, presentationTimeUs * 1000); } if (avSyncHeader == null) { avSyncHeader = ByteBuffer.allocate(16); @@ -1210,7 +1824,8 @@ private int writeNonBlockingWithAvSyncV21(AudioTrack audioTrack, ByteBuffer buff } int avSyncHeaderBytesRemaining = avSyncHeader.remaining(); if (avSyncHeaderBytesRemaining > 0) { - int result = audioTrack.write(avSyncHeader, avSyncHeaderBytesRemaining, WRITE_NON_BLOCKING); + int result = + audioTrack.write(avSyncHeader, avSyncHeaderBytesRemaining, AudioTrack.WRITE_NON_BLOCKING); if (result < 0) { bytesUntilNextAvSync = 0; return result; @@ -1228,7 +1843,7 @@ private int writeNonBlockingWithAvSyncV21(AudioTrack audioTrack, ByteBuffer buff return result; } - @TargetApi(21) + @RequiresApi(21) private static void setVolumeInternalV21(AudioTrack audioTrack, float volume) { audioTrack.setVolume(volume); } @@ -1246,20 +1861,126 @@ private void playPendingData() { } } - /** Stores playback parameters with the position and media time at which they apply. */ - private static final class PlaybackParametersCheckpoint { + private static void releaseAudioTrackAsync( + AudioTrack audioTrack, ConditionVariable releasedConditionVariable) { + // AudioTrack.release can take some time, so we call it on a background thread. The background + // thread is shared statically to avoid creating many threads when multiple players are released + // at the same time. + releasedConditionVariable.close(); + synchronized (releaseExecutorLock) { + if (releaseExecutor == null) { + releaseExecutor = Util.newSingleThreadExecutor("ExoPlayer:AudioTrackReleaseThread"); + } + pendingReleaseCount++; + releaseExecutor.execute( + () -> { + try { + audioTrack.flush(); + audioTrack.release(); + } finally { + releasedConditionVariable.open(); + synchronized (releaseExecutorLock) { + pendingReleaseCount--; + if (pendingReleaseCount == 0) { + releaseExecutor.shutdown(); + releaseExecutor = null; + } + } + } + }); + } + } - private final PlaybackParameters playbackParameters; - private final long mediaTimeUs; - private final long positionUs; + @RequiresApi(29) + private final class StreamEventCallbackV29 { + private final Handler handler; + private final AudioTrack.StreamEventCallback callback; + + public StreamEventCallbackV29() { + handler = new Handler(Looper.myLooper()); + // Avoid StreamEventCallbackV29 inheriting directly from AudioTrack.StreamEventCallback as it + // would cause a NoClassDefFoundError warning on load of DefaultAudioSink for SDK < 29. + // See: https://github.com/google/ExoPlayer/issues/8058 + callback = + new AudioTrack.StreamEventCallback() { + @Override + public void onDataRequest(AudioTrack track, int size) { + if (!track.equals(audioTrack)) { + // Stale event. + return; + } + if (listener != null && playing) { + // Do not signal that the buffer is emptying if not playing as it is a transient + // state. + listener.onOffloadBufferEmptying(); + } + } + + @Override + public void onTearDown(AudioTrack track) { + if (!track.equals(audioTrack)) { + // Stale event. + return; + } + if (listener != null && playing) { + // The audio track was destroyed while in use. Thus a new AudioTrack needs to be + // created and its buffer filled, which will be done on the next handleBuffer call. + // Request this call explicitly in case ExoPlayer is sleeping waiting for a data + // request. 
+ listener.onOffloadBufferEmptying(); + } + } + }; + } + + public void register(AudioTrack audioTrack) { + audioTrack.registerStreamEventCallback(handler::post, callback); + } + + public void unregister(AudioTrack audioTrack) { + audioTrack.unregisterStreamEventCallback(callback); + handler.removeCallbacksAndMessages(/* token= */ null); + } + } - private PlaybackParametersCheckpoint(PlaybackParameters playbackParameters, long mediaTimeUs, - long positionUs) { + /** Stores parameters used to calculate the current media position. */ + private static final class MediaPositionParameters { + + /** The playback parameters. */ + public final PlaybackParameters playbackParameters; + /** Whether to skip silences. */ + public final boolean skipSilence; + /** The media time from which the playback parameters apply, in microseconds. */ + public final long mediaTimeUs; + /** The audio track position from which the playback parameters apply, in microseconds. */ + public final long audioTrackPositionUs; + + private MediaPositionParameters( + PlaybackParameters playbackParameters, + boolean skipSilence, + long mediaTimeUs, + long audioTrackPositionUs) { this.playbackParameters = playbackParameters; + this.skipSilence = skipSilence; this.mediaTimeUs = mediaTimeUs; - this.positionUs = positionUs; + this.audioTrackPositionUs = audioTrackPositionUs; } + } + @RequiresApi(21) + private static AudioFormat getAudioFormat(int sampleRate, int channelConfig, int encoding) { + return new AudioFormat.Builder() + .setSampleRate(sampleRate) + .setChannelMask(channelConfig) + .setEncoding(encoding) + .build(); + } + + private static int getAudioTrackMinBufferSize( + int sampleRateInHz, int channelConfig, int encoding) { + int minBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, encoding); + Assertions.checkState(minBufferSize != AudioTrack.ERROR_BAD_VALUE); + return minBufferSize; } private final class PositionTrackerListener implements AudioTrackPositionTracker.Listener { @@ -1319,6 +2040,13 @@ public void onInvalidLatency(long latencyUs) { Log.w(TAG, "Ignoring impossibly large audio latency: " + latencyUs); } + @Override + public void onPositionAdvancing(long playoutStartSystemTimeMs) { + if (listener != null) { + listener.onPositionAdvancing(playoutStartSystemTimeMs); + } + } + @Override public void onUnderrun(int bufferSize, long bufferSizeMs) { if (listener != null) { @@ -1331,154 +2059,255 @@ public void onUnderrun(int bufferSize, long bufferSizeMs) { /** Stores configuration relating to the audio format. 
*/ private static final class Configuration { - public final boolean isInputPcm; + public final Format inputFormat; public final int inputPcmFrameSize; - public final int inputSampleRate; + public final @OutputMode int outputMode; public final int outputPcmFrameSize; public final int outputSampleRate; public final int outputChannelConfig; - @C.Encoding public final int outputEncoding; + public final @C.Encoding int outputEncoding; public final int bufferSize; - public final boolean processingEnabled; - public final boolean canApplyPlaybackParameters; public final AudioProcessor[] availableAudioProcessors; public Configuration( - boolean isInputPcm, + Format inputFormat, int inputPcmFrameSize, - int inputSampleRate, + @OutputMode int outputMode, int outputPcmFrameSize, int outputSampleRate, int outputChannelConfig, int outputEncoding, - int specifiedBufferSize, - boolean processingEnabled, - boolean canApplyPlaybackParameters, + int bufferSize, AudioProcessor[] availableAudioProcessors) { - this.isInputPcm = isInputPcm; + this.inputFormat = inputFormat; this.inputPcmFrameSize = inputPcmFrameSize; - this.inputSampleRate = inputSampleRate; + this.outputMode = outputMode; this.outputPcmFrameSize = outputPcmFrameSize; this.outputSampleRate = outputSampleRate; this.outputChannelConfig = outputChannelConfig; this.outputEncoding = outputEncoding; - this.bufferSize = specifiedBufferSize != 0 ? specifiedBufferSize : getDefaultBufferSize(); - this.processingEnabled = processingEnabled; - this.canApplyPlaybackParameters = canApplyPlaybackParameters; + this.bufferSize = bufferSize; this.availableAudioProcessors = availableAudioProcessors; } + public Configuration copyWithBufferSize(int bufferSize) { + return new Configuration( + inputFormat, + inputPcmFrameSize, + outputMode, + outputPcmFrameSize, + outputSampleRate, + outputChannelConfig, + outputEncoding, + bufferSize, + availableAudioProcessors); + } + + /** Returns if the configurations are sufficiently compatible to reuse the audio track. 
*/ public boolean canReuseAudioTrack(Configuration audioTrackConfiguration) { - return audioTrackConfiguration.outputEncoding == outputEncoding + return audioTrackConfiguration.outputMode == outputMode + && audioTrackConfiguration.outputEncoding == outputEncoding && audioTrackConfiguration.outputSampleRate == outputSampleRate - && audioTrackConfiguration.outputChannelConfig == outputChannelConfig; + && audioTrackConfiguration.outputChannelConfig == outputChannelConfig + && audioTrackConfiguration.outputPcmFrameSize == outputPcmFrameSize; } public long inputFramesToDurationUs(long frameCount) { - return (frameCount * C.MICROS_PER_SECOND) / inputSampleRate; + return (frameCount * C.MICROS_PER_SECOND) / inputFormat.sampleRate; } public long framesToDurationUs(long frameCount) { return (frameCount * C.MICROS_PER_SECOND) / outputSampleRate; } - public long durationUsToFrames(long durationUs) { - return (durationUs * outputSampleRate) / C.MICROS_PER_SECOND; - } - public AudioTrack buildAudioTrack( boolean tunneling, AudioAttributes audioAttributes, int audioSessionId) throws InitializationException { AudioTrack audioTrack; - if (Util.SDK_INT >= 21) { - audioTrack = createAudioTrackV21(tunneling, audioAttributes, audioSessionId); - } else { - int streamType = Util.getStreamTypeForAudioUsage(audioAttributes.usage); - if (audioSessionId == C.AUDIO_SESSION_ID_UNSET) { - audioTrack = - new AudioTrack( - streamType, - outputSampleRate, - outputChannelConfig, - outputEncoding, - bufferSize, - MODE_STREAM); - } else { - // Re-attach to the same audio session. - audioTrack = - new AudioTrack( - streamType, - outputSampleRate, - outputChannelConfig, - outputEncoding, - bufferSize, - MODE_STREAM, - audioSessionId); - } + try { + audioTrack = createAudioTrack(tunneling, audioAttributes, audioSessionId); + } catch (UnsupportedOperationException | IllegalArgumentException e) { + throw new InitializationException( + AudioTrack.STATE_UNINITIALIZED, + outputSampleRate, + outputChannelConfig, + bufferSize, + inputFormat, + /* isRecoverable= */ outputModeIsOffload(), + e); } int state = audioTrack.getState(); - if (state != STATE_INITIALIZED) { + if (state != AudioTrack.STATE_INITIALIZED) { try { audioTrack.release(); } catch (Exception e) { // The track has already failed to initialize, so it wouldn't be that surprising if // release were to fail too. Swallow the exception. 
} - throw new InitializationException(state, outputSampleRate, outputChannelConfig, bufferSize); + throw new InitializationException( + state, + outputSampleRate, + outputChannelConfig, + bufferSize, + inputFormat, + /* isRecoverable= */ outputModeIsOffload(), + /* audioTrackException= */ null); } return audioTrack; } - @TargetApi(21) - private AudioTrack createAudioTrackV21( + private AudioTrack createAudioTrack( boolean tunneling, AudioAttributes audioAttributes, int audioSessionId) { - android.media.AudioAttributes attributes; - if (tunneling) { - attributes = - new android.media.AudioAttributes.Builder() - .setContentType(android.media.AudioAttributes.CONTENT_TYPE_MOVIE) - .setFlags(android.media.AudioAttributes.FLAG_HW_AV_SYNC) - .setUsage(android.media.AudioAttributes.USAGE_MEDIA) - .build(); + if (Util.SDK_INT >= 29) { + return createAudioTrackV29(tunneling, audioAttributes, audioSessionId); + } else if (Util.SDK_INT >= 21) { + return createAudioTrackV21(tunneling, audioAttributes, audioSessionId); } else { - attributes = audioAttributes.getAudioAttributesV21(); + return createAudioTrackV9(audioAttributes, audioSessionId); } - AudioFormat format = - new AudioFormat.Builder() - .setChannelMask(outputChannelConfig) - .setEncoding(outputEncoding) - .setSampleRate(outputSampleRate) - .build(); + } + + @RequiresApi(29) + private AudioTrack createAudioTrackV29( + boolean tunneling, AudioAttributes audioAttributes, int audioSessionId) { + AudioFormat audioFormat = + getAudioFormat(outputSampleRate, outputChannelConfig, outputEncoding); + android.media.AudioAttributes audioTrackAttributes = + getAudioTrackAttributesV21(audioAttributes, tunneling); + return new AudioTrack.Builder() + .setAudioAttributes(audioTrackAttributes) + .setAudioFormat(audioFormat) + .setTransferMode(AudioTrack.MODE_STREAM) + .setBufferSizeInBytes(bufferSize) + .setSessionId(audioSessionId) + .setOffloadedPlayback(outputMode == OUTPUT_MODE_OFFLOAD) + .build(); + } + + @RequiresApi(21) + private AudioTrack createAudioTrackV21( + boolean tunneling, AudioAttributes audioAttributes, int audioSessionId) { return new AudioTrack( - attributes, - format, + getAudioTrackAttributesV21(audioAttributes, tunneling), + getAudioFormat(outputSampleRate, outputChannelConfig, outputEncoding), bufferSize, - MODE_STREAM, - audioSessionId != C.AUDIO_SESSION_ID_UNSET - ? audioSessionId - : AudioManager.AUDIO_SESSION_ID_GENERATE); - } - - private int getDefaultBufferSize() { - if (isInputPcm) { - int minBufferSize = - AudioTrack.getMinBufferSize(outputSampleRate, outputChannelConfig, outputEncoding); - Assertions.checkState(minBufferSize != ERROR_BAD_VALUE); - int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR; - int minAppBufferSize = - (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * outputPcmFrameSize; - int maxAppBufferSize = - (int) - Math.max( - minBufferSize, durationUsToFrames(MAX_BUFFER_DURATION_US) * outputPcmFrameSize); - return Util.constrainValue(multipliedBufferSize, minAppBufferSize, maxAppBufferSize); + AudioTrack.MODE_STREAM, + audioSessionId); + } + + @SuppressWarnings("deprecation") // Using deprecated AudioTrack constructor. 
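createAudioTrackV29 above opts into offloaded playback through AudioTrack.Builder when the sink's output mode is offload. A hedged sketch of how a caller outside ExoPlayer might pair the same builder with the platform's offload capability check (API 29+; OffloadTrackFactory and its method name are hypothetical):

    import android.media.AudioAttributes;
    import android.media.AudioFormat;
    import android.media.AudioManager;
    import android.media.AudioTrack;
    import androidx.annotation.RequiresApi;

    @RequiresApi(29)
    final class OffloadTrackFactory {
      /** Builds an offloaded track when the device reports support, else a regular streaming track. */
      static AudioTrack create(AudioFormat format, AudioAttributes attributes, int bufferSizeBytes) {
        boolean offload = AudioManager.isOffloadedPlaybackSupported(format, attributes);
        return new AudioTrack.Builder()
            .setAudioAttributes(attributes)
            .setAudioFormat(format)
            .setTransferMode(AudioTrack.MODE_STREAM)
            .setBufferSizeInBytes(bufferSizeBytes)
            .setOffloadedPlayback(offload)
            .build();
      }
    }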
+ private AudioTrack createAudioTrackV9(AudioAttributes audioAttributes, int audioSessionId) { + int streamType = Util.getStreamTypeForAudioUsage(audioAttributes.usage); + if (audioSessionId == C.AUDIO_SESSION_ID_UNSET) { + return new AudioTrack( + streamType, + outputSampleRate, + outputChannelConfig, + outputEncoding, + bufferSize, + AudioTrack.MODE_STREAM); } else { - int rate = getMaximumEncodedRateBytesPerSecond(outputEncoding); - if (outputEncoding == C.ENCODING_AC3) { - rate *= AC3_BUFFER_MULTIPLICATION_FACTOR; + // Re-attach to the same audio session. + return new AudioTrack( + streamType, + outputSampleRate, + outputChannelConfig, + outputEncoding, + bufferSize, + AudioTrack.MODE_STREAM, + audioSessionId); + } + } + + @RequiresApi(21) + private static android.media.AudioAttributes getAudioTrackAttributesV21( + AudioAttributes audioAttributes, boolean tunneling) { + if (tunneling) { + return getAudioTrackTunnelingAttributesV21(); + } else { + return audioAttributes.getAudioAttributesV21().audioAttributes; + } + } + + @RequiresApi(21) + private static android.media.AudioAttributes getAudioTrackTunnelingAttributesV21() { + return new android.media.AudioAttributes.Builder() + .setContentType(android.media.AudioAttributes.CONTENT_TYPE_MOVIE) + .setFlags(android.media.AudioAttributes.FLAG_HW_AV_SYNC) + .setUsage(android.media.AudioAttributes.USAGE_MEDIA) + .build(); + } + + public boolean outputModeIsOffload() { + return outputMode == OUTPUT_MODE_OFFLOAD; + } + } + + private static final class PendingExceptionHolder { + + private final long throwDelayMs; + + @Nullable private T pendingException; + private long throwDeadlineMs; + + public PendingExceptionHolder(long throwDelayMs) { + this.throwDelayMs = throwDelayMs; + } + + public void throwExceptionIfDeadlineIsReached(T exception) throws T { + long nowMs = SystemClock.elapsedRealtime(); + if (pendingException == null) { + pendingException = exception; + throwDeadlineMs = nowMs + throwDelayMs; + } + if (nowMs >= throwDeadlineMs) { + if (pendingException != exception) { + // All retry exception are probably the same, thus only save the last one to save memory. + pendingException.addSuppressed(exception); } - return (int) (PASSTHROUGH_BUFFER_DURATION_US * rate / C.MICROS_PER_SECOND); + T pendingException = this.pendingException; + clear(); + throw pendingException; + } + } + + public void clear() { + pendingException = null; + } + } + + @RequiresApi(23) + private static final class AudioDeviceInfoApi23 { + + public final AudioDeviceInfo audioDeviceInfo; + + public AudioDeviceInfoApi23(AudioDeviceInfo audioDeviceInfo) { + this.audioDeviceInfo = audioDeviceInfo; + } + } + + @RequiresApi(23) + private static final class Api23 { + private Api23() {} + + @DoNotInline + public static void setPreferredDeviceOnAudioTrack( + AudioTrack audioTrack, @Nullable AudioDeviceInfoApi23 audioDeviceInfo) { + audioTrack.setPreferredDevice( + audioDeviceInfo == null ? 
null : audioDeviceInfo.audioDeviceInfo); + } + } + + @RequiresApi(31) + private static final class Api31 { + private Api31() {} + + @DoNotInline + public static void setLogSessionIdOnAudioTrack(AudioTrack audioTrack, PlayerId playerId) { + LogSessionId logSessionId = playerId.getLogSessionId(); + if (!logSessionId.equals(LogSessionId.LOG_SESSION_ID_NONE)) { + audioTrack.setLogSessionId(logSessionId); } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioTrackBufferSizeProvider.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioTrackBufferSizeProvider.java new file mode 100644 index 0000000000..2e774551fa --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioTrackBufferSizeProvider.java @@ -0,0 +1,277 @@ +/* + * Copyright (C) 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.audio; + +import static com.google.android.exoplayer2.audio.DefaultAudioSink.OUTPUT_MODE_OFFLOAD; +import static com.google.android.exoplayer2.audio.DefaultAudioSink.OUTPUT_MODE_PASSTHROUGH; +import static com.google.android.exoplayer2.audio.DefaultAudioSink.OUTPUT_MODE_PCM; +import static com.google.android.exoplayer2.util.Util.constrainValue; +import static com.google.common.math.IntMath.divide; +import static com.google.common.primitives.Ints.checkedCast; +import static java.lang.Math.max; + +import android.media.AudioTrack; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.audio.DefaultAudioSink.OutputMode; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.math.RoundingMode; + +/** Provide the buffer size to use when creating an {@link AudioTrack}. */ +public class DefaultAudioTrackBufferSizeProvider + implements DefaultAudioSink.AudioTrackBufferSizeProvider { + + /** Default minimum length for the {@link AudioTrack} buffer, in microseconds. */ + private static final int MIN_PCM_BUFFER_DURATION_US = 250_000; + /** Default maximum length for the {@link AudioTrack} buffer, in microseconds. */ + private static final int MAX_PCM_BUFFER_DURATION_US = 750_000; + /** Default multiplication factor to apply to the minimum buffer size requested. */ + private static final int PCM_BUFFER_MULTIPLICATION_FACTOR = 4; + /** Default length for passthrough {@link AudioTrack} buffers, in microseconds. */ + private static final int PASSTHROUGH_BUFFER_DURATION_US = 250_000; + /** Default length for offload {@link AudioTrack} buffers, in microseconds. */ + private static final int OFFLOAD_BUFFER_DURATION_US = 50_000_000; + /** + * Default multiplication factor to apply to AC3 passthrough buffer to avoid underruns on some + * devices (e.g., Broadcom 7271). + */ + private static final int AC3_BUFFER_MULTIPLICATION_FACTOR = 2; + + /** A builder to create {@link DefaultAudioTrackBufferSizeProvider} instances. 
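The PCM constants above are durations; they only translate into byte counts once a sample rate and frame size are fixed. A worked example for 48 kHz stereo 16-bit PCM, an assumed configuration rather than anything mandated by the provider:

    final class PcmBufferMath {
      public static void main(String[] args) {
        int sampleRate = 48_000;  // assumed sample rate
        int frameSize = 4;        // 16-bit stereo PCM: 2 bytes * 2 channels
        long minBytes = 250_000L * sampleRate * frameSize / 1_000_000; // 48_000 bytes
        long maxBytes = 750_000L * sampleRate * frameSize / 1_000_000; // 144_000 bytes
        System.out.println(minBytes + " .. " + maxBytes);
      }
    }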
*/ + public static class Builder { + + private int minPcmBufferDurationUs; + private int maxPcmBufferDurationUs; + private int pcmBufferMultiplicationFactor; + private int passthroughBufferDurationUs; + private int offloadBufferDurationUs; + private int ac3BufferMultiplicationFactor; + + /** Creates a new builder. */ + public Builder() { + minPcmBufferDurationUs = MIN_PCM_BUFFER_DURATION_US; + maxPcmBufferDurationUs = MAX_PCM_BUFFER_DURATION_US; + pcmBufferMultiplicationFactor = PCM_BUFFER_MULTIPLICATION_FACTOR; + passthroughBufferDurationUs = PASSTHROUGH_BUFFER_DURATION_US; + offloadBufferDurationUs = OFFLOAD_BUFFER_DURATION_US; + ac3BufferMultiplicationFactor = AC3_BUFFER_MULTIPLICATION_FACTOR; + } + + /** + * Sets the minimum length for PCM {@link AudioTrack} buffers, in microseconds. Default is + * {@value #MIN_PCM_BUFFER_DURATION_US}. + */ + @CanIgnoreReturnValue + public Builder setMinPcmBufferDurationUs(int minPcmBufferDurationUs) { + this.minPcmBufferDurationUs = minPcmBufferDurationUs; + return this; + } + + /** + * Sets the maximum length for PCM {@link AudioTrack} buffers, in microseconds. Default is + * {@value #MAX_PCM_BUFFER_DURATION_US}. + */ + @CanIgnoreReturnValue + public Builder setMaxPcmBufferDurationUs(int maxPcmBufferDurationUs) { + this.maxPcmBufferDurationUs = maxPcmBufferDurationUs; + return this; + } + + /** + * Sets the multiplication factor to apply to the minimum buffer size requested. Default is + * {@value #PCM_BUFFER_MULTIPLICATION_FACTOR}. + */ + @CanIgnoreReturnValue + public Builder setPcmBufferMultiplicationFactor(int pcmBufferMultiplicationFactor) { + this.pcmBufferMultiplicationFactor = pcmBufferMultiplicationFactor; + return this; + } + + /** + * Sets the length for passthrough {@link AudioTrack} buffers, in microseconds. Default is + * {@value #PASSTHROUGH_BUFFER_DURATION_US}. + */ + @CanIgnoreReturnValue + public Builder setPassthroughBufferDurationUs(int passthroughBufferDurationUs) { + this.passthroughBufferDurationUs = passthroughBufferDurationUs; + return this; + } + + /** + * The length for offload {@link AudioTrack} buffers, in microseconds. Default is {@value + * #OFFLOAD_BUFFER_DURATION_US}. + */ + @CanIgnoreReturnValue + public Builder setOffloadBufferDurationUs(int offloadBufferDurationUs) { + this.offloadBufferDurationUs = offloadBufferDurationUs; + return this; + } + + /** + * Sets the multiplication factor to apply to the passthrough buffer for AC3 to avoid underruns + * on some devices (e.g., Broadcom 7271). Default is {@value #AC3_BUFFER_MULTIPLICATION_FACTOR}. + */ + @CanIgnoreReturnValue + public Builder setAc3BufferMultiplicationFactor(int ac3BufferMultiplicationFactor) { + this.ac3BufferMultiplicationFactor = ac3BufferMultiplicationFactor; + return this; + } + + /** Build the {@link DefaultAudioTrackBufferSizeProvider}. */ + public DefaultAudioTrackBufferSizeProvider build() { + return new DefaultAudioTrackBufferSizeProvider(this); + } + } + + /** The minimum length for PCM {@link AudioTrack} buffers, in microseconds. */ + protected final int minPcmBufferDurationUs; + /** The maximum length for PCM {@link AudioTrack} buffers, in microseconds. */ + protected final int maxPcmBufferDurationUs; + /** The multiplication factor to apply to the minimum buffer size requested. */ + protected final int pcmBufferMultiplicationFactor; + /** The length for passthrough {@link AudioTrack} buffers, in microseconds. */ + protected final int passthroughBufferDurationUs; + /** The length for offload {@link AudioTrack} buffers, in microseconds. 
*/ + protected final int offloadBufferDurationUs; + /** + * The multiplication factor to apply to AC3 passthrough buffer to avoid underruns on some devices + * (e.g., Broadcom 7271). + */ + public final int ac3BufferMultiplicationFactor; + + protected DefaultAudioTrackBufferSizeProvider(Builder builder) { + minPcmBufferDurationUs = builder.minPcmBufferDurationUs; + maxPcmBufferDurationUs = builder.maxPcmBufferDurationUs; + pcmBufferMultiplicationFactor = builder.pcmBufferMultiplicationFactor; + passthroughBufferDurationUs = builder.passthroughBufferDurationUs; + offloadBufferDurationUs = builder.offloadBufferDurationUs; + ac3BufferMultiplicationFactor = builder.ac3BufferMultiplicationFactor; + } + + @Override + public int getBufferSizeInBytes( + int minBufferSizeInBytes, + @C.Encoding int encoding, + @OutputMode int outputMode, + int pcmFrameSize, + int sampleRate, + int bitrate, + double maxAudioTrackPlaybackSpeed) { + int bufferSize = + get1xBufferSizeInBytes( + minBufferSizeInBytes, encoding, outputMode, pcmFrameSize, sampleRate, bitrate); + // Maintain the buffer duration by scaling the size accordingly. + bufferSize = (int) (bufferSize * maxAudioTrackPlaybackSpeed); + // Buffer size must not be lower than the AudioTrack min buffer size for this format. + bufferSize = max(minBufferSizeInBytes, bufferSize); + // Increase if needed to make sure the buffers contains an integer number of frames. + return (bufferSize + pcmFrameSize - 1) / pcmFrameSize * pcmFrameSize; + } + + /** Returns the buffer size for playback at 1x speed. */ + protected int get1xBufferSizeInBytes( + int minBufferSizeInBytes, + int encoding, + int outputMode, + int pcmFrameSize, + int sampleRate, + int bitrate) { + switch (outputMode) { + case OUTPUT_MODE_PCM: + return getPcmBufferSizeInBytes(minBufferSizeInBytes, sampleRate, pcmFrameSize); + case OUTPUT_MODE_PASSTHROUGH: + return getPassthroughBufferSizeInBytes(encoding, bitrate); + case OUTPUT_MODE_OFFLOAD: + return getOffloadBufferSizeInBytes(encoding); + default: + throw new IllegalArgumentException(); + } + } + + /** Returns the buffer size for PCM playback. */ + protected int getPcmBufferSizeInBytes(int minBufferSizeInBytes, int samplingRate, int frameSize) { + int targetBufferSize = minBufferSizeInBytes * pcmBufferMultiplicationFactor; + int minAppBufferSize = durationUsToBytes(minPcmBufferDurationUs, samplingRate, frameSize); + int maxAppBufferSize = durationUsToBytes(maxPcmBufferDurationUs, samplingRate, frameSize); + return constrainValue(targetBufferSize, minAppBufferSize, maxAppBufferSize); + } + + /** Returns the buffer size for passthrough playback. */ + protected int getPassthroughBufferSizeInBytes(@C.Encoding int encoding, int bitrate) { + int bufferSizeUs = passthroughBufferDurationUs; + if (encoding == C.ENCODING_AC3) { + bufferSizeUs *= ac3BufferMultiplicationFactor; + } + int byteRate = + bitrate != Format.NO_VALUE + ? divide(bitrate, 8, RoundingMode.CEILING) + : getMaximumEncodedRateBytesPerSecond(encoding); + return checkedCast((long) bufferSizeUs * byteRate / C.MICROS_PER_SECOND); + } + + /** Returns the buffer size for offload playback. 
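For passthrough, the byte rate comes from the stream bitrate (bits rounded up to bytes) or, failing that, from the per-encoding maximum, and AC-3 additionally doubles the buffered duration. A worked example assuming a 640 kbit/s E-AC3 stream and the default 250 ms duration:

    final class PassthroughBufferMath {
      public static void main(String[] args) {
        int bitrate = 640_000;                               // assumed E-AC3 bitrate, bits per second
        long byteRate = bitrate / 8;                         // 80_000 bytes per second (divides evenly here)
        long bufferBytes = 250_000L * byteRate / 1_000_000;  // 250 ms of data = 20_000 bytes
        System.out.println(bufferBytes);
        // Plain AC-3 would first scale the duration by ac3BufferMultiplicationFactor (2): 40_000 bytes.
      }
    }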
*/ + protected int getOffloadBufferSizeInBytes(@C.Encoding int encoding) { + int maxByteRate = getMaximumEncodedRateBytesPerSecond(encoding); + return checkedCast((long) offloadBufferDurationUs * maxByteRate / C.MICROS_PER_SECOND); + } + + protected static int durationUsToBytes(int durationUs, int samplingRate, int frameSize) { + return checkedCast((long) durationUs * samplingRate * frameSize / C.MICROS_PER_SECOND); + } + + protected static int getMaximumEncodedRateBytesPerSecond(@C.Encoding int encoding) { + switch (encoding) { + case C.ENCODING_MP3: + return MpegAudioUtil.MAX_RATE_BYTES_PER_SECOND; + case C.ENCODING_AAC_LC: + return AacUtil.AAC_LC_MAX_RATE_BYTES_PER_SECOND; + case C.ENCODING_AAC_HE_V1: + return AacUtil.AAC_HE_V1_MAX_RATE_BYTES_PER_SECOND; + case C.ENCODING_AAC_HE_V2: + return AacUtil.AAC_HE_V2_MAX_RATE_BYTES_PER_SECOND; + case C.ENCODING_AAC_XHE: + return AacUtil.AAC_XHE_MAX_RATE_BYTES_PER_SECOND; + case C.ENCODING_AAC_ELD: + return AacUtil.AAC_ELD_MAX_RATE_BYTES_PER_SECOND; + case C.ENCODING_AC3: + return Ac3Util.AC3_MAX_RATE_BYTES_PER_SECOND; + case C.ENCODING_E_AC3: + case C.ENCODING_E_AC3_JOC: + return Ac3Util.E_AC3_MAX_RATE_BYTES_PER_SECOND; + case C.ENCODING_AC4: + return Ac4Util.MAX_RATE_BYTES_PER_SECOND; + case C.ENCODING_DTS: + return DtsUtil.DTS_MAX_RATE_BYTES_PER_SECOND; + case C.ENCODING_DTS_HD: + return DtsUtil.DTS_HD_MAX_RATE_BYTES_PER_SECOND; + case C.ENCODING_DOLBY_TRUEHD: + return Ac3Util.TRUEHD_MAX_RATE_BYTES_PER_SECOND; + case C.ENCODING_OPUS: + return OpusUtil.MAX_BYTES_PER_SECOND; + case C.ENCODING_PCM_16BIT: + case C.ENCODING_PCM_16BIT_BIG_ENDIAN: + case C.ENCODING_PCM_24BIT: + case C.ENCODING_PCM_32BIT: + case C.ENCODING_PCM_8BIT: + case C.ENCODING_PCM_FLOAT: + case C.ENCODING_AAC_ER_BSAC: + case C.ENCODING_INVALID: + case Format.NO_VALUE: + default: + throw new IllegalArgumentException(); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/DtsUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/DtsUtil.java index 7af9d9f074..1a741d47d5 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/DtsUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/DtsUtil.java @@ -23,11 +23,18 @@ import java.nio.ByteBuffer; import java.util.Arrays; -/** - * Utility methods for parsing DTS frames. - */ +/** Utility methods for parsing DTS frames. */ public final class DtsUtil { + /** + * Maximum rate for a DTS audio stream, in bytes per second. + * + *

      DTS allows an 'open' bitrate, but we assume the maximum listed value: 1536 kbit/s. + */ + public static final int DTS_MAX_RATE_BYTES_PER_SECOND = 1536 * 1000 / 8; + /** Maximum rate for a DTS-HD audio stream, in bytes per second. */ + public static final int DTS_HD_MAX_RATE_BYTES_PER_SECOND = 18000 * 1000 / 8; + private static final int SYNC_VALUE_BE = 0x7FFE8001; private static final int SYNC_VALUE_14B_BE = 0x1FFFE800; private static final int SYNC_VALUE_LE = 0xFE7F0180; @@ -37,24 +44,22 @@ public final class DtsUtil { private static final byte FIRST_BYTE_LE = (byte) (SYNC_VALUE_LE >>> 24); private static final byte FIRST_BYTE_14B_LE = (byte) (SYNC_VALUE_14B_LE >>> 24); - /** - * Maps AMODE to the number of channels. See ETSI TS 102 114 table 5.4. - */ - private static final int[] CHANNELS_BY_AMODE = new int[] {1, 2, 2, 2, 2, 3, 3, 4, 4, 5, 6, 6, 6, - 7, 8, 8}; + /** Maps AMODE to the number of channels. See ETSI TS 102 114 table 5.4. */ + private static final int[] CHANNELS_BY_AMODE = + new int[] {1, 2, 2, 2, 2, 3, 3, 4, 4, 5, 6, 6, 6, 7, 8, 8}; - /** - * Maps SFREQ to the sampling frequency in Hz. See ETSI TS 102 144 table 5.5. - */ - private static final int[] SAMPLE_RATE_BY_SFREQ = new int[] {-1, 8000, 16000, 32000, -1, -1, - 11025, 22050, 44100, -1, -1, 12000, 24000, 48000, -1, -1}; + /** Maps SFREQ to the sampling frequency in Hz. See ETSI TS 102 144 table 5.5. */ + private static final int[] SAMPLE_RATE_BY_SFREQ = + new int[] { + -1, 8000, 16000, 32000, -1, -1, 11025, 22050, 44100, -1, -1, 12000, 24000, 48000, -1, -1 + }; - /** - * Maps RATE to 2 * bitrate in kbit/s. See ETSI TS 102 144 table 5.7. - */ - private static final int[] TWICE_BITRATE_KBPS_BY_RATE = new int[] {64, 112, 128, 192, 224, 256, - 384, 448, 512, 640, 768, 896, 1024, 1152, 1280, 1536, 1920, 2048, 2304, 2560, 2688, 2816, - 2823, 2944, 3072, 3840, 4096, 6144, 7680}; + /** Maps RATE to 2 * bitrate in kbit/s. See ETSI TS 102 144 table 5.7. */ + private static final int[] TWICE_BITRATE_KBPS_BY_RATE = + new int[] { + 64, 112, 128, 192, 224, 256, 384, 448, 512, 640, 768, 896, 1024, 1152, 1280, 1536, 1920, + 2048, 2304, 2560, 2688, 2816, 2823, 2944, 3072, 3840, 4096, 6144, 7680 + }; /** * Returns whether a given integer matches a DTS sync word. Synchronization and storage modes are @@ -81,7 +86,10 @@ public static boolean isSyncWord(int word) { * @return The DTS format parsed from data in the header. */ public static Format parseDtsFormat( - byte[] frame, String trackId, @Nullable String language, @Nullable DrmInitData drmInitData) { + byte[] frame, + @Nullable String trackId, + @Nullable String language, + @Nullable DrmInitData drmInitData) { ParsableBitArray frameBits = getNormalizedFrameHeader(frame); frameBits.skipBits(32 + 1 + 5 + 1 + 7 + 14); // SYNC, FTYPE, SHORT, CPF, NBLKS, FSIZE int amode = frameBits.readBits(6); @@ -89,12 +97,21 @@ public static Format parseDtsFormat( int sfreq = frameBits.readBits(4); int sampleRate = SAMPLE_RATE_BY_SFREQ[sfreq]; int rate = frameBits.readBits(5); - int bitrate = rate >= TWICE_BITRATE_KBPS_BY_RATE.length ? Format.NO_VALUE - : TWICE_BITRATE_KBPS_BY_RATE[rate] * 1000 / 2; + int bitrate = + rate >= TWICE_BITRATE_KBPS_BY_RATE.length + ? Format.NO_VALUE + : TWICE_BITRATE_KBPS_BY_RATE[rate] * 1000 / 2; frameBits.skipBits(10); // MIX, DYNF, TIMEF, AUXF, HDCD, EXT_AUDIO_ID, EXT_AUDIO, ASPF channelCount += frameBits.readBits(2) > 0 ? 
1 : 0; // LFF - return Format.createAudioSampleFormat(trackId, MimeTypes.AUDIO_DTS, null, bitrate, - Format.NO_VALUE, channelCount, sampleRate, null, drmInitData, 0, language); + return new Format.Builder() + .setId(trackId) + .setSampleMimeType(MimeTypes.AUDIO_DTS) + .setAverageBitrate(bitrate) + .setChannelCount(channelCount) + .setSampleRate(sampleRate) + .setDrmInitData(drmInitData) + .setLanguage(language) + .build(); } /** @@ -213,5 +230,4 @@ private static boolean isLittleEndianFrameHeader(byte[] frameHeader) { } private DtsUtil() {} - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/FloatResamplingAudioProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/FloatResamplingAudioProcessor.java index ca6b4f3f13..ca2a54d572 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/FloatResamplingAudioProcessor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/FloatResamplingAudioProcessor.java @@ -18,6 +18,7 @@ import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.nio.ByteBuffer; /** @@ -36,6 +37,7 @@ private static final double PCM_32_BIT_INT_TO_PCM_32_BIT_FLOAT_FACTOR = 1.0 / 0x7FFFFFFF; @Override + @CanIgnoreReturnValue public AudioFormat onConfigure(AudioFormat inputAudioFormat) throws UnhandledAudioFormatException { @C.PcmEncoding int encoding = inputAudioFormat.encoding; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/ForwardingAudioSink.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/ForwardingAudioSink.java index 704bd11cc2..703d148be6 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/ForwardingAudioSink.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/ForwardingAudioSink.java @@ -15,8 +15,12 @@ */ package com.google.android.exoplayer2.audio; +import android.media.AudioDeviceInfo; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.PlaybackParameters; +import com.google.android.exoplayer2.analytics.PlayerId; import java.nio.ByteBuffer; /** An overridable {@link AudioSink} implementation forwarding all methods to another sink. 
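Because ForwardingAudioSink delegates every call, a subclass only overrides what it needs. A minimal sketch of such a decorator built on the three-argument handleBuffer introduced by this patch (the CountingAudioSink name is invented):

    import com.google.android.exoplayer2.audio.AudioSink;
    import com.google.android.exoplayer2.audio.ForwardingAudioSink;
    import java.nio.ByteBuffer;

    final class CountingAudioSink extends ForwardingAudioSink {
      private long handledAccessUnits;

      CountingAudioSink(AudioSink delegate) {
        super(delegate);
      }

      @Override
      public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs, int encodedAccessUnitCount)
          throws AudioSink.InitializationException, AudioSink.WriteException {
        boolean fullyConsumed = super.handleBuffer(buffer, presentationTimeUs, encodedAccessUnitCount);
        if (fullyConsumed) {
          handledAccessUnits += encodedAccessUnitCount; // Count only buffers the sink accepted in full.
        }
        return fullyConsumed;
      }

      long getHandledAccessUnits() {
        return handledAccessUnits;
      }
    }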
*/ @@ -34,8 +38,18 @@ public void setListener(Listener listener) { } @Override - public boolean supportsOutput(int channelCount, int encoding) { - return sink.supportsOutput(channelCount, encoding); + public void setPlayerId(@Nullable PlayerId playerId) { + sink.setPlayerId(playerId); + } + + @Override + public boolean supportsFormat(Format format) { + return sink.supportsFormat(format); + } + + @Override + public @SinkFormatSupport int getFormatSupport(Format format) { + return sink.getFormatSupport(format); } @Override @@ -44,23 +58,9 @@ public long getCurrentPositionUs(boolean sourceEnded) { } @Override - public void configure( - int inputEncoding, - int inputChannelCount, - int inputSampleRate, - int specifiedBufferSize, - @Nullable int[] outputChannels, - int trimStartFrames, - int trimEndFrames) + public void configure(Format inputFormat, int specifiedBufferSize, @Nullable int[] outputChannels) throws ConfigurationException { - sink.configure( - inputEncoding, - inputChannelCount, - inputSampleRate, - specifiedBufferSize, - outputChannels, - trimStartFrames, - trimEndFrames); + sink.configure(inputFormat, specifiedBufferSize, outputChannels); } @Override @@ -74,9 +74,10 @@ public void handleDiscontinuity() { } @Override - public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) + public boolean handleBuffer( + ByteBuffer buffer, long presentationTimeUs, int encodedAccessUnitCount) throws InitializationException, WriteException { - return sink.handleBuffer(buffer, presentationTimeUs); + return sink.handleBuffer(buffer, presentationTimeUs, encodedAccessUnitCount); } @Override @@ -104,11 +105,27 @@ public PlaybackParameters getPlaybackParameters() { return sink.getPlaybackParameters(); } + @Override + public void setSkipSilenceEnabled(boolean skipSilenceEnabled) { + sink.setSkipSilenceEnabled(skipSilenceEnabled); + } + + @Override + public boolean getSkipSilenceEnabled() { + return sink.getSkipSilenceEnabled(); + } + @Override public void setAudioAttributes(AudioAttributes audioAttributes) { sink.setAudioAttributes(audioAttributes); } + @Override + @Nullable + public AudioAttributes getAudioAttributes() { + return sink.getAudioAttributes(); + } + @Override public void setAudioSessionId(int audioSessionId) { sink.setAudioSessionId(audioSessionId); @@ -119,9 +136,20 @@ public void setAuxEffectInfo(AuxEffectInfo auxEffectInfo) { sink.setAuxEffectInfo(auxEffectInfo); } + @RequiresApi(23) + @Override + public void setPreferredDevice(@Nullable AudioDeviceInfo audioDeviceInfo) { + sink.setPreferredDevice(audioDeviceInfo); + } + @Override - public void enableTunnelingV21(int tunnelingAudioSessionId) { - sink.enableTunnelingV21(tunnelingAudioSessionId); + public void setOutputStreamOffsetUs(long outputStreamOffsetUs) { + sink.setOutputStreamOffsetUs(outputStreamOffsetUs); + } + + @Override + public void enableTunnelingV21() { + sink.enableTunnelingV21(); } @Override @@ -144,6 +172,11 @@ public void flush() { sink.flush(); } + @Override + public void experimentalFlushWithoutAudioTrackRelease() { + sink.experimentalFlushWithoutAudioTrackRelease(); + } + @Override public void reset() { sink.reset(); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/MediaCodecAudioRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/MediaCodecAudioRenderer.java index e8bd5056be..d76edd8840 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/MediaCodecAudioRenderer.java +++ 
b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/MediaCodecAudioRenderer.java @@ -15,41 +15,52 @@ */ package com.google.android.exoplayer2.audio; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_MAX_INPUT_SIZE_EXCEEDED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_NO; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.common.base.MoreObjects.firstNonNull; +import static java.lang.Math.max; + import android.annotation.SuppressLint; import android.content.Context; +import android.media.AudioDeviceInfo; +import android.media.AudioFormat; import android.media.MediaCodec; import android.media.MediaCrypto; import android.media.MediaFormat; -import android.media.audiofx.Virtualizer; import android.os.Handler; import androidx.annotation.CallSuper; +import androidx.annotation.DoNotInline; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.ExoPlayer; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.FormatHolder; +import com.google.android.exoplayer2.PlaybackException; import com.google.android.exoplayer2.PlaybackParameters; import com.google.android.exoplayer2.PlayerMessage.Target; import com.google.android.exoplayer2.RendererCapabilities; import com.google.android.exoplayer2.audio.AudioRendererEventListener.EventDispatcher; +import com.google.android.exoplayer2.audio.AudioSink.InitializationException; +import com.google.android.exoplayer2.audio.AudioSink.WriteException; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.drm.FrameworkMediaCrypto; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DecoderDiscardReasons; +import com.google.android.exoplayer2.mediacodec.MediaCodecAdapter; import com.google.android.exoplayer2.mediacodec.MediaCodecInfo; import com.google.android.exoplayer2.mediacodec.MediaCodecRenderer; import com.google.android.exoplayer2.mediacodec.MediaCodecSelector; import com.google.android.exoplayer2.mediacodec.MediaCodecUtil; import com.google.android.exoplayer2.mediacodec.MediaCodecUtil.DecoderQueryException; -import com.google.android.exoplayer2.mediacodec.MediaFormatUtil; -import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MediaClock; +import com.google.android.exoplayer2.util.MediaFormatUtil; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Collections; import java.util.List; /** @@ -59,25 +70,23 @@ * on the playback thread: * *

        - *
      • Message with type {@link C#MSG_SET_VOLUME} to set the volume. The message payload should be + *
      • Message with type {@link #MSG_SET_VOLUME} to set the volume. The message payload should be * a {@link Float} with 0 being silence and 1 being unity gain. - *
      • Message with type {@link C#MSG_SET_AUDIO_ATTRIBUTES} to set the audio attributes. The - * message payload should be an {@link com.google.android.exoplayer2.audio.AudioAttributes} - * instance that will configure the underlying audio track. - *
      • Message with type {@link C#MSG_SET_AUX_EFFECT_INFO} to set the auxiliary effect. The - * message payload should be an {@link AuxEffectInfo} instance that will configure the + *
      • Message with type {@link #MSG_SET_AUDIO_ATTRIBUTES} to set the audio attributes. The + * message payload should be an {@link AudioAttributes} instance that will configure the + * underlying audio track. + *
      • Message with type {@link #MSG_SET_AUX_EFFECT_INFO} to set the auxiliary effect. The message + * payload should be an {@link AuxEffectInfo} instance that will configure the underlying + * audio track. + *
      • Message with type {@link #MSG_SET_SKIP_SILENCE_ENABLED} to enable or disable skipping + * silences. The message payload should be a {@link Boolean}. + *
      • Message with type {@link #MSG_SET_AUDIO_SESSION_ID} to set the audio session ID. The + * message payload should be a session ID {@link Integer} that will be attached to the * underlying audio track. *
      */ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock { - /** - * Maximum number of tracked pending stream change times. Generally there is zero or one pending - * stream change. We track more to allow for pending changes that have fewer samples than the - * codec latency. - */ - private static final int MAX_PENDING_STREAM_CHANGE_COUNT = 10; - private static final String TAG = "MediaCodecAudioRenderer"; /** * Custom key used to indicate bits per sample by some decoders on Vivo devices. For example @@ -88,196 +97,98 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media private final Context context; private final EventDispatcher eventDispatcher; private final AudioSink audioSink; - private final long[] pendingStreamChangeTimesUs; private int codecMaxInputSize; - private boolean passthroughEnabled; private boolean codecNeedsDiscardChannelsWorkaround; - private boolean codecNeedsEosBufferTimestampWorkaround; - private android.media.MediaFormat passthroughMediaFormat; - @Nullable private Format inputFormat; + /** Codec used for DRM decryption only in passthrough and offload. */ + @Nullable private Format decryptOnlyCodecFormat; + private long currentPositionUs; private boolean allowFirstBufferPositionDiscontinuity; private boolean allowPositionDiscontinuity; - private long lastInputTimeUs; - private int pendingStreamChangeCount; + private boolean audioSinkNeedsReset; - /** - * @param context A context. - * @param mediaCodecSelector A decoder selector. - */ - @SuppressWarnings("deprecation") - public MediaCodecAudioRenderer(Context context, MediaCodecSelector mediaCodecSelector) { - this( - context, - mediaCodecSelector, - /* drmSessionManager= */ null, - /* playClearSamplesWithoutKeys= */ false); - } + private boolean experimentalKeepAudioTrackOnSeek; - /** - * @param context A context. - * @param mediaCodecSelector A decoder selector. - * @param drmSessionManager For use with encrypted content. May be null if support for encrypted - * content is not required. - * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions. - * For example a media file may start with a short clear region so as to allow playback to - * begin in parallel with key acquisition. This parameter specifies whether the renderer is - * permitted to play clear regions of encrypted media files before {@code drmSessionManager} - * has obtained the keys necessary to decrypt encrypted regions of the media. - * @deprecated Use {@link #MediaCodecAudioRenderer(Context, MediaCodecSelector, boolean, Handler, - * AudioRendererEventListener, AudioSink)} instead, and pass DRM-related parameters to the - * {@link MediaSource} factories. - */ - @Deprecated - @SuppressWarnings("deprecation") - public MediaCodecAudioRenderer( - Context context, - MediaCodecSelector mediaCodecSelector, - @Nullable DrmSessionManager drmSessionManager, - boolean playClearSamplesWithoutKeys) { - this( - context, - mediaCodecSelector, - drmSessionManager, - playClearSamplesWithoutKeys, - /* eventHandler= */ null, - /* eventListener= */ null); - } + @Nullable private WakeupListener wakeupListener; /** * @param context A context. * @param mediaCodecSelector A decoder selector. - * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be - * null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. 
*/ - @SuppressWarnings("deprecation") - public MediaCodecAudioRenderer( - Context context, - MediaCodecSelector mediaCodecSelector, - @Nullable Handler eventHandler, - @Nullable AudioRendererEventListener eventListener) { - this( - context, - mediaCodecSelector, - /* drmSessionManager= */ null, - /* playClearSamplesWithoutKeys= */ false, - eventHandler, - eventListener); + public MediaCodecAudioRenderer(Context context, MediaCodecSelector mediaCodecSelector) { + this(context, mediaCodecSelector, /* eventHandler= */ null, /* eventListener= */ null); } /** * @param context A context. * @param mediaCodecSelector A decoder selector. - * @param drmSessionManager For use with encrypted content. May be null if support for encrypted - * content is not required. - * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions. - * For example a media file may start with a short clear region so as to allow playback to - * begin in parallel with key acquisition. This parameter specifies whether the renderer is - * permitted to play clear regions of encrypted media files before {@code drmSessionManager} - * has obtained the keys necessary to decrypt encrypted regions of the media. * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. - * @deprecated Use {@link #MediaCodecAudioRenderer(Context, MediaCodecSelector, boolean, Handler, - * AudioRendererEventListener, AudioSink)} instead, and pass DRM-related parameters to the - * {@link MediaSource} factories. */ - @Deprecated - @SuppressWarnings("deprecation") public MediaCodecAudioRenderer( Context context, MediaCodecSelector mediaCodecSelector, - @Nullable DrmSessionManager drmSessionManager, - boolean playClearSamplesWithoutKeys, @Nullable Handler eventHandler, @Nullable AudioRendererEventListener eventListener) { this( context, mediaCodecSelector, - drmSessionManager, - playClearSamplesWithoutKeys, eventHandler, eventListener, - (AudioCapabilities) null); + AudioCapabilities.DEFAULT_AUDIO_CAPABILITIES); } /** * @param context A context. * @param mediaCodecSelector A decoder selector. - * @param drmSessionManager For use with encrypted content. May be null if support for encrypted - * content is not required. - * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions. - * For example a media file may start with a short clear region so as to allow playback to - * begin in parallel with key acquisition. This parameter specifies whether the renderer is - * permitted to play clear regions of encrypted media files before {@code drmSessionManager} - * has obtained the keys necessary to decrypt encrypted regions of the media. * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. - * @param audioCapabilities The audio capabilities for playback on this device. May be null if the - * default capabilities (no encoded audio passthrough support) should be assumed. + * @param audioCapabilities The audio capabilities for playback on this device. Use {@link + * AudioCapabilities#DEFAULT_AUDIO_CAPABILITIES} if default capabilities (no encoded audio + * passthrough support) should be assumed. 
* @param audioProcessors Optional {@link AudioProcessor}s that will process PCM audio before * output. - * @deprecated Use {@link #MediaCodecAudioRenderer(Context, MediaCodecSelector, boolean, Handler, - * AudioRendererEventListener, AudioSink)} instead, and pass DRM-related parameters to the - * {@link MediaSource} factories. */ - @Deprecated - @SuppressWarnings("deprecation") public MediaCodecAudioRenderer( Context context, MediaCodecSelector mediaCodecSelector, - @Nullable DrmSessionManager drmSessionManager, - boolean playClearSamplesWithoutKeys, @Nullable Handler eventHandler, @Nullable AudioRendererEventListener eventListener, - @Nullable AudioCapabilities audioCapabilities, + AudioCapabilities audioCapabilities, AudioProcessor... audioProcessors) { this( context, mediaCodecSelector, - drmSessionManager, - playClearSamplesWithoutKeys, eventHandler, eventListener, - new DefaultAudioSink(audioCapabilities, audioProcessors)); + new DefaultAudioSink.Builder() + .setAudioCapabilities( // For backward compatibility, null == default. + firstNonNull(audioCapabilities, AudioCapabilities.DEFAULT_AUDIO_CAPABILITIES)) + .setAudioProcessors(audioProcessors) + .build()); } /** * @param context A context. * @param mediaCodecSelector A decoder selector. - * @param drmSessionManager For use with encrypted content. May be null if support for encrypted - * content is not required. - * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions. - * For example a media file may start with a short clear region so as to allow playback to - * begin in parallel with key acquisition. This parameter specifies whether the renderer is - * permitted to play clear regions of encrypted media files before {@code drmSessionManager} - * has obtained the keys necessary to decrypt encrypted regions of the media. * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. * @param audioSink The sink to which audio will be output. - * @deprecated Use {@link #MediaCodecAudioRenderer(Context, MediaCodecSelector, boolean, Handler, - * AudioRendererEventListener, AudioSink)} instead, and pass DRM-related parameters to the - * {@link MediaSource} factories. */ - @Deprecated - @SuppressWarnings("deprecation") public MediaCodecAudioRenderer( Context context, MediaCodecSelector mediaCodecSelector, - @Nullable DrmSessionManager drmSessionManager, - boolean playClearSamplesWithoutKeys, @Nullable Handler eventHandler, @Nullable AudioRendererEventListener eventListener, AudioSink audioSink) { this( context, + MediaCodecAdapter.Factory.DEFAULT, mediaCodecSelector, - drmSessionManager, - playClearSamplesWithoutKeys, /* enableDecoderFallback= */ false, eventHandler, eventListener, @@ -295,7 +206,6 @@ public MediaCodecAudioRenderer( * @param eventListener A listener of events. May be null if delivery of events is not required. * @param audioSink The sink to which audio will be output. 
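With the DRM parameters gone, the intended way to customize audio output is to build the sink explicitly and pass it to this constructor. A sketch of that wiring, assuming the DefaultAudioSink.Builder methods available in the ExoPlayer version this patch targets (the factory class name and Handler choice are illustrative):

    import android.content.Context;
    import android.os.Handler;
    import android.os.Looper;
    import com.google.android.exoplayer2.audio.AudioCapabilities;
    import com.google.android.exoplayer2.audio.DefaultAudioSink;
    import com.google.android.exoplayer2.audio.MediaCodecAudioRenderer;
    import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;

    final class AudioRendererFactoryExample {
      static MediaCodecAudioRenderer create(Context context) {
        DefaultAudioSink sink =
            new DefaultAudioSink.Builder()
                .setAudioCapabilities(AudioCapabilities.DEFAULT_AUDIO_CAPABILITIES)
                .setEnableFloatOutput(false)
                .build();
        return new MediaCodecAudioRenderer(
            context,
            MediaCodecSelector.DEFAULT,
            new Handler(Looper.getMainLooper()),
            /* eventListener= */ null,
            sink);
      }
    }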
*/ - @SuppressWarnings("deprecation") public MediaCodecAudioRenderer( Context context, MediaCodecSelector mediaCodecSelector, @@ -305,9 +215,8 @@ public MediaCodecAudioRenderer( AudioSink audioSink) { this( context, + MediaCodecAdapter.Factory.DEFAULT, mediaCodecSelector, - /* drmSessionManager= */ null, - /* playClearSamplesWithoutKeys= */ false, enableDecoderFallback, eventHandler, eventListener, @@ -315,15 +224,12 @@ public MediaCodecAudioRenderer( } /** + * Creates a new instance. + * * @param context A context. + * @param codecAdapterFactory The {@link MediaCodecAdapter.Factory} used to create {@link + * MediaCodecAdapter} instances. * @param mediaCodecSelector A decoder selector. - * @param drmSessionManager For use with encrypted content. May be null if support for encrypted - * content is not required. - * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions. - * For example a media file may start with a short clear region so as to allow playback to - * begin in parallel with key acquisition. This parameter specifies whether the renderer is - * permitted to play clear regions of encrypted media files before {@code drmSessionManager} - * has obtained the keys necessary to decrypt encrypted regions of the media. * @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder * initialization fails. This may result in using a decoder that is slower/less efficient than * the primary decoder. @@ -331,197 +237,217 @@ public MediaCodecAudioRenderer( * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. * @param audioSink The sink to which audio will be output. - * @deprecated Use {@link #MediaCodecAudioRenderer(Context, MediaCodecSelector, boolean, Handler, - * AudioRendererEventListener, AudioSink)} instead, and pass DRM-related parameters to the - * {@link MediaSource} factories. */ - @Deprecated public MediaCodecAudioRenderer( Context context, + MediaCodecAdapter.Factory codecAdapterFactory, MediaCodecSelector mediaCodecSelector, - @Nullable DrmSessionManager drmSessionManager, - boolean playClearSamplesWithoutKeys, boolean enableDecoderFallback, @Nullable Handler eventHandler, @Nullable AudioRendererEventListener eventListener, AudioSink audioSink) { super( C.TRACK_TYPE_AUDIO, + codecAdapterFactory, mediaCodecSelector, - drmSessionManager, - playClearSamplesWithoutKeys, enableDecoderFallback, /* assumedMinimumCodecOperatingRate= */ 44100); - this.context = context.getApplicationContext(); + context = context.getApplicationContext(); + this.context = context; this.audioSink = audioSink; - lastInputTimeUs = C.TIME_UNSET; - pendingStreamChangeTimesUs = new long[MAX_PENDING_STREAM_CHANGE_COUNT]; eventDispatcher = new EventDispatcher(eventHandler, eventListener); audioSink.setListener(new AudioSinkListener()); } @Override - @Capabilities - protected int supportsFormat( - MediaCodecSelector mediaCodecSelector, - @Nullable DrmSessionManager drmSessionManager, - Format format) + public String getName() { + return TAG; + } + + /** + * Sets whether to enable the experimental feature that keeps and flushes the {@link + * android.media.AudioTrack} when a seek occurs, as opposed to releasing and reinitialising. Off + * by default. + * + *

      This method is experimental, and will be renamed or removed in a future release. + * + * @param enableKeepAudioTrackOnSeek Whether to keep the {@link android.media.AudioTrack} on seek. + */ + public void experimentalSetEnableKeepAudioTrackOnSeek(boolean enableKeepAudioTrackOnSeek) { + this.experimentalKeepAudioTrackOnSeek = enableKeepAudioTrackOnSeek; + } + + @Override + protected @Capabilities int supportsFormat(MediaCodecSelector mediaCodecSelector, Format format) throws DecoderQueryException { - String mimeType = format.sampleMimeType; - if (!MimeTypes.isAudio(mimeType)) { - return RendererCapabilities.create(FORMAT_UNSUPPORTED_TYPE); + if (!MimeTypes.isAudio(format.sampleMimeType)) { + return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE); } @TunnelingSupport int tunnelingSupport = Util.SDK_INT >= 21 ? TUNNELING_SUPPORTED : TUNNELING_NOT_SUPPORTED; - boolean supportsFormatDrm = - format.drmInitData == null - || FrameworkMediaCrypto.class.equals(format.exoMediaCryptoType) - || (format.exoMediaCryptoType == null - && supportsFormatDrm(drmSessionManager, format.drmInitData)); + boolean formatHasDrm = format.cryptoType != C.CRYPTO_TYPE_NONE; + boolean supportsFormatDrm = supportsFormatDrm(format); + // In direct mode, if the format has DRM then we need to use a decoder that only decrypts. + // Else we don't don't need a decoder at all. if (supportsFormatDrm - && allowPassthrough(format.channelCount, mimeType) - && mediaCodecSelector.getPassthroughDecoderInfo() != null) { - return RendererCapabilities.create(FORMAT_HANDLED, ADAPTIVE_NOT_SEAMLESS, tunnelingSupport); + && audioSink.supportsFormat(format) + && (!formatHasDrm || MediaCodecUtil.getDecryptOnlyDecoderInfo() != null)) { + return RendererCapabilities.create(C.FORMAT_HANDLED, ADAPTIVE_NOT_SEAMLESS, tunnelingSupport); } - if ((MimeTypes.AUDIO_RAW.equals(mimeType) - && !audioSink.supportsOutput(format.channelCount, format.pcmEncoding)) - || !audioSink.supportsOutput(format.channelCount, C.ENCODING_PCM_16BIT)) { - // Assume the decoder outputs 16-bit PCM, unless the input is raw. - return RendererCapabilities.create(FORMAT_UNSUPPORTED_SUBTYPE); + // If the input is PCM then it will be passed directly to the sink. Hence the sink must support + // the input format directly. + if (MimeTypes.AUDIO_RAW.equals(format.sampleMimeType) && !audioSink.supportsFormat(format)) { + return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_SUBTYPE); + } + // For all other input formats, we expect the decoder to output 16-bit PCM. + if (!audioSink.supportsFormat( + Util.getPcmFormat(C.ENCODING_PCM_16BIT, format.channelCount, format.sampleRate))) { + return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_SUBTYPE); } List decoderInfos = - getDecoderInfos(mediaCodecSelector, format, /* requiresSecureDecoder= */ false); + getDecoderInfos(mediaCodecSelector, format, /* requiresSecureDecoder= */ false, audioSink); if (decoderInfos.isEmpty()) { - return RendererCapabilities.create(FORMAT_UNSUPPORTED_SUBTYPE); + return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_SUBTYPE); } if (!supportsFormatDrm) { - return RendererCapabilities.create(FORMAT_UNSUPPORTED_DRM); + return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_DRM); } - // Check capabilities for the first decoder in the list, which takes priority. + // Check whether the first decoder supports the format. This is the preferred decoder for the + // format's MIME type, according to the MediaCodecSelector. 
MediaCodecInfo decoderInfo = decoderInfos.get(0); boolean isFormatSupported = decoderInfo.isFormatSupported(format); + boolean isPreferredDecoder = true; + if (!isFormatSupported) { + // Check whether any of the other decoders support the format. + for (int i = 1; i < decoderInfos.size(); i++) { + MediaCodecInfo otherDecoderInfo = decoderInfos.get(i); + if (otherDecoderInfo.isFormatSupported(format)) { + decoderInfo = otherDecoderInfo; + isFormatSupported = true; + isPreferredDecoder = false; + break; + } + } + } + @C.FormatSupport + int formatSupport = isFormatSupported ? C.FORMAT_HANDLED : C.FORMAT_EXCEEDS_CAPABILITIES; @AdaptiveSupport int adaptiveSupport = isFormatSupported && decoderInfo.isSeamlessAdaptationSupported(format) ? ADAPTIVE_SEAMLESS : ADAPTIVE_NOT_SEAMLESS; - @FormatSupport - int formatSupport = isFormatSupported ? FORMAT_HANDLED : FORMAT_EXCEEDS_CAPABILITIES; - return RendererCapabilities.create(formatSupport, adaptiveSupport, tunnelingSupport); + @HardwareAccelerationSupport + int hardwareAccelerationSupport = + decoderInfo.hardwareAccelerated + ? HARDWARE_ACCELERATION_SUPPORTED + : HARDWARE_ACCELERATION_NOT_SUPPORTED; + @DecoderSupport + int decoderSupport = isPreferredDecoder ? DECODER_SUPPORT_PRIMARY : DECODER_SUPPORT_FALLBACK; + return RendererCapabilities.create( + formatSupport, + adaptiveSupport, + tunnelingSupport, + hardwareAccelerationSupport, + decoderSupport); } @Override protected List getDecoderInfos( MediaCodecSelector mediaCodecSelector, Format format, boolean requiresSecureDecoder) throws DecoderQueryException { + return MediaCodecUtil.getDecoderInfosSortedByFormatSupport( + getDecoderInfos(mediaCodecSelector, format, requiresSecureDecoder, audioSink), format); + } + + /** + * Returns a list of decoders that can decode media in the specified format, in the priority order + * specified by the {@link MediaCodecSelector}. Note that since the {@link MediaCodecSelector} + * only has access to {@link Format#sampleMimeType}, the list is not ordered to account for + * whether each decoder supports the details of the format (e.g., taking into account the format's + * profile, level, channel count and so on). {@link + * MediaCodecUtil#getDecoderInfosSortedByFormatSupport} can be used to further sort the list into + * an order where decoders that fully support the format come first. + * + * @param mediaCodecSelector The decoder selector. + * @param format The {@link Format} for which a decoder is required. + * @param requiresSecureDecoder Whether a secure decoder is required. + * @param audioSink The {@link AudioSink} to which audio will be output. + * @return A list of {@link MediaCodecInfo}s corresponding to decoders. May be empty. + * @throws DecoderQueryException Thrown if there was an error querying decoders. + */ + private static List getDecoderInfos( + MediaCodecSelector mediaCodecSelector, + Format format, + boolean requiresSecureDecoder, + AudioSink audioSink) + throws DecoderQueryException { @Nullable String mimeType = format.sampleMimeType; if (mimeType == null) { - return Collections.emptyList(); + return ImmutableList.of(); } - if (allowPassthrough(format.channelCount, mimeType)) { - @Nullable - MediaCodecInfo passthroughDecoderInfo = mediaCodecSelector.getPassthroughDecoderInfo(); - if (passthroughDecoderInfo != null) { - return Collections.singletonList(passthroughDecoderInfo); + if (audioSink.supportsFormat(format)) { + // The format is supported directly, so a codec is only needed for decryption. 
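The support check above asks the sink twice: once for the format as-is (the passthrough/offload path) and once for the 16-bit PCM a platform decoder would emit. A hedged sketch of that decision in isolation (SinkSupportProbe and isPlayable are invented names; the ExoPlayer calls mirror the ones used above):

    import com.google.android.exoplayer2.C;
    import com.google.android.exoplayer2.Format;
    import com.google.android.exoplayer2.audio.AudioSink;
    import com.google.android.exoplayer2.util.MimeTypes;
    import com.google.android.exoplayer2.util.Util;

    final class SinkSupportProbe {
      /** Can the format reach the sink directly, or at least as decoded 16-bit PCM? */
      static boolean isPlayable(AudioSink sink, Format format) {
        if (sink.supportsFormat(format)) {
          return true; // Direct playback; at most a decrypt-only codec is needed.
        }
        if (MimeTypes.AUDIO_RAW.equals(format.sampleMimeType)) {
          return false; // Raw PCM must be supported as-is; there is nothing to decode.
        }
        return sink.supportsFormat(
            Util.getPcmFormat(C.ENCODING_PCM_16BIT, format.channelCount, format.sampleRate));
      }
    }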
+ @Nullable MediaCodecInfo codecInfo = MediaCodecUtil.getDecryptOnlyDecoderInfo(); + if (codecInfo != null) { + return ImmutableList.of(codecInfo); } } List decoderInfos = mediaCodecSelector.getDecoderInfos( mimeType, requiresSecureDecoder, /* requiresTunnelingDecoder= */ false); - decoderInfos = MediaCodecUtil.getDecoderInfosSortedByFormatSupport(decoderInfos, format); - if (MimeTypes.AUDIO_E_AC3_JOC.equals(mimeType)) { - // E-AC3 decoders can decode JOC streams, but in 2-D rather than 3-D. - List decoderInfosWithEac3 = new ArrayList<>(decoderInfos); - decoderInfosWithEac3.addAll( - mediaCodecSelector.getDecoderInfos( - MimeTypes.AUDIO_E_AC3, requiresSecureDecoder, /* requiresTunnelingDecoder= */ false)); - decoderInfos = decoderInfosWithEac3; + @Nullable String alternativeMimeType = MediaCodecUtil.getAlternativeCodecMimeType(format); + if (alternativeMimeType == null) { + return ImmutableList.copyOf(decoderInfos); } - return Collections.unmodifiableList(decoderInfos); + List alternativeDecoderInfos = + mediaCodecSelector.getDecoderInfos( + alternativeMimeType, requiresSecureDecoder, /* requiresTunnelingDecoder= */ false); + return ImmutableList.builder() + .addAll(decoderInfos) + .addAll(alternativeDecoderInfos) + .build(); } - /** - * Returns whether encoded audio passthrough should be used for playing back the input format. - * This implementation returns true if the {@link AudioSink} indicates that encoded audio output - * is supported. - * - * @param channelCount The number of channels in the input media, or {@link Format#NO_VALUE} if - * not known. - * @param mimeType The type of input media. - * @return Whether passthrough playback is supported. - */ - protected boolean allowPassthrough(int channelCount, String mimeType) { - return getPassthroughEncoding(channelCount, mimeType) != C.ENCODING_INVALID; + @Override + protected boolean shouldUseBypass(Format format) { + return audioSink.supportsFormat(format); } @Override - protected void configureCodec( + protected MediaCodecAdapter.Configuration getMediaCodecConfiguration( MediaCodecInfo codecInfo, - MediaCodec codec, Format format, @Nullable MediaCrypto crypto, float codecOperatingRate) { codecMaxInputSize = getCodecMaxInputSize(codecInfo, format, getStreamFormats()); codecNeedsDiscardChannelsWorkaround = codecNeedsDiscardChannelsWorkaround(codecInfo.name); - codecNeedsEosBufferTimestampWorkaround = codecNeedsEosBufferTimestampWorkaround(codecInfo.name); - passthroughEnabled = codecInfo.passthrough; - String codecMimeType = passthroughEnabled ? MimeTypes.AUDIO_RAW : codecInfo.codecMimeType; MediaFormat mediaFormat = - getMediaFormat(format, codecMimeType, codecMaxInputSize, codecOperatingRate); - codec.configure(mediaFormat, /* surface= */ null, crypto, /* flags= */ 0); - if (passthroughEnabled) { - // Store the input MIME type if we're using the passthrough codec. - passthroughMediaFormat = mediaFormat; - passthroughMediaFormat.setString(MediaFormat.KEY_MIME, format.sampleMimeType); - } else { - passthroughMediaFormat = null; - } + getMediaFormat(format, codecInfo.codecMimeType, codecMaxInputSize, codecOperatingRate); + // Store the input MIME type if we're only using the codec for decryption. + boolean decryptOnlyCodecEnabled = + MimeTypes.AUDIO_RAW.equals(codecInfo.mimeType) + && !MimeTypes.AUDIO_RAW.equals(format.sampleMimeType); + decryptOnlyCodecFormat = decryptOnlyCodecEnabled ? 
format : null; + return MediaCodecAdapter.Configuration.createForAudioDecoding( + codecInfo, mediaFormat, format, crypto); } @Override - protected @KeepCodecResult int canKeepCodec( - MediaCodec codec, MediaCodecInfo codecInfo, Format oldFormat, Format newFormat) { - // TODO: We currently rely on recreating the codec when encoder delay or padding is non-zero. - // Re-creating the codec is necessary to guarantee that onOutputFormatChanged is called, which - // is where encoder delay and padding are propagated to the sink. We should find a better way to - // propagate these values, and then allow the codec to be re-used in cases where this would - // otherwise be possible. - if (getCodecMaxInputSize(codecInfo, newFormat) > codecMaxInputSize - || oldFormat.encoderDelay != 0 - || oldFormat.encoderPadding != 0 - || newFormat.encoderDelay != 0 - || newFormat.encoderPadding != 0) { - return KEEP_CODEC_RESULT_NO; - } else if (codecInfo.isSeamlessAdaptationSupported( - oldFormat, newFormat, /* isNewFormatComplete= */ true)) { - return KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION; - } else if (canKeepCodecWithFlush(oldFormat, newFormat)) { - return KEEP_CODEC_RESULT_YES_WITH_FLUSH; - } else { - return KEEP_CODEC_RESULT_NO; + protected DecoderReuseEvaluation canReuseCodec( + MediaCodecInfo codecInfo, Format oldFormat, Format newFormat) { + DecoderReuseEvaluation evaluation = codecInfo.canReuseCodec(oldFormat, newFormat); + + @DecoderDiscardReasons int discardReasons = evaluation.discardReasons; + if (getCodecMaxInputSize(codecInfo, newFormat) > codecMaxInputSize) { + discardReasons |= DISCARD_REASON_MAX_INPUT_SIZE_EXCEEDED; } - } - /** - * Returns whether the codec can be flushed and reused when switching to a new format. Reuse is - * generally possible when the codec would be configured in an identical way after the format - * change (excluding {@link MediaFormat#KEY_MAX_INPUT_SIZE} and configuration that does not come - * from the {@link Format}). - * - * @param oldFormat The first format. - * @param newFormat The second format. - * @return Whether the codec can be flushed and reused when switching to a new format. - */ - protected boolean canKeepCodecWithFlush(Format oldFormat, Format newFormat) { - // Flush and reuse the codec if the audio format and initialization data matches. For Opus, we - // don't flush and reuse the codec because the decoder may discard samples after flushing, which - // would result in audio being dropped just after a stream change (see [Internal: b/143450854]). - return Util.areEqual(oldFormat.sampleMimeType, newFormat.sampleMimeType) - && oldFormat.channelCount == newFormat.channelCount - && oldFormat.sampleRate == newFormat.sampleRate - && oldFormat.pcmEncoding == newFormat.pcmEncoding - && oldFormat.initializationDataEquals(newFormat) - && !MimeTypes.AUDIO_OPUS.equals(oldFormat.sampleMimeType); + return new DecoderReuseEvaluation( + codecInfo.name, + oldFormat, + newFormat, + discardReasons != 0 ? REUSE_RESULT_NO : evaluation.result, + discardReasons); } @Override @@ -532,165 +458,129 @@ public MediaClock getMediaClock() { @Override protected float getCodecOperatingRateV23( - float operatingRate, Format format, Format[] streamFormats) { + float targetPlaybackSpeed, Format format, Format[] streamFormats) { // Use the highest known stream sample-rate up front, to avoid having to reconfigure the codec // should an adaptive switch to that stream occur. 
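// For example (illustrative values): with stream formats sampled at 44100 Hz and 48000 Hz and a
// target playback speed of 2.0f, the loop below settles on maxSampleRate = 48000 and the method
// returns 48000 * 2.0f = 96000f as the codec operating rate.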
int maxSampleRate = -1; for (Format streamFormat : streamFormats) { int streamSampleRate = streamFormat.sampleRate; if (streamSampleRate != Format.NO_VALUE) { - maxSampleRate = Math.max(maxSampleRate, streamSampleRate); + maxSampleRate = max(maxSampleRate, streamSampleRate); } } - return maxSampleRate == -1 ? CODEC_OPERATING_RATE_UNSET : (maxSampleRate * operatingRate); + return maxSampleRate == -1 ? CODEC_OPERATING_RATE_UNSET : (maxSampleRate * targetPlaybackSpeed); } @Override - protected void onCodecInitialized(String name, long initializedTimestampMs, + protected void onCodecInitialized( + String name, + MediaCodecAdapter.Configuration configuration, + long initializedTimestampMs, long initializationDurationMs) { eventDispatcher.decoderInitialized(name, initializedTimestampMs, initializationDurationMs); } @Override - protected void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException { - super.onInputFormatChanged(formatHolder); - inputFormat = formatHolder.format; - eventDispatcher.inputFormatChanged(inputFormat); + protected void onCodecReleased(String name) { + eventDispatcher.decoderReleased(name); + } + + @Override + protected void onCodecError(Exception codecError) { + Log.e(TAG, "Audio codec error", codecError); + eventDispatcher.audioCodecError(codecError); + } + + @Override + @Nullable + protected DecoderReuseEvaluation onInputFormatChanged(FormatHolder formatHolder) + throws ExoPlaybackException { + @Nullable DecoderReuseEvaluation evaluation = super.onInputFormatChanged(formatHolder); + eventDispatcher.inputFormatChanged(formatHolder.format, evaluation); + return evaluation; } @Override - protected void onOutputFormatChanged(MediaCodec codec, MediaFormat outputMediaFormat) + protected void onOutputFormatChanged(Format format, @Nullable MediaFormat mediaFormat) throws ExoPlaybackException { - @C.Encoding int encoding; - MediaFormat mediaFormat; - if (passthroughMediaFormat != null) { - mediaFormat = passthroughMediaFormat; - encoding = - getPassthroughEncoding( - mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT), - mediaFormat.getString(MediaFormat.KEY_MIME)); + Format audioSinkInputFormat; + @Nullable int[] channelMap = null; + if (decryptOnlyCodecFormat != null) { // Direct playback with a codec for decryption. + audioSinkInputFormat = decryptOnlyCodecFormat; + } else if (getCodec() == null) { // Direct playback with codec bypass. + audioSinkInputFormat = format; } else { - mediaFormat = outputMediaFormat; - if (outputMediaFormat.containsKey(VIVO_BITS_PER_SAMPLE_KEY)) { - encoding = Util.getPcmEncoding(outputMediaFormat.getInteger(VIVO_BITS_PER_SAMPLE_KEY)); + @C.PcmEncoding int pcmEncoding; + if (MimeTypes.AUDIO_RAW.equals(format.sampleMimeType)) { + // For PCM streams, the encoder passes through int samples despite set to float mode. + pcmEncoding = format.pcmEncoding; + } else if (Util.SDK_INT >= 24 && mediaFormat.containsKey(MediaFormat.KEY_PCM_ENCODING)) { + pcmEncoding = mediaFormat.getInteger(MediaFormat.KEY_PCM_ENCODING); + } else if (mediaFormat.containsKey(VIVO_BITS_PER_SAMPLE_KEY)) { + pcmEncoding = Util.getPcmEncoding(mediaFormat.getInteger(VIVO_BITS_PER_SAMPLE_KEY)); } else { - encoding = getPcmEncoding(inputFormat); + // If the format is anything other than PCM then we assume that the audio decoder will + // output 16-bit PCM. 
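// For example (illustrative values): a codec that only exposes the vendor bits-per-sample key
// handled above with a value of 24 resolves to C.ENCODING_PCM_24BIT via Util.getPcmEncoding(24),
// while a codec that reports neither KEY_PCM_ENCODING nor that key falls through to the 16-bit
// assumption below.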
+ pcmEncoding = C.ENCODING_PCM_16BIT; } - } - int channelCount = mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT); - int sampleRate = mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE); - int[] channelMap; - if (codecNeedsDiscardChannelsWorkaround && channelCount == 6 && inputFormat.channelCount < 6) { - channelMap = new int[inputFormat.channelCount]; - for (int i = 0; i < inputFormat.channelCount; i++) { - channelMap[i] = i; + audioSinkInputFormat = + new Format.Builder() + .setSampleMimeType(MimeTypes.AUDIO_RAW) + .setPcmEncoding(pcmEncoding) + .setEncoderDelay(format.encoderDelay) + .setEncoderPadding(format.encoderPadding) + .setChannelCount(mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT)) + .setSampleRate(mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE)) + .build(); + if (codecNeedsDiscardChannelsWorkaround + && audioSinkInputFormat.channelCount == 6 + && format.channelCount < 6) { + channelMap = new int[format.channelCount]; + for (int i = 0; i < format.channelCount; i++) { + channelMap[i] = i; + } } - } else { - channelMap = null; } - try { - audioSink.configure( - encoding, - channelCount, - sampleRate, - 0, - channelMap, - inputFormat.encoderDelay, - inputFormat.encoderPadding); + audioSink.configure(audioSinkInputFormat, /* specifiedBufferSize= */ 0, channelMap); } catch (AudioSink.ConfigurationException e) { - // TODO(internal: b/145658993) Use outputFormat instead. - throw createRendererException(e, inputFormat); - } - } - - /** - * Returns the {@link C.Encoding} constant to use for passthrough of the given format, or {@link - * C#ENCODING_INVALID} if passthrough is not possible. - */ - @C.Encoding - protected int getPassthroughEncoding(int channelCount, String mimeType) { - if (MimeTypes.AUDIO_E_AC3_JOC.equals(mimeType)) { - // E-AC3 JOC is object-based so the output channel count is arbitrary. - if (audioSink.supportsOutput(/* channelCount= */ Format.NO_VALUE, C.ENCODING_E_AC3_JOC)) { - return MimeTypes.getEncoding(MimeTypes.AUDIO_E_AC3_JOC); - } - // E-AC3 receivers can decode JOC streams, but in 2-D rather than 3-D, so try to fall back. - mimeType = MimeTypes.AUDIO_E_AC3; - } - - @C.Encoding int encoding = MimeTypes.getEncoding(mimeType); - if (audioSink.supportsOutput(channelCount, encoding)) { - return encoding; - } else { - return C.ENCODING_INVALID; + throw createRendererException( + e, e.format, PlaybackException.ERROR_CODE_AUDIO_TRACK_INIT_FAILED); } } - /** - * Called when the audio session id becomes known. The default implementation is a no-op. One - * reason for overriding this method would be to instantiate and enable a {@link Virtualizer} in - * order to spatialize the audio channels. For this use case, any {@link Virtualizer} instances - * should be released in {@link #onDisabled()} (if not before). - * - * @see AudioSink.Listener#onAudioSessionId(int) - */ - protected void onAudioSessionId(int audioSessionId) { - // Do nothing. - } - - /** - * @see AudioSink.Listener#onPositionDiscontinuity() - */ - protected void onAudioTrackPositionDiscontinuity() { - // Do nothing. - } - - /** - * @see AudioSink.Listener#onUnderrun(int, long, long) - */ - protected void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs, - long elapsedSinceLastFeedMs) { - // Do nothing. + /** See {@link AudioSink.Listener#onPositionDiscontinuity()}. */ + @CallSuper + protected void onPositionDiscontinuity() { + // We are out of sync so allow currentPositionUs to jump backwards. 
+ allowPositionDiscontinuity = true; } @Override - protected void onEnabled(boolean joining) throws ExoPlaybackException { - super.onEnabled(joining); + protected void onEnabled(boolean joining, boolean mayRenderStartOfStream) + throws ExoPlaybackException { + super.onEnabled(joining, mayRenderStartOfStream); eventDispatcher.enabled(decoderCounters); - int tunnelingAudioSessionId = getConfiguration().tunnelingAudioSessionId; - if (tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET) { - audioSink.enableTunnelingV21(tunnelingAudioSessionId); + if (getConfiguration().tunneling) { + audioSink.enableTunnelingV21(); } else { audioSink.disableTunneling(); } - } - - @Override - protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException { - super.onStreamChanged(formats, offsetUs); - if (lastInputTimeUs != C.TIME_UNSET) { - if (pendingStreamChangeCount == pendingStreamChangeTimesUs.length) { - Log.w( - TAG, - "Too many stream changes, so dropping change at " - + pendingStreamChangeTimesUs[pendingStreamChangeCount - 1]); - } else { - pendingStreamChangeCount++; - } - pendingStreamChangeTimesUs[pendingStreamChangeCount - 1] = lastInputTimeUs; - } + audioSink.setPlayerId(getPlayerId()); } @Override protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException { super.onPositionReset(positionUs, joining); - audioSink.flush(); + if (experimentalKeepAudioTrackOnSeek) { + audioSink.experimentalFlushWithoutAudioTrackRelease(); + } else { + audioSink.flush(); + } + currentPositionUs = positionUs; allowFirstBufferPositionDiscontinuity = true; allowPositionDiscontinuity = true; - lastInputTimeUs = C.TIME_UNSET; - pendingStreamChangeCount = 0; } @Override @@ -708,9 +598,8 @@ protected void onStopped() { @Override protected void onDisabled() { + audioSinkNeedsReset = true; try { - lastInputTimeUs = C.TIME_UNSET; - pendingStreamChangeCount = 0; audioSink.flush(); } finally { try { @@ -726,7 +615,10 @@ protected void onReset() { try { super.onReset(); } finally { - audioSink.reset(); + if (audioSinkNeedsReset) { + audioSinkNeedsReset = false; + audioSink.reset(); + } } } @@ -769,67 +661,65 @@ protected void onQueueInputBuffer(DecoderInputBuffer buffer) { } allowFirstBufferPositionDiscontinuity = false; } - lastInputTimeUs = Math.max(buffer.timeUs, lastInputTimeUs); } - @CallSuper @Override - protected void onProcessedOutputBuffer(long presentationTimeUs) { - while (pendingStreamChangeCount != 0 && presentationTimeUs >= pendingStreamChangeTimesUs[0]) { - audioSink.handleDiscontinuity(); - pendingStreamChangeCount--; - System.arraycopy( - pendingStreamChangeTimesUs, - /* srcPos= */ 1, - pendingStreamChangeTimesUs, - /* destPos= */ 0, - pendingStreamChangeCount); - } + protected void onProcessedStreamChange() { + super.onProcessedStreamChange(); + audioSink.handleDiscontinuity(); } @Override protected boolean processOutputBuffer( long positionUs, long elapsedRealtimeUs, - MediaCodec codec, - ByteBuffer buffer, + @Nullable MediaCodecAdapter codec, + @Nullable ByteBuffer buffer, int bufferIndex, int bufferFlags, + int sampleCount, long bufferPresentationTimeUs, boolean isDecodeOnlyBuffer, boolean isLastBuffer, Format format) throws ExoPlaybackException { - if (codecNeedsEosBufferTimestampWorkaround - && bufferPresentationTimeUs == 0 - && (bufferFlags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0 - && lastInputTimeUs != C.TIME_UNSET) { - bufferPresentationTimeUs = lastInputTimeUs; - } + checkNotNull(buffer); - if (passthroughEnabled && (bufferFlags & 
MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + if (decryptOnlyCodecFormat != null + && (bufferFlags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { // Discard output buffers from the passthrough (raw) decoder containing codec specific data. - codec.releaseOutputBuffer(bufferIndex, false); + checkNotNull(codec).releaseOutputBuffer(bufferIndex, false); return true; } if (isDecodeOnlyBuffer) { - codec.releaseOutputBuffer(bufferIndex, false); - decoderCounters.skippedOutputBufferCount++; + if (codec != null) { + codec.releaseOutputBuffer(bufferIndex, false); + } + decoderCounters.skippedOutputBufferCount += sampleCount; audioSink.handleDiscontinuity(); return true; } + boolean fullyConsumed; try { - if (audioSink.handleBuffer(buffer, bufferPresentationTimeUs)) { + fullyConsumed = audioSink.handleBuffer(buffer, bufferPresentationTimeUs, sampleCount); + } catch (InitializationException e) { + throw createRendererException( + e, e.format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_INIT_FAILED); + } catch (WriteException e) { + throw createRendererException( + e, format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED); + } + + if (fullyConsumed) { + if (codec != null) { codec.releaseOutputBuffer(bufferIndex, false); - decoderCounters.renderedOutputBufferCount++; - return true; } - } catch (AudioSink.InitializationException | AudioSink.WriteException e) { - // TODO(internal: b/145658993) Use outputFormat instead. - throw createRendererException(e, inputFormat); + decoderCounters.renderedOutputBufferCount += sampleCount; + return true; } + return false; } @@ -838,25 +728,50 @@ protected void renderToEndOfStream() throws ExoPlaybackException { try { audioSink.playToEndOfStream(); } catch (AudioSink.WriteException e) { - // TODO(internal: b/145658993) Use outputFormat instead. 
- throw createRendererException(e, inputFormat); + throw createRendererException( + e, e.format, e.isRecoverable, PlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED); } } @Override - public void handleMessage(int messageType, @Nullable Object message) throws ExoPlaybackException { + protected void onOutputStreamOffsetUsChanged(long outputStreamOffsetUs) { + audioSink.setOutputStreamOffsetUs(outputStreamOffsetUs); + } + + @Override + public void handleMessage(@MessageType int messageType, @Nullable Object message) + throws ExoPlaybackException { switch (messageType) { - case C.MSG_SET_VOLUME: + case MSG_SET_VOLUME: audioSink.setVolume((Float) message); break; - case C.MSG_SET_AUDIO_ATTRIBUTES: + case MSG_SET_AUDIO_ATTRIBUTES: AudioAttributes audioAttributes = (AudioAttributes) message; audioSink.setAudioAttributes(audioAttributes); break; - case C.MSG_SET_AUX_EFFECT_INFO: + case MSG_SET_AUX_EFFECT_INFO: AuxEffectInfo auxEffectInfo = (AuxEffectInfo) message; audioSink.setAuxEffectInfo(auxEffectInfo); break; + case MSG_SET_PREFERRED_AUDIO_DEVICE: + if (Util.SDK_INT >= 23) { + Api23.setAudioSinkPreferredDevice(audioSink, message); + } + break; + case MSG_SET_SKIP_SILENCE_ENABLED: + audioSink.setSkipSilenceEnabled((Boolean) message); + break; + case MSG_SET_AUDIO_SESSION_ID: + audioSink.setAudioSessionId((Integer) message); + break; + case MSG_SET_WAKEUP_LISTENER: + this.wakeupListener = (WakeupListener) message; + break; + case MSG_SET_CAMERA_MOTION_LISTENER: + case MSG_SET_CHANGE_FRAME_RATE_STRATEGY: + case MSG_SET_SCALING_MODE: + case MSG_SET_VIDEO_FRAME_METADATA_LISTENER: + case MSG_SET_VIDEO_OUTPUT: default: super.handleMessage(messageType, message); break; @@ -881,9 +796,8 @@ protected int getCodecMaxInputSize( return maxInputSize; } for (Format streamFormat : streamFormats) { - if (codecInfo.isSeamlessAdaptationSupported( - format, streamFormat, /* isNewFormatComplete= */ false)) { - maxInputSize = Math.max(maxInputSize, getCodecMaxInputSize(codecInfo, streamFormat)); + if (codecInfo.canReuseCodec(format, streamFormat).result != REUSE_RESULT_NO) { + maxInputSize = max(maxInputSize, getCodecMaxInputSize(codecInfo, streamFormat)); } } return maxInputSize; @@ -944,6 +858,16 @@ protected MediaFormat getMediaFormat( // not sync frames. Set a format key to override this. mediaFormat.setInteger("ac4-is-sync", 1); } + if (Util.SDK_INT >= 24 + && audioSink.getFormatSupport( + Util.getPcmFormat(C.ENCODING_PCM_FLOAT, format.channelCount, format.sampleRate)) + == AudioSink.SINK_FORMAT_SUPPORTED_DIRECTLY) { + mediaFormat.setInteger(MediaFormat.KEY_PCM_ENCODING, AudioFormat.ENCODING_PCM_FLOAT); + } + if (Util.SDK_INT >= 32) { + mediaFormat.setInteger(MediaFormat.KEY_MAX_OUTPUT_CHANNEL_COUNT, 99); + } + return mediaFormat; } @@ -953,7 +877,7 @@ private void updateCurrentPosition() { currentPositionUs = allowPositionDiscontinuity ? newCurrentPositionUs - : Math.max(currentPositionUs, newCurrentPositionUs); + : max(currentPositionUs, newCurrentPositionUs); allowPositionDiscontinuity = false; } } @@ -972,65 +896,71 @@ private static boolean deviceDoesntSupportOperatingRate() { /** * Returns whether the decoder is known to output six audio channels when provided with input with * fewer than six channels. - *
<p>
- * See [Internal: b/35655036]. + * + * <p>
      See [Internal: b/35655036]. */ private static boolean codecNeedsDiscardChannelsWorkaround(String codecName) { // The workaround applies to Samsung Galaxy S6 and Samsung Galaxy S7. - return Util.SDK_INT < 24 && "OMX.SEC.aac.dec".equals(codecName) - && "samsung".equals(Util.MANUFACTURER) - && (Util.DEVICE.startsWith("zeroflte") || Util.DEVICE.startsWith("herolte") - || Util.DEVICE.startsWith("heroqlte")); - } - - /** - * Returns whether the decoder may output a non-empty buffer with timestamp 0 as the end of stream - * buffer. - * - *
<p>
      See GitHub issue #5045. - */ - private static boolean codecNeedsEosBufferTimestampWorkaround(String codecName) { - return Util.SDK_INT < 21 - && "OMX.SEC.mp3.dec".equals(codecName) + return Util.SDK_INT < 24 + && "OMX.SEC.aac.dec".equals(codecName) && "samsung".equals(Util.MANUFACTURER) - && (Util.DEVICE.startsWith("baffin") - || Util.DEVICE.startsWith("grand") - || Util.DEVICE.startsWith("fortuna") - || Util.DEVICE.startsWith("gprimelte") - || Util.DEVICE.startsWith("j2y18lte") - || Util.DEVICE.startsWith("ms01")); - } - - @C.Encoding - private static int getPcmEncoding(Format format) { - // If the format is anything other than PCM then we assume that the audio decoder will output - // 16-bit PCM. - return MimeTypes.AUDIO_RAW.equals(format.sampleMimeType) - ? format.pcmEncoding - : C.ENCODING_PCM_16BIT; + && (Util.DEVICE.startsWith("zeroflte") + || Util.DEVICE.startsWith("herolte") + || Util.DEVICE.startsWith("heroqlte")); } private final class AudioSinkListener implements AudioSink.Listener { @Override - public void onAudioSessionId(int audioSessionId) { - eventDispatcher.audioSessionId(audioSessionId); - MediaCodecAudioRenderer.this.onAudioSessionId(audioSessionId); + public void onPositionDiscontinuity() { + MediaCodecAudioRenderer.this.onPositionDiscontinuity(); } @Override - public void onPositionDiscontinuity() { - onAudioTrackPositionDiscontinuity(); - // We are out of sync so allow currentPositionUs to jump backwards. - MediaCodecAudioRenderer.this.allowPositionDiscontinuity = true; + public void onPositionAdvancing(long playoutStartSystemTimeMs) { + eventDispatcher.positionAdvancing(playoutStartSystemTimeMs); } @Override public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) { - eventDispatcher.audioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs); - onAudioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs); + eventDispatcher.underrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs); } + @Override + public void onSkipSilenceEnabledChanged(boolean skipSilenceEnabled) { + eventDispatcher.skipSilenceEnabledChanged(skipSilenceEnabled); + } + + @Override + public void onOffloadBufferEmptying() { + if (wakeupListener != null) { + wakeupListener.onWakeup(); + } + } + + @Override + public void onOffloadBufferFull() { + if (wakeupListener != null) { + wakeupListener.onSleep(); + } + } + + @Override + public void onAudioSinkError(Exception audioSinkError) { + Log.e(TAG, "Audio sink error", audioSinkError); + eventDispatcher.audioSinkError(audioSinkError); + } } + @RequiresApi(23) + private static final class Api23 { + private Api23() {} + + @DoNotInline + public static void setAudioSinkPreferredDevice( + AudioSink audioSink, @Nullable Object messagePayload) { + @Nullable AudioDeviceInfo audioDeviceInfo = (AudioDeviceInfo) messagePayload; + audioSink.setPreferredDevice(audioDeviceInfo); + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/MpegAudioUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/MpegAudioUtil.java new file mode 100644 index 0000000000..d09443daf0 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/MpegAudioUtil.java @@ -0,0 +1,265 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.audio; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.util.MimeTypes; + +/** Utility methods for handling MPEG audio streams. */ +public final class MpegAudioUtil { + + /** Stores the metadata for an MPEG audio frame. */ + public static final class Header { + + /** MPEG audio header version. */ + public int version; + /** The mime type. */ + @Nullable public String mimeType; + /** Size of the frame associated with this header, in bytes. */ + public int frameSize; + /** Sample rate in samples per second. */ + public int sampleRate; + /** Number of audio channels in the frame. */ + public int channels; + /** Bitrate of the frame in bit/s. */ + public int bitrate; + /** Number of samples stored in the frame. */ + public int samplesPerFrame; + + /** + * Populates the fields in this instance to reflect the MPEG audio header in {@code headerData}, + * returning whether the header was valid. If false, the values of the fields in this instance + * will not be updated. + * + * @param headerData Header data to parse. + * @return True if the fields were populated. False otherwise, indicating that {@code + * headerData} is not a valid MPEG audio header. + */ + public boolean setForHeaderData(int headerData) { + if (!isMagicPresent(headerData)) { + return false; + } + + int version = (headerData >>> 19) & 3; + if (version == 1) { + return false; + } + + int layer = (headerData >>> 17) & 3; + if (layer == 0) { + return false; + } + + int bitrateIndex = (headerData >>> 12) & 15; + if (bitrateIndex == 0 || bitrateIndex == 0xF) { + // Disallow "free" bitrate. + return false; + } + + int samplingRateIndex = (headerData >>> 10) & 3; + if (samplingRateIndex == 3) { + return false; + } + + this.version = version; + mimeType = MIME_TYPE_BY_LAYER[3 - layer]; + sampleRate = SAMPLING_RATE_V1[samplingRateIndex]; + if (version == 2) { + // Version 2 + sampleRate /= 2; + } else if (version == 0) { + // Version 2.5 + sampleRate /= 4; + } + int padding = (headerData >>> 9) & 1; + samplesPerFrame = getFrameSizeInSamples(version, layer); + if (layer == 3) { + // Layer I (layer == 3) + bitrate = version == 3 ? BITRATE_V1_L1[bitrateIndex - 1] : BITRATE_V2_L1[bitrateIndex - 1]; + frameSize = (12 * bitrate / sampleRate + padding) * 4; + } else { + // Layer II (layer == 2) or III (layer == 1) + if (version == 3) { + // Version 1 + bitrate = layer == 2 ? BITRATE_V1_L2[bitrateIndex - 1] : BITRATE_V1_L3[bitrateIndex - 1]; + frameSize = 144 * bitrate / sampleRate + padding; + } else { + // Version 2 or 2.5. + bitrate = BITRATE_V2[bitrateIndex - 1]; + frameSize = (layer == 1 ? 72 : 144) * bitrate / sampleRate + padding; + } + } + channels = ((headerData >> 6) & 3) == 3 ? 1 : 2; + return true; + } + } + + /** + * Returns the size of the frame associated with {@code header}, or {@link C#LENGTH_UNSET} if it + * is invalid. 
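// Worked illustration: a valid MPEG-1 Layer III header at 44100 Hz and 128 kbit/s with no padding
// yields 144 * 128000 / 44100 = 417 bytes (integer division), which is the value returned below.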
+ */ + public static int getFrameSize(int headerData) { + if (!isMagicPresent(headerData)) { + return C.LENGTH_UNSET; + } + + int version = (headerData >>> 19) & 3; + if (version == 1) { + return C.LENGTH_UNSET; + } + + int layer = (headerData >>> 17) & 3; + if (layer == 0) { + return C.LENGTH_UNSET; + } + + int bitrateIndex = (headerData >>> 12) & 15; + if (bitrateIndex == 0 || bitrateIndex == 0xF) { + // Disallow "free" bitrate. + return C.LENGTH_UNSET; + } + + int samplingRateIndex = (headerData >>> 10) & 3; + if (samplingRateIndex == 3) { + return C.LENGTH_UNSET; + } + + int samplingRate = SAMPLING_RATE_V1[samplingRateIndex]; + if (version == 2) { + // Version 2 + samplingRate /= 2; + } else if (version == 0) { + // Version 2.5 + samplingRate /= 4; + } + + int bitrate; + int padding = (headerData >>> 9) & 1; + if (layer == 3) { + // Layer I (layer == 3) + bitrate = version == 3 ? BITRATE_V1_L1[bitrateIndex - 1] : BITRATE_V2_L1[bitrateIndex - 1]; + return (12 * bitrate / samplingRate + padding) * 4; + } else { + // Layer II (layer == 2) or III (layer == 1) + if (version == 3) { + bitrate = layer == 2 ? BITRATE_V1_L2[bitrateIndex - 1] : BITRATE_V1_L3[bitrateIndex - 1]; + } else { + // Version 2 or 2.5. + bitrate = BITRATE_V2[bitrateIndex - 1]; + } + } + + if (version == 3) { + // Version 1 + return 144 * bitrate / samplingRate + padding; + } else { + // Version 2 or 2.5 + return (layer == 1 ? 72 : 144) * bitrate / samplingRate + padding; + } + } + + /** + * Returns the number of samples per frame associated with {@code headerData}, or {@link + * C#LENGTH_UNSET} if it is invalid. + */ + public static int parseMpegAudioFrameSampleCount(int headerData) { + if (!isMagicPresent(headerData)) { + return C.LENGTH_UNSET; + } + + int version = (headerData >>> 19) & 3; + if (version == 1) { + return C.LENGTH_UNSET; + } + + int layer = (headerData >>> 17) & 3; + if (layer == 0) { + return C.LENGTH_UNSET; + } + + // Those header values are not used but are checked for consistency with the other methods + int bitrateIndex = (headerData >>> 12) & 15; + int samplingRateIndex = (headerData >>> 10) & 3; + if (bitrateIndex == 0 || bitrateIndex == 0xF || samplingRateIndex == 3) { + return C.LENGTH_UNSET; + } + + return getFrameSizeInSamples(version, layer); + } + + /** + * Theoretical maximum frame size for an MPEG audio stream, which occurs when playing a Layer 2 + * MPEG 2.5 audio stream at 16 kb/s (with padding). The size is 1152 sample/frame * 160000 bit/s / + * (8000 sample/s * 8 bit/byte) + 1 padding byte/frame = 2881 byte/frame. The next power of two + * size is 4 KiB. + */ + public static final int MAX_FRAME_SIZE_BYTES = 4096; + + /** + * Maximum rate for an MPEG audio stream corresponding to MPEG-1 layer III (320 kbit/s), in bytes + * per second. 
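// That is, 320 * 1000 / 8 = 40000 bytes per second.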
+ */ + public static final int MAX_RATE_BYTES_PER_SECOND = 320 * 1000 / 8; + + private static final String[] MIME_TYPE_BY_LAYER = + new String[] {MimeTypes.AUDIO_MPEG_L1, MimeTypes.AUDIO_MPEG_L2, MimeTypes.AUDIO_MPEG}; + private static final int[] SAMPLING_RATE_V1 = {44100, 48000, 32000}; + private static final int[] BITRATE_V1_L1 = { + 32000, 64000, 96000, 128000, 160000, 192000, 224000, 256000, 288000, 320000, 352000, 384000, + 416000, 448000 + }; + private static final int[] BITRATE_V2_L1 = { + 32000, 48000, 56000, 64000, 80000, 96000, 112000, 128000, 144000, 160000, 176000, 192000, + 224000, 256000 + }; + private static final int[] BITRATE_V1_L2 = { + 32000, 48000, 56000, 64000, 80000, 96000, 112000, 128000, 160000, 192000, 224000, 256000, + 320000, 384000 + }; + private static final int[] BITRATE_V1_L3 = { + 32000, 40000, 48000, 56000, 64000, 80000, 96000, 112000, 128000, 160000, 192000, 224000, 256000, + 320000 + }; + private static final int[] BITRATE_V2 = { + 8000, 16000, 24000, 32000, 40000, 48000, 56000, 64000, 80000, 96000, 112000, 128000, 144000, + 160000 + }; + + private static final int SAMPLES_PER_FRAME_L1 = 384; + private static final int SAMPLES_PER_FRAME_L2 = 1152; + private static final int SAMPLES_PER_FRAME_L3_V1 = 1152; + private static final int SAMPLES_PER_FRAME_L3_V2 = 576; + + private MpegAudioUtil() {} + + private static boolean isMagicPresent(int headerData) { + return (headerData & 0xFFE00000) == 0xFFE00000; + } + + private static int getFrameSizeInSamples(int version, int layer) { + switch (layer) { + case 1: + return version == 3 ? SAMPLES_PER_FRAME_L3_V1 : SAMPLES_PER_FRAME_L3_V2; // Layer III + case 2: + return SAMPLES_PER_FRAME_L2; // Layer II + case 3: + return SAMPLES_PER_FRAME_L1; // Layer I + default: + throw new IllegalArgumentException(); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/OpusUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/OpusUtil.java new file mode 100644 index 0000000000..5a9031991a --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/OpusUtil.java @@ -0,0 +1,134 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.audio; + +import com.google.android.exoplayer2.C; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.ArrayList; +import java.util.List; + +/** Utility methods for handling Opus audio streams. */ +public class OpusUtil { + + /** Opus streams are always 48000 Hz. */ + public static final int SAMPLE_RATE = 48_000; + + /** Maximum achievable Opus bitrate. */ + public static final int MAX_BYTES_PER_SECOND = 510 * 1000 / 8; // See RFC 6716. Section 2.1.1 + + private static final int DEFAULT_SEEK_PRE_ROLL_SAMPLES = 3840; + private static final int FULL_CODEC_INITIALIZATION_DATA_BUFFER_COUNT = 3; + + private OpusUtil() {} // Prevents instantiation. 
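As a usage sketch for the MpegAudioUtil helpers introduced above (the surrounding scaffolding here is hypothetical; only the MpegAudioUtil and C calls come from the class itself), a caller that has read a candidate 4-byte header into a byte array might do:

// Parses one candidate MPEG audio frame header; returns its size in bytes, or C.LENGTH_UNSET if
// the four bytes do not form a valid header (no sync word, reserved version/layer, etc.).
private static int parseCandidateHeader(byte[] data) {
  int headerData =
      ((data[0] & 0xFF) << 24) | ((data[1] & 0xFF) << 16) | ((data[2] & 0xFF) << 8) | (data[3] & 0xFF);
  MpegAudioUtil.Header header = new MpegAudioUtil.Header();
  if (!header.setForHeaderData(headerData)) {
    return C.LENGTH_UNSET;
  }
  // header.mimeType, header.sampleRate, header.bitrate and header.samplesPerFrame are now populated;
  // header.frameSize matches what MpegAudioUtil.getFrameSize(headerData) would return.
  return header.frameSize;
}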
+ + /** + * Parses the channel count from an Opus Identification Header. + * + * @param header An Opus Identification Header, as defined by RFC 7845. + * @return The parsed channel count. + */ + public static int getChannelCount(byte[] header) { + return header[9] & 0xFF; + } + + /** + * Builds codec initialization data from an Opus Identification Header. + * + * @param header An Opus Identification Header, as defined by RFC 7845. + * @return Codec initialization data suitable for an Opus MediaCodec. + */ + public static List buildInitializationData(byte[] header) { + int preSkipSamples = getPreSkipSamples(header); + long preSkipNanos = sampleCountToNanoseconds(preSkipSamples); + long seekPreRollNanos = sampleCountToNanoseconds(DEFAULT_SEEK_PRE_ROLL_SAMPLES); + + List initializationData = new ArrayList<>(FULL_CODEC_INITIALIZATION_DATA_BUFFER_COUNT); + initializationData.add(header); + initializationData.add(buildNativeOrderByteArray(preSkipNanos)); + initializationData.add(buildNativeOrderByteArray(seekPreRollNanos)); + return initializationData; + } + + /** + * Returns the number of audio samples in the given audio packet. + * + *
<p>
      The buffer's position is not modified. + * + * @param buffer The audio packet. + * @return Returns the number of audio samples in the packet. + */ + public static int parsePacketAudioSampleCount(ByteBuffer buffer) { + long packetDurationUs = + getPacketDurationUs(buffer.get(0), buffer.limit() > 1 ? buffer.get(1) : 0); + return (int) (packetDurationUs * SAMPLE_RATE / C.MICROS_PER_SECOND); + } + + /** + * Returns the duration of the given audio packet. + * + * @param buffer The audio packet. + * @return Returns the duration of the given audio packet, in microseconds. + */ + public static long getPacketDurationUs(byte[] buffer) { + return getPacketDurationUs(buffer[0], buffer.length > 1 ? buffer[1] : 0); + } + + private static long getPacketDurationUs(byte packetByte0, byte packetByte1) { + // See RFC6716, Sections 3.1 and 3.2. + int toc = packetByte0 & 0xFF; + int frames; + switch (toc & 0x3) { + case 0: + frames = 1; + break; + case 1: + case 2: + frames = 2; + break; + default: + frames = packetByte1 & 0x3F; + break; + } + + int config = toc >> 3; + int length = config & 0x3; + int frameDurationUs; + if (config >= 16) { + frameDurationUs = 2500 << length; + } else if (config >= 12) { + frameDurationUs = 10000 << (length & 0x1); + } else if (length == 3) { + frameDurationUs = 60000; + } else { + frameDurationUs = 10000 << length; + } + return (long) frames * frameDurationUs; + } + + private static int getPreSkipSamples(byte[] header) { + return ((header[11] & 0xFF) << 8) | (header[10] & 0xFF); + } + + private static byte[] buildNativeOrderByteArray(long value) { + return ByteBuffer.allocate(8).order(ByteOrder.nativeOrder()).putLong(value).array(); + } + + private static long sampleCountToNanoseconds(long sampleCount) { + return (sampleCount * C.NANOS_PER_SECOND) / SAMPLE_RATE; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/ResamplingAudioProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/ResamplingAudioProcessor.java index 883f5bcb92..e75c7d9d8b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/ResamplingAudioProcessor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/ResamplingAudioProcessor.java @@ -17,6 +17,8 @@ import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.nio.ByteBuffer; /** @@ -35,6 +37,7 @@ /* package */ final class ResamplingAudioProcessor extends BaseAudioProcessor { @Override + @CanIgnoreReturnValue public AudioFormat onConfigure(AudioFormat inputAudioFormat) throws UnhandledAudioFormatException { @C.PcmEncoding int encoding = inputAudioFormat.encoding; @@ -115,9 +118,13 @@ public void queueInput(ByteBuffer inputBuffer) { // 32 bit floating point -> 16 bit resampling. Floating point values are in the range // [-1.0, 1.0], so need to be scaled by Short.MAX_VALUE. for (int i = position; i < limit; i += 4) { - short value = (short) (inputBuffer.getFloat(i) * Short.MAX_VALUE); - buffer.put((byte) (value & 0xFF)); - buffer.put((byte) ((value >> 8) & 0xFF)); + // Clamp to avoid integer overflow if the floating point values exceed their nominal range + // [Internal ref: b/161204847]. 
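// Worked illustration of why the clamp matters: without it, a slightly out-of-range sample such as
// 1.0002f maps to (short) (1.0002f * 32767) = (short) 32773, which wraps around to -32763 and is
// heard as a full-scale click; constraining the value to [-1, 1] first caps the result at 32767.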
+ float floatValue = + Util.constrainValue(inputBuffer.getFloat(i), /* min= */ -1, /* max= */ 1); + short shortValue = (short) (floatValue * Short.MAX_VALUE); + buffer.put((byte) (shortValue & 0xFF)); + buffer.put((byte) ((shortValue >> 8) & 0xFF)); } break; case C.ENCODING_PCM_16BIT: @@ -130,5 +137,4 @@ public void queueInput(ByteBuffer inputBuffer) { inputBuffer.position(inputBuffer.limit()); buffer.flip(); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/SilenceSkippingAudioProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/SilenceSkippingAudioProcessor.java index 7ddb491525..2a6f4b5908 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/SilenceSkippingAudioProcessor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/SilenceSkippingAudioProcessor.java @@ -15,15 +15,19 @@ */ package com.google.android.exoplayer2.audio; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.nio.ByteBuffer; -import java.nio.ByteOrder; /** * An {@link AudioProcessor} that skips silence in the input stream. Input and output are 16-bit @@ -50,6 +54,7 @@ public final class SilenceSkippingAudioProcessor extends BaseAudioProcessor { /** Trimming states. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ STATE_NOISY, STATE_MAYBE_SILENT, @@ -82,7 +87,7 @@ public final class SilenceSkippingAudioProcessor extends BaseAudioProcessor { */ private byte[] paddingBuffer; - @State private int state; + private @State int state; private int maybeSilenceBufferSize; private int paddingSize; private boolean hasOutputNoise; @@ -139,6 +144,7 @@ public long getSkippedFrames() { // AudioProcessor implementation. @Override + @CanIgnoreReturnValue public AudioFormat onConfigure(AudioFormat inputAudioFormat) throws UnhandledAudioFormatException { if (inputAudioFormat.encoding != C.ENCODING_PCM_16BIT) { @@ -219,7 +225,7 @@ private void processNoisy(ByteBuffer inputBuffer) { int limit = inputBuffer.limit(); // Check if there's any noise within the maybe silence buffer duration. - inputBuffer.limit(Math.min(limit, inputBuffer.position() + maybeSilenceBuffer.length)); + inputBuffer.limit(min(limit, inputBuffer.position() + maybeSilenceBuffer.length)); int noiseLimit = findNoiseLimit(inputBuffer); if (noiseLimit == inputBuffer.position()) { // The buffer contains the start of possible silence. @@ -249,7 +255,7 @@ private void processMaybeSilence(ByteBuffer inputBuffer) { state = STATE_NOISY; } else { // Fill as much of the maybe silence buffer as possible. - int bytesToWrite = Math.min(maybeSilenceInputSize, maybeSilenceBufferRemaining); + int bytesToWrite = min(maybeSilenceInputSize, maybeSilenceBufferRemaining); inputBuffer.limit(inputBuffer.position() + bytesToWrite); inputBuffer.get(maybeSilenceBuffer, maybeSilenceBufferSize, bytesToWrite); maybeSilenceBufferSize += bytesToWrite; @@ -321,7 +327,7 @@ private void output(ByteBuffer data) { * position. 
*/ private void updatePaddingBuffer(ByteBuffer input, byte[] buffer, int size) { - int fromInputSize = Math.min(input.remaining(), paddingSize); + int fromInputSize = min(input.remaining(), paddingSize); int fromBufferSize = paddingSize - fromInputSize; System.arraycopy( /* src= */ buffer, @@ -345,7 +351,6 @@ private int durationUsToFrames(long durationUs) { * classified as a noisy frame, or the limit of the buffer if no such frame exists. */ private int findNoisePosition(ByteBuffer buffer) { - Assertions.checkArgument(buffer.order() == ByteOrder.LITTLE_ENDIAN); // The input is in ByteOrder.nativeOrder(), which is little endian on Android. for (int i = buffer.position(); i < buffer.limit(); i += 2) { if (Math.abs(buffer.getShort(i)) > silenceThresholdLevel) { @@ -361,7 +366,6 @@ private int findNoisePosition(ByteBuffer buffer) { * from the byte position to the limit are classified as silent. */ private int findNoiseLimit(ByteBuffer buffer) { - Assertions.checkArgument(buffer.order() == ByteOrder.LITTLE_ENDIAN); // The input is in ByteOrder.nativeOrder(), which is little endian on Android. for (int i = buffer.limit() - 2; i >= buffer.position(); i -= 2) { if (Math.abs(buffer.getShort(i)) > silenceThresholdLevel) { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/SimpleDecoderAudioRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/SimpleDecoderAudioRenderer.java deleted file mode 100644 index 30c664f0f8..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/SimpleDecoderAudioRenderer.java +++ /dev/null @@ -1,758 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.google.android.exoplayer2.audio; - -import android.media.audiofx.Virtualizer; -import android.os.Handler; -import android.os.SystemClock; -import androidx.annotation.IntDef; -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.BaseRenderer; -import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.ExoPlaybackException; -import com.google.android.exoplayer2.ExoPlayer; -import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.FormatHolder; -import com.google.android.exoplayer2.PlaybackParameters; -import com.google.android.exoplayer2.PlayerMessage.Target; -import com.google.android.exoplayer2.RendererCapabilities; -import com.google.android.exoplayer2.audio.AudioRendererEventListener.EventDispatcher; -import com.google.android.exoplayer2.decoder.DecoderCounters; -import com.google.android.exoplayer2.decoder.DecoderInputBuffer; -import com.google.android.exoplayer2.decoder.SimpleDecoder; -import com.google.android.exoplayer2.decoder.SimpleOutputBuffer; -import com.google.android.exoplayer2.drm.DrmSession; -import com.google.android.exoplayer2.drm.DrmSession.DrmSessionException; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.drm.ExoMediaCrypto; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.MediaClock; -import com.google.android.exoplayer2.util.MimeTypes; -import com.google.android.exoplayer2.util.TraceUtil; -import com.google.android.exoplayer2.util.Util; -import java.lang.annotation.Documented; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; - -/** - * Decodes and renders audio using a {@link SimpleDecoder}. - * - *
<p>
      This renderer accepts the following messages sent via {@link ExoPlayer#createMessage(Target)} - * on the playback thread: - * - *
<ul>
        - *
      • Message with type {@link C#MSG_SET_VOLUME} to set the volume. The message payload should be - * a {@link Float} with 0 being silence and 1 being unity gain. - *
      • Message with type {@link C#MSG_SET_AUDIO_ATTRIBUTES} to set the audio attributes. The - * message payload should be an {@link com.google.android.exoplayer2.audio.AudioAttributes} - * instance that will configure the underlying audio track. - *
      • Message with type {@link C#MSG_SET_AUX_EFFECT_INFO} to set the auxiliary effect. The - * message payload should be an {@link AuxEffectInfo} instance that will configure the - * underlying audio track. - *
      - */ -public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements MediaClock { - - @Documented - @Retention(RetentionPolicy.SOURCE) - @IntDef({ - REINITIALIZATION_STATE_NONE, - REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM, - REINITIALIZATION_STATE_WAIT_END_OF_STREAM - }) - private @interface ReinitializationState {} - /** - * The decoder does not need to be re-initialized. - */ - private static final int REINITIALIZATION_STATE_NONE = 0; - /** - * The input format has changed in a way that requires the decoder to be re-initialized, but we - * haven't yet signaled an end of stream to the existing decoder. We need to do so in order to - * ensure that it outputs any remaining buffers before we release it. - */ - private static final int REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM = 1; - /** - * The input format has changed in a way that requires the decoder to be re-initialized, and we've - * signaled an end of stream to the existing decoder. We're waiting for the decoder to output an - * end of stream signal to indicate that it has output any remaining buffers before we release it. - */ - private static final int REINITIALIZATION_STATE_WAIT_END_OF_STREAM = 2; - - private final DrmSessionManager drmSessionManager; - private final boolean playClearSamplesWithoutKeys; - private final EventDispatcher eventDispatcher; - private final AudioSink audioSink; - private final DecoderInputBuffer flagsOnlyBuffer; - - private boolean drmResourcesAcquired; - private DecoderCounters decoderCounters; - private Format inputFormat; - private int encoderDelay; - private int encoderPadding; - private SimpleDecoder decoder; - private DecoderInputBuffer inputBuffer; - private SimpleOutputBuffer outputBuffer; - @Nullable private DrmSession decoderDrmSession; - @Nullable private DrmSession sourceDrmSession; - - @ReinitializationState private int decoderReinitializationState; - private boolean decoderReceivedBuffers; - private boolean audioTrackNeedsConfigure; - - private long currentPositionUs; - private boolean allowFirstBufferPositionDiscontinuity; - private boolean allowPositionDiscontinuity; - private boolean inputStreamEnded; - private boolean outputStreamEnded; - private boolean waitingForKeys; - - public SimpleDecoderAudioRenderer() { - this(/* eventHandler= */ null, /* eventListener= */ null); - } - - /** - * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be - * null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output. - */ - public SimpleDecoderAudioRenderer( - @Nullable Handler eventHandler, - @Nullable AudioRendererEventListener eventListener, - AudioProcessor... audioProcessors) { - this( - eventHandler, - eventListener, - /* audioCapabilities= */ null, - /* drmSessionManager= */ null, - /* playClearSamplesWithoutKeys= */ false, - audioProcessors); - } - - /** - * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be - * null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @param audioCapabilities The audio capabilities for playback on this device. May be null if the - * default capabilities (no encoded audio passthrough support) should be assumed. 
- */ - public SimpleDecoderAudioRenderer( - @Nullable Handler eventHandler, - @Nullable AudioRendererEventListener eventListener, - @Nullable AudioCapabilities audioCapabilities) { - this( - eventHandler, - eventListener, - audioCapabilities, - /* drmSessionManager= */ null, - /* playClearSamplesWithoutKeys= */ false); - } - - /** - * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be - * null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @param audioCapabilities The audio capabilities for playback on this device. May be null if the - * default capabilities (no encoded audio passthrough support) should be assumed. - * @param drmSessionManager For use with encrypted media. May be null if support for encrypted - * media is not required. - * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions. - * For example a media file may start with a short clear region so as to allow playback to - * begin in parallel with key acquisition. This parameter specifies whether the renderer is - * permitted to play clear regions of encrypted media files before {@code drmSessionManager} - * has obtained the keys necessary to decrypt encrypted regions of the media. - * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output. - */ - public SimpleDecoderAudioRenderer( - @Nullable Handler eventHandler, - @Nullable AudioRendererEventListener eventListener, - @Nullable AudioCapabilities audioCapabilities, - @Nullable DrmSessionManager drmSessionManager, - boolean playClearSamplesWithoutKeys, - AudioProcessor... audioProcessors) { - this(eventHandler, eventListener, drmSessionManager, - playClearSamplesWithoutKeys, new DefaultAudioSink(audioCapabilities, audioProcessors)); - } - - /** - * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be - * null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @param drmSessionManager For use with encrypted media. May be null if support for encrypted - * media is not required. - * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions. - * For example a media file may start with a short clear region so as to allow playback to - * begin in parallel with key acquisition. This parameter specifies whether the renderer is - * permitted to play clear regions of encrypted media files before {@code drmSessionManager} - * has obtained the keys necessary to decrypt encrypted regions of the media. - * @param audioSink The sink to which audio will be output. 
- */ - public SimpleDecoderAudioRenderer( - @Nullable Handler eventHandler, - @Nullable AudioRendererEventListener eventListener, - @Nullable DrmSessionManager drmSessionManager, - boolean playClearSamplesWithoutKeys, - AudioSink audioSink) { - super(C.TRACK_TYPE_AUDIO); - this.drmSessionManager = drmSessionManager; - this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys; - eventDispatcher = new EventDispatcher(eventHandler, eventListener); - this.audioSink = audioSink; - audioSink.setListener(new AudioSinkListener()); - flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance(); - decoderReinitializationState = REINITIALIZATION_STATE_NONE; - audioTrackNeedsConfigure = true; - } - - @Override - @Nullable - public MediaClock getMediaClock() { - return this; - } - - @Override - @Capabilities - public final int supportsFormat(Format format) { - if (!MimeTypes.isAudio(format.sampleMimeType)) { - return RendererCapabilities.create(FORMAT_UNSUPPORTED_TYPE); - } - @FormatSupport int formatSupport = supportsFormatInternal(drmSessionManager, format); - if (formatSupport <= FORMAT_UNSUPPORTED_DRM) { - return RendererCapabilities.create(formatSupport); - } - @TunnelingSupport - int tunnelingSupport = Util.SDK_INT >= 21 ? TUNNELING_SUPPORTED : TUNNELING_NOT_SUPPORTED; - return RendererCapabilities.create(formatSupport, ADAPTIVE_NOT_SEAMLESS, tunnelingSupport); - } - - /** - * Returns the {@link FormatSupport} for the given {@link Format}. - * - * @param drmSessionManager The renderer's {@link DrmSessionManager}. - * @param format The format, which has an audio {@link Format#sampleMimeType}. - * @return The {@link FormatSupport} for this {@link Format}. - */ - @FormatSupport - protected abstract int supportsFormatInternal( - @Nullable DrmSessionManager drmSessionManager, Format format); - - /** - * Returns whether the sink supports the audio format. - * - * @see AudioSink#supportsOutput(int, int) - */ - protected final boolean supportsOutput(int channelCount, @C.Encoding int encoding) { - return audioSink.supportsOutput(channelCount, encoding); - } - - @Override - public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException { - if (outputStreamEnded) { - try { - audioSink.playToEndOfStream(); - } catch (AudioSink.WriteException e) { - throw createRendererException(e, inputFormat); - } - return; - } - - // Try and read a format if we don't have one already. - if (inputFormat == null) { - // We don't have a format yet, so try and read one. - FormatHolder formatHolder = getFormatHolder(); - flagsOnlyBuffer.clear(); - int result = readSource(formatHolder, flagsOnlyBuffer, true); - if (result == C.RESULT_FORMAT_READ) { - onInputFormatChanged(formatHolder); - } else if (result == C.RESULT_BUFFER_READ) { - // End of stream read having not read a format. - Assertions.checkState(flagsOnlyBuffer.isEndOfStream()); - inputStreamEnded = true; - processEndOfStream(); - return; - } else { - // We still don't have a format and can't make progress without one. - return; - } - } - - // If we don't have a decoder yet, we need to instantiate one. - maybeInitDecoder(); - - if (decoder != null) { - try { - // Rendering loop. 
- TraceUtil.beginSection("drainAndFeed"); - while (drainOutputBuffer()) {} - while (feedInputBuffer()) {} - TraceUtil.endSection(); - } catch (AudioDecoderException | AudioSink.ConfigurationException - | AudioSink.InitializationException | AudioSink.WriteException e) { - throw createRendererException(e, inputFormat); - } - decoderCounters.ensureUpdated(); - } - } - - /** - * Called when the audio session id becomes known. The default implementation is a no-op. One - * reason for overriding this method would be to instantiate and enable a {@link Virtualizer} in - * order to spatialize the audio channels. For this use case, any {@link Virtualizer} instances - * should be released in {@link #onDisabled()} (if not before). - * - * @see AudioSink.Listener#onAudioSessionId(int) - */ - protected void onAudioSessionId(int audioSessionId) { - // Do nothing. - } - - /** - * @see AudioSink.Listener#onPositionDiscontinuity() - */ - protected void onAudioTrackPositionDiscontinuity() { - // Do nothing. - } - - /** - * @see AudioSink.Listener#onUnderrun(int, long, long) - */ - protected void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs, - long elapsedSinceLastFeedMs) { - // Do nothing. - } - - /** - * Creates a decoder for the given format. - * - * @param format The format for which a decoder is required. - * @param mediaCrypto The {@link ExoMediaCrypto} object required for decoding encrypted content. - * Maybe null and can be ignored if decoder does not handle encrypted content. - * @return The decoder. - * @throws AudioDecoderException If an error occurred creating a suitable decoder. - */ - protected abstract SimpleDecoder< - DecoderInputBuffer, ? extends SimpleOutputBuffer, ? extends AudioDecoderException> - createDecoder(Format format, @Nullable ExoMediaCrypto mediaCrypto) - throws AudioDecoderException; - - /** - * Returns the format of audio buffers output by the decoder. Will not be called until the first - * output buffer has been dequeued, so the decoder may use input data to determine the format. - */ - protected abstract Format getOutputFormat(); - - /** - * Returns whether the existing decoder can be kept for a new format. - * - * @param oldFormat The previous format. - * @param newFormat The new format. - * @return True if the existing decoder can be kept. - */ - protected boolean canKeepCodec(Format oldFormat, Format newFormat) { - return false; - } - - private boolean drainOutputBuffer() throws ExoPlaybackException, AudioDecoderException, - AudioSink.ConfigurationException, AudioSink.InitializationException, - AudioSink.WriteException { - if (outputBuffer == null) { - outputBuffer = decoder.dequeueOutputBuffer(); - if (outputBuffer == null) { - return false; - } - if (outputBuffer.skippedOutputBufferCount > 0) { - decoderCounters.skippedOutputBufferCount += outputBuffer.skippedOutputBufferCount; - audioSink.handleDiscontinuity(); - } - } - - if (outputBuffer.isEndOfStream()) { - if (decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM) { - // We're waiting to re-initialize the decoder, and have now processed all final buffers. - releaseDecoder(); - maybeInitDecoder(); - // The audio track may need to be recreated once the new output format is known. 
- audioTrackNeedsConfigure = true; - } else { - outputBuffer.release(); - outputBuffer = null; - processEndOfStream(); - } - return false; - } - - if (audioTrackNeedsConfigure) { - Format outputFormat = getOutputFormat(); - audioSink.configure(outputFormat.pcmEncoding, outputFormat.channelCount, - outputFormat.sampleRate, 0, null, encoderDelay, encoderPadding); - audioTrackNeedsConfigure = false; - } - - if (audioSink.handleBuffer(outputBuffer.data, outputBuffer.timeUs)) { - decoderCounters.renderedOutputBufferCount++; - outputBuffer.release(); - outputBuffer = null; - return true; - } - - return false; - } - - private boolean feedInputBuffer() throws AudioDecoderException, ExoPlaybackException { - if (decoder == null || decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM - || inputStreamEnded) { - // We need to reinitialize the decoder or the input stream has ended. - return false; - } - - if (inputBuffer == null) { - inputBuffer = decoder.dequeueInputBuffer(); - if (inputBuffer == null) { - return false; - } - } - - if (decoderReinitializationState == REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM) { - inputBuffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM); - decoder.queueInputBuffer(inputBuffer); - inputBuffer = null; - decoderReinitializationState = REINITIALIZATION_STATE_WAIT_END_OF_STREAM; - return false; - } - - int result; - FormatHolder formatHolder = getFormatHolder(); - if (waitingForKeys) { - // We've already read an encrypted sample into buffer, and are waiting for keys. - result = C.RESULT_BUFFER_READ; - } else { - result = readSource(formatHolder, inputBuffer, false); - } - - if (result == C.RESULT_NOTHING_READ) { - return false; - } - if (result == C.RESULT_FORMAT_READ) { - onInputFormatChanged(formatHolder); - return true; - } - if (inputBuffer.isEndOfStream()) { - inputStreamEnded = true; - decoder.queueInputBuffer(inputBuffer); - inputBuffer = null; - return false; - } - boolean bufferEncrypted = inputBuffer.isEncrypted(); - waitingForKeys = shouldWaitForKeys(bufferEncrypted); - if (waitingForKeys) { - return false; - } - inputBuffer.flip(); - onQueueInputBuffer(inputBuffer); - decoder.queueInputBuffer(inputBuffer); - decoderReceivedBuffers = true; - decoderCounters.inputBufferCount++; - inputBuffer = null; - return true; - } - - private boolean shouldWaitForKeys(boolean bufferEncrypted) throws ExoPlaybackException { - if (decoderDrmSession == null - || (!bufferEncrypted - && (playClearSamplesWithoutKeys || decoderDrmSession.playClearSamplesWithoutKeys()))) { - return false; - } - @DrmSession.State int drmSessionState = decoderDrmSession.getState(); - if (drmSessionState == DrmSession.STATE_ERROR) { - throw createRendererException(decoderDrmSession.getError(), inputFormat); - } - return drmSessionState != DrmSession.STATE_OPENED_WITH_KEYS; - } - - private void processEndOfStream() throws ExoPlaybackException { - outputStreamEnded = true; - try { - audioSink.playToEndOfStream(); - } catch (AudioSink.WriteException e) { - // TODO(internal: b/145658993) Use outputFormat for the call from drainOutputBuffer. 
- throw createRendererException(e, inputFormat); - } - } - - private void flushDecoder() throws ExoPlaybackException { - waitingForKeys = false; - if (decoderReinitializationState != REINITIALIZATION_STATE_NONE) { - releaseDecoder(); - maybeInitDecoder(); - } else { - inputBuffer = null; - if (outputBuffer != null) { - outputBuffer.release(); - outputBuffer = null; - } - decoder.flush(); - decoderReceivedBuffers = false; - } - } - - @Override - public boolean isEnded() { - return outputStreamEnded && audioSink.isEnded(); - } - - @Override - public boolean isReady() { - return audioSink.hasPendingData() - || (inputFormat != null && !waitingForKeys && (isSourceReady() || outputBuffer != null)); - } - - @Override - public long getPositionUs() { - if (getState() == STATE_STARTED) { - updateCurrentPosition(); - } - return currentPositionUs; - } - - @Override - public void setPlaybackParameters(PlaybackParameters playbackParameters) { - audioSink.setPlaybackParameters(playbackParameters); - } - - @Override - public PlaybackParameters getPlaybackParameters() { - return audioSink.getPlaybackParameters(); - } - - @Override - protected void onEnabled(boolean joining) throws ExoPlaybackException { - if (drmSessionManager != null && !drmResourcesAcquired) { - drmResourcesAcquired = true; - drmSessionManager.prepare(); - } - decoderCounters = new DecoderCounters(); - eventDispatcher.enabled(decoderCounters); - int tunnelingAudioSessionId = getConfiguration().tunnelingAudioSessionId; - if (tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET) { - audioSink.enableTunnelingV21(tunnelingAudioSessionId); - } else { - audioSink.disableTunneling(); - } - } - - @Override - protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException { - audioSink.flush(); - currentPositionUs = positionUs; - allowFirstBufferPositionDiscontinuity = true; - allowPositionDiscontinuity = true; - inputStreamEnded = false; - outputStreamEnded = false; - if (decoder != null) { - flushDecoder(); - } - } - - @Override - protected void onStarted() { - audioSink.play(); - } - - @Override - protected void onStopped() { - updateCurrentPosition(); - audioSink.pause(); - } - - @Override - protected void onDisabled() { - inputFormat = null; - audioTrackNeedsConfigure = true; - waitingForKeys = false; - try { - setSourceDrmSession(null); - releaseDecoder(); - audioSink.reset(); - } finally { - eventDispatcher.disabled(decoderCounters); - } - } - - @Override - protected void onReset() { - if (drmSessionManager != null && drmResourcesAcquired) { - drmResourcesAcquired = false; - drmSessionManager.release(); - } - } - - @Override - public void handleMessage(int messageType, @Nullable Object message) throws ExoPlaybackException { - switch (messageType) { - case C.MSG_SET_VOLUME: - audioSink.setVolume((Float) message); - break; - case C.MSG_SET_AUDIO_ATTRIBUTES: - AudioAttributes audioAttributes = (AudioAttributes) message; - audioSink.setAudioAttributes(audioAttributes); - break; - case C.MSG_SET_AUX_EFFECT_INFO: - AuxEffectInfo auxEffectInfo = (AuxEffectInfo) message; - audioSink.setAuxEffectInfo(auxEffectInfo); - break; - default: - super.handleMessage(messageType, message); - break; - } - } - - private void maybeInitDecoder() throws ExoPlaybackException { - if (decoder != null) { - return; - } - - setDecoderDrmSession(sourceDrmSession); - - ExoMediaCrypto mediaCrypto = null; - if (decoderDrmSession != null) { - mediaCrypto = decoderDrmSession.getMediaCrypto(); - if (mediaCrypto == null) { - DrmSessionException 
drmError = decoderDrmSession.getError(); - if (drmError != null) { - // Continue for now. We may be able to avoid failure if the session recovers, or if a new - // input format causes the session to be replaced before it's used. - } else { - // The drm session isn't open yet. - return; - } - } - } - - try { - long codecInitializingTimestamp = SystemClock.elapsedRealtime(); - TraceUtil.beginSection("createAudioDecoder"); - decoder = createDecoder(inputFormat, mediaCrypto); - TraceUtil.endSection(); - long codecInitializedTimestamp = SystemClock.elapsedRealtime(); - eventDispatcher.decoderInitialized(decoder.getName(), codecInitializedTimestamp, - codecInitializedTimestamp - codecInitializingTimestamp); - decoderCounters.decoderInitCount++; - } catch (AudioDecoderException e) { - throw createRendererException(e, inputFormat); - } - } - - private void releaseDecoder() { - inputBuffer = null; - outputBuffer = null; - decoderReinitializationState = REINITIALIZATION_STATE_NONE; - decoderReceivedBuffers = false; - if (decoder != null) { - decoder.release(); - decoder = null; - decoderCounters.decoderReleaseCount++; - } - setDecoderDrmSession(null); - } - - private void setSourceDrmSession(@Nullable DrmSession session) { - DrmSession.replaceSession(sourceDrmSession, session); - sourceDrmSession = session; - } - - private void setDecoderDrmSession(@Nullable DrmSession session) { - DrmSession.replaceSession(decoderDrmSession, session); - decoderDrmSession = session; - } - - @SuppressWarnings("unchecked") - private void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException { - Format newFormat = Assertions.checkNotNull(formatHolder.format); - if (formatHolder.includesDrmSession) { - setSourceDrmSession((DrmSession) formatHolder.drmSession); - } else { - sourceDrmSession = - getUpdatedSourceDrmSession(inputFormat, newFormat, drmSessionManager, sourceDrmSession); - } - Format oldFormat = inputFormat; - inputFormat = newFormat; - - if (!canKeepCodec(oldFormat, inputFormat)) { - if (decoderReceivedBuffers) { - // Signal end of stream and wait for any final output buffers before re-initialization. - decoderReinitializationState = REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM; - } else { - // There aren't any final output buffers, so release the decoder immediately. - releaseDecoder(); - maybeInitDecoder(); - audioTrackNeedsConfigure = true; - } - } - - encoderDelay = inputFormat.encoderDelay; - encoderPadding = inputFormat.encoderPadding; - - eventDispatcher.inputFormatChanged(inputFormat); - } - - private void onQueueInputBuffer(DecoderInputBuffer buffer) { - if (allowFirstBufferPositionDiscontinuity && !buffer.isDecodeOnly()) { - // TODO: Remove this hack once we have a proper fix for [Internal: b/71876314]. - // Allow the position to jump if the first presentable input buffer has a timestamp that - // differs significantly from what was expected. - if (Math.abs(buffer.timeUs - currentPositionUs) > 500000) { - currentPositionUs = buffer.timeUs; - } - allowFirstBufferPositionDiscontinuity = false; - } - } - - private void updateCurrentPosition() { - long newCurrentPositionUs = audioSink.getCurrentPositionUs(isEnded()); - if (newCurrentPositionUs != AudioSink.CURRENT_POSITION_NOT_SET) { - currentPositionUs = - allowPositionDiscontinuity - ? 
newCurrentPositionUs - : Math.max(currentPositionUs, newCurrentPositionUs); - allowPositionDiscontinuity = false; - } - } - - private final class AudioSinkListener implements AudioSink.Listener { - - @Override - public void onAudioSessionId(int audioSessionId) { - eventDispatcher.audioSessionId(audioSessionId); - SimpleDecoderAudioRenderer.this.onAudioSessionId(audioSessionId); - } - - @Override - public void onPositionDiscontinuity() { - onAudioTrackPositionDiscontinuity(); - // We are out of sync so allow currentPositionUs to jump backwards. - SimpleDecoderAudioRenderer.this.allowPositionDiscontinuity = true; - } - - @Override - public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) { - eventDispatcher.audioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs); - onAudioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs); - } - - } - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/Sonic.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/Sonic.java index 6cd46bb705..58cd145e35 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/Sonic.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/Sonic.java @@ -16,14 +16,16 @@ */ package com.google.android.exoplayer2.audio; +import static java.lang.Math.min; + import com.google.android.exoplayer2.util.Assertions; import java.nio.ShortBuffer; import java.util.Arrays; /** * Sonic audio stream processor for time/pitch stretching. - *

- * Based on https://github.com/waywardgeek/sonic.
+ *
+ * <p>
      Based on https://github.com/waywardgeek/sonic. */ /* package */ final class Sonic { @@ -81,6 +83,14 @@ public Sonic( pitchBuffer = new short[maxRequiredFrameCount * channelCount]; } + /** + * Returns the number of bytes that have been input, but will not be processed until more input + * data is provided. + */ + public int getPendingInputBytes() { + return inputFrameCount * channelCount * BYTES_PER_SAMPLE; + } + /** * Queues remaining data from {@code buffer}, and advances its position by the number of bytes * consumed. @@ -103,7 +113,7 @@ public void queueInput(ShortBuffer buffer) { * @param buffer A {@link ShortBuffer} into which output will be written. */ public void getOutput(ShortBuffer buffer) { - int framesToRead = Math.min(buffer.remaining() / channelCount, outputFrameCount); + int framesToRead = min(buffer.remaining() / channelCount, outputFrameCount); buffer.put(outputBuffer, 0, framesToRead * channelCount); outputFrameCount -= framesToRead; System.arraycopy( @@ -205,7 +215,7 @@ private void copyToOutput(short[] samples, int positionFrames, int frameCount) { } private int copyInputToOutput(int positionFrames) { - int frameCount = Math.min(maxRequiredFrameCount, remainingInputToCopyFrameCount); + int frameCount = min(maxRequiredFrameCount, remainingInputToCopyFrameCount); copyToOutput(inputBuffer, positionFrames, frameCount); remainingInputToCopyFrameCount -= frameCount; return frameCount; @@ -502,5 +512,4 @@ private static void overlapAdd( } } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java index b9a59cd620..ccb0091f21 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java @@ -15,11 +15,13 @@ */ package com.google.android.exoplayer2.audio; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.ShortBuffer; @@ -29,37 +31,17 @@ */ public final class SonicAudioProcessor implements AudioProcessor { - /** - * The maximum allowed playback speed in {@link #setSpeed(float)}. - */ - public static final float MAXIMUM_SPEED = 8.0f; - /** - * The minimum allowed playback speed in {@link #setSpeed(float)}. - */ - public static final float MINIMUM_SPEED = 0.1f; - /** - * The maximum allowed pitch in {@link #setPitch(float)}. - */ - public static final float MAXIMUM_PITCH = 8.0f; - /** - * The minimum allowed pitch in {@link #setPitch(float)}. - */ - public static final float MINIMUM_PITCH = 0.1f; - /** - * Indicates that the output sample rate should be the same as the input. - */ + /** Indicates that the output sample rate should be the same as the input. */ public static final int SAMPLE_RATE_NO_CHANGE = -1; - /** - * The threshold below which the difference between two pitch/speed factors is negligible. - */ - private static final float CLOSE_THRESHOLD = 0.01f; + /** The threshold below which the difference between two pitch/speed factors is negligible. 
*/ + private static final float CLOSE_THRESHOLD = 0.0001f; /** - * The minimum number of output bytes at which the speedup is calculated using the input/output - * byte counts, rather than using the current playback parameters speed. + * The minimum number of output bytes required for duration scaling to be calculated using the + * input and output byte counts, rather than using the current playback speed. */ - private static final int MIN_BYTES_FOR_SPEEDUP_CALCULATION = 1024; + private static final int MIN_BYTES_FOR_DURATION_SCALING_CALCULATION = 1024; private int pendingOutputSampleRate; private float speed; @@ -79,9 +61,7 @@ public final class SonicAudioProcessor implements AudioProcessor { private long outputBytes; private boolean inputEnded; - /** - * Creates a new Sonic audio processor. - */ + /** Creates a new Sonic audio processor. */ public SonicAudioProcessor() { speed = 1f; pitch = 1f; @@ -96,37 +76,31 @@ public SonicAudioProcessor() { } /** - * Sets the playback speed. This method may only be called after draining data through the + * Sets the target playback speed. This method may only be called after draining data through the * processor. The value returned by {@link #isActive()} may change, and the processor must be * {@link #flush() flushed} before queueing more data. * - * @param speed The requested new playback speed. - * @return The actual new playback speed. + * @param speed The target factor by which playback should be sped up. */ - public float setSpeed(float speed) { - speed = Util.constrainValue(speed, MINIMUM_SPEED, MAXIMUM_SPEED); + public void setSpeed(float speed) { if (this.speed != speed) { this.speed = speed; pendingSonicRecreation = true; } - return speed; } /** - * Sets the playback pitch. This method may only be called after draining data through the + * Sets the target playback pitch. This method may only be called after draining data through the * processor. The value returned by {@link #isActive()} may change, and the processor must be * {@link #flush() flushed} before queueing more data. * - * @param pitch The requested new pitch. - * @return The actual new pitch. + * @param pitch The target pitch. */ - public float setPitch(float pitch) { - pitch = Util.constrainValue(pitch, MINIMUM_PITCH, MAXIMUM_PITCH); + public void setPitch(float pitch) { if (this.pitch != pitch) { this.pitch = pitch; pendingSonicRecreation = true; } - return pitch; } /** @@ -142,27 +116,32 @@ public void setOutputSampleRateHz(int sampleRateHz) { } /** - * Returns the specified duration scaled to take into account the speedup factor of this instance, - * in the same units as {@code duration}. + * Returns the media duration corresponding to the specified playout duration, taking speed + * adjustment into account. + * + *
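 * <p>[Editor's note: the snippet below is an illustrative sketch, not part of this change. It
 * assumes durations are passed in microseconds and uses only methods visible in this diff.] At a
 * target speed of 2f roughly two seconds of media are consumed per second of playout, so:
 *
 * <pre>{@code
 * SonicAudioProcessor sonicProcessor = new SonicAudioProcessor();
 * sonicProcessor.setSpeed(2f);
 * // Before enough bytes have been processed this falls back to scaling by the target speed;
 * // either way the result is approximately 2_000_000 for 1_000_000 of playout.
 * long mediaDurationUs = sonicProcessor.getMediaDuration(/* playoutDuration= */ 1_000_000);
 * }</pre>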

      The scaling performed by this method will use the actual playback speed achieved by the + * audio processor, on average, since it was last flushed. This may differ very slightly from the + * target playback speed. * - * @param duration The duration to scale taking into account speedup. - * @return The specified duration scaled to take into account speedup, in the same units as - * {@code duration}. + * @param playoutDuration The playout duration to scale. + * @return The corresponding media duration, in the same units as {@code duration}. */ - public long scaleDurationForSpeedup(long duration) { - if (outputBytes >= MIN_BYTES_FOR_SPEEDUP_CALCULATION) { + public long getMediaDuration(long playoutDuration) { + if (outputBytes >= MIN_BYTES_FOR_DURATION_SCALING_CALCULATION) { + long processedInputBytes = inputBytes - checkNotNull(sonic).getPendingInputBytes(); return outputAudioFormat.sampleRate == inputAudioFormat.sampleRate - ? Util.scaleLargeTimestamp(duration, inputBytes, outputBytes) + ? Util.scaleLargeTimestamp(playoutDuration, processedInputBytes, outputBytes) : Util.scaleLargeTimestamp( - duration, - inputBytes * outputAudioFormat.sampleRate, + playoutDuration, + processedInputBytes * outputAudioFormat.sampleRate, outputBytes * inputAudioFormat.sampleRate); } else { - return (long) ((double) speed * duration); + return (long) ((double) speed * playoutDuration); } } @Override + @CanIgnoreReturnValue public AudioFormat configure(AudioFormat inputAudioFormat) throws UnhandledAudioFormatException { if (inputAudioFormat.encoding != C.ENCODING_PCM_16BIT) { throw new UnhandledAudioFormatException(inputAudioFormat); @@ -188,32 +167,20 @@ public boolean isActive() { @Override public void queueInput(ByteBuffer inputBuffer) { - Sonic sonic = Assertions.checkNotNull(this.sonic); - if (inputBuffer.hasRemaining()) { - ShortBuffer shortBuffer = inputBuffer.asShortBuffer(); - int inputSize = inputBuffer.remaining(); - inputBytes += inputSize; - sonic.queueInput(shortBuffer); - inputBuffer.position(inputBuffer.position() + inputSize); - } - int outputSize = sonic.getOutputSize(); - if (outputSize > 0) { - if (buffer.capacity() < outputSize) { - buffer = ByteBuffer.allocateDirect(outputSize).order(ByteOrder.nativeOrder()); - shortBuffer = buffer.asShortBuffer(); - } else { - buffer.clear(); - shortBuffer.clear(); - } - sonic.getOutput(shortBuffer); - outputBytes += outputSize; - buffer.limit(outputSize); - outputBuffer = buffer; + if (!inputBuffer.hasRemaining()) { + return; } + Sonic sonic = checkNotNull(this.sonic); + ShortBuffer shortBuffer = inputBuffer.asShortBuffer(); + int inputSize = inputBuffer.remaining(); + inputBytes += inputSize; + sonic.queueInput(shortBuffer); + inputBuffer.position(inputBuffer.position() + inputSize); } @Override public void queueEndOfStream() { + // TODO(internal b/174554082): assert sonic is non-null here and in getOutput. 
if (sonic != null) { sonic.queueEndOfStream(); } @@ -222,6 +189,23 @@ public void queueEndOfStream() { @Override public ByteBuffer getOutput() { + @Nullable Sonic sonic = this.sonic; + if (sonic != null) { + int outputSize = sonic.getOutputSize(); + if (outputSize > 0) { + if (buffer.capacity() < outputSize) { + buffer = ByteBuffer.allocateDirect(outputSize).order(ByteOrder.nativeOrder()); + shortBuffer = buffer.asShortBuffer(); + } else { + buffer.clear(); + shortBuffer.clear(); + } + sonic.getOutput(shortBuffer); + outputBytes += outputSize; + buffer.limit(outputSize); + outputBuffer = buffer; + } + } ByteBuffer outputBuffer = this.outputBuffer; this.outputBuffer = EMPTY_BUFFER; return outputBuffer; @@ -273,5 +257,4 @@ public void reset() { outputBytes = 0; inputEnded = false; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/TeeAudioProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/TeeAudioProcessor.java index a9afa47198..8ffd98837b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/TeeAudioProcessor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/TeeAudioProcessor.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.audio; +import static java.lang.Math.min; + import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.util.Assertions; @@ -32,8 +34,8 @@ *

      This audio processor can be inserted into the audio processor chain to access audio data * before/after particular processing steps have been applied. For example, to get audio output * after playback speed adjustment and silence skipping have been applied it is necessary to pass a - * custom {@link com.google.android.exoplayer2.audio.DefaultAudioSink.AudioProcessorChain} when - * creating the audio sink, and include this audio processor after all other audio processors. + * custom {@link AudioProcessorChain} when creating the audio sink, and include this audio processor + * after all other audio processors. */ public final class TeeAudioProcessor extends BaseAudioProcessor { @@ -123,7 +125,7 @@ public static final class WavFileAudioBufferSink implements AudioBufferSink { private int sampleRateHz; private int channelCount; - @C.PcmEncoding private int encoding; + private @C.PcmEncoding int encoding; @Nullable private RandomAccessFile randomAccessFile; private int counter; private int bytesWritten; @@ -198,7 +200,7 @@ private void writeFileHeader(RandomAccessFile randomAccessFile) throws IOExcepti private void writeBuffer(ByteBuffer buffer) throws IOException { RandomAccessFile randomAccessFile = Assertions.checkNotNull(this.randomAccessFile); while (buffer.hasRemaining()) { - int bytesToWrite = Math.min(buffer.remaining(), scratchBuffer.length); + int bytesToWrite = min(buffer.remaining(), scratchBuffer.length); buffer.get(scratchBuffer, 0, bytesToWrite); randomAccessFile.write(scratchBuffer, 0, bytesToWrite); bytesWritten += bytesToWrite; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/TrimmingAudioProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/TrimmingAudioProcessor.java index f630c267e6..5b0d984099 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/TrimmingAudioProcessor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/TrimmingAudioProcessor.java @@ -15,14 +15,17 @@ */ package com.google.android.exoplayer2.audio; +import static java.lang.Math.min; + import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.util.Util; import java.nio.ByteBuffer; /** Audio processor for trimming samples from the start/end of data. */ /* package */ final class TrimmingAudioProcessor extends BaseAudioProcessor { - @C.PcmEncoding private static final int OUTPUT_ENCODING = C.ENCODING_PCM_16BIT; + private static final @C.PcmEncoding int OUTPUT_ENCODING = C.ENCODING_PCM_16BIT; private int trimStartFrames; private int trimEndFrames; @@ -43,9 +46,10 @@ public TrimmingAudioProcessor() { * processor. After calling this method, call {@link #configure(AudioFormat)} to apply the new * trimming frame counts. * + *
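 * <p>[Editor's note: the snippet below is an illustrative sketch, not part of this change; the
 * frame counts and input format are arbitrary, and the {@code AudioProcessor.AudioFormat}
 * constructor is assumed from the surrounding library version.] Within this package the processor
 * is typically driven like so:
 *
 * <pre>{@code
 * TrimmingAudioProcessor trimmingProcessor = new TrimmingAudioProcessor();
 * trimmingProcessor.setTrimFrameCount(/* trimStartFrames= */ 1024, /* trimEndFrames= */ 256);
 * // configure(AudioFormat) declares UnhandledAudioFormatException for unsupported encodings.
 * AudioProcessor.AudioFormat outputFormat =
 *     trimmingProcessor.configure(
 *         new AudioProcessor.AudioFormat(
 *             /* sampleRate= */ 44_100, /* channelCount= */ 2, C.ENCODING_PCM_16BIT));
 * }</pre>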

      See {@link AudioSink#configure(Format, int, int[])}. + * * @param trimStartFrames The number of audio frames to trim from the start of audio. * @param trimEndFrames The number of audio frames to trim from the end of audio. - * @see AudioSink#configure(int, int, int, int, int[], int, int) */ public void setTrimFrameCount(int trimStartFrames, int trimEndFrames) { this.trimStartFrames = trimStartFrames; @@ -86,7 +90,7 @@ public void queueInput(ByteBuffer inputBuffer) { } // Trim any pending start bytes from the input buffer. - int trimBytes = Math.min(remaining, pendingTrimStartBytes); + int trimBytes = min(remaining, pendingTrimStartBytes); trimmedFrameCount += trimBytes / inputAudioFormat.bytesPerFrame; pendingTrimStartBytes -= trimBytes; inputBuffer.position(position + trimBytes); @@ -176,5 +180,4 @@ protected void onFlush() { protected void onReset() { endBuffer = Util.EMPTY_BYTE_ARRAY; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/WavUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/WavUtil.java index 208989124a..c0f4b4c2d5 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/WavUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/WavUtil.java @@ -30,6 +30,10 @@ public final class WavUtil { public static final int FMT_FOURCC = 0x666d7420; /** Four character code for "data". */ public static final int DATA_FOURCC = 0x64617461; + /** Four character code for "RF64". */ + public static final int RF64_FOURCC = 0x52463634; + /** Four character code for "ds64". */ + public static final int DS64_FOURCC = 0x64733634; /** WAVE type value for integer PCM audio data. */ public static final int TYPE_PCM = 0x0001; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/package-info.java index 5ae2413d92..134e17d2a3 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/package-info.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/audio/package-info.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2019 The Android Open Source Project + * Copyright 2022 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/database/DatabaseProvider.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/database/DatabaseProvider.java index 2bb5f260ba..2d6130400e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/database/DatabaseProvider.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/database/DatabaseProvider.java @@ -19,12 +19,12 @@ import android.database.sqlite.SQLiteException; /** - * Provides {@link SQLiteDatabase} instances to ExoPlayer components, which may read and write + * Provides {@link SQLiteDatabase} instances to media library components, which may read and write * tables prefixed with {@link #TABLE_PREFIX}. */ public interface DatabaseProvider { - /** Prefix for tables that can be read and written by ExoPlayer components. */ + /** Prefix for tables that can be read and written by media library components. 
*/ String TABLE_PREFIX = "ExoPlayer"; /** diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/database/ExoDatabaseProvider.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/database/ExoDatabaseProvider.java index 32dda5965c..4d9176e5b9 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/database/ExoDatabaseProvider.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/database/ExoDatabaseProvider.java @@ -16,80 +16,14 @@ package com.google.android.exoplayer2.database; import android.content.Context; -import android.database.Cursor; -import android.database.SQLException; -import android.database.sqlite.SQLiteDatabase; -import android.database.sqlite.SQLiteOpenHelper; -import com.google.android.exoplayer2.util.Log; /** - * An {@link SQLiteOpenHelper} that provides instances of a standalone ExoPlayer database. - * - *

      Suitable for use by applications that do not already have their own database, or that would - * prefer to keep ExoPlayer tables isolated in their own database. Other applications should prefer - * to use {@link DefaultDatabaseProvider} with their own {@link SQLiteOpenHelper}. + * @deprecated Use {@link StandaloneDatabaseProvider}. */ -public final class ExoDatabaseProvider extends SQLiteOpenHelper implements DatabaseProvider { - - /** The file name used for the standalone ExoPlayer database. */ - public static final String DATABASE_NAME = "exoplayer_internal.db"; - - private static final int VERSION = 1; - private static final String TAG = "ExoDatabaseProvider"; +@Deprecated +public final class ExoDatabaseProvider extends StandaloneDatabaseProvider { - /** - * Provides instances of the database located by passing {@link #DATABASE_NAME} to {@link - * Context#getDatabasePath(String)}. - * - * @param context Any context. - */ public ExoDatabaseProvider(Context context) { - super(context.getApplicationContext(), DATABASE_NAME, /* factory= */ null, VERSION); - } - - @Override - public void onCreate(SQLiteDatabase db) { - // Features create their own tables. - } - - @Override - public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { - // Features handle their own upgrades. - } - - @Override - public void onDowngrade(SQLiteDatabase db, int oldVersion, int newVersion) { - wipeDatabase(db); - } - - /** - * Makes a best effort to wipe the existing database. The wipe may be incomplete if the database - * contains foreign key constraints. - */ - private static void wipeDatabase(SQLiteDatabase db) { - String[] columns = {"type", "name"}; - try (Cursor cursor = - db.query( - "sqlite_master", - columns, - /* selection= */ null, - /* selectionArgs= */ null, - /* groupBy= */ null, - /* having= */ null, - /* orderBy= */ null)) { - while (cursor.moveToNext()) { - String type = cursor.getString(0); - String name = cursor.getString(1); - if (!"sqlite_sequence".equals(name)) { - // If it's not an SQL-controlled entity, drop it - String sql = "DROP " + type + " IF EXISTS " + name; - try { - db.execSQL(sql); - } catch (SQLException e) { - Log.e(TAG, "Error executing " + sql, e); - } - } - } - } + super(context); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/database/StandaloneDatabaseProvider.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/database/StandaloneDatabaseProvider.java new file mode 100644 index 0000000000..244cd39490 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/database/StandaloneDatabaseProvider.java @@ -0,0 +1,97 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.database; + +import android.content.Context; +import android.database.Cursor; +import android.database.SQLException; +import android.database.sqlite.SQLiteDatabase; +import android.database.sqlite.SQLiteOpenHelper; +import com.google.android.exoplayer2.util.Log; + +/** + * An {@link SQLiteOpenHelper} that provides instances of a standalone database. + * + *

      Suitable for use by applications that do not already have their own database, or that would + * prefer to keep tables used by media library components isolated in their own database. Other + * applications should prefer to use {@link DefaultDatabaseProvider} with their own {@link + * SQLiteOpenHelper}. + */ +// TODO: Make this class final when ExoDatabaseProvider is removed. +public class StandaloneDatabaseProvider extends SQLiteOpenHelper implements DatabaseProvider { + + /** The file name used for the standalone database. */ + public static final String DATABASE_NAME = "exoplayer_internal.db"; + + private static final int VERSION = 1; + private static final String TAG = "SADatabaseProvider"; + + /** + * Provides instances of the database located by passing {@link #DATABASE_NAME} to {@link + * Context#getDatabasePath(String)}. + * + * @param context Any context. + */ + public StandaloneDatabaseProvider(Context context) { + super(context.getApplicationContext(), DATABASE_NAME, /* factory= */ null, VERSION); + } + + @Override + public void onCreate(SQLiteDatabase db) { + // Features create their own tables. + } + + @Override + public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { + // Features handle their own upgrades. + } + + @Override + public void onDowngrade(SQLiteDatabase db, int oldVersion, int newVersion) { + wipeDatabase(db); + } + + /** + * Makes a best effort to wipe the existing database. The wipe may be incomplete if the database + * contains foreign key constraints. + */ + private static void wipeDatabase(SQLiteDatabase db) { + String[] columns = {"type", "name"}; + try (Cursor cursor = + db.query( + "sqlite_master", + columns, + /* selection= */ null, + /* selectionArgs= */ null, + /* groupBy= */ null, + /* having= */ null, + /* orderBy= */ null)) { + while (cursor.moveToNext()) { + String type = cursor.getString(0); + String name = cursor.getString(1); + if (!"sqlite_sequence".equals(name)) { + // If it's not an SQL-controlled entity, drop it + String sql = "DROP " + type + " IF EXISTS " + name; + try { + db.execSQL(sql); + } catch (SQLException e) { + Log.e(TAG, "Error executing " + sql, e); + } + } + } + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/database/VersionTable.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/database/VersionTable.java index be367d2f22..4c0a0e7fd8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/database/VersionTable.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/database/VersionTable.java @@ -15,23 +15,30 @@ */ package com.google.android.exoplayer2.database; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.content.ContentValues; import android.database.Cursor; -import android.database.DatabaseUtils; import android.database.SQLException; import android.database.sqlite.SQLiteDatabase; import androidx.annotation.IntDef; -import androidx.annotation.VisibleForTesting; +import com.google.android.exoplayer2.ExoPlayerLibraryInfo; +import com.google.android.exoplayer2.util.Util; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** - * Utility methods for accessing versions of ExoPlayer database components. This allows them to be - * versioned independently to the version of the containing database. + * Utility methods for accessing versions of media library database components. 
This allows them to + * be versioned independently to the version of the containing database. */ public final class VersionTable { + static { + ExoPlayerLibraryInfo.registerModule("goog.exo.database"); + } + /** Returned by {@link #getVersion(SQLiteDatabase, int, String)} if the version is unset. */ public static final int VERSION_UNSET = -1; /** Version of tables used for offline functionality. */ @@ -40,6 +47,8 @@ public final class VersionTable { public static final int FEATURE_CACHE_CONTENT_METADATA = 1; /** Version of tables used for cache file metadata. */ public static final int FEATURE_CACHE_FILE_METADATA = 2; + /** Version of tables used from external features. */ + public static final int FEATURE_EXTERNAL = 1000; private static final String TABLE_NAME = DatabaseProvider.TABLE_PREFIX + "Versions"; @@ -67,7 +76,13 @@ public final class VersionTable { @Documented @Retention(RetentionPolicy.SOURCE) - @IntDef({FEATURE_OFFLINE, FEATURE_CACHE_CONTENT_METADATA, FEATURE_CACHE_FILE_METADATA}) + @Target(TYPE_USE) + @IntDef({ + FEATURE_OFFLINE, + FEATURE_CACHE_CONTENT_METADATA, + FEATURE_CACHE_FILE_METADATA, + FEATURE_EXTERNAL + }) private @interface Feature {} private VersionTable() {} @@ -108,7 +123,7 @@ public static void removeVersion( SQLiteDatabase writableDatabase, @Feature int feature, String instanceUid) throws DatabaseIOException { try { - if (!tableExists(writableDatabase, TABLE_NAME)) { + if (!Util.tableExists(writableDatabase, TABLE_NAME)) { return; } writableDatabase.delete( @@ -133,7 +148,7 @@ public static void removeVersion( public static int getVersion(SQLiteDatabase database, @Feature int feature, String instanceUid) throws DatabaseIOException { try { - if (!tableExists(database, TABLE_NAME)) { + if (!Util.tableExists(database, TABLE_NAME)) { return VERSION_UNSET; } try (Cursor cursor = @@ -156,14 +171,6 @@ public static int getVersion(SQLiteDatabase database, @Feature int feature, Stri } } - @VisibleForTesting - /* package */ static boolean tableExists(SQLiteDatabase readableDatabase, String tableName) { - long count = - DatabaseUtils.queryNumEntries( - readableDatabase, "sqlite_master", "tbl_name = ?", new String[] {tableName}); - return count > 0; - } - private static String[] featureAndInstanceUidArguments(int feature, String instance) { return new String[] {Integer.toString(feature), instance}; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/Buffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/Buffer.java index 8fd25f2cf9..43a3f18469 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/Buffer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/Buffer.java @@ -17,38 +17,32 @@ import com.google.android.exoplayer2.C; -/** - * Base class for buffers with flags. - */ +/** Base class for buffers with flags. */ public abstract class Buffer { - @C.BufferFlags - private int flags; + private @C.BufferFlags int flags; - /** - * Clears the buffer. - */ + /** Clears the buffer. */ public void clear() { flags = 0; } - /** - * Returns whether the {@link C#BUFFER_FLAG_DECODE_ONLY} flag is set. - */ + /** Returns whether the {@link C#BUFFER_FLAG_DECODE_ONLY} flag is set. */ public final boolean isDecodeOnly() { return getFlag(C.BUFFER_FLAG_DECODE_ONLY); } - /** - * Returns whether the {@link C#BUFFER_FLAG_END_OF_STREAM} flag is set. - */ + /** Returns whether the {@link C#BUFFER_FLAG_FIRST_SAMPLE} flag is set. 
*/ + public final boolean isFirstSample() { + return getFlag(C.BUFFER_FLAG_FIRST_SAMPLE); + } + + /** Returns whether the {@link C#BUFFER_FLAG_END_OF_STREAM} flag is set. */ public final boolean isEndOfStream() { return getFlag(C.BUFFER_FLAG_END_OF_STREAM); } - /** - * Returns whether the {@link C#BUFFER_FLAG_KEY_FRAME} flag is set. - */ + /** Returns whether the {@link C#BUFFER_FLAG_KEY_FRAME} flag is set. */ public final boolean isKeyFrame() { return getFlag(C.BUFFER_FLAG_KEY_FRAME); } @@ -71,8 +65,8 @@ public final void setFlags(@C.BufferFlags int flags) { /** * Adds the {@code flag} to this buffer's flags. * - * @param flag The flag to add to this buffer's flags, which should be one of the - * {@code C.BUFFER_FLAG_*} constants. + * @param flag The flag to add to this buffer's flags, which should be one of the {@code + * C.BUFFER_FLAG_*} constants. */ public final void addFlag(@C.BufferFlags int flag) { flags |= flag; @@ -96,5 +90,4 @@ public final void clearFlag(@C.BufferFlags int flag) { protected final boolean getFlag(@C.BufferFlags int flag) { return (flags & flag) == flag; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/CryptoConfig.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/CryptoConfig.java new file mode 100644 index 0000000000..bc592b26a1 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/CryptoConfig.java @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.decoder; + +import com.google.android.exoplayer2.C; + +/** + * Configuration for a decoder to allow it to decode encrypted media data. The configuration is + * {@link C.CryptoType} specific. + */ +public interface CryptoConfig {} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/CryptoException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/CryptoException.java new file mode 100644 index 0000000000..24ff6823ba --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/CryptoException.java @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.decoder; + +/** Thrown when a non-platform component fails to decrypt data. 
*/ +public class CryptoException extends Exception { + + /** A component specific error code. */ + public final int errorCode; + + /** + * @param errorCode A component specific error code. + * @param message The detail message. + */ + public CryptoException(int errorCode, String message) { + super(message); + this.errorCode = errorCode; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/CryptoInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/CryptoInfo.java index b865d5bb6f..bb4a259006 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/CryptoInfo.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/CryptoInfo.java @@ -15,12 +15,16 @@ */ package com.google.android.exoplayer2.decoder; -import android.annotation.TargetApi; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; /** - * Compatibility wrapper for {@link android.media.MediaCodec.CryptoInfo}. + * Metadata describing the structure of an encrypted input sample. + * + *

      This class is a compatibility wrapper for {@link android.media.MediaCodec.CryptoInfo}. */ public final class CryptoInfo { @@ -30,33 +34,33 @@ public final class CryptoInfo { * * @see android.media.MediaCodec.CryptoInfo#iv */ - public byte[] iv; + @Nullable public byte[] iv; /** * The 16 byte key id. * * @see android.media.MediaCodec.CryptoInfo#key */ - public byte[] key; + @Nullable public byte[] key; /** * The type of encryption that has been applied. Must be one of the {@link C.CryptoMode} values. * * @see android.media.MediaCodec.CryptoInfo#mode */ - @C.CryptoMode public int mode; + public @C.CryptoMode int mode; /** * The number of leading unencrypted bytes in each sub-sample. If null, all bytes are treated as * encrypted and {@link #numBytesOfEncryptedData} must be specified. * * @see android.media.MediaCodec.CryptoInfo#numBytesOfClearData */ - public int[] numBytesOfClearData; + @Nullable public int[] numBytesOfClearData; /** * The number of trailing encrypted bytes in each sub-sample. If null, all bytes are treated as * clear and {@link #numBytesOfClearData} must be specified. * * @see android.media.MediaCodec.CryptoInfo#numBytesOfEncryptedData */ - public int[] numBytesOfEncryptedData; + @Nullable public int[] numBytesOfEncryptedData; /** * The number of subSamples that make up the buffer's contents. * @@ -73,7 +77,7 @@ public final class CryptoInfo { public int clearBlocks; private final android.media.MediaCodec.CryptoInfo frameworkCryptoInfo; - private final PatternHolderV24 patternHolder; + @Nullable private final PatternHolderV24 patternHolder; public CryptoInfo() { frameworkCryptoInfo = new android.media.MediaCodec.CryptoInfo(); @@ -83,8 +87,15 @@ public CryptoInfo() { /** * @see android.media.MediaCodec.CryptoInfo#set(int, int[], int[], byte[], byte[], int) */ - public void set(int numSubSamples, int[] numBytesOfClearData, int[] numBytesOfEncryptedData, - byte[] key, byte[] iv, @C.CryptoMode int mode, int encryptedBlocks, int clearBlocks) { + public void set( + int numSubSamples, + int[] numBytesOfClearData, + int[] numBytesOfEncryptedData, + byte[] key, + byte[] iv, + @C.CryptoMode int mode, + int encryptedBlocks, + int clearBlocks) { this.numSubSamples = numSubSamples; this.numBytesOfClearData = numBytesOfClearData; this.numBytesOfEncryptedData = numBytesOfEncryptedData; @@ -102,7 +113,7 @@ public void set(int numSubSamples, int[] numBytesOfClearData, int[] numBytesOfEn frameworkCryptoInfo.iv = iv; frameworkCryptoInfo.mode = mode; if (Util.SDK_INT >= 24) { - patternHolder.set(encryptedBlocks, clearBlocks); + Assertions.checkNotNull(patternHolder).set(encryptedBlocks, clearBlocks); } } @@ -119,13 +130,30 @@ public android.media.MediaCodec.CryptoInfo getFrameworkCryptoInfo() { return frameworkCryptoInfo; } - /** @deprecated Use {@link #getFrameworkCryptoInfo()}. */ - @Deprecated - public android.media.MediaCodec.CryptoInfo getFrameworkCryptoInfoV16() { - return getFrameworkCryptoInfo(); + /** + * Increases the number of clear data for the first sub sample by {@code count}. + * + *
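 * <p>[Editor's note: the snippet below is an illustrative sketch, not part of this change; the
 * key, IV and byte counts are placeholder values.] For example:
 *
 * <pre>{@code
 * byte[] key = new byte[16]; // placeholder 16 byte key id
 * byte[] iv = new byte[16]; // placeholder 16 byte initialization vector
 * CryptoInfo cryptoInfo = new CryptoInfo();
 * cryptoInfo.set(
 *     /* numSubSamples= */ 1,
 *     /* numBytesOfClearData= */ new int[] {16},
 *     /* numBytesOfEncryptedData= */ new int[] {512},
 *     key,
 *     iv,
 *     C.CRYPTO_MODE_AES_CTR,
 *     /* encryptedBlocks= */ 0,
 *     /* clearBlocks= */ 0);
 * cryptoInfo.increaseClearDataFirstSubSampleBy(7);
 * // numBytesOfClearData[0] is now 16 + 7 = 23.
 * }</pre>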

If {@code count} is 0, this method is a no-op. Otherwise, it adds {@code count} to {@link
+ * #numBytesOfClearData}[0].
+ *
+ * <p>
      If {@link #numBytesOfClearData} is null (which is permitted), this method will instantiate + * it to a new {@code int[1]}. + * + * @param count The number of bytes to be added to the first subSample of {@link + * #numBytesOfClearData}. + */ + public void increaseClearDataFirstSubSampleBy(int count) { + if (count == 0) { + return; + } + if (numBytesOfClearData == null) { + numBytesOfClearData = new int[1]; + frameworkCryptoInfo.numBytesOfClearData = numBytesOfClearData; + } + numBytesOfClearData[0] += count; } - @TargetApi(24) + @RequiresApi(24) private static final class PatternHolderV24 { private final android.media.MediaCodec.CryptoInfo frameworkCryptoInfo; @@ -140,7 +168,5 @@ private void set(int encryptedBlocks, int clearBlocks) { pattern.set(encryptedBlocks, clearBlocks); frameworkCryptoInfo.setPattern(pattern); } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/Decoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/Decoder.java index 4552d190c3..68852182ae 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/Decoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/Decoder.java @@ -24,7 +24,7 @@ * @param The type of buffer output from the decoder. * @param The type of exception thrown from the decoder. */ -public interface Decoder { +public interface Decoder { /** * Returns the name of the decoder. @@ -65,9 +65,6 @@ public interface Decoder { */ void flush(); - /** - * Releases the decoder. Must be called when the decoder is no longer needed. - */ + /** Releases the decoder. Must be called when the decoder is no longer needed. */ void release(); - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderCounters.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderCounters.java index 8409bab558..0af4ff029f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderCounters.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderCounters.java @@ -15,64 +15,99 @@ */ package com.google.android.exoplayer2.decoder; +import static java.lang.Math.max; + +import com.google.android.exoplayer2.util.Util; + /** * Maintains decoder event counts, for debugging purposes only. - *

- * Counters should be written from the playback thread only. Counters may be read from any thread.
- * To ensure that the counter values are made visible across threads, users of this class should
- * invoke {@link #ensureUpdated()} prior to reading and after writing.
+ *
+ * <p>
      Counters should be written from the playback thread only. Counters may be read from any + * thread. To ensure that the counter values are made visible across threads, users of this class + * should invoke {@link #ensureUpdated()} prior to reading and after writing. */ public final class DecoderCounters { - /** - * The number of times a decoder has been initialized. - */ + /** The number of times a decoder has been initialized. */ public int decoderInitCount; - /** - * The number of times a decoder has been released. - */ + /** The number of times a decoder has been released. */ public int decoderReleaseCount; - /** - * The number of queued input buffers. - */ - public int inputBufferCount; + /** The number of input buffers queued to the decoder. */ + public int queuedInputBufferCount; /** * The number of skipped input buffers. - *

- * A skipped input buffer is an input buffer that was deliberately not sent to the decoder.
+ *
+ * <p>
      A skipped input buffer is an input buffer that was deliberately not queued to the decoder. */ public int skippedInputBufferCount; - /** - * The number of rendered output buffers. - */ + /** The number of rendered output buffers. */ public int renderedOutputBufferCount; /** * The number of skipped output buffers. - *

- * A skipped output buffer is an output buffer that was deliberately not rendered.
+ *
+ * <p>
      A skipped output buffer is an output buffer that was deliberately not rendered. This + * includes buffers that were never dequeued from the decoder and instead skipped while 'inside' + * the codec due to a flush. */ public int skippedOutputBufferCount; /** * The number of dropped buffers. - *

- * A dropped buffer is an buffer that was supposed to be decoded/rendered, but was instead
+ *
+ * <p>
A dropped buffer is a buffer that was supposed to be decoded/rendered, but was instead
 * dropped because it could not be rendered in time.
+ *
+ * <p>
      This includes all of {@link #droppedInputBufferCount} in addition to buffers dropped after + * being queued to the decoder. */ public int droppedBufferCount; + /** + * The number of input buffers dropped. + * + *

      A dropped input buffer is a buffer that was not queued to the decoder because it would not + * be rendered in time. + */ + public int droppedInputBufferCount; /** * The maximum number of dropped buffers without an interleaving rendered output buffer. - *

- * Skipped output buffers are ignored for the purposes of calculating this value.
+ *
+ * <p>
      Skipped buffers are ignored for the purposes of calculating this value. */ public int maxConsecutiveDroppedBufferCount; /** * The number of times all buffers to a keyframe were dropped. - *

- * Each time buffers to a keyframe are dropped, this counter is increased by one, and the dropped
- * buffer counters are increased by one (for the current output buffer) plus the number of buffers
- * dropped from the source to advance to the keyframe.
+ *
+ * <p>Each time buffers to a keyframe are dropped:
+ *
+ * <ul>
+ *   <li>This counter is incremented by one.
+ *   <li>{@link #droppedInputBufferCount} is incremented by the number of buffers dropped from the
+ *       source to advance to the keyframe.
+ *   <li>{@link #droppedBufferCount} is incremented by the sum of the number of buffers dropped
+ *       from the source to advance to the keyframe and the number of buffers 'inside' the
+ *       decoder.
+ * </ul>
      */ public int droppedToKeyframeCount; + /** + * The sum of the video frame processing offsets in microseconds. + * + *
<p>
      The processing offset for a video frame is the difference between the time at which the + * frame became available to render, and the time at which it was scheduled to be rendered. A + * positive value indicates the frame became available early enough, whereas a negative value + * indicates that the frame wasn't available until after the time at which it should have been + * rendered. + * + *
<p>
      Note: Use {@link #addVideoFrameProcessingOffset(long)} to update this field instead of + * updating it directly. + */ + public long totalVideoFrameProcessingOffsetUs; + /** + * The number of video frame processing offsets added. + * + *
<p>
      Note: Use {@link #addVideoFrameProcessingOffset(long)} to update this field instead of + * updating it directly. + */ + public int videoFrameProcessingOffsetCount; /** * Should be called to ensure counter values are made visible across threads. The playback thread @@ -92,14 +127,63 @@ public synchronized void ensureUpdated() { public void merge(DecoderCounters other) { decoderInitCount += other.decoderInitCount; decoderReleaseCount += other.decoderReleaseCount; - inputBufferCount += other.inputBufferCount; + queuedInputBufferCount += other.queuedInputBufferCount; skippedInputBufferCount += other.skippedInputBufferCount; renderedOutputBufferCount += other.renderedOutputBufferCount; skippedOutputBufferCount += other.skippedOutputBufferCount; droppedBufferCount += other.droppedBufferCount; - maxConsecutiveDroppedBufferCount = Math.max(maxConsecutiveDroppedBufferCount, - other.maxConsecutiveDroppedBufferCount); + droppedInputBufferCount += other.droppedInputBufferCount; + maxConsecutiveDroppedBufferCount = + max(maxConsecutiveDroppedBufferCount, other.maxConsecutiveDroppedBufferCount); droppedToKeyframeCount += other.droppedToKeyframeCount; + addVideoFrameProcessingOffsets( + other.totalVideoFrameProcessingOffsetUs, other.videoFrameProcessingOffsetCount); } + /** + * Adds a video frame processing offset to {@link #totalVideoFrameProcessingOffsetUs} and + * increases {@link #videoFrameProcessingOffsetCount} by one. + * + *
<p>
      Convenience method to ensure both fields are updated when adding a single offset. + * + * @param processingOffsetUs The video frame processing offset in microseconds. + */ + public void addVideoFrameProcessingOffset(long processingOffsetUs) { + addVideoFrameProcessingOffsets(processingOffsetUs, /* count= */ 1); + } + + private void addVideoFrameProcessingOffsets(long totalProcessingOffsetUs, int count) { + totalVideoFrameProcessingOffsetUs += totalProcessingOffsetUs; + videoFrameProcessingOffsetCount += count; + } + + @Override + public String toString() { + return Util.formatInvariant( + "DecoderCounters {\n " + + "decoderInits=%s,\n " + + "decoderReleases=%s\n " + + "queuedInputBuffers=%s\n " + + "skippedInputBuffers=%s\n " + + "renderedOutputBuffers=%s\n " + + "skippedOutputBuffers=%s\n " + + "droppedBuffers=%s\n " + + "droppedInputBuffers=%s\n " + + "maxConsecutiveDroppedBuffers=%s\n " + + "droppedToKeyframeEvents=%s\n " + + "totalVideoFrameProcessingOffsetUs=%s\n " + + "videoFrameProcessingOffsetCount=%s\n}", + decoderInitCount, + decoderReleaseCount, + queuedInputBufferCount, + skippedInputBufferCount, + renderedOutputBufferCount, + skippedOutputBufferCount, + droppedBufferCount, + droppedInputBufferCount, + maxConsecutiveDroppedBufferCount, + droppedToKeyframeCount, + totalVideoFrameProcessingOffsetUs, + videoFrameProcessingOffsetCount); + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderException.java new file mode 100644 index 0000000000..0af3313ea3 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderException.java @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.decoder; + +import androidx.annotation.Nullable; + +/** Thrown when a {@link Decoder} error occurs. */ +public class DecoderException extends Exception { + + /** + * Creates an instance. + * + * @param message The detail message for this exception. + */ + public DecoderException(String message) { + super(message); + } + + /** + * Creates an instance. + * + * @param cause The cause of this exception, or {@code null}. + */ + public DecoderException(@Nullable Throwable cause) { + super(cause); + } + + /** + * Creates an instance. + * + * @param message The detail message for this exception. + * @param cause The cause of this exception, or {@code null}. 
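// --- Illustrative aside (not part of this patch) -----------------------------------------------
// A minimal sketch of how a renderer might drive the DecoderCounters fields and helpers that this
// change renames or introduces (queuedInputBufferCount, addVideoFrameProcessingOffset,
// ensureUpdated, toString). The surrounding class and callback names are hypothetical.
import com.google.android.exoplayer2.decoder.DecoderCounters;

final class DecoderCountersUsageSketch {

  private final DecoderCounters decoderCounters = new DecoderCounters();

  // Playback thread: an input buffer was queued to the decoder.
  void onInputBufferQueued() {
    decoderCounters.queuedInputBufferCount++;
  }

  // Playback thread: a frame was rendered; earlyUs > 0 means it was ready ahead of schedule.
  void onFrameRendered(long earlyUs) {
    decoderCounters.renderedOutputBufferCount++;
    decoderCounters.addVideoFrameProcessingOffset(earlyUs);
  }

  // Any thread: take a consistent snapshot for logging.
  String snapshot() {
    decoderCounters.ensureUpdated();
    return decoderCounters.toString();
  }
}
// ------------------------------------------------------------------------------------------------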
+ */ + public DecoderException(String message, @Nullable Throwable cause) { + super(message, cause); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderInputBuffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderInputBuffer.java index bd5df4c8b1..7ecd14ff1f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderInputBuffer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderInputBuffer.java @@ -15,49 +15,78 @@ */ package com.google.android.exoplayer2.decoder; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ExoPlayerLibraryInfo; +import com.google.android.exoplayer2.Format; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.nio.ByteBuffer; import org.checkerframework.checker.nullness.qual.EnsuresNonNull; -/** - * Holds input for a decoder. - */ +/** Holds input for a decoder. */ public class DecoderInputBuffer extends Buffer { + static { + ExoPlayerLibraryInfo.registerModule("goog.exo.decoder"); + } + + /** + * Thrown when an attempt is made to write into a {@link DecoderInputBuffer} whose {@link + * #bufferReplacementMode} is {@link #BUFFER_REPLACEMENT_MODE_DISABLED} and who {@link #data} + * capacity is smaller than required. + */ + public static final class InsufficientCapacityException extends IllegalStateException { + + /** The current capacity of the buffer. */ + public final int currentCapacity; + /** The required capacity of the buffer. */ + public final int requiredCapacity; + + /** + * Creates an instance. + * + * @param currentCapacity The current capacity of the buffer. + * @param requiredCapacity The required capacity of the buffer. + */ + public InsufficientCapacityException(int currentCapacity, int requiredCapacity) { + super("Buffer too small (" + currentCapacity + " < " + requiredCapacity + ")"); + this.currentCapacity = currentCapacity; + this.requiredCapacity = requiredCapacity; + } + } + /** - * The buffer replacement mode, which may disable replacement. One of {@link + * The buffer replacement mode. This controls how {@link #ensureSpaceForWrite} generates + * replacement buffers when the capacity of the existing buffer is insufficient. One of {@link * #BUFFER_REPLACEMENT_MODE_DISABLED}, {@link #BUFFER_REPLACEMENT_MODE_NORMAL} or {@link * #BUFFER_REPLACEMENT_MODE_DIRECT}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ BUFFER_REPLACEMENT_MODE_DISABLED, BUFFER_REPLACEMENT_MODE_NORMAL, BUFFER_REPLACEMENT_MODE_DIRECT }) public @interface BufferReplacementMode {} - /** - * Disallows buffer replacement. - */ + /** Disallows buffer replacement. */ public static final int BUFFER_REPLACEMENT_MODE_DISABLED = 0; - /** - * Allows buffer replacement using {@link ByteBuffer#allocate(int)}. - */ + /** Allows buffer replacement using {@link ByteBuffer#allocate(int)}. */ public static final int BUFFER_REPLACEMENT_MODE_NORMAL = 1; - /** - * Allows buffer replacement using {@link ByteBuffer#allocateDirect(int)}. - */ + /** Allows buffer replacement using {@link ByteBuffer#allocateDirect(int)}. */ public static final int BUFFER_REPLACEMENT_MODE_DIRECT = 2; - /** - * {@link CryptoInfo} for encrypted data. - */ + /** The {@link Format}. 
*/ + @Nullable public Format format; + + /** {@link CryptoInfo} for encrypted data. */ public final CryptoInfo cryptoInfo; /** The buffer's data, or {@code null} if no data has been set. */ @@ -71,9 +100,7 @@ public class DecoderInputBuffer extends Buffer { */ public boolean waitingForKeys; - /** - * The time at which the sample should be presented. - */ + /** The time at which the sample should be presented. */ public long timeUs; /** @@ -82,25 +109,37 @@ public class DecoderInputBuffer extends Buffer { */ @Nullable public ByteBuffer supplementalData; - @BufferReplacementMode private final int bufferReplacementMode; + private final @BufferReplacementMode int bufferReplacementMode; + private final int paddingSize; + + /** Returns a new instance that's not able to hold any data. */ + public static DecoderInputBuffer newNoDataInstance() { + return new DecoderInputBuffer(BUFFER_REPLACEMENT_MODE_DISABLED); + } /** - * Creates a new instance for which {@link #isFlagsOnly()} will return true. + * Creates a new instance. * - * @return A new flags only input buffer. + * @param bufferReplacementMode The {@link BufferReplacementMode} replacement mode. */ - public static DecoderInputBuffer newFlagsOnlyInstance() { - return new DecoderInputBuffer(BUFFER_REPLACEMENT_MODE_DISABLED); + public DecoderInputBuffer(@BufferReplacementMode int bufferReplacementMode) { + this(bufferReplacementMode, /* paddingSize= */ 0); } /** - * @param bufferReplacementMode Determines the behavior of {@link #ensureSpaceForWrite(int)}. One - * of {@link #BUFFER_REPLACEMENT_MODE_DISABLED}, {@link #BUFFER_REPLACEMENT_MODE_NORMAL} and - * {@link #BUFFER_REPLACEMENT_MODE_DIRECT}. + * Creates a new instance. + * + * @param bufferReplacementMode The {@link BufferReplacementMode} replacement mode. + * @param paddingSize If non-zero, {@link #ensureSpaceForWrite(int)} will ensure that the buffer + * is this number of bytes larger than the requested length. This can be useful for decoders + * that consume data in fixed size blocks, for efficiency. Setting the padding size to the + * decoder's fixed read size is necessary to prevent such a decoder from trying to read beyond + * the end of the buffer. */ - public DecoderInputBuffer(@BufferReplacementMode int bufferReplacementMode) { + public DecoderInputBuffer(@BufferReplacementMode int bufferReplacementMode, int paddingSize) { this.cryptoInfo = new CryptoInfo(); this.bufferReplacementMode = bufferReplacementMode; + this.paddingSize = paddingSize; } /** @@ -127,45 +166,38 @@ public void resetSupplementalData(int length) { * whose capacity is sufficient. Data up to the current position is copied to the new buffer. * * @param length The length of the write that must be accommodated, in bytes. - * @throws IllegalStateException If there is insufficient capacity to accommodate the write and - * the buffer replacement mode of the holder is {@link #BUFFER_REPLACEMENT_MODE_DISABLED}. + * @throws InsufficientCapacityException If there is insufficient capacity to accommodate the + * write and {@link #bufferReplacementMode} is {@link #BUFFER_REPLACEMENT_MODE_DISABLED}. */ @EnsuresNonNull("data") public void ensureSpaceForWrite(int length) { - if (data == null) { + length += paddingSize; + @Nullable ByteBuffer currentData = data; + if (currentData == null) { data = createReplacementByteBuffer(length); return; } // Check whether the current buffer is sufficient. 
- int capacity = data.capacity(); - int position = data.position(); + int capacity = currentData.capacity(); + int position = currentData.position(); int requiredCapacity = position + length; if (capacity >= requiredCapacity) { + data = currentData; return; } // Instantiate a new buffer if possible. ByteBuffer newData = createReplacementByteBuffer(requiredCapacity); - newData.order(data.order()); + newData.order(currentData.order()); // Copy data up to the current position from the old buffer to the new one. if (position > 0) { - data.flip(); - newData.put(data); + currentData.flip(); + newData.put(currentData); } // Set the new buffer. data = newData; } - /** - * Returns whether the buffer is only able to hold flags, meaning {@link #data} is null and - * its replacement mode is {@link #BUFFER_REPLACEMENT_MODE_DISABLED}. - */ - public final boolean isFlagsOnly() { - return data == null && bufferReplacementMode == BUFFER_REPLACEMENT_MODE_DISABLED; - } - - /** - * Returns whether the {@link C#BUFFER_FLAG_ENCRYPTED} flag is set. - */ + /** Returns whether the {@link C#BUFFER_FLAG_ENCRYPTED} flag is set. */ public final boolean isEncrypted() { return getFlag(C.BUFFER_FLAG_ENCRYPTED); } @@ -176,7 +208,9 @@ public final boolean isEncrypted() { * @see java.nio.Buffer#flip() */ public final void flip() { - data.flip(); + if (data != null) { + data.flip(); + } if (supplementalData != null) { supplementalData.flip(); } @@ -201,9 +235,7 @@ private ByteBuffer createReplacementByteBuffer(int requiredCapacity) { return ByteBuffer.allocateDirect(requiredCapacity); } else { int currentCapacity = data == null ? 0 : data.capacity(); - throw new IllegalStateException("Buffer too small (" + currentCapacity + " < " - + requiredCapacity + ")"); + throw new InsufficientCapacityException(currentCapacity, requiredCapacity); } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderOutputBuffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderOutputBuffer.java new file mode 100644 index 0000000000..897f251cc1 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderOutputBuffer.java @@ -0,0 +1,42 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.decoder; + +/** Output buffer decoded by a {@link Decoder}. */ +public abstract class DecoderOutputBuffer extends Buffer { + + /** Buffer owner. */ + public interface Owner { + + /** + * Releases the buffer. + * + * @param outputBuffer Output buffer. + */ + void releaseOutputBuffer(S outputBuffer); + } + + /** The presentation timestamp for the buffer, in microseconds. */ + public long timeUs; + + /** + * The number of buffers immediately prior to this one that were skipped in the {@link Decoder}. + */ + public int skippedOutputBufferCount; + + /** Releases the output buffer for reuse. Must be called when the buffer is no longer needed. 
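// --- Illustrative aside (not part of this patch) -----------------------------------------------
// Sketch of the new DecoderInputBuffer(int, int) constructor and InsufficientCapacityException
// shown above. The 64-byte padding and the sample array are arbitrary; only the API visible in
// this diff is used.
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;

final class DecoderInputBufferSketch {

  // A buffer that may grow on the heap and always leaves 64 spare bytes for block-based decoders.
  static DecoderInputBuffer newPaddedBuffer() {
    return new DecoderInputBuffer(
        DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_NORMAL, /* paddingSize= */ 64);
  }

  // Returns false instead of propagating InsufficientCapacityException, which is only thrown when
  // the buffer was created with BUFFER_REPLACEMENT_MODE_DISABLED.
  static boolean tryWrite(DecoderInputBuffer buffer, byte[] sample) {
    try {
      buffer.ensureSpaceForWrite(sample.length);
    } catch (DecoderInputBuffer.InsufficientCapacityException e) {
      return false;
    }
    buffer.data.put(sample);
    return true;
  }
}
// ------------------------------------------------------------------------------------------------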
*/ + public abstract void release(); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderReuseEvaluation.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderReuseEvaluation.java new file mode 100644 index 0000000000..19170bd91d --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/DecoderReuseEvaluation.java @@ -0,0 +1,182 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.decoder; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotEmpty; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.annotation.ElementType.TYPE_USE; + +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.video.ColorInfo; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * The result of an evaluation to determine whether a decoder can be reused for a new input format. + */ +public final class DecoderReuseEvaluation { + + /** Possible outcomes of the evaluation. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + REUSE_RESULT_NO, + REUSE_RESULT_YES_WITH_FLUSH, + REUSE_RESULT_YES_WITH_RECONFIGURATION, + REUSE_RESULT_YES_WITHOUT_RECONFIGURATION + }) + public @interface DecoderReuseResult {} + /** The decoder cannot be reused. */ + public static final int REUSE_RESULT_NO = 0; + /** The decoder can be reused, but must be flushed. */ + public static final int REUSE_RESULT_YES_WITH_FLUSH = 1; + /** + * The decoder can be reused. It does not need to be flushed, but must be reconfigured by + * prefixing the next input buffer with the new format's configuration data. + */ + public static final int REUSE_RESULT_YES_WITH_RECONFIGURATION = 2; + /** The decoder can be kept. It does not need to be flushed and no reconfiguration is required. */ + public static final int REUSE_RESULT_YES_WITHOUT_RECONFIGURATION = 3; + + /** Possible reasons why reuse is not possible. 
*/ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef( + flag = true, + value = { + DISCARD_REASON_REUSE_NOT_IMPLEMENTED, + DISCARD_REASON_WORKAROUND, + DISCARD_REASON_APP_OVERRIDE, + DISCARD_REASON_MIME_TYPE_CHANGED, + DISCARD_REASON_OPERATING_RATE_CHANGED, + DISCARD_REASON_INITIALIZATION_DATA_CHANGED, + DISCARD_REASON_DRM_SESSION_CHANGED, + DISCARD_REASON_MAX_INPUT_SIZE_EXCEEDED, + DISCARD_REASON_VIDEO_MAX_RESOLUTION_EXCEEDED, + DISCARD_REASON_VIDEO_RESOLUTION_CHANGED, + DISCARD_REASON_VIDEO_ROTATION_CHANGED, + DISCARD_REASON_VIDEO_COLOR_INFO_CHANGED, + DISCARD_REASON_AUDIO_CHANNEL_COUNT_CHANGED, + DISCARD_REASON_AUDIO_SAMPLE_RATE_CHANGED, + DISCARD_REASON_AUDIO_ENCODING_CHANGED + }) + public @interface DecoderDiscardReasons {} + + /** Decoder reuse is not implemented. */ + public static final int DISCARD_REASON_REUSE_NOT_IMPLEMENTED = 1 << 0; + /** Decoder reuse is disabled by a workaround. */ + public static final int DISCARD_REASON_WORKAROUND = 1 << 1; + /** Decoder reuse is disabled by overriding behavior in application code. */ + public static final int DISCARD_REASON_APP_OVERRIDE = 1 << 2; + /** The sample MIME type is changing. */ + public static final int DISCARD_REASON_MIME_TYPE_CHANGED = 1 << 3; + /** The codec's operating rate is changing. */ + public static final int DISCARD_REASON_OPERATING_RATE_CHANGED = 1 << 4; + /** The format initialization data is changing. */ + public static final int DISCARD_REASON_INITIALIZATION_DATA_CHANGED = 1 << 5; + /** The new format may exceed the decoder's configured maximum sample size, in bytes. */ + public static final int DISCARD_REASON_MAX_INPUT_SIZE_EXCEEDED = 1 << 6; + /** The DRM session is changing. */ + public static final int DISCARD_REASON_DRM_SESSION_CHANGED = 1 << 7; + /** The new format may exceed the decoder's configured maximum resolution. */ + public static final int DISCARD_REASON_VIDEO_MAX_RESOLUTION_EXCEEDED = 1 << 8; + /** The video resolution is changing. */ + public static final int DISCARD_REASON_VIDEO_RESOLUTION_CHANGED = 1 << 9; + /** The video rotation is changing. */ + public static final int DISCARD_REASON_VIDEO_ROTATION_CHANGED = 1 << 10; + /** The video {@link ColorInfo} is changing. */ + public static final int DISCARD_REASON_VIDEO_COLOR_INFO_CHANGED = 1 << 11; + /** The audio channel count is changing. */ + public static final int DISCARD_REASON_AUDIO_CHANNEL_COUNT_CHANGED = 1 << 12; + /** The audio sample rate is changing. */ + public static final int DISCARD_REASON_AUDIO_SAMPLE_RATE_CHANGED = 1 << 13; + /** The audio encoding is changing. */ + public static final int DISCARD_REASON_AUDIO_ENCODING_CHANGED = 1 << 14; + + /** The name of the decoder. */ + public final String decoderName; + + /** The {@link Format} for which the decoder was previously configured. */ + public final Format oldFormat; + + /** The new {@link Format} being evaluated. */ + public final Format newFormat; + + /** The {@link DecoderReuseResult result} of the evaluation. */ + public final @DecoderReuseResult int result; + + /** + * {@link DecoderDiscardReasons Reasons} why the decoder cannot be reused. Always {@code 0} if + * reuse is possible. May also be {code 0} if reuse is not possible for an unspecified reason. + */ + public final @DecoderDiscardReasons int discardReasons; + + /** + * @param decoderName The name of the decoder. + * @param oldFormat The {@link Format} for which the decoder was previously configured. + * @param newFormat The new {@link Format} being evaluated. 
+ * @param result The {@link DecoderReuseResult result} of the evaluation. + * @param discardReasons One or more {@link DecoderDiscardReasons reasons} why the decoder cannot + * be reused, or {@code 0} if reuse is possible. + */ + public DecoderReuseEvaluation( + String decoderName, + Format oldFormat, + Format newFormat, + @DecoderReuseResult int result, + @DecoderDiscardReasons int discardReasons) { + checkArgument(result == REUSE_RESULT_NO || discardReasons == 0); + this.decoderName = checkNotEmpty(decoderName); + this.oldFormat = checkNotNull(oldFormat); + this.newFormat = checkNotNull(newFormat); + this.result = result; + this.discardReasons = discardReasons; + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + DecoderReuseEvaluation other = (DecoderReuseEvaluation) obj; + return result == other.result + && discardReasons == other.discardReasons + && decoderName.equals(other.decoderName) + && oldFormat.equals(other.oldFormat) + && newFormat.equals(other.newFormat); + } + + @Override + public int hashCode() { + int hashCode = 17; + hashCode = 31 * hashCode + result; + hashCode = 31 * hashCode + discardReasons; + hashCode = 31 * hashCode + decoderName.hashCode(); + hashCode = 31 * hashCode + oldFormat.hashCode(); + hashCode = 31 * hashCode + newFormat.hashCode(); + return hashCode; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/OutputBuffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/OutputBuffer.java deleted file mode 100644 index 730ce15ed4..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/OutputBuffer.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.decoder; - -/** - * Output buffer decoded by a {@link Decoder}. - */ -public abstract class OutputBuffer extends Buffer { - - /** - * The presentation timestamp for the buffer, in microseconds. - */ - public long timeUs; - - /** - * The number of buffers immediately prior to this one that were skipped in the {@link Decoder}. - */ - public int skippedOutputBufferCount; - - /** - * Releases the output buffer for reuse. Must be called when the buffer is no longer needed. 
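// --- Illustrative aside (not part of this patch) -----------------------------------------------
// Sketch of how a renderer-side check might produce the DecoderReuseEvaluation added above. The
// rule used here (reuse with a flush whenever the sample MIME type is unchanged) is deliberately
// simplistic and only meant to show the constructor contract: a non-zero discard-reason mask is
// only allowed together with REUSE_RESULT_NO.
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation;
import java.util.Objects;

final class DecoderReuseSketch {

  static DecoderReuseEvaluation evaluate(String decoderName, Format oldFormat, Format newFormat) {
    if (!Objects.equals(oldFormat.sampleMimeType, newFormat.sampleMimeType)) {
      // A different sample MIME type always forces a new codec instance.
      return new DecoderReuseEvaluation(
          decoderName,
          oldFormat,
          newFormat,
          DecoderReuseEvaluation.REUSE_RESULT_NO,
          DecoderReuseEvaluation.DISCARD_REASON_MIME_TYPE_CHANGED);
    }
    // Same MIME type: keep the codec but flush any buffered data.
    return new DecoderReuseEvaluation(
        decoderName,
        oldFormat,
        newFormat,
        DecoderReuseEvaluation.REUSE_RESULT_YES_WITH_FLUSH,
        /* discardReasons= */ 0);
  }
}
// ------------------------------------------------------------------------------------------------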
- */ - public abstract void release(); - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/SimpleDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/SimpleDecoder.java index 4eef1ea32d..2c7f302369 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/SimpleDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/SimpleDecoder.java @@ -21,10 +21,13 @@ import com.google.android.exoplayer2.util.Assertions; import java.util.ArrayDeque; -/** Base class for {@link Decoder}s that use their own decode thread. */ +/** + * Base class for {@link Decoder}s that use their own decode thread and decode each input buffer + * immediately into a corresponding output buffer. + */ @SuppressWarnings("UngroupedOverloads") public abstract class SimpleDecoder< - I extends DecoderInputBuffer, O extends OutputBuffer, E extends Exception> + I extends DecoderInputBuffer, O extends DecoderOutputBuffer, E extends DecoderException> implements Decoder { private final Thread decodeThread; @@ -37,9 +40,9 @@ public abstract class SimpleDecoder< private int availableInputBufferCount; private int availableOutputBufferCount; - private I dequeuedInputBuffer; + @Nullable private I dequeuedInputBuffer; - private E exception; + @Nullable private E exception; private boolean flushed; private boolean released; private int skippedOutputBufferCount; @@ -48,6 +51,7 @@ public abstract class SimpleDecoder< * @param inputBuffers An array of nulls that will be used to store references to input buffers. * @param outputBuffers An array of nulls that will be used to store references to output buffers. */ + @SuppressWarnings("nullness:method.invocation") protected SimpleDecoder(I[] inputBuffers, O[] outputBuffers) { lock = new Object(); queuedInputBuffers = new ArrayDeque<>(); @@ -62,19 +66,20 @@ protected SimpleDecoder(I[] inputBuffers, O[] outputBuffers) { for (int i = 0; i < availableOutputBufferCount; i++) { availableOutputBuffers[i] = createOutputBuffer(); } - decodeThread = new Thread() { - @Override - public void run() { - SimpleDecoder.this.run(); - } - }; + decodeThread = + new Thread("ExoPlayer:SimpleDecoder") { + @Override + public void run() { + SimpleDecoder.this.run(); + } + }; decodeThread.start(); } /** * Sets the initial size of each input buffer. - *
<p>
      - * This method should only be called before the decoder is used (i.e. before the first call to + * + *
<p>
      This method should only be called before the decoder is used (i.e. before the first call to * {@link #dequeueInputBuffer()}. * * @param size The required input buffer size. @@ -92,8 +97,10 @@ public final I dequeueInputBuffer() throws E { synchronized (lock) { maybeThrowException(); Assertions.checkState(dequeuedInputBuffer == null); - dequeuedInputBuffer = availableInputBufferCount == 0 ? null - : availableInputBuffers[--availableInputBufferCount]; + dequeuedInputBuffer = + availableInputBufferCount == 0 + ? null + : availableInputBuffers[--availableInputBufferCount]; return dequeuedInputBuffer; } } @@ -149,7 +156,6 @@ public final void flush() { while (!queuedOutputBuffers.isEmpty()) { queuedOutputBuffers.removeFirst().release(); } - exception = null; } } @@ -173,6 +179,7 @@ public void release() { * @throws E The decode exception. */ private void maybeThrowException() throws E { + @Nullable E exception = this.exception; if (exception != null) { throw exception; } @@ -181,8 +188,8 @@ private void maybeThrowException() throws E { /** * Notifies the decode loop if there exists a queued input buffer and an available output buffer * to decode into. - *
<p>
      - * Should only be called whilst synchronized on the lock object. + * + *
<p>
      Should only be called whilst synchronized on the lock object. */ private void maybeNotifyDecodeLoop() { if (canDecodeBuffer()) { @@ -226,6 +233,9 @@ private boolean decode() throws InterruptedException { if (inputBuffer.isDecodeOnly()) { outputBuffer.addFlag(C.BUFFER_FLAG_DECODE_ONLY); } + if (inputBuffer.isFirstSample()) { + outputBuffer.addFlag(C.BUFFER_FLAG_FIRST_SAMPLE); + } @Nullable E exception; try { exception = decode(inputBuffer, outputBuffer, resetDecoder); @@ -279,14 +289,10 @@ private void releaseOutputBufferInternal(O outputBuffer) { availableOutputBuffers[availableOutputBufferCount++] = outputBuffer; } - /** - * Creates a new input buffer. - */ + /** Creates a new input buffer. */ protected abstract I createInputBuffer(); - /** - * Creates a new output buffer. - */ + /** Creates a new output buffer. */ protected abstract O createOutputBuffer(); /** diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/SimpleDecoderOutputBuffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/SimpleDecoderOutputBuffer.java new file mode 100644 index 0000000000..cdc3530661 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/SimpleDecoderOutputBuffer.java @@ -0,0 +1,62 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.decoder; + +import androidx.annotation.Nullable; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; + +/** Buffer for {@link SimpleDecoder} output. */ +public class SimpleDecoderOutputBuffer extends DecoderOutputBuffer { + + private final Owner owner; + + @Nullable public ByteBuffer data; + + public SimpleDecoderOutputBuffer(Owner owner) { + this.owner = owner; + } + + /** + * Initializes the buffer. + * + * @param timeUs The presentation timestamp for the buffer, in microseconds. + * @param size An upper bound on the size of the data that will be written to the buffer. + * @return The {@link #data} buffer, for convenience. 
+ */ + public ByteBuffer init(long timeUs, int size) { + this.timeUs = timeUs; + if (data == null || data.capacity() < size) { + data = ByteBuffer.allocateDirect(size).order(ByteOrder.nativeOrder()); + } + data.position(0); + data.limit(size); + return data; + } + + @Override + public void clear() { + super.clear(); + if (data != null) { + data.clear(); + } + } + + @Override + public void release() { + owner.releaseOutputBuffer(this); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/SimpleOutputBuffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/SimpleOutputBuffer.java deleted file mode 100644 index 84cffc1145..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/SimpleOutputBuffer.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.decoder; - -import androidx.annotation.Nullable; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; - -/** - * Buffer for {@link SimpleDecoder} output. - */ -public class SimpleOutputBuffer extends OutputBuffer { - - private final SimpleDecoder owner; - - @Nullable public ByteBuffer data; - - public SimpleOutputBuffer(SimpleDecoder owner) { - this.owner = owner; - } - - /** - * Initializes the buffer. - * - * @param timeUs The presentation timestamp for the buffer, in microseconds. - * @param size An upper bound on the size of the data that will be written to the buffer. - * @return The {@link #data} buffer, for convenience. - */ - public ByteBuffer init(long timeUs, int size) { - this.timeUs = timeUs; - if (data == null || data.capacity() < size) { - data = ByteBuffer.allocateDirect(size).order(ByteOrder.nativeOrder()); - } - data.position(0); - data.limit(size); - return data; - } - - @Override - public void clear() { - super.clear(); - if (data != null) { - data.clear(); - } - } - - @Override - public void release() { - owner.releaseOutputBuffer(this); - } - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderOutputBuffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/VideoDecoderOutputBuffer.java similarity index 86% rename from TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderOutputBuffer.java rename to TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/VideoDecoderOutputBuffer.java index 457aa30ade..80b15db0d3 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderOutputBuffer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/decoder/VideoDecoderOutputBuffer.java @@ -13,48 +13,33 @@ * See the License for the specific language governing permissions and * limitations under the License. 
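// --- Illustrative aside (not part of this patch) -----------------------------------------------
// Sketch of the init() contract implemented above: a decode step requests a direct ByteBuffer of
// the required size, fills it, and flips it for reading. The PCM array is a stand-in for real
// decoder output.
import com.google.android.exoplayer2.decoder.SimpleDecoderOutputBuffer;
import java.nio.ByteBuffer;

final class SimpleDecoderOutputBufferSketch {

  static void writeDecodedAudio(SimpleDecoderOutputBuffer outputBuffer, long timeUs, byte[] pcm) {
    // init() reuses the existing direct buffer when it is large enough, otherwise reallocates.
    ByteBuffer target = outputBuffer.init(timeUs, pcm.length);
    target.put(pcm);
    // Leave the buffer ready to be read from position 0.
    target.flip();
  }
}
// ------------------------------------------------------------------------------------------------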
*/ -package com.google.android.exoplayer2.video; +package com.google.android.exoplayer2.decoder; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.decoder.OutputBuffer; +import com.google.android.exoplayer2.Format; import java.nio.ByteBuffer; /** Video decoder output buffer containing video frame data. */ -public class VideoDecoderOutputBuffer extends OutputBuffer { +public class VideoDecoderOutputBuffer extends DecoderOutputBuffer { - /** Buffer owner. */ - public interface Owner { - - /** - * Releases the buffer. - * - * @param outputBuffer Output buffer. - */ - void releaseOutputBuffer(VideoDecoderOutputBuffer outputBuffer); - } - - // LINT.IfChange public static final int COLORSPACE_UNKNOWN = 0; public static final int COLORSPACE_BT601 = 1; public static final int COLORSPACE_BT709 = 2; public static final int COLORSPACE_BT2020 = 3; - // LINT.ThenChange( - // ../../../../../../../../../../extensions/av1/src/main/jni/gav1_jni.cc, - // ../../../../../../../../../../extensions/vp9/src/main/jni/vpx_jni.cc - // ) - /** Decoder private data. */ + /** Decoder private data. Used from native code. */ public int decoderPrivate; /** Output mode. */ - @C.VideoOutputMode public int mode; + public @C.VideoOutputMode int mode; /** RGB buffer for RGB mode. */ @Nullable public ByteBuffer data; public int width; public int height; - @Nullable public ColorInfo colorInfo; + /** The format of the input from which this output buffer was decoded. */ + @Nullable public Format format; /** YUV planes for YUV mode. */ @Nullable public ByteBuffer[] yuvPlanes; @@ -68,14 +53,14 @@ public interface Owner { */ @Nullable public ByteBuffer supplementalData; - private final Owner owner; + private final Owner owner; /** * Creates VideoDecoderOutputBuffer. * * @param owner Buffer owner. */ - public VideoDecoderOutputBuffer(Owner owner) { + public VideoDecoderOutputBuffer(Owner owner) { this.owner = owner; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/ClearKeyUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/ClearKeyUtil.java index 1c64570f9e..0aace3f95f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/ClearKeyUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/ClearKeyUtil.java @@ -21,9 +21,7 @@ import org.json.JSONException; import org.json.JSONObject; -/** - * Utility methods for ClearKey. - */ +/** Utility methods for ClearKey. */ /* package */ final class ClearKeyUtil { private static final String TAG = "ClearKeyUtil"; @@ -93,5 +91,4 @@ private static String base64ToBase64Url(String base64) { private static String base64UrlToBase64(String base64Url) { return base64Url.replace('-', '+').replace('_', '/'); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DecryptionException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DecryptionException.java deleted file mode 100644 index 81cfc26393..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DecryptionException.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (C) 2017 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.drm; - -/** - * Thrown when a non-platform component fails to decrypt data. - */ -public class DecryptionException extends Exception { - - /** - * A component specific error code. - */ - public final int errorCode; - - /** - * @param errorCode A component specific error code. - * @param message The detail message. - */ - public DecryptionException(int errorCode, String message) { - super(message); - this.errorCode = errorCode; - } - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSession.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSession.java index 432cc6613f..4e2db30ac5 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSession.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSession.java @@ -15,8 +15,11 @@ */ package com.google.android.exoplayer2.drm; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static java.lang.Math.min; + import android.annotation.SuppressLint; -import android.annotation.TargetApi; import android.media.NotProvisionedException; import android.os.Handler; import android.os.HandlerThread; @@ -24,14 +27,22 @@ import android.os.Message; import android.os.SystemClock; import android.util.Pair; +import androidx.annotation.GuardedBy; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.decoder.CryptoConfig; import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; import com.google.android.exoplayer2.drm.ExoMediaDrm.KeyRequest; import com.google.android.exoplayer2.drm.ExoMediaDrm.ProvisionRequest; +import com.google.android.exoplayer2.source.LoadEventInfo; +import com.google.android.exoplayer2.source.MediaLoadData; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy.LoadErrorInfo; import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.EventDispatcher; +import com.google.android.exoplayer2.util.Consumer; +import com.google.android.exoplayer2.util.CopyOnWriteMultiset; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Util; import java.io.IOException; @@ -46,19 +57,19 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; /** A {@link DrmSession} that supports playbacks using {@link ExoMediaDrm}. */ -@TargetApi(18) -/* package */ class DefaultDrmSession implements DrmSession { +@RequiresApi(18) +/* package */ class DefaultDrmSession implements DrmSession { /** Thrown when an unexpected exception or error is thrown during provisioning or key requests. 
*/ public static final class UnexpectedDrmSessionException extends IOException { - public UnexpectedDrmSessionException(Throwable cause) { - super("Unexpected " + cause.getClass().getSimpleName() + ": " + cause.getMessage(), cause); + public UnexpectedDrmSessionException(@Nullable Throwable cause) { + super(cause); } } /** Manages provisioning requests. */ - public interface ProvisioningManager { + public interface ProvisioningManager { /** * Called when a session requires provisioning. The manager may call {@link @@ -68,28 +79,41 @@ public interface ProvisioningManager { * * @param session The session. */ - void provisionRequired(DefaultDrmSession session); + void provisionRequired(DefaultDrmSession session); /** * Called by a session when it fails to perform a provisioning operation. * * @param error The error that occurred. + * @param thrownByExoMediaDrm Whether the error originated in an {@link ExoMediaDrm} operation. + * False when the error originated in the provisioning request. */ - void onProvisionError(Exception error); + void onProvisionError(Exception error, boolean thrownByExoMediaDrm); /** Called by a session when it successfully completes a provisioning operation. */ void onProvisionCompleted(); } - /** Callback to be notified when the session is released. */ - public interface ReleaseCallback { + /** Callback to be notified when the reference count of this session changes. */ + public interface ReferenceCountListener { /** - * Called immediately after releasing session resources. + * Called when the internal reference count of this session is incremented. * - * @param session The session. + * @param session This session. + * @param newReferenceCount The reference count after being incremented. */ - void onSessionReleased(DefaultDrmSession session); + void onReferenceCountIncremented(DefaultDrmSession session, int newReferenceCount); + + /** + * Called when the internal reference count of this session is decremented. + * + *
<p>
      {@code newReferenceCount == 0} indicates this session is in {@link #STATE_RELEASED}. + * + * @param session This session. + * @param newReferenceCount The reference count after being decremented. + */ + void onReferenceCountDecremented(DefaultDrmSession session, int newReferenceCount); } private static final String TAG = "DefaultDrmSession"; @@ -101,15 +125,16 @@ public interface ReleaseCallback { /** The DRM scheme datas, or null if this session uses offline keys. */ @Nullable public final List schemeDatas; - private final ExoMediaDrm mediaDrm; - private final ProvisioningManager provisioningManager; - private final ReleaseCallback releaseCallback; + private final ExoMediaDrm mediaDrm; + private final ProvisioningManager provisioningManager; + private final ReferenceCountListener referenceCountListener; private final @DefaultDrmSessionManager.Mode int mode; private final boolean playClearSamplesWithoutKeys; private final boolean isPlaceholderSession; private final HashMap keyRequestParameters; - private final EventDispatcher eventDispatcher; + private final CopyOnWriteMultiset eventDispatchers; private final LoadErrorHandlingPolicy loadErrorHandlingPolicy; + private final PlayerId playerId; /* package */ final MediaDrmCallback callback; /* package */ final UUID uuid; @@ -119,10 +144,10 @@ public interface ReleaseCallback { private int referenceCount; @Nullable private HandlerThread requestHandlerThread; @Nullable private RequestHandler requestHandler; - @Nullable private T mediaCrypto; + @Nullable private CryptoConfig cryptoConfig; @Nullable private DrmSessionException lastException; @Nullable private byte[] sessionId; - @MonotonicNonNull private byte[] offlineLicenseKeySetId; + private byte @MonotonicNonNull [] offlineLicenseKeySetId; @Nullable private KeyRequest currentKeyRequest; @Nullable private ProvisionRequest currentProvisionRequest; @@ -133,7 +158,7 @@ public interface ReleaseCallback { * @param uuid The UUID of the drm scheme. * @param mediaDrm The media DRM. * @param provisioningManager The manager for provisioning. - * @param releaseCallback The {@link ReleaseCallback}. + * @param referenceCountListener The {@link ReferenceCountListener}. * @param schemeDatas DRM scheme datas for this session, or null if an {@code * offlineLicenseKeySetId} is provided or if {@code isPlaceholderSession} is true. * @param mode The DRM mode. Ignored if {@code isPlaceholderSession} is true. @@ -143,17 +168,14 @@ public interface ReleaseCallback { * @param keyRequestParameters Key request parameters. * @param callback The media DRM callback. * @param playbackLooper The playback looper. - * @param eventDispatcher The dispatcher for DRM session manager events. * @param loadErrorHandlingPolicy The {@link LoadErrorHandlingPolicy} for key and provisioning * requests. 
*/ - // the constructor does not initialize fields: sessionId - @SuppressWarnings("nullness:initialization.fields.uninitialized") public DefaultDrmSession( UUID uuid, - ExoMediaDrm mediaDrm, - ProvisioningManager provisioningManager, - ReleaseCallback releaseCallback, + ExoMediaDrm mediaDrm, + ProvisioningManager provisioningManager, + ReferenceCountListener referenceCountListener, @Nullable List schemeDatas, @DefaultDrmSessionManager.Mode int mode, boolean playClearSamplesWithoutKeys, @@ -162,15 +184,15 @@ public DefaultDrmSession( HashMap keyRequestParameters, MediaDrmCallback callback, Looper playbackLooper, - EventDispatcher eventDispatcher, - LoadErrorHandlingPolicy loadErrorHandlingPolicy) { + LoadErrorHandlingPolicy loadErrorHandlingPolicy, + PlayerId playerId) { if (mode == DefaultDrmSessionManager.MODE_QUERY || mode == DefaultDrmSessionManager.MODE_RELEASE) { Assertions.checkNotNull(offlineLicenseKeySetId); } this.uuid = uuid; this.provisioningManager = provisioningManager; - this.releaseCallback = releaseCallback; + this.referenceCountListener = referenceCountListener; this.mediaDrm = mediaDrm; this.mode = mode; this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys; @@ -183,8 +205,9 @@ public DefaultDrmSession( } this.keyRequestParameters = keyRequestParameters; this.callback = callback; - this.eventDispatcher = eventDispatcher; + this.eventDispatchers = new CopyOnWriteMultiset<>(); this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; + this.playerId = playerId; state = STATE_OPENING; responseHandler = new ResponseHandler(playbackLooper); } @@ -215,20 +238,23 @@ public void provision() { } public void onProvisionCompleted() { - if (openInternal(false)) { + if (openInternal()) { doLicense(true); } } - public void onProvisionError(Exception error) { - onError(error); + public void onProvisionError(Exception error, boolean thrownByExoMediaDrm) { + onError( + error, + thrownByExoMediaDrm + ? DrmUtil.ERROR_SOURCE_EXO_MEDIA_DRM + : DrmUtil.ERROR_SOURCE_PROVISIONING); } // DrmSession implementation. @Override - @DrmSession.State - public final int getState() { + public final @DrmSession.State int getState() { return state; } @@ -238,13 +264,20 @@ public boolean playClearSamplesWithoutKeys() { } @Override - public final @Nullable DrmSessionException getError() { + @Nullable + public final DrmSessionException getError() { return state == STATE_ERROR ? 
lastException : null; } @Override - public final @Nullable T getMediaCrypto() { - return mediaCrypto; + public final UUID getSchemeUuid() { + return uuid; + } + + @Override + @Nullable + public final CryptoConfig getCryptoConfig() { + return cryptoConfig; } @Override @@ -260,40 +293,68 @@ public byte[] getOfflineLicenseKeySetId() { } @Override - public void acquire() { - Assertions.checkState(referenceCount >= 0); + public boolean requiresSecureDecoder(String mimeType) { + return mediaDrm.requiresSecureDecoder(checkStateNotNull(sessionId), mimeType); + } + + @Override + public void acquire(@Nullable DrmSessionEventListener.EventDispatcher eventDispatcher) { + if (referenceCount < 0) { + Log.e(TAG, "Session reference count less than zero: " + referenceCount); + referenceCount = 0; + } + if (eventDispatcher != null) { + eventDispatchers.add(eventDispatcher); + } if (++referenceCount == 1) { - Assertions.checkState(state == STATE_OPENING); - requestHandlerThread = new HandlerThread("DrmRequestHandler"); + checkState(state == STATE_OPENING); + requestHandlerThread = new HandlerThread("ExoPlayer:DrmRequestHandler"); requestHandlerThread.start(); requestHandler = new RequestHandler(requestHandlerThread.getLooper()); - if (openInternal(true)) { + if (openInternal()) { doLicense(true); } + } else if (eventDispatcher != null + && isOpen() + && eventDispatchers.count(eventDispatcher) == 1) { + // If the session is already open and this is the first instance of eventDispatcher we've + // seen, then send the acquire event only to the provided dispatcher. + eventDispatcher.drmSessionAcquired(state); } + referenceCountListener.onReferenceCountIncremented(this, referenceCount); } @Override - public void release() { + public void release(@Nullable DrmSessionEventListener.EventDispatcher eventDispatcher) { + if (referenceCount <= 0) { + Log.e(TAG, "release() called on a session that's already fully released."); + return; + } if (--referenceCount == 0) { // Assigning null to various non-null variables for clean-up. state = STATE_RELEASED; Util.castNonNull(responseHandler).removeCallbacksAndMessages(null); - Util.castNonNull(requestHandler).removeCallbacksAndMessages(null); + Util.castNonNull(requestHandler).release(); requestHandler = null; Util.castNonNull(requestHandlerThread).quit(); requestHandlerThread = null; - mediaCrypto = null; + cryptoConfig = null; lastException = null; currentKeyRequest = null; currentProvisionRequest = null; if (sessionId != null) { mediaDrm.closeSession(sessionId); sessionId = null; - eventDispatcher.dispatch(DefaultDrmSessionEventListener::onDrmSessionReleased); } - releaseCallback.onSessionReleased(this); } + if (eventDispatcher != null) { + eventDispatchers.remove(eventDispatcher); + if (eventDispatchers.count(eventDispatcher) == 0) { + // Release events are only sent to the last-attached instance of each EventDispatcher. + eventDispatcher.drmSessionReleased(); + } + } + referenceCountListener.onReferenceCountDecremented(this, referenceCount); } // Internal methods. @@ -301,12 +362,10 @@ public void release() { /** * Try to open a session, do provisioning if necessary. * - * @param allowProvisioning if provisioning is allowed, set this to false when calling from - * processing provision response. * @return true on success, false otherwise. 
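// --- Illustrative aside (not part of this patch) -----------------------------------------------
// Sketch of the acquire/release contract implemented by the reworked DefaultDrmSession above:
// every acquire(eventDispatcher) must be balanced by a release() with the same (possibly null)
// dispatcher, and resources are only freed once the reference count returns to zero. The caller
// and the dispatcher it passes are hypothetical.
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.drm.DrmSession;
import com.google.android.exoplayer2.drm.DrmSessionEventListener;

final class DrmSessionUsageSketch {

  static void useSession(
      DrmSession session, @Nullable DrmSessionEventListener.EventDispatcher eventDispatcher) {
    session.acquire(eventDispatcher);
    try {
      if (session.getState() == DrmSession.STATE_OPENED_WITH_KEYS) {
        // Hand session.getCryptoConfig() to the decoder here.
      }
    } finally {
      // The balancing release; the last release moves the session to STATE_RELEASED.
      session.release(eventDispatcher);
    }
  }
}
// ------------------------------------------------------------------------------------------------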
*/ @EnsuresNonNullIf(result = true, expression = "sessionId") - private boolean openInternal(boolean allowProvisioning) { + private boolean openInternal() { if (isOpen()) { // Already opened return true; @@ -314,19 +373,18 @@ private boolean openInternal(boolean allowProvisioning) { try { sessionId = mediaDrm.openSession(); - mediaCrypto = mediaDrm.createMediaCrypto(sessionId); - eventDispatcher.dispatch(DefaultDrmSessionEventListener::onDrmSessionAcquired); + mediaDrm.setPlayerIdForSession(sessionId, playerId); + cryptoConfig = mediaDrm.createCryptoConfig(sessionId); state = STATE_OPENED; + // Capture state into a local so a consistent value is seen by the lambda. + int localState = state; + dispatchEvent(eventDispatcher -> eventDispatcher.drmSessionAcquired(localState)); Assertions.checkNotNull(sessionId); return true; } catch (NotProvisionedException e) { - if (allowProvisioning) { - provisioningManager.provisionRequired(this); - } else { - onError(e); - } + provisioningManager.provisionRequired(this); } catch (Exception e) { - onError(e); + onError(e, DrmUtil.ERROR_SOURCE_EXO_MEDIA_DRM); } return false; @@ -340,14 +398,14 @@ private void onProvisionResponse(Object request, Object response) { currentProvisionRequest = null; if (response instanceof Exception) { - provisioningManager.onProvisionError((Exception) response); + provisioningManager.onProvisionError((Exception) response, /* thrownByExoMediaDrm= */ false); return; } try { mediaDrm.provideProvisionResponse((byte[]) response); } catch (Exception e) { - provisioningManager.onProvisionError(e); + provisioningManager.onProvisionError(e, /* thrownByExoMediaDrm= */ true); return; } @@ -376,10 +434,10 @@ private void doLicense(boolean allowRetry) { + licenseDurationRemainingSec); postKeyRequest(sessionId, ExoMediaDrm.KEY_TYPE_OFFLINE, allowRetry); } else if (licenseDurationRemainingSec <= 0) { - onError(new KeysExpiredException()); + onError(new KeysExpiredException(), DrmUtil.ERROR_SOURCE_LICENSE_ACQUISITION); } else { state = STATE_OPENED_WITH_KEYS; - eventDispatcher.dispatch(DefaultDrmSessionEventListener::onDrmKeysRestored); + dispatchEvent(DrmSessionEventListener.EventDispatcher::drmKeysRestored); } } break; @@ -391,11 +449,7 @@ private void doLicense(boolean allowRetry) { case DefaultDrmSessionManager.MODE_RELEASE: Assertions.checkNotNull(offlineLicenseKeySetId); Assertions.checkNotNull(this.sessionId); - // It's not necessary to restore the key (and open a session to do that) before releasing it - // but this serves as a good sanity/fast-failure check. 
- if (restoreKeys()) { - postKeyRequest(offlineLicenseKeySetId, ExoMediaDrm.KEY_TYPE_RELEASE, allowRetry); - } + postKeyRequest(offlineLicenseKeySetId, ExoMediaDrm.KEY_TYPE_RELEASE, allowRetry); break; default: break; @@ -408,8 +462,7 @@ private boolean restoreKeys() { mediaDrm.restoreKeys(sessionId, offlineLicenseKeySetId); return true; } catch (Exception e) { - Log.e(TAG, "Error trying to restore keys.", e); - onError(e); + onError(e, DrmUtil.ERROR_SOURCE_EXO_MEDIA_DRM); } return false; } @@ -420,7 +473,7 @@ private long getLicenseDurationRemainingSec() { } Pair pair = Assertions.checkNotNull(WidevineUtil.getLicenseDurationRemainingSec(this)); - return Math.min(pair.first, pair.second); + return min(pair.first, pair.second); } private void postKeyRequest(byte[] scope, int type, boolean allowRetry) { @@ -429,7 +482,7 @@ private void postKeyRequest(byte[] scope, int type, boolean allowRetry) { Util.castNonNull(requestHandler) .post(MSG_KEYS, Assertions.checkNotNull(currentKeyRequest), allowRetry); } catch (Exception e) { - onKeysError(e); + onKeysError(e, /* thrownByExoMediaDrm= */ true); } } @@ -441,7 +494,7 @@ private void onKeyResponse(Object request, Object response) { currentKeyRequest = null; if (response instanceof Exception) { - onKeysError((Exception) response); + onKeysError((Exception) response, /* thrownByExoMediaDrm= */ false); return; } @@ -449,7 +502,7 @@ private void onKeyResponse(Object request, Object response) { byte[] responseData = (byte[]) response; if (mode == DefaultDrmSessionManager.MODE_RELEASE) { mediaDrm.provideKeyResponse(Util.castNonNull(offlineLicenseKeySetId), responseData); - eventDispatcher.dispatch(DefaultDrmSessionEventListener::onDrmKeysRestored); + dispatchEvent(DrmSessionEventListener.EventDispatcher::drmKeysRemoved); } else { byte[] keySetId = mediaDrm.provideKeyResponse(sessionId, responseData); if ((mode == DefaultDrmSessionManager.MODE_DOWNLOAD @@ -460,10 +513,10 @@ private void onKeyResponse(Object request, Object response) { offlineLicenseKeySetId = keySetId; } state = STATE_OPENED_WITH_KEYS; - eventDispatcher.dispatch(DefaultDrmSessionEventListener::onDrmKeysLoaded); + dispatchEvent(DrmSessionEventListener.EventDispatcher::drmKeysLoaded); } } catch (Exception e) { - onKeysError(e); + onKeysError(e, /* thrownByExoMediaDrm= */ true); } } @@ -474,28 +527,40 @@ private void onKeysRequired() { } } - private void onKeysError(Exception e) { + private void onKeysError(Exception e, boolean thrownByExoMediaDrm) { if (e instanceof NotProvisionedException) { provisioningManager.provisionRequired(this); } else { - onError(e); + onError( + e, + thrownByExoMediaDrm + ? 
DrmUtil.ERROR_SOURCE_EXO_MEDIA_DRM + : DrmUtil.ERROR_SOURCE_LICENSE_ACQUISITION); } } - private void onError(final Exception e) { - lastException = new DrmSessionException(e); - eventDispatcher.dispatch(listener -> listener.onDrmSessionManagerError(e)); + private void onError(Exception e, @DrmUtil.ErrorSource int errorSource) { + lastException = + new DrmSessionException(e, DrmUtil.getErrorCodeForMediaDrmException(e, errorSource)); + Log.e(TAG, "DRM session error", e); + dispatchEvent(eventDispatcher -> eventDispatcher.drmSessionManagerError(e)); if (state != STATE_OPENED_WITH_KEYS) { state = STATE_ERROR; } } @EnsuresNonNullIf(result = true, expression = "sessionId") - @SuppressWarnings("contracts.conditional.postcondition.not.satisfied") + @SuppressWarnings("nullness:contracts.conditional.postcondition") private boolean isOpen() { return state == STATE_OPENED || state == STATE_OPENED_WITH_KEYS; } + private void dispatchEvent(Consumer event) { + for (DrmSessionEventListener.EventDispatcher eventDispatcher : eventDispatchers.elementSet()) { + event.accept(eventDispatcher); + } + } + // Internal classes. @SuppressLint("HandlerLeak") @@ -527,13 +592,20 @@ public void handleMessage(Message msg) { @SuppressLint("HandlerLeak") private class RequestHandler extends Handler { + @GuardedBy("this") + private boolean isReleased; + public RequestHandler(Looper backgroundLooper) { super(backgroundLooper); } void post(int what, Object request, boolean allowRetry) { RequestTask requestTask = - new RequestTask(allowRetry, /* startTimeMs= */ SystemClock.elapsedRealtime(), request); + new RequestTask( + LoadEventInfo.getNewId(), + allowRetry, + /* startTimeMs= */ SystemClock.elapsedRealtime(), + request); obtainMessage(what, requestTask).sendToTarget(); } @@ -553,18 +625,26 @@ public void handleMessage(Message msg) { default: throw new RuntimeException(); } - } catch (Exception e) { + } catch (MediaDrmCallbackException e) { if (maybeRetryRequest(msg, e)) { return; } response = e; + } catch (Exception e) { + Log.w(TAG, "Key/provisioning request produced an unexpected exception. Not retrying.", e); + response = e; + } + loadErrorHandlingPolicy.onLoadTaskConcluded(requestTask.taskId); + synchronized (this) { + if (!isReleased) { + responseHandler + .obtainMessage(msg.what, Pair.create(requestTask.request, response)) + .sendToTarget(); + } } - responseHandler - .obtainMessage(msg.what, Pair.create(requestTask.request, response)) - .sendToTarget(); } - private boolean maybeRetryRequest(Message originalMsg, Exception e) { + private boolean maybeRetryRequest(Message originalMsg, MediaDrmCallbackException exception) { RequestTask requestTask = (RequestTask) originalMsg.obj; if (!requestTask.allowRetry) { return false; @@ -574,31 +654,53 @@ private boolean maybeRetryRequest(Message originalMsg, Exception e) { > loadErrorHandlingPolicy.getMinimumLoadableRetryCount(C.DATA_TYPE_DRM)) { return false; } - IOException ioException = - e instanceof IOException ? (IOException) e : new UnexpectedDrmSessionException(e); + LoadEventInfo loadEventInfo = + new LoadEventInfo( + requestTask.taskId, + exception.dataSpec, + exception.uriAfterRedirects, + exception.responseHeaders, + SystemClock.elapsedRealtime(), + /* loadDurationMs= */ SystemClock.elapsedRealtime() - requestTask.startTimeMs, + exception.bytesLoaded); + MediaLoadData mediaLoadData = new MediaLoadData(C.DATA_TYPE_DRM); + IOException loadErrorCause = + exception.getCause() instanceof IOException + ? 
(IOException) exception.getCause() + : new UnexpectedDrmSessionException(exception.getCause()); long retryDelayMs = loadErrorHandlingPolicy.getRetryDelayMsFor( - C.DATA_TYPE_DRM, - /* loadDurationMs= */ SystemClock.elapsedRealtime() - requestTask.startTimeMs, - ioException, - requestTask.errorCount); + new LoadErrorInfo( + loadEventInfo, mediaLoadData, loadErrorCause, requestTask.errorCount)); if (retryDelayMs == C.TIME_UNSET) { // The error is fatal. return false; } - sendMessageDelayed(Message.obtain(originalMsg), retryDelayMs); - return true; + synchronized (this) { + if (!isReleased) { + sendMessageDelayed(Message.obtain(originalMsg), retryDelayMs); + return true; + } + } + return false; + } + + public synchronized void release() { + removeCallbacksAndMessages(/* token= */ null); + isReleased = true; } } private static final class RequestTask { + public final long taskId; public final boolean allowRetry; public final long startTimeMs; public final Object request; public int errorCount; - public RequestTask(boolean allowRetry, long startTimeMs, Object request) { + public RequestTask(long taskId, boolean allowRetry, long startTimeMs, Object request) { + this.taskId = taskId; this.allowRetry = allowRetry; this.startTimeMs = startTimeMs; this.request = request; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSessionEventListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSessionEventListener.java deleted file mode 100644 index 297f26bb71..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSessionEventListener.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (C) 2018 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.drm; - -import com.google.android.exoplayer2.Player; - -/** Listener of {@link DefaultDrmSessionManager} events. */ -public interface DefaultDrmSessionEventListener { - - /** Called each time a drm session is acquired. */ - default void onDrmSessionAcquired() {} - - /** Called each time keys are loaded. */ - default void onDrmKeysLoaded() {} - - /** - * Called when a drm error occurs. - * - *
      This method being called does not indicate that playback has failed, or that it will fail. - * The player may be able to recover from the error and continue. Hence applications should - * not implement this method to display a user visible error or initiate an application - * level retry ({@link Player.EventListener#onPlayerError} is the appropriate place to implement - * such behavior). This method is called to provide the application with an opportunity to log the - * error if it wishes to do so. - * - * @param error The corresponding exception. - */ - default void onDrmSessionManagerError(Exception error) {} - - /** Called each time offline keys are restored. */ - default void onDrmKeysRestored() {} - - /** Called each time offline keys are removed. */ - default void onDrmKeysRemoved() {} - - /** Called each time a drm session is released. */ - default void onDrmSessionReleased() {} -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSessionManager.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSessionManager.java index 1c27d745de..60c71fa152 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSessionManager.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSessionManager.java @@ -15,36 +15,59 @@ */ package com.google.android.exoplayer2.drm; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.annotation.SuppressLint; -import android.annotation.TargetApi; +import android.media.ResourceBusyException; import android.os.Handler; import android.os.Looper; import android.os.Message; +import android.os.SystemClock; import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.analytics.PlayerId; import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; import com.google.android.exoplayer2.drm.DrmSession.DrmSessionException; import com.google.android.exoplayer2.drm.ExoMediaDrm.OnEventListener; import com.google.android.exoplayer2.upstream.DefaultLoadErrorHandlingPolicy; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.EventDispatcher; import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Sets; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.UUID; +import 
org.checkerframework.checker.nullness.qual.EnsuresNonNull; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** A {@link DrmSessionManager} that supports playbacks using {@link ExoMediaDrm}. */ -@TargetApi(18) -public class DefaultDrmSessionManager implements DrmSessionManager { +/** + * A {@link DrmSessionManager} that supports playbacks using {@link ExoMediaDrm}. + * + *
      This implementation supports pre-acquisition of sessions using {@link + * #preacquireSession(DrmSessionEventListener.EventDispatcher, Format)}. + */ +@RequiresApi(18) +public class DefaultDrmSessionManager implements DrmSessionManager { /** * Builder for {@link DefaultDrmSessionManager} instances. @@ -55,11 +78,12 @@ public static final class Builder { private final HashMap keyRequestParameters; private UUID uuid; - private ExoMediaDrm.Provider exoMediaDrmProvider; + private ExoMediaDrm.Provider exoMediaDrmProvider; private boolean multiSession; - private int[] useDrmSessionsForClearContentTrackTypes; + private @C.TrackType int[] useDrmSessionsForClearContentTrackTypes; private boolean playClearSamplesWithoutKeys; private LoadErrorHandlingPolicy loadErrorHandlingPolicy; + private long sessionKeepaliveMs; /** * Creates a builder with default values. The default values are: @@ -76,27 +100,31 @@ public static final class Builder { * DefaultLoadErrorHandlingPolicy}. * */ - @SuppressWarnings("unchecked") public Builder() { keyRequestParameters = new HashMap<>(); uuid = C.WIDEVINE_UUID; - exoMediaDrmProvider = (ExoMediaDrm.Provider) FrameworkMediaDrm.DEFAULT_PROVIDER; + exoMediaDrmProvider = FrameworkMediaDrm.DEFAULT_PROVIDER; loadErrorHandlingPolicy = new DefaultLoadErrorHandlingPolicy(); useDrmSessionsForClearContentTrackTypes = new int[0]; + sessionKeepaliveMs = DEFAULT_SESSION_KEEPALIVE_MS; } /** * Sets the key request parameters to pass as the last argument to {@link - * ExoMediaDrm#getKeyRequest(byte[], List, int, HashMap)}. + * ExoMediaDrm#getKeyRequest(byte[], List, int, HashMap)}. May be null if not parameters need to + * be passed. * *
      Custom data for PlayReady should be set under {@link #PLAYREADY_CUSTOM_DATA_KEY}. * * @param keyRequestParameters A map with parameters. * @return This builder. */ - public Builder setKeyRequestParameters(Map keyRequestParameters) { + @CanIgnoreReturnValue + public Builder setKeyRequestParameters(@Nullable Map keyRequestParameters) { this.keyRequestParameters.clear(); - this.keyRequestParameters.putAll(Assertions.checkNotNull(keyRequestParameters)); + if (keyRequestParameters != null) { + this.keyRequestParameters.putAll(keyRequestParameters); + } return this; } @@ -107,11 +135,11 @@ public Builder setKeyRequestParameters(Map keyRequestParameters) * @param exoMediaDrmProvider The {@link ExoMediaDrm.Provider}. * @return This builder. */ - @SuppressWarnings({"rawtypes", "unchecked"}) + @CanIgnoreReturnValue public Builder setUuidAndExoMediaDrmProvider( UUID uuid, ExoMediaDrm.Provider exoMediaDrmProvider) { - this.uuid = Assertions.checkNotNull(uuid); - this.exoMediaDrmProvider = Assertions.checkNotNull(exoMediaDrmProvider); + this.uuid = checkNotNull(uuid); + this.exoMediaDrmProvider = checkNotNull(exoMediaDrmProvider); return this; } @@ -125,6 +153,7 @@ public Builder setUuidAndExoMediaDrmProvider( * sessions. * @return This builder. */ + @CanIgnoreReturnValue public Builder setMultiSession(boolean multiSession) { this.multiSession = multiSession; return this; @@ -144,11 +173,11 @@ public Builder setMultiSession(boolean multiSession) { * @throws IllegalArgumentException If {@code useDrmSessionsForClearContentTrackTypes} contains * track types other than {@link C#TRACK_TYPE_AUDIO} and {@link C#TRACK_TYPE_VIDEO}. */ + @CanIgnoreReturnValue public Builder setUseDrmSessionsForClearContent( - int... useDrmSessionsForClearContentTrackTypes) { - for (int trackType : useDrmSessionsForClearContentTrackTypes) { - Assertions.checkArgument( - trackType == C.TRACK_TYPE_VIDEO || trackType == C.TRACK_TYPE_AUDIO); + @C.TrackType int... useDrmSessionsForClearContentTrackTypes) { + for (@C.TrackType int trackType : useDrmSessionsForClearContentTrackTypes) { + checkArgument(trackType == C.TRACK_TYPE_VIDEO || trackType == C.TRACK_TYPE_AUDIO); } this.useDrmSessionsForClearContentTrackTypes = useDrmSessionsForClearContentTrackTypes.clone(); @@ -163,6 +192,7 @@ public Builder setUseDrmSessionsForClearContent( * played when keys for the encrypted part of the content have yet to be loaded. * @return This builder. */ + @CanIgnoreReturnValue public Builder setPlayClearSamplesWithoutKeys(boolean playClearSamplesWithoutKeys) { this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys; return this; @@ -174,14 +204,37 @@ public Builder setPlayClearSamplesWithoutKeys(boolean playClearSamplesWithoutKey * @param loadErrorHandlingPolicy A {@link LoadErrorHandlingPolicy}. * @return This builder. */ + @CanIgnoreReturnValue public Builder setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy loadErrorHandlingPolicy) { - this.loadErrorHandlingPolicy = Assertions.checkNotNull(loadErrorHandlingPolicy); + this.loadErrorHandlingPolicy = checkNotNull(loadErrorHandlingPolicy); + return this; + } + + /** + * Sets the time to keep {@link DrmSession DrmSessions} alive when they're not in use. + * + *
      It can be useful to keep sessions alive during playback of short clear sections of media + * (e.g. ad breaks) to avoid opening new DRM sessions (and re-requesting keys) at the transition + * back into secure content. This assumes the secure sections before and after the clear section + * are encrypted with the same keys. + * + *
      Defaults to {@link #DEFAULT_SESSION_KEEPALIVE_MS}. Pass {@link C#TIME_UNSET} to disable + * keep-alive. + * + * @param sessionKeepaliveMs The time to keep {@link DrmSession}s alive before fully releasing, + * in milliseconds. Must be > 0 or {@link C#TIME_UNSET} to disable keep-alive. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setSessionKeepaliveMs(long sessionKeepaliveMs) { + checkArgument(sessionKeepaliveMs > 0 || sessionKeepaliveMs == C.TIME_UNSET); + this.sessionKeepaliveMs = sessionKeepaliveMs; return this; } /** Builds a {@link DefaultDrmSessionManager} instance. */ - public DefaultDrmSessionManager build(MediaDrmCallback mediaDrmCallback) { - return new DefaultDrmSessionManager<>( + public DefaultDrmSessionManager build(MediaDrmCallback mediaDrmCallback) { + return new DefaultDrmSessionManager( uuid, exoMediaDrmProvider, mediaDrmCallback, @@ -189,13 +242,14 @@ public DefaultDrmSessionManager build(MediaDrmCallback mediaDrmC multiSession, useDrmSessionsForClearContentTrackTypes, playClearSamplesWithoutKeys, - loadErrorHandlingPolicy); + loadErrorHandlingPolicy, + sessionKeepaliveMs); } } /** - * Signals that the {@link DrmInitData} passed to {@link #acquireSession} does not contain does - * not contain scheme data for the required UUID. + * Signals that the {@link Format#drmInitData} passed to {@link #acquireSession} does not contain + * scheme data for the required UUID. */ public static final class MissingSchemeDataException extends Exception { @@ -216,6 +270,7 @@ private MissingSchemeDataException(UUID uuid) { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({MODE_PLAYBACK, MODE_QUERY, MODE_DOWNLOAD, MODE_RELEASE}) public @interface Mode {} /** @@ -231,32 +286,38 @@ private MissingSchemeDataException(UUID uuid) { public static final int MODE_RELEASE = 3; /** Number of times to retry for initial provisioning and key request for reporting error. */ public static final int INITIAL_DRM_REQUEST_RETRY_COUNT = 3; + /** Default value for {@link Builder#setSessionKeepaliveMs(long)}. 
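For context, the keep-alive behaviour documented above is opted into through the Builder. A minimal usage sketch, not part of this patch: the 30-second value is an arbitrary illustration, licenseCallback stands in for an existing MediaDrmCallback, omitting the setter keeps DEFAULT_SESSION_KEEPALIVE_MS, and passing C.TIME_UNSET disables keep-alive.

    DefaultDrmSessionManager drmSessionManager =
        new DefaultDrmSessionManager.Builder()
            .setUuidAndExoMediaDrmProvider(C.WIDEVINE_UUID, FrameworkMediaDrm.DEFAULT_PROVIDER)
            .setMultiSession(true)
            .setSessionKeepaliveMs(30_000) // assumed example value, not from the patch
            .build(licenseCallback);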
*/ + public static final long DEFAULT_SESSION_KEEPALIVE_MS = 5 * 60 * C.MILLIS_PER_SECOND; private static final String TAG = "DefaultDrmSessionMgr"; private final UUID uuid; - private final ExoMediaDrm.Provider exoMediaDrmProvider; + private final ExoMediaDrm.Provider exoMediaDrmProvider; private final MediaDrmCallback callback; private final HashMap keyRequestParameters; - private final EventDispatcher eventDispatcher; private final boolean multiSession; - private final int[] useDrmSessionsForClearContentTrackTypes; + private final @C.TrackType int[] useDrmSessionsForClearContentTrackTypes; private final boolean playClearSamplesWithoutKeys; private final ProvisioningManagerImpl provisioningManagerImpl; private final LoadErrorHandlingPolicy loadErrorHandlingPolicy; + private final ReferenceCountListenerImpl referenceCountListener; + private final long sessionKeepaliveMs; - private final List> sessions; - private final List> provisioningSessions; + private final List sessions; + private final Set preacquiredSessionReferences; + private final Set keepaliveSessions; private int prepareCallsCount; - @Nullable private ExoMediaDrm exoMediaDrm; - @Nullable private DefaultDrmSession placeholderDrmSession; - @Nullable private DefaultDrmSession noMultiSessionDrmSession; - @Nullable private Looper playbackLooper; + @Nullable private ExoMediaDrm exoMediaDrm; + @Nullable private DefaultDrmSession placeholderDrmSession; + @Nullable private DefaultDrmSession noMultiSessionDrmSession; + private @MonotonicNonNull Looper playbackLooper; + private @MonotonicNonNull Handler playbackHandler; private int mode; @Nullable private byte[] offlineLicenseKeySetId; + private @MonotonicNonNull PlayerId playerId; - /* package */ volatile @Nullable MediaDrmHandler mediaDrmHandler; + /* package */ @Nullable volatile MediaDrmHandler mediaDrmHandler; /** * @param uuid The UUID of the drm scheme. @@ -270,7 +331,7 @@ private MissingSchemeDataException(UUID uuid) { @Deprecated public DefaultDrmSessionManager( UUID uuid, - ExoMediaDrm exoMediaDrm, + ExoMediaDrm exoMediaDrm, MediaDrmCallback callback, @Nullable HashMap keyRequestParameters) { this( @@ -292,10 +353,11 @@ public DefaultDrmSessionManager( * Default is false. * @deprecated Use {@link Builder} instead. */ + @SuppressWarnings("deprecation") @Deprecated public DefaultDrmSessionManager( UUID uuid, - ExoMediaDrm exoMediaDrm, + ExoMediaDrm exoMediaDrm, MediaDrmCallback callback, @Nullable HashMap keyRequestParameters, boolean multiSession) { @@ -323,73 +385,56 @@ public DefaultDrmSessionManager( @Deprecated public DefaultDrmSessionManager( UUID uuid, - ExoMediaDrm exoMediaDrm, + ExoMediaDrm exoMediaDrm, MediaDrmCallback callback, @Nullable HashMap keyRequestParameters, boolean multiSession, int initialDrmRequestRetryCount) { this( uuid, - new ExoMediaDrm.AppManagedProvider<>(exoMediaDrm), + new ExoMediaDrm.AppManagedProvider(exoMediaDrm), callback, keyRequestParameters == null ? 
new HashMap<>() : keyRequestParameters, multiSession, /* useDrmSessionsForClearContentTrackTypes= */ new int[0], /* playClearSamplesWithoutKeys= */ false, - new DefaultLoadErrorHandlingPolicy(initialDrmRequestRetryCount)); + new DefaultLoadErrorHandlingPolicy(initialDrmRequestRetryCount), + DEFAULT_SESSION_KEEPALIVE_MS); } - // the constructor does not initialize fields: offlineLicenseKeySetId - @SuppressWarnings("nullness:initialization.fields.uninitialized") private DefaultDrmSessionManager( UUID uuid, - ExoMediaDrm.Provider exoMediaDrmProvider, + ExoMediaDrm.Provider exoMediaDrmProvider, MediaDrmCallback callback, HashMap keyRequestParameters, boolean multiSession, - int[] useDrmSessionsForClearContentTrackTypes, + @C.TrackType int[] useDrmSessionsForClearContentTrackTypes, boolean playClearSamplesWithoutKeys, - LoadErrorHandlingPolicy loadErrorHandlingPolicy) { - Assertions.checkNotNull(uuid); - Assertions.checkArgument(!C.COMMON_PSSH_UUID.equals(uuid), "Use C.CLEARKEY_UUID instead"); + LoadErrorHandlingPolicy loadErrorHandlingPolicy, + long sessionKeepaliveMs) { + checkNotNull(uuid); + checkArgument(!C.COMMON_PSSH_UUID.equals(uuid), "Use C.CLEARKEY_UUID instead"); this.uuid = uuid; this.exoMediaDrmProvider = exoMediaDrmProvider; this.callback = callback; this.keyRequestParameters = keyRequestParameters; - this.eventDispatcher = new EventDispatcher<>(); this.multiSession = multiSession; this.useDrmSessionsForClearContentTrackTypes = useDrmSessionsForClearContentTrackTypes; this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys; this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; provisioningManagerImpl = new ProvisioningManagerImpl(); + referenceCountListener = new ReferenceCountListenerImpl(); mode = MODE_PLAYBACK; sessions = new ArrayList<>(); - provisioningSessions = new ArrayList<>(); - } - - /** - * Adds a {@link DefaultDrmSessionEventListener} to listen to drm session events. - * - * @param handler A handler to use when delivering events to {@code eventListener}. - * @param eventListener A listener of events. - */ - public final void addListener(Handler handler, DefaultDrmSessionEventListener eventListener) { - eventDispatcher.addListener(handler, eventListener); - } - - /** - * Removes a {@link DefaultDrmSessionEventListener} from the list of drm session event listeners. - * - * @param eventListener The listener to remove. - */ - public final void removeListener(DefaultDrmSessionEventListener eventListener) { - eventDispatcher.removeListener(eventListener); + preacquiredSessionReferences = Sets.newIdentityHashSet(); + keepaliveSessions = Sets.newIdentityHashSet(); + this.sessionKeepaliveMs = sessionKeepaliveMs; } /** * Sets the mode, which determines the role of sessions acquired from the instance. This must be - * called before {@link #acquireSession(Looper, DrmInitData)} or {@link - * #acquirePlaceholderSession} is called. + * called before {@link #acquireSession(DrmSessionEventListener.EventDispatcher, Format)} is + * called. * *
      By default, the mode is {@link #MODE_PLAYBACK} and a streaming license is requested when * required. @@ -397,23 +442,23 @@ public final void removeListener(DefaultDrmSessionEventListener eventListener) { *
      {@code mode} must be one of these: * *
        - *
      • {@link #MODE_PLAYBACK}: If {@code offlineLicenseKeySetId} is null, a streaming license is - * requested otherwise the offline license is restored. - *
      • {@link #MODE_QUERY}: {@code offlineLicenseKeySetId} can not be null. The offline license - * is restored. - *
      • {@link #MODE_DOWNLOAD}: If {@code offlineLicenseKeySetId} is null, an offline license is - * requested otherwise the offline license is renewed. - *
      • {@link #MODE_RELEASE}: {@code offlineLicenseKeySetId} can not be null. The offline - * license is released. + *
      • {@link #MODE_PLAYBACK}: If {@code offlineLicenseKeySetId} is null then a streaming + * license is requested. Otherwise, the offline license is restored. + *
      • {@link #MODE_QUERY}: {@code offlineLicenseKeySetId} cannot be null. The offline license + * is restored to allow its status to be queried. + *
      • {@link #MODE_DOWNLOAD}: If {@code offlineLicenseKeySetId} is null then an offline license + * is requested. Otherwise, the offline license is renewed. + *
      • {@link #MODE_RELEASE}: {@code offlineLicenseKeySetId} cannot be null. The offline license + * is released. *
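A hypothetical sketch of the modes listed above, not part of this patch: downloading an offline license and releasing it later. manager, format and storedKeySetId are assumed to exist, setPlayer(...) is assumed to have been called with the playback Looper, the format is assumed to carry DrmInitData (so acquireSession returns a non-null session), and waiting for the key load plus error handling are elided.

    manager.setMode(DefaultDrmSessionManager.MODE_DOWNLOAD, /* offlineLicenseKeySetId= */ null);
    manager.prepare();
    DrmSession session = manager.acquireSession(/* eventDispatcher= */ null, format);
    // ... once the session reaches STATE_OPENED_WITH_KEYS, persist the offline key set id ...
    session.release(/* eventDispatcher= */ null);
    manager.release();
    // Releasing the stored license later follows the same pattern with:
    // manager.setMode(DefaultDrmSessionManager.MODE_RELEASE, storedKeySetId);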
      * * @param mode The mode to be set. * @param offlineLicenseKeySetId The key set id of the license to be used with the given mode. */ public void setMode(@Mode int mode, @Nullable byte[] offlineLicenseKeySetId) { - Assertions.checkState(sessions.isEmpty()); + checkState(sessions.isEmpty()); if (mode == MODE_QUERY || mode == MODE_RELEASE) { - Assertions.checkNotNull(offlineLicenseKeySetId); + checkNotNull(offlineLicenseKeySetId); } this.mode = mode; this.offlineLicenseKeySetId = offlineLicenseKeySetId; @@ -423,101 +468,106 @@ public void setMode(@Mode int mode, @Nullable byte[] offlineLicenseKeySetId) { @Override public final void prepare() { - if (prepareCallsCount++ == 0) { - Assertions.checkState(exoMediaDrm == null); + if (prepareCallsCount++ != 0) { + return; + } + if (exoMediaDrm == null) { exoMediaDrm = exoMediaDrmProvider.acquireExoMediaDrm(uuid); exoMediaDrm.setOnEventListener(new MediaDrmEventListener()); + } else if (sessionKeepaliveMs != C.TIME_UNSET) { + // Re-acquire the keepalive references for any sessions that are still active. + for (int i = 0; i < sessions.size(); i++) { + sessions.get(i).acquire(/* eventDispatcher= */ null); + } } } @Override public final void release() { - if (--prepareCallsCount == 0) { - Assertions.checkNotNull(exoMediaDrm).release(); - exoMediaDrm = null; + if (--prepareCallsCount != 0) { + return; + } + // Release all keepalive acquisitions if keepalive is enabled. + if (sessionKeepaliveMs != C.TIME_UNSET) { + // Make a local copy, because sessions are removed from this.sessions during release (via + // callback). + List sessions = new ArrayList<>(this.sessions); + for (int i = 0; i < sessions.size(); i++) { + sessions.get(i).release(/* eventDispatcher= */ null); + } } + releaseAllPreacquiredSessions(); + + maybeReleaseMediaDrm(); } @Override - public boolean canAcquireSession(DrmInitData drmInitData) { - if (offlineLicenseKeySetId != null) { - // An offline license can be restored so a session can always be acquired. - return true; - } - List schemeDatas = getSchemeDatas(drmInitData, uuid, true); - if (schemeDatas.isEmpty()) { - if (drmInitData.schemeDataCount == 1 && drmInitData.get(0).matches(C.COMMON_PSSH_UUID)) { - // Assume scheme specific data will be added before the session is opened. - Log.w( - TAG, "DrmInitData only contains common PSSH SchemeData. Assuming support for: " + uuid); - } else { - // No data for this manager's scheme. - return false; - } - } - String schemeType = drmInitData.schemeType; - if (schemeType == null || C.CENC_TYPE_cenc.equals(schemeType)) { - // If there is no scheme information, assume patternless AES-CTR. - return true; - } else if (C.CENC_TYPE_cbc1.equals(schemeType) - || C.CENC_TYPE_cbcs.equals(schemeType) - || C.CENC_TYPE_cens.equals(schemeType)) { - // API support for AES-CBC and pattern encryption was added in API 24. However, the - // implementation was not stable until API 25. - return Util.SDK_INT >= 25; - } - // Unknown schemes, assume one of them is supported. 
- return true; + public void setPlayer(Looper playbackLooper, PlayerId playerId) { + initPlaybackLooper(playbackLooper); + this.playerId = playerId; } @Override - @Nullable - public DrmSession acquirePlaceholderSession(Looper playbackLooper, int trackType) { - assertExpectedPlaybackLooper(playbackLooper); - ExoMediaDrm exoMediaDrm = Assertions.checkNotNull(this.exoMediaDrm); - boolean avoidPlaceholderDrmSessions = - FrameworkMediaCrypto.class.equals(exoMediaDrm.getExoMediaCryptoType()) - && FrameworkMediaCrypto.WORKAROUND_DEVICE_NEEDS_KEYS_TO_CONFIGURE_CODEC; - // Avoid attaching a session to sparse formats. - if (avoidPlaceholderDrmSessions - || Util.linearSearch(useDrmSessionsForClearContentTrackTypes, trackType) == C.INDEX_UNSET - || exoMediaDrm.getExoMediaCryptoType() == null) { - return null; - } - maybeCreateMediaDrmHandler(playbackLooper); - if (placeholderDrmSession == null) { - DefaultDrmSession placeholderDrmSession = - createNewDefaultSession( - /* schemeDatas= */ Collections.emptyList(), /* isPlaceholderSession= */ true); - sessions.add(placeholderDrmSession); - this.placeholderDrmSession = placeholderDrmSession; - } - placeholderDrmSession.acquire(); - return placeholderDrmSession; + public DrmSessionReference preacquireSession( + @Nullable DrmSessionEventListener.EventDispatcher eventDispatcher, Format format) { + checkState(prepareCallsCount > 0); + checkStateNotNull(playbackLooper); + PreacquiredSessionReference preacquiredSessionReference = + new PreacquiredSessionReference(eventDispatcher); + preacquiredSessionReference.acquire(format); + return preacquiredSessionReference; } @Override - public DrmSession acquireSession(Looper playbackLooper, DrmInitData drmInitData) { - assertExpectedPlaybackLooper(playbackLooper); + @Nullable + public DrmSession acquireSession( + @Nullable DrmSessionEventListener.EventDispatcher eventDispatcher, Format format) { + checkState(prepareCallsCount > 0); + checkStateNotNull(playbackLooper); + return acquireSession( + playbackLooper, + eventDispatcher, + format, + /* shouldReleasePreacquiredSessionsBeforeRetrying= */ true); + } + + // Must be called on the playback thread. + @Nullable + private DrmSession acquireSession( + Looper playbackLooper, + @Nullable DrmSessionEventListener.EventDispatcher eventDispatcher, + Format format, + boolean shouldReleasePreacquiredSessionsBeforeRetrying) { maybeCreateMediaDrmHandler(playbackLooper); + if (format.drmInitData == null) { + // Content is not encrypted. + return maybeAcquirePlaceholderSession( + MimeTypes.getTrackType(format.sampleMimeType), + shouldReleasePreacquiredSessionsBeforeRetrying); + } + @Nullable List schemeDatas = null; if (offlineLicenseKeySetId == null) { - schemeDatas = getSchemeDatas(drmInitData, uuid, false); + schemeDatas = getSchemeDatas(checkNotNull(format.drmInitData), uuid, false); if (schemeDatas.isEmpty()) { final MissingSchemeDataException error = new MissingSchemeDataException(uuid); - eventDispatcher.dispatch(listener -> listener.onDrmSessionManagerError(error)); - return new ErrorStateDrmSession<>(new DrmSessionException(error)); + Log.e(TAG, "DRM error", error); + if (eventDispatcher != null) { + eventDispatcher.drmSessionManagerError(error); + } + return new ErrorStateDrmSession( + new DrmSessionException(error, PlaybackException.ERROR_CODE_DRM_CONTENT_ERROR)); } } - @Nullable DefaultDrmSession session; + @Nullable DefaultDrmSession session; if (!multiSession) { session = noMultiSessionDrmSession; } else { // Only use an existing session if it has matching init data. 
session = null; - for (DefaultDrmSession existingSession : sessions) { + for (DefaultDrmSession existingSession : sessions) { if (Util.areEqual(existingSession.schemeDatas, schemeDatas)) { session = existingSession; break; @@ -527,29 +577,110 @@ public DrmSession acquireSession(Looper playbackLooper, DrmInitData drmInitDa if (session == null) { // Create a new session. - session = createNewDefaultSession(schemeDatas, /* isPlaceholderSession= */ false); + session = + createAndAcquireSessionWithRetry( + schemeDatas, + /* isPlaceholderSession= */ false, + eventDispatcher, + shouldReleasePreacquiredSessionsBeforeRetrying); if (!multiSession) { noMultiSessionDrmSession = session; } sessions.add(session); + } else { + session.acquire(eventDispatcher); } - session.acquire(); + return session; } @Override - @Nullable - public Class getExoMediaCryptoType(DrmInitData drmInitData) { - return canAcquireSession(drmInitData) - ? Assertions.checkNotNull(exoMediaDrm).getExoMediaCryptoType() - : null; + public @C.CryptoType int getCryptoType(Format format) { + @C.CryptoType int cryptoType = checkNotNull(exoMediaDrm).getCryptoType(); + if (format.drmInitData == null) { + int trackType = MimeTypes.getTrackType(format.sampleMimeType); + return Util.linearSearch(useDrmSessionsForClearContentTrackTypes, trackType) != C.INDEX_UNSET + ? cryptoType + : C.CRYPTO_TYPE_NONE; + } else { + return canAcquireSession(format.drmInitData) ? cryptoType : C.CRYPTO_TYPE_UNSUPPORTED; + } } // Internal methods. - private void assertExpectedPlaybackLooper(Looper playbackLooper) { - Assertions.checkState(this.playbackLooper == null || this.playbackLooper == playbackLooper); - this.playbackLooper = playbackLooper; + @Nullable + private DrmSession maybeAcquirePlaceholderSession( + int trackType, boolean shouldReleasePreacquiredSessionsBeforeRetrying) { + ExoMediaDrm exoMediaDrm = checkNotNull(this.exoMediaDrm); + boolean avoidPlaceholderDrmSessions = + exoMediaDrm.getCryptoType() == C.CRYPTO_TYPE_FRAMEWORK + && FrameworkCryptoConfig.WORKAROUND_DEVICE_NEEDS_KEYS_TO_CONFIGURE_CODEC; + // Avoid attaching a session to sparse formats. + if (avoidPlaceholderDrmSessions + || Util.linearSearch(useDrmSessionsForClearContentTrackTypes, trackType) == C.INDEX_UNSET + || exoMediaDrm.getCryptoType() == C.CRYPTO_TYPE_UNSUPPORTED) { + return null; + } + if (placeholderDrmSession == null) { + DefaultDrmSession placeholderDrmSession = + createAndAcquireSessionWithRetry( + /* schemeDatas= */ ImmutableList.of(), + /* isPlaceholderSession= */ true, + /* eventDispatcher= */ null, + shouldReleasePreacquiredSessionsBeforeRetrying); + sessions.add(placeholderDrmSession); + this.placeholderDrmSession = placeholderDrmSession; + } else { + placeholderDrmSession.acquire(/* eventDispatcher= */ null); + } + return placeholderDrmSession; + } + + private boolean canAcquireSession(DrmInitData drmInitData) { + if (offlineLicenseKeySetId != null) { + // An offline license can be restored so a session can always be acquired. + return true; + } + List schemeDatas = getSchemeDatas(drmInitData, uuid, true); + if (schemeDatas.isEmpty()) { + if (drmInitData.schemeDataCount == 1 && drmInitData.get(0).matches(C.COMMON_PSSH_UUID)) { + // Assume scheme specific data will be added before the session is opened. + Log.w( + TAG, "DrmInitData only contains common PSSH SchemeData. Assuming support for: " + uuid); + } else { + // No data for this manager's scheme. 
+ return false; + } + } + String schemeType = drmInitData.schemeType; + if (schemeType == null || C.CENC_TYPE_cenc.equals(schemeType)) { + // If there is no scheme information, assume patternless AES-CTR. + return true; + } else if (C.CENC_TYPE_cbcs.equals(schemeType)) { + // Support for cbcs (AES-CBC with pattern encryption) was added in API 24. However, the + // implementation was not stable until API 25. + return Util.SDK_INT >= 25; + } else if (C.CENC_TYPE_cbc1.equals(schemeType) || C.CENC_TYPE_cens.equals(schemeType)) { + // Support for cbc1 (AES-CTR with pattern encryption) and cens (AES-CBC without pattern + // encryption) was also added in API 24 and made stable from API 25, however support was + // removed from API 30. Since the range of API levels for which these modes are usable is too + // small to be useful, we don't indicate support on any API level. + return false; + } + // Unknown schemes, assume one of them is supported. + return true; + } + + @EnsuresNonNull({"this.playbackLooper", "this.playbackHandler"}) + private synchronized void initPlaybackLooper(Looper playbackLooper) { + if (this.playbackLooper == null) { + this.playbackLooper = playbackLooper; + this.playbackHandler = new Handler(playbackLooper); + } else { + checkState(this.playbackLooper == playbackLooper); + checkNotNull(playbackHandler); + } } private void maybeCreateMediaDrmHandler(Looper playbackLooper) { @@ -558,42 +689,129 @@ private void maybeCreateMediaDrmHandler(Looper playbackLooper) { } } - private DefaultDrmSession createNewDefaultSession( - @Nullable List schemeDatas, boolean isPlaceholderSession) { - Assertions.checkNotNull(exoMediaDrm); - // Placeholder sessions should always play clear samples without keys. - boolean playClearSamplesWithoutKeys = this.playClearSamplesWithoutKeys | isPlaceholderSession; - return new DefaultDrmSession<>( - uuid, - exoMediaDrm, - /* provisioningManager= */ provisioningManagerImpl, - /* releaseCallback= */ this::onSessionReleased, - schemeDatas, - mode, - playClearSamplesWithoutKeys, - isPlaceholderSession, - offlineLicenseKeySetId, - keyRequestParameters, - callback, - Assertions.checkNotNull(playbackLooper), - eventDispatcher, - loadErrorHandlingPolicy); + private DefaultDrmSession createAndAcquireSessionWithRetry( + @Nullable List schemeDatas, + boolean isPlaceholderSession, + @Nullable DrmSessionEventListener.EventDispatcher eventDispatcher, + boolean shouldReleasePreacquiredSessionsBeforeRetrying) { + DefaultDrmSession session = + createAndAcquireSession(schemeDatas, isPlaceholderSession, eventDispatcher); + // If we're short on DRM session resources, first try eagerly releasing all our keepalive + // sessions and then retry the acquisition. + if (acquisitionFailedIndicatingResourceShortage(session) && !keepaliveSessions.isEmpty()) { + releaseAllKeepaliveSessions(); + undoAcquisition(session, eventDispatcher); + session = createAndAcquireSession(schemeDatas, isPlaceholderSession, eventDispatcher); + } + + // If the acquisition failed again due to continued resource shortage, and + // shouldReleasePreacquiredSessionsBeforeRetrying is true, try releasing all pre-acquired + // sessions and then retry the acquisition. + if (acquisitionFailedIndicatingResourceShortage(session) + && shouldReleasePreacquiredSessionsBeforeRetrying + && !preacquiredSessionReferences.isEmpty()) { + releaseAllPreacquiredSessions(); + if (!keepaliveSessions.isEmpty()) { + // Some preacquired sessions released above are now in their keepalive timeout phase. 
We + // release the keepalive references immediately. + releaseAllKeepaliveSessions(); + } + undoAcquisition(session, eventDispatcher); + session = createAndAcquireSession(schemeDatas, isPlaceholderSession, eventDispatcher); + } + return session; + } + + private static boolean acquisitionFailedIndicatingResourceShortage(DrmSession session) { + // ResourceBusyException is only available at API 19, so on earlier versions we + // assume any error indicates resource shortage (ensuring we retry). + return session.getState() == DrmSession.STATE_ERROR + && (Util.SDK_INT < 19 + || checkNotNull(session.getError()).getCause() instanceof ResourceBusyException); } - private void onSessionReleased(DefaultDrmSession drmSession) { - sessions.remove(drmSession); - if (placeholderDrmSession == drmSession) { - placeholderDrmSession = null; + /** + * Undoes the acquisitions from {@link #createAndAcquireSession(List, boolean, + * DrmSessionEventListener.EventDispatcher)}. + */ + private void undoAcquisition( + DrmSession session, @Nullable DrmSessionEventListener.EventDispatcher eventDispatcher) { + session.release(eventDispatcher); + if (sessionKeepaliveMs != C.TIME_UNSET) { + session.release(/* eventDispatcher= */ null); + } + } + + private void releaseAllKeepaliveSessions() { + // Make a local copy, because sessions are removed from this.keepaliveSessions during + // release (via callback). + ImmutableSet keepaliveSessions = ImmutableSet.copyOf(this.keepaliveSessions); + for (DrmSession keepaliveSession : keepaliveSessions) { + keepaliveSession.release(/* eventDispatcher= */ null); + } + } + + private void releaseAllPreacquiredSessions() { + // Make a local copy, because sessions are removed from this.preacquiredSessionReferences + // during release (via callback). + ImmutableSet preacquiredSessionReferences = + ImmutableSet.copyOf(this.preacquiredSessionReferences); + for (PreacquiredSessionReference preacquiredSessionReference : preacquiredSessionReferences) { + preacquiredSessionReference.release(); } - if (noMultiSessionDrmSession == drmSession) { - noMultiSessionDrmSession = null; + } + + /** + * Creates a new {@link DefaultDrmSession} and acquires it on behalf of the caller (passing in + * {@code eventDispatcher}). + * + *
      If {@link #sessionKeepaliveMs} != {@link C#TIME_UNSET} then acquires it again to allow the + * manager to keep it alive (passing in {@code eventDispatcher=null}. + */ + private DefaultDrmSession createAndAcquireSession( + @Nullable List schemeDatas, + boolean isPlaceholderSession, + @Nullable DrmSessionEventListener.EventDispatcher eventDispatcher) { + checkNotNull(exoMediaDrm); + // Placeholder sessions should always play clear samples without keys. + boolean playClearSamplesWithoutKeys = this.playClearSamplesWithoutKeys | isPlaceholderSession; + DefaultDrmSession session = + new DefaultDrmSession( + uuid, + exoMediaDrm, + /* provisioningManager= */ provisioningManagerImpl, + referenceCountListener, + schemeDatas, + mode, + playClearSamplesWithoutKeys, + isPlaceholderSession, + offlineLicenseKeySetId, + keyRequestParameters, + callback, + checkNotNull(playbackLooper), + loadErrorHandlingPolicy, + checkNotNull(playerId)); + // Acquire the session once on behalf of the caller to DrmSessionManager - this is the + // reference 'assigned' to the caller which they're responsible for releasing. Do this first, + // to ensure that eventDispatcher receives all events related to the initial + // acquisition/opening. + session.acquire(eventDispatcher); + if (sessionKeepaliveMs != C.TIME_UNSET) { + // Acquire the session once more so the Manager can keep it alive. + session.acquire(/* eventDispatcher= */ null); } - if (provisioningSessions.size() > 1 && provisioningSessions.get(0) == drmSession) { - // Other sessions were waiting for the released session to complete a provision operation. - // We need to have one of those sessions perform the provision operation instead. - provisioningSessions.get(1).provision(); + return session; + } + + private void maybeReleaseMediaDrm() { + if (exoMediaDrm != null + && prepareCallsCount == 0 + && sessions.isEmpty() + && preacquiredSessionReferences.isEmpty()) { + // This manager and all its sessions are fully released so we can release exoMediaDrm. + checkNotNull(exoMediaDrm).release(); + exoMediaDrm = null; } - provisioningSessions.remove(drmSession); } /** @@ -636,7 +854,7 @@ public void handleMessage(Message msg) { // The event is not associated with any particular session. return; } - for (DefaultDrmSession session : sessions) { + for (DefaultDrmSession session : sessions) { if (session.hasSessionId(sessionId)) { session.onMediaDrmEvent(msg.what); return; @@ -645,47 +863,183 @@ public void handleMessage(Message msg) { } } - private class ProvisioningManagerImpl implements DefaultDrmSession.ProvisioningManager { + private class ProvisioningManagerImpl implements DefaultDrmSession.ProvisioningManager { + + private final Set sessionsAwaitingProvisioning; + @Nullable private DefaultDrmSession provisioningSession; + + public ProvisioningManagerImpl() { + sessionsAwaitingProvisioning = new HashSet<>(); + } + @Override - public void provisionRequired(DefaultDrmSession session) { - if (provisioningSessions.contains(session)) { - // The session has already requested provisioning. + public void provisionRequired(DefaultDrmSession session) { + sessionsAwaitingProvisioning.add(session); + if (provisioningSession != null) { + // Provisioning is already in-flight. return; } - provisioningSessions.add(session); - if (provisioningSessions.size() == 1) { - // This is the first session requesting provisioning, so have it perform the operation. 
- session.provision(); - } + provisioningSession = session; + session.provision(); } @Override public void onProvisionCompleted() { - for (DefaultDrmSession session : provisioningSessions) { + provisioningSession = null; + ImmutableList sessionsToNotify = + ImmutableList.copyOf(sessionsAwaitingProvisioning); + // Clear the list before calling onProvisionComplete in case provisioning is re-requested. + sessionsAwaitingProvisioning.clear(); + for (DefaultDrmSession session : sessionsToNotify) { session.onProvisionCompleted(); } - provisioningSessions.clear(); } @Override - public void onProvisionError(Exception error) { - for (DefaultDrmSession session : provisioningSessions) { - session.onProvisionError(error); + public void onProvisionError(Exception error, boolean thrownByExoMediaDrm) { + provisioningSession = null; + ImmutableList sessionsToNotify = + ImmutableList.copyOf(sessionsAwaitingProvisioning); + // Clear the list before calling onProvisionError in case provisioning is re-requested. + sessionsAwaitingProvisioning.clear(); + for (DefaultDrmSession session : sessionsToNotify) { + session.onProvisionError(error, thrownByExoMediaDrm); + } + } + + public void onSessionFullyReleased(DefaultDrmSession session) { + sessionsAwaitingProvisioning.remove(session); + if (provisioningSession == session) { + provisioningSession = null; + if (!sessionsAwaitingProvisioning.isEmpty()) { + // Other sessions were waiting for the released session to complete a provision operation. + // We need to have one of those sessions perform the provision operation instead. + provisioningSession = sessionsAwaitingProvisioning.iterator().next(); + provisioningSession.provision(); + } } - provisioningSessions.clear(); } } - private class MediaDrmEventListener implements OnEventListener { + private class ReferenceCountListenerImpl implements DefaultDrmSession.ReferenceCountListener { + + @Override + public void onReferenceCountIncremented(DefaultDrmSession session, int newReferenceCount) { + if (sessionKeepaliveMs != C.TIME_UNSET) { + // The session has been acquired elsewhere so we want to cancel our timeout. + keepaliveSessions.remove(session); + checkNotNull(playbackHandler).removeCallbacksAndMessages(session); + } + } + + @Override + public void onReferenceCountDecremented(DefaultDrmSession session, int newReferenceCount) { + if (newReferenceCount == 1 && prepareCallsCount > 0 && sessionKeepaliveMs != C.TIME_UNSET) { + // Only the internal keep-alive reference remains, so we can start the timeout. We only + // do this if the manager isn't released, because a released manager has already released + // all its internal session keep-alive references. + keepaliveSessions.add(session); + checkNotNull(playbackHandler) + .postAtTime( + () -> session.release(/* eventDispatcher= */ null), + session, + /* uptimeMillis= */ SystemClock.uptimeMillis() + sessionKeepaliveMs); + } else if (newReferenceCount == 0) { + // This session is fully released. 
+ sessions.remove(session); + if (placeholderDrmSession == session) { + placeholderDrmSession = null; + } + if (noMultiSessionDrmSession == session) { + noMultiSessionDrmSession = null; + } + provisioningManagerImpl.onSessionFullyReleased(session); + if (sessionKeepaliveMs != C.TIME_UNSET) { + checkNotNull(playbackHandler).removeCallbacksAndMessages(session); + keepaliveSessions.remove(session); + } + } + maybeReleaseMediaDrm(); + } + } + + private class MediaDrmEventListener implements OnEventListener { @Override public void onEvent( - ExoMediaDrm md, - @Nullable byte[] sessionId, - int event, - int extra, - @Nullable byte[] data) { - Assertions.checkNotNull(mediaDrmHandler).obtainMessage(event, sessionId).sendToTarget(); + ExoMediaDrm md, @Nullable byte[] sessionId, int event, int extra, @Nullable byte[] data) { + checkNotNull(mediaDrmHandler).obtainMessage(event, sessionId).sendToTarget(); + } + } + + /** + * An implementation of {@link DrmSessionReference} that lazily acquires the underlying {@link + * DrmSession}. + * + *
      A new instance is needed for each reference (compared to maintaining exactly one instance + * for each {@link DrmSession}) because each associated {@link + * DrmSessionEventListener.EventDispatcher} might be different. The {@link + * DrmSessionEventListener.EventDispatcher} is required to implement the zero-arg {@link + * DrmSessionReference#release()} method. + */ + private class PreacquiredSessionReference implements DrmSessionReference { + + @Nullable private final DrmSessionEventListener.EventDispatcher eventDispatcher; + + @Nullable private DrmSession session; + private boolean isReleased; + + /** + * Constructs an instance. + * + * @param eventDispatcher The {@link DrmSessionEventListener.EventDispatcher} passed to {@link + * #acquireSession(DrmSessionEventListener.EventDispatcher, Format)}. + */ + public PreacquiredSessionReference( + @Nullable DrmSessionEventListener.EventDispatcher eventDispatcher) { + this.eventDispatcher = eventDispatcher; + } + + /** + * Acquires the underlying session. + * + *
      Must be called at most once. Can be called from any thread. + */ + public void acquire(Format format) { + checkNotNull(playbackHandler) + .post( + () -> { + if (prepareCallsCount == 0 || isReleased) { + // The manager has been fully released or this reference has already been + // released. Abort the acquisition attempt. + return; + } + this.session = + acquireSession( + checkNotNull(playbackLooper), + eventDispatcher, + format, + /* shouldReleasePreacquiredSessionsBeforeRetrying= */ false); + preacquiredSessionReferences.add(this); + }); + } + + @Override + public void release() { + // Ensure the underlying session is released immediately if we're already on the playback + // thread, to allow a failed session opening to be immediately retried. + Util.postOrRun( + checkNotNull(playbackHandler), + () -> { + if (isReleased) { + return; + } + if (session != null) { + session.release(eventDispatcher); + } + preacquiredSessionReferences.remove(this); + isReleased = true; + }); } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSessionManagerProvider.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSessionManagerProvider.java new file mode 100644 index 0000000000..ae0124bcfc --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSessionManagerProvider.java @@ -0,0 +1,115 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.drm; + +import static com.google.android.exoplayer2.drm.DefaultDrmSessionManager.MODE_PLAYBACK; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import androidx.annotation.GuardedBy; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.MediaItem; +import com.google.android.exoplayer2.upstream.DataSource; +import com.google.android.exoplayer2.upstream.DefaultHttpDataSource; +import com.google.android.exoplayer2.util.Util; +import com.google.common.primitives.Ints; +import java.util.Map; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** Default implementation of {@link DrmSessionManagerProvider}. */ +public final class DefaultDrmSessionManagerProvider implements DrmSessionManagerProvider { + + private final Object lock; + + @GuardedBy("lock") + private MediaItem.@MonotonicNonNull DrmConfiguration drmConfiguration; + + @GuardedBy("lock") + private @MonotonicNonNull DrmSessionManager manager; + + @Nullable private DataSource.Factory drmHttpDataSourceFactory; + @Nullable private String userAgent; + + public DefaultDrmSessionManagerProvider() { + lock = new Object(); + } + + /** + * Sets the {@link DataSource.Factory} which is used to create {@link HttpMediaDrmCallback} + * instances. If {@code null} is passed a {@link DefaultHttpDataSource.Factory} is used. 
+ * + * @param drmDataSourceFactory The data source factory or {@code null} to use {@link + * DefaultHttpDataSource.Factory}. + */ + public void setDrmHttpDataSourceFactory(@Nullable DataSource.Factory drmDataSourceFactory) { + this.drmHttpDataSourceFactory = drmDataSourceFactory; + } + + /** + * @deprecated Pass a custom {@link DataSource.Factory} to {@link + * #setDrmHttpDataSourceFactory(DataSource.Factory)} which sets the desired user agent on + * outgoing requests. + */ + @Deprecated + public void setDrmUserAgent(@Nullable String userAgent) { + this.userAgent = userAgent; + } + + @Override + public DrmSessionManager get(MediaItem mediaItem) { + checkNotNull(mediaItem.localConfiguration); + @Nullable + MediaItem.DrmConfiguration drmConfiguration = mediaItem.localConfiguration.drmConfiguration; + if (drmConfiguration == null || Util.SDK_INT < 18) { + return DrmSessionManager.DRM_UNSUPPORTED; + } + + synchronized (lock) { + if (!Util.areEqual(drmConfiguration, this.drmConfiguration)) { + this.drmConfiguration = drmConfiguration; + this.manager = createManager(drmConfiguration); + } + return checkNotNull(this.manager); + } + } + + @RequiresApi(18) + private DrmSessionManager createManager(MediaItem.DrmConfiguration drmConfiguration) { + DataSource.Factory dataSourceFactory = + drmHttpDataSourceFactory != null + ? drmHttpDataSourceFactory + : new DefaultHttpDataSource.Factory().setUserAgent(userAgent); + HttpMediaDrmCallback httpDrmCallback = + new HttpMediaDrmCallback( + drmConfiguration.licenseUri == null ? null : drmConfiguration.licenseUri.toString(), + drmConfiguration.forceDefaultLicenseUri, + dataSourceFactory); + for (Map.Entry entry : drmConfiguration.licenseRequestHeaders.entrySet()) { + httpDrmCallback.setKeyRequestProperty(entry.getKey(), entry.getValue()); + } + DefaultDrmSessionManager drmSessionManager = + new DefaultDrmSessionManager.Builder() + .setUuidAndExoMediaDrmProvider( + drmConfiguration.scheme, FrameworkMediaDrm.DEFAULT_PROVIDER) + .setMultiSession(drmConfiguration.multiSession) + .setPlayClearSamplesWithoutKeys(drmConfiguration.playClearContentWithoutKey) + .setUseDrmSessionsForClearContent( + Ints.toArray(drmConfiguration.forcedSessionTrackTypes)) + .build(httpDrmCallback); + drmSessionManager.setMode(MODE_PLAYBACK, drmConfiguration.getKeySetId()); + return drmSessionManager; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmInitData.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmInitData.java index 2f0246ba64..b2e690fde9 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmInitData.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmInitData.java @@ -18,6 +18,7 @@ import android.os.Parcel; import android.os.Parcelable; import android.text.TextUtils; +import androidx.annotation.CheckResult; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; @@ -29,9 +30,7 @@ import java.util.List; import java.util.UUID; -/** - * Initialization data for one or more DRM schemes. - */ +/** Initialization data for one or more DRM schemes. */ public final class DrmInitData implements Comparator, Parcelable { /** @@ -53,7 +52,8 @@ public final class DrmInitData implements Comparator, Parcelable { * @param mediaData DRM session acquisition data obtained from the media. * @return A {@link DrmInitData} obtained from merging a media manifest and a media stream. 
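A hypothetical usage sketch for the DefaultDrmSessionManagerProvider added above, not part of this patch: it derives a DrmSessionManager from a MediaItem's DRM configuration. The URIs are placeholders and the exact MediaItem builder API depends on the ExoPlayer version being merged here.

    DefaultDrmSessionManagerProvider provider = new DefaultDrmSessionManagerProvider();
    MediaItem mediaItem =
        new MediaItem.Builder()
            .setUri("https://example.invalid/stream.mpd") // placeholder URI
            .setDrmConfiguration(
                new MediaItem.DrmConfiguration.Builder(C.WIDEVINE_UUID)
                    .setLicenseUri("https://example.invalid/license") // placeholder URI
                    .build())
            .build();
    DrmSessionManager drmSessionManager = provider.get(mediaItem);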
*/ - public static @Nullable DrmInitData createSessionCreationData( + @Nullable + public static DrmInitData createSessionCreationData( @Nullable DrmInitData manifestData, @Nullable DrmInitData mediaData) { ArrayList result = new ArrayList<>(); String schemeType = null; @@ -89,9 +89,7 @@ public final class DrmInitData implements Comparator, Parcelable { /** The protection scheme type, or null if not applicable or unknown. */ @Nullable public final String schemeType; - /** - * Number of {@link SchemeData}s. - */ + /** Number of {@link SchemeData}s. */ public final int schemeDataCount; /** @@ -124,8 +122,8 @@ public DrmInitData(@Nullable String schemeType, SchemeData... schemeDatas) { this(schemeType, true, schemeDatas); } - private DrmInitData(@Nullable String schemeType, boolean cloneSchemeDatas, - SchemeData... schemeDatas) { + private DrmInitData( + @Nullable String schemeType, boolean cloneSchemeDatas, SchemeData... schemeDatas) { this.schemeType = schemeType; if (cloneSchemeDatas) { schemeDatas = schemeDatas.clone(); @@ -137,31 +135,12 @@ private DrmInitData(@Nullable String schemeType, boolean cloneSchemeDatas, Arrays.sort(this.schemeDatas, this); } - /* package */ - DrmInitData(Parcel in) { + /* package */ DrmInitData(Parcel in) { schemeType = in.readString(); schemeDatas = Util.castNonNull(in.createTypedArray(SchemeData.CREATOR)); schemeDataCount = schemeDatas.length; } - /** - * Retrieves data for a given DRM scheme, specified by its UUID. - * - * @deprecated Use {@link #get(int)} and {@link SchemeData#matches(UUID)} instead. - * @param uuid The DRM scheme's UUID. - * @return The initialization data for the scheme, or null if the scheme is not supported. - */ - @Deprecated - @Nullable - public SchemeData get(UUID uuid) { - for (SchemeData schemeData : schemeDatas) { - if (schemeData.matches(uuid)) { - return schemeData; - } - } - return null; - } - /** * Retrieves the {@link SchemeData} at a given index. * @@ -178,6 +157,7 @@ public SchemeData get(int index) { * @param schemeType A protection scheme type. May be null. * @return A copy with the specified protection scheme type. */ + @CheckResult public DrmInitData copyWithSchemeType(@Nullable String schemeType) { if (Util.areEqual(this.schemeType, schemeType)) { return this; @@ -229,7 +209,8 @@ public boolean equals(@Nullable Object obj) { @Override public int compare(SchemeData first, SchemeData second) { - return C.UUID_NIL.equals(first.uuid) ? (C.UUID_NIL.equals(second.uuid) ? 0 : 1) + return C.UUID_NIL.equals(first.uuid) + ? (C.UUID_NIL.equals(second.uuid) ? 0 : 1) : first.uuid.compareTo(second.uuid); } @@ -249,17 +230,16 @@ public void writeToParcel(Parcel dest, int flags) { public static final Parcelable.Creator CREATOR = new Parcelable.Creator() { - @Override - public DrmInitData createFromParcel(Parcel in) { - return new DrmInitData(in); - } - - @Override - public DrmInitData[] newArray(int size) { - return new DrmInitData[size]; - } + @Override + public DrmInitData createFromParcel(Parcel in) { + return new DrmInitData(in); + } - }; + @Override + public DrmInitData[] newArray(int size) { + return new DrmInitData[size]; + } + }; // Internal methods. @@ -273,9 +253,7 @@ private static boolean containsSchemeDataWithUuid( return false; } - /** - * Scheme initialization data. - */ + /** Scheme initialization data. */ public static final class SchemeData implements Parcelable { // Lazily initialized hashcode. 
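A hypothetical sketch, not part of this patch, of the replacement for the deprecated DrmInitData.get(UUID) removed above: indexed access via get(int) combined with SchemeData.matches(UUID). drmInitData is assumed to exist.

    DrmInitData.SchemeData widevineData = null;
    for (int i = 0; i < drmInitData.schemeDataCount; i++) {
      DrmInitData.SchemeData schemeData = drmInitData.get(i);
      if (schemeData.matches(C.WIDEVINE_UUID)) {
        widevineData = schemeData;
        break;
      }
    }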
@@ -285,7 +263,7 @@ public static final class SchemeData implements Parcelable { * The {@link UUID} of the DRM scheme, or {@link C#UUID_NIL} if the data is universal (i.e. * applies to all schemes). */ - private final UUID uuid; + public final UUID uuid; /** The URL of the server to which license requests should be made. May be null if unknown. */ @Nullable public final String licenseServerUrl; /** The mimeType of {@link #data}. */ @@ -345,9 +323,7 @@ public boolean canReplace(SchemeData other) { return hasData() && !other.hasData() && matches(other.uuid); } - /** - * Returns whether {@link #data} is non-null. - */ + /** Returns whether {@link #data} is non-null. */ public boolean hasData() { return data != null; } @@ -358,6 +334,7 @@ public boolean hasData() { * @param data The data to include in the copy. * @return The new instance. */ + @CheckResult public SchemeData copyWithData(@Nullable byte[] data) { return new SchemeData(uuid, licenseServerUrl, mimeType, data); } @@ -408,18 +385,15 @@ public void writeToParcel(Parcel dest, int flags) { public static final Parcelable.Creator CREATOR = new Parcelable.Creator() { - @Override - public SchemeData createFromParcel(Parcel in) { - return new SchemeData(in); - } - - @Override - public SchemeData[] newArray(int size) { - return new SchemeData[size]; - } - - }; + @Override + public SchemeData createFromParcel(Parcel in) { + return new SchemeData(in); + } + @Override + public SchemeData[] newArray(int size) { + return new SchemeData[size]; + } + }; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmSession.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmSession.java index 35358f04f7..09b695a4cb 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmSession.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmSession.java @@ -15,67 +15,81 @@ */ package com.google.android.exoplayer2.drm; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.media.MediaDrm; import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.decoder.CryptoConfig; import java.io.IOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.Map; +import java.util.UUID; -/** - * A DRM session. - */ -public interface DrmSession { +/** A DRM session. */ +public interface DrmSession { /** - * Invokes {@code newSession's} {@link #acquire()} and {@code previousSession's} {@link - * #release()} in that order. Null arguments are ignored. Does nothing if {@code previousSession} - * and {@code newSession} are the same session. + * Acquires {@code newSession} then releases {@code previousSession}. + * + *
      Invokes {@code newSession's} {@link #acquire(DrmSessionEventListener.EventDispatcher)} and + * {@code previousSession's} {@link #release(DrmSessionEventListener.EventDispatcher)} in that + * order (passing {@code eventDispatcher = null}). Null arguments are ignored. Does nothing if + * {@code previousSession} and {@code newSession} are the same session. */ - static void replaceSession( - @Nullable DrmSession previousSession, @Nullable DrmSession newSession) { + static void replaceSession( + @Nullable DrmSession previousSession, @Nullable DrmSession newSession) { if (previousSession == newSession) { // Do nothing. return; } if (newSession != null) { - newSession.acquire(); + newSession.acquire(/* eventDispatcher= */ null); } if (previousSession != null) { - previousSession.release(); + previousSession.release(/* eventDispatcher= */ null); } } /** Wraps the throwable which is the cause of the error state. */ class DrmSessionException extends IOException { - public DrmSessionException(Throwable cause) { + /** The {@link PlaybackException.ErrorCode} that corresponds to the failure. */ + public final @PlaybackException.ErrorCode int errorCode; + + public DrmSessionException(Throwable cause, @PlaybackException.ErrorCode int errorCode) { super(cause); + this.errorCode = errorCode; } - } /** * The state of the DRM session. One of {@link #STATE_RELEASED}, {@link #STATE_ERROR}, {@link * #STATE_OPENING}, {@link #STATE_OPENED} or {@link #STATE_OPENED_WITH_KEYS}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({STATE_RELEASED, STATE_ERROR, STATE_OPENING, STATE_OPENED, STATE_OPENED_WITH_KEYS}) @interface State {} - /** - * The session has been released. - */ + /** The session has been released. This is a terminal state. */ int STATE_RELEASED = 0; /** * The session has encountered an error. {@link #getError()} can be used to retrieve the cause. + * This is a terminal state. */ int STATE_ERROR = 1; - /** - * The session is being opened. - */ + /** The session is being opened. */ int STATE_OPENING = 2; /** The session is open, but does not have keys required for decryption. */ int STATE_OPENED = 3; @@ -83,11 +97,12 @@ public DrmSessionException(Throwable cause) { int STATE_OPENED_WITH_KEYS = 4; /** - * Returns the current state of the session, which is one of {@link #STATE_ERROR}, - * {@link #STATE_RELEASED}, {@link #STATE_OPENING}, {@link #STATE_OPENED} and - * {@link #STATE_OPENED_WITH_KEYS}. + * Returns the current state of the session, which is one of {@link #STATE_ERROR}, {@link + * #STATE_RELEASED}, {@link #STATE_OPENING}, {@link #STATE_OPENED} and {@link + * #STATE_OPENED_WITH_KEYS}. */ - @State int getState(); + @State + int getState(); /** Returns whether this session allows playback of clear samples prior to keys being loaded. */ default boolean playClearSamplesWithoutKeys() { @@ -101,12 +116,15 @@ default boolean playClearSamplesWithoutKeys() { @Nullable DrmSessionException getError(); + /** Returns the DRM scheme UUID for this session. */ + UUID getSchemeUuid(); + /** - * Returns a {@link ExoMediaCrypto} for the open session, or null if called before the session has + * Returns a {@link CryptoConfig} for the open session, or null if called before the session has * been opened or after it's been released. 
*/ @Nullable - T getMediaCrypto(); + CryptoConfig getCryptoConfig(); /** * Returns a map describing the key status for the session, or null if called before the session @@ -130,15 +148,32 @@ default boolean playClearSamplesWithoutKeys() { @Nullable byte[] getOfflineLicenseKeySetId(); + /** + * Returns whether this session requires use of a secure decoder for the given MIME type. Assumes + * a license policy that requires the highest level of security supported by the session. + * + *
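// Illustrative sketch (not from the patch): typical reference-counted use of the reworked
// DrmSession API, for a component that is handed a session and keeps its own reference.
// `eventDispatcher` may be null and must be the same instance later passed to release(); imports
// from com.google.android.exoplayer2.drm, com.google.android.exoplayer2.decoder and
// androidx.annotation are assumed. As the following paragraph notes, requiresSecureDecoder may
// only be called once the session is opened.
static void useSessionSketch(
    DrmSession session, @Nullable DrmSessionEventListener.EventDispatcher eventDispatcher) {
  session.acquire(eventDispatcher);
  if (session.getState() == DrmSession.STATE_OPENED_WITH_KEYS) {
    boolean needsSecureDecoder = session.requiresSecureDecoder("video/avc"); // example MIME type
    CryptoConfig cryptoConfig = session.getCryptoConfig();
    // ... pass cryptoConfig and needsSecureDecoder to decoder configuration ...
  }
  session.release(eventDispatcher); // pairs with the acquire() above
}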
      The session must be in {@link #getState() state} {@link #STATE_OPENED} or {@link + * #STATE_OPENED_WITH_KEYS}. + */ + boolean requiresSecureDecoder(String mimeType); + /** * Increments the reference count. When the caller no longer needs to use the instance, it must - * call {@link #release()} to decrement the reference count. + * call {@link #release(DrmSessionEventListener.EventDispatcher)} to decrement the reference + * count. + * + * @param eventDispatcher The {@link DrmSessionEventListener.EventDispatcher} used to route + * DRM-related events dispatched from this session, or null if no event handling is needed. */ - void acquire(); + void acquire(@Nullable DrmSessionEventListener.EventDispatcher eventDispatcher); /** * Decrements the reference count. If the reference count drops to 0 underlying resources are * released, and the instance cannot be re-used. + * + * @param eventDispatcher The {@link DrmSessionEventListener.EventDispatcher} to disconnect when + * the session is released (the same instance (possibly null) that was passed by the caller to + * {@link #acquire(DrmSessionEventListener.EventDispatcher)}). */ - void release(); + void release(@Nullable DrmSessionEventListener.EventDispatcher eventDispatcher); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmSessionEventListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmSessionEventListener.java new file mode 100644 index 0000000000..33aaafbfb9 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmSessionEventListener.java @@ -0,0 +1,237 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.drm; + +import static com.google.android.exoplayer2.util.Util.postOrRun; + +import android.os.Handler; +import androidx.annotation.CheckResult; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Player; +import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; +import com.google.android.exoplayer2.util.Assertions; +import java.util.concurrent.CopyOnWriteArrayList; + +/** Listener of {@link DrmSessionManager} events. */ +public interface DrmSessionEventListener { + + /** + * @deprecated Implement {@link #onDrmSessionAcquired(int, MediaPeriodId, int)} instead. + */ + @Deprecated + default void onDrmSessionAcquired(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) {} + + /** + * Called each time a drm session is acquired. + * + * @param windowIndex The window index in the timeline this media period belongs to. + * @param mediaPeriodId The {@link MediaPeriodId} associated with the drm session. + * @param state The {@link DrmSession.State} of the session when the acquisition completed. + */ + default void onDrmSessionAcquired( + int windowIndex, @Nullable MediaPeriodId mediaPeriodId, @DrmSession.State int state) {} + + /** + * Called each time keys are loaded. 
+ * + * @param windowIndex The window index in the timeline this media period belongs to. + * @param mediaPeriodId The {@link MediaPeriodId} associated with the drm session. + */ + default void onDrmKeysLoaded(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) {} + + /** + * Called when a drm error occurs. + * + *
      This method being called does not indicate that playback has failed, or that it will fail. + * The player may be able to recover from the error and continue. Hence applications should + * not implement this method to display a user visible error or initiate an application + * level retry ({@link Player.Listener#onPlayerError} is the appropriate place to implement such + * behavior). This method is called to provide the application with an opportunity to log the + * error if it wishes to do so. + * + * @param windowIndex The window index in the timeline this media period belongs to. + * @param mediaPeriodId The {@link MediaPeriodId} associated with the drm session. + * @param error The corresponding exception. + */ + default void onDrmSessionManagerError( + int windowIndex, @Nullable MediaPeriodId mediaPeriodId, Exception error) {} + + /** + * Called each time offline keys are restored. + * + * @param windowIndex The window index in the timeline this media period belongs to. + * @param mediaPeriodId The {@link MediaPeriodId} associated with the drm session. + */ + default void onDrmKeysRestored(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) {} + + /** + * Called each time offline keys are removed. + * + * @param windowIndex The window index in the timeline this media period belongs to. + * @param mediaPeriodId The {@link MediaPeriodId} associated with the drm session. + */ + default void onDrmKeysRemoved(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) {} + + /** + * Called each time a drm session is released. + * + * @param windowIndex The window index in the timeline this media period belongs to. + * @param mediaPeriodId The {@link MediaPeriodId} associated with the drm session. + */ + default void onDrmSessionReleased(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) {} + + /** Dispatches events to {@link DrmSessionEventListener DrmSessionEventListeners}. */ + class EventDispatcher { + + /** The timeline window index reported with the events. */ + public final int windowIndex; + /** The {@link MediaPeriodId} reported with the events. */ + @Nullable public final MediaPeriodId mediaPeriodId; + + private final CopyOnWriteArrayList listenerAndHandlers; + + /** Creates an event dispatcher. */ + public EventDispatcher() { + this( + /* listenerAndHandlers= */ new CopyOnWriteArrayList<>(), + /* windowIndex= */ 0, + /* mediaPeriodId= */ null); + } + + private EventDispatcher( + CopyOnWriteArrayList listenerAndHandlers, + int windowIndex, + @Nullable MediaPeriodId mediaPeriodId) { + this.listenerAndHandlers = listenerAndHandlers; + this.windowIndex = windowIndex; + this.mediaPeriodId = mediaPeriodId; + } + + /** + * Creates a view of the event dispatcher with the provided window index and media period id. + * + * @param windowIndex The timeline window index to be reported with the events. + * @param mediaPeriodId The {@link MediaPeriodId} to be reported with the events. + * @return A view of the event dispatcher with the pre-configured parameters. + */ + @CheckResult + public EventDispatcher withParameters(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { + return new EventDispatcher(listenerAndHandlers, windowIndex, mediaPeriodId); + } + + /** + * Adds a listener to the event dispatcher. + * + * @param handler A handler on the which listener events will be posted. + * @param eventListener The listener to be added. 
+ */ + public void addEventListener(Handler handler, DrmSessionEventListener eventListener) { + Assertions.checkNotNull(handler); + Assertions.checkNotNull(eventListener); + listenerAndHandlers.add(new ListenerAndHandler(handler, eventListener)); + } + + /** + * Removes a listener from the event dispatcher. + * + * @param eventListener The listener to be removed. + */ + public void removeEventListener(DrmSessionEventListener eventListener) { + for (ListenerAndHandler listenerAndHandler : listenerAndHandlers) { + if (listenerAndHandler.listener == eventListener) { + listenerAndHandlers.remove(listenerAndHandler); + } + } + } + + /** + * Dispatches {@link #onDrmSessionAcquired(int, MediaPeriodId, int)} and {@link + * #onDrmSessionAcquired(int, MediaPeriodId)}. + */ + @SuppressWarnings("deprecation") // Calls deprecated listener method. + public void drmSessionAcquired(@DrmSession.State int state) { + for (ListenerAndHandler listenerAndHandler : listenerAndHandlers) { + DrmSessionEventListener listener = listenerAndHandler.listener; + postOrRun( + listenerAndHandler.handler, + () -> { + listener.onDrmSessionAcquired(windowIndex, mediaPeriodId); + listener.onDrmSessionAcquired(windowIndex, mediaPeriodId, state); + }); + } + } + + /** Dispatches {@link #onDrmKeysLoaded(int, MediaPeriodId)}. */ + public void drmKeysLoaded() { + for (ListenerAndHandler listenerAndHandler : listenerAndHandlers) { + DrmSessionEventListener listener = listenerAndHandler.listener; + postOrRun( + listenerAndHandler.handler, () -> listener.onDrmKeysLoaded(windowIndex, mediaPeriodId)); + } + } + + /** Dispatches {@link #onDrmSessionManagerError(int, MediaPeriodId, Exception)}. */ + public void drmSessionManagerError(Exception error) { + for (ListenerAndHandler listenerAndHandler : listenerAndHandlers) { + DrmSessionEventListener listener = listenerAndHandler.listener; + postOrRun( + listenerAndHandler.handler, + () -> listener.onDrmSessionManagerError(windowIndex, mediaPeriodId, error)); + } + } + + /** Dispatches {@link #onDrmKeysRestored(int, MediaPeriodId)}. */ + public void drmKeysRestored() { + for (ListenerAndHandler listenerAndHandler : listenerAndHandlers) { + DrmSessionEventListener listener = listenerAndHandler.listener; + postOrRun( + listenerAndHandler.handler, + () -> listener.onDrmKeysRestored(windowIndex, mediaPeriodId)); + } + } + + /** Dispatches {@link #onDrmKeysRemoved(int, MediaPeriodId)}. */ + public void drmKeysRemoved() { + for (ListenerAndHandler listenerAndHandler : listenerAndHandlers) { + DrmSessionEventListener listener = listenerAndHandler.listener; + postOrRun( + listenerAndHandler.handler, + () -> listener.onDrmKeysRemoved(windowIndex, mediaPeriodId)); + } + } + + /** Dispatches {@link #onDrmSessionReleased(int, MediaPeriodId)}. 
*/ + public void drmSessionReleased() { + for (ListenerAndHandler listenerAndHandler : listenerAndHandlers) { + DrmSessionEventListener listener = listenerAndHandler.listener; + postOrRun( + listenerAndHandler.handler, + () -> listener.onDrmSessionReleased(windowIndex, mediaPeriodId)); + } + } + + private static final class ListenerAndHandler { + + public Handler handler; + public DrmSessionEventListener listener; + + public ListenerAndHandler(Handler handler, DrmSessionEventListener listener) { + this.handler = handler; + this.listener = listener; + } + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmSessionManager.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmSessionManager.java index 146c5d704d..53dcbded55 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmSessionManager.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmSessionManager.java @@ -18,43 +18,74 @@ import android.os.Looper; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.analytics.PlayerId; -/** - * Manages a DRM session. - */ -public interface DrmSessionManager { +/** Manages a DRM session. */ +public interface DrmSessionManager { + + /** + * Represents a single reference count of a {@link DrmSession}, while deliberately not giving + * access to the underlying session. + */ + interface DrmSessionReference { + /** A reference that is never populated with an underlying {@link DrmSession}. */ + DrmSessionReference EMPTY = () -> {}; - /** Returns {@link #DUMMY}. */ - @SuppressWarnings("unchecked") - static DrmSessionManager getDummyDrmSessionManager() { - return (DrmSessionManager) DUMMY; + /** + * Releases the underlying session at most once. + * + *
      Can be called from any thread. Calling this method more than once will only release the + * underlying session once. + */ + void release(); } - /** {@link DrmSessionManager} that supports no DRM schemes. */ - DrmSessionManager DUMMY = - new DrmSessionManager() { + /** An instance that supports no DRM schemes. */ + DrmSessionManager DRM_UNSUPPORTED = + new DrmSessionManager() { @Override - public boolean canAcquireSession(DrmInitData drmInitData) { - return false; - } + public void setPlayer(Looper playbackLooper, PlayerId playerId) {} @Override - public DrmSession acquireSession( - Looper playbackLooper, DrmInitData drmInitData) { - return new ErrorStateDrmSession<>( - new DrmSession.DrmSessionException( - new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME))); + @Nullable + public DrmSession acquireSession( + @Nullable DrmSessionEventListener.EventDispatcher eventDispatcher, Format format) { + if (format.drmInitData == null) { + return null; + } else { + return new ErrorStateDrmSession( + new DrmSession.DrmSessionException( + new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME), + PlaybackException.ERROR_CODE_DRM_SCHEME_UNSUPPORTED)); + } } @Override - @Nullable - public Class getExoMediaCryptoType(DrmInitData drmInitData) { - return null; + public @C.CryptoType int getCryptoType(Format format) { + return format.drmInitData != null ? C.CRYPTO_TYPE_UNSUPPORTED : C.CRYPTO_TYPE_NONE; } }; + /** + * An instance that supports no DRM schemes. + * + * @deprecated Use {@link #DRM_UNSUPPORTED}. + */ + @Deprecated DrmSessionManager DUMMY = DRM_UNSUPPORTED; + + /** + * Returns {@link #DRM_UNSUPPORTED}. + * + * @deprecated Use {@link #DRM_UNSUPPORTED}. + */ + @Deprecated + static DrmSessionManager getDummyDrmSessionManager() { + return DRM_UNSUPPORTED; + } + /** * Acquires any required resources. * @@ -71,51 +102,90 @@ default void release() { } /** - * Returns whether the manager is capable of acquiring a session for the given - * {@link DrmInitData}. + * Sets information about the player using this DRM session manager. * - * @param drmInitData DRM initialization data. - * @return Whether the manager is capable of acquiring a session for the given - * {@link DrmInitData}. + * @param playbackLooper The {@link Looper} associated with the player's playback thread. + * @param playerId The {@link PlayerId} of the player. */ - boolean canAcquireSession(DrmInitData drmInitData); + void setPlayer(Looper playbackLooper, PlayerId playerId); /** - * Returns a {@link DrmSession} that does not execute key requests, with an incremented reference - * count. When the caller no longer needs to use the instance, it must call {@link - * DrmSession#release()} to decrement the reference count. + * Pre-acquires a DRM session for the specified {@link Format}. + * + *
      This notifies the manager that a subsequent call to {@link #acquireSession( + * DrmSessionEventListener.EventDispatcher, Format)} with the same {@link Format} is likely, + * allowing a manager that supports pre-acquisition to get the required {@link DrmSession} ready + * in the background. + * + *
      The caller must call {@link DrmSessionReference#release()} on the returned instance when + * they no longer require the pre-acquisition (i.e. they know they won't be making a matching call + * to {@link #acquireSession(DrmSessionEventListener.EventDispatcher, Format)} in the near + * future). + * + *
      This manager may silently release the underlying session in order to allow another operation + * to complete. This will result in a subsequent call to {@link #acquireSession( + * DrmSessionEventListener.EventDispatcher, Format)} re-initializing a new session, including + * repeating key loads and other async initialization steps. + * + *
      The caller must separately call {@link #acquireSession( + * DrmSessionEventListener.EventDispatcher, Format)} in order to obtain a session suitable for + * playback. The pre-acquired {@link DrmSessionReference} and full {@link DrmSession} instances + * are distinct. The caller must release both, and can release the {@link DrmSessionReference} + * before the {@link DrmSession} without affecting playback. * - *
      Placeholder {@link DrmSession DrmSessions} may be used to configure secure decoders for - * playback of clear content periods. This can reduce the cost of transitioning between clear and - * encrypted content periods. + *
      This can be called from any thread. * - * @param playbackLooper The looper associated with the media playback thread. - * @param trackType The type of the track to acquire a placeholder session for. Must be one of the - * {@link C}{@code .TRACK_TYPE_*} constants. - * @return The placeholder DRM session, or null if this DRM session manager does not support - * placeholder sessions. + *
      Implementations that do not support pre-acquisition always return an empty {@link + * DrmSessionReference} instance. + * + * @param eventDispatcher The {@link DrmSessionEventListener.EventDispatcher} used to distribute + * events, and passed on to {@link + * DrmSession#acquire(DrmSessionEventListener.EventDispatcher)}. + * @param format The {@link Format} for which to pre-acquire a {@link DrmSession}. + * @return A releaser for the pre-acquired session. Guaranteed to be non-null even if the matching + * {@link #acquireSession(DrmSessionEventListener.EventDispatcher, Format)} would return null. */ - @Nullable - default DrmSession acquirePlaceholderSession(Looper playbackLooper, int trackType) { - return null; + default DrmSessionReference preacquireSession( + @Nullable DrmSessionEventListener.EventDispatcher eventDispatcher, Format format) { + return DrmSessionReference.EMPTY; } /** - * Returns a {@link DrmSession} for the specified {@link DrmInitData}, with an incremented - * reference count. When the caller no longer needs to use the instance, it must call {@link - * DrmSession#release()} to decrement the reference count. + * Returns a {@link DrmSession} for the specified {@link Format}, with an incremented reference + * count. May return null if the {@link Format#drmInitData} is null and the DRM session manager is + * not configured to attach a {@link DrmSession} to clear content. When the caller no longer needs + * to use a returned {@link DrmSession}, it must call {@link + * DrmSession#release(DrmSessionEventListener.EventDispatcher)} to decrement the reference count. + * + *
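// Illustrative sketch (not from the patch) of the pre-acquisition flow described above, assuming
// a prepared DrmSessionManager `manager`, a Format `format`, and a possibly-null
// DrmSessionEventListener.EventDispatcher `eventDispatcher`.
static void preacquireSketch(
    DrmSessionManager manager,
    @Nullable DrmSessionEventListener.EventDispatcher eventDispatcher,
    Format format) {
  DrmSessionManager.DrmSessionReference reference =
      manager.preacquireSession(eventDispatcher, format); // may start key loads in the background
  // ... later, when playback actually needs the session ...
  DrmSession session = manager.acquireSession(eventDispatcher, format);
  // The pre-acquired reference and the full session are distinct and are released independently.
  reference.release();
  if (session != null) {
    // ... use session.getCryptoConfig() to configure decoders ...
    session.release(eventDispatcher);
  }
}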
      If the provided {@link Format} contains a null {@link Format#drmInitData}, the returned + * {@link DrmSession} (if not null) will be a placeholder session which does not execute key + * requests, and cannot be used to handle encrypted content. However, a placeholder session may be + * used to configure secure decoders for playback of clear content periods, which can reduce the + * cost of transitioning between clear and encrypted content. * - * @param playbackLooper The looper associated with the media playback thread. - * @param drmInitData DRM initialization data. All contained {@link SchemeData}s must contain - * non-null {@link SchemeData#data}. - * @return The DRM session. + * @param eventDispatcher The {@link DrmSessionEventListener.EventDispatcher} used to distribute + * events, and passed on to {@link + * DrmSession#acquire(DrmSessionEventListener.EventDispatcher)}. + * @param format The {@link Format} for which to acquire a {@link DrmSession}. + * @return The DRM session. May be null if the given {@link Format#drmInitData} is null. */ - DrmSession acquireSession(Looper playbackLooper, DrmInitData drmInitData); + @Nullable + DrmSession acquireSession( + @Nullable DrmSessionEventListener.EventDispatcher eventDispatcher, Format format); /** - * Returns the {@link ExoMediaCrypto} type returned by sessions acquired using the given {@link - * DrmInitData}, or null if a session cannot be acquired with the given {@link DrmInitData}. + * Returns the {@link C.CryptoType} that the DRM session manager will use for a given {@link + * Format}. Returns {@link C#CRYPTO_TYPE_UNSUPPORTED} if the manager does not support any of the + * DRM schemes defined in the {@link Format}. Returns {@link C#CRYPTO_TYPE_NONE} if {@link + * Format#drmInitData} is null and {@link #acquireSession} will return {@code null} for the given + * {@link Format}. + * + * @param format The {@link Format}. + * @return The {@link C.CryptoType} that the manager will use, or @link C#CRYPTO_TYPE_UNSUPPORTED} + * if the manager does not support any of the DRM schemes defined in the {@link Format}. Will + * be {@link C#CRYPTO_TYPE_NONE} if {@link Format#drmInitData} is null and {@link + * #acquireSession} will return null for the given {@link Format}. */ - @Nullable - Class getExoMediaCryptoType(DrmInitData drmInitData); + @C.CryptoType + int getCryptoType(Format format); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmSessionManagerProvider.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmSessionManagerProvider.java new file mode 100644 index 0000000000..158350e0fd --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmSessionManagerProvider.java @@ -0,0 +1,34 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.drm; + +import com.google.android.exoplayer2.MediaItem; + +/** + * A provider to obtain a {@link DrmSessionManager} suitable for playing the content described by a + * {@link MediaItem}. + */ +public interface DrmSessionManagerProvider { + + /** + * Returns a {@link DrmSessionManager} for the given media item. + * + *
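// Illustrative sketch (not from the patch): how a caller might use this provider, observing the
// prepare/release responsibility described in the following paragraph. `provider` and `mediaItem`
// are assumed to exist.
static void providerSketch(DrmSessionManagerProvider provider, MediaItem mediaItem) {
  DrmSessionManager drmSessionManager = provider.get(mediaItem);
  drmSessionManager.prepare();
  // ... acquire DrmSessions for this item's playback ...
  drmSessionManager.release();
}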
      The caller is responsible for {@link DrmSessionManager#prepare() preparing} the {@link + * DrmSessionManager} before use, and subsequently {@link DrmSessionManager#release() releasing} + * it. + */ + DrmSessionManager get(MediaItem mediaItem); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmUtil.java new file mode 100644 index 0000000000..9e8b165899 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DrmUtil.java @@ -0,0 +1,149 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.drm; + +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.media.DeniedByServerException; +import android.media.MediaDrm; +import android.media.MediaDrmResetException; +import android.media.NotProvisionedException; +import androidx.annotation.DoNotInline; +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.util.Util; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** DRM-related utility methods. */ +public final class DrmUtil { + + /** Identifies the operation which caused a DRM-related error. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef( + value = { + ERROR_SOURCE_EXO_MEDIA_DRM, + ERROR_SOURCE_LICENSE_ACQUISITION, + ERROR_SOURCE_PROVISIONING + }) + public @interface ErrorSource {} + + /** Corresponds to failures caused by an {@link ExoMediaDrm} method call. */ + public static final int ERROR_SOURCE_EXO_MEDIA_DRM = 1; + /** Corresponds to failures caused by an operation related to obtaining DRM licenses. */ + public static final int ERROR_SOURCE_LICENSE_ACQUISITION = 2; + /** Corresponds to failures caused by an operation related to provisioning the device. */ + public static final int ERROR_SOURCE_PROVISIONING = 3; + + /** + * Returns the {@link PlaybackException.ErrorCode} that corresponds to the given DRM-related + * exception. + * + * @param exception The DRM-related exception for which to obtain a corresponding {@link + * PlaybackException.ErrorCode}. + * @param errorSource The {@link ErrorSource} for the given {@code exception}. 
+ * @return The {@link PlaybackException.ErrorCode} that corresponds to the given DRM-related + * exception. + */ + public static @PlaybackException.ErrorCode int getErrorCodeForMediaDrmException( + Exception exception, @ErrorSource int errorSource) { + if (Util.SDK_INT >= 21 && Api21.isMediaDrmStateException(exception)) { + return Api21.mediaDrmStateExceptionToErrorCode(exception); + } else if (Util.SDK_INT >= 23 && Api23.isMediaDrmResetException(exception)) { + return PlaybackException.ERROR_CODE_DRM_SYSTEM_ERROR; + } else if (Util.SDK_INT >= 18 && Api18.isNotProvisionedException(exception)) { + return PlaybackException.ERROR_CODE_DRM_PROVISIONING_FAILED; + } else if (Util.SDK_INT >= 18 && Api18.isDeniedByServerException(exception)) { + return PlaybackException.ERROR_CODE_DRM_DEVICE_REVOKED; + } else if (exception instanceof UnsupportedDrmException) { + return PlaybackException.ERROR_CODE_DRM_SCHEME_UNSUPPORTED; + } else if (exception instanceof DefaultDrmSessionManager.MissingSchemeDataException) { + return PlaybackException.ERROR_CODE_DRM_CONTENT_ERROR; + } else if (exception instanceof KeysExpiredException) { + return PlaybackException.ERROR_CODE_DRM_LICENSE_EXPIRED; + } else if (errorSource == ERROR_SOURCE_EXO_MEDIA_DRM) { + // A MediaDrm exception was thrown but it was impossible to determine the cause. Because no + // better diagnosis tools were provided, we treat this as a system error. + return PlaybackException.ERROR_CODE_DRM_SYSTEM_ERROR; + } else if (errorSource == ERROR_SOURCE_LICENSE_ACQUISITION) { + return PlaybackException.ERROR_CODE_DRM_LICENSE_ACQUISITION_FAILED; + } else if (errorSource == ERROR_SOURCE_PROVISIONING) { + return PlaybackException.ERROR_CODE_DRM_PROVISIONING_FAILED; + } else { + // Should never happen. + throw new IllegalArgumentException(); + } + } + + // Internal classes. + + @RequiresApi(18) + private static final class Api18 { + + @DoNotInline + public static boolean isNotProvisionedException(@Nullable Throwable throwable) { + return throwable instanceof NotProvisionedException; + } + + @DoNotInline + public static boolean isDeniedByServerException(@Nullable Throwable throwable) { + return throwable instanceof DeniedByServerException; + } + } + + @RequiresApi(21) + private static final class Api21 { + + @DoNotInline + public static boolean isMediaDrmStateException(@Nullable Throwable throwable) { + return throwable instanceof MediaDrm.MediaDrmStateException; + } + + @DoNotInline + public static @PlaybackException.ErrorCode int mediaDrmStateExceptionToErrorCode( + Throwable throwable) { + @Nullable + String diagnosticsInfo = ((MediaDrm.MediaDrmStateException) throwable).getDiagnosticInfo(); + int drmErrorCode = Util.getErrorCodeFromPlatformDiagnosticsInfo(diagnosticsInfo); + return Util.getErrorCodeForMediaDrmErrorCode(drmErrorCode); + } + } + + @RequiresApi(23) + private static final class Api23 { + + @DoNotInline + public static boolean isMediaDrmResetException(@Nullable Throwable throwable) { + return throwable instanceof MediaDrmResetException; + } + } + + // Prevent instantiation. 
+ + private DrmUtil() {} +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DummyExoMediaDrm.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DummyExoMediaDrm.java index b619d9486f..444a72700e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DummyExoMediaDrm.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/DummyExoMediaDrm.java @@ -19,6 +19,8 @@ import android.os.PersistableBundle; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.decoder.CryptoConfig; import com.google.android.exoplayer2.util.Util; import java.util.HashMap; import java.util.List; @@ -26,21 +28,25 @@ /** An {@link ExoMediaDrm} that does not support any protection schemes. */ @RequiresApi(18) -public final class DummyExoMediaDrm implements ExoMediaDrm { +public final class DummyExoMediaDrm implements ExoMediaDrm { /** Returns a new instance. */ - @SuppressWarnings("unchecked") - public static DummyExoMediaDrm getInstance() { - return (DummyExoMediaDrm) new DummyExoMediaDrm<>(); + public static DummyExoMediaDrm getInstance() { + return new DummyExoMediaDrm(); } @Override - public void setOnEventListener(OnEventListener listener) { + public void setOnEventListener(@Nullable OnEventListener listener) { // Do nothing. } @Override - public void setOnKeyStatusChangeListener(OnKeyStatusChangeListener listener) { + public void setOnKeyStatusChangeListener(@Nullable OnKeyStatusChangeListener listener) { + // Do nothing. + } + + @Override + public void setOnExpirationUpdateListener(@Nullable OnExpirationUpdateListener listener) { // Do nothing. } @@ -64,8 +70,8 @@ public KeyRequest getKeyRequest( throw new IllegalStateException(); } - @Nullable @Override + @Nullable public byte[] provideKeyResponse(byte[] scope, byte[] response) { // Should not be invoked. No session should exist. throw new IllegalStateException(); @@ -89,6 +95,12 @@ public Map queryKeyStatus(byte[] sessionId) { throw new IllegalStateException(); } + @Override + public boolean requiresSecureDecoder(byte[] sessionId, String mimeType) { + // Should not be invoked. No session should exist. + throw new IllegalStateException(); + } + @Override public void acquire() { // Do nothing. @@ -132,15 +144,13 @@ public void setPropertyByteArray(String propertyName, byte[] value) { } @Override - public T createMediaCrypto(byte[] sessionId) { + public CryptoConfig createCryptoConfig(byte[] sessionId) { // Should not be invoked. No session should exist. throw new IllegalStateException(); } @Override - @Nullable - public Class getExoMediaCryptoType() { - // No ExoMediaCrypto type is supported. 
- return null; + public @C.CryptoType int getCryptoType() { + return C.CRYPTO_TYPE_UNSUPPORTED; } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/ErrorStateDrmSession.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/ErrorStateDrmSession.java index 0028e47987..239a62327d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/ErrorStateDrmSession.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/ErrorStateDrmSession.java @@ -16,11 +16,14 @@ package com.google.android.exoplayer2.drm; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.decoder.CryptoConfig; import com.google.android.exoplayer2.util.Assertions; import java.util.Map; +import java.util.UUID; /** A {@link DrmSession} that's in a terminal error state. */ -public final class ErrorStateDrmSession implements DrmSession { +public final class ErrorStateDrmSession implements DrmSession { private final DrmSessionException error; @@ -44,9 +47,14 @@ public DrmSessionException getError() { return error; } + @Override + public final UUID getSchemeUuid() { + return C.UUID_NIL; + } + @Override @Nullable - public T getMediaCrypto() { + public CryptoConfig getCryptoConfig() { return null; } @@ -63,12 +71,17 @@ public byte[] getOfflineLicenseKeySetId() { } @Override - public void acquire() { + public boolean requiresSecureDecoder(String mimeType) { + return false; + } + + @Override + public void acquire(@Nullable DrmSessionEventListener.EventDispatcher eventDispatcher) { // Do nothing. } @Override - public void release() { + public void release(@Nullable DrmSessionEventListener.EventDispatcher eventDispatcher) { // Do nothing. } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/ExoMediaCrypto.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/ExoMediaCrypto.java deleted file mode 100644 index feba7eaaf4..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/ExoMediaCrypto.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.drm; - -/** An opaque {@link android.media.MediaCrypto} equivalent. 
*/ -public interface ExoMediaCrypto {} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/ExoMediaDrm.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/ExoMediaDrm.java index b6ee644842..6981e88b8e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/ExoMediaDrm.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/ExoMediaDrm.java @@ -15,24 +15,35 @@ */ package com.google.android.exoplayer2.drm; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.media.DeniedByServerException; import android.media.MediaCryptoException; import android.media.MediaDrm; import android.media.MediaDrmException; import android.media.NotProvisionedException; +import android.media.ResourceBusyException; import android.os.Handler; import android.os.PersistableBundle; +import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.decoder.CryptoConfig; import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; /** - * Used to obtain keys for decrypting protected media streams. See {@link android.media.MediaDrm}. + * Used to obtain keys for decrypting protected media streams. * - *
Reference counting
+ * Reference counting
 * *
      Access to an instance is managed by reference counting, where {@link #acquire()} increments * the reference count and {@link #release()} decrements it. When the reference count drops to 0 @@ -41,18 +52,20 @@ *
      Each new instance has an initial reference count of 1. Hence application code that creates a * new instance does not normally need to call {@link #acquire()}, and must call {@link #release()} * when the instance is no longer required. + * + * @see MediaDrm */ -public interface ExoMediaDrm { +public interface ExoMediaDrm { - /** {@link ExoMediaDrm} instances provider. */ - interface Provider { + /** Provider for {@link ExoMediaDrm} instances. */ + interface Provider { /** * Returns an {@link ExoMediaDrm} instance with an incremented reference count. When the caller - * no longer needs to use the instance, it must call {@link ExoMediaDrm#release()} to decrement - * the reference count. + * no longer needs the instance, it must call {@link ExoMediaDrm#release()} to decrement the + * reference count. */ - ExoMediaDrm acquireExoMediaDrm(UUID uuid); + ExoMediaDrm acquireExoMediaDrm(UUID uuid); } /** @@ -62,56 +75,54 @@ interface Provider { * instance, and remains responsible for calling {@link ExoMediaDrm#release()} on the instance * when it's no longer being used. */ - final class AppManagedProvider implements Provider { + final class AppManagedProvider implements Provider { - private final ExoMediaDrm exoMediaDrm; + private final ExoMediaDrm exoMediaDrm; /** Creates an instance that provides the given {@link ExoMediaDrm}. */ - public AppManagedProvider(ExoMediaDrm exoMediaDrm) { + public AppManagedProvider(ExoMediaDrm exoMediaDrm) { this.exoMediaDrm = exoMediaDrm; } @Override - public ExoMediaDrm acquireExoMediaDrm(UUID uuid) { + public ExoMediaDrm acquireExoMediaDrm(UUID uuid) { exoMediaDrm.acquire(); return exoMediaDrm; } } - /** @see MediaDrm#EVENT_KEY_REQUIRED */ + /** Event indicating that keys need to be requested from the license server. */ @SuppressWarnings("InlinedApi") int EVENT_KEY_REQUIRED = MediaDrm.EVENT_KEY_REQUIRED; - /** - * @see MediaDrm#EVENT_KEY_EXPIRED - */ + /** Event indicating that keys have expired, and are no longer usable. */ @SuppressWarnings("InlinedApi") int EVENT_KEY_EXPIRED = MediaDrm.EVENT_KEY_EXPIRED; - /** - * @see MediaDrm#EVENT_PROVISION_REQUIRED - */ + /** Event indicating that a certificate needs to be requested from the provisioning server. */ @SuppressWarnings("InlinedApi") int EVENT_PROVISION_REQUIRED = MediaDrm.EVENT_PROVISION_REQUIRED; /** - * @see MediaDrm#KEY_TYPE_STREAMING + * Key request type for keys that will be used for online use. Streaming keys will not be saved to + * the device for subsequent use when the device is not connected to a network. */ @SuppressWarnings("InlinedApi") int KEY_TYPE_STREAMING = MediaDrm.KEY_TYPE_STREAMING; /** - * @see MediaDrm#KEY_TYPE_OFFLINE + * Key request type for keys that will be used for offline use. They will be saved to the device + * for subsequent use when the device is not connected to a network. */ @SuppressWarnings("InlinedApi") int KEY_TYPE_OFFLINE = MediaDrm.KEY_TYPE_OFFLINE; - /** - * @see MediaDrm#KEY_TYPE_RELEASE - */ + /** Key request type indicating that saved offline keys should be released. */ @SuppressWarnings("InlinedApi") int KEY_TYPE_RELEASE = MediaDrm.KEY_TYPE_RELEASE; /** - * @see android.media.MediaDrm.OnEventListener + * Called when a DRM event occurs. + * + * @see MediaDrm.OnEventListener */ - interface OnEventListener { + interface OnEventListener { /** * Called when an event occurs that requires the app to be notified * @@ -122,7 +133,7 @@ interface OnEventListener { * @param data Optional byte array of data that may be associated with the event. 
*/ void onEvent( - ExoMediaDrm mediaDrm, + ExoMediaDrm mediaDrm, @Nullable byte[] sessionId, int event, int extra, @@ -130,9 +141,11 @@ void onEvent( } /** - * @see android.media.MediaDrm.OnKeyStatusChangeListener + * Called when the keys in a DRM session change state. + * + * @see MediaDrm.OnKeyStatusChangeListener */ - interface OnKeyStatusChangeListener { + interface OnKeyStatusChangeListener { /** * Called when the keys in a session change status, such as when the license is renewed or * expires. @@ -143,101 +156,263 @@ interface OnKeyStatusChangeListener { * @param hasNewUsableKey Whether a new key became usable. */ void onKeyStatusChange( - ExoMediaDrm mediaDrm, + ExoMediaDrm mediaDrm, byte[] sessionId, List exoKeyInformation, boolean hasNewUsableKey); } - /** @see android.media.MediaDrm.KeyStatus */ + /** + * Called when a session expiration update occurs. + * + * @see MediaDrm.OnExpirationUpdateListener + */ + interface OnExpirationUpdateListener { + + /** + * Called when a session expiration update occurs, to inform the app about the change in + * expiration time. + * + * @param mediaDrm The {@link ExoMediaDrm} object on which the event occurred. + * @param sessionId The DRM session ID on which the event occurred + * @param expirationTimeMs The new expiration time for the keys in the session. The time is in + * milliseconds, relative to the Unix epoch. A time of 0 indicates that the keys never + * expire. + */ + void onExpirationUpdate(ExoMediaDrm mediaDrm, byte[] sessionId, long expirationTimeMs); + } + + /** + * Defines the status of a key. + * + * @see MediaDrm.KeyStatus + */ final class KeyStatus { private final int statusCode; private final byte[] keyId; + /** + * Creates an instance. + * + * @param statusCode The status code of the key, as defined by {@link + * MediaDrm.KeyStatus#getStatusCode()}. + * @param keyId The ID of the key. + */ public KeyStatus(int statusCode, byte[] keyId) { this.statusCode = statusCode; this.keyId = keyId; } + /** Returns the status of the key, as defined by {@link MediaDrm.KeyStatus#getStatusCode()}. */ public int getStatusCode() { return statusCode; } + /** Returns the ID of the key. */ public byte[] getKeyId() { return keyId; } - } - /** @see android.media.MediaDrm.KeyRequest */ + /** + * Contains data used to request keys from a license server. + * + * @see MediaDrm.KeyRequest + */ final class KeyRequest { + /** + * Key request types. One of {@link #REQUEST_TYPE_UNKNOWN}, {@link #REQUEST_TYPE_INITIAL}, + * {@link #REQUEST_TYPE_RENEWAL}, {@link #REQUEST_TYPE_RELEASE}, {@link #REQUEST_TYPE_NONE} or + * {@link #REQUEST_TYPE_UPDATE}. + */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + REQUEST_TYPE_UNKNOWN, + REQUEST_TYPE_INITIAL, + REQUEST_TYPE_RENEWAL, + REQUEST_TYPE_RELEASE, + REQUEST_TYPE_NONE, + REQUEST_TYPE_UPDATE, + }) + public @interface RequestType {} + + /** + * Value returned from {@link #getRequestType()} if the underlying key request does not specify + * a type. + */ + public static final int REQUEST_TYPE_UNKNOWN = Integer.MIN_VALUE; + + /** Key request type for an initial license request. */ + public static final int REQUEST_TYPE_INITIAL = MediaDrm.KeyRequest.REQUEST_TYPE_INITIAL; + /** Key request type for license renewal. */ + public static final int REQUEST_TYPE_RENEWAL = MediaDrm.KeyRequest.REQUEST_TYPE_RENEWAL; + /** Key request type for license release. 
*/ + public static final int REQUEST_TYPE_RELEASE = MediaDrm.KeyRequest.REQUEST_TYPE_RELEASE; + /** + * Key request type if keys are already loaded and available for use. No license request is + * necessary, and no key request data is returned. + */ + public static final int REQUEST_TYPE_NONE = MediaDrm.KeyRequest.REQUEST_TYPE_NONE; + /** + * Key request type if keys have been loaded, but an additional license request is needed to + * update their values. + */ + public static final int REQUEST_TYPE_UPDATE = MediaDrm.KeyRequest.REQUEST_TYPE_UPDATE; + private final byte[] data; private final String licenseServerUrl; + private final @RequestType int requestType; + /** + * Creates an instance with {@link #REQUEST_TYPE_UNKNOWN}. + * + * @param data The opaque key request data. + * @param licenseServerUrl The license server URL to which the request should be made. + */ public KeyRequest(byte[] data, String licenseServerUrl) { + this(data, licenseServerUrl, REQUEST_TYPE_UNKNOWN); + } + + /** + * Creates an instance. + * + * @param data The opaque key request data. + * @param licenseServerUrl The license server URL to which the request should be made. + * @param requestType The type of the request, or {@link #REQUEST_TYPE_UNKNOWN}. + */ + public KeyRequest(byte[] data, String licenseServerUrl, @RequestType int requestType) { this.data = data; this.licenseServerUrl = licenseServerUrl; + this.requestType = requestType; } + /** Returns the opaque key request data. */ public byte[] getData() { return data; } + /** Returns the URL of the license server to which the request should be made. */ public String getLicenseServerUrl() { return licenseServerUrl; } + /** + * Returns the type of the request, or {@link #REQUEST_TYPE_UNKNOWN} if the underlying key + * request does not specify a type. Note that when using a platform {@link MediaDrm} instance, + * key requests only specify a type on API levels 23 and above. + */ + public @RequestType int getRequestType() { + return requestType; + } } - /** @see android.media.MediaDrm.ProvisionRequest */ + /** + * Contains data to request a certificate from a provisioning server. + * + * @see MediaDrm.ProvisionRequest + */ final class ProvisionRequest { private final byte[] data; private final String defaultUrl; + /** + * Creates an instance. + * + * @param data The opaque provisioning request data. + * @param defaultUrl The default URL of the provisioning server to which the request can be + * made, or the empty string if not known. + */ public ProvisionRequest(byte[] data, String defaultUrl) { this.data = data; this.defaultUrl = defaultUrl; } + /** Returns the opaque provisioning request data. */ public byte[] getData() { return data; } + /** + * Returns the default URL of the provisioning server to which the request can be made, or the + * empty string if not known. + */ public String getDefaultUrl() { return defaultUrl; } - } /** + * Sets the listener for DRM events. + * + *
      This is an optional method, and some implementations may only support it on certain Android + * API levels. + * + * @param listener The listener to receive events, or {@code null} to stop receiving events. + * @throws UnsupportedOperationException if the implementation doesn't support this method. * @see MediaDrm#setOnEventListener(MediaDrm.OnEventListener) */ - void setOnEventListener(OnEventListener listener); + void setOnEventListener(@Nullable OnEventListener listener); /** + * Sets the listener for key status change events. + * + *
      This is an optional method, and some implementations may only support it on certain Android + * API levels. + * + * @param listener The listener to receive events, or {@code null} to stop receiving events. + * @throws UnsupportedOperationException if the implementation doesn't support this method. * @see MediaDrm#setOnKeyStatusChangeListener(MediaDrm.OnKeyStatusChangeListener, Handler) */ - void setOnKeyStatusChangeListener(OnKeyStatusChangeListener listener); + void setOnKeyStatusChangeListener(@Nullable OnKeyStatusChangeListener listener); /** - * @see MediaDrm#openSession() + * Sets the listener for session expiration events. + * + *
      This is an optional method, and some implementations may only support it on certain Android + * API levels. + * + * @param listener The listener to receive events, or {@code null} to stop receiving events. + * @throws UnsupportedOperationException if the implementation doesn't support this method. + * @see MediaDrm#setOnExpirationUpdateListener(MediaDrm.OnExpirationUpdateListener, Handler) + */ + void setOnExpirationUpdateListener(@Nullable OnExpirationUpdateListener listener); + + /** + * Opens a new DRM session. A session ID is returned. + * + * @return The session ID. + * @throws NotProvisionedException If provisioning is needed. + * @throws ResourceBusyException If required resources are in use. + * @throws MediaDrmException If the session could not be opened. */ byte[] openSession() throws MediaDrmException; /** - * @see MediaDrm#closeSession(byte[]) + * Closes a DRM session. + * + * @param sessionId The ID of the session to close. */ void closeSession(byte[] sessionId); + /** + * Sets the {@link PlayerId} of the player using a session. + * + * @param sessionId The ID of the session. + * @param playerId The {@link PlayerId} of the player using the session. + */ + default void setPlayerIdForSession(byte[] sessionId, PlayerId playerId) {} + /** * Generates a key request. * * @param scope If {@code keyType} is {@link #KEY_TYPE_STREAMING} or {@link #KEY_TYPE_OFFLINE}, - * the session id that the keys will be provided to. If {@code keyType} is {@link - * #KEY_TYPE_RELEASE}, the keySetId of the keys to release. + * the ID of the session that the keys will be provided to. If {@code keyType} is {@link + * #KEY_TYPE_RELEASE}, the {@code keySetId} of the keys to release. * @param schemeDatas If key type is {@link #KEY_TYPE_STREAMING} or {@link #KEY_TYPE_OFFLINE}, a * list of {@link SchemeData} instances extracted from the media. Null otherwise. * @param keyType The type of the request. Either {@link #KEY_TYPE_STREAMING} to acquire keys for @@ -257,26 +432,57 @@ KeyRequest getKeyRequest( @Nullable HashMap optionalParameters) throws NotProvisionedException; - /** @see MediaDrm#provideKeyResponse(byte[], byte[]) */ + /** + * Provides a key response for the last request to be generated using {@link #getKeyRequest}. + * + * @param scope If the request had type {@link #KEY_TYPE_STREAMING} or {@link #KEY_TYPE_OFFLINE}, + * the ID of the session to provide the keys to. If {@code keyType} is {@link + * #KEY_TYPE_RELEASE}, the {@code keySetId} of the keys being released. + * @param response The response data from the server. + * @return If the request had type {@link #KEY_TYPE_OFFLINE}, the {@code keySetId} for the offline + * keys. An empty byte array or {@code null} may be returned for other cases. + * @throws NotProvisionedException If the response indicates that provisioning is needed. + * @throws DeniedByServerException If the response indicates that the server rejected the request. + */ @Nullable byte[] provideKeyResponse(byte[] scope, byte[] response) throws NotProvisionedException, DeniedByServerException; /** - * @see MediaDrm#getProvisionRequest() + * Generates a provisioning request. + * + * @return The generated provisioning request. */ ProvisionRequest getProvisionRequest(); /** - * @see MediaDrm#provideProvisionResponse(byte[]) + * Provides a provisioning response for the last request to be generated using {@link + * #getProvisionRequest()}. + * + * @param response The response data from the server. 
+ * @throws DeniedByServerException If the response indicates that the server rejected the request. */ void provideProvisionResponse(byte[] response) throws DeniedByServerException; /** - * @see MediaDrm#queryKeyStatus(byte[]) + * Returns the key status for a given session, as {name, value} pairs. Since DRM license policies + * vary by vendor, the returned entries depend on the DRM plugin being used. Refer to your DRM + * provider's documentation for more information. + * + * @param sessionId The ID of the session being queried. + * @return The key status for the session. */ Map queryKeyStatus(byte[] sessionId); + /** + * Returns whether the given session requires use of a secure decoder for the given MIME type. + * Assumes a license policy that requires the highest level of security supported by the session. + * + * @param sessionId The ID of the session. + * @param mimeType The content MIME type to query. + */ + boolean requiresSecureDecoder(byte[] sessionId, String mimeType); + /** * Increments the reference count. When the caller no longer needs to use the instance, it must * call {@link #release()} to decrement the reference count. @@ -293,50 +499,72 @@ byte[] provideKeyResponse(byte[] scope, byte[] response) void release(); /** - * @see MediaDrm#restoreKeys(byte[], byte[]) + * Restores persisted offline keys into a session. + * + * @param sessionId The ID of the session into which the keys will be restored. + * @param keySetId The {@code keySetId} of the keys to restore, as provided by the call to {@link + * #provideKeyResponse} that persisted them. */ void restoreKeys(byte[] sessionId, byte[] keySetId); /** - * Returns drm metrics. May be null if unavailable. - * - * @see MediaDrm#getMetrics() + * Returns metrics data for this ExoMediaDrm instance, or {@code null} if metrics are unavailable. */ @Nullable PersistableBundle getMetrics(); /** - * @see MediaDrm#getPropertyString(String) + * Returns the value of a string property. For standard property names, see {@link + * MediaDrm#getPropertyString}. + * + * @param propertyName The property name. + * @return The property value. + * @throws IllegalArgumentException If the underlying DRM plugin does not support the property. */ String getPropertyString(String propertyName); /** - * @see MediaDrm#getPropertyByteArray(String) + * Returns the value of a byte array property. For standard property names, see {@link + * MediaDrm#getPropertyByteArray}. + * + * @param propertyName The property name. + * @return The property value. + * @throws IllegalArgumentException If the underlying DRM plugin does not support the property. */ byte[] getPropertyByteArray(String propertyName); /** - * @see MediaDrm#setPropertyString(String, String) + * Sets the value of a string property. + * + * @param propertyName The property name. + * @param value The value. + * @throws IllegalArgumentException If the underlying DRM plugin does not support the property. */ void setPropertyString(String propertyName, String value); /** - * @see MediaDrm#setPropertyByteArray(String, byte[]) + * Sets the value of a byte array property. + * + * @param propertyName The property name. + * @param value The value. + * @throws IllegalArgumentException If the underlying DRM plugin does not support the property. */ void setPropertyByteArray(String propertyName, byte[] value); /** - * @see android.media.MediaCrypto#MediaCrypto(UUID, byte[]) - * @param sessionId The DRM session ID. - * @return An object extends {@link ExoMediaCrypto}, using opaque crypto scheme specific data. 
- * @throws MediaCryptoException If the instance can't be created. + * Creates a {@link CryptoConfig} that can be passed to a compatible decoder to allow decryption + * of protected content using the specified session. + * + * @param sessionId The ID of the session. + * @return A {@link CryptoConfig} for the given session. + * @throws MediaCryptoException If a {@link CryptoConfig} could not be created. */ - T createMediaCrypto(byte[] sessionId) throws MediaCryptoException; + CryptoConfig createCryptoConfig(byte[] sessionId) throws MediaCryptoException; /** - * Returns the {@link ExoMediaCrypto} type created by {@link #createMediaCrypto(byte[])}, or null - * if this instance cannot create any {@link ExoMediaCrypto} instances. + * Returns the {@link C.CryptoType type} of {@link CryptoConfig} instances returned by {@link + * #createCryptoConfig}. */ - @Nullable - Class getExoMediaCryptoType(); + @C.CryptoType + int getCryptoType(); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/FrameworkCryptoConfig.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/FrameworkCryptoConfig.java new file mode 100644 index 0000000000..4d824bec22 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/FrameworkCryptoConfig.java @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.drm; + +import android.media.MediaCodec; +import android.media.MediaCrypto; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.decoder.CryptoConfig; +import com.google.android.exoplayer2.util.Util; +import java.util.UUID; + +/** + * A {@link CryptoConfig} for {@link C#CRYPTO_TYPE_FRAMEWORK}. Contains the necessary information to + * build or update a framework {@link MediaCrypto} that can be used to configure a {@link + * MediaCodec}. + */ +public final class FrameworkCryptoConfig implements CryptoConfig { + + /** + * Whether the device needs keys to have been loaded into the {@link DrmSession} before codec + * configuration. + */ + public static final boolean WORKAROUND_DEVICE_NEEDS_KEYS_TO_CONFIGURE_CODEC = + "Amazon".equals(Util.MANUFACTURER) + && ("AFTM".equals(Util.MODEL) // Fire TV Stick Gen 1 + || "AFTB".equals(Util.MODEL)); // Fire TV Gen 1 + + /** The DRM scheme UUID. */ + public final UUID uuid; + /** The DRM session id. */ + public final byte[] sessionId; + /** + * Whether to allow use of insecure decoder components even if the underlying platform says + * otherwise. + */ + public final boolean forceAllowInsecureDecoderComponents; + + /** + * @param uuid The DRM scheme UUID. + * @param sessionId The DRM session id. + * @param forceAllowInsecureDecoderComponents Whether to allow use of insecure decoder components + * even if the underlying platform says otherwise. 
+ */ + public FrameworkCryptoConfig( + UUID uuid, byte[] sessionId, boolean forceAllowInsecureDecoderComponents) { + this.uuid = uuid; + this.sessionId = sessionId; + this.forceAllowInsecureDecoderComponents = forceAllowInsecureDecoderComponents; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/FrameworkMediaCrypto.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/FrameworkMediaCrypto.java deleted file mode 100644 index c139b522e9..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/FrameworkMediaCrypto.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.drm; - -import android.media.MediaCrypto; -import com.google.android.exoplayer2.util.Util; -import java.util.UUID; - -/** - * An {@link ExoMediaCrypto} implementation that contains the necessary information to build or - * update a framework {@link MediaCrypto}. - */ -public final class FrameworkMediaCrypto implements ExoMediaCrypto { - - /** - * Whether the device needs keys to have been loaded into the {@link DrmSession} before codec - * configuration. - */ - public static final boolean WORKAROUND_DEVICE_NEEDS_KEYS_TO_CONFIGURE_CODEC = - "Amazon".equals(Util.MANUFACTURER) - && ("AFTM".equals(Util.MODEL) // Fire TV Stick Gen 1 - || "AFTB".equals(Util.MODEL)); // Fire TV Gen 1 - - /** The DRM scheme UUID. */ - public final UUID uuid; - /** The DRM session id. */ - public final byte[] sessionId; - /** - * Whether to allow use of insecure decoder components even if the underlying platform says - * otherwise. - */ - public final boolean forceAllowInsecureDecoderComponents; - - /** - * @param uuid The DRM scheme UUID. - * @param sessionId The DRM session id. - * @param forceAllowInsecureDecoderComponents Whether to allow use of insecure decoder components - * even if the underlying platform says otherwise. 
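// A minimal sketch of how a decoder-side caller typically consumes a FrameworkCryptoConfig:
// its uuid/sessionId fields back a framework MediaCrypto used to configure a MediaCodec.
// The wrapping method is illustrative only, not part of this change.
private static MediaCrypto buildMediaCrypto(FrameworkCryptoConfig cryptoConfig)
    throws MediaCryptoException {
  MediaCrypto mediaCrypto = new MediaCrypto(cryptoConfig.uuid, cryptoConfig.sessionId);
  // forceAllowInsecureDecoderComponents signals that an insecure decoder may be used even if
  // mediaCrypto.requiresSecureDecoderComponent(mimeType) returns true.
  return mediaCrypto;
}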
- */ - public FrameworkMediaCrypto( - UUID uuid, byte[] sessionId, boolean forceAllowInsecureDecoderComponents) { - this.uuid = uuid; - this.sessionId = sessionId; - this.forceAllowInsecureDecoderComponents = forceAllowInsecureDecoderComponents; - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/FrameworkMediaDrm.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/FrameworkMediaDrm.java index 56d1aeea4b..29e84d35cc 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/FrameworkMediaDrm.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/FrameworkMediaDrm.java @@ -15,19 +15,24 @@ */ package com.google.android.exoplayer2.drm; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + import android.annotation.SuppressLint; -import android.annotation.TargetApi; import android.media.DeniedByServerException; +import android.media.MediaCrypto; import android.media.MediaCryptoException; import android.media.MediaDrm; import android.media.MediaDrmException; import android.media.NotProvisionedException; import android.media.UnsupportedSchemeException; +import android.media.metrics.LogSessionId; import android.os.PersistableBundle; import android.text.TextUtils; +import androidx.annotation.DoNotInline; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.analytics.PlayerId; import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; import com.google.android.exoplayer2.extractor.mp4.PsshAtomUtil; import com.google.android.exoplayer2.util.Assertions; @@ -35,9 +40,9 @@ import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Charsets; import java.nio.ByteBuffer; import java.nio.ByteOrder; -import java.nio.charset.Charset; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -45,9 +50,8 @@ import java.util.UUID; /** An {@link ExoMediaDrm} implementation that wraps the framework {@link MediaDrm}. */ -@TargetApi(23) @RequiresApi(18) -public final class FrameworkMediaDrm implements ExoMediaDrm { +public final class FrameworkMediaDrm implements ExoMediaDrm { private static final String TAG = "FrameworkMediaDrm"; @@ -56,13 +60,13 @@ public final class FrameworkMediaDrm implements ExoMediaDrm DEFAULT_PROVIDER = + public static final Provider DEFAULT_PROVIDER = uuid -> { try { return newInstance(uuid); } catch (UnsupportedDrmException e) { Log.e(TAG, "Failed to instantiate a FrameworkMediaDrm for uuid: " + uuid + "."); - return new DummyExoMediaDrm<>(); + return new DummyExoMediaDrm(); } }; @@ -75,6 +79,15 @@ public final class FrameworkMediaDrm implements ExoMediaDrm listener) { + public void setOnEventListener(@Nullable ExoMediaDrm.OnEventListener listener) { mediaDrm.setOnEventListener( listener == null ? null @@ -115,9 +127,16 @@ public void setOnEventListener( listener.onEvent(FrameworkMediaDrm.this, sessionId, event, extra, data)); } + /** + * {@inheritDoc} + * + * @param listener The listener to receive events, or {@code null} to stop receiving events. + * @throws UnsupportedOperationException on API levels lower than 23. 
+ */ @Override + @RequiresApi(23) public void setOnKeyStatusChangeListener( - final ExoMediaDrm.OnKeyStatusChangeListener listener) { + @Nullable ExoMediaDrm.OnKeyStatusChangeListener listener) { if (Util.SDK_INT < 23) { throw new UnsupportedOperationException(); } @@ -133,7 +152,28 @@ public void setOnKeyStatusChangeListener( listener.onKeyStatusChange( FrameworkMediaDrm.this, sessionId, exoKeyInfo, hasNewUsableKey); }, - null); + /* handler= */ null); + } + + /** + * {@inheritDoc} + * + * @param listener The listener to receive events, or {@code null} to stop receiving events. + * @throws UnsupportedOperationException on API levels lower than 23. + */ + @Override + @RequiresApi(23) + public void setOnExpirationUpdateListener(@Nullable OnExpirationUpdateListener listener) { + if (Util.SDK_INT < 23) { + throw new UnsupportedOperationException(); + } + + mediaDrm.setOnExpirationUpdateListener( + listener == null + ? null + : (mediaDrm, sessionId, expirationTimeMs) -> + listener.onExpirationUpdate(FrameworkMediaDrm.this, sessionId, expirationTimeMs), + /* handler= */ null); } @Override @@ -146,6 +186,19 @@ public void closeSession(byte[] sessionId) { mediaDrm.closeSession(sessionId); } + @Override + public void setPlayerIdForSession(byte[] sessionId, PlayerId playerId) { + if (Util.SDK_INT >= 31) { + try { + Api31.setLogSessionIdOnMediaDrmSession(mediaDrm, sessionId, playerId); + } catch (UnsupportedOperationException e) { + Log.w(TAG, "setLogSessionId failed."); + } + } + } + + // Return values of MediaDrm.KeyRequest.getRequestType are equal to KeyRequest.RequestType. + @SuppressLint("WrongConstant") @Override public KeyRequest getKeyRequest( byte[] scope, @@ -165,22 +218,33 @@ public KeyRequest getKeyRequest( mediaDrm.getKeyRequest(scope, initData, mimeType, keyType, optionalParameters); byte[] requestData = adjustRequestData(uuid, request.getData()); - - String licenseServerUrl = request.getDefaultUrl(); - if (MOCK_LA_URL_VALUE.equals(licenseServerUrl)) { - licenseServerUrl = ""; - } + String licenseServerUrl = adjustLicenseServerUrl(request.getDefaultUrl()); if (TextUtils.isEmpty(licenseServerUrl) && schemeData != null && !TextUtils.isEmpty(schemeData.licenseServerUrl)) { licenseServerUrl = schemeData.licenseServerUrl; } - return new KeyRequest(requestData, licenseServerUrl); + @KeyRequest.RequestType + int requestType = + Util.SDK_INT >= 23 ? request.getRequestType() : KeyRequest.REQUEST_TYPE_UNKNOWN; + + return new KeyRequest(requestData, licenseServerUrl, requestType); + } + + private static String adjustLicenseServerUrl(String licenseServerUrl) { + if (MOCK_LA_URL.equals(licenseServerUrl)) { + return ""; + } else if (Util.SDK_INT == 33 && "https://default.url".equals(licenseServerUrl)) { + // Work around b/247808112 + return ""; + } else { + return licenseServerUrl; + } } - @Nullable @Override + @Nullable public byte[] provideKeyResponse(byte[] scope, byte[] response) throws NotProvisionedException, DeniedByServerException { if (C.CLEARKEY_UUID.equals(uuid)) { @@ -206,6 +270,26 @@ public Map queryKeyStatus(byte[] sessionId) { return mediaDrm.queryKeyStatus(sessionId); } + @Override + public boolean requiresSecureDecoder(byte[] sessionId, String mimeType) { + if (Util.SDK_INT >= 31) { + return Api31.requiresSecureDecoder(mediaDrm, mimeType); + } + + MediaCrypto mediaCrypto; + try { + mediaCrypto = new MediaCrypto(uuid, sessionId); + } catch (MediaCryptoException e) { + // This shouldn't happen, but if it does then assume that a secure decoder may be required. 
+ return true; + } + try { + return mediaCrypto.requiresSecureDecoderComponent(mimeType); + } finally { + mediaCrypto.release(); + } + } + @Override public synchronized void acquire() { Assertions.checkState(referenceCount > 0); @@ -226,7 +310,6 @@ public void restoreKeys(byte[] sessionId, byte[] keySetId) { @Override @Nullable - @TargetApi(28) public PersistableBundle getMetrics() { if (Util.SDK_INT < 28) { return null; @@ -255,18 +338,20 @@ public void setPropertyByteArray(String propertyName, byte[] value) { } @Override - public FrameworkMediaCrypto createMediaCrypto(byte[] initData) throws MediaCryptoException { + public FrameworkCryptoConfig createCryptoConfig(byte[] sessionId) throws MediaCryptoException { // Work around a bug prior to Lollipop where L1 Widevine forced into L3 mode would still // indicate that it required secure video decoders [Internal ref: b/11428937]. - boolean forceAllowInsecureDecoderComponents = Util.SDK_INT < 21 - && C.WIDEVINE_UUID.equals(uuid) && "L3".equals(getPropertyString("securityLevel")); - return new FrameworkMediaCrypto( - adjustUuid(uuid), initData, forceAllowInsecureDecoderComponents); + boolean forceAllowInsecureDecoderComponents = + Util.SDK_INT < 21 + && C.WIDEVINE_UUID.equals(uuid) + && "L3".equals(getPropertyString("securityLevel")); + return new FrameworkCryptoConfig( + adjustUuid(uuid), sessionId, forceAllowInsecureDecoderComponents); } @Override - public Class getExoMediaCryptoType() { - return FrameworkMediaCrypto.class; + public @C.CryptoType int getCryptoType() { + return C.CRYPTO_TYPE_FRAMEWORK; } private static SchemeData getSchemeData(UUID uuid, List schemeDatas) { @@ -282,7 +367,7 @@ private static SchemeData getSchemeData(UUID uuid, List schemeDatas) boolean canConcatenateData = true; for (int i = 0; i < schemeDatas.size(); i++) { SchemeData schemeData = schemeDatas.get(i); - byte[] schemeDataData = Util.castNonNull(schemeData.data); + byte[] schemeDataData = Assertions.checkNotNull(schemeData.data); if (Util.areEqual(schemeData.mimeType, firstSchemeData.mimeType) && Util.areEqual(schemeData.licenseServerUrl, firstSchemeData.licenseServerUrl) && PsshAtomUtil.isPsshAtom(schemeDataData)) { @@ -297,7 +382,7 @@ private static SchemeData getSchemeData(UUID uuid, List schemeDatas) int concatenatedDataPosition = 0; for (int i = 0; i < schemeDatas.size(); i++) { SchemeData schemeData = schemeDatas.get(i); - byte[] schemeDataData = Util.castNonNull(schemeData.data); + byte[] schemeDataData = Assertions.checkNotNull(schemeData.data); int schemeDataLength = schemeDataData.length; System.arraycopy( schemeDataData, 0, concatenatedData, concatenatedDataPosition, schemeDataLength); @@ -311,7 +396,7 @@ private static SchemeData getSchemeData(UUID uuid, List schemeDatas) // the first V0 box. 
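// Illustrative wiring of FrameworkMediaDrm into a session manager; drmCallback stands in for
// whatever MediaDrmCallback (for example an HttpMediaDrmCallback) the app actually uses:
DefaultDrmSessionManager drmSessionManager =
    new DefaultDrmSessionManager.Builder()
        .setUuidAndExoMediaDrmProvider(C.WIDEVINE_UUID, FrameworkMediaDrm.DEFAULT_PROVIDER)
        .build(drmCallback);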
for (int i = 0; i < schemeDatas.size(); i++) { SchemeData schemeData = schemeDatas.get(i); - int version = PsshAtomUtil.parseVersion(Util.castNonNull(schemeData.data)); + int version = PsshAtomUtil.parseVersion(Assertions.checkNotNull(schemeData.data)); if (Util.SDK_INT < 23 && version == 0) { return schemeData; } else if (Util.SDK_INT >= 23 && version == 1) { @@ -381,7 +466,6 @@ private static byte[] adjustRequestData(UUID uuid, byte[] requestData) { return requestData; } - @SuppressLint("WrongConstant") // Suppress spurious lint error [Internal ref: b/32137960] private static void forceWidevineL3(MediaDrm mediaDrm) { mediaDrm.setPropertyString("securityLevel", "L3"); } @@ -413,7 +497,7 @@ private static byte[] addLaUrlAttributeIfMissing(byte[] data) { return data; } int recordLength = byteArray.readLittleEndianShort(); - String xml = byteArray.readString(recordLength, Charset.forName(C.UTF16LE_NAME)); + String xml = byteArray.readString(recordLength, Charsets.UTF_16LE); if (xml.contains("")) { // LA_URL already present. Do nothing. return data; @@ -434,7 +518,28 @@ private static byte[] addLaUrlAttributeIfMissing(byte[] data) { newData.putShort((short) objectRecordCount); newData.putShort((short) recordType); newData.putShort((short) (xmlWithMockLaUrl.length() * UTF_16_BYTES_PER_CHARACTER)); - newData.put(xmlWithMockLaUrl.getBytes(Charset.forName(C.UTF16LE_NAME))); + newData.put(xmlWithMockLaUrl.getBytes(Charsets.UTF_16LE)); return newData.array(); } + + @RequiresApi(31) + private static class Api31 { + private Api31() {} + + @DoNotInline + public static boolean requiresSecureDecoder(MediaDrm mediaDrm, String mimeType) { + return mediaDrm.requiresSecureDecoder(mimeType); + } + + @DoNotInline + public static void setLogSessionIdOnMediaDrmSession( + MediaDrm mediaDrm, byte[] drmSessionId, PlayerId playerId) { + LogSessionId logSessionId = playerId.getLogSessionId(); + if (!logSessionId.equals(LogSessionId.LOG_SESSION_ID_NONE)) { + MediaDrm.PlaybackComponent playbackComponent = + checkNotNull(mediaDrm.getPlaybackComponent(drmSessionId)); + playbackComponent.setLogSessionId(logSessionId); + } + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/HttpMediaDrmCallback.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/HttpMediaDrmCallback.java index 19c32daf61..4af1e6bd14 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/HttpMediaDrmCallback.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/HttpMediaDrmCallback.java @@ -15,57 +15,67 @@ */ package com.google.android.exoplayer2.drm; -import android.annotation.TargetApi; import android.net.Uri; import android.text.TextUtils; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.drm.ExoMediaDrm.KeyRequest; import com.google.android.exoplayer2.drm.ExoMediaDrm.ProvisionRequest; +import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSourceInputStream; import com.google.android.exoplayer2.upstream.DataSpec; -import com.google.android.exoplayer2.upstream.HttpDataSource; import com.google.android.exoplayer2.upstream.HttpDataSource.InvalidResponseCodeException; +import com.google.android.exoplayer2.upstream.StatsDataSource; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; -import java.io.IOException; +import com.google.common.collect.ImmutableMap; +import java.util.Collections; import 
java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; -/** - * A {@link MediaDrmCallback} that makes requests using {@link HttpDataSource} instances. - */ -@TargetApi(18) +/** A {@link MediaDrmCallback} that makes requests using {@link DataSource} instances. */ public final class HttpMediaDrmCallback implements MediaDrmCallback { private static final int MAX_MANUAL_REDIRECTS = 5; - private final HttpDataSource.Factory dataSourceFactory; - private final String defaultLicenseUrl; + private final DataSource.Factory dataSourceFactory; + @Nullable private final String defaultLicenseUrl; private final boolean forceDefaultLicenseUrl; private final Map keyRequestProperties; /** + * Constructs an instance. + * * @param defaultLicenseUrl The default license URL. Used for key requests that do not specify - * their own license URL. - * @param dataSourceFactory A factory from which to obtain {@link HttpDataSource} instances. + * their own license URL. May be {@code null} if it's known that all key requests will specify + * their own URLs. + * @param dataSourceFactory A factory from which to obtain {@link DataSource} instances. This will + * usually be an HTTP-based {@link DataSource}. */ - public HttpMediaDrmCallback(String defaultLicenseUrl, HttpDataSource.Factory dataSourceFactory) { - this(defaultLicenseUrl, false, dataSourceFactory); + public HttpMediaDrmCallback( + @Nullable String defaultLicenseUrl, DataSource.Factory dataSourceFactory) { + this(defaultLicenseUrl, /* forceDefaultLicenseUrl= */ false, dataSourceFactory); } /** + * Constructs an instance. + * * @param defaultLicenseUrl The default license URL. Used for key requests that do not specify - * their own license URL, or for all key requests if {@code forceDefaultLicenseUrl} is - * set to true. - * @param forceDefaultLicenseUrl Whether to use {@code defaultLicenseUrl} for key requests that - * include their own license URL. - * @param dataSourceFactory A factory from which to obtain {@link HttpDataSource} instances. + * their own license URL, or for all key requests if {@code forceDefaultLicenseUrl} is set to + * true. May be {@code null} if {@code forceDefaultLicenseUrl} is {@code false} and if it's + * known that all key requests will specify their own URLs. + * @param forceDefaultLicenseUrl Whether to force use of {@code defaultLicenseUrl} for key + * requests that include their own license URL. + * @param dataSourceFactory A factory from which to obtain {@link DataSource} instances. This will + * * usually be an HTTP-based {@link DataSource}. */ - public HttpMediaDrmCallback(String defaultLicenseUrl, boolean forceDefaultLicenseUrl, - HttpDataSource.Factory dataSourceFactory) { + public HttpMediaDrmCallback( + @Nullable String defaultLicenseUrl, + boolean forceDefaultLicenseUrl, + DataSource.Factory dataSourceFactory) { + Assertions.checkArgument(!(forceDefaultLicenseUrl && TextUtils.isEmpty(defaultLicenseUrl))); this.dataSourceFactory = dataSourceFactory; this.defaultLicenseUrl = defaultLicenseUrl; this.forceDefaultLicenseUrl = forceDefaultLicenseUrl; @@ -98,9 +108,7 @@ public void clearKeyRequestProperty(String name) { } } - /** - * Clears all headers for key requests made by the callback. - */ + /** Clears all headers for key requests made by the callback. 
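// Illustrative construction of this callback; the license URL, header name/value and data
// source factory below are placeholders:
HttpMediaDrmCallback drmCallback =
    new HttpMediaDrmCallback(
        "https://license.example.com/getlicense",
        /* forceDefaultLicenseUrl= */ false,
        new DefaultHttpDataSource.Factory());
drmCallback.setKeyRequestProperty("X-Auth-Token", /* value= */ "example-token");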
*/ public void clearAllKeyRequestProperties() { synchronized (keyRequestProperties) { keyRequestProperties.clear(); @@ -108,26 +116,41 @@ public void clearAllKeyRequestProperties() { } @Override - public byte[] executeProvisionRequest(UUID uuid, ProvisionRequest request) throws IOException { + public byte[] executeProvisionRequest(UUID uuid, ProvisionRequest request) + throws MediaDrmCallbackException { String url = request.getDefaultUrl() + "&signedRequest=" + Util.fromUtf8Bytes(request.getData()); - return executePost(dataSourceFactory, url, /* httpBody= */ null, /* requestProperties= */ null); + return executePost( + dataSourceFactory, + url, + /* httpBody= */ null, + /* requestProperties= */ Collections.emptyMap()); } @Override - public byte[] executeKeyRequest(UUID uuid, KeyRequest request) throws Exception { + public byte[] executeKeyRequest(UUID uuid, KeyRequest request) throws MediaDrmCallbackException { String url = request.getLicenseServerUrl(); if (forceDefaultLicenseUrl || TextUtils.isEmpty(url)) { url = defaultLicenseUrl; } + if (TextUtils.isEmpty(url)) { + throw new MediaDrmCallbackException( + new DataSpec.Builder().setUri(Uri.EMPTY).build(), + Uri.EMPTY, + /* responseHeaders= */ ImmutableMap.of(), + /* bytesLoaded= */ 0, + /* cause= */ new IllegalStateException("No license URL")); + } Map requestProperties = new HashMap<>(); // Add standard request properties for supported schemes. - String contentType = C.PLAYREADY_UUID.equals(uuid) ? "text/xml" - : (C.CLEARKEY_UUID.equals(uuid) ? "application/json" : "application/octet-stream"); + String contentType = + C.PLAYREADY_UUID.equals(uuid) + ? "text/xml" + : (C.CLEARKEY_UUID.equals(uuid) ? "application/json" : "application/octet-stream"); requestProperties.put("Content-Type", contentType); if (C.PLAYREADY_UUID.equals(uuid)) { - requestProperties.put("SOAPAction", - "http://schemas.microsoft.com/DRM/2007/03/protocols/AcquireLicense"); + requestProperties.put( + "SOAPAction", "http://schemas.microsoft.com/DRM/2007/03/protocols/AcquireLicense"); } // Add additional request properties. synchronized (keyRequestProperties) { @@ -137,59 +160,66 @@ public byte[] executeKeyRequest(UUID uuid, KeyRequest request) throws Exception } private static byte[] executePost( - HttpDataSource.Factory dataSourceFactory, + DataSource.Factory dataSourceFactory, String url, @Nullable byte[] httpBody, - @Nullable Map requestProperties) - throws IOException { - HttpDataSource dataSource = dataSourceFactory.createDataSource(); - if (requestProperties != null) { - for (Map.Entry requestProperty : requestProperties.entrySet()) { - dataSource.setRequestProperty(requestProperty.getKey(), requestProperty.getValue()); - } - } - + Map requestProperties) + throws MediaDrmCallbackException { + StatsDataSource dataSource = new StatsDataSource(dataSourceFactory.createDataSource()); int manualRedirectCount = 0; - while (true) { - DataSpec dataSpec = - new DataSpec( - Uri.parse(url), - DataSpec.HTTP_METHOD_POST, - httpBody, - /* absoluteStreamPosition= */ 0, - /* position= */ 0, - /* length= */ C.LENGTH_UNSET, - /* key= */ null, - DataSpec.FLAG_ALLOW_GZIP); - DataSourceInputStream inputStream = new DataSourceInputStream(dataSource, dataSpec); - try { - return Util.toByteArray(inputStream); - } catch (InvalidResponseCodeException e) { - // For POST requests, the underlying network stack will not normally follow 307 or 308 - // redirects automatically. Do so manually here. 
- boolean manuallyRedirect = - (e.responseCode == 307 || e.responseCode == 308) - && manualRedirectCount++ < MAX_MANUAL_REDIRECTS; - String redirectUrl = manuallyRedirect ? getRedirectUrl(e) : null; - if (redirectUrl == null) { - throw e; + DataSpec dataSpec = + new DataSpec.Builder() + .setUri(url) + .setHttpRequestHeaders(requestProperties) + .setHttpMethod(DataSpec.HTTP_METHOD_POST) + .setHttpBody(httpBody) + .setFlags(DataSpec.FLAG_ALLOW_GZIP) + .build(); + DataSpec originalDataSpec = dataSpec; + try { + while (true) { + DataSourceInputStream inputStream = new DataSourceInputStream(dataSource, dataSpec); + try { + return Util.toByteArray(inputStream); + } catch (InvalidResponseCodeException e) { + @Nullable String redirectUrl = getRedirectUrl(e, manualRedirectCount); + if (redirectUrl == null) { + throw e; + } + manualRedirectCount++; + dataSpec = dataSpec.buildUpon().setUri(redirectUrl).build(); + } finally { + Util.closeQuietly(inputStream); } - url = redirectUrl; - } finally { - Util.closeQuietly(inputStream); } + } catch (Exception e) { + throw new MediaDrmCallbackException( + originalDataSpec, + Assertions.checkNotNull(dataSource.getLastOpenedUri()), + dataSource.getResponseHeaders(), + dataSource.getBytesRead(), + /* cause= */ e); } } - private static @Nullable String getRedirectUrl(InvalidResponseCodeException exception) { + @Nullable + private static String getRedirectUrl( + InvalidResponseCodeException exception, int manualRedirectCount) { + // For POST requests, the underlying network stack will not normally follow 307 or 308 + // redirects automatically. Do so manually here. + boolean manuallyRedirect = + (exception.responseCode == 307 || exception.responseCode == 308) + && manualRedirectCount < MAX_MANUAL_REDIRECTS; + if (!manuallyRedirect) { + return null; + } Map> headerFields = exception.headerFields; if (headerFields != null) { - List locationHeaders = headerFields.get("Location"); + @Nullable List locationHeaders = headerFields.get("Location"); if (locationHeaders != null && !locationHeaders.isEmpty()) { return locationHeaders.get(0); } } return null; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/KeysExpiredException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/KeysExpiredException.java index e5e1089fa9..b203a58c64 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/KeysExpiredException.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/KeysExpiredException.java @@ -15,8 +15,5 @@ */ package com.google.android.exoplayer2.drm; -/** - * Thrown when the drm keys loaded into an open session expire. - */ -public final class KeysExpiredException extends Exception { -} +/** Thrown when the drm keys loaded into an open session expire. 
*/ +public final class KeysExpiredException extends Exception {} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/LocalMediaDrmCallback.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/LocalMediaDrmCallback.java index 7b9aeca30a..71835e63c3 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/LocalMediaDrmCallback.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/LocalMediaDrmCallback.java @@ -18,7 +18,6 @@ import com.google.android.exoplayer2.drm.ExoMediaDrm.KeyRequest; import com.google.android.exoplayer2.drm.ExoMediaDrm.ProvisionRequest; import com.google.android.exoplayer2.util.Assertions; -import java.io.IOException; import java.util.UUID; /** @@ -39,13 +38,12 @@ public LocalMediaDrmCallback(byte[] keyResponse) { } @Override - public byte[] executeProvisionRequest(UUID uuid, ProvisionRequest request) throws IOException { + public byte[] executeProvisionRequest(UUID uuid, ProvisionRequest request) { throw new UnsupportedOperationException(); } @Override - public byte[] executeKeyRequest(UUID uuid, KeyRequest request) throws Exception { + public byte[] executeKeyRequest(UUID uuid, KeyRequest request) { return keyResponse; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/MediaDrmCallback.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/MediaDrmCallback.java index 5b0ed04f81..d367a773c5 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/MediaDrmCallback.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/MediaDrmCallback.java @@ -19,9 +19,7 @@ import com.google.android.exoplayer2.drm.ExoMediaDrm.ProvisionRequest; import java.util.UUID; -/** - * Performs {@link ExoMediaDrm} key and provisioning requests. - */ +/** Performs {@link ExoMediaDrm} key and provisioning requests. */ public interface MediaDrmCallback { /** @@ -30,9 +28,10 @@ public interface MediaDrmCallback { * @param uuid The UUID of the content protection scheme. * @param request The request. * @return The response data. - * @throws Exception If an error occurred executing the request. + * @throws MediaDrmCallbackException If an error occurred executing the request. */ - byte[] executeProvisionRequest(UUID uuid, ProvisionRequest request) throws Exception; + byte[] executeProvisionRequest(UUID uuid, ProvisionRequest request) + throws MediaDrmCallbackException; /** * Executes a key request. @@ -40,7 +39,7 @@ public interface MediaDrmCallback { * @param uuid The UUID of the content protection scheme. * @param request The request. * @return The response data. - * @throws Exception If an error occurred executing the request. + * @throws MediaDrmCallbackException If an error occurred executing the request. */ - byte[] executeKeyRequest(UUID uuid, KeyRequest request) throws Exception; + byte[] executeKeyRequest(UUID uuid, KeyRequest request) throws MediaDrmCallbackException; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/MediaDrmCallbackException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/MediaDrmCallbackException.java new file mode 100644 index 0000000000..37b2e03504 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/MediaDrmCallbackException.java @@ -0,0 +1,63 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.drm; + +import android.net.Uri; +import com.google.android.exoplayer2.upstream.DataSpec; +import java.io.IOException; +import java.util.List; +import java.util.Map; + +/** + * Thrown when an error occurs while executing a DRM {@link MediaDrmCallback#executeKeyRequest key} + * or {@link MediaDrmCallback#executeProvisionRequest provisioning} request. + */ +public final class MediaDrmCallbackException extends IOException { + + /** The {@link DataSpec} associated with the request. */ + public final DataSpec dataSpec; + /** + * The {@link Uri} after redirections, or {@link #dataSpec dataSpec.uri} if no redirection + * occurred. + */ + public final Uri uriAfterRedirects; + /** The HTTP request headers included in the response. */ + public final Map> responseHeaders; + /** The number of bytes obtained from the server. */ + public final long bytesLoaded; + + /** + * Creates a new instance with the given values. + * + * @param dataSpec See {@link #dataSpec}. + * @param uriAfterRedirects See {@link #uriAfterRedirects}. + * @param responseHeaders See {@link #responseHeaders}. + * @param bytesLoaded See {@link #bytesLoaded}. + * @param cause The cause of the exception. + */ + public MediaDrmCallbackException( + DataSpec dataSpec, + Uri uriAfterRedirects, + Map> responseHeaders, + long bytesLoaded, + Throwable cause) { + super(cause); + this.dataSpec = dataSpec; + this.uriAfterRedirects = uriAfterRedirects; + this.responseHeaders = responseHeaders; + this.bytesLoaded = bytesLoaded; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/OfflineLicenseHelper.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/OfflineLicenseHelper.java index 93a7585f89..7bc885a383 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/OfflineLicenseHelper.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/OfflineLicenseHelper.java @@ -15,7 +15,6 @@ */ package com.google.android.exoplayer2.drm; -import android.annotation.TargetApi; import android.media.MediaDrm; import android.os.ConditionVariable; import android.os.Handler; @@ -23,26 +22,27 @@ import android.util.Pair; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; -import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.analytics.PlayerId; import com.google.android.exoplayer2.drm.DefaultDrmSessionManager.Mode; import com.google.android.exoplayer2.drm.DrmSession.DrmSessionException; -import com.google.android.exoplayer2.upstream.HttpDataSource; -import com.google.android.exoplayer2.upstream.HttpDataSource.Factory; +import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; +import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.util.Assertions; -import java.util.Collections; import java.util.Map; import java.util.UUID; /** Helper class to download, renew and release offline licenses. 
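// Illustrative handling of MediaDrmCallbackException, the checked exception both callback
// methods now declare; callback, uuid and request are placeholders:
try {
  byte[] response = callback.executeKeyRequest(uuid, request);
  // ... feed response to ExoMediaDrm.provideKeyResponse ...
} catch (MediaDrmCallbackException e) {
  // dataSpec, uriAfterRedirects, responseHeaders and bytesLoaded describe the failed request
  // and help decide whether to retry or surface the error.
  Log.w("DrmCallback", "Key request to " + e.uriAfterRedirects + " failed", e);
}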
*/ -@TargetApi(18) @RequiresApi(18) -public final class OfflineLicenseHelper { +public final class OfflineLicenseHelper { - private static final DrmInitData DUMMY_DRM_INIT_DATA = new DrmInitData(); + private static final Format FORMAT_WITH_EMPTY_DRM_INIT_DATA = + new Format.Builder().setDrmInitData(new DrmInitData()).build(); private final ConditionVariable conditionVariable; - private final DefaultDrmSessionManager drmSessionManager; + private final DefaultDrmSessionManager drmSessionManager; private final HandlerThread handlerThread; + private final DrmSessionEventListener.EventDispatcher eventDispatcher; /** * Instantiates a new instance which uses Widevine CDM. Call {@link #release()} when the instance @@ -50,15 +50,17 @@ public final class OfflineLicenseHelper { * * @param defaultLicenseUrl The default license URL. Used for key requests that do not specify * their own license URL. - * @param httpDataSourceFactory A factory from which to obtain {@link HttpDataSource} instances. + * @param dataSourceFactory A factory from which to obtain {@link DataSource} instances. + * @param eventDispatcher A {@link DrmSessionEventListener.EventDispatcher} used to distribute + * DRM-related events. * @return A new instance which uses Widevine CDM. - * @throws UnsupportedDrmException If the Widevine DRM scheme is unsupported or cannot be - * instantiated. */ - public static OfflineLicenseHelper newWidevineInstance( - String defaultLicenseUrl, Factory httpDataSourceFactory) - throws UnsupportedDrmException { - return newWidevineInstance(defaultLicenseUrl, false, httpDataSourceFactory, null); + public static OfflineLicenseHelper newWidevineInstance( + String defaultLicenseUrl, + DataSource.Factory dataSourceFactory, + DrmSessionEventListener.EventDispatcher eventDispatcher) { + return newWidevineInstance( + defaultLicenseUrl, /* forceDefaultLicenseUrl= */ false, dataSourceFactory, eventDispatcher); } /** @@ -69,16 +71,22 @@ public static OfflineLicenseHelper newWidevineInstance( * their own license URL. * @param forceDefaultLicenseUrl Whether to use {@code defaultLicenseUrl} for key requests that * include their own license URL. - * @param httpDataSourceFactory A factory from which to obtain {@link HttpDataSource} instances. + * @param dataSourceFactory A factory from which to obtain {@link DataSource} instances. + * @param eventDispatcher A {@link DrmSessionEventListener.EventDispatcher} used to distribute + * DRM-related events. * @return A new instance which uses Widevine CDM. - * @throws UnsupportedDrmException If the Widevine DRM scheme is unsupported or cannot be - * instantiated. */ - public static OfflineLicenseHelper newWidevineInstance( - String defaultLicenseUrl, boolean forceDefaultLicenseUrl, Factory httpDataSourceFactory) - throws UnsupportedDrmException { - return newWidevineInstance(defaultLicenseUrl, forceDefaultLicenseUrl, httpDataSourceFactory, - null); + public static OfflineLicenseHelper newWidevineInstance( + String defaultLicenseUrl, + boolean forceDefaultLicenseUrl, + DataSource.Factory dataSourceFactory, + DrmSessionEventListener.EventDispatcher eventDispatcher) { + return newWidevineInstance( + defaultLicenseUrl, + forceDefaultLicenseUrl, + dataSourceFactory, + /* optionalKeyRequestParameters= */ null, + eventDispatcher); } /** @@ -91,87 +99,97 @@ public static OfflineLicenseHelper newWidevineInstance( * include their own license URL. * @param optionalKeyRequestParameters An optional map of parameters to pass as the last argument * to {@link MediaDrm#getKeyRequest}. 
May be null. + * @param eventDispatcher A {@link DrmSessionEventListener.EventDispatcher} used to distribute + * DRM-related events. * @return A new instance which uses Widevine CDM. - * @throws UnsupportedDrmException If the Widevine DRM scheme is unsupported or cannot be - * instantiated. * @see DefaultDrmSessionManager.Builder */ - public static OfflineLicenseHelper newWidevineInstance( + public static OfflineLicenseHelper newWidevineInstance( String defaultLicenseUrl, boolean forceDefaultLicenseUrl, - Factory httpDataSourceFactory, - @Nullable Map optionalKeyRequestParameters) - throws UnsupportedDrmException { - return new OfflineLicenseHelper<>( - C.WIDEVINE_UUID, - FrameworkMediaDrm.DEFAULT_PROVIDER, - new HttpMediaDrmCallback(defaultLicenseUrl, forceDefaultLicenseUrl, httpDataSourceFactory), - optionalKeyRequestParameters); + DataSource.Factory dataSourceFactory, + @Nullable Map optionalKeyRequestParameters, + DrmSessionEventListener.EventDispatcher eventDispatcher) { + return new OfflineLicenseHelper( + new DefaultDrmSessionManager.Builder() + .setKeyRequestParameters(optionalKeyRequestParameters) + .build( + new HttpMediaDrmCallback( + defaultLicenseUrl, forceDefaultLicenseUrl, dataSourceFactory)), + eventDispatcher); } /** - * Constructs an instance. Call {@link #release()} when the instance is no longer required. - * - * @param uuid The UUID of the drm scheme. - * @param mediaDrmProvider A {@link ExoMediaDrm.Provider}. - * @param callback Performs key and provisioning requests. - * @param optionalKeyRequestParameters An optional map of parameters to pass as the last argument - * to {@link MediaDrm#getKeyRequest}. May be null. - * @see DefaultDrmSessionManager.Builder + * @deprecated Use {@link #OfflineLicenseHelper(DefaultDrmSessionManager, + * DrmSessionEventListener.EventDispatcher)} instead. */ - @SuppressWarnings("unchecked") + @Deprecated public OfflineLicenseHelper( UUID uuid, - ExoMediaDrm.Provider mediaDrmProvider, + ExoMediaDrm.Provider mediaDrmProvider, MediaDrmCallback callback, - @Nullable Map optionalKeyRequestParameters) { - handlerThread = new HandlerThread("OfflineLicenseHelper"); + @Nullable Map optionalKeyRequestParameters, + DrmSessionEventListener.EventDispatcher eventDispatcher) { + this( + new DefaultDrmSessionManager.Builder() + .setUuidAndExoMediaDrmProvider(uuid, mediaDrmProvider) + .setKeyRequestParameters(optionalKeyRequestParameters) + .build(callback), + eventDispatcher); + } + + /** + * Constructs an instance. Call {@link #release()} when the instance is no longer required. + * + * @param defaultDrmSessionManager The {@link DefaultDrmSessionManager} used to download licenses. + * @param eventDispatcher A {@link DrmSessionEventListener.EventDispatcher} used to distribute + * DRM-related events. 
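// Illustrative construction via the updated factory method; the license URL and data source
// factory are placeholders:
OfflineLicenseHelper licenseHelper =
    OfflineLicenseHelper.newWidevineInstance(
        "https://license.example.com/getlicense",
        new DefaultHttpDataSource.Factory(),
        new DrmSessionEventListener.EventDispatcher());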
+ */ + public OfflineLicenseHelper( + DefaultDrmSessionManager defaultDrmSessionManager, + DrmSessionEventListener.EventDispatcher eventDispatcher) { + this.drmSessionManager = defaultDrmSessionManager; + this.eventDispatcher = eventDispatcher; + handlerThread = new HandlerThread("ExoPlayer:OfflineLicenseHelper"); handlerThread.start(); conditionVariable = new ConditionVariable(); - DefaultDrmSessionEventListener eventListener = - new DefaultDrmSessionEventListener() { + DrmSessionEventListener eventListener = + new DrmSessionEventListener() { @Override - public void onDrmKeysLoaded() { + public void onDrmKeysLoaded(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { conditionVariable.open(); } @Override - public void onDrmSessionManagerError(Exception e) { + public void onDrmSessionManagerError( + int windowIndex, @Nullable MediaPeriodId mediaPeriodId, Exception e) { conditionVariable.open(); } @Override - public void onDrmKeysRestored() { + public void onDrmKeysRestored(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { conditionVariable.open(); } @Override - public void onDrmKeysRemoved() { + public void onDrmKeysRemoved(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { conditionVariable.open(); } }; - if (optionalKeyRequestParameters == null) { - optionalKeyRequestParameters = Collections.emptyMap(); - } - drmSessionManager = - (DefaultDrmSessionManager) - new DefaultDrmSessionManager.Builder() - .setUuidAndExoMediaDrmProvider(uuid, mediaDrmProvider) - .setKeyRequestParameters(optionalKeyRequestParameters) - .build(callback); - drmSessionManager.addListener(new Handler(handlerThread.getLooper()), eventListener); + eventDispatcher.addEventListener(new Handler(handlerThread.getLooper()), eventListener); } /** * Downloads an offline license. * - * @param drmInitData The {@link DrmInitData} for the content whose license is to be downloaded. + * @param format The {@link Format} of the content whose license is to be downloaded. Must contain + * a non-null {@link Format#drmInitData}. * @return The key set id for the downloaded license. * @throws DrmSessionException Thrown when a DRM session error occurs. 
*/ - public synchronized byte[] downloadLicense(DrmInitData drmInitData) throws DrmSessionException { - Assertions.checkArgument(drmInitData != null); - return blockingKeyRequest(DefaultDrmSessionManager.MODE_DOWNLOAD, null, drmInitData); + public synchronized byte[] downloadLicense(Format format) throws DrmSessionException { + Assertions.checkArgument(format.drmInitData != null); + return blockingKeyRequest(DefaultDrmSessionManager.MODE_DOWNLOAD, null, format); } /** @@ -185,7 +203,9 @@ public synchronized byte[] renewLicense(byte[] offlineLicenseKeySetId) throws DrmSessionException { Assertions.checkNotNull(offlineLicenseKeySetId); return blockingKeyRequest( - DefaultDrmSessionManager.MODE_DOWNLOAD, offlineLicenseKeySetId, DUMMY_DRM_INIT_DATA); + DefaultDrmSessionManager.MODE_DOWNLOAD, + offlineLicenseKeySetId, + FORMAT_WITH_EMPTY_DRM_INIT_DATA); } /** @@ -198,7 +218,9 @@ public synchronized void releaseLicense(byte[] offlineLicenseKeySetId) throws DrmSessionException { Assertions.checkNotNull(offlineLicenseKeySetId); blockingKeyRequest( - DefaultDrmSessionManager.MODE_RELEASE, offlineLicenseKeySetId, DUMMY_DRM_INIT_DATA); + DefaultDrmSessionManager.MODE_RELEASE, + offlineLicenseKeySetId, + FORMAT_WITH_EMPTY_DRM_INIT_DATA); } /** @@ -211,14 +233,17 @@ public synchronized void releaseLicense(byte[] offlineLicenseKeySetId) public synchronized Pair getLicenseDurationRemainingSec(byte[] offlineLicenseKeySetId) throws DrmSessionException { Assertions.checkNotNull(offlineLicenseKeySetId); + drmSessionManager.setPlayer(handlerThread.getLooper(), PlayerId.UNSET); drmSessionManager.prepare(); - DrmSession drmSession = + DrmSession drmSession = openBlockingKeyRequest( - DefaultDrmSessionManager.MODE_QUERY, offlineLicenseKeySetId, DUMMY_DRM_INIT_DATA); + DefaultDrmSessionManager.MODE_QUERY, + offlineLicenseKeySetId, + FORMAT_WITH_EMPTY_DRM_INIT_DATA); DrmSessionException error = drmSession.getError(); Pair licenseDurationRemainingSec = WidevineUtil.getLicenseDurationRemainingSec(drmSession); - drmSession.release(); + drmSession.release(eventDispatcher); drmSessionManager.release(); if (error != null) { if (error.getCause() instanceof KeysExpiredException) { @@ -229,22 +254,20 @@ public synchronized Pair getLicenseDurationRemainingSec(byte[] offli return Assertions.checkNotNull(licenseDurationRemainingSec); } - /** - * Releases the helper. Should be called when the helper is no longer required. - */ + /** Releases the helper. Should be called when the helper is no longer required. 
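// Illustrative end-to-end offline flow with the Format-based API; the wrapping method and its
// parameters are placeholders, and the Format must carry a non-null drmInitData:
private static void downloadQueryAndRelease(
    OfflineLicenseHelper licenseHelper, DrmInitData drmInitData) throws DrmSessionException {
  Format format = new Format.Builder().setDrmInitData(drmInitData).build();
  byte[] keySetId = licenseHelper.downloadLicense(format);
  Pair<Long, Long> remainingSec = licenseHelper.getLicenseDurationRemainingSec(keySetId);
  // ... store keySetId and play offline; when the license is no longer needed:
  licenseHelper.releaseLicense(keySetId);
  licenseHelper.release();
}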
*/ public void release() { handlerThread.quit(); } private byte[] blockingKeyRequest( - @Mode int licenseMode, @Nullable byte[] offlineLicenseKeySetId, DrmInitData drmInitData) + @Mode int licenseMode, @Nullable byte[] offlineLicenseKeySetId, Format format) throws DrmSessionException { + drmSessionManager.setPlayer(handlerThread.getLooper(), PlayerId.UNSET); drmSessionManager.prepare(); - DrmSession drmSession = openBlockingKeyRequest(licenseMode, offlineLicenseKeySetId, - drmInitData); + DrmSession drmSession = openBlockingKeyRequest(licenseMode, offlineLicenseKeySetId, format); DrmSessionException error = drmSession.getError(); byte[] keySetId = drmSession.getOfflineLicenseKeySetId(); - drmSession.release(); + drmSession.release(eventDispatcher); drmSessionManager.release(); if (error != null) { throw error; @@ -252,15 +275,14 @@ private byte[] blockingKeyRequest( return Assertions.checkNotNull(keySetId); } - private DrmSession openBlockingKeyRequest( - @Mode int licenseMode, @Nullable byte[] offlineLicenseKeySetId, DrmInitData drmInitData) { + private DrmSession openBlockingKeyRequest( + @Mode int licenseMode, @Nullable byte[] offlineLicenseKeySetId, Format format) { + Assertions.checkNotNull(format.drmInitData); drmSessionManager.setMode(licenseMode, offlineLicenseKeySetId); conditionVariable.close(); - DrmSession drmSession = drmSessionManager.acquireSession(handlerThread.getLooper(), - drmInitData); + DrmSession drmSession = drmSessionManager.acquireSession(eventDispatcher, format); // Block current thread until key loading is finished conditionVariable.block(); - return drmSession; + return Assertions.checkNotNull(drmSession); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/UnsupportedDrmException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/UnsupportedDrmException.java index a89196dc04..62d7a8873e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/UnsupportedDrmException.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/UnsupportedDrmException.java @@ -15,27 +15,33 @@ */ package com.google.android.exoplayer2.drm; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; -/** - * Thrown when the requested DRM scheme is not supported. - */ +/** Thrown when the requested DRM scheme is not supported. */ public final class UnsupportedDrmException extends Exception { /** * The reason for the exception. One of {@link #REASON_UNSUPPORTED_SCHEME} or {@link * #REASON_INSTANTIATION_ERROR}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({REASON_UNSUPPORTED_SCHEME, REASON_INSTANTIATION_ERROR}) public @interface Reason {} - /** - * The requested DRM scheme is unsupported by the device. - */ + /** The requested DRM scheme is unsupported by the device. 
*/ public static final int REASON_UNSUPPORTED_SCHEME = 1; /** * There device advertises support for the requested DRM scheme, but there was an error @@ -43,10 +49,8 @@ public final class UnsupportedDrmException extends Exception { */ public static final int REASON_INSTANTIATION_ERROR = 2; - /** - * Either {@link #REASON_UNSUPPORTED_SCHEME} or {@link #REASON_INSTANTIATION_ERROR}. - */ - @Reason public final int reason; + /** Either {@link #REASON_UNSUPPORTED_SCHEME} or {@link #REASON_INSTANTIATION_ERROR}. */ + public final @Reason int reason; /** * @param reason {@link #REASON_UNSUPPORTED_SCHEME} or {@link #REASON_INSTANTIATION_ERROR}. @@ -63,5 +67,4 @@ public UnsupportedDrmException(@Reason int reason, Exception cause) { super(cause); this.reason = reason; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/WidevineUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/WidevineUtil.java index 004f873a33..9db314e502 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/WidevineUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/drm/WidevineUtil.java @@ -20,9 +20,7 @@ import com.google.android.exoplayer2.C; import java.util.Map; -/** - * Utility methods for Widevine. - */ +/** Utility methods for Widevine. */ public final class WidevineUtil { /** Widevine specific key status field name for the remaining license duration, in seconds. */ @@ -39,13 +37,14 @@ private WidevineUtil() {} * @return A {@link Pair} consisting of the remaining license and playback durations in seconds, * or null if called before the session has been opened or after it's been released. */ - public static @Nullable Pair getLicenseDurationRemainingSec( - DrmSession drmSession) { + @Nullable + public static Pair getLicenseDurationRemainingSec(DrmSession drmSession) { Map keyStatus = drmSession.queryKeyStatus(); if (keyStatus == null) { return null; } - return new Pair<>(getDurationRemainingSec(keyStatus, PROPERTY_LICENSE_DURATION_REMAINING), + return new Pair<>( + getDurationRemainingSec(keyStatus, PROPERTY_LICENSE_DURATION_REMAINING), getDurationRemainingSec(keyStatus, PROPERTY_PLAYBACK_DURATION_REMAINING)); } @@ -62,5 +61,4 @@ private static long getDurationRemainingSec(Map keyStatus, Strin } return C.TIME_UNSET; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ChainingGlTextureProcessorListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ChainingGlTextureProcessorListener.java new file mode 100644 index 0000000000..3b130d4482 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ChainingGlTextureProcessorListener.java @@ -0,0 +1,117 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
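// Illustrative handling of the two Reason values when instantiating a FrameworkMediaDrm:
try {
  FrameworkMediaDrm mediaDrm = FrameworkMediaDrm.newInstance(C.WIDEVINE_UUID);
  mediaDrm.release();
} catch (UnsupportedDrmException e) {
  if (e.reason == UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME) {
    // The device does not support the scheme; fall back to clear content.
  } else {
    // REASON_INSTANTIATION_ERROR: the scheme is advertised but could not be instantiated.
  }
}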
+ */ +package com.google.android.exoplayer2.effect; + +import android.util.Pair; +import androidx.annotation.GuardedBy; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.effect.GlTextureProcessor.InputListener; +import com.google.android.exoplayer2.effect.GlTextureProcessor.OutputListener; +import java.util.ArrayDeque; +import java.util.Queue; + +/** + * Connects a producing and a consuming {@link GlTextureProcessor} instance. + * + *
<p>
      This listener should be set as {@link InputListener} on the consuming {@link + * GlTextureProcessor} and as {@link OutputListener} on the producing {@link GlTextureProcessor}. + */ +/* package */ final class ChainingGlTextureProcessorListener + implements GlTextureProcessor.InputListener, GlTextureProcessor.OutputListener { + + private final GlTextureProcessor producingGlTextureProcessor; + private final GlTextureProcessor consumingGlTextureProcessor; + private final FrameProcessingTaskExecutor frameProcessingTaskExecutor; + + @GuardedBy("this") + private final Queue> availableFrames; + + @GuardedBy("this") + private int consumingGlTextureProcessorInputCapacity; + + /** + * Creates a new instance. + * + * @param producingGlTextureProcessor The {@link GlTextureProcessor} for which this listener will + * be set as {@link OutputListener}. + * @param consumingGlTextureProcessor The {@link GlTextureProcessor} for which this listener will + * be set as {@link InputListener}. + * @param frameProcessingTaskExecutor The {@link FrameProcessingTaskExecutor} that is used for + * OpenGL calls. All calls to the producing/consuming {@link GlTextureProcessor} will be + * executed by the {@link FrameProcessingTaskExecutor}. The caller is responsible for + * releasing the {@link FrameProcessingTaskExecutor}. + */ + public ChainingGlTextureProcessorListener( + GlTextureProcessor producingGlTextureProcessor, + GlTextureProcessor consumingGlTextureProcessor, + FrameProcessingTaskExecutor frameProcessingTaskExecutor) { + this.producingGlTextureProcessor = producingGlTextureProcessor; + this.consumingGlTextureProcessor = consumingGlTextureProcessor; + this.frameProcessingTaskExecutor = frameProcessingTaskExecutor; + availableFrames = new ArrayDeque<>(); + } + + @Override + public synchronized void onReadyToAcceptInputFrame() { + @Nullable Pair pendingFrame = availableFrames.poll(); + if (pendingFrame == null) { + consumingGlTextureProcessorInputCapacity++; + return; + } + + long presentationTimeUs = pendingFrame.second; + if (presentationTimeUs == C.TIME_END_OF_SOURCE) { + frameProcessingTaskExecutor.submit( + consumingGlTextureProcessor::signalEndOfCurrentInputStream); + } else { + frameProcessingTaskExecutor.submit( + () -> + consumingGlTextureProcessor.queueInputFrame( + /* inputTexture= */ pendingFrame.first, presentationTimeUs)); + } + } + + @Override + public void onInputFrameProcessed(TextureInfo inputTexture) { + frameProcessingTaskExecutor.submit( + () -> producingGlTextureProcessor.releaseOutputFrame(inputTexture)); + } + + @Override + public synchronized void onOutputFrameAvailable( + TextureInfo outputTexture, long presentationTimeUs) { + if (consumingGlTextureProcessorInputCapacity > 0) { + frameProcessingTaskExecutor.submit( + () -> + consumingGlTextureProcessor.queueInputFrame( + /* inputTexture= */ outputTexture, presentationTimeUs)); + consumingGlTextureProcessorInputCapacity--; + } else { + availableFrames.add(new Pair<>(outputTexture, presentationTimeUs)); + } + } + + @Override + public synchronized void onCurrentOutputStreamEnded() { + if (!availableFrames.isEmpty()) { + availableFrames.add(new Pair<>(TextureInfo.UNSET, C.TIME_END_OF_SOURCE)); + } else { + frameProcessingTaskExecutor.submit( + consumingGlTextureProcessor::signalEndOfCurrentInputStream); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ColorLut.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ColorLut.java new file mode 100644 index 0000000000..7a05a8f5f2 --- 
/dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ColorLut.java @@ -0,0 +1,49 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.effect; + +import android.content.Context; +import androidx.annotation.WorkerThread; +import com.google.android.exoplayer2.util.FrameProcessingException; +import com.google.android.exoplayer2.util.GlUtil; + +/** + * Specifies color transformations using color lookup tables to apply to each frame in the fragment + * shader. + */ +public interface ColorLut extends GlEffect { + + /** + * Returns the OpenGL texture ID of the LUT to apply to the pixels of the frame with the given + * timestamp. + */ + int getLutTextureId(long presentationTimeUs); + + /** Returns the length N of the 3D N x N x N LUT cube with the given timestamp. */ + int getLength(long presentationTimeUs); + + /** Releases the OpenGL texture of the LUT. */ + void release() throws GlUtil.GlException; + + /** This method must be executed on the same thread as other GL commands. */ + @Override + @WorkerThread + default SingleFrameGlTextureProcessor toGlTextureProcessor(Context context, boolean useHdr) + throws FrameProcessingException { + return new ColorLutProcessor(context, /* colorLut= */ this, useHdr); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ColorLutProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ColorLutProcessor.java new file mode 100644 index 0000000000..d6d2db164f --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ColorLutProcessor.java @@ -0,0 +1,101 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import android.content.Context; +import android.opengl.GLES20; +import android.util.Pair; +import com.google.android.exoplayer2.util.FrameProcessingException; +import com.google.android.exoplayer2.util.GlProgram; +import com.google.android.exoplayer2.util.GlUtil; +import java.io.IOException; + +/** Applies a {@link ColorLut} to each frame in the fragment shader. 
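Illustrative sketch (editor's addition, not part of the patch): the ColorLut interface added above leaves LUT storage to the implementer. Assuming the caller has already uploaded an N x N x N LUT cube as a GL texture on the GL thread, a minimal implementation could look like this; the class name StaticColorLut is invented and the library's own implementations may differ.

import android.opengl.GLES20;
import com.google.android.exoplayer2.effect.ColorLut;

/** Hypothetical single-LUT implementation of the ColorLut interface above. */
final class StaticColorLut implements ColorLut {
  private final int lutTextureId; // GL texture holding the N x N x N cube.
  private final int length; // N, the side length of the cube.

  StaticColorLut(int lutTextureId, int length) {
    this.lutTextureId = lutTextureId;
    this.length = length;
  }

  @Override
  public int getLutTextureId(long presentationTimeUs) {
    return lutTextureId; // The same LUT is used for every frame.
  }

  @Override
  public int getLength(long presentationTimeUs) {
    return length;
  }

  @Override
  public void release() {
    // Must be called on the GL thread; plain GLES20 is used here to avoid assuming a GlUtil helper.
    GLES20.glDeleteTextures(/* n= */ 1, new int[] {lutTextureId}, /* offset= */ 0);
  }
}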
*/ +/* package */ final class ColorLutProcessor extends SingleFrameGlTextureProcessor { + private static final String VERTEX_SHADER_PATH = "shaders/vertex_shader_transformation_es2.glsl"; + private static final String FRAGMENT_SHADER_PATH = "shaders/fragment_shader_lut_es2.glsl"; + + private final GlProgram glProgram; + private final ColorLut colorLut; + + /** + * Creates a new instance. + * + * @param context The {@link Context}. + * @param colorLut The {@link ColorLut} to apply to each frame in order. + * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be + * in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. + * @throws FrameProcessingException If a problem occurs while reading shader files. + */ + public ColorLutProcessor(Context context, ColorLut colorLut, boolean useHdr) + throws FrameProcessingException { + super(useHdr); + // TODO(b/246315245): Add HDR support. + checkArgument(!useHdr, "LutProcessor does not support HDR colors."); + this.colorLut = colorLut; + + try { + glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH); + } catch (IOException | GlUtil.GlException e) { + throw new FrameProcessingException(e); + } + + // Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y. + glProgram.setBufferAttribute( + "aFramePosition", + GlUtil.getNormalizedCoordinateBounds(), + GlUtil.HOMOGENEOUS_COORDINATE_VECTOR_SIZE); + + float[] identityMatrix = GlUtil.create4x4IdentityMatrix(); + glProgram.setFloatsUniform("uTransformationMatrix", identityMatrix); + glProgram.setFloatsUniform("uTexTransformationMatrix", identityMatrix); + } + + @Override + public Pair configure(int inputWidth, int inputHeight) { + return Pair.create(inputWidth, inputHeight); + } + + @Override + public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { + try { + glProgram.use(); + glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0); + glProgram.setSamplerTexIdUniform( + "uColorLut", colorLut.getLutTextureId(presentationTimeUs), /* texUnitIndex= */ 1); + glProgram.setFloatUniform("uColorLutLength", colorLut.getLength(presentationTimeUs)); + glProgram.bindAttributesAndUniforms(); + + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4); + } catch (GlUtil.GlException e) { + throw new FrameProcessingException(e); + } + } + + @Override + public void release() throws FrameProcessingException { + super.release(); + try { + colorLut.release(); + glProgram.delete(); + } catch (GlUtil.GlException e) { + throw new FrameProcessingException(e); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/Contrast.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/Contrast.java new file mode 100644 index 0000000000..e98f59a791 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/Contrast.java @@ -0,0 +1,46 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import android.content.Context; +import com.google.android.exoplayer2.util.FrameProcessingException; + +/** A {@link GlEffect} to control the contrast of video frames. */ +public class Contrast implements GlEffect { + + /** Adjusts the contrast of video frames in the interval [-1, 1]. */ + public final float contrast; + + /** + * Creates a new instance for the given contrast value. + * + *
<p>
      Contrast values range from -1 (all gray pixels) to 1 (maximum difference of colors). 0 means + * to add no contrast and leaves the frames unchanged. + */ + public Contrast(float contrast) { + checkArgument(-1 <= contrast && contrast <= 1, "Contrast needs to be in the interval [-1, 1]."); + this.contrast = contrast; + } + + @Override + public SingleFrameGlTextureProcessor toGlTextureProcessor(Context context, boolean useHdr) + throws FrameProcessingException { + return new ContrastProcessor(context, this, useHdr); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ContrastProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ContrastProcessor.java new file mode 100644 index 0000000000..842a88d42c --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ContrastProcessor.java @@ -0,0 +1,95 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.effect; + +import android.content.Context; +import android.opengl.GLES20; +import android.util.Pair; +import com.google.android.exoplayer2.util.FrameProcessingException; +import com.google.android.exoplayer2.util.GlProgram; +import com.google.android.exoplayer2.util.GlUtil; +import java.io.IOException; + +/** Applies a {@link Contrast} to each frame in the fragment shader. */ +/* package */ final class ContrastProcessor extends SingleFrameGlTextureProcessor { + private static final String VERTEX_SHADER_PATH = "shaders/vertex_shader_transformation_es2.glsl"; + private static final String FRAGMENT_SHADER_PATH = "shaders/fragment_shader_contrast_es2.glsl"; + + private final GlProgram glProgram; + + /** + * Creates a new instance. + * + * @param context The {@link Context}. + * @param contrastEffect The {@link Contrast} to apply to each frame in order. + * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be + * in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. + * @throws FrameProcessingException If a problem occurs while reading shader files. + */ + public ContrastProcessor(Context context, Contrast contrastEffect, boolean useHdr) + throws FrameProcessingException { + super(useHdr); + // Use 1.0001f to avoid division by zero issues. + float contrastFactor = (1 + contrastEffect.contrast) / (1.0001f - contrastEffect.contrast); + + try { + glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH); + } catch (IOException | GlUtil.GlException e) { + throw new FrameProcessingException(e); + } + + // Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y. 
+ glProgram.setBufferAttribute( + "aFramePosition", + GlUtil.getNormalizedCoordinateBounds(), + GlUtil.HOMOGENEOUS_COORDINATE_VECTOR_SIZE); + + float[] identityMatrix = GlUtil.create4x4IdentityMatrix(); + glProgram.setFloatsUniform("uTransformationMatrix", identityMatrix); + glProgram.setFloatsUniform("uTexTransformationMatrix", identityMatrix); + glProgram.setFloatUniform("uContrastFactor", contrastFactor); + } + + @Override + public Pair configure(int inputWidth, int inputHeight) { + return Pair.create(inputWidth, inputHeight); + } + + @Override + public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { + try { + glProgram.use(); + glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0); + glProgram.bindAttributesAndUniforms(); + + // The four-vertex triangle strip forms a quad. + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4); + } catch (GlUtil.GlException e) { + throw new FrameProcessingException(e, presentationTimeUs); + } + } + + @Override + public void release() throws FrameProcessingException { + super.release(); + try { + glProgram.delete(); + } catch (GlUtil.GlException e) { + throw new FrameProcessingException(e); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/Crop.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/Crop.java new file mode 100644 index 0000000000..ad253cc67b --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/Crop.java @@ -0,0 +1,94 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + +import android.graphics.Matrix; +import android.util.Pair; +import com.google.android.exoplayer2.util.GlUtil; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Specifies a crop to apply in the vertex shader. + * + *
<p>
      The background color of the output frame will be black, with alpha = 0 if applicable. + */ +public final class Crop implements MatrixTransformation { + + private final float left; + private final float right; + private final float bottom; + private final float top; + + private @MonotonicNonNull Matrix transformationMatrix; + + /** + * Crops a smaller (or larger) frame, per normalized device coordinates (NDC), where the input + * frame corresponds to the square ranging from -1 to 1 on the x and y axes. + * + *
<p>
      {@code left} and {@code bottom} default to -1, and {@code right} and {@code top} default to + * 1, which corresponds to not applying any crop. To crop to a smaller subset of the input frame, + * use values between -1 and 1. To crop to a larger frame, use values below -1 and above 1. + * + * @param left The left edge of the output frame, in NDC. Must be less than {@code right}. + * @param right The right edge of the output frame, in NDC. Must be greater than {@code left}. + * @param bottom The bottom edge of the output frame, in NDC. Must be less than {@code top}. + * @param top The top edge of the output frame, in NDC. Must be greater than {@code bottom}. + */ + public Crop(float left, float right, float bottom, float top) { + checkArgument( + right > left, "right value " + right + " should be greater than left value " + left); + checkArgument( + top > bottom, "top value " + top + " should be greater than bottom value " + bottom); + this.left = left; + this.right = right; + this.bottom = bottom; + this.top = top; + + transformationMatrix = new Matrix(); + } + + @Override + public Pair configure(int inputWidth, int inputHeight) { + checkArgument(inputWidth > 0, "inputWidth must be positive"); + checkArgument(inputHeight > 0, "inputHeight must be positive"); + + transformationMatrix = new Matrix(); + if (left == -1f && right == 1f && bottom == -1f && top == 1f) { + // No crop needed. + return Pair.create(inputWidth, inputHeight); + } + + float scaleX = (right - left) / GlUtil.LENGTH_NDC; + float scaleY = (top - bottom) / GlUtil.LENGTH_NDC; + float centerX = (left + right) / 2; + float centerY = (bottom + top) / 2; + + transformationMatrix.postTranslate(-centerX, -centerY); + transformationMatrix.postScale(1f / scaleX, 1f / scaleY); + + int outputWidth = Math.round(inputWidth * scaleX); + int outputHeight = Math.round(inputHeight * scaleY); + return Pair.create(outputWidth, outputHeight); + } + + @Override + public Matrix getMatrix(long presentationTimeUs) { + return checkStateNotNull(transformationMatrix, "configure must be called first"); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ExternalTextureManager.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ExternalTextureManager.java new file mode 100644 index 0000000000..bfa7e16ee6 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ExternalTextureManager.java @@ -0,0 +1,181 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
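Illustrative sketch (editor's addition, not part of the patch): composing the Crop and Contrast effects added above. Crop.configure(...) scales by (right - left) / LENGTH_NDC horizontally and (top - bottom) / LENGTH_NDC vertically, so the crop below keeps the right half of the frame and turns a 1920x1080 input into a 960x1080 output; the wrapper class is invented.

import com.google.android.exoplayer2.effect.Contrast;
import com.google.android.exoplayer2.effect.Crop;
import com.google.android.exoplayer2.util.Effect;
import com.google.common.collect.ImmutableList;

final class EffectListExample {
  /** Keeps the right half of each frame, then applies a mild contrast boost. */
  static ImmutableList<Effect> rightHalfWithContrast() {
    return ImmutableList.of(
        // scaleX = (1 - 0) / 2 = 0.5 and scaleY = (1 - (-1)) / 2 = 1, so a
        // 1920x1080 input is configured to a 960x1080 output.
        new Crop(/* left= */ 0f, /* right= */ 1f, /* bottom= */ -1f, /* top= */ 1f),
        new Contrast(0.5f));
  }
}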
+ */ +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import android.graphics.SurfaceTexture; +import androidx.annotation.Nullable; +import androidx.annotation.WorkerThread; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.effect.GlTextureProcessor.InputListener; +import com.google.android.exoplayer2.util.FrameInfo; +import com.google.android.exoplayer2.util.FrameProcessingException; +import com.google.android.exoplayer2.util.FrameProcessor; +import com.google.android.exoplayer2.util.GlUtil; +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Forwards externally produced frames that become available via a {@link SurfaceTexture} to an + * {@link ExternalTextureProcessor} for consumption. + */ +/* package */ class ExternalTextureManager implements InputListener { + + private final FrameProcessingTaskExecutor frameProcessingTaskExecutor; + private final ExternalTextureProcessor externalTextureProcessor; + private final int externalTexId; + private final SurfaceTexture surfaceTexture; + private final float[] textureTransformMatrix; + private final Queue pendingFrames; + + // Incremented on any thread when a frame becomes available on the surfaceTexture, decremented on + // the GL thread only. + private final AtomicInteger availableFrameCount; + // Incremented on any thread, decremented on the GL thread only. + private final AtomicInteger externalTextureProcessorInputCapacity; + + // Set to true on any thread. Read on the GL thread only. + private volatile boolean inputStreamEnded; + // The frame that is sent downstream and is not done processing yet. + // Set to null on any thread. Read and set to non-null on the GL thread only. + @Nullable private volatile FrameInfo currentFrame; + + private long previousStreamOffsetUs; + + /** + * Creates a new instance. + * + * @param externalTextureProcessor The {@link ExternalTextureProcessor} for which this {@code + * ExternalTextureManager} will be set as the {@link InputListener}. + * @param frameProcessingTaskExecutor The {@link FrameProcessingTaskExecutor}. + * @throws FrameProcessingException If a problem occurs while creating the external texture. 
+ */ + public ExternalTextureManager( + ExternalTextureProcessor externalTextureProcessor, + FrameProcessingTaskExecutor frameProcessingTaskExecutor) + throws FrameProcessingException { + this.externalTextureProcessor = externalTextureProcessor; + this.frameProcessingTaskExecutor = frameProcessingTaskExecutor; + try { + externalTexId = GlUtil.createExternalTexture(); + } catch (GlUtil.GlException e) { + throw new FrameProcessingException(e); + } + surfaceTexture = new SurfaceTexture(externalTexId); + textureTransformMatrix = new float[16]; + pendingFrames = new ConcurrentLinkedQueue<>(); + availableFrameCount = new AtomicInteger(); + externalTextureProcessorInputCapacity = new AtomicInteger(); + previousStreamOffsetUs = C.TIME_UNSET; + } + + public SurfaceTexture getSurfaceTexture() { + surfaceTexture.setOnFrameAvailableListener( + unused -> { + availableFrameCount.getAndIncrement(); + frameProcessingTaskExecutor.submit(this::maybeQueueFrameToExternalTextureProcessor); + }); + return surfaceTexture; + } + + @Override + public void onReadyToAcceptInputFrame() { + externalTextureProcessorInputCapacity.getAndIncrement(); + frameProcessingTaskExecutor.submit(this::maybeQueueFrameToExternalTextureProcessor); + } + + @Override + public void onInputFrameProcessed(TextureInfo inputTexture) { + currentFrame = null; + frameProcessingTaskExecutor.submit(this::maybeQueueFrameToExternalTextureProcessor); + } + + /** + * Notifies the {@code ExternalTextureManager} that a frame with the given {@link FrameInfo} will + * become available via the {@link SurfaceTexture} eventually. + * + *
<p>
      Can be called on any thread. The caller must ensure that frames are registered in the + * correct order. + */ + public void registerInputFrame(FrameInfo frame) { + pendingFrames.add(frame); + } + + /** + * Returns the number of {@linkplain #registerInputFrame(FrameInfo) registered} frames that have + * not been rendered to the external texture yet. + * + *
<p>
      Can be called on any thread. + */ + public int getPendingFrameCount() { + return pendingFrames.size(); + } + + /** + * Signals the end of the input. + * + * @see FrameProcessor#signalEndOfInput() + */ + @WorkerThread + public void signalEndOfInput() { + inputStreamEnded = true; + if (pendingFrames.isEmpty() && currentFrame == null) { + externalTextureProcessor.signalEndOfCurrentInputStream(); + } + } + + public void release() { + surfaceTexture.release(); + } + + @WorkerThread + private void maybeQueueFrameToExternalTextureProcessor() { + if (externalTextureProcessorInputCapacity.get() == 0 + || availableFrameCount.get() == 0 + || currentFrame != null) { + return; + } + + availableFrameCount.getAndDecrement(); + surfaceTexture.updateTexImage(); + this.currentFrame = pendingFrames.remove(); + + FrameInfo currentFrame = checkNotNull(this.currentFrame); + externalTextureProcessorInputCapacity.getAndDecrement(); + surfaceTexture.getTransformMatrix(textureTransformMatrix); + externalTextureProcessor.setTextureTransformMatrix(textureTransformMatrix); + long frameTimeNs = surfaceTexture.getTimestamp(); + long streamOffsetUs = currentFrame.streamOffsetUs; + if (streamOffsetUs != previousStreamOffsetUs) { + if (previousStreamOffsetUs != C.TIME_UNSET) { + externalTextureProcessor.signalEndOfCurrentInputStream(); + } + previousStreamOffsetUs = streamOffsetUs; + } + // Correct for the stream offset so processors see original media presentation timestamps. + long presentationTimeUs = (frameTimeNs / 1000) - streamOffsetUs; + externalTextureProcessor.queueInputFrame( + new TextureInfo( + externalTexId, /* fboId= */ C.INDEX_UNSET, currentFrame.width, currentFrame.height), + presentationTimeUs); + + if (inputStreamEnded && pendingFrames.isEmpty()) { + externalTextureProcessor.signalEndOfCurrentInputStream(); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ExternalTextureProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ExternalTextureProcessor.java new file mode 100644 index 0000000000..9a85cfdea6 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ExternalTextureProcessor.java @@ -0,0 +1,34 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.effect; + +/** + * Interface for a {@link GlTextureProcessor} that samples from an external texture. + * + *
<p>
      Use {@link #setTextureTransformMatrix(float[])} to provide the texture's transformation + * matrix. + */ +/* package */ interface ExternalTextureProcessor extends GlTextureProcessor { + + /** + * Sets the texture transform matrix for converting an external surface texture's coordinates to + * sampling locations. + * + * @param textureTransformMatrix The external surface texture's {@linkplain + * android.graphics.SurfaceTexture#getTransformMatrix(float[]) transform matrix}. + */ + void setTextureTransformMatrix(float[] textureTransformMatrix); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/FinalMatrixTextureProcessorWrapper.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/FinalMatrixTextureProcessorWrapper.java new file mode 100644 index 0000000000..416a732d43 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/FinalMatrixTextureProcessorWrapper.java @@ -0,0 +1,486 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + +import android.content.Context; +import android.opengl.EGL14; +import android.opengl.EGLContext; +import android.opengl.EGLDisplay; +import android.opengl.EGLExt; +import android.opengl.EGLSurface; +import android.opengl.GLES20; +import android.util.Pair; +import android.view.Surface; +import android.view.SurfaceHolder; +import android.view.SurfaceView; +import androidx.annotation.GuardedBy; +import androidx.annotation.Nullable; +import androidx.annotation.WorkerThread; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.util.DebugViewProvider; +import com.google.android.exoplayer2.util.FrameProcessingException; +import com.google.android.exoplayer2.util.FrameProcessor; +import com.google.android.exoplayer2.util.GlUtil; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.SurfaceInfo; +import com.google.android.exoplayer2.util.Util; +import com.google.android.exoplayer2.video.ColorInfo; +import com.google.common.collect.ImmutableList; +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedQueue; +import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Wrapper around a {@link GlTextureProcessor} that writes to the provided output surface and + * optional debug surface view. + * + *
<p>
      The wrapped {@link GlTextureProcessor} applies the {@link GlMatrixTransformation} and {@link + * RgbMatrix} instances passed to the constructor, followed by any transformations needed to convert + * the frames to the dimensions specified by the provided {@link SurfaceInfo}. + * + *
<p>
      This wrapper is used for the final {@link GlTextureProcessor} instance in the chain of {@link + * GlTextureProcessor} instances used by {@link FrameProcessor}. + */ +/* package */ final class FinalMatrixTextureProcessorWrapper implements ExternalTextureProcessor { + + private static final String TAG = "FinalProcessorWrapper"; + + private final Context context; + private final ImmutableList matrixTransformations; + private final ImmutableList rgbMatrices; + private final EGLDisplay eglDisplay; + private final EGLContext eglContext; + private final DebugViewProvider debugViewProvider; + private final FrameProcessor.Listener frameProcessorListener; + private final boolean sampleFromExternalTexture; + private final ColorInfo colorInfo; + private final boolean releaseFramesAutomatically; + private final float[] textureTransformMatrix; + private final Queue streamOffsetUsQueue; + private final Queue> availableFrames; + + private int inputWidth; + private int inputHeight; + @Nullable private MatrixTextureProcessor matrixTextureProcessor; + @Nullable private SurfaceViewWrapper debugSurfaceViewWrapper; + private InputListener inputListener; + private @MonotonicNonNull Pair outputSizeBeforeSurfaceTransformation; + @Nullable private SurfaceView debugSurfaceView; + + private volatile boolean outputSizeOrRotationChanged; + + @GuardedBy("this") + @Nullable + private SurfaceInfo outputSurfaceInfo; + + @GuardedBy("this") + @Nullable + private EGLSurface outputEglSurface; + + public FinalMatrixTextureProcessorWrapper( + Context context, + EGLDisplay eglDisplay, + EGLContext eglContext, + ImmutableList matrixTransformations, + ImmutableList rgbMatrices, + FrameProcessor.Listener frameProcessorListener, + DebugViewProvider debugViewProvider, + boolean sampleFromExternalTexture, + ColorInfo colorInfo, + boolean releaseFramesAutomatically) { + this.context = context; + this.matrixTransformations = matrixTransformations; + this.rgbMatrices = rgbMatrices; + this.eglDisplay = eglDisplay; + this.eglContext = eglContext; + this.debugViewProvider = debugViewProvider; + this.frameProcessorListener = frameProcessorListener; + this.sampleFromExternalTexture = sampleFromExternalTexture; + this.colorInfo = colorInfo; + this.releaseFramesAutomatically = releaseFramesAutomatically; + + textureTransformMatrix = GlUtil.create4x4IdentityMatrix(); + streamOffsetUsQueue = new ConcurrentLinkedQueue<>(); + inputListener = new InputListener() {}; + availableFrames = new ConcurrentLinkedQueue<>(); + } + + @Override + public void setInputListener(InputListener inputListener) { + this.inputListener = inputListener; + inputListener.onReadyToAcceptInputFrame(); + } + + @Override + public void setOutputListener(OutputListener outputListener) { + // The FrameProcessor.Listener passed to the constructor is used for output-related events. + throw new UnsupportedOperationException(); + } + + @Override + public void setErrorListener(ErrorListener errorListener) { + // The FrameProcessor.Listener passed to the constructor is used for errors. 
+ throw new UnsupportedOperationException(); + } + + @Override + public void queueInputFrame(TextureInfo inputTexture, long presentationTimeUs) { + long streamOffsetUs = + checkStateNotNull(streamOffsetUsQueue.peek(), "No input stream specified."); + long offsetPresentationTimeUs = presentationTimeUs + streamOffsetUs; + frameProcessorListener.onOutputFrameAvailable(offsetPresentationTimeUs); + if (releaseFramesAutomatically) { + renderFrameToSurfaces( + inputTexture, presentationTimeUs, /* releaseTimeNs= */ offsetPresentationTimeUs * 1000); + } else { + availableFrames.add(Pair.create(inputTexture, presentationTimeUs)); + } + inputListener.onReadyToAcceptInputFrame(); + } + + @Override + public void releaseOutputFrame(TextureInfo outputTexture) { + // The final texture processor writes to a surface so there is no texture to release. + throw new UnsupportedOperationException(); + } + + @WorkerThread + public void releaseOutputFrame(long releaseTimeNs) { + checkState(!releaseFramesAutomatically); + Pair oldestAvailableFrame = availableFrames.remove(); + renderFrameToSurfaces( + /* inputTexture= */ oldestAvailableFrame.first, + /* presentationTimeUs= */ oldestAvailableFrame.second, + releaseTimeNs); + } + + @Override + public void signalEndOfCurrentInputStream() { + checkState(!streamOffsetUsQueue.isEmpty(), "No input stream to end."); + + streamOffsetUsQueue.remove(); + if (streamOffsetUsQueue.isEmpty()) { + frameProcessorListener.onFrameProcessingEnded(); + } + } + + @Override + @WorkerThread + public void release() throws FrameProcessingException { + if (matrixTextureProcessor != null) { + matrixTextureProcessor.release(); + } + } + + @Override + public void setTextureTransformMatrix(float[] textureTransformMatrix) { + System.arraycopy( + /* src= */ textureTransformMatrix, + /* srcPos= */ 0, + /* dest= */ this.textureTransformMatrix, + /* destPost= */ 0, + /* length= */ textureTransformMatrix.length); + + if (matrixTextureProcessor != null) { + matrixTextureProcessor.setTextureTransformMatrix(textureTransformMatrix); + } + } + + /** + * Signals that there will be another input stream after all previously appended input streams + * have {@linkplain #signalEndOfCurrentInputStream() ended}. + * + *
<p>
      This method does not need to be called on the GL thread, but the caller must ensure that + * stream offsets are appended in the correct order. + * + * @param streamOffsetUs The presentation timestamp offset, in microseconds. + */ + public void appendStream(long streamOffsetUs) { + streamOffsetUsQueue.add(streamOffsetUs); + } + + /** + * Sets the output {@link SurfaceInfo}. + * + * @see FrameProcessor#setOutputSurfaceInfo(SurfaceInfo) + */ + public synchronized void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) { + if (!Util.areEqual(this.outputSurfaceInfo, outputSurfaceInfo)) { + if (outputSurfaceInfo != null + && this.outputSurfaceInfo != null + && !this.outputSurfaceInfo.surface.equals(outputSurfaceInfo.surface)) { + this.outputEglSurface = null; + } + outputSizeOrRotationChanged = + this.outputSurfaceInfo == null + || outputSurfaceInfo == null + || this.outputSurfaceInfo.width != outputSurfaceInfo.width + || this.outputSurfaceInfo.height != outputSurfaceInfo.height + || this.outputSurfaceInfo.orientationDegrees != outputSurfaceInfo.orientationDegrees; + this.outputSurfaceInfo = outputSurfaceInfo; + } + } + + private void renderFrameToSurfaces( + TextureInfo inputTexture, long presentationTimeUs, long releaseTimeNs) { + try { + maybeRenderFrameToOutputSurface(inputTexture, presentationTimeUs, releaseTimeNs); + } catch (FrameProcessingException | GlUtil.GlException e) { + frameProcessorListener.onFrameProcessingError( + FrameProcessingException.from(e, presentationTimeUs)); + } + maybeRenderFrameToDebugSurface(inputTexture, presentationTimeUs); + inputListener.onInputFrameProcessed(inputTexture); + } + + private synchronized void maybeRenderFrameToOutputSurface( + TextureInfo inputTexture, long presentationTimeUs, long releaseTimeNs) + throws FrameProcessingException, GlUtil.GlException { + if (releaseTimeNs == FrameProcessor.DROP_OUTPUT_FRAME + || !ensureConfigured(inputTexture.width, inputTexture.height)) { + return; // Drop frames when requested, or there is no output surface. + } + + EGLSurface outputEglSurface = this.outputEglSurface; + SurfaceInfo outputSurfaceInfo = this.outputSurfaceInfo; + MatrixTextureProcessor matrixTextureProcessor = this.matrixTextureProcessor; + + GlUtil.focusEglSurface( + eglDisplay, + eglContext, + outputEglSurface, + outputSurfaceInfo.width, + outputSurfaceInfo.height); + GlUtil.clearOutputFrame(); + matrixTextureProcessor.drawFrame(inputTexture.texId, presentationTimeUs); + + EGLExt.eglPresentationTimeANDROID( + eglDisplay, + outputEglSurface, + releaseTimeNs == FrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY + ? 
System.nanoTime() + : releaseTimeNs); + EGL14.eglSwapBuffers(eglDisplay, outputEglSurface); + } + + @EnsuresNonNullIf( + expression = {"outputSurfaceInfo", "outputEglSurface", "matrixTextureProcessor"}, + result = true) + private synchronized boolean ensureConfigured(int inputWidth, int inputHeight) + throws FrameProcessingException, GlUtil.GlException { + + if (this.inputWidth != inputWidth + || this.inputHeight != inputHeight + || this.outputSizeBeforeSurfaceTransformation == null) { + this.inputWidth = inputWidth; + this.inputHeight = inputHeight; + Pair outputSizeBeforeSurfaceTransformation = + MatrixUtils.configureAndGetOutputSize(inputWidth, inputHeight, matrixTransformations); + if (!Util.areEqual( + this.outputSizeBeforeSurfaceTransformation, outputSizeBeforeSurfaceTransformation)) { + this.outputSizeBeforeSurfaceTransformation = outputSizeBeforeSurfaceTransformation; + frameProcessorListener.onOutputSizeChanged( + outputSizeBeforeSurfaceTransformation.first, + outputSizeBeforeSurfaceTransformation.second); + } + } + + if (outputSurfaceInfo == null) { + if (matrixTextureProcessor != null) { + matrixTextureProcessor.release(); + matrixTextureProcessor = null; + } + outputEglSurface = null; + return false; + } + + SurfaceInfo outputSurfaceInfo = this.outputSurfaceInfo; + @Nullable EGLSurface outputEglSurface = this.outputEglSurface; + if (outputEglSurface == null) { + boolean colorInfoIsHdr = ColorInfo.isTransferHdr(colorInfo); + + outputEglSurface = + GlUtil.getEglSurface( + eglDisplay, + outputSurfaceInfo.surface, + colorInfoIsHdr + ? GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_1010102 + : GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888); + + @Nullable + SurfaceView debugSurfaceView = + debugViewProvider.getDebugPreviewSurfaceView( + outputSurfaceInfo.width, outputSurfaceInfo.height); + if (debugSurfaceView != null && !Util.areEqual(this.debugSurfaceView, debugSurfaceView)) { + debugSurfaceViewWrapper = + new SurfaceViewWrapper(eglDisplay, eglContext, colorInfoIsHdr, debugSurfaceView); + } + this.debugSurfaceView = debugSurfaceView; + } + + if (matrixTextureProcessor != null && outputSizeOrRotationChanged) { + matrixTextureProcessor.release(); + matrixTextureProcessor = null; + outputSizeOrRotationChanged = false; + } + if (matrixTextureProcessor == null) { + matrixTextureProcessor = createMatrixTextureProcessorForOutputSurface(outputSurfaceInfo); + } + + this.outputSurfaceInfo = outputSurfaceInfo; + this.outputEglSurface = outputEglSurface; + return true; + } + + private MatrixTextureProcessor createMatrixTextureProcessorForOutputSurface( + SurfaceInfo outputSurfaceInfo) throws FrameProcessingException { + ImmutableList.Builder matrixTransformationListBuilder = + new ImmutableList.Builder().addAll(matrixTransformations); + if (outputSurfaceInfo.orientationDegrees != 0) { + matrixTransformationListBuilder.add( + new ScaleToFitTransformation.Builder() + .setRotationDegrees(outputSurfaceInfo.orientationDegrees) + .build()); + } + matrixTransformationListBuilder.add( + Presentation.createForWidthAndHeight( + outputSurfaceInfo.width, outputSurfaceInfo.height, Presentation.LAYOUT_SCALE_TO_FIT)); + + MatrixTextureProcessor matrixTextureProcessor; + ImmutableList expandedMatrixTransformations = + matrixTransformationListBuilder.build(); + if (sampleFromExternalTexture) { + matrixTextureProcessor = + MatrixTextureProcessor.createWithExternalSamplerApplyingEotfThenOetf( + context, expandedMatrixTransformations, rgbMatrices, colorInfo); + } else { + matrixTextureProcessor = + 
MatrixTextureProcessor.createApplyingOetf( + context, expandedMatrixTransformations, rgbMatrices, colorInfo); + } + + matrixTextureProcessor.setTextureTransformMatrix(textureTransformMatrix); + Pair outputSize = matrixTextureProcessor.configure(inputWidth, inputHeight); + checkState(outputSize.first == outputSurfaceInfo.width); + checkState(outputSize.second == outputSurfaceInfo.height); + return matrixTextureProcessor; + } + + private void maybeRenderFrameToDebugSurface(TextureInfo inputTexture, long presentationTimeUs) { + if (debugSurfaceViewWrapper == null || this.matrixTextureProcessor == null) { + return; + } + + MatrixTextureProcessor matrixTextureProcessor = this.matrixTextureProcessor; + try { + debugSurfaceViewWrapper.maybeRenderToSurfaceView( + () -> { + GlUtil.clearOutputFrame(); + matrixTextureProcessor.drawFrame(inputTexture.texId, presentationTimeUs); + }); + } catch (FrameProcessingException | GlUtil.GlException e) { + Log.d(TAG, "Error rendering to debug preview", e); + } + } + + /** + * Wrapper around a {@link SurfaceView} that keeps track of whether the output surface is valid, + * and makes rendering a no-op if not. + */ + private static final class SurfaceViewWrapper implements SurfaceHolder.Callback { + private final EGLDisplay eglDisplay; + private final EGLContext eglContext; + private final boolean useHdr; + + @GuardedBy("this") + @Nullable + private Surface surface; + + @GuardedBy("this") + @Nullable + private EGLSurface eglSurface; + + private int width; + private int height; + + public SurfaceViewWrapper( + EGLDisplay eglDisplay, EGLContext eglContext, boolean useHdr, SurfaceView surfaceView) { + this.eglDisplay = eglDisplay; + this.eglContext = eglContext; + this.useHdr = useHdr; + surfaceView.getHolder().addCallback(this); + surface = surfaceView.getHolder().getSurface(); + width = surfaceView.getWidth(); + height = surfaceView.getHeight(); + } + + /** + * Focuses the wrapped surface view's surface as an {@link EGLSurface}, renders using {@code + * renderingTask} and swaps buffers, if the view's holder has a valid surface. Does nothing + * otherwise. + */ + @WorkerThread + public synchronized void maybeRenderToSurfaceView(FrameProcessingTask renderingTask) + throws GlUtil.GlException, FrameProcessingException { + if (surface == null) { + return; + } + + if (eglSurface == null) { + eglSurface = + GlUtil.getEglSurface( + eglDisplay, + surface, + useHdr + ? GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_1010102 + : GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888); + } + EGLSurface eglSurface = this.eglSurface; + GlUtil.focusEglSurface(eglDisplay, eglContext, eglSurface, width, height); + renderingTask.run(); + EGL14.eglSwapBuffers(eglDisplay, eglSurface); + // Prevents white flashing on the debug SurfaceView when frames are rendered too fast. 
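+ // Editor's note: glFinish() blocks the GL thread until every previously issued
+ // command has completed, which throttles rendering; that cost is acceptable here
+ // because this path only feeds the optional debug preview.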
+ GLES20.glFinish(); + } + + @Override + public void surfaceCreated(SurfaceHolder holder) {} + + @Override + public synchronized void surfaceChanged( + SurfaceHolder holder, int format, int width, int height) { + this.width = width; + this.height = height; + Surface newSurface = holder.getSurface(); + if (surface == null || !surface.equals(newSurface)) { + surface = newSurface; + eglSurface = null; + } + } + + @Override + public synchronized void surfaceDestroyed(SurfaceHolder holder) { + surface = null; + eglSurface = null; + width = C.LENGTH_UNSET; + height = C.LENGTH_UNSET; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/FrameProcessingTask.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/FrameProcessingTask.java new file mode 100644 index 0000000000..4ea13a9457 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/FrameProcessingTask.java @@ -0,0 +1,28 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.effect; + +import com.google.android.exoplayer2.util.FrameProcessingException; +import com.google.android.exoplayer2.util.GlUtil; + +/** + * Interface for tasks that may throw a {@link GlUtil.GlException} or {@link + * FrameProcessingException}. + */ +/* package */ interface FrameProcessingTask { + /** Runs the task. */ + void run() throws FrameProcessingException, GlUtil.GlException; +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/FrameProcessingTaskExecutor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/FrameProcessingTaskExecutor.java new file mode 100644 index 0000000000..b07875b544 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/FrameProcessingTaskExecutor.java @@ -0,0 +1,166 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.effect; + +import static java.util.concurrent.TimeUnit.MILLISECONDS; + +import com.google.android.exoplayer2.util.FrameProcessingException; +import com.google.android.exoplayer2.util.FrameProcessor; +import com.google.android.exoplayer2.util.GlUtil; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.concurrent.RejectedExecutionException; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * Wrapper around a single thread {@link ExecutorService} for executing {@link FrameProcessingTask} + * instances. + * + *
<p>
      The wrapper handles calling {@link + * FrameProcessor.Listener#onFrameProcessingError(FrameProcessingException)} for errors that occur + * during these tasks. Errors are assumed to be non-recoverable, so the {@code + * FrameProcessingTaskExecutor} should be released if an error occurs. + * + *
<p>
      {@linkplain #submitWithHighPriority(FrameProcessingTask) High priority tasks} are always + * executed before {@linkplain #submit(FrameProcessingTask) default priority tasks}. Tasks with + * equal priority are executed in FIFO order. + */ +/* package */ final class FrameProcessingTaskExecutor { + + private final ExecutorService singleThreadExecutorService; + private final FrameProcessor.Listener listener; + private final ConcurrentLinkedQueue> futures; + private final ConcurrentLinkedQueue highPriorityTasks; + private final AtomicBoolean shouldCancelTasks; + + /** Creates a new instance. */ + public FrameProcessingTaskExecutor( + ExecutorService singleThreadExecutorService, FrameProcessor.Listener listener) { + this.singleThreadExecutorService = singleThreadExecutorService; + this.listener = listener; + + futures = new ConcurrentLinkedQueue<>(); + highPriorityTasks = new ConcurrentLinkedQueue<>(); + shouldCancelTasks = new AtomicBoolean(); + } + + /** + * Submits the given {@link FrameProcessingTask} to be executed after all pending tasks have + * completed. + */ + public void submit(FrameProcessingTask task) { + if (shouldCancelTasks.get()) { + return; + } + try { + futures.add(wrapTaskAndSubmitToExecutorService(task)); + } catch (RejectedExecutionException e) { + handleException(e); + } + } + + /** + * Submits the given {@link FrameProcessingTask} to be executed after the currently running task + * and all previously submitted high-priority tasks have completed. + * + *
<p>
      Tasks that were previously {@linkplain #submit(FrameProcessingTask) submitted} without + * high-priority and have not started executing will be executed after this task is complete. + */ + public void submitWithHighPriority(FrameProcessingTask task) { + if (shouldCancelTasks.get()) { + return; + } + highPriorityTasks.add(task); + // If the ExecutorService has non-started tasks, the first of these non-started tasks will run + // the task passed to this method. Just in case there are no non-started tasks, submit another + // task to run high-priority tasks. + submit(() -> {}); + } + + /** + * Cancels remaining tasks, runs the given release task, and shuts down the background thread. + * + * @param releaseTask A {@link FrameProcessingTask} to execute before shutting down the background + * thread. + * @param releaseWaitTimeMs How long to wait for the release task to terminate, in milliseconds. + * @throws InterruptedException If interrupted while releasing resources. + */ + public void release(FrameProcessingTask releaseTask, long releaseWaitTimeMs) + throws InterruptedException { + shouldCancelTasks.getAndSet(true); + cancelNonStartedTasks(); + Future releaseFuture = wrapTaskAndSubmitToExecutorService(releaseTask); + singleThreadExecutorService.shutdown(); + try { + if (!singleThreadExecutorService.awaitTermination(releaseWaitTimeMs, MILLISECONDS)) { + listener.onFrameProcessingError(new FrameProcessingException("Release timed out")); + } + releaseFuture.get(); + } catch (ExecutionException e) { + listener.onFrameProcessingError(new FrameProcessingException(e)); + } + } + + private Future wrapTaskAndSubmitToExecutorService(FrameProcessingTask defaultPriorityTask) { + return singleThreadExecutorService.submit( + () -> { + try { + while (!highPriorityTasks.isEmpty()) { + highPriorityTasks.remove().run(); + } + defaultPriorityTask.run(); + removeFinishedFutures(); + } catch (FrameProcessingException | GlUtil.GlException | RuntimeException e) { + handleException(e); + } + }); + } + + private void cancelNonStartedTasks() { + while (!futures.isEmpty()) { + futures.remove().cancel(/* mayInterruptIfRunning= */ false); + } + } + + private void handleException(Exception exception) { + if (shouldCancelTasks.getAndSet(true)) { + // Ignore exception after cancelation as it can be caused by a previously reported exception + // that is the reason for the cancelation. + return; + } + listener.onFrameProcessingError(FrameProcessingException.from(exception)); + cancelNonStartedTasks(); + } + + private void removeFinishedFutures() { + while (!futures.isEmpty()) { + if (!futures.element().isDone()) { + return; + } + try { + futures.remove().get(); + } catch (ExecutionException impossible) { + // All exceptions are already caught in wrapTaskAndSubmitToExecutorService. + handleException(new IllegalStateException("Unexpected error", impossible)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + handleException(e); + } + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/GlEffect.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/GlEffect.java new file mode 100644 index 0000000000..0e48457521 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/GlEffect.java @@ -0,0 +1,40 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
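Illustrative sketch (editor's addition, not part of the patch) of the two submission paths on the FrameProcessingTaskExecutor completed above. The executor is package-private, so the example notionally lives in the same package; the wrapper class is invented and taskExecutor is assumed to be already constructed.

package com.google.android.exoplayer2.effect;

import com.google.android.exoplayer2.util.GlUtil;

final class TaskExecutorExample {
  /** Queues one default-priority and one high-priority task. */
  static void enqueue(FrameProcessingTaskExecutor taskExecutor) {
    taskExecutor.submit(() -> GlUtil.clearOutputFrame());
    // Runs before the task above if that task has not started yet; tasks of
    // equal priority run in FIFO order.
    taskExecutor.submitWithHighPriority(() -> {});
  }
}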
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.effect; + +import android.content.Context; +import com.google.android.exoplayer2.util.Effect; +import com.google.android.exoplayer2.util.FrameProcessingException; + +/** + * Interface for a video frame effect with a {@link GlTextureProcessor} implementation. + * + *
<p>
      Implementations contain information specifying the effect and can be {@linkplain + * #toGlTextureProcessor(Context, boolean) converted} to a {@link GlTextureProcessor} which applies + * the effect. + */ +public interface GlEffect extends Effect { + + /** + * Returns a {@link SingleFrameGlTextureProcessor} that applies the effect. + * + * @param context A {@link Context}. + * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be + * in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. + */ + GlTextureProcessor toGlTextureProcessor(Context context, boolean useHdr) + throws FrameProcessingException; +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/GlEffectsFrameProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/GlEffectsFrameProcessor.java new file mode 100644 index 0000000000..4d5ad5ee0e --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/GlEffectsFrameProcessor.java @@ -0,0 +1,399 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static com.google.common.collect.Iterables.getLast; + +import android.content.Context; +import android.opengl.EGLContext; +import android.opengl.EGLDisplay; +import android.view.Surface; +import androidx.annotation.Nullable; +import androidx.annotation.WorkerThread; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.util.DebugViewProvider; +import com.google.android.exoplayer2.util.Effect; +import com.google.android.exoplayer2.util.FrameInfo; +import com.google.android.exoplayer2.util.FrameProcessingException; +import com.google.android.exoplayer2.util.FrameProcessor; +import com.google.android.exoplayer2.util.GlUtil; +import com.google.android.exoplayer2.util.SurfaceInfo; +import com.google.android.exoplayer2.util.Util; +import com.google.android.exoplayer2.video.ColorInfo; +import com.google.common.collect.ImmutableList; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * A {@link FrameProcessor} implementation that applies {@link GlEffect} instances using OpenGL on a + * background thread. + */ +public final class GlEffectsFrameProcessor implements FrameProcessor { + + /** A factory for {@link GlEffectsFrameProcessor} instances. */ + public static class Factory implements FrameProcessor.Factory { + /** + * {@inheritDoc} + * + *
      All {@link Effect} instances must be {@link GlEffect} instances. + * + *
      Using HDR requires the {@code EXT_YUV_target} OpenGL extension. + */ + @Override + public GlEffectsFrameProcessor create( + Context context, + FrameProcessor.Listener listener, + List effects, + DebugViewProvider debugViewProvider, + ColorInfo colorInfo, + boolean releaseFramesAutomatically) + throws FrameProcessingException { + + ExecutorService singleThreadExecutorService = Util.newSingleThreadExecutor(THREAD_NAME); + + Future glFrameProcessorFuture = + singleThreadExecutorService.submit( + () -> + createOpenGlObjectsAndFrameProcessor( + context, + listener, + effects, + debugViewProvider, + colorInfo, + releaseFramesAutomatically, + singleThreadExecutorService)); + + try { + return glFrameProcessorFuture.get(); + } catch (ExecutionException e) { + throw new FrameProcessingException(e); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new FrameProcessingException(e); + } + } + } + + /** + * Creates the OpenGL context, surfaces, textures, and framebuffers, initializes {@link + * GlTextureProcessor} instances corresponding to the {@link GlEffect} instances, and returns a + * new {@code GlEffectsFrameProcessor}. + * + *
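The factory above creates every OpenGL object by submitting the work to the single-thread executor that will later run all GL commands, then blocks on the resulting Future. The same hand-off pattern in isolation (plain java.util.concurrent, hypothetical names, no GL involved) looks roughly like this:

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

final class CreateOnWorkerThreadSketch {
  // Creates an object on the worker thread that will later execute all of its commands,
  // then blocks the calling thread until the object is ready.
  static <T> T createOnWorkerThread(ExecutorService worker, Callable<T> factory) throws Exception {
    Future<T> pending = worker.submit(factory);
    return pending.get(); // Propagates InterruptedException/ExecutionException to the caller.
  }

  public static void main(String[] args) throws Exception {
    ExecutorService glThread = Executors.newSingleThreadExecutor();
    String result =
        createOnWorkerThread(glThread, () -> "created on " + Thread.currentThread().getName());
    System.out.println(result);
    glThread.shutdown();
  }
}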
      All {@link Effect} instances must be {@link GlEffect} instances. + * + *
      This method must be executed using the {@code singleThreadExecutorService}, as later OpenGL + * commands will be called on that thread. + */ + @WorkerThread + private static GlEffectsFrameProcessor createOpenGlObjectsAndFrameProcessor( + Context context, + FrameProcessor.Listener listener, + List effects, + DebugViewProvider debugViewProvider, + ColorInfo colorInfo, + boolean releaseFramesAutomatically, + ExecutorService singleThreadExecutorService) + throws GlUtil.GlException, FrameProcessingException { + checkState(Thread.currentThread().getName().equals(THREAD_NAME)); + + // TODO(b/237674316): Delay initialization of things requiring the colorInfo, to + // configure based on the color info from the decoder output media format instead. + boolean useHdr = ColorInfo.isTransferHdr(colorInfo); + EGLDisplay eglDisplay = GlUtil.createEglDisplay(); + int[] configAttributes = + useHdr ? GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_1010102 : GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888; + EGLContext eglContext = GlUtil.createEglContext(eglDisplay, configAttributes); + GlUtil.createFocusedPlaceholderEglSurface(eglContext, eglDisplay, configAttributes); + + ImmutableList textureProcessors = + getGlTextureProcessorsForGlEffects( + context, + effects, + eglDisplay, + eglContext, + listener, + debugViewProvider, + colorInfo, + releaseFramesAutomatically); + FrameProcessingTaskExecutor frameProcessingTaskExecutor = + new FrameProcessingTaskExecutor(singleThreadExecutorService, listener); + chainTextureProcessorsWithListeners(textureProcessors, frameProcessingTaskExecutor, listener); + + return new GlEffectsFrameProcessor( + eglDisplay, + eglContext, + frameProcessingTaskExecutor, + textureProcessors, + releaseFramesAutomatically); + } + + /** + * Combines consecutive {@link GlMatrixTransformation} and {@link RgbMatrix} instances into a + * single {@link MatrixTextureProcessor} and converts all other {@link GlEffect} instances to + * separate {@link GlTextureProcessor} instances. + * + *
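To make the batching performed by getGlTextureProcessorsForGlEffects concrete, here is a hypothetical effect chain (the helper class is invented for this sketch); the comments describe how the list would be grouped into processors:

import com.google.android.exoplayer2.effect.GlMatrixTransformation;
import com.google.android.exoplayer2.effect.HslAdjustment;
import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.common.collect.ImmutableList;

final class EffectGroupingSketch {
  static ImmutableList<Effect> exampleChain() {
    // A matrix effect that leaves the frame unchanged, used only to illustrate grouping.
    GlMatrixTransformation identity = (long presentationTimeUs) -> GlUtil.create4x4IdentityMatrix();
    return ImmutableList.of(
        identity, // folded into the external-sampling MatrixTextureProcessor
        new HslAdjustment.Builder().adjustSaturation(25).build(), // becomes its own HslProcessor
        identity); // still pending at the end, so it is applied by FinalMatrixTextureProcessorWrapper
  }
}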
      All {@link Effect} instances must be {@link GlEffect} instances. + * + * @return A non-empty list of {@link GlTextureProcessor} instances to apply in the given order. + * The first is an {@link ExternalTextureProcessor} and the last is a {@link + * FinalMatrixTextureProcessorWrapper}. + */ + private static ImmutableList getGlTextureProcessorsForGlEffects( + Context context, + List effects, + EGLDisplay eglDisplay, + EGLContext eglContext, + FrameProcessor.Listener listener, + DebugViewProvider debugViewProvider, + ColorInfo colorInfo, + boolean releaseFramesAutomatically) + throws FrameProcessingException { + ImmutableList.Builder textureProcessorListBuilder = + new ImmutableList.Builder<>(); + ImmutableList.Builder matrixTransformationListBuilder = + new ImmutableList.Builder<>(); + ImmutableList.Builder rgbMatrixListBuilder = new ImmutableList.Builder<>(); + boolean sampleFromExternalTexture = true; + for (int i = 0; i < effects.size(); i++) { + Effect effect = effects.get(i); + checkArgument(effect instanceof GlEffect, "GlEffectsFrameProcessor only supports GlEffects"); + GlEffect glEffect = (GlEffect) effect; + // The following logic may change the order of the RgbMatrix and GlMatrixTransformation + // effects. This does not influence the output since RgbMatrix only changes the individual + // pixels and does not take any location in account, which the GlMatrixTransformation + // may change. + if (glEffect instanceof GlMatrixTransformation) { + matrixTransformationListBuilder.add((GlMatrixTransformation) glEffect); + continue; + } + if (glEffect instanceof RgbMatrix) { + rgbMatrixListBuilder.add((RgbMatrix) glEffect); + continue; + } + ImmutableList matrixTransformations = + matrixTransformationListBuilder.build(); + ImmutableList rgbMatrices = rgbMatrixListBuilder.build(); + if (!matrixTransformations.isEmpty() || !rgbMatrices.isEmpty() || sampleFromExternalTexture) { + MatrixTextureProcessor matrixTextureProcessor; + if (sampleFromExternalTexture) { + matrixTextureProcessor = + MatrixTextureProcessor.createWithExternalSamplerApplyingEotf( + context, matrixTransformations, rgbMatrices, colorInfo); + } else { + matrixTextureProcessor = + MatrixTextureProcessor.create( + context, matrixTransformations, rgbMatrices, ColorInfo.isTransferHdr(colorInfo)); + } + textureProcessorListBuilder.add(matrixTextureProcessor); + matrixTransformationListBuilder = new ImmutableList.Builder<>(); + rgbMatrixListBuilder = new ImmutableList.Builder<>(); + sampleFromExternalTexture = false; + } + textureProcessorListBuilder.add( + glEffect.toGlTextureProcessor(context, ColorInfo.isTransferHdr(colorInfo))); + } + + textureProcessorListBuilder.add( + new FinalMatrixTextureProcessorWrapper( + context, + eglDisplay, + eglContext, + matrixTransformationListBuilder.build(), + rgbMatrixListBuilder.build(), + listener, + debugViewProvider, + sampleFromExternalTexture, + colorInfo, + releaseFramesAutomatically)); + return textureProcessorListBuilder.build(); + } + + /** + * Chains the given {@link GlTextureProcessor} instances using {@link + * ChainingGlTextureProcessorListener} instances. 
+ */ + private static void chainTextureProcessorsWithListeners( + ImmutableList textureProcessors, + FrameProcessingTaskExecutor frameProcessingTaskExecutor, + FrameProcessor.Listener frameProcessorListener) { + for (int i = 0; i < textureProcessors.size() - 1; i++) { + GlTextureProcessor producingGlTextureProcessor = textureProcessors.get(i); + GlTextureProcessor consumingGlTextureProcessor = textureProcessors.get(i + 1); + ChainingGlTextureProcessorListener chainingGlTextureProcessorListener = + new ChainingGlTextureProcessorListener( + producingGlTextureProcessor, + consumingGlTextureProcessor, + frameProcessingTaskExecutor); + producingGlTextureProcessor.setOutputListener(chainingGlTextureProcessorListener); + producingGlTextureProcessor.setErrorListener(frameProcessorListener::onFrameProcessingError); + consumingGlTextureProcessor.setInputListener(chainingGlTextureProcessorListener); + } + } + + private static final String THREAD_NAME = "Effect:GlThread"; + private static final long RELEASE_WAIT_TIME_MS = 100; + + private final EGLDisplay eglDisplay; + private final EGLContext eglContext; + private final FrameProcessingTaskExecutor frameProcessingTaskExecutor; + private final ExternalTextureManager inputExternalTextureManager; + private final Surface inputSurface; + private final boolean releaseFramesAutomatically; + private final FinalMatrixTextureProcessorWrapper finalTextureProcessorWrapper; + private final ImmutableList allTextureProcessors; + + private @MonotonicNonNull FrameInfo nextInputFrameInfo; + private boolean inputStreamEnded; + /** + * Offset compared to original media presentation time that has been added to incoming frame + * timestamps, in microseconds. + */ + private long previousStreamOffsetUs; + + private GlEffectsFrameProcessor( + EGLDisplay eglDisplay, + EGLContext eglContext, + FrameProcessingTaskExecutor frameProcessingTaskExecutor, + ImmutableList textureProcessors, + boolean releaseFramesAutomatically) + throws FrameProcessingException { + + this.eglDisplay = eglDisplay; + this.eglContext = eglContext; + this.frameProcessingTaskExecutor = frameProcessingTaskExecutor; + this.releaseFramesAutomatically = releaseFramesAutomatically; + + checkState(!textureProcessors.isEmpty()); + checkState(textureProcessors.get(0) instanceof ExternalTextureProcessor); + checkState(getLast(textureProcessors) instanceof FinalMatrixTextureProcessorWrapper); + ExternalTextureProcessor inputExternalTextureProcessor = + (ExternalTextureProcessor) textureProcessors.get(0); + inputExternalTextureManager = + new ExternalTextureManager(inputExternalTextureProcessor, frameProcessingTaskExecutor); + inputExternalTextureProcessor.setInputListener(inputExternalTextureManager); + inputSurface = new Surface(inputExternalTextureManager.getSurfaceTexture()); + finalTextureProcessorWrapper = (FinalMatrixTextureProcessorWrapper) getLast(textureProcessors); + allTextureProcessors = textureProcessors; + previousStreamOffsetUs = C.TIME_UNSET; + } + + @Override + public Surface getInputSurface() { + return inputSurface; + } + + @Override + public void setInputFrameInfo(FrameInfo inputFrameInfo) { + nextInputFrameInfo = adjustForPixelWidthHeightRatio(inputFrameInfo); + + if (nextInputFrameInfo.streamOffsetUs != previousStreamOffsetUs) { + finalTextureProcessorWrapper.appendStream(nextInputFrameInfo.streamOffsetUs); + previousStreamOffsetUs = nextInputFrameInfo.streamOffsetUs; + } + } + + @Override + public void registerInputFrame() { + checkState(!inputStreamEnded); + checkStateNotNull( + 
nextInputFrameInfo, "setInputFrameInfo must be called before registering input frames"); + + inputExternalTextureManager.registerInputFrame(nextInputFrameInfo); + } + + @Override + public int getPendingInputFrameCount() { + return inputExternalTextureManager.getPendingFrameCount(); + } + + @Override + public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) { + finalTextureProcessorWrapper.setOutputSurfaceInfo(outputSurfaceInfo); + } + + @Override + public void releaseOutputFrame(long releaseTimeNs) { + checkState( + !releaseFramesAutomatically, + "Calling this method is not allowed when releaseFramesAutomatically is enabled"); + frameProcessingTaskExecutor.submitWithHighPriority( + () -> finalTextureProcessorWrapper.releaseOutputFrame(releaseTimeNs)); + } + + @Override + public void signalEndOfInput() { + checkState(!inputStreamEnded); + inputStreamEnded = true; + frameProcessingTaskExecutor.submit(inputExternalTextureManager::signalEndOfInput); + } + + @Override + public void release() { + try { + frameProcessingTaskExecutor.release( + /* releaseTask= */ this::releaseTextureProcessorsAndDestroyGlContext, + RELEASE_WAIT_TIME_MS); + } catch (InterruptedException unexpected) { + Thread.currentThread().interrupt(); + throw new IllegalStateException(unexpected); + } + inputExternalTextureManager.release(); + inputSurface.release(); + } + + /** + * Expands or shrinks the frame based on the {@link FrameInfo#pixelWidthHeightRatio} and returns a + * new {@link FrameInfo} instance with scaled dimensions and {@link + * FrameInfo#pixelWidthHeightRatio} of {@code 1}. + */ + private FrameInfo adjustForPixelWidthHeightRatio(FrameInfo frameInfo) { + if (frameInfo.pixelWidthHeightRatio > 1f) { + return new FrameInfo( + (int) (frameInfo.width * frameInfo.pixelWidthHeightRatio), + frameInfo.height, + /* pixelWidthHeightRatio= */ 1, + frameInfo.streamOffsetUs); + } else if (frameInfo.pixelWidthHeightRatio < 1f) { + return new FrameInfo( + frameInfo.width, + (int) (frameInfo.height / frameInfo.pixelWidthHeightRatio), + /* pixelWidthHeightRatio= */ 1, + frameInfo.streamOffsetUs); + } else { + return frameInfo; + } + } + + /** + * Releases the {@link GlTextureProcessor} instances and destroys the OpenGL context. + * + *
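A quick numeric check of the pixel-aspect-ratio folding performed by adjustForPixelWidthHeightRatio above (standalone sketch, example values chosen arbitrarily):

final class PixelAspectRatioSketch {
  public static void main(String[] args) {
    int width = 720;
    int height = 480;
    float pixelWidthHeightRatio = 1.5f; // anamorphic input: pixels are 1.5x wider than tall

    // Mirrors adjustForPixelWidthHeightRatio: fold the ratio into the dimensions so that
    // downstream texture processors can assume square pixels (ratio == 1).
    if (pixelWidthHeightRatio > 1f) {
      width = (int) (width * pixelWidthHeightRatio); // 720 * 1.5 = 1080
    } else if (pixelWidthHeightRatio < 1f) {
      height = (int) (height / pixelWidthHeightRatio);
    }
    System.out.println(width + "x" + height); // prints 1080x480
  }
}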
      This method must be called on the {@linkplain #THREAD_NAME background thread}. + */ + @WorkerThread + private void releaseTextureProcessorsAndDestroyGlContext() + throws GlUtil.GlException, FrameProcessingException { + for (int i = 0; i < allTextureProcessors.size(); i++) { + allTextureProcessors.get(i).release(); + } + GlUtil.destroyEglContext(eglDisplay, eglContext); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/GlMatrixTransformation.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/GlMatrixTransformation.java new file mode 100644 index 0000000000..4aebabb431 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/GlMatrixTransformation.java @@ -0,0 +1,62 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.effect; + +import android.content.Context; +import android.opengl.Matrix; +import android.util.Pair; +import com.google.android.exoplayer2.util.FrameProcessingException; +import com.google.common.collect.ImmutableList; + +/** + * Specifies a 4x4 transformation {@link Matrix} to apply in the vertex shader for each frame. + * + *
      The matrix is applied to points given in normalized device coordinates (-1 to 1 on x, y, and z + * axes). Transformed pixels that are moved outside of the normal device coordinate range are + * clipped. + * + *
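For example, a GlMatrixTransformation that shrinks every frame to half size around the NDC origin could be written as follows (hypothetical class, not part of the patch); only getGlMatrixArray needs to be implemented because configure and toGlTextureProcessor have defaults in the interface declared just below:

import android.opengl.Matrix;
import com.google.android.exoplayer2.effect.GlMatrixTransformation;

public final class HalfSizeTransformation implements GlMatrixTransformation {
  @Override
  public float[] getGlMatrixArray(long presentationTimeUs) {
    float[] matrix = new float[16];
    Matrix.setIdentityM(matrix, /* smOffset= */ 0);
    // Scale x and y by 0.5 in normalized device coordinates; the frame shrinks towards the center.
    Matrix.scaleM(matrix, /* mOffset= */ 0, /* x= */ 0.5f, /* y= */ 0.5f, /* z= */ 1f);
    return matrix;
  }
}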
      Output frame pixels outside of the transformed input frame will be black, with alpha = 0 if + * applicable. + */ +public interface GlMatrixTransformation extends GlEffect { + /** + * Configures the input and output dimensions. + * + *
      Must be called before {@link #getGlMatrixArray(long)}. + * + * @param inputWidth The input frame width, in pixels. + * @param inputHeight The input frame height, in pixels. + * @return The output frame width and height, in pixels. + */ + default Pair configure(int inputWidth, int inputHeight) { + return Pair.create(inputWidth, inputHeight); + } + + /** + * Returns the 4x4 transformation {@link Matrix} to apply to the frame with the given timestamp. + */ + float[] getGlMatrixArray(long presentationTimeUs); + + @Override + default SingleFrameGlTextureProcessor toGlTextureProcessor(Context context, boolean useHdr) + throws FrameProcessingException { + return MatrixTextureProcessor.create( + context, + /* matrixTransformations= */ ImmutableList.of(this), + /* rgbMatrices= */ ImmutableList.of(), + useHdr); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/GlTextureProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/GlTextureProcessor.java new file mode 100644 index 0000000000..5302be764d --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/GlTextureProcessor.java @@ -0,0 +1,160 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.effect; + +import com.google.android.exoplayer2.util.FrameProcessingException; + +/** + * Processes frames from one OpenGL 2D texture to another. + * + *
      The {@code GlTextureProcessor} consumes input frames it accepts via {@link + * #queueInputFrame(TextureInfo, long)} and surrenders each texture back to the caller via its + * {@linkplain InputListener#onInputFrameProcessed(TextureInfo) listener} once the texture's + * contents have been processed. + * + *
      The {@code GlTextureProcessor} produces output frames asynchronously and notifies its owner + * when they are available via its {@linkplain OutputListener#onOutputFrameAvailable(TextureInfo, + * long) listener}. The {@code GlTextureProcessor} instance's owner must surrender the texture back + * to the {@code GlTextureProcessor} via {@link #releaseOutputFrame(TextureInfo)} when it has + * finished processing it. + * + *
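A minimal sketch of the output handshake just described (wiring class invented for illustration; the TextureInfo import location is assumed): the owner takes each output texture and immediately hands it back, which is the simplest valid behaviour.

import com.google.android.exoplayer2.effect.GlTextureProcessor;
import com.google.android.exoplayer2.effect.TextureInfo;

final class ImmediateReleaseWiring {
  static void wire(GlTextureProcessor processor) {
    processor.setOutputListener(
        new GlTextureProcessor.OutputListener() {
          @Override
          public void onOutputFrameAvailable(TextureInfo outputTexture, long presentationTimeUs) {
            // A real consumer would read or copy the texture first; releasing promptly keeps the
            // processor supplied with reusable output textures.
            processor.releaseOutputFrame(outputTexture);
          }
        });
  }
}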
      {@code GlTextureProcessor} implementations can choose to produce output frames before + * receiving input frames or process several input frames before producing an output frame. However, + * {@code GlTextureProcessor} implementations cannot assume that they will receive more than one + * input frame at a time, so they must process each input frame they accept even if they cannot + * produce output yet. + * + *
+ * <p>The methods in this interface must be called on the thread that owns the parent OpenGL
+ * context. If the implementation uses another OpenGL context, e.g., on another thread, it must
+ * configure it to share data with the context of the thread that the interface methods are called
+ * on.
+ */
+public interface GlTextureProcessor {
+
+  /**
+   * Listener for input-related frame processing events.
+   *
+   * <p>
      This listener can be called from any thread. + */ + interface InputListener { + /** + * Called when the {@link GlTextureProcessor} is ready to accept another input frame. + * + *
      For each time this method is called, {@link #queueInputFrame(TextureInfo, long)} can be + * called once. + */ + default void onReadyToAcceptInputFrame() {} + + /** + * Called when the {@link GlTextureProcessor} has processed an input frame. + * + *
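A sketch of the pacing rule above (class name invented; the TextureInfo import location is assumed, and a real implementation would also need to be thread safe because listeners can be called from any thread): at most one pending frame is queued per readiness callback.

import android.util.Pair;
import com.google.android.exoplayer2.effect.GlTextureProcessor;
import com.google.android.exoplayer2.effect.TextureInfo;
import java.util.ArrayDeque;
import java.util.Queue;

final class OneFramePerReadySignal implements GlTextureProcessor.InputListener {
  private final GlTextureProcessor processor;
  private final Queue<Pair<TextureInfo, Long>> pendingFrames = new ArrayDeque<>();

  OneFramePerReadySignal(GlTextureProcessor processor) {
    this.processor = processor;
  }

  void submit(TextureInfo texture, long presentationTimeUs) {
    pendingFrames.add(Pair.create(texture, presentationTimeUs));
  }

  @Override
  public void onReadyToAcceptInputFrame() {
    Pair<TextureInfo, Long> frame = pendingFrames.poll();
    if (frame != null) {
      // Queue exactly one frame per readiness signal, as required by the contract above.
      processor.queueInputFrame(frame.first, frame.second);
    }
  }
}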
      The implementation shall not assume the {@link GlTextureProcessor} is {@linkplain + * #onReadyToAcceptInputFrame ready to accept another input frame} when this method is called. + * + * @param inputTexture The {@link TextureInfo} that was used to {@linkplain + * #queueInputFrame(TextureInfo, long) queue} the input frame. + */ + default void onInputFrameProcessed(TextureInfo inputTexture) {} + } + + /** + * Listener for output-related frame processing events. + * + *
      This listener can be called from any thread. + */ + interface OutputListener { + /** + * Called when the {@link GlTextureProcessor} has produced an output frame. + * + *
      After the listener's owner has processed the output frame, it must call {@link + * #releaseOutputFrame(TextureInfo)}. The output frame should be released as soon as possible, + * as there is no guarantee that the {@link GlTextureProcessor} will produce further output + * frames before this output frame is released. + * + * @param outputTexture A {@link TextureInfo} describing the texture containing the output + * frame. + * @param presentationTimeUs The presentation timestamp of the output frame, in microseconds. + */ + default void onOutputFrameAvailable(TextureInfo outputTexture, long presentationTimeUs) {} + + /** + * Called when the {@link GlTextureProcessor} will not produce further output frames belonging + * to the current output stream. + */ + default void onCurrentOutputStreamEnded() {} + } + + /** + * Listener for frame processing errors. + * + *
      This listener can be called from any thread. + */ + interface ErrorListener { + /** + * Called when an exception occurs during asynchronous frame processing. + * + *
      If an error occurred, consuming and producing further frames will not work as expected and + * the {@link GlTextureProcessor} should be released. + */ + void onFrameProcessingError(FrameProcessingException e); + } + + /** Sets the {@link InputListener}. */ + void setInputListener(InputListener inputListener); + + /** Sets the {@link OutputListener}. */ + void setOutputListener(OutputListener outputListener); + + /** Sets the {@link ErrorListener}. */ + void setErrorListener(ErrorListener errorListener); + + /** + * Processes an input frame if possible. + * + *
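Following the guidance above, an owner can install an error listener that logs the failure and releases the processor (sketch only; the class name is invented and a production implementation would route release through its own threading model):

import android.util.Log;
import com.google.android.exoplayer2.effect.GlTextureProcessor;
import com.google.android.exoplayer2.util.FrameProcessingException;

final class ReleaseOnErrorWiring {
  static void wire(GlTextureProcessor processor) {
    processor.setErrorListener(
        (FrameProcessingException exception) -> {
          Log.e("GlTextureProcessor", "Frame processing failed", exception);
          try {
            processor.release(); // Further frames cannot be processed reliably after an error.
          } catch (FrameProcessingException releaseException) {
            Log.e("GlTextureProcessor", "Releasing the processor also failed", releaseException);
          }
        });
  }
}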
      The {@code GlTextureProcessor} owns the accepted frame until it calls {@link + * InputListener#onInputFrameProcessed(TextureInfo)}. The caller should not overwrite or release + * the texture before the {@code GlTextureProcessor} has finished processing it. + * + *
      This method must only be called when the {@code GlTextureProcessor} can {@linkplain + * InputListener#onReadyToAcceptInputFrame() accept an input frame}. + * + * @param inputTexture A {@link TextureInfo} describing the texture containing the input frame. + * @param presentationTimeUs The presentation timestamp of the input frame, in microseconds. + */ + void queueInputFrame(TextureInfo inputTexture, long presentationTimeUs); + + /** + * Notifies the texture processor that the frame on the given output texture is no longer used and + * can be overwritten. + */ + void releaseOutputFrame(TextureInfo outputTexture); + + /** + * Notifies the {@code GlTextureProcessor} that no further input frames belonging to the current + * input stream will be queued. + * + *
      Input frames that are queued after this method is called belong to a different input stream, + * so presentation timestamps may reset to start from a smaller presentation timestamp than the + * last frame of the previous input stream. + */ + void signalEndOfCurrentInputStream(); + + /** + * Releases all resources. + * + * @throws FrameProcessingException If an error occurs while releasing resources. + */ + void release() throws FrameProcessingException; +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/HslAdjustment.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/HslAdjustment.java new file mode 100644 index 0000000000..9b99830b9a --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/HslAdjustment.java @@ -0,0 +1,118 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import android.content.Context; +import com.google.android.exoplayer2.util.FrameProcessingException; +import com.google.errorprone.annotations.CanIgnoreReturnValue; + +/** Adjusts the HSL (Hue, Saturation, and Lightness) of a frame. */ +public class HslAdjustment implements GlEffect { + + /** A builder for {@code HslAdjustment} instances. */ + public static final class Builder { + private float hueAdjustment; + private float saturationAdjustment; + private float lightnessAdjustment; + + /** Creates a new instance with the default values. */ + public Builder() {} + + /** + * Rotates the hue of the frame by {@code hueAdjustmentDegrees}. + * + *
      The Hue of the frame is defined in the interval of [0, 360] degrees. The actual degrees of + * hue adjustment applied is {@code hueAdjustmentDegrees % 360}. + * + * @param hueAdjustmentDegrees The hue adjustment in rotation degrees. The default value is + * {@code 0}, which means no change is applied. + */ + @CanIgnoreReturnValue + public Builder adjustHue(float hueAdjustmentDegrees) { + hueAdjustment = hueAdjustmentDegrees % 360; + return this; + } + + /** + * Adjusts the saturation of the frame by {@code saturationAdjustment}. + * + *
      Saturation is defined in the interval of [0, 100] where a saturation of {@code 0} will + * generate a grayscale frame and a saturation of {@code 100} has a maximum separation between + * the colors. + * + * @param saturationAdjustment The difference of how much the saturation will be adjusted in + * either direction. Needs to be in the interval of [-100, 100] and the default value is + * {@code 0}, which means no change is applied. + */ + @CanIgnoreReturnValue + public Builder adjustSaturation(float saturationAdjustment) { + checkArgument( + -100 <= saturationAdjustment && saturationAdjustment <= 100, + "Can adjust the saturation by only 100 in either direction, but provided " + + saturationAdjustment); + this.saturationAdjustment = saturationAdjustment; + return this; + } + + /** + * Adjusts the lightness of the frame by {@code lightnessAdjustment}. + * + *
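Putting the builder rules above together, a mild "warmer" look could be configured like this (sketch; class name and values are arbitrary):

import com.google.android.exoplayer2.effect.HslAdjustment;

final class WarmerLookSketch {
  static HslAdjustment warmerLook() {
    return new HslAdjustment.Builder()
        .adjustHue(390)        // stored as 390 % 360 = 30 degrees of hue rotation
        .adjustSaturation(15)  // must lie in [-100, 100], otherwise the builder throws
        .build();
  }
}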
      Lightness is defined in the interval of [0, 100] where a lightness of {@code 0} is a black + * frame and a lightness of {@code 100} is a white frame. + * + * @param lightnessAdjustment The difference by how much the lightness will be adjusted in + * either direction. Needs to be in the interval of [-100, 100] and the default value is + * {@code 0}, which means no change is applied. + */ + @CanIgnoreReturnValue + public Builder adjustLightness(float lightnessAdjustment) { + checkArgument( + -100 <= lightnessAdjustment && lightnessAdjustment <= 100, + "Can adjust the lightness by only 100 in either direction, but provided " + + lightnessAdjustment); + this.lightnessAdjustment = lightnessAdjustment; + return this; + } + + /** Creates a new {@link HslAdjustment} instance. */ + public HslAdjustment build() { + return new HslAdjustment(hueAdjustment, saturationAdjustment, lightnessAdjustment); + } + } + + /** Indicates the hue adjustment in degrees. */ + public final float hueAdjustmentDegrees; + /** Indicates the saturation adjustment. */ + public final float saturationAdjustment; + /** Indicates the lightness adjustment. */ + public final float lightnessAdjustment; + + private HslAdjustment( + float hueAdjustmentDegrees, float saturationAdjustment, float lightnessAdjustment) { + this.hueAdjustmentDegrees = hueAdjustmentDegrees; + this.saturationAdjustment = saturationAdjustment; + this.lightnessAdjustment = lightnessAdjustment; + } + + @Override + public SingleFrameGlTextureProcessor toGlTextureProcessor(Context context, boolean useHdr) + throws FrameProcessingException { + return new HslProcessor(context, /* hslAdjustment= */ this, useHdr); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/HslProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/HslProcessor.java new file mode 100644 index 0000000000..9132bc0fdc --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/HslProcessor.java @@ -0,0 +1,93 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import android.content.Context; +import android.opengl.GLES20; +import android.util.Pair; +import com.google.android.exoplayer2.util.FrameProcessingException; +import com.google.android.exoplayer2.util.GlProgram; +import com.google.android.exoplayer2.util.GlUtil; +import java.io.IOException; + +/** Applies the {@link HslAdjustment} to each frame in the fragment shader. */ +/* package */ final class HslProcessor extends SingleFrameGlTextureProcessor { + private static final String VERTEX_SHADER_PATH = "shaders/vertex_shader_transformation_es2.glsl"; + private static final String FRAGMENT_SHADER_PATH = "shaders/fragment_shader_hsl_es2.glsl"; + + private final GlProgram glProgram; + + /** + * Creates a new instance. + * + * @param context The {@link Context}. 
+ * @param hslAdjustment The {@link HslAdjustment} to apply to each frame in order. + * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be + * in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. + * @throws FrameProcessingException If a problem occurs while reading shader files. + */ + public HslProcessor(Context context, HslAdjustment hslAdjustment, boolean useHdr) + throws FrameProcessingException { + super(useHdr); + // TODO(b/241241680): Check if HDR <-> HSL works the same or not. + checkArgument(!useHdr, "HDR is not yet supported."); + + try { + glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH); + } catch (IOException | GlUtil.GlException e) { + throw new FrameProcessingException(e); + } + + // Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y. + glProgram.setBufferAttribute( + "aFramePosition", + GlUtil.getNormalizedCoordinateBounds(), + GlUtil.HOMOGENEOUS_COORDINATE_VECTOR_SIZE); + + float[] identityMatrix = GlUtil.create4x4IdentityMatrix(); + glProgram.setFloatsUniform("uTransformationMatrix", identityMatrix); + glProgram.setFloatsUniform("uTexTransformationMatrix", identityMatrix); + + // OpenGL operates in a [0, 1] unit range and thus we transform the HSL intervals into + // the unit interval as well. The hue is defined in the [0, 360] interval and saturation + // and lightness in the [0, 100] interval. + glProgram.setFloatUniform("uHueAdjustmentDegrees", hslAdjustment.hueAdjustmentDegrees / 360); + glProgram.setFloatUniform("uSaturationAdjustment", hslAdjustment.saturationAdjustment / 100); + glProgram.setFloatUniform("uLightnessAdjustment", hslAdjustment.lightnessAdjustment / 100); + } + + @Override + public Pair configure(int inputWidth, int inputHeight) { + return Pair.create(inputWidth, inputHeight); + } + + @Override + public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { + try { + glProgram.use(); + glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0); + glProgram.bindAttributesAndUniforms(); + + // The four-vertex triangle strip forms a quad. + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4); + } catch (GlUtil.GlException e) { + throw new FrameProcessingException(e, presentationTimeUs); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/MatrixTextureProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/MatrixTextureProcessor.java new file mode 100644 index 0000000000..673767e63c --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/MatrixTextureProcessor.java @@ -0,0 +1,528 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkState; + +import android.content.Context; +import android.opengl.GLES20; +import android.opengl.Matrix; +import android.util.Pair; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.util.FrameProcessingException; +import com.google.android.exoplayer2.util.GlProgram; +import com.google.android.exoplayer2.util.GlUtil; +import com.google.android.exoplayer2.video.ColorInfo; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +/** + * Applies a sequence of {@link MatrixTransformation MatrixTransformations} in the vertex shader and + * a sequence of {@link RgbMatrix RgbMatrices} in the fragment shader. Copies input pixels into an + * output frame based on their locations after applying the sequence of transformation matrices. + * + *
      {@link MatrixTransformation} operations are done on normalized device coordinates (-1 to 1 on + * x, y, and z axes). Transformed vertices that are moved outside of this range after any of the + * transformation matrices are clipped to the NDC range. + * + *
      After applying all {@link RgbMatrix} instances, color values are clamped to the limits of the + * color space (e.g. BT.709 for SDR). Intermediate results are not clamped. + * + *
      The background color of the output frame will be (r=0, g=0, b=0, a=0). + * + *
      Can copy frames from an external texture and apply color transformations for HDR if needed. + */ +@SuppressWarnings("FunctionalInterfaceClash") // b/228192298 +/* package */ final class MatrixTextureProcessor extends SingleFrameGlTextureProcessor + implements ExternalTextureProcessor { + + private static final String VERTEX_SHADER_TRANSFORMATION_PATH = + "shaders/vertex_shader_transformation_es2.glsl"; + private static final String VERTEX_SHADER_TRANSFORMATION_ES3_PATH = + "shaders/vertex_shader_transformation_es3.glsl"; + private static final String FRAGMENT_SHADER_TRANSFORMATION_PATH = + "shaders/fragment_shader_transformation_es2.glsl"; + private static final String FRAGMENT_SHADER_OETF_ES3_PATH = + "shaders/fragment_shader_oetf_es3.glsl"; + private static final String FRAGMENT_SHADER_TRANSFORMATION_SDR_OETF_ES2_PATH = + "shaders/fragment_shader_transformation_sdr_oetf_es2.glsl"; + private static final String FRAGMENT_SHADER_TRANSFORMATION_EXTERNAL_YUV_ES3_PATH = + "shaders/fragment_shader_transformation_external_yuv_es3.glsl"; + private static final String FRAGMENT_SHADER_TRANSFORMATION_SDR_EXTERNAL_PATH = + "shaders/fragment_shader_transformation_sdr_external_es2.glsl"; + private static final ImmutableList NDC_SQUARE = + ImmutableList.of( + new float[] {-1, -1, 0, 1}, + new float[] {-1, 1, 0, 1}, + new float[] {1, 1, 0, 1}, + new float[] {1, -1, 0, 1}); + + // YUV to RGB color transform coefficients can be calculated from the BT.2020 specification, by + // inverting the RGB to YUV equations, and scaling for limited range. + // https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2020-2-201510-I!!PDF-E.pdf + private static final float[] BT2020_FULL_RANGE_YUV_TO_RGB_COLOR_TRANSFORM_MATRIX = { + 1.0000f, 1.0000f, 1.0000f, + 0.0000f, -0.1646f, 1.8814f, + 1.4746f, -0.5714f, 0.0000f + }; + private static final float[] BT2020_LIMITED_RANGE_YUV_TO_RGB_COLOR_TRANSFORM_MATRIX = { + 1.1689f, 1.1689f, 1.1689f, + 0.0000f, -0.1881f, 2.1502f, + 1.6853f, -0.6530f, 0.0000f, + }; + + /** The {@link MatrixTransformation MatrixTransformations} to apply. */ + private final ImmutableList matrixTransformations; + /** The {@link RgbMatrix RgbMatrices} to apply. */ + private final ImmutableList rgbMatrices; + /** Whether the frame is in HDR or not. */ + private final boolean useHdr; + /** + * The transformation matrices provided by the {@link MatrixTransformation MatrixTransformations} + * for the most recent frame. + */ + private final float[][] transformationMatrixCache; + /** The RGB matrices provided by the {@link RgbMatrix RgbMatrices} for the most recent frame. */ + private final float[][] rgbMatrixCache; + /** + * The product of the {@link #transformationMatrixCache} for the most recent frame, to be applied + * in the vertex shader. + */ + private final float[] compositeTransformationMatrixArray; + /** + * The product of the {@link #rgbMatrixCache} for the most recent frame, to be applied in the + * fragment shader. + */ + private final float[] compositeRgbMatrixArray; + /** Matrix for storing an intermediate calculation result. */ + private final float[] tempResultMatrix; + + /** + * A polygon in the input space chosen such that no additional clipping is needed to keep vertices + * inside the NDC range when applying each of the {@link #matrixTransformations}. + * + *
      This means that this polygon and {@link #compositeTransformationMatrixArray} can be used + * instead of applying each of the {@link #matrixTransformations} to {@link #NDC_SQUARE} in + * separate shaders. + */ + private ImmutableList visiblePolygon; + + private final GlProgram glProgram; + + /** + * Creates a new instance. + * + *
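The accumulation of compositeTransformationMatrixArray described above boils down to repeated multiplyMM calls; a standalone sketch of that step (helper class invented, mirroring the update method later in this class):

import android.opengl.Matrix;

final class CompositeMatrixSketch {
  // Multiplies each transformation onto the running product, newest matrix on the left-hand side.
  static float[] composite(float[][] transformationMatrices) {
    float[] composite = new float[16];
    Matrix.setIdentityM(composite, /* smOffset= */ 0);
    float[] temp = new float[16];
    for (float[] transformation : transformationMatrices) {
      Matrix.multiplyMM(temp, 0, /* lhs= */ transformation, 0, /* rhs= */ composite, 0);
      System.arraycopy(temp, 0, composite, 0, /* length= */ 16);
    }
    return composite;
  }
}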
      Input and output are both intermediate optical/linear colors, and RGB BT.2020 if {@code + * useHdr} is {@code true} and RGB BT.709 if not. + * + * @param context The {@link Context}. + * @param matrixTransformations The {@link GlMatrixTransformation GlMatrixTransformations} to + * apply to each frame in order. Can be empty to apply no vertex transformations. + * @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be + * empty to apply no color transformations. + * @param useHdr Whether input and output colors are HDR. + * @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL + * operation fails or is unsupported. + */ + public static MatrixTextureProcessor create( + Context context, + List matrixTransformations, + List rgbMatrices, + boolean useHdr) + throws FrameProcessingException { + GlProgram glProgram = + createGlProgram( + context, VERTEX_SHADER_TRANSFORMATION_PATH, FRAGMENT_SHADER_TRANSFORMATION_PATH); + + // No transfer functions needed, because input and output are both optical colors. + // TODO(b/241902517): Add transfer functions since existing color filters may change the colors. + return new MatrixTextureProcessor( + glProgram, + ImmutableList.copyOf(matrixTransformations), + ImmutableList.copyOf(rgbMatrices), + useHdr); + } + + /** + * Creates a new instance. + * + *
      Input will be sampled from an external texture. The caller should use {@link + * #setTextureTransformMatrix(float[])} to provide the transformation matrix associated with the + * external texture. + * + *
      Applies the {@code electricalColorInfo} EOTF to convert from electrical color input, to + * intermediate optical {@link GlTextureProcessor} color output, before {@code + * matrixTransformations} and {@code rgbMatrices} are applied. + * + *
      Intermediate optical/linear colors are RGB BT.2020 if {@code electricalColorInfo} is + * {@linkplain ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not. + * + * @param context The {@link Context}. + * @param matrixTransformations The {@link GlMatrixTransformation GlMatrixTransformations} to + * apply to each frame in order. Can be empty to apply no vertex transformations. + * @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be + * empty to apply no color transformations. + * @param electricalColorInfo The electrical {@link ColorInfo} describing input colors. + * @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL + * operation fails or is unsupported. + */ + public static MatrixTextureProcessor createWithExternalSamplerApplyingEotf( + Context context, + List matrixTransformations, + List rgbMatrices, + ColorInfo electricalColorInfo) + throws FrameProcessingException { + boolean useHdr = ColorInfo.isTransferHdr(electricalColorInfo); + String vertexShaderFilePath = + useHdr ? VERTEX_SHADER_TRANSFORMATION_ES3_PATH : VERTEX_SHADER_TRANSFORMATION_PATH; + String fragmentShaderFilePath = + useHdr + ? FRAGMENT_SHADER_TRANSFORMATION_EXTERNAL_YUV_ES3_PATH + : FRAGMENT_SHADER_TRANSFORMATION_SDR_EXTERNAL_PATH; + GlProgram glProgram = createGlProgram(context, vertexShaderFilePath, fragmentShaderFilePath); + + if (useHdr) { + // In HDR editing mode the decoder output is sampled in YUV. + if (!GlUtil.isYuvTargetExtensionSupported()) { + throw new FrameProcessingException( + "The EXT_YUV_target extension is required for HDR editing input."); + } + glProgram.setFloatsUniform( + "uYuvToRgbColorTransform", + electricalColorInfo.colorRange == C.COLOR_RANGE_FULL + ? BT2020_FULL_RANGE_YUV_TO_RGB_COLOR_TRANSFORM_MATRIX + : BT2020_LIMITED_RANGE_YUV_TO_RGB_COLOR_TRANSFORM_MATRIX); + + @C.ColorTransfer int colorTransfer = electricalColorInfo.colorTransfer; + checkArgument( + colorTransfer == C.COLOR_TRANSFER_HLG || colorTransfer == C.COLOR_TRANSFER_ST2084); + glProgram.setIntUniform("uEotfColorTransfer", colorTransfer); + } else { + glProgram.setIntUniform("uApplyOetf", 0); + } + + return new MatrixTextureProcessor( + glProgram, + ImmutableList.copyOf(matrixTransformations), + ImmutableList.copyOf(rgbMatrices), + useHdr); + } + + /** + * Creates a new instance. + * + *
      Applies the {@code electricalColorInfo} OETF to convert from intermediate optical {@link + * GlTextureProcessor} color input, to electrical color output, after {@code + * matrixTransformations} and {@code rgbMatrices} are applied. + * + *
      Intermediate optical/linear colors are RGB BT.2020 if {@code electricalColorInfo} is + * {@linkplain ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not. + * + * @param context The {@link Context}. + * @param matrixTransformations The {@link GlMatrixTransformation GlMatrixTransformations} to + * apply to each frame in order. Can be empty to apply no vertex transformations. + * @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be + * empty to apply no color transformations. + * @param electricalColorInfo The electrical {@link ColorInfo} describing output colors. + * @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL + * operation fails or is unsupported. + */ + public static MatrixTextureProcessor createApplyingOetf( + Context context, + List matrixTransformations, + List rgbMatrices, + ColorInfo electricalColorInfo) + throws FrameProcessingException { + boolean useHdr = ColorInfo.isTransferHdr(electricalColorInfo); + String vertexShaderFilePath = + useHdr ? VERTEX_SHADER_TRANSFORMATION_ES3_PATH : VERTEX_SHADER_TRANSFORMATION_PATH; + String fragmentShaderFilePath = + useHdr ? FRAGMENT_SHADER_OETF_ES3_PATH : FRAGMENT_SHADER_TRANSFORMATION_SDR_OETF_ES2_PATH; + GlProgram glProgram = createGlProgram(context, vertexShaderFilePath, fragmentShaderFilePath); + + if (useHdr) { + @C.ColorTransfer int colorTransfer = electricalColorInfo.colorTransfer; + checkArgument( + colorTransfer == C.COLOR_TRANSFER_HLG || colorTransfer == C.COLOR_TRANSFER_ST2084); + glProgram.setIntUniform("uOetfColorTransfer", colorTransfer); + } + + return new MatrixTextureProcessor( + glProgram, + ImmutableList.copyOf(matrixTransformations), + ImmutableList.copyOf(rgbMatrices), + useHdr); + } + + /** + * Creates a new instance. + * + *
      Input will be sampled from an external texture. The caller should use {@link + * #setTextureTransformMatrix(float[])} to provide the transformation matrix associated with the + * external texture. + * + *
      Applies the EOTF, {@code matrixTransformations}, {@code rgbMatrices}, then the OETF, to + * convert from and to input and output electrical colors. + * + * @param context The {@link Context}. + * @param matrixTransformations The {@link GlMatrixTransformation GlMatrixTransformations} to + * apply to each frame in order. Can be empty to apply no vertex transformations. + * @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be + * empty to apply no color transformations. + * @param electricalColorInfo The electrical {@link ColorInfo} describing input and output colors. + * @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL + * operation fails or is unsupported. + */ + public static MatrixTextureProcessor createWithExternalSamplerApplyingEotfThenOetf( + Context context, + List matrixTransformations, + List rgbMatrices, + ColorInfo electricalColorInfo) + throws FrameProcessingException { + boolean useHdr = ColorInfo.isTransferHdr(electricalColorInfo); + String vertexShaderFilePath = + useHdr ? VERTEX_SHADER_TRANSFORMATION_ES3_PATH : VERTEX_SHADER_TRANSFORMATION_PATH; + String fragmentShaderFilePath = + useHdr + ? FRAGMENT_SHADER_TRANSFORMATION_EXTERNAL_YUV_ES3_PATH + : FRAGMENT_SHADER_TRANSFORMATION_SDR_EXTERNAL_PATH; + GlProgram glProgram = createGlProgram(context, vertexShaderFilePath, fragmentShaderFilePath); + + if (useHdr) { + // In HDR editing mode the decoder output is sampled in YUV. + if (!GlUtil.isYuvTargetExtensionSupported()) { + throw new FrameProcessingException( + "The EXT_YUV_target extension is required for HDR editing input."); + } + glProgram.setFloatsUniform( + "uYuvToRgbColorTransform", + electricalColorInfo.colorRange == C.COLOR_RANGE_FULL + ? BT2020_FULL_RANGE_YUV_TO_RGB_COLOR_TRANSFORM_MATRIX + : BT2020_LIMITED_RANGE_YUV_TO_RGB_COLOR_TRANSFORM_MATRIX); + + // No transfer functions needed, because the EOTF and OETF cancel out. + glProgram.setIntUniform("uEotfColorTransfer", Format.NO_VALUE); + } else { + glProgram.setIntUniform("uApplyOetf", 1); + } + + return new MatrixTextureProcessor( + glProgram, + ImmutableList.copyOf(matrixTransformations), + ImmutableList.copyOf(rgbMatrices), + useHdr); + } + + /** + * Creates a new instance. + * + * @param glProgram The {@link GlProgram}. + * @param matrixTransformations The {@link GlMatrixTransformation GlMatrixTransformations} to + * apply to each frame in order. Can be empty to apply no vertex transformations. + * @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be + * empty to apply no color transformations. + * @param useHdr Whether to process the input as an HDR signal. Using HDR requires the {@code + * EXT_YUV_target} OpenGL extension. 
+ */ + private MatrixTextureProcessor( + GlProgram glProgram, + ImmutableList matrixTransformations, + ImmutableList rgbMatrices, + boolean useHdr) { + super(useHdr); + this.glProgram = glProgram; + this.matrixTransformations = matrixTransformations; + this.rgbMatrices = rgbMatrices; + this.useHdr = useHdr; + + transformationMatrixCache = new float[matrixTransformations.size()][16]; + rgbMatrixCache = new float[rgbMatrices.size()][16]; + compositeTransformationMatrixArray = GlUtil.create4x4IdentityMatrix(); + compositeRgbMatrixArray = GlUtil.create4x4IdentityMatrix(); + tempResultMatrix = new float[16]; + visiblePolygon = NDC_SQUARE; + } + + private static GlProgram createGlProgram( + Context context, String vertexShaderFilePath, String fragmentShaderFilePath) + throws FrameProcessingException { + + GlProgram glProgram; + try { + glProgram = new GlProgram(context, vertexShaderFilePath, fragmentShaderFilePath); + } catch (IOException | GlUtil.GlException e) { + throw new FrameProcessingException(e); + } + + float[] identityMatrix = GlUtil.create4x4IdentityMatrix(); + glProgram.setFloatsUniform("uTexTransformationMatrix", identityMatrix); + return glProgram; + } + + @Override + public void setTextureTransformMatrix(float[] textureTransformMatrix) { + glProgram.setFloatsUniform("uTexTransformationMatrix", textureTransformMatrix); + } + + @Override + public Pair configure(int inputWidth, int inputHeight) { + return MatrixUtils.configureAndGetOutputSize(inputWidth, inputHeight, matrixTransformations); + } + + @Override + public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { + updateCompositeRgbaMatrixArray(presentationTimeUs); + updateCompositeTransformationMatrixAndVisiblePolygon(presentationTimeUs); + if (visiblePolygon.size() < 3) { + return; // Need at least three visible vertices for a triangle. + } + + try { + glProgram.use(); + glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0); + glProgram.setFloatsUniform("uTransformationMatrix", compositeTransformationMatrixArray); + glProgram.setFloatsUniform("uRgbMatrix", compositeRgbMatrixArray); + glProgram.setBufferAttribute( + "aFramePosition", + GlUtil.createVertexBuffer(visiblePolygon), + GlUtil.HOMOGENEOUS_COORDINATE_VECTOR_SIZE); + glProgram.bindAttributesAndUniforms(); + GLES20.glDrawArrays( + GLES20.GL_TRIANGLE_FAN, /* first= */ 0, /* count= */ visiblePolygon.size()); + GlUtil.checkGlError(); + } catch (GlUtil.GlException e) { + throw new FrameProcessingException(e, presentationTimeUs); + } + } + + @Override + public void release() throws FrameProcessingException { + super.release(); + try { + glProgram.delete(); + } catch (GlUtil.GlException e) { + throw new FrameProcessingException(e); + } + } + + /** + * Updates {@link #compositeTransformationMatrixArray} and {@link #visiblePolygon} based on the + * given frame timestamp. + */ + private void updateCompositeTransformationMatrixAndVisiblePolygon(long presentationTimeUs) { + float[][] matricesAtPresentationTime = new float[matrixTransformations.size()][16]; + for (int i = 0; i < matrixTransformations.size(); i++) { + matricesAtPresentationTime[i] = + matrixTransformations.get(i).getGlMatrixArray(presentationTimeUs); + } + + if (!updateMatrixCache(transformationMatrixCache, matricesAtPresentationTime)) { + return; + } + + // Compute the compositeTransformationMatrix and transform and clip the visiblePolygon for each + // MatrixTransformation's matrix. 
+ GlUtil.setToIdentity(compositeTransformationMatrixArray); + visiblePolygon = NDC_SQUARE; + for (float[] transformationMatrix : transformationMatrixCache) { + Matrix.multiplyMM( + /* result= */ tempResultMatrix, + /* resultOffset= */ 0, + /* lhs= */ transformationMatrix, + /* lhsOffset= */ 0, + /* rhs= */ compositeTransformationMatrixArray, + /* rhsOffset= */ 0); + System.arraycopy( + /* src= */ tempResultMatrix, + /* srcPos= */ 0, + /* dest= */ compositeTransformationMatrixArray, + /* destPost= */ 0, + /* length= */ tempResultMatrix.length); + visiblePolygon = + MatrixUtils.clipConvexPolygonToNdcRange( + MatrixUtils.transformPoints(transformationMatrix, visiblePolygon)); + if (visiblePolygon.size() < 3) { + // Can ignore remaining matrices as there are not enough vertices left to form a polygon. + return; + } + } + // Calculate the input frame vertices corresponding to the output frame's visible polygon. + Matrix.invertM( + tempResultMatrix, + /* mInvOffset= */ 0, + compositeTransformationMatrixArray, + /* mOffset= */ 0); + visiblePolygon = MatrixUtils.transformPoints(tempResultMatrix, visiblePolygon); + } + + /** Updates {@link #compositeRgbMatrixArray} based on the given frame timestamp. */ + private void updateCompositeRgbaMatrixArray(long presentationTimeUs) { + float[][] matricesCurrTimestamp = new float[rgbMatrices.size()][16]; + for (int i = 0; i < rgbMatrices.size(); i++) { + matricesCurrTimestamp[i] = rgbMatrices.get(i).getMatrix(presentationTimeUs, useHdr); + } + + if (!updateMatrixCache(rgbMatrixCache, matricesCurrTimestamp)) { + return; + } + + for (int i = 0; i < rgbMatrices.size(); i++) { + Matrix.multiplyMM( + /* result= */ tempResultMatrix, + /* resultOffset= */ 0, + /* lhs= */ rgbMatrices.get(i).getMatrix(presentationTimeUs, useHdr), + /* lhsOffset= */ 0, + /* rhs= */ compositeRgbMatrixArray, + /* rhsOffset= */ 0); + System.arraycopy( + /* src= */ tempResultMatrix, + /* srcPos= */ 0, + /* dest= */ compositeRgbMatrixArray, + /* destPost= */ 0, + /* length= */ tempResultMatrix.length); + } + } + + /** + * Updates the {@code cachedMatrices} with the {@code newMatrices}. Returns whether a matrix has + * changed inside the cache. + * + * @param cachedMatrices The existing cached matrices. Gets updated if it is out of date. + * @param newMatrices The new matrices to compare the cached matrices against. + */ + private static boolean updateMatrixCache(float[][] cachedMatrices, float[][] newMatrices) { + boolean matrixChanged = false; + for (int i = 0; i < cachedMatrices.length; i++) { + float[] cachedMatrix = cachedMatrices[i]; + float[] newMatrix = newMatrices[i]; + if (!Arrays.equals(cachedMatrix, newMatrix)) { + checkState(newMatrix.length == 16, "A 4x4 transformation matrix must have 16 elements"); + System.arraycopy( + /* src= */ newMatrix, + /* srcPos= */ 0, + /* dest= */ cachedMatrix, + /* destPost= */ 0, + /* length= */ newMatrix.length); + matrixChanged = true; + } + } + return matrixChanged; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/MatrixTransformation.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/MatrixTransformation.java new file mode 100644 index 0000000000..de55160a5c --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/MatrixTransformation.java @@ -0,0 +1,40 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.effect; + +import android.graphics.Matrix; + +/** + * Specifies a 3x3 transformation {@link Matrix} to apply in the vertex shader for each frame. + * + *
      The matrix is applied to points given in normalized device coordinates (-1 to 1 on x and y + * axes). Transformed pixels that are moved outside of the normal device coordinate range are + * clipped. + * + *
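As an example of the MatrixTransformation interface declared just below (hypothetical effect, not part of the patch), a time-dependent zoom returns a different 3x3 matrix per frame:

import android.graphics.Matrix;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.effect.MatrixTransformation;

public final class ZoomInTransformation implements MatrixTransformation {
  @Override
  public Matrix getMatrix(long presentationTimeUs) {
    // Zoom from 1x at 0us to 2x at 1 second, then hold.
    float progress = Math.min(1f, presentationTimeUs / (float) C.MICROS_PER_SECOND);
    Matrix matrix = new Matrix();
    matrix.postScale(/* sx= */ 1f + progress, /* sy= */ 1f + progress);
    return matrix;
  }
}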
      Output frame pixels outside of the transformed input frame will be black, with alpha = 0 if + * applicable. + */ +public interface MatrixTransformation extends GlMatrixTransformation { + /** + * Returns the 3x3 transformation {@link Matrix} to apply to the frame with the given timestamp. + */ + Matrix getMatrix(long presentationTimeUs); + + @Override + default float[] getGlMatrixArray(long presentationTimeUs) { + return MatrixUtils.getGlMatrixArray(getMatrix(presentationTimeUs)); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/MatrixUtils.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/MatrixUtils.java new file mode 100644 index 0000000000..1ac0f896e5 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/MatrixUtils.java @@ -0,0 +1,242 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import android.opengl.Matrix; +import android.util.Pair; +import com.google.common.collect.ImmutableList; +import java.util.Arrays; + +/** Utility functions for working with matrices, vertices, and polygons. */ +/* package */ final class MatrixUtils { + /** + * Contains the normal vectors of the clipping planes in homogeneous coordinates which + * conveniently also double as origin vectors and parameters of the normal form of the planes ax + + * by + cz = d. + */ + private static final float[][] NDC_CUBE = + new float[][] { + new float[] {1, 0, 0, 1}, + new float[] {-1, 0, 0, 1}, + new float[] {0, 1, 0, 1}, + new float[] {0, -1, 0, 1}, + new float[] {0, 0, 1, 1}, + new float[] {0, 0, -1, 1} + }; + + /** + * Returns a 4x4, column-major {@link Matrix} float array, from an input {@link + * android.graphics.Matrix}. + * + *

      This is useful for converting to the 4x4 column-major format commonly used in OpenGL. + */ + public static float[] getGlMatrixArray(android.graphics.Matrix matrix) { + float[] matrix3x3Array = new float[9]; + matrix.getValues(matrix3x3Array); + float[] matrix4x4Array = getMatrix4x4Array(matrix3x3Array); + + // Transpose from row-major to column-major representations. + float[] transposedMatrix4x4Array = new float[16]; + Matrix.transposeM( + transposedMatrix4x4Array, /* mTransOffset= */ 0, matrix4x4Array, /* mOffset= */ 0); + + return transposedMatrix4x4Array; + } + + /** + * Returns a 4x4 matrix array containing the 3x3 matrix array's contents. + * + *

<p>The 3x3 matrix array is expected to be in 2 dimensions, and the 4x4 matrix array is expected + * to be in 3 dimensions. The output will have the third row/column's values be an identity + * matrix's values, so that vertex transformations using this matrix will not affect the z axis. + *
      + * Input format: [a, b, c, d, e, f, g, h, i]
      + * Output format: [a, b, 0, c, d, e, 0, f, 0, 0, 1, 0, g, h, 0, i] + */ + private static float[] getMatrix4x4Array(float[] matrix3x3Array) { + float[] matrix4x4Array = new float[16]; + matrix4x4Array[10] = 1; + for (int inputRow = 0; inputRow < 3; inputRow++) { + for (int inputColumn = 0; inputColumn < 3; inputColumn++) { + int outputRow = (inputRow == 2) ? 3 : inputRow; + int outputColumn = (inputColumn == 2) ? 3 : inputColumn; + matrix4x4Array[outputRow * 4 + outputColumn] = matrix3x3Array[inputRow * 3 + inputColumn]; + } + } + return matrix4x4Array; + } + + /** + * Clips a convex polygon to normalized device coordinates (-1 to 1 on x, y, and z axes). + * + *
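As a worked illustration of the conversion above (translation values picked arbitrarily), a 2D translation by (tx, ty) has android.graphics.Matrix values [1, 0, tx, 0, 1, ty, 0, 0, 1]; getMatrix4x4Array expands this to the row-major [1, 0, 0, tx, 0, 1, 0, ty, 0, 0, 1, 0, 0, 0, 0, 1], and the transpose step yields the column-major GL array [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, tx, ty, 0, 1]. From inside the effect package (MatrixUtils is package-private) this can be checked with:

    android.graphics.Matrix translation = new android.graphics.Matrix();
    translation.setTranslate(/* dx= */ 0.5f, /* dy= */ -0.25f);
    float[] glMatrix = MatrixUtils.getGlMatrixArray(translation);
    // The translation lands in the last column of the column-major array, as OpenGL
    // expects: glMatrix[12] == 0.5f and glMatrix[13] == -0.25f.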

      The input and output vertices are given in homogeneous coordinates (x,y,z,1) where the last + * element must always be 1. To convert a general vector in homogeneous coordinates (xw,yw,zw,w) + * to this form, simply divide all elements by w. + * + * @param polygonVertices The vertices in counter-clockwise order as 4 element vectors of + * homogeneous coordinates. + * @return The vertices of the clipped polygon, in counter-clockwise order, or an empty list if + * the polygon doesn't intersect with the NDC range. + */ + public static ImmutableList clipConvexPolygonToNdcRange( + ImmutableList polygonVertices) { + checkArgument(polygonVertices.size() >= 3, "A polygon must have at least 3 vertices."); + + // This is a 3D generalization of the Sutherland-Hodgman algorithm + // https://en.wikipedia.org/wiki/Sutherland%E2%80%93Hodgman_algorithm + // using a convex clipping volume (the NDC cube) instead of a convex clipping polygon to clip a + // given subject polygon. + // For this algorithm, the subject polygon doesn't necessarily need to be convex. But since we + // require that it is convex, we can assume that the clipped result is a single connected + // convex polygon. + ImmutableList.Builder outputVertices = + new ImmutableList.Builder().addAll(polygonVertices); + for (float[] clippingPlane : NDC_CUBE) { + ImmutableList inputVertices = outputVertices.build(); + outputVertices = new ImmutableList.Builder<>(); + + for (int i = 0; i < inputVertices.size(); i++) { + float[] currentVertex = inputVertices.get(i); + float[] previousVertex = + inputVertices.get((inputVertices.size() + i - 1) % inputVertices.size()); + if (isInsideClippingHalfSpace(currentVertex, clippingPlane)) { + if (!isInsideClippingHalfSpace(previousVertex, clippingPlane)) { + float[] intersectionPoint = + computeIntersectionPoint( + clippingPlane, clippingPlane, previousVertex, currentVertex); + if (!Arrays.equals(currentVertex, intersectionPoint)) { + outputVertices.add(intersectionPoint); + } + } + outputVertices.add(currentVertex); + } else if (isInsideClippingHalfSpace(previousVertex, clippingPlane)) { + float[] intersection = + computeIntersectionPoint(clippingPlane, clippingPlane, previousVertex, currentVertex); + if (!Arrays.equals(previousVertex, intersection)) { + outputVertices.add(intersection); + } + } + } + } + + return outputVertices.build(); + } + + /** + * Returns whether the given point is inside the half-space bounded by the clipping plane and + * facing away from its normal vector. + * + *
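As a rough in-package illustration of the clipping behavior described above (vertex values invented for the example), a counter-clockwise triangle with one vertex outside the x = 1 plane is reduced to a quadrilateral:

    ImmutableList<float[]> clipped =
        MatrixUtils.clipConvexPolygonToNdcRange(
            ImmutableList.of(
                new float[] {0, 0, 0, 1},    // inside the NDC cube
                new float[] {2, 0, 0, 1},    // outside: x > 1
                new float[] {0, 1, 0, 1}));  // inside
    // The x = 1 plane cuts the outside vertex away and inserts the two intersection
    // points (1, 0, 0, 1) and (1, 0.5, 0, 1), so the result has four vertices.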

<p>The clipping plane has the form ax + by + cz = d. + * + * @param point A point in homogeneous coordinates (x,y,z,1). + * @param clippingPlane The parameters (a,b,c,d) of the plane's normal form. + * @return Whether the point is on the inside of the plane. + */ + private static boolean isInsideClippingHalfSpace(float[] point, float[] clippingPlane) { + checkArgument(clippingPlane.length == 4, "Expecting 4 plane parameters"); + + return clippingPlane[0] * point[0] + clippingPlane[1] * point[1] + clippingPlane[2] * point[2] + <= clippingPlane[3]; + } + + /** + * Returns the intersection point of the given line and plane. + * + *

<p>This method may only be called if such an intersection exists. + * + *

<p>The plane has the form ax + by + cz = d. + * + *

      The points are given in homogeneous coordinates (x,y,z,1). + * + * @param planePoint A point on the plane. + * @param planeParameters The parameters of the plane's normal form. + * @param linePoint1 A point on the line. + * @param linePoint2 Another point on the line. + * @return The point of intersection. + */ + private static float[] computeIntersectionPoint( + float[] planePoint, float[] planeParameters, float[] linePoint1, float[] linePoint2) { + checkArgument(planeParameters.length == 4, "Expecting 4 plane parameters"); + + // See https://en.wikipedia.org/wiki/Line%E2%80%93plane_intersection#Algebraic_form for the + // derivation of this solution formula. + float lineEquationParameter = + ((planePoint[0] - linePoint1[0]) * planeParameters[0] + + (planePoint[1] - linePoint1[1]) * planeParameters[1] + + (planePoint[2] - linePoint1[2]) * planeParameters[2]) + / ((linePoint2[0] - linePoint1[0]) * planeParameters[0] + + (linePoint2[1] - linePoint1[1]) * planeParameters[1] + + (linePoint2[2] - linePoint1[2]) * planeParameters[2]); + float x = linePoint1[0] + (linePoint2[0] - linePoint1[0]) * lineEquationParameter; + float y = linePoint1[1] + (linePoint2[1] - linePoint1[1]) * lineEquationParameter; + float z = linePoint1[2] + (linePoint2[2] - linePoint1[2]) * lineEquationParameter; + return new float[] {x, y, z, 1}; + } + + /** + * Applies a transformation matrix to each point. + * + * @param transformationMatrix The 4x4 transformation matrix. + * @param points The points as 4 element vectors of homogeneous coordinates (x,y,z,1). + * @return The transformed points as 4 element vectors of homogeneous coordinates (x,y,z,1). + */ + public static ImmutableList transformPoints( + float[] transformationMatrix, ImmutableList points) { + ImmutableList.Builder transformedPoints = new ImmutableList.Builder<>(); + for (int i = 0; i < points.size(); i++) { + float[] transformedPoint = new float[4]; + Matrix.multiplyMV( + transformedPoint, + /* resultVecOffset= */ 0, + transformationMatrix, + /* lhsMatOffset= */ 0, + points.get(i), + /* rhsVecOffset= */ 0); + // Multiplication result is in homogeneous coordinates (xw,yw,zw,w) with any w. Divide by w + // to get (x,y,z,1). + transformedPoint[0] /= transformedPoint[3]; + transformedPoint[1] /= transformedPoint[3]; + transformedPoint[2] /= transformedPoint[3]; + transformedPoint[3] = 1; + transformedPoints.add(transformedPoint); + } + return transformedPoints.build(); + } + + /** + * Returns the output frame size after applying the given list of {@link GlMatrixTransformation + * GlMatrixTransformations} to an input frame with the given size. + */ + public static Pair configureAndGetOutputSize( + int inputWidth, + int inputHeight, + ImmutableList matrixTransformations) { + checkArgument(inputWidth > 0, "inputWidth must be positive"); + checkArgument(inputHeight > 0, "inputHeight must be positive"); + + Pair outputSize = Pair.create(inputWidth, inputHeight); + for (int i = 0; i < matrixTransformations.size(); i++) { + outputSize = matrixTransformations.get(i).configure(outputSize.first, outputSize.second); + } + + return outputSize; + } + + /** Class only contains static methods. 
*/ + private MatrixUtils() {} +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/Presentation.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/Presentation.java new file mode 100644 index 0000000000..b15ba5748c --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/Presentation.java @@ -0,0 +1,242 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static java.lang.annotation.ElementType.TYPE_USE; +import static java.lang.annotation.RetentionPolicy.SOURCE; + +import android.graphics.Matrix; +import android.util.Pair; +import androidx.annotation.IntDef; +import com.google.android.exoplayer2.C; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.Target; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; + +/** + * Controls how a frame is presented with options to set the output resolution and choose how to map + * the input pixels onto the output frame geometry (for example, by stretching the input frame to + * match the specified output frame, or fitting the input frame using letterboxing). + * + *

<p>The background color of the output frame will be black, with alpha = 0 if applicable. + */ +public final class Presentation implements MatrixTransformation { + + /** + * Strategies controlling the layout of input pixels in the output frame. + * + *

<p>One of {@link #LAYOUT_SCALE_TO_FIT}, {@link #LAYOUT_SCALE_TO_FIT_WITH_CROP}, or {@link + * #LAYOUT_STRETCH_TO_FIT}. + * + *

<p>May scale either width or height, leaving the other output dimension equal to its input. + */ + @Documented + @Retention(SOURCE) + @Target(TYPE_USE) + @IntDef({LAYOUT_SCALE_TO_FIT, LAYOUT_SCALE_TO_FIT_WITH_CROP, LAYOUT_STRETCH_TO_FIT}) + public @interface Layout {} + /** + * Empty pixels added above and below the input frame (for letterboxing), or to the left and right + * of the input frame (for pillarboxing), until the desired aspect ratio is achieved. All input + * frame pixels will be within the output frame. + * + *

<p>When applying: + * + * <ul> + * <li>letterboxing, the output width will default to the input width, and the output height + * will be scaled appropriately. + * <li>pillarboxing, the output height will default to the input height, and the output width + * will be scaled appropriately. + * </ul> + */ + public static final int LAYOUT_SCALE_TO_FIT = 0; + /** + * Pixels cropped from the input frame, until the desired aspect ratio is achieved. Pixels may be + * cropped either from the bottom and top, or from the left and right sides, of the input frame. + * + *

<p>When cropping from the: + * + * <ul> + * <li>bottom and top, the output width will default to the input width, and the output height + * will be scaled appropriately. + * <li>left and right, the output height will default to the input height, and the output width + * will be scaled appropriately. + * </ul> + */ + public static final int LAYOUT_SCALE_TO_FIT_WITH_CROP = 1; + /** + * Frame stretched larger on the x or y axes to fit the desired aspect ratio. + * + *

<p>When stretching to a: + * + * <ul> + * <li>taller aspect ratio, the output width will default to the input width, and the output + * height will be scaled appropriately. + * <li>narrower aspect ratio, the output height will default to the input height, and the output + * width will be scaled appropriately. + * </ul> + */ + public static final int LAYOUT_STRETCH_TO_FIT = 2; + + private static final float ASPECT_RATIO_UNSET = -1f; + + private static void checkLayout(@Layout int layout) { + checkArgument( + layout == LAYOUT_SCALE_TO_FIT + || layout == LAYOUT_SCALE_TO_FIT_WITH_CROP + || layout == LAYOUT_STRETCH_TO_FIT, + "invalid layout " + layout); + } + + /** + * Creates a new {@link Presentation} instance. + * + *

<p>The output frame will have the given aspect ratio (width/height ratio). Width or height will + * be resized to conform to this {@code aspectRatio}, given a {@link Layout}. + * + * @param aspectRatio The aspect ratio (width/height ratio) of the output frame. Must be positive. + * @param layout The layout of the output frame. + */ + public static Presentation createForAspectRatio(float aspectRatio, @Layout int layout) { + checkArgument( + aspectRatio == C.LENGTH_UNSET || aspectRatio > 0, + "aspect ratio " + aspectRatio + " must be positive or unset"); + checkLayout(layout); + return new Presentation( + /* width= */ C.LENGTH_UNSET, /* height= */ C.LENGTH_UNSET, aspectRatio, layout); + } + + /** + * Creates a new {@link Presentation} instance. + * + *

<p>The output frame will have the given height. Width will scale to preserve the input aspect + * ratio. + * + * @param height The height of the output frame, in pixels. + */ + public static Presentation createForHeight(int height) { + return new Presentation( + /* width= */ C.LENGTH_UNSET, height, ASPECT_RATIO_UNSET, LAYOUT_SCALE_TO_FIT); + } + + /** + * Creates a new {@link Presentation} instance. + * + *

<p>The output frame will have the given width and height, given a {@link Layout}. + * + *
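Taken together, the factory methods above can be used roughly as follows (a usage sketch; how the resulting effects are handed to the frame processor is up to the caller):

    // Letterbox or pillarbox into a square frame, keeping every input pixel visible.
    Presentation square =
        Presentation.createForAspectRatio(/* aspectRatio= */ 1f, Presentation.LAYOUT_SCALE_TO_FIT);
    // Scale to 720 pixels tall, preserving the input aspect ratio.
    Presentation height720 = Presentation.createForHeight(720);
    // Produce exactly 1080x1920, cropping whatever falls outside that aspect ratio.
    Presentation portrait =
        Presentation.createForWidthAndHeight(
            /* width= */ 1080, /* height= */ 1920, Presentation.LAYOUT_SCALE_TO_FIT_WITH_CROP);

For instance, configure(1920, 1080) on the square presentation returns 1920x1920, with the 16:9 input letterboxed between black bars.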

      Width and height must be positive integers representing the output frame's width and height. + * + * @param width The width of the output frame, in pixels. + * @param height The height of the output frame, in pixels. + * @param layout The layout of the output frame. + */ + public static Presentation createForWidthAndHeight(int width, int height, @Layout int layout) { + checkArgument(width > 0, "width " + width + " must be positive"); + checkArgument(height > 0, "height " + height + " must be positive"); + checkLayout(layout); + return new Presentation(width, height, ASPECT_RATIO_UNSET, layout); + } + + private final int requestedWidthPixels; + private final int requestedHeightPixels; + private float requestedAspectRatio; + private final @Layout int layout; + + private float outputWidth; + private float outputHeight; + private @MonotonicNonNull Matrix transformationMatrix; + + private Presentation(int width, int height, float aspectRatio, @Layout int layout) { + checkArgument( + (aspectRatio == C.LENGTH_UNSET) || (width == C.LENGTH_UNSET), + "width and aspect ratio should not both be set"); + + this.requestedWidthPixels = width; + this.requestedHeightPixels = height; + this.requestedAspectRatio = aspectRatio; + this.layout = layout; + + outputWidth = C.LENGTH_UNSET; + outputHeight = C.LENGTH_UNSET; + transformationMatrix = new Matrix(); + } + + @Override + public Pair configure(int inputWidth, int inputHeight) { + checkArgument(inputWidth > 0, "inputWidth must be positive"); + checkArgument(inputHeight > 0, "inputHeight must be positive"); + + transformationMatrix = new Matrix(); + outputWidth = inputWidth; + outputHeight = inputHeight; + + if ((requestedWidthPixels != C.LENGTH_UNSET) && (requestedHeightPixels != C.LENGTH_UNSET)) { + requestedAspectRatio = (float) requestedWidthPixels / requestedHeightPixels; + } + + if (requestedAspectRatio != C.LENGTH_UNSET) { + applyAspectRatio(); + } + + // Scale output width and height to requested values. 
+ if (requestedHeightPixels != C.LENGTH_UNSET) { + if (requestedWidthPixels != C.LENGTH_UNSET) { + outputWidth = requestedWidthPixels; + } else { + outputWidth = requestedHeightPixels * outputWidth / outputHeight; + } + outputHeight = requestedHeightPixels; + } + return Pair.create(Math.round(outputWidth), Math.round(outputHeight)); + } + + @Override + public Matrix getMatrix(long presentationTimeUs) { + return checkStateNotNull(transformationMatrix, "configure must be called first"); + } + + @RequiresNonNull("transformationMatrix") + private void applyAspectRatio() { + float inputAspectRatio = outputWidth / outputHeight; + if (layout == LAYOUT_SCALE_TO_FIT) { + if (requestedAspectRatio > inputAspectRatio) { + transformationMatrix.setScale(inputAspectRatio / requestedAspectRatio, 1f); + outputWidth = outputHeight * requestedAspectRatio; + } else { + transformationMatrix.setScale(1f, requestedAspectRatio / inputAspectRatio); + outputHeight = outputWidth / requestedAspectRatio; + } + } else if (layout == LAYOUT_SCALE_TO_FIT_WITH_CROP) { + if (requestedAspectRatio > inputAspectRatio) { + transformationMatrix.setScale(1f, requestedAspectRatio / inputAspectRatio); + outputHeight = outputWidth / requestedAspectRatio; + } else { + transformationMatrix.setScale(inputAspectRatio / requestedAspectRatio, 1f); + outputWidth = outputHeight * requestedAspectRatio; + } + } else if (layout == LAYOUT_STRETCH_TO_FIT) { + if (requestedAspectRatio > inputAspectRatio) { + outputWidth = outputHeight * requestedAspectRatio; + } else { + outputHeight = outputWidth / requestedAspectRatio; + } + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/RgbAdjustment.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/RgbAdjustment.java new file mode 100644 index 0000000000..d917d0acff --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/RgbAdjustment.java @@ -0,0 +1,100 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import android.opengl.Matrix; +import com.google.android.exoplayer2.util.GlUtil; +import com.google.errorprone.annotations.CanIgnoreReturnValue; + +/** Scales the red, green, and blue color channels of a frame. */ +public final class RgbAdjustment implements RgbMatrix { + + /** A builder for {@link RgbAdjustment} instances. */ + public static final class Builder { + private float redScale; + private float greenScale; + private float blueScale; + + /** Creates a new instance with default values. */ + public Builder() { + redScale = 1; + greenScale = 1; + blueScale = 1; + } + + /** + * Scales the red channel of the frame by {@code redScale}. + * + * @param redScale The scale to apply to the red channel. Needs to be non-negative and the + * default value is {@code 1}. 
+ */ + @CanIgnoreReturnValue + public Builder setRedScale(float redScale) { + checkArgument(0 <= redScale, "Red scale needs to be non-negative."); + this.redScale = redScale; + return this; + } + + /** + * Scales the green channel of the frame by {@code greenScale}. + * + * @param greenScale The scale to apply to the green channel. Needs to be non-negative and the + * default value is {@code 1}. + */ + @CanIgnoreReturnValue + public Builder setGreenScale(float greenScale) { + checkArgument(0 <= greenScale, "Green scale needs to be non-negative."); + this.greenScale = greenScale; + return this; + } + + /** + * Scales the blue channel of the frame by {@code blueScale}. + * + * @param blueScale The scale to apply to the blue channel. Needs to be non-negative and the + * default value is {@code 1}. + */ + @CanIgnoreReturnValue + public Builder setBlueScale(float blueScale) { + checkArgument(0 <= blueScale, "Blue scale needs to be non-negative."); + this.blueScale = blueScale; + return this; + } + + /** Creates a new {@link RgbAdjustment} instance. */ + public RgbAdjustment build() { + float[] rgbMatrix = GlUtil.create4x4IdentityMatrix(); + Matrix.scaleM( + rgbMatrix, /* smOffset= */ 0, /* x= */ redScale, /* y= */ greenScale, /* z= */ blueScale); + + return new RgbAdjustment(rgbMatrix); + } + } + + private final float[] rgbMatrix; + + private RgbAdjustment(float[] rgbMatrix) { + this.rgbMatrix = rgbMatrix; + } + + @Override + public float[] getMatrix(long presentationTimeUs, boolean useHdr) { + return rgbMatrix; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/RgbFilter.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/RgbFilter.java new file mode 100644 index 0000000000..eb806f43fa --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/RgbFilter.java @@ -0,0 +1,97 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkState; + +import android.content.Context; +import com.google.android.exoplayer2.util.FrameProcessingException; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** Provides common color filters. 
*/ +public class RgbFilter implements RgbMatrix { + private static final int COLOR_FILTER_GRAYSCALE_INDEX = 1; + private static final int COLOR_FILTER_INVERTED_INDEX = 2; + + // Grayscale transformation matrix using the BT.709 luminance coefficients from + // https://en.wikipedia.org/wiki/Grayscale#Converting_colour_to_grayscale + private static final float[] FILTER_MATRIX_GRAYSCALE_SDR = { + 0.2126f, 0.2126f, 0.2126f, 0, 0.7152f, 0.7152f, 0.7152f, 0, 0.0722f, 0.0722f, 0.0722f, 0, 0, 0, + 0, 1 + }; + // Grayscale transformation using the BT.2020 primary colors from + // https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2020-2-201510-I!!PDF-E.pdf + // TODO(b/241240659): Add HDR tests once infrastructure supports it. + private static final float[] FILTER_MATRIX_GRAYSCALE_HDR = { + 0.2627f, 0.2627f, 0.2627f, 0, 0.6780f, 0.6780f, 0.6780f, 0, 0.0593f, 0.0593f, 0.0593f, 0, 0, 0, + 0, 1 + }; + // Inverted filter uses the transformation R' = -R + 1 = 1 - R. + private static final float[] FILTER_MATRIX_INVERTED = { + -1, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0, 1, 1, 1, 1 + }; + + private final int colorFilter; + /** + * Ensures that the usage of HDR is consistent. {@code null} indicates that HDR has not yet been + * set. + */ + private @MonotonicNonNull Boolean useHdr; + + /** Creates a new grayscale {@code RgbFilter} instance. */ + public static RgbFilter createGrayscaleFilter() { + return new RgbFilter(COLOR_FILTER_GRAYSCALE_INDEX); + } + + /** Creates a new inverted {@code RgbFilter} instance. */ + public static RgbFilter createInvertedFilter() { + return new RgbFilter(COLOR_FILTER_INVERTED_INDEX); + } + + private RgbFilter(int colorFilter) { + this.colorFilter = colorFilter; + } + + private void checkForConsistentHdrSetting(boolean useHdr) { + if (this.useHdr == null) { + this.useHdr = useHdr; + } else { + checkState(this.useHdr == useHdr, "Changing HDR setting is not supported."); + } + } + + @Override + public float[] getMatrix(long presentationTimeUs, boolean useHdr) { + checkForConsistentHdrSetting(useHdr); + switch (colorFilter) { + case COLOR_FILTER_GRAYSCALE_INDEX: + return useHdr ? FILTER_MATRIX_GRAYSCALE_HDR : FILTER_MATRIX_GRAYSCALE_SDR; + case COLOR_FILTER_INVERTED_INDEX: + return FILTER_MATRIX_INVERTED; + default: + // Should never happen. + throw new IllegalStateException("Invalid color filter " + colorFilter); + } + } + + @Override + public SingleFrameGlTextureProcessor toGlTextureProcessor(Context context, boolean useHdr) + throws FrameProcessingException { + checkForConsistentHdrSetting(useHdr); + return RgbMatrix.super.toGlTextureProcessor(context, useHdr); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/RgbMatrix.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/RgbMatrix.java new file mode 100644 index 0000000000..237aa47091 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/RgbMatrix.java @@ -0,0 +1,49 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.effect; + +import android.content.Context; +import com.google.android.exoplayer2.util.FrameProcessingException; +import com.google.common.collect.ImmutableList; + +/** + * Specifies a 4x4 RGB color transformation matrix to apply to each frame in the fragment shader. + */ +public interface RgbMatrix extends GlEffect { + + /** + * Returns the 4x4 RGB transformation {@linkplain android.opengl.Matrix matrix} to apply to the + * color values of each pixel in the frame with the given timestamp. + * + * @param presentationTimeUs The timestamp of the frame to apply the matrix on. + * @param useHdr If {@code true}, colors will be in linear RGB BT.2020. If {@code false}, colors + * will be in linear RGB BT.709. Must be consistent with {@code useHdr} in {@link + * #toGlTextureProcessor(Context, boolean)}. + * @return The {@code RgbMatrix} to apply to the frame. + */ + float[] getMatrix(long presentationTimeUs, boolean useHdr); + + @Override + default SingleFrameGlTextureProcessor toGlTextureProcessor(Context context, boolean useHdr) + throws FrameProcessingException { + return MatrixTextureProcessor.create( + context, + /* matrixTransformations= */ ImmutableList.of(), + /* rgbMatrices= */ ImmutableList.of(this), + useHdr); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ScaleToFitTransformation.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ScaleToFitTransformation.java new file mode 100644 index 0000000000..aedcb23335 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/ScaleToFitTransformation.java @@ -0,0 +1,149 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static java.lang.Math.max; +import static java.lang.Math.min; + +import android.graphics.Matrix; +import android.util.Pair; +import com.google.android.exoplayer2.util.GlUtil; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Specifies a simple rotation and/or scale to apply in the vertex shader. + * + *
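The RgbMatrix implementations above plug into the same effects pipeline; a short usage sketch (the scale values are arbitrary):

    // Warm the image slightly by boosting red and dimming blue.
    RgbAdjustment warm =
        new RgbAdjustment.Builder().setRedScale(1.2f).setBlueScale(0.8f).build();
    // Or drop chroma entirely using the BT.709 or BT.2020 weights, depending on useHdr.
    RgbFilter grayscale = RgbFilter.createGrayscaleFilter();
    // Both are RgbMatrix instances, so toGlTextureProcessor(context, /* useHdr= */ false)
    // wraps each in a MatrixTextureProcessor that applies only the color matrix.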

<p>All input frames' pixels will be preserved and copied into an output frame, potentially + * changing the width and height of the frame by scaling dimensions to fit. + * + *

      The background color of the output frame will be black, with alpha = 0 if applicable. + */ +public final class ScaleToFitTransformation implements MatrixTransformation { + + /** A builder for {@link ScaleToFitTransformation} instances. */ + public static final class Builder { + + // Optional fields. + private float scaleX; + private float scaleY; + private float rotationDegrees; + + /** Creates a builder with default values. */ + public Builder() { + scaleX = 1; + scaleY = 1; + rotationDegrees = 0; + } + + /** + * Sets the x and y axis scaling factors to apply to each frame's width and height. + * + *

<p>The values default to 1, which corresponds to not scaling along both axes. + * + * @param scaleX The multiplier by which the frame will scale horizontally, along the x-axis. + * @param scaleY The multiplier by which the frame will scale vertically, along the y-axis. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setScale(float scaleX, float scaleY) { + this.scaleX = scaleX; + this.scaleY = scaleY; + return this; + } + + /** + * Sets the counterclockwise rotation degrees. + * + *
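A brief usage sketch of the builder above (scale and angle chosen arbitrarily):

    ScaleToFitTransformation rotateAndGrow =
        new ScaleToFitTransformation.Builder()
            .setScale(/* scaleX= */ 2f, /* scaleY= */ 2f)
            .setRotationDegrees(45)
            .build();
    // configure() later enlarges the output dimensions so the rotated, scaled frame
    // still fits entirely inside the output; no input pixels are discarded.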

      The default value, 0, corresponds to not applying any rotation. + * + * @param rotationDegrees The counterclockwise rotation, in degrees. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setRotationDegrees(float rotationDegrees) { + this.rotationDegrees = rotationDegrees; + return this; + } + + public ScaleToFitTransformation build() { + return new ScaleToFitTransformation(scaleX, scaleY, rotationDegrees); + } + } + + private final Matrix transformationMatrix; + private @MonotonicNonNull Matrix adjustedTransformationMatrix; + + /** + * Creates a new instance. + * + * @param scaleX The multiplier by which the frame will scale horizontally, along the x-axis. + * @param scaleY The multiplier by which the frame will scale vertically, along the y-axis. + * @param rotationDegrees How much to rotate the frame counterclockwise, in degrees. + */ + private ScaleToFitTransformation(float scaleX, float scaleY, float rotationDegrees) { + transformationMatrix = new Matrix(); + transformationMatrix.postScale(scaleX, scaleY); + transformationMatrix.postRotate(rotationDegrees); + } + + @Override + public Pair configure(int inputWidth, int inputHeight) { + checkArgument(inputWidth > 0, "inputWidth must be positive"); + checkArgument(inputHeight > 0, "inputHeight must be positive"); + + adjustedTransformationMatrix = new Matrix(transformationMatrix); + + if (transformationMatrix.isIdentity()) { + return Pair.create(inputWidth, inputHeight); + } + + float inputAspectRatio = (float) inputWidth / inputHeight; + // Scale frames by inputAspectRatio, to account for OpenGL's normalized device + // coordinates (NDC) (a square from -1 to 1 for both x and y) and preserve rectangular + // display of input pixels during transformations (ex. rotations). With scaling, + // transformationMatrix operations operate on a rectangle for x from -inputAspectRatio to + // inputAspectRatio, and y from -1 to 1. + adjustedTransformationMatrix.preScale(/* sx= */ inputAspectRatio, /* sy= */ 1f); + adjustedTransformationMatrix.postScale(/* sx= */ 1f / inputAspectRatio, /* sy= */ 1f); + + // Modify transformationMatrix to keep input pixels. 
+ float[][] transformOnNdcPoints = {{-1, -1, 0, 1}, {-1, 1, 0, 1}, {1, -1, 0, 1}, {1, 1, 0, 1}}; + float minX = Float.MAX_VALUE; + float maxX = Float.MIN_VALUE; + float minY = Float.MAX_VALUE; + float maxY = Float.MIN_VALUE; + for (float[] transformOnNdcPoint : transformOnNdcPoints) { + adjustedTransformationMatrix.mapPoints(transformOnNdcPoint); + minX = min(minX, transformOnNdcPoint[0]); + maxX = max(maxX, transformOnNdcPoint[0]); + minY = min(minY, transformOnNdcPoint[1]); + maxY = max(maxY, transformOnNdcPoint[1]); + } + + float scaleX = (maxX - minX) / GlUtil.LENGTH_NDC; + float scaleY = (maxY - minY) / GlUtil.LENGTH_NDC; + adjustedTransformationMatrix.postScale(1f / scaleX, 1f / scaleY); + return Pair.create(Math.round(inputWidth * scaleX), Math.round(inputHeight * scaleY)); + } + + @Override + public Matrix getMatrix(long presentationTimeUs) { + return checkStateNotNull(adjustedTransformationMatrix, "configure must be called first"); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/SingleColorLut.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/SingleColorLut.java new file mode 100644 index 0000000000..696dbbcd9d --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/SingleColorLut.java @@ -0,0 +1,171 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkState; + +import android.content.Context; +import android.graphics.Bitmap; +import android.opengl.GLES20; +import android.opengl.GLUtils; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.util.FrameProcessingException; +import com.google.android.exoplayer2.util.GlUtil; +import com.google.android.exoplayer2.util.Util; + +/** Transforms the colors of a frame by applying the same color lookup table to each frame. */ +public class SingleColorLut implements ColorLut { + private final Bitmap lut; + private int lutTextureId; + + /** + * Creates a new instance. + * + *

<p>{@code lutCube} needs to be a {@code N x N x N} cube and each element is an integer + * representing a color using the {@link Bitmap.Config#ARGB_8888} format. + */ + public static SingleColorLut createFromCube(int[][][] lutCube) { + checkArgument( + lutCube.length > 0 && lutCube[0].length > 0 && lutCube[0][0].length > 0, + "LUT must have three dimensions."); + checkArgument( + lutCube.length == lutCube[0].length && lutCube.length == lutCube[0][0].length, + Util.formatInvariant( + "All three dimensions of a LUT must match, received %d x %d x %d.", + lutCube.length, lutCube[0].length, lutCube[0][0].length)); + + return new SingleColorLut(transformCubeIntoBitmap(lutCube)); + } + + /** + * Creates a new instance. + * + *
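As a sketch of the cube format expected by createFromCube (an identity mapping here, with 32 points per channel chosen for the example):

    int n = 32;
    int[][][] identityCube = new int[n][n][n];
    for (int r = 0; r < n; r++) {
      for (int g = 0; g < n; g++) {
        for (int b = 0; b < n; b++) {
          // Map each lattice index back to its own 8-bit ARGB color.
          identityCube[r][g][b] =
              android.graphics.Color.argb(
                  255, (r * 255) / (n - 1), (g * 255) / (n - 1), (b * 255) / (n - 1));
        }
      }
    }
    SingleColorLut identityLut = SingleColorLut.createFromCube(identityCube);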

      LUT needs to be a Bitmap of a flattened HALD image of width {@code N} and height {@code + * N^2}. Each element must be an integer representing a color using the {@link + * Bitmap.Config#ARGB_8888} format. + */ + public static SingleColorLut createFromBitmap(Bitmap lut) { + checkArgument( + lut.getWidth() * lut.getWidth() == lut.getHeight(), + Util.formatInvariant( + "LUT needs to be in a N x N^2 format, received %d x %d.", + lut.getWidth(), lut.getHeight())); + checkArgument( + lut.getConfig() == Bitmap.Config.ARGB_8888, "Color representation needs to be ARGB_8888."); + + return new SingleColorLut(lut); + } + + private SingleColorLut(Bitmap lut) { + this.lut = lut; + lutTextureId = Format.NO_VALUE; + } + + /** + * Transforms the N x N x N {@code cube} into a N x N^2 {@code bitmap}. + * + * @param cube The 3D Color Lut which gets indexed using {@code cube[R][G][B]}. + * @return A {@link Bitmap} of size {@code N x N^2}, where the {@code cube[R][G][B]} color can be + * indexed at {@code bitmap.getColor(B, N * R + G)}. + */ + private static Bitmap transformCubeIntoBitmap(int[][][] cube) { + // The support for 3D textures starts in OpenGL 3.0 and the Android API 8, Version 2.2 + // uses OpenGL 2.0 which only supports 2D textures. Thus we need to transform the 3D LUT + // into 2D to support all Android SDKs. + + // The cube consists of N planes on the z-direction in the coordinate system where each plane + // has a size of N x N. To transform the cube into a 2D bitmap we stack each N x N plane + // vertically on top of each other. This gives us a bitmap of width N and height N^2. + // + // As an example, lets take the following 3D identity LUT of size 2x2x2: + // cube = [ + // [[(0, 0, 0), (0, 0, 1)], + // [(0, 1, 0), (0, 1, 1)]], + // [[(1, 0, 0), (1, 0, 1)], + // [(1, 1, 0), (1, 1, 1)]] + // ]; + // If we transform this cube now into a 2x2^2 = 2x4 bitmap we yield the following 2D plane: + // bitmap = [[(0, 0, 0), (0, 0, 1)], + // [(0, 1, 0), (0, 1, 1)], + // [(1, 0, 0), (1, 0, 1)], + // [(1, 1, 0), (1, 1, 1)]]; + // media/bitmap/lut/identity.png is an example of how a 32x32x32 3D LUT looks like as an + // 32x32^2 bitmap. + int length = cube.length; + int[] bitmapColorsArray = new int[length * length * length]; + + for (int r = 0; r < length; r++) { + for (int g = 0; g < length; g++) { + for (int b = 0; b < length; b++) { + int color = cube[r][g][b]; + int planePosition = b + length * (g + length * r); + bitmapColorsArray[planePosition] = color; + } + } + } + + return Bitmap.createBitmap( + bitmapColorsArray, + /* width= */ length, + /* height= */ length * length, + Bitmap.Config.ARGB_8888); + } + + /** Must be called after {@link #toGlTextureProcessor(Context, boolean)}. */ + @Override + public int getLutTextureId(long presentationTimeUs) { + checkState( + lutTextureId != Format.NO_VALUE, + "The LUT has not been stored as a texture in OpenGL yet. 
You must to call" + + " #toGlTextureProcessor() first."); + return lutTextureId; + } + + @Override + public int getLength(long presentationTimeUs) { + return lut.getWidth(); + } + + @Override + public void release() throws GlUtil.GlException { + GlUtil.deleteTexture(lutTextureId); + } + + @Override + public SingleFrameGlTextureProcessor toGlTextureProcessor(Context context, boolean useHdr) + throws FrameProcessingException { + checkState(!useHdr, "HDR is currently not supported."); + + try { + lutTextureId = storeLutAsTexture(lut); + } catch (GlUtil.GlException e) { + throw new FrameProcessingException("Could not store the LUT as a texture.", e); + } + + return new ColorLutProcessor(context, /* colorLut= */ this, useHdr); + } + + private static int storeLutAsTexture(Bitmap bitmap) throws GlUtil.GlException { + int lutTextureId = + GlUtil.createTexture( + bitmap.getWidth(), bitmap.getHeight(), /* useHighPrecisionColorComponents= */ false); + GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0); + GlUtil.checkGlError(); + return lutTextureId; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/SingleFrameGlTextureProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/SingleFrameGlTextureProcessor.java new file mode 100644 index 0000000000..eda1dffa08 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/SingleFrameGlTextureProcessor.java @@ -0,0 +1,177 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.effect; + +import static com.google.android.exoplayer2.util.Assertions.checkState; + +import android.util.Pair; +import androidx.annotation.CallSuper; +import com.google.android.exoplayer2.util.FrameProcessingException; +import com.google.android.exoplayer2.util.GlUtil; +import org.checkerframework.checker.nullness.qual.EnsuresNonNull; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Manages a GLSL shader program for processing a frame. Implementations generally copy input pixels + * into an output frame, with changes to pixels specific to the implementation. + * + *

<p>{@code SingleFrameGlTextureProcessor} implementations must produce exactly one output frame + * per input frame with the same presentation timestamp. For more flexibility, implement {@link + * GlTextureProcessor} directly. + * + *

      All methods in this class must be called on the thread that owns the OpenGL context. + */ +public abstract class SingleFrameGlTextureProcessor implements GlTextureProcessor { + + private final boolean useHdr; + + private InputListener inputListener; + private OutputListener outputListener; + private ErrorListener errorListener; + private int inputWidth; + private int inputHeight; + private @MonotonicNonNull TextureInfo outputTexture; + private boolean outputTextureInUse; + + /** + * Creates a {@code SingleFrameGlTextureProcessor} instance. + * + * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be + * in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. + */ + public SingleFrameGlTextureProcessor(boolean useHdr) { + this.useHdr = useHdr; + inputListener = new InputListener() {}; + outputListener = new OutputListener() {}; + errorListener = (frameProcessingException) -> {}; + } + + /** + * Configures the texture processor based on the input dimensions. + * + *

<p>This method must be called before {@linkplain #drawFrame(int,long) drawing} the first frame + * and before drawing subsequent frames with different input dimensions. + * + * @param inputWidth The input width, in pixels. + * @param inputHeight The input height, in pixels. + * @return The output width and height of frames processed through {@link #drawFrame(int, long)}. + */ + public abstract Pair<Integer, Integer> configure(int inputWidth, int inputHeight); + + /** + * Draws one frame. + * + *

<p>This method may only be called after the texture processor has been {@link #configure(int, + * int) configured}. The caller is responsible for focussing the correct render target before + * calling this method. + * + *
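A skeletal subclass, shown only to illustrate the configure/drawFrame contract described above (the shader handling is elided; the class name is illustrative, not part of the patch):

    import android.util.Pair;

    /** Illustrative only: copies the input frame without modifying it. */
    final class NoOpTextureProcessor extends SingleFrameGlTextureProcessor {
      NoOpTextureProcessor(boolean useHdr) {
        super(useHdr);
        // A real implementation would compile and link its GLSL program here.
      }

      @Override
      public Pair<Integer, Integer> configure(int inputWidth, int inputHeight) {
        // Output matches the input size; the base class allocates the output texture.
        return Pair.create(inputWidth, inputHeight);
      }

      @Override
      public void drawFrame(int inputTexId, long presentationTimeUs) {
        // A real implementation binds its shader program, samples inputTexId, and issues a
        // draw call; queueInputFrame has already focused and cleared the output framebuffer.
      }
    }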

      A minimal implementation should tell OpenGL to use its shader program, bind the shader + * program's vertex attributes and uniforms, and issue a drawing command. + * + * @param inputTexId Identifier of a 2D OpenGL texture containing the input frame. + * @param presentationTimeUs The presentation timestamp of the current frame, in microseconds. + * @throws FrameProcessingException If an error occurs while processing or drawing the frame. + */ + public abstract void drawFrame(int inputTexId, long presentationTimeUs) + throws FrameProcessingException; + + @Override + public final void setInputListener(InputListener inputListener) { + this.inputListener = inputListener; + if (!outputTextureInUse) { + inputListener.onReadyToAcceptInputFrame(); + } + } + + @Override + public final void setOutputListener(OutputListener outputListener) { + this.outputListener = outputListener; + } + + @Override + public final void setErrorListener(ErrorListener errorListener) { + this.errorListener = errorListener; + } + + @Override + public final void queueInputFrame(TextureInfo inputTexture, long presentationTimeUs) { + checkState( + !outputTextureInUse, + "The texture processor does not currently accept input frames. Release prior output frames" + + " first."); + + try { + if (outputTexture == null + || inputTexture.width != inputWidth + || inputTexture.height != inputHeight) { + configureOutputTexture(inputTexture.width, inputTexture.height); + } + outputTextureInUse = true; + GlUtil.focusFramebufferUsingCurrentContext( + outputTexture.fboId, outputTexture.width, outputTexture.height); + GlUtil.clearOutputFrame(); + drawFrame(inputTexture.texId, presentationTimeUs); + inputListener.onInputFrameProcessed(inputTexture); + outputListener.onOutputFrameAvailable(outputTexture, presentationTimeUs); + } catch (FrameProcessingException | GlUtil.GlException | RuntimeException e) { + errorListener.onFrameProcessingError( + e instanceof FrameProcessingException + ? 
(FrameProcessingException) e + : new FrameProcessingException(e)); + } + } + + @EnsuresNonNull("outputTexture") + private void configureOutputTexture(int inputWidth, int inputHeight) throws GlUtil.GlException { + this.inputWidth = inputWidth; + this.inputHeight = inputHeight; + Pair outputSize = configure(inputWidth, inputHeight); + if (outputTexture == null + || outputSize.first != outputTexture.width + || outputSize.second != outputTexture.height) { + if (outputTexture != null) { + GlUtil.deleteTexture(outputTexture.texId); + } + int outputTexId = GlUtil.createTexture(outputSize.first, outputSize.second, useHdr); + int outputFboId = GlUtil.createFboForTexture(outputTexId); + outputTexture = + new TextureInfo(outputTexId, outputFboId, outputSize.first, outputSize.second); + } + } + + @Override + public final void releaseOutputFrame(TextureInfo outputTexture) { + outputTextureInUse = false; + inputListener.onReadyToAcceptInputFrame(); + } + + @Override + public final void signalEndOfCurrentInputStream() { + outputListener.onCurrentOutputStreamEnded(); + } + + @Override + @CallSuper + public void release() throws FrameProcessingException { + if (outputTexture != null) { + try { + GlUtil.deleteTexture(outputTexture.texId); + } catch (GlUtil.GlException e) { + throw new FrameProcessingException(e); + } + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/TextureInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/TextureInfo.java new file mode 100644 index 0000000000..e0d2eeb6b4 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/TextureInfo.java @@ -0,0 +1,50 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.effect; + +import com.google.android.exoplayer2.C; + +/** Contains information describing an OpenGL texture. */ +public final class TextureInfo { + + /** A {@link TextureInfo} instance with all fields unset. */ + public static final TextureInfo UNSET = + new TextureInfo(C.INDEX_UNSET, C.INDEX_UNSET, C.LENGTH_UNSET, C.LENGTH_UNSET); + + /** The OpenGL texture identifier. */ + public final int texId; + /** Identifier of a framebuffer object associated with the texture. */ + public final int fboId; + /** The width of the texture, in pixels. */ + public final int width; + /** The height of the texture, in pixels. */ + public final int height; + + /** + * Creates a new instance. + * + * @param texId The OpenGL texture identifier. + * @param fboId Identifier of a framebuffer object associated with the texture. + * @param width The width of the texture, in pixels. + * @param height The height of the texture, in pixels. 
+ */ + public TextureInfo(int texId, int fboId, int width, int height) { + this.texId = texId; + this.fboId = fboId; + this.width = width; + this.height = height; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/package-info.java new file mode 100644 index 0000000000..3297e6c25d --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/effect/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.effect; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegAudioDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegAudioDecoder.java new file mode 100644 index 0000000000..6d378dd818 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegAudioDecoder.java @@ -0,0 +1,233 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.ext.ffmpeg; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.decoder.DecoderInputBuffer; +import com.google.android.exoplayer2.decoder.SimpleDecoder; +import com.google.android.exoplayer2.decoder.SimpleDecoderOutputBuffer; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import java.nio.ByteBuffer; +import java.util.List; + +/** FFmpeg audio decoder. */ +/* package */ final class FfmpegAudioDecoder + extends SimpleDecoder { + + // Output buffer sizes when decoding PCM mu-law streams, which is the maximum FFmpeg outputs. 
+ private static final int OUTPUT_BUFFER_SIZE_16BIT = 65536; + private static final int OUTPUT_BUFFER_SIZE_32BIT = OUTPUT_BUFFER_SIZE_16BIT * 2; + + private static final int AUDIO_DECODER_ERROR_INVALID_DATA = -1; + private static final int AUDIO_DECODER_ERROR_OTHER = -2; + + private final String codecName; + @Nullable private final byte[] extraData; + private final @C.PcmEncoding int encoding; + private final int outputBufferSize; + + private long nativeContext; // May be reassigned on resetting the codec. + private boolean hasOutputFormat; + private volatile int channelCount; + private volatile int sampleRate; + + public FfmpegAudioDecoder( + Format format, + int numInputBuffers, + int numOutputBuffers, + int initialInputBufferSize, + boolean outputFloat) + throws FfmpegDecoderException { + super(new DecoderInputBuffer[numInputBuffers], new SimpleDecoderOutputBuffer[numOutputBuffers]); + if (!FfmpegLibrary.isAvailable()) { + throw new FfmpegDecoderException("Failed to load decoder native libraries."); + } + Assertions.checkNotNull(format.sampleMimeType); + codecName = Assertions.checkNotNull(FfmpegLibrary.getCodecName(format.sampleMimeType)); + extraData = getExtraData(format.sampleMimeType, format.initializationData); + encoding = outputFloat ? C.ENCODING_PCM_FLOAT : C.ENCODING_PCM_16BIT; + outputBufferSize = outputFloat ? OUTPUT_BUFFER_SIZE_32BIT : OUTPUT_BUFFER_SIZE_16BIT; + nativeContext = + ffmpegInitialize(codecName, extraData, outputFloat, format.sampleRate, format.channelCount); + if (nativeContext == 0) { + throw new FfmpegDecoderException("Initialization failed."); + } + setInitialInputBufferSize(initialInputBufferSize); + } + + @Override + public String getName() { + return "ffmpeg" + FfmpegLibrary.getVersion() + "-" + codecName; + } + + @Override + protected DecoderInputBuffer createInputBuffer() { + return new DecoderInputBuffer( + DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DIRECT, + FfmpegLibrary.getInputBufferPaddingSize()); + } + + @Override + protected SimpleDecoderOutputBuffer createOutputBuffer() { + return new SimpleDecoderOutputBuffer(this::releaseOutputBuffer); + } + + @Override + protected FfmpegDecoderException createUnexpectedDecodeException(Throwable error) { + return new FfmpegDecoderException("Unexpected decode error", error); + } + + @Override + @Nullable + protected FfmpegDecoderException decode( + DecoderInputBuffer inputBuffer, SimpleDecoderOutputBuffer outputBuffer, boolean reset) { + if (reset) { + nativeContext = ffmpegReset(nativeContext, extraData); + if (nativeContext == 0) { + return new FfmpegDecoderException("Error resetting (see logcat)."); + } + } + ByteBuffer inputData = Util.castNonNull(inputBuffer.data); + int inputSize = inputData.limit(); + ByteBuffer outputData = outputBuffer.init(inputBuffer.timeUs, outputBufferSize); + int result = ffmpegDecode(nativeContext, inputData, inputSize, outputData, outputBufferSize); + if (result == AUDIO_DECODER_ERROR_OTHER) { + return new FfmpegDecoderException("Error decoding (see logcat)."); + } else if (result == AUDIO_DECODER_ERROR_INVALID_DATA) { + // Treat invalid data errors as non-fatal to match the behavior of MediaCodec. No output will + // be produced for this buffer, so mark it as decode-only to ensure that the audio sink's + // position is reset when more audio is produced. + outputBuffer.setFlags(C.BUFFER_FLAG_DECODE_ONLY); + return null; + } else if (result == 0) { + // There's no need to output empty buffers. 
+ outputBuffer.setFlags(C.BUFFER_FLAG_DECODE_ONLY); + return null; + } + if (!hasOutputFormat) { + channelCount = ffmpegGetChannelCount(nativeContext); + sampleRate = ffmpegGetSampleRate(nativeContext); + if (sampleRate == 0 && "alac".equals(codecName)) { + Assertions.checkNotNull(extraData); + // ALAC decoder did not set the sample rate in earlier versions of FFmpeg. See + // https://trac.ffmpeg.org/ticket/6096. + ParsableByteArray parsableExtraData = new ParsableByteArray(extraData); + parsableExtraData.setPosition(extraData.length - 4); + sampleRate = parsableExtraData.readUnsignedIntToInt(); + } + hasOutputFormat = true; + } + outputData.position(0); + outputData.limit(result); + return null; + } + + @Override + public void release() { + super.release(); + ffmpegRelease(nativeContext); + nativeContext = 0; + } + + /** Returns the channel count of output audio. */ + public int getChannelCount() { + return channelCount; + } + + /** Returns the sample rate of output audio. */ + public int getSampleRate() { + return sampleRate; + } + + /** Returns the encoding of output audio. */ + public @C.PcmEncoding int getEncoding() { + return encoding; + } + + /** + * Returns FFmpeg-compatible codec-specific initialization data ("extra data"), or {@code null} if + * not required. + */ + @Nullable + private static byte[] getExtraData(String mimeType, List initializationData) { + switch (mimeType) { + case MimeTypes.AUDIO_AAC: + case MimeTypes.AUDIO_OPUS: + return initializationData.get(0); + case MimeTypes.AUDIO_ALAC: + return getAlacExtraData(initializationData); + case MimeTypes.AUDIO_VORBIS: + return getVorbisExtraData(initializationData); + default: + // Other codecs do not require extra data. + return null; + } + } + + private static byte[] getAlacExtraData(List initializationData) { + // FFmpeg's ALAC decoder expects an ALAC atom, which contains the ALAC "magic cookie", as extra + // data. initializationData[0] contains only the magic cookie, and so we need to package it into + // an ALAC atom. 
See: + // https://ffmpeg.org/doxygen/0.6/alac_8c.html + // https://github.com/macosforge/alac/blob/master/ALACMagicCookieDescription.txt + byte[] magicCookie = initializationData.get(0); + int alacAtomLength = 12 + magicCookie.length; + ByteBuffer alacAtom = ByteBuffer.allocate(alacAtomLength); + alacAtom.putInt(alacAtomLength); + alacAtom.putInt(0x616c6163); // type=alac + alacAtom.putInt(0); // version=0, flags=0 + alacAtom.put(magicCookie, /* offset= */ 0, magicCookie.length); + return alacAtom.array(); + } + + private static byte[] getVorbisExtraData(List initializationData) { + byte[] header0 = initializationData.get(0); + byte[] header1 = initializationData.get(1); + byte[] extraData = new byte[header0.length + header1.length + 6]; + extraData[0] = (byte) (header0.length >> 8); + extraData[1] = (byte) (header0.length & 0xFF); + System.arraycopy(header0, 0, extraData, 2, header0.length); + extraData[header0.length + 2] = 0; + extraData[header0.length + 3] = 0; + extraData[header0.length + 4] = (byte) (header1.length >> 8); + extraData[header0.length + 5] = (byte) (header1.length & 0xFF); + System.arraycopy(header1, 0, extraData, header0.length + 6, header1.length); + return extraData; + } + + private native long ffmpegInitialize( + String codecName, + @Nullable byte[] extraData, + boolean outputFloat, + int rawSampleRate, + int rawChannelCount); + + private native int ffmpegDecode( + long context, ByteBuffer inputData, int inputSize, ByteBuffer outputData, int outputSize); + + private native int ffmpegGetChannelCount(long context); + + private native int ffmpegGetSampleRate(long context); + + private native long ffmpegReset(long context, @Nullable byte[] extraData); + + private native void ffmpegRelease(long context); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegAudioRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegAudioRenderer.java index 1b30bf4a19..23a601c0db 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegAudioRenderer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegAudioRenderer.java @@ -15,42 +15,43 @@ */ package com.google.android.exoplayer2.ext.ffmpeg; +import static com.google.android.exoplayer2.audio.AudioSink.SINK_FORMAT_SUPPORTED_DIRECTLY; +import static com.google.android.exoplayer2.audio.AudioSink.SINK_FORMAT_SUPPORTED_WITH_TRANSCODING; +import static com.google.android.exoplayer2.audio.AudioSink.SINK_FORMAT_UNSUPPORTED; + import android.os.Handler; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.audio.AudioProcessor; import com.google.android.exoplayer2.audio.AudioRendererEventListener; import com.google.android.exoplayer2.audio.AudioSink; +import com.google.android.exoplayer2.audio.AudioSink.SinkFormatSupport; +import com.google.android.exoplayer2.audio.DecoderAudioRenderer; import com.google.android.exoplayer2.audio.DefaultAudioSink; -import com.google.android.exoplayer2.audio.SimpleDecoderAudioRenderer; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.drm.ExoMediaCrypto; +import com.google.android.exoplayer2.decoder.CryptoConfig; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.MimeTypes; -import java.util.Collections; -import 
org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import com.google.android.exoplayer2.util.TraceUtil; +import com.google.android.exoplayer2.util.Util; -/** - * Decodes and renders audio using FFmpeg. - */ -public final class FfmpegAudioRenderer extends SimpleDecoderAudioRenderer { +/** Decodes and renders audio using FFmpeg. */ +public final class FfmpegAudioRenderer extends DecoderAudioRenderer { + + private static final String TAG = "FfmpegAudioRenderer"; /** The number of input and output buffers. */ private static final int NUM_BUFFERS = 16; /** The default input buffer size. */ private static final int DEFAULT_INPUT_BUFFER_SIZE = 960 * 6; - private final boolean enableFloatOutput; - - private @MonotonicNonNull FfmpegDecoder decoder; - public FfmpegAudioRenderer() { this(/* eventHandler= */ null, /* eventListener= */ null); } /** + * Creates a new instance. + * * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. @@ -63,109 +64,106 @@ public FfmpegAudioRenderer( this( eventHandler, eventListener, - new DefaultAudioSink(/* audioCapabilities= */ null, audioProcessors), - /* enableFloatOutput= */ false); + new DefaultAudioSink.Builder().setAudioProcessors(audioProcessors).build()); } /** + * Creates a new instance. + * * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. * @param audioSink The sink to which audio will be output. - * @param enableFloatOutput Whether to enable 32-bit float audio format, if supported on the - * device/build and if the input format may have bit depth higher than 16-bit. When using - * 32-bit float output, any audio processing will be disabled, including playback speed/pitch - * adjustment. 
*/ public FfmpegAudioRenderer( @Nullable Handler eventHandler, @Nullable AudioRendererEventListener eventListener, - AudioSink audioSink, - boolean enableFloatOutput) { - super( - eventHandler, - eventListener, - /* drmSessionManager= */ null, - /* playClearSamplesWithoutKeys= */ false, - audioSink); - this.enableFloatOutput = enableFloatOutput; + AudioSink audioSink) { + super(eventHandler, eventListener, audioSink); + } + + @Override + public String getName() { + return TAG; } @Override - @FormatSupport - protected int supportsFormatInternal( - @Nullable DrmSessionManager drmSessionManager, Format format) { - Assertions.checkNotNull(format.sampleMimeType); - if (!FfmpegLibrary.supportsFormat(format.sampleMimeType) || !isOutputSupported(format)) { - return FORMAT_UNSUPPORTED_SUBTYPE; - } else if (!supportsFormatDrm(drmSessionManager, format.drmInitData)) { - return FORMAT_UNSUPPORTED_DRM; + protected @C.FormatSupport int supportsFormatInternal(Format format) { + String mimeType = Assertions.checkNotNull(format.sampleMimeType); + if (!FfmpegLibrary.isAvailable() || !MimeTypes.isAudio(mimeType)) { + return C.FORMAT_UNSUPPORTED_TYPE; + } else if (!FfmpegLibrary.supportsFormat(mimeType) + || (!sinkSupportsFormat(format, C.ENCODING_PCM_16BIT) + && !sinkSupportsFormat(format, C.ENCODING_PCM_FLOAT))) { + return C.FORMAT_UNSUPPORTED_SUBTYPE; + } else if (format.cryptoType != C.CRYPTO_TYPE_NONE) { + return C.FORMAT_UNSUPPORTED_DRM; } else { - return FORMAT_HANDLED; + return C.FORMAT_HANDLED; } } @Override - @AdaptiveSupport - public final int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException { + public @AdaptiveSupport int supportsMixedMimeTypeAdaptation() { return ADAPTIVE_NOT_SEAMLESS; } + /** {@inheritDoc} */ @Override - protected FfmpegDecoder createDecoder(Format format, @Nullable ExoMediaCrypto mediaCrypto) + protected FfmpegAudioDecoder createDecoder(Format format, @Nullable CryptoConfig cryptoConfig) throws FfmpegDecoderException { + TraceUtil.beginSection("createFfmpegAudioDecoder"); int initialInputBufferSize = format.maxInputSize != Format.NO_VALUE ? 
format.maxInputSize : DEFAULT_INPUT_BUFFER_SIZE; - decoder = - new FfmpegDecoder( - NUM_BUFFERS, NUM_BUFFERS, initialInputBufferSize, format, shouldUseFloatOutput(format)); + FfmpegAudioDecoder decoder = + new FfmpegAudioDecoder( + format, NUM_BUFFERS, NUM_BUFFERS, initialInputBufferSize, shouldOutputFloat(format)); + TraceUtil.endSection(); return decoder; } + /** {@inheritDoc} */ @Override - public Format getOutputFormat() { + protected Format getOutputFormat(FfmpegAudioDecoder decoder) { Assertions.checkNotNull(decoder); - int channelCount = decoder.getChannelCount(); - int sampleRate = decoder.getSampleRate(); - @C.PcmEncoding int encoding = decoder.getEncoding(); - return Format.createAudioSampleFormat( - /* id= */ null, - MimeTypes.AUDIO_RAW, - /* codecs= */ null, - Format.NO_VALUE, - Format.NO_VALUE, - channelCount, - sampleRate, - encoding, - Collections.emptyList(), - /* drmInitData= */ null, - /* selectionFlags= */ 0, - /* language= */ null); + return new Format.Builder() + .setSampleMimeType(MimeTypes.AUDIO_RAW) + .setChannelCount(decoder.getChannelCount()) + .setSampleRate(decoder.getSampleRate()) + .setPcmEncoding(decoder.getEncoding()) + .build(); } - private boolean isOutputSupported(Format inputFormat) { - return shouldUseFloatOutput(inputFormat) - || supportsOutput(inputFormat.channelCount, C.ENCODING_PCM_16BIT); + /** + * Returns whether the renderer's {@link AudioSink} supports the PCM format that will be output + * from the decoder for the given input format and requested output encoding. + */ + private boolean sinkSupportsFormat(Format inputFormat, @C.PcmEncoding int pcmEncoding) { + return sinkSupportsFormat( + Util.getPcmFormat(pcmEncoding, inputFormat.channelCount, inputFormat.sampleRate)); } - private boolean shouldUseFloatOutput(Format inputFormat) { - Assertions.checkNotNull(inputFormat.sampleMimeType); - if (!enableFloatOutput || !supportsOutput(inputFormat.channelCount, C.ENCODING_PCM_FLOAT)) { - return false; + private boolean shouldOutputFloat(Format inputFormat) { + if (!sinkSupportsFormat(inputFormat, C.ENCODING_PCM_16BIT)) { + // We have no choice because the sink doesn't support 16-bit integer PCM. + return true; } - switch (inputFormat.sampleMimeType) { - case MimeTypes.AUDIO_RAW: - // For raw audio, output in 32-bit float encoding if the bit depth is > 16-bit. - return inputFormat.pcmEncoding == C.ENCODING_PCM_24BIT - || inputFormat.pcmEncoding == C.ENCODING_PCM_32BIT - || inputFormat.pcmEncoding == C.ENCODING_PCM_FLOAT; - case MimeTypes.AUDIO_AC3: - // AC-3 is always 16-bit, so there is no point outputting in 32-bit float encoding. - return false; + + @SinkFormatSupport + int formatSupport = + getSinkFormatSupport( + Util.getPcmFormat( + C.ENCODING_PCM_FLOAT, inputFormat.channelCount, inputFormat.sampleRate)); + switch (formatSupport) { + case SINK_FORMAT_SUPPORTED_DIRECTLY: + // AC-3 is always 16-bit, so there's no point using floating point. Assume that it's worth + // using for all other formats. + return !MimeTypes.AUDIO_AC3.equals(inputFormat.sampleMimeType); + case SINK_FORMAT_UNSUPPORTED: + case SINK_FORMAT_SUPPORTED_WITH_TRANSCODING: default: - // For all other formats, assume that it's worth using 32-bit float encoding. - return true; + // Always prefer 16-bit PCM if the sink does not provide direct support for floating point. 
+ return false; } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegDecoder.java deleted file mode 100644 index 1312b108eb..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegDecoder.java +++ /dev/null @@ -1,226 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.ext.ffmpeg; - -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.decoder.DecoderInputBuffer; -import com.google.android.exoplayer2.decoder.SimpleDecoder; -import com.google.android.exoplayer2.decoder.SimpleOutputBuffer; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.MimeTypes; -import com.google.android.exoplayer2.util.ParsableByteArray; -import com.google.android.exoplayer2.util.Util; -import java.nio.ByteBuffer; -import java.util.List; - -/** - * FFmpeg audio decoder. - */ -/* package */ final class FfmpegDecoder extends - SimpleDecoder { - - // Output buffer sizes when decoding PCM mu-law streams, which is the maximum FFmpeg outputs. - private static final int OUTPUT_BUFFER_SIZE_16BIT = 65536; - private static final int OUTPUT_BUFFER_SIZE_32BIT = OUTPUT_BUFFER_SIZE_16BIT * 2; - - // Error codes matching ffmpeg_jni.cc. - private static final int DECODER_ERROR_INVALID_DATA = -1; - private static final int DECODER_ERROR_OTHER = -2; - - private final String codecName; - @Nullable private final byte[] extraData; - private final @C.Encoding int encoding; - private final int outputBufferSize; - - private long nativeContext; // May be reassigned on resetting the codec. - private boolean hasOutputFormat; - private volatile int channelCount; - private volatile int sampleRate; - - public FfmpegDecoder( - int numInputBuffers, - int numOutputBuffers, - int initialInputBufferSize, - Format format, - boolean outputFloat) - throws FfmpegDecoderException { - super(new DecoderInputBuffer[numInputBuffers], new SimpleOutputBuffer[numOutputBuffers]); - Assertions.checkNotNull(format.sampleMimeType); - codecName = Assertions.checkNotNull(FfmpegLibrary.getCodecName(format.sampleMimeType)); - extraData = getExtraData(format.sampleMimeType, format.initializationData); - encoding = outputFloat ? C.ENCODING_PCM_FLOAT : C.ENCODING_PCM_16BIT; - outputBufferSize = outputFloat ? 
OUTPUT_BUFFER_SIZE_32BIT : OUTPUT_BUFFER_SIZE_16BIT; - nativeContext = - ffmpegInitialize(codecName, extraData, outputFloat, format.sampleRate, format.channelCount); - if (nativeContext == 0) { - throw new FfmpegDecoderException("Initialization failed."); - } - setInitialInputBufferSize(initialInputBufferSize); - } - - @Override - public String getName() { - return "ffmpeg" + FfmpegLibrary.getVersion() + "-" + codecName; - } - - @Override - protected DecoderInputBuffer createInputBuffer() { - return new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DIRECT); - } - - @Override - protected SimpleOutputBuffer createOutputBuffer() { - return new SimpleOutputBuffer(this); - } - - @Override - protected FfmpegDecoderException createUnexpectedDecodeException(Throwable error) { - return new FfmpegDecoderException("Unexpected decode error", error); - } - - @Override - protected @Nullable FfmpegDecoderException decode( - DecoderInputBuffer inputBuffer, SimpleOutputBuffer outputBuffer, boolean reset) { - if (reset) { - nativeContext = ffmpegReset(nativeContext, extraData); - if (nativeContext == 0) { - return new FfmpegDecoderException("Error resetting (see logcat)."); - } - } - ByteBuffer inputData = Util.castNonNull(inputBuffer.data); - int inputSize = inputData.limit(); - ByteBuffer outputData = outputBuffer.init(inputBuffer.timeUs, outputBufferSize); - int result = ffmpegDecode(nativeContext, inputData, inputSize, outputData, outputBufferSize); - if (result == DECODER_ERROR_INVALID_DATA) { - // Treat invalid data errors as non-fatal to match the behavior of MediaCodec. No output will - // be produced for this buffer, so mark it as decode-only to ensure that the audio sink's - // position is reset when more audio is produced. - outputBuffer.setFlags(C.BUFFER_FLAG_DECODE_ONLY); - return null; - } else if (result == DECODER_ERROR_OTHER) { - return new FfmpegDecoderException("Error decoding (see logcat)."); - } - if (!hasOutputFormat) { - channelCount = ffmpegGetChannelCount(nativeContext); - sampleRate = ffmpegGetSampleRate(nativeContext); - if (sampleRate == 0 && "alac".equals(codecName)) { - Assertions.checkNotNull(extraData); - // ALAC decoder did not set the sample rate in earlier versions of FFMPEG. - // See https://trac.ffmpeg.org/ticket/6096 - ParsableByteArray parsableExtraData = new ParsableByteArray(extraData); - parsableExtraData.setPosition(extraData.length - 4); - sampleRate = parsableExtraData.readUnsignedIntToInt(); - } - hasOutputFormat = true; - } - outputData.position(0); - outputData.limit(result); - return null; - } - - @Override - public void release() { - super.release(); - ffmpegRelease(nativeContext); - nativeContext = 0; - } - - /** Returns the channel count of output audio. */ - public int getChannelCount() { - return channelCount; - } - - /** Returns the sample rate of output audio. */ - public int getSampleRate() { - return sampleRate; - } - - /** - * Returns the encoding of output audio. - */ - public @C.Encoding int getEncoding() { - return encoding; - } - - /** - * Returns FFmpeg-compatible codec-specific initialization data ("extra data"), or {@code null} if - * not required. 
- */ - private static @Nullable byte[] getExtraData(String mimeType, List initializationData) { - switch (mimeType) { - case MimeTypes.AUDIO_AAC: - case MimeTypes.AUDIO_OPUS: - return initializationData.get(0); - case MimeTypes.AUDIO_ALAC: - return getAlacExtraData(initializationData); - case MimeTypes.AUDIO_VORBIS: - return getVorbisExtraData(initializationData); - default: - // Other codecs do not require extra data. - return null; - } - } - - private static byte[] getAlacExtraData(List initializationData) { - // FFmpeg's ALAC decoder expects an ALAC atom, which contains the ALAC "magic cookie", as extra - // data. initializationData[0] contains only the magic cookie, and so we need to package it into - // an ALAC atom. See: - // https://ffmpeg.org/doxygen/0.6/alac_8c.html - // https://github.com/macosforge/alac/blob/master/ALACMagicCookieDescription.txt - byte[] magicCookie = initializationData.get(0); - int alacAtomLength = 12 + magicCookie.length; - ByteBuffer alacAtom = ByteBuffer.allocate(alacAtomLength); - alacAtom.putInt(alacAtomLength); - alacAtom.putInt(0x616c6163); // type=alac - alacAtom.putInt(0); // version=0, flags=0 - alacAtom.put(magicCookie, /* offset= */ 0, magicCookie.length); - return alacAtom.array(); - } - - private static byte[] getVorbisExtraData(List initializationData) { - byte[] header0 = initializationData.get(0); - byte[] header1 = initializationData.get(1); - byte[] extraData = new byte[header0.length + header1.length + 6]; - extraData[0] = (byte) (header0.length >> 8); - extraData[1] = (byte) (header0.length & 0xFF); - System.arraycopy(header0, 0, extraData, 2, header0.length); - extraData[header0.length + 2] = 0; - extraData[header0.length + 3] = 0; - extraData[header0.length + 4] = (byte) (header1.length >> 8); - extraData[header0.length + 5] = (byte) (header1.length & 0xFF); - System.arraycopy(header1, 0, extraData, header0.length + 6, header1.length); - return extraData; - } - - private native long ffmpegInitialize( - String codecName, - @Nullable byte[] extraData, - boolean outputFloat, - int rawSampleRate, - int rawChannelCount); - - private native int ffmpegDecode(long context, ByteBuffer inputData, int inputSize, - ByteBuffer outputData, int outputSize); - private native int ffmpegGetChannelCount(long context); - private native int ffmpegGetSampleRate(long context); - - private native long ffmpegReset(long context, @Nullable byte[] extraData); - - private native void ffmpegRelease(long context); - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegDecoderException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegDecoderException.java index d6b5a62450..47d5017350 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegDecoderException.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegDecoderException.java @@ -15,12 +15,10 @@ */ package com.google.android.exoplayer2.ext.ffmpeg; -import com.google.android.exoplayer2.audio.AudioDecoderException; +import com.google.android.exoplayer2.decoder.DecoderException; -/** - * Thrown when an FFmpeg decoder error occurs. - */ -public final class FfmpegDecoderException extends AudioDecoderException { +/** Thrown when an FFmpeg decoder error occurs. 
*/ +public final class FfmpegDecoderException extends DecoderException { /* package */ FfmpegDecoderException(String message) { super(message); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegLibrary.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegLibrary.java index 02f0bf16da..fa97a3877c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegLibrary.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegLibrary.java @@ -16,14 +16,14 @@ package com.google.android.exoplayer2.ext.ffmpeg; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlayerLibraryInfo; import com.google.android.exoplayer2.util.LibraryLoader; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * Configures and queries the underlying native library. - */ +/** Configures and queries the underlying native library. */ public final class FfmpegLibrary { static { @@ -32,11 +32,59 @@ public final class FfmpegLibrary { private static final String TAG = "FfmpegLibrary"; + private static final LibraryLoader LOADER = + new LibraryLoader("ffmpegJNI") { + @Override + protected void loadLibrary(String name) { + System.loadLibrary(name); + } + }; + + private static @MonotonicNonNull String version; + private static int inputBufferPaddingSize = C.LENGTH_UNSET; + private FfmpegLibrary() {} + /** + * Override the names of the FFmpeg native libraries. If an application wishes to call this + * method, it must do so before calling any other method defined by this class, and before + * instantiating a {@link FfmpegAudioRenderer} instance. + * + * @param libraries The names of the FFmpeg native libraries. + */ + public static void setLibraries(String... libraries) { + LOADER.setLibraries(libraries); + } + + /** Returns whether the underlying library is available, loading it if necessary. */ + public static boolean isAvailable() { + return LOADER.isAvailable(); + } + /** Returns the version of the underlying library if available, or null otherwise. */ - public static @Nullable String getVersion() { - return ffmpegGetVersion(); + @Nullable + public static String getVersion() { + if (!isAvailable()) { + return null; + } + if (version == null) { + version = ffmpegGetVersion(); + } + return version; + } + + /** + * Returns the required amount of padding for input buffers in bytes, or {@link C#LENGTH_UNSET} if + * the underlying library is not available. + */ + public static int getInputBufferPaddingSize() { + if (!isAvailable()) { + return C.LENGTH_UNSET; + } + if (inputBufferPaddingSize == C.LENGTH_UNSET) { + inputBufferPaddingSize = ffmpegGetInputBufferPaddingSize(); + } + return inputBufferPaddingSize; } /** @@ -45,7 +93,10 @@ private FfmpegLibrary() {} * @param mimeType The MIME type to check. */ public static boolean supportsFormat(String mimeType) { - String codecName = getCodecName(mimeType); + if (!isAvailable()) { + return false; + } + @Nullable String codecName = getCodecName(mimeType); if (codecName == null) { return false; } @@ -60,7 +111,8 @@ public static boolean supportsFormat(String mimeType) { * Returns the name of the FFmpeg decoder that could be used to decode the format, or {@code null} * if it's unsupported. 
*/ - /* package */ static @Nullable String getCodecName(String mimeType) { + @Nullable + /* package */ static String getCodecName(String mimeType) { switch (mimeType) { case MimeTypes.AUDIO_AAC: return "aac"; @@ -103,6 +155,8 @@ public static boolean supportsFormat(String mimeType) { } private static native String ffmpegGetVersion(); - private static native boolean ffmpegHasDecoder(String codecName); + private static native int ffmpegGetInputBufferPaddingSize(); + + private static native boolean ffmpegHasDecoder(String codecName); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacBinarySearchSeeker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacBinarySearchSeeker.java index 34b3ad2df5..3cbc8fe1a9 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacBinarySearchSeeker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacBinarySearchSeeker.java @@ -15,12 +15,13 @@ */ package com.google.android.exoplayer2.ext.flac; +import static java.lang.Math.max; + import com.google.android.exoplayer2.extractor.BinarySearchSeeker; import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.FlacStreamMetadata; import com.google.android.exoplayer2.extractor.SeekMap; import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.FlacConstants; -import com.google.android.exoplayer2.util.FlacStreamMetadata; import java.io.IOException; import java.nio.ByteBuffer; @@ -48,6 +49,8 @@ public OutputFrameHolder(ByteBuffer outputByteBuffer) { } } + private static final int MIN_FRAME_HEADER_SIZE = 6; + private final FlacDecoderJni decoderJni; /** @@ -74,8 +77,7 @@ public FlacBinarySearchSeeker( /* floorBytePosition= */ firstFramePosition, /* ceilingBytePosition= */ inputLength, /* approxBytesPerFrame= */ streamMetadata.getApproxBytesPerFrame(), - /* minimumSearchRange= */ Math.max( - FlacConstants.MIN_FRAME_HEADER_SIZE, streamMetadata.minFrameSize)); + /* minimumSearchRange= */ max(MIN_FRAME_HEADER_SIZE, streamMetadata.minFrameSize)); this.decoderJni = Assertions.checkNotNull(decoderJni); } @@ -100,7 +102,7 @@ private FlacTimestampSeeker(FlacDecoderJni decoderJni, OutputFrameHolder outputF @Override public TimestampSearchResult searchForTimestamp(ExtractorInput input, long targetSampleIndex) - throws IOException, InterruptedException { + throws IOException { ByteBuffer outputBuffer = outputFrameHolder.byteBuffer; long searchPosition = input.getPosition(); decoderJni.reset(searchPosition); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacDecoder.java index 013b23ef21..f2ac5f40e8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacDecoder.java @@ -15,23 +15,25 @@ */ package com.google.android.exoplayer2.ext.flac; +import static androidx.annotation.VisibleForTesting.PACKAGE_PRIVATE; + import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.decoder.SimpleDecoder; -import com.google.android.exoplayer2.decoder.SimpleOutputBuffer; -import 
com.google.android.exoplayer2.util.FlacStreamMetadata; +import com.google.android.exoplayer2.decoder.SimpleDecoderOutputBuffer; +import com.google.android.exoplayer2.extractor.FlacStreamMetadata; import com.google.android.exoplayer2.util.Util; import java.io.IOException; import java.nio.ByteBuffer; import java.util.List; -/** - * Flac decoder. - */ -/* package */ final class FlacDecoder extends - SimpleDecoder { +/** Flac decoder. */ +@VisibleForTesting(otherwise = PACKAGE_PRIVATE) +public final class FlacDecoder + extends SimpleDecoder { private final FlacStreamMetadata streamMetadata; private final FlacDecoderJni decoderJni; @@ -53,7 +55,7 @@ public FlacDecoder( int maxInputBufferSize, List initializationData) throws FlacDecoderException { - super(new DecoderInputBuffer[numInputBuffers], new SimpleOutputBuffer[numOutputBuffers]); + super(new DecoderInputBuffer[numInputBuffers], new SimpleDecoderOutputBuffer[numOutputBuffers]); if (initializationData.size() != 1) { throw new FlacDecoderException("Initialization data must be of length 1"); } @@ -63,7 +65,7 @@ public FlacDecoder( streamMetadata = decoderJni.decodeStreamMetadata(); } catch (ParserException e) { throw new FlacDecoderException("Failed to decode StreamInfo", e); - } catch (IOException | InterruptedException e) { + } catch (IOException e) { // Never happens. throw new IllegalStateException(e); } @@ -84,8 +86,8 @@ protected DecoderInputBuffer createInputBuffer() { } @Override - protected SimpleOutputBuffer createOutputBuffer() { - return new SimpleOutputBuffer(this); + protected SimpleDecoderOutputBuffer createOutputBuffer() { + return new SimpleDecoderOutputBuffer(this::releaseOutputBuffer); } @Override @@ -96,7 +98,7 @@ protected FlacDecoderException createUnexpectedDecodeException(Throwable error) @Override @Nullable protected FlacDecoderException decode( - DecoderInputBuffer inputBuffer, SimpleOutputBuffer outputBuffer, boolean reset) { + DecoderInputBuffer inputBuffer, SimpleDecoderOutputBuffer outputBuffer, boolean reset) { if (reset) { decoderJni.flush(); } @@ -107,7 +109,7 @@ protected FlacDecoderException decode( decoderJni.decodeSample(outputData); } catch (FlacDecoderJni.FlacFrameDecodeException e) { return new FlacDecoderException("Frame decoding failed", e); - } catch (IOException | InterruptedException e) { + } catch (IOException e) { // Never happens. throw new IllegalStateException(e); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacDecoderException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacDecoderException.java index 95d7f87c05..2c2f56e06b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacDecoderException.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacDecoderException.java @@ -15,12 +15,10 @@ */ package com.google.android.exoplayer2.ext.flac; -import com.google.android.exoplayer2.audio.AudioDecoderException; +import com.google.android.exoplayer2.decoder.DecoderException; -/** - * Thrown when an Flac decoder error occurs. - */ -public final class FlacDecoderException extends AudioDecoderException { +/** Thrown when an Flac decoder error occurs. 
*/ +public final class FlacDecoderException extends DecoderException { /* package */ FlacDecoderException(String message) { super(message); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacDecoderJni.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacDecoderJni.java index f3e8551e1e..c4e5e4e710 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacDecoderJni.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacDecoderJni.java @@ -15,20 +15,20 @@ */ package com.google.android.exoplayer2.ext.flac; +import static java.lang.Math.min; + import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.FlacStreamMetadata; import com.google.android.exoplayer2.extractor.SeekMap; import com.google.android.exoplayer2.extractor.SeekPoint; -import com.google.android.exoplayer2.util.FlacStreamMetadata; import com.google.android.exoplayer2.util.Util; import java.io.IOException; import java.nio.ByteBuffer; -/** - * JNI wrapper for the libflac Flac decoder. - */ +/** JNI wrapper for the libflac Flac decoder. */ /* package */ final class FlacDecoderJni { /** Exception to be thrown if {@link #decodeSample(ByteBuffer)} fails to decode a frame. */ @@ -51,13 +51,10 @@ public FlacFrameDecodeException(String message, int errorCode) { @Nullable private byte[] tempBuffer; private boolean endOfExtractorInput; - // the constructor does not initialize fields: tempBuffer - // call to flacInit() not allowed on the given receiver. - @SuppressWarnings({ - "nullness:initialization.fields.uninitialized", - "nullness:method.invocation.invalid" - }) public FlacDecoderJni() throws FlacDecoderException { + if (!FlacLibrary.isAvailable()) { + throw new FlacDecoderException("Failed to load decoder native libraries."); + } nativeDecoderContext = flacInit(); if (nativeDecoderContext == 0) { throw new FlacDecoderException("Failed to initialize decoder"); @@ -118,10 +115,10 @@ public void clearData() { * read from the source, then 0 is returned. */ @SuppressWarnings("unused") // Called from native code. - public int read(ByteBuffer target) throws IOException, InterruptedException { + public int read(ByteBuffer target) throws IOException { int byteCount = target.remaining(); if (byteBufferData != null) { - byteCount = Math.min(byteCount, byteBufferData.remaining()); + byteCount = min(byteCount, byteBufferData.remaining()); int originalLimit = byteBufferData.limit(); byteBufferData.limit(byteBufferData.position() + byteCount); target.put(byteBufferData); @@ -129,7 +126,7 @@ public int read(ByteBuffer target) throws IOException, InterruptedException { } else if (extractorInput != null) { ExtractorInput extractorInput = this.extractorInput; byte[] tempBuffer = Util.castNonNull(this.tempBuffer); - byteCount = Math.min(byteCount, TEMP_BUFFER_SIZE); + byteCount = min(byteCount, TEMP_BUFFER_SIZE); int read = readFromExtractorInput(extractorInput, tempBuffer, /* offset= */ 0, byteCount); if (read < 4) { // Reading less than 4 bytes, most of the time, happens because of getting the bytes left in @@ -148,10 +145,11 @@ public int read(ByteBuffer target) throws IOException, InterruptedException { } /** Decodes and consumes the metadata from the FLAC stream. 
*/ - public FlacStreamMetadata decodeStreamMetadata() throws IOException, InterruptedException { + public FlacStreamMetadata decodeStreamMetadata() throws IOException { FlacStreamMetadata streamMetadata = flacDecodeMetadata(nativeDecoderContext); if (streamMetadata == null) { - throw new ParserException("Failed to decode stream metadata"); + throw ParserException.createForMalformedContainer( + "Failed to decode stream metadata", /* cause= */ null); } return streamMetadata; } @@ -164,7 +162,7 @@ public FlacStreamMetadata decodeStreamMetadata() throws IOException, Interrupted * @param retryPosition If any error happens, the input will be rewound to {@code retryPosition}. */ public void decodeSampleWithBacktrackPosition(ByteBuffer output, long retryPosition) - throws InterruptedException, IOException, FlacFrameDecodeException { + throws IOException, FlacFrameDecodeException { try { decodeSample(output); } catch (IOException e) { @@ -180,8 +178,7 @@ public void decodeSampleWithBacktrackPosition(ByteBuffer output, long retryPosit /** Decodes and consumes the next sample from the FLAC stream into the given byte buffer. */ @SuppressWarnings("ByteBufferBackingArray") - public void decodeSample(ByteBuffer output) - throws IOException, InterruptedException, FlacFrameDecodeException { + public void decodeSample(ByteBuffer output) throws IOException, FlacFrameDecodeException { output.clear(); int frameSize = output.isDirect() @@ -198,9 +195,7 @@ public void decodeSample(ByteBuffer output) } } - /** - * Returns the position of the next data to be decoded, or -1 in case of error. - */ + /** Returns the position of the next data to be decoded, or -1 in case of error. */ public long getDecodePosition() { return flacGetDecodePosition(nativeDecoderContext); } @@ -269,8 +264,7 @@ public void release() { } private int readFromExtractorInput( - ExtractorInput extractorInput, byte[] tempBuffer, int offset, int length) - throws IOException, InterruptedException { + ExtractorInput extractorInput, byte[] tempBuffer, int offset, int length) throws IOException { int read = extractorInput.read(tempBuffer, offset, length); if (read == C.RESULT_END_OF_INPUT) { endOfExtractorInput = true; @@ -281,14 +275,11 @@ private int readFromExtractorInput( private native long flacInit(); - private native FlacStreamMetadata flacDecodeMetadata(long context) - throws IOException, InterruptedException; + private native FlacStreamMetadata flacDecodeMetadata(long context) throws IOException; - private native int flacDecodeToBuffer(long context, ByteBuffer outputBuffer) - throws IOException, InterruptedException; + private native int flacDecodeToBuffer(long context, ByteBuffer outputBuffer) throws IOException; - private native int flacDecodeToArray(long context, byte[] outputArray) - throws IOException, InterruptedException; + private native int flacDecodeToArray(long context, byte[] outputArray) throws IOException; private native long flacGetDecodePosition(long context); @@ -309,5 +300,4 @@ private native int flacDecodeToArray(long context, byte[] outputArray) private native void flacReset(long context, long newPosition); private native void flacRelease(long context); - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacExtractor.java index 2c6f51da02..0c51811e8e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacExtractor.java +++ 
b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacExtractor.java @@ -16,6 +16,7 @@ package com.google.android.exoplayer2.ext.flac; import static com.google.android.exoplayer2.util.Util.getPcmEncoding; +import static java.lang.annotation.ElementType.TYPE_USE; import androidx.annotation.IntDef; import androidx.annotation.Nullable; @@ -27,38 +28,42 @@ import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.ExtractorsFactory; import com.google.android.exoplayer2.extractor.FlacMetadataReader; +import com.google.android.exoplayer2.extractor.FlacStreamMetadata; import com.google.android.exoplayer2.extractor.PositionHolder; import com.google.android.exoplayer2.extractor.SeekMap; import com.google.android.exoplayer2.extractor.SeekPoint; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.FlacStreamMetadata; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; import java.io.IOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.nio.ByteBuffer; import org.checkerframework.checker.nullness.qual.EnsuresNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * Facilitates the extraction of data from the FLAC container format. - */ +/** Facilitates the extraction of data from the FLAC container format. */ public final class FlacExtractor implements Extractor { /** Factory that returns one extractor which is a {@link FlacExtractor}. */ public static final ExtractorsFactory FACTORY = () -> new Extractor[] {new FlacExtractor()}; + /* + * Flags in the two FLAC extractors should be kept in sync. If we ever change this then + * DefaultExtractorsFactory will need modifying, because it currently assumes this is the case. + */ /** * Flags controlling the behavior of the extractor. Possible flag value is {@link * #FLAG_DISABLE_ID3_METADATA}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef( flag = true, value = {FLAG_DISABLE_ID3_METADATA}) @@ -68,7 +73,8 @@ public final class FlacExtractor implements Extractor { * Flag to disable parsing of ID3 metadata. Can be set to save memory if ID3 metadata is not * required. 
*/ - public static final int FLAG_DISABLE_ID3_METADATA = 1; + public static final int FLAG_DISABLE_ID3_METADATA = + com.google.android.exoplayer2.extractor.flac.FlacExtractor.FLAG_DISABLE_ID3_METADATA; private final ParsableByteArray outputBuffer; private final boolean id3MetadataDisabled; @@ -113,14 +119,13 @@ public void init(ExtractorOutput output) { } @Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { + public boolean sniff(ExtractorInput input) throws IOException { id3Metadata = FlacMetadataReader.peekId3Metadata(input, /* parseData= */ !id3MetadataDisabled); return FlacMetadataReader.checkAndPeekStreamMarker(input); } @Override - public int read(final ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + public int read(final ExtractorInput input, PositionHolder seekPosition) throws IOException { if (input.getPosition() == 0 && !id3MetadataDisabled && id3Metadata == null) { id3Metadata = FlacMetadataReader.peekId3Metadata(input, /* parseData= */ true); } @@ -175,7 +180,7 @@ public void release() { } @EnsuresNonNull({"decoderJni", "extractorOutput", "trackOutput"}) // Ensures initialized. - @SuppressWarnings({"contracts.postcondition.not.satisfied"}) + @SuppressWarnings("nullness:contracts.postcondition") private FlacDecoderJni initDecoderJni(ExtractorInput input) { FlacDecoderJni decoderJni = Assertions.checkNotNull(this.decoderJni); decoderJni.setData(input); @@ -184,8 +189,8 @@ private FlacDecoderJni initDecoderJni(ExtractorInput input) { @RequiresNonNull({"decoderJni", "extractorOutput", "trackOutput"}) // Requires initialized. @EnsuresNonNull({"streamMetadata", "outputFrameHolder"}) // Ensures stream metadata decoded. - @SuppressWarnings({"contracts.postcondition.not.satisfied"}) - private void decodeStreamMetadata(ExtractorInput input) throws InterruptedException, IOException { + @SuppressWarnings("nullness:contracts.postcondition") + private void decodeStreamMetadata(ExtractorInput input) throws IOException { if (streamMetadataDecoded) { return; } @@ -204,7 +209,7 @@ private void decodeStreamMetadata(ExtractorInput input) throws InterruptedExcept if (this.streamMetadata == null) { this.streamMetadata = streamMetadata; outputBuffer.reset(streamMetadata.getMaxDecodedFrameSize()); - outputFrameHolder = new OutputFrameHolder(ByteBuffer.wrap(outputBuffer.data)); + outputFrameHolder = new OutputFrameHolder(ByteBuffer.wrap(outputBuffer.getData())); binarySearchSeeker = outputSeekMap( flacDecoderJni, @@ -225,7 +230,7 @@ private int handlePendingSeek( ParsableByteArray outputBuffer, OutputFrameHolder outputFrameHolder, TrackOutput trackOutput) - throws InterruptedException, IOException { + throws IOException { int seekResult = binarySearchSeeker.handlePendingSeek(input, seekPosition); ByteBuffer outputByteBuffer = outputFrameHolder.byteBuffer; if (seekResult == RESULT_CONTINUE && outputByteBuffer.limit() > 0) { @@ -250,7 +255,7 @@ private static FlacBinarySearchSeeker outputSeekMap( SeekMap seekMap; if (haveSeekTable) { seekMap = new FlacSeekMap(streamMetadata.getDurationUs(), decoderJni); - } else if (streamLength != C.LENGTH_UNSET) { + } else if (streamLength != C.LENGTH_UNSET && streamMetadata.totalSamples > 0) { long firstFramePosition = decoderJni.getDecodePosition(); binarySearchSeeker = new FlacBinarySearchSeeker( @@ -266,22 +271,16 @@ private static FlacBinarySearchSeeker outputSeekMap( private static void outputFormat( FlacStreamMetadata streamMetadata, @Nullable Metadata metadata, 
TrackOutput output) { Format mediaFormat = - Format.createAudioSampleFormat( - /* id= */ null, - MimeTypes.AUDIO_RAW, - /* codecs= */ null, - streamMetadata.getBitRate(), - streamMetadata.getMaxDecodedFrameSize(), - streamMetadata.channels, - streamMetadata.sampleRate, - getPcmEncoding(streamMetadata.bitsPerSample), - /* encoderDelay= */ 0, - /* encoderPadding= */ 0, - /* initializationData= */ null, - /* drmInitData= */ null, - /* selectionFlags= */ 0, - /* language= */ null, - metadata); + new Format.Builder() + .setSampleMimeType(MimeTypes.AUDIO_RAW) + .setAverageBitrate(streamMetadata.getDecodedBitrate()) + .setPeakBitrate(streamMetadata.getDecodedBitrate()) + .setMaxInputSize(streamMetadata.getMaxDecodedFrameSize()) + .setChannelCount(streamMetadata.channels) + .setSampleRate(streamMetadata.sampleRate) + .setPcmEncoding(getPcmEncoding(streamMetadata.bitsPerSample)) + .setMetadata(metadata) + .build(); output.format(mediaFormat); } @@ -290,7 +289,7 @@ private static void outputSample( sampleData.setPosition(0); output.sampleData(sampleData, size); output.sampleMetadata( - timeUs, C.BUFFER_FLAG_KEY_FRAME, size, /* offset= */ 0, /* encryptionData= */ null); + timeUs, C.BUFFER_FLAG_KEY_FRAME, size, /* offset= */ 0, /* cryptoData= */ null); } /** A {@link SeekMap} implementation using a SeekTable within the Flac stream. */ diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacLibrary.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacLibrary.java index 3b6a14f61a..9dd97059e0 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacLibrary.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/FlacLibrary.java @@ -16,16 +16,38 @@ package com.google.android.exoplayer2.ext.flac; import com.google.android.exoplayer2.ExoPlayerLibraryInfo; +import com.google.android.exoplayer2.util.LibraryLoader; -/** - * Configures and queries the underlying native library. - */ +/** Configures and queries the underlying native library. */ public final class FlacLibrary { static { ExoPlayerLibraryInfo.registerModule("goog.exo.flac"); } + private static final LibraryLoader LOADER = + new LibraryLoader("flacJNI") { + @Override + protected void loadLibrary(String name) { + System.loadLibrary(name); + } + }; + private FlacLibrary() {} + /** + * Override the names of the Flac native libraries. If an application wishes to call this method, + * it must do so before calling any other method defined by this class, and before instantiating + * any {@link LibflacAudioRenderer} and {@link FlacExtractor} instances. + * + * @param libraries The names of the Flac native libraries. + */ + public static void setLibraries(String... libraries) { + LOADER.setLibraries(libraries); + } + + /** Returns whether the underlying library is available, loading it if necessary. 
*/ + public static boolean isAvailable() { + return LOADER.isAvailable(); + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/LibflacAudioRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/LibflacAudioRenderer.java index 671eceea47..f35248fb61 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/LibflacAudioRenderer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/flac/LibflacAudioRenderer.java @@ -22,28 +22,28 @@ import com.google.android.exoplayer2.audio.AudioProcessor; import com.google.android.exoplayer2.audio.AudioRendererEventListener; import com.google.android.exoplayer2.audio.AudioSink; -import com.google.android.exoplayer2.audio.SimpleDecoderAudioRenderer; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.drm.ExoMediaCrypto; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.FlacConstants; -import com.google.android.exoplayer2.util.FlacStreamMetadata; +import com.google.android.exoplayer2.audio.DecoderAudioRenderer; +import com.google.android.exoplayer2.decoder.CryptoConfig; +import com.google.android.exoplayer2.extractor.FlacStreamMetadata; import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.TraceUtil; import com.google.android.exoplayer2.util.Util; -import org.checkerframework.checker.nullness.qual.MonotonicNonNull; /** Decodes and renders audio using the native Flac decoder. */ -public final class LibflacAudioRenderer extends SimpleDecoderAudioRenderer { +public final class LibflacAudioRenderer extends DecoderAudioRenderer { + private static final String TAG = "LibflacAudioRenderer"; private static final int NUM_BUFFERS = 16; - - @MonotonicNonNull private FlacStreamMetadata streamMetadata; + private static final int STREAM_MARKER_SIZE = 4; + private static final int METADATA_BLOCK_HEADER_SIZE = 4; public LibflacAudioRenderer() { this(/* eventHandler= */ null, /* eventListener= */ null); } /** + * Creates an instance. + * * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. @@ -57,6 +57,8 @@ public LibflacAudioRenderer( } /** + * Creates an instance. + * * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. 
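Both `FlacLibrary` and `FfmpegLibrary` now expose `setLibraries(...)` and `isAvailable()` backed by a `LibraryLoader`, so native-library availability can be probed (and the library names overridden) before any renderer is constructed. A minimal sketch of that pattern follows; the helper class is hypothetical and not part of this patch — only the `isAvailable()` calls and the no-arg renderer constructors come from the code above:

```java
// Hypothetical helper (not part of this patch) showing how the new
// FlacLibrary.isAvailable() / FfmpegLibrary.isAvailable() checks can gate
// construction of the extension audio renderers.
import com.google.android.exoplayer2.Renderer;
import com.google.android.exoplayer2.ext.ffmpeg.FfmpegAudioRenderer;
import com.google.android.exoplayer2.ext.ffmpeg.FfmpegLibrary;
import com.google.android.exoplayer2.ext.flac.FlacLibrary;
import com.google.android.exoplayer2.ext.flac.LibflacAudioRenderer;
import java.util.ArrayList;
import java.util.List;

final class ExtensionAudioRenderers {

  private ExtensionAudioRenderers() {}

  /** Returns the extension audio renderers whose native libraries loaded successfully. */
  static List<Renderer> create() {
    List<Renderer> renderers = new ArrayList<>();
    // isAvailable() loads the JNI library on first use and caches the result.
    if (FlacLibrary.isAvailable()) {
      renderers.add(new LibflacAudioRenderer()); // no-arg constructor: no event delivery
    }
    if (FfmpegLibrary.isAvailable()) {
      renderers.add(new FfmpegAudioRenderer());
    }
    return renderers;
  }
}
```

A host app would typically append these ahead of (or after) the default MediaCodec audio renderer, depending on whether the native decoders should take priority.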
@@ -66,69 +68,65 @@ public LibflacAudioRenderer( @Nullable Handler eventHandler, @Nullable AudioRendererEventListener eventListener, AudioSink audioSink) { - super( - eventHandler, - eventListener, - /* drmSessionManager= */ null, - /* playClearSamplesWithoutKeys= */ false, - audioSink); + super(eventHandler, eventListener, audioSink); + } + + @Override + public String getName() { + return TAG; } @Override - @FormatSupport - protected int supportsFormatInternal( - @Nullable DrmSessionManager drmSessionManager, Format format) { - if (!MimeTypes.AUDIO_FLAC.equalsIgnoreCase(format.sampleMimeType)) { - return FORMAT_UNSUPPORTED_TYPE; + protected @C.FormatSupport int supportsFormatInternal(Format format) { + if (!FlacLibrary.isAvailable() + || !MimeTypes.AUDIO_FLAC.equalsIgnoreCase(format.sampleMimeType)) { + return C.FORMAT_UNSUPPORTED_TYPE; } - // Compute the PCM encoding that the FLAC decoder will output. - @C.PcmEncoding int pcmEncoding; + // Compute the format that the FLAC decoder will output. + Format outputFormat; if (format.initializationData.isEmpty()) { // The initialization data might not be set if the format was obtained from a manifest (e.g. // for DASH playbacks) rather than directly from the media. In this case we assume // ENCODING_PCM_16BIT. If the actual encoding is different then playback will still succeed as // long as the AudioSink supports it, which will always be true when using DefaultAudioSink. - pcmEncoding = C.ENCODING_PCM_16BIT; + outputFormat = + Util.getPcmFormat(C.ENCODING_PCM_16BIT, format.channelCount, format.sampleRate); } else { - int streamMetadataOffset = - FlacConstants.STREAM_MARKER_SIZE + FlacConstants.METADATA_BLOCK_HEADER_SIZE; + int streamMetadataOffset = STREAM_MARKER_SIZE + METADATA_BLOCK_HEADER_SIZE; FlacStreamMetadata streamMetadata = new FlacStreamMetadata(format.initializationData.get(0), streamMetadataOffset); - pcmEncoding = Util.getPcmEncoding(streamMetadata.bitsPerSample); + outputFormat = getOutputFormat(streamMetadata); } - if (!supportsOutput(format.channelCount, pcmEncoding)) { - return FORMAT_UNSUPPORTED_SUBTYPE; - } else if (!supportsFormatDrm(drmSessionManager, format.drmInitData)) { - return FORMAT_UNSUPPORTED_DRM; + if (!sinkSupportsFormat(outputFormat)) { + return C.FORMAT_UNSUPPORTED_SUBTYPE; + } else if (format.cryptoType != C.CRYPTO_TYPE_NONE) { + return C.FORMAT_UNSUPPORTED_DRM; } else { - return FORMAT_HANDLED; + return C.FORMAT_HANDLED; } } + /** {@inheritDoc} */ @Override - protected FlacDecoder createDecoder(Format format, @Nullable ExoMediaCrypto mediaCrypto) + protected FlacDecoder createDecoder(Format format, @Nullable CryptoConfig cryptoConfig) throws FlacDecoderException { + TraceUtil.beginSection("createFlacDecoder"); FlacDecoder decoder = new FlacDecoder(NUM_BUFFERS, NUM_BUFFERS, format.maxInputSize, format.initializationData); - streamMetadata = decoder.getStreamMetadata(); + TraceUtil.endSection(); return decoder; } + /** {@inheritDoc} */ @Override - protected Format getOutputFormat() { - Assertions.checkNotNull(streamMetadata); - return Format.createAudioSampleFormat( - /* id= */ null, - MimeTypes.AUDIO_RAW, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - /* maxInputSize= */ Format.NO_VALUE, - streamMetadata.channels, - streamMetadata.sampleRate, + protected Format getOutputFormat(FlacDecoder decoder) { + return getOutputFormat(decoder.getStreamMetadata()); + } + + private static Format getOutputFormat(FlacStreamMetadata streamMetadata) { + return Util.getPcmFormat( 
Util.getPcmEncoding(streamMetadata.bitsPerSample), - /* initializationData= */ null, - /* drmInitData= */ null, - /* selectionFlags= */ 0, - /* language= */ null); + streamMetadata.channels, + streamMetadata.sampleRate); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/LibopusAudioRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/LibopusAudioRenderer.java index 5f637c9a0f..052617d95d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/LibopusAudioRenderer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/LibopusAudioRenderer.java @@ -21,28 +21,30 @@ import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.audio.AudioProcessor; import com.google.android.exoplayer2.audio.AudioRendererEventListener; -import com.google.android.exoplayer2.audio.SimpleDecoderAudioRenderer; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.drm.ExoMediaCrypto; -import com.google.android.exoplayer2.source.MediaSource; +import com.google.android.exoplayer2.audio.AudioSink; +import com.google.android.exoplayer2.audio.AudioSink.SinkFormatSupport; +import com.google.android.exoplayer2.audio.DecoderAudioRenderer; +import com.google.android.exoplayer2.decoder.CryptoConfig; import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.TraceUtil; +import com.google.android.exoplayer2.util.Util; /** Decodes and renders audio using the native Opus decoder. */ -public class LibopusAudioRenderer extends SimpleDecoderAudioRenderer { +public class LibopusAudioRenderer extends DecoderAudioRenderer { + private static final String TAG = "LibopusAudioRenderer"; /** The number of input and output buffers. */ private static final int NUM_BUFFERS = 16; /** The default input buffer size. */ private static final int DEFAULT_INPUT_BUFFER_SIZE = 960 * 6; - private int channelCount; - private int sampleRate; - public LibopusAudioRenderer() { this(/* eventHandler= */ null, /* eventListener= */ null); } /** + * Creates a new instance. + * * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. @@ -56,55 +58,52 @@ public LibopusAudioRenderer( } /** + * Creates a new instance. + * * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. - * @param drmSessionManager For use with encrypted media. May be null if support for encrypted - * media is not required. - * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions. - * For example a media file may start with a short clear region so as to allow playback to - * begin in parallel with key acquisition. This parameter specifies whether the renderer is - * permitted to play clear regions of encrypted media files before {@code drmSessionManager} - * has obtained the keys necessary to decrypt encrypted regions of the media. - * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output. 
- * @deprecated Use {@link #LibopusAudioRenderer(Handler, AudioRendererEventListener, - * AudioProcessor...)} instead, and pass DRM-related parameters to the {@link MediaSource} - * factories. + * @param audioSink The sink to which audio will be output. */ - @Deprecated public LibopusAudioRenderer( @Nullable Handler eventHandler, @Nullable AudioRendererEventListener eventListener, - @Nullable DrmSessionManager drmSessionManager, - boolean playClearSamplesWithoutKeys, - AudioProcessor... audioProcessors) { - super(eventHandler, eventListener, null, drmSessionManager, playClearSamplesWithoutKeys, - audioProcessors); + AudioSink audioSink) { + super(eventHandler, eventListener, audioSink); + } + + @Override + public String getName() { + return TAG; } @Override - @FormatSupport - protected int supportsFormatInternal( - @Nullable DrmSessionManager drmSessionManager, Format format) { - boolean drmIsSupported = - format.drmInitData == null - || OpusLibrary.matchesExpectedExoMediaCryptoType(format.exoMediaCryptoType) - || (format.exoMediaCryptoType == null - && supportsFormatDrm(drmSessionManager, format.drmInitData)); - if (!MimeTypes.AUDIO_OPUS.equalsIgnoreCase(format.sampleMimeType)) { - return FORMAT_UNSUPPORTED_TYPE; - } else if (!supportsOutput(format.channelCount, C.ENCODING_PCM_16BIT)) { - return FORMAT_UNSUPPORTED_SUBTYPE; + protected @C.FormatSupport int supportsFormatInternal(Format format) { + boolean drmIsSupported = OpusLibrary.supportsCryptoType(format.cryptoType); + if (!OpusLibrary.isAvailable() + || !MimeTypes.AUDIO_OPUS.equalsIgnoreCase(format.sampleMimeType)) { + return C.FORMAT_UNSUPPORTED_TYPE; + } else if (!sinkSupportsFormat( + Util.getPcmFormat(C.ENCODING_PCM_16BIT, format.channelCount, format.sampleRate))) { + return C.FORMAT_UNSUPPORTED_SUBTYPE; } else if (!drmIsSupported) { - return FORMAT_UNSUPPORTED_DRM; + return C.FORMAT_UNSUPPORTED_DRM; } else { - return FORMAT_HANDLED; + return C.FORMAT_HANDLED; } } + /** {@inheritDoc} */ @Override - protected OpusDecoder createDecoder(Format format, @Nullable ExoMediaCrypto mediaCrypto) + protected final OpusDecoder createDecoder(Format format, @Nullable CryptoConfig cryptoConfig) throws OpusDecoderException { + TraceUtil.beginSection("createOpusDecoder"); + @SinkFormatSupport + int formatSupport = + getSinkFormatSupport( + Util.getPcmFormat(C.ENCODING_PCM_FLOAT, format.channelCount, format.sampleRate)); + boolean outputFloat = formatSupport == AudioSink.SINK_FORMAT_SUPPORTED_DIRECTLY; + int initialInputBufferSize = format.maxInputSize != Format.NO_VALUE ? 
format.maxInputSize : DEFAULT_INPUT_BUFFER_SIZE; OpusDecoder decoder = @@ -113,26 +112,29 @@ protected OpusDecoder createDecoder(Format format, @Nullable ExoMediaCrypto medi NUM_BUFFERS, initialInputBufferSize, format.initializationData, - mediaCrypto); - channelCount = decoder.getChannelCount(); - sampleRate = decoder.getSampleRate(); + cryptoConfig, + outputFloat); + decoder.experimentalSetDiscardPaddingEnabled(experimentalGetDiscardPaddingEnabled()); + + TraceUtil.endSection(); return decoder; } + /** {@inheritDoc} */ @Override - protected Format getOutputFormat() { - return Format.createAudioSampleFormat( - /* id= */ null, - MimeTypes.AUDIO_RAW, - /* codecs= */ null, - Format.NO_VALUE, - Format.NO_VALUE, - channelCount, - sampleRate, - C.ENCODING_PCM_16BIT, - /* initializationData= */ null, - /* drmInitData= */ null, - /* selectionFlags= */ 0, - /* language= */ null); + protected final Format getOutputFormat(OpusDecoder decoder) { + @C.PcmEncoding + int pcmEncoding = decoder.outputFloat ? C.ENCODING_PCM_FLOAT : C.ENCODING_PCM_16BIT; + return Util.getPcmFormat(pcmEncoding, decoder.channelCount, OpusDecoder.SAMPLE_RATE); + } + + /** + * Returns true if support for padding removal from the end of decoder output buffer should be + * enabled. + * + *
<p>
      This method is experimental, and will be renamed or removed in a future release. + */ + protected boolean experimentalGetDiscardPaddingEnabled() { + return false; } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/OpusDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/OpusDecoder.java index 26f451ee2d..febeea6b9d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/OpusDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/OpusDecoder.java @@ -15,42 +15,46 @@ */ package com.google.android.exoplayer2.ext.opus; +import static androidx.annotation.VisibleForTesting.PACKAGE_PRIVATE; + import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.decoder.CryptoConfig; +import com.google.android.exoplayer2.decoder.CryptoException; import com.google.android.exoplayer2.decoder.CryptoInfo; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.decoder.SimpleDecoder; -import com.google.android.exoplayer2.decoder.SimpleOutputBuffer; -import com.google.android.exoplayer2.drm.DecryptionException; -import com.google.android.exoplayer2.drm.ExoMediaCrypto; +import com.google.android.exoplayer2.decoder.SimpleDecoderOutputBuffer; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.List; -/** - * Opus decoder. - */ -/* package */ final class OpusDecoder extends - SimpleDecoder { +/** Opus decoder. */ +@VisibleForTesting(otherwise = PACKAGE_PRIVATE) +public final class OpusDecoder + extends SimpleDecoder { - private static final int DEFAULT_SEEK_PRE_ROLL_SAMPLES = 3840; + /** Opus streams are always 48000 Hz. */ + /* package */ static final int SAMPLE_RATE = 48_000; - /** - * Opus streams are always decoded at 48000 Hz. - */ - private static final int SAMPLE_RATE = 48000; + private static final int DEFAULT_SEEK_PRE_ROLL_SAMPLES = 3840; + private static final int FULL_CODEC_INITIALIZATION_DATA_BUFFER_COUNT = 3; private static final int NO_ERROR = 0; private static final int DECODE_ERROR = -1; private static final int DRM_ERROR = -2; - @Nullable private final ExoMediaCrypto exoMediaCrypto; + public final boolean outputFloat; + public final int channelCount; - private final int channelCount; - private final int headerSkipSamples; - private final int headerSeekPreRollSamples; + @Nullable private final CryptoConfig cryptoConfig; + private final int preSkipSamples; + private final int seekPreRollSamples; private final long nativeDecoderContext; + private boolean experimentalDiscardPaddingEnabled; private int skipSamples; @@ -63,8 +67,9 @@ * @param initializationData Codec-specific initialization data. The first element must contain an * opus header. Optionally, the list may contain two additional buffers, which must contain * the encoder delay and seek pre roll values in nanoseconds, encoded as longs. - * @param exoMediaCrypto The {@link ExoMediaCrypto} object required for decoding encrypted - * content. Maybe null and can be ignored if decoder does not handle encrypted content. + * @param cryptoConfig The {@link CryptoConfig} object required for decoding encrypted content. + * May be null and can be ignored if decoder does not handle encrypted content. 
+ * @param outputFloat Forces the decoder to output float PCM samples when set * @throws OpusDecoderException Thrown if an exception occurs when initializing the decoder. */ public OpusDecoder( @@ -72,22 +77,37 @@ public OpusDecoder( int numOutputBuffers, int initialInputBufferSize, List initializationData, - @Nullable ExoMediaCrypto exoMediaCrypto) + @Nullable CryptoConfig cryptoConfig, + boolean outputFloat) throws OpusDecoderException { - super(new DecoderInputBuffer[numInputBuffers], new SimpleOutputBuffer[numOutputBuffers]); - this.exoMediaCrypto = exoMediaCrypto; - if (exoMediaCrypto != null && !OpusLibrary.opusIsSecureDecodeSupported()) { - throw new OpusDecoderException("Opus decoder does not support secure decode."); + super(new DecoderInputBuffer[numInputBuffers], new SimpleDecoderOutputBuffer[numOutputBuffers]); + if (!OpusLibrary.isAvailable()) { + throw new OpusDecoderException("Failed to load decoder native libraries"); + } + this.cryptoConfig = cryptoConfig; + if (cryptoConfig != null && !OpusLibrary.opusIsSecureDecodeSupported()) { + throw new OpusDecoderException("Opus decoder does not support secure decode"); } + int initializationDataSize = initializationData.size(); + if (initializationDataSize != 1 && initializationDataSize != 3) { + throw new OpusDecoderException("Invalid initialization data size"); + } + if (initializationDataSize == 3 + && (initializationData.get(1).length != 8 || initializationData.get(2).length != 8)) { + throw new OpusDecoderException("Invalid pre-skip or seek pre-roll"); + } + preSkipSamples = getPreSkipSamples(initializationData); + seekPreRollSamples = getSeekPreRollSamples(initializationData); + skipSamples = preSkipSamples; + byte[] headerBytes = initializationData.get(0); if (headerBytes.length < 19) { - throw new OpusDecoderException("Header size is too small."); + throw new OpusDecoderException("Invalid header length"); } - channelCount = headerBytes[9] & 0xFF; + channelCount = getChannelCount(headerBytes); if (channelCount > 8) { throw new OpusDecoderException("Invalid channel count: " + channelCount); } - int preskip = readUnsignedLittleEndian16(headerBytes, 10); int gain = readSignedLittleEndian16(headerBytes, 16); byte[] streamMap = new byte[8]; @@ -96,7 +116,7 @@ public OpusDecoder( if (headerBytes[18] == 0) { // Channel mapping // If there is no channel mapping, use the defaults. if (channelCount > 2) { // Maximum channel count with default layout. - throw new OpusDecoderException("Invalid Header, missing stream map."); + throw new OpusDecoderException("Invalid header, missing stream map"); } numStreams = 1; numCoupled = (channelCount == 2) ? 1 : 0; @@ -104,33 +124,34 @@ public OpusDecoder( streamMap[1] = 1; } else { if (headerBytes.length < 21 + channelCount) { - throw new OpusDecoderException("Header size is too small."); + throw new OpusDecoderException("Invalid header length"); } // Read the channel mapping. 
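        // For reference, the RFC 7845 identification header is laid out as: bytes 0-7 "OpusHead"
        // magic, byte 8 version, byte 9 channel count, bytes 10-11 pre-skip (little endian),
        // bytes 12-15 input sample rate, bytes 16-17 output gain, byte 18 mapping family, and,
        // for a non-zero mapping family, byte 19 stream count, byte 20 coupled stream count,
        // then one stream-map byte per channel, which is what is read here.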
numStreams = headerBytes[19] & 0xFF; numCoupled = headerBytes[20] & 0xFF; System.arraycopy(headerBytes, 21, streamMap, 0, channelCount); } - if (initializationData.size() == 3) { - if (initializationData.get(1).length != 8 || initializationData.get(2).length != 8) { - throw new OpusDecoderException("Invalid Codec Delay or Seek Preroll"); - } - long codecDelayNs = - ByteBuffer.wrap(initializationData.get(1)).order(ByteOrder.nativeOrder()).getLong(); - long seekPreRollNs = - ByteBuffer.wrap(initializationData.get(2)).order(ByteOrder.nativeOrder()).getLong(); - headerSkipSamples = nsToSamples(codecDelayNs); - headerSeekPreRollSamples = nsToSamples(seekPreRollNs); - } else { - headerSkipSamples = preskip; - headerSeekPreRollSamples = DEFAULT_SEEK_PRE_ROLL_SAMPLES; - } - nativeDecoderContext = opusInit(SAMPLE_RATE, channelCount, numStreams, numCoupled, gain, - streamMap); + nativeDecoderContext = + opusInit(SAMPLE_RATE, channelCount, numStreams, numCoupled, gain, streamMap); if (nativeDecoderContext == 0) { throw new OpusDecoderException("Failed to initialize decoder"); } setInitialInputBufferSize(initialInputBufferSize); + + this.outputFloat = outputFloat; + if (outputFloat) { + opusSetFloatOutput(); + } + } + + /** + * Sets whether discard padding is enabled. When enabled, discard padding samples (provided as + * supplemental data on the input buffer) will be removed from the end of the decoder output. + * + *
<p>
      This method is experimental, and will be renamed or removed in a future release. + */ + public void experimentalSetDiscardPaddingEnabled(boolean enabled) { + this.experimentalDiscardPaddingEnabled = enabled; } @Override @@ -144,8 +165,8 @@ protected DecoderInputBuffer createInputBuffer() { } @Override - protected SimpleOutputBuffer createOutputBuffer() { - return new SimpleOutputBuffer(this); + protected SimpleDecoderOutputBuffer createOutputBuffer() { + return new SimpleDecoderOutputBuffer(this::releaseOutputBuffer); } @Override @@ -156,27 +177,42 @@ protected OpusDecoderException createUnexpectedDecodeException(Throwable error) @Override @Nullable protected OpusDecoderException decode( - DecoderInputBuffer inputBuffer, SimpleOutputBuffer outputBuffer, boolean reset) { + DecoderInputBuffer inputBuffer, SimpleDecoderOutputBuffer outputBuffer, boolean reset) { if (reset) { opusReset(nativeDecoderContext); // When seeking to 0, skip number of samples as specified in opus header. When seeking to // any other time, skip number of samples as specified by seek preroll. - skipSamples = (inputBuffer.timeUs == 0) ? headerSkipSamples : headerSeekPreRollSamples; + skipSamples = (inputBuffer.timeUs == 0) ? preSkipSamples : seekPreRollSamples; } ByteBuffer inputData = Util.castNonNull(inputBuffer.data); CryptoInfo cryptoInfo = inputBuffer.cryptoInfo; - int result = inputBuffer.isEncrypted() - ? opusSecureDecode(nativeDecoderContext, inputBuffer.timeUs, inputData, inputData.limit(), - outputBuffer, SAMPLE_RATE, exoMediaCrypto, cryptoInfo.mode, - cryptoInfo.key, cryptoInfo.iv, cryptoInfo.numSubSamples, - cryptoInfo.numBytesOfClearData, cryptoInfo.numBytesOfEncryptedData) - : opusDecode(nativeDecoderContext, inputBuffer.timeUs, inputData, inputData.limit(), - outputBuffer); + int result = + inputBuffer.isEncrypted() + ? 
opusSecureDecode( + nativeDecoderContext, + inputBuffer.timeUs, + inputData, + inputData.limit(), + outputBuffer, + SAMPLE_RATE, + cryptoConfig, + cryptoInfo.mode, + Assertions.checkNotNull(cryptoInfo.key), + Assertions.checkNotNull(cryptoInfo.iv), + cryptoInfo.numSubSamples, + cryptoInfo.numBytesOfClearData, + cryptoInfo.numBytesOfEncryptedData) + : opusDecode( + nativeDecoderContext, + inputBuffer.timeUs, + inputData, + inputData.limit(), + outputBuffer); if (result < 0) { if (result == DRM_ERROR) { String message = "Drm error: " + opusGetErrorMessage(nativeDecoderContext); - DecryptionException cause = new DecryptionException( - opusGetErrorCode(nativeDecoderContext), message); + CryptoException cause = + new CryptoException(opusGetErrorCode(nativeDecoderContext), message); return new OpusDecoderException(message, cause); } else { return new OpusDecoderException("Decode error: " + opusGetErrorMessage(result)); @@ -187,7 +223,7 @@ protected OpusDecoderException decode( outputData.position(0); outputData.limit(result); if (skipSamples > 0) { - int bytesPerSample = channelCount * 2; + int bytesPerSample = samplesToBytes(1, channelCount, outputFloat); int skipBytes = skipSamples * bytesPerSample; if (result <= skipBytes) { skipSamples -= result / bytesPerSample; @@ -197,6 +233,14 @@ protected OpusDecoderException decode( skipSamples = 0; outputData.position(skipBytes); } + } else if (experimentalDiscardPaddingEnabled && inputBuffer.hasSupplementalData()) { + int discardPaddingSamples = getDiscardPaddingSamples(inputBuffer.supplementalData); + if (discardPaddingSamples > 0) { + int discardBytes = samplesToBytes(discardPaddingSamples, channelCount, outputFloat); + if (result >= discardBytes) { + outputData.limit(result - discardBytes); + } + } } return null; } @@ -208,56 +252,115 @@ public void release() { } /** - * Returns the channel count of output audio. + * Parses the channel count from an Opus Identification Header. + * + * @param header An Opus Identification Header, as defined by RFC 7845. + * @return The parsed channel count. */ - public int getChannelCount() { - return channelCount; + @VisibleForTesting + /* package */ static int getChannelCount(byte[] header) { + return header[9] & 0xFF; } /** - * Returns the sample rate of output audio. + * Returns the number of pre-skip samples specified by the given Opus codec initialization data. + * + * @param initializationData The codec initialization data. + * @return The number of pre-skip samples. */ - public int getSampleRate() { - return SAMPLE_RATE; + @VisibleForTesting + /* package */ static int getPreSkipSamples(List initializationData) { + if (initializationData.size() == FULL_CODEC_INITIALIZATION_DATA_BUFFER_COUNT) { + long codecDelayNs = + ByteBuffer.wrap(initializationData.get(1)).order(ByteOrder.nativeOrder()).getLong(); + return (int) ((codecDelayNs * SAMPLE_RATE) / C.NANOS_PER_SECOND); + } + // Fall back to parsing directly from the Opus Identification header. + byte[] headerData = initializationData.get(0); + return ((headerData[11] & 0xFF) << 8) | (headerData[10] & 0xFF); } - private static int nsToSamples(long ns) { - return (int) (ns * SAMPLE_RATE / 1000000000); + /** + * Returns the number of seek per-roll samples specified by the given Opus codec initialization + * data. + * + * @param initializationData The codec initialization data. + * @return The number of seek pre-roll samples. 
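+   * For example, when the initialization data carries no explicit value, the 3840-sample default corresponds to 80 ms at the fixed 48 kHz output rate.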
+ */ + @VisibleForTesting + /* package */ static int getSeekPreRollSamples(List initializationData) { + if (initializationData.size() == FULL_CODEC_INITIALIZATION_DATA_BUFFER_COUNT) { + long seekPreRollNs = + ByteBuffer.wrap(initializationData.get(2)).order(ByteOrder.nativeOrder()).getLong(); + return (int) ((seekPreRollNs * SAMPLE_RATE) / C.NANOS_PER_SECOND); + } + // Fall back to returning the default seek pre-roll. + return DEFAULT_SEEK_PRE_ROLL_SAMPLES; } - private static int readUnsignedLittleEndian16(byte[] input, int offset) { - int value = input[offset] & 0xFF; - value |= (input[offset + 1] & 0xFF) << 8; - return value; + /** + * Returns the number of discard padding samples specified by the supplemental data attached to an + * input buffer. + * + * @param supplementalData Supplemental data related to the an input buffer. + * @return The number of discard padding samples to remove from the decoder output. + */ + @VisibleForTesting + /* package */ static int getDiscardPaddingSamples(@Nullable ByteBuffer supplementalData) { + if (supplementalData == null || supplementalData.remaining() != 8) { + return 0; + } + long discardPaddingNs = supplementalData.order(ByteOrder.LITTLE_ENDIAN).getLong(); + if (discardPaddingNs < 0) { + return 0; + } + return (int) ((discardPaddingNs * SAMPLE_RATE) / C.NANOS_PER_SECOND); + } + + /** Returns number of bytes to represent {@code samples}. */ + private static int samplesToBytes(int samples, int channelCount, boolean outputFloat) { + int bytesPerChannel = outputFloat ? 4 : 2; + return samples * channelCount * bytesPerChannel; } private static int readSignedLittleEndian16(byte[] input, int offset) { - return (short) readUnsignedLittleEndian16(input, offset); + int value = input[offset] & 0xFF; + value |= (input[offset + 1] & 0xFF) << 8; + return (short) value; } - private native long opusInit(int sampleRate, int channelCount, int numStreams, int numCoupled, - int gain, byte[] streamMap); - private native int opusDecode(long decoder, long timeUs, ByteBuffer inputBuffer, int inputSize, - SimpleOutputBuffer outputBuffer); + private native long opusInit( + int sampleRate, int channelCount, int numStreams, int numCoupled, int gain, byte[] streamMap); + + private native int opusDecode( + long decoder, + long timeUs, + ByteBuffer inputBuffer, + int inputSize, + SimpleDecoderOutputBuffer outputBuffer); private native int opusSecureDecode( long decoder, long timeUs, ByteBuffer inputBuffer, int inputSize, - SimpleOutputBuffer outputBuffer, + SimpleDecoderOutputBuffer outputBuffer, int sampleRate, - @Nullable ExoMediaCrypto mediaCrypto, + @Nullable CryptoConfig mediaCrypto, int inputMode, byte[] key, byte[] iv, int numSubSamples, - int[] numBytesOfClearData, - int[] numBytesOfEncryptedData); + @Nullable int[] numBytesOfClearData, + @Nullable int[] numBytesOfEncryptedData); private native void opusClose(long decoder); + private native void opusReset(long decoder); + private native int opusGetErrorCode(long decoder); + private native String opusGetErrorMessage(long decoder); + private native void opusSetFloatOutput(); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/OpusDecoderException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/OpusDecoderException.java index 6645086838..e9563b9af1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/OpusDecoderException.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/OpusDecoderException.java @@ -15,12 +15,10 
@@ */ package com.google.android.exoplayer2.ext.opus; -import com.google.android.exoplayer2.audio.AudioDecoderException; +import com.google.android.exoplayer2.decoder.DecoderException; -/** - * Thrown when an Opus decoder error occurs. - */ -public final class OpusDecoderException extends AudioDecoderException { +/** Thrown when an Opus decoder error occurs. */ +public final class OpusDecoderException extends DecoderException { /* package */ OpusDecoderException(String message) { super(message); @@ -29,5 +27,4 @@ public final class OpusDecoderException extends AudioDecoderException { /* package */ OpusDecoderException(String message, Throwable cause) { super(message, cause); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/OpusLibrary.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/OpusLibrary.java index 969dfe7cbd..d7096ff7e8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/OpusLibrary.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ext/opus/OpusLibrary.java @@ -16,21 +16,26 @@ package com.google.android.exoplayer2.ext.opus; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlayerLibraryInfo; -import com.google.android.exoplayer2.drm.ExoMediaCrypto; import com.google.android.exoplayer2.util.LibraryLoader; -import com.google.android.exoplayer2.util.Util; -/** - * Configures and queries the underlying native library. - */ +/** Configures and queries the underlying native library. */ public final class OpusLibrary { static { ExoPlayerLibraryInfo.registerModule("goog.exo.opus"); } - @Nullable private static Class exoMediaCryptoType; + private static final LibraryLoader LOADER = + new LibraryLoader("opusV2JNI") { + @Override + protected void loadLibrary(String name) { + System.loadLibrary(name); + } + }; + + private static @C.CryptoType int cryptoType = C.CRYPTO_TYPE_UNSUPPORTED; private OpusLibrary() {} @@ -39,30 +44,34 @@ private OpusLibrary() {} * it must do so before calling any other method defined by this class, and before instantiating a * {@link LibopusAudioRenderer} instance. * - * @param exoMediaCryptoType The {@link ExoMediaCrypto} type expected for decoding protected - * content. + * @param cryptoType The {@link C.CryptoType} for which the decoder library supports decrypting + * protected content, or {@link C#CRYPTO_TYPE_UNSUPPORTED} if the library does not support + * decryption. * @param libraries The names of the Opus native libraries. */ - public static void setLibraries( - Class exoMediaCryptoType, String... libraries) { - OpusLibrary.exoMediaCryptoType = exoMediaCryptoType; + public static void setLibraries(@C.CryptoType int cryptoType, String... libraries) { + OpusLibrary.cryptoType = cryptoType; + LOADER.setLibraries(libraries); + } + + /** Returns whether the underlying library is available, loading it if necessary. */ + public static boolean isAvailable() { + return LOADER.isAvailable(); } /** Returns the version of the underlying library if available, or null otherwise. */ @Nullable public static String getVersion() { - return opusGetVersion(); + return isAvailable() ? opusGetVersion() : null; } - /** - * Returns whether the given {@link ExoMediaCrypto} type matches the one required for decoding - * protected content. 
- */ - public static boolean matchesExpectedExoMediaCryptoType( - @Nullable Class exoMediaCryptoType) { - return Util.areEqual(OpusLibrary.exoMediaCryptoType, exoMediaCryptoType); + /** Returns whether the library supports the given {@link C.CryptoType}. */ + public static boolean supportsCryptoType(@C.CryptoType int cryptoType) { + return cryptoType == C.CRYPTO_TYPE_NONE + || (cryptoType != C.CRYPTO_TYPE_UNSUPPORTED && cryptoType == OpusLibrary.cryptoType); } public static native String opusGetVersion(); + public static native boolean opusIsSecureDecodeSupported(); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/BinarySearchSeeker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/BinarySearchSeeker.java index 0d823fa31d..80ab98a6d1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/BinarySearchSeeker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/BinarySearchSeeker.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.extractor; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; @@ -24,6 +26,7 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** * A seeker that supports seeking within a stream by searching for the target frame using binary @@ -49,10 +52,9 @@ protected interface TimestampSeeker { * @param targetTimestamp The target timestamp. * @return A {@link TimestampSearchResult} that describes the result of the search. * @throws IOException If an error occurred reading from the input. - * @throws InterruptedException If the thread was interrupted. */ TimestampSearchResult searchForTimestamp(ExtractorInput input, long targetTimestamp) - throws IOException, InterruptedException; + throws IOException; /** Called when a seek operation finishes. */ default void onSeekFinished() {} @@ -91,7 +93,7 @@ protected interface SeekTimestampConverter { protected final BinarySearchSeekMap seekMap; protected final TimestampSeeker timestampSeeker; - protected @Nullable SeekOperationParams seekOperationParams; + @Nullable protected SeekOperationParams seekOperationParams; private final int minimumSearchRange; @@ -169,13 +171,12 @@ public final boolean isSeeking() { * to hold the position of the required seek. * @return One of the {@code RESULT_} values defined in {@link Extractor}. * @throws IOException If an error occurred reading from the input. - * @throws InterruptedException If the thread was interrupted. 
*/ public int handlePendingSeek(ExtractorInput input, PositionHolder seekPositionHolder) - throws InterruptedException, IOException { - TimestampSeeker timestampSeeker = Assertions.checkNotNull(this.timestampSeeker); + throws IOException { while (true) { - SeekOperationParams seekOperationParams = Assertions.checkNotNull(this.seekOperationParams); + SeekOperationParams seekOperationParams = + Assertions.checkStateNotNull(this.seekOperationParams); long floorPosition = seekOperationParams.getFloorBytePosition(); long ceilingPosition = seekOperationParams.getCeilingBytePosition(); long searchPosition = seekOperationParams.getNextSearchBytePosition(); @@ -203,9 +204,9 @@ public int handlePendingSeek(ExtractorInput input, PositionHolder seekPositionHo timestampSearchResult.timestampToUpdate, timestampSearchResult.bytePositionToUpdate); break; case TimestampSearchResult.TYPE_TARGET_TIMESTAMP_FOUND: + skipInputUntilPosition(input, timestampSearchResult.bytePositionToUpdate); markSeekOperationFinished( /* foundTargetFrame= */ true, timestampSearchResult.bytePositionToUpdate); - skipInputUntilPosition(input, timestampSearchResult.bytePositionToUpdate); return seekToPosition( input, timestampSearchResult.bytePositionToUpdate, seekPositionHolder); case TimestampSearchResult.TYPE_NO_TIMESTAMP: @@ -241,7 +242,7 @@ protected void onSeekOperationFinished(boolean foundTargetFrame, long resultPosi } protected final boolean skipInputUntilPosition(ExtractorInput input, long position) - throws IOException, InterruptedException { + throws IOException { long bytesToSkip = position - input.getPosition(); if (bytesToSkip >= 0 && bytesToSkip <= MAX_SKIP_BYTES) { input.skipFully((int) bytesToSkip); @@ -407,6 +408,7 @@ public static final class TimestampSearchResult { @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ TYPE_TARGET_TIMESTAMP_FOUND, TYPE_POSITION_OVERESTIMATED, @@ -419,7 +421,7 @@ public static final class TimestampSearchResult { new TimestampSearchResult(TYPE_NO_TIMESTAMP, C.TIME_UNSET, C.POSITION_UNSET); /** The type of the result. */ - @Type private final int type; + private final @Type int type; /** * When {@link #type} is {@link #TYPE_POSITION_OVERESTIMATED}, the {@link @@ -530,7 +532,9 @@ public long getDurationUs() { return durationUs; } - /** @see SeekTimestampConverter#timeUsToTargetTime(long) */ + /** + * @see SeekTimestampConverter#timeUsToTargetTime(long) + */ public long timeUsToTargetTime(long timeUs) { return seekTimestampConverter.timeUsToTargetTime(timeUs); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/CeaUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/CeaUtil.java similarity index 91% rename from TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/CeaUtil.java rename to TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/CeaUtil.java index cdc545e459..38e4ce1289 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/CeaUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/CeaUtil.java @@ -13,10 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.google.android.exoplayer2.text.cea; +package com.google.android.exoplayer2.extractor; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.ParsableByteArray; @@ -34,15 +33,15 @@ public final class CeaUtil { private static final int PROVIDER_CODE_DIRECTV = 0x2F; /** - * Consumes the unescaped content of an SEI NAL unit, writing the content of any CEA-608 messages - * as samples to all of the provided outputs. + * Consumes the unescaped content of an SEI NAL unit, writing the content of any CEA-608/708 + * messages as samples to all of the provided outputs. * * @param presentationTimeUs The presentation time in microseconds for any samples. * @param seiBuffer The unescaped SEI NAL unit data, excluding the NAL unit start code and type. * @param outputs The outputs to which any samples should be written. */ - public static void consume(long presentationTimeUs, ParsableByteArray seiBuffer, - TrackOutput[] outputs) { + public static void consume( + long presentationTimeUs, ParsableByteArray seiBuffer, TrackOutput[] outputs) { while (seiBuffer.bytesLeft() > 1 /* last byte will be rbsp_trailing_bits */) { int payloadType = readNon255TerminatedValue(seiBuffer); int payloadSize = readNon255TerminatedValue(seiBuffer); @@ -103,12 +102,14 @@ public static void consumeCcData( for (TrackOutput output : outputs) { ccDataBuffer.setPosition(sampleStartPosition); output.sampleData(ccDataBuffer, sampleLength); - output.sampleMetadata( - presentationTimeUs, - C.BUFFER_FLAG_KEY_FRAME, - sampleLength, - /* offset= */ 0, - /* encryptionData= */ null); + if (presentationTimeUs != C.TIME_UNSET) { + output.sampleMetadata( + presentationTimeUs, + C.BUFFER_FLAG_KEY_FRAME, + sampleLength, + /* offset= */ 0, + /* cryptoData= */ null); + } } } @@ -134,5 +135,4 @@ private static int readNon255TerminatedValue(ParsableByteArray buffer) { } private CeaUtil() {} - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ChunkIndex.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ChunkIndex.java index 7ddd03bbd5..8291ed1ea4 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ChunkIndex.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ChunkIndex.java @@ -18,34 +18,22 @@ import com.google.android.exoplayer2.util.Util; import java.util.Arrays; -/** - * Defines chunks of samples within a media stream. - */ +/** Defines chunks of samples within a media stream. */ public final class ChunkIndex implements SeekMap { - /** - * The number of chunks. - */ + /** The number of chunks. */ public final int length; - /** - * The chunk sizes, in bytes. - */ + /** The chunk sizes, in bytes. */ public final int[] sizes; - /** - * The chunk byte offsets. - */ + /** The chunk byte offsets. */ public final long[] offsets; - /** - * The chunk durations, in microseconds. - */ + /** The chunk durations, in microseconds. */ public final long[] durationsUs; - /** - * The start time of each chunk, in microseconds. - */ + /** The start time of each chunk, in microseconds. */ public final long[] timesUs; private final long durationUs; @@ -76,7 +64,7 @@ public ChunkIndex(int[] sizes, long[] offsets, long[] durationsUs, long[] timesU * @return The index of the corresponding chunk. 
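 *     For example, a {@code timeUs} that falls between the start times of chunks 3 and 4 resolves to index 3, and a time before the first chunk clamps to index 0.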
*/ public int getChunkIndex(long timeUs) { - return Util.binarySearchFloor(timesUs, timeUs, true, true); + return Util.binarySearchFloor(timesUs, timeUs, /* inclusive= */ true, /* stayInBounds= */ true); } // SeekMap implementation. diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ConstantBitrateSeekMap.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ConstantBitrateSeekMap.java index abce01b5ef..5e2b743442 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ConstantBitrateSeekMap.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ConstantBitrateSeekMap.java @@ -15,8 +15,10 @@ */ package com.google.android.exoplayer2.extractor; +import static java.lang.Math.max; +import static java.lang.Math.min; + import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.util.Util; /** * A {@link SeekMap} implementation that assumes the stream has a constant bitrate and consists of @@ -31,9 +33,10 @@ public class ConstantBitrateSeekMap implements SeekMap { private final long dataSize; private final int bitrate; private final long durationUs; + private final boolean allowSeeksIfLengthUnknown; /** - * Constructs a new instance from a stream. + * Creates an instance with {@code allowSeeksIfLengthUnknown} set to {@code false}. * * @param inputLength The length of the stream in bytes, or {@link C#LENGTH_UNSET} if unknown. * @param firstFrameBytePosition The byte-position of the first frame in the stream. @@ -43,10 +46,36 @@ public class ConstantBitrateSeekMap implements SeekMap { */ public ConstantBitrateSeekMap( long inputLength, long firstFrameBytePosition, int bitrate, int frameSize) { + this( + inputLength, + firstFrameBytePosition, + bitrate, + frameSize, + /* allowSeeksIfLengthUnknown= */ false); + } + + /** + * Creates an instance. + * + * @param inputLength The length of the stream in bytes, or {@link C#LENGTH_UNSET} if unknown. + * @param firstFrameBytePosition The byte-position of the first frame in the stream. + * @param bitrate The bitrate (which is assumed to be constant in the stream). + * @param frameSize The size of each frame in the stream in bytes. May be {@link C#LENGTH_UNSET} + * if unknown. + * @param allowSeeksIfLengthUnknown Whether to allow seeking even if the length of the content is + * unknown. + */ + public ConstantBitrateSeekMap( + long inputLength, + long firstFrameBytePosition, + int bitrate, + int frameSize, + boolean allowSeeksIfLengthUnknown) { this.inputLength = inputLength; this.firstFrameBytePosition = firstFrameBytePosition; this.frameSize = frameSize == C.LENGTH_UNSET ? 
1 : frameSize; this.bitrate = bitrate; + this.allowSeeksIfLengthUnknown = allowSeeksIfLengthUnknown; if (inputLength == C.LENGTH_UNSET) { dataSize = C.LENGTH_UNSET; @@ -59,18 +88,23 @@ public ConstantBitrateSeekMap( @Override public boolean isSeekable() { - return dataSize != C.LENGTH_UNSET; + return dataSize != C.LENGTH_UNSET || allowSeeksIfLengthUnknown; } @Override public SeekPoints getSeekPoints(long timeUs) { - if (dataSize == C.LENGTH_UNSET) { + if (dataSize == C.LENGTH_UNSET && !allowSeeksIfLengthUnknown) { return new SeekPoints(new SeekPoint(0, firstFrameBytePosition)); } long seekFramePosition = getFramePositionForTimeUs(timeUs); long seekTimeUs = getTimeUsAtPosition(seekFramePosition); SeekPoint seekPoint = new SeekPoint(seekTimeUs, seekFramePosition); - if (seekTimeUs >= timeUs || seekFramePosition + frameSize >= inputLength) { + // We only return a single seek point if the length is unknown, to avoid generating a second + // seek point beyond the end of the data in the case that the requested seek position is valid, + // but very close to the end of the content. + if (dataSize == C.LENGTH_UNSET + || seekTimeUs >= timeUs + || seekFramePosition + frameSize >= inputLength) { return new SeekPoints(seekPoint); } else { long secondSeekPosition = seekFramePosition + frameSize; @@ -106,7 +140,7 @@ public long getTimeUsAtPosition(long position) { * @return The stream time in microseconds for the given stream position. */ private static long getTimeUsAtPosition(long position, long firstFrameBytePosition, int bitrate) { - return Math.max(0, position - firstFrameBytePosition) + return max(0, position - firstFrameBytePosition) * C.BITS_PER_BYTE * C.MICROS_PER_SECOND / bitrate; @@ -116,8 +150,10 @@ private long getFramePositionForTimeUs(long timeUs) { long positionOffset = (timeUs * bitrate) / (C.MICROS_PER_SECOND * C.BITS_PER_BYTE); // Constrain to nearest preceding frame offset. positionOffset = (positionOffset / frameSize) * frameSize; - positionOffset = - Util.constrainValue(positionOffset, /* min= */ 0, /* max= */ dataSize - frameSize); + if (dataSize != C.LENGTH_UNSET) { + positionOffset = min(positionOffset, dataSize - frameSize); + } + positionOffset = max(positionOffset, 0); return firstFrameBytePosition + positionOffset; } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DefaultExtractorInput.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DefaultExtractorInput.java index c6f1129da8..b54131ce55 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DefaultExtractorInput.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DefaultExtractorInput.java @@ -15,25 +15,31 @@ */ package com.google.android.exoplayer2.extractor; +import static java.lang.Math.min; + import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.upstream.DataSource; +import com.google.android.exoplayer2.ExoPlayerLibraryInfo; +import com.google.android.exoplayer2.upstream.DataReader; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; import java.io.EOFException; import java.io.IOException; +import java.io.InterruptedIOException; import java.util.Arrays; -/** - * An {@link ExtractorInput} that wraps a {@link DataSource}. - */ +/** An {@link ExtractorInput} that wraps a {@link DataReader}. 
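+ * Reads, skips and peeks are served from an internal peek buffer first, falling back to the wrapped {@link DataReader} only once the buffered data is exhausted.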
*/ public final class DefaultExtractorInput implements ExtractorInput { + static { + ExoPlayerLibraryInfo.registerModule("goog.exo.extractor"); + } + private static final int PEEK_MIN_FREE_SPACE_AFTER_RESIZE = 64 * 1024; private static final int PEEK_MAX_FREE_SPACE = 512 * 1024; private static final int SCRATCH_SPACE_SIZE = 4096; private final byte[] scratchSpace; - private final DataSource dataSource; + private final DataReader dataReader; private final long streamLength; private long position; @@ -42,12 +48,12 @@ public final class DefaultExtractorInput implements ExtractorInput { private int peekBufferLength; /** - * @param dataSource The wrapped {@link DataSource}. + * @param dataReader The wrapped {@link DataReader}. * @param position The initial position in the stream. * @param length The length of the stream, or {@link C#LENGTH_UNSET} if it is unknown. */ - public DefaultExtractorInput(DataSource dataSource, long position, long length) { - this.dataSource = dataSource; + public DefaultExtractorInput(DataReader dataReader, long position, long length) { + this.dataReader = dataReader; this.position = position; this.streamLength = length; peekBuffer = new byte[PEEK_MIN_FREE_SPACE_AFTER_RESIZE]; @@ -55,12 +61,12 @@ public DefaultExtractorInput(DataSource dataSource, long position, long length) } @Override - public int read(byte[] target, int offset, int length) throws IOException, InterruptedException { - int bytesRead = readFromPeekBuffer(target, offset, length); + public int read(byte[] buffer, int offset, int length) throws IOException { + int bytesRead = readFromPeekBuffer(buffer, offset, length); if (bytesRead == 0) { bytesRead = - readFromDataSource( - target, offset, length, /* bytesAlreadyRead= */ 0, /* allowEndOfInput= */ true); + readFromUpstream( + buffer, offset, length, /* bytesAlreadyRead= */ 0, /* allowEndOfInput= */ true); } commitBytesRead(bytesRead); return bytesRead; @@ -68,58 +74,55 @@ public int read(byte[] target, int offset, int length) throws IOException, Inter @Override public boolean readFully(byte[] target, int offset, int length, boolean allowEndOfInput) - throws IOException, InterruptedException { + throws IOException { int bytesRead = readFromPeekBuffer(target, offset, length); while (bytesRead < length && bytesRead != C.RESULT_END_OF_INPUT) { - bytesRead = readFromDataSource(target, offset, length, bytesRead, allowEndOfInput); + bytesRead = readFromUpstream(target, offset, length, bytesRead, allowEndOfInput); } commitBytesRead(bytesRead); return bytesRead != C.RESULT_END_OF_INPUT; } @Override - public void readFully(byte[] target, int offset, int length) - throws IOException, InterruptedException { + public void readFully(byte[] target, int offset, int length) throws IOException { readFully(target, offset, length, false); } @Override - public int skip(int length) throws IOException, InterruptedException { + public int skip(int length) throws IOException { int bytesSkipped = skipFromPeekBuffer(length); if (bytesSkipped == 0) { - bytesSkipped = - readFromDataSource(scratchSpace, 0, Math.min(length, scratchSpace.length), 0, true); + bytesSkipped = readFromUpstream(scratchSpace, 0, min(length, scratchSpace.length), 0, true); } commitBytesRead(bytesSkipped); return bytesSkipped; } @Override - public boolean skipFully(int length, boolean allowEndOfInput) - throws IOException, InterruptedException { + public boolean skipFully(int length, boolean allowEndOfInput) throws IOException { int bytesSkipped = skipFromPeekBuffer(length); while (bytesSkipped < length && 
bytesSkipped != C.RESULT_END_OF_INPUT) { - int minLength = Math.min(length, bytesSkipped + scratchSpace.length); + int minLength = min(length, bytesSkipped + scratchSpace.length); bytesSkipped = - readFromDataSource(scratchSpace, -bytesSkipped, minLength, bytesSkipped, allowEndOfInput); + readFromUpstream(scratchSpace, -bytesSkipped, minLength, bytesSkipped, allowEndOfInput); } commitBytesRead(bytesSkipped); return bytesSkipped != C.RESULT_END_OF_INPUT; } @Override - public void skipFully(int length) throws IOException, InterruptedException { + public void skipFully(int length) throws IOException { skipFully(length, false); } @Override - public int peek(byte[] target, int offset, int length) throws IOException, InterruptedException { + public int peek(byte[] target, int offset, int length) throws IOException { ensureSpaceForPeek(length); int peekBufferRemainingBytes = peekBufferLength - peekBufferPosition; int bytesPeeked; if (peekBufferRemainingBytes == 0) { bytesPeeked = - readFromDataSource( + readFromUpstream( peekBuffer, peekBufferPosition, length, @@ -130,7 +133,7 @@ public int peek(byte[] target, int offset, int length) throws IOException, Inter } peekBufferLength += bytesPeeked; } else { - bytesPeeked = Math.min(length, peekBufferRemainingBytes); + bytesPeeked = min(length, peekBufferRemainingBytes); } System.arraycopy(peekBuffer, peekBufferPosition, target, offset, bytesPeeked); peekBufferPosition += bytesPeeked; @@ -139,7 +142,7 @@ public int peek(byte[] target, int offset, int length) throws IOException, Inter @Override public boolean peekFully(byte[] target, int offset, int length, boolean allowEndOfInput) - throws IOException, InterruptedException { + throws IOException { if (!advancePeekPosition(length, allowEndOfInput)) { return false; } @@ -148,19 +151,17 @@ public boolean peekFully(byte[] target, int offset, int length, boolean allowEnd } @Override - public void peekFully(byte[] target, int offset, int length) - throws IOException, InterruptedException { + public void peekFully(byte[] target, int offset, int length) throws IOException { peekFully(target, offset, length, false); } @Override - public boolean advancePeekPosition(int length, boolean allowEndOfInput) - throws IOException, InterruptedException { + public boolean advancePeekPosition(int length, boolean allowEndOfInput) throws IOException { ensureSpaceForPeek(length); int bytesPeeked = peekBufferLength - peekBufferPosition; while (bytesPeeked < length) { - bytesPeeked = readFromDataSource(peekBuffer, peekBufferPosition, length, bytesPeeked, - allowEndOfInput); + bytesPeeked = + readFromUpstream(peekBuffer, peekBufferPosition, length, bytesPeeked, allowEndOfInput); if (bytesPeeked == C.RESULT_END_OF_INPUT) { return false; } @@ -171,7 +172,7 @@ public boolean advancePeekPosition(int length, boolean allowEndOfInput) } @Override - public void advancePeekPosition(int length) throws IOException, InterruptedException { + public void advancePeekPosition(int length) throws IOException { advancePeekPosition(length, false); } @@ -209,8 +210,11 @@ public void setRetryPosition(long position, E e) throws E private void ensureSpaceForPeek(int length) { int requiredLength = peekBufferPosition + length; if (requiredLength > peekBuffer.length) { - int newPeekCapacity = Util.constrainValue(peekBuffer.length * 2, - requiredLength + PEEK_MIN_FREE_SPACE_AFTER_RESIZE, requiredLength + PEEK_MAX_FREE_SPACE); + int newPeekCapacity = + Util.constrainValue( + peekBuffer.length * 2, + requiredLength + PEEK_MIN_FREE_SPACE_AFTER_RESIZE, + 
requiredLength + PEEK_MAX_FREE_SPACE); peekBuffer = Arrays.copyOf(peekBuffer, newPeekCapacity); } } @@ -222,7 +226,7 @@ private void ensureSpaceForPeek(int length) { * @return The number of bytes skipped. */ private int skipFromPeekBuffer(int length) { - int bytesSkipped = Math.min(peekBufferLength, length); + int bytesSkipped = min(peekBufferLength, length); updatePeekBuffer(bytesSkipped); return bytesSkipped; } @@ -239,7 +243,7 @@ private int readFromPeekBuffer(byte[] target, int offset, int length) { if (peekBufferLength == 0) { return 0; } - int peekBytes = Math.min(peekBufferLength, length); + int peekBytes = min(peekBufferLength, length); System.arraycopy(peekBuffer, 0, target, offset, peekBytes); updatePeekBuffer(peekBytes); return peekBytes; @@ -262,7 +266,7 @@ private void updatePeekBuffer(int bytesConsumed) { } /** - * Starts or continues a read from the data source. + * Starts or continues a read from the data reader. * * @param target A target array into which data should be written. * @param offset The offset into the target array at which to write. @@ -271,20 +275,20 @@ private void updatePeekBuffer(int bytesConsumed) { * @param allowEndOfInput True if encountering the end of the input having read no data is * allowed, and should result in {@link C#RESULT_END_OF_INPUT} being returned. False if it * should be considered an error, causing an {@link EOFException} to be thrown. - * @return The total number of bytes read so far, or {@link C#RESULT_END_OF_INPUT} if - * {@code allowEndOfInput} is true and the input has ended having read no bytes. + * @return The total number of bytes read so far, or {@link C#RESULT_END_OF_INPUT} if {@code + * allowEndOfInput} is true and the input has ended having read no bytes. * @throws EOFException If the end of input was encountered having partially satisfied the read * (i.e. having read at least one byte, but fewer than {@code length}), or if no bytes were * read and {@code allowEndOfInput} is false. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread is interrupted. 
*/ - private int readFromDataSource(byte[] target, int offset, int length, int bytesAlreadyRead, - boolean allowEndOfInput) throws InterruptedException, IOException { + private int readFromUpstream( + byte[] target, int offset, int length, int bytesAlreadyRead, boolean allowEndOfInput) + throws IOException { if (Thread.interrupted()) { - throw new InterruptedException(); + throw new InterruptedIOException(); } - int bytesRead = dataSource.read(target, offset + bytesAlreadyRead, length - bytesAlreadyRead); + int bytesRead = dataReader.read(target, offset + bytesAlreadyRead, length - bytesAlreadyRead); if (bytesRead == C.RESULT_END_OF_INPUT) { if (bytesAlreadyRead == 0 && allowEndOfInput) { return C.RESULT_END_OF_INPUT; @@ -304,5 +308,4 @@ private void commitBytesRead(int bytesRead) { position += bytesRead; } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DefaultExtractorsFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DefaultExtractorsFactory.java index bb8673e18b..1cac3c4e84 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DefaultExtractorsFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DefaultExtractorsFactory.java @@ -15,9 +15,20 @@ */ package com.google.android.exoplayer2.extractor; +import static com.google.android.exoplayer2.util.FileTypes.inferFileTypeFromResponseHeaders; +import static com.google.android.exoplayer2.util.FileTypes.inferFileTypeFromUri; + +import android.net.Uri; +import androidx.annotation.GuardedBy; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.extractor.amr.AmrExtractor; +import com.google.android.exoplayer2.extractor.avi.AviExtractor; import com.google.android.exoplayer2.extractor.flac.FlacExtractor; import com.google.android.exoplayer2.extractor.flv.FlvExtractor; +import com.google.android.exoplayer2.extractor.jpeg.JpegExtractor; import com.google.android.exoplayer2.extractor.mkv.MatroskaExtractor; import com.google.android.exoplayer2.extractor.mp3.Mp3Extractor; import com.google.android.exoplayer2.extractor.mp4.FragmentedMp4Extractor; @@ -31,8 +42,17 @@ import com.google.android.exoplayer2.extractor.ts.TsExtractor; import com.google.android.exoplayer2.extractor.ts.TsPayloadReader; import com.google.android.exoplayer2.extractor.wav.WavExtractor; +import com.google.android.exoplayer2.util.FileTypes; import com.google.android.exoplayer2.util.TimestampAdjuster; +import com.google.common.collect.ImmutableList; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; /** * An {@link ExtractorsFactory} that provides an array of extractors for the following formats: @@ -53,51 +73,68 @@ *

    <li>AMR ({@link AmrExtractor}) *
    <li>FLAC *
        <ul> - *
      <li>If available, the FLAC extension extractor is used. + *
      <li>If available, the FLAC extension's {@code + * com.google.android.exoplayer2.ext.flac.FlacExtractor} is used. *
      <li>Otherwise, the core {@link FlacExtractor} is used. Note that Android devices do not * generally include a FLAC decoder before API 27. This can be worked around by using * the FLAC extension or the FFmpeg extension. *
        </ul> + *
    <li>JPEG ({@link JpegExtractor}) + *
    • MIDI, if available, the MIDI extension's {@code + * com.google.android.exoplayer2.decoder.midi.MidiExtractor} is used. * */ public final class DefaultExtractorsFactory implements ExtractorsFactory { - private static final Constructor FLAC_EXTENSION_EXTRACTOR_CONSTRUCTOR; - - static { - Constructor flacExtensionExtractorConstructor = null; - try { - // LINT.IfChange - @SuppressWarnings("nullness:argument.type.incompatible") - boolean isFlacNativeLibraryAvailable = true; - if (isFlacNativeLibraryAvailable) { - flacExtensionExtractorConstructor = - Class.forName("com.google.android.exoplayer2.ext.flac.FlacExtractor") - .asSubclass(Extractor.class) - .getConstructor(); - } - // LINT.ThenChange(../../../../../../../../proguard-rules.txt) - } catch (ClassNotFoundException e) { - // Expected if the app was built without the FLAC extension. - } catch (Exception e) { - // The FLAC extension is present, but instantiation failed. - throw new RuntimeException("Error instantiating FLAC extension", e); - } - FLAC_EXTENSION_EXTRACTOR_CONSTRUCTOR = flacExtensionExtractorConstructor; - } + // Extractors order is optimized according to + // https://docs.google.com/document/d/1w2mKaWMxfz2Ei8-LdxqbPs1VLe_oudB-eryXXw9OvQQ. + // The JPEG extractor appears after audio/video extractors because we expect audio/video input to + // be more common. + private static final int[] DEFAULT_EXTRACTOR_ORDER = + new int[] { + FileTypes.FLV, + FileTypes.FLAC, + FileTypes.WAV, + FileTypes.MP4, + FileTypes.AMR, + FileTypes.PS, + FileTypes.OGG, + FileTypes.TS, + FileTypes.MATROSKA, + FileTypes.ADTS, + FileTypes.AC3, + FileTypes.AC4, + FileTypes.MP3, + // The following extractors are not part of the optimized ordering, and were appended + // without further analysis. + FileTypes.AVI, + FileTypes.MIDI, + FileTypes.JPEG, + }; + + private static final ExtensionLoader FLAC_EXTENSION_LOADER = + new ExtensionLoader(DefaultExtractorsFactory::getFlacExtractorConstructor); + private static final ExtensionLoader MIDI_EXTENSION_LOADER = + new ExtensionLoader(DefaultExtractorsFactory::getMidiExtractorConstructor); private boolean constantBitrateSeekingEnabled = true; + private boolean constantBitrateSeekingAlwaysEnabled; private @AdtsExtractor.Flags int adtsFlags; private @AmrExtractor.Flags int amrFlags; + private @FlacExtractor.Flags int flacFlags; private @MatroskaExtractor.Flags int matroskaFlags; private @Mp4Extractor.Flags int mp4Flags; private @FragmentedMp4Extractor.Flags int fragmentedMp4Flags; private @Mp3Extractor.Flags int mp3Flags; private @TsExtractor.Mode int tsMode; private @DefaultTsPayloadReaderFactory.Flags int tsFlags; + private ImmutableList tsSubtitleFormats; + private int tsTimestampSearchBytes; public DefaultExtractorsFactory() { tsMode = TsExtractor.MODE_SINGLE_PMT; + tsTimestampSearchBytes = TsExtractor.DEFAULT_TIMESTAMP_SEARCH_BYTES; + tsSubtitleFormats = ImmutableList.of(); } /** @@ -111,12 +148,38 @@ public DefaultExtractorsFactory() { * assumption should be enabled for all extractors that support it. * @return The factory, for convenience. 
*/ + @CanIgnoreReturnValue public synchronized DefaultExtractorsFactory setConstantBitrateSeekingEnabled( boolean constantBitrateSeekingEnabled) { this.constantBitrateSeekingEnabled = constantBitrateSeekingEnabled; return this; } + /** + * Convenience method to set whether approximate seeking using constant bitrate assumptions should + * be enabled for all extractors that support it, and if it should be enabled even if the content + * length (and hence the duration of the media) is unknown. If set to true, the flags required to + * enable this functionality will be OR'd with those passed to the setters when creating extractor + * instances. If set to false then the flags passed to the setters will be used without + * modification. + * + *
<p>
      When seeking into content where the length is unknown, application code should ensure that + * requested seek positions are valid, or should be ready to handle playback failures reported + * through {@link Player.Listener#onPlayerError} with {@link PlaybackException#errorCode} set to + * {@link PlaybackException#ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE}. + * + * @param constantBitrateSeekingAlwaysEnabled Whether approximate seeking using a constant bitrate + * assumption should be enabled for all extractors that support it, including when the content + * duration is unknown. + * @return The factory, for convenience. + */ + @CanIgnoreReturnValue + public synchronized DefaultExtractorsFactory setConstantBitrateSeekingAlwaysEnabled( + boolean constantBitrateSeekingAlwaysEnabled) { + this.constantBitrateSeekingAlwaysEnabled = constantBitrateSeekingAlwaysEnabled; + return this; + } + /** * Sets flags for {@link AdtsExtractor} instances created by the factory. * @@ -124,6 +187,7 @@ public synchronized DefaultExtractorsFactory setConstantBitrateSeekingEnabled( * @param flags The flags to use. * @return The factory, for convenience. */ + @CanIgnoreReturnValue public synchronized DefaultExtractorsFactory setAdtsExtractorFlags( @AdtsExtractor.Flags int flags) { this.adtsFlags = flags; @@ -137,11 +201,28 @@ public synchronized DefaultExtractorsFactory setAdtsExtractorFlags( * @param flags The flags to use. * @return The factory, for convenience. */ + @CanIgnoreReturnValue public synchronized DefaultExtractorsFactory setAmrExtractorFlags(@AmrExtractor.Flags int flags) { this.amrFlags = flags; return this; } + /** + * Sets flags for {@link FlacExtractor} instances created by the factory. The flags are also used + * by {@code com.google.android.exoplayer2.ext.flac.FlacExtractor} instances if the FLAC extension + * is being used. + * + * @see FlacExtractor#FlacExtractor(int) + * @param flags The flags to use. + * @return The factory, for convenience. + */ + @CanIgnoreReturnValue + public synchronized DefaultExtractorsFactory setFlacExtractorFlags( + @FlacExtractor.Flags int flags) { + this.flacFlags = flags; + return this; + } + /** * Sets flags for {@link MatroskaExtractor} instances created by the factory. * @@ -149,6 +230,7 @@ public synchronized DefaultExtractorsFactory setAmrExtractorFlags(@AmrExtractor. * @param flags The flags to use. * @return The factory, for convenience. */ + @CanIgnoreReturnValue public synchronized DefaultExtractorsFactory setMatroskaExtractorFlags( @MatroskaExtractor.Flags int flags) { this.matroskaFlags = flags; @@ -162,6 +244,7 @@ public synchronized DefaultExtractorsFactory setMatroskaExtractorFlags( * @param flags The flags to use. * @return The factory, for convenience. */ + @CanIgnoreReturnValue public synchronized DefaultExtractorsFactory setMp4ExtractorFlags(@Mp4Extractor.Flags int flags) { this.mp4Flags = flags; return this; @@ -174,6 +257,7 @@ public synchronized DefaultExtractorsFactory setMp4ExtractorFlags(@Mp4Extractor. * @param flags The flags to use. * @return The factory, for convenience. */ + @CanIgnoreReturnValue public synchronized DefaultExtractorsFactory setFragmentedMp4ExtractorFlags( @FragmentedMp4Extractor.Flags int flags) { this.fragmentedMp4Flags = flags; @@ -187,6 +271,7 @@ public synchronized DefaultExtractorsFactory setFragmentedMp4ExtractorFlags( * @param flags The flags to use. * @return The factory, for convenience. 
*/ + @CanIgnoreReturnValue public synchronized DefaultExtractorsFactory setMp3ExtractorFlags(@Mp3Extractor.Flags int flags) { mp3Flags = flags; return this; @@ -195,10 +280,11 @@ public synchronized DefaultExtractorsFactory setMp3ExtractorFlags(@Mp3Extractor. /** * Sets the mode for {@link TsExtractor} instances created by the factory. * - * @see TsExtractor#TsExtractor(int, TimestampAdjuster, TsPayloadReader.Factory) + * @see TsExtractor#TsExtractor(int, TimestampAdjuster, TsPayloadReader.Factory, int) * @param mode The mode to use. * @return The factory, for convenience. */ + @CanIgnoreReturnValue public synchronized DefaultExtractorsFactory setTsExtractorMode(@TsExtractor.Mode int mode) { tsMode = mode; return this; @@ -212,54 +298,246 @@ public synchronized DefaultExtractorsFactory setTsExtractorMode(@TsExtractor.Mod * @param flags The flags to use. * @return The factory, for convenience. */ + @CanIgnoreReturnValue public synchronized DefaultExtractorsFactory setTsExtractorFlags( @DefaultTsPayloadReaderFactory.Flags int flags) { tsFlags = flags; return this; } + /** + * Sets a list of subtitle formats to pass to the {@link DefaultTsPayloadReaderFactory} used by + * {@link TsExtractor} instances created by the factory. + * + * @see DefaultTsPayloadReaderFactory#DefaultTsPayloadReaderFactory(int, List) + * @param subtitleFormats The subtitle formats. + * @return The factory, for convenience. + */ + @CanIgnoreReturnValue + public synchronized DefaultExtractorsFactory setTsSubtitleFormats(List subtitleFormats) { + tsSubtitleFormats = ImmutableList.copyOf(subtitleFormats); + return this; + } + + /** + * Sets the number of bytes searched to find a timestamp for {@link TsExtractor} instances created + * by the factory. + * + * @see TsExtractor#TsExtractor(int, TimestampAdjuster, TsPayloadReader.Factory, int) + * @param timestampSearchBytes The number of search bytes to use. + * @return The factory, for convenience. + */ + @CanIgnoreReturnValue + public synchronized DefaultExtractorsFactory setTsExtractorTimestampSearchBytes( + int timestampSearchBytes) { + tsTimestampSearchBytes = timestampSearchBytes; + return this; + } + @Override public synchronized Extractor[] createExtractors() { - Extractor[] extractors = new Extractor[14]; - extractors[0] = new MatroskaExtractor(matroskaFlags); - extractors[1] = new FragmentedMp4Extractor(fragmentedMp4Flags); - extractors[2] = new Mp4Extractor(mp4Flags); - extractors[3] = new OggExtractor(); - extractors[4] = - new Mp3Extractor( - mp3Flags - | (constantBitrateSeekingEnabled - ? Mp3Extractor.FLAG_ENABLE_CONSTANT_BITRATE_SEEKING - : 0)); - extractors[5] = - new AdtsExtractor( - adtsFlags - | (constantBitrateSeekingEnabled - ? AdtsExtractor.FLAG_ENABLE_CONSTANT_BITRATE_SEEKING - : 0)); - extractors[6] = new Ac3Extractor(); - extractors[7] = new TsExtractor(tsMode, tsFlags); - extractors[8] = new FlvExtractor(); - extractors[9] = new PsExtractor(); - extractors[10] = new WavExtractor(); - extractors[11] = - new AmrExtractor( - amrFlags - | (constantBitrateSeekingEnabled - ? 
AmrExtractor.FLAG_ENABLE_CONSTANT_BITRATE_SEEKING - : 0)); - extractors[12] = new Ac4Extractor(); - if (FLAC_EXTENSION_EXTRACTOR_CONSTRUCTOR != null) { + return createExtractors(Uri.EMPTY, new HashMap<>()); + } + + @Override + public synchronized Extractor[] createExtractors( + Uri uri, Map> responseHeaders) { + List extractors = + new ArrayList<>(/* initialCapacity= */ DEFAULT_EXTRACTOR_ORDER.length); + + @FileTypes.Type + int responseHeadersInferredFileType = inferFileTypeFromResponseHeaders(responseHeaders); + if (responseHeadersInferredFileType != FileTypes.UNKNOWN) { + addExtractorsForFileType(responseHeadersInferredFileType, extractors); + } + + @FileTypes.Type int uriInferredFileType = inferFileTypeFromUri(uri); + if (uriInferredFileType != FileTypes.UNKNOWN + && uriInferredFileType != responseHeadersInferredFileType) { + addExtractorsForFileType(uriInferredFileType, extractors); + } + + for (int fileType : DEFAULT_EXTRACTOR_ORDER) { + if (fileType != responseHeadersInferredFileType && fileType != uriInferredFileType) { + addExtractorsForFileType(fileType, extractors); + } + } + + return extractors.toArray(new Extractor[extractors.size()]); + } + + private void addExtractorsForFileType(@FileTypes.Type int fileType, List extractors) { + switch (fileType) { + case FileTypes.AC3: + extractors.add(new Ac3Extractor()); + break; + case FileTypes.AC4: + extractors.add(new Ac4Extractor()); + break; + case FileTypes.ADTS: + extractors.add( + new AdtsExtractor( + adtsFlags + | (constantBitrateSeekingEnabled + ? AdtsExtractor.FLAG_ENABLE_CONSTANT_BITRATE_SEEKING + : 0) + | (constantBitrateSeekingAlwaysEnabled + ? AdtsExtractor.FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS + : 0))); + break; + case FileTypes.AMR: + extractors.add( + new AmrExtractor( + amrFlags + | (constantBitrateSeekingEnabled + ? AmrExtractor.FLAG_ENABLE_CONSTANT_BITRATE_SEEKING + : 0) + | (constantBitrateSeekingAlwaysEnabled + ? AmrExtractor.FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS + : 0))); + break; + case FileTypes.FLAC: + @Nullable Extractor flacExtractor = FLAC_EXTENSION_LOADER.getExtractor(flacFlags); + if (flacExtractor != null) { + extractors.add(flacExtractor); + } else { + extractors.add(new FlacExtractor(flacFlags)); + } + break; + case FileTypes.FLV: + extractors.add(new FlvExtractor()); + break; + case FileTypes.MATROSKA: + extractors.add(new MatroskaExtractor(matroskaFlags)); + break; + case FileTypes.MP3: + extractors.add( + new Mp3Extractor( + mp3Flags + | (constantBitrateSeekingEnabled + ? Mp3Extractor.FLAG_ENABLE_CONSTANT_BITRATE_SEEKING + : 0) + | (constantBitrateSeekingAlwaysEnabled + ? 
Mp3Extractor.FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS + : 0))); + break; + case FileTypes.MP4: + extractors.add(new FragmentedMp4Extractor(fragmentedMp4Flags)); + extractors.add(new Mp4Extractor(mp4Flags)); + break; + case FileTypes.OGG: + extractors.add(new OggExtractor()); + break; + case FileTypes.PS: + extractors.add(new PsExtractor()); + break; + case FileTypes.TS: + extractors.add( + new TsExtractor( + tsMode, + new TimestampAdjuster(0), + new DefaultTsPayloadReaderFactory(tsFlags, tsSubtitleFormats), + tsTimestampSearchBytes)); + break; + case FileTypes.WAV: + extractors.add(new WavExtractor()); + break; + case FileTypes.JPEG: + extractors.add(new JpegExtractor()); + break; + case FileTypes.MIDI: + @Nullable Extractor midiExtractor = MIDI_EXTENSION_LOADER.getExtractor(); + if (midiExtractor != null) { + extractors.add(midiExtractor); + } + break; + case FileTypes.AVI: + extractors.add(new AviExtractor()); + break; + case FileTypes.WEBVTT: + case FileTypes.UNKNOWN: + default: + break; + } + } + + private static Constructor getMidiExtractorConstructor() + throws ClassNotFoundException, NoSuchMethodException { + return Class.forName("com.google.android.exoplayer2.decoder.midi.MidiExtractor") + .asSubclass(Extractor.class) + .getConstructor(); + } + + @Nullable + private static Constructor getFlacExtractorConstructor() + throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException, + IllegalAccessException { + @SuppressWarnings("nullness:argument") + boolean isFlacNativeLibraryAvailable = + Boolean.TRUE.equals( + Class.forName("com.google.android.exoplayer2.ext.flac.FlacLibrary") + .getMethod("isAvailable") + .invoke(/* obj= */ null)); + if (isFlacNativeLibraryAvailable) { + return Class.forName("com.google.android.exoplayer2.ext.flac.FlacExtractor") + .asSubclass(Extractor.class) + .getConstructor(int.class); + } + return null; + } + + private static final class ExtensionLoader { + + public interface ConstructorSupplier { + @Nullable + Constructor getConstructor() + throws InvocationTargetException, IllegalAccessException, NoSuchMethodException, + ClassNotFoundException; + } + + private final ConstructorSupplier constructorSupplier; + private final AtomicBoolean extensionLoaded; + + @GuardedBy("extensionLoaded") + @Nullable + private Constructor extractorConstructor; + + public ExtensionLoader(ConstructorSupplier constructorSupplier) { + this.constructorSupplier = constructorSupplier; + extensionLoaded = new AtomicBoolean(false); + } + + @Nullable + public Extractor getExtractor(Object... constructorParams) { + @Nullable + Constructor extractorConstructor = maybeLoadExtractorConstructor(); + if (extractorConstructor == null) { + return null; + } try { - extractors[13] = FLAC_EXTENSION_EXTRACTOR_CONSTRUCTOR.newInstance(); + return extractorConstructor.newInstance(constructorParams); } catch (Exception e) { - // Should never happen. - throw new IllegalStateException("Unexpected error creating FLAC extractor", e); + throw new IllegalStateException("Unexpected error creating extractor", e); } - } else { - extractors[13] = new FlacExtractor(); } - return extractors; - } + @Nullable + private Constructor maybeLoadExtractorConstructor() { + synchronized (extensionLoaded) { + if (extensionLoaded.get()) { + return extractorConstructor; + } + try { + return constructorSupplier.getConstructor(); + } catch (ClassNotFoundException e) { + // Expected if the app was built without the extension. + } catch (Exception e) { + // The extension is present, but instantiation failed. 
+ throw new RuntimeException("Error instantiating extension", e); + } + extensionLoaded.set(true); + return extractorConstructor; + } + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DummyExtractorOutput.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DummyExtractorOutput.java index f199493500..51fc59fd24 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DummyExtractorOutput.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DummyExtractorOutput.java @@ -15,7 +15,7 @@ */ package com.google.android.exoplayer2.extractor; -/** A dummy {@link ExtractorOutput} implementation. */ +/** A fake {@link ExtractorOutput} implementation. */ public final class DummyExtractorOutput implements ExtractorOutput { @Override diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DummyTrackOutput.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DummyTrackOutput.java index f1aeccacb7..94c4a9af94 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DummyTrackOutput.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DummyTrackOutput.java @@ -15,27 +15,39 @@ */ package com.google.android.exoplayer2.extractor; +import static java.lang.Math.min; + import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.upstream.DataReader; import com.google.android.exoplayer2.util.ParsableByteArray; import java.io.EOFException; import java.io.IOException; -/** - * A dummy {@link TrackOutput} implementation. - */ +/** A fake {@link TrackOutput} implementation. */ public final class DummyTrackOutput implements TrackOutput { + // Even though read data is discarded, data source implementations could be making use of the + // buffer contents. For example, caches. So we cannot use a static field for this which could be + // shared between different threads. + private final byte[] readBuffer; + + public DummyTrackOutput() { + readBuffer = new byte[4096]; + } + @Override public void format(Format format) { // Do nothing. 
} @Override - public int sampleData(ExtractorInput input, int length, boolean allowEndOfInput) - throws IOException, InterruptedException { - int bytesSkipped = input.skip(length); + public int sampleData( + DataReader input, int length, boolean allowEndOfInput, @SampleDataPart int sampleDataPart) + throws IOException { + int bytesToSkipByReading = min(readBuffer.length, length); + int bytesSkipped = input.read(readBuffer, /* offset= */ 0, bytesToSkipByReading); if (bytesSkipped == C.RESULT_END_OF_INPUT) { if (allowEndOfInput) { return C.RESULT_END_OF_INPUT; @@ -46,7 +58,7 @@ public int sampleData(ExtractorInput input, int length, boolean allowEndOfInput) } @Override - public void sampleData(ParsableByteArray data, int length) { + public void sampleData(ParsableByteArray data, int length, @SampleDataPart int sampleDataPart) { data.skipBytes(length); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/Extractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/Extractor.java index a9151a1b7c..b0791bb981 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/Extractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/Extractor.java @@ -15,16 +15,17 @@ */ package com.google.android.exoplayer2.extractor; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import com.google.android.exoplayer2.C; import java.io.IOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; -/** - * Extracts media data from a container format. - */ +/** Extracts media data from a container format. */ public interface Extractor { /** @@ -40,8 +41,8 @@ public interface Extractor { */ int RESULT_SEEK = 1; /** - * Returned by {@link #read(ExtractorInput, PositionHolder)} if the end of the - * {@link ExtractorInput} was reached. Equal to {@link C#RESULT_END_OF_INPUT}. + * Returned by {@link #read(ExtractorInput, PositionHolder)} if the end of the {@link + * ExtractorInput} was reached. Equal to {@link C#RESULT_END_OF_INPUT}. */ int RESULT_END_OF_INPUT = C.RESULT_END_OF_INPUT; @@ -51,22 +52,22 @@ public interface Extractor { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef(value = {RESULT_CONTINUE, RESULT_SEEK, RESULT_END_OF_INPUT}) @interface ReadResult {} /** * Returns whether this extractor can extract samples from the {@link ExtractorInput}, which must * provide data from the start of the stream. - *

- * If {@code true} is returned, the {@code input}'s reading position may have been modified. + *
+ * <p>
      If {@code true} is returned, the {@code input}'s reading position may have been modified. * Otherwise, only its peek position may have been modified. * * @param input The {@link ExtractorInput} from which data should be peeked/read. * @return Whether this extractor can read the provided input. * @throws IOException If an error occurred reading from the input. - * @throws InterruptedException If the thread was interrupted. */ - boolean sniff(ExtractorInput input) throws IOException, InterruptedException; + boolean sniff(ExtractorInput input) throws IOException; /** * Initializes the extractor with an {@link ExtractorOutput}. Called at most once. @@ -89,25 +90,23 @@ public interface Extractor { * {@link #RESULT_SEEK} is returned. If the extractor reached the end of the data provided by the * {@link ExtractorInput}, then {@link #RESULT_END_OF_INPUT} is returned. * - *

<p>When this method throws an {@link IOException} or an {@link InterruptedException}, - * extraction may continue by providing an {@link ExtractorInput} with an unchanged {@link - * ExtractorInput#getPosition() read position} to a subsequent call to this method.
+ * <p>
      When this method throws an {@link IOException}, extraction may continue by providing an + * {@link ExtractorInput} with an unchanged {@link ExtractorInput#getPosition() read position} to + * a subsequent call to this method. * * @param input The {@link ExtractorInput} from which data should be read. * @param seekPosition If {@link #RESULT_SEEK} is returned, this holder is updated to hold the * position of the required data. * @return One of the {@code RESULT_} values defined in this interface. - * @throws IOException If an error occurred reading from the input. - * @throws InterruptedException If the thread was interrupted. + * @throws IOException If an error occurred reading from or parsing the input. */ @ReadResult - int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException; + int read(ExtractorInput input, PositionHolder seekPosition) throws IOException; /** * Notifies the extractor that a seek has occurred. - *

- * Following a call to this method, the {@link ExtractorInput} passed to the next invocation of + *
+ * <p>
      Following a call to this method, the {@link ExtractorInput} passed to the next invocation of * {@link #read(ExtractorInput, PositionHolder)} is required to provide data starting from {@code * position} in the stream. Valid random access positions are the start of the stream and * positions that can be obtained from any {@link SeekMap} passed to the {@link ExtractorOutput}. @@ -117,9 +116,6 @@ int read(ExtractorInput input, PositionHolder seekPosition) */ void seek(long position, long timeUs); - /** - * Releases all kept resources. - */ + /** Releases all kept resources. */ void release(); - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorInput.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorInput.java index 8e5d6f0448..3dc9e0b15b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorInput.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorInput.java @@ -16,6 +16,7 @@ package com.google.android.exoplayer2.extractor; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.upstream.DataReader; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; @@ -33,7 +34,7 @@ * wants to read an entire block/frame/header of known length. * * - *

      {@link InputStream}-like methods

      + *

      {@link InputStream}-like methods

      * *

      The {@code read()/peek()} and {@code skip()} methods provide {@link InputStream}-like * byte-level access operations. The {@code length} parameter is a maximum, and each method returns @@ -41,7 +42,7 @@ * the input was reached, or the method was interrupted, or the operation was aborted early for * another reason. * - *

      Block-based methods

      + *

      Block-based methods

      * *

      The {@code read/skip/peekFully()} and {@code advancePeekPosition()} methods assume the user * wants to read an entire block/frame/header of known length. @@ -63,22 +64,22 @@ * (regardless of {@code allowEndOfInput}). * */ -public interface ExtractorInput { +public interface ExtractorInput extends DataReader { /** * Reads up to {@code length} bytes from the input and resets the peek position. - *

- * This method blocks until at least one byte of data can be read, the end of the input is + *
+ * <p>
      This method blocks until at least one byte of data can be read, the end of the input is * detected, or an exception is thrown. * - * @param target A target array into which data should be written. + * @param buffer A target array into which data should be written. * @param offset The offset into the target array at which to write. * @param length The maximum number of bytes to read from the input. * @return The number of bytes read, or {@link C#RESULT_END_OF_INPUT} if the input has ended. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread has been interrupted. */ - int read(byte[] target, int offset, int length) throws IOException, InterruptedException; + @Override + int read(byte[] buffer, int offset, int length) throws IOException; /** * Like {@link #read(byte[], int, int)}, but reads the requested {@code length} in full. @@ -96,10 +97,9 @@ public interface ExtractorInput { * (i.e. having read at least one byte, but fewer than {@code length}), or if no bytes were * read and {@code allowEndOfInput} is false. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread has been interrupted. */ boolean readFully(byte[] target, int offset, int length, boolean allowEndOfInput) - throws IOException, InterruptedException; + throws IOException; /** * Equivalent to {@link #readFully(byte[], int, int, boolean) readFully(target, offset, length, @@ -110,9 +110,8 @@ boolean readFully(byte[] target, int offset, int length, boolean allowEndOfInput * @param length The number of bytes to read from the input. * @throws EOFException If the end of input was encountered. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread is interrupted. */ - void readFully(byte[] target, int offset, int length) throws IOException, InterruptedException; + void readFully(byte[] target, int offset, int length) throws IOException; /** * Like {@link #read(byte[], int, int)}, except the data is skipped instead of read. @@ -120,9 +119,8 @@ boolean readFully(byte[] target, int offset, int length, boolean allowEndOfInput * @param length The maximum number of bytes to skip from the input. * @return The number of bytes skipped, or {@link C#RESULT_END_OF_INPUT} if the input has ended. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread has been interrupted. */ - int skip(int length) throws IOException, InterruptedException; + int skip(int length) throws IOException; /** * Like {@link #readFully(byte[], int, int, boolean)}, except the data is skipped instead of read. @@ -138,22 +136,20 @@ boolean readFully(byte[] target, int offset, int length, boolean allowEndOfInput * (i.e. having skipped at least one byte, but fewer than {@code length}), or if no bytes were * skipped and {@code allowEndOfInput} is false. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread has been interrupted. */ - boolean skipFully(int length, boolean allowEndOfInput) throws IOException, InterruptedException; + boolean skipFully(int length, boolean allowEndOfInput) throws IOException; /** * Like {@link #readFully(byte[], int, int)}, except the data is skipped instead of read. - *

- * Encountering the end of input is always considered an error, and will result in an - * {@link EOFException} being thrown. + *
+ * <p>
      Encountering the end of input is always considered an error, and will result in an {@link + * EOFException} being thrown. * * @param length The number of bytes to skip from the input. * @throws EOFException If the end of input was encountered. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread is interrupted. */ - void skipFully(int length) throws IOException, InterruptedException; + void skipFully(int length) throws IOException; /** * Peeks up to {@code length} bytes from the peek position. The current read position is left @@ -171,9 +167,8 @@ boolean readFully(byte[] target, int offset, int length, boolean allowEndOfInput * @param length The maximum number of bytes to peek from the input. * @return The number of bytes peeked, or {@link C#RESULT_END_OF_INPUT} if the input has ended. * @throws IOException If an error occurs peeking from the input. - * @throws InterruptedException If the thread has been interrupted. */ - int peek(byte[] target, int offset, int length) throws IOException, InterruptedException; + int peek(byte[] target, int offset, int length) throws IOException; /** * Like {@link #peek(byte[], int, int)}, but peeks the requested {@code length} in full. @@ -191,10 +186,9 @@ boolean readFully(byte[] target, int offset, int length, boolean allowEndOfInput * (i.e. having peeked at least one byte, but fewer than {@code length}), or if no bytes were * peeked and {@code allowEndOfInput} is false. * @throws IOException If an error occurs peeking from the input. - * @throws InterruptedException If the thread is interrupted. */ boolean peekFully(byte[] target, int offset, int length, boolean allowEndOfInput) - throws IOException, InterruptedException; + throws IOException; /** * Equivalent to {@link #peekFully(byte[], int, int, boolean) peekFully(target, offset, length, @@ -205,9 +199,8 @@ boolean peekFully(byte[] target, int offset, int length, boolean allowEndOfInput * @param length The number of bytes to peek from the input. * @throws EOFException If the end of input was encountered. * @throws IOException If an error occurs peeking from the input. - * @throws InterruptedException If the thread is interrupted. */ - void peekFully(byte[] target, int offset, int length) throws IOException, InterruptedException; + void peekFully(byte[] target, int offset, int length) throws IOException; /** * Advances the peek position by {@code length} bytes. Like {@link #peekFully(byte[], int, int, @@ -224,10 +217,8 @@ boolean peekFully(byte[] target, int offset, int length, boolean allowEndOfInput * advanced by at least one byte, but fewer than {@code length}), or if the end of input was * encountered before advancing and {@code allowEndOfInput} is false. * @throws IOException If an error occurs advancing the peek position. - * @throws InterruptedException If the thread is interrupted. */ - boolean advancePeekPosition(int length, boolean allowEndOfInput) - throws IOException, InterruptedException; + boolean advancePeekPosition(int length, boolean allowEndOfInput) throws IOException; /** * Advances the peek position by {@code length} bytes. Like {@link #peekFully(byte[], int, int)} @@ -236,13 +227,10 @@ boolean advancePeekPosition(int length, boolean allowEndOfInput) * @param length The number of bytes to peek from the input. * @throws EOFException If the end of input was encountered. * @throws IOException If an error occurs peeking from the input. - * @throws InterruptedException If the thread is interrupted. 
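(Aside: a sketch of how the peek-oriented methods documented above are typically used from Extractor.sniff(). The four-byte signature is made up; only the ExtractorInput calls are taken from this interface.)

    // Hypothetical sniff(): peek a 4-byte signature without consuming input, then
    // rewind the peek position so a later read() starts from the stream start.
    @Override
    public boolean sniff(ExtractorInput input) throws IOException {
      byte[] header = new byte[4];
      if (!input.peekFully(header, /* offset= */ 0, /* length= */ 4, /* allowEndOfInput= */ true)) {
        return false; // Input ended before a full header was available.
      }
      input.resetPeekPosition();
      return header[0] == 'F' && header[1] == 'O' && header[2] == 'O' && header[3] == '1';
    }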
*/ - void advancePeekPosition(int length) throws IOException, InterruptedException; + void advancePeekPosition(int length) throws IOException; - /** - * Resets the peek position to equal the current read position. - */ + /** Resets the peek position to equal the current read position. */ void resetPeekPosition(); /** @@ -276,5 +264,4 @@ boolean advancePeekPosition(int length, boolean allowEndOfInput) * @throws E The given {@link Throwable} object. */ void setRetryPosition(long position, E e) throws E; - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorOutput.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorOutput.java index a59cb1d1f2..272187607b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorOutput.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorOutput.java @@ -15,22 +15,45 @@ */ package com.google.android.exoplayer2.extractor; -/** - * Receives stream level data extracted by an {@link Extractor}. - */ +import com.google.android.exoplayer2.C; + +/** Receives stream level data extracted by an {@link Extractor}. */ public interface ExtractorOutput { + /** + * Placeholder {@link ExtractorOutput} implementation throwing an {@link + * UnsupportedOperationException} in each method. + */ + ExtractorOutput PLACEHOLDER = + new ExtractorOutput() { + + @Override + public TrackOutput track(int id, int type) { + throw new UnsupportedOperationException(); + } + + @Override + public void endTracks() { + throw new UnsupportedOperationException(); + } + + @Override + public void seekMap(SeekMap seekMap) { + throw new UnsupportedOperationException(); + } + }; + /** * Called by the {@link Extractor} to get the {@link TrackOutput} for a specific track. - *

- * The same {@link TrackOutput} is returned if multiple calls are made with the same {@code id}. + *
+ * <p>
      The same {@link TrackOutput} is returned if multiple calls are made with the same {@code + * id}. * * @param id A track identifier. - * @param type The type of the track. Typically one of the {@link com.google.android.exoplayer2.C} - * {@code TRACK_TYPE_*} constants. + * @param type The {@link C.TrackType track type}. * @return The {@link TrackOutput} for the given track identifier. */ - TrackOutput track(int id, int type); + TrackOutput track(int id, @C.TrackType int type); /** * Called when all tracks have been identified, meaning no new {@code trackId} values will be @@ -44,5 +67,4 @@ public interface ExtractorOutput { * @param seekMap The extracted {@link SeekMap}. */ void seekMap(SeekMap seekMap); - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorUtil.java index 3867a0fded..54cc61f2ef 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorUtil.java @@ -15,11 +15,27 @@ */ package com.google.android.exoplayer2.extractor; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ParserException; +import java.io.EOFException; import java.io.IOException; +import org.checkerframework.dataflow.qual.Pure; /** Extractor related utility methods. */ -/* package */ final class ExtractorUtil { +public final class ExtractorUtil { + + /** + * If {@code expression} is false, throws a {@link ParserException#createForMalformedContainer + * container malformed ParserException} with the given message. Otherwise, does nothing. + */ + @Pure + public static void checkContainerInput(boolean expression, @Nullable String message) + throws ParserException { + if (!expression) { + throw ParserException.createForMalformedContainer(message, /* cause= */ null); + } + } /** * Peeks {@code length} bytes from the input peek position, or all the bytes to the end of the @@ -33,10 +49,9 @@ * @param length The maximum number of bytes to peek from the input. * @return The number of bytes peeked. * @throws IOException If an error occurs peeking from the input. - * @throws InterruptedException If the thread has been interrupted. */ public static int peekToLength(ExtractorInput input, byte[] target, int offset, int length) - throws IOException, InterruptedException { + throws IOException { int totalBytesPeeked = 0; while (totalBytesPeeked < length) { int bytesPeeked = input.peek(target, offset + totalBytesPeeked, length - totalBytesPeeked); @@ -48,5 +63,62 @@ public static int peekToLength(ExtractorInput input, byte[] target, int offset, return totalBytesPeeked; } + /** + * Equivalent to {@link ExtractorInput#readFully(byte[], int, int)} except that it returns {@code + * false} instead of throwing an {@link EOFException} if the end of input is encountered without + * having fully satisfied the read. + */ + public static boolean readFullyQuietly( + ExtractorInput input, byte[] output, int offset, int length) throws IOException { + try { + input.readFully(output, offset, length); + } catch (EOFException e) { + return false; + } + return true; + } + + /** + * Equivalent to {@link ExtractorInput#skipFully(int)} except that it returns {@code false} + * instead of throwing an {@link EOFException} if the end of input is encountered without having + * fully satisfied the skip. 
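(Aside: how the helpers above are intended to be combined inside an extractor. A fragment sketched under the assumption of an 8-byte header whose first byte is a 0x47 sync byte, which is invented; the ExtractorUtil calls match the signatures introduced in this patch.)

    // Sketch: read a fixed-size header, returning quietly at end of input and
    // reporting malformed data as a "container malformed" ParserException.
    byte[] header = new byte[8];
    if (!ExtractorUtil.readFullyQuietly(input, header, /* offset= */ 0, /* length= */ 8)) {
      return false; // End of input before a complete header; not treated as an error here.
    }
    ExtractorUtil.checkContainerInput(header[0] == 0x47, "Expected sync byte not found");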
+ */ + public static boolean skipFullyQuietly(ExtractorInput input, int length) throws IOException { + try { + input.skipFully(length); + } catch (EOFException e) { + return false; + } + return true; + } + + /** + * Peeks data from {@code input}, respecting {@code allowEndOfInput}. Returns true if the peek is + * successful. + * + *

<p>If {@code allowEndOfInput=false} then encountering the end of the input (whether before or + * after reading some data) will throw {@link EOFException}. + *
+ * <p>If {@code allowEndOfInput=true} then encountering the end of the input (even after reading + * some data) will return {@code false}. + *
+ * <p>
      This is slightly different to the behaviour of {@link ExtractorInput#peekFully(byte[], int, + * int, boolean)}, where {@code allowEndOfInput=true} only returns false (and suppresses the + * exception) if the end of the input is reached before reading any data. + */ + public static boolean peekFullyQuietly( + ExtractorInput input, byte[] output, int offset, int length, boolean allowEndOfInput) + throws IOException { + try { + return input.peekFully(output, offset, length, /* allowEndOfInput= */ allowEndOfInput); + } catch (EOFException e) { + if (allowEndOfInput) { + return false; + } else { + throw e; + } + } + } + private ExtractorUtil() {} } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorsFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorsFactory.java index ee29f376a1..97ae74b9d2 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorsFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ExtractorsFactory.java @@ -15,9 +15,31 @@ */ package com.google.android.exoplayer2.extractor; +import android.net.Uri; +import java.util.List; +import java.util.Map; + /** Factory for arrays of {@link Extractor} instances. */ public interface ExtractorsFactory { + /** + * Extractor factory that returns an empty list of extractors. Can be used whenever {@link + * Extractor Extractors} are not required. + */ + ExtractorsFactory EMPTY = () -> new Extractor[] {}; + /** Returns an array of new {@link Extractor} instances. */ Extractor[] createExtractors(); + + /** + * Returns an array of new {@link Extractor} instances. + * + * @param uri The {@link Uri} of the media to extract. + * @param responseHeaders The response headers of the media to extract, or an empty map if there + * are none. The map lookup should be case-insensitive. + * @return The {@link Extractor} instances. 
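(Aside: two small illustrations of the interface additions above. The lambda is valid because createExtractors() remains the single abstract method, and the new Uri/headers overload falls back to it by default.)

    // A single-format factory: the Uri/headers overload delegates here by default.
    ExtractorsFactory mp3Only = () -> new Extractor[] {new Mp3Extractor()};

    // When extraction is never needed (e.g. media that is handled elsewhere):
    ExtractorsFactory none = ExtractorsFactory.EMPTY;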
+ */ + default Extractor[] createExtractors(Uri uri, Map> responseHeaders) { + return createExtractors(); + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/FlacFrameReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/FlacFrameReader.java index f014eaa565..aefc69c0d5 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/FlacFrameReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/FlacFrameReader.java @@ -16,8 +16,7 @@ package com.google.android.exoplayer2.extractor; import com.google.android.exoplayer2.ParserException; -import com.google.android.exoplayer2.util.FlacConstants; -import com.google.android.exoplayer2.util.FlacStreamMetadata; +import com.google.android.exoplayer2.extractor.flac.FlacConstants; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; import java.io.IOException; @@ -94,7 +93,7 @@ public static boolean checkFrameHeaderFromPeek( FlacStreamMetadata flacStreamMetadata, int frameStartMarker, SampleNumberHolder sampleNumberHolder) - throws IOException, InterruptedException { + throws IOException { long originalPeekPosition = input.getPeekPosition(); byte[] frameStartBytes = new byte[2]; @@ -108,10 +107,11 @@ public static boolean checkFrameHeaderFromPeek( ParsableByteArray scratch = new ParsableByteArray(FlacConstants.MAX_FRAME_HEADER_SIZE); System.arraycopy( - frameStartBytes, /* srcPos= */ 0, scratch.data, /* destPos= */ 0, /* length= */ 2); + frameStartBytes, /* srcPos= */ 0, scratch.getData(), /* destPos= */ 0, /* length= */ 2); int totalBytesPeeked = - ExtractorUtil.peekToLength(input, scratch.data, 2, FlacConstants.MAX_FRAME_HEADER_SIZE - 2); + ExtractorUtil.peekToLength( + input, scratch.getData(), 2, FlacConstants.MAX_FRAME_HEADER_SIZE - 2); scratch.setLimit(totalBytesPeeked); input.resetPeekPosition(); @@ -133,11 +133,9 @@ public static boolean checkFrameHeaderFromPeek( * @return The frame first sample number. * @throws ParserException If an error occurs parsing the sample number. * @throws IOException If peeking from the input fails. - * @throws InterruptedException If interrupted while peeking from input. */ public static long getFirstSampleNumber( - ExtractorInput input, FlacStreamMetadata flacStreamMetadata) - throws IOException, InterruptedException { + ExtractorInput input, FlacStreamMetadata flacStreamMetadata) throws IOException { input.resetPeekPosition(); input.advancePeekPosition(1); byte[] blockingStrategyByte = new byte[1]; @@ -148,14 +146,14 @@ public static long getFirstSampleNumber( int maxUtf8SampleNumberSize = isBlockSizeVariable ? 
7 : 6; ParsableByteArray scratch = new ParsableByteArray(maxUtf8SampleNumberSize); int totalBytesPeeked = - ExtractorUtil.peekToLength(input, scratch.data, 0, maxUtf8SampleNumberSize); + ExtractorUtil.peekToLength(input, scratch.getData(), 0, maxUtf8SampleNumberSize); scratch.setLimit(totalBytesPeeked); input.resetPeekPosition(); SampleNumberHolder sampleNumberHolder = new SampleNumberHolder(); if (!checkAndReadFirstSampleNumber( scratch, flacStreamMetadata, isBlockSizeVariable, sampleNumberHolder)) { - throw new ParserException(); + throw ParserException.createForMalformedContainer(/* message= */ null, /* cause= */ null); } return sampleNumberHolder.sampleNumber; @@ -328,7 +326,7 @@ private static boolean checkAndReadCrc(ParsableByteArray data, int frameStartPos int crc = data.readUnsignedByte(); int frameEndPosition = data.getPosition(); int expectedCrc = - Util.crc8(data.data, frameStartPosition, frameEndPosition - 1, /* initialValue= */ 0); + Util.crc8(data.getData(), frameStartPosition, frameEndPosition - 1, /* initialValue= */ 0); return crc == expectedCrc; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/FlacMetadataReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/FlacMetadataReader.java index 49d4558ddc..5d528dbb6f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/FlacMetadataReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/FlacMetadataReader.java @@ -16,20 +16,17 @@ package com.google.android.exoplayer2.extractor; import androidx.annotation.Nullable; -import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.extractor.VorbisUtil.CommentHeader; +import com.google.android.exoplayer2.extractor.flac.FlacConstants; import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.metadata.flac.PictureFrame; import com.google.android.exoplayer2.metadata.id3.Id3Decoder; -import com.google.android.exoplayer2.util.FlacConstants; -import com.google.android.exoplayer2.util.FlacStreamMetadata; import com.google.android.exoplayer2.util.ParsableBitArray; import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.common.collect.ImmutableList; import java.io.IOException; -import java.nio.charset.Charset; import java.util.Arrays; -import java.util.Collections; import java.util.List; /** @@ -61,12 +58,10 @@ public FlacStreamMetadataHolder(@Nullable FlacStreamMetadata flacStreamMetadata) * is {@code false}. * @throws IOException If peeking from the input fails. In this case, there is no guarantee on the * peek position. - * @throws InterruptedException If interrupted while peeking from input. In this case, there is no - * guarantee on the peek position. */ @Nullable public static Metadata peekId3Metadata(ExtractorInput input, boolean parseData) - throws IOException, InterruptedException { + throws IOException { @Nullable Id3Decoder.FramePredicate id3FramePredicate = parseData ? null : Id3Decoder.NO_FRAMES_PREDICATE; @Nullable Metadata id3Metadata = new Id3Peeker().peekId3Data(input, id3FramePredicate); @@ -80,13 +75,10 @@ public static Metadata peekId3Metadata(ExtractorInput input, boolean parseData) * @return Whether the data peeked is the FLAC stream marker. * @throws IOException If peeking from the input fails. In this case, the peek position is left * unchanged. - * @throws InterruptedException If interrupted while peeking from input. 
In this case, the peek - * position is left unchanged. */ - public static boolean checkAndPeekStreamMarker(ExtractorInput input) - throws IOException, InterruptedException { + public static boolean checkAndPeekStreamMarker(ExtractorInput input) throws IOException { ParsableByteArray scratch = new ParsableByteArray(FlacConstants.STREAM_MARKER_SIZE); - input.peekFully(scratch.data, 0, FlacConstants.STREAM_MARKER_SIZE); + input.peekFully(scratch.getData(), 0, FlacConstants.STREAM_MARKER_SIZE); return scratch.readUnsignedInt() == STREAM_MARKER; } @@ -102,12 +94,10 @@ public static boolean checkAndPeekStreamMarker(ExtractorInput input) * is {@code false}. * @throws IOException If reading from the input fails. In this case, the read position is left * unchanged and there is no guarantee on the peek position. - * @throws InterruptedException If interrupted while reading from input. In this case, the read - * position is left unchanged and there is no guarantee on the peek position. */ @Nullable public static Metadata readId3Metadata(ExtractorInput input, boolean parseData) - throws IOException, InterruptedException { + throws IOException { input.resetPeekPosition(); long startingPeekPosition = input.getPeekPosition(); @Nullable Metadata id3Metadata = peekId3Metadata(input, parseData); @@ -124,15 +114,13 @@ public static Metadata readId3Metadata(ExtractorInput input, boolean parseData) * position of {@code input} is advanced by {@link FlacConstants#STREAM_MARKER_SIZE} bytes. * @throws IOException If reading from the input fails. In this case, the position is left * unchanged. - * @throws InterruptedException If interrupted while reading from input. In this case, the - * position is left unchanged. */ - public static void readStreamMarker(ExtractorInput input) - throws IOException, InterruptedException { + public static void readStreamMarker(ExtractorInput input) throws IOException { ParsableByteArray scratch = new ParsableByteArray(FlacConstants.STREAM_MARKER_SIZE); - input.readFully(scratch.data, 0, FlacConstants.STREAM_MARKER_SIZE); + input.readFully(scratch.getData(), 0, FlacConstants.STREAM_MARKER_SIZE); if (scratch.readUnsignedInt() != STREAM_MARKER) { - throw new ParserException("Failed to read FLAC stream marker."); + throw ParserException.createForMalformedContainer( + "Failed to read FLAC stream marker.", /* cause= */ null); } } @@ -154,13 +142,9 @@ public static void readStreamMarker(ExtractorInput input) * start of a metadata block and there is no guarantee on the peek position. * @throws IOException If reading from the input fails. In this case, the read position will be at * the start of a metadata block and there is no guarantee on the peek position. - * @throws InterruptedException If interrupted while reading from input. In this case, the read - * position will be at the start of a metadata block and there is no guarantee on the peek - * position. 
*/ public static boolean readMetadataBlock( - ExtractorInput input, FlacStreamMetadataHolder metadataHolder) - throws IOException, InterruptedException { + ExtractorInput input, FlacStreamMetadataHolder metadataHolder) throws IOException { input.resetPeekPosition(); ParsableBitArray scratch = new ParsableBitArray(new byte[4]); input.peekFully(scratch.data, 0, FlacConstants.METADATA_BLOCK_HEADER_SIZE); @@ -171,7 +155,7 @@ public static boolean readMetadataBlock( if (type == FlacConstants.METADATA_TYPE_STREAM_INFO) { metadataHolder.flacStreamMetadata = readStreamInfoBlock(input); } else { - FlacStreamMetadata flacStreamMetadata = metadataHolder.flacStreamMetadata; + @Nullable FlacStreamMetadata flacStreamMetadata = metadataHolder.flacStreamMetadata; if (flacStreamMetadata == null) { throw new IllegalArgumentException(); } @@ -183,9 +167,12 @@ public static boolean readMetadataBlock( metadataHolder.flacStreamMetadata = flacStreamMetadata.copyWithVorbisComments(vorbisComments); } else if (type == FlacConstants.METADATA_TYPE_PICTURE) { - PictureFrame pictureFrame = readPictureMetadataBlock(input, length); + ParsableByteArray pictureBlock = new ParsableByteArray(length); + input.readFully(pictureBlock.getData(), 0, length); + pictureBlock.skipBytes(FlacConstants.METADATA_BLOCK_HEADER_SIZE); + PictureFrame pictureFrame = PictureFrame.fromPictureBlock(pictureBlock); metadataHolder.flacStreamMetadata = - flacStreamMetadata.copyWithPictureFrames(Collections.singletonList(pictureFrame)); + flacStreamMetadata.copyWithPictureFrames(ImmutableList.of(pictureFrame)); } else { input.skipFully(length); } @@ -208,7 +195,7 @@ public static FlacStreamMetadata.SeekTable readSeekTableMetadataBlock(ParsableBy data.skipBytes(1); int length = data.readUnsignedInt24(); - long seekTableEndPosition = data.getPosition() + length; + long seekTableEndPosition = (long) data.getPosition() + length; int seekPointCount = length / SEEK_POINT_SIZE; long[] pointSampleNumbers = new long[seekPointCount]; long[] pointOffsets = new long[seekPointCount]; @@ -240,27 +227,25 @@ public static FlacStreamMetadata.SeekTable readSeekTableMetadataBlock(ParsableBy * @return The frame start marker (which must be the same for all the frames in the stream). * @throws ParserException If an error occurs parsing the frame start marker. * @throws IOException If peeking from the input fails. - * @throws InterruptedException If interrupted while peeking from input. 
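(Aside: a sketch of the calling pattern these FLAC helpers assume, loosely following how a FLAC extractor initializes. readMetadataBlock is driven in a loop until it reports that the last block has been read, after which the shared frame start marker can be obtained.)

    // Sketch: consume the stream marker, accumulate metadata blocks into the holder,
    // then peek the frame start marker that all audio frames must share.
    FlacMetadataReader.readStreamMarker(input);
    FlacMetadataReader.FlacStreamMetadataHolder holder =
        new FlacMetadataReader.FlacStreamMetadataHolder(/* flacStreamMetadata= */ null);
    boolean isLastMetadataBlock = false;
    while (!isLastMetadataBlock) {
      isLastMetadataBlock = FlacMetadataReader.readMetadataBlock(input, holder);
    }
    int frameStartMarker = FlacMetadataReader.getFrameStartMarker(input);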
*/ - public static int getFrameStartMarker(ExtractorInput input) - throws IOException, InterruptedException { + public static int getFrameStartMarker(ExtractorInput input) throws IOException { input.resetPeekPosition(); ParsableByteArray scratch = new ParsableByteArray(2); - input.peekFully(scratch.data, 0, 2); + input.peekFully(scratch.getData(), 0, 2); int frameStartMarker = scratch.readUnsignedShort(); int syncCode = frameStartMarker >> 2; if (syncCode != SYNC_CODE) { input.resetPeekPosition(); - throw new ParserException("First frame does not start with sync code."); + throw ParserException.createForMalformedContainer( + "First frame does not start with sync code.", /* cause= */ null); } input.resetPeekPosition(); return frameStartMarker; } - private static FlacStreamMetadata readStreamInfoBlock(ExtractorInput input) - throws IOException, InterruptedException { + private static FlacStreamMetadata readStreamInfoBlock(ExtractorInput input) throws IOException { byte[] scratchData = new byte[FlacConstants.STREAM_INFO_BLOCK_SIZE]; input.readFully(scratchData, 0, FlacConstants.STREAM_INFO_BLOCK_SIZE); return new FlacStreamMetadata( @@ -268,16 +253,16 @@ private static FlacStreamMetadata readStreamInfoBlock(ExtractorInput input) } private static FlacStreamMetadata.SeekTable readSeekTableMetadataBlock( - ExtractorInput input, int length) throws IOException, InterruptedException { + ExtractorInput input, int length) throws IOException { ParsableByteArray scratch = new ParsableByteArray(length); - input.readFully(scratch.data, 0, length); + input.readFully(scratch.getData(), 0, length); return readSeekTableMetadataBlock(scratch); } private static List readVorbisCommentMetadataBlock(ExtractorInput input, int length) - throws IOException, InterruptedException { + throws IOException { ParsableByteArray scratch = new ParsableByteArray(length); - input.readFully(scratch.data, 0, length); + input.readFully(scratch.getData(), 0, length); scratch.skipBytes(FlacConstants.METADATA_BLOCK_HEADER_SIZE); CommentHeader commentHeader = VorbisUtil.readVorbisCommentHeader( @@ -285,28 +270,5 @@ private static List readVorbisCommentMetadataBlock(ExtractorInput input, return Arrays.asList(commentHeader.comments); } - private static PictureFrame readPictureMetadataBlock(ExtractorInput input, int length) - throws IOException, InterruptedException { - ParsableByteArray scratch = new ParsableByteArray(length); - input.readFully(scratch.data, 0, length); - scratch.skipBytes(FlacConstants.METADATA_BLOCK_HEADER_SIZE); - - int pictureType = scratch.readInt(); - int mimeTypeLength = scratch.readInt(); - String mimeType = scratch.readString(mimeTypeLength, Charset.forName(C.ASCII_NAME)); - int descriptionLength = scratch.readInt(); - String description = scratch.readString(descriptionLength); - int width = scratch.readInt(); - int height = scratch.readInt(); - int depth = scratch.readInt(); - int colors = scratch.readInt(); - int pictureDataLength = scratch.readInt(); - byte[] pictureData = new byte[pictureDataLength]; - scratch.readBytes(pictureData, 0, pictureDataLength); - - return new PictureFrame( - pictureType, mimeType, description, width, height, depth, colors, pictureData); - } - private FlacMetadataReader() {} } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/FlacSeekTableSeekMap.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/FlacSeekTableSeekMap.java index a711f09e2f..02ecc9e7b0 100644 --- 
a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/FlacSeekTableSeekMap.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/FlacSeekTableSeekMap.java @@ -17,7 +17,6 @@ import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.FlacStreamMetadata; import com.google.android.exoplayer2.util.Util; /** @@ -52,7 +51,7 @@ public long getDurationUs() { @Override public SeekPoints getSeekPoints(long timeUs) { - Assertions.checkNotNull(flacStreamMetadata.seekTable); + Assertions.checkStateNotNull(flacStreamMetadata.seekTable); long[] pointSampleNumbers = flacStreamMetadata.seekTable.pointSampleNumbers; long[] pointOffsets = flacStreamMetadata.seekTable.pointOffsets; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FlacStreamMetadata.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/FlacStreamMetadata.java similarity index 85% rename from TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FlacStreamMetadata.java rename to TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/FlacStreamMetadata.java index 470e82c13f..48e521b413 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FlacStreamMetadata.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/FlacStreamMetadata.java @@ -13,14 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.google.android.exoplayer2.util; +package com.google.android.exoplayer2.extractor; + +import static com.google.android.exoplayer2.extractor.VorbisUtil.parseVorbisComments; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.metadata.flac.PictureFrame; -import com.google.android.exoplayer2.metadata.flac.VorbisComment; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.ParsableBitArray; +import com.google.android.exoplayer2.util.Util; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -56,8 +60,6 @@ public SeekTable(long[] pointSampleNumbers, long[] pointOffsets) { /** Indicates that a value is not in the corresponding lookup table. */ public static final int NOT_IN_LOOKUP_TABLE = -1; - /** Separator between the field name of a Vorbis comment and the corresponding value. */ - private static final String SEPARATOR = "="; /** Minimum number of samples per block. */ public final int minBlockSizeSamples; @@ -145,7 +147,7 @@ public FlacStreamMetadata( bitsPerSample, totalSamples, /* seekTable= */ null, - buildMetadata(vorbisComments, pictureFrames)); + concatenateVorbisMetadata(vorbisComments, pictureFrames)); } private FlacStreamMetadata( @@ -178,8 +180,8 @@ public int getMaxDecodedFrameSize() { return maxBlockSizeSamples * channels * (bitsPerSample / 8); } - /** Returns the bit-rate of the FLAC stream. */ - public int getBitRate() { + /** Returns the bitrate of the stream after it's decoded into PCM. */ + public int getDecodedBitrate() { return bitsPerSample * sampleRate * channels; } @@ -235,23 +237,14 @@ public Format getFormat(byte[] streamMarkerAndInfoBlock, @Nullable Metadata id3M streamMarkerAndInfoBlock[4] = (byte) 0x80; int maxInputSize = maxFrameSize > 0 ? 
maxFrameSize : Format.NO_VALUE; @Nullable Metadata metadataWithId3 = getMetadataCopyWithAppendedEntriesFrom(id3Metadata); - - return Format.createAudioSampleFormat( - /* id= */ null, - MimeTypes.AUDIO_FLAC, - /* codecs= */ null, - getBitRate(), - maxInputSize, - channels, - sampleRate, - /* pcmEncoding= */ Format.NO_VALUE, - /* encoderDelay= */ 0, - /* encoderPadding= */ 0, - /* initializationData= */ Collections.singletonList(streamMarkerAndInfoBlock), - /* drmInitData= */ null, - /* selectionFlags= */ 0, - /* language= */ null, - metadataWithId3); + return new Format.Builder() + .setSampleMimeType(MimeTypes.AUDIO_FLAC) + .setMaxInputSize(maxInputSize) + .setChannelCount(channels) + .setSampleRate(sampleRate) + .setInitializationData(Collections.singletonList(streamMarkerAndInfoBlock)) + .setMetadata(metadataWithId3) + .build(); } /** Returns a copy of the content metadata with entries from {@code other} appended. */ @@ -279,8 +272,7 @@ public FlacStreamMetadata copyWithSeekTable(@Nullable SeekTable seekTable) { public FlacStreamMetadata copyWithVorbisComments(List vorbisComments) { @Nullable Metadata appendedMetadata = - getMetadataCopyWithAppendedEntriesFrom( - buildMetadata(vorbisComments, Collections.emptyList())); + getMetadataCopyWithAppendedEntriesFrom(parseVorbisComments(vorbisComments)); return new FlacStreamMetadata( minBlockSizeSamples, maxBlockSizeSamples, @@ -297,9 +289,7 @@ public FlacStreamMetadata copyWithVorbisComments(List vorbisComments) { /** Returns a copy of {@code this} with the given picture frames added to the metadata. */ public FlacStreamMetadata copyWithPictureFrames(List pictureFrames) { @Nullable - Metadata appendedMetadata = - getMetadataCopyWithAppendedEntriesFrom( - buildMetadata(Collections.emptyList(), pictureFrames)); + Metadata appendedMetadata = getMetadataCopyWithAppendedEntriesFrom(new Metadata(pictureFrames)); return new FlacStreamMetadata( minBlockSizeSamples, maxBlockSizeSamples, @@ -313,6 +303,20 @@ public FlacStreamMetadata copyWithPictureFrames(List pictureFrames appendedMetadata); } + /** + * Returns a new {@link Metadata} instance created from {@code vorbisComments} and {@code + * pictureFrames}. + */ + @Nullable + private static Metadata concatenateVorbisMetadata( + List vorbisComments, List pictureFrames) { + @Nullable Metadata parsedVorbisComments = parseVorbisComments(vorbisComments); + if (parsedVorbisComments == null && pictureFrames.isEmpty()) { + return null; + } + return new Metadata(pictureFrames).copyWithAppendedEntriesFrom(parsedVorbisComments); + } + private static int getSampleRateLookupKey(int sampleRate) { switch (sampleRate) { case 88200: @@ -358,27 +362,4 @@ private static int getBitsPerSampleLookupKey(int bitsPerSample) { return NOT_IN_LOOKUP_TABLE; } } - - @Nullable - private static Metadata buildMetadata( - List vorbisComments, List pictureFrames) { - if (vorbisComments.isEmpty() && pictureFrames.isEmpty()) { - return null; - } - - ArrayList metadataEntries = new ArrayList<>(); - for (int i = 0; i < vorbisComments.size(); i++) { - String vorbisComment = vorbisComments.get(i); - String[] keyAndValue = Util.splitAtFirst(vorbisComment, SEPARATOR); - if (keyAndValue.length != 2) { - Log.w(TAG, "Failed to parse Vorbis comment: " + vorbisComment); - } else { - VorbisComment entry = new VorbisComment(keyAndValue[0], keyAndValue[1]); - metadataEntries.add(entry); - } - } - metadataEntries.addAll(pictureFrames); - - return metadataEntries.isEmpty() ? 
null : new Metadata(metadataEntries); - } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ForwardingExtractorInput.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ForwardingExtractorInput.java new file mode 100644 index 0000000000..c70202d137 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ForwardingExtractorInput.java @@ -0,0 +1,110 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.extractor; + +import java.io.IOException; + +/** An overridable {@link ExtractorInput} implementation forwarding all methods to another input. */ +public class ForwardingExtractorInput implements ExtractorInput { + + private final ExtractorInput input; + + public ForwardingExtractorInput(ExtractorInput input) { + this.input = input; + } + + @Override + public int read(byte[] buffer, int offset, int length) throws IOException { + return input.read(buffer, offset, length); + } + + @Override + public boolean readFully(byte[] target, int offset, int length, boolean allowEndOfInput) + throws IOException { + return input.readFully(target, offset, length, allowEndOfInput); + } + + @Override + public void readFully(byte[] target, int offset, int length) throws IOException { + input.readFully(target, offset, length); + } + + @Override + public int skip(int length) throws IOException { + return input.skip(length); + } + + @Override + public boolean skipFully(int length, boolean allowEndOfInput) throws IOException { + return input.skipFully(length, allowEndOfInput); + } + + @Override + public void skipFully(int length) throws IOException { + input.skipFully(length); + } + + @Override + public int peek(byte[] target, int offset, int length) throws IOException { + return input.peek(target, offset, length); + } + + @Override + public boolean peekFully(byte[] target, int offset, int length, boolean allowEndOfInput) + throws IOException { + return input.peekFully(target, offset, length, allowEndOfInput); + } + + @Override + public void peekFully(byte[] target, int offset, int length) throws IOException { + input.peekFully(target, offset, length); + } + + @Override + public boolean advancePeekPosition(int length, boolean allowEndOfInput) throws IOException { + return input.advancePeekPosition(length, allowEndOfInput); + } + + @Override + public void advancePeekPosition(int length) throws IOException { + input.advancePeekPosition(length); + } + + @Override + public void resetPeekPosition() { + input.resetPeekPosition(); + } + + @Override + public long getPeekPosition() { + return input.getPeekPosition(); + } + + @Override + public long getPosition() { + return input.getPosition(); + } + + @Override + public long getLength() { + return input.getLength(); + } + + @Override + public void setRetryPosition(long position, E e) throws E { + input.setRetryPosition(position, e); + } +} diff --git 
a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/GaplessInfoHolder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/GaplessInfoHolder.java index a0effc0df8..0bf6b146d5 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/GaplessInfoHolder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/GaplessInfoHolder.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.extractor; +import static com.google.android.exoplayer2.util.Util.castNonNull; + import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.metadata.id3.CommentFrame; @@ -22,9 +24,7 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -/** - * Holder for gapless playback information. - */ +/** Holder for gapless playback information. */ public final class GaplessInfoHolder { private static final String GAPLESS_DOMAIN = "com.apple.iTunes"; @@ -33,20 +33,18 @@ public final class GaplessInfoHolder { Pattern.compile("^ [0-9a-fA-F]{8} ([0-9a-fA-F]{8}) ([0-9a-fA-F]{8})"); /** - * The number of samples to trim from the start of the decoded audio stream, or - * {@link Format#NO_VALUE} if not set. + * The number of samples to trim from the start of the decoded audio stream, or {@link + * Format#NO_VALUE} if not set. */ public int encoderDelay; /** - * The number of samples to trim from the end of the decoded audio stream, or - * {@link Format#NO_VALUE} if not set. + * The number of samples to trim from the end of the decoded audio stream, or {@link + * Format#NO_VALUE} if not set. */ public int encoderPadding; - /** - * Creates a new holder for gapless playback information. - */ + /** Creates a new holder for gapless playback information. */ public GaplessInfoHolder() { encoderDelay = Format.NO_VALUE; encoderPadding = Format.NO_VALUE; @@ -107,8 +105,8 @@ private boolean setFromComment(String data) { Matcher matcher = GAPLESS_COMMENT_PATTERN.matcher(data); if (matcher.find()) { try { - int encoderDelay = Integer.parseInt(matcher.group(1), 16); - int encoderPadding = Integer.parseInt(matcher.group(2), 16); + int encoderDelay = Integer.parseInt(castNonNull(matcher.group(1)), 16); + int encoderPadding = Integer.parseInt(castNonNull(matcher.group(2)), 16); if (encoderDelay > 0 || encoderPadding > 0) { this.encoderDelay = encoderDelay; this.encoderPadding = encoderPadding; @@ -121,11 +119,8 @@ private boolean setFromComment(String data) { return false; } - /** - * Returns whether {@link #encoderDelay} and {@link #encoderPadding} have been set. - */ + /** Returns whether {@link #encoderDelay} and {@link #encoderPadding} have been set. */ public boolean hasGaplessInfo() { return encoderDelay != Format.NO_VALUE && encoderPadding != Format.NO_VALUE; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/Id3Peeker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/Id3Peeker.java index 60386dcc3c..3c78f7a7dd 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/Id3Peeker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/Id3Peeker.java @@ -43,17 +43,16 @@ public Id3Peeker() { * @return The first ID3 tag decoded into a {@link Metadata} object. May be null if ID3 tag is not * present in the input. * @throws IOException If an error occurred peeking from the input. - * @throws InterruptedException If the thread was interrupted. 
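(Aside: a worked example of the gapless comment parsing above, assuming java.util.regex imports; the hex values are invented. The com.apple.iTunes gapless comment carries encoder delay and padding as the second and third 8-digit hex fields, which GAPLESS_COMMENT_PATTERN captures.)

    // Worked example with made-up values: delay = 0x840 = 2112 samples,
    // padding = 0x1C0 = 448 samples.
    Matcher matcher =
        Pattern.compile("^ [0-9a-fA-F]{8} ([0-9a-fA-F]{8}) ([0-9a-fA-F]{8})")
            .matcher(" 00000000 00000840 000001C0 0000000000A2F000");
    if (matcher.find()) {
      int encoderDelay = Integer.parseInt(matcher.group(1), 16);   // 2112
      int encoderPadding = Integer.parseInt(matcher.group(2), 16); // 448
    }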
*/ @Nullable public Metadata peekId3Data( ExtractorInput input, @Nullable Id3Decoder.FramePredicate id3FramePredicate) - throws IOException, InterruptedException { + throws IOException { int peekedId3Bytes = 0; - Metadata metadata = null; + @Nullable Metadata metadata = null; while (true) { try { - input.peekFully(scratch.data, /* offset= */ 0, Id3Decoder.ID3_HEADER_LENGTH); + input.peekFully(scratch.getData(), /* offset= */ 0, Id3Decoder.ID3_HEADER_LENGTH); } catch (EOFException e) { // If input has less than ID3_HEADER_LENGTH, ignore the rest. break; @@ -69,7 +68,7 @@ public Metadata peekId3Data( if (metadata == null) { byte[] id3Data = new byte[tagLength]; - System.arraycopy(scratch.data, 0, id3Data, 0, Id3Decoder.ID3_HEADER_LENGTH); + System.arraycopy(scratch.getData(), 0, id3Data, 0, Id3Decoder.ID3_HEADER_LENGTH); input.peekFully(id3Data, Id3Decoder.ID3_HEADER_LENGTH, framesLength); metadata = new Id3Decoder(id3FramePredicate).decode(id3Data, tagLength); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/IndexSeekMap.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/IndexSeekMap.java new file mode 100644 index 0000000000..df56072eb3 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/IndexSeekMap.java @@ -0,0 +1,85 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.extractor; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.util.Util; + +/** + * A {@link SeekMap} implementation based on a mapping between times and positions in the input + * stream. + */ +public final class IndexSeekMap implements SeekMap { + + private final long[] positions; + private final long[] timesUs; + private final long durationUs; + private final boolean isSeekable; + + /** + * Creates an instance. + * + * @param positions The positions in the stream corresponding to {@code timesUs}, in bytes. + * @param timesUs The times corresponding to {@code positions}, in microseconds. + * @param durationUs The duration of the input stream, or {@link C#TIME_UNSET} if it is unknown. + */ + public IndexSeekMap(long[] positions, long[] timesUs, long durationUs) { + checkArgument(positions.length == timesUs.length); + int length = timesUs.length; + isSeekable = length > 0; + if (isSeekable && timesUs[0] > 0) { + // Add (position = 0, timeUs = 0) as first entry. 
+ this.positions = new long[length + 1]; + this.timesUs = new long[length + 1]; + System.arraycopy(positions, 0, this.positions, 1, length); + System.arraycopy(timesUs, 0, this.timesUs, 1, length); + } else { + this.positions = positions; + this.timesUs = timesUs; + } + this.durationUs = durationUs; + } + + @Override + public boolean isSeekable() { + return isSeekable; + } + + @Override + public long getDurationUs() { + return durationUs; + } + + @Override + public SeekMap.SeekPoints getSeekPoints(long timeUs) { + if (!isSeekable) { + return new SeekMap.SeekPoints(SeekPoint.START); + } + int targetIndex = + Util.binarySearchFloor(timesUs, timeUs, /* inclusive= */ true, /* stayInBounds= */ true); + SeekPoint leftSeekPoint = new SeekPoint(timesUs[targetIndex], positions[targetIndex]); + if (leftSeekPoint.timeUs == timeUs || targetIndex == timesUs.length - 1) { + return new SeekMap.SeekPoints(leftSeekPoint); + } else { + SeekPoint rightSeekPoint = + new SeekPoint(timesUs[targetIndex + 1], positions[targetIndex + 1]); + return new SeekMap.SeekPoints(leftSeekPoint, rightSeekPoint); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/MpegAudioHeader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/MpegAudioHeader.java deleted file mode 100644 index b3155233d0..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/MpegAudioHeader.java +++ /dev/null @@ -1,275 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.extractor; - -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.util.MimeTypes; - -/** - * An MPEG audio frame header. - */ -public final class MpegAudioHeader { - - /** - * Theoretical maximum frame size for an MPEG audio stream, which occurs when playing a Layer 2 - * MPEG 2.5 audio stream at 16 kb/s (with padding). The size is 1152 sample/frame * - * 160000 bit/s / (8000 sample/s * 8 bit/byte) + 1 padding byte/frame = 2881 byte/frame. - * The next power of two size is 4 KiB. 
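The seek-point selection in the IndexSeekMap added above is a floor binary search over timesUs, returning either an exact point or the two bracketing points. A small usage sketch; the byte positions and timestamps below are made-up values, not from any real stream:

```java
import com.google.android.exoplayer2.extractor.IndexSeekMap;
import com.google.android.exoplayer2.extractor.SeekMap;

final class IndexSeekMapExample {
  public static void main(String[] args) {
    long[] positions = {0L, 10_000L, 20_000L, 30_000L};
    long[] timesUs = {0L, 1_000_000L, 2_000_000L, 3_000_000L};
    IndexSeekMap seekMap = new IndexSeekMap(positions, timesUs, /* durationUs= */ 4_000_000L);

    // 1.5s lies between the 1s and 2s entries, so getSeekPoints returns both
    // bracketing points: the floor entry first, the next entry second.
    SeekMap.SeekPoints points = seekMap.getSeekPoints(1_500_000L);
    System.out.println(points.first.timeUs + " @ " + points.first.position);   // 1000000 @ 10000
    System.out.println(points.second.timeUs + " @ " + points.second.position); // 2000000 @ 20000
  }
}
```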
- */ - public static final int MAX_FRAME_SIZE_BYTES = 4096; - - private static final String[] MIME_TYPE_BY_LAYER = - new String[] {MimeTypes.AUDIO_MPEG_L1, MimeTypes.AUDIO_MPEG_L2, MimeTypes.AUDIO_MPEG}; - private static final int[] SAMPLING_RATE_V1 = {44100, 48000, 32000}; - private static final int[] BITRATE_V1_L1 = { - 32000, 64000, 96000, 128000, 160000, 192000, 224000, 256000, 288000, 320000, 352000, 384000, - 416000, 448000 - }; - private static final int[] BITRATE_V2_L1 = { - 32000, 48000, 56000, 64000, 80000, 96000, 112000, 128000, 144000, 160000, 176000, 192000, - 224000, 256000 - }; - private static final int[] BITRATE_V1_L2 = { - 32000, 48000, 56000, 64000, 80000, 96000, 112000, 128000, 160000, 192000, 224000, 256000, - 320000, 384000 - }; - private static final int[] BITRATE_V1_L3 = { - 32000, 40000, 48000, 56000, 64000, 80000, 96000, 112000, 128000, 160000, 192000, 224000, 256000, - 320000 - }; - private static final int[] BITRATE_V2 = { - 8000, 16000, 24000, 32000, 40000, 48000, 56000, 64000, 80000, 96000, 112000, 128000, 144000, - 160000 - }; - - private static final int SAMPLES_PER_FRAME_L1 = 384; - private static final int SAMPLES_PER_FRAME_L2 = 1152; - private static final int SAMPLES_PER_FRAME_L3_V1 = 1152; - private static final int SAMPLES_PER_FRAME_L3_V2 = 576; - - /** - * Returns the size of the frame associated with {@code header}, or {@link C#LENGTH_UNSET} if it - * is invalid. - */ - public static int getFrameSize(int header) { - if (!isMagicPresent(header)) { - return C.LENGTH_UNSET; - } - - int version = (header >>> 19) & 3; - if (version == 1) { - return C.LENGTH_UNSET; - } - - int layer = (header >>> 17) & 3; - if (layer == 0) { - return C.LENGTH_UNSET; - } - - int bitrateIndex = (header >>> 12) & 15; - if (bitrateIndex == 0 || bitrateIndex == 0xF) { - // Disallow "free" bitrate. - return C.LENGTH_UNSET; - } - - int samplingRateIndex = (header >>> 10) & 3; - if (samplingRateIndex == 3) { - return C.LENGTH_UNSET; - } - - int samplingRate = SAMPLING_RATE_V1[samplingRateIndex]; - if (version == 2) { - // Version 2 - samplingRate /= 2; - } else if (version == 0) { - // Version 2.5 - samplingRate /= 4; - } - - int bitrate; - int padding = (header >>> 9) & 1; - if (layer == 3) { - // Layer I (layer == 3) - bitrate = version == 3 ? BITRATE_V1_L1[bitrateIndex - 1] : BITRATE_V2_L1[bitrateIndex - 1]; - return (12 * bitrate / samplingRate + padding) * 4; - } else { - // Layer II (layer == 2) or III (layer == 1) - if (version == 3) { - bitrate = layer == 2 ? BITRATE_V1_L2[bitrateIndex - 1] : BITRATE_V1_L3[bitrateIndex - 1]; - } else { - // Version 2 or 2.5. - bitrate = BITRATE_V2[bitrateIndex - 1]; - } - } - - if (version == 3) { - // Version 1 - return 144 * bitrate / samplingRate + padding; - } else { - // Version 2 or 2.5 - return (layer == 1 ? 72 : 144) * bitrate / samplingRate + padding; - } - } - - /** - * Returns the number of samples per frame associated with {@code header}, or {@link - * C#LENGTH_UNSET} if it is invalid. 
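For reference, the MPEG-1 Layer III branch of the frame-size computation in the MpegAudioHeader class deleted here works out as follows for a common stream configuration; the numbers are a worked example, not taken from the patch:

```java
final class Mp3FrameSizeExample {
  public static void main(String[] args) {
    // Removed getFrameSize(), MPEG-1 Layer III branch: 144 * bitrate / samplingRate + padding.
    int bitrate = 128_000;     // 128 kbit/s
    int samplingRate = 44_100; // 44.1 kHz
    int padding = 0;           // padding bit clear
    int frameSize = 144 * bitrate / samplingRate + padding;
    System.out.println(frameSize); // 417 bytes (418 when the padding bit is set)
  }
}
```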
- */ - public static int getFrameSampleCount(int header) { - - if (!isMagicPresent(header)) { - return C.LENGTH_UNSET; - } - - int version = (header >>> 19) & 3; - if (version == 1) { - return C.LENGTH_UNSET; - } - - int layer = (header >>> 17) & 3; - if (layer == 0) { - return C.LENGTH_UNSET; - } - - // Those header values are not used but are checked for consistency with the other methods - int bitrateIndex = (header >>> 12) & 15; - int samplingRateIndex = (header >>> 10) & 3; - if (bitrateIndex == 0 || bitrateIndex == 0xF || samplingRateIndex == 3) { - return C.LENGTH_UNSET; - } - - return getFrameSizeInSamples(version, layer); - } - - /** - * Parses {@code headerData}, populating {@code header} with the parsed data. - * - * @param headerData Header data to parse. - * @param header Header to populate with data from {@code headerData}. - * @return True if the header was populated. False otherwise, indicating that {@code headerData} - * is not a valid MPEG audio header. - */ - public static boolean populateHeader(int headerData, MpegAudioHeader header) { - if (!isMagicPresent(headerData)) { - return false; - } - - int version = (headerData >>> 19) & 3; - if (version == 1) { - return false; - } - - int layer = (headerData >>> 17) & 3; - if (layer == 0) { - return false; - } - - int bitrateIndex = (headerData >>> 12) & 15; - if (bitrateIndex == 0 || bitrateIndex == 0xF) { - // Disallow "free" bitrate. - return false; - } - - int samplingRateIndex = (headerData >>> 10) & 3; - if (samplingRateIndex == 3) { - return false; - } - - int sampleRate = SAMPLING_RATE_V1[samplingRateIndex]; - if (version == 2) { - // Version 2 - sampleRate /= 2; - } else if (version == 0) { - // Version 2.5 - sampleRate /= 4; - } - - int padding = (headerData >>> 9) & 1; - int bitrate; - int frameSize; - int samplesPerFrame = getFrameSizeInSamples(version, layer); - if (layer == 3) { - // Layer I (layer == 3) - bitrate = version == 3 ? BITRATE_V1_L1[bitrateIndex - 1] : BITRATE_V2_L1[bitrateIndex - 1]; - frameSize = (12 * bitrate / sampleRate + padding) * 4; - } else { - // Layer II (layer == 2) or III (layer == 1) - if (version == 3) { - // Version 1 - bitrate = layer == 2 ? BITRATE_V1_L2[bitrateIndex - 1] : BITRATE_V1_L3[bitrateIndex - 1]; - frameSize = 144 * bitrate / sampleRate + padding; - } else { - // Version 2 or 2.5. - bitrate = BITRATE_V2[bitrateIndex - 1]; - frameSize = (layer == 1 ? 72 : 144) * bitrate / sampleRate + padding; - } - } - - String mimeType = MIME_TYPE_BY_LAYER[3 - layer]; - int channels = ((headerData >> 6) & 3) == 3 ? 1 : 2; - header.setValues(version, mimeType, frameSize, sampleRate, channels, bitrate, samplesPerFrame); - return true; - } - - private static boolean isMagicPresent(int header) { - return (header & 0xFFE00000) == 0xFFE00000; - } - - private static int getFrameSizeInSamples(int version, int layer) { - switch (layer) { - case 1: - return version == 3 ? SAMPLES_PER_FRAME_L3_V1 : SAMPLES_PER_FRAME_L3_V2; // Layer III - case 2: - return SAMPLES_PER_FRAME_L2; // Layer II - case 3: - return SAMPLES_PER_FRAME_L1; // Layer I - } - throw new IllegalArgumentException(); - } - - /** MPEG audio header version. */ - public int version; - /** The mime type. */ - @Nullable public String mimeType; - /** Size of the frame associated with this header, in bytes. */ - public int frameSize; - /** Sample rate in samples per second. */ - public int sampleRate; - /** Number of audio channels in the frame. */ - public int channels; - /** Bitrate of the frame in bit/s. 
*/ - public int bitrate; - /** Number of samples stored in the frame. */ - public int samplesPerFrame; - - private void setValues( - int version, - String mimeType, - int frameSize, - int sampleRate, - int channels, - int bitrate, - int samplesPerFrame) { - this.version = version; - this.mimeType = mimeType; - this.frameSize = frameSize; - this.sampleRate = sampleRate; - this.channels = channels; - this.bitrate = bitrate; - this.samplesPerFrame = samplesPerFrame; - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/PositionHolder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/PositionHolder.java index d1f5d76468..94ca7282d3 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/PositionHolder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/PositionHolder.java @@ -15,14 +15,9 @@ */ package com.google.android.exoplayer2.extractor; -/** - * Holds a position in the stream. - */ +/** Holds a position in the stream. */ public final class PositionHolder { - /** - * The held position. - */ + /** The held position. */ public long position; - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/SeekMap.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/SeekMap.java index 15a98ab5ad..5b117595e1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/SeekMap.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/SeekMap.java @@ -73,7 +73,9 @@ final class SeekPoints { /** The second seek point, or {@link #first} if there's only one seek point. */ public final SeekPoint second; - /** @param point The single seek point. */ + /** + * @param point The single seek point. + */ public SeekPoints(SeekPoint point) { this(point, point); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/TrackOutput.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/TrackOutput.java index 0d5a168197..d1e29d73d6 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/TrackOutput.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/TrackOutput.java @@ -15,32 +15,32 @@ */ package com.google.android.exoplayer2.extractor; +import static java.lang.annotation.ElementType.TYPE_USE; + +import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.upstream.DataReader; import com.google.android.exoplayer2.util.ParsableByteArray; import java.io.EOFException; import java.io.IOException; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.Arrays; -/** - * Receives track level data extracted by an {@link Extractor}. - */ +/** Receives track level data extracted by an {@link Extractor}. */ public interface TrackOutput { - /** - * Holds data required to decrypt a sample. - */ + /** Holds data required to decrypt a sample. */ final class CryptoData { - /** - * The encryption mode used for the sample. - */ - @C.CryptoMode public final int cryptoMode; + /** The encryption mode used for the sample. */ + public final @C.CryptoMode int cryptoMode; - /** - * The encryption key associated with the sample. Its contents must not be modified. 
- */ + /** The encryption key associated with the sample. Its contents must not be modified. */ public final byte[] encryptionKey; /** @@ -50,8 +50,7 @@ final class CryptoData { public final int encryptedBlocks; /** - * The number of clear blocks in the encryption pattern, 0 if pattern encryption does not - * apply. + * The number of clear blocks in the encryption pattern, 0 if pattern encryption does not apply. */ public final int clearBlocks; @@ -61,8 +60,8 @@ final class CryptoData { * @param encryptedBlocks See {@link #encryptedBlocks}. * @param clearBlocks See {@link #clearBlocks}. */ - public CryptoData(@C.CryptoMode int cryptoMode, byte[] encryptionKey, int encryptedBlocks, - int clearBlocks) { + public CryptoData( + @C.CryptoMode int cryptoMode, byte[] encryptionKey, int encryptedBlocks, int clearBlocks) { this.cryptoMode = cryptoMode; this.encryptionKey = encryptionKey; this.encryptedBlocks = encryptedBlocks; @@ -78,8 +77,10 @@ public boolean equals(@Nullable Object obj) { return false; } CryptoData other = (CryptoData) obj; - return cryptoMode == other.cryptoMode && encryptedBlocks == other.encryptedBlocks - && clearBlocks == other.clearBlocks && Arrays.equals(encryptionKey, other.encryptionKey); + return cryptoMode == other.cryptoMode + && encryptedBlocks == other.encryptedBlocks + && clearBlocks == other.clearBlocks + && Arrays.equals(encryptionKey, other.encryptionKey); } @Override @@ -90,9 +91,57 @@ public int hashCode() { result = 31 * result + clearBlocks; return result; } - } + /** Defines the part of the sample data to which a call to {@link #sampleData} corresponds. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({SAMPLE_DATA_PART_MAIN, SAMPLE_DATA_PART_ENCRYPTION, SAMPLE_DATA_PART_SUPPLEMENTAL}) + @interface SampleDataPart {} + + /** Main media sample data. */ + int SAMPLE_DATA_PART_MAIN = 0; + /** + * Sample encryption data. + * + *

<p>The format for encryption information is: + * + * <ul> + *   <li>(1 byte) {@code encryption_signal_byte}: Most significant bit signals whether the + *       encryption data contains subsample encryption data. The remaining bits contain {@code + *       initialization_vector_size}. + *   <li>({@code initialization_vector_size} bytes) Initialization vector. + *   <li>If subsample encryption data is present, as per {@code encryption_signal_byte}, the + *       encryption data also contains: + *       <ul> + *         <li>(2 bytes) {@code subsample_encryption_data_length}. + *         <li>({@code subsample_encryption_data_length * 6} bytes) Subsample encryption data + *             (repeated {@code subsample_encryption_data_length} times: + *             <ul> + *               <li>(2 bytes) Size of a clear section in sample. + *               <li>(4 bytes) Size of an encryption section in sample. + *             </ul> + *       </ul> + * </ul> + */ + int SAMPLE_DATA_PART_ENCRYPTION = 1; + /** + * Sample supplemental data. + * + * <p>If a sample contains supplemental data, the format of the entire sample data will be: + * + * <ul> + *   <li>If the sample has the {@link C#BUFFER_FLAG_ENCRYPTED} flag set, all encryption + *       information. + *   <li>(4 bytes) {@code sample_data_size}: The size of the actual sample data, not including + *       supplemental data or encryption information. + *   <li>({@code sample_data_size} bytes): The media sample data. + *   <li>(remaining bytes) The supplemental data. + * </ul>
      + */ + int SAMPLE_DATA_PART_SUPPLEMENTAL = 2; + /** * Called when the {@link Format} of the track has been extracted from the stream. * @@ -100,48 +149,61 @@ public int hashCode() { */ void format(Format format); + /** + * Equivalent to {@link #sampleData(DataReader, int, boolean, int) sampleData(input, length, + * allowEndOfInput, SAMPLE_DATA_PART_MAIN)}. + */ + default int sampleData(DataReader input, int length, boolean allowEndOfInput) throws IOException { + return sampleData(input, length, allowEndOfInput, SAMPLE_DATA_PART_MAIN); + } + + /** + * Equivalent to {@link #sampleData(ParsableByteArray, int, int)} sampleData(data, length, + * SAMPLE_DATA_PART_MAIN)}. + */ + default void sampleData(ParsableByteArray data, int length) { + sampleData(data, length, SAMPLE_DATA_PART_MAIN); + } + /** * Called to write sample data to the output. * - * @param input An {@link ExtractorInput} from which to read the sample data. + * @param input A {@link DataReader} from which to read the sample data. * @param length The maximum length to read from the input. * @param allowEndOfInput True if encountering the end of the input having read no data is * allowed, and should result in {@link C#RESULT_END_OF_INPUT} being returned. False if it * should be considered an error, causing an {@link EOFException} to be thrown. + * @param sampleDataPart The part of the sample data to which this call corresponds. * @return The number of bytes appended. * @throws IOException If an error occurred reading from the input. - * @throws InterruptedException If the thread was interrupted. */ - int sampleData(ExtractorInput input, int length, boolean allowEndOfInput) - throws IOException, InterruptedException; + int sampleData( + DataReader input, int length, boolean allowEndOfInput, @SampleDataPart int sampleDataPart) + throws IOException; /** * Called to write sample data to the output. * * @param data A {@link ParsableByteArray} from which to read the sample data. * @param length The number of bytes to read, starting from {@code data.getPosition()}. + * @param sampleDataPart The part of the sample data to which this call corresponds. */ - void sampleData(ParsableByteArray data, int length); + void sampleData(ParsableByteArray data, int length, @SampleDataPart int sampleDataPart); /** * Called when metadata associated with a sample has been extracted from the stream. * *
<p>
      The corresponding sample data will have already been passed to the output via calls to - * {@link #sampleData(ExtractorInput, int, boolean)} or {@link #sampleData(ParsableByteArray, - * int)}. + * {@link #sampleData(DataReader, int, boolean)} or {@link #sampleData(ParsableByteArray, int)}. * * @param timeUs The media timestamp associated with the sample, in microseconds. * @param flags Flags associated with the sample. See {@code C.BUFFER_FLAG_*}. * @param size The size of the sample data, in bytes. - * @param offset The number of bytes that have been passed to {@link #sampleData(ExtractorInput, - * int, boolean)} or {@link #sampleData(ParsableByteArray, int)} since the last byte belonging - * to the sample whose metadata is being passed. - * @param encryptionData The encryption data required to decrypt the sample. May be null. + * @param offset The number of bytes that have been passed to {@link #sampleData(DataReader, int, + * boolean)} or {@link #sampleData(ParsableByteArray, int)} since the last byte belonging to + * the sample whose metadata is being passed. + * @param cryptoData The encryption data required to decrypt the sample. May be null. */ void sampleMetadata( - long timeUs, - @C.BufferFlags int flags, - int size, - int offset, - @Nullable CryptoData encryptionData); + long timeUs, @C.BufferFlags int flags, int size, int offset, @Nullable CryptoData cryptoData); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/TrueHdSampleRechunker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/TrueHdSampleRechunker.java new file mode 100644 index 0000000000..1596f7228c --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/TrueHdSampleRechunker.java @@ -0,0 +1,93 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.extractor; + +import static com.google.android.exoplayer2.util.Assertions.checkState; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.audio.Ac3Util; +import java.io.IOException; + +/** + * Rechunks TrueHD sample data into groups of {@link Ac3Util#TRUEHD_RECHUNK_SAMPLE_COUNT} samples. 
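The TrueHdSampleRechunker whose Javadoc ends just above (and whose implementation follows) is driven by the hosting extractor. A minimal sketch of the expected call pattern; the class and method names are illustrative, and writing the actual sample bytes to the TrackOutput via sampleData is assumed to happen elsewhere:

```java
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.extractor.ExtractorInput;
import com.google.android.exoplayer2.extractor.TrackOutput;
import com.google.android.exoplayer2.extractor.TrueHdSampleRechunker;
import java.io.IOException;

// Sketch only: one beforeSample() per TrueHD sample, one afterSample() once its bytes
// have been written, and a final flush when the track ends.
final class TrueHdTrackWriter {

  private final TrueHdSampleRechunker rechunker = new TrueHdSampleRechunker();

  void beforeSample(ExtractorInput input) throws IOException {
    rechunker.startSample(input); // Peeks the syncframe prefix; no-op once it has been found.
  }

  void afterSample(TrackOutput trackOutput, long timeUs, int size) {
    // Metadata is buffered until TRUEHD_RECHUNK_SAMPLE_COUNT samples have accumulated.
    rechunker.sampleMetadata(
        trackOutput, timeUs, C.BUFFER_FLAG_KEY_FRAME, size, /* offset= */ 0, /* cryptoData= */ null);
  }

  void onTrackEnd(TrackOutput trackOutput) {
    // Flush any partially filled chunk so its samples are not dropped.
    rechunker.outputPendingSampleMetadata(trackOutput, /* cryptoData= */ null);
  }
}
```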
+ */ +public final class TrueHdSampleRechunker { + + private final byte[] syncframePrefix; + + private boolean foundSyncframe; + private int chunkSampleCount; + private long chunkTimeUs; + private @C.BufferFlags int chunkFlags; + private int chunkSize; + private int chunkOffset; + + public TrueHdSampleRechunker() { + syncframePrefix = new byte[Ac3Util.TRUEHD_SYNCFRAME_PREFIX_LENGTH]; + } + + public void reset() { + foundSyncframe = false; + chunkSampleCount = 0; + } + + public void startSample(ExtractorInput input) throws IOException { + if (foundSyncframe) { + return; + } + input.peekFully(syncframePrefix, 0, Ac3Util.TRUEHD_SYNCFRAME_PREFIX_LENGTH); + input.resetPeekPosition(); + if (Ac3Util.parseTrueHdSyncframeAudioSampleCount(syncframePrefix) == 0) { + return; + } + foundSyncframe = true; + } + + public void sampleMetadata( + TrackOutput trackOutput, + long timeUs, + @C.BufferFlags int flags, + int size, + int offset, + @Nullable TrackOutput.CryptoData cryptoData) { + checkState( + chunkOffset <= size + offset, + "TrueHD chunk samples must be contiguous in the sample queue."); + if (!foundSyncframe) { + return; + } + if (chunkSampleCount++ == 0) { + // This is the first sample in the chunk. + chunkTimeUs = timeUs; + chunkFlags = flags; + chunkSize = 0; + } + chunkSize += size; + chunkOffset = offset; // The offset is to the end of the sample. + if (chunkSampleCount >= Ac3Util.TRUEHD_RECHUNK_SAMPLE_COUNT) { + outputPendingSampleMetadata(trackOutput, cryptoData); + } + } + + public void outputPendingSampleMetadata( + TrackOutput trackOutput, @Nullable TrackOutput.CryptoData cryptoData) { + if (chunkSampleCount > 0) { + trackOutput.sampleMetadata(chunkTimeUs, chunkFlags, chunkSize, chunkOffset, cryptoData); + chunkSampleCount = 0; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/VorbisBitArray.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/VorbisBitArray.java index b498be4a33..56f419a184 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/VorbisBitArray.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/VorbisBitArray.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.extractor; +import static java.lang.Math.min; + import com.google.android.exoplayer2.util.Assertions; /** @@ -41,9 +43,7 @@ public VorbisBitArray(byte[] data) { byteLimit = data.length; } - /** - * Resets the reading position to zero. - */ + /** Resets the reading position to zero. */ public void reset() { byteOffset = 0; bitOffset = 0; @@ -68,7 +68,7 @@ public boolean readBit() { */ public int readBits(int numBits) { int tempByteOffset = byteOffset; - int bitsRead = Math.min(numBits, 8 - bitOffset); + int bitsRead = min(numBits, 8 - bitOffset); int returnValue = ((data[tempByteOffset++] & 0xFF) >> bitOffset) & (0xFF >> (8 - bitsRead)); while (bitsRead < numBits) { returnValue |= (data[tempByteOffset++] & 0xFF) << bitsRead; @@ -95,9 +95,7 @@ public void skipBits(int numBits) { assertValidOffset(); } - /** - * Returns the reading position in bits. - */ + /** Returns the reading position in bits. */ public int getPosition() { return byteOffset * 8 + bitOffset; } @@ -113,17 +111,14 @@ public void setPosition(int position) { assertValidOffset(); } - /** - * Returns the number of remaining bits. - */ + /** Returns the number of remaining bits. 
*/ public int bitsLeft() { return (byteLimit - byteOffset) * 8 - bitOffset; } private void assertValidOffset() { // It is fine for position to be at the end of the array, but no further. - Assertions.checkState(byteOffset >= 0 - && (byteOffset < byteLimit || (byteOffset == byteLimit && bitOffset == 0))); + Assertions.checkState( + byteOffset >= 0 && (byteOffset < byteLimit || (byteOffset == byteLimit && bitOffset == 0))); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/VorbisUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/VorbisUtil.java index 5066c3a7bd..ed02771fe5 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/VorbisUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/VorbisUtil.java @@ -15,10 +15,20 @@ */ package com.google.android.exoplayer2.extractor; +import android.util.Base64; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.metadata.Metadata.Entry; +import com.google.android.exoplayer2.metadata.flac.PictureFrame; +import com.google.android.exoplayer2.metadata.vorbis.VorbisComment; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List; /** Utility methods for parsing Vorbis streams. */ public final class VorbisUtil { @@ -37,27 +47,54 @@ public CommentHeader(String vendor, String[] comments, int length) { } } - /** Vorbis identification header. */ + /** + * Vorbis identification header. + * + * @see Vorbis + * spec/Identification header + */ public static final class VorbisIdHeader { - public final long version; + /** The {@code vorbis_version} field. */ + public final int version; + /** The {@code audio_channels} field. */ public final int channels; - public final long sampleRate; - public final int bitrateMax; + /** The {@code audio_sample_rate} field. */ + public final int sampleRate; + /** The {@code bitrate_maximum} field, or {@link Format#NO_VALUE} if not greater than zero. */ + public final int bitrateMaximum; + /** The {@code bitrate_nominal} field, or {@link Format#NO_VALUE} if not greater than zero. */ public final int bitrateNominal; - public final int bitrateMin; + /** The {@code bitrate_minimum} field, or {@link Format#NO_VALUE} if not greater than zero. */ + public final int bitrateMinimum; + /** The {@code blocksize_0} field. */ public final int blockSize0; + /** The {@code blocksize_1} field. */ public final int blockSize1; + /** The {@code framing_flag} field. */ public final boolean framingFlag; + /** The raw header data. */ public final byte[] data; + /** + * @param version See {@link #version}. + * @param channels See {@link #channels}. + * @param sampleRate See {@link #sampleRate}. + * @param bitrateMaximum See {@link #bitrateMaximum}. + * @param bitrateNominal See {@link #bitrateNominal}. + * @param bitrateMinimum See {@link #bitrateMinimum}. + * @param blockSize0 See {@link #version}. + * @param blockSize1 See {@link #blockSize1}. + * @param framingFlag See {@link #framingFlag}. + * @param data See {@link #data}. 
+ */ public VorbisIdHeader( - long version, + int version, int channels, - long sampleRate, - int bitrateMax, + int sampleRate, + int bitrateMaximum, int bitrateNominal, - int bitrateMin, + int bitrateMinimum, int blockSize0, int blockSize1, boolean framingFlag, @@ -65,18 +102,14 @@ public VorbisIdHeader( this.version = version; this.channels = channels; this.sampleRate = sampleRate; - this.bitrateMax = bitrateMax; + this.bitrateMaximum = bitrateMaximum; this.bitrateNominal = bitrateNominal; - this.bitrateMin = bitrateMin; + this.bitrateMinimum = bitrateMinimum; this.blockSize0 = blockSize0; this.blockSize1 = blockSize1; this.framingFlag = framingFlag; this.data = data; } - - public int getApproximateBitrate() { - return bitrateNominal == 0 ? (bitrateMin + bitrateMax) / 2 : bitrateNominal; - } } /** Vorbis setup header modes. */ @@ -100,8 +133,8 @@ public Mode(boolean blockFlag, int windowType, int transformType, int mapping) { /** * Returns ilog(x), which is the index of the highest set bit in {@code x}. * - * @see - * Vorbis spec + * @see Vorbis + * spec * @param x the value of which the ilog should be calculated. * @return ilog(x) */ @@ -128,23 +161,40 @@ public static VorbisIdHeader readVorbisIdentificationHeader(ParsableByteArray he verifyVorbisHeaderCapturePattern(0x01, headerData, false); - long version = headerData.readLittleEndianUnsignedInt(); + int version = headerData.readLittleEndianUnsignedIntToInt(); int channels = headerData.readUnsignedByte(); - long sampleRate = headerData.readLittleEndianUnsignedInt(); - int bitrateMax = headerData.readLittleEndianInt(); + int sampleRate = headerData.readLittleEndianUnsignedIntToInt(); + int bitrateMaximum = headerData.readLittleEndianInt(); + if (bitrateMaximum <= 0) { + bitrateMaximum = Format.NO_VALUE; + } int bitrateNominal = headerData.readLittleEndianInt(); - int bitrateMin = headerData.readLittleEndianInt(); - + if (bitrateNominal <= 0) { + bitrateNominal = Format.NO_VALUE; + } + int bitrateMinimum = headerData.readLittleEndianInt(); + if (bitrateMinimum <= 0) { + bitrateMinimum = Format.NO_VALUE; + } int blockSize = headerData.readUnsignedByte(); int blockSize0 = (int) Math.pow(2, blockSize & 0x0F); int blockSize1 = (int) Math.pow(2, (blockSize & 0xF0) >> 4); boolean framingFlag = (headerData.readUnsignedByte() & 0x01) > 0; // raw data of Vorbis setup header has to be passed to decoder as CSD buffer #1 - byte[] data = Arrays.copyOf(headerData.data, headerData.limit()); - - return new VorbisIdHeader(version, channels, sampleRate, bitrateMax, bitrateNominal, bitrateMin, - blockSize0, blockSize1, framingFlag, data); + byte[] data = Arrays.copyOf(headerData.getData(), headerData.limit()); + + return new VorbisIdHeader( + version, + channels, + sampleRate, + bitrateMaximum, + bitrateNominal, + bitrateMinimum, + blockSize0, + blockSize1, + framingFlag, + data); } /** @@ -200,12 +250,52 @@ public static CommentHeader readVorbisCommentHeader( length += comments[i].length(); } if (hasFramingBit && (headerData.readUnsignedByte() & 0x01) == 0) { - throw new ParserException("framing bit expected to be set"); + throw ParserException.createForMalformedContainer( + "framing bit expected to be set", /* cause= */ null); } length += 1; return new CommentHeader(vendor, comments, length); } + /** + * Builds a {@link Metadata} instance from a list of Vorbis Comments. + * + *
<p>
      METADATA_BLOCK_PICTURE comments will be transformed into {@link PictureFrame} entries. All + * others will be transformed into {@link VorbisComment} entries. + * + * @param vorbisComments The raw input of comments, as a key-value pair KEY=VAL. + * @return The fully parsed Metadata instance. Null if no vorbis comments could be parsed. + */ + @Nullable + public static Metadata parseVorbisComments(List vorbisComments) { + List metadataEntries = new ArrayList<>(); + for (int i = 0; i < vorbisComments.size(); i++) { + String vorbisComment = vorbisComments.get(i); + String[] keyAndValue = Util.splitAtFirst(vorbisComment, "="); + if (keyAndValue.length != 2) { + Log.w(TAG, "Failed to parse Vorbis comment: " + vorbisComment); + continue; + } + + if (keyAndValue[0].equals("METADATA_BLOCK_PICTURE")) { + // This tag is a special cover art tag, outlined by + // https://wiki.xiph.org/index.php/VorbisComment#Cover_art. + // Decode it from Base64 and transform it into a PictureFrame. + try { + byte[] decoded = Base64.decode(keyAndValue[1], Base64.DEFAULT); + metadataEntries.add(PictureFrame.fromPictureBlock(new ParsableByteArray(decoded))); + } catch (RuntimeException e) { + Log.w(TAG, "Failed to parse vorbis picture", e); + } + } else { + VorbisComment entry = new VorbisComment(keyAndValue[0], keyAndValue[1]); + metadataEntries.add(entry); + } + } + + return metadataEntries.isEmpty() ? null : new Metadata(metadataEntries); + } + /** * Verifies whether the next bytes in {@code header} are a Vorbis header of the given {@code * headerType}. @@ -222,7 +312,8 @@ public static boolean verifyVorbisHeaderCapturePattern( if (quiet) { return false; } else { - throw new ParserException("too short header: " + header.bytesLeft()); + throw ParserException.createForMalformedContainer( + "too short header: " + header.bytesLeft(), /* cause= */ null); } } @@ -230,7 +321,8 @@ public static boolean verifyVorbisHeaderCapturePattern( if (quiet) { return false; } else { - throw new ParserException("expected header type " + Integer.toHexString(headerType)); + throw ParserException.createForMalformedContainer( + "expected header type " + Integer.toHexString(headerType), /* cause= */ null); } } @@ -243,7 +335,8 @@ public static boolean verifyVorbisHeaderCapturePattern( if (quiet) { return false; } else { - throw new ParserException("expected characters 'vorbis'"); + throw ParserException.createForMalformedContainer( + "expected characters 'vorbis'", /* cause= */ null); } } return true; @@ -268,7 +361,7 @@ public static Mode[] readVorbisModes(ParsableByteArray headerData, int channels) int numberOfBooks = headerData.readUnsignedByte() + 1; - VorbisBitArray bitArray = new VorbisBitArray(headerData.data); + VorbisBitArray bitArray = new VorbisBitArray(headerData.getData()); bitArray.skipBits(headerData.getPosition() * 8); for (int i = 0; i < numberOfBooks; i++) { @@ -278,7 +371,8 @@ public static Mode[] readVorbisModes(ParsableByteArray headerData, int channels) int timeCount = bitArray.readBits(6) + 1; for (int i = 0; i < timeCount; i++) { if (bitArray.readBits(16) != 0x00) { - throw new ParserException("placeholder of time domain transforms not zeroed out"); + throw ParserException.createForMalformedContainer( + "placeholder of time domain transforms not zeroed out", /* cause= */ null); } } readFloors(bitArray); @@ -287,7 +381,8 @@ public static Mode[] readVorbisModes(ParsableByteArray headerData, int channels) Mode[] modes = readModes(bitArray); if (!bitArray.readBit()) { - throw new ParserException("framing bit after 
modes not set as expected"); + throw ParserException.createForMalformedContainer( + "framing bit after modes not set as expected", /* cause= */ null); } return modes; } @@ -305,8 +400,7 @@ private static Mode[] readModes(VorbisBitArray bitArray) { return modes; } - private static void readMappings(int channels, VorbisBitArray bitArray) - throws ParserException { + private static void readMappings(int channels, VorbisBitArray bitArray) throws ParserException { int mappingsCount = bitArray.readBits(6) + 1; for (int i = 0; i < mappingsCount; i++) { int mappingType = bitArray.readBits(16); @@ -331,7 +425,8 @@ private static void readMappings(int channels, VorbisBitArray bitArray) couplingSteps = 0; }*/ if (bitArray.readBits(2) != 0x00) { - throw new ParserException("to reserved bits must be zero after mapping coupling steps"); + throw ParserException.createForMalformedContainer( + "to reserved bits must be zero after mapping coupling steps", /* cause= */ null); } if (submaps > 1) { for (int j = 0; j < channels; j++) { @@ -351,7 +446,8 @@ private static void readResidues(VorbisBitArray bitArray) throws ParserException for (int i = 0; i < residueCount; i++) { int residueType = bitArray.readBits(16); if (residueType > 2) { - throw new ParserException("residueType greater than 2 is not decodable"); + throw ParserException.createForMalformedContainer( + "residueType greater than 2 is not decodable", /* cause= */ null); } else { bitArray.skipBits(24); // begin bitArray.skipBits(24); // end @@ -384,7 +480,7 @@ private static void readFloors(VorbisBitArray bitArray) throws ParserException { int floorType = bitArray.readBits(16); switch (floorType) { case 0: - bitArray.skipBits(8); //order + bitArray.skipBits(8); // order bitArray.skipBits(16); // rate bitArray.skipBits(16); // barkMapSize bitArray.skipBits(6); // amplitudeBits @@ -427,15 +523,17 @@ private static void readFloors(VorbisBitArray bitArray) throws ParserException { } break; default: - throw new ParserException("floor type greater than 1 not decodable: " + floorType); + throw ParserException.createForMalformedContainer( + "floor type greater than 1 not decodable: " + floorType, /* cause= */ null); } } } private static CodeBook readBook(VorbisBitArray bitArray) throws ParserException { if (bitArray.readBits(24) != 0x564342) { - throw new ParserException("expected code book to start with [0x56, 0x43, 0x42] at " - + bitArray.getPosition()); + throw ParserException.createForMalformedContainer( + "expected code book to start with [0x56, 0x43, 0x42] at " + bitArray.getPosition(), + /* cause= */ null); } int dimensions = bitArray.readBits(16); int entries = bitArray.readBits(24); @@ -457,7 +555,7 @@ private static CodeBook readBook(VorbisBitArray bitArray) throws ParserException } } else { int length = bitArray.readBits(5) + 1; - for (int i = 0; i < lengthMap.length;) { + for (int i = 0; i < lengthMap.length; ) { int num = bitArray.readBits(iLog(entries - i)); for (int j = 0; j < num && i < lengthMap.length; i++, j++) { lengthMap[i] = length; @@ -468,7 +566,8 @@ private static CodeBook readBook(VorbisBitArray bitArray) throws ParserException int lookupType = bitArray.readBits(4); if (lookupType > 2) { - throw new ParserException("lookup type greater than 2 not decodable: " + lookupType); + throw ParserException.createForMalformedContainer( + "lookup type greater than 2 not decodable: " + lookupType, /* cause= */ null); } else if (lookupType == 1 || lookupType == 2) { bitArray.skipBits(32); // minimumValue bitArray.skipBits(32); // deltaValue @@ 
-509,14 +608,13 @@ private static final class CodeBook { public final int lookupType; public final boolean isOrdered; - public CodeBook(int dimensions, int entries, long[] lengthMap, int lookupType, - boolean isOrdered) { + public CodeBook( + int dimensions, int entries, long[] lengthMap, int lookupType, boolean isOrdered) { this.dimensions = dimensions; this.entries = entries; this.lengthMap = lengthMap; this.lookupType = lookupType; this.isOrdered = isOrdered; } - } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/amr/AmrExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/amr/AmrExtractor.java index f6b64245fc..d313b073ee 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/amr/AmrExtractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/amr/AmrExtractor.java @@ -15,11 +15,14 @@ */ package com.google.android.exoplayer2.extractor.amr; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; -import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.extractor.ConstantBitrateSeekMap; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; @@ -28,6 +31,7 @@ import com.google.android.exoplayer2.extractor.PositionHolder; import com.google.android.exoplayer2.extractor.SeekMap; import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; import java.io.EOFException; @@ -35,7 +39,11 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.Arrays; +import org.checkerframework.checker.nullness.qual.EnsuresNonNull; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; /** * Extracts data from the AMR containers format (either AMR or AMR-WB). This follows RFC-4867, @@ -49,20 +57,34 @@ public final class AmrExtractor implements Extractor { public static final ExtractorsFactory FACTORY = () -> new Extractor[] {new AmrExtractor()}; /** - * Flags controlling the behavior of the extractor. Possible flag value is {@link - * #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING}. + * Flags controlling the behavior of the extractor. Possible flag values are {@link + * #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING} and {@link + * #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef( flag = true, - value = {FLAG_ENABLE_CONSTANT_BITRATE_SEEKING}) + value = {FLAG_ENABLE_CONSTANT_BITRATE_SEEKING, FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS}) public @interface Flags {} /** * Flag to force enable seeking using a constant bitrate assumption in cases where seeking would * otherwise not be possible. 
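The following hunk adds FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS and derives the seek map from the size of the first frame. Two illustrative points, neither taken verbatim from this patch: the bitrate estimate for the 12.2 kbit/s AMR-NB mode (32-byte frames including the header byte, 20 ms per frame), and how the new flag might be passed when registering the extractor.

```java
import com.google.android.exoplayer2.extractor.Extractor;
import com.google.android.exoplayer2.extractor.ExtractorsFactory;
import com.google.android.exoplayer2.extractor.amr.AmrExtractor;

final class AmrSeekingSetup {
  // Bitrate estimate that feeds the ConstantBitrateSeekMap for 12.2 kbit/s AMR-NB:
  // 32 bytes * 8 bits * 1_000_000 us / 20_000 us = 12_800 bit/s.

  // Hypothetical registration with unconditional constant-bitrate seeking enabled.
  static final ExtractorsFactory AMR_FACTORY =
      () ->
          new Extractor[] {
            new AmrExtractor(AmrExtractor.FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS)
          };
}
```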
*/ public static final int FLAG_ENABLE_CONSTANT_BITRATE_SEEKING = 1; + /** + * Like {@link #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING}, except that seeking is also enabled in + * cases where the content length (and hence the duration of the media) is unknown. Application + * code should ensure that requested seek positions are valid when using this flag, or be ready to + * handle playback failures reported through {@link Player.Listener#onPlayerError} with {@link + * PlaybackException#errorCode} set to {@link + * PlaybackException#ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE}. + * + *
<p>
      If this flag is set, then the behavior enabled by {@link + * #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING} is implicitly enabled as well. + */ + public static final int FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS = 1 << 1; /** * The frame size in bytes, including header (1 byte), for each of the 16 frame types for AMR @@ -138,17 +160,22 @@ public final class AmrExtractor implements Extractor { private int numSamplesWithSameSize; private long timeOffsetUs; - private ExtractorOutput extractorOutput; - private TrackOutput trackOutput; - @Nullable private SeekMap seekMap; + private @MonotonicNonNull ExtractorOutput extractorOutput; + private @MonotonicNonNull TrackOutput trackOutput; + private @MonotonicNonNull SeekMap seekMap; private boolean hasOutputFormat; public AmrExtractor() { this(/* flags= */ 0); } - /** @param flags Flags that control the extractor's behavior. */ + /** + * @param flags Flags that control the extractor's behavior. + */ public AmrExtractor(@Flags int flags) { + if ((flags & FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS) != 0) { + flags |= FLAG_ENABLE_CONSTANT_BITRATE_SEEKING; + } this.flags = flags; scratch = new byte[1]; firstSampleSize = C.LENGTH_UNSET; @@ -157,23 +184,24 @@ public AmrExtractor(@Flags int flags) { // Extractor implementation. @Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { + public boolean sniff(ExtractorInput input) throws IOException { return readAmrHeader(input); } @Override - public void init(ExtractorOutput extractorOutput) { - this.extractorOutput = extractorOutput; - trackOutput = extractorOutput.track(/* id= */ 0, C.TRACK_TYPE_AUDIO); - extractorOutput.endTracks(); + public void init(ExtractorOutput output) { + this.extractorOutput = output; + trackOutput = output.track(/* id= */ 0, C.TRACK_TYPE_AUDIO); + output.endTracks(); } @Override - public int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { + assertInitialized(); if (input.getPosition() == 0) { if (!readAmrHeader(input)) { - throw new ParserException("Could not find AMR header."); + throw ParserException.createForMalformedContainer( + "Could not find AMR header.", /* cause= */ null); } } maybeOutputFormat(); @@ -223,7 +251,7 @@ public void release() { * @param input The {@link ExtractorInput} from which data should be peeked/read. * @return Whether the AMR header has been read. */ - private boolean readAmrHeader(ExtractorInput input) throws IOException, InterruptedException { + private boolean readAmrHeader(ExtractorInput input) throws IOException { if (peekAmrSignature(input, amrSignatureNb)) { isWideBand = false; input.skipFully(amrSignatureNb.length); @@ -237,37 +265,32 @@ private boolean readAmrHeader(ExtractorInput input) throws IOException, Interrup } /** Peeks from the beginning of the input to see if the given AMR signature exists. */ - private boolean peekAmrSignature(ExtractorInput input, byte[] amrSignature) - throws IOException, InterruptedException { + private static boolean peekAmrSignature(ExtractorInput input, byte[] amrSignature) + throws IOException { input.resetPeekPosition(); byte[] header = new byte[amrSignature.length]; input.peekFully(header, 0, amrSignature.length); return Arrays.equals(header, amrSignature); } + @RequiresNonNull("trackOutput") private void maybeOutputFormat() { if (!hasOutputFormat) { hasOutputFormat = true; String mimeType = isWideBand ? 
MimeTypes.AUDIO_AMR_WB : MimeTypes.AUDIO_AMR_NB; int sampleRate = isWideBand ? SAMPLE_RATE_WB : SAMPLE_RATE_NB; trackOutput.format( - Format.createAudioSampleFormat( - /* id= */ null, - mimeType, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - MAX_FRAME_SIZE_BYTES, - /* channelCount= */ 1, - sampleRate, - /* pcmEncoding= */ Format.NO_VALUE, - /* initializationData= */ null, - /* drmInitData= */ null, - /* selectionFlags= */ 0, - /* language= */ null)); + new Format.Builder() + .setSampleMimeType(mimeType) + .setMaxInputSize(MAX_FRAME_SIZE_BYTES) + .setChannelCount(1) + .setSampleRate(sampleRate) + .build()); } } - private int readSample(ExtractorInput extractorInput) throws IOException, InterruptedException { + @RequiresNonNull("trackOutput") + private int readSample(ExtractorInput extractorInput) throws IOException { if (currentSampleBytesRemaining == 0) { try { currentSampleSize = peekNextSampleSize(extractorInput); @@ -300,13 +323,12 @@ private int readSample(ExtractorInput extractorInput) throws IOException, Interr C.BUFFER_FLAG_KEY_FRAME, currentSampleSize, /* offset= */ 0, - /* encryptionData= */ null); + /* cryptoData= */ null); currentSampleTimeUs += SAMPLE_TIME_PER_FRAME_US; return RESULT_CONTINUE; } - private int peekNextSampleSize(ExtractorInput extractorInput) - throws IOException, InterruptedException { + private int peekNextSampleSize(ExtractorInput extractorInput) throws IOException { extractorInput.resetPeekPosition(); extractorInput.peekFully(scratch, /* offset= */ 0, /* length= */ 1); @@ -314,7 +336,8 @@ private int peekNextSampleSize(ExtractorInput extractorInput) if ((frameHeader & 0x83) > 0) { // The padding bits are at bit-1 positions in the following pattern: 1000 0011 // Padding bits must be 0. - throw new ParserException("Invalid padding bits for frame header " + frameHeader); + throw ParserException.createForMalformedContainer( + "Invalid padding bits for frame header " + frameHeader, /* cause= */ null); } int frameType = (frameHeader >> 3) & 0x0f; @@ -323,8 +346,9 @@ private int peekNextSampleSize(ExtractorInput extractorInput) private int getFrameSizeInBytes(int frameType) throws ParserException { if (!isValidFrameType(frameType)) { - throw new ParserException( - "Illegal AMR " + (isWideBand ? "WB" : "NB") + " frame type " + frameType); + throw ParserException.createForMalformedContainer( + "Illegal AMR " + (isWideBand ? "WB" : "NB") + " frame type " + frameType, + /* cause= */ null); } return isWideBand ? 
frameSizeBytesByTypeWb[frameType] : frameSizeBytesByTypeNb[frameType]; @@ -346,6 +370,7 @@ private boolean isNarrowBandValidFrameType(int frameType) { return !isWideBand && (frameType < 12 || frameType > 14); } + @RequiresNonNull("extractorOutput") private void maybeOutputSeekMap(long inputLength, int sampleReadResult) { if (hasOutputSeekMap) { return; @@ -359,15 +384,24 @@ private void maybeOutputSeekMap(long inputLength, int sampleReadResult) { hasOutputSeekMap = true; } else if (numSamplesWithSameSize >= NUM_SAME_SIZE_CONSTANT_BIT_RATE_THRESHOLD || sampleReadResult == RESULT_END_OF_INPUT) { - seekMap = getConstantBitrateSeekMap(inputLength); + seekMap = + getConstantBitrateSeekMap( + inputLength, (flags & FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS) != 0); extractorOutput.seekMap(seekMap); hasOutputSeekMap = true; } } - private SeekMap getConstantBitrateSeekMap(long inputLength) { + private SeekMap getConstantBitrateSeekMap(long inputLength, boolean allowSeeksIfLengthUnknown) { int bitrate = getBitrateFromFrameSize(firstSampleSize, SAMPLE_TIME_PER_FRAME_US); - return new ConstantBitrateSeekMap(inputLength, firstSamplePosition, bitrate, firstSampleSize); + return new ConstantBitrateSeekMap( + inputLength, firstSamplePosition, bitrate, firstSampleSize, allowSeeksIfLengthUnknown); + } + + @EnsuresNonNull({"extractorOutput", "trackOutput"}) + private void assertInitialized() { + Assertions.checkStateNotNull(trackOutput); + Util.castNonNull(extractorOutput); } /** @@ -378,6 +412,7 @@ private SeekMap getConstantBitrateSeekMap(long inputLength) { * @return The stream bitrate. */ private static int getBitrateFromFrameSize(int frameSize, long durationUsPerFrame) { - return (int) ((frameSize * C.BITS_PER_BYTE * C.MICROS_PER_SECOND) / durationUsPerFrame); + return (int) + ((frameSize * ((long) C.BITS_PER_BYTE) * C.MICROS_PER_SECOND) / durationUsPerFrame); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/amr/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/amr/package-info.java new file mode 100644 index 0000000000..31d58fadc9 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/amr/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +@NonNullApi +package com.google.android.exoplayer2.extractor.amr; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/AviChunk.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/AviChunk.java new file mode 100644 index 0000000000..d5f2cb73ad --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/AviChunk.java @@ -0,0 +1,27 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.extractor.avi; + +/** + * A chunk, as defined in the AVI spec. + * + *
<p>
      See https://docs.microsoft.com/en-us/windows/win32/directshow/avi-riff-file-reference. + */ +/* package */ interface AviChunk { + + /** Returns the chunk type fourcc. */ + int getType(); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/AviExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/AviExtractor.java new file mode 100644 index 0000000000..e59ba61f7e --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/AviExtractor.java @@ -0,0 +1,555 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.extractor.avi; + +import static java.lang.annotation.ElementType.TYPE_USE; + +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.extractor.DummyExtractorOutput; +import com.google.android.exoplayer2.extractor.Extractor; +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.PositionHolder; +import com.google.android.exoplayer2.extractor.SeekMap; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.ParsableByteArray; +import java.io.IOException; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.util.ArrayList; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Extracts data from the AVI container format. + * + *
      Spec: https://docs.microsoft.com/en-us/windows/win32/directshow/avi-riff-file-reference. + */ +public final class AviExtractor implements Extractor { + + private static final String TAG = "AviExtractor"; + + public static final int FOURCC_RIFF = 0x46464952; + public static final int FOURCC_AVI_ = 0x20495641; // AVI + public static final int FOURCC_LIST = 0x5453494c; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int FOURCC_avih = 0x68697661; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int FOURCC_hdrl = 0x6c726468; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int FOURCC_strl = 0x6c727473; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int FOURCC_movi = 0x69766f6d; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int FOURCC_idx1 = 0x31786469; + + public static final int FOURCC_JUNK = 0x4b4e554a; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int FOURCC_strf = 0x66727473; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int FOURCC_strn = 0x6e727473; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int FOURCC_strh = 0x68727473; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int FOURCC_auds = 0x73647561; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int FOURCC_txts = 0x73747874; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int FOURCC_vids = 0x73646976; + + /** Parser states. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + STATE_SKIPPING_TO_HDRL, + STATE_READING_HDRL_HEADER, + STATE_READING_HDRL_BODY, + STATE_FINDING_MOVI_HEADER, + STATE_FINDING_IDX1_HEADER, + STATE_READING_IDX1_BODY, + STATE_READING_SAMPLES, + }) + private @interface State {} + + private static final int STATE_SKIPPING_TO_HDRL = 0; + private static final int STATE_READING_HDRL_HEADER = 1; + private static final int STATE_READING_HDRL_BODY = 2; + private static final int STATE_FINDING_MOVI_HEADER = 3; + private static final int STATE_FINDING_IDX1_HEADER = 4; + private static final int STATE_READING_IDX1_BODY = 5; + private static final int STATE_READING_SAMPLES = 6; + + private static final int AVIIF_KEYFRAME = 16; + + /** + * Maximum size to skip using {@link ExtractorInput#skip}. Boxes larger than this size are skipped + * using {@link #RESULT_SEEK}. + */ + private static final long RELOAD_MINIMUM_SEEK_DISTANCE = 256 * 1024; + + private final ParsableByteArray scratch; + private final ChunkHeaderHolder chunkHeaderHolder; + + private @State int state; + private ExtractorOutput extractorOutput; + private @MonotonicNonNull AviMainHeaderChunk aviHeader; + private long durationUs; + private ChunkReader[] chunkReaders; + + private long pendingReposition; + @Nullable private ChunkReader currentChunkReader; + private int hdrlSize; + private long moviStart; + private long moviEnd; + private int idx1BodySize; + private boolean seekMapHasBeenOutput; + + public AviExtractor() { + scratch = new ParsableByteArray(/* limit= */ 12); + chunkHeaderHolder = new ChunkHeaderHolder(); + extractorOutput = new DummyExtractorOutput(); + chunkReaders = new ChunkReader[0]; + moviStart = C.POSITION_UNSET; + moviEnd = C.POSITION_UNSET; + hdrlSize = C.LENGTH_UNSET; + durationUs = C.TIME_UNSET; + } + + // Extractor implementation. 
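For orientation, each FOURCC_* constant above is just the chunk id's four ASCII characters packed as a little-endian 32-bit integer, i.e. the value ParsableByteArray.readLittleEndianInt() returns for that id. A minimal sketch of the encoding (the fourCc helper below is illustrative and not part of this diff):

  // Illustrative helper (not in the diff): packs a 4-character chunk id the way
  // readLittleEndianInt() reads it from the stream.
  private static int fourCc(String code) {
    return (code.charAt(3) << 24) | (code.charAt(2) << 16) | (code.charAt(1) << 8) | code.charAt(0);
  }
  // fourCc("RIFF") == 0x46464952 == FOURCC_RIFF
  // fourCc("avih") == 0x68697661 == FOURCC_avih
  // fourCc("01wb") == 0x62773130, the audio chunk id of stream 1, matching
  // ChunkReader.getChunkIdFourCc(1, CHUNK_TYPE_AUDIO).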
+ + @Override + public void init(ExtractorOutput output) { + this.state = STATE_SKIPPING_TO_HDRL; + this.extractorOutput = output; + pendingReposition = C.POSITION_UNSET; + } + + @Override + public boolean sniff(ExtractorInput input) throws IOException { + input.peekFully(scratch.getData(), /* offset= */ 0, /* length= */ 12); + scratch.setPosition(0); + if (scratch.readLittleEndianInt() != FOURCC_RIFF) { + return false; + } + scratch.skipBytes(4); // Skip the RIFF chunk length. + return scratch.readLittleEndianInt() == FOURCC_AVI_; + } + + @Override + public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { + if (resolvePendingReposition(input, seekPosition)) { + return RESULT_SEEK; + } + switch (state) { + case STATE_SKIPPING_TO_HDRL: + // Check for RIFF and AVI fourcc's just in case the caller did not sniff, in order to + // provide a meaningful error if the input is not an AVI file. + if (sniff(input)) { + input.skipFully(/* length= */ 12); + } else { + throw ParserException.createForMalformedContainer( + /* message= */ "AVI Header List not found", /* cause= */ null); + } + state = STATE_READING_HDRL_HEADER; + return RESULT_CONTINUE; + case STATE_READING_HDRL_HEADER: + input.readFully(scratch.getData(), /* offset= */ 0, /* length= */ 12); + scratch.setPosition(0); + chunkHeaderHolder.populateWithListHeaderFrom(scratch); + if (chunkHeaderHolder.listType != FOURCC_hdrl) { + throw ParserException.createForMalformedContainer( + /* message= */ "hdrl expected, found: " + chunkHeaderHolder.listType, + /* cause= */ null); + } + hdrlSize = chunkHeaderHolder.size; + state = STATE_READING_HDRL_BODY; + return RESULT_CONTINUE; + case STATE_READING_HDRL_BODY: + // hdrlSize includes the LIST type (hdrl), so we subtract 4 to the size. + int bytesToRead = hdrlSize - 4; + ParsableByteArray hdrlBody = new ParsableByteArray(bytesToRead); + input.readFully(hdrlBody.getData(), /* offset= */ 0, bytesToRead); + parseHdrlBody(hdrlBody); + state = STATE_FINDING_MOVI_HEADER; + return RESULT_CONTINUE; + case STATE_FINDING_MOVI_HEADER: + if (moviStart != C.POSITION_UNSET && input.getPosition() != moviStart) { + pendingReposition = moviStart; + return RESULT_CONTINUE; + } + input.peekFully(scratch.getData(), /* offset= */ 0, /* length= */ 12); + input.resetPeekPosition(); + scratch.setPosition(0); + chunkHeaderHolder.populateFrom(scratch); + int listType = scratch.readLittleEndianInt(); + if (chunkHeaderHolder.chunkType == FOURCC_RIFF) { + // We are at the start of the file. The movi chunk is in the RIFF chunk, so we skip the + // header, so as to read the RIFF chunk's body. + input.skipFully(12); + return RESULT_CONTINUE; + } + if (chunkHeaderHolder.chunkType != FOURCC_LIST || listType != FOURCC_movi) { + // The chunk header (8 bytes) plus the whole body. + pendingReposition = input.getPosition() + chunkHeaderHolder.size + 8; + return RESULT_CONTINUE; + } + moviStart = input.getPosition(); + // Size includes the list type, but not the LIST or size fields, so we add 8. + moviEnd = moviStart + chunkHeaderHolder.size + 8; + if (!seekMapHasBeenOutput) { + if (Assertions.checkNotNull(aviHeader).hasIndex()) { + state = STATE_FINDING_IDX1_HEADER; + pendingReposition = moviEnd; + return RESULT_CONTINUE; + } else { + extractorOutput.seekMap(new SeekMap.Unseekable(durationUs)); + seekMapHasBeenOutput = true; + } + } + // No need to parse the idx1, so we start reading the samples from the movi chunk straight + // away. We skip 12 bytes to move to the start of the movi's body. 
+ pendingReposition = input.getPosition() + 12; + state = STATE_READING_SAMPLES; + return RESULT_CONTINUE; + case STATE_FINDING_IDX1_HEADER: + input.readFully(scratch.getData(), /* offset= */ 0, /* length= */ 8); + scratch.setPosition(0); + int idx1Fourcc = scratch.readLittleEndianInt(); + int boxSize = scratch.readLittleEndianInt(); + if (idx1Fourcc == FOURCC_idx1) { + state = STATE_READING_IDX1_BODY; + idx1BodySize = boxSize; + } else { + // This one is not idx1, skip to the next box. + pendingReposition = input.getPosition() + boxSize; + } + return RESULT_CONTINUE; + case STATE_READING_IDX1_BODY: + ParsableByteArray idx1Body = new ParsableByteArray(idx1BodySize); + input.readFully(idx1Body.getData(), /* offset= */ 0, /* length= */ idx1BodySize); + parseIdx1Body(idx1Body); + state = STATE_READING_SAMPLES; + pendingReposition = moviStart; + return RESULT_CONTINUE; + case STATE_READING_SAMPLES: + return readMoviChunks(input); + default: + throw new AssertionError(); // Should never happen. + } + } + + @Override + public void seek(long position, long timeUs) { + pendingReposition = C.POSITION_UNSET; + currentChunkReader = null; + for (ChunkReader chunkReader : chunkReaders) { + chunkReader.seekToPosition(position); + } + if (position == 0) { + if (chunkReaders.length == 0) { + // Still unprepared. + state = STATE_SKIPPING_TO_HDRL; + } else { + state = STATE_FINDING_MOVI_HEADER; + } + return; + } + state = STATE_READING_SAMPLES; + } + + @Override + public void release() { + // Nothing to release. + } + + // Internal methods. + + /** + * Returns whether a {@link #RESULT_SEEK} is required for the pending reposition. A seek may not + * be necessary when the desired position (as held by {@link #pendingReposition}) is after the + * {@link ExtractorInput#getPosition() current position}, but not further than {@link + * #RELOAD_MINIMUM_SEEK_DISTANCE}. + */ + private boolean resolvePendingReposition(ExtractorInput input, PositionHolder seekPosition) + throws IOException { + boolean needSeek = false; + if (pendingReposition != C.POSITION_UNSET) { + long currentPosition = input.getPosition(); + if (pendingReposition < currentPosition + || pendingReposition > currentPosition + RELOAD_MINIMUM_SEEK_DISTANCE) { + seekPosition.position = pendingReposition; + needSeek = true; + } else { + // The distance to the target position is short enough that it makes sense to just skip the + // bytes, instead of doing a seek which might re-create an HTTP connection. 
+ input.skipFully((int) (pendingReposition - currentPosition)); + } + } + pendingReposition = C.POSITION_UNSET; + return needSeek; + } + + private void parseHdrlBody(ParsableByteArray hrdlBody) throws IOException { + ListChunk headerList = ListChunk.parseFrom(FOURCC_hdrl, hrdlBody); + if (headerList.getType() != FOURCC_hdrl) { + throw ParserException.createForMalformedContainer( + /* message= */ "Unexpected header list type " + headerList.getType(), /* cause= */ null); + } + @Nullable AviMainHeaderChunk aviHeader = headerList.getChild(AviMainHeaderChunk.class); + if (aviHeader == null) { + throw ParserException.createForMalformedContainer( + /* message= */ "AviHeader not found", /* cause= */ null); + } + this.aviHeader = aviHeader; + // This is usually wrong, so it will be overwritten by video if present + durationUs = aviHeader.totalFrames * (long) aviHeader.frameDurationUs; + ArrayList chunkReaderList = new ArrayList<>(); + int streamId = 0; + for (AviChunk aviChunk : headerList.children) { + if (aviChunk.getType() == FOURCC_strl) { + ListChunk streamList = (ListChunk) aviChunk; + // Note the streamId needs to increment even if the corresponding `strl` is discarded. + // See + // https://docs.microsoft.com/en-us/windows/win32/directshow/avi-riff-file-reference#avi-stream-headers. + @Nullable ChunkReader chunkReader = processStreamList(streamList, streamId++); + if (chunkReader != null) { + chunkReaderList.add(chunkReader); + } + } + } + chunkReaders = chunkReaderList.toArray(new ChunkReader[0]); + extractorOutput.endTracks(); + } + + /** Builds and outputs the {@link SeekMap} from the idx1 chunk. */ + private void parseIdx1Body(ParsableByteArray body) { + long seekOffset = peekSeekOffset(body); + while (body.bytesLeft() >= 16) { + int chunkId = body.readLittleEndianInt(); + int flags = body.readLittleEndianInt(); + long offset = body.readLittleEndianInt() + seekOffset; + body.readLittleEndianInt(); // We ignore the size. + ChunkReader chunkReader = getChunkReader(chunkId); + if (chunkReader == null) { + // We ignore unknown chunk IDs. + continue; + } + if ((flags & AVIIF_KEYFRAME) == AVIIF_KEYFRAME) { + chunkReader.appendKeyFrameToIndex(offset); + } + chunkReader.incrementIndexChunkCount(); + } + for (ChunkReader chunkReader : chunkReaders) { + chunkReader.compactIndex(); + } + seekMapHasBeenOutput = true; + extractorOutput.seekMap(new AviSeekMap(durationUs)); + } + + private long peekSeekOffset(ParsableByteArray idx1Body) { + // The spec states the offset is based on the start of the movi list type fourcc, but it also + // says some files base the offset on the start of the file. We use a best effort approach to + // figure out which is the case. See: + // https://docs.microsoft.com/en-us/previous-versions/windows/desktop/api/Aviriff/ns-aviriff-avioldindex#dwoffset. + if (idx1Body.bytesLeft() < 16) { + // There are no full entries in the index, meaning we don't need to apply an offset. + return 0; + } + int startingPosition = idx1Body.getPosition(); + idx1Body.skipBytes(8); // Skip chunkId (4 bytes) and flags (4 bytes). + int offset = idx1Body.readLittleEndianInt(); + + // moviStart poitns at the start of the LIST, while the seek offset is based at the start of the + // movi fourCC, so we add 8 to reconcile the difference. + long seekOffset = offset > moviStart ? 
0L : moviStart + 8; + idx1Body.setPosition(startingPosition); + return seekOffset; + } + + @Nullable + private ChunkReader getChunkReader(int chunkId) { + for (ChunkReader chunkReader : chunkReaders) { + if (chunkReader.handlesChunkId(chunkId)) { + return chunkReader; + } + } + return null; + } + + private int readMoviChunks(ExtractorInput input) throws IOException { + if (input.getPosition() >= moviEnd) { + return C.RESULT_END_OF_INPUT; + } else if (currentChunkReader != null) { + if (currentChunkReader.onChunkData(input)) { + currentChunkReader = null; + } + } else { + alignInputToEvenPosition(input); + input.peekFully(scratch.getData(), /* offset= */ 0, 12); + scratch.setPosition(0); + int chunkType = scratch.readLittleEndianInt(); + if (chunkType == FOURCC_LIST) { + scratch.setPosition(8); + int listType = scratch.readLittleEndianInt(); + input.skipFully(listType == FOURCC_movi ? 12 : 8); + input.resetPeekPosition(); + return RESULT_CONTINUE; + } + int size = scratch.readLittleEndianInt(); + if (chunkType == FOURCC_JUNK) { + pendingReposition = input.getPosition() + size + 8; + return RESULT_CONTINUE; + } + input.skipFully(8); + input.resetPeekPosition(); + ChunkReader chunkReader = getChunkReader(chunkType); + if (chunkReader == null) { + // No handler for this chunk. We skip it. + pendingReposition = input.getPosition() + size; + return RESULT_CONTINUE; + } else { + chunkReader.onChunkStart(size); + this.currentChunkReader = chunkReader; + } + } + return RESULT_CONTINUE; + } + + @Nullable + private ChunkReader processStreamList(ListChunk streamList, int streamId) { + AviStreamHeaderChunk aviStreamHeaderChunk = streamList.getChild(AviStreamHeaderChunk.class); + StreamFormatChunk streamFormatChunk = streamList.getChild(StreamFormatChunk.class); + if (aviStreamHeaderChunk == null) { + Log.w(TAG, "Missing Stream Header"); + return null; + } + if (streamFormatChunk == null) { + Log.w(TAG, "Missing Stream Format"); + return null; + } + long durationUs = aviStreamHeaderChunk.getDurationUs(); + Format streamFormat = streamFormatChunk.format; + Format.Builder builder = streamFormat.buildUpon(); + builder.setId(streamId); + int suggestedBufferSize = aviStreamHeaderChunk.suggestedBufferSize; + if (suggestedBufferSize != 0) { + builder.setMaxInputSize(suggestedBufferSize); + } + StreamNameChunk streamName = streamList.getChild(StreamNameChunk.class); + if (streamName != null) { + builder.setLabel(streamName.name); + } + int trackType = MimeTypes.getTrackType(streamFormat.sampleMimeType); + if (trackType == C.TRACK_TYPE_AUDIO || trackType == C.TRACK_TYPE_VIDEO) { + TrackOutput trackOutput = extractorOutput.track(streamId, trackType); + trackOutput.format(builder.build()); + ChunkReader chunkReader = + new ChunkReader( + streamId, trackType, durationUs, aviStreamHeaderChunk.length, trackOutput); + this.durationUs = durationUs; + return chunkReader; + } else { + // We don't currently support tracks other than video and audio. + return null; + } + } + + /** + * Skips one byte from the given {@code input} if the current position is odd. + * + *
      This isn't documented anywhere, but AVI files are aligned to even bytes and fill gaps with + * zeros. + */ + private static void alignInputToEvenPosition(ExtractorInput input) throws IOException { + if ((input.getPosition() & 1) == 1) { + input.skipFully(1); + } + } + + // Internal classes. + + private class AviSeekMap implements SeekMap { + + private final long durationUs; + + public AviSeekMap(long durationUs) { + this.durationUs = durationUs; + } + + @Override + public boolean isSeekable() { + return true; + } + + @Override + public long getDurationUs() { + return durationUs; + } + + @Override + public SeekPoints getSeekPoints(long timeUs) { + SeekPoints result = chunkReaders[0].getSeekPoints(timeUs); + for (int i = 1; i < chunkReaders.length; i++) { + SeekPoints seekPoints = chunkReaders[i].getSeekPoints(timeUs); + if (seekPoints.first.position < result.first.position) { + result = seekPoints; + } + } + return result; + } + } + + private static class ChunkHeaderHolder { + public int chunkType; + public int size; + public int listType; + + public void populateWithListHeaderFrom(ParsableByteArray headerBytes) throws ParserException { + populateFrom(headerBytes); + if (chunkType != AviExtractor.FOURCC_LIST) { + throw ParserException.createForMalformedContainer( + /* message= */ "LIST expected, found: " + chunkType, /* cause= */ null); + } + listType = headerBytes.readLittleEndianInt(); + } + + public void populateFrom(ParsableByteArray headerBytes) { + chunkType = headerBytes.readLittleEndianInt(); + size = headerBytes.readLittleEndianInt(); + listType = 0; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/AviMainHeaderChunk.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/AviMainHeaderChunk.java new file mode 100644 index 0000000000..00af8a8248 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/AviMainHeaderChunk.java @@ -0,0 +1,56 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.extractor.avi; + +import com.google.android.exoplayer2.util.ParsableByteArray; + +/** Wrapper around the AVIMAINHEADER structure */ +/* package */ final class AviMainHeaderChunk implements AviChunk { + + private static final int AVIF_HAS_INDEX = 0x10; + + public static AviMainHeaderChunk parseFrom(ParsableByteArray body) { + int microSecPerFrame = body.readLittleEndianInt(); + body.skipBytes(8); // Skip dwMaxBytesPerSec (4 bytes), dwPaddingGranularity (4 bytes). + int flags = body.readLittleEndianInt(); + int totalFrames = body.readLittleEndianInt(); + body.skipBytes(4); // dwInitialFrames (4 bytes). + int streams = body.readLittleEndianInt(); + body.skipBytes(12); // dwSuggestedBufferSize (4 bytes), dwWidth (4 bytes), dwHeight (4 bytes). 
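// Note: the four values kept here are AVIMAINHEADER's dwMicroSecPerFrame, dwFlags, dwTotalFrames
// and dwStreams. AviExtractor uses them for a first duration estimate,
// durationUs = totalFrames * frameDurationUs; with illustrative values dwMicroSecPerFrame = 33367
// and dwTotalFrames = 900 that comes to roughly 30 seconds. The estimate is normally replaced by
// the per-stream duration parsed from AVISTREAMHEADER.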
+ return new AviMainHeaderChunk(microSecPerFrame, flags, totalFrames, streams); + } + + public final int frameDurationUs; + public final int flags; + public final int totalFrames; + public final int streams; + + private AviMainHeaderChunk(int frameDurationUs, int flags, int totalFrames, int streams) { + this.frameDurationUs = frameDurationUs; + this.flags = flags; + this.totalFrames = totalFrames; + this.streams = streams; + } + + @Override + public int getType() { + return AviExtractor.FOURCC_avih; + } + + public boolean hasIndex() { + return (flags & AVIF_HAS_INDEX) == AVIF_HAS_INDEX; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/AviStreamHeaderChunk.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/AviStreamHeaderChunk.java new file mode 100644 index 0000000000..2ed10090c4 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/AviStreamHeaderChunk.java @@ -0,0 +1,88 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.extractor.avi; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; + +/** Parses and holds information from the AVISTREAMHEADER structure. */ +/* package */ final class AviStreamHeaderChunk implements AviChunk { + private static final String TAG = "AviStreamHeaderChunk"; + + public static AviStreamHeaderChunk parseFrom(ParsableByteArray body) { + int streamType = body.readLittleEndianInt(); + body.skipBytes(12); // fccHandler (4 bytes), dwFlags (4 bytes), wPriority (2 bytes), + // wLanguage (2 bytes). + int initialFrames = body.readLittleEndianInt(); + int scale = body.readLittleEndianInt(); + int rate = body.readLittleEndianInt(); + body.skipBytes(4); // dwStart (4 bytes). + int length = body.readLittleEndianInt(); + int suggestedBufferSize = body.readLittleEndianInt(); + body.skipBytes(8); // dwQuality (4 bytes), dwSampleSize (4 bytes). 
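// Note: dwScale and dwRate define the stream's time base and dwLength counts frames (or audio
// blocks) in that base, so getFrameRate() below is rate / scale and getDurationUs() is
// length * scale / rate expressed in microseconds. With illustrative values scale = 1001,
// rate = 30000 and length = 900 that is ~29.97 fps and ~30.03 seconds.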
+ return new AviStreamHeaderChunk( + streamType, initialFrames, scale, rate, length, suggestedBufferSize); + } + + public final int streamType; + public final int initialFrames; + public final int scale; + public final int rate; + public final int length; + public final int suggestedBufferSize; + + private AviStreamHeaderChunk( + int streamType, int initialFrames, int scale, int rate, int length, int suggestedBufferSize) { + this.streamType = streamType; + this.initialFrames = initialFrames; + this.scale = scale; + this.rate = rate; + this.length = length; + this.suggestedBufferSize = suggestedBufferSize; + } + + @Override + public int getType() { + return AviExtractor.FOURCC_strh; + } + + public @C.TrackType int getTrackType() { + switch (streamType) { + case AviExtractor.FOURCC_auds: + return C.TRACK_TYPE_AUDIO; + case AviExtractor.FOURCC_vids: + return C.TRACK_TYPE_VIDEO; + case AviExtractor.FOURCC_txts: + return C.TRACK_TYPE_TEXT; + default: + Log.w(TAG, "Found unsupported streamType fourCC: " + Integer.toHexString(streamType)); + return C.TRACK_TYPE_UNKNOWN; + } + } + + public float getFrameRate() { + return rate / (float) scale; + } + + public long getDurationUs() { + return Util.scaleLargeTimestamp( + /* timestamp= */ length, + /* multiplier= */ C.MICROS_PER_SECOND * scale, + /* divisor= */ rate); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/ChunkReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/ChunkReader.java new file mode 100644 index 0000000000..627dd9abf9 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/ChunkReader.java @@ -0,0 +1,212 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.extractor.avi; + +import static java.lang.annotation.ElementType.TYPE_USE; + +import androidx.annotation.IntDef; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.SeekMap; +import com.google.android.exoplayer2.extractor.SeekPoint; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; +import java.io.IOException; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.util.Arrays; + +/** Reads chunks holding sample data. */ +/* package */ final class ChunkReader { + + /** Parser states. 
*/ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + CHUNK_TYPE_VIDEO_COMPRESSED, + CHUNK_TYPE_VIDEO_UNCOMPRESSED, + CHUNK_TYPE_AUDIO, + }) + private @interface ChunkType {} + + private static final int INITIAL_INDEX_SIZE = 512; + private static final int CHUNK_TYPE_VIDEO_COMPRESSED = ('d' << 16) | ('c' << 24); + private static final int CHUNK_TYPE_VIDEO_UNCOMPRESSED = ('d' << 16) | ('b' << 24); + private static final int CHUNK_TYPE_AUDIO = ('w' << 16) | ('b' << 24); + + protected final TrackOutput trackOutput; + + /** The chunk id fourCC (example: `01wb`), as defined in the index and the movi. */ + private final int chunkId; + /** Secondary chunk id. Bad muxers sometimes use an uncompressed video id (db) for key frames */ + private final int alternativeChunkId; + + private final long durationUs; + private final int streamHeaderChunkCount; + + private int currentChunkSize; + private int bytesRemainingInCurrentChunk; + + /** Number of chunks as calculated by the index */ + private int currentChunkIndex; + + private int indexChunkCount; + private int indexSize; + private long[] keyFrameOffsets; + private int[] keyFrameIndices; + + public ChunkReader( + int id, + @C.TrackType int trackType, + long durationnUs, + int streamHeaderChunkCount, + TrackOutput trackOutput) { + Assertions.checkArgument(trackType == C.TRACK_TYPE_AUDIO || trackType == C.TRACK_TYPE_VIDEO); + this.durationUs = durationnUs; + this.streamHeaderChunkCount = streamHeaderChunkCount; + this.trackOutput = trackOutput; + @ChunkType + int chunkType = + trackType == C.TRACK_TYPE_VIDEO ? CHUNK_TYPE_VIDEO_COMPRESSED : CHUNK_TYPE_AUDIO; + chunkId = getChunkIdFourCc(id, chunkType); + alternativeChunkId = + trackType == C.TRACK_TYPE_VIDEO ? getChunkIdFourCc(id, CHUNK_TYPE_VIDEO_UNCOMPRESSED) : -1; + keyFrameOffsets = new long[INITIAL_INDEX_SIZE]; + keyFrameIndices = new int[INITIAL_INDEX_SIZE]; + } + + public void appendKeyFrameToIndex(long offset) { + if (indexSize == keyFrameIndices.length) { + keyFrameOffsets = Arrays.copyOf(keyFrameOffsets, keyFrameOffsets.length * 3 / 2); + keyFrameIndices = Arrays.copyOf(keyFrameIndices, keyFrameIndices.length * 3 / 2); + } + keyFrameOffsets[indexSize] = offset; + keyFrameIndices[indexSize] = indexChunkCount; + indexSize++; + } + + public void advanceCurrentChunk() { + currentChunkIndex++; + } + + public long getCurrentChunkTimestampUs() { + return getChunkTimestampUs(currentChunkIndex); + } + + public long getFrameDurationUs() { + return getChunkTimestampUs(/* chunkIndex= */ 1); + } + + public void incrementIndexChunkCount() { + indexChunkCount++; + } + + public void compactIndex() { + keyFrameOffsets = Arrays.copyOf(keyFrameOffsets, indexSize); + keyFrameIndices = Arrays.copyOf(keyFrameIndices, indexSize); + } + + public boolean handlesChunkId(int chunkId) { + return this.chunkId == chunkId || alternativeChunkId == chunkId; + } + + public boolean isCurrentFrameAKeyFrame() { + return Arrays.binarySearch(keyFrameIndices, currentChunkIndex) >= 0; + } + + public boolean isVideo() { + return (chunkId & CHUNK_TYPE_VIDEO_COMPRESSED) == CHUNK_TYPE_VIDEO_COMPRESSED; + } + + public boolean isAudio() { + return (chunkId & CHUNK_TYPE_AUDIO) == CHUNK_TYPE_AUDIO; + } + + /** Prepares for parsing a chunk with the given {@code size}. */ + public void onChunkStart(int size) { + currentChunkSize = size; + bytesRemainingInCurrentChunk = size; + } + + /** + * Provides data associated to the current chunk and returns whether the full chunk has been + * parsed. 
+ */ + public boolean onChunkData(ExtractorInput input) throws IOException { + bytesRemainingInCurrentChunk -= + trackOutput.sampleData(input, bytesRemainingInCurrentChunk, false); + boolean done = bytesRemainingInCurrentChunk == 0; + if (done) { + if (currentChunkSize > 0) { + trackOutput.sampleMetadata( + getCurrentChunkTimestampUs(), + (isCurrentFrameAKeyFrame() ? C.BUFFER_FLAG_KEY_FRAME : 0), + currentChunkSize, + 0, + null); + } + advanceCurrentChunk(); + } + return done; + } + + public void seekToPosition(long position) { + if (indexSize == 0) { + currentChunkIndex = 0; + } else { + int index = + Util.binarySearchFloor( + keyFrameOffsets, position, /* inclusive= */ true, /* stayInBounds= */ true); + currentChunkIndex = keyFrameIndices[index]; + } + } + + public SeekMap.SeekPoints getSeekPoints(long timeUs) { + int targetFrameIndex = (int) (timeUs / getFrameDurationUs()); + int keyFrameIndex = + Util.binarySearchFloor( + keyFrameIndices, targetFrameIndex, /* inclusive= */ true, /* stayInBounds= */ true); + if (keyFrameIndices[keyFrameIndex] == targetFrameIndex) { + return new SeekMap.SeekPoints(getSeekPoint(keyFrameIndex)); + } + // The target frame is not a key frame, we look for the two closest ones. + SeekPoint precedingKeyFrameSeekPoint = getSeekPoint(keyFrameIndex); + if (keyFrameIndex + 1 < keyFrameOffsets.length) { + return new SeekMap.SeekPoints(precedingKeyFrameSeekPoint, getSeekPoint(keyFrameIndex + 1)); + } else { + return new SeekMap.SeekPoints(precedingKeyFrameSeekPoint); + } + } + + private long getChunkTimestampUs(int chunkIndex) { + return durationUs * chunkIndex / streamHeaderChunkCount; + } + + private SeekPoint getSeekPoint(int keyFrameIndex) { + return new SeekPoint( + keyFrameIndices[keyFrameIndex] * getFrameDurationUs(), keyFrameOffsets[keyFrameIndex]); + } + + private static int getChunkIdFourCc(int streamId, @ChunkType int chunkType) { + int tens = streamId / 10; + int ones = streamId % 10; + return (('0' + ones) << 8) | ('0' + tens) | chunkType; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/ListChunk.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/ListChunk.java new file mode 100644 index 0000000000..6229cc1b02 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/ListChunk.java @@ -0,0 +1,94 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.extractor.avi; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.common.collect.ImmutableList; + +/** Represents an AVI LIST. 
*/ +/* package */ final class ListChunk implements AviChunk { + + public static ListChunk parseFrom(int listType, ParsableByteArray body) { + ImmutableList.Builder builder = new ImmutableList.Builder<>(); + int listBodyEndPosition = body.limit(); + @C.TrackType int currentTrackType = C.TRACK_TYPE_NONE; + while (body.bytesLeft() > 8) { + int type = body.readLittleEndianInt(); + int size = body.readLittleEndianInt(); + int innerBoxBodyEndPosition = body.getPosition() + size; + body.setLimit(innerBoxBodyEndPosition); + @Nullable AviChunk aviChunk; + if (type == AviExtractor.FOURCC_LIST) { + int innerListType = body.readLittleEndianInt(); + aviChunk = parseFrom(innerListType, body); + } else { + aviChunk = createBox(type, currentTrackType, body); + } + if (aviChunk != null) { + if (aviChunk.getType() == AviExtractor.FOURCC_strh) { + currentTrackType = ((AviStreamHeaderChunk) aviChunk).getTrackType(); + } + builder.add(aviChunk); + } + body.setPosition(innerBoxBodyEndPosition); + body.setLimit(listBodyEndPosition); + } + return new ListChunk(listType, builder.build()); + } + + public final ImmutableList children; + private final int type; + + private ListChunk(int type, ImmutableList children) { + this.type = type; + this.children = children; + } + + @Override + public int getType() { + return type; + } + + @Nullable + @SuppressWarnings("unchecked") + public T getChild(Class c) { + for (AviChunk aviChunk : children) { + if (aviChunk.getClass() == c) { + return (T) aviChunk; + } + } + return null; + } + + @Nullable + private static AviChunk createBox( + int chunkType, @C.TrackType int trackType, ParsableByteArray body) { + switch (chunkType) { + case AviExtractor.FOURCC_avih: + return AviMainHeaderChunk.parseFrom(body); + case AviExtractor.FOURCC_strh: + return AviStreamHeaderChunk.parseFrom(body); + case AviExtractor.FOURCC_strf: + return StreamFormatChunk.parseFrom(trackType, body); + case AviExtractor.FOURCC_strn: + return StreamNameChunk.parseFrom(body); + default: + return null; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/StreamFormatChunk.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/StreamFormatChunk.java new file mode 100644 index 0000000000..db0f0c1e16 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/StreamFormatChunk.java @@ -0,0 +1,150 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.extractor.avi; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; + +/** Holds the {@link Format} information contained in an STRF chunk. 
*/ +/* package */ final class StreamFormatChunk implements AviChunk { + private static final String TAG = "StreamFormatChunk"; + + @Nullable + public static AviChunk parseFrom(int trackType, ParsableByteArray body) { + if (trackType == C.TRACK_TYPE_VIDEO) { + return parseBitmapInfoHeader(body); + } else if (trackType == C.TRACK_TYPE_AUDIO) { + return parseWaveFormatEx(body); + } else { + Log.w( + TAG, + "Ignoring strf box for unsupported track type: " + Util.getTrackTypeString(trackType)); + return null; + } + } + + public final Format format; + + public StreamFormatChunk(Format format) { + this.format = format; + } + + @Override + public int getType() { + return AviExtractor.FOURCC_strf; + } + + @Nullable + private static AviChunk parseBitmapInfoHeader(ParsableByteArray body) { + body.skipBytes(4); // biSize. + int width = body.readLittleEndianInt(); + int height = body.readLittleEndianInt(); + body.skipBytes(4); // biPlanes (2 bytes), biBitCount (2 bytes). + int compression = body.readLittleEndianInt(); + String mimeType = getMimeTypeFromCompression(compression); + if (mimeType == null) { + Log.w(TAG, "Ignoring track with unsupported compression " + compression); + return null; + } + Format.Builder formatBuilder = new Format.Builder(); + formatBuilder.setWidth(width).setHeight(height).setSampleMimeType(mimeType); + return new StreamFormatChunk(formatBuilder.build()); + } + + // Syntax defined by the WAVEFORMATEX structure. See + // https://docs.microsoft.com/en-us/previous-versions/dd757713(v=vs.85). + @Nullable + private static AviChunk parseWaveFormatEx(ParsableByteArray body) { + int formatTag = body.readLittleEndianUnsignedShort(); + @Nullable String mimeType = getMimeTypeFromTag(formatTag); + if (mimeType == null) { + Log.w(TAG, "Ignoring track with unsupported format tag " + formatTag); + return null; + } + int channelCount = body.readLittleEndianUnsignedShort(); + int samplesPerSecond = body.readLittleEndianInt(); + body.skipBytes(6); // averageBytesPerSecond (4 bytes), nBlockAlign (2 bytes). 
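// Note: the remaining WAVEFORMATEX fields are wBitsPerSample and cbSize. The bit depth only
// matters for WAVE_FORMAT_PCM, where it selects the PCM encoding, and the cbSize bytes that
// follow hold codec-specific data, which for AAC is the AudioSpecificConfig passed on as
// initialization data.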
+ int bitsPerSample = body.readUnsignedShort(); + int pcmEncoding = Util.getPcmEncoding(bitsPerSample); + int cbSize = body.readLittleEndianUnsignedShort(); + byte[] codecData = new byte[cbSize]; + body.readBytes(codecData, /* offset= */ 0, codecData.length); + + Format.Builder formatBuilder = new Format.Builder(); + formatBuilder + .setSampleMimeType(mimeType) + .setChannelCount(channelCount) + .setSampleRate(samplesPerSecond); + if (MimeTypes.AUDIO_RAW.equals(mimeType) && pcmEncoding != C.ENCODING_INVALID) { + formatBuilder.setPcmEncoding(pcmEncoding); + } + if (MimeTypes.AUDIO_AAC.equals(mimeType) && codecData.length > 0) { + formatBuilder.setInitializationData(ImmutableList.of(codecData)); + } + return new StreamFormatChunk(formatBuilder.build()); + } + + @Nullable + private static String getMimeTypeFromTag(int tag) { + switch (tag) { + case 0x1: // WAVE_FORMAT_PCM + return MimeTypes.AUDIO_RAW; + case 0x55: // WAVE_FORMAT_MPEGLAYER3 + return MimeTypes.AUDIO_MPEG; + case 0xff: // WAVE_FORMAT_AAC + return MimeTypes.AUDIO_AAC; + case 0x2000: // WAVE_FORMAT_DVM - AC3 + return MimeTypes.AUDIO_AC3; + case 0x2001: // WAVE_FORMAT_DTS2 + return MimeTypes.AUDIO_DTS; + default: + return null; + } + } + + @Nullable + private static String getMimeTypeFromCompression(int compression) { + switch (compression) { + case 0x3234504d: // MP42 + return MimeTypes.VIDEO_MP42; + case 0x3334504d: // MP43 + return MimeTypes.VIDEO_MP43; + case 0x34363248: // H264 + case 0x31637661: // avc1 + case 0x31435641: // AVC1 + return MimeTypes.VIDEO_H264; + case 0x44495633: // 3VID + case 0x78766964: // divx + case 0x58564944: // DIVX + case 0x30355844: // DX50 + case 0x34504d46: // FMP4 + case 0x64697678: // xvid + case 0x44495658: // XVID + return MimeTypes.VIDEO_MP4V; + case 0x47504a4d: // MJPG + case 0x67706a6d: // mjpg + return MimeTypes.VIDEO_MJPEG; + default: + return null; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/StreamNameChunk.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/StreamNameChunk.java new file mode 100644 index 0000000000..908774ef21 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/StreamNameChunk.java @@ -0,0 +1,37 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.extractor.avi; + +import com.google.android.exoplayer2.util.ParsableByteArray; + +/** Parses and contains the name from the STRN chunk. 
*/ +/* package */ final class StreamNameChunk implements AviChunk { + + public static StreamNameChunk parseFrom(ParsableByteArray body) { + return new StreamNameChunk(body.readString(body.bytesLeft())); + } + + public final String name; + + private StreamNameChunk(String name) { + this.name = name; + } + + @Override + public int getType() { + return AviExtractor.FOURCC_strn; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/package-info.java new file mode 100644 index 0000000000..37f58b65f9 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/avi/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.extractor.avi; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flac/FlacBinarySearchSeeker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flac/FlacBinarySearchSeeker.java index 82e1636baf..4359bbba26 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flac/FlacBinarySearchSeeker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flac/FlacBinarySearchSeeker.java @@ -15,13 +15,14 @@ */ package com.google.android.exoplayer2.extractor.flac; +import static java.lang.Math.max; + import com.google.android.exoplayer2.extractor.BinarySearchSeeker; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.FlacFrameReader; import com.google.android.exoplayer2.extractor.FlacFrameReader.SampleNumberHolder; +import com.google.android.exoplayer2.extractor.FlacStreamMetadata; import com.google.android.exoplayer2.extractor.SeekMap; -import com.google.android.exoplayer2.util.FlacConstants; -import com.google.android.exoplayer2.util.FlacStreamMetadata; import java.io.IOException; /** @@ -55,7 +56,7 @@ public FlacBinarySearchSeeker( /* floorBytePosition= */ firstFramePosition, /* ceilingBytePosition= */ inputLength, /* approxBytesPerFrame= */ flacStreamMetadata.getApproxBytesPerFrame(), - /* minimumSearchRange= */ Math.max( + /* minimumSearchRange= */ max( FlacConstants.MIN_FRAME_HEADER_SIZE, flacStreamMetadata.minFrameSize)); } @@ -73,7 +74,7 @@ private FlacTimestampSeeker(FlacStreamMetadata flacStreamMetadata, int frameStar @Override public TimestampSearchResult searchForTimestamp(ExtractorInput input, long targetSampleNumber) - throws IOException, InterruptedException { + throws IOException { long searchPosition = input.getPosition(); // Find left frame. 
@@ -81,7 +82,7 @@ public TimestampSearchResult searchForTimestamp(ExtractorInput input, long targe long leftFramePosition = input.getPeekPosition(); input.advancePeekPosition( - Math.max(FlacConstants.MIN_FRAME_HEADER_SIZE, flacStreamMetadata.minFrameSize)); + max(FlacConstants.MIN_FRAME_HEADER_SIZE, flacStreamMetadata.minFrameSize)); // Find right frame. long rightFrameFirstSampleNumber = findNextFrame(input); @@ -110,10 +111,8 @@ public TimestampSearchResult searchForTimestamp(ExtractorInput input, long targe * the stream if no frame was found. * @throws IOException If peeking from the input fails. In this case, there is no guarantee on * the peek position. - * @throws InterruptedException If interrupted while peeking from input. In this case, there is - * no guarantee on the peek position. */ - private long findNextFrame(ExtractorInput input) throws IOException, InterruptedException { + private long findNextFrame(ExtractorInput input) throws IOException { while (input.getPeekPosition() < input.getLength() - FlacConstants.MIN_FRAME_HEADER_SIZE && !FlacFrameReader.checkFrameHeaderFromPeek( input, flacStreamMetadata, frameStartMarker, sampleNumberHolder)) { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FlacConstants.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flac/FlacConstants.java similarity index 96% rename from TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FlacConstants.java rename to TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flac/FlacConstants.java index 0d36d78ff9..ded7d0d441 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FlacConstants.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flac/FlacConstants.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.google.android.exoplayer2.util; +package com.google.android.exoplayer2.extractor.flac; /** Defines constants used by the FLAC extractor. 
*/ public final class FlacConstants { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flac/FlacExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flac/FlacExtractor.java index 96ccfa4feb..02d40451d8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flac/FlacExtractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flac/FlacExtractor.java @@ -16,6 +16,9 @@ package com.google.android.exoplayer2.extractor.flac; import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.max; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; import androidx.annotation.IntDef; import androidx.annotation.Nullable; @@ -28,18 +31,18 @@ import com.google.android.exoplayer2.extractor.FlacFrameReader.SampleNumberHolder; import com.google.android.exoplayer2.extractor.FlacMetadataReader; import com.google.android.exoplayer2.extractor.FlacSeekTableSeekMap; +import com.google.android.exoplayer2.extractor.FlacStreamMetadata; import com.google.android.exoplayer2.extractor.PositionHolder; import com.google.android.exoplayer2.extractor.SeekMap; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.FlacConstants; -import com.google.android.exoplayer2.util.FlacStreamMetadata; import com.google.android.exoplayer2.util.ParsableByteArray; import java.io.IOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; /** @@ -52,12 +55,17 @@ public final class FlacExtractor implements Extractor { /** Factory for {@link FlacExtractor} instances. */ public static final ExtractorsFactory FACTORY = () -> new Extractor[] {new FlacExtractor()}; + /* + * Flags in the two FLAC extractors should be kept in sync. If we ever change this then + * DefaultExtractorsFactory will need modifying, because it currently assumes this is the case. + */ /** * Flags controlling the behavior of the extractor. Possible flag value is {@link * #FLAG_DISABLE_ID3_METADATA}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef( flag = true, value = {FLAG_DISABLE_ID3_METADATA}) @@ -72,6 +80,7 @@ public final class FlacExtractor implements Extractor { /** Parser state. 
*/ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ STATE_READ_ID3_METADATA, STATE_GET_STREAM_MARKER_AND_INFO_BLOCK_BYTES, @@ -101,15 +110,15 @@ public final class FlacExtractor implements Extractor { private final SampleNumberHolder sampleNumberHolder; - @MonotonicNonNull private ExtractorOutput extractorOutput; - @MonotonicNonNull private TrackOutput trackOutput; + private @MonotonicNonNull ExtractorOutput extractorOutput; + private @MonotonicNonNull TrackOutput trackOutput; private @State int state; @Nullable private Metadata id3Metadata; - @MonotonicNonNull private FlacStreamMetadata flacStreamMetadata; + private @MonotonicNonNull FlacStreamMetadata flacStreamMetadata; private int minFrameSize; private int frameStartMarker; - @MonotonicNonNull private FlacBinarySearchSeeker binarySearchSeeker; + private @MonotonicNonNull FlacBinarySearchSeeker binarySearchSeeker; private int currentFrameBytesWritten; private long currentFrameFirstSampleNumber; @@ -134,7 +143,7 @@ public FlacExtractor(int flags) { } @Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { + public boolean sniff(ExtractorInput input) throws IOException { FlacMetadataReader.peekId3Metadata(input, /* parseData= */ false); return FlacMetadataReader.checkAndPeekStreamMarker(input); } @@ -148,7 +157,7 @@ public void init(ExtractorOutput output) { @Override public @ReadResult int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + throws IOException { switch (state) { case STATE_READ_ID3_METADATA: readId3Metadata(input); @@ -181,7 +190,7 @@ public void seek(long position, long timeUs) { } currentFrameFirstSampleNumber = timeUs == 0 ? 0 : SAMPLE_NUMBER_UNKNOWN; currentFrameBytesWritten = 0; - buffer.reset(); + buffer.reset(/* limit= */ 0); } @Override @@ -191,24 +200,23 @@ public void release() { // Private methods. 
- private void readId3Metadata(ExtractorInput input) throws IOException, InterruptedException { + private void readId3Metadata(ExtractorInput input) throws IOException { id3Metadata = FlacMetadataReader.readId3Metadata(input, /* parseData= */ !id3MetadataDisabled); state = STATE_GET_STREAM_MARKER_AND_INFO_BLOCK_BYTES; } - private void getStreamMarkerAndInfoBlockBytes(ExtractorInput input) - throws IOException, InterruptedException { + private void getStreamMarkerAndInfoBlockBytes(ExtractorInput input) throws IOException { input.peekFully(streamMarkerAndInfoBlock, 0, streamMarkerAndInfoBlock.length); input.resetPeekPosition(); state = STATE_READ_STREAM_MARKER; } - private void readStreamMarker(ExtractorInput input) throws IOException, InterruptedException { + private void readStreamMarker(ExtractorInput input) throws IOException { FlacMetadataReader.readStreamMarker(input); state = STATE_READ_METADATA_BLOCKS; } - private void readMetadataBlocks(ExtractorInput input) throws IOException, InterruptedException { + private void readMetadataBlocks(ExtractorInput input) throws IOException { boolean isLastMetadataBlock = false; FlacMetadataReader.FlacStreamMetadataHolder metadataHolder = new FlacMetadataReader.FlacStreamMetadataHolder(flacStreamMetadata); @@ -219,14 +227,14 @@ private void readMetadataBlocks(ExtractorInput input) throws IOException, Interr } Assertions.checkNotNull(flacStreamMetadata); - minFrameSize = Math.max(flacStreamMetadata.minFrameSize, FlacConstants.MIN_FRAME_HEADER_SIZE); + minFrameSize = max(flacStreamMetadata.minFrameSize, FlacConstants.MIN_FRAME_HEADER_SIZE); castNonNull(trackOutput) .format(flacStreamMetadata.getFormat(streamMarkerAndInfoBlock, id3Metadata)); state = STATE_GET_FRAME_START_MARKER; } - private void getFrameStartMarker(ExtractorInput input) throws IOException, InterruptedException { + private void getFrameStartMarker(ExtractorInput input) throws IOException { frameStartMarker = FlacMetadataReader.getFrameStartMarker(input); castNonNull(extractorOutput) .seekMap( @@ -238,7 +246,7 @@ private void getFrameStartMarker(ExtractorInput input) throws IOException, Inter } private @ReadResult int readFrames(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + throws IOException { Assertions.checkNotNull(trackOutput); Assertions.checkNotNull(flacStreamMetadata); @@ -260,7 +268,9 @@ private void getFrameStartMarker(ExtractorInput input) throws IOException, Inter if (currentLimit < BUFFER_LENGTH) { int bytesRead = input.read( - buffer.data, /* offset= */ currentLimit, /* length= */ BUFFER_LENGTH - currentLimit); + buffer.getData(), + /* offset= */ currentLimit, + /* length= */ BUFFER_LENGTH - currentLimit); foundEndOfInput = bytesRead == C.RESULT_END_OF_INPUT; if (!foundEndOfInput) { buffer.setLimit(currentLimit + bytesRead); @@ -275,7 +285,7 @@ private void getFrameStartMarker(ExtractorInput input) throws IOException, Inter // Skip frame search on the bytes within the minimum frame size. 
if (currentFrameBytesWritten < minFrameSize) { - buffer.skipBytes(Math.min(minFrameSize - currentFrameBytesWritten, buffer.bytesLeft())); + buffer.skipBytes(min(minFrameSize - currentFrameBytesWritten, buffer.bytesLeft())); } long nextFrameFirstSampleNumber = findFrame(buffer, foundEndOfInput); @@ -294,9 +304,11 @@ private void getFrameStartMarker(ExtractorInput input) throws IOException, Inter if (buffer.bytesLeft() < FlacConstants.MAX_FRAME_HEADER_SIZE) { // The next frame header may not fit in the rest of the buffer, so put the trailing bytes at // the start of the buffer, and reset the position and limit. + int bytesLeft = buffer.bytesLeft(); System.arraycopy( - buffer.data, buffer.getPosition(), buffer.data, /* destPos= */ 0, buffer.bytesLeft()); - buffer.reset(buffer.bytesLeft()); + buffer.getData(), buffer.getPosition(), buffer.getData(), /* destPos= */ 0, bytesLeft); + buffer.setPosition(0); + buffer.setLimit(bytesLeft); } return Extractor.RESULT_CONTINUE; @@ -406,6 +418,6 @@ private void outputSampleMetadata() { C.BUFFER_FLAG_KEY_FRAME, currentFrameBytesWritten, /* offset= */ 0, - /* encryptionData= */ null); + /* cryptoData= */ null); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flac/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flac/package-info.java new file mode 100644 index 0000000000..44d3427910 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flac/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.extractor.flac; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/AudioTagPayloadReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/AudioTagPayloadReader.java index 4a904844ee..124d62dd40 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/AudioTagPayloadReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/AudioTagPayloadReader.java @@ -15,19 +15,16 @@ */ package com.google.android.exoplayer2.extractor.flv; -import android.util.Pair; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.audio.AacUtil; import com.google.android.exoplayer2.extractor.TrackOutput; -import com.google.android.exoplayer2.util.CodecSpecificDataUtil; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; import java.util.Collections; -/** - * Parses audio tags from an FLV stream and extracts AAC frames. - */ +/** Parses audio tags from an FLV stream and extracts AAC frames. 
*/ /* package */ final class AudioTagPayloadReader extends TagPayloadReader { private static final int AUDIO_FORMAT_MP3 = 2; @@ -62,27 +59,23 @@ protected boolean parseHeader(ParsableByteArray data) throws UnsupportedFormatEx if (audioFormat == AUDIO_FORMAT_MP3) { int sampleRateIndex = (header >> 2) & 0x03; int sampleRate = AUDIO_SAMPLING_RATE_TABLE[sampleRateIndex]; - Format format = Format.createAudioSampleFormat(null, MimeTypes.AUDIO_MPEG, null, - Format.NO_VALUE, Format.NO_VALUE, 1, sampleRate, null, null, 0, null); + Format format = + new Format.Builder() + .setSampleMimeType(MimeTypes.AUDIO_MPEG) + .setChannelCount(1) + .setSampleRate(sampleRate) + .build(); output.format(format); hasOutputFormat = true; } else if (audioFormat == AUDIO_FORMAT_ALAW || audioFormat == AUDIO_FORMAT_ULAW) { - String type = audioFormat == AUDIO_FORMAT_ALAW ? MimeTypes.AUDIO_ALAW - : MimeTypes.AUDIO_MLAW; + String mimeType = + audioFormat == AUDIO_FORMAT_ALAW ? MimeTypes.AUDIO_ALAW : MimeTypes.AUDIO_MLAW; Format format = - Format.createAudioSampleFormat( - /* id= */ null, - /* sampleMimeType= */ type, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - /* maxInputSize= */ Format.NO_VALUE, - /* channelCount= */ 1, - /* sampleRate= */ 8000, - /* pcmEncoding= */ Format.NO_VALUE, - /* initializationData= */ null, - /* drmInitData= */ null, - /* selectionFlags= */ 0, - /* language= */ null); + new Format.Builder() + .setSampleMimeType(mimeType) + .setChannelCount(1) + .setSampleRate(8000) + .build(); output.format(format); hasOutputFormat = true; } else if (audioFormat != AUDIO_FORMAT_AAC) { @@ -109,11 +102,15 @@ protected boolean parsePayload(ParsableByteArray data, long timeUs) throws Parse // Parse the sequence header. byte[] audioSpecificConfig = new byte[data.bytesLeft()]; data.readBytes(audioSpecificConfig, 0, audioSpecificConfig.length); - Pair audioParams = CodecSpecificDataUtil.parseAacAudioSpecificConfig( - audioSpecificConfig); - Format format = Format.createAudioSampleFormat(null, MimeTypes.AUDIO_AAC, null, - Format.NO_VALUE, Format.NO_VALUE, audioParams.second, audioParams.first, - Collections.singletonList(audioSpecificConfig), null, 0, null); + AacUtil.Config aacConfig = AacUtil.parseAudioSpecificConfig(audioSpecificConfig); + Format format = + new Format.Builder() + .setSampleMimeType(MimeTypes.AUDIO_AAC) + .setCodecs(aacConfig.codecs) + .setChannelCount(aacConfig.channelCount) + .setSampleRate(aacConfig.sampleRateHz) + .setInitializationData(Collections.singletonList(audioSpecificConfig)) + .build(); output.format(format); hasOutputFormat = true; return false; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/FlvExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/FlvExtractor.java index f6835558f2..568a15f692 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/FlvExtractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/FlvExtractor.java @@ -15,23 +15,29 @@ */ package com.google.android.exoplayer2.extractor.flv; +import static java.lang.Math.max; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.ExtractorsFactory; +import 
com.google.android.exoplayer2.extractor.IndexSeekMap; import com.google.android.exoplayer2.extractor.PositionHolder; import com.google.android.exoplayer2.extractor.SeekMap; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.ParsableByteArray; import java.io.IOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * Extracts data from the FLV container format. - */ +/** Extracts data from the FLV container format. */ public final class FlvExtractor implements Extractor { /** Factory for {@link FlvExtractor} instances. */ @@ -40,6 +46,7 @@ public final class FlvExtractor implements Extractor { /** Extractor states. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ STATE_READING_FLV_HEADER, STATE_SKIPPING_TO_TAG_HEADER, @@ -71,7 +78,7 @@ public final class FlvExtractor implements Extractor { private final ParsableByteArray tagData; private final ScriptTagPayloadReader metadataReader; - private ExtractorOutput extractorOutput; + private @MonotonicNonNull ExtractorOutput extractorOutput; private @States int state; private boolean outputFirstSample; private long mediaTagTimestampOffsetUs; @@ -80,8 +87,8 @@ public final class FlvExtractor implements Extractor { private int tagDataSize; private long tagTimestampUs; private boolean outputSeekMap; - private AudioTagPayloadReader audioReader; - private VideoTagPayloadReader videoReader; + private @MonotonicNonNull AudioTagPayloadReader audioReader; + private @MonotonicNonNull VideoTagPayloadReader videoReader; public FlvExtractor() { scratch = new ParsableByteArray(4); @@ -93,23 +100,23 @@ public FlvExtractor() { } @Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { + public boolean sniff(ExtractorInput input) throws IOException { // Check if file starts with "FLV" tag - input.peekFully(scratch.data, 0, 3); + input.peekFully(scratch.getData(), 0, 3); scratch.setPosition(0); if (scratch.readUnsignedInt24() != FLV_TAG) { return false; } // Checking reserved flags are set to 0 - input.peekFully(scratch.data, 0, 2); + input.peekFully(scratch.getData(), 0, 2); scratch.setPosition(0); if ((scratch.readUnsignedShort() & 0xFA) != 0) { return false; } // Read data offset - input.peekFully(scratch.data, 0, 4); + input.peekFully(scratch.getData(), 0, 4); scratch.setPosition(0); int dataOffset = scratch.readInt(); @@ -117,7 +124,7 @@ public boolean sniff(ExtractorInput input) throws IOException, InterruptedExcept input.advancePeekPosition(dataOffset); // Checking first "previous tag size" is set to 0 - input.peekFully(scratch.data, 0, 4); + input.peekFully(scratch.getData(), 0, 4); scratch.setPosition(0); return scratch.readInt() == 0; @@ -130,8 +137,12 @@ public void init(ExtractorOutput output) { @Override public void seek(long position, long timeUs) { - state = STATE_READING_FLV_HEADER; - outputFirstSample = false; + if (position == 0) { + state = STATE_READING_FLV_HEADER; + outputFirstSample = false; + } else { + state = STATE_READING_TAG_HEADER; + } bytesToNextTagHeader = 0; } @@ -141,8 +152,8 @@ public void release() { } @Override - public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException, - InterruptedException { + public int read(ExtractorInput 
input, PositionHolder seekPosition) throws IOException { + Assertions.checkStateNotNull(extractorOutput); // Asserts that init has been called. while (true) { switch (state) { case STATE_READING_FLV_HEADER: @@ -176,10 +187,10 @@ public int read(ExtractorInput input, PositionHolder seekPosition) throws IOExce * @param input The {@link ExtractorInput} from which to read. * @return True if header was read successfully. False if the end of stream was reached. * @throws IOException If an error occurred reading or parsing data from the source. - * @throws InterruptedException If the thread was interrupted. */ - private boolean readFlvHeader(ExtractorInput input) throws IOException, InterruptedException { - if (!input.readFully(headerBuffer.data, 0, FLV_HEADER_SIZE, true)) { + @RequiresNonNull("extractorOutput") + private boolean readFlvHeader(ExtractorInput input) throws IOException { + if (!input.readFully(headerBuffer.getData(), 0, FLV_HEADER_SIZE, true)) { // We've reached the end of the stream. return false; } @@ -190,12 +201,12 @@ private boolean readFlvHeader(ExtractorInput input) throws IOException, Interrup boolean hasAudio = (flags & 0x04) != 0; boolean hasVideo = (flags & 0x01) != 0; if (hasAudio && audioReader == null) { - audioReader = new AudioTagPayloadReader( - extractorOutput.track(TAG_TYPE_AUDIO, C.TRACK_TYPE_AUDIO)); + audioReader = + new AudioTagPayloadReader(extractorOutput.track(TAG_TYPE_AUDIO, C.TRACK_TYPE_AUDIO)); } if (hasVideo && videoReader == null) { - videoReader = new VideoTagPayloadReader( - extractorOutput.track(TAG_TYPE_VIDEO, C.TRACK_TYPE_VIDEO)); + videoReader = + new VideoTagPayloadReader(extractorOutput.track(TAG_TYPE_VIDEO, C.TRACK_TYPE_VIDEO)); } extractorOutput.endTracks(); @@ -210,9 +221,8 @@ private boolean readFlvHeader(ExtractorInput input) throws IOException, Interrup * * @param input The {@link ExtractorInput} from which to read. * @throws IOException If an error occurred skipping data from the source. - * @throws InterruptedException If the thread was interrupted. */ - private void skipToTagHeader(ExtractorInput input) throws IOException, InterruptedException { + private void skipToTagHeader(ExtractorInput input) throws IOException { input.skipFully(bytesToNextTagHeader); bytesToNextTagHeader = 0; state = STATE_READING_TAG_HEADER; @@ -224,10 +234,9 @@ private void skipToTagHeader(ExtractorInput input) throws IOException, Interrupt * @param input The {@link ExtractorInput} from which to read. * @return True if tag header was read successfully. Otherwise, false. * @throws IOException If an error occurred reading or parsing data from the source. - * @throws InterruptedException If the thread was interrupted. */ - private boolean readTagHeader(ExtractorInput input) throws IOException, InterruptedException { - if (!input.readFully(tagHeaderBuffer.data, 0, FLV_TAG_HEADER_SIZE, true)) { + private boolean readTagHeader(ExtractorInput input) throws IOException { + if (!input.readFully(tagHeaderBuffer.getData(), 0, FLV_TAG_HEADER_SIZE, true)) { // We've reached the end of the stream. return false; } @@ -248,9 +257,9 @@ private boolean readTagHeader(ExtractorInput input) throws IOException, Interrup * @param input The {@link ExtractorInput} from which to read. * @return True if the data was consumed by a reader. False if it was skipped. * @throws IOException If an error occurred reading or parsing data from the source. - * @throws InterruptedException If the thread was interrupted. 
*/ - private boolean readTagData(ExtractorInput input) throws IOException, InterruptedException { + @RequiresNonNull("extractorOutput") + private boolean readTagData(ExtractorInput input) throws IOException { boolean wasConsumed = true; boolean wasSampleOutput = false; long timestampUs = getCurrentTimestampUs(); @@ -264,7 +273,11 @@ private boolean readTagData(ExtractorInput input) throws IOException, Interrupte wasSampleOutput = metadataReader.consume(prepareTagData(input), timestampUs); long durationUs = metadataReader.getDurationUs(); if (durationUs != C.TIME_UNSET) { - extractorOutput.seekMap(new SeekMap.Unseekable(durationUs)); + extractorOutput.seekMap( + new IndexSeekMap( + metadataReader.getKeyFrameTagPositions(), + metadataReader.getKeyFrameTimesUs(), + durationUs)); outputSeekMap = true; } } else { @@ -281,18 +294,18 @@ private boolean readTagData(ExtractorInput input) throws IOException, Interrupte return wasConsumed; } - private ParsableByteArray prepareTagData(ExtractorInput input) throws IOException, - InterruptedException { + private ParsableByteArray prepareTagData(ExtractorInput input) throws IOException { if (tagDataSize > tagData.capacity()) { - tagData.reset(new byte[Math.max(tagData.capacity() * 2, tagDataSize)], 0); + tagData.reset(new byte[max(tagData.capacity() * 2, tagDataSize)], 0); } else { tagData.setPosition(0); } tagData.setLimit(tagDataSize); - input.readFully(tagData.data, 0, tagDataSize); + input.readFully(tagData.getData(), 0, tagDataSize); return tagData; } + @RequiresNonNull("extractorOutput") private void ensureReadyForMediaOutput() { if (!outputSeekMap) { extractorOutput.seekMap(new SeekMap.Unseekable(C.TIME_UNSET)); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/ScriptTagPayloadReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/ScriptTagPayloadReader.java index 806cc9fad4..42bb1e019e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/ScriptTagPayloadReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/ScriptTagPayloadReader.java @@ -17,21 +17,22 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.extractor.DummyTrackOutput; import com.google.android.exoplayer2.util.ParsableByteArray; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; +import java.util.List; import java.util.Map; -/** - * Parses Script Data tags from an FLV stream and extracts metadata information. - */ +/** Parses Script Data tags from an FLV stream and extracts metadata information. 
*/ /* package */ final class ScriptTagPayloadReader extends TagPayloadReader { private static final String NAME_METADATA = "onMetaData"; private static final String KEY_DURATION = "duration"; + private static final String KEY_KEY_FRAMES = "keyframes"; + private static final String KEY_FILE_POSITIONS = "filepositions"; + private static final String KEY_TIMES = "times"; // AMF object types private static final int AMF_TYPE_NUMBER = 0; @@ -44,16 +45,28 @@ private static final int AMF_TYPE_DATE = 11; private long durationUs; + private long[] keyFrameTimesUs; + private long[] keyFrameTagPositions; public ScriptTagPayloadReader() { super(new DummyTrackOutput()); durationUs = C.TIME_UNSET; + keyFrameTimesUs = new long[0]; + keyFrameTagPositions = new long[0]; } public long getDurationUs() { return durationUs; } + public long[] getKeyFrameTimesUs() { + return keyFrameTimesUs; + } + + public long[] getKeyFrameTagPositions() { + return keyFrameTagPositions; + } + @Override public void seek() { // Do nothing. @@ -65,30 +78,61 @@ protected boolean parseHeader(ParsableByteArray data) { } @Override - protected boolean parsePayload(ParsableByteArray data, long timeUs) throws ParserException { + protected boolean parsePayload(ParsableByteArray data, long timeUs) { int nameType = readAmfType(data); if (nameType != AMF_TYPE_STRING) { - // Should never happen. - throw new ParserException(); + // Ignore segments with unexpected name type. + return false; } String name = readAmfString(data); if (!NAME_METADATA.equals(name)) { // We're only interested in metadata. return false; } + if (data.bytesLeft() == 0) { + // The metadata script tag has no value. + return false; + } int type = readAmfType(data); if (type != AMF_TYPE_ECMA_ARRAY) { // We're not interested in this metadata. return false; } - // Set the duration to the value contained in the metadata, if present. Map metadata = readAmfEcmaArray(data); - if (metadata.containsKey(KEY_DURATION)) { - double durationSeconds = (double) metadata.get(KEY_DURATION); + // Set the duration to the value contained in the metadata, if present. + @Nullable Object durationSecondsObj = metadata.get(KEY_DURATION); + if (durationSecondsObj instanceof Double) { + double durationSeconds = (double) durationSecondsObj; if (durationSeconds > 0.0) { durationUs = (long) (durationSeconds * C.MICROS_PER_SECOND); } } + // Set the key frame times and positions to the value contained in the metadata, if present. 
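// The "keyframes" value, when present, is a nested AMF structure (exposed here as a Map) holding
// two parallel lists of Doubles: "filepositions" (byte positions of key frame tags) and "times"
// (their timestamps in seconds). The loop below converts them into keyFrameTagPositions and
// keyFrameTimesUs; if any element has an unexpected type, both arrays are cleared so that no
// partial key frame index is reported.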
+ @Nullable Object keyFramesObj = metadata.get(KEY_KEY_FRAMES); + if (keyFramesObj instanceof Map) { + Map keyFrames = (Map) keyFramesObj; + @Nullable Object positionsObj = keyFrames.get(KEY_FILE_POSITIONS); + @Nullable Object timesSecondsObj = keyFrames.get(KEY_TIMES); + if (positionsObj instanceof List && timesSecondsObj instanceof List) { + List positions = (List) positionsObj; + List timesSeconds = (List) timesSecondsObj; + int keyFrameCount = timesSeconds.size(); + keyFrameTimesUs = new long[keyFrameCount]; + keyFrameTagPositions = new long[keyFrameCount]; + for (int i = 0; i < keyFrameCount; i++) { + Object positionObj = positions.get(i); + Object timeSecondsObj = timesSeconds.get(i); + if (timeSecondsObj instanceof Double && positionObj instanceof Double) { + keyFrameTimesUs[i] = (long) (((Double) timeSecondsObj) * C.MICROS_PER_SECOND); + keyFrameTagPositions[i] = ((Double) positionObj).longValue(); + } else { + keyFrameTimesUs = new long[0]; + keyFrameTagPositions = new long[0]; + break; + } + } + } + } return false; } @@ -126,7 +170,7 @@ private static String readAmfString(ParsableByteArray data) { int size = data.readUnsignedShort(); int position = data.getPosition(); data.skipBytes(size); - return new String(data.data, position, size); + return new String(data.getData(), position, size); } /** diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/TagPayloadReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/TagPayloadReader.java index 48914b7c2c..dd61c545ec 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/TagPayloadReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/TagPayloadReader.java @@ -15,24 +15,20 @@ */ package com.google.android.exoplayer2.extractor.flv; +import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.util.ParsableByteArray; -/** - * Extracts individual samples from FLV tags, preserving original order. - */ +/** Extracts individual samples from FLV tags, preserving original order. */ /* package */ abstract class TagPayloadReader { - /** - * Thrown when the format is not supported. - */ + /** Thrown when the format is not supported. */ public static final class UnsupportedFormatException extends ParserException { public UnsupportedFormatException(String msg) { - super(msg); + super(msg, /* cause= */ null, /* contentIsMalformed= */ false, C.DATA_TYPE_MEDIA); } - } protected final TrackOutput output; @@ -46,10 +42,10 @@ protected TagPayloadReader(TrackOutput output) { /** * Notifies the reader that a seek has occurred. - *
<p>
      - * Following a call to this method, the data passed to the next invocation of - * {@link #consume(ParsableByteArray, long)} will not be a continuation of the data that - * was previously passed. Hence the reader should reset any internal state. + * + *
<p>
      Following a call to this method, the data passed to the next invocation of {@link + * #consume(ParsableByteArray, long)} will not be a continuation of the data that was previously + * passed. Hence the reader should reset any internal state. */ public abstract void seek(); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/VideoTagPayloadReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/VideoTagPayloadReader.java index 5ddaafb4a8..db077730b9 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/VideoTagPayloadReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/VideoTagPayloadReader.java @@ -24,9 +24,7 @@ import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.video.AvcConfig; -/** - * Parses video tags from an FLV stream and extracts H.264 nal units. - */ +/** Parses video tags from an FLV stream and extracts H.264 nal units. */ /* package */ final class VideoTagPayloadReader extends TagPayloadReader { // Video codec. @@ -86,13 +84,19 @@ protected boolean parsePayload(ParsableByteArray data, long timeUs) throws Parse // Parse avc sequence header in case this was not done before. if (packetType == AVC_PACKET_TYPE_SEQUENCE_HEADER && !hasOutputFormat) { ParsableByteArray videoSequence = new ParsableByteArray(new byte[data.bytesLeft()]); - data.readBytes(videoSequence.data, 0, data.bytesLeft()); + data.readBytes(videoSequence.getData(), 0, data.bytesLeft()); AvcConfig avcConfig = AvcConfig.parse(videoSequence); nalUnitLengthFieldLength = avcConfig.nalUnitLengthFieldLength; // Construct and output the format. - Format format = Format.createVideoSampleFormat(null, MimeTypes.VIDEO_H264, null, - Format.NO_VALUE, Format.NO_VALUE, avcConfig.width, avcConfig.height, Format.NO_VALUE, - avcConfig.initializationData, Format.NO_VALUE, avcConfig.pixelWidthAspectRatio, null); + Format format = + new Format.Builder() + .setSampleMimeType(MimeTypes.VIDEO_H264) + .setCodecs(avcConfig.codecs) + .setWidth(avcConfig.width) + .setHeight(avcConfig.height) + .setPixelWidthHeightRatio(avcConfig.pixelWidthHeightRatio) + .setInitializationData(avcConfig.initializationData) + .build(); output.format(format); hasOutputFormat = true; return false; @@ -104,7 +108,7 @@ protected boolean parsePayload(ParsableByteArray data, long timeUs) throws Parse // TODO: Deduplicate with Mp4Extractor. // Zero the top three bytes of the array that we'll use to decode nal unit lengths, in case // they're only 1 or 2 bytes long. - byte[] nalLengthData = nalLength.data; + byte[] nalLengthData = nalLength.getData(); nalLengthData[0] = 0; nalLengthData[1] = 0; nalLengthData[2] = 0; @@ -116,7 +120,7 @@ protected boolean parsePayload(ParsableByteArray data, long timeUs) throws Parse int bytesToWrite; while (data.bytesLeft() > 0) { // Read the NAL length so that we know where we find the next one. 
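// FLV carries H.264 in AVCC-style packaging: each NAL unit is preceded by a length field whose
// size (1, 2 or 4 bytes) comes from the AVC configuration record parsed above. The length bytes
// land in the tail of the scratch array whose leading bytes were zeroed, so
// readUnsignedIntToInt() decodes the value correctly for any field size.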
- data.readBytes(nalLength.data, nalUnitLengthFieldLengthDiff, nalUnitLengthFieldLength); + data.readBytes(nalLength.getData(), nalUnitLengthFieldLengthDiff, nalUnitLengthFieldLength); nalLength.setPosition(0); bytesToWrite = nalLength.readUnsignedIntToInt(); @@ -137,5 +141,4 @@ protected boolean parsePayload(ParsableByteArray data, long timeUs) throws Parse return false; } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/package-info.java new file mode 100644 index 0000000000..e726bb50e2 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/flv/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.extractor.flv; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/JpegExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/JpegExtractor.java new file mode 100644 index 0000000000..e4a5e8a863 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/JpegExtractor.java @@ -0,0 +1,316 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.extractor.jpeg; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.annotation.ElementType.TYPE_USE; + +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.extractor.Extractor; +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.PositionHolder; +import com.google.android.exoplayer2.extractor.SeekMap; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.extractor.mp4.Mp4Extractor; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.metadata.mp4.MotionPhotoMetadata; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.ParsableByteArray; +import java.io.IOException; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** Extracts JPEG image using the Exif format. */ +public final class JpegExtractor implements Extractor { + + /** Parser states. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + STATE_READING_MARKER, + STATE_READING_SEGMENT_LENGTH, + STATE_READING_SEGMENT, + STATE_SNIFFING_MOTION_PHOTO_VIDEO, + STATE_READING_MOTION_PHOTO_VIDEO, + STATE_ENDED, + }) + private @interface State {} + + private static final int STATE_READING_MARKER = 0; + private static final int STATE_READING_SEGMENT_LENGTH = 1; + private static final int STATE_READING_SEGMENT = 2; + private static final int STATE_SNIFFING_MOTION_PHOTO_VIDEO = 4; + private static final int STATE_READING_MOTION_PHOTO_VIDEO = 5; + private static final int STATE_ENDED = 6; + + private static final int EXIF_ID_CODE_LENGTH = 6; + private static final long EXIF_HEADER = 0x45786966; // Exif + private static final int MARKER_SOI = 0xFFD8; // Start of image marker + private static final int MARKER_SOS = 0xFFDA; // Start of scan (image data) marker + private static final int MARKER_APP0 = 0xFFE0; // Application data 0 marker + private static final int MARKER_APP1 = 0xFFE1; // Application data 1 marker + private static final String HEADER_XMP_APP1 = "http://ns.adobe.com/xap/1.0/"; + + /** + * The identifier to use for the image track. Chosen to avoid colliding with track IDs used by + * {@link Mp4Extractor} for motion photos. 
+ */ + private static final int IMAGE_TRACK_ID = 1024; + + private final ParsableByteArray scratch; + + private @MonotonicNonNull ExtractorOutput extractorOutput; + + private @State int state; + private int marker; + private int segmentLength; + private long mp4StartPosition; + + @Nullable private MotionPhotoMetadata motionPhotoMetadata; + private @MonotonicNonNull ExtractorInput lastExtractorInput; + private @MonotonicNonNull StartOffsetExtractorInput mp4ExtractorStartOffsetExtractorInput; + @Nullable private Mp4Extractor mp4Extractor; + + public JpegExtractor() { + scratch = new ParsableByteArray(EXIF_ID_CODE_LENGTH); + mp4StartPosition = C.POSITION_UNSET; + } + + @Override + public boolean sniff(ExtractorInput input) throws IOException { + // See ITU-T.81 (1992) subsection B.1.1.3 and Exif version 2.2 (2002) subsection 4.5.4. + if (peekMarker(input) != MARKER_SOI) { + return false; + } + marker = peekMarker(input); + // Even though JFIF and Exif standards are incompatible in theory, Exif files often contain a + // JFIF APP0 marker segment preceding the Exif APP1 marker segment. Skip the JFIF segment if + // present. + if (marker == MARKER_APP0) { + advancePeekPositionToNextSegment(input); + marker = peekMarker(input); + } + if (marker != MARKER_APP1) { + return false; + } + input.advancePeekPosition(2); // Unused segment length + scratch.reset(/* limit= */ EXIF_ID_CODE_LENGTH); + input.peekFully(scratch.getData(), /* offset= */ 0, EXIF_ID_CODE_LENGTH); + return scratch.readUnsignedInt() == EXIF_HEADER && scratch.readUnsignedShort() == 0; // Exif\0\0 + } + + @Override + public void init(ExtractorOutput output) { + extractorOutput = output; + } + + @Override + public @ReadResult int read(ExtractorInput input, PositionHolder seekPosition) + throws IOException { + switch (state) { + case STATE_READING_MARKER: + readMarker(input); + return RESULT_CONTINUE; + case STATE_READING_SEGMENT_LENGTH: + readSegmentLength(input); + return RESULT_CONTINUE; + case STATE_READING_SEGMENT: + readSegment(input); + return RESULT_CONTINUE; + case STATE_SNIFFING_MOTION_PHOTO_VIDEO: + if (input.getPosition() != mp4StartPosition) { + seekPosition.position = mp4StartPosition; + return RESULT_SEEK; + } + sniffMotionPhotoVideo(input); + return RESULT_CONTINUE; + case STATE_READING_MOTION_PHOTO_VIDEO: + if (mp4ExtractorStartOffsetExtractorInput == null || input != lastExtractorInput) { + lastExtractorInput = input; + mp4ExtractorStartOffsetExtractorInput = + new StartOffsetExtractorInput(input, mp4StartPosition); + } + @ReadResult + int readResult = + checkNotNull(mp4Extractor).read(mp4ExtractorStartOffsetExtractorInput, seekPosition); + if (readResult == RESULT_SEEK) { + seekPosition.position += mp4StartPosition; + } + return readResult; + case STATE_ENDED: + return RESULT_END_OF_INPUT; + default: + throw new IllegalStateException(); + } + } + + @Override + public void seek(long position, long timeUs) { + if (position == 0) { + state = STATE_READING_MARKER; + mp4Extractor = null; + } else if (state == STATE_READING_MOTION_PHOTO_VIDEO) { + checkNotNull(mp4Extractor).seek(position, timeUs); + } + } + + @Override + public void release() { + if (mp4Extractor != null) { + mp4Extractor.release(); + } + } + + private int peekMarker(ExtractorInput input) throws IOException { + scratch.reset(/* limit= */ 2); + input.peekFully(scratch.getData(), /* offset= */ 0, /* length= */ 2); + return scratch.readUnsignedShort(); + } + + private void advancePeekPositionToNextSegment(ExtractorInput input) throws IOException { + 
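// Per ITU-T.81, the 2-byte big-endian segment length includes the length field itself, hence
// the "- 2" below to obtain the payload size to skip.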
scratch.reset(/* limit= */ 2); + input.peekFully(scratch.getData(), /* offset= */ 0, /* length= */ 2); + int segmentLength = scratch.readUnsignedShort() - 2; + input.advancePeekPosition(segmentLength); + } + + private void readMarker(ExtractorInput input) throws IOException { + scratch.reset(/* limit= */ 2); + input.readFully(scratch.getData(), /* offset= */ 0, /* length= */ 2); + marker = scratch.readUnsignedShort(); + if (marker == MARKER_SOS) { // Start of scan. + if (mp4StartPosition != C.POSITION_UNSET) { + state = STATE_SNIFFING_MOTION_PHOTO_VIDEO; + } else { + endReadingWithImageTrack(); + } + } else if ((marker < 0xFFD0 || marker > 0xFFD9) && marker != 0xFF01) { + state = STATE_READING_SEGMENT_LENGTH; + } + } + + private void readSegmentLength(ExtractorInput input) throws IOException { + scratch.reset(2); + input.readFully(scratch.getData(), /* offset= */ 0, /* length= */ 2); + segmentLength = scratch.readUnsignedShort() - 2; + state = STATE_READING_SEGMENT; + } + + private void readSegment(ExtractorInput input) throws IOException { + if (marker == MARKER_APP1) { + ParsableByteArray payload = new ParsableByteArray(segmentLength); + input.readFully(payload.getData(), /* offset= */ 0, /* length= */ segmentLength); + if (motionPhotoMetadata == null + && HEADER_XMP_APP1.equals(payload.readNullTerminatedString())) { + @Nullable String xmpString = payload.readNullTerminatedString(); + if (xmpString != null) { + motionPhotoMetadata = getMotionPhotoMetadata(xmpString, input.getLength()); + if (motionPhotoMetadata != null) { + mp4StartPosition = motionPhotoMetadata.videoStartPosition; + } + } + } + } else { + input.skipFully(segmentLength); + } + state = STATE_READING_MARKER; + } + + private void sniffMotionPhotoVideo(ExtractorInput input) throws IOException { + // Check if the file is truncated. + boolean peekedData = + input.peekFully( + scratch.getData(), /* offset= */ 0, /* length= */ 1, /* allowEndOfInput= */ true); + if (!peekedData) { + endReadingWithImageTrack(); + } else { + input.resetPeekPosition(); + if (mp4Extractor == null) { + mp4Extractor = new Mp4Extractor(); + } + mp4ExtractorStartOffsetExtractorInput = + new StartOffsetExtractorInput(input, mp4StartPosition); + if (mp4Extractor.sniff(mp4ExtractorStartOffsetExtractorInput)) { + mp4Extractor.init( + new StartOffsetExtractorOutput(mp4StartPosition, checkNotNull(extractorOutput))); + startReadingMotionPhoto(); + } else { + endReadingWithImageTrack(); + } + } + } + + private void startReadingMotionPhoto() { + outputImageTrack(checkNotNull(motionPhotoMetadata)); + state = STATE_READING_MOTION_PHOTO_VIDEO; + } + + private void endReadingWithImageTrack() { + outputImageTrack(); + checkNotNull(extractorOutput).endTracks(); + extractorOutput.seekMap(new SeekMap.Unseekable(/* durationUs= */ C.TIME_UNSET)); + state = STATE_ENDED; + } + + private void outputImageTrack(Metadata.Entry... metadataEntries) { + TrackOutput imageTrackOutput = + checkNotNull(extractorOutput).track(IMAGE_TRACK_ID, C.TRACK_TYPE_IMAGE); + imageTrackOutput.format( + new Format.Builder() + .setContainerMimeType(MimeTypes.IMAGE_JPEG) + .setMetadata(new Metadata(metadataEntries)) + .build()); + } + + /** + * Attempts to parse the specified XMP data describing the motion photo, returning the resulting + * {@link MotionPhotoMetadata} or {@code null} if it wasn't possible to derive motion photo + * metadata. + * + * @param xmpString A string of XML containing XMP motion photo metadata to attempt to parse. 
+ * @param inputLength The length of the input stream in bytes, or {@link C#LENGTH_UNSET} if + * unknown. + * @return The {@link MotionPhotoMetadata}, or {@code null} if it wasn't possible to derive motion + * photo metadata. + * @throws IOException If an error occurs parsing the XMP string. + */ + @Nullable + private static MotionPhotoMetadata getMotionPhotoMetadata(String xmpString, long inputLength) + throws IOException { + // Metadata defines offsets from the end of the stream, so we need the stream length to + // determine start offsets. + if (inputLength == C.LENGTH_UNSET) { + return null; + } + + // Motion photos have (at least) a primary image media item and a secondary video media item. + @Nullable + MotionPhotoDescription motionPhotoDescription = + XmpMotionPhotoDescriptionParser.parse(xmpString); + if (motionPhotoDescription == null) { + return null; + } + return motionPhotoDescription.getMotionPhotoMetadata(inputLength); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/MotionPhotoDescription.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/MotionPhotoDescription.java new file mode 100644 index 0000000000..3117dfa5f4 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/MotionPhotoDescription.java @@ -0,0 +1,122 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.extractor.jpeg; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.metadata.mp4.MotionPhotoMetadata; +import com.google.android.exoplayer2.util.MimeTypes; +import java.util.List; + +/** Describes the layout and metadata of a motion photo file. */ +/* package */ final class MotionPhotoDescription { + + /** Describes a media item in the motion photo. */ + public static final class ContainerItem { + /** The MIME type of the media item. */ + public final String mime; + /** The application-specific meaning of the media item. */ + public final String semantic; + /** + * The positive integer length in bytes of the media item, or 0 for primary media items and + * secondary media items that share their resource with the preceding media item. + */ + public final long length; + /** + * The number of bytes of additional padding between the end of the primary media item and the + * start of the next media item. 0 for secondary media items. + */ + public final long padding; + + public ContainerItem(String mime, String semantic, long length, long padding) { + this.mime = mime; + this.semantic = semantic; + this.length = length; + this.padding = padding; + } + } + + /** + * The presentation timestamp of the primary media item, in microseconds, or {@link C#TIME_UNSET} + * if unknown. + */ + public final long photoPresentationTimestampUs; + /** + * The media items represented by the motion photo file, in order. 
The primary media item is + * listed first, followed by any secondary media items. + */ + public final List items; + + public MotionPhotoDescription(long photoPresentationTimestampUs, List items) { + this.photoPresentationTimestampUs = photoPresentationTimestampUs; + this.items = items; + } + + /** + * Returns the {@link MotionPhotoMetadata} for the motion photo represented by this instance, or + * {@code null} if there wasn't enough information to derive the metadata. + * + * @param motionPhotoLength The length of the motion photo file, in bytes. + * @return The motion photo metadata, or {@code null}. + */ + @Nullable + public MotionPhotoMetadata getMotionPhotoMetadata(long motionPhotoLength) { + if (items.size() < 2) { + // We need a primary item (photo) and at least one secondary item (video). + return null; + } + // Iterate backwards through the items to find the earlier video in the list. If we find a video + // item with length zero, we need to keep scanning backwards to find the preceding item with + // non-zero length, which is the item that contains the video data. + long photoStartPosition = C.POSITION_UNSET; + long photoLength = C.LENGTH_UNSET; + long mp4StartPosition = C.POSITION_UNSET; + long mp4Length = C.LENGTH_UNSET; + boolean itemContainsMp4 = false; + long itemStartPosition = motionPhotoLength; + long itemEndPosition = motionPhotoLength; + for (int i = items.size() - 1; i >= 0; i--) { + MotionPhotoDescription.ContainerItem item = items.get(i); + itemContainsMp4 |= MimeTypes.VIDEO_MP4.equals(item.mime); + itemEndPosition = itemStartPosition; + if (i == 0) { + // Padding is only applied for the primary item. + itemStartPosition = 0; + itemEndPosition -= item.padding; + } else { + itemStartPosition -= item.length; + } + if (itemContainsMp4 && itemStartPosition != itemEndPosition) { + mp4StartPosition = itemStartPosition; + mp4Length = itemEndPosition - itemStartPosition; + // Reset in case there's another video earlier in the list. + itemContainsMp4 = false; + } + if (i == 0) { + photoStartPosition = itemStartPosition; + photoLength = itemEndPosition; + } + } + if (mp4StartPosition == C.POSITION_UNSET + || mp4Length == C.LENGTH_UNSET + || photoStartPosition == C.POSITION_UNSET + || photoLength == C.LENGTH_UNSET) { + return null; + } + return new MotionPhotoMetadata( + photoStartPosition, photoLength, photoPresentationTimestampUs, mp4StartPosition, mp4Length); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/StartOffsetExtractorInput.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/StartOffsetExtractorInput.java new file mode 100644 index 0000000000..132660349b --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/StartOffsetExtractorInput.java @@ -0,0 +1,69 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.extractor.jpeg; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.ForwardingExtractorInput; + +/** + * An extractor input that wraps another extractor input and exposes data starting at a given start + * byte offset. + * + *
<p>
      This is useful for reading data from a container that's concatenated after some prefix data + * but where the container's extractor doesn't handle a non-zero start offset (for example, because + * it seeks to absolute positions read from the container data). + */ +/* package */ final class StartOffsetExtractorInput extends ForwardingExtractorInput { + + private final long startOffset; + + /** + * Creates a new wrapper reading from the given start byte offset. + * + * @param input The extractor input to wrap. The reading position must be at or after the start + * offset, otherwise data could be read from before the start offset. + * @param startOffset The offset from which this extractor input provides data, in bytes. + * @throws IllegalArgumentException Thrown if the start offset is before the current reading + * position. + */ + public StartOffsetExtractorInput(ExtractorInput input, long startOffset) { + super(input); + checkArgument(input.getPosition() >= startOffset); + this.startOffset = startOffset; + } + + @Override + public long getPosition() { + return super.getPosition() - startOffset; + } + + @Override + public long getPeekPosition() { + return super.getPeekPosition() - startOffset; + } + + @Override + public long getLength() { + return super.getLength() - startOffset; + } + + @Override + public void setRetryPosition(long position, E e) throws E { + super.setRetryPosition(position + startOffset, e); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/StartOffsetExtractorOutput.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/StartOffsetExtractorOutput.java new file mode 100644 index 0000000000..d0c4730fcb --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/StartOffsetExtractorOutput.java @@ -0,0 +1,75 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.extractor.jpeg; + +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.SeekMap; +import com.google.android.exoplayer2.extractor.SeekPoint; +import com.google.android.exoplayer2.extractor.TrackOutput; + +/** + * An extractor output that wraps another extractor output and applies a give start byte offset to + * seek positions. + * + *
<p>
      This is useful for extracting from a container that's concatenated after some prefix data but + * where the container's extractor doesn't handle a non-zero start offset (for example, because it + * seeks to absolute positions read from the container data). + */ +public final class StartOffsetExtractorOutput implements ExtractorOutput { + + private final long startOffset; + private final ExtractorOutput extractorOutput; + + /** Creates a new wrapper reading from the given start byte offset. */ + public StartOffsetExtractorOutput(long startOffset, ExtractorOutput extractorOutput) { + this.startOffset = startOffset; + this.extractorOutput = extractorOutput; + } + + @Override + public TrackOutput track(int id, int type) { + return extractorOutput.track(id, type); + } + + @Override + public void endTracks() { + extractorOutput.endTracks(); + } + + @Override + public void seekMap(SeekMap seekMap) { + extractorOutput.seekMap( + new SeekMap() { + @Override + public boolean isSeekable() { + return seekMap.isSeekable(); + } + + @Override + public long getDurationUs() { + return seekMap.getDurationUs(); + } + + @Override + public SeekPoints getSeekPoints(long timeUs) { + SeekPoints seekPoints = seekMap.getSeekPoints(timeUs); + return new SeekPoints( + new SeekPoint(seekPoints.first.timeUs, seekPoints.first.position + startOffset), + new SeekPoint(seekPoints.second.timeUs, seekPoints.second.position + startOffset)); + } + }); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/XmpMotionPhotoDescriptionParser.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/XmpMotionPhotoDescriptionParser.java new file mode 100644 index 0000000000..17fa7756ab --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/XmpMotionPhotoDescriptionParser.java @@ -0,0 +1,193 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.extractor.jpeg; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.XmlPullParserUtil; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.io.StringReader; +import java.util.List; +import org.xmlpull.v1.XmlPullParser; +import org.xmlpull.v1.XmlPullParserException; +import org.xmlpull.v1.XmlPullParserFactory; + +/** + * Parser for motion photo metadata, handling XMP following the Motion Photo V1 and Micro Video V1b + * specifications. + */ +/* package */ final class XmpMotionPhotoDescriptionParser { + + /** + * Attempts to parse the specified XMP data describing the motion photo, returning the resulting + * {@link MotionPhotoDescription} or {@code null} if it wasn't possible to derive a motion photo + * description. 
+ * + * @param xmpString A string of XML containing XMP motion photo metadata to attempt to parse. + * @return The {@link MotionPhotoDescription}, or {@code null} if it wasn't possible to derive a + * motion photo description. + * @throws IOException If an error occurs reading data from the stream. + */ + @Nullable + public static MotionPhotoDescription parse(String xmpString) throws IOException { + try { + return parseInternal(xmpString); + } catch (XmlPullParserException | ParserException | NumberFormatException e) { + Log.w(TAG, "Ignoring unexpected XMP metadata"); + return null; + } + } + + private static final String TAG = "MotionPhotoXmpParser"; + + private static final String[] MOTION_PHOTO_ATTRIBUTE_NAMES = + new String[] { + "Camera:MotionPhoto", // Motion Photo V1 + "GCamera:MotionPhoto", // Motion Photo V1 (legacy element naming) + "Camera:MicroVideo", // Micro Video V1b + "GCamera:MicroVideo", // Micro Video V1b (legacy element naming) + }; + private static final String[] DESCRIPTION_MOTION_PHOTO_PRESENTATION_TIMESTAMP_ATTRIBUTE_NAMES = + new String[] { + "Camera:MotionPhotoPresentationTimestampUs", // Motion Photo V1 + "GCamera:MotionPhotoPresentationTimestampUs", // Motion Photo V1 (legacy element naming) + "Camera:MicroVideoPresentationTimestampUs", // Micro Video V1b + "GCamera:MicroVideoPresentationTimestampUs", // Micro Video V1b (legacy element naming) + }; + private static final String[] DESCRIPTION_MICRO_VIDEO_OFFSET_ATTRIBUTE_NAMES = + new String[] { + "Camera:MicroVideoOffset", // Micro Video V1b + "GCamera:MicroVideoOffset", // Micro Video V1b (legacy element naming) + }; + + @Nullable + private static MotionPhotoDescription parseInternal(String xmpString) + throws XmlPullParserException, IOException { + XmlPullParserFactory xmlPullParserFactory = XmlPullParserFactory.newInstance(); + XmlPullParser xpp = xmlPullParserFactory.newPullParser(); + xpp.setInput(new StringReader(xmpString)); + xpp.next(); + if (!XmlPullParserUtil.isStartTag(xpp, "x:xmpmeta")) { + throw ParserException.createForMalformedContainer( + "Couldn't find xmp metadata", /* cause= */ null); + } + long motionPhotoPresentationTimestampUs = C.TIME_UNSET; + List containerItems = ImmutableList.of(); + do { + xpp.next(); + if (XmlPullParserUtil.isStartTag(xpp, "rdf:Description")) { + if (!parseMotionPhotoFlagFromDescription(xpp)) { + // The motion photo flag is not set, so the file should not be treated as a motion photo. + return null; + } + motionPhotoPresentationTimestampUs = + parseMotionPhotoPresentationTimestampUsFromDescription(xpp); + containerItems = parseMicroVideoOffsetFromDescription(xpp); + } else if (XmlPullParserUtil.isStartTag(xpp, "Container:Directory")) { + containerItems = parseMotionPhotoV1Directory(xpp, "Container", "Item"); + } else if (XmlPullParserUtil.isStartTag(xpp, "GContainer:Directory")) { + containerItems = parseMotionPhotoV1Directory(xpp, "GContainer", "GContainerItem"); + } + } while (!XmlPullParserUtil.isEndTag(xpp, "x:xmpmeta")); + if (containerItems.isEmpty()) { + // No motion photo information was parsed. 
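// Neither layout produced media items: Motion Photo V1 lists them in a Container:Directory
// (or legacy GContainer:Directory) element, while Micro Video V1b only carries a
// MicroVideoOffset attribute that parseMicroVideoOffsetFromDescription turns into a synthetic
// two-item list. With no items there is nothing to extract.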
+ return null; + } + return new MotionPhotoDescription(motionPhotoPresentationTimestampUs, containerItems); + } + + private static boolean parseMotionPhotoFlagFromDescription(XmlPullParser xpp) { + for (String attributeName : MOTION_PHOTO_ATTRIBUTE_NAMES) { + @Nullable String attributeValue = XmlPullParserUtil.getAttributeValue(xpp, attributeName); + if (attributeValue != null) { + int motionPhotoFlag = Integer.parseInt(attributeValue); + return motionPhotoFlag == 1; + } + } + return false; + } + + private static long parseMotionPhotoPresentationTimestampUsFromDescription(XmlPullParser xpp) { + for (String attributeName : DESCRIPTION_MOTION_PHOTO_PRESENTATION_TIMESTAMP_ATTRIBUTE_NAMES) { + @Nullable String attributeValue = XmlPullParserUtil.getAttributeValue(xpp, attributeName); + if (attributeValue != null) { + long presentationTimestampUs = Long.parseLong(attributeValue); + return presentationTimestampUs == -1 ? C.TIME_UNSET : presentationTimestampUs; + } + } + return C.TIME_UNSET; + } + + private static ImmutableList + parseMicroVideoOffsetFromDescription(XmlPullParser xpp) { + // We store a new Motion Photo item list based on the MicroVideo offset, so that the same + // representation is used for both specifications. + for (String attributeName : DESCRIPTION_MICRO_VIDEO_OFFSET_ATTRIBUTE_NAMES) { + @Nullable String attributeValue = XmlPullParserUtil.getAttributeValue(xpp, attributeName); + if (attributeValue != null) { + long microVideoOffset = Long.parseLong(attributeValue); + return ImmutableList.of( + new MotionPhotoDescription.ContainerItem( + MimeTypes.IMAGE_JPEG, "Primary", /* length= */ 0, /* padding= */ 0), + new MotionPhotoDescription.ContainerItem( + MimeTypes.VIDEO_MP4, + "MotionPhoto", + /* length= */ microVideoOffset, + /* padding= */ 0)); + } + } + return ImmutableList.of(); + } + + private static ImmutableList parseMotionPhotoV1Directory( + XmlPullParser xpp, String containerNamespacePrefix, String itemNamespacePrefix) + throws XmlPullParserException, IOException { + ImmutableList.Builder containerItems = + ImmutableList.builder(); + String itemTagName = containerNamespacePrefix + ":Item"; + String directoryTagName = containerNamespacePrefix + ":Directory"; + do { + xpp.next(); + if (XmlPullParserUtil.isStartTag(xpp, itemTagName)) { + String mimeAttributeName = itemNamespacePrefix + ":Mime"; + String semanticAttributeName = itemNamespacePrefix + ":Semantic"; + String lengthAttributeName = itemNamespacePrefix + ":Length"; + String paddinghAttributeName = itemNamespacePrefix + ":Padding"; + @Nullable String mime = XmlPullParserUtil.getAttributeValue(xpp, mimeAttributeName); + @Nullable String semantic = XmlPullParserUtil.getAttributeValue(xpp, semanticAttributeName); + @Nullable String length = XmlPullParserUtil.getAttributeValue(xpp, lengthAttributeName); + @Nullable String padding = XmlPullParserUtil.getAttributeValue(xpp, paddinghAttributeName); + if (mime == null || semantic == null) { + // Required values are missing. + return ImmutableList.of(); + } + containerItems.add( + new MotionPhotoDescription.ContainerItem( + mime, + semantic, + length != null ? Long.parseLong(length) : 0, + padding != null ? 
Long.parseLong(padding) : 0)); + } + } while (!XmlPullParserUtil.isEndTag(xpp, directoryTagName)); + return containerItems.build(); + } + + private XmpMotionPhotoDescriptionParser() {} +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/package-info.java new file mode 100644 index 0000000000..7e0522b275 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/jpeg/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.extractor.jpeg; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/DefaultEbmlReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/DefaultEbmlReader.java index b5da6dbf2f..cae996821e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/DefaultEbmlReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/DefaultEbmlReader.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.extractor.mkv; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; @@ -25,15 +27,17 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.ArrayDeque; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * Default implementation of {@link EbmlReader}. - */ +/** Default implementation of {@link EbmlReader}. 
*/ /* package */ final class DefaultEbmlReader implements EbmlReader { @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ELEMENT_STATE_READ_ID, ELEMENT_STATE_READ_CONTENT_SIZE, ELEMENT_STATE_READ_CONTENT}) private @interface ElementState {} @@ -52,7 +56,7 @@ private final ArrayDeque masterElementsStack; private final VarintReader varintReader; - private EbmlProcessor processor; + private @MonotonicNonNull EbmlProcessor processor; private @ElementState int elementState; private int elementId; private long elementContentSize; @@ -76,11 +80,11 @@ public void reset() { } @Override - public boolean read(ExtractorInput input) throws IOException, InterruptedException { - Assertions.checkNotNull(processor); + public boolean read(ExtractorInput input) throws IOException { + Assertions.checkStateNotNull(processor); while (true) { - if (!masterElementsStack.isEmpty() - && input.getPosition() >= masterElementsStack.peek().elementEndPosition) { + MasterElement head = masterElementsStack.peek(); + if (head != null && input.getPosition() >= head.elementEndPosition) { processor.endMasterElement(masterElementsStack.pop().elementId); return true; } @@ -114,7 +118,8 @@ public boolean read(ExtractorInput input) throws IOException, InterruptedExcepti return true; case EbmlProcessor.ELEMENT_TYPE_UNSIGNED_INT: if (elementContentSize > MAX_INTEGER_ELEMENT_SIZE_BYTES) { - throw new ParserException("Invalid integer size: " + elementContentSize); + throw ParserException.createForMalformedContainer( + "Invalid integer size: " + elementContentSize, /* cause= */ null); } processor.integerElement(elementId, readInteger(input, (int) elementContentSize)); elementState = ELEMENT_STATE_READ_ID; @@ -122,14 +127,16 @@ public boolean read(ExtractorInput input) throws IOException, InterruptedExcepti case EbmlProcessor.ELEMENT_TYPE_FLOAT: if (elementContentSize != VALID_FLOAT32_ELEMENT_SIZE_BYTES && elementContentSize != VALID_FLOAT64_ELEMENT_SIZE_BYTES) { - throw new ParserException("Invalid float size: " + elementContentSize); + throw ParserException.createForMalformedContainer( + "Invalid float size: " + elementContentSize, /* cause= */ null); } processor.floatElement(elementId, readFloat(input, (int) elementContentSize)); elementState = ELEMENT_STATE_READ_ID; return true; case EbmlProcessor.ELEMENT_TYPE_STRING: if (elementContentSize > Integer.MAX_VALUE) { - throw new ParserException("String element size: " + elementContentSize); + throw ParserException.createForMalformedContainer( + "String element size: " + elementContentSize, /* cause= */ null); } processor.stringElement(elementId, readString(input, (int) elementContentSize)); elementState = ELEMENT_STATE_READ_ID; @@ -143,7 +150,8 @@ public boolean read(ExtractorInput input) throws IOException, InterruptedExcepti elementState = ELEMENT_STATE_READ_ID; break; default: - throw new ParserException("Invalid element type " + type); + throw ParserException.createForMalformedContainer( + "Invalid element type " + type, /* cause= */ null); } } } @@ -157,10 +165,9 @@ public boolean read(ExtractorInput input) throws IOException, InterruptedExcepti * @throws EOFException If the end of input was encountered when searching for the next level 1 * element. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread is interrupted. 
*/ - private long maybeResyncToNextLevel1Element(ExtractorInput input) throws IOException, - InterruptedException { + @RequiresNonNull("processor") + private long maybeResyncToNextLevel1Element(ExtractorInput input) throws IOException { input.resetPeekPosition(); while (true) { input.peekFully(scratch, 0, MAX_ID_BYTES); @@ -183,10 +190,8 @@ private long maybeResyncToNextLevel1Element(ExtractorInput input) throws IOExcep * @param byteLength The length of the integer being read. * @return The read integer value. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread is interrupted. */ - private long readInteger(ExtractorInput input, int byteLength) - throws IOException, InterruptedException { + private long readInteger(ExtractorInput input, int byteLength) throws IOException { input.readFully(scratch, 0, byteLength); long value = 0; for (int i = 0; i < byteLength; i++) { @@ -202,10 +207,8 @@ private long readInteger(ExtractorInput input, int byteLength) * @param byteLength The length of the float being read. * @return The read float value. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread is interrupted. */ - private double readFloat(ExtractorInput input, int byteLength) - throws IOException, InterruptedException { + private double readFloat(ExtractorInput input, int byteLength) throws IOException { long integerValue = readInteger(input, byteLength); double floatValue; if (byteLength == VALID_FLOAT32_ELEMENT_SIZE_BYTES) { @@ -224,10 +227,8 @@ private double readFloat(ExtractorInput input, int byteLength) * @param byteLength The length of the string being read, including zero padding. * @return The read string value. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread is interrupted. */ - private String readString(ExtractorInput input, int byteLength) - throws IOException, InterruptedException { + private static String readString(ExtractorInput input, int byteLength) throws IOException { if (byteLength == 0) { return ""; } @@ -254,7 +255,5 @@ private MasterElement(int elementId, long elementEndPosition) { this.elementId = elementId; this.elementEndPosition = elementEndPosition; } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/EbmlProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/EbmlProcessor.java index 01fe5ff984..5c377becb1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/EbmlProcessor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/EbmlProcessor.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.extractor.mkv; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.extractor.ExtractorInput; @@ -22,6 +24,7 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** Defines EBML element IDs/types and processes events. 
*/ public interface EbmlProcessor { @@ -33,6 +36,7 @@ public interface EbmlProcessor { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ ELEMENT_TYPE_UNKNOWN, ELEMENT_TYPE_MASTER, @@ -79,12 +83,12 @@ public interface EbmlProcessor { /** * Called when the start of a master element is encountered. - *
<p> - * Following events should be considered as taking place within this element until a matching call - * to {@link #endMasterElement(int)} is made. - *
<p> - * Note that it is possible for another master element of the same element ID to be nested within - * itself. + * + *
<p>Following events should be considered as taking place within this element until a matching + * call to {@link #endMasterElement(int)} is made. + * + *
<p>Note that it is possible for another master element of the same element ID to be nested + * within itself. * * @param id The element ID. * @param contentPosition The position of the start of the element's content in the stream. @@ -130,21 +134,18 @@ public interface EbmlProcessor { /** * Called when a binary element is encountered. - *
<p> - * The element header (containing the element ID and content size) will already have been read. - * Implementations are required to consume the whole remainder of the element, which is - * {@code contentSize} bytes in length, before returning. Implementations are permitted to fail - * (by throwing an exception) having partially consumed the data, however if they do this, they - * must consume the remainder of the content when called again. + * + *
<p>
      The element header (containing the element ID and content size) will already have been read. + * Implementations are required to consume the whole remainder of the element, which is {@code + * contentSize} bytes in length, before returning. Implementations are permitted to fail (by + * throwing an exception) having partially consumed the data, however if they do this, they must + * consume the remainder of the content when called again. * * @param id The element ID. * @param contentsSize The element's content size. * @param input The {@link ExtractorInput} from which data should be read. * @throws ParserException If a parsing error occurs. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread is interrupted. */ - void binaryElement(int id, int contentsSize, ExtractorInput input) - throws IOException, InterruptedException; - + void binaryElement(int id, int contentsSize, ExtractorInput input) throws IOException; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/EbmlReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/EbmlReader.java index c3f00a222f..c19906a93a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/EbmlReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/EbmlReader.java @@ -37,8 +37,8 @@ /** * Resets the state of the reader. - *
<p> - * Subsequent calls to {@link #read(ExtractorInput)} will start reading a new EBML structure + * + *
<p>
      Subsequent calls to {@link #read(ExtractorInput)} will start reading a new EBML structure * from scratch. */ void reset(); @@ -50,8 +50,6 @@ * @return True if data can continue to be read. False if the end of the input was encountered. * @throws ParserException If parsing fails. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread is interrupted. */ - boolean read(ExtractorInput input) throws IOException, InterruptedException; - + boolean read(ExtractorInput input) throws IOException; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/MatroskaExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/MatroskaExtractor.java index 55d58d78d1..504656e4e8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/MatroskaExtractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/MatroskaExtractor.java @@ -15,6 +15,14 @@ */ package com.google.android.exoplayer2.extractor.mkv; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static java.lang.Math.max; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.util.Pair; import android.util.SparseArray; import androidx.annotation.CallSuper; @@ -23,7 +31,8 @@ import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; -import com.google.android.exoplayer2.audio.Ac3Util; +import com.google.android.exoplayer2.audio.AacUtil; +import com.google.android.exoplayer2.audio.MpegAudioUtil; import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; import com.google.android.exoplayer2.extractor.ChunkIndex; @@ -31,11 +40,10 @@ import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.ExtractorsFactory; -import com.google.android.exoplayer2.extractor.MpegAudioHeader; import com.google.android.exoplayer2.extractor.PositionHolder; import com.google.android.exoplayer2.extractor.SeekMap; import com.google.android.exoplayer2.extractor.TrackOutput; -import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.extractor.TrueHdSampleRechunker; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.LongArray; import com.google.android.exoplayer2.util.MimeTypes; @@ -44,19 +52,28 @@ import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.video.AvcConfig; import com.google.android.exoplayer2.video.ColorInfo; +import com.google.android.exoplayer2.video.DolbyVisionConfig; import com.google.android.exoplayer2.video.HevcConfig; +import com.google.common.collect.ImmutableList; import java.io.IOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import 
java.util.List; import java.util.Locale; +import java.util.Map; import java.util.UUID; +import org.checkerframework.checker.nullness.compatqual.NullableType; +import org.checkerframework.checker.nullness.qual.EnsuresNonNull; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; /** Extracts data from the Matroska and WebM container formats. */ public class MatroskaExtractor implements Extractor { @@ -70,14 +87,15 @@ public class MatroskaExtractor implements Extractor { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef( flag = true, value = {FLAG_DISABLE_SEEK_FOR_CUES}) public @interface Flags {} /** * Flag to disable seeking for cues. - *
<p> - * Normally (i.e. when this flag is not set) the extractor will seek to the cues element if its + * + *
<p>
      Normally (i.e. when this flag is not set) the extractor will seek to the cues element if its * position is specified in the seek head and if it's after the first cluster. Setting this flag * disables seeking to the cues element. If the cues element is after the first cluster then the * media is treated as being unseekable. @@ -119,8 +137,11 @@ public class MatroskaExtractor implements Extractor { private static final String CODEC_ID_FLAC = "A_FLAC"; private static final String CODEC_ID_ACM = "A_MS/ACM"; private static final String CODEC_ID_PCM_INT_LIT = "A_PCM/INT/LIT"; + private static final String CODEC_ID_PCM_INT_BIG = "A_PCM/INT/BIG"; + private static final String CODEC_ID_PCM_FLOAT = "A_PCM/FLOAT/IEEE"; private static final String CODEC_ID_SUBRIP = "S_TEXT/UTF8"; private static final String CODEC_ID_ASS = "S_TEXT/ASS"; + private static final String CODEC_ID_VTT = "S_TEXT/WEBVTT"; private static final String CODEC_ID_VOBSUB = "S_VOBSUB"; private static final String CODEC_ID_PGS = "S_HDMV/PGS"; private static final String CODEC_ID_DVBSUB = "S_DVBSUB"; @@ -162,11 +183,15 @@ public class MatroskaExtractor implements Extractor { private static final int ID_FLAG_FORCED = 0x55AA; private static final int ID_DEFAULT_DURATION = 0x23E383; private static final int ID_MAX_BLOCK_ADDITION_ID = 0x55EE; + private static final int ID_BLOCK_ADDITION_MAPPING = 0x41E4; + private static final int ID_BLOCK_ADD_ID_TYPE = 0x41E7; + private static final int ID_BLOCK_ADD_ID_EXTRA_DATA = 0x41ED; private static final int ID_NAME = 0x536E; private static final int ID_CODEC_ID = 0x86; private static final int ID_CODEC_PRIVATE = 0x63A2; private static final int ID_CODEC_DELAY = 0x56AA; private static final int ID_SEEK_PRE_ROLL = 0x56BB; + private static final int ID_DISCARD_PADDING = 0x75A2; private static final int ID_VIDEO = 0xE0; private static final int ID_PIXEL_WIDTH = 0xB0; private static final int ID_PIXEL_HEIGHT = 0xBA; @@ -226,6 +251,17 @@ public class MatroskaExtractor implements Extractor { */ private static final int BLOCK_ADDITIONAL_ID_VP9_ITU_T_35 = 4; + /** + * BlockAddIdType value for Dolby Vision configuration with profile <= 7. See also + * https://www.matroska.org/technical/codec_specs.html. + */ + private static final int BLOCK_ADD_ID_TYPE_DVCC = 0x64766343; + /** + * BlockAddIdType value for Dolby Vision configuration with profile > 7. See also + * https://www.matroska.org/technical/codec_specs.html. + */ + private static final int BLOCK_ADD_ID_TYPE_DVVC = 0x64767643; + private static final int LACING_NONE = 0; private static final int LACING_XIPH = 1; private static final int LACING_FIXED_SIZE = 2; @@ -241,8 +277,8 @@ public class MatroskaExtractor implements Extractor { *
<p>The display time of each subtitle is passed as {@code timeUs} to {@link * TrackOutput#sampleMetadata}. The start and end timecodes in this template are relative to * {@code timeUs}. Hence the start timecode is always zero. The 12 byte end timecode starting at - * {@link #SUBRIP_PREFIX_END_TIMECODE_OFFSET} is set to a dummy value, and must be replaced with - * the duration of the subtitle. + * {@link #SUBRIP_PREFIX_END_TIMECODE_OFFSET} is set to a placeholder value, and must be replaced + * with the duration of the subtitle. * *
<p>
      Equivalent to the UTF-8 string: "1\n00:00:00,000 --> 00:00:00,000\n". */ @@ -251,33 +287,29 @@ public class MatroskaExtractor implements Extractor { 49, 10, 48, 48, 58, 48, 48, 58, 48, 48, 44, 48, 48, 48, 32, 45, 45, 62, 32, 48, 48, 58, 48, 48, 58, 48, 48, 44, 48, 48, 48, 10 }; - /** - * The byte offset of the end timecode in {@link #SUBRIP_PREFIX}. - */ + /** The byte offset of the end timecode in {@link #SUBRIP_PREFIX}. */ private static final int SUBRIP_PREFIX_END_TIMECODE_OFFSET = 19; /** * The value by which to divide a time in microseconds to convert it to the unit of the last value * in a subrip timecode (milliseconds). */ private static final long SUBRIP_TIMECODE_LAST_VALUE_SCALING_FACTOR = 1000; - /** - * The format of a subrip timecode. - */ + /** The format of a subrip timecode. */ private static final String SUBRIP_TIMECODE_FORMAT = "%02d:%02d:%02d,%03d"; - /** - * Matroska specific format line for SSA subtitles. - */ - private static final byte[] SSA_DIALOGUE_FORMAT = Util.getUtf8Bytes("Format: Start, End, " - + "ReadOrder, Layer, Style, Name, MarginL, MarginR, MarginV, Effect, Text"); + /** Matroska specific format line for SSA subtitles. */ + private static final byte[] SSA_DIALOGUE_FORMAT = + Util.getUtf8Bytes( + "Format: Start, End, " + + "ReadOrder, Layer, Style, Name, MarginL, MarginR, MarginV, Effect, Text"); /** * A template for the prefix that must be added to each SSA sample. * *
<p>The display time of each subtitle is passed as {@code timeUs} to {@link * TrackOutput#sampleMetadata}. The start and end timecodes in this template are relative to * {@code timeUs}. Hence the start timecode is always zero. The 12 byte end timecode starting at - * {@link #SUBRIP_PREFIX_END_TIMECODE_OFFSET} is set to a dummy value, and must be replaced with - * the duration of the subtitle. + * {@link #SUBRIP_PREFIX_END_TIMECODE_OFFSET} is set to a placeholder value, and must be replaced + * with the duration of the subtitle. * *
<p>
      Equivalent to the UTF-8 string: "Dialogue: 0:00:00:00,0:00:00:00,". */ @@ -286,37 +318,63 @@ public class MatroskaExtractor implements Extractor { 68, 105, 97, 108, 111, 103, 117, 101, 58, 32, 48, 58, 48, 48, 58, 48, 48, 58, 48, 48, 44, 48, 58, 48, 48, 58, 48, 48, 58, 48, 48, 44 }; - /** - * The byte offset of the end timecode in {@link #SSA_PREFIX}. - */ + /** The byte offset of the end timecode in {@link #SSA_PREFIX}. */ private static final int SSA_PREFIX_END_TIMECODE_OFFSET = 21; /** * The value by which to divide a time in microseconds to convert it to the unit of the last value * in an SSA timecode (1/100ths of a second). */ - private static final long SSA_TIMECODE_LAST_VALUE_SCALING_FACTOR = 10000; - /** - * The format of an SSA timecode. - */ + private static final long SSA_TIMECODE_LAST_VALUE_SCALING_FACTOR = 10_000; + /** The format of an SSA timecode. */ private static final String SSA_TIMECODE_FORMAT = "%01d:%02d:%02d:%02d"; /** - * The length in bytes of a WAVEFORMATEX structure. + * A template for the prefix that must be added to each VTT sample. + * + *
<p>The display time of each subtitle is passed as {@code timeUs} to {@link + * TrackOutput#sampleMetadata}. The start and end timecodes in this template are relative to + * {@code timeUs}. Hence the start timecode is always zero. The 12 byte end timecode starting at + * {@link #VTT_PREFIX_END_TIMECODE_OFFSET} is set to a placeholder value, and must be replaced + * with the duration of the subtitle. + * + *
<p>
      Equivalent to the UTF-8 string: "WEBVTT\n\n00:00:00.000 --> 00:00:00.000\n". */ - private static final int WAVE_FORMAT_SIZE = 18; + private static final byte[] VTT_PREFIX = + new byte[] { + 87, 69, 66, 86, 84, 84, 10, 10, 48, 48, 58, 48, 48, 58, 48, 48, 46, 48, 48, 48, 32, 45, 45, + 62, 32, 48, 48, 58, 48, 48, 58, 48, 48, 46, 48, 48, 48, 10 + }; + /** The byte offset of the end timecode in {@link #VTT_PREFIX}. */ + private static final int VTT_PREFIX_END_TIMECODE_OFFSET = 25; /** - * Format tag indicating a WAVEFORMATEXTENSIBLE structure. + * The value by which to divide a time in microseconds to convert it to the unit of the last value + * in a VTT timecode (milliseconds). */ + private static final long VTT_TIMECODE_LAST_VALUE_SCALING_FACTOR = 1000; + /** The format of a VTT timecode. */ + private static final String VTT_TIMECODE_FORMAT = "%02d:%02d:%02d.%03d"; + + /** The length in bytes of a WAVEFORMATEX structure. */ + private static final int WAVE_FORMAT_SIZE = 18; + /** Format tag indicating a WAVEFORMATEXTENSIBLE structure. */ private static final int WAVE_FORMAT_EXTENSIBLE = 0xFFFE; - /** - * Format tag for PCM. - */ + /** Format tag for PCM. */ private static final int WAVE_FORMAT_PCM = 1; - /** - * Sub format for PCM. - */ + /** Sub format for PCM. */ private static final UUID WAVE_SUBFORMAT_PCM = new UUID(0x0100000000001000L, 0x800000AA00389B71L); + /** Some HTC devices signal rotation in track names. */ + private static final Map TRACK_NAME_TO_ROTATION_DEGREES; + + static { + Map trackNameToRotationDegrees = new HashMap<>(); + trackNameToRotationDegrees.put("htc_video_rotA-000", 0); + trackNameToRotationDegrees.put("htc_video_rotA-090", 90); + trackNameToRotationDegrees.put("htc_video_rotA-180", 180); + trackNameToRotationDegrees.put("htc_video_rotA-270", 270); + TRACK_NAME_TO_ROTATION_DEGREES = Collections.unmodifiableMap(trackNameToRotationDegrees); + } + private final EbmlReader reader; private final VarintReader varintReader; private final SparseArray tracks; @@ -332,8 +390,8 @@ public class MatroskaExtractor implements Extractor { private final ParsableByteArray subtitleSample; private final ParsableByteArray encryptionInitializationVector; private final ParsableByteArray encryptionSubsampleData; - private final ParsableByteArray blockAdditionalData; - private ByteBuffer encryptionSubsampleDataBuffer; + private final ParsableByteArray supplementalData; + private @MonotonicNonNull ByteBuffer encryptionSubsampleDataBuffer; private long segmentContentSize; private long segmentContentPosition = C.POSITION_UNSET; @@ -342,7 +400,7 @@ public class MatroskaExtractor implements Extractor { private long durationUs = C.TIME_UNSET; // The track corresponding to the current TrackEntry element, or null. - private Track currentTrack; + @Nullable private Track currentTrack; // Whether a seek map has been sent to the output. private boolean sentSeekMap; @@ -356,8 +414,8 @@ public class MatroskaExtractor implements Extractor { private long cuesContentPosition = C.POSITION_UNSET; private long seekPositionAfterBuildingCues = C.POSITION_UNSET; private long clusterTimecodeUs = C.TIME_UNSET; - private LongArray cueTimesUs; - private LongArray cueClusterPositions; + @Nullable private LongArray cueTimesUs; + @Nullable private LongArray cueClusterPositions; private boolean seenClusterPositionForCurrentCuePoint; // Reading state. 
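// --- Editorial aside (not part of the upstream diff) -------------------------------------------
// The three prefix templates above (SUBRIP_PREFIX, SSA_PREFIX, VTT_PREFIX) all carry a placeholder
// end timecode that must later be overwritten with the block duration, with the result copied over
// the bytes at the corresponding *_PREFIX_END_TIMECODE_OFFSET. A minimal, self-contained sketch of
// that conversion, assuming only the format strings and scaling factors defined above; the class
// and method names here are illustrative, not the extractor's actual helpers.
import java.util.Locale;

final class SubtitleTimecodeSketch {
  private static final long MICROS_PER_SECOND = 1_000_000L;

  /** Renders a duration in microseconds with the given format and last-value scaling factor. */
  static String formatEndTimecode(long durationUs, String timecodeFormat, long lastValueScalingFactor) {
    long hours = durationUs / (3600 * MICROS_PER_SECOND);
    durationUs -= hours * 3600 * MICROS_PER_SECOND;
    long minutes = durationUs / (60 * MICROS_PER_SECOND);
    durationUs -= minutes * 60 * MICROS_PER_SECOND;
    long seconds = durationUs / MICROS_PER_SECOND;
    long lastValue = (durationUs - seconds * MICROS_PER_SECOND) / lastValueScalingFactor;
    return String.format(Locale.US, timecodeFormat, hours, minutes, seconds, lastValue);
  }

  public static void main(String[] args) {
    // 62.345 s with the SubRip format "%02d:%02d:%02d,%03d" and factor 1000 prints "00:01:02,345".
    System.out.println(formatEndTimecode(62_345_000L, "%02d:%02d:%02d,%03d", 1000));
    // The SSA variant divides by 10_000 (centiseconds); the VTT variant uses 1000 with a '.' separator.
  }
}
// ------------------------------------------------------------------------------------------------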
@@ -372,10 +430,10 @@ public class MatroskaExtractor implements Extractor { private int[] blockSampleSizes; private int blockTrackNumber; private int blockTrackNumberLength; - @C.BufferFlags - private int blockFlags; + private @C.BufferFlags int blockFlags; private int blockAdditionalId; private boolean blockHasReferenceBlock; + private long blockGroupDiscardPaddingNs; // Sample writing state. private int sampleBytesRead; @@ -389,7 +447,7 @@ public class MatroskaExtractor implements Extractor { private boolean sampleInitializationVectorRead; // Extractor outputs. - private ExtractorOutput extractorOutput; + private @MonotonicNonNull ExtractorOutput extractorOutput; public MatroskaExtractor() { this(0); @@ -414,11 +472,12 @@ public MatroskaExtractor(@Flags int flags) { subtitleSample = new ParsableByteArray(); encryptionInitializationVector = new ParsableByteArray(ENCRYPTION_IV_SIZE); encryptionSubsampleData = new ParsableByteArray(); - blockAdditionalData = new ParsableByteArray(); + supplementalData = new ParsableByteArray(); + blockSampleSizes = new int[1]; } @Override - public final boolean sniff(ExtractorInput input) throws IOException, InterruptedException { + public final boolean sniff(ExtractorInput input) throws IOException { return new Sniffer().sniff(input); } @@ -446,8 +505,7 @@ public final void release() { } @Override - public final int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + public final int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { haveOutputSample = false; boolean continueReading = true; while (continueReading && !haveOutputSample) { @@ -458,7 +516,9 @@ public final int read(ExtractorInput input, PositionHolder seekPosition) } if (!continueReading) { for (int i = 0; i < tracks.size(); i++) { - tracks.valueAt(i).outputPendingSampleMetadata(); + Track track = tracks.valueAt(i); + track.assertOutputInitialized(); + track.outputPendingSampleMetadata(); } return Extractor.RESULT_END_OF_INPUT; } @@ -471,8 +531,7 @@ public final int read(ExtractorInput input, PositionHolder seekPosition) * @see EbmlProcessor#getElementType(int) */ @CallSuper - @EbmlProcessor.ElementType - protected int getElementType(int id) { + protected @EbmlProcessor.ElementType int getElementType(int id) { switch (id) { case ID_EBML: case ID_SEGMENT: @@ -482,6 +541,7 @@ protected int getElementType(int id) { case ID_CLUSTER: case ID_TRACKS: case ID_TRACK_ENTRY: + case ID_BLOCK_ADDITION_MAPPING: case ID_AUDIO: case ID_VIDEO: case ID_CONTENT_ENCODINGS: @@ -516,8 +576,10 @@ protected int getElementType(int id) { case ID_FLAG_FORCED: case ID_DEFAULT_DURATION: case ID_MAX_BLOCK_ADDITION_ID: + case ID_BLOCK_ADD_ID_TYPE: case ID_CODEC_DELAY: case ID_SEEK_PRE_ROLL: + case ID_DISCARD_PADDING: case ID_CHANNELS: case ID_AUDIO_BIT_DEPTH: case ID_CONTENT_ENCODING_ORDER: @@ -543,6 +605,7 @@ protected int getElementType(int id) { case ID_LANGUAGE: return EbmlProcessor.ELEMENT_TYPE_STRING; case ID_SEEK_ID: + case ID_BLOCK_ADD_ID_EXTRA_DATA: case ID_CONTENT_COMPRESSION_SETTINGS: case ID_CONTENT_ENCRYPTION_KEY_ID: case ID_SIMPLE_BLOCK: @@ -590,11 +653,13 @@ protected boolean isLevel1Element(int id) { @CallSuper protected void startMasterElement(int id, long contentPosition, long contentSize) throws ParserException { + assertInitialized(); switch (id) { case ID_SEGMENT: if (segmentContentPosition != C.POSITION_UNSET && segmentContentPosition != contentPosition) { - throw new ParserException("Multiple Segment elements not 
supported"); + throw ParserException.createForMalformedContainer( + "Multiple Segment elements not supported", /* cause= */ null); } segmentContentPosition = contentPosition; segmentContentSize = contentSize; @@ -626,18 +691,19 @@ protected void startMasterElement(int id, long contentPosition, long contentSize break; case ID_BLOCK_GROUP: blockHasReferenceBlock = false; + blockGroupDiscardPaddingNs = 0L; break; case ID_CONTENT_ENCODING: // TODO: check and fail if more than one content encoding is present. break; case ID_CONTENT_ENCRYPTION: - currentTrack.hasContentEncryption = true; + getCurrentTrack(id).hasContentEncryption = true; break; case ID_TRACK_ENTRY: currentTrack = new Track(); break; case ID_MASTERING_METADATA: - currentTrack.hasColorInfo = true; + getCurrentTrack(id).hasColorInfo = true; break; default: break; @@ -651,6 +717,7 @@ protected void startMasterElement(int id, long contentPosition, long contentSize */ @CallSuper protected void endMasterElement(int id) throws ParserException { + assertInitialized(); switch (id) { case ID_SEGMENT_INFO: if (timecodeScale == C.TIME_UNSET) { @@ -663,7 +730,8 @@ protected void endMasterElement(int id) throws ParserException { break; case ID_SEEK: if (seekEntryId == UNSET_ENTRY_ID || seekEntryPosition == C.POSITION_UNSET) { - throw new ParserException("Mandatory element SeekID or SeekPosition not found"); + throw ParserException.createForMalformedContainer( + "Mandatory element SeekID or SeekPosition not found", /* cause= */ null); } if (seekEntryId == ID_CUES) { cuesContentPosition = seekEntryPosition; @@ -671,23 +739,35 @@ protected void endMasterElement(int id) throws ParserException { break; case ID_CUES: if (!sentSeekMap) { - extractorOutput.seekMap(buildSeekMap()); + extractorOutput.seekMap(buildSeekMap(cueTimesUs, cueClusterPositions)); sentSeekMap = true; } else { // We have already built the cues. Ignore. } + this.cueTimesUs = null; + this.cueClusterPositions = null; break; case ID_BLOCK_GROUP: if (blockState != BLOCK_STATE_DATA) { // We've skipped this block (due to incompatible track number). return; } + Track track = tracks.get(blockTrackNumber); + track.assertOutputInitialized(); + if (blockGroupDiscardPaddingNs > 0L && CODEC_ID_OPUS.equals(track.codecId)) { + // For Opus, attach DiscardPadding to the block group samples as supplemental data. + supplementalData.reset( + ByteBuffer.allocate(8) + .order(ByteOrder.LITTLE_ENDIAN) + .putLong(blockGroupDiscardPaddingNs) + .array()); + } + // Commit sample metadata. 
int sampleOffset = 0; for (int i = 0; i < blockSampleCount; i++) { sampleOffset += blockSampleSizes[i]; } - Track track = tracks.get(blockTrackNumber); for (int i = 0; i < blockSampleCount; i++) { long sampleTimeUs = blockTimeUs + (i * track.defaultSampleDurationNs) / 1000; int sampleFlags = blockFlags; @@ -703,29 +783,42 @@ protected void endMasterElement(int id) throws ParserException { blockState = BLOCK_STATE_START; break; case ID_CONTENT_ENCODING: + assertInTrackEntry(id); if (currentTrack.hasContentEncryption) { if (currentTrack.cryptoData == null) { - throw new ParserException("Encrypted Track found but ContentEncKeyID was not found"); + throw ParserException.createForMalformedContainer( + "Encrypted Track found but ContentEncKeyID was not found", /* cause= */ null); } - currentTrack.drmInitData = new DrmInitData(new SchemeData(C.UUID_NIL, - MimeTypes.VIDEO_WEBM, currentTrack.cryptoData.encryptionKey)); + currentTrack.drmInitData = + new DrmInitData( + new SchemeData( + C.UUID_NIL, MimeTypes.VIDEO_WEBM, currentTrack.cryptoData.encryptionKey)); } break; case ID_CONTENT_ENCODINGS: + assertInTrackEntry(id); if (currentTrack.hasContentEncryption && currentTrack.sampleStrippedBytes != null) { - throw new ParserException("Combining encryption and compression is not supported"); + throw ParserException.createForMalformedContainer( + "Combining encryption and compression is not supported", /* cause= */ null); } break; case ID_TRACK_ENTRY: - if (isCodecSupported(currentTrack.codecId)) { - currentTrack.initializeOutput(extractorOutput, currentTrack.number); - tracks.put(currentTrack.number, currentTrack); + Track currentTrack = checkStateNotNull(this.currentTrack); + if (currentTrack.codecId == null) { + throw ParserException.createForMalformedContainer( + "CodecId is missing in TrackEntry element", /* cause= */ null); + } else { + if (isCodecSupported(currentTrack.codecId)) { + currentTrack.initializeOutput(extractorOutput, currentTrack.number); + tracks.put(currentTrack.number, currentTrack); + } } - currentTrack = null; + this.currentTrack = null; break; case ID_TRACKS: if (tracks.size() == 0) { - throw new ParserException("No valid tracks were found"); + throw ParserException.createForMalformedContainer( + "No valid tracks were found", /* cause= */ null); } extractorOutput.endTracks(); break; @@ -745,13 +838,15 @@ protected void integerElement(int id, long value) throws ParserException { case ID_EBML_READ_VERSION: // Validate that EBMLReadVersion is supported. This extractor only supports v1. if (value != 1) { - throw new ParserException("EBMLReadVersion " + value + " not supported"); + throw ParserException.createForMalformedContainer( + "EBMLReadVersion " + value + " not supported", /* cause= */ null); } break; case ID_DOC_TYPE_READ_VERSION: // Validate that DocTypeReadVersion is supported. This extractor only supports up to v2. 
if (value < 1 || value > 2) { - throw new ParserException("DocTypeReadVersion " + value + " not supported"); + throw ParserException.createForMalformedContainer( + "DocTypeReadVersion " + value + " not supported", /* cause= */ null); } break; case ID_SEEK_POSITION: @@ -763,49 +858,55 @@ protected void integerElement(int id, long value) throws ParserException { timecodeScale = value; break; case ID_PIXEL_WIDTH: - currentTrack.width = (int) value; + getCurrentTrack(id).width = (int) value; break; case ID_PIXEL_HEIGHT: - currentTrack.height = (int) value; + getCurrentTrack(id).height = (int) value; break; case ID_DISPLAY_WIDTH: - currentTrack.displayWidth = (int) value; + getCurrentTrack(id).displayWidth = (int) value; break; case ID_DISPLAY_HEIGHT: - currentTrack.displayHeight = (int) value; + getCurrentTrack(id).displayHeight = (int) value; break; case ID_DISPLAY_UNIT: - currentTrack.displayUnit = (int) value; + getCurrentTrack(id).displayUnit = (int) value; break; case ID_TRACK_NUMBER: - currentTrack.number = (int) value; + getCurrentTrack(id).number = (int) value; break; case ID_FLAG_DEFAULT: - currentTrack.flagDefault = value == 1; + getCurrentTrack(id).flagDefault = value == 1; break; case ID_FLAG_FORCED: - currentTrack.flagForced = value == 1; + getCurrentTrack(id).flagForced = value == 1; break; case ID_TRACK_TYPE: - currentTrack.type = (int) value; + getCurrentTrack(id).type = (int) value; break; case ID_DEFAULT_DURATION: - currentTrack.defaultSampleDurationNs = (int) value; + getCurrentTrack(id).defaultSampleDurationNs = (int) value; break; case ID_MAX_BLOCK_ADDITION_ID: - currentTrack.maxBlockAdditionId = (int) value; + getCurrentTrack(id).maxBlockAdditionId = (int) value; + break; + case ID_BLOCK_ADD_ID_TYPE: + getCurrentTrack(id).blockAddIdType = (int) value; break; case ID_CODEC_DELAY: - currentTrack.codecDelayNs = value; + getCurrentTrack(id).codecDelayNs = value; break; case ID_SEEK_PRE_ROLL: - currentTrack.seekPreRollNs = value; + getCurrentTrack(id).seekPreRollNs = value; + break; + case ID_DISCARD_PADDING: + blockGroupDiscardPaddingNs = value; break; case ID_CHANNELS: - currentTrack.channelCount = (int) value; + getCurrentTrack(id).channelCount = (int) value; break; case ID_AUDIO_BIT_DEPTH: - currentTrack.audioBitDepth = (int) value; + getCurrentTrack(id).audioBitDepth = (int) value; break; case ID_REFERENCE_BLOCK: blockHasReferenceBlock = true; @@ -813,38 +914,45 @@ protected void integerElement(int id, long value) throws ParserException { case ID_CONTENT_ENCODING_ORDER: // This extractor only supports one ContentEncoding element and hence the order has to be 0. if (value != 0) { - throw new ParserException("ContentEncodingOrder " + value + " not supported"); + throw ParserException.createForMalformedContainer( + "ContentEncodingOrder " + value + " not supported", /* cause= */ null); } break; case ID_CONTENT_ENCODING_SCOPE: // This extractor only supports the scope of all frames. if (value != 1) { - throw new ParserException("ContentEncodingScope " + value + " not supported"); + throw ParserException.createForMalformedContainer( + "ContentEncodingScope " + value + " not supported", /* cause= */ null); } break; case ID_CONTENT_COMPRESSION_ALGORITHM: // This extractor only supports header stripping. 
if (value != 3) { - throw new ParserException("ContentCompAlgo " + value + " not supported"); + throw ParserException.createForMalformedContainer( + "ContentCompAlgo " + value + " not supported", /* cause= */ null); } break; case ID_CONTENT_ENCRYPTION_ALGORITHM: // Only the value 5 (AES) is allowed according to the WebM specification. if (value != 5) { - throw new ParserException("ContentEncAlgo " + value + " not supported"); + throw ParserException.createForMalformedContainer( + "ContentEncAlgo " + value + " not supported", /* cause= */ null); } break; case ID_CONTENT_ENCRYPTION_AES_SETTINGS_CIPHER_MODE: // Only the value 1 is allowed according to the WebM specification. if (value != 1) { - throw new ParserException("AESSettingsCipherMode " + value + " not supported"); + throw ParserException.createForMalformedContainer( + "AESSettingsCipherMode " + value + " not supported", /* cause= */ null); } break; case ID_CUE_TIME: + assertInCues(id); cueTimesUs.add(scaleTimecodeToUs(value)); break; case ID_CUE_CLUSTER_POSITION: if (!seenClusterPositionForCurrentCuePoint) { + assertInCues(id); // If there's more than one video/audio track, then there could be more than one // CueTrackPositions within a single CuePoint. In such a case, ignore all but the first // one (since the cluster position will be quite close for all the tracks). @@ -860,6 +968,7 @@ protected void integerElement(int id, long value) throws ParserException { break; case ID_STEREO_MODE: int layout = (int) value; + assertInTrackEntry(id); switch (layout) { case 0: currentTrack.stereoMode = C.STEREO_MODE_MONO; @@ -878,44 +987,24 @@ protected void integerElement(int id, long value) throws ParserException { } break; case ID_COLOUR_PRIMARIES: + assertInTrackEntry(id); currentTrack.hasColorInfo = true; - switch ((int) value) { - case 1: - currentTrack.colorSpace = C.COLOR_SPACE_BT709; - break; - case 4: // BT.470M. - case 5: // BT.470BG. - case 6: // SMPTE 170M. - case 7: // SMPTE 240M. - currentTrack.colorSpace = C.COLOR_SPACE_BT601; - break; - case 9: - currentTrack.colorSpace = C.COLOR_SPACE_BT2020; - break; - default: - break; + int colorSpace = ColorInfo.isoColorPrimariesToColorSpace((int) value); + if (colorSpace != Format.NO_VALUE) { + currentTrack.colorSpace = colorSpace; } break; case ID_COLOUR_TRANSFER: - switch ((int) value) { - case 1: // BT.709. - case 6: // SMPTE 170M. - case 7: // SMPTE 240M. - currentTrack.colorTransfer = C.COLOR_TRANSFER_SDR; - break; - case 16: - currentTrack.colorTransfer = C.COLOR_TRANSFER_ST2084; - break; - case 18: - currentTrack.colorTransfer = C.COLOR_TRANSFER_HLG; - break; - default: - break; + assertInTrackEntry(id); + int colorTransfer = ColorInfo.isoTransferCharacteristicsToColorTransfer((int) value); + if (colorTransfer != Format.NO_VALUE) { + currentTrack.colorTransfer = colorTransfer; } break; case ID_COLOUR_RANGE: - switch((int) value) { - case 1: // Broadcast range. + assertInTrackEntry(id); + switch ((int) value) { + case 1: // Broadcast range. 
currentTrack.colorRange = C.COLOR_RANGE_LIMITED; break; case 2: @@ -926,12 +1015,13 @@ protected void integerElement(int id, long value) throws ParserException { } break; case ID_MAX_CLL: - currentTrack.maxContentLuminance = (int) value; + getCurrentTrack(id).maxContentLuminance = (int) value; break; case ID_MAX_FALL: - currentTrack.maxFrameAverageLuminance = (int) value; + getCurrentTrack(id).maxFrameAverageLuminance = (int) value; break; case ID_PROJECTION_TYPE: + assertInTrackEntry(id); switch ((int) value) { case 0: currentTrack.projectionType = C.PROJECTION_RECTANGULAR; @@ -969,46 +1059,46 @@ protected void floatElement(int id, double value) throws ParserException { durationTimecode = (long) value; break; case ID_SAMPLING_FREQUENCY: - currentTrack.sampleRate = (int) value; + getCurrentTrack(id).sampleRate = (int) value; break; case ID_PRIMARY_R_CHROMATICITY_X: - currentTrack.primaryRChromaticityX = (float) value; + getCurrentTrack(id).primaryRChromaticityX = (float) value; break; case ID_PRIMARY_R_CHROMATICITY_Y: - currentTrack.primaryRChromaticityY = (float) value; + getCurrentTrack(id).primaryRChromaticityY = (float) value; break; case ID_PRIMARY_G_CHROMATICITY_X: - currentTrack.primaryGChromaticityX = (float) value; + getCurrentTrack(id).primaryGChromaticityX = (float) value; break; case ID_PRIMARY_G_CHROMATICITY_Y: - currentTrack.primaryGChromaticityY = (float) value; + getCurrentTrack(id).primaryGChromaticityY = (float) value; break; case ID_PRIMARY_B_CHROMATICITY_X: - currentTrack.primaryBChromaticityX = (float) value; + getCurrentTrack(id).primaryBChromaticityX = (float) value; break; case ID_PRIMARY_B_CHROMATICITY_Y: - currentTrack.primaryBChromaticityY = (float) value; + getCurrentTrack(id).primaryBChromaticityY = (float) value; break; case ID_WHITE_POINT_CHROMATICITY_X: - currentTrack.whitePointChromaticityX = (float) value; + getCurrentTrack(id).whitePointChromaticityX = (float) value; break; case ID_WHITE_POINT_CHROMATICITY_Y: - currentTrack.whitePointChromaticityY = (float) value; + getCurrentTrack(id).whitePointChromaticityY = (float) value; break; case ID_LUMNINANCE_MAX: - currentTrack.maxMasteringLuminance = (float) value; + getCurrentTrack(id).maxMasteringLuminance = (float) value; break; case ID_LUMNINANCE_MIN: - currentTrack.minMasteringLuminance = (float) value; + getCurrentTrack(id).minMasteringLuminance = (float) value; break; case ID_PROJECTION_POSE_YAW: - currentTrack.projectionPoseYaw = (float) value; + getCurrentTrack(id).projectionPoseYaw = (float) value; break; case ID_PROJECTION_POSE_PITCH: - currentTrack.projectionPosePitch = (float) value; + getCurrentTrack(id).projectionPosePitch = (float) value; break; case ID_PROJECTION_POSE_ROLL: - currentTrack.projectionPoseRoll = (float) value; + getCurrentTrack(id).projectionPoseRoll = (float) value; break; default: break; @@ -1026,17 +1116,18 @@ protected void stringElement(int id, String value) throws ParserException { case ID_DOC_TYPE: // Validate that DocType is supported. 
if (!DOC_TYPE_WEBM.equals(value) && !DOC_TYPE_MATROSKA.equals(value)) { - throw new ParserException("DocType " + value + " not supported"); + throw ParserException.createForMalformedContainer( + "DocType " + value + " not supported", /* cause= */ null); } break; case ID_NAME: - currentTrack.name = value; + getCurrentTrack(id).name = value; break; case ID_CODEC_ID: - currentTrack.codecId = value; + getCurrentTrack(id).codecId = value; break; case ID_LANGUAGE: - currentTrack.language = value; + getCurrentTrack(id).language = value; break; default: break; @@ -1049,24 +1140,29 @@ protected void stringElement(int id, String value) throws ParserException { * @see EbmlProcessor#binaryElement(int, int, ExtractorInput) */ @CallSuper - protected void binaryElement(int id, int contentSize, ExtractorInput input) - throws IOException, InterruptedException { + protected void binaryElement(int id, int contentSize, ExtractorInput input) throws IOException { switch (id) { case ID_SEEK_ID: - Arrays.fill(seekEntryIdBytes.data, (byte) 0); - input.readFully(seekEntryIdBytes.data, 4 - contentSize, contentSize); + Arrays.fill(seekEntryIdBytes.getData(), (byte) 0); + input.readFully(seekEntryIdBytes.getData(), 4 - contentSize, contentSize); seekEntryIdBytes.setPosition(0); seekEntryId = (int) seekEntryIdBytes.readUnsignedInt(); break; + case ID_BLOCK_ADD_ID_EXTRA_DATA: + handleBlockAddIDExtraData(getCurrentTrack(id), input, contentSize); + break; case ID_CODEC_PRIVATE: + assertInTrackEntry(id); currentTrack.codecPrivate = new byte[contentSize]; input.readFully(currentTrack.codecPrivate, 0, contentSize); break; case ID_PROJECTION_PRIVATE: + assertInTrackEntry(id); currentTrack.projectionData = new byte[contentSize]; input.readFully(currentTrack.projectionData, 0, contentSize); break; case ID_CONTENT_COMPRESSION_SETTINGS: + assertInTrackEntry(id); // This extractor only supports header stripping, so the payload is the stripped bytes. currentTrack.sampleStrippedBytes = new byte[contentSize]; input.readFully(currentTrack.sampleStrippedBytes, 0, contentSize); @@ -1074,8 +1170,9 @@ protected void binaryElement(int id, int contentSize, ExtractorInput input) case ID_CONTENT_ENCRYPTION_KEY_ID: byte[] encryptionKey = new byte[contentSize]; input.readFully(encryptionKey, 0, contentSize); - currentTrack.cryptoData = new TrackOutput.CryptoData(C.CRYPTO_MODE_AES_CTR, encryptionKey, - 0, 0); // We assume patternless AES-CTR. + getCurrentTrack(id).cryptoData = + new TrackOutput.CryptoData( + C.CRYPTO_MODE_AES_CTR, encryptionKey, 0, 0); // We assume patternless AES-CTR. break; case ID_SIMPLE_BLOCK: case ID_BLOCK: @@ -1089,7 +1186,7 @@ protected void binaryElement(int id, int contentSize, ExtractorInput input) blockTrackNumberLength = varintReader.getLastLength(); blockDurationUs = C.TIME_UNSET; blockState = BLOCK_STATE_HEADER; - scratch.reset(); + scratch.reset(/* limit= */ 0); } Track track = tracks.get(blockTrackNumber); @@ -1101,10 +1198,12 @@ protected void binaryElement(int id, int contentSize, ExtractorInput input) return; } + track.assertOutputInitialized(); + if (blockState == BLOCK_STATE_HEADER) { // Read the relative timecode (2 bytes) and flags (1 byte). 
readScratch(input, 3); - int lacing = (scratch.data[2] & 0x06) >> 1; + int lacing = (scratch.getData()[2] & 0x06) >> 1; if (lacing == LACING_NONE) { blockSampleCount = 1; blockSampleSizes = ensureArrayCapacity(blockSampleSizes, 1); @@ -1112,7 +1211,7 @@ protected void binaryElement(int id, int contentSize, ExtractorInput input) } else { // Read the sample count (1 byte). readScratch(input, 4); - blockSampleCount = (scratch.data[3] & 0xFF) + 1; + blockSampleCount = (scratch.getData()[3] & 0xFF) + 1; blockSampleSizes = ensureArrayCapacity(blockSampleSizes, blockSampleCount); if (lacing == LACING_FIXED_SIZE) { int blockLacingSampleSize = @@ -1126,7 +1225,7 @@ protected void binaryElement(int id, int contentSize, ExtractorInput input) int byteValue; do { readScratch(input, ++headerSize); - byteValue = scratch.data[headerSize - 1] & 0xFF; + byteValue = scratch.getData()[headerSize - 1] & 0xFF; blockSampleSizes[sampleIndex] += byteValue; } while (byteValue == 0xFF); totalSamplesSize += blockSampleSizes[sampleIndex]; @@ -1139,20 +1238,21 @@ protected void binaryElement(int id, int contentSize, ExtractorInput input) for (int sampleIndex = 0; sampleIndex < blockSampleCount - 1; sampleIndex++) { blockSampleSizes[sampleIndex] = 0; readScratch(input, ++headerSize); - if (scratch.data[headerSize - 1] == 0) { - throw new ParserException("No valid varint length mask found"); + if (scratch.getData()[headerSize - 1] == 0) { + throw ParserException.createForMalformedContainer( + "No valid varint length mask found", /* cause= */ null); } long readValue = 0; for (int i = 0; i < 8; i++) { int lengthMask = 1 << (7 - i); - if ((scratch.data[headerSize - 1] & lengthMask) != 0) { + if ((scratch.getData()[headerSize - 1] & lengthMask) != 0) { int readPosition = headerSize - 1; headerSize += i; readScratch(input, headerSize); - readValue = (scratch.data[readPosition++] & 0xFF) & ~lengthMask; + readValue = (scratch.getData()[readPosition++] & 0xFF) & ~lengthMask; while (readPosition < headerSize) { readValue <<= 8; - readValue |= (scratch.data[readPosition++] & 0xFF); + readValue |= (scratch.getData()[readPosition++] & 0xFF); } // The first read value is the first size. Later values are signed offsets. if (sampleIndex > 0) { @@ -1162,7 +1262,8 @@ protected void binaryElement(int id, int contentSize, ExtractorInput input) } } if (readValue < Integer.MIN_VALUE || readValue > Integer.MAX_VALUE) { - throw new ParserException("EBML lacing sample size out of range."); + throw ParserException.createForMalformedContainer( + "EBML lacing sample size out of range.", /* cause= */ null); } int intReadValue = (int) readValue; blockSampleSizes[sampleIndex] = @@ -1175,17 +1276,17 @@ protected void binaryElement(int id, int contentSize, ExtractorInput input) contentSize - blockTrackNumberLength - headerSize - totalSamplesSize; } else { // Lacing is always in the range 0--3. - throw new ParserException("Unexpected lacing value: " + lacing); + throw ParserException.createForMalformedContainer( + "Unexpected lacing value: " + lacing, /* cause= */ null); } } - int timecode = (scratch.data[0] << 8) | (scratch.data[1] & 0xFF); + int timecode = (scratch.getData()[0] << 8) | (scratch.getData()[1] & 0xFF); blockTimeUs = clusterTimecodeUs + scaleTimecodeToUs(timecode); - boolean isInvisible = (scratch.data[2] & 0x08) == 0x08; - boolean isKeyframe = track.type == TRACK_TYPE_AUDIO - || (id == ID_SIMPLE_BLOCK && (scratch.data[2] & 0x80) == 0x80); - blockFlags = (isKeyframe ? C.BUFFER_FLAG_KEY_FRAME : 0) - | (isInvisible ? 
C.BUFFER_FLAG_DECODE_ONLY : 0); + boolean isKeyframe = + track.type == TRACK_TYPE_AUDIO + || (id == ID_SIMPLE_BLOCK && (scratch.getData()[2] & 0x80) == 0x80); + blockFlags = isKeyframe ? C.BUFFER_FLAG_KEY_FRAME : 0; blockState = BLOCK_STATE_DATA; blockSampleIndex = 0; } @@ -1194,7 +1295,9 @@ protected void binaryElement(int id, int contentSize, ExtractorInput input) // For SimpleBlock, we can write sample data and immediately commit the corresponding // sample metadata. while (blockSampleIndex < blockSampleCount) { - int sampleSize = writeSampleData(input, track, blockSampleSizes[blockSampleIndex]); + int sampleSize = + writeSampleData( + input, track, blockSampleSizes[blockSampleIndex], /* isBlockGroup= */ false); long sampleTimeUs = blockTimeUs + (blockSampleIndex * track.defaultSampleDurationNs) / 1000; commitSampleToOutput(track, sampleTimeUs, blockFlags, sampleSize, /* offset= */ 0); @@ -1209,7 +1312,8 @@ protected void binaryElement(int id, int contentSize, ExtractorInput input) // the sample data, storing the final sample sizes for when we commit the metadata. while (blockSampleIndex < blockSampleCount) { blockSampleSizes[blockSampleIndex] = - writeSampleData(input, track, blockSampleSizes[blockSampleIndex]); + writeSampleData( + input, track, blockSampleSizes[blockSampleIndex], /* isBlockGroup= */ true); blockSampleIndex++; } } @@ -1223,35 +1327,87 @@ protected void binaryElement(int id, int contentSize, ExtractorInput input) tracks.get(blockTrackNumber), blockAdditionalId, input, contentSize); break; default: - throw new ParserException("Unexpected id: " + id); + throw ParserException.createForMalformedContainer( + "Unexpected id: " + id, /* cause= */ null); + } + } + + protected void handleBlockAddIDExtraData(Track track, ExtractorInput input, int contentSize) + throws IOException { + if (track.blockAddIdType == BLOCK_ADD_ID_TYPE_DVVC + || track.blockAddIdType == BLOCK_ADD_ID_TYPE_DVCC) { + track.dolbyVisionConfigBytes = new byte[contentSize]; + input.readFully(track.dolbyVisionConfigBytes, 0, contentSize); + } else { + // Unhandled BlockAddIDExtraData. + input.skipFully(contentSize); } } protected void handleBlockAdditionalData( Track track, int blockAdditionalId, ExtractorInput input, int contentSize) - throws IOException, InterruptedException { + throws IOException { if (blockAdditionalId == BLOCK_ADDITIONAL_ID_VP9_ITU_T_35 && CODEC_ID_VP9.equals(track.codecId)) { - blockAdditionalData.reset(contentSize); - input.readFully(blockAdditionalData.data, 0, contentSize); + supplementalData.reset(contentSize); + input.readFully(supplementalData.getData(), 0, contentSize); } else { // Unhandled block additional data. input.skipFully(contentSize); } } + @EnsuresNonNull("currentTrack") + private void assertInTrackEntry(int id) throws ParserException { + if (currentTrack == null) { + throw ParserException.createForMalformedContainer( + "Element " + id + " must be in a TrackEntry", /* cause= */ null); + } + } + + @EnsuresNonNull({"cueTimesUs", "cueClusterPositions"}) + private void assertInCues(int id) throws ParserException { + if (cueTimesUs == null || cueClusterPositions == null) { + throw ParserException.createForMalformedContainer( + "Element " + id + " must be in a Cues", /* cause= */ null); + } + } + + /** + * Returns the track corresponding to the current TrackEntry element. + * + * @throws ParserException if the element id is not in a TrackEntry. 
+ */ + protected Track getCurrentTrack(int currentElementId) throws ParserException { + assertInTrackEntry(currentElementId); + return currentTrack; + } + + @RequiresNonNull("#1.output") private void commitSampleToOutput( Track track, long timeUs, @C.BufferFlags int flags, int size, int offset) { if (track.trueHdSampleRechunker != null) { - track.trueHdSampleRechunker.sampleMetadata(track, timeUs, flags, size, offset); + track.trueHdSampleRechunker.sampleMetadata( + track.output, timeUs, flags, size, offset, track.cryptoData); } else { - if (CODEC_ID_SUBRIP.equals(track.codecId) || CODEC_ID_ASS.equals(track.codecId)) { + if (CODEC_ID_SUBRIP.equals(track.codecId) + || CODEC_ID_ASS.equals(track.codecId) + || CODEC_ID_VTT.equals(track.codecId)) { if (blockSampleCount > 1) { Log.w(TAG, "Skipping subtitle sample in laced block."); } else if (blockDurationUs == C.TIME_UNSET) { Log.w(TAG, "Skipping subtitle sample with no duration."); } else { - setSubtitleEndTime(track.codecId, blockDurationUs, subtitleSample.data); + setSubtitleEndTime(track.codecId, blockDurationUs, subtitleSample.getData()); + // The Matroska spec doesn't clearly define whether subtitle samples are null-terminated + // or the sample should instead be sized precisely. We truncate the sample at a null-byte + // to gracefully handle null-terminated strings followed by garbage bytes. + for (int i = subtitleSample.getPosition(); i < subtitleSample.limit(); i++) { + if (subtitleSample.getData()[i] == 0) { + subtitleSample.setLimit(i); + break; + } + } // Note: If we ever want to support DRM protected subtitles then we'll need to output the // appropriate encryption data here. track.output.sampleData(subtitleSample, subtitleSample.limit()); @@ -1263,12 +1419,13 @@ private void commitSampleToOutput( if (blockSampleCount > 1) { // There were multiple samples in the block. Appending the additional data to the last // sample doesn't make sense. Skip instead. - flags &= ~C.BUFFER_FLAG_HAS_SUPPLEMENTAL_DATA; + supplementalData.reset(/* limit= */ 0); } else { // Append supplemental data. - int blockAdditionalSize = blockAdditionalData.limit(); - track.output.sampleData(blockAdditionalData, blockAdditionalSize); - size += blockAdditionalSize; + int supplementalDataSize = supplementalData.limit(); + track.output.sampleData( + supplementalData, supplementalDataSize, TrackOutput.SAMPLE_DATA_PART_SUPPLEMENTAL); + size += supplementalDataSize; } } track.output.sampleMetadata(timeUs, flags, size, offset, track.cryptoData); @@ -1280,16 +1437,14 @@ private void commitSampleToOutput( * Ensures {@link #scratch} contains at least {@code requiredLength} bytes of data, reading from * the extractor input if necessary. */ - private void readScratch(ExtractorInput input, int requiredLength) - throws IOException, InterruptedException { + private void readScratch(ExtractorInput input, int requiredLength) throws IOException { if (scratch.limit() >= requiredLength) { return; } if (scratch.capacity() < requiredLength) { - scratch.reset(Arrays.copyOf(scratch.data, Math.max(scratch.data.length * 2, requiredLength)), - scratch.limit()); + scratch.ensureCapacity(max(scratch.capacity() * 2, requiredLength)); } - input.readFully(scratch.data, scratch.limit(), requiredLength - scratch.limit()); + input.readFully(scratch.getData(), scratch.limit(), requiredLength - scratch.limit()); scratch.setLimit(requiredLength); } @@ -1299,18 +1454,22 @@ private void readScratch(ExtractorInput input, int requiredLength) * @param input The input from which to read sample data. 
* @param track The track to output the sample to. * @param size The size of the sample data on the input side. + * @param isBlockGroup Whether the samples are from a BlockGroup. * @return The final size of the written sample. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread is interrupted. */ - private int writeSampleData(ExtractorInput input, Track track, int size) - throws IOException, InterruptedException { + @RequiresNonNull("#2.output") + private int writeSampleData(ExtractorInput input, Track track, int size, boolean isBlockGroup) + throws IOException { if (CODEC_ID_SUBRIP.equals(track.codecId)) { writeSubtitleSampleData(input, SUBRIP_PREFIX, size); return finishWriteSampleData(); } else if (CODEC_ID_ASS.equals(track.codecId)) { writeSubtitleSampleData(input, SSA_PREFIX, size); return finishWriteSampleData(); + } else if (CODEC_ID_VTT.equals(track.codecId)) { + writeSubtitleSampleData(input, VTT_PREFIX, size); + return finishWriteSampleData(); } TrackOutput output = track.output; @@ -1320,12 +1479,13 @@ private int writeSampleData(ExtractorInput input, Track track, int size) // Clear the encrypted flag. blockFlags &= ~C.BUFFER_FLAG_ENCRYPTED; if (!sampleSignalByteRead) { - input.readFully(scratch.data, 0, 1); + input.readFully(scratch.getData(), 0, 1); sampleBytesRead++; - if ((scratch.data[0] & 0x80) == 0x80) { - throw new ParserException("Extension bit is set in signal byte"); + if ((scratch.getData()[0] & 0x80) == 0x80) { + throw ParserException.createForMalformedContainer( + "Extension bit is set in signal byte", /* cause= */ null); } - sampleSignalByte = scratch.data[0]; + sampleSignalByte = scratch.getData()[0]; sampleSignalByteRead = true; } boolean isEncrypted = (sampleSignalByte & 0x01) == 0x01; @@ -1333,22 +1493,26 @@ private int writeSampleData(ExtractorInput input, Track track, int size) boolean hasSubsampleEncryption = (sampleSignalByte & 0x02) == 0x02; blockFlags |= C.BUFFER_FLAG_ENCRYPTED; if (!sampleInitializationVectorRead) { - input.readFully(encryptionInitializationVector.data, 0, ENCRYPTION_IV_SIZE); + input.readFully(encryptionInitializationVector.getData(), 0, ENCRYPTION_IV_SIZE); sampleBytesRead += ENCRYPTION_IV_SIZE; sampleInitializationVectorRead = true; // Write the signal byte, containing the IV size and the subsample encryption flag. - scratch.data[0] = (byte) (ENCRYPTION_IV_SIZE | (hasSubsampleEncryption ? 0x80 : 0x00)); + scratch.getData()[0] = + (byte) (ENCRYPTION_IV_SIZE | (hasSubsampleEncryption ? 0x80 : 0x00)); scratch.setPosition(0); - output.sampleData(scratch, 1); + output.sampleData(scratch, 1, TrackOutput.SAMPLE_DATA_PART_ENCRYPTION); sampleBytesWritten++; // Write the IV. 
encryptionInitializationVector.setPosition(0); - output.sampleData(encryptionInitializationVector, ENCRYPTION_IV_SIZE); + output.sampleData( + encryptionInitializationVector, + ENCRYPTION_IV_SIZE, + TrackOutput.SAMPLE_DATA_PART_ENCRYPTION); sampleBytesWritten += ENCRYPTION_IV_SIZE; } if (hasSubsampleEncryption) { if (!samplePartitionCountRead) { - input.readFully(scratch.data, 0, 1); + input.readFully(scratch.getData(), 0, 1); sampleBytesRead++; scratch.setPosition(0); samplePartitionCount = scratch.readUnsignedByte(); @@ -1356,7 +1520,7 @@ private int writeSampleData(ExtractorInput input, Track track, int size) } int samplePartitionDataSize = samplePartitionCount * 4; scratch.reset(samplePartitionDataSize); - input.readFully(scratch.data, 0, samplePartitionDataSize); + input.readFully(scratch.getData(), 0, samplePartitionDataSize); sampleBytesRead += samplePartitionDataSize; short subsampleCount = (short) (1 + (samplePartitionCount / 2)); int subsampleDataSize = 2 + 6 * subsampleCount; @@ -1391,7 +1555,10 @@ private int writeSampleData(ExtractorInput input, Track track, int size) encryptionSubsampleDataBuffer.putInt(0); } encryptionSubsampleData.reset(encryptionSubsampleDataBuffer.array(), subsampleDataSize); - output.sampleData(encryptionSubsampleData, subsampleDataSize); + output.sampleData( + encryptionSubsampleData, + subsampleDataSize, + TrackOutput.SAMPLE_DATA_PART_ENCRYPTION); sampleBytesWritten += subsampleDataSize; } } @@ -1400,17 +1567,18 @@ private int writeSampleData(ExtractorInput input, Track track, int size) sampleStrippedBytes.reset(track.sampleStrippedBytes, track.sampleStrippedBytes.length); } - if (track.maxBlockAdditionId > 0) { + if (track.samplesHaveSupplementalData(isBlockGroup)) { blockFlags |= C.BUFFER_FLAG_HAS_SUPPLEMENTAL_DATA; - blockAdditionalData.reset(); + supplementalData.reset(/* limit= */ 0); // If there is supplemental data, the structure of the sample data is: - // sample size (4 bytes) || sample data || supplemental data + // encryption data (if any) || sample size (4 bytes) || sample data || supplemental data + int sampleSize = size + sampleStrippedBytes.limit() - sampleBytesRead; scratch.reset(/* limit= */ 4); - scratch.data[0] = (byte) ((size >> 24) & 0xFF); - scratch.data[1] = (byte) ((size >> 16) & 0xFF); - scratch.data[2] = (byte) ((size >> 8) & 0xFF); - scratch.data[3] = (byte) (size & 0xFF); - output.sampleData(scratch, 4); + scratch.getData()[0] = (byte) ((sampleSize >> 24) & 0xFF); + scratch.getData()[1] = (byte) ((sampleSize >> 16) & 0xFF); + scratch.getData()[2] = (byte) ((sampleSize >> 8) & 0xFF); + scratch.getData()[3] = (byte) (sampleSize & 0xFF); + output.sampleData(scratch, 4, TrackOutput.SAMPLE_DATA_PART_SUPPLEMENTAL); sampleBytesWritten += 4; } @@ -1423,7 +1591,7 @@ private int writeSampleData(ExtractorInput input, Track track, int size) // Zero the top three bytes of the array that we'll use to decode nal unit lengths, in case // they're only 1 or 2 bytes long. 
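// --- Editorial aside (not part of the upstream diff) -------------------------------------------
// The comment above zeroes the top bytes of a 4-byte array so that a 1- or 2-byte NAL length field
// can be decoded as a big-endian int. A hypothetical, self-contained sketch of the general
// technique this supports (rewriting length-prefixed NAL units with Annex-B start codes);
// NalToAnnexBSketch/convert are illustrative names, not the extractor's actual code path.
import java.io.ByteArrayOutputStream;

final class NalToAnnexBSketch {
  private static final byte[] START_CODE = {0, 0, 0, 1};

  static byte[] convert(byte[] sample, int lengthFieldSize) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    byte[] lengthBytes = new byte[4];
    int pos = 0;
    while (pos < sample.length) {
      // Top bytes stay zero when the length field is only 1 or 2 bytes long.
      java.util.Arrays.fill(lengthBytes, (byte) 0);
      System.arraycopy(sample, pos, lengthBytes, 4 - lengthFieldSize, lengthFieldSize);
      pos += lengthFieldSize;
      int nalLength =
          ((lengthBytes[0] & 0xFF) << 24)
              | ((lengthBytes[1] & 0xFF) << 16)
              | ((lengthBytes[2] & 0xFF) << 8)
              | (lengthBytes[3] & 0xFF);
      out.write(START_CODE, 0, START_CODE.length);
      out.write(sample, pos, nalLength);
      pos += nalLength;
    }
    return out.toByteArray();
  }

  public static void main(String[] args) {
    // One 3-byte NAL unit behind a 2-byte length prefix: 00 03 AA BB CC -> 00 00 00 01 AA BB CC.
    byte[] sample = {0x00, 0x03, (byte) 0xAA, (byte) 0xBB, (byte) 0xCC};
    System.out.println(convert(sample, 2).length); // Prints 7.
  }
}
// ------------------------------------------------------------------------------------------------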
- byte[] nalLengthData = nalLength.data; + byte[] nalLengthData = nalLength.getData(); nalLengthData[0] = 0; nalLengthData[1] = 0; nalLengthData[2] = 0; @@ -1454,7 +1622,7 @@ private int writeSampleData(ExtractorInput input, Track track, int size) } } else { if (track.trueHdSampleRechunker != null) { - Assertions.checkState(sampleStrippedBytes.limit() == 0); + checkState(sampleStrippedBytes.limit() == 0); track.trueHdSampleRechunker.startSample(input); } while (sampleBytesRead < size) { @@ -1469,8 +1637,10 @@ private int writeSampleData(ExtractorInput input, Track track, int size) // number of samples in the current page. This definition holds good only for Ogg and // irrelevant for Matroska. So we always set this to -1 (the decoder will ignore this value if // we set it to -1). The android platform media extractor [2] does the same. - // [1] https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp#314 - // [2] https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/NuMediaExtractor.cpp#474 + // [1] + // https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp#314 + // [2] + // https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/NuMediaExtractor.cpp#474 vorbisNumPageSamples.setPosition(0); output.sampleData(vorbisNumPageSamples, 4); sampleBytesWritten += 4; @@ -1480,8 +1650,8 @@ private int writeSampleData(ExtractorInput input, Track track, int size) } /** - * Called by {@link #writeSampleData(ExtractorInput, Track, int)} when the sample has been - * written. Returns the final sample size and resets state for the next sample. + * Called by {@link #writeSampleData(ExtractorInput, Track, int, boolean)} when the sample has + * been written. Returns the final sample size and resets state for the next sample. */ private int finishWriteSampleData() { int sampleSize = sampleBytesWritten; @@ -1489,7 +1659,7 @@ private int finishWriteSampleData() { return sampleSize; } - /** Resets state used by {@link #writeSampleData(ExtractorInput, Track, int)}. */ + /** Resets state used by {@link #writeSampleData(ExtractorInput, Track, int, boolean)}. */ private void resetWriteSampleData() { sampleBytesRead = 0; sampleBytesWritten = 0; @@ -1500,21 +1670,22 @@ private void resetWriteSampleData() { samplePartitionCount = 0; sampleSignalByte = (byte) 0; sampleInitializationVectorRead = false; - sampleStrippedBytes.reset(); + sampleStrippedBytes.reset(/* limit= */ 0); } private void writeSubtitleSampleData(ExtractorInput input, byte[] samplePrefix, int size) - throws IOException, InterruptedException { + throws IOException { int sizeWithPrefix = samplePrefix.length + size; if (subtitleSample.capacity() < sizeWithPrefix) { // Initialize subripSample to contain the required prefix and have space to hold a subtitle // twice as long as this one. 
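// --- Illustrative aside, not part of the patch: subtitle samples are emitted with a prefix that
// contains a placeholder end timecode; once the block duration is known, setSubtitleEndTime()
// (further below) overwrites that placeholder in place at a codec-specific offset. Standalone
// sketch of the overwrite step (names are hypothetical; the real offsets are the
// *_PREFIX_END_TIMECODE_OFFSET constants defined elsewhere in this file):
final class EndTimecodeSketch {
  static void overwriteEndTimecode(byte[] subtitleSample, int endTimecodeOffset, byte[] endTimecode) {
    System.arraycopy(endTimecode, 0, subtitleSample, endTimecodeOffset, endTimecode.length);
  }
}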
- subtitleSample.data = Arrays.copyOf(samplePrefix, sizeWithPrefix + size); + subtitleSample.reset(Arrays.copyOf(samplePrefix, sizeWithPrefix + size)); } else { - System.arraycopy(samplePrefix, 0, subtitleSample.data, 0, samplePrefix.length); + System.arraycopy(samplePrefix, 0, subtitleSample.getData(), 0, samplePrefix.length); } - input.readFully(subtitleSample.data, samplePrefix.length, size); - subtitleSample.reset(sizeWithPrefix); + input.readFully(subtitleSample.getData(), samplePrefix.length, size); + subtitleSample.setPosition(0); + subtitleSample.setLimit(sizeWithPrefix); // Defer writing the data to the track output. We need to modify the sample data by setting // the correct end timecode, which we might not have yet. } @@ -1526,7 +1697,8 @@ private void writeSubtitleSampleData(ExtractorInput input, byte[] samplePrefix, *

      See documentation on {@link #SSA_DIALOGUE_FORMAT} and {@link #SUBRIP_PREFIX} for why we use * the duration as the end timecode. * - * @param codecId The subtitle codec; must be {@link #CODEC_ID_SUBRIP} or {@link #CODEC_ID_ASS}. + * @param codecId The subtitle codec; must be {@link #CODEC_ID_SUBRIP}, {@link #CODEC_ID_ASS} or + * {@link #CODEC_ID_VTT}. * @param durationUs The duration of the sample, in microseconds. * @param subtitleData The subtitle sample in which to overwrite the end timecode (output * parameter). @@ -1547,6 +1719,12 @@ private static void setSubtitleEndTime(String codecId, long durationUs, byte[] s durationUs, SSA_TIMECODE_FORMAT, SSA_TIMECODE_LAST_VALUE_SCALING_FACTOR); endTimecodeOffset = SSA_PREFIX_END_TIMECODE_OFFSET; break; + case CODEC_ID_VTT: + endTimecode = + formatSubtitleTimecode( + durationUs, VTT_TIMECODE_FORMAT, VTT_TIMECODE_LAST_VALUE_SCALING_FACTOR); + endTimecodeOffset = VTT_PREFIX_END_TIMECODE_OFFSET; + break; default: throw new IllegalArgumentException(); } @@ -1559,12 +1737,12 @@ private static void setSubtitleEndTime(String codecId, long durationUs, byte[] s */ private static byte[] formatSubtitleTimecode( long timeUs, String timecodeFormat, long lastTimecodeValueScalingFactor) { - Assertions.checkArgument(timeUs != C.TIME_UNSET); + checkArgument(timeUs != C.TIME_UNSET); byte[] timeCodeData; int hours = (int) (timeUs / (3600 * C.MICROS_PER_SECOND)); - timeUs -= (hours * 3600 * C.MICROS_PER_SECOND); + timeUs -= (hours * 3600L * C.MICROS_PER_SECOND); int minutes = (int) (timeUs / (60 * C.MICROS_PER_SECOND)); - timeUs -= (minutes * 60 * C.MICROS_PER_SECOND); + timeUs -= (minutes * 60L * C.MICROS_PER_SECOND); int seconds = (int) (timeUs / C.MICROS_PER_SECOND); timeUs -= (seconds * C.MICROS_PER_SECOND); int lastValue = (int) (timeUs / lastTimecodeValueScalingFactor); @@ -1579,8 +1757,8 @@ private static byte[] formatSubtitleTimecode( * pending {@link #sampleStrippedBytes} and any remaining data read from {@code input}. */ private void writeToTarget(ExtractorInput input, byte[] target, int offset, int length) - throws IOException, InterruptedException { - int pendingStrippedBytes = Math.min(length, sampleStrippedBytes.bytesLeft()); + throws IOException { + int pendingStrippedBytes = min(length, sampleStrippedBytes.bytesLeft()); input.readFully(target, offset + pendingStrippedBytes, length - pendingStrippedBytes); if (pendingStrippedBytes > 0) { sampleStrippedBytes.readBytes(target, offset, pendingStrippedBytes); @@ -1592,11 +1770,11 @@ private void writeToTarget(ExtractorInput input, byte[] target, int offset, int * {@link #sampleStrippedBytes} or data read from {@code input}. */ private int writeToOutput(ExtractorInput input, TrackOutput output, int length) - throws IOException, InterruptedException { + throws IOException { int bytesWritten; int strippedBytesLeft = sampleStrippedBytes.bytesLeft(); if (strippedBytesLeft > 0) { - bytesWritten = Math.min(length, strippedBytesLeft); + bytesWritten = min(length, strippedBytesLeft); output.sampleData(sampleStrippedBytes, bytesWritten); } else { bytesWritten = output.sampleData(input, length, false); @@ -1610,13 +1788,15 @@ private int writeToOutput(ExtractorInput input, TrackOutput output, int length) * @return The built {@link SeekMap}. The returned {@link SeekMap} may be unseekable if cues * information was missing or incomplete. 
*/ - private SeekMap buildSeekMap() { - if (segmentContentPosition == C.POSITION_UNSET || durationUs == C.TIME_UNSET - || cueTimesUs == null || cueTimesUs.size() == 0 - || cueClusterPositions == null || cueClusterPositions.size() != cueTimesUs.size()) { + private SeekMap buildSeekMap( + @Nullable LongArray cueTimesUs, @Nullable LongArray cueClusterPositions) { + if (segmentContentPosition == C.POSITION_UNSET + || durationUs == C.TIME_UNSET + || cueTimesUs == null + || cueTimesUs.size() == 0 + || cueClusterPositions == null + || cueClusterPositions.size() != cueTimesUs.size()) { // Cues information is missing or incomplete. - cueTimesUs = null; - cueClusterPositions = null; return new SeekMap.Unseekable(durationUs); } int cuePointsSize = cueTimesUs.size(); @@ -1645,8 +1825,6 @@ private SeekMap buildSeekMap() { timesUs = Arrays.copyOf(timesUs, timesUs.length - 1); } - cueTimesUs = null; - cueClusterPositions = null; return new ChunkIndex(sizes, offsets, durationsUs, timesUs); } @@ -1678,65 +1856,78 @@ private boolean maybeSeekForCues(PositionHolder seekPosition, long currentPositi private long scaleTimecodeToUs(long unscaledTimecode) throws ParserException { if (timecodeScale == C.TIME_UNSET) { - throw new ParserException("Can't scale timecode prior to timecodeScale being set."); + throw ParserException.createForMalformedContainer( + "Can't scale timecode prior to timecodeScale being set.", /* cause= */ null); } return Util.scaleLargeTimestamp(unscaledTimecode, timecodeScale, 1000); } private static boolean isCodecSupported(String codecId) { - return CODEC_ID_VP8.equals(codecId) - || CODEC_ID_VP9.equals(codecId) - || CODEC_ID_AV1.equals(codecId) - || CODEC_ID_MPEG2.equals(codecId) - || CODEC_ID_MPEG4_SP.equals(codecId) - || CODEC_ID_MPEG4_ASP.equals(codecId) - || CODEC_ID_MPEG4_AP.equals(codecId) - || CODEC_ID_H264.equals(codecId) - || CODEC_ID_H265.equals(codecId) - || CODEC_ID_FOURCC.equals(codecId) - || CODEC_ID_THEORA.equals(codecId) - || CODEC_ID_OPUS.equals(codecId) - || CODEC_ID_VORBIS.equals(codecId) - || CODEC_ID_AAC.equals(codecId) - || CODEC_ID_MP2.equals(codecId) - || CODEC_ID_MP3.equals(codecId) - || CODEC_ID_AC3.equals(codecId) - || CODEC_ID_E_AC3.equals(codecId) - || CODEC_ID_TRUEHD.equals(codecId) - || CODEC_ID_DTS.equals(codecId) - || CODEC_ID_DTS_EXPRESS.equals(codecId) - || CODEC_ID_DTS_LOSSLESS.equals(codecId) - || CODEC_ID_FLAC.equals(codecId) - || CODEC_ID_ACM.equals(codecId) - || CODEC_ID_PCM_INT_LIT.equals(codecId) - || CODEC_ID_SUBRIP.equals(codecId) - || CODEC_ID_ASS.equals(codecId) - || CODEC_ID_VOBSUB.equals(codecId) - || CODEC_ID_PGS.equals(codecId) - || CODEC_ID_DVBSUB.equals(codecId); + switch (codecId) { + case CODEC_ID_VP8: + case CODEC_ID_VP9: + case CODEC_ID_AV1: + case CODEC_ID_MPEG2: + case CODEC_ID_MPEG4_SP: + case CODEC_ID_MPEG4_ASP: + case CODEC_ID_MPEG4_AP: + case CODEC_ID_H264: + case CODEC_ID_H265: + case CODEC_ID_FOURCC: + case CODEC_ID_THEORA: + case CODEC_ID_OPUS: + case CODEC_ID_VORBIS: + case CODEC_ID_AAC: + case CODEC_ID_MP2: + case CODEC_ID_MP3: + case CODEC_ID_AC3: + case CODEC_ID_E_AC3: + case CODEC_ID_TRUEHD: + case CODEC_ID_DTS: + case CODEC_ID_DTS_EXPRESS: + case CODEC_ID_DTS_LOSSLESS: + case CODEC_ID_FLAC: + case CODEC_ID_ACM: + case CODEC_ID_PCM_INT_LIT: + case CODEC_ID_PCM_INT_BIG: + case CODEC_ID_PCM_FLOAT: + case CODEC_ID_SUBRIP: + case CODEC_ID_ASS: + case CODEC_ID_VTT: + case CODEC_ID_VOBSUB: + case CODEC_ID_PGS: + case CODEC_ID_DVBSUB: + return true; + default: + return false; + } } /** * Returns an array that can store (at 
least) {@code length} elements, which will be either a new * array or {@code array} if it's not null and large enough. */ - private static int[] ensureArrayCapacity(int[] array, int length) { + private static int[] ensureArrayCapacity(@Nullable int[] array, int length) { if (array == null) { return new int[length]; } else if (array.length >= length) { return array; } else { // Double the size to avoid allocating constantly if the required length increases gradually. - return new int[Math.max(array.length * 2, length)]; + return new int[max(array.length * 2, length)]; } } + @EnsuresNonNull("extractorOutput") + private void assertInitialized() { + checkStateNotNull(extractorOutput); + } + /** Passes events through to the outer {@link MatroskaExtractor}. */ private final class InnerEbmlProcessor implements EbmlProcessor { @Override - @ElementType - public int getElementType(int id) { + public @ElementType int getElementType(int id) { return MatroskaExtractor.this.getElementType(id); } @@ -1772,100 +1963,35 @@ public void stringElement(int id, String value) throws ParserException { } @Override - public void binaryElement(int id, int contentsSize, ExtractorInput input) - throws IOException, InterruptedException { + public void binaryElement(int id, int contentsSize, ExtractorInput input) throws IOException { MatroskaExtractor.this.binaryElement(id, contentsSize, input); } } - /** - * Rechunks TrueHD sample data into groups of {@link Ac3Util#TRUEHD_RECHUNK_SAMPLE_COUNT} samples. - */ - private static final class TrueHdSampleRechunker { - - private final byte[] syncframePrefix; - - private boolean foundSyncframe; - private int chunkSampleCount; - private long chunkTimeUs; - private @C.BufferFlags int chunkFlags; - private int chunkSize; - private int chunkOffset; - - public TrueHdSampleRechunker() { - syncframePrefix = new byte[Ac3Util.TRUEHD_SYNCFRAME_PREFIX_LENGTH]; - } - - public void reset() { - foundSyncframe = false; - chunkSampleCount = 0; - } - - public void startSample(ExtractorInput input) throws IOException, InterruptedException { - if (foundSyncframe) { - return; - } - input.peekFully(syncframePrefix, 0, Ac3Util.TRUEHD_SYNCFRAME_PREFIX_LENGTH); - input.resetPeekPosition(); - if (Ac3Util.parseTrueHdSyncframeAudioSampleCount(syncframePrefix) == 0) { - return; - } - foundSyncframe = true; - } - - public void sampleMetadata( - Track track, long timeUs, @C.BufferFlags int flags, int size, int offset) { - if (!foundSyncframe) { - return; - } - if (chunkSampleCount++ == 0) { - // This is the first sample in the chunk. - chunkTimeUs = timeUs; - chunkFlags = flags; - chunkSize = 0; - } - chunkSize += size; - chunkOffset = offset; // The offset is to the end of the sample. - if (chunkSampleCount >= Ac3Util.TRUEHD_RECHUNK_SAMPLE_COUNT) { - outputPendingSampleMetadata(track); - } - } - - public void outputPendingSampleMetadata(Track track) { - if (chunkSampleCount > 0) { - track.output.sampleMetadata( - chunkTimeUs, chunkFlags, chunkSize, chunkOffset, track.cryptoData); - chunkSampleCount = 0; - } - } - } - - private static final class Track { + /** Holds data corresponding to a single track. */ + protected static final class Track { private static final int DISPLAY_UNIT_PIXELS = 0; - private static final int MAX_CHROMATICITY = 50000; // Defined in CTA-861.3. - /** - * Default max content light level (CLL) that should be encoded into hdrStaticInfo. - */ - private static final int DEFAULT_MAX_CLL = 1000; // nits. + private static final int MAX_CHROMATICITY = 50_000; // Defined in CTA-861.3. 
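// --- Illustrative aside, not part of the patch: CTA-861.3 static HDR metadata stores each
// chromaticity coordinate as an unsigned 16-bit value in units of 0.00002, hence the 50_000
// factor above; getHdrStaticInfo() further below applies exactly this conversion. Standalone
// sketch (hypothetical helper, not ExoPlayer API):
final class ChromaticitySketch {
  static short encode(float coordinate) {
    // Example: a red-primary x of 0.708 encodes to 35_400.
    return (short) ((coordinate * 50_000) + 0.5f);
  }
}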
+ /** Default max content light level (CLL) that should be encoded into hdrStaticInfo. */ + private static final int DEFAULT_MAX_CLL = 1000; // nits. - /** - * Default frame-average light level (FALL) that should be encoded into hdrStaticInfo. - */ - private static final int DEFAULT_MAX_FALL = 200; // nits. + /** Default frame-average light level (FALL) that should be encoded into hdrStaticInfo. */ + private static final int DEFAULT_MAX_FALL = 200; // nits. // Common elements. - public String name; - public String codecId; + public @MonotonicNonNull String name; + public @MonotonicNonNull String codecId; public int number; public int type; public int defaultSampleDurationNs; public int maxBlockAdditionId; + private int blockAddIdType; public boolean hasContentEncryption; - public byte[] sampleStrippedBytes; - public TrackOutput.CryptoData cryptoData; - public byte[] codecPrivate; - public DrmInitData drmInitData; + public byte @MonotonicNonNull [] sampleStrippedBytes; + public TrackOutput.@MonotonicNonNull CryptoData cryptoData; + public byte @MonotonicNonNull [] codecPrivate; + public @MonotonicNonNull DrmInitData drmInitData; // Video elements. public int width = Format.NO_VALUE; @@ -1873,20 +1999,16 @@ private static final class Track { public int displayWidth = Format.NO_VALUE; public int displayHeight = Format.NO_VALUE; public int displayUnit = DISPLAY_UNIT_PIXELS; - @C.Projection public int projectionType = Format.NO_VALUE; + public @C.Projection int projectionType = Format.NO_VALUE; public float projectionPoseYaw = 0f; public float projectionPosePitch = 0f; public float projectionPoseRoll = 0f; - public byte[] projectionData = null; - @C.StereoMode - public int stereoMode = Format.NO_VALUE; + public byte @MonotonicNonNull [] projectionData = null; + public @C.StereoMode int stereoMode = Format.NO_VALUE; public boolean hasColorInfo = false; - @C.ColorSpace - public int colorSpace = Format.NO_VALUE; - @C.ColorTransfer - public int colorTransfer = Format.NO_VALUE; - @C.ColorRange - public int colorRange = Format.NO_VALUE; + public @C.ColorSpace int colorSpace = Format.NO_VALUE; + public @C.ColorTransfer int colorTransfer = Format.NO_VALUE; + public @C.ColorRange int colorRange = Format.NO_VALUE; public int maxContentLuminance = DEFAULT_MAX_CLL; public int maxFrameAverageLuminance = DEFAULT_MAX_FALL; public float primaryRChromaticityX = Format.NO_VALUE; @@ -1899,6 +2021,7 @@ private static final class Track { public float whitePointChromaticityY = Format.NO_VALUE; public float maxMasteringLuminance = Format.NO_VALUE; public float minMasteringLuminance = Format.NO_VALUE; + public byte @MonotonicNonNull [] dolbyVisionConfigBytes; // Audio elements. Initially set to their default values. public int channelCount = 1; @@ -1906,7 +2029,7 @@ private static final class Track { public int sampleRate = 8000; public long codecDelayNs = 0; public long seekPreRollNs = 0; - @Nullable public TrueHdSampleRechunker trueHdSampleRechunker; + public @MonotonicNonNull TrueHdSampleRechunker trueHdSampleRechunker; // Text elements. public boolean flagForced; @@ -1914,15 +2037,18 @@ private static final class Track { private String language = "eng"; // Set when the output is initialized. nalUnitLengthFieldLength is only set for H264/H265. - public TrackOutput output; + public @MonotonicNonNull TrackOutput output; public int nalUnitLengthFieldLength; /** Initializes the track with an output. 
*/ + @RequiresNonNull("codecId") + @EnsuresNonNull("this.output") public void initializeOutput(ExtractorOutput output, int trackId) throws ParserException { String mimeType; int maxInputSize = Format.NO_VALUE; @C.PcmEncoding int pcmEncoding = Format.NO_VALUE; - List initializationData = null; + @Nullable List initializationData = null; + @Nullable String codecs = null; switch (codecId) { case CODEC_ID_VP8: mimeType = MimeTypes.VIDEO_VP8; @@ -1945,18 +2071,21 @@ public void initializeOutput(ExtractorOutput output, int trackId) throws ParserE break; case CODEC_ID_H264: mimeType = MimeTypes.VIDEO_H264; - AvcConfig avcConfig = AvcConfig.parse(new ParsableByteArray(codecPrivate)); + AvcConfig avcConfig = AvcConfig.parse(new ParsableByteArray(getCodecPrivate(codecId))); initializationData = avcConfig.initializationData; nalUnitLengthFieldLength = avcConfig.nalUnitLengthFieldLength; + codecs = avcConfig.codecs; break; case CODEC_ID_H265: mimeType = MimeTypes.VIDEO_H265; - HevcConfig hevcConfig = HevcConfig.parse(new ParsableByteArray(codecPrivate)); + HevcConfig hevcConfig = HevcConfig.parse(new ParsableByteArray(getCodecPrivate(codecId))); initializationData = hevcConfig.initializationData; nalUnitLengthFieldLength = hevcConfig.nalUnitLengthFieldLength; + codecs = hevcConfig.codecs; break; case CODEC_ID_FOURCC: - Pair> pair = parseFourCcPrivate(new ParsableByteArray(codecPrivate)); + Pair> pair = + parseFourCcPrivate(new ParsableByteArray(getCodecPrivate(codecId))); mimeType = pair.first; initializationData = pair.second; break; @@ -1968,13 +2097,13 @@ public void initializeOutput(ExtractorOutput output, int trackId) throws ParserE case CODEC_ID_VORBIS: mimeType = MimeTypes.AUDIO_VORBIS; maxInputSize = VORBIS_MAX_INPUT_SIZE; - initializationData = parseVorbisCodecPrivate(codecPrivate); + initializationData = parseVorbisCodecPrivate(getCodecPrivate(codecId)); break; case CODEC_ID_OPUS: mimeType = MimeTypes.AUDIO_OPUS; maxInputSize = OPUS_MAX_INPUT_SIZE; initializationData = new ArrayList<>(3); - initializationData.add(codecPrivate); + initializationData.add(getCodecPrivate(codecId)); initializationData.add( ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN).putLong(codecDelayNs).array()); initializationData.add( @@ -1982,15 +2111,21 @@ public void initializeOutput(ExtractorOutput output, int trackId) throws ParserE break; case CODEC_ID_AAC: mimeType = MimeTypes.AUDIO_AAC; - initializationData = Collections.singletonList(codecPrivate); + initializationData = Collections.singletonList(getCodecPrivate(codecId)); + AacUtil.Config aacConfig = AacUtil.parseAudioSpecificConfig(codecPrivate); + // Update sampleRate and channelCount from the AudioSpecificConfig initialization data, + // which is more reliable. See [Internal: b/10903778]. 
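// --- Illustrative aside, not part of the patch: the first two bytes of an AAC
// AudioSpecificConfig carry a 5-bit audio object type, a 4-bit sampling-frequency index and a
// 4-bit channel configuration, which is why the values parsed from it are preferred over the
// container-level ones. Simplified standalone sketch that ignores the escape-coded (extended)
// object type and frequency fields handled by AacUtil:
final class AscSketch {
  /** Returns {objectType, frequencyIndex, channelConfiguration} from the first two ASC bytes. */
  static int[] parse(byte[] asc) {
    int objectType = (asc[0] >> 3) & 0x1F;
    int frequencyIndex = ((asc[0] & 0x07) << 1) | ((asc[1] >> 7) & 0x01);
    int channelConfiguration = (asc[1] >> 3) & 0x0F;
    return new int[] {objectType, frequencyIndex, channelConfiguration};
  }
}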
+ sampleRate = aacConfig.sampleRateHz; + channelCount = aacConfig.channelCount; + codecs = aacConfig.codecs; break; case CODEC_ID_MP2: mimeType = MimeTypes.AUDIO_MPEG_L2; - maxInputSize = MpegAudioHeader.MAX_FRAME_SIZE_BYTES; + maxInputSize = MpegAudioUtil.MAX_FRAME_SIZE_BYTES; break; case CODEC_ID_MP3: mimeType = MimeTypes.AUDIO_MPEG; - maxInputSize = MpegAudioHeader.MAX_FRAME_SIZE_BYTES; + maxInputSize = MpegAudioUtil.MAX_FRAME_SIZE_BYTES; break; case CODEC_ID_AC3: mimeType = MimeTypes.AUDIO_AC3; @@ -2011,17 +2146,21 @@ public void initializeOutput(ExtractorOutput output, int trackId) throws ParserE break; case CODEC_ID_FLAC: mimeType = MimeTypes.AUDIO_FLAC; - initializationData = Collections.singletonList(codecPrivate); + initializationData = Collections.singletonList(getCodecPrivate(codecId)); break; case CODEC_ID_ACM: mimeType = MimeTypes.AUDIO_RAW; - if (parseMsAcmCodecPrivate(new ParsableByteArray(codecPrivate))) { + if (parseMsAcmCodecPrivate(new ParsableByteArray(getCodecPrivate(codecId)))) { pcmEncoding = Util.getPcmEncoding(audioBitDepth); if (pcmEncoding == C.ENCODING_INVALID) { pcmEncoding = Format.NO_VALUE; mimeType = MimeTypes.AUDIO_UNKNOWN; - Log.w(TAG, "Unsupported PCM bit depth: " + audioBitDepth + ". Setting mimeType to " - + mimeType); + Log.w( + TAG, + "Unsupported PCM bit depth: " + + audioBitDepth + + ". Setting mimeType to " + + mimeType); } } else { mimeType = MimeTypes.AUDIO_UNKNOWN; @@ -2034,8 +2173,44 @@ public void initializeOutput(ExtractorOutput output, int trackId) throws ParserE if (pcmEncoding == C.ENCODING_INVALID) { pcmEncoding = Format.NO_VALUE; mimeType = MimeTypes.AUDIO_UNKNOWN; - Log.w(TAG, "Unsupported PCM bit depth: " + audioBitDepth + ". Setting mimeType to " - + mimeType); + Log.w( + TAG, + "Unsupported little endian PCM bit depth: " + + audioBitDepth + + ". Setting mimeType to " + + mimeType); + } + break; + case CODEC_ID_PCM_INT_BIG: + mimeType = MimeTypes.AUDIO_RAW; + if (audioBitDepth == 8) { + pcmEncoding = C.ENCODING_PCM_8BIT; + } else if (audioBitDepth == 16) { + pcmEncoding = C.ENCODING_PCM_16BIT_BIG_ENDIAN; + } else { + pcmEncoding = Format.NO_VALUE; + mimeType = MimeTypes.AUDIO_UNKNOWN; + Log.w( + TAG, + "Unsupported big endian PCM bit depth: " + + audioBitDepth + + ". Setting mimeType to " + + mimeType); + } + break; + case CODEC_ID_PCM_FLOAT: + mimeType = MimeTypes.AUDIO_RAW; + if (audioBitDepth == 32) { + pcmEncoding = C.ENCODING_PCM_FLOAT; + } else { + pcmEncoding = Format.NO_VALUE; + mimeType = MimeTypes.AUDIO_UNKNOWN; + Log.w( + TAG, + "Unsupported floating point PCM bit depth: " + + audioBitDepth + + ". 
Setting mimeType to " + + mimeType); } break; case CODEC_ID_SUBRIP: @@ -2043,10 +2218,14 @@ public void initializeOutput(ExtractorOutput output, int trackId) throws ParserE break; case CODEC_ID_ASS: mimeType = MimeTypes.TEXT_SSA; + initializationData = ImmutableList.of(SSA_DIALOGUE_FORMAT, getCodecPrivate(codecId)); + break; + case CODEC_ID_VTT: + mimeType = MimeTypes.TEXT_VTT; break; case CODEC_ID_VOBSUB: mimeType = MimeTypes.APPLICATION_VOBSUB; - initializationData = Collections.singletonList(codecPrivate); + initializationData = ImmutableList.of(getCodecPrivate(codecId)); break; case CODEC_ID_PGS: mimeType = MimeTypes.APPLICATION_PGS; @@ -2054,25 +2233,39 @@ public void initializeOutput(ExtractorOutput output, int trackId) throws ParserE case CODEC_ID_DVBSUB: mimeType = MimeTypes.APPLICATION_DVBSUBS; // Init data: composition_page (2), ancillary_page (2) - initializationData = Collections.singletonList(new byte[] {codecPrivate[0], - codecPrivate[1], codecPrivate[2], codecPrivate[3]}); + byte[] initializationDataBytes = new byte[4]; + System.arraycopy(getCodecPrivate(codecId), 0, initializationDataBytes, 0, 4); + initializationData = ImmutableList.of(initializationDataBytes); break; default: - throw new ParserException("Unrecognized codec identifier."); + throw ParserException.createForMalformedContainer( + "Unrecognized codec identifier.", /* cause= */ null); + } + + if (dolbyVisionConfigBytes != null) { + @Nullable + DolbyVisionConfig dolbyVisionConfig = + DolbyVisionConfig.parse(new ParsableByteArray(this.dolbyVisionConfigBytes)); + if (dolbyVisionConfig != null) { + codecs = dolbyVisionConfig.codecs; + mimeType = MimeTypes.VIDEO_DOLBY_VISION; + } } - int type; - Format format; @C.SelectionFlags int selectionFlags = 0; selectionFlags |= flagDefault ? C.SELECTION_FLAG_DEFAULT : 0; selectionFlags |= flagForced ? C.SELECTION_FLAG_FORCED : 0; + + int type; + Format.Builder formatBuilder = new Format.Builder(); // TODO: Consider reading the name elements of the tracks and, if present, incorporating them // into the trackId passed when creating the formats. if (MimeTypes.isAudio(mimeType)) { type = C.TRACK_TYPE_AUDIO; - format = Format.createAudioSampleFormat(Integer.toString(trackId), mimeType, null, - Format.NO_VALUE, maxInputSize, channelCount, sampleRate, pcmEncoding, - initializationData, drmInitData, selectionFlags, language); + formatBuilder + .setChannelCount(channelCount) + .setSampleRate(sampleRate) + .setPcmEncoding(pcmEncoding); } else if (MimeTypes.isVideo(mimeType)) { type = C.TRACK_TYPE_VIDEO; if (displayUnit == Track.DISPLAY_UNIT_PIXELS) { @@ -2083,21 +2276,15 @@ public void initializeOutput(ExtractorOutput output, int trackId) throws ParserE if (displayWidth != Format.NO_VALUE && displayHeight != Format.NO_VALUE) { pixelWidthHeightRatio = ((float) (height * displayWidth)) / (width * displayHeight); } - ColorInfo colorInfo = null; + @Nullable ColorInfo colorInfo = null; if (hasColorInfo) { - byte[] hdrStaticInfo = getHdrStaticInfo(); + @Nullable byte[] hdrStaticInfo = getHdrStaticInfo(); colorInfo = new ColorInfo(colorSpace, colorRange, colorTransfer, hdrStaticInfo); } int rotationDegrees = Format.NO_VALUE; - // Some HTC devices signal rotation in track names. 
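// --- Illustrative aside, not part of the patch: the hard-coded if/else chain removed below is
// replaced by a lookup in TRACK_NAME_TO_ROTATION_DEGREES, which is declared elsewhere in this
// file. A plausible shape for that map (an assumption, not the verbatim declaration) is:
final class RotationMapSketch {
  static final java.util.Map<String, Integer> TRACK_NAME_TO_ROTATION_DEGREES;
  static {
    java.util.Map<String, Integer> map = new java.util.HashMap<>();
    map.put("htc_video_rotA-000", 0);
    map.put("htc_video_rotA-090", 90);
    map.put("htc_video_rotA-180", 180);
    map.put("htc_video_rotA-270", 270);
    TRACK_NAME_TO_ROTATION_DEGREES = java.util.Collections.unmodifiableMap(map);
  }
}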
- if ("htc_video_rotA-000".equals(name)) { - rotationDegrees = 0; - } else if ("htc_video_rotA-090".equals(name)) { - rotationDegrees = 90; - } else if ("htc_video_rotA-180".equals(name)) { - rotationDegrees = 180; - } else if ("htc_video_rotA-270".equals(name)) { - rotationDegrees = 270; + + if (name != null && TRACK_NAME_TO_ROTATION_DEGREES.containsKey(name)) { + rotationDegrees = TRACK_NAME_TO_ROTATION_DEGREES.get(name); } if (projectionType == C.PROJECTION_RECTANGULAR && Float.compare(projectionPoseYaw, 0f) == 0 @@ -2114,61 +2301,51 @@ public void initializeOutput(ExtractorOutput output, int trackId) throws ParserE rotationDegrees = 270; } } - format = - Format.createVideoSampleFormat( - Integer.toString(trackId), - mimeType, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - maxInputSize, - width, - height, - /* frameRate= */ Format.NO_VALUE, - initializationData, - rotationDegrees, - pixelWidthHeightRatio, - projectionData, - stereoMode, - colorInfo, - drmInitData); - } else if (MimeTypes.APPLICATION_SUBRIP.equals(mimeType)) { - type = C.TRACK_TYPE_TEXT; - format = Format.createTextSampleFormat(Integer.toString(trackId), mimeType, selectionFlags, - language, drmInitData); - } else if (MimeTypes.TEXT_SSA.equals(mimeType)) { - type = C.TRACK_TYPE_TEXT; - initializationData = new ArrayList<>(2); - initializationData.add(SSA_DIALOGUE_FORMAT); - initializationData.add(codecPrivate); - format = Format.createTextSampleFormat(Integer.toString(trackId), mimeType, null, - Format.NO_VALUE, selectionFlags, language, Format.NO_VALUE, drmInitData, - Format.OFFSET_SAMPLE_RELATIVE, initializationData); - } else if (MimeTypes.APPLICATION_VOBSUB.equals(mimeType) + formatBuilder + .setWidth(width) + .setHeight(height) + .setPixelWidthHeightRatio(pixelWidthHeightRatio) + .setRotationDegrees(rotationDegrees) + .setProjectionData(projectionData) + .setStereoMode(stereoMode) + .setColorInfo(colorInfo); + } else if (MimeTypes.APPLICATION_SUBRIP.equals(mimeType) + || MimeTypes.TEXT_SSA.equals(mimeType) + || MimeTypes.TEXT_VTT.equals(mimeType) + || MimeTypes.APPLICATION_VOBSUB.equals(mimeType) || MimeTypes.APPLICATION_PGS.equals(mimeType) || MimeTypes.APPLICATION_DVBSUBS.equals(mimeType)) { type = C.TRACK_TYPE_TEXT; - format = - Format.createImageSampleFormat( - Integer.toString(trackId), - mimeType, - null, - Format.NO_VALUE, - selectionFlags, - initializationData, - language, - drmInitData); } else { - throw new ParserException("Unexpected MIME type."); + throw ParserException.createForMalformedContainer( + "Unexpected MIME type.", /* cause= */ null); + } + + if (name != null && !TRACK_NAME_TO_ROTATION_DEGREES.containsKey(name)) { + formatBuilder.setLabel(name); } + Format format = + formatBuilder + .setId(trackId) + .setSampleMimeType(mimeType) + .setMaxInputSize(maxInputSize) + .setLanguage(language) + .setSelectionFlags(selectionFlags) + .setInitializationData(initializationData) + .setCodecs(codecs) + .setDrmInitData(drmInitData) + .build(); + this.output = output.track(number, type); this.output.format(format); } /** Forces any pending sample metadata to be flushed to the output. */ + @RequiresNonNull("output") public void outputPendingSampleMetadata() { if (trueHdSampleRechunker != null) { - trueHdSampleRechunker.outputPendingSampleMetadata(this); + trueHdSampleRechunker.outputPendingSampleMetadata(output, cryptoData); } } @@ -2179,25 +2356,44 @@ public void reset() { } } + /** + * Returns true if supplemental data will be attached to the samples. 
+ * + * @param isBlockGroup Whether the samples are from a BlockGroup. + */ + private boolean samplesHaveSupplementalData(boolean isBlockGroup) { + if (CODEC_ID_OPUS.equals(codecId)) { + // At the end of a BlockGroup, a positive DiscardPadding value will be written out as + // supplemental data for Opus codec. Otherwise (i.e. DiscardPadding <= 0) supplemental data + // size will be 0. + return isBlockGroup; + } + return maxBlockAdditionId > 0; + } + /** Returns the HDR Static Info as defined in CTA-861.3. */ @Nullable private byte[] getHdrStaticInfo() { // Are all fields present. - if (primaryRChromaticityX == Format.NO_VALUE || primaryRChromaticityY == Format.NO_VALUE - || primaryGChromaticityX == Format.NO_VALUE || primaryGChromaticityY == Format.NO_VALUE - || primaryBChromaticityX == Format.NO_VALUE || primaryBChromaticityY == Format.NO_VALUE + if (primaryRChromaticityX == Format.NO_VALUE + || primaryRChromaticityY == Format.NO_VALUE + || primaryGChromaticityX == Format.NO_VALUE + || primaryGChromaticityY == Format.NO_VALUE + || primaryBChromaticityX == Format.NO_VALUE + || primaryBChromaticityY == Format.NO_VALUE || whitePointChromaticityX == Format.NO_VALUE - || whitePointChromaticityY == Format.NO_VALUE || maxMasteringLuminance == Format.NO_VALUE + || whitePointChromaticityY == Format.NO_VALUE + || maxMasteringLuminance == Format.NO_VALUE || minMasteringLuminance == Format.NO_VALUE) { return null; } byte[] hdrStaticInfoData = new byte[25]; ByteBuffer hdrStaticInfo = ByteBuffer.wrap(hdrStaticInfoData).order(ByteOrder.LITTLE_ENDIAN); - hdrStaticInfo.put((byte) 0); // Type. + hdrStaticInfo.put((byte) 0); // Type. hdrStaticInfo.putShort((short) ((primaryRChromaticityX * MAX_CHROMATICITY) + 0.5f)); hdrStaticInfo.putShort((short) ((primaryRChromaticityY * MAX_CHROMATICITY) + 0.5f)); - hdrStaticInfo.putShort((short) ((primaryGChromaticityX * MAX_CHROMATICITY) + 0.5f)); + hdrStaticInfo.putShort((short) ((primaryGChromaticityX * MAX_CHROMATICITY) + 0.5f)); hdrStaticInfo.putShort((short) ((primaryGChromaticityY * MAX_CHROMATICITY) + 0.5f)); hdrStaticInfo.putShort((short) ((primaryBChromaticityX * MAX_CHROMATICITY) + 0.5f)); hdrStaticInfo.putShort((short) ((primaryBChromaticityY * MAX_CHROMATICITY) + 0.5f)); @@ -2218,8 +2414,8 @@ private byte[] getHdrStaticInfo() { * is {@code null}. * @throws ParserException If the initialization data could not be built. */ - private static Pair> parseFourCcPrivate(ParsableByteArray buffer) - throws ParserException { + private static Pair> parseFourCcPrivate( + ParsableByteArray buffer) throws ParserException { try { buffer.skipBytes(16); // size(4), width(4), height(4), planes(2), bitcount(2). long compression = buffer.readLittleEndianUnsignedInt(); @@ -2231,7 +2427,7 @@ private static Pair> parseFourCcPrivate(ParsableByteArray b // Search for the initialization data from the end of the BITMAPINFOHEADER. The last 20 // bytes of which are: sizeImage(4), xPel/m (4), yPel/m (4), clrUsed(4), clrImportant(4). 
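// --- Illustrative aside, not part of the patch: the loop below scans the remainder of
// codecPrivate for a start code and, once a match is found, hands the data from that offset
// onwards to the decoder as VC-1 initialization data (the exact start-code bytes and the copy
// are in code elided from this hunk). Generic standalone sketch of such a scan:
final class StartCodeScanSketch {
  static int indexOf(byte[] data, byte[] startCode, int from) {
    outer:
    for (int i = from; i <= data.length - startCode.length; i++) {
      for (int j = 0; j < startCode.length; j++) {
        if (data[i + j] != startCode[j]) {
          continue outer;
        }
      }
      return i;
    }
    return -1;
  }
}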
int startOffset = buffer.getPosition() + 20; - byte[] bufferData = buffer.data; + byte[] bufferData = buffer.getData(); for (int offset = startOffset; offset < bufferData.length - 4; offset++) { if (bufferData[offset] == 0x00 && bufferData[offset + 1] == 0x00 @@ -2242,10 +2438,12 @@ private static Pair> parseFourCcPrivate(ParsableByteArray b return new Pair<>(MimeTypes.VIDEO_VC1, Collections.singletonList(initializationData)); } } - throw new ParserException("Failed to find FourCC VC1 initialization data"); + throw ParserException.createForMalformedContainer( + "Failed to find FourCC VC1 initialization data", /* cause= */ null); } } catch (ArrayIndexOutOfBoundsException e) { - throw new ParserException("Error parsing FourCC private data"); + throw ParserException.createForMalformedContainer( + "Error parsing FourCC private data", /* cause= */ null); } Log.w(TAG, "Unknown FourCC. Setting mimeType to " + MimeTypes.VIDEO_UNKNOWN); @@ -2262,35 +2460,39 @@ private static List parseVorbisCodecPrivate(byte[] codecPrivate) throws ParserException { try { if (codecPrivate[0] != 0x02) { - throw new ParserException("Error parsing vorbis codec private"); + throw ParserException.createForMalformedContainer( + "Error parsing vorbis codec private", /* cause= */ null); } int offset = 1; int vorbisInfoLength = 0; - while (codecPrivate[offset] == (byte) 0xFF) { + while ((codecPrivate[offset] & 0xFF) == 0xFF) { vorbisInfoLength += 0xFF; offset++; } - vorbisInfoLength += codecPrivate[offset++]; + vorbisInfoLength += codecPrivate[offset++] & 0xFF; int vorbisSkipLength = 0; - while (codecPrivate[offset] == (byte) 0xFF) { + while ((codecPrivate[offset] & 0xFF) == 0xFF) { vorbisSkipLength += 0xFF; offset++; } - vorbisSkipLength += codecPrivate[offset++]; + vorbisSkipLength += codecPrivate[offset++] & 0xFF; if (codecPrivate[offset] != 0x01) { - throw new ParserException("Error parsing vorbis codec private"); + throw ParserException.createForMalformedContainer( + "Error parsing vorbis codec private", /* cause= */ null); } byte[] vorbisInfo = new byte[vorbisInfoLength]; System.arraycopy(codecPrivate, offset, vorbisInfo, 0, vorbisInfoLength); offset += vorbisInfoLength; if (codecPrivate[offset] != 0x03) { - throw new ParserException("Error parsing vorbis codec private"); + throw ParserException.createForMalformedContainer( + "Error parsing vorbis codec private", /* cause= */ null); } offset += vorbisSkipLength; if (codecPrivate[offset] != 0x05) { - throw new ParserException("Error parsing vorbis codec private"); + throw ParserException.createForMalformedContainer( + "Error parsing vorbis codec private", /* cause= */ null); } byte[] vorbisBooks = new byte[codecPrivate.length - offset]; System.arraycopy(codecPrivate, offset, vorbisBooks, 0, codecPrivate.length - offset); @@ -2299,7 +2501,8 @@ private static List parseVorbisCodecPrivate(byte[] codecPrivate) initializationData.add(vorbisBooks); return initializationData; } catch (ArrayIndexOutOfBoundsException e) { - throw new ParserException("Error parsing vorbis codec private"); + throw ParserException.createForMalformedContainer( + "Error parsing vorbis codec private", /* cause= */ null); } } @@ -2322,10 +2525,30 @@ private static boolean parseMsAcmCodecPrivate(ParsableByteArray buffer) throws P return false; } } catch (ArrayIndexOutOfBoundsException e) { - throw new ParserException("Error parsing MS/ACM codec private"); + throw ParserException.createForMalformedContainer( + "Error parsing MS/ACM codec private", /* cause= */ null); } } - } + /** + * Checks that the 
track has an output. + * + *

      It is unfortunately not possible to mark {@link MatroskaExtractor#tracks} as only + * containing tracks with output with the nullness checker. This method is used to check that + * fact at runtime. + */ + @EnsuresNonNull("output") + private void assertOutputInitialized() { + checkNotNull(output); + } + @EnsuresNonNull("codecPrivate") + private byte[] getCodecPrivate(String codecId) throws ParserException { + if (codecPrivate == null) { + throw ParserException.createForMalformedContainer( + "Missing CodecPrivate for codec " + codecId, /* cause= */ null); + } + return codecPrivate; + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/Sniffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/Sniffer.java index 62c9404916..f36adbed66 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/Sniffer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/Sniffer.java @@ -16,6 +16,7 @@ package com.google.android.exoplayer2.extractor.mkv; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.util.ParsableByteArray; import java.io.IOException; @@ -26,10 +27,9 @@ */ /* package */ final class Sniffer { - /** - * The number of bytes to search for a valid header in {@link #sniff(ExtractorInput)}. - */ + /** The number of bytes to search for a valid header in {@link #sniff(ExtractorInput)}. */ private static final int SEARCH_LENGTH = 1024; + private static final int ID_EBML = 0x1A45DFA3; private final ParsableByteArray scratch; @@ -39,24 +39,25 @@ public Sniffer() { scratch = new ParsableByteArray(8); } - /** - * @see com.google.android.exoplayer2.extractor.Extractor#sniff(ExtractorInput) - */ - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { + /** See {@link Extractor#sniff(ExtractorInput)}. */ + public boolean sniff(ExtractorInput input) throws IOException { long inputLength = input.getLength(); - int bytesToSearch = (int) (inputLength == C.LENGTH_UNSET || inputLength > SEARCH_LENGTH - ? SEARCH_LENGTH : inputLength); + int bytesToSearch = + (int) + (inputLength == C.LENGTH_UNSET || inputLength > SEARCH_LENGTH + ? SEARCH_LENGTH + : inputLength); // Find four bytes equal to ID_EBML near the start of the input. - input.peekFully(scratch.data, 0, 4); + input.peekFully(scratch.getData(), 0, 4); long tag = scratch.readUnsignedInt(); peekLength = 4; while (tag != ID_EBML) { if (++peekLength == bytesToSearch) { return false; } - input.peekFully(scratch.data, 0, 1); + input.peekFully(scratch.getData(), 0, 1); tag = (tag << 8) & 0xFFFFFF00; - tag |= scratch.data[0] & 0xFF; + tag |= scratch.getData()[0] & 0xFF; } // Read the size of the EBML header and make sure it is within the stream. @@ -86,12 +87,10 @@ public boolean sniff(ExtractorInput input) throws IOException, InterruptedExcept return peekLength == headerStart + headerSize; } - /** - * Peeks a variable-length unsigned EBML integer from the input. - */ - private long readUint(ExtractorInput input) throws IOException, InterruptedException { - input.peekFully(scratch.data, 0, 1); - int value = scratch.data[0] & 0xFF; + /** Peeks a variable-length unsigned EBML integer from the input. 
*/ + private long readUint(ExtractorInput input) throws IOException { + input.peekFully(scratch.getData(), 0, 1); + int value = scratch.getData()[0] & 0xFF; if (value == 0) { return Long.MIN_VALUE; } @@ -102,13 +101,12 @@ private long readUint(ExtractorInput input) throws IOException, InterruptedExcep length++; } value &= ~mask; - input.peekFully(scratch.data, 1, length); + input.peekFully(scratch.getData(), 1, length); for (int i = 0; i < length; i++) { value <<= 8; - value += scratch.data[i + 1] & 0xFF; + value += scratch.getData()[i + 1] & 0xFF; } peekLength += length + 1; return value; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/VarintReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/VarintReader.java index a94a5ec216..a6ee6d9cc1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/VarintReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/VarintReader.java @@ -20,9 +20,7 @@ import java.io.EOFException; import java.io.IOException; -/** - * Reads EBML variable-length integers (varints) from an {@link ExtractorInput}. - */ +/** Reads EBML variable-length integers (varints) from an {@link ExtractorInput}. */ /* package */ final class VarintReader { private static final int STATE_BEGIN_READING = 0; @@ -34,9 +32,8 @@ * *

      {@code 0x80} is a one-byte integer, {@code 0x40} is two bytes, and so on up to eight bytes. */ - private static final long[] VARINT_LENGTH_MASKS = new long[] { - 0x80L, 0x40L, 0x20L, 0x10L, 0x08L, 0x04L, 0x02L, 0x01L - }; + private static final long[] VARINT_LENGTH_MASKS = + new long[] {0x80L, 0x40L, 0x20L, 0x10L, 0x08L, 0x04L, 0x02L, 0x01L}; private final byte[] scratch; @@ -47,24 +44,22 @@ public VarintReader() { scratch = new byte[8]; } - /** - * Resets the reader to start reading a new variable-length integer. - */ + /** Resets the reader to start reading a new variable-length integer. */ public void reset() { state = STATE_BEGIN_READING; length = 0; } /** - * Reads an EBML variable-length integer (varint) from an {@link ExtractorInput} such that - * reading can be resumed later if an error occurs having read only some of it. - *

      - * If an value is successfully read, then the reader will automatically reset itself ready to + * Reads an EBML variable-length integer (varint) from an {@link ExtractorInput} such that reading + * can be resumed later if an error occurs having read only some of it. + * + *

      If a value is successfully read, then the reader will automatically reset itself ready to * read another value. - *

      - * If an {@link IOException} or {@link InterruptedException} is throw, the read can be resumed - * later by calling this method again, passing an {@link ExtractorInput} providing data starting - * where the previous one left off. + * + *

      If an {@link IOException} is thrown, the read can be resumed later by calling this method + * again, passing an {@link ExtractorInput} providing data starting where the previous one left + * off. * * @param input The {@link ExtractorInput} from which the integer should be read. * @param allowEndOfInput True if encountering the end of the input having read no data is @@ -76,10 +71,13 @@ public void reset() { * and the end of the input was encountered, or {@link C#RESULT_MAX_LENGTH_EXCEEDED} if the * length of the varint exceeded maximumAllowedLength. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread is interrupted. */ - public long readUnsignedVarint(ExtractorInput input, boolean allowEndOfInput, - boolean removeLengthMask, int maximumAllowedLength) throws IOException, InterruptedException { + public long readUnsignedVarint( + ExtractorInput input, + boolean allowEndOfInput, + boolean removeLengthMask, + int maximumAllowedLength) + throws IOException { if (state == STATE_BEGIN_READING) { // Read the first byte to establish the length. if (!input.readFully(scratch, 0, 1, allowEndOfInput)) { @@ -107,9 +105,7 @@ public long readUnsignedVarint(ExtractorInput input, boolean allowEndOfInput, return assembleVarint(scratch, length, removeLengthMask); } - /** - * Returns the number of bytes occupied by the most recently parsed varint. - */ + /** Returns the number of bytes occupied by the most recently parsed varint. */ public int getLastLength() { return length; } @@ -118,8 +114,8 @@ public int getLastLength() { * Parses and the length of the varint given the first byte. * * @param firstByte First byte of the varint. - * @return Length of the varint beginning with the given byte if it was valid, - * {@link C#LENGTH_UNSET} otherwise. + * @return Length of the varint beginning with the given byte if it was valid, {@link + * C#LENGTH_UNSET} otherwise. */ public static int parseUnsignedVarintLength(int firstByte) { int varIntLength = C.LENGTH_UNSET; @@ -140,8 +136,8 @@ public static int parseUnsignedVarintLength(int firstByte) { * @param removeLengthMask Removes the variable-length integer length mask from the value. * @return Parsed and assembled varint. */ - public static long assembleVarint(byte[] varintBytes, int varintLength, - boolean removeLengthMask) { + public static long assembleVarint( + byte[] varintBytes, int varintLength, boolean removeLengthMask) { long varint = varintBytes[0] & 0xFFL; if (removeLengthMask) { varint &= ~VARINT_LENGTH_MASKS[varintLength - 1]; @@ -151,5 +147,4 @@ public static long assembleVarint(byte[] varintBytes, int varintLength, } return varint; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/package-info.java new file mode 100644 index 0000000000..15629ba584 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mkv/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.extractor.mkv; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/ConstantBitrateSeeker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/ConstantBitrateSeeker.java index 4a5feb5096..9762f2d0e5 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/ConstantBitrateSeeker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/ConstantBitrateSeeker.java @@ -16,8 +16,8 @@ package com.google.android.exoplayer2.extractor.mp3; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.audio.MpegAudioUtil; import com.google.android.exoplayer2.extractor.ConstantBitrateSeekMap; -import com.google.android.exoplayer2.extractor.MpegAudioHeader; /** * MP3 seeker that doesn't rely on metadata and seeks assuming the source has a constant bitrate. @@ -28,10 +28,23 @@ * @param inputLength The length of the stream in bytes, or {@link C#LENGTH_UNSET} if unknown. * @param firstFramePosition The position of the first frame in the stream. * @param mpegAudioHeader The MPEG audio header associated with the first frame. + * @param allowSeeksIfLengthUnknown Whether to allow seeking even if the length of the content is + * unknown. */ public ConstantBitrateSeeker( - long inputLength, long firstFramePosition, MpegAudioHeader mpegAudioHeader) { - super(inputLength, firstFramePosition, mpegAudioHeader.bitrate, mpegAudioHeader.frameSize); + long inputLength, + long firstFramePosition, + MpegAudioUtil.Header mpegAudioHeader, + boolean allowSeeksIfLengthUnknown) { + // Set the seeker frame size to the size of the first frame (even though some constant bitrate + // streams have variable frame sizes) to avoid the need to re-synchronize for constant frame + // size streams. + super( + inputLength, + firstFramePosition, + mpegAudioHeader.bitrate, + mpegAudioHeader.frameSize, + allowSeeksIfLengthUnknown); } @Override diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/IndexSeeker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/IndexSeeker.java new file mode 100644 index 0000000000..4b9d2e46e8 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/IndexSeeker.java @@ -0,0 +1,112 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.extractor.mp3; + +import androidx.annotation.VisibleForTesting; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.extractor.SeekPoint; +import com.google.android.exoplayer2.util.LongArray; +import com.google.android.exoplayer2.util.Util; + +/** MP3 seeker that builds a time-to-byte mapping as the stream is read. */ +/* package */ final class IndexSeeker implements Seeker { + + @VisibleForTesting + /* package */ static final long MIN_TIME_BETWEEN_POINTS_US = C.MICROS_PER_SECOND / 10; + + private final long dataEndPosition; + private final LongArray timesUs; + private final LongArray positions; + + private long durationUs; + + public IndexSeeker(long durationUs, long dataStartPosition, long dataEndPosition) { + this.durationUs = durationUs; + this.dataEndPosition = dataEndPosition; + timesUs = new LongArray(); + positions = new LongArray(); + timesUs.add(0L); + positions.add(dataStartPosition); + } + + @Override + public long getTimeUs(long position) { + int targetIndex = + Util.binarySearchFloor( + positions, position, /* inclusive= */ true, /* stayInBounds= */ true); + return timesUs.get(targetIndex); + } + + @Override + public long getDataEndPosition() { + return dataEndPosition; + } + + @Override + public boolean isSeekable() { + return true; + } + + @Override + public long getDurationUs() { + return durationUs; + } + + @Override + public SeekPoints getSeekPoints(long timeUs) { + int targetIndex = + Util.binarySearchFloor(timesUs, timeUs, /* inclusive= */ true, /* stayInBounds= */ true); + SeekPoint seekPoint = new SeekPoint(timesUs.get(targetIndex), positions.get(targetIndex)); + if (seekPoint.timeUs == timeUs || targetIndex == timesUs.size() - 1) { + return new SeekPoints(seekPoint); + } else { + SeekPoint nextSeekPoint = + new SeekPoint(timesUs.get(targetIndex + 1), positions.get(targetIndex + 1)); + return new SeekPoints(seekPoint, nextSeekPoint); + } + } + + /** + * Adds a seek point to the index if it is sufficiently distant from the other points. + * + *

      Seek points must be added in order. + * + * @param timeUs The time corresponding to the seek point to add in microseconds. + * @param position The position corresponding to the seek point to add in bytes. + */ + public void maybeAddSeekPoint(long timeUs, long position) { + if (isTimeUsInIndex(timeUs)) { + return; + } + timesUs.add(timeUs); + positions.add(position); + } + + /** + * Returns whether {@code timeUs} (in microseconds) is included in the index. + * + *

      A point is included in the index if it is equal to another point, between 2 points, or + * sufficiently close to the last point. + */ + public boolean isTimeUsInIndex(long timeUs) { + long lastIndexedTimeUs = timesUs.get(timesUs.size() - 1); + return timeUs - lastIndexedTimeUs < MIN_TIME_BETWEEN_POINTS_US; + } + + /* package */ void setDurationUs(long durationUs) { + this.durationUs = durationUs; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/MlltSeeker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/MlltSeeker.java index 1b627483f0..2204cfccd7 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/MlltSeeker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/MlltSeeker.java @@ -29,9 +29,11 @@ * * @param firstFramePosition The position of the start of the first frame in the stream. * @param mlltFrame The MLLT frame with seeking metadata. + * @param durationUs The stream duration in microseconds, or {@link C#TIME_UNSET} if it is + * unknown. * @return An {@link MlltSeeker} for seeking in the stream. */ - public static MlltSeeker create(long firstFramePosition, MlltFrame mlltFrame) { + public static MlltSeeker create(long firstFramePosition, MlltFrame mlltFrame, long durationUs) { int referenceCount = mlltFrame.bytesDeviations.length; long[] referencePositions = new long[1 + referenceCount]; long[] referenceTimesMs = new long[1 + referenceCount]; @@ -45,19 +47,22 @@ public static MlltSeeker create(long firstFramePosition, MlltFrame mlltFrame) { referencePositions[i] = position; referenceTimesMs[i] = timeMs; } - return new MlltSeeker(referencePositions, referenceTimesMs); + return new MlltSeeker(referencePositions, referenceTimesMs, durationUs); } private final long[] referencePositions; private final long[] referenceTimesMs; private final long durationUs; - private MlltSeeker(long[] referencePositions, long[] referenceTimesMs) { + private MlltSeeker(long[] referencePositions, long[] referenceTimesMs, long durationUs) { this.referencePositions = referencePositions; this.referenceTimesMs = referenceTimesMs; - // Use the last reference point as the duration, as extrapolating variable bitrate at the end of - // the stream may give a large error. - durationUs = C.msToUs(referenceTimesMs[referenceTimesMs.length - 1]); + // Use the last reference point as the duration if it is unknown, as extrapolating variable + // bitrate at the end of the stream may give a large error. + this.durationUs = + durationUs != C.TIME_UNSET + ? 
durationUs + : Util.msToUs(referenceTimesMs[referenceTimesMs.length - 1]); } @Override @@ -69,8 +74,8 @@ public boolean isSeekable() { public SeekPoints getSeekPoints(long timeUs) { timeUs = Util.constrainValue(timeUs, 0, durationUs); Pair timeMsAndPosition = - linearlyInterpolate(C.usToMs(timeUs), referenceTimesMs, referencePositions); - timeUs = C.msToUs(timeMsAndPosition.first); + linearlyInterpolate(Util.usToMs(timeUs), referenceTimesMs, referencePositions); + timeUs = Util.msToUs(timeMsAndPosition.first); long position = timeMsAndPosition.second; return new SeekPoints(new SeekPoint(timeUs, position)); } @@ -79,7 +84,7 @@ public SeekPoints getSeekPoints(long timeUs) { public long getTimeUs(long position) { Pair positionAndTimeMs = linearlyInterpolate(position, referencePositions, referenceTimesMs); - return C.msToUs(positionAndTimeMs.second); + return Util.msToUs(positionAndTimeMs.second); } @Override diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/Mp3Extractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/Mp3Extractor.java index 7a25677c55..40bf68e152 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/Mp3Extractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/Mp3Extractor.java @@ -15,18 +15,23 @@ */ package com.google.android.exoplayer2.extractor.mp3; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.Player; +import com.google.android.exoplayer2.audio.MpegAudioUtil; +import com.google.android.exoplayer2.extractor.DummyTrackOutput; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.ExtractorsFactory; import com.google.android.exoplayer2.extractor.GaplessInfoHolder; import com.google.android.exoplayer2.extractor.Id3Peeker; -import com.google.android.exoplayer2.extractor.MpegAudioHeader; import com.google.android.exoplayer2.extractor.PositionHolder; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.extractor.mp3.Seeker.UnseekableSeeker; @@ -34,16 +39,21 @@ import com.google.android.exoplayer2.metadata.id3.Id3Decoder; import com.google.android.exoplayer2.metadata.id3.Id3Decoder.FramePredicate; import com.google.android.exoplayer2.metadata.id3.MlltFrame; +import com.google.android.exoplayer2.metadata.id3.TextInformationFrame; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; import java.io.EOFException; import java.io.IOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.checkerframework.checker.nullness.qual.EnsuresNonNull; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * Extracts data from the MP3 container format. - */ +/** Extracts data from the MP3 container format. 
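// --- Illustrative aside, not part of the patch: the MlltSeeker changes above still map between
// byte positions and timestamps by linear interpolation between adjacent MLLT reference points;
// only the source of the duration changed. Standalone sketch of that interpolation idea,
// assuming xs is sorted and xs/ys have equal length (hypothetical helper, not ExoPlayer API):
final class InterpolationSketch {
  static long interpolate(long x, long[] xs, long[] ys) {
    int i = java.util.Arrays.binarySearch(xs, x);
    if (i >= 0) {
      return ys[i]; // Exact match on a reference point.
    }
    int insertion = -(i + 1);
    if (insertion == 0) {
      return ys[0]; // Clamp below the first reference point.
    }
    if (insertion == xs.length) {
      return ys[xs.length - 1]; // Clamp above the last reference point.
    }
    double fraction = (double) (x - xs[insertion - 1]) / (xs[insertion] - xs[insertion - 1]);
    return ys[insertion - 1] + (long) (fraction * (ys[insertion] - ys[insertion - 1]));
  }
}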
*/ public final class Mp3Extractor implements Extractor { /** Factory for {@link Mp3Extractor} instances. */ @@ -51,24 +61,61 @@ public final class Mp3Extractor implements Extractor { /** * Flags controlling the behavior of the extractor. Possible flag values are {@link - * #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING} and {@link #FLAG_DISABLE_ID3_METADATA}. + * #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING}, {@link #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS}, + * {@link #FLAG_ENABLE_INDEX_SEEKING} and {@link #FLAG_DISABLE_ID3_METADATA}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef( flag = true, - value = {FLAG_ENABLE_CONSTANT_BITRATE_SEEKING, FLAG_DISABLE_ID3_METADATA}) + value = { + FLAG_ENABLE_CONSTANT_BITRATE_SEEKING, + FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS, + FLAG_ENABLE_INDEX_SEEKING, + FLAG_DISABLE_ID3_METADATA + }) public @interface Flags {} /** * Flag to force enable seeking using a constant bitrate assumption in cases where seeking would * otherwise not be possible. + * + *
<p>
      This flag is ignored if {@link #FLAG_ENABLE_INDEX_SEEKING} is set. */ public static final int FLAG_ENABLE_CONSTANT_BITRATE_SEEKING = 1; + /** + * Like {@link #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING}, except that seeking is also enabled in + * cases where the content length (and hence the duration of the media) is unknown. Application + * code should ensure that requested seek positions are valid when using this flag, or be ready to + * handle playback failures reported through {@link Player.Listener#onPlayerError} with {@link + * PlaybackException#errorCode} set to {@link + * PlaybackException#ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE}. + * + *
<p>
      If this flag is set, then the behavior enabled by {@link + * #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING} is implicitly enabled. + * + *
<p>
      This flag is ignored if {@link #FLAG_ENABLE_INDEX_SEEKING} is set. + */ + public static final int FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS = 1 << 1; + + /** + * Flag to force index seeking, in which a time-to-byte mapping is built as the file is read. + * + *
<p>
      This seeker may require to scan a significant portion of the file to compute a seek point. + * Therefore, it should only be used if one of the following is true: + * + *

        + *
      • The file is small. + *
      • The bitrate is variable (or it's unknown whether it's variable) and the file does not + * provide precise enough seeking metadata. + *
      + */ + public static final int FLAG_ENABLE_INDEX_SEEKING = 1 << 2; /** * Flag to disable parsing of ID3 metadata. Can be set to save memory if ID3 metadata is not * required. */ - public static final int FLAG_DISABLE_ID3_METADATA = 2; + public static final int FLAG_DISABLE_ID3_METADATA = 1 << 3; /** Predicate that matches ID3 frames containing only required gapless/seeking metadata. */ private static final FramePredicate REQUIRED_ID3_FRAME_PREDICATE = @@ -76,22 +123,16 @@ public final class Mp3Extractor implements Extractor { ((id0 == 'C' && id1 == 'O' && id2 == 'M' && (id3 == 'M' || majorVersion == 2)) || (id0 == 'M' && id1 == 'L' && id2 == 'L' && (id3 == 'T' || majorVersion == 2))); - /** - * The maximum number of bytes to search when synchronizing, before giving up. - */ + /** The maximum number of bytes to search when synchronizing, before giving up. */ private static final int MAX_SYNC_BYTES = 128 * 1024; /** * The maximum number of bytes to peek when sniffing, excluding the ID3 header, before giving up. */ - private static final int MAX_SNIFF_BYTES = 16 * 1024; - /** - * Maximum length of data read into {@link #scratch}. - */ + private static final int MAX_SNIFF_BYTES = 32 * 1024; + /** Maximum length of data read into {@link #scratch}. */ private static final int SCRATCH_LENGTH = 10; - /** - * Mask that includes the audio header values that must match between frames. - */ + /** Mask that includes the audio header values that must match between frames. */ private static final int MPEG_AUDIO_HEADER_MASK = 0xFFFE0C00; private static final int SEEK_HEADER_XING = 0x58696e67; @@ -99,27 +140,31 @@ public final class Mp3Extractor implements Extractor { private static final int SEEK_HEADER_VBRI = 0x56425249; private static final int SEEK_HEADER_UNSET = 0; - @Flags private final int flags; + private final @Flags int flags; private final long forcedFirstSampleTimestampUs; private final ParsableByteArray scratch; - private final MpegAudioHeader synchronizedHeader; + private final MpegAudioUtil.Header synchronizedHeader; private final GaplessInfoHolder gaplessInfoHolder; private final Id3Peeker id3Peeker; + private final TrackOutput skippingTrackOutput; - // Extractor outputs. - private ExtractorOutput extractorOutput; - private TrackOutput trackOutput; + private @MonotonicNonNull ExtractorOutput extractorOutput; + private @MonotonicNonNull TrackOutput realTrackOutput; + private TrackOutput currentTrackOutput; // skippingTrackOutput or realTrackOutput. private int synchronizedHeaderData; - private Metadata metadata; - @Nullable private Seeker seeker; - private boolean disableSeeking; + @Nullable private Metadata metadata; private long basisTimeUs; private long samplesRead; private long firstSamplePosition; private int sampleBytesRemaining; + private @MonotonicNonNull Seeker seeker; + private boolean disableSeeking; + private boolean isSeekInProgress; + private long seekTimeUs; + public Mp3Extractor() { this(0); } @@ -133,30 +178,36 @@ public Mp3Extractor(@Flags int flags) { /** * @param flags Flags that control the extractor's behavior. - * @param forcedFirstSampleTimestampUs A timestamp to force for the first sample, or - * {@link C#TIME_UNSET} if forcing is not required. + * @param forcedFirstSampleTimestampUs A timestamp to force for the first sample, or {@link + * C#TIME_UNSET} if forcing is not required. 
*/ public Mp3Extractor(@Flags int flags, long forcedFirstSampleTimestampUs) { + if ((flags & FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS) != 0) { + flags |= FLAG_ENABLE_CONSTANT_BITRATE_SEEKING; + } this.flags = flags; this.forcedFirstSampleTimestampUs = forcedFirstSampleTimestampUs; scratch = new ParsableByteArray(SCRATCH_LENGTH); - synchronizedHeader = new MpegAudioHeader(); + synchronizedHeader = new MpegAudioUtil.Header(); gaplessInfoHolder = new GaplessInfoHolder(); basisTimeUs = C.TIME_UNSET; id3Peeker = new Id3Peeker(); + skippingTrackOutput = new DummyTrackOutput(); + currentTrackOutput = skippingTrackOutput; } // Extractor implementation. @Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { + public boolean sniff(ExtractorInput input) throws IOException { return synchronize(input, true); } @Override public void init(ExtractorOutput output) { extractorOutput = output; - trackOutput = extractorOutput.track(0, C.TRACK_TYPE_AUDIO); + realTrackOutput = extractorOutput.track(0, C.TRACK_TYPE_AUDIO); + currentTrackOutput = realTrackOutput; extractorOutput.endTracks(); } @@ -166,6 +217,11 @@ public void seek(long position, long timeUs) { basisTimeUs = C.TIME_UNSET; samplesRead = 0; sampleBytesRemaining = 0; + seekTimeUs = timeUs; + if (seeker instanceof IndexSeeker && !((IndexSeeker) seeker).isTimeUsInIndex(timeUs)) { + isSeekInProgress = true; + currentTrackOutput = skippingTrackOutput; + } } @Override @@ -174,8 +230,33 @@ public void release() { } @Override - public int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { + assertInitialized(); + int readResult = readInternal(input); + if (readResult == RESULT_END_OF_INPUT && seeker instanceof IndexSeeker) { + // Duration is exact when index seeker is used. + long durationUs = computeTimeUs(samplesRead); + if (seeker.getDurationUs() != durationUs) { + ((IndexSeeker) seeker).setDurationUs(durationUs); + extractorOutput.seekMap(seeker); + } + } + return readResult; + } + + /** + * Disables the extractor from being able to seek through the media. + * + *
<p>
      Please note that this needs to be called before {@link #read}. + */ + public void disableSeeking() { + disableSeeking = true; + } + + // Internal methods. + + @RequiresNonNull({"extractorOutput", "realTrackOutput"}) + private int readInternal(ExtractorInput input) throws IOException { if (synchronizedHeaderData == 0) { try { synchronize(input, false); @@ -184,42 +265,18 @@ public int read(ExtractorInput input, PositionHolder seekPosition) } } if (seeker == null) { - // Read past any seek frame and set the seeker based on metadata or a seek frame. Metadata - // takes priority as it can provide greater precision. - Seeker seekFrameSeeker = maybeReadSeekFrame(input); - Seeker metadataSeeker = maybeHandleSeekMetadata(metadata, input.getPosition()); - - if (disableSeeking) { - seeker = new UnseekableSeeker(); - } else { - if (metadataSeeker != null) { - seeker = metadataSeeker; - } else if (seekFrameSeeker != null) { - seeker = seekFrameSeeker; - } - if (seeker == null - || (!seeker.isSeekable() && (flags & FLAG_ENABLE_CONSTANT_BITRATE_SEEKING) != 0)) { - seeker = getConstantBitrateSeeker(input); - } - } + seeker = computeSeeker(input); extractorOutput.seekMap(seeker); - trackOutput.format( - Format.createAudioSampleFormat( - /* id= */ null, - synchronizedHeader.mimeType, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - MpegAudioHeader.MAX_FRAME_SIZE_BYTES, - synchronizedHeader.channels, - synchronizedHeader.sampleRate, - /* pcmEncoding= */ Format.NO_VALUE, - gaplessInfoHolder.encoderDelay, - gaplessInfoHolder.encoderPadding, - /* initializationData= */ null, - /* drmInitData= */ null, - /* selectionFlags= */ 0, - /* language= */ null, - (flags & FLAG_DISABLE_ID3_METADATA) != 0 ? null : metadata)); + currentTrackOutput.format( + new Format.Builder() + .setSampleMimeType(synchronizedHeader.mimeType) + .setMaxInputSize(MpegAudioUtil.MAX_FRAME_SIZE_BYTES) + .setChannelCount(synchronizedHeader.channels) + .setSampleRate(synchronizedHeader.sampleRate) + .setEncoderDelay(gaplessInfoHolder.encoderDelay) + .setEncoderPadding(gaplessInfoHolder.encoderPadding) + .setMetadata((flags & FLAG_DISABLE_ID3_METADATA) != 0 ? null : metadata) + .build()); firstSamplePosition = input.getPosition(); } else if (firstSamplePosition != 0) { long inputPosition = input.getPosition(); @@ -231,18 +288,8 @@ public int read(ExtractorInput input, PositionHolder seekPosition) return readSample(input); } - /** - * Disables the extractor from being able to seek through the media. - * - *
<p>
      Please note that this needs to be called before {@link #read}. - */ - public void disableSeeking() { - disableSeeking = true; - } - - // Internal methods. - - private int readSample(ExtractorInput extractorInput) throws IOException, InterruptedException { + @RequiresNonNull({"realTrackOutput", "seeker"}) + private int readSample(ExtractorInput extractorInput) throws IOException { if (sampleBytesRemaining == 0) { extractorInput.resetPeekPosition(); if (peekEndOfStreamOrHeader(extractorInput)) { @@ -251,13 +298,13 @@ private int readSample(ExtractorInput extractorInput) throws IOException, Interr scratch.setPosition(0); int sampleHeaderData = scratch.readInt(); if (!headersMatch(sampleHeaderData, synchronizedHeaderData) - || MpegAudioHeader.getFrameSize(sampleHeaderData) == C.LENGTH_UNSET) { + || MpegAudioUtil.getFrameSize(sampleHeaderData) == C.LENGTH_UNSET) { // We have lost synchronization, so attempt to resynchronize starting at the next byte. extractorInput.skipFully(1); synchronizedHeaderData = 0; return RESULT_CONTINUE; } - MpegAudioHeader.populateHeader(sampleHeaderData, synchronizedHeader); + synchronizedHeader.setForHeaderData(sampleHeaderData); if (basisTimeUs == C.TIME_UNSET) { basisTimeUs = seeker.getTimeUs(extractorInput.getPosition()); if (forcedFirstSampleTimestampUs != C.TIME_UNSET) { @@ -266,8 +313,20 @@ private int readSample(ExtractorInput extractorInput) throws IOException, Interr } } sampleBytesRemaining = synchronizedHeader.frameSize; + if (seeker instanceof IndexSeeker) { + IndexSeeker indexSeeker = (IndexSeeker) seeker; + // Add seek point corresponding to the next frame instead of the current one to be able to + // start writing to the realTrackOutput on time when a seek is in progress. + indexSeeker.maybeAddSeekPoint( + computeTimeUs(samplesRead + synchronizedHeader.samplesPerFrame), + extractorInput.getPosition() + synchronizedHeader.frameSize); + if (isSeekInProgress && indexSeeker.isTimeUsInIndex(seekTimeUs)) { + isSeekInProgress = false; + currentTrackOutput = realTrackOutput; + } + } } - int bytesAppended = trackOutput.sampleData(extractorInput, sampleBytesRemaining, true); + int bytesAppended = currentTrackOutput.sampleData(extractorInput, sampleBytesRemaining, true); if (bytesAppended == C.RESULT_END_OF_INPUT) { return RESULT_END_OF_INPUT; } @@ -275,16 +334,18 @@ private int readSample(ExtractorInput extractorInput) throws IOException, Interr if (sampleBytesRemaining > 0) { return RESULT_CONTINUE; } - long timeUs = basisTimeUs + (samplesRead * C.MICROS_PER_SECOND / synchronizedHeader.sampleRate); - trackOutput.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, synchronizedHeader.frameSize, 0, - null); + currentTrackOutput.sampleMetadata( + computeTimeUs(samplesRead), C.BUFFER_FLAG_KEY_FRAME, synchronizedHeader.frameSize, 0, null); samplesRead += synchronizedHeader.samplesPerFrame; sampleBytesRemaining = 0; return RESULT_CONTINUE; } - private boolean synchronize(ExtractorInput input, boolean sniffing) - throws IOException, InterruptedException { + private long computeTimeUs(long samplesRead) { + return basisTimeUs + samplesRead * C.MICROS_PER_SECOND / synchronizedHeader.sampleRate; + } + + private boolean synchronize(ExtractorInput input, boolean sniffing) throws IOException { int validFrameCount = 0; int candidateSynchronizedHeaderData = 0; int peekedId3Bytes = 0; @@ -318,12 +379,13 @@ private boolean synchronize(ExtractorInput input, boolean sniffing) int headerData = scratch.readInt(); int frameSize; if ((candidateSynchronizedHeaderData != 0 - && 
!headersMatch(headerData, candidateSynchronizedHeaderData)) - || (frameSize = MpegAudioHeader.getFrameSize(headerData)) == C.LENGTH_UNSET) { + && !headersMatch(headerData, candidateSynchronizedHeaderData)) + || (frameSize = MpegAudioUtil.getFrameSize(headerData)) == C.LENGTH_UNSET) { // The header doesn't match the candidate header or is invalid. Try the next byte offset. if (searchedBytes++ == searchLimitBytes) { if (!sniffing) { - throw new ParserException("Searched too many bytes."); + throw ParserException.createForMalformedContainer( + "Searched too many bytes.", /* cause= */ null); } return false; } @@ -339,7 +401,7 @@ private boolean synchronize(ExtractorInput input, boolean sniffing) // The header matches the candidate header and/or is valid. validFrameCount++; if (validFrameCount == 1) { - MpegAudioHeader.populateHeader(headerData, synchronizedHeader); + synchronizedHeader.setForHeaderData(headerData); candidateSynchronizedHeaderData = headerData; } else if (validFrameCount == 4) { break; @@ -361,8 +423,7 @@ private boolean synchronize(ExtractorInput input, boolean sniffing) * Returns whether the extractor input is peeking the end of the stream. If {@code false}, * populates the scratch buffer with the next four bytes. */ - private boolean peekEndOfStreamOrHeader(ExtractorInput extractorInput) - throws IOException, InterruptedException { + private boolean peekEndOfStreamOrHeader(ExtractorInput extractorInput) throws IOException { if (seeker != null) { long dataEndPosition = seeker.getDataEndPosition(); if (dataEndPosition != C.POSITION_UNSET @@ -372,12 +433,54 @@ private boolean peekEndOfStreamOrHeader(ExtractorInput extractorInput) } try { return !extractorInput.peekFully( - scratch.data, /* offset= */ 0, /* length= */ 4, /* allowEndOfInput= */ true); + scratch.getData(), /* offset= */ 0, /* length= */ 4, /* allowEndOfInput= */ true); } catch (EOFException e) { return true; } } + private Seeker computeSeeker(ExtractorInput input) throws IOException { + // Read past any seek frame and set the seeker based on metadata or a seek frame. Metadata + // takes priority as it can provide greater precision. 
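 // In priority order, the selection below yields an IndexSeeker when FLAG_ENABLE_INDEX_SEEKING is set
 // (seeding its duration from the metadata seeker, the seek-frame seeker, or the ID3 TLEN frame),
 // otherwise the MLLT metadata seeker, otherwise the Xing/Info/VBRI seek-frame seeker. If nothing is
 // found, or the chosen seeker is not seekable and FLAG_ENABLE_CONSTANT_BITRATE_SEEKING is set, a
 // constant bitrate seeker is used instead; disableSeeking() short-circuits the selection to an
 // UnseekableSeeker.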
+ Seeker seekFrameSeeker = maybeReadSeekFrame(input); + Seeker metadataSeeker = maybeHandleSeekMetadata(metadata, input.getPosition()); + + if (disableSeeking) { + return new UnseekableSeeker(); + } + + @Nullable Seeker resultSeeker = null; + if ((flags & FLAG_ENABLE_INDEX_SEEKING) != 0) { + long durationUs; + long dataEndPosition = C.POSITION_UNSET; + if (metadataSeeker != null) { + durationUs = metadataSeeker.getDurationUs(); + dataEndPosition = metadataSeeker.getDataEndPosition(); + } else if (seekFrameSeeker != null) { + durationUs = seekFrameSeeker.getDurationUs(); + dataEndPosition = seekFrameSeeker.getDataEndPosition(); + } else { + durationUs = getId3TlenUs(metadata); + } + resultSeeker = + new IndexSeeker( + durationUs, /* dataStartPosition= */ input.getPosition(), dataEndPosition); + } else if (metadataSeeker != null) { + resultSeeker = metadataSeeker; + } else if (seekFrameSeeker != null) { + resultSeeker = seekFrameSeeker; + } + + if (resultSeeker == null + || (!resultSeeker.isSeekable() && (flags & FLAG_ENABLE_CONSTANT_BITRATE_SEEKING) != 0)) { + resultSeeker = + getConstantBitrateSeeker( + input, (flags & FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS) != 0); + } + + return resultSeeker; + } + /** * Consumes the next frame from the {@code input} if it contains VBRI or Xing seeking metadata, * returning a {@link Seeker} if the metadata was present and valid, or {@code null} otherwise. @@ -387,31 +490,31 @@ private boolean peekEndOfStreamOrHeader(ExtractorInput extractorInput) * @return A {@link Seeker} if seeking metadata was present and valid, or {@code null} otherwise. * @throws IOException Thrown if there was an error reading from the stream. Not expected if the * next two frames were already peeked during synchronization. - * @throws InterruptedException Thrown if reading from the stream was interrupted. Not expected if - * the next two frames were already peeked during synchronization. */ - private Seeker maybeReadSeekFrame(ExtractorInput input) throws IOException, InterruptedException { + @Nullable + private Seeker maybeReadSeekFrame(ExtractorInput input) throws IOException { ParsableByteArray frame = new ParsableByteArray(synchronizedHeader.frameSize); - input.peekFully(frame.data, 0, synchronizedHeader.frameSize); - int xingBase = (synchronizedHeader.version & 1) != 0 - ? (synchronizedHeader.channels != 1 ? 36 : 21) // MPEG 1 - : (synchronizedHeader.channels != 1 ? 21 : 13); // MPEG 2 or 2.5 + input.peekFully(frame.getData(), 0, synchronizedHeader.frameSize); + int xingBase = + (synchronizedHeader.version & 1) != 0 + ? (synchronizedHeader.channels != 1 ? 36 : 21) // MPEG 1 + : (synchronizedHeader.channels != 1 ? 21 : 13); // MPEG 2 or 2.5 int seekHeader = getSeekFrameHeader(frame, xingBase); - Seeker seeker; + @Nullable Seeker seeker; if (seekHeader == SEEK_HEADER_XING || seekHeader == SEEK_HEADER_INFO) { seeker = XingSeeker.create(input.getLength(), input.getPosition(), synchronizedHeader, frame); if (seeker != null && !gaplessInfoHolder.hasGaplessInfo()) { // If there is a Xing header, read gapless playback metadata at a fixed offset. 
input.resetPeekPosition(); input.advancePeekPosition(xingBase + 141); - input.peekFully(scratch.data, 0, 3); + input.peekFully(scratch.getData(), 0, 3); scratch.setPosition(0); gaplessInfoHolder.setFromXingHeaderValue(scratch.readUnsignedInt24()); } input.skipFully(synchronizedHeader.frameSize); if (seeker != null && !seeker.isSeekable() && seekHeader == SEEK_HEADER_INFO) { // Fall back to constant bitrate seeking for Info headers missing a table of contents. - return getConstantBitrateSeeker(input); + return getConstantBitrateSeeker(input, /* allowSeeksIfLengthUnknown= */ false); } } else if (seekHeader == SEEK_HEADER_VBRI) { seeker = VbriSeeker.create(input.getLength(), input.getPosition(), synchronizedHeader, frame); @@ -424,20 +527,23 @@ private Seeker maybeReadSeekFrame(ExtractorInput input) throws IOException, Inte return seeker; } - /** - * Peeks the next frame and returns a {@link ConstantBitrateSeeker} based on its bitrate. - */ - private Seeker getConstantBitrateSeeker(ExtractorInput input) - throws IOException, InterruptedException { - input.peekFully(scratch.data, 0, 4); + /** Peeks the next frame and returns a {@link ConstantBitrateSeeker} based on its bitrate. */ + private Seeker getConstantBitrateSeeker(ExtractorInput input, boolean allowSeeksIfLengthUnknown) + throws IOException { + input.peekFully(scratch.getData(), 0, 4); scratch.setPosition(0); - MpegAudioHeader.populateHeader(scratch.readInt(), synchronizedHeader); - return new ConstantBitrateSeeker(input.getLength(), input.getPosition(), synchronizedHeader); + synchronizedHeader.setForHeaderData(scratch.readInt()); + return new ConstantBitrateSeeker( + input.getLength(), input.getPosition(), synchronizedHeader, allowSeeksIfLengthUnknown); } - /** - * Returns whether the headers match in those bits masked by {@link #MPEG_AUDIO_HEADER_MASK}. - */ + @EnsuresNonNull({"extractorOutput", "realTrackOutput"}) + private void assertInitialized() { + Assertions.checkStateNotNull(realTrackOutput); + Util.castNonNull(extractorOutput); + } + + /** Returns whether the headers match in those bits masked by {@link #MPEG_AUDIO_HEADER_MASK}. 
*/ private static boolean headersMatch(int headerA, long headerB) { return (headerA & MPEG_AUDIO_HEADER_MASK) == (headerB & MPEG_AUDIO_HEADER_MASK); } @@ -465,18 +571,31 @@ private static int getSeekFrameHeader(ParsableByteArray frame, int xingBase) { } @Nullable - private static MlltSeeker maybeHandleSeekMetadata(Metadata metadata, long firstFramePosition) { + private static MlltSeeker maybeHandleSeekMetadata( + @Nullable Metadata metadata, long firstFramePosition) { if (metadata != null) { int length = metadata.length(); for (int i = 0; i < length; i++) { Metadata.Entry entry = metadata.get(i); if (entry instanceof MlltFrame) { - return MlltSeeker.create(firstFramePosition, (MlltFrame) entry); + return MlltSeeker.create(firstFramePosition, (MlltFrame) entry, getId3TlenUs(metadata)); } } } return null; } - + private static long getId3TlenUs(@Nullable Metadata metadata) { + if (metadata != null) { + int length = metadata.length(); + for (int i = 0; i < length; i++) { + Metadata.Entry entry = metadata.get(i); + if (entry instanceof TextInformationFrame + && ((TextInformationFrame) entry).id.equals("TLEN")) { + return Util.msToUs(Long.parseLong(((TextInformationFrame) entry).values.get(0))); + } + } + } + return C.TIME_UNSET; + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/VbriSeeker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/VbriSeeker.java index 86551319e1..0ee5fc94e2 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/VbriSeeker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/VbriSeeker.java @@ -15,9 +15,11 @@ */ package com.google.android.exoplayer2.extractor.mp3; +import static java.lang.Math.max; + import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.extractor.MpegAudioHeader; +import com.google.android.exoplayer2.audio.MpegAudioUtil; import com.google.android.exoplayer2.extractor.SeekPoint; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.ParsableByteArray; @@ -41,16 +43,21 @@ * @return A {@link VbriSeeker} for seeking in the stream, or {@code null} if the required * information is not present. */ - public static @Nullable VbriSeeker create( - long inputLength, long position, MpegAudioHeader mpegAudioHeader, ParsableByteArray frame) { + @Nullable + public static VbriSeeker create( + long inputLength, + long position, + MpegAudioUtil.Header mpegAudioHeader, + ParsableByteArray frame) { frame.skipBytes(10); int numFrames = frame.readInt(); if (numFrames <= 0) { return null; } int sampleRate = mpegAudioHeader.sampleRate; - long durationUs = Util.scaleLargeTimestamp(numFrames, - C.MICROS_PER_SECOND * (sampleRate >= 32000 ? 1152 : 576), sampleRate); + long durationUs = + Util.scaleLargeTimestamp( + numFrames, C.MICROS_PER_SECOND * (sampleRate >= 32000 ? 1152 : 576), sampleRate); int entryCount = frame.readUnsignedShort(); int scale = frame.readUnsignedShort(); int entrySize = frame.readUnsignedShort(); @@ -64,7 +71,7 @@ timesUs[index] = (index * durationUs) / entryCount; // Ensure positions do not fall within the frame containing the VBRI header. This constraint // will normally only apply to the first entry in the table. 
- positions[index] = Math.max(position, minPosition); + positions[index] = max(position, minPosition); int segmentSize; switch (entrySize) { case 1: @@ -82,7 +89,7 @@ default: return null; } - position += segmentSize * scale; + position += segmentSize * ((long) scale); } if (inputLength != C.LENGTH_UNSET && inputLength != position) { Log.w(TAG, "VBRI data size mismatch: " + inputLength + ", " + position); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/XingSeeker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/XingSeeker.java index c51b68a7c6..51950e6282 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/XingSeeker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/XingSeeker.java @@ -17,7 +17,7 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.extractor.MpegAudioHeader; +import com.google.android.exoplayer2.audio.MpegAudioUtil; import com.google.android.exoplayer2.extractor.SeekPoint; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; @@ -42,8 +42,12 @@ * @return A {@link XingSeeker} for seeking in the stream, or {@code null} if the required * information is not present. */ - public static @Nullable XingSeeker create( - long inputLength, long position, MpegAudioHeader mpegAudioHeader, ParsableByteArray frame) { + @Nullable + public static XingSeeker create( + long inputLength, + long position, + MpegAudioUtil.Header mpegAudioHeader, + ParsableByteArray frame) { int samplesPerFrame = mpegAudioHeader.samplesPerFrame; int sampleRate = mpegAudioHeader.sampleRate; @@ -53,8 +57,8 @@ // If the frame count is missing/invalid, the header can't be used to determine the duration. return null; } - long durationUs = Util.scaleLargeTimestamp(frameCount, samplesPerFrame * C.MICROS_PER_SECOND, - sampleRate); + long durationUs = + Util.scaleLargeTimestamp(frameCount, samplesPerFrame * C.MICROS_PER_SECOND, sampleRate); if ((flags & 0x06) != 0x06) { // If the size in bytes or table of contents is missing, the stream is not seekable. return new XingSeeker(position, mpegAudioHeader.frameSize, durationUs); @@ -132,13 +136,13 @@ public SeekPoints getSeekPoints(long timeUs) { scaledPosition = 256; } else { int prevTableIndex = (int) percent; - long[] tableOfContents = Assertions.checkNotNull(this.tableOfContents); + long[] tableOfContents = Assertions.checkStateNotNull(this.tableOfContents); double prevScaledPosition = tableOfContents[prevTableIndex]; double nextScaledPosition = prevTableIndex == 99 ? 256 : tableOfContents[prevTableIndex + 1]; // Linearly interpolate between the two scaled positions. double interpolateFraction = percent - prevTableIndex; - scaledPosition = prevScaledPosition - + (interpolateFraction * (nextScaledPosition - prevScaledPosition)); + scaledPosition = + prevScaledPosition + (interpolateFraction * (nextScaledPosition - prevScaledPosition)); } long positionOffset = Math.round((scaledPosition / 256) * dataSize); // Ensure returned positions skip the frame containing the XING header. 
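The XingSeeker hunks above and below interpolate over the Xing/Info table of contents: 100 entries mapping playback percentage to a byte position on a 0-256 scale. A minimal standalone sketch of the forward mapping, kept separate from the diff (class and parameter names are illustrative, not part of this change):

    // Sketch only: maps a seek time to an approximate byte offset using a 100-entry Xing TOC.
    final class XingTocSketch {
      // toc holds 100 values scaled to [0, 256); dataSize is the size of the audio data in bytes.
      static long timeUsToPosition(long timeUs, long durationUs, long dataSize, long[] toc) {
        double percent = (timeUs * 100d) / durationUs; // playback percentage in [0, 100]
        double scaledPosition;
        if (percent <= 0) {
          scaledPosition = 0;
        } else if (percent >= 100) {
          scaledPosition = 256;
        } else {
          int prevIndex = (int) percent; // floor of the percentage
          double prev = toc[prevIndex];
          double next = prevIndex == 99 ? 256 : toc[prevIndex + 1];
          scaledPosition = prev + (percent - prevIndex) * (next - prev); // linear interpolation
        }
        return Math.round((scaledPosition / 256) * dataSize); // scale back to a byte offset
      }
    }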
@@ -152,7 +156,7 @@ public long getTimeUs(long position) { if (!isSeekable() || positionOffset <= xingFrameSize) { return 0L; } - long[] tableOfContents = Assertions.checkNotNull(this.tableOfContents); + long[] tableOfContents = Assertions.checkStateNotNull(this.tableOfContents); double scaledPosition = (positionOffset * 256d) / dataSize; int prevTableIndex = Util.binarySearchFloor(tableOfContents, (long) scaledPosition, true, true); long prevTimeUs = getTimeUsForTableIndex(prevTableIndex); @@ -160,8 +164,10 @@ public long getTimeUs(long position) { long nextTimeUs = getTimeUsForTableIndex(prevTableIndex + 1); long nextScaledPosition = prevTableIndex == 99 ? 256 : tableOfContents[prevTableIndex + 1]; // Linearly interpolate between the two table entries. - double interpolateFraction = prevScaledPosition == nextScaledPosition ? 0 - : ((scaledPosition - prevScaledPosition) / (nextScaledPosition - prevScaledPosition)); + double interpolateFraction = + prevScaledPosition == nextScaledPosition + ? 0 + : ((scaledPosition - prevScaledPosition) / (nextScaledPosition - prevScaledPosition)); return prevTimeUs + Math.round(interpolateFraction * (nextTimeUs - prevTimeUs)); } @@ -184,5 +190,4 @@ public long getDataEndPosition() { private long getTimeUsForTableIndex(int tableIndex) { return (durationUs * tableIndex) / 100; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/package-info.java new file mode 100644 index 0000000000..3483b26b47 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp3/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.extractor.mp3; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Atom.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Atom.java index e86a873ed5..9c5de24b70 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Atom.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Atom.java @@ -24,29 +24,19 @@ @SuppressWarnings("ConstantField") /* package */ abstract class Atom { - /** - * Size of an atom header, in bytes. - */ + /** Size of an atom header, in bytes. */ public static final int HEADER_SIZE = 8; - /** - * Size of a full atom header, in bytes. - */ + /** Size of a full atom header, in bytes. */ public static final int FULL_HEADER_SIZE = 12; - /** - * Size of a long atom header, in bytes. - */ + /** Size of a long atom header, in bytes. */ public static final int LONG_HEADER_SIZE = 16; - /** - * Value for the size field in an atom that defines its size in the largesize field. 
- */ + /** Value for the size field in an atom that defines its size in the largesize field. */ public static final int DEFINES_LARGE_SIZE = 1; - /** - * Value for the size field in an atom that extends to the end of the file. - */ + /** Value for the size field in an atom that extends to the end of the file. */ public static final int EXTENDS_TO_END_SIZE = 0; @SuppressWarnings("ConstantCaseForConstants") @@ -85,6 +75,9 @@ @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_av1C = 0x61763143; + @SuppressWarnings("ConstantCaseForConstants") + public static final int TYPE_colr = 0x636f6c72; + @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_dvav = 0x64766176; @@ -106,6 +99,8 @@ @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_s263 = 0x73323633; + public static final int TYPE_H263 = 0x48323633; + @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_d263 = 0x64323633; @@ -115,9 +110,21 @@ @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_mp4a = 0x6d703461; + @SuppressWarnings("ConstantCaseForConstants") + public static final int TYPE__mp2 = 0x2e6d7032; + @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE__mp3 = 0x2e6d7033; + @SuppressWarnings("ConstantCaseForConstants") + public static final int TYPE_mha1 = 0x6d686131; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int TYPE_mhm1 = 0x6d686d31; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int TYPE_mhaC = 0x6d686143; + @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_wave = 0x77617665; @@ -145,6 +152,12 @@ @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_dac4 = 0x64616334; + @SuppressWarnings("ConstantCaseForConstants") + public static final int TYPE_mlpa = 0x6d6c7061; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int TYPE_dmlp = 0x646d6c70; + @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_dtsc = 0x64747363; @@ -157,6 +170,9 @@ @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_dtse = 0x64747365; + @SuppressWarnings("ConstantCaseForConstants") + public static final int TYPE_dtsx = 0x64747378; + @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_ddts = 0x64647473; @@ -178,6 +194,9 @@ @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_moov = 0x6d6f6f76; + @SuppressWarnings("ConstantCaseForConstants") + public static final int TYPE_mpvd = 0x6d707664; + @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_mvhd = 0x6d766864; @@ -275,7 +294,7 @@ public static final int TYPE_TTML = 0x54544d4c; @SuppressWarnings("ConstantCaseForConstants") - public static final int TYPE_vmhd = 0x766d6864; + public static final int TYPE_m1v_ = 0x6d317620; @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_mp4v = 0x6d703476; @@ -328,6 +347,12 @@ @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_meta = 0x6d657461; + @SuppressWarnings("ConstantCaseForConstants") + public static final int TYPE_smta = 0x736d7461; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int TYPE_saut = 0x73617574; + @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_keys = 0x6b657973; @@ -358,6 +383,9 @@ @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_camm = 0x63616d6d; + 
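The TYPE_ constants added in this file are the atoms' four-character codes packed big-endian into an int; for example 0x636f6c72 is "colr" and 0x736d7461 is "smta". A small sketch of the round trip, mirroring getAtomTypeString further down (the helper class and method names are illustrative only, not part of this change):

    // Sketch only: MP4 atom types are four-character codes packed big-endian into an int.
    final class FourCcSketch {
      static int fourCcToType(String fourCc) { // e.g. "colr" -> 0x636f6c72
        return (fourCc.charAt(0) << 24)
            | (fourCc.charAt(1) << 16)
            | (fourCc.charAt(2) << 8)
            | fourCc.charAt(3);
      }

      static String typeToString(int type) { // e.g. 0x636f6c72 -> "colr"
        return ""
            + (char) ((type >> 24) & 0xFF)
            + (char) ((type >> 16) & 0xFF)
            + (char) ((type >> 8) & 0xFF)
            + (char) (type & 0xFF);
      }
    }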
@SuppressWarnings("ConstantCaseForConstants") + public static final int TYPE_mett = 0x6d657474; + @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_alac = 0x616c6163; @@ -382,6 +410,12 @@ @SuppressWarnings("ConstantCaseForConstants") public static final int TYPE_twos = 0x74776f73; + @SuppressWarnings("ConstantCaseForConstants") + public static final int TYPE_clli = 0x636c6c69; + + @SuppressWarnings("ConstantCaseForConstants") + public static final int TYPE_mdcv = 0x6d646376; + public final int type; public Atom(int type) { @@ -393,14 +427,10 @@ public String toString() { return getAtomTypeString(type); } - /** - * An MP4 atom that is a leaf. - */ + /** An MP4 atom that is a leaf. */ /* package */ static final class LeafAtom extends Atom { - /** - * The atom data. - */ + /** The atom data. */ public final ParsableByteArray data; /** @@ -411,12 +441,9 @@ public LeafAtom(int type, ParsableByteArray data) { super(type); this.data = data; } - } - /** - * An MP4 atom that has child atoms. - */ + /** An MP4 atom that has child atoms. */ /* package */ static final class ContainerAtom extends Atom { public final long endPosition; @@ -522,22 +549,19 @@ public int getChildAtomOfTypeCount(int type) { @Override public String toString() { return getAtomTypeString(type) - + " leaves: " + Arrays.toString(leafChildren.toArray()) - + " containers: " + Arrays.toString(containerChildren.toArray()); + + " leaves: " + + Arrays.toString(leafChildren.toArray()) + + " containers: " + + Arrays.toString(containerChildren.toArray()); } - } - /** - * Parses the version number out of the additional integer component of a full atom. - */ + /** Parses the version number out of the additional integer component of a full atom. */ public static int parseFullAtomVersion(int fullAtomInt) { return 0x000000FF & (fullAtomInt >> 24); } - /** - * Parses the atom flags out of the additional integer component of a full atom. - */ + /** Parses the atom flags out of the additional integer component of a full atom. */ public static int parseFullAtomFlags(int fullAtomInt) { return 0x00FFFFFF & fullAtomInt; } @@ -549,10 +573,10 @@ public static int parseFullAtomFlags(int fullAtomInt) { * @return The corresponding four character string. 
*/ public static String getAtomTypeString(int type) { - return "" + (char) ((type >> 24) & 0xFF) + return "" + + (char) ((type >> 24) & 0xFF) + (char) ((type >> 16) & 0xFF) + (char) ((type >> 8) & 0xFF) + (char) (type & 0xFF); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/AtomParsers.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/AtomParsers.java index 17c541ce07..3a9cedf98a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/AtomParsers.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/AtomParsers.java @@ -15,61 +15,80 @@ */ package com.google.android.exoplayer2.extractor.mp4; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; import static com.google.android.exoplayer2.util.MimeTypes.getMimeTypeFromMp4ObjectType; +import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.max; import android.util.Pair; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.audio.AacUtil; import com.google.android.exoplayer2.audio.Ac3Util; import com.google.android.exoplayer2.audio.Ac4Util; +import com.google.android.exoplayer2.audio.OpusUtil; import com.google.android.exoplayer2.drm.DrmInitData; +import com.google.android.exoplayer2.extractor.ExtractorUtil; import com.google.android.exoplayer2.extractor.GaplessInfoHolder; +import com.google.android.exoplayer2.extractor.mp4.Atom.LeafAtom; import com.google.android.exoplayer2.metadata.Metadata; -import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.metadata.mp4.SmtaMetadataEntry; import com.google.android.exoplayer2.util.CodecSpecificDataUtil; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.video.AvcConfig; +import com.google.android.exoplayer2.video.ColorInfo; import com.google.android.exoplayer2.video.DolbyVisionConfig; import com.google.android.exoplayer2.video.HevcConfig; +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import com.google.common.primitives.Ints; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.List; +import org.checkerframework.checker.nullness.compatqual.NullableType; -/** Utility methods for parsing MP4 format atom payloads according to ISO 14496-12. */ -@SuppressWarnings({"ConstantField"}) +/** Utility methods for parsing MP4 format atom payloads according to ISO/IEC 14496-12. 
*/ +@SuppressWarnings("ConstantField") /* package */ final class AtomParsers { private static final String TAG = "AtomParsers"; @SuppressWarnings("ConstantCaseForConstants") - private static final int TYPE_vide = 0x76696465; + private static final int TYPE_clcp = 0x636c6370; @SuppressWarnings("ConstantCaseForConstants") - private static final int TYPE_soun = 0x736f756e; + private static final int TYPE_mdta = 0x6d647461; @SuppressWarnings("ConstantCaseForConstants") - private static final int TYPE_text = 0x74657874; + private static final int TYPE_meta = 0x6d657461; + + @SuppressWarnings("ConstantCaseForConstants") + private static final int TYPE_nclc = 0x6e636c63; + + @SuppressWarnings("ConstantCaseForConstants") + private static final int TYPE_nclx = 0x6e636c78; @SuppressWarnings("ConstantCaseForConstants") private static final int TYPE_sbtl = 0x7362746c; @SuppressWarnings("ConstantCaseForConstants") - private static final int TYPE_subt = 0x73756274; + private static final int TYPE_soun = 0x736f756e; @SuppressWarnings("ConstantCaseForConstants") - private static final int TYPE_clcp = 0x636c6370; + private static final int TYPE_subt = 0x73756274; @SuppressWarnings("ConstantCaseForConstants") - private static final int TYPE_meta = 0x6d657461; + private static final int TYPE_text = 0x74657874; @SuppressWarnings("ConstantCaseForConstants") - private static final int TYPE_mdta = 0x6d647461; + private static final int TYPE_vide = 0x76696465; /** * The threshold number of samples to trim from the start/end of an audio track when applying an @@ -81,27 +100,198 @@ private static final byte[] opusMagic = Util.getUtf8Bytes("OpusHead"); /** - * Parses a trak atom (defined in 14496-12). + * Parse the trak atoms in a moov atom (defined in ISO/IEC 14496-12). + * + * @param moov Moov atom to decode. + * @param gaplessInfoHolder Holder to populate with gapless playback information. + * @param duration The duration in units of the timescale declared in the mvhd atom, or {@link + * C#TIME_UNSET} if the duration should be parsed from the tkhd atom. + * @param drmInitData {@link DrmInitData} to be included in the format, or {@code null}. + * @param ignoreEditLists Whether to ignore any edit lists in the trak boxes. + * @param isQuickTime True for QuickTime media. False otherwise. + * @param modifyTrackFunction A function to apply to the {@link Track Tracks} in the result. + * @return A list of {@link TrackSampleTable} instances. + * @throws ParserException Thrown if the trak atoms can't be parsed. 
+ */ + public static List parseTraks( + Atom.ContainerAtom moov, + GaplessInfoHolder gaplessInfoHolder, + long duration, + @Nullable DrmInitData drmInitData, + boolean ignoreEditLists, + boolean isQuickTime, + Function<@NullableType Track, @NullableType Track> modifyTrackFunction) + throws ParserException { + List trackSampleTables = new ArrayList<>(); + for (int i = 0; i < moov.containerChildren.size(); i++) { + Atom.ContainerAtom atom = moov.containerChildren.get(i); + if (atom.type != Atom.TYPE_trak) { + continue; + } + @Nullable + Track track = + modifyTrackFunction.apply( + parseTrak( + atom, + checkNotNull(moov.getLeafAtomOfType(Atom.TYPE_mvhd)), + duration, + drmInitData, + ignoreEditLists, + isQuickTime)); + if (track == null) { + continue; + } + Atom.ContainerAtom stblAtom = + checkNotNull( + checkNotNull( + checkNotNull(atom.getContainerAtomOfType(Atom.TYPE_mdia)) + .getContainerAtomOfType(Atom.TYPE_minf)) + .getContainerAtomOfType(Atom.TYPE_stbl)); + TrackSampleTable trackSampleTable = parseStbl(track, stblAtom, gaplessInfoHolder); + trackSampleTables.add(trackSampleTable); + } + return trackSampleTables; + } + + /** + * Parses a udta atom. + * + * @param udtaAtom The udta (user data) atom to decode. + * @return A {@link Pair} containing the metadata from the meta child atom as first value (if + * any), and the metadata from the smta child atom as second value (if any). + */ + public static Pair<@NullableType Metadata, @NullableType Metadata> parseUdta( + Atom.LeafAtom udtaAtom) { + ParsableByteArray udtaData = udtaAtom.data; + udtaData.setPosition(Atom.HEADER_SIZE); + @Nullable Metadata metaMetadata = null; + @Nullable Metadata smtaMetadata = null; + while (udtaData.bytesLeft() >= Atom.HEADER_SIZE) { + int atomPosition = udtaData.getPosition(); + int atomSize = udtaData.readInt(); + int atomType = udtaData.readInt(); + if (atomType == Atom.TYPE_meta) { + udtaData.setPosition(atomPosition); + metaMetadata = parseUdtaMeta(udtaData, atomPosition + atomSize); + } else if (atomType == Atom.TYPE_smta) { + udtaData.setPosition(atomPosition); + smtaMetadata = parseSmta(udtaData, atomPosition + atomSize); + } + udtaData.setPosition(atomPosition + atomSize); + } + return Pair.create(metaMetadata, smtaMetadata); + } + + /** + * Parses a metadata meta atom if it contains metadata with handler 'mdta'. + * + * @param meta The metadata atom to decode. + * @return Parsed metadata, or null. + */ + @Nullable + public static Metadata parseMdtaFromMeta(Atom.ContainerAtom meta) { + @Nullable Atom.LeafAtom hdlrAtom = meta.getLeafAtomOfType(Atom.TYPE_hdlr); + @Nullable Atom.LeafAtom keysAtom = meta.getLeafAtomOfType(Atom.TYPE_keys); + @Nullable Atom.LeafAtom ilstAtom = meta.getLeafAtomOfType(Atom.TYPE_ilst); + if (hdlrAtom == null + || keysAtom == null + || ilstAtom == null + || parseHdlr(hdlrAtom.data) != TYPE_mdta) { + // There isn't enough information to parse the metadata, or the handler type is unexpected. + return null; + } + + // Parse metadata keys. + ParsableByteArray keys = keysAtom.data; + keys.setPosition(Atom.FULL_HEADER_SIZE); + int entryCount = keys.readInt(); + String[] keyNames = new String[entryCount]; + for (int i = 0; i < entryCount; i++) { + int entrySize = keys.readInt(); + keys.skipBytes(4); // keyNamespace + int keySize = entrySize - 8; + keyNames[i] = keys.readString(keySize); + } + + // Parse metadata items. 
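 // Each ilst child is laid out as [4-byte size][4-byte key index, 1-based, into the keys parsed
 // above] followed by the value data handled by MetadataUtil.parseMdtaMetadataEntryFromIlst;
 // children whose index falls outside the key list are logged and skipped.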
+ ParsableByteArray ilst = ilstAtom.data; + ilst.setPosition(Atom.HEADER_SIZE); + ArrayList entries = new ArrayList<>(); + while (ilst.bytesLeft() > Atom.HEADER_SIZE) { + int atomPosition = ilst.getPosition(); + int atomSize = ilst.readInt(); + int keyIndex = ilst.readInt() - 1; + if (keyIndex >= 0 && keyIndex < keyNames.length) { + String key = keyNames[keyIndex]; + @Nullable + Metadata.Entry entry = + MetadataUtil.parseMdtaMetadataEntryFromIlst(ilst, atomPosition + atomSize, key); + if (entry != null) { + entries.add(entry); + } + } else { + Log.w(TAG, "Skipped metadata with unknown key index: " + keyIndex); + } + ilst.setPosition(atomPosition + atomSize); + } + return entries.isEmpty() ? null : new Metadata(entries); + } + + /** + * Possibly skips the version and flags fields (1+3 byte) of a full meta atom. + * + *
<p>
      Atoms of type {@link Atom#TYPE_meta} are defined to be full atoms which have four additional + * bytes for a version and a flags field (see 4.2 'Object Structure' in ISO/IEC 14496-12:2005). + * QuickTime do not have such a full box structure. Since some of these files are encoded wrongly, + * we can't rely on the file type though. Instead we must check the 8 bytes after the common + * header bytes ourselves. + * + * @param meta The 8 or more bytes following the meta atom size and type. + */ + public static void maybeSkipRemainingMetaAtomHeaderBytes(ParsableByteArray meta) { + int endPosition = meta.getPosition(); + // The next 8 bytes can be either: + // (iso) [1 byte version + 3 bytes flags][4 byte size of next atom] + // (qt) [4 byte size of next atom ][4 byte hdlr atom type ] + // In case of (iso) we need to skip the next 4 bytes. + meta.skipBytes(4); + if (meta.readInt() != Atom.TYPE_hdlr) { + endPosition += 4; + } + meta.setPosition(endPosition); + } + + /** + * Parses a trak atom (defined in ISO/IEC 14496-12). * * @param trak Atom to decode. * @param mvhd Movie header atom, used to get the timescale. - * @param duration The duration in units of the timescale declared in the mvhd atom, or - * {@link C#TIME_UNSET} if the duration should be parsed from the tkhd atom. - * @param drmInitData {@link DrmInitData} to be included in the format. + * @param duration The duration in units of the timescale declared in the mvhd atom, or {@link + * C#TIME_UNSET} if the duration should be parsed from the tkhd atom. + * @param drmInitData {@link DrmInitData} to be included in the format, or {@code null}. * @param ignoreEditLists Whether to ignore any edit lists in the trak box. * @param isQuickTime True for QuickTime media. False otherwise. * @return A {@link Track} instance, or {@code null} if the track's type isn't supported. + * @throws ParserException Thrown if the trak atom can't be parsed. 
*/ - public static Track parseTrak(Atom.ContainerAtom trak, Atom.LeafAtom mvhd, long duration, - DrmInitData drmInitData, boolean ignoreEditLists, boolean isQuickTime) + @Nullable + private static Track parseTrak( + Atom.ContainerAtom trak, + Atom.LeafAtom mvhd, + long duration, + @Nullable DrmInitData drmInitData, + boolean ignoreEditLists, + boolean isQuickTime) throws ParserException { - Atom.ContainerAtom mdia = trak.getContainerAtomOfType(Atom.TYPE_mdia); - int trackType = getTrackTypeForHdlr(parseHdlr(mdia.getLeafAtomOfType(Atom.TYPE_hdlr).data)); + Atom.ContainerAtom mdia = checkNotNull(trak.getContainerAtomOfType(Atom.TYPE_mdia)); + @C.TrackType + int trackType = + getTrackTypeForHdlr(parseHdlr(checkNotNull(mdia.getLeafAtomOfType(Atom.TYPE_hdlr)).data)); if (trackType == C.TRACK_TYPE_UNKNOWN) { return null; } - TkhdData tkhdData = parseTkhd(trak.getLeafAtomOfType(Atom.TYPE_tkhd).data); + TkhdData tkhdData = parseTkhd(checkNotNull(trak.getLeafAtomOfType(Atom.TYPE_tkhd)).data); if (duration == C.TIME_UNSET) { duration = tkhdData.duration; } @@ -112,27 +302,56 @@ public static Track parseTrak(Atom.ContainerAtom trak, Atom.LeafAtom mvhd, long } else { durationUs = Util.scaleLargeTimestamp(duration, C.MICROS_PER_SECOND, movieTimescale); } - Atom.ContainerAtom stbl = mdia.getContainerAtomOfType(Atom.TYPE_minf) - .getContainerAtomOfType(Atom.TYPE_stbl); - - Pair mdhdData = parseMdhd(mdia.getLeafAtomOfType(Atom.TYPE_mdhd).data); - StsdData stsdData = parseStsd(stbl.getLeafAtomOfType(Atom.TYPE_stsd).data, tkhdData.id, - tkhdData.rotationDegrees, mdhdData.second, drmInitData, isQuickTime); - long[] editListDurations = null; - long[] editListMediaTimes = null; + Atom.ContainerAtom stbl = + checkNotNull( + checkNotNull(mdia.getContainerAtomOfType(Atom.TYPE_minf)) + .getContainerAtomOfType(Atom.TYPE_stbl)); + + Pair mdhdData = + parseMdhd(checkNotNull(mdia.getLeafAtomOfType(Atom.TYPE_mdhd)).data); + LeafAtom stsd = stbl.getLeafAtomOfType(Atom.TYPE_stsd); + if (stsd == null) { + throw ParserException.createForMalformedContainer( + "Malformed sample table (stbl) missing sample description (stsd)", /* cause= */ null); + } + StsdData stsdData = + parseStsd( + stsd.data, + tkhdData.id, + tkhdData.rotationDegrees, + mdhdData.second, + drmInitData, + isQuickTime); + @Nullable long[] editListDurations = null; + @Nullable long[] editListMediaTimes = null; if (!ignoreEditLists) { - Pair edtsData = parseEdts(trak.getContainerAtomOfType(Atom.TYPE_edts)); - editListDurations = edtsData.first; - editListMediaTimes = edtsData.second; - } - return stsdData.format == null ? null - : new Track(tkhdData.id, trackType, mdhdData.first, movieTimescale, durationUs, - stsdData.format, stsdData.requiredSampleTransformation, stsdData.trackEncryptionBoxes, - stsdData.nalUnitLengthFieldLength, editListDurations, editListMediaTimes); + @Nullable Atom.ContainerAtom edtsAtom = trak.getContainerAtomOfType(Atom.TYPE_edts); + if (edtsAtom != null) { + @Nullable Pair edtsData = parseEdts(edtsAtom); + if (edtsData != null) { + editListDurations = edtsData.first; + editListMediaTimes = edtsData.second; + } + } + } + return stsdData.format == null + ? null + : new Track( + tkhdData.id, + trackType, + mdhdData.first, + movieTimescale, + durationUs, + stsdData.format, + stsdData.requiredSampleTransformation, + stsdData.trackEncryptionBoxes, + stsdData.nalUnitLengthFieldLength, + editListDurations, + editListMediaTimes); } /** - * Parses an stbl atom (defined in 14496-12). + * Parses an stbl atom (defined in ISO/IEC 14496-12). 
* * @param track Track to which this sample table corresponds. * @param stblAtom stbl (sample table) atom to decode. @@ -140,17 +359,18 @@ public static Track parseTrak(Atom.ContainerAtom trak, Atom.LeafAtom mvhd, long * @return Sample table described by the stbl atom. * @throws ParserException Thrown if the stbl atom can't be parsed. */ - public static TrackSampleTable parseStbl( + private static TrackSampleTable parseStbl( Track track, Atom.ContainerAtom stblAtom, GaplessInfoHolder gaplessInfoHolder) throws ParserException { SampleSizeBox sampleSizeBox; - Atom.LeafAtom stszAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stsz); + @Nullable Atom.LeafAtom stszAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stsz); if (stszAtom != null) { - sampleSizeBox = new StszSampleSizeBox(stszAtom); + sampleSizeBox = new StszSampleSizeBox(stszAtom, track.format); } else { - Atom.LeafAtom stz2Atom = stblAtom.getLeafAtomOfType(Atom.TYPE_stz2); + @Nullable Atom.LeafAtom stz2Atom = stblAtom.getLeafAtomOfType(Atom.TYPE_stz2); if (stz2Atom == null) { - throw new ParserException("Track has no sample table size information"); + throw ParserException.createForMalformedContainer( + "Track has no sample table size information", /* cause= */ null); } sampleSizeBox = new Stz2SampleSizeBox(stz2Atom); } @@ -164,27 +384,27 @@ public static TrackSampleTable parseStbl( /* maximumSize= */ 0, /* timestampsUs= */ new long[0], /* flags= */ new int[0], - /* durationUs= */ C.TIME_UNSET); + /* durationUs= */ 0); } // Entries are byte offsets of chunks. boolean chunkOffsetsAreLongs = false; - Atom.LeafAtom chunkOffsetsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stco); + @Nullable Atom.LeafAtom chunkOffsetsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stco); if (chunkOffsetsAtom == null) { chunkOffsetsAreLongs = true; - chunkOffsetsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_co64); + chunkOffsetsAtom = checkNotNull(stblAtom.getLeafAtomOfType(Atom.TYPE_co64)); } ParsableByteArray chunkOffsets = chunkOffsetsAtom.data; // Entries are (chunk number, number of samples per chunk, sample description index). - ParsableByteArray stsc = stblAtom.getLeafAtomOfType(Atom.TYPE_stsc).data; + ParsableByteArray stsc = checkNotNull(stblAtom.getLeafAtomOfType(Atom.TYPE_stsc)).data; // Entries are (number of samples, timestamp delta between those samples). - ParsableByteArray stts = stblAtom.getLeafAtomOfType(Atom.TYPE_stts).data; + ParsableByteArray stts = checkNotNull(stblAtom.getLeafAtomOfType(Atom.TYPE_stts)).data; // Entries are the indices of samples that are synchronization samples. - Atom.LeafAtom stssAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stss); - ParsableByteArray stss = stssAtom != null ? stssAtom.data : null; + @Nullable Atom.LeafAtom stssAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stss); + @Nullable ParsableByteArray stss = stssAtom != null ? stssAtom.data : null; // Entries are (number of samples, timestamp offset). - Atom.LeafAtom cttsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_ctts); - ParsableByteArray ctts = cttsAtom != null ? cttsAtom.data : null; + @Nullable Atom.LeafAtom cttsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_ctts); + @Nullable ParsableByteArray ctts = cttsAtom != null ? cttsAtom.data : null; // Prepare to read chunk information. ChunkIterator chunkIterator = new ChunkIterator(stsc, chunkOffsets, chunkOffsetsAreLongs); @@ -218,9 +438,13 @@ public static TrackSampleTable parseStbl( } // Fixed sample size raw audio may need to be rechunked. 
- boolean isFixedSampleSizeRawAudio = - sampleSizeBox.isFixedSampleSize() - && MimeTypes.AUDIO_RAW.equals(track.format.sampleMimeType) + int fixedSampleSize = sampleSizeBox.getFixedSampleSize(); + @Nullable String sampleMimeType = track.format.sampleMimeType; + boolean rechunkFixedSizeSamples = + fixedSampleSize != C.LENGTH_UNSET + && (MimeTypes.AUDIO_RAW.equals(sampleMimeType) + || MimeTypes.AUDIO_MLAW.equals(sampleMimeType) + || MimeTypes.AUDIO_ALAW.equals(sampleMimeType)) && remainingTimestampDeltaChanges == 0 && remainingTimestampOffsetChanges == 0 && remainingSynchronizationSamples == 0; @@ -233,7 +457,23 @@ public static TrackSampleTable parseStbl( long timestampTimeUnits = 0; long duration; - if (!isFixedSampleSizeRawAudio) { + if (rechunkFixedSizeSamples) { + long[] chunkOffsetsBytes = new long[chunkIterator.length]; + int[] chunkSampleCounts = new int[chunkIterator.length]; + while (chunkIterator.moveNext()) { + chunkOffsetsBytes[chunkIterator.index] = chunkIterator.offset; + chunkSampleCounts[chunkIterator.index] = chunkIterator.numSamples; + } + FixedSampleSizeRechunker.Results rechunkedResults = + FixedSampleSizeRechunker.rechunk( + fixedSampleSize, chunkOffsetsBytes, chunkSampleCounts, timestampDeltaInTimeUnits); + offsets = rechunkedResults.offsets; + sizes = rechunkedResults.sizes; + maximumSize = rechunkedResults.maximumSize; + timestamps = rechunkedResults.timestamps; + flags = rechunkedResults.flags; + duration = rechunkedResults.duration; + } else { offsets = new long[sampleCount]; sizes = new int[sampleCount]; timestamps = new long[sampleCount]; @@ -262,11 +502,11 @@ public static TrackSampleTable parseStbl( if (ctts != null) { while (remainingSamplesAtTimestampOffset == 0 && remainingTimestampOffsetChanges > 0) { remainingSamplesAtTimestampOffset = ctts.readUnsignedIntToInt(); - // The BMFF spec (ISO 14496-12) states that sample offsets should be unsigned integers - // in version 0 ctts boxes, however some streams violate the spec and use signed - // integers instead. It's safe to always decode sample offsets as signed integers here, - // because unsigned integers will still be parsed correctly (unless their top bit is - // set, which is never true in practice because sample offsets are always small). + // The BMFF spec (ISO/IEC 14496-12) states that sample offsets should be unsigned + // integers in version 0 ctts boxes, however some streams violate the spec and use + // signed integers instead. It's safe to always decode sample offsets as signed integers + // here, because unsigned integers will still be parsed correctly (unless their top bit + // is set, which is never true in practice because sample offsets are always small). 
timestampOffset = ctts.readInt(); remainingTimestampOffsetChanges--; } @@ -286,7 +526,7 @@ public static TrackSampleTable parseStbl( flags[i] = C.BUFFER_FLAG_KEY_FRAME; remainingSynchronizationSamples--; if (remainingSynchronizationSamples > 0) { - nextSynchronizationSampleIndex = stss.readUnsignedIntToInt() - 1; + nextSynchronizationSampleIndex = checkNotNull(stss).readUnsignedIntToInt() - 1; } } @@ -295,7 +535,7 @@ public static TrackSampleTable parseStbl( remainingSamplesAtTimestampDelta--; if (remainingSamplesAtTimestampDelta == 0 && remainingTimestampDeltaChanges > 0) { remainingSamplesAtTimestampDelta = stts.readUnsignedIntToInt(); - // The BMFF spec (ISO 14496-12) states that sample deltas should be unsigned integers + // The BMFF spec (ISO/IEC 14496-12) states that sample deltas should be unsigned integers // in stts boxes, however some streams violate the spec and use signed integers instead. // See https://github.com/google/ExoPlayer/issues/3384. It's safe to always decode sample // deltas as signed integers here, because unsigned integers will still be parsed @@ -313,13 +553,15 @@ public static TrackSampleTable parseStbl( // If the stbl's child boxes are not consistent the container is malformed, but the stream may // still be playable. boolean isCttsValid = true; - while (remainingTimestampOffsetChanges > 0) { - if (ctts.readUnsignedIntToInt() != 0) { - isCttsValid = false; - break; + if (ctts != null) { + while (remainingTimestampOffsetChanges > 0) { + if (ctts.readUnsignedIntToInt() != 0) { + isCttsValid = false; + break; + } + ctts.readInt(); // Ignore offset. + remainingTimestampOffsetChanges--; } - ctts.readInt(); // Ignore offset. - remainingTimestampOffsetChanges--; } if (remainingSynchronizationSamples != 0 || remainingSamplesAtTimestampDelta != 0 @@ -343,23 +585,6 @@ public static TrackSampleTable parseStbl( + remainingSamplesAtTimestampOffset + (!isCttsValid ? ", ctts invalid" : "")); } - } else { - long[] chunkOffsetsBytes = new long[chunkIterator.length]; - int[] chunkSampleCounts = new int[chunkIterator.length]; - while (chunkIterator.moveNext()) { - chunkOffsetsBytes[chunkIterator.index] = chunkIterator.offset; - chunkSampleCounts[chunkIterator.index] = chunkIterator.numSamples; - } - int fixedSampleSize = - Util.getPcmFrameSize(track.format.pcmEncoding, track.format.channelCount); - FixedSampleSizeRechunker.Results rechunkedResults = FixedSampleSizeRechunker.rechunk( - fixedSampleSize, chunkOffsetsBytes, chunkSampleCounts, timestampDeltaInTimeUnits); - offsets = rechunkedResults.offsets; - sizes = rechunkedResults.sizes; - maximumSize = rechunkedResults.maximumSize; - timestamps = rechunkedResults.timestamps; - flags = rechunkedResults.flags; - duration = rechunkedResults.duration; } long durationUs = Util.scaleLargeTimestamp(duration, C.MICROS_PER_SECOND, track.timescale); @@ -369,26 +594,30 @@ public static TrackSampleTable parseStbl( track, offsets, sizes, maximumSize, timestamps, flags, durationUs); } - // See the BMFF spec (ISO 14496-12) subsection 8.6.6. Edit lists that require prerolling from a - // sync sample after reordering are not supported. Partial audio sample truncation is only - // supported in edit lists with one edit that removes less than MAX_GAPLESS_TRIM_SIZE_SAMPLES - // samples from the start/end of the track. This implementation handles simple - // discarding/delaying of samples. The extractor may place further restrictions on what edited - // streams are playable. + // See the BMFF spec (ISO/IEC 14496-12) subsection 8.6.6. 
Edit lists that require prerolling + // from a sync sample after reordering are not supported. Partial audio sample truncation is + // only supported in edit lists with one edit that removes less than + // MAX_GAPLESS_TRIM_SIZE_SAMPLES samples from the start/end of the track. This implementation + // handles simple discarding/delaying of samples. The extractor may place further restrictions + // on what edited streams are playable. if (track.editListDurations.length == 1 && track.type == C.TRACK_TYPE_AUDIO && timestamps.length >= 2) { - long editStartTime = track.editListMediaTimes[0]; - long editEndTime = editStartTime + Util.scaleLargeTimestamp(track.editListDurations[0], - track.timescale, track.movieTimescale); + long editStartTime = checkNotNull(track.editListMediaTimes)[0]; + long editEndTime = + editStartTime + + Util.scaleLargeTimestamp( + track.editListDurations[0], track.timescale, track.movieTimescale); if (canApplyEditWithGaplessInfo(timestamps, duration, editStartTime, editEndTime)) { long paddingTimeUnits = duration - editEndTime; - long encoderDelay = Util.scaleLargeTimestamp(editStartTime - timestamps[0], - track.format.sampleRate, track.timescale); - long encoderPadding = Util.scaleLargeTimestamp(paddingTimeUnits, - track.format.sampleRate, track.timescale); - if ((encoderDelay != 0 || encoderPadding != 0) && encoderDelay <= Integer.MAX_VALUE + long encoderDelay = + Util.scaleLargeTimestamp( + editStartTime - timestamps[0], track.format.sampleRate, track.timescale); + long encoderPadding = + Util.scaleLargeTimestamp(paddingTimeUnits, track.format.sampleRate, track.timescale); + if ((encoderDelay != 0 || encoderPadding != 0) + && encoderDelay <= Integer.MAX_VALUE && encoderPadding <= Integer.MAX_VALUE) { gaplessInfoHolder.encoderDelay = (int) encoderDelay; gaplessInfoHolder.encoderPadding = (int) encoderPadding; @@ -406,7 +635,7 @@ public static TrackSampleTable parseStbl( // The current version of the spec leaves handling of an edit with zero segment_duration in // unfragmented files open to interpretation. We handle this as a special case and include all // samples in the edit. - long editStartTime = track.editListMediaTimes[0]; + long editStartTime = checkNotNull(track.editListMediaTimes)[0]; for (int i = 0; i < timestamps.length; i++) { timestamps[i] = Util.scaleLargeTimestamp( @@ -427,8 +656,9 @@ public static TrackSampleTable parseStbl( boolean copyMetadata = false; int[] startIndices = new int[track.editListDurations.length]; int[] endIndices = new int[track.editListDurations.length]; + long[] editListMediaTimes = checkNotNull(track.editListMediaTimes); for (int i = 0; i < track.editListDurations.length; i++) { - long editMediaTime = track.editListMediaTimes[i]; + long editMediaTime = editListMediaTimes[i]; if (editMediaTime != -1) { long editDuration = Util.scaleLargeTimestamp( @@ -479,7 +709,7 @@ public static TrackSampleTable parseStbl( long ptsUs = Util.scaleLargeTimestamp(pts, C.MICROS_PER_SECOND, track.movieTimescale); long timeInSegmentUs = Util.scaleLargeTimestamp( - Math.max(0, timestamps[j] - editMediaTime), C.MICROS_PER_SECOND, track.timescale); + max(0, timestamps[j] - editMediaTime), C.MICROS_PER_SECOND, track.timescale); editedTimestamps[sampleIndex] = ptsUs + timeInSegmentUs; if (copyMetadata && editedSizes[sampleIndex] > editedMaximumSize) { editedMaximumSize = sizes[j]; @@ -500,92 +730,10 @@ public static TrackSampleTable parseStbl( editedDurationUs); } - /** - * Parses a udta atom. - * - * @param udtaAtom The udta (user data) atom to decode. 
- * @param isQuickTime True for QuickTime media. False otherwise. - * @return Parsed metadata, or null. - */ - @Nullable - public static Metadata parseUdta(Atom.LeafAtom udtaAtom, boolean isQuickTime) { - if (isQuickTime) { - // Meta boxes are regular boxes rather than full boxes in QuickTime. For now, don't try and - // decode one. - return null; - } - ParsableByteArray udtaData = udtaAtom.data; - udtaData.setPosition(Atom.HEADER_SIZE); - while (udtaData.bytesLeft() >= Atom.HEADER_SIZE) { - int atomPosition = udtaData.getPosition(); - int atomSize = udtaData.readInt(); - int atomType = udtaData.readInt(); - if (atomType == Atom.TYPE_meta) { - udtaData.setPosition(atomPosition); - return parseUdtaMeta(udtaData, atomPosition + atomSize); - } - udtaData.setPosition(atomPosition + atomSize); - } - return null; - } - - /** - * Parses a metadata meta atom if it contains metadata with handler 'mdta'. - * - * @param meta The metadata atom to decode. - * @return Parsed metadata, or null. - */ - @Nullable - public static Metadata parseMdtaFromMeta(Atom.ContainerAtom meta) { - Atom.LeafAtom hdlrAtom = meta.getLeafAtomOfType(Atom.TYPE_hdlr); - Atom.LeafAtom keysAtom = meta.getLeafAtomOfType(Atom.TYPE_keys); - Atom.LeafAtom ilstAtom = meta.getLeafAtomOfType(Atom.TYPE_ilst); - if (hdlrAtom == null - || keysAtom == null - || ilstAtom == null - || AtomParsers.parseHdlr(hdlrAtom.data) != TYPE_mdta) { - // There isn't enough information to parse the metadata, or the handler type is unexpected. - return null; - } - - // Parse metadata keys. - ParsableByteArray keys = keysAtom.data; - keys.setPosition(Atom.FULL_HEADER_SIZE); - int entryCount = keys.readInt(); - String[] keyNames = new String[entryCount]; - for (int i = 0; i < entryCount; i++) { - int entrySize = keys.readInt(); - keys.skipBytes(4); // keyNamespace - int keySize = entrySize - 8; - keyNames[i] = keys.readString(keySize); - } - - // Parse metadata items. - ParsableByteArray ilst = ilstAtom.data; - ilst.setPosition(Atom.HEADER_SIZE); - ArrayList entries = new ArrayList<>(); - while (ilst.bytesLeft() > Atom.HEADER_SIZE) { - int atomPosition = ilst.getPosition(); - int atomSize = ilst.readInt(); - int keyIndex = ilst.readInt() - 1; - if (keyIndex >= 0 && keyIndex < keyNames.length) { - String key = keyNames[keyIndex]; - Metadata.Entry entry = - MetadataUtil.parseMdtaMetadataEntryFromIlst(ilst, atomPosition + atomSize, key); - if (entry != null) { - entries.add(entry); - } - } else { - Log.w(TAG, "Skipped metadata with unknown key index: " + keyIndex); - } - ilst.setPosition(atomPosition + atomSize); - } - return entries.isEmpty() ? null : new Metadata(entries); - } - @Nullable private static Metadata parseUdtaMeta(ParsableByteArray meta, int limit) { - meta.skipBytes(Atom.FULL_HEADER_SIZE); + meta.skipBytes(Atom.HEADER_SIZE); + maybeSkipRemainingMetaAtomHeaderBytes(meta); while (meta.getPosition() < limit) { int atomPosition = meta.getPosition(); int atomSize = meta.readInt(); @@ -604,7 +752,7 @@ private static Metadata parseIlst(ParsableByteArray ilst, int limit) { ilst.skipBytes(Atom.HEADER_SIZE); ArrayList entries = new ArrayList<>(); while (ilst.getPosition() < limit) { - Metadata.Entry entry = MetadataUtil.parseIlstElement(ilst); + @Nullable Metadata.Entry entry = MetadataUtil.parseIlstElement(ilst); if (entry != null) { entries.add(entry); } @@ -613,7 +761,38 @@ private static Metadata parseIlst(ParsableByteArray ilst, int limit) { } /** - * Parses a mvhd atom (defined in 14496-12), returning the timescale for the movie. 
+ * Parses metadata from a Samsung smta atom.
+ *
+ * <p>
      See [Internal: b/150138465#comment76]. + */ + @Nullable + private static Metadata parseSmta(ParsableByteArray smta, int limit) { + smta.skipBytes(Atom.FULL_HEADER_SIZE); + while (smta.getPosition() < limit) { + int atomPosition = smta.getPosition(); + int atomSize = smta.readInt(); + int atomType = smta.readInt(); + if (atomType == Atom.TYPE_saut) { + if (atomSize < 14) { + return null; + } + smta.skipBytes(5); // author (4), reserved = 0 (1). + int recordingMode = smta.readUnsignedByte(); + if (recordingMode != 12 && recordingMode != 13) { + return null; + } + float captureFrameRate = recordingMode == 12 ? 240 : 120; + smta.skipBytes(1); // reserved = 1 (1). + int svcTemporalLayerCount = smta.readUnsignedByte(); + return new Metadata(new SmtaMetadataEntry(captureFrameRate, svcTemporalLayerCount)); + } + smta.setPosition(atomPosition + atomSize); + } + return null; + } + + /** + * Parses a mvhd atom (defined in ISO/IEC 14496-12), returning the timescale for the movie. * * @param mvhd Contents of the mvhd atom to be parsed. * @return Timescale for the movie. @@ -627,7 +806,7 @@ private static long parseMvhd(ParsableByteArray mvhd) { } /** - * Parses a tkhd atom (defined in 14496-12). + * Parses a tkhd atom (defined in ISO/IEC 14496-12). * * @return An object containing the parsed data. */ @@ -644,7 +823,7 @@ private static TkhdData parseTkhd(ParsableByteArray tkhd) { int durationPosition = tkhd.getPosition(); int durationByteCount = version == 0 ? 4 : 8; for (int i = 0; i < durationByteCount; i++) { - if (tkhd.data[durationPosition + i] != -1) { + if (tkhd.getData()[durationPosition + i] != -1) { durationUnknown = false; break; } @@ -697,7 +876,7 @@ private static int parseHdlr(ParsableByteArray hdlr) { } /** Returns the track type for a given handler value. */ - private static int getTrackTypeForHdlr(int hdlr) { + private static @C.TrackType int getTrackTypeForHdlr(int hdlr) { if (hdlr == TYPE_soun) { return C.TRACK_TYPE_AUDIO; } else if (hdlr == TYPE_vide) { @@ -712,11 +891,11 @@ private static int getTrackTypeForHdlr(int hdlr) { } /** - * Parses an mdhd atom (defined in 14496-12). + * Parses an mdhd atom (defined in ISO/IEC 14496-12). * * @param mdhd The mdhd atom to decode. * @return A pair consisting of the media timescale defined as the number of time units that pass - * in one second, and the language code. + * in one second, and the language code. */ private static Pair parseMdhd(ParsableByteArray mdhd) { mdhd.setPosition(Atom.HEADER_SIZE); @@ -735,33 +914,41 @@ private static Pair parseMdhd(ParsableByteArray mdhd) { } /** - * Parses a stsd atom (defined in 14496-12). + * Parses a stsd atom (defined in ISO/IEC 14496-12). * * @param stsd The stsd atom to decode. * @param trackId The track's identifier in its container. * @param rotationDegrees The rotation of the track in degrees. * @param language The language of the track. - * @param drmInitData {@link DrmInitData} to be included in the format. + * @param drmInitData {@link DrmInitData} to be included in the format, or {@code null}. * @param isQuickTime True for QuickTime media. False otherwise. * @return An object containing the parsed data. 
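// Standalone sketch of the version-dependent layout that parseMvhd and parseMdhd rely on
// above: a full box starts with version (1 byte) + flags (3 bytes); for version 0 the
// creation and modification times are 32-bit, for version 1 they are 64-bit, and the
// timescale (time units per second) follows them. Uses plain java.nio.ByteBuffer on the
// box payload; the method name is illustrative.
static long readMovieTimescale(java.nio.ByteBuffer mvhdPayload) {
  int versionAndFlags = mvhdPayload.getInt();
  int version = versionAndFlags >>> 24;
  // Skip creation_time and modification_time (4 bytes each for v0, 8 bytes each for v1).
  mvhdPayload.position(mvhdPayload.position() + (version == 0 ? 8 : 16));
  return mvhdPayload.getInt() & 0xFFFFFFFFL;  // timescale is an unsigned 32-bit value.
}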
*/ - private static StsdData parseStsd(ParsableByteArray stsd, int trackId, int rotationDegrees, - String language, DrmInitData drmInitData, boolean isQuickTime) throws ParserException { + private static StsdData parseStsd( + ParsableByteArray stsd, + int trackId, + int rotationDegrees, + String language, + @Nullable DrmInitData drmInitData, + boolean isQuickTime) + throws ParserException { stsd.setPosition(Atom.FULL_HEADER_SIZE); int numberOfEntries = stsd.readInt(); StsdData out = new StsdData(numberOfEntries); for (int i = 0; i < numberOfEntries; i++) { int childStartPosition = stsd.getPosition(); int childAtomSize = stsd.readInt(); - Assertions.checkArgument(childAtomSize > 0, "childAtomSize should be positive"); + ExtractorUtil.checkContainerInput(childAtomSize > 0, "childAtomSize must be positive"); int childAtomType = stsd.readInt(); if (childAtomType == Atom.TYPE_avc1 || childAtomType == Atom.TYPE_avc3 || childAtomType == Atom.TYPE_encv + || childAtomType == Atom.TYPE_m1v_ || childAtomType == Atom.TYPE_mp4v || childAtomType == Atom.TYPE_hvc1 || childAtomType == Atom.TYPE_hev1 || childAtomType == Atom.TYPE_s263 + || childAtomType == Atom.TYPE_H263 || childAtomType == Atom.TYPE_vp08 || childAtomType == Atom.TYPE_vp09 || childAtomType == Atom.TYPE_av01 @@ -769,50 +956,85 @@ private static StsdData parseStsd(ParsableByteArray stsd, int trackId, int rotat || childAtomType == Atom.TYPE_dva1 || childAtomType == Atom.TYPE_dvhe || childAtomType == Atom.TYPE_dvh1) { - parseVideoSampleEntry(stsd, childAtomType, childStartPosition, childAtomSize, trackId, - rotationDegrees, drmInitData, out, i); + parseVideoSampleEntry( + stsd, + childAtomType, + childStartPosition, + childAtomSize, + trackId, + rotationDegrees, + drmInitData, + out, + i); } else if (childAtomType == Atom.TYPE_mp4a || childAtomType == Atom.TYPE_enca || childAtomType == Atom.TYPE_ac_3 || childAtomType == Atom.TYPE_ec_3 || childAtomType == Atom.TYPE_ac_4 + || childAtomType == Atom.TYPE_mlpa || childAtomType == Atom.TYPE_dtsc || childAtomType == Atom.TYPE_dtse || childAtomType == Atom.TYPE_dtsh || childAtomType == Atom.TYPE_dtsl + || childAtomType == Atom.TYPE_dtsx || childAtomType == Atom.TYPE_samr || childAtomType == Atom.TYPE_sawb || childAtomType == Atom.TYPE_lpcm || childAtomType == Atom.TYPE_sowt || childAtomType == Atom.TYPE_twos + || childAtomType == Atom.TYPE__mp2 || childAtomType == Atom.TYPE__mp3 + || childAtomType == Atom.TYPE_mha1 + || childAtomType == Atom.TYPE_mhm1 || childAtomType == Atom.TYPE_alac || childAtomType == Atom.TYPE_alaw || childAtomType == Atom.TYPE_ulaw || childAtomType == Atom.TYPE_Opus || childAtomType == Atom.TYPE_fLaC) { - parseAudioSampleEntry(stsd, childAtomType, childStartPosition, childAtomSize, trackId, - language, isQuickTime, drmInitData, out, i); - } else if (childAtomType == Atom.TYPE_TTML || childAtomType == Atom.TYPE_tx3g - || childAtomType == Atom.TYPE_wvtt || childAtomType == Atom.TYPE_stpp + parseAudioSampleEntry( + stsd, + childAtomType, + childStartPosition, + childAtomSize, + trackId, + language, + isQuickTime, + drmInitData, + out, + i); + } else if (childAtomType == Atom.TYPE_TTML + || childAtomType == Atom.TYPE_tx3g + || childAtomType == Atom.TYPE_wvtt + || childAtomType == Atom.TYPE_stpp || childAtomType == Atom.TYPE_c608) { - parseTextSampleEntry(stsd, childAtomType, childStartPosition, childAtomSize, trackId, - language, out); + parseTextSampleEntry( + stsd, childAtomType, childStartPosition, childAtomSize, trackId, language, out); + } else if (childAtomType == 
Atom.TYPE_mett) { + parseMetaDataSampleEntry(stsd, childAtomType, childStartPosition, trackId, out); } else if (childAtomType == Atom.TYPE_camm) { - out.format = Format.createSampleFormat(Integer.toString(trackId), - MimeTypes.APPLICATION_CAMERA_MOTION, null, Format.NO_VALUE, null); + out.format = + new Format.Builder() + .setId(trackId) + .setSampleMimeType(MimeTypes.APPLICATION_CAMERA_MOTION) + .build(); } stsd.setPosition(childStartPosition + childAtomSize); } return out; } - private static void parseTextSampleEntry(ParsableByteArray parent, int atomType, int position, - int atomSize, int trackId, String language, StsdData out) throws ParserException { + private static void parseTextSampleEntry( + ParsableByteArray parent, + int atomType, + int position, + int atomSize, + int trackId, + String language, + StsdData out) { parent.setPosition(position + Atom.HEADER_SIZE + StsdData.STSD_HEADER_SIZE); // Default values. - List initializationData = null; + @Nullable ImmutableList initializationData = null; long subsampleOffsetUs = Format.OFFSET_SAMPLE_RELATIVE; String mimeType; @@ -823,7 +1045,7 @@ private static void parseTextSampleEntry(ParsableByteArray parent, int atomType, int sampleDescriptionLength = atomSize - Atom.HEADER_SIZE - 8; byte[] sampleDescriptionData = new byte[sampleDescriptionLength]; parent.readBytes(sampleDescriptionData, 0, sampleDescriptionLength); - initializationData = Collections.singletonList(sampleDescriptionData); + initializationData = ImmutableList.of(sampleDescriptionData); } else if (atomType == Atom.TYPE_wvtt) { mimeType = MimeTypes.APPLICATION_MP4VTT; } else if (atomType == Atom.TYPE_stpp) { @@ -839,22 +1061,28 @@ private static void parseTextSampleEntry(ParsableByteArray parent, int atomType, } out.format = - Format.createTextSampleFormat( - Integer.toString(trackId), - mimeType, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - /* selectionFlags= */ 0, - language, - /* accessibilityChannel= */ Format.NO_VALUE, - /* drmInitData= */ null, - subsampleOffsetUs, - initializationData); + new Format.Builder() + .setId(trackId) + .setSampleMimeType(mimeType) + .setLanguage(language) + .setSubsampleOffsetUs(subsampleOffsetUs) + .setInitializationData(initializationData) + .build(); } - private static void parseVideoSampleEntry(ParsableByteArray parent, int atomType, int position, - int size, int trackId, int rotationDegrees, DrmInitData drmInitData, StsdData out, - int entryIndex) throws ParserException { + // hdrStaticInfo is allocated using allocate() in allocateHdrStaticInfo(). + @SuppressWarnings("ByteBufferBackingArray") + private static void parseVideoSampleEntry( + ParsableByteArray parent, + int atomType, + int position, + int size, + int trackId, + int rotationDegrees, + @Nullable DrmInitData drmInitData, + StsdData out, + int entryIndex) + throws ParserException { parent.setPosition(position + Atom.HEADER_SIZE + StsdData.STSD_HEADER_SIZE); parent.skipBytes(16); @@ -866,12 +1094,15 @@ private static void parseVideoSampleEntry(ParsableByteArray parent, int atomType int childPosition = parent.getPosition(); if (atomType == Atom.TYPE_encv) { - Pair sampleEntryEncryptionData = parseSampleEntryEncryptionData( - parent, position, size); + @Nullable + Pair sampleEntryEncryptionData = + parseSampleEntryEncryptionData(parent, position, size); if (sampleEntryEncryptionData != null) { atomType = sampleEntryEncryptionData.first; - drmInitData = drmInitData == null ? 
null - : drmInitData.copyWithSchemeType(sampleEntryEncryptionData.second.schemeType); + drmInitData = + drmInitData == null + ? null + : drmInitData.copyWithSchemeType(sampleEntryEncryptionData.second.schemeType); out.trackEncryptionBoxes[entryIndex] = sampleEntryEncryptionData.second; } parent.setPosition(childPosition); @@ -881,12 +1112,26 @@ private static void parseVideoSampleEntry(ParsableByteArray parent, int atomType // drmInitData = null; // } - List initializationData = null; - String mimeType = null; - String codecs = null; - byte[] projectionData = null; - @C.StereoMode - int stereoMode = Format.NO_VALUE; + @Nullable String mimeType = null; + if (atomType == Atom.TYPE_m1v_) { + mimeType = MimeTypes.VIDEO_MPEG; + } else if (atomType == Atom.TYPE_H263) { + mimeType = MimeTypes.VIDEO_H263; + } + + @Nullable List initializationData = null; + @Nullable String codecs = null; + @Nullable byte[] projectionData = null; + @C.StereoMode int stereoMode = Format.NO_VALUE; + @Nullable EsdsData esdsData = null; + + // HDR related metadata. + @C.ColorSpace int colorSpace = Format.NO_VALUE; + @C.ColorRange int colorRange = Format.NO_VALUE; + @C.ColorTransfer int colorTransfer = Format.NO_VALUE; + // The format of HDR static info is defined in CTA-861-G:2017, Table 45. + @Nullable ByteBuffer hdrStaticInfo = null; + while (childPosition - position < size) { parent.setPosition(childPosition); int childStartPosition = parent.getPosition(); @@ -895,46 +1140,90 @@ private static void parseVideoSampleEntry(ParsableByteArray parent, int atomType // Handle optional terminating four zero bytes in MOV files. break; } - Assertions.checkArgument(childAtomSize > 0, "childAtomSize should be positive"); + ExtractorUtil.checkContainerInput(childAtomSize > 0, "childAtomSize must be positive"); int childAtomType = parent.readInt(); if (childAtomType == Atom.TYPE_avcC) { - Assertions.checkState(mimeType == null); + ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null); mimeType = MimeTypes.VIDEO_H264; parent.setPosition(childStartPosition + Atom.HEADER_SIZE); AvcConfig avcConfig = AvcConfig.parse(parent); initializationData = avcConfig.initializationData; out.nalUnitLengthFieldLength = avcConfig.nalUnitLengthFieldLength; if (!pixelWidthHeightRatioFromPasp) { - pixelWidthHeightRatio = avcConfig.pixelWidthAspectRatio; + pixelWidthHeightRatio = avcConfig.pixelWidthHeightRatio; } + codecs = avcConfig.codecs; } else if (childAtomType == Atom.TYPE_hvcC) { - Assertions.checkState(mimeType == null); + ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null); mimeType = MimeTypes.VIDEO_H265; parent.setPosition(childStartPosition + Atom.HEADER_SIZE); HevcConfig hevcConfig = HevcConfig.parse(parent); initializationData = hevcConfig.initializationData; out.nalUnitLengthFieldLength = hevcConfig.nalUnitLengthFieldLength; + if (!pixelWidthHeightRatioFromPasp) { + pixelWidthHeightRatio = hevcConfig.pixelWidthHeightRatio; + } + codecs = hevcConfig.codecs; } else if (childAtomType == Atom.TYPE_dvcC || childAtomType == Atom.TYPE_dvvC) { - DolbyVisionConfig dolbyVisionConfig = DolbyVisionConfig.parse(parent); + @Nullable DolbyVisionConfig dolbyVisionConfig = DolbyVisionConfig.parse(parent); if (dolbyVisionConfig != null) { codecs = dolbyVisionConfig.codecs; mimeType = MimeTypes.VIDEO_DOLBY_VISION; } } else if (childAtomType == Atom.TYPE_vpcC) { - Assertions.checkState(mimeType == null); + ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null); mimeType = (atomType == Atom.TYPE_vp08) 
? MimeTypes.VIDEO_VP8 : MimeTypes.VIDEO_VP9; } else if (childAtomType == Atom.TYPE_av1C) { - Assertions.checkState(mimeType == null); + ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null); mimeType = MimeTypes.VIDEO_AV1; + } else if (childAtomType == Atom.TYPE_clli) { + if (hdrStaticInfo == null) { + hdrStaticInfo = allocateHdrStaticInfo(); + } + // The contents of the clli box occupy the last 4 bytes of the HDR static info array. Note + // that each field is read in big endian and written in little endian. + hdrStaticInfo.position(21); + hdrStaticInfo.putShort(parent.readShort()); // max_content_light_level. + hdrStaticInfo.putShort(parent.readShort()); // max_pic_average_light_level. + } else if (childAtomType == Atom.TYPE_mdcv) { + if (hdrStaticInfo == null) { + hdrStaticInfo = allocateHdrStaticInfo(); + } + // The contents of the mdcv box occupy 20 bytes after the first byte of the HDR static info + // array. Note that each field is read in big endian and written in little endian. + short displayPrimariesGX = parent.readShort(); + short displayPrimariesGY = parent.readShort(); + short displayPrimariesBX = parent.readShort(); + short displayPrimariesBY = parent.readShort(); + short displayPrimariesRX = parent.readShort(); + short displayPrimariesRY = parent.readShort(); + short whitePointX = parent.readShort(); + short whitePointY = parent.readShort(); + long maxDisplayMasteringLuminance = parent.readUnsignedInt(); + long minDisplayMasteringLuminance = parent.readUnsignedInt(); + + hdrStaticInfo.position(1); + hdrStaticInfo.putShort(displayPrimariesRX); + hdrStaticInfo.putShort(displayPrimariesRY); + hdrStaticInfo.putShort(displayPrimariesGX); + hdrStaticInfo.putShort(displayPrimariesGY); + hdrStaticInfo.putShort(displayPrimariesBX); + hdrStaticInfo.putShort(displayPrimariesBY); + hdrStaticInfo.putShort(whitePointX); + hdrStaticInfo.putShort(whitePointY); + hdrStaticInfo.putShort((short) (maxDisplayMasteringLuminance / 10000)); + hdrStaticInfo.putShort((short) (minDisplayMasteringLuminance / 10000)); } else if (childAtomType == Atom.TYPE_d263) { - Assertions.checkState(mimeType == null); + ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null); mimeType = MimeTypes.VIDEO_H263; } else if (childAtomType == Atom.TYPE_esds) { - Assertions.checkState(mimeType == null); - Pair mimeTypeAndInitializationData = - parseEsdsFromParent(parent, childStartPosition); - mimeType = mimeTypeAndInitializationData.first; - initializationData = Collections.singletonList(mimeTypeAndInitializationData.second); + ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null); + esdsData = parseEsdsFromParent(parent, childStartPosition); + mimeType = esdsData.mimeType; + @Nullable byte[] initializationDataBytes = esdsData.initializationData; + if (initializationDataBytes != null) { + initializationData = ImmutableList.of(initializationDataBytes); + } } else if (childAtomType == Atom.TYPE_pasp) { pixelWidthHeightRatio = parsePaspFromParent(parent, childStartPosition); pixelWidthHeightRatioFromPasp = true; @@ -962,6 +1251,28 @@ private static void parseVideoSampleEntry(ParsableByteArray parent, int atomType break; } } + } else if (childAtomType == Atom.TYPE_colr) { + int colorType = parent.readInt(); + if (colorType == TYPE_nclx || colorType == TYPE_nclc) { + // For more info on syntax, see Section 8.5.2.2 in ISO/IEC 14496-12:2012(E) and + // https://developer.apple.com/library/archive/documentation/QuickTime/QTFF/QTFFChap3/qtff3.html. 
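// Sketch of the clli/mdcv packing shown above, under the layout the patch documents
// (CTA-861-G:2017 Tables 44 and 45): the 25-byte HDR static info buffer is little-endian,
// byte 0 is the metadata type, bytes 1..20 hold the mdcv display primaries, white point and
// max/min mastering luminance (divided by 10000), and bytes 21..24 hold the clli light
// levels. The parameter names are illustrative; values are assumed to have been read from
// the big-endian MP4 boxes already.
static byte[] packHdrStaticInfo(
    short[] primariesRgbXy,  // {Rx, Ry, Gx, Gy, Bx, By} from mdcv, reordered to R, G, B.
    short whitePointX, short whitePointY,
    long maxMasteringLuminance, long minMasteringLuminance,
    short maxContentLightLevel, short maxFrameAverageLightLevel) {
  java.nio.ByteBuffer info =
      java.nio.ByteBuffer.allocate(25).order(java.nio.ByteOrder.LITTLE_ENDIAN);
  info.put((byte) 0);  // Static Metadata Type 1.
  for (short component : primariesRgbXy) {
    info.putShort(component);
  }
  info.putShort(whitePointX);
  info.putShort(whitePointY);
  info.putShort((short) (maxMasteringLuminance / 10000));
  info.putShort((short) (minMasteringLuminance / 10000));
  info.putShort(maxContentLightLevel);        // clli: max_content_light_level.
  info.putShort(maxFrameAverageLightLevel);   // clli: max_pic_average_light_level.
  return info.array();
}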
+ int colorPrimaries = parent.readUnsignedShort(); + int transferCharacteristics = parent.readUnsignedShort(); + parent.skipBytes(2); // matrix_coefficients. + + // Only try and read full_range_flag if the box is long enough. It should be present in + // all colr boxes with type=nclx (Section 8.5.2.2 in ISO/IEC 14496-12:2012(E)) but some + // device cameras record videos with type=nclx without this final flag (and therefore + // size=18): https://github.com/google/ExoPlayer/issues/9332 + boolean fullRangeFlag = + childAtomSize == 19 && (parent.readUnsignedByte() & 0b10000000) != 0; + colorSpace = ColorInfo.isoColorPrimariesToColorSpace(colorPrimaries); + colorRange = fullRangeFlag ? C.COLOR_RANGE_FULL : C.COLOR_RANGE_LIMITED; + colorTransfer = + ColorInfo.isoTransferCharacteristicsToColorTransfer(transferCharacteristics); + } else { + Log.w(TAG, "Unsupported color type: " + Atom.getAtomTypeString(colorType)); + } } childPosition += childAtomSize; } @@ -971,38 +1282,75 @@ private static void parseVideoSampleEntry(ParsableByteArray parent, int atomType return; } - out.format = - Format.createVideoSampleFormat( - Integer.toString(trackId), - mimeType, - codecs, - /* bitrate= */ Format.NO_VALUE, - /* maxInputSize= */ Format.NO_VALUE, - width, - height, - /* frameRate= */ Format.NO_VALUE, - initializationData, - rotationDegrees, - pixelWidthHeightRatio, - projectionData, - stereoMode, - /* colorInfo= */ null, - drmInitData); + Format.Builder formatBuilder = + new Format.Builder() + .setId(trackId) + .setSampleMimeType(mimeType) + .setCodecs(codecs) + .setWidth(width) + .setHeight(height) + .setPixelWidthHeightRatio(pixelWidthHeightRatio) + .setRotationDegrees(rotationDegrees) + .setProjectionData(projectionData) + .setStereoMode(stereoMode) + .setInitializationData(initializationData) + .setDrmInitData(drmInitData); + if (colorSpace != Format.NO_VALUE + || colorRange != Format.NO_VALUE + || colorTransfer != Format.NO_VALUE + || hdrStaticInfo != null) { + // Note that if either mdcv or clli are missing, we leave the corresponding HDR static + // metadata bytes with value zero. See [Internal ref: b/194535665]. + formatBuilder.setColorInfo( + new ColorInfo( + colorSpace, + colorRange, + colorTransfer, + hdrStaticInfo != null ? hdrStaticInfo.array() : null)); + } + + if (esdsData != null) { + formatBuilder + .setAverageBitrate(Ints.saturatedCast(esdsData.bitrate)) + .setPeakBitrate(Ints.saturatedCast(esdsData.peakBitrate)); + } + + out.format = formatBuilder.build(); + } + + private static ByteBuffer allocateHdrStaticInfo() { + // For HDR static info, Android decoders expect a 25-byte array. The first byte is zero to + // represent Static Metadata Type 1, as per CTA-861-G:2017, Table 44. The following 24 bytes + // follow CTA-861-G:2017, Table 45. + return ByteBuffer.allocate(25).order(ByteOrder.LITTLE_ENDIAN); + } + + private static void parseMetaDataSampleEntry( + ParsableByteArray parent, int atomType, int position, int trackId, StsdData out) { + parent.setPosition(position + Atom.HEADER_SIZE + StsdData.STSD_HEADER_SIZE); + if (atomType == Atom.TYPE_mett) { + parent.readNullTerminatedString(); // Skip optional content_encoding + @Nullable String mimeType = parent.readNullTerminatedString(); + if (mimeType != null) { + out.format = new Format.Builder().setId(trackId).setSampleMimeType(mimeType).build(); + } + } } /** - * Parses the edts atom (defined in 14496-12 subsection 8.6.5). + * Parses the edts atom (defined in ISO/IEC 14496-12 subsection 8.6.5). 
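// Sketch of what the colr handling above derives from an nclx/nclc payload, spelling out a
// few ISO/IEC 23001-8 (H.273) code points I am confident about. The enum names are
// illustrative stand-ins; the real code maps these to C.ColorSpace / C.ColorTransfer via
// ColorInfo helpers.
enum Primaries { BT709, BT2020, UNKNOWN }
enum Transfer { SDR, ST2084_PQ, HLG, UNKNOWN }

static Primaries primariesFromIsoCode(int colourPrimaries) {
  switch (colourPrimaries) {
    case 1: return Primaries.BT709;
    case 9: return Primaries.BT2020;
    default: return Primaries.UNKNOWN;
  }
}

static Transfer transferFromIsoCode(int transferCharacteristics) {
  switch (transferCharacteristics) {
    case 1:
    case 6:
    case 7: return Transfer.SDR;        // BT.709 / BT.601-style transfer.
    case 16: return Transfer.ST2084_PQ; // SMPTE ST 2084 (PQ).
    case 18: return Transfer.HLG;       // ARIB STD-B67 (HLG).
    default: return Transfer.UNKNOWN;
  }
}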
* * @param edtsAtom edts (edit box) atom to decode. - * @return Pair of edit list durations and edit list media times, or a pair of nulls if they are - * not present. + * @return Pair of edit list durations and edit list media times, or {@code null} if they are not + * present. */ + @Nullable private static Pair parseEdts(Atom.ContainerAtom edtsAtom) { - Atom.LeafAtom elst; - if (edtsAtom == null || (elst = edtsAtom.getLeafAtomOfType(Atom.TYPE_elst)) == null) { - return Pair.create(null, null); + @Nullable Atom.LeafAtom elstAtom = edtsAtom.getLeafAtomOfType(Atom.TYPE_elst); + if (elstAtom == null) { + return null; } - ParsableByteArray elstData = elst.data; + ParsableByteArray elstData = elstAtom.data; elstData.setPosition(Atom.HEADER_SIZE); int fullAtom = elstData.readInt(); int version = Atom.parseFullAtomVersion(fullAtom); @@ -1030,9 +1378,18 @@ private static float parsePaspFromParent(ParsableByteArray parent, int position) return (float) hSpacing / vSpacing; } - private static void parseAudioSampleEntry(ParsableByteArray parent, int atomType, int position, - int size, int trackId, String language, boolean isQuickTime, DrmInitData drmInitData, - StsdData out, int entryIndex) throws ParserException { + private static void parseAudioSampleEntry( + ParsableByteArray parent, + int atomType, + int position, + int size, + int trackId, + String language, + boolean isQuickTime, + @Nullable DrmInitData drmInitData, + StsdData out, + int entryIndex) + throws ParserException { parent.setPosition(position + Atom.HEADER_SIZE + StsdData.STSD_HEADER_SIZE); int quickTimeSoundDescriptionVersion = 0; @@ -1045,18 +1402,25 @@ private static void parseAudioSampleEntry(ParsableByteArray parent, int atomType int channelCount; int sampleRate; + int sampleRateMlp = 0; @C.PcmEncoding int pcmEncoding = Format.NO_VALUE; + @Nullable String codecs = null; + @Nullable EsdsData esdsData = null; if (quickTimeSoundDescriptionVersion == 0 || quickTimeSoundDescriptionVersion == 1) { channelCount = parent.readUnsignedShort(); - parent.skipBytes(6); // sampleSize, compressionId, packetSize. + parent.skipBytes(6); // sampleSize, compressionId, packetSize. + sampleRate = parent.readUnsignedFixedPoint1616(); + // The sample rate has been redefined as a 32-bit value for Dolby TrueHD (MLP) streams. + parent.setPosition(parent.getPosition() - 4); + sampleRateMlp = parent.readInt(); if (quickTimeSoundDescriptionVersion == 1) { parent.skipBytes(16); } } else if (quickTimeSoundDescriptionVersion == 2) { - parent.skipBytes(16); // always[3,16,Minus2,0,65536], sizeOfStructOnly + parent.skipBytes(16); // always[3,16,Minus2,0,65536], sizeOfStructOnly sampleRate = (int) Math.round(parent.readDouble()); channelCount = parent.readUnsignedIntToInt(); @@ -1071,12 +1435,15 @@ private static void parseAudioSampleEntry(ParsableByteArray parent, int atomType int childPosition = parent.getPosition(); if (atomType == Atom.TYPE_enca) { - Pair sampleEntryEncryptionData = parseSampleEntryEncryptionData( - parent, position, size); + @Nullable + Pair sampleEntryEncryptionData = + parseSampleEntryEncryptionData(parent, position, size); if (sampleEntryEncryptionData != null) { atomType = sampleEntryEncryptionData.first; - drmInitData = drmInitData == null ? null - : drmInitData.copyWithSchemeType(sampleEntryEncryptionData.second.schemeType); + drmInitData = + drmInitData == null + ? 
null + : drmInitData.copyWithSchemeType(sampleEntryEncryptionData.second.schemeType); out.trackEncryptionBoxes[entryIndex] = sampleEntryEncryptionData.second; } parent.setPosition(childPosition); @@ -1087,7 +1454,7 @@ private static void parseAudioSampleEntry(ParsableByteArray parent, int atomType // } // If the atom type determines a MIME type, set it immediately. - String mimeType = null; + @Nullable String mimeType = null; if (atomType == Atom.TYPE_ac_3) { mimeType = MimeTypes.AUDIO_AC3; } else if (atomType == Atom.TYPE_ec_3) { @@ -1100,6 +1467,8 @@ private static void parseAudioSampleEntry(ParsableByteArray parent, int atomType mimeType = MimeTypes.AUDIO_DTS_HD; } else if (atomType == Atom.TYPE_dtse) { mimeType = MimeTypes.AUDIO_DTS_EXPRESS; + } else if (atomType == Atom.TYPE_dtsx) { + mimeType = MimeTypes.AUDIO_DTS_X; } else if (atomType == Atom.TYPE_samr) { mimeType = MimeTypes.AUDIO_AMR_NB; } else if (atomType == Atom.TYPE_sawb) { @@ -1110,8 +1479,12 @@ private static void parseAudioSampleEntry(ParsableByteArray parent, int atomType } else if (atomType == Atom.TYPE_twos) { mimeType = MimeTypes.AUDIO_RAW; pcmEncoding = C.ENCODING_PCM_16BIT_BIG_ENDIAN; - } else if (atomType == Atom.TYPE__mp3) { + } else if (atomType == Atom.TYPE__mp2 || atomType == Atom.TYPE__mp3) { mimeType = MimeTypes.AUDIO_MPEG; + } else if (atomType == Atom.TYPE_mha1) { + mimeType = MimeTypes.AUDIO_MPEGH_MHA1; + } else if (atomType == Atom.TYPE_mhm1) { + mimeType = MimeTypes.AUDIO_MPEGH_MHM1; } else if (atomType == Atom.TYPE_alac) { mimeType = MimeTypes.AUDIO_ALAC; } else if (atomType == Atom.TYPE_alaw) { @@ -1122,99 +1495,161 @@ private static void parseAudioSampleEntry(ParsableByteArray parent, int atomType mimeType = MimeTypes.AUDIO_OPUS; } else if (atomType == Atom.TYPE_fLaC) { mimeType = MimeTypes.AUDIO_FLAC; + } else if (atomType == Atom.TYPE_mlpa) { + mimeType = MimeTypes.AUDIO_TRUEHD; } - byte[] initializationData = null; + @Nullable List initializationData = null; while (childPosition - position < size) { parent.setPosition(childPosition); int childAtomSize = parent.readInt(); - Assertions.checkArgument(childAtomSize > 0, "childAtomSize should be positive"); + ExtractorUtil.checkContainerInput(childAtomSize > 0, "childAtomSize must be positive"); int childAtomType = parent.readInt(); - if (childAtomType == Atom.TYPE_esds || (isQuickTime && childAtomType == Atom.TYPE_wave)) { - int esdsAtomPosition = childAtomType == Atom.TYPE_esds ? childPosition - : findEsdsPosition(parent, childPosition, childAtomSize); + if (childAtomType == Atom.TYPE_mhaC) { + // See ISO_IEC_23008-3;2019 MHADecoderConfigurationRecord + // The header consists of: size (4), boxtype 'mhaC' (4), configurationVersion (1), + // mpegh3daProfileLevelIndication (1), referenceChannelLayout (1), mpegh3daConfigLength (2). + int mhacHeaderSize = 13; + int childAtomBodySize = childAtomSize - mhacHeaderSize; + byte[] initializationDataBytes = new byte[childAtomBodySize]; + parent.setPosition(childPosition + mhacHeaderSize); + parent.readBytes(initializationDataBytes, 0, childAtomBodySize); + initializationData = ImmutableList.of(initializationDataBytes); + } else if (childAtomType == Atom.TYPE_esds + || (isQuickTime && childAtomType == Atom.TYPE_wave)) { + int esdsAtomPosition = + childAtomType == Atom.TYPE_esds + ? 
childPosition + : findBoxPosition(parent, Atom.TYPE_esds, childPosition, childAtomSize); if (esdsAtomPosition != C.POSITION_UNSET) { - Pair mimeTypeAndInitializationData = - parseEsdsFromParent(parent, esdsAtomPosition); - mimeType = mimeTypeAndInitializationData.first; - initializationData = mimeTypeAndInitializationData.second; - if (MimeTypes.AUDIO_AAC.equals(mimeType)) { - // Update sampleRate and channelCount from the AudioSpecificConfig initialization data, - // which is more reliable. See [Internal: b/10903778]. - Pair audioSpecificConfig = - CodecSpecificDataUtil.parseAacAudioSpecificConfig(initializationData); - sampleRate = audioSpecificConfig.first; - channelCount = audioSpecificConfig.second; + esdsData = parseEsdsFromParent(parent, esdsAtomPosition); + mimeType = esdsData.mimeType; + @Nullable byte[] initializationDataBytes = esdsData.initializationData; + if (initializationDataBytes != null) { + if (MimeTypes.AUDIO_AAC.equals(mimeType)) { + // Update sampleRate and channelCount from the AudioSpecificConfig initialization + // data, which is more reliable. See [Internal: b/10903778]. + AacUtil.Config aacConfig = AacUtil.parseAudioSpecificConfig(initializationDataBytes); + sampleRate = aacConfig.sampleRateHz; + channelCount = aacConfig.channelCount; + codecs = aacConfig.codecs; + } + initializationData = ImmutableList.of(initializationDataBytes); } } } else if (childAtomType == Atom.TYPE_dac3) { parent.setPosition(Atom.HEADER_SIZE + childPosition); - out.format = Ac3Util.parseAc3AnnexFFormat(parent, Integer.toString(trackId), language, - drmInitData); + out.format = + Ac3Util.parseAc3AnnexFFormat(parent, Integer.toString(trackId), language, drmInitData); } else if (childAtomType == Atom.TYPE_dec3) { parent.setPosition(Atom.HEADER_SIZE + childPosition); - out.format = Ac3Util.parseEAc3AnnexFFormat(parent, Integer.toString(trackId), language, - drmInitData); + out.format = + Ac3Util.parseEAc3AnnexFFormat(parent, Integer.toString(trackId), language, drmInitData); } else if (childAtomType == Atom.TYPE_dac4) { parent.setPosition(Atom.HEADER_SIZE + childPosition); out.format = Ac4Util.parseAc4AnnexEFormat(parent, Integer.toString(trackId), language, drmInitData); + } else if (childAtomType == Atom.TYPE_dmlp) { + if (sampleRateMlp <= 0) { + throw ParserException.createForMalformedContainer( + "Invalid sample rate for Dolby TrueHD MLP stream: " + sampleRateMlp, + /* cause= */ null); + } + sampleRate = sampleRateMlp; + // The channel count from the sample entry must be ignored for Dolby TrueHD (MLP) streams + // because these streams can carry simultaneously multiple representations of the same + // audio. Use stereo by default. + channelCount = 2; } else if (childAtomType == Atom.TYPE_ddts) { - out.format = Format.createAudioSampleFormat(Integer.toString(trackId), mimeType, null, - Format.NO_VALUE, Format.NO_VALUE, channelCount, sampleRate, null, drmInitData, 0, - language); + out.format = + new Format.Builder() + .setId(trackId) + .setSampleMimeType(mimeType) + .setChannelCount(channelCount) + .setSampleRate(sampleRate) + .setDrmInitData(drmInitData) + .setLanguage(language) + .build(); } else if (childAtomType == Atom.TYPE_dOps) { // Build an Opus Identification Header (defined in RFC-7845) by concatenating the Opus Magic // Signature and the body of the dOps atom. 
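// Standalone sketch of the concatenation described above and implemented just below:
// RFC 7845 defines the identification header as the 8-byte magic "OpusHead" followed by the
// header fields, which MP4 carries as the body of the dOps box. Plain arrays are used here
// instead of ParsableByteArray/OpusUtil.
static byte[] buildOpusIdentificationHeader(byte[] dOpsBody) {
  byte[] magic = "OpusHead".getBytes(java.nio.charset.StandardCharsets.US_ASCII);
  byte[] header = java.util.Arrays.copyOf(magic, magic.length + dOpsBody.length);
  System.arraycopy(dOpsBody, 0, header, magic.length, dOpsBody.length);
  return header;
}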
int childAtomBodySize = childAtomSize - Atom.HEADER_SIZE; - initializationData = new byte[opusMagic.length + childAtomBodySize]; - System.arraycopy(opusMagic, 0, initializationData, 0, opusMagic.length); + byte[] headerBytes = Arrays.copyOf(opusMagic, opusMagic.length + childAtomBodySize); parent.setPosition(childPosition + Atom.HEADER_SIZE); - parent.readBytes(initializationData, opusMagic.length, childAtomBodySize); + parent.readBytes(headerBytes, opusMagic.length, childAtomBodySize); + initializationData = OpusUtil.buildInitializationData(headerBytes); } else if (childAtomType == Atom.TYPE_dfLa) { int childAtomBodySize = childAtomSize - Atom.FULL_HEADER_SIZE; - initializationData = new byte[4 + childAtomBodySize]; - initializationData[0] = 0x66; // f - initializationData[1] = 0x4C; // L - initializationData[2] = 0x61; // a - initializationData[3] = 0x43; // C + byte[] initializationDataBytes = new byte[4 + childAtomBodySize]; + initializationDataBytes[0] = 0x66; // f + initializationDataBytes[1] = 0x4C; // L + initializationDataBytes[2] = 0x61; // a + initializationDataBytes[3] = 0x43; // C parent.setPosition(childPosition + Atom.FULL_HEADER_SIZE); - parent.readBytes(initializationData, /* offset= */ 4, childAtomBodySize); + parent.readBytes(initializationDataBytes, /* offset= */ 4, childAtomBodySize); + initializationData = ImmutableList.of(initializationDataBytes); } else if (childAtomType == Atom.TYPE_alac) { int childAtomBodySize = childAtomSize - Atom.FULL_HEADER_SIZE; - initializationData = new byte[childAtomBodySize]; + byte[] initializationDataBytes = new byte[childAtomBodySize]; parent.setPosition(childPosition + Atom.FULL_HEADER_SIZE); - parent.readBytes(initializationData, /* offset= */ 0, childAtomBodySize); + parent.readBytes(initializationDataBytes, /* offset= */ 0, childAtomBodySize); // Update sampleRate and channelCount from the AudioSpecificConfig initialization data, // which is more reliable. See https://github.com/google/ExoPlayer/pull/6629. Pair audioSpecificConfig = - CodecSpecificDataUtil.parseAlacAudioSpecificConfig(initializationData); + CodecSpecificDataUtil.parseAlacAudioSpecificConfig(initializationDataBytes); sampleRate = audioSpecificConfig.first; channelCount = audioSpecificConfig.second; + initializationData = ImmutableList.of(initializationDataBytes); } childPosition += childAtomSize; } if (out.format == null && mimeType != null) { - out.format = Format.createAudioSampleFormat(Integer.toString(trackId), mimeType, null, - Format.NO_VALUE, Format.NO_VALUE, channelCount, sampleRate, pcmEncoding, - initializationData == null ? null : Collections.singletonList(initializationData), - drmInitData, 0, language); + Format.Builder formatBuilder = + new Format.Builder() + .setId(trackId) + .setSampleMimeType(mimeType) + .setCodecs(codecs) + .setChannelCount(channelCount) + .setSampleRate(sampleRate) + .setPcmEncoding(pcmEncoding) + .setInitializationData(initializationData) + .setDrmInitData(drmInitData) + .setLanguage(language); + + if (esdsData != null) { + formatBuilder + .setAverageBitrate(Ints.saturatedCast(esdsData.bitrate)) + .setPeakBitrate(Ints.saturatedCast(esdsData.peakBitrate)); + } + + out.format = formatBuilder.build(); } } /** - * Returns the position of the esds box within a parent, or {@link C#POSITION_UNSET} if no esds - * box is found + * Returns the position of the first box with the given {@code boxType} within {@code parent}, or + * {@link C#POSITION_UNSET} if no such box is found. 
+ * + * @param parent The {@link ParsableByteArray} to search. The search will start from the {@link + * ParsableByteArray#getPosition() current position}. + * @param boxType The box type to search for. + * @param parentBoxPosition The position in {@code parent} of the box we are searching. + * @param parentBoxSize The size of the parent box we are searching in bytes. + * @return The position of the first box with the given {@code boxType} within {@code parent}, or + * {@link C#POSITION_UNSET} if no such box is found. */ - private static int findEsdsPosition(ParsableByteArray parent, int position, int size) { + private static int findBoxPosition( + ParsableByteArray parent, int boxType, int parentBoxPosition, int parentBoxSize) + throws ParserException { int childAtomPosition = parent.getPosition(); - while (childAtomPosition - position < size) { + ExtractorUtil.checkContainerInput(childAtomPosition >= parentBoxPosition, /* message= */ null); + while (childAtomPosition - parentBoxPosition < parentBoxSize) { parent.setPosition(childAtomPosition); int childAtomSize = parent.readInt(); - Assertions.checkArgument(childAtomSize > 0, "childAtomSize should be positive"); + ExtractorUtil.checkContainerInput(childAtomSize > 0, "childAtomSize must be positive"); int childType = parent.readInt(); - if (childType == Atom.TYPE_esds) { + if (childType == boxType) { return childAtomPosition; } childAtomPosition += childAtomSize; @@ -1222,12 +1657,10 @@ private static int findEsdsPosition(ParsableByteArray parent, int position, int return C.POSITION_UNSET; } - /** - * Returns codec-specific initialization data contained in an esds box. - */ - private static Pair parseEsdsFromParent(ParsableByteArray parent, int position) { + /** Returns codec-specific initialization data contained in an esds box. */ + private static EsdsData parseEsdsFromParent(ParsableByteArray parent, int position) { parent.setPosition(position + Atom.HEADER_SIZE + 4); - // Start of the ES_Descriptor (defined in 14496-1) + // Start of the ES_Descriptor (defined in ISO/IEC 14496-1) parent.skipBytes(1); // ES_Descriptor tag parseExpandableClassSize(parent); parent.skipBytes(2); // ES_ID @@ -1237,33 +1670,45 @@ private static Pair parseEsdsFromParent(ParsableByteArray parent parent.skipBytes(2); } if ((flags & 0x40 /* URL_Flag */) != 0) { - parent.skipBytes(parent.readUnsignedShort()); + parent.skipBytes(parent.readUnsignedByte()); } if ((flags & 0x20 /* OCRstreamFlag */) != 0) { parent.skipBytes(2); } - // Start of the DecoderConfigDescriptor (defined in 14496-1) + // Start of the DecoderConfigDescriptor (defined in ISO/IEC 14496-1) parent.skipBytes(1); // DecoderConfigDescriptor tag parseExpandableClassSize(parent); - // Set the MIME type based on the object type indication (14496-1 table 5). + // Set the MIME type based on the object type indication (ISO/IEC 14496-1 table 5). 
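// Sketch of the object-type-indication lookup referenced above (ISO/IEC 14496-1 table 5 and
// the MP4RA registry), limited to a few well-known code points; the real
// getMimeTypeFromMp4ObjectType covers many more types.
static String mimeTypeForObjectType(int objectTypeIndication) {
  switch (objectTypeIndication) {
    case 0x20: return "video/mp4v-es";   // MPEG-4 Visual.
    case 0x40: return "audio/mp4a-latm"; // MPEG-4 AAC.
    case 0x69:                           // MPEG-2 audio (ISO/IEC 13818-3).
    case 0x6B: return "audio/mpeg";      // MPEG-1 audio (MP3).
    default: return null;                // Unknown to this simplified sketch.
  }
}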
int objectTypeIndication = parent.readUnsignedByte(); - String mimeType = getMimeTypeFromMp4ObjectType(objectTypeIndication); + @Nullable String mimeType = getMimeTypeFromMp4ObjectType(objectTypeIndication); if (MimeTypes.AUDIO_MPEG.equals(mimeType) || MimeTypes.AUDIO_DTS.equals(mimeType) || MimeTypes.AUDIO_DTS_HD.equals(mimeType)) { - return Pair.create(mimeType, null); + return new EsdsData( + mimeType, + /* initializationData= */ null, + /* bitrate= */ Format.NO_VALUE, + /* peakBitrate= */ Format.NO_VALUE); } - parent.skipBytes(12); + parent.skipBytes(4); + long peakBitrate = parent.readUnsignedInt(); + long bitrate = parent.readUnsignedInt(); // Start of the DecoderSpecificInfo. parent.skipBytes(1); // DecoderSpecificInfo tag int initializationDataSize = parseExpandableClassSize(parent); byte[] initializationData = new byte[initializationDataSize]; parent.readBytes(initializationData, 0, initializationDataSize); - return Pair.create(mimeType, initializationData); + + // Skipping zero values as unknown. + return new EsdsData( + mimeType, + /* initializationData= */ initializationData, + /* bitrate= */ bitrate > 0 ? bitrate : Format.NO_VALUE, + /* peakBitrate= */ peakBitrate > 0 ? peakBitrate : Format.NO_VALUE); } /** @@ -1271,17 +1716,19 @@ private static Pair parseEsdsFromParent(ParsableByteArray parent * unencrypted atom type and a {@link TrackEncryptionBox}. Null is returned if no common * encryption sinf atom was present. */ + @Nullable private static Pair parseSampleEntryEncryptionData( - ParsableByteArray parent, int position, int size) { + ParsableByteArray parent, int position, int size) throws ParserException { int childPosition = parent.getPosition(); while (childPosition - position < size) { parent.setPosition(childPosition); int childAtomSize = parent.readInt(); - Assertions.checkArgument(childAtomSize > 0, "childAtomSize should be positive"); + ExtractorUtil.checkContainerInput(childAtomSize > 0, "childAtomSize must be positive"); int childAtomType = parent.readInt(); if (childAtomType == Atom.TYPE_sinf) { - Pair result = parseCommonEncryptionSinfFromParent(parent, - childPosition, childAtomSize); + @Nullable + Pair result = + parseCommonEncryptionSinfFromParent(parent, childPosition, childAtomSize); if (result != null) { return result; } @@ -1291,13 +1738,14 @@ private static Pair parseSampleEntryEncryptionData( return null; } + @Nullable /* package */ static Pair parseCommonEncryptionSinfFromParent( - ParsableByteArray parent, int position, int size) { + ParsableByteArray parent, int position, int size) throws ParserException { int childPosition = position + Atom.HEADER_SIZE; int schemeInformationBoxPosition = C.POSITION_UNSET; int schemeInformationBoxSize = 0; - String schemeType = null; - Integer dataFormat = null; + @Nullable String schemeType = null; + @Nullable Integer dataFormat = null; while (childPosition - position < size) { parent.setPosition(childPosition); int childAtomSize = parent.readInt(); @@ -1315,22 +1763,27 @@ private static Pair parseSampleEntryEncryptionData( childPosition += childAtomSize; } - if (C.CENC_TYPE_cenc.equals(schemeType) || C.CENC_TYPE_cbc1.equals(schemeType) - || C.CENC_TYPE_cens.equals(schemeType) || C.CENC_TYPE_cbcs.equals(schemeType)) { - Assertions.checkArgument(dataFormat != null, "frma atom is mandatory"); - Assertions.checkArgument(schemeInformationBoxPosition != C.POSITION_UNSET, - "schi atom is mandatory"); - TrackEncryptionBox encryptionBox = parseSchiFromParent(parent, schemeInformationBoxPosition, - schemeInformationBoxSize, 
schemeType); - Assertions.checkArgument(encryptionBox != null, "tenc atom is mandatory"); - return Pair.create(dataFormat, encryptionBox); + if (C.CENC_TYPE_cenc.equals(schemeType) + || C.CENC_TYPE_cbc1.equals(schemeType) + || C.CENC_TYPE_cens.equals(schemeType) + || C.CENC_TYPE_cbcs.equals(schemeType)) { + ExtractorUtil.checkContainerInput(dataFormat != null, "frma atom is mandatory"); + ExtractorUtil.checkContainerInput( + schemeInformationBoxPosition != C.POSITION_UNSET, "schi atom is mandatory"); + @Nullable + TrackEncryptionBox encryptionBox = + parseSchiFromParent( + parent, schemeInformationBoxPosition, schemeInformationBoxSize, schemeType); + ExtractorUtil.checkContainerInput(encryptionBox != null, "tenc atom is mandatory"); + return Pair.create(dataFormat, castNonNull(encryptionBox)); } else { return null; } } - private static TrackEncryptionBox parseSchiFromParent(ParsableByteArray parent, int position, - int size, String schemeType) { + @Nullable + private static TrackEncryptionBox parseSchiFromParent( + ParsableByteArray parent, int position, int size, String schemeType) { int childPosition = position + Atom.HEADER_SIZE; while (childPosition - position < size) { parent.setPosition(childPosition); @@ -1359,17 +1812,22 @@ private static TrackEncryptionBox parseSchiFromParent(ParsableByteArray parent, constantIv = new byte[constantIvSize]; parent.readBytes(constantIv, 0, constantIvSize); } - return new TrackEncryptionBox(defaultIsProtected, schemeType, defaultPerSampleIvSize, - defaultKeyId, defaultCryptByteBlock, defaultSkipByteBlock, constantIv); + return new TrackEncryptionBox( + defaultIsProtected, + schemeType, + defaultPerSampleIvSize, + defaultKeyId, + defaultCryptByteBlock, + defaultSkipByteBlock, + constantIv); } childPosition += childAtomSize; } return null; } - /** - * Parses the proj box from sv3d box, as specified by https://github.com/google/spatial-media. - */ + /** Parses the proj box from sv3d box, as specified by https://github.com/google/spatial-media. */ + @Nullable private static byte[] parseProjFromParent(ParsableByteArray parent, int position, int size) { int childPosition = position + Atom.HEADER_SIZE; while (childPosition - position < size) { @@ -1377,16 +1835,14 @@ private static byte[] parseProjFromParent(ParsableByteArray parent, int position int childAtomSize = parent.readInt(); int childAtomType = parent.readInt(); if (childAtomType == Atom.TYPE_proj) { - return Arrays.copyOfRange(parent.data, childPosition, childPosition + childAtomSize); + return Arrays.copyOfRange(parent.getData(), childPosition, childPosition + childAtomSize); } childPosition += childAtomSize; } return null; } - /** - * Parses the size of an expandable class, as specified by ISO 14496-1 subsection 8.3.3. - */ + /** Parses the size of an expandable class, as specified by ISO/IEC 14496-1 subsection 8.3.3. 
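// Sketch of the ISO/IEC 14496-1 subsection 8.3.3 "expandable" size coding that the method
// below decodes: each byte contributes its low 7 bits, and the high bit signals that
// another byte follows. Standalone over a ByteBuffer for illustration.
static int readExpandableSize(java.nio.ByteBuffer data) {
  int size = 0;
  int currentByte;
  do {
    currentByte = data.get() & 0xFF;
    size = (size << 7) | (currentByte & 0x7F);
  } while ((currentByte & 0x80) != 0);
  return size;
}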
*/ private static int parseExpandableClassSize(ParsableByteArray data) { int currentByte = data.readUnsignedByte(); int size = currentByte & 0x7F; @@ -1429,8 +1885,9 @@ private static final class ChunkIterator { private int nextSamplesPerChunkChangeIndex; private int remainingSamplesPerChunkChanges; - public ChunkIterator(ParsableByteArray stsc, ParsableByteArray chunkOffsets, - boolean chunkOffsetsAreLongs) { + public ChunkIterator( + ParsableByteArray stsc, ParsableByteArray chunkOffsets, boolean chunkOffsetsAreLongs) + throws ParserException { this.stsc = stsc; this.chunkOffsets = chunkOffsets; this.chunkOffsetsAreLongs = chunkOffsetsAreLongs; @@ -1438,7 +1895,7 @@ public ChunkIterator(ParsableByteArray stsc, ParsableByteArray chunkOffsets, length = chunkOffsets.readUnsignedIntToInt(); stsc.setPosition(Atom.FULL_HEADER_SIZE); remainingSamplesPerChunkChanges = stsc.readUnsignedIntToInt(); - Assertions.checkState(stsc.readInt() == 1, "first_chunk must be 1"); + ExtractorUtil.checkContainerInput(stsc.readInt() == 1, "first_chunk must be 1"); index = -1; } @@ -1446,22 +1903,23 @@ public boolean moveNext() { if (++index == length) { return false; } - offset = chunkOffsetsAreLongs ? chunkOffsets.readUnsignedLongToLong() - : chunkOffsets.readUnsignedInt(); + offset = + chunkOffsetsAreLongs + ? chunkOffsets.readUnsignedLongToLong() + : chunkOffsets.readUnsignedInt(); if (index == nextSamplesPerChunkChangeIndex) { numSamples = stsc.readUnsignedIntToInt(); stsc.skipBytes(4); // Skip sample_description_index - nextSamplesPerChunkChangeIndex = --remainingSamplesPerChunkChanges > 0 - ? (stsc.readUnsignedIntToInt() - 1) : C.INDEX_UNSET; + nextSamplesPerChunkChangeIndex = + --remainingSamplesPerChunkChanges > 0 + ? (stsc.readUnsignedIntToInt() - 1) + : C.INDEX_UNSET; } return true; } - } - /** - * Holds data parsed from a tkhd atom. - */ + /** Holds data parsed from a tkhd atom. */ private static final class TkhdData { private final int id; @@ -1473,65 +1931,84 @@ public TkhdData(int id, long duration, int rotationDegrees) { this.duration = duration; this.rotationDegrees = rotationDegrees; } - } - /** - * Holds data parsed from an stsd atom and its children. - */ + /** Holds data parsed from an stsd atom and its children. */ private static final class StsdData { public static final int STSD_HEADER_SIZE = 8; public final TrackEncryptionBox[] trackEncryptionBoxes; - public Format format; + @Nullable public Format format; public int nalUnitLengthFieldLength; - @Track.Transformation - public int requiredSampleTransformation; + public @Track.Transformation int requiredSampleTransformation; public StsdData(int numberOfEntries) { trackEncryptionBoxes = new TrackEncryptionBox[numberOfEntries]; requiredSampleTransformation = Track.TRANSFORMATION_NONE; } + } + /** Data parsed from an esds box. */ + private static final class EsdsData { + private final @NullableType String mimeType; + private final byte @NullableType [] initializationData; + private final long bitrate; + private final long peakBitrate; + + public EsdsData( + @NullableType String mimeType, + byte @NullableType [] initializationData, + long bitrate, + long peakBitrate) { + this.mimeType = mimeType; + this.initializationData = initializationData; + this.bitrate = bitrate; + this.peakBitrate = peakBitrate; + } } - /** - * A box containing sample sizes (e.g. stsz, stz2). - */ + /** A box containing sample sizes (e.g. stsz, stz2). */ private interface SampleSizeBox { - /** - * Returns the number of samples. - */ + /** Returns the number of samples. 
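// Sketch of what ChunkIterator derives from the stsc box: stsc stores runs of
// (first_chunk, samples_per_chunk, sample_description_index), the first run must start at
// chunk 1, and each run applies until the next run's first_chunk. This standalone helper
// expands those runs into a samples-per-chunk value for every chunk; chunkCount would come
// from stco/co64. It is an illustration of the idea, not the iterator's implementation.
static int[] expandSamplesPerChunk(java.nio.ByteBuffer stscPayload, int chunkCount) {
  stscPayload.getInt();                 // version + flags.
  int entryCount = stscPayload.getInt();
  int[] samplesPerChunk = new int[chunkCount];
  int previousFirstChunk = 1;           // Chunk numbers are 1-based.
  int previousSamples = 0;
  for (int i = 0; i < entryCount; i++) {
    int firstChunk = stscPayload.getInt();
    int samples = stscPayload.getInt();
    stscPayload.getInt();               // sample_description_index, unused here.
    for (int chunk = previousFirstChunk; chunk < firstChunk; chunk++) {
      samplesPerChunk[chunk - 1] = previousSamples;
    }
    previousFirstChunk = firstChunk;
    previousSamples = samples;
  }
  for (int chunk = previousFirstChunk; chunk <= chunkCount; chunk++) {
    samplesPerChunk[chunk - 1] = previousSamples;
  }
  return samplesPerChunk;
}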
*/ int getSampleCount(); - /** - * Returns the size for the next sample. - */ - int readNextSampleSize(); - - /** - * Returns whether samples have a fixed size. - */ - boolean isFixedSampleSize(); + /** Returns the size of each sample if fixed, or {@link C#LENGTH_UNSET} otherwise. */ + int getFixedSampleSize(); + /** Returns the size for the next sample. */ + int readNextSampleSize(); } - /** - * An stsz sample size box. - */ + /** An stsz sample size box. */ /* package */ static final class StszSampleSizeBox implements SampleSizeBox { private final int fixedSampleSize; private final int sampleCount; private final ParsableByteArray data; - public StszSampleSizeBox(Atom.LeafAtom stszAtom) { + public StszSampleSizeBox(Atom.LeafAtom stszAtom, Format trackFormat) { data = stszAtom.data; data.setPosition(Atom.FULL_HEADER_SIZE); - fixedSampleSize = data.readUnsignedIntToInt(); + int fixedSampleSize = data.readUnsignedIntToInt(); + if (MimeTypes.AUDIO_RAW.equals(trackFormat.sampleMimeType)) { + int pcmFrameSize = Util.getPcmFrameSize(trackFormat.pcmEncoding, trackFormat.channelCount); + if (fixedSampleSize == 0 || fixedSampleSize % pcmFrameSize != 0) { + // The sample size from the stsz box is inconsistent with the PCM encoding and channel + // count derived from the stsd box. Choose stsd box as source of truth + // [Internal ref: b/171627904]. + Log.w( + TAG, + "Audio sample size mismatch. stsd sample size: " + + pcmFrameSize + + ", stsz sample size: " + + fixedSampleSize); + fixedSampleSize = pcmFrameSize; + } + } + this.fixedSampleSize = fixedSampleSize == 0 ? C.LENGTH_UNSET : fixedSampleSize; sampleCount = data.readUnsignedIntToInt(); } @@ -1541,20 +2018,17 @@ public int getSampleCount() { } @Override - public int readNextSampleSize() { - return fixedSampleSize == 0 ? data.readUnsignedIntToInt() : fixedSampleSize; + public int getFixedSampleSize() { + return fixedSampleSize; } @Override - public boolean isFixedSampleSize() { - return fixedSampleSize != 0; + public int readNextSampleSize() { + return fixedSampleSize == C.LENGTH_UNSET ? data.readUnsignedIntToInt() : fixedSampleSize; } - } - /** - * An stz2 sample size box. - */ + /** An stz2 sample size box. 
*/ /* package */ static final class Stz2SampleSizeBox implements SampleSizeBox { private final ParsableByteArray data; @@ -1577,6 +2051,11 @@ public int getSampleCount() { return sampleCount; } + @Override + public int getFixedSampleSize() { + return C.LENGTH_UNSET; + } + @Override public int readNextSampleSize() { if (fieldSize == 8) { @@ -1596,12 +2075,5 @@ public int readNextSampleSize() { } } } - - @Override - public boolean isFixedSampleSize() { - return false; - } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/DefaultSampleValues.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/DefaultSampleValues.java index 1ec0237356..f1f6f57774 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/DefaultSampleValues.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/DefaultSampleValues.java @@ -28,5 +28,4 @@ public DefaultSampleValues(int sampleDescriptionIndex, int duration, int size, i this.size = size; this.flags = flags; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/FixedSampleSizeRechunker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/FixedSampleSizeRechunker.java index 536f70048c..ae16c8107f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/FixedSampleSizeRechunker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/FixedSampleSizeRechunker.java @@ -15,6 +15,9 @@ */ package com.google.android.exoplayer2.extractor.mp4; +import static java.lang.Math.max; +import static java.lang.Math.min; + import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.util.Util; @@ -23,9 +26,7 @@ */ /* package */ final class FixedSampleSizeRechunker { - /** - * The result of a rechunking operation. - */ + /** The result of a rechunking operation. */ public static final class Results { public final long[] offsets; @@ -49,12 +50,9 @@ private Results( this.flags = flags; this.duration = duration; } - } - /** - * Maximum number of bytes for each buffer in rechunked output. - */ + /** Maximum number of bytes for each buffer in rechunked output. */ private static final int MAX_SAMPLE_SIZE = 8 * 1024; /** @@ -65,7 +63,10 @@ private Results( * @param chunkSampleCounts Sample counts for each of the MP4 stream's chunks. * @param timestampDeltaInTimeUnits Timestamp delta between each sample in time units. 
*/ - public static Results rechunk(int fixedSampleSize, long[] chunkOffsets, int[] chunkSampleCounts, + public static Results rechunk( + int fixedSampleSize, + long[] chunkOffsets, + int[] chunkSampleCounts, long timestampDeltaInTimeUnits) { int maxSampleCount = MAX_SAMPLE_SIZE / fixedSampleSize; @@ -88,11 +89,11 @@ public static Results rechunk(int fixedSampleSize, long[] chunkOffsets, int[] ch long sampleOffset = chunkOffsets[chunkIndex]; while (chunkSamplesRemaining > 0) { - int bufferSampleCount = Math.min(maxSampleCount, chunkSamplesRemaining); + int bufferSampleCount = min(maxSampleCount, chunkSamplesRemaining); offsets[newSampleIndex] = sampleOffset; sizes[newSampleIndex] = fixedSampleSize * bufferSampleCount; - maximumSize = Math.max(maximumSize, sizes[newSampleIndex]); + maximumSize = max(maximumSize, sizes[newSampleIndex]); timestamps[newSampleIndex] = (timestampDeltaInTimeUnits * originalSampleIndex); flags[newSampleIndex] = C.BUFFER_FLAG_KEY_FRAME; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/FragmentedMp4Extractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/FragmentedMp4Extractor.java index c0d1581c39..c918fc5790 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/FragmentedMp4Extractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/FragmentedMp4Extractor.java @@ -15,6 +15,14 @@ */ package com.google.android.exoplayer2.extractor.mp4; +import static com.google.android.exoplayer2.extractor.mp4.AtomParsers.parseTraks; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Util.castNonNull; +import static com.google.android.exoplayer2.util.Util.nullSafeArrayCopy; +import static java.lang.Math.max; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.util.Pair; import android.util.SparseArray; import androidx.annotation.IntDef; @@ -25,11 +33,13 @@ import com.google.android.exoplayer2.audio.Ac4Util; import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; +import com.google.android.exoplayer2.extractor.CeaUtil; import com.google.android.exoplayer2.extractor.ChunkIndex; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.ExtractorsFactory; +import com.google.android.exoplayer2.extractor.GaplessInfoHolder; import com.google.android.exoplayer2.extractor.PositionHolder; import com.google.android.exoplayer2.extractor.SeekMap; import com.google.android.exoplayer2.extractor.TrackOutput; @@ -37,8 +47,6 @@ import com.google.android.exoplayer2.extractor.mp4.Atom.LeafAtom; import com.google.android.exoplayer2.metadata.emsg.EventMessage; import com.google.android.exoplayer2.metadata.emsg.EventMessageEncoder; -import com.google.android.exoplayer2.text.cea.CeaUtil; -import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.NalUnitUtil; @@ -49,6 +57,7 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import 
java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; @@ -67,18 +76,17 @@ public class FragmentedMp4Extractor implements Extractor { /** * Flags controlling the behavior of the extractor. Possible flag values are {@link * #FLAG_WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME}, {@link #FLAG_WORKAROUND_IGNORE_TFDT_BOX}, - * {@link #FLAG_ENABLE_EMSG_TRACK}, {@link #FLAG_SIDELOADED} and {@link - * #FLAG_WORKAROUND_IGNORE_EDIT_LISTS}. + * {@link #FLAG_ENABLE_EMSG_TRACK} and {@link #FLAG_WORKAROUND_IGNORE_EDIT_LISTS}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef( flag = true, value = { FLAG_WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME, FLAG_WORKAROUND_IGNORE_TFDT_BOX, FLAG_ENABLE_EMSG_TRACK, - FLAG_SIDELOADED, FLAG_WORKAROUND_IGNORE_EDIT_LISTS }) public @interface Flags {} @@ -86,8 +94,8 @@ public class FragmentedMp4Extractor implements Extractor { * Flag to work around an issue in some video streams where every frame is marked as a sync frame. * The workaround overrides the sync frame flags in the stream, forcing them to false except for * the first sample in each segment. - *
<p>
      - * This flag does nothing if the stream is not a video stream. + * + * <p>
      This flag does nothing if the stream is not a video stream. */ public static final int FLAG_WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME = 1; /** Flag to ignore any tfdt boxes in the stream. */ @@ -97,11 +105,7 @@ public class FragmentedMp4Extractor implements Extractor { * messages in the stream will be delivered as samples to this track. */ public static final int FLAG_ENABLE_EMSG_TRACK = 1 << 2; // 4 - /** - * Flag to indicate that the {@link Track} was sideloaded, instead of being declared by the MP4 - * container. - */ - private static final int FLAG_SIDELOADED = 1 << 3; // 8 + /** Flag to ignore any edit lists in the stream. */ public static final int FLAG_WORKAROUND_IGNORE_EDIT_LISTS = 1 << 4; // 16 @@ -112,8 +116,11 @@ public class FragmentedMp4Extractor implements Extractor { private static final byte[] PIFF_SAMPLE_ENCRYPTION_BOX_EXTENDED_TYPE = new byte[] {-94, 57, 79, 82, 90, -101, 79, 20, -94, 68, 108, 66, 124, 100, -115, -12}; + + // Extra tracks constants. private static final Format EMSG_FORMAT = - Format.createSampleFormat(null, MimeTypes.APPLICATION_EMSG, Format.OFFSET_SAMPLE_RELATIVE); + new Format.Builder().setSampleMimeType(MimeTypes.APPLICATION_EMSG).build(); + private static final int EXTRA_TRACKS_BASE_ID = 100; // Parser states. private static final int STATE_READING_ATOM_HEADER = 0; @@ -123,7 +130,7 @@ public class FragmentedMp4Extractor implements Extractor { private static final int STATE_READING_SAMPLE_CONTINUE = 4; // Workarounds. - @Flags private final int flags; + private final @Flags int flags; @Nullable private final Track sideloadedTrack; // Sideloaded data. @@ -154,23 +161,23 @@ public class FragmentedMp4Extractor implements Extractor { private int atomType; private long atomSize; private int atomHeaderBytesRead; - private ParsableByteArray atomData; + @Nullable private ParsableByteArray atomData; private long endOfMdatPosition; private int pendingMetadataSampleBytes; private long pendingSeekTimeUs; private long durationUs; private long segmentIndexEarliestPresentationTimeUs; - private TrackBundle currentTrackBundle; + @Nullable private TrackBundle currentTrackBundle; private int sampleSize; private int sampleBytesWritten; private int sampleCurrentNalBytesRemaining; private boolean processSeiNalUnitPayload; - // Extractor output. + // Outputs. private ExtractorOutput extractorOutput; private TrackOutput[] emsgTrackOutputs; - private TrackOutput[] cea608TrackOutputs; + private TrackOutput[] ceaTrackOutputs; // Whether extractorOutput.seekMap has been called. private boolean haveOutputSeekMap; @@ -245,7 +252,7 @@ public FragmentedMp4Extractor( @Nullable Track sideloadedTrack, List closedCaptionFormats, @Nullable TrackOutput additionalEmsgTrackOutput) { - this.flags = flags | (sideloadedTrack != null ? 
FLAG_SIDELOADED : 0); + this.flags = flags; this.timestampAdjuster = timestampAdjuster; this.sideloadedTrack = sideloadedTrack; this.closedCaptionFormats = Collections.unmodifiableList(closedCaptionFormats); @@ -263,22 +270,39 @@ public FragmentedMp4Extractor( durationUs = C.TIME_UNSET; pendingSeekTimeUs = C.TIME_UNSET; segmentIndexEarliestPresentationTimeUs = C.TIME_UNSET; - enterReadingAtomHeaderState(); + extractorOutput = ExtractorOutput.PLACEHOLDER; + emsgTrackOutputs = new TrackOutput[0]; + ceaTrackOutputs = new TrackOutput[0]; } @Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { + public boolean sniff(ExtractorInput input) throws IOException { return Sniffer.sniffFragmented(input); } @Override public void init(ExtractorOutput output) { extractorOutput = output; + enterReadingAtomHeaderState(); + initExtraTracks(); if (sideloadedTrack != null) { - TrackBundle bundle = new TrackBundle(output.track(0, sideloadedTrack.type)); - bundle.init(sideloadedTrack, new DefaultSampleValues(0, 0, 0, 0)); + TrackBundle bundle = + new TrackBundle( + output.track(0, sideloadedTrack.type), + new TrackSampleTable( + sideloadedTrack, + /* offsets= */ new long[0], + /* sizes= */ new int[0], + /* maximumSize= */ 0, + /* timestampsUs= */ new long[0], + /* flags= */ new int[0], + /* durationUs= */ 0), + new DefaultSampleValues( + /* sampleDescriptionIndex= */ 0, + /* duration= */ 0, + /* size= */ 0, + /* flags= */ 0)); trackBundles.put(0, bundle); - maybeInitExtraTracks(); extractorOutput.endTracks(); } } @@ -287,7 +311,7 @@ public void init(ExtractorOutput output) { public void seek(long position, long timeUs) { int trackCount = trackBundles.size(); for (int i = 0; i < trackCount; i++) { - trackBundles.valueAt(i).reset(); + trackBundles.valueAt(i).resetFragmentInfo(); } pendingMetadataSampleInfos.clear(); pendingMetadataSampleBytes = 0; @@ -302,8 +326,7 @@ public void release() { } @Override - public int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { while (true) { switch (parserState) { case STATE_READING_ATOM_HEADER: @@ -330,10 +353,10 @@ private void enterReadingAtomHeaderState() { atomHeaderBytesRead = 0; } - private boolean readAtomHeader(ExtractorInput input) throws IOException, InterruptedException { + private boolean readAtomHeader(ExtractorInput input) throws IOException { if (atomHeaderBytesRead == 0) { // Read the standard length atom header. - if (!input.readFully(atomHeader.data, 0, Atom.HEADER_SIZE, true)) { + if (!input.readFully(atomHeader.getData(), 0, Atom.HEADER_SIZE, true)) { return false; } atomHeaderBytesRead = Atom.HEADER_SIZE; @@ -345,7 +368,7 @@ private boolean readAtomHeader(ExtractorInput input) throws IOException, Interru if (atomSize == Atom.DEFINES_LARGE_SIZE) { // Read the large size. 
int headerBytesRemaining = Atom.LONG_HEADER_SIZE - Atom.HEADER_SIZE; - input.readFully(atomHeader.data, Atom.HEADER_SIZE, headerBytesRemaining); + input.readFully(atomHeader.getData(), Atom.HEADER_SIZE, headerBytesRemaining); atomHeaderBytesRead += headerBytesRemaining; atomSize = atomHeader.readUnsignedLongToLong(); } else if (atomSize == Atom.EXTENDS_TO_END_SIZE) { @@ -361,10 +384,19 @@ private boolean readAtomHeader(ExtractorInput input) throws IOException, Interru } if (atomSize < atomHeaderBytesRead) { - throw new ParserException("Atom size less than header length (unsupported)."); + throw ParserException.createForUnsupportedContainerFeature( + "Atom size less than header length (unsupported)."); } long atomPosition = input.getPosition() - atomHeaderBytesRead; + if (atomType == Atom.TYPE_moof || atomType == Atom.TYPE_mdat) { + if (!haveOutputSeekMap) { + // This must be the first moof or mdat in the stream. + extractorOutput.seekMap(new SeekMap.Unseekable(durationUs, atomPosition)); + haveOutputSeekMap = true; + } + } + if (atomType == Atom.TYPE_moof) { // The data positions may be updated when parsing the tfhd/trun. int trackCount = trackBundles.size(); @@ -379,11 +411,6 @@ private boolean readAtomHeader(ExtractorInput input) throws IOException, Interru if (atomType == Atom.TYPE_mdat) { currentTrackBundle = null; endOfMdatPosition = atomPosition + atomSize; - if (!haveOutputSeekMap) { - // This must be the first mdat in the stream. - extractorOutput.seekMap(new SeekMap.Unseekable(durationUs, atomPosition)); - haveOutputSeekMap = true; - } parserState = STATE_READING_ENCRYPTION_DATA; return true; } @@ -399,17 +426,21 @@ private boolean readAtomHeader(ExtractorInput input) throws IOException, Interru } } else if (shouldParseLeafAtom(atomType)) { if (atomHeaderBytesRead != Atom.HEADER_SIZE) { - throw new ParserException("Leaf atom defines extended atom size (unsupported)."); + throw ParserException.createForUnsupportedContainerFeature( + "Leaf atom defines extended atom size (unsupported)."); } if (atomSize > Integer.MAX_VALUE) { - throw new ParserException("Leaf atom with length > 2147483647 (unsupported)."); + throw ParserException.createForUnsupportedContainerFeature( + "Leaf atom with length > 2147483647 (unsupported)."); } - atomData = new ParsableByteArray((int) atomSize); - System.arraycopy(atomHeader.data, 0, atomData.data, 0, Atom.HEADER_SIZE); + ParsableByteArray atomData = new ParsableByteArray((int) atomSize); + System.arraycopy(atomHeader.getData(), 0, atomData.getData(), 0, Atom.HEADER_SIZE); + this.atomData = atomData; parserState = STATE_READING_ATOM_PAYLOAD; } else { if (atomSize > Integer.MAX_VALUE) { - throw new ParserException("Skipping atom with length > 2147483647 (unsupported)."); + throw ParserException.createForUnsupportedContainerFeature( + "Skipping atom with length > 2147483647 (unsupported)."); } atomData = null; parserState = STATE_READING_ATOM_PAYLOAD; @@ -418,10 +449,11 @@ private boolean readAtomHeader(ExtractorInput input) throws IOException, Interru return true; } - private void readAtomPayload(ExtractorInput input) throws IOException, InterruptedException { + private void readAtomPayload(ExtractorInput input) throws IOException { int atomPayloadSize = (int) atomSize - atomHeaderBytesRead; + @Nullable ParsableByteArray atomData = this.atomData; if (atomData != null) { - input.readFully(atomData.data, Atom.HEADER_SIZE, atomPayloadSize); + input.readFully(atomData.getData(), Atom.HEADER_SIZE, atomPayloadSize); onLeafAtomRead(new LeafAtom(atomType, 
atomData), input.getPosition()); } else { input.skipFully(atomPayloadSize); @@ -460,12 +492,12 @@ private void onContainerAtomRead(ContainerAtom container) throws ParserException } private void onMoovContainerAtomRead(ContainerAtom moov) throws ParserException { - Assertions.checkState(sideloadedTrack == null, "Unexpected moov box."); + checkState(sideloadedTrack == null, "Unexpected moov box."); @Nullable DrmInitData drmInitData = getDrmInitDataFromAtoms(moov.leafChildren); - // Read declaration of track fragments in the Moov box. - ContainerAtom mvex = moov.getContainerAtomOfType(Atom.TYPE_mvex); + // Read declaration of track fragments in the moov box. + ContainerAtom mvex = checkNotNull(moov.getContainerAtomOfType(Atom.TYPE_mvex)); SparseArray defaultSampleValuesArray = new SparseArray<>(); long duration = C.TIME_UNSET; int mvexChildrenSize = mvex.leafChildren.size(); @@ -479,46 +511,40 @@ private void onMoovContainerAtomRead(ContainerAtom moov) throws ParserException } } - // Construction of tracks. - SparseArray tracks = new SparseArray<>(); - int moovContainerChildrenSize = moov.containerChildren.size(); - for (int i = 0; i < moovContainerChildrenSize; i++) { - Atom.ContainerAtom atom = moov.containerChildren.get(i); - if (atom.type == Atom.TYPE_trak) { - Track track = - modifyTrack( - AtomParsers.parseTrak( - atom, - moov.getLeafAtomOfType(Atom.TYPE_mvhd), - duration, - drmInitData, - (flags & FLAG_WORKAROUND_IGNORE_EDIT_LISTS) != 0, - false)); - if (track != null) { - tracks.put(track.id, track); - } - } - } + // Construction of tracks and sample tables. + List sampleTables = + parseTraks( + moov, + new GaplessInfoHolder(), + duration, + drmInitData, + /* ignoreEditLists= */ (flags & FLAG_WORKAROUND_IGNORE_EDIT_LISTS) != 0, + /* isQuickTime= */ false, + this::modifyTrack); - int trackCount = tracks.size(); + int trackCount = sampleTables.size(); if (trackBundles.size() == 0) { // We need to create the track bundles. for (int i = 0; i < trackCount; i++) { - Track track = tracks.valueAt(i); - TrackBundle trackBundle = new TrackBundle(extractorOutput.track(i, track.type)); - trackBundle.init(track, getDefaultSampleValues(defaultSampleValuesArray, track.id)); + TrackSampleTable sampleTable = sampleTables.get(i); + Track track = sampleTable.track; + TrackBundle trackBundle = + new TrackBundle( + extractorOutput.track(i, track.type), + sampleTable, + getDefaultSampleValues(defaultSampleValuesArray, track.id)); trackBundles.put(track.id, trackBundle); - durationUs = Math.max(durationUs, track.durationUs); + durationUs = max(durationUs, track.durationUs); } - maybeInitExtraTracks(); extractorOutput.endTracks(); } else { - Assertions.checkState(trackBundles.size() == trackCount); + checkState(trackBundles.size() == trackCount); for (int i = 0; i < trackCount; i++) { - Track track = tracks.valueAt(i); + TrackSampleTable sampleTable = sampleTables.get(i); + Track track = sampleTable.track; trackBundles .get(track.id) - .init(track, getDefaultSampleValues(defaultSampleValuesArray, track.id)); + .reset(sampleTable, getDefaultSampleValues(defaultSampleValuesArray, track.id)); } } } @@ -535,11 +561,11 @@ private DefaultSampleValues getDefaultSampleValues( // See https://github.com/google/ExoPlayer/issues/4477. 
return defaultSampleValuesArray.valueAt(/* index= */ 0); } - return Assertions.checkNotNull(defaultSampleValuesArray.get(trackId)); + return checkNotNull(defaultSampleValuesArray.get(trackId)); } private void onMoofContainerAtomRead(ContainerAtom moof) throws ParserException { - parseMoof(moof, trackBundles, flags, scratchBytes); + parseMoof(moof, trackBundles, sideloadedTrack != null, flags, scratchBytes); @Nullable DrmInitData drmInitData = getDrmInitDataFromAtoms(moof.leafChildren); if (drmInitData != null) { @@ -558,36 +584,34 @@ private void onMoofContainerAtomRead(ContainerAtom moof) throws ParserException } } - private void maybeInitExtraTracks() { - if (emsgTrackOutputs == null) { - emsgTrackOutputs = new TrackOutput[2]; - int emsgTrackOutputCount = 0; - if (additionalEmsgTrackOutput != null) { - emsgTrackOutputs[emsgTrackOutputCount++] = additionalEmsgTrackOutput; - } - if ((flags & FLAG_ENABLE_EMSG_TRACK) != 0) { - emsgTrackOutputs[emsgTrackOutputCount++] = - extractorOutput.track(trackBundles.size(), C.TRACK_TYPE_METADATA); - } - emsgTrackOutputs = Arrays.copyOf(emsgTrackOutputs, emsgTrackOutputCount); + private void initExtraTracks() { + int nextExtraTrackId = EXTRA_TRACKS_BASE_ID; - for (TrackOutput eventMessageTrackOutput : emsgTrackOutputs) { - eventMessageTrackOutput.format(EMSG_FORMAT); - } + emsgTrackOutputs = new TrackOutput[2]; + int emsgTrackOutputCount = 0; + if (additionalEmsgTrackOutput != null) { + emsgTrackOutputs[emsgTrackOutputCount++] = additionalEmsgTrackOutput; } - if (cea608TrackOutputs == null) { - cea608TrackOutputs = new TrackOutput[closedCaptionFormats.size()]; - for (int i = 0; i < cea608TrackOutputs.length; i++) { - TrackOutput output = extractorOutput.track(trackBundles.size() + 1 + i, C.TRACK_TYPE_TEXT); - output.format(closedCaptionFormats.get(i)); - cea608TrackOutputs[i] = output; - } + if ((flags & FLAG_ENABLE_EMSG_TRACK) != 0) { + emsgTrackOutputs[emsgTrackOutputCount++] = + extractorOutput.track(nextExtraTrackId++, C.TRACK_TYPE_METADATA); + } + emsgTrackOutputs = nullSafeArrayCopy(emsgTrackOutputs, emsgTrackOutputCount); + for (TrackOutput eventMessageTrackOutput : emsgTrackOutputs) { + eventMessageTrackOutput.format(EMSG_FORMAT); + } + + ceaTrackOutputs = new TrackOutput[closedCaptionFormats.size()]; + for (int i = 0; i < ceaTrackOutputs.length; i++) { + TrackOutput output = extractorOutput.track(nextExtraTrackId++, C.TRACK_TYPE_TEXT); + output.format(closedCaptionFormats.get(i)); + ceaTrackOutputs[i] = output; } } /** Handles an emsg atom (defined in 23009-1). 
*/ private void onEmsgLeafAtomRead(ParsableByteArray atom) { - if (emsgTrackOutputs == null || emsgTrackOutputs.length == 0) { + if (emsgTrackOutputs.length == 0) { return; } atom.setPosition(Atom.HEADER_SIZE); @@ -602,8 +626,8 @@ private void onEmsgLeafAtomRead(ParsableByteArray atom) { long id; switch (version) { case 0: - schemeIdUri = Assertions.checkNotNull(atom.readNullTerminatedString()); - value = Assertions.checkNotNull(atom.readNullTerminatedString()); + schemeIdUri = checkNotNull(atom.readNullTerminatedString()); + value = checkNotNull(atom.readNullTerminatedString()); timescale = atom.readUnsignedInt(); presentationTimeDeltaUs = Util.scaleLargeTimestamp(atom.readUnsignedInt(), C.MICROS_PER_SECOND, timescale); @@ -621,8 +645,8 @@ private void onEmsgLeafAtomRead(ParsableByteArray atom) { durationMs = Util.scaleLargeTimestamp(atom.readUnsignedInt(), C.MILLIS_PER_SECOND, timescale); id = atom.readUnsignedInt(); - schemeIdUri = Assertions.checkNotNull(atom.readNullTerminatedString()); - value = Assertions.checkNotNull(atom.readNullTerminatedString()); + schemeIdUri = checkNotNull(atom.readNullTerminatedString()); + value = checkNotNull(atom.readNullTerminatedString()); break; default: Log.w(TAG, "Skipping unsupported emsg version: " + version); @@ -642,14 +666,23 @@ private void onEmsgLeafAtomRead(ParsableByteArray atom) { emsgTrackOutput.sampleData(encodedEventMessage, sampleSize); } - // Output the sample metadata. This is made a little complicated because emsg-v0 atoms - // have presentation time *delta* while v1 atoms have absolute presentation time. + // Output the sample metadata. if (sampleTimeUs == C.TIME_UNSET) { - // We need the first sample timestamp in the segment before we can output the metadata. + // We're processing a v0 emsg atom, which contains a presentation time delta, and cannot yet + // calculate its absolute sample timestamp. Defer outputting the metadata until we can. + pendingMetadataSampleInfos.addLast( + new MetadataSampleInfo( + presentationTimeDeltaUs, /* sampleTimeIsRelative= */ true, sampleSize)); + pendingMetadataSampleBytes += sampleSize; + } else if (!pendingMetadataSampleInfos.isEmpty()) { + // We also need to defer outputting metadata if pendingMetadataSampleInfos is non-empty, else + // we will output metadata for samples in the wrong order. See: + // https://github.com/google/ExoPlayer/issues/9996. pendingMetadataSampleInfos.addLast( - new MetadataSampleInfo(presentationTimeDeltaUs, sampleSize)); + new MetadataSampleInfo(sampleTimeUs, /* sampleTimeIsRelative= */ false, sampleSize)); pendingMetadataSampleBytes += sampleSize; } else { + // We can output the sample metadata immediately. if (timestampAdjuster != null) { sampleTimeUs = timestampAdjuster.adjustSampleTimestamp(sampleTimeUs); } @@ -669,13 +702,16 @@ private static Pair parseTrex(ParsableByteArray tr int defaultSampleSize = trex.readInt(); int defaultSampleFlags = trex.readInt(); - return Pair.create(trackId, new DefaultSampleValues(defaultSampleDescriptionIndex, - defaultSampleDuration, defaultSampleSize, defaultSampleFlags)); + return Pair.create( + trackId, + new DefaultSampleValues( + defaultSampleDescriptionIndex, + defaultSampleDuration, + defaultSampleSize, + defaultSampleFlags)); } - /** - * Parses an mehd atom (defined in 14496-12). - */ + /** Parses an mehd atom (defined in 14496-12). 
*/ private static long parseMehd(ParsableByteArray mehd) { mehd.setPosition(Atom.HEADER_SIZE); int fullAtom = mehd.readInt(); @@ -683,64 +719,74 @@ private static long parseMehd(ParsableByteArray mehd) { return version == 0 ? mehd.readUnsignedInt() : mehd.readUnsignedLongToLong(); } - private static void parseMoof(ContainerAtom moof, SparseArray trackBundleArray, - @Flags int flags, byte[] extendedTypeScratch) throws ParserException { + private static void parseMoof( + ContainerAtom moof, + SparseArray trackBundles, + boolean haveSideloadedTrack, + @Flags int flags, + byte[] extendedTypeScratch) + throws ParserException { int moofContainerChildrenSize = moof.containerChildren.size(); for (int i = 0; i < moofContainerChildrenSize; i++) { Atom.ContainerAtom child = moof.containerChildren.get(i); // TODO: Support multiple traf boxes per track in a single moof. if (child.type == Atom.TYPE_traf) { - parseTraf(child, trackBundleArray, flags, extendedTypeScratch); + parseTraf(child, trackBundles, haveSideloadedTrack, flags, extendedTypeScratch); } } } - /** - * Parses a traf atom (defined in 14496-12). - */ - private static void parseTraf(ContainerAtom traf, SparseArray trackBundleArray, - @Flags int flags, byte[] extendedTypeScratch) throws ParserException { - LeafAtom tfhd = traf.getLeafAtomOfType(Atom.TYPE_tfhd); - TrackBundle trackBundle = parseTfhd(tfhd.data, trackBundleArray); + /** Parses a traf atom (defined in 14496-12). */ + private static void parseTraf( + ContainerAtom traf, + SparseArray trackBundles, + boolean haveSideloadedTrack, + @Flags int flags, + byte[] extendedTypeScratch) + throws ParserException { + LeafAtom tfhd = checkNotNull(traf.getLeafAtomOfType(Atom.TYPE_tfhd)); + @Nullable TrackBundle trackBundle = parseTfhd(tfhd.data, trackBundles, haveSideloadedTrack); if (trackBundle == null) { return; } TrackFragment fragment = trackBundle.fragment; - long decodeTime = fragment.nextFragmentDecodeTime; - trackBundle.reset(); - - LeafAtom tfdtAtom = traf.getLeafAtomOfType(Atom.TYPE_tfdt); + long fragmentDecodeTime = fragment.nextFragmentDecodeTime; + boolean fragmentDecodeTimeIncludesMoov = fragment.nextFragmentDecodeTimeIncludesMoov; + trackBundle.resetFragmentInfo(); + trackBundle.currentlyInFragment = true; + @Nullable LeafAtom tfdtAtom = traf.getLeafAtomOfType(Atom.TYPE_tfdt); if (tfdtAtom != null && (flags & FLAG_WORKAROUND_IGNORE_TFDT_BOX) == 0) { - decodeTime = parseTfdt(traf.getLeafAtomOfType(Atom.TYPE_tfdt).data); + fragment.nextFragmentDecodeTime = parseTfdt(tfdtAtom.data); + fragment.nextFragmentDecodeTimeIncludesMoov = true; + } else { + fragment.nextFragmentDecodeTime = fragmentDecodeTime; + fragment.nextFragmentDecodeTimeIncludesMoov = fragmentDecodeTimeIncludesMoov; } - parseTruns(traf, trackBundle, decodeTime, flags); + parseTruns(traf, trackBundle, flags); - TrackEncryptionBox encryptionBox = trackBundle.track - .getSampleDescriptionEncryptionBox(fragment.header.sampleDescriptionIndex); + @Nullable + TrackEncryptionBox encryptionBox = + trackBundle.moovSampleTable.track.getSampleDescriptionEncryptionBox( + checkNotNull(fragment.header).sampleDescriptionIndex); - LeafAtom saiz = traf.getLeafAtomOfType(Atom.TYPE_saiz); + @Nullable LeafAtom saiz = traf.getLeafAtomOfType(Atom.TYPE_saiz); if (saiz != null) { - parseSaiz(encryptionBox, saiz.data, fragment); + parseSaiz(checkNotNull(encryptionBox), saiz.data, fragment); } - LeafAtom saio = traf.getLeafAtomOfType(Atom.TYPE_saio); + @Nullable LeafAtom saio = traf.getLeafAtomOfType(Atom.TYPE_saio); if (saio != null) { 
parseSaio(saio.data, fragment); } - LeafAtom senc = traf.getLeafAtomOfType(Atom.TYPE_senc); + @Nullable LeafAtom senc = traf.getLeafAtomOfType(Atom.TYPE_senc); if (senc != null) { parseSenc(senc.data, fragment); } - LeafAtom sbgp = traf.getLeafAtomOfType(Atom.TYPE_sbgp); - LeafAtom sgpd = traf.getLeafAtomOfType(Atom.TYPE_sgpd); - if (sbgp != null && sgpd != null) { - parseSgpd(sbgp.data, sgpd.data, encryptionBox != null ? encryptionBox.schemeType : null, - fragment); - } + parseSampleGroups(traf, encryptionBox != null ? encryptionBox.schemeType : null, fragment); int leafChildrenSize = traf.leafChildren.size(); for (int i = 0; i < leafChildrenSize; i++) { @@ -751,8 +797,7 @@ private static void parseTraf(ContainerAtom traf, SparseArray track } } - private static void parseTruns( - ContainerAtom traf, TrackBundle trackBundle, long decodeTime, @Flags int flags) + private static void parseTruns(ContainerAtom traf, TrackBundle trackBundle, @Flags int flags) throws ParserException { int trunCount = 0; int totalSampleCount = 0; @@ -780,14 +825,15 @@ private static void parseTruns( for (int i = 0; i < leafChildrenSize; i++) { LeafAtom trun = leafChildren.get(i); if (trun.type == Atom.TYPE_trun) { - trunStartPosition = parseTrun(trackBundle, trunIndex++, decodeTime, flags, trun.data, - trunStartPosition); + trunStartPosition = + parseTrun(trackBundle, trunIndex++, flags, trun.data, trunStartPosition); } } } - private static void parseSaiz(TrackEncryptionBox encryptionBox, ParsableByteArray saiz, - TrackFragment out) throws ParserException { + private static void parseSaiz( + TrackEncryptionBox encryptionBox, ParsableByteArray saiz, TrackFragment out) + throws ParserException { int vectorSize = encryptionBox.perSampleIvSize; saiz.setPosition(Atom.HEADER_SIZE); int fullAtom = saiz.readInt(); @@ -798,8 +844,13 @@ private static void parseSaiz(TrackEncryptionBox encryptionBox, ParsableByteArra int defaultSampleInfoSize = saiz.readUnsignedByte(); int sampleCount = saiz.readUnsignedIntToInt(); - if (sampleCount != out.sampleCount) { - throw new ParserException("Length mismatch: " + sampleCount + ", " + out.sampleCount); + if (sampleCount > out.sampleCount) { + throw ParserException.createForMalformedContainer( + "Saiz sample count " + + sampleCount + + " is greater than fragment sample count" + + out.sampleCount, + /* cause= */ null); } int totalSize = 0; @@ -815,7 +866,10 @@ private static void parseSaiz(TrackEncryptionBox encryptionBox, ParsableByteArra totalSize += defaultSampleInfoSize * sampleCount; Arrays.fill(out.sampleHasSubsampleEncryptionTable, 0, sampleCount, subsampleEncryption); } - out.initEncryptionData(totalSize); + Arrays.fill(out.sampleHasSubsampleEncryptionTable, sampleCount, out.sampleCount, false); + if (totalSize > 0) { + out.initEncryptionData(totalSize); + } } /** @@ -835,7 +889,8 @@ private static void parseSaio(ParsableByteArray saio, TrackFragment out) throws int entryCount = saio.readUnsignedIntToInt(); if (entryCount != 1) { // We only support one trun element currently, so always expect one entry. - throw new ParserException("Unexpected saio entry count: " + entryCount); + throw ParserException.createForMalformedContainer( + "Unexpected saio entry count: " + entryCount, /* cause= */ null); } int version = Atom.parseFullAtomVersion(fullAtom); @@ -850,16 +905,21 @@ private static void parseSaio(ParsableByteArray saio, TrackFragment out) throws * * @param tfhd The tfhd atom to decode. 
* @param trackBundles The track bundles, one of which corresponds to the tfhd atom being parsed. + * @param haveSideloadedTrack Whether {@code trackBundles} contains a single bundle corresponding + * to a side-loaded track. * @return The {@link TrackBundle} to which the {@link TrackFragment} belongs, or null if the tfhd * does not refer to any {@link TrackBundle}. */ + @Nullable private static TrackBundle parseTfhd( - ParsableByteArray tfhd, SparseArray trackBundles) { + ParsableByteArray tfhd, SparseArray trackBundles, boolean haveSideloadedTrack) { tfhd.setPosition(Atom.HEADER_SIZE); int fullAtom = tfhd.readInt(); int atomFlags = Atom.parseFullAtomFlags(fullAtom); int trackId = tfhd.readInt(); - TrackBundle trackBundle = getTrackBundle(trackBundles, trackId); + @Nullable + TrackBundle trackBundle = + haveSideloadedTrack ? trackBundles.valueAt(0) : trackBundles.get(trackId); if (trackBundle == null) { return null; } @@ -886,22 +946,15 @@ private static TrackBundle parseTfhd( ((atomFlags & 0x20 /* default_sample_flags_present */) != 0) ? tfhd.readInt() : defaultSampleValues.flags; - trackBundle.fragment.header = new DefaultSampleValues(defaultSampleDescriptionIndex, - defaultSampleDuration, defaultSampleSize, defaultSampleFlags); + trackBundle.fragment.header = + new DefaultSampleValues( + defaultSampleDescriptionIndex, + defaultSampleDuration, + defaultSampleSize, + defaultSampleFlags); return trackBundle; } - private static @Nullable TrackBundle getTrackBundle( - SparseArray trackBundles, int trackId) { - if (trackBundles.size() == 1) { - // Ignore track id if there is only one track. This is either because we have a side-loaded - // track (flag FLAG_SIDELOADED) or to cope with non-matching track indices (see - // https://github.com/google/ExoPlayer/issues/4083). - return trackBundles.valueAt(/* index= */ 0); - } - return trackBundles.get(trackId); - } - /** * Parses a tfdt atom (defined in 14496-12). * @@ -921,7 +974,6 @@ private static long parseTfdt(ParsableByteArray tfdt) { * @param trackBundle The {@link TrackBundle} that contains the {@link TrackFragment} into which * parsed data should be placed. * @param index Index of the track run in the fragment. - * @param decodeTime The decode time of the first sample in the fragment run. * @param flags Flags to allow any required workaround to be executed. * @param trun The trun atom to decode. * @return The starting position of samples for the next run. @@ -929,7 +981,6 @@ private static long parseTfdt(ParsableByteArray tfdt) { private static int parseTrun( TrackBundle trackBundle, int index, - long decodeTime, @Flags int flags, ParsableByteArray trun, int trackRunStart) @@ -938,9 +989,9 @@ private static int parseTrun( int fullAtom = trun.readInt(); int atomFlags = Atom.parseFullAtomFlags(fullAtom); - Track track = trackBundle.track; + Track track = trackBundle.moovSampleTable.track; TrackFragment fragment = trackBundle.fragment; - DefaultSampleValues defaultSampleValues = fragment.header; + DefaultSampleValues defaultSampleValues = castNonNull(fragment.header); fragment.trunLength[index] = trun.readUnsignedIntToInt(); fragment.trunDataPosition[index] = fragment.dataPosition; @@ -962,53 +1013,55 @@ private static int parseTrun( // Offset to the entire video timeline. In the presence of B-frames this is usually used to // ensure that the first frame's presentation timestamp is zero. 
- long edtsOffsetUs = 0; + long edtsOffset = 0; // Currently we only support a single edit that moves the entire media timeline (indicated by // duration == 0). Other uses of edit lists are uncommon and unsupported. - if (track.editListDurations != null && track.editListDurations.length == 1 + if (track.editListDurations != null + && track.editListDurations.length == 1 && track.editListDurations[0] == 0) { - edtsOffsetUs = - Util.scaleLargeTimestamp( - track.editListMediaTimes[0], C.MICROS_PER_SECOND, track.timescale); + edtsOffset = castNonNull(track.editListMediaTimes)[0]; } int[] sampleSizeTable = fragment.sampleSizeTable; - int[] sampleCompositionTimeOffsetUsTable = fragment.sampleCompositionTimeOffsetUsTable; - long[] sampleDecodingTimeUsTable = fragment.sampleDecodingTimeUsTable; + long[] samplePresentationTimesUs = fragment.samplePresentationTimesUs; boolean[] sampleIsSyncFrameTable = fragment.sampleIsSyncFrameTable; - boolean workaroundEveryVideoFrameIsSyncFrame = track.type == C.TRACK_TYPE_VIDEO - && (flags & FLAG_WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME) != 0; + boolean workaroundEveryVideoFrameIsSyncFrame = + track.type == C.TRACK_TYPE_VIDEO + && (flags & FLAG_WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME) != 0; int trackRunEnd = trackRunStart + fragment.trunLength[index]; long timescale = track.timescale; - long cumulativeTime = index > 0 ? fragment.nextFragmentDecodeTime : decodeTime; + long cumulativeTime = fragment.nextFragmentDecodeTime; for (int i = trackRunStart; i < trackRunEnd; i++) { // Use trun values if present, otherwise tfhd, otherwise trex. int sampleDuration = checkNonNegative(sampleDurationsPresent ? trun.readInt() : defaultSampleValues.duration); int sampleSize = checkNonNegative(sampleSizesPresent ? trun.readInt() : defaultSampleValues.size); - int sampleFlags = (i == 0 && firstSampleFlagsPresent) ? firstSampleFlags - : sampleFlagsPresent ? trun.readInt() : defaultSampleValues.flags; + int sampleFlags = + sampleFlagsPresent + ? trun.readInt() + : (i == 0 && firstSampleFlagsPresent) ? firstSampleFlags : defaultSampleValues.flags; + int sampleCompositionTimeOffset = 0; if (sampleCompositionTimeOffsetsPresent) { // The BMFF spec (ISO 14496-12) states that sample offsets should be unsigned integers in // version 0 trun boxes, however a significant number of streams violate the spec and use // signed integers instead. It's safe to always decode sample offsets as signed integers // here, because unsigned integers will still be parsed correctly (unless their top bit is // set, which is never true in practice because sample offsets are always small). 
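// Illustrative sketch, not part of the patch: the per-sample arithmetic that the rewritten
// parseTrun performs. Reading the version-0 composition offset as a signed int is lossless while
// the top bit is clear, and the presentation time is cumulativeTime + offset - edtsOffset scaled
// from timescale units to microseconds. The class and method names below are hypothetical; only
// the formula mirrors the extractor code around this point.
final class TrunTimestampSketch {
  static long presentationTimeUs(
      long cumulativeTime, int compositionOffset, long edtsOffset, long timescale) {
    long presentationTime = cumulativeTime + compositionOffset - edtsOffset;
    // Plain integer math is fine for small values; the extractor itself uses
    // Util.scaleLargeTimestamp to guard against overflow.
    return presentationTime * 1_000_000L / timescale;
  }

  public static void main(String[] args) {
    // 0x00000BB8 == 3000 ticks: the sign bit is clear, so a signed read equals the unsigned value.
    int compositionOffset = 0x00000BB8;
    // One sample at decode time 90000 in a 90 kHz timescale, with no edit-list shift.
    System.out.println(presentationTimeUs(90_000, compositionOffset, 0, 90_000)); // Prints 1033333.
  }
}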
- int sampleOffset = trun.readInt(); - sampleCompositionTimeOffsetUsTable[i] = - (int) ((sampleOffset * C.MICROS_PER_SECOND) / timescale); - } else { - sampleCompositionTimeOffsetUsTable[i] = 0; + sampleCompositionTimeOffset = trun.readInt(); + } + long samplePresentationTime = cumulativeTime + sampleCompositionTimeOffset - edtsOffset; + samplePresentationTimesUs[i] = + Util.scaleLargeTimestamp(samplePresentationTime, C.MICROS_PER_SECOND, timescale); + if (!fragment.nextFragmentDecodeTimeIncludesMoov) { + samplePresentationTimesUs[i] += trackBundle.moovSampleTable.durationUs; } - sampleDecodingTimeUsTable[i] = - Util.scaleLargeTimestamp(cumulativeTime, C.MICROS_PER_SECOND, timescale) - edtsOffsetUs; sampleSizeTable[i] = sampleSize; - sampleIsSyncFrameTable[i] = ((sampleFlags >> 16) & 0x1) == 0 - && (!workaroundEveryVideoFrameIsSyncFrame || i == 0); + sampleIsSyncFrameTable[i] = + ((sampleFlags >> 16) & 0x1) == 0 && (!workaroundEveryVideoFrameIsSyncFrame || i == 0); cumulativeTime += sampleDuration; } fragment.nextFragmentDecodeTime = cumulativeTime; @@ -1017,13 +1070,15 @@ private static int parseTrun( private static int checkNonNegative(int value) throws ParserException { if (value < 0) { - throw new ParserException("Unexpected negtive value: " + value); + throw ParserException.createForMalformedContainer( + "Unexpected negative value: " + value, /* cause= */ null); } return value; } - private static void parseUuid(ParsableByteArray uuid, TrackFragment out, - byte[] extendedTypeScratch) throws ParserException { + private static void parseUuid( + ParsableByteArray uuid, TrackFragment out, byte[] extendedTypeScratch) + throws ParserException { uuid.setPosition(Atom.HEADER_SIZE); uuid.readBytes(extendedTypeScratch, 0, 16); @@ -1050,13 +1105,23 @@ private static void parseSenc(ParsableByteArray senc, int offset, TrackFragment if ((flags & 0x01 /* override_track_encryption_box_parameters */) != 0) { // TODO: Implement this. - throw new ParserException("Overriding TrackEncryptionBox parameters is unsupported."); + throw ParserException.createForUnsupportedContainerFeature( + "Overriding TrackEncryptionBox parameters is unsupported."); } boolean subsampleEncryption = (flags & 0x02 /* use_subsample_encryption */) != 0; int sampleCount = senc.readUnsignedIntToInt(); - if (sampleCount != out.sampleCount) { - throw new ParserException("Length mismatch: " + sampleCount + ", " + out.sampleCount); + if (sampleCount == 0) { + // Samples are unencrypted. + Arrays.fill(out.sampleHasSubsampleEncryptionTable, 0, out.sampleCount, false); + return; + } else if (sampleCount != out.sampleCount) { + throw ParserException.createForMalformedContainer( + "Senc sample count " + + sampleCount + + " is different from fragment sample count" + + out.sampleCount, + /* cause= */ null); } Arrays.fill(out.sampleHasSubsampleEncryptionTable, 0, sampleCount, subsampleEncryption); @@ -1064,38 +1129,57 @@ private static void parseSenc(ParsableByteArray senc, int offset, TrackFragment out.fillEncryptionData(senc); } - private static void parseSgpd(ParsableByteArray sbgp, ParsableByteArray sgpd, String schemeType, - TrackFragment out) throws ParserException { - sbgp.setPosition(Atom.HEADER_SIZE); - int sbgpFullAtom = sbgp.readInt(); - if (sbgp.readInt() != SAMPLE_GROUP_TYPE_seig) { - // Only seig grouping type is supported. + private static void parseSampleGroups( + ContainerAtom traf, @Nullable String schemeType, TrackFragment out) throws ParserException { + // Find sbgp and sgpd boxes with grouping_type == seig. 
+ @Nullable ParsableByteArray sbgp = null; + @Nullable ParsableByteArray sgpd = null; + for (int i = 0; i < traf.leafChildren.size(); i++) { + LeafAtom leafAtom = traf.leafChildren.get(i); + ParsableByteArray leafAtomData = leafAtom.data; + if (leafAtom.type == Atom.TYPE_sbgp) { + leafAtomData.setPosition(Atom.FULL_HEADER_SIZE); + if (leafAtomData.readInt() == SAMPLE_GROUP_TYPE_seig) { + sbgp = leafAtomData; + } + } else if (leafAtom.type == Atom.TYPE_sgpd) { + leafAtomData.setPosition(Atom.FULL_HEADER_SIZE); + if (leafAtomData.readInt() == SAMPLE_GROUP_TYPE_seig) { + sgpd = leafAtomData; + } + } + } + if (sbgp == null || sgpd == null) { return; } - if (Atom.parseFullAtomVersion(sbgpFullAtom) == 1) { - sbgp.skipBytes(4); // default_length. + + sbgp.setPosition(Atom.HEADER_SIZE); + int sbgpVersion = Atom.parseFullAtomVersion(sbgp.readInt()); + sbgp.skipBytes(4); // grouping_type == seig. + if (sbgpVersion == 1) { + sbgp.skipBytes(4); // grouping_type_parameter. } if (sbgp.readInt() != 1) { // entry_count. - throw new ParserException("Entry count in sbgp != 1 (unsupported)."); + throw ParserException.createForUnsupportedContainerFeature( + "Entry count in sbgp != 1 (unsupported)."); } sgpd.setPosition(Atom.HEADER_SIZE); - int sgpdFullAtom = sgpd.readInt(); - if (sgpd.readInt() != SAMPLE_GROUP_TYPE_seig) { - // Only seig grouping type is supported. - return; - } - int sgpdVersion = Atom.parseFullAtomVersion(sgpdFullAtom); + int sgpdVersion = Atom.parseFullAtomVersion(sgpd.readInt()); + sgpd.skipBytes(4); // grouping_type == seig. if (sgpdVersion == 1) { if (sgpd.readUnsignedInt() == 0) { - throw new ParserException("Variable length description in sgpd found (unsupported)"); + throw ParserException.createForUnsupportedContainerFeature( + "Variable length description in sgpd found (unsupported)"); } } else if (sgpdVersion >= 2) { sgpd.skipBytes(4); // default_sample_description_index. } if (sgpd.readUnsignedInt() != 1) { // entry_count. - throw new ParserException("Entry count in sgpd != 1 (unsupported)."); + throw ParserException.createForUnsupportedContainerFeature( + "Entry count in sgpd != 1 (unsupported)."); } + // CencSampleEncryptionInformationGroupEntry sgpd.skipBytes(1); // reserved = 0. 
int patternByte = sgpd.readUnsignedByte(); @@ -1108,15 +1192,22 @@ private static void parseSgpd(ParsableByteArray sbgp, ParsableByteArray sgpd, St int perSampleIvSize = sgpd.readUnsignedByte(); byte[] keyId = new byte[16]; sgpd.readBytes(keyId, 0, keyId.length); - byte[] constantIv = null; + @Nullable byte[] constantIv = null; if (perSampleIvSize == 0) { int constantIvSize = sgpd.readUnsignedByte(); constantIv = new byte[constantIvSize]; sgpd.readBytes(constantIv, 0, constantIvSize); } out.definesEncryptionData = true; - out.trackEncryptionBox = new TrackEncryptionBox(isProtected, schemeType, perSampleIvSize, keyId, - cryptByteBlock, skipByteBlock, constantIv); + out.trackEncryptionBox = + new TrackEncryptionBox( + isProtected, + schemeType, + perSampleIvSize, + keyId, + cryptByteBlock, + skipByteBlock, + constantIv); } /** @@ -1144,8 +1235,8 @@ private static Pair parseSidx(ParsableByteArray atom, long inp earliestPresentationTime = atom.readUnsignedLongToLong(); offset += atom.readUnsignedLongToLong(); } - long earliestPresentationTimeUs = Util.scaleLargeTimestamp(earliestPresentationTime, - C.MICROS_PER_SECOND, timescale); + long earliestPresentationTimeUs = + Util.scaleLargeTimestamp(earliestPresentationTime, C.MICROS_PER_SECOND, timescale); atom.skipBytes(2); @@ -1162,7 +1253,8 @@ private static Pair parseSidx(ParsableByteArray atom, long inp int type = 0x80000000 & firstInt; if (type != 0) { - throw new ParserException("Unhandled indirect reference"); + throw ParserException.createForMalformedContainer( + "Unhandled indirect reference", /* cause= */ null); } long referenceDuration = atom.readUnsignedInt(); @@ -1180,12 +1272,12 @@ private static Pair parseSidx(ParsableByteArray atom, long inp offset += sizes[i]; } - return Pair.create(earliestPresentationTimeUs, - new ChunkIndex(sizes, offsets, durationsUs, timesUs)); + return Pair.create( + earliestPresentationTimeUs, new ChunkIndex(sizes, offsets, durationsUs, timesUs)); } - private void readEncryptionData(ExtractorInput input) throws IOException, InterruptedException { - TrackBundle nextTrackBundle = null; + private void readEncryptionData(ExtractorInput input) throws IOException { + @Nullable TrackBundle nextTrackBundle = null; long nextDataOffset = Long.MAX_VALUE; int trackBundlesSize = trackBundles.size(); for (int i = 0; i < trackBundlesSize; i++) { @@ -1202,7 +1294,8 @@ private void readEncryptionData(ExtractorInput input) throws IOException, Interr } int bytesToSkip = (int) (nextDataOffset - input.getPosition()); if (bytesToSkip < 0) { - throw new ParserException("Offset to encryption data was negative."); + throw ParserException.createForMalformedContainer( + "Offset to encryption data was negative.", /* cause= */ null); } input.skipFully(bytesToSkip); nextTrackBundle.fragment.fillEncryptionData(input); @@ -1222,83 +1315,80 @@ private void readEncryptionData(ExtractorInput input) throws IOException, Interr * @return Whether a sample was read. The read sample may have been output or skipped. False * indicates that there are no samples left to read in the current mdat. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread is interrupted. 
*/ - private boolean readSample(ExtractorInput input) throws IOException, InterruptedException { - if (parserState == STATE_READING_SAMPLE_START) { - if (currentTrackBundle == null) { - TrackBundle currentTrackBundle = getNextFragmentRun(trackBundles); - if (currentTrackBundle == null) { - // We've run out of samples in the current mdat. Discard any trailing data and prepare to - // read the header of the next atom. - int bytesToSkip = (int) (endOfMdatPosition - input.getPosition()); - if (bytesToSkip < 0) { - throw new ParserException("Offset to end of mdat was negative."); - } - input.skipFully(bytesToSkip); - enterReadingAtomHeaderState(); - return false; - } - - long nextDataPosition = currentTrackBundle.fragment - .trunDataPosition[currentTrackBundle.currentTrackRunIndex]; - // We skip bytes preceding the next sample to read. - int bytesToSkip = (int) (nextDataPosition - input.getPosition()); + private boolean readSample(ExtractorInput input) throws IOException { + @Nullable TrackBundle trackBundle = currentTrackBundle; + if (trackBundle == null) { + trackBundle = getNextTrackBundle(trackBundles); + if (trackBundle == null) { + // We've run out of samples in the current mdat. Discard any trailing data and prepare to + // read the header of the next atom. + int bytesToSkip = (int) (endOfMdatPosition - input.getPosition()); if (bytesToSkip < 0) { - // Assume the sample data must be contiguous in the mdat with no preceding data. - Log.w(TAG, "Ignoring negative offset to sample data."); - bytesToSkip = 0; + throw ParserException.createForMalformedContainer( + "Offset to end of mdat was negative.", /* cause= */ null); } input.skipFully(bytesToSkip); - this.currentTrackBundle = currentTrackBundle; + enterReadingAtomHeaderState(); + return false; } - sampleSize = currentTrackBundle.fragment - .sampleSizeTable[currentTrackBundle.currentSampleIndex]; + long nextDataPosition = trackBundle.getCurrentSampleOffset(); + // We skip bytes preceding the next sample to read. + int bytesToSkip = (int) (nextDataPosition - input.getPosition()); + if (bytesToSkip < 0) { + // Assume the sample data must be contiguous in the mdat with no preceding data. + Log.w(TAG, "Ignoring negative offset to sample data."); + bytesToSkip = 0; + } + input.skipFully(bytesToSkip); + currentTrackBundle = trackBundle; + } + if (parserState == STATE_READING_SAMPLE_START) { + sampleSize = trackBundle.getCurrentSampleSize(); - if (currentTrackBundle.currentSampleIndex < currentTrackBundle.firstSampleToOutputIndex) { + if (trackBundle.currentSampleIndex < trackBundle.firstSampleToOutputIndex) { input.skipFully(sampleSize); - currentTrackBundle.skipSampleEncryptionData(); - if (!currentTrackBundle.next()) { + trackBundle.skipSampleEncryptionData(); + if (!trackBundle.next()) { currentTrackBundle = null; } parserState = STATE_READING_SAMPLE_START; return true; } - if (currentTrackBundle.track.sampleTransformation == Track.TRANSFORMATION_CEA608_CDAT) { + if (trackBundle.moovSampleTable.track.sampleTransformation + == Track.TRANSFORMATION_CEA608_CDAT) { sampleSize -= Atom.HEADER_SIZE; input.skipFully(Atom.HEADER_SIZE); } - if (MimeTypes.AUDIO_AC4.equals(currentTrackBundle.track.format.sampleMimeType)) { + if (MimeTypes.AUDIO_AC4.equals(trackBundle.moovSampleTable.track.format.sampleMimeType)) { // AC4 samples need to be prefixed with a clear sample header. 
sampleBytesWritten = - currentTrackBundle.outputSampleEncryptionData(sampleSize, Ac4Util.SAMPLE_HEADER_SIZE); + trackBundle.outputSampleEncryptionData(sampleSize, Ac4Util.SAMPLE_HEADER_SIZE); Ac4Util.getAc4SampleHeader(sampleSize, scratch); - currentTrackBundle.output.sampleData(scratch, Ac4Util.SAMPLE_HEADER_SIZE); + trackBundle.output.sampleData(scratch, Ac4Util.SAMPLE_HEADER_SIZE); sampleBytesWritten += Ac4Util.SAMPLE_HEADER_SIZE; } else { sampleBytesWritten = - currentTrackBundle.outputSampleEncryptionData(sampleSize, /* clearHeaderSize= */ 0); + trackBundle.outputSampleEncryptionData(sampleSize, /* clearHeaderSize= */ 0); } sampleSize += sampleBytesWritten; parserState = STATE_READING_SAMPLE_CONTINUE; sampleCurrentNalBytesRemaining = 0; } - TrackFragment fragment = currentTrackBundle.fragment; - Track track = currentTrackBundle.track; - TrackOutput output = currentTrackBundle.output; - int sampleIndex = currentTrackBundle.currentSampleIndex; - long sampleTimeUs = fragment.getSamplePresentationTimeUs(sampleIndex); + Track track = trackBundle.moovSampleTable.track; + TrackOutput output = trackBundle.output; + long sampleTimeUs = trackBundle.getCurrentSamplePresentationTimeUs(); if (timestampAdjuster != null) { sampleTimeUs = timestampAdjuster.adjustSampleTimestamp(sampleTimeUs); } if (track.nalUnitLengthFieldLength != 0) { // Zero the top three bytes of the array that we'll use to decode nal unit lengths, in case // they're only 1 or 2 bytes long. - byte[] nalPrefixData = nalPrefix.data; + byte[] nalPrefixData = nalPrefix.getData(); nalPrefixData[0] = 0; nalPrefixData[1] = 0; nalPrefixData[2] = 0; @@ -1314,7 +1404,8 @@ private boolean readSample(ExtractorInput input) throws IOException, Interrupted nalPrefix.setPosition(0); int nalLengthInt = nalPrefix.readInt(); if (nalLengthInt < 1) { - throw new ParserException("Invalid NAL length"); + throw ParserException.createForMalformedContainer( + "Invalid NAL length", /* cause= */ null); } sampleCurrentNalBytesRemaining = nalLengthInt - 1; // Write a start code for the current NAL unit. @@ -1322,8 +1413,9 @@ private boolean readSample(ExtractorInput input) throws IOException, Interrupted output.sampleData(nalStartCode, 4); // Write the NAL unit type byte. output.sampleData(nalPrefix, 1); - processSeiNalUnitPayload = cea608TrackOutputs.length > 0 - && NalUnitUtil.isNalUnitSei(track.format.sampleMimeType, nalPrefixData[4]); + processSeiNalUnitPayload = + ceaTrackOutputs.length > 0 + && NalUnitUtil.isNalUnitSei(track.format.sampleMimeType, nalPrefixData[4]); sampleBytesWritten += 5; sampleSize += nalUnitLengthFieldLengthDiff; } else { @@ -1331,15 +1423,16 @@ private boolean readSample(ExtractorInput input) throws IOException, Interrupted if (processSeiNalUnitPayload) { // Read and write the payload of the SEI NAL unit. nalBuffer.reset(sampleCurrentNalBytesRemaining); - input.readFully(nalBuffer.data, 0, sampleCurrentNalBytesRemaining); + input.readFully(nalBuffer.getData(), 0, sampleCurrentNalBytesRemaining); output.sampleData(nalBuffer, sampleCurrentNalBytesRemaining); writtenBytes = sampleCurrentNalBytesRemaining; // Unescape and process the SEI NAL unit. - int unescapedLength = NalUnitUtil.unescapeStream(nalBuffer.data, nalBuffer.limit()); + int unescapedLength = + NalUnitUtil.unescapeStream(nalBuffer.getData(), nalBuffer.limit()); // If the format is H.265/HEVC the NAL unit header has two bytes so skip one more byte. nalBuffer.setPosition(MimeTypes.VIDEO_H265.equals(track.format.sampleMimeType) ? 
1 : 0); nalBuffer.setLimit(unescapedLength); - CeaUtil.consume(sampleTimeUs, nalBuffer, cea608TrackOutputs); + CeaUtil.consume(sampleTimeUs, nalBuffer, ceaTrackOutputs); } else { // Write the payload of the NAL unit. writtenBytes = output.sampleData(input, sampleCurrentNalBytesRemaining, false); @@ -1355,14 +1448,12 @@ private boolean readSample(ExtractorInput input) throws IOException, Interrupted } } - @C.BufferFlags int sampleFlags = fragment.sampleIsSyncFrameTable[sampleIndex] - ? C.BUFFER_FLAG_KEY_FRAME : 0; + @C.BufferFlags int sampleFlags = trackBundle.getCurrentSampleFlags(); // Encryption data. - TrackOutput.CryptoData cryptoData = null; - TrackEncryptionBox encryptionBox = currentTrackBundle.getEncryptionBoxIfEncrypted(); + @Nullable TrackOutput.CryptoData cryptoData = null; + @Nullable TrackEncryptionBox encryptionBox = trackBundle.getEncryptionBoxIfEncrypted(); if (encryptionBox != null) { - sampleFlags |= C.BUFFER_FLAG_ENCRYPTED; cryptoData = encryptionBox.cryptoData; } @@ -1370,26 +1461,37 @@ private boolean readSample(ExtractorInput input) throws IOException, Interrupted // After we have the sampleTimeUs, we can commit all the pending metadata samples outputPendingMetadataSamples(sampleTimeUs); - if (!currentTrackBundle.next()) { + if (!trackBundle.next()) { currentTrackBundle = null; } parserState = STATE_READING_SAMPLE_START; return true; } + /** + * Called immediately after outputting a non-metadata sample, to output any pending metadata + * samples. + * + * @param sampleTimeUs The timestamp of the non-metadata sample that was just output. + */ private void outputPendingMetadataSamples(long sampleTimeUs) { while (!pendingMetadataSampleInfos.isEmpty()) { - MetadataSampleInfo sampleInfo = pendingMetadataSampleInfos.removeFirst(); - pendingMetadataSampleBytes -= sampleInfo.size; - long metadataTimeUs = sampleTimeUs + sampleInfo.presentationTimeDeltaUs; + MetadataSampleInfo metadataSampleInfo = pendingMetadataSampleInfos.removeFirst(); + pendingMetadataSampleBytes -= metadataSampleInfo.size; + long metadataSampleTimeUs = metadataSampleInfo.sampleTimeUs; + if (metadataSampleInfo.sampleTimeIsRelative) { + // The metadata sample timestamp is relative to the timestamp of the non-metadata sample + // that was just output. Make it absolute. + metadataSampleTimeUs += sampleTimeUs; + } if (timestampAdjuster != null) { - metadataTimeUs = timestampAdjuster.adjustSampleTimestamp(metadataTimeUs); + metadataSampleTimeUs = timestampAdjuster.adjustSampleTimestamp(metadataSampleTimeUs); } for (TrackOutput emsgTrackOutput : emsgTrackOutputs) { emsgTrackOutput.sampleMetadata( - metadataTimeUs, + metadataSampleTimeUs, C.BUFFER_FLAG_KEY_FRAME, - sampleInfo.size, + metadataSampleInfo.size, pendingMetadataSampleBytes, null); } @@ -1397,23 +1499,27 @@ private void outputPendingMetadataSamples(long sampleTimeUs) { } /** - * Returns the {@link TrackBundle} whose fragment run has the earliest file position out of those - * yet to be consumed, or null if all have been consumed. + * Returns the {@link TrackBundle} whose sample has the earliest file position out of those yet to + * be consumed, or null if all have been consumed. 
*/ - private static TrackBundle getNextFragmentRun(SparseArray trackBundles) { - TrackBundle nextTrackBundle = null; - long nextTrackRunOffset = Long.MAX_VALUE; + @Nullable + private static TrackBundle getNextTrackBundle(SparseArray trackBundles) { + @Nullable TrackBundle nextTrackBundle = null; + long nextSampleOffset = Long.MAX_VALUE; int trackBundlesSize = trackBundles.size(); for (int i = 0; i < trackBundlesSize; i++) { TrackBundle trackBundle = trackBundles.valueAt(i); - if (trackBundle.currentTrackRunIndex == trackBundle.fragment.trunCount) { - // This track fragment contains no more runs in the next mdat box. + if ((!trackBundle.currentlyInFragment + && trackBundle.currentSampleIndex == trackBundle.moovSampleTable.sampleCount) + || (trackBundle.currentlyInFragment + && trackBundle.currentTrackRunIndex == trackBundle.fragment.trunCount)) { + // This track sample table or fragment contains no more runs in the next mdat box. } else { - long trunOffset = trackBundle.fragment.trunDataPosition[trackBundle.currentTrackRunIndex]; - if (trunOffset < nextTrackRunOffset) { + long sampleOffset = trackBundle.getCurrentSampleOffset(); + if (sampleOffset < nextSampleOffset) { nextTrackBundle = trackBundle; - nextTrackRunOffset = trunOffset; + nextSampleOffset = sampleOffset; } } } @@ -1423,7 +1529,7 @@ private static TrackBundle getNextFragmentRun(SparseArray trackBund /** Returns DrmInitData from leaf atoms. */ @Nullable private static DrmInitData getDrmInitDataFromAtoms(List leafChildren) { - ArrayList schemeDatas = null; + @Nullable ArrayList schemeDatas = null; int leafChildrenSize = leafChildren.size(); for (int i = 0; i < leafChildrenSize; i++) { LeafAtom child = leafChildren.get(i); @@ -1431,8 +1537,8 @@ private static DrmInitData getDrmInitDataFromAtoms(List leafChild if (schemeDatas == null) { schemeDatas = new ArrayList<>(); } - byte[] psshData = child.data.data; - UUID uuid = PsshAtomUtil.parseUuid(psshData); + byte[] psshData = child.data.getData(); + @Nullable UUID uuid = PsshAtomUtil.parseUuid(psshData); if (uuid == null) { Log.w(TAG, "Skipped pssh atom (failed to extract uuid)"); } else { @@ -1445,40 +1551,64 @@ private static DrmInitData getDrmInitDataFromAtoms(List leafChild /** Returns whether the extractor should decode a leaf atom with type {@code atom}. 
*/ private static boolean shouldParseLeafAtom(int atom) { - return atom == Atom.TYPE_hdlr || atom == Atom.TYPE_mdhd || atom == Atom.TYPE_mvhd - || atom == Atom.TYPE_sidx || atom == Atom.TYPE_stsd || atom == Atom.TYPE_tfdt - || atom == Atom.TYPE_tfhd || atom == Atom.TYPE_tkhd || atom == Atom.TYPE_trex - || atom == Atom.TYPE_trun || atom == Atom.TYPE_pssh || atom == Atom.TYPE_saiz - || atom == Atom.TYPE_saio || atom == Atom.TYPE_senc || atom == Atom.TYPE_uuid - || atom == Atom.TYPE_sbgp || atom == Atom.TYPE_sgpd || atom == Atom.TYPE_elst - || atom == Atom.TYPE_mehd || atom == Atom.TYPE_emsg; + return atom == Atom.TYPE_hdlr + || atom == Atom.TYPE_mdhd + || atom == Atom.TYPE_mvhd + || atom == Atom.TYPE_sidx + || atom == Atom.TYPE_stsd + || atom == Atom.TYPE_stts + || atom == Atom.TYPE_ctts + || atom == Atom.TYPE_stsc + || atom == Atom.TYPE_stsz + || atom == Atom.TYPE_stz2 + || atom == Atom.TYPE_stco + || atom == Atom.TYPE_co64 + || atom == Atom.TYPE_stss + || atom == Atom.TYPE_tfdt + || atom == Atom.TYPE_tfhd + || atom == Atom.TYPE_tkhd + || atom == Atom.TYPE_trex + || atom == Atom.TYPE_trun + || atom == Atom.TYPE_pssh + || atom == Atom.TYPE_saiz + || atom == Atom.TYPE_saio + || atom == Atom.TYPE_senc + || atom == Atom.TYPE_uuid + || atom == Atom.TYPE_sbgp + || atom == Atom.TYPE_sgpd + || atom == Atom.TYPE_elst + || atom == Atom.TYPE_mehd + || atom == Atom.TYPE_emsg; } /** Returns whether the extractor should decode a container atom with type {@code atom}. */ private static boolean shouldParseContainerAtom(int atom) { - return atom == Atom.TYPE_moov || atom == Atom.TYPE_trak || atom == Atom.TYPE_mdia - || atom == Atom.TYPE_minf || atom == Atom.TYPE_stbl || atom == Atom.TYPE_moof - || atom == Atom.TYPE_traf || atom == Atom.TYPE_mvex || atom == Atom.TYPE_edts; + return atom == Atom.TYPE_moov + || atom == Atom.TYPE_trak + || atom == Atom.TYPE_mdia + || atom == Atom.TYPE_minf + || atom == Atom.TYPE_stbl + || atom == Atom.TYPE_moof + || atom == Atom.TYPE_traf + || atom == Atom.TYPE_mvex + || atom == Atom.TYPE_edts; } - /** - * Holds data corresponding to a metadata sample. - */ + /** Holds data corresponding to a metadata sample. */ private static final class MetadataSampleInfo { - public final long presentationTimeDeltaUs; + public final long sampleTimeUs; + public final boolean sampleTimeIsRelative; public final int size; - public MetadataSampleInfo(long presentationTimeDeltaUs, int size) { - this.presentationTimeDeltaUs = presentationTimeDeltaUs; + public MetadataSampleInfo(long sampleTimeUs, boolean sampleTimeIsRelative, int size) { + this.sampleTimeUs = sampleTimeUs; + this.sampleTimeIsRelative = sampleTimeIsRelative; this.size = size; } - } - /** - * Holds data corresponding to a single track. - */ + /** Holds data corresponding to a single track. 
*/ private static final class TrackBundle { private static final int SINGLE_SUBSAMPLE_ENCRYPTION_DATA_LENGTH = 8; @@ -1487,7 +1617,7 @@ private static final class TrackBundle { public final TrackFragment fragment; public final ParsableByteArray scratch; - public Track track; + public TrackSampleTable moovSampleTable; public DefaultSampleValues defaultSampleValues; public int currentSampleIndex; public int currentSampleInTrackRun; @@ -1497,47 +1627,61 @@ private static final class TrackBundle { private final ParsableByteArray encryptionSignalByte; private final ParsableByteArray defaultInitializationVector; - public TrackBundle(TrackOutput output) { + private boolean currentlyInFragment; + + public TrackBundle( + TrackOutput output, + TrackSampleTable moovSampleTable, + DefaultSampleValues defaultSampleValues) { this.output = output; + this.moovSampleTable = moovSampleTable; + this.defaultSampleValues = defaultSampleValues; fragment = new TrackFragment(); scratch = new ParsableByteArray(); encryptionSignalByte = new ParsableByteArray(1); defaultInitializationVector = new ParsableByteArray(); + reset(moovSampleTable, defaultSampleValues); } - public void init(Track track, DefaultSampleValues defaultSampleValues) { - this.track = Assertions.checkNotNull(track); - this.defaultSampleValues = Assertions.checkNotNull(defaultSampleValues); - output.format(track.format); - reset(); + public void reset(TrackSampleTable moovSampleTable, DefaultSampleValues defaultSampleValues) { + this.moovSampleTable = moovSampleTable; + this.defaultSampleValues = defaultSampleValues; + output.format(moovSampleTable.track.format); + resetFragmentInfo(); } public void updateDrmInitData(DrmInitData drmInitData) { + @Nullable TrackEncryptionBox encryptionBox = - track.getSampleDescriptionEncryptionBox(fragment.header.sampleDescriptionIndex); - String schemeType = encryptionBox != null ? encryptionBox.schemeType : null; - output.format(track.format.copyWithDrmInitData(drmInitData.copyWithSchemeType(schemeType))); + moovSampleTable.track.getSampleDescriptionEncryptionBox( + castNonNull(fragment.header).sampleDescriptionIndex); + @Nullable String schemeType = encryptionBox != null ? encryptionBox.schemeType : null; + DrmInitData updatedDrmInitData = drmInitData.copyWithSchemeType(schemeType); + Format format = + moovSampleTable.track.format.buildUpon().setDrmInitData(updatedDrmInitData).build(); + output.format(format); } - /** Resets the current fragment and sample indices. */ - public void reset() { + /** Resets the current fragment, sample indices and {@link #currentlyInFragment} boolean. */ + public void resetFragmentInfo() { fragment.reset(); currentSampleIndex = 0; currentTrackRunIndex = 0; currentSampleInTrackRun = 0; firstSampleToOutputIndex = 0; + currentlyInFragment = false; } /** - * Advances {@link #firstSampleToOutputIndex} to point to the sync sample before the specified - * seek time in the current fragment. + * Advances {@link #firstSampleToOutputIndex} to point to the sync sample at or before the + * specified seek time in the current fragment. * * @param timeUs The seek time, in microseconds. 
*/ public void seek(long timeUs) { int searchIndex = currentSampleIndex; while (searchIndex < fragment.sampleCount - && fragment.getSamplePresentationTimeUs(searchIndex) < timeUs) { + && fragment.getSamplePresentationTimeUs(searchIndex) <= timeUs) { if (fragment.sampleIsSyncFrameTable[searchIndex]) { firstSampleToOutputIndex = searchIndex; } @@ -1545,16 +1689,57 @@ public void seek(long timeUs) { } } + /** Returns the presentation time of the current sample in microseconds. */ + public long getCurrentSamplePresentationTimeUs() { + return !currentlyInFragment + ? moovSampleTable.timestampsUs[currentSampleIndex] + : fragment.getSamplePresentationTimeUs(currentSampleIndex); + } + + /** Returns the byte offset of the current sample. */ + public long getCurrentSampleOffset() { + return !currentlyInFragment + ? moovSampleTable.offsets[currentSampleIndex] + : fragment.trunDataPosition[currentTrackRunIndex]; + } + + /** Returns the size of the current sample in bytes. */ + public int getCurrentSampleSize() { + return !currentlyInFragment + ? moovSampleTable.sizes[currentSampleIndex] + : fragment.sampleSizeTable[currentSampleIndex]; + } + + /** Returns the {@link C.BufferFlags} corresponding to the current sample. */ + public @C.BufferFlags int getCurrentSampleFlags() { + int flags = + !currentlyInFragment + ? moovSampleTable.flags[currentSampleIndex] + : (fragment.sampleIsSyncFrameTable[currentSampleIndex] ? C.BUFFER_FLAG_KEY_FRAME : 0); + if (getEncryptionBoxIfEncrypted() != null) { + flags |= C.BUFFER_FLAG_ENCRYPTED; + } + return flags; + } + /** - * Advances the indices in the bundle to point to the next sample in the current fragment. If - * the current sample is the last one in the current fragment, then the advanced state will be - * {@code currentSampleIndex == fragment.sampleCount}, {@code currentTrackRunIndex == - * fragment.trunCount} and {@code #currentSampleInTrackRun == 0}. + * Advances the indices in the bundle to point to the next sample in the sample table (if it has + * not reached the fragments yet) or in the current fragment. + * + *
<p>
      If the current sample is the last one in the sample table, then the advanced state will be + * {@code currentSampleIndex == moovSampleTable.sampleCount}. If the current sample is the last + * one in the current fragment, then the advanced state will be {@code currentSampleIndex == + * fragment.sampleCount}, {@code currentTrackRunIndex == fragment.trunCount} and {@code + * #currentSampleInTrackRun == 0}. * - * @return Whether the next sample is in the same track run as the previous one. + * @return Whether this {@link TrackBundle} can be used to read the next sample without + * recomputing the next {@link TrackBundle}. */ public boolean next() { currentSampleIndex++; + if (!currentlyInFragment) { + return false; + } currentSampleInTrackRun++; if (currentSampleInTrackRun == fragment.trunLength[currentTrackRunIndex]) { currentTrackRunIndex++; @@ -1567,6 +1752,8 @@ public boolean next() { /** * Outputs the encryption data for the current sample. * + *
<p>
      This is not supported yet for samples specified in the sample table. + * * @param sampleSize The size of the current sample in bytes, excluding any additional clear * header that will be prefixed to the sample by the extractor. * @param clearHeaderSize The size of a clear header that will be prefixed to the sample by the @@ -1574,7 +1761,7 @@ public boolean next() { * @return The number of written bytes. */ public int outputSampleEncryptionData(int sampleSize, int clearHeaderSize) { - TrackEncryptionBox encryptionBox = getEncryptionBoxIfEncrypted(); + @Nullable TrackEncryptionBox encryptionBox = getEncryptionBoxIfEncrypted(); if (encryptionBox == null) { return 0; } @@ -1586,7 +1773,7 @@ public int outputSampleEncryptionData(int sampleSize, int clearHeaderSize) { vectorSize = encryptionBox.perSampleIvSize; } else { // The default initialization vector should be used. - byte[] initVectorData = encryptionBox.defaultInitializationVector; + byte[] initVectorData = castNonNull(encryptionBox.defaultInitializationVector); defaultInitializationVector.reset(initVectorData, initVectorData.length); initializationVectorData = defaultInitializationVector; vectorSize = initVectorData.length; @@ -1597,12 +1784,13 @@ public int outputSampleEncryptionData(int sampleSize, int clearHeaderSize) { boolean writeSubsampleEncryptionData = haveSubsampleEncryptionTable || clearHeaderSize != 0; // Write the signal byte, containing the vector size and the subsample encryption flag. - encryptionSignalByte.data[0] = + encryptionSignalByte.getData()[0] = (byte) (vectorSize | (writeSubsampleEncryptionData ? 0x80 : 0)); encryptionSignalByte.setPosition(0); - output.sampleData(encryptionSignalByte, 1); + output.sampleData(encryptionSignalByte, 1, TrackOutput.SAMPLE_DATA_PART_ENCRYPTION); // Write the vector. - output.sampleData(initializationVectorData, vectorSize); + output.sampleData( + initializationVectorData, vectorSize, TrackOutput.SAMPLE_DATA_PART_ENCRYPTION); if (!writeSubsampleEncryptionData) { return 1 + vectorSize; @@ -1614,17 +1802,21 @@ public int outputSampleEncryptionData(int sampleSize, int clearHeaderSize) { // into account. 
scratch.reset(SINGLE_SUBSAMPLE_ENCRYPTION_DATA_LENGTH); // subsampleCount = 1 (unsigned short) - scratch.data[0] = (byte) 0; - scratch.data[1] = (byte) 1; + byte[] data = scratch.getData(); + data[0] = (byte) 0; + data[1] = (byte) 1; // clearDataSize = clearHeaderSize (unsigned short) - scratch.data[2] = (byte) ((clearHeaderSize >> 8) & 0xFF); - scratch.data[3] = (byte) (clearHeaderSize & 0xFF); - // encryptedDataSize = sampleSize (unsigned short) - scratch.data[4] = (byte) ((sampleSize >> 24) & 0xFF); - scratch.data[5] = (byte) ((sampleSize >> 16) & 0xFF); - scratch.data[6] = (byte) ((sampleSize >> 8) & 0xFF); - scratch.data[7] = (byte) (sampleSize & 0xFF); - output.sampleData(scratch, SINGLE_SUBSAMPLE_ENCRYPTION_DATA_LENGTH); + data[2] = (byte) ((clearHeaderSize >> 8) & 0xFF); + data[3] = (byte) (clearHeaderSize & 0xFF); + // encryptedDataSize = sampleSize (unsigned int) + data[4] = (byte) ((sampleSize >> 24) & 0xFF); + data[5] = (byte) ((sampleSize >> 16) & 0xFF); + data[6] = (byte) ((sampleSize >> 8) & 0xFF); + data[7] = (byte) (sampleSize & 0xFF); + output.sampleData( + scratch, + SINGLE_SUBSAMPLE_ENCRYPTION_DATA_LENGTH, + TrackOutput.SAMPLE_DATA_PART_ENCRYPTION); return 1 + vectorSize + SINGLE_SUBSAMPLE_ENCRYPTION_DATA_LENGTH; } @@ -1637,23 +1829,28 @@ public int outputSampleEncryptionData(int sampleSize, int clearHeaderSize) { // We need to account for the additional clear header by adding clearHeaderSize to // clearDataSize for the first subsample specified in the subsample encryption data. scratch.reset(subsampleDataLength); - scratch.readBytes(subsampleEncryptionData.data, /* offset= */ 0, subsampleDataLength); - subsampleEncryptionData.skipBytes(subsampleDataLength); + byte[] scratchData = scratch.getData(); + subsampleEncryptionData.readBytes(scratchData, /* offset= */ 0, subsampleDataLength); - int clearDataSize = (scratch.data[2] & 0xFF) << 8 | (scratch.data[3] & 0xFF); + int clearDataSize = (scratchData[2] & 0xFF) << 8 | (scratchData[3] & 0xFF); int adjustedClearDataSize = clearDataSize + clearHeaderSize; - scratch.data[2] = (byte) ((adjustedClearDataSize >> 8) & 0xFF); - scratch.data[3] = (byte) (adjustedClearDataSize & 0xFF); + scratchData[2] = (byte) ((adjustedClearDataSize >> 8) & 0xFF); + scratchData[3] = (byte) (adjustedClearDataSize & 0xFF); subsampleEncryptionData = scratch; } - output.sampleData(subsampleEncryptionData, subsampleDataLength); + output.sampleData( + subsampleEncryptionData, subsampleDataLength, TrackOutput.SAMPLE_DATA_PART_ENCRYPTION); return 1 + vectorSize + subsampleDataLength; } - /** Skips the encryption data for the current sample. */ - private void skipSampleEncryptionData() { - TrackEncryptionBox encryptionBox = getEncryptionBoxIfEncrypted(); + /** + * Skips the encryption data for the current sample. + * + *
<p>
      This is not supported yet for samples specified in the sample table. + */ + public void skipSampleEncryptionData() { + @Nullable TrackEncryptionBox encryptionBox = getEncryptionBoxIfEncrypted(); if (encryptionBox == null) { return; } @@ -1667,15 +1864,19 @@ private void skipSampleEncryptionData() { } } - private TrackEncryptionBox getEncryptionBoxIfEncrypted() { - int sampleDescriptionIndex = fragment.header.sampleDescriptionIndex; + @Nullable + public TrackEncryptionBox getEncryptionBoxIfEncrypted() { + if (!currentlyInFragment) { + // Encryption is not supported yet for samples specified in the sample table. + return null; + } + int sampleDescriptionIndex = castNonNull(fragment.header).sampleDescriptionIndex; + @Nullable TrackEncryptionBox encryptionBox = fragment.trackEncryptionBox != null ? fragment.trackEncryptionBox - : track.getSampleDescriptionEncryptionBox(sampleDescriptionIndex); + : moovSampleTable.track.getSampleDescriptionEncryptionBox(sampleDescriptionIndex); return encryptionBox != null && encryptionBox.isEncrypted ? encryptionBox : null; } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/MetadataUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/MetadataUtil.java index 4f65836b76..9b9fea44ca 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/MetadataUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/MetadataUtil.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.extractor.mp4; +import static java.lang.Math.min; + import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.C; @@ -26,8 +28,11 @@ import com.google.android.exoplayer2.metadata.id3.Id3Frame; import com.google.android.exoplayer2.metadata.id3.InternalFrame; import com.google.android.exoplayer2.metadata.id3.TextInformationFrame; +import com.google.android.exoplayer2.metadata.mp4.MdtaMetadataEntry; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.common.collect.ImmutableList; +import org.checkerframework.checker.nullness.compatqual.NullableType; /** Utilities for handling metadata in MP4. */ /* package */ final class MetadataUtil { @@ -275,48 +280,60 @@ "Psybient" }; - private static final String LANGUAGE_UNDEFINED = "und"; - private static final int TYPE_TOP_BYTE_COPYRIGHT = 0xA9; private static final int TYPE_TOP_BYTE_REPLACEMENT = 0xFD; // Truncated value of \uFFFD. - private static final String MDTA_KEY_ANDROID_CAPTURE_FPS = "com.android.capture.fps"; - private MetadataUtil() {} - /** - * Returns a {@link Format} that is the same as the input format but includes information from the - * specified sources of metadata. - */ - public static Format getFormatWithMetadata( + /** Updates a {@link Format.Builder} to include metadata from the provided sources. */ + public static void setFormatMetadata( int trackType, - Format format, - @Nullable Metadata udtaMetadata, + @Nullable Metadata udtaMetaMetadata, @Nullable Metadata mdtaMetadata, - GaplessInfoHolder gaplessInfoHolder) { + Format.Builder formatBuilder, + @NullableType Metadata... 
additionalMetadata) { + Metadata formatMetadata = new Metadata(); + if (trackType == C.TRACK_TYPE_AUDIO) { - if (gaplessInfoHolder.hasGaplessInfo()) { - format = - format.copyWithGaplessInfo( - gaplessInfoHolder.encoderDelay, gaplessInfoHolder.encoderPadding); - } - // We assume all udta metadata is associated with the audio track. - if (udtaMetadata != null) { - format = format.copyWithMetadata(udtaMetadata); + // We assume all meta metadata in the udta box is associated with the audio track. + if (udtaMetaMetadata != null) { + formatMetadata = udtaMetaMetadata; } - } else if (trackType == C.TRACK_TYPE_VIDEO && mdtaMetadata != null) { + } else if (trackType == C.TRACK_TYPE_VIDEO) { // Populate only metadata keys that are known to be specific to video. - for (int i = 0; i < mdtaMetadata.length(); i++) { - Metadata.Entry entry = mdtaMetadata.get(i); - if (entry instanceof MdtaMetadataEntry) { - MdtaMetadataEntry mdtaMetadataEntry = (MdtaMetadataEntry) entry; - if (MDTA_KEY_ANDROID_CAPTURE_FPS.equals(mdtaMetadataEntry.key)) { - format = format.copyWithMetadata(new Metadata(mdtaMetadataEntry)); + if (mdtaMetadata != null) { + for (int i = 0; i < mdtaMetadata.length(); i++) { + Metadata.Entry entry = mdtaMetadata.get(i); + if (entry instanceof MdtaMetadataEntry) { + MdtaMetadataEntry mdtaMetadataEntry = (MdtaMetadataEntry) entry; + if (MdtaMetadataEntry.KEY_ANDROID_CAPTURE_FPS.equals(mdtaMetadataEntry.key)) { + formatMetadata = new Metadata(mdtaMetadataEntry); + break; + } } } } } - return format; + + for (Metadata metadata : additionalMetadata) { + formatMetadata = formatMetadata.copyWithAppendedEntriesFrom(metadata); + } + + if (formatMetadata.length() > 0) { + formatBuilder.setMetadata(formatMetadata); + } + } + + /** + * Updates a {@link Format.Builder} to include audio gapless information from the provided source. + */ + public static void setFormatGaplessInfo( + int trackType, GaplessInfoHolder gaplessInfoHolder, Format.Builder formatBuilder) { + if (trackType == C.TRACK_TYPE_AUDIO && gaplessInfoHolder.hasGaplessInfo()) { + formatBuilder + .setEncoderDelay(gaplessInfoHolder.encoderDelay) + .setEncoderPadding(gaplessInfoHolder.encoderPadding); + } } /** @@ -436,7 +453,7 @@ private static TextInformationFrame parseTextAttribute( if (atomType == Atom.TYPE_data) { data.skipBytes(8); // version (1), flags (3), empty (4) String value = data.readNullTerminatedString(atomSize - 16); - return new TextInformationFrame(id, /* description= */ null, value); + return new TextInformationFrame(id, /* description= */ null, ImmutableList.of(value)); } Log.w(TAG, "Failed to parse text attribute: " + Atom.getAtomTypeString(type)); return null; @@ -449,7 +466,7 @@ private static CommentFrame parseCommentAttribute(int type, ParsableByteArray da if (atomType == Atom.TYPE_data) { data.skipBytes(8); // version (1), flags (3), empty (4) String value = data.readNullTerminatedString(atomSize - 16); - return new CommentFrame(LANGUAGE_UNDEFINED, value, value); + return new CommentFrame(C.LANGUAGE_UNDETERMINED, value, value); } Log.w(TAG, "Failed to parse comment attribute: " + Atom.getAtomTypeString(type)); return null; @@ -464,12 +481,13 @@ private static Id3Frame parseUint8Attribute( boolean isBoolean) { int value = parseUint8AttributeValue(data); if (isBoolean) { - value = Math.min(1, value); + value = min(1, value); } if (value >= 0) { return isTextInformationFrame - ? 
new TextInformationFrame(id, /* description= */ null, Integer.toString(value)) - : new CommentFrame(LANGUAGE_UNDEFINED, id, Integer.toString(value)); + ? new TextInformationFrame( + id, /* description= */ null, ImmutableList.of(Integer.toString(value))) + : new CommentFrame(C.LANGUAGE_UNDETERMINED, id, Integer.toString(value)); } Log.w(TAG, "Failed to parse uint8 attribute: " + Atom.getAtomTypeString(type)); return null; @@ -489,7 +507,8 @@ private static TextInformationFrame parseIndexAndCountAttribute( if (count > 0) { value += "/" + count; } - return new TextInformationFrame(attributeName, /* description= */ null, value); + return new TextInformationFrame( + attributeName, /* description= */ null, ImmutableList.of(value)); } } Log.w(TAG, "Failed to parse index/count attribute: " + Atom.getAtomTypeString(type)); @@ -499,10 +518,14 @@ private static TextInformationFrame parseIndexAndCountAttribute( @Nullable private static TextInformationFrame parseStandardGenreAttribute(ParsableByteArray data) { int genreCode = parseUint8AttributeValue(data); - String genreString = (0 < genreCode && genreCode <= STANDARD_GENRES.length) - ? STANDARD_GENRES[genreCode - 1] : null; + @Nullable + String genreString = + (0 < genreCode && genreCode <= STANDARD_GENRES.length) + ? STANDARD_GENRES[genreCode - 1] + : null; if (genreString != null) { - return new TextInformationFrame("TCON", /* description= */ null, genreString); + return new TextInformationFrame( + "TCON", /* description= */ null, ImmutableList.of(genreString)); } Log.w(TAG, "Failed to parse standard genre code"); return null; @@ -515,7 +538,7 @@ private static ApicFrame parseCoverArt(ParsableByteArray data) { if (atomType == Atom.TYPE_data) { int fullVersionInt = data.readInt(); int flags = Atom.parseFullAtomFlags(fullVersionInt); - String mimeType = flags == 13 ? "image/jpeg" : flags == 14 ? "image/png" : null; + @Nullable String mimeType = flags == 13 ? "image/jpeg" : flags == 14 ? 
"image/png" : null; if (mimeType == null) { Log.w(TAG, "Unrecognized cover art flags: " + flags); return null; @@ -535,8 +558,8 @@ private static ApicFrame parseCoverArt(ParsableByteArray data) { @Nullable private static Id3Frame parseInternalAttribute(ParsableByteArray data, int endPosition) { - String domain = null; - String name = null; + @Nullable String domain = null; + @Nullable String name = null; int dataAtomPosition = -1; int dataAtomSize = -1; while (data.getPosition() < endPosition) { @@ -575,5 +598,4 @@ private static int parseUint8AttributeValue(ParsableByteArray data) { Log.w(TAG, "Failed to parse uint8 attribute value"); return -1; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Mp4Extractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Mp4Extractor.java index 4cc5af89cd..0632b4c4fe 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Mp4Extractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Mp4Extractor.java @@ -15,10 +15,21 @@ */ package com.google.android.exoplayer2.extractor.mp4; +import static com.google.android.exoplayer2.extractor.mp4.AtomParsers.parseTraks; +import static com.google.android.exoplayer2.extractor.mp4.Sniffer.BRAND_HEIC; +import static com.google.android.exoplayer2.extractor.mp4.Sniffer.BRAND_QUICKTIME; +import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.max; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.util.Pair; import androidx.annotation.IntDef; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.audio.Ac3Util; import com.google.android.exoplayer2.audio.Ac4Util; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; @@ -29,8 +40,11 @@ import com.google.android.exoplayer2.extractor.SeekMap; import com.google.android.exoplayer2.extractor.SeekPoint; import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.extractor.TrueHdSampleRechunker; import com.google.android.exoplayer2.extractor.mp4.Atom.ContainerAtom; import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.metadata.mp4.MotionPhotoMetadata; +import com.google.android.exoplayer2.metadata.mp4.SlowMotionData; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.NalUnitUtil; @@ -39,45 +53,78 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.List; +import org.checkerframework.checker.nullness.compatqual.NullableType; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * Extracts data from the MP4 container format. - */ +/** Extracts data from the MP4 container format. */ public final class Mp4Extractor implements Extractor, SeekMap { /** Factory for {@link Mp4Extractor} instances. */ public static final ExtractorsFactory FACTORY = () -> new Extractor[] {new Mp4Extractor()}; /** - * Flags controlling the behavior of the extractor. 
Possible flag value is {@link - * #FLAG_WORKAROUND_IGNORE_EDIT_LISTS}. + * Flags controlling the behavior of the extractor. Possible flag values are {@link + * #FLAG_WORKAROUND_IGNORE_EDIT_LISTS}, {@link #FLAG_READ_MOTION_PHOTO_METADATA} and {@link + * #FLAG_READ_SEF_DATA}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef( flag = true, - value = {FLAG_WORKAROUND_IGNORE_EDIT_LISTS}) + value = { + FLAG_WORKAROUND_IGNORE_EDIT_LISTS, + FLAG_READ_MOTION_PHOTO_METADATA, + FLAG_READ_SEF_DATA + }) public @interface Flags {} + /** Flag to ignore any edit lists in the stream. */ + public static final int FLAG_WORKAROUND_IGNORE_EDIT_LISTS = 1; + /** + * Flag to extract {@link MotionPhotoMetadata} from HEIC motion photos following the Google Photos + * Motion Photo File Format V1.1. + * + *
<p>
      As playback is not supported for motion photos, this flag should only be used for metadata + * retrieval use cases. + */ + public static final int FLAG_READ_MOTION_PHOTO_METADATA = 1 << 1; /** - * Flag to ignore any edit lists in the stream. + * Flag to extract {@link SlowMotionData} metadata from Samsung Extension Format (SEF) slow motion + * videos. */ - public static final int FLAG_WORKAROUND_IGNORE_EDIT_LISTS = 1; + public static final int FLAG_READ_SEF_DATA = 1 << 2; /** Parser states. */ @Documented @Retention(RetentionPolicy.SOURCE) - @IntDef({STATE_READING_ATOM_HEADER, STATE_READING_ATOM_PAYLOAD, STATE_READING_SAMPLE}) + @Target(TYPE_USE) + @IntDef({ + STATE_READING_ATOM_HEADER, + STATE_READING_ATOM_PAYLOAD, + STATE_READING_SAMPLE, + STATE_READING_SEF, + }) private @interface State {} private static final int STATE_READING_ATOM_HEADER = 0; private static final int STATE_READING_ATOM_PAYLOAD = 1; private static final int STATE_READING_SAMPLE = 2; + private static final int STATE_READING_SEF = 3; - /** Brand stored in the ftyp atom for QuickTime media. */ - private static final int BRAND_QUICKTIME = 0x71742020; + /** Supported file types. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({FILE_TYPE_MP4, FILE_TYPE_QUICKTIME, FILE_TYPE_HEIC}) + private @interface FileType {} + + private static final int FILE_TYPE_MP4 = 0; + private static final int FILE_TYPE_QUICKTIME = 1; + private static final int FILE_TYPE_HEIC = 2; /** * When seeking within the source, if the offset is greater than or equal to this value (or the @@ -100,12 +147,14 @@ public final class Mp4Extractor implements Extractor, SeekMap { private final ParsableByteArray atomHeader; private final ArrayDeque containerAtoms; + private final SefReader sefReader; + private final List slowMotionMetadataEntries; - @State private int parserState; + private @State int parserState; private int atomType; private long atomSize; private int atomHeaderBytesRead; - private ParsableByteArray atomData; + @Nullable private ParsableByteArray atomData; private int sampleTrackIndex; private int sampleBytesRead; @@ -115,16 +164,16 @@ public final class Mp4Extractor implements Extractor, SeekMap { // Extractor outputs. private ExtractorOutput extractorOutput; private Mp4Track[] tracks; - private long[][] accumulatedSampleSizes; + + private long @MonotonicNonNull [][] accumulatedSampleSizes; private int firstVideoTrackIndex; private long durationUs; - private boolean isQuickTime; + private @FileType int fileType; + @Nullable private MotionPhotoMetadata motionPhotoMetadata; - /** - * Creates a new extractor for unfragmented MP4 streams. - */ + /** Creates a new extractor for unfragmented MP4 streams. */ public Mp4Extractor() { - this(0); + this(/* flags= */ 0); } /** @@ -135,17 +184,24 @@ public Mp4Extractor() { */ public Mp4Extractor(@Flags int flags) { this.flags = flags; + parserState = + ((flags & FLAG_READ_SEF_DATA) != 0) ? 
STATE_READING_SEF : STATE_READING_ATOM_HEADER; + sefReader = new SefReader(); + slowMotionMetadataEntries = new ArrayList<>(); atomHeader = new ParsableByteArray(Atom.LONG_HEADER_SIZE); containerAtoms = new ArrayDeque<>(); nalStartCode = new ParsableByteArray(NalUnitUtil.NAL_START_CODE); nalLength = new ParsableByteArray(4); scratch = new ParsableByteArray(); sampleTrackIndex = C.INDEX_UNSET; + extractorOutput = ExtractorOutput.PLACEHOLDER; + tracks = new Mp4Track[0]; } @Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { - return Sniffer.sniffUnfragmented(input); + public boolean sniff(ExtractorInput input) throws IOException { + return Sniffer.sniffUnfragmented( + input, /* acceptHeic= */ (flags & FLAG_READ_MOTION_PHOTO_METADATA) != 0); } @Override @@ -162,9 +218,21 @@ public void seek(long position, long timeUs) { sampleBytesWritten = 0; sampleCurrentNalBytesRemaining = 0; if (position == 0) { - enterReadingAtomHeaderState(); - } else if (tracks != null) { - updateSampleIndices(timeUs); + // Reading the SEF data occurs before normal MP4 parsing. Therefore we can not transition to + // reading the atom header until that has completed. + if (parserState != STATE_READING_SEF) { + enterReadingAtomHeaderState(); + } else { + sefReader.reset(); + slowMotionMetadataEntries.clear(); + } + } else { + for (Mp4Track track : tracks) { + updateSampleIndex(track, timeUs); + if (track.trueHdSampleRechunker != null) { + track.trueHdSampleRechunker.reset(); + } + } } } @@ -174,8 +242,7 @@ public void release() { } @Override - public int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { while (true) { switch (parserState) { case STATE_READING_ATOM_HEADER: @@ -190,6 +257,8 @@ public int read(ExtractorInput input, PositionHolder seekPosition) break; case STATE_READING_SAMPLE: return readSample(input, seekPosition); + case STATE_READING_SEF: + return readSefData(input, seekPosition); default: throw new IllegalStateException(); } @@ -210,6 +279,22 @@ public long getDurationUs() { @Override public SeekPoints getSeekPoints(long timeUs) { + return getSeekPoints(timeUs, /* trackId= */ C.INDEX_UNSET); + } + + // Non-inherited public methods. + + /** + * Equivalent to {@link SeekMap#getSeekPoints(long)}, except it adds the {@code trackId} + * parameter. + * + * @param timeUs A seek time in microseconds. + * @param trackId The id of the track on which to seek for {@link SeekPoints}. May be {@link + * C#INDEX_UNSET} if the extractor is expected to define the strategy for generating {@link + * SeekPoints}. + * @return The corresponding seek points. + */ + public SeekPoints getSeekPoints(long timeUs, int trackId) { if (tracks.length == 0) { return new SeekPoints(SeekPoint.START); } @@ -219,9 +304,11 @@ public SeekPoints getSeekPoints(long timeUs) { long secondTimeUs = C.TIME_UNSET; long secondOffset = C.POSITION_UNSET; + // Note that the id matches the index in tracks. + int mainTrackIndex = trackId != C.INDEX_UNSET ? trackId : firstVideoTrackIndex; // If we have a video track, use it to establish one or two seek points. 
- if (firstVideoTrackIndex != C.INDEX_UNSET) { - TrackSampleTable sampleTable = tracks[firstVideoTrackIndex].sampleTable; + if (mainTrackIndex != C.INDEX_UNSET) { + TrackSampleTable sampleTable = tracks[mainTrackIndex].sampleTable; int sampleIndex = getSynchronizationSampleIndex(sampleTable, timeUs); if (sampleIndex == C.INDEX_UNSET) { return new SeekPoints(SeekPoint.START); @@ -241,13 +328,15 @@ public SeekPoints getSeekPoints(long timeUs) { firstOffset = Long.MAX_VALUE; } - // Take into account other tracks. - for (int i = 0; i < tracks.length; i++) { - if (i != firstVideoTrackIndex) { - TrackSampleTable sampleTable = tracks[i].sampleTable; - firstOffset = maybeAdjustSeekOffset(sampleTable, firstTimeUs, firstOffset); - if (secondTimeUs != C.TIME_UNSET) { - secondOffset = maybeAdjustSeekOffset(sampleTable, secondTimeUs, secondOffset); + if (trackId == C.INDEX_UNSET) { + // Take into account other tracks, but only if the caller has not specified a trackId. + for (int i = 0; i < tracks.length; i++) { + if (i != firstVideoTrackIndex) { + TrackSampleTable sampleTable = tracks[i].sampleTable; + firstOffset = maybeAdjustSeekOffset(sampleTable, firstTimeUs, firstOffset); + if (secondTimeUs != C.TIME_UNSET) { + secondOffset = maybeAdjustSeekOffset(sampleTable, secondTimeUs, secondOffset); + } } } } @@ -268,10 +357,11 @@ private void enterReadingAtomHeaderState() { atomHeaderBytesRead = 0; } - private boolean readAtomHeader(ExtractorInput input) throws IOException, InterruptedException { + private boolean readAtomHeader(ExtractorInput input) throws IOException { if (atomHeaderBytesRead == 0) { // Read the standard length atom header. - if (!input.readFully(atomHeader.data, 0, Atom.HEADER_SIZE, true)) { + if (!input.readFully(atomHeader.getData(), 0, Atom.HEADER_SIZE, true)) { + processEndOfStreamReadingAtomHeader(); return false; } atomHeaderBytesRead = Atom.HEADER_SIZE; @@ -283,15 +373,18 @@ private boolean readAtomHeader(ExtractorInput input) throws IOException, Interru if (atomSize == Atom.DEFINES_LARGE_SIZE) { // Read the large size. int headerBytesRemaining = Atom.LONG_HEADER_SIZE - Atom.HEADER_SIZE; - input.readFully(atomHeader.data, Atom.HEADER_SIZE, headerBytesRemaining); + input.readFully(atomHeader.getData(), Atom.HEADER_SIZE, headerBytesRemaining); atomHeaderBytesRead += headerBytesRemaining; atomSize = atomHeader.readUnsignedLongToLong(); } else if (atomSize == Atom.EXTENDS_TO_END_SIZE) { // The atom extends to the end of the file. Note that if the atom is within a container we can // work out its size even if the input length is unknown. 
long endPosition = input.getLength(); - if (endPosition == C.LENGTH_UNSET && !containerAtoms.isEmpty()) { - endPosition = containerAtoms.peek().endPosition; + if (endPosition == C.LENGTH_UNSET) { + @Nullable ContainerAtom containerAtom = containerAtoms.peek(); + if (containerAtom != null) { + endPosition = containerAtom.endPosition; + } } if (endPosition != C.LENGTH_UNSET) { atomSize = endPosition - input.getPosition() + atomHeaderBytesRead; @@ -299,7 +392,8 @@ private boolean readAtomHeader(ExtractorInput input) throws IOException, Interru } if (atomSize < atomHeaderBytesRead) { - throw new ParserException("Atom size less than header length (unsupported)."); + throw ParserException.createForUnsupportedContainerFeature( + "Atom size less than header length (unsupported)."); } if (shouldParseContainerAtom(atomType)) { @@ -319,10 +413,12 @@ private boolean readAtomHeader(ExtractorInput input) throws IOException, Interru // lengths greater than Integer.MAX_VALUE. Assertions.checkState(atomHeaderBytesRead == Atom.HEADER_SIZE); Assertions.checkState(atomSize <= Integer.MAX_VALUE); - atomData = new ParsableByteArray((int) atomSize); - System.arraycopy(atomHeader.data, 0, atomData.data, 0, Atom.HEADER_SIZE); + ParsableByteArray atomData = new ParsableByteArray((int) atomSize); + System.arraycopy(atomHeader.getData(), 0, atomData.getData(), 0, Atom.HEADER_SIZE); + this.atomData = atomData; parserState = STATE_READING_ATOM_PAYLOAD; } else { + processUnparsedAtom(input.getPosition() - atomHeaderBytesRead); atomData = null; parserState = STATE_READING_ATOM_PAYLOAD; } @@ -336,14 +432,15 @@ private boolean readAtomHeader(ExtractorInput input) throws IOException, Interru * restart loading at the position in {@code positionHolder}. Otherwise, the atom is read/skipped. */ private boolean readAtomPayload(ExtractorInput input, PositionHolder positionHolder) - throws IOException, InterruptedException { + throws IOException { long atomPayloadSize = atomSize - atomHeaderBytesRead; long atomEndPosition = input.getPosition() + atomPayloadSize; boolean seekRequired = false; + @Nullable ParsableByteArray atomData = this.atomData; if (atomData != null) { - input.readFully(atomData.data, atomHeaderBytesRead, (int) atomPayloadSize); + input.readFully(atomData.getData(), atomHeaderBytesRead, (int) atomPayloadSize); if (atomType == Atom.TYPE_ftyp) { - isQuickTime = processFtypAtom(atomData); + fileType = processFtypAtom(atomData); } else if (!containerAtoms.isEmpty()) { containerAtoms.peek().add(new Atom.LeafAtom(atomType, atomData)); } @@ -360,6 +457,15 @@ private boolean readAtomPayload(ExtractorInput input, PositionHolder positionHol return seekRequired && parserState != STATE_READING_SAMPLE; } + private @ReadResult int readSefData(ExtractorInput input, PositionHolder seekPosition) + throws IOException { + @ReadResult int result = sefReader.read(input, seekPosition, slowMotionMetadataEntries); + if (result == RESULT_SEEK && seekPosition.position == 0) { + enterReadingAtomHeaderState(); + } + return result; + } + private void processAtomEnded(long atomEndPosition) throws ParserException { while (!containerAtoms.isEmpty() && containerAtoms.peek().endPosition == atomEndPosition) { Atom.ContainerAtom containerAtom = containerAtoms.pop(); @@ -377,58 +483,85 @@ private void processAtomEnded(long atomEndPosition) throws ParserException { } } - /** - * Updates the stored track metadata to reflect the contents of the specified moov atom. 
- */ + /** Updates the stored track metadata to reflect the contents of the specified moov atom. */ private void processMoovAtom(ContainerAtom moov) throws ParserException { int firstVideoTrackIndex = C.INDEX_UNSET; long durationUs = C.TIME_UNSET; List tracks = new ArrayList<>(); // Process metadata. - Metadata udtaMetadata = null; + @Nullable Metadata udtaMetaMetadata = null; + @Nullable Metadata smtaMetadata = null; + boolean isQuickTime = fileType == FILE_TYPE_QUICKTIME; GaplessInfoHolder gaplessInfoHolder = new GaplessInfoHolder(); - Atom.LeafAtom udta = moov.getLeafAtomOfType(Atom.TYPE_udta); + @Nullable Atom.LeafAtom udta = moov.getLeafAtomOfType(Atom.TYPE_udta); if (udta != null) { - udtaMetadata = AtomParsers.parseUdta(udta, isQuickTime); - if (udtaMetadata != null) { - gaplessInfoHolder.setFromMetadata(udtaMetadata); + Pair<@NullableType Metadata, @NullableType Metadata> udtaMetadata = + AtomParsers.parseUdta(udta); + udtaMetaMetadata = udtaMetadata.first; + smtaMetadata = udtaMetadata.second; + if (udtaMetaMetadata != null) { + gaplessInfoHolder.setFromMetadata(udtaMetaMetadata); } } - Metadata mdtaMetadata = null; - Atom.ContainerAtom meta = moov.getContainerAtomOfType(Atom.TYPE_meta); + @Nullable Metadata mdtaMetadata = null; + @Nullable Atom.ContainerAtom meta = moov.getContainerAtomOfType(Atom.TYPE_meta); if (meta != null) { mdtaMetadata = AtomParsers.parseMdtaFromMeta(meta); } boolean ignoreEditLists = (flags & FLAG_WORKAROUND_IGNORE_EDIT_LISTS) != 0; - ArrayList trackSampleTables = - getTrackSampleTables(moov, gaplessInfoHolder, ignoreEditLists); + List trackSampleTables = + parseTraks( + moov, + gaplessInfoHolder, + /* duration= */ C.TIME_UNSET, + /* drmInitData= */ null, + ignoreEditLists, + isQuickTime, + /* modifyTrackFunction= */ track -> track); int trackCount = trackSampleTables.size(); for (int i = 0; i < trackCount; i++) { TrackSampleTable trackSampleTable = trackSampleTables.get(i); + if (trackSampleTable.sampleCount == 0) { + continue; + } Track track = trackSampleTable.track; long trackDurationUs = track.durationUs != C.TIME_UNSET ? track.durationUs : trackSampleTable.durationUs; - durationUs = Math.max(durationUs, trackDurationUs); - Mp4Track mp4Track = new Mp4Track(track, trackSampleTable, - extractorOutput.track(i, track.type)); - - // Each sample has up to three bytes of overhead for the start code that replaces its length. - // Allow ten source samples per output sample, like the platform extractor. - int maxInputSize = trackSampleTable.maximumSize + 3 * 10; - Format format = track.format.copyWithMaxInputSize(maxInputSize); + durationUs = max(durationUs, trackDurationUs); + Mp4Track mp4Track = + new Mp4Track(track, trackSampleTable, extractorOutput.track(i, track.type)); + + int maxInputSize; + if (MimeTypes.AUDIO_TRUEHD.equals(track.format.sampleMimeType)) { + // TrueHD groups samples per chunks of TRUEHD_RECHUNK_SAMPLE_COUNT samples. + maxInputSize = trackSampleTable.maximumSize * Ac3Util.TRUEHD_RECHUNK_SAMPLE_COUNT; + } else { + // Each sample has up to three bytes of overhead for the start code that replaces its + // length. Allow ten source samples per output sample, like the platform extractor. 
+ maxInputSize = trackSampleTable.maximumSize + 3 * 10; + } + + Format.Builder formatBuilder = track.format.buildUpon(); + formatBuilder.setMaxInputSize(maxInputSize); if (track.type == C.TRACK_TYPE_VIDEO && trackDurationUs > 0 && trackSampleTable.sampleCount > 1) { float frameRate = trackSampleTable.sampleCount / (trackDurationUs / 1000000f); - format = format.copyWithFrameRate(frameRate); + formatBuilder.setFrameRate(frameRate); } - format = - MetadataUtil.getFormatWithMetadata( - track.type, format, udtaMetadata, mdtaMetadata, gaplessInfoHolder); - mp4Track.trackOutput.format(format); + + MetadataUtil.setFormatGaplessInfo(track.type, gaplessInfoHolder, formatBuilder); + MetadataUtil.setFormatMetadata( + track.type, + udtaMetaMetadata, + mdtaMetadata, + formatBuilder, + smtaMetadata, + slowMotionMetadataEntries.isEmpty() ? null : new Metadata(slowMotionMetadataEntries)); + mp4Track.trackOutput.format(formatBuilder.build()); if (track.type == C.TRACK_TYPE_VIDEO && firstVideoTrackIndex == C.INDEX_UNSET) { firstVideoTrackIndex = tracks.size(); @@ -444,57 +577,22 @@ private void processMoovAtom(ContainerAtom moov) throws ParserException { extractorOutput.seekMap(this); } - private ArrayList getTrackSampleTables( - ContainerAtom moov, GaplessInfoHolder gaplessInfoHolder, boolean ignoreEditLists) - throws ParserException { - ArrayList trackSampleTables = new ArrayList<>(); - for (int i = 0; i < moov.containerChildren.size(); i++) { - Atom.ContainerAtom atom = moov.containerChildren.get(i); - if (atom.type != Atom.TYPE_trak) { - continue; - } - Track track = - AtomParsers.parseTrak( - atom, - moov.getLeafAtomOfType(Atom.TYPE_mvhd), - /* duration= */ C.TIME_UNSET, - /* drmInitData= */ null, - ignoreEditLists, - isQuickTime); - if (track == null) { - continue; - } - Atom.ContainerAtom stblAtom = - atom.getContainerAtomOfType(Atom.TYPE_mdia) - .getContainerAtomOfType(Atom.TYPE_minf) - .getContainerAtomOfType(Atom.TYPE_stbl); - TrackSampleTable trackSampleTable = AtomParsers.parseStbl(track, stblAtom, gaplessInfoHolder); - if (trackSampleTable.sampleCount == 0) { - continue; - } - trackSampleTables.add(trackSampleTable); - } - return trackSampleTables; - } - /** * Attempts to extract the next sample in the current mdat atom for the specified track. - *
<p>
      - * Returns {@link #RESULT_SEEK} if the source should be reloaded from the position in - * {@code positionHolder}. - *
<p>
      - * Returns {@link #RESULT_END_OF_INPUT} if no samples are left. Otherwise, returns - * {@link #RESULT_CONTINUE}. + * + *
<p>
      Returns {@link #RESULT_SEEK} if the source should be reloaded from the position in {@code + * positionHolder}. + * + *
<p>
      Returns {@link #RESULT_END_OF_INPUT} if no samples are left. Otherwise, returns {@link + * #RESULT_CONTINUE}. * * @param input The {@link ExtractorInput} from which to read data. * @param positionHolder If {@link #RESULT_SEEK} is returned, this holder is updated to hold the * position of the required data. * @return One of the {@code RESULT_*} flags in {@link Extractor}. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread is interrupted. */ - private int readSample(ExtractorInput input, PositionHolder positionHolder) - throws IOException, InterruptedException { + private int readSample(ExtractorInput input, PositionHolder positionHolder) throws IOException { long inputPosition = input.getPosition(); if (sampleTrackIndex == C.INDEX_UNSET) { sampleTrackIndex = getTrackIndexOfNextReadSample(inputPosition); @@ -507,6 +605,7 @@ private int readSample(ExtractorInput input, PositionHolder positionHolder) int sampleIndex = track.sampleIndex; long position = track.sampleTable.offsets[sampleIndex]; int sampleSize = track.sampleTable.sizes[sampleIndex]; + @Nullable TrueHdSampleRechunker trueHdSampleRechunker = track.trueHdSampleRechunker; long skipAmount = position - inputPosition + sampleBytesRead; if (skipAmount < 0 || skipAmount >= RELOAD_MINIMUM_SEEK_DISTANCE) { positionHolder.position = position; @@ -522,7 +621,7 @@ private int readSample(ExtractorInput input, PositionHolder positionHolder) if (track.track.nalUnitLengthFieldLength != 0) { // Zero the top three bytes of the array that we'll use to decode nal unit lengths, in case // they're only 1 or 2 bytes long. - byte[] nalLengthData = nalLength.data; + byte[] nalLengthData = nalLength.getData(); nalLengthData[0] = 0; nalLengthData[1] = 0; nalLengthData[2] = 0; @@ -539,7 +638,8 @@ private int readSample(ExtractorInput input, PositionHolder positionHolder) nalLength.setPosition(0); int nalLengthInt = nalLength.readInt(); if (nalLengthInt < 0) { - throw new ParserException("Invalid NAL length"); + throw ParserException.createForMalformedContainer( + "Invalid NAL length", /* cause= */ null); } sampleCurrentNalBytesRemaining = nalLengthInt; // Write a start code for the current NAL unit. 
@@ -563,7 +663,10 @@ private int readSample(ExtractorInput input, PositionHolder positionHolder) sampleBytesWritten += Ac4Util.SAMPLE_HEADER_SIZE; } sampleSize += Ac4Util.SAMPLE_HEADER_SIZE; + } else if (trueHdSampleRechunker != null) { + trueHdSampleRechunker.startSample(input); } + while (sampleBytesWritten < sampleSize) { int writtenBytes = trackOutput.sampleData(input, sampleSize - sampleBytesWritten, false); sampleBytesRead += writtenBytes; @@ -571,8 +674,20 @@ private int readSample(ExtractorInput input, PositionHolder positionHolder) sampleCurrentNalBytesRemaining -= writtenBytes; } } - trackOutput.sampleMetadata(track.sampleTable.timestampsUs[sampleIndex], - track.sampleTable.flags[sampleIndex], sampleSize, 0, null); + + long timeUs = track.sampleTable.timestampsUs[sampleIndex]; + @C.BufferFlags int flags = track.sampleTable.flags[sampleIndex]; + if (trueHdSampleRechunker != null) { + trueHdSampleRechunker.sampleMetadata( + trackOutput, timeUs, flags, sampleSize, /* offset= */ 0, /* cryptoData= */ null); + if (sampleIndex + 1 == track.sampleTable.sampleCount) { + trueHdSampleRechunker.outputPendingSampleMetadata(trackOutput, /* cryptoData= */ null); + } + } else { + trackOutput.sampleMetadata( + timeUs, flags, sampleSize, /* offset= */ 0, /* cryptoData= */ null); + } + track.sampleIndex++; sampleTrackIndex = C.INDEX_UNSET; sampleBytesRead = 0; @@ -609,7 +724,7 @@ private int getTrackIndexOfNextReadSample(long inputPosition) { continue; } long sampleOffset = track.sampleTable.offsets[sampleIndex]; - long sampleAccumulatedBytes = accumulatedSampleSizes[trackIndex][sampleIndex]; + long sampleAccumulatedBytes = castNonNull(accumulatedSampleSizes)[trackIndex][sampleIndex]; long skipAmount = sampleOffset - inputPosition; boolean requiresReload = skipAmount < 0 || skipAmount >= RELOAD_MINIMUM_SEEK_DISTANCE; if ((!requiresReload && preferredRequiresReload) @@ -632,44 +747,50 @@ private int getTrackIndexOfNextReadSample(long inputPosition) { : minAccumulatedBytesTrackIndex; } - /** - * Updates every track's sample index to point its latest sync sample before/at {@code timeUs}. - */ - private void updateSampleIndices(long timeUs) { - for (Mp4Track track : tracks) { - TrackSampleTable sampleTable = track.sampleTable; - int sampleIndex = sampleTable.getIndexOfEarlierOrEqualSynchronizationSample(timeUs); - if (sampleIndex == C.INDEX_UNSET) { - // Handle the case where the requested time is before the first synchronization sample. - sampleIndex = sampleTable.getIndexOfLaterOrEqualSynchronizationSample(timeUs); - } - track.sampleIndex = sampleIndex; + /** Updates a track's sample index to point its latest sync sample before/at {@code timeUs}. */ + private void updateSampleIndex(Mp4Track track, long timeUs) { + TrackSampleTable sampleTable = track.sampleTable; + int sampleIndex = sampleTable.getIndexOfEarlierOrEqualSynchronizationSample(timeUs); + if (sampleIndex == C.INDEX_UNSET) { + // Handle the case where the requested time is before the first synchronization sample. + sampleIndex = sampleTable.getIndexOfLaterOrEqualSynchronizationSample(timeUs); } + track.sampleIndex = sampleIndex; } - /** - * Possibly skips the version and flags fields (1+3 byte) of a full meta atom of the {@code - * input}. - * - *
<p>
      Atoms of type {@link Atom#TYPE_meta} are defined to be full atoms which have four additional - * bytes for a version and a flags field (see 4.2 'Object Structure' in ISO/IEC 14496-12:2005). - * QuickTime do not have such a full box structure. Since some of these files are encoded wrongly, - * we can't rely on the file type though. Instead we must check the 8 bytes after the common - * header bytes ourselves. - */ - private void maybeSkipRemainingMetaAtomHeaderBytes(ExtractorInput input) - throws IOException, InterruptedException { + /** Processes the end of stream in case there is not atom left to read. */ + private void processEndOfStreamReadingAtomHeader() { + if (fileType == FILE_TYPE_HEIC && (flags & FLAG_READ_MOTION_PHOTO_METADATA) != 0) { + // Add image track and prepare media. + TrackOutput trackOutput = extractorOutput.track(/* id= */ 0, C.TRACK_TYPE_IMAGE); + @Nullable + Metadata metadata = motionPhotoMetadata == null ? null : new Metadata(motionPhotoMetadata); + trackOutput.format(new Format.Builder().setMetadata(metadata).build()); + extractorOutput.endTracks(); + extractorOutput.seekMap(new SeekMap.Unseekable(/* durationUs= */ C.TIME_UNSET)); + } + } + + private void maybeSkipRemainingMetaAtomHeaderBytes(ExtractorInput input) throws IOException { scratch.reset(8); - // Peek the next 8 bytes which can be either - // (iso) [1 byte version + 3 bytes flags][4 byte size of next atom] - // (qt) [4 byte size of next atom ][4 byte hdlr atom type ] - // In case of (iso) we need to skip the next 4 bytes. - input.peekFully(scratch.data, 0, 8); - scratch.skipBytes(4); - if (scratch.readInt() == Atom.TYPE_hdlr) { - input.resetPeekPosition(); - } else { - input.skipFully(4); + input.peekFully(scratch.getData(), 0, 8); + AtomParsers.maybeSkipRemainingMetaAtomHeaderBytes(scratch); + input.skipFully(scratch.getPosition()); + input.resetPeekPosition(); + } + + /** Processes an atom whose payload does not need to be parsed. */ + private void processUnparsedAtom(long atomStartPosition) { + if (atomType == Atom.TYPE_mpvd) { + // The input is an HEIC motion photo following the Google Photos Motion Photo File Format + // V1.1. + motionPhotoMetadata = + new MotionPhotoMetadata( + /* photoStartPosition= */ 0, + /* photoSize= */ atomStartPosition, + /* photoPresentationTimestampUs= */ C.TIME_UNSET, + /* videoStartPosition= */ atomStartPosition + atomHeaderBytesRead, + /* videoSize= */ atomSize - atomHeaderBytesRead); } } @@ -728,7 +849,7 @@ private static long maybeAdjustSeekOffset( return offset; } long sampleOffset = sampleTable.offsets[sampleIndex]; - return Math.min(sampleOffset, offset); + return min(sampleOffset, offset); } /** @@ -752,24 +873,37 @@ private static int getSynchronizationSampleIndex(TrackSampleTable sampleTable, l } /** - * Process an ftyp atom to determine whether the media is QuickTime. + * Process an ftyp atom to determine the corresponding {@link FileType}. * * @param atomData The ftyp atom data. - * @return Whether the media is QuickTime. + * @return The {@link FileType}. 
*/ - private static boolean processFtypAtom(ParsableByteArray atomData) { + private static @FileType int processFtypAtom(ParsableByteArray atomData) { atomData.setPosition(Atom.HEADER_SIZE); int majorBrand = atomData.readInt(); - if (majorBrand == BRAND_QUICKTIME) { - return true; + @FileType int fileType = brandToFileType(majorBrand); + if (fileType != FILE_TYPE_MP4) { + return fileType; } atomData.skipBytes(4); // minor_version while (atomData.bytesLeft() > 0) { - if (atomData.readInt() == BRAND_QUICKTIME) { - return true; + fileType = brandToFileType(atomData.readInt()); + if (fileType != FILE_TYPE_MP4) { + return fileType; } } - return false; + return FILE_TYPE_MP4; + } + + private static @FileType int brandToFileType(int brand) { + switch (brand) { + case BRAND_QUICKTIME: + return FILE_TYPE_QUICKTIME; + case BRAND_HEIC: + return FILE_TYPE_HEIC; + default: + return FILE_TYPE_MP4; + } } /** Returns whether the extractor should decode a leaf atom with type {@code atom}. */ @@ -810,6 +944,7 @@ private static final class Mp4Track { public final Track track; public final TrackSampleTable sampleTable; public final TrackOutput trackOutput; + @Nullable public final TrueHdSampleRechunker trueHdSampleRechunker; public int sampleIndex; @@ -817,8 +952,10 @@ public Mp4Track(Track track, TrackSampleTable sampleTable, TrackOutput trackOutp this.track = track; this.sampleTable = sampleTable; this.trackOutput = trackOutput; + trueHdSampleRechunker = + MimeTypes.AUDIO_TRUEHD.equals(track.format.sampleMimeType) + ? new TrueHdSampleRechunker() + : null; } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/PsshAtomUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/PsshAtomUtil.java index b9ecaf174c..3be0946e94 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/PsshAtomUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/PsshAtomUtil.java @@ -21,9 +21,7 @@ import java.nio.ByteBuffer; import java.util.UUID; -/** - * Utility methods for handling PSSH atoms. - */ +/** Utility methods for handling PSSH atoms. */ public final class PsshAtomUtil { private static final String TAG = "PsshAtomUtil"; @@ -49,8 +47,6 @@ public static byte[] buildPsshAtom(UUID systemId, @Nullable byte[] data) { * @param data The scheme specific data. * @return The PSSH atom. */ - // dereference of possibly-null reference keyId - @SuppressWarnings({"ParameterNotNullable", "nullness:dereference.of.nullable"}) public static byte[] buildPsshAtom( UUID systemId, @Nullable UUID[] keyIds, @Nullable byte[] data) { int dataLength = data != null ? data.length : 0; @@ -97,8 +93,9 @@ public static boolean isPsshAtom(byte[] data) { * @return The parsed UUID. Null if the input is not a valid PSSH atom, or if the PSSH atom has an * unsupported version. */ - public static @Nullable UUID parseUuid(byte[] atom) { - PsshAtom parsedAtom = parsePsshAtom(atom); + @Nullable + public static UUID parseUuid(byte[] atom) { + @Nullable PsshAtom parsedAtom = parsePsshAtom(atom); if (parsedAtom == null) { return null; } @@ -107,15 +104,15 @@ public static boolean isPsshAtom(byte[] data) { /** * Parses the version from a PSSH atom. Version 0 and 1 PSSH atoms are supported. - *
<p>
- * The version is only parsed if the data is a valid PSSH atom.
+ *
+ * <p>
      The version is only parsed if the data is a valid PSSH atom. * * @param atom The atom to parse. * @return The parsed version. -1 if the input is not a valid PSSH atom, or if the PSSH atom has * an unsupported version. */ public static int parseVersion(byte[] atom) { - PsshAtom parsedAtom = parsePsshAtom(atom); + @Nullable PsshAtom parsedAtom = parsePsshAtom(atom); if (parsedAtom == null) { return -1; } @@ -133,12 +130,13 @@ public static int parseVersion(byte[] atom) { * @return The parsed scheme specific data. Null if the input is not a valid PSSH atom, or if the * PSSH atom has an unsupported version, or if the PSSH atom does not match the passed UUID. */ - public static @Nullable byte[] parseSchemeSpecificData(byte[] atom, UUID uuid) { - PsshAtom parsedAtom = parsePsshAtom(atom); + @Nullable + public static byte[] parseSchemeSpecificData(byte[] atom, UUID uuid) { + @Nullable PsshAtom parsedAtom = parsePsshAtom(atom); if (parsedAtom == null) { return null; } - if (uuid != null && !uuid.equals(parsedAtom.uuid)) { + if (!uuid.equals(parsedAtom.uuid)) { Log.w(TAG, "UUID mismatch. Expected: " + uuid + ", got: " + parsedAtom.uuid + "."); return null; } @@ -153,7 +151,8 @@ public static int parseVersion(byte[] atom) { * has an unsupported version. */ // TODO: Support parsing of the key ids for version 1 PSSH atoms. - private static @Nullable PsshAtom parsePsshAtom(byte[] atom) { + @Nullable + private static PsshAtom parsePsshAtom(byte[] atom) { ParsableByteArray atomData = new ParsableByteArray(atom); if (atomData.limit() < Atom.FULL_HEADER_SIZE + 16 /* UUID */ + 4 /* DataSize */) { // Data too short. @@ -202,7 +201,5 @@ public PsshAtom(UUID uuid, int version, byte[] schemeData) { this.version = version; this.schemeData = schemeData; } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/SefReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/SefReader.java new file mode 100644 index 0000000000..7d89e79f55 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/SefReader.java @@ -0,0 +1,280 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.extractor.mp4; + +import static com.google.android.exoplayer2.extractor.Extractor.RESULT_SEEK; +import static java.lang.annotation.ElementType.TYPE_USE; + +import androidx.annotation.IntDef; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.extractor.Extractor; +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.PositionHolder; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.metadata.mp4.SlowMotionData; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.common.base.Splitter; +import java.io.IOException; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.util.ArrayList; +import java.util.List; + +/** + * Reads Samsung Extension Format (SEF) metadata. + * + *
<p>
      To be used in conjunction with {@link Mp4Extractor}. + */ +/* package */ final class SefReader { + + /** Reader states. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + STATE_SHOULD_CHECK_FOR_SEF, + STATE_CHECKING_FOR_SEF, + STATE_READING_SDRS, + STATE_READING_SEF_DATA + }) + private @interface State {} + + private static final int STATE_SHOULD_CHECK_FOR_SEF = 0; + private static final int STATE_CHECKING_FOR_SEF = 1; + private static final int STATE_READING_SDRS = 2; + private static final int STATE_READING_SEF_DATA = 3; + + /** Supported data types. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + TYPE_SLOW_MOTION_DATA, + TYPE_SUPER_SLOW_MOTION_DATA, + TYPE_SUPER_SLOW_MOTION_BGM, + TYPE_SUPER_SLOW_MOTION_EDIT_DATA, + TYPE_SUPER_SLOW_DEFLICKERING_ON + }) + private @interface DataType {} + + private static final int TYPE_SLOW_MOTION_DATA = 0x0890; // 2192 + private static final int TYPE_SUPER_SLOW_MOTION_DATA = 0x0b00; // 2816 + private static final int TYPE_SUPER_SLOW_MOTION_BGM = 0x0b01; // 2817 + private static final int TYPE_SUPER_SLOW_MOTION_EDIT_DATA = 0x0b03; // 2819 + private static final int TYPE_SUPER_SLOW_DEFLICKERING_ON = 0x0b04; // 2820 + + private static final String TAG = "SefReader"; + + /** + * Hex representation of `SEFT` (in ASCII). + * + *
<p>
      This is the last 4 bytes of a file that has Samsung Extension Format (SEF) data. + */ + private static final int SAMSUNG_TAIL_SIGNATURE = 0x53454654; + /** Start signature (4 bytes), SEF version (4 bytes), SDR count (4 bytes). */ + private static final int TAIL_HEADER_LENGTH = 12; + /** Tail offset (4 bytes), tail signature (4 bytes). */ + private static final int TAIL_FOOTER_LENGTH = 8; + + private static final int LENGTH_OF_ONE_SDR = 12; + private static final Splitter COLON_SPLITTER = Splitter.on(':'); + private static final Splitter ASTERISK_SPLITTER = Splitter.on('*'); + + private final List dataReferences; + private @State int readerState; + private int tailLength; + + public SefReader() { + dataReferences = new ArrayList<>(); + readerState = STATE_SHOULD_CHECK_FOR_SEF; + } + + public void reset() { + dataReferences.clear(); + readerState = STATE_SHOULD_CHECK_FOR_SEF; + } + + public @Extractor.ReadResult int read( + ExtractorInput input, + PositionHolder seekPosition, + List slowMotionMetadataEntries) + throws IOException { + switch (readerState) { + case STATE_SHOULD_CHECK_FOR_SEF: + long inputLength = input.getLength(); + seekPosition.position = + inputLength == C.LENGTH_UNSET || inputLength < TAIL_FOOTER_LENGTH + ? 0 + : inputLength - TAIL_FOOTER_LENGTH; + readerState = STATE_CHECKING_FOR_SEF; + break; + case STATE_CHECKING_FOR_SEF: + checkForSefData(input, seekPosition); + break; + case STATE_READING_SDRS: + readSdrs(input, seekPosition); + break; + case STATE_READING_SEF_DATA: + readSefData(input, slowMotionMetadataEntries); + seekPosition.position = 0; + break; + default: + throw new IllegalStateException(); + } + return RESULT_SEEK; + } + + private void checkForSefData(ExtractorInput input, PositionHolder seekPosition) + throws IOException { + ParsableByteArray scratch = new ParsableByteArray(/* limit= */ TAIL_FOOTER_LENGTH); + input.readFully(scratch.getData(), /* offset= */ 0, /* length= */ TAIL_FOOTER_LENGTH); + tailLength = scratch.readLittleEndianInt() + TAIL_FOOTER_LENGTH; + if (scratch.readInt() != SAMSUNG_TAIL_SIGNATURE) { + seekPosition.position = 0; + return; + } + + // input.getPosition is at the very end of the tail, so jump forward by tailLength, but + // account for the tail header, which needs to be ignored. + seekPosition.position = input.getPosition() - (tailLength - TAIL_HEADER_LENGTH); + readerState = STATE_READING_SDRS; + } + + private void readSdrs(ExtractorInput input, PositionHolder seekPosition) throws IOException { + long streamLength = input.getLength(); + int sdrsLength = tailLength - TAIL_HEADER_LENGTH - TAIL_FOOTER_LENGTH; + ParsableByteArray scratch = new ParsableByteArray(/* limit= */ sdrsLength); + input.readFully(scratch.getData(), /* offset= */ 0, /* length= */ sdrsLength); + + for (int i = 0; i < sdrsLength / LENGTH_OF_ONE_SDR; i++) { + scratch.skipBytes(2); // SDR data sub info flag and reserved bits (2). + @DataType int dataType = scratch.readLittleEndianShort(); + switch (dataType) { + case TYPE_SLOW_MOTION_DATA: + case TYPE_SUPER_SLOW_MOTION_DATA: + case TYPE_SUPER_SLOW_MOTION_BGM: + case TYPE_SUPER_SLOW_MOTION_EDIT_DATA: + case TYPE_SUPER_SLOW_DEFLICKERING_ON: + // The read int is the distance from the tail info to the start of the metadata. + // Calculated as an offset from the start by working backwards. 
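// Worked example with hypothetical numbers, not part of the patch: for a 1_000_000-byte
// file whose SEF tail occupies the final 120 bytes, the tail starts at 999_880, so a
// stored distance of 500 resolves to startOffset = 1_000_000 - 120 - 500 = 999_380,
// i.e. 500 bytes before the start of the tail block.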
+ long startOffset = streamLength - tailLength - scratch.readLittleEndianInt(); + int size = scratch.readLittleEndianInt(); + dataReferences.add(new DataReference(dataType, startOffset, size)); + break; + default: + scratch.skipBytes(8); // startPosition (4), size (4). + } + } + + if (dataReferences.isEmpty()) { + seekPosition.position = 0; + return; + } + + readerState = STATE_READING_SEF_DATA; + seekPosition.position = dataReferences.get(0).startOffset; + } + + private void readSefData(ExtractorInput input, List slowMotionMetadataEntries) + throws IOException { + long dataStartOffset = input.getPosition(); + int totalDataLength = (int) (input.getLength() - input.getPosition() - tailLength); + ParsableByteArray data = new ParsableByteArray(/* limit= */ totalDataLength); + input.readFully(data.getData(), 0, totalDataLength); + + for (int i = 0; i < dataReferences.size(); i++) { + DataReference dataReference = dataReferences.get(i); + int intendedPosition = (int) (dataReference.startOffset - dataStartOffset); + data.setPosition(intendedPosition); + + // The data type is derived from the name because the SEF format has inconsistent data type + // values. + data.skipBytes(4); // data type (2), data sub info (2). + int nameLength = data.readLittleEndianInt(); + String name = data.readString(nameLength); + @DataType int dataType = nameToDataType(name); + + int remainingDataLength = dataReference.size - (8 + nameLength); + switch (dataType) { + case TYPE_SLOW_MOTION_DATA: + slowMotionMetadataEntries.add(readSlowMotionData(data, remainingDataLength)); + break; + case TYPE_SUPER_SLOW_MOTION_DATA: + case TYPE_SUPER_SLOW_MOTION_BGM: + case TYPE_SUPER_SLOW_MOTION_EDIT_DATA: + case TYPE_SUPER_SLOW_DEFLICKERING_ON: + break; + default: + throw new IllegalStateException(); + } + } + } + + private static SlowMotionData readSlowMotionData(ParsableByteArray data, int dataLength) + throws ParserException { + List segments = new ArrayList<>(); + String dataString = data.readString(dataLength); + List segmentStrings = ASTERISK_SPLITTER.splitToList(dataString); + for (int i = 0; i < segmentStrings.size(); i++) { + List values = COLON_SPLITTER.splitToList(segmentStrings.get(i)); + if (values.size() != 3) { + throw ParserException.createForMalformedContainer(/* message= */ null, /* cause= */ null); + } + try { + long startTimeMs = Long.parseLong(values.get(0)); + long endTimeMs = Long.parseLong(values.get(1)); + int speedMode = Integer.parseInt(values.get(2)); + int speedDivisor = 1 << (speedMode - 1); + segments.add(new SlowMotionData.Segment(startTimeMs, endTimeMs, speedDivisor)); + } catch (NumberFormatException e) { + throw ParserException.createForMalformedContainer(/* message= */ null, /* cause= */ e); + } + } + return new SlowMotionData(segments); + } + + private static @DataType int nameToDataType(String name) throws ParserException { + switch (name) { + case "SlowMotion_Data": + return TYPE_SLOW_MOTION_DATA; + case "Super_SlowMotion_Data": + return TYPE_SUPER_SLOW_MOTION_DATA; + case "Super_SlowMotion_BGM": + return TYPE_SUPER_SLOW_MOTION_BGM; + case "Super_SlowMotion_Edit_Data": + return TYPE_SUPER_SLOW_MOTION_EDIT_DATA; + case "Super_SlowMotion_Deflickering_On": + return TYPE_SUPER_SLOW_DEFLICKERING_ON; + default: + throw ParserException.createForMalformedContainer("Invalid SEF name", /* cause= */ null); + } + } + + private static final class DataReference { + public final @DataType int dataType; + public final long startOffset; + public final int size; + + public DataReference(@DataType int dataType, 
long startOffset, int size) { + this.dataType = dataType; + this.startOffset = startOffset; + this.size = size; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Sniffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Sniffer.java index dac74bfe2b..2c4a847ee5 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Sniffer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Sniffer.java @@ -26,6 +26,11 @@ */ /* package */ final class Sniffer { + /** Brand stored in the ftyp atom for QuickTime media. */ + public static final int BRAND_QUICKTIME = 0x71742020; + /** Brand stored in the ftyp atom for HEIC media. */ + public static final int BRAND_HEIC = 0x68656963; + /** The maximum number of bytes to peek when sniffing. */ private static final int SEARCH_LENGTH = 4 * 1024; @@ -37,6 +42,7 @@ 0x69736f34, // iso4 0x69736f35, // iso5 0x69736f36, // iso6 + 0x69736f39, // iso9 0x61766331, // avc1 0x68766331, // hvc1 0x68657631, // hev1 @@ -54,9 +60,11 @@ 0x66347620, // f4v[space] 0x6b646469, // kddi 0x4d345650, // M4VP - 0x71742020, // qt[space][space], Apple QuickTime + BRAND_QUICKTIME, // qt[space][space] 0x4d534e56, // MSNV, Sony PSP 0x64627931, // dby1, Dolby Vision + 0x69736d6c, // isml + 0x70696666, // piff }; /** @@ -66,11 +74,9 @@ * @param input The extractor input from which to peek data. The peek position will be modified. * @return Whether the input appears to be in the fragmented MP4 format. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread has been interrupted. */ - public static boolean sniffFragmented(ExtractorInput input) - throws IOException, InterruptedException { - return sniffInternal(input, true); + public static boolean sniffFragmented(ExtractorInput input) throws IOException { + return sniffInternal(input, /* fragmented= */ true, /* acceptHeic= */ false); } /** @@ -80,18 +86,33 @@ public static boolean sniffFragmented(ExtractorInput input) * @param input The extractor input from which to peek data. The peek position will be modified. * @return Whether the input appears to be in the unfragmented MP4 format. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread has been interrupted. */ - public static boolean sniffUnfragmented(ExtractorInput input) - throws IOException, InterruptedException { - return sniffInternal(input, false); + public static boolean sniffUnfragmented(ExtractorInput input) throws IOException { + return sniffInternal(input, /* fragmented= */ false, /* acceptHeic= */ false); } - private static boolean sniffInternal(ExtractorInput input, boolean fragmented) - throws IOException, InterruptedException { + /** + * Returns whether data peeked from the current position in {@code input} is consistent with the + * input being an unfragmented MP4 file. + * + * @param input The extractor input from which to peek data. The peek position will be modified. + * @param acceptHeic Whether {@code true} should be returned for HEIC photos. + * @return Whether the input appears to be in the unfragmented MP4 format. + * @throws IOException If an error occurs reading from the input. 
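// Illustrative sketch, not part of the patch: a condensed model of the brand test that
// sniffInternal applies through isCompatibleBrand, including the new opt-in HEIC
// acceptance. The real method also consults the COMPATIBLE_BRANDS table; class and
// method names below are hypothetical.
final class SniffBrandSketch {
  static final int BRAND_HEIC = 0x68656963; // 'heic', as declared in Sniffer.java.

  static boolean acceptsBrand(int brand, boolean acceptHeic) {
    if (brand >>> 8 == 0x00336770) {
      return true; // Any brand starting with '3gp'.
    }
    return acceptHeic && brand == BRAND_HEIC; // HEIC is only accepted when explicitly allowed.
  }

  public static void main(String[] args) {
    System.out.println(acceptsBrand(0x33677034, false)); // '3gp4' -> true
    System.out.println(acceptsBrand(BRAND_HEIC, false)); // 'heic' -> false
    System.out.println(acceptsBrand(BRAND_HEIC, true)); // 'heic' -> true
  }
}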
+ */ + public static boolean sniffUnfragmented(ExtractorInput input, boolean acceptHeic) + throws IOException { + return sniffInternal(input, /* fragmented= */ false, acceptHeic); + } + + private static boolean sniffInternal(ExtractorInput input, boolean fragmented, boolean acceptHeic) + throws IOException { long inputLength = input.getLength(); - int bytesToSearch = (int) (inputLength == C.LENGTH_UNSET || inputLength > SEARCH_LENGTH - ? SEARCH_LENGTH : inputLength); + int bytesToSearch = + (int) + (inputLength == C.LENGTH_UNSET || inputLength > SEARCH_LENGTH + ? SEARCH_LENGTH + : inputLength); ParsableByteArray buffer = new ParsableByteArray(64); int bytesSearched = 0; @@ -101,13 +122,19 @@ private static boolean sniffInternal(ExtractorInput input, boolean fragmented) // Read an atom header. int headerSize = Atom.HEADER_SIZE; buffer.reset(headerSize); - input.peekFully(buffer.data, 0, headerSize); + boolean success = + input.peekFully(buffer.getData(), 0, headerSize, /* allowEndOfInput= */ true); + if (!success) { + // We've reached the end of the file. + break; + } long atomSize = buffer.readUnsignedInt(); int atomType = buffer.readInt(); if (atomSize == Atom.DEFINES_LARGE_SIZE) { // Read the large atom size. headerSize = Atom.LONG_HEADER_SIZE; - input.peekFully(buffer.data, Atom.HEADER_SIZE, Atom.LONG_HEADER_SIZE - Atom.HEADER_SIZE); + input.peekFully( + buffer.getData(), Atom.HEADER_SIZE, Atom.LONG_HEADER_SIZE - Atom.HEADER_SIZE); buffer.setLimit(Atom.LONG_HEADER_SIZE); atomSize = buffer.readLong(); } else if (atomSize == Atom.EXTENDS_TO_END_SIZE) { @@ -155,13 +182,13 @@ private static boolean sniffInternal(ExtractorInput input, boolean fragmented) return false; } buffer.reset(atomDataSize); - input.peekFully(buffer.data, 0, atomDataSize); + input.peekFully(buffer.getData(), 0, atomDataSize); int brandsCount = atomDataSize / 4; for (int i = 0; i < brandsCount; i++) { if (i == 1) { // This index refers to the minorVersion, not a brand, so skip it. buffer.skipBytes(4); - } else if (isCompatibleBrand(buffer.readInt())) { + } else if (isCompatibleBrand(buffer.readInt(), acceptHeic)) { foundGoodFileType = true; break; } @@ -181,9 +208,11 @@ private static boolean sniffInternal(ExtractorInput input, boolean fragmented) /** * Returns whether {@code brand} is an ftyp atom brand that is compatible with the MP4 extractors. */ - private static boolean isCompatibleBrand(int brand) { - // Accept all brands starting '3gp'. + private static boolean isCompatibleBrand(int brand, boolean acceptHeic) { if (brand >>> 8 == 0x00336770) { + // Brand starts with '3gp'. + return true; + } else if (brand == BRAND_HEIC && acceptHeic) { return true; } for (int compatibleBrand : COMPATIBLE_BRANDS) { @@ -197,5 +226,4 @@ private static boolean isCompatibleBrand(int brand) { private Sniffer() { // Prevent instantiation. 
} - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Track.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Track.java index 0a21ddd3a3..138c1e3ebf 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Track.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/Track.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.extractor.mp4; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; @@ -22,10 +24,9 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; -/** - * Encapsulates information describing an MP4 track. - */ +/** Encapsulates information describing an MP4 track. */ public final class Track { /** @@ -34,61 +35,44 @@ public final class Track { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({TRANSFORMATION_NONE, TRANSFORMATION_CEA608_CDAT}) public @interface Transformation {} - /** - * A no-op sample transformation. - */ + /** A no-op sample transformation. */ public static final int TRANSFORMATION_NONE = 0; - /** - * A transformation for caption samples in cdat atoms. - */ + /** A transformation for caption samples in cdat atoms. */ public static final int TRANSFORMATION_CEA608_CDAT = 1; - /** - * The track identifier. - */ + /** The track identifier. */ public final int id; /** * One of {@link C#TRACK_TYPE_AUDIO}, {@link C#TRACK_TYPE_VIDEO} and {@link C#TRACK_TYPE_TEXT}. */ - public final int type; + public final @C.TrackType int type; - /** - * The track timescale, defined as the number of time units that pass in one second. - */ + /** The track timescale, defined as the number of time units that pass in one second. */ public final long timescale; - /** - * The movie timescale. - */ + /** The movie timescale. */ public final long movieTimescale; - /** - * The duration of the track in microseconds, or {@link C#TIME_UNSET} if unknown. - */ + /** The duration of the track in microseconds, or {@link C#TIME_UNSET} if unknown. */ public final long durationUs; - /** - * The format. - */ + /** The format. */ public final Format format; /** * One of {@code TRANSFORMATION_*}. Defines the transformation to apply before outputting each * sample. */ - @Transformation public final int sampleTransformation; + public final @Transformation int sampleTransformation; - /** - * Durations of edit list segments in the movie timescale. Null if there is no edit list. - */ + /** Durations of edit list segments in the movie timescale. Null if there is no edit list. */ @Nullable public final long[] editListDurations; - /** - * Media times for edit list segments in the track timescale. Null if there is no edit list. - */ + /** Media times for edit list segments in the track timescale. Null if there is no edit list. 
*/ @Nullable public final long[] editListMediaTimes; /** @@ -99,10 +83,18 @@ public final class Track { @Nullable private final TrackEncryptionBox[] sampleDescriptionEncryptionBoxes; - public Track(int id, int type, long timescale, long movieTimescale, long durationUs, - Format format, @Transformation int sampleTransformation, - @Nullable TrackEncryptionBox[] sampleDescriptionEncryptionBoxes, int nalUnitLengthFieldLength, - @Nullable long[] editListDurations, @Nullable long[] editListMediaTimes) { + public Track( + int id, + @C.TrackType int type, + long timescale, + long movieTimescale, + long durationUs, + Format format, + @Transformation int sampleTransformation, + @Nullable TrackEncryptionBox[] sampleDescriptionEncryptionBoxes, + int nalUnitLengthFieldLength, + @Nullable long[] editListDurations, + @Nullable long[] editListMediaTimes) { this.id = id; this.type = type; this.timescale = timescale; @@ -125,12 +117,11 @@ public Track(int id, int type, long timescale, long movieTimescale, long duratio */ @Nullable public TrackEncryptionBox getSampleDescriptionEncryptionBox(int sampleDescriptionIndex) { - return sampleDescriptionEncryptionBoxes == null ? null + return sampleDescriptionEncryptionBoxes == null + ? null : sampleDescriptionEncryptionBoxes[sampleDescriptionIndex]; } - // incompatible types in argument. - @SuppressWarnings("nullness:argument.type.incompatible") public Track copyWithFormat(Format format) { return new Track( id, diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/TrackEncryptionBox.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/TrackEncryptionBox.java index a35d211aa4..f5df9f7db1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/TrackEncryptionBox.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/TrackEncryptionBox.java @@ -22,21 +22,17 @@ import com.google.android.exoplayer2.util.Log; /** - * Encapsulates information parsed from a track encryption (tenc) box or sample group description + * Encapsulates information parsed from a track encryption (tenc) box or sample group description * (sgpd) box in an MP4 stream. */ public final class TrackEncryptionBox { private static final String TAG = "TrackEncryptionBox"; - /** - * Indicates the encryption state of the samples in the sample group. - */ + /** Indicates the encryption state of the samples in the sample group. */ public final boolean isEncrypted; - /** - * The protection scheme type, as defined by the 'schm' box, or null if unknown. - */ + /** The protection scheme type, as defined by the 'schm' box, or null if unknown. */ @Nullable public final String schemeType; /** @@ -76,12 +72,12 @@ public TrackEncryptionBox( this.schemeType = schemeType; this.perSampleIvSize = perSampleIvSize; this.defaultInitializationVector = defaultInitializationVector; - cryptoData = new TrackOutput.CryptoData(schemeToCryptoMode(schemeType), keyId, - defaultEncryptedBlocks, defaultClearBlocks); + cryptoData = + new TrackOutput.CryptoData( + schemeToCryptoMode(schemeType), keyId, defaultEncryptedBlocks, defaultClearBlocks); } - @C.CryptoMode - private static int schemeToCryptoMode(@Nullable String schemeType) { + private static @C.CryptoMode int schemeToCryptoMode(@Nullable String schemeType) { if (schemeType == null) { // If unknown, assume cenc. 
return C.CRYPTO_MODE_AES_CTR; @@ -94,10 +90,12 @@ private static int schemeToCryptoMode(@Nullable String schemeType) { case C.CENC_TYPE_cbcs: return C.CRYPTO_MODE_AES_CBC; default: - Log.w(TAG, "Unsupported protection scheme type '" + schemeType + "'. Assuming AES-CTR " - + "crypto mode."); + Log.w( + TAG, + "Unsupported protection scheme type '" + + schemeType + + "'. Assuming AES-CTR crypto mode."); return C.CRYPTO_MODE_AES_CTR; } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/TrackFragment.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/TrackFragment.java index 0272e8e338..d87f7ba443 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/TrackFragment.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/TrackFragment.java @@ -15,101 +15,86 @@ */ package com.google.android.exoplayer2.extractor.mp4; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.util.ParsableByteArray; import java.io.IOException; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * A holder for information corresponding to a single fragment of an mp4 file. - */ +/** A holder for information corresponding to a single fragment of an mp4 file. */ /* package */ final class TrackFragment { - /** - * The default values for samples from the track fragment header. - */ - public DefaultSampleValues header; - /** - * The position (byte offset) of the start of fragment. - */ + /** The default values for samples from the track fragment header. */ + public @MonotonicNonNull DefaultSampleValues header; + /** The position (byte offset) of the start of fragment. */ public long atomPosition; - /** - * The position (byte offset) of the start of data contained in the fragment. - */ + /** The position (byte offset) of the start of data contained in the fragment. */ public long dataPosition; - /** - * The position (byte offset) of the start of auxiliary data. - */ + /** The position (byte offset) of the start of auxiliary data. */ public long auxiliaryDataPosition; - /** - * The number of track runs of the fragment. - */ + /** The number of track runs of the fragment. */ public int trunCount; - /** - * The total number of samples in the fragment. - */ + /** The total number of samples in the fragment. */ public int sampleCount; - /** - * The position (byte offset) of the start of sample data of each track run in the fragment. - */ + /** The position (byte offset) of the start of sample data of each track run in the fragment. */ public long[] trunDataPosition; - /** - * The number of samples contained by each track run in the fragment. - */ + /** The number of samples contained by each track run in the fragment. */ public int[] trunLength; - /** - * The size of each sample in the fragment. - */ + /** The size of each sample in the fragment. */ public int[] sampleSizeTable; - /** The composition time offset of each sample in the fragment, in microseconds. */ - public int[] sampleCompositionTimeOffsetUsTable; - /** The decoding time of each sample in the fragment, in microseconds. */ - public long[] sampleDecodingTimeUsTable; - /** - * Indicates which samples are sync frames. - */ + /** The presentation time of each sample in the fragment, in microseconds. */ + public long[] samplePresentationTimesUs; + /** Indicates which samples are sync frames. 
*/ public boolean[] sampleIsSyncFrameTable; - /** - * Whether the fragment defines encryption data. - */ + /** Whether the fragment defines encryption data. */ public boolean definesEncryptionData; /** * If {@link #definesEncryptionData} is true, indicates which samples use sub-sample encryption. * Undefined otherwise. */ public boolean[] sampleHasSubsampleEncryptionTable; - /** - * Fragment specific track encryption. May be null. - */ - public TrackEncryptionBox trackEncryptionBox; - /** - * If {@link #definesEncryptionData} is true, indicates the length of the sample encryption data. - * Undefined otherwise. - */ - public int sampleEncryptionDataLength; + /** Fragment specific track encryption. May be null. */ + @Nullable public TrackEncryptionBox trackEncryptionBox; /** * If {@link #definesEncryptionData} is true, contains binary sample encryption data. Undefined * otherwise. */ - public ParsableByteArray sampleEncryptionData; - /** - * Whether {@link #sampleEncryptionData} needs populating with the actual encryption data. - */ + public final ParsableByteArray sampleEncryptionData; + /** Whether {@link #sampleEncryptionData} needs populating with the actual encryption data. */ public boolean sampleEncryptionDataNeedsFill; /** - * The absolute decode time of the start of the next fragment. + * The duration of all the samples defined in the fragments up to and including this one, plus the + * duration of the samples defined in the moov atom if {@link #nextFragmentDecodeTimeIncludesMoov} + * is {@code true}. */ public long nextFragmentDecodeTime; + /** + * Whether {@link #nextFragmentDecodeTime} includes the duration of the samples referred to by the + * moov atom. + */ + public boolean nextFragmentDecodeTimeIncludesMoov; + + public TrackFragment() { + trunDataPosition = new long[0]; + trunLength = new int[0]; + sampleSizeTable = new int[0]; + samplePresentationTimesUs = new long[0]; + sampleIsSyncFrameTable = new boolean[0]; + sampleHasSubsampleEncryptionTable = new boolean[0]; + sampleEncryptionData = new ParsableByteArray(); + } /** * Resets the fragment. - *
<p>
- * {@link #sampleCount} and {@link #nextFragmentDecodeTime} are set to 0, and both
- * {@link #definesEncryptionData} and {@link #sampleEncryptionDataNeedsFill} is set to false,
- * and {@link #trackEncryptionBox} is set to null.
+ *
+ * <p>
      {@link #sampleCount} and {@link #nextFragmentDecodeTime} are set to 0, and both {@link + * #definesEncryptionData} and {@link #sampleEncryptionDataNeedsFill} is set to false, and {@link + * #trackEncryptionBox} is set to null. */ public void reset() { trunCount = 0; nextFragmentDecodeTime = 0; + nextFragmentDecodeTimeIncludesMoov = false; definesEncryptionData = false; sampleEncryptionDataNeedsFill = false; trackEncryptionBox = null; @@ -117,8 +102,8 @@ public void reset() { /** * Configures the fragment for the specified number of samples. - *
<p>
- * The {@link #sampleCount} of the fragment is set to the specified sample count, and the
+ *
+ * <p>
      The {@link #sampleCount} of the fragment is set to the specified sample count, and the * contained tables are resized if necessary such that they are at least this length. * * @param sampleCount The number of samples in the new run. @@ -126,17 +111,16 @@ public void reset() { public void initTables(int trunCount, int sampleCount) { this.trunCount = trunCount; this.sampleCount = sampleCount; - if (trunLength == null || trunLength.length < trunCount) { + if (trunLength.length < trunCount) { trunDataPosition = new long[trunCount]; trunLength = new int[trunCount]; } - if (sampleSizeTable == null || sampleSizeTable.length < sampleCount) { + if (sampleSizeTable.length < sampleCount) { // Size the tables 25% larger than needed, so as to make future resize operations less // likely. The choice of 25% is relatively arbitrary. int tableSize = (sampleCount * 125) / 100; sampleSizeTable = new int[tableSize]; - sampleCompositionTimeOffsetUsTable = new int[tableSize]; - sampleDecodingTimeUsTable = new long[tableSize]; + samplePresentationTimesUs = new long[tableSize]; sampleIsSyncFrameTable = new boolean[tableSize]; sampleHasSubsampleEncryptionTable = new boolean[tableSize]; } @@ -144,18 +128,14 @@ public void initTables(int trunCount, int sampleCount) { /** * Configures the fragment to be one that defines encryption data of the specified length. - *
<p>
- * {@link #definesEncryptionData} is set to true, {@link #sampleEncryptionDataLength} is set to
- * the specified length, and {@link #sampleEncryptionData} is resized if necessary such that it
- * is at least this length.
+ *
+ * <p>
      {@link #definesEncryptionData} is set to true, and the {@link ParsableByteArray#limit() + * limit} of {@link #sampleEncryptionData} is set to the specified length. * * @param length The length in bytes of the encryption data. */ public void initEncryptionData(int length) { - if (sampleEncryptionData == null || sampleEncryptionData.limit() < length) { - sampleEncryptionData = new ParsableByteArray(length); - } - sampleEncryptionDataLength = length; + sampleEncryptionData.reset(length); definesEncryptionData = true; sampleEncryptionDataNeedsFill = true; } @@ -165,8 +145,8 @@ public void initEncryptionData(int length) { * * @param input An {@link ExtractorInput} from which to read the encryption data. */ - public void fillEncryptionData(ExtractorInput input) throws IOException, InterruptedException { - input.readFully(sampleEncryptionData.data, 0, sampleEncryptionDataLength); + public void fillEncryptionData(ExtractorInput input) throws IOException { + input.readFully(sampleEncryptionData.getData(), 0, sampleEncryptionData.limit()); sampleEncryptionData.setPosition(0); sampleEncryptionDataNeedsFill = false; } @@ -177,7 +157,7 @@ public void fillEncryptionData(ExtractorInput input) throws IOException, Interru * @param source A source from which to read the encryption data. */ public void fillEncryptionData(ParsableByteArray source) { - source.readBytes(sampleEncryptionData.data, 0, sampleEncryptionDataLength); + source.readBytes(sampleEncryptionData.getData(), 0, sampleEncryptionData.limit()); sampleEncryptionData.setPosition(0); sampleEncryptionDataNeedsFill = false; } @@ -189,7 +169,7 @@ public void fillEncryptionData(ParsableByteArray source) { * @return The presentation timestamps of this sample in microseconds. */ public long getSamplePresentationTimeUs(int index) { - return sampleDecodingTimeUsTable[index] + sampleCompositionTimeOffsetUsTable[index]; + return samplePresentationTimesUs[index]; } /** Returns whether the sample at the given index has a subsample encryption table. */ diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/TrackSampleTable.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/TrackSampleTable.java index 59ea386335..5104aeb209 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/TrackSampleTable.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/TrackSampleTable.java @@ -19,9 +19,7 @@ import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; -/** - * Sample table for a track in an MP4 file. - */ +/** Sample table for a track in an MP4 file. */ /* package */ final class TrackSampleTable { /** The track corresponding to this sample table. */ @@ -38,10 +36,7 @@ public final long[] timestampsUs; /** Sample flags. */ public final int[] flags; - /** - * The duration of the track sample table in microseconds, or {@link C#TIME_UNSET} if the sample - * table is empty. - */ + /** The duration of the track sample table in microseconds. 
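// Illustrative note with hypothetical numbers, not part of the patch: the old
// getSamplePresentationTimeUs added sampleDecodingTimeUsTable[index] and
// sampleCompositionTimeOffsetUsTable[index] on every call; the fragment now precomputes
// that sum into samplePresentationTimesUs. With decodeTimeUs = 1_000_000 and
// compositionOffsetUs = 40_000, the table entry is written once as 1_040_000.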
*/ public final long durationUs; public TrackSampleTable( @@ -104,5 +99,4 @@ public int getIndexOfLaterOrEqualSynchronizationSample(long timeUs) { } return C.INDEX_UNSET; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/package-info.java new file mode 100644 index 0000000000..6d0ad27361 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.extractor.mp4; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/DefaultOggSeeker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/DefaultOggSeeker.java index 51ab94ba0e..29a0f5932a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/DefaultOggSeeker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/DefaultOggSeeker.java @@ -15,6 +15,9 @@ */ package com.google.android.exoplayer2.extractor.ogg; +import static com.google.android.exoplayer2.extractor.ExtractorUtil.skipFullyQuietly; + +import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; @@ -29,9 +32,9 @@ /** Seeks in an Ogg stream. */ /* package */ final class DefaultOggSeeker implements OggSeeker { - private static final int MATCH_RANGE = 72000; - private static final int MATCH_BYTE_RANGE = 100000; - private static final int DEFAULT_OFFSET = 30000; + private static final int MATCH_RANGE = 72_000; + private static final int MATCH_BYTE_RANGE = 100_000; + private static final int DEFAULT_OFFSET = 30_000; private static final int STATE_SEEK_TO_END = 0; private static final int STATE_READ_LAST_PAGE = 1; @@ -39,7 +42,7 @@ private static final int STATE_SKIP = 3; private static final int STATE_IDLE = 4; - private final OggPageHeader pageHeader = new OggPageHeader(); + private final OggPageHeader pageHeader; private final long payloadStartPosition; private final long payloadEndPosition; private final StreamReader streamReader; @@ -83,10 +86,11 @@ public DefaultOggSeeker( } else { state = STATE_SEEK_TO_END; } + pageHeader = new OggPageHeader(); } @Override - public long read(ExtractorInput input) throws IOException, InterruptedException { + public long read(ExtractorInput input) throws IOException { switch (state) { case STATE_IDLE: return -1; @@ -121,6 +125,7 @@ public long read(ExtractorInput input) throws IOException, InterruptedException } @Override + @Nullable public OggSeekMap createSeekMap() { return totalGranules != 0 ? 
new OggSeekMap() : null; } @@ -145,15 +150,14 @@ public void startSeek(long targetGranule) { * @return The byte position from which data should be provided for the next step, or {@link * C#POSITION_UNSET} if the search has converged. * @throws IOException If reading from the input fails. - * @throws InterruptedException If interrupted while reading from the input. */ - private long getNextSeekPosition(ExtractorInput input) throws IOException, InterruptedException { + private long getNextSeekPosition(ExtractorInput input) throws IOException { if (start == end) { return C.POSITION_UNSET; } long currentPosition = input.getPosition(); - if (!skipToNextPage(input, end)) { + if (!pageHeader.skipToNextPage(input, end)) { if (start == currentPosition) { throw new IOException("No ogg page can be found."); } @@ -196,75 +200,23 @@ private long getNextSeekPosition(ExtractorInput input) throws IOException, Inter * @param input The {@link ExtractorInput} to read from. * @throws ParserException If populating the page header fails. * @throws IOException If reading from the input fails. - * @throws InterruptedException If interrupted while reading from the input. */ - private void skipToPageOfTargetGranule(ExtractorInput input) - throws IOException, InterruptedException { - pageHeader.populate(input, /* quiet= */ false); - while (pageHeader.granulePosition <= targetGranule) { + private void skipToPageOfTargetGranule(ExtractorInput input) throws IOException { + while (true) { + // If pageHeader.skipToNextPage fails to find a page it will advance input.position to the + // end of the file, so pageHeader.populate will throw EOFException (because quiet=false). + pageHeader.skipToNextPage(input); + pageHeader.populate(input, /* quiet= */ false); + if (pageHeader.granulePosition > targetGranule) { + break; + } input.skipFully(pageHeader.headerSize + pageHeader.bodySize); start = input.getPosition(); startGranule = pageHeader.granulePosition; - pageHeader.populate(input, /* quiet= */ false); } input.resetPeekPosition(); } - /** - * Skips to the next page. - * - * @param input The {@code ExtractorInput} to skip to the next page. - * @throws IOException If peeking/reading from the input fails. - * @throws InterruptedException If the thread is interrupted. - * @throws EOFException If the next page can't be found before the end of the input. - */ - @VisibleForTesting - void skipToNextPage(ExtractorInput input) throws IOException, InterruptedException { - if (!skipToNextPage(input, payloadEndPosition)) { - // Not found until eof. - throw new EOFException(); - } - } - - /** - * Skips to the next page. Searches for the next page header. - * - * @param input The {@code ExtractorInput} to skip to the next page. - * @param limit The limit up to which the search should take place. - * @return Whether the next page was found. - * @throws IOException If peeking/reading from the input fails. - * @throws InterruptedException If interrupted while peeking/reading from the input. - */ - private boolean skipToNextPage(ExtractorInput input, long limit) - throws IOException, InterruptedException { - limit = Math.min(limit + 3, payloadEndPosition); - byte[] buffer = new byte[2048]; - int peekLength = buffer.length; - while (true) { - if (input.getPosition() + peekLength > limit) { - // Make sure to not peek beyond the end of the input. - peekLength = (int) (limit - input.getPosition()); - if (peekLength < 4) { - // Not found until end. 
- return false; - } - } - input.peekFully(buffer, 0, peekLength, false); - for (int i = 0; i < peekLength - 3; i++) { - if (buffer[i] == 'O' - && buffer[i + 1] == 'g' - && buffer[i + 2] == 'g' - && buffer[i + 3] == 'S') { - // Match! Skip to the start of the pattern. - input.skipFully(i); - return true; - } - } - // Overlap by not skipping the entire peekLength. - input.skipFully(peekLength - 3); - } - } - /** * Skips to the last Ogg page in the stream and reads the header's granule field which is the * total number of samples per channel. @@ -272,17 +224,28 @@ private boolean skipToNextPage(ExtractorInput input, long limit) * @param input The {@link ExtractorInput} to read from. * @return The total number of samples of this input. * @throws IOException If reading from the input fails. - * @throws InterruptedException If the thread is interrupted. */ @VisibleForTesting - long readGranuleOfLastPage(ExtractorInput input) throws IOException, InterruptedException { - skipToNextPage(input); + long readGranuleOfLastPage(ExtractorInput input) throws IOException { pageHeader.reset(); - while ((pageHeader.type & 0x04) != 0x04 && input.getPosition() < payloadEndPosition) { - pageHeader.populate(input, /* quiet= */ false); - input.skipFully(pageHeader.headerSize + pageHeader.bodySize); + if (!pageHeader.skipToNextPage(input)) { + throw new EOFException(); + } + pageHeader.populate(input, /* quiet= */ false); + input.skipFully(pageHeader.headerSize + pageHeader.bodySize); + long granulePosition = pageHeader.granulePosition; + while ((pageHeader.type & 0x04) != 0x04 + && pageHeader.skipToNextPage(input) + && input.getPosition() < payloadEndPosition) { + boolean hasPopulated = pageHeader.populate(input, /* quiet= */ true); + if (!hasPopulated || !skipFullyQuietly(input, pageHeader.headerSize + pageHeader.bodySize)) { + // The input file contains a partial page at the end. Ignore it and return the granule + // position of the last complete page. 
+ return granulePosition; + } + granulePosition = pageHeader.granulePosition; } - return pageHeader.granulePosition; + return granulePosition; } private final class OggSeekMap implements SeekMap { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/FlacReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/FlacReader.java index f99b2420cc..c4552943b1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/FlacReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/FlacReader.java @@ -15,33 +15,37 @@ */ package com.google.android.exoplayer2.extractor.ogg; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; + +import androidx.annotation.Nullable; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.FlacFrameReader; import com.google.android.exoplayer2.extractor.FlacMetadataReader; import com.google.android.exoplayer2.extractor.FlacSeekTableSeekMap; +import com.google.android.exoplayer2.extractor.FlacStreamMetadata; +import com.google.android.exoplayer2.extractor.FlacStreamMetadata.SeekTable; import com.google.android.exoplayer2.extractor.SeekMap; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.FlacConstants; -import com.google.android.exoplayer2.util.FlacStreamMetadata; +import com.google.android.exoplayer2.extractor.flac.FlacConstants; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; -import java.io.IOException; import java.util.Arrays; +import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf; -/** - * {@link StreamReader} to extract Flac data out of Ogg byte stream. - */ +/** {@link StreamReader} to extract Flac data out of Ogg byte stream. 
*/ /* package */ final class FlacReader extends StreamReader { private static final byte AUDIO_PACKET_TYPE = (byte) 0xFF; private static final int FRAME_HEADER_SAMPLE_NUMBER_OFFSET = 4; - private FlacStreamMetadata streamMetadata; - private FlacOggSeeker flacOggSeeker; + @Nullable private FlacStreamMetadata streamMetadata; + @Nullable private FlacOggSeeker flacOggSeeker; public static boolean verifyBitstreamType(ParsableByteArray data) { - return data.bytesLeft() >= 5 && data.readUnsignedByte() == 0x7F && // packet type + return data.bytesLeft() >= 5 + && data.readUnsignedByte() == 0x7F + && // packet type data.readUnsignedInt() == 0x464C4143; // ASCII signature "FLAC" } @@ -60,36 +64,47 @@ private static boolean isAudioPacket(byte[] data) { @Override protected long preparePayload(ParsableByteArray packet) { - if (!isAudioPacket(packet.data)) { + if (!isAudioPacket(packet.getData())) { return -1; } return getFlacFrameBlockSize(packet); } @Override + @EnsuresNonNullIf(expression = "#3.format", result = false) protected boolean readHeaders(ParsableByteArray packet, long position, SetupData setupData) { - byte[] data = packet.data; + byte[] data = packet.getData(); + @Nullable FlacStreamMetadata streamMetadata = this.streamMetadata; if (streamMetadata == null) { streamMetadata = new FlacStreamMetadata(data, 17); + this.streamMetadata = streamMetadata; byte[] metadata = Arrays.copyOfRange(data, 9, packet.limit()); setupData.format = streamMetadata.getFormat(metadata, /* id3Metadata= */ null); - } else if ((data[0] & 0x7F) == FlacConstants.METADATA_TYPE_SEEK_TABLE) { - flacOggSeeker = new FlacOggSeeker(); - FlacStreamMetadata.SeekTable seekTable = - FlacMetadataReader.readSeekTableMetadataBlock(packet); + return true; + } + + if ((data[0] & 0x7F) == FlacConstants.METADATA_TYPE_SEEK_TABLE) { + SeekTable seekTable = FlacMetadataReader.readSeekTableMetadataBlock(packet); streamMetadata = streamMetadata.copyWithSeekTable(seekTable); - } else if (isAudioPacket(data)) { + this.streamMetadata = streamMetadata; + flacOggSeeker = new FlacOggSeeker(streamMetadata, seekTable); + return true; + } + + if (isAudioPacket(data)) { if (flacOggSeeker != null) { flacOggSeeker.setFirstFrameOffset(position); setupData.oggSeeker = flacOggSeeker; } + checkNotNull(setupData.format); return false; } + return true; } private int getFlacFrameBlockSize(ParsableByteArray packet) { - int blockSizeKey = (packet.data[2] & 0xFF) >> 4; + int blockSizeKey = (packet.getData()[2] & 0xFF) >> 4; if (blockSizeKey == 6 || blockSizeKey == 7) { // Skip the sample number. 
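// Context note, not part of the patch: in a FLAC frame header, block size keys 6 and 7
// mean the block size is stored explicitly after the coded frame/sample number, so the
// reader skips that number here before reading the block size value from the packet.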
packet.skipBytes(FRAME_HEADER_SAMPLE_NUMBER_OFFSET); @@ -100,12 +115,16 @@ private int getFlacFrameBlockSize(ParsableByteArray packet) { return result; } - private class FlacOggSeeker implements OggSeeker { + private static final class FlacOggSeeker implements OggSeeker { + private FlacStreamMetadata streamMetadata; + private SeekTable seekTable; private long firstFrameOffset; private long pendingSeekGranule; - public FlacOggSeeker() { + public FlacOggSeeker(FlacStreamMetadata streamMetadata, SeekTable seekTable) { + this.streamMetadata = streamMetadata; + this.seekTable = seekTable; firstFrameOffset = -1; pendingSeekGranule = -1; } @@ -115,7 +134,7 @@ public void setFirstFrameOffset(long firstFrameOffset) { } @Override - public long read(ExtractorInput input) throws IOException, InterruptedException { + public long read(ExtractorInput input) { if (pendingSeekGranule >= 0) { long result = -(pendingSeekGranule + 2); pendingSeekGranule = -1; @@ -126,18 +145,17 @@ public long read(ExtractorInput input) throws IOException, InterruptedException @Override public void startSeek(long targetGranule) { - Assertions.checkNotNull(streamMetadata.seekTable); - long[] seekPointGranules = streamMetadata.seekTable.pointSampleNumbers; - int index = Util.binarySearchFloor(seekPointGranules, targetGranule, true, true); + long[] seekPointGranules = seekTable.pointSampleNumbers; + int index = + Util.binarySearchFloor( + seekPointGranules, targetGranule, /* inclusive= */ true, /* stayInBounds= */ true); pendingSeekGranule = seekPointGranules[index]; } @Override public SeekMap createSeekMap() { - Assertions.checkState(firstFrameOffset != -1); + checkState(firstFrameOffset != -1); return new FlacSeekTableSeekMap(streamMetadata, firstFrameOffset); } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggExtractor.java index 5e74eab8d4..32fb588c75 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggExtractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggExtractor.java @@ -15,6 +15,9 @@ */ package com.google.android.exoplayer2.extractor.ogg; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static java.lang.Math.min; + import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.extractor.Extractor; @@ -25,10 +28,10 @@ import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.util.ParsableByteArray; import java.io.IOException; +import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * Extracts data from the Ogg container format. - */ +/** Extracts data from the Ogg container format. */ public class OggExtractor implements Extractor { /** Factory for {@link OggExtractor} instances. 
*/ @@ -36,12 +39,12 @@ public class OggExtractor implements Extractor { private static final int MAX_VERIFICATION_BYTES = 8; - private ExtractorOutput output; - private StreamReader streamReader; + private @MonotonicNonNull ExtractorOutput output; + private @MonotonicNonNull StreamReader streamReader; private boolean streamReaderInitialized; @Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { + public boolean sniff(ExtractorInput input) throws IOException { try { return sniffInternal(input); } catch (ParserException e) { @@ -67,11 +70,12 @@ public void release() { } @Override - public int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { + checkStateNotNull(output); // Check that init has been called. if (streamReader == null) { if (!sniffInternal(input)) { - throw new ParserException("Failed to determine bitstream type"); + throw ParserException.createForMalformedContainer( + "Failed to determine bitstream type", /* cause= */ null); } input.resetPeekPosition(); } @@ -84,15 +88,16 @@ public int read(ExtractorInput input, PositionHolder seekPosition) return streamReader.read(input, seekPosition); } - private boolean sniffInternal(ExtractorInput input) throws IOException, InterruptedException { + @EnsuresNonNullIf(expression = "streamReader", result = true) + private boolean sniffInternal(ExtractorInput input) throws IOException { OggPageHeader header = new OggPageHeader(); if (!header.populate(input, true) || (header.type & 0x02) != 0x02) { return false; } - int length = Math.min(header.bodySize, MAX_VERIFICATION_BYTES); + int length = min(header.bodySize, MAX_VERIFICATION_BYTES); ParsableByteArray scratch = new ParsableByteArray(length); - input.peekFully(scratch.data, 0, length); + input.peekFully(scratch.getData(), 0, length); if (FlacReader.verifyBitstreamType(resetPosition(scratch))) { streamReader = new FlacReader(); @@ -110,5 +115,4 @@ private static ParsableByteArray resetPosition(ParsableByteArray scratch) { scratch.setPosition(0); return scratch; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggPacket.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggPacket.java index 9c594ffde5..b31d3769b2 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggPacket.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggPacket.java @@ -15,6 +15,10 @@ */ package com.google.android.exoplayer2.extractor.ogg; +import static com.google.android.exoplayer2.extractor.ExtractorUtil.readFullyQuietly; +import static com.google.android.exoplayer2.extractor.ExtractorUtil.skipFullyQuietly; +import static java.lang.Math.max; + import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.util.Assertions; @@ -22,25 +26,21 @@ import java.io.IOException; import java.util.Arrays; -/** - * OGG packet class. - */ +/** OGG packet class. 
*/ /* package */ final class OggPacket { private final OggPageHeader pageHeader = new OggPageHeader(); - private final ParsableByteArray packetArray = new ParsableByteArray( - new byte[OggPageHeader.MAX_PAGE_PAYLOAD], 0); + private final ParsableByteArray packetArray = + new ParsableByteArray(new byte[OggPageHeader.MAX_PAGE_PAYLOAD], 0); private int currentSegmentIndex = C.INDEX_UNSET; private int segmentCount; private boolean populated; - /** - * Resets this reader. - */ + /** Resets this reader. */ public void reset() { pageHeader.reset(); - packetArray.reset(); + packetArray.reset(/* limit= */ 0); currentSegmentIndex = C.INDEX_UNSET; populated = false; } @@ -53,22 +53,21 @@ public void reset() { * * @param input The {@link ExtractorInput} to read data from. * @return {@code true} if the read was successful. The read fails if the end of the input is - * encountered without reading data. + * encountered without reading the whole packet. * @throws IOException If reading from the input fails. - * @throws InterruptedException If the thread is interrupted. */ - public boolean populate(ExtractorInput input) throws IOException, InterruptedException { + public boolean populate(ExtractorInput input) throws IOException { Assertions.checkState(input != null); if (populated) { populated = false; - packetArray.reset(); + packetArray.reset(/* limit= */ 0); } while (!populated) { if (currentSegmentIndex < 0) { // We're at the start of a page. - if (!pageHeader.populate(input, true)) { + if (!pageHeader.skipToNextPage(input) || !pageHeader.populate(input, /* quiet= */ true)) { return false; } int segmentIndex = 0; @@ -79,23 +78,25 @@ public boolean populate(ExtractorInput input) throws IOException, InterruptedExc bytesToSkip += calculatePacketSize(segmentIndex); segmentIndex += segmentCount; } - input.skipFully(bytesToSkip); + if (!skipFullyQuietly(input, bytesToSkip)) { + return false; + } currentSegmentIndex = segmentIndex; } int size = calculatePacketSize(currentSegmentIndex); int segmentIndex = currentSegmentIndex + segmentCount; if (size > 0) { - if (packetArray.capacity() < packetArray.limit() + size) { - packetArray.data = Arrays.copyOf(packetArray.data, packetArray.limit() + size); + packetArray.ensureCapacity(packetArray.limit() + size); + if (!readFullyQuietly(input, packetArray.getData(), packetArray.limit(), size)) { + return false; } - input.readFully(packetArray.data, packetArray.limit(), size); packetArray.setLimit(packetArray.limit() + size); populated = pageHeader.laces[segmentIndex - 1] != 255; } // Advance now since we are sure reading didn't throw an exception. - currentSegmentIndex = segmentIndex == pageHeader.pageSegmentCount ? C.INDEX_UNSET - : segmentIndex; + currentSegmentIndex = + segmentIndex == pageHeader.pageSegmentCount ? C.INDEX_UNSET : segmentIndex; } return true; } @@ -114,22 +115,20 @@ public OggPageHeader getPageHeader() { return pageHeader; } - /** - * Returns a {@link ParsableByteArray} containing the packet's payload. - */ + /** Returns a {@link ParsableByteArray} containing the packet's payload. */ public ParsableByteArray getPayload() { return packetArray; } - /** - * Trims the packet data array. - */ + /** Trims the packet data array. 
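// Illustrative note with hypothetical numbers, not part of the patch: populate above
// relies on Ogg lacing values. A packet's size is the sum of consecutive lacing values,
// and a value of 255 means the packet continues in the following segment, so only a
// value below 255 terminates it. A lacing run of {255, 255, 10} therefore describes a
// single 520-byte packet.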
*/ public void trimPayload() { - if (packetArray.data.length == OggPageHeader.MAX_PAGE_PAYLOAD) { + if (packetArray.getData().length == OggPageHeader.MAX_PAGE_PAYLOAD) { return; } - packetArray.data = Arrays.copyOf(packetArray.data, Math.max(OggPageHeader.MAX_PAGE_PAYLOAD, - packetArray.limit())); + packetArray.reset( + Arrays.copyOf( + packetArray.getData(), max(OggPageHeader.MAX_PAGE_PAYLOAD, packetArray.limit())), + /* limit= */ packetArray.limit()); } /** @@ -151,5 +150,4 @@ private int calculatePacketSize(int startSegmentIndex) { } return size; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggPageHeader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggPageHeader.java index c7fb3ff6a2..306e64f559 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggPageHeader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggPageHeader.java @@ -15,25 +15,26 @@ */ package com.google.android.exoplayer2.extractor.ogg; +import static com.google.android.exoplayer2.extractor.ExtractorUtil.peekFullyQuietly; + import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.ParsableByteArray; -import java.io.EOFException; import java.io.IOException; -/** - * Data object to store header information. - */ -/* package */ final class OggPageHeader { +/** Data object to store header information. */ +/* package */ final class OggPageHeader { public static final int EMPTY_PAGE_HEADER_SIZE = 27; public static final int MAX_SEGMENT_COUNT = 255; public static final int MAX_PAGE_PAYLOAD = 255 * 255; - public static final int MAX_PAGE_SIZE = EMPTY_PAGE_HEADER_SIZE + MAX_SEGMENT_COUNT - + MAX_PAGE_PAYLOAD; + public static final int MAX_PAGE_SIZE = + EMPTY_PAGE_HEADER_SIZE + MAX_SEGMENT_COUNT + MAX_PAGE_PAYLOAD; - private static final int TYPE_OGGS = 0x4f676753; + private static final int CAPTURE_PATTERN = 0x4f676753; // OggS + private static final int CAPTURE_PATTERN_SIZE = 4; public int revision; public int type; @@ -51,16 +52,14 @@ public int headerSize; public int bodySize; /** - * Be aware that {@code laces.length} is always {@link #MAX_SEGMENT_COUNT}. Instead use - * {@link #pageSegmentCount} to iterate. + * Be aware that {@code laces.length} is always {@link #MAX_SEGMENT_COUNT}. Instead use {@link + * #pageSegmentCount} to iterate. */ public final int[] laces = new int[MAX_SEGMENT_COUNT]; private final ParsableByteArray scratch = new ParsableByteArray(MAX_SEGMENT_COUNT); - /** - * Resets all primitive member fields to zero. - */ + /** Resets all primitive member fields to zero. */ public void reset() { revision = 0; type = 0; @@ -73,36 +72,68 @@ public void reset() { bodySize = 0; } + /** + * Advances through {@code input} looking for the start of the next Ogg page. + * + *

<p>Equivalent to {@link #skipToNextPage(ExtractorInput, long) skipToNextPage(input, /* limit= + * *\/ C.POSITION_UNSET)}. + */ + public boolean skipToNextPage(ExtractorInput input) throws IOException { + return skipToNextPage(input, /* limit= */ C.POSITION_UNSET); + } + + /** + * Advances through {@code input} looking for the start of the next Ogg page. + * + * <p>The start of a page is identified by the 4-byte capture_pattern 'OggS'. + * + * <p>
      Returns {@code true} if a capture pattern was found, with the read and peek positions of + * {@code input} at the start of the page, just before the capture_pattern. Otherwise returns + * {@code false}, with the read and peek positions of {@code input} at either {@code limit} (if + * set) or end-of-input. + * + * @param input The {@link ExtractorInput} to read from (must have {@code readPosition == + * peekPosition}). + * @param limit The max position in {@code input} to peek to, or {@link C#POSITION_UNSET} to allow + * peeking to the end. + * @return True if a capture_pattern was found. + * @throws IOException If reading data fails. + */ + public boolean skipToNextPage(ExtractorInput input, long limit) throws IOException { + Assertions.checkArgument(input.getPosition() == input.getPeekPosition()); + scratch.reset(/* limit= */ CAPTURE_PATTERN_SIZE); + while ((limit == C.POSITION_UNSET || input.getPosition() + CAPTURE_PATTERN_SIZE < limit) + && peekFullyQuietly( + input, scratch.getData(), 0, CAPTURE_PATTERN_SIZE, /* allowEndOfInput= */ true)) { + scratch.setPosition(0); + if (scratch.readUnsignedInt() == CAPTURE_PATTERN) { + input.resetPeekPosition(); + return true; + } + // Advance one byte before looking for the capture pattern again. + input.skipFully(1); + } + // Move the read & peek positions to limit or end-of-input, whichever is closer. + while ((limit == C.POSITION_UNSET || input.getPosition() < limit) + && input.skip(1) != C.RESULT_END_OF_INPUT) {} + return false; + } + /** * Peeks an Ogg page header and updates this {@link OggPageHeader}. * * @param input The {@link ExtractorInput} to read from. * @param quiet Whether to return {@code false} rather than throwing an exception if the header * cannot be populated. - * @return Whether the read was successful. The read fails if the end of the input is encountered - * without reading data. + * @return Whether the header was entirely populated. * @throws IOException If reading data fails or the stream is invalid. - * @throws InterruptedException If the thread is interrupted. 
*/ - public boolean populate(ExtractorInput input, boolean quiet) - throws IOException, InterruptedException { - scratch.reset(); + public boolean populate(ExtractorInput input, boolean quiet) throws IOException { reset(); - boolean hasEnoughBytes = input.getLength() == C.LENGTH_UNSET - || input.getLength() - input.getPeekPosition() >= EMPTY_PAGE_HEADER_SIZE; - if (!hasEnoughBytes || !input.peekFully(scratch.data, 0, EMPTY_PAGE_HEADER_SIZE, true)) { - if (quiet) { - return false; - } else { - throw new EOFException(); - } - } - if (scratch.readUnsignedInt() != TYPE_OGGS) { - if (quiet) { - return false; - } else { - throw new ParserException("expected OggS capture pattern at begin of page"); - } + scratch.reset(/* limit= */ EMPTY_PAGE_HEADER_SIZE); + if (!peekFullyQuietly(input, scratch.getData(), 0, EMPTY_PAGE_HEADER_SIZE, quiet) + || scratch.readUnsignedInt() != CAPTURE_PATTERN) { + return false; } revision = scratch.readUnsignedByte(); @@ -110,7 +141,8 @@ public boolean populate(ExtractorInput input, boolean quiet) if (quiet) { return false; } else { - throw new ParserException("unsupported bit stream revision"); + throw ParserException.createForUnsupportedContainerFeature( + "unsupported bit stream revision"); } } type = scratch.readUnsignedByte(); @@ -123,8 +155,10 @@ public boolean populate(ExtractorInput input, boolean quiet) headerSize = EMPTY_PAGE_HEADER_SIZE + pageSegmentCount; // calculate total size of header including laces - scratch.reset(); - input.peekFully(scratch.data, 0, pageSegmentCount); + scratch.reset(/* limit= */ pageSegmentCount); + if (!peekFullyQuietly(input, scratch.getData(), 0, pageSegmentCount, quiet)) { + return false; + } for (int i = 0; i < pageSegmentCount; i++) { laces[i] = scratch.readUnsignedByte(); bodySize += laces[i]; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggSeeker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggSeeker.java index e4c3a163e6..7626aa52d8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggSeeker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OggSeeker.java @@ -15,6 +15,7 @@ */ package com.google.android.exoplayer2.extractor.ogg; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.SeekMap; import java.io.IOException; @@ -27,9 +28,10 @@ /* package */ interface OggSeeker { /** - * Returns a {@link SeekMap} that returns an initial estimated position for progressive seeking - * or the final position for direct seeking. Returns null if {@link #read} has yet to return -1. + * Returns a {@link SeekMap} that returns an initial estimated position for progressive seeking or + * the final position for direct seeking. Returns null if {@link #read} has yet to return -1. */ + @Nullable SeekMap createSeekMap(); /** @@ -41,17 +43,15 @@ /** * Reads data from the {@link ExtractorInput} to build the {@link SeekMap} or to continue a seek. - *
<p>
      - * If more data is required or if the position of the input needs to be modified then a position - * from which data should be provided is returned. Else a negative value is returned. If a seek - * has been completed then the value returned is -(currentGranule + 2). Else it is -1. + * + *
<p>
      If more data is required or if the position of the input needs to be modified then a + * position from which data should be provided is returned. Else a negative value is returned. If + * a seek has been completed then the value returned is -(currentGranule + 2). Else it is -1. * * @param input The {@link ExtractorInput} to read from. * @return A non-negative position to seek the {@link ExtractorInput} to, or -(currentGranule + 2) * if the progressive seek has completed, or -1 otherwise. * @throws IOException If reading from the {@link ExtractorInput} fails. - * @throws InterruptedException If the thread is interrupted. */ - long read(ExtractorInput input) throws IOException, InterruptedException; - + long read(ExtractorInput input) throws IOException; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OpusReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OpusReader.java index 90ae3f0f47..6e7051aa14 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OpusReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/OpusReader.java @@ -15,118 +15,127 @@ */ package com.google.android.exoplayer2.extractor.ogg; -import com.google.android.exoplayer2.C; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + +import androidx.annotation.Nullable; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.audio.OpusUtil; +import com.google.android.exoplayer2.extractor.VorbisUtil; +import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.util.ArrayList; +import com.google.common.collect.ImmutableList; import java.util.Arrays; import java.util.List; +import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf; -/** - * {@link StreamReader} to extract Opus data out of Ogg byte stream. - */ +/** {@link StreamReader} to extract Opus data out of Ogg byte stream. */ /* package */ final class OpusReader extends StreamReader { - private static final int DEFAULT_SEEK_PRE_ROLL_SAMPLES = 3840; - - /** - * Opus streams are always decoded at 48000 Hz. 
- */ - private static final int SAMPLE_RATE = 48000; - - private static final int OPUS_CODE = 0x4f707573; - private static final byte[] OPUS_SIGNATURE = {'O', 'p', 'u', 's', 'H', 'e', 'a', 'd'}; + private static final byte[] OPUS_ID_HEADER_SIGNATURE = {'O', 'p', 'u', 's', 'H', 'e', 'a', 'd'}; + private static final byte[] OPUS_COMMENT_HEADER_SIGNATURE = { + 'O', 'p', 'u', 's', 'T', 'a', 'g', 's' + }; - private boolean headerRead; + private boolean firstCommentHeaderSeen; public static boolean verifyBitstreamType(ParsableByteArray data) { - if (data.bytesLeft() < OPUS_SIGNATURE.length) { - return false; - } - byte[] header = new byte[OPUS_SIGNATURE.length]; - data.readBytes(header, 0, OPUS_SIGNATURE.length); - return Arrays.equals(header, OPUS_SIGNATURE); + return peekPacketStartsWith(data, OPUS_ID_HEADER_SIGNATURE); } @Override protected void reset(boolean headerData) { super.reset(headerData); if (headerData) { - headerRead = false; + firstCommentHeaderSeen = false; } } @Override protected long preparePayload(ParsableByteArray packet) { - return convertTimeToGranule(getPacketDurationUs(packet.data)); + return convertTimeToGranule(OpusUtil.getPacketDurationUs(packet.getData())); } @Override - protected boolean readHeaders(ParsableByteArray packet, long position, SetupData setupData) { - if (!headerRead) { - byte[] metadata = Arrays.copyOf(packet.data, packet.limit()); - int channelCount = metadata[9] & 0xFF; - int preskip = ((metadata[11] & 0xFF) << 8) | (metadata[10] & 0xFF); + @EnsuresNonNullIf(expression = "#3.format", result = false) + protected boolean readHeaders(ParsableByteArray packet, long position, SetupData setupData) + throws ParserException { + if (peekPacketStartsWith(packet, OPUS_ID_HEADER_SIGNATURE)) { + byte[] headerBytes = Arrays.copyOf(packet.getData(), packet.limit()); + int channelCount = OpusUtil.getChannelCount(headerBytes); + List initializationData = OpusUtil.buildInitializationData(headerBytes); - List initializationData = new ArrayList<>(3); - initializationData.add(metadata); - putNativeOrderLong(initializationData, preskip); - putNativeOrderLong(initializationData, DEFAULT_SEEK_PRE_ROLL_SAMPLES); - - setupData.format = Format.createAudioSampleFormat(null, MimeTypes.AUDIO_OPUS, null, - Format.NO_VALUE, Format.NO_VALUE, channelCount, SAMPLE_RATE, initializationData, null, 0, - null); - headerRead = true; + if (setupData.format != null) { + // setupData.format being non-null indicates we've already seen an ID header. Multiple ID + // headers are not permitted by the Opus spec [1], but have been observed in real files [2], + // so we just ignore all subsequent ones. + // [1] https://datatracker.ietf.org/doc/html/rfc7845#section-3 and + // https://datatracker.ietf.org/doc/html/rfc7845#section-5 + // [2] https://github.com/google/ExoPlayer/issues/10038 + return true; + } + setupData.format = + new Format.Builder() + .setSampleMimeType(MimeTypes.AUDIO_OPUS) + .setChannelCount(channelCount) + .setSampleRate(OpusUtil.SAMPLE_RATE) + .setInitializationData(initializationData) + .build(); + return true; + } else if (peekPacketStartsWith(packet, OPUS_COMMENT_HEADER_SIGNATURE)) { + // The comment header must come immediately after the ID header, so the format will already + // be populated: https://datatracker.ietf.org/doc/html/rfc7845#section-3 + checkStateNotNull(setupData.format); + if (firstCommentHeaderSeen) { + // Multiple comment headers are not permitted by the Opus spec [1], but have been observed + // in real files [2], so we just ignore all subsequent ones. 
+ // [1] https://datatracker.ietf.org/doc/html/rfc7845#section-3 and + // https://datatracker.ietf.org/doc/html/rfc7845#section-5 + // [2] https://github.com/google/ExoPlayer/issues/10038 + return true; + } + firstCommentHeaderSeen = true; + packet.skipBytes(OPUS_COMMENT_HEADER_SIGNATURE.length); + VorbisUtil.CommentHeader commentHeader = + VorbisUtil.readVorbisCommentHeader( + packet, /* hasMetadataHeader= */ false, /* hasFramingBit= */ false); + @Nullable + Metadata vorbisMetadata = + VorbisUtil.parseVorbisComments(ImmutableList.copyOf(commentHeader.comments)); + if (vorbisMetadata == null) { + return true; + } + setupData.format = + setupData + .format + .buildUpon() + .setMetadata(vorbisMetadata.copyWithAppendedEntriesFrom(setupData.format.metadata)) + .build(); + return true; } else { - boolean headerPacket = packet.readInt() == OPUS_CODE; - packet.setPosition(0); - return headerPacket; + // The ID header must come at the start of the file, so the format must already be populated: + // https://datatracker.ietf.org/doc/html/rfc7845#section-3 + checkStateNotNull(setupData.format); + return false; } - return true; - } - - private void putNativeOrderLong(List initializationData, int samples) { - long ns = (samples * C.NANOS_PER_SECOND) / SAMPLE_RATE; - byte[] array = ByteBuffer.allocate(8).order(ByteOrder.nativeOrder()).putLong(ns).array(); - initializationData.add(array); } /** - * Returns the duration of the given audio packet. + * Returns true if the given {@link ParsableByteArray} starts with {@code expectedPrefix}. Does + * not change the {@link ParsableByteArray#getPosition() position} of {@code packet}. * - * @param packet Contains audio data. - * @return Returns the duration of the given audio packet. + * @param packet The packet data. + * @return True if the packet starts with {@code expectedPrefix}, false if not. 
*/ - private long getPacketDurationUs(byte[] packet) { - int toc = packet[0] & 0xFF; - int frames; - switch (toc & 0x3) { - case 0: - frames = 1; - break; - case 1: - case 2: - frames = 2; - break; - default: - frames = packet[1] & 0x3F; - break; - } - - int config = toc >> 3; - int length = config & 0x3; - if (config >= 16) { - length = 2500 << length; - } else if (config >= 12) { - length = 10000 << (length & 0x1); - } else if (length == 3) { - length = 60000; - } else { - length = 10000 << length; + private static boolean peekPacketStartsWith(ParsableByteArray packet, byte[] expectedPrefix) { + if (packet.bytesLeft() < expectedPrefix.length) { + return false; } - return (long) frames * length; + int startPosition = packet.getPosition(); + byte[] header = new byte[expectedPrefix.length]; + packet.readBytes(header, 0, expectedPrefix.length); + packet.setPosition(startPosition); + return Arrays.equals(header, expectedPrefix); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/StreamReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/StreamReader.java index d2671125e4..4bc7ade08a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/StreamReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/StreamReader.java @@ -15,6 +15,9 @@ */ package com.google.android.exoplayer2.extractor.ogg; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; + import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.extractor.Extractor; @@ -25,6 +28,10 @@ import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.util.ParsableByteArray; import java.io.IOException; +import org.checkerframework.checker.nullness.qual.EnsuresNonNull; +import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; /** StreamReader abstract class. 
*/ @SuppressWarnings("UngroupedOverloads") @@ -36,15 +43,15 @@ private static final int STATE_END_OF_INPUT = 3; static class SetupData { - Format format; - OggSeeker oggSeeker; + @MonotonicNonNull Format format; + @MonotonicNonNull OggSeeker oggSeeker; } private final OggPacket oggPacket; - private TrackOutput trackOutput; - private ExtractorOutput extractorOutput; - private OggSeeker oggSeeker; + private @MonotonicNonNull TrackOutput trackOutput; + private @MonotonicNonNull ExtractorOutput extractorOutput; + private @MonotonicNonNull OggSeeker oggSeeker; private long targetGranule; private long payloadStartPosition; private long currentGranule; @@ -57,6 +64,7 @@ static class SetupData { public StreamReader() { oggPacket = new OggPacket(); + setupData = new SetupData(); } void init(ExtractorOutput output, TrackOutput trackOutput) { @@ -92,7 +100,7 @@ final void seek(long position, long timeUs) { } else { if (state != STATE_READ_HEADERS) { targetGranule = convertTimeToGranule(timeUs); - oggSeeker.startSeek(targetGranule); + castNonNull(oggSeeker).startSeek(targetGranule); state = STATE_READ_PAYLOAD; } } @@ -101,37 +109,62 @@ final void seek(long position, long timeUs) { /** * @see Extractor#read(ExtractorInput, PositionHolder) */ - final int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + final int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { + assertInitialized(); switch (state) { case STATE_READ_HEADERS: - return readHeaders(input); + return readHeadersAndUpdateState(input); case STATE_SKIP_HEADERS: input.skipFully((int) payloadStartPosition); state = STATE_READ_PAYLOAD; return Extractor.RESULT_CONTINUE; case STATE_READ_PAYLOAD: + castNonNull(oggSeeker); return readPayload(input, seekPosition); + case STATE_END_OF_INPUT: + return C.RESULT_END_OF_INPUT; default: // Never happens. throw new IllegalStateException(); } } - private int readHeaders(ExtractorInput input) throws IOException, InterruptedException { - boolean readingHeaders = true; - while (readingHeaders) { + @EnsuresNonNull({"trackOutput", "extractorOutput"}) + private void assertInitialized() { + checkStateNotNull(trackOutput); + castNonNull(extractorOutput); + } + + /** + * Read all header packets. + * + * @param input The {@link ExtractorInput} to read data from. + * @return {@code true} if all headers were read. {@code false} if end of the input is + * encountered. + * @throws IOException If reading from the input fails. + */ + @EnsuresNonNullIf(expression = "setupData.format", result = true) + private boolean readHeaders(ExtractorInput input) throws IOException { + while (true) { if (!oggPacket.populate(input)) { state = STATE_END_OF_INPUT; - return Extractor.RESULT_END_OF_INPUT; + return false; } lengthOfReadPacket = input.getPosition() - payloadStartPosition; - readingHeaders = readHeaders(oggPacket.getPayload(), payloadStartPosition, setupData); - if (readingHeaders) { + if (readHeaders(oggPacket.getPayload(), payloadStartPosition, setupData)) { payloadStartPosition = input.getPosition(); + } else { + return true; // Current packet is not a header, therefore all headers have been read. 
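The seek and payload paths in StreamReader convert between presentation time and Ogg granule position through the track sample rate: seek() calls convertTimeToGranule(timeUs), and OpusReader.preparePayload feeds packet durations through the same conversion. For the audio codecs handled here the granule position counts PCM samples (Opus always uses a 48 000 Hz clock), so both directions are proportional arithmetic. A minimal sketch of that relationship, with hypothetical helper names rather than the library's exact implementation:

    // Sketch, assuming granule position counts PCM samples at sampleRate (48 000 for Opus).
    // Helper names are illustrative, not ExoPlayer API.
    static long timeUsToGranule(long timeUs, int sampleRate) {
      return (sampleRate * timeUs) / 1_000_000L; // microseconds per second
    }

    static long granuleToTimeUs(long granule, int sampleRate) {
      return (granule * 1_000_000L) / sampleRate;
    }

With this mapping the seeker can translate a target time into a target granule and report completion back as -(currentGranule + 2), matching the OggSeeker.read contract documented above.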
} } + } + + @RequiresNonNull({"trackOutput"}) + private int readHeadersAndUpdateState(ExtractorInput input) throws IOException { + if (!readHeaders(input)) { + return Extractor.RESULT_END_OF_INPUT; + } sampleRate = setupData.format.sampleRate; if (!formatSet) { @@ -148,7 +181,7 @@ private int readHeaders(ExtractorInput input) throws IOException, InterruptedExc boolean isLastPage = (firstPayloadPageHeader.type & 0x04) != 0; // Type 4 is end of stream. oggSeeker = new DefaultOggSeeker( - this, + /* streamReader= */ this, payloadStartPosition, input.getLength(), firstPayloadPageHeader.headerSize + firstPayloadPageHeader.bodySize, @@ -156,15 +189,14 @@ private int readHeaders(ExtractorInput input) throws IOException, InterruptedExc isLastPage); } - setupData = null; state = STATE_READ_PAYLOAD; // First payload packet. Trim the payload array of the ogg packet after headers have been read. oggPacket.trimPayload(); return Extractor.RESULT_CONTINUE; } - private int readPayload(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + @RequiresNonNull({"trackOutput", "oggSeeker", "extractorOutput"}) + private int readPayload(ExtractorInput input, PositionHolder seekPosition) throws IOException { long position = oggSeeker.read(input); if (position >= 0) { seekPosition.position = position; @@ -172,8 +204,9 @@ private int readPayload(ExtractorInput input, PositionHolder seekPosition) } else if (position < -1) { onSeekEnd(-(position + 2)); } + if (!seekMapSet) { - SeekMap seekMap = oggSeeker.createSeekMap(); + SeekMap seekMap = checkStateNotNull(oggSeeker.createSeekMap()); extractorOutput.seekMap(seekMap); seekMapSet = true; } @@ -234,8 +267,9 @@ protected long convertTimeToGranule(long timeUs) { * @param setupData Setup data to be filled. * @return Whether the packet contains header data. */ - protected abstract boolean readHeaders(ParsableByteArray packet, long position, - SetupData setupData) throws IOException, InterruptedException; + @EnsuresNonNullIf(expression = "#3.format", result = false) + protected abstract boolean readHeaders( + ParsableByteArray packet, long position, SetupData setupData) throws IOException; /** * Called on end of seeking. 
@@ -262,7 +296,5 @@ public void startSeek(long targetGranule) { public SeekMap createSeekMap() { return new SeekMap.Unseekable(C.TIME_UNSET); } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/VorbisReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/VorbisReader.java index b57678266a..316543113c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/VorbisReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/VorbisReader.java @@ -15,31 +15,37 @@ */ package com.google.android.exoplayer2.extractor.ogg; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + +import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.extractor.VorbisUtil; import com.google.android.exoplayer2.extractor.VorbisUtil.Mode; +import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.common.collect.ImmutableList; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; +import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf; -/** - * {@link StreamReader} to extract Vorbis data out of Ogg byte stream. - */ +/** {@link StreamReader} to extract Vorbis data out of Ogg byte stream. */ /* package */ final class VorbisReader extends StreamReader { - private VorbisSetup vorbisSetup; + @Nullable private VorbisSetup vorbisSetup; private int previousPacketBlockSize; private boolean seenFirstAudioPacket; - private VorbisUtil.VorbisIdHeader vorbisIdHeader; - private VorbisUtil.CommentHeader commentHeader; + @Nullable private VorbisUtil.VorbisIdHeader vorbisIdHeader; + @Nullable private VorbisUtil.CommentHeader commentHeader; public static boolean verifyBitstreamType(ParsableByteArray data) { try { - return VorbisUtil.verifyVorbisHeaderCapturePattern(0x01, data, true); + return VorbisUtil.verifyVorbisHeaderCapturePattern(/* headerType= */ 0x01, data, true); } catch (ParserException e) { return false; } @@ -67,16 +73,16 @@ protected void onSeekEnd(long currentGranule) { @Override protected long preparePayload(ParsableByteArray packet) { // if this is not an audio packet... - if ((packet.data[0] & 0x01) == 1) { + if ((packet.getData()[0] & 0x01) == 1) { return -1; } // ... we need to decode the block size - int packetBlockSize = decodeBlockSize(packet.data[0], vorbisSetup); + int packetBlockSize = decodeBlockSize(packet.getData()[0], checkStateNotNull(vorbisSetup)); // a packet contains samples produced from overlapping the previous and current frame data // (https://www.xiph.org/vorbis/doc/Vorbis_I_spec.html#x1-350001.3.2) - int samplesInPacket = seenFirstAudioPacket ? (packetBlockSize + previousPacketBlockSize) / 4 - : 0; + int samplesInPacket = + seenFirstAudioPacket ? 
(packetBlockSize + previousPacketBlockSize) / 4 : 0; // codec expects the number of samples appended to audio data appendNumberOfSamples(packet, samplesInPacket); @@ -87,9 +93,11 @@ protected long preparePayload(ParsableByteArray packet) { } @Override + @EnsuresNonNullIf(expression = "#3.format", result = false) protected boolean readHeaders(ParsableByteArray packet, long position, SetupData setupData) - throws IOException, InterruptedException { + throws IOException { if (vorbisSetup != null) { + checkNotNull(setupData.format); return false; } @@ -97,19 +105,33 @@ protected boolean readHeaders(ParsableByteArray packet, long position, SetupData if (vorbisSetup == null) { return true; } - - ArrayList codecInitialisationData = new ArrayList<>(); - codecInitialisationData.add(vorbisSetup.idHeader.data); - codecInitialisationData.add(vorbisSetup.setupHeaderData); - - setupData.format = Format.createAudioSampleFormat(null, MimeTypes.AUDIO_VORBIS, null, - this.vorbisSetup.idHeader.bitrateNominal, Format.NO_VALUE, - this.vorbisSetup.idHeader.channels, (int) this.vorbisSetup.idHeader.sampleRate, - codecInitialisationData, null, 0, null); + VorbisSetup vorbisSetup = this.vorbisSetup; + + VorbisUtil.VorbisIdHeader idHeader = vorbisSetup.idHeader; + + ArrayList codecInitializationData = new ArrayList<>(); + codecInitializationData.add(idHeader.data); + codecInitializationData.add(vorbisSetup.setupHeaderData); + + @Nullable + Metadata metadata = + VorbisUtil.parseVorbisComments(ImmutableList.copyOf(vorbisSetup.commentHeader.comments)); + + setupData.format = + new Format.Builder() + .setSampleMimeType(MimeTypes.AUDIO_VORBIS) + .setAverageBitrate(idHeader.bitrateNominal) + .setPeakBitrate(idHeader.bitrateMaximum) + .setChannelCount(idHeader.channels) + .setSampleRate(idHeader.sampleRate) + .setInitializationData(codecInitializationData) + .setMetadata(metadata) + .build(); return true; } @VisibleForTesting + @Nullable /* package */ VorbisSetup readSetupHeaders(ParsableByteArray scratch) throws IOException { if (vorbisIdHeader == null) { @@ -121,11 +143,13 @@ protected boolean readHeaders(ParsableByteArray packet, long position, SetupData commentHeader = VorbisUtil.readVorbisCommentHeader(scratch); return null; } + VorbisUtil.VorbisIdHeader vorbisIdHeader = this.vorbisIdHeader; + VorbisUtil.CommentHeader commentHeader = this.commentHeader; // the third packet contains the setup header byte[] setupHeaderData = new byte[scratch.limit()]; // raw data of vorbis setup header has to be passed to decoder as CSD buffer #2 - System.arraycopy(scratch.data, 0, setupHeaderData, 0, scratch.limit()); + System.arraycopy(scratch.getData(), 0, setupHeaderData, 0, scratch.limit()); // partially decode setup header to get the modes Mode[] modes = VorbisUtil.readVorbisModes(scratch, vorbisIdHeader.channels); // we need the ilog of modes all the time when extracting, so we compute it once @@ -151,14 +175,18 @@ protected boolean readHeaders(ParsableByteArray packet, long position, SetupData @VisibleForTesting /* package */ static void appendNumberOfSamples( ParsableByteArray buffer, long packetSampleCount) { - - buffer.setLimit(buffer.limit() + 4); + if (buffer.capacity() < buffer.limit() + 4) { + buffer.reset(Arrays.copyOf(buffer.getData(), buffer.limit() + 4)); + } else { + buffer.setLimit(buffer.limit() + 4); + } // The vorbis decoder expects the number of samples in the packet // to be appended to the audio data as an int32 - buffer.data[buffer.limit() - 4] = (byte) (packetSampleCount & 0xFF); - 
buffer.data[buffer.limit() - 3] = (byte) ((packetSampleCount >>> 8) & 0xFF); - buffer.data[buffer.limit() - 2] = (byte) ((packetSampleCount >>> 16) & 0xFF); - buffer.data[buffer.limit() - 1] = (byte) ((packetSampleCount >>> 24) & 0xFF); + byte[] data = buffer.getData(); + data[buffer.limit() - 4] = (byte) (packetSampleCount & 0xFF); + data[buffer.limit() - 3] = (byte) ((packetSampleCount >>> 8) & 0xFF); + data[buffer.limit() - 2] = (byte) ((packetSampleCount >>> 16) & 0xFF); + data[buffer.limit() - 1] = (byte) ((packetSampleCount >>> 24) & 0xFF); } private static int decodeBlockSize(byte firstByteOfAudioPacket, VorbisSetup vorbisSetup) { @@ -173,9 +201,7 @@ private static int decodeBlockSize(byte firstByteOfAudioPacket, VorbisSetup vorb return currentBlockSize; } - /** - * Class to hold all data read from Vorbis setup headers. - */ + /** Class to hold all data read from Vorbis setup headers. */ /* package */ static final class VorbisSetup { public final VorbisUtil.VorbisIdHeader idHeader; @@ -184,15 +210,17 @@ private static int decodeBlockSize(byte firstByteOfAudioPacket, VorbisSetup vorb public final Mode[] modes; public final int iLogModes; - public VorbisSetup(VorbisUtil.VorbisIdHeader idHeader, VorbisUtil.CommentHeader - commentHeader, byte[] setupHeaderData, Mode[] modes, int iLogModes) { + public VorbisSetup( + VorbisUtil.VorbisIdHeader idHeader, + VorbisUtil.CommentHeader commentHeader, + byte[] setupHeaderData, + Mode[] modes, + int iLogModes) { this.idHeader = idHeader; this.commentHeader = commentHeader; this.setupHeaderData = setupHeaderData; this.modes = modes; this.iLogModes = iLogModes; } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/package-info.java new file mode 100644 index 0000000000..ef8ed054a4 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ogg/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.extractor.ogg; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/package-info.java new file mode 100644 index 0000000000..9920b247e6 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.extractor; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/rawcc/RawCcExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/rawcc/RawCcExtractor.java deleted file mode 100644 index 3d76276240..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/rawcc/RawCcExtractor.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.extractor.rawcc; - -import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.ParserException; -import com.google.android.exoplayer2.extractor.Extractor; -import com.google.android.exoplayer2.extractor.ExtractorInput; -import com.google.android.exoplayer2.extractor.ExtractorOutput; -import com.google.android.exoplayer2.extractor.PositionHolder; -import com.google.android.exoplayer2.extractor.SeekMap; -import com.google.android.exoplayer2.extractor.TrackOutput; -import com.google.android.exoplayer2.util.ParsableByteArray; -import java.io.IOException; - -/** - * Extracts data from the RawCC container format. - */ -public final class RawCcExtractor implements Extractor { - - private static final int SCRATCH_SIZE = 9; - private static final int HEADER_SIZE = 8; - private static final int HEADER_ID = 0x52434301; - private static final int TIMESTAMP_SIZE_V0 = 4; - private static final int TIMESTAMP_SIZE_V1 = 8; - - // Parser states. 
- private static final int STATE_READING_HEADER = 0; - private static final int STATE_READING_TIMESTAMP_AND_COUNT = 1; - private static final int STATE_READING_SAMPLES = 2; - - private final Format format; - - private final ParsableByteArray dataScratch; - - private TrackOutput trackOutput; - - private int parserState; - private int version; - private long timestampUs; - private int remainingSampleCount; - private int sampleBytesWritten; - - public RawCcExtractor(Format format) { - this.format = format; - dataScratch = new ParsableByteArray(SCRATCH_SIZE); - parserState = STATE_READING_HEADER; - } - - @Override - public void init(ExtractorOutput output) { - output.seekMap(new SeekMap.Unseekable(C.TIME_UNSET)); - trackOutput = output.track(0, C.TRACK_TYPE_TEXT); - output.endTracks(); - trackOutput.format(format); - } - - @Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { - dataScratch.reset(); - input.peekFully(dataScratch.data, 0, HEADER_SIZE); - return dataScratch.readInt() == HEADER_ID; - } - - @Override - public int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { - while (true) { - switch (parserState) { - case STATE_READING_HEADER: - if (parseHeader(input)) { - parserState = STATE_READING_TIMESTAMP_AND_COUNT; - } else { - return RESULT_END_OF_INPUT; - } - break; - case STATE_READING_TIMESTAMP_AND_COUNT: - if (parseTimestampAndSampleCount(input)) { - parserState = STATE_READING_SAMPLES; - } else { - parserState = STATE_READING_HEADER; - return RESULT_END_OF_INPUT; - } - break; - case STATE_READING_SAMPLES: - parseSamples(input); - parserState = STATE_READING_TIMESTAMP_AND_COUNT; - return RESULT_CONTINUE; - default: - throw new IllegalStateException(); - } - } - } - - @Override - public void seek(long position, long timeUs) { - parserState = STATE_READING_HEADER; - } - - @Override - public void release() { - // Do nothing - } - - private boolean parseHeader(ExtractorInput input) throws IOException, InterruptedException { - dataScratch.reset(); - if (input.readFully(dataScratch.data, 0, HEADER_SIZE, true)) { - if (dataScratch.readInt() != HEADER_ID) { - throw new IOException("Input not RawCC"); - } - version = dataScratch.readUnsignedByte(); - // no versions use the flag fields yet - return true; - } else { - return false; - } - } - - private boolean parseTimestampAndSampleCount(ExtractorInput input) throws IOException, - InterruptedException { - dataScratch.reset(); - if (version == 0) { - if (!input.readFully(dataScratch.data, 0, TIMESTAMP_SIZE_V0 + 1, true)) { - return false; - } - // version 0 timestamps are 45kHz, so we need to convert them into us - timestampUs = dataScratch.readUnsignedInt() * 1000 / 45; - } else if (version == 1) { - if (!input.readFully(dataScratch.data, 0, TIMESTAMP_SIZE_V1 + 1, true)) { - return false; - } - timestampUs = dataScratch.readLong(); - } else { - throw new ParserException("Unsupported version number: " + version); - } - - remainingSampleCount = dataScratch.readUnsignedByte(); - sampleBytesWritten = 0; - return true; - } - - private void parseSamples(ExtractorInput input) throws IOException, InterruptedException { - for (; remainingSampleCount > 0; remainingSampleCount--) { - dataScratch.reset(); - input.readFully(dataScratch.data, 0, 3); - - trackOutput.sampleData(dataScratch, 3); - sampleBytesWritten += 3; - } - - if (sampleBytesWritten > 0) { - trackOutput.sampleMetadata(timestampUs, C.BUFFER_FLAG_KEY_FRAME, sampleBytesWritten, 0, null); - } - } - -} 
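The RawCcExtractor deleted above parsed a very small container: a header beginning with the magic 0x52434301 ('R', 'C', 'C', 0x01) and a version byte, then repeated blocks of a timestamp, a sample count, and that many 3-byte samples pushed to a text track. Version 0 stores the timestamp as a 32-bit tick count at 45 kHz, which the extractor converted to microseconds before emitting sample metadata; version 1 reads a 64-bit value directly. A small sketch of the version-0 conversion, mirroring the arithmetic in the deleted parseTimestampAndSampleCount (the helper name is illustrative only):

    // 45 kHz ticks -> microseconds: one tick is 1/45_000 s, i.e. 1000/45 us.
    static long rawCcV0TimestampToUs(long ticks45kHz) {
      return ticks45kHz * 1000 / 45;
    }

Downstream, each block's samples were flushed with a single sampleMetadata call carrying the converted timestamp.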
diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/rawcc/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/rawcc/package-info.java new file mode 100644 index 0000000000..b01e56b8dd --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/rawcc/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.extractor.rawcc; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac3Extractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac3Extractor.java index b1d15b7189..65f0f3a60a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac3Extractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac3Extractor.java @@ -31,9 +31,7 @@ import com.google.android.exoplayer2.util.ParsableByteArray; import java.io.IOException; -/** - * Extracts data from (E-)AC-3 bitstreams. - */ +/** Extracts data from (E-)AC-3 bitstreams. */ public final class Ac3Extractor implements Extractor { /** Factory for {@link Ac3Extractor} instances. */ @@ -44,6 +42,7 @@ public final class Ac3Extractor implements Extractor { * up. */ private static final int MAX_SNIFF_BYTES = 8 * 1024; + private static final int AC3_SYNC_WORD = 0x0B77; private static final int MAX_SYNC_FRAME_SIZE = 2786; @@ -61,12 +60,12 @@ public Ac3Extractor() { // Extractor implementation. @Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { + public boolean sniff(ExtractorInput input) throws IOException { // Skip any ID3 headers. 
ParsableByteArray scratch = new ParsableByteArray(ID3_HEADER_LENGTH); int startPosition = 0; while (true) { - input.peekFully(scratch.data, /* offset= */ 0, ID3_HEADER_LENGTH); + input.peekFully(scratch.getData(), /* offset= */ 0, ID3_HEADER_LENGTH); scratch.setPosition(0); if (scratch.readUnsignedInt24() != ID3_TAG) { break; @@ -82,7 +81,7 @@ public boolean sniff(ExtractorInput input) throws IOException, InterruptedExcept int headerPosition = startPosition; int validFramesCount = 0; while (true) { - input.peekFully(scratch.data, 0, 6); + input.peekFully(scratch.getData(), 0, 6); scratch.setPosition(0); int syncBytes = scratch.readUnsignedShort(); if (syncBytes != AC3_SYNC_WORD) { @@ -96,7 +95,7 @@ public boolean sniff(ExtractorInput input) throws IOException, InterruptedExcept if (++validFramesCount >= 4) { return true; } - int frameSize = Ac3Util.parseAc3SyncframeSize(scratch.data); + int frameSize = Ac3Util.parseAc3SyncframeSize(scratch.getData()); if (frameSize == C.LENGTH_UNSET) { return false; } @@ -124,9 +123,8 @@ public void release() { } @Override - public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException, - InterruptedException { - int bytesRead = input.read(sampleData.data, 0, MAX_SYNC_FRAME_SIZE); + public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { + int bytesRead = input.read(sampleData.getData(), 0, MAX_SYNC_FRAME_SIZE); if (bytesRead == C.RESULT_END_OF_INPUT) { return RESULT_END_OF_INPUT; } @@ -145,5 +143,4 @@ public int read(ExtractorInput input, PositionHolder seekPosition) throws IOExce reader.consume(sampleData); return RESULT_CONTINUE; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac3Reader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac3Reader.java index cd07a40c6d..01b86edae8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac3Reader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac3Reader.java @@ -15,7 +15,11 @@ */ package com.google.android.exoplayer2.extractor.ts; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.audio.Ac3Util; @@ -23,19 +27,24 @@ import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableBitArray; import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * Parses a continuous (E-)AC-3 byte stream and extracts individual samples. - */ +/** Parses a continuous (E-)AC-3 byte stream and extracts individual samples. 
*/ public final class Ac3Reader implements ElementaryStreamReader { @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({STATE_FINDING_SYNC, STATE_READING_HEADER, STATE_READING_SAMPLE}) private @interface State {} @@ -47,12 +56,12 @@ public final class Ac3Reader implements ElementaryStreamReader { private final ParsableBitArray headerScratchBits; private final ParsableByteArray headerScratchBytes; - private final String language; + @Nullable private final String language; - private String trackFormatId; - private TrackOutput output; + private @MonotonicNonNull String formatId; + private @MonotonicNonNull TrackOutput output; - @State private int state; + private @State int state; private int bytesRead; // Used to find the header. @@ -60,15 +69,13 @@ public final class Ac3Reader implements ElementaryStreamReader { // Used when parsing the header. private long sampleDurationUs; - private Format format; + private @MonotonicNonNull Format format; private int sampleSize; // Used when reading the samples. private long timeUs; - /** - * Constructs a new reader for (E-)AC-3 elementary streams. - */ + /** Constructs a new reader for (E-)AC-3 elementary streams. */ public Ac3Reader() { this(null); } @@ -78,10 +85,11 @@ public Ac3Reader() { * * @param language Track language. */ - public Ac3Reader(String language) { + public Ac3Reader(@Nullable String language) { headerScratchBits = new ParsableBitArray(new byte[HEADER_SIZE]); headerScratchBytes = new ParsableByteArray(headerScratchBits.data); state = STATE_FINDING_SYNC; + timeUs = C.TIME_UNSET; this.language = language; } @@ -90,34 +98,38 @@ public void seek() { state = STATE_FINDING_SYNC; bytesRead = 0; lastByteWas0B = false; + timeUs = C.TIME_UNSET; } @Override - public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator generator) { - generator.generateNewId(); - trackFormatId = generator.getFormatId(); - output = extractorOutput.track(generator.getTrackId(), C.TRACK_TYPE_AUDIO); + public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGenerator) { + idGenerator.generateNewId(); + formatId = idGenerator.getFormatId(); + output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_AUDIO); } @Override public void packetStarted(long pesTimeUs, @TsPayloadReader.Flags int flags) { - timeUs = pesTimeUs; + if (pesTimeUs != C.TIME_UNSET) { + timeUs = pesTimeUs; + } } @Override public void consume(ParsableByteArray data) { + Assertions.checkStateNotNull(output); // Asserts that createTracks has been called. 
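The surrounding consume() implementation drives a three-state machine: STATE_FINDING_SYNC scans the PES payload for the (E-)AC-3 sync word 0x0B77, STATE_READING_HEADER accumulates the fixed-size header via continueRead, and STATE_READING_SAMPLE forwards sampleSize bytes to the track output before re-arming the sync search. The sync word can straddle two consume() calls, which is why the reader keeps lastByteWas0B between calls; skipToNextSync itself is outside these hunks, so the following is only a sketch of that kind of scan under that assumption, not the library's exact code:

    // Sketch: find 0x0B 0x77, remembering whether the previous buffer ended in 0x0B so a
    // sync word split across two consume() calls is still detected.
    private boolean lastByteWas0B;

    private boolean skipToNextSync(ParsableByteArray pesBuffer) {
      byte[] data = pesBuffer.getData();
      int position = pesBuffer.getPosition();
      int limit = pesBuffer.limit();
      while (position < limit) {
        int currentByte = data[position++] & 0xFF;
        if (lastByteWas0B && currentByte == 0x77) {
          lastByteWas0B = false;
          pesBuffer.setPosition(position); // Leave the position just after the sync word.
          return true;
        }
        lastByteWas0B = currentByte == 0x0B;
      }
      pesBuffer.setPosition(position); // Nothing found; everything consumed.
      return false;
    }

On success the two sync bytes are written back into headerScratchBytes and bytesRead is set to 2, which is what the STATE_FINDING_SYNC branch of consume() does here.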
while (data.bytesLeft() > 0) { switch (state) { case STATE_FINDING_SYNC: if (skipToNextSync(data)) { state = STATE_READING_HEADER; - headerScratchBytes.data[0] = 0x0B; - headerScratchBytes.data[1] = 0x77; + headerScratchBytes.getData()[0] = 0x0B; + headerScratchBytes.getData()[1] = 0x77; bytesRead = 2; } break; case STATE_READING_HEADER: - if (continueRead(data, headerScratchBytes.data, HEADER_SIZE)) { + if (continueRead(data, headerScratchBytes.getData(), HEADER_SIZE)) { parseHeader(); headerScratchBytes.setPosition(0); output.sampleData(headerScratchBytes, HEADER_SIZE); @@ -125,12 +137,14 @@ public void consume(ParsableByteArray data) { } break; case STATE_READING_SAMPLE: - int bytesToRead = Math.min(data.bytesLeft(), sampleSize - bytesRead); + int bytesToRead = min(data.bytesLeft(), sampleSize - bytesRead); output.sampleData(data, bytesToRead); bytesRead += bytesToRead; if (bytesRead == sampleSize) { - output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null); - timeUs += sampleDurationUs; + if (timeUs != C.TIME_UNSET) { + output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null); + timeUs += sampleDurationUs; + } state = STATE_FINDING_SYNC; } break; @@ -155,7 +169,7 @@ public void packetFinished() { * @return Whether the target length was reached. */ private boolean continueRead(ParsableByteArray source, byte[] target, int targetLength) { - int bytesToRead = Math.min(source.bytesLeft(), targetLength - bytesRead); + int bytesToRead = min(source.bytesLeft(), targetLength - bytesRead); source.readBytes(target, bytesRead, bytesToRead); bytesRead += bytesToRead; return bytesRead == targetLength; @@ -185,19 +199,28 @@ private boolean skipToNextSync(ParsableByteArray pesBuffer) { return false; } - /** - * Parses the sample header. - */ - @SuppressWarnings("ReferenceEquality") + /** Parses the sample header. */ + @RequiresNonNull("output") private void parseHeader() { headerScratchBits.setPosition(0); SyncFrameInfo frameInfo = Ac3Util.parseAc3SyncframeInfo(headerScratchBits); - if (format == null || frameInfo.channelCount != format.channelCount + if (format == null + || frameInfo.channelCount != format.channelCount || frameInfo.sampleRate != format.sampleRate - || frameInfo.mimeType != format.sampleMimeType) { - format = Format.createAudioSampleFormat(trackFormatId, frameInfo.mimeType, null, - Format.NO_VALUE, Format.NO_VALUE, frameInfo.channelCount, frameInfo.sampleRate, null, - null, 0, language); + || !Util.areEqual(frameInfo.mimeType, format.sampleMimeType)) { + Format.Builder formatBuilder = + new Format.Builder() + .setId(formatId) + .setSampleMimeType(frameInfo.mimeType) + .setChannelCount(frameInfo.channelCount) + .setSampleRate(frameInfo.sampleRate) + .setLanguage(language) + .setPeakBitrate(frameInfo.bitrate); + // AC3 has constant bitrate, so averageBitrate = peakBitrate + if (MimeTypes.AUDIO_AC3.equals(frameInfo.mimeType)) { + formatBuilder.setAverageBitrate(frameInfo.bitrate); + } + format = formatBuilder.build(); output.format(format); } sampleSize = frameInfo.frameSize; @@ -205,5 +228,4 @@ private void parseHeader() { // specifies the number of PCM audio samples per second. 
sampleDurationUs = C.MICROS_PER_SECOND * frameInfo.sampleCount / format.sampleRate; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac4Extractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac4Extractor.java index 205d71e16e..996ae2f69b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac4Extractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac4Extractor.java @@ -68,12 +68,12 @@ public Ac4Extractor() { // Extractor implementation. @Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { + public boolean sniff(ExtractorInput input) throws IOException { // Skip any ID3 headers. ParsableByteArray scratch = new ParsableByteArray(ID3_HEADER_LENGTH); int startPosition = 0; while (true) { - input.peekFully(scratch.data, /* offset= */ 0, ID3_HEADER_LENGTH); + input.peekFully(scratch.getData(), /* offset= */ 0, ID3_HEADER_LENGTH); scratch.setPosition(0); if (scratch.readUnsignedInt24() != ID3_TAG) { break; @@ -89,7 +89,7 @@ public boolean sniff(ExtractorInput input) throws IOException, InterruptedExcept int headerPosition = startPosition; int validFramesCount = 0; while (true) { - input.peekFully(scratch.data, /* offset= */ 0, /* length= */ FRAME_HEADER_SIZE); + input.peekFully(scratch.getData(), /* offset= */ 0, /* length= */ FRAME_HEADER_SIZE); scratch.setPosition(0); int syncBytes = scratch.readUnsignedShort(); if (syncBytes != AC40_SYNCWORD && syncBytes != AC41_SYNCWORD) { @@ -103,7 +103,7 @@ public boolean sniff(ExtractorInput input) throws IOException, InterruptedExcept if (++validFramesCount >= 4) { return true; } - int frameSize = Ac4Util.parseAc4SyncframeSize(scratch.data, syncBytes); + int frameSize = Ac4Util.parseAc4SyncframeSize(scratch.getData(), syncBytes); if (frameSize == C.LENGTH_UNSET) { return false; } @@ -132,9 +132,9 @@ public void release() { } @Override - public int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { - int bytesRead = input.read(sampleData.data, /* offset= */ 0, /* length= */ READ_BUFFER_SIZE); + public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { + int bytesRead = + input.read(sampleData.getData(), /* offset= */ 0, /* length= */ READ_BUFFER_SIZE); if (bytesRead == C.RESULT_END_OF_INPUT) { return RESULT_END_OF_INPUT; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac4Reader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac4Reader.java index 48bd07fce4..360057d0af 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac4Reader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Ac4Reader.java @@ -15,7 +15,11 @@ */ package com.google.android.exoplayer2.extractor.ts; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.audio.Ac4Util; @@ -23,18 +27,23 @@ import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator; +import com.google.android.exoplayer2.util.Assertions; import 
com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableBitArray; import com.google.android.exoplayer2.util.ParsableByteArray; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; /** Parses a continuous AC-4 byte stream and extracts individual samples. */ public final class Ac4Reader implements ElementaryStreamReader { @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({STATE_FINDING_SYNC, STATE_READING_HEADER, STATE_READING_SAMPLE}) private @interface State {} @@ -44,12 +53,12 @@ public final class Ac4Reader implements ElementaryStreamReader { private final ParsableBitArray headerScratchBits; private final ParsableByteArray headerScratchBytes; - private final String language; + @Nullable private final String language; - private String trackFormatId; - private TrackOutput output; + private @MonotonicNonNull String formatId; + private @MonotonicNonNull TrackOutput output; - @State private int state; + private @State int state; private int bytesRead; // Used to find the header. @@ -58,7 +67,7 @@ public final class Ac4Reader implements ElementaryStreamReader { // Used when parsing the header. private long sampleDurationUs; - private Format format; + private @MonotonicNonNull Format format; private int sampleSize; // Used when reading the samples. @@ -74,13 +83,14 @@ public Ac4Reader() { * * @param language Track language. */ - public Ac4Reader(String language) { + public Ac4Reader(@Nullable String language) { headerScratchBits = new ParsableBitArray(new byte[Ac4Util.HEADER_SIZE_FOR_PARSER]); headerScratchBytes = new ParsableByteArray(headerScratchBits.data); state = STATE_FINDING_SYNC; bytesRead = 0; lastByteWasAC = false; hasCRC = false; + timeUs = C.TIME_UNSET; this.language = language; } @@ -90,34 +100,38 @@ public void seek() { bytesRead = 0; lastByteWasAC = false; hasCRC = false; + timeUs = C.TIME_UNSET; } @Override - public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator generator) { - generator.generateNewId(); - trackFormatId = generator.getFormatId(); - output = extractorOutput.track(generator.getTrackId(), C.TRACK_TYPE_AUDIO); + public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGenerator) { + idGenerator.generateNewId(); + formatId = idGenerator.getFormatId(); + output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_AUDIO); } @Override public void packetStarted(long pesTimeUs, @TsPayloadReader.Flags int flags) { - timeUs = pesTimeUs; + if (pesTimeUs != C.TIME_UNSET) { + timeUs = pesTimeUs; + } } @Override public void consume(ParsableByteArray data) { + Assertions.checkStateNotNull(output); // Asserts that createTracks has been called. while (data.bytesLeft() > 0) { switch (state) { case STATE_FINDING_SYNC: if (skipToNextSync(data)) { state = STATE_READING_HEADER; - headerScratchBytes.data[0] = (byte) 0xAC; - headerScratchBytes.data[1] = (byte) (hasCRC ? 0x41 : 0x40); + headerScratchBytes.getData()[0] = (byte) 0xAC; + headerScratchBytes.getData()[1] = (byte) (hasCRC ? 
0x41 : 0x40); bytesRead = 2; } break; case STATE_READING_HEADER: - if (continueRead(data, headerScratchBytes.data, Ac4Util.HEADER_SIZE_FOR_PARSER)) { + if (continueRead(data, headerScratchBytes.getData(), Ac4Util.HEADER_SIZE_FOR_PARSER)) { parseHeader(); headerScratchBytes.setPosition(0); output.sampleData(headerScratchBytes, Ac4Util.HEADER_SIZE_FOR_PARSER); @@ -125,12 +139,14 @@ public void consume(ParsableByteArray data) { } break; case STATE_READING_SAMPLE: - int bytesToRead = Math.min(data.bytesLeft(), sampleSize - bytesRead); + int bytesToRead = min(data.bytesLeft(), sampleSize - bytesRead); output.sampleData(data, bytesToRead); bytesRead += bytesToRead; if (bytesRead == sampleSize) { - output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null); - timeUs += sampleDurationUs; + if (timeUs != C.TIME_UNSET) { + output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null); + timeUs += sampleDurationUs; + } state = STATE_FINDING_SYNC; } break; @@ -155,7 +171,7 @@ public void packetFinished() { * @return Whether the target length was reached. */ private boolean continueRead(ParsableByteArray source, byte[] target, int targetLength) { - int bytesToRead = Math.min(source.bytesLeft(), targetLength - bytesRead); + int bytesToRead = min(source.bytesLeft(), targetLength - bytesRead); source.readBytes(target, bytesRead, bytesToRead); bytesRead += bytesToRead; return bytesRead == targetLength; @@ -185,7 +201,7 @@ private boolean skipToNextSync(ParsableByteArray pesBuffer) { } /** Parses the sample header. */ - @SuppressWarnings("ReferenceEquality") + @RequiresNonNull("output") private void parseHeader() { headerScratchBits.setPosition(0); SyncFrameInfo frameInfo = Ac4Util.parseAc4SyncframeInfo(headerScratchBits); @@ -194,18 +210,13 @@ private void parseHeader() { || frameInfo.sampleRate != format.sampleRate || !MimeTypes.AUDIO_AC4.equals(format.sampleMimeType)) { format = - Format.createAudioSampleFormat( - trackFormatId, - MimeTypes.AUDIO_AC4, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - /* maxInputSize= */ Format.NO_VALUE, - frameInfo.channelCount, - frameInfo.sampleRate, - /* initializationData= */ null, - /* drmInitData= */ null, - /* selectionFlags= */ 0, - language); + new Format.Builder() + .setId(formatId) + .setSampleMimeType(MimeTypes.AUDIO_AC4) + .setChannelCount(frameInfo.channelCount) + .setSampleRate(frameInfo.sampleRate) + .setLanguage(language) + .build(); output.format(format); } sampleSize = frameInfo.frameSize; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/AdtsExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/AdtsExtractor.java index 86dacd8c30..e704f2ff89 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/AdtsExtractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/AdtsExtractor.java @@ -18,11 +18,13 @@ import static com.google.android.exoplayer2.extractor.ts.TsPayloadReader.FLAG_DATA_ALIGNMENT_INDICATOR; import static com.google.android.exoplayer2.metadata.id3.Id3Decoder.ID3_HEADER_LENGTH; import static com.google.android.exoplayer2.metadata.id3.Id3Decoder.ID3_TAG; +import static java.lang.annotation.ElementType.TYPE_USE; import androidx.annotation.IntDef; -import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.Player; 
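For context on the Ac4Reader.parseHeader change above: the deprecated Format.createAudioSampleFormat call is replaced by the Format.Builder API, setting only the fields the reader actually derives from the sync frame. A minimal sketch of the equivalent construction, with illustrative values and a hypothetical class name:

import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.MimeTypes;

final class Ac4FormatSketch {
  // Builds an AC-4 audio Format the same way the new parseHeader() does; the arguments
  // (id, channel count, sample rate, language) are placeholders, not stream-derived values.
  static Format buildAc4Format(
      String formatId, int channelCount, int sampleRateHz, String language) {
    return new Format.Builder()
        .setId(formatId)                        // e.g. the id from TrackIdGenerator
        .setSampleMimeType(MimeTypes.AUDIO_AC4)
        .setChannelCount(channelCount)          // e.g. 2
        .setSampleRate(sampleRateHz)            // e.g. 48000
        .setLanguage(language)                  // may be null
        .build();
  }
}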
import com.google.android.exoplayer2.extractor.ConstantBitrateSeekMap; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; @@ -39,24 +41,27 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * Extracts data from AAC bit streams with ADTS framing. - */ +/** Extracts data from AAC bit streams with ADTS framing. */ public final class AdtsExtractor implements Extractor { /** Factory for {@link AdtsExtractor} instances. */ public static final ExtractorsFactory FACTORY = () -> new Extractor[] {new AdtsExtractor()}; /** - * Flags controlling the behavior of the extractor. Possible flag value is {@link - * #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING}. + * Flags controlling the behavior of the extractor. Possible flag values are {@link + * #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING} and {@link + * #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef( flag = true, - value = {FLAG_ENABLE_CONSTANT_BITRATE_SEEKING}) + value = {FLAG_ENABLE_CONSTANT_BITRATE_SEEKING, FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS}) public @interface Flags {} /** * Flag to force enable seeking using a constant bitrate assumption in cases where seeking would @@ -66,6 +71,18 @@ public final class AdtsExtractor implements Extractor { * are not precise, especially when the stream bitrate varies a lot. */ public static final int FLAG_ENABLE_CONSTANT_BITRATE_SEEKING = 1; + /** + * Like {@link #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING}, except that seeking is also enabled in + * cases where the content length (and hence the duration of the media) is unknown. Application + * code should ensure that requested seek positions are valid when using this flag, or be ready to + * handle playback failures reported through {@link Player.Listener#onPlayerError} with {@link + * PlaybackException#errorCode} set to {@link + * PlaybackException#ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE}. + * + *
<p>
      If this flag is set, then the behavior enabled by {@link + * #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING} is implicitly enabled as well. + */ + public static final int FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS = 1 << 1; private static final int MAX_PACKET_SIZE = 2 * 1024; /** @@ -86,7 +103,7 @@ public final class AdtsExtractor implements Extractor { private final ParsableByteArray scratch; private final ParsableBitArray scratchBits; - @Nullable private ExtractorOutput extractorOutput; + private @MonotonicNonNull ExtractorOutput extractorOutput; private long firstSampleTimestampUs; private long firstFramePosition; @@ -106,6 +123,9 @@ public AdtsExtractor() { * @param flags Flags that control the extractor's behavior. */ public AdtsExtractor(@Flags int flags) { + if ((flags & FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS) != 0) { + flags |= FLAG_ENABLE_CONSTANT_BITRATE_SEEKING; + } this.flags = flags; reader = new AdtsReader(true); packetBuffer = new ParsableByteArray(MAX_PACKET_SIZE); @@ -113,13 +133,13 @@ public AdtsExtractor(@Flags int flags) { firstFramePosition = C.POSITION_UNSET; // Allocate scratch space for an ID3 header. The same buffer is also used to read 4 byte values. scratch = new ParsableByteArray(ID3_HEADER_LENGTH); - scratchBits = new ParsableBitArray(scratch.data); + scratchBits = new ParsableBitArray(scratch.getData()); } // Extractor implementation. @Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { + public boolean sniff(ExtractorInput input) throws IOException { // Skip any ID3 headers. int startPosition = peekId3Header(input); @@ -128,16 +148,16 @@ public boolean sniff(ExtractorInput input) throws IOException, InterruptedExcept int totalValidFramesSize = 0; int validFramesCount = 0; while (true) { - input.peekFully(scratch.data, 0, 2); + input.peekFully(scratch.getData(), 0, 2); scratch.setPosition(0); int syncBytes = scratch.readUnsignedShort(); if (!AdtsReader.isAdtsSyncWord(syncBytes)) { + // We didn't find an ADTS sync word. Start searching again from one byte further into the + // start of the stream. validFramesCount = 0; totalValidFramesSize = 0; + headerPosition++; input.resetPeekPosition(); - if (++headerPosition - startPosition >= MAX_SNIFF_BYTES) { - return false; - } input.advancePeekPosition(headerPosition); } else { if (++validFramesCount >= 4 && totalValidFramesSize > TsExtractor.TS_PACKET_SIZE) { @@ -145,15 +165,24 @@ public boolean sniff(ExtractorInput input) throws IOException, InterruptedExcept } // Skip the frame. - input.peekFully(scratch.data, 0, 4); + input.peekFully(scratch.getData(), 0, 4); scratchBits.setPosition(14); int frameSize = scratchBits.readBits(13); - // Either the stream is malformed OR we're not parsing an ADTS stream. if (frameSize <= 6) { - return false; + // The size is too small, so we're probably not reading an ADTS frame. Start searching + // again from one byte further into the start of the stream. 
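The constructor above makes FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS imply FLAG_ENABLE_CONSTANT_BITRATE_SEEKING. A hypothetical caller opting in could look like the sketch below; this wiring is illustrative and not part of the patch:

import com.google.android.exoplayer2.extractor.Extractor;
import com.google.android.exoplayer2.extractor.ExtractorsFactory;
import com.google.android.exoplayer2.extractor.ts.AdtsExtractor;

final class AdtsSeekingSketch {
  // Opts in to constant-bitrate seeking even when the input length (and hence the
  // duration) is unknown; the base seeking flag is then set implicitly by the constructor.
  static final ExtractorsFactory ADTS_CBR_SEEKING_FACTORY =
      () ->
          new Extractor[] {
            new AdtsExtractor(AdtsExtractor.FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS)
          };
}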
+ validFramesCount = 0; + totalValidFramesSize = 0; + headerPosition++; + input.resetPeekPosition(); + input.advancePeekPosition(headerPosition); + } else { + input.advancePeekPosition(frameSize - 6); + totalValidFramesSize += frameSize; } - input.advancePeekPosition(frameSize - 6); - totalValidFramesSize += frameSize; + } + if (headerPosition - startPosition >= MAX_SNIFF_BYTES) { + return false; } } } @@ -178,18 +207,21 @@ public void release() { } @Override - public int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { + Assertions.checkStateNotNull(extractorOutput); // Asserts that init has been called. + long inputLength = input.getLength(); boolean canUseConstantBitrateSeeking = - (flags & FLAG_ENABLE_CONSTANT_BITRATE_SEEKING) != 0 && inputLength != C.LENGTH_UNSET; + (flags & FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS) != 0 + || ((flags & FLAG_ENABLE_CONSTANT_BITRATE_SEEKING) != 0 + && inputLength != C.LENGTH_UNSET); if (canUseConstantBitrateSeeking) { calculateAverageFrameSize(input); } - int bytesRead = input.read(packetBuffer.data, 0, MAX_PACKET_SIZE); + int bytesRead = input.read(packetBuffer.getData(), 0, MAX_PACKET_SIZE); boolean readEndOfStream = bytesRead == RESULT_END_OF_INPUT; - maybeOutputSeekMap(inputLength, canUseConstantBitrateSeeking, readEndOfStream); + maybeOutputSeekMap(inputLength, readEndOfStream); if (readEndOfStream) { return RESULT_END_OF_INPUT; } @@ -209,10 +241,10 @@ public int read(ExtractorInput input, PositionHolder seekPosition) return RESULT_CONTINUE; } - private int peekId3Header(ExtractorInput input) throws IOException, InterruptedException { + private int peekId3Header(ExtractorInput input) throws IOException { int firstFramePosition = 0; while (true) { - input.peekFully(scratch.data, /* offset= */ 0, ID3_HEADER_LENGTH); + input.peekFully(scratch.getData(), /* offset= */ 0, ID3_HEADER_LENGTH); scratch.setPosition(0); if (scratch.readUnsignedInt24() != ID3_TAG) { break; @@ -230,12 +262,14 @@ private int peekId3Header(ExtractorInput input) throws IOException, InterruptedE return firstFramePosition; } - private void maybeOutputSeekMap( - long inputLength, boolean canUseConstantBitrateSeeking, boolean readEndOfStream) { + @RequiresNonNull("extractorOutput") + private void maybeOutputSeekMap(long inputLength, boolean readEndOfStream) { if (hasOutputSeekMap) { return; } - boolean useConstantBitrateSeeking = canUseConstantBitrateSeeking && averageFrameSize > 0; + + boolean useConstantBitrateSeeking = + (flags & FLAG_ENABLE_CONSTANT_BITRATE_SEEKING) != 0 && averageFrameSize > 0; if (useConstantBitrateSeeking && reader.getSampleDurationUs() == C.TIME_UNSET && !readEndOfStream) { @@ -244,17 +278,17 @@ private void maybeOutputSeekMap( return; } - ExtractorOutput extractorOutput = Assertions.checkNotNull(this.extractorOutput); if (useConstantBitrateSeeking && reader.getSampleDurationUs() != C.TIME_UNSET) { - extractorOutput.seekMap(getConstantBitrateSeekMap(inputLength)); + extractorOutput.seekMap( + getConstantBitrateSeekMap( + inputLength, (flags & FLAG_ENABLE_CONSTANT_BITRATE_SEEKING_ALWAYS) != 0)); } else { extractorOutput.seekMap(new SeekMap.Unseekable(C.TIME_UNSET)); } hasOutputSeekMap = true; } - private void calculateAverageFrameSize(ExtractorInput input) - throws IOException, InterruptedException { + private void calculateAverageFrameSize(ExtractorInput input) throws IOException { if (hasCalculatedAverageFrameSize) 
{ return; } @@ -269,7 +303,7 @@ private void calculateAverageFrameSize(ExtractorInput input) long totalValidFramesSize = 0; try { while (input.peekFully( - scratch.data, /* offset= */ 0, /* length= */ 2, /* allowEndOfInput= */ true)) { + scratch.getData(), /* offset= */ 0, /* length= */ 2, /* allowEndOfInput= */ true)) { scratch.setPosition(0); int syncBytes = scratch.readUnsignedShort(); if (!AdtsReader.isAdtsSyncWord(syncBytes)) { @@ -280,7 +314,7 @@ private void calculateAverageFrameSize(ExtractorInput input) } else { // Read the frame size. if (!input.peekFully( - scratch.data, /* offset= */ 0, /* length= */ 4, /* allowEndOfInput= */ true)) { + scratch.getData(), /* offset= */ 0, /* length= */ 4, /* allowEndOfInput= */ true)) { break; } scratchBits.setPosition(14); @@ -288,7 +322,8 @@ private void calculateAverageFrameSize(ExtractorInput input) // Either the stream is malformed OR we're not parsing an ADTS stream. if (currentFrameSize <= 6) { hasCalculatedAverageFrameSize = true; - throw new ParserException("Malformed ADTS stream"); + throw ParserException.createForMalformedContainer( + "Malformed ADTS stream", /* cause= */ null); } totalValidFramesSize += currentFrameSize; if (++numValidFrames == NUM_FRAMES_FOR_AVERAGE_FRAME_SIZE) { @@ -314,9 +349,10 @@ private void calculateAverageFrameSize(ExtractorInput input) hasCalculatedAverageFrameSize = true; } - private SeekMap getConstantBitrateSeekMap(long inputLength) { + private SeekMap getConstantBitrateSeekMap(long inputLength, boolean allowSeeksIfLengthUnknown) { int bitrate = getBitrateFromFrameSize(averageFrameSize, reader.getSampleDurationUs()); - return new ConstantBitrateSeekMap(inputLength, firstFramePosition, bitrate, averageFrameSize); + return new ConstantBitrateSeekMap( + inputLength, firstFramePosition, bitrate, averageFrameSize, allowSeeksIfLengthUnknown); } /** @@ -327,6 +363,7 @@ private SeekMap getConstantBitrateSeekMap(long inputLength) { * @return The stream bitrate. 
*/ private static int getBitrateFromFrameSize(int frameSize, long durationUsPerFrame) { - return (int) ((frameSize * C.BITS_PER_BYTE * C.MICROS_PER_SECOND) / durationUsPerFrame); + return (int) + ((frameSize * ((long) C.BITS_PER_BYTE) * C.MICROS_PER_SECOND) / durationUsPerFrame); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/AdtsReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/AdtsReader.java index bde575f39a..f0796e9ebc 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/AdtsReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/AdtsReader.java @@ -15,25 +15,30 @@ */ package com.google.android.exoplayer2.extractor.ts; -import android.util.Pair; +import static java.lang.Math.min; + +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.audio.AacUtil; import com.google.android.exoplayer2.extractor.DummyTrackOutput; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator; -import com.google.android.exoplayer2.util.CodecSpecificDataUtil; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableBitArray; import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; import java.util.Arrays; import java.util.Collections; +import org.checkerframework.checker.nullness.qual.EnsuresNonNull; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * Parses a continuous ADTS byte stream and extracts individual frames. - */ +/** Parses a continuous ADTS byte stream and extracts individual frames. */ public final class AdtsReader implements ElementaryStreamReader { private static final String TAG = "AdtsReader"; @@ -62,11 +67,11 @@ public final class AdtsReader implements ElementaryStreamReader { private final boolean exposeId3; private final ParsableBitArray adtsScratch; private final ParsableByteArray id3HeaderBuffer; - private final String language; + @Nullable private final String language; - private String formatId; - private TrackOutput output; - private TrackOutput id3Output; + private @MonotonicNonNull String formatId; + private @MonotonicNonNull TrackOutput output; + private @MonotonicNonNull TrackOutput id3Output; private int state; private int bytesRead; @@ -90,7 +95,7 @@ public final class AdtsReader implements ElementaryStreamReader { // Used when reading the samples. private long timeUs; - private TrackOutput currentOutput; + private @MonotonicNonNull TrackOutput currentOutput; private long currentSampleDuration; /** @@ -104,13 +109,14 @@ public AdtsReader(boolean exposeId3) { * @param exposeId3 True if the reader should expose ID3 information. * @param language Track language. 
*/ - public AdtsReader(boolean exposeId3, String language) { + public AdtsReader(boolean exposeId3, @Nullable String language) { adtsScratch = new ParsableBitArray(new byte[HEADER_SIZE + CRC_SIZE]); id3HeaderBuffer = new ParsableByteArray(Arrays.copyOf(ID3_IDENTIFIER, ID3_HEADER_SIZE)); setFindingSampleState(); firstFrameVersion = VERSION_UNSET; firstFrameSampleRateIndex = C.INDEX_UNSET; sampleDurationUs = C.TIME_UNSET; + timeUs = C.TIME_UNSET; this.exposeId3 = exposeId3; this.language = language; } @@ -122,6 +128,7 @@ public static boolean isAdtsSyncWord(int candidateSyncWord) { @Override public void seek() { + timeUs = C.TIME_UNSET; resetSync(); } @@ -130,11 +137,15 @@ public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGen idGenerator.generateNewId(); formatId = idGenerator.getFormatId(); output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_AUDIO); + currentOutput = output; if (exposeId3) { idGenerator.generateNewId(); id3Output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_METADATA); - id3Output.format(Format.createSampleFormat(idGenerator.getFormatId(), - MimeTypes.APPLICATION_ID3, null, Format.NO_VALUE, null)); + id3Output.format( + new Format.Builder() + .setId(idGenerator.getFormatId()) + .setSampleMimeType(MimeTypes.APPLICATION_ID3) + .build()); } else { id3Output = new DummyTrackOutput(); } @@ -142,18 +153,21 @@ public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGen @Override public void packetStarted(long pesTimeUs, @TsPayloadReader.Flags int flags) { - timeUs = pesTimeUs; + if (pesTimeUs != C.TIME_UNSET) { + timeUs = pesTimeUs; + } } @Override public void consume(ParsableByteArray data) throws ParserException { + assertTracksCreated(); while (data.bytesLeft() > 0) { switch (state) { case STATE_FINDING_SAMPLE: findNextSample(data); break; case STATE_READING_ID3_HEADER: - if (continueRead(data, id3HeaderBuffer.data, ID3_HEADER_SIZE)) { + if (continueRead(data, id3HeaderBuffer.getData(), ID3_HEADER_SIZE)) { parseId3Header(); } break; @@ -203,15 +217,13 @@ private void resetSync() { * @return Whether the target length was reached. */ private boolean continueRead(ParsableByteArray source, byte[] target, int targetLength) { - int bytesToRead = Math.min(source.bytesLeft(), targetLength - bytesRead); + int bytesToRead = min(source.bytesLeft(), targetLength - bytesRead); source.readBytes(target, bytesRead, bytesToRead); bytesRead += bytesToRead; return bytesRead == targetLength; } - /** - * Sets the state to STATE_FINDING_SAMPLE. - */ + /** Sets the state to STATE_FINDING_SAMPLE. */ private void setFindingSampleState() { state = STATE_FINDING_SAMPLE; bytesRead = 0; @@ -219,8 +231,8 @@ private void setFindingSampleState() { } /** - * Sets the state to STATE_READING_ID3_HEADER and resets the fields required for - * {@link #parseId3Header()}. + * Sets the state to STATE_READING_ID3_HEADER and resets the fields required for {@link + * #parseId3Header()}. 
*/ private void setReadingId3HeaderState() { state = STATE_READING_ID3_HEADER; @@ -237,8 +249,8 @@ private void setReadingId3HeaderState() { * @param priorReadBytes Size of prior read bytes * @param sampleSize Size of the sample */ - private void setReadingSampleState(TrackOutput outputToUse, long currentSampleDuration, - int priorReadBytes, int sampleSize) { + private void setReadingSampleState( + TrackOutput outputToUse, long currentSampleDuration, int priorReadBytes, int sampleSize) { state = STATE_READING_SAMPLE; bytesRead = priorReadBytes; this.currentOutput = outputToUse; @@ -246,9 +258,7 @@ private void setReadingSampleState(TrackOutput outputToUse, long currentSampleDu this.sampleSize = sampleSize; } - /** - * Sets the state to STATE_READING_ADTS_HEADER. - */ + /** Sets the state to STATE_READING_ADTS_HEADER. */ private void setReadingAdtsHeaderState() { state = STATE_READING_ADTS_HEADER; bytesRead = 0; @@ -267,7 +277,7 @@ private void setCheckingAdtsHeaderState() { * @param pesBuffer The buffer whose position should be advanced. */ private void findNextSample(ParsableByteArray pesBuffer) { - byte[] adtsData = pesBuffer.data; + byte[] adtsData = pesBuffer.getData(); int position = pesBuffer.getPosition(); int endOffset = pesBuffer.limit(); while (position < endOffset) { @@ -325,7 +335,7 @@ private void checkAdtsHeader(ParsableByteArray buffer) { return; } // Peek the next byte of buffer into scratch array. - adtsScratch.data[0] = buffer.data[buffer.getPosition()]; + adtsScratch.data[0] = buffer.getData()[buffer.getPosition()]; adtsScratch.setPosition(2); int currentFrameSampleRateIndex = adtsScratch.readBits(4); @@ -406,7 +416,7 @@ private boolean checkSyncPositionValid(ParsableByteArray pesBuffer, int syncPosi // The bytes following the frame must be either another SYNC word with the same MPEG version, or // the start of an ID3 header. - byte[] data = pesBuffer.data; + byte[] data = pesBuffer.getData(); int dataLimit = pesBuffer.limit(); int nextSyncPosition = syncPositionCandidate + frameSize; if (nextSyncPosition >= dataLimit) { @@ -453,19 +463,17 @@ private boolean tryRead(ParsableByteArray source, byte[] target, int targetLengt return true; } - /** - * Parses the Id3 header. - */ + /** Parses the Id3 header. */ + @RequiresNonNull("id3Output") private void parseId3Header() { id3Output.sampleData(id3HeaderBuffer, ID3_HEADER_SIZE); id3HeaderBuffer.setPosition(ID3_SIZE_OFFSET); - setReadingSampleState(id3Output, 0, ID3_HEADER_SIZE, - id3HeaderBuffer.readSynchSafeInt() + ID3_HEADER_SIZE); + setReadingSampleState( + id3Output, 0, ID3_HEADER_SIZE, id3HeaderBuffer.readSynchSafeInt() + ID3_HEADER_SIZE); } - /** - * Parses the sample header. - */ + /** Parses the sample header. 
*/ + @RequiresNonNull("output") private void parseAdtsHeader() throws ParserException { adtsScratch.setPosition(0); @@ -489,14 +497,19 @@ private void parseAdtsHeader() throws ParserException { int channelConfig = adtsScratch.readBits(3); byte[] audioSpecificConfig = - CodecSpecificDataUtil.buildAacAudioSpecificConfig( + AacUtil.buildAudioSpecificConfig( audioObjectType, firstFrameSampleRateIndex, channelConfig); - Pair audioParams = CodecSpecificDataUtil.parseAacAudioSpecificConfig( - audioSpecificConfig); - - Format format = Format.createAudioSampleFormat(formatId, MimeTypes.AUDIO_AAC, null, - Format.NO_VALUE, Format.NO_VALUE, audioParams.second, audioParams.first, - Collections.singletonList(audioSpecificConfig), null, 0, language); + AacUtil.Config aacConfig = AacUtil.parseAudioSpecificConfig(audioSpecificConfig); + Format format = + new Format.Builder() + .setId(formatId) + .setSampleMimeType(MimeTypes.AUDIO_AAC) + .setCodecs(aacConfig.codecs) + .setChannelCount(aacConfig.channelCount) + .setSampleRate(aacConfig.sampleRateHz) + .setInitializationData(Collections.singletonList(audioSpecificConfig)) + .setLanguage(language) + .build(); // In this class a sample is an access unit, but the MediaFormat sample rate specifies the // number of PCM audio samples per second. sampleDurationUs = (C.MICROS_PER_SECOND * 1024) / format.sampleRate; @@ -515,18 +528,25 @@ private void parseAdtsHeader() throws ParserException { setReadingSampleState(output, sampleDurationUs, 0, sampleSize); } - /** - * Reads the rest of the sample - */ + /** Reads the rest of the sample */ + @RequiresNonNull("currentOutput") private void readSample(ParsableByteArray data) { - int bytesToRead = Math.min(data.bytesLeft(), sampleSize - bytesRead); + int bytesToRead = min(data.bytesLeft(), sampleSize - bytesRead); currentOutput.sampleData(data, bytesToRead); bytesRead += bytesToRead; if (bytesRead == sampleSize) { - currentOutput.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null); - timeUs += currentSampleDuration; + if (timeUs != C.TIME_UNSET) { + currentOutput.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null); + timeUs += currentSampleDuration; + } setFindingSampleState(); } } + @EnsuresNonNull({"output", "currentOutput", "id3Output"}) + private void assertTracksCreated() { + Assertions.checkNotNull(output); + Util.castNonNull(currentOutput); + Util.castNonNull(id3Output); + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/DefaultTsPayloadReaderFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/DefaultTsPayloadReaderFactory.java index 24d17f4956..c59e210547 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/DefaultTsPayloadReaderFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/DefaultTsPayloadReaderFactory.java @@ -15,23 +15,25 @@ */ package com.google.android.exoplayer2.extractor.ts; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.util.SparseArray; import androidx.annotation.IntDef; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.EsInfo; -import com.google.android.exoplayer2.text.cea.Cea708InitializationData; +import com.google.android.exoplayer2.util.CodecSpecificDataUtil; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; +import 
com.google.common.collect.ImmutableList; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.ArrayList; -import java.util.Collections; import java.util.List; -/** - * Default {@link TsPayloadReader.Factory} implementation. - */ +/** Default {@link TsPayloadReader.Factory} implementation. */ public final class DefaultTsPayloadReaderFactory implements TsPayloadReader.Factory { /** @@ -43,6 +45,7 @@ public final class DefaultTsPayloadReaderFactory implements TsPayloadReader.Fact */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef( flag = true, value = { @@ -79,7 +82,10 @@ public final class DefaultTsPayloadReaderFactory implements TsPayloadReader.Fact * delimiters (AUDs). */ public static final int FLAG_DETECT_ACCESS_UNITS = 1 << 3; - /** Prevents the creation of {@link SpliceInfoSectionReader} instances. */ + /** + * Prevents the creation of {@link SectionPayloadReader}s for splice information sections + * (SCTE-35). + */ public static final int FLAG_IGNORE_SPLICE_INFO_STREAM = 1 << 4; /** * Whether the list of {@code closedCaptionFormats} passed to {@link @@ -96,7 +102,7 @@ public final class DefaultTsPayloadReaderFactory implements TsPayloadReader.Fact private static final int DESCRIPTOR_TAG_CAPTION_SERVICE = 0x86; - @Flags private final int flags; + private final @Flags int flags; private final List closedCaptionFormats; public DefaultTsPayloadReaderFactory() { @@ -108,21 +114,18 @@ public DefaultTsPayloadReaderFactory() { * readers. */ public DefaultTsPayloadReaderFactory(@Flags int flags) { - this( - flags, - Collections.singletonList( - Format.createTextSampleFormat(null, MimeTypes.APPLICATION_CEA608, 0, null))); + this(flags, ImmutableList.of()); } /** * @param flags A combination of {@code FLAG_*} values that control the behavior of the created * readers. * @param closedCaptionFormats {@link Format}s to be exposed by payload readers for streams with - * embedded closed captions when no caption service descriptors are provided. If - * {@link #FLAG_OVERRIDE_CAPTION_DESCRIPTORS} is set, {@code closedCaptionFormats} overrides - * any descriptor information. If not set, and {@code closedCaptionFormats} is empty, a - * closed caption track with {@link Format#accessibilityChannel} {@link Format#NO_VALUE} will - * be exposed. + * embedded closed captions when no caption service descriptors are provided. If {@link + * #FLAG_OVERRIDE_CAPTION_DESCRIPTORS} is set, {@code closedCaptionFormats} overrides any + * descriptor information. If not set, and {@code closedCaptionFormats} is empty, a closed + * caption track with {@link Format#accessibilityChannel} {@link Format#NO_VALUE} will be + * exposed. */ public DefaultTsPayloadReaderFactory(@Flags int flags, List closedCaptionFormats) { this.flags = flags; @@ -135,6 +138,7 @@ public SparseArray createInitialPayloadReaders() { } @Override + @Nullable public TsPayloadReader createPayloadReader(int streamType, EsInfo esInfo) { switch (streamType) { case TsExtractor.TS_STREAM_TYPE_MPA: @@ -142,10 +146,12 @@ public TsPayloadReader createPayloadReader(int streamType, EsInfo esInfo) { return new PesReader(new MpegAudioReader(esInfo.language)); case TsExtractor.TS_STREAM_TYPE_AAC_ADTS: return isSet(FLAG_IGNORE_AAC_STREAM) - ? null : new PesReader(new AdtsReader(false, esInfo.language)); + ? 
null + : new PesReader(new AdtsReader(false, esInfo.language)); case TsExtractor.TS_STREAM_TYPE_AAC_LATM: return isSet(FLAG_IGNORE_AAC_STREAM) - ? null : new PesReader(new LatmReader(esInfo.language)); + ? null + : new PesReader(new LatmReader(esInfo.language)); case TsExtractor.TS_STREAM_TYPE_AC3: case TsExtractor.TS_STREAM_TYPE_E_AC3: return new PesReader(new Ac3Reader(esInfo.language)); @@ -159,31 +165,40 @@ public TsPayloadReader createPayloadReader(int streamType, EsInfo esInfo) { case TsExtractor.TS_STREAM_TYPE_DTS: return new PesReader(new DtsReader(esInfo.language)); case TsExtractor.TS_STREAM_TYPE_H262: + case TsExtractor.TS_STREAM_TYPE_DC2_H262: return new PesReader(new H262Reader(buildUserDataReader(esInfo))); + case TsExtractor.TS_STREAM_TYPE_H263: + return new PesReader(new H263Reader(buildUserDataReader(esInfo))); case TsExtractor.TS_STREAM_TYPE_H264: - return isSet(FLAG_IGNORE_H264_STREAM) ? null - : new PesReader(new H264Reader(buildSeiReader(esInfo), - isSet(FLAG_ALLOW_NON_IDR_KEYFRAMES), isSet(FLAG_DETECT_ACCESS_UNITS))); + return isSet(FLAG_IGNORE_H264_STREAM) + ? null + : new PesReader( + new H264Reader( + buildSeiReader(esInfo), + isSet(FLAG_ALLOW_NON_IDR_KEYFRAMES), + isSet(FLAG_DETECT_ACCESS_UNITS))); case TsExtractor.TS_STREAM_TYPE_H265: return new PesReader(new H265Reader(buildSeiReader(esInfo))); case TsExtractor.TS_STREAM_TYPE_SPLICE_INFO: return isSet(FLAG_IGNORE_SPLICE_INFO_STREAM) - ? null : new SectionReader(new SpliceInfoSectionReader()); + ? null + : new SectionReader(new PassthroughSectionPayloadReader(MimeTypes.APPLICATION_SCTE35)); case TsExtractor.TS_STREAM_TYPE_ID3: return new PesReader(new Id3Reader()); case TsExtractor.TS_STREAM_TYPE_DVBSUBS: - return new PesReader( - new DvbSubtitleReader(esInfo.dvbSubtitleInfos)); + return new PesReader(new DvbSubtitleReader(esInfo.dvbSubtitleInfos)); + case TsExtractor.TS_STREAM_TYPE_AIT: + return new SectionReader(new PassthroughSectionPayloadReader(MimeTypes.APPLICATION_AIT)); default: return null; } } /** - * If {@link #FLAG_OVERRIDE_CAPTION_DESCRIPTORS} is set, returns a {@link SeiReader} for - * {@link #closedCaptionFormats}. If unset, parses the PMT descriptor information and returns a - * {@link SeiReader} for the declared formats, or {@link #closedCaptionFormats} if the descriptor - * is not present. + * If {@link #FLAG_OVERRIDE_CAPTION_DESCRIPTORS} is set, returns a {@link SeiReader} for {@link + * #closedCaptionFormats}. If unset, parses the PMT descriptor information and returns a {@link + * SeiReader} for the declared formats, or {@link #closedCaptionFormats} if the descriptor is not + * present. * * @param esInfo The {@link EsInfo} passed to {@link #createPayloadReader(int, EsInfo)}. * @return A {@link SeiReader} for closed caption tracks. @@ -247,25 +262,21 @@ private List getClosedCaptionFormats(EsInfo esInfo) { // Skip reserved (8). scratchDescriptorData.skipBytes(1); - List initializationData = null; + @Nullable List initializationData = null; // The wide_aspect_ratio flag only has meaning for CEA-708. 
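Putting the new DefaultTsPayloadReaderFactory constructor and the descriptor parsing above together, a caller that wants to pin the exposed caption tracks instead of trusting PMT descriptors might look like this sketch (the CEA-608 format and the accessibility channel value are illustrative assumptions):

import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.extractor.ts.DefaultTsPayloadReaderFactory;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.common.collect.ImmutableList;

final class CaptionOverrideSketch {
  // Always expose a single CEA-608 caption track, ignoring any caption service
  // descriptors found in the PMT.
  static DefaultTsPayloadReaderFactory overriddenCaptionsFactory() {
    Format cea608 =
        new Format.Builder()
            .setSampleMimeType(MimeTypes.APPLICATION_CEA608)
            .setAccessibilityChannel(1) // illustrative CC channel
            .build();
    return new DefaultTsPayloadReaderFactory(
        DefaultTsPayloadReaderFactory.FLAG_OVERRIDE_CAPTION_DESCRIPTORS,
        ImmutableList.of(cea608));
  }
}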
if (isDigital) { boolean isWideAspectRatio = (flags & 0x40) != 0; - initializationData = Cea708InitializationData.buildData(isWideAspectRatio); + initializationData = + CodecSpecificDataUtil.buildCea708InitializationData(isWideAspectRatio); } closedCaptionFormats.add( - Format.createTextSampleFormat( - /* id= */ null, - mimeType, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - /* selectionFlags= */ 0, - language, - accessibilityChannel, - /* drmInitData= */ null, - Format.OFFSET_SAMPLE_RELATIVE, - initializationData)); + new Format.Builder() + .setSampleMimeType(mimeType) + .setLanguage(language) + .setAccessibilityChannel(accessibilityChannel) + .setInitializationData(initializationData) + .build()); } } else { // Unknown descriptor. Ignore. diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/DtsReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/DtsReader.java index 1f9b0e79d4..5fb5eb9a9b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/DtsReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/DtsReader.java @@ -15,17 +15,21 @@ */ package com.google.android.exoplayer2.extractor.ts; +import static java.lang.Math.min; + +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.audio.DtsUtil; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.ParsableByteArray; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * Parses a continuous DTS byte stream and extracts individual samples. - */ +/** Parses a continuous DTS byte stream and extracts individual samples. */ public final class DtsReader implements ElementaryStreamReader { private static final int STATE_FINDING_SYNC = 0; @@ -35,10 +39,10 @@ public final class DtsReader implements ElementaryStreamReader { private static final int HEADER_SIZE = 18; private final ParsableByteArray headerScratchBytes; - private final String language; + @Nullable private final String language; - private String formatId; - private TrackOutput output; + private @MonotonicNonNull String formatId; + private @MonotonicNonNull TrackOutput output; private int state; private int bytesRead; @@ -48,7 +52,7 @@ public final class DtsReader implements ElementaryStreamReader { // Used when parsing the header. private long sampleDurationUs; - private Format format; + private @MonotonicNonNull Format format; private int sampleSize; // Used when reading the samples. @@ -59,9 +63,10 @@ public final class DtsReader implements ElementaryStreamReader { * * @param language Track language. 
*/ - public DtsReader(String language) { + public DtsReader(@Nullable String language) { headerScratchBytes = new ParsableByteArray(new byte[HEADER_SIZE]); state = STATE_FINDING_SYNC; + timeUs = C.TIME_UNSET; this.language = language; } @@ -70,6 +75,7 @@ public void seek() { state = STATE_FINDING_SYNC; bytesRead = 0; syncBytes = 0; + timeUs = C.TIME_UNSET; } @Override @@ -81,11 +87,14 @@ public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGen @Override public void packetStarted(long pesTimeUs, @TsPayloadReader.Flags int flags) { - timeUs = pesTimeUs; + if (pesTimeUs != C.TIME_UNSET) { + timeUs = pesTimeUs; + } } @Override public void consume(ParsableByteArray data) { + Assertions.checkStateNotNull(output); // Asserts that createTracks has been called. while (data.bytesLeft() > 0) { switch (state) { case STATE_FINDING_SYNC: @@ -94,7 +103,7 @@ public void consume(ParsableByteArray data) { } break; case STATE_READING_HEADER: - if (continueRead(data, headerScratchBytes.data, HEADER_SIZE)) { + if (continueRead(data, headerScratchBytes.getData(), HEADER_SIZE)) { parseHeader(); headerScratchBytes.setPosition(0); output.sampleData(headerScratchBytes, HEADER_SIZE); @@ -102,12 +111,14 @@ public void consume(ParsableByteArray data) { } break; case STATE_READING_SAMPLE: - int bytesToRead = Math.min(data.bytesLeft(), sampleSize - bytesRead); + int bytesToRead = min(data.bytesLeft(), sampleSize - bytesRead); output.sampleData(data, bytesToRead); bytesRead += bytesToRead; if (bytesRead == sampleSize) { - output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null); - timeUs += sampleDurationUs; + if (timeUs != C.TIME_UNSET) { + output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null); + timeUs += sampleDurationUs; + } state = STATE_FINDING_SYNC; } break; @@ -132,7 +143,7 @@ public void packetFinished() { * @return Whether the target length was reached. */ private boolean continueRead(ParsableByteArray source, byte[] target, int targetLength) { - int bytesToRead = Math.min(source.bytesLeft(), targetLength - bytesRead); + int bytesToRead = min(source.bytesLeft(), targetLength - bytesRead); source.readBytes(target, bytesRead, bytesToRead); bytesRead += bytesToRead; return bytesRead == targetLength; @@ -150,10 +161,11 @@ private boolean skipToNextSync(ParsableByteArray pesBuffer) { syncBytes <<= 8; syncBytes |= pesBuffer.readUnsignedByte(); if (DtsUtil.isSyncWord(syncBytes)) { - headerScratchBytes.data[0] = (byte) ((syncBytes >> 24) & 0xFF); - headerScratchBytes.data[1] = (byte) ((syncBytes >> 16) & 0xFF); - headerScratchBytes.data[2] = (byte) ((syncBytes >> 8) & 0xFF); - headerScratchBytes.data[3] = (byte) (syncBytes & 0xFF); + byte[] headerData = headerScratchBytes.getData(); + headerData[0] = (byte) ((syncBytes >> 24) & 0xFF); + headerData[1] = (byte) ((syncBytes >> 16) & 0xFF); + headerData[2] = (byte) ((syncBytes >> 8) & 0xFF); + headerData[3] = (byte) (syncBytes & 0xFF); bytesRead = 4; syncBytes = 0; return true; @@ -162,11 +174,10 @@ private boolean skipToNextSync(ParsableByteArray pesBuffer) { return false; } - /** - * Parses the sample header. - */ + /** Parses the sample header. 
*/ + @RequiresNonNull("output") private void parseHeader() { - byte[] frameData = headerScratchBytes.data; + byte[] frameData = headerScratchBytes.getData(); if (format == null) { format = DtsUtil.parseDtsFormat(frameData, formatId, language, null); output.format(format); @@ -174,8 +185,8 @@ private void parseHeader() { sampleSize = DtsUtil.getDtsFrameSize(frameData); // In this class a sample is an access unit (frame in DTS), but the format's sample rate // specifies the number of PCM audio samples per second. - sampleDurationUs = (int) (C.MICROS_PER_SECOND - * DtsUtil.parseDtsAudioSampleCount(frameData) / format.sampleRate); + sampleDurationUs = + (int) + (C.MICROS_PER_SECOND * DtsUtil.parseDtsAudioSampleCount(frameData) / format.sampleRate); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/DvbSubtitleReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/DvbSubtitleReader.java index 3f0a772b1c..a5413606da 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/DvbSubtitleReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/DvbSubtitleReader.java @@ -28,9 +28,7 @@ import java.util.Collections; import java.util.List; -/** - * Parses DVB subtitle data and extracts individual frames. - */ +/** Parses DVB subtitle data and extracts individual frames. */ public final class DvbSubtitleReader implements ElementaryStreamReader { private final List subtitleInfos; @@ -47,11 +45,13 @@ public final class DvbSubtitleReader implements ElementaryStreamReader { public DvbSubtitleReader(List subtitleInfos) { this.subtitleInfos = subtitleInfos; outputs = new TrackOutput[subtitleInfos.size()]; + sampleTimeUs = C.TIME_UNSET; } @Override public void seek() { writingSample = false; + sampleTimeUs = C.TIME_UNSET; } @Override @@ -61,15 +61,12 @@ public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGen idGenerator.generateNewId(); TrackOutput output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_TEXT); output.format( - Format.createImageSampleFormat( - idGenerator.getFormatId(), - MimeTypes.APPLICATION_DVBSUBS, - null, - Format.NO_VALUE, - 0, - Collections.singletonList(subtitleInfo.initializationData), - subtitleInfo.language, - null)); + new Format.Builder() + .setId(idGenerator.getFormatId()) + .setSampleMimeType(MimeTypes.APPLICATION_DVBSUBS) + .setInitializationData(Collections.singletonList(subtitleInfo.initializationData)) + .setLanguage(subtitleInfo.language) + .build()); outputs[i] = output; } } @@ -80,7 +77,9 @@ public void packetStarted(long pesTimeUs, @TsPayloadReader.Flags int flags) { return; } writingSample = true; - sampleTimeUs = pesTimeUs; + if (pesTimeUs != C.TIME_UNSET) { + sampleTimeUs = pesTimeUs; + } sampleBytesWritten = 0; bytesToCheck = 2; } @@ -88,8 +87,10 @@ public void packetStarted(long pesTimeUs, @TsPayloadReader.Flags int flags) { @Override public void packetFinished() { if (writingSample) { - for (TrackOutput output : outputs) { - output.sampleMetadata(sampleTimeUs, C.BUFFER_FLAG_KEY_FRAME, sampleBytesWritten, 0, null); + if (sampleTimeUs != C.TIME_UNSET) { + for (TrackOutput output : outputs) { + output.sampleMetadata(sampleTimeUs, C.BUFFER_FLAG_KEY_FRAME, sampleBytesWritten, 0, null); + } } writingSample = false; } @@ -126,5 +127,4 @@ private boolean checkNextByte(ParsableByteArray data, int expectedValue) { bytesToCheck--; return writingSample; } - } diff --git 
a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/ElementaryStreamReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/ElementaryStreamReader.java index e022fc237b..48227d6670 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/ElementaryStreamReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/ElementaryStreamReader.java @@ -20,14 +20,10 @@ import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.util.ParsableByteArray; -/** - * Extracts individual samples from an elementary media stream, preserving original order. - */ +/** Extracts individual samples from an elementary media stream, preserving original order. */ public interface ElementaryStreamReader { - /** - * Notifies the reader that a seek has occurred. - */ + /** Notifies the reader that a seek has occurred. */ void seek(); /** @@ -55,9 +51,6 @@ public interface ElementaryStreamReader { */ void consume(ParsableByteArray data) throws ParserException; - /** - * Called when a packet ends. - */ + /** Called when a packet ends. */ void packetFinished(); - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/H262Reader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/H262Reader.java index e7f2c1935b..99b9ec722a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/H262Reader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/H262Reader.java @@ -15,7 +15,11 @@ */ package com.google.android.exoplayer2.extractor.ts; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + import android.util.Pair; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.extractor.ExtractorOutput; @@ -24,12 +28,12 @@ import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.NalUnitUtil; import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; import java.util.Arrays; import java.util.Collections; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * Parses a continuous H262 byte stream and extracts individual frames. - */ +/** Parses a continuous H262 byte stream and extracts individual frames. */ public final class H262Reader implements ElementaryStreamReader { private static final int START_PICTURE = 0x00; @@ -38,27 +42,27 @@ public final class H262Reader implements ElementaryStreamReader { private static final int START_GROUP = 0xB8; private static final int START_USER_DATA = 0xB2; - private String formatId; - private TrackOutput output; + private @MonotonicNonNull String formatId; + private @MonotonicNonNull TrackOutput output; // Maps (frame_rate_code - 1) indices to values, as defined in ITU-T H.262 Table 6-4. - private static final double[] FRAME_RATE_VALUES = new double[] { - 24000d / 1001, 24, 25, 30000d / 1001, 30, 50, 60000d / 1001, 60}; - - // State that should not be reset on seek. 
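H262Reader derives each frame's duration from the FRAME_RATE_VALUES table shown above. A small standalone sketch of that arithmetic follows; the class name and the example frame_rate_code are made up for illustration:

final class Mpeg2FrameDurationSketch {
  // Same table as H262Reader.FRAME_RATE_VALUES (ITU-T H.262 Table 6-4), indexed by
  // frame_rate_code - 1.
  private static final double[] FRAME_RATE_VALUES =
      new double[] {24000d / 1001, 24, 25, 30000d / 1001, 30, 50, 60000d / 1001, 60};

  static long frameDurationUs(int frameRateCode) {
    double frameRate = FRAME_RATE_VALUES[frameRateCode - 1];
    return (long) (1_000_000L / frameRate);
  }

  public static void main(String[] args) {
    // frame_rate_code 4 -> 30000/1001 fps (~29.97), i.e. roughly 33,366 us per frame.
    System.out.println(frameDurationUs(4));
  }
}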
- private boolean hasOutputFormat; - private long frameDurationUs; + private static final double[] FRAME_RATE_VALUES = + new double[] {24000d / 1001, 24, 25, 30000d / 1001, 30, 50, 60000d / 1001, 60}; - private final UserDataReader userDataReader; - private final ParsableByteArray userDataParsable; + @Nullable private final UserDataReader userDataReader; + @Nullable private final ParsableByteArray userDataParsable; // State that should be reset on seek. + @Nullable private final NalUnitTargetBuffer userData; private final boolean[] prefixFlags; private final CsdBuffer csdBuffer; - private final NalUnitTargetBuffer userData; private long totalBytesWritten; private boolean startedFirstSample; + // State that should not be reset on seek. + private boolean hasOutputFormat; + private long frameDurationUs; + // Per packet state that gets reset at the start of each packet. private long pesTimeUs; @@ -72,7 +76,7 @@ public H262Reader() { this(null); } - /* package */ H262Reader(UserDataReader userDataReader) { + /* package */ H262Reader(@Nullable UserDataReader userDataReader) { this.userDataReader = userDataReader; prefixFlags = new boolean[4]; csdBuffer = new CsdBuffer(128); @@ -83,17 +87,21 @@ public H262Reader() { userData = null; userDataParsable = null; } + pesTimeUs = C.TIME_UNSET; + sampleTimeUs = C.TIME_UNSET; } @Override public void seek() { NalUnitUtil.clearPrefixFlags(prefixFlags); csdBuffer.reset(); - if (userDataReader != null) { + if (userData != null) { userData.reset(); } totalBytesWritten = 0; startedFirstSample = false; + pesTimeUs = C.TIME_UNSET; + sampleTimeUs = C.TIME_UNSET; } @Override @@ -114,9 +122,10 @@ public void packetStarted(long pesTimeUs, @TsPayloadReader.Flags int flags) { @Override public void consume(ParsableByteArray data) { + checkStateNotNull(output); // Asserts that createTracks has been called. int offset = data.getPosition(); int limit = data.limit(); - byte[] dataArray = data.data; + byte[] dataArray = data.getData(); // Append the data to the buffer. totalBytesWritten += data.bytesLeft(); @@ -130,14 +139,14 @@ public void consume(ParsableByteArray data) { if (!hasOutputFormat) { csdBuffer.onData(dataArray, offset, limit); } - if (userDataReader != null) { + if (userData != null) { userData.appendToNalUnit(dataArray, offset, limit); } return; } // We've found a start code with the following value. - int startCodeValue = data.data[startCodeOffset + 3] & 0xFF; + int startCodeValue = data.getData()[startCodeOffset + 3] & 0xFF; // This is the number of bytes from the current offset to the start of the next start // code. It may be negative if the start code started in the previously consumed data. int lengthToStartCode = startCodeOffset - offset; @@ -151,13 +160,13 @@ public void consume(ParsableByteArray data) { int bytesAlreadyPassed = lengthToStartCode < 0 ? -lengthToStartCode : 0; if (csdBuffer.onStartCode(startCodeValue, bytesAlreadyPassed)) { // The csd data is complete, so we can decode and output the media format. 
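A pattern repeated across these reader changes (Ac4Reader, AdtsReader, DtsReader, DvbSubtitleReader, H262Reader) is: initialize the timestamp to C.TIME_UNSET, only accept a PES timestamp in packetStarted when one is actually present, and only call sampleMetadata once a timestamp is known. A condensed sketch of that idea, using hypothetical names rather than code from the patch:

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.extractor.TrackOutput;

final class TimestampGuardSketch {
  private long timeUs = C.TIME_UNSET;

  // Mirrors the new packetStarted() implementations: ignore C.TIME_UNSET instead of storing it.
  void packetStarted(long pesTimeUs) {
    if (pesTimeUs != C.TIME_UNSET) {
      timeUs = pesTimeUs;
    }
  }

  // Mirrors the guarded sampleMetadata() calls: no output until a timestamp is known.
  void maybeOutputSample(TrackOutput output, int sampleSize, long sampleDurationUs) {
    if (timeUs != C.TIME_UNSET) {
      output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, /* offset= */ 0, null);
      timeUs += sampleDurationUs;
    }
  }
}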
- Pair result = parseCsdBuffer(csdBuffer, formatId); + Pair result = parseCsdBuffer(csdBuffer, checkNotNull(formatId)); output.format(result.first); frameDurationUs = result.second; hasOutputFormat = true; } } - if (userDataReader != null) { + if (userData != null) { int bytesAlreadyPassed = 0; if (lengthToStartCode > 0) { userData.appendToNalUnit(dataArray, offset, startCodeOffset); @@ -167,17 +176,17 @@ public void consume(ParsableByteArray data) { if (userData.endNalUnit(bytesAlreadyPassed)) { int unescapedLength = NalUnitUtil.unescapeStream(userData.nalData, userData.nalLength); - userDataParsable.reset(userData.nalData, unescapedLength); - userDataReader.consume(sampleTimeUs, userDataParsable); + Util.castNonNull(userDataParsable).reset(userData.nalData, unescapedLength); + Util.castNonNull(userDataReader).consume(sampleTimeUs, userDataParsable); } - if (startCodeValue == START_USER_DATA && data.data[startCodeOffset + 2] == 0x1) { + if (startCodeValue == START_USER_DATA && data.getData()[startCodeOffset + 2] == 0x1) { userData.startNalUnit(startCodeValue); } } if (startCodeValue == START_PICTURE || startCodeValue == START_SEQUENCE_HEADER) { int bytesWrittenPastStartCode = limit - startCodeOffset; - if (startedFirstSample && sampleHasPicture && hasOutputFormat) { + if (sampleHasPicture && hasOutputFormat && sampleTimeUs != C.TIME_UNSET) { // Output the sample. @C.BufferFlags int flags = sampleIsKeyframe ? C.BUFFER_FLAG_KEY_FRAME : 0; int size = (int) (totalBytesWritten - samplePosition) - bytesWrittenPastStartCode; @@ -186,8 +195,12 @@ public void consume(ParsableByteArray data) { if (!startedFirstSample || sampleHasPicture) { // Start the next sample. samplePosition = totalBytesWritten - bytesWrittenPastStartCode; - sampleTimeUs = pesTimeUs != C.TIME_UNSET ? pesTimeUs - : (startedFirstSample ? (sampleTimeUs + frameDurationUs) : 0); + sampleTimeUs = + pesTimeUs != C.TIME_UNSET + ? pesTimeUs + : (sampleTimeUs != C.TIME_UNSET + ? (sampleTimeUs + frameDurationUs) + : C.TIME_UNSET); sampleIsKeyframe = false; pesTimeUs = C.TIME_UNSET; startedFirstSample = true; @@ -210,9 +223,9 @@ public void packetFinished() { * Parses the {@link Format} and frame duration from a csd buffer. * * @param csdBuffer The csd buffer. - * @param formatId The id for the generated format. May be null. - * @return A pair consisting of the {@link Format} and the frame duration in microseconds, or - * 0 if the duration could not be determined. + * @param formatId The id for the generated format. + * @return A pair consisting of the {@link Format} and the frame duration in microseconds, or 0 if + * the duration could not be determined. 
*/ private static Pair parseCsdBuffer(CsdBuffer csdBuffer, String formatId) { byte[] csdData = Arrays.copyOf(csdBuffer.data, csdBuffer.length); @@ -225,7 +238,7 @@ private static Pair parseCsdBuffer(CsdBuffer csdBuffer, String for float pixelWidthHeightRatio = 1f; int aspectRatioCode = (csdData[7] & 0xF0) >> 4; - switch(aspectRatioCode) { + switch (aspectRatioCode) { case 2: pixelWidthHeightRatio = (4 * height) / (float) (3 * width); break; @@ -240,9 +253,15 @@ private static Pair parseCsdBuffer(CsdBuffer csdBuffer, String for break; } - Format format = Format.createVideoSampleFormat(formatId, MimeTypes.VIDEO_MPEG2, null, - Format.NO_VALUE, Format.NO_VALUE, width, height, Format.NO_VALUE, - Collections.singletonList(csdData), Format.NO_VALUE, pixelWidthHeightRatio, null); + Format format = + new Format.Builder() + .setId(formatId) + .setSampleMimeType(MimeTypes.VIDEO_MPEG2) + .setWidth(width) + .setHeight(height) + .setPixelWidthHeightRatio(pixelWidthHeightRatio) + .setInitializationData(Collections.singletonList(csdData)) + .build(); long frameDurationUs = 0; int frameRateCodeMinusOne = (csdData[7] & 0x0F) - 1; @@ -274,9 +293,7 @@ public CsdBuffer(int initialCapacity) { data = new byte[initialCapacity]; } - /** - * Resets the buffer, clearing any data that it holds. - */ + /** Resets the buffer, clearing any data that it holds. */ public void reset() { isFilling = false; length = 0; @@ -289,9 +306,9 @@ public void reset() { * @param startCodeValue The start code value. * @param bytesAlreadyPassed The number of bytes of the start code that have been passed to * {@link #onData(byte[], int, int)}, or 0. - * @return Whether the csd data is now complete. If true is returned, neither - * this method nor {@link #onData(byte[], int, int)} should be called again without an - * interleaving call to {@link #reset()}. + * @return Whether the csd data is now complete. If true is returned, neither this method nor + * {@link #onData(byte[], int, int)} should be called again without an interleaving call to + * {@link #reset()}. */ public boolean onStartCode(int startCodeValue, int bytesAlreadyPassed) { if (isFilling) { @@ -327,7 +344,5 @@ public void onData(byte[] newData, int offset, int limit) { System.arraycopy(newData, offset, data, length, readLength); length += readLength; } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/H263Reader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/H263Reader.java new file mode 100644 index 0000000000..62a68cb42c --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/H263Reader.java @@ -0,0 +1,490 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.extractor.ts; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.annotation.ElementType.TYPE_USE; + +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.NalUnitUtil; +import com.google.android.exoplayer2.util.ParsableBitArray; +import com.google.android.exoplayer2.util.ParsableByteArray; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.util.Arrays; +import java.util.Collections; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Parses an ISO/IEC 14496-2 (MPEG-4 Part 2) or ITU-T Recommendation H.263 byte stream and extracts + * individual frames. + */ +public final class H263Reader implements ElementaryStreamReader { + + private static final String TAG = "H263Reader"; + + private static final int START_CODE_VALUE_VISUAL_OBJECT_SEQUENCE = 0xB0; + private static final int START_CODE_VALUE_USER_DATA = 0xB2; + private static final int START_CODE_VALUE_GROUP_OF_VOP = 0xB3; + private static final int START_CODE_VALUE_VISUAL_OBJECT = 0xB5; + private static final int START_CODE_VALUE_VOP = 0xB6; + private static final int START_CODE_VALUE_MAX_VIDEO_OBJECT = 0x1F; + private static final int START_CODE_VALUE_UNSET = -1; + + // See ISO 14496-2 (2001) table 6-12 for the mapping from aspect_ratio_info to pixel aspect ratio. + private static final float[] PIXEL_WIDTH_HEIGHT_RATIO_BY_ASPECT_RATIO_INFO = + new float[] {1f, 1f, 12 / 11f, 10 / 11f, 16 / 11f, 40 / 33f, 1f}; + private static final int VIDEO_OBJECT_LAYER_SHAPE_RECTANGULAR = 0; + + @Nullable private final UserDataReader userDataReader; + @Nullable private final ParsableByteArray userDataParsable; + + // State that should be reset on seek. + private final boolean[] prefixFlags; + private final CsdBuffer csdBuffer; + @Nullable private final NalUnitTargetBuffer userData; + private H263Reader.@MonotonicNonNull SampleReader sampleReader; + private long totalBytesWritten; + + // State initialized once when tracks are created. + private @MonotonicNonNull String formatId; + private @MonotonicNonNull TrackOutput output; + + // State that should not be reset on seek. + private boolean hasOutputFormat; + + // Per packet state that gets reset at the start of each packet. + private long pesTimeUs; + + /** Creates a new reader. 
*/ + public H263Reader() { + this(null); + } + + /* package */ H263Reader(@Nullable UserDataReader userDataReader) { + this.userDataReader = userDataReader; + prefixFlags = new boolean[4]; + csdBuffer = new CsdBuffer(128); + pesTimeUs = C.TIME_UNSET; + if (userDataReader != null) { + userData = new NalUnitTargetBuffer(START_CODE_VALUE_USER_DATA, 128); + userDataParsable = new ParsableByteArray(); + } else { + userData = null; + userDataParsable = null; + } + } + + @Override + public void seek() { + NalUnitUtil.clearPrefixFlags(prefixFlags); + csdBuffer.reset(); + if (sampleReader != null) { + sampleReader.reset(); + } + if (userData != null) { + userData.reset(); + } + totalBytesWritten = 0; + pesTimeUs = C.TIME_UNSET; + } + + @Override + public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGenerator) { + idGenerator.generateNewId(); + formatId = idGenerator.getFormatId(); + output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_VIDEO); + sampleReader = new SampleReader(output); + if (userDataReader != null) { + userDataReader.createTracks(extractorOutput, idGenerator); + } + } + + @Override + public void packetStarted(long pesTimeUs, @TsPayloadReader.Flags int flags) { + // TODO (Internal b/32267012): Consider using random access indicator. + if (pesTimeUs != C.TIME_UNSET) { + this.pesTimeUs = pesTimeUs; + } + } + + @Override + public void consume(ParsableByteArray data) { + // Assert that createTracks has been called. + checkStateNotNull(sampleReader); + checkStateNotNull(output); + int offset = data.getPosition(); + int limit = data.limit(); + byte[] dataArray = data.getData(); + + // Append the data to the buffer. + totalBytesWritten += data.bytesLeft(); + output.sampleData(data, data.bytesLeft()); + + while (true) { + int startCodeOffset = NalUnitUtil.findNalUnit(dataArray, offset, limit, prefixFlags); + + if (startCodeOffset == limit) { + // We've scanned to the end of the data without finding another start code. + if (!hasOutputFormat) { + csdBuffer.onData(dataArray, offset, limit); + } + sampleReader.onData(dataArray, offset, limit); + if (userData != null) { + userData.appendToNalUnit(dataArray, offset, limit); + } + return; + } + + // We've found a start code with the following value. + int startCodeValue = data.getData()[startCodeOffset + 3] & 0xFF; + // This is the number of bytes from the current offset to the start of the next start + // code. It may be negative if the start code started in the previously consumed data. + int lengthToStartCode = startCodeOffset - offset; + + if (!hasOutputFormat) { + if (lengthToStartCode > 0) { + csdBuffer.onData(dataArray, offset, /* limit= */ startCodeOffset); + } + // This is the number of bytes belonging to the next start code that have already been + // passed to csdBuffer. + int bytesAlreadyPassed = lengthToStartCode < 0 ? -lengthToStartCode : 0; + if (csdBuffer.onStartCode(startCodeValue, bytesAlreadyPassed)) { + // The csd data is complete, so we can decode and output the media format. 
+ output.format( + parseCsdBuffer(csdBuffer, csdBuffer.volStartPosition, checkNotNull(formatId))); + hasOutputFormat = true; + } + } + + sampleReader.onData(dataArray, offset, /* limit= */ startCodeOffset); + + if (userData != null) { + int bytesAlreadyPassed = 0; + if (lengthToStartCode > 0) { + userData.appendToNalUnit(dataArray, offset, /* limit= */ startCodeOffset); + } else { + bytesAlreadyPassed = -lengthToStartCode; + } + + if (userData.endNalUnit(bytesAlreadyPassed)) { + int unescapedLength = NalUnitUtil.unescapeStream(userData.nalData, userData.nalLength); + castNonNull(userDataParsable).reset(userData.nalData, unescapedLength); + castNonNull(userDataReader).consume(pesTimeUs, userDataParsable); + } + + if (startCodeValue == START_CODE_VALUE_USER_DATA + && data.getData()[startCodeOffset + 2] == 0x1) { + userData.startNalUnit(startCodeValue); + } + } + + int bytesWrittenPastPosition = limit - startCodeOffset; + long absolutePosition = totalBytesWritten - bytesWrittenPastPosition; + sampleReader.onDataEnd(absolutePosition, bytesWrittenPastPosition, hasOutputFormat); + // Indicate the start of the next chunk. + sampleReader.onStartCode(startCodeValue, pesTimeUs); + // Continue scanning the data. + offset = startCodeOffset + 3; + } + } + + @Override + public void packetFinished() { + // Do nothing. + } + + /** + * Parses a codec-specific data buffer, returning the {@link Format} of the media. + * + * @param csdBuffer The buffer to parse. + * @param volStartPosition The byte offset of the start of the video object layer in the buffer. + * @param formatId The ID for the generated format. + * @return The {@link Format} of the media represented in the buffer. + */ + private static Format parseCsdBuffer(CsdBuffer csdBuffer, int volStartPosition, String formatId) { + byte[] csdData = Arrays.copyOf(csdBuffer.data, csdBuffer.length); + ParsableBitArray buffer = new ParsableBitArray(csdData); + buffer.skipBytes(volStartPosition); + + // Parse the video object layer defined in ISO 14496-2 (2001) subsection 6.2.3. 
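// [Illustrative aside — not part of the patch] parseCsdBuffer() ends by packaging the parsed VOL
// fields into a Format using the Format.Builder API that this change migrates to, in place of the
// old Format.createVideoSampleFormat factory calls seen in the removed lines above. A minimal
// sketch, assuming width, height, pixelWidthHeightRatio and csdData have already been extracted:
Format format =
    new Format.Builder()
        .setId(formatId)                                           // id from TrackIdGenerator
        .setSampleMimeType(MimeTypes.VIDEO_MP4V)                   // MPEG-4 Part 2 video
        .setWidth(width)
        .setHeight(height)
        .setPixelWidthHeightRatio(pixelWidthHeightRatio)
        .setInitializationData(Collections.singletonList(csdData)) // raw VOS/VOL bytes
        .build();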
+ buffer.skipBytes(4); // video_object_layer_start_code + buffer.skipBit(); // random_accessible_vol + buffer.skipBits(8); // video_object_type_indication + if (buffer.readBit()) { // is_object_layer_identifier + buffer.skipBits(4); // video_object_layer_verid + buffer.skipBits(3); // video_object_layer_priority + } + float pixelWidthHeightRatio; + int aspectRatioInfo = buffer.readBits(4); + if (aspectRatioInfo == 0x0F) { // extended_PAR + int parWidth = buffer.readBits(8); + int parHeight = buffer.readBits(8); + if (parHeight == 0) { + Log.w(TAG, "Invalid aspect ratio"); + pixelWidthHeightRatio = 1f; + } else { + pixelWidthHeightRatio = (float) parWidth / parHeight; + } + } else if (aspectRatioInfo < PIXEL_WIDTH_HEIGHT_RATIO_BY_ASPECT_RATIO_INFO.length) { + pixelWidthHeightRatio = PIXEL_WIDTH_HEIGHT_RATIO_BY_ASPECT_RATIO_INFO[aspectRatioInfo]; + } else { + Log.w(TAG, "Invalid aspect ratio"); + pixelWidthHeightRatio = 1f; + } + if (buffer.readBit()) { // vol_control_parameters + buffer.skipBits(2); // chroma_format + buffer.skipBits(1); // low_delay + if (buffer.readBit()) { // vbv_parameters + buffer.skipBits(15); // first_half_bit_rate + buffer.skipBit(); // marker_bit + buffer.skipBits(15); // latter_half_bit_rate + buffer.skipBit(); // marker_bit + buffer.skipBits(15); // first_half_vbv_buffer_size + buffer.skipBit(); // marker_bit + buffer.skipBits(3); // latter_half_vbv_buffer_size + buffer.skipBits(11); // first_half_vbv_occupancy + buffer.skipBit(); // marker_bit + buffer.skipBits(15); // latter_half_vbv_occupancy + buffer.skipBit(); // marker_bit + } + } + int videoObjectLayerShape = buffer.readBits(2); + if (videoObjectLayerShape != VIDEO_OBJECT_LAYER_SHAPE_RECTANGULAR) { + Log.w(TAG, "Unhandled video object layer shape"); + } + buffer.skipBit(); // marker_bit + int vopTimeIncrementResolution = buffer.readBits(16); + buffer.skipBit(); // marker_bit + if (buffer.readBit()) { // fixed_vop_rate + if (vopTimeIncrementResolution == 0) { + Log.w(TAG, "Invalid vop_increment_time_resolution"); + } else { + vopTimeIncrementResolution--; + int numBits = 0; + while (vopTimeIncrementResolution > 0) { + ++numBits; + vopTimeIncrementResolution >>= 1; + } + buffer.skipBits(numBits); // fixed_vop_time_increment + } + } + buffer.skipBit(); // marker_bit + int videoObjectLayerWidth = buffer.readBits(13); + buffer.skipBit(); // marker_bit + int videoObjectLayerHeight = buffer.readBits(13); + buffer.skipBit(); // marker_bit + buffer.skipBit(); // interlaced + return new Format.Builder() + .setId(formatId) + .setSampleMimeType(MimeTypes.VIDEO_MP4V) + .setWidth(videoObjectLayerWidth) + .setHeight(videoObjectLayerHeight) + .setPixelWidthHeightRatio(pixelWidthHeightRatio) + .setInitializationData(Collections.singletonList(csdData)) + .build(); + } + + private static final class CsdBuffer { + + private static final byte[] START_CODE = new byte[] {0, 0, 1}; + + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + STATE_SKIP_TO_VISUAL_OBJECT_SEQUENCE_START, + STATE_EXPECT_VISUAL_OBJECT_START, + STATE_EXPECT_VIDEO_OBJECT_START, + STATE_EXPECT_VIDEO_OBJECT_LAYER_START, + STATE_WAIT_FOR_VOP_START + }) + private @interface State {} + + private static final int STATE_SKIP_TO_VISUAL_OBJECT_SEQUENCE_START = 0; + private static final int STATE_EXPECT_VISUAL_OBJECT_START = 1; + private static final int STATE_EXPECT_VIDEO_OBJECT_START = 2; + private static final int STATE_EXPECT_VIDEO_OBJECT_LAYER_START = 3; + private static final int STATE_WAIT_FOR_VOP_START = 4; + + private boolean 
isFilling; + private @State int state; + + public int length; + public int volStartPosition; + public byte[] data; + + public CsdBuffer(int initialCapacity) { + data = new byte[initialCapacity]; + } + + public void reset() { + isFilling = false; + length = 0; + state = STATE_SKIP_TO_VISUAL_OBJECT_SEQUENCE_START; + } + + /** + * Called when a start code is encountered in the stream. + * + * @param startCodeValue The start code value. + * @param bytesAlreadyPassed The number of bytes of the start code that have been passed to + * {@link #onData(byte[], int, int)}, or 0. + * @return Whether the csd data is now complete. If true is returned, neither this method nor + * {@link #onData(byte[], int, int)} should be called again without an interleaving call to + * {@link #reset()}. + */ + public boolean onStartCode(int startCodeValue, int bytesAlreadyPassed) { + switch (state) { + case STATE_SKIP_TO_VISUAL_OBJECT_SEQUENCE_START: + if (startCodeValue == START_CODE_VALUE_VISUAL_OBJECT_SEQUENCE) { + state = STATE_EXPECT_VISUAL_OBJECT_START; + isFilling = true; + } + break; + case STATE_EXPECT_VISUAL_OBJECT_START: + if (startCodeValue != START_CODE_VALUE_VISUAL_OBJECT) { + Log.w(TAG, "Unexpected start code value"); + reset(); + } else { + state = STATE_EXPECT_VIDEO_OBJECT_START; + } + break; + case STATE_EXPECT_VIDEO_OBJECT_START: + if (startCodeValue > START_CODE_VALUE_MAX_VIDEO_OBJECT) { + Log.w(TAG, "Unexpected start code value"); + reset(); + } else { + state = STATE_EXPECT_VIDEO_OBJECT_LAYER_START; + } + break; + case STATE_EXPECT_VIDEO_OBJECT_LAYER_START: + if ((startCodeValue & 0xF0) != 0x20) { + Log.w(TAG, "Unexpected start code value"); + reset(); + } else { + volStartPosition = length; + state = STATE_WAIT_FOR_VOP_START; + } + break; + case STATE_WAIT_FOR_VOP_START: + if (startCodeValue == START_CODE_VALUE_GROUP_OF_VOP + || startCodeValue == START_CODE_VALUE_VISUAL_OBJECT) { + length -= bytesAlreadyPassed; + isFilling = false; + return true; + } + break; + default: + throw new IllegalStateException(); + } + onData(START_CODE, /* offset= */ 0, /* limit= */ START_CODE.length); + return false; + } + + public void onData(byte[] newData, int offset, int limit) { + if (!isFilling) { + return; + } + int readLength = limit - offset; + if (data.length < length + readLength) { + data = Arrays.copyOf(data, (length + readLength) * 2); + } + System.arraycopy(newData, offset, data, length, readLength); + length += readLength; + } + } + + private static final class SampleReader { + + /** Byte offset of vop_coding_type after the start code value. */ + private static final int OFFSET_VOP_CODING_TYPE = 1; + /** Value of vop_coding_type for intra video object planes. 
*/ + private static final int VOP_CODING_TYPE_INTRA = 0; + + private final TrackOutput output; + + private boolean readingSample; + private boolean lookingForVopCodingType; + private boolean sampleIsKeyframe; + private int startCodeValue; + private int vopBytesRead; + private long samplePosition; + private long sampleTimeUs; + + public SampleReader(TrackOutput output) { + this.output = output; + } + + public void reset() { + readingSample = false; + lookingForVopCodingType = false; + sampleIsKeyframe = false; + startCodeValue = START_CODE_VALUE_UNSET; + } + + public void onStartCode(int startCodeValue, long pesTimeUs) { + this.startCodeValue = startCodeValue; + sampleIsKeyframe = false; + readingSample = + startCodeValue == START_CODE_VALUE_VOP || startCodeValue == START_CODE_VALUE_GROUP_OF_VOP; + lookingForVopCodingType = startCodeValue == START_CODE_VALUE_VOP; + vopBytesRead = 0; + sampleTimeUs = pesTimeUs; + } + + public void onData(byte[] data, int offset, int limit) { + if (lookingForVopCodingType) { + int headerOffset = offset + OFFSET_VOP_CODING_TYPE - vopBytesRead; + if (headerOffset < limit) { + sampleIsKeyframe = ((data[headerOffset] & 0xC0) >> 6) == VOP_CODING_TYPE_INTRA; + lookingForVopCodingType = false; + } else { + vopBytesRead += limit - offset; + } + } + } + + public void onDataEnd(long position, int bytesWrittenPastPosition, boolean hasOutputFormat) { + if (startCodeValue == START_CODE_VALUE_VOP + && hasOutputFormat + && readingSample + && sampleTimeUs != C.TIME_UNSET) { + int size = (int) (position - samplePosition); + @C.BufferFlags int flags = sampleIsKeyframe ? C.BUFFER_FLAG_KEY_FRAME : 0; + output.sampleMetadata( + sampleTimeUs, flags, size, bytesWrittenPastPosition, /* cryptoData= */ null); + } + // Start a new sample, unless this is a 'group of video object plane' in which case we + // include the data at the start of a 'video object plane' coming next. 
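// [Illustrative aside — not part of the patch] SampleReader.onData() above decides whether a VOP is
// a keyframe from vop_coding_type, the two most significant bits of the byte that follows the
// 0x000001B6 start code (OFFSET_VOP_CODING_TYPE). A standalone sketch of that check, with a
// hypothetical helper name:
static boolean isIntraVop(byte[] data, int vopStartCodeValueOffset) {
  // data[vopStartCodeValueOffset] is the 0xB6 start code value; the next byte begins the VOP header.
  int vopCodingType = (data[vopStartCodeValueOffset + 1] & 0xC0) >> 6;
  return vopCodingType == 0; // 0 = I-VOP (intra), 1 = P-VOP, 2 = B-VOP, 3 = S-VOP
}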
+ if (startCodeValue != START_CODE_VALUE_GROUP_OF_VOP) { + samplePosition = position; + } + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/H264Reader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/H264Reader.java index d249c1b9da..1d59c3c69a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/H264Reader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/H264Reader.java @@ -18,30 +18,30 @@ import static com.google.android.exoplayer2.extractor.ts.TsPayloadReader.FLAG_RANDOM_ACCESS_INDICATOR; import android.util.SparseArray; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.CodecSpecificDataUtil; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.NalUnitUtil; import com.google.android.exoplayer2.util.NalUnitUtil.SpsData; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.ParsableNalUnitBitArray; +import com.google.android.exoplayer2.util.Util; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import org.checkerframework.checker.nullness.qual.EnsuresNonNull; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * Parses a continuous H264 byte stream and extracts individual frames. - */ +/** Parses a continuous H264 byte stream and extracts individual frames. */ public final class H264Reader implements ElementaryStreamReader { - private static final int NAL_UNIT_TYPE_SEI = 6; // Supplemental enhancement information - private static final int NAL_UNIT_TYPE_SPS = 7; // Sequence parameter set - private static final int NAL_UNIT_TYPE_PPS = 8; // Picture parameter set - private final SeiReader seiReader; private final boolean allowNonIdrKeyframes; private final boolean detectAccessUnits; @@ -51,9 +51,9 @@ public final class H264Reader implements ElementaryStreamReader { private long totalBytesWritten; private final boolean[] prefixFlags; - private String formatId; - private TrackOutput output; - private SampleReader sampleReader; + private @MonotonicNonNull String formatId; + private @MonotonicNonNull TrackOutput output; + private @MonotonicNonNull SampleReader sampleReader; // State that should not be reset on seek. 
private boolean hasOutputFormat; @@ -79,21 +79,25 @@ public H264Reader(SeiReader seiReader, boolean allowNonIdrKeyframes, boolean det this.allowNonIdrKeyframes = allowNonIdrKeyframes; this.detectAccessUnits = detectAccessUnits; prefixFlags = new boolean[3]; - sps = new NalUnitTargetBuffer(NAL_UNIT_TYPE_SPS, 128); - pps = new NalUnitTargetBuffer(NAL_UNIT_TYPE_PPS, 128); - sei = new NalUnitTargetBuffer(NAL_UNIT_TYPE_SEI, 128); + sps = new NalUnitTargetBuffer(NalUnitUtil.NAL_UNIT_TYPE_SPS, 128); + pps = new NalUnitTargetBuffer(NalUnitUtil.NAL_UNIT_TYPE_PPS, 128); + sei = new NalUnitTargetBuffer(NalUnitUtil.NAL_UNIT_TYPE_SEI, 128); + pesTimeUs = C.TIME_UNSET; seiWrapper = new ParsableByteArray(); } @Override public void seek() { + totalBytesWritten = 0; + randomAccessIndicator = false; + pesTimeUs = C.TIME_UNSET; NalUnitUtil.clearPrefixFlags(prefixFlags); sps.reset(); pps.reset(); sei.reset(); - sampleReader.reset(); - totalBytesWritten = 0; - randomAccessIndicator = false; + if (sampleReader != null) { + sampleReader.reset(); + } } @Override @@ -107,15 +111,19 @@ public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGen @Override public void packetStarted(long pesTimeUs, @TsPayloadReader.Flags int flags) { - this.pesTimeUs = pesTimeUs; + if (pesTimeUs != C.TIME_UNSET) { + this.pesTimeUs = pesTimeUs; + } randomAccessIndicator |= (flags & FLAG_RANDOM_ACCESS_INDICATOR) != 0; } @Override public void consume(ParsableByteArray data) { + assertTracksCreated(); + int offset = data.getPosition(); int limit = data.limit(); - byte[] dataArray = data.data; + byte[] dataArray = data.getData(); // Append the data to the buffer. totalBytesWritten += data.bytesLeft(); @@ -145,8 +153,11 @@ public void consume(ParsableByteArray data) { // Indicate the end of the previous NAL unit. If the length to the start of the next unit // is negative then we wrote too many bytes to the NAL buffers. Discard the excess bytes // when notifying that the unit has ended. - endNalUnit(absolutePosition, bytesWrittenPastPosition, - lengthToNalUnit < 0 ? -lengthToNalUnit : 0, pesTimeUs); + endNalUnit( + absolutePosition, + bytesWrittenPastPosition, + lengthToNalUnit < 0 ? -lengthToNalUnit : 0, + pesTimeUs); // Indicate the start of the next NAL unit. startNalUnit(absolutePosition, nalUnitType, pesTimeUs); // Continue scanning the data. @@ -159,6 +170,7 @@ public void packetFinished() { // Do nothing. 
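// [Illustrative aside — not part of the patch] The @RequiresNonNull / @EnsuresNonNull annotations
// added below are Checker Framework nullness contracts: assertTracksCreated() establishes that
// "output" and "sampleReader" are non-null, so the private helpers annotated with @RequiresNonNull
// may dereference them without nullness warnings. A condensed sketch of the idiom these readers use:
@EnsuresNonNull({"output", "sampleReader"})
private void assertTracksCreated() {
  Assertions.checkStateNotNull(output); // fails fast if createTracks(...) was never called
  Util.castNonNull(sampleReader);       // tells the checker sampleReader is non-null from here on
}

@RequiresNonNull("sampleReader")
private void startNalUnit(long position, int nalUnitType, long pesTimeUs) {
  sampleReader.startNalUnit(position, nalUnitType, pesTimeUs); // safe: callers established non-null
}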
} + @RequiresNonNull("sampleReader") private void startNalUnit(long position, int nalUnitType, long pesTimeUs) { if (!hasOutputFormat || sampleReader.needsSpsPps()) { sps.startNalUnit(nalUnitType); @@ -168,6 +180,7 @@ private void startNalUnit(long position, int nalUnitType, long pesTimeUs) { sampleReader.startNalUnit(position, nalUnitType, pesTimeUs); } + @RequiresNonNull("sampleReader") private void nalUnitData(byte[] dataArray, int offset, int limit) { if (!hasOutputFormat || sampleReader.needsSpsPps()) { sps.appendToNalUnit(dataArray, offset, limit); @@ -177,6 +190,7 @@ private void nalUnitData(byte[] dataArray, int offset, int limit) { sampleReader.appendToNalUnit(dataArray, offset, limit); } + @RequiresNonNull({"output", "sampleReader"}) private void endNalUnit(long position, int offset, int discardPadding, long pesTimeUs) { if (!hasOutputFormat || sampleReader.needsSpsPps()) { sps.endNalUnit(discardPadding); @@ -188,23 +202,21 @@ private void endNalUnit(long position, int offset, int discardPadding, long pesT initializationData.add(Arrays.copyOf(pps.nalData, pps.nalLength)); NalUnitUtil.SpsData spsData = NalUnitUtil.parseSpsNalUnit(sps.nalData, 3, sps.nalLength); NalUnitUtil.PpsData ppsData = NalUnitUtil.parsePpsNalUnit(pps.nalData, 3, pps.nalLength); + String codecs = + CodecSpecificDataUtil.buildAvcCodecString( + spsData.profileIdc, + spsData.constraintsFlagsAndReservedZero2Bits, + spsData.levelIdc); output.format( - Format.createVideoSampleFormat( - formatId, - MimeTypes.VIDEO_H264, - CodecSpecificDataUtil.buildAvcCodecString( - spsData.profileIdc, - spsData.constraintsFlagsAndReservedZero2Bits, - spsData.levelIdc), - /* bitrate= */ Format.NO_VALUE, - /* maxInputSize= */ Format.NO_VALUE, - spsData.width, - spsData.height, - /* frameRate= */ Format.NO_VALUE, - initializationData, - /* rotationDegrees= */ Format.NO_VALUE, - spsData.pixelWidthAspectRatio, - /* drmInitData= */ null)); + new Format.Builder() + .setId(formatId) + .setSampleMimeType(MimeTypes.VIDEO_H264) + .setCodecs(codecs) + .setWidth(spsData.width) + .setHeight(spsData.height) + .setPixelWidthHeightRatio(spsData.pixelWidthHeightRatio) + .setInitializationData(initializationData) + .build()); hasOutputFormat = true; sampleReader.putSps(spsData); sampleReader.putPps(ppsData); @@ -237,16 +249,17 @@ private void endNalUnit(long position, int offset, int discardPadding, long pesT } } + @EnsuresNonNull({"output", "sampleReader"}) + private void assertTracksCreated() { + Assertions.checkStateNotNull(output); + Util.castNonNull(sampleReader); + } + /** Consumes a stream of NAL units and outputs samples. 
*/ private static final class SampleReader { private static final int DEFAULT_BUFFER_SIZE = 128; - private static final int NAL_UNIT_TYPE_NON_IDR = 1; // Coded slice of a non-IDR picture - private static final int NAL_UNIT_TYPE_PARTITION_A = 2; // Coded slice data partition A - private static final int NAL_UNIT_TYPE_IDR = 5; // Coded slice of an IDR picture - private static final int NAL_UNIT_TYPE_AUD = 9; // Access unit delimiter - private final TrackOutput output; private final boolean allowNonIdrKeyframes; private final boolean detectAccessUnits; @@ -271,8 +284,8 @@ private static final class SampleReader { private long sampleTimeUs; private boolean sampleIsKeyframe; - public SampleReader(TrackOutput output, boolean allowNonIdrKeyframes, - boolean detectAccessUnits) { + public SampleReader( + TrackOutput output, boolean allowNonIdrKeyframes, boolean detectAccessUnits) { this.output = output; this.allowNonIdrKeyframes = allowNonIdrKeyframes; this.detectAccessUnits = detectAccessUnits; @@ -307,10 +320,11 @@ public void startNalUnit(long position, int type, long pesTimeUs) { nalUnitType = type; nalUnitTimeUs = pesTimeUs; nalUnitStartPosition = position; - if ((allowNonIdrKeyframes && nalUnitType == NAL_UNIT_TYPE_NON_IDR) - || (detectAccessUnits && (nalUnitType == NAL_UNIT_TYPE_IDR - || nalUnitType == NAL_UNIT_TYPE_NON_IDR - || nalUnitType == NAL_UNIT_TYPE_PARTITION_A))) { + if ((allowNonIdrKeyframes && nalUnitType == NalUnitUtil.NAL_UNIT_TYPE_NON_IDR) + || (detectAccessUnits + && (nalUnitType == NalUnitUtil.NAL_UNIT_TYPE_IDR + || nalUnitType == NalUnitUtil.NAL_UNIT_TYPE_NON_IDR + || nalUnitType == NalUnitUtil.NAL_UNIT_TYPE_PARTITION_A))) { // Store the previous header and prepare to populate the new one. SliceHeaderData newSliceHeader = previousSliceHeader; previousSliceHeader = sliceHeader; @@ -400,7 +414,7 @@ public void appendToNalUnit(byte[] data, int offset, int limit) { bottomFieldFlagPresent = true; } } - boolean idrPicFlag = nalUnitType == NAL_UNIT_TYPE_IDR; + boolean idrPicFlag = nalUnitType == NalUnitUtil.NAL_UNIT_TYPE_IDR; int idrPicId = 0; if (idrPicFlag) { if (!bitArray.canReadExpGolombCodedNum()) { @@ -423,8 +437,7 @@ public void appendToNalUnit(byte[] data, int offset, int limit) { } deltaPicOrderCntBottom = bitArray.readSignedExpGolombCodedInt(); } - } else if (spsData.picOrderCountType == 1 - && !spsData.deltaPicOrderAlwaysZeroFlag) { + } else if (spsData.picOrderCountType == 1 && !spsData.deltaPicOrderAlwaysZeroFlag) { if (!bitArray.canReadExpGolombCodedNum()) { return; } @@ -436,15 +449,27 @@ public void appendToNalUnit(byte[] data, int offset, int limit) { deltaPicOrderCnt1 = bitArray.readSignedExpGolombCodedInt(); } } - sliceHeader.setAll(spsData, nalRefIdc, sliceType, frameNum, picParameterSetId, fieldPicFlag, - bottomFieldFlagPresent, bottomFieldFlag, idrPicFlag, idrPicId, picOrderCntLsb, - deltaPicOrderCntBottom, deltaPicOrderCnt0, deltaPicOrderCnt1); + sliceHeader.setAll( + spsData, + nalRefIdc, + sliceType, + frameNum, + picParameterSetId, + fieldPicFlag, + bottomFieldFlagPresent, + bottomFieldFlag, + idrPicFlag, + idrPicId, + picOrderCntLsb, + deltaPicOrderCntBottom, + deltaPicOrderCnt0, + deltaPicOrderCnt1); isFilling = false; } public boolean endNalUnit( long position, int offset, boolean hasOutputFormat, boolean randomAccessIndicator) { - if (nalUnitType == NAL_UNIT_TYPE_AUD + if (nalUnitType == NalUnitUtil.NAL_UNIT_TYPE_AUD || (detectAccessUnits && sliceHeader.isFirstVclNalUnitOfPicture(previousSliceHeader))) { // If the NAL unit ending is the start of a new 
sample, output the previous one. if (hasOutputFormat && readingSample) { @@ -459,12 +484,15 @@ public boolean endNalUnit( boolean treatIFrameAsKeyframe = allowNonIdrKeyframes ? sliceHeader.isISlice() : randomAccessIndicator; sampleIsKeyframe |= - nalUnitType == NAL_UNIT_TYPE_IDR - || (treatIFrameAsKeyframe && nalUnitType == NAL_UNIT_TYPE_NON_IDR); + nalUnitType == NalUnitUtil.NAL_UNIT_TYPE_IDR + || (treatIFrameAsKeyframe && nalUnitType == NalUnitUtil.NAL_UNIT_TYPE_NON_IDR); return sampleIsKeyframe; } private void outputSample(int offset) { + if (sampleTimeUs == C.TIME_UNSET) { + return; + } @C.BufferFlags int flags = sampleIsKeyframe ? C.BUFFER_FLAG_KEY_FRAME : 0; int size = (int) (nalUnitStartPosition - samplePosition); output.sampleMetadata(sampleTimeUs, flags, size, offset, null); @@ -478,7 +506,7 @@ private static final class SliceHeaderData { private boolean isComplete; private boolean hasSliceType; - private SpsData spsData; + @Nullable private SpsData spsData; private int nalRefIdc; private int sliceType; private int frameNum; @@ -541,26 +569,32 @@ public boolean isISlice() { } private boolean isFirstVclNalUnitOfPicture(SliceHeaderData other) { + if (!isComplete) { + return false; + } + if (!other.isComplete) { + return true; + } // See ISO 14496-10 subsection 7.4.1.2.4. - return isComplete - && (!other.isComplete - || frameNum != other.frameNum - || picParameterSetId != other.picParameterSetId - || fieldPicFlag != other.fieldPicFlag - || (bottomFieldFlagPresent - && other.bottomFieldFlagPresent - && bottomFieldFlag != other.bottomFieldFlag) - || (nalRefIdc != other.nalRefIdc && (nalRefIdc == 0 || other.nalRefIdc == 0)) - || (spsData.picOrderCountType == 0 - && other.spsData.picOrderCountType == 0 - && (picOrderCntLsb != other.picOrderCntLsb - || deltaPicOrderCntBottom != other.deltaPicOrderCntBottom)) - || (spsData.picOrderCountType == 1 - && other.spsData.picOrderCountType == 1 - && (deltaPicOrderCnt0 != other.deltaPicOrderCnt0 - || deltaPicOrderCnt1 != other.deltaPicOrderCnt1)) - || idrPicFlag != other.idrPicFlag - || (idrPicFlag && other.idrPicFlag && idrPicId != other.idrPicId)); + SpsData spsData = Assertions.checkStateNotNull(this.spsData); + SpsData otherSpsData = Assertions.checkStateNotNull(other.spsData); + return frameNum != other.frameNum + || picParameterSetId != other.picParameterSetId + || fieldPicFlag != other.fieldPicFlag + || (bottomFieldFlagPresent + && other.bottomFieldFlagPresent + && bottomFieldFlag != other.bottomFieldFlag) + || (nalRefIdc != other.nalRefIdc && (nalRefIdc == 0 || other.nalRefIdc == 0)) + || (spsData.picOrderCountType == 0 + && otherSpsData.picOrderCountType == 0 + && (picOrderCntLsb != other.picOrderCntLsb + || deltaPicOrderCntBottom != other.deltaPicOrderCntBottom)) + || (spsData.picOrderCountType == 1 + && otherSpsData.picOrderCountType == 1 + && (deltaPicOrderCnt0 != other.deltaPicOrderCnt0 + || deltaPicOrderCnt1 != other.deltaPicOrderCnt1)) + || idrPicFlag != other.idrPicFlag + || (idrPicFlag && idrPicId != other.idrPicId); } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/H265Reader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/H265Reader.java index b4007ea4a4..5c84ffe131 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/H265Reader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/H265Reader.java @@ -15,21 +15,28 @@ */ package com.google.android.exoplayer2.extractor.ts; +import static 
java.lang.Math.min; + +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.CodecSpecificDataUtil; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.NalUnitUtil; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.ParsableNalUnitBitArray; +import com.google.android.exoplayer2.util.Util; import java.util.Collections; +import org.checkerframework.checker.nullness.qual.EnsuresNonNull; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * Parses a continuous H.265 byte stream and extracts individual frames. - */ +/** Parses a continuous H.265 byte stream and extracts individual frames. */ public final class H265Reader implements ElementaryStreamReader { private static final String TAG = "H265Reader"; @@ -47,9 +54,9 @@ public final class H265Reader implements ElementaryStreamReader { private final SeiReader seiReader; - private String formatId; - private TrackOutput output; - private SampleReader sampleReader; + private @MonotonicNonNull String formatId; + private @MonotonicNonNull TrackOutput output; + private @MonotonicNonNull SampleReader sampleReader; // State that should not be reset on seek. private boolean hasOutputFormat; @@ -80,19 +87,23 @@ public H265Reader(SeiReader seiReader) { pps = new NalUnitTargetBuffer(PPS_NUT, 128); prefixSei = new NalUnitTargetBuffer(PREFIX_SEI_NUT, 128); suffixSei = new NalUnitTargetBuffer(SUFFIX_SEI_NUT, 128); + pesTimeUs = C.TIME_UNSET; seiWrapper = new ParsableByteArray(); } @Override public void seek() { + totalBytesWritten = 0; + pesTimeUs = C.TIME_UNSET; NalUnitUtil.clearPrefixFlags(prefixFlags); vps.reset(); sps.reset(); pps.reset(); prefixSei.reset(); suffixSei.reset(); - sampleReader.reset(); - totalBytesWritten = 0; + if (sampleReader != null) { + sampleReader.reset(); + } } @Override @@ -107,15 +118,19 @@ public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGen @Override public void packetStarted(long pesTimeUs, @TsPayloadReader.Flags int flags) { // TODO (Internal b/32267012): Consider using random access indicator. - this.pesTimeUs = pesTimeUs; + if (pesTimeUs != C.TIME_UNSET) { + this.pesTimeUs = pesTimeUs; + } } @Override public void consume(ParsableByteArray data) { + assertTracksCreated(); + while (data.bytesLeft() > 0) { int offset = data.getPosition(); int limit = data.limit(); - byte[] dataArray = data.data; + byte[] dataArray = data.getData(); // Append the data to the buffer. totalBytesWritten += data.bytesLeft(); @@ -146,8 +161,11 @@ public void consume(ParsableByteArray data) { // Indicate the end of the previous NAL unit. If the length to the start of the next unit // is negative then we wrote too many bytes to the NAL buffers. Discard the excess bytes // when notifying that the unit has ended. - endNalUnit(absolutePosition, bytesWrittenPastPosition, - lengthToNalUnit < 0 ? -lengthToNalUnit : 0, pesTimeUs); + endNalUnit( + absolutePosition, + bytesWrittenPastPosition, + lengthToNalUnit < 0 ? 
-lengthToNalUnit : 0, + pesTimeUs); // Indicate the start of the next NAL unit. startNalUnit(absolutePosition, bytesWrittenPastPosition, nalUnitType, pesTimeUs); // Continue scanning the data. @@ -161,6 +179,7 @@ public void packetFinished() { // Do nothing. } + @RequiresNonNull("sampleReader") private void startNalUnit(long position, int offset, int nalUnitType, long pesTimeUs) { sampleReader.startNalUnit(position, offset, nalUnitType, pesTimeUs, hasOutputFormat); if (!hasOutputFormat) { @@ -172,6 +191,7 @@ private void startNalUnit(long position, int offset, int nalUnitType, long pesTi suffixSei.startNalUnit(nalUnitType); } + @RequiresNonNull("sampleReader") private void nalUnitData(byte[] dataArray, int offset, int limit) { sampleReader.readNalUnitData(dataArray, offset, limit); if (!hasOutputFormat) { @@ -183,6 +203,7 @@ private void nalUnitData(byte[] dataArray, int offset, int limit) { suffixSei.appendToNalUnit(dataArray, offset, limit); } + @RequiresNonNull({"output", "sampleReader"}) private void endNalUnit(long position, int offset, int discardPadding, long pesTimeUs) { sampleReader.endNalUnit(position, offset, hasOutputFormat); if (!hasOutputFormat) { @@ -212,23 +233,36 @@ private void endNalUnit(long position, int offset, int discardPadding, long pesT } } - private static Format parseMediaFormat(String formatId, NalUnitTargetBuffer vps, - NalUnitTargetBuffer sps, NalUnitTargetBuffer pps) { + private static Format parseMediaFormat( + @Nullable String formatId, + NalUnitTargetBuffer vps, + NalUnitTargetBuffer sps, + NalUnitTargetBuffer pps) { // Build codec-specific data. - byte[] csd = new byte[vps.nalLength + sps.nalLength + pps.nalLength]; - System.arraycopy(vps.nalData, 0, csd, 0, vps.nalLength); - System.arraycopy(sps.nalData, 0, csd, vps.nalLength, sps.nalLength); - System.arraycopy(pps.nalData, 0, csd, vps.nalLength + sps.nalLength, pps.nalLength); + byte[] csdData = new byte[vps.nalLength + sps.nalLength + pps.nalLength]; + System.arraycopy(vps.nalData, 0, csdData, 0, vps.nalLength); + System.arraycopy(sps.nalData, 0, csdData, vps.nalLength, sps.nalLength); + System.arraycopy(pps.nalData, 0, csdData, vps.nalLength + sps.nalLength, pps.nalLength); // Parse the SPS NAL unit, as per H.265/HEVC (2014) 7.3.2.2.1. 
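// [Illustrative aside — not part of the patch] The hunk below replaces the old "skip 88 + 8 bits"
// treatment of profile_tier_level() with a real parse, because those fields are needed to build an
// RFC 6381 codecs string via CodecSpecificDataUtil.buildHevcCodecString (argument order as used
// further down; the concrete values here are hypothetical examples, not taken from any real stream):
String codecs =
    CodecSpecificDataUtil.buildHevcCodecString(
        /* generalProfileSpace= */ 0,                   // 2 bits, normally 0
        /* generalTierFlag= */ false,                   // false = Main tier, true = High tier
        /* generalProfileIdc= */ 1,                     // 5 bits: 1 = Main, 2 = Main 10
        /* generalProfileCompatibilityFlags= */ 1 << 1, // 32 one-bit flags folded into an int
        /* constraintBytes= */ new int[6],              // 48 constraint/reserved bits
        /* generalLevelIdc= */ 93);                     // level * 30, e.g. 93 = level 3.1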
ParsableNalUnitBitArray bitArray = new ParsableNalUnitBitArray(sps.nalData, 0, sps.nalLength); bitArray.skipBits(40 + 4); // NAL header, sps_video_parameter_set_id int maxSubLayersMinus1 = bitArray.readBits(3); bitArray.skipBit(); // sps_temporal_id_nesting_flag - - // profile_tier_level(1, sps_max_sub_layers_minus1) - bitArray.skipBits(88); // if (profilePresentFlag) {...} - bitArray.skipBits(8); // general_level_idc + int generalProfileSpace = bitArray.readBits(2); + boolean generalTierFlag = bitArray.readBit(); + int generalProfileIdc = bitArray.readBits(5); + int generalProfileCompatibilityFlags = 0; + for (int i = 0; i < 32; i++) { + if (bitArray.readBit()) { + generalProfileCompatibilityFlags |= (1 << i); + } + } + int[] constraintBytes = new int[6]; + for (int i = 0; i < constraintBytes.length; ++i) { + constraintBytes[i] = bitArray.readBits(8); + } + int generalLevelIdc = bitArray.readBits(8); int toSkip = 0; for (int i = 0; i < maxSubLayersMinus1; i++) { if (bitArray.readBit()) { // sub_layer_profile_present_flag[i] @@ -316,16 +350,49 @@ private static Format parseMediaFormat(String formatId, NalUnitTargetBuffer vps, Log.w(TAG, "Unexpected aspect_ratio_idc value: " + aspectRatioIdc); } } + if (bitArray.readBit()) { // overscan_info_present_flag + bitArray.skipBit(); // overscan_appropriate_flag + } + if (bitArray.readBit()) { // video_signal_type_present_flag + bitArray.skipBits(4); // video_format, video_full_range_flag + if (bitArray.readBit()) { // colour_description_present_flag + // colour_primaries, transfer_characteristics, matrix_coeffs + bitArray.skipBits(24); + } + } + if (bitArray.readBit()) { // chroma_loc_info_present_flag + bitArray.readUnsignedExpGolombCodedInt(); // chroma_sample_loc_type_top_field + bitArray.readUnsignedExpGolombCodedInt(); // chroma_sample_loc_type_bottom_field + } + bitArray.skipBit(); // neutral_chroma_indication_flag + if (bitArray.readBit()) { // field_seq_flag + // field_seq_flag equal to 1 indicates that the coded video sequence conveys pictures that + // represent fields, which means that frame height is double the picture height. + picHeightInLumaSamples *= 2; + } } - return Format.createVideoSampleFormat(formatId, MimeTypes.VIDEO_H265, null, Format.NO_VALUE, - Format.NO_VALUE, picWidthInLumaSamples, picHeightInLumaSamples, Format.NO_VALUE, - Collections.singletonList(csd), Format.NO_VALUE, pixelWidthHeightRatio, null); + String codecs = + CodecSpecificDataUtil.buildHevcCodecString( + generalProfileSpace, + generalTierFlag, + generalProfileIdc, + generalProfileCompatibilityFlags, + constraintBytes, + generalLevelIdc); + + return new Format.Builder() + .setId(formatId) + .setSampleMimeType(MimeTypes.VIDEO_H265) + .setCodecs(codecs) + .setWidth(picWidthInLumaSamples) + .setHeight(picHeightInLumaSamples) + .setPixelWidthHeightRatio(pixelWidthHeightRatio) + .setInitializationData(Collections.singletonList(csdData)) + .build(); } - /** - * Skips scaling_list_data(). See H.265/HEVC (2014) 7.3.4. - */ + /** Skips scaling_list_data(). See H.265/HEVC (2014) 7.3.4. */ private static void skipScalingList(ParsableNalUnitBitArray bitArray) { for (int sizeId = 0; sizeId < 4; sizeId++) { for (int matrixId = 0; matrixId < 6; matrixId += sizeId == 3 ? 
3 : 1) { @@ -333,7 +400,7 @@ private static void skipScalingList(ParsableNalUnitBitArray bitArray) { // scaling_list_pred_matrix_id_delta[sizeId][matrixId] bitArray.readUnsignedExpGolombCodedInt(); } else { - int coefNum = Math.min(64, 1 << (4 + (sizeId << 1))); + int coefNum = min(64, 1 << (4 + (sizeId << 1))); if (sizeId > 1) { // scaling_list_dc_coef_minus8[sizeId - 2][matrixId] bitArray.readSignedExpGolombCodedInt(); @@ -387,6 +454,12 @@ private static void skipShortTermRefPicSets(ParsableNalUnitBitArray bitArray) { } } + @EnsuresNonNull({"output", "sampleReader"}) + private void assertTracksCreated() { + Assertions.checkStateNotNull(output); + Util.castNonNull(sampleReader); + } + private static final class SampleReader { /** @@ -483,6 +556,9 @@ public void endNalUnit(long position, int offset, boolean hasOutputFormat) { } private void outputSample(int offset) { + if (sampleTimeUs == C.TIME_UNSET) { + return; + } @C.BufferFlags int flags = sampleIsKeyframe ? C.BUFFER_FLAG_KEY_FRAME : 0; int size = (int) (nalUnitPosition - samplePosition); output.sampleMetadata(sampleTimeUs, flags, size, offset, null); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Id3Reader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Id3Reader.java index 77ec48d0a7..4f6915f6b1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Id3Reader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/Id3Reader.java @@ -17,26 +17,27 @@ import static com.google.android.exoplayer2.extractor.ts.TsPayloadReader.FLAG_DATA_ALIGNMENT_INDICATOR; import static com.google.android.exoplayer2.metadata.id3.Id3Decoder.ID3_HEADER_LENGTH; +import static java.lang.Math.min; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * Parses ID3 data and extracts individual text information frames. - */ +/** Parses ID3 data and extracts individual text information frames. */ public final class Id3Reader implements ElementaryStreamReader { private static final String TAG = "Id3Reader"; private final ParsableByteArray id3Header; - private TrackOutput output; + private @MonotonicNonNull TrackOutput output; // State that should be reset on seek. 
private boolean writingSample; @@ -48,19 +49,24 @@ public final class Id3Reader implements ElementaryStreamReader { public Id3Reader() { id3Header = new ParsableByteArray(ID3_HEADER_LENGTH); + sampleTimeUs = C.TIME_UNSET; } @Override public void seek() { writingSample = false; + sampleTimeUs = C.TIME_UNSET; } @Override public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGenerator) { idGenerator.generateNewId(); output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_METADATA); - output.format(Format.createSampleFormat(idGenerator.getFormatId(), MimeTypes.APPLICATION_ID3, - null, Format.NO_VALUE, null)); + output.format( + new Format.Builder() + .setId(idGenerator.getFormatId()) + .setSampleMimeType(MimeTypes.APPLICATION_ID3) + .build()); } @Override @@ -69,26 +75,34 @@ public void packetStarted(long pesTimeUs, @TsPayloadReader.Flags int flags) { return; } writingSample = true; - sampleTimeUs = pesTimeUs; + if (pesTimeUs != C.TIME_UNSET) { + sampleTimeUs = pesTimeUs; + } sampleSize = 0; sampleBytesRead = 0; } @Override public void consume(ParsableByteArray data) { + Assertions.checkStateNotNull(output); // Asserts that createTracks has been called. if (!writingSample) { return; } int bytesAvailable = data.bytesLeft(); if (sampleBytesRead < ID3_HEADER_LENGTH) { // We're still reading the ID3 header. - int headerBytesAvailable = Math.min(bytesAvailable, ID3_HEADER_LENGTH - sampleBytesRead); - System.arraycopy(data.data, data.getPosition(), id3Header.data, sampleBytesRead, + int headerBytesAvailable = min(bytesAvailable, ID3_HEADER_LENGTH - sampleBytesRead); + System.arraycopy( + data.getData(), + data.getPosition(), + id3Header.getData(), + sampleBytesRead, headerBytesAvailable); if (sampleBytesRead + headerBytesAvailable == ID3_HEADER_LENGTH) { // We've finished reading the ID3 header. Extract the sample size. id3Header.setPosition(0); - if ('I' != id3Header.readUnsignedByte() || 'D' != id3Header.readUnsignedByte() + if ('I' != id3Header.readUnsignedByte() + || 'D' != id3Header.readUnsignedByte() || '3' != id3Header.readUnsignedByte()) { Log.w(TAG, "Discarding invalid ID3 tag"); writingSample = false; @@ -99,18 +113,20 @@ public void consume(ParsableByteArray data) { } } // Write data to the output. - int bytesToWrite = Math.min(bytesAvailable, sampleSize - sampleBytesRead); + int bytesToWrite = min(bytesAvailable, sampleSize - sampleBytesRead); output.sampleData(data, bytesToWrite); sampleBytesRead += bytesToWrite; } @Override public void packetFinished() { + Assertions.checkStateNotNull(output); // Asserts that createTracks has been called. 
if (!writingSample || sampleSize == 0 || sampleBytesRead != sampleSize) { return; } - output.sampleMetadata(sampleTimeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null); + if (sampleTimeUs != C.TIME_UNSET) { + output.sampleMetadata(sampleTimeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null); + } writingSample = false; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/LatmReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/LatmReader.java index 4ad9adfa2a..a4a4eede3c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/LatmReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/LatmReader.java @@ -15,23 +15,25 @@ */ package com.google.android.exoplayer2.extractor.ts; -import android.util.Pair; +import static java.lang.Math.min; + import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.audio.AacUtil; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator; -import com.google.android.exoplayer2.util.CodecSpecificDataUtil; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableBitArray; import com.google.android.exoplayer2.util.ParsableByteArray; import java.util.Collections; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * Parses and extracts samples from an AAC/LATM elementary stream. - */ +/** Parses and extracts samples from an AAC/LATM elementary stream. */ public final class LatmReader implements ElementaryStreamReader { private static final int STATE_FINDING_SYNC_1 = 0; @@ -43,14 +45,14 @@ public final class LatmReader implements ElementaryStreamReader { private static final int SYNC_BYTE_FIRST = 0x56; private static final int SYNC_BYTE_SECOND = 0xE0; - private final String language; + @Nullable private final String language; private final ParsableByteArray sampleDataBuffer; private final ParsableBitArray sampleBitArray; // Track output info. - private TrackOutput output; - private Format format; - private String formatId; + private @MonotonicNonNull TrackOutput output; + private @MonotonicNonNull String formatId; + private @MonotonicNonNull Format format; // Parser state info. private int state; @@ -69,6 +71,7 @@ public final class LatmReader implements ElementaryStreamReader { private int sampleRateHz; private long sampleDurationUs; private int channelCount; + @Nullable private String codecs; /** * @param language Track language. 
@@ -76,12 +79,14 @@ public final class LatmReader implements ElementaryStreamReader { public LatmReader(@Nullable String language) { this.language = language; sampleDataBuffer = new ParsableByteArray(INITIAL_BUFFER_SIZE); - sampleBitArray = new ParsableBitArray(sampleDataBuffer.data); + sampleBitArray = new ParsableBitArray(sampleDataBuffer.getData()); + timeUs = C.TIME_UNSET; } @Override public void seek() { state = STATE_FINDING_SYNC_1; + timeUs = C.TIME_UNSET; streamMuxRead = false; } @@ -94,11 +99,14 @@ public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGen @Override public void packetStarted(long pesTimeUs, @TsPayloadReader.Flags int flags) { - timeUs = pesTimeUs; + if (pesTimeUs != C.TIME_UNSET) { + timeUs = pesTimeUs; + } } @Override public void consume(ParsableByteArray data) throws ParserException { + Assertions.checkStateNotNull(output); // Asserts that createTracks has been called. int bytesToRead; while (data.bytesLeft() > 0) { switch (state) { @@ -118,14 +126,14 @@ public void consume(ParsableByteArray data) throws ParserException { break; case STATE_READING_HEADER: sampleSize = ((secondHeaderByte & ~SYNC_BYTE_SECOND) << 8) | data.readUnsignedByte(); - if (sampleSize > sampleDataBuffer.data.length) { + if (sampleSize > sampleDataBuffer.getData().length) { resetBufferForSize(sampleSize); } bytesRead = 0; state = STATE_READING_SAMPLE; break; case STATE_READING_SAMPLE: - bytesToRead = Math.min(data.bytesLeft(), sampleSize - bytesRead); + bytesToRead = min(data.bytesLeft(), sampleSize - bytesRead); data.readBytes(sampleBitArray.data, bytesRead, bytesToRead); bytesRead += bytesToRead; if (bytesRead == sampleSize) { @@ -150,6 +158,7 @@ public void packetFinished() { * * @param data A {@link ParsableBitArray} containing the AudioMuxElement's bytes. */ + @RequiresNonNull("output") private void parseAudioMuxElement(ParsableBitArray data) throws ParserException { boolean useSameStreamMux = data.readBit(); if (!useSameStreamMux) { @@ -161,7 +170,7 @@ private void parseAudioMuxElement(ParsableBitArray data) throws ParserException if (audioMuxVersionA == 0) { if (numSubframes != 0) { - throw new ParserException(); + throw ParserException.createForMalformedContainer(/* message= */ null, /* cause= */ null); } int muxSlotLengthBytes = parsePayloadLengthInfo(data); parsePayloadMux(data, muxSlotLengthBytes); @@ -169,13 +178,13 @@ private void parseAudioMuxElement(ParsableBitArray data) throws ParserException data.skipBits((int) otherDataLenBits); } } else { - throw new ParserException(); // Not defined by ISO/IEC 14496-3:2009. + // Not defined by ISO/IEC 14496-3:2009. + throw ParserException.createForMalformedContainer(/* message= */ null, /* cause= */ null); } } - /** - * Parses a StreamMuxConfig as defined in ISO/IEC 14496-3:2009 Section 1.7.3.1, Table 1.42. - */ + /** Parses a StreamMuxConfig as defined in ISO/IEC 14496-3:2009 Section 1.7.3.1, Table 1.42. */ + @RequiresNonNull("output") private void parseStreamMuxConfig(ParsableBitArray data) throws ParserException { int audioMuxVersion = data.readBits(1); audioMuxVersionA = audioMuxVersion == 1 ? data.readBits(1) : 0; @@ -184,13 +193,13 @@ private void parseStreamMuxConfig(ParsableBitArray data) throws ParserException latmGetValue(data); // Skip taraBufferFullness. 
} if (!data.readBit()) { - throw new ParserException(); + throw ParserException.createForMalformedContainer(/* message= */ null, /* cause= */ null); } numSubframes = data.readBits(6); int numProgram = data.readBits(4); int numLayer = data.readBits(3); if (numProgram != 0 || numLayer != 0) { - throw new ParserException(); + throw ParserException.createForMalformedContainer(/* message= */ null, /* cause= */ null); } if (audioMuxVersion == 0) { int startPosition = data.getPosition(); @@ -198,9 +207,16 @@ private void parseStreamMuxConfig(ParsableBitArray data) throws ParserException data.setPosition(startPosition); byte[] initData = new byte[(readBits + 7) / 8]; data.readBits(initData, 0, readBits); - Format format = Format.createAudioSampleFormat(formatId, MimeTypes.AUDIO_AAC, null, - Format.NO_VALUE, Format.NO_VALUE, channelCount, sampleRateHz, - Collections.singletonList(initData), null, 0, language); + Format format = + new Format.Builder() + .setId(formatId) + .setSampleMimeType(MimeTypes.AUDIO_AAC) + .setCodecs(codecs) + .setChannelCount(channelCount) + .setSampleRate(sampleRateHz) + .setInitializationData(Collections.singletonList(initData)) + .setLanguage(language) + .build(); if (!format.equals(this.format)) { this.format = format; sampleDurationUs = (C.MICROS_PER_SECOND * 1024) / format.sampleRate; @@ -230,7 +246,8 @@ private void parseStreamMuxConfig(ParsableBitArray data) throws ParserException data.skipBits(8); // crcCheckSum. } } else { - throw new ParserException(); // This is not defined by ISO/IEC 14496-3:2009. + // This is not defined by ISO/IEC 14496-3:2009. + throw ParserException.createForMalformedContainer(/* message= */ null, /* cause= */ null); } } @@ -259,9 +276,10 @@ private void parseFrameLength(ParsableBitArray data) { private int parseAudioSpecificConfig(ParsableBitArray data) throws ParserException { int bitsLeft = data.bitsLeft(); - Pair config = CodecSpecificDataUtil.parseAacAudioSpecificConfig(data, true); - sampleRateHz = config.first; - channelCount = config.second; + AacUtil.Config config = AacUtil.parseAudioSpecificConfig(data, /* forceReadToEnd= */ true); + codecs = config.codecs; + sampleRateHz = config.sampleRateHz; + channelCount = config.channelCount; return bitsLeft - data.bitsLeft(); } @@ -276,10 +294,11 @@ private int parsePayloadLengthInfo(ParsableBitArray data) throws ParserException } while (tmp == 255); return muxSlotLengthBytes; } else { - throw new ParserException(); + throw ParserException.createForMalformedContainer(/* message= */ null, /* cause= */ null); } } + @RequiresNonNull("output") private void parsePayloadMux(ParsableBitArray data, int muxLengthBytes) { // The start of sample data in int bitPosition = data.getPosition(); @@ -289,22 +308,23 @@ private void parsePayloadMux(ParsableBitArray data, int muxLengthBytes) { } else { // Sample data is not byte-aligned and we need align it ourselves before outputting. // Byte alignment is needed because LATM framing is not supported by MediaCodec. 
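// [Illustrative aside — not part of the patch] When the AudioMuxElement payload does not start on a
// byte boundary, parsePayloadMux() copies it bit-by-bit into the scratch buffer so the decoder
// receives ordinary byte-aligned AAC (MediaCodec cannot consume LATM framing directly). A minimal
// sketch of that re-alignment, assuming "data" is a ParsableBitArray positioned at the payload:
byte[] aligned = new byte[muxLengthBytes];
data.readBits(aligned, 0, muxLengthBytes * 8); // reads muxLengthBytes * 8 bits from an arbitrary bit offset
// "aligned" now starts on a byte boundary and can be wrapped in a ParsableByteArray for sampleData().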
- data.readBits(sampleDataBuffer.data, 0, muxLengthBytes * 8); + data.readBits(sampleDataBuffer.getData(), 0, muxLengthBytes * 8); sampleDataBuffer.setPosition(0); } output.sampleData(sampleDataBuffer, muxLengthBytes); - output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, muxLengthBytes, 0, null); - timeUs += sampleDurationUs; + if (timeUs != C.TIME_UNSET) { + output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, muxLengthBytes, 0, null); + timeUs += sampleDurationUs; + } } private void resetBufferForSize(int newSize) { sampleDataBuffer.reset(newSize); - sampleBitArray.reset(sampleDataBuffer.data); + sampleBitArray.reset(sampleDataBuffer.getData()); } private static long latmGetValue(ParsableBitArray data) { int bytesForValue = data.readBits(2); return data.readBits((bytesForValue + 1) * 8); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/MpegAudioReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/MpegAudioReader.java index 393e297818..db02d9f697 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/MpegAudioReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/MpegAudioReader.java @@ -15,17 +15,21 @@ */ package com.google.android.exoplayer2.extractor.ts; +import static java.lang.Math.min; + +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.audio.MpegAudioUtil; import com.google.android.exoplayer2.extractor.ExtractorOutput; -import com.google.android.exoplayer2.extractor.MpegAudioHeader; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.ParsableByteArray; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * Parses a continuous MPEG Audio byte stream and extracts individual frames. - */ +/** Parses a continuous MPEG Audio byte stream and extracts individual frames. */ public final class MpegAudioReader implements ElementaryStreamReader { private static final int STATE_FINDING_HEADER = 0; @@ -35,11 +39,11 @@ public final class MpegAudioReader implements ElementaryStreamReader { private static final int HEADER_SIZE = 4; private final ParsableByteArray headerScratch; - private final MpegAudioHeader header; - private final String language; + private final MpegAudioUtil.Header header; + @Nullable private final String language; - private String formatId; - private TrackOutput output; + private @MonotonicNonNull TrackOutput output; + private @MonotonicNonNull String formatId; private int state; private int frameBytesRead; @@ -59,12 +63,13 @@ public MpegAudioReader() { this(null); } - public MpegAudioReader(String language) { + public MpegAudioReader(@Nullable String language) { state = STATE_FINDING_HEADER; // The first byte of an MPEG Audio frame header is always 0xFF. 
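// [Illustrative aside — not part of the patch] A pattern applied to every reader in this change, and
// to MpegAudioReader in the constructor below (timeUs = C.TIME_UNSET): timestamp fields start out
// unset, packetStarted() only overwrites them with real PES timestamps, and no sample metadata is
// emitted while the timestamp is still unset. A condensed sketch (field and method names here are
// illustrative):
private long timeUs = C.TIME_UNSET;

void onPacketStarted(long pesTimeUs) {
  if (pesTimeUs != C.TIME_UNSET) {
    timeUs = pesTimeUs; // keep the last valid timestamp; ignore "unset" markers
  }
}

void maybeOutputSample(TrackOutput output, int size) {
  if (timeUs == C.TIME_UNSET) {
    return; // never emit a sample without a valid presentation timestamp
  }
  output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, size, /* offset= */ 0, /* cryptoData= */ null);
}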
headerScratch = new ParsableByteArray(4); - headerScratch.data[0] = (byte) 0xFF; - header = new MpegAudioHeader(); + headerScratch.getData()[0] = (byte) 0xFF; + header = new MpegAudioUtil.Header(); + timeUs = C.TIME_UNSET; this.language = language; } @@ -73,6 +78,7 @@ public void seek() { state = STATE_FINDING_HEADER; frameBytesRead = 0; lastByteWasFF = false; + timeUs = C.TIME_UNSET; } @Override @@ -84,11 +90,14 @@ public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGen @Override public void packetStarted(long pesTimeUs, @TsPayloadReader.Flags int flags) { - timeUs = pesTimeUs; + if (pesTimeUs != C.TIME_UNSET) { + timeUs = pesTimeUs; + } } @Override public void consume(ParsableByteArray data) { + Assertions.checkStateNotNull(output); // Asserts that createTracks has been called. while (data.bytesLeft() > 0) { switch (state) { case STATE_FINDING_HEADER: @@ -113,18 +122,18 @@ public void packetFinished() { /** * Attempts to locate the start of the next frame header. - *

<p>
- * If a frame header is located then the state is changed to {@link #STATE_READING_HEADER}, the
+ *
+ * <p>
      If a frame header is located then the state is changed to {@link #STATE_READING_HEADER}, the * first two bytes of the header are written into {@link #headerScratch}, and the position of the * source is advanced to the byte that immediately follows these two bytes. - *

<p>
- * If a frame header is not located then the position of the source is advanced to the limit, and
- * the method should be called again with the next source to continue the search.
+ *
+ * <p>
      If a frame header is not located then the position of the source is advanced to the limit, + * and the method should be called again with the next source to continue the search. * * @param source The source from which to read. */ private void findHeader(ParsableByteArray source) { - byte[] data = source.data; + byte[] data = source.getData(); int startOffset = source.getPosition(); int endOffset = source.limit(); for (int i = startOffset; i < endOffset; i++) { @@ -135,7 +144,7 @@ private void findHeader(ParsableByteArray source) { source.setPosition(i + 1); // Reset lastByteWasFF for next time. lastByteWasFF = false; - headerScratch.data[1] = data[i]; + headerScratch.getData()[1] = data[i]; frameBytesRead = 2; state = STATE_READING_HEADER; return; @@ -146,23 +155,24 @@ private void findHeader(ParsableByteArray source) { /** * Attempts to read the remaining two bytes of the frame header. - *
<p>
-   * If a frame header is read in full then the state is changed to {@link #STATE_READING_FRAME},
+   *
+   * <p>If a frame header is read in full then the state is changed to {@link #STATE_READING_FRAME},
    * the media format is output if this has not previously occurred, the four header bytes are
    * output as sample data, and the position of the source is advanced to the byte that immediately
    * follows the header.
-   * <p>
-   * If a frame header is read in full but cannot be parsed then the state is changed to
-   * {@link #STATE_READING_HEADER}.
-   * <p>
-   * If a frame header is not read in full then the position of the source is advanced to the limit,
-   * and the method should be called again with the next source to continue the read.
+   *
+   * <p>If a frame header is read in full but cannot be parsed then the state is changed to {@link
+   * #STATE_READING_HEADER}.
+   *
+   * <p>
      If a frame header is not read in full then the position of the source is advanced to the + * limit, and the method should be called again with the next source to continue the read. * * @param source The source from which to read. */ + @RequiresNonNull("output") private void readHeaderRemainder(ParsableByteArray source) { - int bytesToRead = Math.min(source.bytesLeft(), HEADER_SIZE - frameBytesRead); - source.readBytes(headerScratch.data, frameBytesRead, bytesToRead); + int bytesToRead = min(source.bytesLeft(), HEADER_SIZE - frameBytesRead); + source.readBytes(headerScratch.getData(), frameBytesRead, bytesToRead); frameBytesRead += bytesToRead; if (frameBytesRead < HEADER_SIZE) { // We haven't read the whole header yet. @@ -170,7 +180,7 @@ private void readHeaderRemainder(ParsableByteArray source) { } headerScratch.setPosition(0); - boolean parsedHeader = MpegAudioHeader.populateHeader(headerScratch.readInt(), header); + boolean parsedHeader = header.setForHeaderData(headerScratch.readInt()); if (!parsedHeader) { // We thought we'd located a frame header, but we hadn't. frameBytesRead = 0; @@ -181,9 +191,15 @@ private void readHeaderRemainder(ParsableByteArray source) { frameSize = header.frameSize; if (!hasOutputFormat) { frameDurationUs = (C.MICROS_PER_SECOND * header.samplesPerFrame) / header.sampleRate; - Format format = Format.createAudioSampleFormat(formatId, header.mimeType, null, - Format.NO_VALUE, MpegAudioHeader.MAX_FRAME_SIZE_BYTES, header.channels, header.sampleRate, - null, null, 0, language); + Format format = + new Format.Builder() + .setId(formatId) + .setSampleMimeType(header.mimeType) + .setMaxInputSize(MpegAudioUtil.MAX_FRAME_SIZE_BYTES) + .setChannelCount(header.channels) + .setSampleRate(header.sampleRate) + .setLanguage(language) + .build(); output.format(format); hasOutputFormat = true; } @@ -195,18 +211,19 @@ private void readHeaderRemainder(ParsableByteArray source) { /** * Attempts to read the remainder of the frame. - *
<p>
-   * If a frame is read in full then true is returned. The frame will have been output, and the
+   *
+   * <p>If a frame is read in full then true is returned. The frame will have been output, and the
    * position of the source will have been advanced to the byte that immediately follows the end of
    * the frame.
-   * <p>
-   * If a frame is not read in full then the position of the source will have been advanced to the
-   * limit, and the method should be called again with the next source to continue the read.
+   *
+   * <p>
      If a frame is not read in full then the position of the source will have been advanced to + * the limit, and the method should be called again with the next source to continue the read. * * @param source The source from which to read. */ + @RequiresNonNull("output") private void readFrameRemainder(ParsableByteArray source) { - int bytesToRead = Math.min(source.bytesLeft(), frameSize - frameBytesRead); + int bytesToRead = min(source.bytesLeft(), frameSize - frameBytesRead); output.sampleData(source, bytesToRead); frameBytesRead += bytesToRead; if (frameBytesRead < frameSize) { @@ -214,10 +231,11 @@ private void readFrameRemainder(ParsableByteArray source) { return; } - output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, frameSize, 0, null); - timeUs += frameDurationUs; + if (timeUs != C.TIME_UNSET) { + output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, frameSize, 0, null); + timeUs += frameDurationUs; + } frameBytesRead = 0; state = STATE_FINDING_HEADER; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/NalUnitTargetBuffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/NalUnitTargetBuffer.java index ece2fdf767..04081c7cdf 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/NalUnitTargetBuffer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/NalUnitTargetBuffer.java @@ -19,8 +19,8 @@ import java.util.Arrays; /** - * A buffer that fills itself with data corresponding to a specific NAL unit, as it is - * encountered in the stream. + * A buffer that fills itself with data corresponding to a specific NAL unit, as it is encountered + * in the stream. */ /* package */ final class NalUnitTargetBuffer { @@ -40,17 +40,13 @@ public NalUnitTargetBuffer(int targetType, int initialCapacity) { nalData[2] = 1; } - /** - * Resets the buffer, clearing any data that it holds. - */ + /** Resets the buffer, clearing any data that it holds. */ public void reset() { isFilling = false; isCompleted = false; } - /** - * Returns whether the buffer currently holds a complete NAL unit of the target type. - */ + /** Returns whether the buffer currently holds a complete NAL unit of the target type. */ public boolean isCompleted() { return isCompleted; } @@ -92,8 +88,8 @@ public void appendToNalUnit(byte[] data, int offset, int limit) { /** * Called to indicate that a NAL unit has ended. * - * @param discardPadding The number of excess bytes that were passed to - * {@link #appendToNalUnit(byte[], int, int)}, which should be discarded. + * @param discardPadding The number of excess bytes that were passed to {@link + * #appendToNalUnit(byte[], int, int)}, which should be discarded. * @return Whether the ended NAL unit is of the target type. 
*/ public boolean endNalUnit(int discardPadding) { @@ -105,5 +101,4 @@ public boolean endNalUnit(int discardPadding) { isCompleted = true; return true; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PassthroughSectionPayloadReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PassthroughSectionPayloadReader.java new file mode 100644 index 0000000000..111cfd8750 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PassthroughSectionPayloadReader.java @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.extractor.ts; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.TimestampAdjuster; +import com.google.android.exoplayer2.util.Util; +import org.checkerframework.checker.nullness.qual.EnsuresNonNull; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * A {@link SectionPayloadReader} that directly outputs the section bytes as sample data. + * + *
<p>
      Timestamp adjustment is provided through {@link Format#subsampleOffsetUs}. + */ +public final class PassthroughSectionPayloadReader implements SectionPayloadReader { + + private Format format; + private @MonotonicNonNull TimestampAdjuster timestampAdjuster; + private @MonotonicNonNull TrackOutput output; + + /** + * Create a new PassthroughSectionPayloadReader. + * + * @param mimeType The MIME type set as {@link Format#sampleMimeType} on the created output track. + */ + public PassthroughSectionPayloadReader(String mimeType) { + this.format = new Format.Builder().setSampleMimeType(mimeType).build(); + } + + @Override + public void init( + TimestampAdjuster timestampAdjuster, + ExtractorOutput extractorOutput, + TsPayloadReader.TrackIdGenerator idGenerator) { + this.timestampAdjuster = timestampAdjuster; + idGenerator.generateNewId(); + output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_METADATA); + // Eagerly output an incomplete format (missing timestamp offset) to ensure source preparation + // is not blocked waiting for potentially sparse metadata. + output.format(format); + } + + @Override + public void consume(ParsableByteArray sectionData) { + assertInitialized(); + long sampleTimestampUs = timestampAdjuster.getLastAdjustedTimestampUs(); + long subsampleOffsetUs = timestampAdjuster.getTimestampOffsetUs(); + if (sampleTimestampUs == C.TIME_UNSET || subsampleOffsetUs == C.TIME_UNSET) { + // Don't output samples without a known sample timestamp and subsample offset. + return; + } + if (subsampleOffsetUs != format.subsampleOffsetUs) { + format = format.buildUpon().setSubsampleOffsetUs(subsampleOffsetUs).build(); + output.format(format); + } + int sampleSize = sectionData.bytesLeft(); + output.sampleData(sectionData, sampleSize); + output.sampleMetadata(sampleTimestampUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null); + } + + @EnsuresNonNull({"timestampAdjuster", "output"}) + private void assertInitialized() { + Assertions.checkStateNotNull(timestampAdjuster); + Util.castNonNull(output); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PesReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PesReader.java index ff755f4ece..7b4ca00cef 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PesReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PesReader.java @@ -15,17 +15,21 @@ */ package com.google.android.exoplayer2.extractor.ts; +import static java.lang.Math.min; + +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.ParsableBitArray; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.TimestampAdjuster; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * Parses PES packet data and extracts samples. - */ +/** Parses PES packet data and extracts samples. 
*/ public final class PesReader implements TsPayloadReader { private static final String TAG = "PesReader"; @@ -45,7 +49,7 @@ public final class PesReader implements TsPayloadReader { private int state; private int bytesRead; - private TimestampAdjuster timestampAdjuster; + private @MonotonicNonNull TimestampAdjuster timestampAdjuster; private boolean ptsFlag; private boolean dtsFlag; private boolean seenFirstDts; @@ -61,7 +65,9 @@ public PesReader(ElementaryStreamReader reader) { } @Override - public void init(TimestampAdjuster timestampAdjuster, ExtractorOutput extractorOutput, + public void init( + TimestampAdjuster timestampAdjuster, + ExtractorOutput extractorOutput, TrackIdGenerator idGenerator) { this.timestampAdjuster = timestampAdjuster; reader.createTracks(extractorOutput, idGenerator); @@ -79,6 +85,8 @@ public final void seek() { @Override public final void consume(ParsableByteArray data, @Flags int flags) throws ParserException { + Assertions.checkStateNotNull(timestampAdjuster); // Asserts init has been called. + if ((flags & FLAG_PAYLOAD_UNIT_START_INDICATOR) != 0) { switch (state) { case STATE_FINDING_HEADER: @@ -89,11 +97,11 @@ public final void consume(ParsableByteArray data, @Flags int flags) throws Parse Log.w(TAG, "Unexpected start indicator reading extended header"); break; case STATE_READING_BODY: - // If payloadSize == -1 then the length of the previous packet was unspecified, and so - // we only know that it's finished now that we've seen the start of the next one. This - // is expected. If payloadSize != -1, then the length of the previous packet was known, - // but we didn't receive that amount of data. This is not expected. - if (payloadSize != -1) { + // If payloadSize is unset then the length of the previous packet was unspecified, and so + // we only know that it's finished now that we've seen the start of the next one. This is + // expected. If payloadSize is set, then the length of the previous packet was known, but + // we didn't receive that amount of data. This is not expected. + if (payloadSize != C.LENGTH_UNSET) { Log.w(TAG, "Unexpected start indicator: expected " + payloadSize + " more bytes"); } // Either way, notify the reader that it has now finished. @@ -116,10 +124,10 @@ public final void consume(ParsableByteArray data, @Flags int flags) throws Parse } break; case STATE_READING_HEADER_EXTENSION: - int readLength = Math.min(MAX_HEADER_EXTENSION_SIZE, extendedHeaderLength); + int readLength = min(MAX_HEADER_EXTENSION_SIZE, extendedHeaderLength); // Read as much of the extended header as we're interested in, and skip the rest. if (continueRead(data, pesScratch.data, readLength) - && continueRead(data, null, extendedHeaderLength)) { + && continueRead(data, /* target= */ null, extendedHeaderLength)) { parseHeaderExtension(); flags |= dataAlignmentIndicator ? FLAG_DATA_ALIGNMENT_INDICATOR : 0; reader.packetStarted(timeUs, flags); @@ -128,13 +136,13 @@ && continueRead(data, null, extendedHeaderLength)) { break; case STATE_READING_BODY: readLength = data.bytesLeft(); - int padding = payloadSize == -1 ? 0 : readLength - payloadSize; + int padding = payloadSize == C.LENGTH_UNSET ? 
0 : readLength - payloadSize; if (padding > 0) { readLength -= padding; data.setLimit(data.getPosition() + readLength); } reader.consume(data); - if (payloadSize != -1) { + if (payloadSize != C.LENGTH_UNSET) { payloadSize -= readLength; if (payloadSize == 0) { reader.packetFinished(); @@ -162,8 +170,9 @@ private void setState(int state) { * @param targetLength The target length of the read. * @return Whether the target length has been reached. */ - private boolean continueRead(ParsableByteArray source, byte[] target, int targetLength) { - int bytesToRead = Math.min(source.bytesLeft(), targetLength - bytesRead); + private boolean continueRead( + ParsableByteArray source, @Nullable byte[] target, int targetLength) { + int bytesToRead = min(source.bytesLeft(), targetLength - bytesRead); if (bytesToRead <= 0) { return true; } else if (target == null) { @@ -182,7 +191,7 @@ private boolean parseHeader() { int startCodePrefix = pesScratch.readBits(24); if (startCodePrefix != 0x000001) { Log.w(TAG, "Unexpected start code prefix: " + startCodePrefix); - payloadSize = -1; + payloadSize = C.LENGTH_UNSET; return false; } @@ -199,14 +208,22 @@ private boolean parseHeader() { extendedHeaderLength = pesScratch.readBits(8); if (packetLength == 0) { - payloadSize = -1; + payloadSize = C.LENGTH_UNSET; } else { - payloadSize = packetLength + 6 /* packetLength does not include the first 6 bytes */ - - HEADER_SIZE - extendedHeaderLength; + payloadSize = + packetLength + + 6 /* packetLength does not include the first 6 bytes */ + - HEADER_SIZE + - extendedHeaderLength; + if (payloadSize < 0) { + Log.w(TAG, "Found negative packet payload size: " + payloadSize); + payloadSize = C.LENGTH_UNSET; + } } return true; } + @RequiresNonNull("timestampAdjuster") private void parseHeaderExtension() { pesScratch.setPosition(0); timeUs = C.TIME_UNSET; @@ -237,5 +254,4 @@ private void parseHeaderExtension() { timeUs = timestampAdjuster.adjustTsTimestamp(pts); } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PsBinarySearchSeeker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PsBinarySearchSeeker.java index c4f53ba176..3616a0c354 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PsBinarySearchSeeker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PsBinarySearchSeeker.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.extractor.ts; +import static java.lang.Math.min; + import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.extractor.BinarySearchSeeker; import com.google.android.exoplayer2.extractor.ExtractorInput; @@ -35,7 +37,7 @@ private static final long SEEK_TOLERANCE_US = 100_000; private static final int MINIMUM_SEARCH_RANGE_BYTES = 1000; - private static final int TIMESTAMP_SEARCH_BYTES = 20000; + private static final int TIMESTAMP_SEARCH_BYTES = 20_000; public PsBinarySearchSeeker( TimestampAdjuster scrTimestampAdjuster, long streamDurationUs, long inputLength) { @@ -70,12 +72,12 @@ private PsScrSeeker(TimestampAdjuster scrTimestampAdjuster) { @Override public TimestampSearchResult searchForTimestamp(ExtractorInput input, long targetTimestamp) - throws IOException, InterruptedException { + throws IOException { long inputPosition = input.getPosition(); - int bytesToSearch = (int) Math.min(TIMESTAMP_SEARCH_BYTES, input.getLength() - inputPosition); + int bytesToSearch = (int) min(TIMESTAMP_SEARCH_BYTES, input.getLength() - inputPosition); 
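// A minimal sketch of the bounded peek-and-scan pattern used by PsScrSeeker/TsPcrSeeker above:
// clamp the search window to the bytes left in the input, peek without consuming, then scan the
// peeked buffer for a start code. The class name, SEARCH_BYTES constant and findPackStartCode
// helper are assumptions for illustration; only ExtractorInput and ParsableByteArray come from
// ExoPlayer, and the input length is assumed to be known.
import com.google.android.exoplayer2.extractor.ExtractorInput;
import com.google.android.exoplayer2.util.ParsableByteArray;
import java.io.IOException;

final class StartCodeScanSketch {
  private static final int SEARCH_BYTES = 20_000; // Same order of magnitude as TIMESTAMP_SEARCH_BYTES.
  private static final int PACK_START_CODE = 0x000001BA; // Pack start code, as defined in PsExtractor.

  /** Returns the buffer offset of the first pack_start_code in the peeked window, or -1. */
  static int findPackStartCode(ExtractorInput input, ParsableByteArray buffer) throws IOException {
    int bytesToSearch = (int) Math.min(SEARCH_BYTES, input.getLength() - input.getPosition());
    buffer.reset(bytesToSearch);
    input.peekFully(buffer.getData(), /* offset= */ 0, bytesToSearch);
    byte[] data = buffer.getData();
    for (int i = 0; i + 4 <= bytesToSearch; i++) {
      int word =
          ((data[i] & 0xFF) << 24)
              | ((data[i + 1] & 0xFF) << 16)
              | ((data[i + 2] & 0xFF) << 8)
              | (data[i + 3] & 0xFF);
      if (word == PACK_START_CODE) {
        return i;
      }
    }
    return -1; // No pack header in this window; a real seeker would move its search position.
  }
}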
packetBuffer.reset(bytesToSearch); - input.peekFully(packetBuffer.data, /* offset= */ 0, bytesToSearch); + input.peekFully(packetBuffer.getData(), /* offset= */ 0, bytesToSearch); return searchForScrValueInBuffer(packetBuffer, targetTimestamp, inputPosition); } @@ -92,7 +94,7 @@ private TimestampSearchResult searchForScrValueInBuffer( long lastScrTimeUsInRange = C.TIME_UNSET; while (packetBuffer.bytesLeft() >= 4) { - int nextStartCode = peekIntAtPosition(packetBuffer.data, packetBuffer.getPosition()); + int nextStartCode = peekIntAtPosition(packetBuffer.getData(), packetBuffer.getPosition()); if (nextStartCode != PsExtractor.PACK_START_CODE) { packetBuffer.skipBytes(1); continue; @@ -162,7 +164,7 @@ private static void skipToEndOfCurrentPack(ParsableByteArray packetBuffer) { return; } - int nextStartCode = peekIntAtPosition(packetBuffer.data, packetBuffer.getPosition()); + int nextStartCode = peekIntAtPosition(packetBuffer.getData(), packetBuffer.getPosition()); if (nextStartCode == PsExtractor.SYSTEM_HEADER_START_CODE) { packetBuffer.skipBytes(4); int systemHeaderLength = packetBuffer.readUnsignedShort(); @@ -178,7 +180,7 @@ private static void skipToEndOfCurrentPack(ParsableByteArray packetBuffer) { // If we couldn't find these codes within the buffer, return the buffer limit, or return // the first position which PES packets pattern does not match (some malformed packets). while (packetBuffer.bytesLeft() >= 4) { - nextStartCode = peekIntAtPosition(packetBuffer.data, packetBuffer.getPosition()); + nextStartCode = peekIntAtPosition(packetBuffer.getData(), packetBuffer.getPosition()); if (nextStartCode == PsExtractor.PACK_START_CODE || nextStartCode == PsExtractor.MPEG_PROGRAM_END_CODE) { break; @@ -195,7 +197,7 @@ private static void skipToEndOfCurrentPack(ParsableByteArray packetBuffer) { } int pesPacketLength = packetBuffer.readUnsignedShort(); packetBuffer.setPosition( - Math.min(packetBuffer.limit(), packetBuffer.getPosition() + pesPacketLength)); + min(packetBuffer.limit(), packetBuffer.getPosition() + pesPacketLength)); } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PsDurationReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PsDurationReader.java index b0cdf7eb79..d55e633207 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PsDurationReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PsDurationReader.java @@ -15,10 +15,13 @@ */ package com.google.android.exoplayer2.extractor.ts; +import static java.lang.Math.min; + import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.PositionHolder; +import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.TimestampAdjuster; import com.google.android.exoplayer2.util.Util; @@ -39,7 +42,9 @@ */ /* package */ final class PsDurationReader { - private static final int TIMESTAMP_SEARCH_BYTES = 20000; + private static final String TAG = "PsDurationReader"; + + private static final int TIMESTAMP_SEARCH_BYTES = 20_000; private final TimestampAdjuster scrTimestampAdjuster; private final ParsableByteArray packetBuffer; @@ -81,11 +86,9 @@ public TimestampAdjuster getScrTimestampAdjuster() { * to hold the position of the required seek. 
* @return One of the {@code RESULT_} values defined in {@link Extractor}. * @throws IOException If an error occurred reading from the input. - * @throws InterruptedException If the thread was interrupted. */ public @Extractor.ReadResult int readDuration( - ExtractorInput input, PositionHolder seekPositionHolder) - throws IOException, InterruptedException { + ExtractorInput input, PositionHolder seekPositionHolder) throws IOException { if (!isLastScrValueRead) { return readLastScrValue(input, seekPositionHolder); } @@ -102,6 +105,10 @@ public TimestampAdjuster getScrTimestampAdjuster() { long minScrPositionUs = scrTimestampAdjuster.adjustTsTimestamp(firstScrValue); long maxScrPositionUs = scrTimestampAdjuster.adjustTsTimestamp(lastScrValue); durationUs = maxScrPositionUs - minScrPositionUs; + if (durationUs < 0) { + Log.w(TAG, "Invalid duration: " + durationUs + ". Using TIME_UNSET instead."); + durationUs = C.TIME_UNSET; + } return finishReadDuration(input); } @@ -137,8 +144,8 @@ private int finishReadDuration(ExtractorInput input) { } private int readFirstScrValue(ExtractorInput input, PositionHolder seekPositionHolder) - throws IOException, InterruptedException { - int bytesToSearch = (int) Math.min(TIMESTAMP_SEARCH_BYTES, input.getLength()); + throws IOException { + int bytesToSearch = (int) min(TIMESTAMP_SEARCH_BYTES, input.getLength()); int searchStartPosition = 0; if (input.getPosition() != searchStartPosition) { seekPositionHolder.position = searchStartPosition; @@ -147,7 +154,7 @@ private int readFirstScrValue(ExtractorInput input, PositionHolder seekPositionH packetBuffer.reset(bytesToSearch); input.resetPeekPosition(); - input.peekFully(packetBuffer.data, /* offset= */ 0, bytesToSearch); + input.peekFully(packetBuffer.getData(), /* offset= */ 0, bytesToSearch); firstScrValue = readFirstScrValueFromBuffer(packetBuffer); isFirstScrValueRead = true; @@ -160,7 +167,7 @@ private long readFirstScrValueFromBuffer(ParsableByteArray packetBuffer) { for (int searchPosition = searchStartPosition; searchPosition < searchEndPosition - 3; searchPosition++) { - int nextStartCode = peekIntAtPosition(packetBuffer.data, searchPosition); + int nextStartCode = peekIntAtPosition(packetBuffer.getData(), searchPosition); if (nextStartCode == PsExtractor.PACK_START_CODE) { packetBuffer.setPosition(searchPosition + 4); long scrValue = readScrValueFromPack(packetBuffer); @@ -173,9 +180,9 @@ private long readFirstScrValueFromBuffer(ParsableByteArray packetBuffer) { } private int readLastScrValue(ExtractorInput input, PositionHolder seekPositionHolder) - throws IOException, InterruptedException { + throws IOException { long inputLength = input.getLength(); - int bytesToSearch = (int) Math.min(TIMESTAMP_SEARCH_BYTES, inputLength); + int bytesToSearch = (int) min(TIMESTAMP_SEARCH_BYTES, inputLength); long searchStartPosition = inputLength - bytesToSearch; if (input.getPosition() != searchStartPosition) { seekPositionHolder.position = searchStartPosition; @@ -184,7 +191,7 @@ private int readLastScrValue(ExtractorInput input, PositionHolder seekPositionHo packetBuffer.reset(bytesToSearch); input.resetPeekPosition(); - input.peekFully(packetBuffer.data, /* offset= */ 0, bytesToSearch); + input.peekFully(packetBuffer.getData(), /* offset= */ 0, bytesToSearch); lastScrValue = readLastScrValueFromBuffer(packetBuffer); isLastScrValueRead = true; @@ -197,7 +204,7 @@ private long readLastScrValueFromBuffer(ParsableByteArray packetBuffer) { for (int searchPosition = searchEndPosition - 4; searchPosition >= 
searchStartPosition; searchPosition--) { - int nextStartCode = peekIntAtPosition(packetBuffer.data, searchPosition); + int nextStartCode = peekIntAtPosition(packetBuffer.getData(), searchPosition); if (nextStartCode == PsExtractor.PACK_START_CODE) { packetBuffer.setPosition(searchPosition + 4); long scrValue = readScrValueFromPack(packetBuffer); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PsExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PsExtractor.java index fec108fd5f..4611a25825 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PsExtractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/PsExtractor.java @@ -16,6 +16,7 @@ package com.google.android.exoplayer2.extractor.ts; import android.util.SparseArray; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.extractor.Extractor; @@ -25,14 +26,15 @@ import com.google.android.exoplayer2.extractor.PositionHolder; import com.google.android.exoplayer2.extractor.SeekMap; import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.ParsableBitArray; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.TimestampAdjuster; import java.io.IOException; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * Extracts data from the MPEG-2 PS container format. - */ +/** Extracts data from the MPEG-2 PS container format. */ public final class PsExtractor implements Extractor { /** Factory for {@link PsExtractor} instances. */ @@ -67,8 +69,8 @@ public final class PsExtractor implements Extractor { private long lastTrackPosition; // Accessed only by the loading thread. - private PsBinarySearchSeeker psBinarySearchSeeker; - private ExtractorOutput output; + @Nullable private PsBinarySearchSeeker psBinarySearchSeeker; + private @MonotonicNonNull ExtractorOutput output; private boolean hasOutputSeekMap; public PsExtractor() { @@ -85,13 +87,16 @@ public PsExtractor(TimestampAdjuster timestampAdjuster) { // Extractor implementation. 
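// A minimal sketch of how the duration readers above (PsDurationReader, and TsDurationReader later
// in this patch) derive a stream duration: adjust the first and last SCR/PCR values through a
// TimestampAdjuster and fall back to TIME_UNSET when the difference is negative, mirroring the
// newly added guard. The computeDurationUs helper name is an assumption for illustration.
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.TimestampAdjuster;

final class DurationSketch {
  static long computeDurationUs(TimestampAdjuster adjuster, long firstScrValue, long lastScrValue) {
    if (firstScrValue == C.TIME_UNSET || lastScrValue == C.TIME_UNSET) {
      return C.TIME_UNSET; // Not enough information to compute a duration.
    }
    long minPositionUs = adjuster.adjustTsTimestamp(firstScrValue);
    long maxPositionUs = adjuster.adjustTsTimestamp(lastScrValue);
    long durationUs = maxPositionUs - minPositionUs;
    // Never report a negative duration (e.g. after a timestamp wrap-around).
    return durationUs < 0 ? C.TIME_UNSET : durationUs;
  }
}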
@Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { + public boolean sniff(ExtractorInput input) throws IOException { byte[] scratch = new byte[14]; input.peekFully(scratch, 0, 14); // Verify the PACK_START_CODE for the first 4 bytes - if (PACK_START_CODE != (((scratch[0] & 0xFF) << 24) | ((scratch[1] & 0xFF) << 16) - | ((scratch[2] & 0xFF) << 8) | (scratch[3] & 0xFF))) { + if (PACK_START_CODE + != (((scratch[0] & 0xFF) << 24) + | ((scratch[1] & 0xFF) << 16) + | ((scratch[2] & 0xFF) << 8) + | (scratch[3] & 0xFF))) { return false; } // Verify the 01xxx1xx marker on the 5th byte @@ -119,8 +124,8 @@ public boolean sniff(ExtractorInput input) throws IOException, InterruptedExcept input.advancePeekPosition(packStuffingLength); // Now check that the next 3 bytes are the beginning of an MPEG start code input.peekFully(scratch, 0, 3); - return (PACKET_START_CODE_PREFIX == (((scratch[0] & 0xFF) << 16) | ((scratch[1] & 0xFF) << 8) - | (scratch[2] & 0xFF))); + return (PACKET_START_CODE_PREFIX + == (((scratch[0] & 0xFF) << 16) | ((scratch[1] & 0xFF) << 8) | (scratch[2] & 0xFF))); } @Override @@ -130,18 +135,24 @@ public void init(ExtractorOutput output) { @Override public void seek(long position, long timeUs) { - boolean hasNotEncounteredFirstTimestamp = - timestampAdjuster.getTimestampOffsetUs() == C.TIME_UNSET; - if (hasNotEncounteredFirstTimestamp - || (timestampAdjuster.getFirstSampleTimestampUs() != 0 - && timestampAdjuster.getFirstSampleTimestampUs() != timeUs)) { - // - If the timestamp adjuster in the PS stream has not encountered any sample, it's going to - // treat the first timestamp encountered as sample time 0, which is incorrect. In this case, - // we have to set the first sample timestamp manually. - // - If the timestamp adjuster has its timestamp set manually before, and now we seek to a - // different position, we need to set the first sample timestamp manually again. - timestampAdjuster.reset(); - timestampAdjuster.setFirstSampleTimestampUs(timeUs); + // If the timestamp adjuster has not yet established a timestamp offset, we need to reset its + // expected first sample timestamp to be the new seek position. Without this, the timestamp + // adjuster would incorrectly establish its timestamp offset assuming that the first sample + // after this seek corresponds to the start of the stream (or a previous seek position, if there + // was one). + boolean resetTimestampAdjuster = timestampAdjuster.getTimestampOffsetUs() == C.TIME_UNSET; + if (!resetTimestampAdjuster) { + long adjusterFirstSampleTimestampUs = timestampAdjuster.getFirstSampleTimestampUs(); + // Also reset the timestamp adjuster if its offset was calculated based on a non-zero position + // in the stream (other than the position being seeked to), since in this case the offset may + // not be accurate. + resetTimestampAdjuster = + adjusterFirstSampleTimestampUs != C.TIME_UNSET + && adjusterFirstSampleTimestampUs != 0 + && adjusterFirstSampleTimestampUs != timeUs; + } + if (resetTimestampAdjuster) { + timestampAdjuster.reset(timeUs); } if (psBinarySearchSeeker != null) { @@ -158,8 +169,8 @@ public void release() { } @Override - public int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { + Assertions.checkStateNotNull(output); // Asserts init has been called. 
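// A condensed sketch of the seek() decision rewritten above: the timestamp adjuster is reset to
// the seek position when it has no offset yet, or when its first sample timestamp was established
// for a different, non-zero position. The shouldResetAdjuster helper name is an assumption;
// PsExtractor.seek(position, timeUs) then calls timestampAdjuster.reset(timeUs) when this is true.
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.TimestampAdjuster;

final class SeekResetSketch {
  static boolean shouldResetAdjuster(TimestampAdjuster adjuster, long seekTimeUs) {
    if (adjuster.getTimestampOffsetUs() == C.TIME_UNSET) {
      return true; // No offset established yet; the first sample after the seek defines it.
    }
    long firstSampleTimestampUs = adjuster.getFirstSampleTimestampUs();
    return firstSampleTimestampUs != C.TIME_UNSET
        && firstSampleTimestampUs != 0
        && firstSampleTimestampUs != seekTimeUs;
  }
}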
long inputLength = input.getLength(); boolean canReadDuration = inputLength != C.LENGTH_UNSET; @@ -178,7 +189,7 @@ public int read(ExtractorInput input, PositionHolder seekPosition) return RESULT_END_OF_INPUT; } // First peek and check what type of start code is next. - if (!input.peekFully(psPacketBuffer.data, 0, 4, true)) { + if (!input.peekFully(psPacketBuffer.getData(), 0, 4, true)) { return RESULT_END_OF_INPUT; } @@ -188,7 +199,7 @@ public int read(ExtractorInput input, PositionHolder seekPosition) return RESULT_END_OF_INPUT; } else if (nextStartCode == PACK_START_CODE) { // Now peek the rest of the pack_header. - input.peekFully(psPacketBuffer.data, 0, 10); + input.peekFully(psPacketBuffer.getData(), 0, 10); // We only care about the pack_stuffing_length in here, skip the first 77 bits. psPacketBuffer.setPosition(9); @@ -201,7 +212,7 @@ public int read(ExtractorInput input, PositionHolder seekPosition) return RESULT_CONTINUE; } else if (nextStartCode == SYSTEM_HEADER_START_CODE) { // We just skip all this, but we need to get the length first. - input.peekFully(psPacketBuffer.data, 0, 2); + input.peekFully(psPacketBuffer.getData(), 0, 2); // Length is the next 2 bytes. psPacketBuffer.setPosition(0); @@ -209,7 +220,7 @@ public int read(ExtractorInput input, PositionHolder seekPosition) input.skipFully(systemHeaderLength + 6); return RESULT_CONTINUE; } else if (((nextStartCode & 0xFFFFFF00) >> 8) != PACKET_START_CODE_PREFIX) { - input.skipFully(1); // Skip bytes until we see a valid start code again. + input.skipFully(1); // Skip bytes until we see a valid start code again. return RESULT_CONTINUE; } @@ -221,7 +232,7 @@ public int read(ExtractorInput input, PositionHolder seekPosition) PesReader payloadReader = psPayloadReaders.get(streamId); if (!foundAllTracks) { if (payloadReader == null) { - ElementaryStreamReader elementaryStreamReader = null; + @Nullable ElementaryStreamReader elementaryStreamReader = null; if (streamId == PRIVATE_STREAM_1) { // Private stream, used for AC3 audio. // NOTE: This may need further parsing to determine if its DTS, but that's likely only @@ -256,7 +267,7 @@ public int read(ExtractorInput input, PositionHolder seekPosition) } // The next 2 bytes are the length. Once we have that we can consume the complete packet. - input.peekFully(psPacketBuffer.data, 0, 2); + input.peekFully(psPacketBuffer.getData(), 0, 2); psPacketBuffer.setPosition(0); int payloadLength = psPacketBuffer.readUnsignedShort(); int pesLength = payloadLength + 6; @@ -267,7 +278,7 @@ public int read(ExtractorInput input, PositionHolder seekPosition) } else { psPacketBuffer.reset(pesLength); // Read the whole packet and the header for consumption. - input.readFully(psPacketBuffer.data, 0, pesLength); + input.readFully(psPacketBuffer.getData(), 0, pesLength); psPacketBuffer.setPosition(6); payloadReader.consume(psPacketBuffer); psPacketBuffer.setLimit(psPacketBuffer.capacity()); @@ -278,6 +289,7 @@ public int read(ExtractorInput input, PositionHolder seekPosition) // Internals. + @RequiresNonNull("output") private void maybeOutputSeekMap(long inputLength) { if (!hasOutputSeekMap) { hasOutputSeekMap = true; @@ -294,9 +306,7 @@ private void maybeOutputSeekMap(long inputLength) { } } - /** - * Parses PES packet data and extracts samples. - */ + /** Parses PES packet data and extracts samples. 
*/ private static final class PesReader { private static final int PES_SCRATCH_SIZE = 64; @@ -319,10 +329,10 @@ public PesReader(ElementaryStreamReader pesPayloadReader, TimestampAdjuster time /** * Notifies the reader that a seek has occurred. - *
<p>
-     * Following a call to this method, the data passed to the next invocation of
-     * {@link #consume(ParsableByteArray)} will not be a continuation of the data that was
-     * previously passed. Hence the reader should reset any internal state.
+     *
+     * <p>
      Following a call to this method, the data passed to the next invocation of {@link + * #consume(ParsableByteArray)} will not be a continuation of the data that was previously + * passed. Hence the reader should reset any internal state. */ public void seek() { seenFirstDts = false; @@ -391,7 +401,5 @@ private void parseHeaderExtension() { timeUs = timestampAdjuster.adjustTsTimestamp(pts); } } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/SectionPayloadReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/SectionPayloadReader.java index d6e6eadf3f..40e15fa08d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/SectionPayloadReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/SectionPayloadReader.java @@ -21,9 +21,7 @@ import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.TimestampAdjuster; -/** - * Reads section data. - */ +/** Reads section data. */ public interface SectionPayloadReader { /** @@ -34,7 +32,9 @@ public interface SectionPayloadReader { * @param idGenerator A {@link PesReader.TrackIdGenerator} that generates unique track ids for the * {@link TrackOutput}s. */ - void init(TimestampAdjuster timestampAdjuster, ExtractorOutput extractorOutput, + void init( + TimestampAdjuster timestampAdjuster, + ExtractorOutput extractorOutput, TrackIdGenerator idGenerator); /** @@ -45,5 +45,4 @@ void init(TimestampAdjuster timestampAdjuster, ExtractorOutput extractorOutput, * Otherwise, all bytes belonging to the table section are included. */ void consume(ParsableByteArray sectionData); - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/SectionReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/SectionReader.java index bc590c9d4c..4a73799c07 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/SectionReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/SectionReader.java @@ -15,6 +15,9 @@ */ package com.google.android.exoplayer2.extractor.ts; +import static java.lang.Math.max; +import static java.lang.Math.min; + import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.util.ParsableByteArray; @@ -45,7 +48,9 @@ public SectionReader(SectionPayloadReader reader) { } @Override - public void init(TimestampAdjuster timestampAdjuster, ExtractorOutput extractorOutput, + public void init( + TimestampAdjuster timestampAdjuster, + ExtractorOutput extractorOutput, TrackIdGenerator idGenerator) { reader.init(timestampAdjuster, extractorOutput, idGenerator); waitingForPayloadStart = true; @@ -87,11 +92,14 @@ public void consume(ParsableByteArray data, @Flags int flags) { return; } } - int headerBytesToRead = Math.min(data.bytesLeft(), SECTION_HEADER_LENGTH - bytesRead); - data.readBytes(sectionData.data, bytesRead, headerBytesToRead); + int headerBytesToRead = min(data.bytesLeft(), SECTION_HEADER_LENGTH - bytesRead); + // sectionData is guaranteed to have enough space because it's initialized with a 32-element + // backing array and headerBytesToRead is at most 3. 
+ data.readBytes(sectionData.getData(), bytesRead, headerBytesToRead); bytesRead += headerBytesToRead; if (bytesRead == SECTION_HEADER_LENGTH) { - sectionData.reset(SECTION_HEADER_LENGTH); + sectionData.setPosition(0); + sectionData.setLimit(SECTION_HEADER_LENGTH); sectionData.skipBytes(1); // Skip table id (8). int secondHeaderByte = sectionData.readUnsignedByte(); int thirdHeaderByte = sectionData.readUnsignedByte(); @@ -100,35 +108,35 @@ public void consume(ParsableByteArray data, @Flags int flags) { (((secondHeaderByte & 0x0F) << 8) | thirdHeaderByte) + SECTION_HEADER_LENGTH; if (sectionData.capacity() < totalSectionLength) { // Ensure there is enough space to keep the whole section. - byte[] bytes = sectionData.data; - sectionData.reset( - Math.min(MAX_SECTION_LENGTH, Math.max(totalSectionLength, bytes.length * 2))); - System.arraycopy(bytes, 0, sectionData.data, 0, SECTION_HEADER_LENGTH); + int limit = + min(MAX_SECTION_LENGTH, max(totalSectionLength, sectionData.capacity() * 2)); + sectionData.ensureCapacity(limit); } } } else { // Reading the body. - int bodyBytesToRead = Math.min(data.bytesLeft(), totalSectionLength - bytesRead); - data.readBytes(sectionData.data, bytesRead, bodyBytesToRead); + int bodyBytesToRead = min(data.bytesLeft(), totalSectionLength - bytesRead); + // sectionData has been sized large enough for totalSectionLength when reading the header. + data.readBytes(sectionData.getData(), bytesRead, bodyBytesToRead); bytesRead += bodyBytesToRead; if (bytesRead == totalSectionLength) { if (sectionSyntaxIndicator) { // This section has common syntax as defined in ISO/IEC 13818-1, section 2.4.4.11. - if (Util.crc32(sectionData.data, 0, totalSectionLength, 0xFFFFFFFF) != 0) { + if (Util.crc32(sectionData.getData(), 0, totalSectionLength, 0xFFFFFFFF) != 0) { // The CRC is invalid so discard the section. waitingForPayloadStart = true; return; } - sectionData.reset(totalSectionLength - 4); // Exclude the CRC_32 field. + sectionData.setLimit(totalSectionLength - 4); // Exclude the CRC_32 field. } else { // This is a private section with private defined syntax. - sectionData.reset(totalSectionLength); + sectionData.setLimit(totalSectionLength); } + sectionData.setPosition(0); reader.consume(sectionData); bytesRead = 0; } } } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/SeiReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/SeiReader.java index d032ef5883..b8435d7d7f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/SeiReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/SeiReader.java @@ -15,18 +15,19 @@ */ package com.google.android.exoplayer2.extractor.ts; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.extractor.CeaUtil; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator; -import com.google.android.exoplayer2.text.cea.CeaUtil; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; import java.util.List; -/** Consumes SEI buffers, outputting contained CEA-608 messages to a {@link TrackOutput}. 
*/ +/** Consumes SEI buffers, outputting contained CEA-608/708 messages to a {@link TrackOutput}. */ public final class SeiReader { private final List closedCaptionFormats; @@ -45,23 +46,21 @@ public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGen idGenerator.generateNewId(); TrackOutput output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_TEXT); Format channelFormat = closedCaptionFormats.get(i); - String channelMimeType = channelFormat.sampleMimeType; - Assertions.checkArgument(MimeTypes.APPLICATION_CEA608.equals(channelMimeType) - || MimeTypes.APPLICATION_CEA708.equals(channelMimeType), + @Nullable String channelMimeType = channelFormat.sampleMimeType; + Assertions.checkArgument( + MimeTypes.APPLICATION_CEA608.equals(channelMimeType) + || MimeTypes.APPLICATION_CEA708.equals(channelMimeType), "Invalid closed caption mime type provided: " + channelMimeType); String formatId = channelFormat.id != null ? channelFormat.id : idGenerator.getFormatId(); output.format( - Format.createTextSampleFormat( - formatId, - channelMimeType, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - channelFormat.selectionFlags, - channelFormat.language, - channelFormat.accessibilityChannel, - /* drmInitData= */ null, - Format.OFFSET_SAMPLE_RELATIVE, - channelFormat.initializationData)); + new Format.Builder() + .setId(formatId) + .setSampleMimeType(channelMimeType) + .setSelectionFlags(channelFormat.selectionFlags) + .setLanguage(channelFormat.language) + .setAccessibilityChannel(channelFormat.accessibilityChannel) + .setInitializationData(channelFormat.initializationData) + .build()); outputs[i] = output; } } @@ -69,5 +68,4 @@ public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGen public void consume(long pesTimeUs, ParsableByteArray seiBuffer) { CeaUtil.consume(pesTimeUs, seiBuffer, outputs); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/SpliceInfoSectionReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/SpliceInfoSectionReader.java deleted file mode 100644 index 27838d4c25..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/SpliceInfoSectionReader.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.extractor.ts; - -import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.extractor.ExtractorOutput; -import com.google.android.exoplayer2.extractor.TrackOutput; -import com.google.android.exoplayer2.util.MimeTypes; -import com.google.android.exoplayer2.util.ParsableByteArray; -import com.google.android.exoplayer2.util.TimestampAdjuster; - -/** - * Parses splice info sections as defined by SCTE35. 
- */ -public final class SpliceInfoSectionReader implements SectionPayloadReader { - - private TimestampAdjuster timestampAdjuster; - private TrackOutput output; - private boolean formatDeclared; - - @Override - public void init(TimestampAdjuster timestampAdjuster, ExtractorOutput extractorOutput, - TsPayloadReader.TrackIdGenerator idGenerator) { - this.timestampAdjuster = timestampAdjuster; - idGenerator.generateNewId(); - output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_METADATA); - output.format(Format.createSampleFormat(idGenerator.getFormatId(), MimeTypes.APPLICATION_SCTE35, - null, Format.NO_VALUE, null)); - } - - @Override - public void consume(ParsableByteArray sectionData) { - if (!formatDeclared) { - if (timestampAdjuster.getTimestampOffsetUs() == C.TIME_UNSET) { - // There is not enough information to initialize the timestamp adjuster. - return; - } - output.format(Format.createSampleFormat(null, MimeTypes.APPLICATION_SCTE35, - timestampAdjuster.getTimestampOffsetUs())); - formatDeclared = true; - } - int sampleSize = sectionData.bytesLeft(); - output.sampleData(sectionData, sampleSize); - output.sampleMetadata(timestampAdjuster.getLastAdjustedTimestampUs(), C.BUFFER_FLAG_KEY_FRAME, - sampleSize, 0, null); - } - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsBinarySearchSeeker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsBinarySearchSeeker.java index a627c00ba2..fa9792079c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsBinarySearchSeeker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsBinarySearchSeeker.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.extractor.ts; +import static java.lang.Math.min; + import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.extractor.BinarySearchSeeker; import com.google.android.exoplayer2.extractor.ExtractorInput; @@ -35,13 +37,16 @@ private static final long SEEK_TOLERANCE_US = 100_000; private static final int MINIMUM_SEARCH_RANGE_BYTES = 5 * TsExtractor.TS_PACKET_SIZE; - private static final int TIMESTAMP_SEARCH_BYTES = 600 * TsExtractor.TS_PACKET_SIZE; public TsBinarySearchSeeker( - TimestampAdjuster pcrTimestampAdjuster, long streamDurationUs, long inputLength, int pcrPid) { + TimestampAdjuster pcrTimestampAdjuster, + long streamDurationUs, + long inputLength, + int pcrPid, + int timestampSearchBytes) { super( new DefaultSeekTimestampConverter(), - new TsPcrSeeker(pcrPid, pcrTimestampAdjuster), + new TsPcrSeeker(pcrPid, pcrTimestampAdjuster, timestampSearchBytes), streamDurationUs, /* floorTimePosition= */ 0, /* ceilingTimePosition= */ streamDurationUs + 1, @@ -56,7 +61,7 @@ public TsBinarySearchSeeker( * position in a TS stream. * *
<p>
      Given a PCR timestamp, and a position within a TS stream, this seeker will peek up to {@link - * #TIMESTAMP_SEARCH_BYTES} from that stream position, look for all packets with PID equal to + * #timestampSearchBytes} from that stream position, look for all packets with PID equal to * PCR_PID, and then compare the PCR timestamps (if available) of these packets to the target * timestamp. */ @@ -65,21 +70,24 @@ private static final class TsPcrSeeker implements TimestampSeeker { private final TimestampAdjuster pcrTimestampAdjuster; private final ParsableByteArray packetBuffer; private final int pcrPid; + private final int timestampSearchBytes; - public TsPcrSeeker(int pcrPid, TimestampAdjuster pcrTimestampAdjuster) { + public TsPcrSeeker( + int pcrPid, TimestampAdjuster pcrTimestampAdjuster, int timestampSearchBytes) { this.pcrPid = pcrPid; this.pcrTimestampAdjuster = pcrTimestampAdjuster; + this.timestampSearchBytes = timestampSearchBytes; packetBuffer = new ParsableByteArray(); } @Override public TimestampSearchResult searchForTimestamp(ExtractorInput input, long targetTimestamp) - throws IOException, InterruptedException { + throws IOException { long inputPosition = input.getPosition(); - int bytesToSearch = (int) Math.min(TIMESTAMP_SEARCH_BYTES, input.getLength() - inputPosition); + int bytesToSearch = (int) min(timestampSearchBytes, input.getLength() - inputPosition); packetBuffer.reset(bytesToSearch); - input.peekFully(packetBuffer.data, /* offset= */ 0, bytesToSearch); + input.peekFully(packetBuffer.getData(), /* offset= */ 0, bytesToSearch); return searchForPcrValueInBuffer(packetBuffer, targetTimestamp, inputPosition); } @@ -94,7 +102,7 @@ private TimestampSearchResult searchForPcrValueInBuffer( while (packetBuffer.bytesLeft() >= TsExtractor.TS_PACKET_SIZE) { int startOfPacket = - TsUtil.findSyncBytePosition(packetBuffer.data, packetBuffer.getPosition(), limit); + TsUtil.findSyncBytePosition(packetBuffer.getData(), packetBuffer.getPosition(), limit); int endOfPacket = startOfPacket + TsExtractor.TS_PACKET_SIZE; if (endOfPacket > limit) { break; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsDurationReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsDurationReader.java index 804a643414..5934be9653 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsDurationReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsDurationReader.java @@ -15,10 +15,13 @@ */ package com.google.android.exoplayer2.extractor.ts; +import static java.lang.Math.min; + import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.PositionHolder; +import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.TimestampAdjuster; import com.google.android.exoplayer2.util.Util; @@ -36,8 +39,9 @@ */ /* package */ final class TsDurationReader { - private static final int TIMESTAMP_SEARCH_BYTES = 600 * TsExtractor.TS_PACKET_SIZE; + private static final String TAG = "TsDurationReader"; + private final int timestampSearchBytes; private final TimestampAdjuster pcrTimestampAdjuster; private final ParsableByteArray packetBuffer; @@ -49,7 +53,8 @@ private long lastPcrValue; private long durationUs; - /* package */ TsDurationReader() { + /* package */ 
TsDurationReader(int timestampSearchBytes) { + this.timestampSearchBytes = timestampSearchBytes; pcrTimestampAdjuster = new TimestampAdjuster(/* firstSampleTimestampUs= */ 0); firstPcrValue = C.TIME_UNSET; lastPcrValue = C.TIME_UNSET; @@ -74,11 +79,9 @@ public boolean isDurationReadFinished() { * @param pcrPid The PID of the packet stream within this TS stream that contains PCR values. * @return One of the {@code RESULT_} values defined in {@link Extractor}. * @throws IOException If an error occurred reading from the input. - * @throws InterruptedException If the thread was interrupted. */ public @Extractor.ReadResult int readDuration( - ExtractorInput input, PositionHolder seekPositionHolder, int pcrPid) - throws IOException, InterruptedException { + ExtractorInput input, PositionHolder seekPositionHolder, int pcrPid) throws IOException { if (pcrPid <= 0) { return finishReadDuration(input); } @@ -98,6 +101,10 @@ public boolean isDurationReadFinished() { long minPcrPositionUs = pcrTimestampAdjuster.adjustTsTimestamp(firstPcrValue); long maxPcrPositionUs = pcrTimestampAdjuster.adjustTsTimestamp(lastPcrValue); durationUs = maxPcrPositionUs - minPcrPositionUs; + if (durationUs < 0) { + Log.w(TAG, "Invalid duration: " + durationUs + ". Using TIME_UNSET instead."); + durationUs = C.TIME_UNSET; + } return finishReadDuration(input); } @@ -124,8 +131,8 @@ private int finishReadDuration(ExtractorInput input) { } private int readFirstPcrValue(ExtractorInput input, PositionHolder seekPositionHolder, int pcrPid) - throws IOException, InterruptedException { - int bytesToSearch = (int) Math.min(TIMESTAMP_SEARCH_BYTES, input.getLength()); + throws IOException { + int bytesToSearch = (int) min(timestampSearchBytes, input.getLength()); int searchStartPosition = 0; if (input.getPosition() != searchStartPosition) { seekPositionHolder.position = searchStartPosition; @@ -134,7 +141,7 @@ private int readFirstPcrValue(ExtractorInput input, PositionHolder seekPositionH packetBuffer.reset(bytesToSearch); input.resetPeekPosition(); - input.peekFully(packetBuffer.data, /* offset= */ 0, bytesToSearch); + input.peekFully(packetBuffer.getData(), /* offset= */ 0, bytesToSearch); firstPcrValue = readFirstPcrValueFromBuffer(packetBuffer, pcrPid); isFirstPcrValueRead = true; @@ -147,7 +154,7 @@ private long readFirstPcrValueFromBuffer(ParsableByteArray packetBuffer, int pcr for (int searchPosition = searchStartPosition; searchPosition < searchEndPosition; searchPosition++) { - if (packetBuffer.data[searchPosition] != TsExtractor.TS_SYNC_BYTE) { + if (packetBuffer.getData()[searchPosition] != TsExtractor.TS_SYNC_BYTE) { continue; } long pcrValue = TsUtil.readPcrFromPacket(packetBuffer, searchPosition, pcrPid); @@ -159,9 +166,9 @@ private long readFirstPcrValueFromBuffer(ParsableByteArray packetBuffer, int pcr } private int readLastPcrValue(ExtractorInput input, PositionHolder seekPositionHolder, int pcrPid) - throws IOException, InterruptedException { + throws IOException { long inputLength = input.getLength(); - int bytesToSearch = (int) Math.min(TIMESTAMP_SEARCH_BYTES, inputLength); + int bytesToSearch = (int) min(timestampSearchBytes, inputLength); long searchStartPosition = inputLength - bytesToSearch; if (input.getPosition() != searchStartPosition) { seekPositionHolder.position = searchStartPosition; @@ -170,7 +177,7 @@ private int readLastPcrValue(ExtractorInput input, PositionHolder seekPositionHo packetBuffer.reset(bytesToSearch); input.resetPeekPosition(); - input.peekFully(packetBuffer.data, /* offset= */ 0, 
bytesToSearch); + input.peekFully(packetBuffer.getData(), /* offset= */ 0, bytesToSearch); lastPcrValue = readLastPcrValueFromBuffer(packetBuffer, pcrPid); isLastPcrValueRead = true; @@ -180,10 +187,13 @@ private int readLastPcrValue(ExtractorInput input, PositionHolder seekPositionHo private long readLastPcrValueFromBuffer(ParsableByteArray packetBuffer, int pcrPid) { int searchStartPosition = packetBuffer.getPosition(); int searchEndPosition = packetBuffer.limit(); - for (int searchPosition = searchEndPosition - 1; + // We start searching 'TsExtractor.TS_PACKET_SIZE' bytes from the end to prevent trying to read + // from an incomplete TS packet. + for (int searchPosition = searchEndPosition - TsExtractor.TS_PACKET_SIZE; searchPosition >= searchStartPosition; searchPosition--) { - if (packetBuffer.data[searchPosition] != TsExtractor.TS_SYNC_BYTE) { + if (!TsUtil.isStartOfTsPacket( + packetBuffer.getData(), searchStartPosition, searchEndPosition, searchPosition)) { continue; } long pcrValue = TsUtil.readPcrFromPacket(packetBuffer, searchPosition, pcrPid); @@ -193,5 +203,4 @@ private long readLastPcrValueFromBuffer(ParsableByteArray packetBuffer, int pcrP } return C.TIME_UNSET; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsExtractor.java index 2bd5b12551..970e8e121a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsExtractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsExtractor.java @@ -16,11 +16,13 @@ package com.google.android.exoplayer2.extractor.ts; import static com.google.android.exoplayer2.extractor.ts.TsPayloadReader.FLAG_PAYLOAD_UNIT_START_INDICATOR; +import static java.lang.annotation.ElementType.TYPE_USE; import android.util.SparseArray; import android.util.SparseBooleanArray; import android.util.SparseIntArray; import androidx.annotation.IntDef; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.extractor.Extractor; @@ -43,14 +45,15 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; +import org.checkerframework.checker.nullness.compatqual.NullableType; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * Extracts data from the MPEG-2 TS container format. - */ +/** Extracts data from the MPEG-2 TS container format. */ public final class TsExtractor implements Extractor { /** Factory for {@link TsExtractor} instances. */ @@ -62,16 +65,13 @@ public final class TsExtractor implements Extractor { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({MODE_MULTI_PMT, MODE_SINGLE_PMT, MODE_HLS}) public @interface Mode {} - /** - * Behave as defined in ISO/IEC 13818-1. - */ + /** Behave as defined in ISO/IEC 13818-1. */ public static final int MODE_MULTI_PMT = 0; - /** - * Assume only one PMT will be contained in the stream, even if more are declared by the PAT. - */ + /** Assume only one PMT will be contained in the stream, even if more are declared by the PAT. 
*/ public static final int MODE_SINGLE_PMT = 1; /** * Enable single PMT mode, map {@link TrackOutput}s by their type (instead of PID) and ignore @@ -79,6 +79,9 @@ public final class TsExtractor implements Extractor { */ public static final int MODE_HLS = 2; + public static final int TS_PACKET_SIZE = 188; + public static final int DEFAULT_TIMESTAMP_SEARCH_BYTES = 600 * TS_PACKET_SIZE; + public static final int TS_STREAM_TYPE_MPA = 0x03; public static final int TS_STREAM_TYPE_MPA_LSF = 0x04; public static final int TS_STREAM_TYPE_AAC_ADTS = 0x0F; @@ -89,13 +92,17 @@ public final class TsExtractor implements Extractor { public static final int TS_STREAM_TYPE_E_AC3 = 0x87; public static final int TS_STREAM_TYPE_AC4 = 0xAC; // DVB/ATSC AC-4 Descriptor public static final int TS_STREAM_TYPE_H262 = 0x02; + public static final int TS_STREAM_TYPE_H263 = 0x10; // MPEG-4 Part 2 and H.263 public static final int TS_STREAM_TYPE_H264 = 0x1B; public static final int TS_STREAM_TYPE_H265 = 0x24; public static final int TS_STREAM_TYPE_ID3 = 0x15; public static final int TS_STREAM_TYPE_SPLICE_INFO = 0x86; public static final int TS_STREAM_TYPE_DVBSUBS = 0x59; - public static final int TS_PACKET_SIZE = 188; + // Stream types that aren't defined by the MPEG-2 TS specification. + public static final int TS_STREAM_TYPE_DC2_H262 = 0x80; + public static final int TS_STREAM_TYPE_AIT = 0x101; + public static final int TS_SYNC_BYTE = 0x47; // First byte of each TS packet. private static final int TS_PAT_PID = 0; @@ -110,6 +117,7 @@ public final class TsExtractor implements Extractor { private static final int SNIFF_TS_PACKET_COUNT = 5; private final @Mode int mode; + private final int timestampSearchBytes; private final List timestampAdjusters; private final ParsableByteArray tsPacketBuffer; private final SparseIntArray continuityCounters; @@ -120,18 +128,18 @@ public final class TsExtractor implements Extractor { private final TsDurationReader durationReader; // Accessed only by the loading thread. - private TsBinarySearchSeeker tsBinarySearchSeeker; + private @MonotonicNonNull TsBinarySearchSeeker tsBinarySearchSeeker; private ExtractorOutput output; private int remainingPmts; private boolean tracksEnded; private boolean hasOutputSeekMap; private boolean pendingSeekToStart; - private TsPayloadReader id3Reader; + @Nullable private TsPayloadReader id3Reader; private int bytesSinceLastSync; private int pcrPid; public TsExtractor() { - this(0); + this(/* defaultTsPayloadReaderFlags= */ 0); } /** @@ -139,7 +147,7 @@ public TsExtractor() { * {@code FLAG_*} values that control the behavior of the payload readers. */ public TsExtractor(@Flags int defaultTsPayloadReaderFlags) { - this(MODE_SINGLE_PMT, defaultTsPayloadReaderFlags); + this(MODE_SINGLE_PMT, defaultTsPayloadReaderFlags, DEFAULT_TIMESTAMP_SEARCH_BYTES); } /** @@ -147,12 +155,22 @@ public TsExtractor(@Flags int defaultTsPayloadReaderFlags) { * and {@link #MODE_HLS}. * @param defaultTsPayloadReaderFlags A combination of {@link DefaultTsPayloadReaderFactory} * {@code FLAG_*} values that control the behavior of the payload readers. + * @param timestampSearchBytes The number of bytes searched from a given position in the stream to + * find a PCR timestamp. If this value is too small, the duration might be unknown and seeking + * might not be supported for high bitrate progressive streams. 
Setting a large value for this + * field might be inefficient though because the extractor stores a buffer of {@code + * timestampSearchBytes} bytes when determining the duration or when performing a seek + * operation. The default value is {@link #DEFAULT_TIMESTAMP_SEARCH_BYTES}. If the number of + * bytes left in the stream from the current position is less than {@code + * timestampSearchBytes}, the search is performed on the bytes left. */ - public TsExtractor(@Mode int mode, @Flags int defaultTsPayloadReaderFlags) { + public TsExtractor( + @Mode int mode, @Flags int defaultTsPayloadReaderFlags, int timestampSearchBytes) { this( mode, new TimestampAdjuster(0), - new DefaultTsPayloadReaderFactory(defaultTsPayloadReaderFlags)); + new DefaultTsPayloadReaderFactory(defaultTsPayloadReaderFlags), + timestampSearchBytes); } /** @@ -165,7 +183,30 @@ public TsExtractor( @Mode int mode, TimestampAdjuster timestampAdjuster, TsPayloadReader.Factory payloadReaderFactory) { + this(mode, timestampAdjuster, payloadReaderFactory, DEFAULT_TIMESTAMP_SEARCH_BYTES); + } + + /** + * @param mode Mode for the extractor. One of {@link #MODE_MULTI_PMT}, {@link #MODE_SINGLE_PMT} + * and {@link #MODE_HLS}. + * @param timestampAdjuster A timestamp adjuster for offsetting and scaling sample timestamps. + * @param payloadReaderFactory Factory for injecting a custom set of payload readers. + * @param timestampSearchBytes The number of bytes searched from a given position in the stream to + * find a PCR timestamp. If this value is too small, the duration might be unknown and seeking + * might not be supported for high bitrate progressive streams. Setting a large value for this + * field might be inefficient though because the extractor stores a buffer of {@code + * timestampSearchBytes} bytes when determining the duration or when performing a seek + * operation. The default value is {@link #DEFAULT_TIMESTAMP_SEARCH_BYTES}. If the number of + * bytes left in the stream from the current position is less than {@code + * timestampSearchBytes}, the search is performed on the bytes left. + */ + public TsExtractor( + @Mode int mode, + TimestampAdjuster timestampAdjuster, + TsPayloadReader.Factory payloadReaderFactory, + int timestampSearchBytes) { this.payloadReaderFactory = Assertions.checkNotNull(payloadReaderFactory); + this.timestampSearchBytes = timestampSearchBytes; this.mode = mode; if (mode == MODE_SINGLE_PMT || mode == MODE_HLS) { timestampAdjusters = Collections.singletonList(timestampAdjuster); @@ -178,7 +219,8 @@ public TsExtractor( trackPids = new SparseBooleanArray(); tsPayloadReaders = new SparseArray<>(); continuityCounters = new SparseIntArray(); - durationReader = new TsDurationReader(); + durationReader = new TsDurationReader(timestampSearchBytes); + output = ExtractorOutput.PLACEHOLDER; pcrPid = -1; resetPayloadReaders(); } @@ -186,8 +228,8 @@ public TsExtractor( // Extractor implementation. @Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { - byte[] buffer = tsPacketBuffer.data; + public boolean sniff(ExtractorInput input) throws IOException { + byte[] buffer = tsPacketBuffer.getData(); input.peekFully(buffer, 0, TS_PACKET_SIZE * SNIFF_TS_PACKET_COUNT); for (int startPosCandidate = 0; startPosCandidate < TS_PACKET_SIZE; startPosCandidate++) { // Try to identify at least SNIFF_TS_PACKET_COUNT packets starting with TS_SYNC_BYTE. 
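// --- Illustrative aside, not part of this patch: a minimal sketch of an ExtractorsFactory that
// uses the new three-argument TsExtractor constructor above to widen the PCR search window.
// The factory name and the 1800-packet figure are hypothetical; the trade-off is the one the
// timestampSearchBytes javadoc describes (more reliable duration/seeking on high-bitrate
// progressive streams versus a larger temporary search buffer).
import com.google.android.exoplayer2.extractor.Extractor;
import com.google.android.exoplayer2.extractor.ExtractorsFactory;
import com.google.android.exoplayer2.extractor.ts.TsExtractor;

final class WideSearchTsExtractorsFactory implements ExtractorsFactory {

  // 1800 packets instead of the default 600 * TS_PACKET_SIZE.
  private static final int TIMESTAMP_SEARCH_BYTES = 1800 * TsExtractor.TS_PACKET_SIZE;

  @Override
  public Extractor[] createExtractors() {
    return new Extractor[] {
      new TsExtractor(
          TsExtractor.MODE_SINGLE_PMT,
          /* defaultTsPayloadReaderFlags= */ 0,
          TIMESTAMP_SEARCH_BYTES)
    };
  }
}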
@@ -217,24 +259,30 @@ public void seek(long position, long timeUs) { int timestampAdjustersCount = timestampAdjusters.size(); for (int i = 0; i < timestampAdjustersCount; i++) { TimestampAdjuster timestampAdjuster = timestampAdjusters.get(i); - boolean hasNotEncounteredFirstTimestamp = - timestampAdjuster.getTimestampOffsetUs() == C.TIME_UNSET; - if (hasNotEncounteredFirstTimestamp - || (timestampAdjuster.getTimestampOffsetUs() != 0 - && timestampAdjuster.getFirstSampleTimestampUs() != timeUs)) { - // - If a track in the TS stream has not encountered any sample, it's going to treat the - // first sample encountered as timestamp 0, which is incorrect. So we have to set the first - // sample timestamp for that track manually. - // - If the timestamp adjuster has its timestamp set manually before, and now we seek to a - // different position, we need to set the first sample timestamp manually again. - timestampAdjuster.reset(); - timestampAdjuster.setFirstSampleTimestampUs(timeUs); + // If the timestamp adjuster has not yet established a timestamp offset, we need to reset its + // expected first sample timestamp to be the new seek position. Without this, the timestamp + // adjuster would incorrectly establish its timestamp offset assuming that the first sample + // after this seek corresponds to the start of the stream (or a previous seek position, if + // there was one). + boolean resetTimestampAdjuster = timestampAdjuster.getTimestampOffsetUs() == C.TIME_UNSET; + if (!resetTimestampAdjuster) { + long adjusterFirstSampleTimestampUs = timestampAdjuster.getFirstSampleTimestampUs(); + // Also reset the timestamp adjuster if its offset was calculated based on a non-zero + // position in the stream (other than the position being seeked to), since in this case the + // offset may not be accurate. + resetTimestampAdjuster = + adjusterFirstSampleTimestampUs != C.TIME_UNSET + && adjusterFirstSampleTimestampUs != 0 + && adjusterFirstSampleTimestampUs != timeUs; + } + if (resetTimestampAdjuster) { + timestampAdjuster.reset(timeUs); } } if (timeUs != 0 && tsBinarySearchSeeker != null) { tsBinarySearchSeeker.setSeekTargetUs(timeUs); } - tsPacketBuffer.reset(); + tsPacketBuffer.reset(/* limit= */ 0); continuityCounters.clear(); for (int i = 0; i < tsPayloadReaders.size(); i++) { tsPayloadReaders.valueAt(i).seek(); @@ -249,7 +297,7 @@ public void release() { @Override public @ReadResult int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + throws IOException { long inputLength = input.getLength(); if (tracksEnded) { boolean canReadDuration = inputLength != C.LENGTH_UNSET && mode != MODE_HLS; @@ -360,7 +408,8 @@ private void maybeOutputSeekMap(long inputLength) { durationReader.getPcrTimestampAdjuster(), durationReader.getDurationUs(), inputLength, - pcrPid); + pcrPid, + timestampSearchBytes); output.seekMap(tsBinarySearchSeeker.getSeekMap()); } else { output.seekMap(new SeekMap.Unseekable(durationReader.getDurationUs())); @@ -368,9 +417,8 @@ private void maybeOutputSeekMap(long inputLength) { } } - private boolean fillBufferWithAtLeastOnePacket(ExtractorInput input) - throws IOException, InterruptedException { - byte[] data = tsPacketBuffer.data; + private boolean fillBufferWithAtLeastOnePacket(ExtractorInput input) throws IOException { + byte[] data = tsPacketBuffer.getData(); // Shift bytes to the start of the buffer if there isn't enough space left at the end. 
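// --- Illustrative aside, not part of this patch: the per-adjuster decision made in seek() above,
// pulled out as a standalone predicate to make the two reset conditions explicit. The method name
// is hypothetical; TimestampAdjuster and C are the classes TsExtractor already imports.
private static boolean shouldResetTimestampAdjusterForSeek(
    TimestampAdjuster timestampAdjuster, long seekTimeUs) {
  if (timestampAdjuster.getTimestampOffsetUs() == C.TIME_UNSET) {
    // No offset established yet: reset so the first sample after the seek maps to seekTimeUs
    // rather than to the start of the stream (or a previous seek position).
    return true;
  }
  long firstSampleTimestampUs = timestampAdjuster.getFirstSampleTimestampUs();
  // An offset derived from a non-zero first sample timestamp other than the seek target may be
  // inaccurate after the seek, so reset in that case as well.
  return firstSampleTimestampUs != C.TIME_UNSET
      && firstSampleTimestampUs != 0
      && firstSampleTimestampUs != seekTimeUs;
}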
if (BUFFER_SIZE - tsPacketBuffer.getPosition() < TS_PACKET_SIZE) { int bytesLeft = tsPacketBuffer.bytesLeft(); @@ -400,7 +448,8 @@ private boolean fillBufferWithAtLeastOnePacket(ExtractorInput input) private int findEndOfFirstTsPacketInBuffer() throws ParserException { int searchStart = tsPacketBuffer.getPosition(); int limit = tsPacketBuffer.limit(); - int syncBytePosition = TsUtil.findSyncBytePosition(tsPacketBuffer.data, searchStart, limit); + int syncBytePosition = + TsUtil.findSyncBytePosition(tsPacketBuffer.getData(), searchStart, limit); // Discard all bytes before the sync byte. // If sync byte is not found, this means discard the whole buffer. tsPacketBuffer.setPosition(syncBytePosition); @@ -408,7 +457,8 @@ private int findEndOfFirstTsPacketInBuffer() throws ParserException { if (endOfPacket > limit) { bytesSinceLastSync += syncBytePosition - searchStart; if (mode == MODE_HLS && bytesSinceLastSync > TS_PACKET_SIZE * 2) { - throw new ParserException("Cannot find sync byte. Most likely not a Transport Stream."); + throw ParserException.createForMalformedContainer( + "Cannot find sync byte. Most likely not a Transport Stream.", /* cause= */ null); } } else { // We have found a packet within the buffer. @@ -436,9 +486,7 @@ private void resetPayloadReaders() { id3Reader = null; } - /** - * Parses Program Association Table data. - */ + /** Parses Program Association Table data. */ private class PatReader implements SectionPayloadReader { private final ParsableBitArray patScratch; @@ -448,7 +496,9 @@ public PatReader() { } @Override - public void init(TimestampAdjuster timestampAdjuster, ExtractorOutput extractorOutput, + public void init( + TimestampAdjuster timestampAdjuster, + ExtractorOutput extractorOutput, TrackIdGenerator idGenerator) { // Do nothing. } @@ -479,25 +529,25 @@ public void consume(ParsableByteArray sectionData) { patScratch.skipBits(13); // network_PID (13) } else { int pid = patScratch.readBits(13); - tsPayloadReaders.put(pid, new SectionReader(new PmtReader(pid))); - remainingPmts++; + if (tsPayloadReaders.get(pid) == null) { + tsPayloadReaders.put(pid, new SectionReader(new PmtReader(pid))); + remainingPmts++; + } } } if (mode != MODE_HLS) { tsPayloadReaders.remove(TS_PAT_PID); } } - } - /** - * Parses Program Map Table. - */ + /** Parses Program Map Table. */ private class PmtReader implements SectionPayloadReader { private static final int TS_PMT_DESC_REGISTRATION = 0x05; private static final int TS_PMT_DESC_ISO639_LANG = 0x0A; private static final int TS_PMT_DESC_AC3 = 0x6A; + private static final int TS_PMT_DESC_AIT = 0x6F; private static final int TS_PMT_DESC_EAC3 = 0x7A; private static final int TS_PMT_DESC_DTS = 0x7B; private static final int TS_PMT_DESC_DVB_EXT = 0x7F; @@ -506,7 +556,7 @@ private class PmtReader implements SectionPayloadReader { private static final int TS_PMT_DESC_DVB_EXT_AC4 = 0x15; private final ParsableBitArray pmtScratch; - private final SparseArray trackIdToReaderScratch; + private final SparseArray<@NullableType TsPayloadReader> trackIdToReaderScratch; private final SparseIntArray trackIdToPidScratch; private final int pid; @@ -518,7 +568,9 @@ public PmtReader(int pid) { } @Override - public void init(TimestampAdjuster timestampAdjuster, ExtractorOutput extractorOutput, + public void init( + TimestampAdjuster timestampAdjuster, + ExtractorOutput extractorOutput, TrackIdGenerator idGenerator) { // Do nothing. 
} @@ -535,8 +587,8 @@ public void consume(ParsableByteArray sectionData) { if (mode == MODE_SINGLE_PMT || mode == MODE_HLS || remainingPmts == 1) { timestampAdjuster = timestampAdjusters.get(0); } else { - timestampAdjuster = new TimestampAdjuster( - timestampAdjusters.get(0).getFirstSampleTimestampUs()); + timestampAdjuster = + new TimestampAdjuster(timestampAdjusters.get(0).getFirstSampleTimestampUs()); timestampAdjusters.add(timestampAdjuster); } @@ -571,10 +623,14 @@ public void consume(ParsableByteArray sectionData) { if (mode == MODE_HLS && id3Reader == null) { // Setup an ID3 track regardless of whether there's a corresponding entry, in case one // appears intermittently during playback. See [Internal: b/20261500]. - EsInfo dummyEsInfo = new EsInfo(TS_STREAM_TYPE_ID3, null, null, Util.EMPTY_BYTE_ARRAY); - id3Reader = payloadReaderFactory.createPayloadReader(TS_STREAM_TYPE_ID3, dummyEsInfo); - id3Reader.init(timestampAdjuster, output, - new TrackIdGenerator(programNumber, TS_STREAM_TYPE_ID3, MAX_PID_PLUS_ONE)); + EsInfo id3EsInfo = new EsInfo(TS_STREAM_TYPE_ID3, null, null, Util.EMPTY_BYTE_ARRAY); + id3Reader = payloadReaderFactory.createPayloadReader(TS_STREAM_TYPE_ID3, id3EsInfo); + if (id3Reader != null) { + id3Reader.init( + timestampAdjuster, + output, + new TrackIdGenerator(programNumber, TS_STREAM_TYPE_ID3, MAX_PID_PLUS_ONE)); + } } trackIdToReaderScratch.clear(); @@ -588,7 +644,7 @@ public void consume(ParsableByteArray sectionData) { pmtScratch.skipBits(4); // reserved int esInfoLength = pmtScratch.readBits(12); // ES_info_length. EsInfo esInfo = readEsInfo(sectionData, esInfoLength); - if (streamType == 0x06) { + if (streamType == 0x06 || streamType == 0x05) { streamType = esInfo.streamType; } remainingEntriesLength -= esInfoLength + 5; @@ -598,8 +654,11 @@ public void consume(ParsableByteArray sectionData) { continue; } - TsPayloadReader reader = mode == MODE_HLS && streamType == TS_STREAM_TYPE_ID3 ? id3Reader - : payloadReaderFactory.createPayloadReader(streamType, esInfo); + @Nullable + TsPayloadReader reader = + mode == MODE_HLS && streamType == TS_STREAM_TYPE_ID3 + ? id3Reader + : payloadReaderFactory.createPayloadReader(streamType, esInfo); if (mode != MODE_HLS || elementaryPid < trackIdToPidScratch.get(trackId, MAX_PID_PLUS_ONE)) { trackIdToPidScratch.put(trackId, elementaryPid); @@ -613,10 +672,12 @@ public void consume(ParsableByteArray sectionData) { int trackPid = trackIdToPidScratch.valueAt(i); trackIds.put(trackId, true); trackPids.put(trackPid, true); - TsPayloadReader reader = trackIdToReaderScratch.valueAt(i); + @Nullable TsPayloadReader reader = trackIdToReaderScratch.valueAt(i); if (reader != null) { if (reader != id3Reader) { - reader.init(timestampAdjuster, output, + reader.init( + timestampAdjuster, + output, new TrackIdGenerator(programNumber, trackId, MAX_PID_PLUS_ONE)); } tsPayloadReaders.put(trackPid, reader); @@ -657,6 +718,10 @@ private EsInfo readEsInfo(ParsableByteArray data, int length) { int descriptorTag = data.readUnsignedByte(); int descriptorLength = data.readUnsignedByte(); int positionOfNextDescriptor = data.getPosition() + descriptorLength; + if (positionOfNextDescriptor > descriptorsEndPosition) { + // Descriptor claims to extend past the end position. Skip it. 
+ break; + } if (descriptorTag == TS_PMT_DESC_REGISTRATION) { // registration_descriptor long formatIdentifier = data.readUnsignedInt(); if (formatIdentifier == AC3_FORMAT_IDENTIFIER) { @@ -692,18 +757,21 @@ private EsInfo readEsInfo(ParsableByteArray data, int length) { int dvbSubtitlingType = data.readUnsignedByte(); byte[] initializationData = new byte[4]; data.readBytes(initializationData, 0, 4); - dvbSubtitleInfos.add(new DvbSubtitleInfo(dvbLanguage, dvbSubtitlingType, - initializationData)); + dvbSubtitleInfos.add( + new DvbSubtitleInfo(dvbLanguage, dvbSubtitlingType, initializationData)); } + } else if (descriptorTag == TS_PMT_DESC_AIT) { + streamType = TS_STREAM_TYPE_AIT; } // Skip unused bytes of current descriptor. data.skipBytes(positionOfNextDescriptor - data.getPosition()); } data.setPosition(descriptorsEndPosition); - return new EsInfo(streamType, language, dvbSubtitleInfos, - Arrays.copyOfRange(data.data, descriptorsStartPosition, descriptorsEndPosition)); + return new EsInfo( + streamType, + language, + dvbSubtitleInfos, + Arrays.copyOfRange(data.getData(), descriptorsStartPosition, descriptorsEndPosition)); } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsPayloadReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsPayloadReader.java index af27235257..4a19c18160 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsPayloadReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsPayloadReader.java @@ -15,8 +15,11 @@ */ package com.google.android.exoplayer2.extractor.ts; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.util.SparseArray; import androidx.annotation.IntDef; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.TrackOutput; @@ -25,23 +28,20 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.Collections; import java.util.List; -/** - * Parses TS packet payload data. - */ +/** Parses TS packet payload data. */ public interface TsPayloadReader { - /** - * Factory of {@link TsPayloadReader} instances. - */ + /** Factory of {@link TsPayloadReader} instances. */ interface Factory { /** * Returns the initial mapping from PIDs to payload readers. - *
<p>
- * This method allows the injection of payload readers for reserved PIDs, excluding PID 0. + * + * <p>
      This method allows the injection of payload readers for reserved PIDs, excluding PID 0. * * @return A {@link SparseArray} that maps PIDs to payload readers. */ @@ -53,31 +53,32 @@ interface Factory { * * @param streamType Stream type value as defined in the PMT entry or associated descriptors. * @param esInfo Information associated to the elementary stream provided in the PMT. - * @return A {@link TsPayloadReader} for the packet stream carried by the provided pid. + * @return A {@link TsPayloadReader} for the packet stream carried by the provided pid, or * {@code null} if the stream is not supported. */ + @Nullable TsPayloadReader createPayloadReader(int streamType, EsInfo esInfo); - } - /** - * Holds information associated with a PMT entry. - */ + /** Holds information associated with a PMT entry. */ final class EsInfo { public final int streamType; - public final String language; + @Nullable public final String language; public final List dvbSubtitleInfos; public final byte[] descriptorBytes; /** - * @param streamType The type of the stream as defined by the - * {@link TsExtractor}{@code .TS_STREAM_TYPE_*}. + * @param streamType The type of the stream as defined by the {@link TsExtractor}{@code + * .TS_STREAM_TYPE_*}. * @param language The language of the stream, as defined by ISO/IEC 13818-1, section 2.6.18. * @param dvbSubtitleInfos Information about DVB subtitles associated to the stream. * @param descriptorBytes The descriptor bytes associated to the stream. */ - public EsInfo(int streamType, String language, List dvbSubtitleInfos, + public EsInfo( + int streamType, + @Nullable String language, + @Nullable List dvbSubtitleInfos, byte[] descriptorBytes) { this.streamType = streamType; this.language = language; @@ -87,7 +88,6 @@ public EsInfo(int streamType, String language, List dvbSubtitle : Collections.unmodifiableList(dvbSubtitleInfos); this.descriptorBytes = descriptorBytes; } - } /** @@ -109,12 +109,9 @@ public DvbSubtitleInfo(String language, int type, byte[] initializationData) { this.type = type; this.initializationData = initializationData; } - } - /** - * Generates track ids for initializing {@link TsPayloadReader}s' {@link TrackOutput}s. - */ + /** Generates track ids for initializing {@link TsPayloadReader}s' {@link TrackOutput}s. */ final class TrackIdGenerator { private static final int ID_UNSET = Integer.MIN_VALUE; @@ -134,6 +131,7 @@ public TrackIdGenerator(int programNumber, int firstTrackId, int trackIdIncremen this.firstTrackId = firstTrackId; this.trackIdIncrement = trackIdIncrement; trackId = ID_UNSET; + formatId = ""; } /** @@ -162,8 +160,7 @@ public int getTrackId() { * called after the first {@link #generateNewId()} call. * * @return The last generated format id, with the format {@code "programNumber/trackId"}. If no - * {@code programNumber} was provided, the {@code trackId} alone is used as - * format id. + * {@code programNumber} was provided, the {@code trackId} alone is used as format id. */ public String getFormatId() { maybeThrowUninitializedError(); @@ -175,14 +172,22 @@ private void maybeThrowUninitializedError() { throw new IllegalStateException("generateNewId() must be called before retrieving ids."); } } - } /** * Contextual flags indicating the presence of indicators in the TS packet or PES packet headers. + * + *

<p>The individual flag values are: + * + * <ul> + *   <li>{@link #FLAG_PAYLOAD_UNIT_START_INDICATOR} + *   <li>{@link #FLAG_RANDOM_ACCESS_INDICATOR} + *   <li>{@link #FLAG_DATA_ALIGNMENT_INDICATOR} + * </ul>
      */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef( flag = true, value = { @@ -209,7 +214,9 @@ private void maybeThrowUninitializedError() { * @param idGenerator A {@link PesReader.TrackIdGenerator} that generates unique track ids for the * {@link TrackOutput}s. */ - void init(TimestampAdjuster timestampAdjuster, ExtractorOutput extractorOutput, + void init( + TimestampAdjuster timestampAdjuster, + ExtractorOutput extractorOutput, TrackIdGenerator idGenerator); /** diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsUtil.java index 2a7a0d25ab..c8e6ec0859 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/TsUtil.java @@ -21,6 +21,38 @@ /** Utilities method for extracting MPEG-TS streams. */ public final class TsUtil { + + /** + * Returns whether a TS packet starts at {@code searchPosition} according to the MPEG-TS + * synchronization recommendations. + * + *
<p>
      ISO/IEC 13818-1:2015 Annex G recommends that 5 sync bytes emulating the start of 5 + * consecutive TS packets should never occur as part of the TS packets' contents. So, this method + * returns true when {@code data} contains a sync byte at {@code searchPosition}, and said sync + * byte is also one of five consecutive sync bytes separated from each other by the size of a TS + * packet. + * + * @param data The array holding the data to search in. + * @param start The first valid position in {@code data} from which a sync byte can be read. + * @param limit The first invalid position in {@code data}, after which no data should be read. + * @param searchPosition The position to check for a TS packet start. + * @return Whether a TS packet starts at {@code searchPosition}. + */ + public static boolean isStartOfTsPacket(byte[] data, int start, int limit, int searchPosition) { + int consecutiveSyncByteCount = 0; + for (int i = -4; i <= 4; i++) { + int currentPosition = searchPosition + i * TsExtractor.TS_PACKET_SIZE; + if (currentPosition < start + || currentPosition >= limit + || data[currentPosition] != TsExtractor.TS_SYNC_BYTE) { + consecutiveSyncByteCount = 0; + } else if (++consecutiveSyncByteCount == 5) { + return true; + } + } + return false; + } + /** * Returns the position of the first TS_SYNC_BYTE within the range [startPosition, limitPosition) * from the provided data array, or returns limitPosition if sync byte could not be found. diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/UserDataReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/UserDataReader.java index 724eba1d9a..a9d1e1ef1d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/UserDataReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/UserDataReader.java @@ -15,11 +15,12 @@ */ package com.google.android.exoplayer2.extractor.ts; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.extractor.CeaUtil; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.TrackOutput; -import com.google.android.exoplayer2.text.cea.CeaUtil; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; @@ -44,23 +45,20 @@ public void createTracks( idGenerator.generateNewId(); TrackOutput output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_TEXT); Format channelFormat = closedCaptionFormats.get(i); - String channelMimeType = channelFormat.sampleMimeType; + @Nullable String channelMimeType = channelFormat.sampleMimeType; Assertions.checkArgument( MimeTypes.APPLICATION_CEA608.equals(channelMimeType) || MimeTypes.APPLICATION_CEA708.equals(channelMimeType), "Invalid closed caption mime type provided: " + channelMimeType); output.format( - Format.createTextSampleFormat( - idGenerator.getFormatId(), - channelMimeType, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - channelFormat.selectionFlags, - channelFormat.language, - channelFormat.accessibilityChannel, - /* drmInitData= */ null, - Format.OFFSET_SAMPLE_RELATIVE, - channelFormat.initializationData)); + new Format.Builder() + .setId(idGenerator.getFormatId()) + .setSampleMimeType(channelMimeType) + .setSelectionFlags(channelFormat.selectionFlags) + 
.setLanguage(channelFormat.language) + .setAccessibilityChannel(channelFormat.accessibilityChannel) + .setInitializationData(channelFormat.initializationData) + .build()); outputs[i] = output; } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/package-info.java new file mode 100644 index 0000000000..78f4551db4 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/ts/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.extractor.ts; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavExtractor.java index cff378d7b6..04ded67a6b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavExtractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavExtractor.java @@ -15,8 +15,13 @@ */ package com.google.android.exoplayer2.extractor.wav; +import static java.lang.Math.max; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.util.Pair; +import androidx.annotation.IntDef; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; @@ -28,20 +33,26 @@ import com.google.android.exoplayer2.extractor.PositionHolder; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; - +import java.io.IOException; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import org.checkerframework.checker.nullness.qual.EnsuresNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; import java.io.IOException; -/** - * Extracts data from WAV byte streams. - */ +/** Extracts data from WAV byte streams. */ public final class WavExtractor implements Extractor { + private static final String TAG = "WavExtractor"; + /** * When outputting PCM data to a {@link TrackOutput}, we can choose how many frames are grouped * into each sample, and hence each sample's duration. This is the target number of samples to @@ -52,20 +63,43 @@ public final class WavExtractor implements Extractor { /** Factory for {@link WavExtractor} instances. 
*/ public static final ExtractorsFactory FACTORY = () -> new Extractor[] {new WavExtractor()}; - @MonotonicNonNull private ExtractorOutput extractorOutput; - @MonotonicNonNull private TrackOutput trackOutput; - @MonotonicNonNull private OutputWriter outputWriter; + /** Parser state. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + STATE_READING_FILE_TYPE, + STATE_READING_RF64_SAMPLE_DATA_SIZE, + STATE_READING_FORMAT, + STATE_SKIPPING_TO_SAMPLE_DATA, + STATE_READING_SAMPLE_DATA + }) + private @interface State {} + + private static final int STATE_READING_FILE_TYPE = 0; + private static final int STATE_READING_RF64_SAMPLE_DATA_SIZE = 1; + private static final int STATE_READING_FORMAT = 2; + private static final int STATE_SKIPPING_TO_SAMPLE_DATA = 3; + private static final int STATE_READING_SAMPLE_DATA = 4; + + private @MonotonicNonNull ExtractorOutput extractorOutput; + private @MonotonicNonNull TrackOutput trackOutput; + private @State int state; + private long rf64SampleDataSize; + private @MonotonicNonNull OutputWriter outputWriter; private int dataStartPosition; private long dataEndPosition; public WavExtractor() { + state = STATE_READING_FILE_TYPE; + rf64SampleDataSize = C.LENGTH_UNSET; dataStartPosition = C.POSITION_UNSET; dataEndPosition = C.POSITION_UNSET; } @Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { - return WavHeaderReader.peek(input) != null; + public boolean sniff(ExtractorInput input) throws IOException { + return WavHeaderReader.checkFileType(input); } @Override @@ -77,6 +111,7 @@ public void init(ExtractorOutput output) { @Override public void seek(long position, long timeUs) { + state = position == 0 ? STATE_READING_FILE_TYPE : STATE_READING_SAMPLE_DATA; if (outputWriter != null) { outputWriter.reset(timeUs); } @@ -88,64 +123,117 @@ public void release() { } @Override - public int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + public @ReadResult int read(ExtractorInput input, PositionHolder seekPosition) + throws IOException { assertInitialized(); - if (outputWriter == null) { - WavHeader header = WavHeaderReader.peek(input); - if (header == null) { - // Should only happen if the media wasn't sniffed. 
- throw new ParserException("Unsupported or unrecognized wav header."); - } + switch (state) { + case STATE_READING_FILE_TYPE: + readFileType(input); + return Extractor.RESULT_CONTINUE; + case STATE_READING_RF64_SAMPLE_DATA_SIZE: + readRf64SampleDataSize(input); + return Extractor.RESULT_CONTINUE; + case STATE_READING_FORMAT: + readFormat(input); + return Extractor.RESULT_CONTINUE; + case STATE_SKIPPING_TO_SAMPLE_DATA: + skipToSampleData(input); + return Extractor.RESULT_CONTINUE; + case STATE_READING_SAMPLE_DATA: + return readSampleData(input); + default: + throw new IllegalStateException(); + } + } - if (header.formatType == WavUtil.TYPE_IMA_ADPCM) { - outputWriter = new ImaAdPcmOutputWriter(extractorOutput, trackOutput, header); - } else if (header.formatType == WavUtil.TYPE_ALAW) { - outputWriter = - new PassthroughOutputWriter( - extractorOutput, - trackOutput, - header, - MimeTypes.AUDIO_ALAW, - /* pcmEncoding= */ Format.NO_VALUE); - } else if (header.formatType == WavUtil.TYPE_MLAW) { - outputWriter = - new PassthroughOutputWriter( - extractorOutput, - trackOutput, - header, - MimeTypes.AUDIO_MLAW, - /* pcmEncoding= */ Format.NO_VALUE); - } else { - @C.PcmEncoding - int pcmEncoding = WavUtil.getPcmEncodingForType(header.formatType, header.bitsPerSample); - if (pcmEncoding == C.ENCODING_INVALID) { - throw new ParserException("Unsupported WAV format type: " + header.formatType); - } - outputWriter = - new PassthroughOutputWriter( - extractorOutput, trackOutput, header, MimeTypes.AUDIO_RAW, pcmEncoding); + @EnsuresNonNull({"extractorOutput", "trackOutput"}) + private void assertInitialized() { + Assertions.checkStateNotNull(trackOutput); + Util.castNonNull(extractorOutput); + } + + private void readFileType(ExtractorInput input) throws IOException { + Assertions.checkState(input.getPosition() == 0); + if (dataStartPosition != C.POSITION_UNSET) { + input.skipFully(dataStartPosition); + state = STATE_READING_SAMPLE_DATA; + return; + } + if (!WavHeaderReader.checkFileType(input)) { + // Should only happen if the media wasn't sniffed. 
+ throw ParserException.createForMalformedContainer( + "Unsupported or unrecognized wav file type.", /* cause= */ null); + } + input.skipFully((int) (input.getPeekPosition() - input.getPosition())); + state = STATE_READING_RF64_SAMPLE_DATA_SIZE; + } + + private void readRf64SampleDataSize(ExtractorInput input) throws IOException { + rf64SampleDataSize = WavHeaderReader.readRf64SampleDataSize(input); + state = STATE_READING_FORMAT; + } + + @RequiresNonNull({"extractorOutput", "trackOutput"}) + private void readFormat(ExtractorInput input) throws IOException { + WavFormat wavFormat = WavHeaderReader.readFormat(input); + if (wavFormat.formatType == WavUtil.TYPE_IMA_ADPCM) { + outputWriter = new ImaAdPcmOutputWriter(extractorOutput, trackOutput, wavFormat); + } else if (wavFormat.formatType == WavUtil.TYPE_ALAW) { + outputWriter = + new PassthroughOutputWriter( + extractorOutput, + trackOutput, + wavFormat, + MimeTypes.AUDIO_ALAW, + /* pcmEncoding= */ Format.NO_VALUE); + } else if (wavFormat.formatType == WavUtil.TYPE_MLAW) { + outputWriter = + new PassthroughOutputWriter( + extractorOutput, + trackOutput, + wavFormat, + MimeTypes.AUDIO_MLAW, + /* pcmEncoding= */ Format.NO_VALUE); + } else { + @C.PcmEncoding + int pcmEncoding = + WavUtil.getPcmEncodingForType(wavFormat.formatType, wavFormat.bitsPerSample); + if (pcmEncoding == C.ENCODING_INVALID) { + throw ParserException.createForUnsupportedContainerFeature( + "Unsupported WAV format type: " + wavFormat.formatType); } + outputWriter = + new PassthroughOutputWriter( + extractorOutput, trackOutput, wavFormat, MimeTypes.AUDIO_RAW, pcmEncoding); } + state = STATE_SKIPPING_TO_SAMPLE_DATA; + } - if (dataStartPosition == C.POSITION_UNSET) { - Pair dataBounds = WavHeaderReader.skipToData(input); - dataStartPosition = dataBounds.first.intValue(); - dataEndPosition = dataBounds.second; - outputWriter.init(dataStartPosition, dataEndPosition); - } else if (input.getPosition() == 0) { - input.skipFully(dataStartPosition); + private void skipToSampleData(ExtractorInput input) throws IOException { + Pair dataBounds = WavHeaderReader.skipToSampleData(input); + dataStartPosition = dataBounds.first.intValue(); + long dataSize = dataBounds.second; + if (rf64SampleDataSize != C.LENGTH_UNSET && dataSize == 0xFFFFFFFFL) { + // Following EBU - Tech 3306-2007, the data size indicated in the ds64 chunk should only be + // used if the size of the data chunk is unset. + dataSize = rf64SampleDataSize; + } + dataEndPosition = dataStartPosition + dataSize; + long inputLength = input.getLength(); + if (inputLength != C.LENGTH_UNSET && dataEndPosition > inputLength) { + Log.w(TAG, "Data exceeds input length: " + dataEndPosition + ", " + inputLength); + dataEndPosition = inputLength; } + Assertions.checkNotNull(outputWriter).init(dataStartPosition, dataEndPosition); + state = STATE_READING_SAMPLE_DATA; + } + private @ReadResult int readSampleData(ExtractorInput input) throws IOException { Assertions.checkState(dataEndPosition != C.POSITION_UNSET); long bytesLeft = dataEndPosition - input.getPosition(); - return outputWriter.sampleData(input, bytesLeft) ? RESULT_END_OF_INPUT : RESULT_CONTINUE; - } - - @EnsuresNonNull({"extractorOutput", "trackOutput"}) - private void assertInitialized() { - Assertions.checkStateNotNull(trackOutput); - Util.castNonNull(extractorOutput); + return Assertions.checkNotNull(outputWriter).sampleData(input, bytesLeft) + ? RESULT_END_OF_INPUT + : RESULT_CONTINUE; } /** Writes to the extractor's output. 
*/ @@ -179,17 +267,15 @@ private interface OutputWriter { * @param bytesLeft The number of sample data bytes left to be read from the input. * @return Whether the end of the sample data has been reached. * @throws IOException If an error occurs reading from the input. - * @throws InterruptedException If the thread has been interrupted. */ - boolean sampleData(ExtractorInput input, long bytesLeft) - throws IOException, InterruptedException; + boolean sampleData(ExtractorInput input, long bytesLeft) throws IOException; } private static final class PassthroughOutputWriter implements OutputWriter { private final ExtractorOutput extractorOutput; private final TrackOutput trackOutput; - private final WavHeader header; + private final WavFormat wavFormat; private final Format format; /** The target size of each output sample, in bytes. */ private final int targetSampleSizeBytes; @@ -211,37 +297,35 @@ private static final class PassthroughOutputWriter implements OutputWriter { public PassthroughOutputWriter( ExtractorOutput extractorOutput, TrackOutput trackOutput, - WavHeader header, + WavFormat wavFormat, String mimeType, @C.PcmEncoding int pcmEncoding) throws ParserException { this.extractorOutput = extractorOutput; this.trackOutput = trackOutput; - this.header = header; - - int bytesPerFrame = header.numChannels * header.bitsPerSample / 8; - // Validate the header. Blocks are expected to correspond to single frames. - if (header.blockSize != bytesPerFrame) { - throw new ParserException( - "Expected block size: " + bytesPerFrame + "; got: " + header.blockSize); + this.wavFormat = wavFormat; + + int bytesPerFrame = wavFormat.numChannels * wavFormat.bitsPerSample / 8; + // Validate the WAV format. Blocks are expected to correspond to single frames. + if (wavFormat.blockSize != bytesPerFrame) { + throw ParserException.createForMalformedContainer( + "Expected block size: " + bytesPerFrame + "; got: " + wavFormat.blockSize, + /* cause= */ null); } + int constantBitrate = wavFormat.frameRateHz * bytesPerFrame * 8; targetSampleSizeBytes = - Math.max(bytesPerFrame, header.frameRateHz * bytesPerFrame / TARGET_SAMPLES_PER_SECOND); + max(bytesPerFrame, wavFormat.frameRateHz * bytesPerFrame / TARGET_SAMPLES_PER_SECOND); format = - Format.createAudioSampleFormat( - /* id= */ null, - mimeType, - /* codecs= */ null, - /* bitrate= */ header.frameRateHz * bytesPerFrame * 8, - /* maxInputSize= */ targetSampleSizeBytes, - header.numChannels, - header.frameRateHz, - pcmEncoding, - /* initializationData= */ null, - /* drmInitData= */ null, - /* selectionFlags= */ 0, - /* language= */ null); + new Format.Builder() + .setSampleMimeType(mimeType) + .setAverageBitrate(constantBitrate) + .setPeakBitrate(constantBitrate) + .setMaxInputSize(targetSampleSizeBytes) + .setChannelCount(wavFormat.numChannels) + .setSampleRate(wavFormat.frameRateHz) + .setPcmEncoding(pcmEncoding) + .build(); } @Override @@ -254,16 +338,15 @@ public void reset(long timeUs) { @Override public void init(int dataStartPosition, long dataEndPosition) { extractorOutput.seekMap( - new WavSeekMap(header, /* framesPerBlock= */ 1, dataStartPosition, dataEndPosition)); + new WavSeekMap(wavFormat, /* framesPerBlock= */ 1, dataStartPosition, dataEndPosition)); trackOutput.format(format); } @Override - public boolean sampleData(ExtractorInput input, long bytesLeft) - throws IOException, InterruptedException { + public boolean sampleData(ExtractorInput input, long bytesLeft) throws IOException { // Write sample data until we've reached the target sample size, 
or the end of the data. while (bytesLeft > 0 && pendingOutputBytes < targetSampleSizeBytes) { - int bytesToRead = (int) Math.min(targetSampleSizeBytes - pendingOutputBytes, bytesLeft); + int bytesToRead = (int) min(targetSampleSizeBytes - pendingOutputBytes, bytesLeft); int bytesAppended = trackOutput.sampleData(input, bytesToRead, true); if (bytesAppended == RESULT_END_OF_INPUT) { bytesLeft = 0; @@ -276,17 +359,17 @@ public boolean sampleData(ExtractorInput input, long bytesLeft) // Write the corresponding sample metadata. Samples must be a whole number of frames. It's // possible that the number of pending output bytes is not a whole number of frames if the // stream ended unexpectedly. - int bytesPerFrame = header.blockSize; + int bytesPerFrame = wavFormat.blockSize; int pendingFrames = pendingOutputBytes / bytesPerFrame; if (pendingFrames > 0) { long timeUs = startTimeUs + Util.scaleLargeTimestamp( - outputFrameCount, C.MICROS_PER_SECOND, header.frameRateHz); + outputFrameCount, C.MICROS_PER_SECOND, wavFormat.frameRateHz); int size = pendingFrames * bytesPerFrame; int offset = pendingOutputBytes - size; trackOutput.sampleMetadata( - timeUs, C.BUFFER_FLAG_KEY_FRAME, size, offset, /* encryptionData= */ null); + timeUs, C.BUFFER_FLAG_KEY_FRAME, size, offset, /* cryptoData= */ null); outputFrameCount += pendingFrames; pendingOutputBytes = offset; } @@ -312,7 +395,7 @@ private static final class ImaAdPcmOutputWriter implements OutputWriter { private final ExtractorOutput extractorOutput; private final TrackOutput trackOutput; - private final WavHeader header; + private final WavFormat wavFormat; /** Number of frames per block of the input (yet to be decoded) data. */ private final int framesPerBlock; @@ -344,53 +427,53 @@ private static final class ImaAdPcmOutputWriter implements OutputWriter { private long outputFrameCount; public ImaAdPcmOutputWriter( - ExtractorOutput extractorOutput, TrackOutput trackOutput, WavHeader header) + ExtractorOutput extractorOutput, TrackOutput trackOutput, WavFormat wavFormat) throws ParserException { this.extractorOutput = extractorOutput; this.trackOutput = trackOutput; - this.header = header; - targetSampleSizeFrames = Math.max(1, header.frameRateHz / TARGET_SAMPLES_PER_SECOND); + this.wavFormat = wavFormat; + targetSampleSizeFrames = max(1, wavFormat.frameRateHz / TARGET_SAMPLES_PER_SECOND); - ParsableByteArray scratch = new ParsableByteArray(header.extraData); + ParsableByteArray scratch = new ParsableByteArray(wavFormat.extraData); scratch.readLittleEndianUnsignedShort(); framesPerBlock = scratch.readLittleEndianUnsignedShort(); - int numChannels = header.numChannels; - // Validate the header. This calculation is defined in "Microsoft Multimedia Standards Update + int numChannels = wavFormat.numChannels; + // Validate the WAV format. This calculation is defined in "Microsoft Multimedia Standards + // Update // - New Multimedia Types and Data Techniques" (1994). See the "IMA ADPCM Wave Type" and "DVI // ADPCM Wave Type" sections, and the calculation of wSamplesPerBlock in the latter. 
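// --- Worked example with hypothetical numbers, not taken from this patch: the wSamplesPerBlock
// calculation referenced in the comment above, for a typical mono IMA ADPCM stream. The value
// carried in the fmt chunk's extra data must match this or the constructor below throws.
int blockSize = 1024;   // wavFormat.blockSize, bytes per data block
int numChannels = 1;    // wavFormat.numChannels, mono
int bitsPerSample = 4;  // wavFormat.bitsPerSample, standard IMA ADPCM
int expectedFramesPerBlock =
    (((blockSize - 4 * numChannels) * 8) / (bitsPerSample * numChannels)) + 1; // = 2041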
int expectedFramesPerBlock = - (((header.blockSize - (4 * numChannels)) * 8) / (header.bitsPerSample * numChannels)) + 1; + (((wavFormat.blockSize - (4 * numChannels)) * 8) + / (wavFormat.bitsPerSample * numChannels)) + + 1; if (framesPerBlock != expectedFramesPerBlock) { - throw new ParserException( - "Expected frames per block: " + expectedFramesPerBlock + "; got: " + framesPerBlock); + throw ParserException.createForMalformedContainer( + "Expected frames per block: " + expectedFramesPerBlock + "; got: " + framesPerBlock, + /* cause= */ null); } // Calculate the number of blocks we'll need to decode to obtain an output sample of the // target sample size, and allocate suitably sized buffers for input and decoded data. int maxBlocksToDecode = Util.ceilDivide(targetSampleSizeFrames, framesPerBlock); - inputData = new byte[maxBlocksToDecode * header.blockSize]; + inputData = new byte[maxBlocksToDecode * wavFormat.blockSize]; decodedData = new ParsableByteArray( maxBlocksToDecode * numOutputFramesToBytes(framesPerBlock, numChannels)); // Create the format. We calculate the bitrate of the data before decoding, since this is the // bitrate of the stream itself. - int bitrate = header.frameRateHz * header.blockSize * 8 / framesPerBlock; + int constantBitrate = wavFormat.frameRateHz * wavFormat.blockSize * 8 / framesPerBlock; format = - Format.createAudioSampleFormat( - /* id= */ null, - MimeTypes.AUDIO_RAW, - /* codecs= */ null, - bitrate, - /* maxInputSize= */ numOutputFramesToBytes(targetSampleSizeFrames, numChannels), - header.numChannels, - header.frameRateHz, - C.ENCODING_PCM_16BIT, - /* initializationData= */ null, - /* drmInitData= */ null, - /* selectionFlags= */ 0, - /* language= */ null); + new Format.Builder() + .setSampleMimeType(MimeTypes.AUDIO_RAW) + .setAverageBitrate(constantBitrate) + .setPeakBitrate(constantBitrate) + .setMaxInputSize(numOutputFramesToBytes(targetSampleSizeFrames, numChannels)) + .setChannelCount(wavFormat.numChannels) + .setSampleRate(wavFormat.frameRateHz) + .setPcmEncoding(C.ENCODING_PCM_16BIT) + .build(); } @Override @@ -404,25 +487,24 @@ public void reset(long timeUs) { @Override public void init(int dataStartPosition, long dataEndPosition) { extractorOutput.seekMap( - new WavSeekMap(header, framesPerBlock, dataStartPosition, dataEndPosition)); + new WavSeekMap(wavFormat, framesPerBlock, dataStartPosition, dataEndPosition)); trackOutput.format(format); } @Override - public boolean sampleData(ExtractorInput input, long bytesLeft) - throws IOException, InterruptedException { + public boolean sampleData(ExtractorInput input, long bytesLeft) throws IOException { // Calculate the number of additional frames that we need on the output side to complete a // sample of the target size. int targetFramesRemaining = targetSampleSizeFrames - numOutputBytesToFrames(pendingOutputBytes); // Calculate the whole number of blocks that we need to decode to obtain this many frames. int blocksToDecode = Util.ceilDivide(targetFramesRemaining, framesPerBlock); - int targetReadBytes = blocksToDecode * header.blockSize; + int targetReadBytes = blocksToDecode * wavFormat.blockSize; // Read input data until we've reached the target number of blocks, or the end of the data. 
boolean endOfSampleData = bytesLeft == 0; while (!endOfSampleData && pendingInputBytes < targetReadBytes) { - int bytesToRead = (int) Math.min(targetReadBytes - pendingInputBytes, bytesLeft); + int bytesToRead = (int) min(targetReadBytes - pendingInputBytes, bytesLeft); int bytesAppended = input.read(inputData, pendingInputBytes, bytesToRead); if (bytesAppended == RESULT_END_OF_INPUT) { endOfSampleData = true; @@ -431,11 +513,11 @@ public boolean sampleData(ExtractorInput input, long bytesLeft) } } - int pendingBlockCount = pendingInputBytes / header.blockSize; + int pendingBlockCount = pendingInputBytes / wavFormat.blockSize; if (pendingBlockCount > 0) { // We have at least one whole block to decode. decode(inputData, pendingBlockCount, decodedData); - pendingInputBytes -= pendingBlockCount * header.blockSize; + pendingInputBytes -= pendingBlockCount * wavFormat.blockSize; // Write all of the decoded data to the track output. int decodedDataSize = decodedData.limit(); @@ -463,11 +545,12 @@ public boolean sampleData(ExtractorInput input, long bytesLeft) private void writeSampleMetadata(int sampleFrames) { long timeUs = startTimeUs - + Util.scaleLargeTimestamp(outputFrameCount, C.MICROS_PER_SECOND, header.frameRateHz); + + Util.scaleLargeTimestamp( + outputFrameCount, C.MICROS_PER_SECOND, wavFormat.frameRateHz); int size = numOutputFramesToBytes(sampleFrames); int offset = pendingOutputBytes - size; trackOutput.sampleMetadata( - timeUs, C.BUFFER_FLAG_KEY_FRAME, size, offset, /* encryptionData= */ null); + timeUs, C.BUFFER_FLAG_KEY_FRAME, size, offset, /* cryptoData= */ null); outputFrameCount += sampleFrames; pendingOutputBytes -= size; } @@ -481,18 +564,19 @@ private void writeSampleMetadata(int sampleFrames) { */ private void decode(byte[] input, int blockCount, ParsableByteArray output) { for (int blockIndex = 0; blockIndex < blockCount; blockIndex++) { - for (int channelIndex = 0; channelIndex < header.numChannels; channelIndex++) { - decodeBlockForChannel(input, blockIndex, channelIndex, output.data); + for (int channelIndex = 0; channelIndex < wavFormat.numChannels; channelIndex++) { + decodeBlockForChannel(input, blockIndex, channelIndex, output.getData()); } } int decodedDataSize = numOutputFramesToBytes(framesPerBlock * blockCount); - output.reset(decodedDataSize); + output.setPosition(0); + output.setLimit(decodedDataSize); } private void decodeBlockForChannel( byte[] input, int blockIndex, int channelIndex, byte[] output) { - int blockSize = header.blockSize; - int numChannels = header.numChannels; + int blockSize = wavFormat.blockSize; + int numChannels = wavFormat.numChannels; // The input data consists for a four byte header [Ci] for each of the N channels, followed // by interleaved data segments [Ci-DATAj], each of which are four bytes long. @@ -510,7 +594,7 @@ private void decodeBlockForChannel( // treated as -2^15 rather than 2^15. int predictedSample = (short) (((input[headerStartIndex + 1] & 0xFF) << 8) | (input[headerStartIndex] & 0xFF)); - int stepIndex = Math.min(input[headerStartIndex + 2] & 0xFF, 88); + int stepIndex = min(input[headerStartIndex + 2] & 0xFF, 88); int step = STEP_TABLE[stepIndex]; // Output the initial 16 bit PCM sample from the header. 
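// --- Aside, textbook IMA ADPCM for orientation only: after the 4-byte per-channel header above
// yields the initial predictedSample and stepIndex, each 4-bit nibble is expanded roughly as
// follows. This is the standard algorithm as a hedged sketch, not a verbatim copy of the elided
// body of decodeBlockForChannel. INDEX_TABLE is the standard 16-entry index-adjustment table;
// STEP_TABLE is the 89-entry step table this class already references.
private static final int[] INDEX_TABLE = {
  -1, -1, -1, -1, 2, 4, 6, 8, -1, -1, -1, -1, 2, 4, 6, 8
};

// state[0] = predictedSample, state[1] = stepIndex; returns the decoded 16-bit PCM sample.
private static int decodeNibble(int nibble, int[] state) {
  int step = STEP_TABLE[state[1]];
  int delta = step >> 3;
  if ((nibble & 1) != 0) delta += step >> 2;
  if ((nibble & 2) != 0) delta += step >> 1;
  if ((nibble & 4) != 0) delta += step;
  int sample = (nibble & 8) != 0 ? state[0] - delta : state[0] + delta;
  state[0] = Math.max(-32768, Math.min(32767, sample));
  state[1] = Math.min(Math.max(state[1] + INDEX_TABLE[nibble], 0), 88);
  return state[0];
}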
@@ -553,11 +637,11 @@ private void decodeBlockForChannel( } private int numOutputBytesToFrames(int bytes) { - return bytes / (2 * header.numChannels); + return bytes / (2 * wavFormat.numChannels); } private int numOutputFramesToBytes(int frames) { - return numOutputFramesToBytes(frames, header.numChannels); + return numOutputFramesToBytes(frames, wavFormat.numChannels); } private static int numOutputFramesToBytes(int frames, int numChannels) { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavFormat.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavFormat.java new file mode 100644 index 0000000000..ca9e1d8dd7 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavFormat.java @@ -0,0 +1,55 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.extractor.wav; + +/** Format information for a WAV file. */ +/* package */ final class WavFormat { + + /** + * The format type. Standard format types are the "WAVE form Registration Number" constants + * defined in RFC 2361 Appendix A. + */ + public final int formatType; + /** The number of channels. */ + public final int numChannels; + /** The sample rate in Hertz. */ + public final int frameRateHz; + /** The average bytes per second for the sample data. */ + public final int averageBytesPerSecond; + /** The block size in bytes. */ + public final int blockSize; + /** Bits per sample for a single channel. */ + public final int bitsPerSample; + /** Extra data appended to the format chunk. */ + public final byte[] extraData; + + public WavFormat( + int formatType, + int numChannels, + int frameRateHz, + int averageBytesPerSecond, + int blockSize, + int bitsPerSample, + byte[] extraData) { + this.formatType = formatType; + this.numChannels = numChannels; + this.frameRateHz = frameRateHz; + this.averageBytesPerSecond = averageBytesPerSecond; + this.blockSize = blockSize; + this.bitsPerSample = bitsPerSample; + this.extraData = extraData; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavHeader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavHeader.java deleted file mode 100644 index ca34e32cc0..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavHeader.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.extractor.wav; - -/** Header for a WAV file. */ -/* package */ final class WavHeader { - - /** - * The format type. Standard format types are the "WAVE form Registration Number" constants - * defined in RFC 2361 Appendix A. - */ - public final int formatType; - /** The number of channels. */ - public final int numChannels; - /** The sample rate in Hertz. */ - public final int frameRateHz; - /** The average bytes per second for the sample data. */ - public final int averageBytesPerSecond; - /** The block size in bytes. */ - public final int blockSize; - /** Bits per sample for a single channel. */ - public final int bitsPerSample; - /** Extra data appended to the format chunk of the header. */ - public final byte[] extraData; - - public WavHeader( - int formatType, - int numChannels, - int frameRateHz, - int averageBytesPerSecond, - int blockSize, - int bitsPerSample, - byte[] extraData) { - this.formatType = formatType; - this.numChannels = numChannels; - this.frameRateHz = frameRateHz; - this.averageBytesPerSecond = averageBytesPerSecond; - this.blockSize = blockSize; - this.bitsPerSample = bitsPerSample; - this.extraData = extraData; - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavHeaderReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavHeaderReader.java index b2cdda7f9d..03cbb9645a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavHeaderReader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavHeaderReader.java @@ -16,7 +16,6 @@ package com.google.android.exoplayer2.extractor.wav; import android.util.Pair; -import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.audio.WavUtil; @@ -27,51 +26,79 @@ import com.google.android.exoplayer2.util.Util; import java.io.IOException; -/** Reads a {@code WavHeader} from an input stream; supports resuming from input failures. */ +/** Reads a WAV header from an input stream; supports resuming from input failures. */ /* package */ final class WavHeaderReader { private static final String TAG = "WavHeaderReader"; /** - * Peeks and returns a {@code WavHeader}. + * Returns whether the given {@code input} starts with a RIFF or RF64 chunk header, followed by a + * WAVE tag. * - * @param input Input stream to peek the WAV header from. - * @throws ParserException If the input file is an incorrect RIFF WAV. + * @param input The input stream to peek from. The position should point to the start of the + * stream. + * @return Whether the given {@code input} starts with a RIFF or RF64 chunk header, followed by a + * WAVE tag. * @throws IOException If peeking from the input fails. - * @throws InterruptedException If interrupted while peeking from input. - * @return A new {@code WavHeader} peeked from {@code input}, or null if the input is not a - * supported WAV format. */ - @Nullable - public static WavHeader peek(ExtractorInput input) throws IOException, InterruptedException { - Assertions.checkNotNull(input); - - // Allocate a scratch buffer large enough to store the format chunk. - ParsableByteArray scratch = new ParsableByteArray(16); - - // Attempt to read the RIFF chunk. 
+ public static boolean checkFileType(ExtractorInput input) throws IOException { + ParsableByteArray scratch = new ParsableByteArray(ChunkHeader.SIZE_IN_BYTES); ChunkHeader chunkHeader = ChunkHeader.peek(input, scratch); - if (chunkHeader.id != WavUtil.RIFF_FOURCC) { - return null; + if (chunkHeader.id != WavUtil.RIFF_FOURCC && chunkHeader.id != WavUtil.RF64_FOURCC) { + return false; } - input.peekFully(scratch.data, 0, 4); + input.peekFully(scratch.getData(), 0, 4); scratch.setPosition(0); - int riffFormat = scratch.readInt(); - if (riffFormat != WavUtil.WAVE_FOURCC) { - Log.e(TAG, "Unsupported RIFF format: " + riffFormat); - return null; + int formType = scratch.readInt(); + if (formType != WavUtil.WAVE_FOURCC) { + Log.e(TAG, "Unsupported form type: " + formType); + return false; } - // Skip chunks until we find the format chunk. - chunkHeader = ChunkHeader.peek(input, scratch); - while (chunkHeader.id != WavUtil.FMT_FOURCC) { - input.advancePeekPosition((int) chunkHeader.size); - chunkHeader = ChunkHeader.peek(input, scratch); + return true; + } + + /** + * Reads the ds64 chunk defined in EBU - TECH 3306-2007, if present. If there is no such chunk, + * the input's position is left unchanged. + * + * @param input Input stream to read from. The position should point to the byte following the + * WAVE tag. + * @throws IOException If reading from the input fails. + * @return The value of the data size field in the ds64 chunk, or {@link C#LENGTH_UNSET} if there + * is no such chunk. + */ + public static long readRf64SampleDataSize(ExtractorInput input) throws IOException { + ParsableByteArray scratch = new ParsableByteArray(ChunkHeader.SIZE_IN_BYTES); + ChunkHeader chunkHeader = ChunkHeader.peek(input, scratch); + if (chunkHeader.id != WavUtil.DS64_FOURCC) { + input.resetPeekPosition(); + return C.LENGTH_UNSET; } + input.advancePeekPosition(8); // RIFF size + scratch.setPosition(0); + input.peekFully(scratch.getData(), 0, 8); + long sampleDataSize = scratch.readLittleEndianLong(); + input.skipFully(ChunkHeader.SIZE_IN_BYTES + (int) chunkHeader.size); + return sampleDataSize; + } + /** + * Reads and returns a {@code WavFormat}. + * + * @param input Input stream to read the WAV format from. The position should point to the byte + * following the ds64 chunk if present, or to the byte following the WAVE tag otherwise. + * @throws IOException If reading from the input fails. + * @return A new {@code WavFormat} read from {@code input}. + */ + public static WavFormat readFormat(ExtractorInput input) throws IOException { + // Allocate a scratch buffer large enough to store the format chunk. + ParsableByteArray scratch = new ParsableByteArray(16); + // Skip chunks until we find the format chunk. 
+ ChunkHeader chunkHeader = skipToChunk(/* chunkId= */ WavUtil.FMT_FOURCC, input, scratch); Assertions.checkState(chunkHeader.size >= 16); - input.peekFully(scratch.data, 0, 16); + input.peekFully(scratch.getData(), 0, 16); scratch.setPosition(0); int audioFormatType = scratch.readLittleEndianUnsignedShort(); int numChannels = scratch.readLittleEndianUnsignedShort(); @@ -89,7 +116,8 @@ public static WavHeader peek(ExtractorInput input) throws IOException, Interrupt extraData = Util.EMPTY_BYTE_ARRAY; } - return new WavHeader( + input.skipFully((int) (input.getPeekPosition() - input.getPosition())); + return new WavFormat( audioFormatType, numChannels, frameRateHz, @@ -100,52 +128,58 @@ public static WavHeader peek(ExtractorInput input) throws IOException, Interrupt } /** - * Skips to the data in the given WAV input stream, and returns its bounds. After calling, the - * input stream's position will point to the start of sample data in the WAV. If an exception is - * thrown, the input position will be left pointing to a chunk header. + * Skips to the data in the given WAV input stream, and returns its start position and size. After + * calling, the input stream's position will point to the start of sample data in the WAV. If an + * exception is thrown, the input position will be left pointing to a chunk header (that may not + * be the data chunk header). * * @param input The input stream, whose read position must be pointing to a valid chunk header. - * @return The byte positions at which the data starts (inclusive) and ends (exclusive). + * @return The byte positions at which the data starts (inclusive) and the size of the data, in + * bytes. * @throws ParserException If an error occurs parsing chunks. * @throws IOException If reading from the input fails. - * @throws InterruptedException If interrupted while reading from input. */ - public static Pair skipToData(ExtractorInput input) - throws IOException, InterruptedException { - Assertions.checkNotNull(input); - + public static Pair skipToSampleData(ExtractorInput input) throws IOException { // Make sure the peek position is set to the read position before we peek the first header. input.resetPeekPosition(); ParsableByteArray scratch = new ParsableByteArray(ChunkHeader.SIZE_IN_BYTES); - // Skip all chunks until we hit the data header. + // Skip all chunks until we find the data header. + ChunkHeader chunkHeader = skipToChunk(/* chunkId= */ WavUtil.DATA_FOURCC, input, scratch); + // Skip past the "data" header. + input.skipFully(ChunkHeader.SIZE_IN_BYTES); + + long dataStartPosition = input.getPosition(); + return Pair.create(dataStartPosition, chunkHeader.size); + } + + /** + * Skips to the chunk header corresponding to the {@code chunkId} provided. After calling, the + * input stream's position will point to the chunk header with provided {@code chunkId} and the + * peek position to the chunk body. If an exception is thrown, the input position will be left + * pointing to a chunk header (that may not be the one corresponding to the {@code chunkId}). + * + * @param chunkId The ID of the chunk to skip to. + * @param input The input stream, whose read position must be pointing to a valid chunk header. + * @param scratch A scratch buffer to read the chunk headers. + * @return The {@link ChunkHeader} corresponding to the {@code chunkId} provided. + * @throws ParserException If an error occurs parsing chunks. + * @throws IOException If reading from the input fails. 
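Taken together, the new helpers replace the old single-pass peek(): the extractor is expected to drive them in a fixed order once the RIFF/WAVE header has been consumed. A hedged sketch of that order (the readHeader wrapper and its single-call shape are illustrative; the real extractor spreads these calls across separate read() states):

```java
// Illustrative fragment, assumed to live in the same package as WavHeaderReader.
private void readHeader(ExtractorInput input) throws IOException {
  // RF64 files carry the real sample data size in a ds64 chunk; C.LENGTH_UNSET otherwise.
  long rf64SampleDataSize = WavHeaderReader.readRf64SampleDataSize(input);
  // Parse the fmt chunk into the new immutable WavFormat value object.
  WavFormat wavFormat = WavHeaderReader.readFormat(input);
  // Skip to the data chunk; the pair holds its start position and its size in bytes.
  Pair<Long, Long> dataBounds = WavHeaderReader.skipToSampleData(input);
  long dataStartPosition = dataBounds.first;
  long dataSize =
      rf64SampleDataSize != C.LENGTH_UNSET ? rf64SampleDataSize : dataBounds.second;
  // ... build the output format and seek map from wavFormat, dataStartPosition and dataSize.
}
```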
+ */ + private static ChunkHeader skipToChunk( + int chunkId, ExtractorInput input, ParsableByteArray scratch) throws IOException { ChunkHeader chunkHeader = ChunkHeader.peek(input, scratch); - while (chunkHeader.id != WavUtil.DATA_FOURCC) { - if (chunkHeader.id != WavUtil.RIFF_FOURCC && chunkHeader.id != WavUtil.FMT_FOURCC) { - Log.w(TAG, "Ignoring unknown WAV chunk: " + chunkHeader.id); - } + while (chunkHeader.id != chunkId) { + Log.w(TAG, "Ignoring unknown WAV chunk: " + chunkHeader.id); long bytesToSkip = ChunkHeader.SIZE_IN_BYTES + chunkHeader.size; - // Override size of RIFF chunk, since it describes its size as the entire file. - if (chunkHeader.id == WavUtil.RIFF_FOURCC) { - bytesToSkip = ChunkHeader.SIZE_IN_BYTES + 4; - } if (bytesToSkip > Integer.MAX_VALUE) { - throw new ParserException("Chunk is too large (~2GB+) to skip; id: " + chunkHeader.id); + throw ParserException.createForUnsupportedContainerFeature( + "Chunk is too large (~2GB+) to skip; id: " + chunkHeader.id); } input.skipFully((int) bytesToSkip); chunkHeader = ChunkHeader.peek(input, scratch); } - // Skip past the "data" header. - input.skipFully(ChunkHeader.SIZE_IN_BYTES); - - long dataStartPosition = input.getPosition(); - long dataEndPosition = dataStartPosition + chunkHeader.size; - long inputLength = input.getLength(); - if (inputLength != C.LENGTH_UNSET && dataEndPosition > inputLength) { - Log.w(TAG, "Data exceeds input length: " + dataEndPosition + ", " + inputLength); - dataEndPosition = inputLength; - } - return Pair.create(dataStartPosition, dataEndPosition); + return chunkHeader; } private WavHeaderReader() { @@ -174,12 +208,11 @@ private ChunkHeader(int id, long size) { * @param input Input stream to peek the chunk header from. * @param scratch Buffer for temporary use. * @throws IOException If peeking from the input fails. - * @throws InterruptedException If interrupted while peeking from input. * @return A new {@code ChunkHeader} peeked from {@code input}. 
*/ public static ChunkHeader peek(ExtractorInput input, ParsableByteArray scratch) - throws IOException, InterruptedException { - input.peekFully(scratch.data, /* offset= */ 0, /* length= */ SIZE_IN_BYTES); + throws IOException { + input.peekFully(scratch.getData(), /* offset= */ 0, /* length= */ SIZE_IN_BYTES); scratch.setPosition(0); int id = scratch.readInt(); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavSeekMap.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavSeekMap.java index 2a92c38431..1d5c8fdae1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavSeekMap.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/WavSeekMap.java @@ -22,18 +22,18 @@ /* package */ final class WavSeekMap implements SeekMap { - private final WavHeader wavHeader; + private final WavFormat wavFormat; private final int framesPerBlock; private final long firstBlockPosition; private final long blockCount; private final long durationUs; public WavSeekMap( - WavHeader wavHeader, int framesPerBlock, long dataStartPosition, long dataEndPosition) { - this.wavHeader = wavHeader; + WavFormat wavFormat, int framesPerBlock, long dataStartPosition, long dataEndPosition) { + this.wavFormat = wavFormat; this.framesPerBlock = framesPerBlock; this.firstBlockPosition = dataStartPosition; - this.blockCount = (dataEndPosition - dataStartPosition) / wavHeader.blockSize; + this.blockCount = (dataEndPosition - dataStartPosition) / wavFormat.blockSize; durationUs = blockIndexToTimeUs(blockCount); } @@ -50,17 +50,17 @@ public long getDurationUs() { @Override public SeekPoints getSeekPoints(long timeUs) { // Calculate the containing block index, constraining to valid indices. 
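As a sanity check on the block-index arithmetic used just below, a small self-contained calculation with made-up parameters (the 8 kHz rate, 512 frames per block, 256-byte blocks and 44-byte data offset are assumptions, not values from this diff); Util.scaleLargeTimestamp is replaced by plain long division, which is equivalent here because the products stay well within a long:

```java
public final class WavSeekMathCheck {
  public static void main(String[] args) {
    long microsPerSecond = 1_000_000L;
    int frameRateHz = 8_000;       // assumed sample rate
    int framesPerBlock = 512;      // assumed frames per block
    int blockSize = 256;           // assumed block size in bytes
    long firstBlockPosition = 44;  // assumed start of sample data
    long timeUs = 2_000_000L;      // seek target: 2 seconds

    // Same formulas as WavSeekMap.getSeekPoints / blockIndexToTimeUs.
    long blockIndex = (timeUs * frameRateHz) / (microsPerSecond * framesPerBlock);
    long seekPosition = firstBlockPosition + blockIndex * blockSize;
    long seekTimeUs = blockIndex * framesPerBlock * microsPerSecond / frameRateHz;

    // 2 s at 8 kHz is 16000 frames -> block 31 -> byte 44 + 31 * 256 = 7980, at 1.984 s.
    System.out.println(blockIndex + " " + seekPosition + " " + seekTimeUs);
  }
}
```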
- long blockIndex = (timeUs * wavHeader.frameRateHz) / (C.MICROS_PER_SECOND * framesPerBlock); + long blockIndex = (timeUs * wavFormat.frameRateHz) / (C.MICROS_PER_SECOND * framesPerBlock); blockIndex = Util.constrainValue(blockIndex, 0, blockCount - 1); - long seekPosition = firstBlockPosition + (blockIndex * wavHeader.blockSize); + long seekPosition = firstBlockPosition + (blockIndex * wavFormat.blockSize); long seekTimeUs = blockIndexToTimeUs(blockIndex); SeekPoint seekPoint = new SeekPoint(seekTimeUs, seekPosition); if (seekTimeUs >= timeUs || blockIndex == blockCount - 1) { return new SeekPoints(seekPoint); } else { long secondBlockIndex = blockIndex + 1; - long secondSeekPosition = firstBlockPosition + (secondBlockIndex * wavHeader.blockSize); + long secondSeekPosition = firstBlockPosition + (secondBlockIndex * wavFormat.blockSize); long secondSeekTimeUs = blockIndexToTimeUs(secondBlockIndex); SeekPoint secondSeekPoint = new SeekPoint(secondSeekTimeUs, secondSeekPosition); return new SeekPoints(seekPoint, secondSeekPoint); @@ -69,6 +69,6 @@ public SeekPoints getSeekPoints(long timeUs) { private long blockIndexToTimeUs(long blockIndex) { return Util.scaleLargeTimestamp( - blockIndex * framesPerBlock, C.MICROS_PER_SECOND, wavHeader.frameRateHz); + blockIndex * framesPerBlock, C.MICROS_PER_SECOND, wavFormat.frameRateHz); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/package-info.java new file mode 100644 index 0000000000..4769ea693a --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/wav/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.extractor.wav; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/AsynchronousMediaCodecAdapter.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/AsynchronousMediaCodecAdapter.java new file mode 100644 index 0000000000..fe69f3f124 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/AsynchronousMediaCodecAdapter.java @@ -0,0 +1,326 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.android.exoplayer2.mediacodec; + +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.media.MediaCodec; +import android.media.MediaCrypto; +import android.media.MediaFormat; +import android.os.Bundle; +import android.os.Handler; +import android.os.HandlerThread; +import android.os.PersistableBundle; +import android.view.Surface; +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import androidx.annotation.VisibleForTesting; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.decoder.CryptoInfo; +import com.google.android.exoplayer2.util.TraceUtil; +import com.google.common.base.Supplier; +import java.io.IOException; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.nio.ByteBuffer; + +/** + * A {@link MediaCodecAdapter} that operates the underlying {@link MediaCodec} in asynchronous mode, + * routes {@link MediaCodec.Callback} callbacks on a dedicated thread that is managed internally, + * and queues input buffers asynchronously. + */ +@RequiresApi(23) +/* package */ final class AsynchronousMediaCodecAdapter implements MediaCodecAdapter { + + /** A factory for {@link AsynchronousMediaCodecAdapter} instances. */ + public static final class Factory implements MediaCodecAdapter.Factory { + private final Supplier callbackThreadSupplier; + private final Supplier queueingThreadSupplier; + private final boolean synchronizeCodecInteractionsWithQueueing; + + /** + * Creates an factory for {@link AsynchronousMediaCodecAdapter} instances. + * + * @param trackType One of {@link C#TRACK_TYPE_AUDIO} or {@link C#TRACK_TYPE_VIDEO}. Used for + * labelling the internal thread accordingly. + * @param synchronizeCodecInteractionsWithQueueing Whether the adapter should synchronize {@link + * MediaCodec} interactions with asynchronous buffer queueing. When {@code true}, codec + * interactions will wait until all input buffers pending queueing wil be submitted to the + * {@link MediaCodec}. 
+ */ + public Factory(@C.TrackType int trackType, boolean synchronizeCodecInteractionsWithQueueing) { + this( + /* callbackThreadSupplier= */ () -> + new HandlerThread(createCallbackThreadLabel(trackType)), + /* queueingThreadSupplier= */ () -> + new HandlerThread(createQueueingThreadLabel(trackType)), + synchronizeCodecInteractionsWithQueueing); + } + + @VisibleForTesting + /* package */ Factory( + Supplier callbackThreadSupplier, + Supplier queueingThreadSupplier, + boolean synchronizeCodecInteractionsWithQueueing) { + this.callbackThreadSupplier = callbackThreadSupplier; + this.queueingThreadSupplier = queueingThreadSupplier; + this.synchronizeCodecInteractionsWithQueueing = synchronizeCodecInteractionsWithQueueing; + } + + @Override + public AsynchronousMediaCodecAdapter createAdapter(Configuration configuration) + throws IOException { + String codecName = configuration.codecInfo.name; + @Nullable AsynchronousMediaCodecAdapter codecAdapter = null; + @Nullable MediaCodec codec = null; + try { + TraceUtil.beginSection("createCodec:" + codecName); + codec = MediaCodec.createByCodecName(codecName); + codecAdapter = + new AsynchronousMediaCodecAdapter( + codec, + callbackThreadSupplier.get(), + queueingThreadSupplier.get(), + synchronizeCodecInteractionsWithQueueing); + TraceUtil.endSection(); + codecAdapter.initialize( + configuration.mediaFormat, + configuration.surface, + configuration.crypto, + configuration.flags); + return codecAdapter; + } catch (Exception e) { + if (codecAdapter != null) { + codecAdapter.release(); + } else if (codec != null) { + codec.release(); + } + throw e; + } + } + } + + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({STATE_CREATED, STATE_INITIALIZED, STATE_SHUT_DOWN}) + private @interface State {} + + private static final int STATE_CREATED = 0; + private static final int STATE_INITIALIZED = 1; + private static final int STATE_SHUT_DOWN = 2; + + private final MediaCodec codec; + private final AsynchronousMediaCodecCallback asynchronousMediaCodecCallback; + private final AsynchronousMediaCodecBufferEnqueuer bufferEnqueuer; + private final boolean synchronizeCodecInteractionsWithQueueing; + private boolean codecReleased; + private @State int state; + + private AsynchronousMediaCodecAdapter( + MediaCodec codec, + HandlerThread callbackThread, + HandlerThread enqueueingThread, + boolean synchronizeCodecInteractionsWithQueueing) { + this.codec = codec; + this.asynchronousMediaCodecCallback = new AsynchronousMediaCodecCallback(callbackThread); + this.bufferEnqueuer = new AsynchronousMediaCodecBufferEnqueuer(codec, enqueueingThread); + this.synchronizeCodecInteractionsWithQueueing = synchronizeCodecInteractionsWithQueueing; + this.state = STATE_CREATED; + } + + private void initialize( + @Nullable MediaFormat mediaFormat, + @Nullable Surface surface, + @Nullable MediaCrypto crypto, + int flags) { + asynchronousMediaCodecCallback.initialize(codec); + TraceUtil.beginSection("configureCodec"); + codec.configure(mediaFormat, surface, crypto, flags); + TraceUtil.endSection(); + bufferEnqueuer.start(); + TraceUtil.beginSection("startCodec"); + codec.start(); + TraceUtil.endSection(); + state = STATE_INITIALIZED; + } + + @Override + public boolean needsReconfiguration() { + return false; + } + + @Override + public void queueInputBuffer( + int index, int offset, int size, long presentationTimeUs, int flags) { + bufferEnqueuer.queueInputBuffer(index, offset, size, presentationTimeUs, flags); + } + + @Override + public void 
queueSecureInputBuffer( + int index, int offset, CryptoInfo info, long presentationTimeUs, int flags) { + bufferEnqueuer.queueSecureInputBuffer(index, offset, info, presentationTimeUs, flags); + } + + @Override + public void releaseOutputBuffer(int index, boolean render) { + codec.releaseOutputBuffer(index, render); + } + + @Override + public void releaseOutputBuffer(int index, long renderTimeStampNs) { + codec.releaseOutputBuffer(index, renderTimeStampNs); + } + + @Override + public int dequeueInputBufferIndex() { + return asynchronousMediaCodecCallback.dequeueInputBufferIndex(); + } + + @Override + public int dequeueOutputBufferIndex(MediaCodec.BufferInfo bufferInfo) { + return asynchronousMediaCodecCallback.dequeueOutputBufferIndex(bufferInfo); + } + + @Override + public MediaFormat getOutputFormat() { + return asynchronousMediaCodecCallback.getOutputFormat(); + } + + @Override + @Nullable + public ByteBuffer getInputBuffer(int index) { + return codec.getInputBuffer(index); + } + + @Override + @Nullable + public ByteBuffer getOutputBuffer(int index) { + return codec.getOutputBuffer(index); + } + + @Override + public void flush() { + // The order of calls is important: + // 1. Flush the bufferEnqueuer to stop queueing input buffers. + // 2. Flush the codec to stop producing available input/output buffers. + // 3. Flush the callback so that in-flight callbacks are discarded. + // 4. Start the codec. The asynchronous callback will drop pending callbacks and we can start + // the codec now. + bufferEnqueuer.flush(); + codec.flush(); + asynchronousMediaCodecCallback.flush(); + codec.start(); + } + + @Override + public void release() { + try { + if (state == STATE_INITIALIZED) { + bufferEnqueuer.shutdown(); + asynchronousMediaCodecCallback.shutdown(); + } + state = STATE_SHUT_DOWN; + } finally { + if (!codecReleased) { + codec.release(); + codecReleased = true; + } + } + } + + @Override + public void setOnFrameRenderedListener(OnFrameRenderedListener listener, Handler handler) { + maybeBlockOnQueueing(); + codec.setOnFrameRenderedListener( + (codec, presentationTimeUs, nanoTime) -> + listener.onFrameRendered( + AsynchronousMediaCodecAdapter.this, presentationTimeUs, nanoTime), + handler); + } + + @Override + public void setOutputSurface(Surface surface) { + maybeBlockOnQueueing(); + codec.setOutputSurface(surface); + } + + @Override + public void setParameters(Bundle params) { + maybeBlockOnQueueing(); + codec.setParameters(params); + } + + @Override + public void setVideoScalingMode(@C.VideoScalingMode int scalingMode) { + maybeBlockOnQueueing(); + codec.setVideoScalingMode(scalingMode); + } + + @Override + @RequiresApi(26) + public PersistableBundle getMetrics() { + maybeBlockOnQueueing(); + return codec.getMetrics(); + } + + @VisibleForTesting + /* package */ void onError(MediaCodec.CodecException error) { + asynchronousMediaCodecCallback.onError(codec, error); + } + + @VisibleForTesting + /* package */ void onOutputFormatChanged(MediaFormat format) { + asynchronousMediaCodecCallback.onOutputFormatChanged(codec, format); + } + + private void maybeBlockOnQueueing() { + if (synchronizeCodecInteractionsWithQueueing) { + try { + bufferEnqueuer.waitUntilQueueingComplete(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + // The playback thread should not be interrupted. Raising this as an + // IllegalStateException. 
+ throw new IllegalStateException(e); + } + } + } + + private static String createCallbackThreadLabel(@C.TrackType int trackType) { + return createThreadLabel(trackType, /* prefix= */ "ExoPlayer:MediaCodecAsyncAdapter:"); + } + + private static String createQueueingThreadLabel(@C.TrackType int trackType) { + return createThreadLabel(trackType, /* prefix= */ "ExoPlayer:MediaCodecQueueingThread:"); + } + + private static String createThreadLabel(@C.TrackType int trackType, String prefix) { + StringBuilder labelBuilder = new StringBuilder(prefix); + if (trackType == C.TRACK_TYPE_AUDIO) { + labelBuilder.append("Audio"); + } else if (trackType == C.TRACK_TYPE_VIDEO) { + labelBuilder.append("Video"); + } else { + labelBuilder.append("Unknown(").append(trackType).append(")"); + } + return labelBuilder.toString(); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/AsynchronousMediaCodecBufferEnqueuer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/AsynchronousMediaCodecBufferEnqueuer.java new file mode 100644 index 0000000000..3e95c2a500 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/AsynchronousMediaCodecBufferEnqueuer.java @@ -0,0 +1,348 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.mediacodec; + +import static androidx.annotation.VisibleForTesting.NONE; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; + +import android.media.MediaCodec; +import android.os.Handler; +import android.os.HandlerThread; +import android.os.Message; +import androidx.annotation.GuardedBy; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import androidx.annotation.VisibleForTesting; +import com.google.android.exoplayer2.decoder.CryptoInfo; +import com.google.android.exoplayer2.util.ConditionVariable; +import com.google.android.exoplayer2.util.Util; +import java.util.ArrayDeque; +import java.util.Arrays; +import java.util.concurrent.atomic.AtomicReference; +import org.checkerframework.checker.nullness.compatqual.NullableType; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Performs {@link MediaCodec} input buffer queueing on a background thread. + * + *
<p>
      The implementation of this class assumes that its public methods will be called from the same + * thread. + */ +@RequiresApi(23) +class AsynchronousMediaCodecBufferEnqueuer { + + private static final int MSG_QUEUE_INPUT_BUFFER = 0; + private static final int MSG_QUEUE_SECURE_INPUT_BUFFER = 1; + private static final int MSG_OPEN_CV = 2; + + @GuardedBy("MESSAGE_PARAMS_INSTANCE_POOL") + private static final ArrayDeque MESSAGE_PARAMS_INSTANCE_POOL = new ArrayDeque<>(); + + private static final Object QUEUE_SECURE_LOCK = new Object(); + + private final MediaCodec codec; + private final HandlerThread handlerThread; + private @MonotonicNonNull Handler handler; + private final AtomicReference<@NullableType RuntimeException> pendingRuntimeException; + private final ConditionVariable conditionVariable; + private boolean started; + + /** + * Creates a new instance that submits input buffers on the specified {@link MediaCodec}. + * + * @param codec The {@link MediaCodec} to submit input buffers to. + * @param queueingThread The {@link HandlerThread} to use for queueing buffers. + */ + public AsynchronousMediaCodecBufferEnqueuer(MediaCodec codec, HandlerThread queueingThread) { + this(codec, queueingThread, /* conditionVariable= */ new ConditionVariable()); + } + + @VisibleForTesting + /* package */ AsynchronousMediaCodecBufferEnqueuer( + MediaCodec codec, HandlerThread handlerThread, ConditionVariable conditionVariable) { + this.codec = codec; + this.handlerThread = handlerThread; + this.conditionVariable = conditionVariable; + pendingRuntimeException = new AtomicReference<>(); + } + + /** + * Starts this instance. + * + *
<p>
      Call this method after creating an instance and before queueing input buffers. + */ + public void start() { + if (!started) { + handlerThread.start(); + handler = + new Handler(handlerThread.getLooper()) { + @Override + public void handleMessage(Message msg) { + doHandleMessage(msg); + } + }; + started = true; + } + } + + /** + * Submits an input buffer for decoding. + * + * @see android.media.MediaCodec#queueInputBuffer + */ + public void queueInputBuffer( + int index, int offset, int size, long presentationTimeUs, int flags) { + maybeThrowException(); + MessageParams messageParams = getMessageParams(); + messageParams.setQueueParams(index, offset, size, presentationTimeUs, flags); + Message message = castNonNull(handler).obtainMessage(MSG_QUEUE_INPUT_BUFFER, messageParams); + message.sendToTarget(); + } + + /** + * Submits an input buffer that potentially contains encrypted data for decoding. + * + *
<p>
      Note: This method behaves as {@link MediaCodec#queueSecureInputBuffer} with the difference + * that {@code info} is of type {@link CryptoInfo} and not {@link + * android.media.MediaCodec.CryptoInfo}. + * + * @see android.media.MediaCodec#queueSecureInputBuffer + */ + public void queueSecureInputBuffer( + int index, int offset, CryptoInfo info, long presentationTimeUs, int flags) { + maybeThrowException(); + MessageParams messageParams = getMessageParams(); + messageParams.setQueueParams(index, offset, /* size= */ 0, presentationTimeUs, flags); + copy(info, messageParams.cryptoInfo); + Message message = + castNonNull(handler).obtainMessage(MSG_QUEUE_SECURE_INPUT_BUFFER, messageParams); + message.sendToTarget(); + } + + /** Flushes the instance. */ + public void flush() { + if (started) { + try { + flushHandlerThread(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + // The playback thread should not be interrupted. Raising this as an + // IllegalStateException. + throw new IllegalStateException(e); + } + } + } + + /** Shuts down the instance. Make sure to call this method to release its internal resources. */ + public void shutdown() { + if (started) { + flush(); + handlerThread.quit(); + } + started = false; + } + + /** Blocks the current thread until all input buffers pending queueing are submitted. */ + public void waitUntilQueueingComplete() throws InterruptedException { + blockUntilHandlerThreadIsIdle(); + } + + private void maybeThrowException() { + @Nullable RuntimeException exception = pendingRuntimeException.getAndSet(null); + if (exception != null) { + throw exception; + } + } + + /** + * Empties all tasks enqueued on the {@link #handlerThread} via the {@link #handler}. This method + * blocks until the {@link #handlerThread} is idle. 
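flush() and waitUntilQueueingComplete() both rest on the same idle handshake: close the condition variable, post MSG_OPEN_CV behind any messages already queued, and block until the handler thread reaches the marker and opens it. A minimal standalone analogue of that handshake using a CountDownLatch (this class is illustrative and not part of the diff):

```java
import android.os.Handler;
import android.os.HandlerThread;
import java.util.concurrent.CountDownLatch;

// The latch plays the role of the ConditionVariable; the posted runnable plays MSG_OPEN_CV.
final class HandlerIdleWaiter {
  private final Handler handler;

  HandlerIdleWaiter(HandlerThread thread) {
    thread.start();
    handler = new Handler(thread.getLooper());
  }

  void waitUntilIdle() throws InterruptedException {
    CountDownLatch latch = new CountDownLatch(1);
    // The runnable runs only after every message posted before it has been handled,
    // so returning from await() means all previously queued work is complete.
    handler.post(latch::countDown);
    latch.await();
  }
}
```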
+ */ + private void flushHandlerThread() throws InterruptedException { + checkNotNull(this.handler).removeCallbacksAndMessages(null); + blockUntilHandlerThreadIsIdle(); + } + + private void blockUntilHandlerThreadIsIdle() throws InterruptedException { + conditionVariable.close(); + checkNotNull(handler).obtainMessage(MSG_OPEN_CV).sendToTarget(); + conditionVariable.block(); + } + + @VisibleForTesting(otherwise = NONE) + /* package */ void setPendingRuntimeException(RuntimeException exception) { + pendingRuntimeException.set(exception); + } + + // Called from the handler thread + + private void doHandleMessage(Message msg) { + @Nullable MessageParams params = null; + switch (msg.what) { + case MSG_QUEUE_INPUT_BUFFER: + params = (MessageParams) msg.obj; + doQueueInputBuffer( + params.index, params.offset, params.size, params.presentationTimeUs, params.flags); + break; + case MSG_QUEUE_SECURE_INPUT_BUFFER: + params = (MessageParams) msg.obj; + doQueueSecureInputBuffer( + params.index, + params.offset, + params.cryptoInfo, + params.presentationTimeUs, + params.flags); + break; + case MSG_OPEN_CV: + conditionVariable.open(); + break; + default: + pendingRuntimeException.compareAndSet( + null, new IllegalStateException(String.valueOf(msg.what))); + } + if (params != null) { + recycleMessageParams(params); + } + } + + private void doQueueInputBuffer( + int index, int offset, int size, long presentationTimeUs, int flag) { + try { + codec.queueInputBuffer(index, offset, size, presentationTimeUs, flag); + } catch (RuntimeException e) { + pendingRuntimeException.compareAndSet(null, e); + } + } + + private void doQueueSecureInputBuffer( + int index, int offset, MediaCodec.CryptoInfo info, long presentationTimeUs, int flags) { + try { + // Synchronize calls to MediaCodec.queueSecureInputBuffer() to avoid race conditions inside + // the crypto module when audio and video are sharing the same DRM session + // (see [Internal: b/149908061]). + synchronized (QUEUE_SECURE_LOCK) { + codec.queueSecureInputBuffer(index, offset, info, presentationTimeUs, flags); + } + } catch (RuntimeException e) { + pendingRuntimeException.compareAndSet(null, e); + } + } + + private static MessageParams getMessageParams() { + synchronized (MESSAGE_PARAMS_INSTANCE_POOL) { + if (MESSAGE_PARAMS_INSTANCE_POOL.isEmpty()) { + return new MessageParams(); + } else { + return MESSAGE_PARAMS_INSTANCE_POOL.removeFirst(); + } + } + } + + private static void recycleMessageParams(MessageParams params) { + synchronized (MESSAGE_PARAMS_INSTANCE_POOL) { + MESSAGE_PARAMS_INSTANCE_POOL.add(params); + } + } + + /** Parameters for queue input buffer and queue secure input buffer tasks. */ + private static class MessageParams { + public int index; + public int offset; + public int size; + public final MediaCodec.CryptoInfo cryptoInfo; + public long presentationTimeUs; + public int flags; + + MessageParams() { + cryptoInfo = new MediaCodec.CryptoInfo(); + } + + /** Convenience method for setting the queueing parameters. */ + public void setQueueParams( + int index, int offset, int size, long presentationTimeUs, int flags) { + this.index = index; + this.offset = offset; + this.size = size; + this.presentationTimeUs = presentationTimeUs; + this.flags = flags; + } + } + + /** Performs a deep copy of {@code cryptoInfo} to {@code frameworkCryptoInfo}. 
*/ + private static void copy( + CryptoInfo cryptoInfo, android.media.MediaCodec.CryptoInfo frameworkCryptoInfo) { + // Update frameworkCryptoInfo fields directly because CryptoInfo.set performs an unnecessary + // object allocation on Android N. + frameworkCryptoInfo.numSubSamples = cryptoInfo.numSubSamples; + frameworkCryptoInfo.numBytesOfClearData = + copy(cryptoInfo.numBytesOfClearData, frameworkCryptoInfo.numBytesOfClearData); + frameworkCryptoInfo.numBytesOfEncryptedData = + copy(cryptoInfo.numBytesOfEncryptedData, frameworkCryptoInfo.numBytesOfEncryptedData); + frameworkCryptoInfo.key = checkNotNull(copy(cryptoInfo.key, frameworkCryptoInfo.key)); + frameworkCryptoInfo.iv = checkNotNull(copy(cryptoInfo.iv, frameworkCryptoInfo.iv)); + frameworkCryptoInfo.mode = cryptoInfo.mode; + if (Util.SDK_INT >= 24) { + android.media.MediaCodec.CryptoInfo.Pattern pattern = + new android.media.MediaCodec.CryptoInfo.Pattern( + cryptoInfo.encryptedBlocks, cryptoInfo.clearBlocks); + frameworkCryptoInfo.setPattern(pattern); + } + } + + /** + * Copies {@code src}, reusing {@code dst} if it's at least as long as {@code src}. + * + * @param src The source array. + * @param dst The destination array, which will be reused if it's at least as long as {@code src}. + * @return The copy, which may be {@code dst} if it was reused. + */ + @Nullable + private static int[] copy(@Nullable int[] src, @Nullable int[] dst) { + if (src == null) { + return dst; + } + + if (dst == null || dst.length < src.length) { + return Arrays.copyOf(src, src.length); + } else { + System.arraycopy(src, 0, dst, 0, src.length); + return dst; + } + } + + /** + * Copies {@code src}, reusing {@code dst} if it's at least as long as {@code src}. + * + * @param src The source array. + * @param dst The destination array, which will be reused if it's at least as long as {@code src}. + * @return The copy, which may be {@code dst} if it was reused. + */ + @Nullable + private static byte[] copy(@Nullable byte[] src, @Nullable byte[] dst) { + if (src == null) { + return dst; + } + + if (dst == null || dst.length < src.length) { + return Arrays.copyOf(src, src.length); + } else { + System.arraycopy(src, 0, dst, 0, src.length); + return dst; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/AsynchronousMediaCodecCallback.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/AsynchronousMediaCodecCallback.java new file mode 100644 index 0000000000..a4a83c0dee --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/AsynchronousMediaCodecCallback.java @@ -0,0 +1,313 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
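A design note on the enqueuer above: MessageParams instances are drawn from and returned to a static pool rather than allocated per queued buffer, so the steady-state queueing path stays allocation-free. A generic sketch of that pooling pattern (this class and its names are illustrative, not part of the diff):

```java
import java.util.ArrayDeque;
import java.util.function.Supplier;

// Simplified pool in the spirit of MESSAGE_PARAMS_INSTANCE_POOL.
final class SimplePool<T> {
  private final ArrayDeque<T> pool = new ArrayDeque<>();
  private final Supplier<T> factory;

  SimplePool(Supplier<T> factory) {
    this.factory = factory;
  }

  synchronized T obtain() {
    // Reuse a recycled instance when one is available; allocate otherwise.
    return pool.isEmpty() ? factory.get() : pool.removeFirst();
  }

  synchronized void recycle(T instance) {
    pool.add(instance);
  }
}
```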
+ */ + +package com.google.android.exoplayer2.mediacodec; + +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + +import android.media.MediaCodec; +import android.media.MediaFormat; +import android.os.Handler; +import android.os.HandlerThread; +import androidx.annotation.GuardedBy; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.util.Util; +import java.util.ArrayDeque; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** A {@link MediaCodec.Callback} that routes callbacks on a separate thread. */ +@RequiresApi(23) +/* package */ final class AsynchronousMediaCodecCallback extends MediaCodec.Callback { + private final Object lock; + private final HandlerThread callbackThread; + + private @MonotonicNonNull Handler handler; + + @GuardedBy("lock") + private final IntArrayQueue availableInputBuffers; + + @GuardedBy("lock") + private final IntArrayQueue availableOutputBuffers; + + @GuardedBy("lock") + private final ArrayDeque bufferInfos; + + @GuardedBy("lock") + private final ArrayDeque formats; + + @GuardedBy("lock") + @Nullable + private MediaFormat currentFormat; + + @GuardedBy("lock") + @Nullable + private MediaFormat pendingOutputFormat; + + @GuardedBy("lock") + @Nullable + private MediaCodec.CodecException mediaCodecException; + + @GuardedBy("lock") + private long pendingFlushCount; + + @GuardedBy("lock") + private boolean shutDown; + + @GuardedBy("lock") + @Nullable + private IllegalStateException internalException; + + /** + * Creates a new instance. + * + * @param callbackThread The thread that will be used for routing the {@link MediaCodec} + * callbacks. The thread must not be started. + */ + /* package */ AsynchronousMediaCodecCallback(HandlerThread callbackThread) { + this.lock = new Object(); + this.callbackThread = callbackThread; + this.availableInputBuffers = new IntArrayQueue(); + this.availableOutputBuffers = new IntArrayQueue(); + this.bufferInfos = new ArrayDeque<>(); + this.formats = new ArrayDeque<>(); + } + + /** + * Sets the callback on {@code codec} and starts the background callback thread. + * + *
<p>
      Make sure to call {@link #shutdown()} to stop the background thread and release its + * resources. + * + * @see MediaCodec#setCallback(MediaCodec.Callback, Handler) + */ + public void initialize(MediaCodec codec) { + checkState(handler == null); + + callbackThread.start(); + Handler handler = new Handler(callbackThread.getLooper()); + codec.setCallback(this, handler); + // Initialize this.handler at the very end ensuring the callback in not considered configured + // if MediaCodec raises an exception. + this.handler = handler; + } + + /** + * Shuts down this instance. + * + *
<p>
      This method will stop the callback thread. After calling it, callbacks will no longer be + * handled and dequeue methods will return {@link MediaCodec#INFO_TRY_AGAIN_LATER}. + */ + public void shutdown() { + synchronized (lock) { + shutDown = true; + callbackThread.quit(); + flushInternal(); + } + } + + /** + * Returns the next available input buffer index or {@link MediaCodec#INFO_TRY_AGAIN_LATER} if no + * such buffer exists. + */ + public int dequeueInputBufferIndex() { + synchronized (lock) { + if (isFlushingOrShutdown()) { + return MediaCodec.INFO_TRY_AGAIN_LATER; + } else { + maybeThrowException(); + return availableInputBuffers.isEmpty() + ? MediaCodec.INFO_TRY_AGAIN_LATER + : availableInputBuffers.remove(); + } + } + } + + /** + * Returns the next available output buffer index. If the next available output is a MediaFormat + * change, it will return {@link MediaCodec#INFO_OUTPUT_FORMAT_CHANGED} and you should call {@link + * #getOutputFormat()} to get the format. If there is no available output, this method will return + * {@link MediaCodec#INFO_TRY_AGAIN_LATER}. + */ + public int dequeueOutputBufferIndex(MediaCodec.BufferInfo bufferInfo) { + synchronized (lock) { + if (isFlushingOrShutdown()) { + return MediaCodec.INFO_TRY_AGAIN_LATER; + } else { + maybeThrowException(); + if (availableOutputBuffers.isEmpty()) { + return MediaCodec.INFO_TRY_AGAIN_LATER; + } else { + int bufferIndex = availableOutputBuffers.remove(); + if (bufferIndex >= 0) { + checkStateNotNull(currentFormat); + MediaCodec.BufferInfo nextBufferInfo = bufferInfos.remove(); + bufferInfo.set( + nextBufferInfo.offset, + nextBufferInfo.size, + nextBufferInfo.presentationTimeUs, + nextBufferInfo.flags); + } else if (bufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + currentFormat = formats.remove(); + } + return bufferIndex; + } + } + } + } + + /** + * Returns the {@link MediaFormat} signalled by the underlying {@link MediaCodec}. + * + *
<p>
      Call this after {@link #dequeueOutputBufferIndex} returned {@link + * MediaCodec#INFO_OUTPUT_FORMAT_CHANGED}. + * + * @throws IllegalStateException If called before {@link #dequeueOutputBufferIndex} has returned + * {@link MediaCodec#INFO_OUTPUT_FORMAT_CHANGED}. + */ + public MediaFormat getOutputFormat() { + synchronized (lock) { + if (currentFormat == null) { + throw new IllegalStateException(); + } + return currentFormat; + } + } + + /** + * Initiates a flush asynchronously, which will be completed on the callback thread. When the + * flush is complete, it will trigger {@code onFlushCompleted} from the callback thread. + */ + public void flush() { + synchronized (lock) { + ++pendingFlushCount; + Util.castNonNull(handler).post(this::onFlushCompleted); + } + } + + // Called from the callback thread. + + @Override + public void onInputBufferAvailable(MediaCodec codec, int index) { + synchronized (lock) { + availableInputBuffers.add(index); + } + } + + @Override + public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info) { + synchronized (lock) { + if (pendingOutputFormat != null) { + addOutputFormat(pendingOutputFormat); + pendingOutputFormat = null; + } + availableOutputBuffers.add(index); + bufferInfos.add(info); + } + } + + @Override + public void onError(MediaCodec codec, MediaCodec.CodecException e) { + synchronized (lock) { + mediaCodecException = e; + } + } + + @Override + public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) { + synchronized (lock) { + addOutputFormat(format); + pendingOutputFormat = null; + } + } + + private void onFlushCompleted() { + synchronized (lock) { + if (shutDown) { + return; + } + + --pendingFlushCount; + if (pendingFlushCount > 0) { + // Another flush() has been called. + return; + } else if (pendingFlushCount < 0) { + // This should never happen. + setInternalException(new IllegalStateException()); + return; + } + flushInternal(); + } + } + + /** Flushes all available input and output buffers and any error that was previously set. */ + @GuardedBy("lock") + private void flushInternal() { + if (!formats.isEmpty()) { + pendingOutputFormat = formats.getLast(); + } + // else, pendingOutputFormat may already be non-null following a previous flush, and remains + // set in this case. 
+ + availableInputBuffers.clear(); + availableOutputBuffers.clear(); + bufferInfos.clear(); + formats.clear(); + mediaCodecException = null; + } + + @GuardedBy("lock") + private boolean isFlushingOrShutdown() { + return pendingFlushCount > 0 || shutDown; + } + + @GuardedBy("lock") + private void addOutputFormat(MediaFormat mediaFormat) { + availableOutputBuffers.add(MediaCodec.INFO_OUTPUT_FORMAT_CHANGED); + formats.add(mediaFormat); + } + + @GuardedBy("lock") + private void maybeThrowException() { + maybeThrowInternalException(); + maybeThrowMediaCodecException(); + } + + @GuardedBy("lock") + private void maybeThrowInternalException() { + if (internalException != null) { + IllegalStateException e = internalException; + internalException = null; + throw e; + } + } + + @GuardedBy("lock") + private void maybeThrowMediaCodecException() { + if (mediaCodecException != null) { + MediaCodec.CodecException codecException = mediaCodecException; + mediaCodecException = null; + throw codecException; + } + } + + private void setInternalException(IllegalStateException e) { + synchronized (lock) { + internalException = e; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/BatchBuffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/BatchBuffer.java new file mode 100644 index 0000000000..a34a3b4f34 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/BatchBuffer.java @@ -0,0 +1,138 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.mediacodec; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import androidx.annotation.IntRange; +import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.decoder.DecoderInputBuffer; +import java.nio.ByteBuffer; + +/** Buffer to which multiple sample buffers can be appended for batch processing */ +/* package */ final class BatchBuffer extends DecoderInputBuffer { + + /** The default maximum number of samples that can be appended before the buffer is full. */ + public static final int DEFAULT_MAX_SAMPLE_COUNT = 32; + /** + * The maximum size of the buffer in bytes. This prevents excessive memory usage for high bitrate + * streams. The limit is equivalent of 75s of mp3 at highest bitrate (320kb/s) and 30s of AAC LC + * at highest bitrate (800kb/s). That limit is ignored for the first sample. 
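The 75 s and 30 s figures quoted above follow from the 3 * 1000 * 1024 byte cap; a quick self-contained check of that arithmetic:

```java
public final class BatchBufferLimitCheck {
  public static void main(String[] args) {
    int maxSizeBytes = 3 * 1000 * 1024;        // 3,072,000 bytes
    double mp3BytesPerSecond = 320_000 / 8.0;  // 320 kb/s -> 40,000 bytes/s
    double aacBytesPerSecond = 800_000 / 8.0;  // 800 kb/s -> 100,000 bytes/s
    System.out.printf("mp3: %.1f s%n", maxSizeBytes / mp3BytesPerSecond); // ~76.8 s
    System.out.printf("aac: %.1f s%n", maxSizeBytes / aacBytesPerSecond); // ~30.7 s
  }
}
```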
+ */ + @VisibleForTesting /* package */ static final int MAX_SIZE_BYTES = 3 * 1000 * 1024; + + private long lastSampleTimeUs; + private int sampleCount; + private int maxSampleCount; + + public BatchBuffer() { + super(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DIRECT); + maxSampleCount = DEFAULT_MAX_SAMPLE_COUNT; + } + + @Override + public void clear() { + super.clear(); + sampleCount = 0; + } + + /** Sets the maximum number of samples that can be appended before the buffer is full. */ + public void setMaxSampleCount(@IntRange(from = 1) int maxSampleCount) { + checkArgument(maxSampleCount > 0); + this.maxSampleCount = maxSampleCount; + } + + /** + * Returns the timestamp of the first sample in the buffer. The return value is undefined if + * {@link #hasSamples()} is {@code false}. + */ + public long getFirstSampleTimeUs() { + return timeUs; + } + + /** + * Returns the timestamp of the last sample in the buffer. The return value is undefined if {@link + * #hasSamples()} is {@code false}. + */ + public long getLastSampleTimeUs() { + return lastSampleTimeUs; + } + + /** Returns the number of samples in the buffer. */ + public int getSampleCount() { + return sampleCount; + } + + /** Returns whether the buffer contains one or more samples. */ + public boolean hasSamples() { + return sampleCount > 0; + } + + /** + * Attempts to append the provided buffer. + * + * @param buffer The buffer to try and append. + * @return Whether the buffer was successfully appended. + * @throws IllegalArgumentException If the {@code buffer} is encrypted, has supplemental data, or + * is an end of stream buffer, none of which are supported. + */ + public boolean append(DecoderInputBuffer buffer) { + checkArgument(!buffer.isEncrypted()); + checkArgument(!buffer.hasSupplementalData()); + checkArgument(!buffer.isEndOfStream()); + if (!canAppendSampleBuffer(buffer)) { + return false; + } + if (sampleCount++ == 0) { + timeUs = buffer.timeUs; + if (buffer.isKeyFrame()) { + setFlags(C.BUFFER_FLAG_KEY_FRAME); + } + } + if (buffer.isDecodeOnly()) { + setFlags(C.BUFFER_FLAG_DECODE_ONLY); + } + @Nullable ByteBuffer bufferData = buffer.data; + if (bufferData != null) { + ensureSpaceForWrite(bufferData.remaining()); + data.put(bufferData); + } + lastSampleTimeUs = buffer.timeUs; + return true; + } + + private boolean canAppendSampleBuffer(DecoderInputBuffer buffer) { + if (!hasSamples()) { + // Always allow appending when the buffer is empty, else no progress can be made. + return true; + } + if (sampleCount >= maxSampleCount) { + return false; + } + if (buffer.isDecodeOnly() != isDecodeOnly()) { + return false; + } + @Nullable ByteBuffer bufferData = buffer.data; + if (bufferData != null + && data != null + && data.position() + bufferData.remaining() > MAX_SIZE_BYTES) { + return false; + } + return true; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/C2Mp3TimestampTracker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/C2Mp3TimestampTracker.java new file mode 100644 index 0000000000..8fde836637 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/C2Mp3TimestampTracker.java @@ -0,0 +1,106 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.mediacodec; + +import static java.lang.Math.max; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.audio.MpegAudioUtil; +import com.google.android.exoplayer2.decoder.DecoderInputBuffer; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Log; +import java.nio.ByteBuffer; + +/** + * Tracks the number of processed samples to calculate an accurate current timestamp, matching the + * calculations made in the Codec2 Mp3 decoder. + */ +/* package */ final class C2Mp3TimestampTracker { + + private static final long DECODER_DELAY_FRAMES = 529; + private static final String TAG = "C2Mp3TimestampTracker"; + + private long anchorTimestampUs; + private long processedFrames; + private boolean seenInvalidMpegAudioHeader; + + /** + * Resets the timestamp tracker. + * + *
<p>
      This should be done when the codec is flushed. + */ + public void reset() { + anchorTimestampUs = 0; + processedFrames = 0; + seenInvalidMpegAudioHeader = false; + } + + /** + * Updates the tracker with the given input buffer and returns the expected output timestamp. + * + * @param format The format associated with the buffer. + * @param buffer The current input buffer. + * @return The expected output presentation time, in microseconds. + */ + public long updateAndGetPresentationTimeUs(Format format, DecoderInputBuffer buffer) { + if (processedFrames == 0) { + anchorTimestampUs = buffer.timeUs; + } + + if (seenInvalidMpegAudioHeader) { + return buffer.timeUs; + } + + ByteBuffer data = Assertions.checkNotNull(buffer.data); + int sampleHeaderData = 0; + for (int i = 0; i < 4; i++) { + sampleHeaderData <<= 8; + sampleHeaderData |= data.get(i) & 0xFF; + } + + int frameCount = MpegAudioUtil.parseMpegAudioFrameSampleCount(sampleHeaderData); + if (frameCount == C.LENGTH_UNSET) { + seenInvalidMpegAudioHeader = true; + processedFrames = 0; + anchorTimestampUs = buffer.timeUs; + Log.w(TAG, "MPEG audio header is invalid."); + return buffer.timeUs; + } + long currentBufferTimestampUs = getBufferTimestampUs(format.sampleRate); + processedFrames += frameCount; + return currentBufferTimestampUs; + } + + /** + * Returns the timestamp of the last buffer that will be produced if the stream ends at the + * current position, in microseconds. + * + * @param format The format associated with input buffers. + * @return The timestamp of the last buffer that will be produced if the stream ends at the + * current position, in microseconds. + */ + public long getLastOutputBufferPresentationTimeUs(Format format) { + return getBufferTimestampUs(format.sampleRate); + } + + private long getBufferTimestampUs(long sampleRate) { + // This calculation matches the timestamp calculation in the Codec2 Mp3 Decoder. + // https://cs.android.com/android/platform/superproject/+/main:frameworks/av/media/codec2/components/mp3/C2SoftMp3Dec.cpp;l=464;drc=ed134640332fea70ca4b05694289d91a5265bb46 + return anchorTimestampUs + + max(0, (processedFrames - DECODER_DELAY_FRAMES) * C.MICROS_PER_SECOND / sampleRate); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/DefaultMediaCodecAdapterFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/DefaultMediaCodecAdapterFactory.java new file mode 100644 index 0000000000..f9d991e721 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/DefaultMediaCodecAdapterFactory.java @@ -0,0 +1,115 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
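To make the timestamp formula above concrete, a self-contained calculation for the first two input buffers, assuming a 44.1 kHz stream and the usual 1152-sample MPEG-1 Layer III frame (the sample rate and frame size are assumptions for illustration; only the 529-frame delay comes from the class above):

```java
public final class C2Mp3TimestampCheck {
  public static void main(String[] args) {
    long decoderDelayFrames = 529;   // DECODER_DELAY_FRAMES in C2Mp3TimestampTracker
    long microsPerSecond = 1_000_000L;
    long sampleRate = 44_100;        // assumed
    long framesPerMp3Frame = 1_152;  // typical MPEG-1 Layer III frame, assumed
    long anchorTimestampUs = 0;      // timeUs of the first input buffer

    long processedFrames = 0;
    // First buffer: processedFrames is still 0, so the output time equals the anchor.
    long t0 = anchorTimestampUs
        + Math.max(0, (processedFrames - decoderDelayFrames) * microsPerSecond / sampleRate);
    processedFrames += framesPerMp3Frame;
    // Second buffer: (1152 - 529) frames have cleared the decoder delay -> 14126 us.
    long t1 = anchorTimestampUs
        + Math.max(0, (processedFrames - decoderDelayFrames) * microsPerSecond / sampleRate);
    System.out.println(t0 + " " + t1); // 0 14126
  }
}
```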
+ */ +package com.google.android.exoplayer2.mediacodec; + +import static java.lang.annotation.ElementType.TYPE_USE; + +import androidx.annotation.IntDef; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.io.IOException; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * The default {@link MediaCodecAdapter.Factory}. + * + *
<p>
      By default, this factory {@link #createAdapter creates} {@link AsynchronousMediaCodecAdapter} + * instances on devices with API level >= 31 (Android 12+). For devices with older API versions, + * the default behavior is to create {@link SynchronousMediaCodecAdapter} instances. The factory + * offers APIs to force the creation of {@link AsynchronousMediaCodecAdapter} (applicable for + * devices with API >= 23) or {@link SynchronousMediaCodecAdapter} instances. + */ +public final class DefaultMediaCodecAdapterFactory implements MediaCodecAdapter.Factory { + + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({MODE_DEFAULT, MODE_ENABLED, MODE_DISABLED}) + private @interface Mode {} + + private static final int MODE_DEFAULT = 0; + private static final int MODE_ENABLED = 1; + private static final int MODE_DISABLED = 2; + + private static final String TAG = "DMCodecAdapterFactory"; + + private @Mode int asynchronousMode; + private boolean enableSynchronizeCodecInteractionsWithQueueing; + + public DefaultMediaCodecAdapterFactory() { + asynchronousMode = MODE_DEFAULT; + } + + /** + * Forces this factory to always create {@link AsynchronousMediaCodecAdapter} instances, provided + * the device API level is >= 23. For devices with API level < 23, the factory will create + * {@link SynchronousMediaCodecAdapter SynchronousMediaCodecAdapters}. + * + * @return This factory, for convenience. + */ + @CanIgnoreReturnValue + public DefaultMediaCodecAdapterFactory forceEnableAsynchronous() { + asynchronousMode = MODE_ENABLED; + return this; + } + + /** + * Forces the factory to always create {@link SynchronousMediaCodecAdapter} instances. + * + * @return This factory, for convenience. + */ + @CanIgnoreReturnValue + public DefaultMediaCodecAdapterFactory forceDisableAsynchronous() { + asynchronousMode = MODE_DISABLED; + return this; + } + + /** + * Enable synchronizing codec interactions with asynchronous buffer queueing. + * + *
<p>
      This method is experimental, and will be renamed or removed in a future release. + * + * @param enabled Whether codec interactions will be synchronized with asynchronous buffer + * queueing. + */ + public void experimentalSetSynchronizeCodecInteractionsWithQueueingEnabled(boolean enabled) { + enableSynchronizeCodecInteractionsWithQueueing = enabled; + } + + @Override + public MediaCodecAdapter createAdapter(MediaCodecAdapter.Configuration configuration) + throws IOException { + if (Util.SDK_INT >= 23 + && (asynchronousMode == MODE_ENABLED + || (asynchronousMode == MODE_DEFAULT && Util.SDK_INT >= 31))) { + int trackType = MimeTypes.getTrackType(configuration.format.sampleMimeType); + Log.i( + TAG, + "Creating an asynchronous MediaCodec adapter for track type " + + Util.getTrackTypeString(trackType)); + AsynchronousMediaCodecAdapter.Factory factory = + new AsynchronousMediaCodecAdapter.Factory( + trackType, enableSynchronizeCodecInteractionsWithQueueing); + return factory.createAdapter(configuration); + } + return new SynchronousMediaCodecAdapter.Factory().createAdapter(configuration); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/IntArrayQueue.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/IntArrayQueue.java new file mode 100644 index 0000000000..48461892d5 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/IntArrayQueue.java @@ -0,0 +1,112 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.mediacodec; + +import java.util.NoSuchElementException; + +/** + * Array-based unbounded queue for int primitives with amortized O(1) add and remove. + * + *
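A minimal usage sketch of the mode switches the factory above exposes; how the resulting factory is handed to a renderer is outside this hunk and omitted here (an assumption of this sketch).

import com.google.android.exoplayer2.mediacodec.DefaultMediaCodecAdapterFactory;
import com.google.android.exoplayer2.mediacodec.MediaCodecAdapter;

final class AdapterFactorySketch {
  static MediaCodecAdapter.Factory chooseFactory(boolean preferAsync) {
    DefaultMediaCodecAdapterFactory factory = new DefaultMediaCodecAdapterFactory();
    if (preferAsync) {
      // On API 23+ this always yields AsynchronousMediaCodecAdapter instances.
      factory.forceEnableAsynchronous();
    }
    // Left in its default mode, createAdapter() only goes asynchronous on API 31+
    // and falls back to SynchronousMediaCodecAdapter everywhere else.
    return factory;
  }
}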
<p>
      Use this class instead of a {@link java.util.Deque} to avoid boxing int primitives to {@link + * Integer} instances. + */ +/* package */ final class IntArrayQueue { + + /** Default capacity needs to be a power of 2. */ + private static final int DEFAULT_INITIAL_CAPACITY = 16; + + private int headIndex; + private int tailIndex; + private int size; + private int[] data; + private int wrapAroundMask; + + public IntArrayQueue() { + headIndex = 0; + tailIndex = -1; + size = 0; + data = new int[DEFAULT_INITIAL_CAPACITY]; + wrapAroundMask = data.length - 1; + } + + /** Add a new item to the queue. */ + public void add(int value) { + if (size == data.length) { + doubleArraySize(); + } + + tailIndex = (tailIndex + 1) & wrapAroundMask; + data[tailIndex] = value; + size++; + } + + /** + * Remove an item from the queue. + * + * @throws NoSuchElementException if the queue is empty. + */ + public int remove() { + if (size == 0) { + throw new NoSuchElementException(); + } + + int value = data[headIndex]; + headIndex = (headIndex + 1) & wrapAroundMask; + size--; + + return value; + } + + /** Returns the number of items in the queue. */ + public int size() { + return size; + } + + /** Returns whether the queue is empty. */ + public boolean isEmpty() { + return size == 0; + } + + /** Clears the queue. */ + public void clear() { + headIndex = 0; + tailIndex = -1; + size = 0; + } + + /** Returns the length of the backing array. */ + public int capacity() { + return data.length; + } + + private void doubleArraySize() { + int newCapacity = data.length << 1; + if (newCapacity < 0) { + throw new IllegalStateException(); + } + + int[] newData = new int[newCapacity]; + int itemsToRight = data.length - headIndex; + int itemsToLeft = headIndex; + System.arraycopy(data, headIndex, newData, 0, itemsToRight); + System.arraycopy(data, 0, newData, itemsToRight, itemsToLeft); + + headIndex = 0; + tailIndex = size - 1; + data = newData; + wrapAroundMask = data.length - 1; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecAdapter.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecAdapter.java new file mode 100644 index 0000000000..69acb3b844 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecAdapter.java @@ -0,0 +1,266 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
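A minimal usage sketch for the queue above; since the class is package-private, such code would have to live in com.google.android.exoplayer2.mediacodec. It shows FIFO order surviving the power-of-two capacity doubling.

package com.google.android.exoplayer2.mediacodec;

final class IntArrayQueueSketch {
  public static void main(String[] args) {
    IntArrayQueue queue = new IntArrayQueue(); // initial capacity 16
    for (int i = 0; i < 20; i++) {
      queue.add(i); // the 17th add triggers doubleArraySize(): capacity 16 -> 32
    }
    System.out.println(queue.capacity()); // 32
    while (!queue.isEmpty()) {
      System.out.print(queue.remove() + " "); // 0 1 2 ... 19, no Integer boxing
    }
  }
}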
+ */ +package com.google.android.exoplayer2.mediacodec; + +import android.media.MediaCodec; +import android.media.MediaCrypto; +import android.media.MediaFormat; +import android.os.Bundle; +import android.os.Handler; +import android.os.PersistableBundle; +import android.view.Surface; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.decoder.CryptoInfo; +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * Abstracts {@link MediaCodec} operations. + * + *
<p>
      {@code MediaCodecAdapter} offers a common interface to interact with a {@link MediaCodec} + * regardless of the mode the {@link MediaCodec} is operating in. + */ +public interface MediaCodecAdapter { + /** Configuration parameters for a {@link MediaCodecAdapter}. */ + final class Configuration { + + /** + * Creates a configuration for audio decoding. + * + * @param codecInfo See {@link #codecInfo}. + * @param mediaFormat See {@link #mediaFormat}. + * @param format See {@link #format}. + * @param crypto See {@link #crypto}. + * @return The created instance. + */ + public static Configuration createForAudioDecoding( + MediaCodecInfo codecInfo, + MediaFormat mediaFormat, + Format format, + @Nullable MediaCrypto crypto) { + return new Configuration( + codecInfo, mediaFormat, format, /* surface= */ null, crypto, /* flags= */ 0); + } + + /** + * Creates a configuration for video decoding. + * + * @param codecInfo See {@link #codecInfo}. + * @param mediaFormat See {@link #mediaFormat}. + * @param format See {@link #format}. + * @param surface See {@link #surface}. + * @param crypto See {@link #crypto}. + * @return The created instance. + */ + public static Configuration createForVideoDecoding( + MediaCodecInfo codecInfo, + MediaFormat mediaFormat, + Format format, + @Nullable Surface surface, + @Nullable MediaCrypto crypto) { + return new Configuration(codecInfo, mediaFormat, format, surface, crypto, /* flags= */ 0); + } + + /** Information about the {@link MediaCodec} being configured. */ + public final MediaCodecInfo codecInfo; + /** The {@link MediaFormat} for which the codec is being configured. */ + public final MediaFormat mediaFormat; + /** The {@link Format} for which the codec is being configured. */ + public final Format format; + /** + * For video decoding, the output where the object will render the decoded frames. This must be + * null if the codec is not a video decoder, or if it is configured for {@link ByteBuffer} + * output. + */ + @Nullable public final Surface surface; + /** For DRM protected playbacks, a {@link MediaCrypto} to use for decryption. */ + @Nullable public final MediaCrypto crypto; + /** See {@link MediaCodec#configure}. */ + public final int flags; + + private Configuration( + MediaCodecInfo codecInfo, + MediaFormat mediaFormat, + Format format, + @Nullable Surface surface, + @Nullable MediaCrypto crypto, + int flags) { + this.codecInfo = codecInfo; + this.mediaFormat = mediaFormat; + this.format = format; + this.surface = surface; + this.crypto = crypto; + this.flags = flags; + } + } + + /** A factory for {@link MediaCodecAdapter} instances. */ + interface Factory { + + /** Default factory used in most cases. */ + Factory DEFAULT = new DefaultMediaCodecAdapterFactory(); + + /** Creates a {@link MediaCodecAdapter} instance. */ + MediaCodecAdapter createAdapter(Configuration configuration) throws IOException; + } + + /** + * Listener to be called when an output frame has rendered on the output surface. + * + * @see MediaCodec.OnFrameRenderedListener + */ + interface OnFrameRenderedListener { + void onFrameRendered(MediaCodecAdapter codec, long presentationTimeUs, long nanoTime); + } + + /** + * Returns the next available input buffer index from the underlying {@link MediaCodec} or {@link + * MediaCodec#INFO_TRY_AGAIN_LATER} if no such buffer exists. + * + * @throws IllegalStateException If the underlying {@link MediaCodec} raised an error. 
+ */ + int dequeueInputBufferIndex(); + + /** + * Returns the next available output buffer index from the underlying {@link MediaCodec}. If the + * next available output is a MediaFormat change, it will return {@link + * MediaCodec#INFO_OUTPUT_FORMAT_CHANGED} and you should call {@link #getOutputFormat()} to get + * the format. If there is no available output, this method will return {@link + * MediaCodec#INFO_TRY_AGAIN_LATER}. + * + * @throws IllegalStateException If the underlying {@link MediaCodec} raised an error. + */ + int dequeueOutputBufferIndex(MediaCodec.BufferInfo bufferInfo); + + /** + * Gets the {@link MediaFormat} that was output from the {@link MediaCodec}. + * + *
<p>
      Call this method if a previous call to {@link #dequeueOutputBufferIndex} returned {@link + * MediaCodec#INFO_OUTPUT_FORMAT_CHANGED}. + */ + MediaFormat getOutputFormat(); + + /** + * Returns a writable ByteBuffer object for a dequeued input buffer index. + * + * @see MediaCodec#getInputBuffer(int) + */ + @Nullable + ByteBuffer getInputBuffer(int index); + + /** + * Returns a read-only ByteBuffer for a dequeued output buffer index. + * + * @see MediaCodec#getOutputBuffer(int) + */ + @Nullable + ByteBuffer getOutputBuffer(int index); + + /** + * Submit an input buffer for decoding. + * + *
<p>
      The {@code index} must be an input buffer index that has been obtained from a previous call + * to {@link #dequeueInputBufferIndex()}. + * + * @see MediaCodec#queueInputBuffer + */ + void queueInputBuffer(int index, int offset, int size, long presentationTimeUs, int flags); + + /** + * Submit an input buffer that is potentially encrypted for decoding. + * + *
<p>
      The {@code index} must be an input buffer index that has been obtained from a previous call + * to {@link #dequeueInputBufferIndex()}. + * + *
<p>
      This method behaves like {@link MediaCodec#queueSecureInputBuffer}, with the difference that + * {@code info} is of type {@link CryptoInfo} and not {@link android.media.MediaCodec.CryptoInfo}. + * + * @see MediaCodec#queueSecureInputBuffer + */ + void queueSecureInputBuffer( + int index, int offset, CryptoInfo info, long presentationTimeUs, int flags); + + /** + * Returns the buffer to the {@link MediaCodec}. If the {@link MediaCodec} was configured with an + * output surface, setting {@code render} to {@code true} will first send the buffer to the output + * surface. The surface will release the buffer back to the codec once it is no longer + * used/displayed. + * + * @see MediaCodec#releaseOutputBuffer(int, boolean) + */ + void releaseOutputBuffer(int index, boolean render); + + /** + * Updates the output buffer's surface timestamp and sends it to the {@link MediaCodec} to render + * it on the output surface. If the {@link MediaCodec} is not configured with an output surface, + * this call will simply return the buffer to the {@link MediaCodec}. + * + * @see MediaCodec#releaseOutputBuffer(int, long) + */ + @RequiresApi(21) + void releaseOutputBuffer(int index, long renderTimeStampNs); + + /** Flushes the adapter and the underlying {@link MediaCodec}. */ + void flush(); + + /** Releases the adapter and the underlying {@link MediaCodec}. */ + void release(); + + /** + * Registers a callback to be invoked when an output frame is rendered on the output surface. + * + * @see MediaCodec#setOnFrameRenderedListener + */ + @RequiresApi(23) + void setOnFrameRenderedListener(OnFrameRenderedListener listener, Handler handler); + + /** + * Dynamically sets the output surface of a {@link MediaCodec}. + * + * @see MediaCodec#setOutputSurface(Surface) + */ + @RequiresApi(23) + void setOutputSurface(Surface surface); + + /** + * Communicate additional parameter changes to the {@link MediaCodec} instance. + * + * @see MediaCodec#setParameters(Bundle) + */ + @RequiresApi(19) + void setParameters(Bundle params); + + /** + * Specifies the scaling mode to use, if a surface was specified when the codec was created. + * + * @see MediaCodec#setVideoScalingMode(int) + */ + void setVideoScalingMode(@C.VideoScalingMode int scalingMode); + + /** Whether the adapter needs to be reconfigured before it is used. */ + boolean needsReconfiguration(); + + /** + * Returns metrics data about the current codec instance. + * + * @see MediaCodec#getMetrics() + */ + @RequiresApi(26) + PersistableBundle getMetrics(); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecDecoderException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecDecoderException.java new file mode 100644 index 0000000000..524f8568f7 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecDecoderException.java @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
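A minimal sketch of the dequeue/queue cycle the interface above abstracts. Creating the adapter (via MediaCodecAdapter.Factory and a Configuration) and producing encoded sample data are assumed to happen elsewhere; fillSampleData is a hypothetical stub.

import android.media.MediaCodec;
import android.media.MediaFormat;
import com.google.android.exoplayer2.mediacodec.MediaCodecAdapter;
import java.nio.ByteBuffer;

final class AdapterLoopSketch {
  static void feedAndDrain(MediaCodecAdapter adapter, long presentationTimeUs) {
    // Feed one input buffer, if the codec has one available.
    int inputIndex = adapter.dequeueInputBufferIndex();
    if (inputIndex >= 0) {
      ByteBuffer input = adapter.getInputBuffer(inputIndex);
      if (input != null) {
        int size = fillSampleData(input); // hypothetical helper copying encoded bytes
        adapter.queueInputBuffer(inputIndex, /* offset= */ 0, size, presentationTimeUs, /* flags= */ 0);
      }
    }

    // Drain any output that is ready.
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int outputIndex = adapter.dequeueOutputBufferIndex(info);
    if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
      MediaFormat newFormat = adapter.getOutputFormat(); // handle the format change
    } else if (outputIndex >= 0) {
      adapter.releaseOutputBuffer(outputIndex, /* render= */ true);
    }
    // MediaCodec.INFO_TRY_AGAIN_LATER means there is simply nothing to do yet.
  }

  private static int fillSampleData(ByteBuffer buffer) {
    return 0; // stub: a real caller would write encoded sample data and return its size
  }
}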
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.mediacodec; + +import android.media.MediaCodec; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.decoder.DecoderException; +import com.google.android.exoplayer2.util.Util; + +/** Thrown when a failure occurs in a {@link MediaCodec} decoder. */ +public class MediaCodecDecoderException extends DecoderException { + + /** The {@link MediaCodecInfo} of the decoder that failed. Null if unknown. */ + @Nullable public final MediaCodecInfo codecInfo; + + /** An optional developer-readable diagnostic information string. May be null. */ + @Nullable public final String diagnosticInfo; + + public MediaCodecDecoderException(Throwable cause, @Nullable MediaCodecInfo codecInfo) { + super("Decoder failed: " + (codecInfo == null ? null : codecInfo.name), cause); + this.codecInfo = codecInfo; + diagnosticInfo = Util.SDK_INT >= 21 ? getDiagnosticInfoV21(cause) : null; + } + + @RequiresApi(21) + @Nullable + private static String getDiagnosticInfoV21(Throwable cause) { + if (cause instanceof MediaCodec.CodecException) { + return ((MediaCodec.CodecException) cause).getDiagnosticInfo(); + } + return null; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecInfo.java index ce347f3e05..e9f6dac001 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecInfo.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecInfo.java @@ -15,20 +15,47 @@ */ package com.google.android.exoplayer2.mediacodec; -import android.annotation.TargetApi; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_AUDIO_CHANNEL_COUNT_CHANGED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_AUDIO_ENCODING_CHANGED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_AUDIO_SAMPLE_RATE_CHANGED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_INITIALIZATION_DATA_CHANGED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_MIME_TYPE_CHANGED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_VIDEO_COLOR_INFO_CHANGED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_VIDEO_RESOLUTION_CHANGED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_VIDEO_ROTATION_CHANGED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_WORKAROUND; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_NO; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_YES_WITHOUT_RECONFIGURATION; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_YES_WITH_FLUSH; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_YES_WITH_RECONFIGURATION; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.graphics.Point; import android.media.MediaCodec; import android.media.MediaCodecInfo.AudioCapabilities; import android.media.MediaCodecInfo.CodecCapabilities; 
import android.media.MediaCodecInfo.CodecProfileLevel; import android.media.MediaCodecInfo.VideoCapabilities; +import android.media.MediaCodecInfo.VideoCapabilities.PerformancePoint; import android.util.Pair; +import androidx.annotation.DoNotInline; +import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DecoderDiscardReasons; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DecoderReuseResult; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.util.List; /** Information about a {@link MediaCodec} for a given mime type. */ @SuppressWarnings("InlinedApi") @@ -44,21 +71,20 @@ public final class MediaCodecInfo { /** * The name of the decoder. - *
<p>
      - * May be passed to {@link MediaCodec#createByCodecName(String)} to create an instance of the + * + *
<p>
      May be passed to {@link MediaCodec#createByCodecName(String)} to create an instance of the * decoder. */ public final String name; - /** The MIME type handled by the codec, or {@code null} if this is a passthrough codec. */ - @Nullable public final String mimeType; + /** The MIME type handled by the codec. */ + public final String mimeType; /** - * The MIME type that the codec uses for media of type {@link #mimeType}, or {@code null} if this - * is a passthrough codec. Equal to {@link #mimeType} unless the codec is known to use a - * non-standard MIME type alias. + * The MIME type that the codec uses for media of type {@link #mimeType}. Equal to {@link + * #mimeType} unless the codec is known to use a non-standard MIME type alias. */ - @Nullable public final String codecMimeType; + public final String codecMimeType; /** * The capabilities of the decoder, like the profiles/levels it supports, or {@code null} if not @@ -90,9 +116,6 @@ public final class MediaCodecInfo { */ public final boolean secure; - /** Whether this instance describes a passthrough codec. */ - public final boolean passthrough; - /** * Whether the codec is hardware accelerated. * @@ -122,26 +145,6 @@ public final class MediaCodecInfo { private final boolean isVideo; - /** - * Creates an instance representing an audio passthrough decoder. - * - * @param name The name of the {@link MediaCodec}. - * @return The created instance. - */ - public static MediaCodecInfo newPassthroughInstance(String name) { - return new MediaCodecInfo( - name, - /* mimeType= */ null, - /* codecMimeType= */ null, - /* capabilities= */ null, - /* passthrough= */ true, - /* hardwareAccelerated= */ false, - /* softwareOnly= */ true, - /* vendor= */ false, - /* forceDisableAdaptive= */ false, - /* forceSecure= */ false); - } - /** * Creates an instance. * @@ -173,36 +176,39 @@ public static MediaCodecInfo newInstance( mimeType, codecMimeType, capabilities, - /* passthrough= */ false, hardwareAccelerated, softwareOnly, vendor, - forceDisableAdaptive, - forceSecure); + /* adaptive= */ !forceDisableAdaptive + && capabilities != null + && isAdaptive(capabilities) + && !needsDisableAdaptationWorkaround(name), + /* tunneling= */ capabilities != null && isTunneling(capabilities), + /* secure= */ forceSecure || (capabilities != null && isSecure(capabilities))); } - private MediaCodecInfo( + @VisibleForTesting + /* package */ MediaCodecInfo( String name, - @Nullable String mimeType, - @Nullable String codecMimeType, + String mimeType, + String codecMimeType, @Nullable CodecCapabilities capabilities, - boolean passthrough, boolean hardwareAccelerated, boolean softwareOnly, boolean vendor, - boolean forceDisableAdaptive, - boolean forceSecure) { + boolean adaptive, + boolean tunneling, + boolean secure) { this.name = Assertions.checkNotNull(name); this.mimeType = mimeType; this.codecMimeType = codecMimeType; this.capabilities = capabilities; - this.passthrough = passthrough; this.hardwareAccelerated = hardwareAccelerated; this.softwareOnly = softwareOnly; this.vendor = vendor; - adaptive = !forceDisableAdaptive && capabilities != null && isAdaptive(capabilities); - tunneling = capabilities != null && isTunneling(capabilities); - secure = forceSecure || (capabilities != null && isSecure(capabilities)); + this.adaptive = adaptive; + this.tunneling = tunneling; + this.secure = secure; isVideo = MimeTypes.isVideo(mimeType); } @@ -217,7 +223,8 @@ public String toString() { * @return The profile levels supported by the decoder. 
*/ public CodecProfileLevel[] getProfileLevels() { - return capabilities == null || capabilities.profileLevels == null ? new CodecProfileLevel[0] + return capabilities == null || capabilities.profileLevels == null + ? new CodecProfileLevel[0] : capabilities.profileLevels; } @@ -229,20 +236,26 @@ public CodecProfileLevel[] getProfileLevels() { * @see CodecCapabilities#getMaxSupportedInstances() */ public int getMaxSupportedInstances() { - return (Util.SDK_INT < 23 || capabilities == null) - ? MAX_SUPPORTED_INSTANCES_UNKNOWN - : getMaxSupportedInstancesV23(capabilities); + if (Util.SDK_INT < 23 || capabilities == null) { + return MAX_SUPPORTED_INSTANCES_UNKNOWN; + } + return getMaxSupportedInstancesV23(capabilities); } /** - * Returns whether the decoder may support decoding the given {@code format}. + * Returns whether the decoder may support decoding the given {@code format} both functionally and + * performantly. * * @param format The input media format. * @return Whether the decoder may support decoding the given {@code format}. * @throws MediaCodecUtil.DecoderQueryException Thrown if an error occurs while querying decoders. */ public boolean isFormatSupported(Format format) throws MediaCodecUtil.DecoderQueryException { - if (!isCodecSupported(format)) { + if (!isSampleMimeTypeSupported(format)) { + return false; + } + + if (!isCodecProfileAndLevelSupported(format, /* checkPerformanceCapabilities= */ true)) { return false; } @@ -270,24 +283,23 @@ public boolean isFormatSupported(Format format) throws MediaCodecUtil.DecoderQue } /** - * Whether the decoder supports the codec of the given {@code format}. If there is insufficient - * information to decide, returns true. + * Returns whether the decoder may functionally support decoding the given {@code format}. * * @param format The input media format. - * @return True if the codec of the given {@code format} is supported by the decoder. + * @return Whether the decoder may functionally support decoding the given {@code format}. */ - public boolean isCodecSupported(Format format) { - if (format.codecs == null || mimeType == null) { - return true; - } - String codecMimeType = MimeTypes.getMediaMimeType(format.codecs); - if (codecMimeType == null) { - return true; - } - if (!mimeType.equals(codecMimeType)) { - logNoSupport("codec.mime " + format.codecs + ", " + codecMimeType); - return false; - } + public boolean isFormatFunctionallySupported(Format format) { + return isSampleMimeTypeSupported(format) + && isCodecProfileAndLevelSupported(format, /* checkPerformanceCapabilities= */ false); + } + + private boolean isSampleMimeTypeSupported(Format format) { + return mimeType.equals(format.sampleMimeType) + || mimeType.equals(MediaCodecUtil.getAlternativeCodecMimeType(format)); + } + + private boolean isCodecProfileAndLevelSupported( + Format format, boolean checkPerformanceCapabilities) { Pair codecProfileAndLevel = MediaCodecUtil.getCodecProfileAndLevel(format); if (codecProfileAndLevel == null) { // If we don't know any better, we assume that the profile and level are supported. @@ -295,13 +307,36 @@ public boolean isCodecSupported(Format format) { } int profile = codecProfileAndLevel.first; int level = codecProfileAndLevel.second; + if (MimeTypes.VIDEO_DOLBY_VISION.equals(format.sampleMimeType)) { + // If this codec is H264 or H265, we only support the Dolby Vision base layer and need to map + // the Dolby Vision profile to the corresponding base layer profile. Also assume all levels of + // this base layer profile are supported. 
+ if (MimeTypes.VIDEO_H264.equals(mimeType)) { + profile = CodecProfileLevel.AVCProfileHigh; + level = 0; + } else if (MimeTypes.VIDEO_H265.equals(mimeType)) { + profile = CodecProfileLevel.HEVCProfileMain10; + level = 0; + } + } + if (!isVideo && profile != CodecProfileLevel.AACObjectXHE) { // Some devices/builds underreport audio capabilities, so assume support except for xHE-AAC // which may not be widely supported. See https://github.com/google/ExoPlayer/issues/5145. return true; } - for (CodecProfileLevel capabilities : getProfileLevels()) { - if (capabilities.profile == profile && capabilities.level >= level) { + + CodecProfileLevel[] profileLevels = getProfileLevels(); + if (Util.SDK_INT <= 23 && MimeTypes.VIDEO_VP9.equals(mimeType) && profileLevels.length == 0) { + // Some older devices don't report profile levels for VP9. Estimate them using other data in + // the codec capabilities. + profileLevels = estimateLegacyVp9ProfileLevels(capabilities); + } + + for (CodecProfileLevel profileLevel : profileLevels) { + if (profileLevel.profile == profile + && (profileLevel.level >= level || !checkPerformanceCapabilities) + && !needsProfileExcludedWorkaround(mimeType, profile)) { return true; } } @@ -322,11 +357,12 @@ public boolean isHdr10PlusOutOfBandMetadataSupported() { } /** - * Returns whether it may be possible to adapt to playing a different format when the codec is - * configured to play media in the specified {@code format}. For adaptation to succeed, the codec - * must also be configured with appropriate maximum values and {@link - * #isSeamlessAdaptationSupported(Format, Format, boolean)} must return {@code true} for the - * old/new formats. + * Returns whether it may be possible to adapt an instance of this decoder to playing a different + * format when the codec is configured to play media in the specified {@code format}. + * + *
<p>
      For adaptation to succeed, the codec must also be configured with appropriate maximum values + * and {@link #isSeamlessAdaptationSupported(Format, Format, boolean)} must return {@code true} + * for the old/new formats. * * @param format The format of media for which the decoder will be configured. * @return Whether adaptation may be possible @@ -335,65 +371,141 @@ public boolean isSeamlessAdaptationSupported(Format format) { if (isVideo) { return adaptive; } else { - Pair codecProfileLevel = MediaCodecUtil.getCodecProfileAndLevel(format); - return codecProfileLevel != null && codecProfileLevel.first == CodecProfileLevel.AACObjectXHE; + Pair profileLevel = MediaCodecUtil.getCodecProfileAndLevel(format); + return profileLevel != null && profileLevel.first == CodecProfileLevel.AACObjectXHE; } } /** - * Returns whether it is possible to adapt the decoder seamlessly from {@code oldFormat} to {@code - * newFormat}. If {@code newFormat} may not be completely populated, pass {@code false} for {@code - * isNewFormatComplete}. + * Returns whether it is possible to adapt an instance of this decoder seamlessly from {@code + * oldFormat} to {@code newFormat}. If {@code newFormat} may not be completely populated, pass + * {@code false} for {@code isNewFormatComplete}. + * + *
<p>
      For adaptation to succeed, the codec must also be configured with maximum values that are + * compatible with the new format. * * @param oldFormat The format being decoded. * @param newFormat The new format. * @param isNewFormatComplete Whether {@code newFormat} is populated with format-specific * metadata. * @return Whether it is possible to adapt the decoder seamlessly. + * @deprecated Use {@link #canReuseCodec}. */ + @Deprecated public boolean isSeamlessAdaptationSupported( Format oldFormat, Format newFormat, boolean isNewFormatComplete) { + if (!isNewFormatComplete && oldFormat.colorInfo != null && newFormat.colorInfo == null) { + newFormat = newFormat.buildUpon().setColorInfo(oldFormat.colorInfo).build(); + } + @DecoderReuseResult int reuseResult = canReuseCodec(oldFormat, newFormat).result; + return reuseResult == REUSE_RESULT_YES_WITH_RECONFIGURATION + || reuseResult == REUSE_RESULT_YES_WITHOUT_RECONFIGURATION; + } + + /** + * Evaluates whether it's possible to reuse an instance of this decoder that's currently decoding + * {@code oldFormat} to decode {@code newFormat} instead. + * + *
<p>
      For adaptation to succeed, the codec must also be configured with maximum values that are + * compatible with the new format. + * + * @param oldFormat The format being decoded. + * @param newFormat The new format. + * @return The result of the evaluation. + */ + public DecoderReuseEvaluation canReuseCodec(Format oldFormat, Format newFormat) { + @DecoderDiscardReasons int discardReasons = 0; + if (!Util.areEqual(oldFormat.sampleMimeType, newFormat.sampleMimeType)) { + discardReasons |= DISCARD_REASON_MIME_TYPE_CHANGED; + } + if (isVideo) { - return oldFormat.sampleMimeType.equals(newFormat.sampleMimeType) - && oldFormat.rotationDegrees == newFormat.rotationDegrees - && (adaptive - || (oldFormat.width == newFormat.width && oldFormat.height == newFormat.height)) - && ((!isNewFormatComplete && newFormat.colorInfo == null) - || Util.areEqual(oldFormat.colorInfo, newFormat.colorInfo)); + if (oldFormat.rotationDegrees != newFormat.rotationDegrees) { + discardReasons |= DISCARD_REASON_VIDEO_ROTATION_CHANGED; + } + if (!adaptive + && (oldFormat.width != newFormat.width || oldFormat.height != newFormat.height)) { + discardReasons |= DISCARD_REASON_VIDEO_RESOLUTION_CHANGED; + } + if (!Util.areEqual(oldFormat.colorInfo, newFormat.colorInfo)) { + discardReasons |= DISCARD_REASON_VIDEO_COLOR_INFO_CHANGED; + } + if (needsAdaptationReconfigureWorkaround(name) + && !oldFormat.initializationDataEquals(newFormat)) { + discardReasons |= DISCARD_REASON_WORKAROUND; + } + + if (discardReasons == 0) { + return new DecoderReuseEvaluation( + name, + oldFormat, + newFormat, + oldFormat.initializationDataEquals(newFormat) + ? REUSE_RESULT_YES_WITHOUT_RECONFIGURATION + : REUSE_RESULT_YES_WITH_RECONFIGURATION, + /* discardReasons= */ 0); + } } else { - if (!MimeTypes.AUDIO_AAC.equals(mimeType) - || !oldFormat.sampleMimeType.equals(newFormat.sampleMimeType) - || oldFormat.channelCount != newFormat.channelCount - || oldFormat.sampleRate != newFormat.sampleRate) { - return false; + if (oldFormat.channelCount != newFormat.channelCount) { + discardReasons |= DISCARD_REASON_AUDIO_CHANNEL_COUNT_CHANGED; } - // Check the codec profile levels support adaptation. - Pair oldCodecProfileLevel = - MediaCodecUtil.getCodecProfileAndLevel(oldFormat); - Pair newCodecProfileLevel = - MediaCodecUtil.getCodecProfileAndLevel(newFormat); - if (oldCodecProfileLevel == null || newCodecProfileLevel == null) { - return false; + if (oldFormat.sampleRate != newFormat.sampleRate) { + discardReasons |= DISCARD_REASON_AUDIO_SAMPLE_RATE_CHANGED; + } + if (oldFormat.pcmEncoding != newFormat.pcmEncoding) { + discardReasons |= DISCARD_REASON_AUDIO_ENCODING_CHANGED; + } + + // Check whether we're adapting between two xHE-AAC formats, for which adaptation is possible + // without reconfiguration or flushing. 
+ if (discardReasons == 0 && MimeTypes.AUDIO_AAC.equals(mimeType)) { + @Nullable + Pair oldCodecProfileLevel = + MediaCodecUtil.getCodecProfileAndLevel(oldFormat); + @Nullable + Pair newCodecProfileLevel = + MediaCodecUtil.getCodecProfileAndLevel(newFormat); + if (oldCodecProfileLevel != null && newCodecProfileLevel != null) { + int oldProfile = oldCodecProfileLevel.first; + int newProfile = newCodecProfileLevel.first; + if (oldProfile == CodecProfileLevel.AACObjectXHE + && newProfile == CodecProfileLevel.AACObjectXHE) { + return new DecoderReuseEvaluation( + name, + oldFormat, + newFormat, + REUSE_RESULT_YES_WITHOUT_RECONFIGURATION, + /* discardReasons= */ 0); + } + } + } + + if (!oldFormat.initializationDataEquals(newFormat)) { + discardReasons |= DISCARD_REASON_INITIALIZATION_DATA_CHANGED; + } + if (needsAdaptationFlushWorkaround(mimeType)) { + discardReasons |= DISCARD_REASON_WORKAROUND; + } + + if (discardReasons == 0) { + return new DecoderReuseEvaluation( + name, oldFormat, newFormat, REUSE_RESULT_YES_WITH_FLUSH, /* discardReasons= */ 0); } - int oldProfile = oldCodecProfileLevel.first; - int newProfile = newCodecProfileLevel.first; - return oldProfile == CodecProfileLevel.AACObjectXHE - && newProfile == CodecProfileLevel.AACObjectXHE; } + + return new DecoderReuseEvaluation(name, oldFormat, newFormat, REUSE_RESULT_NO, discardReasons); } /** * Whether the decoder supports video with a given width, height and frame rate. * - *
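An illustrative aside, not part of the patch: a minimal sketch of how a renderer-side caller might act on the DecoderReuseEvaluation returned by canReuseCodec above. The handleEvaluation frame is hypothetical; only the result constants and the result/discardReasons fields come from the classes referenced in this diff.

import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation;
import com.google.android.exoplayer2.mediacodec.MediaCodecInfo;

final class ReuseDecisionSketch {
  static void handleEvaluation(MediaCodecInfo codecInfo, Format oldFormat, Format newFormat) {
    DecoderReuseEvaluation evaluation = codecInfo.canReuseCodec(oldFormat, newFormat);
    switch (evaluation.result) {
      case DecoderReuseEvaluation.REUSE_RESULT_YES_WITHOUT_RECONFIGURATION:
        break; // keep feeding the existing codec as-is
      case DecoderReuseEvaluation.REUSE_RESULT_YES_WITH_RECONFIGURATION:
        break; // prefix the next input buffer with the new format's configuration data
      case DecoderReuseEvaluation.REUSE_RESULT_YES_WITH_FLUSH:
        break; // flush the codec, then continue with the new format
      case DecoderReuseEvaluation.REUSE_RESULT_NO:
      default:
        break; // release and re-initialize; evaluation.discardReasons records why
    }
  }
}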
<p>
      Must not be called if the device SDK version is less than 21. - * * @param width Width in pixels. * @param height Height in pixels. * @param frameRate Optional frame rate in frames per second. Ignored if set to {@link * Format#NO_VALUE} or any value less than or equal to 0. * @return Whether the decoder supports video with the given width, height and frame rate. */ - @TargetApi(21) + @RequiresApi(21) public boolean isVideoSizeAndRateSupportedV21(int width, int height, double frameRate) { if (capabilities == null) { logNoSupport("sizeAndRate.caps"); @@ -404,14 +516,28 @@ public boolean isVideoSizeAndRateSupportedV21(int width, int height, double fram logNoSupport("sizeAndRate.vCaps"); return false; } + + if (Util.SDK_INT >= 29) { + @PerformancePointCoverageResult + int evaluation = + Api29.areResolutionAndFrameRateCovered(videoCapabilities, width, height, frameRate); + if (evaluation == COVERAGE_RESULT_YES) { + return true; + } else if (evaluation == COVERAGE_RESULT_NO) { + logNoSupport("sizeAndRate.cover, " + width + "x" + height + "@" + frameRate); + return false; + } + // COVERAGE_RESULT_NO_EMPTY_LIST falls through to API 21+ code below + } + if (!areSizeAndRateSupportedV21(videoCapabilities, width, height, frameRate)) { if (width >= height - || !enableRotatedVerticalResolutionWorkaround(name) + || !needsRotatedVerticalResolutionWorkaround(name) || !areSizeAndRateSupportedV21(videoCapabilities, height, width, frameRate)) { - logNoSupport("sizeAndRate.support, " + width + "x" + height + "x" + frameRate); + logNoSupport("sizeAndRate.support, " + width + "x" + height + "@" + frameRate); return false; } - logAssumedSupport("sizeAndRate.rotated, " + width + "x" + height + "x" + frameRate); + logAssumedSupport("sizeAndRate.rotated, " + width + "x" + height + "@" + frameRate); } return true; } @@ -419,8 +545,8 @@ public boolean isVideoSizeAndRateSupportedV21(int width, int height, double fram /** * Returns the smallest video size greater than or equal to a specified size that also satisfies * the {@link MediaCodec}'s width and height alignment requirements. - *
<p>
      - * Must not be called if the device SDK version is less than 21. + * + *
<p>
      Must not be called if the device SDK version is less than 21. * * @param width Width in pixels. * @param height Height in pixels. @@ -428,7 +554,8 @@ public boolean isVideoSizeAndRateSupportedV21(int width, int height, double fram * the {@link MediaCodec}'s width and height alignment requirements, or null if not a video * codec. */ - @TargetApi(21) + @Nullable + @RequiresApi(21) public Point alignVideoSizeV21(int width, int height) { if (capabilities == null) { return null; @@ -442,13 +569,13 @@ public Point alignVideoSizeV21(int width, int height) { /** * Whether the decoder supports audio with a given sample rate. - *
<p>
      - * Must not be called if the device SDK version is less than 21. + * + *
<p>
      Must not be called if the device SDK version is less than 21. * * @param sampleRate The sample rate in Hz. * @return Whether the decoder supports audio with the given sample rate. */ - @TargetApi(21) + @RequiresApi(21) public boolean isAudioSampleRateSupportedV21(int sampleRate) { if (capabilities == null) { logNoSupport("sampleRate.caps"); @@ -468,13 +595,13 @@ public boolean isAudioSampleRateSupportedV21(int sampleRate) { /** * Whether the decoder supports audio with a given channel count. - *
<p>
      - * Must not be called if the device SDK version is less than 21. + * + *
<p>
      Must not be called if the device SDK version is less than 21. * * @param channelCount The channel count. * @return Whether the decoder supports audio with the given channel count. */ - @TargetApi(21) + @RequiresApi(21) public boolean isAudioChannelCountSupportedV21(int channelCount) { if (capabilities == null) { logNoSupport("channelCount.caps"); @@ -485,8 +612,8 @@ public boolean isAudioChannelCountSupportedV21(int channelCount) { logNoSupport("channelCount.aCaps"); return false; } - int maxInputChannelCount = adjustMaxInputChannelCount(name, mimeType, - audioCapabilities.getMaxInputChannelCount()); + int maxInputChannelCount = + adjustMaxInputChannelCount(name, mimeType, audioCapabilities.getMaxInputChannelCount()); if (maxInputChannelCount < channelCount) { logNoSupport("channelCount.support, " + channelCount); return false; @@ -495,13 +622,31 @@ public boolean isAudioChannelCountSupportedV21(int channelCount) { } private void logNoSupport(String message) { - Log.d(TAG, "NoSupport [" + message + "] [" + name + ", " + mimeType + "] [" - + Util.DEVICE_DEBUG_INFO + "]"); + Log.d( + TAG, + "NoSupport [" + + message + + "] [" + + name + + ", " + + mimeType + + "] [" + + Util.DEVICE_DEBUG_INFO + + "]"); } private void logAssumedSupport(String message) { - Log.d(TAG, "AssumedSupport [" + message + "] [" + name + ", " + mimeType + "] [" - + Util.DEVICE_DEBUG_INFO + "]"); + Log.d( + TAG, + "AssumedSupport [" + + message + + "] [" + + name + + ", " + + mimeType + + "] [" + + Util.DEVICE_DEBUG_INFO + + "]"); } private static int adjustMaxInputChannelCount(String name, String mimeType, int maxChannelCount) { @@ -535,8 +680,15 @@ private static int adjustMaxInputChannelCount(String name, String mimeType, int // Default to the platform limit, which is 30. assumedMaxChannelCount = 30; } - Log.w(TAG, "AssumedMaxChannelAdjustment: " + name + ", [" + maxChannelCount + " to " - + assumedMaxChannelCount + "]"); + Log.w( + TAG, + "AssumedMaxChannelAdjustment: " + + name + + ", [" + + maxChannelCount + + " to " + + assumedMaxChannelCount + + "]"); return assumedMaxChannelCount; } @@ -544,7 +696,7 @@ private static boolean isAdaptive(CodecCapabilities capabilities) { return Util.SDK_INT >= 19 && isAdaptiveV19(capabilities); } - @TargetApi(19) + @RequiresApi(19) private static boolean isAdaptiveV19(CodecCapabilities capabilities) { return capabilities.isFeatureSupported(CodecCapabilities.FEATURE_AdaptivePlayback); } @@ -553,7 +705,7 @@ private static boolean isTunneling(CodecCapabilities capabilities) { return Util.SDK_INT >= 21 && isTunnelingV21(capabilities); } - @TargetApi(21) + @RequiresApi(21) private static boolean isTunnelingV21(CodecCapabilities capabilities) { return capabilities.isFeatureSupported(CodecCapabilities.FEATURE_TunneledPlayback); } @@ -562,14 +714,14 @@ private static boolean isSecure(CodecCapabilities capabilities) { return Util.SDK_INT >= 21 && isSecureV21(capabilities); } - @TargetApi(21) + @RequiresApi(21) private static boolean isSecureV21(CodecCapabilities capabilities) { return capabilities.isFeatureSupported(CodecCapabilities.FEATURE_SecurePlayback); } - @TargetApi(21) - private static boolean areSizeAndRateSupportedV21(VideoCapabilities capabilities, int width, - int height, double frameRate) { + @RequiresApi(21) + private static boolean areSizeAndRateSupportedV21( + VideoCapabilities capabilities, int width, int height, double frameRate) { // Don't ever fail due to alignment. See: https://github.com/google/ExoPlayer/issues/6551. 
Point alignedSize = alignVideoSizeV21(capabilities, width, height); width = alignedSize.x; @@ -588,7 +740,7 @@ private static boolean areSizeAndRateSupportedV21(VideoCapabilities capabilities } } - @TargetApi(21) + @RequiresApi(21) private static Point alignVideoSizeV21(VideoCapabilities capabilities, int width, int height) { int widthAlignment = capabilities.getWidthAlignment(); int heightAlignment = capabilities.getHeightAlignment(); @@ -597,11 +749,105 @@ private static Point alignVideoSizeV21(VideoCapabilities capabilities, int width Util.ceilDivide(height, heightAlignment) * heightAlignment); } - @TargetApi(23) + @RequiresApi(23) private static int getMaxSupportedInstancesV23(CodecCapabilities capabilities) { return capabilities.getMaxSupportedInstances(); } + /** + * Called on devices with {@link Util#SDK_INT} 23 and below, for VP9 decoders whose {@link + * CodecCapabilities} do not correctly report profile levels. The returned {@link + * CodecProfileLevel CodecProfileLevels} are estimated based on other data in the {@link + * CodecCapabilities}. + * + * @param capabilities The {@link CodecCapabilities} for a VP9 decoder, or {@code null} if not + * known. + * @return The estimated {@link CodecProfileLevel CodecProfileLevels} for the decoder. + */ + private static CodecProfileLevel[] estimateLegacyVp9ProfileLevels( + @Nullable CodecCapabilities capabilities) { + int maxBitrate = 0; + if (capabilities != null) { + @Nullable VideoCapabilities videoCapabilities = capabilities.getVideoCapabilities(); + if (videoCapabilities != null) { + maxBitrate = videoCapabilities.getBitrateRange().getUpper(); + } + } + + // Values taken from https://www.webmproject.org/vp9/levels. + int level; + if (maxBitrate >= 180_000_000) { + level = CodecProfileLevel.VP9Level52; + } else if (maxBitrate >= 120_000_000) { + level = CodecProfileLevel.VP9Level51; + } else if (maxBitrate >= 60_000_000) { + level = CodecProfileLevel.VP9Level5; + } else if (maxBitrate >= 30_000_000) { + level = CodecProfileLevel.VP9Level41; + } else if (maxBitrate >= 18_000_000) { + level = CodecProfileLevel.VP9Level4; + } else if (maxBitrate >= 12_000_000) { + level = CodecProfileLevel.VP9Level31; + } else if (maxBitrate >= 7_200_000) { + level = CodecProfileLevel.VP9Level3; + } else if (maxBitrate >= 3_600_000) { + level = CodecProfileLevel.VP9Level21; + } else if (maxBitrate >= 1_800_000) { + level = CodecProfileLevel.VP9Level2; + } else if (maxBitrate >= 800_000) { + level = CodecProfileLevel.VP9Level11; + } else { // Assume level 1 is always supported. + level = CodecProfileLevel.VP9Level1; + } + + CodecProfileLevel profileLevel = new CodecProfileLevel(); + // Since this method is for legacy devices only, assume that only profile 0 is supported. + profileLevel.profile = CodecProfileLevel.VP9Profile0; + profileLevel.level = level; + + return new CodecProfileLevel[] {profileLevel}; + } + + /** + * Returns whether the decoder is known to fail when adapting, despite advertising itself as an + * adaptive decoder. + * + * @param name The decoder name. + * @return True if the decoder is known to fail when adapting. + */ + private static boolean needsDisableAdaptationWorkaround(String name) { + return Util.SDK_INT <= 22 + && ("ODROID-XU3".equals(Util.MODEL) || "Nexus 10".equals(Util.MODEL)) + && ("OMX.Exynos.AVC.Decoder".equals(name) || "OMX.Exynos.AVC.Decoder.secure".equals(name)); + } + + /** + * Returns whether the decoder is known to fail when an attempt is made to reconfigure it with a + * new format's configuration data. 
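A small worked example (with an assumed bitrate ceiling) of the legacy VP9 bucket lookup in estimateLegacyVp9ProfileLevels above, reduced to plain Java so it runs off-device.

final class Vp9LevelEstimateSketch {
  public static void main(String[] args) {
    int maxBitrate = 20_000_000; // assumed value from VideoCapabilities.getBitrateRange().getUpper()
    // Relevant slice of the threshold table above: 18_000_000 <= 20_000_000 < 30_000_000.
    String level =
        maxBitrate >= 30_000_000
            ? "VP9Level41 or higher"
            : maxBitrate >= 18_000_000 ? "VP9Level4" : "VP9Level31 or lower";
    System.out.println("Estimated: VP9Profile0 / " + level); // legacy path always assumes profile 0
  }
}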
+ * + * @param name The name of the decoder. + * @return Whether the decoder is known to fail when an attempt is made to reconfigure it with a + * new format's configuration data. + */ + private static boolean needsAdaptationReconfigureWorkaround(String name) { + return Util.MODEL.startsWith("SM-T230") && "OMX.MARVELL.VIDEO.HW.CODA7542DECODER".equals(name); + } + + /** + * Returns whether the decoder is known to behave incorrectly if flushed to adapt to a new format. + * + * @param mimeType The name of the MIME type. + * @return Whether the decoder is known to to behave incorrectly if flushed to adapt to a new + * format. + */ + private static boolean needsAdaptationFlushWorkaround(String mimeType) { + // For Opus, we don't flush and reuse the codec because the decoder may discard samples after + // flushing, which would result in audio being dropped just after a stream change (see + // [Internal: b/143450854]). For other formats, we allow reuse after flushing if the codec + // initialization data is unchanged. + return MimeTypes.AUDIO_OPUS.equals(mimeType); + } + /** * Capabilities are known to be inaccurately reported for vertical resolutions on some devices. * [Internal ref: b/31387661]. When this workaround is enabled, we also check whether the @@ -611,11 +857,65 @@ private static int getMaxSupportedInstancesV23(CodecCapabilities capabilities) { * @param name The name of the codec. * @return Whether to enable the workaround. */ - private static final boolean enableRotatedVerticalResolutionWorkaround(String name) { + private static final boolean needsRotatedVerticalResolutionWorkaround(String name) { if ("OMX.MTK.VIDEO.DECODER.HEVC".equals(name) && "mcv5a".equals(Util.DEVICE)) { // See https://github.com/google/ExoPlayer/issues/6612. return false; } return true; } + + /** + * Whether a profile is excluded from the list of supported profiles. This may happen when a + * device declares support for a profile it doesn't actually support. 
+ */ + private static boolean needsProfileExcludedWorkaround(String mimeType, int profile) { + // See https://github.com/google/ExoPlayer/issues/3537 + return MimeTypes.VIDEO_H265.equals(mimeType) + && CodecProfileLevel.HEVCProfileMain10 == profile + && ("sailfish".equals(Util.DEVICE) || "marlin".equals(Util.DEVICE)); + } + + /** Possible outcomes of evaluating PerformancePoint coverage */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({COVERAGE_RESULT_YES, COVERAGE_RESULT_NO, COVERAGE_RESULT_NO_EMPTY_LIST}) + private @interface PerformancePointCoverageResult {} + + /** The decoder has a PerformancePoint that covers the resolution and frame rate */ + private static final int COVERAGE_RESULT_YES = 2; + /** + * The decoder has at least one PerformancePoint, but none of them cover the resolution and frame + * rate + */ + private static final int COVERAGE_RESULT_NO = 1; + /** The VideoCapabilities does not contain any PerformancePoints */ + private static final int COVERAGE_RESULT_NO_EMPTY_LIST = 0; + + @RequiresApi(29) + private static final class Api29 { + @DoNotInline + public static @PerformancePointCoverageResult int areResolutionAndFrameRateCovered( + VideoCapabilities videoCapabilities, int width, int height, double frameRate) { + List performancePointList = + videoCapabilities.getSupportedPerformancePoints(); + if (performancePointList == null || performancePointList.isEmpty()) { + return COVERAGE_RESULT_NO_EMPTY_LIST; + } + + // Round frame rate down to to avoid situations where a range check in + // covers fails due to slightly exceeding the limits for a standard format + // (e.g., 1080p at 30 fps). [Internal ref: b/134706676] + PerformancePoint targetPerformancePoint = + new PerformancePoint(width, height, (int) frameRate); + + for (int i = 0; i < performancePointList.size(); i++) { + if (performancePointList.get(i).covers(targetPerformancePoint)) { + return COVERAGE_RESULT_YES; + } + } + return COVERAGE_RESULT_NO; + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecRenderer.java index e1026ed196..264b31a9a9 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecRenderer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecRenderer.java @@ -15,6 +15,22 @@ */ package com.google.android.exoplayer2.mediacodec; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_DRM_SESSION_CHANGED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_OPERATING_RATE_CHANGED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_REUSE_NOT_IMPLEMENTED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_WORKAROUND; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_NO; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_YES_WITHOUT_RECONFIGURATION; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_YES_WITH_FLUSH; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_YES_WITH_RECONFIGURATION; +import static com.google.android.exoplayer2.source.SampleStream.FLAG_OMIT_SAMPLE_DATA; +import static com.google.android.exoplayer2.source.SampleStream.FLAG_PEEK; +import 
static com.google.android.exoplayer2.source.SampleStream.FLAG_REQUIRE_FORMAT; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static java.lang.Math.max; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.annotation.TargetApi; import android.media.MediaCodec; import android.media.MediaCodec.CodecException; @@ -22,26 +38,39 @@ import android.media.MediaCrypto; import android.media.MediaCryptoException; import android.media.MediaFormat; +import android.media.metrics.LogSessionId; import android.os.Bundle; import android.os.SystemClock; +import androidx.annotation.CallSuper; import androidx.annotation.CheckResult; +import androidx.annotation.DoNotInline; import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.BaseRenderer; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.FormatHolder; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.decoder.CryptoConfig; import com.google.android.exoplayer2.decoder.DecoderCounters; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; +import com.google.android.exoplayer2.decoder.DecoderInputBuffer.InsufficientCapacityException; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DecoderDiscardReasons; import com.google.android.exoplayer2.drm.DrmSession; import com.google.android.exoplayer2.drm.DrmSession.DrmSessionException; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.drm.FrameworkMediaCrypto; +import com.google.android.exoplayer2.drm.FrameworkCryptoConfig; import com.google.android.exoplayer2.mediacodec.MediaCodecUtil.DecoderQueryException; import com.google.android.exoplayer2.source.MediaPeriod; +import com.google.android.exoplayer2.source.SampleStream; +import com.google.android.exoplayer2.source.SampleStream.ReadDataResult; +import com.google.android.exoplayer2.source.SampleStream.ReadFlags; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.NalUnitUtil; import com.google.android.exoplayer2.util.TimedValueQueue; import com.google.android.exoplayer2.util.TraceUtil; @@ -49,14 +78,14 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.nio.ByteBuffer; +import java.nio.ByteOrder; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.List; -/** - * An abstract renderer that uses {@link MediaCodec} to decode samples for rendering. - */ +/** An abstract renderer that uses {@link MediaCodec} to decode samples for rendering. */ public abstract class MediaCodecRenderer extends BaseRenderer { /** Thrown when a failure occurs instantiating a decoder. 
*/ @@ -66,14 +95,10 @@ public static class DecoderInitializationException extends Exception { private static final int NO_SUITABLE_DECODER_ERROR = CUSTOM_ERROR_CODE_BASE + 1; private static final int DECODER_QUERY_ERROR = CUSTOM_ERROR_CODE_BASE + 2; - /** - * The mime type for which a decoder was being initialized. - */ + /** The mime type for which a decoder was being initialized. */ public final String mimeType; - /** - * Whether it was required that the decoder support a secure output path. - */ + /** Whether it was required that the decoder support a secure output path. */ public final boolean secureDecoderRequired; /** @@ -92,8 +117,8 @@ public static class DecoderInitializationException extends Exception { */ @Nullable public final DecoderInitializationException fallbackDecoderInitializationException; - public DecoderInitializationException(Format format, Throwable cause, - boolean secureDecoderRequired, int errorCode) { + public DecoderInitializationException( + Format format, @Nullable Throwable cause, boolean secureDecoderRequired, int errorCode) { this( "Decoder init failed: [" + errorCode + "], " + format, cause, @@ -106,7 +131,7 @@ public DecoderInitializationException(Format format, Throwable cause, public DecoderInitializationException( Format format, - Throwable cause, + @Nullable Throwable cause, boolean secureDecoderRequired, MediaCodecInfo mediaCodecInfo) { this( @@ -121,7 +146,7 @@ public DecoderInitializationException( private DecoderInitializationException( String message, - Throwable cause, + @Nullable Throwable cause, String mimeType, boolean secureDecoderRequired, @Nullable MediaCodecInfo mediaCodecInfo, @@ -148,8 +173,9 @@ private DecoderInitializationException copyWithFallbackException( fallbackException); } - @TargetApi(21) - private static String getDiagnosticInfoV21(Throwable cause) { + @RequiresApi(21) + @Nullable + private static String getDiagnosticInfoV21(@Nullable Throwable cause) { if (cause instanceof CodecException) { return ((CodecException) cause).getDiagnosticInfo(); } @@ -164,30 +190,6 @@ private static String buildCustomDiagnosticInfo(int errorCode) { } } - /** Thrown when a failure occurs in the decoder. */ - public static class DecoderException extends Exception { - - /** The {@link MediaCodecInfo} of the decoder that failed. Null if unknown. */ - @Nullable public final MediaCodecInfo codecInfo; - - /** An optional developer-readable diagnostic information string. May be null. */ - @Nullable public final String diagnosticInfo; - - public DecoderException(Throwable cause, @Nullable MediaCodecInfo codecInfo) { - super("Decoder failed: " + (codecInfo == null ? null : codecInfo.name), cause); - this.codecInfo = codecInfo; - diagnosticInfo = Util.SDK_INT >= 21 ? getDiagnosticInfoV21(cause) : null; - } - - @TargetApi(21) - private static String getDiagnosticInfoV21(Throwable cause) { - if (cause instanceof CodecException) { - return ((CodecException) cause).getDiagnosticInfo(); - } - return null; - } - } - /** Indicates no codec operating rate should be set. */ protected static final float CODEC_OPERATING_RATE_UNSET = -1; @@ -197,52 +199,29 @@ private static String getDiagnosticInfoV21(Throwable cause) { * If the {@link MediaCodec} is hotswapped (i.e. replaced during playback), this is the period of * time during which {@link #isReady()} will report true regardless of whether the new codec has * output frames that are ready to be rendered. - *
<p>
      - * This allows codec hotswapping to be performed seamlessly, without interrupting the playback of - * other renderers, provided the new codec is able to decode some frames within this time period. + * + *
<p>
      This allows codec hotswapping to be performed seamlessly, without interrupting the playback + * of other renderers, provided the new codec is able to decode some frames within this time + * period. */ private static final long MAX_CODEC_HOTSWAP_TIME_MS = 1000; - /** - * The possible return values for {@link #canKeepCodec(MediaCodec, MediaCodecInfo, Format, - * Format)}. - */ - @Documented - @Retention(RetentionPolicy.SOURCE) - @IntDef({ - KEEP_CODEC_RESULT_NO, - KEEP_CODEC_RESULT_YES_WITH_FLUSH, - KEEP_CODEC_RESULT_YES_WITH_RECONFIGURATION, - KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION - }) - protected @interface KeepCodecResult {} - /** The codec cannot be kept. */ - protected static final int KEEP_CODEC_RESULT_NO = 0; - /** The codec can be kept, but must be flushed. */ - protected static final int KEEP_CODEC_RESULT_YES_WITH_FLUSH = 1; - /** - * The codec can be kept. It does not need to be flushed, but must be reconfigured by prefixing - * the next input buffer with the new format's configuration data. - */ - protected static final int KEEP_CODEC_RESULT_YES_WITH_RECONFIGURATION = 2; - /** The codec can be kept. It does not need to be flushed and no reconfiguration is required. */ - protected static final int KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION = 3; + // Generally there is zero or one pending output stream offset. We track more offsets to allow for + // pending output streams that have fewer frames than the codec latency. + private static final int MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT = 10; @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ RECONFIGURATION_STATE_NONE, RECONFIGURATION_STATE_WRITE_PENDING, RECONFIGURATION_STATE_QUEUE_PENDING }) private @interface ReconfigurationState {} - /** - * There is no pending adaptive reconfiguration work. - */ + /** There is no pending adaptive reconfiguration work. */ private static final int RECONFIGURATION_STATE_NONE = 0; - /** - * Codec configuration data needs to be written into the next buffer. - */ + /** Codec configuration data needs to be written into the next buffer. */ private static final int RECONFIGURATION_STATE_WRITE_PENDING = 1; /** * Codec configuration data has been written into the next buffer, but that buffer still needs to @@ -252,6 +231,7 @@ private static String getDiagnosticInfoV21(Throwable cause) { @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({DRAIN_STATE_NONE, DRAIN_STATE_SIGNAL_END_OF_STREAM, DRAIN_STATE_WAIT_END_OF_STREAM}) private @interface DrainState {} /** The codec is not being drained. */ @@ -263,10 +243,11 @@ private static String getDiagnosticInfoV21(Throwable cause) { @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ DRAIN_ACTION_NONE, DRAIN_ACTION_FLUSH, - DRAIN_ACTION_UPDATE_DRM_SESSION, + DRAIN_ACTION_FLUSH_AND_UPDATE_DRM_SESSION, DRAIN_ACTION_REINITIALIZE }) private @interface DrainAction {} @@ -275,29 +256,27 @@ private static String getDiagnosticInfoV21(Throwable cause) { /** The codec should be flushed. */ private static final int DRAIN_ACTION_FLUSH = 1; /** The codec should be flushed and updated to use the pending DRM session. */ - private static final int DRAIN_ACTION_UPDATE_DRM_SESSION = 2; + private static final int DRAIN_ACTION_FLUSH_AND_UPDATE_DRM_SESSION = 2; /** The codec should be reinitialized. 
*/ private static final int DRAIN_ACTION_REINITIALIZE = 3; @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ ADAPTATION_WORKAROUND_MODE_NEVER, ADAPTATION_WORKAROUND_MODE_SAME_RESOLUTION, ADAPTATION_WORKAROUND_MODE_ALWAYS }) private @interface AdaptationWorkaroundMode {} - /** - * The adaptation workaround is never used. - */ + + /** The adaptation workaround is never used. */ private static final int ADAPTATION_WORKAROUND_MODE_NEVER = 0; /** * The adaptation workaround is used when adapting between formats of the same resolution only. */ private static final int ADAPTATION_WORKAROUND_MODE_SAME_RESOLUTION = 1; - /** - * The adaptation workaround is always used when adapting between formats. - */ + /** The adaptation workaround is always used when adapting between formats. */ private static final int ADAPTATION_WORKAROUND_MODE_ALWAYS = 2; /** @@ -314,55 +293,63 @@ private static String getDiagnosticInfoV21(Throwable cause) { private static final int ADAPTATION_WORKAROUND_SLICE_WIDTH_HEIGHT = 32; + private final MediaCodecAdapter.Factory codecAdapterFactory; private final MediaCodecSelector mediaCodecSelector; - @Nullable private final DrmSessionManager drmSessionManager; - private final boolean playClearSamplesWithoutKeys; private final boolean enableDecoderFallback; private final float assumedMinimumCodecOperatingRate; + private final DecoderInputBuffer noDataBuffer; private final DecoderInputBuffer buffer; - private final DecoderInputBuffer flagsOnlyBuffer; + private final DecoderInputBuffer bypassSampleBuffer; + private final BatchBuffer bypassBatchBuffer; private final TimedValueQueue formatQueue; private final ArrayList decodeOnlyPresentationTimestamps; private final MediaCodec.BufferInfo outputBufferInfo; + private final long[] pendingOutputStreamStartPositionsUs; + private final long[] pendingOutputStreamOffsetsUs; + private final long[] pendingOutputStreamSwitchTimesUs; - private boolean drmResourcesAcquired; @Nullable private Format inputFormat; - private Format outputFormat; - @Nullable private DrmSession codecDrmSession; - @Nullable private DrmSession sourceDrmSession; + @Nullable private Format outputFormat; + @Nullable private DrmSession codecDrmSession; + @Nullable private DrmSession sourceDrmSession; @Nullable private MediaCrypto mediaCrypto; private boolean mediaCryptoRequiresSecureDecoder; private long renderTimeLimitMs; - private float rendererOperatingRate; - @Nullable private MediaCodec codec; - @Nullable private Format codecFormat; + private float currentPlaybackSpeed; + private float targetPlaybackSpeed; + @Nullable private MediaCodecAdapter codec; + @Nullable private Format codecInputFormat; + @Nullable private MediaFormat codecOutputMediaFormat; + private boolean codecOutputMediaFormatChanged; private float codecOperatingRate; @Nullable private ArrayDeque availableCodecInfos; @Nullable private DecoderInitializationException preferredDecoderInitializationException; @Nullable private MediaCodecInfo codecInfo; - @AdaptationWorkaroundMode private int codecAdaptationWorkaroundMode; - private boolean codecNeedsReconfigureWorkaround; + private @AdaptationWorkaroundMode int codecAdaptationWorkaroundMode; private boolean codecNeedsDiscardToSpsWorkaround; private boolean codecNeedsFlushWorkaround; private boolean codecNeedsSosFlushWorkaround; private boolean codecNeedsEosFlushWorkaround; private boolean codecNeedsEosOutputExceptionWorkaround; + private boolean codecNeedsEosBufferTimestampWorkaround; private boolean 
codecNeedsMonoChannelCountWorkaround; private boolean codecNeedsAdaptationWorkaroundBuffer; private boolean shouldSkipAdaptationWorkaroundOutputBuffer; private boolean codecNeedsEosPropagation; - private ByteBuffer[] inputBuffers; - private ByteBuffer[] outputBuffers; + @Nullable private C2Mp3TimestampTracker c2Mp3TimestampTracker; private long codecHotswapDeadlineMs; private int inputIndex; private int outputIndex; - private ByteBuffer outputBuffer; + @Nullable private ByteBuffer outputBuffer; private boolean isDecodeOnlyOutputBuffer; private boolean isLastOutputBuffer; + private boolean bypassEnabled; + private boolean bypassSampleBufferPending; + private boolean bypassDrainAndReinitialize; private boolean codecReconfigured; - @ReconfigurationState private int codecReconfigurationState; - @DrainState private int codecDrainState; - @DrainAction private int codecDrainAction; + private @ReconfigurationState int codecReconfigurationState; + private @DrainState int codecDrainState; + private @DrainAction int codecDrainAction; private boolean codecReceivedBuffers; private boolean codecReceivedEos; private boolean codecHasOutputMediaFormat; @@ -370,25 +357,17 @@ private static String getDiagnosticInfoV21(Throwable cause) { private long lastBufferInStreamPresentationTimeUs; private boolean inputStreamEnded; private boolean outputStreamEnded; - private boolean waitingForKeys; - private boolean waitingForFirstSyncSample; private boolean waitingForFirstSampleInFormat; - private boolean skipMediaCodecStopOnRelease; private boolean pendingOutputEndOfStream; - + @Nullable private ExoPlaybackException pendingPlaybackException; protected DecoderCounters decoderCounters; + private long outputStreamStartPositionUs; + private long outputStreamOffsetUs; + private int pendingOutputStreamOffsetCount; /** - * @param trackType The track type that the renderer handles. One of the {@code C.TRACK_TYPE_*} - * constants defined in {@link C}. + * @param trackType The {@link C.TrackType track type} that the renderer handles. * @param mediaCodecSelector A decoder selector. - * @param drmSessionManager For use with encrypted media. May be null if support for encrypted - * media is not required. - * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions. - * For example a media file may start with a short clear region so as to allow playback to - * begin in parallel with key acquisition. This parameter specifies whether the renderer is - * permitted to play clear regions of encrypted media files before {@code drmSessionManager} - * has obtained the keys necessary to decrypt encrypted regions of the media. * @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder * initialization fails. This may result in using a decoder that is less efficient or slower * than the primary decoder. @@ -397,74 +376,75 @@ private static String getDiagnosticInfoV21(Throwable cause) { * explicitly using {@link MediaFormat#KEY_OPERATING_RATE}). 
*/ public MediaCodecRenderer( - int trackType, + @C.TrackType int trackType, + MediaCodecAdapter.Factory codecAdapterFactory, MediaCodecSelector mediaCodecSelector, - @Nullable DrmSessionManager drmSessionManager, - boolean playClearSamplesWithoutKeys, boolean enableDecoderFallback, float assumedMinimumCodecOperatingRate) { super(trackType); - this.mediaCodecSelector = Assertions.checkNotNull(mediaCodecSelector); - this.drmSessionManager = drmSessionManager; - this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys; + this.codecAdapterFactory = codecAdapterFactory; + this.mediaCodecSelector = checkNotNull(mediaCodecSelector); this.enableDecoderFallback = enableDecoderFallback; this.assumedMinimumCodecOperatingRate = assumedMinimumCodecOperatingRate; + noDataBuffer = DecoderInputBuffer.newNoDataInstance(); buffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED); - flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance(); + bypassSampleBuffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DIRECT); + bypassBatchBuffer = new BatchBuffer(); formatQueue = new TimedValueQueue<>(); decodeOnlyPresentationTimestamps = new ArrayList<>(); outputBufferInfo = new MediaCodec.BufferInfo(); + currentPlaybackSpeed = 1f; + targetPlaybackSpeed = 1f; + renderTimeLimitMs = C.TIME_UNSET; + pendingOutputStreamStartPositionsUs = new long[MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT]; + pendingOutputStreamOffsetsUs = new long[MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT]; + pendingOutputStreamSwitchTimesUs = new long[MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT]; + outputStreamStartPositionUs = C.TIME_UNSET; + setOutputStreamOffsetUs(C.TIME_UNSET); + // MediaCodec outputs audio buffers in native endian: + // https://developer.android.com/reference/android/media/MediaCodec#raw-audio-buffers + // and code called from MediaCodecAudioRenderer.processOutputBuffer expects this endianness. + // Call ensureSpaceForWrite to make sure the buffer has non-null data, and set the expected + // endianness. + bypassBatchBuffer.ensureSpaceForWrite(/* length= */ 0); + bypassBatchBuffer.data.order(ByteOrder.nativeOrder()); + + codecOperatingRate = CODEC_OPERATING_RATE_UNSET; + codecAdaptationWorkaroundMode = ADAPTATION_WORKAROUND_MODE_NEVER; codecReconfigurationState = RECONFIGURATION_STATE_NONE; + inputIndex = C.INDEX_UNSET; + outputIndex = C.INDEX_UNSET; + codecHotswapDeadlineMs = C.TIME_UNSET; + largestQueuedPresentationTimeUs = C.TIME_UNSET; + lastBufferInStreamPresentationTimeUs = C.TIME_UNSET; codecDrainState = DRAIN_STATE_NONE; codecDrainAction = DRAIN_ACTION_NONE; - codecOperatingRate = CODEC_OPERATING_RATE_UNSET; - rendererOperatingRate = 1f; - renderTimeLimitMs = C.TIME_UNSET; } /** - * Set a limit on the time a single {@link #render(long, long)} call can spend draining and + * Sets a limit on the time a single {@link #render(long, long)} call can spend draining and * filling the decoder. * - *
<p>
      This method is experimental, and will be renamed or removed in a future release. It should - * only be called before the renderer is used. + *
<p>
      This method should be called right after creating an instance of this class. * * @param renderTimeLimitMs The render time limit in milliseconds, or {@link C#TIME_UNSET} for no * limit. */ - public void experimental_setRenderTimeLimitMs(long renderTimeLimitMs) { + public void setRenderTimeLimitMs(long renderTimeLimitMs) { this.renderTimeLimitMs = renderTimeLimitMs; } - /** - * Skip calling {@link MediaCodec#stop()} when the underlying MediaCodec is going to be released. - * - *
<p>
      By default, when the MediaCodecRenderer is releasing the underlying {@link MediaCodec}, it - * first calls {@link MediaCodec#stop()} and then calls {@link MediaCodec#release()}. If this - * feature is enabled, the MediaCodecRenderer will skip the call to {@link MediaCodec#stop()}. - * - *
<p>
      This method is experimental, and will be renamed or removed in a future release. It should - * only be called before the renderer is used. - * - * @param enabled enable or disable the feature. - */ - public void experimental_setSkipMediaCodecStopOnRelease(boolean enabled) { - skipMediaCodecStopOnRelease = enabled; - } - @Override - @AdaptiveSupport - public final int supportsMixedMimeTypeAdaptation() { + public final @AdaptiveSupport int supportsMixedMimeTypeAdaptation() { return ADAPTIVE_NOT_SEAMLESS; } @Override - @Capabilities - public final int supportsFormat(Format format) throws ExoPlaybackException { + public final @Capabilities int supportsFormat(Format format) throws ExoPlaybackException { try { - return supportsFormat(mediaCodecSelector, drmSessionManager, format); + return supportsFormat(mediaCodecSelector, format); } catch (DecoderQueryException e) { - throw createRendererException(e, format); + throw createRendererException(e, format, PlaybackException.ERROR_CODE_DECODER_QUERY_FAILED); } } @@ -472,17 +452,12 @@ public final int supportsFormat(Format format) throws ExoPlaybackException { * Returns the {@link Capabilities} for the given {@link Format}. * * @param mediaCodecSelector The decoder selector. - * @param drmSessionManager The renderer's {@link DrmSessionManager}. * @param format The {@link Format}. * @return The {@link Capabilities} for this {@link Format}. * @throws DecoderQueryException If there was an error querying decoders. */ - @Capabilities - protected abstract int supportsFormat( - MediaCodecSelector mediaCodecSelector, - @Nullable DrmSessionManager drmSessionManager, - Format format) - throws DecoderQueryException; + protected abstract @Capabilities int supportsFormat( + MediaCodecSelector mediaCodecSelector, Format format) throws DecoderQueryException; /** * Returns a list of decoders that can decode media in the specified format, in priority order. @@ -498,25 +473,30 @@ protected abstract List getDecoderInfos( throws DecoderQueryException; /** - * Configures a newly created {@link MediaCodec}. + * Returns the {@link MediaCodecAdapter.Configuration} that will be used to create and configure a + * {@link MediaCodec} to decode the given {@link Format} for a playback. * * @param codecInfo Information about the {@link MediaCodec} being configured. - * @param codec The {@link MediaCodec} to configure. * @param format The {@link Format} for which the codec is being configured. * @param crypto For drm protected playbacks, a {@link MediaCrypto} to use for decryption. * @param codecOperatingRate The codec operating rate, or {@link #CODEC_OPERATING_RATE_UNSET} if * no codec operating rate should be set. + * @return The parameters needed to call {@link MediaCodec#configure}. */ - protected abstract void configureCodec( + protected abstract MediaCodecAdapter.Configuration getMediaCodecConfiguration( MediaCodecInfo codecInfo, - MediaCodec codec, Format format, @Nullable MediaCrypto crypto, float codecOperatingRate); - protected final void maybeInitCodec() throws ExoPlaybackException { - if (codec != null || inputFormat == null) { - // We have a codec already, or we don't have a format with which to instantiate one. + protected final void maybeInitCodecOrBypass() throws ExoPlaybackException { + if (codec != null || bypassEnabled || inputFormat == null) { + // We have a codec, are bypassing it, or don't have a format to decide how to render. 
+ return; + } + + if (sourceDrmSession == null && shouldUseBypass(inputFormat)) { + initBypass(inputFormat); return; } @@ -525,31 +505,36 @@ protected final void maybeInitCodec() throws ExoPlaybackException { String mimeType = inputFormat.sampleMimeType; if (codecDrmSession != null) { if (mediaCrypto == null) { - FrameworkMediaCrypto sessionMediaCrypto = codecDrmSession.getMediaCrypto(); - if (sessionMediaCrypto == null) { - DrmSessionException drmError = codecDrmSession.getError(); + @Nullable + FrameworkCryptoConfig sessionCryptoConfig = getFrameworkCryptoConfig(codecDrmSession); + if (sessionCryptoConfig == null) { + @Nullable DrmSessionException drmError = codecDrmSession.getError(); if (drmError != null) { - // Continue for now. We may be able to avoid failure if the session recovers, or if a - // new input format causes the session to be replaced before it's used. + // Continue for now. We may be able to avoid failure if a new input format causes the + // session to be replaced without it having been used. } else { // The drm session isn't open yet. return; } } else { try { - mediaCrypto = new MediaCrypto(sessionMediaCrypto.uuid, sessionMediaCrypto.sessionId); + mediaCrypto = new MediaCrypto(sessionCryptoConfig.uuid, sessionCryptoConfig.sessionId); } catch (MediaCryptoException e) { - throw createRendererException(e, inputFormat); + throw createRendererException( + e, inputFormat, PlaybackException.ERROR_CODE_DRM_SYSTEM_ERROR); } mediaCryptoRequiresSecureDecoder = - !sessionMediaCrypto.forceAllowInsecureDecoderComponents + !sessionCryptoConfig.forceAllowInsecureDecoderComponents && mediaCrypto.requiresSecureDecoderComponent(mimeType); } } - if (FrameworkMediaCrypto.WORKAROUND_DEVICE_NEEDS_KEYS_TO_CONFIGURE_CODEC) { + if (FrameworkCryptoConfig.WORKAROUND_DEVICE_NEEDS_KEYS_TO_CONFIGURE_CODEC) { @DrmSession.State int drmSessionState = codecDrmSession.getState(); if (drmSessionState == DrmSession.STATE_ERROR) { - throw createRendererException(codecDrmSession.getError(), inputFormat); + DrmSessionException drmSessionException = + Assertions.checkNotNull(codecDrmSession.getError()); + throw createRendererException( + drmSessionException, inputFormat, drmSessionException.errorCode); } else if (drmSessionState != DrmSession.STATE_OPENED_WITH_KEYS) { // Wait for keys. return; @@ -560,14 +545,36 @@ protected final void maybeInitCodec() throws ExoPlaybackException { try { maybeInitCodecWithFallback(mediaCrypto, mediaCryptoRequiresSecureDecoder); } catch (DecoderInitializationException e) { - throw createRendererException(e, inputFormat); + throw createRendererException( + e, inputFormat, PlaybackException.ERROR_CODE_DECODER_INIT_FAILED); } } + /** + * Returns whether buffers in the input format can be processed without a codec. + * + *
<p>
      This method is only called if the content is not DRM protected, because if the content is + * DRM protected use of bypass is never possible. + * + * @param format The input {@link Format}. + * @return Whether playback bypassing {@link MediaCodec} is supported. + */ + protected boolean shouldUseBypass(Format format) { + return false; + } + protected boolean shouldInitCodec(MediaCodecInfo codecInfo) { return true; } + /** + * Returns whether the renderer needs to re-initialize the codec, possibly as a result of a change + * in device capabilities. + */ + protected boolean shouldReinitCodec() { + return false; + } + /** * Returns whether the codec needs the renderer to propagate the end-of-stream signal directly, * rather than by using an end-of-stream buffer queued to the codec. @@ -577,102 +584,156 @@ protected boolean getCodecNeedsEosPropagation() { } /** - * Polls the pending output format queue for a given buffer timestamp. If a format is present, it - * is removed and returned. Otherwise returns {@code null}. Subclasses should only call this - * method if they are taking over responsibility for output format propagation (e.g., when using - * video tunneling). + * Sets an exception to be re-thrown by render. + * + * @param exception The exception. */ - protected final @Nullable Format updateOutputFormatForTime(long presentationTimeUs) { - Format format = formatQueue.pollFloor(presentationTimeUs); + protected final void setPendingPlaybackException(ExoPlaybackException exception) { + pendingPlaybackException = exception; + } + + /** + * Updates the output formats for the specified output buffer timestamp, calling {@link + * #onOutputFormatChanged} if a change has occurred. + * + *
<p>
      Subclasses should only call this method if operating in a mode where buffers are not + * dequeued from the decoder, for example when using video tunneling). + * + * @throws ExoPlaybackException Thrown if an error occurs as a result of the output format change. + */ + protected final void updateOutputFormatForTime(long presentationTimeUs) + throws ExoPlaybackException { + boolean outputFormatChanged = false; + @Nullable Format format = formatQueue.pollFloor(presentationTimeUs); + if (format == null && codecOutputMediaFormatChanged) { + // If the codec's output MediaFormat has changed then there should be a corresponding Format + // change, which we've not found. Check the Format queue in case the corresponding + // presentation timestamp is greater than presentationTimeUs, which can happen for some codecs + // [Internal ref: b/162719047]. + format = formatQueue.pollFirst(); + } if (format != null) { outputFormat = format; + outputFormatChanged = true; + } + if (outputFormatChanged || (codecOutputMediaFormatChanged && outputFormat != null)) { + onOutputFormatChanged(outputFormat, codecOutputMediaFormat); + codecOutputMediaFormatChanged = false; } - return format; } - protected final MediaCodec getCodec() { + @Nullable + protected final MediaCodecAdapter getCodec() { return codec; } - protected final @Nullable MediaCodecInfo getCodecInfo() { + @Nullable + protected final MediaFormat getCodecOutputMediaFormat() { + return codecOutputMediaFormat; + } + + @Nullable + protected final MediaCodecInfo getCodecInfo() { return codecInfo; } @Override - protected void onEnabled(boolean joining) throws ExoPlaybackException { - if (drmSessionManager != null && !drmResourcesAcquired) { - drmResourcesAcquired = true; - drmSessionManager.prepare(); - } + protected void onEnabled(boolean joining, boolean mayRenderStartOfStream) + throws ExoPlaybackException { decoderCounters = new DecoderCounters(); } + @Override + protected void onStreamChanged(Format[] formats, long startPositionUs, long offsetUs) + throws ExoPlaybackException { + if (this.outputStreamOffsetUs == C.TIME_UNSET) { + checkState(this.outputStreamStartPositionUs == C.TIME_UNSET); + this.outputStreamStartPositionUs = startPositionUs; + setOutputStreamOffsetUs(offsetUs); + } else { + if (pendingOutputStreamOffsetCount == pendingOutputStreamOffsetsUs.length) { + Log.w( + TAG, + "Too many stream changes, so dropping offset: " + + pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1]); + } else { + pendingOutputStreamOffsetCount++; + } + pendingOutputStreamStartPositionsUs[pendingOutputStreamOffsetCount - 1] = startPositionUs; + pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1] = offsetUs; + pendingOutputStreamSwitchTimesUs[pendingOutputStreamOffsetCount - 1] = + largestQueuedPresentationTimeUs; + } + } + @Override protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException { inputStreamEnded = false; outputStreamEnded = false; pendingOutputEndOfStream = false; - flushOrReinitializeCodec(); + if (bypassEnabled) { + bypassBatchBuffer.clear(); + bypassSampleBuffer.clear(); + bypassSampleBufferPending = false; + } else { + flushOrReinitializeCodec(); + } + // If there is a format change on the input side still pending propagation to the output, we + // need to queue a format next time a buffer is read. This is because we may not read a new + // input format after the position reset. 
+ if (formatQueue.size() > 0) { + waitingForFirstSampleInFormat = true; + } formatQueue.clear(); + if (pendingOutputStreamOffsetCount != 0) { + setOutputStreamOffsetUs(pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1]); + outputStreamStartPositionUs = + pendingOutputStreamStartPositionsUs[pendingOutputStreamOffsetCount - 1]; + pendingOutputStreamOffsetCount = 0; + } } @Override - public final void setOperatingRate(float operatingRate) throws ExoPlaybackException { - rendererOperatingRate = operatingRate; - if (codec != null - && codecDrainAction != DRAIN_ACTION_REINITIALIZE - && getState() != STATE_DISABLED) { - updateCodecOperatingRate(); - } + public void setPlaybackSpeed(float currentPlaybackSpeed, float targetPlaybackSpeed) + throws ExoPlaybackException { + this.currentPlaybackSpeed = currentPlaybackSpeed; + this.targetPlaybackSpeed = targetPlaybackSpeed; + updateCodecOperatingRate(codecInputFormat); } @Override protected void onDisabled() { inputFormat = null; - if (sourceDrmSession != null || codecDrmSession != null) { - // TODO: Do something better with this case. - onReset(); - } else { - flushOrReleaseCodec(); - } + outputStreamStartPositionUs = C.TIME_UNSET; + setOutputStreamOffsetUs(C.TIME_UNSET); + pendingOutputStreamOffsetCount = 0; + flushOrReleaseCodec(); } @Override protected void onReset() { try { + disableBypass(); releaseCodec(); } finally { setSourceDrmSession(null); } - if (drmSessionManager != null && drmResourcesAcquired) { - drmResourcesAcquired = false; - drmSessionManager.release(); - } + } + + private void disableBypass() { + bypassDrainAndReinitialize = false; + bypassBatchBuffer.clear(); + bypassSampleBuffer.clear(); + bypassSampleBufferPending = false; + bypassEnabled = false; } protected void releaseCodec() { - availableCodecInfos = null; - codecInfo = null; - codecFormat = null; - codecHasOutputMediaFormat = false; - resetInputBuffer(); - resetOutputBuffer(); - resetCodecBuffers(); - waitingForKeys = false; - codecHotswapDeadlineMs = C.TIME_UNSET; - decodeOnlyPresentationTimestamps.clear(); - largestQueuedPresentationTimeUs = C.TIME_UNSET; - lastBufferInStreamPresentationTimeUs = C.TIME_UNSET; try { if (codec != null) { + codec.release(); decoderCounters.decoderReleaseCount++; - try { - if (!skipMediaCodecStopOnRelease) { - codec.stop(); - } - } finally { - codec.release(); - } + onCodecReleased(codecInfo.name); } } finally { codec = null; @@ -682,8 +743,8 @@ protected void releaseCodec() { } } finally { mediaCrypto = null; - mediaCryptoRequiresSecureDecoder = false; setCodecDrmSession(null); + resetCodecStateForRelease(); } } } @@ -704,35 +765,55 @@ public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackEx pendingOutputEndOfStream = false; processEndOfStream(); } + if (pendingPlaybackException != null) { + ExoPlaybackException playbackException = pendingPlaybackException; + pendingPlaybackException = null; + throw playbackException; + } + try { if (outputStreamEnded) { renderToEndOfStream(); return; } - if (inputFormat == null && !readToFlagsOnlyBuffer(/* requireFormat= */ true)) { + if (inputFormat == null && !readSourceOmittingSampleData(FLAG_REQUIRE_FORMAT)) { // We still don't have a format and can't make progress without one. return; } // We have a format. 
- maybeInitCodec(); - if (codec != null) { - long drainStartTimeMs = SystemClock.elapsedRealtime(); + maybeInitCodecOrBypass(); + if (bypassEnabled) { + TraceUtil.beginSection("bypassRender"); + while (bypassRender(positionUs, elapsedRealtimeUs)) {} + TraceUtil.endSection(); + } else if (codec != null) { + long renderStartTimeMs = SystemClock.elapsedRealtime(); TraceUtil.beginSection("drainAndFeed"); - while (drainOutputBuffer(positionUs, elapsedRealtimeUs)) {} - while (feedInputBuffer() && shouldContinueFeeding(drainStartTimeMs)) {} + while (drainOutputBuffer(positionUs, elapsedRealtimeUs) + && shouldContinueRendering(renderStartTimeMs)) {} + while (feedInputBuffer() && shouldContinueRendering(renderStartTimeMs)) {} TraceUtil.endSection(); } else { decoderCounters.skippedInputBufferCount += skipSource(positionUs); // We need to read any format changes despite not having a codec so that drmSession can be // updated, and so that we have the most recent format should the codec be initialized. We - // may also reach the end of the stream. Note that readSource will not read a sample into a - // flags-only buffer. - readToFlagsOnlyBuffer(/* requireFormat= */ false); + // may also reach the end of the stream. FLAG_PEEK is used because we don't want to advance + // the source further than skipSource has already done. + readSourceOmittingSampleData(FLAG_PEEK); } decoderCounters.ensureUpdated(); } catch (IllegalStateException e) { if (isMediaCodecException(e)) { - throw createRendererException(e, inputFormat); + onCodecError(e); + boolean isRecoverable = Util.SDK_INT >= 21 && isRecoverableMediaCodecExceptionV21(e); + if (isRecoverable) { + releaseCodec(); + } + throw createRendererException( + createDecoderException(e, getCodecInfo()), + inputFormat, + isRecoverable, + PlaybackException.ERROR_CODE_DECODING_FAILED); } throw e; } @@ -743,7 +824,7 @@ public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackEx * This method is a no-op if the codec is {@code null}. * *
<p>
      The implementation of this method calls {@link #flushOrReleaseCodec()}, and {@link - * #maybeInitCodec()} if the codec needs to be re-instantiated. + * #maybeInitCodecOrBypass()} if the codec needs to be re-instantiated. * * @return Whether the codec was released and reinitialized, rather than being flushed. * @throws ExoPlaybackException If an error occurs re-instantiating the codec. @@ -751,7 +832,7 @@ public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackEx protected final boolean flushOrReinitializeCodec() throws ExoPlaybackException { boolean released = flushOrReleaseCodec(); if (released) { - maybeInitCodec(); + maybeInitCodecOrBypass(); } return released; } @@ -773,23 +854,50 @@ protected boolean flushOrReleaseCodec() { releaseCodec(); return true; } + if (codecDrainAction == DRAIN_ACTION_FLUSH_AND_UPDATE_DRM_SESSION) { + checkState(Util.SDK_INT >= 23); // Implied by DRAIN_ACTION_FLUSH_AND_UPDATE_DRM_SESSION + // Needed to keep lint happy (it doesn't understand the checkState call alone) + if (Util.SDK_INT >= 23) { + try { + updateDrmSessionV23(); + } catch (ExoPlaybackException e) { + Log.w(TAG, "Failed to update the DRM session, releasing the codec instead.", e); + releaseCodec(); + return true; + } + } + } + flushCodec(); + return false; + } - codec.flush(); + /** Flushes the codec. */ + private void flushCodec() { + try { + codec.flush(); + } finally { + resetCodecStateForFlush(); + } + } + + /** Resets the renderer internal state after a codec flush. */ + @CallSuper + protected void resetCodecStateForFlush() { resetInputBuffer(); resetOutputBuffer(); codecHotswapDeadlineMs = C.TIME_UNSET; codecReceivedEos = false; codecReceivedBuffers = false; - waitingForFirstSyncSample = true; codecNeedsAdaptationWorkaroundBuffer = false; shouldSkipAdaptationWorkaroundOutputBuffer = false; isDecodeOnlyOutputBuffer = false; isLastOutputBuffer = false; - - waitingForKeys = false; decodeOnlyPresentationTimestamps.clear(); largestQueuedPresentationTimeUs = C.TIME_UNSET; lastBufferInStreamPresentationTimeUs = C.TIME_UNSET; + if (c2Mp3TimestampTracker != null) { + c2Mp3TimestampTracker.reset(); + } codecDrainState = DRAIN_STATE_NONE; codecDrainAction = DRAIN_ACTION_NONE; // Reconfiguration data sent shortly before the flush may not have been processed by the @@ -797,23 +905,64 @@ protected boolean flushOrReleaseCodec() { // guarantee that it's processed. codecReconfigurationState = codecReconfigured ? RECONFIGURATION_STATE_WRITE_PENDING : RECONFIGURATION_STATE_NONE; - return false; } - protected DecoderException createDecoderException( + /** + * Resets the renderer internal state after a codec release. + * + *
<p>
      Note that this only needs to reset state variables that are changed in addition to those + * already changed in {@link #resetCodecStateForFlush()}. + */ + @CallSuper + protected void resetCodecStateForRelease() { + resetCodecStateForFlush(); + + pendingPlaybackException = null; + c2Mp3TimestampTracker = null; + availableCodecInfos = null; + codecInfo = null; + codecInputFormat = null; + codecOutputMediaFormat = null; + codecOutputMediaFormatChanged = false; + codecHasOutputMediaFormat = false; + codecOperatingRate = CODEC_OPERATING_RATE_UNSET; + codecAdaptationWorkaroundMode = ADAPTATION_WORKAROUND_MODE_NEVER; + codecNeedsDiscardToSpsWorkaround = false; + codecNeedsFlushWorkaround = false; + codecNeedsSosFlushWorkaround = false; + codecNeedsEosFlushWorkaround = false; + codecNeedsEosOutputExceptionWorkaround = false; + codecNeedsEosBufferTimestampWorkaround = false; + codecNeedsMonoChannelCountWorkaround = false; + codecNeedsEosPropagation = false; + codecReconfigured = false; + codecReconfigurationState = RECONFIGURATION_STATE_NONE; + mediaCryptoRequiresSecureDecoder = false; + } + + protected MediaCodecDecoderException createDecoderException( Throwable cause, @Nullable MediaCodecInfo codecInfo) { - return new DecoderException(cause, codecInfo); + return new MediaCodecDecoderException(cause, codecInfo); } - /** Reads into {@link #flagsOnlyBuffer} and returns whether a {@link Format} was read. */ - private boolean readToFlagsOnlyBuffer(boolean requireFormat) throws ExoPlaybackException { + /** + * Reads from the source when sample data is not required. If a format or an end of stream buffer + * is read, it will be handled before the call returns. + * + * @param readFlags Additional {@link ReadFlags}. {@link SampleStream#FLAG_OMIT_SAMPLE_DATA} is + * added internally, and so does not need to be passed. + * @return Whether a format was read and processed. + */ + private boolean readSourceOmittingSampleData(@SampleStream.ReadFlags int readFlags) + throws ExoPlaybackException { FormatHolder formatHolder = getFormatHolder(); - flagsOnlyBuffer.clear(); - int result = readSource(formatHolder, flagsOnlyBuffer, requireFormat); + noDataBuffer.clear(); + @ReadDataResult + int result = readSource(formatHolder, noDataBuffer, readFlags | FLAG_OMIT_SAMPLE_DATA); if (result == C.RESULT_FORMAT_READ) { onInputFormatChanged(formatHolder); return true; - } else if (result == C.RESULT_BUFFER_READ && flagsOnlyBuffer.isEndOfStream()) { + } else if (result == C.RESULT_BUFFER_READ && noDataBuffer.isEndOfStream()) { inputStreamEnded = true; processEndOfStream(); } @@ -851,13 +1000,27 @@ private void maybeInitCodecWithFallback( DecoderInitializationException.NO_SUITABLE_DECODER_ERROR); } + MediaCodecInfo preferredCodecInfo = availableCodecInfos.peekFirst(); while (codec == null) { MediaCodecInfo codecInfo = availableCodecInfos.peekFirst(); if (!shouldInitCodec(codecInfo)) { return; } try { - initCodec(codecInfo, crypto); + try { + initCodec(codecInfo, crypto); + } catch (Exception e) { + if (codecInfo == preferredCodecInfo) { + // If creating the preferred decoder failed then sleep briefly before retrying. + // Workaround for [internal b/191966399]. + // See also https://github.com/google/ExoPlayer/issues/8696. + Log.w(TAG, "Preferred decoder instantiation failed. 
Sleeping for 50ms then retrying."); + Thread.sleep(/* millis= */ 50); + initCodec(codecInfo, crypto); + } else { + throw e; + } + } } catch (Exception e) { Log.w(TAG, "Failed to initialize decoder: " + codecInfo, e); // This codec failed to initialize, so fall back to the next codec in the list (if any). We @@ -867,6 +1030,7 @@ private void maybeInitCodecWithFallback( DecoderInitializationException exception = new DecoderInitializationException( inputFormat, e, mediaCryptoRequiresSecureDecoder, codecInfo); + onCodecError(exception); if (preferredDecoderInitializationException == null) { preferredDecoderInitializationException = exception; } else { @@ -906,114 +1070,92 @@ private List getAvailableCodecInfos(boolean mediaCryptoRequiresS return codecInfos; } + /** Configures rendering where no codec is used. */ + private void initBypass(Format format) { + disableBypass(); // In case of transition between 2 bypass formats. + + String mimeType = format.sampleMimeType; + if (!MimeTypes.AUDIO_AAC.equals(mimeType) + && !MimeTypes.AUDIO_MPEG.equals(mimeType) + && !MimeTypes.AUDIO_OPUS.equals(mimeType)) { + // TODO(b/154746451): Batching provokes frame drops in non offload. + bypassBatchBuffer.setMaxSampleCount(1); + } else { + bypassBatchBuffer.setMaxSampleCount(BatchBuffer.DEFAULT_MAX_SAMPLE_COUNT); + } + bypassEnabled = true; + } + private void initCodec(MediaCodecInfo codecInfo, MediaCrypto crypto) throws Exception { long codecInitializingTimestamp; long codecInitializedTimestamp; - MediaCodec codec = null; String codecName = codecInfo.name; - float codecOperatingRate = Util.SDK_INT < 23 ? CODEC_OPERATING_RATE_UNSET - : getCodecOperatingRateV23(rendererOperatingRate, inputFormat, getStreamFormats()); + : getCodecOperatingRateV23(targetPlaybackSpeed, inputFormat, getStreamFormats()); if (codecOperatingRate <= assumedMinimumCodecOperatingRate) { codecOperatingRate = CODEC_OPERATING_RATE_UNSET; } + codecInitializingTimestamp = SystemClock.elapsedRealtime(); + MediaCodecAdapter.Configuration configuration = + getMediaCodecConfiguration(codecInfo, inputFormat, crypto, codecOperatingRate); + if (Util.SDK_INT >= 31) { + Api31.setLogSessionIdToMediaCodecFormat(configuration, getPlayerId()); + } try { - codecInitializingTimestamp = SystemClock.elapsedRealtime(); TraceUtil.beginSection("createCodec:" + codecName); - codec = MediaCodec.createByCodecName(codecName); - TraceUtil.endSection(); - TraceUtil.beginSection("configureCodec"); - configureCodec(codecInfo, codec, inputFormat, crypto, codecOperatingRate); - TraceUtil.endSection(); - TraceUtil.beginSection("startCodec"); - codec.start(); + codec = codecAdapterFactory.createAdapter(configuration); + } finally { TraceUtil.endSection(); - codecInitializedTimestamp = SystemClock.elapsedRealtime(); - getCodecBuffers(codec); - } catch (Exception e) { - if (codec != null) { - resetCodecBuffers(); - codec.release(); - } - throw e; + } + codecInitializedTimestamp = SystemClock.elapsedRealtime(); + + if (!codecInfo.isFormatSupported(inputFormat)) { + Log.w( + TAG, + Util.formatInvariant( + "Format exceeds selected codec's capabilities [%s, %s]", + Format.toLogString(inputFormat), codecName)); } - this.codec = codec; this.codecInfo = codecInfo; this.codecOperatingRate = codecOperatingRate; - codecFormat = inputFormat; + codecInputFormat = inputFormat; codecAdaptationWorkaroundMode = codecAdaptationWorkaroundMode(codecName); - codecNeedsReconfigureWorkaround = codecNeedsReconfigureWorkaround(codecName); - codecNeedsDiscardToSpsWorkaround = 
codecNeedsDiscardToSpsWorkaround(codecName, codecFormat); + codecNeedsDiscardToSpsWorkaround = + codecNeedsDiscardToSpsWorkaround(codecName, codecInputFormat); codecNeedsFlushWorkaround = codecNeedsFlushWorkaround(codecName); codecNeedsSosFlushWorkaround = codecNeedsSosFlushWorkaround(codecName); codecNeedsEosFlushWorkaround = codecNeedsEosFlushWorkaround(codecName); codecNeedsEosOutputExceptionWorkaround = codecNeedsEosOutputExceptionWorkaround(codecName); + codecNeedsEosBufferTimestampWorkaround = codecNeedsEosBufferTimestampWorkaround(codecName); codecNeedsMonoChannelCountWorkaround = - codecNeedsMonoChannelCountWorkaround(codecName, codecFormat); + codecNeedsMonoChannelCountWorkaround(codecName, codecInputFormat); codecNeedsEosPropagation = codecNeedsEosPropagationWorkaround(codecInfo) || getCodecNeedsEosPropagation(); + if (codec.needsReconfiguration()) { + this.codecReconfigured = true; + this.codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING; + this.codecNeedsAdaptationWorkaroundBuffer = + codecAdaptationWorkaroundMode != ADAPTATION_WORKAROUND_MODE_NEVER; + } + if ("c2.android.mp3.decoder".equals(codecInfo.name)) { + c2Mp3TimestampTracker = new C2Mp3TimestampTracker(); + } - resetInputBuffer(); - resetOutputBuffer(); - codecHotswapDeadlineMs = - getState() == STATE_STARTED - ? (SystemClock.elapsedRealtime() + MAX_CODEC_HOTSWAP_TIME_MS) - : C.TIME_UNSET; - codecReconfigured = false; - codecReconfigurationState = RECONFIGURATION_STATE_NONE; - codecReceivedEos = false; - codecReceivedBuffers = false; - largestQueuedPresentationTimeUs = C.TIME_UNSET; - lastBufferInStreamPresentationTimeUs = C.TIME_UNSET; - codecDrainState = DRAIN_STATE_NONE; - codecDrainAction = DRAIN_ACTION_NONE; - codecNeedsAdaptationWorkaroundBuffer = false; - shouldSkipAdaptationWorkaroundOutputBuffer = false; - isDecodeOnlyOutputBuffer = false; - isLastOutputBuffer = false; - waitingForFirstSyncSample = true; + if (getState() == STATE_STARTED) { + codecHotswapDeadlineMs = SystemClock.elapsedRealtime() + MAX_CODEC_HOTSWAP_TIME_MS; + } decoderCounters.decoderInitCount++; long elapsed = codecInitializedTimestamp - codecInitializingTimestamp; - onCodecInitialized(codecName, codecInitializedTimestamp, elapsed); + onCodecInitialized(codecName, configuration, codecInitializedTimestamp, elapsed); } - private boolean shouldContinueFeeding(long drainStartTimeMs) { + private boolean shouldContinueRendering(long renderStartTimeMs) { return renderTimeLimitMs == C.TIME_UNSET - || SystemClock.elapsedRealtime() - drainStartTimeMs < renderTimeLimitMs; - } - - private void getCodecBuffers(MediaCodec codec) { - if (Util.SDK_INT < 21) { - inputBuffers = codec.getInputBuffers(); - outputBuffers = codec.getOutputBuffers(); - } - } - - private void resetCodecBuffers() { - if (Util.SDK_INT < 21) { - inputBuffers = null; - outputBuffers = null; - } - } - - private ByteBuffer getInputBuffer(int inputIndex) { - if (Util.SDK_INT >= 21) { - return codec.getInputBuffer(inputIndex); - } else { - return inputBuffers[inputIndex]; - } - } - - private ByteBuffer getOutputBuffer(int outputIndex) { - if (Util.SDK_INT >= 21) { - return codec.getOutputBuffer(outputIndex); - } else { - return outputBuffers[outputIndex]; - } + || SystemClock.elapsedRealtime() - renderStartTimeMs < renderTimeLimitMs; } private boolean hasOutputBuffer() { @@ -1030,12 +1172,12 @@ private void resetOutputBuffer() { outputBuffer = null; } - private void setSourceDrmSession(@Nullable DrmSession session) { + private void setSourceDrmSession(@Nullable DrmSession 
session) { DrmSession.replaceSession(sourceDrmSession, session); sourceDrmSession = session; } - private void setCodecDrmSession(@Nullable DrmSession session) { + private void setCodecDrmSession(@Nullable DrmSession session) { DrmSession.replaceSession(codecDrmSession, session); codecDrmSession = session; } @@ -1048,13 +1190,16 @@ private boolean feedInputBuffer() throws ExoPlaybackException { if (codec == null || codecDrainState == DRAIN_STATE_WAIT_END_OF_STREAM || inputStreamEnded) { return false; } + if (codecDrainState == DRAIN_STATE_NONE && shouldReinitCodec()) { + drainAndReinitializeCodec(); + } if (inputIndex < 0) { - inputIndex = codec.dequeueInputBuffer(0); + inputIndex = codec.dequeueInputBufferIndex(); if (inputIndex < 0) { return false; } - buffer.data = getInputBuffer(inputIndex); + buffer.data = codec.getInputBuffer(inputIndex); buffer.clear(); } @@ -1081,24 +1226,29 @@ private boolean feedInputBuffer() throws ExoPlaybackException { return true; } - int result; - FormatHolder formatHolder = getFormatHolder(); - int adaptiveReconfigurationBytes = 0; - if (waitingForKeys) { - // We've already read an encrypted sample into buffer, and are waiting for keys. - result = C.RESULT_BUFFER_READ; - } else { - // For adaptive reconfiguration OMX decoders expect all reconfiguration data to be supplied - // at the start of the buffer that also contains the first frame in the new format. - if (codecReconfigurationState == RECONFIGURATION_STATE_WRITE_PENDING) { - for (int i = 0; i < codecFormat.initializationData.size(); i++) { - byte[] data = codecFormat.initializationData.get(i); - buffer.data.put(data); - } - codecReconfigurationState = RECONFIGURATION_STATE_QUEUE_PENDING; + // For adaptive reconfiguration, decoders expect all reconfiguration data to be supplied at + // the start of the buffer that also contains the first frame in the new format. + if (codecReconfigurationState == RECONFIGURATION_STATE_WRITE_PENDING) { + for (int i = 0; i < codecInputFormat.initializationData.size(); i++) { + byte[] data = codecInputFormat.initializationData.get(i); + buffer.data.put(data); } - adaptiveReconfigurationBytes = buffer.data.position(); - result = readSource(formatHolder, buffer, false); + codecReconfigurationState = RECONFIGURATION_STATE_QUEUE_PENDING; + } + int adaptiveReconfigurationBytes = buffer.data.position(); + + FormatHolder formatHolder = getFormatHolder(); + + @SampleStream.ReadDataResult int result; + try { + result = readSource(formatHolder, buffer, /* readFlags= */ 0); + } catch (InsufficientCapacityException e) { + onCodecError(e); + // Skip the sample that's too large by reading it without its data. Then flush the codec so + // that rendering will resume from the next key frame. + readSourceOmittingSampleData(/* readFlags= */ 0); + flushCodec(); + return true; } if (hasReadStreamToEnd()) { @@ -1125,7 +1275,7 @@ private boolean feedInputBuffer() throws ExoPlaybackException { if (codecReconfigurationState == RECONFIGURATION_STATE_QUEUE_PENDING) { // We received a new format immediately before the end of the stream. We need to clear // the corresponding reconfiguration data from the current buffer, but re-write it into - // a subsequent buffer if there are any (e.g. if the user seeks backwards). + // a subsequent buffer if there are any (for example, if the user seeks backwards). buffer.clear(); codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING; } @@ -1139,28 +1289,40 @@ private boolean feedInputBuffer() throws ExoPlaybackException { // Do nothing. 
} else { codecReceivedEos = true; - codec.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); + codec.queueInputBuffer( + inputIndex, + /* offset= */ 0, + /* size= */ 0, + /* presentationTimeUs= */ 0, + MediaCodec.BUFFER_FLAG_END_OF_STREAM); resetInputBuffer(); } } catch (CryptoException e) { - throw createRendererException(e, inputFormat); + throw createRendererException( + e, inputFormat, Util.getErrorCodeForMediaDrmErrorCode(e.getErrorCode())); } return false; } - if (waitingForFirstSyncSample && !buffer.isKeyFrame()) { + + // This logic is required for cases where the decoder needs to be flushed or re-instantiated + // during normal consumption of samples from the source (i.e., without a corresponding + // Renderer.enable or Renderer.resetPosition call). This is necessary for certain legacy and + // workaround behaviors, for example when switching the output Surface on API levels prior to + // the introduction of MediaCodec.setOutputSurface, and when it's necessary to skip past a + // sample that's too large to be held in one of the decoder's input buffers. + if (!codecReceivedBuffers && !buffer.isKeyFrame()) { buffer.clear(); if (codecReconfigurationState == RECONFIGURATION_STATE_QUEUE_PENDING) { - // The buffer we just cleared contained reconfiguration data. We need to re-write this - // data into a subsequent buffer (if there is one). + // The buffer we just cleared contained reconfiguration data. We need to re-write this data + // into a subsequent buffer (if there is one). codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING; } return true; } - waitingForFirstSyncSample = false; + boolean bufferEncrypted = buffer.isEncrypted(); - waitingForKeys = shouldWaitForKeys(bufferEncrypted); - if (waitingForKeys) { - return false; + if (bufferEncrypted) { + buffer.cryptoInfo.increaseClearDataFirstSubSampleBy(adaptiveReconfigurationBytes); } if (codecNeedsDiscardToSpsWorkaround && !bufferEncrypted) { NalUnitUtil.discardToSps(buffer.data); @@ -1169,161 +1331,230 @@ private boolean feedInputBuffer() throws ExoPlaybackException { } codecNeedsDiscardToSpsWorkaround = false; } - try { - long presentationTimeUs = buffer.timeUs; - if (buffer.isDecodeOnly()) { - decodeOnlyPresentationTimestamps.add(presentationTimeUs); - } - if (waitingForFirstSampleInFormat) { - formatQueue.add(presentationTimeUs, inputFormat); - waitingForFirstSampleInFormat = false; - } + + long presentationTimeUs = buffer.timeUs; + + if (c2Mp3TimestampTracker != null) { + presentationTimeUs = + c2Mp3TimestampTracker.updateAndGetPresentationTimeUs(inputFormat, buffer); + // When draining the C2 MP3 decoder it produces an extra non-empty buffer with a timestamp + // after all queued input buffer timestamps (unlike other decoders, which generally propagate + // the input timestamps to output buffers 1:1). To detect the end of the stream when this + // buffer is dequeued we override the largest queued timestamp accordingly. 
largestQueuedPresentationTimeUs = - Math.max(largestQueuedPresentationTimeUs, presentationTimeUs); + max( + largestQueuedPresentationTimeUs, + c2Mp3TimestampTracker.getLastOutputBufferPresentationTimeUs(inputFormat)); + } - buffer.flip(); - if (buffer.hasSupplementalData()) { - handleInputBufferSupplementalData(buffer); - } - onQueueInputBuffer(buffer); + if (buffer.isDecodeOnly()) { + decodeOnlyPresentationTimestamps.add(presentationTimeUs); + } + if (waitingForFirstSampleInFormat) { + formatQueue.add(presentationTimeUs, inputFormat); + waitingForFirstSampleInFormat = false; + } + largestQueuedPresentationTimeUs = max(largestQueuedPresentationTimeUs, presentationTimeUs); + buffer.flip(); + if (buffer.hasSupplementalData()) { + handleInputBufferSupplementalData(buffer); + } + onQueueInputBuffer(buffer); + try { if (bufferEncrypted) { - MediaCodec.CryptoInfo cryptoInfo = getFrameworkCryptoInfo(buffer, - adaptiveReconfigurationBytes); - codec.queueSecureInputBuffer(inputIndex, 0, cryptoInfo, presentationTimeUs, 0); + codec.queueSecureInputBuffer( + inputIndex, /* offset= */ 0, buffer.cryptoInfo, presentationTimeUs, /* flags= */ 0); } else { - codec.queueInputBuffer(inputIndex, 0, buffer.data.limit(), presentationTimeUs, 0); + codec.queueInputBuffer( + inputIndex, /* offset= */ 0, buffer.data.limit(), presentationTimeUs, /* flags= */ 0); } - resetInputBuffer(); - codecReceivedBuffers = true; - codecReconfigurationState = RECONFIGURATION_STATE_NONE; - decoderCounters.inputBufferCount++; } catch (CryptoException e) { - throw createRendererException(e, inputFormat); + throw createRendererException( + e, inputFormat, Util.getErrorCodeForMediaDrmErrorCode(e.getErrorCode())); } - return true; - } - private boolean shouldWaitForKeys(boolean bufferEncrypted) throws ExoPlaybackException { - if (codecDrmSession == null - || (!bufferEncrypted - && (playClearSamplesWithoutKeys || codecDrmSession.playClearSamplesWithoutKeys()))) { - return false; - } - @DrmSession.State int drmSessionState = codecDrmSession.getState(); - if (drmSessionState == DrmSession.STATE_ERROR) { - throw createRendererException(codecDrmSession.getError(), inputFormat); - } - return drmSessionState != DrmSession.STATE_OPENED_WITH_KEYS; + resetInputBuffer(); + codecReceivedBuffers = true; + codecReconfigurationState = RECONFIGURATION_STATE_NONE; + decoderCounters.queuedInputBufferCount++; + return true; } /** * Called when a {@link MediaCodec} has been created and configured. - *
<p>
      - * The default implementation is a no-op. + * + *
<p>
      The default implementation is a no-op. * * @param name The name of the codec that was initialized. + * @param configuration The {@link MediaCodecAdapter.Configuration} used to configure the codec. * @param initializedTimestampMs {@link SystemClock#elapsedRealtime()} when initialization * finished. * @param initializationDurationMs The time taken to initialize the codec in milliseconds. */ - protected void onCodecInitialized(String name, long initializedTimestampMs, + protected void onCodecInitialized( + String name, + MediaCodecAdapter.Configuration configuration, + long initializedTimestampMs, long initializationDurationMs) { // Do nothing. } + /** + * Called when a {@link MediaCodec} has been released. + * + *
<p>
      The default implementation is a no-op. + * + * @param name The name of the codec that was released. + */ + protected void onCodecReleased(String name) { + // Do nothing. + } + + /** + * Called when a codec error has occurred. + * + *
<p>
      The default implementation is a no-op. + * + * @param codecError The error. + */ + protected void onCodecError(Exception codecError) { + // Do nothing. + } + /** * Called when a new {@link Format} is read from the upstream {@link MediaPeriod}. * * @param formatHolder A {@link FormatHolder} that holds the new {@link Format}. * @throws ExoPlaybackException If an error occurs re-initializing the {@link MediaCodec}. + * @return The result of the evaluation to determine whether the existing decoder instance can be + * reused for the new format, or {@code null} if the renderer did not have a decoder. */ - @SuppressWarnings("unchecked") - protected void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException { + @CallSuper + @Nullable + protected DecoderReuseEvaluation onInputFormatChanged(FormatHolder formatHolder) + throws ExoPlaybackException { waitingForFirstSampleInFormat = true; - Format newFormat = Assertions.checkNotNull(formatHolder.format); - if (formatHolder.includesDrmSession) { - setSourceDrmSession((DrmSession) formatHolder.drmSession); - } else { - sourceDrmSession = - getUpdatedSourceDrmSession(inputFormat, newFormat, drmSessionManager, sourceDrmSession); + Format newFormat = checkNotNull(formatHolder.format); + if (newFormat.sampleMimeType == null) { + // If the new format is invalid, it is either a media bug or it is not intended to be played. + // See also https://github.com/google/ExoPlayer/issues/8283. + + throw createRendererException( + new IllegalArgumentException(), + newFormat, + PlaybackException.ERROR_CODE_DECODING_FORMAT_UNSUPPORTED); } + setSourceDrmSession(formatHolder.drmSession); inputFormat = newFormat; + if (bypassEnabled) { + bypassDrainAndReinitialize = true; + return null; // Need to drain batch buffer first. + } + if (codec == null) { - maybeInitCodec(); - return; + availableCodecInfos = null; + maybeInitCodecOrBypass(); + return null; } - // We have an existing codec that we may need to reconfigure or re-initialize. If the existing - // codec instance is being kept then its operating rate may need to be updated. + // We have an existing codec that we may need to reconfigure, re-initialize, or release to + // switch to bypass. If the existing codec instance is kept then its operating rate and DRM + // session may need to be updated. - if ((sourceDrmSession == null && codecDrmSession != null) - || (sourceDrmSession != null && codecDrmSession == null) - || (sourceDrmSession != codecDrmSession - && !codecInfo.secure - && maybeRequiresSecureDecoder(sourceDrmSession, newFormat)) - || (Util.SDK_INT < 23 && sourceDrmSession != codecDrmSession)) { - // We might need to switch between the clear and protected output paths, or we're using DRM - // prior to API level 23 where the codec needs to be re-initialized to switch to the new DRM - // session. - drainAndReinitializeCodec(); - return; - } + // Copy the current codec and codecInfo to local variables so they remain accessible if the + // member variables are updated during the logic below. 
+ MediaCodecAdapter codec = this.codec; + MediaCodecInfo codecInfo = this.codecInfo; - switch (canKeepCodec(codec, codecInfo, codecFormat, newFormat)) { - case KEEP_CODEC_RESULT_NO: + Format oldFormat = codecInputFormat; + if (drmNeedsCodecReinitialization(codecInfo, newFormat, codecDrmSession, sourceDrmSession)) { + drainAndReinitializeCodec(); + return new DecoderReuseEvaluation( + codecInfo.name, + oldFormat, + newFormat, + REUSE_RESULT_NO, + DISCARD_REASON_DRM_SESSION_CHANGED); + } + boolean drainAndUpdateCodecDrmSession = sourceDrmSession != codecDrmSession; + Assertions.checkState(!drainAndUpdateCodecDrmSession || Util.SDK_INT >= 23); + + DecoderReuseEvaluation evaluation = canReuseCodec(codecInfo, oldFormat, newFormat); + @DecoderDiscardReasons int overridingDiscardReasons = 0; + switch (evaluation.result) { + case REUSE_RESULT_NO: drainAndReinitializeCodec(); break; - case KEEP_CODEC_RESULT_YES_WITH_FLUSH: - codecFormat = newFormat; - updateCodecOperatingRate(); - if (sourceDrmSession != codecDrmSession) { - drainAndUpdateCodecDrmSession(); + case REUSE_RESULT_YES_WITH_FLUSH: + if (!updateCodecOperatingRate(newFormat)) { + overridingDiscardReasons |= DISCARD_REASON_OPERATING_RATE_CHANGED; } else { - drainAndFlushCodec(); + codecInputFormat = newFormat; + if (drainAndUpdateCodecDrmSession) { + if (!drainAndUpdateCodecDrmSessionV23()) { + overridingDiscardReasons |= DISCARD_REASON_WORKAROUND; + } + } else if (!drainAndFlushCodec()) { + overridingDiscardReasons |= DISCARD_REASON_WORKAROUND; + } } break; - case KEEP_CODEC_RESULT_YES_WITH_RECONFIGURATION: - if (codecNeedsReconfigureWorkaround) { - drainAndReinitializeCodec(); + case REUSE_RESULT_YES_WITH_RECONFIGURATION: + if (!updateCodecOperatingRate(newFormat)) { + overridingDiscardReasons |= DISCARD_REASON_OPERATING_RATE_CHANGED; } else { codecReconfigured = true; codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING; codecNeedsAdaptationWorkaroundBuffer = codecAdaptationWorkaroundMode == ADAPTATION_WORKAROUND_MODE_ALWAYS || (codecAdaptationWorkaroundMode == ADAPTATION_WORKAROUND_MODE_SAME_RESOLUTION - && newFormat.width == codecFormat.width - && newFormat.height == codecFormat.height); - codecFormat = newFormat; - updateCodecOperatingRate(); - if (sourceDrmSession != codecDrmSession) { - drainAndUpdateCodecDrmSession(); + && newFormat.width == oldFormat.width + && newFormat.height == oldFormat.height); + codecInputFormat = newFormat; + if (drainAndUpdateCodecDrmSession && !drainAndUpdateCodecDrmSessionV23()) { + overridingDiscardReasons |= DISCARD_REASON_WORKAROUND; } } break; - case KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION: - codecFormat = newFormat; - updateCodecOperatingRate(); - if (sourceDrmSession != codecDrmSession) { - drainAndUpdateCodecDrmSession(); + case REUSE_RESULT_YES_WITHOUT_RECONFIGURATION: + if (!updateCodecOperatingRate(newFormat)) { + overridingDiscardReasons |= DISCARD_REASON_OPERATING_RATE_CHANGED; + } else { + codecInputFormat = newFormat; + if (drainAndUpdateCodecDrmSession && !drainAndUpdateCodecDrmSessionV23()) { + overridingDiscardReasons |= DISCARD_REASON_WORKAROUND; + } } break; default: throw new IllegalStateException(); // Never happens. } + + if (evaluation.result != REUSE_RESULT_NO + && (this.codec != codec || codecDrainAction == DRAIN_ACTION_REINITIALIZE)) { + // Initial evaluation indicated reuse was possible, but codec re-initialization was triggered. + // The reasons are indicated by overridingDiscardReasons. 
+ return new DecoderReuseEvaluation( + codecInfo.name, oldFormat, newFormat, REUSE_RESULT_NO, overridingDiscardReasons); + } + + return evaluation; } /** - * Called when the output {@link MediaFormat} of the {@link MediaCodec} changes. + * Called when one of the output formats changes. * *

      The default implementation is a no-op. * - * @param codec The {@link MediaCodec} instance. - * @param outputMediaFormat The new output {@link MediaFormat}. - * @throws ExoPlaybackException Thrown if an error occurs handling the new output media format. + * @param format The input {@link Format} to which future output now corresponds. If the renderer + * is in bypass mode, this is also the output format. + * @param mediaFormat The codec output {@link MediaFormat}, or {@code null} if the renderer is in + * bypass mode. + * @throws ExoPlaybackException Thrown if an error occurs configuring the output. */ - protected void onOutputFormatChanged(MediaCodec codec, MediaFormat outputMediaFormat) + protected void onOutputFormatChanged(Format format, @Nullable MediaFormat mediaFormat) throws ExoPlaybackException { // Do nothing. } @@ -1347,37 +1578,81 @@ protected void handleInputBufferSupplementalData(DecoderInputBuffer buffer) *

      The default implementation is a no-op. * * @param buffer The buffer to be queued. + * @throws ExoPlaybackException Thrown if an error occurs handling the input buffer. */ - protected void onQueueInputBuffer(DecoderInputBuffer buffer) { + protected void onQueueInputBuffer(DecoderInputBuffer buffer) throws ExoPlaybackException { // Do nothing. } /** * Called when an output buffer is successfully processed. - *

      - * The default implementation is a no-op. * * @param presentationTimeUs The timestamp associated with the output buffer. */ + @CallSuper protected void onProcessedOutputBuffer(long presentationTimeUs) { + while (pendingOutputStreamOffsetCount != 0 + && presentationTimeUs >= pendingOutputStreamSwitchTimesUs[0]) { + outputStreamStartPositionUs = pendingOutputStreamStartPositionsUs[0]; + setOutputStreamOffsetUs(pendingOutputStreamOffsetsUs[0]); + pendingOutputStreamOffsetCount--; + System.arraycopy( + pendingOutputStreamStartPositionsUs, + /* srcPos= */ 1, + pendingOutputStreamStartPositionsUs, + /* destPos= */ 0, + pendingOutputStreamOffsetCount); + System.arraycopy( + pendingOutputStreamOffsetsUs, + /* srcPos= */ 1, + pendingOutputStreamOffsetsUs, + /* destPos= */ 0, + pendingOutputStreamOffsetCount); + System.arraycopy( + pendingOutputStreamSwitchTimesUs, + /* srcPos= */ 1, + pendingOutputStreamSwitchTimesUs, + /* destPos= */ 0, + pendingOutputStreamOffsetCount); + onProcessedStreamChange(); + } + } + + /** Called after the last output buffer before a stream change has been processed. */ + protected void onProcessedStreamChange() { // Do nothing. } /** - * Determines whether the existing {@link MediaCodec} can be kept for a new {@link Format}, and if + * Evaluates whether the existing {@link MediaCodec} can be kept for a new {@link Format}, and if * it can whether it requires reconfiguration. * - *

<p>The default implementation returns {@link #KEEP_CODEC_RESULT_NO}.
+   * <p>
      The default implementation does not allow decoder reuse. * - * @param codec The existing {@link MediaCodec} instance. * @param codecInfo A {@link MediaCodecInfo} describing the decoder. * @param oldFormat The {@link Format} for which the existing instance is configured. * @param newFormat The new {@link Format}. - * @return Whether the instance can be kept, and if it can whether it requires reconfiguration. + * @return The result of the evaluation. + */ + protected DecoderReuseEvaluation canReuseCodec( + MediaCodecInfo codecInfo, Format oldFormat, Format newFormat) { + return new DecoderReuseEvaluation( + codecInfo.name, + oldFormat, + newFormat, + REUSE_RESULT_NO, + DISCARD_REASON_REUSE_NOT_IMPLEMENTED); + } + + /** + * Called after the output stream offset changes. + * + *
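// Editorial sketch (not part of this patch): how a renderer subclass might override the new
// canReuseCodec(...) hook to keep the codec when nothing it cares about has changed. It uses only
// the DecoderReuseEvaluation constructor and constants visible in this hunk; real ExoPlayer
// renderers perform much more detailed format comparisons.
@Override
protected DecoderReuseEvaluation canReuseCodec(
    MediaCodecInfo codecInfo, Format oldFormat, Format newFormat) {
  if (newFormat.width == oldFormat.width && newFormat.height == oldFormat.height) {
    // For this simplified renderer the codec can be kept as-is.
    return new DecoderReuseEvaluation(
        codecInfo.name,
        oldFormat,
        newFormat,
        REUSE_RESULT_YES_WITHOUT_RECONFIGURATION,
        /* discardReasons= */ 0);
  }
  // Conservative fallback, matching the default implementation above.
  return new DecoderReuseEvaluation(
      codecInfo.name, oldFormat, newFormat, REUSE_RESULT_NO, DISCARD_REASON_REUSE_NOT_IMPLEMENTED);
}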

      The default implementation is a no-op. + * + * @param outputStreamOffsetUs The output stream offset in microseconds. */ - protected @KeepCodecResult int canKeepCodec( - MediaCodec codec, MediaCodecInfo codecInfo, Format oldFormat, Format newFormat) { - return KEEP_CODEC_RESULT_NO; + protected void onOutputStreamOffsetUsChanged(long outputStreamOffsetUs) { + // Do nothing } @Override @@ -1388,57 +1663,82 @@ public boolean isEnded() { @Override public boolean isReady() { return inputFormat != null - && !waitingForKeys && (isSourceReady() || hasOutputBuffer() || (codecHotswapDeadlineMs != C.TIME_UNSET && SystemClock.elapsedRealtime() < codecHotswapDeadlineMs)); } - /** - * Returns the maximum time to block whilst waiting for a decoded output buffer. - * - * @return The maximum time to block, in microseconds. - */ - protected long getDequeueOutputBufferTimeoutUs() { - return 0; + /** Returns the current playback speed, as set by {@link #setPlaybackSpeed}. */ + protected float getPlaybackSpeed() { + return currentPlaybackSpeed; + } + + /** Returns the operating rate used by the current codec */ + protected float getCodecOperatingRate() { + return codecOperatingRate; } /** - * Returns the {@link MediaFormat#KEY_OPERATING_RATE} value for a given renderer operating rate, - * current {@link Format} and set of possible stream formats. + * Returns the {@link MediaFormat#KEY_OPERATING_RATE} value for a given playback speed, current + * {@link Format} and set of possible stream formats. * *

      The default implementation returns {@link #CODEC_OPERATING_RATE_UNSET}. * - * @param operatingRate The renderer operating rate. + * @param targetPlaybackSpeed The target factor by which playback should be sped up. This may be + * different from the current playback speed, for example, if the speed is temporarily + * adjusted for live playback. * @param format The {@link Format} for which the codec is being configured. * @param streamFormats The possible stream formats. * @return The codec operating rate, or {@link #CODEC_OPERATING_RATE_UNSET} if no codec operating * rate should be set. */ protected float getCodecOperatingRateV23( - float operatingRate, Format format, Format[] streamFormats) { + float targetPlaybackSpeed, Format format, Format[] streamFormats) { return CODEC_OPERATING_RATE_UNSET; } /** - * Updates the codec operating rate. + * Updates the codec operating rate, or triggers codec release and re-initialization if a + * previously set operating rate needs to be cleared. + * + * @throws ExoPlaybackException If an error occurs releasing or initializing a codec. + * @return False if codec release and re-initialization was triggered. True in all other cases. + */ + protected final boolean updateCodecOperatingRate() throws ExoPlaybackException { + return updateCodecOperatingRate(codecInputFormat); + } + + /** + * Updates the codec operating rate, or triggers codec release and re-initialization if a + * previously set operating rate needs to be cleared. * + * @param format The {@link Format} for which the operating rate should be configured. * @throws ExoPlaybackException If an error occurs releasing or initializing a codec. + * @return False if codec release and re-initialization was triggered. True in all other cases. */ - private void updateCodecOperatingRate() throws ExoPlaybackException { + private boolean updateCodecOperatingRate(Format format) throws ExoPlaybackException { if (Util.SDK_INT < 23) { - return; + return true; + } + + if (codec == null + || codecDrainAction == DRAIN_ACTION_REINITIALIZE + || getState() == STATE_DISABLED) { + // No need to update the operating rate. + return true; } float newCodecOperatingRate = - getCodecOperatingRateV23(rendererOperatingRate, codecFormat, getStreamFormats()); + getCodecOperatingRateV23(targetPlaybackSpeed, format, getStreamFormats()); if (codecOperatingRate == newCodecOperatingRate) { // No change. + return true; } else if (newCodecOperatingRate == CODEC_OPERATING_RATE_UNSET) { // The only way to clear the operating rate is to instantiate a new codec instance. See - // [Internal ref: b/71987865]. + // [Internal ref: b/111543954]. drainAndReinitializeCodec(); + return false; } else if (codecOperatingRate != CODEC_OPERATING_RATE_UNSET || newCodecOperatingRate > assumedMinimumCodecOperatingRate) { // We need to set the operating rate, either because we've set it previously or because it's @@ -1447,36 +1747,56 @@ private void updateCodecOperatingRate() throws ExoPlaybackException { codecParameters.putFloat(MediaFormat.KEY_OPERATING_RATE, newCodecOperatingRate); codec.setParameters(codecParameters); codecOperatingRate = newCodecOperatingRate; + return true; } + + return true; } - /** Starts draining the codec for flush. */ - private void drainAndFlushCodec() { + /** + * Starts draining the codec for a flush, or to release and re-initialize the codec if flushing + * will not be possible. If no buffers have been queued to the codec then this method is a no-op. 
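// Editorial sketch (not part of this patch): a video-style override of getCodecOperatingRateV23
// that scales the largest declared frame rate by the target playback speed. It assumes the stream
// formats carry Format.frameRate (Format.NO_VALUE when unknown); ExoPlayer's real video renderer
// layers extra heuristics on top of this.
@Override
protected float getCodecOperatingRateV23(
    float targetPlaybackSpeed, Format format, Format[] streamFormats) {
  float maxFrameRate = -1;
  for (Format streamFormat : streamFormats) {
    float streamFrameRate = streamFormat.frameRate;
    if (streamFrameRate != Format.NO_VALUE) {
      maxFrameRate = Math.max(maxFrameRate, streamFrameRate);
    }
  }
  // No frame rate information: leave the codec operating rate unset.
  return maxFrameRate == -1 ? CODEC_OPERATING_RATE_UNSET : (maxFrameRate * targetPlaybackSpeed);
}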
+ * + * @return False if codec release and re-initialization was triggered due to the need to apply a + * flush workaround. True in all other cases. + */ + private boolean drainAndFlushCodec() { if (codecReceivedBuffers) { codecDrainState = DRAIN_STATE_SIGNAL_END_OF_STREAM; - codecDrainAction = DRAIN_ACTION_FLUSH; + if (codecNeedsFlushWorkaround || codecNeedsEosFlushWorkaround) { + codecDrainAction = DRAIN_ACTION_REINITIALIZE; + return false; + } else { + codecDrainAction = DRAIN_ACTION_FLUSH; + } } + return true; } /** - * Starts draining the codec to update its DRM session. The update may occur immediately if no - * buffers have been queued to the codec. + * Starts draining the codec to flush it and update its DRM session, or to release and + * re-initialize the codec if flushing will not be possible. If no buffers have been queued to the + * codec then this method updates the DRM session immediately without flushing the codec. * * @throws ExoPlaybackException If an error occurs updating the codec's DRM session. + * @return False if codec release and re-initialization was triggered due to the need to apply a + * flush workaround. True in all other cases. */ - private void drainAndUpdateCodecDrmSession() throws ExoPlaybackException { - if (Util.SDK_INT < 23) { - // The codec needs to be re-initialized to switch to the source DRM session. - drainAndReinitializeCodec(); - return; - } + @TargetApi(23) // Only called when SDK_INT >= 23, but lint isn't clever enough to know. + private boolean drainAndUpdateCodecDrmSessionV23() throws ExoPlaybackException { if (codecReceivedBuffers) { codecDrainState = DRAIN_STATE_SIGNAL_END_OF_STREAM; - codecDrainAction = DRAIN_ACTION_UPDATE_DRM_SESSION; + if (codecNeedsFlushWorkaround || codecNeedsEosFlushWorkaround) { + codecDrainAction = DRAIN_ACTION_REINITIALIZE; + return false; + } else { + codecDrainAction = DRAIN_ACTION_FLUSH_AND_UPDATE_DRM_SESSION; + } } else { // Nothing has been queued to the decoder, so we can do the update immediately. - updateDrmSessionOrReinitializeCodecV23(); + updateDrmSessionV23(); } + return true; } /** @@ -1505,8 +1825,7 @@ private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs) int outputIndex; if (codecNeedsEosOutputExceptionWorkaround && codecReceivedEos) { try { - outputIndex = - codec.dequeueOutputBuffer(outputBufferInfo, getDequeueOutputBufferTimeoutUs()); + outputIndex = codec.dequeueOutputBufferIndex(outputBufferInfo); } catch (IllegalStateException e) { processEndOfStream(); if (outputStreamEnded) { @@ -1516,19 +1835,15 @@ private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs) return false; } } else { - outputIndex = - codec.dequeueOutputBuffer(outputBufferInfo, getDequeueOutputBufferTimeoutUs()); + outputIndex = codec.dequeueOutputBufferIndex(outputBufferInfo); } if (outputIndex < 0) { if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED /* (-2) */) { - processOutputFormat(); - return true; - } else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED /* (-3) */) { - processOutputBuffersChanged(); + processOutputMediaFormatChanged(); return true; } - /* MediaCodec.INFO_TRY_AGAIN_LATER (-1) or unknown negative return value */ + // MediaCodec.INFO_TRY_AGAIN_LATER (-1) or unknown negative return value. 
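// Editorial aside (not part of this patch): for reference, the plain framework contract that
// MediaCodecAdapter wraps here. MediaCodec.dequeueOutputBuffer(...) returns either a valid buffer
// index (>= 0) or a negative INFO_* constant that must be handled first, roughly:
//
//   int index = mediaCodec.dequeueOutputBuffer(bufferInfo, /* timeoutUs= */ 0);
//   if (index >= 0) {
//     ByteBuffer buffer = mediaCodec.getOutputBuffer(index); // May be null for surface output.
//     // ... consume the buffer, then release it:
//     mediaCodec.releaseOutputBuffer(index, /* render= */ false);
//   } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { // -2
//     MediaFormat newOutputFormat = mediaCodec.getOutputFormat();
//   } // else MediaCodec.INFO_TRY_AGAIN_LATER (-1): no output is available yet.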
if (codecNeedsEosPropagation && (inputStreamEnded || codecDrainState == DRAIN_STATE_WAIT_END_OF_STREAM)) { processEndOfStream(); @@ -1549,13 +1864,20 @@ private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs) } this.outputIndex = outputIndex; - outputBuffer = getOutputBuffer(outputIndex); + outputBuffer = codec.getOutputBuffer(outputIndex); + // The dequeued buffer is a media buffer. Do some initial setup. // It will be processed by calling processOutputBuffer (possibly multiple times). if (outputBuffer != null) { outputBuffer.position(outputBufferInfo.offset); outputBuffer.limit(outputBufferInfo.offset + outputBufferInfo.size); } + if (codecNeedsEosBufferTimestampWorkaround + && outputBufferInfo.presentationTimeUs == 0 + && (outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0 + && largestQueuedPresentationTimeUs != C.TIME_UNSET) { + outputBufferInfo.presentationTimeUs = largestQueuedPresentationTimeUs; + } isDecodeOnlyOutputBuffer = isDecodeOnlyBuffer(outputBufferInfo.presentationTimeUs); isLastOutputBuffer = lastBufferInStreamPresentationTimeUs == outputBufferInfo.presentationTimeUs; @@ -1573,6 +1895,7 @@ private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs) outputBuffer, outputIndex, outputBufferInfo.flags, + /* sampleCount= */ 1, outputBufferInfo.presentationTimeUs, isDecodeOnlyOutputBuffer, isLastOutputBuffer, @@ -1594,6 +1917,7 @@ private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs) outputBuffer, outputIndex, outputBufferInfo.flags, + /* sampleCount= */ 1, outputBufferInfo.presentationTimeUs, isDecodeOnlyOutputBuffer, isLastOutputBuffer, @@ -1613,8 +1937,8 @@ private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs) return false; } - /** Processes a new output {@link MediaFormat}. */ - private void processOutputFormat() throws ExoPlaybackException { + /** Processes a change in the decoder output {@link MediaFormat}. */ + private void processOutputMediaFormatChanged() { codecHasOutputMediaFormat = true; MediaFormat mediaFormat = codec.getOutputFormat(); if (codecAdaptationWorkaroundMode != ADAPTATION_WORKAROUND_MODE_NEVER @@ -1628,16 +1952,8 @@ private void processOutputFormat() throws ExoPlaybackException { if (codecNeedsMonoChannelCountWorkaround) { mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1); } - onOutputFormatChanged(codec, mediaFormat); - } - - /** - * Processes a change in the output buffers. - */ - private void processOutputBuffersChanged() { - if (Util.SDK_INT < 21) { - outputBuffers = codec.getOutputBuffers(); - } + codecOutputMediaFormat = mediaFormat; + codecOutputMediaFormatChanged = true; } /** @@ -1658,25 +1974,34 @@ private void processOutputBuffersChanged() { * iteration of the rendering loop. * @param elapsedRealtimeUs {@link SystemClock#elapsedRealtime()} in microseconds, measured at the * start of the current iteration of the rendering loop. - * @param codec The {@link MediaCodec} instance. - * @param buffer The output buffer to process. + * @param codec The {@link MediaCodecAdapter} instance, or null in bypass mode were no codec is + * used. + * @param buffer The output buffer to process, or null if the buffer data is not made available to + * the application layer (see {@link MediaCodec#getOutputBuffer(int)}). This {@code buffer} + * can only be null for video data. Note that the buffer data can still be rendered in this + * case by using the {@code bufferIndex}. * @param bufferIndex The index of the output buffer. 
* @param bufferFlags The flags attached to the output buffer. + * @param sampleCount The number of samples extracted from the sample queue in the buffer. This + * allows handling multiple samples as a batch for efficiency. * @param bufferPresentationTimeUs The presentation time of the output buffer in microseconds. * @param isDecodeOnlyBuffer Whether the buffer was marked with {@link C#BUFFER_FLAG_DECODE_ONLY} * by the source. - * @param isLastBuffer Whether the buffer is the last sample of the current stream. + * @param isLastBuffer Whether the buffer is known to contain the last sample of the current + * stream. This flag is set on a best effort basis, and any logic relying on it should degrade + * gracefully to handle cases where it's not set. * @param format The {@link Format} associated with the buffer. - * @return Whether the output buffer was fully processed (e.g. rendered or skipped). + * @return Whether the output buffer was fully processed (for example, rendered or skipped). * @throws ExoPlaybackException If an error occurs processing the output buffer. */ protected abstract boolean processOutputBuffer( long positionUs, long elapsedRealtimeUs, - MediaCodec codec, - ByteBuffer buffer, + @Nullable MediaCodecAdapter codec, + @Nullable ByteBuffer buffer, int bufferIndex, int bufferFlags, + int sampleCount, long bufferPresentationTimeUs, boolean isDecodeOnlyBuffer, boolean isLastBuffer, @@ -1685,8 +2010,8 @@ protected abstract boolean processOutputBuffer( /** * Incrementally renders any remaining output. - *

-   * The default implementation is a no-op.
+   *
+   * <p>
      The default implementation is a no-op. * * @throws ExoPlaybackException Thrown if an error occurs rendering remaining output. */ @@ -1699,16 +2024,19 @@ protected void renderToEndOfStream() throws ExoPlaybackException { * * @throws ExoPlaybackException If an error occurs processing the signal. */ + // codecDrainAction == DRAIN_ACTION_FLUSH_AND_UPDATE_DRM_SESSION implies SDK_INT >= 23. + @TargetApi(23) private void processEndOfStream() throws ExoPlaybackException { switch (codecDrainAction) { case DRAIN_ACTION_REINITIALIZE: reinitializeCodec(); break; - case DRAIN_ACTION_UPDATE_DRM_SESSION: - updateDrmSessionOrReinitializeCodecV23(); + case DRAIN_ACTION_FLUSH_AND_UPDATE_DRM_SESSION: + flushCodec(); + updateDrmSessionV23(); break; case DRAIN_ACTION_FLUSH: - flushOrReinitializeCodec(); + flushCodec(); break; case DRAIN_ACTION_NONE: default: @@ -1726,9 +2054,99 @@ protected final void setPendingOutputEndOfStream() { pendingOutputEndOfStream = true; } + /** + * Returns the offset that should be subtracted from {@code bufferPresentationTimeUs} in {@link + * #processOutputBuffer(long, long, MediaCodecAdapter, ByteBuffer, int, int, int, long, boolean, + * boolean, Format)} to get the playback position with respect to the media. + */ + protected final long getOutputStreamOffsetUs() { + return outputStreamOffsetUs; + } + + private void setOutputStreamOffsetUs(long outputStreamOffsetUs) { + this.outputStreamOffsetUs = outputStreamOffsetUs; + if (outputStreamOffsetUs != C.TIME_UNSET) { + onOutputStreamOffsetUsChanged(outputStreamOffsetUs); + } + } + + /** Returns whether this renderer supports the given {@link Format Format's} DRM scheme. */ + protected static boolean supportsFormatDrm(Format format) { + return format.cryptoType == C.CRYPTO_TYPE_NONE || format.cryptoType == C.CRYPTO_TYPE_FRAMEWORK; + } + + /** + * Returns whether it's necessary to re-initialize the codec to handle a DRM change. If {@code + * false} is returned then either {@code oldSession == newSession} (i.e., there was no change), or + * it's possible to update the existing codec using MediaCrypto.setMediaDrmSession. + */ + private boolean drmNeedsCodecReinitialization( + MediaCodecInfo codecInfo, + Format newFormat, + @Nullable DrmSession oldSession, + @Nullable DrmSession newSession) + throws ExoPlaybackException { + if (oldSession == newSession) { + // No need to re-initialize if the old and new sessions are the same. + return false; + } + + // Note: At least one of oldSession and newSession are non-null. + + if (newSession == null || oldSession == null) { + // Changing from DRM to no DRM and vice-versa always requires re-initialization. + return true; + } + + // Note: Both oldSession and newSession are non-null, and they are different sessions. + + if (!newSession.getSchemeUuid().equals(oldSession.getSchemeUuid())) { + // MediaCrypto.setMediaDrmSession is unable to switch between DRM schemes. + return true; + } + + if (Util.SDK_INT < 23) { + // MediaCrypto.setMediaDrmSession is only available from API level 23, so re-initialization is + // required to switch to newSession on older API levels. + return true; + } + if (C.PLAYREADY_UUID.equals(oldSession.getSchemeUuid()) + || C.PLAYREADY_UUID.equals(newSession.getSchemeUuid())) { + // The PlayReady CDM does not support MediaCrypto.setMediaDrmSession, either as the old or new + // session. + // TODO: Add an API check once [Internal ref: b/128835874] is fixed. 
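// Editorial aside (not part of this patch): when none of the checks in this method force a codec
// re-initialization, the renderer keeps the codec and only swaps the crypto session, which on
// API 23+ comes down to the framework call used by updateDrmSessionV23() further below
// ('mediaCrypto' being the MediaCrypto the codec was configured with, and the session id coming
// from the new DrmSession's FrameworkCryptoConfig):
//
//   mediaCrypto.setMediaDrmSession(sessionId); // android.media.MediaCrypto, API 23+.
//
// If that call throws MediaCryptoException, the renderer surfaces it as a playback error.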
+ return true; + } + @Nullable FrameworkCryptoConfig newCryptoConfig = getFrameworkCryptoConfig(newSession); + if (newCryptoConfig == null) { + // We'd only expect this to happen if the CDM from which newSession is obtained needs + // provisioning. This is unlikely to happen (it probably requires a switch from one DRM scheme + // to another, where the new CDM hasn't been used before and needs provisioning). It would be + // possible to handle this case without codec re-initialization, but it would require the + // re-use code path to be able to wait for provisioning to finish before calling + // MediaCrypto.setMediaDrmSession. The extra complexity is not warranted given how unlikely + // the case is to occur, so we re-initialize in this case. + return true; + } + + boolean requiresSecureDecoder; + if (newCryptoConfig.forceAllowInsecureDecoderComponents) { + requiresSecureDecoder = false; + } else { + requiresSecureDecoder = newSession.requiresSecureDecoder(newFormat.sampleMimeType); + } + if (!codecInfo.secure && requiresSecureDecoder) { + // Re-initialization is required because newSession might require switching to the secure + // output path. + return true; + } + + return false; + } + private void reinitializeCodec() throws ExoPlaybackException { releaseCodec(); - maybeInitCodec(); + maybeInitCodecOrBypass(); } private boolean isDecodeOnlyBuffer(long presentationTimeUs) { @@ -1744,90 +2162,147 @@ private boolean isDecodeOnlyBuffer(long presentationTimeUs) { return false; } - @TargetApi(23) - private void updateDrmSessionOrReinitializeCodecV23() throws ExoPlaybackException { - @Nullable FrameworkMediaCrypto sessionMediaCrypto = sourceDrmSession.getMediaCrypto(); - if (sessionMediaCrypto == null) { - // We'd only expect this to happen if the CDM from which the pending session is obtained needs - // provisioning. This is unlikely to happen (it probably requires a switch from one DRM scheme - // to another, where the new CDM hasn't been used before and needs provisioning). It would be - // possible to handle this case more efficiently (i.e. with a new renderer state that waits - // for provisioning to finish and then calls mediaCrypto.setMediaDrmSession), but the extra - // complexity is not warranted given how unlikely the case is to occur. - reinitializeCodec(); - return; - } - if (C.PLAYREADY_UUID.equals(sessionMediaCrypto.uuid)) { - // The PlayReady CDM does not implement setMediaDrmSession. - // TODO: Add API check once [Internal ref: b/128835874] is fixed. - reinitializeCodec(); - return; - } - - if (flushOrReinitializeCodec()) { - // The codec was reinitialized. The new codec will be using the new DRM session, so there's - // nothing more to do. 
- return; - } - + @RequiresApi(23) + private void updateDrmSessionV23() throws ExoPlaybackException { try { - mediaCrypto.setMediaDrmSession(sessionMediaCrypto.sessionId); + mediaCrypto.setMediaDrmSession(getFrameworkCryptoConfig(sourceDrmSession).sessionId); } catch (MediaCryptoException e) { - throw createRendererException(e, inputFormat); + throw createRendererException(e, inputFormat, PlaybackException.ERROR_CODE_DRM_SYSTEM_ERROR); } setCodecDrmSession(sourceDrmSession); codecDrainState = DRAIN_STATE_NONE; codecDrainAction = DRAIN_ACTION_NONE; } + @Nullable + private FrameworkCryptoConfig getFrameworkCryptoConfig(DrmSession drmSession) + throws ExoPlaybackException { + @Nullable CryptoConfig cryptoConfig = drmSession.getCryptoConfig(); + if (cryptoConfig != null && !(cryptoConfig instanceof FrameworkCryptoConfig)) { + // This should not happen if the track went through a supportsFormatDrm() check, during track + // selection. + throw createRendererException( + new IllegalArgumentException( + "Expecting FrameworkCryptoConfig but found: " + cryptoConfig), + inputFormat, + PlaybackException.ERROR_CODE_DRM_SCHEME_UNSUPPORTED); + } + return (FrameworkCryptoConfig) cryptoConfig; + } + /** - * Returns whether a {@link DrmSession} may require a secure decoder for a given {@link Format}. + * Processes any pending batch of buffers without using a decoder, and drains a new batch of + * buffers from the source. * - * @param drmSession The {@link DrmSession}. - * @param format The {@link Format}. - * @return Whether a secure decoder may be required. + * @param positionUs The current media time in microseconds, measured at the start of the current + * iteration of the rendering loop. + * @param elapsedRealtimeUs {@link SystemClock#elapsedRealtime()} in microseconds, measured at the + * start of the current iteration of the rendering loop. + * @return Whether immediately calling this method again will make more progress. + * @throws ExoPlaybackException If an error occurred while processing a buffer or handling a + * format change. */ - private static boolean maybeRequiresSecureDecoder( - DrmSession drmSession, Format format) { - @Nullable FrameworkMediaCrypto sessionMediaCrypto = drmSession.getMediaCrypto(); - if (sessionMediaCrypto == null) { - // We'd only expect this to happen if the CDM from which the pending session is obtained needs - // provisioning. This is unlikely to happen (it probably requires a switch from one DRM scheme - // to another, where the new CDM hasn't been used before and needs provisioning). Assume that - // a secure decoder may be required. - return true; + private boolean bypassRender(long positionUs, long elapsedRealtimeUs) + throws ExoPlaybackException { + + // Process any batched data. + checkState(!outputStreamEnded); + if (bypassBatchBuffer.hasSamples()) { + if (processOutputBuffer( + positionUs, + elapsedRealtimeUs, + /* codec= */ null, + bypassBatchBuffer.data, + outputIndex, + /* bufferFlags= */ 0, + bypassBatchBuffer.getSampleCount(), + bypassBatchBuffer.getFirstSampleTimeUs(), + bypassBatchBuffer.isDecodeOnly(), + bypassBatchBuffer.isEndOfStream(), + outputFormat)) { + // The batch buffer has been fully processed. + onProcessedOutputBuffer(bypassBatchBuffer.getLastSampleTimeUs()); + bypassBatchBuffer.clear(); + } else { + // Could not process the whole batch buffer. Try again later. + return false; + } } - if (sessionMediaCrypto.forceAllowInsecureDecoderComponents) { + + // Process end of stream, if reached. 
+ if (inputStreamEnded) { + outputStreamEnded = true; return false; } - MediaCrypto mediaCrypto; - try { - mediaCrypto = new MediaCrypto(sessionMediaCrypto.uuid, sessionMediaCrypto.sessionId); - } catch (MediaCryptoException e) { - // This shouldn't happen, but if it does then assume that a secure decoder may be required. - return true; + + if (bypassSampleBufferPending) { + Assertions.checkState(bypassBatchBuffer.append(bypassSampleBuffer)); + bypassSampleBufferPending = false; } - try { - return mediaCrypto.requiresSecureDecoderComponent(format.sampleMimeType); - } finally { - mediaCrypto.release(); + + if (bypassDrainAndReinitialize) { + if (bypassBatchBuffer.hasSamples()) { + // This can only happen if bypassSampleBufferPending was true above. Return true to try and + // immediately process the sample, which has now been appended to the batch buffer. + return true; + } + // The new format might require using a codec rather than bypass. + disableBypass(); + bypassDrainAndReinitialize = false; + maybeInitCodecOrBypass(); + if (!bypassEnabled) { + // We're no longer in bypass mode. + return false; + } } - } - private static MediaCodec.CryptoInfo getFrameworkCryptoInfo( - DecoderInputBuffer buffer, int adaptiveReconfigurationBytes) { - MediaCodec.CryptoInfo cryptoInfo = buffer.cryptoInfo.getFrameworkCryptoInfo(); - if (adaptiveReconfigurationBytes == 0) { - return cryptoInfo; + // Read from the input, appending any sample buffers to the batch buffer. + bypassRead(); + + if (bypassBatchBuffer.hasSamples()) { + bypassBatchBuffer.flip(); } - // There must be at least one sub-sample, although numBytesOfClearData is permitted to be - // null if it contains no clear data. Instantiate it if needed, and add the reconfiguration - // bytes to the clear byte count of the first sub-sample. - if (cryptoInfo.numBytesOfClearData == null) { - cryptoInfo.numBytesOfClearData = new int[1]; + + // We can make more progress if we have batched data, an EOS, or a re-initialization to process + // (note that one or more of the code blocks above will be executed during the next call). + return bypassBatchBuffer.hasSamples() || inputStreamEnded || bypassDrainAndReinitialize; + } + + private void bypassRead() throws ExoPlaybackException { + checkState(!inputStreamEnded); + FormatHolder formatHolder = getFormatHolder(); + bypassSampleBuffer.clear(); + while (true) { + bypassSampleBuffer.clear(); + @ReadDataResult int result = readSource(formatHolder, bypassSampleBuffer, /* readFlags= */ 0); + switch (result) { + case C.RESULT_FORMAT_READ: + onInputFormatChanged(formatHolder); + return; + case C.RESULT_NOTHING_READ: + return; + case C.RESULT_BUFFER_READ: + if (bypassSampleBuffer.isEndOfStream()) { + inputStreamEnded = true; + return; + } + if (waitingForFirstSampleInFormat) { + // This is the first buffer in a new format, the output format must be updated. + outputFormat = checkNotNull(inputFormat); + onOutputFormatChanged(outputFormat, /* mediaFormat= */ null); + waitingForFirstSampleInFormat = false; + } + // Try to append the buffer to the batch buffer. 
+ bypassSampleBuffer.flip(); + if (!bypassBatchBuffer.append(bypassSampleBuffer)) { + bypassSampleBufferPending = true; + return; + } + break; + default: + throw new IllegalStateException(); + } } - cryptoInfo.numBytesOfClearData[0] += adaptiveReconfigurationBytes; - return cryptoInfo; } private static boolean isMediaCodecException(IllegalStateException error) { @@ -1838,18 +2313,26 @@ private static boolean isMediaCodecException(IllegalStateException error) { return stackTrace.length > 0 && stackTrace[0].getClassName().equals("android.media.MediaCodec"); } - @TargetApi(21) + @RequiresApi(21) private static boolean isMediaCodecExceptionV21(IllegalStateException error) { return error instanceof MediaCodec.CodecException; } + @RequiresApi(21) + private static boolean isRecoverableMediaCodecExceptionV21(IllegalStateException error) { + if (error instanceof MediaCodec.CodecException) { + return ((MediaCodec.CodecException) error).isRecoverable(); + } + return false; + } + /** * Returns whether the decoder is known to fail when flushed. - *

-   * If true is returned, the renderer will work around the issue by releasing the decoder and
+   *
+   * <p>If true is returned, the renderer will work around the issue by releasing the decoder and
    * instantiating a new one rather than flushing the current instance.
-   * <p>
-   * See [Internal: b/8347958, b/8543366].
+   *
+   * <p>
      See [Internal: b/8347958, b/8543366]. * * @param name The name of the decoder. * @return True if the decoder is known to fail when flushed. @@ -1857,9 +2340,10 @@ private static boolean isMediaCodecExceptionV21(IllegalStateException error) { private static boolean codecNeedsFlushWorkaround(String name) { return Util.SDK_INT < 18 || (Util.SDK_INT == 18 - && ("OMX.SEC.avc.dec".equals(name) || "OMX.SEC.avc.dec.secure".equals(name))) - || (Util.SDK_INT == 19 && Util.MODEL.startsWith("SM-G800") - && ("OMX.Exynos.avc.dec".equals(name) || "OMX.Exynos.avc.dec.secure".equals(name))); + && ("OMX.SEC.avc.dec".equals(name) || "OMX.SEC.avc.dec.secure".equals(name))) + || (Util.SDK_INT == 19 + && Util.MODEL.startsWith("SM-G800") + && ("OMX.Exynos.avc.dec".equals(name) || "OMX.Exynos.avc.dec.secure".equals(name))); } /** @@ -1876,35 +2360,25 @@ private static boolean codecNeedsFlushWorkaround(String name) { * @return The mode specifying when the adaptation workaround should be enabled. */ private @AdaptationWorkaroundMode int codecAdaptationWorkaroundMode(String name) { - if (Util.SDK_INT <= 25 && "OMX.Exynos.avc.dec.secure".equals(name) - && (Util.MODEL.startsWith("SM-T585") || Util.MODEL.startsWith("SM-A510") - || Util.MODEL.startsWith("SM-A520") || Util.MODEL.startsWith("SM-J700"))) { + if (Util.SDK_INT <= 25 + && "OMX.Exynos.avc.dec.secure".equals(name) + && (Util.MODEL.startsWith("SM-T585") + || Util.MODEL.startsWith("SM-A510") + || Util.MODEL.startsWith("SM-A520") + || Util.MODEL.startsWith("SM-J700"))) { return ADAPTATION_WORKAROUND_MODE_ALWAYS; } else if (Util.SDK_INT < 24 && ("OMX.Nvidia.h264.decode".equals(name) || "OMX.Nvidia.h264.decode.secure".equals(name)) - && ("flounder".equals(Util.DEVICE) || "flounder_lte".equals(Util.DEVICE) - || "grouper".equals(Util.DEVICE) || "tilapia".equals(Util.DEVICE))) { + && ("flounder".equals(Util.DEVICE) + || "flounder_lte".equals(Util.DEVICE) + || "grouper".equals(Util.DEVICE) + || "tilapia".equals(Util.DEVICE))) { return ADAPTATION_WORKAROUND_MODE_SAME_RESOLUTION; } else { return ADAPTATION_WORKAROUND_MODE_NEVER; } } - /** - * Returns whether the decoder is known to fail when an attempt is made to reconfigure it with a - * new format's configuration data. - * - *

      When enabled, the workaround will always release and recreate the decoder, rather than - * attempting to reconfigure the existing instance. - * - * @param name The name of the decoder. - * @return True if the decoder is known to fail when an attempt is made to reconfigure it with a - * new format's configuration data. - */ - private static boolean codecNeedsReconfigureWorkaround(String name) { - return Util.MODEL.startsWith("SM-T230") && "OMX.MARVELL.VIDEO.HW.CODA7542DECODER".equals(name); - } - /** * Returns whether the decoder is an H.264/AVC decoder known to fail if NAL units are queued * before the codec specific data. @@ -1917,10 +2391,28 @@ private static boolean codecNeedsReconfigureWorkaround(String name) { * @return True if the decoder is known to fail if NAL units are queued before CSD. */ private static boolean codecNeedsDiscardToSpsWorkaround(String name, Format format) { - return Util.SDK_INT < 21 && format.initializationData.isEmpty() + return Util.SDK_INT < 21 + && format.initializationData.isEmpty() && "OMX.MTK.VIDEO.DECODER.AVC".equals(name); } + /** + * Returns whether the decoder is known to behave incorrectly if flushed prior to having output a + * {@link MediaFormat}. + * + *

      If true is returned, the renderer will work around the issue by instantiating a new decoder + * when this case occurs. + * + *

      See [Internal: b/141097367]. + * + * @param name The name of the decoder. + * @return True if the decoder is known to behave incorrectly if flushed prior to having output a + * {@link MediaFormat}. False otherwise. + */ + private static boolean codecNeedsSosFlushWorkaround(String name) { + return Util.SDK_INT == 29 && "c2.android.aac.decoder".equals(name); + } + /** * Returns whether the decoder is known to handle the propagation of the {@link * MediaCodec#BUFFER_FLAG_END_OF_STREAM} flag incorrectly on the host device. @@ -1937,17 +2429,24 @@ private static boolean codecNeedsEosPropagationWorkaround(MediaCodecInfo codecIn String name = codecInfo.name; return (Util.SDK_INT <= 25 && "OMX.rk.video_decoder.avc".equals(name)) || (Util.SDK_INT <= 17 && "OMX.allwinner.video.decoder.avc".equals(name)) + || (Util.SDK_INT <= 29 + && ("OMX.broadcom.video_decoder.tunnel".equals(name) + || "OMX.broadcom.video_decoder.tunnel.secure".equals(name) + || "OMX.bcm.vdec.avc.tunnel".equals(name) + || "OMX.bcm.vdec.avc.tunnel.secure".equals(name) + || "OMX.bcm.vdec.hevc.tunnel".equals(name) + || "OMX.bcm.vdec.hevc.tunnel.secure".equals(name))) || ("Amazon".equals(Util.MANUFACTURER) && "AFTS".equals(Util.MODEL) && codecInfo.secure); } /** * Returns whether the decoder is known to behave incorrectly if flushed after receiving an input * buffer with {@link MediaCodec#BUFFER_FLAG_END_OF_STREAM} set. - *

      - * If true is returned, the renderer will work around the issue by instantiating a new decoder + * + *

      If true is returned, the renderer will work around the issue by instantiating a new decoder * when this case occurs. - *

      - * See [Internal: b/8578467, b/23361053]. + * + *

      See [Internal: b/8578467, b/23361053]. * * @param name The name of the decoder. * @return True if the decoder is known to behave incorrectly if flushed after receiving an input @@ -1962,12 +2461,30 @@ private static boolean codecNeedsEosFlushWorkaround(String name) { } /** - * Returns whether the decoder may throw an {@link IllegalStateException} from - * {@link MediaCodec#dequeueOutputBuffer(MediaCodec.BufferInfo, long)} or - * {@link MediaCodec#releaseOutputBuffer(int, boolean)} after receiving an input - * buffer with {@link MediaCodec#BUFFER_FLAG_END_OF_STREAM} set. - *

      - * See [Internal: b/17933838]. + * Returns whether the decoder may output a non-empty buffer with timestamp 0 as the end of stream + * buffer. + * + *

      See GitHub issue #5045. + */ + private static boolean codecNeedsEosBufferTimestampWorkaround(String codecName) { + return Util.SDK_INT < 21 + && "OMX.SEC.mp3.dec".equals(codecName) + && "samsung".equals(Util.MANUFACTURER) + && (Util.DEVICE.startsWith("baffin") + || Util.DEVICE.startsWith("grand") + || Util.DEVICE.startsWith("fortuna") + || Util.DEVICE.startsWith("gprimelte") + || Util.DEVICE.startsWith("j2y18lte") + || Util.DEVICE.startsWith("ms01")); + } + + /** + * Returns whether the decoder may throw an {@link IllegalStateException} from {@link + * MediaCodec#dequeueOutputBuffer(MediaCodec.BufferInfo, long)} or {@link + * MediaCodec#releaseOutputBuffer(int, boolean)} after receiving an input buffer with {@link + * MediaCodec#BUFFER_FLAG_END_OF_STREAM} set. + * + *

      See [Internal: b/17933838]. * * @param name The name of the decoder. * @return True if the decoder may throw an exception after receiving an end-of-stream buffer. @@ -1991,24 +2508,22 @@ private static boolean codecNeedsEosOutputExceptionWorkaround(String name) { * channel. False otherwise. */ private static boolean codecNeedsMonoChannelCountWorkaround(String name, Format format) { - return Util.SDK_INT <= 18 && format.channelCount == 1 + return Util.SDK_INT <= 18 + && format.channelCount == 1 && "OMX.MTK.AUDIO.DECODER.MP3".equals(name); } - /** - * Returns whether the decoder is known to behave incorrectly if flushed prior to having output a - * {@link MediaFormat}. - * - *

      If true is returned, the renderer will work around the issue by instantiating a new decoder - * when this case occurs. - * - *

      See [Internal: b/141097367]. - * - * @param name The name of the decoder. - * @return True if the decoder is known to behave incorrectly if flushed prior to having output a - * {@link MediaFormat}. False otherwise. - */ - private static boolean codecNeedsSosFlushWorkaround(String name) { - return Util.SDK_INT == 29 && "c2.android.aac.decoder".equals(name); + @RequiresApi(31) + private static final class Api31 { + private Api31() {} + + @DoNotInline + public static void setLogSessionIdToMediaCodecFormat( + MediaCodecAdapter.Configuration codecConfiguration, PlayerId playerId) { + LogSessionId logSessionId = playerId.getLogSessionId(); + if (!logSessionId.equals(LogSessionId.LOG_SESSION_ID_NONE)) { + codecConfiguration.mediaFormat.setString("log-session-id", logSessionId.getStringId()); + } + } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecSelector.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecSelector.java index 10ff81147e..6e3893cb48 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecSelector.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecSelector.java @@ -16,35 +16,17 @@ package com.google.android.exoplayer2.mediacodec; import android.media.MediaCodec; -import androidx.annotation.Nullable; import com.google.android.exoplayer2.mediacodec.MediaCodecUtil.DecoderQueryException; import java.util.List; -/** - * Selector of {@link MediaCodec} instances. - */ +/** Selector of {@link MediaCodec} instances. */ public interface MediaCodecSelector { /** * Default implementation of {@link MediaCodecSelector}, which returns the preferred decoder for * the given format. */ - MediaCodecSelector DEFAULT = - new MediaCodecSelector() { - @Override - public List getDecoderInfos( - String mimeType, boolean requiresSecureDecoder, boolean requiresTunnelingDecoder) - throws DecoderQueryException { - return MediaCodecUtil.getDecoderInfos( - mimeType, requiresSecureDecoder, requiresTunnelingDecoder); - } - - @Override - @Nullable - public MediaCodecInfo getPassthroughDecoderInfo() throws DecoderQueryException { - return MediaCodecUtil.getPassthroughDecoderInfo(); - } - }; + MediaCodecSelector DEFAULT = MediaCodecUtil::getDecoderInfos; /** * Returns a list of decoders that can decode media in the specified MIME type, in priority order. @@ -59,13 +41,4 @@ public MediaCodecInfo getPassthroughDecoderInfo() throws DecoderQueryException { List getDecoderInfos( String mimeType, boolean requiresSecureDecoder, boolean requiresTunnelingDecoder) throws DecoderQueryException; - - /** - * Selects a decoder to instantiate for audio passthrough. - * - * @return A {@link MediaCodecInfo} describing the decoder, or null if no suitable decoder exists. - * @throws DecoderQueryException Thrown if there was an error querying decoders. 
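// Editorial sketch (not part of this patch): with getPassthroughDecoderInfo() removed,
// MediaCodecSelector is a single-method interface, which is why DEFAULT can now be the method
// reference MediaCodecUtil::getDecoderInfos. A custom selector can be supplied as a lambda in the
// same way. The example below prefers software decoders; it assumes java.util.List/ArrayList
// imports and that MediaCodecInfo exposes the softwareOnly flag populated from isSoftwareOnly()
// in MediaCodecUtil.
MediaCodecSelector preferSoftwareDecoders =
    (mimeType, requiresSecureDecoder, requiresTunnelingDecoder) -> {
      List<MediaCodecInfo> decoderInfos =
          MediaCodecUtil.getDecoderInfos(mimeType, requiresSecureDecoder, requiresTunnelingDecoder);
      List<MediaCodecInfo> softwareDecoders = new ArrayList<>();
      for (MediaCodecInfo decoderInfo : decoderInfos) {
        if (decoderInfo.softwareOnly) {
          softwareDecoders.add(decoderInfo);
        }
      }
      // Fall back to the full list if no software decoder is available.
      return softwareDecoders.isEmpty() ? decoderInfos : softwareDecoders;
    };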
- */ - @Nullable - MediaCodecInfo getPassthroughDecoderInfo() throws DecoderQueryException; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecUtil.java index 2b8dcc0a55..16583004fc 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecUtil.java @@ -15,42 +15,43 @@ */ package com.google.android.exoplayer2.mediacodec; +import static java.lang.Math.max; + import android.annotation.SuppressLint; -import android.annotation.TargetApi; import android.media.MediaCodecInfo.CodecCapabilities; import android.media.MediaCodecInfo.CodecProfileLevel; import android.media.MediaCodecList; import android.text.TextUtils; import android.util.Pair; -import android.util.SparseIntArray; import androidx.annotation.CheckResult; +import androidx.annotation.GuardedBy; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; +import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.video.ColorInfo; +import com.google.common.base.Ascii; +import com.google.common.collect.ImmutableList; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.checkerframework.checker.nullness.qual.EnsuresNonNull; -/** - * A utility class for querying the available codecs. - */ +/** A utility class for querying the available codecs. */ @SuppressLint("InlinedApi") public final class MediaCodecUtil { /** * Thrown when an error occurs querying the device for its underlying media capabilities. - *

-   * Such failures are not expected in normal operation and are normally temporary (e.g. if the
+   *
+   * <p>
      Such failures are not expected in normal operation and are normally temporary (e.g. if the * mediaserver process has crashed and is yet to restart). */ public static class DecoderQueryException extends Exception { @@ -58,36 +59,26 @@ public static class DecoderQueryException extends Exception { private DecoderQueryException(Throwable cause) { super("Failed to query underlying media codecs", cause); } - } private static final String TAG = "MediaCodecUtil"; private static final Pattern PROFILE_PATTERN = Pattern.compile("^\\D?(\\d+)$"); + @GuardedBy("MediaCodecUtil.class") private static final HashMap> decoderInfosCache = new HashMap<>(); // Codecs to constant mappings. // AVC. - private static final SparseIntArray AVC_PROFILE_NUMBER_TO_CONST; - private static final SparseIntArray AVC_LEVEL_NUMBER_TO_CONST; private static final String CODEC_ID_AVC1 = "avc1"; private static final String CODEC_ID_AVC2 = "avc2"; // VP9 - private static final SparseIntArray VP9_PROFILE_NUMBER_TO_CONST; - private static final SparseIntArray VP9_LEVEL_NUMBER_TO_CONST; private static final String CODEC_ID_VP09 = "vp09"; // HEVC. - private static final Map HEVC_CODEC_STRING_TO_PROFILE_LEVEL; private static final String CODEC_ID_HEV1 = "hev1"; private static final String CODEC_ID_HVC1 = "hvc1"; - // Dolby Vision. - private static final Map DOLBY_VISION_STRING_TO_PROFILE; - private static final Map DOLBY_VISION_STRING_TO_LEVEL; // AV1. - private static final SparseIntArray AV1_LEVEL_NUMBER_TO_CONST; private static final String CODEC_ID_AV01 = "av01"; // MP4A AAC. - private static final SparseIntArray MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE; private static final String CODEC_ID_MP4A = "mp4a"; // Lazily initialized. @@ -116,18 +107,21 @@ public static void warmDecoderInfoCache(String mimeType, boolean secure, boolean } } + /* Clears the codec cache.*/ + @VisibleForTesting + public static synchronized void clearDecoderInfoCache() { + decoderInfosCache.clear(); + } + /** - * Returns information about a decoder suitable for audio passthrough. + * Returns information about a decoder that will only decrypt data, without decoding it. * * @return A {@link MediaCodecInfo} describing the decoder, or null if no suitable decoder exists. * @throws DecoderQueryException If there was an error querying the available decoders. */ @Nullable - public static MediaCodecInfo getPassthroughDecoderInfo() throws DecoderQueryException { - @Nullable - MediaCodecInfo decoderInfo = - getDecoderInfo(MimeTypes.AUDIO_RAW, /* secure= */ false, /* tunneling= */ false); - return decoderInfo == null ? null : MediaCodecInfo.newPassthroughInstance(decoderInfo.name); + public static MediaCodecInfo getDecryptOnlyDecoderInfo() throws DecoderQueryException { + return getDecoderInfo(MimeTypes.AUDIO_RAW, /* secure= */ false, /* tunneling= */ false); } /** @@ -179,33 +173,30 @@ public static synchronized List getDecoderInfos( mediaCodecList = new MediaCodecListCompatV16(); decoderInfos = getDecoderInfosInternal(key, mediaCodecList); if (!decoderInfos.isEmpty()) { - Log.w(TAG, "MediaCodecList API didn't list secure decoder for: " + mimeType - + ". Assuming: " + decoderInfos.get(0).name); + Log.w( + TAG, + "MediaCodecList API didn't list secure decoder for: " + + mimeType + + ". 
Assuming: " + + decoderInfos.get(0).name); } } applyWorkarounds(mimeType, decoderInfos); - List unmodifiableDecoderInfos = Collections.unmodifiableList(decoderInfos); - decoderInfosCache.put(key, unmodifiableDecoderInfos); - return unmodifiableDecoderInfos; + ImmutableList immutableDecoderInfos = ImmutableList.copyOf(decoderInfos); + decoderInfosCache.put(key, immutableDecoderInfos); + return immutableDecoderInfos; } /** - * Returns a copy of the provided decoder list sorted such that decoders with format support are - * listed first. The returned list is modifiable for convenience. + * Returns a copy of the provided decoder list sorted such that decoders with functional format + * support are listed first. The returned list is modifiable for convenience. */ @CheckResult public static List getDecoderInfosSortedByFormatSupport( List decoderInfos, Format format) { decoderInfos = new ArrayList<>(decoderInfos); sortByScore( - decoderInfos, - decoderInfo -> { - try { - return decoderInfo.isFormatSupported(format) ? 1 : 0; - } catch (DecoderQueryException e) { - return -1; - } - }); + decoderInfos, decoderInfo -> decoderInfo.isFormatFunctionallySupported(format) ? 1 : 0); return decoderInfos; } @@ -222,11 +213,11 @@ public static int maxH264DecodableFrameSize() throws DecoderQueryException { getDecoderInfo(MimeTypes.VIDEO_H264, /* secure= */ false, /* tunneling= */ false); if (decoderInfo != null) { for (CodecProfileLevel profileLevel : decoderInfo.getProfileLevels()) { - result = Math.max(avcLevelToMaxFrameSize(profileLevel.level), result); + result = max(avcLevelToMaxFrameSize(profileLevel.level), result); } // We assume support for at least 480p (SDK_INT >= 21) or 360p (SDK_INT < 21), which are // the levels mandated by the Android CDD. - result = Math.max(result, Util.SDK_INT >= 21 ? (720 * 480) : (480 * 360)); + result = max(result, Util.SDK_INT >= 21 ? (720 * 480) : (480 * 360)); } maxH264DecodableFrameSize = result; } @@ -269,6 +260,41 @@ public static Pair getCodecProfileAndLevel(Format format) { } } + /** + * Returns an alternative codec MIME type (besides the default {@link Format#sampleMimeType}) that + * can be used to decode samples of the provided {@link Format}. + * + * @param format The media format. + * @return An alternative MIME type of a codec that be used decode samples of the provided {@code + * Format} (besides the default {@link Format#sampleMimeType}), or null if no such alternative + * exists. + */ + @Nullable + public static String getAlternativeCodecMimeType(Format format) { + if (MimeTypes.AUDIO_E_AC3_JOC.equals(format.sampleMimeType)) { + // E-AC3 decoders can decode JOC streams, but in 2-D rather than 3-D. + return MimeTypes.AUDIO_E_AC3; + } + if (MimeTypes.VIDEO_DOLBY_VISION.equals(format.sampleMimeType)) { + // H.264/AVC or H.265/HEVC decoders can decode the base layer of some DV profiles. This can't + // be done for profile CodecProfileLevel.DolbyVisionProfileDvheStn and profile + // CodecProfileLevel.DolbyVisionProfileDvheDtb because the first one is not backward + // compatible and the second one is deprecated and is not always backward compatible. 
+ @Nullable + Pair codecProfileAndLevel = MediaCodecUtil.getCodecProfileAndLevel(format); + if (codecProfileAndLevel != null) { + int profile = codecProfileAndLevel.first; + if (profile == CodecProfileLevel.DolbyVisionProfileDvheDtr + || profile == CodecProfileLevel.DolbyVisionProfileDvheSt) { + return MimeTypes.VIDEO_H265; + } else if (profile == CodecProfileLevel.DolbyVisionProfileDvavSe) { + return MimeTypes.VIDEO_H264; + } + } + } + return null; + } + // Internal methods. /** @@ -323,10 +349,9 @@ private static ArrayList getDecoderInfosInternal( if ((!key.secure && secureRequired) || (key.secure && !secureSupported)) { continue; } - boolean hardwareAccelerated = isHardwareAccelerated(codecInfo); - boolean softwareOnly = isSoftwareOnly(codecInfo); + boolean hardwareAccelerated = isHardwareAccelerated(codecInfo, mimeType); + boolean softwareOnly = isSoftwareOnly(codecInfo, mimeType); boolean vendor = isVendor(codecInfo); - boolean forceDisableAdaptive = codecNeedsDisableAdaptationWorkaround(name); if ((secureDecodersExplicit && key.secure == secureSupported) || (!secureDecodersExplicit && !key.secure)) { decoderInfos.add( @@ -338,7 +363,7 @@ private static ArrayList getDecoderInfosInternal( hardwareAccelerated, softwareOnly, vendor, - forceDisableAdaptive, + /* forceDisableAdaptive= */ false, /* forceSecure= */ false)); } else if (!secureDecodersExplicit && secureSupported) { decoderInfos.add( @@ -350,7 +375,7 @@ private static ArrayList getDecoderInfosInternal( hardwareAccelerated, softwareOnly, vendor, - forceDisableAdaptive, + /* forceDisableAdaptive= */ false, /* forceSecure= */ true)); // It only makes sense to have one synthesized secure decoder, return immediately. return decoderInfos; @@ -388,9 +413,7 @@ private static ArrayList getDecoderInfosInternal( */ @Nullable private static String getCodecMimeType( - android.media.MediaCodecInfo info, - String name, - String mimeType) { + android.media.MediaCodecInfo info, String name, String mimeType) { String[] supportedTypes = info.getSupportedTypes(); for (String supportedType : supportedTypes) { if (supportedType.equalsIgnoreCase(mimeType)) { @@ -411,6 +434,8 @@ private static String getCodecMimeType( return "audio/x-lg-alac"; } else if (mimeType.equals(MimeTypes.AUDIO_FLAC) && "OMX.lge.flac.decoder".equals(name)) { return "audio/x-lg-flac"; + } else if (mimeType.equals(MimeTypes.AUDIO_AC3) && "OMX.lge.ac3.decoder".equals(name)) { + return "audio/lg-ac3"; } return null; @@ -512,13 +537,16 @@ private static boolean isCodecUsableDecoder( } // VP8 decoder on Samsung Galaxy S4 cannot be queried. - if (Util.SDK_INT <= 19 && Util.DEVICE.startsWith("jflte") + if (Util.SDK_INT <= 19 + && Util.DEVICE.startsWith("jflte") && "OMX.qcom.video.decoder.vp8".equals(name)) { return false; } - // MTK E-AC3 decoder doesn't support decoding JOC streams in 2-D. See [Internal: b/69400041]. - if (MimeTypes.AUDIO_E_AC3_JOC.equals(mimeType) && "OMX.MTK.AUDIO.DECODER.DSPAC3".equals(name)) { + // MTK AC3 decoder doesn't support decoding JOC streams in 2-D. See [Internal: b/69400041]. + if (Util.SDK_INT <= 23 + && MimeTypes.AUDIO_E_AC3_JOC.equals(mimeType) + && "OMX.MTK.AUDIO.DECODER.DSPAC3".equals(name)) { return false; } @@ -583,10 +611,10 @@ private static void applyWorkarounds(String mimeType, List decod } } - if (Util.SDK_INT < 30 && decoderInfos.size() > 1) { + if (Util.SDK_INT < 32 && decoderInfos.size() > 1) { String firstCodecName = decoderInfos.get(0).name; // Prefer anything other than OMX.qti.audio.decoder.flac on older devices. 
See [Internal - // ref: b/147278539] and [Internal ref: b/147354613]. + // ref: b/199124812]. if ("OMX.qti.audio.decoder.flac".equals(firstCodecName)) { decoderInfos.add(decoderInfos.remove(0)); } @@ -606,16 +634,17 @@ private static boolean isAliasV29(android.media.MediaCodecInfo info) { * The result of {@link android.media.MediaCodecInfo#isHardwareAccelerated()} for API levels 29+, * or a best-effort approximation for lower levels. */ - private static boolean isHardwareAccelerated(android.media.MediaCodecInfo codecInfo) { + private static boolean isHardwareAccelerated( + android.media.MediaCodecInfo codecInfo, String mimeType) { if (Util.SDK_INT >= 29) { return isHardwareAcceleratedV29(codecInfo); } // codecInfo.isHardwareAccelerated() != codecInfo.isSoftwareOnly() is not necessarily true. // However, we assume this to be true as an approximation. - return !isSoftwareOnly(codecInfo); + return !isSoftwareOnly(codecInfo, mimeType); } - @TargetApi(29) + @RequiresApi(29) private static boolean isHardwareAcceleratedV29(android.media.MediaCodecInfo codecInfo) { return codecInfo.isHardwareAccelerated(); } @@ -624,12 +653,17 @@ private static boolean isHardwareAcceleratedV29(android.media.MediaCodecInfo cod * The result of {@link android.media.MediaCodecInfo#isSoftwareOnly()} for API levels 29+, or a * best-effort approximation for lower levels. */ - private static boolean isSoftwareOnly(android.media.MediaCodecInfo codecInfo) { + private static boolean isSoftwareOnly(android.media.MediaCodecInfo codecInfo, String mimeType) { if (Util.SDK_INT >= 29) { return isSoftwareOnlyV29(codecInfo); } - String codecName = Util.toLowerInvariant(codecInfo.getName()); - if (codecName.startsWith("arc.")) { // App Runtime for Chrome (ARC) codecs + if (MimeTypes.isAudio(mimeType)) { + // Assume audio decoders are software only. + return true; + } + String codecName = Ascii.toLowerCase(codecInfo.getName()); + if (codecName.startsWith("arc.")) { + // App Runtime for Chrome (ARC) codecs return false; } return codecName.startsWith("omx.google.") @@ -641,7 +675,7 @@ private static boolean isSoftwareOnly(android.media.MediaCodecInfo codecInfo) { || (!codecName.startsWith("omx.") && !codecName.startsWith("c2.")); } - @TargetApi(29) + @RequiresApi(29) private static boolean isSoftwareOnlyV29(android.media.MediaCodecInfo codecInfo) { return codecInfo.isSoftwareOnly(); } @@ -654,30 +688,17 @@ private static boolean isVendor(android.media.MediaCodecInfo codecInfo) { if (Util.SDK_INT >= 29) { return isVendorV29(codecInfo); } - String codecName = Util.toLowerInvariant(codecInfo.getName()); + String codecName = Ascii.toLowerCase(codecInfo.getName()); return !codecName.startsWith("omx.google.") && !codecName.startsWith("c2.android.") && !codecName.startsWith("c2.google."); } - @TargetApi(29) + @RequiresApi(29) private static boolean isVendorV29(android.media.MediaCodecInfo codecInfo) { return codecInfo.isVendor(); } - /** - * Returns whether the decoder is known to fail when adapting, despite advertising itself as an - * adaptive decoder. - * - * @param name The decoder name. - * @return True if the decoder is known to fail when adapting. 
- */ - private static boolean codecNeedsDisableAdaptationWorkaround(String name) { - return Util.SDK_INT <= 22 - && ("ODROID-XU3".equals(Util.MODEL) || "Nexus 10".equals(Util.MODEL)) - && ("OMX.Exynos.AVC.Decoder".equals(name) || "OMX.Exynos.AVC.Decoder.secure".equals(name)); - } - @Nullable private static Pair getDolbyVisionProfileAndLevel( String codec, String[] parts) { @@ -693,13 +714,13 @@ private static Pair getDolbyVisionProfileAndLevel( return null; } @Nullable String profileString = matcher.group(1); - @Nullable Integer profile = DOLBY_VISION_STRING_TO_PROFILE.get(profileString); + @Nullable Integer profile = dolbyVisionStringToProfile(profileString); if (profile == null) { Log.w(TAG, "Unknown Dolby Vision profile string: " + profileString); return null; } String levelString = parts[2]; - @Nullable Integer level = DOLBY_VISION_STRING_TO_LEVEL.get(levelString); + @Nullable Integer level = dolbyVisionStringToLevel(levelString); if (level == null) { Log.w(TAG, "Unknown Dolby Vision level string: " + levelString); return null; @@ -731,7 +752,7 @@ private static Pair getHevcProfileAndLevel(String codec, Strin return null; } @Nullable String levelString = parts[3]; - @Nullable Integer level = HEVC_CODEC_STRING_TO_PROFILE_LEVEL.get(levelString); + @Nullable Integer level = hevcCodecStringToProfileLevel(levelString); if (level == null) { Log.w(TAG, "Unknown HEVC level string: " + levelString); return null; @@ -767,12 +788,12 @@ private static Pair getAvcProfileAndLevel(String codec, String return null; } - int profile = AVC_PROFILE_NUMBER_TO_CONST.get(profileInteger, -1); + int profile = avcProfileNumberToConst(profileInteger); if (profile == -1) { Log.w(TAG, "Unknown AVC profile: " + profileInteger); return null; } - int level = AVC_LEVEL_NUMBER_TO_CONST.get(levelInteger, -1); + int level = avcLevelNumberToConst(levelInteger); if (level == -1) { Log.w(TAG, "Unknown AVC level: " + levelInteger); return null; @@ -796,12 +817,12 @@ private static Pair getVp9ProfileAndLevel(String codec, String return null; } - int profile = VP9_PROFILE_NUMBER_TO_CONST.get(profileInteger, -1); + int profile = vp9ProfileNumberToConst(profileInteger); if (profile == -1) { Log.w(TAG, "Unknown VP9 profile: " + profileInteger); return null; } - int level = VP9_LEVEL_NUMBER_TO_CONST.get(levelInteger, -1); + int level = vp9LevelNumberToConst(levelInteger); if (level == -1) { Log.w(TAG, "Unknown VP9 level: " + levelInteger); return null; @@ -848,7 +869,7 @@ private static Pair getAv1ProfileAndLevel( profile = CodecProfileLevel.AV1ProfileMain10; } - int level = AV1_LEVEL_NUMBER_TO_CONST.get(levelInteger, -1); + int level = av1LevelNumberToConst(levelInteger); if (level == -1) { Log.w(TAG, "Unknown AV1 level: " + levelInteger); return null; @@ -859,9 +880,9 @@ private static Pair getAv1ProfileAndLevel( /** * Conversion values taken from ISO 14496-10 Table A-1. * - * @param avcLevel one of CodecProfileLevel.AVCLevel* constants. - * @return maximum frame size that can be decoded by a decoder with the specified avc level - * (or {@code -1} if the level is not recognized) + * @param avcLevel One of the {@link CodecProfileLevel} {@code AVCLevel*} constants. + * @return The maximum frame size that can be decoded by a decoder with the specified AVC level, + * or {@code -1} if the level is not recognized. 
*/ private static int avcLevelToMaxFrameSize(int avcLevel) { switch (avcLevel) { @@ -891,6 +912,10 @@ private static int avcLevelToMaxFrameSize(int avcLevel) { case CodecProfileLevel.AVCLevel51: case CodecProfileLevel.AVCLevel52: return 36864 * 16 * 16; + case CodecProfileLevel.AVCLevel6: + case CodecProfileLevel.AVCLevel61: + case CodecProfileLevel.AVCLevel62: + return 139264 * 16 * 16; default: return -1; } @@ -909,7 +934,7 @@ private static Pair getAacCodecProfileAndLevel(String codec, S if (MimeTypes.AUDIO_AAC.equals(mimeType)) { // For MPEG-4 audio this is followed by an audio object type indication as a decimal number. int audioObjectTypeIndication = Integer.parseInt(parts[2]); - int profile = MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.get(audioObjectTypeIndication, -1); + int profile = mp4aAudioObjectTypeToProfile(audioObjectTypeIndication); if (profile != -1) { // Level is set to zero in AAC decoder CodecProfileLevels. return new Pair<>(profile, 0); @@ -934,9 +959,7 @@ private interface ScoreProvider { private interface MediaCodecListCompat { - /** - * The number of codecs in the list. - */ + /** The number of codecs in the list. */ int getCodecCount(); /** @@ -946,9 +969,7 @@ private interface MediaCodecListCompat { */ android.media.MediaCodecInfo getCodecInfoAt(int index); - /** - * Returns whether secure decoders are explicitly listed, if present. - */ + /** Returns whether secure decoders are explicitly listed, if present. */ boolean secureDecodersExplicit(); /** Whether the specified {@link CodecCapabilities} {@code feature} is supported. */ @@ -958,15 +979,13 @@ private interface MediaCodecListCompat { boolean isFeatureRequired(String feature, String mimeType, CodecCapabilities capabilities); } - @TargetApi(21) + @RequiresApi(21) private static final class MediaCodecListCompatV21 implements MediaCodecListCompat { private final int codecKind; @Nullable private android.media.MediaCodecInfo[] mediaCodecInfos; - // the constructor does not initialize fields: mediaCodecInfos - @SuppressWarnings("nullness:initialization.fields.uninitialized") public MediaCodecListCompatV21(boolean includeSecure, boolean includeTunneling) { codecKind = includeSecure || includeTunneling @@ -980,8 +999,6 @@ public int getCodecCount() { return mediaCodecInfos.length; } - // incompatible types in return. 
- @SuppressWarnings("nullness:return.type.incompatible") @Override public android.media.MediaCodecInfo getCodecInfoAt(int index) { ensureMediaCodecInfosInitialized(); @@ -1011,7 +1028,6 @@ private void ensureMediaCodecInfosInitialized() { mediaCodecInfos = new MediaCodecList(codecKind).getCodecInfos(); } } - } private static final class MediaCodecListCompatV16 implements MediaCodecListCompat { @@ -1045,7 +1061,6 @@ public boolean isFeatureRequired( String feature, String mimeType, CodecCapabilities capabilities) { return false; } - } private static final class CodecKey { @@ -1083,150 +1098,334 @@ public boolean equals(@Nullable Object obj) { && secure == other.secure && tunneling == other.tunneling; } + } + private static int avcProfileNumberToConst(int profileNumber) { + switch (profileNumber) { + case 66: + return CodecProfileLevel.AVCProfileBaseline; + case 77: + return CodecProfileLevel.AVCProfileMain; + case 88: + return CodecProfileLevel.AVCProfileExtended; + case 100: + return CodecProfileLevel.AVCProfileHigh; + case 110: + return CodecProfileLevel.AVCProfileHigh10; + case 122: + return CodecProfileLevel.AVCProfileHigh422; + case 244: + return CodecProfileLevel.AVCProfileHigh444; + default: + return -1; + } } - static { - AVC_PROFILE_NUMBER_TO_CONST = new SparseIntArray(); - AVC_PROFILE_NUMBER_TO_CONST.put(66, CodecProfileLevel.AVCProfileBaseline); - AVC_PROFILE_NUMBER_TO_CONST.put(77, CodecProfileLevel.AVCProfileMain); - AVC_PROFILE_NUMBER_TO_CONST.put(88, CodecProfileLevel.AVCProfileExtended); - AVC_PROFILE_NUMBER_TO_CONST.put(100, CodecProfileLevel.AVCProfileHigh); - AVC_PROFILE_NUMBER_TO_CONST.put(110, CodecProfileLevel.AVCProfileHigh10); - AVC_PROFILE_NUMBER_TO_CONST.put(122, CodecProfileLevel.AVCProfileHigh422); - AVC_PROFILE_NUMBER_TO_CONST.put(244, CodecProfileLevel.AVCProfileHigh444); - - AVC_LEVEL_NUMBER_TO_CONST = new SparseIntArray(); - AVC_LEVEL_NUMBER_TO_CONST.put(10, CodecProfileLevel.AVCLevel1); + private static int avcLevelNumberToConst(int levelNumber) { // TODO: Find int for CodecProfileLevel.AVCLevel1b. 
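Editorial note (illustration only, not part of the patch): the hunks in this file replace the static SparseIntArray/HashMap lookup tables with switch-based helpers (avcProfileNumberToConst, avcLevelNumberToConst, vp9ProfileNumberToConst/vp9LevelNumberToConst, hevcCodecStringToProfileLevel, dolbyVisionStringToProfile/dolbyVisionStringToLevel, av1LevelNumberToConst, mp4aAudioObjectTypeToProfile). A minimal sketch of how getAvcProfileAndLevel is expected to consume the AVC helpers for an RFC 6381 codec string such as "avc1.640028", assuming the usual hexadecimal encoding of profile_idc and level_idc (the surrounding method also accepts dot-separated decimal codec strings):

    // Sketch under the assumptions above; -1 signals an unrecognized value.
    int profileInteger = Integer.parseInt("64", 16); // profile_idc 0x64 = 100
    int levelInteger = Integer.parseInt("28", 16);   // level_idc 0x28 = 40
    int profile = avcProfileNumberToConst(profileInteger); // CodecProfileLevel.AVCProfileHigh
    int level = avcLevelNumberToConst(levelInteger);       // CodecProfileLevel.AVCLevel4
    if (profile == -1 || level == -1) {
      // Unknown profile or level: the caller logs a warning and returns null.
    }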
- AVC_LEVEL_NUMBER_TO_CONST.put(11, CodecProfileLevel.AVCLevel11); - AVC_LEVEL_NUMBER_TO_CONST.put(12, CodecProfileLevel.AVCLevel12); - AVC_LEVEL_NUMBER_TO_CONST.put(13, CodecProfileLevel.AVCLevel13); - AVC_LEVEL_NUMBER_TO_CONST.put(20, CodecProfileLevel.AVCLevel2); - AVC_LEVEL_NUMBER_TO_CONST.put(21, CodecProfileLevel.AVCLevel21); - AVC_LEVEL_NUMBER_TO_CONST.put(22, CodecProfileLevel.AVCLevel22); - AVC_LEVEL_NUMBER_TO_CONST.put(30, CodecProfileLevel.AVCLevel3); - AVC_LEVEL_NUMBER_TO_CONST.put(31, CodecProfileLevel.AVCLevel31); - AVC_LEVEL_NUMBER_TO_CONST.put(32, CodecProfileLevel.AVCLevel32); - AVC_LEVEL_NUMBER_TO_CONST.put(40, CodecProfileLevel.AVCLevel4); - AVC_LEVEL_NUMBER_TO_CONST.put(41, CodecProfileLevel.AVCLevel41); - AVC_LEVEL_NUMBER_TO_CONST.put(42, CodecProfileLevel.AVCLevel42); - AVC_LEVEL_NUMBER_TO_CONST.put(50, CodecProfileLevel.AVCLevel5); - AVC_LEVEL_NUMBER_TO_CONST.put(51, CodecProfileLevel.AVCLevel51); - AVC_LEVEL_NUMBER_TO_CONST.put(52, CodecProfileLevel.AVCLevel52); - - VP9_PROFILE_NUMBER_TO_CONST = new SparseIntArray(); - VP9_PROFILE_NUMBER_TO_CONST.put(0, CodecProfileLevel.VP9Profile0); - VP9_PROFILE_NUMBER_TO_CONST.put(1, CodecProfileLevel.VP9Profile1); - VP9_PROFILE_NUMBER_TO_CONST.put(2, CodecProfileLevel.VP9Profile2); - VP9_PROFILE_NUMBER_TO_CONST.put(3, CodecProfileLevel.VP9Profile3); - VP9_LEVEL_NUMBER_TO_CONST = new SparseIntArray(); - VP9_LEVEL_NUMBER_TO_CONST.put(10, CodecProfileLevel.VP9Level1); - VP9_LEVEL_NUMBER_TO_CONST.put(11, CodecProfileLevel.VP9Level11); - VP9_LEVEL_NUMBER_TO_CONST.put(20, CodecProfileLevel.VP9Level2); - VP9_LEVEL_NUMBER_TO_CONST.put(21, CodecProfileLevel.VP9Level21); - VP9_LEVEL_NUMBER_TO_CONST.put(30, CodecProfileLevel.VP9Level3); - VP9_LEVEL_NUMBER_TO_CONST.put(31, CodecProfileLevel.VP9Level31); - VP9_LEVEL_NUMBER_TO_CONST.put(40, CodecProfileLevel.VP9Level4); - VP9_LEVEL_NUMBER_TO_CONST.put(41, CodecProfileLevel.VP9Level41); - VP9_LEVEL_NUMBER_TO_CONST.put(50, CodecProfileLevel.VP9Level5); - VP9_LEVEL_NUMBER_TO_CONST.put(51, CodecProfileLevel.VP9Level51); - VP9_LEVEL_NUMBER_TO_CONST.put(60, CodecProfileLevel.VP9Level6); - VP9_LEVEL_NUMBER_TO_CONST.put(61, CodecProfileLevel.VP9Level61); - VP9_LEVEL_NUMBER_TO_CONST.put(62, CodecProfileLevel.VP9Level62); - - HEVC_CODEC_STRING_TO_PROFILE_LEVEL = new HashMap<>(); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L30", CodecProfileLevel.HEVCMainTierLevel1); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L60", CodecProfileLevel.HEVCMainTierLevel2); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L63", CodecProfileLevel.HEVCMainTierLevel21); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L90", CodecProfileLevel.HEVCMainTierLevel3); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L93", CodecProfileLevel.HEVCMainTierLevel31); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L120", CodecProfileLevel.HEVCMainTierLevel4); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L123", CodecProfileLevel.HEVCMainTierLevel41); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L150", CodecProfileLevel.HEVCMainTierLevel5); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L153", CodecProfileLevel.HEVCMainTierLevel51); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L156", CodecProfileLevel.HEVCMainTierLevel52); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L180", CodecProfileLevel.HEVCMainTierLevel6); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L183", CodecProfileLevel.HEVCMainTierLevel61); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L186", CodecProfileLevel.HEVCMainTierLevel62); - - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H30", CodecProfileLevel.HEVCHighTierLevel1); - 
HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H60", CodecProfileLevel.HEVCHighTierLevel2); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H63", CodecProfileLevel.HEVCHighTierLevel21); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H90", CodecProfileLevel.HEVCHighTierLevel3); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H93", CodecProfileLevel.HEVCHighTierLevel31); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H120", CodecProfileLevel.HEVCHighTierLevel4); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H123", CodecProfileLevel.HEVCHighTierLevel41); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H150", CodecProfileLevel.HEVCHighTierLevel5); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H153", CodecProfileLevel.HEVCHighTierLevel51); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H156", CodecProfileLevel.HEVCHighTierLevel52); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H180", CodecProfileLevel.HEVCHighTierLevel6); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H183", CodecProfileLevel.HEVCHighTierLevel61); - HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H186", CodecProfileLevel.HEVCHighTierLevel62); - - DOLBY_VISION_STRING_TO_PROFILE = new HashMap<>(); - DOLBY_VISION_STRING_TO_PROFILE.put("00", CodecProfileLevel.DolbyVisionProfileDvavPer); - DOLBY_VISION_STRING_TO_PROFILE.put("01", CodecProfileLevel.DolbyVisionProfileDvavPen); - DOLBY_VISION_STRING_TO_PROFILE.put("02", CodecProfileLevel.DolbyVisionProfileDvheDer); - DOLBY_VISION_STRING_TO_PROFILE.put("03", CodecProfileLevel.DolbyVisionProfileDvheDen); - DOLBY_VISION_STRING_TO_PROFILE.put("04", CodecProfileLevel.DolbyVisionProfileDvheDtr); - DOLBY_VISION_STRING_TO_PROFILE.put("05", CodecProfileLevel.DolbyVisionProfileDvheStn); - DOLBY_VISION_STRING_TO_PROFILE.put("06", CodecProfileLevel.DolbyVisionProfileDvheDth); - DOLBY_VISION_STRING_TO_PROFILE.put("07", CodecProfileLevel.DolbyVisionProfileDvheDtb); - DOLBY_VISION_STRING_TO_PROFILE.put("08", CodecProfileLevel.DolbyVisionProfileDvheSt); - DOLBY_VISION_STRING_TO_PROFILE.put("09", CodecProfileLevel.DolbyVisionProfileDvavSe); - - DOLBY_VISION_STRING_TO_LEVEL = new HashMap<>(); - DOLBY_VISION_STRING_TO_LEVEL.put("01", CodecProfileLevel.DolbyVisionLevelHd24); - DOLBY_VISION_STRING_TO_LEVEL.put("02", CodecProfileLevel.DolbyVisionLevelHd30); - DOLBY_VISION_STRING_TO_LEVEL.put("03", CodecProfileLevel.DolbyVisionLevelFhd24); - DOLBY_VISION_STRING_TO_LEVEL.put("04", CodecProfileLevel.DolbyVisionLevelFhd30); - DOLBY_VISION_STRING_TO_LEVEL.put("05", CodecProfileLevel.DolbyVisionLevelFhd60); - DOLBY_VISION_STRING_TO_LEVEL.put("06", CodecProfileLevel.DolbyVisionLevelUhd24); - DOLBY_VISION_STRING_TO_LEVEL.put("07", CodecProfileLevel.DolbyVisionLevelUhd30); - DOLBY_VISION_STRING_TO_LEVEL.put("08", CodecProfileLevel.DolbyVisionLevelUhd48); - DOLBY_VISION_STRING_TO_LEVEL.put("09", CodecProfileLevel.DolbyVisionLevelUhd60); + switch (levelNumber) { + case 10: + return CodecProfileLevel.AVCLevel1; + case 11: + return CodecProfileLevel.AVCLevel11; + case 12: + return CodecProfileLevel.AVCLevel12; + case 13: + return CodecProfileLevel.AVCLevel13; + case 20: + return CodecProfileLevel.AVCLevel2; + case 21: + return CodecProfileLevel.AVCLevel21; + case 22: + return CodecProfileLevel.AVCLevel22; + case 30: + return CodecProfileLevel.AVCLevel3; + case 31: + return CodecProfileLevel.AVCLevel31; + case 32: + return CodecProfileLevel.AVCLevel32; + case 40: + return CodecProfileLevel.AVCLevel4; + case 41: + return CodecProfileLevel.AVCLevel41; + case 42: + return CodecProfileLevel.AVCLevel42; + case 50: + return CodecProfileLevel.AVCLevel5; + case 51: + return 
CodecProfileLevel.AVCLevel51; + case 52: + return CodecProfileLevel.AVCLevel52; + default: + return -1; + } + } + + private static int vp9ProfileNumberToConst(int profileNumber) { + switch (profileNumber) { + case 0: + return CodecProfileLevel.VP9Profile0; + case 1: + return CodecProfileLevel.VP9Profile1; + case 2: + return CodecProfileLevel.VP9Profile2; + case 3: + return CodecProfileLevel.VP9Profile3; + default: + return -1; + } + } + private static int vp9LevelNumberToConst(int levelNumber) { + switch (levelNumber) { + case 10: + return CodecProfileLevel.VP9Level1; + case 11: + return CodecProfileLevel.VP9Level11; + case 20: + return CodecProfileLevel.VP9Level2; + case 21: + return CodecProfileLevel.VP9Level21; + case 30: + return CodecProfileLevel.VP9Level3; + case 31: + return CodecProfileLevel.VP9Level31; + case 40: + return CodecProfileLevel.VP9Level4; + case 41: + return CodecProfileLevel.VP9Level41; + case 50: + return CodecProfileLevel.VP9Level5; + case 51: + return CodecProfileLevel.VP9Level51; + case 60: + return CodecProfileLevel.VP9Level6; + case 61: + return CodecProfileLevel.VP9Level61; + case 62: + return CodecProfileLevel.VP9Level62; + default: + return -1; + } + } + + @Nullable + private static Integer hevcCodecStringToProfileLevel(@Nullable String codecString) { + if (codecString == null) { + return null; + } + switch (codecString) { + case "L30": + return CodecProfileLevel.HEVCMainTierLevel1; + case "L60": + return CodecProfileLevel.HEVCMainTierLevel2; + case "L63": + return CodecProfileLevel.HEVCMainTierLevel21; + case "L90": + return CodecProfileLevel.HEVCMainTierLevel3; + case "L93": + return CodecProfileLevel.HEVCMainTierLevel31; + case "L120": + return CodecProfileLevel.HEVCMainTierLevel4; + case "L123": + return CodecProfileLevel.HEVCMainTierLevel41; + case "L150": + return CodecProfileLevel.HEVCMainTierLevel5; + case "L153": + return CodecProfileLevel.HEVCMainTierLevel51; + case "L156": + return CodecProfileLevel.HEVCMainTierLevel52; + case "L180": + return CodecProfileLevel.HEVCMainTierLevel6; + case "L183": + return CodecProfileLevel.HEVCMainTierLevel61; + case "L186": + return CodecProfileLevel.HEVCMainTierLevel62; + case "H30": + return CodecProfileLevel.HEVCHighTierLevel1; + case "H60": + return CodecProfileLevel.HEVCHighTierLevel2; + case "H63": + return CodecProfileLevel.HEVCHighTierLevel21; + case "H90": + return CodecProfileLevel.HEVCHighTierLevel3; + case "H93": + return CodecProfileLevel.HEVCHighTierLevel31; + case "H120": + return CodecProfileLevel.HEVCHighTierLevel4; + case "H123": + return CodecProfileLevel.HEVCHighTierLevel41; + case "H150": + return CodecProfileLevel.HEVCHighTierLevel5; + case "H153": + return CodecProfileLevel.HEVCHighTierLevel51; + case "H156": + return CodecProfileLevel.HEVCHighTierLevel52; + case "H180": + return CodecProfileLevel.HEVCHighTierLevel6; + case "H183": + return CodecProfileLevel.HEVCHighTierLevel61; + case "H186": + return CodecProfileLevel.HEVCHighTierLevel62; + default: + return null; + } + } + + @Nullable + private static Integer dolbyVisionStringToProfile(@Nullable String profileString) { + if (profileString == null) { + return null; + } + switch (profileString) { + case "00": + return CodecProfileLevel.DolbyVisionProfileDvavPer; + case "01": + return CodecProfileLevel.DolbyVisionProfileDvavPen; + case "02": + return CodecProfileLevel.DolbyVisionProfileDvheDer; + case "03": + return CodecProfileLevel.DolbyVisionProfileDvheDen; + case "04": + return CodecProfileLevel.DolbyVisionProfileDvheDtr; + case "05": + 
return CodecProfileLevel.DolbyVisionProfileDvheStn; + case "06": + return CodecProfileLevel.DolbyVisionProfileDvheDth; + case "07": + return CodecProfileLevel.DolbyVisionProfileDvheDtb; + case "08": + return CodecProfileLevel.DolbyVisionProfileDvheSt; + case "09": + return CodecProfileLevel.DolbyVisionProfileDvavSe; + default: + return null; + } + } + + @Nullable + private static Integer dolbyVisionStringToLevel(@Nullable String levelString) { + if (levelString == null) { + return null; + } + // TODO (Internal: b/179261323): use framework constants for levels 10 to 13. + switch (levelString) { + case "01": + return CodecProfileLevel.DolbyVisionLevelHd24; + case "02": + return CodecProfileLevel.DolbyVisionLevelHd30; + case "03": + return CodecProfileLevel.DolbyVisionLevelFhd24; + case "04": + return CodecProfileLevel.DolbyVisionLevelFhd30; + case "05": + return CodecProfileLevel.DolbyVisionLevelFhd60; + case "06": + return CodecProfileLevel.DolbyVisionLevelUhd24; + case "07": + return CodecProfileLevel.DolbyVisionLevelUhd30; + case "08": + return CodecProfileLevel.DolbyVisionLevelUhd48; + case "09": + return CodecProfileLevel.DolbyVisionLevelUhd60; + case "10": + return 0x200; + case "11": + return 0x400; + case "12": + return 0x800; + case "13": + return 0x1000; + default: + return null; + } + } + + private static int av1LevelNumberToConst(int levelNumber) { // See https://aomediacodec.github.io/av1-spec/av1-spec.pdf Annex A: Profiles and levels for // more information on mapping AV1 codec strings to levels. - AV1_LEVEL_NUMBER_TO_CONST = new SparseIntArray(); - AV1_LEVEL_NUMBER_TO_CONST.put(0, CodecProfileLevel.AV1Level2); - AV1_LEVEL_NUMBER_TO_CONST.put(1, CodecProfileLevel.AV1Level21); - AV1_LEVEL_NUMBER_TO_CONST.put(2, CodecProfileLevel.AV1Level22); - AV1_LEVEL_NUMBER_TO_CONST.put(3, CodecProfileLevel.AV1Level23); - AV1_LEVEL_NUMBER_TO_CONST.put(4, CodecProfileLevel.AV1Level3); - AV1_LEVEL_NUMBER_TO_CONST.put(5, CodecProfileLevel.AV1Level31); - AV1_LEVEL_NUMBER_TO_CONST.put(6, CodecProfileLevel.AV1Level32); - AV1_LEVEL_NUMBER_TO_CONST.put(7, CodecProfileLevel.AV1Level33); - AV1_LEVEL_NUMBER_TO_CONST.put(8, CodecProfileLevel.AV1Level4); - AV1_LEVEL_NUMBER_TO_CONST.put(9, CodecProfileLevel.AV1Level41); - AV1_LEVEL_NUMBER_TO_CONST.put(10, CodecProfileLevel.AV1Level42); - AV1_LEVEL_NUMBER_TO_CONST.put(11, CodecProfileLevel.AV1Level43); - AV1_LEVEL_NUMBER_TO_CONST.put(12, CodecProfileLevel.AV1Level5); - AV1_LEVEL_NUMBER_TO_CONST.put(13, CodecProfileLevel.AV1Level51); - AV1_LEVEL_NUMBER_TO_CONST.put(14, CodecProfileLevel.AV1Level52); - AV1_LEVEL_NUMBER_TO_CONST.put(15, CodecProfileLevel.AV1Level53); - AV1_LEVEL_NUMBER_TO_CONST.put(16, CodecProfileLevel.AV1Level6); - AV1_LEVEL_NUMBER_TO_CONST.put(17, CodecProfileLevel.AV1Level61); - AV1_LEVEL_NUMBER_TO_CONST.put(18, CodecProfileLevel.AV1Level62); - AV1_LEVEL_NUMBER_TO_CONST.put(19, CodecProfileLevel.AV1Level63); - AV1_LEVEL_NUMBER_TO_CONST.put(20, CodecProfileLevel.AV1Level7); - AV1_LEVEL_NUMBER_TO_CONST.put(21, CodecProfileLevel.AV1Level71); - AV1_LEVEL_NUMBER_TO_CONST.put(22, CodecProfileLevel.AV1Level72); - AV1_LEVEL_NUMBER_TO_CONST.put(23, CodecProfileLevel.AV1Level73); - - MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE = new SparseIntArray(); - MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(1, CodecProfileLevel.AACObjectMain); - MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(2, CodecProfileLevel.AACObjectLC); - MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(3, CodecProfileLevel.AACObjectSSR); - MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(4, CodecProfileLevel.AACObjectLTP); - 
MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(5, CodecProfileLevel.AACObjectHE); - MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(6, CodecProfileLevel.AACObjectScalable); - MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(17, CodecProfileLevel.AACObjectERLC); - MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(20, CodecProfileLevel.AACObjectERScalable); - MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(23, CodecProfileLevel.AACObjectLD); - MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(29, CodecProfileLevel.AACObjectHE_PS); - MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(39, CodecProfileLevel.AACObjectELD); - MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(42, CodecProfileLevel.AACObjectXHE); + switch (levelNumber) { + case 0: + return CodecProfileLevel.AV1Level2; + case 1: + return CodecProfileLevel.AV1Level21; + case 2: + return CodecProfileLevel.AV1Level22; + case 3: + return CodecProfileLevel.AV1Level23; + case 4: + return CodecProfileLevel.AV1Level3; + case 5: + return CodecProfileLevel.AV1Level31; + case 6: + return CodecProfileLevel.AV1Level32; + case 7: + return CodecProfileLevel.AV1Level33; + case 8: + return CodecProfileLevel.AV1Level4; + case 9: + return CodecProfileLevel.AV1Level41; + case 10: + return CodecProfileLevel.AV1Level42; + case 11: + return CodecProfileLevel.AV1Level43; + case 12: + return CodecProfileLevel.AV1Level5; + case 13: + return CodecProfileLevel.AV1Level51; + case 14: + return CodecProfileLevel.AV1Level52; + case 15: + return CodecProfileLevel.AV1Level53; + case 16: + return CodecProfileLevel.AV1Level6; + case 17: + return CodecProfileLevel.AV1Level61; + case 18: + return CodecProfileLevel.AV1Level62; + case 19: + return CodecProfileLevel.AV1Level63; + case 20: + return CodecProfileLevel.AV1Level7; + case 21: + return CodecProfileLevel.AV1Level71; + case 22: + return CodecProfileLevel.AV1Level72; + case 23: + return CodecProfileLevel.AV1Level73; + default: + return -1; + } + } + + private static int mp4aAudioObjectTypeToProfile(int profileNumber) { + switch (profileNumber) { + case 1: + return CodecProfileLevel.AACObjectMain; + case 2: + return CodecProfileLevel.AACObjectLC; + case 3: + return CodecProfileLevel.AACObjectSSR; + case 4: + return CodecProfileLevel.AACObjectLTP; + case 5: + return CodecProfileLevel.AACObjectHE; + case 6: + return CodecProfileLevel.AACObjectScalable; + case 17: + return CodecProfileLevel.AACObjectERLC; + case 20: + return CodecProfileLevel.AACObjectERScalable; + case 23: + return CodecProfileLevel.AACObjectLD; + case 29: + return CodecProfileLevel.AACObjectHE_PS; + case 39: + return CodecProfileLevel.AACObjectELD; + case 42: + return CodecProfileLevel.AACObjectXHE; + default: + return -1; + } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaFormatUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaFormatUtil.java deleted file mode 100644 index 118445835b..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/MediaFormatUtil.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright (C) 2018 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.mediacodec; - -import android.media.MediaFormat; -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.video.ColorInfo; -import java.nio.ByteBuffer; -import java.util.List; - -/** Helper class for configuring {@link MediaFormat} instances. */ -public final class MediaFormatUtil { - - private MediaFormatUtil() {} - - /** - * Sets a {@link MediaFormat} {@link String} value. - * - * @param format The {@link MediaFormat} being configured. - * @param key The key to set. - * @param value The value to set. - */ - public static void setString(MediaFormat format, String key, String value) { - format.setString(key, value); - } - - /** - * Sets a {@link MediaFormat}'s codec specific data buffers. - * - * @param format The {@link MediaFormat} being configured. - * @param csdBuffers The csd buffers to set. - */ - public static void setCsdBuffers(MediaFormat format, List csdBuffers) { - for (int i = 0; i < csdBuffers.size(); i++) { - format.setByteBuffer("csd-" + i, ByteBuffer.wrap(csdBuffers.get(i))); - } - } - - /** - * Sets a {@link MediaFormat} integer value. Does nothing if {@code value} is {@link - * Format#NO_VALUE}. - * - * @param format The {@link MediaFormat} being configured. - * @param key The key to set. - * @param value The value to set. - */ - public static void maybeSetInteger(MediaFormat format, String key, int value) { - if (value != Format.NO_VALUE) { - format.setInteger(key, value); - } - } - - /** - * Sets a {@link MediaFormat} float value. Does nothing if {@code value} is {@link - * Format#NO_VALUE}. - * - * @param format The {@link MediaFormat} being configured. - * @param key The key to set. - * @param value The value to set. - */ - public static void maybeSetFloat(MediaFormat format, String key, float value) { - if (value != Format.NO_VALUE) { - format.setFloat(key, value); - } - } - - /** - * Sets a {@link MediaFormat} {@link ByteBuffer} value. Does nothing if {@code value} is null. - * - * @param format The {@link MediaFormat} being configured. - * @param key The key to set. - * @param value The {@link byte[]} that will be wrapped to obtain the value. - */ - public static void maybeSetByteBuffer(MediaFormat format, String key, @Nullable byte[] value) { - if (value != null) { - format.setByteBuffer(key, ByteBuffer.wrap(value)); - } - } - - /** - * Sets a {@link MediaFormat}'s color information. Does nothing if {@code colorInfo} is null. - * - * @param format The {@link MediaFormat} being configured. - * @param colorInfo The color info to set. 
- */ - @SuppressWarnings("InlinedApi") - public static void maybeSetColorInfo(MediaFormat format, @Nullable ColorInfo colorInfo) { - if (colorInfo != null) { - maybeSetInteger(format, MediaFormat.KEY_COLOR_TRANSFER, colorInfo.colorTransfer); - maybeSetInteger(format, MediaFormat.KEY_COLOR_STANDARD, colorInfo.colorSpace); - maybeSetInteger(format, MediaFormat.KEY_COLOR_RANGE, colorInfo.colorRange); - maybeSetByteBuffer(format, MediaFormat.KEY_HDR_STATIC_INFO, colorInfo.hdrStaticInfo); - } - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/SynchronousMediaCodecAdapter.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/SynchronousMediaCodecAdapter.java new file mode 100644 index 0000000000..ff6ce38bda --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/mediacodec/SynchronousMediaCodecAdapter.java @@ -0,0 +1,208 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.mediacodec; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; + +import android.media.MediaCodec; +import android.media.MediaFormat; +import android.os.Bundle; +import android.os.Handler; +import android.os.PersistableBundle; +import android.view.Surface; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.decoder.CryptoInfo; +import com.google.android.exoplayer2.util.TraceUtil; +import com.google.android.exoplayer2.util.Util; +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * A {@link MediaCodecAdapter} that operates the underlying {@link MediaCodec} in synchronous mode. + */ +public final class SynchronousMediaCodecAdapter implements MediaCodecAdapter { + + /** A factory for {@link SynchronousMediaCodecAdapter} instances. */ + public static class Factory implements MediaCodecAdapter.Factory { + + @Override + public MediaCodecAdapter createAdapter(Configuration configuration) throws IOException { + @Nullable MediaCodec codec = null; + try { + codec = createCodec(configuration); + TraceUtil.beginSection("configureCodec"); + codec.configure( + configuration.mediaFormat, + configuration.surface, + configuration.crypto, + configuration.flags); + TraceUtil.endSection(); + TraceUtil.beginSection("startCodec"); + codec.start(); + TraceUtil.endSection(); + return new SynchronousMediaCodecAdapter(codec); + } catch (IOException | RuntimeException e) { + if (codec != null) { + codec.release(); + } + throw e; + } + } + + /** Creates a new {@link MediaCodec} instance. 
*/ + protected MediaCodec createCodec(Configuration configuration) throws IOException { + checkNotNull(configuration.codecInfo); + String codecName = configuration.codecInfo.name; + TraceUtil.beginSection("createCodec:" + codecName); + MediaCodec mediaCodec = MediaCodec.createByCodecName(codecName); + TraceUtil.endSection(); + return mediaCodec; + } + } + + private final MediaCodec codec; + @Nullable private ByteBuffer[] inputByteBuffers; + @Nullable private ByteBuffer[] outputByteBuffers; + + private SynchronousMediaCodecAdapter(MediaCodec mediaCodec) { + this.codec = mediaCodec; + if (Util.SDK_INT < 21) { + inputByteBuffers = codec.getInputBuffers(); + outputByteBuffers = codec.getOutputBuffers(); + } + } + + @Override + public boolean needsReconfiguration() { + return false; + } + + @Override + public int dequeueInputBufferIndex() { + return codec.dequeueInputBuffer(0); + } + + @Override + public int dequeueOutputBufferIndex(MediaCodec.BufferInfo bufferInfo) { + int index; + do { + index = codec.dequeueOutputBuffer(bufferInfo, 0); + if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED && Util.SDK_INT < 21) { + outputByteBuffers = codec.getOutputBuffers(); + } + } while (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED); + + return index; + } + + @Override + public MediaFormat getOutputFormat() { + return codec.getOutputFormat(); + } + + @Override + @Nullable + public ByteBuffer getInputBuffer(int index) { + if (Util.SDK_INT >= 21) { + return codec.getInputBuffer(index); + } else { + return castNonNull(inputByteBuffers)[index]; + } + } + + @Override + @Nullable + public ByteBuffer getOutputBuffer(int index) { + if (Util.SDK_INT >= 21) { + return codec.getOutputBuffer(index); + } else { + return castNonNull(outputByteBuffers)[index]; + } + } + + @Override + public void queueInputBuffer( + int index, int offset, int size, long presentationTimeUs, int flags) { + codec.queueInputBuffer(index, offset, size, presentationTimeUs, flags); + } + + @Override + public void queueSecureInputBuffer( + int index, int offset, CryptoInfo info, long presentationTimeUs, int flags) { + codec.queueSecureInputBuffer( + index, offset, info.getFrameworkCryptoInfo(), presentationTimeUs, flags); + } + + @Override + public void releaseOutputBuffer(int index, boolean render) { + codec.releaseOutputBuffer(index, render); + } + + @Override + @RequiresApi(21) + public void releaseOutputBuffer(int index, long renderTimeStampNs) { + codec.releaseOutputBuffer(index, renderTimeStampNs); + } + + @Override + public void flush() { + codec.flush(); + } + + @Override + public void release() { + inputByteBuffers = null; + outputByteBuffers = null; + codec.release(); + } + + @Override + @RequiresApi(23) + public void setOnFrameRenderedListener(OnFrameRenderedListener listener, Handler handler) { + codec.setOnFrameRenderedListener( + (codec, presentationTimeUs, nanoTime) -> + listener.onFrameRendered( + SynchronousMediaCodecAdapter.this, presentationTimeUs, nanoTime), + handler); + } + + @Override + @RequiresApi(23) + public void setOutputSurface(Surface surface) { + codec.setOutputSurface(surface); + } + + @Override + @RequiresApi(19) + public void setParameters(Bundle params) { + codec.setParameters(params); + } + + @Override + public void setVideoScalingMode(@C.VideoScalingMode int scalingMode) { + codec.setVideoScalingMode(scalingMode); + } + + @Override + @RequiresApi(26) + public PersistableBundle getMetrics() { + return codec.getMetrics(); + } +} diff --git 
a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/Metadata.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/Metadata.java index 046c1fef55..9bbe0e8a95 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/Metadata.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/Metadata.java @@ -18,14 +18,16 @@ import android.os.Parcel; import android.os.Parcelable; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.MediaMetadata; +import com.google.android.exoplayer2.Timeline; import com.google.android.exoplayer2.util.Util; +import com.google.common.primitives.Longs; import java.util.Arrays; import java.util.List; -/** - * A collection of metadata entries. - */ +/** A collection of metadata entries. */ public final class Metadata implements Parcelable { /** A metadata entry. */ @@ -48,14 +50,39 @@ default Format getWrappedMetadataFormat() { default byte[] getWrappedMetadataBytes() { return null; } + + /** + * Updates the {@link MediaMetadata.Builder} with the type-specific values stored in this {@code + * Entry}. + * + * @param builder The builder to be updated. + */ + default void populateMediaMetadata(MediaMetadata.Builder builder) {} } private final Entry[] entries; + /** + * The presentation time of the metadata, in microseconds. + * + *

<p>This time is an offset from the start of the current {@link Timeline.Period}. + * + * <p>
      This time is {@link C#TIME_UNSET} when not known or undefined. + */ + public final long presentationTimeUs; /** * @param entries The metadata entries. */ public Metadata(Entry... entries) { + this(/* presentationTimeUs= */ C.TIME_UNSET, entries); + } + + /** + * @param presentationTimeUs The presentation time for the metadata entries. + * @param entries The metadata entries. + */ + public Metadata(long presentationTimeUs, Entry... entries) { + this.presentationTimeUs = presentationTimeUs; this.entries = entries; } @@ -63,8 +90,15 @@ public Metadata(Entry... entries) { * @param entries The metadata entries. */ public Metadata(List entries) { - this.entries = new Entry[entries.size()]; - entries.toArray(this.entries); + this(entries.toArray(new Entry[0])); + } + + /** + * @param presentationTimeUs The presentation time for the metadata entries. + * @param entries The metadata entries. + */ + public Metadata(long presentationTimeUs, List entries) { + this(presentationTimeUs, entries.toArray(new Entry[0])); } /* package */ Metadata(Parcel in) { @@ -72,11 +106,10 @@ public Metadata(List entries) { for (int i = 0; i < entries.length; i++) { entries[i] = in.readParcelable(Entry.class.getClassLoader()); } + presentationTimeUs = in.readLong(); } - /** - * Returns the number of metadata entries. - */ + /** Returns the number of metadata entries. */ public int length() { return entries.length; } @@ -116,7 +149,21 @@ public Metadata copyWithAppendedEntries(Entry... entriesToAppend) { if (entriesToAppend.length == 0) { return this; } - return new Metadata(Util.nullSafeArrayConcatenation(entries, entriesToAppend)); + return new Metadata( + presentationTimeUs, Util.nullSafeArrayConcatenation(entries, entriesToAppend)); + } + + /** + * Returns a copy of this metadata with the specified presentation time. + * + * @param presentationTimeUs The new presentation time, in microseconds. + * @return The metadata instance with the new presentation time. + */ + public Metadata copyWithPresentationTimeUs(long presentationTimeUs) { + if (this.presentationTimeUs == presentationTimeUs) { + return this; + } + return new Metadata(presentationTimeUs, entries); } @Override @@ -128,17 +175,21 @@ public boolean equals(@Nullable Object obj) { return false; } Metadata other = (Metadata) obj; - return Arrays.equals(entries, other.entries); + return Arrays.equals(entries, other.entries) && presentationTimeUs == other.presentationTimeUs; } @Override public int hashCode() { - return Arrays.hashCode(entries); + int result = Arrays.hashCode(entries); + result = 31 * result + Longs.hashCode(presentationTimeUs); + return result; } @Override public String toString() { - return "entries=" + Arrays.toString(entries); + return "entries=" + + Arrays.toString(entries) + + (presentationTimeUs == C.TIME_UNSET ? "" : ", presentationTimeUs=" + presentationTimeUs); } // Parcelable implementation. 
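Editorial note (illustration only, not part of the patch): the changes above add an explicit presentationTimeUs to Metadata together with copyWithPresentationTimeUs. A minimal usage sketch, where entry stands for any Metadata.Entry implementation and oldOffsetUs/newOffsetUs are hypothetical sample-stream offsets; the rebasing mirrors what MetadataRenderer.onStreamChanged does later in this patch.

    // Sketch under the assumptions above.
    Metadata metadata = new Metadata(/* presentationTimeUs= */ 1_000_000, entry);
    // Rebase onto a new stream offset; copyWithPresentationTimeUs returns the same
    // instance when the time is unchanged, otherwise a copy with the same entries.
    Metadata rebased =
        metadata.copyWithPresentationTimeUs(
            metadata.presentationTimeUs + oldOffsetUs - newOffsetUs);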
@@ -154,6 +205,7 @@ public void writeToParcel(Parcel dest, int flags) { for (Entry entry : entries) { dest.writeParcelable(entry, 0); } + dest.writeLong(presentationTimeUs); } public static final Parcelable.Creator CREATOR = diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataDecoder.java index 1d95d32290..825f690fe8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataDecoder.java @@ -16,17 +16,21 @@ package com.google.android.exoplayer2.metadata; import androidx.annotation.Nullable; +import java.nio.ByteBuffer; -/** - * Decodes metadata from binary data. - */ +/** Decodes metadata from binary data. */ public interface MetadataDecoder { /** * Decodes a {@link Metadata} element from the provided input buffer. * + *
<p>
      Respects {@link ByteBuffer#limit()} of {@code inputBuffer.data}, but assumes {@link + * ByteBuffer#position()} and {@link ByteBuffer#arrayOffset()} are both zero and {@link + * ByteBuffer#hasArray()} is true. + * * @param inputBuffer The input buffer to decode. - * @return The decoded metadata object, or null if the metadata could not be decoded. + * @return The decoded metadata object, or {@code null} if the metadata could not be decoded or if + * {@link MetadataInputBuffer#isDecodeOnly()} was set on the input buffer. */ @Nullable Metadata decode(MetadataInputBuffer inputBuffer); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataDecoderFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataDecoderFactory.java index 0b653830a3..94ab6c4232 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataDecoderFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataDecoderFactory.java @@ -17,15 +17,14 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.metadata.dvbsi.AppInfoTableDecoder; import com.google.android.exoplayer2.metadata.emsg.EventMessageDecoder; import com.google.android.exoplayer2.metadata.icy.IcyDecoder; import com.google.android.exoplayer2.metadata.id3.Id3Decoder; import com.google.android.exoplayer2.metadata.scte35.SpliceInfoDecoder; import com.google.android.exoplayer2.util.MimeTypes; -/** - * A factory for {@link MetadataDecoder} instances. - */ +/** A factory for {@link MetadataDecoder} instances. */ public interface MetadataDecoderFactory { /** @@ -67,7 +66,8 @@ public boolean supportsFormat(Format format) { return MimeTypes.APPLICATION_ID3.equals(mimeType) || MimeTypes.APPLICATION_EMSG.equals(mimeType) || MimeTypes.APPLICATION_SCTE35.equals(mimeType) - || MimeTypes.APPLICATION_ICY.equals(mimeType); + || MimeTypes.APPLICATION_ICY.equals(mimeType) + || MimeTypes.APPLICATION_AIT.equals(mimeType); } @Override @@ -83,6 +83,8 @@ public MetadataDecoder createDecoder(Format format) { return new SpliceInfoDecoder(); case MimeTypes.APPLICATION_ICY: return new IcyDecoder(); + case MimeTypes.APPLICATION_AIT: + return new AppInfoTableDecoder(); default: break; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataInputBuffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataInputBuffer.java index a09b565653..0ce9eb616b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataInputBuffer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataInputBuffer.java @@ -18,19 +18,16 @@ import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; -/** - * A {@link DecoderInputBuffer} for a {@link MetadataDecoder}. - */ +/** A {@link DecoderInputBuffer} for a {@link MetadataDecoder}. */ public final class MetadataInputBuffer extends DecoderInputBuffer { /** - * An offset that must be added to the metadata's timestamps after it's been decoded, or - * {@link Format#OFFSET_SAMPLE_RELATIVE} if {@link #timeUs} should be added. + * An offset that must be added to the metadata's timestamps after it's been decoded, or {@link + * Format#OFFSET_SAMPLE_RELATIVE} if {@link #timeUs} should be added. 
*/ public long subsampleOffsetUs; public MetadataInputBuffer() { super(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_NORMAL); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataOutput.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataOutput.java index b635cbc4b2..0eb64d75c0 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataOutput.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataOutput.java @@ -15,9 +15,8 @@ */ package com.google.android.exoplayer2.metadata; -/** - * Receives metadata output. - */ + +/** Receives metadata output. */ public interface MetadataOutput { /** @@ -26,5 +25,4 @@ public interface MetadataOutput { * @param metadata The metadata. */ void onMetadata(Metadata metadata); - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataRenderer.java index 7a5235a466..8790d12819 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataRenderer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/MetadataRenderer.java @@ -15,6 +15,7 @@ */ package com.google.android.exoplayer2.metadata; +import static com.google.android.exoplayer2.util.Assertions.checkState; import static com.google.android.exoplayer2.util.Util.castNonNull; import android.os.Handler; @@ -27,38 +28,41 @@ import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.RendererCapabilities; +import com.google.android.exoplayer2.source.SampleStream.ReadDataResult; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; -import org.checkerframework.checker.nullness.compatqual.NullableType; +import org.checkerframework.dataflow.qual.SideEffectFree; /** * A renderer for metadata. + * + *
<p>
      The renderer can be configured to render metadata as soon as they are available using {@link + * #MetadataRenderer(MetadataOutput, Looper, MetadataDecoderFactory, boolean)}. */ public final class MetadataRenderer extends BaseRenderer implements Callback { + private static final String TAG = "MetadataRenderer"; private static final int MSG_INVOKE_RENDERER = 0; - // TODO: Holding multiple pending metadata objects is temporary mitigation against - // https://github.com/google/ExoPlayer/issues/1874. It should be removed once this issue has been - // addressed. - private static final int MAX_PENDING_METADATA_COUNT = 5; private final MetadataDecoderFactory decoderFactory; private final MetadataOutput output; @Nullable private final Handler outputHandler; private final MetadataInputBuffer buffer; - private final @NullableType Metadata[] pendingMetadata; - private final long[] pendingMetadataTimestamps; + private final boolean outputMetadataEarly; - private int pendingMetadataIndex; - private int pendingMetadataCount; @Nullable private MetadataDecoder decoder; private boolean inputStreamEnded; + private boolean outputStreamEnded; private long subsampleOffsetUs; + @Nullable private Metadata pendingMetadata; + private long outputStreamOffsetUs; /** + * Creates an instance that uses {@link MetadataDecoderFactory#DEFAULT} to create {@link + * MetadataDecoder} instances. + * * @param output The output. * @param outputLooper The looper associated with the thread on which the output should be called. * If the output makes use of standard Android UI components, then this should normally be the @@ -71,6 +75,8 @@ public MetadataRenderer(MetadataOutput output, @Nullable Looper outputLooper) { } /** + * Creates an instance. + * * @param output The output. * @param outputLooper The looper associated with the thread on which the output should be called. * If the output makes use of standard Android UI components, then this should normally be the @@ -81,79 +87,77 @@ public MetadataRenderer(MetadataOutput output, @Nullable Looper outputLooper) { */ public MetadataRenderer( MetadataOutput output, @Nullable Looper outputLooper, MetadataDecoderFactory decoderFactory) { + this(output, outputLooper, decoderFactory, /* outputMetadataEarly= */ false); + } + + /** + * Creates an instance. + * + * @param output The output. + * @param outputLooper The looper associated with the thread on which the output should be called. + * If the output makes use of standard Android UI components, then this should normally be the + * looper associated with the application's main thread, which can be obtained using {@link + * android.app.Activity#getMainLooper()}. Null may be passed if the output should be called + * directly on the player's internal rendering thread. + * @param decoderFactory A factory from which to obtain {@link MetadataDecoder} instances. + * @param outputMetadataEarly Whether the renderer outputs metadata early. When {@code true}, + * {@link #render} will output metadata as soon as they are available to the renderer, + * otherwise {@link #render} will output metadata in sync with the rendering position. + */ + public MetadataRenderer( + MetadataOutput output, + @Nullable Looper outputLooper, + MetadataDecoderFactory decoderFactory, + boolean outputMetadataEarly) { super(C.TRACK_TYPE_METADATA); this.output = Assertions.checkNotNull(output); this.outputHandler = outputLooper == null ? 
null : Util.createHandler(outputLooper, /* callback= */ this); this.decoderFactory = Assertions.checkNotNull(decoderFactory); + this.outputMetadataEarly = outputMetadataEarly; buffer = new MetadataInputBuffer(); - pendingMetadata = new Metadata[MAX_PENDING_METADATA_COUNT]; - pendingMetadataTimestamps = new long[MAX_PENDING_METADATA_COUNT]; + outputStreamOffsetUs = C.TIME_UNSET; + } + + @Override + public String getName() { + return TAG; } @Override - @Capabilities - public int supportsFormat(Format format) { + public @Capabilities int supportsFormat(Format format) { if (decoderFactory.supportsFormat(format)) { return RendererCapabilities.create( - supportsFormatDrm(null, format.drmInitData) ? FORMAT_HANDLED : FORMAT_UNSUPPORTED_DRM); + format.cryptoType == C.CRYPTO_TYPE_NONE ? C.FORMAT_HANDLED : C.FORMAT_UNSUPPORTED_DRM); } else { - return RendererCapabilities.create(FORMAT_UNSUPPORTED_TYPE); + return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE); } } @Override - protected void onStreamChanged(Format[] formats, long offsetUs) { + protected void onStreamChanged(Format[] formats, long startPositionUs, long offsetUs) { decoder = decoderFactory.createDecoder(formats[0]); + if (pendingMetadata != null) { + pendingMetadata = + pendingMetadata.copyWithPresentationTimeUs( + pendingMetadata.presentationTimeUs + outputStreamOffsetUs - offsetUs); + } + outputStreamOffsetUs = offsetUs; } @Override protected void onPositionReset(long positionUs, boolean joining) { - flushPendingMetadata(); + pendingMetadata = null; inputStreamEnded = false; + outputStreamEnded = false; } @Override public void render(long positionUs, long elapsedRealtimeUs) { - if (!inputStreamEnded && pendingMetadataCount < MAX_PENDING_METADATA_COUNT) { - buffer.clear(); - FormatHolder formatHolder = getFormatHolder(); - int result = readSource(formatHolder, buffer, false); - if (result == C.RESULT_BUFFER_READ) { - if (buffer.isEndOfStream()) { - inputStreamEnded = true; - } else if (buffer.isDecodeOnly()) { - // Do nothing. Note this assumes that all metadata buffers can be decoded independently. - // If we ever need to support a metadata format where this is not the case, we'll need to - // pass the buffer to the decoder and discard the output. 
- } else { - buffer.subsampleOffsetUs = subsampleOffsetUs; - buffer.flip(); - @Nullable Metadata metadata = castNonNull(decoder).decode(buffer); - if (metadata != null) { - List entries = new ArrayList<>(metadata.length()); - decodeWrappedMetadata(metadata, entries); - if (!entries.isEmpty()) { - Metadata expandedMetadata = new Metadata(entries); - int index = - (pendingMetadataIndex + pendingMetadataCount) % MAX_PENDING_METADATA_COUNT; - pendingMetadata[index] = expandedMetadata; - pendingMetadataTimestamps[index] = buffer.timeUs; - pendingMetadataCount++; - } - } - } - } else if (result == C.RESULT_FORMAT_READ) { - subsampleOffsetUs = Assertions.checkNotNull(formatHolder.format).subsampleOffsetUs; - } - } - - if (pendingMetadataCount > 0 && pendingMetadataTimestamps[pendingMetadataIndex] <= positionUs) { - Metadata metadata = castNonNull(pendingMetadata[pendingMetadataIndex]); - invokeRenderer(metadata); - pendingMetadata[pendingMetadataIndex] = null; - pendingMetadataIndex = (pendingMetadataIndex + 1) % MAX_PENDING_METADATA_COUNT; - pendingMetadataCount--; + boolean working = true; + while (working) { + readMetadata(); + working = outputMetadata(positionUs); } } @@ -189,13 +193,14 @@ private void decodeWrappedMetadata(Metadata metadata, List decod @Override protected void onDisabled() { - flushPendingMetadata(); + pendingMetadata = null; decoder = null; + outputStreamOffsetUs = C.TIME_UNSET; } @Override public boolean isEnded() { - return inputStreamEnded; + return outputStreamEnded; } @Override @@ -203,20 +208,6 @@ public boolean isReady() { return true; } - private void invokeRenderer(Metadata metadata) { - if (outputHandler != null) { - outputHandler.obtainMessage(MSG_INVOKE_RENDERER, metadata).sendToTarget(); - } else { - invokeRendererInternal(metadata); - } - } - - private void flushPendingMetadata() { - Arrays.fill(pendingMetadata, null); - pendingMetadataIndex = 0; - pendingMetadataCount = 0; - } - @Override public boolean handleMessage(Message msg) { switch (msg.what) { @@ -229,8 +220,66 @@ public boolean handleMessage(Message msg) { } } + private void readMetadata() { + if (!inputStreamEnded && pendingMetadata == null) { + buffer.clear(); + FormatHolder formatHolder = getFormatHolder(); + @ReadDataResult int result = readSource(formatHolder, buffer, /* readFlags= */ 0); + if (result == C.RESULT_BUFFER_READ) { + if (buffer.isEndOfStream()) { + inputStreamEnded = true; + } else { + buffer.subsampleOffsetUs = subsampleOffsetUs; + buffer.flip(); + @Nullable Metadata metadata = castNonNull(decoder).decode(buffer); + if (metadata != null) { + List entries = new ArrayList<>(metadata.length()); + decodeWrappedMetadata(metadata, entries); + if (!entries.isEmpty()) { + Metadata expandedMetadata = + new Metadata(getPresentationTimeUs(buffer.timeUs), entries); + pendingMetadata = expandedMetadata; + } + } + } + } else if (result == C.RESULT_FORMAT_READ) { + subsampleOffsetUs = Assertions.checkNotNull(formatHolder.format).subsampleOffsetUs; + } + } + } + + private boolean outputMetadata(long positionUs) { + boolean didOutput = false; + if (pendingMetadata != null + && (outputMetadataEarly + || pendingMetadata.presentationTimeUs <= getPresentationTimeUs(positionUs))) { + invokeRenderer(pendingMetadata); + pendingMetadata = null; + didOutput = true; + } + if (inputStreamEnded && pendingMetadata == null) { + outputStreamEnded = true; + } + return didOutput; + } + + private void invokeRenderer(Metadata metadata) { + if (outputHandler != null) { + 
outputHandler.obtainMessage(MSG_INVOKE_RENDERER, metadata).sendToTarget(); + } else { + invokeRendererInternal(metadata); + } + } + private void invokeRendererInternal(Metadata metadata) { output.onMetadata(metadata); } + @SideEffectFree + private long getPresentationTimeUs(long positionUs) { + checkState(positionUs != C.TIME_UNSET); + checkState(outputStreamOffsetUs != C.TIME_UNSET); + + return positionUs - outputStreamOffsetUs; + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/SimpleMetadataDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/SimpleMetadataDecoder.java new file mode 100644 index 0000000000..cf3954b7c5 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/SimpleMetadataDecoder.java @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.metadata; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.util.Assertions; +import java.nio.ByteBuffer; + +/** + * A {@link MetadataDecoder} base class that validates input buffers and discards any for which + * {@link MetadataInputBuffer#isDecodeOnly()} is {@code true}. + */ +public abstract class SimpleMetadataDecoder implements MetadataDecoder { + + @Override + @Nullable + public final Metadata decode(MetadataInputBuffer inputBuffer) { + ByteBuffer buffer = Assertions.checkNotNull(inputBuffer.data); + Assertions.checkArgument( + buffer.position() == 0 && buffer.hasArray() && buffer.arrayOffset() == 0); + return inputBuffer.isDecodeOnly() ? null : decode(inputBuffer, buffer); + } + + /** + * Called by {@link #decode(MetadataInputBuffer)} after input buffer validation has been + * performed, except in the case that {@link MetadataInputBuffer#isDecodeOnly()} is {@code true}. + * + * @param inputBuffer The input buffer to decode. + * @param buffer The input buffer's {@link MetadataInputBuffer#data data buffer}, for convenience. + * Validation by {@link #decode} guarantees that {@link ByteBuffer#hasArray()}, {@link + * ByteBuffer#position()} and {@link ByteBuffer#arrayOffset()} are {@code true}, {@code 0} and + * {@code 0} respectively. + * @return The decoded metadata object, or {@code null} if the metadata could not be decoded. 
+ */ + @Nullable + protected abstract Metadata decode(MetadataInputBuffer inputBuffer, ByteBuffer buffer); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/dvbsi/AppInfoTable.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/dvbsi/AppInfoTable.java new file mode 100644 index 0000000000..cdfb15f15b --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/dvbsi/AppInfoTable.java @@ -0,0 +1,80 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.metadata.dvbsi; + +import android.os.Parcel; +import android.os.Parcelable; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.util.Assertions; + +/** + * A representation of a DVB Application Information Table (AIT). + * + *
      For more info on the AIT see section 5.3.4 of the + * DVB ETSI TS 102 809 v1.1.1 spec. + */ +public final class AppInfoTable implements Metadata.Entry { + /** + * The application shall be started when the service is selected, unless the application is + * already running. + */ + public static final int CONTROL_CODE_AUTOSTART = 0x01; + /** + * The application is allowed to run while the service is selected, however it shall not start + * automatically when the service becomes selected. + */ + public static final int CONTROL_CODE_PRESENT = 0x02; + + public final int controlCode; + public final String url; + + public AppInfoTable(int controlCode, String url) { + this.controlCode = controlCode; + this.url = url; + } + + @Override + public String toString() { + return "Ait(controlCode=" + controlCode + ",url=" + url + ")"; + } + + @Override + public int describeContents() { + return 0; + } + + @Override + public void writeToParcel(Parcel parcel, int i) { + parcel.writeString(url); + parcel.writeInt(controlCode); + } + + public static final Parcelable.Creator CREATOR = + new Parcelable.Creator() { + @Override + public AppInfoTable createFromParcel(Parcel in) { + String url = Assertions.checkNotNull(in.readString()); + int controlCode = in.readInt(); + return new AppInfoTable(controlCode, url); + } + + @Override + public AppInfoTable[] newArray(int size) { + return new AppInfoTable[size]; + } + }; +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/dvbsi/AppInfoTableDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/dvbsi/AppInfoTableDecoder.java new file mode 100644 index 0000000000..fb16945d82 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/dvbsi/AppInfoTableDecoder.java @@ -0,0 +1,136 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.metadata.dvbsi; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.metadata.MetadataInputBuffer; +import com.google.android.exoplayer2.metadata.SimpleMetadataDecoder; +import com.google.android.exoplayer2.util.ParsableBitArray; +import com.google.common.base.Charsets; +import java.nio.ByteBuffer; +import java.util.ArrayList; + +/** + * Decoder for the DVB Application Information Table (AIT). + * + *
      For more info on the AIT see section 5.3.4 of the + * DVB ETSI TS 102 809 v1.1.1 spec. + */ +public final class AppInfoTableDecoder extends SimpleMetadataDecoder { + + /** See section 5.3.6. */ + private static final int DESCRIPTOR_TRANSPORT_PROTOCOL = 0x02; + /** See section 5.3.7. */ + private static final int DESCRIPTOR_SIMPLE_APPLICATION_LOCATION = 0x15; + + /** See table 29 in section 5.3.6. */ + private static final int TRANSPORT_PROTOCOL_HTTP = 3; + + /** See table 16 in section 5.3.4.6. */ + public static final int APPLICATION_INFORMATION_TABLE_ID = 0x74; + + @Override + @Nullable + @SuppressWarnings("ByteBufferBackingArray") // Buffer validated by SimpleMetadataDecoder.decode + protected Metadata decode(MetadataInputBuffer inputBuffer, ByteBuffer buffer) { + int tableId = buffer.get(); + return tableId == APPLICATION_INFORMATION_TABLE_ID + ? parseAit(new ParsableBitArray(buffer.array(), buffer.limit())) + : null; + } + + @Nullable + private static Metadata parseAit(ParsableBitArray sectionData) { + // tableId, section_syntax_indication, reserved_future_use, reserved + sectionData.skipBits(12); + int sectionLength = sectionData.readBits(12); + int endOfSection = sectionData.getBytePosition() + sectionLength - 4 /* Ignore leading CRC */; + + // test_application_flag, application_type, reserved, version_number, current_next_indicator, + // section_number, last_section_number, reserved_future_use + sectionData.skipBits(44); + + int commonDescriptorsLength = sectionData.readBits(12); + + // Since we currently only keep URL and control code, which are unique per application, + // there is no useful information in common descriptor. + sectionData.skipBytes(commonDescriptorsLength); + + // reserved_future_use, application_loop_length + sectionData.skipBits(16); + + ArrayList appInfoTables = new ArrayList<>(); + while (sectionData.getBytePosition() < endOfSection) { + @Nullable String urlBase = null; + @Nullable String urlExtension = null; + + // application_identifier + sectionData.skipBits(48); + + int controlCode = sectionData.readBits(8); + + // reserved_future_use + sectionData.skipBits(4); + + int applicationDescriptorsLoopLength = sectionData.readBits(12); + int positionOfNextApplication = + sectionData.getBytePosition() + applicationDescriptorsLoopLength; + while (sectionData.getBytePosition() < positionOfNextApplication) { + int descriptorTag = sectionData.readBits(8); + int descriptorLength = sectionData.readBits(8); + int positionOfNextDescriptor = sectionData.getBytePosition() + descriptorLength; + + if (descriptorTag == DESCRIPTOR_TRANSPORT_PROTOCOL) { + // See section 5.3.6. + int protocolId = sectionData.readBits(16); + // label + sectionData.skipBits(8); + + if (protocolId == TRANSPORT_PROTOCOL_HTTP) { + // See section 5.3.6.2. + while (sectionData.getBytePosition() < positionOfNextDescriptor) { + int urlBaseLength = sectionData.readBits(8); + urlBase = sectionData.readBytesAsString(urlBaseLength, Charsets.US_ASCII); + + int extensionCount = sectionData.readBits(8); + for (int urlExtensionIndex = 0; + urlExtensionIndex < extensionCount; + urlExtensionIndex++) { + int urlExtensionLength = sectionData.readBits(8); + sectionData.skipBytes(urlExtensionLength); + } + } + } + } else if (descriptorTag == DESCRIPTOR_SIMPLE_APPLICATION_LOCATION) { + // See section 5.3.7. 
+ urlExtension = sectionData.readBytesAsString(descriptorLength, Charsets.US_ASCII); + } + + sectionData.setPosition(positionOfNextDescriptor * 8); + } + + sectionData.setPosition(positionOfNextApplication * 8); + + if (urlBase != null && urlExtension != null) { + appInfoTables.add(new AppInfoTable(controlCode, urlBase + urlExtension)); + } + } + + return appInfoTables.isEmpty() ? null : new Metadata(appInfoTables); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/dvbsi/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/dvbsi/package-info.java new file mode 100644 index 0000000000..33efd262fe --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/dvbsi/package-info.java @@ -0,0 +1,20 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +@NonNullApi +package com.google.android.exoplayer2.metadata.dvbsi; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/emsg/EventMessage.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/emsg/EventMessage.java index 7e3862ca31..8f8046d7e2 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/emsg/EventMessage.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/emsg/EventMessage.java @@ -50,33 +50,23 @@ public final class EventMessage implements Metadata.Entry { @VisibleForTesting public static final String SCTE35_SCHEME_ID = "urn:scte:scte35:2014:bin"; private static final Format ID3_FORMAT = - Format.createSampleFormat( - /* id= */ null, MimeTypes.APPLICATION_ID3, Format.OFFSET_SAMPLE_RELATIVE); + new Format.Builder().setSampleMimeType(MimeTypes.APPLICATION_ID3).build(); private static final Format SCTE35_FORMAT = - Format.createSampleFormat( - /* id= */ null, MimeTypes.APPLICATION_SCTE35, Format.OFFSET_SAMPLE_RELATIVE); + new Format.Builder().setSampleMimeType(MimeTypes.APPLICATION_SCTE35).build(); /** The message scheme. */ public final String schemeIdUri; - /** - * The value for the event. - */ + /** The value for the event. */ public final String value; - /** - * The duration of the event in milliseconds. - */ + /** The duration of the event in milliseconds. */ public final long durationMs; - /** - * The instance identifier. - */ + /** The instance identifier. */ public final long id; - /** - * The body of the message. - */ + /** The body of the message. */ public final byte[] messageData; // Lazily initialized hashcode. 
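The hunk above switches EventMessage's internal ID3 and SCTE-35 sample formats from the Format.createSampleFormat factory to the Format.Builder API. A minimal compile-only sketch of that builder pattern follows; the wrapper class name is invented for illustration and is not part of the patch.

import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.MimeTypes;

/** Illustration only: builds a sample Format the same way the hunk above does. */
final class FormatBuilderSketch {

  /** Counterpart of the ID3_FORMAT constant defined in EventMessage above. */
  static final Format ID3_SAMPLE_FORMAT =
      new Format.Builder().setSampleMimeType(MimeTypes.APPLICATION_ID3).build();

  private FormatBuilderSketch() {}
}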
@@ -187,16 +177,14 @@ public void writeToParcel(Parcel dest, int flags) { public static final Parcelable.Creator CREATOR = new Parcelable.Creator() { - @Override - public EventMessage createFromParcel(Parcel in) { - return new EventMessage(in); - } - - @Override - public EventMessage[] newArray(int size) { - return new EventMessage[size]; - } - - }; + @Override + public EventMessage createFromParcel(Parcel in) { + return new EventMessage(in); + } + @Override + public EventMessage[] newArray(int size) { + return new EventMessage[size]; + } + }; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/emsg/EventMessageDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/emsg/EventMessageDecoder.java index d87376feb0..999f0228bd 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/emsg/EventMessageDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/emsg/EventMessageDecoder.java @@ -16,32 +16,29 @@ package com.google.android.exoplayer2.metadata.emsg; import com.google.android.exoplayer2.metadata.Metadata; -import com.google.android.exoplayer2.metadata.MetadataDecoder; import com.google.android.exoplayer2.metadata.MetadataInputBuffer; +import com.google.android.exoplayer2.metadata.SimpleMetadataDecoder; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.ParsableByteArray; import java.nio.ByteBuffer; import java.util.Arrays; /** Decodes data encoded by {@link EventMessageEncoder}. */ -public final class EventMessageDecoder implements MetadataDecoder { +public final class EventMessageDecoder extends SimpleMetadataDecoder { - @SuppressWarnings("ByteBufferBackingArray") @Override - public Metadata decode(MetadataInputBuffer inputBuffer) { - ByteBuffer buffer = Assertions.checkNotNull(inputBuffer.data); - byte[] data = buffer.array(); - int size = buffer.limit(); - return new Metadata(decode(new ParsableByteArray(data, size))); + @SuppressWarnings("ByteBufferBackingArray") // Buffer validated by SimpleMetadataDecoder.decode + protected Metadata decode(MetadataInputBuffer inputBuffer, ByteBuffer buffer) { + return new Metadata(decode(new ParsableByteArray(buffer.array(), buffer.limit()))); } public EventMessage decode(ParsableByteArray emsgData) { String schemeIdUri = Assertions.checkNotNull(emsgData.readNullTerminatedString()); String value = Assertions.checkNotNull(emsgData.readNullTerminatedString()); - long durationMs = emsgData.readUnsignedInt(); - long id = emsgData.readUnsignedInt(); + long durationMs = emsgData.readLong(); + long id = emsgData.readLong(); byte[] messageData = - Arrays.copyOfRange(emsgData.data, emsgData.getPosition(), emsgData.limit()); + Arrays.copyOfRange(emsgData.getData(), emsgData.getPosition(), emsgData.limit()); return new EventMessage(schemeIdUri, value, durationMs, id, messageData); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/emsg/EventMessageEncoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/emsg/EventMessageEncoder.java index 4fa3f71b32..81c11ba48c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/emsg/EventMessageEncoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/emsg/EventMessageEncoder.java @@ -45,8 +45,8 @@ public byte[] encode(EventMessage eventMessage) { writeNullTerminatedString(dataOutputStream, eventMessage.schemeIdUri); String nonNullValue = eventMessage.value != 
null ? eventMessage.value : ""; writeNullTerminatedString(dataOutputStream, nonNullValue); - writeUnsignedInt(dataOutputStream, eventMessage.durationMs); - writeUnsignedInt(dataOutputStream, eventMessage.id); + dataOutputStream.writeLong(eventMessage.durationMs); + dataOutputStream.writeLong(eventMessage.id); dataOutputStream.write(eventMessage.messageData); dataOutputStream.flush(); return byteArrayOutputStream.toByteArray(); @@ -61,13 +61,4 @@ private static void writeNullTerminatedString(DataOutputStream dataOutputStream, dataOutputStream.writeBytes(value); dataOutputStream.writeByte(0); } - - private static void writeUnsignedInt(DataOutputStream outputStream, long value) - throws IOException { - outputStream.writeByte((int) (value >>> 24) & 0xFF); - outputStream.writeByte((int) (value >>> 16) & 0xFF); - outputStream.writeByte((int) (value >>> 8) & 0xFF); - outputStream.writeByte((int) value & 0xFF); - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/flac/PictureFrame.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/flac/PictureFrame.java index ce134614ad..11fc1f24e5 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/flac/PictureFrame.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/flac/PictureFrame.java @@ -20,10 +20,13 @@ import android.os.Parcel; import android.os.Parcelable; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.MediaMetadata; import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.common.base.Charsets; import java.util.Arrays; -/** A picture parsed from a FLAC file. */ +/** A picture parsed from a Vorbis Comment or a FLAC picture block. */ public final class PictureFrame implements Metadata.Entry { /** The type of the picture. */ @@ -73,6 +76,11 @@ public PictureFrame( this.pictureData = castNonNull(in.createByteArray()); } + @Override + public void populateMediaMetadata(MediaMetadata.Builder builder) { + builder.maybeSetArtworkData(pictureData, pictureType); + } + @Override public String toString() { return "Picture: mimeType=" + mimeType + ", description=" + description; @@ -128,6 +136,35 @@ public int describeContents() { return 0; } + /** + * Parses a {@code METADATA_BLOCK_PICTURE} into a {@code PictureFrame} instance. + * + *
      {@code pictureBlock} may be read directly from a FLAC file, or decoded from + * the base64 content of a Vorbis Comment. + * + * @param pictureBlock The data of the {@code METADATA_BLOCK_PICTURE}, not including any headers. + * @return A {@code PictureFrame} parsed from {@code pictureBlock}. + */ + public static PictureFrame fromPictureBlock(ParsableByteArray pictureBlock) { + int pictureType = pictureBlock.readInt(); + int mimeTypeLength = pictureBlock.readInt(); + String mimeType = pictureBlock.readString(mimeTypeLength, Charsets.US_ASCII); + int descriptionLength = pictureBlock.readInt(); + String description = pictureBlock.readString(descriptionLength); + int width = pictureBlock.readInt(); + int height = pictureBlock.readInt(); + int depth = pictureBlock.readInt(); + int colors = pictureBlock.readInt(); + int pictureDataLength = pictureBlock.readInt(); + byte[] pictureData = new byte[pictureDataLength]; + pictureBlock.readBytes(pictureData, 0, pictureDataLength); + + return new PictureFrame( + pictureType, mimeType, description, width, height, depth, colors, pictureData); + } + public static final Parcelable.Creator CREATOR = new Parcelable.Creator() { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/flac/VorbisComment.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/flac/VorbisComment.java index 9f44cdf393..8754857459 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/flac/VorbisComment.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/flac/VorbisComment.java @@ -20,10 +20,14 @@ import android.os.Parcel; import android.os.Parcelable; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.MediaMetadata; import com.google.android.exoplayer2.metadata.Metadata; -/** A vorbis comment. */ -public final class VorbisComment implements Metadata.Entry { +/** + * @deprecated Use {@link com.google.android.exoplayer2.metadata.vorbis.VorbisComment} instead. + */ +@Deprecated +public class VorbisComment implements Metadata.Entry { /** The key. 
*/ public final String key; @@ -40,11 +44,34 @@ public VorbisComment(String key, String value) { this.value = value; } - /* package */ VorbisComment(Parcel in) { + protected VorbisComment(Parcel in) { this.key = castNonNull(in.readString()); this.value = castNonNull(in.readString()); } + @Override + public void populateMediaMetadata(MediaMetadata.Builder builder) { + switch (key) { + case "TITLE": + builder.setTitle(value); + break; + case "ARTIST": + builder.setArtist(value); + break; + case "ALBUM": + builder.setAlbumTitle(value); + break; + case "ALBUMARTIST": + builder.setAlbumArtist(value); + break; + case "DESCRIPTION": + builder.setDescription(value); + break; + default: + break; + } + } + @Override public String toString() { return "VC: " + key + "=" + value; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/icy/IcyDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/icy/IcyDecoder.java index 854a8fc3a4..a97cb6a2ce 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/icy/IcyDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/icy/IcyDecoder.java @@ -16,21 +16,19 @@ package com.google.android.exoplayer2.metadata.icy; import androidx.annotation.Nullable; -import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.metadata.Metadata; -import com.google.android.exoplayer2.metadata.MetadataDecoder; import com.google.android.exoplayer2.metadata.MetadataInputBuffer; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.Util; +import com.google.android.exoplayer2.metadata.SimpleMetadataDecoder; +import com.google.common.base.Ascii; +import com.google.common.base.Charsets; import java.nio.ByteBuffer; import java.nio.charset.CharacterCodingException; -import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.util.regex.Matcher; import java.util.regex.Pattern; /** Decodes ICY stream information. 
*/ -public final class IcyDecoder implements MetadataDecoder { +public final class IcyDecoder extends SimpleMetadataDecoder { private static final Pattern METADATA_ELEMENT = Pattern.compile("(.+?)='(.*?)';", Pattern.DOTALL); private static final String STREAM_KEY_NAME = "streamtitle"; @@ -40,14 +38,12 @@ public final class IcyDecoder implements MetadataDecoder { private final CharsetDecoder iso88591Decoder; public IcyDecoder() { - utf8Decoder = Charset.forName(C.UTF8_NAME).newDecoder(); - iso88591Decoder = Charset.forName(C.ISO88591_NAME).newDecoder(); + utf8Decoder = Charsets.UTF_8.newDecoder(); + iso88591Decoder = Charsets.ISO_8859_1.newDecoder(); } @Override - @SuppressWarnings("ByteBufferBackingArray") - public Metadata decode(MetadataInputBuffer inputBuffer) { - ByteBuffer buffer = Assertions.checkNotNull(inputBuffer.data); + protected Metadata decode(MetadataInputBuffer inputBuffer, ByteBuffer buffer) { @Nullable String icyString = decodeToString(buffer); byte[] icyBytes = new byte[buffer.limit()]; buffer.get(icyBytes); @@ -61,15 +57,19 @@ public Metadata decode(MetadataInputBuffer inputBuffer) { int index = 0; Matcher matcher = METADATA_ELEMENT.matcher(icyString); while (matcher.find(index)) { - @Nullable String key = Util.toLowerInvariant(matcher.group(1)); + @Nullable String key = matcher.group(1); @Nullable String value = matcher.group(2); - switch (key) { - case STREAM_KEY_NAME: - name = value; - break; - case STREAM_KEY_URL: - url = value; - break; + if (key != null) { + switch (Ascii.toLowerCase(key)) { + case STREAM_KEY_NAME: + name = value; + break; + case STREAM_KEY_URL: + url = value; + break; + default: + break; + } } index = matcher.end(); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/icy/IcyHeaders.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/icy/IcyHeaders.java index 35c5be86d6..5a6d4a1f7f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/icy/IcyHeaders.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/icy/IcyHeaders.java @@ -20,6 +20,7 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.MediaMetadata; import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; @@ -169,6 +170,16 @@ public IcyHeaders( metadataInterval = in.readInt(); } + @Override + public void populateMediaMetadata(MediaMetadata.Builder builder) { + if (name != null) { + builder.setStation(name); + } + if (genre != null) { + builder.setGenre(genre); + } + } + @Override public boolean equals(@Nullable Object obj) { if (this == obj) { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/icy/IcyInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/icy/IcyInfo.java index 1a3ed2ea6d..9fda4ac725 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/icy/IcyInfo.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/icy/IcyInfo.java @@ -18,6 +18,7 @@ import android.os.Parcel; import android.os.Parcelable; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.MediaMetadata; import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.util.Assertions; import java.util.Arrays; @@ -52,6 +53,13 @@ public IcyInfo(byte[] rawMetadata, 
@Nullable String title, @Nullable String url) url = in.readString(); } + @Override + public void populateMediaMetadata(MediaMetadata.Builder builder) { + if (title != null) { + builder.setTitle(title); + } + } + @Override public boolean equals(@Nullable Object obj) { if (this == obj) { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/ApicFrame.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/ApicFrame.java index 3f4a400677..84d426c6b9 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/ApicFrame.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/ApicFrame.java @@ -20,12 +20,11 @@ import android.os.Parcel; import android.os.Parcelable; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.MediaMetadata; import com.google.android.exoplayer2.util.Util; import java.util.Arrays; -/** - * APIC (Attached Picture) ID3 frame. - */ +/** APIC (Attached Picture) ID3 frame. */ public final class ApicFrame extends Id3Frame { public static final String ID = "APIC"; @@ -52,6 +51,11 @@ public ApicFrame( pictureData = castNonNull(in.createByteArray()); } + @Override + public void populateMediaMetadata(MediaMetadata.Builder builder) { + builder.maybeSetArtworkData(pictureData, pictureType); + } + @Override public boolean equals(@Nullable Object obj) { if (this == obj) { @@ -61,7 +65,8 @@ public boolean equals(@Nullable Object obj) { return false; } ApicFrame other = (ApicFrame) obj; - return pictureType == other.pictureType && Util.areEqual(mimeType, other.mimeType) + return pictureType == other.pictureType + && Util.areEqual(mimeType, other.mimeType) && Util.areEqual(description, other.description) && Arrays.equals(pictureData, other.pictureData); } @@ -91,18 +96,17 @@ public void writeToParcel(Parcel dest, int flags) { dest.writeByteArray(pictureData); } - public static final Parcelable.Creator CREATOR = new Parcelable.Creator() { - - @Override - public ApicFrame createFromParcel(Parcel in) { - return new ApicFrame(in); - } - - @Override - public ApicFrame[] newArray(int size) { - return new ApicFrame[size]; - } + public static final Parcelable.Creator CREATOR = + new Parcelable.Creator() { - }; + @Override + public ApicFrame createFromParcel(Parcel in) { + return new ApicFrame(in); + } + @Override + public ApicFrame[] newArray(int size) { + return new ApicFrame[size]; + } + }; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/BinaryFrame.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/BinaryFrame.java index 6c6057bb7a..39305404cf 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/BinaryFrame.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/BinaryFrame.java @@ -22,9 +22,7 @@ import androidx.annotation.Nullable; import java.util.Arrays; -/** - * Binary ID3 frame. - */ +/** Binary ID3 frame. 
*/ public final class BinaryFrame extends Id3Frame { public final byte[] data; @@ -77,7 +75,5 @@ public BinaryFrame createFromParcel(Parcel in) { public BinaryFrame[] newArray(int size) { return new BinaryFrame[size]; } - }; - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/ChapterFrame.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/ChapterFrame.java index bf5d2de6ea..e759a73aac 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/ChapterFrame.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/ChapterFrame.java @@ -23,9 +23,7 @@ import com.google.android.exoplayer2.util.Util; import java.util.Arrays; -/** - * Chapter information ID3 frame. - */ +/** Chapter information ID3 frame. */ public final class ChapterFrame extends Id3Frame { public static final String ID = "CHAP"; @@ -33,18 +31,20 @@ public final class ChapterFrame extends Id3Frame { public final String chapterId; public final int startTimeMs; public final int endTimeMs; - /** - * The byte offset of the start of the chapter, or {@link C#POSITION_UNSET} if not set. - */ + /** The byte offset of the start of the chapter, or {@link C#POSITION_UNSET} if not set. */ public final long startOffset; - /** - * The byte offset of the end of the chapter, or {@link C#POSITION_UNSET} if not set. - */ + /** The byte offset of the end of the chapter, or {@link C#POSITION_UNSET} if not set. */ public final long endOffset; + private final Id3Frame[] subFrames; - public ChapterFrame(String chapterId, int startTimeMs, int endTimeMs, long startOffset, - long endOffset, Id3Frame[] subFrames) { + public ChapterFrame( + String chapterId, + int startTimeMs, + int endTimeMs, + long startOffset, + long endOffset, + Id3Frame[] subFrames) { super(ID); this.chapterId = chapterId; this.startTimeMs = startTimeMs; @@ -68,16 +68,12 @@ public ChapterFrame(String chapterId, int startTimeMs, int endTimeMs, long start } } - /** - * Returns the number of sub-frames. - */ + /** Returns the number of sub-frames. */ public int getSubFrameCount() { return subFrames.length; } - /** - * Returns the sub-frame at {@code index}. - */ + /** Returns the sub-frame at {@code index}. */ public Id3Frame getSubFrame(int index) { return subFrames[index]; } @@ -128,18 +124,17 @@ public int describeContents() { return 0; } - public static final Creator CREATOR = new Creator() { - - @Override - public ChapterFrame createFromParcel(Parcel in) { - return new ChapterFrame(in); - } - - @Override - public ChapterFrame[] newArray(int size) { - return new ChapterFrame[size]; - } + public static final Creator CREATOR = + new Creator() { - }; + @Override + public ChapterFrame createFromParcel(Parcel in) { + return new ChapterFrame(in); + } + @Override + public ChapterFrame[] newArray(int size) { + return new ChapterFrame[size]; + } + }; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/ChapterTocFrame.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/ChapterTocFrame.java index 5d454e84ac..ee7bcd397e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/ChapterTocFrame.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/ChapterTocFrame.java @@ -22,9 +22,7 @@ import com.google.android.exoplayer2.util.Util; import java.util.Arrays; -/** - * Chapter table of contents ID3 frame. - */ +/** Chapter table of contents ID3 frame. 
*/ public final class ChapterTocFrame extends Id3Frame { public static final String ID = "CTOC"; @@ -35,7 +33,11 @@ public final class ChapterTocFrame extends Id3Frame { public final String[] children; private final Id3Frame[] subFrames; - public ChapterTocFrame(String elementId, boolean isRoot, boolean isOrdered, String[] children, + public ChapterTocFrame( + String elementId, + boolean isRoot, + boolean isOrdered, + String[] children, Id3Frame[] subFrames) { super(ID); this.elementId = elementId; @@ -45,8 +47,7 @@ public ChapterTocFrame(String elementId, boolean isRoot, boolean isOrdered, Stri this.subFrames = subFrames; } - /* package */ - ChapterTocFrame(Parcel in) { + /* package */ ChapterTocFrame(Parcel in) { super(ID); this.elementId = castNonNull(in.readString()); this.isRoot = in.readByte() != 0; @@ -59,16 +60,12 @@ public ChapterTocFrame(String elementId, boolean isRoot, boolean isOrdered, Stri } } - /** - * Returns the number of sub-frames. - */ + /** Returns the number of sub-frames. */ public int getSubFrameCount() { return subFrames.length; } - /** - * Returns the sub-frame at {@code index}. - */ + /** Returns the sub-frame at {@code index}. */ public Id3Frame getSubFrame(int index) { return subFrames[index]; } @@ -110,18 +107,17 @@ public void writeToParcel(Parcel dest, int flags) { } } - public static final Creator CREATOR = new Creator() { - - @Override - public ChapterTocFrame createFromParcel(Parcel in) { - return new ChapterTocFrame(in); - } - - @Override - public ChapterTocFrame[] newArray(int size) { - return new ChapterTocFrame[size]; - } + public static final Creator CREATOR = + new Creator() { - }; + @Override + public ChapterTocFrame createFromParcel(Parcel in) { + return new ChapterTocFrame(in); + } + @Override + public ChapterTocFrame[] newArray(int size) { + return new ChapterTocFrame[size]; + } + }; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/CommentFrame.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/CommentFrame.java index 363057f17a..5d441e782f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/CommentFrame.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/CommentFrame.java @@ -22,9 +22,7 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.util.Util; -/** - * Comment ID3 frame. - */ +/** Comment ID3 frame. 
*/ public final class CommentFrame extends Id3Frame { public static final String ID = "COMM"; @@ -56,7 +54,8 @@ public boolean equals(@Nullable Object obj) { return false; } CommentFrame other = (CommentFrame) obj; - return Util.areEqual(description, other.description) && Util.areEqual(language, other.language) + return Util.areEqual(description, other.description) + && Util.areEqual(language, other.language) && Util.areEqual(text, other.text); } @@ -95,7 +94,5 @@ public CommentFrame createFromParcel(Parcel in) { public CommentFrame[] newArray(int size) { return new CommentFrame[size]; } - }; - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/GeobFrame.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/GeobFrame.java index 6023f76aa1..2c609db8d5 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/GeobFrame.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/GeobFrame.java @@ -23,9 +23,7 @@ import com.google.android.exoplayer2.util.Util; import java.util.Arrays; -/** - * GEOB (General Encapsulated Object) ID3 frame. - */ +/** GEOB (General Encapsulated Object) ID3 frame. */ public final class GeobFrame extends Id3Frame { public static final String ID = "GEOB"; @@ -60,8 +58,10 @@ public boolean equals(@Nullable Object obj) { return false; } GeobFrame other = (GeobFrame) obj; - return Util.areEqual(mimeType, other.mimeType) && Util.areEqual(filename, other.filename) - && Util.areEqual(description, other.description) && Arrays.equals(data, other.data); + return Util.areEqual(mimeType, other.mimeType) + && Util.areEqual(filename, other.filename) + && Util.areEqual(description, other.description) + && Arrays.equals(data, other.data); } @Override @@ -95,18 +95,17 @@ public void writeToParcel(Parcel dest, int flags) { dest.writeByteArray(data); } - public static final Parcelable.Creator CREATOR = new Parcelable.Creator() { - - @Override - public GeobFrame createFromParcel(Parcel in) { - return new GeobFrame(in); - } - - @Override - public GeobFrame[] newArray(int size) { - return new GeobFrame[size]; - } + public static final Parcelable.Creator CREATOR = + new Parcelable.Creator() { - }; + @Override + public GeobFrame createFromParcel(Parcel in) { + return new GeobFrame(in); + } + @Override + public GeobFrame[] newArray(int size) { + return new GeobFrame[size]; + } + }; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/Id3Decoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/Id3Decoder.java index faab7f0775..0ee41664e4 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/Id3Decoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/Id3Decoder.java @@ -18,28 +18,26 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.metadata.Metadata; -import com.google.android.exoplayer2.metadata.MetadataDecoder; import com.google.android.exoplayer2.metadata.MetadataInputBuffer; -import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.metadata.SimpleMetadataDecoder; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.ParsableBitArray; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; -import java.io.UnsupportedEncodingException; +import com.google.common.base.Ascii; 
+import com.google.common.base.Charsets; +import com.google.common.collect.ImmutableList; import java.nio.ByteBuffer; +import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Locale; -/** - * Decodes ID3 tags. - */ -public final class Id3Decoder implements MetadataDecoder { +/** Decodes ID3 tags. */ +public final class Id3Decoder extends SimpleMetadataDecoder { - /** - * A predicate for determining whether individual frames should be decoded. - */ + /** A predicate for determining whether individual frames should be decoded. */ public interface FramePredicate { /** @@ -53,7 +51,6 @@ public interface FramePredicate { * @return Whether the frame should be decoded. */ boolean evaluate(int majorVersion, int id0, int id1, int id2, int id3); - } /** A predicate that indicates no frames should be decoded. */ @@ -64,9 +61,7 @@ public interface FramePredicate { /** The first three bytes of a well formed ID3 tag header. */ public static final int ID3_TAG = 0x00494433; - /** - * Length of an ID3 tag header. - */ + /** Length of an ID3 tag header. */ public static final int ID3_HEADER_LENGTH = 10; private static final int FRAME_FLAG_V3_IS_COMPRESSED = 0x0080; @@ -96,11 +91,10 @@ public Id3Decoder(@Nullable FramePredicate framePredicate) { this.framePredicate = framePredicate; } - @SuppressWarnings("ByteBufferBackingArray") @Override @Nullable - public Metadata decode(MetadataInputBuffer inputBuffer) { - ByteBuffer buffer = Assertions.checkNotNull(inputBuffer.data); + @SuppressWarnings("ByteBufferBackingArray") // Buffer validated by SimpleMetadataDecoder.decode + protected Metadata decode(MetadataInputBuffer inputBuffer, ByteBuffer buffer) { return decode(buffer.array(), buffer.limit()); } @@ -117,7 +111,7 @@ public Metadata decode(byte[] data, int size) { List id3Frames = new ArrayList<>(); ParsableByteArray id3Data = new ParsableByteArray(data, size); - Id3Header id3Header = decodeHeader(id3Data); + @Nullable Id3Header id3Header = decodeHeader(id3Data); if (id3Header == null) { return null; } @@ -141,8 +135,14 @@ public Metadata decode(byte[] data, int size) { } while (id3Data.bytesLeft() >= frameHeaderSize) { - Id3Frame frame = decodeFrame(id3Header.majorVersion, id3Data, unsignedIntFrameSizeHack, - frameHeaderSize, framePredicate); + @Nullable + Id3Frame frame = + decodeFrame( + id3Header.majorVersion, + id3Data, + unsignedIntFrameSizeHack, + frameHeaderSize, + framePredicate); if (frame != null) { id3Frames.add(frame); } @@ -207,8 +207,11 @@ private static Id3Header decodeHeader(ParsableByteArray data) { return new Id3Header(majorVersion, isUnsynchronized, framesSize); } - private static boolean validateFrames(ParsableByteArray id3Data, int majorVersion, - int frameHeaderSize, boolean unsignedIntFrameSizeHack) { + private static boolean validateFrames( + ParsableByteArray id3Data, + int majorVersion, + int frameHeaderSize, + boolean unsignedIntFrameSizeHack) { int startPosition = id3Data.getPosition(); try { while (id3Data.bytesLeft() >= frameHeaderSize) { @@ -235,8 +238,11 @@ private static boolean validateFrames(ParsableByteArray id3Data, int majorVersio if ((frameSize & 0x808080L) != 0) { return false; } - frameSize = (frameSize & 0xFF) | (((frameSize >> 8) & 0xFF) << 7) - | (((frameSize >> 16) & 0xFF) << 14) | (((frameSize >> 24) & 0xFF) << 21); + frameSize = + (frameSize & 0xFF) + | (((frameSize >> 8) & 0xFF) << 7) + | (((frameSize >> 16) & 0xFF) << 14) + | (((frameSize >> 24) & 0xFF) << 21); } boolean hasGroupIdentifier = 
false; boolean hasDataLength = false; @@ -286,8 +292,11 @@ private static Id3Frame decodeFrame( if (majorVersion == 4) { frameSize = id3Data.readUnsignedIntToInt(); if (!unsignedIntFrameSizeHack) { - frameSize = (frameSize & 0xFF) | (((frameSize >> 8) & 0xFF) << 7) - | (((frameSize >> 16) & 0xFF) << 14) | (((frameSize >> 24) & 0xFF) << 21); + frameSize = + (frameSize & 0xFF) + | (((frameSize >> 8) & 0xFF) << 7) + | (((frameSize >> 16) & 0xFF) << 14) + | (((frameSize >> 24) & 0xFF) << 21); } } else if (majorVersion == 3) { frameSize = id3Data.readUnsignedIntToInt(); @@ -296,7 +305,11 @@ private static Id3Frame decodeFrame( } int flags = majorVersion >= 3 ? id3Data.readUnsignedShort() : 0; - if (frameId0 == 0 && frameId1 == 0 && frameId2 == 0 && frameId3 == 0 && frameSize == 0 + if (frameId0 == 0 + && frameId1 == 0 + && frameId2 == 0 + && frameId3 == 0 + && frameSize == 0 && flags == 0) { // We must be reading zero padding at the end of the tag. id3Data.setPosition(id3Data.limit()); @@ -357,13 +370,17 @@ private static Id3Frame decodeFrame( try { Id3Frame frame; - if (frameId0 == 'T' && frameId1 == 'X' && frameId2 == 'X' + if (frameId0 == 'T' + && frameId1 == 'X' + && frameId2 == 'X' && (majorVersion == 2 || frameId3 == 'X')) { frame = decodeTxxxFrame(id3Data, frameSize); } else if (frameId0 == 'T') { String id = getFrameId(majorVersion, frameId0, frameId1, frameId2, frameId3); frame = decodeTextInformationFrame(id3Data, frameSize, id); - } else if (frameId0 == 'W' && frameId1 == 'X' && frameId2 == 'X' + } else if (frameId0 == 'W' + && frameId1 == 'X' + && frameId2 == 'X' && (majorVersion == 2 || frameId3 == 'X')) { frame = decodeWxxxFrame(id3Data, frameSize); } else if (frameId0 == 'W') { @@ -371,21 +388,38 @@ private static Id3Frame decodeFrame( frame = decodeUrlLinkFrame(id3Data, frameSize, id); } else if (frameId0 == 'P' && frameId1 == 'R' && frameId2 == 'I' && frameId3 == 'V') { frame = decodePrivFrame(id3Data, frameSize); - } else if (frameId0 == 'G' && frameId1 == 'E' && frameId2 == 'O' + } else if (frameId0 == 'G' + && frameId1 == 'E' + && frameId2 == 'O' && (frameId3 == 'B' || majorVersion == 2)) { frame = decodeGeobFrame(id3Data, frameSize); - } else if (majorVersion == 2 ? (frameId0 == 'P' && frameId1 == 'I' && frameId2 == 'C') + } else if (majorVersion == 2 + ? 
(frameId0 == 'P' && frameId1 == 'I' && frameId2 == 'C') : (frameId0 == 'A' && frameId1 == 'P' && frameId2 == 'I' && frameId3 == 'C')) { frame = decodeApicFrame(id3Data, frameSize, majorVersion); - } else if (frameId0 == 'C' && frameId1 == 'O' && frameId2 == 'M' + } else if (frameId0 == 'C' + && frameId1 == 'O' + && frameId2 == 'M' && (frameId3 == 'M' || majorVersion == 2)) { frame = decodeCommentFrame(id3Data, frameSize); } else if (frameId0 == 'C' && frameId1 == 'H' && frameId2 == 'A' && frameId3 == 'P') { - frame = decodeChapterFrame(id3Data, frameSize, majorVersion, unsignedIntFrameSizeHack, - frameHeaderSize, framePredicate); + frame = + decodeChapterFrame( + id3Data, + frameSize, + majorVersion, + unsignedIntFrameSizeHack, + frameHeaderSize, + framePredicate); } else if (frameId0 == 'C' && frameId1 == 'T' && frameId2 == 'O' && frameId3 == 'C') { - frame = decodeChapterTOCFrame(id3Data, frameSize, majorVersion, unsignedIntFrameSizeHack, - frameHeaderSize, framePredicate); + frame = + decodeChapterTOCFrame( + id3Data, + frameSize, + majorVersion, + unsignedIntFrameSizeHack, + frameHeaderSize, + framePredicate); } else if (frameId0 == 'M' && frameId1 == 'L' && frameId2 == 'L' && frameId3 == 'T') { frame = decodeMlltFrame(id3Data, frameSize); } else { @@ -393,105 +427,118 @@ private static Id3Frame decodeFrame( frame = decodeBinaryFrame(id3Data, frameSize, id); } if (frame == null) { - Log.w(TAG, "Failed to decode frame: id=" - + getFrameId(majorVersion, frameId0, frameId1, frameId2, frameId3) + ", frameSize=" - + frameSize); + Log.w( + TAG, + "Failed to decode frame: id=" + + getFrameId(majorVersion, frameId0, frameId1, frameId2, frameId3) + + ", frameSize=" + + frameSize); } return frame; - } catch (UnsupportedEncodingException e) { - Log.w(TAG, "Unsupported character encoding"); - return null; } finally { id3Data.setPosition(nextFramePosition); } } @Nullable - private static TextInformationFrame decodeTxxxFrame(ParsableByteArray id3Data, int frameSize) - throws UnsupportedEncodingException { + private static TextInformationFrame decodeTxxxFrame(ParsableByteArray id3Data, int frameSize) { if (frameSize < 1) { // Frame is malformed. return null; } int encoding = id3Data.readUnsignedByte(); - String charset = getCharsetName(encoding); byte[] data = new byte[frameSize - 1]; id3Data.readBytes(data, 0, frameSize - 1); - int descriptionEndIndex = indexOfEos(data, 0, encoding); - String description = new String(data, 0, descriptionEndIndex, charset); + int descriptionEndIndex = indexOfTerminator(data, 0, encoding); + String description = new String(data, 0, descriptionEndIndex, getCharset(encoding)); - int valueStartIndex = descriptionEndIndex + delimiterLength(encoding); - int valueEndIndex = indexOfEos(data, valueStartIndex, encoding); - String value = decodeStringIfValid(data, valueStartIndex, valueEndIndex, charset); - - return new TextInformationFrame("TXXX", description, value); + ImmutableList values = + decodeTextInformationFrameValues( + data, encoding, descriptionEndIndex + delimiterLength(encoding)); + return new TextInformationFrame("TXXX", description, values); } @Nullable private static TextInformationFrame decodeTextInformationFrame( - ParsableByteArray id3Data, int frameSize, String id) throws UnsupportedEncodingException { + ParsableByteArray id3Data, int frameSize, String id) { if (frameSize < 1) { // Frame is malformed. 
return null; } int encoding = id3Data.readUnsignedByte(); - String charset = getCharsetName(encoding); byte[] data = new byte[frameSize - 1]; id3Data.readBytes(data, 0, frameSize - 1); - int valueEndIndex = indexOfEos(data, 0, encoding); - String value = new String(data, 0, valueEndIndex, charset); + ImmutableList values = decodeTextInformationFrameValues(data, encoding, 0); + return new TextInformationFrame(id, null, values); + } - return new TextInformationFrame(id, null, value); + private static ImmutableList decodeTextInformationFrameValues( + byte[] data, final int encoding, final int index) { + if (index >= data.length) { + return ImmutableList.of(""); + } + + ImmutableList.Builder values = ImmutableList.builder(); + int valueStartIndex = index; + int valueEndIndex = indexOfTerminator(data, valueStartIndex, encoding); + while (valueStartIndex < valueEndIndex) { + String value = + new String(data, valueStartIndex, valueEndIndex - valueStartIndex, getCharset(encoding)); + values.add(value); + + valueStartIndex = valueEndIndex + delimiterLength(encoding); + valueEndIndex = indexOfTerminator(data, valueStartIndex, encoding); + } + + ImmutableList result = values.build(); + return result.isEmpty() ? ImmutableList.of("") : result; } @Nullable - private static UrlLinkFrame decodeWxxxFrame(ParsableByteArray id3Data, int frameSize) - throws UnsupportedEncodingException { + private static UrlLinkFrame decodeWxxxFrame(ParsableByteArray id3Data, int frameSize) { if (frameSize < 1) { // Frame is malformed. return null; } int encoding = id3Data.readUnsignedByte(); - String charset = getCharsetName(encoding); byte[] data = new byte[frameSize - 1]; id3Data.readBytes(data, 0, frameSize - 1); - int descriptionEndIndex = indexOfEos(data, 0, encoding); - String description = new String(data, 0, descriptionEndIndex, charset); + int descriptionEndIndex = indexOfTerminator(data, 0, encoding); + String description = new String(data, 0, descriptionEndIndex, getCharset(encoding)); int urlStartIndex = descriptionEndIndex + delimiterLength(encoding); int urlEndIndex = indexOfZeroByte(data, urlStartIndex); - String url = decodeStringIfValid(data, urlStartIndex, urlEndIndex, "ISO-8859-1"); + String url = decodeStringIfValid(data, urlStartIndex, urlEndIndex, Charsets.ISO_8859_1); return new UrlLinkFrame("WXXX", description, url); } - private static UrlLinkFrame decodeUrlLinkFrame(ParsableByteArray id3Data, int frameSize, - String id) throws UnsupportedEncodingException { + private static UrlLinkFrame decodeUrlLinkFrame( + ParsableByteArray id3Data, int frameSize, String id) { byte[] data = new byte[frameSize]; id3Data.readBytes(data, 0, frameSize); int urlEndIndex = indexOfZeroByte(data, 0); - String url = new String(data, 0, urlEndIndex, "ISO-8859-1"); + String url = new String(data, 0, urlEndIndex, Charsets.ISO_8859_1); return new UrlLinkFrame(id, null, url); } - private static PrivFrame decodePrivFrame(ParsableByteArray id3Data, int frameSize) - throws UnsupportedEncodingException { + private static PrivFrame decodePrivFrame(ParsableByteArray id3Data, int frameSize) { byte[] data = new byte[frameSize]; id3Data.readBytes(data, 0, frameSize); int ownerEndIndex = indexOfZeroByte(data, 0); - String owner = new String(data, 0, ownerEndIndex, "ISO-8859-1"); + String owner = new String(data, 0, ownerEndIndex, Charsets.ISO_8859_1); int privateDataStartIndex = ownerEndIndex + 1; byte[] privateData = copyOfRangeIfValid(data, privateDataStartIndex, data.length); @@ -499,23 +546,22 @@ private static PrivFrame 
decodePrivFrame(ParsableByteArray id3Data, int frameSiz return new PrivFrame(owner, privateData); } - private static GeobFrame decodeGeobFrame(ParsableByteArray id3Data, int frameSize) - throws UnsupportedEncodingException { + private static GeobFrame decodeGeobFrame(ParsableByteArray id3Data, int frameSize) { int encoding = id3Data.readUnsignedByte(); - String charset = getCharsetName(encoding); + Charset charset = getCharset(encoding); byte[] data = new byte[frameSize - 1]; id3Data.readBytes(data, 0, frameSize - 1); int mimeTypeEndIndex = indexOfZeroByte(data, 0); - String mimeType = new String(data, 0, mimeTypeEndIndex, "ISO-8859-1"); + String mimeType = new String(data, 0, mimeTypeEndIndex, Charsets.ISO_8859_1); int filenameStartIndex = mimeTypeEndIndex + 1; - int filenameEndIndex = indexOfEos(data, filenameStartIndex, encoding); + int filenameEndIndex = indexOfTerminator(data, filenameStartIndex, encoding); String filename = decodeStringIfValid(data, filenameStartIndex, filenameEndIndex, charset); int descriptionStartIndex = filenameEndIndex + delimiterLength(encoding); - int descriptionEndIndex = indexOfEos(data, descriptionStartIndex, encoding); + int descriptionEndIndex = indexOfTerminator(data, descriptionStartIndex, encoding); String description = decodeStringIfValid(data, descriptionStartIndex, descriptionEndIndex, charset); @@ -525,10 +571,10 @@ private static GeobFrame decodeGeobFrame(ParsableByteArray id3Data, int frameSiz return new GeobFrame(mimeType, filename, description, objectData); } - private static ApicFrame decodeApicFrame(ParsableByteArray id3Data, int frameSize, - int majorVersion) throws UnsupportedEncodingException { + private static ApicFrame decodeApicFrame( + ParsableByteArray id3Data, int frameSize, int majorVersion) { int encoding = id3Data.readUnsignedByte(); - String charset = getCharsetName(encoding); + Charset charset = getCharset(encoding); byte[] data = new byte[frameSize - 1]; id3Data.readBytes(data, 0, frameSize - 1); @@ -537,13 +583,13 @@ private static ApicFrame decodeApicFrame(ParsableByteArray id3Data, int frameSiz int mimeTypeEndIndex; if (majorVersion == 2) { mimeTypeEndIndex = 2; - mimeType = "image/" + Util.toLowerInvariant(new String(data, 0, 3, "ISO-8859-1")); + mimeType = "image/" + Ascii.toLowerCase(new String(data, 0, 3, Charsets.ISO_8859_1)); if ("image/jpg".equals(mimeType)) { mimeType = "image/jpeg"; } } else { mimeTypeEndIndex = indexOfZeroByte(data, 0); - mimeType = Util.toLowerInvariant(new String(data, 0, mimeTypeEndIndex, "ISO-8859-1")); + mimeType = Ascii.toLowerCase(new String(data, 0, mimeTypeEndIndex, Charsets.ISO_8859_1)); if (mimeType.indexOf('/') == -1) { mimeType = "image/" + mimeType; } @@ -552,9 +598,10 @@ private static ApicFrame decodeApicFrame(ParsableByteArray id3Data, int frameSiz int pictureType = data[mimeTypeEndIndex + 1] & 0xFF; int descriptionStartIndex = mimeTypeEndIndex + 2; - int descriptionEndIndex = indexOfEos(data, descriptionStartIndex, encoding); - String description = new String(data, descriptionStartIndex, - descriptionEndIndex - descriptionStartIndex, charset); + int descriptionEndIndex = indexOfTerminator(data, descriptionStartIndex, encoding); + String description = + new String( + data, descriptionStartIndex, descriptionEndIndex - descriptionStartIndex, charset); int pictureDataStartIndex = descriptionEndIndex + delimiterLength(encoding); byte[] pictureData = copyOfRangeIfValid(data, pictureDataStartIndex, data.length); @@ -563,15 +610,14 @@ private static ApicFrame 
decodeApicFrame(ParsableByteArray id3Data, int frameSiz } @Nullable - private static CommentFrame decodeCommentFrame(ParsableByteArray id3Data, int frameSize) - throws UnsupportedEncodingException { + private static CommentFrame decodeCommentFrame(ParsableByteArray id3Data, int frameSize) { if (frameSize < 4) { // Frame is malformed. return null; } int encoding = id3Data.readUnsignedByte(); - String charset = getCharsetName(encoding); + Charset charset = getCharset(encoding); byte[] data = new byte[3]; id3Data.readBytes(data, 0, 3); @@ -580,11 +626,11 @@ private static CommentFrame decodeCommentFrame(ParsableByteArray id3Data, int fr data = new byte[frameSize - 4]; id3Data.readBytes(data, 0, frameSize - 4); - int descriptionEndIndex = indexOfEos(data, 0, encoding); + int descriptionEndIndex = indexOfTerminator(data, 0, encoding); String description = new String(data, 0, descriptionEndIndex, charset); int textStartIndex = descriptionEndIndex + delimiterLength(encoding); - int textEndIndex = indexOfEos(data, textStartIndex, encoding); + int textEndIndex = indexOfTerminator(data, textStartIndex, encoding); String text = decodeStringIfValid(data, textStartIndex, textEndIndex, charset); return new CommentFrame(language, description, text); @@ -596,12 +642,15 @@ private static ChapterFrame decodeChapterFrame( int majorVersion, boolean unsignedIntFrameSizeHack, int frameHeaderSize, - @Nullable FramePredicate framePredicate) - throws UnsupportedEncodingException { + @Nullable FramePredicate framePredicate) { int framePosition = id3Data.getPosition(); - int chapterIdEndIndex = indexOfZeroByte(id3Data.data, framePosition); - String chapterId = new String(id3Data.data, framePosition, chapterIdEndIndex - framePosition, - "ISO-8859-1"); + int chapterIdEndIndex = indexOfZeroByte(id3Data.getData(), framePosition); + String chapterId = + new String( + id3Data.getData(), + framePosition, + chapterIdEndIndex - framePosition, + Charsets.ISO_8859_1); id3Data.setPosition(chapterIdEndIndex + 1); int startTime = id3Data.readInt(); @@ -618,15 +667,15 @@ private static ChapterFrame decodeChapterFrame( ArrayList subFrames = new ArrayList<>(); int limit = framePosition + frameSize; while (id3Data.getPosition() < limit) { - Id3Frame frame = decodeFrame(majorVersion, id3Data, unsignedIntFrameSizeHack, - frameHeaderSize, framePredicate); + Id3Frame frame = + decodeFrame( + majorVersion, id3Data, unsignedIntFrameSizeHack, frameHeaderSize, framePredicate); if (frame != null) { subFrames.add(frame); } } - Id3Frame[] subFrameArray = new Id3Frame[subFrames.size()]; - subFrames.toArray(subFrameArray); + Id3Frame[] subFrameArray = subFrames.toArray(new Id3Frame[0]); return new ChapterFrame(chapterId, startTime, endTime, startOffset, endOffset, subFrameArray); } @@ -636,12 +685,15 @@ private static ChapterTocFrame decodeChapterTOCFrame( int majorVersion, boolean unsignedIntFrameSizeHack, int frameHeaderSize, - @Nullable FramePredicate framePredicate) - throws UnsupportedEncodingException { + @Nullable FramePredicate framePredicate) { int framePosition = id3Data.getPosition(); - int elementIdEndIndex = indexOfZeroByte(id3Data.data, framePosition); - String elementId = new String(id3Data.data, framePosition, elementIdEndIndex - framePosition, - "ISO-8859-1"); + int elementIdEndIndex = indexOfZeroByte(id3Data.getData(), framePosition); + String elementId = + new String( + id3Data.getData(), + framePosition, + elementIdEndIndex - framePosition, + Charsets.ISO_8859_1); id3Data.setPosition(elementIdEndIndex + 1); int ctocFlags = 
id3Data.readUnsignedByte(); @@ -652,23 +704,25 @@ private static ChapterTocFrame decodeChapterTOCFrame( String[] children = new String[childCount]; for (int i = 0; i < childCount; i++) { int startIndex = id3Data.getPosition(); - int endIndex = indexOfZeroByte(id3Data.data, startIndex); - children[i] = new String(id3Data.data, startIndex, endIndex - startIndex, "ISO-8859-1"); + int endIndex = indexOfZeroByte(id3Data.getData(), startIndex); + children[i] = + new String(id3Data.getData(), startIndex, endIndex - startIndex, Charsets.ISO_8859_1); id3Data.setPosition(endIndex + 1); } ArrayList subFrames = new ArrayList<>(); int limit = framePosition + frameSize; while (id3Data.getPosition() < limit) { - Id3Frame frame = decodeFrame(majorVersion, id3Data, unsignedIntFrameSizeHack, - frameHeaderSize, framePredicate); + @Nullable + Id3Frame frame = + decodeFrame( + majorVersion, id3Data, unsignedIntFrameSizeHack, frameHeaderSize, framePredicate); if (frame != null) { subFrames.add(frame); } } - Id3Frame[] subFrameArray = new Id3Frame[subFrames.size()]; - subFrames.toArray(subFrameArray); + Id3Frame[] subFrameArray = subFrames.toArray(new Id3Frame[0]); return new ChapterTocFrame(elementId, isRoot, isOrdered, children, subFrameArray); } @@ -702,8 +756,8 @@ private static MlltFrame decodeMlltFrame(ParsableByteArray id3Data, int frameSiz millisecondsDeviations); } - private static BinaryFrame decodeBinaryFrame(ParsableByteArray id3Data, int frameSize, - String id) { + private static BinaryFrame decodeBinaryFrame( + ParsableByteArray id3Data, int frameSize, String id) { byte[] frame = new byte[frameSize]; id3Data.readBytes(frame, 0, frameSize); @@ -711,15 +765,15 @@ private static BinaryFrame decodeBinaryFrame(ParsableByteArray id3Data, int fram } /** - * Performs in-place removal of unsynchronization for {@code length} bytes starting from - * {@link ParsableByteArray#getPosition()} + * Performs in-place removal of unsynchronization for {@code length} bytes starting from {@link + * ParsableByteArray#getPosition()} * * @param data Contains the data to be processed. * @param length The length of the data to be processed. * @return The length of the data after processing. */ private static int removeUnsynchronization(ParsableByteArray data, int length) { - byte[] bytes = data.data; + byte[] bytes = data.getData(); int startPosition = data.getPosition(); for (int i = startPosition; i + 1 < startPosition + length; i++) { if ((bytes[i] & 0xFF) == 0xFF && bytes[i + 1] == 0x00) { @@ -731,33 +785,29 @@ private static int removeUnsynchronization(ParsableByteArray data, int length) { return length; } - /** - * Maps encoding byte from ID3v2 frame to a Charset. - * - * @param encodingByte The value of encoding byte from ID3v2 frame. - * @return Charset name. - */ - private static String getCharsetName(int encodingByte) { + /** Maps encoding byte from ID3v2 frame to a {@link Charset}. */ + private static Charset getCharset(int encodingByte) { switch (encodingByte) { case ID3_TEXT_ENCODING_UTF_16: - return "UTF-16"; + return Charsets.UTF_16; case ID3_TEXT_ENCODING_UTF_16BE: - return "UTF-16BE"; + return Charsets.UTF_16BE; case ID3_TEXT_ENCODING_UTF_8: - return "UTF-8"; + return Charsets.UTF_8; case ID3_TEXT_ENCODING_ISO_8859_1: default: - return "ISO-8859-1"; + return Charsets.ISO_8859_1; } } - private static String getFrameId(int majorVersion, int frameId0, int frameId1, int frameId2, - int frameId3) { - return majorVersion == 2 ? 
String.format(Locale.US, "%c%c%c", frameId0, frameId1, frameId2) + private static String getFrameId( + int majorVersion, int frameId0, int frameId1, int frameId2, int frameId3) { + return majorVersion == 2 + ? String.format(Locale.US, "%c%c%c", frameId0, frameId1, frameId2) : String.format(Locale.US, "%c%c%c%c", frameId0, frameId1, frameId2, frameId3); } - private static int indexOfEos(byte[] data, int fromIndex, int encoding) { + private static int indexOfTerminator(byte[] data, int fromIndex, int encoding) { int terminationPos = indexOfZeroByte(data, fromIndex); // For single byte encoding charsets, we're done. @@ -765,9 +815,9 @@ private static int indexOfEos(byte[] data, int fromIndex, int encoding) { return terminationPos; } - // Otherwise ensure an even index and look for a second zero byte. + // Otherwise ensure an even offset from the start, and look for a second zero byte. while (terminationPos < data.length - 1) { - if (terminationPos % 2 == 0 && data[terminationPos + 1] == (byte) 0) { + if ((terminationPos - fromIndex) % 2 == 0 && data[terminationPos + 1] == (byte) 0) { return terminationPos; } terminationPos = indexOfZeroByte(data, terminationPos + 1); @@ -787,7 +837,8 @@ private static int indexOfZeroByte(byte[] data, int fromIndex) { private static int delimiterLength(int encodingByte) { return (encodingByte == ID3_TEXT_ENCODING_ISO_8859_1 || encodingByte == ID3_TEXT_ENCODING_UTF_8) - ? 1 : 2; + ? 1 + : 2; } /** @@ -808,21 +859,19 @@ private static byte[] copyOfRangeIfValid(byte[] data, int from, int to) { /** * Returns a string obtained by decoding the specified range of {@code data} using the specified - * {@code charsetName}. An empty string is returned if the range is invalid. + * {@code charset}. An empty string is returned if the range is invalid. * * @param data The array from which to decode the string. * @param from The start of the range. * @param to The end of the range (exclusive). - * @param charsetName The name of the Charset to use. + * @param charset The {@link Charset} to use. * @return The decoded string, or an empty string if the range is invalid. - * @throws UnsupportedEncodingException If the Charset is not supported. */ - private static String decodeStringIfValid(byte[] data, int from, int to, String charsetName) - throws UnsupportedEncodingException { + private static String decodeStringIfValid(byte[] data, int from, int to, Charset charset) { if (to <= from || to > data.length) { return ""; } - return new String(data, from, to - from, charsetName); + return new String(data, from, to - from, charset); } private static final class Id3Header { @@ -836,7 +885,5 @@ public Id3Header(int majorVersion, boolean isUnsynchronized, int framesSize) { this.isUnsynchronized = isUnsynchronized; this.framesSize = framesSize; } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/Id3Frame.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/Id3Frame.java index 27ea833deb..e54de84f58 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/Id3Frame.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/Id3Frame.java @@ -17,14 +17,10 @@ import com.google.android.exoplayer2.metadata.Metadata; -/** - * Base class for ID3 frames. - */ +/** Base class for ID3 frames. */ public abstract class Id3Frame implements Metadata.Entry { - /** - * The frame ID. - */ + /** The frame ID. 
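For reference, the charset selection and terminator scan introduced above can be read in isolation. The sketch below mirrors that logic using java.nio's StandardCharsets instead of Guava's Charsets; the class and method names are illustrative, and the encoding-byte values 0–3 are the ones defined by ID3v2.

```java
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;

// Standalone sketch of the charset mapping and string-terminator scan used by the
// ID3 decoder above. Not part of the patch; names are illustrative.
final class Id3TextSketch {

  static Charset getCharset(int encodingByte) {
    switch (encodingByte) {
      case 1: return StandardCharsets.UTF_16;    // UTF-16 with BOM
      case 2: return StandardCharsets.UTF_16BE;  // UTF-16 big-endian, no BOM
      case 3: return StandardCharsets.UTF_8;
      case 0:
      default: return StandardCharsets.ISO_8859_1;
    }
  }

  // ISO-8859-1 and UTF-8 strings end at the first zero byte; UTF-16 strings end at
  // a zero-byte pair that starts at an even offset from the start of the string.
  static int indexOfTerminator(byte[] data, int fromIndex, int encodingByte) {
    int pos = indexOfZeroByte(data, fromIndex);
    if (encodingByte == 0 || encodingByte == 3) {
      return pos;
    }
    while (pos < data.length - 1) {
      if ((pos - fromIndex) % 2 == 0 && data[pos + 1] == 0) {
        return pos;
      }
      pos = indexOfZeroByte(data, pos + 1);
    }
    return data.length;
  }

  private static int indexOfZeroByte(byte[] data, int fromIndex) {
    for (int i = fromIndex; i < data.length; i++) {
      if (data[i] == 0) {
        return i;
      }
    }
    return data.length;
  }
}
```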
*/ public final String id; public Id3Frame(String id) { @@ -40,5 +36,4 @@ public String toString() { public int describeContents() { return 0; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/MlltFrame.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/MlltFrame.java index 0cdd2e038e..c5f0542795 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/MlltFrame.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/MlltFrame.java @@ -45,8 +45,7 @@ public MlltFrame( this.millisecondsDeviations = millisecondsDeviations; } - /* package */ - MlltFrame(Parcel in) { + /* package */ MlltFrame(Parcel in) { super(ID); this.mpegFramesBetweenReference = in.readInt(); this.bytesBetweenReference = in.readInt(); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/PrivFrame.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/PrivFrame.java index 6e53485453..567785ac16 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/PrivFrame.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/PrivFrame.java @@ -23,9 +23,7 @@ import com.google.android.exoplayer2.util.Util; import java.util.Arrays; -/** - * PRIV (Private) ID3 frame. - */ +/** PRIV (Private) ID3 frame. */ public final class PrivFrame extends Id3Frame { public static final String ID = "PRIV"; @@ -77,18 +75,17 @@ public void writeToParcel(Parcel dest, int flags) { dest.writeByteArray(privateData); } - public static final Parcelable.Creator CREATOR = new Parcelable.Creator() { - - @Override - public PrivFrame createFromParcel(Parcel in) { - return new PrivFrame(in); - } - - @Override - public PrivFrame[] newArray(int size) { - return new PrivFrame[size]; - } + public static final Parcelable.Creator CREATOR = + new Parcelable.Creator() { - }; + @Override + public PrivFrame createFromParcel(Parcel in) { + return new PrivFrame(in); + } + @Override + public PrivFrame[] newArray(int size) { + return new PrivFrame[size]; + } + }; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/TextInformationFrame.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/TextInformationFrame.java index 8337911c0d..8b46b1a56c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/TextInformationFrame.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/TextInformationFrame.java @@ -15,31 +15,166 @@ */ package com.google.android.exoplayer2.metadata.id3; -import static com.google.android.exoplayer2.util.Util.castNonNull; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; import android.os.Parcel; import android.os.Parcelable; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.MediaMetadata; import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import com.google.errorprone.annotations.InlineMe; +import java.util.ArrayList; +import java.util.List; -/** - * Text information ID3 frame. - */ +/** Text information ID3 frame. 
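The compatibility pattern adopted by TextInformationFrame below — a non-empty list of values plus a deprecated single-value field aliased to the first element — can be sketched independently of ExoPlayer's types; the class name here is illustrative.

```java
import com.google.common.collect.ImmutableList;
import java.util.List;

// Minimal sketch of keeping a deprecated single-value field as a view of the first
// element of an immutable list. Illustrative only; not a class from the patch.
final class MultiValueField {

  final ImmutableList<String> values;

  /** @deprecated Use the first element of {@link #values} instead. */
  @Deprecated final String value;

  MultiValueField(List<String> values) {
    if (values.isEmpty()) {
      throw new IllegalArgumentException("values must not be empty");
    }
    this.values = ImmutableList.copyOf(values);
    this.value = this.values.get(0);
  }
}
```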
*/ public final class TextInformationFrame extends Id3Frame { @Nullable public final String description; - public final String value; - public TextInformationFrame(String id, @Nullable String description, String value) { + /** + * @deprecated Use the first element of {@link #values} instead. + */ + @Deprecated public final String value; + + /** The text values of this frame. Will always have at least one element. */ + public final ImmutableList values; + + public TextInformationFrame(String id, @Nullable String description, List values) { super(id); + checkArgument(!values.isEmpty()); + this.description = description; - this.value = value; + this.values = ImmutableList.copyOf(values); + this.value = this.values.get(0); + } + + /** + * @deprecated Use {@code TextInformationFrame(String id, String description, String[] values} + * instead + */ + @Deprecated + @InlineMe( + replacement = "this(id, description, ImmutableList.of(value))", + imports = "com.google.common.collect.ImmutableList") + public TextInformationFrame(String id, @Nullable String description, String value) { + this(id, description, ImmutableList.of(value)); + } + + private TextInformationFrame(Parcel in) { + this( + checkNotNull(in.readString()), + in.readString(), + ImmutableList.copyOf(checkNotNull(in.createStringArray()))); } - /* package */ TextInformationFrame(Parcel in) { - super(castNonNull(in.readString())); - description = in.readString(); - value = castNonNull(in.readString()); + /** + * Uses the first element in {@link #values} to set the relevant field in {@link MediaMetadata} + * (as determined by {@link #id}). + */ + @Override + public void populateMediaMetadata(MediaMetadata.Builder builder) { + switch (id) { + case "TT2": + case "TIT2": + builder.setTitle(values.get(0)); + break; + case "TP1": + case "TPE1": + builder.setArtist(values.get(0)); + break; + case "TP2": + case "TPE2": + builder.setAlbumArtist(values.get(0)); + break; + case "TAL": + case "TALB": + builder.setAlbumTitle(values.get(0)); + break; + case "TRK": + case "TRCK": + String[] trackNumbers = Util.split(values.get(0), "/"); + try { + int trackNumber = Integer.parseInt(trackNumbers[0]); + @Nullable + Integer totalTrackCount = + trackNumbers.length > 1 ? Integer.parseInt(trackNumbers[1]) : null; + builder.setTrackNumber(trackNumber).setTotalTrackCount(totalTrackCount); + } catch (NumberFormatException e) { + // Do nothing, invalid input. + } + break; + case "TYE": + case "TYER": + try { + builder.setRecordingYear(Integer.parseInt(values.get(0))); + } catch (NumberFormatException e) { + // Do nothing, invalid input. + } + break; + case "TDA": + case "TDAT": + try { + String date = values.get(0); + int month = Integer.parseInt(date.substring(2, 4)); + int day = Integer.parseInt(date.substring(0, 2)); + builder.setRecordingMonth(month).setRecordingDay(day); + } catch (NumberFormatException | StringIndexOutOfBoundsException e) { + // Do nothing, invalid input. + } + break; + case "TDRC": + List recordingDate = parseId3v2point4TimestampFrameForDate(values.get(0)); + switch (recordingDate.size()) { + case 3: + builder.setRecordingDay(recordingDate.get(2)); + // fall through + case 2: + builder.setRecordingMonth(recordingDate.get(1)); + // fall through + case 1: + builder.setRecordingYear(recordingDate.get(0)); + // fall through + break; + default: + // Do nothing. 
+ break; + } + break; + case "TDRL": + List releaseDate = parseId3v2point4TimestampFrameForDate(values.get(0)); + switch (releaseDate.size()) { + case 3: + builder.setReleaseDay(releaseDate.get(2)); + // fall through + case 2: + builder.setReleaseMonth(releaseDate.get(1)); + // fall through + case 1: + builder.setReleaseYear(releaseDate.get(0)); + // fall through + break; + default: + // Do nothing. + break; + } + break; + case "TCM": + case "TCOM": + builder.setComposer(values.get(0)); + break; + case "TP3": + case "TPE3": + builder.setConductor(values.get(0)); + break; + case "TXT": + case "TEXT": + builder.setWriter(values.get(0)); + break; + default: + break; + } } @Override @@ -51,8 +186,9 @@ public boolean equals(@Nullable Object obj) { return false; } TextInformationFrame other = (TextInformationFrame) obj; - return id.equals(other.id) && Util.areEqual(description, other.description) - && Util.areEqual(value, other.value); + return Util.areEqual(id, other.id) + && Util.areEqual(description, other.description) + && values.equals(other.values); } @Override @@ -60,13 +196,13 @@ public int hashCode() { int result = 17; result = 31 * result + id.hashCode(); result = 31 * result + (description != null ? description.hashCode() : 0); - result = 31 * result + (value != null ? value.hashCode() : 0); + result = 31 * result + values.hashCode(); return result; } @Override public String toString() { - return id + ": description=" + description + ": value=" + value; + return id + ": description=" + description + ": values=" + values; } // Parcelable implementation. @@ -75,7 +211,7 @@ public String toString() { public void writeToParcel(Parcel dest, int flags) { dest.writeString(id); dest.writeString(description); - dest.writeString(value); + dest.writeStringArray(values.toArray(new String[0])); } public static final Parcelable.Creator CREATOR = @@ -90,7 +226,29 @@ public TextInformationFrame createFromParcel(Parcel in) { public TextInformationFrame[] newArray(int size) { return new TextInformationFrame[size]; } - }; + // Private methods + + private static List parseId3v2point4TimestampFrameForDate(String value) { + // Timestamp string format is ISO-8601, can be `yyyy-MM-ddTHH:mm:ss`, or reduced precision + // at each point, for example `yyyy-MM` or `yyyy-MM-ddTHH:mm`. + List dates = new ArrayList<>(); + try { + if (value.length() >= 10) { + dates.add(Integer.parseInt(value.substring(0, 4))); + dates.add(Integer.parseInt(value.substring(5, 7))); + dates.add(Integer.parseInt(value.substring(8, 10))); + } else if (value.length() >= 7) { + dates.add(Integer.parseInt(value.substring(0, 4))); + dates.add(Integer.parseInt(value.substring(5, 7))); + } else if (value.length() >= 4) { + dates.add(Integer.parseInt(value.substring(0, 4))); + } + } catch (NumberFormatException e) { + // Invalid output, return. + return new ArrayList<>(); + } + return dates; + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/UrlLinkFrame.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/UrlLinkFrame.java index 298558b662..6648d852fa 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/UrlLinkFrame.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/id3/UrlLinkFrame.java @@ -22,9 +22,7 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.util.Util; -/** - * Url link ID3 frame. - */ +/** Url link ID3 frame. 
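The string handling behind the new populateMediaMetadata mapping reduces to two small parsers: TRCK values of the form "track" or "track/total", and ID3v2.4 timestamps at reduced precision (yyyy, yyyy-MM, or yyyy-MM-dd, optionally followed by a time part). A standalone sketch with illustrative names:

```java
import java.util.ArrayList;
import java.util.List;

// Standalone sketch of the two parsing rules used by populateMediaMetadata above.
// Not part of the patch; names are illustrative.
final class Id3ValueParsing {

  /** Returns {track, total} for "3/12", {track} for "3", or an empty array if malformed. */
  static int[] parseTrck(String value) {
    String[] parts = value.split("/");
    try {
      int track = Integer.parseInt(parts[0]);
      return parts.length > 1 ? new int[] {track, Integer.parseInt(parts[1])} : new int[] {track};
    } catch (NumberFormatException e) {
      return new int[0]; // malformed values are ignored, as in the decoder
    }
  }

  /** Returns [year], [year, month] or [year, month, day] depending on the timestamp precision. */
  static List<Integer> parseId3v24Timestamp(String value) {
    List<Integer> date = new ArrayList<>();
    try {
      if (value.length() >= 10) {
        date.add(Integer.parseInt(value.substring(0, 4)));
        date.add(Integer.parseInt(value.substring(5, 7)));
        date.add(Integer.parseInt(value.substring(8, 10)));
      } else if (value.length() >= 7) {
        date.add(Integer.parseInt(value.substring(0, 4)));
        date.add(Integer.parseInt(value.substring(5, 7)));
      } else if (value.length() >= 4) {
        date.add(Integer.parseInt(value.substring(0, 4)));
      }
    } catch (NumberFormatException e) {
      return new ArrayList<>(); // invalid timestamp: contribute nothing
    }
    return date;
  }

  public static void main(String[] args) {
    System.out.println(java.util.Arrays.toString(parseTrck("3/12"))); // [3, 12]
    System.out.println(parseId3v24Timestamp("2020-07-04T12:30:00"));  // [2020, 7, 4]
    System.out.println(parseId3v24Timestamp("1999-11"));              // [1999, 11]
  }
}
```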
*/ public final class UrlLinkFrame extends Id3Frame { @Nullable public final String description; @@ -51,7 +49,8 @@ public boolean equals(@Nullable Object obj) { return false; } UrlLinkFrame other = (UrlLinkFrame) obj; - return id.equals(other.id) && Util.areEqual(description, other.description) + return id.equals(other.id) + && Util.areEqual(description, other.description) && Util.areEqual(url, other.url); } @@ -90,7 +89,5 @@ public UrlLinkFrame createFromParcel(Parcel in) { public UrlLinkFrame[] newArray(int size) { return new UrlLinkFrame[size]; } - }; - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/MdtaMetadataEntry.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/mp4/MdtaMetadataEntry.java similarity index 92% rename from TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/MdtaMetadataEntry.java rename to TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/mp4/MdtaMetadataEntry.java index e50fbd54f7..5b2db4945c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/mp4/MdtaMetadataEntry.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/mp4/MdtaMetadataEntry.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.google.android.exoplayer2.extractor.mp4; +package com.google.android.exoplayer2.metadata.mp4; import android.os.Parcel; import android.os.Parcelable; @@ -28,6 +28,9 @@ */ public final class MdtaMetadataEntry implements Metadata.Entry { + /** Key for the capture frame rate (in frames per second). */ + public static final String KEY_ANDROID_CAPTURE_FPS = "com.android.capture.fps"; + /** The metadata key name. */ public final String key; /** The payload. The interpretation of the value depends on {@link #typeIndicator}. */ @@ -47,8 +50,7 @@ public MdtaMetadataEntry(String key, byte[] value, int localeIndicator, int type private MdtaMetadataEntry(Parcel in) { key = Util.castNonNull(in.readString()); - value = new byte[in.readInt()]; - in.readByteArray(value); + value = Util.castNonNull(in.createByteArray()); localeIndicator = in.readInt(); typeIndicator = in.readInt(); } @@ -88,7 +90,6 @@ public String toString() { @Override public void writeToParcel(Parcel dest, int flags) { dest.writeString(key); - dest.writeInt(value.length); dest.writeByteArray(value); dest.writeInt(localeIndicator); dest.writeInt(typeIndicator); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/mp4/MotionPhotoMetadata.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/mp4/MotionPhotoMetadata.java new file mode 100644 index 0000000000..b547ad67f6 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/mp4/MotionPhotoMetadata.java @@ -0,0 +1,134 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
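The MdtaMetadataEntry change below relies on Parcel#writeByteArray encoding the array length itself, so the explicit writeInt(value.length)/readInt() pair can be dropped and the reader can use Parcel#createByteArray. A minimal illustrative Parcelable showing the same round trip (not a class from the patch):

```java
import android.os.Parcel;
import android.os.Parcelable;

// Sketch of the parcelling pattern: writeByteArray stores the length, and
// createByteArray reads it back, so no separate length field is needed.
final class BytesEntry implements Parcelable {

  final String key;
  final byte[] value;

  BytesEntry(String key, byte[] value) {
    this.key = key;
    this.value = value;
  }

  private BytesEntry(Parcel in) {
    key = in.readString();
    value = in.createByteArray(); // length is encoded by writeByteArray
  }

  @Override
  public void writeToParcel(Parcel dest, int flags) {
    dest.writeString(key);
    dest.writeByteArray(value); // no separate length field required
  }

  @Override
  public int describeContents() {
    return 0;
  }

  public static final Creator<BytesEntry> CREATOR =
      new Creator<BytesEntry>() {
        @Override
        public BytesEntry createFromParcel(Parcel in) {
          return new BytesEntry(in);
        }

        @Override
        public BytesEntry[] newArray(int size) {
          return new BytesEntry[size];
        }
      };
}
```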
+ */ + +package com.google.android.exoplayer2.metadata.mp4; + +import android.os.Parcel; +import android.os.Parcelable; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.common.primitives.Longs; + +/** Metadata of a motion photo file. */ +public final class MotionPhotoMetadata implements Metadata.Entry { + + /** The start offset of the photo data, in bytes. */ + public final long photoStartPosition; + /** The size of the photo data, in bytes. */ + public final long photoSize; + /** + * The presentation timestamp of the photo, in microseconds, or {@link C#TIME_UNSET} if unknown. + */ + public final long photoPresentationTimestampUs; + /** The start offset of the video data, in bytes. */ + public final long videoStartPosition; + /** The size of the video data, in bytes. */ + public final long videoSize; + + /** Creates an instance. */ + public MotionPhotoMetadata( + long photoStartPosition, + long photoSize, + long photoPresentationTimestampUs, + long videoStartPosition, + long videoSize) { + this.photoStartPosition = photoStartPosition; + this.photoSize = photoSize; + this.photoPresentationTimestampUs = photoPresentationTimestampUs; + this.videoStartPosition = videoStartPosition; + this.videoSize = videoSize; + } + + private MotionPhotoMetadata(Parcel in) { + photoStartPosition = in.readLong(); + photoSize = in.readLong(); + photoPresentationTimestampUs = in.readLong(); + videoStartPosition = in.readLong(); + videoSize = in.readLong(); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + MotionPhotoMetadata other = (MotionPhotoMetadata) obj; + return photoStartPosition == other.photoStartPosition + && photoSize == other.photoSize + && photoPresentationTimestampUs == other.photoPresentationTimestampUs + && videoStartPosition == other.videoStartPosition + && videoSize == other.videoSize; + } + + @Override + public int hashCode() { + int result = 17; + result = 31 * result + Longs.hashCode(photoStartPosition); + result = 31 * result + Longs.hashCode(photoSize); + result = 31 * result + Longs.hashCode(photoPresentationTimestampUs); + result = 31 * result + Longs.hashCode(videoStartPosition); + result = 31 * result + Longs.hashCode(videoSize); + return result; + } + + @Override + public String toString() { + return "Motion photo metadata: photoStartPosition=" + + photoStartPosition + + ", photoSize=" + + photoSize + + ", photoPresentationTimestampUs=" + + photoPresentationTimestampUs + + ", videoStartPosition=" + + videoStartPosition + + ", videoSize=" + + videoSize; + } + + // Parcelable implementation. 
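The MotionPhotoMetadata fields introduced above are plain byte offsets and sizes into the motion photo container. As one possible, purely hypothetical consumer, a caller could slice out the still image or the embedded video directly:

```java
import java.io.IOException;
import java.io.RandomAccessFile;

// Illustrative only: given the offsets/sizes carried by MotionPhotoMetadata, the
// photo and video portions are contiguous byte ranges of the container file.
// This helper is an assumption about how the metadata might be used; it is not
// part of the patch.
final class MotionPhotoSlicer {

  /** Reads {@code size} bytes starting at {@code startPosition} from the file at {@code path}. */
  static byte[] readRange(String path, long startPosition, long size) throws IOException {
    if (size < 0 || size > Integer.MAX_VALUE) {
      throw new IllegalArgumentException("Unsupported range size: " + size);
    }
    try (RandomAccessFile file = new RandomAccessFile(path, "r")) {
      byte[] out = new byte[(int) size];
      file.seek(startPosition);
      file.readFully(out);
      return out;
    }
  }
}
```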
+ + @Override + public void writeToParcel(Parcel dest, int flags) { + dest.writeLong(photoStartPosition); + dest.writeLong(photoSize); + dest.writeLong(photoPresentationTimestampUs); + dest.writeLong(videoStartPosition); + dest.writeLong(videoSize); + } + + @Override + public int describeContents() { + return 0; + } + + public static final Parcelable.Creator CREATOR = + new Parcelable.Creator() { + + @Override + public MotionPhotoMetadata createFromParcel(Parcel in) { + return new MotionPhotoMetadata(in); + } + + @Override + public MotionPhotoMetadata[] newArray(int size) { + return new MotionPhotoMetadata[size]; + } + }; +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/mp4/SlowMotionData.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/mp4/SlowMotionData.java new file mode 100644 index 0000000000..ae8698b66a --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/mp4/SlowMotionData.java @@ -0,0 +1,201 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.metadata.mp4; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import android.os.Parcel; +import android.os.Parcelable; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Objects; +import com.google.common.collect.ComparisonChain; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; + +/** Holds information about the segments of slow motion playback within a track. */ +public final class SlowMotionData implements Metadata.Entry { + + /** Holds information about a single segment of slow motion playback within a track. */ + public static final class Segment implements Parcelable { + + public static final Comparator BY_START_THEN_END_THEN_DIVISOR = + (s1, s2) -> + ComparisonChain.start() + .compare(s1.startTimeMs, s2.startTimeMs) + .compare(s1.endTimeMs, s2.endTimeMs) + .compare(s1.speedDivisor, s2.speedDivisor) + .result(); + + /** The start time, in milliseconds, of the track segment that is intended to be slow motion. */ + public final long startTimeMs; + /** The end time, in milliseconds, of the track segment that is intended to be slow motion. */ + public final long endTimeMs; + /** + * The speed reduction factor. + * + *
<p>
      For example, 4 would mean the segment should be played at a quarter (1/4) of the normal + * speed. + */ + public final int speedDivisor; + + /** + * Creates an instance. + * + * @param startTimeMs See {@link #startTimeMs}. Must be less than endTimeMs. + * @param endTimeMs See {@link #endTimeMs}. + * @param speedDivisor See {@link #speedDivisor}. + */ + public Segment(long startTimeMs, long endTimeMs, int speedDivisor) { + checkArgument(startTimeMs < endTimeMs); + this.startTimeMs = startTimeMs; + this.endTimeMs = endTimeMs; + this.speedDivisor = speedDivisor; + } + + @Override + public String toString() { + return Util.formatInvariant( + "Segment: startTimeMs=%d, endTimeMs=%d, speedDivisor=%d", + startTimeMs, endTimeMs, speedDivisor); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Segment segment = (Segment) o; + return startTimeMs == segment.startTimeMs + && endTimeMs == segment.endTimeMs + && speedDivisor == segment.speedDivisor; + } + + @Override + public int hashCode() { + return Objects.hashCode(startTimeMs, endTimeMs, speedDivisor); + } + + @Override + public int describeContents() { + return 0; + } + + @Override + public void writeToParcel(Parcel dest, int flags) { + dest.writeLong(startTimeMs); + dest.writeLong(endTimeMs); + dest.writeInt(speedDivisor); + } + + public static final Creator CREATOR = + new Creator() { + + @Override + public Segment createFromParcel(Parcel in) { + long startTimeMs = in.readLong(); + long endTimeMs = in.readLong(); + int speedDivisor = in.readInt(); + return new Segment(startTimeMs, endTimeMs, speedDivisor); + } + + @Override + public Segment[] newArray(int size) { + return new Segment[size]; + } + }; + } + + public final List segments; + + /** + * Creates an instance with a list of {@link Segment}s. + * + *
<p>
      The segments must not overlap, that is that the start time of a segment can not be between + * the start and end time of another segment. + */ + public SlowMotionData(List segments) { + this.segments = segments; + checkArgument(!doSegmentsOverlap(segments)); + } + + @Override + public String toString() { + return "SlowMotion: segments=" + segments; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SlowMotionData that = (SlowMotionData) o; + return segments.equals(that.segments); + } + + @Override + public int hashCode() { + return segments.hashCode(); + } + + @Override + public int describeContents() { + return 0; + } + + @Override + public void writeToParcel(Parcel dest, int flags) { + dest.writeList(segments); + } + + public static final Creator CREATOR = + new Creator() { + @Override + public SlowMotionData createFromParcel(Parcel in) { + List slowMotionSegments = new ArrayList<>(); + in.readList(slowMotionSegments, Segment.class.getClassLoader()); + return new SlowMotionData(slowMotionSegments); + } + + @Override + public SlowMotionData[] newArray(int size) { + return new SlowMotionData[size]; + } + }; + + private static boolean doSegmentsOverlap(List segments) { + if (segments.isEmpty()) { + return false; + } + long previousEndTimeMs = segments.get(0).endTimeMs; + for (int i = 1; i < segments.size(); i++) { + if (segments.get(i).startTimeMs < previousEndTimeMs) { + return true; + } + previousEndTimeMs = segments.get(i).endTimeMs; + } + + return false; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/mp4/SmtaMetadataEntry.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/mp4/SmtaMetadataEntry.java new file mode 100644 index 0000000000..6654a9dbb6 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/mp4/SmtaMetadataEntry.java @@ -0,0 +1,107 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.metadata.mp4; + +import android.os.Parcel; +import android.os.Parcelable; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.metadata.Metadata; +import com.google.common.primitives.Floats; + +/** + * Stores metadata from the Samsung smta box. + * + *
<p>
      See [Internal: b/150138465#comment76]. + */ +public final class SmtaMetadataEntry implements Metadata.Entry { + + /** + * The capture frame rate, in fps, or {@link C#RATE_UNSET} if it is unknown. + * + *
<p>
      If known, the capture frame rate should always be an integer value. + */ + public final float captureFrameRate; + /** The number of layers in the SVC extended frames. */ + public final int svcTemporalLayerCount; + + /** Creates an instance. */ + public SmtaMetadataEntry(float captureFrameRate, int svcTemporalLayerCount) { + this.captureFrameRate = captureFrameRate; + this.svcTemporalLayerCount = svcTemporalLayerCount; + } + + private SmtaMetadataEntry(Parcel in) { + captureFrameRate = in.readFloat(); + svcTemporalLayerCount = in.readInt(); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + SmtaMetadataEntry other = (SmtaMetadataEntry) obj; + return captureFrameRate == other.captureFrameRate + && svcTemporalLayerCount == other.svcTemporalLayerCount; + } + + @Override + public int hashCode() { + int result = 17; + result = 31 * result + Floats.hashCode(captureFrameRate); + result = 31 * result + svcTemporalLayerCount; + return result; + } + + @Override + public String toString() { + return "smta: captureFrameRate=" + + captureFrameRate + + ", svcTemporalLayerCount=" + + svcTemporalLayerCount; + } + + // Parcelable implementation. + + @Override + public void writeToParcel(Parcel dest, int flags) { + dest.writeFloat(captureFrameRate); + dest.writeInt(svcTemporalLayerCount); + } + + @Override + public int describeContents() { + return 0; + } + + public static final Parcelable.Creator CREATOR = + new Parcelable.Creator() { + + @Override + public SmtaMetadataEntry createFromParcel(Parcel in) { + return new SmtaMetadataEntry(in); + } + + @Override + public SmtaMetadataEntry[] newArray(int size) { + return new SmtaMetadataEntry[size]; + } + }; +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/mp4/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/mp4/package-info.java new file mode 100644 index 0000000000..8ddf4040c1 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/mp4/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +@NonNullApi +package com.google.android.exoplayer2.metadata.mp4; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/PrivateCommand.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/PrivateCommand.java index 44850b720f..a849851aa0 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/PrivateCommand.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/PrivateCommand.java @@ -20,22 +20,14 @@ import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; -/** - * Represents a private command as defined in SCTE35, Section 9.3.6. - */ +/** Represents a private command as defined in SCTE35, Section 9.3.6. */ public final class PrivateCommand extends SpliceCommand { - /** - * The {@code pts_adjustment} as defined in SCTE35, Section 9.2. - */ + /** The {@code pts_adjustment} as defined in SCTE35, Section 9.2. */ public final long ptsAdjustment; - /** - * The identifier as defined in SCTE35, Section 9.3.6. - */ + /** The identifier as defined in SCTE35, Section 9.3.6. */ public final long identifier; - /** - * The private bytes as defined in SCTE35, Section 9.3.6. - */ + /** The private bytes as defined in SCTE35, Section 9.3.6. */ public final byte[] commandBytes; private PrivateCommand(long identifier, byte[] commandBytes, long ptsAdjustment) { @@ -50,8 +42,8 @@ private PrivateCommand(Parcel in) { commandBytes = Util.castNonNull(in.createByteArray()); } - /* package */ static PrivateCommand parseFromSection(ParsableByteArray sectionData, - int commandLength, long ptsAdjustment) { + /* package */ static PrivateCommand parseFromSection( + ParsableByteArray sectionData, int commandLength, long ptsAdjustment) { long identifier = sectionData.readUnsignedInt(); byte[] privateBytes = new byte[commandLength - 4 /* identifier size */]; sectionData.readBytes(privateBytes, 0, privateBytes.length); @@ -70,16 +62,14 @@ public void writeToParcel(Parcel dest, int flags) { public static final Parcelable.Creator CREATOR = new Parcelable.Creator() { - @Override - public PrivateCommand createFromParcel(Parcel in) { - return new PrivateCommand(in); - } - - @Override - public PrivateCommand[] newArray(int size) { - return new PrivateCommand[size]; - } - - }; + @Override + public PrivateCommand createFromParcel(Parcel in) { + return new PrivateCommand(in); + } + @Override + public PrivateCommand[] newArray(int size) { + return new PrivateCommand[size]; + } + }; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceCommand.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceCommand.java index b0c3e34cde..40da002c04 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceCommand.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceCommand.java @@ -17,9 +17,7 @@ import com.google.android.exoplayer2.metadata.Metadata; -/** - * Superclass for SCTE35 splice commands. - */ +/** Superclass for SCTE35 splice commands. 
*/ public abstract class SpliceCommand implements Metadata.Entry { @Override @@ -33,5 +31,4 @@ public String toString() { public int describeContents() { return 0; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceInfoDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceInfoDecoder.java index 0e161d9c69..f26d446b80 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceInfoDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceInfoDecoder.java @@ -17,19 +17,16 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.metadata.Metadata; -import com.google.android.exoplayer2.metadata.MetadataDecoder; import com.google.android.exoplayer2.metadata.MetadataInputBuffer; -import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.metadata.SimpleMetadataDecoder; import com.google.android.exoplayer2.util.ParsableBitArray; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.TimestampAdjuster; import java.nio.ByteBuffer; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * Decodes splice info sections and produces splice commands. - */ -public final class SpliceInfoDecoder implements MetadataDecoder { +/** Decodes splice info sections and produces splice commands. */ +public final class SpliceInfoDecoder extends SimpleMetadataDecoder { private static final int TYPE_SPLICE_NULL = 0x00; private static final int TYPE_SPLICE_SCHEDULE = 0x04; @@ -40,18 +37,16 @@ public final class SpliceInfoDecoder implements MetadataDecoder { private final ParsableByteArray sectionData; private final ParsableBitArray sectionHeader; - @MonotonicNonNull private TimestampAdjuster timestampAdjuster; + private @MonotonicNonNull TimestampAdjuster timestampAdjuster; public SpliceInfoDecoder() { sectionData = new ParsableByteArray(); sectionHeader = new ParsableBitArray(); } - @SuppressWarnings("ByteBufferBackingArray") @Override - public Metadata decode(MetadataInputBuffer inputBuffer) { - ByteBuffer buffer = Assertions.checkNotNull(inputBuffer.data); - + @SuppressWarnings("ByteBufferBackingArray") // Buffer validated by SimpleMetadataDecoder.decode + protected Metadata decode(MetadataInputBuffer inputBuffer, ByteBuffer buffer) { // Internal timestamps adjustment. if (timestampAdjuster == null || inputBuffer.subsampleOffsetUs != timestampAdjuster.getTimestampOffsetUs()) { @@ -83,8 +78,8 @@ public Metadata decode(MetadataInputBuffer inputBuffer) { command = SpliceScheduleCommand.parseFromSection(sectionData); break; case TYPE_SPLICE_INSERT: - command = SpliceInsertCommand.parseFromSection(sectionData, ptsAdjustment, - timestampAdjuster); + command = + SpliceInsertCommand.parseFromSection(sectionData, ptsAdjustment, timestampAdjuster); break; case TYPE_TIME_SIGNAL: command = TimeSignalCommand.parseFromSection(sectionData, ptsAdjustment, timestampAdjuster); @@ -98,5 +93,4 @@ public Metadata decode(MetadataInputBuffer inputBuffer) { } return command == null ? 
new Metadata() : new Metadata(command); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceInsertCommand.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceInsertCommand.java index 6f56d3b68c..e5df2b963b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceInsertCommand.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceInsertCommand.java @@ -24,18 +24,12 @@ import java.util.Collections; import java.util.List; -/** - * Represents a splice insert command defined in SCTE35, Section 9.3.3. - */ +/** Represents a splice insert command defined in SCTE35, Section 9.3.3. */ public final class SpliceInsertCommand extends SpliceCommand { - /** - * The splice event id. - */ + /** The splice event id. */ public final long spliceEventId; - /** - * True if the event with id {@link #spliceEventId} has been canceled. - */ + /** True if the event with id {@link #spliceEventId} has been canceled. */ public final boolean spliceEventCancelIndicator; /** * If true, the splice event is an opportunity to exit from the network feed. If false, indicates @@ -49,53 +43,53 @@ public final class SpliceInsertCommand extends SpliceCommand { public final boolean programSpliceFlag; /** * Whether splicing should be done at the nearest opportunity. If false, splicing should be done - * at the moment indicated by {@link #programSplicePlaybackPositionUs} or - * {@link ComponentSplice#componentSplicePlaybackPositionUs}, depending on - * {@link #programSpliceFlag}. + * at the moment indicated by {@link #programSplicePlaybackPositionUs} or {@link + * ComponentSplice#componentSplicePlaybackPositionUs}, depending on {@link #programSpliceFlag}. */ public final boolean spliceImmediateFlag; /** - * If {@link #programSpliceFlag} is true, the PTS at which the program splice should occur. - * {@link C#TIME_UNSET} otherwise. + * If {@link #programSpliceFlag} is true, the PTS at which the program splice should occur. {@link + * C#TIME_UNSET} otherwise. */ public final long programSplicePts; - /** - * Equivalent to {@link #programSplicePts} but in the playback timebase. - */ + /** Equivalent to {@link #programSplicePts} but in the playback timebase. */ public final long programSplicePlaybackPositionUs; /** - * If {@link #programSpliceFlag} is false, a non-empty list containing the - * {@link ComponentSplice}s. Otherwise, an empty list. + * If {@link #programSpliceFlag} is false, a non-empty list containing the {@link + * ComponentSplice}s. Otherwise, an empty list. */ public final List componentSpliceList; /** - * If {@link #breakDurationUs} is not {@link C#TIME_UNSET}, defines whether - * {@link #breakDurationUs} should be used to know when to return to the network feed. If - * {@link #breakDurationUs} is {@link C#TIME_UNSET}, the value is undefined. + * If {@link #breakDurationUs} is not {@link C#TIME_UNSET}, defines whether {@link + * #breakDurationUs} should be used to know when to return to the network feed. If {@link + * #breakDurationUs} is {@link C#TIME_UNSET}, the value is undefined. */ public final boolean autoReturn; /** * The duration of the splice in microseconds, or {@link C#TIME_UNSET} if no duration is present. */ public final long breakDurationUs; - /** - * The unique program id as defined in SCTE35, Section 9.3.3. - */ + /** The unique program id as defined in SCTE35, Section 9.3.3. 
*/ public final int uniqueProgramId; - /** - * Holds the value of {@code avail_num} as defined in SCTE35, Section 9.3.3. - */ + /** Holds the value of {@code avail_num} as defined in SCTE35, Section 9.3.3. */ public final int availNum; - /** - * Holds the value of {@code avails_expected} as defined in SCTE35, Section 9.3.3. - */ + /** Holds the value of {@code avails_expected} as defined in SCTE35, Section 9.3.3. */ public final int availsExpected; - private SpliceInsertCommand(long spliceEventId, boolean spliceEventCancelIndicator, - boolean outOfNetworkIndicator, boolean programSpliceFlag, boolean spliceImmediateFlag, - long programSplicePts, long programSplicePlaybackPositionUs, - List componentSpliceList, boolean autoReturn, long breakDurationUs, - int uniqueProgramId, int availNum, int availsExpected) { + private SpliceInsertCommand( + long spliceEventId, + boolean spliceEventCancelIndicator, + boolean outOfNetworkIndicator, + boolean programSpliceFlag, + boolean spliceImmediateFlag, + long programSplicePts, + long programSplicePlaybackPositionUs, + List componentSpliceList, + boolean autoReturn, + long breakDurationUs, + int uniqueProgramId, + int availNum, + int availsExpected) { this.spliceEventId = spliceEventId; this.spliceEventCancelIndicator = spliceEventCancelIndicator; this.outOfNetworkIndicator = outOfNetworkIndicator; @@ -132,8 +126,8 @@ private SpliceInsertCommand(Parcel in) { availsExpected = in.readInt(); } - /* package */ static SpliceInsertCommand parseFromSection(ParsableByteArray sectionData, - long ptsAdjustment, TimestampAdjuster timestampAdjuster) { + /* package */ static SpliceInsertCommand parseFromSection( + ParsableByteArray sectionData, long ptsAdjustment, TimestampAdjuster timestampAdjuster) { long spliceEventId = sectionData.readUnsignedInt(); // splice_event_cancel_indicator(1), reserved(7). boolean spliceEventCancelIndicator = (sectionData.readUnsignedByte() & 0x80) != 0; @@ -165,8 +159,11 @@ private SpliceInsertCommand(Parcel in) { if (!spliceImmediateFlag) { componentSplicePts = TimeSignalCommand.parseSpliceTime(sectionData, ptsAdjustment); } - componentSplices.add(new ComponentSplice(componentTag, componentSplicePts, - timestampAdjuster.adjustTsTimestamp(componentSplicePts))); + componentSplices.add( + new ComponentSplice( + componentTag, + componentSplicePts, + timestampAdjuster.adjustTsTimestamp(componentSplicePts))); } } if (durationFlag) { @@ -179,23 +176,31 @@ private SpliceInsertCommand(Parcel in) { availNum = sectionData.readUnsignedByte(); availsExpected = sectionData.readUnsignedByte(); } - return new SpliceInsertCommand(spliceEventId, spliceEventCancelIndicator, outOfNetworkIndicator, - programSpliceFlag, spliceImmediateFlag, programSplicePts, - timestampAdjuster.adjustTsTimestamp(programSplicePts), componentSplices, autoReturn, - breakDurationUs, uniqueProgramId, availNum, availsExpected); + return new SpliceInsertCommand( + spliceEventId, + spliceEventCancelIndicator, + outOfNetworkIndicator, + programSpliceFlag, + spliceImmediateFlag, + programSplicePts, + timestampAdjuster.adjustTsTimestamp(programSplicePts), + componentSplices, + autoReturn, + breakDurationUs, + uniqueProgramId, + availNum, + availsExpected); } - /** - * Holds splicing information for specific splice insert command components. - */ + /** Holds splicing information for specific splice insert command components. 
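The flag booleans parsed in parseFromSection above come out of a single header byte; the bit layout follows SCTE-35 Section 9.3.3 (out_of_network_indicator, program_splice_flag, duration_flag, splice_immediate_flag, then four reserved bits). A small sketch with illustrative names:

```java
// Sketch of extracting the splice_insert() flag bits from one header byte.
// Bit positions per SCTE-35 Section 9.3.3; not a class from the patch.
final class SpliceInsertFlags {
  final boolean outOfNetworkIndicator;
  final boolean programSpliceFlag;
  final boolean durationFlag;
  final boolean spliceImmediateFlag;

  SpliceInsertFlags(int headerByte) {
    outOfNetworkIndicator = (headerByte & 0x80) != 0;
    programSpliceFlag = (headerByte & 0x40) != 0;
    durationFlag = (headerByte & 0x20) != 0;
    spliceImmediateFlag = (headerByte & 0x10) != 0;
  }
}
```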
*/ public static final class ComponentSplice { public final int componentTag; public final long componentSplicePts; public final long componentSplicePlaybackPositionUs; - private ComponentSplice(int componentTag, long componentSplicePts, - long componentSplicePlaybackPositionUs) { + private ComponentSplice( + int componentTag, long componentSplicePts, long componentSplicePlaybackPositionUs) { this.componentTag = componentTag; this.componentSplicePts = componentSplicePts; this.componentSplicePlaybackPositionUs = componentSplicePlaybackPositionUs; @@ -210,7 +215,6 @@ public void writeToParcel(Parcel dest) { public static ComponentSplice createFromParcel(Parcel in) { return new ComponentSplice(in.readInt(), in.readLong(), in.readLong()); } - } // Parcelable implementation. @@ -239,16 +243,14 @@ public void writeToParcel(Parcel dest, int flags) { public static final Parcelable.Creator CREATOR = new Parcelable.Creator() { - @Override - public SpliceInsertCommand createFromParcel(Parcel in) { - return new SpliceInsertCommand(in); - } - - @Override - public SpliceInsertCommand[] newArray(int size) { - return new SpliceInsertCommand[size]; - } - - }; + @Override + public SpliceInsertCommand createFromParcel(Parcel in) { + return new SpliceInsertCommand(in); + } + @Override + public SpliceInsertCommand[] newArray(int size) { + return new SpliceInsertCommand[size]; + } + }; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceNullCommand.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceNullCommand.java index 461d49ebb4..196f297970 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceNullCommand.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceNullCommand.java @@ -17,9 +17,7 @@ import android.os.Parcel; -/** - * Represents a splice null command as defined in SCTE35, Section 9.3.1. - */ +/** Represents a splice null command as defined in SCTE35, Section 9.3.1. */ public final class SpliceNullCommand extends SpliceCommand { // Parcelable implementation. @@ -32,16 +30,14 @@ public void writeToParcel(Parcel dest, int flags) { public static final Creator CREATOR = new Creator() { - @Override - public SpliceNullCommand createFromParcel(Parcel in) { - return new SpliceNullCommand(); - } - - @Override - public SpliceNullCommand[] newArray(int size) { - return new SpliceNullCommand[size]; - } - - }; + @Override + public SpliceNullCommand createFromParcel(Parcel in) { + return new SpliceNullCommand(); + } + @Override + public SpliceNullCommand[] newArray(int size) { + return new SpliceNullCommand[size]; + } + }; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceScheduleCommand.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceScheduleCommand.java index 8696909c97..6e62177fc7 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceScheduleCommand.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/SpliceScheduleCommand.java @@ -23,23 +23,15 @@ import java.util.Collections; import java.util.List; -/** - * Represents a splice schedule command as defined in SCTE35, Section 9.3.2. - */ +/** Represents a splice schedule command as defined in SCTE35, Section 9.3.2. 
*/ public final class SpliceScheduleCommand extends SpliceCommand { - /** - * Represents a splice event as contained in a {@link SpliceScheduleCommand}. - */ + /** Represents a splice event as contained in a {@link SpliceScheduleCommand}. */ public static final class Event { - /** - * The splice event id. - */ + /** The splice event id. */ public final long spliceEventId; - /** - * True if the event with id {@link #spliceEventId} has been canceled. - */ + /** True if the event with id {@link #spliceEventId} has been canceled. */ public final boolean spliceEventCancelIndicator; /** * If true, the splice event is an opportunity to exit from the network feed. If false, @@ -57,14 +49,14 @@ public static final class Event { */ public final long utcSpliceTime; /** - * If {@link #programSpliceFlag} is false, a non-empty list containing the - * {@link ComponentSplice}s. Otherwise, an empty list. + * If {@link #programSpliceFlag} is false, a non-empty list containing the {@link + * ComponentSplice}s. Otherwise, an empty list. */ public final List componentSpliceList; /** - * If {@link #breakDurationUs} is not {@link C#TIME_UNSET}, defines whether - * {@link #breakDurationUs} should be used to know when to return to the network feed. If - * {@link #breakDurationUs} is {@link C#TIME_UNSET}, the value is undefined. + * If {@link #breakDurationUs} is not {@link C#TIME_UNSET}, defines whether {@link + * #breakDurationUs} should be used to know when to return to the network feed. If {@link + * #breakDurationUs} is {@link C#TIME_UNSET}, the value is undefined. */ public final boolean autoReturn; /** @@ -72,23 +64,25 @@ public static final class Event { * present. */ public final long breakDurationUs; - /** - * The unique program id as defined in SCTE35, Section 9.3.2. - */ + /** The unique program id as defined in SCTE35, Section 9.3.2. */ public final int uniqueProgramId; - /** - * Holds the value of {@code avail_num} as defined in SCTE35, Section 9.3.2. - */ + /** Holds the value of {@code avail_num} as defined in SCTE35, Section 9.3.2. */ public final int availNum; - /** - * Holds the value of {@code avails_expected} as defined in SCTE35, Section 9.3.2. - */ + /** Holds the value of {@code avails_expected} as defined in SCTE35, Section 9.3.2. 
*/ public final int availsExpected; - private Event(long spliceEventId, boolean spliceEventCancelIndicator, - boolean outOfNetworkIndicator, boolean programSpliceFlag, - List componentSpliceList, long utcSpliceTime, boolean autoReturn, - long breakDurationUs, int uniqueProgramId, int availNum, int availsExpected) { + private Event( + long spliceEventId, + boolean spliceEventCancelIndicator, + boolean outOfNetworkIndicator, + boolean programSpliceFlag, + List componentSpliceList, + long utcSpliceTime, + boolean autoReturn, + long breakDurationUs, + int uniqueProgramId, + int availNum, + int availsExpected) { this.spliceEventId = spliceEventId; this.spliceEventCancelIndicator = spliceEventCancelIndicator; this.outOfNetworkIndicator = outOfNetworkIndicator; @@ -161,9 +155,18 @@ private static Event parseFromSection(ParsableByteArray sectionData) { availNum = sectionData.readUnsignedByte(); availsExpected = sectionData.readUnsignedByte(); } - return new Event(spliceEventId, spliceEventCancelIndicator, outOfNetworkIndicator, - programSpliceFlag, componentSplices, utcSpliceTime, autoReturn, breakDurationUs, - uniqueProgramId, availNum, availsExpected); + return new Event( + spliceEventId, + spliceEventCancelIndicator, + outOfNetworkIndicator, + programSpliceFlag, + componentSplices, + utcSpliceTime, + autoReturn, + breakDurationUs, + uniqueProgramId, + availNum, + availsExpected); } private void writeToParcel(Parcel dest) { @@ -187,12 +190,9 @@ private void writeToParcel(Parcel dest) { private static Event createFromParcel(Parcel in) { return new Event(in); } - } - /** - * Holds splicing information for specific splice schedule command components. - */ + /** Holds splicing information for specific splice schedule command components. */ public static final class ComponentSplice { public final int componentTag; @@ -211,12 +211,9 @@ private void writeToParcel(Parcel dest) { dest.writeInt(componentTag); dest.writeLong(utcSpliceTime); } - } - /** - * The list of scheduled events. - */ + /** The list of scheduled events. */ public final List events; private SpliceScheduleCommand(List events) { @@ -255,16 +252,14 @@ public void writeToParcel(Parcel dest, int flags) { public static final Parcelable.Creator CREATOR = new Parcelable.Creator() { - @Override - public SpliceScheduleCommand createFromParcel(Parcel in) { - return new SpliceScheduleCommand(in); - } - - @Override - public SpliceScheduleCommand[] newArray(int size) { - return new SpliceScheduleCommand[size]; - } - - }; + @Override + public SpliceScheduleCommand createFromParcel(Parcel in) { + return new SpliceScheduleCommand(in); + } + @Override + public SpliceScheduleCommand[] newArray(int size) { + return new SpliceScheduleCommand[size]; + } + }; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/TimeSignalCommand.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/TimeSignalCommand.java index e233a276ed..aec2a91731 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/TimeSignalCommand.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/scte35/TimeSignalCommand.java @@ -20,18 +20,12 @@ import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.TimestampAdjuster; -/** - * Represents a time signal command as defined in SCTE35, Section 9.3.4. - */ +/** Represents a time signal command as defined in SCTE35, Section 9.3.4. 
*/ public final class TimeSignalCommand extends SpliceCommand { - /** - * A PTS value, as defined in SCTE35, Section 9.3.4. - */ + /** A PTS value, as defined in SCTE35, Section 9.3.4. */ public final long ptsTime; - /** - * Equivalent to {@link #ptsTime} but in the playback timebase. - */ + /** Equivalent to {@link #ptsTime} but in the playback timebase. */ public final long playbackPositionUs; private TimeSignalCommand(long ptsTime, long playbackPositionUs) { @@ -39,8 +33,8 @@ private TimeSignalCommand(long ptsTime, long playbackPositionUs) { this.playbackPositionUs = playbackPositionUs; } - /* package */ static TimeSignalCommand parseFromSection(ParsableByteArray sectionData, - long ptsAdjustment, TimestampAdjuster timestampAdjuster) { + /* package */ static TimeSignalCommand parseFromSection( + ParsableByteArray sectionData, long ptsAdjustment, TimestampAdjuster timestampAdjuster) { long ptsTime = parseSpliceTime(sectionData, ptsAdjustment); long playbackPositionUs = timestampAdjuster.adjustTsTimestamp(ptsTime); return new TimeSignalCommand(ptsTime, playbackPositionUs); @@ -78,16 +72,14 @@ public void writeToParcel(Parcel dest, int flags) { public static final Creator CREATOR = new Creator() { - @Override - public TimeSignalCommand createFromParcel(Parcel in) { - return new TimeSignalCommand(in.readLong(), in.readLong()); - } - - @Override - public TimeSignalCommand[] newArray(int size) { - return new TimeSignalCommand[size]; - } - - }; + @Override + public TimeSignalCommand createFromParcel(Parcel in) { + return new TimeSignalCommand(in.readLong(), in.readLong()); + } + @Override + public TimeSignalCommand[] newArray(int size) { + return new TimeSignalCommand[size]; + } + }; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/vorbis/VorbisComment.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/vorbis/VorbisComment.java new file mode 100644 index 0000000000..6f452f29bd --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/vorbis/VorbisComment.java @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.metadata.vorbis; + +import android.os.Parcel; + +/** A vorbis comment, extracted from a FLAC or Ogg file. */ +@SuppressWarnings("deprecation") // Extending deprecated type for backwards compatibility. +public final class VorbisComment extends com.google.android.exoplayer2.metadata.flac.VorbisComment { + + /** + * @param key The key. + * @param value The value. 
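For orientation, the playbackPositionUs values above come from re-basing SCTE-35 pts_time, a 33-bit count on the MPEG 90 kHz clock. The conversion itself is simple arithmetic; the real code delegates to TimestampAdjuster, which also applies stream offsets and handles wraparound. A simplified sketch, not the library API:

```java
// Assumption-laden simplification of the 90 kHz PTS timebase conversion.
final class PtsTime {
  private static final long MAX_PTS_PLUS_ONE = 1L << 33; // 33-bit counter

  /** Converts a 90 kHz PTS value to microseconds (ignoring wraparound handling). */
  static long ptsToUs(long pts90kHz) {
    return pts90kHz * 1_000_000L / 90_000L;
  }

  /** Wraps a raw counter value into the 33-bit PTS range. */
  static long wrapPts(long pts) {
    return ((pts % MAX_PTS_PLUS_ONE) + MAX_PTS_PLUS_ONE) % MAX_PTS_PLUS_ONE;
  }
}
```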
+ */ + public VorbisComment(String key, String value) { + super(key, value); + } + + /* package */ VorbisComment(Parcel in) { + super(in); + } + + public static final Creator CREATOR = + new Creator() { + + @Override + public VorbisComment createFromParcel(Parcel in) { + return new VorbisComment(in); + } + + @Override + public VorbisComment[] newArray(int size) { + return new VorbisComment[size]; + } + }; +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/vorbis/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/vorbis/package-info.java new file mode 100644 index 0000000000..858bba01a9 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/metadata/vorbis/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.metadata.vorbis; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/ActionFile.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/ActionFile.java deleted file mode 100644 index c69908c746..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/ActionFile.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright (C) 2017 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.offline; - -import android.net.Uri; -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.offline.DownloadRequest.UnsupportedRequestException; -import com.google.android.exoplayer2.util.AtomicFile; -import com.google.android.exoplayer2.util.Util; -import java.io.DataInputStream; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.List; - -/** - * Loads {@link DownloadRequest DownloadRequests} from legacy action files. - * - * @deprecated Legacy action files should be merged into download indices using {@link - * ActionFileUpgradeUtil}. - */ -@Deprecated -/* package */ final class ActionFile { - - private static final int VERSION = 0; - - private final AtomicFile atomicFile; - - /** - * @param actionFile The file from which {@link DownloadRequest DownloadRequests} will be loaded. 
- */ - public ActionFile(File actionFile) { - atomicFile = new AtomicFile(actionFile); - } - - /** Returns whether the file or its backup exists. */ - public boolean exists() { - return atomicFile.exists(); - } - - /** Deletes the action file and its backup. */ - public void delete() { - atomicFile.delete(); - } - - /** - * Loads {@link DownloadRequest DownloadRequests} from the file. - * - * @return The loaded {@link DownloadRequest DownloadRequests}, or an empty array if the file does - * not exist. - * @throws IOException If there is an error reading the file. - */ - public DownloadRequest[] load() throws IOException { - if (!exists()) { - return new DownloadRequest[0]; - } - @Nullable InputStream inputStream = null; - try { - inputStream = atomicFile.openRead(); - DataInputStream dataInputStream = new DataInputStream(inputStream); - int version = dataInputStream.readInt(); - if (version > VERSION) { - throw new IOException("Unsupported action file version: " + version); - } - int actionCount = dataInputStream.readInt(); - ArrayList actions = new ArrayList<>(); - for (int i = 0; i < actionCount; i++) { - try { - actions.add(readDownloadRequest(dataInputStream)); - } catch (UnsupportedRequestException e) { - // remove DownloadRequest is not supported. Ignore and continue loading rest. - } - } - return actions.toArray(new DownloadRequest[0]); - } finally { - Util.closeQuietly(inputStream); - } - } - - private static DownloadRequest readDownloadRequest(DataInputStream input) throws IOException { - String type = input.readUTF(); - int version = input.readInt(); - - Uri uri = Uri.parse(input.readUTF()); - boolean isRemoveAction = input.readBoolean(); - - int dataLength = input.readInt(); - @Nullable byte[] data; - if (dataLength != 0) { - data = new byte[dataLength]; - input.readFully(data); - } else { - data = null; - } - - // Serialized version 0 progressive actions did not contain keys. - boolean isLegacyProgressive = version == 0 && DownloadRequest.TYPE_PROGRESSIVE.equals(type); - List keys = new ArrayList<>(); - if (!isLegacyProgressive) { - int keyCount = input.readInt(); - for (int i = 0; i < keyCount; i++) { - keys.add(readKey(type, version, input)); - } - } - - // Serialized version 0 and 1 DASH/HLS/SS actions did not contain a custom cache key. - boolean isLegacySegmented = - version < 2 - && (DownloadRequest.TYPE_DASH.equals(type) - || DownloadRequest.TYPE_HLS.equals(type) - || DownloadRequest.TYPE_SS.equals(type)); - @Nullable String customCacheKey = null; - if (!isLegacySegmented) { - customCacheKey = input.readBoolean() ? input.readUTF() : null; - } - - // Serialized version 0, 1 and 2 did not contain an id. We need to generate one. - String id = version < 3 ? generateDownloadId(uri, customCacheKey) : input.readUTF(); - - if (isRemoveAction) { - // Remove actions are not supported anymore. - throw new UnsupportedRequestException(); - } - return new DownloadRequest(id, type, uri, keys, customCacheKey, data); - } - - private static StreamKey readKey(String type, int version, DataInputStream input) - throws IOException { - int periodIndex; - int groupIndex; - int trackIndex; - - // Serialized version 0 HLS/SS actions did not contain a period index. 
- if ((DownloadRequest.TYPE_HLS.equals(type) || DownloadRequest.TYPE_SS.equals(type)) - && version == 0) { - periodIndex = 0; - groupIndex = input.readInt(); - trackIndex = input.readInt(); - } else { - periodIndex = input.readInt(); - groupIndex = input.readInt(); - trackIndex = input.readInt(); - } - return new StreamKey(periodIndex, groupIndex, trackIndex); - } - - private static String generateDownloadId(Uri uri, @Nullable String customCacheKey) { - return customCacheKey != null ? customCacheKey : uri.toString(); - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/ActionFileUpgradeUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/ActionFileUpgradeUtil.java deleted file mode 100644 index 999059e852..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/ActionFileUpgradeUtil.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright (C) 2019 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.offline; - -import static com.google.android.exoplayer2.offline.Download.STATE_QUEUED; - -import androidx.annotation.Nullable; -import androidx.annotation.WorkerThread; -import com.google.android.exoplayer2.C; -import java.io.File; -import java.io.IOException; - -/** Utility class for upgrading legacy action files into {@link DefaultDownloadIndex}. */ -public final class ActionFileUpgradeUtil { - - /** Provides download IDs during action file upgrade. */ - public interface DownloadIdProvider { - - /** - * Returns a download id for given request. - * - * @param downloadRequest The request for which an ID is required. - * @return A corresponding download ID. - */ - String getId(DownloadRequest downloadRequest); - } - - private ActionFileUpgradeUtil() {} - - /** - * Merges {@link DownloadRequest DownloadRequests} contained in a legacy action file into a {@link - * DefaultDownloadIndex}, deleting the action file if the merge is successful or if {@code - * deleteOnFailure} is {@code true}. - * - *

<p>This method must not be called while the {@link DefaultDownloadIndex} is being used by a
-   * {@link DownloadManager}.
-   *
-   * <p>

      This method may be slow and shouldn't normally be called on the main thread. - * - * @param actionFilePath The action file path. - * @param downloadIdProvider A download ID provider, or {@code null}. If {@code null} then ID of - * each download will be its custom cache key if one is specified, or else its URL. - * @param downloadIndex The index into which the requests will be merged. - * @param deleteOnFailure Whether to delete the action file if the merge fails. - * @param addNewDownloadsAsCompleted Whether to add new downloads as completed. - * @throws IOException If an error occurs loading or merging the requests. - */ - @WorkerThread - @SuppressWarnings("deprecation") - public static void upgradeAndDelete( - File actionFilePath, - @Nullable DownloadIdProvider downloadIdProvider, - DefaultDownloadIndex downloadIndex, - boolean deleteOnFailure, - boolean addNewDownloadsAsCompleted) - throws IOException { - ActionFile actionFile = new ActionFile(actionFilePath); - if (actionFile.exists()) { - boolean success = false; - try { - long nowMs = System.currentTimeMillis(); - for (DownloadRequest request : actionFile.load()) { - if (downloadIdProvider != null) { - request = request.copyWithId(downloadIdProvider.getId(request)); - } - mergeRequest(request, downloadIndex, addNewDownloadsAsCompleted, nowMs); - } - success = true; - } finally { - if (success || deleteOnFailure) { - actionFile.delete(); - } - } - } - } - - /** - * Merges a {@link DownloadRequest} into a {@link DefaultDownloadIndex}. - * - * @param request The request to be merged. - * @param downloadIndex The index into which the request will be merged. - * @param addNewDownloadAsCompleted Whether to add new downloads as completed. - * @throws IOException If an error occurs merging the request. - */ - /* package */ static void mergeRequest( - DownloadRequest request, - DefaultDownloadIndex downloadIndex, - boolean addNewDownloadAsCompleted, - long nowMs) - throws IOException { - @Nullable Download download = downloadIndex.getDownload(request.id); - if (download != null) { - download = DownloadManager.mergeRequest(download, request, download.stopReason, nowMs); - } else { - download = - new Download( - request, - addNewDownloadAsCompleted ? 
Download.STATE_COMPLETED : STATE_QUEUED, - /* startTimeMs= */ nowMs, - /* updateTimeMs= */ nowMs, - /* contentLength= */ C.LENGTH_UNSET, - Download.STOP_REASON_NONE, - Download.FAILURE_REASON_NONE); - } - downloadIndex.putDownload(download); - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DefaultDownloadIndex.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DefaultDownloadIndex.java index f1c897813f..df987c3776 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DefaultDownloadIndex.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DefaultDownloadIndex.java @@ -15,12 +15,16 @@ */ package com.google.android.exoplayer2.offline; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + import android.content.ContentValues; import android.database.Cursor; import android.database.SQLException; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteException; import android.net.Uri; +import android.text.TextUtils; +import androidx.annotation.GuardedBy; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.database.DatabaseIOException; @@ -29,6 +33,7 @@ import com.google.android.exoplayer2.offline.Download.FailureReason; import com.google.android.exoplayer2.offline.Download.State; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; import java.util.ArrayList; import java.util.List; @@ -38,10 +43,10 @@ public final class DefaultDownloadIndex implements WritableDownloadIndex { private static final String TABLE_PREFIX = DatabaseProvider.TABLE_PREFIX + "Downloads"; - @VisibleForTesting /* package */ static final int TABLE_VERSION = 2; + @VisibleForTesting /* package */ static final int TABLE_VERSION = 3; private static final String COLUMN_ID = "id"; - private static final String COLUMN_TYPE = "title"; + private static final String COLUMN_MIME_TYPE = "mime_type"; private static final String COLUMN_URI = "uri"; private static final String COLUMN_STREAM_KEYS = "stream_keys"; private static final String COLUMN_CUSTOM_CACHE_KEY = "custom_cache_key"; @@ -54,9 +59,10 @@ public final class DefaultDownloadIndex implements WritableDownloadIndex { private static final String COLUMN_FAILURE_REASON = "failure_reason"; private static final String COLUMN_PERCENT_DOWNLOADED = "percent_downloaded"; private static final String COLUMN_BYTES_DOWNLOADED = "bytes_downloaded"; + private static final String COLUMN_KEY_SET_ID = "key_set_id"; private static final int COLUMN_INDEX_ID = 0; - private static final int COLUMN_INDEX_TYPE = 1; + private static final int COLUMN_INDEX_MIME_TYPE = 1; private static final int COLUMN_INDEX_URI = 2; private static final int COLUMN_INDEX_STREAM_KEYS = 3; private static final int COLUMN_INDEX_CUSTOM_CACHE_KEY = 4; @@ -69,6 +75,7 @@ public final class DefaultDownloadIndex implements WritableDownloadIndex { private static final int COLUMN_INDEX_FAILURE_REASON = 11; private static final int COLUMN_INDEX_PERCENT_DOWNLOADED = 12; private static final int COLUMN_INDEX_BYTES_DOWNLOADED = 13; + private static final int COLUMN_INDEX_KEY_SET_ID = 14; private static final String WHERE_ID_EQUALS = COLUMN_ID + " = ?"; private static final String WHERE_STATE_IS_DOWNLOADING = @@ -79,7 +86,7 @@ public final class DefaultDownloadIndex implements WritableDownloadIndex { private 
static final String[] COLUMNS = new String[] { COLUMN_ID, - COLUMN_TYPE, + COLUMN_MIME_TYPE, COLUMN_URI, COLUMN_STREAM_KEYS, COLUMN_CUSTOM_CACHE_KEY, @@ -92,14 +99,15 @@ public final class DefaultDownloadIndex implements WritableDownloadIndex { COLUMN_FAILURE_REASON, COLUMN_PERCENT_DOWNLOADED, COLUMN_BYTES_DOWNLOADED, + COLUMN_KEY_SET_ID }; private static final String TABLE_SCHEMA = "(" + COLUMN_ID + " TEXT PRIMARY KEY NOT NULL," - + COLUMN_TYPE - + " TEXT NOT NULL," + + COLUMN_MIME_TYPE + + " TEXT," + COLUMN_URI + " TEXT NOT NULL," + COLUMN_STREAM_KEYS @@ -123,14 +131,18 @@ public final class DefaultDownloadIndex implements WritableDownloadIndex { + COLUMN_PERCENT_DOWNLOADED + " REAL NOT NULL," + COLUMN_BYTES_DOWNLOADED - + " INTEGER NOT NULL)"; + + " INTEGER NOT NULL," + + COLUMN_KEY_SET_ID + + " BLOB NOT NULL)"; private static final String TRUE = "1"; private final String name; private final String tableName; private final DatabaseProvider databaseProvider; + private final Object initializationLock; + @GuardedBy("initializationLock") private boolean initialized; /** @@ -162,6 +174,7 @@ public DefaultDownloadIndex(DatabaseProvider databaseProvider, String name) { this.name = name; this.databaseProvider = databaseProvider; tableName = TABLE_PREFIX + name; + initializationLock = new Object(); } @Override @@ -189,24 +202,9 @@ public DownloadCursor getDownloads(@Download.State int... states) throws Databas @Override public void putDownload(Download download) throws DatabaseIOException { ensureInitialized(); - ContentValues values = new ContentValues(); - values.put(COLUMN_ID, download.request.id); - values.put(COLUMN_TYPE, download.request.type); - values.put(COLUMN_URI, download.request.uri.toString()); - values.put(COLUMN_STREAM_KEYS, encodeStreamKeys(download.request.streamKeys)); - values.put(COLUMN_CUSTOM_CACHE_KEY, download.request.customCacheKey); - values.put(COLUMN_DATA, download.request.data); - values.put(COLUMN_STATE, download.state); - values.put(COLUMN_START_TIME_MS, download.startTimeMs); - values.put(COLUMN_UPDATE_TIME_MS, download.updateTimeMs); - values.put(COLUMN_CONTENT_LENGTH, download.contentLength); - values.put(COLUMN_STOP_REASON, download.stopReason); - values.put(COLUMN_FAILURE_REASON, download.failureReason); - values.put(COLUMN_PERCENT_DOWNLOADED, download.getPercentDownloaded()); - values.put(COLUMN_BYTES_DOWNLOADED, download.getBytesDownloaded()); try { SQLiteDatabase writableDatabase = databaseProvider.getWritableDatabase(); - writableDatabase.replaceOrThrow(tableName, /* nullColumnHack= */ null, values); + putDownloadInternal(download, writableDatabase); } catch (SQLiteException e) { throw new DatabaseIOException(e); } @@ -282,33 +280,112 @@ public void setStopReason(String id, int stopReason) throws DatabaseIOException } private void ensureInitialized() throws DatabaseIOException { - if (initialized) { - return; - } - try { - SQLiteDatabase readableDatabase = databaseProvider.getReadableDatabase(); - int version = VersionTable.getVersion(readableDatabase, VersionTable.FEATURE_OFFLINE, name); - if (version != TABLE_VERSION) { - SQLiteDatabase writableDatabase = databaseProvider.getWritableDatabase(); - writableDatabase.beginTransactionNonExclusive(); - try { - VersionTable.setVersion( - writableDatabase, VersionTable.FEATURE_OFFLINE, name, TABLE_VERSION); - writableDatabase.execSQL("DROP TABLE IF EXISTS " + tableName); - writableDatabase.execSQL("CREATE TABLE " + tableName + " " + TABLE_SCHEMA); - writableDatabase.setTransactionSuccessful(); - } finally { - 
writableDatabase.endTransaction(); + synchronized (initializationLock) { + if (initialized) { + return; + } + try { + SQLiteDatabase readableDatabase = databaseProvider.getReadableDatabase(); + int version = VersionTable.getVersion(readableDatabase, VersionTable.FEATURE_OFFLINE, name); + if (version != TABLE_VERSION) { + SQLiteDatabase writableDatabase = databaseProvider.getWritableDatabase(); + writableDatabase.beginTransactionNonExclusive(); + try { + VersionTable.setVersion( + writableDatabase, VersionTable.FEATURE_OFFLINE, name, TABLE_VERSION); + List upgradedDownloads = + version == 2 ? loadDownloadsFromVersion2(writableDatabase) : new ArrayList<>(); + writableDatabase.execSQL("DROP TABLE IF EXISTS " + tableName); + writableDatabase.execSQL("CREATE TABLE " + tableName + " " + TABLE_SCHEMA); + for (Download download : upgradedDownloads) { + putDownloadInternal(download, writableDatabase); + } + writableDatabase.setTransactionSuccessful(); + } finally { + writableDatabase.endTransaction(); + } } + initialized = true; + } catch (SQLException e) { + throw new DatabaseIOException(e); } - initialized = true; - } catch (SQLException e) { - throw new DatabaseIOException(e); } } - // incompatible types in argument. - @SuppressWarnings("nullness:argument.type.incompatible") + private void putDownloadInternal(Download download, SQLiteDatabase database) { + byte[] keySetId = + download.request.keySetId == null ? Util.EMPTY_BYTE_ARRAY : download.request.keySetId; + ContentValues values = new ContentValues(); + values.put(COLUMN_ID, download.request.id); + values.put(COLUMN_MIME_TYPE, download.request.mimeType); + values.put(COLUMN_URI, download.request.uri.toString()); + values.put(COLUMN_STREAM_KEYS, encodeStreamKeys(download.request.streamKeys)); + values.put(COLUMN_CUSTOM_CACHE_KEY, download.request.customCacheKey); + values.put(COLUMN_DATA, download.request.data); + values.put(COLUMN_STATE, download.state); + values.put(COLUMN_START_TIME_MS, download.startTimeMs); + values.put(COLUMN_UPDATE_TIME_MS, download.updateTimeMs); + values.put(COLUMN_CONTENT_LENGTH, download.contentLength); + values.put(COLUMN_STOP_REASON, download.stopReason); + values.put(COLUMN_FAILURE_REASON, download.failureReason); + values.put(COLUMN_PERCENT_DOWNLOADED, download.getPercentDownloaded()); + values.put(COLUMN_BYTES_DOWNLOADED, download.getBytesDownloaded()); + values.put(COLUMN_KEY_SET_ID, keySetId); + database.replaceOrThrow(tableName, /* nullColumnHack= */ null, values); + } + + private List loadDownloadsFromVersion2(SQLiteDatabase database) { + List downloads = new ArrayList<>(); + if (!Util.tableExists(database, tableName)) { + return downloads; + } + + String[] columnsV2 = + new String[] { + "id", + "title", + "uri", + "stream_keys", + "custom_cache_key", + "data", + "state", + "start_time_ms", + "update_time_ms", + "content_length", + "stop_reason", + "failure_reason", + "percent_downloaded", + "bytes_downloaded" + }; + try (Cursor cursor = + database.query( + tableName, + columnsV2, + /* selection= */ null, + /* selectionArgs= */ null, + /* groupBy= */ null, + /* having= */ null, + /* orderBy= */ null); ) { + while (cursor.moveToNext()) { + downloads.add(getDownloadForCurrentRowV2(cursor)); + } + return downloads; + } + } + + /** Infers the MIME type from a v2 table row. 
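As a usage note for the table migration in this hunk, here is a brief sketch, not part of the patch, of reading every stored download back through the DownloadIndex interface that DefaultDownloadIndex implements; the wrapper class name is invented for illustration, and rows written by the old v2 schema are migrated by the version check shown above before any query returns.

    import com.google.android.exoplayer2.offline.Download;
    import com.google.android.exoplayer2.offline.DownloadCursor;
    import com.google.android.exoplayer2.offline.DownloadIndex;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    final class DownloadIndexReader {
      // The cursor starts before the first row, so while (moveToNext()) visits each
      // stored download exactly once; the cursor is closed by try-with-resources.
      static List<Download> loadAll(DownloadIndex downloadIndex) throws IOException {
        List<Download> downloads = new ArrayList<>();
        try (DownloadCursor cursor = downloadIndex.getDownloads()) {
          while (cursor.moveToNext()) {
            downloads.add(cursor.getDownload());
          }
        }
        return downloads;
      }
    }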
*/ + private static String inferMimeType(@Nullable String downloadType) { + if ("dash".equals(downloadType)) { + return MimeTypes.APPLICATION_MPD; + } else if ("hls".equals(downloadType)) { + return MimeTypes.APPLICATION_M3U8; + } else if ("ss".equals(downloadType)) { + return MimeTypes.APPLICATION_SS; + } else { + return MimeTypes.VIDEO_UNKNOWN; + } + } + private Cursor getCursor(String selection, @Nullable String[] selectionArgs) throws DatabaseIOException { try { @@ -328,6 +405,25 @@ private Cursor getCursor(String selection, @Nullable String[] selectionArgs) } } + @VisibleForTesting + /* package */ static String encodeStreamKeys(List streamKeys) { + StringBuilder stringBuilder = new StringBuilder(); + for (int i = 0; i < streamKeys.size(); i++) { + StreamKey streamKey = streamKeys.get(i); + stringBuilder + .append(streamKey.periodIndex) + .append('.') + .append(streamKey.groupIndex) + .append('.') + .append(streamKey.streamIndex) + .append(','); + } + if (stringBuilder.length() > 0) { + stringBuilder.setLength(stringBuilder.length() - 1); + } + return stringBuilder.toString(); + } + private static String getStateQuery(@Download.State int... states) { if (states.length == 0) { return TRUE; @@ -345,14 +441,17 @@ private static String getStateQuery(@Download.State int... states) { } private static Download getDownloadForCurrentRow(Cursor cursor) { + byte[] keySetId = cursor.getBlob(COLUMN_INDEX_KEY_SET_ID); DownloadRequest request = - new DownloadRequest( - /* id= */ cursor.getString(COLUMN_INDEX_ID), - /* type= */ cursor.getString(COLUMN_INDEX_TYPE), - /* uri= */ Uri.parse(cursor.getString(COLUMN_INDEX_URI)), - /* streamKeys= */ decodeStreamKeys(cursor.getString(COLUMN_INDEX_STREAM_KEYS)), - /* customCacheKey= */ cursor.getString(COLUMN_INDEX_CUSTOM_CACHE_KEY), - /* data= */ cursor.getBlob(COLUMN_INDEX_DATA)); + new DownloadRequest.Builder( + /* id= */ checkNotNull(cursor.getString(COLUMN_INDEX_ID)), + /* uri= */ Uri.parse(checkNotNull(cursor.getString(COLUMN_INDEX_URI)))) + .setMimeType(cursor.getString(COLUMN_INDEX_MIME_TYPE)) + .setStreamKeys(decodeStreamKeys(cursor.getString(COLUMN_INDEX_STREAM_KEYS))) + .setKeySetId(keySetId.length > 0 ? keySetId : null) + .setCustomCacheKey(cursor.getString(COLUMN_INDEX_CUSTOM_CACHE_KEY)) + .setData(cursor.getBlob(COLUMN_INDEX_DATA)) + .build(); DownloadProgress downloadProgress = new DownloadProgress(); downloadProgress.bytesDownloaded = cursor.getLong(COLUMN_INDEX_BYTES_DOWNLOADED); downloadProgress.percentDownloaded = cursor.getFloat(COLUMN_INDEX_PERCENT_DOWNLOADED); @@ -375,27 +474,58 @@ private static Download getDownloadForCurrentRow(Cursor cursor) { downloadProgress); } - private static String encodeStreamKeys(List streamKeys) { - StringBuilder stringBuilder = new StringBuilder(); - for (int i = 0; i < streamKeys.size(); i++) { - StreamKey streamKey = streamKeys.get(i); - stringBuilder - .append(streamKey.periodIndex) - .append('.') - .append(streamKey.groupIndex) - .append('.') - .append(streamKey.trackIndex) - .append(','); - } - if (stringBuilder.length() > 0) { - stringBuilder.setLength(stringBuilder.length() - 1); - } - return stringBuilder.toString(); + /** Read a {@link Download} from a table row of version 2. 
*/ + private static Download getDownloadForCurrentRowV2(Cursor cursor) { + /* + * Version 2 schema + * Index Column Type + * 0 id string + * 1 type string + * 2 uri string + * 3 stream_keys string + * 4 custom_cache_key string + * 5 data blob + * 6 state integer + * 7 start_time_ms integer + * 8 update_time_ms integer + * 9 content_length integer + * 10 stop_reason integer + * 11 failure_reason integer + * 12 percent_downloaded real + * 13 bytes_downloaded integer + */ + DownloadRequest request = + new DownloadRequest.Builder( + /* id= */ checkNotNull(cursor.getString(0)), + /* uri= */ Uri.parse(checkNotNull(cursor.getString(2)))) + .setMimeType(inferMimeType(cursor.getString(1))) + .setStreamKeys(decodeStreamKeys(cursor.getString(3))) + .setCustomCacheKey(cursor.getString(4)) + .setData(cursor.getBlob(5)) + .build(); + DownloadProgress downloadProgress = new DownloadProgress(); + downloadProgress.bytesDownloaded = cursor.getLong(13); + downloadProgress.percentDownloaded = cursor.getFloat(12); + @State int state = cursor.getInt(6); + // It's possible the database contains failure reasons for non-failed downloads, which is + // invalid. Clear them here. See https://github.com/google/ExoPlayer/issues/6785. + @FailureReason + int failureReason = + state == Download.STATE_FAILED ? cursor.getInt(11) : Download.FAILURE_REASON_NONE; + return new Download( + request, + state, + /* startTimeMs= */ cursor.getLong(7), + /* updateTimeMs= */ cursor.getLong(8), + /* contentLength= */ cursor.getLong(9), + /* stopReason= */ cursor.getInt(10), + failureReason, + downloadProgress); } - private static List decodeStreamKeys(String encodedStreamKeys) { + private static List decodeStreamKeys(@Nullable String encodedStreamKeys) { ArrayList streamKeys = new ArrayList<>(); - if (encodedStreamKeys.isEmpty()) { + if (TextUtils.isEmpty(encodedStreamKeys)) { return streamKeys; } String[] streamKeysStrings = Util.split(encodedStreamKeys, ","); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DefaultDownloaderFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DefaultDownloaderFactory.java index d8126d4736..3ed13c0963 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DefaultDownloaderFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DefaultDownloaderFactory.java @@ -15,10 +15,15 @@ */ package com.google.android.exoplayer2.offline; -import android.net.Uri; +import android.util.SparseArray; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.MediaItem; +import com.google.android.exoplayer2.upstream.cache.CacheDataSource; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; import java.lang.reflect.Constructor; -import java.util.List; +import java.util.concurrent.Executor; /** * Default {@link DownloaderFactory}, supporting creation of progressive, DASH, HLS and @@ -27,93 +32,120 @@ */ public class DefaultDownloaderFactory implements DownloaderFactory { - @Nullable private static final Constructor DASH_DOWNLOADER_CONSTRUCTOR; - @Nullable private static final Constructor HLS_DOWNLOADER_CONSTRUCTOR; - @Nullable private static final Constructor SS_DOWNLOADER_CONSTRUCTOR; + private static final SparseArray> CONSTRUCTORS = + createDownloaderConstructors(); - static { - Constructor dashDownloaderConstructor = null; - try { - // LINT.IfChange - dashDownloaderConstructor = - 
getDownloaderConstructor( - Class.forName("com.google.android.exoplayer2.source.dash.offline.DashDownloader")); - // LINT.ThenChange(../../../../../../../../proguard-rules.txt) - } catch (ClassNotFoundException e) { - // Expected if the app was built without the DASH module. - } - DASH_DOWNLOADER_CONSTRUCTOR = dashDownloaderConstructor; - Constructor hlsDownloaderConstructor = null; - try { - // LINT.IfChange - hlsDownloaderConstructor = - getDownloaderConstructor( - Class.forName("com.google.android.exoplayer2.source.hls.offline.HlsDownloader")); - // LINT.ThenChange(../../../../../../../../proguard-rules.txt) - } catch (ClassNotFoundException e) { - // Expected if the app was built without the HLS module. - } - HLS_DOWNLOADER_CONSTRUCTOR = hlsDownloaderConstructor; - Constructor ssDownloaderConstructor = null; - try { - // LINT.IfChange - ssDownloaderConstructor = - getDownloaderConstructor( - Class.forName( - "com.google.android.exoplayer2.source.smoothstreaming.offline.SsDownloader")); - // LINT.ThenChange(../../../../../../../../proguard-rules.txt) - } catch (ClassNotFoundException e) { - // Expected if the app was built without the SmoothStreaming module. - } - SS_DOWNLOADER_CONSTRUCTOR = ssDownloaderConstructor; - } + private final CacheDataSource.Factory cacheDataSourceFactory; + private final Executor executor; - private final DownloaderConstructorHelper downloaderConstructorHelper; + /** + * Creates an instance. + * + * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which + * downloads will be written. + * @deprecated Use {@link #DefaultDownloaderFactory(CacheDataSource.Factory, Executor)}. + */ + @Deprecated + public DefaultDownloaderFactory(CacheDataSource.Factory cacheDataSourceFactory) { + this(cacheDataSourceFactory, /* executor= */ Runnable::run); + } - /** @param downloaderConstructorHelper A helper for instantiating downloaders. */ - public DefaultDownloaderFactory(DownloaderConstructorHelper downloaderConstructorHelper) { - this.downloaderConstructorHelper = downloaderConstructorHelper; + /** + * Creates an instance. + * + * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which + * downloads will be written. + * @param executor An {@link Executor} used to download data. Passing {@code Runnable::run} will + * cause each download task to download data on its own thread. Passing an {@link Executor} + * that uses multiple threads will speed up download tasks that can be split into smaller + * parts for parallel execution. 
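To make the two-argument constructor documented just above concrete, here is a hedged sketch rather than anything from the patch itself: SimpleCache, StandaloneDatabaseProvider and DefaultHttpDataSource are assumed to be available in this ExoPlayer snapshot, and the cache directory name and thread count are arbitrary illustration values.

    import android.content.Context;
    import com.google.android.exoplayer2.database.StandaloneDatabaseProvider;
    import com.google.android.exoplayer2.offline.DefaultDownloaderFactory;
    import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;
    import com.google.android.exoplayer2.upstream.cache.CacheDataSource;
    import com.google.android.exoplayer2.upstream.cache.NoOpCacheEvictor;
    import com.google.android.exoplayer2.upstream.cache.SimpleCache;
    import java.io.File;
    import java.util.concurrent.Executors;

    final class DownloaderFactorySketch {
      static DefaultDownloaderFactory create(Context context) {
        // In a real app the SimpleCache must be a process-wide singleton for its directory.
        SimpleCache downloadCache =
            new SimpleCache(
                new File(context.getFilesDir(), "downloads"),
                new NoOpCacheEvictor(),
                new StandaloneDatabaseProvider(context));
        CacheDataSource.Factory cacheDataSourceFactory =
            new CacheDataSource.Factory()
                .setCache(downloadCache)
                .setUpstreamDataSourceFactory(new DefaultHttpDataSource.Factory());
        // A multi-threaded Executor lets segmented (DASH/HLS/SS) downloads fetch parts
        // in parallel, as the constructor Javadoc above describes.
        return new DefaultDownloaderFactory(
            cacheDataSourceFactory, Executors.newFixedThreadPool(/* nThreads= */ 6));
      }
    }

Passing Runnable::run instead keeps downloading single-threaded and sequential, which is what the deprecated one-argument constructor above falls back to.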
+ */ + public DefaultDownloaderFactory( + CacheDataSource.Factory cacheDataSourceFactory, Executor executor) { + this.cacheDataSourceFactory = Assertions.checkNotNull(cacheDataSourceFactory); + this.executor = Assertions.checkNotNull(executor); } @Override public Downloader createDownloader(DownloadRequest request) { - switch (request.type) { - case DownloadRequest.TYPE_PROGRESSIVE: + @C.ContentType + int contentType = Util.inferContentTypeForUriAndMimeType(request.uri, request.mimeType); + switch (contentType) { + case C.CONTENT_TYPE_DASH: + case C.CONTENT_TYPE_HLS: + case C.CONTENT_TYPE_SS: + return createDownloader(request, contentType); + case C.CONTENT_TYPE_OTHER: return new ProgressiveDownloader( - request.uri, request.customCacheKey, downloaderConstructorHelper); - case DownloadRequest.TYPE_DASH: - return createDownloader(request, DASH_DOWNLOADER_CONSTRUCTOR); - case DownloadRequest.TYPE_HLS: - return createDownloader(request, HLS_DOWNLOADER_CONSTRUCTOR); - case DownloadRequest.TYPE_SS: - return createDownloader(request, SS_DOWNLOADER_CONSTRUCTOR); + new MediaItem.Builder() + .setUri(request.uri) + .setCustomCacheKey(request.customCacheKey) + .build(), + cacheDataSourceFactory, + executor); default: - throw new IllegalArgumentException("Unsupported type: " + request.type); + throw new IllegalArgumentException("Unsupported type: " + contentType); } } - private Downloader createDownloader( - DownloadRequest request, @Nullable Constructor constructor) { + private Downloader createDownloader(DownloadRequest request, @C.ContentType int contentType) { + @Nullable Constructor constructor = CONSTRUCTORS.get(contentType); if (constructor == null) { - throw new IllegalStateException("Module missing for: " + request.type); + throw new IllegalStateException("Module missing for content type " + contentType); } + MediaItem mediaItem = + new MediaItem.Builder() + .setUri(request.uri) + .setStreamKeys(request.streamKeys) + .setCustomCacheKey(request.customCacheKey) + .build(); try { - return constructor.newInstance(request.uri, request.streamKeys, downloaderConstructorHelper); + return constructor.newInstance(mediaItem, cacheDataSourceFactory, executor); } catch (Exception e) { - throw new RuntimeException("Failed to instantiate downloader for: " + request.type, e); + throw new IllegalStateException( + "Failed to instantiate downloader for content type " + contentType); + } + } + + private static SparseArray> createDownloaderConstructors() { + SparseArray> array = new SparseArray<>(); + try { + array.put( + C.CONTENT_TYPE_DASH, + getDownloaderConstructor( + Class.forName("com.google.android.exoplayer2.source.dash.offline.DashDownloader"))); + } catch (ClassNotFoundException e) { + // Expected if the app was built without the DASH module. + } + + try { + array.put( + C.CONTENT_TYPE_HLS, + getDownloaderConstructor( + Class.forName("com.google.android.exoplayer2.source.hls.offline.HlsDownloader"))); + } catch (ClassNotFoundException e) { + // Expected if the app was built without the HLS module. + } + try { + array.put( + C.CONTENT_TYPE_SS, + getDownloaderConstructor( + Class.forName( + "com.google.android.exoplayer2.source.smoothstreaming.offline.SsDownloader"))); + } catch (ClassNotFoundException e) { + // Expected if the app was built without the SmoothStreaming module. 
} + return array; } - // LINT.IfChange private static Constructor getDownloaderConstructor(Class clazz) { try { return clazz .asSubclass(Downloader.class) - .getConstructor(Uri.class, List.class, DownloaderConstructorHelper.class); + .getConstructor(MediaItem.class, CacheDataSource.Factory.class, Executor.class); } catch (NoSuchMethodException e) { // The downloader is present, but the expected constructor is missing. - throw new RuntimeException("Downloader constructor missing", e); + throw new IllegalStateException("Downloader constructor missing", e); } } - // LINT.ThenChange(../../../../../../../../proguard-rules.txt) } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/Download.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/Download.java index da46120b29..62d4d57814 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/Download.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/Download.java @@ -15,12 +15,19 @@ */ package com.google.android.exoplayer2.offline; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.util.Assertions; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** Represents state of a download. */ public final class Download { @@ -30,8 +37,11 @@ public final class Download { * #STATE_DOWNLOADING}, {@link #STATE_COMPLETED}, {@link #STATE_FAILED}, {@link #STATE_REMOVING} * or {@link #STATE_RESTARTING}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({ STATE_QUEUED, STATE_STOPPED, @@ -44,13 +54,13 @@ public final class Download { public @interface State {} // Important: These constants are persisted into DownloadIndex. Do not change them. /** - * The download is waiting to be started. A download may be queued because the {@link + * The download is waiting to be started. A download may be queued because the {@code * DownloadManager} * *

    * <ul>
-   *   <li>Is {@link DownloadManager#getDownloadsPaused() paused}
-   *   <li>Has {@link DownloadManager#getRequirements() Requirements} that are not met
-   *   <li>Has already started {@link DownloadManager#getMaxParallelDownloads()
+   *   <li>Is {@code DownloadManager#getDownloadsPaused() paused}
+   *   <li>Has {@code DownloadManager#getRequirements() Requirements} that are not met
+   *   <li>Has already started {@code DownloadManager#getMaxParallelDownloads()
    *       maxParallelDownloads}
    * </ul>
      */ @@ -69,8 +79,11 @@ public final class Download { public static final int STATE_RESTARTING = 7; /** Failure reasons. Either {@link #FAILURE_REASON_NONE} or {@link #FAILURE_REASON_UNKNOWN}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({FAILURE_REASON_NONE, FAILURE_REASON_UNKNOWN}) public @interface FailureReason {} /** The download isn't failed. */ @@ -84,7 +97,7 @@ public final class Download { /** The download request. */ public final DownloadRequest request; /** The state of the download. */ - @State public final int state; + public final @State int state; /** The first time when download entry is created. */ public final long startTimeMs; /** The last update time. */ @@ -97,7 +110,7 @@ public final class Download { * If {@link #state} is {@link #STATE_FAILED} then this is the cause, otherwise {@link * #FAILURE_REASON_NONE}. */ - @FailureReason public final int failureReason; + public final @FailureReason int failureReason; /* package */ final DownloadProgress progress; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadCursor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadCursor.java index a1822fca97..ce0c84d700 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadCursor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadCursor.java @@ -28,7 +28,7 @@ public interface DownloadCursor extends Closeable { /** * Returns the current position of the cursor in the download set. The value is zero-based. When - * the download set is first returned the cursor will be at positon -1, which is before the first + * the download set is first returned the cursor will be at position -1, which is before the first * download. After the last download is returned another call to next() will leave the cursor past * the last entry, at a position of count(). * diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadException.java index 983727c14d..15be74ebed 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadException.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadException.java @@ -20,14 +20,17 @@ /** Thrown on an error during downloading. */ public final class DownloadException extends IOException { - /** @param message The message for the exception. */ + /** + * @param message The message for the exception. + */ public DownloadException(String message) { super(message); } - /** @param cause The cause for the exception. */ + /** + * @param cause The cause for the exception. 
+ */ public DownloadException(Throwable cause) { super(cause); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadHelper.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadHelper.java index 6707c1e496..6575c8fc22 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadHelper.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadHelper.java @@ -15,6 +15,9 @@ */ package com.google.android.exoplayer2.offline; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; + import android.content.Context; import android.net.Uri; import android.os.Handler; @@ -24,38 +27,44 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlaybackException; +import com.google.android.exoplayer2.MediaItem; +import com.google.android.exoplayer2.Renderer; import com.google.android.exoplayer2.RendererCapabilities; import com.google.android.exoplayer2.RenderersFactory; import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.Tracks; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.audio.AudioRendererEventListener; import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.drm.FrameworkMediaCrypto; +import com.google.android.exoplayer2.extractor.ExtractorsFactory; +import com.google.android.exoplayer2.source.DefaultMediaSourceFactory; import com.google.android.exoplayer2.source.MediaPeriod; import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; import com.google.android.exoplayer2.source.MediaSource.MediaSourceCaller; -import com.google.android.exoplayer2.source.MediaSourceFactory; -import com.google.android.exoplayer2.source.ProgressiveMediaSource; import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.TrackGroupArray; import com.google.android.exoplayer2.source.chunk.MediaChunk; import com.google.android.exoplayer2.source.chunk.MediaChunkIterator; import com.google.android.exoplayer2.trackselection.BaseTrackSelection; import com.google.android.exoplayer2.trackselection.DefaultTrackSelector; -import com.google.android.exoplayer2.trackselection.DefaultTrackSelector.Parameters; import com.google.android.exoplayer2.trackselection.DefaultTrackSelector.SelectionOverride; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.trackselection.MappingTrackSelector.MappedTrackInfo; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.trackselection.TrackSelectionOverride; +import com.google.android.exoplayer2.trackselection.TrackSelectionParameters; +import com.google.android.exoplayer2.trackselection.TrackSelectionUtil; import com.google.android.exoplayer2.trackselection.TrackSelectorResult; import com.google.android.exoplayer2.upstream.Allocator; import com.google.android.exoplayer2.upstream.BandwidthMeter; import com.google.android.exoplayer2.upstream.DataSource; -import com.google.android.exoplayer2.upstream.DataSource.Factory; import com.google.android.exoplayer2.upstream.DefaultAllocator; import com.google.android.exoplayer2.upstream.TransferListener; import com.google.android.exoplayer2.util.Assertions; 
+import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; +import com.google.android.exoplayer2.video.VideoRendererEventListener; import java.io.IOException; -import java.lang.reflect.Constructor; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -73,12 +82,12 @@ *

  * <p>A typical usage of DownloadHelper follows these steps:
  *
  * <ol>
- *   <li>Build the helper using one of the {@code forXXX} methods.
+ *   <li>Build the helper using one of the {@code forMediaItem} methods.
  *   <li>Prepare the helper using {@link #prepare(Callback)} and wait for the callback.
  *   <li>Optional: Inspect the selected tracks using {@link #getMappedTrackInfo(int)} and {@link
  *       #getTrackSelections(int, int)}, and make adjustments using {@link
- *       #clearTrackSelections(int)}, {@link #replaceTrackSelections(int, Parameters)} and {@link
- *       #addTrackSelection(int, Parameters)}.
+ *       #clearTrackSelections(int)}, {@link #replaceTrackSelections(int, TrackSelectionParameters)}
+ *       and {@link #addTrackSelection(int, TrackSelectionParameters)}.
  *   <li>Create a download request for the selected track using {@link #getDownloadRequest(byte[])}.
  *   <li>Release the helper using {@link #release()}.
  * </ol>
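The steps listed above can be read as the following compact sketch; it is illustrative only and not part of the patch, assumes a progressive MediaItem (so the simple forMediaItem(Context, MediaItem) overload applies), and the application data passed to getDownloadRequest is an arbitrary placeholder.

    import android.content.Context;
    import com.google.android.exoplayer2.MediaItem;
    import com.google.android.exoplayer2.offline.DownloadHelper;
    import com.google.android.exoplayer2.offline.DownloadRequest;
    import com.google.android.exoplayer2.util.Util;
    import java.io.IOException;

    final class DownloadHelperUsageSketch {
      static void prepareAndBuildRequest(Context context, MediaItem mediaItem) {
        // Build the helper (adaptive items would need the overload that also takes a
        // RenderersFactory and a DataSource.Factory).
        DownloadHelper helper = DownloadHelper.forMediaItem(context, mediaItem);
        // Prepare asynchronously and continue in the callback.
        helper.prepare(
            new DownloadHelper.Callback() {
              @Override
              public void onPrepared(DownloadHelper preparedHelper) {
                // Optionally adjust track selections here, then create the request;
                // the byte[] argument is opaque application data.
                DownloadRequest request =
                    preparedHelper.getDownloadRequest(Util.getUtf8Bytes("sample-data"));
                // Hand the request to a DownloadManager/DownloadService (not shown),
                // then release the helper.
                preparedHelper.release();
              }

              @Override
              public void onPrepareError(DownloadHelper preparedHelper, IOException e) {
                preparedHelper.release();
              }
            });
      }
    }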
      @@ -91,32 +100,22 @@ public final class DownloadHelper { * *

      If possible, use {@link #getDefaultTrackSelectorParameters(Context)} instead. * - * @see Parameters#DEFAULT_WITHOUT_CONTEXT - */ - public static final Parameters DEFAULT_TRACK_SELECTOR_PARAMETERS_WITHOUT_CONTEXT = - Parameters.DEFAULT_WITHOUT_CONTEXT.buildUpon().setForceHighestSupportedBitrate(true).build(); - - /** - * @deprecated This instance does not have {@link Context} constraints. Use {@link - * #getDefaultTrackSelectorParameters(Context)} instead. + * @see DefaultTrackSelector.Parameters#DEFAULT_WITHOUT_CONTEXT */ - @Deprecated - public static final Parameters DEFAULT_TRACK_SELECTOR_PARAMETERS_WITHOUT_VIEWPORT = - DEFAULT_TRACK_SELECTOR_PARAMETERS_WITHOUT_CONTEXT; - - /** - * @deprecated This instance does not have {@link Context} constraints. Use {@link - * #getDefaultTrackSelectorParameters(Context)} instead. - */ - @Deprecated - public static final DefaultTrackSelector.Parameters DEFAULT_TRACK_SELECTOR_PARAMETERS = - DEFAULT_TRACK_SELECTOR_PARAMETERS_WITHOUT_CONTEXT; + public static final DefaultTrackSelector.Parameters + DEFAULT_TRACK_SELECTOR_PARAMETERS_WITHOUT_CONTEXT = + DefaultTrackSelector.Parameters.DEFAULT_WITHOUT_CONTEXT + .buildUpon() + .setForceHighestSupportedBitrate(true) + .setConstrainAudioChannelCountToDeviceCapabilities(false) + .build(); /** Returns the default parameters used for track selection for downloading. */ public static DefaultTrackSelector.Parameters getDefaultTrackSelectorParameters(Context context) { - return Parameters.getDefaults(context) + return DefaultTrackSelector.Parameters.getDefaults(context) .buildUpon() .setForceHighestSupportedBitrate(true) + .setConstrainAudioChannelCountToDeviceCapabilities(false) .build(); } @@ -142,89 +141,51 @@ public interface Callback { /** Thrown at an attempt to download live content. */ public static class LiveContentUnsupportedException extends IOException {} - @Nullable - private static final Constructor DASH_FACTORY_CONSTRUCTOR = - getConstructor("com.google.android.exoplayer2.source.dash.DashMediaSource$Factory"); - - @Nullable - private static final Constructor SS_FACTORY_CONSTRUCTOR = - getConstructor("com.google.android.exoplayer2.source.smoothstreaming.SsMediaSource$Factory"); - - @Nullable - private static final Constructor HLS_FACTORY_CONSTRUCTOR = - getConstructor("com.google.android.exoplayer2.source.hls.HlsMediaSource$Factory"); - - /** @deprecated Use {@link #forProgressive(Context, Uri)} */ - @Deprecated - @SuppressWarnings("deprecation") - public static DownloadHelper forProgressive(Uri uri) { - return forProgressive(uri, /* cacheKey= */ null); - } - /** - * Creates a {@link DownloadHelper} for progressive streams. + * Extracts renderer capabilities for the renderers created by the provided renderers factory. * - * @param context Any {@link Context}. - * @param uri A stream {@link Uri}. - * @return A {@link DownloadHelper} for progressive streams. + * @param renderersFactory A {@link RenderersFactory}. + * @return The {@link RendererCapabilities} for each renderer created by the {@code + * renderersFactory}. 
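A minimal illustration of the new getRendererCapabilities helper documented above, not taken from the patch; DefaultRenderersFactory is just one possible RenderersFactory implementation.

    import android.content.Context;
    import com.google.android.exoplayer2.DefaultRenderersFactory;
    import com.google.android.exoplayer2.RendererCapabilities;
    import com.google.android.exoplayer2.offline.DownloadHelper;

    final class RendererCapabilitiesSketch {
      // The returned capabilities reflect whatever renderers the factory creates, which
      // is what DownloadHelper uses to decide which tracks are selectable for download.
      static RendererCapabilities[] forDefaultRenderers(Context context) {
        return DownloadHelper.getRendererCapabilities(new DefaultRenderersFactory(context));
      }
    }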
*/ - public static DownloadHelper forProgressive(Context context, Uri uri) { - return forProgressive(context, uri, /* cacheKey= */ null); + public static RendererCapabilities[] getRendererCapabilities(RenderersFactory renderersFactory) { + Renderer[] renderers = + renderersFactory.createRenderers( + Util.createHandlerForCurrentOrMainLooper(), + new VideoRendererEventListener() {}, + new AudioRendererEventListener() {}, + (cues) -> {}, + (metadata) -> {}); + RendererCapabilities[] capabilities = new RendererCapabilities[renderers.length]; + for (int i = 0; i < renderers.length; i++) { + capabilities[i] = renderers[i].getCapabilities(); + } + return capabilities; } - /** @deprecated Use {@link #forProgressive(Context, Uri, String)} */ + /** + * @deprecated Use {@link #forMediaItem(Context, MediaItem)} + */ @Deprecated - public static DownloadHelper forProgressive(Uri uri, @Nullable String cacheKey) { - return new DownloadHelper( - DownloadRequest.TYPE_PROGRESSIVE, - uri, - cacheKey, - /* mediaSource= */ null, - DEFAULT_TRACK_SELECTOR_PARAMETERS_WITHOUT_VIEWPORT, - /* rendererCapabilities= */ new RendererCapabilities[0]); + public static DownloadHelper forProgressive(Context context, Uri uri) { + return forMediaItem(context, new MediaItem.Builder().setUri(uri).build()); } /** - * Creates a {@link DownloadHelper} for progressive streams. - * - * @param context Any {@link Context}. - * @param uri A stream {@link Uri}. - * @param cacheKey An optional cache key. - * @return A {@link DownloadHelper} for progressive streams. + * @deprecated Use {@link #forMediaItem(Context, MediaItem)} */ - public static DownloadHelper forProgressive(Context context, Uri uri, @Nullable String cacheKey) { - return new DownloadHelper( - DownloadRequest.TYPE_PROGRESSIVE, - uri, - cacheKey, - /* mediaSource= */ null, - getDefaultTrackSelectorParameters(context), - /* rendererCapabilities= */ new RendererCapabilities[0]); - } - - /** @deprecated Use {@link #forDash(Context, Uri, Factory, RenderersFactory)} */ @Deprecated - public static DownloadHelper forDash( - Uri uri, DataSource.Factory dataSourceFactory, RenderersFactory renderersFactory) { - return forDash( - uri, - dataSourceFactory, - renderersFactory, - /* drmSessionManager= */ null, - DEFAULT_TRACK_SELECTOR_PARAMETERS_WITHOUT_VIEWPORT); + public static DownloadHelper forProgressive(Context context, Uri uri, @Nullable String cacheKey) { + return forMediaItem( + context, new MediaItem.Builder().setUri(uri).setCustomCacheKey(cacheKey).build()); } /** - * Creates a {@link DownloadHelper} for DASH streams. - * - * @param context Any {@link Context}. - * @param uri A manifest {@link Uri}. - * @param dataSourceFactory A {@link DataSource.Factory} used to load the manifest. - * @param renderersFactory A {@link RenderersFactory} creating the renderers for which tracks are - * selected. - * @return A {@link DownloadHelper} for DASH streams. - * @throws IllegalStateException If the DASH module is missing. + * @deprecated Use {@link #forMediaItem(MediaItem, TrackSelectionParameters, RenderersFactory, + * DataSource.Factory)} instead. */ + @SuppressWarnings("deprecation") + @Deprecated public static DownloadHelper forDash( Context context, Uri uri, @@ -239,62 +200,30 @@ public static DownloadHelper forDash( } /** - * Creates a {@link DownloadHelper} for DASH streams. - * - * @param uri A manifest {@link Uri}. - * @param dataSourceFactory A {@link DataSource.Factory} used to load the manifest. 
- * @param renderersFactory A {@link RenderersFactory} creating the renderers for which tracks are - * selected. - * @param drmSessionManager An optional {@link DrmSessionManager}. Used to help determine which - * tracks can be selected. - * @param trackSelectorParameters {@link DefaultTrackSelector.Parameters} for selecting tracks for - * downloading. - * @return A {@link DownloadHelper} for DASH streams. - * @throws IllegalStateException If the DASH module is missing. + * @deprecated Use {@link #forMediaItem(MediaItem, TrackSelectionParameters, RenderersFactory, + * DataSource.Factory, DrmSessionManager)} instead. */ + @Deprecated public static DownloadHelper forDash( Uri uri, DataSource.Factory dataSourceFactory, RenderersFactory renderersFactory, - @Nullable DrmSessionManager drmSessionManager, - DefaultTrackSelector.Parameters trackSelectorParameters) { - return new DownloadHelper( - DownloadRequest.TYPE_DASH, - uri, - /* cacheKey= */ null, - createMediaSourceInternal( - DASH_FACTORY_CONSTRUCTOR, - uri, - dataSourceFactory, - drmSessionManager, - /* streamKeys= */ null), - trackSelectorParameters, - Util.getRendererCapabilities(renderersFactory)); - } - - /** @deprecated Use {@link #forHls(Context, Uri, Factory, RenderersFactory)} */ - @Deprecated - public static DownloadHelper forHls( - Uri uri, DataSource.Factory dataSourceFactory, RenderersFactory renderersFactory) { - return forHls( - uri, - dataSourceFactory, + @Nullable DrmSessionManager drmSessionManager, + TrackSelectionParameters trackSelectionParameters) { + return forMediaItem( + new MediaItem.Builder().setUri(uri).setMimeType(MimeTypes.APPLICATION_MPD).build(), + trackSelectionParameters, renderersFactory, - /* drmSessionManager= */ null, - DEFAULT_TRACK_SELECTOR_PARAMETERS_WITHOUT_VIEWPORT); + dataSourceFactory, + drmSessionManager); } /** - * Creates a {@link DownloadHelper} for HLS streams. - * - * @param context Any {@link Context}. - * @param uri A playlist {@link Uri}. - * @param dataSourceFactory A {@link DataSource.Factory} used to load the playlist. - * @param renderersFactory A {@link RenderersFactory} creating the renderers for which tracks are - * selected. - * @return A {@link DownloadHelper} for HLS streams. - * @throws IllegalStateException If the HLS module is missing. + * @deprecated Use {@link #forMediaItem(MediaItem, TrackSelectionParameters, RenderersFactory, + * DataSource.Factory)} instead. */ + @SuppressWarnings("deprecation") + @Deprecated public static DownloadHelper forHls( Context context, Uri uri, @@ -309,40 +238,29 @@ public static DownloadHelper forHls( } /** - * Creates a {@link DownloadHelper} for HLS streams. - * - * @param uri A playlist {@link Uri}. - * @param dataSourceFactory A {@link DataSource.Factory} used to load the playlist. - * @param renderersFactory A {@link RenderersFactory} creating the renderers for which tracks are - * selected. - * @param drmSessionManager An optional {@link DrmSessionManager}. Used to help determine which - * tracks can be selected. - * @param trackSelectorParameters {@link DefaultTrackSelector.Parameters} for selecting tracks for - * downloading. - * @return A {@link DownloadHelper} for HLS streams. - * @throws IllegalStateException If the HLS module is missing. + * @deprecated Use {@link #forMediaItem(MediaItem, TrackSelectionParameters, RenderersFactory, + * DataSource.Factory, DrmSessionManager)} instead. 
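To make the deprecation path concrete, this sketch mirrors what the rewritten forDash overload in this hunk now does: wrap the manifest Uri in a MediaItem tagged with the DASH MIME type and delegate to forMediaItem. It is illustrative only, and all argument values come from the caller.

    import android.net.Uri;
    import androidx.annotation.Nullable;
    import com.google.android.exoplayer2.MediaItem;
    import com.google.android.exoplayer2.RenderersFactory;
    import com.google.android.exoplayer2.drm.DrmSessionManager;
    import com.google.android.exoplayer2.offline.DownloadHelper;
    import com.google.android.exoplayer2.trackselection.TrackSelectionParameters;
    import com.google.android.exoplayer2.upstream.DataSource;
    import com.google.android.exoplayer2.util.MimeTypes;

    final class ForDashMigrationSketch {
      static DownloadHelper equivalentForMediaItemCall(
          Uri manifestUri,
          DataSource.Factory dataSourceFactory,
          RenderersFactory renderersFactory,
          @Nullable DrmSessionManager drmSessionManager,
          TrackSelectionParameters trackSelectionParameters) {
        // Equivalent of the deprecated forDash(uri, ...) call after this change.
        MediaItem mediaItem =
            new MediaItem.Builder()
                .setUri(manifestUri)
                .setMimeType(MimeTypes.APPLICATION_MPD)
                .build();
        return DownloadHelper.forMediaItem(
            mediaItem, trackSelectionParameters, renderersFactory, dataSourceFactory,
            drmSessionManager);
      }
    }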
*/ + @Deprecated public static DownloadHelper forHls( Uri uri, DataSource.Factory dataSourceFactory, RenderersFactory renderersFactory, - @Nullable DrmSessionManager drmSessionManager, - DefaultTrackSelector.Parameters trackSelectorParameters) { - return new DownloadHelper( - DownloadRequest.TYPE_HLS, - uri, - /* cacheKey= */ null, - createMediaSourceInternal( - HLS_FACTORY_CONSTRUCTOR, - uri, - dataSourceFactory, - drmSessionManager, - /* streamKeys= */ null), - trackSelectorParameters, - Util.getRendererCapabilities(renderersFactory)); + @Nullable DrmSessionManager drmSessionManager, + TrackSelectionParameters trackSelectionParameters) { + return forMediaItem( + new MediaItem.Builder().setUri(uri).setMimeType(MimeTypes.APPLICATION_M3U8).build(), + trackSelectionParameters, + renderersFactory, + dataSourceFactory, + drmSessionManager); } - /** @deprecated Use {@link #forSmoothStreaming(Context, Uri, Factory, RenderersFactory)} */ + /** + * @deprecated Use {@link #forMediaItem(MediaItem, TrackSelectionParameters, RenderersFactory, + * DataSource.Factory)} instead. + */ + @SuppressWarnings("deprecation") @Deprecated public static DownloadHelper forSmoothStreaming( Uri uri, DataSource.Factory dataSourceFactory, RenderersFactory renderersFactory) { @@ -351,20 +269,15 @@ public static DownloadHelper forSmoothStreaming( dataSourceFactory, renderersFactory, /* drmSessionManager= */ null, - DEFAULT_TRACK_SELECTOR_PARAMETERS_WITHOUT_VIEWPORT); + DEFAULT_TRACK_SELECTOR_PARAMETERS_WITHOUT_CONTEXT); } /** - * Creates a {@link DownloadHelper} for SmoothStreaming streams. - * - * @param context Any {@link Context}. - * @param uri A manifest {@link Uri}. - * @param dataSourceFactory A {@link DataSource.Factory} used to load the manifest. - * @param renderersFactory A {@link RenderersFactory} creating the renderers for which tracks are - * selected. - * @return A {@link DownloadHelper} for SmoothStreaming streams. - * @throws IllegalStateException If the SmoothStreaming module is missing. + * @deprecated Use {@link #forMediaItem(MediaItem, TrackSelectionParameters, RenderersFactory, + * DataSource.Factory)} instead. */ + @SuppressWarnings("deprecation") + @Deprecated public static DownloadHelper forSmoothStreaming( Context context, Uri uri, @@ -379,41 +292,139 @@ public static DownloadHelper forSmoothStreaming( } /** - * Creates a {@link DownloadHelper} for SmoothStreaming streams. + * @deprecated Use {@link #forMediaItem(MediaItem, TrackSelectionParameters, RenderersFactory, + * DataSource.Factory, DrmSessionManager)} instead. + */ + @Deprecated + public static DownloadHelper forSmoothStreaming( + Uri uri, + DataSource.Factory dataSourceFactory, + RenderersFactory renderersFactory, + @Nullable DrmSessionManager drmSessionManager, + TrackSelectionParameters trackSelectionParameters) { + return forMediaItem( + new MediaItem.Builder().setUri(uri).setMimeType(MimeTypes.APPLICATION_SS).build(), + trackSelectionParameters, + renderersFactory, + dataSourceFactory, + drmSessionManager); + } + + /** + * Creates a {@link DownloadHelper} for the given progressive media item. + * + * @param context The context. + * @param mediaItem A {@link MediaItem}. + * @return A {@link DownloadHelper} for progressive streams. + * @throws IllegalStateException If the media item is of type DASH, HLS or SmoothStreaming. 
+ */ + public static DownloadHelper forMediaItem(Context context, MediaItem mediaItem) { + Assertions.checkArgument(isProgressive(checkNotNull(mediaItem.localConfiguration))); + return forMediaItem( + mediaItem, + getDefaultTrackSelectorParameters(context), + /* renderersFactory= */ null, + /* dataSourceFactory= */ null, + /* drmSessionManager= */ null); + } + + /** + * Creates a {@link DownloadHelper} for the given media item. + * + * @param context The context. + * @param mediaItem A {@link MediaItem}. + * @param renderersFactory A {@link RenderersFactory} creating the renderers for which tracks are + * selected. + * @param dataSourceFactory A {@link DataSource.Factory} used to load the manifest for adaptive + * streams. This argument is required for adaptive streams and ignored for progressive + * streams. + * @return A {@link DownloadHelper}. + * @throws IllegalStateException If the corresponding module is missing for DASH, HLS or + * SmoothStreaming media items. + * @throws IllegalArgumentException If the {@code dataSourceFactory} is null for adaptive streams. + */ + public static DownloadHelper forMediaItem( + Context context, + MediaItem mediaItem, + @Nullable RenderersFactory renderersFactory, + @Nullable DataSource.Factory dataSourceFactory) { + return forMediaItem( + mediaItem, + getDefaultTrackSelectorParameters(context), + renderersFactory, + dataSourceFactory, + /* drmSessionManager= */ null); + } + + /** + * Creates a {@link DownloadHelper} for the given media item. * - * @param uri A manifest {@link Uri}. - * @param dataSourceFactory A {@link DataSource.Factory} used to load the manifest. + * @param mediaItem A {@link MediaItem}. * @param renderersFactory A {@link RenderersFactory} creating the renderers for which tracks are * selected. + * @param trackSelectionParameters {@link TrackSelectionParameters} for selecting tracks for + * downloading. + * @param dataSourceFactory A {@link DataSource.Factory} used to load the manifest for adaptive + * streams. This argument is required for adaptive streams and ignored for progressive + * streams. + * @return A {@link DownloadHelper}. + * @throws IllegalStateException If the corresponding module is missing for DASH, HLS or + * SmoothStreaming media items. + * @throws IllegalArgumentException If the {@code dataSourceFactory} is null for adaptive streams. + */ + public static DownloadHelper forMediaItem( + MediaItem mediaItem, + TrackSelectionParameters trackSelectionParameters, + @Nullable RenderersFactory renderersFactory, + @Nullable DataSource.Factory dataSourceFactory) { + return forMediaItem( + mediaItem, + trackSelectionParameters, + renderersFactory, + dataSourceFactory, + /* drmSessionManager= */ null); + } + + /** + * Creates a {@link DownloadHelper} for the given media item. + * + * @param mediaItem A {@link MediaItem}. + * @param renderersFactory A {@link RenderersFactory} creating the renderers for which tracks are + * selected. + * @param trackSelectionParameters {@link TrackSelectionParameters} for selecting tracks for + * downloading. + * @param dataSourceFactory A {@link DataSource.Factory} used to load the manifest for adaptive + * streams. This argument is required for adaptive streams and ignored for progressive + * streams. * @param drmSessionManager An optional {@link DrmSessionManager}. Used to help determine which * tracks can be selected. - * @param trackSelectorParameters {@link DefaultTrackSelector.Parameters} for selecting tracks for - * downloading. 
- * @return A {@link DownloadHelper} for SmoothStreaming streams. - * @throws IllegalStateException If the SmoothStreaming module is missing. + * @return A {@link DownloadHelper}. + * @throws IllegalStateException If the corresponding module is missing for DASH, HLS or + * SmoothStreaming media items. + * @throws IllegalArgumentException If the {@code dataSourceFactory} is null for adaptive streams. */ - public static DownloadHelper forSmoothStreaming( - Uri uri, - DataSource.Factory dataSourceFactory, - RenderersFactory renderersFactory, - @Nullable DrmSessionManager drmSessionManager, - DefaultTrackSelector.Parameters trackSelectorParameters) { + public static DownloadHelper forMediaItem( + MediaItem mediaItem, + TrackSelectionParameters trackSelectionParameters, + @Nullable RenderersFactory renderersFactory, + @Nullable DataSource.Factory dataSourceFactory, + @Nullable DrmSessionManager drmSessionManager) { + boolean isProgressive = isProgressive(checkNotNull(mediaItem.localConfiguration)); + Assertions.checkArgument(isProgressive || dataSourceFactory != null); return new DownloadHelper( - DownloadRequest.TYPE_SS, - uri, - /* cacheKey= */ null, - createMediaSourceInternal( - SS_FACTORY_CONSTRUCTOR, - uri, - dataSourceFactory, - drmSessionManager, - /* streamKeys= */ null), - trackSelectorParameters, - Util.getRendererCapabilities(renderersFactory)); + mediaItem, + isProgressive + ? null + : createMediaSourceInternal( + mediaItem, castNonNull(dataSourceFactory), drmSessionManager), + trackSelectionParameters, + renderersFactory != null + ? getRendererCapabilities(renderersFactory) + : new RendererCapabilities[0]); } /** - * Equivalent to {@link #createMediaSource(DownloadRequest, Factory, DrmSessionManager) + * Equivalent to {@link #createMediaSource(DownloadRequest, DataSource.Factory, DrmSessionManager) * createMediaSource(downloadRequest, dataSourceFactory, null)}. 
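For reference, a minimal sketch (not part of this patch) of how the new forMediaItem entry point replaces the deprecated per-format helpers. The manifest URI, the use of DefaultHttpDataSource, and the callback body are illustrative assumptions:

    import android.content.Context;
    import com.google.android.exoplayer2.DefaultRenderersFactory;
    import com.google.android.exoplayer2.MediaItem;
    import com.google.android.exoplayer2.offline.DownloadHelper;
    import com.google.android.exoplayer2.offline.DownloadRequest;
    import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;
    import com.google.android.exoplayer2.util.MimeTypes;
    import java.io.IOException;

    final class DownloadHelperUsageSketch {
      static void prepareDashDownload(Context context) {
        // Placeholder manifest URI; the MIME type hint lets the helper route to the DASH module.
        MediaItem item =
            new MediaItem.Builder()
                .setUri("https://example.com/manifest.mpd")
                .setMimeType(MimeTypes.APPLICATION_MPD)
                .build();
        DownloadHelper helper =
            DownloadHelper.forMediaItem(
                context,
                item,
                new DefaultRenderersFactory(context),
                new DefaultHttpDataSource.Factory());
        helper.prepare(
            new DownloadHelper.Callback() {
              @Override
              public void onPrepared(DownloadHelper helper) {
                // Build a request from the default track selection, then release the helper.
                DownloadRequest request = helper.getDownloadRequest(/* data= */ null);
                helper.release();
              }

              @Override
              public void onPrepareError(DownloadHelper helper, IOException e) {
                helper.release();
              }
            });
      }
    }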
*/ public static MediaSource createMediaSource( @@ -434,36 +445,12 @@ public static MediaSource createMediaSource( public static MediaSource createMediaSource( DownloadRequest downloadRequest, DataSource.Factory dataSourceFactory, - @Nullable DrmSessionManager drmSessionManager) { - @Nullable Constructor constructor; - switch (downloadRequest.type) { - case DownloadRequest.TYPE_DASH: - constructor = DASH_FACTORY_CONSTRUCTOR; - break; - case DownloadRequest.TYPE_SS: - constructor = SS_FACTORY_CONSTRUCTOR; - break; - case DownloadRequest.TYPE_HLS: - constructor = HLS_FACTORY_CONSTRUCTOR; - break; - case DownloadRequest.TYPE_PROGRESSIVE: - return new ProgressiveMediaSource.Factory(dataSourceFactory) - .setCustomCacheKey(downloadRequest.customCacheKey) - .createMediaSource(downloadRequest.uri); - default: - throw new IllegalStateException("Unsupported type: " + downloadRequest.type); - } + @Nullable DrmSessionManager drmSessionManager) { return createMediaSourceInternal( - constructor, - downloadRequest.uri, - dataSourceFactory, - drmSessionManager, - downloadRequest.streamKeys); + downloadRequest.toMediaItem(), dataSourceFactory, drmSessionManager); } - private final String downloadType; - private final Uri uri; - @Nullable private final String cacheKey; + private final MediaItem.LocalConfiguration localConfiguration; @Nullable private final MediaSource mediaSource; private final DefaultTrackSelector trackSelector; private final RendererCapabilities[] rendererCapabilities; @@ -476,39 +463,34 @@ public static MediaSource createMediaSource( private @MonotonicNonNull MediaPreparer mediaPreparer; private TrackGroupArray @MonotonicNonNull [] trackGroupArrays; private MappedTrackInfo @MonotonicNonNull [] mappedTrackInfos; - private List @MonotonicNonNull [][] trackSelectionsByPeriodAndRenderer; - private List @MonotonicNonNull [][] immutableTrackSelectionsByPeriodAndRenderer; + private List @MonotonicNonNull [][] trackSelectionsByPeriodAndRenderer; + private List @MonotonicNonNull [][] + immutableTrackSelectionsByPeriodAndRenderer; /** * Creates download helper. * - * @param downloadType A download type. This value will be used as {@link DownloadRequest#type}. - * @param uri A {@link Uri}. - * @param cacheKey An optional cache key. + * @param mediaItem The media item. * @param mediaSource A {@link MediaSource} for which tracks are selected, or null if no track * selection needs to be made. - * @param trackSelectorParameters {@link DefaultTrackSelector.Parameters} for selecting tracks for + * @param trackSelectionParameters {@link TrackSelectionParameters} for selecting tracks for * downloading. * @param rendererCapabilities The {@link RendererCapabilities} of the renderers for which tracks * are selected. 
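Since createMediaSource now routes through downloadRequest.toMediaItem(), a downloaded item can be turned back into a playable MediaSource with only a request and the download cache. A rough sketch follows; the cache wiring and the upstream HTTP factory are assumptions, not part of the patch:

    import com.google.android.exoplayer2.offline.DownloadHelper;
    import com.google.android.exoplayer2.offline.DownloadRequest;
    import com.google.android.exoplayer2.source.MediaSource;
    import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;
    import com.google.android.exoplayer2.upstream.cache.Cache;
    import com.google.android.exoplayer2.upstream.cache.CacheDataSource;

    final class PlayDownloadSketch {
      // Builds a MediaSource for previously downloaded content, reading through the download cache.
      static MediaSource mediaSourceFor(DownloadRequest request, Cache downloadCache) {
        CacheDataSource.Factory cacheFactory =
            new CacheDataSource.Factory()
                .setCache(downloadCache)
                .setUpstreamDataSourceFactory(new DefaultHttpDataSource.Factory());
        return DownloadHelper.createMediaSource(request, cacheFactory);
      }
    }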
*/ public DownloadHelper( - String downloadType, - Uri uri, - @Nullable String cacheKey, + MediaItem mediaItem, @Nullable MediaSource mediaSource, - DefaultTrackSelector.Parameters trackSelectorParameters, + TrackSelectionParameters trackSelectionParameters, RendererCapabilities[] rendererCapabilities) { - this.downloadType = downloadType; - this.uri = uri; - this.cacheKey = cacheKey; + this.localConfiguration = checkNotNull(mediaItem.localConfiguration); this.mediaSource = mediaSource; this.trackSelector = - new DefaultTrackSelector(trackSelectorParameters, new DownloadTrackSelection.Factory()); + new DefaultTrackSelector(trackSelectionParameters, new DownloadTrackSelection.Factory()); this.rendererCapabilities = rendererCapabilities; this.scratchSet = new SparseIntArray(); - trackSelector.init(/* listener= */ () -> {}, new DummyBandwidthMeter()); - callbackHandler = new Handler(Util.getLooper()); + trackSelector.init(/* listener= */ () -> {}, new FakeBandwidthMeter()); + callbackHandler = Util.createHandlerForCurrentOrMainLooper(); window = new Timeline.Window(); } @@ -533,6 +515,7 @@ public void release() { if (mediaPreparer != null) { mediaPreparer.release(); } + trackSelector.release(); } /** @@ -562,6 +545,20 @@ public int getPeriodCount() { return trackGroupArrays.length; } + /** + * Returns {@link Tracks} for the given period. Must not be called until after preparation + * completes. + * + * @param periodIndex The period index. + * @return The {@link Tracks} for the period. May be {@link Tracks#EMPTY} for single stream + * content. + */ + public Tracks getTracks(int periodIndex) { + assertPreparedWithMedia(); + return TrackSelectionUtil.buildTracks( + mappedTrackInfos[periodIndex], immutableTrackSelectionsByPeriodAndRenderer[periodIndex]); + } + /** * Returns the track groups for the given period. Must not be called until after preparation * completes. @@ -590,14 +587,14 @@ public MappedTrackInfo getMappedTrackInfo(int periodIndex) { } /** - * Returns all {@link TrackSelection track selections} for a period and renderer. Must not be + * Returns all {@link ExoTrackSelection track selections} for a period and renderer. Must not be * called until after preparation completes. * * @param periodIndex The period index. * @param rendererIndex The renderer index. - * @return A list of selected {@link TrackSelection track selections}. + * @return A list of selected {@link ExoTrackSelection track selections}. */ - public List getTrackSelections(int periodIndex, int rendererIndex) { + public List getTrackSelections(int periodIndex, int rendererIndex) { assertPreparedWithMedia(); return immutableTrackSelectionsByPeriodAndRenderer[periodIndex][rendererIndex]; } @@ -620,13 +617,18 @@ public void clearTrackSelections(int periodIndex) { * completes. * * @param periodIndex The period index for which the track selection is replaced. - * @param trackSelectorParameters The {@link DefaultTrackSelector.Parameters} to obtain the new + * @param trackSelectionParameters The {@link TrackSelectionParameters} to obtain the new * selection of tracks. 
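A short sketch (assumed usage) of inspecting the new per-period Tracks view once preparation has completed; the log tag and message are placeholders:

    import com.google.android.exoplayer2.Tracks;
    import com.google.android.exoplayer2.offline.DownloadHelper;
    import com.google.android.exoplayer2.util.Log;

    final class TracksInspectionSketch {
      static void logSupportedTracks(DownloadHelper preparedHelper) {
        for (int periodIndex = 0; periodIndex < preparedHelper.getPeriodCount(); periodIndex++) {
          Tracks tracks = preparedHelper.getTracks(periodIndex);
          for (Tracks.Group group : tracks.getGroups()) {
            for (int i = 0; i < group.length; i++) {
              if (group.isTrackSupported(i)) {
                Log.d("DownloadHelper", "period " + periodIndex + " supports " + group.getTrackFormat(i));
              }
            }
          }
        }
      }
    }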
*/ public void replaceTrackSelections( - int periodIndex, DefaultTrackSelector.Parameters trackSelectorParameters) { - clearTrackSelections(periodIndex); - addTrackSelection(periodIndex, trackSelectorParameters); + int periodIndex, TrackSelectionParameters trackSelectionParameters) { + try { + assertPreparedWithMedia(); + clearTrackSelections(periodIndex); + addTrackSelectionInternal(periodIndex, trackSelectionParameters); + } catch (ExoPlaybackException e) { + throw new IllegalStateException(e); + } } /** @@ -634,14 +636,17 @@ public void replaceTrackSelections( * completes. * * @param periodIndex The period index this track selection is added for. - * @param trackSelectorParameters The {@link DefaultTrackSelector.Parameters} to obtain the new + * @param trackSelectionParameters The {@link TrackSelectionParameters} to obtain the new * selection of tracks. */ public void addTrackSelection( - int periodIndex, DefaultTrackSelector.Parameters trackSelectorParameters) { - assertPreparedWithMedia(); - trackSelector.setParameters(trackSelectorParameters); - runTrackSelection(periodIndex); + int periodIndex, TrackSelectionParameters trackSelectionParameters) { + try { + assertPreparedWithMedia(); + addTrackSelectionInternal(periodIndex, trackSelectionParameters); + } catch (ExoPlaybackException e) { + throw new IllegalStateException(e); + } } /** @@ -653,21 +658,31 @@ public void addTrackSelection( * selection, as IETF BCP 47 conformant tags. */ public void addAudioLanguagesToSelection(String... languages) { - assertPreparedWithMedia(); - for (int periodIndex = 0; periodIndex < mappedTrackInfos.length; periodIndex++) { - DefaultTrackSelector.ParametersBuilder parametersBuilder = + try { + assertPreparedWithMedia(); + + TrackSelectionParameters.Builder parametersBuilder = DEFAULT_TRACK_SELECTOR_PARAMETERS_WITHOUT_CONTEXT.buildUpon(); - MappedTrackInfo mappedTrackInfo = mappedTrackInfos[periodIndex]; - int rendererCount = mappedTrackInfo.getRendererCount(); - for (int rendererIndex = 0; rendererIndex < rendererCount; rendererIndex++) { - if (mappedTrackInfo.getRendererType(rendererIndex) != C.TRACK_TYPE_AUDIO) { - parametersBuilder.setRendererDisabled(rendererIndex, /* disabled= */ true); - } + // Prefer highest supported bitrate for downloads. + parametersBuilder.setForceHighestSupportedBitrate(true); + // Disable all non-audio track types supported by the renderers. + for (RendererCapabilities capabilities : rendererCapabilities) { + @C.TrackType int trackType = capabilities.getTrackType(); + parametersBuilder.setTrackTypeDisabled( + trackType, /* disabled= */ trackType != C.TRACK_TYPE_AUDIO); } + + // Add a track selection to each period for each of the languages. + int periodCount = getPeriodCount(); for (String language : languages) { - parametersBuilder.setPreferredAudioLanguage(language); - addTrackSelection(periodIndex, parametersBuilder.build()); + TrackSelectionParameters parameters = + parametersBuilder.setPreferredAudioLanguage(language).build(); + for (int periodIndex = 0; periodIndex < periodCount; periodIndex++) { + addTrackSelectionInternal(periodIndex, parameters); + } } + } catch (ExoPlaybackException e) { + throw new IllegalStateException(e); } } @@ -683,22 +698,32 @@ public void addAudioLanguagesToSelection(String... languages) { */ public void addTextLanguagesToSelection( boolean selectUndeterminedTextLanguage, String... 
languages) { - assertPreparedWithMedia(); - for (int periodIndex = 0; periodIndex < mappedTrackInfos.length; periodIndex++) { - DefaultTrackSelector.ParametersBuilder parametersBuilder = + try { + assertPreparedWithMedia(); + + TrackSelectionParameters.Builder parametersBuilder = DEFAULT_TRACK_SELECTOR_PARAMETERS_WITHOUT_CONTEXT.buildUpon(); - MappedTrackInfo mappedTrackInfo = mappedTrackInfos[periodIndex]; - int rendererCount = mappedTrackInfo.getRendererCount(); - for (int rendererIndex = 0; rendererIndex < rendererCount; rendererIndex++) { - if (mappedTrackInfo.getRendererType(rendererIndex) != C.TRACK_TYPE_TEXT) { - parametersBuilder.setRendererDisabled(rendererIndex, /* disabled= */ true); - } - } parametersBuilder.setSelectUndeterminedTextLanguage(selectUndeterminedTextLanguage); + // Prefer highest supported bitrate for downloads. + parametersBuilder.setForceHighestSupportedBitrate(true); + // Disable all non-text track types supported by the renderers. + for (RendererCapabilities capabilities : rendererCapabilities) { + @C.TrackType int trackType = capabilities.getTrackType(); + parametersBuilder.setTrackTypeDisabled( + trackType, /* disabled= */ trackType != C.TRACK_TYPE_TEXT); + } + + // Add a track selection to each period for each of the languages. + int periodCount = getPeriodCount(); for (String language : languages) { - parametersBuilder.setPreferredTextLanguage(language); - addTrackSelection(periodIndex, parametersBuilder.build()); + TrackSelectionParameters parameters = + parametersBuilder.setPreferredTextLanguage(language).build(); + for (int periodIndex = 0; periodIndex < periodCount; periodIndex++) { + addTrackSelectionInternal(periodIndex, parameters); + } } + } catch (ExoPlaybackException e) { + throw new IllegalStateException(e); } } @@ -718,19 +743,24 @@ public void addTrackSelectionForSingleRenderer( int rendererIndex, DefaultTrackSelector.Parameters trackSelectorParameters, List overrides) { - assertPreparedWithMedia(); - DefaultTrackSelector.ParametersBuilder builder = trackSelectorParameters.buildUpon(); - for (int i = 0; i < mappedTrackInfos[periodIndex].getRendererCount(); i++) { - builder.setRendererDisabled(/* rendererIndex= */ i, /* disabled= */ i != rendererIndex); - } - if (overrides.isEmpty()) { - addTrackSelection(periodIndex, builder.build()); - } else { - TrackGroupArray trackGroupArray = mappedTrackInfos[periodIndex].getTrackGroups(rendererIndex); - for (int i = 0; i < overrides.size(); i++) { - builder.setSelectionOverride(rendererIndex, trackGroupArray, overrides.get(i)); - addTrackSelection(periodIndex, builder.build()); + try { + assertPreparedWithMedia(); + DefaultTrackSelector.Parameters.Builder builder = trackSelectorParameters.buildUpon(); + for (int i = 0; i < mappedTrackInfos[periodIndex].getRendererCount(); i++) { + builder.setRendererDisabled(/* rendererIndex= */ i, /* disabled= */ i != rendererIndex); } + if (overrides.isEmpty()) { + addTrackSelectionInternal(periodIndex, builder.build()); + } else { + TrackGroupArray trackGroupArray = + mappedTrackInfos[periodIndex].getTrackGroups(rendererIndex); + for (int i = 0; i < overrides.size(); i++) { + builder.setSelectionOverride(rendererIndex, trackGroupArray, overrides.get(i)); + addTrackSelectionInternal(periodIndex, builder.build()); + } + } + } catch (ExoPlaybackException e) { + throw new IllegalStateException(e); } } @@ -742,7 +772,7 @@ public void addTrackSelectionForSingleRenderer( * @return The built {@link DownloadRequest}. 
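A rough sketch of how the language helpers above combine with getDownloadRequest. It assumes the helper has already been prepared and that English/German audio plus English subtitles are wanted; neither assumption comes from the patch:

    import com.google.android.exoplayer2.offline.DownloadHelper;
    import com.google.android.exoplayer2.offline.DownloadRequest;

    final class LanguageSelectionSketch {
      static DownloadRequest buildLanguageRestrictedRequest(DownloadHelper preparedHelper) {
        // Drop the default selections so only the language-constrained ones remain.
        for (int periodIndex = 0; periodIndex < preparedHelper.getPeriodCount(); periodIndex++) {
          preparedHelper.clearTrackSelections(periodIndex);
        }
        // These calls now iterate over all periods internally and disable the other
        // renderer track types, as implemented in the patch above.
        preparedHelper.addAudioLanguagesToSelection("en", "de");
        preparedHelper.addTextLanguagesToSelection(/* selectUndeterminedTextLanguage= */ true, "en");
        return preparedHelper.getDownloadRequest(/* data= */ null);
      }
    }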
*/ public DownloadRequest getDownloadRequest(@Nullable byte[] data) { - return getDownloadRequest(uri.toString(), data); + return getDownloadRequest(localConfiguration.uri.toString(), data); } /** @@ -754,13 +784,21 @@ public DownloadRequest getDownloadRequest(@Nullable byte[] data) { * @return The built {@link DownloadRequest}. */ public DownloadRequest getDownloadRequest(String id, @Nullable byte[] data) { + DownloadRequest.Builder requestBuilder = + new DownloadRequest.Builder(id, localConfiguration.uri) + .setMimeType(localConfiguration.mimeType) + .setKeySetId( + localConfiguration.drmConfiguration != null + ? localConfiguration.drmConfiguration.getKeySetId() + : null) + .setCustomCacheKey(localConfiguration.customCacheKey) + .setData(data); if (mediaSource == null) { - return new DownloadRequest( - id, downloadType, uri, /* streamKeys= */ Collections.emptyList(), cacheKey, data); + return requestBuilder.build(); } assertPreparedWithMedia(); List streamKeys = new ArrayList<>(); - List allSelections = new ArrayList<>(); + List allSelections = new ArrayList<>(); int periodCount = trackSelectionsByPeriodAndRenderer.length; for (int periodIndex = 0; periodIndex < periodCount; periodIndex++) { allSelections.clear(); @@ -770,21 +808,42 @@ public DownloadRequest getDownloadRequest(String id, @Nullable byte[] data) { } streamKeys.addAll(mediaPreparer.mediaPeriods[periodIndex].getStreamKeys(allSelections)); } - return new DownloadRequest(id, downloadType, uri, streamKeys, cacheKey, data); + return requestBuilder.setStreamKeys(streamKeys).build(); + } + + @RequiresNonNull({ + "trackGroupArrays", + "trackSelectionsByPeriodAndRenderer", + "mediaPreparer", + "mediaPreparer.timeline" + }) + private void addTrackSelectionInternal( + int periodIndex, TrackSelectionParameters trackSelectionParameters) + throws ExoPlaybackException { + trackSelector.setParameters(trackSelectionParameters); + runTrackSelection(periodIndex); + // TrackSelectionParameters can contain multiple overrides for each track type. The track + // selector will only use one of them (because it's designed for playback), but for downloads we + // want to use all of them. Run selection again with each override being the only one of its + // type, to ensure that all of the desired tracks are included. + for (TrackSelectionOverride override : trackSelectionParameters.overrides.values()) { + trackSelector.setParameters( + trackSelectionParameters.buildUpon().setOverrideForType(override).build()); + runTrackSelection(periodIndex); + } } - // Initialization of array of Lists. - @SuppressWarnings("unchecked") - private void onMediaPrepared() { - Assertions.checkNotNull(mediaPreparer); - Assertions.checkNotNull(mediaPreparer.mediaPeriods); - Assertions.checkNotNull(mediaPreparer.timeline); + @SuppressWarnings("unchecked") // Initialization of array of Lists. 
+ private void onMediaPrepared() throws ExoPlaybackException { + checkNotNull(mediaPreparer); + checkNotNull(mediaPreparer.mediaPeriods); + checkNotNull(mediaPreparer.timeline); int periodCount = mediaPreparer.mediaPeriods.length; int rendererCount = rendererCapabilities.length; trackSelectionsByPeriodAndRenderer = - (List[][]) new List[periodCount][rendererCount]; + (List[][]) new List[periodCount][rendererCount]; immutableTrackSelectionsByPeriodAndRenderer = - (List[][]) new List[periodCount][rendererCount]; + (List[][]) new List[periodCount][rendererCount]; for (int i = 0; i < periodCount; i++) { for (int j = 0; j < rendererCount; j++) { trackSelectionsByPeriodAndRenderer[i][j] = new ArrayList<>(); @@ -798,16 +857,14 @@ private void onMediaPrepared() { trackGroupArrays[i] = mediaPreparer.mediaPeriods[i].getTrackGroups(); TrackSelectorResult trackSelectorResult = runTrackSelection(/* periodIndex= */ i); trackSelector.onSelectionActivated(trackSelectorResult.info); - mappedTrackInfos[i] = Assertions.checkNotNull(trackSelector.getCurrentMappedTrackInfo()); + mappedTrackInfos[i] = checkNotNull(trackSelector.getCurrentMappedTrackInfo()); } setPreparedWithMedia(); - Assertions.checkNotNull(callbackHandler) - .post(() -> Assertions.checkNotNull(callback).onPrepared(this)); + checkNotNull(callbackHandler).post(() -> checkNotNull(callback).onPrepared(this)); } private void onMediaPreparationFailed(IOException error) { - Assertions.checkNotNull(callbackHandler) - .post(() -> Assertions.checkNotNull(callback).onPrepareError(this, error)); + checkNotNull(callbackHandler).post(() -> checkNotNull(callback).onPrepareError(this, error)); } @RequiresNonNull({ @@ -832,7 +889,7 @@ private void setPreparedWithMedia() { "mediaPreparer.timeline", "mediaPreparer.mediaPeriods" }) - @SuppressWarnings("nullness:contracts.postcondition.not.satisfied") + @SuppressWarnings("nullness:contracts.postcondition") private void assertPreparedWithMedia() { Assertions.checkState(isPreparedWithMedia); } @@ -841,100 +898,71 @@ private void assertPreparedWithMedia() { * Runs the track selection for a given period index with the current parameters. The selected * tracks will be added to {@link #trackSelectionsByPeriodAndRenderer}. */ - // Intentional reference comparison of track group instances. - @SuppressWarnings("ReferenceEquality") @RequiresNonNull({ "trackGroupArrays", "trackSelectionsByPeriodAndRenderer", "mediaPreparer", "mediaPreparer.timeline" }) - private TrackSelectorResult runTrackSelection(int periodIndex) { - try { - TrackSelectorResult trackSelectorResult = - trackSelector.selectTracks( - rendererCapabilities, - trackGroupArrays[periodIndex], - new MediaPeriodId(mediaPreparer.timeline.getUidOfPeriod(periodIndex)), - mediaPreparer.timeline); - for (int i = 0; i < trackSelectorResult.length; i++) { - @Nullable TrackSelection newSelection = trackSelectorResult.selections.get(i); - if (newSelection == null) { - continue; - } - List existingSelectionList = - trackSelectionsByPeriodAndRenderer[periodIndex][i]; - boolean mergedWithExistingSelection = false; - for (int j = 0; j < existingSelectionList.size(); j++) { - TrackSelection existingSelection = existingSelectionList.get(j); - if (existingSelection.getTrackGroup() == newSelection.getTrackGroup()) { - // Merge with existing selection. 
- scratchSet.clear(); - for (int k = 0; k < existingSelection.length(); k++) { - scratchSet.put(existingSelection.getIndexInTrackGroup(k), 0); - } - for (int k = 0; k < newSelection.length(); k++) { - scratchSet.put(newSelection.getIndexInTrackGroup(k), 0); - } - int[] mergedTracks = new int[scratchSet.size()]; - for (int k = 0; k < scratchSet.size(); k++) { - mergedTracks[k] = scratchSet.keyAt(k); - } - existingSelectionList.set( - j, new DownloadTrackSelection(existingSelection.getTrackGroup(), mergedTracks)); - mergedWithExistingSelection = true; - break; + private TrackSelectorResult runTrackSelection(int periodIndex) throws ExoPlaybackException { + TrackSelectorResult trackSelectorResult = + trackSelector.selectTracks( + rendererCapabilities, + trackGroupArrays[periodIndex], + new MediaPeriodId(mediaPreparer.timeline.getUidOfPeriod(periodIndex)), + mediaPreparer.timeline); + for (int i = 0; i < trackSelectorResult.length; i++) { + @Nullable ExoTrackSelection newSelection = trackSelectorResult.selections[i]; + if (newSelection == null) { + continue; + } + List existingSelectionList = + trackSelectionsByPeriodAndRenderer[periodIndex][i]; + boolean mergedWithExistingSelection = false; + for (int j = 0; j < existingSelectionList.size(); j++) { + ExoTrackSelection existingSelection = existingSelectionList.get(j); + if (existingSelection.getTrackGroup().equals(newSelection.getTrackGroup())) { + // Merge with existing selection. + scratchSet.clear(); + for (int k = 0; k < existingSelection.length(); k++) { + scratchSet.put(existingSelection.getIndexInTrackGroup(k), 0); } - } - if (!mergedWithExistingSelection) { - existingSelectionList.add(newSelection); + for (int k = 0; k < newSelection.length(); k++) { + scratchSet.put(newSelection.getIndexInTrackGroup(k), 0); + } + int[] mergedTracks = new int[scratchSet.size()]; + for (int k = 0; k < scratchSet.size(); k++) { + mergedTracks[k] = scratchSet.keyAt(k); + } + existingSelectionList.set( + j, new DownloadTrackSelection(existingSelection.getTrackGroup(), mergedTracks)); + mergedWithExistingSelection = true; + break; } } - return trackSelectorResult; - } catch (ExoPlaybackException e) { - // DefaultTrackSelector does not throw exceptions during track selection. - throw new UnsupportedOperationException(e); + if (!mergedWithExistingSelection) { + existingSelectionList.add(newSelection); + } } + return trackSelectorResult; } - @Nullable - private static Constructor getConstructor(String className) { - try { - // LINT.IfChange - Class factoryClazz = - Class.forName(className).asSubclass(MediaSourceFactory.class); - return factoryClazz.getConstructor(Factory.class); - // LINT.ThenChange(../../../../../../../../proguard-rules.txt) - } catch (ClassNotFoundException e) { - // Expected if the app was built without the respective module. - return null; - } catch (NoSuchMethodException e) { - // Something is wrong with the library or the proguard configuration. 
- throw new IllegalStateException(e); + private static MediaSource createMediaSourceInternal( + MediaItem mediaItem, + DataSource.Factory dataSourceFactory, + @Nullable DrmSessionManager drmSessionManager) { + DefaultMediaSourceFactory mediaSourceFactory = + new DefaultMediaSourceFactory(dataSourceFactory, ExtractorsFactory.EMPTY); + if (drmSessionManager != null) { + mediaSourceFactory.setDrmSessionManagerProvider(unusedMediaItem -> drmSessionManager); } + return mediaSourceFactory.createMediaSource(mediaItem); } - private static MediaSource createMediaSourceInternal( - @Nullable Constructor constructor, - Uri uri, - Factory dataSourceFactory, - @Nullable DrmSessionManager drmSessionManager, - @Nullable List streamKeys) { - if (constructor == null) { - throw new IllegalStateException("Module missing to create media source."); - } - try { - MediaSourceFactory factory = constructor.newInstance(dataSourceFactory); - if (drmSessionManager != null) { - factory.setDrmSessionManager(drmSessionManager); - } - if (streamKeys != null) { - factory.setStreamKeys(streamKeys); - } - return Assertions.checkNotNull(factory.createMediaSource(uri)); - } catch (Exception e) { - throw new IllegalStateException("Failed to instantiate media source.", e); - } + private static boolean isProgressive(MediaItem.LocalConfiguration localConfiguration) { + return Util.inferContentTypeForUriAndMimeType( + localConfiguration.uri, localConfiguration.mimeType) + == C.CONTENT_TYPE_OTHER; } private static final class MediaPreparer @@ -966,10 +994,11 @@ public MediaPreparer(MediaSource mediaSource, DownloadHelper downloadHelper) { this.downloadHelper = downloadHelper; allocator = new DefaultAllocator(true, C.DEFAULT_BUFFER_SEGMENT_SIZE); pendingMediaPeriods = new ArrayList<>(); - @SuppressWarnings("methodref.receiver.bound.invalid") - Handler downloadThreadHandler = Util.createHandler(this::handleDownloadHelperCallbackMessage); + @SuppressWarnings("nullness:methodref.receiver.bound") + Handler downloadThreadHandler = + Util.createHandlerForCurrentOrMainLooper(this::handleDownloadHelperCallbackMessage); this.downloadHelperHandler = downloadThreadHandler; - mediaSourceThread = new HandlerThread("DownloadHelper"); + mediaSourceThread = new HandlerThread("ExoPlayer:DownloadHelper"); mediaSourceThread.start(); mediaSourceHandler = Util.createHandler(mediaSourceThread.getLooper(), /* callback= */ this); mediaSourceHandler.sendEmptyMessage(MESSAGE_PREPARE_SOURCE); @@ -989,7 +1018,8 @@ public void release() { public boolean handleMessage(Message msg) { switch (msg.what) { case MESSAGE_PREPARE_SOURCE: - mediaSource.prepareSource(/* caller= */ this, /* mediaTransferListener= */ null); + mediaSource.prepareSource( + /* caller= */ this, /* mediaTransferListener= */ null, PlayerId.UNSET); mediaSourceHandler.sendEmptyMessage(MESSAGE_CHECK_FOR_FAILURE); return true; case MESSAGE_CHECK_FOR_FAILURE: @@ -1038,7 +1068,7 @@ public void onSourceInfoRefreshed(MediaSource source, Timeline timeline) { // Ignore dynamic updates. 
return; } - if (timeline.getWindow(/* windowIndex= */ 0, new Timeline.Window()).isLive) { + if (timeline.getWindow(/* windowIndex= */ 0, new Timeline.Window()).isLive()) { downloadHelperHandler .obtainMessage( DOWNLOAD_HELPER_CALLBACK_MESSAGE_FAILED, @@ -1087,11 +1117,18 @@ private boolean handleDownloadHelperCallbackMessage(Message msg) { } switch (msg.what) { case DOWNLOAD_HELPER_CALLBACK_MESSAGE_PREPARED: - downloadHelper.onMediaPrepared(); + try { + downloadHelper.onMediaPrepared(); + } catch (ExoPlaybackException e) { + downloadHelperHandler + .obtainMessage( + DOWNLOAD_HELPER_CALLBACK_MESSAGE_FAILED, /* obj= */ new IOException(e)) + .sendToTarget(); + } return true; case DOWNLOAD_HELPER_CALLBACK_MESSAGE_FAILED: release(); - downloadHelper.onMediaPreparationFailed((IOException) Util.castNonNull(msg.obj)); + downloadHelper.onMediaPreparationFailed((IOException) castNonNull(msg.obj)); return true; default: return false; @@ -1101,12 +1138,15 @@ private boolean handleDownloadHelperCallbackMessage(Message msg) { private static final class DownloadTrackSelection extends BaseTrackSelection { - private static final class Factory implements TrackSelection.Factory { + private static final class Factory implements ExoTrackSelection.Factory { @Override - public @NullableType TrackSelection[] createTrackSelections( - @NullableType Definition[] definitions, BandwidthMeter bandwidthMeter) { - @NullableType TrackSelection[] selections = new TrackSelection[definitions.length]; + public @NullableType ExoTrackSelection[] createTrackSelections( + @NullableType Definition[] definitions, + BandwidthMeter bandwidthMeter, + MediaPeriodId mediaPeriodId, + Timeline timeline) { + @NullableType ExoTrackSelection[] selections = new ExoTrackSelection[definitions.length]; for (int i = 0; i < definitions.length; i++) { selections[i] = definitions[i] == null @@ -1127,12 +1167,12 @@ public int getSelectedIndex() { } @Override - public int getSelectionReason() { + public @C.SelectionReason int getSelectionReason() { return C.SELECTION_REASON_UNKNOWN; } - @Nullable @Override + @Nullable public Object getSelectionData() { return null; } @@ -1148,15 +1188,15 @@ public void updateSelectedTrack( } } - private static final class DummyBandwidthMeter implements BandwidthMeter { + private static final class FakeBandwidthMeter implements BandwidthMeter { @Override public long getBitrateEstimate() { return 0; } - @Nullable @Override + @Nullable public TransferListener getTransferListener() { return null; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadManager.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadManager.java index 66b2a7cf91..00c6913ef1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadManager.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadManager.java @@ -25,6 +25,7 @@ import static com.google.android.exoplayer2.offline.Download.STATE_RESTARTING; import static com.google.android.exoplayer2.offline.Download.STATE_STOPPED; import static com.google.android.exoplayer2.offline.Download.STOP_REASON_NONE; +import static java.lang.Math.min; import android.content.Context; import android.os.Handler; @@ -32,6 +33,7 @@ import android.os.Looper; import android.os.Message; import androidx.annotation.CheckResult; +import androidx.annotation.IntRange; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import 
com.google.android.exoplayer2.database.DatabaseProvider; @@ -40,6 +42,7 @@ import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSource.Factory; import com.google.android.exoplayer2.upstream.cache.Cache; +import com.google.android.exoplayer2.upstream.cache.CacheDataSource; import com.google.android.exoplayer2.upstream.cache.CacheEvictor; import com.google.android.exoplayer2.upstream.cache.NoOpCacheEvictor; import com.google.android.exoplayer2.util.Assertions; @@ -51,6 +54,7 @@ import java.util.HashMap; import java.util.List; import java.util.concurrent.CopyOnWriteArraySet; +import java.util.concurrent.Executor; /** * Manages downloads. @@ -61,7 +65,9 @@ * *

<p>
      A download manager instance must be accessed only from the thread that created it, unless that * thread does not have a {@link Looper}. In that case, it must be accessed only from the - * application's main thread. Registered listeners will be called on the same thread. + * application's main thread. Registered listeners will be called on the same thread. In all cases + * the `Looper` of the thread from which the manager must be accessed can be queried using {@link + * #getApplicationLooper()}. */ public final class DownloadManager { @@ -90,8 +96,11 @@ default void onDownloadsPausedChanged( * * @param downloadManager The reporting instance. * @param download The state of the download. + * @param finalException If the download is transitioning to {@link Download#STATE_FAILED}, this + * is the final exception that resulted in the failure. */ - default void onDownloadChanged(DownloadManager downloadManager, Download download) {} + default void onDownloadChanged( + DownloadManager downloadManager, Download download, @Nullable Exception finalException) {} /** * Called when a download is removed. @@ -166,7 +175,7 @@ default void onWaitingForRequirementsChanged( private final Context context; private final WritableDownloadIndex downloadIndex; - private final Handler mainHandler; + private final Handler applicationHandler; private final InternalHandler internalHandler; private final RequirementsWatcher.Listener requirementsListener; private final CopyOnWriteArraySet listeners; @@ -191,13 +200,42 @@ default void onWaitingForRequirementsChanged( * an {@link CacheEvictor} that will not evict downloaded content, for example {@link * NoOpCacheEvictor}. * @param upstreamFactory A {@link Factory} for creating {@link DataSource}s for downloading data. + * @deprecated Use {@link #DownloadManager(Context, DatabaseProvider, Cache, Factory, Executor)}. */ + @Deprecated public DownloadManager( Context context, DatabaseProvider databaseProvider, Cache cache, Factory upstreamFactory) { + this(context, databaseProvider, cache, upstreamFactory, Runnable::run); + } + + /** + * Constructs a {@link DownloadManager}. + * + * @param context Any context. + * @param databaseProvider Provides the SQLite database in which downloads are persisted. + * @param cache A cache to be used to store downloaded data. The cache should be configured with + * an {@link CacheEvictor} that will not evict downloaded content, for example {@link + * NoOpCacheEvictor}. + * @param upstreamFactory A {@link Factory} for creating {@link DataSource}s for downloading data. + * @param executor An {@link Executor} used to download data. Passing {@code Runnable::run} will + * cause each download task to download data on its own thread. Passing an {@link Executor} + * that uses multiple threads will speed up download tasks that can be split into smaller + * parts for parallel execution. 
+ */ + public DownloadManager( + Context context, + DatabaseProvider databaseProvider, + Cache cache, + Factory upstreamFactory, + Executor executor) { this( context, new DefaultDownloadIndex(databaseProvider), - new DefaultDownloaderFactory(new DownloaderConstructorHelper(cache, upstreamFactory))); + new DefaultDownloaderFactory( + new CacheDataSource.Factory() + .setCache(cache) + .setUpstreamDataSourceFactory(upstreamFactory), + executor)); } /** @@ -218,10 +256,10 @@ public DownloadManager( downloads = Collections.emptyList(); listeners = new CopyOnWriteArraySet<>(); - @SuppressWarnings("methodref.receiver.bound.invalid") - Handler mainHandler = Util.createHandler(this::handleMainMessage); - this.mainHandler = mainHandler; - HandlerThread internalThread = new HandlerThread("DownloadManager file i/o"); + @SuppressWarnings("nullness:methodref.receiver.bound") + Handler mainHandler = Util.createHandlerForCurrentOrMainLooper(this::handleMainMessage); + this.applicationHandler = mainHandler; + HandlerThread internalThread = new HandlerThread("ExoPlayer:DownloadManager"); internalThread.start(); internalHandler = new InternalHandler( @@ -233,7 +271,7 @@ public DownloadManager( minRetryCount, downloadsPaused); - @SuppressWarnings("methodref.receiver.bound.invalid") + @SuppressWarnings("nullness:methodref.receiver.bound") RequirementsWatcher.Listener requirementsListener = this::onRequirementsStateChanged; this.requirementsListener = requirementsListener; requirementsWatcher = @@ -246,6 +284,14 @@ public DownloadManager( .sendToTarget(); } + /** + * Returns the {@link Looper} associated with the application thread that's used to access the + * manager, and on which the manager will call its {@link Listener Listeners}. + */ + public Looper getApplicationLooper() { + return applicationHandler.getLooper(); + } + /** Returns whether the manager has completed initialization. */ public boolean isInitialized() { return initialized; @@ -280,6 +326,7 @@ public boolean isWaitingForRequirements() { * @param listener The listener to be added. */ public void addListener(Listener listener) { + Assertions.checkNotNull(listener); listeners.add(listener); } @@ -302,8 +349,7 @@ public Requirements getRequirements() { * * @return The not met {@link Requirements.RequirementFlags}, or 0 if all requirements are met. */ - @Requirements.RequirementFlags - public int getNotMetRequirements() { + public @Requirements.RequirementFlags int getNotMetRequirements() { return notMetRequirements; } @@ -332,7 +378,7 @@ public int getMaxParallelDownloads() { * * @param maxParallelDownloads The maximum number of parallel downloads. Must be greater than 0. */ - public void setMaxParallelDownloads(int maxParallelDownloads) { + public void setMaxParallelDownloads(@IntRange(from = 1) int maxParallelDownloads) { Assertions.checkArgument(maxParallelDownloads > 0); if (this.maxParallelDownloads == maxParallelDownloads) { return; @@ -484,7 +530,8 @@ public void release() { // Restore the interrupted status. Thread.currentThread().interrupt(); } - mainHandler.removeCallbacksAndMessages(/* token= */ null); + applicationHandler.removeCallbacksAndMessages(/* token= */ null); + requirementsWatcher.stop(); // Reset state. 
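The Executor-based constructor is easiest to see in use. A minimal construction sketch follows; the cache directory name, thread-pool size, database provider choice and HTTP factory are illustrative assumptions:

    import android.content.Context;
    import com.google.android.exoplayer2.database.StandaloneDatabaseProvider;
    import com.google.android.exoplayer2.offline.DownloadManager;
    import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;
    import com.google.android.exoplayer2.upstream.cache.NoOpCacheEvictor;
    import com.google.android.exoplayer2.upstream.cache.SimpleCache;
    import java.io.File;
    import java.util.concurrent.Executors;

    final class DownloadManagerSketch {
      static DownloadManager create(Context context) {
        StandaloneDatabaseProvider databaseProvider = new StandaloneDatabaseProvider(context);
        SimpleCache downloadCache =
            new SimpleCache(
                new File(context.getFilesDir(), "downloads"), // assumed cache location
                new NoOpCacheEvictor(), // never evict downloaded content
                databaseProvider);
        // Passing Runnable::run keeps each download on its own task thread; a fixed pool
        // lets downloads that can be split into parts proceed in parallel.
        return new DownloadManager(
            context,
            databaseProvider,
            downloadCache,
            new DefaultHttpDataSource.Factory(),
            Executors.newFixedThreadPool(/* nThreads= */ 6));
      }
    }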
downloads = Collections.emptyList(); pendingMessages = 0; @@ -600,7 +647,7 @@ private void onDownloadUpdate(DownloadUpdate update) { } } else { for (Listener listener : listeners) { - listener.onDownloadChanged(this, updatedDownload); + listener.onDownloadChanged(this, updatedDownload, update.finalException); } } if (waitingForRequirementsChanged) { @@ -656,11 +703,12 @@ private static final class InternalHandler extends Handler { private final ArrayList downloads; private final HashMap activeTasks; - @Requirements.RequirementFlags private int notMetRequirements; + private @Requirements.RequirementFlags int notMetRequirements; private boolean downloadsPaused; private int maxParallelDownloads; private int minRetryCount; private int activeDownloadTaskCount; + private boolean hasActiveRemoveTask; public InternalHandler( HandlerThread thread, @@ -730,7 +778,7 @@ public void handleMessage(Message message) { break; case MSG_CONTENT_LENGTH_CHANGED: task = (Task) message.obj; - onContentLengthChanged(task); + onContentLengthChanged(task, Util.toLong(message.arg1, message.arg2)); return; // No need to post back to mainHandler. case MSG_UPDATE_PROGRESS: updateProgress(); @@ -810,7 +858,7 @@ private void setStopReason(@Nullable String id, int stopReason) { private void setStopReason(Download download, int stopReason) { if (stopReason == STOP_REASON_NONE) { if (download.state == STATE_STOPPED) { - putDownloadWithState(download, STATE_QUEUED); + putDownloadWithState(download, STATE_QUEUED, STOP_REASON_NONE); } } else if (stopReason != download.stopReason) { @Download.State int state = download.state; @@ -864,7 +912,7 @@ private void removeDownload(String id) { Log.e(TAG, "Failed to remove nonexistent download: " + id); return; } - putDownloadWithState(download, STATE_REMOVING); + putDownloadWithState(download, STATE_REMOVING, STOP_REASON_NONE); syncTasks(); } @@ -878,10 +926,11 @@ private void removeAllDownloads() { Log.e(TAG, "Failed to load downloads."); } for (int i = 0; i < downloads.size(); i++) { - downloads.set(i, copyDownloadWithState(downloads.get(i), STATE_REMOVING)); + downloads.set(i, copyDownloadWithState(downloads.get(i), STATE_REMOVING, STOP_REASON_NONE)); } for (int i = 0; i < terminalDownloads.size(); i++) { - downloads.add(copyDownloadWithState(terminalDownloads.get(i), STATE_REMOVING)); + downloads.add( + copyDownloadWithState(terminalDownloads.get(i), STATE_REMOVING, STOP_REASON_NONE)); } Collections.sort(downloads, InternalHandler::compareStartTimes); try { @@ -892,7 +941,8 @@ private void removeAllDownloads() { ArrayList updateList = new ArrayList<>(downloads); for (int i = 0; i < downloads.size(); i++) { DownloadUpdate update = - new DownloadUpdate(downloads.get(i), /* isRemove= */ false, updateList); + new DownloadUpdate( + downloads.get(i), /* isRemove= */ false, updateList, /* finalException= */ null); mainHandler.obtainMessage(MSG_DOWNLOAD_UPDATE, update).sendToTarget(); } syncTasks(); @@ -972,7 +1022,7 @@ private Task syncQueuedDownload(@Nullable Task activeTask, Download download) { } // We can start a download task. 
- download = putDownloadWithState(download, STATE_DOWNLOADING); + download = putDownloadWithState(download, STATE_DOWNLOADING, STOP_REASON_NONE); Downloader downloader = downloaderFactory.createDownloader(download.request); activeTask = new Task( @@ -994,7 +1044,7 @@ private void syncDownloadingDownload( Task activeTask, Download download, int accumulatingDownloadTaskCount) { Assertions.checkState(!activeTask.isRemove); if (!canDownloadsRun() || accumulatingDownloadTaskCount >= maxParallelDownloads) { - putDownloadWithState(download, STATE_QUEUED); + putDownloadWithState(download, STATE_QUEUED, STOP_REASON_NONE); activeTask.cancel(/* released= */ false); } } @@ -1005,11 +1055,15 @@ private void syncRemovingDownload(@Nullable Task activeTask, Download download) // Cancel the downloading task. activeTask.cancel(/* released= */ false); } - // The activeTask is either a remove task, or a downloading task that we just cancelled. In + // The activeTask is either a remove task, or a downloading task that we just canceled. In // the latter case we need to wait for the task to stop before we start a remove task. return; } + if (hasActiveRemoveTask) { + return; + } + // We can start a remove task. Downloader downloader = downloaderFactory.createDownloader(download.request); activeTask = @@ -1021,14 +1075,14 @@ private void syncRemovingDownload(@Nullable Task activeTask, Download download) minRetryCount, /* internalHandler= */ this); activeTasks.put(download.request.id, activeTask); + hasActiveRemoveTask = true; activeTask.start(); } // Task event processing. - private void onContentLengthChanged(Task task) { + private void onContentLengthChanged(Task task, long contentLength) { String downloadId = task.request.id; - long contentLength = task.contentLength; Download download = Assertions.checkNotNull(getDownload(downloadId, /* loadFromIndex= */ false)); if (contentLength == download.contentLength || contentLength == C.LENGTH_UNSET) { @@ -1051,7 +1105,9 @@ private void onTaskStopped(Task task) { activeTasks.remove(downloadId); boolean isRemove = task.isRemove; - if (!isRemove && --activeDownloadTaskCount == 0) { + if (isRemove) { + hasActiveRemoveTask = false; + } else if (--activeDownloadTaskCount == 0) { removeMessages(MSG_UPDATE_PROGRESS); } @@ -1060,9 +1116,9 @@ private void onTaskStopped(Task task) { return; } - @Nullable Throwable finalError = task.finalError; - if (finalError != null) { - Log.e(TAG, "Task failed: " + task.request + ", " + isRemove, finalError); + @Nullable Exception finalException = task.finalException; + if (finalException != null) { + Log.e(TAG, "Task failed: " + task.request + ", " + isRemove, finalException); } Download download = @@ -1070,7 +1126,7 @@ private void onTaskStopped(Task task) { switch (download.state) { case STATE_DOWNLOADING: Assertions.checkState(!isRemove); - onDownloadTaskStopped(download, finalError); + onDownloadTaskStopped(download, finalException); break; case STATE_REMOVING: case STATE_RESTARTING: @@ -1088,16 +1144,16 @@ private void onTaskStopped(Task task) { syncTasks(); } - private void onDownloadTaskStopped(Download download, @Nullable Throwable finalError) { + private void onDownloadTaskStopped(Download download, @Nullable Exception finalException) { download = new Download( download.request, - finalError == null ? STATE_COMPLETED : STATE_FAILED, + finalException == null ? STATE_COMPLETED : STATE_FAILED, download.startTimeMs, /* updateTimeMs= */ System.currentTimeMillis(), download.contentLength, download.stopReason, - finalError == null ? 
FAILURE_REASON_NONE : FAILURE_REASON_UNKNOWN, + finalException == null ? FAILURE_REASON_NONE : FAILURE_REASON_UNKNOWN, download.progress); // The download is now in a terminal state, so should not be in the downloads list. downloads.remove(getDownloadIndex(download.request.id)); @@ -1108,14 +1164,16 @@ private void onDownloadTaskStopped(Download download, @Nullable Throwable finalE Log.e(TAG, "Failed to update index.", e); } DownloadUpdate update = - new DownloadUpdate(download, /* isRemove= */ false, new ArrayList<>(downloads)); + new DownloadUpdate( + download, /* isRemove= */ false, new ArrayList<>(downloads), finalException); mainHandler.obtainMessage(MSG_DOWNLOAD_UPDATE, update).sendToTarget(); } private void onRemoveTaskStopped(Download download) { if (download.state == STATE_RESTARTING) { - putDownloadWithState( - download, download.stopReason == STOP_REASON_NONE ? STATE_QUEUED : STATE_STOPPED); + @Download.State + int state = download.stopReason == STOP_REASON_NONE ? STATE_QUEUED : STATE_STOPPED; + putDownloadWithState(download, state, download.stopReason); syncTasks(); } else { int removeIndex = getDownloadIndex(download.request.id); @@ -1126,7 +1184,11 @@ private void onRemoveTaskStopped(Download download) { Log.e(TAG, "Failed to remove from database"); } DownloadUpdate update = - new DownloadUpdate(download, /* isRemove= */ true, new ArrayList<>(downloads)); + new DownloadUpdate( + download, + /* isRemove= */ true, + new ArrayList<>(downloads), + /* finalException= */ null); mainHandler.obtainMessage(MSG_DOWNLOAD_UPDATE, update).sendToTarget(); } } @@ -1153,12 +1215,11 @@ private boolean canDownloadsRun() { return !downloadsPaused && notMetRequirements == 0; } - private Download putDownloadWithState(Download download, @Download.State int state) { - // Downloads in terminal states shouldn't be in the downloads list. This method cannot be used - // to set STATE_STOPPED either, because it doesn't have a stopReason argument. - Assertions.checkState( - state != STATE_COMPLETED && state != STATE_FAILED && state != STATE_STOPPED); - return putDownload(copyDownloadWithState(download, state)); + private Download putDownloadWithState( + Download download, @Download.State int state, int stopReason) { + // Downloads in terminal states shouldn't be in the downloads list. 
+ Assertions.checkState(state != STATE_COMPLETED && state != STATE_FAILED); + return putDownload(copyDownloadWithState(download, state, stopReason)); } private Download putDownload(Download download) { @@ -1181,7 +1242,11 @@ private Download putDownload(Download download) { Log.e(TAG, "Failed to update index.", e); } DownloadUpdate update = - new DownloadUpdate(download, /* isRemove= */ false, new ArrayList<>(downloads)); + new DownloadUpdate( + download, + /* isRemove= */ false, + new ArrayList<>(downloads), + /* finalException= */ null); mainHandler.obtainMessage(MSG_DOWNLOAD_UPDATE, update).sendToTarget(); return download; } @@ -1212,14 +1277,15 @@ private int getDownloadIndex(String id) { return C.INDEX_UNSET; } - private static Download copyDownloadWithState(Download download, @Download.State int state) { + private static Download copyDownloadWithState( + Download download, @Download.State int state, int stopReason) { return new Download( download.request, state, download.startTimeMs, /* updateTimeMs= */ System.currentTimeMillis(), download.contentLength, - /* stopReason= */ 0, + stopReason, FAILURE_REASON_NONE, download.progress); } @@ -1239,7 +1305,7 @@ private static class Task extends Thread implements Downloader.ProgressListener @Nullable private volatile InternalHandler internalHandler; private volatile boolean isCanceled; - @Nullable private Throwable finalError; + @Nullable private Exception finalException; private long contentLength; @@ -1259,7 +1325,7 @@ private Task( contentLength = C.LENGTH_UNSET; } - @SuppressWarnings("nullness:assignment.type.incompatible") + @SuppressWarnings("nullness:assignment") public void cancel(boolean released) { if (released) { // Download threads are GC roots for as long as they're running. The time taken for @@ -1304,8 +1370,10 @@ public void run() { } } } - } catch (Throwable e) { - finalError = e; + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } catch (Exception e) { + finalException = e; } @Nullable Handler internalHandler = this.internalHandler; if (internalHandler != null) { @@ -1321,13 +1389,19 @@ public void onProgress(long contentLength, long bytesDownloaded, float percentDo this.contentLength = contentLength; @Nullable Handler internalHandler = this.internalHandler; if (internalHandler != null) { - internalHandler.obtainMessage(MSG_CONTENT_LENGTH_CHANGED, this).sendToTarget(); + internalHandler + .obtainMessage( + MSG_CONTENT_LENGTH_CHANGED, + (int) (contentLength >> 32), + (int) contentLength, + this) + .sendToTarget(); } } } private static int getRetryDelayMillis(int errorCount) { - return Math.min((errorCount - 1) * 1000, 5000); + return min((errorCount - 1) * 1000, 5000); } } @@ -1336,11 +1410,17 @@ private static final class DownloadUpdate { public final Download download; public final boolean isRemove; public final List downloads; + @Nullable public final Exception finalException; - public DownloadUpdate(Download download, boolean isRemove, List downloads) { + public DownloadUpdate( + Download download, + boolean isRemove, + List downloads, + @Nullable Exception finalException) { this.download = download; this.isRemove = isRemove; this.downloads = downloads; + this.finalException = finalException; } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadProgress.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadProgress.java index 9d946daa28..ba226e60b2 100644 --- 
a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadProgress.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadProgress.java @@ -21,8 +21,8 @@ public class DownloadProgress { /** The number of bytes that have been downloaded. */ - public long bytesDownloaded; + public volatile long bytesDownloaded; /** The percentage that has been downloaded, or {@link C#PERCENTAGE_UNSET} if unknown. */ - public float percentDownloaded; + public volatile float percentDownloaded; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadRequest.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadRequest.java index 988b908140..3964533bb2 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadRequest.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadRequest.java @@ -21,8 +21,12 @@ import android.os.Parcel; import android.os.Parcelable; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -35,23 +39,83 @@ public final class DownloadRequest implements Parcelable { /** Thrown when the encoded request data belongs to an unsupported request type. */ public static class UnsupportedRequestException extends IOException {} - /** Type for progressive downloads. */ - public static final String TYPE_PROGRESSIVE = "progressive"; - /** Type for DASH downloads. */ - public static final String TYPE_DASH = "dash"; - /** Type for HLS downloads. */ - public static final String TYPE_HLS = "hls"; - /** Type for SmoothStreaming downloads. */ - public static final String TYPE_SS = "ss"; + /** A builder for download requests. */ + public static class Builder { + private final String id; + private final Uri uri; + @Nullable private String mimeType; + @Nullable private List streamKeys; + @Nullable private byte[] keySetId; + @Nullable private String customCacheKey; + @Nullable private byte[] data; + + /** Creates a new instance with the specified id and uri. */ + public Builder(String id, Uri uri) { + this.id = id; + this.uri = uri; + } + + /** Sets the {@link DownloadRequest#mimeType}. */ + @CanIgnoreReturnValue + public Builder setMimeType(@Nullable String mimeType) { + this.mimeType = mimeType; + return this; + } + + /** Sets the {@link DownloadRequest#streamKeys}. */ + @CanIgnoreReturnValue + public Builder setStreamKeys(@Nullable List streamKeys) { + this.streamKeys = streamKeys; + return this; + } + + /** Sets the {@link DownloadRequest#keySetId}. */ + @CanIgnoreReturnValue + public Builder setKeySetId(@Nullable byte[] keySetId) { + this.keySetId = keySetId; + return this; + } + + /** Sets the {@link DownloadRequest#customCacheKey}. */ + @CanIgnoreReturnValue + public Builder setCustomCacheKey(@Nullable String customCacheKey) { + this.customCacheKey = customCacheKey; + return this; + } + + /** Sets the {@link DownloadRequest#data}. */ + @CanIgnoreReturnValue + public Builder setData(@Nullable byte[] data) { + this.data = data; + return this; + } + + public DownloadRequest build() { + return new DownloadRequest( + id, + uri, + mimeType, + streamKeys != null ? 
streamKeys : ImmutableList.of(), + keySetId, + customCacheKey, + data); + } + } /** The unique content id. */ public final String id; - /** The type of the request. */ - public final String type; /** The uri being downloaded. */ public final Uri uri; + /** + * The MIME type of this content. Used as a hint to infer the content's type (DASH, HLS, + * SmoothStreaming). If null, a {@code DownloadService} will infer the content type from the + * {@link #uri}. + */ + @Nullable public final String mimeType; /** Stream keys to be downloaded. If empty, all streams will be downloaded. */ public final List streamKeys; + /** The key set id of the offline licence if the content is protected with DRM. */ + @Nullable public final byte[] keySetId; /** * Custom key for cache indexing, or null. Must be null for DASH, HLS and SmoothStreaming * downloads. @@ -62,43 +126,49 @@ public static class UnsupportedRequestException extends IOException {} /** * @param id See {@link #id}. - * @param type See {@link #type}. * @param uri See {@link #uri}. + * @param mimeType See {@link #mimeType} * @param streamKeys See {@link #streamKeys}. * @param customCacheKey See {@link #customCacheKey}. * @param data See {@link #data}. */ - public DownloadRequest( + private DownloadRequest( String id, - String type, Uri uri, + @Nullable String mimeType, List streamKeys, + @Nullable byte[] keySetId, @Nullable String customCacheKey, @Nullable byte[] data) { - if (TYPE_DASH.equals(type) || TYPE_HLS.equals(type) || TYPE_SS.equals(type)) { + @C.ContentType int contentType = Util.inferContentTypeForUriAndMimeType(uri, mimeType); + if (contentType == C.CONTENT_TYPE_DASH + || contentType == C.CONTENT_TYPE_HLS + || contentType == C.CONTENT_TYPE_SS) { Assertions.checkArgument( - customCacheKey == null, "customCacheKey must be null for type: " + type); + customCacheKey == null, "customCacheKey must be null for type: " + contentType); } this.id = id; - this.type = type; this.uri = uri; + this.mimeType = mimeType; ArrayList mutableKeys = new ArrayList<>(streamKeys); Collections.sort(mutableKeys); this.streamKeys = Collections.unmodifiableList(mutableKeys); + this.keySetId = keySetId != null ? Arrays.copyOf(keySetId, keySetId.length) : null; this.customCacheKey = customCacheKey; this.data = data != null ? Arrays.copyOf(data, data.length) : Util.EMPTY_BYTE_ARRAY; } /* package */ DownloadRequest(Parcel in) { id = castNonNull(in.readString()); - type = castNonNull(in.readString()); uri = Uri.parse(castNonNull(in.readString())); + mimeType = in.readString(); int streamKeyCount = in.readInt(); ArrayList mutableStreamKeys = new ArrayList<>(streamKeyCount); for (int i = 0; i < streamKeyCount; i++) { mutableStreamKeys.add(in.readParcelable(StreamKey.class.getClassLoader())); } streamKeys = Collections.unmodifiableList(mutableStreamKeys); + keySetId = in.createByteArray(); customCacheKey = in.readString(); data = castNonNull(in.createByteArray()); } @@ -110,24 +180,32 @@ public DownloadRequest( * @return The copy with the specified ID. */ public DownloadRequest copyWithId(String id) { - return new DownloadRequest(id, type, uri, streamKeys, customCacheKey, data); + return new DownloadRequest(id, uri, mimeType, streamKeys, keySetId, customCacheKey, data); + } + + /** + * Returns a copy with the specified key set ID. + * + * @param keySetId The key set ID of the copy. + * @return The copy with the specified key set ID. 
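A minimal sketch of constructing a request directly with the new Builder instead of the removed type-based constructor; the id, URI and application data are placeholders:

    import android.net.Uri;
    import com.google.android.exoplayer2.offline.DownloadRequest;
    import com.google.android.exoplayer2.util.MimeTypes;

    final class DownloadRequestBuilderSketch {
      static DownloadRequest buildHlsRequest(byte[] offlineLicenseKeySetId) {
        return new DownloadRequest.Builder(
                /* id= */ "my-hls-download", // placeholder content id
                Uri.parse("https://example.com/playlist.m3u8")) // placeholder URI
            .setMimeType(MimeTypes.APPLICATION_M3U8) // replaces the removed TYPE_HLS constant
            .setKeySetId(offlineLicenseKeySetId) // null if the content is not DRM protected
            .setData("my app data".getBytes()) // optional opaque application data
            .build();
      }
    }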
+ */ + public DownloadRequest copyWithKeySetId(@Nullable byte[] keySetId) { + return new DownloadRequest(id, uri, mimeType, streamKeys, keySetId, customCacheKey, data); } /** * Returns the result of merging {@code newRequest} into this request. The requests must have the - * same {@link #id} and {@link #type}. + * same {@link #id}. * - *
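// Illustrative sketch, not part of the patch: building a request with the Builder added above.
// The TYPE_* constants are gone; the content type is now inferred from the MIME type (or, if
// that is null, from the uri). The id, uri and data payload below are placeholders.
import android.net.Uri;
import com.google.android.exoplayer2.offline.DownloadRequest;
import com.google.android.exoplayer2.util.MimeTypes;

public final class DownloadRequestExample {
  public static DownloadRequest buildDashRequest() {
    DownloadRequest request =
        new DownloadRequest.Builder(
                /* id= */ "my_download", Uri.parse("https://example.com/stream.mpd"))
            .setMimeType(MimeTypes.APPLICATION_MPD)
            .setData("app-specific metadata".getBytes())
            .build();
    // request.toMediaItem() (added further down) yields a MediaItem that can be handed back to a
    // player for offline playback.
    return request;
  }
}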

- * <p>If the requests have different {@link #uri}, {@link #customCacheKey} and {@link #data} - * values, then those from the request being merged are included in the result. + *

      The resulting request contains the stream keys from both requests. For all other member + * variables, those in {@code newRequest} are preferred. * * @param newRequest The request being merged. * @return The merged result. - * @throws IllegalArgumentException If the requests do not have the same {@link #id} and {@link - * #type}. + * @throws IllegalArgumentException If the requests do not have the same {@link #id}. */ public DownloadRequest copyWithMergedRequest(DownloadRequest newRequest) { Assertions.checkArgument(id.equals(newRequest.id)); - Assertions.checkArgument(type.equals(newRequest.type)); List mergedKeys; if (streamKeys.isEmpty() || newRequest.streamKeys.isEmpty()) { // If either streamKeys is empty then all streams should be downloaded. @@ -142,12 +220,29 @@ public DownloadRequest copyWithMergedRequest(DownloadRequest newRequest) { } } return new DownloadRequest( - id, type, newRequest.uri, mergedKeys, newRequest.customCacheKey, newRequest.data); + id, + newRequest.uri, + newRequest.mimeType, + mergedKeys, + newRequest.keySetId, + newRequest.customCacheKey, + newRequest.data); + } + + /** Returns a {@link MediaItem} for the content defined by the request. */ + public MediaItem toMediaItem() { + return new MediaItem.Builder() + .setMediaId(id) + .setUri(uri) + .setCustomCacheKey(customCacheKey) + .setMimeType(mimeType) + .setStreamKeys(streamKeys) + .build(); } @Override public String toString() { - return type + ":" + id; + return mimeType + ":" + id; } @Override @@ -157,20 +252,21 @@ public boolean equals(@Nullable Object o) { } DownloadRequest that = (DownloadRequest) o; return id.equals(that.id) - && type.equals(that.type) && uri.equals(that.uri) + && Util.areEqual(mimeType, that.mimeType) && streamKeys.equals(that.streamKeys) + && Arrays.equals(keySetId, that.keySetId) && Util.areEqual(customCacheKey, that.customCacheKey) && Arrays.equals(data, that.data); } @Override public final int hashCode() { - int result = type.hashCode(); - result = 31 * result + id.hashCode(); - result = 31 * result + type.hashCode(); + int result = 31 * id.hashCode(); result = 31 * result + uri.hashCode(); + result = 31 * result + (mimeType != null ? mimeType.hashCode() : 0); result = 31 * result + streamKeys.hashCode(); + result = 31 * result + Arrays.hashCode(keySetId); result = 31 * result + (customCacheKey != null ? 
customCacheKey.hashCode() : 0); result = 31 * result + Arrays.hashCode(data); return result; @@ -186,12 +282,13 @@ public int describeContents() { @Override public void writeToParcel(Parcel dest, int flags) { dest.writeString(id); - dest.writeString(type); dest.writeString(uri.toString()); + dest.writeString(mimeType); dest.writeInt(streamKeys.size()); for (int i = 0; i < streamKeys.size(); i++) { dest.writeParcelable(streamKeys.get(i), /* parcelableFlags= */ 0); } + dest.writeByteArray(keySetId); dest.writeString(customCacheKey); dest.writeByteArray(data); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadService.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadService.java index f78e9bb545..03b05bf0ee 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadService.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloadService.java @@ -18,6 +18,7 @@ import static com.google.android.exoplayer2.offline.Download.STOP_REASON_NONE; import android.app.Notification; +import android.app.NotificationManager; import android.app.Service; import android.content.Context; import android.content.Intent; @@ -27,6 +28,7 @@ import androidx.annotation.Nullable; import androidx.annotation.StringRes; import com.google.android.exoplayer2.scheduler.Requirements; +import com.google.android.exoplayer2.scheduler.Requirements.RequirementFlags; import com.google.android.exoplayer2.scheduler.Scheduler; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; @@ -35,8 +37,18 @@ import java.util.HashMap; import java.util.List; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** A {@link Service} for downloading media. */ +/** + * A {@link Service} for downloading media. + * + *

      Apps with target SDK 33 and greater need to add the {@code + * android.permission.POST_NOTIFICATIONS} permission to the manifest and request the permission at + * runtime before starting downloads. Without that permission granted by the user, notifications + * posted by this service are not displayed. See the + * official UI guide for more detailed information. + */ public abstract class DownloadService extends Service { /** @@ -167,9 +179,11 @@ public abstract class DownloadService extends Service { private static final String TAG = "DownloadService"; - // Keep a DownloadManagerHelper for each DownloadService as long as the process is running. The - // helper is needed to restart the DownloadService when there's no scheduler. Even when there is a - // scheduler, the DownloadManagerHelper is typically able to restart the DownloadService faster. + // Maps each concrete DownloadService subclass to a single DownloadManagerHelper instance. This + // ensures getDownloadManager is only called once per subclass, even if a new instance of the + // service is created. The DownloadManagerHelper wrapper also takes care of restarting the service + // when there's no scheduler, and is often able to restart the service faster than the scheduler + // even when there is one. private static final HashMap, DownloadManagerHelper> downloadManagerHelpers = new HashMap<>(); @@ -178,7 +192,7 @@ public abstract class DownloadService extends Service { @StringRes private final int channelNameResourceId; @StringRes private final int channelDescriptionResourceId; - @MonotonicNonNull private DownloadManager downloadManager; + private @MonotonicNonNull DownloadManagerHelper downloadManagerHelper; private int lastStartId; private boolean startedInForeground; private boolean taskRemoved; @@ -189,8 +203,7 @@ public abstract class DownloadService extends Service { * Creates a DownloadService. * *
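// Illustrative sketch, not part of the patch: as the new class javadoc above notes, apps
// targeting SDK 33 must hold android.permission.POST_NOTIFICATIONS for the download notification
// to be shown. MyDownloadService is the service subclass sketched further below; the request
// code is a placeholder, and a real app would wait for the permission result before relying on
// the notification being visible.
import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;
import android.os.Build;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import com.google.android.exoplayer2.offline.DownloadRequest;
import com.google.android.exoplayer2.offline.DownloadService;

public final class DownloadStarter {
  public static void startDownload(Activity activity, DownloadRequest request) {
    if (Build.VERSION.SDK_INT >= 33
        && ContextCompat.checkSelfPermission(activity, Manifest.permission.POST_NOTIFICATIONS)
            != PackageManager.PERMISSION_GRANTED) {
      ActivityCompat.requestPermissions(
          activity, new String[] {Manifest.permission.POST_NOTIFICATIONS}, /* requestCode= */ 0);
    }
    DownloadService.sendAddDownload(
        activity, MyDownloadService.class, request, /* foreground= */ false);
  }
}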

* <p>If {@code foregroundNotificationId} is {@link #FOREGROUND_NOTIFICATION_ID_NONE} then the - * service will only ever run in the background. No foreground notification will be displayed and - * {@link #getScheduler()} will not be called. + * service will only ever run in the background, and no foreground notification will be displayed. *

      If {@code foregroundNotificationId} is not {@link #FOREGROUND_NOTIFICATION_ID_NONE} then the * service will run in the foreground. The foreground notification will be updated at least as @@ -222,7 +235,9 @@ protected DownloadService( /* channelDescriptionResourceId= */ 0); } - /** @deprecated Use {@link #DownloadService(int, long, String, int, int)}. */ + /** + * @deprecated Use {@link #DownloadService(int, long, String, int, int)}. + */ @Deprecated protected DownloadService( int foregroundNotificationId, @@ -567,6 +582,17 @@ public static void startForeground(Context context, ClassCalling this method is normally only required if an app supports downloading content for + * multiple users for which different download directories should be used. + */ + public static void clearDownloadManagerHelpers() { + downloadManagerHelpers.clear(); + } + @Override public void onCreate() { if (channelId != null) { @@ -581,16 +607,19 @@ public void onCreate() { @Nullable DownloadManagerHelper downloadManagerHelper = downloadManagerHelpers.get(clazz); if (downloadManagerHelper == null) { boolean foregroundAllowed = foregroundNotificationUpdater != null; - @Nullable Scheduler scheduler = foregroundAllowed ? getScheduler() : null; - downloadManager = getDownloadManager(); + // See https://developer.android.com/about/versions/12/foreground-services. + boolean canStartForegroundServiceFromBackground = Util.SDK_INT < 31; + @Nullable + Scheduler scheduler = + foregroundAllowed && canStartForegroundServiceFromBackground ? getScheduler() : null; + DownloadManager downloadManager = getDownloadManager(); downloadManager.resumeDownloads(); downloadManagerHelper = new DownloadManagerHelper( getApplicationContext(), downloadManager, foregroundAllowed, scheduler, clazz); downloadManagerHelpers.put(clazz, downloadManagerHelper); - } else { - downloadManager = downloadManagerHelper.downloadManager; } + this.downloadManagerHelper = downloadManagerHelper; downloadManagerHelper.attachService(this); } @@ -610,7 +639,8 @@ public int onStartCommand(@Nullable Intent intent, int flags, int startId) { if (intentAction == null) { intentAction = ACTION_INIT; } - DownloadManager downloadManager = Assertions.checkNotNull(this.downloadManager); + DownloadManager downloadManager = + Assertions.checkNotNull(downloadManagerHelper).downloadManager; switch (intentAction) { case ACTION_INIT: case ACTION_RESTART: @@ -673,7 +703,7 @@ public int onStartCommand(@Nullable Intent intent, int flags, int startId) { isStopped = false; if (downloadManager.isIdle()) { - stop(); + onIdle(); } return START_STICKY; } @@ -686,9 +716,7 @@ public void onTaskRemoved(Intent rootIntent) { @Override public void onDestroy() { isDestroyed = true; - DownloadManagerHelper downloadManagerHelper = - Assertions.checkNotNull(downloadManagerHelpers.get(getClass())); - downloadManagerHelper.detachService(this); + Assertions.checkNotNull(downloadManagerHelper).detachService(this); if (foregroundNotificationUpdater != null) { foregroundNotificationUpdater.stopPeriodicUpdates(); } @@ -697,27 +725,51 @@ public void onDestroy() { /** * Throws {@link UnsupportedOperationException} because this service is not designed to be bound. */ - @Nullable @Override + @Nullable public final IBinder onBind(Intent intent) { throw new UnsupportedOperationException(); } /** - * Returns a {@link DownloadManager} to be used to downloaded content. Called only once in the - * life cycle of the process. + * Returns a {@link DownloadManager} to be used to downloaded content. 
For each concrete download + * service subclass, this is called once in the lifecycle of the process when {@link #onCreate} is + * called on the first instance of the service. If the service is destroyed and a new instance is + * created later, the new instance will use the previously returned {@link DownloadManager} + * without this method being called again. */ protected abstract DownloadManager getDownloadManager(); /** - * Returns a {@link Scheduler} to restart the service when requirements allowing downloads to take - * place are met. If {@code null}, the service will only be restarted if the process is still in - * memory when the requirements are met. + * Returns a {@link Scheduler} to restart the service when requirements for downloads to continue + * are met. * - *
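// Illustrative sketch, not part of the patch: a minimal concrete service wired to the reworked
// callbacks (getForegroundNotification now also receives the not-met requirement flags; see the
// updated signature further down in this file). The channel id, the R.string resource and the
// DownloadManagerHolder singleton are placeholders for app-specific code.
import android.app.Notification;
import androidx.annotation.Nullable;
import androidx.core.app.NotificationCompat;
import com.google.android.exoplayer2.offline.Download;
import com.google.android.exoplayer2.offline.DownloadManager;
import com.google.android.exoplayer2.offline.DownloadService;
import com.google.android.exoplayer2.scheduler.PlatformScheduler;
import com.google.android.exoplayer2.scheduler.Requirements.RequirementFlags;
import com.google.android.exoplayer2.scheduler.Scheduler;
import java.util.List;

public final class MyDownloadService extends DownloadService {

  public MyDownloadService() {
    super(
        /* foregroundNotificationId= */ 1,
        DEFAULT_FOREGROUND_NOTIFICATION_UPDATE_INTERVAL,
        /* channelId= */ "downloads",
        /* channelNameResourceId= */ R.string.download_channel, // placeholder resource
        /* channelDescriptionResourceId= */ 0);
  }

  @Override
  protected DownloadManager getDownloadManager() {
    // Called at most once per process for this subclass; hand back an app-wide singleton.
    return DownloadManagerHolder.get(getApplicationContext());
  }

  @Override
  @Nullable
  protected Scheduler getScheduler() {
    // Only consulted below API 31; see the behaviour notes in the getScheduler javadoc below.
    return new PlatformScheduler(this, /* jobId= */ 1);
  }

  @Override
  protected Notification getForegroundNotification(
      List<Download> downloads, @RequirementFlags int notMetRequirements) {
    // Placeholder notification; a real implementation would show per-download progress and could
    // surface notMetRequirements (e.g. waiting for an unmetered network) in the message.
    return new NotificationCompat.Builder(this, "downloads")
        .setSmallIcon(android.R.drawable.stat_sys_download)
        .setContentTitle("Downloading")
        .setOngoing(true)
        .build();
  }
}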

- * <p>This method is not called for services whose {@code foregroundNotificationId} is set to - * {@link #FOREGROUND_NOTIFICATION_ID_NONE}. Such services will only be restarted if the process - * is still in memory and considered non-idle, meaning that it's either in the foreground or was - * backgrounded within the last few minutes. + *

+ * <p>This method is not called on all devices or for all service configurations. When it is + * called, it's called only once in the life cycle of the process. If a service has unfinished + * downloads that cannot make progress due to unmet requirements, it will behave according to the + * first matching case below: + *

+ * <ul>
      • If the service has {@code foregroundNotificationId} set to {@link + * #FOREGROUND_NOTIFICATION_ID_NONE}, then this method will not be called. The service will + * remain in the background until the downloads are able to continue to completion or the + * service is killed by the platform. + *
      • If the device API level is less than 31, a {@link Scheduler} is returned from this + * method, and the returned {@link Scheduler} {@link Scheduler#getSupportedRequirements + * supports} all of the requirements that have been specified for downloads to continue, + * then the service will stop itself and the {@link Scheduler} will be used to restart it in + * the foreground when the requirements are met. + *
      • If the device API level is less than 31 and either {@code null} or a {@link Scheduler} + * that does not {@link Scheduler#getSupportedRequirements support} all of the requirements + * is returned from this method, then the service will remain in the foreground until the + * downloads are able to continue to completion. + *
      • If the device API level is 31 or above, then this method will not be called and the + * service will remain in the foreground until the downloads are able to continue to + * completion. A {@link Scheduler} cannot be used for this case due to Android 12 + * foreground service launch restrictions. + *
+ * </ul>
      */ @Nullable protected abstract Scheduler getScheduler(); @@ -731,13 +783,15 @@ public final IBinder onBind(Intent intent) { * be implemented to throw {@link UnsupportedOperationException}. * * @param downloads The current downloads. + * @param notMetRequirements Any requirements for downloads that are not currently met. * @return The foreground notification to display. */ - protected abstract Notification getForegroundNotification(List downloads); + protected abstract Notification getForegroundNotification( + List downloads, @RequirementFlags int notMetRequirements); /** * Invalidates the current foreground notification and causes {@link - * #getForegroundNotification(List)} to be invoked again if the service isn't stopped. + * #getForegroundNotification(List, int)} to be invoked again if the service isn't stopped. */ protected final void invalidateForegroundNotification() { if (foregroundNotificationUpdater != null && !isDestroyed) { @@ -745,27 +799,6 @@ protected final void invalidateForegroundNotification() { } } - /** - * @deprecated Some state change events may not be delivered to this method. Instead, use {@link - * DownloadManager#addListener(DownloadManager.Listener)} to register a listener directly to - * the {@link DownloadManager} that you return through {@link #getDownloadManager()}. - */ - @Deprecated - protected void onDownloadChanged(Download download) { - // Do nothing. - } - - /** - * @deprecated Some download removal events may not be delivered to this method. Instead, use - * {@link DownloadManager#addListener(DownloadManager.Listener)} to register a listener - * directly to the {@link DownloadManager} that you return through {@link - * #getDownloadManager()}. - */ - @Deprecated - protected void onDownloadRemoved(Download download) { - // Do nothing. - } - /** * Called after the service is created, once the downloads are known. * @@ -787,9 +820,7 @@ private void notifyDownloads(List downloads) { * * @param download The state of the download. */ - @SuppressWarnings("deprecation") private void notifyDownloadChanged(Download download) { - onDownloadChanged(download); if (foregroundNotificationUpdater != null) { if (needsStartedService(download.state)) { foregroundNotificationUpdater.startPeriodicUpdates(); @@ -799,14 +830,8 @@ private void notifyDownloadChanged(Download download) { } } - /** - * Called when a download is removed. - * - * @param download The last state of the download before it was removed. - */ - @SuppressWarnings("deprecation") - private void notifyDownloadRemoved(Download download) { - onDownloadRemoved(download); + /** Called when a download is removed. */ + private void notifyDownloadRemoved() { if (foregroundNotificationUpdater != null) { foregroundNotificationUpdater.invalidate(); } @@ -817,10 +842,21 @@ private boolean isStopped() { return isStopped; } - private void stop() { + private void onIdle() { if (foregroundNotificationUpdater != null) { + // Whether the service remains started or not, we don't need periodic notification updates + // when the DownloadManager is idle. foregroundNotificationUpdater.stopPeriodicUpdates(); } + + if (!Assertions.checkNotNull(downloadManagerHelper).updateScheduler()) { + // We failed to schedule the service to restart when requirements that the DownloadManager is + // waiting for are met, so remain started. + return; + } + + // Stop the service, either because the DownloadManager is not waiting for requirements to be + // met, or because we've scheduled the service to be restarted when they are. 
if (Util.SDK_INT < 28 && taskRemoved) { // See [Internal: b/74248644]. stopSelf(); isStopped = true; @@ -891,9 +927,20 @@ public void invalidate() { } private void update() { - List downloads = Assertions.checkNotNull(downloadManager).getCurrentDownloads(); - startForeground(notificationId, getForegroundNotification(downloads)); - notificationDisplayed = true; + DownloadManager downloadManager = + Assertions.checkNotNull(downloadManagerHelper).downloadManager; + List downloads = downloadManager.getCurrentDownloads(); + @RequirementFlags int notMetRequirements = downloadManager.getNotMetRequirements(); + Notification notification = getForegroundNotification(downloads, notMetRequirements); + if (!notificationDisplayed) { + startForeground(notificationId, notification); + notificationDisplayed = true; + } else { + // Update the notification via NotificationManager rather than by repeatedly calling + // startForeground, since the latter can cause ActivityManager log spam. + ((NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE)) + .notify(notificationId, notification); + } if (periodicUpdatesStarted) { handler.removeCallbacksAndMessages(null); handler.postDelayed(this::update, updateInterval); @@ -908,7 +955,9 @@ private static final class DownloadManagerHelper implements DownloadManager.List private final boolean foregroundAllowed; @Nullable private final Scheduler scheduler; private final Class serviceClass; + @Nullable private DownloadService downloadService; + private @MonotonicNonNull Requirements scheduledRequirements; private DownloadManagerHelper( Context context, @@ -934,7 +983,7 @@ public void attachService(DownloadService downloadService) { // DownloadService.getForegroundNotification, and concrete subclass implementations may // not anticipate the possibility of this method being called before their onCreate // implementation has finished executing. - new Handler() + Util.createHandlerForCurrentOrMainLooper() .postAtFrontOfQueue( () -> downloadService.notifyDownloads(downloadManager.getCurrentDownloads())); } @@ -943,8 +992,46 @@ public void attachService(DownloadService downloadService) { public void detachService(DownloadService downloadService) { Assertions.checkState(this.downloadService == downloadService); this.downloadService = null; - if (scheduler != null && !downloadManager.isWaitingForRequirements()) { - scheduler.cancel(); + } + + /** + * Schedules or cancels restarting the service, as needed for the current state. + * + * @return True if the DownloadManager is not waiting for requirements, or if it is waiting for + * requirements and the service has been successfully scheduled to be restarted when they + * are met. False if the DownloadManager is waiting for requirements and the service has not + * been scheduled for restart. 
+ */ + public boolean updateScheduler() { + boolean waitingForRequirements = downloadManager.isWaitingForRequirements(); + if (scheduler == null) { + return !waitingForRequirements; + } + + if (!waitingForRequirements) { + cancelScheduler(); + return true; + } + + Requirements requirements = downloadManager.getRequirements(); + Requirements supportedRequirements = scheduler.getSupportedRequirements(requirements); + if (!supportedRequirements.equals(requirements)) { + cancelScheduler(); + return false; + } + + if (!schedulerNeedsUpdate(requirements)) { + return true; + } + + String servicePackage = context.getPackageName(); + if (scheduler.schedule(requirements, servicePackage, ACTION_RESTART)) { + scheduledRequirements = requirements; + return true; + } else { + Log.w(TAG, "Failed to schedule restart"); + cancelScheduler(); + return false; } } @@ -958,7 +1045,8 @@ public void onInitialized(DownloadManager downloadManager) { } @Override - public void onDownloadChanged(DownloadManager downloadManager, Download download) { + public void onDownloadChanged( + DownloadManager downloadManager, Download download, @Nullable Exception finalException) { if (downloadService != null) { downloadService.notifyDownloadChanged(download); } @@ -975,17 +1063,25 @@ public void onDownloadChanged(DownloadManager downloadManager, Download download @Override public void onDownloadRemoved(DownloadManager downloadManager, Download download) { if (downloadService != null) { - downloadService.notifyDownloadRemoved(download); + downloadService.notifyDownloadRemoved(); } } @Override public final void onIdle(DownloadManager downloadManager) { if (downloadService != null) { - downloadService.stop(); + downloadService.onIdle(); } } + @Override + public void onRequirementsStateChanged( + DownloadManager downloadManager, + Requirements requirements, + @RequirementFlags int notMetRequirements) { + updateScheduler(); + } + @Override public void onWaitingForRequirementsChanged( DownloadManager downloadManager, boolean waitingForRequirements) { @@ -999,23 +1095,42 @@ && serviceMayNeedRestart()) { for (int i = 0; i < downloads.size(); i++) { if (downloads.get(i).state == Download.STATE_QUEUED) { restartService(); - break; + return; } } } - updateScheduler(); } // Internal methods. + private boolean schedulerNeedsUpdate(Requirements requirements) { + return !Util.areEqual(scheduledRequirements, requirements); + } + + @RequiresNonNull("scheduler") + private void cancelScheduler() { + Requirements canceledRequirements = new Requirements(/* requirements= */ 0); + if (schedulerNeedsUpdate(canceledRequirements)) { + scheduler.cancel(); + scheduledRequirements = canceledRequirements; + } + } + private boolean serviceMayNeedRestart() { return downloadService == null || downloadService.isStopped(); } private void restartService() { if (foregroundAllowed) { - Intent intent = getIntent(context, serviceClass, DownloadService.ACTION_RESTART); - Util.startForegroundService(context, intent); + try { + Intent intent = getIntent(context, serviceClass, DownloadService.ACTION_RESTART); + Util.startForegroundService(context, intent); + } catch (IllegalStateException e) { + // The process is running in the background, and is not allowed to start a foreground + // service due to foreground service launch restrictions + // (https://developer.android.com/about/versions/12/foreground-services). + Log.w(TAG, "Failed to restart (foreground launch restriction)"); + } } else { // The service is background only. 
Use ACTION_INIT rather than ACTION_RESTART because // ACTION_RESTART is handled as though KEY_FOREGROUND is set to true. @@ -1025,25 +1140,9 @@ private void restartService() { } catch (IllegalStateException e) { // The process is classed as idle by the platform. Starting a background service is not // allowed in this state. - Log.w(TAG, "Failed to restart DownloadService (process is idle)."); + Log.w(TAG, "Failed to restart (process is idle)"); } } } - - private void updateScheduler() { - if (scheduler == null) { - return; - } - if (downloadManager.isWaitingForRequirements()) { - String servicePackage = context.getPackageName(); - Requirements requirements = downloadManager.getRequirements(); - boolean success = scheduler.schedule(requirements, servicePackage, ACTION_RESTART); - if (!success) { - Log.e(TAG, "Scheduling downloads failed."); - } - } else { - scheduler.cancel(); - } - } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/Downloader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/Downloader.java index fa10d5842b..1059157d34 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/Downloader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/Downloader.java @@ -18,6 +18,7 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import java.io.IOException; +import java.util.concurrent.CancellationException; /** Downloads and removes a piece of content. */ public interface Downloader { @@ -28,6 +29,10 @@ interface ProgressListener { /** * Called when progress is made during a download operation. * + *

      May be called directly from {@link #download}, or from any other thread used by the + * downloader. In all cases, {@link #download} is guaranteed not to return until after the last + * call to this method has finished executing. + * * @param contentLength The length of the content in bytes, or {@link C#LENGTH_UNSET} if * unknown. * @param bytesDownloaded The number of bytes that have been downloaded. @@ -40,21 +45,32 @@ interface ProgressListener { /** * Downloads the content. * + *
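// Illustrative sketch, not part of the patch: a listener that publishes progress into a
// DownloadProgress instance. Because onProgress can arrive on whichever thread the downloader
// uses (see the note above), the now-volatile DownloadProgress fields are what make the values
// safe to read from another thread, e.g. a UI poller.
import com.google.android.exoplayer2.offline.DownloadProgress;
import com.google.android.exoplayer2.offline.Downloader;

public final class ProgressPublisher implements Downloader.ProgressListener {

  private final DownloadProgress progress = new DownloadProgress();

  @Override
  public void onProgress(long contentLength, long bytesDownloaded, float percentDownloaded) {
    progress.bytesDownloaded = bytesDownloaded;
    progress.percentDownloaded = percentDownloaded;
  }

  public DownloadProgress getProgress() {
    return progress;
  }
}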

+ * <p>If downloading fails, this method can be called again to resume the download. It cannot be + * called again after the download has been {@link #cancel canceled}. + * + *

      If downloading is canceled whilst this method is executing, then it is expected that it will + * return reasonably quickly. However, there are no guarantees about how the method will return, + * meaning that it can return without throwing, or by throwing any of its documented exceptions. + * The caller must use its own knowledge about whether downloading has been canceled to determine + * whether this is why the method has returned, rather than relying on the method returning in a + * particular way. + * * @param progressListener A listener to receive progress updates, or {@code null}. - * @throws DownloadException Thrown if the content cannot be downloaded. - * @throws InterruptedException If the thread has been interrupted. - * @throws IOException Thrown when there is an io error while downloading. + * @throws IOException If the download failed to complete successfully. + * @throws InterruptedException If the download was interrupted. + * @throws CancellationException If the download was canceled. */ void download(@Nullable ProgressListener progressListener) - throws InterruptedException, IOException; - - /** Cancels the download operation and prevents future download operations from running. */ - void cancel(); + throws IOException, InterruptedException; /** - * Removes the content. + * Permanently cancels the downloading by this downloader. The caller should also interrupt the + * downloading thread immediately after calling this method. * - * @throws InterruptedException Thrown if the thread was interrupted. + *
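// Illustrative sketch, not part of the patch: driving the download/cancel contract described
// above. The downloader runs on a dedicated thread; cancel() is followed by an interrupt, and
// the caller's own canceled flag decides how the way download() returned should be interpreted.
// DownloadRunner is a made-up helper name.
import com.google.android.exoplayer2.offline.Downloader;
import java.io.IOException;
import java.util.concurrent.CancellationException;

public final class DownloadRunner {

  private final Downloader downloader;
  private final Thread thread;
  private volatile boolean canceled;

  public DownloadRunner(Downloader downloader) {
    this.downloader = downloader;
    this.thread =
        new Thread(
            () -> {
              try {
                downloader.download(/* progressListener= */ null);
                // Completed successfully.
              } catch (IOException | InterruptedException | CancellationException e) {
                if (!canceled) {
                  // A genuine failure: retry or report the error here.
                }
                // Otherwise the return is just a consequence of cancel(), so it is ignored.
              }
            });
  }

  public void start() {
    thread.start();
  }

  public void cancel() {
    canceled = true;
    downloader.cancel();
    thread.interrupt();
  }
}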

      Once canceled, {@link #download} cannot be called again. */ - void remove() throws InterruptedException; + void cancel(); + + /** Removes the content. */ + void remove(); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloaderConstructorHelper.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloaderConstructorHelper.java deleted file mode 100644 index 0d53b3cde0..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloaderConstructorHelper.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright (C) 2017 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.offline; - -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.upstream.DataSink; -import com.google.android.exoplayer2.upstream.DataSource; -import com.google.android.exoplayer2.upstream.DummyDataSource; -import com.google.android.exoplayer2.upstream.FileDataSource; -import com.google.android.exoplayer2.upstream.PriorityDataSourceFactory; -import com.google.android.exoplayer2.upstream.cache.Cache; -import com.google.android.exoplayer2.upstream.cache.CacheDataSink; -import com.google.android.exoplayer2.upstream.cache.CacheDataSinkFactory; -import com.google.android.exoplayer2.upstream.cache.CacheDataSource; -import com.google.android.exoplayer2.upstream.cache.CacheDataSourceFactory; -import com.google.android.exoplayer2.upstream.cache.CacheKeyFactory; -import com.google.android.exoplayer2.upstream.cache.CacheUtil; -import com.google.android.exoplayer2.util.PriorityTaskManager; - -/** A helper class that holds necessary parameters for {@link Downloader} construction. */ -public final class DownloaderConstructorHelper { - - private final Cache cache; - @Nullable private final CacheKeyFactory cacheKeyFactory; - @Nullable private final PriorityTaskManager priorityTaskManager; - private final CacheDataSourceFactory onlineCacheDataSourceFactory; - private final CacheDataSourceFactory offlineCacheDataSourceFactory; - - /** - * @param cache Cache instance to be used to store downloaded data. - * @param upstreamFactory A {@link DataSource.Factory} for creating {@link DataSource}s for - * downloading data. - */ - public DownloaderConstructorHelper(Cache cache, DataSource.Factory upstreamFactory) { - this( - cache, - upstreamFactory, - /* cacheReadDataSourceFactory= */ null, - /* cacheWriteDataSinkFactory= */ null, - /* priorityTaskManager= */ null); - } - - /** - * @param cache Cache instance to be used to store downloaded data. - * @param upstreamFactory A {@link DataSource.Factory} for creating {@link DataSource}s for - * downloading data. - * @param cacheReadDataSourceFactory A {@link DataSource.Factory} for creating {@link DataSource}s - * for reading data from the cache. If null then a {@link FileDataSource.Factory} will be - * used. 
- * @param cacheWriteDataSinkFactory A {@link DataSink.Factory} for creating {@link DataSource}s - * for writing data to the cache. If null then a {@link CacheDataSinkFactory} will be used. - * @param priorityTaskManager A {@link PriorityTaskManager} to use when downloading. If non-null, - * downloaders will register as tasks with priority {@link C#PRIORITY_DOWNLOAD} whilst - * downloading. - */ - public DownloaderConstructorHelper( - Cache cache, - DataSource.Factory upstreamFactory, - @Nullable DataSource.Factory cacheReadDataSourceFactory, - @Nullable DataSink.Factory cacheWriteDataSinkFactory, - @Nullable PriorityTaskManager priorityTaskManager) { - this( - cache, - upstreamFactory, - cacheReadDataSourceFactory, - cacheWriteDataSinkFactory, - priorityTaskManager, - /* cacheKeyFactory= */ null); - } - - /** - * @param cache Cache instance to be used to store downloaded data. - * @param upstreamFactory A {@link DataSource.Factory} for creating {@link DataSource}s for - * downloading data. - * @param cacheReadDataSourceFactory A {@link DataSource.Factory} for creating {@link DataSource}s - * for reading data from the cache. If null then a {@link FileDataSource.Factory} will be - * used. - * @param cacheWriteDataSinkFactory A {@link DataSink.Factory} for creating {@link DataSource}s - * for writing data to the cache. If null then a {@link CacheDataSinkFactory} will be used. - * @param priorityTaskManager A {@link PriorityTaskManager} to use when downloading. If non-null, - * downloaders will register as tasks with priority {@link C#PRIORITY_DOWNLOAD} whilst - * downloading. - * @param cacheKeyFactory An optional factory for cache keys. - */ - public DownloaderConstructorHelper( - Cache cache, - DataSource.Factory upstreamFactory, - @Nullable DataSource.Factory cacheReadDataSourceFactory, - @Nullable DataSink.Factory cacheWriteDataSinkFactory, - @Nullable PriorityTaskManager priorityTaskManager, - @Nullable CacheKeyFactory cacheKeyFactory) { - if (priorityTaskManager != null) { - upstreamFactory = - new PriorityDataSourceFactory(upstreamFactory, priorityTaskManager, C.PRIORITY_DOWNLOAD); - } - DataSource.Factory readDataSourceFactory = - cacheReadDataSourceFactory != null - ? cacheReadDataSourceFactory - : new FileDataSource.Factory(); - if (cacheWriteDataSinkFactory == null) { - cacheWriteDataSinkFactory = - new CacheDataSinkFactory(cache, CacheDataSink.DEFAULT_FRAGMENT_SIZE); - } - onlineCacheDataSourceFactory = - new CacheDataSourceFactory( - cache, - upstreamFactory, - readDataSourceFactory, - cacheWriteDataSinkFactory, - CacheDataSource.FLAG_BLOCK_ON_CACHE, - /* eventListener= */ null, - cacheKeyFactory); - offlineCacheDataSourceFactory = - new CacheDataSourceFactory( - cache, - DummyDataSource.FACTORY, - readDataSourceFactory, - null, - CacheDataSource.FLAG_BLOCK_ON_CACHE, - /* eventListener= */ null, - cacheKeyFactory); - this.cache = cache; - this.priorityTaskManager = priorityTaskManager; - this.cacheKeyFactory = cacheKeyFactory; - } - - /** Returns the {@link Cache} instance. */ - public Cache getCache() { - return cache; - } - - /** Returns the {@link CacheKeyFactory}. */ - public CacheKeyFactory getCacheKeyFactory() { - return cacheKeyFactory != null ? cacheKeyFactory : CacheUtil.DEFAULT_CACHE_KEY_FACTORY; - } - - /** Returns a {@link PriorityTaskManager} instance. */ - public PriorityTaskManager getPriorityTaskManager() { - // Return a dummy PriorityTaskManager if none is provided. 
Create a new PriorityTaskManager - // each time so clients don't affect each other over the dummy PriorityTaskManager instance. - return priorityTaskManager != null ? priorityTaskManager : new PriorityTaskManager(); - } - - /** Returns a new {@link CacheDataSource} instance. */ - public CacheDataSource createCacheDataSource() { - return onlineCacheDataSourceFactory.createDataSource(); - } - - /** - * Returns a new {@link CacheDataSource} instance which accesses cache read-only and throws an - * exception on cache miss. - */ - public CacheDataSource createOfflineCacheDataSource() { - return offlineCacheDataSourceFactory.createDataSource(); - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloaderFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloaderFactory.java index f98ca3eac3..435c5aa00c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloaderFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/DownloaderFactory.java @@ -21,8 +21,8 @@ public interface DownloaderFactory { /** * Creates a {@link Downloader} to perform the given {@link DownloadRequest}. * - * @param action The action. + * @param request The download request. * @return The downloader. */ - Downloader createDownloader(DownloadRequest action); + Downloader createDownloader(DownloadRequest request); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/ProgressiveDownloader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/ProgressiveDownloader.java index a73258272c..064aeff5e0 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/ProgressiveDownloader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/ProgressiveDownloader.java @@ -15,106 +15,152 @@ */ package com.google.android.exoplayer2.offline; -import android.net.Uri; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.upstream.DataSpec; -import com.google.android.exoplayer2.upstream.cache.Cache; import com.google.android.exoplayer2.upstream.cache.CacheDataSource; -import com.google.android.exoplayer2.upstream.cache.CacheKeyFactory; -import com.google.android.exoplayer2.upstream.cache.CacheUtil; +import com.google.android.exoplayer2.upstream.cache.CacheWriter; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.PriorityTaskManager; +import com.google.android.exoplayer2.util.PriorityTaskManager.PriorityTooLowException; +import com.google.android.exoplayer2.util.RunnableFutureTask; +import com.google.android.exoplayer2.util.Util; import java.io.IOException; -import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Executor; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * A downloader for progressive media streams. - * - *

- * <p>The downloader attempts to download the entire media bytes referenced by a {@link Uri} into a - * cache as defined by {@link DownloaderConstructorHelper}. Callers can use the constructor to - * specify a custom cache key for the downloaded bytes. - * - *

      The downloader will avoid downloading already-downloaded media bytes. - */ +/** A downloader for progressive media streams. */ public final class ProgressiveDownloader implements Downloader { - private static final int BUFFER_SIZE_BYTES = 128 * 1024; - + private final Executor executor; private final DataSpec dataSpec; - private final Cache cache; private final CacheDataSource dataSource; - private final CacheKeyFactory cacheKeyFactory; - private final PriorityTaskManager priorityTaskManager; - private final AtomicBoolean isCanceled; + private final CacheWriter cacheWriter; + @Nullable private final PriorityTaskManager priorityTaskManager; + + @Nullable private ProgressListener progressListener; + private volatile @MonotonicNonNull RunnableFutureTask downloadRunnable; + private volatile boolean isCanceled; + + /** + * Creates a new instance. + * + * @param mediaItem The media item with a uri to the stream to be downloaded. + * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which the + * download will be written. + */ + public ProgressiveDownloader( + MediaItem mediaItem, CacheDataSource.Factory cacheDataSourceFactory) { + this(mediaItem, cacheDataSourceFactory, Runnable::run); + } /** - * @param uri Uri of the data to be downloaded. - * @param customCacheKey A custom key that uniquely identifies the original stream. Used for cache - * indexing. May be null. - * @param constructorHelper A {@link DownloaderConstructorHelper} instance. + * Creates a new instance. + * + * @param mediaItem The media item with a uri to the stream to be downloaded. + * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which the + * download will be written. + * @param executor An {@link Executor} used to make requests for the media being downloaded. In + * the future, providing an {@link Executor} that uses multiple threads may speed up the + * download by allowing parts of it to be executed in parallel. 
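// Illustrative sketch, not part of the patch: the DownloaderConstructorHelper plumbing above is
// replaced by a MediaItem plus a CacheDataSource.Factory. The cache instance and URL are
// placeholders; a single-threaded executor is used because progressive downloads do not yet
// issue parallel requests (see the executor javadoc above).
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.offline.ProgressiveDownloader;
import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;
import com.google.android.exoplayer2.upstream.cache.Cache;
import com.google.android.exoplayer2.upstream.cache.CacheDataSource;
import java.io.IOException;
import java.util.concurrent.Executors;

public final class ProgressiveDownloadExample {
  public static void download(Cache downloadCache) throws IOException, InterruptedException {
    CacheDataSource.Factory cacheDataSourceFactory =
        new CacheDataSource.Factory()
            .setCache(downloadCache)
            .setUpstreamDataSourceFactory(new DefaultHttpDataSource.Factory());
    ProgressiveDownloader downloader =
        new ProgressiveDownloader(
            MediaItem.fromUri("https://example.com/video.mp4"),
            cacheDataSourceFactory,
            Executors.newSingleThreadExecutor());
    downloader.download(
        (contentLength, bytesDownloaded, percentDownloaded) -> {
          // Progress callbacks may arrive on the executor thread.
        });
  }
}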
*/ public ProgressiveDownloader( - Uri uri, @Nullable String customCacheKey, DownloaderConstructorHelper constructorHelper) { - this.dataSpec = - new DataSpec( - uri, - /* absoluteStreamPosition= */ 0, - C.LENGTH_UNSET, - customCacheKey, - /* flags= */ DataSpec.FLAG_ALLOW_CACHE_FRAGMENTATION); - this.cache = constructorHelper.getCache(); - this.dataSource = constructorHelper.createCacheDataSource(); - this.cacheKeyFactory = constructorHelper.getCacheKeyFactory(); - this.priorityTaskManager = constructorHelper.getPriorityTaskManager(); - isCanceled = new AtomicBoolean(); + MediaItem mediaItem, CacheDataSource.Factory cacheDataSourceFactory, Executor executor) { + this.executor = Assertions.checkNotNull(executor); + Assertions.checkNotNull(mediaItem.localConfiguration); + dataSpec = + new DataSpec.Builder() + .setUri(mediaItem.localConfiguration.uri) + .setKey(mediaItem.localConfiguration.customCacheKey) + .setFlags(DataSpec.FLAG_ALLOW_CACHE_FRAGMENTATION) + .build(); + dataSource = cacheDataSourceFactory.createDataSourceForDownloading(); + @SuppressWarnings("nullness:methodref.receiver.bound") + CacheWriter.ProgressListener progressListener = this::onProgress; + cacheWriter = + new CacheWriter(dataSource, dataSpec, /* temporaryBuffer= */ null, progressListener); + priorityTaskManager = cacheDataSourceFactory.getUpstreamPriorityTaskManager(); } @Override public void download(@Nullable ProgressListener progressListener) - throws InterruptedException, IOException { - priorityTaskManager.add(C.PRIORITY_DOWNLOAD); + throws IOException, InterruptedException { + this.progressListener = progressListener; + if (priorityTaskManager != null) { + priorityTaskManager.add(C.PRIORITY_DOWNLOAD); + } try { - CacheUtil.cache( - dataSpec, - cache, - cacheKeyFactory, - dataSource, - new byte[BUFFER_SIZE_BYTES], - priorityTaskManager, - C.PRIORITY_DOWNLOAD, - progressListener == null ? null : new ProgressForwarder(progressListener), - isCanceled, - /* enableEOFException= */ true); + boolean finished = false; + while (!finished && !isCanceled) { + // Recreate downloadRunnable on each loop iteration to avoid rethrowing a previous error. + downloadRunnable = + new RunnableFutureTask() { + @Override + protected Void doWork() throws IOException { + cacheWriter.cache(); + return null; + } + + @Override + protected void cancelWork() { + cacheWriter.cancel(); + } + }; + if (priorityTaskManager != null) { + priorityTaskManager.proceed(C.PRIORITY_DOWNLOAD); + } + executor.execute(downloadRunnable); + try { + downloadRunnable.get(); + finished = true; + } catch (ExecutionException e) { + Throwable cause = Assertions.checkNotNull(e.getCause()); + if (cause instanceof PriorityTooLowException) { + // The next loop iteration will block until the task is able to proceed. + } else if (cause instanceof IOException) { + throw (IOException) cause; + } else { + // The cause must be an uncaught Throwable type. + Util.sneakyThrow(cause); + } + } + } } finally { - priorityTaskManager.remove(C.PRIORITY_DOWNLOAD); + // If the main download thread was interrupted as part of cancelation, then it's possible that + // the runnable is still doing work. We need to wait until it's finished before returning. 
+ checkNotNull(downloadRunnable).blockUntilFinished(); + if (priorityTaskManager != null) { + priorityTaskManager.remove(C.PRIORITY_DOWNLOAD); + } } } @Override public void cancel() { - isCanceled.set(true); + isCanceled = true; + RunnableFutureTask downloadRunnable = this.downloadRunnable; + if (downloadRunnable != null) { + downloadRunnable.cancel(/* interruptIfRunning= */ true); + } } @Override public void remove() { - CacheUtil.remove(dataSpec, cache, cacheKeyFactory); + dataSource.getCache().removeResource(dataSource.getCacheKeyFactory().buildCacheKey(dataSpec)); } - private static final class ProgressForwarder implements CacheUtil.ProgressListener { - - private final ProgressListener progessListener; - - public ProgressForwarder(ProgressListener progressListener) { - this.progessListener = progressListener; - } - - @Override - public void onProgress(long contentLength, long bytesCached, long newBytesCached) { - float percentDownloaded = - contentLength == C.LENGTH_UNSET || contentLength == 0 - ? C.PERCENTAGE_UNSET - : ((bytesCached * 100f) / contentLength); - progessListener.onProgress(contentLength, bytesCached, percentDownloaded); + private void onProgress(long contentLength, long bytesCached, long newBytesCached) { + if (progressListener == null) { + return; } + float percentDownloaded = + contentLength == C.LENGTH_UNSET || contentLength == 0 + ? C.PERCENTAGE_UNSET + : ((bytesCached * 100f) / contentLength); + progressListener.onProgress(contentLength, bytesCached, percentDownloaded); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/SegmentDownloader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/SegmentDownloader.java index 969003101f..764d2c63f5 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/SegmentDownloader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/SegmentDownloader.java @@ -15,23 +15,34 @@ */ package com.google.android.exoplayer2.offline; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + import android.net.Uri; -import android.util.Pair; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSpec; +import com.google.android.exoplayer2.upstream.ParsingLoadable; +import com.google.android.exoplayer2.upstream.ParsingLoadable.Parser; import com.google.android.exoplayer2.upstream.cache.Cache; import com.google.android.exoplayer2.upstream.cache.CacheDataSource; import com.google.android.exoplayer2.upstream.cache.CacheKeyFactory; -import com.google.android.exoplayer2.upstream.cache.CacheUtil; +import com.google.android.exoplayer2.upstream.cache.CacheWriter; +import com.google.android.exoplayer2.upstream.cache.ContentMetadata; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.PriorityTaskManager; +import com.google.android.exoplayer2.util.PriorityTaskManager.PriorityTooLowException; +import com.google.android.exoplayer2.util.RunnableFutureTask; import com.google.android.exoplayer2.util.Util; import java.io.IOException; +import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; -import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Executor; /** * Base class for 
multi segment stream downloaders. @@ -62,52 +73,76 @@ public int compareTo(Segment other) { } private static final int BUFFER_SIZE_BYTES = 128 * 1024; + private static final long MAX_MERGED_SEGMENT_START_TIME_DIFF_US = 20 * C.MICROS_PER_SECOND; private final DataSpec manifestDataSpec; + private final Parser manifestParser; + private final ArrayList streamKeys; + private final CacheDataSource.Factory cacheDataSourceFactory; private final Cache cache; - private final CacheDataSource dataSource; - private final CacheDataSource offlineDataSource; private final CacheKeyFactory cacheKeyFactory; - private final PriorityTaskManager priorityTaskManager; - private final ArrayList streamKeys; - private final AtomicBoolean isCanceled; + @Nullable private final PriorityTaskManager priorityTaskManager; + private final Executor executor; /** - * @param manifestUri The {@link Uri} of the manifest to be downloaded. - * @param streamKeys Keys defining which streams in the manifest should be selected for download. - * If empty, all streams are downloaded. - * @param constructorHelper A {@link DownloaderConstructorHelper} instance. + * The currently active runnables. + * + *

      Note: Only the {@link #download} thread is permitted to modify this list. Modifications, as + * well as the iteration on the {@link #cancel} thread, must be synchronized on the instance for + * thread safety. Iterations on the {@link #download} thread do not need to be synchronized, and + * should not be synchronized because doing so can erroneously block {@link #cancel}. */ - public SegmentDownloader( - Uri manifestUri, List streamKeys, DownloaderConstructorHelper constructorHelper) { - this.manifestDataSpec = getCompressibleDataSpec(manifestUri); - this.streamKeys = new ArrayList<>(streamKeys); - this.cache = constructorHelper.getCache(); - this.dataSource = constructorHelper.createCacheDataSource(); - this.offlineDataSource = constructorHelper.createOfflineCacheDataSource(); - this.cacheKeyFactory = constructorHelper.getCacheKeyFactory(); - this.priorityTaskManager = constructorHelper.getPriorityTaskManager(); - isCanceled = new AtomicBoolean(); - } + private final ArrayList> activeRunnables; + + private volatile boolean isCanceled; /** - * Downloads the selected streams in the media. If multiple streams are selected, they are - * downloaded in sync with one another. - * - * @throws IOException Thrown when there is an error downloading. - * @throws InterruptedException If the thread has been interrupted. + * @param mediaItem The {@link MediaItem} to be downloaded. + * @param manifestParser A parser for manifests belonging to the media to be downloaded. + * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which the + * download will be written. + * @param executor An {@link Executor} used to make requests for the media being downloaded. + * Providing an {@link Executor} that uses multiple threads will speed up the download by + * allowing parts of it to be executed in parallel. */ + public SegmentDownloader( + MediaItem mediaItem, + Parser manifestParser, + CacheDataSource.Factory cacheDataSourceFactory, + Executor executor) { + checkNotNull(mediaItem.localConfiguration); + this.manifestDataSpec = getCompressibleDataSpec(mediaItem.localConfiguration.uri); + this.manifestParser = manifestParser; + this.streamKeys = new ArrayList<>(mediaItem.localConfiguration.streamKeys); + this.cacheDataSourceFactory = cacheDataSourceFactory; + this.executor = executor; + cache = Assertions.checkNotNull(cacheDataSourceFactory.getCache()); + cacheKeyFactory = cacheDataSourceFactory.getCacheKeyFactory(); + priorityTaskManager = cacheDataSourceFactory.getUpstreamPriorityTaskManager(); + activeRunnables = new ArrayList<>(); + } + @Override public final void download(@Nullable ProgressListener progressListener) throws IOException, InterruptedException { - priorityTaskManager.add(C.PRIORITY_DOWNLOAD); + ArrayDeque pendingSegments = new ArrayDeque<>(); + ArrayDeque recycledRunnables = new ArrayDeque<>(); + if (priorityTaskManager != null) { + priorityTaskManager.add(C.PRIORITY_DOWNLOAD); + } try { + CacheDataSource dataSource = cacheDataSourceFactory.createDataSourceForDownloading(); // Get the manifest and all of the segments. 
- M manifest = getManifest(dataSource, manifestDataSpec); + M manifest = getManifest(dataSource, manifestDataSpec, /* removing= */ false); if (!streamKeys.isEmpty()) { manifest = manifest.copy(streamKeys); } - List segments = getSegments(dataSource, manifest, /* allowIncompleteList= */ false); + List segments = getSegments(dataSource, manifest, /* removing= */ false); + + // Sort the segments so that we download media in the right order from the start of the + // content, and merge segments where possible to minimize the number of server round trips. + Collections.sort(segments); + mergeSegments(segments, cacheKeyFactory); // Scan the segments, removing any that are fully downloaded. int totalSegments = segments.size(); @@ -115,11 +150,18 @@ public final void download(@Nullable ProgressListener progressListener) long contentLength = 0; long bytesDownloaded = 0; for (int i = segments.size() - 1; i >= 0; i--) { - Segment segment = segments.get(i); - Pair segmentLengthAndBytesDownloaded = - CacheUtil.getCached(segment.dataSpec, cache, cacheKeyFactory); - long segmentLength = segmentLengthAndBytesDownloaded.first; - long segmentBytesDownloaded = segmentLengthAndBytesDownloaded.second; + DataSpec dataSpec = segments.get(i).dataSpec; + String cacheKey = cacheKeyFactory.buildCacheKey(dataSpec); + long segmentLength = dataSpec.length; + if (segmentLength == C.LENGTH_UNSET) { + long resourceLength = + ContentMetadata.getContentLength(cache.getContentMetadata(cacheKey)); + if (resourceLength != C.LENGTH_UNSET) { + segmentLength = resourceLength - dataSpec.position; + } + } + long segmentBytesDownloaded = + cache.getCachedBytes(cacheKey, dataSpec.position, segmentLength); bytesDownloaded += segmentBytesDownloaded; if (segmentLength != C.LENGTH_UNSET) { if (segmentLength == segmentBytesDownloaded) { @@ -134,105 +176,318 @@ public final void download(@Nullable ProgressListener progressListener) contentLength = C.LENGTH_UNSET; } } - Collections.sort(segments); // Download the segments. - @Nullable ProgressNotifier progressNotifier = null; - if (progressListener != null) { - progressNotifier = - new ProgressNotifier( - progressListener, - contentLength, - totalSegments, - bytesDownloaded, - segmentsDownloaded); - } - byte[] buffer = new byte[BUFFER_SIZE_BYTES]; - for (int i = 0; i < segments.size(); i++) { - CacheUtil.cache( - segments.get(i).dataSpec, - cache, - cacheKeyFactory, - dataSource, - buffer, - priorityTaskManager, - C.PRIORITY_DOWNLOAD, - progressNotifier, - isCanceled, - true); - if (progressNotifier != null) { - progressNotifier.onSegmentDownloaded(); + @Nullable + ProgressNotifier progressNotifier = + progressListener != null + ? new ProgressNotifier( + progressListener, + contentLength, + totalSegments, + bytesDownloaded, + segmentsDownloaded) + : null; + pendingSegments.addAll(segments); + while (!isCanceled && !pendingSegments.isEmpty()) { + // Block until there aren't any higher priority tasks. + if (priorityTaskManager != null) { + priorityTaskManager.proceed(C.PRIORITY_DOWNLOAD); } + + // Create and execute a runnable to download the next segment. 
+ CacheDataSource segmentDataSource; + byte[] temporaryBuffer; + if (!recycledRunnables.isEmpty()) { + SegmentDownloadRunnable recycledRunnable = recycledRunnables.removeFirst(); + segmentDataSource = recycledRunnable.dataSource; + temporaryBuffer = recycledRunnable.temporaryBuffer; + } else { + segmentDataSource = cacheDataSourceFactory.createDataSourceForDownloading(); + temporaryBuffer = new byte[BUFFER_SIZE_BYTES]; + } + Segment segment = pendingSegments.removeFirst(); + SegmentDownloadRunnable downloadRunnable = + new SegmentDownloadRunnable( + segment, segmentDataSource, progressNotifier, temporaryBuffer); + addActiveRunnable(downloadRunnable); + executor.execute(downloadRunnable); + + // Clean up runnables that have finished. + for (int j = activeRunnables.size() - 1; j >= 0; j--) { + SegmentDownloadRunnable activeRunnable = (SegmentDownloadRunnable) activeRunnables.get(j); + // Only block until the runnable has finished if we don't have any more pending segments + // to start. If we do have pending segments to start then only process the runnable if + // it's already finished. + if (pendingSegments.isEmpty() || activeRunnable.isDone()) { + try { + activeRunnable.get(); + removeActiveRunnable(j); + recycledRunnables.addLast(activeRunnable); + } catch (ExecutionException e) { + Throwable cause = Assertions.checkNotNull(e.getCause()); + if (cause instanceof PriorityTooLowException) { + // We need to schedule this segment again in a future loop iteration. + pendingSegments.addFirst(activeRunnable.segment); + removeActiveRunnable(j); + recycledRunnables.addLast(activeRunnable); + } else if (cause instanceof IOException) { + throw (IOException) cause; + } else { + // The cause must be an uncaught Throwable type. + Util.sneakyThrow(cause); + } + } + } + } + + // Don't move on to the next segment until the runnable for this segment has started. This + // drip feeds runnables to the executor, rather than providing them all up front. + downloadRunnable.blockUntilStarted(); } } finally { - priorityTaskManager.remove(C.PRIORITY_DOWNLOAD); + // If one of the runnables has thrown an exception, then it's possible there are other active + // runnables still doing work. We need to wait until they finish before exiting this method. + // Cancel them to speed this up. + for (int i = 0; i < activeRunnables.size(); i++) { + activeRunnables.get(i).cancel(/* interruptIfRunning= */ true); + } + // Wait until the runnables have finished. In addition to the failure case, we also need to + // do this for the case where the main download thread was interrupted as part of cancelation. 
+ for (int i = activeRunnables.size() - 1; i >= 0; i--) { + activeRunnables.get(i).blockUntilFinished(); + removeActiveRunnable(i); + } + if (priorityTaskManager != null) { + priorityTaskManager.remove(C.PRIORITY_DOWNLOAD); + } } } @Override public void cancel() { - isCanceled.set(true); + synchronized (activeRunnables) { + isCanceled = true; + for (int i = 0; i < activeRunnables.size(); i++) { + activeRunnables.get(i).cancel(/* interruptIfRunning= */ true); + } + } } @Override - public final void remove() throws InterruptedException { + public final void remove() { + CacheDataSource dataSource = cacheDataSourceFactory.createDataSourceForRemovingDownload(); try { - M manifest = getManifest(offlineDataSource, manifestDataSpec); - List segments = getSegments(offlineDataSource, manifest, true); + M manifest = getManifest(dataSource, manifestDataSpec, /* removing= */ true); + List segments = getSegments(dataSource, manifest, /* removing= */ true); for (int i = 0; i < segments.size(); i++) { - removeDataSpec(segments.get(i).dataSpec); + cache.removeResource(cacheKeyFactory.buildCacheKey(segments.get(i).dataSpec)); } - } catch (IOException e) { + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } catch (Exception e) { // Ignore exceptions when removing. } finally { // Always attempt to remove the manifest. - removeDataSpec(manifestDataSpec); + cache.removeResource(cacheKeyFactory.buildCacheKey(manifestDataSpec)); } } // Internal methods. /** - * Loads and parses the manifest. + * Loads and parses a manifest. * - * @param dataSource The {@link DataSource} through which to load. * @param dataSpec The manifest {@link DataSpec}. - * @return The manifest. - * @throws IOException If an error occurs reading data. + * @param removing Whether the manifest is being loaded as part of the download being removed. + * @return The loaded manifest. + * @throws InterruptedException If the thread on which the method is called is interrupted. + * @throws IOException If an error occurs during execution. */ - protected abstract M getManifest(DataSource dataSource, DataSpec dataSpec) throws IOException; + protected final M getManifest(DataSource dataSource, DataSpec dataSpec, boolean removing) + throws InterruptedException, IOException { + return execute( + new RunnableFutureTask() { + @Override + protected M doWork() throws IOException { + return ParsingLoadable.load(dataSource, manifestParser, dataSpec, C.DATA_TYPE_MANIFEST); + } + }, + removing); + } /** - * Returns a list of all downloadable {@link Segment}s for a given manifest. + * Executes the provided {@link RunnableFutureTask}. + * + * @param runnable The {@link RunnableFutureTask} to execute. + * @param removing Whether the execution is part of the download being removed. + * @return The result. + * @throws InterruptedException If the thread on which the method is called is interrupted. + * @throws IOException If an error occurs during execution. + */ + protected final T execute(RunnableFutureTask runnable, boolean removing) + throws InterruptedException, IOException { + if (removing) { + runnable.run(); + try { + return runnable.get(); + } catch (ExecutionException e) { + Throwable cause = Assertions.checkNotNull(e.getCause()); + if (cause instanceof IOException) { + throw (IOException) cause; + } else { + // The cause must be an uncaught Throwable type. + Util.sneakyThrow(e); + } + } + } + while (true) { + if (isCanceled) { + throw new InterruptedException(); + } + // Block until there aren't any higher priority tasks. 
+ if (priorityTaskManager != null) { + priorityTaskManager.proceed(C.PRIORITY_DOWNLOAD); + } + addActiveRunnable(runnable); + executor.execute(runnable); + try { + return runnable.get(); + } catch (ExecutionException e) { + Throwable cause = Assertions.checkNotNull(e.getCause()); + if (cause instanceof PriorityTooLowException) { + // The next loop iteration will block until the task is able to proceed. + } else if (cause instanceof IOException) { + throw (IOException) cause; + } else { + // The cause must be an uncaught Throwable type. + Util.sneakyThrow(e); + } + } finally { + // We don't want to return for as long as the runnable might still be doing work. + runnable.blockUntilFinished(); + removeActiveRunnable(runnable); + } + } + } + + /** + * Returns a list of all downloadable {@link Segment}s for a given manifest. Any required data + * should be loaded using {@link #getManifest} or {@link #execute}. * * @param dataSource The {@link DataSource} through which to load any required data. * @param manifest The manifest containing the segments. - * @param allowIncompleteList Whether to continue in the case that a load error prevents all - * segments from being listed. If true then a partial segment list will be returned. If false - * an {@link IOException} will be thrown. + * @param removing Whether the segments are being obtained as part of a removal. If true then a + * partial segment list is returned in the case that a load error prevents all segments from + * being listed. If false then an {@link IOException} will be thrown in this case. * @return The list of downloadable {@link Segment}s. - * @throws InterruptedException Thrown if the thread was interrupted. - * @throws IOException Thrown if {@code allowPartialIndex} is false and a load error occurs, or if - * the media is not in a form that allows for its segments to be listed. + * @throws IOException Thrown if {@code allowPartialIndex} is false and an execution error occurs, + * or if the media is not in a form that allows for its segments to be listed. 
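// Illustrative sketch (not part of the patch): the priority-gated retry loop that execute(...)
// above implements. A task is submitted only once no higher-priority work is registered with the
// PriorityTaskManager, and it is retried if it fails mid-flight with PriorityTooLowException.
// ExecutorService/Callable stand in for the patch's RunnableFutureTask; the class and method
// names are invented for the example.
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.PriorityTaskManager;
import java.io.IOException;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;

final class PriorityGatedExecuteSketch {
  static <T> T execute(
      ExecutorService executor, PriorityTaskManager priorityTaskManager, Callable<T> work)
      throws InterruptedException, IOException {
    while (true) {
      // Block until no task with a priority higher than PRIORITY_DOWNLOAD is registered.
      priorityTaskManager.proceed(C.PRIORITY_DOWNLOAD);
      Future<T> future = executor.submit(work);
      try {
        return future.get();
      } catch (ExecutionException e) {
        Throwable cause = e.getCause();
        if (cause instanceof PriorityTaskManager.PriorityTooLowException) {
          // A higher-priority task appeared while we were running; loop and wait again.
        } else if (cause instanceof IOException) {
          throw (IOException) cause;
        } else {
          throw new IllegalStateException(cause);
        }
      }
    }
  }
}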
*/ - protected abstract List getSegments( - DataSource dataSource, M manifest, boolean allowIncompleteList) - throws InterruptedException, IOException; + protected abstract List getSegments(DataSource dataSource, M manifest, boolean removing) + throws IOException, InterruptedException; - private void removeDataSpec(DataSpec dataSpec) { - CacheUtil.remove(dataSpec, cache, cacheKeyFactory); + protected static DataSpec getCompressibleDataSpec(Uri uri) { + return new DataSpec.Builder().setUri(uri).setFlags(DataSpec.FLAG_ALLOW_GZIP).build(); } - protected static DataSpec getCompressibleDataSpec(Uri uri) { - return new DataSpec( - uri, - /* absoluteStreamPosition= */ 0, - /* length= */ C.LENGTH_UNSET, - /* key= */ null, - /* flags= */ DataSpec.FLAG_ALLOW_GZIP); + private void addActiveRunnable(RunnableFutureTask runnable) + throws InterruptedException { + synchronized (activeRunnables) { + if (isCanceled) { + throw new InterruptedException(); + } + activeRunnables.add(runnable); + } + } + + private void removeActiveRunnable(RunnableFutureTask runnable) { + synchronized (activeRunnables) { + activeRunnables.remove(runnable); + } + } + + private void removeActiveRunnable(int index) { + synchronized (activeRunnables) { + activeRunnables.remove(index); + } + } + + private static void mergeSegments(List segments, CacheKeyFactory keyFactory) { + HashMap lastIndexByCacheKey = new HashMap<>(); + int nextOutIndex = 0; + for (int i = 0; i < segments.size(); i++) { + Segment segment = segments.get(i); + String cacheKey = keyFactory.buildCacheKey(segment.dataSpec); + @Nullable Integer lastIndex = lastIndexByCacheKey.get(cacheKey); + @Nullable Segment lastSegment = lastIndex == null ? null : segments.get(lastIndex); + if (lastSegment == null + || segment.startTimeUs > lastSegment.startTimeUs + MAX_MERGED_SEGMENT_START_TIME_DIFF_US + || !canMergeSegments(lastSegment.dataSpec, segment.dataSpec)) { + lastIndexByCacheKey.put(cacheKey, nextOutIndex); + segments.set(nextOutIndex, segment); + nextOutIndex++; + } else { + long mergedLength = + segment.dataSpec.length == C.LENGTH_UNSET + ? 
C.LENGTH_UNSET + : lastSegment.dataSpec.length + segment.dataSpec.length; + DataSpec mergedDataSpec = lastSegment.dataSpec.subrange(/* offset= */ 0, mergedLength); + segments.set( + Assertions.checkNotNull(lastIndex), + new Segment(lastSegment.startTimeUs, mergedDataSpec)); + } + } + Util.removeRange(segments, /* fromIndex= */ nextOutIndex, /* toIndex= */ segments.size()); + } + + private static boolean canMergeSegments(DataSpec dataSpec1, DataSpec dataSpec2) { + return dataSpec1.uri.equals(dataSpec2.uri) + && dataSpec1.length != C.LENGTH_UNSET + && (dataSpec1.position + dataSpec1.length == dataSpec2.position) + && Util.areEqual(dataSpec1.key, dataSpec2.key) + && dataSpec1.flags == dataSpec2.flags + && dataSpec1.httpMethod == dataSpec2.httpMethod + && dataSpec1.httpRequestHeaders.equals(dataSpec2.httpRequestHeaders); + } + + private static final class SegmentDownloadRunnable extends RunnableFutureTask { + + public final Segment segment; + public final CacheDataSource dataSource; + @Nullable private final ProgressNotifier progressNotifier; + public final byte[] temporaryBuffer; + private final CacheWriter cacheWriter; + + public SegmentDownloadRunnable( + Segment segment, + CacheDataSource dataSource, + @Nullable ProgressNotifier progressNotifier, + byte[] temporaryBuffer) { + this.segment = segment; + this.dataSource = dataSource; + this.progressNotifier = progressNotifier; + this.temporaryBuffer = temporaryBuffer; + this.cacheWriter = + new CacheWriter(dataSource, segment.dataSpec, temporaryBuffer, progressNotifier); + } + + @Override + protected Void doWork() throws IOException { + cacheWriter.cache(); + if (progressNotifier != null) { + progressNotifier.onSegmentDownloaded(); + } + return null; + } + + @Override + protected void cancelWork() { + cacheWriter.cancel(); + } } - private static final class ProgressNotifier implements CacheUtil.ProgressListener { + private static final class ProgressNotifier implements CacheWriter.ProgressListener { private final ProgressListener progressListener; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/StreamKey.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/StreamKey.java index f9a48868d9..d8d87663a3 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/StreamKey.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/offline/StreamKey.java @@ -20,11 +20,18 @@ import androidx.annotation.Nullable; /** - * A key for a subset of media which can be separately loaded (a "stream"). + * A key for a subset of media that can be separately loaded (a "stream"). * - *

<p>The stream key consists of a period index, a group index within the period and a track index + *
<p>The stream key consists of a period index, a group index within the period and a stream index * within the group. The interpretation of these indices depends on the type of media for which the - * stream key is used. + * stream key is used. Note that they are not the same as track group and track indices, + * because multiple tracks can be multiplexed into a single stream. + * + *
<p>
      Application code should not generally attempt to build StreamKey instances directly. Instead, + * {@code DownloadHelper.getDownloadRequest} can be used to generate download requests with the + * correct StreamKeys for the track selections that have been configured on the helper. {@code + * MediaPeriod.getStreamKeys} provides a lower level way of generating StreamKeys corresponding to a + * particular track selection. */ public final class StreamKey implements Comparable, Parcelable { @@ -32,37 +39,50 @@ public final class StreamKey implements Comparable, Parcelable { public final int periodIndex; /** The group index. */ public final int groupIndex; - /** The track index. */ - public final int trackIndex; + /** The stream index. */ + public final int streamIndex; + + /** + * @deprecated Use {@link #streamIndex}. + */ + @Deprecated public final int trackIndex; /** + * Creates an instance with {@link #periodIndex} set to 0. + * * @param groupIndex The group index. - * @param trackIndex The track index. + * @param streamIndex The stream index. */ - public StreamKey(int groupIndex, int trackIndex) { - this(0, groupIndex, trackIndex); + public StreamKey(int groupIndex, int streamIndex) { + this(0, groupIndex, streamIndex); } /** + * Creates an instance. + * * @param periodIndex The period index. * @param groupIndex The group index. - * @param trackIndex The track index. + * @param streamIndex The stream index. */ - public StreamKey(int periodIndex, int groupIndex, int trackIndex) { + @SuppressWarnings("deprecation") + public StreamKey(int periodIndex, int groupIndex, int streamIndex) { this.periodIndex = periodIndex; this.groupIndex = groupIndex; - this.trackIndex = trackIndex; + this.streamIndex = streamIndex; + trackIndex = streamIndex; } + @SuppressWarnings("deprecation") /* package */ StreamKey(Parcel in) { periodIndex = in.readInt(); groupIndex = in.readInt(); - trackIndex = in.readInt(); + streamIndex = in.readInt(); + trackIndex = streamIndex; } @Override public String toString() { - return periodIndex + "." + groupIndex + "." + trackIndex; + return periodIndex + "." + groupIndex + "." 
+ streamIndex; } @Override @@ -77,14 +97,14 @@ public boolean equals(@Nullable Object o) { StreamKey that = (StreamKey) o; return periodIndex == that.periodIndex && groupIndex == that.groupIndex - && trackIndex == that.trackIndex; + && streamIndex == that.streamIndex; } @Override public int hashCode() { int result = periodIndex; result = 31 * result + groupIndex; - result = 31 * result + trackIndex; + result = 31 * result + streamIndex; return result; } @@ -96,7 +116,7 @@ public int compareTo(StreamKey o) { if (result == 0) { result = groupIndex - o.groupIndex; if (result == 0) { - result = trackIndex - o.trackIndex; + result = streamIndex - o.streamIndex; } } return result; @@ -113,7 +133,7 @@ public int describeContents() { public void writeToParcel(Parcel dest, int flags) { dest.writeInt(periodIndex); dest.writeInt(groupIndex); - dest.writeInt(trackIndex); + dest.writeInt(streamIndex); } public static final Parcelable.Creator CREATOR = diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/PlatformScheduler.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/PlatformScheduler.java index fcebc9388a..357fdab957 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/PlatformScheduler.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/PlatformScheduler.java @@ -15,7 +15,8 @@ */ package com.google.android.exoplayer2.scheduler; -import android.annotation.TargetApi; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + import android.app.job.JobInfo; import android.app.job.JobParameters; import android.app.job.JobScheduler; @@ -24,8 +25,8 @@ import android.content.Context; import android.content.Intent; import android.os.PersistableBundle; +import androidx.annotation.RequiresApi; import androidx.annotation.RequiresPermission; -import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Util; @@ -42,14 +43,19 @@ * android:exported="true"/> * } */ -@TargetApi(21) +@RequiresApi(21) public final class PlatformScheduler implements Scheduler { - private static final boolean DEBUG = false; private static final String TAG = "PlatformScheduler"; private static final String KEY_SERVICE_ACTION = "service_action"; private static final String KEY_SERVICE_PACKAGE = "service_package"; private static final String KEY_REQUIREMENTS = "requirements"; + private static final int SUPPORTED_REQUIREMENTS = + Requirements.NETWORK + | Requirements.NETWORK_UNMETERED + | Requirements.DEVICE_IDLE + | Requirements.DEVICE_CHARGING + | (Util.SDK_INT >= 26 ? 
Requirements.DEVICE_STORAGE_NOT_LOW : 0); private final int jobId; private final ComponentName jobServiceComponentName; @@ -67,7 +73,8 @@ public PlatformScheduler(Context context, int jobId) { context = context.getApplicationContext(); this.jobId = jobId; jobServiceComponentName = new ComponentName(context, PlatformSchedulerService.class); - jobScheduler = (JobScheduler) context.getSystemService(Context.JOB_SCHEDULER_SERVICE); + jobScheduler = + checkNotNull((JobScheduler) context.getSystemService(Context.JOB_SCHEDULER_SERVICE)); } @Override @@ -75,17 +82,20 @@ public boolean schedule(Requirements requirements, String servicePackage, String JobInfo jobInfo = buildJobInfo(jobId, jobServiceComponentName, requirements, serviceAction, servicePackage); int result = jobScheduler.schedule(jobInfo); - logd("Scheduling job: " + jobId + " result: " + result); return result == JobScheduler.RESULT_SUCCESS; } @Override public boolean cancel() { - logd("Canceling job: " + jobId); jobScheduler.cancel(jobId); return true; } + @Override + public Requirements getSupportedRequirements(Requirements requirements) { + return requirements.filterRequirements(SUPPORTED_REQUIREMENTS); + } + // @RequiresPermission constructor annotation should ensure the permission is present. @SuppressWarnings("MissingPermission") private static JobInfo buildJobInfo( @@ -94,8 +104,15 @@ private static JobInfo buildJobInfo( Requirements requirements, String serviceAction, String servicePackage) { - JobInfo.Builder builder = new JobInfo.Builder(jobId, jobServiceComponentName); + Requirements filteredRequirements = requirements.filterRequirements(SUPPORTED_REQUIREMENTS); + if (!filteredRequirements.equals(requirements)) { + Log.w( + TAG, + "Ignoring unsupported requirements: " + + (filteredRequirements.getRequirements() ^ requirements.getRequirements())); + } + JobInfo.Builder builder = new JobInfo.Builder(jobId, jobServiceComponentName); if (requirements.isUnmeteredNetworkRequired()) { builder.setRequiredNetworkType(JobInfo.NETWORK_TYPE_UNMETERED); } else if (requirements.isNetworkRequired()) { @@ -103,6 +120,9 @@ private static JobInfo buildJobInfo( } builder.setRequiresDeviceIdle(requirements.isIdleRequired()); builder.setRequiresCharging(requirements.isChargingRequired()); + if (Util.SDK_INT >= 26 && requirements.isStorageNotLowRequired()) { + builder.setRequiresStorageNotLow(true); + } builder.setPersisted(true); PersistableBundle extras = new PersistableBundle(); @@ -114,30 +134,21 @@ private static JobInfo buildJobInfo( return builder.build(); } - private static void logd(String message) { - if (DEBUG) { - Log.d(TAG, message); - } - } - /** A {@link JobService} that starts the target service if the requirements are met. 
*/ public static final class PlatformSchedulerService extends JobService { @Override public boolean onStartJob(JobParameters params) { - logd("PlatformSchedulerService started"); PersistableBundle extras = params.getExtras(); Requirements requirements = new Requirements(extras.getInt(KEY_REQUIREMENTS)); - if (requirements.checkRequirements(this)) { - logd("Requirements are met"); - String serviceAction = extras.getString(KEY_SERVICE_ACTION); - String servicePackage = extras.getString(KEY_SERVICE_PACKAGE); - Intent intent = - new Intent(Assertions.checkNotNull(serviceAction)).setPackage(servicePackage); - logd("Starting service action: " + serviceAction + " package: " + servicePackage); + int notMetRequirements = requirements.getNotMetRequirements(this); + if (notMetRequirements == 0) { + String serviceAction = checkNotNull(extras.getString(KEY_SERVICE_ACTION)); + String servicePackage = checkNotNull(extras.getString(KEY_SERVICE_PACKAGE)); + Intent intent = new Intent(serviceAction).setPackage(servicePackage); Util.startForegroundService(this, intent); } else { - logd("Requirements are not met"); - jobFinished(params, /* needsReschedule */ true); + Log.w(TAG, "Requirements not met: " + notMetRequirements); + jobFinished(params, /* wantsReschedule= */ true); } return false; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/Requirements.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/Requirements.java index 4e2c83d5d6..030ac1cbcb 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/Requirements.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/Requirements.java @@ -15,6 +15,12 @@ */ package com.google.android.exoplayer2.scheduler; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.content.Context; import android.content.Intent; import android.content.IntentFilter; @@ -33,19 +39,23 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** Defines a set of device state requirements. */ public final class Requirements implements Parcelable { /** * Requirement flags. Possible flag values are {@link #NETWORK}, {@link #NETWORK_UNMETERED}, - * {@link #DEVICE_IDLE} and {@link #DEVICE_CHARGING}. + * {@link #DEVICE_IDLE}, {@link #DEVICE_CHARGING} and {@link #DEVICE_STORAGE_NOT_LOW}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef( flag = true, - value = {NETWORK, NETWORK_UNMETERED, DEVICE_IDLE, DEVICE_CHARGING}) + value = {NETWORK, NETWORK_UNMETERED, DEVICE_IDLE, DEVICE_CHARGING, DEVICE_STORAGE_NOT_LOW}) public @interface RequirementFlags {} /** Requirement that the device has network connectivity. */ @@ -56,10 +66,17 @@ public final class Requirements implements Parcelable { public static final int DEVICE_IDLE = 1 << 2; /** Requirement that the device is charging. */ public static final int DEVICE_CHARGING = 1 << 3; + /** + * Requirement that the device's internal storage is not low. 
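// Illustrative sketch (not part of the patch): composing Requirements flags, including the new
// DEVICE_STORAGE_NOT_LOW flag added above, and checking them against the current device state.
// The class and method names are invented for the example.
import android.content.Context;
import com.google.android.exoplayer2.scheduler.Requirements;

final class RequirementsUsageSketch {
  static boolean canStartDownloads(Context context) {
    Requirements requirements =
        new Requirements(
            Requirements.NETWORK_UNMETERED
                | Requirements.DEVICE_CHARGING
                | Requirements.DEVICE_STORAGE_NOT_LOW);
    // getNotMetRequirements returns a flag set; 0 means every requirement is currently met.
    return requirements.getNotMetRequirements(context) == 0;
  }
}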
Note that this requirement + * is not affected by the status of external storage. + */ + public static final int DEVICE_STORAGE_NOT_LOW = 1 << 4; - @RequirementFlags private final int requirements; + private final @RequirementFlags int requirements; - /** @param requirements A combination of requirement flags. */ + /** + * @param requirements A combination of requirement flags. + */ public Requirements(@RequirementFlags int requirements) { if ((requirements & NETWORK_UNMETERED) != 0) { // Make sure network requirement flags are consistent. @@ -69,11 +86,22 @@ public Requirements(@RequirementFlags int requirements) { } /** Returns the requirements. */ - @RequirementFlags - public int getRequirements() { + public @RequirementFlags int getRequirements() { return requirements; } + /** + * Filters the requirements, returning the subset that are enabled by the provided filter. + * + * @param requirementsFilter The enabled {@link RequirementFlags}. + * @return The filtered requirements. If the filter does not cause a change in the requirements + * then this instance will be returned. + */ + public Requirements filterRequirements(int requirementsFilter) { + int filteredRequirements = requirements & requirementsFilter; + return filteredRequirements == requirements ? this : new Requirements(filteredRequirements); + } + /** Returns whether network connectivity is required. */ public boolean isNetworkRequired() { return (requirements & NETWORK) != 0; @@ -94,6 +122,11 @@ public boolean isIdleRequired() { return (requirements & DEVICE_IDLE) != 0; } + /** Returns whether the device is required to not be low on internal storage. */ + public boolean isStorageNotLowRequired() { + return (requirements & DEVICE_STORAGE_NOT_LOW) != 0; + } + /** * Returns whether the requirements are met. * @@ -110,8 +143,7 @@ public boolean checkRequirements(Context context) { * @param context Any context. * @return The requirements that are not met, or 0. */ - @RequirementFlags - public int getNotMetRequirements(Context context) { + public @RequirementFlags int getNotMetRequirements(Context context) { @RequirementFlags int notMetRequirements = getNotMetNetworkRequirements(context); if (isChargingRequired() && !isDeviceCharging(context)) { notMetRequirements |= DEVICE_CHARGING; @@ -119,11 +151,13 @@ public int getNotMetRequirements(Context context) { if (isIdleRequired() && !isDeviceIdle(context)) { notMetRequirements |= DEVICE_IDLE; } + if (isStorageNotLowRequired() && !isStorageNotLow(context)) { + notMetRequirements |= DEVICE_STORAGE_NOT_LOW; + } return notMetRequirements; } - @RequirementFlags - private int getNotMetNetworkRequirements(Context context) { + private @RequirementFlags int getNotMetNetworkRequirements(Context context) { if (!isNetworkRequired()) { return 0; } @@ -146,8 +180,10 @@ private int getNotMetNetworkRequirements(Context context) { } private boolean isDeviceCharging(Context context) { + @Nullable Intent batteryStatus = - context.registerReceiver(null, new IntentFilter(Intent.ACTION_BATTERY_CHANGED)); + Util.registerReceiverNotExported( + context, /* receiver= */ null, new IntentFilter(Intent.ACTION_BATTERY_CHANGED)); if (batteryStatus == null) { return false; } @@ -164,6 +200,12 @@ private boolean isDeviceIdle(Context context) { : Util.SDK_INT >= 20 ? 
!powerManager.isInteractive() : !powerManager.isScreenOn(); } + private boolean isStorageNotLow(Context context) { + return Util.registerReceiverNotExported( + context, /* receiver= */ null, new IntentFilter(Intent.ACTION_DEVICE_STORAGE_LOW)) + == null; + } + private static boolean isInternetConnectivityValidated(ConnectivityManager connectivityManager) { // It's possible to check NetworkCapabilities.NET_CAPABILITY_VALIDATED from API level 23, but // RequirementsWatcher only fires an event to re-check the requirements when NetworkCapabilities @@ -177,11 +219,17 @@ private static boolean isInternetConnectivityValidated(ConnectivityManager conne if (activeNetwork == null) { return false; } - @Nullable - NetworkCapabilities networkCapabilities = - connectivityManager.getNetworkCapabilities(activeNetwork); - return networkCapabilities != null - && networkCapabilities.hasCapability(NetworkCapabilities.NET_CAPABILITY_VALIDATED); + + try { + @Nullable + NetworkCapabilities networkCapabilities = + connectivityManager.getNetworkCapabilities(activeNetwork); + return networkCapabilities != null + && networkCapabilities.hasCapability(NetworkCapabilities.NET_CAPABILITY_VALIDATED); + } catch (SecurityException e) { + // Workaround for https://issuetracker.google.com/issues/175055271. + return true; + } } @Override diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/RequirementsWatcher.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/RequirementsWatcher.java index 80015cf3a7..c8e48c826f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/RequirementsWatcher.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/RequirementsWatcher.java @@ -15,7 +15,8 @@ */ package com.google.android.exoplayer2.scheduler; -import android.annotation.TargetApi; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; @@ -28,7 +29,6 @@ import android.os.PowerManager; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; -import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; /** @@ -60,7 +60,7 @@ void onRequirementsStateChanged( @Nullable private DeviceStatusChangeReceiver receiver; - @Requirements.RequirementFlags private int notMetRequirements; + private @Requirements.RequirementFlags int notMetRequirements; @Nullable private NetworkCallback networkCallback; /** @@ -72,7 +72,7 @@ public RequirementsWatcher(Context context, Listener listener, Requirements requ this.context = context.getApplicationContext(); this.listener = listener; this.requirements = requirements; - handler = new Handler(Util.getLooper()); + handler = Util.createHandlerForCurrentOrMainLooper(); } /** @@ -81,8 +81,7 @@ public RequirementsWatcher(Context context, Listener listener, Requirements requ * * @return Initial {@link Requirements.RequirementFlags RequirementFlags} that are not met, or 0. 
*/ - @Requirements.RequirementFlags - public int start() { + public @Requirements.RequirementFlags int start() { notMetRequirements = requirements.getNotMetRequirements(context); IntentFilter filter = new IntentFilter(); @@ -105,14 +104,18 @@ public int start() { filter.addAction(Intent.ACTION_SCREEN_OFF); } } + if (requirements.isStorageNotLowRequired()) { + filter.addAction(Intent.ACTION_DEVICE_STORAGE_LOW); + filter.addAction(Intent.ACTION_DEVICE_STORAGE_OK); + } receiver = new DeviceStatusChangeReceiver(); - context.registerReceiver(receiver, filter, null, handler); + Util.registerReceiverNotExported(context, receiver, filter, handler); return notMetRequirements; } /** Stops watching for changes. */ public void stop() { - context.unregisterReceiver(Assertions.checkNotNull(receiver)); + context.unregisterReceiver(checkNotNull(receiver)); receiver = null; if (Util.SDK_INT >= 24 && networkCallback != null) { unregisterNetworkCallbackV24(); @@ -124,20 +127,19 @@ public Requirements getRequirements() { return requirements; } - @TargetApi(24) + @RequiresApi(24) private void registerNetworkCallbackV24() { ConnectivityManager connectivityManager = - Assertions.checkNotNull( - (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE)); + checkNotNull((ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE)); networkCallback = new NetworkCallback(); connectivityManager.registerDefaultNetworkCallback(networkCallback); } - @TargetApi(24) + @RequiresApi(24) private void unregisterNetworkCallbackV24() { ConnectivityManager connectivityManager = - (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE); - connectivityManager.unregisterNetworkCallback(Assertions.checkNotNull(networkCallback)); + checkNotNull((ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE)); + connectivityManager.unregisterNetworkCallback(checkNotNull(networkCallback)); networkCallback = null; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/Scheduler.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/Scheduler.java index b5a6f40424..c34c77b2cf 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/Scheduler.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/scheduler/Scheduler.java @@ -45,4 +45,14 @@ public interface Scheduler { * @return Whether cancellation was successful. */ boolean cancel(); + + /** + * Checks whether this {@link Scheduler} supports the provided {@link Requirements}. If all of the + * requirements are supported then the same {@link Requirements} instance is returned. If not then + * a new instance is returned containing the subset of the requirements that are supported. + * + * @param requirements The requirements to check. + * @return The supported requirements. 
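// Illustrative sketch (not part of the patch): using the new Scheduler.getSupportedRequirements
// contract to detect requirement flags that the platform scheduler would silently ignore, with
// PlatformScheduler as the concrete implementation. PlatformScheduler needs the
// RECEIVE_BOOT_COMPLETED permission; the job id, tag and method name are invented for the example.
import android.content.Context;
import android.util.Log;
import com.google.android.exoplayer2.scheduler.PlatformScheduler;
import com.google.android.exoplayer2.scheduler.Requirements;
import com.google.android.exoplayer2.scheduler.Scheduler;

final class SupportedRequirementsSketch {
  static Requirements clampToSupported(Context context, Requirements requested) {
    Scheduler scheduler = new PlatformScheduler(context, /* jobId= */ 1);
    Requirements supported = scheduler.getSupportedRequirements(requested);
    if (!supported.equals(requested)) {
      // XOR isolates the flags that were requested but are not supported on this device.
      Log.w(
          "SchedulerSketch",
          "Dropping unsupported requirement flags: "
              + (requested.getRequirements() ^ supported.getRequirements()));
    }
    return supported;
  }
}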
+ */ + Requirements getSupportedRequirements(Requirements requirements); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/AdaptiveMediaSourceEventListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/AdaptiveMediaSourceEventListener.java deleted file mode 100644 index ccc3beac55..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/AdaptiveMediaSourceEventListener.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.source; - -/** - * Interface for callbacks to be notified of {@link MediaSource} events. - * - * @deprecated Use {@link MediaSourceEventListener}. - */ -@Deprecated -public interface AdaptiveMediaSourceEventListener extends MediaSourceEventListener {} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/BaseMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/BaseMediaSource.java index 86e00e0a37..cb8ff2650a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/BaseMediaSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/BaseMediaSource.java @@ -15,10 +15,14 @@ */ package com.google.android.exoplayer2.source; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + import android.os.Handler; import android.os.Looper; import androidx.annotation.Nullable; import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.drm.DrmSessionEventListener; import com.google.android.exoplayer2.upstream.TransferListener; import com.google.android.exoplayer2.util.Assertions; import java.util.ArrayList; @@ -36,19 +40,22 @@ public abstract class BaseMediaSource implements MediaSource { private final ArrayList mediaSourceCallers; private final HashSet enabledMediaSourceCallers; private final MediaSourceEventListener.EventDispatcher eventDispatcher; + private final DrmSessionEventListener.EventDispatcher drmEventDispatcher; @Nullable private Looper looper; @Nullable private Timeline timeline; + @Nullable private PlayerId playerId; public BaseMediaSource() { mediaSourceCallers = new ArrayList<>(/* initialCapacity= */ 1); enabledMediaSourceCallers = new HashSet<>(/* initialCapacity= */ 1); eventDispatcher = new MediaSourceEventListener.EventDispatcher(); + drmEventDispatcher = new DrmSessionEventListener.EventDispatcher(); } /** * Starts source preparation and enables the source, see {@link #prepareSource(MediaSourceCaller, - * TransferListener)}. This method is called at most once until the next call to {@link + * TransferListener, PlayerId)}. This method is called at most once until the next call to {@link * #releaseSourceInternal()}. 
* * @param mediaTransferListener The transfer listener which should be informed of any media data @@ -84,7 +91,7 @@ protected final void refreshSourceInfo(Timeline timeline) { /** * Returns a {@link MediaSourceEventListener.EventDispatcher} which dispatches all events to the - * registered listeners with the specified media period id. + * registered listeners with the specified {@link MediaPeriodId}. * * @param mediaPeriodId The {@link MediaPeriodId} to be reported with the events. May be null, if * the events do not belong to a specific media period. @@ -98,7 +105,7 @@ protected final MediaSourceEventListener.EventDispatcher createEventDispatcher( /** * Returns a {@link MediaSourceEventListener.EventDispatcher} which dispatches all events to the - * registered listeners with the specified media period id and time offset. + * registered listeners with the specified {@link MediaPeriodId} and time offset. * * @param mediaPeriodId The {@link MediaPeriodId} to be reported with the events. * @param mediaTimeOffsetMs The offset to be added to all media times, in milliseconds. @@ -106,13 +113,13 @@ protected final MediaSourceEventListener.EventDispatcher createEventDispatcher( */ protected final MediaSourceEventListener.EventDispatcher createEventDispatcher( MediaPeriodId mediaPeriodId, long mediaTimeOffsetMs) { - Assertions.checkArgument(mediaPeriodId != null); + Assertions.checkNotNull(mediaPeriodId); return eventDispatcher.withParameters(/* windowIndex= */ 0, mediaPeriodId, mediaTimeOffsetMs); } /** * Returns a {@link MediaSourceEventListener.EventDispatcher} which dispatches all events to the - * registered listeners with the specified window index, media period id and time offset. + * registered listeners with the specified window index, {@link MediaPeriodId} and time offset. * * @param windowIndex The timeline window index to be reported with the events. * @param mediaPeriodId The {@link MediaPeriodId} to be reported with the events. May be null, if @@ -125,13 +132,52 @@ protected final MediaSourceEventListener.EventDispatcher createEventDispatcher( return eventDispatcher.withParameters(windowIndex, mediaPeriodId, mediaTimeOffsetMs); } + /** + * Returns a {@link DrmSessionEventListener.EventDispatcher} which dispatches all events to the + * registered listeners with the specified {@link MediaPeriodId} + * + * @param mediaPeriodId The {@link MediaPeriodId} to be reported with the events. May be null, if + * the events do not belong to a specific media period. + * @return An event dispatcher with pre-configured media period id. + */ + protected final DrmSessionEventListener.EventDispatcher createDrmEventDispatcher( + @Nullable MediaPeriodId mediaPeriodId) { + return drmEventDispatcher.withParameters(/* windowIndex= */ 0, mediaPeriodId); + } + + /** + * Returns a {@link DrmSessionEventListener.EventDispatcher} which dispatches all events to the + * registered listeners with the specified window index and {@link MediaPeriodId}. + * + * @param windowIndex The timeline window index to be reported with the events. + * @param mediaPeriodId The {@link MediaPeriodId} to be reported with the events. May be null, if + * the events do not belong to a specific media period. + * @return An event dispatcher with pre-configured media period id and time offset. 
+ */ + protected final DrmSessionEventListener.EventDispatcher createDrmEventDispatcher( + int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { + return drmEventDispatcher.withParameters(windowIndex, mediaPeriodId); + } + /** Returns whether the source is enabled. */ protected final boolean isEnabled() { return !enabledMediaSourceCallers.isEmpty(); } + /** + * Returns the {@link PlayerId} of the player using this media source. + * + *
<p>
      Must only be used when the media source is {@link #prepareSourceInternal(TransferListener) + * prepared}. + */ + protected final PlayerId getPlayerId() { + return checkStateNotNull(playerId); + } + @Override public final void addEventListener(Handler handler, MediaSourceEventListener eventListener) { + Assertions.checkNotNull(handler); + Assertions.checkNotNull(eventListener); eventDispatcher.addEventListener(handler, eventListener); } @@ -140,12 +186,34 @@ public final void removeEventListener(MediaSourceEventListener eventListener) { eventDispatcher.removeEventListener(eventListener); } + @Override + public final void addDrmEventListener(Handler handler, DrmSessionEventListener eventListener) { + Assertions.checkNotNull(handler); + Assertions.checkNotNull(eventListener); + drmEventDispatcher.addEventListener(handler, eventListener); + } + + @Override + public final void removeDrmEventListener(DrmSessionEventListener eventListener) { + drmEventDispatcher.removeEventListener(eventListener); + } + + @SuppressWarnings("deprecation") // Overriding deprecated method to make it final. @Override public final void prepareSource( MediaSourceCaller caller, @Nullable TransferListener mediaTransferListener) { + prepareSource(caller, mediaTransferListener, PlayerId.UNSET); + } + + @Override + public final void prepareSource( + MediaSourceCaller caller, + @Nullable TransferListener mediaTransferListener, + PlayerId playerId) { Looper looper = Looper.myLooper(); Assertions.checkArgument(this.looper == null || this.looper == looper); - Timeline timeline = this.timeline; + this.playerId = playerId; + @Nullable Timeline timeline = this.timeline; mediaSourceCallers.add(caller); if (this.looper == null) { this.looper = looper; @@ -182,6 +250,7 @@ public final void releaseSource(MediaSourceCaller caller) { if (mediaSourceCallers.isEmpty()) { looper = null; timeline = null; + playerId = null; enabledMediaSourceCallers.clear(); releaseSourceInternal(); } else { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/BehindLiveWindowException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/BehindLiveWindowException.java index 8e0441dfcf..743659773a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/BehindLiveWindowException.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/BehindLiveWindowException.java @@ -17,13 +17,10 @@ import java.io.IOException; -/** - * Thrown when a live playback falls behind the available media window. - */ +/** Thrown when a live playback falls behind the available media window. */ public final class BehindLiveWindowException extends IOException { public BehindLiveWindowException() { super(); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/BundledExtractorsAdapter.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/BundledExtractorsAdapter.java new file mode 100644 index 0000000000..edb16e53d6 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/BundledExtractorsAdapter.java @@ -0,0 +1,129 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source; + +import android.net.Uri; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.extractor.DefaultExtractorInput; +import com.google.android.exoplayer2.extractor.Extractor; +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.ExtractorsFactory; +import com.google.android.exoplayer2.extractor.PositionHolder; +import com.google.android.exoplayer2.extractor.mp3.Mp3Extractor; +import com.google.android.exoplayer2.upstream.DataReader; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; +import java.io.EOFException; +import java.io.IOException; +import java.util.List; +import java.util.Map; + +/** + * {@link ProgressiveMediaExtractor} built on top of {@link Extractor} instances, whose + * implementation classes are bundled in the app. + */ +public final class BundledExtractorsAdapter implements ProgressiveMediaExtractor { + + private final ExtractorsFactory extractorsFactory; + + @Nullable private Extractor extractor; + @Nullable private ExtractorInput extractorInput; + + /** + * Creates a holder that will select an extractor and initialize it using the specified output. + * + * @param extractorsFactory The {@link ExtractorsFactory} providing the extractors to choose from. + */ + public BundledExtractorsAdapter(ExtractorsFactory extractorsFactory) { + this.extractorsFactory = extractorsFactory; + } + + @Override + public void init( + DataReader dataReader, + Uri uri, + Map> responseHeaders, + long position, + long length, + ExtractorOutput output) + throws IOException { + ExtractorInput extractorInput = new DefaultExtractorInput(dataReader, position, length); + this.extractorInput = extractorInput; + if (extractor != null) { + return; + } + Extractor[] extractors = extractorsFactory.createExtractors(uri, responseHeaders); + if (extractors.length == 1) { + this.extractor = extractors[0]; + } else { + for (Extractor extractor : extractors) { + try { + if (extractor.sniff(extractorInput)) { + this.extractor = extractor; + break; + } + } catch (EOFException e) { + // Do nothing. + } finally { + Assertions.checkState(this.extractor != null || extractorInput.getPosition() == position); + extractorInput.resetPeekPosition(); + } + } + if (extractor == null) { + throw new UnrecognizedInputFormatException( + "None of the available extractors (" + + Util.getCommaDelimitedSimpleClassNames(extractors) + + ") could read the stream.", + Assertions.checkNotNull(uri)); + } + } + extractor.init(output); + } + + @Override + public void release() { + if (extractor != null) { + extractor.release(); + extractor = null; + } + extractorInput = null; + } + + @Override + public void disableSeekingOnMp3Streams() { + if (extractor instanceof Mp3Extractor) { + ((Mp3Extractor) extractor).disableSeeking(); + } + } + + @Override + public long getCurrentInputPosition() { + return extractorInput != null ? 
extractorInput.getPosition() : C.POSITION_UNSET; + } + + @Override + public void seek(long position, long seekTimeUs) { + Assertions.checkNotNull(extractor).seek(position, seekTimeUs); + } + + @Override + public int read(PositionHolder positionHolder) throws IOException { + return Assertions.checkNotNull(extractor) + .read(Assertions.checkNotNull(extractorInput), positionHolder); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ClippingMediaPeriod.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ClippingMediaPeriod.java index 4385a41ff3..10cc75de32 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ClippingMediaPeriod.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ClippingMediaPeriod.java @@ -21,7 +21,8 @@ import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.SeekParameters; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.source.ClippingMediaSource.IllegalClippingException; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; @@ -34,9 +35,7 @@ */ public final class ClippingMediaPeriod implements MediaPeriod, MediaPeriod.Callback { - /** - * The {@link MediaPeriod} wrapped by this clipping media period. - */ + /** The {@link MediaPeriod} wrapped by this clipping media period. */ public final MediaPeriod mediaPeriod; @Nullable private MediaPeriod.Callback callback; @@ -44,6 +43,7 @@ public final class ClippingMediaPeriod implements MediaPeriod, MediaPeriod.Callb private long pendingInitialDiscontinuityPositionUs; /* package */ long startUs; /* package */ long endUs; + @Nullable private IllegalClippingException clippingError; /** * Creates a new clipping media period that provides a clipped view of the specified {@link @@ -80,6 +80,16 @@ public void updateClipping(long startUs, long endUs) { this.endUs = endUs; } + /** + * Sets a clipping error detected by the media source so that it can be thrown as a period error + * at the next opportunity. + * + * @param clippingError The clipping error. 
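// Illustrative sketch (not part of the patch): the selection loop used by BundledExtractorsAdapter
// above, reduced to its shape. Each candidate "sniffs" the input in turn, the peek state is reset
// after every attempt so the next candidate sees the same bytes, and the first match wins. The
// Sniffer interface and all names here are invented for the example.
import java.io.EOFException;
import java.io.IOException;
import java.util.List;

final class SniffSelectionSketch {
  interface Sniffer<I> {
    boolean sniff(I input) throws IOException;
  }

  static <T extends Sniffer<I>, I> T selectFirstMatch(
      List<T> candidates, I input, Runnable resetPeek) throws IOException {
    for (T candidate : candidates) {
      try {
        if (candidate.sniff(input)) {
          return candidate;
        }
      } catch (EOFException e) {
        // The input was too short for this candidate; try the next one.
      } finally {
        resetPeek.run(); // Mirrors ExtractorInput.resetPeekPosition() in the adapter above.
      }
    }
    throw new IOException("None of the candidates recognized the input.");
  }
}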
+ */ + public void setClippingError(IllegalClippingException clippingError) { + this.clippingError = clippingError; + } + @Override public void prepare(MediaPeriod.Callback callback, long positionUs) { this.callback = callback; @@ -88,6 +98,9 @@ public void prepare(MediaPeriod.Callback callback, long positionUs) { @Override public void maybeThrowPrepareError() throws IOException { + if (clippingError != null) { + throw clippingError; + } mediaPeriod.maybeThrowPrepareError(); } @@ -98,7 +111,7 @@ public TrackGroupArray getTrackGroups() { @Override public long selectTracks( - @NullableType TrackSelection[] selections, + @NullableType ExoTrackSelection[] selections, boolean[] mayRetainStreamFlags, @NullableType SampleStream[] streams, boolean[] streamResetFlags, @@ -174,7 +187,7 @@ public long getBufferedPositionUs() { @Override public long seekToUs(long positionUs) { pendingInitialDiscontinuityPositionUs = C.TIME_UNSET; - for (ClippingSampleStream sampleStream : sampleStreams) { + for (@Nullable ClippingSampleStream sampleStream : sampleStreams) { if (sampleStream != null) { sampleStream.clearSentEos(); } @@ -220,6 +233,9 @@ public boolean isLoading() { @Override public void onPrepared(MediaPeriod mediaPeriod) { + if (clippingError != null) { + return; + } Assertions.checkNotNull(callback).onPrepared(this); } @@ -250,7 +266,7 @@ private SeekParameters clipSeekParameters(long positionUs, SeekParameters seekPa } private static boolean shouldKeepInitialDiscontinuity( - long startUs, @NullableType TrackSelection[] selections) { + long startUs, @NullableType ExoTrackSelection[] selections) { // If the clipping start position is non-zero, the clipping sample streams will adjust // timestamps on buffers they read from the unclipped sample streams. These adjusted buffer // timestamps can be negative, because sample streams provide buffers starting at a key-frame, @@ -258,13 +274,14 @@ private static boolean shouldKeepInitialDiscontinuity( // negative timestamp, its offset timestamp can jump backwards compared to the last timestamp // read in the previous period. Renderer implementations may not allow this, so we signal a // discontinuity which resets the renderers before they read the clipping sample stream. - // However, for audio-only track selections we assume to have random access seek behaviour and - // do not need an initial discontinuity to reset the renderer. + // However, for tracks where all samples are sync samples, we assume they have random access + // seek behaviour and do not need an initial discontinuity to reset the renderer. if (startUs != 0) { - for (TrackSelection trackSelection : selections) { + for (ExoTrackSelection trackSelection : selections) { if (trackSelection != null) { Format selectedFormat = trackSelection.getSelectedFormat(); - if (!MimeTypes.isAudio(selectedFormat.sampleMimeType)) { + if (!MimeTypes.allSamplesAreSyncSamples( + selectedFormat.sampleMimeType, selectedFormat.codecs)) { return true; } } @@ -273,9 +290,7 @@ private static boolean shouldKeepInitialDiscontinuity( return false; } - /** - * Wraps a {@link SampleStream} and clips its samples. - */ + /** Wraps a {@link SampleStream} and clips its samples. 
*/ private final class ClippingSampleStream implements SampleStream { public final SampleStream childStream; @@ -301,8 +316,8 @@ public void maybeThrowError() throws IOException { } @Override - public int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, - boolean requireFormat) { + public int readData( + FormatHolder formatHolder, DecoderInputBuffer buffer, @ReadFlags int readFlags) { if (isPendingInitialDiscontinuity()) { return C.RESULT_NOTHING_READ; } @@ -310,14 +325,19 @@ public int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, buffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM); return C.RESULT_BUFFER_READ; } - int result = childStream.readData(formatHolder, buffer, requireFormat); + @ReadDataResult int result = childStream.readData(formatHolder, buffer, readFlags); if (result == C.RESULT_FORMAT_READ) { Format format = Assertions.checkNotNull(formatHolder.format); if (format.encoderDelay != 0 || format.encoderPadding != 0) { // Clear gapless playback metadata if the start/end points don't match the media. int encoderDelay = startUs != 0 ? 0 : format.encoderDelay; int encoderPadding = endUs != C.TIME_END_OF_SOURCE ? 0 : format.encoderPadding; - formatHolder.format = format.copyWithGaplessInfo(encoderDelay, encoderPadding); + formatHolder.format = + format + .buildUpon() + .setEncoderDelay(encoderDelay) + .setEncoderPadding(encoderPadding) + .build(); } return C.RESULT_FORMAT_READ; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ClippingMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ClippingMediaSource.java index 4780c075d5..c1dfa1ac98 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ClippingMediaSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ClippingMediaSource.java @@ -15,24 +15,29 @@ */ package com.google.android.exoplayer2.source; +import static java.lang.Math.max; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Timeline; import com.google.android.exoplayer2.upstream.Allocator; -import com.google.android.exoplayer2.upstream.TransferListener; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; import java.io.IOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.ArrayList; /** * {@link MediaSource} that wraps a source and clips its timeline based on specified start/end * positions. The wrapped source must consist of a single period. */ -public final class ClippingMediaSource extends CompositeMediaSource { +public final class ClippingMediaSource extends WrappingMediaSource { /** Thrown when a {@link ClippingMediaSource} cannot clip its wrapped source. */ public static final class IllegalClippingException extends IOException { @@ -43,6 +48,7 @@ public static final class IllegalClippingException extends IOException { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({REASON_INVALID_PERIOD_COUNT, REASON_NOT_SEEKABLE_TO_START, REASON_START_EXCEEDS_END}) public @interface Reason {} /** The wrapped source doesn't consist of a single period. 
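// Illustrative sketch (not part of the patch): the gapless-metadata rewrite that
// ClippingSampleStream.readData performs above. Encoder delay is kept only when the clip starts
// at the beginning of the media, and encoder padding only when the clip runs to the end of the
// source; the helper name is invented for the example.
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;

final class GaplessClippingSketch {
  static Format clearGaplessInfoIfClipped(Format format, long startUs, long endUs) {
    int encoderDelay = startUs != 0 ? 0 : format.encoderDelay;
    int encoderPadding = endUs != C.TIME_END_OF_SOURCE ? 0 : format.encoderPadding;
    return format.buildUpon()
        .setEncoderDelay(encoderDelay)
        .setEncoderPadding(encoderPadding)
        .build();
  }
}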
*/ @@ -77,7 +83,6 @@ private static String getReasonDescription(@Reason int reason) { } } - private final MediaSource mediaSource; private final long startUs; private final long endUs; private final boolean enableInitialDiscontinuity; @@ -173,8 +178,8 @@ public ClippingMediaSource( boolean enableInitialDiscontinuity, boolean allowDynamicClippingUpdates, boolean relativeToDefaultPosition) { + super(Assertions.checkNotNull(mediaSource)); Assertions.checkArgument(startPositionUs >= 0); - this.mediaSource = Assertions.checkNotNull(mediaSource); startUs = startPositionUs; endUs = endPositionUs; this.enableInitialDiscontinuity = enableInitialDiscontinuity; @@ -184,18 +189,6 @@ public ClippingMediaSource( window = new Timeline.Window(); } - @Override - @Nullable - public Object getTag() { - return mediaSource.getTag(); - } - - @Override - protected void prepareSourceInternal(@Nullable TransferListener mediaTransferListener) { - super.prepareSourceInternal(mediaTransferListener); - prepareChildSource(/* id= */ null, mediaSource); - } - @Override public void maybeThrowSourceInfoRefreshError() throws IOException { if (clippingError != null) { @@ -233,11 +226,11 @@ protected void releaseSourceInternal() { } @Override - protected void onChildSourceInfoRefreshed(Void id, MediaSource mediaSource, Timeline timeline) { + protected void onChildSourceInfoRefreshed(Timeline newTimeline) { if (clippingError != null) { return; } - refreshClippedTimeline(timeline); + refreshClippedTimeline(newTimeline); } private void refreshClippedTimeline(Timeline timeline) { @@ -274,27 +267,17 @@ private void refreshClippedTimeline(Timeline timeline) { clippingTimeline = new ClippingTimeline(timeline, windowStartUs, windowEndUs); } catch (IllegalClippingException e) { clippingError = e; + // The clipping error won't be propagated while we have existing MediaPeriods. Setting the + // error at the MediaPeriods ensures it will be thrown as soon as possible. + for (int i = 0; i < mediaPeriods.size(); i++) { + mediaPeriods.get(i).setClippingError(clippingError); + } return; } refreshSourceInfo(clippingTimeline); } - @Override - protected long getMediaTimeForChildMediaTime(Void id, long mediaTimeMs) { - if (mediaTimeMs == C.TIME_UNSET) { - return C.TIME_UNSET; - } - long startMs = C.usToMs(startUs); - long clippedTimeMs = Math.max(0, mediaTimeMs - startMs); - if (endUs != C.TIME_END_OF_SOURCE) { - clippedTimeMs = Math.min(C.usToMs(endUs) - startMs, clippedTimeMs); - } - return clippedTimeMs; - } - - /** - * Provides a clipped view of a specified timeline. - */ + /** Provides a clipped view of a specified timeline. */ private static final class ClippingTimeline extends ForwardingTimeline { private final long startUs; @@ -318,15 +301,15 @@ public ClippingTimeline(Timeline timeline, long startUs, long endUs) throw new IllegalClippingException(IllegalClippingException.REASON_INVALID_PERIOD_COUNT); } Window window = timeline.getWindow(0, new Window()); - startUs = Math.max(0, startUs); - long resolvedEndUs = endUs == C.TIME_END_OF_SOURCE ? window.durationUs : Math.max(0, endUs); + startUs = max(0, startUs); + if (!window.isPlaceholder && startUs != 0 && !window.isSeekable) { + throw new IllegalClippingException(IllegalClippingException.REASON_NOT_SEEKABLE_TO_START); + } + long resolvedEndUs = endUs == C.TIME_END_OF_SOURCE ? 
window.durationUs : max(0, endUs); if (window.durationUs != C.TIME_UNSET) { if (resolvedEndUs > window.durationUs) { resolvedEndUs = window.durationUs; } - if (startUs != 0 && !window.isSeekable) { - throw new IllegalClippingException(IllegalClippingException.REASON_NOT_SEEKABLE_TO_START); - } if (startUs > resolvedEndUs) { throw new IllegalClippingException(IllegalClippingException.REASON_START_EXCEEDS_END); } @@ -347,12 +330,12 @@ public Window getWindow(int windowIndex, Window window, long defaultPositionProj window.durationUs = durationUs; window.isDynamic = isDynamic; if (window.defaultPositionUs != C.TIME_UNSET) { - window.defaultPositionUs = Math.max(window.defaultPositionUs, startUs); - window.defaultPositionUs = endUs == C.TIME_UNSET ? window.defaultPositionUs - : Math.min(window.defaultPositionUs, endUs); + window.defaultPositionUs = max(window.defaultPositionUs, startUs); + window.defaultPositionUs = + endUs == C.TIME_UNSET ? window.defaultPositionUs : min(window.defaultPositionUs, endUs); window.defaultPositionUs -= startUs; } - long startMs = C.usToMs(startUs); + long startMs = Util.usToMs(startUs); if (window.presentationStartTimeMs != C.TIME_UNSET) { window.presentationStartTimeMs += startMs; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/CompositeMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/CompositeMediaSource.java index 7077416a02..08912c0586 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/CompositeMediaSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/CompositeMediaSource.java @@ -19,8 +19,11 @@ import androidx.annotation.CallSuper; import androidx.annotation.Nullable; import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.drm.DrmSession; +import com.google.android.exoplayer2.drm.DrmSessionEventListener; import com.google.android.exoplayer2.upstream.TransferListener; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.UnknownNull; import com.google.android.exoplayer2.util.Util; import java.io.IOException; import java.util.HashMap; @@ -32,7 +35,7 @@ */ public abstract class CompositeMediaSource extends BaseMediaSource { - private final HashMap childSources; + private final HashMap> childSources; @Nullable private Handler eventHandler; @Nullable private TransferListener mediaTransferListener; @@ -46,13 +49,13 @@ protected CompositeMediaSource() { @CallSuper protected void prepareSourceInternal(@Nullable TransferListener mediaTransferListener) { this.mediaTransferListener = mediaTransferListener; - eventHandler = new Handler(); + eventHandler = Util.createHandlerForCurrentLooper(); } @Override @CallSuper public void maybeThrowSourceInfoRefreshError() throws IOException { - for (MediaSourceAndListener childSource : childSources.values()) { + for (MediaSourceAndListener childSource : childSources.values()) { childSource.mediaSource.maybeThrowSourceInfoRefreshError(); } } @@ -60,7 +63,7 @@ public void maybeThrowSourceInfoRefreshError() throws IOException { @Override @CallSuper protected void enableInternal() { - for (MediaSourceAndListener childSource : childSources.values()) { + for (MediaSourceAndListener childSource : childSources.values()) { childSource.mediaSource.enable(childSource.caller); } } @@ -68,7 +71,7 @@ protected void enableInternal() { @Override @CallSuper protected void disableInternal() { - for (MediaSourceAndListener childSource : 
childSources.values()) { + for (MediaSourceAndListener childSource : childSources.values()) { childSource.mediaSource.disable(childSource.caller); } } @@ -76,9 +79,10 @@ protected void disableInternal() { @Override @CallSuper protected void releaseSourceInternal() { - for (MediaSourceAndListener childSource : childSources.values()) { + for (MediaSourceAndListener childSource : childSources.values()) { childSource.mediaSource.releaseSource(childSource.caller); childSource.mediaSource.removeEventListener(childSource.eventListener); + childSource.mediaSource.removeDrmEventListener(childSource.eventListener); } childSources.clear(); } @@ -86,12 +90,12 @@ protected void releaseSourceInternal() { /** * Called when the source info of a child source has been refreshed. * - * @param id The unique id used to prepare the child source. + * @param childSourceId The unique id used to prepare the child source. * @param mediaSource The child source whose source info has been refreshed. - * @param timeline The timeline of the child source. + * @param newTimeline The timeline of the child source. */ protected abstract void onChildSourceInfoRefreshed( - T id, MediaSource mediaSource, Timeline timeline); + @UnknownNull T childSourceId, MediaSource mediaSource, Timeline newTimeline); /** * Prepares a child source. @@ -105,14 +109,15 @@ protected abstract void onChildSourceInfoRefreshed( * @param id A unique id to identify the child source preparation. Null is allowed as an id. * @param mediaSource The child {@link MediaSource}. */ - protected final void prepareChildSource(final T id, MediaSource mediaSource) { + protected final void prepareChildSource(@UnknownNull T id, MediaSource mediaSource) { Assertions.checkArgument(!childSources.containsKey(id)); MediaSourceCaller caller = (source, timeline) -> onChildSourceInfoRefreshed(id, source, timeline); - MediaSourceEventListener eventListener = new ForwardingEventListener(id); - childSources.put(id, new MediaSourceAndListener(mediaSource, caller, eventListener)); + ForwardingEventListener eventListener = new ForwardingEventListener(id); + childSources.put(id, new MediaSourceAndListener<>(mediaSource, caller, eventListener)); mediaSource.addEventListener(Assertions.checkNotNull(eventHandler), eventListener); - mediaSource.prepareSource(caller, mediaTransferListener); + mediaSource.addDrmEventListener(Assertions.checkNotNull(eventHandler), eventListener); + mediaSource.prepareSource(caller, mediaTransferListener, getPlayerId()); if (!isEnabled()) { mediaSource.disable(caller); } @@ -123,8 +128,8 @@ protected final void prepareChildSource(final T id, MediaSource mediaSource) { * * @param id The unique id used to prepare the child source. */ - protected final void enableChildSource(final T id) { - MediaSourceAndListener enabledChild = Assertions.checkNotNull(childSources.get(id)); + protected final void enableChildSource(@UnknownNull T id) { + MediaSourceAndListener enabledChild = Assertions.checkNotNull(childSources.get(id)); enabledChild.mediaSource.enable(enabledChild.caller); } @@ -133,8 +138,8 @@ protected final void enableChildSource(final T id) { * * @param id The unique id used to prepare the child source. 
*/ - protected final void disableChildSource(final T id) { - MediaSourceAndListener disabledChild = Assertions.checkNotNull(childSources.get(id)); + protected final void disableChildSource(@UnknownNull T id) { + MediaSourceAndListener disabledChild = Assertions.checkNotNull(childSources.get(id)); disabledChild.mediaSource.disable(disabledChild.caller); } @@ -143,21 +148,22 @@ protected final void disableChildSource(final T id) { * * @param id The unique id used to prepare the child source. */ - protected final void releaseChildSource(T id) { - MediaSourceAndListener removedChild = Assertions.checkNotNull(childSources.remove(id)); + protected final void releaseChildSource(@UnknownNull T id) { + MediaSourceAndListener removedChild = Assertions.checkNotNull(childSources.remove(id)); removedChild.mediaSource.releaseSource(removedChild.caller); removedChild.mediaSource.removeEventListener(removedChild.eventListener); + removedChild.mediaSource.removeDrmEventListener(removedChild.eventListener); } /** * Returns the window index in the composite source corresponding to the specified window index in * a child source. The default implementation does not change the window index. * - * @param id The unique id used to prepare the child source. + * @param childSourceId The unique id used to prepare the child source. * @param windowIndex A window index of the child source. * @return The corresponding window index in the composite source. */ - protected int getWindowIndexForChildWindowIndex(T id, int windowIndex) { + protected int getWindowIndexForChildWindowIndex(@UnknownNull T childSourceId, int windowIndex) { return windowIndex; } @@ -166,83 +172,62 @@ protected int getWindowIndexForChildWindowIndex(T id, int windowIndex) { * MediaPeriodId} in a child source. The default implementation does not change the media period * id. * - * @param id The unique id used to prepare the child source. + * @param childSourceId The unique id used to prepare the child source. * @param mediaPeriodId A {@link MediaPeriodId} of the child source. * @return The corresponding {@link MediaPeriodId} in the composite source. Null if no * corresponding media period id can be determined. */ - protected @Nullable MediaPeriodId getMediaPeriodIdForChildMediaPeriodId( - T id, MediaPeriodId mediaPeriodId) { + @Nullable + protected MediaPeriodId getMediaPeriodIdForChildMediaPeriodId( + @UnknownNull T childSourceId, MediaPeriodId mediaPeriodId) { return mediaPeriodId; } /** - * Returns the media time in the composite source corresponding to the specified media time in a - * child source. The default implementation does not change the media time. + * Returns the media time in the {@link MediaPeriod} of the composite source corresponding to the + * specified media time in the {@link MediaPeriod} of the child source. The default implementation + * does not change the media time. * - * @param id The unique id used to prepare the child source. - * @param mediaTimeMs A media time of the child source, in milliseconds. - * @return The corresponding media time in the composite source, in milliseconds. + * @param childSourceId The unique id used to prepare the child source. + * @param mediaTimeMs A media time in the {@link MediaPeriod} of the child source, in + * milliseconds. + * @return The corresponding media time in the {@link MediaPeriod} of the composite source, in + * milliseconds. 
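 * <p>For orientation, an editor's sketch (not part of this patch) of one possible override,
 * mirroring the logic removed from {@code ClippingMediaSource} earlier in this diff and assuming
 * the subclass defines a {@code startUs} field:
 * <pre>{@code
 * protected long getMediaTimeForChildMediaTime(Void childSourceId, long mediaTimeMs) {
 *   if (mediaTimeMs == C.TIME_UNSET) {
 *     return C.TIME_UNSET; // Unknown child times stay unknown.
 *   }
 *   // Shift the child time so that 0 lines up with the clip start.
 *   return Math.max(0, mediaTimeMs - Util.usToMs(startUs));
 * }
 * }</pre>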
*/ - protected long getMediaTimeForChildMediaTime(@Nullable T id, long mediaTimeMs) { + protected long getMediaTimeForChildMediaTime(@UnknownNull T childSourceId, long mediaTimeMs) { return mediaTimeMs; } - /** - * Returns whether {@link MediaSourceEventListener#onMediaPeriodCreated(int, MediaPeriodId)} and - * {@link MediaSourceEventListener#onMediaPeriodReleased(int, MediaPeriodId)} events of the given - * media period should be reported. The default implementation is to always report these events. - * - * @param mediaPeriodId A {@link MediaPeriodId} in the composite media source. - * @return Whether create and release events for this media period should be reported. - */ - protected boolean shouldDispatchCreateOrReleaseEvent(MediaPeriodId mediaPeriodId) { - return true; - } - - private static final class MediaSourceAndListener { + private static final class MediaSourceAndListener { public final MediaSource mediaSource; public final MediaSourceCaller caller; - public final MediaSourceEventListener eventListener; + public final CompositeMediaSource.ForwardingEventListener eventListener; public MediaSourceAndListener( - MediaSource mediaSource, MediaSourceCaller caller, MediaSourceEventListener eventListener) { + MediaSource mediaSource, + MediaSourceCaller caller, + CompositeMediaSource.ForwardingEventListener eventListener) { this.mediaSource = mediaSource; this.caller = caller; this.eventListener = eventListener; } } - private final class ForwardingEventListener implements MediaSourceEventListener { + private final class ForwardingEventListener + implements MediaSourceEventListener, DrmSessionEventListener { - private final T id; - private EventDispatcher eventDispatcher; + @UnknownNull private final T id; + private MediaSourceEventListener.EventDispatcher mediaSourceEventDispatcher; + private DrmSessionEventListener.EventDispatcher drmEventDispatcher; - public ForwardingEventListener(T id) { - this.eventDispatcher = createEventDispatcher(/* mediaPeriodId= */ null); + public ForwardingEventListener(@UnknownNull T id) { + this.mediaSourceEventDispatcher = createEventDispatcher(/* mediaPeriodId= */ null); + this.drmEventDispatcher = createDrmEventDispatcher(/* mediaPeriodId= */ null); this.id = id; } - @Override - public void onMediaPeriodCreated(int windowIndex, MediaPeriodId mediaPeriodId) { - if (maybeUpdateEventDispatcher(windowIndex, mediaPeriodId)) { - if (shouldDispatchCreateOrReleaseEvent( - Assertions.checkNotNull(eventDispatcher.mediaPeriodId))) { - eventDispatcher.mediaPeriodCreated(); - } - } - } - - @Override - public void onMediaPeriodReleased(int windowIndex, MediaPeriodId mediaPeriodId) { - if (maybeUpdateEventDispatcher(windowIndex, mediaPeriodId)) { - if (shouldDispatchCreateOrReleaseEvent( - Assertions.checkNotNull(eventDispatcher.mediaPeriodId))) { - eventDispatcher.mediaPeriodReleased(); - } - } - } + // MediaSourceEventListener implementation @Override public void onLoadStarted( @@ -251,7 +236,8 @@ public void onLoadStarted( LoadEventInfo loadEventData, MediaLoadData mediaLoadData) { if (maybeUpdateEventDispatcher(windowIndex, mediaPeriodId)) { - eventDispatcher.loadStarted(loadEventData, maybeUpdateMediaLoadData(mediaLoadData)); + mediaSourceEventDispatcher.loadStarted( + loadEventData, maybeUpdateMediaLoadData(mediaLoadData)); } } @@ -262,7 +248,8 @@ public void onLoadCompleted( LoadEventInfo loadEventData, MediaLoadData mediaLoadData) { if (maybeUpdateEventDispatcher(windowIndex, mediaPeriodId)) { - eventDispatcher.loadCompleted(loadEventData, 
maybeUpdateMediaLoadData(mediaLoadData)); + mediaSourceEventDispatcher.loadCompleted( + loadEventData, maybeUpdateMediaLoadData(mediaLoadData)); } } @@ -273,7 +260,8 @@ public void onLoadCanceled( LoadEventInfo loadEventData, MediaLoadData mediaLoadData) { if (maybeUpdateEventDispatcher(windowIndex, mediaPeriodId)) { - eventDispatcher.loadCanceled(loadEventData, maybeUpdateMediaLoadData(mediaLoadData)); + mediaSourceEventDispatcher.loadCanceled( + loadEventData, maybeUpdateMediaLoadData(mediaLoadData)); } } @@ -286,38 +274,77 @@ public void onLoadError( IOException error, boolean wasCanceled) { if (maybeUpdateEventDispatcher(windowIndex, mediaPeriodId)) { - eventDispatcher.loadError( + mediaSourceEventDispatcher.loadError( loadEventData, maybeUpdateMediaLoadData(mediaLoadData), error, wasCanceled); } } @Override - public void onReadingStarted(int windowIndex, MediaPeriodId mediaPeriodId) { + public void onUpstreamDiscarded( + int windowIndex, @Nullable MediaPeriodId mediaPeriodId, MediaLoadData mediaLoadData) { if (maybeUpdateEventDispatcher(windowIndex, mediaPeriodId)) { - eventDispatcher.readingStarted(); + mediaSourceEventDispatcher.upstreamDiscarded(maybeUpdateMediaLoadData(mediaLoadData)); } } @Override - public void onUpstreamDiscarded( + public void onDownstreamFormatChanged( int windowIndex, @Nullable MediaPeriodId mediaPeriodId, MediaLoadData mediaLoadData) { if (maybeUpdateEventDispatcher(windowIndex, mediaPeriodId)) { - eventDispatcher.upstreamDiscarded(maybeUpdateMediaLoadData(mediaLoadData)); + mediaSourceEventDispatcher.downstreamFormatChanged(maybeUpdateMediaLoadData(mediaLoadData)); } } + // DrmSessionEventListener implementation + @Override - public void onDownstreamFormatChanged( - int windowIndex, @Nullable MediaPeriodId mediaPeriodId, MediaLoadData mediaLoadData) { + public void onDrmSessionAcquired( + int windowIndex, @Nullable MediaPeriodId mediaPeriodId, @DrmSession.State int state) { if (maybeUpdateEventDispatcher(windowIndex, mediaPeriodId)) { - eventDispatcher.downstreamFormatChanged(maybeUpdateMediaLoadData(mediaLoadData)); + drmEventDispatcher.drmSessionAcquired(state); + } + } + + @Override + public void onDrmKeysLoaded(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { + if (maybeUpdateEventDispatcher(windowIndex, mediaPeriodId)) { + drmEventDispatcher.drmKeysLoaded(); + } + } + + @Override + public void onDrmSessionManagerError( + int windowIndex, @Nullable MediaPeriodId mediaPeriodId, Exception error) { + if (maybeUpdateEventDispatcher(windowIndex, mediaPeriodId)) { + drmEventDispatcher.drmSessionManagerError(error); + } + } + + @Override + public void onDrmKeysRestored(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { + if (maybeUpdateEventDispatcher(windowIndex, mediaPeriodId)) { + drmEventDispatcher.drmKeysRestored(); + } + } + + @Override + public void onDrmKeysRemoved(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { + if (maybeUpdateEventDispatcher(windowIndex, mediaPeriodId)) { + drmEventDispatcher.drmKeysRemoved(); + } + } + + @Override + public void onDrmSessionReleased(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) { + if (maybeUpdateEventDispatcher(windowIndex, mediaPeriodId)) { + drmEventDispatcher.drmSessionReleased(); } } /** Updates the event dispatcher and returns whether the event should be dispatched. 
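 * <p>For example (illustrative): if a child source reports an event for its own window index 2 and
 * this composite source maps that to window index 0 plus a translated {@link MediaPeriodId}, new
 * dispatchers bound to the composite coordinates are created before the event is forwarded.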
*/ private boolean maybeUpdateEventDispatcher( int childWindowIndex, @Nullable MediaPeriodId childMediaPeriodId) { - MediaPeriodId mediaPeriodId = null; + @Nullable MediaPeriodId mediaPeriodId = null; if (childMediaPeriodId != null) { mediaPeriodId = getMediaPeriodIdForChildMediaPeriodId(id, childMediaPeriodId); if (mediaPeriodId == null) { @@ -326,11 +353,15 @@ private boolean maybeUpdateEventDispatcher( } } int windowIndex = getWindowIndexForChildWindowIndex(id, childWindowIndex); - if (eventDispatcher.windowIndex != windowIndex - || !Util.areEqual(eventDispatcher.mediaPeriodId, mediaPeriodId)) { - eventDispatcher = + if (mediaSourceEventDispatcher.windowIndex != windowIndex + || !Util.areEqual(mediaSourceEventDispatcher.mediaPeriodId, mediaPeriodId)) { + mediaSourceEventDispatcher = createEventDispatcher(windowIndex, mediaPeriodId, /* mediaTimeOffsetMs= */ 0); } + if (drmEventDispatcher.windowIndex != windowIndex + || !Util.areEqual(drmEventDispatcher.mediaPeriodId, mediaPeriodId)) { + drmEventDispatcher = createDrmEventDispatcher(windowIndex, mediaPeriodId); + } return true; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/CompositeSequenceableLoader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/CompositeSequenceableLoader.java index b583705170..5afded58e2 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/CompositeSequenceableLoader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/CompositeSequenceableLoader.java @@ -15,11 +15,11 @@ */ package com.google.android.exoplayer2.source; +import static java.lang.Math.min; + import com.google.android.exoplayer2.C; -/** - * A {@link SequenceableLoader} that encapsulates multiple other {@link SequenceableLoader}s. - */ +/** A {@link SequenceableLoader} that encapsulates multiple other {@link SequenceableLoader}s. */ public class CompositeSequenceableLoader implements SequenceableLoader { protected final SequenceableLoader[] loaders; @@ -34,7 +34,7 @@ public final long getBufferedPositionUs() { for (SequenceableLoader loader : loaders) { long loaderBufferedPositionUs = loader.getBufferedPositionUs(); if (loaderBufferedPositionUs != C.TIME_END_OF_SOURCE) { - bufferedPositionUs = Math.min(bufferedPositionUs, loaderBufferedPositionUs); + bufferedPositionUs = min(bufferedPositionUs, loaderBufferedPositionUs); } } return bufferedPositionUs == Long.MAX_VALUE ? C.TIME_END_OF_SOURCE : bufferedPositionUs; @@ -46,7 +46,7 @@ public final long getNextLoadPositionUs() { for (SequenceableLoader loader : loaders) { long loaderNextLoadPositionUs = loader.getNextLoadPositionUs(); if (loaderNextLoadPositionUs != C.TIME_END_OF_SOURCE) { - nextLoadPositionUs = Math.min(nextLoadPositionUs, loaderNextLoadPositionUs); + nextLoadPositionUs = min(nextLoadPositionUs, loaderNextLoadPositionUs); } } return nextLoadPositionUs == Long.MAX_VALUE ? 
C.TIME_END_OF_SOURCE : nextLoadPositionUs; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/CompositeSequenceableLoaderFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/CompositeSequenceableLoaderFactory.java index b4a266feef..8492245442 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/CompositeSequenceableLoaderFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/CompositeSequenceableLoaderFactory.java @@ -15,9 +15,7 @@ */ package com.google.android.exoplayer2.source; -/** - * A factory to create composite {@link SequenceableLoader}s. - */ +/** A factory to create composite {@link SequenceableLoader}s. */ public interface CompositeSequenceableLoaderFactory { /** @@ -27,5 +25,4 @@ public interface CompositeSequenceableLoaderFactory { * @return A composite {@link SequenceableLoader} that comprises the given loaders. */ SequenceableLoader createCompositeSequenceableLoader(SequenceableLoader... loaders); - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ConcatenatingMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ConcatenatingMediaSource.java index 8dfea1e511..eb6226874b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ConcatenatingMediaSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ConcatenatingMediaSource.java @@ -15,11 +15,17 @@ */ package com.google.android.exoplayer2.source; +import static java.lang.Math.max; +import static java.lang.Math.min; + +import android.net.Uri; import android.os.Handler; import android.os.Message; import androidx.annotation.GuardedBy; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.AbstractConcatenatedTimeline; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.Timeline; import com.google.android.exoplayer2.source.ConcatenatingMediaSource.MediaSourceHolder; import com.google.android.exoplayer2.source.ShuffleOrder.DefaultShuffleOrder; @@ -27,7 +33,6 @@ import com.google.android.exoplayer2.upstream.TransferListener; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; -import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -54,6 +59,9 @@ public final class ConcatenatingMediaSource extends CompositeMediaSource mediaSourcesPublic; @@ -67,7 +75,7 @@ public final class ConcatenatingMediaSource extends CompositeMediaSource mediaSourceHolders; - private final Map mediaSourceByMediaPeriod; + private final IdentityHashMap mediaSourceByMediaPeriod; private final Map mediaSourceByUid; private final Set enabledMediaSourceHolders; private final boolean isAtomic; @@ -78,8 +86,8 @@ public final class ConcatenatingMediaSource extends CompositeMediaSource(index, mediaSourceHolders, callbackAction)) @@ -563,9 +593,10 @@ private void removePublicMediaSources( @Nullable Handler handler, @Nullable Runnable onCompletionAction) { Assertions.checkArgument((handler == null) == (onCompletionAction == null)); - Handler playbackThreadHandler = this.playbackThreadHandler; + @Nullable Handler playbackThreadHandler = this.playbackThreadHandler; Util.removeRange(mediaSourcesPublic, fromIndex, toIndex); if (playbackThreadHandler != null) { + @Nullable HandlerAndRunnable callbackAction = createOnCompletionAction(handler, onCompletionAction); 
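// For context, an editor's illustration (not part of this patch): the handler/onCompletionAction
// pair threaded through these private helpers originates from the public mutation API, e.g. a call
// like the following from application code, where `concatenatingSource` and `mainHandler` are
// assumed:
//
//   concatenatingSource.removeMediaSource(
//       /* index= */ 0,
//       mainHandler,
//       /* onCompletionAction= */ () -> {
//         // Runs via mainHandler once the playback thread has applied the removal.
//       });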
playbackThreadHandler .obtainMessage(MSG_REMOVE, new MessageData<>(fromIndex, toIndex, callbackAction)) @@ -582,9 +613,10 @@ private void movePublicMediaSource( @Nullable Handler handler, @Nullable Runnable onCompletionAction) { Assertions.checkArgument((handler == null) == (onCompletionAction == null)); - Handler playbackThreadHandler = this.playbackThreadHandler; + @Nullable Handler playbackThreadHandler = this.playbackThreadHandler; mediaSourcesPublic.add(newIndex, mediaSourcesPublic.remove(currentIndex)); if (playbackThreadHandler != null) { + @Nullable HandlerAndRunnable callbackAction = createOnCompletionAction(handler, onCompletionAction); playbackThreadHandler .obtainMessage(MSG_MOVE, new MessageData<>(currentIndex, newIndex, callbackAction)) @@ -598,7 +630,7 @@ private void movePublicMediaSource( private void setPublicShuffleOrder( ShuffleOrder shuffleOrder, @Nullable Handler handler, @Nullable Runnable onCompletionAction) { Assertions.checkArgument((handler == null) == (onCompletionAction == null)); - Handler playbackThreadHandler = this.playbackThreadHandler; + @Nullable Handler playbackThreadHandler = this.playbackThreadHandler; if (playbackThreadHandler != null) { int size = getSize(); if (shuffleOrder.getLength() != size) { @@ -607,6 +639,7 @@ private void setPublicShuffleOrder( .cloneAndClear() .cloneAndInsert(/* insertionIndex= */ 0, /* insertionCount= */ size); } + @Nullable HandlerAndRunnable callbackAction = createOnCompletionAction(handler, onCompletionAction); playbackThreadHandler .obtainMessage( @@ -754,9 +787,6 @@ private void addMediaSourceInternal(int newIndex, MediaSourceHolder newMediaSour } private void updateMediaSourceInternal(MediaSourceHolder mediaSourceHolder, Timeline timeline) { - if (mediaSourceHolder == null) { - throw new IllegalArgumentException(); - } if (mediaSourceHolder.childIndex + 1 < mediaSourceHolders.size()) { MediaSourceHolder nextHolder = mediaSourceHolders.get(mediaSourceHolder.childIndex + 1); int windowOffsetUpdate = @@ -780,8 +810,8 @@ private void removeMediaSourceInternal(int index) { } private void moveMediaSourceInternal(int currentIndex, int newIndex) { - int startIndex = Math.min(currentIndex, newIndex); - int endIndex = Math.max(currentIndex, newIndex); + int startIndex = min(currentIndex, newIndex); + int endIndex = max(currentIndex, newIndex); int windowOffset = mediaSourceHolders.get(startIndex).firstWindowIndexInChild; mediaSourceHolders.add(newIndex, mediaSourceHolders.remove(currentIndex)); for (int i = startIndex; i <= endIndex; i++) { @@ -929,7 +959,7 @@ protected int getChildIndexByWindowIndex(int windowIndex) { @Override protected int getChildIndexByChildUid(Object childUid) { - Integer index = childIndexByUid.get(childUid); + @Nullable Integer index = childIndexByUid.get(childUid); return index == null ? C.INDEX_UNSET : index; } @@ -964,8 +994,8 @@ public int getPeriodCount() { } } - /** Dummy media source which does nothing and does not support creating periods. */ - private static final class DummyMediaSource extends BaseMediaSource { + /** A media source which does nothing and does not support creating periods. 
*/ + private static final class FakeMediaSource extends BaseMediaSource { @Override protected void prepareSourceInternal(@Nullable TransferListener mediaTransferListener) { @@ -973,9 +1003,8 @@ protected void prepareSourceInternal(@Nullable TransferListener mediaTransferLis } @Override - @Nullable - public Object getTag() { - return null; + public MediaItem getMediaItem() { + return PLACEHOLDER_MEDIA_ITEM; } @Override @@ -984,7 +1013,7 @@ protected void releaseSourceInternal() { } @Override - public void maybeThrowSourceInfoRefreshError() throws IOException { + public void maybeThrowSourceInfoRefreshError() { // Do nothing. } @@ -1014,4 +1043,3 @@ public void dispatch() { } } } - diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ConcatenatingMediaSource2.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ConcatenatingMediaSource2.java new file mode 100644 index 0000000000..10af495e01 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ConcatenatingMediaSource2.java @@ -0,0 +1,608 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + +import android.content.Context; +import android.net.Uri; +import android.os.Handler; +import android.os.Message; +import android.util.Pair; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.MediaItem; +import com.google.android.exoplayer2.Player; +import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.upstream.Allocator; +import com.google.android.exoplayer2.upstream.TransferListener; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.util.IdentityHashMap; + +/** + * Concatenates multiple {@link MediaSource MediaSources}, combining everything in one single {@link + * Timeline.Window}. + * + *

<p>This class can only be used under the following conditions: + * + * <ul> + *
  <li>All sources must be non-empty. + *
  <li>All {@link Timeline.Window Windows} defined by the sources, except the first, must have an + * {@link Timeline.Window#getPositionInFirstPeriodUs() period offset} of zero. This excludes, + * for example, live streams or {@link ClippingMediaSource} with a non-zero start position. + * </ul>
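 * <p>A usage sketch for orientation (an editor's illustration, not part of this patch; {@code player},
 * {@code context} and the item URIs are assumed):
 * <pre>{@code
 * ConcatenatingMediaSource2 concatenated =
 *     new ConcatenatingMediaSource2.Builder()
 *         .useDefaultMediaSourceFactory(context)
 *         .add(MediaItem.fromUri(firstItemUri))
 *         .add(MediaItem.fromUri(secondItemUri), 5_000) // initial placeholder duration in ms
 *         .build();
 * player.setMediaSource(concatenated);
 * }</pre>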
      + */ +public final class ConcatenatingMediaSource2 extends CompositeMediaSource { + + /** A builder for {@link ConcatenatingMediaSource2} instances. */ + public static final class Builder { + + private final ImmutableList.Builder mediaSourceHoldersBuilder; + + private int index; + @Nullable private MediaItem mediaItem; + @Nullable private MediaSource.Factory mediaSourceFactory; + + /** Creates the builder. */ + public Builder() { + mediaSourceHoldersBuilder = ImmutableList.builder(); + } + + /** + * Instructs the builder to use a {@link DefaultMediaSourceFactory} to convert {@link MediaItem + * MediaItems} to {@link MediaSource MediaSources} for all future calls to {@link + * #add(MediaItem)} or {@link #add(MediaItem, long)}. + * + * @param context A {@link Context}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder useDefaultMediaSourceFactory(Context context) { + return setMediaSourceFactory(new DefaultMediaSourceFactory(context)); + } + + /** + * Sets a {@link MediaSource.Factory} that is used to convert {@link MediaItem MediaItems} to + * {@link MediaSource MediaSources} for all future calls to {@link #add(MediaItem)} or {@link + * #add(MediaItem, long)}. + * + * @param mediaSourceFactory A {@link MediaSource.Factory}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setMediaSourceFactory(MediaSource.Factory mediaSourceFactory) { + this.mediaSourceFactory = checkNotNull(mediaSourceFactory); + return this; + } + + /** + * Sets the {@link MediaItem} to be used for the concatenated media source. + * + *

<p>This {@link MediaItem} will be used as {@link Timeline.Window#mediaItem} for the + * concatenated source and will be returned by {@link Player#getCurrentMediaItem()}. + * + *

<p>The default is {@code MediaItem.fromUri(Uri.EMPTY)}. + * + * @param mediaItem The {@link MediaItem}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setMediaItem(MediaItem mediaItem) { + this.mediaItem = mediaItem; + return this; + } + + /** + * Adds a {@link MediaItem} to the concatenation. + * + *

<p>{@link #useDefaultMediaSourceFactory(Context)} or {@link + * #setMediaSourceFactory(MediaSource.Factory)} must be called before this method. + * + *

      This method must not be used with media items for progressive media that can't provide + * their duration with their first {@link Timeline} update. Use {@link #add(MediaItem, long)} + * instead. + * + * @param mediaItem The {@link MediaItem}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder add(MediaItem mediaItem) { + return add(mediaItem, /* initialPlaceholderDurationMs= */ C.TIME_UNSET); + } + + /** + * Adds a {@link MediaItem} to the concatenation and specifies its initial placeholder duration + * used while the actual duration is still unknown. + * + *

<p>{@link #useDefaultMediaSourceFactory(Context)} or {@link + * #setMediaSourceFactory(MediaSource.Factory)} must be called before this method. + * + *

      Setting a placeholder duration is required for media items for progressive media that + * can't provide their duration with their first {@link Timeline} update. It may also be used + * for other items to make the duration known immediately. + * + * @param mediaItem The {@link MediaItem}. + * @param initialPlaceholderDurationMs The initial placeholder duration in milliseconds used + * while the actual duration is still unknown, or {@link C#TIME_UNSET} to not define one. + * The placeholder duration is used for every {@link Timeline.Window} defined by {@link + * Timeline} of the {@link MediaItem}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder add(MediaItem mediaItem, long initialPlaceholderDurationMs) { + checkNotNull(mediaItem); + checkStateNotNull( + mediaSourceFactory, + "Must use useDefaultMediaSourceFactory or setMediaSourceFactory first."); + return add(mediaSourceFactory.createMediaSource(mediaItem), initialPlaceholderDurationMs); + } + + /** + * Adds a {@link MediaSource} to the concatenation. + * + *

      This method must not be used for sources like {@link ProgressiveMediaSource} that can't + * provide their duration with their first {@link Timeline} update. Use {@link #add(MediaSource, + * long)} instead. + * + * @param mediaSource The {@link MediaSource}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder add(MediaSource mediaSource) { + return add(mediaSource, /* initialPlaceholderDurationMs= */ C.TIME_UNSET); + } + + /** + * Adds a {@link MediaSource} to the concatenation and specifies its initial placeholder + * duration used while the actual duration is still unknown. + * + *

      Setting a placeholder duration is required for sources like {@link ProgressiveMediaSource} + * that can't provide their duration with their first {@link Timeline} update. It may also be + * used for other sources to make the duration known immediately. + * + * @param mediaSource The {@link MediaSource}. + * @param initialPlaceholderDurationMs The initial placeholder duration in milliseconds used + * while the actual duration is still unknown, or {@link C#TIME_UNSET} to not define one. + * The placeholder duration is used for every {@link Timeline.Window} defined by {@link + * Timeline} of the {@link MediaSource}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder add(MediaSource mediaSource, long initialPlaceholderDurationMs) { + checkNotNull(mediaSource); + checkState( + !(mediaSource instanceof ProgressiveMediaSource) + || initialPlaceholderDurationMs != C.TIME_UNSET, + "Progressive media source must define an initial placeholder duration."); + mediaSourceHoldersBuilder.add( + new MediaSourceHolder(mediaSource, index++, Util.msToUs(initialPlaceholderDurationMs))); + return this; + } + + /** Builds the concatenating media source. */ + public ConcatenatingMediaSource2 build() { + checkArgument(index > 0, "Must add at least one source to the concatenation."); + if (mediaItem == null) { + mediaItem = MediaItem.fromUri(Uri.EMPTY); + } + return new ConcatenatingMediaSource2(mediaItem, mediaSourceHoldersBuilder.build()); + } + } + + private static final int MSG_UPDATE_TIMELINE = 0; + + private final MediaItem mediaItem; + private final ImmutableList mediaSourceHolders; + private final IdentityHashMap mediaSourceByMediaPeriod; + + @Nullable private Handler playbackThreadHandler; + private boolean timelineUpdateScheduled; + + private ConcatenatingMediaSource2( + MediaItem mediaItem, ImmutableList mediaSourceHolders) { + this.mediaItem = mediaItem; + this.mediaSourceHolders = mediaSourceHolders; + mediaSourceByMediaPeriod = new IdentityHashMap<>(); + } + + @Nullable + @Override + public Timeline getInitialTimeline() { + return maybeCreateConcatenatedTimeline(); + } + + @Override + public MediaItem getMediaItem() { + return mediaItem; + } + + @Override + protected void prepareSourceInternal(@Nullable TransferListener mediaTransferListener) { + super.prepareSourceInternal(mediaTransferListener); + playbackThreadHandler = new Handler(/* callback= */ this::handleMessage); + for (int i = 0; i < mediaSourceHolders.size(); i++) { + MediaSourceHolder holder = mediaSourceHolders.get(i); + prepareChildSource(/* id= */ i, holder.mediaSource); + } + scheduleTimelineUpdate(); + } + + @SuppressWarnings("MissingSuperCall") + @Override + protected void enableInternal() { + // Suppress enabling all child sources here as they can be lazily enabled when creating periods. 
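// (Editor's note, for orientation; not part of this patch.) The lazy pattern referenced above is
// visible below: createPeriod() calls enableChildSource(holder.index) on demand, and
// disableUnusedMediaSources() disables a child again once it has no active media periods.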
+ } + + @Override + public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long startPositionUs) { + int holderIndex = getChildIndex(id.periodUid); + MediaSourceHolder holder = mediaSourceHolders.get(holderIndex); + MediaPeriodId childMediaPeriodId = + id.copyWithPeriodUid(getChildPeriodUid(id.periodUid)) + .copyWithWindowSequenceNumber( + getChildWindowSequenceNumber( + id.windowSequenceNumber, mediaSourceHolders.size(), holder.index)); + enableChildSource(holder.index); + holder.activeMediaPeriods++; + MediaPeriod mediaPeriod = + holder.mediaSource.createPeriod(childMediaPeriodId, allocator, startPositionUs); + mediaSourceByMediaPeriod.put(mediaPeriod, holder); + disableUnusedMediaSources(); + return mediaPeriod; + } + + @Override + public void releasePeriod(MediaPeriod mediaPeriod) { + MediaSourceHolder holder = checkNotNull(mediaSourceByMediaPeriod.remove(mediaPeriod)); + holder.mediaSource.releasePeriod(mediaPeriod); + holder.activeMediaPeriods--; + if (!mediaSourceByMediaPeriod.isEmpty()) { + disableUnusedMediaSources(); + } + } + + @Override + protected void releaseSourceInternal() { + super.releaseSourceInternal(); + if (playbackThreadHandler != null) { + playbackThreadHandler.removeCallbacksAndMessages(null); + playbackThreadHandler = null; + } + timelineUpdateScheduled = false; + } + + @Override + protected void onChildSourceInfoRefreshed( + Integer childSourceId, MediaSource mediaSource, Timeline newTimeline) { + scheduleTimelineUpdate(); + } + + @Override + @Nullable + protected MediaPeriodId getMediaPeriodIdForChildMediaPeriodId( + Integer childSourceId, MediaPeriodId mediaPeriodId) { + int childIndex = + getChildIndexFromChildWindowSequenceNumber( + mediaPeriodId.windowSequenceNumber, mediaSourceHolders.size()); + if (childSourceId != childIndex) { + // Ensure the reported media period id has the expected window sequence number. Otherwise it + // does not belong to this child source. 
+ return null; + } + long windowSequenceNumber = + getWindowSequenceNumberFromChildWindowSequenceNumber( + mediaPeriodId.windowSequenceNumber, mediaSourceHolders.size()); + Object periodUid = getPeriodUid(childSourceId, mediaPeriodId.periodUid); + return mediaPeriodId + .copyWithPeriodUid(periodUid) + .copyWithWindowSequenceNumber(windowSequenceNumber); + } + + @Override + protected int getWindowIndexForChildWindowIndex(Integer childSourceId, int windowIndex) { + return 0; + } + + private boolean handleMessage(Message msg) { + if (msg.what == MSG_UPDATE_TIMELINE) { + updateTimeline(); + } + return true; + } + + private void scheduleTimelineUpdate() { + if (!timelineUpdateScheduled) { + checkNotNull(playbackThreadHandler).obtainMessage(MSG_UPDATE_TIMELINE).sendToTarget(); + timelineUpdateScheduled = true; + } + } + + private void updateTimeline() { + timelineUpdateScheduled = false; + @Nullable ConcatenatedTimeline timeline = maybeCreateConcatenatedTimeline(); + if (timeline != null) { + refreshSourceInfo(timeline); + } + } + + private void disableUnusedMediaSources() { + for (int i = 0; i < mediaSourceHolders.size(); i++) { + MediaSourceHolder holder = mediaSourceHolders.get(i); + if (holder.activeMediaPeriods == 0) { + disableChildSource(holder.index); + } + } + } + + @Nullable + private ConcatenatedTimeline maybeCreateConcatenatedTimeline() { + Timeline.Window window = new Timeline.Window(); + Timeline.Period period = new Timeline.Period(); + ImmutableList.Builder timelinesBuilder = ImmutableList.builder(); + ImmutableList.Builder firstPeriodIndicesBuilder = ImmutableList.builder(); + ImmutableList.Builder periodOffsetsInWindowUsBuilder = ImmutableList.builder(); + int periodCount = 0; + boolean isSeekable = true; + boolean isDynamic = false; + long durationUs = 0; + long defaultPositionUs = 0; + long nextPeriodOffsetInWindowUs = 0; + boolean manifestsAreIdentical = true; + boolean hasInitialManifest = false; + @Nullable Object initialManifest = null; + for (int i = 0; i < mediaSourceHolders.size(); i++) { + MediaSourceHolder holder = mediaSourceHolders.get(i); + Timeline timeline = holder.mediaSource.getTimeline(); + checkArgument(!timeline.isEmpty(), "Can't concatenate empty child Timeline."); + timelinesBuilder.add(timeline); + firstPeriodIndicesBuilder.add(periodCount); + periodCount += timeline.getPeriodCount(); + for (int j = 0; j < timeline.getWindowCount(); j++) { + timeline.getWindow(/* windowIndex= */ j, window); + if (!hasInitialManifest) { + initialManifest = window.manifest; + hasInitialManifest = true; + } + manifestsAreIdentical = + manifestsAreIdentical && Util.areEqual(initialManifest, window.manifest); + + long windowDurationUs = window.durationUs; + if (windowDurationUs == C.TIME_UNSET) { + if (holder.initialPlaceholderDurationUs == C.TIME_UNSET) { + // Source duration isn't known yet and we have no placeholder duration. + return null; + } + windowDurationUs = holder.initialPlaceholderDurationUs; + } + durationUs += windowDurationUs; + if (holder.index == 0 && j == 0) { + defaultPositionUs = window.defaultPositionUs; + nextPeriodOffsetInWindowUs = -window.positionInFirstPeriodUs; + } else { + checkArgument( + window.positionInFirstPeriodUs == 0, + "Can't concatenate windows. A window has a non-zero offset in a period."); + } + // Assume placeholder windows are seekable to not prevent seeking in other periods. 
+ isSeekable &= window.isSeekable || window.isPlaceholder; + isDynamic |= window.isDynamic; + } + int childPeriodCount = timeline.getPeriodCount(); + for (int j = 0; j < childPeriodCount; j++) { + periodOffsetsInWindowUsBuilder.add(nextPeriodOffsetInWindowUs); + timeline.getPeriod(/* periodIndex= */ j, period); + long periodDurationUs = period.durationUs; + if (periodDurationUs == C.TIME_UNSET) { + checkArgument( + childPeriodCount == 1, + "Can't concatenate multiple periods with unknown duration in one window."); + long windowDurationUs = + window.durationUs != C.TIME_UNSET + ? window.durationUs + : holder.initialPlaceholderDurationUs; + periodDurationUs = windowDurationUs + window.positionInFirstPeriodUs; + } + nextPeriodOffsetInWindowUs += periodDurationUs; + } + } + return new ConcatenatedTimeline( + mediaItem, + timelinesBuilder.build(), + firstPeriodIndicesBuilder.build(), + periodOffsetsInWindowUsBuilder.build(), + isSeekable, + isDynamic, + durationUs, + defaultPositionUs, + manifestsAreIdentical ? initialManifest : null); + } + + /** + * Returns the period uid for the concatenated source from the child index and child period uid. + */ + private static Object getPeriodUid(int childIndex, Object childPeriodUid) { + return Pair.create(childIndex, childPeriodUid); + } + + /** Returns the child index from the period uid of the concatenated source. */ + @SuppressWarnings("unchecked") + private static int getChildIndex(Object periodUid) { + return ((Pair) periodUid).first; + } + + /** Returns the uid of child period from the period uid of the concatenated source. */ + @SuppressWarnings("unchecked") + private static Object getChildPeriodUid(Object periodUid) { + return ((Pair) periodUid).second; + } + + /** Returns the window sequence number used for the child source. */ + private static long getChildWindowSequenceNumber( + long windowSequenceNumber, int childCount, int childIndex) { + return windowSequenceNumber * childCount + childIndex; + } + + /** Returns the index of the child source from a child window sequence number. */ + private static int getChildIndexFromChildWindowSequenceNumber( + long childWindowSequenceNumber, int childCount) { + return (int) (childWindowSequenceNumber % childCount); + } + + /** Returns the concatenated window sequence number from a child window sequence number. 
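 * <p>A worked example (illustrative): with {@code childCount = 3}, composite window sequence number 7
 * for the child at index 1 is encoded as {@code 7 * 3 + 1 = 22}; decoding gives {@code 22 % 3 = 1}
 * for the child index and {@code 22 / 3 = 7} for the composite sequence number again.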
*/ + private static long getWindowSequenceNumberFromChildWindowSequenceNumber( + long childWindowSequenceNumber, int childCount) { + return childWindowSequenceNumber / childCount; + } + + /* package */ static final class MediaSourceHolder { + + public final MaskingMediaSource mediaSource; + public final int index; + public final long initialPlaceholderDurationUs; + + public int activeMediaPeriods; + + public MediaSourceHolder( + MediaSource mediaSource, int index, long initialPlaceholderDurationUs) { + this.mediaSource = new MaskingMediaSource(mediaSource, /* useLazyPreparation= */ false); + this.index = index; + this.initialPlaceholderDurationUs = initialPlaceholderDurationUs; + } + } + + private static final class ConcatenatedTimeline extends Timeline { + + private final MediaItem mediaItem; + private final ImmutableList timelines; + private final ImmutableList firstPeriodIndices; + private final ImmutableList periodOffsetsInWindowUs; + private final boolean isSeekable; + private final boolean isDynamic; + private final long durationUs; + private final long defaultPositionUs; + @Nullable private final Object manifest; + + public ConcatenatedTimeline( + MediaItem mediaItem, + ImmutableList timelines, + ImmutableList firstPeriodIndices, + ImmutableList periodOffsetsInWindowUs, + boolean isSeekable, + boolean isDynamic, + long durationUs, + long defaultPositionUs, + @Nullable Object manifest) { + this.mediaItem = mediaItem; + this.timelines = timelines; + this.firstPeriodIndices = firstPeriodIndices; + this.periodOffsetsInWindowUs = periodOffsetsInWindowUs; + this.isSeekable = isSeekable; + this.isDynamic = isDynamic; + this.durationUs = durationUs; + this.defaultPositionUs = defaultPositionUs; + this.manifest = manifest; + } + + @Override + public int getWindowCount() { + return 1; + } + + @Override + public int getPeriodCount() { + return periodOffsetsInWindowUs.size(); + } + + @Override + public final Window getWindow( + int windowIndex, Window window, long defaultPositionProjectionUs) { + return window.set( + Window.SINGLE_WINDOW_UID, + mediaItem, + manifest, + /* presentationStartTimeMs= */ C.TIME_UNSET, + /* windowStartTimeMs= */ C.TIME_UNSET, + /* elapsedRealtimeEpochOffsetMs= */ C.TIME_UNSET, + isSeekable, + isDynamic, + /* liveConfiguration= */ null, + defaultPositionUs, + durationUs, + /* firstPeriodIndex= */ 0, + /* lastPeriodIndex= */ getPeriodCount() - 1, + /* positionInFirstPeriodUs= */ -periodOffsetsInWindowUs.get(0)); + } + + @Override + public final Period getPeriodByUid(Object periodUid, Period period) { + int childIndex = getChildIndex(periodUid); + Object childPeriodUid = getChildPeriodUid(periodUid); + Timeline timeline = timelines.get(childIndex); + int periodIndex = + firstPeriodIndices.get(childIndex) + timeline.getIndexOfPeriod(childPeriodUid); + timeline.getPeriodByUid(childPeriodUid, period); + period.windowIndex = 0; + period.positionInWindowUs = periodOffsetsInWindowUs.get(periodIndex); + period.uid = periodUid; + return period; + } + + @Override + public final Period getPeriod(int periodIndex, Period period, boolean setIds) { + int childIndex = getChildIndexByPeriodIndex(periodIndex); + int firstPeriodIndexInChild = firstPeriodIndices.get(childIndex); + timelines.get(childIndex).getPeriod(periodIndex - firstPeriodIndexInChild, period, setIds); + period.windowIndex = 0; + period.positionInWindowUs = periodOffsetsInWindowUs.get(periodIndex); + if (setIds) { + period.uid = getPeriodUid(childIndex, checkNotNull(period.uid)); + } + return period; + } + + @Override + 
public final int getIndexOfPeriod(Object uid) { + if (!(uid instanceof Pair) || !(((Pair) uid).first instanceof Integer)) { + return C.INDEX_UNSET; + } + int childIndex = getChildIndex(uid); + Object periodUid = getChildPeriodUid(uid); + int periodIndexInChild = timelines.get(childIndex).getIndexOfPeriod(periodUid); + return periodIndexInChild == C.INDEX_UNSET + ? C.INDEX_UNSET + : firstPeriodIndices.get(childIndex) + periodIndexInChild; + } + + @Override + public final Object getUidOfPeriod(int periodIndex) { + int childIndex = getChildIndexByPeriodIndex(periodIndex); + int firstPeriodIndexInChild = firstPeriodIndices.get(childIndex); + Object periodUidInChild = + timelines.get(childIndex).getUidOfPeriod(periodIndex - firstPeriodIndexInChild); + return getPeriodUid(childIndex, periodUidInChild); + } + + private int getChildIndexByPeriodIndex(int periodIndex) { + return Util.binarySearchFloor( + firstPeriodIndices, periodIndex + 1, /* inclusive= */ false, /* stayInBounds= */ false); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/DefaultCompositeSequenceableLoaderFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/DefaultCompositeSequenceableLoaderFactory.java index 759b0824af..ce26ec584a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/DefaultCompositeSequenceableLoaderFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/DefaultCompositeSequenceableLoaderFactory.java @@ -15,9 +15,7 @@ */ package com.google.android.exoplayer2.source; -/** - * Default implementation of {@link CompositeSequenceableLoaderFactory}. - */ +/** Default implementation of {@link CompositeSequenceableLoaderFactory}. */ public final class DefaultCompositeSequenceableLoaderFactory implements CompositeSequenceableLoaderFactory { @@ -25,5 +23,4 @@ public final class DefaultCompositeSequenceableLoaderFactory public SequenceableLoader createCompositeSequenceableLoader(SequenceableLoader... loaders) { return new CompositeSequenceableLoader(loaders); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/DefaultMediaSourceEventListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/DefaultMediaSourceEventListener.java deleted file mode 100644 index fbb3a86221..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/DefaultMediaSourceEventListener.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (C) 2018 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.source; - -/** - * @deprecated Use {@link MediaSourceEventListener} interface directly for selective overrides as - * all methods are implemented as no-op default methods. 
- */ -@Deprecated -public abstract class DefaultMediaSourceEventListener implements MediaSourceEventListener {} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/DefaultMediaSourceFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/DefaultMediaSourceFactory.java new file mode 100644 index 0000000000..275f98db7a --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/DefaultMediaSourceFactory.java @@ -0,0 +1,733 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; + +import android.content.Context; +import android.net.Uri; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.MediaItem; +import com.google.android.exoplayer2.drm.DrmSessionManagerProvider; +import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory; +import com.google.android.exoplayer2.extractor.Extractor; +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.ExtractorsFactory; +import com.google.android.exoplayer2.extractor.PositionHolder; +import com.google.android.exoplayer2.extractor.SeekMap; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.source.ads.AdsLoader; +import com.google.android.exoplayer2.source.ads.AdsMediaSource; +import com.google.android.exoplayer2.text.SubtitleDecoderFactory; +import com.google.android.exoplayer2.text.SubtitleExtractor; +import com.google.android.exoplayer2.ui.AdViewProvider; +import com.google.android.exoplayer2.upstream.DataSource; +import com.google.android.exoplayer2.upstream.DataSpec; +import com.google.android.exoplayer2.upstream.DefaultDataSource; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Supplier; +import com.google.common.collect.ImmutableList; +import com.google.common.primitives.Ints; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.checkerframework.checker.nullness.compatqual.NullableType; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * The default {@link MediaSource.Factory} implementation. + * + *

<p>This implementation delegates calls to {@link #createMediaSource(MediaItem)} to the following + * factories: + * + * <ul> + *
  <li>{@code DashMediaSource.Factory} if the item's {@link MediaItem.LocalConfiguration#uri uri} + * ends in '.mpd' or if its {@link MediaItem.LocalConfiguration#mimeType mimeType field} is + * explicitly set to {@link MimeTypes#APPLICATION_MPD} (Requires the exoplayer-dash module + * to be added to the app). + *
  <li>{@code HlsMediaSource.Factory} if the item's {@link MediaItem.LocalConfiguration#uri uri} + * ends in '.m3u8' or if its {@link MediaItem.LocalConfiguration#mimeType mimeType field} is + * explicitly set to {@link MimeTypes#APPLICATION_M3U8} (Requires the exoplayer-hls module to + * be added to the app). + *
  <li>{@code SsMediaSource.Factory} if the item's {@link MediaItem.LocalConfiguration#uri uri} + * ends in '.ism', '.ism/Manifest' or if its {@link MediaItem.LocalConfiguration#mimeType + * mimeType field} is explicitly set to {@link MimeTypes#APPLICATION_SS} (Requires the + * exoplayer-smoothstreaming module to be added to the app). + *
  <li>{@link ProgressiveMediaSource.Factory} serves as a fallback if the item's {@link + * MediaItem.LocalConfiguration#uri uri} doesn't match one of the above. It tries to infer the + * required extractor by using the {@link DefaultExtractorsFactory} or the {@link + * ExtractorsFactory} provided in the constructor. An {@link UnrecognizedInputFormatException} + * is thrown if none of the available extractors can read the stream. + *
  </ul> + *
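 * <p>A usage sketch for orientation (an editor's illustration, not part of this patch; {@code context},
 * {@code player} and the stream URI are assumed, and the HLS module must be available):
 * <pre>{@code
 * DefaultMediaSourceFactory mediaSourceFactory = new DefaultMediaSourceFactory(context);
 * MediaSource mediaSource =
 *     mediaSourceFactory.createMediaSource(MediaItem.fromUri("https://example.com/media.m3u8"));
 * player.setMediaSource(mediaSource); // Picks the HLS factory because the uri ends in '.m3u8'.
 * }</pre>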

<h2>Ad support for media items with ad tag URIs</h2> + * + * <p>
      To support media items with {@link MediaItem.LocalConfiguration#adsConfiguration ads + * configuration}, {@link #setAdsLoaderProvider} and {@link #setAdViewProvider} need to be called to + * configure the factory with the required providers. + */ +@SuppressWarnings("deprecation") // Implement deprecated type for backwards compatibility. +public final class DefaultMediaSourceFactory implements MediaSourceFactory { + + /** + * @deprecated Use {@link AdsLoader.Provider} instead. + */ + @Deprecated + public interface AdsLoaderProvider extends AdsLoader.Provider {} + + private static final String TAG = "DMediaSourceFactory"; + + private final DelegateFactoryLoader delegateFactoryLoader; + + private DataSource.Factory dataSourceFactory; + @Nullable private MediaSource.Factory serverSideAdInsertionMediaSourceFactory; + @Nullable private AdsLoader.Provider adsLoaderProvider; + @Nullable private AdViewProvider adViewProvider; + @Nullable private LoadErrorHandlingPolicy loadErrorHandlingPolicy; + private long liveTargetOffsetMs; + private long liveMinOffsetMs; + private long liveMaxOffsetMs; + private float liveMinSpeed; + private float liveMaxSpeed; + private boolean useProgressiveMediaSourceForSubtitles; + + /** + * Creates a new instance. + * + * @param context Any context. + */ + public DefaultMediaSourceFactory(Context context) { + this(new DefaultDataSource.Factory(context)); + } + + /** + * Creates a new instance. + * + *

      Note that this constructor is only useful to try and ensure that ExoPlayer's {@link + * DefaultExtractorsFactory} can be removed by ProGuard or R8. + * + * @param context Any context. + * @param extractorsFactory An {@link ExtractorsFactory} used to extract progressive media from + * its container. + */ + public DefaultMediaSourceFactory(Context context, ExtractorsFactory extractorsFactory) { + this(new DefaultDataSource.Factory(context), extractorsFactory); + } + + /** + * Creates a new instance. + * + *

      Note that this constructor is only useful to try and ensure that ExoPlayer's {@link + * DefaultDataSource.Factory} can be removed by ProGuard or R8. + * + * @param dataSourceFactory A {@link DataSource.Factory} to create {@link DataSource} instances + * for requesting media data. + */ + public DefaultMediaSourceFactory(DataSource.Factory dataSourceFactory) { + this(dataSourceFactory, new DefaultExtractorsFactory()); + } + + /** + * Creates a new instance. + * + *

      Note that this constructor is only useful to try and ensure that ExoPlayer's {@link + * DefaultDataSource.Factory} and {@link DefaultExtractorsFactory} can be removed by ProGuard or + * R8. + * + * @param dataSourceFactory A {@link DataSource.Factory} to create {@link DataSource} instances + * for requesting media data. + * @param extractorsFactory An {@link ExtractorsFactory} used to extract progressive media from + * its container. + */ + public DefaultMediaSourceFactory( + DataSource.Factory dataSourceFactory, ExtractorsFactory extractorsFactory) { + this.dataSourceFactory = dataSourceFactory; + delegateFactoryLoader = new DelegateFactoryLoader(extractorsFactory); + delegateFactoryLoader.setDataSourceFactory(dataSourceFactory); + liveTargetOffsetMs = C.TIME_UNSET; + liveMinOffsetMs = C.TIME_UNSET; + liveMaxOffsetMs = C.TIME_UNSET; + liveMinSpeed = C.RATE_UNSET; + liveMaxSpeed = C.RATE_UNSET; + } + + /** + * Sets whether a {@link ProgressiveMediaSource} or {@link SingleSampleMediaSource} is constructed + * to handle {@link MediaItem.LocalConfiguration#subtitleConfigurations}. Defaults to false (i.e. + * {@link SingleSampleMediaSource}. + * + *
<p>
      This method is experimental, and will be renamed or removed in a future release. + * + * @param useProgressiveMediaSourceForSubtitles Indicates that {@link ProgressiveMediaSource} + * should be used for subtitles instead of {@link SingleSampleMediaSource}. + * @return This factory, for convenience. + */ + @CanIgnoreReturnValue + public DefaultMediaSourceFactory experimentalUseProgressiveMediaSourceForSubtitles( + boolean useProgressiveMediaSourceForSubtitles) { + this.useProgressiveMediaSourceForSubtitles = useProgressiveMediaSourceForSubtitles; + return this; + } + + /** + * Sets the {@link AdsLoader.Provider} that provides {@link AdsLoader} instances for media items + * that have {@link MediaItem.LocalConfiguration#adsConfiguration ads configurations}. + * + *
<p>
      This will override or clear the {@link AdsLoader.Provider} set by {@link + * #setLocalAdInsertionComponents(AdsLoader.Provider, AdViewProvider)}. + * + * @param adsLoaderProvider A provider for {@link AdsLoader} instances. + * @return This factory, for convenience. + * @deprecated Use {@link #setLocalAdInsertionComponents(AdsLoader.Provider, AdViewProvider)} + * instead. + */ + @CanIgnoreReturnValue + @Deprecated + public DefaultMediaSourceFactory setAdsLoaderProvider( + @Nullable AdsLoader.Provider adsLoaderProvider) { + this.adsLoaderProvider = adsLoaderProvider; + return this; + } + + /** + * Sets the {@link AdViewProvider} that provides information about views for the ad playback UI. + * + *
<p>
      This will override or clear the {@link AdViewProvider} set by {@link + * #setLocalAdInsertionComponents(AdsLoader.Provider, AdViewProvider)}. + * + * @param adViewProvider A provider for information about views for the ad playback UI. + * @return This factory, for convenience. + * @deprecated Use {@link #setLocalAdInsertionComponents(AdsLoader.Provider, AdViewProvider)} + * instead. + */ + @CanIgnoreReturnValue + @Deprecated + public DefaultMediaSourceFactory setAdViewProvider(@Nullable AdViewProvider adViewProvider) { + this.adViewProvider = adViewProvider; + return this; + } + + /** + * Sets the components required for local ad insertion for media items that have {@link + * MediaItem.LocalConfiguration#adsConfiguration ads configurations} + * + *
<p>
      This will override the values set by {@link #setAdsLoaderProvider(AdsLoader.Provider)} and + * {@link #setAdViewProvider(AdViewProvider)}. + * + * @param adsLoaderProvider A provider for {@link AdsLoader} instances. + * @param adViewProvider A provider for information about views for the ad playback UI. + * @return This factory, for convenience. + */ + @CanIgnoreReturnValue + public DefaultMediaSourceFactory setLocalAdInsertionComponents( + AdsLoader.Provider adsLoaderProvider, AdViewProvider adViewProvider) { + this.adsLoaderProvider = checkNotNull(adsLoaderProvider); + this.adViewProvider = checkNotNull(adViewProvider); + return this; + } + + /** + * Clear any values set via {@link #setLocalAdInsertionComponents(AdsLoader.Provider, + * AdViewProvider)}. + * + *
<p>
      This will also clear any values set by {@link #setAdsLoaderProvider(AdsLoader.Provider)} and + * {@link #setAdViewProvider(AdViewProvider)}. + * + * @return This factory, for convenience. + */ + @CanIgnoreReturnValue + public DefaultMediaSourceFactory clearLocalAdInsertionComponents() { + this.adsLoaderProvider = null; + this.adViewProvider = null; + return this; + } + + /** + * Sets the {@link DataSource.Factory} used to create {@link DataSource} instances for requesting + * media data. + * + * @param dataSourceFactory The {@link DataSource.Factory}. + * @return This factory, for convenience. + */ + @CanIgnoreReturnValue + public DefaultMediaSourceFactory setDataSourceFactory(DataSource.Factory dataSourceFactory) { + this.dataSourceFactory = dataSourceFactory; + delegateFactoryLoader.setDataSourceFactory(dataSourceFactory); + return this; + } + + /** + * Sets the {@link MediaSource.Factory} used to handle {@link MediaItem} instances containing a + * {@link Uri} identified as resolving to content with server side ad insertion (SSAI). + * + *
<p>
      SSAI URIs are those with a {@link Uri#getScheme() scheme} of {@link C#SSAI_SCHEME}. + * + * @param serverSideAdInsertionMediaSourceFactory The {@link MediaSource.Factory} for SSAI + * content, or {@code null} to remove a previously set {@link MediaSource.Factory}. + * @return This factory, for convenience. + */ + @CanIgnoreReturnValue + public DefaultMediaSourceFactory setServerSideAdInsertionMediaSourceFactory( + @Nullable MediaSource.Factory serverSideAdInsertionMediaSourceFactory) { + this.serverSideAdInsertionMediaSourceFactory = serverSideAdInsertionMediaSourceFactory; + return this; + } + + /** + * Sets the target live offset for live streams, in milliseconds. + * + * @param liveTargetOffsetMs The target live offset, in milliseconds, or {@link C#TIME_UNSET} to + * use the media-defined default. + * @return This factory, for convenience. + */ + @CanIgnoreReturnValue + public DefaultMediaSourceFactory setLiveTargetOffsetMs(long liveTargetOffsetMs) { + this.liveTargetOffsetMs = liveTargetOffsetMs; + return this; + } + + /** + * Sets the minimum offset from the live edge for live streams, in milliseconds. + * + * @param liveMinOffsetMs The minimum allowed live offset, in milliseconds, or {@link + * C#TIME_UNSET} to use the media-defined default. + * @return This factory, for convenience. + */ + @CanIgnoreReturnValue + public DefaultMediaSourceFactory setLiveMinOffsetMs(long liveMinOffsetMs) { + this.liveMinOffsetMs = liveMinOffsetMs; + return this; + } + + /** + * Sets the maximum offset from the live edge for live streams, in milliseconds. + * + * @param liveMaxOffsetMs The maximum allowed live offset, in milliseconds, or {@link + * C#TIME_UNSET} to use the media-defined default. + * @return This factory, for convenience. + */ + @CanIgnoreReturnValue + public DefaultMediaSourceFactory setLiveMaxOffsetMs(long liveMaxOffsetMs) { + this.liveMaxOffsetMs = liveMaxOffsetMs; + return this; + } + + /** + * Sets the minimum playback speed for live streams. + * + * @param minSpeed The minimum factor by which playback can be sped up for live streams, or {@link + * C#RATE_UNSET} to use the media-defined default. + * @return This factory, for convenience. + */ + @CanIgnoreReturnValue + public DefaultMediaSourceFactory setLiveMinSpeed(float minSpeed) { + this.liveMinSpeed = minSpeed; + return this; + } + + /** + * Sets the maximum playback speed for live streams. + * + * @param maxSpeed The maximum factor by which playback can be sped up for live streams, or {@link + * C#RATE_UNSET} to use the media-defined default. + * @return This factory, for convenience. + */ + @CanIgnoreReturnValue + public DefaultMediaSourceFactory setLiveMaxSpeed(float maxSpeed) { + this.liveMaxSpeed = maxSpeed; + return this; + } + + @CanIgnoreReturnValue + @Override + public DefaultMediaSourceFactory setDrmSessionManagerProvider( + DrmSessionManagerProvider drmSessionManagerProvider) { + delegateFactoryLoader.setDrmSessionManagerProvider( + checkNotNull( + drmSessionManagerProvider, + "MediaSource.Factory#setDrmSessionManagerProvider no longer handles null by" + + " instantiating a new DefaultDrmSessionManagerProvider. 
Explicitly construct and" + + " pass an instance in order to retain the old behavior.")); + return this; + } + + @CanIgnoreReturnValue + @Override + public DefaultMediaSourceFactory setLoadErrorHandlingPolicy( + LoadErrorHandlingPolicy loadErrorHandlingPolicy) { + this.loadErrorHandlingPolicy = + checkNotNull( + loadErrorHandlingPolicy, + "MediaSource.Factory#setLoadErrorHandlingPolicy no longer handles null by" + + " instantiating a new DefaultLoadErrorHandlingPolicy. Explicitly construct and" + + " pass an instance in order to retain the old behavior."); + delegateFactoryLoader.setLoadErrorHandlingPolicy(loadErrorHandlingPolicy); + return this; + } + + @Override + public @C.ContentType int[] getSupportedTypes() { + return delegateFactoryLoader.getSupportedTypes(); + } + + @Override + public MediaSource createMediaSource(MediaItem mediaItem) { + Assertions.checkNotNull(mediaItem.localConfiguration); + @Nullable String scheme = mediaItem.localConfiguration.uri.getScheme(); + if (scheme != null && scheme.equals(C.SSAI_SCHEME)) { + return checkNotNull(serverSideAdInsertionMediaSourceFactory).createMediaSource(mediaItem); + } + @C.ContentType + int type = + Util.inferContentTypeForUriAndMimeType( + mediaItem.localConfiguration.uri, mediaItem.localConfiguration.mimeType); + @Nullable + MediaSource.Factory mediaSourceFactory = delegateFactoryLoader.getMediaSourceFactory(type); + checkStateNotNull( + mediaSourceFactory, "No suitable media source factory found for content type: " + type); + + MediaItem.LiveConfiguration.Builder liveConfigurationBuilder = + mediaItem.liveConfiguration.buildUpon(); + if (mediaItem.liveConfiguration.targetOffsetMs == C.TIME_UNSET) { + liveConfigurationBuilder.setTargetOffsetMs(liveTargetOffsetMs); + } + if (mediaItem.liveConfiguration.minPlaybackSpeed == C.RATE_UNSET) { + liveConfigurationBuilder.setMinPlaybackSpeed(liveMinSpeed); + } + if (mediaItem.liveConfiguration.maxPlaybackSpeed == C.RATE_UNSET) { + liveConfigurationBuilder.setMaxPlaybackSpeed(liveMaxSpeed); + } + if (mediaItem.liveConfiguration.minOffsetMs == C.TIME_UNSET) { + liveConfigurationBuilder.setMinOffsetMs(liveMinOffsetMs); + } + if (mediaItem.liveConfiguration.maxOffsetMs == C.TIME_UNSET) { + liveConfigurationBuilder.setMaxOffsetMs(liveMaxOffsetMs); + } + MediaItem.LiveConfiguration liveConfiguration = liveConfigurationBuilder.build(); + // Make sure to retain the very same media item instance, if no value needs to be overridden. 
+ if (!liveConfiguration.equals(mediaItem.liveConfiguration)) { + mediaItem = mediaItem.buildUpon().setLiveConfiguration(liveConfiguration).build(); + } + + MediaSource mediaSource = mediaSourceFactory.createMediaSource(mediaItem); + + List subtitleConfigurations = + castNonNull(mediaItem.localConfiguration).subtitleConfigurations; + if (!subtitleConfigurations.isEmpty()) { + MediaSource[] mediaSources = new MediaSource[subtitleConfigurations.size() + 1]; + mediaSources[0] = mediaSource; + for (int i = 0; i < subtitleConfigurations.size(); i++) { + if (useProgressiveMediaSourceForSubtitles) { + Format format = + new Format.Builder() + .setSampleMimeType(subtitleConfigurations.get(i).mimeType) + .setLanguage(subtitleConfigurations.get(i).language) + .setSelectionFlags(subtitleConfigurations.get(i).selectionFlags) + .setRoleFlags(subtitleConfigurations.get(i).roleFlags) + .setLabel(subtitleConfigurations.get(i).label) + .setId(subtitleConfigurations.get(i).id) + .build(); + ExtractorsFactory extractorsFactory = + () -> + new Extractor[] { + SubtitleDecoderFactory.DEFAULT.supportsFormat(format) + ? new SubtitleExtractor( + SubtitleDecoderFactory.DEFAULT.createDecoder(format), format) + : new UnknownSubtitlesExtractor(format) + }; + ProgressiveMediaSource.Factory progressiveMediaSourceFactory = + new ProgressiveMediaSource.Factory(dataSourceFactory, extractorsFactory); + if (loadErrorHandlingPolicy != null) { + progressiveMediaSourceFactory.setLoadErrorHandlingPolicy(loadErrorHandlingPolicy); + } + mediaSources[i + 1] = + progressiveMediaSourceFactory.createMediaSource( + MediaItem.fromUri(subtitleConfigurations.get(i).uri.toString())); + } else { + SingleSampleMediaSource.Factory singleSampleMediaSourceFactory = + new SingleSampleMediaSource.Factory(dataSourceFactory); + if (loadErrorHandlingPolicy != null) { + singleSampleMediaSourceFactory.setLoadErrorHandlingPolicy(loadErrorHandlingPolicy); + } + mediaSources[i + 1] = + singleSampleMediaSourceFactory.createMediaSource( + subtitleConfigurations.get(i), /* durationUs= */ C.TIME_UNSET); + } + } + + mediaSource = new MergingMediaSource(mediaSources); + } + return maybeWrapWithAdsMediaSource(mediaItem, maybeClipMediaSource(mediaItem, mediaSource)); + } + + // internal methods + + private static MediaSource maybeClipMediaSource(MediaItem mediaItem, MediaSource mediaSource) { + if (mediaItem.clippingConfiguration.startPositionMs == 0 + && mediaItem.clippingConfiguration.endPositionMs == C.TIME_END_OF_SOURCE + && !mediaItem.clippingConfiguration.relativeToDefaultPosition) { + return mediaSource; + } + return new ClippingMediaSource( + mediaSource, + Util.msToUs(mediaItem.clippingConfiguration.startPositionMs), + Util.msToUs(mediaItem.clippingConfiguration.endPositionMs), + /* enableInitialDiscontinuity= */ !mediaItem.clippingConfiguration.startsAtKeyFrame, + /* allowDynamicClippingUpdates= */ mediaItem.clippingConfiguration.relativeToLiveWindow, + mediaItem.clippingConfiguration.relativeToDefaultPosition); + } + + private MediaSource maybeWrapWithAdsMediaSource(MediaItem mediaItem, MediaSource mediaSource) { + checkNotNull(mediaItem.localConfiguration); + @Nullable + MediaItem.AdsConfiguration adsConfiguration = mediaItem.localConfiguration.adsConfiguration; + if (adsConfiguration == null) { + return mediaSource; + } + @Nullable AdsLoader.Provider adsLoaderProvider = this.adsLoaderProvider; + @Nullable AdViewProvider adViewProvider = this.adViewProvider; + if (adsLoaderProvider == null || adViewProvider == null) { + Log.w( + TAG, + "Playing media 
without ads. Configure ad support by calling setAdsLoaderProvider and" + + " setAdViewProvider."); + return mediaSource; + } + @Nullable AdsLoader adsLoader = adsLoaderProvider.getAdsLoader(adsConfiguration); + if (adsLoader == null) { + Log.w(TAG, "Playing media without ads, as no AdsLoader was provided."); + return mediaSource; + } + return new AdsMediaSource( + mediaSource, + new DataSpec(adsConfiguration.adTagUri), + /* adsId= */ adsConfiguration.adsId != null + ? adsConfiguration.adsId + : ImmutableList.of( + mediaItem.mediaId, mediaItem.localConfiguration.uri, adsConfiguration.adTagUri), + /* adMediaSourceFactory= */ this, + adsLoader, + adViewProvider); + } + + /** Loads media source factories lazily. */ + private static final class DelegateFactoryLoader { + private final ExtractorsFactory extractorsFactory; + private final Map> + mediaSourceFactorySuppliers; + private final Set supportedTypes; + private final Map mediaSourceFactories; + + private DataSource.@MonotonicNonNull Factory dataSourceFactory; + @Nullable private DrmSessionManagerProvider drmSessionManagerProvider; + @Nullable private LoadErrorHandlingPolicy loadErrorHandlingPolicy; + + public DelegateFactoryLoader(ExtractorsFactory extractorsFactory) { + this.extractorsFactory = extractorsFactory; + mediaSourceFactorySuppliers = new HashMap<>(); + supportedTypes = new HashSet<>(); + mediaSourceFactories = new HashMap<>(); + } + + public @C.ContentType int[] getSupportedTypes() { + ensureAllSuppliersAreLoaded(); + return Ints.toArray(supportedTypes); + } + + @SuppressWarnings("deprecation") // Forwarding to deprecated methods. + @Nullable + public MediaSource.Factory getMediaSourceFactory(@C.ContentType int contentType) { + @Nullable MediaSource.Factory mediaSourceFactory = mediaSourceFactories.get(contentType); + if (mediaSourceFactory != null) { + return mediaSourceFactory; + } + @Nullable + Supplier mediaSourceFactorySupplier = maybeLoadSupplier(contentType); + if (mediaSourceFactorySupplier == null) { + return null; + } + + mediaSourceFactory = mediaSourceFactorySupplier.get(); + if (drmSessionManagerProvider != null) { + mediaSourceFactory.setDrmSessionManagerProvider(drmSessionManagerProvider); + } + if (loadErrorHandlingPolicy != null) { + mediaSourceFactory.setLoadErrorHandlingPolicy(loadErrorHandlingPolicy); + } + mediaSourceFactories.put(contentType, mediaSourceFactory); + return mediaSourceFactory; + } + + public void setDataSourceFactory(DataSource.Factory dataSourceFactory) { + if (dataSourceFactory != this.dataSourceFactory) { + this.dataSourceFactory = dataSourceFactory; + // TODO(b/233577470): Call MediaSource.Factory.setDataSourceFactory on each value when it + // exists on the interface. 
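As a minimal usage sketch of the local ad insertion wiring above (the context, imaAdsLoader and playerView objects are assumed to exist; playerView stands in for any AdViewProvider implementation such as the library's player view): unless both components are set, maybeWrapWithAdsMediaSource logs the warning above and returns the source without ad insertion.

DefaultMediaSourceFactory adAwareFactory =
    new DefaultMediaSourceFactory(context)
        .setLocalAdInsertionComponents(
            /* adsLoaderProvider= */ adsConfiguration -> imaAdsLoader,
            /* adViewProvider= */ playerView);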
+ mediaSourceFactorySuppliers.clear(); + mediaSourceFactories.clear(); + } + } + + public void setDrmSessionManagerProvider(DrmSessionManagerProvider drmSessionManagerProvider) { + this.drmSessionManagerProvider = drmSessionManagerProvider; + for (MediaSource.Factory mediaSourceFactory : mediaSourceFactories.values()) { + mediaSourceFactory.setDrmSessionManagerProvider(drmSessionManagerProvider); + } + } + + public void setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy loadErrorHandlingPolicy) { + this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; + for (MediaSource.Factory mediaSourceFactory : mediaSourceFactories.values()) { + mediaSourceFactory.setLoadErrorHandlingPolicy(loadErrorHandlingPolicy); + } + } + + private void ensureAllSuppliersAreLoaded() { + maybeLoadSupplier(C.CONTENT_TYPE_DASH); + maybeLoadSupplier(C.CONTENT_TYPE_SS); + maybeLoadSupplier(C.CONTENT_TYPE_HLS); + maybeLoadSupplier(C.CONTENT_TYPE_RTSP); + maybeLoadSupplier(C.CONTENT_TYPE_OTHER); + } + + @Nullable + private Supplier maybeLoadSupplier(@C.ContentType int contentType) { + if (mediaSourceFactorySuppliers.containsKey(contentType)) { + return mediaSourceFactorySuppliers.get(contentType); + } + + @Nullable Supplier mediaSourceFactorySupplier = null; + DataSource.Factory dataSourceFactory = checkNotNull(this.dataSourceFactory); + try { + Class clazz; + switch (contentType) { + case C.CONTENT_TYPE_DASH: + clazz = + Class.forName("com.google.android.exoplayer2.source.dash.DashMediaSource$Factory") + .asSubclass(MediaSource.Factory.class); + mediaSourceFactorySupplier = () -> newInstance(clazz, dataSourceFactory); + break; + case C.CONTENT_TYPE_SS: + clazz = + Class.forName( + "com.google.android.exoplayer2.source.smoothstreaming.SsMediaSource$Factory") + .asSubclass(MediaSource.Factory.class); + mediaSourceFactorySupplier = () -> newInstance(clazz, dataSourceFactory); + break; + case C.CONTENT_TYPE_HLS: + clazz = + Class.forName("com.google.android.exoplayer2.source.hls.HlsMediaSource$Factory") + .asSubclass(MediaSource.Factory.class); + mediaSourceFactorySupplier = () -> newInstance(clazz, dataSourceFactory); + break; + case C.CONTENT_TYPE_RTSP: + clazz = + Class.forName("com.google.android.exoplayer2.source.rtsp.RtspMediaSource$Factory") + .asSubclass(MediaSource.Factory.class); + mediaSourceFactorySupplier = () -> newInstance(clazz); + break; + case C.CONTENT_TYPE_OTHER: + mediaSourceFactorySupplier = + () -> new ProgressiveMediaSource.Factory(dataSourceFactory, extractorsFactory); + break; + default: + // Do nothing. + } + } catch (ClassNotFoundException e) { + // Expected if the app was built without the specific module. 
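A small sketch of what the reflective lookup above means for callers (a Context named context is assumed; classes come from com.google.android.exoplayer2): only content types whose source module is compiled into the app are reported, with progressive content (C.CONTENT_TYPE_OTHER) always available.

DefaultMediaSourceFactory probeFactory = new DefaultMediaSourceFactory(context);
for (int contentType : probeFactory.getSupportedTypes()) {
  // DASH/SS/HLS/RTSP only show up here if the corresponding module was found via reflection.
  Log.d("SupportedTypes", "content type available: " + contentType);
}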
+ } + mediaSourceFactorySuppliers.put(contentType, mediaSourceFactorySupplier); + if (mediaSourceFactorySupplier != null) { + supportedTypes.add(contentType); + } + return mediaSourceFactorySupplier; + } + } + + private static final class UnknownSubtitlesExtractor implements Extractor { + private final Format format; + + public UnknownSubtitlesExtractor(Format format) { + this.format = format; + } + + @Override + public boolean sniff(ExtractorInput input) { + return true; + } + + @Override + public void init(ExtractorOutput output) { + TrackOutput trackOutput = output.track(/* id= */ 0, C.TRACK_TYPE_TEXT); + output.seekMap(new SeekMap.Unseekable(C.TIME_UNSET)); + output.endTracks(); + trackOutput.format( + format + .buildUpon() + .setSampleMimeType(MimeTypes.TEXT_UNKNOWN) + .setCodecs(format.sampleMimeType) + .build()); + } + + @Override + public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { + int skipResult = input.skip(Integer.MAX_VALUE); + if (skipResult == C.RESULT_END_OF_INPUT) { + return RESULT_END_OF_INPUT; + } + return RESULT_CONTINUE; + } + + @Override + public void seek(long position, long timeUs) {} + + @Override + public void release() {} + } + + private static MediaSource.Factory newInstance( + Class clazz, DataSource.Factory dataSourceFactory) { + try { + return clazz.getConstructor(DataSource.Factory.class).newInstance(dataSourceFactory); + } catch (Exception e) { + throw new IllegalStateException(e); + } + } + + private static MediaSource.Factory newInstance(Class clazz) { + try { + return clazz.getConstructor().newInstance(); + } catch (Exception e) { + throw new IllegalStateException(e); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/EmptySampleStream.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/EmptySampleStream.java index 299b816cc8..39ca0e97f4 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/EmptySampleStream.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/EmptySampleStream.java @@ -18,11 +18,8 @@ import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; -import java.io.IOException; -/** - * An empty {@link SampleStream}. - */ +/** An empty {@link SampleStream}. */ public final class EmptySampleStream implements SampleStream { @Override @@ -31,13 +28,13 @@ public boolean isReady() { } @Override - public void maybeThrowError() throws IOException { + public void maybeThrowError() { // Do nothing. 
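Relating this to the subtitle handling above, a hypothetical sketch (context, videoUri and subtitleUri are assumed values; ImmutableList is Guava, which this build already depends on): subtitle configurations declared on the MediaItem are turned into side-loaded sources and merged with the primary source by createMediaSource, with UnknownSubtitlesExtractor as the fallback when the experimental progressive path is enabled and the format is unsupported.

MediaItem itemWithSubtitles =
    new MediaItem.Builder()
        .setUri(videoUri)
        .setSubtitleConfigurations(
            ImmutableList.of(
                new MediaItem.SubtitleConfiguration.Builder(subtitleUri)
                    .setMimeType(MimeTypes.TEXT_VTT)
                    .setLanguage("en")
                    .build()))
        .build();
MediaSource merged =
    new DefaultMediaSourceFactory(context).createMediaSource(itemWithSubtitles);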
} @Override - public int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, - boolean formatRequired) { + public int readData( + FormatHolder formatHolder, DecoderInputBuffer buffer, @ReadFlags int readFlags) { buffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM); return C.RESULT_BUFFER_READ; } @@ -46,5 +43,4 @@ public int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, public int skipData(long positionUs) { return 0; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ExtractorMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ExtractorMediaSource.java deleted file mode 100644 index 830a62a884..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ExtractorMediaSource.java +++ /dev/null @@ -1,394 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.source; - -import android.net.Uri; -import android.os.Handler; -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.Player; -import com.google.android.exoplayer2.Timeline; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory; -import com.google.android.exoplayer2.extractor.Extractor; -import com.google.android.exoplayer2.extractor.ExtractorsFactory; -import com.google.android.exoplayer2.upstream.Allocator; -import com.google.android.exoplayer2.upstream.DataSource; -import com.google.android.exoplayer2.upstream.DefaultLoadErrorHandlingPolicy; -import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; -import com.google.android.exoplayer2.upstream.TransferListener; -import com.google.android.exoplayer2.util.Assertions; -import java.io.IOException; - -/** @deprecated Use {@link ProgressiveMediaSource} instead. */ -@Deprecated -@SuppressWarnings("deprecation") -public final class ExtractorMediaSource extends CompositeMediaSource { - - /** @deprecated Use {@link MediaSourceEventListener} instead. */ - @Deprecated - public interface EventListener { - - /** - * Called when an error occurs loading media data. - *
<p>
      - * This method being called does not indicate that playback has failed, or that it will fail. - * The player may be able to recover from the error and continue. Hence applications should - * not implement this method to display a user visible error or initiate an application - * level retry ({@link Player.EventListener#onPlayerError} is the appropriate place to implement - * such behavior). This method is called to provide the application with an opportunity to log - * the error if it wishes to do so. - * - * @param error The load error. - */ - void onLoadError(IOException error); - - } - - /** @deprecated Use {@link ProgressiveMediaSource.Factory} instead. */ - @Deprecated - public static final class Factory implements MediaSourceFactory { - - private final DataSource.Factory dataSourceFactory; - - @Nullable private ExtractorsFactory extractorsFactory; - @Nullable private String customCacheKey; - @Nullable private Object tag; - private LoadErrorHandlingPolicy loadErrorHandlingPolicy; - private int continueLoadingCheckIntervalBytes; - private boolean isCreateCalled; - - /** - * Creates a new factory for {@link ExtractorMediaSource}s. - * - * @param dataSourceFactory A factory for {@link DataSource}s to read the media. - */ - public Factory(DataSource.Factory dataSourceFactory) { - this.dataSourceFactory = dataSourceFactory; - loadErrorHandlingPolicy = new DefaultLoadErrorHandlingPolicy(); - continueLoadingCheckIntervalBytes = DEFAULT_LOADING_CHECK_INTERVAL_BYTES; - } - - /** - * Sets the factory for {@link Extractor}s to process the media stream. The default value is an - * instance of {@link DefaultExtractorsFactory}. - * - * @param extractorsFactory A factory for {@link Extractor}s to process the media stream. If the - * possible formats are known, pass a factory that instantiates extractors for those - * formats. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - */ - public Factory setExtractorsFactory(ExtractorsFactory extractorsFactory) { - Assertions.checkState(!isCreateCalled); - this.extractorsFactory = extractorsFactory; - return this; - } - - /** - * Sets the custom key that uniquely identifies the original stream. Used for cache indexing. - * The default value is {@code null}. - * - * @param customCacheKey A custom key that uniquely identifies the original stream. Used for - * cache indexing. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - */ - public Factory setCustomCacheKey(String customCacheKey) { - Assertions.checkState(!isCreateCalled); - this.customCacheKey = customCacheKey; - return this; - } - - /** - * Sets a tag for the media source which will be published in the {@link - * com.google.android.exoplayer2.Timeline} of the source as {@link - * com.google.android.exoplayer2.Timeline.Window#tag}. - * - * @param tag A tag for the media source. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - */ - public Factory setTag(Object tag) { - Assertions.checkState(!isCreateCalled); - this.tag = tag; - return this; - } - - /** - * Sets the minimum number of times to retry if a loading error occurs. See {@link - * #setLoadErrorHandlingPolicy} for the default value. - * - *
<p>
      Calling this method is equivalent to calling {@link #setLoadErrorHandlingPolicy} with - * {@link DefaultLoadErrorHandlingPolicy#DefaultLoadErrorHandlingPolicy(int) - * DefaultLoadErrorHandlingPolicy(minLoadableRetryCount)} - * - * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - * @deprecated Use {@link #setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy)} instead. - */ - @Deprecated - public Factory setMinLoadableRetryCount(int minLoadableRetryCount) { - return setLoadErrorHandlingPolicy(new DefaultLoadErrorHandlingPolicy(minLoadableRetryCount)); - } - - /** - * Sets the {@link LoadErrorHandlingPolicy}. The default value is created by calling {@link - * DefaultLoadErrorHandlingPolicy#DefaultLoadErrorHandlingPolicy()}. - * - *
<p>
      Calling this method overrides any calls to {@link #setMinLoadableRetryCount(int)}. - * - * @param loadErrorHandlingPolicy A {@link LoadErrorHandlingPolicy}. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - */ - public Factory setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy loadErrorHandlingPolicy) { - Assertions.checkState(!isCreateCalled); - this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; - return this; - } - - /** - * Sets the number of bytes that should be loaded between each invocation of {@link - * MediaPeriod.Callback#onContinueLoadingRequested(SequenceableLoader)}. The default value is - * {@link #DEFAULT_LOADING_CHECK_INTERVAL_BYTES}. - * - * @param continueLoadingCheckIntervalBytes The number of bytes that should be loaded between - * each invocation of {@link - * MediaPeriod.Callback#onContinueLoadingRequested(SequenceableLoader)}. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - */ - public Factory setContinueLoadingCheckIntervalBytes(int continueLoadingCheckIntervalBytes) { - Assertions.checkState(!isCreateCalled); - this.continueLoadingCheckIntervalBytes = continueLoadingCheckIntervalBytes; - return this; - } - - /** @deprecated Use {@link ProgressiveMediaSource.Factory#setDrmSessionManager} instead. */ - @Override - @Deprecated - public Factory setDrmSessionManager(DrmSessionManager drmSessionManager) { - throw new UnsupportedOperationException(); - } - - /** - * Returns a new {@link ExtractorMediaSource} using the current parameters. - * - * @param uri The {@link Uri}. - * @return The new {@link ExtractorMediaSource}. - */ - @Override - public ExtractorMediaSource createMediaSource(Uri uri) { - isCreateCalled = true; - if (extractorsFactory == null) { - extractorsFactory = new DefaultExtractorsFactory(); - } - return new ExtractorMediaSource( - uri, - dataSourceFactory, - extractorsFactory, - loadErrorHandlingPolicy, - customCacheKey, - continueLoadingCheckIntervalBytes, - tag); - } - - /** - * @deprecated Use {@link #createMediaSource(Uri)} and {@link #addEventListener(Handler, - * MediaSourceEventListener)} instead. - */ - @Deprecated - public ExtractorMediaSource createMediaSource( - Uri uri, @Nullable Handler eventHandler, @Nullable MediaSourceEventListener eventListener) { - ExtractorMediaSource mediaSource = createMediaSource(uri); - if (eventHandler != null && eventListener != null) { - mediaSource.addEventListener(eventHandler, eventListener); - } - return mediaSource; - } - - @Override - public int[] getSupportedTypes() { - return new int[] {C.TYPE_OTHER}; - } - } - - /** - * @deprecated Use {@link ProgressiveMediaSource#DEFAULT_LOADING_CHECK_INTERVAL_BYTES} instead. - */ - @Deprecated - public static final int DEFAULT_LOADING_CHECK_INTERVAL_BYTES = - ProgressiveMediaSource.DEFAULT_LOADING_CHECK_INTERVAL_BYTES; - - private final ProgressiveMediaSource progressiveMediaSource; - - /** - * @param uri The {@link Uri} of the media stream. - * @param dataSourceFactory A factory for {@link DataSource}s to read the media. - * @param extractorsFactory A factory for {@link Extractor}s to process the media stream. If the - * possible formats are known, pass a factory that instantiates extractors for those formats. - * Otherwise, pass a {@link DefaultExtractorsFactory} to use default extractors. - * @param eventHandler A handler for events. 
May be null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @deprecated Use {@link Factory} instead. - */ - @Deprecated - public ExtractorMediaSource( - Uri uri, - DataSource.Factory dataSourceFactory, - ExtractorsFactory extractorsFactory, - @Nullable Handler eventHandler, - @Nullable EventListener eventListener) { - this(uri, dataSourceFactory, extractorsFactory, eventHandler, eventListener, null); - } - - /** - * @param uri The {@link Uri} of the media stream. - * @param dataSourceFactory A factory for {@link DataSource}s to read the media. - * @param extractorsFactory A factory for {@link Extractor}s to process the media stream. If the - * possible formats are known, pass a factory that instantiates extractors for those formats. - * Otherwise, pass a {@link DefaultExtractorsFactory} to use default extractors. - * @param eventHandler A handler for events. May be null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @param customCacheKey A custom key that uniquely identifies the original stream. Used for cache - * indexing. May be null. - * @deprecated Use {@link Factory} instead. - */ - @Deprecated - public ExtractorMediaSource( - Uri uri, - DataSource.Factory dataSourceFactory, - ExtractorsFactory extractorsFactory, - @Nullable Handler eventHandler, - @Nullable EventListener eventListener, - @Nullable String customCacheKey) { - this( - uri, - dataSourceFactory, - extractorsFactory, - eventHandler, - eventListener, - customCacheKey, - DEFAULT_LOADING_CHECK_INTERVAL_BYTES); - } - - /** - * @param uri The {@link Uri} of the media stream. - * @param dataSourceFactory A factory for {@link DataSource}s to read the media. - * @param extractorsFactory A factory for {@link Extractor}s to process the media stream. If the - * possible formats are known, pass a factory that instantiates extractors for those formats. - * Otherwise, pass a {@link DefaultExtractorsFactory} to use default extractors. - * @param eventHandler A handler for events. May be null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @param customCacheKey A custom key that uniquely identifies the original stream. Used for cache - * indexing. May be null. - * @param continueLoadingCheckIntervalBytes The number of bytes that should be loaded between each - * invocation of {@link MediaPeriod.Callback#onContinueLoadingRequested(SequenceableLoader)}. - * @deprecated Use {@link Factory} instead. 
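Since this deprecated file is being removed, a brief sketch of the replacement path named in its Javadoc (dataSourceFactory and uri are assumed to exist): ProgressiveMediaSource.Factory covers the same use case as these constructors.

ProgressiveMediaSource source =
    new ProgressiveMediaSource.Factory(dataSourceFactory)
        .createMediaSource(MediaItem.fromUri(uri));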
- */ - @Deprecated - public ExtractorMediaSource( - Uri uri, - DataSource.Factory dataSourceFactory, - ExtractorsFactory extractorsFactory, - @Nullable Handler eventHandler, - @Nullable EventListener eventListener, - @Nullable String customCacheKey, - int continueLoadingCheckIntervalBytes) { - this( - uri, - dataSourceFactory, - extractorsFactory, - new DefaultLoadErrorHandlingPolicy(), - customCacheKey, - continueLoadingCheckIntervalBytes, - /* tag= */ null); - if (eventListener != null && eventHandler != null) { - addEventListener(eventHandler, new EventListenerWrapper(eventListener)); - } - } - - private ExtractorMediaSource( - Uri uri, - DataSource.Factory dataSourceFactory, - ExtractorsFactory extractorsFactory, - LoadErrorHandlingPolicy loadableLoadErrorHandlingPolicy, - @Nullable String customCacheKey, - int continueLoadingCheckIntervalBytes, - @Nullable Object tag) { - progressiveMediaSource = - new ProgressiveMediaSource( - uri, - dataSourceFactory, - extractorsFactory, - DrmSessionManager.getDummyDrmSessionManager(), - loadableLoadErrorHandlingPolicy, - customCacheKey, - continueLoadingCheckIntervalBytes, - tag); - } - - @Override - @Nullable - public Object getTag() { - return progressiveMediaSource.getTag(); - } - - @Override - protected void prepareSourceInternal(@Nullable TransferListener mediaTransferListener) { - super.prepareSourceInternal(mediaTransferListener); - prepareChildSource(/* id= */ null, progressiveMediaSource); - } - - @Override - protected void onChildSourceInfoRefreshed( - @Nullable Void id, MediaSource mediaSource, Timeline timeline) { - refreshSourceInfo(timeline); - } - - @Override - public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long startPositionUs) { - return progressiveMediaSource.createPeriod(id, allocator, startPositionUs); - } - - @Override - public void releasePeriod(MediaPeriod mediaPeriod) { - progressiveMediaSource.releasePeriod(mediaPeriod); - } - - @Deprecated - private static final class EventListenerWrapper implements MediaSourceEventListener { - - private final EventListener eventListener; - - public EventListenerWrapper(EventListener eventListener) { - this.eventListener = Assertions.checkNotNull(eventListener); - } - - @Override - public void onLoadError( - int windowIndex, - @Nullable MediaPeriodId mediaPeriodId, - LoadEventInfo loadEventInfo, - MediaLoadData mediaLoadData, - IOException error, - boolean wasCanceled) { - eventListener.onLoadError(error); - } - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ForwardingTimeline.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ForwardingTimeline.java index 38b373b26c..009c6466d1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ForwardingTimeline.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ForwardingTimeline.java @@ -18,9 +18,7 @@ import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.Timeline; -/** - * An overridable {@link Timeline} implementation forwarding all methods to another timeline. - */ +/** An overridable {@link Timeline} implementation forwarding all methods to another timeline. 
*/ public abstract class ForwardingTimeline extends Timeline { protected final Timeline timeline; @@ -35,14 +33,14 @@ public int getWindowCount() { } @Override - public int getNextWindowIndex(int windowIndex, @Player.RepeatMode int repeatMode, - boolean shuffleModeEnabled) { + public int getNextWindowIndex( + int windowIndex, @Player.RepeatMode int repeatMode, boolean shuffleModeEnabled) { return timeline.getNextWindowIndex(windowIndex, repeatMode, shuffleModeEnabled); } @Override - public int getPreviousWindowIndex(int windowIndex, @Player.RepeatMode int repeatMode, - boolean shuffleModeEnabled) { + public int getPreviousWindowIndex( + int windowIndex, @Player.RepeatMode int repeatMode, boolean shuffleModeEnabled) { return timeline.getPreviousWindowIndex(windowIndex, repeatMode, shuffleModeEnabled); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/IcyDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/IcyDataSource.java index d097073960..cab324fc13 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/IcyDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/IcyDataSource.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.source; +import static java.lang.Math.min; + import android.net.Uri; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; @@ -67,16 +69,17 @@ public IcyDataSource(DataSource upstream, int metadataIntervalBytes, Listener li @Override public void addTransferListener(TransferListener transferListener) { + Assertions.checkNotNull(transferListener); upstream.addTransferListener(transferListener); } @Override - public long open(DataSpec dataSpec) throws IOException { + public long open(DataSpec dataSpec) { throw new UnsupportedOperationException(); } @Override - public int read(byte[] buffer, int offset, int readLength) throws IOException { + public int read(byte[] buffer, int offset, int length) throws IOException { if (bytesUntilMetadata == 0) { if (readMetadata()) { bytesUntilMetadata = metadataIntervalBytes; @@ -84,15 +87,15 @@ public int read(byte[] buffer, int offset, int readLength) throws IOException { return C.RESULT_END_OF_INPUT; } } - int bytesRead = upstream.read(buffer, offset, Math.min(bytesUntilMetadata, readLength)); + int bytesRead = upstream.read(buffer, offset, min(bytesUntilMetadata, length)); if (bytesRead != C.RESULT_END_OF_INPUT) { bytesUntilMetadata -= bytesRead; } return bytesRead; } - @Nullable @Override + @Nullable public Uri getUri() { return upstream.getUri(); } @@ -103,7 +106,7 @@ public Map> getResponseHeaders() { } @Override - public void close() throws IOException { + public void close() { throw new UnsupportedOperationException(); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/LoadEventInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/LoadEventInfo.java new file mode 100644 index 0000000000..8ae7b02d43 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/LoadEventInfo.java @@ -0,0 +1,99 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source; + +import android.net.Uri; +import android.os.SystemClock; +import com.google.android.exoplayer2.upstream.DataSpec; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicLong; + +/** {@link MediaSource} load event information. */ +public final class LoadEventInfo { + + /** Used for the generation of unique ids. */ + private static final AtomicLong idSource = new AtomicLong(); + + /** Returns an non-negative identifier which is unique to the JVM instance. */ + public static long getNewId() { + return idSource.getAndIncrement(); + } + + /** Identifies the load task to which this event corresponds. */ + public final long loadTaskId; + /** Defines the requested data. */ + public final DataSpec dataSpec; + /** + * The {@link Uri} from which data is being read. The uri will be identical to the one in {@link + * #dataSpec}.uri unless redirection has occurred. If redirection has occurred, this is the uri + * after redirection. + */ + public final Uri uri; + /** The response headers associated with the load, or an empty map if unavailable. */ + public final Map> responseHeaders; + /** The value of {@link SystemClock#elapsedRealtime} at the time of the load event. */ + public final long elapsedRealtimeMs; + /** The duration of the load up to the event time. */ + public final long loadDurationMs; + /** The number of bytes that were loaded up to the event time. */ + public final long bytesLoaded; + + /** + * Equivalent to {@link #LoadEventInfo(long, DataSpec, Uri, Map, long, long, long) + * LoadEventInfo(loadTaskId, dataSpec, dataSpec.uri, Collections.emptyMap(), elapsedRealtimeMs, 0, + * 0)}. + */ + public LoadEventInfo(long loadTaskId, DataSpec dataSpec, long elapsedRealtimeMs) { + this( + loadTaskId, + dataSpec, + dataSpec.uri, + Collections.emptyMap(), + elapsedRealtimeMs, + /* loadDurationMs= */ 0, + /* bytesLoaded= */ 0); + } + + /** + * Creates load event info. + * + * @param loadTaskId See {@link #loadTaskId}. + * @param dataSpec See {@link #dataSpec}. + * @param uri See {@link #uri}. + * @param responseHeaders See {@link #responseHeaders}. + * @param elapsedRealtimeMs See {@link #elapsedRealtimeMs}. + * @param loadDurationMs See {@link #loadDurationMs}. + * @param bytesLoaded See {@link #bytesLoaded}. 
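As a usage sketch for the new class (a prepared ExoPlayer instance named player is assumed): LoadEventInfo is what load-event callbacks such as AnalyticsListener.onLoadCompleted receive, so per-request sizes and durations can be read straight from its fields.

player.addAnalyticsListener(
    new AnalyticsListener() {
      @Override
      public void onLoadCompleted(
          AnalyticsListener.EventTime eventTime,
          LoadEventInfo loadEventInfo,
          MediaLoadData mediaLoadData) {
        // Log how much the completed load transferred and how long it took.
        Log.d(
            "LoadStats",
            loadEventInfo.bytesLoaded
                + " bytes in "
                + loadEventInfo.loadDurationMs
                + " ms from "
                + loadEventInfo.uri);
      }
    });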
+ */ + public LoadEventInfo( + long loadTaskId, + DataSpec dataSpec, + Uri uri, + Map> responseHeaders, + long elapsedRealtimeMs, + long loadDurationMs, + long bytesLoaded) { + this.loadTaskId = loadTaskId; + this.dataSpec = dataSpec; + this.uri = uri; + this.responseHeaders = responseHeaders; + this.elapsedRealtimeMs = elapsedRealtimeMs; + this.loadDurationMs = loadDurationMs; + this.bytesLoaded = bytesLoaded; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/LoopingMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/LoopingMediaSource.java index ac23e2a831..2e4d98fcea 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/LoopingMediaSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/LoopingMediaSource.java @@ -16,13 +16,13 @@ package com.google.android.exoplayer2.source; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.AbstractConcatenatedTimeline; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlayer; import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.Timeline; import com.google.android.exoplayer2.source.ShuffleOrder.UnshuffledShuffleOrder; import com.google.android.exoplayer2.upstream.Allocator; -import com.google.android.exoplayer2.upstream.TransferListener; import com.google.android.exoplayer2.util.Assertions; import java.util.HashMap; import java.util.Map; @@ -30,19 +30,24 @@ /** * Loops a {@link MediaSource} a specified number of times. * - *
<p>
      Note: To loop a {@link MediaSource} indefinitely, it is usually better to use {@link - * ExoPlayer#setRepeatMode(int)} instead of this class. + * @deprecated To loop a {@link MediaSource} indefinitely, use {@link Player#setRepeatMode(int)} + * instead of this class. To add a {@link MediaSource} a specific number of times to the + * playlist, use {@link ExoPlayer#addMediaSource} in a loop with the same {@link MediaSource}. + * To combine repeated {@link MediaSource} instances into one {@link MediaSource}, for example + * to further wrap it in another {@link MediaSource}, use {@link ConcatenatingMediaSource} with + * the same {@link MediaSource} {@link ConcatenatingMediaSource#addMediaSource added} multiple + * times. */ -public final class LoopingMediaSource extends CompositeMediaSource { +@Deprecated +public final class LoopingMediaSource extends WrappingMediaSource { - private final MediaSource childSource; private final int loopCount; private final Map childMediaPeriodIdToMediaPeriodId; private final Map mediaPeriodToChildMediaPeriodId; /** - * Loops the provided source indefinitely. Note that it is usually better to use - * {@link ExoPlayer#setRepeatMode(int)}. + * Loops the provided source indefinitely. Note that it is usually better to use {@link + * ExoPlayer#setRepeatMode(int)}. * * @param childSource The {@link MediaSource} to loop. */ @@ -57,8 +62,8 @@ public LoopingMediaSource(MediaSource childSource) { * @param loopCount The desired number of loops. Must be strictly positive. */ public LoopingMediaSource(MediaSource childSource, int loopCount) { + super(new MaskingMediaSource(childSource, /* useLazyPreparation= */ false)); Assertions.checkArgument(loopCount > 0); - this.childSource = childSource; this.loopCount = loopCount; childMediaPeriodIdToMediaPeriodId = new HashMap<>(); mediaPeriodToChildMediaPeriodId = new HashMap<>(); @@ -66,33 +71,36 @@ public LoopingMediaSource(MediaSource childSource, int loopCount) { @Override @Nullable - public Object getTag() { - return childSource.getTag(); + public Timeline getInitialTimeline() { + MaskingMediaSource maskingMediaSource = (MaskingMediaSource) mediaSource; + return loopCount != Integer.MAX_VALUE + ? 
new LoopingTimeline(maskingMediaSource.getTimeline(), loopCount) + : new InfinitelyLoopingTimeline(maskingMediaSource.getTimeline()); } @Override - protected void prepareSourceInternal(@Nullable TransferListener mediaTransferListener) { - super.prepareSourceInternal(mediaTransferListener); - prepareChildSource(/* id= */ null, childSource); + public boolean isSingleWindow() { + return false; } @Override public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long startPositionUs) { if (loopCount == Integer.MAX_VALUE) { - return childSource.createPeriod(id, allocator, startPositionUs); + return mediaSource.createPeriod(id, allocator, startPositionUs); } Object childPeriodUid = LoopingTimeline.getChildPeriodUidFromConcatenatedUid(id.periodUid); MediaPeriodId childMediaPeriodId = id.copyWithPeriodUid(childPeriodUid); childMediaPeriodIdToMediaPeriodId.put(childMediaPeriodId, id); MediaPeriod mediaPeriod = - childSource.createPeriod(childMediaPeriodId, allocator, startPositionUs); + mediaSource.createPeriod(childMediaPeriodId, allocator, startPositionUs); mediaPeriodToChildMediaPeriodId.put(mediaPeriod, childMediaPeriodId); return mediaPeriod; } @Override public void releasePeriod(MediaPeriod mediaPeriod) { - childSource.releasePeriod(mediaPeriod); + mediaSource.releasePeriod(mediaPeriod); + @Nullable MediaPeriodId childMediaPeriodId = mediaPeriodToChildMediaPeriodId.remove(mediaPeriod); if (childMediaPeriodId != null) { childMediaPeriodIdToMediaPeriodId.remove(childMediaPeriodId); @@ -100,17 +108,17 @@ public void releasePeriod(MediaPeriod mediaPeriod) { } @Override - protected void onChildSourceInfoRefreshed(Void id, MediaSource mediaSource, Timeline timeline) { + protected void onChildSourceInfoRefreshed(Timeline newTimeline) { Timeline loopingTimeline = loopCount != Integer.MAX_VALUE - ? new LoopingTimeline(timeline, loopCount) - : new InfinitelyLoopingTimeline(timeline); + ? new LoopingTimeline(newTimeline, loopCount) + : new InfinitelyLoopingTimeline(newTimeline); refreshSourceInfo(loopingTimeline); } @Override - protected @Nullable MediaPeriodId getMediaPeriodIdForChildMediaPeriodId( - Void id, MediaPeriodId mediaPeriodId) { + @Nullable + protected MediaPeriodId getMediaPeriodIdForChildMediaPeriodId(MediaPeriodId mediaPeriodId) { return loopCount != Integer.MAX_VALUE ? childMediaPeriodIdToMediaPeriodId.get(mediaPeriodId) : mediaPeriodId; @@ -130,7 +138,8 @@ public LoopingTimeline(Timeline childTimeline, int loopCount) { childWindowCount = childTimeline.getWindowCount(); this.loopCount = loopCount; if (childPeriodCount > 0) { - Assertions.checkState(loopCount <= Integer.MAX_VALUE / childPeriodCount, + Assertions.checkState( + loopCount <= Integer.MAX_VALUE / childPeriodCount, "LoopingMediaSource contains too many periods"); } } @@ -182,7 +191,6 @@ protected int getFirstWindowIndexByChildIndex(int childIndex) { protected Object getChildUidByChildIndex(int childIndex) { return childIndex; } - } private static final class InfinitelyLoopingTimeline extends ForwardingTimeline { @@ -192,23 +200,23 @@ public InfinitelyLoopingTimeline(Timeline timeline) { } @Override - public int getNextWindowIndex(int windowIndex, @Player.RepeatMode int repeatMode, - boolean shuffleModeEnabled) { - int childNextWindowIndex = timeline.getNextWindowIndex(windowIndex, repeatMode, - shuffleModeEnabled); - return childNextWindowIndex == C.INDEX_UNSET ? 
getFirstWindowIndex(shuffleModeEnabled) + public int getNextWindowIndex( + int windowIndex, @Player.RepeatMode int repeatMode, boolean shuffleModeEnabled) { + int childNextWindowIndex = + timeline.getNextWindowIndex(windowIndex, repeatMode, shuffleModeEnabled); + return childNextWindowIndex == C.INDEX_UNSET + ? getFirstWindowIndex(shuffleModeEnabled) : childNextWindowIndex; } @Override - public int getPreviousWindowIndex(int windowIndex, @Player.RepeatMode int repeatMode, - boolean shuffleModeEnabled) { - int childPreviousWindowIndex = timeline.getPreviousWindowIndex(windowIndex, repeatMode, - shuffleModeEnabled); - return childPreviousWindowIndex == C.INDEX_UNSET ? getLastWindowIndex(shuffleModeEnabled) + public int getPreviousWindowIndex( + int windowIndex, @Player.RepeatMode int repeatMode, boolean shuffleModeEnabled) { + int childPreviousWindowIndex = + timeline.getPreviousWindowIndex(windowIndex, repeatMode, shuffleModeEnabled); + return childPreviousWindowIndex == C.INDEX_UNSET + ? getLastWindowIndex(shuffleModeEnabled) : childPreviousWindowIndex; } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MaskingMediaPeriod.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MaskingMediaPeriod.java index 17ac6c0667..7c60a379c7 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MaskingMediaPeriod.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MaskingMediaPeriod.java @@ -15,27 +15,33 @@ */ package com.google.android.exoplayer2.source; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; import static com.google.android.exoplayer2.util.Util.castNonNull; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.SeekParameters; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.upstream.Allocator; import java.io.IOException; import org.checkerframework.checker.nullness.compatqual.NullableType; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; /** - * Media period that wraps a media source and defers calling its {@link - * MediaSource#createPeriod(MediaPeriodId, Allocator, long)} method until {@link - * #createPeriod(MediaPeriodId)} has been called. This is useful if you need to return a media - * period immediately but the media source that should create it is not yet prepared. + * Media period that defers calling {@link MediaSource#createPeriod(MediaPeriodId, Allocator, long)} + * on a given source until {@link #createPeriod(MediaPeriodId)} has been called. This is useful if + * you need to return a media period immediately but the media source that should create it is not + * yet available or prepared. */ public final class MaskingMediaPeriod implements MediaPeriod, MediaPeriod.Callback { - /** Listener for preparation errors. */ - public interface PrepareErrorListener { + /** Listener for preparation events. */ + public interface PrepareListener { + + /** Called when preparing the media period completes. */ + void onPrepareComplete(MediaPeriodId mediaPeriodId); /** * Called the first time an error occurs while refreshing source info or preparing the period. 
@@ -43,45 +49,44 @@ public interface PrepareErrorListener { void onPrepareError(MediaPeriodId mediaPeriodId, IOException exception); } - /** The {@link MediaSource} which will create the actual media period. */ - public final MediaSource mediaSource; /** The {@link MediaPeriodId} used to create the masking media period. */ public final MediaPeriodId id; + private final long preparePositionUs; private final Allocator allocator; - @Nullable private MediaPeriod mediaPeriod; + /** The {@link MediaSource} that will create the underlying media period. */ + private @MonotonicNonNull MediaSource mediaSource; + + private @MonotonicNonNull MediaPeriod mediaPeriod; @Nullable private Callback callback; - private long preparePositionUs; - @Nullable private PrepareErrorListener listener; + @Nullable private PrepareListener listener; private boolean notifiedPrepareError; private long preparePositionOverrideUs; /** - * Creates a new masking media period. + * Creates a new masking media period. The media source must be set via {@link + * #setMediaSource(MediaSource)} before preparation can start. * - * @param mediaSource The media source to wrap. * @param id The identifier used to create the masking media period. * @param allocator The allocator used to create the media period. * @param preparePositionUs The expected start position, in microseconds. */ - public MaskingMediaPeriod( - MediaSource mediaSource, MediaPeriodId id, Allocator allocator, long preparePositionUs) { + public MaskingMediaPeriod(MediaPeriodId id, Allocator allocator, long preparePositionUs) { this.id = id; this.allocator = allocator; - this.mediaSource = mediaSource; this.preparePositionUs = preparePositionUs; preparePositionOverrideUs = C.TIME_UNSET; } /** - * Sets a listener for preparation errors. + * Sets a listener for preparation events. * - * @param listener An listener to be notified of media period preparation errors. If a listener is + * @param listener An listener to be notified of media period preparation events. If a listener is * set, {@link #maybeThrowPrepareError()} will not throw but will instead pass the first * preparation error (if any) to the listener. */ - public void setPrepareErrorListener(PrepareErrorListener listener) { + public void setPrepareListener(PrepareListener listener) { this.listener = listener; } @@ -91,8 +96,8 @@ public long getPreparePositionUs() { } /** - * Overrides the default prepare position at which to prepare the media period. This value is only - * used if called before {@link #createPeriod(MediaPeriodId)}. + * Overrides the default prepare position at which to prepare the media period. This method must + * be called before {@link #createPeriod(MediaPeriodId)}. * * @param preparePositionUs The default prepare position to use, in microseconds. */ @@ -100,6 +105,17 @@ public void overridePreparePositionUs(long preparePositionUs) { preparePositionOverrideUs = preparePositionUs; } + /** Returns the prepare position override set by {@link #overridePreparePositionUs(long)}. */ + public long getPreparePositionOverrideUs() { + return preparePositionOverrideUs; + } + + /** Sets the {@link MediaSource} that will create the underlying media period. */ + public void setMediaSource(MediaSource mediaSource) { + checkState(this.mediaSource == null); + this.mediaSource = mediaSource; + } + /** * Calls {@link MediaSource#createPeriod(MediaPeriodId, Allocator, long)} on the wrapped source * then prepares it if {@link #prepare(Callback, long)} has been called. 
Call {@link @@ -109,26 +125,25 @@ public void overridePreparePositionUs(long preparePositionUs) { */ public void createPeriod(MediaPeriodId id) { long preparePositionUs = getPreparePositionWithOverride(this.preparePositionUs); - mediaPeriod = mediaSource.createPeriod(id, allocator, preparePositionUs); + mediaPeriod = checkNotNull(mediaSource).createPeriod(id, allocator, preparePositionUs); if (callback != null) { - mediaPeriod.prepare(this, preparePositionUs); + mediaPeriod.prepare(/* callback= */ this, preparePositionUs); } } - /** - * Releases the period. - */ + /** Releases the period. */ public void releasePeriod() { if (mediaPeriod != null) { - mediaSource.releasePeriod(mediaPeriod); + checkNotNull(mediaSource).releasePeriod(mediaPeriod); } } @Override - public void prepare(Callback callback, long preparePositionUs) { + public void prepare(Callback callback, long positionUs) { this.callback = callback; if (mediaPeriod != null) { - mediaPeriod.prepare(this, getPreparePositionWithOverride(this.preparePositionUs)); + mediaPeriod.prepare( + /* callback= */ this, getPreparePositionWithOverride(this.preparePositionUs)); } } @@ -137,10 +152,10 @@ public void maybeThrowPrepareError() throws IOException { try { if (mediaPeriod != null) { mediaPeriod.maybeThrowPrepareError(); - } else { + } else if (mediaSource != null) { mediaSource.maybeThrowSourceInfoRefreshError(); } - } catch (final IOException e) { + } catch (IOException e) { if (listener == null) { throw e; } @@ -158,7 +173,7 @@ public TrackGroupArray getTrackGroups() { @Override public long selectTracks( - @NullableType TrackSelection[] selections, + @NullableType ExoTrackSelection[] selections, boolean[] mayRetainStreamFlags, @NullableType SampleStream[] streams, boolean[] streamResetFlags, @@ -226,6 +241,9 @@ public void onContinueLoadingRequested(MediaPeriod source) { @Override public void onPrepared(MediaPeriod mediaPeriod) { castNonNull(callback).onPrepared(this); + if (listener != null) { + listener.onPrepareComplete(id); + } } private long getPreparePositionWithOverride(long preparePositionUs) { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MaskingMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MaskingMediaSource.java index 47279f2358..6ea76c5f78 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MaskingMediaSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MaskingMediaSource.java @@ -15,34 +15,36 @@ */ package com.google.android.exoplayer2.source; +import static java.lang.Math.max; + import android.util.Pair; import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.Timeline; import com.google.android.exoplayer2.Timeline.Window; -import com.google.android.exoplayer2.source.MediaSourceEventListener.EventDispatcher; +import com.google.android.exoplayer2.source.ads.AdPlaybackState; import com.google.android.exoplayer2.upstream.Allocator; -import com.google.android.exoplayer2.upstream.TransferListener; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; -import java.io.IOException; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; /** * A {@link MediaSource} that masks the {@link Timeline} with a placeholder until the actual media * structure is known. 
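A minimal usage sketch, not part of the patch: wrapping an existing MediaSource so the player sees a placeholder timeline until the real structure is known. The helper class and the upstream parameter are hypothetical; the constructor and getTimeline() are the ones shown in this file.

import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.source.MaskingMediaSource;
import com.google.android.exoplayer2.source.MediaSource;

final class MaskingUsageSketch {

  static Timeline wrapAndPeek(MediaSource upstream) {
    MaskingMediaSource masking =
        new MaskingMediaSource(upstream, /* useLazyPreparation= */ true);
    // Until the child reports its real timeline, this is a single dynamic placeholder window.
    return masking.getTimeline();
  }
}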
*/ -public final class MaskingMediaSource extends CompositeMediaSource { +public final class MaskingMediaSource extends WrappingMediaSource { - private final MediaSource mediaSource; private final boolean useLazyPreparation; private final Timeline.Window window; private final Timeline.Period period; private MaskingTimeline timeline; @Nullable private MaskingMediaPeriod unpreparedMaskingMediaPeriod; - @Nullable private EventDispatcher unpreparedMaskingMediaPeriodEventDispatcher; private boolean hasStartedPreparing; private boolean isPrepared; + private boolean hasRealTimeline; /** * Creates the masking media source. @@ -53,11 +55,19 @@ public final class MaskingMediaSource extends CompositeMediaSource { * initial preparations are triggered only when the player starts buffering the media. */ public MaskingMediaSource(MediaSource mediaSource, boolean useLazyPreparation) { - this.mediaSource = mediaSource; - this.useLazyPreparation = useLazyPreparation; + super(mediaSource); + this.useLazyPreparation = useLazyPreparation && mediaSource.isSingleWindow(); window = new Timeline.Window(); period = new Timeline.Period(); - timeline = MaskingTimeline.createWithDummyTimeline(mediaSource.getTag()); + @Nullable Timeline initialTimeline = mediaSource.getInitialTimeline(); + if (initialTimeline != null) { + timeline = + MaskingTimeline.createWithRealTimeline( + initialTimeline, /* firstWindowUid= */ null, /* firstPeriodUid= */ null); + hasRealTimeline = true; + } else { + timeline = MaskingTimeline.createWithPlaceholderTimeline(mediaSource.getMediaItem()); + } } /** Returns the {@link Timeline}. */ @@ -66,23 +76,16 @@ public Timeline getTimeline() { } @Override - public void prepareSourceInternal(@Nullable TransferListener mediaTransferListener) { - super.prepareSourceInternal(mediaTransferListener); + public void prepareSourceInternal() { if (!useLazyPreparation) { hasStartedPreparing = true; - prepareChildSource(/* id= */ null, mediaSource); + prepareChildSource(); } } - @Nullable - @Override - public Object getTag() { - return mediaSource.getTag(); - } - @Override @SuppressWarnings("MissingSuperCall") - public void maybeThrowSourceInfoRefreshError() throws IOException { + public void maybeThrowSourceInfoRefreshError() { // Do nothing. Source info refresh errors will be thrown when calling // MaskingMediaPeriod.maybeThrowPrepareError. } @@ -90,8 +93,8 @@ public void maybeThrowSourceInfoRefreshError() throws IOException { @Override public MaskingMediaPeriod createPeriod( MediaPeriodId id, Allocator allocator, long startPositionUs) { - MaskingMediaPeriod mediaPeriod = - new MaskingMediaPeriod(mediaSource, id, allocator, startPositionUs); + MaskingMediaPeriod mediaPeriod = new MaskingMediaPeriod(id, allocator, startPositionUs); + mediaPeriod.setMediaSource(mediaSource); if (isPrepared) { MediaPeriodId idInSource = id.copyWithPeriodUid(getInternalPeriodUid(id.periodUid)); mediaPeriod.createPeriod(idInSource); @@ -100,12 +103,9 @@ public MaskingMediaPeriod createPeriod( // unset and we don't load beyond periods with unset duration. We need to figure out how to // handle the prepare positions of multiple deferred media periods, should that ever change. 
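A sketch of the deferred flow handled here, assuming the caller owns both objects; the helper name is illustrative, while the MaskingMediaPeriod calls are the ones introduced by this patch.

import com.google.android.exoplayer2.source.MaskingMediaPeriod;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import com.google.android.exoplayer2.upstream.Allocator;

final class DeferredPeriodSketch {

  static MaskingMediaPeriod createDeferred(
      MediaPeriodId id, Allocator allocator, long startPositionUs, MediaSource realSource) {
    MaskingMediaPeriod period = new MaskingMediaPeriod(id, allocator, startPositionUs);
    period.setMediaSource(realSource); // may happen later, once the real source exists
    period.createPeriod(id);           // only now is MediaSource.createPeriod actually called
    return period;
  }
}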
unpreparedMaskingMediaPeriod = mediaPeriod; - unpreparedMaskingMediaPeriodEventDispatcher = - createEventDispatcher(/* windowIndex= */ 0, id, /* mediaTimeOffsetMs= */ 0); - unpreparedMaskingMediaPeriodEventDispatcher.mediaPeriodCreated(); if (!hasStartedPreparing) { hasStartedPreparing = true; - prepareChildSource(/* id= */ null, mediaSource); + prepareChildSource(); } } return mediaPeriod; @@ -115,8 +115,6 @@ public MaskingMediaPeriod createPeriod( public void releasePeriod(MediaPeriod mediaPeriod) { ((MaskingMediaPeriod) mediaPeriod).releasePeriod(); if (mediaPeriod == unpreparedMaskingMediaPeriod) { - Assertions.checkNotNull(unpreparedMaskingMediaPeriodEventDispatcher).mediaPeriodReleased(); - unpreparedMaskingMediaPeriodEventDispatcher = null; unpreparedMaskingMediaPeriod = null; } } @@ -129,14 +127,23 @@ public void releaseSourceInternal() { } @Override - protected void onChildSourceInfoRefreshed( - Void id, MediaSource mediaSource, Timeline newTimeline) { + protected void onChildSourceInfoRefreshed(Timeline newTimeline) { + @Nullable MediaPeriodId idForMaskingPeriodPreparation = null; if (isPrepared) { timeline = timeline.cloneWithUpdatedTimeline(newTimeline); + if (unpreparedMaskingMediaPeriod != null) { + // Reset override in case the duration changed and we need to update our override. + setPreparePositionOverrideToUnpreparedMaskingPeriod( + unpreparedMaskingMediaPeriod.getPreparePositionOverrideUs()); + } } else if (newTimeline.isEmpty()) { timeline = - MaskingTimeline.createWithRealTimeline( - newTimeline, Window.SINGLE_WINDOW_UID, MaskingTimeline.DUMMY_EXTERNAL_PERIOD_UID); + hasRealTimeline + ? timeline.cloneWithUpdatedTimeline(newTimeline) + : MaskingTimeline.createWithRealTimeline( + newTimeline, + Window.SINGLE_WINDOW_UID, + MaskingTimeline.MASKING_EXTERNAL_PERIOD_UID); } else { // Determine first period and the start position. // This will be: @@ -145,103 +152,132 @@ protected void onChildSourceInfoRefreshed( // a non-zero initial seek position in the window. // 3. The default window start position if the deferred period has a prepare position of zero // under the assumption that the prepare position of zero was used because it's the - // default position of the DummyTimeline window. Note that this will override an + // default position of the PlaceholderTimeline window. Note that this will override an // intentional seek to zero for a window with a non-zero default position. This is // unlikely to be a problem as a non-zero default position usually only occurs for live // playbacks and seeking to zero in a live window would cause BehindLiveWindowExceptions // anyway. 
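The start-position decision described in the comment above, restated as a standalone sketch (simplified: it ignores the period-to-window offset that the code below applies).

final class WindowStartPositionSketch {

  // Prefer the new window's default position unless the deferred period carried an explicit,
  // different prepare position (interpreted as a seek).
  static long chooseWindowStartUs(
      long newWindowDefaultPositionUs,
      long deferredWindowPreparePositionUs,
      long oldWindowDefaultPositionUs) {
    return deferredWindowPreparePositionUs != oldWindowDefaultPositionUs
        ? deferredWindowPreparePositionUs
        : newWindowDefaultPositionUs;
  }
}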
newTimeline.getWindow(/* windowIndex= */ 0, window); long windowStartPositionUs = window.getDefaultPositionUs(); + Object windowUid = window.uid; if (unpreparedMaskingMediaPeriod != null) { long periodPreparePositionUs = unpreparedMaskingMediaPeriod.getPreparePositionUs(); - if (periodPreparePositionUs != 0) { - windowStartPositionUs = periodPreparePositionUs; + timeline.getPeriodByUid(unpreparedMaskingMediaPeriod.id.periodUid, period); + long windowPreparePositionUs = period.getPositionInWindowUs() + periodPreparePositionUs; + long oldWindowDefaultPositionUs = + timeline.getWindow(/* windowIndex= */ 0, window).getDefaultPositionUs(); + if (windowPreparePositionUs != oldWindowDefaultPositionUs) { + windowStartPositionUs = windowPreparePositionUs; } } - Object windowUid = window.uid; - Pair periodPosition = - newTimeline.getPeriodPosition( + Pair periodUidAndPositionUs = + newTimeline.getPeriodPositionUs( window, period, /* windowIndex= */ 0, windowStartPositionUs); - Object periodUid = periodPosition.first; - long periodPositionUs = periodPosition.second; - timeline = MaskingTimeline.createWithRealTimeline(newTimeline, windowUid, periodUid); + Object periodUid = periodUidAndPositionUs.first; + long periodPositionUs = periodUidAndPositionUs.second; + timeline = + hasRealTimeline + ? timeline.cloneWithUpdatedTimeline(newTimeline) + : MaskingTimeline.createWithRealTimeline(newTimeline, windowUid, periodUid); if (unpreparedMaskingMediaPeriod != null) { MaskingMediaPeriod maskingPeriod = unpreparedMaskingMediaPeriod; - maskingPeriod.overridePreparePositionUs(periodPositionUs); - MediaPeriodId idInSource = + setPreparePositionOverrideToUnpreparedMaskingPeriod(periodPositionUs); + idForMaskingPeriodPreparation = maskingPeriod.id.copyWithPeriodUid(getInternalPeriodUid(maskingPeriod.id.periodUid)); - maskingPeriod.createPeriod(idInSource); } } + hasRealTimeline = true; isPrepared = true; refreshSourceInfo(this.timeline); + if (idForMaskingPeriodPreparation != null) { + Assertions.checkNotNull(unpreparedMaskingMediaPeriod) + .createPeriod(idForMaskingPeriodPreparation); + } } - @Nullable @Override - protected MediaPeriodId getMediaPeriodIdForChildMediaPeriodId( - Void id, MediaPeriodId mediaPeriodId) { + @Nullable + protected MediaPeriodId getMediaPeriodIdForChildMediaPeriodId(MediaPeriodId mediaPeriodId) { return mediaPeriodId.copyWithPeriodUid(getExternalPeriodUid(mediaPeriodId.periodUid)); } - @Override - protected boolean shouldDispatchCreateOrReleaseEvent(MediaPeriodId mediaPeriodId) { - // Suppress create and release events for the period created while the source was still - // unprepared, as we send these events from this class. - return unpreparedMaskingMediaPeriod == null - || !mediaPeriodId.equals(unpreparedMaskingMediaPeriod.id); - } - private Object getInternalPeriodUid(Object externalPeriodUid) { - return externalPeriodUid.equals(MaskingTimeline.DUMMY_EXTERNAL_PERIOD_UID) + return timeline.replacedInternalPeriodUid != null + && externalPeriodUid.equals(MaskingTimeline.MASKING_EXTERNAL_PERIOD_UID) ? timeline.replacedInternalPeriodUid : externalPeriodUid; } private Object getExternalPeriodUid(Object internalPeriodUid) { - return timeline.replacedInternalPeriodUid.equals(internalPeriodUid) - ? MaskingTimeline.DUMMY_EXTERNAL_PERIOD_UID + return timeline.replacedInternalPeriodUid != null + && timeline.replacedInternalPeriodUid.equals(internalPeriodUid) + ? 
MaskingTimeline.MASKING_EXTERNAL_PERIOD_UID : internalPeriodUid; } + @RequiresNonNull("unpreparedMaskingMediaPeriod") + private void setPreparePositionOverrideToUnpreparedMaskingPeriod(long preparePositionOverrideUs) { + MaskingMediaPeriod maskingPeriod = unpreparedMaskingMediaPeriod; + int maskingPeriodIndex = timeline.getIndexOfPeriod(maskingPeriod.id.periodUid); + if (maskingPeriodIndex == C.INDEX_UNSET) { + // The new timeline doesn't contain this period anymore. This can happen if the media source + // has multiple periods and removed the first period with a timeline update. Ignore the + // update, as the non-existing period will be released anyway as soon as the player receives + // this new timeline. + return; + } + long periodDurationUs = timeline.getPeriod(maskingPeriodIndex, period).durationUs; + if (periodDurationUs != C.TIME_UNSET) { + // Ensure the overridden position doesn't exceed the period duration. + if (preparePositionOverrideUs >= periodDurationUs) { + preparePositionOverrideUs = max(0, periodDurationUs - 1); + } + } + maskingPeriod.overridePreparePositionUs(preparePositionOverrideUs); + } + /** * Timeline used as placeholder for an unprepared media source. After preparation, a - * MaskingTimeline is used to keep the originally assigned dummy period ID. + * MaskingTimeline is used to keep the originally assigned masking period ID. */ private static final class MaskingTimeline extends ForwardingTimeline { - public static final Object DUMMY_EXTERNAL_PERIOD_UID = new Object(); + public static final Object MASKING_EXTERNAL_PERIOD_UID = new Object(); - private final Object replacedInternalWindowUid; - private final Object replacedInternalPeriodUid; + @Nullable private final Object replacedInternalWindowUid; + @Nullable private final Object replacedInternalPeriodUid; /** - * Returns an instance with a dummy timeline using the provided window tag. + * Returns an instance with a placeholder timeline using the provided {@link MediaItem}. * - * @param windowTag A window tag. + * @param mediaItem A {@link MediaItem}. */ - public static MaskingTimeline createWithDummyTimeline(@Nullable Object windowTag) { + public static MaskingTimeline createWithPlaceholderTimeline(MediaItem mediaItem) { return new MaskingTimeline( - new DummyTimeline(windowTag), Window.SINGLE_WINDOW_UID, DUMMY_EXTERNAL_PERIOD_UID); + new PlaceholderTimeline(mediaItem), + Window.SINGLE_WINDOW_UID, + MASKING_EXTERNAL_PERIOD_UID); } /** * Returns an instance with a real timeline, replacing the provided period ID with the already - * assigned dummy period ID. + * assigned masking period ID. * * @param timeline The real timeline. * @param firstWindowUid The window UID in the timeline which will be replaced by the already * assigned {@link Window#SINGLE_WINDOW_UID}. * @param firstPeriodUid The period UID in the timeline which will be replaced by the already - * assigned {@link #DUMMY_EXTERNAL_PERIOD_UID}. + * assigned {@link #MASKING_EXTERNAL_PERIOD_UID}. 
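The same clamping rule as setPreparePositionOverrideToUnpreparedMaskingPeriod above, pulled out as a small standalone sketch; the helper class is hypothetical.

import com.google.android.exoplayer2.C;

final class PrepareOverrideClampSketch {

  // Once the period duration is known, keep any prepare-position override strictly inside it.
  static long clamp(long preparePositionOverrideUs, long periodDurationUs) {
    if (periodDurationUs != C.TIME_UNSET && preparePositionOverrideUs >= periodDurationUs) {
      return Math.max(0, periodDurationUs - 1);
    }
    return preparePositionOverrideUs;
  }
}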
*/ public static MaskingTimeline createWithRealTimeline( - Timeline timeline, Object firstWindowUid, Object firstPeriodUid) { + Timeline timeline, @Nullable Object firstWindowUid, @Nullable Object firstPeriodUid) { return new MaskingTimeline(timeline, firstWindowUid, firstPeriodUid); } private MaskingTimeline( - Timeline timeline, Object replacedInternalWindowUid, Object replacedInternalPeriodUid) { + Timeline timeline, + @Nullable Object replacedInternalWindowUid, + @Nullable Object replacedInternalPeriodUid) { super(timeline); this.replacedInternalWindowUid = replacedInternalWindowUid; this.replacedInternalPeriodUid = replacedInternalPeriodUid; @@ -256,11 +292,6 @@ public MaskingTimeline cloneWithUpdatedTimeline(Timeline timeline) { return new MaskingTimeline(timeline, replacedInternalWindowUid, replacedInternalPeriodUid); } - /** Returns the wrapped timeline. */ - public Timeline getTimeline() { - return timeline; - } - @Override public Window getWindow(int windowIndex, Window window, long defaultPositionProjectionUs) { timeline.getWindow(windowIndex, window, defaultPositionProjectionUs); @@ -273,8 +304,8 @@ public Window getWindow(int windowIndex, Window window, long defaultPositionProj @Override public Period getPeriod(int periodIndex, Period period, boolean setIds) { timeline.getPeriod(periodIndex, period, setIds); - if (Util.areEqual(period.uid, replacedInternalPeriodUid)) { - period.uid = DUMMY_EXTERNAL_PERIOD_UID; + if (Util.areEqual(period.uid, replacedInternalPeriodUid) && setIds) { + period.uid = MASKING_EXTERNAL_PERIOD_UID; } return period; } @@ -282,23 +313,27 @@ public Period getPeriod(int periodIndex, Period period, boolean setIds) { @Override public int getIndexOfPeriod(Object uid) { return timeline.getIndexOfPeriod( - DUMMY_EXTERNAL_PERIOD_UID.equals(uid) ? replacedInternalPeriodUid : uid); + MASKING_EXTERNAL_PERIOD_UID.equals(uid) && replacedInternalPeriodUid != null + ? replacedInternalPeriodUid + : uid); } @Override public Object getUidOfPeriod(int periodIndex) { Object uid = timeline.getUidOfPeriod(periodIndex); - return Util.areEqual(uid, replacedInternalPeriodUid) ? DUMMY_EXTERNAL_PERIOD_UID : uid; + return Util.areEqual(uid, replacedInternalPeriodUid) ? MASKING_EXTERNAL_PERIOD_UID : uid; } } - /** Dummy placeholder timeline with one dynamic window with a period of indeterminate duration. */ - public static final class DummyTimeline extends Timeline { + /** A timeline with one dynamic window with a period of indeterminate duration. */ + @VisibleForTesting + public static final class PlaceholderTimeline extends Timeline { - @Nullable private final Object tag; + private final MediaItem mediaItem; - public DummyTimeline(@Nullable Object tag) { - this.tag = tag; + /** Creates a new instance with the given media item. */ + public PlaceholderTimeline(MediaItem mediaItem) { + this.mediaItem = mediaItem; } @Override @@ -308,21 +343,24 @@ public int getWindowCount() { @Override public Window getWindow(int windowIndex, Window window, long defaultPositionProjectionUs) { - return window.set( + window.set( Window.SINGLE_WINDOW_UID, - tag, + mediaItem, /* manifest= */ null, /* presentationStartTimeMs= */ C.TIME_UNSET, /* windowStartTimeMs= */ C.TIME_UNSET, + /* elapsedRealtimeEpochOffsetMs= */ C.TIME_UNSET, /* isSeekable= */ false, // Dynamic window to indicate pending timeline updates. 
/* isDynamic= */ true, - /* isLive= */ false, + /* liveConfiguration= */ null, /* defaultPositionUs= */ 0, /* durationUs= */ C.TIME_UNSET, /* firstPeriodIndex= */ 0, /* lastPeriodIndex= */ 0, /* positionInFirstPeriodUs= */ 0); + window.isPlaceholder = true; + return window; } @Override @@ -332,22 +370,25 @@ public int getPeriodCount() { @Override public Period getPeriod(int periodIndex, Period period, boolean setIds) { - return period.set( - /* id= */ 0, - /* uid= */ MaskingTimeline.DUMMY_EXTERNAL_PERIOD_UID, + period.set( + /* id= */ setIds ? 0 : null, + /* uid= */ setIds ? MaskingTimeline.MASKING_EXTERNAL_PERIOD_UID : null, /* windowIndex= */ 0, /* durationUs = */ C.TIME_UNSET, - /* positionInWindowUs= */ 0); + /* positionInWindowUs= */ 0, + /* adPlaybackState= */ AdPlaybackState.NONE, + /* isPlaceholder= */ true); + return period; } @Override public int getIndexOfPeriod(Object uid) { - return uid == MaskingTimeline.DUMMY_EXTERNAL_PERIOD_UID ? 0 : C.INDEX_UNSET; + return uid == MaskingTimeline.MASKING_EXTERNAL_PERIOD_UID ? 0 : C.INDEX_UNSET; } @Override public Object getUidOfPeriod(int periodIndex) { - return MaskingTimeline.DUMMY_EXTERNAL_PERIOD_UID; + return MaskingTimeline.MASKING_EXTERNAL_PERIOD_UID; } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaLoadData.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaLoadData.java new file mode 100644 index 0000000000..7ff7d234fa --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaLoadData.java @@ -0,0 +1,100 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.C.DataType; +import com.google.android.exoplayer2.C.SelectionReason; +import com.google.android.exoplayer2.C.TrackType; +import com.google.android.exoplayer2.Format; + +/** Descriptor for data being loaded or selected by a {@link MediaSource}. */ +public final class MediaLoadData { + + /** The {@link DataType data type}. */ + public final @DataType int dataType; + /** + * One of the {@link TrackType track types}, which is a media track type if the data corresponds + * to media of a specific type, or {@link C#TRACK_TYPE_UNKNOWN} otherwise. + */ + public final @TrackType int trackType; + /** + * The format of the track to which the data belongs. Null if the data does not belong to a + * specific track. + */ + @Nullable public final Format trackFormat; + /** + * One of the {@link SelectionReason selection reasons} if the data belongs to a track. {@link + * C#SELECTION_REASON_UNKNOWN} otherwise. + */ + public final @C.SelectionReason int trackSelectionReason; + /** + * Optional data associated with the selection of the track to which the data belongs. Null if the + * data does not belong to a track. 
+ */ + @Nullable public final Object trackSelectionData; + /** + * The start time of the media in the {@link MediaPeriod}, or {@link C#TIME_UNSET} if the data + * does not belong to a specific {@link MediaPeriod}. + */ + public final long mediaStartTimeMs; + /** + * The end time of the media in the {@link MediaPeriod}, or {@link C#TIME_UNSET} if the data does + * not belong to a specific {@link MediaPeriod} or the end time is unknown. + */ + public final long mediaEndTimeMs; + + /** Creates an instance with the given {@link #dataType}. */ + public MediaLoadData(@DataType int dataType) { + this( + dataType, + /* trackType= */ C.TRACK_TYPE_UNKNOWN, + /* trackFormat= */ null, + /* trackSelectionReason= */ C.SELECTION_REASON_UNKNOWN, + /* trackSelectionData= */ null, + /* mediaStartTimeMs= */ C.TIME_UNSET, + /* mediaEndTimeMs= */ C.TIME_UNSET); + } + + /** + * Creates media load data. + * + * @param dataType See {@link #dataType}. + * @param trackType See {@link #trackType}. + * @param trackFormat See {@link #trackFormat}. + * @param trackSelectionReason See {@link #trackSelectionReason}. + * @param trackSelectionData See {@link #trackSelectionData}. + * @param mediaStartTimeMs See {@link #mediaStartTimeMs}. + * @param mediaEndTimeMs See {@link #mediaEndTimeMs}. + */ + public MediaLoadData( + @DataType int dataType, + @TrackType int trackType, + @Nullable Format trackFormat, + @SelectionReason int trackSelectionReason, + @Nullable Object trackSelectionData, + long mediaStartTimeMs, + long mediaEndTimeMs) { + this.dataType = dataType; + this.trackType = trackType; + this.trackFormat = trackFormat; + this.trackSelectionReason = trackSelectionReason; + this.trackSelectionData = trackSelectionData; + this.mediaStartTimeMs = mediaStartTimeMs; + this.mediaEndTimeMs = mediaEndTimeMs; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaParserExtractorAdapter.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaParserExtractorAdapter.java new file mode 100644 index 0000000000..51926cdf43 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaParserExtractorAdapter.java @@ -0,0 +1,133 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
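A small sketch, not part of the patch, showing the convenience constructor of the new top-level MediaLoadData added above; the helper and the choice of C.DATA_TYPE_MANIFEST are illustrative only.

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.source.MediaLoadData;

final class MediaLoadDataSketch {

  // Everything except the data type defaults to "unknown"/"unset" via the one-arg constructor.
  static MediaLoadData manifestLoad() {
    return new MediaLoadData(C.DATA_TYPE_MANIFEST);
  }
}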
+ */ +package com.google.android.exoplayer2.source; + +import static com.google.android.exoplayer2.source.mediaparser.MediaParserUtil.PARAMETER_EAGERLY_EXPOSE_TRACK_TYPE; +import static com.google.android.exoplayer2.source.mediaparser.MediaParserUtil.PARAMETER_INCLUDE_SUPPLEMENTAL_DATA; +import static com.google.android.exoplayer2.source.mediaparser.MediaParserUtil.PARAMETER_IN_BAND_CRYPTO_INFO; + +import android.annotation.SuppressLint; +import android.media.MediaParser; +import android.media.MediaParser.SeekPoint; +import android.net.Uri; +import android.util.Pair; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.extractor.Extractor; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.PositionHolder; +import com.google.android.exoplayer2.source.mediaparser.InputReaderAdapterV30; +import com.google.android.exoplayer2.source.mediaparser.MediaParserUtil; +import com.google.android.exoplayer2.source.mediaparser.OutputConsumerAdapterV30; +import com.google.android.exoplayer2.upstream.DataReader; +import com.google.android.exoplayer2.util.Util; +import java.io.IOException; +import java.util.List; +import java.util.Map; + +/** {@link ProgressiveMediaExtractor} implemented on top of the platform's {@link MediaParser}. */ +@RequiresApi(30) +public final class MediaParserExtractorAdapter implements ProgressiveMediaExtractor { + + /** + * A {@link ProgressiveMediaExtractor.Factory} for instances of this class, which rely on platform + * extractors through {@link MediaParser}. + */ + public static final ProgressiveMediaExtractor.Factory FACTORY = MediaParserExtractorAdapter::new; + + private final OutputConsumerAdapterV30 outputConsumerAdapter; + private final InputReaderAdapterV30 inputReaderAdapter; + private final MediaParser mediaParser; + private String parserName; + + @SuppressLint("WrongConstant") + public MediaParserExtractorAdapter(PlayerId playerId) { + // TODO: Add support for injecting the desired extractor list. + outputConsumerAdapter = new OutputConsumerAdapterV30(); + inputReaderAdapter = new InputReaderAdapterV30(); + mediaParser = MediaParser.create(outputConsumerAdapter); + mediaParser.setParameter(PARAMETER_EAGERLY_EXPOSE_TRACK_TYPE, true); + mediaParser.setParameter(PARAMETER_IN_BAND_CRYPTO_INFO, true); + mediaParser.setParameter(PARAMETER_INCLUDE_SUPPLEMENTAL_DATA, true); + parserName = MediaParser.PARSER_NAME_UNKNOWN; + if (Util.SDK_INT >= 31) { + MediaParserUtil.setLogSessionIdOnMediaParser(mediaParser, playerId); + } + } + + @Override + public void init( + DataReader dataReader, + Uri uri, + Map> responseHeaders, + long position, + long length, + ExtractorOutput output) + throws IOException { + outputConsumerAdapter.setExtractorOutput(output); + inputReaderAdapter.setDataReader(dataReader, length); + inputReaderAdapter.setCurrentPosition(position); + String currentParserName = mediaParser.getParserName(); + if (MediaParser.PARSER_NAME_UNKNOWN.equals(currentParserName)) { + // We need to sniff. + mediaParser.advance(inputReaderAdapter); + parserName = mediaParser.getParserName(); + outputConsumerAdapter.setSelectedParserName(parserName); + } else if (!currentParserName.equals(parserName)) { + // The parser was created by name. 
+ parserName = mediaParser.getParserName(); + outputConsumerAdapter.setSelectedParserName(parserName); + } else { + // The parser implementation has already been selected. Do nothing. + } + } + + @Override + public void release() { + mediaParser.release(); + } + + @Override + public void disableSeekingOnMp3Streams() { + if (MediaParser.PARSER_NAME_MP3.equals(parserName)) { + outputConsumerAdapter.disableSeeking(); + } + } + + @Override + public long getCurrentInputPosition() { + return inputReaderAdapter.getPosition(); + } + + @Override + public void seek(long position, long seekTimeUs) { + inputReaderAdapter.setCurrentPosition(position); + Pair seekPoints = outputConsumerAdapter.getSeekPoints(seekTimeUs); + mediaParser.seek(seekPoints.second.position == position ? seekPoints.second : seekPoints.first); + } + + @Override + public int read(PositionHolder positionHolder) throws IOException { + boolean shouldContinue = mediaParser.advance(inputReaderAdapter); + positionHolder.position = inputReaderAdapter.getAndResetSeekPosition(); + return !shouldContinue + ? Extractor.RESULT_END_OF_INPUT + : positionHolder.position != C.POSITION_UNSET + ? Extractor.RESULT_SEEK + : Extractor.RESULT_CONTINUE; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaPeriod.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaPeriod.java index 2e2cf9caba..bcbf95a431 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaPeriod.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaPeriod.java @@ -21,7 +21,7 @@ import com.google.android.exoplayer2.Timeline; import com.google.android.exoplayer2.offline.StreamKey; import com.google.android.exoplayer2.source.MediaSource.MediaSourceCaller; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import java.io.IOException; import java.util.Collections; import java.util.List; @@ -29,21 +29,23 @@ /** * Loads media corresponding to a {@link Timeline.Period}, and allows that media to be read. All - * methods are called on the player's internal playback thread, as described in the - * {@link ExoPlayer} Javadoc. + * methods are called on the player's internal playback thread, as described in the {@link + * ExoPlayer} Javadoc. + * + *
<p>
      A {@link MediaPeriod} may only be able to provide one {@link SampleStream} corresponding to a + * group at any given time, however this {@link SampleStream} may adapt between multiple tracks + * within the group. */ public interface MediaPeriod extends SequenceableLoader { - /** - * A callback to be notified of {@link MediaPeriod} events. - */ + /** A callback to be notified of {@link MediaPeriod} events. */ interface Callback extends SequenceableLoader.Callback { /** * Called when preparation completes. * *
<p>
      Called on the playback thread. After invoking this method, the {@link MediaPeriod} can - * expect for {@link #selectTracks(TrackSelection[], boolean[], SampleStream[], boolean[], + * expect for {@link #selectTracks(ExoTrackSelection[], boolean[], SampleStream[], boolean[], * long)} to be called with the initial track selection. * * @param mediaPeriod The prepared {@link MediaPeriod}. @@ -88,17 +90,17 @@ interface Callback extends SequenceableLoader.Callback { /** * Returns a list of {@link StreamKey StreamKeys} which allow to filter the media in this period - * to load only the parts needed to play the provided {@link TrackSelection TrackSelections}. + * to load only the parts needed to play the provided {@link ExoTrackSelection TrackSelections}. * *
<p>
      This method is only called after the period has been prepared. * - * @param trackSelections The {@link TrackSelection TrackSelections} describing the tracks for + * @param trackSelections The {@link ExoTrackSelection TrackSelections} describing the tracks for * which stream keys are requested. * @return The corresponding {@link StreamKey StreamKeys} for the selected tracks, or an empty * list if filtering is not possible and the entire media needs to be loaded to play the * selected tracks. */ - default List getStreamKeys(List trackSelections) { + default List getStreamKeys(List trackSelections) { return Collections.emptyList(); } @@ -113,8 +115,8 @@ default List getStreamKeys(List trackSelections) { * corresponding flag in {@code streamResetFlags} will be set to true. This flag will also be set * if a new sample stream is created. * - *
<p>
      Note that previously passed {@link TrackSelection TrackSelections} are no longer valid, and - * any references to them must be updated to point to the new selections. + *
<p>
      Note that previously passed {@link ExoTrackSelection TrackSelections} are no longer valid, + * and any references to them must be updated to point to the new selections. * *
<p>
      This method is only called after the period has been prepared. * @@ -133,7 +135,7 @@ default List getStreamKeys(List trackSelections) { * @return The actual position at which the tracks were enabled, in microseconds. */ long selectTracks( - @NullableType TrackSelection[] selections, + @NullableType ExoTrackSelection[] selections, boolean[] mayRetainStreamFlags, @NullableType SampleStream[] streams, boolean[] streamResetFlags, @@ -239,8 +241,8 @@ long selectTracks( * *
<p>
      This method is only called after the period has been prepared. * - *
<p>
      A period may choose to discard buffered media so that it can be re-buffered in a different - * quality. + *
<p>
      A period may choose to discard buffered media or cancel ongoing loads so that media can be + * re-buffered in a different quality. * * @param positionUs The current playback position in microseconds. If playback of this period has * not yet started, the value will be the starting position in this period minus the duration diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaPeriodId.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaPeriodId.java new file mode 100644 index 0000000000..8192486a1b --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaPeriodId.java @@ -0,0 +1,195 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Timeline; + +/** + * Identifies a specific playback of a {@link Timeline.Period}. + * + *
<p>
      A {@link Timeline.Period} can be played multiple times, for example if it is repeated. Each + * instance of this class identifies a specific playback of a {@link Timeline.Period}. + * + *
<p>
      In ExoPlayer's implementation, {@link MediaPeriodId} identifies a {@code MediaPeriod}. + */ +// TODO(b/172315872) Should be final, but subclassed in MediaSource for backward-compatibility. +public class MediaPeriodId { + + /** The unique id of the timeline period. */ + public final Object periodUid; + + /** + * If the media period is in an ad group, the index of the ad group in the period. {@link + * C#INDEX_UNSET} otherwise. + */ + public final int adGroupIndex; + + /** + * If the media period is in an ad group, the index of the ad in its ad group in the period. + * {@link C#INDEX_UNSET} otherwise. + */ + public final int adIndexInAdGroup; + + /** + * The sequence number of the window in the buffered sequence of windows this media period is part + * of. {@link C#INDEX_UNSET} if the media period id is not part of a buffered sequence of windows. + */ + public final long windowSequenceNumber; + + /** + * The index of the next ad group to which the media period's content is clipped, or {@link + * C#INDEX_UNSET} if there is no following ad group or if this media period is an ad. + */ + public final int nextAdGroupIndex; + + /** + * Creates a media period identifier for a period which is not part of a buffered sequence of + * windows. + * + * @param periodUid The unique id of the timeline period. + */ + public MediaPeriodId(Object periodUid) { + this(periodUid, /* windowSequenceNumber= */ C.INDEX_UNSET); + } + + /** + * Creates a media period identifier for the specified period in the timeline. + * + * @param periodUid The unique id of the timeline period. + * @param windowSequenceNumber The sequence number of the window in the buffered sequence of + * windows this media period is part of. + */ + public MediaPeriodId(Object periodUid, long windowSequenceNumber) { + this( + periodUid, + /* adGroupIndex= */ C.INDEX_UNSET, + /* adIndexInAdGroup= */ C.INDEX_UNSET, + windowSequenceNumber, + /* nextAdGroupIndex= */ C.INDEX_UNSET); + } + + /** + * Creates a media period identifier for the specified clipped period in the timeline. + * + * @param periodUid The unique id of the timeline period. + * @param windowSequenceNumber The sequence number of the window in the buffered sequence of + * windows this media period is part of. + * @param nextAdGroupIndex The index of the next ad group to which the media period's content is + * clipped. + */ + public MediaPeriodId(Object periodUid, long windowSequenceNumber, int nextAdGroupIndex) { + this( + periodUid, + /* adGroupIndex= */ C.INDEX_UNSET, + /* adIndexInAdGroup= */ C.INDEX_UNSET, + windowSequenceNumber, + nextAdGroupIndex); + } + + /** + * Creates a media period identifier that identifies an ad within an ad group at the specified + * timeline period. + * + * @param periodUid The unique id of the timeline period that contains the ad group. + * @param adGroupIndex The index of the ad group. + * @param adIndexInAdGroup The index of the ad in the ad group. + * @param windowSequenceNumber The sequence number of the window in the buffered sequence of + * windows this media period is part of. + */ + public MediaPeriodId( + Object periodUid, int adGroupIndex, int adIndexInAdGroup, long windowSequenceNumber) { + this( + periodUid, + adGroupIndex, + adIndexInAdGroup, + windowSequenceNumber, + /* nextAdGroupIndex= */ C.INDEX_UNSET); + } + + /** Copy constructor for inheritance. 
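An illustrative sketch (not part of the patch) of the public constructors and helpers defined above; periodUid stands for whatever unique object the timeline reports for the period, and the helper class is hypothetical.

import com.google.android.exoplayer2.source.MediaPeriodId;

final class MediaPeriodIdSketch {

  static void demo(Object periodUid) {
    MediaPeriodId content = new MediaPeriodId(periodUid, /* windowSequenceNumber= */ 0);
    MediaPeriodId ad =
        new MediaPeriodId(
            periodUid, /* adGroupIndex= */ 0, /* adIndexInAdGroup= */ 0,
            /* windowSequenceNumber= */ 0);
    boolean isAd = ad.isAd(); // true, because an ad group index is set
    // copyWithPeriodUid returns the same instance when the uid is unchanged.
    boolean sameInstance = content.copyWithPeriodUid(periodUid) == content;
  }
}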
*/ + // TODO(b/172315872) Delete when client have migrated from MediaSource.MediaPeriodId + protected MediaPeriodId(MediaPeriodId mediaPeriodId) { + this.periodUid = mediaPeriodId.periodUid; + this.adGroupIndex = mediaPeriodId.adGroupIndex; + this.adIndexInAdGroup = mediaPeriodId.adIndexInAdGroup; + this.windowSequenceNumber = mediaPeriodId.windowSequenceNumber; + this.nextAdGroupIndex = mediaPeriodId.nextAdGroupIndex; + } + + private MediaPeriodId( + Object periodUid, + int adGroupIndex, + int adIndexInAdGroup, + long windowSequenceNumber, + int nextAdGroupIndex) { + this.periodUid = periodUid; + this.adGroupIndex = adGroupIndex; + this.adIndexInAdGroup = adIndexInAdGroup; + this.windowSequenceNumber = windowSequenceNumber; + this.nextAdGroupIndex = nextAdGroupIndex; + } + + /** Returns a copy of this period identifier but with {@code newPeriodUid} as its period uid. */ + public MediaPeriodId copyWithPeriodUid(Object newPeriodUid) { + return periodUid.equals(newPeriodUid) + ? this + : new MediaPeriodId( + newPeriodUid, adGroupIndex, adIndexInAdGroup, windowSequenceNumber, nextAdGroupIndex); + } + + /** Returns a copy of this period identifier with a new {@code windowSequenceNumber}. */ + public MediaPeriodId copyWithWindowSequenceNumber(long windowSequenceNumber) { + return this.windowSequenceNumber == windowSequenceNumber + ? this + : new MediaPeriodId( + periodUid, adGroupIndex, adIndexInAdGroup, windowSequenceNumber, nextAdGroupIndex); + } + + /** Returns whether this period identifier identifies an ad in an ad group in a period. */ + public boolean isAd() { + return adGroupIndex != C.INDEX_UNSET; + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof MediaPeriodId)) { + return false; + } + + MediaPeriodId periodId = (MediaPeriodId) obj; + return periodUid.equals(periodId.periodUid) + && adGroupIndex == periodId.adGroupIndex + && adIndexInAdGroup == periodId.adIndexInAdGroup + && windowSequenceNumber == periodId.windowSequenceNumber + && nextAdGroupIndex == periodId.nextAdGroupIndex; + } + + @Override + public int hashCode() { + int result = 17; + result = 31 * result + periodUid.hashCode(); + result = 31 * result + adGroupIndex; + result = 31 * result + adIndexInAdGroup; + result = 31 * result + (int) windowSequenceNumber; + result = 31 * result + nextAdGroupIndex; + return result; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaSource.java index 5ee980d01f..2cc97c3836 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaSource.java @@ -18,196 +18,150 @@ import android.os.Handler; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ExoPlayer; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.drm.DrmSessionEventListener; +import com.google.android.exoplayer2.drm.DrmSessionManager; +import com.google.android.exoplayer2.drm.DrmSessionManagerProvider; import com.google.android.exoplayer2.upstream.Allocator; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; import com.google.android.exoplayer2.upstream.TransferListener; import java.io.IOException; /** - * 
Defines and provides media to be played by an {@link com.google.android.exoplayer2.ExoPlayer}. A - * MediaSource has two main responsibilities: + * Defines and provides media to be played by an {@link ExoPlayer}. A MediaSource has two main + * responsibilities: * *

        *
      • To provide the player with a {@link Timeline} defining the structure of its media, and to * provide a new timeline whenever the structure of the media changes. The MediaSource * provides these timelines by calling {@link MediaSourceCaller#onSourceInfoRefreshed} on the * {@link MediaSourceCaller}s passed to {@link #prepareSource(MediaSourceCaller, - * TransferListener)}. + * TransferListener, PlayerId)}. *
      • To provide {@link MediaPeriod} instances for the periods in its timeline. MediaPeriods are * obtained by calling {@link #createPeriod(MediaPeriodId, Allocator, long)}, and provide a * way for the player to load and read the media. *
      * * All methods are called on the player's internal playback thread, as described in the {@link - * com.google.android.exoplayer2.ExoPlayer} Javadoc. They should not be called directly from - * application code. Instances can be re-used, but only for one {@link - * com.google.android.exoplayer2.ExoPlayer} instance simultaneously. + * ExoPlayer} Javadoc. They should not be called directly from application code. Instances can be + * re-used, but only for one {@link ExoPlayer} instance simultaneously. */ public interface MediaSource { - /** A caller of media sources, which will be notified of source events. */ - interface MediaSourceCaller { + /** Factory for creating {@link MediaSource MediaSources} from {@link MediaItem MediaItems}. */ + interface Factory { /** - * Called when the {@link Timeline} has been refreshed. - * - *
<p>
      Called on the playback thread. - * - * @param source The {@link MediaSource} whose info has been refreshed. - * @param timeline The source's timeline. + * An instance that throws {@link UnsupportedOperationException} from {@link #createMediaSource} + * and {@link #getSupportedTypes()}. */ - void onSourceInfoRefreshed(MediaSource source, Timeline timeline); - } - - /** Identifier for a {@link MediaPeriod}. */ - final class MediaPeriodId { - - /** The unique id of the timeline period. */ - public final Object periodUid; + @SuppressWarnings("deprecation") + Factory UNSUPPORTED = MediaSourceFactory.UNSUPPORTED; /** - * If the media period is in an ad group, the index of the ad group in the period. - * {@link C#INDEX_UNSET} otherwise. + * Sets the {@link DrmSessionManagerProvider} used to obtain a {@link DrmSessionManager} for a + * {@link MediaItem}. + * + * @return This factory, for convenience. */ - public final int adGroupIndex; + Factory setDrmSessionManagerProvider(DrmSessionManagerProvider drmSessionManagerProvider); /** - * If the media period is in an ad group, the index of the ad in its ad group in the period. - * {@link C#INDEX_UNSET} otherwise. + * Sets an optional {@link LoadErrorHandlingPolicy}. + * + * @return This factory, for convenience. */ - public final int adIndexInAdGroup; + Factory setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy loadErrorHandlingPolicy); /** - * The sequence number of the window in the buffered sequence of windows this media period is - * part of. {@link C#INDEX_UNSET} if the media period id is not part of a buffered sequence of - * windows. + * Returns the {@link C.ContentType content types} supported by media sources created by this + * factory. */ - public final long windowSequenceNumber; + @C.ContentType + int[] getSupportedTypes(); /** - * The index of the next ad group to which the media period's content is clipped, or {@link - * C#INDEX_UNSET} if there is no following ad group or if this media period is an ad. + * Creates a new {@link MediaSource} with the specified {@link MediaItem}. + * + * @param mediaItem The media item to play. + * @return The new {@link MediaSource media source}. */ - public final int nextAdGroupIndex; + MediaSource createMediaSource(MediaItem mediaItem); + } + + /** A caller of media sources, which will be notified of source events. */ + interface MediaSourceCaller { /** - * Creates a media period identifier for a dummy period which is not part of a buffered sequence - * of windows. + * Called when the {@link Timeline} has been refreshed. + * + *
<p>
      Called on the playback thread. * - * @param periodUid The unique id of the timeline period. + * @param source The {@link MediaSource} whose info has been refreshed. + * @param timeline The source's timeline. */ + void onSourceInfoRefreshed(MediaSource source, Timeline timeline); + } + + // TODO(b/172315872) Delete when all clients have been migrated to base class. + /** + * Identifier for a {@link MediaPeriod}. + * + *
<p>
      Extends for backward-compatibility {@link + * com.google.android.exoplayer2.source.MediaPeriodId}. + */ + final class MediaPeriodId extends com.google.android.exoplayer2.source.MediaPeriodId { + + /** See {@link com.google.android.exoplayer2.source.MediaPeriodId#MediaPeriodId(Object)}. */ public MediaPeriodId(Object periodUid) { - this(periodUid, /* windowSequenceNumber= */ C.INDEX_UNSET); + super(periodUid); } /** - * Creates a media period identifier for the specified period in the timeline. - * - * @param periodUid The unique id of the timeline period. - * @param windowSequenceNumber The sequence number of the window in the buffered sequence of - * windows this media period is part of. + * See {@link com.google.android.exoplayer2.source.MediaPeriodId#MediaPeriodId(Object, long)}. */ public MediaPeriodId(Object periodUid, long windowSequenceNumber) { - this( - periodUid, - /* adGroupIndex= */ C.INDEX_UNSET, - /* adIndexInAdGroup= */ C.INDEX_UNSET, - windowSequenceNumber, - /* nextAdGroupIndex= */ C.INDEX_UNSET); + super(periodUid, windowSequenceNumber); } /** - * Creates a media period identifier for the specified clipped period in the timeline. - * - * @param periodUid The unique id of the timeline period. - * @param windowSequenceNumber The sequence number of the window in the buffered sequence of - * windows this media period is part of. - * @param nextAdGroupIndex The index of the next ad group to which the media period's content is - * clipped. + * See {@link com.google.android.exoplayer2.source.MediaPeriodId#MediaPeriodId(Object, long, + * int)}. */ public MediaPeriodId(Object periodUid, long windowSequenceNumber, int nextAdGroupIndex) { - this( - periodUid, - /* adGroupIndex= */ C.INDEX_UNSET, - /* adIndexInAdGroup= */ C.INDEX_UNSET, - windowSequenceNumber, - nextAdGroupIndex); + super(periodUid, windowSequenceNumber, nextAdGroupIndex); } /** - * Creates a media period identifier that identifies an ad within an ad group at the specified - * timeline period. - * - * @param periodUid The unique id of the timeline period that contains the ad group. - * @param adGroupIndex The index of the ad group. - * @param adIndexInAdGroup The index of the ad in the ad group. - * @param windowSequenceNumber The sequence number of the window in the buffered sequence of - * windows this media period is part of. + * See {@link com.google.android.exoplayer2.source.MediaPeriodId#MediaPeriodId(Object, int, int, + * long)}. */ public MediaPeriodId( Object periodUid, int adGroupIndex, int adIndexInAdGroup, long windowSequenceNumber) { - this( - periodUid, - adGroupIndex, - adIndexInAdGroup, - windowSequenceNumber, - /* nextAdGroupIndex= */ C.INDEX_UNSET); + super(periodUid, adGroupIndex, adIndexInAdGroup, windowSequenceNumber); } - private MediaPeriodId( - Object periodUid, - int adGroupIndex, - int adIndexInAdGroup, - long windowSequenceNumber, - int nextAdGroupIndex) { - this.periodUid = periodUid; - this.adGroupIndex = adGroupIndex; - this.adIndexInAdGroup = adIndexInAdGroup; - this.windowSequenceNumber = windowSequenceNumber; - this.nextAdGroupIndex = nextAdGroupIndex; + /** Wraps an {@link com.google.android.exoplayer2.source.MediaPeriodId} into a MediaPeriodId. */ + public MediaPeriodId(com.google.android.exoplayer2.source.MediaPeriodId mediaPeriodId) { + super(mediaPeriodId); } - /** Returns a copy of this period identifier but with {@code newPeriodUid} as its period uid. */ + /** See {@link com.google.android.exoplayer2.source.MediaPeriodId#copyWithPeriodUid(Object)}. 
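A sketch of the bridge this wrapper enables, assuming call sites that still expect MediaSource.MediaPeriodId; the helper is hypothetical, while the wrapping constructor is the one added above.

import com.google.android.exoplayer2.source.MediaSource;

final class LegacyIdBridgeSketch {

  static MediaSource.MediaPeriodId toLegacy(
      com.google.android.exoplayer2.source.MediaPeriodId id) {
    return new MediaSource.MediaPeriodId(id);
  }
}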
*/ + @Override public MediaPeriodId copyWithPeriodUid(Object newPeriodUid) { - return periodUid.equals(newPeriodUid) - ? this - : new MediaPeriodId( - newPeriodUid, adGroupIndex, adIndexInAdGroup, windowSequenceNumber, nextAdGroupIndex); + return new MediaPeriodId(super.copyWithPeriodUid(newPeriodUid)); } /** - * Returns whether this period identifier identifies an ad in an ad group in a period. + * See {@link + * com.google.android.exoplayer2.source.MediaPeriodId#copyWithWindowSequenceNumber(long)}. */ - public boolean isAd() { - return adGroupIndex != C.INDEX_UNSET; - } - @Override - public boolean equals(@Nullable Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - MediaPeriodId periodId = (MediaPeriodId) obj; - return periodUid.equals(periodId.periodUid) - && adGroupIndex == periodId.adGroupIndex - && adIndexInAdGroup == periodId.adIndexInAdGroup - && windowSequenceNumber == periodId.windowSequenceNumber - && nextAdGroupIndex == periodId.nextAdGroupIndex; - } - - @Override - public int hashCode() { - int result = 17; - result = 31 * result + periodUid.hashCode(); - result = 31 * result + adGroupIndex; - result = 31 * result + adIndexInAdGroup; - result = 31 * result + (int) windowSequenceNumber; - result = 31 * result + nextAdGroupIndex; - return result; + public MediaPeriodId copyWithWindowSequenceNumber(long windowSequenceNumber) { + return new MediaPeriodId(super.copyWithWindowSequenceNumber(windowSequenceNumber)); } } @@ -228,12 +182,63 @@ public int hashCode() { */ void removeEventListener(MediaSourceEventListener eventListener); - /** Returns the tag set on the media source, or null if none was set. */ + /** + * Adds a {@link DrmSessionEventListener} to the list of listeners which are notified of DRM + * events for this media source. + * + * @param handler A handler on the which listener events will be posted. + * @param eventListener The listener to be added. + */ + void addDrmEventListener(Handler handler, DrmSessionEventListener eventListener); + + /** + * Removes a {@link DrmSessionEventListener} from the list of listeners which are notified of DRM + * events for this media source. + * + * @param eventListener The listener to be removed. + */ + void removeDrmEventListener(DrmSessionEventListener eventListener); + + /** + * Returns the initial placeholder timeline that is returned immediately when the real timeline is + * not yet known, or null to let the player create an initial timeline. + * + *
<p>
      The initial timeline must use the same uids for windows and periods that the real timeline + * will use. It also must provide windows which are marked as dynamic to indicate that the window + * is expected to change when the real timeline arrives. + * + *
<p>
      Any media source which has multiple windows should typically provide such an initial + * timeline to make sure the player reports the correct number of windows immediately. + */ @Nullable - default Object getTag() { + default Timeline getInitialTimeline() { return null; } + /** + * Returns true if the media source is guaranteed to never have zero or more than one window. + * + *
<p>
      The default implementation returns {@code true}. + * + * @return true if the source has exactly one window. + */ + default boolean isSingleWindow() { + return true; + } + + /** Returns the {@link MediaItem} whose media is provided by the source. */ + MediaItem getMediaItem(); + + /** + * @deprecated Implement {@link #prepareSource(MediaSourceCaller, TransferListener, PlayerId)} + * instead. + */ + @Deprecated + default void prepareSource( + MediaSourceCaller caller, @Nullable TransferListener mediaTransferListener) { + prepareSource(caller, mediaTransferListener, PlayerId.UNSET); + } + /** * Registers a {@link MediaSourceCaller}. Starts source preparation if needed and enables the * source for the creation of {@link MediaPeriod MediaPerods}. @@ -251,15 +256,20 @@ default Object getTag() { * transfers. May be null if no listener is available. Note that this listener should be only * informed of transfers related to the media loads and not of auxiliary loads for manifests * and other data. + * @param playerId The {@link PlayerId} of the player using this media source. */ - void prepareSource(MediaSourceCaller caller, @Nullable TransferListener mediaTransferListener); + void prepareSource( + MediaSourceCaller caller, + @Nullable TransferListener mediaTransferListener, + PlayerId playerId); /** * Throws any pending error encountered while loading or refreshing source information. * *
<p>
      Should not be called directly from application code. * - *
<p>
      Must only be called after {@link #prepareSource(MediaSourceCaller, TransferListener)}. + *
<p>
      Must only be called after {@link #prepareSource(MediaSourceCaller, TransferListener, + * PlayerId)}. */ void maybeThrowSourceInfoRefreshError() throws IOException; @@ -268,7 +278,8 @@ default Object getTag() { * *
<p>
      Should not be called directly from application code. * - *
<p>
      Must only be called after {@link #prepareSource(MediaSourceCaller, TransferListener)}. + *
<p>
      Must only be called after {@link #prepareSource(MediaSourceCaller, TransferListener, + * PlayerId)}. * * @param caller The {@link MediaSourceCaller} enabling the source. */ diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaSourceEventListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaSourceEventListener.java index 9e6f4f9cf1..d1bcf9380c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaSourceEventListener.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaSourceEventListener.java @@ -15,165 +15,24 @@ */ package com.google.android.exoplayer2.source; -import android.net.Uri; +import static com.google.android.exoplayer2.util.Util.postOrRun; + import android.os.Handler; -import android.os.Looper; -import android.os.SystemClock; import androidx.annotation.CheckResult; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.C.DataType; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; -import com.google.android.exoplayer2.upstream.DataSpec; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; /** Interface for callbacks to be notified of {@link MediaSource} events. */ public interface MediaSourceEventListener { - /** Media source load event information. */ - final class LoadEventInfo { - - /** Defines the requested data. */ - public final DataSpec dataSpec; - /** - * The {@link Uri} from which data is being read. The uri will be identical to the one in {@link - * #dataSpec}.uri unless redirection has occurred. If redirection has occurred, this is the uri - * after redirection. - */ - public final Uri uri; - /** The response headers associated with the load, or an empty map if unavailable. */ - public final Map> responseHeaders; - /** The value of {@link SystemClock#elapsedRealtime} at the time of the load event. */ - public final long elapsedRealtimeMs; - /** The duration of the load up to the event time. */ - public final long loadDurationMs; - /** The number of bytes that were loaded up to the event time. */ - public final long bytesLoaded; - - /** - * Creates load event info. - * - * @param dataSpec Defines the requested data. - * @param uri The {@link Uri} from which data is being read. The uri must be identical to the - * one in {@code dataSpec.uri} unless redirection has occurred. If redirection has occurred, - * this is the uri after redirection. - * @param responseHeaders The response headers associated with the load, or an empty map if - * unavailable. - * @param elapsedRealtimeMs The value of {@link SystemClock#elapsedRealtime} at the time of the - * load event. - * @param loadDurationMs The duration of the load up to the event time. - * @param bytesLoaded The number of bytes that were loaded up to the event time. For compressed - * network responses, this is the decompressed size. 
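A brief sketch of the call-site impact of removing the nested classes below: the descriptor now comes from the top-level MediaLoadData added earlier in this patch, so only the import changes. The helper and the use of C.DATA_TYPE_MEDIA are illustrative only.

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.source.MediaLoadData;

final class ListenerMigrationSketch {

  // Previously referenced as MediaSourceEventListener.MediaLoadData.
  static boolean isMediaData(MediaLoadData mediaLoadData) {
    return mediaLoadData.dataType == C.DATA_TYPE_MEDIA;
  }
}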
- */ - public LoadEventInfo( - DataSpec dataSpec, - Uri uri, - Map> responseHeaders, - long elapsedRealtimeMs, - long loadDurationMs, - long bytesLoaded) { - this.dataSpec = dataSpec; - this.uri = uri; - this.responseHeaders = responseHeaders; - this.elapsedRealtimeMs = elapsedRealtimeMs; - this.loadDurationMs = loadDurationMs; - this.bytesLoaded = bytesLoaded; - } - } - - /** Descriptor for data being loaded or selected by a media source. */ - final class MediaLoadData { - - /** One of the {@link C} {@code DATA_TYPE_*} constants defining the type of data. */ - public final int dataType; - /** - * One of the {@link C} {@code TRACK_TYPE_*} constants if the data corresponds to media of a - * specific type. {@link C#TRACK_TYPE_UNKNOWN} otherwise. - */ - public final int trackType; - /** - * The format of the track to which the data belongs. Null if the data does not belong to a - * specific track. - */ - @Nullable public final Format trackFormat; - /** - * One of the {@link C} {@code SELECTION_REASON_*} constants if the data belongs to a track. - * {@link C#SELECTION_REASON_UNKNOWN} otherwise. - */ - public final int trackSelectionReason; - /** - * Optional data associated with the selection of the track to which the data belongs. Null if - * the data does not belong to a track. - */ - @Nullable public final Object trackSelectionData; - /** - * The start time of the media, or {@link C#TIME_UNSET} if the data does not belong to a - * specific media period. - */ - public final long mediaStartTimeMs; - /** - * The end time of the media, or {@link C#TIME_UNSET} if the data does not belong to a specific - * media period or the end time is unknown. - */ - public final long mediaEndTimeMs; - - /** - * Creates media load data. - * - * @param dataType One of the {@link C} {@code DATA_TYPE_*} constants defining the type of data. - * @param trackType One of the {@link C} {@code TRACK_TYPE_*} constants if the data corresponds - * to media of a specific type. {@link C#TRACK_TYPE_UNKNOWN} otherwise. - * @param trackFormat The format of the track to which the data belongs. Null if the data does - * not belong to a track. - * @param trackSelectionReason One of the {@link C} {@code SELECTION_REASON_*} constants if the - * data belongs to a track. {@link C#SELECTION_REASON_UNKNOWN} otherwise. - * @param trackSelectionData Optional data associated with the selection of the track to which - * the data belongs. Null if the data does not belong to a track. - * @param mediaStartTimeMs The start time of the media, or {@link C#TIME_UNSET} if the data does - * not belong to a specific media period. - * @param mediaEndTimeMs The end time of the media, or {@link C#TIME_UNSET} if the data does not - * belong to a specific media period or the end time is unknown. - */ - public MediaLoadData( - int dataType, - int trackType, - @Nullable Format trackFormat, - int trackSelectionReason, - @Nullable Object trackSelectionData, - long mediaStartTimeMs, - long mediaEndTimeMs) { - this.dataType = dataType; - this.trackType = trackType; - this.trackFormat = trackFormat; - this.trackSelectionReason = trackSelectionReason; - this.trackSelectionData = trackSelectionData; - this.mediaStartTimeMs = mediaStartTimeMs; - this.mediaEndTimeMs = mediaEndTimeMs; - } - } - - /** - * Called when a media period is created by the media source. - * - * @param windowIndex The window index in the timeline this media period belongs to. - * @param mediaPeriodId The {@link MediaPeriodId} of the created media period. 
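The nested LoadEventInfo and MediaLoadData classes removed above survive as top-level classes in this ExoPlayer generation (the new listener signatures below still use both names), with the same public fields the deleted javadoc documents. As a quick orientation, a log-only listener might read them like this; the class name, log tag, and the choice of callbacks are illustrative, not part of the change:

import android.util.Log;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.source.LoadEventInfo;
import com.google.android.exoplayer2.source.MediaLoadData;
import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import com.google.android.exoplayer2.source.MediaSourceEventListener;
import java.io.IOException;

final class LoadLoggingListener implements MediaSourceEventListener {

  private static final String TAG = "LoadLogging"; // illustrative tag

  @Override
  public void onLoadCompleted(
      int windowIndex,
      @Nullable MediaPeriodId mediaPeriodId,
      LoadEventInfo loadEventInfo,
      MediaLoadData mediaLoadData) {
    Log.d(
        TAG,
        "loaded " + loadEventInfo.bytesLoaded + " bytes from " + loadEventInfo.uri + " in "
            + loadEventInfo.loadDurationMs + " ms (dataType=" + mediaLoadData.dataType + ")");
  }

  @Override
  public void onLoadError(
      int windowIndex,
      @Nullable MediaPeriodId mediaPeriodId,
      LoadEventInfo loadEventInfo,
      MediaLoadData mediaLoadData,
      IOException error,
      boolean wasCanceled) {
    // Log only: as the javadoc below stresses, the player may still recover from this error.
    Log.w(TAG, "load error for " + loadEventInfo.uri + " (canceled=" + wasCanceled + ")", error);
  }
}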
- */ - default void onMediaPeriodCreated(int windowIndex, MediaPeriodId mediaPeriodId) {} - - /** - * Called when a media period is released by the media source. - * - * @param windowIndex The window index in the timeline this media period belongs to. - * @param mediaPeriodId The {@link MediaPeriodId} of the released media period. - */ - default void onMediaPeriodReleased(int windowIndex, MediaPeriodId mediaPeriodId) {} - /** * Called when a load begins. * @@ -235,11 +94,11 @@ default void onLoadCanceled( * not be called in addition to this method. * *

      This method being called does not indicate that playback has failed, or that it will fail. - * The player may be able to recover from the error and continue. Hence applications should - * not implement this method to display a user visible error or initiate an application - * level retry ({@link Player.EventListener#onPlayerError} is the appropriate place to implement - * such behavior). This method is called to provide the application with an opportunity to log the - * error if it wishes to do so. + * The player may be able to recover from the error. Hence applications should not + * implement this method to display a user visible error or initiate an application level retry. + * {@link Player.Listener#onPlayerError} is the appropriate place to implement such behavior. This + * method is called to provide the application with an opportunity to log the error if it wishes + * to do so. * * @param windowIndex The window index in the timeline of the media source this load belongs to. * @param mediaPeriodId The {@link MediaPeriodId} this load belongs to. Null if the load does not @@ -260,14 +119,6 @@ default void onLoadError( IOException error, boolean wasCanceled) {} - /** - * Called when a media period is first being read from. - * - * @param windowIndex The window index in the timeline this media period belongs to. - * @param mediaPeriodId The {@link MediaPeriodId} of the media period being read from. - */ - default void onReadingStarted(int windowIndex, MediaPeriodId mediaPeriodId) {} - /** * Called when data is removed from the back of a media buffer, typically so that it can be * re-buffered in a different format. @@ -290,8 +141,8 @@ default void onUpstreamDiscarded( default void onDownstreamFormatChanged( int windowIndex, @Nullable MediaPeriodId mediaPeriodId, MediaLoadData mediaLoadData) {} - /** Dispatches events to {@link MediaSourceEventListener}s. */ - final class EventDispatcher { + /** Dispatches events to {@link MediaSourceEventListener MediaSourceEventListeners}. */ + class EventDispatcher { /** The timeline window index reported with the events. */ public final int windowIndex; @@ -344,7 +195,8 @@ public EventDispatcher withParameters( * @param eventListener The listener to be added. */ public void addEventListener(Handler handler, MediaSourceEventListener eventListener) { - Assertions.checkArgument(handler != null && eventListener != null); + Assertions.checkNotNull(handler); + Assertions.checkNotNull(eventListener); listenerAndHandlers.add(new ListenerAndHandler(handler, eventListener)); } @@ -361,61 +213,31 @@ public void removeEventListener(MediaSourceEventListener eventListener) { } } - /** Dispatches {@link #onMediaPeriodCreated(int, MediaPeriodId)}. */ - public void mediaPeriodCreated() { - MediaPeriodId mediaPeriodId = Assertions.checkNotNull(this.mediaPeriodId); - for (ListenerAndHandler listenerAndHandler : listenerAndHandlers) { - final MediaSourceEventListener listener = listenerAndHandler.listener; - postOrRun( - listenerAndHandler.handler, - () -> listener.onMediaPeriodCreated(windowIndex, mediaPeriodId)); - } - } - - /** Dispatches {@link #onMediaPeriodReleased(int, MediaPeriodId)}. 
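To make the guidance above concrete: user-visible handling belongs on the player, not in this listener. A minimal sketch, assuming the Player.Listener#onPlayerError(PlaybackException) signature of this ExoPlayer generation; the class name and the UI hook are placeholders:

import com.google.android.exoplayer2.PlaybackException;
import com.google.android.exoplayer2.Player;

final class PlayerErrorUi implements Player.Listener { // illustrative name

  @Override
  public void onPlayerError(PlaybackException error) {
    // Terminal failure path: MediaSourceEventListener.onLoadError may fire several times
    // beforehand without playback ever actually failing.
    showPlaybackFailedDialog(error); // hypothetical UI hook
  }

  private void showPlaybackFailedDialog(PlaybackException error) {
    // Left to the application, e.g. show a dialog and offer a manual retry.
  }
}
// Usage: player.addListener(new PlayerErrorUi());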
*/ - public void mediaPeriodReleased() { - MediaPeriodId mediaPeriodId = Assertions.checkNotNull(this.mediaPeriodId); - for (ListenerAndHandler listenerAndHandler : listenerAndHandlers) { - final MediaSourceEventListener listener = listenerAndHandler.listener; - postOrRun( - listenerAndHandler.handler, - () -> listener.onMediaPeriodReleased(windowIndex, mediaPeriodId)); - } - } - /** Dispatches {@link #onLoadStarted(int, MediaPeriodId, LoadEventInfo, MediaLoadData)}. */ - public void loadStarted(DataSpec dataSpec, int dataType, long elapsedRealtimeMs) { + public void loadStarted(LoadEventInfo loadEventInfo, @DataType int dataType) { loadStarted( - dataSpec, + loadEventInfo, dataType, - C.TRACK_TYPE_UNKNOWN, - null, - C.SELECTION_REASON_UNKNOWN, - null, - C.TIME_UNSET, - C.TIME_UNSET, - elapsedRealtimeMs); + /* trackType= */ C.TRACK_TYPE_UNKNOWN, + /* trackFormat= */ null, + /* trackSelectionReason= */ C.SELECTION_REASON_UNKNOWN, + /* trackSelectionData= */ null, + /* mediaStartTimeUs= */ C.TIME_UNSET, + /* mediaEndTimeUs= */ C.TIME_UNSET); } /** Dispatches {@link #onLoadStarted(int, MediaPeriodId, LoadEventInfo, MediaLoadData)}. */ public void loadStarted( - DataSpec dataSpec, - int dataType, - int trackType, + LoadEventInfo loadEventInfo, + @DataType int dataType, + @C.TrackType int trackType, @Nullable Format trackFormat, - int trackSelectionReason, + @C.SelectionReason int trackSelectionReason, @Nullable Object trackSelectionData, long mediaStartTimeUs, - long mediaEndTimeUs, - long elapsedRealtimeMs) { + long mediaEndTimeUs) { loadStarted( - new LoadEventInfo( - dataSpec, - dataSpec.uri, - /* responseHeaders= */ Collections.emptyMap(), - elapsedRealtimeMs, - /* loadDurationMs= */ 0, - /* bytesLoaded= */ 0), + loadEventInfo, new MediaLoadData( dataType, trackType, @@ -429,7 +251,7 @@ public void loadStarted( /** Dispatches {@link #onLoadStarted(int, MediaPeriodId, LoadEventInfo, MediaLoadData)}. */ public void loadStarted(LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData) { for (ListenerAndHandler listenerAndHandler : listenerAndHandlers) { - final MediaSourceEventListener listener = listenerAndHandler.listener; + MediaSourceEventListener listener = listenerAndHandler.listener; postOrRun( listenerAndHandler.handler, () -> listener.onLoadStarted(windowIndex, mediaPeriodId, loadEventInfo, mediaLoadData)); @@ -437,48 +259,30 @@ public void loadStarted(LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData } /** Dispatches {@link #onLoadCompleted(int, MediaPeriodId, LoadEventInfo, MediaLoadData)}. */ - public void loadCompleted( - DataSpec dataSpec, - Uri uri, - Map> responseHeaders, - int dataType, - long elapsedRealtimeMs, - long loadDurationMs, - long bytesLoaded) { + public void loadCompleted(LoadEventInfo loadEventInfo, @DataType int dataType) { loadCompleted( - dataSpec, - uri, - responseHeaders, + loadEventInfo, dataType, - C.TRACK_TYPE_UNKNOWN, - null, - C.SELECTION_REASON_UNKNOWN, - null, - C.TIME_UNSET, - C.TIME_UNSET, - elapsedRealtimeMs, - loadDurationMs, - bytesLoaded); + /* trackType= */ C.TRACK_TYPE_UNKNOWN, + /* trackFormat= */ null, + /* trackSelectionReason= */ C.SELECTION_REASON_UNKNOWN, + /* trackSelectionData= */ null, + /* mediaStartTimeUs= */ C.TIME_UNSET, + /* mediaEndTimeUs= */ C.TIME_UNSET); } /** Dispatches {@link #onLoadCompleted(int, MediaPeriodId, LoadEventInfo, MediaLoadData)}. 
*/ public void loadCompleted( - DataSpec dataSpec, - Uri uri, - Map> responseHeaders, - int dataType, - int trackType, + LoadEventInfo loadEventInfo, + @DataType int dataType, + @C.TrackType int trackType, @Nullable Format trackFormat, - int trackSelectionReason, + @C.SelectionReason int trackSelectionReason, @Nullable Object trackSelectionData, long mediaStartTimeUs, - long mediaEndTimeUs, - long elapsedRealtimeMs, - long loadDurationMs, - long bytesLoaded) { + long mediaEndTimeUs) { loadCompleted( - new LoadEventInfo( - dataSpec, uri, responseHeaders, elapsedRealtimeMs, loadDurationMs, bytesLoaded), + loadEventInfo, new MediaLoadData( dataType, trackType, @@ -492,7 +296,7 @@ public void loadCompleted( /** Dispatches {@link #onLoadCompleted(int, MediaPeriodId, LoadEventInfo, MediaLoadData)}. */ public void loadCompleted(LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData) { for (ListenerAndHandler listenerAndHandler : listenerAndHandlers) { - final MediaSourceEventListener listener = listenerAndHandler.listener; + MediaSourceEventListener listener = listenerAndHandler.listener; postOrRun( listenerAndHandler.handler, () -> @@ -501,48 +305,30 @@ public void loadCompleted(LoadEventInfo loadEventInfo, MediaLoadData mediaLoadDa } /** Dispatches {@link #onLoadCanceled(int, MediaPeriodId, LoadEventInfo, MediaLoadData)}. */ - public void loadCanceled( - DataSpec dataSpec, - Uri uri, - Map> responseHeaders, - int dataType, - long elapsedRealtimeMs, - long loadDurationMs, - long bytesLoaded) { + public void loadCanceled(LoadEventInfo loadEventInfo, @DataType int dataType) { loadCanceled( - dataSpec, - uri, - responseHeaders, + loadEventInfo, dataType, - C.TRACK_TYPE_UNKNOWN, - null, - C.SELECTION_REASON_UNKNOWN, - null, - C.TIME_UNSET, - C.TIME_UNSET, - elapsedRealtimeMs, - loadDurationMs, - bytesLoaded); + /* trackType= */ C.TRACK_TYPE_UNKNOWN, + /* trackFormat= */ null, + /* trackSelectionReason= */ C.SELECTION_REASON_UNKNOWN, + /* trackSelectionData= */ null, + /* mediaStartTimeUs= */ C.TIME_UNSET, + /* mediaEndTimeUs= */ C.TIME_UNSET); } /** Dispatches {@link #onLoadCanceled(int, MediaPeriodId, LoadEventInfo, MediaLoadData)}. */ public void loadCanceled( - DataSpec dataSpec, - Uri uri, - Map> responseHeaders, - int dataType, - int trackType, + LoadEventInfo loadEventInfo, + @DataType int dataType, + @C.TrackType int trackType, @Nullable Format trackFormat, - int trackSelectionReason, + @C.SelectionReason int trackSelectionReason, @Nullable Object trackSelectionData, long mediaStartTimeUs, - long mediaEndTimeUs, - long elapsedRealtimeMs, - long loadDurationMs, - long bytesLoaded) { + long mediaEndTimeUs) { loadCanceled( - new LoadEventInfo( - dataSpec, uri, responseHeaders, elapsedRealtimeMs, loadDurationMs, bytesLoaded), + loadEventInfo, new MediaLoadData( dataType, trackType, @@ -569,29 +355,19 @@ public void loadCanceled(LoadEventInfo loadEventInfo, MediaLoadData mediaLoadDat * boolean)}. 
*/ public void loadError( - DataSpec dataSpec, - Uri uri, - Map> responseHeaders, - int dataType, - long elapsedRealtimeMs, - long loadDurationMs, - long bytesLoaded, + LoadEventInfo loadEventInfo, + @DataType int dataType, IOException error, boolean wasCanceled) { loadError( - dataSpec, - uri, - responseHeaders, + loadEventInfo, dataType, - C.TRACK_TYPE_UNKNOWN, - null, - C.SELECTION_REASON_UNKNOWN, - null, - C.TIME_UNSET, - C.TIME_UNSET, - elapsedRealtimeMs, - loadDurationMs, - bytesLoaded, + /* trackType= */ C.TRACK_TYPE_UNKNOWN, + /* trackFormat= */ null, + /* trackSelectionReason= */ C.SELECTION_REASON_UNKNOWN, + /* trackSelectionData= */ null, + /* mediaStartTimeUs= */ C.TIME_UNSET, + /* mediaEndTimeUs= */ C.TIME_UNSET, error, wasCanceled); } @@ -601,24 +377,18 @@ public void loadError( * boolean)}. */ public void loadError( - DataSpec dataSpec, - Uri uri, - Map> responseHeaders, - int dataType, - int trackType, + LoadEventInfo loadEventInfo, + @DataType int dataType, + @C.TrackType int trackType, @Nullable Format trackFormat, - int trackSelectionReason, + @C.SelectionReason int trackSelectionReason, @Nullable Object trackSelectionData, long mediaStartTimeUs, long mediaEndTimeUs, - long elapsedRealtimeMs, - long loadDurationMs, - long bytesLoaded, IOException error, boolean wasCanceled) { loadError( - new LoadEventInfo( - dataSpec, uri, responseHeaders, elapsedRealtimeMs, loadDurationMs, bytesLoaded), + loadEventInfo, new MediaLoadData( dataType, trackType, @@ -641,7 +411,7 @@ public void loadError( IOException error, boolean wasCanceled) { for (ListenerAndHandler listenerAndHandler : listenerAndHandlers) { - final MediaSourceEventListener listener = listenerAndHandler.listener; + MediaSourceEventListener listener = listenerAndHandler.listener; postOrRun( listenerAndHandler.handler, () -> @@ -650,17 +420,6 @@ public void loadError( } } - /** Dispatches {@link #onReadingStarted(int, MediaPeriodId)}. */ - public void readingStarted() { - MediaPeriodId mediaPeriodId = Assertions.checkNotNull(this.mediaPeriodId); - for (ListenerAndHandler listenerAndHandler : listenerAndHandlers) { - final MediaSourceEventListener listener = listenerAndHandler.listener; - postOrRun( - listenerAndHandler.handler, - () -> listener.onReadingStarted(windowIndex, mediaPeriodId)); - } - } - /** Dispatches {@link #onUpstreamDiscarded(int, MediaPeriodId, MediaLoadData)}. */ public void upstreamDiscarded(int trackType, long mediaStartTimeUs, long mediaEndTimeUs) { upstreamDiscarded( @@ -678,7 +437,7 @@ public void upstreamDiscarded(int trackType, long mediaStartTimeUs, long mediaEn public void upstreamDiscarded(MediaLoadData mediaLoadData) { MediaPeriodId mediaPeriodId = Assertions.checkNotNull(this.mediaPeriodId); for (ListenerAndHandler listenerAndHandler : listenerAndHandlers) { - final MediaSourceEventListener listener = listenerAndHandler.listener; + MediaSourceEventListener listener = listenerAndHandler.listener; postOrRun( listenerAndHandler.handler, () -> listener.onUpstreamDiscarded(windowIndex, mediaPeriodId, mediaLoadData)); @@ -687,9 +446,9 @@ public void upstreamDiscarded(MediaLoadData mediaLoadData) { /** Dispatches {@link #onDownstreamFormatChanged(int, MediaPeriodId, MediaLoadData)}. 
*/ public void downstreamFormatChanged( - int trackType, + @C.TrackType int trackType, @Nullable Format trackFormat, - int trackSelectionReason, + @C.SelectionReason int trackSelectionReason, @Nullable Object trackSelectionData, long mediaTimeUs) { downstreamFormatChanged( @@ -706,7 +465,7 @@ public void downstreamFormatChanged( /** Dispatches {@link #onDownstreamFormatChanged(int, MediaPeriodId, MediaLoadData)}. */ public void downstreamFormatChanged(MediaLoadData mediaLoadData) { for (ListenerAndHandler listenerAndHandler : listenerAndHandlers) { - final MediaSourceEventListener listener = listenerAndHandler.listener; + MediaSourceEventListener listener = listenerAndHandler.listener; postOrRun( listenerAndHandler.handler, () -> listener.onDownstreamFormatChanged(windowIndex, mediaPeriodId, mediaLoadData)); @@ -714,22 +473,14 @@ public void downstreamFormatChanged(MediaLoadData mediaLoadData) { } private long adjustMediaTime(long mediaTimeUs) { - long mediaTimeMs = C.usToMs(mediaTimeUs); + long mediaTimeMs = Util.usToMs(mediaTimeUs); return mediaTimeMs == C.TIME_UNSET ? C.TIME_UNSET : mediaTimeOffsetMs + mediaTimeMs; } - private void postOrRun(Handler handler, Runnable runnable) { - if (handler.getLooper() == Looper.myLooper()) { - runnable.run(); - } else { - handler.post(runnable); - } - } - private static final class ListenerAndHandler { - public final Handler handler; - public final MediaSourceEventListener listener; + public Handler handler; + public MediaSourceEventListener listener; public ListenerAndHandler(Handler handler, MediaSourceEventListener listener) { this.handler = handler; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaSourceFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaSourceFactory.java index 201f241d59..dec7c92c39 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaSourceFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MediaSourceFactory.java @@ -15,48 +15,44 @@ */ package com.google.android.exoplayer2.source; -import android.net.Uri; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.drm.DrmSession; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.offline.StreamKey; -import java.util.List; +import com.google.android.exoplayer2.MediaItem; +import com.google.android.exoplayer2.drm.DrmSessionManagerProvider; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; -/** Factory for creating {@link MediaSource}s from URIs. */ -public interface MediaSourceFactory { +/** + * @deprecated Use {@link MediaSource.Factory}. + */ +@Deprecated +public interface MediaSourceFactory extends MediaSource.Factory { /** - * Sets a list of {@link StreamKey StreamKeys} by which the manifest is filtered. - * - * @param streamKeys A list of {@link StreamKey StreamKeys}. - * @return This factory, for convenience. - * @throws IllegalStateException If {@link #createMediaSource(Uri)} has already been called. + * An instance that throws {@link UnsupportedOperationException} from {@link #createMediaSource} + * and {@link #getSupportedTypes()}. 
*/ - default MediaSourceFactory setStreamKeys(List streamKeys) { - return this; - } + MediaSourceFactory UNSUPPORTED = + new MediaSourceFactory() { + @Override + public MediaSourceFactory setDrmSessionManagerProvider( + @Nullable DrmSessionManagerProvider drmSessionManagerProvider) { + return this; + } - /** - * Sets the {@link DrmSessionManager} to use for acquiring {@link DrmSession DrmSessions}. - * - * @param drmSessionManager The {@link DrmSessionManager}. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - */ - MediaSourceFactory setDrmSessionManager(DrmSessionManager drmSessionManager); + @Override + public MediaSourceFactory setLoadErrorHandlingPolicy( + @Nullable LoadErrorHandlingPolicy loadErrorHandlingPolicy) { + return this; + } - /** - * Creates a new {@link MediaSource} with the specified {@code uri}. - * - * @param uri The URI to play. - * @return The new {@link MediaSource media source}. - */ - MediaSource createMediaSource(Uri uri); + @Override + public @C.ContentType int[] getSupportedTypes() { + throw new UnsupportedOperationException(); + } - /** - * Returns the {@link C.ContentType content types} supported by media sources created by this - * factory. - */ - @C.ContentType - int[] getSupportedTypes(); + @Override + public MediaSource createMediaSource(MediaItem mediaItem) { + throw new UnsupportedOperationException(); + } + }; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MergingMediaPeriod.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MergingMediaPeriod.java index afa25d6fce..8f056fc094 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MergingMediaPeriod.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MergingMediaPeriod.java @@ -15,42 +15,72 @@ */ package com.google.android.exoplayer2.source; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.Math.max; + import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.SeekParameters; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.decoder.DecoderInputBuffer; +import com.google.android.exoplayer2.offline.StreamKey; +import com.google.android.exoplayer2.source.chunk.Chunk; +import com.google.android.exoplayer2.source.chunk.MediaChunk; +import com.google.android.exoplayer2.source.chunk.MediaChunkIterator; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.util.Assertions; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.IdentityHashMap; +import java.util.List; import org.checkerframework.checker.nullness.compatqual.NullableType; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * Merges multiple {@link MediaPeriod}s. - */ +/** Merges multiple {@link MediaPeriod}s. 
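With MediaSourceFactory reduced to a deprecated alias, new code would implement MediaSource.Factory directly; its surface is exactly the four methods the UNSUPPORTED stub above overrides. A rough sketch under that assumption, written as a decorator that simply forwards to another factory (the class name is illustrative):

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.drm.DrmSessionManagerProvider;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy;

final class ForwardingMediaSourceFactory implements MediaSource.Factory {

  private final MediaSource.Factory delegate;

  ForwardingMediaSourceFactory(MediaSource.Factory delegate) {
    this.delegate = delegate;
  }

  @Override
  public MediaSource.Factory setDrmSessionManagerProvider(
      DrmSessionManagerProvider drmSessionManagerProvider) {
    delegate.setDrmSessionManagerProvider(drmSessionManagerProvider);
    return this;
  }

  @Override
  public MediaSource.Factory setLoadErrorHandlingPolicy(
      LoadErrorHandlingPolicy loadErrorHandlingPolicy) {
    delegate.setLoadErrorHandlingPolicy(loadErrorHandlingPolicy);
    return this;
  }

  @Override
  public @C.ContentType int[] getSupportedTypes() {
    return delegate.getSupportedTypes();
  }

  @Override
  public MediaSource createMediaSource(MediaItem mediaItem) {
    return delegate.createMediaSource(mediaItem);
  }
}

The UNSUPPORTED constant plays the opposite role: a fail-fast placeholder that throws from createMediaSource and getSupportedTypes whenever no real factory has been configured.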
*/ /* package */ final class MergingMediaPeriod implements MediaPeriod, MediaPeriod.Callback { - public final MediaPeriod[] periods; - + private final MediaPeriod[] periods; private final IdentityHashMap streamPeriodIndices; private final CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory; private final ArrayList childrenPendingPreparation; + private final HashMap childTrackGroupByMergedTrackGroup; @Nullable private Callback callback; @Nullable private TrackGroupArray trackGroups; private MediaPeriod[] enabledPeriods; private SequenceableLoader compositeSequenceableLoader; - public MergingMediaPeriod(CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory, + public MergingMediaPeriod( + CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory, + long[] periodTimeOffsetsUs, MediaPeriod... periods) { this.compositeSequenceableLoaderFactory = compositeSequenceableLoaderFactory; this.periods = periods; childrenPendingPreparation = new ArrayList<>(); + childTrackGroupByMergedTrackGroup = new HashMap<>(); compositeSequenceableLoader = compositeSequenceableLoaderFactory.createCompositeSequenceableLoader(); streamPeriodIndices = new IdentityHashMap<>(); enabledPeriods = new MediaPeriod[0]; + for (int i = 0; i < periods.length; i++) { + if (periodTimeOffsetsUs[i] != 0) { + this.periods[i] = new TimeOffsetMediaPeriod(periods[i], periodTimeOffsetsUs[i]); + } + } + } + + /** + * Returns the child period passed to {@link + * #MergingMediaPeriod(CompositeSequenceableLoaderFactory, long[], MediaPeriod...)} at the + * specified index. + */ + public MediaPeriod getChildPeriod(int index) { + return periods[index] instanceof TimeOffsetMediaPeriod + ? ((TimeOffsetMediaPeriod) periods[index]).mediaPeriod + : periods[index]; } @Override @@ -76,7 +106,7 @@ public TrackGroupArray getTrackGroups() { @Override public long selectTracks( - @NullableType TrackSelection[] selections, + @NullableType ExoTrackSelection[] selections, boolean[] mayRetainStreamFlags, @NullableType SampleStream[] streams, boolean[] streamResetFlags, @@ -85,32 +115,39 @@ public long selectTracks( int[] streamChildIndices = new int[selections.length]; int[] selectionChildIndices = new int[selections.length]; for (int i = 0; i < selections.length; i++) { - streamChildIndices[i] = streams[i] == null ? C.INDEX_UNSET - : streamPeriodIndices.get(streams[i]); - selectionChildIndices[i] = C.INDEX_UNSET; + Integer streamChildIndex = streams[i] == null ? null : streamPeriodIndices.get(streams[i]); + streamChildIndices[i] = streamChildIndex == null ? C.INDEX_UNSET : streamChildIndex; if (selections[i] != null) { - TrackGroup trackGroup = selections[i].getTrackGroup(); - for (int j = 0; j < periods.length; j++) { - if (periods[j].getTrackGroups().indexOf(trackGroup) != C.INDEX_UNSET) { - selectionChildIndices[i] = j; - break; - } - } + TrackGroup mergedTrackGroup = selections[i].getTrackGroup(); + // mergedTrackGroup.id is 'periods array index' + ":" + childTrackGroup.id + selectionChildIndices[i] = + Integer.parseInt(mergedTrackGroup.id.substring(0, mergedTrackGroup.id.indexOf(":"))); + } else { + selectionChildIndices[i] = C.INDEX_UNSET; } } streamPeriodIndices.clear(); // Select tracks for each child, copying the resulting streams back into a new streams array. 
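The Integer.parseInt lookup above relies on the id scheme established further down in onPrepared, where each child track group is re-published under "<child period index>:<original id>". A standalone sketch of that round trip, using plain strings instead of TrackGroup (copyWithId only rewrites the id field); the helper class is illustrative:

final class MergedTrackGroupIds {

  /** Mirrors onPrepared: prefix the child's id with the index of its period. */
  static String mergedId(int childPeriodIndex, String childTrackGroupId) {
    return childPeriodIndex + ":" + childTrackGroupId;
  }

  /** Mirrors selectTracks: recover the child period index from a merged id. */
  static int childPeriodIndex(String mergedTrackGroupId) {
    // The first ':' delimits the numeric index, so child ids may themselves contain ':'.
    return Integer.parseInt(mergedTrackGroupId.substring(0, mergedTrackGroupId.indexOf(':')));
  }

  public static void main(String[] args) {
    String merged = mergedId(2, "video"); // -> "2:video"
    int child = childPeriodIndex(merged); // -> 2
    System.out.println(merged + " -> child period " + child);
  }
}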
@NullableType SampleStream[] newStreams = new SampleStream[selections.length]; @NullableType SampleStream[] childStreams = new SampleStream[selections.length]; - @NullableType TrackSelection[] childSelections = new TrackSelection[selections.length]; + @NullableType ExoTrackSelection[] childSelections = new ExoTrackSelection[selections.length]; ArrayList enabledPeriodsList = new ArrayList<>(periods.length); for (int i = 0; i < periods.length; i++) { for (int j = 0; j < selections.length; j++) { childStreams[j] = streamChildIndices[j] == i ? streams[j] : null; - childSelections[j] = selectionChildIndices[j] == i ? selections[j] : null; + if (selectionChildIndices[j] == i) { + ExoTrackSelection mergedTrackSelection = checkNotNull(selections[j]); + TrackGroup mergedTrackGroup = mergedTrackSelection.getTrackGroup(); + TrackGroup childTrackGroup = + checkNotNull(childTrackGroupByMergedTrackGroup.get(mergedTrackGroup)); + childSelections[j] = new ForwardingTrackSelection(mergedTrackSelection, childTrackGroup); + } else { + childSelections[j] = null; + } } - long selectPositionUs = periods[i].selectTracks(childSelections, mayRetainStreamFlags, - childStreams, streamResetFlags, positionUs); + long selectPositionUs = + periods[i].selectTracks( + childSelections, mayRetainStreamFlags, childStreams, streamResetFlags, positionUs); if (i == 0) { positionUs = selectPositionUs; } else if (selectPositionUs != positionUs) { @@ -136,8 +173,7 @@ public long selectTracks( // Copy the new streams back into the streams array. System.arraycopy(newStreams, 0, streams, 0, newStreams.length); // Update the local state. - enabledPeriods = new MediaPeriod[enabledPeriodsList.size()]; - enabledPeriodsList.toArray(enabledPeriods); + enabledPeriods = enabledPeriodsList.toArray(new MediaPeriod[0]); compositeSequenceableLoader = compositeSequenceableLoaderFactory.createCompositeSequenceableLoader(enabledPeriods); return positionUs; @@ -181,23 +217,32 @@ public long getNextLoadPositionUs() { @Override public long readDiscontinuity() { - long positionUs = periods[0].readDiscontinuity(); - // Periods other than the first one are not allowed to report discontinuities. - for (int i = 1; i < periods.length; i++) { - if (periods[i].readDiscontinuity() != C.TIME_UNSET) { - throw new IllegalStateException("Child reported discontinuity."); - } - } - // It must be possible to seek enabled periods to the new position, if there is one. - if (positionUs != C.TIME_UNSET) { - for (MediaPeriod enabledPeriod : enabledPeriods) { - if (enabledPeriod != periods[0] - && enabledPeriod.seekToUs(positionUs) != positionUs) { + long discontinuityUs = C.TIME_UNSET; + for (MediaPeriod period : enabledPeriods) { + long otherDiscontinuityUs = period.readDiscontinuity(); + if (otherDiscontinuityUs != C.TIME_UNSET) { + if (discontinuityUs == C.TIME_UNSET) { + discontinuityUs = otherDiscontinuityUs; + // First reported discontinuity. Seek all previous periods to the new position. + for (MediaPeriod previousPeriod : enabledPeriods) { + if (previousPeriod == period) { + break; + } + if (previousPeriod.seekToUs(discontinuityUs) != discontinuityUs) { + throw new IllegalStateException("Unexpected child seekToUs result."); + } + } + } else if (otherDiscontinuityUs != discontinuityUs) { + throw new IllegalStateException("Conflicting discontinuities."); + } + } else if (discontinuityUs != C.TIME_UNSET) { + // We already have a discontinuity, seek this period to the new position. 
+ if (period.seekToUs(discontinuityUs) != discontinuityUs) { throw new IllegalStateException("Unexpected child seekToUs result."); } } } - return positionUs; + return discontinuityUs; } @Override @@ -237,11 +282,14 @@ public void onPrepared(MediaPeriod preparedPeriod) { } TrackGroup[] trackGroupArray = new TrackGroup[totalTrackGroupCount]; int trackGroupIndex = 0; - for (MediaPeriod period : periods) { - TrackGroupArray periodTrackGroups = period.getTrackGroups(); + for (int i = 0; i < periods.length; i++) { + TrackGroupArray periodTrackGroups = periods[i].getTrackGroups(); int periodTrackGroupCount = periodTrackGroups.length; for (int j = 0; j < periodTrackGroupCount; j++) { - trackGroupArray[trackGroupIndex++] = periodTrackGroups.get(j); + TrackGroup childTrackGroup = periodTrackGroups.get(j); + TrackGroup mergedTrackGroup = childTrackGroup.copyWithId(i + ":" + childTrackGroup.id); + childTrackGroupByMergedTrackGroup.put(mergedTrackGroup, childTrackGroup); + trackGroupArray[trackGroupIndex++] = mergedTrackGroup; } } trackGroups = new TrackGroupArray(trackGroupArray); @@ -253,4 +301,327 @@ public void onContinueLoadingRequested(MediaPeriod ignored) { Assertions.checkNotNull(callback).onContinueLoadingRequested(this); } + private static final class TimeOffsetMediaPeriod implements MediaPeriod, MediaPeriod.Callback { + + private final MediaPeriod mediaPeriod; + private final long timeOffsetUs; + + private @MonotonicNonNull Callback callback; + + public TimeOffsetMediaPeriod(MediaPeriod mediaPeriod, long timeOffsetUs) { + this.mediaPeriod = mediaPeriod; + this.timeOffsetUs = timeOffsetUs; + } + + @Override + public void prepare(Callback callback, long positionUs) { + this.callback = callback; + mediaPeriod.prepare(/* callback= */ this, positionUs - timeOffsetUs); + } + + @Override + public void maybeThrowPrepareError() throws IOException { + mediaPeriod.maybeThrowPrepareError(); + } + + @Override + public TrackGroupArray getTrackGroups() { + return mediaPeriod.getTrackGroups(); + } + + @Override + public List getStreamKeys(List trackSelections) { + return mediaPeriod.getStreamKeys(trackSelections); + } + + @Override + public long selectTracks( + @NullableType ExoTrackSelection[] selections, + boolean[] mayRetainStreamFlags, + @NullableType SampleStream[] streams, + boolean[] streamResetFlags, + long positionUs) { + @NullableType SampleStream[] childStreams = new SampleStream[streams.length]; + for (int i = 0; i < streams.length; i++) { + TimeOffsetSampleStream sampleStream = (TimeOffsetSampleStream) streams[i]; + childStreams[i] = sampleStream != null ? sampleStream.getChildStream() : null; + } + long startPositionUs = + mediaPeriod.selectTracks( + selections, + mayRetainStreamFlags, + childStreams, + streamResetFlags, + positionUs - timeOffsetUs); + for (int i = 0; i < streams.length; i++) { + @Nullable SampleStream childStream = childStreams[i]; + if (childStream == null) { + streams[i] = null; + } else if (streams[i] == null + || ((TimeOffsetSampleStream) streams[i]).getChildStream() != childStream) { + streams[i] = new TimeOffsetSampleStream(childStream, timeOffsetUs); + } + } + return startPositionUs + timeOffsetUs; + } + + @Override + public void discardBuffer(long positionUs, boolean toKeyframe) { + mediaPeriod.discardBuffer(positionUs - timeOffsetUs, toKeyframe); + } + + @Override + public long readDiscontinuity() { + long discontinuityPositionUs = mediaPeriod.readDiscontinuity(); + return discontinuityPositionUs == C.TIME_UNSET + ? 
C.TIME_UNSET + : discontinuityPositionUs + timeOffsetUs; + } + + @Override + public long seekToUs(long positionUs) { + return mediaPeriod.seekToUs(positionUs - timeOffsetUs) + timeOffsetUs; + } + + @Override + public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParameters) { + return mediaPeriod.getAdjustedSeekPositionUs(positionUs - timeOffsetUs, seekParameters) + + timeOffsetUs; + } + + @Override + public long getBufferedPositionUs() { + long bufferedPositionUs = mediaPeriod.getBufferedPositionUs(); + return bufferedPositionUs == C.TIME_END_OF_SOURCE + ? C.TIME_END_OF_SOURCE + : bufferedPositionUs + timeOffsetUs; + } + + @Override + public long getNextLoadPositionUs() { + long nextLoadPositionUs = mediaPeriod.getNextLoadPositionUs(); + return nextLoadPositionUs == C.TIME_END_OF_SOURCE + ? C.TIME_END_OF_SOURCE + : nextLoadPositionUs + timeOffsetUs; + } + + @Override + public boolean continueLoading(long positionUs) { + return mediaPeriod.continueLoading(positionUs - timeOffsetUs); + } + + @Override + public boolean isLoading() { + return mediaPeriod.isLoading(); + } + + @Override + public void reevaluateBuffer(long positionUs) { + mediaPeriod.reevaluateBuffer(positionUs - timeOffsetUs); + } + + @Override + public void onPrepared(MediaPeriod mediaPeriod) { + Assertions.checkNotNull(callback).onPrepared(/* mediaPeriod= */ this); + } + + @Override + public void onContinueLoadingRequested(MediaPeriod source) { + Assertions.checkNotNull(callback).onContinueLoadingRequested(/* source= */ this); + } + } + + private static final class TimeOffsetSampleStream implements SampleStream { + + private final SampleStream sampleStream; + private final long timeOffsetUs; + + public TimeOffsetSampleStream(SampleStream sampleStream, long timeOffsetUs) { + this.sampleStream = sampleStream; + this.timeOffsetUs = timeOffsetUs; + } + + public SampleStream getChildStream() { + return sampleStream; + } + + @Override + public boolean isReady() { + return sampleStream.isReady(); + } + + @Override + public void maybeThrowError() throws IOException { + sampleStream.maybeThrowError(); + } + + @Override + public int readData( + FormatHolder formatHolder, DecoderInputBuffer buffer, @ReadFlags int readFlags) { + int readResult = sampleStream.readData(formatHolder, buffer, readFlags); + if (readResult == C.RESULT_BUFFER_READ) { + buffer.timeUs = max(0, buffer.timeUs + timeOffsetUs); + } + return readResult; + } + + @Override + public int skipData(long positionUs) { + return sampleStream.skipData(positionUs - timeOffsetUs); + } + } + + private static final class ForwardingTrackSelection implements ExoTrackSelection { + + private final ExoTrackSelection trackSelection; + private final TrackGroup trackGroup; + + public ForwardingTrackSelection(ExoTrackSelection trackSelection, TrackGroup trackGroup) { + this.trackSelection = trackSelection; + this.trackGroup = trackGroup; + } + + @Override + public @Type int getType() { + return trackSelection.getType(); + } + + @Override + public TrackGroup getTrackGroup() { + return trackGroup; + } + + @Override + public int length() { + return trackSelection.length(); + } + + @Override + public Format getFormat(int index) { + return trackSelection.getFormat(index); + } + + @Override + public int getIndexInTrackGroup(int index) { + return trackSelection.getIndexInTrackGroup(index); + } + + @Override + public int indexOf(Format format) { + return trackSelection.indexOf(format); + } + + @Override + public int indexOf(int indexInTrackGroup) { + return 
trackSelection.indexOf(indexInTrackGroup); + } + + @Override + public void enable() { + trackSelection.enable(); + } + + @Override + public void disable() { + trackSelection.disable(); + } + + @Override + public Format getSelectedFormat() { + return trackSelection.getSelectedFormat(); + } + + @Override + public int getSelectedIndexInTrackGroup() { + return trackSelection.getSelectedIndexInTrackGroup(); + } + + @Override + public int getSelectedIndex() { + return trackSelection.getSelectedIndex(); + } + + @Override + public @C.SelectionReason int getSelectionReason() { + return trackSelection.getSelectionReason(); + } + + @Nullable + @Override + public Object getSelectionData() { + return trackSelection.getSelectionData(); + } + + @Override + public void onPlaybackSpeed(float playbackSpeed) { + trackSelection.onPlaybackSpeed(playbackSpeed); + } + + @Override + public void onDiscontinuity() { + trackSelection.onDiscontinuity(); + } + + @Override + public void onRebuffer() { + trackSelection.onRebuffer(); + } + + @Override + public void onPlayWhenReadyChanged(boolean playWhenReady) { + trackSelection.onPlayWhenReadyChanged(playWhenReady); + } + + @Override + public void updateSelectedTrack( + long playbackPositionUs, + long bufferedDurationUs, + long availableDurationUs, + List queue, + MediaChunkIterator[] mediaChunkIterators) { + trackSelection.updateSelectedTrack( + playbackPositionUs, bufferedDurationUs, availableDurationUs, queue, mediaChunkIterators); + } + + @Override + public int evaluateQueueSize(long playbackPositionUs, List queue) { + return trackSelection.evaluateQueueSize(playbackPositionUs, queue); + } + + @Override + public boolean shouldCancelChunkLoad( + long playbackPositionUs, Chunk loadingChunk, List queue) { + return trackSelection.shouldCancelChunkLoad(playbackPositionUs, loadingChunk, queue); + } + + @Override + public boolean blacklist(int index, long exclusionDurationMs) { + return trackSelection.blacklist(index, exclusionDurationMs); + } + + @Override + public boolean isBlacklisted(int index, long nowMs) { + return trackSelection.isBlacklisted(index, nowMs); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (!(o instanceof ForwardingTrackSelection)) { + return false; + } + ForwardingTrackSelection that = (ForwardingTrackSelection) o; + return trackSelection.equals(that.trackSelection) && trackGroup.equals(that.trackGroup); + } + + @Override + public int hashCode() { + int result = 17; + result = 31 * result + trackGroup.hashCode(); + result = 31 * result + trackSelection.hashCode(); + return result; + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MergingMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MergingMediaSource.java index dd7675f3d4..4a5f74ea77 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MergingMediaSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/MergingMediaSource.java @@ -15,18 +15,29 @@ */ package com.google.android.exoplayer2.source; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.Timeline; import 
com.google.android.exoplayer2.upstream.Allocator; import com.google.android.exoplayer2.upstream.TransferListener; +import com.google.common.collect.Multimap; +import com.google.common.collect.MultimapBuilder; import java.io.IOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; +import java.util.Map; /** * Merges multiple {@link MediaSource}s. @@ -35,25 +46,20 @@ */ public final class MergingMediaSource extends CompositeMediaSource { - /** - * Thrown when a {@link MergingMediaSource} cannot merge its sources. - */ + /** Thrown when a {@link MergingMediaSource} cannot merge its sources. */ public static final class IllegalMergeException extends IOException { /** The reason the merge failed. One of {@link #REASON_PERIOD_COUNT_MISMATCH}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({REASON_PERIOD_COUNT_MISMATCH}) public @interface Reason {} - /** - * The sources have different period counts. - */ + /** The sources have different period counts. */ public static final int REASON_PERIOD_COUNT_MISMATCH = 0; - /** - * The reason the merge failed. - */ - @Reason public final int reason; + /** The reason the merge failed. */ + public final @Reason int reason; /** * @param reason The reason the merge failed. @@ -61,45 +67,101 @@ public static final class IllegalMergeException extends IOException { public IllegalMergeException(@Reason int reason) { this.reason = reason; } - } private static final int PERIOD_COUNT_UNSET = -1; + private static final MediaItem PLACEHOLDER_MEDIA_ITEM = + new MediaItem.Builder().setMediaId("MergingMediaSource").build(); + private final boolean adjustPeriodTimeOffsets; + private final boolean clipDurations; private final MediaSource[] mediaSources; private final Timeline[] timelines; private final ArrayList pendingTimelineSources; private final CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory; + private final Map clippedDurationsUs; + private final Multimap clippedMediaPeriods; private int periodCount; + private long[][] periodTimeOffsetsUs; + @Nullable private IllegalMergeException mergeError; /** - * @param mediaSources The {@link MediaSource}s to merge. + * Creates a merging media source. + * + *

Neither offsets between the timestamps in the media sources nor the durations of the media
+   *     sources will be adjusted.
+   *
+   * @param mediaSources The {@link MediaSource MediaSources} to merge.
    */
   public MergingMediaSource(MediaSource... mediaSources) {
-    this(new DefaultCompositeSequenceableLoaderFactory(), mediaSources);
+    this(/* adjustPeriodTimeOffsets= */ false, mediaSources);
   }
 
   /**
-   * @param compositeSequenceableLoaderFactory A factory to create composite
-   *     {@link SequenceableLoader}s for when this media source loads data from multiple streams
-   *     (video, audio etc...).
-   * @param mediaSources The {@link MediaSource}s to merge.
+   * Creates a merging media source.
+   *
+   * <p>
      Durations of the media sources will not be adjusted. + * + * @param adjustPeriodTimeOffsets Whether to adjust timestamps of the merged media sources to all + * start at the same time. + * @param mediaSources The {@link MediaSource MediaSources} to merge. */ - public MergingMediaSource(CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory, + public MergingMediaSource(boolean adjustPeriodTimeOffsets, MediaSource... mediaSources) { + this(adjustPeriodTimeOffsets, /* clipDurations= */ false, mediaSources); + } + + /** + * Creates a merging media source. + * + * @param adjustPeriodTimeOffsets Whether to adjust timestamps of the merged media sources to all + * start at the same time. + * @param clipDurations Whether to clip the durations of the media sources to match the shortest + * duration. + * @param mediaSources The {@link MediaSource MediaSources} to merge. + */ + public MergingMediaSource( + boolean adjustPeriodTimeOffsets, boolean clipDurations, MediaSource... mediaSources) { + this( + adjustPeriodTimeOffsets, + clipDurations, + new DefaultCompositeSequenceableLoaderFactory(), + mediaSources); + } + + /** + * Creates a merging media source. + * + * @param adjustPeriodTimeOffsets Whether to adjust timestamps of the merged media sources to all + * start at the same time. + * @param clipDurations Whether to clip the durations of the media sources to match the shortest + * duration. + * @param compositeSequenceableLoaderFactory A factory to create composite {@link + * SequenceableLoader}s for when this media source loads data from multiple streams (video, + * audio etc...). + * @param mediaSources The {@link MediaSource MediaSources} to merge. + */ + public MergingMediaSource( + boolean adjustPeriodTimeOffsets, + boolean clipDurations, + CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory, MediaSource... mediaSources) { + this.adjustPeriodTimeOffsets = adjustPeriodTimeOffsets; + this.clipDurations = clipDurations; this.mediaSources = mediaSources; this.compositeSequenceableLoaderFactory = compositeSequenceableLoaderFactory; pendingTimelineSources = new ArrayList<>(Arrays.asList(mediaSources)); periodCount = PERIOD_COUNT_UNSET; timelines = new Timeline[mediaSources.length]; + periodTimeOffsetsUs = new long[0][]; + clippedDurationsUs = new HashMap<>(); + clippedMediaPeriods = MultimapBuilder.hashKeys().arrayListValues().build(); } @Override - @Nullable - public Object getTag() { - return mediaSources.length > 0 ? mediaSources[0].getTag() : null; + public MediaItem getMediaItem() { + return mediaSources.length > 0 ? 
mediaSources[0].getMediaItem() : PLACEHOLDER_MEDIA_ITEM; } @Override @@ -125,16 +187,40 @@ public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long star for (int i = 0; i < periods.length; i++) { MediaPeriodId childMediaPeriodId = id.copyWithPeriodUid(timelines[i].getUidOfPeriod(periodIndex)); - periods[i] = mediaSources[i].createPeriod(childMediaPeriodId, allocator, startPositionUs); + periods[i] = + mediaSources[i].createPeriod( + childMediaPeriodId, allocator, startPositionUs - periodTimeOffsetsUs[periodIndex][i]); + } + MediaPeriod mediaPeriod = + new MergingMediaPeriod( + compositeSequenceableLoaderFactory, periodTimeOffsetsUs[periodIndex], periods); + if (clipDurations) { + mediaPeriod = + new ClippingMediaPeriod( + mediaPeriod, + /* enableInitialDiscontinuity= */ true, + /* startUs= */ 0, + /* endUs= */ checkNotNull(clippedDurationsUs.get(id.periodUid))); + clippedMediaPeriods.put(id.periodUid, (ClippingMediaPeriod) mediaPeriod); } - return new MergingMediaPeriod(compositeSequenceableLoaderFactory, periods); + return mediaPeriod; } @Override public void releasePeriod(MediaPeriod mediaPeriod) { + if (clipDurations) { + ClippingMediaPeriod clippingMediaPeriod = (ClippingMediaPeriod) mediaPeriod; + for (Map.Entry entry : clippedMediaPeriods.entries()) { + if (entry.getValue().equals(clippingMediaPeriod)) { + clippedMediaPeriods.remove(entry.getKey(), entry.getValue()); + break; + } + } + mediaPeriod = clippingMediaPeriod.mediaPeriod; + } MergingMediaPeriod mergingPeriod = (MergingMediaPeriod) mediaPeriod; for (int i = 0; i < mediaSources.length; i++) { - mediaSources[i].releasePeriod(mergingPeriod.periods[i]); + mediaSources[i].releasePeriod(mergingPeriod.getChildPeriod(i)); } } @@ -150,35 +236,120 @@ protected void releaseSourceInternal() { @Override protected void onChildSourceInfoRefreshed( - Integer id, MediaSource mediaSource, Timeline timeline) { - if (mergeError == null) { - mergeError = checkTimelineMerges(timeline); - } + Integer childSourceId, MediaSource mediaSource, Timeline newTimeline) { if (mergeError != null) { return; } + if (periodCount == PERIOD_COUNT_UNSET) { + periodCount = newTimeline.getPeriodCount(); + } else if (newTimeline.getPeriodCount() != periodCount) { + mergeError = new IllegalMergeException(IllegalMergeException.REASON_PERIOD_COUNT_MISMATCH); + return; + } + if (periodTimeOffsetsUs.length == 0) { + periodTimeOffsetsUs = new long[periodCount][timelines.length]; + } pendingTimelineSources.remove(mediaSource); - timelines[id] = timeline; + timelines[childSourceId] = newTimeline; if (pendingTimelineSources.isEmpty()) { - refreshSourceInfo(timelines[0]); + if (adjustPeriodTimeOffsets) { + computePeriodTimeOffsets(); + } + Timeline mergedTimeline = timelines[0]; + if (clipDurations) { + updateClippedDuration(); + mergedTimeline = new ClippedTimeline(mergedTimeline, clippedDurationsUs); + } + refreshSourceInfo(mergedTimeline); } } @Override @Nullable protected MediaPeriodId getMediaPeriodIdForChildMediaPeriodId( - Integer id, MediaPeriodId mediaPeriodId) { - return id == 0 ? mediaPeriodId : null; + Integer childSourceId, MediaPeriodId mediaPeriodId) { + return childSourceId == 0 ? 
mediaPeriodId : null; } - @Nullable - private IllegalMergeException checkTimelineMerges(Timeline timeline) { - if (periodCount == PERIOD_COUNT_UNSET) { - periodCount = timeline.getPeriodCount(); - } else if (timeline.getPeriodCount() != periodCount) { - return new IllegalMergeException(IllegalMergeException.REASON_PERIOD_COUNT_MISMATCH); + private void computePeriodTimeOffsets() { + Timeline.Period period = new Timeline.Period(); + for (int periodIndex = 0; periodIndex < periodCount; periodIndex++) { + long primaryWindowOffsetUs = + -timelines[0].getPeriod(periodIndex, period).getPositionInWindowUs(); + for (int timelineIndex = 1; timelineIndex < timelines.length; timelineIndex++) { + long secondaryWindowOffsetUs = + -timelines[timelineIndex].getPeriod(periodIndex, period).getPositionInWindowUs(); + periodTimeOffsetsUs[periodIndex][timelineIndex] = + primaryWindowOffsetUs - secondaryWindowOffsetUs; + } } - return null; } + private void updateClippedDuration() { + Timeline.Period period = new Timeline.Period(); + for (int periodIndex = 0; periodIndex < periodCount; periodIndex++) { + long minDurationUs = C.TIME_END_OF_SOURCE; + for (int timelineIndex = 0; timelineIndex < timelines.length; timelineIndex++) { + long durationUs = timelines[timelineIndex].getPeriod(periodIndex, period).getDurationUs(); + if (durationUs == C.TIME_UNSET) { + continue; + } + long adjustedDurationUs = durationUs + periodTimeOffsetsUs[periodIndex][timelineIndex]; + if (minDurationUs == C.TIME_END_OF_SOURCE || adjustedDurationUs < minDurationUs) { + minDurationUs = adjustedDurationUs; + } + } + Object periodUid = timelines[0].getUidOfPeriod(periodIndex); + clippedDurationsUs.put(periodUid, minDurationUs); + for (ClippingMediaPeriod clippingMediaPeriod : clippedMediaPeriods.get(periodUid)) { + clippingMediaPeriod.updateClipping(/* startUs= */ 0, /* endUs= */ minDurationUs); + } + } + } + + private static final class ClippedTimeline extends ForwardingTimeline { + + private final long[] periodDurationsUs; + private final long[] windowDurationsUs; + + public ClippedTimeline(Timeline timeline, Map clippedDurationsUs) { + super(timeline); + int windowCount = timeline.getWindowCount(); + windowDurationsUs = new long[timeline.getWindowCount()]; + Window window = new Window(); + for (int i = 0; i < windowCount; i++) { + windowDurationsUs[i] = timeline.getWindow(i, window).durationUs; + } + int periodCount = timeline.getPeriodCount(); + periodDurationsUs = new long[periodCount]; + Period period = new Period(); + for (int i = 0; i < periodCount; i++) { + timeline.getPeriod(i, period, /* setIds= */ true); + long clippedDurationUs = checkNotNull(clippedDurationsUs.get(period.uid)); + periodDurationsUs[i] = + clippedDurationUs != C.TIME_END_OF_SOURCE ? clippedDurationUs : period.durationUs; + if (period.durationUs != C.TIME_UNSET) { + windowDurationsUs[period.windowIndex] -= period.durationUs - periodDurationsUs[i]; + } + } + } + + @Override + public Window getWindow(int windowIndex, Window window, long defaultPositionProjectionUs) { + super.getWindow(windowIndex, window, defaultPositionProjectionUs); + window.durationUs = windowDurationsUs[windowIndex]; + window.defaultPositionUs = + window.durationUs == C.TIME_UNSET || window.defaultPositionUs == C.TIME_UNSET + ? 
window.defaultPositionUs + : min(window.defaultPositionUs, window.durationUs); + return window; + } + + @Override + public Period getPeriod(int periodIndex, Period period, boolean setIds) { + super.getPeriod(periodIndex, period, setIds); + period.durationUs = periodDurationsUs[periodIndex]; + return period; + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ProgressiveMediaExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ProgressiveMediaExtractor.java new file mode 100644 index 0000000000..7de4d5d291 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ProgressiveMediaExtractor.java @@ -0,0 +1,98 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source; + +import android.net.Uri; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.extractor.Extractor; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.PositionHolder; +import com.google.android.exoplayer2.upstream.DataReader; +import java.io.IOException; +import java.util.List; +import java.util.Map; + +/** Extracts the contents of a container file from a progressive media stream. */ +public interface ProgressiveMediaExtractor { + + /** Creates {@link ProgressiveMediaExtractor} instances. */ + interface Factory { + + /** + * Returns a new {@link ProgressiveMediaExtractor} instance. + * + * @param playerId The {@link PlayerId} of the player this extractor is used for. + */ + ProgressiveMediaExtractor createProgressiveMediaExtractor(PlayerId playerId); + } + + /** + * Initializes the underlying infrastructure for reading from the input. + * + * @param dataReader The {@link DataReader} from which data should be read. + * @param uri The {@link Uri} from which the media is obtained. + * @param responseHeaders The response headers of the media, or an empty map if there are none. + * @param position The initial position of the {@code dataReader} in the stream. + * @param length The length of the stream, or {@link C#LENGTH_UNSET} if length is unknown. + * @param output The {@link ExtractorOutput} that will be used to initialize the selected + * extractor. + * @throws UnrecognizedInputFormatException Thrown if the input format could not be detected. + * @throws IOException Thrown if the input could not be read. + */ + void init( + DataReader dataReader, + Uri uri, + Map> responseHeaders, + long position, + long length, + ExtractorOutput output) + throws IOException; + + /** Releases any held resources. */ + void release(); + + /** + * Disables seeking in MP3 streams. + * + *

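The contract of this new interface is essentially "init once, then call read until it stops returning RESULT_CONTINUE, re-initializing if it asks for a seek". A sketch of such a driver, assuming only what the javadoc states; the class, the canceled flag, and the re-init comment are illustrative stand-ins for what ProgressiveMediaPeriod's real loading loop does:

import com.google.android.exoplayer2.extractor.Extractor;
import com.google.android.exoplayer2.extractor.PositionHolder;
import com.google.android.exoplayer2.source.ProgressiveMediaExtractor;
import java.io.IOException;

final class ExtractorDriver {

  private volatile boolean canceled; // illustrative cancellation flag

  int driveToEnd(ProgressiveMediaExtractor extractor, PositionHolder positionHolder)
      throws IOException {
    int result = Extractor.RESULT_CONTINUE;
    while (result == Extractor.RESULT_CONTINUE && !canceled) {
      result = extractor.read(positionHolder);
    }
    if (result == Extractor.RESULT_SEEK) {
      // The extractor needs bytes from a different offset: re-open the DataReader at
      // positionHolder.position and call init(...) again before continuing to read.
    }
    return result;
  }
}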
      MP3 live streams commonly have seekable metadata, despite being unseekable. + */ + void disableSeekingOnMp3Streams(); + + /** + * Returns the current read position in the input stream, or {@link C#POSITION_UNSET} if no input + * is available. + */ + long getCurrentInputPosition(); + + /** + * Notifies the extracting infrastructure that a seek has occurred. + * + * @param position The byte offset in the stream from which data will be provided. + * @param seekTimeUs The seek time in microseconds. + */ + void seek(long position, long seekTimeUs); + + /** + * Extracts data starting at the current input stream position. + * + * @param positionHolder If {@link Extractor#RESULT_SEEK} is returned, this holder is updated to + * hold the position of the required data. + * @return One of the {@link Extractor}{@code .RESULT_*} values. + * @throws IOException If an error occurred reading from the input. + */ + int read(PositionHolder positionHolder) throws IOException; +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ProgressiveMediaPeriod.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ProgressiveMediaPeriod.java index 277e17410d..b29338f86e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ProgressiveMediaPeriod.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ProgressiveMediaPeriod.java @@ -15,35 +15,40 @@ */ package com.google.android.exoplayer2.source; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.Math.max; +import static java.lang.Math.min; + import android.net.Uri; import android.os.Handler; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.C.DataType; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.SeekParameters; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; +import com.google.android.exoplayer2.drm.DrmSessionEventListener; import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.extractor.DefaultExtractorInput; import com.google.android.exoplayer2.extractor.Extractor; -import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.PositionHolder; import com.google.android.exoplayer2.extractor.SeekMap; import com.google.android.exoplayer2.extractor.SeekMap.SeekPoints; import com.google.android.exoplayer2.extractor.SeekMap.Unseekable; import com.google.android.exoplayer2.extractor.TrackOutput; -import com.google.android.exoplayer2.extractor.mp3.Mp3Extractor; import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.metadata.icy.IcyHeaders; -import com.google.android.exoplayer2.source.MediaSourceEventListener.EventDispatcher; import com.google.android.exoplayer2.source.SampleQueue.UpstreamFormatChangedListener; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.source.SampleStream.ReadFlags; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.upstream.Allocator; import com.google.android.exoplayer2.upstream.DataSource; +import com.google.android.exoplayer2.upstream.DataSourceUtil; import 
com.google.android.exoplayer2.upstream.DataSpec; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy.LoadErrorInfo; import com.google.android.exoplayer2.upstream.Loader; import com.google.android.exoplayer2.upstream.Loader.LoadErrorAction; import com.google.android.exoplayer2.upstream.Loader.Loadable; @@ -53,13 +58,15 @@ import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; -import java.io.EOFException; import java.io.IOException; +import java.io.InterruptedIOException; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.checkerframework.checker.nullness.compatqual.NullableType; +import org.checkerframework.checker.nullness.qual.EnsuresNonNull; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; /** A {@link MediaPeriod} that extracts data using an {@link Extractor}. */ /* package */ final class ProgressiveMediaPeriod @@ -69,9 +76,7 @@ Loader.ReleaseCallback, UpstreamFormatChangedListener { - /** - * Listener for information about the period. - */ + /** Listener for information about the period. */ interface Listener { /** @@ -89,48 +94,48 @@ interface Listener { * When the source's duration is unknown, it is calculated by adding this value to the largest * sample timestamp seen when buffering completes. */ - private static final long DEFAULT_LAST_SAMPLE_DURATION_US = 10000; + private static final long DEFAULT_LAST_SAMPLE_DURATION_US = 10_000; private static final Map ICY_METADATA_HEADERS = createIcyMetadataHeaders(); private static final Format ICY_FORMAT = - Format.createSampleFormat("icy", MimeTypes.APPLICATION_ICY, Format.OFFSET_SAMPLE_RELATIVE); + new Format.Builder().setId("icy").setSampleMimeType(MimeTypes.APPLICATION_ICY).build(); private final Uri uri; private final DataSource dataSource; - private final DrmSessionManager drmSessionManager; + private final DrmSessionManager drmSessionManager; private final LoadErrorHandlingPolicy loadErrorHandlingPolicy; - private final EventDispatcher eventDispatcher; + private final MediaSourceEventListener.EventDispatcher mediaSourceEventDispatcher; + private final DrmSessionEventListener.EventDispatcher drmEventDispatcher; private final Listener listener; private final Allocator allocator; @Nullable private final String customCacheKey; private final long continueLoadingCheckIntervalBytes; private final Loader loader; - private final ExtractorHolder extractorHolder; + private final ProgressiveMediaExtractor progressiveMediaExtractor; private final ConditionVariable loadCondition; private final Runnable maybeFinishPrepareRunnable; private final Runnable onContinueLoadingRequestedRunnable; private final Handler handler; @Nullable private Callback callback; - @Nullable private SeekMap seekMap; @Nullable private IcyHeaders icyHeaders; private SampleQueue[] sampleQueues; private TrackId[] sampleQueueTrackIds; private boolean sampleQueuesBuilt; - private boolean prepared; - @Nullable private PreparedState preparedState; + private boolean prepared; private boolean haveAudioVideoTracks; - private int dataType; + private @MonotonicNonNull TrackState trackState; + private @MonotonicNonNull SeekMap seekMap; + private long durationUs; + private boolean isLive; + private @DataType int dataType; private boolean seenFirstTrackSelection; private boolean notifyDiscontinuity; - private 
boolean notifiedReadingStarted; private int enabledTrackCount; - private long durationUs; - private long length; - private boolean isLive; + private boolean isLengthKnown; private long lastSeekPositionUs; private long pendingResetPositionUs; @@ -143,9 +148,13 @@ interface Listener { /** * @param uri The {@link Uri} of the media stream. * @param dataSource The data source to read the media. - * @param extractors The extractors to use to read the data source. + * @param progressiveMediaExtractor The {@link ProgressiveMediaExtractor} to use to read the data + * source. + * @param drmSessionManager A {@link DrmSessionManager} to allow DRM interactions. + * @param drmEventDispatcher A dispatcher to notify of {@link DrmSessionEventListener} events. * @param loadErrorHandlingPolicy The {@link LoadErrorHandlingPolicy}. - * @param eventDispatcher A dispatcher to notify of events. + * @param mediaSourceEventDispatcher A dispatcher to notify of {@link MediaSourceEventListener} + * events. * @param listener A listener to notify when information about the period changes. * @param allocator An {@link Allocator} from which to obtain media buffer allocations. * @param customCacheKey A custom key that uniquely identifies the original stream. Used for cache @@ -154,17 +163,15 @@ interface Listener { * invocation of {@link Callback#onContinueLoadingRequested(SequenceableLoader)}. */ // maybeFinishPrepare is not posted to the handler until initialization completes. - @SuppressWarnings({ - "nullness:argument.type.incompatible", - "nullness:methodref.receiver.bound.invalid" - }) + @SuppressWarnings({"nullness:argument", "nullness:methodref.receiver.bound"}) public ProgressiveMediaPeriod( Uri uri, DataSource dataSource, - Extractor[] extractors, - DrmSessionManager drmSessionManager, + ProgressiveMediaExtractor progressiveMediaExtractor, + DrmSessionManager drmSessionManager, + DrmSessionEventListener.EventDispatcher drmEventDispatcher, LoadErrorHandlingPolicy loadErrorHandlingPolicy, - EventDispatcher eventDispatcher, + MediaSourceEventListener.EventDispatcher mediaSourceEventDispatcher, Listener listener, Allocator allocator, @Nullable String customCacheKey, @@ -172,31 +179,29 @@ public ProgressiveMediaPeriod( this.uri = uri; this.dataSource = dataSource; this.drmSessionManager = drmSessionManager; + this.drmEventDispatcher = drmEventDispatcher; this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; - this.eventDispatcher = eventDispatcher; + this.mediaSourceEventDispatcher = mediaSourceEventDispatcher; this.listener = listener; this.allocator = allocator; this.customCacheKey = customCacheKey; this.continueLoadingCheckIntervalBytes = continueLoadingCheckIntervalBytes; - loader = new Loader("Loader:ProgressiveMediaPeriod"); - extractorHolder = new ExtractorHolder(extractors); + loader = new Loader("ProgressiveMediaPeriod"); + this.progressiveMediaExtractor = progressiveMediaExtractor; loadCondition = new ConditionVariable(); maybeFinishPrepareRunnable = this::maybeFinishPrepare; onContinueLoadingRequestedRunnable = () -> { if (!released) { - Assertions.checkNotNull(callback) - .onContinueLoadingRequested(ProgressiveMediaPeriod.this); + checkNotNull(callback).onContinueLoadingRequested(ProgressiveMediaPeriod.this); } }; - handler = new Handler(); + handler = Util.createHandlerForCurrentLooper(); sampleQueueTrackIds = new TrackId[0]; sampleQueues = new SampleQueue[0]; pendingResetPositionUs = C.TIME_UNSET; - length = C.LENGTH_UNSET; durationUs = C.TIME_UNSET; dataType = C.DATA_TYPE_MEDIA; - 
eventDispatcher.mediaPeriodCreated(); } public void release() { @@ -211,7 +216,6 @@ public void release() { handler.removeCallbacksAndMessages(null); callback = null; released = true; - eventDispatcher.mediaPeriodReleased(); } @Override @@ -219,7 +223,7 @@ public void onLoaderReleased() { for (SampleQueue sampleQueue : sampleQueues) { sampleQueue.release(); } - extractorHolder.release(); + progressiveMediaExtractor.release(); } @Override @@ -233,25 +237,27 @@ public void prepare(Callback callback, long positionUs) { public void maybeThrowPrepareError() throws IOException { maybeThrowError(); if (loadingFinished && !prepared) { - throw new ParserException("Loading finished before preparation is complete."); + throw ParserException.createForMalformedContainer( + "Loading finished before preparation is complete.", /* cause= */ null); } } @Override public TrackGroupArray getTrackGroups() { - return getPreparedState().tracks; + assertPrepared(); + return trackState.tracks; } @Override public long selectTracks( - @NullableType TrackSelection[] selections, + @NullableType ExoTrackSelection[] selections, boolean[] mayRetainStreamFlags, @NullableType SampleStream[] streams, boolean[] streamResetFlags, long positionUs) { - PreparedState preparedState = getPreparedState(); - TrackGroupArray tracks = preparedState.tracks; - boolean[] trackEnabledStates = preparedState.trackEnabledStates; + assertPrepared(); + TrackGroupArray tracks = trackState.tracks; + boolean[] trackEnabledStates = trackState.trackEnabledStates; int oldEnabledTrackCount = enabledTrackCount; // Deselect old tracks. for (int i = 0; i < selections.length; i++) { @@ -269,7 +275,7 @@ public long selectTracks( // Select new tracks. for (int i = 0; i < selections.length; i++) { if (streams[i] == null && selections[i] != null) { - TrackSelection selection = selections[i]; + ExoTrackSelection selection = selections[i]; Assertions.checkState(selection.length() == 1); Assertions.checkState(selection.getIndexInTrackGroup(0) == 0); int track = tracks.indexOf(selection.getTrackGroup()); @@ -320,10 +326,11 @@ public long selectTracks( @Override public void discardBuffer(long positionUs, boolean toKeyframe) { + assertPrepared(); if (isPendingReset()) { return; } - boolean[] trackEnabledStates = getPreparedState().trackEnabledStates; + boolean[] trackEnabledStates = trackState.trackEnabledStates; int trackCount = sampleQueues.length; for (int i = 0; i < trackCount; i++) { sampleQueues[i].discardTo(positionUs, toKeyframe, trackEnabledStates[i]); @@ -358,15 +365,11 @@ public boolean isLoading() { @Override public long getNextLoadPositionUs() { - return enabledTrackCount == 0 ? C.TIME_END_OF_SOURCE : getBufferedPositionUs(); + return getBufferedPositionUs(); } @Override public long readDiscontinuity() { - if (!notifiedReadingStarted) { - eventDispatcher.readingStarted(); - notifiedReadingStarted = true; - } if (notifyDiscontinuity && (loadingFinished || getExtractedSamplesCount() > extractedSamplesCountAtStartOfLoad)) { notifyDiscontinuity = false; @@ -377,8 +380,8 @@ public long readDiscontinuity() { @Override public long getBufferedPositionUs() { - boolean[] trackIsAudioVideoFlags = getPreparedState().trackIsAudioVideoFlags; - if (loadingFinished) { + assertPrepared(); + if (loadingFinished || enabledTrackCount == 0) { return C.TIME_END_OF_SOURCE; } else if (isPendingReset()) { return pendingResetPositionUs; @@ -388,24 +391,26 @@ public long getBufferedPositionUs() { // Ignore non-AV tracks, which may be sparse or poorly interleaved. 
int trackCount = sampleQueues.length; for (int i = 0; i < trackCount; i++) { - if (trackIsAudioVideoFlags[i] && !sampleQueues[i].isLastSampleQueued()) { - largestQueuedTimestampUs = Math.min(largestQueuedTimestampUs, - sampleQueues[i].getLargestQueuedTimestampUs()); + if (trackState.trackIsAudioVideoFlags[i] + && trackState.trackEnabledStates[i] + && !sampleQueues[i].isLastSampleQueued()) { + largestQueuedTimestampUs = + min(largestQueuedTimestampUs, sampleQueues[i].getLargestQueuedTimestampUs()); } } } if (largestQueuedTimestampUs == Long.MAX_VALUE) { - largestQueuedTimestampUs = getLargestQueuedTimestampUs(); + largestQueuedTimestampUs = getLargestQueuedTimestampUs(/* includeDisabledTracks= */ false); } - return largestQueuedTimestampUs == Long.MIN_VALUE ? lastSeekPositionUs + return largestQueuedTimestampUs == Long.MIN_VALUE + ? lastSeekPositionUs : largestQueuedTimestampUs; } @Override public long seekToUs(long positionUs) { - PreparedState preparedState = getPreparedState(); - SeekMap seekMap = preparedState.seekMap; - boolean[] trackIsAudioVideoFlags = preparedState.trackIsAudioVideoFlags; + assertPrepared(); + boolean[] trackIsAudioVideoFlags = trackState.trackIsAudioVideoFlags; // Treat all seeks into non-seekable media as being to t=0. positionUs = seekMap.isSeekable() ? positionUs : 0; @@ -428,6 +433,10 @@ && seekInsideBufferUs(trackIsAudioVideoFlags, positionUs)) { pendingResetPositionUs = positionUs; loadingFinished = false; if (loader.isLoading()) { + // Discard as much as we can synchronously. + for (SampleQueue sampleQueue : sampleQueues) { + sampleQueue.discardToEnd(); + } loader.cancelLoading(); } else { loader.clearFatalError(); @@ -440,14 +449,14 @@ && seekInsideBufferUs(trackIsAudioVideoFlags, positionUs)) { @Override public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParameters) { - SeekMap seekMap = getPreparedState().seekMap; + assertPrepared(); if (!seekMap.isSeekable()) { // Treat all seeks into non-seekable media as being to t=0. return 0; } SeekPoints seekPoints = seekMap.getSeekPoints(positionUs); - return Util.resolveSeekPositionUs( - positionUs, seekParameters, seekPoints.first.timeUs, seekPoints.second.timeUs); + return seekParameters.resolveSeekPositionUs( + positionUs, seekPoints.first.timeUs, seekPoints.second.timeUs); } // SampleStream methods. 
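A minimal sketch (illustration only, not part of this patch) of the seek-resolution change in the hunk above: getAdjustedSeekPositionUs now delegates the choice between the surrounding sync points to SeekParameters itself rather than the removed Util helper. Only the SeekMap and SeekParameters calls already visible in the hunk are assumed; the AdjustedSeekExample class name is hypothetical.

import com.google.android.exoplayer2.SeekParameters;
import com.google.android.exoplayer2.extractor.SeekMap;

final class AdjustedSeekExample {
  // Returns the position the period would actually seek to for positionUs.
  static long resolve(SeekMap seekMap, SeekParameters seekParameters, long positionUs) {
    if (!seekMap.isSeekable()) {
      // Mirrors the period: all seeks into non-seekable media are treated as seeks to t=0.
      return 0;
    }
    SeekMap.SeekPoints seekPoints = seekMap.getSeekPoints(positionUs);
    // SeekParameters (e.g. EXACT, CLOSEST_SYNC, PREVIOUS_SYNC) picks between the sync points
    // immediately before and after positionUs, honoring its tolerance window.
    return seekParameters.resolveSeekPositionUs(
        positionUs, seekPoints.first.timeUs, seekPoints.second.timeUs);
  }
}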
@@ -469,14 +478,13 @@ public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParame int sampleQueueIndex, FormatHolder formatHolder, DecoderInputBuffer buffer, - boolean formatRequired) { + @ReadFlags int readFlags) { if (suppressRead()) { return C.RESULT_NOTHING_READ; } maybeNotifyDownstreamFormat(sampleQueueIndex); int result = - sampleQueues[sampleQueueIndex].read( - formatHolder, buffer, formatRequired, loadingFinished, lastSeekPositionUs); + sampleQueues[sampleQueueIndex].read(formatHolder, buffer, readFlags, loadingFinished); if (result == C.RESULT_NOTHING_READ) { maybeStartDeferredRetry(sampleQueueIndex); } @@ -489,12 +497,8 @@ public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParame } maybeNotifyDownstreamFormat(track); SampleQueue sampleQueue = sampleQueues[track]; - int skipCount; - if (loadingFinished && positionUs > sampleQueue.getLargestQueuedTimestampUs()) { - skipCount = sampleQueue.advanceToEnd(); - } else { - skipCount = sampleQueue.advanceTo(positionUs); - } + int skipCount = sampleQueue.getSkipCount(positionUs, loadingFinished); + sampleQueue.skip(skipCount); if (skipCount == 0) { maybeStartDeferredRetry(track); } @@ -502,11 +506,11 @@ public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParame } private void maybeNotifyDownstreamFormat(int track) { - PreparedState preparedState = getPreparedState(); - boolean[] trackNotifiedDownstreamFormats = preparedState.trackNotifiedDownstreamFormats; + assertPrepared(); + boolean[] trackNotifiedDownstreamFormats = trackState.trackNotifiedDownstreamFormats; if (!trackNotifiedDownstreamFormats[track]) { - Format trackFormat = preparedState.tracks.get(track).getFormat(/* index= */ 0); - eventDispatcher.downstreamFormatChanged( + Format trackFormat = trackState.tracks.get(track).getFormat(/* index= */ 0); + mediaSourceEventDispatcher.downstreamFormatChanged( MimeTypes.getTrackType(trackFormat.sampleMimeType), trackFormat, C.SELECTION_REASON_UNKNOWN, @@ -517,7 +521,8 @@ private void maybeNotifyDownstreamFormat(int track) { } private void maybeStartDeferredRetry(int track) { - boolean[] trackIsAudioVideoFlags = getPreparedState().trackIsAudioVideoFlags; + assertPrepared(); + boolean[] trackIsAudioVideoFlags = trackState.trackIsAudioVideoFlags; if (!pendingDeferredRetry || !trackIsAudioVideoFlags[track] || sampleQueues[track].isReady(/* loadingFinished= */ false)) { @@ -531,7 +536,7 @@ private void maybeStartDeferredRetry(int track) { for (SampleQueue sampleQueue : sampleQueues) { sampleQueue.reset(); } - Assertions.checkNotNull(callback).onContinueLoadingRequested(this); + checkNotNull(callback).onContinueLoadingRequested(this); } private boolean suppressRead() { @@ -541,58 +546,71 @@ private boolean suppressRead() { // Loader.Callback implementation. @Override - public void onLoadCompleted(ExtractingLoadable loadable, long elapsedRealtimeMs, - long loadDurationMs) { + public void onLoadCompleted( + ExtractingLoadable loadable, long elapsedRealtimeMs, long loadDurationMs) { if (durationUs == C.TIME_UNSET && seekMap != null) { boolean isSeekable = seekMap.isSeekable(); - long largestQueuedTimestampUs = getLargestQueuedTimestampUs(); - durationUs = largestQueuedTimestampUs == Long.MIN_VALUE ? 0 - : largestQueuedTimestampUs + DEFAULT_LAST_SAMPLE_DURATION_US; + long largestQueuedTimestampUs = + getLargestQueuedTimestampUs(/* includeDisabledTracks= */ true); + durationUs = + largestQueuedTimestampUs == Long.MIN_VALUE + ? 
0 + : largestQueuedTimestampUs + DEFAULT_LAST_SAMPLE_DURATION_US; listener.onSourceInfoRefreshed(durationUs, isSeekable, isLive); } - eventDispatcher.loadCompleted( - loadable.dataSpec, - loadable.dataSource.getLastOpenedUri(), - loadable.dataSource.getLastResponseHeaders(), + StatsDataSource dataSource = loadable.dataSource; + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + dataSource.getLastOpenedUri(), + dataSource.getLastResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + dataSource.getBytesRead()); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + mediaSourceEventDispatcher.loadCompleted( + loadEventInfo, C.DATA_TYPE_MEDIA, C.TRACK_TYPE_UNKNOWN, /* trackFormat= */ null, C.SELECTION_REASON_UNKNOWN, /* trackSelectionData= */ null, /* mediaStartTimeUs= */ loadable.seekTimeUs, - durationUs, - elapsedRealtimeMs, - loadDurationMs, - loadable.dataSource.getBytesRead()); - copyLengthFromLoader(loadable); + durationUs); loadingFinished = true; - Assertions.checkNotNull(callback).onContinueLoadingRequested(this); + checkNotNull(callback).onContinueLoadingRequested(this); } @Override - public void onLoadCanceled(ExtractingLoadable loadable, long elapsedRealtimeMs, - long loadDurationMs, boolean released) { - eventDispatcher.loadCanceled( - loadable.dataSpec, - loadable.dataSource.getLastOpenedUri(), - loadable.dataSource.getLastResponseHeaders(), + public void onLoadCanceled( + ExtractingLoadable loadable, long elapsedRealtimeMs, long loadDurationMs, boolean released) { + StatsDataSource dataSource = loadable.dataSource; + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + dataSource.getLastOpenedUri(), + dataSource.getLastResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + dataSource.getBytesRead()); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + mediaSourceEventDispatcher.loadCanceled( + loadEventInfo, C.DATA_TYPE_MEDIA, C.TRACK_TYPE_UNKNOWN, /* trackFormat= */ null, C.SELECTION_REASON_UNKNOWN, /* trackSelectionData= */ null, /* mediaStartTimeUs= */ loadable.seekTimeUs, - durationUs, - elapsedRealtimeMs, - loadDurationMs, - loadable.dataSource.getBytesRead()); + durationUs); if (!released) { - copyLengthFromLoader(loadable); for (SampleQueue sampleQueue : sampleQueues) { sampleQueue.reset(); } if (enabledTrackCount > 0) { - Assertions.checkNotNull(callback).onContinueLoadingRequested(this); + checkNotNull(callback).onContinueLoadingRequested(this); } } } @@ -604,10 +622,29 @@ public LoadErrorAction onLoadError( long loadDurationMs, IOException error, int errorCount) { - copyLengthFromLoader(loadable); + StatsDataSource dataSource = loadable.dataSource; + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + dataSource.getLastOpenedUri(), + dataSource.getLastResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + dataSource.getBytesRead()); + MediaLoadData mediaLoadData = + new MediaLoadData( + C.DATA_TYPE_MEDIA, + C.TRACK_TYPE_UNKNOWN, + /* trackFormat= */ null, + C.SELECTION_REASON_UNKNOWN, + /* trackSelectionData= */ null, + /* mediaStartTimeMs= */ Util.usToMs(loadable.seekTimeUs), + Util.usToMs(durationUs)); LoadErrorAction loadErrorAction; long retryDelayMs = - loadErrorHandlingPolicy.getRetryDelayMsFor(dataType, loadDurationMs, error, errorCount); + loadErrorHandlingPolicy.getRetryDelayMsFor( + new LoadErrorInfo(loadEventInfo, mediaLoadData, error, errorCount)); if (retryDelayMs == 
C.TIME_UNSET) { loadErrorAction = Loader.DONT_RETRY_FATAL; } else /* the load should be retried */ { @@ -619,10 +656,9 @@ public LoadErrorAction onLoadError( : Loader.DONT_RETRY; } - eventDispatcher.loadError( - loadable.dataSpec, - loadable.dataSource.getLastOpenedUri(), - loadable.dataSource.getLastResponseHeaders(), + boolean wasCanceled = !loadErrorAction.isRetry(); + mediaSourceEventDispatcher.loadError( + loadEventInfo, C.DATA_TYPE_MEDIA, C.TRACK_TYPE_UNKNOWN, /* trackFormat= */ null, @@ -630,11 +666,11 @@ public LoadErrorAction onLoadError( /* trackSelectionData= */ null, /* mediaStartTimeUs= */ loadable.seekTimeUs, durationUs, - elapsedRealtimeMs, - loadDurationMs, - loadable.dataSource.getBytesRead(), error, - !loadErrorAction.isRetry()); + wasCanceled); + if (wasCanceled) { + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + } return loadErrorAction; } @@ -653,8 +689,7 @@ public void endTracks() { @Override public void seekMap(SeekMap seekMap) { - this.seekMap = icyHeaders == null ? seekMap : new Unseekable(/* durationUs */ C.TIME_UNSET); - handler.post(maybeFinishPrepareRunnable); + handler.post(() -> setSeekMap(seekMap)); } // Icy metadata. Called by the loading thread. @@ -672,6 +707,10 @@ public void onUpstreamFormatChanged(Format format) { // Internal methods. + private void onLengthKnown() { + handler.post(() -> isLengthKnown = true); + } + private TrackOutput prepareTrackOutput(TrackId id) { int trackCount = sampleQueues.length; for (int i = 0; i < trackCount; i++) { @@ -679,8 +718,8 @@ private TrackOutput prepareTrackOutput(TrackId id) { return sampleQueues[i]; } } - SampleQueue trackOutput = new SampleQueue( - allocator, /* playbackLooper= */ handler.getLooper(), drmSessionManager); + SampleQueue trackOutput = + SampleQueue.createWithDrm(allocator, drmSessionManager, drmEventDispatcher); trackOutput.setUpstreamFormatChangeListener(this); @NullableType TrackId[] sampleQueueTrackIds = Arrays.copyOf(this.sampleQueueTrackIds, trackCount + 1); @@ -692,8 +731,18 @@ private TrackOutput prepareTrackOutput(TrackId id) { return trackOutput; } + private void setSeekMap(SeekMap seekMap) { + this.seekMap = icyHeaders == null ? seekMap : new Unseekable(/* durationUs= */ C.TIME_UNSET); + durationUs = seekMap.getDurationUs(); + isLive = !isLengthKnown && seekMap.getDurationUs() == C.TIME_UNSET; + dataType = isLive ? 
C.DATA_TYPE_MEDIA_PROGRESSIVE_LIVE : C.DATA_TYPE_MEDIA; + listener.onSourceInfoRefreshed(durationUs, seekMap.isSeekable(), isLive); + if (!prepared) { + maybeFinishPrepare(); + } + } + private void maybeFinishPrepare() { - SeekMap seekMap = this.seekMap; if (released || prepared || !sampleQueuesBuilt || seekMap == null) { return; } @@ -706,62 +755,46 @@ private void maybeFinishPrepare() { int trackCount = sampleQueues.length; TrackGroup[] trackArray = new TrackGroup[trackCount]; boolean[] trackIsAudioVideoFlags = new boolean[trackCount]; - durationUs = seekMap.getDurationUs(); for (int i = 0; i < trackCount; i++) { - Format trackFormat = sampleQueues[i].getUpstreamFormat(); - String mimeType = trackFormat.sampleMimeType; + Format trackFormat = checkNotNull(sampleQueues[i].getUpstreamFormat()); + @Nullable String mimeType = trackFormat.sampleMimeType; boolean isAudio = MimeTypes.isAudio(mimeType); boolean isAudioVideo = isAudio || MimeTypes.isVideo(mimeType); trackIsAudioVideoFlags[i] = isAudioVideo; haveAudioVideoTracks |= isAudioVideo; - IcyHeaders icyHeaders = this.icyHeaders; + @Nullable IcyHeaders icyHeaders = this.icyHeaders; if (icyHeaders != null) { if (isAudio || sampleQueueTrackIds[i].isIcyTrack) { - Metadata metadata = trackFormat.metadata; - trackFormat = - trackFormat.copyWithMetadata( - metadata == null - ? new Metadata(icyHeaders) - : metadata.copyWithAppendedEntries(icyHeaders)); + @Nullable Metadata metadata = trackFormat.metadata; + if (metadata == null) { + metadata = new Metadata(icyHeaders); + } else { + metadata = metadata.copyWithAppendedEntries(icyHeaders); + } + trackFormat = trackFormat.buildUpon().setMetadata(metadata).build(); } + // Update the track format with the bitrate from the ICY header only if it declares neither + // an average or peak bitrate of its own. if (isAudio - && trackFormat.bitrate == Format.NO_VALUE + && trackFormat.averageBitrate == Format.NO_VALUE + && trackFormat.peakBitrate == Format.NO_VALUE && icyHeaders.bitrate != Format.NO_VALUE) { - trackFormat = trackFormat.copyWithBitrate(icyHeaders.bitrate); + trackFormat = trackFormat.buildUpon().setAverageBitrate(icyHeaders.bitrate).build(); } } - if (trackFormat.drmInitData != null) { - trackFormat = - trackFormat.copyWithExoMediaCryptoType( - drmSessionManager.getExoMediaCryptoType(trackFormat.drmInitData)); - } - trackArray[i] = new TrackGroup(trackFormat); + trackFormat = trackFormat.copyWithCryptoType(drmSessionManager.getCryptoType(trackFormat)); + trackArray[i] = new TrackGroup(/* id= */ Integer.toString(i), trackFormat); } - isLive = length == C.LENGTH_UNSET && seekMap.getDurationUs() == C.TIME_UNSET; - dataType = isLive ? 
C.DATA_TYPE_MEDIA_PROGRESSIVE_LIVE : C.DATA_TYPE_MEDIA; - preparedState = - new PreparedState(seekMap, new TrackGroupArray(trackArray), trackIsAudioVideoFlags); + trackState = new TrackState(new TrackGroupArray(trackArray), trackIsAudioVideoFlags); prepared = true; - listener.onSourceInfoRefreshed(durationUs, seekMap.isSeekable(), isLive); - Assertions.checkNotNull(callback).onPrepared(this); - } - - private PreparedState getPreparedState() { - return Assertions.checkNotNull(preparedState); - } - - private void copyLengthFromLoader(ExtractingLoadable loadable) { - if (length == C.LENGTH_UNSET) { - length = loadable.length; - } + checkNotNull(callback).onPrepared(this); } private void startLoading() { ExtractingLoadable loadable = new ExtractingLoadable( - uri, dataSource, extractorHolder, /* extractorOutput= */ this, loadCondition); + uri, dataSource, progressiveMediaExtractor, /* extractorOutput= */ this, loadCondition); if (prepared) { - SeekMap seekMap = getPreparedState().seekMap; Assertions.checkState(isPendingReset()); if (durationUs != C.TIME_UNSET && pendingResetPositionUs > durationUs) { loadingFinished = true; @@ -769,23 +802,27 @@ private void startLoading() { return; } loadable.setLoadPosition( - seekMap.getSeekPoints(pendingResetPositionUs).first.position, pendingResetPositionUs); + checkNotNull(seekMap).getSeekPoints(pendingResetPositionUs).first.position, + pendingResetPositionUs); + for (SampleQueue sampleQueue : sampleQueues) { + sampleQueue.setStartTimeUs(pendingResetPositionUs); + } pendingResetPositionUs = C.TIME_UNSET; } extractedSamplesCountAtStartOfLoad = getExtractedSamplesCount(); long elapsedRealtimeMs = loader.startLoading( loadable, this, loadErrorHandlingPolicy.getMinimumLoadableRetryCount(dataType)); - eventDispatcher.loadStarted( - loadable.dataSpec, + DataSpec dataSpec = loadable.dataSpec; + mediaSourceEventDispatcher.loadStarted( + new LoadEventInfo(loadable.loadTaskId, dataSpec, elapsedRealtimeMs), C.DATA_TYPE_MEDIA, C.TRACK_TYPE_UNKNOWN, /* trackFormat= */ null, C.SELECTION_REASON_UNKNOWN, /* trackSelectionData= */ null, /* mediaStartTimeUs= */ loadable.seekTimeUs, - durationUs, - elapsedRealtimeMs); + durationUs); } /** @@ -798,8 +835,7 @@ private void startLoading() { * retry. */ private boolean configureRetry(ExtractingLoadable loadable, int currentExtractedSampleCount) { - if (length != C.LENGTH_UNSET - || (seekMap != null && seekMap.getDurationUs() != C.TIME_UNSET)) { + if (isLengthKnown || (seekMap != null && seekMap.getDurationUs() != C.TIME_UNSET)) { // We're playing an on-demand stream. Resume the current loadable, which will // request data starting from the point it left off. 
extractedSamplesCountAtStartOfLoad = currentExtractedSampleCount; @@ -863,11 +899,13 @@ private int getExtractedSamplesCount() { return extractedSamplesCount; } - private long getLargestQueuedTimestampUs() { + private long getLargestQueuedTimestampUs(boolean includeDisabledTracks) { long largestQueuedTimestampUs = Long.MIN_VALUE; - for (SampleQueue sampleQueue : sampleQueues) { - largestQueuedTimestampUs = Math.max(largestQueuedTimestampUs, - sampleQueue.getLargestQueuedTimestampUs()); + for (int i = 0; i < sampleQueues.length; i++) { + if (includeDisabledTracks || checkNotNull(trackState).trackEnabledStates[i]) { + largestQueuedTimestampUs = + max(largestQueuedTimestampUs, sampleQueues[i].getLargestQueuedTimestampUs()); + } } return largestQueuedTimestampUs; } @@ -876,6 +914,13 @@ private boolean isPendingReset() { return pendingResetPositionUs != C.TIME_UNSET; } + @EnsuresNonNull({"trackState", "seekMap"}) + private void assertPrepared() { + Assertions.checkState(prepared); + checkNotNull(trackState); + checkNotNull(seekMap); + } + private final class SampleStreamImpl implements SampleStream { private final int track; @@ -895,24 +940,24 @@ public void maybeThrowError() throws IOException { } @Override - public int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, - boolean formatRequired) { - return ProgressiveMediaPeriod.this.readData(track, formatHolder, buffer, formatRequired); + public int readData( + FormatHolder formatHolder, DecoderInputBuffer buffer, @ReadFlags int readFlags) { + return ProgressiveMediaPeriod.this.readData(track, formatHolder, buffer, readFlags); } @Override public int skipData(long positionUs) { return ProgressiveMediaPeriod.this.skipData(track, positionUs); } - } /** Loads the media stream and extracts sample data from it. 
*/ /* package */ final class ExtractingLoadable implements Loadable, IcyDataSource.Listener { + private final long loadTaskId; private final Uri uri; private final StatsDataSource dataSource; - private final ExtractorHolder extractorHolder; + private final ProgressiveMediaExtractor progressiveMediaExtractor; private final ExtractorOutput extractorOutput; private final ConditionVariable loadCondition; private final PositionHolder positionHolder; @@ -922,25 +967,24 @@ public int skipData(long positionUs) { private boolean pendingExtractorSeek; private long seekTimeUs; private DataSpec dataSpec; - private long length; @Nullable private TrackOutput icyTrackOutput; private boolean seenIcyMetadata; - @SuppressWarnings("method.invocation.invalid") + @SuppressWarnings("nullness:method.invocation") public ExtractingLoadable( Uri uri, DataSource dataSource, - ExtractorHolder extractorHolder, + ProgressiveMediaExtractor progressiveMediaExtractor, ExtractorOutput extractorOutput, ConditionVariable loadCondition) { this.uri = uri; this.dataSource = new StatsDataSource(dataSource); - this.extractorHolder = extractorHolder; + this.progressiveMediaExtractor = progressiveMediaExtractor; this.extractorOutput = extractorOutput; this.loadCondition = loadCondition; this.positionHolder = new PositionHolder(); this.pendingExtractorSeek = true; - this.length = C.LENGTH_UNSET; + loadTaskId = LoadEventInfo.getNewId(); dataSpec = buildDataSpec(/* position= */ 0); } @@ -952,18 +996,17 @@ public void cancelLoad() { } @Override - public void load() throws IOException, InterruptedException { + public void load() throws IOException { int result = Extractor.RESULT_CONTINUE; while (result == Extractor.RESULT_CONTINUE && !loadCanceled) { - ExtractorInput input = null; try { long position = positionHolder.position; dataSpec = buildDataSpec(position); - length = dataSource.open(dataSpec); + long length = dataSource.open(dataSpec); if (length != C.LENGTH_UNSET) { length += position; + onLengthKnown(); } - Uri uri = Assertions.checkNotNull(dataSource.getUri()); icyHeaders = IcyHeaders.parse(dataSource.getResponseHeaders()); DataSource extractorDataSource = dataSource; if (icyHeaders != null && icyHeaders.metadataInterval != C.LENGTH_UNSET) { @@ -971,23 +1014,32 @@ public void load() throws IOException, InterruptedException { icyTrackOutput = icyTrack(); icyTrackOutput.format(ICY_FORMAT); } - input = new DefaultExtractorInput(extractorDataSource, position, length); - Extractor extractor = extractorHolder.selectExtractor(input, extractorOutput, uri); - - // MP3 live streams commonly have seekable metadata, despite being unseekable. 
- if (icyHeaders != null && extractor instanceof Mp3Extractor) { - ((Mp3Extractor) extractor).disableSeeking(); + progressiveMediaExtractor.init( + extractorDataSource, + uri, + dataSource.getResponseHeaders(), + position, + length, + extractorOutput); + + if (icyHeaders != null) { + progressiveMediaExtractor.disableSeekingOnMp3Streams(); } if (pendingExtractorSeek) { - extractor.seek(position, seekTimeUs); + progressiveMediaExtractor.seek(position, seekTimeUs); pendingExtractorSeek = false; } while (result == Extractor.RESULT_CONTINUE && !loadCanceled) { - loadCondition.block(); - result = extractor.read(input, positionHolder); - if (input.getPosition() > position + continueLoadingCheckIntervalBytes) { - position = input.getPosition(); + try { + loadCondition.block(); + } catch (InterruptedException e) { + throw new InterruptedIOException(); + } + result = progressiveMediaExtractor.read(positionHolder); + long currentInputPosition = progressiveMediaExtractor.getCurrentInputPosition(); + if (currentInputPosition > position + continueLoadingCheckIntervalBytes) { + position = currentInputPosition; loadCondition.close(); handler.post(onContinueLoadingRequestedRunnable); } @@ -995,10 +1047,10 @@ public void load() throws IOException, InterruptedException { } finally { if (result == Extractor.RESULT_SEEK) { result = Extractor.RESULT_CONTINUE; - } else if (input != null) { - positionHolder.position = input.getPosition(); + } else if (progressiveMediaExtractor.getCurrentInputPosition() != C.POSITION_UNSET) { + positionHolder.position = progressiveMediaExtractor.getCurrentInputPosition(); } - Util.closeQuietly(dataSource); + DataSourceUtil.closeQuietly(dataSource); } } } @@ -1010,12 +1062,14 @@ public void onIcyMetadata(ParsableByteArray metadata) { // Always output the first ICY metadata at the start time. This helps minimize any delay // between the start of playback and the first ICY metadata event. long timeUs = - !seenIcyMetadata ? seekTimeUs : Math.max(getLargestQueuedTimestampUs(), seekTimeUs); + !seenIcyMetadata + ? seekTimeUs + : max(getLargestQueuedTimestampUs(/* includeDisabledTracks= */ true), seekTimeUs); int length = metadata.bytesLeft(); - TrackOutput icyTrackOutput = Assertions.checkNotNull(this.icyTrackOutput); + TrackOutput icyTrackOutput = checkNotNull(this.icyTrackOutput); icyTrackOutput.sampleData(metadata, length); icyTrackOutput.sampleMetadata( - timeUs, C.BUFFER_FLAG_KEY_FRAME, length, /* offset= */ 0, /* encryptionData= */ null); + timeUs, C.BUFFER_FLAG_KEY_FRAME, length, /* offset= */ 0, /* cryptoData= */ null); seenIcyMetadata = true; } @@ -1024,13 +1078,14 @@ public void onIcyMetadata(ParsableByteArray metadata) { private DataSpec buildDataSpec(long position) { // Disable caching if the content length cannot be resolved, since this is indicative of a // progressive live stream. - return new DataSpec( - uri, - position, - C.LENGTH_UNSET, - customCacheKey, - DataSpec.FLAG_DONT_CACHE_IF_LENGTH_UNKNOWN | DataSpec.FLAG_ALLOW_CACHE_FRAGMENTATION, - ICY_METADATA_HEADERS); + return new DataSpec.Builder() + .setUri(uri) + .setPosition(position) + .setKey(customCacheKey) + .setFlags( + DataSpec.FLAG_DONT_CACHE_IF_LENGTH_UNKNOWN | DataSpec.FLAG_ALLOW_CACHE_FRAGMENTATION) + .setHttpRequestHeaders(ICY_METADATA_HEADERS) + .build(); } private void setLoadPosition(long position, long timeUs) { @@ -1041,87 +1096,15 @@ private void setLoadPosition(long position, long timeUs) { } } - /** Stores a list of extractors and a selected extractor when the format has been detected. 
*/ - private static final class ExtractorHolder { - - private final Extractor[] extractors; - - @Nullable private Extractor extractor; - - /** - * Creates a holder that will select an extractor and initialize it using the specified output. - * - * @param extractors One or more extractors to choose from. - */ - public ExtractorHolder(Extractor[] extractors) { - this.extractors = extractors; - } - - /** - * Returns an initialized extractor for reading {@code input}, and returns the same extractor on - * later calls. - * - * @param input The {@link ExtractorInput} from which data should be read. - * @param output The {@link ExtractorOutput} that will be used to initialize the selected - * extractor. - * @param uri The {@link Uri} of the data. - * @return An initialized extractor for reading {@code input}. - * @throws UnrecognizedInputFormatException Thrown if the input format could not be detected. - * @throws IOException Thrown if the input could not be read. - * @throws InterruptedException Thrown if the thread was interrupted. - */ - public Extractor selectExtractor(ExtractorInput input, ExtractorOutput output, Uri uri) - throws IOException, InterruptedException { - if (extractor != null) { - return extractor; - } - if (extractors.length == 1) { - this.extractor = extractors[0]; - } else { - for (Extractor extractor : extractors) { - try { - if (extractor.sniff(input)) { - this.extractor = extractor; - break; - } - } catch (EOFException e) { - // Do nothing. - } finally { - input.resetPeekPosition(); - } - } - if (extractor == null) { - throw new UnrecognizedInputFormatException( - "None of the available extractors (" - + Util.getCommaDelimitedSimpleClassNames(extractors) - + ") could read the stream.", - uri); - } - } - extractor.init(output); - return extractor; - } - - public void release() { - if (extractor != null) { - extractor.release(); - extractor = null; - } - } - } - - /** Stores state that is initialized when preparation completes. */ - private static final class PreparedState { + /** Stores track state. 
*/ + private static final class TrackState { - public final SeekMap seekMap; public final TrackGroupArray tracks; public final boolean[] trackIsAudioVideoFlags; public final boolean[] trackEnabledStates; public final boolean[] trackNotifiedDownstreamFormats; - public PreparedState( - SeekMap seekMap, TrackGroupArray tracks, boolean[] trackIsAudioVideoFlags) { - this.seekMap = seekMap; + public TrackState(TrackGroupArray tracks, boolean[] trackIsAudioVideoFlags) { this.tracks = tracks; this.trackIsAudioVideoFlags = trackIsAudioVideoFlags; this.trackEnabledStates = new boolean[tracks.length]; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ProgressiveMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ProgressiveMediaSource.java index b48e7835ab..07aa3309c0 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ProgressiveMediaSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ProgressiveMediaSource.java @@ -15,11 +15,17 @@ */ package com.google.android.exoplayer2.source; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + import android.net.Uri; +import android.os.Looper; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.drm.DrmSession; +import com.google.android.exoplayer2.MediaItem; +import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.drm.DefaultDrmSessionManagerProvider; import com.google.android.exoplayer2.drm.DrmSessionManager; +import com.google.android.exoplayer2.drm.DrmSessionManagerProvider; import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorsFactory; @@ -28,8 +34,7 @@ import com.google.android.exoplayer2.upstream.DefaultLoadErrorHandlingPolicy; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; import com.google.android.exoplayer2.upstream.TransferListener; -import com.google.android.exoplayer2.util.Assertions; -import java.io.IOException; +import com.google.errorprone.annotations.CanIgnoreReturnValue; /** * Provides one period that loads data from a {@link Uri} and extracted using an {@link Extractor}. @@ -46,103 +51,118 @@ public final class ProgressiveMediaSource extends BaseMediaSource implements ProgressiveMediaPeriod.Listener { /** Factory for {@link ProgressiveMediaSource}s. */ + @SuppressWarnings("deprecation") // Implement deprecated type for backwards compatibility. public static final class Factory implements MediaSourceFactory { private final DataSource.Factory dataSourceFactory; - private ExtractorsFactory extractorsFactory; - @Nullable private String customCacheKey; - @Nullable private Object tag; - private DrmSessionManager drmSessionManager; + private ProgressiveMediaExtractor.Factory progressiveMediaExtractorFactory; + private DrmSessionManagerProvider drmSessionManagerProvider; private LoadErrorHandlingPolicy loadErrorHandlingPolicy; private int continueLoadingCheckIntervalBytes; - private boolean isCreateCalled; + @Nullable private String customCacheKey; + @Nullable private Object tag; /** - * Creates a new factory for {@link ProgressiveMediaSource}s, using the extractors provided by - * {@link DefaultExtractorsFactory}. + * Creates a new factory for {@link ProgressiveMediaSource}s. + * + *

<p>The factory will use the following default components: * - * @param dataSourceFactory A factory for {@link DataSource}s to read the media. + * <ul> + *   <li>{@link DefaultExtractorsFactory} + *   <li>{@link DefaultDrmSessionManagerProvider} + *   <li>{@link DefaultLoadErrorHandlingPolicy} + * </ul>
      + * + * @param dataSourceFactory A factory for {@linkplain DataSource data sources} to read the + * media. */ public Factory(DataSource.Factory dataSourceFactory) { this(dataSourceFactory, new DefaultExtractorsFactory()); } /** - * Creates a new factory for {@link ProgressiveMediaSource}s. + * Equivalent to {@link #Factory(DataSource.Factory, ProgressiveMediaExtractor.Factory) new + * Factory(dataSourceFactory, () -> new BundledExtractorsAdapter(extractorsFactory)}. + * + *

<p>The factory will use the following default components: * - * @param dataSourceFactory A factory for {@link DataSource}s to read the media. - * @param extractorsFactory A factory for extractors used to extract media from its container. + * <ul> + *   <li>{@link DefaultDrmSessionManagerProvider} + *   <li>{@link DefaultLoadErrorHandlingPolicy} + * </ul>
      + * + * @param dataSourceFactory A factory for {@linkplain DataSource data sources} to read the + * media. + * @param extractorsFactory A factory for the {@linkplain Extractor extractors} used to extract + * the media from its container. */ public Factory(DataSource.Factory dataSourceFactory, ExtractorsFactory extractorsFactory) { - this.dataSourceFactory = dataSourceFactory; - this.extractorsFactory = extractorsFactory; - drmSessionManager = DrmSessionManager.getDummyDrmSessionManager(); - loadErrorHandlingPolicy = new DefaultLoadErrorHandlingPolicy(); - continueLoadingCheckIntervalBytes = DEFAULT_LOADING_CHECK_INTERVAL_BYTES; + this(dataSourceFactory, playerId -> new BundledExtractorsAdapter(extractorsFactory)); } /** - * Sets the factory for {@link Extractor}s to process the media stream. The default value is an - * instance of {@link DefaultExtractorsFactory}. + * Creates a new factory for {@link ProgressiveMediaSource}s. * - * @param extractorsFactory A factory for {@link Extractor}s to process the media stream. If the - * possible formats are known, pass a factory that instantiates extractors for those - * formats. - * @return This factory, for convenience. - * @throws IllegalStateException If {@link #createMediaSource(Uri)} has already been called. - * @deprecated Pass the {@link ExtractorsFactory} via {@link #Factory(DataSource.Factory, - * ExtractorsFactory)}. This is necessary so that proguard can treat the default extractors - * factory as unused. - */ - @Deprecated - public Factory setExtractorsFactory(ExtractorsFactory extractorsFactory) { - Assertions.checkState(!isCreateCalled); - this.extractorsFactory = extractorsFactory; - return this; - } - - /** - * Sets the custom key that uniquely identifies the original stream. Used for cache indexing. - * The default value is {@code null}. + *

<p>The factory will use the following default components: * - * @param customCacheKey A custom key that uniquely identifies the original stream. Used for - * cache indexing. - * @return This factory, for convenience. - * @throws IllegalStateException If {@link #createMediaSource(Uri)} has already been called. + * <ul> + *   <li>{@link DefaultDrmSessionManagerProvider} + *   <li>{@link DefaultLoadErrorHandlingPolicy} + * </ul>
      + * + * @param dataSourceFactory A factory for {@linkplain DataSource data sources} to read the + * media. + * @param progressiveMediaExtractorFactory A factory for the {@link ProgressiveMediaExtractor} + * to extract the media from its container. */ - public Factory setCustomCacheKey(@Nullable String customCacheKey) { - Assertions.checkState(!isCreateCalled); - this.customCacheKey = customCacheKey; - return this; + public Factory( + DataSource.Factory dataSourceFactory, + ProgressiveMediaExtractor.Factory progressiveMediaExtractorFactory) { + this( + dataSourceFactory, + progressiveMediaExtractorFactory, + new DefaultDrmSessionManagerProvider(), + new DefaultLoadErrorHandlingPolicy(), + DEFAULT_LOADING_CHECK_INTERVAL_BYTES); } /** - * Sets a tag for the media source which will be published in the {@link - * com.google.android.exoplayer2.Timeline} of the source as {@link - * com.google.android.exoplayer2.Timeline.Window#tag}. + * Creates a new factory for {@link ProgressiveMediaSource}s. * - * @param tag A tag for the media source. - * @return This factory, for convenience. - * @throws IllegalStateException If {@link #createMediaSource(Uri)} has already been called. + * @param dataSourceFactory A factory for {@linkplain DataSource data sources} to read the + * media. + * @param progressiveMediaExtractorFactory A factory for the {@link ProgressiveMediaExtractor} + * to extract media from its container. + * @param drmSessionManagerProvider A provider to obtain a {@link DrmSessionManager} for a + * {@link MediaItem}. + * @param loadErrorHandlingPolicy A policy to handle load error. + * @param continueLoadingCheckIntervalBytes The number of bytes that should be loaded between + * each invocation of {@link + * MediaPeriod.Callback#onContinueLoadingRequested(SequenceableLoader)}. */ - public Factory setTag(Object tag) { - Assertions.checkState(!isCreateCalled); - this.tag = tag; - return this; + public Factory( + DataSource.Factory dataSourceFactory, + ProgressiveMediaExtractor.Factory progressiveMediaExtractorFactory, + DrmSessionManagerProvider drmSessionManagerProvider, + LoadErrorHandlingPolicy loadErrorHandlingPolicy, + int continueLoadingCheckIntervalBytes) { + this.dataSourceFactory = dataSourceFactory; + this.progressiveMediaExtractorFactory = progressiveMediaExtractorFactory; + this.drmSessionManagerProvider = drmSessionManagerProvider; + this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; + this.continueLoadingCheckIntervalBytes = continueLoadingCheckIntervalBytes; } - /** - * Sets the {@link LoadErrorHandlingPolicy}. The default value is created by calling {@link - * DefaultLoadErrorHandlingPolicy#DefaultLoadErrorHandlingPolicy()}. - * - * @param loadErrorHandlingPolicy A {@link LoadErrorHandlingPolicy}. - * @return This factory, for convenience. - * @throws IllegalStateException If {@link #createMediaSource(Uri)} has already been called. - */ + @CanIgnoreReturnValue + @Override public Factory setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy loadErrorHandlingPolicy) { - Assertions.checkState(!isCreateCalled); - this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; + this.loadErrorHandlingPolicy = + checkNotNull( + loadErrorHandlingPolicy, + "MediaSource.Factory#setLoadErrorHandlingPolicy no longer handles null by" + + " instantiating a new DefaultLoadErrorHandlingPolicy. 
Explicitly construct and" + + " pass an instance in order to retain the old behavior."); return this; } @@ -155,55 +175,58 @@ public Factory setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy loadErrorHandl * each invocation of {@link * MediaPeriod.Callback#onContinueLoadingRequested(SequenceableLoader)}. * @return This factory, for convenience. - * @throws IllegalStateException If {@link #createMediaSource(Uri)} has already been called. */ + @CanIgnoreReturnValue public Factory setContinueLoadingCheckIntervalBytes(int continueLoadingCheckIntervalBytes) { - Assertions.checkState(!isCreateCalled); this.continueLoadingCheckIntervalBytes = continueLoadingCheckIntervalBytes; return this; } - /** - * Sets the {@link DrmSessionManager} to use for acquiring {@link DrmSession DrmSessions}. The - * default value is {@link DrmSessionManager#DUMMY}. - * - * @param drmSessionManager The {@link DrmSessionManager}. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - */ + @CanIgnoreReturnValue @Override - public Factory setDrmSessionManager(DrmSessionManager drmSessionManager) { - Assertions.checkState(!isCreateCalled); - this.drmSessionManager = - drmSessionManager != null - ? drmSessionManager - : DrmSessionManager.getDummyDrmSessionManager(); + public Factory setDrmSessionManagerProvider( + DrmSessionManagerProvider drmSessionManagerProvider) { + this.drmSessionManagerProvider = + checkNotNull( + drmSessionManagerProvider, + "MediaSource.Factory#setDrmSessionManagerProvider no longer handles null by" + + " instantiating a new DefaultDrmSessionManagerProvider. Explicitly construct" + + " and pass an instance in order to retain the old behavior."); return this; } /** * Returns a new {@link ProgressiveMediaSource} using the current parameters. * - * @param uri The {@link Uri}. + * @param mediaItem The {@link MediaItem}. * @return The new {@link ProgressiveMediaSource}. + * @throws NullPointerException if {@link MediaItem#localConfiguration} is {@code null}. 
*/ @Override - public ProgressiveMediaSource createMediaSource(Uri uri) { - isCreateCalled = true; + public ProgressiveMediaSource createMediaSource(MediaItem mediaItem) { + checkNotNull(mediaItem.localConfiguration); + boolean needsTag = mediaItem.localConfiguration.tag == null && tag != null; + boolean needsCustomCacheKey = + mediaItem.localConfiguration.customCacheKey == null && customCacheKey != null; + if (needsTag && needsCustomCacheKey) { + mediaItem = mediaItem.buildUpon().setTag(tag).setCustomCacheKey(customCacheKey).build(); + } else if (needsTag) { + mediaItem = mediaItem.buildUpon().setTag(tag).build(); + } else if (needsCustomCacheKey) { + mediaItem = mediaItem.buildUpon().setCustomCacheKey(customCacheKey).build(); + } return new ProgressiveMediaSource( - uri, + mediaItem, dataSourceFactory, - extractorsFactory, - drmSessionManager, + progressiveMediaExtractorFactory, + drmSessionManagerProvider.get(mediaItem), loadErrorHandlingPolicy, - customCacheKey, - continueLoadingCheckIntervalBytes, - tag); + continueLoadingCheckIntervalBytes); } @Override - public int[] getSupportedTypes() { - return new int[] {C.TYPE_OTHER}; + public @C.ContentType int[] getSupportedTypes() { + return new int[] {C.CONTENT_TYPE_OTHER}; } } @@ -213,56 +236,54 @@ public int[] getSupportedTypes() { */ public static final int DEFAULT_LOADING_CHECK_INTERVAL_BYTES = 1024 * 1024; - private final Uri uri; + private final MediaItem mediaItem; + private final MediaItem.LocalConfiguration localConfiguration; private final DataSource.Factory dataSourceFactory; - private final ExtractorsFactory extractorsFactory; - private final DrmSessionManager drmSessionManager; + private final ProgressiveMediaExtractor.Factory progressiveMediaExtractorFactory; + private final DrmSessionManager drmSessionManager; private final LoadErrorHandlingPolicy loadableLoadErrorHandlingPolicy; - @Nullable private final String customCacheKey; private final int continueLoadingCheckIntervalBytes; - @Nullable private final Object tag; + private boolean timelineIsPlaceholder; private long timelineDurationUs; private boolean timelineIsSeekable; private boolean timelineIsLive; @Nullable private TransferListener transferListener; - // TODO: Make private when ExtractorMediaSource is deleted. 
- /* package */ ProgressiveMediaSource( - Uri uri, + private ProgressiveMediaSource( + MediaItem mediaItem, DataSource.Factory dataSourceFactory, - ExtractorsFactory extractorsFactory, - DrmSessionManager drmSessionManager, + ProgressiveMediaExtractor.Factory progressiveMediaExtractorFactory, + DrmSessionManager drmSessionManager, LoadErrorHandlingPolicy loadableLoadErrorHandlingPolicy, - @Nullable String customCacheKey, - int continueLoadingCheckIntervalBytes, - @Nullable Object tag) { - this.uri = uri; + int continueLoadingCheckIntervalBytes) { + this.localConfiguration = checkNotNull(mediaItem.localConfiguration); + this.mediaItem = mediaItem; this.dataSourceFactory = dataSourceFactory; - this.extractorsFactory = extractorsFactory; + this.progressiveMediaExtractorFactory = progressiveMediaExtractorFactory; this.drmSessionManager = drmSessionManager; this.loadableLoadErrorHandlingPolicy = loadableLoadErrorHandlingPolicy; - this.customCacheKey = customCacheKey; this.continueLoadingCheckIntervalBytes = continueLoadingCheckIntervalBytes; + this.timelineIsPlaceholder = true; this.timelineDurationUs = C.TIME_UNSET; - this.tag = tag; } @Override - @Nullable - public Object getTag() { - return tag; + public MediaItem getMediaItem() { + return mediaItem; } @Override protected void prepareSourceInternal(@Nullable TransferListener mediaTransferListener) { transferListener = mediaTransferListener; drmSessionManager.prepare(); - notifySourceInfoRefreshed(timelineDurationUs, timelineIsSeekable, timelineIsLive); + drmSessionManager.setPlayer( + /* playbackLooper= */ checkNotNull(Looper.myLooper()), getPlayerId()); + notifySourceInfoRefreshed(); } @Override - public void maybeThrowSourceInfoRefreshError() throws IOException { + public void maybeThrowSourceInfoRefreshError() { // Do nothing. } @@ -273,15 +294,16 @@ public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long star dataSource.addTransferListener(transferListener); } return new ProgressiveMediaPeriod( - uri, + localConfiguration.uri, dataSource, - extractorsFactory.createExtractors(), + progressiveMediaExtractorFactory.createProgressiveMediaExtractor(getPlayerId()), drmSessionManager, + createDrmEventDispatcher(id), loadableLoadErrorHandlingPolicy, createEventDispatcher(id), this, allocator, - customCacheKey, + localConfiguration.customCacheKey, continueLoadingCheckIntervalBytes); } @@ -301,30 +323,54 @@ protected void releaseSourceInternal() { public void onSourceInfoRefreshed(long durationUs, boolean isSeekable, boolean isLive) { // If we already have the duration from a previous source info refresh, use it. durationUs = durationUs == C.TIME_UNSET ? timelineDurationUs : durationUs; - if (timelineDurationUs == durationUs + if (!timelineIsPlaceholder + && timelineDurationUs == durationUs && timelineIsSeekable == isSeekable && timelineIsLive == isLive) { // Suppress no-op source info changes. return; } - notifySourceInfoRefreshed(durationUs, isSeekable, isLive); + timelineDurationUs = durationUs; + timelineIsSeekable = isSeekable; + timelineIsLive = isLive; + timelineIsPlaceholder = false; + notifySourceInfoRefreshed(); } // Internal methods. - private void notifySourceInfoRefreshed(long durationUs, boolean isSeekable, boolean isLive) { - timelineDurationUs = durationUs; - timelineIsSeekable = isSeekable; - timelineIsLive = isLive; + private void notifySourceInfoRefreshed() { // TODO: Split up isDynamic into multiple fields to indicate which values may change. Then // indicate that the duration may change until it's known. 
See [internal: b/69703223]. - refreshSourceInfo( + Timeline timeline = new SinglePeriodTimeline( timelineDurationUs, timelineIsSeekable, /* isDynamic= */ false, - /* isLive= */ timelineIsLive, + /* useLiveConfiguration= */ timelineIsLive, /* manifest= */ null, - tag)); + mediaItem); + if (timelineIsPlaceholder) { + // TODO: Actually prepare the extractors during preparation so that we don't need a + // placeholder. See https://github.com/google/ExoPlayer/issues/4727. + timeline = + new ForwardingTimeline(timeline) { + @Override + public Window getWindow( + int windowIndex, Window window, long defaultPositionProjectionUs) { + super.getWindow(windowIndex, window, defaultPositionProjectionUs); + window.isPlaceholder = true; + return window; + } + + @Override + public Period getPeriod(int periodIndex, Period period, boolean setIds) { + super.getPeriod(periodIndex, period, setIds); + period.isPlaceholder = true; + return period; + } + }; + } + refreshSourceInfo(timeline); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SampleDataQueue.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SampleDataQueue.java index 3779fe33e5..00b1943533 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SampleDataQueue.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SampleDataQueue.java @@ -15,16 +15,21 @@ */ package com.google.android.exoplayer2.source; +import static java.lang.Math.min; + import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.decoder.CryptoInfo; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; -import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.decoder.DecoderInputBuffer.InsufficientCapacityException; import com.google.android.exoplayer2.extractor.TrackOutput.CryptoData; import com.google.android.exoplayer2.source.SampleQueue.SampleExtrasHolder; import com.google.android.exoplayer2.upstream.Allocation; import com.google.android.exoplayer2.upstream.Allocator; +import com.google.android.exoplayer2.upstream.DataReader; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; import java.io.EOFException; import java.io.IOException; import java.nio.ByteBuffer; @@ -61,7 +66,7 @@ public SampleDataQueue(Allocator allocator) { /** Clears all sample data. */ public void reset() { clearAllocationNodes(firstAllocationNode); - firstAllocationNode = new AllocationNode(0, allocationLength); + firstAllocationNode.reset(/* startPosition= */ 0, allocationLength); readAllocationNode = firstAllocationNode; writeAllocationNode = firstAllocationNode; totalBytesWritten = 0; @@ -75,6 +80,7 @@ public void reset() { * discarded, or 0 if the queue is now empty. */ public void discardUpstreamSampleBytes(long totalBytesWritten) { + Assertions.checkArgument(totalBytesWritten <= this.totalBytesWritten); this.totalBytesWritten = totalBytesWritten; if (this.totalBytesWritten == 0 || this.totalBytesWritten == firstAllocationNode.startPosition) { @@ -88,8 +94,8 @@ public void discardUpstreamSampleBytes(long totalBytesWritten) { while (this.totalBytesWritten > lastNodeToKeep.endPosition) { lastNodeToKeep = lastNodeToKeep.next; } - // Discard all subsequent nodes. - AllocationNode firstNodeToDiscard = lastNodeToKeep.next; + // Discard all subsequent nodes. 
lastNodeToKeep is initialized, therefore next cannot be null. + AllocationNode firstNodeToDiscard = Assertions.checkNotNull(lastNodeToKeep.next); clearAllocationNodes(firstNodeToDiscard); // Reset the successor of the last node to be an uninitialized node. lastNodeToKeep.next = new AllocationNode(lastNodeToKeep.endPosition, allocationLength); @@ -112,39 +118,29 @@ public void rewind() { } /** - * Reads data from the rolling buffer to populate a decoder input buffer. + * Reads data from the rolling buffer to populate a decoder input buffer, and advances the read + * position. * * @param buffer The buffer to populate. * @param extrasHolder The extras holder whose offset should be read and subsequently adjusted. + * @throws InsufficientCapacityException If the {@code buffer} has insufficient capacity to hold + * the data being read. */ public void readToBuffer(DecoderInputBuffer buffer, SampleExtrasHolder extrasHolder) { - // Read encryption data if the sample is encrypted. - if (buffer.isEncrypted()) { - readEncryptionData(buffer, extrasHolder); - } - // Read sample data, extracting supplemental data into a separate buffer if needed. - if (buffer.hasSupplementalData()) { - // If there is supplemental data, the sample data is prefixed by its size. - scratch.reset(4); - readData(extrasHolder.offset, scratch.data, 4); - int sampleSize = scratch.readUnsignedIntToInt(); - extrasHolder.offset += 4; - extrasHolder.size -= 4; - - // Write the sample data. - buffer.ensureSpaceForWrite(sampleSize); - readData(extrasHolder.offset, buffer.data, sampleSize); - extrasHolder.offset += sampleSize; - extrasHolder.size -= sampleSize; + readAllocationNode = readSampleData(readAllocationNode, buffer, extrasHolder, scratch); + } - // Write the remaining data as supplemental data. - buffer.resetSupplementalData(extrasHolder.size); - readData(extrasHolder.offset, buffer.supplementalData, extrasHolder.size); - } else { - // Write the sample data. - buffer.ensureSpaceForWrite(extrasHolder.size); - readData(extrasHolder.offset, buffer.data, extrasHolder.size); - } + /** + * Peeks data from the rolling buffer to populate a decoder input buffer, without advancing the + * read position. + * + * @param buffer The buffer to populate. + * @param extrasHolder The extras holder whose offset should be read and subsequently adjusted. + * @throws InsufficientCapacityException If the {@code buffer} has insufficient capacity to hold + * the data being peeked. + */ + public void peekToBuffer(DecoderInputBuffer buffer, SampleExtrasHolder extrasHolder) { + readSampleData(readAllocationNode, buffer, extrasHolder, scratch); } /** @@ -176,8 +172,7 @@ public long getTotalBytesWritten() { return totalBytesWritten; } - public int sampleData(ExtractorInput input, int length, boolean allowEndOfInput) - throws IOException, InterruptedException { + public int sampleData(DataReader input, int length, boolean allowEndOfInput) throws IOException { length = preAppend(length); int bytesAppended = input.read( @@ -209,23 +204,123 @@ public void sampleData(ParsableByteArray buffer, int length) { // Private methods. /** - * Reads encryption data for the current sample. + * Clears allocation nodes starting from {@code fromNode}. + * + * @param fromNode The node from which to clear. 
+ */ + private void clearAllocationNodes(AllocationNode fromNode) { + if (fromNode.allocation == null) { + return; + } + // Bulk release allocations for performance (it's significantly faster when using + // DefaultAllocator because the allocator's lock only needs to be acquired and released once) + // [Internal: See b/29542039]. + allocator.release(fromNode); + fromNode.clear(); + } + + /** + * Called before writing sample data to {@link #writeAllocationNode}. May cause {@link + * #writeAllocationNode} to be initialized. + * + * @param length The number of bytes that the caller wishes to write. + * @return The number of bytes that the caller is permitted to write, which may be less than + * {@code length}. + */ + private int preAppend(int length) { + if (writeAllocationNode.allocation == null) { + writeAllocationNode.initialize( + allocator.allocate(), + new AllocationNode(writeAllocationNode.endPosition, allocationLength)); + } + return min(length, (int) (writeAllocationNode.endPosition - totalBytesWritten)); + } + + /** + * Called after writing sample data. May cause {@link #writeAllocationNode} to be advanced. + * + * @param length The number of bytes that were written. + */ + private void postAppend(int length) { + totalBytesWritten += length; + if (totalBytesWritten == writeAllocationNode.endPosition) { + writeAllocationNode = writeAllocationNode.next; + } + } + + /** + * Reads data from the rolling buffer to populate a decoder input buffer. + * + * @param allocationNode The first {@link AllocationNode} containing data yet to be read. + * @param buffer The buffer to populate. + * @param extrasHolder The extras holder whose offset should be read and subsequently adjusted. + * @param scratch A scratch {@link ParsableByteArray}. + * @return The first {@link AllocationNode} that contains unread bytes after the last byte that + * the invocation read. + * @throws InsufficientCapacityException If the {@code buffer} has insufficient capacity to hold + * the sample data. + */ + private static AllocationNode readSampleData( + AllocationNode allocationNode, + DecoderInputBuffer buffer, + SampleExtrasHolder extrasHolder, + ParsableByteArray scratch) { + if (buffer.isEncrypted()) { + allocationNode = readEncryptionData(allocationNode, buffer, extrasHolder, scratch); + } + // Read sample data, extracting supplemental data into a separate buffer if needed. + if (buffer.hasSupplementalData()) { + // If there is supplemental data, the sample data is prefixed by its size. + scratch.reset(4); + allocationNode = readData(allocationNode, extrasHolder.offset, scratch.getData(), 4); + int sampleSize = scratch.readUnsignedIntToInt(); + extrasHolder.offset += 4; + extrasHolder.size -= 4; + + // Write the sample data. + buffer.ensureSpaceForWrite(sampleSize); + allocationNode = readData(allocationNode, extrasHolder.offset, buffer.data, sampleSize); + extrasHolder.offset += sampleSize; + extrasHolder.size -= sampleSize; + + // Write the remaining data as supplemental data. + buffer.resetSupplementalData(extrasHolder.size); + allocationNode = + readData(allocationNode, extrasHolder.offset, buffer.supplementalData, extrasHolder.size); + } else { + // Write the sample data. + buffer.ensureSpaceForWrite(extrasHolder.size); + allocationNode = + readData(allocationNode, extrasHolder.offset, buffer.data, extrasHolder.size); + } + return allocationNode; + } + + /** + * Reads encryption data for the sample described by {@code extrasHolder}. * *

      The encryption data is written into {@link DecoderInputBuffer#cryptoInfo}, and {@link * SampleExtrasHolder#size} is adjusted to subtract the number of bytes that were read. The same * value is added to {@link SampleExtrasHolder#offset}. * + * @param allocationNode The first {@link AllocationNode} containing data yet to be read. * @param buffer The buffer into which the encryption data should be written. * @param extrasHolder The extras holder whose offset should be read and subsequently adjusted. + * @param scratch A scratch {@link ParsableByteArray}. + * @return The first {@link AllocationNode} that contains unread bytes after this method returns. */ - private void readEncryptionData(DecoderInputBuffer buffer, SampleExtrasHolder extrasHolder) { + private static AllocationNode readEncryptionData( + AllocationNode allocationNode, + DecoderInputBuffer buffer, + SampleExtrasHolder extrasHolder, + ParsableByteArray scratch) { long offset = extrasHolder.offset; // Read the signal byte. scratch.reset(1); - readData(offset, scratch.data, 1); + allocationNode = readData(allocationNode, offset, scratch.getData(), 1); offset++; - byte signalByte = scratch.data[0]; + byte signalByte = scratch.getData()[0]; boolean subsampleEncryption = (signalByte & 0x80) != 0; int ivSize = signalByte & 0x7F; @@ -237,14 +332,14 @@ private void readEncryptionData(DecoderInputBuffer buffer, SampleExtrasHolder ex // Zero out cryptoInfo.iv so that if ivSize < 16, the remaining bytes are correctly set to 0. Arrays.fill(cryptoInfo.iv, (byte) 0); } - readData(offset, cryptoInfo.iv, ivSize); + allocationNode = readData(allocationNode, offset, cryptoInfo.iv, ivSize); offset += ivSize; // Read the subsample count, if present. int subsampleCount; if (subsampleEncryption) { scratch.reset(2); - readData(offset, scratch.data, 2); + allocationNode = readData(allocationNode, offset, scratch.getData(), 2); offset += 2; subsampleCount = scratch.readUnsignedShort(); } else { @@ -263,7 +358,7 @@ private void readEncryptionData(DecoderInputBuffer buffer, SampleExtrasHolder ex if (subsampleEncryption) { int subsampleDataLength = 6 * subsampleCount; scratch.reset(subsampleDataLength); - readData(offset, scratch.data, subsampleDataLength); + allocationNode = readData(allocationNode, offset, scratch.getData(), subsampleDataLength); offset += subsampleDataLength; scratch.setPosition(0); for (int i = 0; i < subsampleCount; i++) { @@ -276,7 +371,7 @@ private void readEncryptionData(DecoderInputBuffer buffer, SampleExtrasHolder ex } // Populate the cryptoInfo. - CryptoData cryptoData = extrasHolder.cryptoData; + CryptoData cryptoData = Util.castNonNull(extrasHolder.cryptoData); cryptoInfo.set( subsampleCount, clearDataSizes, @@ -291,136 +386,92 @@ private void readEncryptionData(DecoderInputBuffer buffer, SampleExtrasHolder ex int bytesRead = (int) (offset - extrasHolder.offset); extrasHolder.offset += bytesRead; extrasHolder.size -= bytesRead; + return allocationNode; } /** - * Reads data from the front of the rolling buffer. + * Reads data from {@code allocationNode} and its following nodes. * + * @param allocationNode The first {@link AllocationNode} containing data yet to be read. * @param absolutePosition The absolute position from which data should be read. * @param target The buffer into which data should be written. * @param length The number of bytes to read. + * @return The first {@link AllocationNode} that contains unread bytes after this method returns. 
*/ - private void readData(long absolutePosition, ByteBuffer target, int length) { - advanceReadTo(absolutePosition); + private static AllocationNode readData( + AllocationNode allocationNode, long absolutePosition, ByteBuffer target, int length) { + allocationNode = getNodeContainingPosition(allocationNode, absolutePosition); int remaining = length; while (remaining > 0) { - int toCopy = Math.min(remaining, (int) (readAllocationNode.endPosition - absolutePosition)); - Allocation allocation = readAllocationNode.allocation; - target.put(allocation.data, readAllocationNode.translateOffset(absolutePosition), toCopy); + int toCopy = min(remaining, (int) (allocationNode.endPosition - absolutePosition)); + Allocation allocation = allocationNode.allocation; + target.put(allocation.data, allocationNode.translateOffset(absolutePosition), toCopy); remaining -= toCopy; absolutePosition += toCopy; - if (absolutePosition == readAllocationNode.endPosition) { - readAllocationNode = readAllocationNode.next; + if (absolutePosition == allocationNode.endPosition) { + allocationNode = allocationNode.next; } } + return allocationNode; } /** - * Reads data from the front of the rolling buffer. + * Reads data from {@code allocationNode} and its following nodes. * + * @param allocationNode The first {@link AllocationNode} containing data yet to be read. * @param absolutePosition The absolute position from which data should be read. * @param target The array into which data should be written. * @param length The number of bytes to read. + * @return The first {@link AllocationNode} that contains unread bytes after this method returns. */ - private void readData(long absolutePosition, byte[] target, int length) { - advanceReadTo(absolutePosition); + private static AllocationNode readData( + AllocationNode allocationNode, long absolutePosition, byte[] target, int length) { + allocationNode = getNodeContainingPosition(allocationNode, absolutePosition); int remaining = length; while (remaining > 0) { - int toCopy = Math.min(remaining, (int) (readAllocationNode.endPosition - absolutePosition)); - Allocation allocation = readAllocationNode.allocation; + int toCopy = min(remaining, (int) (allocationNode.endPosition - absolutePosition)); + Allocation allocation = allocationNode.allocation; System.arraycopy( allocation.data, - readAllocationNode.translateOffset(absolutePosition), + allocationNode.translateOffset(absolutePosition), target, length - remaining, toCopy); remaining -= toCopy; absolutePosition += toCopy; - if (absolutePosition == readAllocationNode.endPosition) { - readAllocationNode = readAllocationNode.next; + if (absolutePosition == allocationNode.endPosition) { + allocationNode = allocationNode.next; } } + return allocationNode; } /** - * Advances the read position to the specified absolute position. - * - * @param absolutePosition The position to which {@link #readAllocationNode} should be advanced. - */ - private void advanceReadTo(long absolutePosition) { - while (absolutePosition >= readAllocationNode.endPosition) { - readAllocationNode = readAllocationNode.next; - } - } - - /** - * Clears allocation nodes starting from {@code fromNode}. - * - * @param fromNode The node from which to clear. 
- */ - private void clearAllocationNodes(AllocationNode fromNode) { - if (!fromNode.wasInitialized) { - return; - } - // Bulk release allocations for performance (it's significantly faster when using - // DefaultAllocator because the allocator's lock only needs to be acquired and released once) - // [Internal: See b/29542039]. - int allocationCount = - (writeAllocationNode.wasInitialized ? 1 : 0) - + ((int) (writeAllocationNode.startPosition - fromNode.startPosition) - / allocationLength); - Allocation[] allocationsToRelease = new Allocation[allocationCount]; - AllocationNode currentNode = fromNode; - for (int i = 0; i < allocationsToRelease.length; i++) { - allocationsToRelease[i] = currentNode.allocation; - currentNode = currentNode.clear(); - } - allocator.release(allocationsToRelease); - } - - /** - * Called before writing sample data to {@link #writeAllocationNode}. May cause {@link - * #writeAllocationNode} to be initialized. - * - * @param length The number of bytes that the caller wishes to write. - * @return The number of bytes that the caller is permitted to write, which may be less than - * {@code length}. - */ - private int preAppend(int length) { - if (!writeAllocationNode.wasInitialized) { - writeAllocationNode.initialize( - allocator.allocate(), - new AllocationNode(writeAllocationNode.endPosition, allocationLength)); - } - return Math.min(length, (int) (writeAllocationNode.endPosition - totalBytesWritten)); - } - - /** - * Called after writing sample data. May cause {@link #writeAllocationNode} to be advanced. - * - * @param length The number of bytes that were written. + * Returns the {@link AllocationNode} in {@code allocationNode}'s chain which contains the given + * {@code absolutePosition}. */ - private void postAppend(int length) { - totalBytesWritten += length; - if (totalBytesWritten == writeAllocationNode.endPosition) { - writeAllocationNode = writeAllocationNode.next; + private static AllocationNode getNodeContainingPosition( + AllocationNode allocationNode, long absolutePosition) { + while (absolutePosition >= allocationNode.endPosition) { + allocationNode = allocationNode.next; } + return allocationNode; } /** A node in a linked list of {@link Allocation}s held by the output. */ - private static final class AllocationNode { + private static final class AllocationNode implements Allocator.AllocationNode { /** The absolute position of the start of the data (inclusive). */ - public final long startPosition; + public long startPosition; /** The absolute position of the end of the data (exclusive). */ - public final long endPosition; - /** Whether the node has been initialized. Remains true after {@link #clear()}. */ - public boolean wasInitialized; - /** The {@link Allocation}, or {@code null} if the node is not initialized. */ + public long endPosition; + /** + * The {@link Allocation}, or {@code null} if the node is not {@link #initialize initialized}. + */ @Nullable public Allocation allocation; /** - * The next {@link AllocationNode} in the list, or {@code null} if the node has not been - * initialized. Remains set after {@link #clear()}. + * The next {@link AllocationNode} in the list, or {@code null} if the node is not {@link + * #initialize initialized}. */ @Nullable public AllocationNode next; @@ -430,6 +481,17 @@ private static final class AllocationNode { * initialized. 
*/ public AllocationNode(long startPosition, int allocationLength) { + reset(startPosition, allocationLength); + } + + /** + * Sets the {@link #startPosition} and the {@link Allocation} length. + * + *

      Must only be called for uninitialized instances, where {@link #allocation} is {@code + * null}. + */ + public void reset(long startPosition, int allocationLength) { + Assertions.checkState(allocation == null); this.startPosition = startPosition; this.endPosition = startPosition + allocationLength; } @@ -443,7 +505,6 @@ public AllocationNode(long startPosition, int allocationLength) { public void initialize(Allocation allocation, AllocationNode next) { this.allocation = allocation; this.next = next; - wasInitialized = true; } /** @@ -468,5 +529,22 @@ public AllocationNode clear() { next = null; return temp; } + + // AllocationChainNode implementation. + + @Override + public Allocation getAllocation() { + return Assertions.checkNotNull(allocation); + } + + @Override + @Nullable + public Allocator.AllocationNode next() { + if (next == null || next.allocation == null) { + return null; + } else { + return next; + } + } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SampleQueue.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SampleQueue.java index c63b755f4a..c6112c67f8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SampleQueue.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SampleQueue.java @@ -15,25 +15,41 @@ */ package com.google.android.exoplayer2.source; +import static com.google.android.exoplayer2.source.SampleStream.FLAG_OMIT_SAMPLE_DATA; +import static com.google.android.exoplayer2.source.SampleStream.FLAG_PEEK; +import static com.google.android.exoplayer2.source.SampleStream.FLAG_REQUIRE_FORMAT; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.Math.max; + import android.os.Looper; import androidx.annotation.CallSuper; +import androidx.annotation.GuardedBy; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.FormatHolder; +import com.google.android.exoplayer2.analytics.PlayerId; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; +import com.google.android.exoplayer2.decoder.DecoderInputBuffer.InsufficientCapacityException; import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.drm.DrmSession; +import com.google.android.exoplayer2.drm.DrmSessionEventListener; +import com.google.android.exoplayer2.drm.DrmSessionEventListener.EventDispatcher; import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.drm.DrmSessionManager.DrmSessionReference; import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.source.SampleStream.ReadFlags; import com.google.android.exoplayer2.upstream.Allocator; +import com.google.android.exoplayer2.upstream.DataReader; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; import java.io.IOException; +import org.checkerframework.checker.nullness.compatqual.NullableType; /** A queue of media samples. 
*/ public class SampleQueue implements TrackOutput { @@ -50,15 +66,17 @@ public interface UpstreamFormatChangedListener { } @VisibleForTesting /* package */ static final int SAMPLE_CAPACITY_INCREMENT = 1000; + private static final String TAG = "SampleQueue"; private final SampleDataQueue sampleDataQueue; private final SampleExtrasHolder extrasHolder; - private final DrmSessionManager drmSessionManager; - private UpstreamFormatChangedListener upstreamFormatChangeListener; - private final Looper playbackLooper; + private final SpannedData sharedSampleMetadata; + @Nullable private final DrmSessionManager drmSessionManager; + @Nullable private final DrmSessionEventListener.EventDispatcher drmEventDispatcher; + @Nullable private UpstreamFormatChangedListener upstreamFormatChangeListener; @Nullable private Format downstreamFormat; - @Nullable private DrmSession currentDrmSession; + @Nullable private DrmSession currentDrmSession; private int capacity; private int[] sourceIds; @@ -66,40 +84,86 @@ public interface UpstreamFormatChangedListener { private int[] sizes; private int[] flags; private long[] timesUs; - private CryptoData[] cryptoDatas; - private Format[] formats; + private @NullableType CryptoData[] cryptoDatas; private int length; private int absoluteFirstIndex; private int relativeFirstIndex; private int readPosition; + private long startTimeUs; private long largestDiscardedTimestampUs; private long largestQueuedTimestampUs; private boolean isLastSampleQueued; private boolean upstreamKeyframeRequired; private boolean upstreamFormatRequired; - private Format upstreamFormat; - private Format upstreamCommittedFormat; + private boolean upstreamFormatAdjustmentRequired; + @Nullable private Format unadjustedUpstreamFormat; + @Nullable private Format upstreamFormat; private int upstreamSourceId; + private boolean upstreamAllSamplesAreSyncSamples; + private boolean loggedUnexpectedNonSyncSample; - private boolean pendingUpstreamFormatAdjustment; - private Format unadjustedUpstreamFormat; private long sampleOffsetUs; private boolean pendingSplice; /** - * Creates a sample queue. + * Creates a sample queue without DRM resource management. + * + * @param allocator An {@link Allocator} from which allocations for sample data can be obtained. + */ + public static SampleQueue createWithoutDrm(Allocator allocator) { + return new SampleQueue( + allocator, /* drmSessionManager= */ null, /* drmEventDispatcher= */ null); + } + + /** + * Creates a sample queue with DRM resource management. + * + *

      For each sample added to the queue, a {@link DrmSession} will be attached containing the + * keys needed to decrypt it. * * @param allocator An {@link Allocator} from which allocations for sample data can be obtained. - * @param playbackLooper The looper associated with the media playback thread. * @param drmSessionManager The {@link DrmSessionManager} to obtain {@link DrmSession DrmSessions} * from. The created instance does not take ownership of this {@link DrmSessionManager}. + * @param drmEventDispatcher A {@link DrmSessionEventListener.EventDispatcher} to notify of events + * related to this SampleQueue. */ - public SampleQueue(Allocator allocator, Looper playbackLooper, DrmSessionManager drmSessionManager) { - sampleDataQueue = new SampleDataQueue(allocator); - this.playbackLooper = playbackLooper; + public static SampleQueue createWithDrm( + Allocator allocator, + DrmSessionManager drmSessionManager, + DrmSessionEventListener.EventDispatcher drmEventDispatcher) { + return new SampleQueue( + allocator, + Assertions.checkNotNull(drmSessionManager), + Assertions.checkNotNull(drmEventDispatcher)); + } + + /** + * @deprecated Use {@link #createWithDrm(Allocator, DrmSessionManager, EventDispatcher)} instead. + * The {@code playbackLooper} should be configured on the {@link DrmSessionManager} with + * {@link DrmSessionManager#setPlayer(Looper, PlayerId)}. + */ + @Deprecated + public static SampleQueue createWithDrm( + Allocator allocator, + Looper playbackLooper, + DrmSessionManager drmSessionManager, + DrmSessionEventListener.EventDispatcher drmEventDispatcher) { + drmSessionManager.setPlayer(playbackLooper, PlayerId.UNSET); + return new SampleQueue( + allocator, + Assertions.checkNotNull(drmSessionManager), + Assertions.checkNotNull(drmEventDispatcher)); + } + + protected SampleQueue( + Allocator allocator, + @Nullable DrmSessionManager drmSessionManager, + @Nullable DrmSessionEventListener.EventDispatcher drmEventDispatcher) { this.drmSessionManager = drmSessionManager; + this.drmEventDispatcher = drmEventDispatcher; + sampleDataQueue = new SampleDataQueue(allocator); extrasHolder = new SampleExtrasHolder(); capacity = SAMPLE_CAPACITY_INCREMENT; sourceIds = new int[capacity]; @@ -108,7 +172,9 @@ public SampleQueue(Allocator allocator, Looper playbackLooper, DrmSessionManager flags = new int[capacity]; sizes = new int[capacity]; cryptoDatas = new CryptoData[capacity]; - formats = new Format[capacity]; + sharedSampleMetadata = + new SpannedData<>(/* removeCallback= */ metadata -> metadata.drmSessionReference.release()); + startTimeUs = Long.MIN_VALUE; largestDiscardedTimestampUs = Long.MIN_VALUE; largestQueuedTimestampUs = Long.MIN_VALUE; upstreamFormatRequired = true; @@ -145,10 +211,11 @@ public void reset(boolean resetUpstreamFormat) { relativeFirstIndex = 0; readPosition = 0; upstreamKeyframeRequired = true; + startTimeUs = Long.MIN_VALUE; largestDiscardedTimestampUs = Long.MIN_VALUE; largestQueuedTimestampUs = Long.MIN_VALUE; isLastSampleQueued = false; - upstreamCommittedFormat = null; + sharedSampleMetadata.clear(); if (resetUpstreamFormat) { unadjustedUpstreamFormat = null; upstreamFormat = null; @@ -156,6 +223,16 @@ public void reset(boolean resetUpstreamFormat) { } } + /** + * Sets the start time for the queue. Samples with earlier timestamps will be discarded or have + * the {@link C#BUFFER_FLAG_DECODE_ONLY} flag set when read. + * + * @param startTimeUs The start time, in microseconds. 
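The factory methods documented above replace the old public SampleQueue constructor, and the playback Looper now reaches the queue through the DrmSessionManager instead of a constructor argument. A minimal migration sketch, not part of this patch; allocator, playbackLooper, drmSessionManager and drmEventDispatcher are assumed to be supplied by the surrounding MediaPeriod code:

    // Before: new SampleQueue(allocator, playbackLooper, drmSessionManager)
    // After: the looper is configured on the DrmSessionManager, then a factory method is used.
    drmSessionManager.setPlayer(playbackLooper, PlayerId.UNSET);
    SampleQueue protectedQueue =
        SampleQueue.createWithDrm(allocator, drmSessionManager, drmEventDispatcher);
    // Queues that never carry protected samples can drop the DRM wiring entirely.
    SampleQueue clearQueue = SampleQueue.createWithoutDrm(allocator);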
+ */ + public final void setStartTimeUs(long startTimeUs) { + this.startTimeUs = startTimeUs; + } + /** * Sets a source identifier for subsequent samples. * @@ -185,6 +262,22 @@ public final void discardUpstreamSamples(int discardFromIndex) { sampleDataQueue.discardUpstreamSampleBytes(discardUpstreamSampleMetadata(discardFromIndex)); } + /** + * Discards samples from the write side of the queue. + * + * @param timeUs Samples will be discarded from the write end of the queue until a sample with a + * timestamp smaller than timeUs is encountered (this sample is not discarded). Must be larger + * than {@link #getLargestReadTimestampUs()}. + */ + public final void discardUpstreamFrom(long timeUs) { + if (length == 0) { + return; + } + checkArgument(timeUs > getLargestReadTimestampUs()); + int retainCount = countUnreadSamplesBefore(timeUs); + discardUpstreamSamples(absoluteFirstIndex + retainCount); + } + // Called by the consuming thread. /** Calls {@link #discardToEnd()} and releases any resources owned by the queue. */ @@ -229,6 +322,7 @@ public final synchronized int peekSourceId() { } /** Returns the upstream {@link Format} in which samples are being queued. */ + @Nullable public final synchronized Format getUpstreamFormat() { return upstreamFormatRequired ? null : upstreamFormat; } @@ -247,6 +341,16 @@ public final synchronized long getLargestQueuedTimestampUs() { return largestQueuedTimestampUs; } + /** + * Returns the largest sample timestamp that has been read since the last {@link #reset}. + * + * @return The largest sample timestamp that has been read, or {@link Long#MIN_VALUE} if no + * samples have been read. + */ + public final synchronized long getLargestReadTimestampUs() { + return max(largestDiscardedTimestampUs, getLargestTimestamp(readPosition)); + } + /** * Returns whether the last sample of the stream has knowingly been queued. A return value of * {@code false} means that the last sample had not been queued or that it's unknown whether the @@ -284,53 +388,56 @@ public synchronized boolean isReady(boolean loadingFinished) { || isLastSampleQueued || (upstreamFormat != null && upstreamFormat != downstreamFormat); } - int relativeReadIndex = getRelativeIndex(readPosition); - if (formats[relativeReadIndex] != downstreamFormat) { + if (sharedSampleMetadata.get(getReadIndex()).format != downstreamFormat) { // A format can be read. return true; } - return mayReadSample(relativeReadIndex); + return mayReadSample(getRelativeIndex(readPosition)); } /** * Attempts to read from the queue. * *

<p>{@link Format Formats} read from this method may be associated to a {@link DrmSession} - * through {@link FormatHolder#drmSession}, which is populated in two scenarios: - * - * <ul> - * <li>The {@link Format} has a non-null {@link Format#drmInitData}. - * <li>The {@link DrmSessionManager} provides placeholder sessions for this queue's track type. - * See {@link DrmSessionManager#acquirePlaceholderSession(Looper, int)}. - * </ul> + * <p>{@link Format Formats} read from this method may be associated to a {@link DrmSession}
      + * through {@link FormatHolder#drmSession}. * * @param formatHolder A {@link FormatHolder} to populate in the case of reading a format. * @param buffer A {@link DecoderInputBuffer} to populate in the case of reading a sample or the * end of the stream. If the end of the stream has been reached, the {@link - * C#BUFFER_FLAG_END_OF_STREAM} flag will be set on the buffer. If a {@link - * DecoderInputBuffer#isFlagsOnly() flags-only} buffer is passed, only the buffer flags may be - * populated by this method and the read position of the queue will not change. - * @param formatRequired Whether the caller requires that the format of the stream be read even if - * it's not changing. A sample will never be read if set to true, however it is still possible - * for the end of stream or nothing to be read. + * C#BUFFER_FLAG_END_OF_STREAM} flag will be set on the buffer. + * @param readFlags Flags controlling the behavior of this read operation. * @param loadingFinished True if an empty queue should be considered the end of the stream. - * @param decodeOnlyUntilUs If a buffer is read, the {@link C#BUFFER_FLAG_DECODE_ONLY} flag will - * be set if the buffer's timestamp is less than this value. * @return The result, which can be {@link C#RESULT_NOTHING_READ}, {@link C#RESULT_FORMAT_READ} or * {@link C#RESULT_BUFFER_READ}. + * @throws InsufficientCapacityException If the {@code buffer} has insufficient capacity to hold + * the data of a sample being read. The buffer {@link DecoderInputBuffer#timeUs timestamp} and + * flags are populated if this exception is thrown, but the read position is not advanced. */ @CallSuper public int read( FormatHolder formatHolder, DecoderInputBuffer buffer, - boolean formatRequired, - boolean loadingFinished, - long decodeOnlyUntilUs) { + @ReadFlags int readFlags, + boolean loadingFinished) { int result = - readSampleMetadata( - formatHolder, buffer, formatRequired, loadingFinished, decodeOnlyUntilUs, extrasHolder); - if (result == C.RESULT_BUFFER_READ && !buffer.isEndOfStream() && !buffer.isFlagsOnly()) { - sampleDataQueue.readToBuffer(buffer, extrasHolder); + peekSampleMetadata( + formatHolder, + buffer, + /* formatRequired= */ (readFlags & FLAG_REQUIRE_FORMAT) != 0, + loadingFinished, + extrasHolder); + if (result == C.RESULT_BUFFER_READ && !buffer.isEndOfStream()) { + boolean peek = (readFlags & FLAG_PEEK) != 0; + if ((readFlags & FLAG_OMIT_SAMPLE_DATA) == 0) { + if (peek) { + sampleDataQueue.peekToBuffer(buffer, extrasHolder); + } else { + sampleDataQueue.readToBuffer(buffer, extrasHolder); + } + } + if (!peek) { + readPosition++; + } } return result; } @@ -346,6 +453,7 @@ public final synchronized boolean seekTo(int sampleIndex) { if (sampleIndex < absoluteFirstIndex || sampleIndex > absoluteFirstIndex + length) { return false; } + startTimeUs = Long.MIN_VALUE; readPosition = sampleIndex - absoluteFirstIndex; return true; } @@ -371,39 +479,45 @@ public final synchronized boolean seekTo(long timeUs, boolean allowTimeBeyondBuf if (offset == -1) { return false; } + startTimeUs = timeUs; readPosition += offset; return true; } /** - * Advances the read position to the keyframe before or at the specified time. + * Returns the number of samples that need to be {@link #skip(int) skipped} to advance the read + * position to the keyframe before or at the specified time. * * @param timeUs The time to advance to. - * @return The number of samples that were skipped, which may be equal to 0. 
+ * @param allowEndOfQueue Whether the end of the queue is considered a keyframe when {@code + * timeUs} is larger than the largest queued timestamp. + * @return The number of samples that need to be skipped, which may be equal to 0. */ - public final synchronized int advanceTo(long timeUs) { + public final synchronized int getSkipCount(long timeUs, boolean allowEndOfQueue) { int relativeReadIndex = getRelativeIndex(readPosition); if (!hasNextSample() || timeUs < timesUs[relativeReadIndex]) { return 0; } + if (timeUs > largestQueuedTimestampUs && allowEndOfQueue) { + return length - readPosition; + } int offset = findSampleBefore(relativeReadIndex, length - readPosition, timeUs, /* keyframe= */ true); if (offset == -1) { return 0; } - readPosition += offset; return offset; } /** - * Advances the read position to the end of the queue. + * Advances the read position by the specified number of samples. * - * @return The number of samples that were skipped. + * @param count The number of samples to advance the read position by. Must be at least 0 and at + * most {@link #getWriteIndex()} - {@link #getReadIndex()}. */ - public final synchronized int advanceToEnd() { - int skipCount = length - readPosition; - readPosition = length; - return skipCount; + public final synchronized void skip(int count) { + checkArgument(count >= 0 && readPosition + count <= length); + readPosition += count; } /** @@ -451,17 +565,18 @@ public final void setSampleOffsetUs(long sampleOffsetUs) { * * @param listener The listener. */ - public final void setUpstreamFormatChangeListener(UpstreamFormatChangedListener listener) { + public final void setUpstreamFormatChangeListener( + @Nullable UpstreamFormatChangedListener listener) { upstreamFormatChangeListener = listener; } // TrackOutput implementation. Called by the loading thread. 
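Read-side callers change accordingly: the old formatRequired/decodeOnlyUntilUs parameters are folded into the SampleStream read flags, and advanceTo/advanceToEnd become a getSkipCount query followed by an explicit skip. A caller-side sketch, illustrative only; sampleQueue and positionUs are placeholder names, not code from this patch:

    FormatHolder formatHolder = new FormatHolder();
    DecoderInputBuffer buffer =
        new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_NORMAL);
    // Ask for the stream format first, then read sample buffers with no special flags.
    int result =
        sampleQueue.read(
            formatHolder,
            buffer,
            /* readFlags= */ SampleStream.FLAG_REQUIRE_FORMAT,
            /* loadingFinished= */ false);
    if (result == C.RESULT_FORMAT_READ) {
      Format format = formatHolder.format;
      // ... (re)configure the downstream decoder for format ...
    } else if (result == C.RESULT_BUFFER_READ && !buffer.isEndOfStream()) {
      // buffer.data and buffer.timeUs are populated here.
    }
    // Skipping to a position is now an explicit two-step operation.
    int skipCount = sampleQueue.getSkipCount(positionUs, /* allowEndOfQueue= */ false);
    sampleQueue.skip(skipCount);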
@Override - public final void format(Format unadjustedUpstreamFormat) { - Format adjustedUpstreamFormat = getAdjustedUpstreamFormat(unadjustedUpstreamFormat); - pendingUpstreamFormatAdjustment = false; - this.unadjustedUpstreamFormat = unadjustedUpstreamFormat; + public final void format(Format format) { + Format adjustedUpstreamFormat = getAdjustedUpstreamFormat(format); + upstreamFormatAdjustmentRequired = false; + unadjustedUpstreamFormat = format; boolean upstreamFormatChanged = setUpstreamFormat(adjustedUpstreamFormat); if (upstreamFormatChangeListener != null && upstreamFormatChanged) { upstreamFormatChangeListener.onUpstreamFormatChanged(adjustedUpstreamFormat); @@ -469,33 +584,61 @@ public final void format(Format unadjustedUpstreamFormat) { } @Override - public final int sampleData(ExtractorInput input, int length, boolean allowEndOfInput) - throws IOException, InterruptedException { + public final int sampleData( + DataReader input, int length, boolean allowEndOfInput, @SampleDataPart int sampleDataPart) + throws IOException { return sampleDataQueue.sampleData(input, length, allowEndOfInput); } @Override - public final void sampleData(ParsableByteArray buffer, int length) { - sampleDataQueue.sampleData(buffer, length); + public final void sampleData( + ParsableByteArray data, int length, @SampleDataPart int sampleDataPart) { + sampleDataQueue.sampleData(data, length); } @Override - public final void sampleMetadata( + public void sampleMetadata( long timeUs, @C.BufferFlags int flags, int size, int offset, @Nullable CryptoData cryptoData) { - if (pendingUpstreamFormatAdjustment) { - format(unadjustedUpstreamFormat); + if (upstreamFormatAdjustmentRequired) { + format(Assertions.checkStateNotNull(unadjustedUpstreamFormat)); + } + + boolean isKeyframe = (flags & C.BUFFER_FLAG_KEY_FRAME) != 0; + if (upstreamKeyframeRequired) { + if (!isKeyframe) { + return; + } + upstreamKeyframeRequired = false; } + timeUs += sampleOffsetUs; + if (upstreamAllSamplesAreSyncSamples) { + if (timeUs < startTimeUs) { + // If we know that all samples are sync samples, we can discard those that come before the + // start time on the write side of the queue. + return; + } + if ((flags & C.BUFFER_FLAG_KEY_FRAME) == 0) { + // The flag should always be set unless the source content has incorrect sample metadata. + // Log a warning (once per format change, to avoid log spam) and override the flag. + if (!loggedUnexpectedNonSyncSample) { + Log.w(TAG, "Overriding unexpected non-sync sample for format: " + upstreamFormat); + loggedUnexpectedNonSyncSample = true; + } + flags |= C.BUFFER_FLAG_KEY_FRAME; + } + } if (pendingSplice) { - if ((flags & C.BUFFER_FLAG_KEY_FRAME) == 0 || !attemptSplice(timeUs)) { + if (!isKeyframe || !attemptSplice(timeUs)) { return; } pendingSplice = false; } + long absoluteOffset = sampleDataQueue.getTotalBytesWritten() - size - offset; commitSample(timeUs, flags, absoluteOffset, size, cryptoData); } @@ -505,7 +648,7 @@ public final void sampleMetadata( * will be called to adjust the upstream {@link Format} again before the next sample is queued. 
*/ protected final void invalidateUpstreamFormatAdjustment() { - pendingUpstreamFormatAdjustment = true; + upstreamFormatAdjustmentRequired = true; } /** @@ -521,7 +664,11 @@ protected final void invalidateUpstreamFormatAdjustment() { @CallSuper protected Format getAdjustedUpstreamFormat(Format format) { if (sampleOffsetUs != 0 && format.subsampleOffsetUs != Format.OFFSET_SAMPLE_RELATIVE) { - format = format.copyWithSubsampleOffsetUs(format.subsampleOffsetUs + sampleOffsetUs); + format = + format + .buildUpon() + .setSubsampleOffsetUs(format.subsampleOffsetUs + sampleOffsetUs) + .build(); } return format; } @@ -535,30 +682,14 @@ private synchronized void rewind() { } @SuppressWarnings("ReferenceEquality") // See comments in setUpstreamFormat - private synchronized int readSampleMetadata( + private synchronized int peekSampleMetadata( FormatHolder formatHolder, DecoderInputBuffer buffer, boolean formatRequired, boolean loadingFinished, - long decodeOnlyUntilUs, SampleExtrasHolder extrasHolder) { buffer.waitingForKeys = false; - // This is a temporary fix for https://github.com/google/ExoPlayer/issues/6155. - // TODO: Remove it and replace it with a fix that discards samples when writing to the queue. - boolean hasNextSample; - int relativeReadIndex = C.INDEX_UNSET; - while ((hasNextSample = hasNextSample())) { - relativeReadIndex = getRelativeIndex(readPosition); - long timeUs = timesUs[relativeReadIndex]; - if (timeUs < decodeOnlyUntilUs - && MimeTypes.allSamplesAreSyncSamples(formats[relativeReadIndex].sampleMimeType)) { - readPosition++; - } else { - break; - } - } - - if (!hasNextSample) { + if (!hasNextSample()) { if (loadingFinished || isLastSampleQueued) { buffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM); return C.RESULT_BUFFER_READ; @@ -570,11 +701,13 @@ private synchronized int readSampleMetadata( } } - if (formatRequired || formats[relativeReadIndex] != downstreamFormat) { - onFormatResult(formats[relativeReadIndex], formatHolder); + Format format = sharedSampleMetadata.get(getReadIndex()).format; + if (formatRequired || format != downstreamFormat) { + onFormatResult(format, formatHolder); return C.RESULT_FORMAT_READ; } + int relativeReadIndex = getRelativeIndex(readPosition); if (!mayReadSample(relativeReadIndex)) { buffer.waitingForKeys = true; return C.RESULT_NOTHING_READ; @@ -582,41 +715,38 @@ private synchronized int readSampleMetadata( buffer.setFlags(flags[relativeReadIndex]); buffer.timeUs = timesUs[relativeReadIndex]; - if (buffer.timeUs < decodeOnlyUntilUs) { + if (buffer.timeUs < startTimeUs) { buffer.addFlag(C.BUFFER_FLAG_DECODE_ONLY); } - if (buffer.isFlagsOnly()) { - return C.RESULT_BUFFER_READ; - } extrasHolder.size = sizes[relativeReadIndex]; extrasHolder.offset = offsets[relativeReadIndex]; extrasHolder.cryptoData = cryptoDatas[relativeReadIndex]; - readPosition++; return C.RESULT_BUFFER_READ; } private synchronized boolean setUpstreamFormat(Format format) { - if (format == null) { - upstreamFormatRequired = true; - return false; - } upstreamFormatRequired = false; if (Util.areEqual(format, upstreamFormat)) { // The format is unchanged. If format and upstreamFormat are different objects, we keep the // current upstreamFormat so we can detect format changes on the read side using cheap // referential quality. return false; - } else if (Util.areEqual(format, upstreamCommittedFormat)) { + } + + if (!sharedSampleMetadata.isEmpty() + && sharedSampleMetadata.getEndValue().format.equals(format)) { // The format has changed back to the format of the last committed sample. 
If they are // different objects, we revert back to using upstreamCommittedFormat as the upstreamFormat // so we can detect format changes on the read side using cheap referential equality. - upstreamFormat = upstreamCommittedFormat; - return true; + upstreamFormat = sharedSampleMetadata.getEndValue().format; } else { upstreamFormat = format; - return true; } + upstreamAllSamplesAreSyncSamples = + MimeTypes.allSamplesAreSyncSamples(upstreamFormat.sampleMimeType, upstreamFormat.codecs); + loggedUnexpectedNonSyncSample = false; + return true; } private synchronized long discardSampleMetadataTo( @@ -648,7 +778,7 @@ private synchronized long discardSampleMetadataToEnd() { private void releaseDrmSessionReferences() { if (currentDrmSession != null) { - currentDrmSession.release(); + currentDrmSession.release(drmEventDispatcher); currentDrmSession = null; // Clear downstream format to avoid violating the assumption that downstreamFormat.drmInitData // != null implies currentSession != null @@ -657,17 +787,20 @@ private void releaseDrmSessionReferences() { } private synchronized void commitSample( - long timeUs, @C.BufferFlags int sampleFlags, long offset, int size, CryptoData cryptoData) { - if (upstreamKeyframeRequired) { - if ((sampleFlags & C.BUFFER_FLAG_KEY_FRAME) == 0) { - return; - } - upstreamKeyframeRequired = false; + long timeUs, + @C.BufferFlags int sampleFlags, + long offset, + int size, + @Nullable CryptoData cryptoData) { + if (length > 0) { + // Ensure sample data doesn't overlap. + int previousSampleRelativeIndex = getRelativeIndex(length - 1); + checkArgument( + offsets[previousSampleRelativeIndex] + sizes[previousSampleRelativeIndex] <= offset); } - Assertions.checkState(!upstreamFormatRequired); isLastSampleQueued = (sampleFlags & C.BUFFER_FLAG_LAST_SAMPLE) != 0; - largestQueuedTimestampUs = Math.max(largestQueuedTimestampUs, timeUs); + largestQueuedTimestampUs = max(largestQueuedTimestampUs, timeUs); int relativeEndIndex = getRelativeIndex(length); timesUs[relativeEndIndex] = timeUs; @@ -675,9 +808,19 @@ private synchronized void commitSample( sizes[relativeEndIndex] = size; flags[relativeEndIndex] = sampleFlags; cryptoDatas[relativeEndIndex] = cryptoData; - formats[relativeEndIndex] = upstreamFormat; sourceIds[relativeEndIndex] = upstreamSourceId; - upstreamCommittedFormat = upstreamFormat; + + if (sharedSampleMetadata.isEmpty() + || !sharedSampleMetadata.getEndValue().format.equals(upstreamFormat)) { + DrmSessionReference drmSessionReference = + drmSessionManager != null + ? 
drmSessionManager.preacquireSession(drmEventDispatcher, upstreamFormat) + : DrmSessionReference.EMPTY; + + sharedSampleMetadata.appendSpan( + getWriteIndex(), + new SharedSampleMetadata(checkNotNull(upstreamFormat), drmSessionReference)); + } length++; if (length == capacity) { @@ -689,14 +832,12 @@ private synchronized void commitSample( int[] newFlags = new int[newCapacity]; int[] newSizes = new int[newCapacity]; CryptoData[] newCryptoDatas = new CryptoData[newCapacity]; - Format[] newFormats = new Format[newCapacity]; int beforeWrap = capacity - relativeFirstIndex; System.arraycopy(offsets, relativeFirstIndex, newOffsets, 0, beforeWrap); System.arraycopy(timesUs, relativeFirstIndex, newTimesUs, 0, beforeWrap); System.arraycopy(flags, relativeFirstIndex, newFlags, 0, beforeWrap); System.arraycopy(sizes, relativeFirstIndex, newSizes, 0, beforeWrap); System.arraycopy(cryptoDatas, relativeFirstIndex, newCryptoDatas, 0, beforeWrap); - System.arraycopy(formats, relativeFirstIndex, newFormats, 0, beforeWrap); System.arraycopy(sourceIds, relativeFirstIndex, newSourceIds, 0, beforeWrap); int afterWrap = relativeFirstIndex; System.arraycopy(offsets, 0, newOffsets, beforeWrap, afterWrap); @@ -704,14 +845,12 @@ private synchronized void commitSample( System.arraycopy(flags, 0, newFlags, beforeWrap, afterWrap); System.arraycopy(sizes, 0, newSizes, beforeWrap, afterWrap); System.arraycopy(cryptoDatas, 0, newCryptoDatas, beforeWrap, afterWrap); - System.arraycopy(formats, 0, newFormats, beforeWrap, afterWrap); System.arraycopy(sourceIds, 0, newSourceIds, beforeWrap, afterWrap); offsets = newOffsets; timesUs = newTimesUs; flags = newFlags; sizes = newSizes; cryptoDatas = newCryptoDatas; - formats = newFormats; sourceIds = newSourceIds; relativeFirstIndex = 0; capacity = newCapacity; @@ -729,30 +868,21 @@ private synchronized boolean attemptSplice(long timeUs) { if (length == 0) { return timeUs > largestDiscardedTimestampUs; } - long largestReadTimestampUs = - Math.max(largestDiscardedTimestampUs, getLargestTimestamp(readPosition)); - if (largestReadTimestampUs >= timeUs) { + if (getLargestReadTimestampUs() >= timeUs) { return false; } - int retainCount = length; - int relativeSampleIndex = getRelativeIndex(length - 1); - while (retainCount > readPosition && timesUs[relativeSampleIndex] >= timeUs) { - retainCount--; - relativeSampleIndex--; - if (relativeSampleIndex == -1) { - relativeSampleIndex = capacity - 1; - } - } + int retainCount = countUnreadSamplesBefore(timeUs); discardUpstreamSampleMetadata(absoluteFirstIndex + retainCount); return true; } private long discardUpstreamSampleMetadata(int discardFromIndex) { int discardCount = getWriteIndex() - discardFromIndex; - Assertions.checkArgument(0 <= discardCount && discardCount <= (length - readPosition)); + checkArgument(0 <= discardCount && discardCount <= (length - readPosition)); length -= discardCount; - largestQueuedTimestampUs = Math.max(largestDiscardedTimestampUs, getLargestTimestamp(length)); + largestQueuedTimestampUs = max(largestDiscardedTimestampUs, getLargestTimestamp(length)); isLastSampleQueued = discardCount == 0 && isLastSampleQueued; + sharedSampleMetadata.discardFrom(discardFromIndex); if (length != 0) { int relativeLastWriteIndex = getRelativeIndex(length - 1); return offsets[relativeLastWriteIndex] + sizes[relativeLastWriteIndex]; @@ -772,20 +902,20 @@ private boolean hasNextSample() { * @param outputFormatHolder The output {@link FormatHolder}. 
*/ private void onFormatResult(Format newFormat, FormatHolder outputFormatHolder) { - outputFormatHolder.format = newFormat; boolean isFirstFormat = downstreamFormat == null; - DrmInitData oldDrmInitData = isFirstFormat ? null : downstreamFormat.drmInitData; + @Nullable DrmInitData oldDrmInitData = isFirstFormat ? null : downstreamFormat.drmInitData; downstreamFormat = newFormat; - if (drmSessionManager == DrmSessionManager.DUMMY) { - // Avoid attempting to acquire a session using the dummy DRM session manager. It's likely that - // the media source creation has not yet been migrated and the renderer can acquire the - // session for the read DRM init data. - // TODO: Remove once renderers are migrated [Internal ref: b/122519809]. + @Nullable DrmInitData newDrmInitData = newFormat.drmInitData; + + outputFormatHolder.format = + drmSessionManager != null + ? newFormat.copyWithCryptoType(drmSessionManager.getCryptoType(newFormat)) + : newFormat; + outputFormatHolder.drmSession = currentDrmSession; + if (drmSessionManager == null) { + // This sample queue is not expected to handle DRM. Nothing to do. return; } - DrmInitData newDrmInitData = newFormat.drmInitData; - outputFormatHolder.includesDrmSession = true; - outputFormatHolder.drmSession = currentDrmSession; if (!isFirstFormat && Util.areEqual(oldDrmInitData, newDrmInitData)) { // Nothing to do. return; @@ -793,15 +923,11 @@ private void onFormatResult(Format newFormat, FormatHolder outputFormatHolder) { // Ensure we acquire the new session before releasing the previous one in case the same session // is being used for both DrmInitData. @Nullable DrmSession previousSession = currentDrmSession; - currentDrmSession = - newDrmInitData != null - ? drmSessionManager.acquireSession(playbackLooper, newDrmInitData) - : drmSessionManager.acquirePlaceholderSession( - playbackLooper, MimeTypes.getTrackType(newFormat.sampleMimeType)); + currentDrmSession = drmSessionManager.acquireSession(drmEventDispatcher, newFormat); outputFormatHolder.drmSession = currentDrmSession; if (previousSession != null) { - previousSession.release(); + previousSession.release(drmEventDispatcher); } } @@ -812,12 +938,6 @@ private void onFormatResult(Format newFormat, FormatHolder outputFormatHolder) { * @return Whether it's possible to read the next sample. */ private boolean mayReadSample(int relativeReadIndex) { - if (drmSessionManager == DrmSessionManager.DUMMY) { - // TODO: Remove once renderers are migrated [Internal ref: b/122519809]. - // For protected content it's likely that the DrmSessionManager is still being injected into - // the renderers. We assume that the renderers will be able to acquire a DrmSession if needed. - return true; - } return currentDrmSession == null || currentDrmSession.getState() == DrmSession.STATE_OPENED_WITH_KEYS || ((flags[relativeReadIndex] & C.BUFFER_FLAG_ENCRYPTED) == 0 @@ -845,6 +965,11 @@ private int findSampleBefore(int relativeStartIndex, int length, long timeUs, bo if (!keyframe || (flags[searchIndex] & C.BUFFER_FLAG_KEY_FRAME) != 0) { // We've found a suitable sample. sampleCountToTarget = i; + if (timesUs[searchIndex] == timeUs) { + // Stop the search if we found a sample at the specified time to avoid returning a later + // sample with the same exactly matching timestamp. 
+ break; + } } searchIndex++; if (searchIndex == capacity) { @@ -854,15 +979,36 @@ private int findSampleBefore(int relativeStartIndex, int length, long timeUs, bo return sampleCountToTarget; } + /** + * Counts the number of samples that haven't been read that have a timestamp smaller than {@code + * timeUs}. + * + * @param timeUs The specified time. + * @return The number of unread samples with a timestamp smaller than {@code timeUs}. + */ + private int countUnreadSamplesBefore(long timeUs) { + int count = length; + int relativeSampleIndex = getRelativeIndex(length - 1); + while (count > readPosition && timesUs[relativeSampleIndex] >= timeUs) { + count--; + relativeSampleIndex--; + if (relativeSampleIndex == -1) { + relativeSampleIndex = capacity - 1; + } + } + return count; + } + /** * Discards the specified number of samples. * * @param discardCount The number of samples to discard. * @return The corresponding offset up to which data should be discarded. */ + @GuardedBy("this") private long discardSamples(int discardCount) { largestDiscardedTimestampUs = - Math.max(largestDiscardedTimestampUs, getLargestTimestamp(discardCount)); + max(largestDiscardedTimestampUs, getLargestTimestamp(discardCount)); length -= discardCount; absoluteFirstIndex += discardCount; relativeFirstIndex += discardCount; @@ -873,6 +1019,8 @@ private long discardSamples(int discardCount) { if (readPosition < 0) { readPosition = 0; } + sharedSampleMetadata.discardTo(absoluteFirstIndex); + if (length == 0) { int relativeLastDiscardIndex = (relativeFirstIndex == 0 ? capacity : relativeFirstIndex) - 1; return offsets[relativeLastDiscardIndex] + sizes[relativeLastDiscardIndex]; @@ -896,7 +1044,7 @@ private long getLargestTimestamp(int length) { long largestTimestampUs = Long.MIN_VALUE; int relativeSampleIndex = getRelativeIndex(length - 1); for (int i = 0; i < length; i++) { - largestTimestampUs = Math.max(largestTimestampUs, timesUs[relativeSampleIndex]); + largestTimestampUs = max(largestTimestampUs, timesUs[relativeSampleIndex]); if ((flags[relativeSampleIndex] & C.BUFFER_FLAG_KEY_FRAME) != 0) { break; } @@ -923,6 +1071,17 @@ private int getRelativeIndex(int offset) { public int size; public long offset; - public CryptoData cryptoData; + @Nullable public CryptoData cryptoData; + } + + /** A holder for metadata that applies to a span of contiguous samples. 
*/ + private static final class SharedSampleMetadata { + public final Format format; + public final DrmSessionReference drmSessionReference; + + private SharedSampleMetadata(Format format, DrmSessionReference drmSessionReference) { + this.format = format; + this.drmSessionReference = drmSessionReference; + } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SampleStream.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SampleStream.java index 54293aa4c1..ff40bd1c34 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SampleStream.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SampleStream.java @@ -15,22 +15,71 @@ */ package com.google.android.exoplayer2.source; +import static java.lang.annotation.ElementType.TYPE_USE; + +import androidx.annotation.IntDef; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; +import com.google.android.exoplayer2.decoder.DecoderInputBuffer.InsufficientCapacityException; import java.io.IOException; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; -/** - * A stream of media samples (and associated format information). - */ +/** A stream of media samples (and associated format information). */ public interface SampleStream { + /** + * Flags that can be specified when calling {@link #readData}. Possible flag values are {@link + * #FLAG_PEEK}, {@link #FLAG_REQUIRE_FORMAT} and {@link #FLAG_OMIT_SAMPLE_DATA}. + */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef( + flag = true, + value = {FLAG_PEEK, FLAG_REQUIRE_FORMAT, FLAG_OMIT_SAMPLE_DATA}) + @interface ReadFlags {} + /** Specifies that the read position should not be advanced if a sample buffer is read. */ + int FLAG_PEEK = 1; + /** + * Specifies that if a sample buffer would normally be read next, the format of the stream should + * be read instead. In detail, the effect of this flag is as follows: + * + *
<ul> + * <li>If a sample buffer would be read were the flag not set, then the stream format will be + * read instead. + * <li>If nothing would be read were the flag not set, then the stream format will be read if + * it's known. If the stream format is not known then behavior is unchanged. + * <li>If an end of stream buffer would be read were the flag not set, then behavior is + * unchanged. + * </ul>
      + */ + int FLAG_REQUIRE_FORMAT = 1 << 1; + /** + * Specifies that {@link DecoderInputBuffer#data}, {@link DecoderInputBuffer#supplementalData} and + * {@link DecoderInputBuffer#cryptoInfo} should not be populated when reading a sample buffer. + * + *

      This flag is useful for efficiently reading or (when combined with {@link #FLAG_PEEK}) + * peeking sample metadata. It can also be used for efficiency by a caller wishing to skip a + * sample buffer. + */ + int FLAG_OMIT_SAMPLE_DATA = 1 << 2; + + /** Return values of {@link #readData}. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({C.RESULT_NOTHING_READ, C.RESULT_FORMAT_READ, C.RESULT_BUFFER_READ}) + @interface ReadDataResult {} + /** * Returns whether data is available to be read. - *

- * Note: If the stream has ended then a buffer with the end of stream flag can always be read from - * {@link #readData(FormatHolder, DecoderInputBuffer, boolean)}. Hence an ended stream is always - * ready. + * + * <p>
      Note: If the stream has ended then a buffer with the end of stream flag can always be read + * from {@link #readData}. Hence an ended stream is always ready. * * @return Whether data is available to be read. */ @@ -55,17 +104,15 @@ public interface SampleStream { * @param formatHolder A {@link FormatHolder} to populate in the case of reading a format. * @param buffer A {@link DecoderInputBuffer} to populate in the case of reading a sample or the * end of the stream. If the end of the stream has been reached, the {@link - * C#BUFFER_FLAG_END_OF_STREAM} flag will be set on the buffer. If a {@link - * DecoderInputBuffer#isFlagsOnly() flags-only} buffer is passed, then no {@link - * DecoderInputBuffer#data} will be read and the read position of the stream will not change, - * but the flags of the buffer will be populated. - * @param formatRequired Whether the caller requires that the format of the stream be read even if - * it's not changing. A sample will never be read if set to true, however it is still possible - * for the end of stream or nothing to be read. - * @return The result, which can be {@link C#RESULT_NOTHING_READ}, {@link C#RESULT_FORMAT_READ} or - * {@link C#RESULT_BUFFER_READ}. + * C#BUFFER_FLAG_END_OF_STREAM} flag will be set on the buffer. + * @param readFlags Flags controlling the behavior of this read operation. + * @return The {@link ReadDataResult result} of the read operation. + * @throws InsufficientCapacityException If the {@code buffer} has insufficient capacity to hold + * the data of a sample being read. The buffer {@link DecoderInputBuffer#timeUs timestamp} and + * flags are populated if this exception is thrown, but the read position is not advanced. */ - int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, boolean formatRequired); + @ReadDataResult + int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, @ReadFlags int readFlags); /** * Attempts to skip to the keyframe before the specified position, or to the end of the stream if @@ -75,5 +122,4 @@ public interface SampleStream { * @return The number of samples that were skipped. */ int skipData(long positionUs); - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SequenceableLoader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SequenceableLoader.java index 189c13ef0f..91f0e9b820 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SequenceableLoader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SequenceableLoader.java @@ -18,14 +18,10 @@ import com.google.android.exoplayer2.C; // TODO: Clarify the requirements for implementing this interface [Internal ref: b/36250203]. -/** - * A loader that can proceed in approximate synchronization with other loaders. - */ +/** A loader that can proceed in approximate synchronization with other loaders. */ public interface SequenceableLoader { - /** - * A callback to be notified of {@link SequenceableLoader} events. - */ + /** A callback to be notified of {@link SequenceableLoader} events. */ interface Callback { /** @@ -33,7 +29,6 @@ interface Callback { * to be called when it can continue to load data. Called on the playback thread. */ void onContinueLoadingRequested(T source); - } /** @@ -44,19 +39,17 @@ interface Callback { */ long getBufferedPositionUs(); - /** - * Returns the next load time, or {@link C#TIME_END_OF_SOURCE} if loading has finished. 
- */ + /** Returns the next load time, or {@link C#TIME_END_OF_SOURCE} if loading has finished. */ long getNextLoadPositionUs(); /** * Attempts to continue loading. * * @param positionUs The current playback position in microseconds. If playback of the period to - * which this loader belongs has not yet started, the value will be the starting position - * in the period minus the duration of any media in previous periods still to be played. - * @return True if progress was made, meaning that {@link #getNextLoadPositionUs()} will return - * a different value than prior to the call. False otherwise. + * which this loader belongs has not yet started, the value will be the starting position in + * the period minus the duration of any media in previous periods still to be played. + * @return True if progress was made, meaning that {@link #getNextLoadPositionUs()} will return a + * different value than prior to the call. False otherwise. */ boolean continueLoading(long positionUs); @@ -66,8 +59,8 @@ interface Callback { /** * Re-evaluates the buffer given the playback position. * - *
<p>
      Re-evaluation may discard buffered media so that it can be re-buffered in a different - * quality. + *
<p>
      Re-evaluation may discard buffered media or cancel ongoing loads so that media can be + * re-buffered in a different quality. * * @param positionUs The current playback position in microseconds. If playback of this period has * not yet started, the value will be the starting position in this period minus the duration diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ShuffleOrder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ShuffleOrder.java index 5af9dbd20a..9c4e502dad 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ShuffleOrder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ShuffleOrder.java @@ -23,12 +23,13 @@ * Shuffled order of indices. * *
<p>
      The shuffle order must be immutable to ensure thread safety. + * + *
<p>
      The order must be consistent when traversed both {@linkplain #getNextIndex(int) forwards} and + * {@linkplain #getPreviousIndex(int) backwards}. */ public interface ShuffleOrder { - /** - * The default {@link ShuffleOrder} implementation for random shuffle order. - */ + /** The default {@link ShuffleOrder} implementation for random shuffle order. */ class DefaultShuffleOrder implements ShuffleOrder { private final Random random; @@ -164,12 +165,9 @@ private static int[] createShuffledList(int length, Random random) { } return shuffled; } - } - /** - * A {@link ShuffleOrder} implementation which does not shuffle. - */ + /** A {@link ShuffleOrder} implementation which does not shuffle. */ final class UnshuffledShuffleOrder implements ShuffleOrder { private final int length; @@ -224,9 +222,7 @@ public ShuffleOrder cloneAndClear() { } } - /** - * Returns length of shuffle order. - */ + /** Returns length of shuffle order. */ int getLength(); /** diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SilenceMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SilenceMediaSource.java index 773eba732b..532171a7a4 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SilenceMediaSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SilenceMediaSource.java @@ -15,18 +15,25 @@ */ package com.google.android.exoplayer2.source; +import static java.lang.Math.min; + +import android.net.Uri; +import androidx.annotation.IntRange; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.FormatHolder; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.SeekParameters; +import com.google.android.exoplayer2.Timeline; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.upstream.Allocator; import com.google.android.exoplayer2.upstream.TransferListener; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.util.ArrayList; import org.checkerframework.checker.nullness.compatqual.NullableType; @@ -40,57 +47,65 @@ public static final class Factory { @Nullable private Object tag; /** - * Sets the duration of the silent audio. + * Sets the duration of the silent audio. The value needs to be a positive value. * * @param durationUs The duration of silent audio to output, in microseconds. * @return This factory, for convenience. */ - public Factory setDurationUs(long durationUs) { + @CanIgnoreReturnValue + public Factory setDurationUs(@IntRange(from = 1) long durationUs) { this.durationUs = durationUs; return this; } /** - * Sets a tag for the media source which will be published in the {@link - * com.google.android.exoplayer2.Timeline} of the source as {@link - * com.google.android.exoplayer2.Timeline.Window#tag}. + * Sets a tag for the media source which will be published in the {@link Timeline} of the source + * as {@link MediaItem.LocalConfiguration#tag Window#mediaItem.localConfiguration.tag}. * * @param tag A tag for the media source. * @return This factory, for convenience. 
*/ + @CanIgnoreReturnValue public Factory setTag(@Nullable Object tag) { this.tag = tag; return this; } - /** Creates a new {@link SilenceMediaSource}. */ + /** + * Creates a new {@link SilenceMediaSource}. + * + * @throws IllegalStateException if the duration is a non-positive value. + */ public SilenceMediaSource createMediaSource() { - return new SilenceMediaSource(durationUs, tag); + Assertions.checkState(durationUs > 0); + return new SilenceMediaSource(durationUs, MEDIA_ITEM.buildUpon().setTag(tag).build()); } } + /** The media id used by any media item of silence media sources. */ + public static final String MEDIA_ID = "SilenceMediaSource"; + private static final int SAMPLE_RATE_HZ = 44100; - @C.PcmEncoding private static final int ENCODING = C.ENCODING_PCM_16BIT; + private static final @C.PcmEncoding int PCM_ENCODING = C.ENCODING_PCM_16BIT; private static final int CHANNEL_COUNT = 2; private static final Format FORMAT = - Format.createAudioSampleFormat( - /* id=*/ null, - MimeTypes.AUDIO_RAW, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - /* maxInputSize= */ Format.NO_VALUE, - CHANNEL_COUNT, - SAMPLE_RATE_HZ, - ENCODING, - /* initializationData= */ null, - /* drmInitData= */ null, - /* selectionFlags= */ 0, - /* language= */ null); + new Format.Builder() + .setSampleMimeType(MimeTypes.AUDIO_RAW) + .setChannelCount(CHANNEL_COUNT) + .setSampleRate(SAMPLE_RATE_HZ) + .setPcmEncoding(PCM_ENCODING) + .build(); + private static final MediaItem MEDIA_ITEM = + new MediaItem.Builder() + .setMediaId(MEDIA_ID) + .setUri(Uri.EMPTY) + .setMimeType(FORMAT.sampleMimeType) + .build(); private static final byte[] SILENCE_SAMPLE = - new byte[Util.getPcmFrameSize(ENCODING, CHANNEL_COUNT) * 1024]; + new byte[Util.getPcmFrameSize(PCM_ENCODING, CHANNEL_COUNT) * 1024]; private final long durationUs; - @Nullable private final Object tag; + private final MediaItem mediaItem; /** * Creates a new media source providing silent audio of the given duration. @@ -98,13 +113,19 @@ public SilenceMediaSource createMediaSource() { * @param durationUs The duration of silent audio to output, in microseconds. */ public SilenceMediaSource(long durationUs) { - this(durationUs, /* tag= */ null); + this(durationUs, MEDIA_ITEM); } - private SilenceMediaSource(long durationUs, @Nullable Object tag) { + /** + * Creates a new media source providing silent audio of the given duration. + * + * @param durationUs The duration of silent audio to output, in microseconds. + * @param mediaItem The media item associated with this media source. 
+ */ + private SilenceMediaSource(long durationUs, MediaItem mediaItem) { Assertions.checkArgument(durationUs >= 0); this.durationUs = durationUs; - this.tag = tag; + this.mediaItem = mediaItem; } @Override @@ -114,9 +135,9 @@ protected void prepareSourceInternal(@Nullable TransferListener mediaTransferLis durationUs, /* isSeekable= */ true, /* isDynamic= */ false, - /* isLive= */ false, + /* useLiveConfiguration= */ false, /* manifest= */ null, - tag)); + mediaItem)); } @Override @@ -130,6 +151,11 @@ public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long star @Override public void releasePeriod(MediaPeriod mediaPeriod) {} + @Override + public MediaItem getMediaItem() { + return mediaItem; + } + @Override protected void releaseSourceInternal() {} @@ -160,7 +186,7 @@ public TrackGroupArray getTrackGroups() { @Override public long selectTracks( - @NullableType TrackSelection[] selections, + @NullableType ExoTrackSelection[] selections, boolean[] mayRetainStreamFlags, @NullableType SampleStream[] streams, boolean[] streamResetFlags, @@ -258,8 +284,8 @@ public void maybeThrowError() {} @Override public int readData( - FormatHolder formatHolder, DecoderInputBuffer buffer, boolean formatRequired) { - if (!sentFormat || formatRequired) { + FormatHolder formatHolder, DecoderInputBuffer buffer, @ReadFlags int readFlags) { + if (!sentFormat || (readFlags & FLAG_REQUIRE_FORMAT) != 0) { formatHolder.format = FORMAT; sentFormat = true; return C.RESULT_FORMAT_READ; @@ -271,12 +297,16 @@ public int readData( return C.RESULT_BUFFER_READ; } - int bytesToWrite = (int) Math.min(SILENCE_SAMPLE.length, bytesRemaining); - buffer.ensureSpaceForWrite(bytesToWrite); - buffer.data.put(SILENCE_SAMPLE, /* offset= */ 0, bytesToWrite); buffer.timeUs = getAudioPositionUs(positionBytes); buffer.addFlag(C.BUFFER_FLAG_KEY_FRAME); - positionBytes += bytesToWrite; + int bytesToWrite = (int) min(SILENCE_SAMPLE.length, bytesRemaining); + if ((readFlags & FLAG_OMIT_SAMPLE_DATA) == 0) { + buffer.ensureSpaceForWrite(bytesToWrite); + buffer.data.put(SILENCE_SAMPLE, /* offset= */ 0, bytesToWrite); + } + if ((readFlags & FLAG_PEEK) == 0) { + positionBytes += bytesToWrite; + } return C.RESULT_BUFFER_READ; } @@ -290,11 +320,11 @@ public int skipData(long positionUs) { private static long getAudioByteCount(long durationUs) { long audioSampleCount = durationUs * SAMPLE_RATE_HZ / C.MICROS_PER_SECOND; - return Util.getPcmFrameSize(ENCODING, CHANNEL_COUNT) * audioSampleCount; + return Util.getPcmFrameSize(PCM_ENCODING, CHANNEL_COUNT) * audioSampleCount; } private static long getAudioPositionUs(long bytes) { - long audioSampleCount = bytes / Util.getPcmFrameSize(ENCODING, CHANNEL_COUNT); + long audioSampleCount = bytes / Util.getPcmFrameSize(PCM_ENCODING, CHANNEL_COUNT); return audioSampleCount * C.MICROS_PER_SECOND / SAMPLE_RATE_HZ; } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SinglePeriodTimeline.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SinglePeriodTimeline.java index 45f64cacf2..105b59de83 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SinglePeriodTimeline.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SinglePeriodTimeline.java @@ -15,41 +15,60 @@ */ package com.google.android.exoplayer2.source; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import android.net.Uri; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import 
com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.Timeline; import com.google.android.exoplayer2.util.Assertions; -/** - * A {@link Timeline} consisting of a single period and static window. - */ +/** A {@link Timeline} consisting of a single period and static window. */ public final class SinglePeriodTimeline extends Timeline { private static final Object UID = new Object(); + private static final MediaItem MEDIA_ITEM = + new MediaItem.Builder().setMediaId("SinglePeriodTimeline").setUri(Uri.EMPTY).build(); private final long presentationStartTimeMs; private final long windowStartTimeMs; + private final long elapsedRealtimeEpochOffsetMs; private final long periodDurationUs; private final long windowDurationUs; private final long windowPositionInPeriodUs; private final long windowDefaultStartPositionUs; private final boolean isSeekable; private final boolean isDynamic; - private final boolean isLive; - @Nullable private final Object tag; + private final boolean suppressPositionProjection; @Nullable private final Object manifest; + @Nullable private final MediaItem mediaItem; + @Nullable private final MediaItem.LiveConfiguration liveConfiguration; /** - * Creates a timeline containing a single period and a window that spans it. - * - * @param durationUs The duration of the period, in microseconds. - * @param isSeekable Whether seeking is supported within the period. - * @param isDynamic Whether the window may change when the timeline is updated. - * @param isLive Whether the window is live. + * @deprecated Use {@link #SinglePeriodTimeline(long, boolean, boolean, boolean, Object, + * MediaItem)} instead. */ + // Provide backwards compatibility. + @SuppressWarnings("deprecation") + @Deprecated public SinglePeriodTimeline( - long durationUs, boolean isSeekable, boolean isDynamic, boolean isLive) { - this(durationUs, isSeekable, isDynamic, isLive, /* manifest= */ null, /* tag= */ null); + long durationUs, + boolean isSeekable, + boolean isDynamic, + boolean isLive, + @Nullable Object manifest, + @Nullable Object tag) { + this( + durationUs, + durationUs, + /* windowPositionInPeriodUs= */ 0, + /* windowDefaultStartPositionUs= */ 0, + isSeekable, + isDynamic, + isLive, + manifest, + tag); } /** @@ -58,17 +77,18 @@ public SinglePeriodTimeline( * @param durationUs The duration of the period, in microseconds. * @param isSeekable Whether seeking is supported within the period. * @param isDynamic Whether the window may change when the timeline is updated. - * @param isLive Whether the window is live. + * @param useLiveConfiguration Whether the window is live and {@link MediaItem#liveConfiguration} + * is used to configure live playback behaviour. * @param manifest The manifest. May be {@code null}. - * @param tag A tag used for {@link Window#tag}. + * @param mediaItem A media item used for {@link Window#mediaItem}. */ public SinglePeriodTimeline( long durationUs, boolean isSeekable, boolean isDynamic, - boolean isLive, + boolean useLiveConfiguration, @Nullable Object manifest, - @Nullable Object tag) { + MediaItem mediaItem) { this( durationUs, durationUs, @@ -76,6 +96,38 @@ public SinglePeriodTimeline( /* windowDefaultStartPositionUs= */ 0, isSeekable, isDynamic, + useLiveConfiguration, + manifest, + mediaItem); + } + + /** + * @deprecated Use {@link #SinglePeriodTimeline(long, long, long, long, boolean, boolean, boolean, + * Object, MediaItem)} instead. + */ + // Provide backwards compatibility. 
+ @SuppressWarnings("deprecation") + @Deprecated + public SinglePeriodTimeline( + long periodDurationUs, + long windowDurationUs, + long windowPositionInPeriodUs, + long windowDefaultStartPositionUs, + boolean isSeekable, + boolean isDynamic, + boolean isLive, + @Nullable Object manifest, + @Nullable Object tag) { + this( + /* presentationStartTimeMs= */ C.TIME_UNSET, + /* windowStartTimeMs= */ C.TIME_UNSET, + /* elapsedRealtimeEpochOffsetMs= */ C.TIME_UNSET, + periodDurationUs, + windowDurationUs, + windowPositionInPeriodUs, + windowDefaultStartPositionUs, + isSeekable, + isDynamic, isLive, manifest, tag); @@ -93,9 +145,10 @@ public SinglePeriodTimeline( * which to begin playback, in microseconds. * @param isSeekable Whether seeking is supported within the window. * @param isDynamic Whether the window may change when the timeline is updated. - * @param isLive Whether the window is live. - * @param manifest The manifest. May be (@code null}. - * @param tag A tag used for {@link Timeline.Window#tag}. + * @param useLiveConfiguration Whether the window is live and {@link MediaItem#liveConfiguration} + * is used to configure live playback behaviour. + * @param manifest The manifest. May be {@code null}. + * @param mediaItem A media item used for {@link Timeline.Window#mediaItem}. */ public SinglePeriodTimeline( long periodDurationUs, @@ -104,21 +157,91 @@ public SinglePeriodTimeline( long windowDefaultStartPositionUs, boolean isSeekable, boolean isDynamic, - boolean isLive, + boolean useLiveConfiguration, @Nullable Object manifest, - @Nullable Object tag) { + MediaItem mediaItem) { this( /* presentationStartTimeMs= */ C.TIME_UNSET, /* windowStartTimeMs= */ C.TIME_UNSET, + /* elapsedRealtimeEpochOffsetMs= */ C.TIME_UNSET, periodDurationUs, windowDurationUs, windowPositionInPeriodUs, windowDefaultStartPositionUs, isSeekable, isDynamic, - isLive, + /* suppressPositionProjection= */ false, manifest, - tag); + mediaItem, + useLiveConfiguration ? mediaItem.liveConfiguration : null); + } + + /** + * @deprecated Use {@link #SinglePeriodTimeline(long, long, long, long, long, long, long, boolean, + * boolean, boolean, Object, MediaItem, MediaItem.LiveConfiguration)} instead. + */ + @Deprecated + public SinglePeriodTimeline( + long presentationStartTimeMs, + long windowStartTimeMs, + long elapsedRealtimeEpochOffsetMs, + long periodDurationUs, + long windowDurationUs, + long windowPositionInPeriodUs, + long windowDefaultStartPositionUs, + boolean isSeekable, + boolean isDynamic, + boolean isLive, + @Nullable Object manifest, + @Nullable Object tag) { + this( + presentationStartTimeMs, + windowStartTimeMs, + elapsedRealtimeEpochOffsetMs, + periodDurationUs, + windowDurationUs, + windowPositionInPeriodUs, + windowDefaultStartPositionUs, + isSeekable, + isDynamic, + /* suppressPositionProjection= */ false, + manifest, + MEDIA_ITEM.buildUpon().setTag(tag).build(), + isLive ? MEDIA_ITEM.liveConfiguration : null); + } + + /** + * @deprecated Use {@link #SinglePeriodTimeline(long, long, long, long, long, long, long, boolean, + * boolean, boolean, Object, MediaItem, MediaItem.LiveConfiguration)} instead. 
+ */ + @Deprecated + public SinglePeriodTimeline( + long presentationStartTimeMs, + long windowStartTimeMs, + long elapsedRealtimeEpochOffsetMs, + long periodDurationUs, + long windowDurationUs, + long windowPositionInPeriodUs, + long windowDefaultStartPositionUs, + boolean isSeekable, + boolean isDynamic, + @Nullable Object manifest, + MediaItem mediaItem, + @Nullable MediaItem.LiveConfiguration liveConfiguration) { + this( + presentationStartTimeMs, + windowStartTimeMs, + elapsedRealtimeEpochOffsetMs, + periodDurationUs, + windowDurationUs, + windowPositionInPeriodUs, + windowDefaultStartPositionUs, + isSeekable, + isDynamic, + /* suppressPositionProjection= */ false, + manifest, + mediaItem, + liveConfiguration); } /** @@ -126,8 +249,12 @@ public SinglePeriodTimeline( * position in the period. * * @param presentationStartTimeMs The start time of the presentation in milliseconds since the - * epoch. - * @param windowStartTimeMs The window's start time in milliseconds since the epoch. + * epoch, or {@link C#TIME_UNSET} if unknown or not applicable. + * @param windowStartTimeMs The window's start time in milliseconds since the epoch, or {@link + * C#TIME_UNSET} if unknown or not applicable. + * @param elapsedRealtimeEpochOffsetMs The offset between {@link + * android.os.SystemClock#elapsedRealtime()} and the time since the Unix epoch according to + * the clock of the media origin server, or {@link C#TIME_UNSET} if unknown or not applicable. * @param periodDurationUs The duration of the period in microseconds. * @param windowDurationUs The duration of the window in microseconds. * @param windowPositionInPeriodUs The position of the start of the window in the period, in @@ -136,33 +263,41 @@ public SinglePeriodTimeline( * which to begin playback, in microseconds. * @param isSeekable Whether seeking is supported within the window. * @param isDynamic Whether the window may change when the timeline is updated. - * @param isLive Whether the window is live. + * @param suppressPositionProjection Whether {@link #getWindow(int, Window, long) position + * projection} in a playlist should be suppressed. This only applies for dynamic timelines and + * is ignored otherwise. * @param manifest The manifest. May be {@code null}. - * @param tag A tag used for {@link Timeline.Window#tag}. + * @param mediaItem A media item used for {@link Timeline.Window#mediaItem}. + * @param liveConfiguration The configuration for live playback behaviour, or {@code null} if the + * window is not live. 
*/ public SinglePeriodTimeline( long presentationStartTimeMs, long windowStartTimeMs, + long elapsedRealtimeEpochOffsetMs, long periodDurationUs, long windowDurationUs, long windowPositionInPeriodUs, long windowDefaultStartPositionUs, boolean isSeekable, boolean isDynamic, - boolean isLive, + boolean suppressPositionProjection, @Nullable Object manifest, - @Nullable Object tag) { + MediaItem mediaItem, + @Nullable MediaItem.LiveConfiguration liveConfiguration) { this.presentationStartTimeMs = presentationStartTimeMs; this.windowStartTimeMs = windowStartTimeMs; + this.elapsedRealtimeEpochOffsetMs = elapsedRealtimeEpochOffsetMs; this.periodDurationUs = periodDurationUs; this.windowDurationUs = windowDurationUs; this.windowPositionInPeriodUs = windowPositionInPeriodUs; this.windowDefaultStartPositionUs = windowDefaultStartPositionUs; this.isSeekable = isSeekable; this.isDynamic = isDynamic; - this.isLive = isLive; + this.suppressPositionProjection = suppressPositionProjection; this.manifest = manifest; - this.tag = tag; + this.mediaItem = checkNotNull(mediaItem); + this.liveConfiguration = liveConfiguration; } @Override @@ -170,11 +305,12 @@ public int getWindowCount() { return 1; } + // Provide backwards compatibility. @Override public Window getWindow(int windowIndex, Window window, long defaultPositionProjectionUs) { Assertions.checkIndex(windowIndex, 0, 1); long windowDefaultStartPositionUs = this.windowDefaultStartPositionUs; - if (isDynamic && defaultPositionProjectionUs != 0) { + if (isDynamic && !suppressPositionProjection && defaultPositionProjectionUs != 0) { if (windowDurationUs == C.TIME_UNSET) { // Don't allow projection into a window that has an unknown duration. windowDefaultStartPositionUs = C.TIME_UNSET; @@ -188,17 +324,18 @@ public Window getWindow(int windowIndex, Window window, long defaultPositionProj } return window.set( Window.SINGLE_WINDOW_UID, - tag, + mediaItem, manifest, presentationStartTimeMs, windowStartTimeMs, + elapsedRealtimeEpochOffsetMs, isSeekable, isDynamic, - isLive, + liveConfiguration, windowDefaultStartPositionUs, windowDurationUs, - 0, - 0, + /* firstPeriodIndex= */ 0, + /* lastPeriodIndex= */ 0, windowPositionInPeriodUs); } @@ -210,7 +347,7 @@ public int getPeriodCount() { @Override public Period getPeriod(int periodIndex, Period period, boolean setIds) { Assertions.checkIndex(periodIndex, 0, 1); - Object uid = setIds ? UID : null; + @Nullable Object uid = setIds ? 
UID : null; return period.set(/* id= */ null, uid, 0, periodDurationUs, -windowPositionInPeriodUs); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SingleSampleMediaPeriod.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SingleSampleMediaPeriod.java index ca50c342b5..e589e8cca4 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SingleSampleMediaPeriod.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SingleSampleMediaPeriod.java @@ -22,16 +22,19 @@ import com.google.android.exoplayer2.SeekParameters; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.source.MediaSourceEventListener.EventDispatcher; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.upstream.DataSource; +import com.google.android.exoplayer2.upstream.DataSourceUtil; import com.google.android.exoplayer2.upstream.DataSpec; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy.LoadErrorInfo; import com.google.android.exoplayer2.upstream.Loader; import com.google.android.exoplayer2.upstream.Loader.LoadErrorAction; import com.google.android.exoplayer2.upstream.Loader.Loadable; import com.google.android.exoplayer2.upstream.StatsDataSource; import com.google.android.exoplayer2.upstream.TransferListener; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; import java.io.IOException; @@ -40,15 +43,13 @@ import org.checkerframework.checker.nullness.compatqual.NullableType; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * A {@link MediaPeriod} with a single sample. - */ -/* package */ final class SingleSampleMediaPeriod implements MediaPeriod, - Loader.Callback { +/** A {@link MediaPeriod} with a single sample. */ +/* package */ final class SingleSampleMediaPeriod + implements MediaPeriod, Loader.Callback { + + private static final String TAG = "SingleSampleMediaPeriod"; - /** - * The initial size of the allocation used to hold the sample data. - */ + /** The initial size of the allocation used to hold the sample data. */ private static final int INITIAL_SAMPLE_SIZE = 1024; private final DataSpec dataSpec; @@ -65,7 +66,6 @@ /* package */ final Format format; /* package */ final boolean treatLoadErrorsAsEndOfStream; - /* package */ boolean notifiedReadingStarted; /* package */ boolean loadingFinished; /* package */ byte @MonotonicNonNull [] sampleData; /* package */ int sampleSize; @@ -89,13 +89,11 @@ public SingleSampleMediaPeriod( this.treatLoadErrorsAsEndOfStream = treatLoadErrorsAsEndOfStream; tracks = new TrackGroupArray(new TrackGroup(format)); sampleStreams = new ArrayList<>(); - loader = new Loader("Loader:SingleSampleMediaPeriod"); - eventDispatcher.mediaPeriodCreated(); + loader = new Loader("SingleSampleMediaPeriod"); } public void release() { loader.release(); - eventDispatcher.mediaPeriodReleased(); } @Override @@ -104,7 +102,7 @@ public void prepare(Callback callback, long positionUs) { } @Override - public void maybeThrowPrepareError() throws IOException { + public void maybeThrowPrepareError() { // Do nothing. 
} @@ -115,7 +113,7 @@ public TrackGroupArray getTrackGroups() { @Override public long selectTracks( - @NullableType TrackSelection[] selections, + @NullableType ExoTrackSelection[] selections, boolean[] mayRetainStreamFlags, @NullableType SampleStream[] streams, boolean[] streamResetFlags, @@ -154,21 +152,21 @@ public boolean continueLoading(long positionUs) { if (transferListener != null) { dataSource.addTransferListener(transferListener); } + SourceLoadable loadable = new SourceLoadable(dataSpec, dataSource); long elapsedRealtimeMs = loader.startLoading( - new SourceLoadable(dataSpec, dataSource), + loadable, /* callback= */ this, loadErrorHandlingPolicy.getMinimumLoadableRetryCount(C.DATA_TYPE_MEDIA)); eventDispatcher.loadStarted( - dataSpec, + new LoadEventInfo(loadable.loadTaskId, dataSpec, elapsedRealtimeMs), C.DATA_TYPE_MEDIA, C.TRACK_TYPE_UNKNOWN, format, C.SELECTION_REASON_UNKNOWN, /* trackSelectionData= */ null, /* mediaStartTimeUs= */ 0, - durationUs, - elapsedRealtimeMs); + durationUs); return true; } @@ -179,10 +177,6 @@ public boolean isLoading() { @Override public long readDiscontinuity() { - if (!notifiedReadingStarted) { - eventDispatcher.readingStarted(); - notifiedReadingStarted = true; - } return C.TIME_UNSET; } @@ -212,44 +206,56 @@ public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParame // Loader.Callback implementation. @Override - public void onLoadCompleted(SourceLoadable loadable, long elapsedRealtimeMs, - long loadDurationMs) { + public void onLoadCompleted( + SourceLoadable loadable, long elapsedRealtimeMs, long loadDurationMs) { sampleSize = (int) loadable.dataSource.getBytesRead(); sampleData = Assertions.checkNotNull(loadable.sampleData); loadingFinished = true; + StatsDataSource dataSource = loadable.dataSource; + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + dataSource.getLastOpenedUri(), + dataSource.getLastResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + sampleSize); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); eventDispatcher.loadCompleted( - loadable.dataSpec, - loadable.dataSource.getLastOpenedUri(), - loadable.dataSource.getLastResponseHeaders(), + loadEventInfo, C.DATA_TYPE_MEDIA, C.TRACK_TYPE_UNKNOWN, format, C.SELECTION_REASON_UNKNOWN, /* trackSelectionData= */ null, /* mediaStartTimeUs= */ 0, - durationUs, - elapsedRealtimeMs, - loadDurationMs, - sampleSize); + durationUs); } @Override - public void onLoadCanceled(SourceLoadable loadable, long elapsedRealtimeMs, long loadDurationMs, - boolean released) { + public void onLoadCanceled( + SourceLoadable loadable, long elapsedRealtimeMs, long loadDurationMs, boolean released) { + StatsDataSource dataSource = loadable.dataSource; + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + dataSource.getLastOpenedUri(), + dataSource.getLastResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + dataSource.getBytesRead()); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); eventDispatcher.loadCanceled( - loadable.dataSpec, - loadable.dataSource.getLastOpenedUri(), - loadable.dataSource.getLastResponseHeaders(), + loadEventInfo, C.DATA_TYPE_MEDIA, C.TRACK_TYPE_UNKNOWN, /* trackFormat= */ null, C.SELECTION_REASON_UNKNOWN, /* trackSelectionData= */ null, /* mediaStartTimeUs= */ 0, - durationUs, - elapsedRealtimeMs, - loadDurationMs, - loadable.dataSource.getBytesRead()); + durationUs); } @Override @@ -259,9 +265,28 @@ public LoadErrorAction 
onLoadError( long loadDurationMs, IOException error, int errorCount) { + StatsDataSource dataSource = loadable.dataSource; + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + dataSource.getLastOpenedUri(), + dataSource.getLastResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + dataSource.getBytesRead()); + MediaLoadData mediaLoadData = + new MediaLoadData( + C.DATA_TYPE_MEDIA, + C.TRACK_TYPE_UNKNOWN, + format, + C.SELECTION_REASON_UNKNOWN, + /* trackSelectionData= */ null, + /* mediaStartTimeMs= */ 0, + Util.usToMs(durationUs)); long retryDelay = loadErrorHandlingPolicy.getRetryDelayMsFor( - C.DATA_TYPE_MEDIA, loadDurationMs, error, errorCount); + new LoadErrorInfo(loadEventInfo, mediaLoadData, error, errorCount)); boolean errorCanBePropagated = retryDelay == C.TIME_UNSET || errorCount @@ -269,6 +294,7 @@ public LoadErrorAction onLoadError( LoadErrorAction action; if (treatLoadErrorsAsEndOfStream && errorCanBePropagated) { + Log.w(TAG, "Loading failed, treating as end-of-stream.", error); loadingFinished = true; action = Loader.DONT_RETRY; } else { @@ -277,10 +303,9 @@ public LoadErrorAction onLoadError( ? Loader.createRetryAction(/* resetErrorCount= */ false, retryDelay) : Loader.DONT_RETRY_FATAL; } + boolean wasCanceled = !action.isRetry(); eventDispatcher.loadError( - loadable.dataSpec, - loadable.dataSource.getLastOpenedUri(), - loadable.dataSource.getLastResponseHeaders(), + loadEventInfo, C.DATA_TYPE_MEDIA, C.TRACK_TYPE_UNKNOWN, format, @@ -288,11 +313,11 @@ public LoadErrorAction onLoadError( /* trackSelectionData= */ null, /* mediaStartTimeUs= */ 0, durationUs, - elapsedRealtimeMs, - loadDurationMs, - loadable.dataSource.getBytesRead(), error, - /* wasCanceled= */ !action.isRetry()); + wasCanceled); + if (wasCanceled) { + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + } return action; } @@ -324,32 +349,39 @@ public void maybeThrowError() throws IOException { } @Override - public int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, - boolean requireFormat) { + public int readData( + FormatHolder formatHolder, DecoderInputBuffer buffer, @ReadFlags int readFlags) { maybeNotifyDownstreamFormat(); + if (loadingFinished && sampleData == null) { + streamState = STREAM_STATE_END_OF_STREAM; + } + if (streamState == STREAM_STATE_END_OF_STREAM) { buffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM); return C.RESULT_BUFFER_READ; - } else if (requireFormat || streamState == STREAM_STATE_SEND_FORMAT) { + } + + if ((readFlags & FLAG_REQUIRE_FORMAT) != 0 || streamState == STREAM_STATE_SEND_FORMAT) { formatHolder.format = format; streamState = STREAM_STATE_SEND_SAMPLE; return C.RESULT_FORMAT_READ; - } else if (loadingFinished) { - if (sampleData != null) { - buffer.addFlag(C.BUFFER_FLAG_KEY_FRAME); - buffer.timeUs = 0; - if (buffer.isFlagsOnly()) { - return C.RESULT_BUFFER_READ; - } - buffer.ensureSpaceForWrite(sampleSize); - buffer.data.put(sampleData, 0, sampleSize); - } else { - buffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM); - } + } + + if (!loadingFinished) { + return C.RESULT_NOTHING_READ; + } + Assertions.checkNotNull(sampleData); + + buffer.addFlag(C.BUFFER_FLAG_KEY_FRAME); + buffer.timeUs = 0; + if ((readFlags & FLAG_OMIT_SAMPLE_DATA) == 0) { + buffer.ensureSpaceForWrite(sampleSize); + buffer.data.put(sampleData, 0, sampleSize); + } + if ((readFlags & FLAG_PEEK) == 0) { streamState = STREAM_STATE_END_OF_STREAM; - return C.RESULT_BUFFER_READ; } - return C.RESULT_NOTHING_READ; + return C.RESULT_BUFFER_READ; } 
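/*
 * Illustrative sketch (not part of this diff): roughly how a caller drives the new
 * readData(FormatHolder, DecoderInputBuffer, @ReadFlags int) contract shown above.
 * The helper class/method names and the `stream` parameter are assumptions made for the
 * example; only the SampleStream/C/FormatHolder/DecoderInputBuffer APIs come from ExoPlayer.
 */
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.FormatHolder;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.source.SampleStream;

final class ReadDataSketch {
  /** Reads the stream format once, then attempts to read a single sample. */
  static void readFormatThenSample(SampleStream stream) {
    FormatHolder formatHolder = new FormatHolder();
    DecoderInputBuffer buffer =
        new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_NORMAL);
    // FLAG_REQUIRE_FORMAT never returns a sample, so this call only populates the format.
    if (stream.readData(formatHolder, buffer, SampleStream.FLAG_REQUIRE_FORMAT)
        == C.RESULT_FORMAT_READ) {
      // formatHolder.format now describes the stream.
    }
    buffer.clear();
    // A plain read. FLAG_PEEK would leave the read position unchanged, and
    // FLAG_OMIT_SAMPLE_DATA would populate timeUs/flags without copying sample data.
    int result = stream.readData(formatHolder, buffer, /* readFlags= */ 0);
    if (result == C.RESULT_BUFFER_READ && !buffer.isEndOfStream()) {
      // buffer.data and buffer.timeUs describe the sample that was read.
    }
  }
}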
@Override @@ -377,15 +409,15 @@ private void maybeNotifyDownstreamFormat() { /* package */ static final class SourceLoadable implements Loadable { + public final long loadTaskId; public final DataSpec dataSpec; private final StatsDataSource dataSource; @Nullable private byte[] sampleData; - // the constructor does not initialize fields: sampleData - @SuppressWarnings("nullness:initialization.fields.uninitialized") public SourceLoadable(DataSpec dataSpec, DataSource dataSource) { + this.loadTaskId = LoadEventInfo.getNewId(); this.dataSpec = dataSpec; this.dataSource = new StatsDataSource(dataSource); } @@ -396,7 +428,7 @@ public void cancelLoad() { } @Override - public void load() throws IOException, InterruptedException { + public void load() throws IOException { // We always load from the beginning, so reset bytesRead to 0. dataSource.resetBytesRead(); try { @@ -414,10 +446,8 @@ public void load() throws IOException, InterruptedException { result = dataSource.read(sampleData, sampleSize, sampleData.length - sampleSize); } } finally { - Util.closeQuietly(dataSource); + DataSourceUtil.closeQuietly(dataSource); } } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SingleSampleMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SingleSampleMediaSource.java index db1414942f..411f2656c1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SingleSampleMediaSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SingleSampleMediaSource.java @@ -15,10 +15,13 @@ */ package com.google.android.exoplayer2.source; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.common.base.MoreObjects.firstNonNull; + import android.net.Uri; -import android.os.Handler; import androidx.annotation.Nullable; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.Timeline; import com.google.android.exoplayer2.upstream.Allocator; import com.google.android.exoplayer2.upstream.DataSource; @@ -26,32 +29,15 @@ import com.google.android.exoplayer2.upstream.DefaultLoadErrorHandlingPolicy; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; import com.google.android.exoplayer2.upstream.TransferListener; -import com.google.android.exoplayer2.util.Assertions; -import java.io.IOException; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.common.collect.ImmutableList; +import com.google.errorprone.annotations.CanIgnoreReturnValue; /** * Loads data at a given {@link Uri} as a single sample belonging to a single {@link MediaPeriod}. */ public final class SingleSampleMediaSource extends BaseMediaSource { - /** - * Listener of {@link SingleSampleMediaSource} events. - * - * @deprecated Use {@link MediaSourceEventListener}. - */ - @Deprecated - public interface EventListener { - - /** - * Called when an error occurs loading media data. - * - * @param sourceId The id of the reporting {@link SingleSampleMediaSource}. - * @param e The cause of the failure. - */ - void onLoadError(int sourceId, IOException e); - - } - /** Factory for {@link SingleSampleMediaSource}. 
*/ public static final class Factory { @@ -59,8 +45,8 @@ public static final class Factory { private LoadErrorHandlingPolicy loadErrorHandlingPolicy; private boolean treatLoadErrorsAsEndOfStream; - private boolean isCreateCalled; @Nullable private Object tag; + @Nullable private String trackId; /** * Creates a factory for {@link SingleSampleMediaSource}s. @@ -69,70 +55,65 @@ public static final class Factory { * be obtained. */ public Factory(DataSource.Factory dataSourceFactory) { - this.dataSourceFactory = Assertions.checkNotNull(dataSourceFactory); + this.dataSourceFactory = checkNotNull(dataSourceFactory); loadErrorHandlingPolicy = new DefaultLoadErrorHandlingPolicy(); + treatLoadErrorsAsEndOfStream = true; } /** * Sets a tag for the media source which will be published in the {@link Timeline} of the source - * as {@link Timeline.Window#tag}. + * as {@link MediaItem.LocalConfiguration#tag Window#mediaItem.localConfiguration.tag}. * * @param tag A tag for the media source. * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. */ - public Factory setTag(Object tag) { - Assertions.checkState(!isCreateCalled); + @CanIgnoreReturnValue + public Factory setTag(@Nullable Object tag) { this.tag = tag; return this; } /** - * Sets the minimum number of times to retry if a loading error occurs. See {@link - * #setLoadErrorHandlingPolicy} for the default value. - * - *
<p>
      Calling this method is equivalent to calling {@link #setLoadErrorHandlingPolicy} with - * {@link DefaultLoadErrorHandlingPolicy#DefaultLoadErrorHandlingPolicy(int) - * DefaultLoadErrorHandlingPolicy(minLoadableRetryCount)} - * - * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - * @deprecated Use {@link #setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy)} instead. + * @deprecated Use {@link MediaItem.SubtitleConfiguration.Builder#setId(String)} instead (on the + * {@link MediaItem.SubtitleConfiguration} passed to {@link + * #createMediaSource(MediaItem.SubtitleConfiguration, long)}). {@code trackId} will only be + * used if {@link MediaItem.SubtitleConfiguration#id} is {@code null}. */ + @CanIgnoreReturnValue @Deprecated - public Factory setMinLoadableRetryCount(int minLoadableRetryCount) { - return setLoadErrorHandlingPolicy(new DefaultLoadErrorHandlingPolicy(minLoadableRetryCount)); + public Factory setTrackId(@Nullable String trackId) { + this.trackId = trackId; + return this; } /** * Sets the {@link LoadErrorHandlingPolicy}. The default value is created by calling {@link * DefaultLoadErrorHandlingPolicy#DefaultLoadErrorHandlingPolicy()}. * - *
<p>
      Calling this method overrides any calls to {@link #setMinLoadableRetryCount(int)}. - * * @param loadErrorHandlingPolicy A {@link LoadErrorHandlingPolicy}. * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. */ - public Factory setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy loadErrorHandlingPolicy) { - Assertions.checkState(!isCreateCalled); - this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; + @CanIgnoreReturnValue + public Factory setLoadErrorHandlingPolicy( + @Nullable LoadErrorHandlingPolicy loadErrorHandlingPolicy) { + this.loadErrorHandlingPolicy = + loadErrorHandlingPolicy != null + ? loadErrorHandlingPolicy + : new DefaultLoadErrorHandlingPolicy(); return this; } /** * Sets whether load errors will be treated as end-of-stream signal (load errors will not be - * propagated). The default value is false. + * propagated). The default value is true. * * @param treatLoadErrorsAsEndOfStream If true, load errors will not be propagated by sample * streams, treating them as ended instead. If false, load errors will be propagated * normally by {@link SampleStream#maybeThrowError()}. * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. */ + @CanIgnoreReturnValue public Factory setTreatLoadErrorsAsEndOfStream(boolean treatLoadErrorsAsEndOfStream) { - Assertions.checkState(!isCreateCalled); this.treatLoadErrorsAsEndOfStream = treatLoadErrorsAsEndOfStream; return this; } @@ -140,41 +121,21 @@ public Factory setTreatLoadErrorsAsEndOfStream(boolean treatLoadErrorsAsEndOfStr /** * Returns a new {@link SingleSampleMediaSource} using the current parameters. * - * @param uri The {@link Uri}. - * @param format The {@link Format} of the media stream. + * @param subtitleConfiguration The {@link MediaItem.SubtitleConfiguration}. * @param durationUs The duration of the media stream in microseconds. * @return The new {@link SingleSampleMediaSource}. */ - public SingleSampleMediaSource createMediaSource(Uri uri, Format format, long durationUs) { - isCreateCalled = true; + public SingleSampleMediaSource createMediaSource( + MediaItem.SubtitleConfiguration subtitleConfiguration, long durationUs) { return new SingleSampleMediaSource( - uri, + trackId, + subtitleConfiguration, dataSourceFactory, - format, durationUs, loadErrorHandlingPolicy, treatLoadErrorsAsEndOfStream, tag); } - - /** - * @deprecated Use {@link #createMediaSource(Uri, Format, long)} and {@link - * #addEventListener(Handler, MediaSourceEventListener)} instead. - */ - @Deprecated - public SingleSampleMediaSource createMediaSource( - Uri uri, - Format format, - long durationUs, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - SingleSampleMediaSource mediaSource = createMediaSource(uri, format, durationUs); - if (eventHandler != null && eventListener != null) { - mediaSource.addEventListener(eventHandler, eventListener); - } - return mediaSource; - } - } private final DataSpec dataSpec; @@ -184,127 +145,58 @@ public SingleSampleMediaSource createMediaSource( private final LoadErrorHandlingPolicy loadErrorHandlingPolicy; private final boolean treatLoadErrorsAsEndOfStream; private final Timeline timeline; - @Nullable private final Object tag; + private final MediaItem mediaItem; @Nullable private TransferListener transferListener; - /** - * @param uri The {@link Uri} of the media stream. 
- * @param dataSourceFactory The factory from which the {@link DataSource} to read the media will - * be obtained. - * @param format The {@link Format} associated with the output track. - * @param durationUs The duration of the media stream in microseconds. - * @deprecated Use {@link Factory} instead. - */ - @Deprecated - @SuppressWarnings("deprecation") - public SingleSampleMediaSource( - Uri uri, DataSource.Factory dataSourceFactory, Format format, long durationUs) { - this( - uri, - dataSourceFactory, - format, - durationUs, - DefaultLoadErrorHandlingPolicy.DEFAULT_MIN_LOADABLE_RETRY_COUNT); - } - - /** - * @param uri The {@link Uri} of the media stream. - * @param dataSourceFactory The factory from which the {@link DataSource} to read the media will - * be obtained. - * @param format The {@link Format} associated with the output track. - * @param durationUs The duration of the media stream in microseconds. - * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs. - * @deprecated Use {@link Factory} instead. - */ - @Deprecated - public SingleSampleMediaSource( - Uri uri, - DataSource.Factory dataSourceFactory, - Format format, - long durationUs, - int minLoadableRetryCount) { - this( - uri, - dataSourceFactory, - format, - durationUs, - new DefaultLoadErrorHandlingPolicy(minLoadableRetryCount), - /* treatLoadErrorsAsEndOfStream= */ false, - /* tag= */ null); - } - - /** - * @param uri The {@link Uri} of the media stream. - * @param dataSourceFactory The factory from which the {@link DataSource} to read the media will - * be obtained. - * @param format The {@link Format} associated with the output track. - * @param durationUs The duration of the media stream in microseconds. - * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs. - * @param eventHandler A handler for events. May be null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @param eventSourceId An identifier that gets passed to {@code eventListener} methods. - * @param treatLoadErrorsAsEndOfStream If true, load errors will not be propagated by sample - * streams, treating them as ended instead. If false, load errors will be propagated normally - * by {@link SampleStream#maybeThrowError()}. - * @deprecated Use {@link Factory} instead. 
- */ - @Deprecated - @SuppressWarnings("deprecation") - public SingleSampleMediaSource( - Uri uri, - DataSource.Factory dataSourceFactory, - Format format, - long durationUs, - int minLoadableRetryCount, - Handler eventHandler, - EventListener eventListener, - int eventSourceId, - boolean treatLoadErrorsAsEndOfStream) { - this( - uri, - dataSourceFactory, - format, - durationUs, - new DefaultLoadErrorHandlingPolicy(minLoadableRetryCount), - treatLoadErrorsAsEndOfStream, - /* tag= */ null); - if (eventHandler != null && eventListener != null) { - addEventListener(eventHandler, new EventListenerWrapper(eventListener, eventSourceId)); - } - } - private SingleSampleMediaSource( - Uri uri, + @Nullable String trackId, + MediaItem.SubtitleConfiguration subtitleConfiguration, DataSource.Factory dataSourceFactory, - Format format, long durationUs, LoadErrorHandlingPolicy loadErrorHandlingPolicy, boolean treatLoadErrorsAsEndOfStream, @Nullable Object tag) { this.dataSourceFactory = dataSourceFactory; - this.format = format; this.durationUs = durationUs; this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; this.treatLoadErrorsAsEndOfStream = treatLoadErrorsAsEndOfStream; - this.tag = tag; - dataSpec = new DataSpec(uri, DataSpec.FLAG_ALLOW_GZIP); - timeline = + this.mediaItem = + new MediaItem.Builder() + .setUri(Uri.EMPTY) + .setMediaId(subtitleConfiguration.uri.toString()) + .setSubtitleConfigurations(ImmutableList.of(subtitleConfiguration)) + .setTag(tag) + .build(); + this.format = + new Format.Builder() + .setSampleMimeType(firstNonNull(subtitleConfiguration.mimeType, MimeTypes.TEXT_UNKNOWN)) + .setLanguage(subtitleConfiguration.language) + .setSelectionFlags(subtitleConfiguration.selectionFlags) + .setRoleFlags(subtitleConfiguration.roleFlags) + .setLabel(subtitleConfiguration.label) + .setId(subtitleConfiguration.id != null ? subtitleConfiguration.id : trackId) + .build(); + this.dataSpec = + new DataSpec.Builder() + .setUri(subtitleConfiguration.uri) + .setFlags(DataSpec.FLAG_ALLOW_GZIP) + .build(); + this.timeline = new SinglePeriodTimeline( durationUs, /* isSeekable= */ true, /* isDynamic= */ false, - /* isLive= */ false, + /* useLiveConfiguration= */ false, /* manifest= */ null, - tag); + mediaItem); } // MediaSource implementation. @Override - @Nullable - public Object getTag() { - return tag; + public MediaItem getMediaItem() { + return mediaItem; } @Override @@ -314,7 +206,7 @@ protected void prepareSourceInternal(@Nullable TransferListener mediaTransferLis } @Override - public void maybeThrowSourceInfoRefreshError() throws IOException { + public void maybeThrowSourceInfoRefreshError() { // Do nothing. } @@ -340,32 +232,4 @@ public void releasePeriod(MediaPeriod mediaPeriod) { protected void releaseSourceInternal() { // Do nothing. } - - /** - * Wraps a deprecated {@link EventListener}, invoking its callback from the equivalent callback in - * {@link MediaSourceEventListener}. 
- */ - @Deprecated - @SuppressWarnings("deprecation") - private static final class EventListenerWrapper implements MediaSourceEventListener { - - private final EventListener eventListener; - private final int eventSourceId; - - public EventListenerWrapper(EventListener eventListener, int eventSourceId) { - this.eventListener = Assertions.checkNotNull(eventListener); - this.eventSourceId = eventSourceId; - } - - @Override - public void onLoadError( - int windowIndex, - @Nullable MediaPeriodId mediaPeriodId, - LoadEventInfo loadEventInfo, - MediaLoadData mediaLoadData, - IOException error, - boolean wasCanceled) { - eventListener.onLoadError(eventSourceId, error); - } - } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SpannedData.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SpannedData.java new file mode 100644 index 0000000000..3b303d887a --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/SpannedData.java @@ -0,0 +1,157 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static java.lang.Math.min; + +import android.util.SparseArray; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.util.Consumer; + +/** + * Stores value objects associated with spans of integer keys. + * + *
<p>
      This implementation is optimised for consecutive {@link #get(int)} calls with keys that are + * close to each other in value. + * + *
<p>
Spans are defined by their own {@code startKey} (inclusive) and the {@code startKey} of the + * next span (exclusive). The last span is open-ended. + * + * @param <V> The type of values stored in this collection. + */ +/* package */ final class SpannedData<V> { + + private int memoizedReadIndex; + + private final SparseArray<V> spans; + private final Consumer<V> removeCallback; + + /** Constructs an empty instance. */ + public SpannedData() { + this(/* removeCallback= */ value -> {}); + } + + /** + * Constructs an empty instance that invokes {@code removeCallback} on each value that is removed + * from the collection. + */ + public SpannedData(Consumer<V> removeCallback) { + spans = new SparseArray<>(); + this.removeCallback = removeCallback; + memoizedReadIndex = C.INDEX_UNSET; + } + + /** + * Returns the value associated with the span covering {@code key}. + * + *
<p>
      The collection must not be {@link #isEmpty() empty}. + * + * @param key The key to lookup in the collection. Must be greater than or equal to the previous + * value passed to {@link #discardTo(int)} (or zero after {@link #clear()} has been called). + * @return The value associated with the provided key. + */ + public V get(int key) { + if (memoizedReadIndex == C.INDEX_UNSET) { + memoizedReadIndex = 0; + } + while (memoizedReadIndex > 0 && key < spans.keyAt(memoizedReadIndex)) { + memoizedReadIndex--; + } + while (memoizedReadIndex < spans.size() - 1 && key >= spans.keyAt(memoizedReadIndex + 1)) { + memoizedReadIndex++; + } + return spans.valueAt(memoizedReadIndex); + } + + /** + * Adds a new span to the end starting at {@code startKey} and containing {@code value}. + * + *
<p>
      {@code startKey} must be greater than or equal to the start key of the previous span. If + * they're equal, the previous span is overwritten and it's passed to {@code removeCallback} (if + * set). + */ + public void appendSpan(int startKey, V value) { + if (memoizedReadIndex == C.INDEX_UNSET) { + checkState(spans.size() == 0); + memoizedReadIndex = 0; + } + + if (spans.size() > 0) { + int lastStartKey = spans.keyAt(spans.size() - 1); + checkArgument(startKey >= lastStartKey); + if (lastStartKey == startKey) { + removeCallback.accept(spans.valueAt(spans.size() - 1)); + } + } + spans.append(startKey, value); + } + + /** + * Returns the value associated with the end span. This is either the last value passed to {@link + * #appendSpan(int, Object)}, or the value of the span covering the index passed to {@link + * #discardFrom(int)}. + * + *
<p>
      The collection must not be {@link #isEmpty() empty}. + */ + public V getEndValue() { + return spans.valueAt(spans.size() - 1); + } + + /** + * Discard the spans from the start up to {@code discardToKey}. + * + *
<p>
      The span associated with {@code discardToKey} is not discarded (which means the last span is + * never discarded). + */ + public void discardTo(int discardToKey) { + for (int i = 0; i < spans.size() - 1 && discardToKey >= spans.keyAt(i + 1); i++) { + removeCallback.accept(spans.valueAt(i)); + spans.removeAt(i); + if (memoizedReadIndex > 0) { + memoizedReadIndex--; + } + } + } + + /** + * Discard the spans from the end back to {@code discardFromKey}. + * + *
<p>
      The span associated with {@code discardFromKey} is not discarded. + */ + public void discardFrom(int discardFromKey) { + for (int i = spans.size() - 1; i >= 0 && discardFromKey < spans.keyAt(i); i--) { + removeCallback.accept(spans.valueAt(i)); + spans.removeAt(i); + } + memoizedReadIndex = spans.size() > 0 ? min(memoizedReadIndex, spans.size() - 1) : C.INDEX_UNSET; + } + + /** Remove all spans. */ + public void clear() { + for (int i = 0; i < spans.size(); i++) { + removeCallback.accept(spans.valueAt(i)); + } + memoizedReadIndex = C.INDEX_UNSET; + spans.clear(); + } + + /** Returns true if the collection is empty. */ + public boolean isEmpty() { + return spans.size() == 0; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/TrackGroup.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/TrackGroup.java index 9e837bf05d..e0016540dd 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/TrackGroup.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/TrackGroup.java @@ -15,29 +15,53 @@ */ package com.google.android.exoplayer2.source; -import android.os.Parcel; -import android.os.Parcelable; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import android.os.Bundle; +import androidx.annotation.CheckResult; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Bundleable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.Tracks; +import com.google.android.exoplayer2.util.BundleableUtil; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List; -// TODO: Add an allowMultipleStreams boolean to indicate where the one stream per group restriction -// does not apply. /** - * Defines a group of tracks exposed by a {@link MediaPeriod}. + * An immutable group of tracks available within a media stream. All tracks in a group present the + * same content, but their formats may differ. + * + *
<p>
      As an example of how tracks can be grouped, consider an adaptive playback where a main video + * feed is provided in five resolutions, and an alternative video feed (e.g., a different camera + * angle in a sports match) is provided in two resolutions. In this case there will be two video + * track groups, one corresponding to the main video feed containing five tracks, and a second for + * the alternative video feed containing two tracks. * - *
<p>
      A {@link MediaPeriod} is only able to provide one {@link SampleStream} corresponding to a - * group at any given time, however this {@link SampleStream} may adapt between multiple tracks - * within the group. + *
<p>
      Note that audio tracks whose languages differ are not grouped, because content in different + * languages is not considered to be the same. Conversely, audio tracks in the same language that + * only differ in properties such as bitrate, sampling rate, channel count and so on can be grouped. + * This also applies to text tracks. + * + *
<p>
      Note also that this class only contains information derived from the media itself. Unlike + * {@link Tracks.Group}, it does not include runtime information such as the extent to which + * playback of each track is supported by the device, or which tracks are currently selected. */ -public final class TrackGroup implements Parcelable { +public final class TrackGroup implements Bundleable { - /** - * The number of tracks in the group. - */ + private static final String TAG = "TrackGroup"; + + /** The number of tracks in the group. */ public final int length; + /** An identifier for the track group. */ + public final String id; + /** The type of tracks in the group. */ + public final @C.TrackType int type; private final Format[] formats; @@ -45,20 +69,42 @@ public final class TrackGroup implements Parcelable { private int hashCode; /** - * @param formats The track formats. Must not be null, contain null elements or be of length 0. + * Constructs a track group containing the provided {@code formats}. + * + * @param formats The list of {@link Format Formats}. Must not be empty. */ public TrackGroup(Format... formats) { - Assertions.checkState(formats.length > 0); + this(/* id= */ "", formats); + } + + /** + * Constructs a track group with the provided {@code id} and {@code formats}. + * + * @param id The identifier of the track group. May be an empty string. + * @param formats The list of {@link Format Formats}. Must not be empty. + */ + public TrackGroup(String id, Format... formats) { + checkArgument(formats.length > 0); + this.id = id; this.formats = formats; this.length = formats.length; + @C.TrackType int type = MimeTypes.getTrackType(formats[0].sampleMimeType); + if (type == C.TRACK_TYPE_UNKNOWN) { + type = MimeTypes.getTrackType(formats[0].containerMimeType); + } + this.type = type; + verifyCorrectness(); } - /* package */ TrackGroup(Parcel in) { - length = in.readInt(); - formats = new Format[length]; - for (int i = 0; i < length; i++) { - formats[i] = in.readParcelable(Format.class.getClassLoader()); - } + /** + * Returns a copy of this track group with the specified {@code id}. + * + * @param id The identifier for the copy of the track group. + * @return The copied track group. + */ + @CheckResult + public TrackGroup copyWithId(String id) { + return new TrackGroup(id, formats); } /** @@ -93,6 +139,7 @@ public int indexOf(Format format) { public int hashCode() { if (hashCode == 0) { int result = 17; + result = 31 * result + id.hashCode(); result = 31 * result + Arrays.hashCode(formats); hashCode = result; } @@ -108,35 +155,91 @@ public boolean equals(@Nullable Object obj) { return false; } TrackGroup other = (TrackGroup) obj; - return length == other.length && Arrays.equals(formats, other.formats); + return id.equals(other.id) && Arrays.equals(formats, other.formats); } - // Parcelable implementation. + // Bundleable implementation. 
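A rough usage sketch of the reworked TrackGroup API in this hunk; the formats, the id and the helper class are invented for illustration, and the Bundle round trip relies on the Bundleable plumbing that follows below.

    import android.os.Bundle;
    import com.google.android.exoplayer2.Format;
    import com.google.android.exoplayer2.source.TrackGroup;
    import com.google.android.exoplayer2.util.MimeTypes;

    final class TrackGroupExample {
      static TrackGroup buildMainVideoGroup() {
        // Two renditions of the same video feed: same content, different quality.
        Format sd =
            new Format.Builder()
                .setSampleMimeType(MimeTypes.VIDEO_H264)
                .setWidth(640)
                .setHeight(360)
                .build();
        Format hd =
            new Format.Builder()
                .setSampleMimeType(MimeTypes.VIDEO_H264)
                .setWidth(1280)
                .setHeight(720)
                .build();
        TrackGroup group = new TrackGroup(/* id= */ "main-video", sd, hd);
        // The group survives a Bundle round trip via toBundle() and CREATOR.
        Bundle bundle = group.toBundle();
        TrackGroup restored = TrackGroup.CREATOR.fromBundle(bundle);
        return restored.copyWithId("main-video-copy");
      }
    }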
+ private static final String FIELD_FORMATS = Util.intToStringMaxRadix(0); + private static final String FIELD_ID = Util.intToStringMaxRadix(1); @Override - public int describeContents() { - return 0; + public Bundle toBundle() { + Bundle bundle = new Bundle(); + ArrayList arrayList = new ArrayList<>(formats.length); + for (Format format : formats) { + arrayList.add(format.toBundle(/* excludeMetadata= */ true)); + } + bundle.putParcelableArrayList(FIELD_FORMATS, arrayList); + bundle.putString(FIELD_ID, id); + return bundle; } - @Override - public void writeToParcel(Parcel dest, int flags) { - dest.writeInt(length); - for (int i = 0; i < length; i++) { - dest.writeParcelable(formats[i], 0); + /** Object that can restore {@code TrackGroup} from a {@link Bundle}. */ + public static final Creator CREATOR = + bundle -> { + @Nullable List formatBundles = bundle.getParcelableArrayList(FIELD_FORMATS); + List formats = + formatBundles == null + ? ImmutableList.of() + : BundleableUtil.fromBundleList(Format.CREATOR, formatBundles); + String id = bundle.getString(FIELD_ID, /* defaultValue= */ ""); + return new TrackGroup(id, formats.toArray(new Format[0])); + }; + + private void verifyCorrectness() { + // TrackGroups should only contain tracks with exactly the same content (but in different + // qualities). We only log an error instead of throwing to not break backwards-compatibility for + // cases where malformed TrackGroups happen to work by chance (e.g. because adaptive selections + // are always disabled). + String language = normalizeLanguage(formats[0].language); + @C.RoleFlags int roleFlags = normalizeRoleFlags(formats[0].roleFlags); + for (int i = 1; i < formats.length; i++) { + if (!language.equals(normalizeLanguage(formats[i].language))) { + logErrorMessage( + /* mismatchField= */ "languages", + /* valueIndex0= */ formats[0].language, + /* otherValue=* */ formats[i].language, + /* otherIndex= */ i); + return; + } + if (roleFlags != normalizeRoleFlags(formats[i].roleFlags)) { + logErrorMessage( + /* mismatchField= */ "role flags", + /* valueIndex0= */ Integer.toBinaryString(formats[0].roleFlags), + /* otherValue=* */ Integer.toBinaryString(formats[i].roleFlags), + /* otherIndex= */ i); + return; + } } } - public static final Parcelable.Creator CREATOR = - new Parcelable.Creator() { + private static String normalizeLanguage(@Nullable String language) { + // Treat all variants of undetermined or unknown languages as compatible. + return language == null || language.equals(C.LANGUAGE_UNDETERMINED) ? "" : language; + } - @Override - public TrackGroup createFromParcel(Parcel in) { - return new TrackGroup(in); - } + private static @C.RoleFlags int normalizeRoleFlags(@C.RoleFlags int roleFlags) { + // Treat trick-play and non-trick-play formats as compatible. 
+ return roleFlags | C.ROLE_FLAG_TRICK_PLAY; + } - @Override - public TrackGroup[] newArray(int size) { - return new TrackGroup[size]; - } - }; + private static void logErrorMessage( + String mismatchField, + @Nullable String valueIndex0, + @Nullable String otherValue, + int otherIndex) { + Log.e( + TAG, + "", + new IllegalStateException( + "Different " + + mismatchField + + " combined in one TrackGroup: '" + + valueIndex0 + + "' (track 0) and '" + + otherValue + + "' (track " + + otherIndex + + ")")); + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/TrackGroupArray.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/TrackGroupArray.java index e737a5fafa..66917a3ae0 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/TrackGroupArray.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/TrackGroupArray.java @@ -15,44 +15,50 @@ */ package com.google.android.exoplayer2.source; -import android.os.Parcel; -import android.os.Parcelable; +import android.os.Bundle; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Bundleable; import com.google.android.exoplayer2.C; -import java.util.Arrays; +import com.google.android.exoplayer2.util.BundleableUtil; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import java.util.List; + +/** + * An immutable array of {@link TrackGroup}s. + * + *
<p>
      This class is typically used to represent all of the tracks available in a piece of media. + * Tracks that are known to present the same content are grouped together (e.g., the same video feed + * provided at different resolutions in an adaptive stream). Tracks that are known to present + * different content are in separate track groups (e.g., an audio track will not be in the same + * group as a video track, and an audio track in one language will be in a different group to an + * audio track in another language). + */ +public final class TrackGroupArray implements Bundleable { -/** An array of {@link TrackGroup}s exposed by a {@link MediaPeriod}. */ -public final class TrackGroupArray implements Parcelable { + private static final String TAG = "TrackGroupArray"; - /** - * The empty array. - */ + /** The empty array. */ public static final TrackGroupArray EMPTY = new TrackGroupArray(); - /** - * The number of groups in the array. Greater than or equal to zero. - */ + /** The number of groups in the array. Greater than or equal to zero. */ public final int length; - private final TrackGroup[] trackGroups; + private final ImmutableList trackGroups; // Lazily initialized hashcode. private int hashCode; /** - * @param trackGroups The groups. Must not be null or contain null elements, but may be empty. + * Construct a {@code TrackGroupArray} from an array of {@link TrackGroup TrackGroups}. + * + *
<p>
      The groups must not contain duplicates. */ public TrackGroupArray(TrackGroup... trackGroups) { - this.trackGroups = trackGroups; + this.trackGroups = ImmutableList.copyOf(trackGroups); this.length = trackGroups.length; - } - - /* package */ TrackGroupArray(Parcel in) { - length = in.readInt(); - trackGroups = new TrackGroup[length]; - for (int i = 0; i < length; i++) { - trackGroups[i] = in.readParcelable(TrackGroup.class.getClassLoader()); - } + verifyCorrectness(); } /** @@ -62,7 +68,7 @@ public TrackGroupArray(TrackGroup... trackGroups) { * @return The group. */ public TrackGroup get(int index) { - return trackGroups[index]; + return trackGroups.get(index); } /** @@ -71,21 +77,12 @@ public TrackGroup get(int index) { * @param group The group. * @return The index of the group, or {@link C#INDEX_UNSET} if no such group exists. */ - @SuppressWarnings("ReferenceEquality") public int indexOf(TrackGroup group) { - for (int i = 0; i < length; i++) { - // Suppressed reference equality warning because this is looking for the index of a specific - // TrackGroup object, not the index of a potential equal TrackGroup. - if (trackGroups[i] == group) { - return i; - } - } - return C.INDEX_UNSET; + int index = trackGroups.indexOf(group); + return index >= 0 ? index : C.INDEX_UNSET; } - /** - * Returns whether this track group array is empty. - */ + /** Returns whether this track group array is empty. */ public boolean isEmpty() { return length == 0; } @@ -93,7 +90,7 @@ public boolean isEmpty() { @Override public int hashCode() { if (hashCode == 0) { - hashCode = Arrays.hashCode(trackGroups); + hashCode = trackGroups.hashCode(); } return hashCode; } @@ -107,35 +104,45 @@ public boolean equals(@Nullable Object obj) { return false; } TrackGroupArray other = (TrackGroupArray) obj; - return length == other.length && Arrays.equals(trackGroups, other.trackGroups); + return length == other.length && trackGroups.equals(other.trackGroups); } - // Parcelable implementation. + // Bundleable implementation. - @Override - public int describeContents() { - return 0; - } + private static final String FIELD_TRACK_GROUPS = Util.intToStringMaxRadix(0); @Override - public void writeToParcel(Parcel dest, int flags) { - dest.writeInt(length); - for (int i = 0; i < length; i++) { - dest.writeParcelable(trackGroups[i], 0); - } + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putParcelableArrayList( + FIELD_TRACK_GROUPS, BundleableUtil.toBundleArrayList(trackGroups)); + return bundle; } - public static final Parcelable.Creator CREATOR = - new Parcelable.Creator() { - - @Override - public TrackGroupArray createFromParcel(Parcel in) { - return new TrackGroupArray(in); + /** Object that can restores a TrackGroupArray from a {@link Bundle}. 
*/ + public static final Creator CREATOR = + bundle -> { + @Nullable + List trackGroupBundles = bundle.getParcelableArrayList(FIELD_TRACK_GROUPS); + if (trackGroupBundles == null) { + return new TrackGroupArray(); } + return new TrackGroupArray( + BundleableUtil.fromBundleList(TrackGroup.CREATOR, trackGroupBundles) + .toArray(new TrackGroup[0])); + }; - @Override - public TrackGroupArray[] newArray(int size) { - return new TrackGroupArray[size]; + private void verifyCorrectness() { + for (int i = 0; i < trackGroups.size(); i++) { + for (int j = i + 1; j < trackGroups.size(); j++) { + if (trackGroups.get(i).equals(trackGroups.get(j))) { + Log.e( + TAG, + "", + new IllegalArgumentException( + "Multiple identical TrackGroups added to one TrackGroupArray.")); } - }; + } + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/UnrecognizedInputFormatException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/UnrecognizedInputFormatException.java index 508bf0e365..ec94e40777 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/UnrecognizedInputFormatException.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/UnrecognizedInputFormatException.java @@ -16,16 +16,13 @@ package com.google.android.exoplayer2.source; import android.net.Uri; +import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; -/** - * Thrown if the input format was not recognized. - */ +/** Thrown if the input format was not recognized. */ public class UnrecognizedInputFormatException extends ParserException { - /** - * The {@link Uri} from which the unrecognized data was read. - */ + /** The {@link Uri} from which the unrecognized data was read. */ public final Uri uri; /** @@ -33,8 +30,7 @@ public class UnrecognizedInputFormatException extends ParserException { * @param uri The {@link Uri} from which the unrecognized data was read. */ public UnrecognizedInputFormatException(String message, Uri uri) { - super(message); + super(message, /* cause= */ null, /* contentIsMalformed= */ false, C.DATA_TYPE_MEDIA); this.uri = uri; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/WrappingMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/WrappingMediaSource.java new file mode 100644 index 0000000000..fe29c5a17f --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/WrappingMediaSource.java @@ -0,0 +1,229 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.source; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.MediaItem; +import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.upstream.Allocator; +import com.google.android.exoplayer2.upstream.TransferListener; + +/** + * An abstract {@link MediaSource} wrapping a single child {@link MediaSource}. + * + *
<p>
      The implementation may want to override the following methods as needed: + * + *

        + *
      • {@link #getMediaItem()}: Amend the {@link MediaItem} for this media source. This is only + * used before the child source is prepared. + *
      • {@link #onChildSourceInfoRefreshed(Timeline)}: Called whenever the child source's {@link + * Timeline} changed. This {@link Timeline} can be amended if needed, for example using {@link + * ForwardingTimeline}. The {@link Timeline} for the wrapping source needs to be published + * with {@link #refreshSourceInfo(Timeline)}. + *
      • {@link #createPeriod}/{@link #releasePeriod}: These methods create and release {@link + * MediaPeriod} instances. They typically forward to the wrapped media source and optionally + * wrap the returned {@link MediaPeriod}. + *
      + * + *
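A minimal sketch of such a wrapper, assuming an invented subclass name and media id: it amends the reported MediaItem and republishes the child timeline unchanged, as described in the list above.

    import com.google.android.exoplayer2.MediaItem;
    import com.google.android.exoplayer2.Timeline;
    import com.google.android.exoplayer2.source.MediaSource;
    import com.google.android.exoplayer2.source.WrappingMediaSource;

    final class RelabelingMediaSource extends WrappingMediaSource {

      RelabelingMediaSource(MediaSource child) {
        super(child);
      }

      @Override
      public MediaItem getMediaItem() {
        // Only used before the child source is prepared.
        return mediaSource.getMediaItem().buildUpon().setMediaId("relabeled").build();
      }

      @Override
      protected void onChildSourceInfoRefreshed(Timeline newTimeline) {
        // The child timeline could be amended here (e.g. with a ForwardingTimeline subclass)
        // before being published for the wrapping source.
        refreshSourceInfo(newTimeline);
      }
    }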
<p>
      Other methods like {@link #prepareSourceInternal}, {@link #enableInternal}, {@link + * #disableInternal} or {@link #releaseSourceInternal} only need to be overwritten if required for + * resource management. + */ +public abstract class WrappingMediaSource extends CompositeMediaSource { + + private static final Void CHILD_SOURCE_ID = null; + + /** The wrapped child {@link MediaSource}. */ + protected final MediaSource mediaSource; + + /** + * Creates the wrapping {@link MediaSource}. + * + * @param mediaSource The wrapped child {@link MediaSource}. + */ + protected WrappingMediaSource(MediaSource mediaSource) { + this.mediaSource = mediaSource; + } + + @Override + protected final void prepareSourceInternal(@Nullable TransferListener mediaTransferListener) { + super.prepareSourceInternal(mediaTransferListener); + prepareSourceInternal(); + } + + /** + * Starts source preparation and enables the source, see {@link #prepareSource(MediaSourceCaller, + * TransferListener, PlayerId)}. This method is called at most once until the next call to {@link + * #releaseSourceInternal()}. + */ + protected void prepareSourceInternal() { + prepareChildSource(); + } + + @Nullable + @Override + public Timeline getInitialTimeline() { + return mediaSource.getInitialTimeline(); + } + + @Override + public boolean isSingleWindow() { + return mediaSource.isSingleWindow(); + } + + /** + * Returns the {@link MediaItem} for this media source. + * + *
<p>
      This method can be overridden to amend the {@link MediaItem} of the child source. It is only + * used before the child source is prepared. + * + * @see MediaSource#getMediaItem() + */ + @Override + public MediaItem getMediaItem() { + return mediaSource.getMediaItem(); + } + + /** + * Creates the requested {@link MediaPeriod}. + * + *
<p>
      This method typically forwards to the wrapped media source and optionally wraps the returned + * {@link MediaPeriod}. + * + * @see MediaSource#createPeriod(MediaPeriodId, Allocator, long) + */ + @Override + public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long startPositionUs) { + return mediaSource.createPeriod(id, allocator, startPositionUs); + } + + /** + * Releases a {@link MediaPeriod}. + * + *
<p>
      This method typically forwards to the wrapped media source and optionally unwraps the + * provided {@link MediaPeriod}. + * + * @see MediaSource#releasePeriod(MediaPeriod) + */ + @Override + public void releasePeriod(MediaPeriod mediaPeriod) { + mediaSource.releasePeriod(mediaPeriod); + } + + @Override + protected final void onChildSourceInfoRefreshed( + Void childSourceId, MediaSource mediaSource, Timeline newTimeline) { + onChildSourceInfoRefreshed(newTimeline); + } + + /** + * Called when the child source info has been refreshed. + * + *
<p>
      This {@link Timeline} can be amended if needed, for example using {@link + * ForwardingTimeline}. The {@link Timeline} for the wrapping source needs to be published with + * {@link #refreshSourceInfo(Timeline)}. + * + * @param newTimeline The timeline of the child source. + */ + protected void onChildSourceInfoRefreshed(Timeline newTimeline) { + refreshSourceInfo(newTimeline); + } + + @Override + protected final int getWindowIndexForChildWindowIndex(Void childSourceId, int windowIndex) { + return getWindowIndexForChildWindowIndex(windowIndex); + } + + /** + * Returns the window index in the wrapping source corresponding to the specified window index in + * a child source. The default implementation does not change the window index. + * + * @param windowIndex A window index of the child source. + * @return The corresponding window index in the wrapping source. + */ + protected int getWindowIndexForChildWindowIndex(int windowIndex) { + return windowIndex; + } + + @Nullable + @Override + protected final MediaPeriodId getMediaPeriodIdForChildMediaPeriodId( + Void childSourceId, MediaPeriodId mediaPeriodId) { + return getMediaPeriodIdForChildMediaPeriodId(mediaPeriodId); + } + + /** + * Returns the {@link MediaPeriodId} in the wrapping source corresponding to the specified {@link + * MediaPeriodId} in a child source. The default implementation does not change the media period + * id. + * + * @param mediaPeriodId A {@link MediaPeriodId} of the child source. + * @return The corresponding {@link MediaPeriodId} in the wrapping source. Null if no + * corresponding media period id can be determined. + */ + @Nullable + protected MediaPeriodId getMediaPeriodIdForChildMediaPeriodId(MediaPeriodId mediaPeriodId) { + return mediaPeriodId; + } + + @Override + protected final long getMediaTimeForChildMediaTime(Void childSourceId, long mediaTimeMs) { + return getMediaTimeForChildMediaTime(mediaTimeMs); + } + + /** + * Returns the media time in the {@link MediaPeriod} of the wrapping source corresponding to the + * specified media time in the {@link MediaPeriod} of the child source. The default implementation + * does not change the media time. + * + * @param mediaTimeMs A media time in the {@link MediaPeriod} of the child source, in + * milliseconds. + * @return The corresponding media time in the {@link MediaPeriod} of the wrapping source, in + * milliseconds. + */ + protected long getMediaTimeForChildMediaTime(long mediaTimeMs) { + return mediaTimeMs; + } + + /** + * Prepares the wrapped child source. + * + *
<p>
      {@link #onChildSourceInfoRefreshed(Timeline)} will be called when the child source updates + * its timeline. + * + *
<p>
      If sources aren't explicitly released with {@link #releaseChildSource()} they will be + * released in {@link #releaseSourceInternal()}. + */ + protected final void prepareChildSource() { + prepareChildSource(CHILD_SOURCE_ID, mediaSource); + } + + /** Enables the child source. */ + protected final void enableChildSource() { + enableChildSource(CHILD_SOURCE_ID); + } + + /** Disables the child source. */ + protected final void disableChildSource() { + disableChildSource(CHILD_SOURCE_ID); + } + + /** Releases the child source. */ + protected final void releaseChildSource() { + releaseChildSource(CHILD_SOURCE_ID); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/AdPlaybackState.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/AdPlaybackState.java index 3a093ca79f..6054def4ff 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/AdPlaybackState.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/AdPlaybackState.java @@ -15,16 +15,30 @@ */ package com.google.android.exoplayer2.source.ads; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static java.lang.Math.max; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.net.Uri; +import android.os.Bundle; import androidx.annotation.CheckResult; import androidx.annotation.IntDef; +import androidx.annotation.IntRange; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Bundleable; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.Timeline; import com.google.android.exoplayer2.util.Util; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.util.ArrayList; import java.util.Arrays; import org.checkerframework.checker.nullness.compatqual.NullableType; @@ -34,7 +48,7 @@ *
<p>
      Instances are immutable. Call the {@code with*} methods to get new instances that have the * required changes. */ -public final class AdPlaybackState { +public final class AdPlaybackState implements Bundleable { /** * Represents a group of ads, with information about their states. @@ -42,33 +56,72 @@ public final class AdPlaybackState { *
<p>
      Instances are immutable. Call the {@code with*} methods to get new instances that have the * required changes. */ - public static final class AdGroup { + public static final class AdGroup implements Bundleable { + /** + * The time of the ad group in the {@link Timeline.Period}, in microseconds, or {@link + * C#TIME_END_OF_SOURCE} to indicate a postroll ad. + */ + public final long timeUs; /** The number of ads in the ad group, or {@link C#LENGTH_UNSET} if unknown. */ public final int count; + /** + * The original number of ads in the ad group in case the ad group is only partially available, + * or {@link C#LENGTH_UNSET} if unknown. An ad can be partially available when a server side + * inserted ad live stream is joined while an ad is already playing and some ad information is + * missing. + */ + public final int originalCount; /** The URI of each ad in the ad group. */ public final @NullableType Uri[] uris; /** The state of each ad in the ad group. */ - @AdState public final int[] states; + public final @AdState int[] states; /** The durations of each ad in the ad group, in microseconds. */ public final long[] durationsUs; + /** + * The offset in microseconds which should be added to the content stream when resuming playback + * after the ad group. + */ + public final long contentResumeOffsetUs; + /** Whether this ad group is server-side inserted and part of the content stream. */ + public final boolean isServerSideInserted; - /** Creates a new ad group with an unspecified number of ads. */ - public AdGroup() { + /** + * Creates a new ad group with an unspecified number of ads. + * + * @param timeUs The time of the ad group in the {@link Timeline.Period}, in microseconds, or + * {@link C#TIME_END_OF_SOURCE} to indicate a postroll ad. + */ + public AdGroup(long timeUs) { this( + timeUs, /* count= */ C.LENGTH_UNSET, + /* originalCount= */ C.LENGTH_UNSET, /* states= */ new int[0], /* uris= */ new Uri[0], - /* durationsUs= */ new long[0]); + /* durationsUs= */ new long[0], + /* contentResumeOffsetUs= */ 0, + /* isServerSideInserted= */ false); } private AdGroup( - int count, @AdState int[] states, @NullableType Uri[] uris, long[] durationsUs) { - Assertions.checkArgument(states.length == uris.length); + long timeUs, + int count, + int originalCount, + @AdState int[] states, + @NullableType Uri[] uris, + long[] durationsUs, + long contentResumeOffsetUs, + boolean isServerSideInserted) { + checkArgument(states.length == uris.length); + this.timeUs = timeUs; this.count = count; + this.originalCount = originalCount; this.states = states; this.uris = uris; this.durationsUs = durationsUs; + this.contentResumeOffsetUs = contentResumeOffsetUs; + this.isServerSideInserted = isServerSideInserted; } /** @@ -81,12 +134,17 @@ public int getFirstAdIndexToPlay() { /** * Returns the index of the next ad in the ad group that should be played after playing {@code - * lastPlayedAdIndex}, or {@link #count} if no later ads should be played. + * lastPlayedAdIndex}, or {@link #count} if no later ads should be played. If no ads have been + * played, pass -1 to get the index of the first ad to play. + * + *
<p>
      Note: {@linkplain #isServerSideInserted Server side inserted ads} are always considered + * playable. */ - public int getNextAdIndexToPlay(int lastPlayedAdIndex) { + public int getNextAdIndexToPlay(@IntRange(from = -1) int lastPlayedAdIndex) { int nextAdIndexToPlay = lastPlayedAdIndex + 1; while (nextAdIndexToPlay < states.length) { - if (states[nextAdIndexToPlay] == AD_STATE_UNAVAILABLE + if (isServerSideInserted + || states[nextAdIndexToPlay] == AD_STATE_UNAVAILABLE || states[nextAdIndexToPlay] == AD_STATE_AVAILABLE) { break; } @@ -95,11 +153,26 @@ public int getNextAdIndexToPlay(int lastPlayedAdIndex) { return nextAdIndexToPlay; } - /** Returns whether the ad group has at least one ad that still needs to be played. */ - public boolean hasUnplayedAds() { + /** Returns whether the ad group has at least one ad that should be played. */ + public boolean shouldPlayAdGroup() { return count == C.LENGTH_UNSET || getFirstAdIndexToPlay() < count; } + /** + * Returns whether the ad group has at least one ad that is neither played, skipped, nor failed. + */ + public boolean hasUnplayedAds() { + if (count == C.LENGTH_UNSET) { + return true; + } + for (int i = 0; i < count; i++) { + if (states[i] == AD_STATE_UNAVAILABLE || states[i] == AD_STATE_AVAILABLE) { + return true; + } + } + return false; + } + @Override public boolean equals(@Nullable Object o) { if (this == o) { @@ -109,28 +182,58 @@ public boolean equals(@Nullable Object o) { return false; } AdGroup adGroup = (AdGroup) o; - return count == adGroup.count + return timeUs == adGroup.timeUs + && count == adGroup.count + && originalCount == adGroup.originalCount && Arrays.equals(uris, adGroup.uris) && Arrays.equals(states, adGroup.states) - && Arrays.equals(durationsUs, adGroup.durationsUs); + && Arrays.equals(durationsUs, adGroup.durationsUs) + && contentResumeOffsetUs == adGroup.contentResumeOffsetUs + && isServerSideInserted == adGroup.isServerSideInserted; } @Override public int hashCode() { int result = count; + result = 31 * result + originalCount; + result = 31 * result + (int) (timeUs ^ (timeUs >>> 32)); result = 31 * result + Arrays.hashCode(uris); result = 31 * result + Arrays.hashCode(states); result = 31 * result + Arrays.hashCode(durationsUs); + result = 31 * result + (int) (contentResumeOffsetUs ^ (contentResumeOffsetUs >>> 32)); + result = 31 * result + (isServerSideInserted ? 1 : 0); return result; } + /** Returns a new instance with the {@link #timeUs} set to the specified value. */ + @CheckResult + public AdGroup withTimeUs(long timeUs) { + return new AdGroup( + timeUs, + count, + originalCount, + states, + uris, + durationsUs, + contentResumeOffsetUs, + isServerSideInserted); + } + /** Returns a new instance with the ad count set to {@code count}. */ @CheckResult public AdGroup withAdCount(int count) { @AdState int[] states = copyStatesWithSpaceForAdCount(this.states, count); long[] durationsUs = copyDurationsUsWithSpaceForAdCount(this.durationsUs, count); @NullableType Uri[] uris = Arrays.copyOf(this.uris, count); - return new AdGroup(count, states, uris, durationsUs); + return new AdGroup( + timeUs, + count, + originalCount, + states, + uris, + durationsUs, + contentResumeOffsetUs, + isServerSideInserted); } /** @@ -138,7 +241,7 @@ public AdGroup withAdCount(int count) { * marked as {@link #AD_STATE_AVAILABLE}. 
*/ @CheckResult - public AdGroup withAdUri(Uri uri, int index) { + public AdGroup withAdUri(Uri uri, @IntRange(from = 0) int index) { @AdState int[] states = copyStatesWithSpaceForAdCount(this.states, index + 1); long[] durationsUs = this.durationsUs.length == states.length @@ -147,7 +250,15 @@ public AdGroup withAdUri(Uri uri, int index) { @NullableType Uri[] uris = Arrays.copyOf(this.uris, states.length); uris[index] = uri; states[index] = AD_STATE_AVAILABLE; - return new AdGroup(count, states, uris, durationsUs); + return new AdGroup( + timeUs, + count, + originalCount, + states, + uris, + durationsUs, + contentResumeOffsetUs, + isServerSideInserted); } /** @@ -159,10 +270,10 @@ public AdGroup withAdUri(Uri uri, int index) { * ad count specified later. Otherwise, {@code index} must be less than the current ad count. */ @CheckResult - public AdGroup withAdState(@AdState int state, int index) { - Assertions.checkArgument(count == C.LENGTH_UNSET || index < count); - @AdState int[] states = copyStatesWithSpaceForAdCount(this.states, index + 1); - Assertions.checkArgument( + public AdGroup withAdState(@AdState int state, @IntRange(from = 0) int index) { + checkArgument(count == C.LENGTH_UNSET || index < count); + @AdState int[] states = copyStatesWithSpaceForAdCount(this.states, /* count= */ index + 1); + checkArgument( states[index] == AD_STATE_UNAVAILABLE || states[index] == AD_STATE_AVAILABLE || states[index] == state); @@ -174,17 +285,95 @@ public AdGroup withAdState(@AdState int state, int index) { Uri[] uris = this.uris.length == states.length ? this.uris : Arrays.copyOf(this.uris, states.length); states[index] = state; - return new AdGroup(count, states, uris, durationsUs); + return new AdGroup( + timeUs, + count, + originalCount, + states, + uris, + durationsUs, + contentResumeOffsetUs, + isServerSideInserted); } /** Returns a new instance with the specified ad durations, in microseconds. */ @CheckResult public AdGroup withAdDurationsUs(long[] durationsUs) { - Assertions.checkArgument(count == C.LENGTH_UNSET || durationsUs.length <= this.uris.length); - if (durationsUs.length < this.uris.length) { + if (durationsUs.length < uris.length) { durationsUs = copyDurationsUsWithSpaceForAdCount(durationsUs, uris.length); + } else if (count != C.LENGTH_UNSET && durationsUs.length > uris.length) { + durationsUs = Arrays.copyOf(durationsUs, uris.length); } - return new AdGroup(count, states, uris, durationsUs); + return new AdGroup( + timeUs, + count, + originalCount, + states, + uris, + durationsUs, + contentResumeOffsetUs, + isServerSideInserted); + } + + /** Returns an instance with the specified {@link #contentResumeOffsetUs}. */ + @CheckResult + public AdGroup withContentResumeOffsetUs(long contentResumeOffsetUs) { + return new AdGroup( + timeUs, + count, + originalCount, + states, + uris, + durationsUs, + contentResumeOffsetUs, + isServerSideInserted); + } + + /** Returns an instance with the specified value for {@link #isServerSideInserted}. */ + @CheckResult + public AdGroup withIsServerSideInserted(boolean isServerSideInserted) { + return new AdGroup( + timeUs, + count, + originalCount, + states, + uris, + durationsUs, + contentResumeOffsetUs, + isServerSideInserted); + } + + /** Returns an instance with the specified value for {@link #originalCount}. 
*/ + public AdGroup withOriginalAdCount(int originalCount) { + return new AdGroup( + timeUs, + count, + originalCount, + states, + uris, + durationsUs, + contentResumeOffsetUs, + isServerSideInserted); + } + + /** Removes the last ad from the ad group. */ + public AdGroup withLastAdRemoved() { + int newCount = states.length - 1; + @AdState int[] newStates = Arrays.copyOf(states, newCount); + @NullableType Uri[] newUris = Arrays.copyOf(uris, newCount); + long[] newDurationsUs = durationsUs; + if (durationsUs.length > newCount) { + newDurationsUs = Arrays.copyOf(durationsUs, newCount); + } + return new AdGroup( + timeUs, + newCount, + originalCount, + newStates, + newUris, + newDurationsUs, + /* contentResumeOffsetUs= */ Util.sum(newDurationsUs), + isServerSideInserted); } /** @@ -195,10 +384,14 @@ public AdGroup withAdDurationsUs(long[] durationsUs) { public AdGroup withAllAdsSkipped() { if (count == C.LENGTH_UNSET) { return new AdGroup( + timeUs, /* count= */ 0, + originalCount, /* states= */ new int[0], /* uris= */ new Uri[0], - /* durationsUs= */ new long[0]); + /* durationsUs= */ new long[0], + contentResumeOffsetUs, + isServerSideInserted); } int count = this.states.length; @AdState int[] states = Arrays.copyOf(this.states, count); @@ -207,13 +400,50 @@ public AdGroup withAllAdsSkipped() { states[i] = AD_STATE_SKIPPED; } } - return new AdGroup(count, states, uris, durationsUs); + return new AdGroup( + timeUs, + count, + originalCount, + states, + uris, + durationsUs, + contentResumeOffsetUs, + isServerSideInserted); + } + + /** + * Returns an instance with all ads in final states (played, skipped, error) reset to either + * available or unavailable, which allows to play them again. + */ + @CheckResult + public AdGroup withAllAdsReset() { + if (count == C.LENGTH_UNSET) { + return this; + } + int count = this.states.length; + @AdState int[] states = Arrays.copyOf(this.states, count); + for (int i = 0; i < count; i++) { + if (states[i] == AD_STATE_PLAYED + || states[i] == AD_STATE_SKIPPED + || states[i] == AD_STATE_ERROR) { + states[i] = uris[i] == null ? AD_STATE_UNAVAILABLE : AD_STATE_AVAILABLE; + } + } + return new AdGroup( + timeUs, + count, + originalCount, + states, + uris, + durationsUs, + contentResumeOffsetUs, + isServerSideInserted); } @CheckResult private static @AdState int[] copyStatesWithSpaceForAdCount(@AdState int[] states, int count) { int oldStateCount = states.length; - int newStateCount = Math.max(count, oldStateCount); + int newStateCount = max(count, oldStateCount); states = Arrays.copyOf(states, newStateCount); Arrays.fill(states, oldStateCount, newStateCount, AD_STATE_UNAVAILABLE); return states; @@ -222,11 +452,66 @@ public AdGroup withAllAdsSkipped() { @CheckResult private static long[] copyDurationsUsWithSpaceForAdCount(long[] durationsUs, int count) { int oldDurationsUsCount = durationsUs.length; - int newDurationsUsCount = Math.max(count, oldDurationsUsCount); + int newDurationsUsCount = max(count, oldDurationsUsCount); durationsUs = Arrays.copyOf(durationsUs, newDurationsUsCount); Arrays.fill(durationsUs, oldDurationsUsCount, newDurationsUsCount, C.TIME_UNSET); return durationsUs; } + + // Bundleable implementation. 
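A rough sketch of assembling a single AdGroup with the immutable with* methods above; the time, count, URIs and durations are invented for illustration.

    import android.net.Uri;
    import com.google.android.exoplayer2.source.ads.AdPlaybackState.AdGroup;

    final class AdGroupExample {
      static AdGroup buildMidrollGroup() {
        AdGroup group =
            new AdGroup(/* timeUs= */ 10_000_000L)
                .withAdCount(2)
                .withAdUri(Uri.parse("https://example.com/ad1.m3u8"), /* index= */ 0)
                .withAdUri(Uri.parse("https://example.com/ad2.m3u8"), /* index= */ 1)
                .withAdDurationsUs(new long[] {15_000_000L, 30_000_000L})
                .withContentResumeOffsetUs(0);
        int firstAd = group.getFirstAdIndexToPlay(); // 0, both ads are available.
        int nextAd = group.getNextAdIndexToPlay(/* lastPlayedAdIndex= */ 0); // 1
        boolean pending = group.hasUnplayedAds(); // true until both ads reach a final state.
        return group;
      }
    }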
+ + private static final String FIELD_TIME_US = Util.intToStringMaxRadix(0); + private static final String FIELD_COUNT = Util.intToStringMaxRadix(1); + private static final String FIELD_URIS = Util.intToStringMaxRadix(2); + private static final String FIELD_STATES = Util.intToStringMaxRadix(3); + private static final String FIELD_DURATIONS_US = Util.intToStringMaxRadix(4); + private static final String FIELD_CONTENT_RESUME_OFFSET_US = Util.intToStringMaxRadix(5); + private static final String FIELD_IS_SERVER_SIDE_INSERTED = Util.intToStringMaxRadix(6); + private static final String FIELD_ORIGINAL_COUNT = Util.intToStringMaxRadix(7); + + // putParcelableArrayList actually supports null elements. + @SuppressWarnings("nullness:argument") + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putLong(FIELD_TIME_US, timeUs); + bundle.putInt(FIELD_COUNT, count); + bundle.putInt(FIELD_ORIGINAL_COUNT, originalCount); + bundle.putParcelableArrayList( + FIELD_URIS, new ArrayList<@NullableType Uri>(Arrays.asList(uris))); + bundle.putIntArray(FIELD_STATES, states); + bundle.putLongArray(FIELD_DURATIONS_US, durationsUs); + bundle.putLong(FIELD_CONTENT_RESUME_OFFSET_US, contentResumeOffsetUs); + bundle.putBoolean(FIELD_IS_SERVER_SIDE_INSERTED, isServerSideInserted); + return bundle; + } + + /** Object that can restore {@link AdGroup} from a {@link Bundle}. */ + public static final Creator CREATOR = AdGroup::fromBundle; + + // getParcelableArrayList may have null elements. + @SuppressWarnings("nullness:type.argument") + private static AdGroup fromBundle(Bundle bundle) { + long timeUs = bundle.getLong(FIELD_TIME_US); + int count = bundle.getInt(FIELD_COUNT); + int originalCount = bundle.getInt(FIELD_ORIGINAL_COUNT); + @Nullable ArrayList<@NullableType Uri> uriList = bundle.getParcelableArrayList(FIELD_URIS); + @Nullable + @AdState + int[] states = bundle.getIntArray(FIELD_STATES); + @Nullable long[] durationsUs = bundle.getLongArray(FIELD_DURATIONS_US); + long contentResumeOffsetUs = bundle.getLong(FIELD_CONTENT_RESUME_OFFSET_US); + boolean isServerSideInserted = bundle.getBoolean(FIELD_IS_SERVER_SIDE_INSERTED); + return new AdGroup( + timeUs, + count, + originalCount, + states == null ? new int[0] : states, + uriList == null ? new Uri[0] : uriList.toArray(new Uri[0]), + durationsUs == null ? new long[0] : durationsUs, + contentResumeOffsetUs, + isServerSideInserted); + } } /** @@ -234,8 +519,11 @@ private static long[] copyDurationsUsWithSpaceForAdCount(long[] durationsUs, int * #AD_STATE_AVAILABLE}, {@link #AD_STATE_SKIPPED}, {@link #AD_STATE_PLAYED} or {@link * #AD_STATE_ERROR}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({ AD_STATE_UNAVAILABLE, AD_STATE_AVAILABLE, @@ -256,57 +544,80 @@ private static long[] copyDurationsUsWithSpaceForAdCount(long[] durationsUs, int public static final int AD_STATE_ERROR = 4; /** Ad playback state with no ads. 
*/ - public static final AdPlaybackState NONE = new AdPlaybackState(); + public static final AdPlaybackState NONE = + new AdPlaybackState( + /* adsId= */ null, + /* adGroups= */ new AdGroup[0], + /* adResumePositionUs= */ 0L, + /* contentDurationUs= */ C.TIME_UNSET, + /* removedAdGroupCount= */ 0); + + private static final AdGroup REMOVED_AD_GROUP = new AdGroup(/* timeUs= */ 0).withAdCount(0); - /** The number of ad groups. */ - public final int adGroupCount; /** - * The times of ad groups, in microseconds, relative to the start of the {@link - * com.google.android.exoplayer2.Timeline.Period} they belong to. A final element with the value - * {@link C#TIME_END_OF_SOURCE} indicates a postroll ad. + * The opaque identifier for ads with which this instance is associated, or {@code null} if unset. */ - public final long[] adGroupTimesUs; - /** The ad groups. */ - public final AdGroup[] adGroups; + @Nullable public final Object adsId; + + /** The number of ad groups. */ + public final int adGroupCount; /** The position offset in the first unplayed ad at which to begin playback, in microseconds. */ public final long adResumePositionUs; /** * The duration of the content period in microseconds, if known. {@link C#TIME_UNSET} otherwise. */ public final long contentDurationUs; + /** + * The number of ad groups that have been removed. Ad groups with indices between {@code 0} + * (inclusive) and {@code removedAdGroupCount} (exclusive) will be empty and must not be modified + * by any of the {@code with*} methods. + */ + public final int removedAdGroupCount; + + private final AdGroup[] adGroups; /** * Creates a new ad playback state with the specified ad group times. * + * @param adsId The opaque identifier for ads with which this instance is associated. * @param adGroupTimesUs The times of ad groups in microseconds, relative to the start of the - * {@link com.google.android.exoplayer2.Timeline.Period} they belong to. A final element with - * the value {@link C#TIME_END_OF_SOURCE} indicates that there is a postroll ad. + * {@link Timeline.Period} they belong to. A final element with the value {@link + * C#TIME_END_OF_SOURCE} indicates that there is a postroll ad. */ - public AdPlaybackState(long... adGroupTimesUs) { - int count = adGroupTimesUs.length; - adGroupCount = count; - this.adGroupTimesUs = Arrays.copyOf(adGroupTimesUs, count); - this.adGroups = new AdGroup[count]; - for (int i = 0; i < count; i++) { - adGroups[i] = new AdGroup(); - } - adResumePositionUs = 0; - contentDurationUs = C.TIME_UNSET; + public AdPlaybackState(Object adsId, long... adGroupTimesUs) { + this( + adsId, + createEmptyAdGroups(adGroupTimesUs), + /* adResumePositionUs= */ 0, + /* contentDurationUs= */ C.TIME_UNSET, + /* removedAdGroupCount= */ 0); } private AdPlaybackState( - long[] adGroupTimesUs, AdGroup[] adGroups, long adResumePositionUs, long contentDurationUs) { - adGroupCount = adGroups.length; - this.adGroupTimesUs = adGroupTimesUs; - this.adGroups = adGroups; + @Nullable Object adsId, + AdGroup[] adGroups, + long adResumePositionUs, + long contentDurationUs, + int removedAdGroupCount) { + this.adsId = adsId; this.adResumePositionUs = adResumePositionUs; this.contentDurationUs = contentDurationUs; + adGroupCount = adGroups.length + removedAdGroupCount; + this.adGroups = adGroups; + this.removedAdGroupCount = removedAdGroupCount; + } + + /** Returns the specified {@link AdGroup}. */ + public AdGroup getAdGroup(@IntRange(from = 0) int adGroupIndex) { + return adGroupIndex < removedAdGroupCount + ? 
REMOVED_AD_GROUP + : adGroups[adGroupIndex - removedAdGroupCount]; } /** - * Returns the index of the ad group at or before {@code positionUs}, if that ad group is - * unplayed. Returns {@link C#INDEX_UNSET} if the ad group at or before {@code positionUs} has no - * ads remaining to be played, or if there is no such ad group. + * Returns the index of the ad group at or before {@code positionUs} that should be played before + * the content at {@code positionUs}. Returns {@link C#INDEX_UNSET} if the ad group at or before + * {@code positionUs} has no ads remaining to be played, or if there is no such ad group. * * @param positionUs The period position at or before which to find an ad group, in microseconds, * or {@link C#TIME_END_OF_SOURCE} for the end of the stream (in which case the index of any @@ -318,16 +629,16 @@ private AdPlaybackState( public int getAdGroupIndexForPositionUs(long positionUs, long periodDurationUs) { // Use a linear search as the array elements may not be increasing due to TIME_END_OF_SOURCE. // In practice we expect there to be few ad groups so the search shouldn't be expensive. - int index = adGroupTimesUs.length - 1; + int index = adGroupCount - 1; while (index >= 0 && isPositionBeforeAdGroup(positionUs, periodDurationUs, index)) { index--; } - return index >= 0 && adGroups[index].hasUnplayedAds() ? index : C.INDEX_UNSET; + return index >= 0 && getAdGroup(index).hasUnplayedAds() ? index : C.INDEX_UNSET; } /** - * Returns the index of the next ad group after {@code positionUs} that has ads remaining to be - * played. Returns {@link C#INDEX_UNSET} if there is no such ad group. + * Returns the index of the next ad group after {@code positionUs} that should be played. Returns + * {@link C#INDEX_UNSET} if there is no such ad group. * * @param positionUs The period position after which to find an ad group, in microseconds, or * {@link C#TIME_END_OF_SOURCE} for the end of the stream (in which case there can be no ad @@ -343,72 +654,168 @@ public int getAdGroupIndexAfterPositionUs(long positionUs, long periodDurationUs } // Use a linear search as the array elements may not be increasing due to TIME_END_OF_SOURCE. // In practice we expect there to be few ad groups so the search shouldn't be expensive. - int index = 0; - while (index < adGroupTimesUs.length - && adGroupTimesUs[index] != C.TIME_END_OF_SOURCE - && (positionUs >= adGroupTimesUs[index] || !adGroups[index].hasUnplayedAds())) { + int index = removedAdGroupCount; + while (index < adGroupCount + && ((getAdGroup(index).timeUs != C.TIME_END_OF_SOURCE + && getAdGroup(index).timeUs <= positionUs) + || !getAdGroup(index).shouldPlayAdGroup())) { index++; } - return index < adGroupTimesUs.length ? index : C.INDEX_UNSET; + return index < adGroupCount ? index : C.INDEX_UNSET; } /** Returns whether the specified ad has been marked as in {@link #AD_STATE_ERROR}. */ - public boolean isAdInErrorState(int adGroupIndex, int adIndexInAdGroup) { - if (adGroupIndex >= adGroups.length) { + public boolean isAdInErrorState( + @IntRange(from = 0) int adGroupIndex, @IntRange(from = 0) int adIndexInAdGroup) { + if (adGroupIndex >= adGroupCount) { return false; } - AdGroup adGroup = adGroups[adGroupIndex]; + AdGroup adGroup = getAdGroup(adGroupIndex); if (adGroup.count == C.LENGTH_UNSET || adIndexInAdGroup >= adGroup.count) { return false; } return adGroup.states[adIndexInAdGroup] == AdPlaybackState.AD_STATE_ERROR; } + /** + * Returns an instance with the specified ad group time. + * + * @param adGroupIndex The index of the ad group. 
+ * @param adGroupTimeUs The new ad group time, in microseconds, or {@link C#TIME_END_OF_SOURCE} to + * indicate a postroll ad. + * @return The updated ad playback state. + */ + @CheckResult + public AdPlaybackState withAdGroupTimeUs( + @IntRange(from = 0) int adGroupIndex, long adGroupTimeUs) { + int adjustedIndex = adGroupIndex - removedAdGroupCount; + AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length); + adGroups[adjustedIndex] = this.adGroups[adjustedIndex].withTimeUs(adGroupTimeUs); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); + } + + /** + * Returns an instance with a new ad group. + * + * @param adGroupIndex The insertion index of the new group. + * @param adGroupTimeUs The ad group time, in microseconds, or {@link C#TIME_END_OF_SOURCE} to + * indicate a postroll ad. + * @return The updated ad playback state. + */ + @CheckResult + public AdPlaybackState withNewAdGroup(@IntRange(from = 0) int adGroupIndex, long adGroupTimeUs) { + int adjustedIndex = adGroupIndex - removedAdGroupCount; + AdGroup newAdGroup = new AdGroup(adGroupTimeUs); + AdGroup[] adGroups = Util.nullSafeArrayAppend(this.adGroups, newAdGroup); + System.arraycopy( + /* src= */ adGroups, + /* srcPos= */ adjustedIndex, + /* dest= */ adGroups, + /* destPos= */ adjustedIndex + 1, + /* length= */ this.adGroups.length - adjustedIndex); + adGroups[adjustedIndex] = newAdGroup; + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); + } + /** * Returns an instance with the number of ads in {@code adGroupIndex} resolved to {@code adCount}. * The ad count must be greater than zero. */ @CheckResult - public AdPlaybackState withAdCount(int adGroupIndex, int adCount) { - Assertions.checkArgument(adCount > 0); - if (adGroups[adGroupIndex].count == adCount) { + public AdPlaybackState withAdCount( + @IntRange(from = 0) int adGroupIndex, @IntRange(from = 1) int adCount) { + checkArgument(adCount > 0); + int adjustedIndex = adGroupIndex - removedAdGroupCount; + if (adGroups[adjustedIndex].count == adCount) { return this; } AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length); - adGroups[adGroupIndex] = this.adGroups[adGroupIndex].withAdCount(adCount); - return new AdPlaybackState(adGroupTimesUs, adGroups, adResumePositionUs, contentDurationUs); + adGroups[adjustedIndex] = this.adGroups[adjustedIndex].withAdCount(adCount); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); } - /** Returns an instance with the specified ad URI. */ + /** + * Returns an instance with the specified ad URI and the ad marked as {@linkplain + * #AD_STATE_AVAILABLE available}. + * + * @throws IllegalStateException If {@link Uri#EMPTY} is passed as argument for a client-side + * inserted ad group. 
+ */ @CheckResult - public AdPlaybackState withAdUri(int adGroupIndex, int adIndexInAdGroup, Uri uri) { + public AdPlaybackState withAvailableAdUri( + @IntRange(from = 0) int adGroupIndex, @IntRange(from = 0) int adIndexInAdGroup, Uri uri) { + int adjustedIndex = adGroupIndex - removedAdGroupCount; AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length); - adGroups[adGroupIndex] = adGroups[adGroupIndex].withAdUri(uri, adIndexInAdGroup); - return new AdPlaybackState(adGroupTimesUs, adGroups, adResumePositionUs, contentDurationUs); + checkState(!Uri.EMPTY.equals(uri) || adGroups[adjustedIndex].isServerSideInserted); + adGroups[adjustedIndex] = adGroups[adjustedIndex].withAdUri(uri, adIndexInAdGroup); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); } - /** Returns an instance with the specified ad marked as played. */ + /** + * Returns an instance with the specified ad marked as {@linkplain #AD_STATE_AVAILABLE available}. + * + *
<p>
      Must not be called with client side inserted ad groups. Client side inserted ads should use + * {@link #withAvailableAdUri}. + * + * @throws IllegalStateException in case this methods is called on an ad group that {@linkplain + * AdGroup#isServerSideInserted is not server side inserted}. + */ + @CheckResult + public AdPlaybackState withAvailableAd( + @IntRange(from = 0) int adGroupIndex, @IntRange(from = 0) int adIndexInAdGroup) { + return withAvailableAdUri(adGroupIndex, adIndexInAdGroup, Uri.EMPTY); + } + + /** Returns an instance with the specified ad marked as {@linkplain #AD_STATE_PLAYED played}. */ + @CheckResult + public AdPlaybackState withPlayedAd( + @IntRange(from = 0) int adGroupIndex, @IntRange(from = 0) int adIndexInAdGroup) { + int adjustedIndex = adGroupIndex - removedAdGroupCount; + AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length); + adGroups[adjustedIndex] = + adGroups[adjustedIndex].withAdState(AD_STATE_PLAYED, adIndexInAdGroup); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); + } + + /** Returns an instance with the specified ad marked as {@linkplain #AD_STATE_SKIPPED skipped}. */ @CheckResult - public AdPlaybackState withPlayedAd(int adGroupIndex, int adIndexInAdGroup) { + public AdPlaybackState withSkippedAd( + @IntRange(from = 0) int adGroupIndex, @IntRange(from = 0) int adIndexInAdGroup) { + int adjustedIndex = adGroupIndex - removedAdGroupCount; AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length); - adGroups[adGroupIndex] = adGroups[adGroupIndex].withAdState(AD_STATE_PLAYED, adIndexInAdGroup); - return new AdPlaybackState(adGroupTimesUs, adGroups, adResumePositionUs, contentDurationUs); + adGroups[adjustedIndex] = + adGroups[adjustedIndex].withAdState(AD_STATE_SKIPPED, adIndexInAdGroup); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); } - /** Returns an instance with the specified ad marked as skipped. */ + /** Returns an instance with the last ad of the given ad group removed. */ @CheckResult - public AdPlaybackState withSkippedAd(int adGroupIndex, int adIndexInAdGroup) { + public AdPlaybackState withLastAdRemoved(@IntRange(from = 0) int adGroupIndex) { + int adjustedIndex = adGroupIndex - removedAdGroupCount; AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length); - adGroups[adGroupIndex] = adGroups[adGroupIndex].withAdState(AD_STATE_SKIPPED, adIndexInAdGroup); - return new AdPlaybackState(adGroupTimesUs, adGroups, adResumePositionUs, contentDurationUs); + adGroups[adjustedIndex] = adGroups[adjustedIndex].withLastAdRemoved(); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); } - /** Returns an instance with the specified ad marked as having a load error. */ + /** + * Returns an instance with the specified ad marked {@linkplain #AD_STATE_ERROR as having a load + * error}. 
+ */ @CheckResult - public AdPlaybackState withAdLoadError(int adGroupIndex, int adIndexInAdGroup) { + public AdPlaybackState withAdLoadError( + @IntRange(from = 0) int adGroupIndex, @IntRange(from = 0) int adIndexInAdGroup) { + int adjustedIndex = adGroupIndex - removedAdGroupCount; AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length); - adGroups[adGroupIndex] = adGroups[adGroupIndex].withAdState(AD_STATE_ERROR, adIndexInAdGroup); - return new AdPlaybackState(adGroupTimesUs, adGroups, adResumePositionUs, contentDurationUs); + adGroups[adjustedIndex] = adGroups[adjustedIndex].withAdState(AD_STATE_ERROR, adIndexInAdGroup); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); } /** @@ -416,20 +823,42 @@ public AdPlaybackState withAdLoadError(int adGroupIndex, int adIndexInAdGroup) { * marked as played or in the error state). */ @CheckResult - public AdPlaybackState withSkippedAdGroup(int adGroupIndex) { + public AdPlaybackState withSkippedAdGroup(@IntRange(from = 0) int adGroupIndex) { + int adjustedIndex = adGroupIndex - removedAdGroupCount; AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length); - adGroups[adGroupIndex] = adGroups[adGroupIndex].withAllAdsSkipped(); - return new AdPlaybackState(adGroupTimesUs, adGroups, adResumePositionUs, contentDurationUs); + adGroups[adjustedIndex] = adGroups[adjustedIndex].withAllAdsSkipped(); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); } - /** Returns an instance with the specified ad durations, in microseconds. */ + /** + * Returns an instance with the specified ad durations, in microseconds. + * + *
<p>
      Must only be used if {@link #removedAdGroupCount} is 0. + */ @CheckResult public AdPlaybackState withAdDurationsUs(long[][] adDurationUs) { + checkState(removedAdGroupCount == 0); AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length); for (int adGroupIndex = 0; adGroupIndex < adGroupCount; adGroupIndex++) { adGroups[adGroupIndex] = adGroups[adGroupIndex].withAdDurationsUs(adDurationUs[adGroupIndex]); } - return new AdPlaybackState(adGroupTimesUs, adGroups, adResumePositionUs, contentDurationUs); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); + } + + /** + * Returns an instance with the specified ad durations, in microseconds, in the specified ad + * group. + */ + @CheckResult + public AdPlaybackState withAdDurationsUs( + @IntRange(from = 0) int adGroupIndex, long... adDurationsUs) { + int adjustedIndex = adGroupIndex - removedAdGroupCount; + AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length); + adGroups[adjustedIndex] = adGroups[adjustedIndex].withAdDurationsUs(adDurationsUs); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); } /** @@ -441,7 +870,8 @@ public AdPlaybackState withAdResumePositionUs(long adResumePositionUs) { if (this.adResumePositionUs == adResumePositionUs) { return this; } else { - return new AdPlaybackState(adGroupTimesUs, adGroups, adResumePositionUs, contentDurationUs); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); } } @@ -451,10 +881,133 @@ public AdPlaybackState withContentDurationUs(long contentDurationUs) { if (this.contentDurationUs == contentDurationUs) { return this; } else { - return new AdPlaybackState(adGroupTimesUs, adGroups, adResumePositionUs, contentDurationUs); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); + } + } + + /** + * Returns an instance with the specified number of {@link #removedAdGroupCount removed ad + * groups}. + * + *
<p>
      Ad groups with indices between {@code 0} (inclusive) and {@code removedAdGroupCount} + * (exclusive) will be empty and must not be modified by any of the {@code with*} methods. + */ + @CheckResult + public AdPlaybackState withRemovedAdGroupCount(@IntRange(from = 0) int removedAdGroupCount) { + if (this.removedAdGroupCount == removedAdGroupCount) { + return this; + } else { + checkArgument(removedAdGroupCount > this.removedAdGroupCount); + AdGroup[] adGroups = new AdGroup[adGroupCount - removedAdGroupCount]; + System.arraycopy( + /* src= */ this.adGroups, + /* srcPos= */ removedAdGroupCount - this.removedAdGroupCount, + /* dest= */ adGroups, + /* destPos= */ 0, + /* length= */ adGroups.length); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); } } + /** + * Returns an instance with the specified {@link AdGroup#contentResumeOffsetUs}, in microseconds, + * for the specified ad group. + */ + @CheckResult + public AdPlaybackState withContentResumeOffsetUs( + @IntRange(from = 0) int adGroupIndex, long contentResumeOffsetUs) { + int adjustedIndex = adGroupIndex - removedAdGroupCount; + if (adGroups[adjustedIndex].contentResumeOffsetUs == contentResumeOffsetUs) { + return this; + } + AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length); + adGroups[adjustedIndex] = + adGroups[adjustedIndex].withContentResumeOffsetUs(contentResumeOffsetUs); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); + } + + /** + * Returns an instance with the specified value for {@link AdGroup#originalCount} in the specified + * ad group. + */ + @CheckResult + public AdPlaybackState withOriginalAdCount( + @IntRange(from = 0) int adGroupIndex, int originalAdCount) { + int adjustedIndex = adGroupIndex - removedAdGroupCount; + if (adGroups[adjustedIndex].originalCount == originalAdCount) { + return this; + } + AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length); + adGroups[adjustedIndex] = adGroups[adjustedIndex].withOriginalAdCount(originalAdCount); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); + } + + /** + * Returns an instance with the specified value for {@link AdGroup#isServerSideInserted} in the + * specified ad group. + */ + @CheckResult + public AdPlaybackState withIsServerSideInserted( + @IntRange(from = 0) int adGroupIndex, boolean isServerSideInserted) { + int adjustedIndex = adGroupIndex - removedAdGroupCount; + if (adGroups[adjustedIndex].isServerSideInserted == isServerSideInserted) { + return this; + } + AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length); + adGroups[adjustedIndex] = + adGroups[adjustedIndex].withIsServerSideInserted(isServerSideInserted); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); + } + + /** + * Returns an instance with all ads in the specified ad group reset from final states (played, + * skipped, error) to either available or unavailable, which allows to play them again. 
+ */ + @CheckResult + public AdPlaybackState withResetAdGroup(@IntRange(from = 0) int adGroupIndex) { + int adjustedIndex = adGroupIndex - removedAdGroupCount; + AdGroup[] adGroups = Util.nullSafeArrayCopy(this.adGroups, this.adGroups.length); + adGroups[adjustedIndex] = adGroups[adjustedIndex].withAllAdsReset(); + return new AdPlaybackState( + adsId, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); + } + + /** + * Returns a copy of the ad playback state with the given ads ID. + * + * @param adsId The new ads ID. + * @param adPlaybackState The ad playback state to copy. + * @return The new ad playback state. + */ + public static AdPlaybackState fromAdPlaybackState(Object adsId, AdPlaybackState adPlaybackState) { + AdGroup[] adGroups = + new AdGroup[adPlaybackState.adGroupCount - adPlaybackState.removedAdGroupCount]; + for (int i = 0; i < adGroups.length; i++) { + AdGroup adGroup = adPlaybackState.adGroups[i]; + adGroups[i] = + new AdGroup( + adGroup.timeUs, + adGroup.count, + adGroup.originalCount, + Arrays.copyOf(adGroup.states, adGroup.states.length), + Arrays.copyOf(adGroup.uris, adGroup.uris.length), + Arrays.copyOf(adGroup.durationsUs, adGroup.durationsUs.length), + adGroup.contentResumeOffsetUs, + adGroup.isServerSideInserted); + } + return new AdPlaybackState( + adsId, + adGroups, + adPlaybackState.adResumePositionUs, + adPlaybackState.contentDurationUs, + adPlaybackState.removedAdGroupCount); + } + @Override public boolean equals(@Nullable Object o) { if (this == o) { @@ -464,19 +1017,21 @@ public boolean equals(@Nullable Object o) { return false; } AdPlaybackState that = (AdPlaybackState) o; - return adGroupCount == that.adGroupCount + return Util.areEqual(adsId, that.adsId) + && adGroupCount == that.adGroupCount && adResumePositionUs == that.adResumePositionUs && contentDurationUs == that.contentDurationUs - && Arrays.equals(adGroupTimesUs, that.adGroupTimesUs) + && removedAdGroupCount == that.removedAdGroupCount && Arrays.equals(adGroups, that.adGroups); } @Override public int hashCode() { int result = adGroupCount; + result = 31 * result + (adsId == null ? 0 : adsId.hashCode()); result = 31 * result + (int) adResumePositionUs; result = 31 * result + (int) contentDurationUs; - result = 31 * result + Arrays.hashCode(adGroupTimesUs); + result = 31 * result + removedAdGroupCount; result = 31 * result + Arrays.hashCode(adGroups); return result; } @@ -484,12 +1039,14 @@ public int hashCode() { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("AdPlaybackState(adResumePositionUs="); + sb.append("AdPlaybackState(adsId="); + sb.append(adsId); + sb.append(", adResumePositionUs="); sb.append(adResumePositionUs); sb.append(", adGroups=["); for (int i = 0; i < adGroups.length; i++) { sb.append("adGroup(timeUs="); - sb.append(adGroupTimesUs[i]); + sb.append(adGroups[i].timeUs); sb.append(", ads=["); for (int j = 0; j < adGroups[i].states.length; j++) { sb.append("ad(state="); @@ -535,11 +1092,83 @@ private boolean isPositionBeforeAdGroup( // The end of the content is at (but not before) any postroll ad, and after any other ads. return false; } - long adGroupPositionUs = adGroupTimesUs[adGroupIndex]; + long adGroupPositionUs = getAdGroup(adGroupIndex).timeUs; if (adGroupPositionUs == C.TIME_END_OF_SOURCE) { return periodDurationUs == C.TIME_UNSET || positionUs < periodDurationUs; } else { return positionUs < adGroupPositionUs; } } + + // Bundleable implementation. 
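Illustrative sketch (not part of the diff) of how the copy-on-write with* methods above are meant to be used; the adsId string and cue-point values are made up, and the varargs AdPlaybackState(Object adsId, long... adGroupTimesUs) constructor is assumed from earlier in this class:

AdPlaybackState state =
    new AdPlaybackState(/* adsId= */ "sampleAdsId", /* adGroupTimesUs= */ 10_000_000L, 20_000_000L);
// Each with* method is annotated @CheckResult and returns a new instance; reassign the result.
state = state.withContentDurationUs(/* contentDurationUs= */ 60_000_000L);
state = state.withIsServerSideInserted(/* adGroupIndex= */ 0, /* isServerSideInserted= */ true);
state = state.withAdLoadError(/* adGroupIndex= */ 0, /* adIndexInAdGroup= */ 0);
// Indices passed by callers never change, even after ad groups are removed: each method
// subtracts removedAdGroupCount internally before indexing the backing adGroups array.
state = state.withRemovedAdGroupCount(/* removedAdGroupCount= */ 1);
state = state.withContentResumeOffsetUs(/* adGroupIndex= */ 1, /* contentResumeOffsetUs= */ 5_000_000L);

Every call returns a fresh immutable instance, and ad group indices stay stable across withRemovedAdGroupCount because of that internal adjustment.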
+ + private static final String FIELD_AD_GROUPS = Util.intToStringMaxRadix(1); + private static final String FIELD_AD_RESUME_POSITION_US = Util.intToStringMaxRadix(2); + private static final String FIELD_CONTENT_DURATION_US = Util.intToStringMaxRadix(3); + private static final String FIELD_REMOVED_AD_GROUP_COUNT = Util.intToStringMaxRadix(4); + + /** + * {@inheritDoc} + * + *
<p>
      It omits the {@link #adsId} field so the {@link #adsId} of instances restored by {@link + * #CREATOR} will always be {@code null}. + */ + // TODO(b/166765820): See if missing adsId would be okay and add adsId to the Bundle otherwise. + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + ArrayList adGroupBundleList = new ArrayList<>(); + for (AdGroup adGroup : adGroups) { + adGroupBundleList.add(adGroup.toBundle()); + } + if (!adGroupBundleList.isEmpty()) { + bundle.putParcelableArrayList(FIELD_AD_GROUPS, adGroupBundleList); + } + if (adResumePositionUs != NONE.adResumePositionUs) { + bundle.putLong(FIELD_AD_RESUME_POSITION_US, adResumePositionUs); + } + if (contentDurationUs != NONE.contentDurationUs) { + bundle.putLong(FIELD_CONTENT_DURATION_US, contentDurationUs); + } + if (removedAdGroupCount != NONE.removedAdGroupCount) { + bundle.putInt(FIELD_REMOVED_AD_GROUP_COUNT, removedAdGroupCount); + } + return bundle; + } + + /** + * Object that can restore {@link AdPlaybackState} from a {@link Bundle}. + * + *
<p>
      The {@link #adsId} of restored instances will always be {@code null}. + */ + public static final Bundleable.Creator CREATOR = AdPlaybackState::fromBundle; + + private static AdPlaybackState fromBundle(Bundle bundle) { + @Nullable ArrayList adGroupBundleList = bundle.getParcelableArrayList(FIELD_AD_GROUPS); + @Nullable AdGroup[] adGroups; + if (adGroupBundleList == null) { + adGroups = new AdGroup[0]; + } else { + adGroups = new AdGroup[adGroupBundleList.size()]; + for (int i = 0; i < adGroupBundleList.size(); i++) { + adGroups[i] = AdGroup.CREATOR.fromBundle(adGroupBundleList.get(i)); + } + } + long adResumePositionUs = + bundle.getLong(FIELD_AD_RESUME_POSITION_US, /* defaultValue= */ NONE.adResumePositionUs); + long contentDurationUs = + bundle.getLong(FIELD_CONTENT_DURATION_US, /* defaultValue= */ NONE.contentDurationUs); + int removedAdGroupCount = + bundle.getInt(FIELD_REMOVED_AD_GROUP_COUNT, /* defaultValue= */ NONE.removedAdGroupCount); + return new AdPlaybackState( + /* adsId= */ null, adGroups, adResumePositionUs, contentDurationUs, removedAdGroupCount); + } + + private static AdGroup[] createEmptyAdGroups(long[] adGroupTimesUs) { + AdGroup[] adGroups = new AdGroup[adGroupTimesUs.length]; + for (int i = 0; i < adGroups.length; i++) { + adGroups[i] = new AdGroup(adGroupTimesUs[i]); + } + return adGroups; + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/AdsLoader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/AdsLoader.java index 11947218a3..088141136e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/AdsLoader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/AdsLoader.java @@ -15,12 +15,13 @@ */ package com.google.android.exoplayer2.source.ads; -import android.view.View; -import android.view.ViewGroup; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.Player; +import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.source.ads.AdsMediaSource.AdLoadException; +import com.google.android.exoplayer2.ui.AdViewProvider; import com.google.android.exoplayer2.upstream.DataSpec; import java.io.IOException; @@ -32,24 +33,44 @@ * with a new copy of the current {@link AdPlaybackState} whenever further information about ads * becomes known (for example, when an ad media URI is available, or an ad has played to the end). * - *
<p>
      {@link #start(EventListener, AdViewProvider)} will be called when the ads media source first - * initializes, at which point the loader can request ads. If the player enters the background, - * {@link #stop()} will be called. Loaders should maintain any ad playback state in preparation for - * a later call to {@link #start(EventListener, AdViewProvider)}. If an ad is playing when the - * player is detached, update the ad playback state with the current playback position using {@link - * AdPlaybackState#withAdResumePositionUs(long)}. + *
<p>
      {@link #start(AdsMediaSource, DataSpec, Object, AdViewProvider, EventListener)} will be called + * when an ads media source first initializes, at which point the loader can request ads. If the + * player enters the background, {@link #stop(AdsMediaSource, EventListener)} will be called. + * Loaders should maintain any ad playback state in preparation for a later call to {@link + * #start(AdsMediaSource, DataSpec, Object, AdViewProvider, EventListener)}. If an ad is playing + * when the player is detached, update the ad playback state with the current playback position + * using {@link AdPlaybackState#withAdResumePositionUs(long)}. * *
<p>
      If {@link EventListener#onAdPlaybackState(AdPlaybackState)} has been called, the - * implementation of {@link #start(EventListener, AdViewProvider)} should invoke the same listener - * to provide the existing playback state to the new player. + * implementation of {@link #start(AdsMediaSource, DataSpec, Object, AdViewProvider, EventListener)} + * should invoke the same listener to provide the existing playback state to the new player. */ public interface AdsLoader { + /** + * Provides {@link AdsLoader} instances for media items that have {@link + * MediaItem.LocalConfiguration#adsConfiguration ad tag URIs}. + */ + interface Provider { + + /** + * Returns an {@link AdsLoader} for the given {@link + * MediaItem.LocalConfiguration#adsConfiguration ads configuration}, or {@code null} if no ads + * loader is available for the given ads configuration. + * + *
<p>
      This method is called each time a {@link MediaSource} is created from a {@link MediaItem} + * that defines an {@link MediaItem.LocalConfiguration#adsConfiguration ads configuration}. + */ + @Nullable + AdsLoader getAdsLoader(MediaItem.AdsConfiguration adsConfiguration); + } + /** Listener for ads loader events. All methods are called on the main thread. */ interface EventListener { /** - * Called when the ad playback state has been updated. + * Called when the ad playback state has been updated. The number of {@link + * AdPlaybackState#adGroupCount ad groups} may not change after the first call. * * @param adPlaybackState The new ad playback state. */ @@ -70,25 +91,6 @@ default void onAdClicked() {} default void onAdTapped() {} } - /** Provides views for the ad UI. */ - interface AdViewProvider { - - /** Returns the {@link ViewGroup} on top of the player that will show any ad UI. */ - ViewGroup getAdViewGroup(); - - /** - * Returns an array of views that are shown on top of the ad view group, but that are essential - * for controlling playback and should be excluded from ad viewability measurements by the - * {@link AdsLoader} (if it supports this). - * - *
<p>
      Each view must be either a fully transparent overlay (for capturing touch events), or a - * small piece of transient UI that is essential to the user experience of playback (such as a - * button to pause/resume playback or a transient full-screen or cast button). For more - * information see the documentation for your ads loader. - */ - View[] getAdOverlayViews(); - } - // Methods called by the application. /** @@ -115,36 +117,60 @@ interface AdViewProvider { /** * Sets the supported content types for ad media. Must be called before the first call to {@link - * #start(EventListener, AdViewProvider)}. Subsequent calls may be ignored. Called on the main - * thread by {@link AdsMediaSource}. + * #start(AdsMediaSource, DataSpec, Object, AdViewProvider, EventListener)}. Subsequent calls may + * be ignored. Called on the main thread by {@link AdsMediaSource}. * * @param contentTypes The supported content types for ad media. Each element must be one of - * {@link C#TYPE_DASH}, {@link C#TYPE_HLS}, {@link C#TYPE_SS} and {@link C#TYPE_OTHER}. + * {@link C#CONTENT_TYPE_DASH}, {@link C#CONTENT_TYPE_HLS}, {@link C#CONTENT_TYPE_SS} and + * {@link C#CONTENT_TYPE_OTHER}. */ void setSupportedContentTypes(@C.ContentType int... contentTypes); /** * Starts using the ads loader for playback. Called on the main thread by {@link AdsMediaSource}. * - * @param eventListener Listener for ads loader events. + * @param adsMediaSource The ads media source requesting to start loading ads. + * @param adTagDataSpec A data spec for the ad tag to load. + * @param adsId An opaque identifier for the ad playback state across start/stop calls. * @param adViewProvider Provider of views for the ad UI. + * @param eventListener Listener for ads loader events. */ - void start(EventListener eventListener, AdViewProvider adViewProvider); + void start( + AdsMediaSource adsMediaSource, + DataSpec adTagDataSpec, + Object adsId, + AdViewProvider adViewProvider, + EventListener eventListener); /** * Stops using the ads loader for playback and deregisters the event listener. Called on the main * thread by {@link AdsMediaSource}. + * + * @param adsMediaSource The ads media source requesting to stop loading/playing ads. + * @param eventListener The ads media source's listener for ads loader events. + */ + void stop(AdsMediaSource adsMediaSource, EventListener eventListener); + + /** + * Notifies the ads loader that preparation of an ad media period is complete. Called on the main + * thread by {@link AdsMediaSource}. + * + * @param adsMediaSource The ads media source for which preparation of ad media completed. + * @param adGroupIndex The index of the ad group. + * @param adIndexInAdGroup The index of the ad in the ad group. */ - void stop(); + void handlePrepareComplete(AdsMediaSource adsMediaSource, int adGroupIndex, int adIndexInAdGroup); /** * Notifies the ads loader that the player was not able to prepare media for a given ad. * Implementations should update the ad playback state as the specified ad has failed to load. * Called on the main thread by {@link AdsMediaSource}. * + * @param adsMediaSource The ads media source for which preparation of ad media failed. * @param adGroupIndex The index of the ad group. * @param adIndexInAdGroup The index of the ad in the ad group. * @param exception The preparation error. 
*/ - void handlePrepareError(int adGroupIndex, int adIndexInAdGroup, IOException exception); + void handlePrepareError( + AdsMediaSource adsMediaSource, int adGroupIndex, int adIndexInAdGroup, IOException exception); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/AdsMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/AdsMediaSource.java index 3481042c98..5c15f6b3f8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/AdsMediaSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/AdsMediaSource.java @@ -15,6 +15,10 @@ */ package com.google.android.exoplayer2.source.ads; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.net.Uri; import android.os.Handler; import android.os.Looper; @@ -22,37 +26,37 @@ import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.Timeline; import com.google.android.exoplayer2.source.CompositeMediaSource; +import com.google.android.exoplayer2.source.LoadEventInfo; import com.google.android.exoplayer2.source.MaskingMediaPeriod; +import com.google.android.exoplayer2.source.MediaLoadData; import com.google.android.exoplayer2.source.MediaPeriod; import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; import com.google.android.exoplayer2.source.MediaSourceEventListener; -import com.google.android.exoplayer2.source.MediaSourceEventListener.LoadEventInfo; -import com.google.android.exoplayer2.source.MediaSourceEventListener.MediaLoadData; -import com.google.android.exoplayer2.source.MediaSourceFactory; -import com.google.android.exoplayer2.source.ProgressiveMediaSource; +import com.google.android.exoplayer2.ui.AdViewProvider; import com.google.android.exoplayer2.upstream.Allocator; -import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSpec; import com.google.android.exoplayer2.upstream.TransferListener; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; import java.io.IOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.List; import org.checkerframework.checker.nullness.compatqual.NullableType; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; /** - * A {@link MediaSource} that inserts ads linearly with a provided content media source. This source - * cannot be used as a child source in a composition. It must be the top-level source used to - * prepare the player. + * A {@link MediaSource} that inserts ads linearly into a provided content media source. + * + *
<p>
      The wrapped content media source must contain a single {@link Timeline.Period}. */ public final class AdsMediaSource extends CompositeMediaSource { @@ -69,6 +73,7 @@ public static final class AdLoadException extends IOException { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({TYPE_AD, TYPE_AD_GROUP, TYPE_ALL_ADS, TYPE_UNEXPECTED}) public @interface Type {} /** Type for when an ad failed to load. The ad will be skipped. */ @@ -115,18 +120,20 @@ private AdLoadException(@Type int type, Exception cause) { */ public RuntimeException getRuntimeExceptionForUnexpected() { Assertions.checkState(type == TYPE_UNEXPECTED); - return (RuntimeException) Assertions.checkNotNull(getCause()); + return (RuntimeException) checkNotNull(getCause()); } } // Used to identify the content "child" source for CompositeMediaSource. - private static final MediaPeriodId DUMMY_CONTENT_MEDIA_PERIOD_ID = + private static final MediaPeriodId CHILD_SOURCE_MEDIA_PERIOD_ID = new MediaPeriodId(/* periodUid= */ new Object()); private final MediaSource contentMediaSource; - private final MediaSourceFactory adMediaSourceFactory; + private final MediaSource.Factory adMediaSourceFactory; private final AdsLoader adsLoader; - private final AdsLoader.AdViewProvider adViewProvider; + private final AdViewProvider adViewProvider; + private final DataSpec adTagDataSpec; + private final Object adsId; private final Handler mainHandler; private final Timeline.Period period; @@ -136,45 +143,33 @@ public RuntimeException getRuntimeExceptionForUnexpected() { @Nullable private AdPlaybackState adPlaybackState; private @NullableType AdMediaSourceHolder[][] adMediaSourceHolders; - /** - * Constructs a new source that inserts ads linearly with the content specified by {@code - * contentMediaSource}. Ad media is loaded using {@link ProgressiveMediaSource}. - * - * @param contentMediaSource The {@link MediaSource} providing the content to play. - * @param dataSourceFactory Factory for data sources used to load ad media. - * @param adsLoader The loader for ads. - * @param adViewProvider Provider of views for the ad UI. - */ - public AdsMediaSource( - MediaSource contentMediaSource, - DataSource.Factory dataSourceFactory, - AdsLoader adsLoader, - AdsLoader.AdViewProvider adViewProvider) { - this( - contentMediaSource, - new ProgressiveMediaSource.Factory(dataSourceFactory), - adsLoader, - adViewProvider); - } - /** * Constructs a new source that inserts ads linearly with the content specified by {@code * contentMediaSource}. * * @param contentMediaSource The {@link MediaSource} providing the content to play. + * @param adTagDataSpec The data specification of the ad tag to load. + * @param adsId An opaque identifier for ad playback state associated with this instance. Ad + * loading and playback state is shared among all playlist items that have the same ads id (by + * {@link Object#equals(Object) equality}), so it is important to pass the same identifiers + * when constructing playlist items each time the player returns to the foreground. * @param adMediaSourceFactory Factory for media sources used to load ad media. * @param adsLoader The loader for ads. * @param adViewProvider Provider of views for the ad UI. 
*/ public AdsMediaSource( MediaSource contentMediaSource, - MediaSourceFactory adMediaSourceFactory, + DataSpec adTagDataSpec, + Object adsId, + MediaSource.Factory adMediaSourceFactory, AdsLoader adsLoader, - AdsLoader.AdViewProvider adViewProvider) { + AdViewProvider adViewProvider) { this.contentMediaSource = contentMediaSource; this.adMediaSourceFactory = adMediaSourceFactory; this.adsLoader = adsLoader; this.adViewProvider = adViewProvider; + this.adTagDataSpec = adTagDataSpec; + this.adsId = adsId; mainHandler = new Handler(Looper.getMainLooper()); period = new Timeline.Period(); adMediaSourceHolders = new AdMediaSourceHolder[0][]; @@ -182,9 +177,8 @@ public AdsMediaSource( } @Override - @Nullable - public Object getTag() { - return contentMediaSource.getTag(); + public MediaItem getMediaItem() { + return contentMediaSource.getMediaItem(); } @Override @@ -192,18 +186,23 @@ protected void prepareSourceInternal(@Nullable TransferListener mediaTransferLis super.prepareSourceInternal(mediaTransferListener); ComponentListener componentListener = new ComponentListener(); this.componentListener = componentListener; - prepareChildSource(DUMMY_CONTENT_MEDIA_PERIOD_ID, contentMediaSource); - mainHandler.post(() -> adsLoader.start(componentListener, adViewProvider)); + prepareChildSource(CHILD_SOURCE_MEDIA_PERIOD_ID, contentMediaSource); + mainHandler.post( + () -> + adsLoader.start( + /* adsMediaSource= */ this, + adTagDataSpec, + adsId, + adViewProvider, + componentListener)); } @Override public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long startPositionUs) { - AdPlaybackState adPlaybackState = Assertions.checkNotNull(this.adPlaybackState); + AdPlaybackState adPlaybackState = checkNotNull(this.adPlaybackState); if (adPlaybackState.adGroupCount > 0 && id.isAd()) { int adGroupIndex = id.adGroupIndex; int adIndexInAdGroup = id.adIndexInAdGroup; - Uri adUri = - Assertions.checkNotNull(adPlaybackState.adGroups[adGroupIndex].uris[adIndexInAdGroup]); if (adMediaSourceHolders[adGroupIndex].length <= adIndexInAdGroup) { int adCount = adIndexInAdGroup + 1; adMediaSourceHolders[adGroupIndex] = @@ -213,15 +212,14 @@ public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long star AdMediaSourceHolder adMediaSourceHolder = adMediaSourceHolders[adGroupIndex][adIndexInAdGroup]; if (adMediaSourceHolder == null) { - MediaSource adMediaSource = adMediaSourceFactory.createMediaSource(adUri); - adMediaSourceHolder = new AdMediaSourceHolder(adMediaSource); + adMediaSourceHolder = new AdMediaSourceHolder(id); adMediaSourceHolders[adGroupIndex][adIndexInAdGroup] = adMediaSourceHolder; - prepareChildSource(id, adMediaSource); + maybeUpdateAdMediaSources(); } - return adMediaSourceHolder.createMediaPeriod(adUri, id, allocator, startPositionUs); + return adMediaSourceHolder.createMediaPeriod(id, allocator, startPositionUs); } else { - MaskingMediaPeriod mediaPeriod = - new MaskingMediaPeriod(contentMediaSource, id, allocator, startPositionUs); + MaskingMediaPeriod mediaPeriod = new MaskingMediaPeriod(id, allocator, startPositionUs); + mediaPeriod.setMediaSource(contentMediaSource); mediaPeriod.createPeriod(id); return mediaPeriod; } @@ -233,10 +231,10 @@ public void releasePeriod(MediaPeriod mediaPeriod) { MediaPeriodId id = maskingMediaPeriod.id; if (id.isAd()) { AdMediaSourceHolder adMediaSourceHolder = - Assertions.checkNotNull(adMediaSourceHolders[id.adGroupIndex][id.adIndexInAdGroup]); + checkNotNull(adMediaSourceHolders[id.adGroupIndex][id.adIndexInAdGroup]); 
adMediaSourceHolder.releaseMediaPeriod(maskingMediaPeriod); if (adMediaSourceHolder.isInactive()) { - releaseChildSource(id); + adMediaSourceHolder.release(); adMediaSourceHolders[id.adGroupIndex][id.adIndexInAdGroup] = null; } } else { @@ -247,35 +245,36 @@ public void releasePeriod(MediaPeriod mediaPeriod) { @Override protected void releaseSourceInternal() { super.releaseSourceInternal(); - Assertions.checkNotNull(componentListener).release(); - componentListener = null; + ComponentListener componentListener = checkNotNull(this.componentListener); + this.componentListener = null; + componentListener.stop(); contentTimeline = null; adPlaybackState = null; adMediaSourceHolders = new AdMediaSourceHolder[0][]; - mainHandler.post(adsLoader::stop); + mainHandler.post(() -> adsLoader.stop(/* adsMediaSource= */ this, componentListener)); } @Override protected void onChildSourceInfoRefreshed( - MediaPeriodId mediaPeriodId, MediaSource mediaSource, Timeline timeline) { - if (mediaPeriodId.isAd()) { - int adGroupIndex = mediaPeriodId.adGroupIndex; - int adIndexInAdGroup = mediaPeriodId.adIndexInAdGroup; - Assertions.checkNotNull(adMediaSourceHolders[adGroupIndex][adIndexInAdGroup]) - .handleSourceInfoRefresh(timeline); + MediaPeriodId childSourceId, MediaSource mediaSource, Timeline newTimeline) { + if (childSourceId.isAd()) { + int adGroupIndex = childSourceId.adGroupIndex; + int adIndexInAdGroup = childSourceId.adIndexInAdGroup; + checkNotNull(adMediaSourceHolders[adGroupIndex][adIndexInAdGroup]) + .handleSourceInfoRefresh(newTimeline); } else { - Assertions.checkArgument(timeline.getPeriodCount() == 1); - contentTimeline = timeline; + Assertions.checkArgument(newTimeline.getPeriodCount() == 1); + contentTimeline = newTimeline; } maybeUpdateSourceInfo(); } @Override protected MediaPeriodId getMediaPeriodIdForChildMediaPeriodId( - MediaPeriodId childId, MediaPeriodId mediaPeriodId) { - // The child id for the content period is just DUMMY_CONTENT_MEDIA_PERIOD_ID. That's why we need - // to forward the reported mediaPeriodId in this case. - return childId.isAd() ? childId : mediaPeriodId; + MediaPeriodId childSourceId, MediaPeriodId mediaPeriodId) { + // The child id for the content period is just CHILD_SOURCE_MEDIA_PERIOD_ID. That's why + // we need to forward the reported mediaPeriodId in this case. + return childSourceId.isAd() ? childSourceId : mediaPeriodId; } // Internal methods. @@ -284,20 +283,61 @@ private void onAdPlaybackState(AdPlaybackState adPlaybackState) { if (this.adPlaybackState == null) { adMediaSourceHolders = new AdMediaSourceHolder[adPlaybackState.adGroupCount][]; Arrays.fill(adMediaSourceHolders, new AdMediaSourceHolder[0]); + } else { + checkState(adPlaybackState.adGroupCount == this.adPlaybackState.adGroupCount); } this.adPlaybackState = adPlaybackState; + maybeUpdateAdMediaSources(); maybeUpdateSourceInfo(); } + /** + * Initializes any {@link AdMediaSourceHolder AdMediaSourceHolders} where the ad media URI is + * newly known. 
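Illustrative sketch (not part of the diff) of the contract this checkState imposes on AdsLoader implementations: after the first delivery, every update must keep the same adGroupCount, so a loader typically evolves one stored state and re-sends it. Field and method names here are hypothetical:

// Hypothetical fragment of an AdsLoader implementation.
@Nullable private AdPlaybackState adPlaybackState;
@Nullable private AdsLoader.EventListener eventListener;

private void markAdLoadError(int adGroupIndex, int adIndexInAdGroup) {
  if (adPlaybackState == null || eventListener == null) {
    return;
  }
  // Same number of ad groups as before; only the state of one ad changes.
  adPlaybackState = adPlaybackState.withAdLoadError(adGroupIndex, adIndexInAdGroup);
  eventListener.onAdPlaybackState(adPlaybackState);
}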
+ */ + private void maybeUpdateAdMediaSources() { + @Nullable AdPlaybackState adPlaybackState = this.adPlaybackState; + if (adPlaybackState == null) { + return; + } + for (int adGroupIndex = 0; adGroupIndex < adMediaSourceHolders.length; adGroupIndex++) { + for (int adIndexInAdGroup = 0; + adIndexInAdGroup < this.adMediaSourceHolders[adGroupIndex].length; + adIndexInAdGroup++) { + @Nullable + AdMediaSourceHolder adMediaSourceHolder = + this.adMediaSourceHolders[adGroupIndex][adIndexInAdGroup]; + AdPlaybackState.AdGroup adGroup = adPlaybackState.getAdGroup(adGroupIndex); + if (adMediaSourceHolder != null + && !adMediaSourceHolder.hasMediaSource() + && adIndexInAdGroup < adGroup.uris.length) { + @Nullable Uri adUri = adGroup.uris[adIndexInAdGroup]; + if (adUri != null) { + MediaItem.Builder adMediaItem = new MediaItem.Builder().setUri(adUri); + // Propagate the content's DRM config into the ad media source. + @Nullable + MediaItem.LocalConfiguration contentLocalConfiguration = + contentMediaSource.getMediaItem().localConfiguration; + if (contentLocalConfiguration != null) { + adMediaItem.setDrmConfiguration(contentLocalConfiguration.drmConfiguration); + } + MediaSource adMediaSource = adMediaSourceFactory.createMediaSource(adMediaItem.build()); + adMediaSourceHolder.initializeWithMediaSource(adMediaSource, adUri); + } + } + } + } + } + private void maybeUpdateSourceInfo() { @Nullable Timeline contentTimeline = this.contentTimeline; if (adPlaybackState != null && contentTimeline != null) { - adPlaybackState = adPlaybackState.withAdDurationsUs(getAdDurationsUs()); - Timeline timeline = - adPlaybackState.adGroupCount == 0 - ? contentTimeline - : new SinglePeriodAdTimeline(contentTimeline, adPlaybackState); - refreshSourceInfo(timeline); + if (adPlaybackState.adGroupCount == 0) { + refreshSourceInfo(contentTimeline); + } else { + adPlaybackState = adPlaybackState.withAdDurationsUs(getAdDurationsUs()); + refreshSourceInfo(new SinglePeriodAdTimeline(contentTimeline, adPlaybackState)); + } } } @@ -318,30 +358,30 @@ private final class ComponentListener implements AdsLoader.EventListener { private final Handler playerHandler; - private volatile boolean released; + private volatile boolean stopped; /** * Creates new listener which forwards ad playback states on the creating thread and all other * events on the external event listener thread. */ public ComponentListener() { - playerHandler = new Handler(); + playerHandler = Util.createHandlerForCurrentLooper(); } - /** Releases the component listener. */ - public void release() { - released = true; + /** Stops event delivery from this instance. 
*/ + public void stop() { + stopped = true; playerHandler.removeCallbacksAndMessages(null); } @Override public void onAdPlaybackState(final AdPlaybackState adPlaybackState) { - if (released) { + if (stopped) { return; } playerHandler.post( () -> { - if (released) { + if (stopped) { return; } AdsMediaSource.this.onAdPlaybackState(adPlaybackState); @@ -350,72 +390,94 @@ public void onAdPlaybackState(final AdPlaybackState adPlaybackState) { @Override public void onAdLoadError(final AdLoadException error, DataSpec dataSpec) { - if (released) { + if (stopped) { return; } createEventDispatcher(/* mediaPeriodId= */ null) .loadError( - dataSpec, - dataSpec.uri, - /* responseHeaders= */ Collections.emptyMap(), + new LoadEventInfo( + LoadEventInfo.getNewId(), + dataSpec, + /* elapsedRealtimeMs= */ SystemClock.elapsedRealtime()), C.DATA_TYPE_AD, - /* elapsedRealtimeMs= */ SystemClock.elapsedRealtime(), - /* loadDurationMs= */ 0, - /* bytesLoaded= */ 0, error, /* wasCanceled= */ true); } } - private final class AdPrepareErrorListener implements MaskingMediaPeriod.PrepareErrorListener { + private final class AdPrepareListener implements MaskingMediaPeriod.PrepareListener { private final Uri adUri; - private final int adGroupIndex; - private final int adIndexInAdGroup; - public AdPrepareErrorListener(Uri adUri, int adGroupIndex, int adIndexInAdGroup) { + public AdPrepareListener(Uri adUri) { this.adUri = adUri; - this.adGroupIndex = adGroupIndex; - this.adIndexInAdGroup = adIndexInAdGroup; } @Override - public void onPrepareError(MediaPeriodId mediaPeriodId, final IOException exception) { + public void onPrepareComplete(MediaPeriodId mediaPeriodId) { + mainHandler.post( + () -> + adsLoader.handlePrepareComplete( + /* adsMediaSource= */ AdsMediaSource.this, + mediaPeriodId.adGroupIndex, + mediaPeriodId.adIndexInAdGroup)); + } + + @Override + public void onPrepareError(MediaPeriodId mediaPeriodId, IOException exception) { createEventDispatcher(mediaPeriodId) .loadError( - new DataSpec(adUri), - adUri, - /* responseHeaders= */ Collections.emptyMap(), + new LoadEventInfo( + LoadEventInfo.getNewId(), + new DataSpec(adUri), + /* elapsedRealtimeMs= */ SystemClock.elapsedRealtime()), C.DATA_TYPE_AD, - C.TRACK_TYPE_UNKNOWN, - /* loadDurationMs= */ 0, - /* bytesLoaded= */ 0, AdLoadException.createForAd(exception), /* wasCanceled= */ true); mainHandler.post( - () -> adsLoader.handlePrepareError(adGroupIndex, adIndexInAdGroup, exception)); + () -> + adsLoader.handlePrepareError( + /* adsMediaSource= */ AdsMediaSource.this, + mediaPeriodId.adGroupIndex, + mediaPeriodId.adIndexInAdGroup, + exception)); } } private final class AdMediaSourceHolder { - private final MediaSource adMediaSource; + private final MediaPeriodId id; private final List activeMediaPeriods; - @MonotonicNonNull private Timeline timeline; + private @MonotonicNonNull Uri adUri; + private @MonotonicNonNull MediaSource adMediaSource; + private @MonotonicNonNull Timeline timeline; - public AdMediaSourceHolder(MediaSource adMediaSource) { - this.adMediaSource = adMediaSource; + public AdMediaSourceHolder(MediaPeriodId id) { + this.id = id; activeMediaPeriods = new ArrayList<>(); } + public void initializeWithMediaSource(MediaSource adMediaSource, Uri adUri) { + this.adMediaSource = adMediaSource; + this.adUri = adUri; + for (int i = 0; i < activeMediaPeriods.size(); i++) { + MaskingMediaPeriod maskingMediaPeriod = activeMediaPeriods.get(i); + maskingMediaPeriod.setMediaSource(adMediaSource); + maskingMediaPeriod.setPrepareListener(new 
AdPrepareListener(adUri)); + } + prepareChildSource(id, adMediaSource); + } + public MediaPeriod createMediaPeriod( - Uri adUri, MediaPeriodId id, Allocator allocator, long startPositionUs) { + MediaPeriodId id, Allocator allocator, long startPositionUs) { MaskingMediaPeriod maskingMediaPeriod = - new MaskingMediaPeriod(adMediaSource, id, allocator, startPositionUs); - maskingMediaPeriod.setPrepareErrorListener( - new AdPrepareErrorListener(adUri, id.adGroupIndex, id.adIndexInAdGroup)); + new MaskingMediaPeriod(id, allocator, startPositionUs); activeMediaPeriods.add(maskingMediaPeriod); + if (adMediaSource != null) { + maskingMediaPeriod.setMediaSource(adMediaSource); + maskingMediaPeriod.setPrepareListener(new AdPrepareListener(checkNotNull(adUri))); + } if (timeline != null) { Object periodUid = timeline.getUidOfPeriod(/* periodIndex= */ 0); MediaPeriodId adSourceMediaPeriodId = new MediaPeriodId(periodUid, id.windowSequenceNumber); @@ -449,6 +511,16 @@ public void releaseMediaPeriod(MaskingMediaPeriod maskingMediaPeriod) { maskingMediaPeriod.releasePeriod(); } + public void release() { + if (hasMediaSource()) { + releaseChildSource(id); + } + } + + public boolean hasMediaSource() { + return adMediaSource != null; + } + public boolean isInactive() { return activeMediaPeriods.isEmpty(); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/ServerSideAdInsertionMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/ServerSideAdInsertionMediaSource.java new file mode 100644 index 0000000000..e69de29bb2 diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/ServerSideAdInsertionUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/ServerSideAdInsertionUtil.java new file mode 100644 index 0000000000..e69de29bb2 diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/SinglePeriodAdTimeline.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/SinglePeriodAdTimeline.java index cc82510a29..a114bd92d9 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/SinglePeriodAdTimeline.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/SinglePeriodAdTimeline.java @@ -52,8 +52,8 @@ public Period getPeriod(int periodIndex, Period period, boolean setIds) { period.windowIndex, durationUs, period.getPositionInWindowUs(), - adPlaybackState); + adPlaybackState, + period.isPlaceholder); return period; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/package-info.java new file mode 100644 index 0000000000..ee7c4ab024 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/ads/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +@NonNullApi +package com.google.android.exoplayer2.source.ads; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/BaseMediaChunk.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/BaseMediaChunk.java index 74d8ddad3d..992991180e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/BaseMediaChunk.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/BaseMediaChunk.java @@ -20,10 +20,10 @@ import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSpec; +import com.google.android.exoplayer2.util.Assertions; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * A base implementation of {@link MediaChunk} that outputs to a {@link BaseMediaChunkOutput}. - */ +/** A base implementation of {@link MediaChunk} that outputs to a {@link BaseMediaChunkOutput}. */ public abstract class BaseMediaChunk extends MediaChunk { /** @@ -37,8 +37,8 @@ public abstract class BaseMediaChunk extends MediaChunk { */ public final long clippedEndTimeUs; - private BaseMediaChunkOutput output; - private int[] firstSampleIndices; + private @MonotonicNonNull BaseMediaChunkOutput output; + private int @MonotonicNonNull [] firstSampleIndices; /** * @param dataSource The source from which the data should be loaded. @@ -58,15 +58,22 @@ public BaseMediaChunk( DataSource dataSource, DataSpec dataSpec, Format trackFormat, - int trackSelectionReason, + @C.SelectionReason int trackSelectionReason, @Nullable Object trackSelectionData, long startTimeUs, long endTimeUs, long clippedStartTimeUs, long clippedEndTimeUs, long chunkIndex) { - super(dataSource, dataSpec, trackFormat, trackSelectionReason, trackSelectionData, startTimeUs, - endTimeUs, chunkIndex); + super( + dataSource, + dataSpec, + trackFormat, + trackSelectionReason, + trackSelectionData, + startTimeUs, + endTimeUs, + chunkIndex); this.clippedStartTimeUs = clippedStartTimeUs; this.clippedEndTimeUs = clippedEndTimeUs; } @@ -87,14 +94,11 @@ public void init(BaseMediaChunkOutput output) { * from this chunk. */ public final int getFirstSampleIndex(int trackIndex) { - return firstSampleIndices[trackIndex]; + return Assertions.checkStateNotNull(firstSampleIndices)[trackIndex]; } - /** - * Returns the output most recently passed to {@link #init(BaseMediaChunkOutput)}. - */ + /** Returns the output most recently passed to {@link #init(BaseMediaChunkOutput)}. */ protected final BaseMediaChunkOutput getOutput() { - return output; + return Assertions.checkStateNotNull(output); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/BaseMediaChunkIterator.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/BaseMediaChunkIterator.java index 274be54889..a46fc10606 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/BaseMediaChunkIterator.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/BaseMediaChunkIterator.java @@ -34,7 +34,7 @@ public abstract class BaseMediaChunkIterator implements MediaChunkIterator { * @param fromIndex The first available index. * @param toIndex The last available index. 
*/ - @SuppressWarnings("method.invocation.invalid") + @SuppressWarnings("nullness:method.invocation") public BaseMediaChunkIterator(long fromIndex, long toIndex) { this.fromIndex = fromIndex; this.toIndex = toIndex; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/BaseMediaChunkOutput.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/BaseMediaChunkOutput.java index a23e506d08..f0a37f4dc8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/BaseMediaChunkOutput.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/BaseMediaChunkOutput.java @@ -15,10 +15,11 @@ */ package com.google.android.exoplayer2.source.chunk; +import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.extractor.DummyTrackOutput; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.source.SampleQueue; -import com.google.android.exoplayer2.source.chunk.ChunkExtractorWrapper.TrackOutputProvider; +import com.google.android.exoplayer2.source.chunk.ChunkExtractor.TrackOutputProvider; import com.google.android.exoplayer2.util.Log; /** @@ -29,7 +30,7 @@ public final class BaseMediaChunkOutput implements TrackOutputProvider { private static final String TAG = "BaseMediaChunkOutput"; - private final int[] trackTypes; + private final @C.TrackType int[] trackTypes; private final SampleQueue[] sampleQueues; /** @@ -42,7 +43,7 @@ public BaseMediaChunkOutput(int[] trackTypes, SampleQueue[] sampleQueues) { } @Override - public TrackOutput track(int id, int type) { + public TrackOutput track(int id, @C.TrackType int type) { for (int i = 0; i < trackTypes.length; i++) { if (type == trackTypes[i]) { return sampleQueues[i]; @@ -52,15 +53,11 @@ public TrackOutput track(int id, int type) { return new DummyTrackOutput(); } - /** - * Returns the current absolute write indices of the individual sample queues. - */ + /** Returns the current absolute write indices of the individual sample queues. */ public int[] getWriteIndices() { int[] writeIndices = new int[sampleQueues.length]; for (int i = 0; i < sampleQueues.length; i++) { - if (sampleQueues[i] != null) { - writeIndices[i] = sampleQueues[i].getWriteIndex(); - } + writeIndices[i] = sampleQueues[i].getWriteIndex(); } return writeIndices; } @@ -71,10 +68,7 @@ public int[] getWriteIndices() { */ public void setSampleOffsetUs(long sampleOffsetUs) { for (SampleQueue sampleQueue : sampleQueues) { - if (sampleQueue != null) { - sampleQueue.setSampleOffsetUs(sampleOffsetUs); - } + sampleQueue.setSampleOffsetUs(sampleOffsetUs); } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/BundledChunkExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/BundledChunkExtractor.java new file mode 100644 index 0000000000..4f0b674135 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/BundledChunkExtractor.java @@ -0,0 +1,248 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.chunk; + +import static com.google.android.exoplayer2.util.Util.castNonNull; + +import android.util.SparseArray; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.extractor.ChunkIndex; +import com.google.android.exoplayer2.extractor.DummyTrackOutput; +import com.google.android.exoplayer2.extractor.Extractor; +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.PositionHolder; +import com.google.android.exoplayer2.extractor.SeekMap; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.extractor.mkv.MatroskaExtractor; +import com.google.android.exoplayer2.extractor.mp4.FragmentedMp4Extractor; +import com.google.android.exoplayer2.upstream.DataReader; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.ParsableByteArray; +import java.io.IOException; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * {@link ChunkExtractor} implementation that uses ExoPlayer app-bundled {@link Extractor + * Extractors}. + */ +public final class BundledChunkExtractor implements ExtractorOutput, ChunkExtractor { + + /** {@link ChunkExtractor.Factory} for instances of this class. */ + public static final ChunkExtractor.Factory FACTORY = + (primaryTrackType, + format, + enableEventMessageTrack, + closedCaptionFormats, + playerEmsgTrackOutput, + playerId) -> { + @Nullable String containerMimeType = format.containerMimeType; + Extractor extractor; + if (MimeTypes.isText(containerMimeType)) { + // Text types do not need an extractor. + return null; + } else if (MimeTypes.isMatroska(containerMimeType)) { + extractor = new MatroskaExtractor(MatroskaExtractor.FLAG_DISABLE_SEEK_FOR_CUES); + } else { + int flags = 0; + if (enableEventMessageTrack) { + flags |= FragmentedMp4Extractor.FLAG_ENABLE_EMSG_TRACK; + } + extractor = + new FragmentedMp4Extractor( + flags, + /* timestampAdjuster= */ null, + /* sideloadedTrack= */ null, + closedCaptionFormats, + playerEmsgTrackOutput); + } + return new BundledChunkExtractor(extractor, primaryTrackType, format); + }; + + private static final PositionHolder POSITION_HOLDER = new PositionHolder(); + + private final Extractor extractor; + private final @C.TrackType int primaryTrackType; + private final Format primaryTrackManifestFormat; + private final SparseArray bindingTrackOutputs; + + private boolean extractorInitialized; + @Nullable private TrackOutputProvider trackOutputProvider; + private long endTimeUs; + private @MonotonicNonNull SeekMap seekMap; + private Format @MonotonicNonNull [] sampleFormats; + + /** + * Creates an instance. + * + * @param extractor The extractor to wrap. + * @param primaryTrackType The {@link C.TrackType type} of the primary track. 
+ * @param primaryTrackManifestFormat A manifest defined {@link Format} whose data should be merged + * into any sample {@link Format} output from the {@link Extractor} for the primary track. + */ + public BundledChunkExtractor( + Extractor extractor, @C.TrackType int primaryTrackType, Format primaryTrackManifestFormat) { + this.extractor = extractor; + this.primaryTrackType = primaryTrackType; + this.primaryTrackManifestFormat = primaryTrackManifestFormat; + bindingTrackOutputs = new SparseArray<>(); + } + + // ChunkExtractor implementation. + + @Override + @Nullable + public ChunkIndex getChunkIndex() { + return seekMap instanceof ChunkIndex ? (ChunkIndex) seekMap : null; + } + + @Override + @Nullable + public Format[] getSampleFormats() { + return sampleFormats; + } + + @Override + public void init( + @Nullable TrackOutputProvider trackOutputProvider, long startTimeUs, long endTimeUs) { + this.trackOutputProvider = trackOutputProvider; + this.endTimeUs = endTimeUs; + if (!extractorInitialized) { + extractor.init(this); + if (startTimeUs != C.TIME_UNSET) { + extractor.seek(/* position= */ 0, startTimeUs); + } + extractorInitialized = true; + } else { + extractor.seek(/* position= */ 0, startTimeUs == C.TIME_UNSET ? 0 : startTimeUs); + for (int i = 0; i < bindingTrackOutputs.size(); i++) { + bindingTrackOutputs.valueAt(i).bind(trackOutputProvider, endTimeUs); + } + } + } + + @Override + public void release() { + extractor.release(); + } + + @Override + public boolean read(ExtractorInput input) throws IOException { + int result = extractor.read(input, POSITION_HOLDER); + Assertions.checkState(result != Extractor.RESULT_SEEK); + return result == Extractor.RESULT_CONTINUE; + } + + // ExtractorOutput implementation. + + @Override + public TrackOutput track(int id, int type) { + BindingTrackOutput bindingTrackOutput = bindingTrackOutputs.get(id); + if (bindingTrackOutput == null) { + // Assert that if we're seeing a new track we have not seen endTracks. + Assertions.checkState(sampleFormats == null); + // TODO: Manifest formats for embedded tracks should also be passed here. + bindingTrackOutput = + new BindingTrackOutput( + id, type, type == primaryTrackType ? primaryTrackManifestFormat : null); + bindingTrackOutput.bind(trackOutputProvider, endTimeUs); + bindingTrackOutputs.put(id, bindingTrackOutput); + } + return bindingTrackOutput; + } + + @Override + public void endTracks() { + Format[] sampleFormats = new Format[bindingTrackOutputs.size()]; + for (int i = 0; i < bindingTrackOutputs.size(); i++) { + sampleFormats[i] = Assertions.checkStateNotNull(bindingTrackOutputs.valueAt(i).sampleFormat); + } + this.sampleFormats = sampleFormats; + } + + @Override + public void seekMap(SeekMap seekMap) { + this.seekMap = seekMap; + } + + // Internal logic. 
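Illustrative sketch (not part of the diff) of the calling convention for this class, roughly what a container chunk load does; the helper name and its parameters are hypothetical, and ExoPlayer's DefaultExtractorInput plus a TrackOutputProvider such as BaseMediaChunkOutput are assumed to be available:

// Hypothetical helper showing how a chunk source drives a BundledChunkExtractor.
// FACTORY returns null only for text container formats, hence the checkNotNull.
static void loadChunk(
    DataSource dataSource,
    DataSpec dataSpec,
    Format format,
    PlayerId playerId,
    ChunkExtractor.TrackOutputProvider output,
    long endTimeUs)
    throws IOException {
  ChunkExtractor chunkExtractor =
      Assertions.checkNotNull(
          BundledChunkExtractor.FACTORY.createProgressiveMediaExtractor(
              C.TRACK_TYPE_VIDEO,
              format,
              /* enableEventMessageTrack= */ false,
              /* closedCaptionFormats= */ Collections.emptyList(),
              /* playerEmsgTrackOutput= */ null,
              playerId));
  try {
    ExtractorInput input =
        new DefaultExtractorInput(dataSource, dataSpec.position, dataSource.open(dataSpec));
    chunkExtractor.init(output, /* startTimeUs= */ C.TIME_UNSET, endTimeUs);
    // read() returns false once the end of the input has been reached.
    while (chunkExtractor.read(input)) {}
  } finally {
    chunkExtractor.release();
    dataSource.close();
  }
}

After the loop, getChunkIndex() and getSampleFormats() expose whatever the wrapped extractor reported.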
+ + private static final class BindingTrackOutput implements TrackOutput { + + private final int id; + private final int type; + @Nullable private final Format manifestFormat; + private final DummyTrackOutput fakeTrackOutput; + + public @MonotonicNonNull Format sampleFormat; + private @MonotonicNonNull TrackOutput trackOutput; + private long endTimeUs; + + public BindingTrackOutput(int id, int type, @Nullable Format manifestFormat) { + this.id = id; + this.type = type; + this.manifestFormat = manifestFormat; + fakeTrackOutput = new DummyTrackOutput(); + } + + public void bind(@Nullable TrackOutputProvider trackOutputProvider, long endTimeUs) { + if (trackOutputProvider == null) { + trackOutput = fakeTrackOutput; + return; + } + this.endTimeUs = endTimeUs; + trackOutput = trackOutputProvider.track(id, type); + if (sampleFormat != null) { + trackOutput.format(sampleFormat); + } + } + + @Override + public void format(Format format) { + sampleFormat = + manifestFormat != null ? format.withManifestFormatInfo(manifestFormat) : format; + castNonNull(trackOutput).format(sampleFormat); + } + + @Override + public int sampleData( + DataReader input, int length, boolean allowEndOfInput, @SampleDataPart int sampleDataPart) + throws IOException { + return castNonNull(trackOutput).sampleData(input, length, allowEndOfInput); + } + + @Override + public void sampleData(ParsableByteArray data, int length, @SampleDataPart int sampleDataPart) { + castNonNull(trackOutput).sampleData(data, length); + } + + @Override + public void sampleMetadata( + long timeUs, + @C.BufferFlags int flags, + int size, + int offset, + @Nullable CryptoData cryptoData) { + if (endTimeUs != C.TIME_UNSET && timeUs >= endTimeUs) { + trackOutput = fakeTrackOutput; + } + castNonNull(trackOutput).sampleMetadata(timeUs, flags, size, offset, cryptoData); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/Chunk.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/Chunk.java index a794f67fe2..43fde00f8a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/Chunk.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/Chunk.java @@ -18,7 +18,9 @@ import android.net.Uri; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.C.DataType; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.source.LoadEventInfo; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSpec; import com.google.android.exoplayer2.upstream.Loader.Loadable; @@ -28,38 +30,33 @@ import java.util.Map; /** - * An abstract base class for {@link Loadable} implementations that load chunks of data required - * for the playback of streams. + * An abstract base class for {@link Loadable} implementations that load chunks of data required for + * the playback of streams. */ public abstract class Chunk implements Loadable { - /** - * The {@link DataSpec} that defines the data to be loaded. - */ + /** Identifies the load task for this loadable. */ + public final long loadTaskId; + /** The {@link DataSpec} that defines the data to be loaded. */ public final DataSpec dataSpec; - /** - * The type of the chunk. One of the {@code DATA_TYPE_*} constants defined in {@link C}. For - * reporting only. 
- */ - public final int type; - /** - * The format of the track to which this chunk belongs, or null if the chunk does not belong to - * a track. - */ + /** The {@link DataType data type} of the chunk. For reporting only. */ + public final @DataType int type; + /** The format of the track to which this chunk belongs. */ public final Format trackFormat; /** - * One of the {@link C} {@code SELECTION_REASON_*} constants if the chunk belongs to a track. - * {@link C#SELECTION_REASON_UNKNOWN} if the chunk does not belong to a track. + * One of the {@link C.SelectionReason selection reasons} if the chunk belongs to a track. {@link + * C#SELECTION_REASON_UNKNOWN} if the chunk does not belong to a track, or if the selection reason + * is unknown. */ - public final int trackSelectionReason; + public final @C.SelectionReason int trackSelectionReason; /** * Optional data associated with the selection of the track to which this chunk belongs. Null if - * the chunk does not belong to a track. + * the chunk does not belong to a track, or if there is no associated track selection data. */ @Nullable public final Object trackSelectionData; /** - * The start time of the media contained by the chunk, or {@link C#TIME_UNSET} if the data - * being loaded does not contain media samples. + * The start time of the media contained by the chunk, or {@link C#TIME_UNSET} if the data being + * loaded does not contain media samples. */ public final long startTimeUs; /** @@ -83,9 +80,9 @@ public abstract class Chunk implements Loadable { public Chunk( DataSource dataSource, DataSpec dataSpec, - int type, + @DataType int type, Format trackFormat, - int trackSelectionReason, + @C.SelectionReason int trackSelectionReason, @Nullable Object trackSelectionData, long startTimeUs, long endTimeUs) { @@ -97,11 +94,10 @@ public Chunk( this.trackSelectionData = trackSelectionData; this.startTimeUs = startTimeUs; this.endTimeUs = endTimeUs; + loadTaskId = LoadEventInfo.getNewId(); } - /** - * Returns the duration of the chunk in microseconds. - */ + /** Returns the duration of the chunk in microseconds. */ public final long getDurationUs() { return endTimeUs - startTimeUs; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkExtractor.java new file mode 100644 index 0000000000..afbf4eebf1 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkExtractor.java @@ -0,0 +1,114 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.source.chunk; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.extractor.ChunkIndex; +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.TrackOutput; +import java.io.IOException; +import java.util.List; + +/** + * Extracts samples and track {@link Format Formats} from chunks. + * + *
      The {@link TrackOutputProvider} passed to {@link #init} provides the {@link TrackOutput + * TrackOutputs} that receive the extracted data. + */ +public interface ChunkExtractor { + + /** Creates {@link ChunkExtractor} instances. */ + interface Factory { + + /** + * Returns a new {@link ChunkExtractor} instance. + * + * @param primaryTrackType The {@link C.TrackType type} of the primary track. + * @param representationFormat The format of the representation to extract from. + * @param enableEventMessageTrack Whether to enable the event message track. + * @param closedCaptionFormats The {@link Format Formats} of the Closed-Caption tracks. + * @param playerEmsgTrackOutput The {@link TrackOutput} for extracted EMSG messages, or null. + * @param playerId The {@link PlayerId} of the player using this chunk extractor. + * @return A new {@link ChunkExtractor} instance, or null if not applicable. + */ + @Nullable + ChunkExtractor createProgressiveMediaExtractor( + @C.TrackType int primaryTrackType, + Format representationFormat, + boolean enableEventMessageTrack, + List closedCaptionFormats, + @Nullable TrackOutput playerEmsgTrackOutput, + PlayerId playerId); + } + + /** Provides {@link TrackOutput} instances to be written to during extraction. */ + interface TrackOutputProvider { + + /** + * Called to get the {@link TrackOutput} for a specific track. + * + *
      The same {@link TrackOutput} is returned if multiple calls are made with the same {@code + * id}. + * + * @param id A track identifier. + * @param type The {@link C.TrackType type} of the track. + * @return The {@link TrackOutput} for the given track identifier. + */ + TrackOutput track(int id, @C.TrackType int type); + } + + /** + * Returns the {@link ChunkIndex} most recently obtained from the chunks, or null if a {@link + * ChunkIndex} has not been obtained. + */ + @Nullable + ChunkIndex getChunkIndex(); + + /** + * Returns the sample {@link Format}s for the tracks identified by the extractor, or null if the + * extractor has not finished identifying tracks. + */ + @Nullable + Format[] getSampleFormats(); + + /** + * Initializes the wrapper to output to {@link TrackOutput}s provided by the specified {@link + * TrackOutputProvider}, and configures the extractor to receive data from a new chunk. + * + * @param trackOutputProvider The provider of {@link TrackOutput}s that will receive sample data. + * @param startTimeUs The start position in the new chunk, or {@link C#TIME_UNSET} to output + * samples from the start of the chunk. + * @param endTimeUs The end position in the new chunk, or {@link C#TIME_UNSET} to output samples + * to the end of the chunk. + */ + void init(@Nullable TrackOutputProvider trackOutputProvider, long startTimeUs, long endTimeUs); + + /** Releases any held resources. */ + void release(); + + /** + * Reads from the given {@link ExtractorInput}. + * + * @param input The input to read from. + * @return Whether there is any data left to extract. Returns false if the end of input has been + * reached. + * @throws IOException If an error occurred reading from or parsing the input. + */ + boolean read(ExtractorInput input) throws IOException; +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkExtractorWrapper.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkExtractorWrapper.java deleted file mode 100644 index c4c8647a55..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkExtractorWrapper.java +++ /dev/null @@ -1,220 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
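The ChunkExtractor interface above replaces the ChunkExtractorWrapper deleted next. A minimal sketch of the intended call sequence (init, repeated read, then query the results), assuming an ExtractorInput already positioned at the chunk; the driver class is illustrative:

    import androidx.annotation.Nullable;
    import com.google.android.exoplayer2.extractor.ChunkIndex;
    import com.google.android.exoplayer2.extractor.ExtractorInput;
    import com.google.android.exoplayer2.source.chunk.ChunkExtractor;
    import java.io.IOException;

    /** Illustrative driver for the ChunkExtractor contract. */
    final class ChunkExtractorDriver {

      /** Feeds {@code input} through {@code chunkExtractor} and returns any chunk index it exposed. */
      @Nullable
      static ChunkIndex extract(
          ChunkExtractor chunkExtractor,
          ChunkExtractor.TrackOutputProvider trackOutputProvider,
          ExtractorInput input,
          long clippedStartTimeUs,
          long clippedEndTimeUs)
          throws IOException {
        // Bind the track outputs and the clipping window; C.TIME_UNSET disables clipping on either end.
        chunkExtractor.init(trackOutputProvider, clippedStartTimeUs, clippedEndTimeUs);
        // read(...) returns false once the end of the input has been reached.
        while (chunkExtractor.read(input)) {}
        // The index (like getSampleFormats()) may still be null if the container exposes none.
        return chunkExtractor.getChunkIndex();
      }

      private ChunkExtractorDriver() {}
    }

ContainerMediaChunk and InitializationChunk below run exactly this loop, additionally checking a volatile loadCanceled flag between reads.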
- */ -package com.google.android.exoplayer2.source.chunk; - -import android.util.SparseArray; -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.extractor.DummyTrackOutput; -import com.google.android.exoplayer2.extractor.Extractor; -import com.google.android.exoplayer2.extractor.ExtractorInput; -import com.google.android.exoplayer2.extractor.ExtractorOutput; -import com.google.android.exoplayer2.extractor.SeekMap; -import com.google.android.exoplayer2.extractor.TrackOutput; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.ParsableByteArray; -import java.io.IOException; - -/** - * An {@link Extractor} wrapper for loading chunks that contain a single primary track, and possibly - * additional embedded tracks. - *
      - * The wrapper allows switching of the {@link TrackOutput}s that receive parsed data. - */ -public final class ChunkExtractorWrapper implements ExtractorOutput { - - /** - * Provides {@link TrackOutput} instances to be written to by the wrapper. - */ - public interface TrackOutputProvider { - - /** - * Called to get the {@link TrackOutput} for a specific track. - *
      - * The same {@link TrackOutput} is returned if multiple calls are made with the same {@code id}. - * - * @param id A track identifier. - * @param type The type of the track. Typically one of the - * {@link com.google.android.exoplayer2.C} {@code TRACK_TYPE_*} constants. - * @return The {@link TrackOutput} for the given track identifier. - */ - TrackOutput track(int id, int type); - - } - - public final Extractor extractor; - - private final int primaryTrackType; - private final Format primaryTrackManifestFormat; - private final SparseArray bindingTrackOutputs; - - private boolean extractorInitialized; - private TrackOutputProvider trackOutputProvider; - private long endTimeUs; - private SeekMap seekMap; - private Format[] sampleFormats; - - /** - * @param extractor The extractor to wrap. - * @param primaryTrackType The type of the primary track. Typically one of the - * {@link com.google.android.exoplayer2.C} {@code TRACK_TYPE_*} constants. - * @param primaryTrackManifestFormat A manifest defined {@link Format} whose data should be merged - * into any sample {@link Format} output from the {@link Extractor} for the primary track. - */ - public ChunkExtractorWrapper(Extractor extractor, int primaryTrackType, - Format primaryTrackManifestFormat) { - this.extractor = extractor; - this.primaryTrackType = primaryTrackType; - this.primaryTrackManifestFormat = primaryTrackManifestFormat; - bindingTrackOutputs = new SparseArray<>(); - } - - /** - * Returns the {@link SeekMap} most recently output by the extractor, or null. - */ - public SeekMap getSeekMap() { - return seekMap; - } - - /** - * Returns the sample {@link Format}s most recently output by the extractor, or null. - */ - public Format[] getSampleFormats() { - return sampleFormats; - } - - /** - * Initializes the wrapper to output to {@link TrackOutput}s provided by the specified {@link - * TrackOutputProvider}, and configures the extractor to receive data from a new chunk. - * - * @param trackOutputProvider The provider of {@link TrackOutput}s that will receive sample data. - * @param startTimeUs The start position in the new chunk, or {@link C#TIME_UNSET} to output - * samples from the start of the chunk. - * @param endTimeUs The end position in the new chunk, or {@link C#TIME_UNSET} to output samples - * to the end of the chunk. - */ - public void init( - @Nullable TrackOutputProvider trackOutputProvider, long startTimeUs, long endTimeUs) { - this.trackOutputProvider = trackOutputProvider; - this.endTimeUs = endTimeUs; - if (!extractorInitialized) { - extractor.init(this); - if (startTimeUs != C.TIME_UNSET) { - extractor.seek(/* position= */ 0, startTimeUs); - } - extractorInitialized = true; - } else { - extractor.seek(/* position= */ 0, startTimeUs == C.TIME_UNSET ? 0 : startTimeUs); - for (int i = 0; i < bindingTrackOutputs.size(); i++) { - bindingTrackOutputs.valueAt(i).bind(trackOutputProvider, endTimeUs); - } - } - } - - // ExtractorOutput implementation. - - @Override - public TrackOutput track(int id, int type) { - BindingTrackOutput bindingTrackOutput = bindingTrackOutputs.get(id); - if (bindingTrackOutput == null) { - // Assert that if we're seeing a new track we have not seen endTracks. - Assertions.checkState(sampleFormats == null); - // TODO: Manifest formats for embedded tracks should also be passed here. - bindingTrackOutput = new BindingTrackOutput(id, type, - type == primaryTrackType ? 
primaryTrackManifestFormat : null); - bindingTrackOutput.bind(trackOutputProvider, endTimeUs); - bindingTrackOutputs.put(id, bindingTrackOutput); - } - return bindingTrackOutput; - } - - @Override - public void endTracks() { - Format[] sampleFormats = new Format[bindingTrackOutputs.size()]; - for (int i = 0; i < bindingTrackOutputs.size(); i++) { - sampleFormats[i] = bindingTrackOutputs.valueAt(i).sampleFormat; - } - this.sampleFormats = sampleFormats; - } - - @Override - public void seekMap(SeekMap seekMap) { - this.seekMap = seekMap; - } - - // Internal logic. - - private static final class BindingTrackOutput implements TrackOutput { - - private final int id; - private final int type; - private final Format manifestFormat; - private final DummyTrackOutput dummyTrackOutput; - - public Format sampleFormat; - private TrackOutput trackOutput; - private long endTimeUs; - - public BindingTrackOutput(int id, int type, Format manifestFormat) { - this.id = id; - this.type = type; - this.manifestFormat = manifestFormat; - dummyTrackOutput = new DummyTrackOutput(); - } - - public void bind(TrackOutputProvider trackOutputProvider, long endTimeUs) { - if (trackOutputProvider == null) { - trackOutput = dummyTrackOutput; - return; - } - this.endTimeUs = endTimeUs; - trackOutput = trackOutputProvider.track(id, type); - if (sampleFormat != null) { - trackOutput.format(sampleFormat); - } - } - - @Override - public void format(Format format) { - sampleFormat = manifestFormat != null ? format.copyWithManifestFormatInfo(manifestFormat) - : format; - trackOutput.format(sampleFormat); - } - - @Override - public int sampleData(ExtractorInput input, int length, boolean allowEndOfInput) - throws IOException, InterruptedException { - return trackOutput.sampleData(input, length, allowEndOfInput); - } - - @Override - public void sampleData(ParsableByteArray data, int length) { - trackOutput.sampleData(data, length); - } - - @Override - public void sampleMetadata(long timeUs, @C.BufferFlags int flags, int size, int offset, - CryptoData cryptoData) { - if (endTimeUs != C.TIME_UNSET && timeUs >= endTimeUs) { - trackOutput = dummyTrackOutput; - } - trackOutput.sampleMetadata(timeUs, flags, size, offset, cryptoData); - } - - } - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkHolder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkHolder.java index d6400c5165..8d47ac7ad9 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkHolder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkHolder.java @@ -17,25 +17,18 @@ import androidx.annotation.Nullable; -/** - * Holds a chunk or an indication that the end of the stream has been reached. - */ +/** Holds a chunk or an indication that the end of the stream has been reached. */ public final class ChunkHolder { /** The chunk. */ @Nullable public Chunk chunk; - /** - * Indicates that the end of the stream has been reached. - */ + /** Indicates that the end of the stream has been reached. */ public boolean endOfStream; - /** - * Clears the holder. - */ + /** Clears the holder. 
*/ public void clear() { chunk = null; endOfStream = false; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkSampleStream.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkSampleStream.java index e2278d7f95..e25dee7fdd 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkSampleStream.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkSampleStream.java @@ -15,7 +15,10 @@ */ package com.google.android.exoplayer2.source.chunk; -import android.os.Looper; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.Math.max; +import static java.lang.Math.min; + import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; @@ -23,13 +26,17 @@ import com.google.android.exoplayer2.SeekParameters; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.drm.DrmSession; +import com.google.android.exoplayer2.drm.DrmSessionEventListener; import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.source.MediaSourceEventListener.EventDispatcher; +import com.google.android.exoplayer2.source.LoadEventInfo; +import com.google.android.exoplayer2.source.MediaLoadData; +import com.google.android.exoplayer2.source.MediaSourceEventListener; import com.google.android.exoplayer2.source.SampleQueue; import com.google.android.exoplayer2.source.SampleStream; import com.google.android.exoplayer2.source.SequenceableLoader; import com.google.android.exoplayer2.upstream.Allocator; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy.LoadErrorInfo; import com.google.android.exoplayer2.upstream.Loader; import com.google.android.exoplayer2.upstream.Loader.LoadErrorAction; import com.google.android.exoplayer2.util.Assertions; @@ -39,13 +46,14 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; /** * A {@link SampleStream} that loads media in {@link Chunk}s, obtained from a {@link ChunkSource}. * May also be configured to expose additional embedded {@link SampleStream}s. */ -public class ChunkSampleStream implements SampleStream, SequenceableLoader, - Loader.Callback, Loader.ReleaseCallback { +public class ChunkSampleStream + implements SampleStream, SequenceableLoader, Loader.Callback, Loader.ReleaseCallback { /** A callback to be notified when a sample stream has finished being released. 
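ChunkHolder above is the out-parameter of ChunkSource.getNextChunk. A condensed sketch of that handshake as continueLoading performs it further down, assuming the List<? extends MediaChunk> generics of the full signature; the wrapper class is illustrative:

    import androidx.annotation.Nullable;
    import com.google.android.exoplayer2.source.chunk.Chunk;
    import com.google.android.exoplayer2.source.chunk.ChunkHolder;
    import com.google.android.exoplayer2.source.chunk.ChunkSource;
    import com.google.android.exoplayer2.source.chunk.MediaChunk;
    import java.util.List;

    /** Illustrative wrapper around the getNextChunk/ChunkHolder handshake. */
    final class NextChunkSketch {

      private final ChunkHolder holder = new ChunkHolder();

      /** Returns the next chunk to load, or null if there is nothing to load right now. */
      @Nullable
      Chunk nextChunk(
          ChunkSource chunkSource,
          long playbackPositionUs,
          long loadPositionUs,
          List<? extends MediaChunk> queue) {
        chunkSource.getNextChunk(playbackPositionUs, loadPositionUs, queue, holder);
        boolean endOfStream = holder.endOfStream;
        @Nullable Chunk chunk = holder.chunk;
        // Always clear the holder so it does not retain a reference to the returned chunk.
        holder.clear();
        if (endOfStream) {
          // The source has no further chunks; a real stream would mark loading as finished here.
          return null;
        }
        // chunk may also be null if the source cannot provide the next chunk yet.
        return chunk;
      }
    }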
*/ public interface ReleaseCallback { @@ -60,14 +68,14 @@ public interface ReleaseCallback { private static final String TAG = "ChunkSampleStream"; - public final int primaryTrackType; + public final @C.TrackType int primaryTrackType; - @Nullable private final int[] embeddedTrackTypes; - @Nullable private final Format[] embeddedTrackFormats; + private final int[] embeddedTrackTypes; + private final Format[] embeddedTrackFormats; private final boolean[] embeddedTracksSelected; private final T chunkSource; private final SequenceableLoader.Callback> callback; - private final EventDispatcher eventDispatcher; + private final MediaSourceEventListener.EventDispatcher mediaSourceEventDispatcher; private final LoadErrorHandlingPolicy loadErrorHandlingPolicy; private final Loader loader; private final ChunkHolder nextChunkHolder; @@ -77,20 +85,20 @@ public interface ReleaseCallback { private final SampleQueue[] embeddedSampleQueues; private final BaseMediaChunkOutput chunkOutput; - private Format primaryDownstreamTrackFormat; + @Nullable private Chunk loadingChunk; + private @MonotonicNonNull Format primaryDownstreamTrackFormat; @Nullable private ReleaseCallback releaseCallback; private long pendingResetPositionUs; private long lastSeekPositionUs; private int nextNotifyPrimaryFormatMediaChunkIndex; + @Nullable private BaseMediaChunk canceledMediaChunk; - /* package */ long decodeOnlyUntilPositionUs; /* package */ boolean loadingFinished; /** * Constructs an instance. * - * @param primaryTrackType The type of the primary track. One of the {@link C} {@code - * TRACK_TYPE_*} constants. + * @param primaryTrackType The {@link C.TrackType type} of the primary track. * @param embeddedTrackTypes The types of any embedded tracks, or null. * @param embeddedTrackFormats The formats of the embedded tracks, or null. * @param chunkSource A {@link ChunkSource} from which chunks to load are obtained. @@ -99,54 +107,51 @@ public interface ReleaseCallback { * @param positionUs The position from which to start loading media. * @param drmSessionManager The {@link DrmSessionManager} to obtain {@link DrmSession DrmSessions} * from. + * @param drmEventDispatcher A dispatcher to notify of {@link DrmSessionEventListener} events. * @param loadErrorHandlingPolicy The {@link LoadErrorHandlingPolicy}. - * @param eventDispatcher A dispatcher to notify of events. + * @param mediaSourceEventDispatcher A dispatcher to notify of {@link MediaSourceEventListener} + * events. */ public ChunkSampleStream( - int primaryTrackType, + @C.TrackType int primaryTrackType, @Nullable int[] embeddedTrackTypes, @Nullable Format[] embeddedTrackFormats, T chunkSource, Callback> callback, Allocator allocator, long positionUs, - DrmSessionManager drmSessionManager, + DrmSessionManager drmSessionManager, + DrmSessionEventListener.EventDispatcher drmEventDispatcher, LoadErrorHandlingPolicy loadErrorHandlingPolicy, - EventDispatcher eventDispatcher) { + MediaSourceEventListener.EventDispatcher mediaSourceEventDispatcher) { this.primaryTrackType = primaryTrackType; - this.embeddedTrackTypes = embeddedTrackTypes; - this.embeddedTrackFormats = embeddedTrackFormats; + this.embeddedTrackTypes = embeddedTrackTypes == null ? new int[0] : embeddedTrackTypes; + this.embeddedTrackFormats = embeddedTrackFormats == null ? 
new Format[0] : embeddedTrackFormats; this.chunkSource = chunkSource; this.callback = callback; - this.eventDispatcher = eventDispatcher; + this.mediaSourceEventDispatcher = mediaSourceEventDispatcher; this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; - loader = new Loader("Loader:ChunkSampleStream"); + loader = new Loader("ChunkSampleStream"); nextChunkHolder = new ChunkHolder(); mediaChunks = new ArrayList<>(); readOnlyMediaChunks = Collections.unmodifiableList(mediaChunks); - int embeddedTrackCount = embeddedTrackTypes == null ? 0 : embeddedTrackTypes.length; + int embeddedTrackCount = this.embeddedTrackTypes.length; embeddedSampleQueues = new SampleQueue[embeddedTrackCount]; embeddedTracksSelected = new boolean[embeddedTrackCount]; int[] trackTypes = new int[1 + embeddedTrackCount]; SampleQueue[] sampleQueues = new SampleQueue[1 + embeddedTrackCount]; - primarySampleQueue = new SampleQueue( - allocator, - /* playbackLooper= */ Assertions.checkNotNull(Looper.myLooper()), - drmSessionManager); + primarySampleQueue = + SampleQueue.createWithDrm(allocator, drmSessionManager, drmEventDispatcher); trackTypes[0] = primaryTrackType; sampleQueues[0] = primarySampleQueue; for (int i = 0; i < embeddedTrackCount; i++) { - SampleQueue sampleQueue = - new SampleQueue( - allocator, - /* playbackLooper= */ Assertions.checkNotNull(Looper.myLooper()), - DrmSessionManager.getDummyDrmSessionManager()); + SampleQueue sampleQueue = SampleQueue.createWithoutDrm(allocator); embeddedSampleQueues[i] = sampleQueue; sampleQueues[i + 1] = sampleQueue; - trackTypes[i + 1] = embeddedTrackTypes[i]; + trackTypes[i + 1] = this.embeddedTrackTypes[i]; } chunkOutput = new BaseMediaChunkOutput(trackTypes, sampleQueues); @@ -200,9 +205,7 @@ public EmbeddedSampleStream selectEmbeddedTrack(long positionUs, int trackType) throw new IllegalStateException(); } - /** - * Returns the {@link ChunkSource} used by this stream. - */ + /** Returns the {@link ChunkSource} used by this stream. */ public T getChunkSource() { return chunkSource; } @@ -222,12 +225,14 @@ public long getBufferedPositionUs() { } else { long bufferedPositionUs = lastSeekPositionUs; BaseMediaChunk lastMediaChunk = getLastMediaChunk(); - BaseMediaChunk lastCompletedMediaChunk = lastMediaChunk.isLoadCompleted() ? lastMediaChunk - : mediaChunks.size() > 1 ? mediaChunks.get(mediaChunks.size() - 2) : null; + BaseMediaChunk lastCompletedMediaChunk = + lastMediaChunk.isLoadCompleted() + ? lastMediaChunk + : mediaChunks.size() > 1 ? mediaChunks.get(mediaChunks.size() - 2) : null; if (lastCompletedMediaChunk != null) { - bufferedPositionUs = Math.max(bufferedPositionUs, lastCompletedMediaChunk.endTimeUs); + bufferedPositionUs = max(bufferedPositionUs, lastCompletedMediaChunk.endTimeUs); } - return Math.max(bufferedPositionUs, primarySampleQueue.getLargestQueuedTimestampUs()); + return max(bufferedPositionUs, primarySampleQueue.getLargestQueuedTimestampUs()); } } @@ -257,7 +262,7 @@ public void seekToUs(long positionUs) { } // Detect whether the seek is to the start of a chunk that's at least partially buffered. - BaseMediaChunk seekToMediaChunk = null; + @Nullable BaseMediaChunk seekToMediaChunk = null; for (int i = 0; i < mediaChunks.size(); i++) { BaseMediaChunk mediaChunk = mediaChunks.get(i); long mediaChunkStartTimeUs = mediaChunk.startTimeUs; @@ -275,14 +280,12 @@ public void seekToUs(long positionUs) { if (seekToMediaChunk != null) { // When seeking to the start of a chunk we use the index of the first sample in the chunk // rather than the seek position. 
This ensures we seek to the keyframe at the start of the - // chunk even if the sample timestamps are slightly offset from the chunk start times. + // chunk even if its timestamp is slightly earlier than the advertised chunk start time. seekInsideBuffer = primarySampleQueue.seekTo(seekToMediaChunk.getFirstSampleIndex(0)); - decodeOnlyUntilPositionUs = 0; } else { seekInsideBuffer = primarySampleQueue.seekTo( positionUs, /* allowTimeBeyondBuffer= */ positionUs < getNextLoadPositionUs()); - decodeOnlyUntilPositionUs = lastSeekPositionUs; } if (seekInsideBuffer) { @@ -301,13 +304,15 @@ public void seekToUs(long positionUs) { mediaChunks.clear(); nextNotifyPrimaryFormatMediaChunkIndex = 0; if (loader.isLoading()) { + // Discard as much as we can synchronously. + primarySampleQueue.discardToEnd(); + for (SampleQueue embeddedSampleQueue : embeddedSampleQueues) { + embeddedSampleQueue.discardToEnd(); + } loader.cancelLoading(); } else { loader.clearFatalError(); - primarySampleQueue.reset(); - for (SampleQueue embeddedSampleQueue : embeddedSampleQueues) { - embeddedSampleQueue.reset(); - } + resetSampleQueues(); } } } @@ -347,6 +352,7 @@ public void onLoaderReleased() { for (SampleQueue embeddedSampleQueue : embeddedSampleQueues) { embeddedSampleQueue.release(); } + chunkSource.release(); if (releaseCallback != null) { releaseCallback.onSampleStreamReleased(this); } @@ -369,15 +375,21 @@ public void maybeThrowError() throws IOException { } @Override - public int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, - boolean formatRequired) { + public int readData( + FormatHolder formatHolder, DecoderInputBuffer buffer, @ReadFlags int readFlags) { if (isPendingReset()) { return C.RESULT_NOTHING_READ; } + if (canceledMediaChunk != null + && canceledMediaChunk.getFirstSampleIndex(/* trackIndex= */ 0) + <= primarySampleQueue.getReadIndex()) { + // Don't read into chunk that's going to be discarded. + // TODO: Support splicing to allow this. See [internal b/161130873]. + return C.RESULT_NOTHING_READ; + } maybeNotifyPrimaryTrackFormatChanged(); - return primarySampleQueue.read( - formatHolder, buffer, formatRequired, loadingFinished, decodeOnlyUntilPositionUs); + return primarySampleQueue.read(formatHolder, buffer, readFlags, loadingFinished); } @Override @@ -385,12 +397,16 @@ public int skipData(long positionUs) { if (isPendingReset()) { return 0; } - int skipCount; - if (loadingFinished && positionUs > primarySampleQueue.getLargestQueuedTimestampUs()) { - skipCount = primarySampleQueue.advanceToEnd(); - } else { - skipCount = primarySampleQueue.advanceTo(positionUs); + int skipCount = primarySampleQueue.getSkipCount(positionUs, loadingFinished); + if (canceledMediaChunk != null) { + // Don't skip into chunk that's going to be discarded. + // TODO: Support splicing to allow this. See [internal b/161130873]. 
+ int maxSkipCount = + canceledMediaChunk.getFirstSampleIndex(/* trackIndex= */ 0) + - primarySampleQueue.getReadIndex(); + skipCount = min(skipCount, maxSkipCount); } + primarySampleQueue.skip(skipCount); maybeNotifyPrimaryTrackFormatChanged(); return skipCount; } @@ -399,45 +415,63 @@ public int skipData(long positionUs) { @Override public void onLoadCompleted(Chunk loadable, long elapsedRealtimeMs, long loadDurationMs) { + loadingChunk = null; chunkSource.onChunkLoadCompleted(loadable); - eventDispatcher.loadCompleted( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + mediaSourceEventDispatcher.loadCompleted( + loadEventInfo, loadable.type, primaryTrackType, loadable.trackFormat, loadable.trackSelectionReason, loadable.trackSelectionData, loadable.startTimeUs, - loadable.endTimeUs, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded()); + loadable.endTimeUs); callback.onContinueLoadingRequested(this); } @Override - public void onLoadCanceled(Chunk loadable, long elapsedRealtimeMs, long loadDurationMs, - boolean released) { - eventDispatcher.loadCanceled( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), + public void onLoadCanceled( + Chunk loadable, long elapsedRealtimeMs, long loadDurationMs, boolean released) { + loadingChunk = null; + canceledMediaChunk = null; + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + mediaSourceEventDispatcher.loadCanceled( + loadEventInfo, loadable.type, primaryTrackType, loadable.trackFormat, loadable.trackSelectionReason, loadable.trackSelectionData, loadable.startTimeUs, - loadable.endTimeUs, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded()); + loadable.endTimeUs); if (!released) { - primarySampleQueue.reset(); - for (SampleQueue embeddedSampleQueue : embeddedSampleQueues) { - embeddedSampleQueue.reset(); + if (isPendingReset()) { + resetSampleQueues(); + } else if (isMediaChunk(loadable)) { + // TODO: Support splicing to keep data from canceled chunk. See [internal b/161130873]. + discardUpstreamMediaChunksFromIndex(mediaChunks.size() - 1); + if (mediaChunks.isEmpty()) { + pendingResetPositionUs = lastSeekPositionUs; + } } callback.onContinueLoadingRequested(this); } @@ -455,13 +489,30 @@ public LoadErrorAction onLoadError( int lastChunkIndex = mediaChunks.size() - 1; boolean cancelable = bytesLoaded == 0 || !isMediaChunk || !haveReadFromMediaChunk(lastChunkIndex); - long blacklistDurationMs = - cancelable - ? 
loadErrorHandlingPolicy.getBlacklistDurationMsFor( - loadable.type, loadDurationMs, error, errorCount) - : C.TIME_UNSET; - LoadErrorAction loadErrorAction = null; - if (chunkSource.onChunkLoadError(loadable, cancelable, error, blacklistDurationMs)) { + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + bytesLoaded); + MediaLoadData mediaLoadData = + new MediaLoadData( + loadable.type, + primaryTrackType, + loadable.trackFormat, + loadable.trackSelectionReason, + loadable.trackSelectionData, + Util.usToMs(loadable.startTimeUs), + Util.usToMs(loadable.endTimeUs)); + LoadErrorInfo loadErrorInfo = + new LoadErrorInfo(loadEventInfo, mediaLoadData, error, errorCount); + + @Nullable LoadErrorAction loadErrorAction = null; + if (chunkSource.onChunkLoadError( + loadable, cancelable, loadErrorInfo, loadErrorHandlingPolicy)) { if (cancelable) { loadErrorAction = Loader.DONT_RETRY; if (isMediaChunk) { @@ -478,9 +529,7 @@ public LoadErrorAction onLoadError( if (loadErrorAction == null) { // The load was not cancelled. Either the load must be retried or the error propagated. - long retryDelayMs = - loadErrorHandlingPolicy.getRetryDelayMsFor( - loadable.type, loadDurationMs, error, errorCount); + long retryDelayMs = loadErrorHandlingPolicy.getRetryDelayMsFor(loadErrorInfo); loadErrorAction = retryDelayMs != C.TIME_UNSET ? Loader.createRetryAction(/* resetErrorCount= */ false, retryDelayMs) @@ -488,10 +537,8 @@ public LoadErrorAction onLoadError( } boolean canceled = !loadErrorAction.isRetry(); - eventDispatcher.loadError( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), + mediaSourceEventDispatcher.loadError( + loadEventInfo, loadable.type, primaryTrackType, loadable.trackFormat, @@ -499,12 +546,11 @@ public LoadErrorAction onLoadError( loadable.trackSelectionData, loadable.startTimeUs, loadable.endTimeUs, - elapsedRealtimeMs, - loadDurationMs, - bytesLoaded, error, canceled); if (canceled) { + loadingChunk = null; + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); callback.onContinueLoadingRequested(this); } return loadErrorAction; @@ -530,7 +576,7 @@ public boolean continueLoading(long positionUs) { } chunkSource.getNextChunk(positionUs, loadPositionUs, chunkQueue, nextChunkHolder); boolean endOfStream = nextChunkHolder.endOfStream; - Chunk loadable = nextChunkHolder.chunk; + @Nullable Chunk loadable = nextChunkHolder.chunk; nextChunkHolder.clear(); if (endOfStream) { @@ -543,12 +589,20 @@ public boolean continueLoading(long positionUs) { return false; } + loadingChunk = loadable; if (isMediaChunk(loadable)) { BaseMediaChunk mediaChunk = (BaseMediaChunk) loadable; if (pendingReset) { - boolean resetToMediaChunk = mediaChunk.startTimeUs == pendingResetPositionUs; - // Only enable setting of the decode only flag if we're not resetting to a chunk boundary. - decodeOnlyUntilPositionUs = resetToMediaChunk ? 0 : pendingResetPositionUs; + // Only set the queue start times if we're not seeking to a chunk boundary. If we are + // seeking to a chunk boundary then we want the queue to pass through all of the samples in + // the chunk. Doing this ensures we'll always output the keyframe at the start of the chunk, + // even if its timestamp is slightly earlier than the advertised chunk start time. 
+ if (mediaChunk.startTimeUs != pendingResetPositionUs) { + primarySampleQueue.setStartTimeUs(pendingResetPositionUs); + for (SampleQueue embeddedSampleQueue : embeddedSampleQueues) { + embeddedSampleQueue.setStartTimeUs(pendingResetPositionUs); + } + } pendingResetPositionUs = C.TIME_UNSET; } mediaChunk.init(chunkOutput); @@ -559,16 +613,15 @@ public boolean continueLoading(long positionUs) { long elapsedRealtimeMs = loader.startLoading( loadable, this, loadErrorHandlingPolicy.getMinimumLoadableRetryCount(loadable.type)); - eventDispatcher.loadStarted( - loadable.dataSpec, + mediaSourceEventDispatcher.loadStarted( + new LoadEventInfo(loadable.loadTaskId, loadable.dataSpec, elapsedRealtimeMs), loadable.type, primaryTrackType, loadable.trackFormat, loadable.trackSelectionReason, loadable.trackSelectionData, loadable.startTimeUs, - loadable.endTimeUs, - elapsedRealtimeMs); + loadable.endTimeUs); return true; } @@ -588,24 +641,46 @@ public long getNextLoadPositionUs() { @Override public void reevaluateBuffer(long positionUs) { - if (loader.isLoading() || loader.hasFatalError() || isPendingReset()) { + if (loader.hasFatalError() || isPendingReset()) { return; } - int currentQueueSize = mediaChunks.size(); - int preferredQueueSize = chunkSource.getPreferredQueueSize(positionUs, readOnlyMediaChunks); - if (currentQueueSize <= preferredQueueSize) { + if (loader.isLoading()) { + Chunk loadingChunk = checkNotNull(this.loadingChunk); + if (isMediaChunk(loadingChunk) + && haveReadFromMediaChunk(/* mediaChunkIndex= */ mediaChunks.size() - 1)) { + // Can't cancel anymore because the renderers have read from this chunk. + return; + } + if (chunkSource.shouldCancelLoad(positionUs, loadingChunk, readOnlyMediaChunks)) { + loader.cancelLoading(); + if (isMediaChunk(loadingChunk)) { + canceledMediaChunk = (BaseMediaChunk) loadingChunk; + } + } return; } - int newQueueSize = currentQueueSize; + int preferredQueueSize = chunkSource.getPreferredQueueSize(positionUs, readOnlyMediaChunks); + if (preferredQueueSize < mediaChunks.size()) { + discardUpstream(preferredQueueSize); + } + } + + private void discardUpstream(int preferredQueueSize) { + Assertions.checkState(!loader.isLoading()); + + int currentQueueSize = mediaChunks.size(); + int newQueueSize = C.LENGTH_UNSET; for (int i = preferredQueueSize; i < currentQueueSize; i++) { if (!haveReadFromMediaChunk(i)) { + // TODO: Sparse tracks (e.g. ESMG) may prevent discarding in almost all cases because it + // means that most chunks have been read from already. See [internal b/161126666]. newQueueSize = i; break; } } - if (newQueueSize == currentQueueSize) { + if (newQueueSize == C.LENGTH_UNSET) { return; } @@ -615,15 +690,21 @@ public void reevaluateBuffer(long positionUs) { pendingResetPositionUs = lastSeekPositionUs; } loadingFinished = false; - eventDispatcher.upstreamDiscarded(primaryTrackType, firstRemovedChunk.startTimeUs, endTimeUs); + mediaSourceEventDispatcher.upstreamDiscarded( + primaryTrackType, firstRemovedChunk.startTimeUs, endTimeUs); } - // Internal methods - private boolean isMediaChunk(Chunk chunk) { return chunk instanceof BaseMediaChunk; } + private void resetSampleQueues() { + primarySampleQueue.reset(); + for (SampleQueue embeddedSampleQueue : embeddedSampleQueues) { + embeddedSampleQueue.reset(); + } + } + /** Returns whether samples have been read from media chunk at given index. 
*/ private boolean haveReadFromMediaChunk(int mediaChunkIndex) { BaseMediaChunk mediaChunk = mediaChunks.get(mediaChunkIndex); @@ -647,7 +728,7 @@ private void discardDownstreamMediaChunks(int discardToSampleIndex) { primarySampleIndexToMediaChunkIndex(discardToSampleIndex, /* minChunkIndex= */ 0); // Don't discard any chunks that we haven't reported the primary format change for yet. discardToMediaChunkIndex = - Math.min(discardToMediaChunkIndex, nextNotifyPrimaryFormatMediaChunkIndex); + min(discardToMediaChunkIndex, nextNotifyPrimaryFormatMediaChunkIndex); if (discardToMediaChunkIndex > 0) { Util.removeRange(mediaChunks, /* fromIndex= */ 0, /* toIndex= */ discardToMediaChunkIndex); nextNotifyPrimaryFormatMediaChunkIndex -= discardToMediaChunkIndex; @@ -668,8 +749,11 @@ private void maybeNotifyPrimaryTrackFormatChanged(int mediaChunkReadIndex) { BaseMediaChunk currentChunk = mediaChunks.get(mediaChunkReadIndex); Format trackFormat = currentChunk.trackFormat; if (!trackFormat.equals(primaryDownstreamTrackFormat)) { - eventDispatcher.downstreamFormatChanged(primaryTrackType, trackFormat, - currentChunk.trackSelectionReason, currentChunk.trackSelectionData, + mediaSourceEventDispatcher.downstreamFormatChanged( + primaryTrackType, + trackFormat, + currentChunk.trackSelectionReason, + currentChunk.trackSelectionData, currentChunk.startTimeUs); } primaryDownstreamTrackFormat = trackFormat; @@ -711,7 +795,7 @@ private BaseMediaChunk discardUpstreamMediaChunksFromIndex(int chunkIndex) { BaseMediaChunk firstRemovedChunk = mediaChunks.get(chunkIndex); Util.removeRange(mediaChunks, /* fromIndex= */ chunkIndex, /* toIndex= */ mediaChunks.size()); nextNotifyPrimaryFormatMediaChunkIndex = - Math.max(nextNotifyPrimaryFormatMediaChunkIndex, mediaChunks.size()); + max(nextNotifyPrimaryFormatMediaChunkIndex, mediaChunks.size()); primarySampleQueue.discardUpstreamSamples(firstRemovedChunk.getFirstSampleIndex(0)); for (int i = 0; i < embeddedSampleQueues.length; i++) { embeddedSampleQueues[i].discardUpstreamSamples(firstRemovedChunk.getFirstSampleIndex(i + 1)); @@ -719,9 +803,7 @@ private BaseMediaChunk discardUpstreamMediaChunksFromIndex(int chunkIndex) { return firstRemovedChunk; } - /** - * A {@link SampleStream} embedded in a {@link ChunkSampleStream}. - */ + /** A {@link SampleStream} embedded in a {@link ChunkSampleStream}. */ public final class EmbeddedSampleStream implements SampleStream { public final ChunkSampleStream parent; @@ -747,34 +829,42 @@ public int skipData(long positionUs) { if (isPendingReset()) { return 0; } - maybeNotifyDownstreamFormat(); - int skipCount; - if (loadingFinished && positionUs > sampleQueue.getLargestQueuedTimestampUs()) { - skipCount = sampleQueue.advanceToEnd(); - } else { - skipCount = sampleQueue.advanceTo(positionUs); + int skipCount = sampleQueue.getSkipCount(positionUs, loadingFinished); + if (canceledMediaChunk != null) { + // Don't skip into chunk that's going to be discarded. + // TODO: Support splicing to allow this. See [internal b/161130873]. + int maxSkipCount = + canceledMediaChunk.getFirstSampleIndex(/* trackIndex= */ 1 + index) + - sampleQueue.getReadIndex(); + skipCount = min(skipCount, maxSkipCount); + } + sampleQueue.skip(skipCount); + if (skipCount > 0) { + maybeNotifyDownstreamFormat(); } return skipCount; } @Override - public void maybeThrowError() throws IOException { + public void maybeThrowError() { // Do nothing. Errors will be thrown from the primary stream. 
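reevaluateBuffer above now asks the ChunkSource whether the in-flight chunk should be abandoned via shouldCancelLoad. A hypothetical stand-in for that decision; real sources typically defer to their track selection, and the buffer threshold below is an arbitrary value for illustration:

    import com.google.android.exoplayer2.source.chunk.Chunk;
    import com.google.android.exoplayer2.source.chunk.MediaChunk;
    import java.util.List;

    /** Illustrative cancelation policy matching the shape of ChunkSource.shouldCancelLoad. */
    final class CancelLoadSketch {

      private static final long MIN_BUFFER_FOR_CANCEL_US = 10_000_000; // 10 seconds, illustrative.

      static boolean shouldCancelLoad(
          long playbackPositionUs, Chunk loadingChunk, List<? extends MediaChunk> queue) {
        if (!(loadingChunk instanceof MediaChunk)) {
          // Only media chunks are worth replacing in this sketch; a real implementation would
          // also inspect the queue and current bandwidth estimates.
          return false;
        }
        // Cancel only while playback is still comfortably behind the chunk being loaded, so a
        // better-suited replacement could realistically be fetched before it is needed.
        return loadingChunk.startTimeUs - playbackPositionUs > MIN_BUFFER_FOR_CANCEL_US;
      }

      private CancelLoadSketch() {}
    }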
} @Override - public int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, - boolean formatRequired) { + public int readData( + FormatHolder formatHolder, DecoderInputBuffer buffer, @ReadFlags int readFlags) { if (isPendingReset()) { return C.RESULT_NOTHING_READ; } + if (canceledMediaChunk != null + && canceledMediaChunk.getFirstSampleIndex(/* trackIndex= */ 1 + index) + <= sampleQueue.getReadIndex()) { + // Don't read into chunk that's going to be discarded. + // TODO: Support splicing to allow this. See [internal b/161130873]. + return C.RESULT_NOTHING_READ; + } maybeNotifyDownstreamFormat(); - return sampleQueue.read( - formatHolder, - buffer, - formatRequired, - loadingFinished, - decodeOnlyUntilPositionUs); + return sampleQueue.read(formatHolder, buffer, readFlags, loadingFinished); } public void release() { @@ -784,7 +874,7 @@ public void release() { private void maybeNotifyDownstreamFormat() { if (!notifiedDownstreamFormat) { - eventDispatcher.downstreamFormatChanged( + mediaSourceEventDispatcher.downstreamFormatChanged( embeddedTrackTypes[index], embeddedTrackFormats[index], C.SELECTION_REASON_UNKNOWN, @@ -794,5 +884,4 @@ private void maybeNotifyDownstreamFormat() { } } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkSource.java index b119cad5b0..b2a46a0cd4 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ChunkSource.java @@ -15,14 +15,12 @@ */ package com.google.android.exoplayer2.source.chunk; -import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.SeekParameters; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; import java.io.IOException; import java.util.List; -/** - * A provider of {@link Chunk}s for a {@link ChunkSampleStream} to load. - */ +/** A provider of {@link Chunk}s for a {@link ChunkSampleStream} to load. */ public interface ChunkSource { /** @@ -38,8 +36,6 @@ public interface ChunkSource { /** * If the source is currently having difficulty providing chunks, then this method throws the * underlying error. Otherwise does nothing. - *
      - * This method should only be called after the source has been prepared. * * @throws IOException The underlying error. */ @@ -47,17 +43,30 @@ public interface ChunkSource { /** * Evaluates whether {@link MediaChunk}s should be removed from the back of the queue. - *
      - * Removing {@link MediaChunk}s from the back of the queue can be useful if they could be replaced - * with chunks of a significantly higher quality (e.g. because the available bandwidth has - * substantially increased). * - * @param playbackPositionUs The current playback position. + *
      Removing {@link MediaChunk}s from the back of the queue can be useful if they could be + * replaced with chunks of a significantly higher quality (e.g. because the available bandwidth + * has substantially increased). + * + *
      Will only be called if no {@link MediaChunk} in the queue is currently loading. + * + * @param playbackPositionUs The current playback position, in microseconds. * @param queue The queue of buffered {@link MediaChunk}s. * @return The preferred queue size. */ int getPreferredQueueSize(long playbackPositionUs, List queue); + /** + * Returns whether an ongoing load of a chunk should be canceled. + * + * @param playbackPositionUs The current playback position, in microseconds. + * @param loadingChunk The currently loading {@link Chunk}. + * @param queue The queue of buffered {@link MediaChunk MediaChunks}. + * @return Whether the ongoing load of {@code loadingChunk} should be canceled. + */ + boolean shouldCancelLoad( + long playbackPositionUs, Chunk loadingChunk, List queue); + /** * Returns the next chunk to load. * @@ -85,8 +94,6 @@ void getNextChunk( * Called when the {@link ChunkSampleStream} has finished loading a chunk obtained from this * source. * - *
      This method should only be called when the source is enabled. - * * @param chunk The chunk whose load has been completed. */ void onChunkLoadCompleted(Chunk chunk); @@ -95,17 +102,22 @@ void getNextChunk( * Called when the {@link ChunkSampleStream} encounters an error loading a chunk obtained from * this source. * - *
      This method should only be called when the source is enabled. - * * @param chunk The chunk whose load encountered the error. * @param cancelable Whether the load can be canceled. - * @param e The error. - * @param blacklistDurationMs The duration for which the associated track may be blacklisted, or - * {@link C#TIME_UNSET} if the track may not be blacklisted. + * @param loadErrorInfo The load error info. + * @param loadErrorHandlingPolicy The load error handling policy to customize the behaviour of + * handling the load error. * @return Whether the load should be canceled so that a replacement chunk can be loaded instead. * Must be {@code false} if {@code cancelable} is {@code false}. If {@code true}, {@link * #getNextChunk(long, long, List, ChunkHolder)} will be called to obtain the replacement * chunk. */ - boolean onChunkLoadError(Chunk chunk, boolean cancelable, Exception e, long blacklistDurationMs); + boolean onChunkLoadError( + Chunk chunk, + boolean cancelable, + LoadErrorHandlingPolicy.LoadErrorInfo loadErrorInfo, + LoadErrorHandlingPolicy loadErrorHandlingPolicy); + + /** Releases any held resources. */ + void release(); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ContainerMediaChunk.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ContainerMediaChunk.java index 9dffe09194..154cccb786 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ContainerMediaChunk.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/ContainerMediaChunk.java @@ -15,29 +15,24 @@ */ package com.google.android.exoplayer2.source.chunk; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.extractor.DefaultExtractorInput; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; -import com.google.android.exoplayer2.extractor.PositionHolder; -import com.google.android.exoplayer2.source.chunk.ChunkExtractorWrapper.TrackOutputProvider; +import com.google.android.exoplayer2.source.chunk.ChunkExtractor.TrackOutputProvider; import com.google.android.exoplayer2.upstream.DataSource; +import com.google.android.exoplayer2.upstream.DataSourceUtil; import com.google.android.exoplayer2.upstream.DataSpec; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.Util; import java.io.IOException; -/** - * A {@link BaseMediaChunk} that uses an {@link Extractor} to decode sample data. - */ +/** A {@link BaseMediaChunk} that uses an {@link Extractor} to decode sample data. */ public class ContainerMediaChunk extends BaseMediaChunk { - private static final PositionHolder DUMMY_POSITION_HOLDER = new PositionHolder(); - private final int chunkCount; private final long sampleOffsetUs; - private final ChunkExtractorWrapper extractorWrapper; + private final ChunkExtractor chunkExtractor; private long nextLoadPosition; private volatile boolean loadCanceled; @@ -60,14 +55,14 @@ public class ContainerMediaChunk extends BaseMediaChunk { * instance. Normally equal to one, but may be larger if multiple chunks as defined by the * underlying media are being merged into a single load. * @param sampleOffsetUs An offset to add to the sample timestamps parsed by the extractor. - * @param extractorWrapper A wrapped extractor to use for parsing the data. 
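onChunkLoadError above now receives a LoadErrorInfo plus the LoadErrorHandlingPolicy instead of a precomputed blacklist duration. One hypothetical way an implementation could honor the contract, using only the policy method already exercised in this diff (getRetryDelayMsFor); the heuristic itself is illustrative:

    import com.google.android.exoplayer2.C;
    import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy;
    import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy.LoadErrorInfo;

    /** Illustrative answer to the reworked ChunkSource.onChunkLoadError contract. */
    final class ChunkErrorSketch {

      /**
       * Returns whether a failed chunk load should be abandoned so a replacement can be requested.
       * Must return false when {@code cancelable} is false.
       */
      static boolean shouldReplaceFailedChunk(
          boolean cancelable,
          LoadErrorInfo loadErrorInfo,
          LoadErrorHandlingPolicy loadErrorHandlingPolicy) {
        if (!cancelable) {
          return false;
        }
        // Abandon the chunk only if the policy would not retry the load anyway
        // (C.TIME_UNSET means "do not retry"), e.g. so another representation can be tried.
        return loadErrorHandlingPolicy.getRetryDelayMsFor(loadErrorInfo) == C.TIME_UNSET;
      }

      private ChunkErrorSketch() {}
    }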
+ * @param chunkExtractor A wrapped extractor to use for parsing the data. */ public ContainerMediaChunk( DataSource dataSource, DataSpec dataSpec, Format trackFormat, - int trackSelectionReason, - Object trackSelectionData, + @C.SelectionReason int trackSelectionReason, + @Nullable Object trackSelectionData, long startTimeUs, long endTimeUs, long clippedStartTimeUs, @@ -75,7 +70,7 @@ public ContainerMediaChunk( long chunkIndex, int chunkCount, long sampleOffsetUs, - ChunkExtractorWrapper extractorWrapper) { + ChunkExtractor chunkExtractor) { super( dataSource, dataSpec, @@ -89,7 +84,7 @@ public ContainerMediaChunk( chunkIndex); this.chunkCount = chunkCount; this.sampleOffsetUs = sampleOffsetUs; - this.extractorWrapper = extractorWrapper; + this.chunkExtractor = chunkExtractor; } @Override @@ -111,12 +106,12 @@ public final void cancelLoad() { @SuppressWarnings("NonAtomicVolatileUpdate") @Override - public final void load() throws IOException, InterruptedException { + public final void load() throws IOException { if (nextLoadPosition == 0) { // Configure the output and set it as the target for the extractor wrapper. BaseMediaChunkOutput output = getOutput(); output.setSampleOffsetUs(sampleOffsetUs); - extractorWrapper.init( + chunkExtractor.init( getTrackOutputProvider(output), clippedStartTimeUs == C.TIME_UNSET ? C.TIME_UNSET : (clippedStartTimeUs - sampleOffsetUs), clippedEndTimeUs == C.TIME_UNSET ? C.TIME_UNSET : (clippedEndTimeUs - sampleOffsetUs)); @@ -126,22 +121,17 @@ public final void load() throws IOException, InterruptedException { DataSpec loadDataSpec = dataSpec.subrange(nextLoadPosition); ExtractorInput input = new DefaultExtractorInput( - dataSource, loadDataSpec.absoluteStreamPosition, dataSource.open(loadDataSpec)); + dataSource, loadDataSpec.position, dataSource.open(loadDataSpec)); // Load and decode the sample data. try { - Extractor extractor = extractorWrapper.extractor; - int result = Extractor.RESULT_CONTINUE; - while (result == Extractor.RESULT_CONTINUE && !loadCanceled) { - result = extractor.read(input, DUMMY_POSITION_HOLDER); - } - Assertions.checkState(result != Extractor.RESULT_SEEK); + while (!loadCanceled && chunkExtractor.read(input)) {} } finally { - nextLoadPosition = input.getPosition() - dataSpec.absoluteStreamPosition; + nextLoadPosition = input.getPosition() - dataSpec.position; } } finally { - Util.closeQuietly(dataSource); + DataSourceUtil.closeQuietly(dataSource); } - loadCompleted = true; + loadCompleted = !loadCanceled; } /** diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/DataChunk.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/DataChunk.java index f3bea8aeb5..7b106c0423 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/DataChunk.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/DataChunk.java @@ -17,16 +17,18 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.C.DataType; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.upstream.DataSource; +import com.google.android.exoplayer2.upstream.DataSourceUtil; import com.google.android.exoplayer2.upstream.DataSpec; import com.google.android.exoplayer2.util.Util; import java.io.IOException; import java.util.Arrays; /** - * A base class for {@link Chunk} implementations where the data should be loaded into a - * {@code byte[]} before being consumed. 
+ * A base class for {@link Chunk} implementations where the data should be loaded into a {@code + * byte[]} before being consumed. */ public abstract class DataChunk extends Chunk { @@ -48,20 +50,27 @@ public abstract class DataChunk extends Chunk { public DataChunk( DataSource dataSource, DataSpec dataSpec, - int type, + @DataType int type, Format trackFormat, - int trackSelectionReason, + @C.SelectionReason int trackSelectionReason, @Nullable Object trackSelectionData, - byte[] data) { - super(dataSource, dataSpec, type, trackFormat, trackSelectionReason, trackSelectionData, - C.TIME_UNSET, C.TIME_UNSET); - this.data = data; + @Nullable byte[] data) { + super( + dataSource, + dataSpec, + type, + trackFormat, + trackSelectionReason, + trackSelectionData, + C.TIME_UNSET, + C.TIME_UNSET); + this.data = data == null ? Util.EMPTY_BYTE_ARRAY : data; } /** * Returns the array in which the data is held. - *
      - * This method should be used for recycling the holder only, and not for reading the data. + * + *
      This method should be used for recycling the holder only, and not for reading the data. * * @return The array in which the data is held. */ @@ -77,7 +86,7 @@ public final void cancelLoad() { } @Override - public final void load() throws IOException, InterruptedException { + public final void load() throws IOException { try { dataSource.open(dataSpec); int limit = 0; @@ -93,7 +102,7 @@ public final void load() throws IOException, InterruptedException { consume(data, limit); } } finally { - Util.closeQuietly(dataSource); + DataSourceUtil.closeQuietly(dataSource); } } @@ -108,9 +117,7 @@ public final void load() throws IOException, InterruptedException { protected abstract void consume(byte[] data, int limit) throws IOException; private void maybeExpandData(int limit) { - if (data == null) { - data = new byte[READ_GRANULARITY]; - } else if (data.length < limit + READ_GRANULARITY) { + if (data.length < limit + READ_GRANULARITY) { // The new length is calculated as (data.length + READ_GRANULARITY) rather than // (limit + READ_GRANULARITY) in order to avoid small increments in the length. data = Arrays.copyOf(data, data.length + READ_GRANULARITY); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/InitializationChunk.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/InitializationChunk.java index 178fb94c7c..a1c3cfea78 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/InitializationChunk.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/InitializationChunk.java @@ -21,12 +21,10 @@ import com.google.android.exoplayer2.extractor.DefaultExtractorInput; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; -import com.google.android.exoplayer2.extractor.PositionHolder; -import com.google.android.exoplayer2.source.chunk.ChunkExtractorWrapper.TrackOutputProvider; +import com.google.android.exoplayer2.source.chunk.ChunkExtractor.TrackOutputProvider; import com.google.android.exoplayer2.upstream.DataSource; +import com.google.android.exoplayer2.upstream.DataSourceUtil; import com.google.android.exoplayer2.upstream.DataSpec; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.Util; import java.io.IOException; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; @@ -35,11 +33,9 @@ */ public final class InitializationChunk extends Chunk { - private static final PositionHolder DUMMY_POSITION_HOLDER = new PositionHolder(); + private final ChunkExtractor chunkExtractor; - private final ChunkExtractorWrapper extractorWrapper; - - @MonotonicNonNull private TrackOutputProvider trackOutputProvider; + private @MonotonicNonNull TrackOutputProvider trackOutputProvider; private long nextLoadPosition; private volatile boolean loadCanceled; @@ -49,18 +45,25 @@ public final class InitializationChunk extends Chunk { * @param trackFormat See {@link #trackFormat}. * @param trackSelectionReason See {@link #trackSelectionReason}. * @param trackSelectionData See {@link #trackSelectionData}. - * @param extractorWrapper A wrapped extractor to use for parsing the initialization data. + * @param chunkExtractor A wrapped extractor to use for parsing the initialization data. 
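DataChunk above now accepts a null initial array and grows its buffer internally before handing the loaded bytes to consume(byte[], int). A minimal subclass sketch; the class name and the DATA_TYPE/SELECTION_REASON choices are illustrative, not taken from the diff:

    import androidx.annotation.Nullable;
    import com.google.android.exoplayer2.C;
    import com.google.android.exoplayer2.Format;
    import com.google.android.exoplayer2.source.chunk.DataChunk;
    import com.google.android.exoplayer2.upstream.DataSource;
    import com.google.android.exoplayer2.upstream.DataSpec;
    import java.util.Arrays;

    /** Illustrative DataChunk subclass that simply captures the loaded bytes. */
    final class RawBytesChunk extends DataChunk {

      @Nullable private byte[] result;

      RawBytesChunk(DataSource dataSource, DataSpec dataSpec, Format trackFormat) {
        super(
            dataSource,
            dataSpec,
            C.DATA_TYPE_UNKNOWN,
            trackFormat,
            C.SELECTION_REASON_UNKNOWN,
            /* trackSelectionData= */ null,
            /* data= */ null); // Null is now allowed; the base class starts from an empty array.
      }

      @Override
      protected void consume(byte[] data, int limit) {
        // The base class recycles and grows its internal array, so copy out exactly [0, limit).
        result = Arrays.copyOf(data, limit);
      }

      /** Returns the loaded bytes, or null if load() has not completed. */
      @Nullable
      byte[] getResult() {
        return result;
      }
    }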
*/ public InitializationChunk( DataSource dataSource, DataSpec dataSpec, Format trackFormat, - int trackSelectionReason, + @C.SelectionReason int trackSelectionReason, @Nullable Object trackSelectionData, - ChunkExtractorWrapper extractorWrapper) { - super(dataSource, dataSpec, C.DATA_TYPE_MEDIA_INITIALIZATION, trackFormat, trackSelectionReason, - trackSelectionData, C.TIME_UNSET, C.TIME_UNSET); - this.extractorWrapper = extractorWrapper; + ChunkExtractor chunkExtractor) { + super( + dataSource, + dataSpec, + C.DATA_TYPE_MEDIA_INITIALIZATION, + trackFormat, + trackSelectionReason, + trackSelectionData, + C.TIME_UNSET, + C.TIME_UNSET); + this.chunkExtractor = chunkExtractor; } /** @@ -83,9 +86,9 @@ public void cancelLoad() { @SuppressWarnings("NonAtomicVolatileUpdate") @Override - public void load() throws IOException, InterruptedException { + public void load() throws IOException { if (nextLoadPosition == 0) { - extractorWrapper.init( + chunkExtractor.init( trackOutputProvider, /* startTimeUs= */ C.TIME_UNSET, /* endTimeUs= */ C.TIME_UNSET); } try { @@ -93,20 +96,15 @@ public void load() throws IOException, InterruptedException { DataSpec loadDataSpec = dataSpec.subrange(nextLoadPosition); ExtractorInput input = new DefaultExtractorInput( - dataSource, loadDataSpec.absoluteStreamPosition, dataSource.open(loadDataSpec)); + dataSource, loadDataSpec.position, dataSource.open(loadDataSpec)); // Load and decode the initialization data. try { - Extractor extractor = extractorWrapper.extractor; - int result = Extractor.RESULT_CONTINUE; - while (result == Extractor.RESULT_CONTINUE && !loadCanceled) { - result = extractor.read(input, DUMMY_POSITION_HOLDER); - } - Assertions.checkState(result != Extractor.RESULT_SEEK); + while (!loadCanceled && chunkExtractor.read(input)) {} } finally { - nextLoadPosition = input.getPosition() - dataSpec.absoluteStreamPosition; + nextLoadPosition = input.getPosition() - dataSpec.position; } } finally { - Util.closeQuietly(dataSource); + DataSourceUtil.closeQuietly(dataSource); } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/MediaChunk.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/MediaChunk.java index 39c097826f..2780c0d3bf 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/MediaChunk.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/MediaChunk.java @@ -22,9 +22,7 @@ import com.google.android.exoplayer2.upstream.DataSpec; import com.google.android.exoplayer2.util.Assertions; -/** - * An abstract base class for {@link Chunk}s that contain media samples. - */ +/** An abstract base class for {@link Chunk}s that contain media samples. */ public abstract class MediaChunk extends Chunk { /** The chunk index, or {@link C#INDEX_UNSET} if it is not known. 
*/ @@ -44,13 +42,20 @@ public MediaChunk( DataSource dataSource, DataSpec dataSpec, Format trackFormat, - int trackSelectionReason, + @C.SelectionReason int trackSelectionReason, @Nullable Object trackSelectionData, long startTimeUs, long endTimeUs, long chunkIndex) { - super(dataSource, dataSpec, C.DATA_TYPE_MEDIA, trackFormat, trackSelectionReason, - trackSelectionData, startTimeUs, endTimeUs); + super( + dataSource, + dataSpec, + C.DATA_TYPE_MEDIA, + trackFormat, + trackSelectionReason, + trackSelectionData, + startTimeUs, + endTimeUs); Assertions.checkNotNull(trackFormat); this.chunkIndex = chunkIndex; } @@ -60,9 +65,6 @@ public long getNextChunkIndex() { return chunkIndex != C.INDEX_UNSET ? chunkIndex + 1 : C.INDEX_UNSET; } - /** - * Returns whether the chunk has been fully loaded. - */ + /** Returns whether the chunk has been fully loaded. */ public abstract boolean isLoadCompleted(); - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/MediaChunkListIterator.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/MediaChunkListIterator.java deleted file mode 100644 index ca64e1affd..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/MediaChunkListIterator.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (C) 2018 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.source.chunk; - -import com.google.android.exoplayer2.upstream.DataSpec; -import java.util.List; - -/** A {@link MediaChunkIterator} which iterates over a {@link List} of {@link MediaChunk}s. */ -public final class MediaChunkListIterator extends BaseMediaChunkIterator { - - private final List chunks; - private final boolean reverseOrder; - - /** - * Creates iterator. - * - * @param chunks The list of chunks to iterate over. - * @param reverseOrder Whether to iterate in reverse order. 
- */ - public MediaChunkListIterator(List chunks, boolean reverseOrder) { - super(0, chunks.size() - 1); - this.chunks = chunks; - this.reverseOrder = reverseOrder; - } - - @Override - public DataSpec getDataSpec() { - return getCurrentChunk().dataSpec; - } - - @Override - public long getChunkStartTimeUs() { - return getCurrentChunk().startTimeUs; - } - - @Override - public long getChunkEndTimeUs() { - return getCurrentChunk().endTimeUs; - } - - private MediaChunk getCurrentChunk() { - int index = (int) super.getCurrentIndex(); - if (reverseOrder) { - index = chunks.size() - 1 - index; - } - return chunks.get(index); - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/MediaParserChunkExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/MediaParserChunkExtractor.java new file mode 100644 index 0000000000..9f367aa876 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/MediaParserChunkExtractor.java @@ -0,0 +1,200 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.chunk; + +import static com.google.android.exoplayer2.source.mediaparser.MediaParserUtil.PARAMETER_EAGERLY_EXPOSE_TRACK_TYPE; +import static com.google.android.exoplayer2.source.mediaparser.MediaParserUtil.PARAMETER_EXPOSE_CAPTION_FORMATS; +import static com.google.android.exoplayer2.source.mediaparser.MediaParserUtil.PARAMETER_EXPOSE_CHUNK_INDEX_AS_MEDIA_FORMAT; +import static com.google.android.exoplayer2.source.mediaparser.MediaParserUtil.PARAMETER_EXPOSE_DUMMY_SEEK_MAP; +import static com.google.android.exoplayer2.source.mediaparser.MediaParserUtil.PARAMETER_INCLUDE_SUPPLEMENTAL_DATA; +import static com.google.android.exoplayer2.source.mediaparser.MediaParserUtil.PARAMETER_IN_BAND_CRYPTO_INFO; +import static com.google.android.exoplayer2.source.mediaparser.MediaParserUtil.PARAMETER_OVERRIDE_IN_BAND_CAPTION_DECLARATIONS; + +import android.annotation.SuppressLint; +import android.media.MediaFormat; +import android.media.MediaParser; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.extractor.ChunkIndex; +import com.google.android.exoplayer2.extractor.DummyTrackOutput; +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.SeekMap; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.source.mediaparser.InputReaderAdapterV30; +import com.google.android.exoplayer2.source.mediaparser.MediaParserUtil; +import com.google.android.exoplayer2.source.mediaparser.OutputConsumerAdapterV30; +import com.google.android.exoplayer2.util.Assertions; 
+import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.Util; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** {@link ChunkExtractor} implemented on top of the platform's {@link MediaParser}. */ +@RequiresApi(30) +public final class MediaParserChunkExtractor implements ChunkExtractor { + + // Maximum TAG length is 23 characters. + private static final String TAG = "MediaPrsrChunkExtractor"; + + public static final ChunkExtractor.Factory FACTORY = + (primaryTrackType, + format, + enableEventMessageTrack, + closedCaptionFormats, + playerEmsgTrackOutput, + playerId) -> { + if (!MimeTypes.isText(format.containerMimeType)) { + // Container is either Matroska or Fragmented MP4. + return new MediaParserChunkExtractor( + primaryTrackType, format, closedCaptionFormats, playerId); + } else { + // This is either RAWCC (unsupported) or a text track that does not require an extractor. + Log.w(TAG, "Ignoring an unsupported text track."); + return null; + } + }; + + private final OutputConsumerAdapterV30 outputConsumerAdapter; + private final InputReaderAdapterV30 inputReaderAdapter; + private final MediaParser mediaParser; + private final TrackOutputProviderAdapter trackOutputProviderAdapter; + private final DummyTrackOutput dummyTrackOutput; + private long pendingSeekUs; + @Nullable private TrackOutputProvider trackOutputProvider; + @Nullable private Format[] sampleFormats; + + /** + * Creates a new instance. + * + * @param primaryTrackType The {@link C.TrackType type} of the primary track. {@link + * C#TRACK_TYPE_NONE} if there is no primary track. + * @param manifestFormat The chunks {@link Format} as obtained from the manifest. + * @param closedCaptionFormats A list containing the {@link Format Formats} of the closed-caption + * tracks in the chunks. + * @param playerId The {@link PlayerId} of the player this chunk extractor is used for. + */ + @SuppressLint("WrongConstant") + public MediaParserChunkExtractor( + @C.TrackType int primaryTrackType, + Format manifestFormat, + List closedCaptionFormats, + PlayerId playerId) { + outputConsumerAdapter = + new OutputConsumerAdapterV30( + manifestFormat, primaryTrackType, /* expectDummySeekMap= */ true); + inputReaderAdapter = new InputReaderAdapterV30(); + String mimeType = Assertions.checkNotNull(manifestFormat.containerMimeType); + String parserName = + MimeTypes.isMatroska(mimeType) + ? 
MediaParser.PARSER_NAME_MATROSKA + : MediaParser.PARSER_NAME_FMP4; + outputConsumerAdapter.setSelectedParserName(parserName); + mediaParser = MediaParser.createByName(parserName, outputConsumerAdapter); + mediaParser.setParameter(MediaParser.PARAMETER_MATROSKA_DISABLE_CUES_SEEKING, true); + mediaParser.setParameter(PARAMETER_IN_BAND_CRYPTO_INFO, true); + mediaParser.setParameter(PARAMETER_INCLUDE_SUPPLEMENTAL_DATA, true); + mediaParser.setParameter(PARAMETER_EAGERLY_EXPOSE_TRACK_TYPE, true); + mediaParser.setParameter(PARAMETER_EXPOSE_DUMMY_SEEK_MAP, true); + mediaParser.setParameter(PARAMETER_EXPOSE_CHUNK_INDEX_AS_MEDIA_FORMAT, true); + mediaParser.setParameter(PARAMETER_OVERRIDE_IN_BAND_CAPTION_DECLARATIONS, true); + ArrayList closedCaptionMediaFormats = new ArrayList<>(); + for (int i = 0; i < closedCaptionFormats.size(); i++) { + closedCaptionMediaFormats.add( + MediaParserUtil.toCaptionsMediaFormat(closedCaptionFormats.get(i))); + } + mediaParser.setParameter(PARAMETER_EXPOSE_CAPTION_FORMATS, closedCaptionMediaFormats); + if (Util.SDK_INT >= 31) { + MediaParserUtil.setLogSessionIdOnMediaParser(mediaParser, playerId); + } + outputConsumerAdapter.setMuxedCaptionFormats(closedCaptionFormats); + trackOutputProviderAdapter = new TrackOutputProviderAdapter(); + dummyTrackOutput = new DummyTrackOutput(); + pendingSeekUs = C.TIME_UNSET; + } + + // ChunkExtractor implementation. + + @Override + public void init( + @Nullable TrackOutputProvider trackOutputProvider, long startTimeUs, long endTimeUs) { + this.trackOutputProvider = trackOutputProvider; + outputConsumerAdapter.setSampleTimestampUpperLimitFilterUs(endTimeUs); + outputConsumerAdapter.setExtractorOutput(trackOutputProviderAdapter); + pendingSeekUs = startTimeUs; + } + + @Override + public void release() { + mediaParser.release(); + } + + @Override + public boolean read(ExtractorInput input) throws IOException { + maybeExecutePendingSeek(); + inputReaderAdapter.setDataReader(input, input.getLength()); + return mediaParser.advance(inputReaderAdapter); + } + + @Nullable + @Override + public ChunkIndex getChunkIndex() { + return outputConsumerAdapter.getChunkIndex(); + } + + @Nullable + @Override + public Format[] getSampleFormats() { + return sampleFormats; + } + + // Internal methods. + + private void maybeExecutePendingSeek() { + @Nullable MediaParser.SeekMap dummySeekMap = outputConsumerAdapter.getDummySeekMap(); + if (pendingSeekUs != C.TIME_UNSET && dummySeekMap != null) { + mediaParser.seek(dummySeekMap.getSeekPoints(pendingSeekUs).first); + pendingSeekUs = C.TIME_UNSET; + } + } + + // Internal classes. + + private class TrackOutputProviderAdapter implements ExtractorOutput { + + @Override + public TrackOutput track(int id, int type) { + return trackOutputProvider != null ? trackOutputProvider.track(id, type) : dummyTrackOutput; + } + + @Override + public void endTracks() { + // Imitate BundledChunkExtractor behavior, which captures a sample format snapshot when + // endTracks is called. + sampleFormats = outputConsumerAdapter.getSampleFormats(); + } + + @Override + public void seekMap(SeekMap seekMap) { + // Do nothing. 
+ } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/SingleSampleMediaChunk.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/SingleSampleMediaChunk.java index 00d841eee0..4fdce6377b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/SingleSampleMediaChunk.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/SingleSampleMediaChunk.java @@ -15,22 +15,21 @@ */ package com.google.android.exoplayer2.source.chunk; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.extractor.DefaultExtractorInput; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.upstream.DataSource; +import com.google.android.exoplayer2.upstream.DataSourceUtil; import com.google.android.exoplayer2.upstream.DataSpec; -import com.google.android.exoplayer2.util.Util; import java.io.IOException; -/** - * A {@link BaseMediaChunk} for chunks consisting of a single raw sample. - */ +/** A {@link BaseMediaChunk} for chunks consisting of a single raw sample. */ public final class SingleSampleMediaChunk extends BaseMediaChunk { - private final int trackType; + private final @C.TrackType int trackType; private final Format sampleFormat; private long nextLoadPosition; @@ -45,20 +44,19 @@ public final class SingleSampleMediaChunk extends BaseMediaChunk { * @param startTimeUs The start time of the media contained by the chunk, in microseconds. * @param endTimeUs The end time of the media contained by the chunk, in microseconds. * @param chunkIndex The index of the chunk, or {@link C#INDEX_UNSET} if it is not known. - * @param trackType The type of the chunk. Typically one of the {@link C} {@code TRACK_TYPE_*} - * constants. + * @param trackType The {@link C.TrackType track type} of the chunk. * @param sampleFormat The {@link Format} of the sample in the chunk. 
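For orientation, the FACTORY in MediaParserChunkExtractor above only creates an extractor for Matroska or fragmented MP4 containers and bails out for text tracks. A simplified, self-contained sketch of that decision; the MIME strings and return labels are illustrative placeholders, while the real code uses MimeTypes.isText/isMatroska and the MediaParser.PARSER_NAME_* constants on API 30+:

    // Simplified sketch of the container check performed by MediaParserChunkExtractor.FACTORY.
    final class ParserChoiceSketch {
      // Returns a parser label, or null for text tracks (which the factory does not handle).
      static String chooseParser(String containerMimeType) {
        if (containerMimeType == null || containerMimeType.startsWith("text/")) {
          return null;
        }
        boolean isMatroska =
            containerMimeType.equals("video/webm")
                || containerMimeType.equals("audio/webm")
                || containerMimeType.equals("video/x-matroska")
                || containerMimeType.equals("audio/x-matroska");
        return isMatroska ? "matroska" : "fmp4";
      }
    }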
*/ public SingleSampleMediaChunk( DataSource dataSource, DataSpec dataSpec, Format trackFormat, - int trackSelectionReason, - Object trackSelectionData, + @C.SelectionReason int trackSelectionReason, + @Nullable Object trackSelectionData, long startTimeUs, long endTimeUs, long chunkIndex, - int trackType, + @C.TrackType int trackType, Format sampleFormat) { super( dataSource, @@ -75,7 +73,6 @@ public SingleSampleMediaChunk( this.sampleFormat = sampleFormat; } - @Override public boolean isLoadCompleted() { return loadCompleted; @@ -90,7 +87,7 @@ public void cancelLoad() { @SuppressWarnings("NonAtomicVolatileUpdate") @Override - public void load() throws IOException, InterruptedException { + public void load() throws IOException { BaseMediaChunkOutput output = getOutput(); output.setSampleOffsetUs(0); TrackOutput trackOutput = output.track(0, trackType); @@ -113,7 +110,7 @@ public void load() throws IOException, InterruptedException { int sampleSize = (int) nextLoadPosition; trackOutput.sampleMetadata(startTimeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null); } finally { - Util.closeQuietly(dataSource); + DataSourceUtil.closeQuietly(dataSource); } loadCompleted = true; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/package-info.java new file mode 100644 index 0000000000..c57494dc1c --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/chunk/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.source.chunk; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/BaseUrlExclusionList.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/BaseUrlExclusionList.java new file mode 100644 index 0000000000..1cf34b8f42 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/BaseUrlExclusionList.java @@ -0,0 +1,212 @@ +/* + * Copyright (C) 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package com.google.android.exoplayer2.source.dash;
+
+import static com.google.android.exoplayer2.util.Util.castNonNull;
+import static java.lang.Math.max;
+
+import android.os.SystemClock;
+import android.util.Pair;
+import androidx.annotation.Nullable;
+import androidx.annotation.VisibleForTesting;
+import com.google.android.exoplayer2.source.dash.manifest.BaseUrl;
+import com.google.common.collect.Iterables;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+
+/**
+ * Holds the state of {@link #exclude(BaseUrl, long) excluded} base URLs to be used to {@link
+ * #selectBaseUrl(List) select} a base URL based on these exclusions.
+ */
+public final class BaseUrlExclusionList {
+
+  private final Map<String, Long> excludedServiceLocations;
+  private final Map<Integer, Long> excludedPriorities;
+  private final Map<List<Pair<String, Integer>>, BaseUrl> selectionsTaken = new HashMap<>();
+  private final Random random;
+
+  /** Creates an instance. */
+  public BaseUrlExclusionList() {
+    this(new Random());
+  }
+
+  /** Creates an instance with the given {@link Random}. */
+  @VisibleForTesting
+  /* package */ BaseUrlExclusionList(Random random) {
+    this.random = random;
+    excludedServiceLocations = new HashMap<>();
+    excludedPriorities = new HashMap<>();
+  }
+
+  /**
+   * Excludes the given base URL.
+   *
+   * @param baseUrlToExclude The base URL to exclude.
+   * @param exclusionDurationMs The duration of exclusion, in milliseconds.
+   */
+  public void exclude(BaseUrl baseUrlToExclude, long exclusionDurationMs) {
+    long excludeUntilMs = SystemClock.elapsedRealtime() + exclusionDurationMs;
+    addExclusion(baseUrlToExclude.serviceLocation, excludeUntilMs, excludedServiceLocations);
+    if (baseUrlToExclude.priority != BaseUrl.PRIORITY_UNSET) {
+      addExclusion(baseUrlToExclude.priority, excludeUntilMs, excludedPriorities);
+    }
+  }
+
+  /**
+   * Selects the base URL to use from the given list.
+   *
+   * <p>The list is reduced by service location and priority of base URLs that have been passed to
+   * {@link #exclude(BaseUrl, long)}. The base URL to use is then selected from the remaining base
+   * URLs by priority and weight.
+   *
+   * @param baseUrls The list of {@link BaseUrl base URLs} to select from.
+   * @return The selected base URL after exclusion or null if all elements have been excluded.
+   */
+  @Nullable
+  public BaseUrl selectBaseUrl(List<BaseUrl> baseUrls) {
+    List<BaseUrl> includedBaseUrls = applyExclusions(baseUrls);
+    if (includedBaseUrls.size() < 2) {
+      return Iterables.getFirst(includedBaseUrls, /* defaultValue= */ null);
+    }
+    // Sort by priority and service location to make the sort order of the candidates deterministic.
+    Collections.sort(includedBaseUrls, BaseUrlExclusionList::compareBaseUrl);
+    // Get candidates of the lowest priority from the head of the sorted list.
+    List<Pair<String, Integer>> candidateKeys = new ArrayList<>();
+    int lowestPriority = includedBaseUrls.get(0).priority;
+    for (int i = 0; i < includedBaseUrls.size(); i++) {
+      BaseUrl baseUrl = includedBaseUrls.get(i);
+      if (lowestPriority != baseUrl.priority) {
+        if (candidateKeys.size() == 1) {
+          // Only a single candidate of lowest priority; no choice.
+          return includedBaseUrls.get(0);
+        }
+        break;
+      }
+      candidateKeys.add(new Pair<>(baseUrl.serviceLocation, baseUrl.weight));
+    }
+    // Check whether selection has already been taken.
+    @Nullable BaseUrl baseUrl = selectionsTaken.get(candidateKeys);
+    if (baseUrl == null) {
+      // Weighted random selection from multiple candidates of the same priority.
+      baseUrl = selectWeighted(includedBaseUrls.subList(0, candidateKeys.size()));
+      // Remember the selection taken for later.
+      selectionsTaken.put(candidateKeys, baseUrl);
+    }
+    return baseUrl;
+  }
+
+  /**
+   * Returns the number of priority levels for the given list of base URLs after exclusion.
+   *
+   * @param baseUrls The list of base URLs.
+   * @return The number of priority levels after exclusion.
+   */
+  public int getPriorityCountAfterExclusion(List<BaseUrl> baseUrls) {
+    Set<Integer> priorities = new HashSet<>();
+    List<BaseUrl> includedBaseUrls = applyExclusions(baseUrls);
+    for (int i = 0; i < includedBaseUrls.size(); i++) {
+      priorities.add(includedBaseUrls.get(i).priority);
+    }
+    return priorities.size();
+  }
+
+  /**
+   * Returns the number of priority levels of the given list of base URLs.
+   *
+   * @param baseUrls The list of base URLs.
+   * @return The number of priority levels before exclusion.
+   */
+  public static int getPriorityCount(List<BaseUrl> baseUrls) {
+    Set<Integer> priorities = new HashSet<>();
+    for (int i = 0; i < baseUrls.size(); i++) {
+      priorities.add(baseUrls.get(i).priority);
+    }
+    return priorities.size();
+  }
+
+  /** Resets the state. */
+  public void reset() {
+    excludedServiceLocations.clear();
+    excludedPriorities.clear();
+    selectionsTaken.clear();
+  }
+
+  // Internal methods.
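selectBaseUrl above falls back to a weighted random pick when several base URLs share the lowest priority; the pick itself is done by selectWeighted among the internal methods that follow. A standalone sketch of the same technique over bare integer weights:

    import java.util.List;
    import java.util.Random;

    // Weighted random pick: index i is chosen with probability weights[i] / totalWeight.
    final class WeightedPickSketch {
      static int pickIndex(List<Integer> weights, Random random) {
        int totalWeight = 0;
        for (int weight : weights) {
          totalWeight += weight;
        }
        int choice = random.nextInt(totalWeight); // Uniform in [0, totalWeight).
        int cumulative = 0;
        for (int i = 0; i < weights.size(); i++) {
          cumulative += weights.get(i);
          if (choice < cumulative) {
            return i;
          }
        }
        return weights.size() - 1; // Unreachable for positive weights; mirrors the getLast fallback.
      }
    }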
+
+  private List<BaseUrl> applyExclusions(List<BaseUrl> baseUrls) {
+    long nowMs = SystemClock.elapsedRealtime();
+    removeExpiredExclusions(nowMs, excludedServiceLocations);
+    removeExpiredExclusions(nowMs, excludedPriorities);
+    List<BaseUrl> includedBaseUrls = new ArrayList<>();
+    for (int i = 0; i < baseUrls.size(); i++) {
+      BaseUrl baseUrl = baseUrls.get(i);
+      if (!excludedServiceLocations.containsKey(baseUrl.serviceLocation)
+          && !excludedPriorities.containsKey(baseUrl.priority)) {
+        includedBaseUrls.add(baseUrl);
+      }
+    }
+    return includedBaseUrls;
+  }
+
+  private BaseUrl selectWeighted(List<BaseUrl> candidates) {
+    int totalWeight = 0;
+    for (int i = 0; i < candidates.size(); i++) {
+      totalWeight += candidates.get(i).weight;
+    }
+    int randomChoice = random.nextInt(/* bound= */ totalWeight);
+    totalWeight = 0;
+    for (int i = 0; i < candidates.size(); i++) {
+      BaseUrl baseUrl = candidates.get(i);
+      totalWeight += baseUrl.weight;
+      if (randomChoice < totalWeight) {
+        return baseUrl;
+      }
+    }
+    return Iterables.getLast(candidates);
+  }
+
+  private static <T> void addExclusion(
+      T toExclude, long excludeUntilMs, Map<T, Long> currentExclusions) {
+    if (currentExclusions.containsKey(toExclude)) {
+      excludeUntilMs = max(excludeUntilMs, castNonNull(currentExclusions.get(toExclude)));
+    }
+    currentExclusions.put(toExclude, excludeUntilMs);
+  }
+
+  private static <T> void removeExpiredExclusions(long nowMs, Map<T, Long> exclusions) {
+    List<T> expiredExclusions = new ArrayList<>();
+    for (Map.Entry<T, Long> entries : exclusions.entrySet()) {
+      if (entries.getValue() <= nowMs) {
+        expiredExclusions.add(entries.getKey());
+      }
+    }
+    for (int i = 0; i < expiredExclusions.size(); i++) {
+      exclusions.remove(expiredExclusions.get(i));
+    }
+  }
+
+  /** Compare by priority and service location. */
+  private static int compareBaseUrl(BaseUrl a, BaseUrl b) {
+    int compare = Integer.compare(a.priority, b.priority);
+    return compare != 0 ? compare : a.serviceLocation.compareTo(b.serviceLocation);
+  }
+}
diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashChunkSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashChunkSource.java
index f7edf62182..9897575081 100644
--- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashChunkSource.java
+++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashChunkSource.java
@@ -17,18 +17,18 @@
 import android.os.SystemClock;
 import androidx.annotation.Nullable;
+import com.google.android.exoplayer2.C;
 import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.analytics.PlayerId;
 import com.google.android.exoplayer2.source.chunk.ChunkSource;
 import com.google.android.exoplayer2.source.dash.PlayerEmsgHandler.PlayerTrackEmsgHandler;
 import com.google.android.exoplayer2.source.dash.manifest.DashManifest;
-import com.google.android.exoplayer2.trackselection.TrackSelection;
+import com.google.android.exoplayer2.trackselection.ExoTrackSelection;
 import com.google.android.exoplayer2.upstream.LoaderErrorThrower;
 import com.google.android.exoplayer2.upstream.TransferListener;
 import java.util.List;
-/**
- * An {@link ChunkSource} for DASH streams.
- */
+/** A {@link ChunkSource} for DASH streams. */
 public interface DashChunkSource extends ChunkSource {
   /** Factory for {@link DashChunkSource}s. */
@@ -37,30 +37,36 @@ interface Factory {
     /**
      * @param manifestLoaderErrorThrower Throws errors affecting loading of manifests.
      * @param manifest The initial manifest.
+     * @param baseUrlExclusionList The base URL exclusion list.
* @param periodIndex The index of the corresponding period in the manifest. * @param adaptationSetIndices The indices of the corresponding adaptation sets in the period. * @param trackSelection The track selection. + * @param trackType The {@link C.TrackType track type}. * @param elapsedRealtimeOffsetMs If known, an estimate of the instantaneous difference between * server-side unix time and {@link SystemClock#elapsedRealtime()} in milliseconds, - * specified as the server's unix time minus the local elapsed time. If unknown, set to 0. + * specified as the server's unix time minus the local elapsed time. Or {@link C#TIME_UNSET} + * if unknown. * @param enableEventMessageTrack Whether to output an event message track. * @param closedCaptionFormats The {@link Format Formats} of closed caption tracks to be output. * @param transferListener The transfer listener which should be informed of any data transfers. * May be null if no listener is available. + * @param playerId The {@link PlayerId} of the player using this chunk source. * @return The created {@link DashChunkSource}. */ DashChunkSource createDashChunkSource( LoaderErrorThrower manifestLoaderErrorThrower, DashManifest manifest, + BaseUrlExclusionList baseUrlExclusionList, int periodIndex, int[] adaptationSetIndices, - TrackSelection trackSelection, - int type, + ExoTrackSelection trackSelection, + @C.TrackType int trackType, long elapsedRealtimeOffsetMs, boolean enableEventMessageTrack, List closedCaptionFormats, @Nullable PlayerTrackEmsgHandler playerEmsgHandler, - @Nullable TransferListener transferListener); + @Nullable TransferListener transferListener, + PlayerId playerId); } /** @@ -75,5 +81,5 @@ DashChunkSource createDashChunkSource( * * @param trackSelection The new track selection instance. Must be equivalent to the previous one. 
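The elapsedRealtimeOffsetMs contract documented above is the server's unix time minus the local SystemClock.elapsedRealtime(), or C.TIME_UNSET when unknown. A small sketch of how such an offset would be applied; the TIME_UNSET sentinel is assumed here to match C.TIME_UNSET:

    // Applies an offset defined as serverUnixTimeMs - elapsedRealtimeMs, as in the javadoc above.
    final class ServerTimeSketch {
      static final long TIME_UNSET = Long.MIN_VALUE + 1; // Assumed to match C.TIME_UNSET.

      static long estimateServerTimeMs(long elapsedRealtimeOffsetMs, long elapsedRealtimeMs) {
        if (elapsedRealtimeOffsetMs == TIME_UNSET) {
          return System.currentTimeMillis(); // No estimate available; fall back to the local clock.
        }
        return elapsedRealtimeMs + elapsedRealtimeOffsetMs;
      }
    }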
*/ - void updateTrackSelection(TrackSelection trackSelection); + void updateTrackSelection(ExoTrackSelection trackSelection); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashMediaPeriod.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashMediaPeriod.java index fa8e5338fc..c53684a05b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashMediaPeriod.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashMediaPeriod.java @@ -15,6 +15,9 @@ */ package com.google.android.exoplayer2.source.dash; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.util.Pair; import android.util.SparseArray; import android.util.SparseIntArray; @@ -23,12 +26,14 @@ import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.SeekParameters; -import com.google.android.exoplayer2.drm.DrmInitData; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.drm.DrmSessionEventListener; import com.google.android.exoplayer2.drm.DrmSessionManager; import com.google.android.exoplayer2.offline.StreamKey; import com.google.android.exoplayer2.source.CompositeSequenceableLoaderFactory; import com.google.android.exoplayer2.source.EmptySampleStream; import com.google.android.exoplayer2.source.MediaPeriod; +import com.google.android.exoplayer2.source.MediaSourceEventListener; import com.google.android.exoplayer2.source.MediaSourceEventListener.EventDispatcher; import com.google.android.exoplayer2.source.SampleStream; import com.google.android.exoplayer2.source.SequenceableLoader; @@ -44,17 +49,19 @@ import com.google.android.exoplayer2.source.dash.manifest.EventStream; import com.google.android.exoplayer2.source.dash.manifest.Period; import com.google.android.exoplayer2.source.dash.manifest.Representation; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.upstream.Allocator; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; import com.google.android.exoplayer2.upstream.LoaderErrorThrower; import com.google.android.exoplayer2.upstream.TransferListener; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; +import com.google.common.primitives.Ints; import java.io.IOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.ArrayList; import java.util.Arrays; import java.util.IdentityHashMap; @@ -69,13 +76,18 @@ SequenceableLoader.Callback>, ChunkSampleStream.ReleaseCallback { + // Defined by ANSI/SCTE 214-1 2016 7.2.3. private static final Pattern CEA608_SERVICE_DESCRIPTOR_REGEX = Pattern.compile("CC([1-4])=(.+)"); + // Defined by ANSI/SCTE 214-1 2016 7.2.2. 
+ private static final Pattern CEA708_SERVICE_DESCRIPTOR_REGEX = + Pattern.compile("([1-4])=lang:(\\w+)(,.+)?"); /* package */ final int id; private final DashChunkSource.Factory chunkSourceFactory; @Nullable private final TransferListener transferListener; - private final DrmSessionManager drmSessionManager; + private final DrmSessionManager drmSessionManager; private final LoadErrorHandlingPolicy loadErrorHandlingPolicy; + private final BaseUrlExclusionList baseUrlExclusionList; private final long elapsedRealtimeOffsetMs; private final LoaderErrorThrower manifestLoaderErrorThrower; private final Allocator allocator; @@ -85,7 +97,9 @@ private final PlayerEmsgHandler playerEmsgHandler; private final IdentityHashMap, PlayerTrackEmsgHandler> trackEmsgHandlerBySampleStream; - private final EventDispatcher eventDispatcher; + private final MediaSourceEventListener.EventDispatcher mediaSourceEventDispatcher; + private final DrmSessionEventListener.EventDispatcher drmEventDispatcher; + private final PlayerId playerId; @Nullable private Callback callback; private ChunkSampleStream[] sampleStreams; @@ -94,34 +108,39 @@ private DashManifest manifest; private int periodIndex; private List eventStreams; - private boolean notifiedReadingStarted; public DashMediaPeriod( int id, DashManifest manifest, + BaseUrlExclusionList baseUrlExclusionList, int periodIndex, DashChunkSource.Factory chunkSourceFactory, @Nullable TransferListener transferListener, - DrmSessionManager drmSessionManager, + DrmSessionManager drmSessionManager, + DrmSessionEventListener.EventDispatcher drmEventDispatcher, LoadErrorHandlingPolicy loadErrorHandlingPolicy, - EventDispatcher eventDispatcher, + EventDispatcher mediaSourceEventDispatcher, long elapsedRealtimeOffsetMs, LoaderErrorThrower manifestLoaderErrorThrower, Allocator allocator, CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory, - PlayerEmsgCallback playerEmsgCallback) { + PlayerEmsgCallback playerEmsgCallback, + PlayerId playerId) { this.id = id; this.manifest = manifest; + this.baseUrlExclusionList = baseUrlExclusionList; this.periodIndex = periodIndex; this.chunkSourceFactory = chunkSourceFactory; this.transferListener = transferListener; this.drmSessionManager = drmSessionManager; + this.drmEventDispatcher = drmEventDispatcher; this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; - this.eventDispatcher = eventDispatcher; + this.mediaSourceEventDispatcher = mediaSourceEventDispatcher; this.elapsedRealtimeOffsetMs = elapsedRealtimeOffsetMs; this.manifestLoaderErrorThrower = manifestLoaderErrorThrower; this.allocator = allocator; this.compositeSequenceableLoaderFactory = compositeSequenceableLoaderFactory; + this.playerId = playerId; playerEmsgHandler = new PlayerEmsgHandler(manifest, playerEmsgCallback, allocator); sampleStreams = newSampleStreamArray(0); eventSampleStreams = new EventSampleStream[0]; @@ -134,7 +153,6 @@ public DashMediaPeriod( buildTrackGroups(drmSessionManager, period.adaptationSets, eventStreams); trackGroups = result.first; trackGroupInfos = result.second; - eventDispatcher.mediaPeriodCreated(); } /** @@ -173,7 +191,6 @@ public void release() { sampleStream.release(this); } callback = null; - eventDispatcher.mediaPeriodReleased(); } // ChunkSampleStream.ReleaseCallback implementation. 
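The two service-descriptor patterns defined above (CEA-608 per ANSI/SCTE 214-1 2016 7.2.3, CEA-708 per 7.2.2) are applied further below in parseClosedCaptionDescriptor; they accept values such as "CC1=eng;CC3=fre" and "1=lang:eng", with group 1 becoming the accessibility channel and group 2 the language. A standalone sketch of the parse using the same regular expressions:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // The patterns are copied verbatim from the DashMediaPeriod fields above.
    final class CaptionDescriptorSketch {
      private static final Pattern CEA608 = Pattern.compile("CC([1-4])=(.+)");
      private static final Pattern CEA708 = Pattern.compile("([1-4])=lang:(\\w+)(,.+)?");

      public static void main(String[] args) {
        for (String service : "CC1=eng;CC3=fre".split(";")) {
          Matcher m = CEA608.matcher(service);
          if (m.matches()) {
            System.out.println("CEA-608 channel " + m.group(1) + ", language " + m.group(2));
          }
        }
        Matcher m708 = CEA708.matcher("2=lang:deu");
        if (m708.matches()) {
          System.out.println("CEA-708 service " + m708.group(1) + ", language " + m708.group(2));
        }
      }
    }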
@@ -205,10 +222,10 @@ public TrackGroupArray getTrackGroups() { } @Override - public List getStreamKeys(List trackSelections) { + public List getStreamKeys(List trackSelections) { List manifestAdaptationSets = manifest.getPeriod(periodIndex).adaptationSets; List streamKeys = new ArrayList<>(); - for (TrackSelection trackSelection : trackSelections) { + for (ExoTrackSelection trackSelection : trackSelections) { int trackGroupIndex = trackGroups.indexOf(trackSelection.getTrackGroup()); TrackGroupInfo trackGroupInfo = trackGroupInfos[trackGroupIndex]; if (trackGroupInfo.trackGroupCategory != TrackGroupInfo.CATEGORY_PRIMARY) { @@ -248,7 +265,7 @@ public List getStreamKeys(List trackSelections) { @Override public long selectTracks( - @NullableType TrackSelection[] selections, + @NullableType ExoTrackSelection[] selections, boolean[] mayRetainStreamFlags, @NullableType SampleStream[] streams, boolean[] streamResetFlags, @@ -310,10 +327,6 @@ public long getNextLoadPositionUs() { @Override public long readDiscontinuity() { - if (!notifiedReadingStarted) { - eventDispatcher.readingStarted(); - notifiedReadingStarted = true; - } return C.TIME_UNSET; } @@ -352,7 +365,7 @@ public void onContinueLoadingRequested(ChunkSampleStream sample // Internal methods. - private int[] getStreamIndexToTrackGroupIndex(TrackSelection[] selections) { + private int[] getStreamIndexToTrackGroupIndex(ExoTrackSelection[] selections) { int[] streamIndexToTrackGroupIndex = new int[selections.length]; for (int i = 0; i < selections.length; i++) { if (selections[i] != null) { @@ -365,7 +378,7 @@ private int[] getStreamIndexToTrackGroupIndex(TrackSelection[] selections) { } private void releaseDisabledStreams( - TrackSelection[] selections, boolean[] mayRetainStreamFlags, SampleStream[] streams) { + ExoTrackSelection[] selections, boolean[] mayRetainStreamFlags, SampleStream[] streams) { for (int i = 0; i < selections.length; i++) { if (selections[i] == null || !mayRetainStreamFlags[i]) { if (streams[i] instanceof ChunkSampleStream) { @@ -382,7 +395,7 @@ private void releaseDisabledStreams( } private void releaseOrphanEmbeddedStreams( - TrackSelection[] selections, SampleStream[] streams, int[] streamIndexToTrackGroupIndex) { + ExoTrackSelection[] selections, SampleStream[] streams, int[] streamIndexToTrackGroupIndex) { for (int i = 0; i < selections.length; i++) { if (streams[i] instanceof EmptySampleStream || streams[i] instanceof EmbeddedSampleStream) { // We need to release an embedded stream if the corresponding primary stream is released. @@ -410,14 +423,14 @@ private void releaseOrphanEmbeddedStreams( } private void selectNewStreams( - TrackSelection[] selections, + ExoTrackSelection[] selections, SampleStream[] streams, boolean[] streamResetFlags, long positionUs, int[] streamIndexToTrackGroupIndex) { // Create newly selected primary and event streams. 
for (int i = 0; i < selections.length; i++) { - TrackSelection selection = selections[i]; + ExoTrackSelection selection = selections[i]; if (selection == null) { continue; } @@ -481,21 +494,21 @@ private int getPrimaryStreamIndex(int embeddedStreamIndex, int[] streamIndexToTr } private static Pair buildTrackGroups( - DrmSessionManager drmSessionManager, + DrmSessionManager drmSessionManager, List adaptationSets, List eventStreams) { int[][] groupedAdaptationSetIndices = getGroupedAdaptationSetIndices(adaptationSets); int primaryGroupCount = groupedAdaptationSetIndices.length; boolean[] primaryGroupHasEventMessageTrackFlags = new boolean[primaryGroupCount]; - Format[][] primaryGroupCea608TrackFormats = new Format[primaryGroupCount][]; + Format[][] primaryGroupClosedCaptionTrackFormats = new Format[primaryGroupCount][]; int totalEmbeddedTrackGroupCount = identifyEmbeddedTracks( primaryGroupCount, adaptationSets, groupedAdaptationSetIndices, primaryGroupHasEventMessageTrackFlags, - primaryGroupCea608TrackFormats); + primaryGroupClosedCaptionTrackFormats); int totalGroupCount = primaryGroupCount + totalEmbeddedTrackGroupCount + eventStreams.size(); TrackGroup[] trackGroups = new TrackGroup[totalGroupCount]; @@ -508,7 +521,7 @@ private static Pair buildTrackGroups( groupedAdaptationSetIndices, primaryGroupCount, primaryGroupHasEventMessageTrackFlags, - primaryGroupCea608TrackFormats, + primaryGroupClosedCaptionTrackFormats, trackGroups, trackGroupInfos); @@ -582,7 +595,7 @@ private static int[][] getGroupedAdaptationSetIndices(List adapta adaptationSetIdToIndex.get( Integer.parseInt(adaptationSetId), /* valueIfKeyNotFound= */ -1); if (otherAdaptationSetId != -1) { - mergedGroupIndex = Math.min(mergedGroupIndex, otherAdaptationSetId); + mergedGroupIndex = min(mergedGroupIndex, otherAdaptationSetId); } } } @@ -600,7 +613,7 @@ private static int[][] getGroupedAdaptationSetIndices(List adapta int[][] groupedAdaptationSetIndices = new int[adaptationSetGroupedIndices.size()][]; for (int i = 0; i < groupedAdaptationSetIndices.length; i++) { - groupedAdaptationSetIndices[i] = Util.toArray(adaptationSetGroupedIndices.get(i)); + groupedAdaptationSetIndices[i] = Ints.toArray(adaptationSetGroupedIndices.get(i)); // Restore the original adaptation set order within each group. Arrays.sort(groupedAdaptationSetIndices[i]); } @@ -616,8 +629,8 @@ private static int[][] getGroupedAdaptationSetIndices(List adapta * same primary group, grouped in primary track groups order. * @param primaryGroupHasEventMessageTrackFlags An output array to be filled with flags indicating * whether each of the primary track groups contains an embedded event message track. - * @param primaryGroupCea608TrackFormats An output array to be filled with track formats for - * CEA-608 tracks embedded in each of the primary track groups. + * @param primaryGroupClosedCaptionTrackFormats An output array to be filled with track formats + * for closed caption tracks embedded in each of the primary track groups. * @return Total number of embedded track groups. 
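The switch from Util.toArray to Guava's Ints.toArray in getGroupedAdaptationSetIndices above is behaviour-preserving: both flatten a boxed integer list into an int[] before the per-group sort. A minimal usage example:

    import com.google.common.primitives.Ints;
    import java.util.Arrays;
    import java.util.List;

    final class IntsToArrayExample {
      public static void main(String[] args) {
        List<Integer> groupedIndices = Arrays.asList(3, 0, 2);
        int[] asArray = Ints.toArray(groupedIndices);
        Arrays.sort(asArray); // The diff sorts each group to restore the original adaptation set order.
        System.out.println(Arrays.toString(asArray)); // [0, 2, 3]
      }
    }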
*/ private static int identifyEmbeddedTracks( @@ -625,16 +638,16 @@ private static int identifyEmbeddedTracks( List adaptationSets, int[][] groupedAdaptationSetIndices, boolean[] primaryGroupHasEventMessageTrackFlags, - Format[][] primaryGroupCea608TrackFormats) { + Format[][] primaryGroupClosedCaptionTrackFormats) { int numEmbeddedTrackGroups = 0; for (int i = 0; i < primaryGroupCount; i++) { if (hasEventMessageTrack(adaptationSets, groupedAdaptationSetIndices[i])) { primaryGroupHasEventMessageTrackFlags[i] = true; numEmbeddedTrackGroups++; } - primaryGroupCea608TrackFormats[i] = - getCea608TrackFormats(adaptationSets, groupedAdaptationSetIndices[i]); - if (primaryGroupCea608TrackFormats[i].length != 0) { + primaryGroupClosedCaptionTrackFormats[i] = + getClosedCaptionTrackFormats(adaptationSets, groupedAdaptationSetIndices[i]); + if (primaryGroupClosedCaptionTrackFormats[i].length != 0) { numEmbeddedTrackGroups++; } } @@ -642,12 +655,12 @@ private static int identifyEmbeddedTracks( } private static int buildPrimaryAndEmbeddedTrackGroupInfos( - DrmSessionManager drmSessionManager, + DrmSessionManager drmSessionManager, List adaptationSets, int[][] groupedAdaptationSetIndices, int primaryGroupCount, boolean[] primaryGroupHasEventMessageTrackFlags, - Format[][] primaryGroupCea608TrackFormats, + Format[][] primaryGroupClosedCaptionTrackFormats, TrackGroup[] trackGroups, TrackGroupInfo[] trackGroupInfos) { int trackGroupCount = 0; @@ -660,59 +673,70 @@ private static int buildPrimaryAndEmbeddedTrackGroupInfos( Format[] formats = new Format[representations.size()]; for (int j = 0; j < formats.length; j++) { Format format = representations.get(j).format; - DrmInitData drmInitData = format.drmInitData; - if (drmInitData != null) { - format = - format.copyWithExoMediaCryptoType( - drmSessionManager.getExoMediaCryptoType(drmInitData)); - } - formats[j] = format; + formats[j] = format.copyWithCryptoType(drmSessionManager.getCryptoType(format)); } AdaptationSet firstAdaptationSet = adaptationSets.get(adaptationSetIndices[0]); + String trackGroupId = + firstAdaptationSet.id != AdaptationSet.ID_UNSET + ? Integer.toString(firstAdaptationSet.id) + : ("unset:" + i); int primaryTrackGroupIndex = trackGroupCount++; int eventMessageTrackGroupIndex = primaryGroupHasEventMessageTrackFlags[i] ? trackGroupCount++ : C.INDEX_UNSET; - int cea608TrackGroupIndex = - primaryGroupCea608TrackFormats[i].length != 0 ? trackGroupCount++ : C.INDEX_UNSET; + int closedCaptionTrackGroupIndex = + primaryGroupClosedCaptionTrackFormats[i].length != 0 ? 
trackGroupCount++ : C.INDEX_UNSET; - trackGroups[primaryTrackGroupIndex] = new TrackGroup(formats); + trackGroups[primaryTrackGroupIndex] = new TrackGroup(trackGroupId, formats); trackGroupInfos[primaryTrackGroupIndex] = TrackGroupInfo.primaryTrack( firstAdaptationSet.type, adaptationSetIndices, primaryTrackGroupIndex, eventMessageTrackGroupIndex, - cea608TrackGroupIndex); + closedCaptionTrackGroupIndex); if (eventMessageTrackGroupIndex != C.INDEX_UNSET) { - Format format = Format.createSampleFormat(firstAdaptationSet.id + ":emsg", - MimeTypes.APPLICATION_EMSG, null, Format.NO_VALUE, null); - trackGroups[eventMessageTrackGroupIndex] = new TrackGroup(format); + String eventMessageTrackGroupId = trackGroupId + ":emsg"; + Format format = + new Format.Builder() + .setId(eventMessageTrackGroupId) + .setSampleMimeType(MimeTypes.APPLICATION_EMSG) + .build(); + trackGroups[eventMessageTrackGroupIndex] = new TrackGroup(eventMessageTrackGroupId, format); trackGroupInfos[eventMessageTrackGroupIndex] = TrackGroupInfo.embeddedEmsgTrack(adaptationSetIndices, primaryTrackGroupIndex); } - if (cea608TrackGroupIndex != C.INDEX_UNSET) { - trackGroups[cea608TrackGroupIndex] = new TrackGroup(primaryGroupCea608TrackFormats[i]); - trackGroupInfos[cea608TrackGroupIndex] = - TrackGroupInfo.embeddedCea608Track(adaptationSetIndices, primaryTrackGroupIndex); + if (closedCaptionTrackGroupIndex != C.INDEX_UNSET) { + String closedCaptionTrackGroupId = trackGroupId + ":cc"; + trackGroups[closedCaptionTrackGroupIndex] = + new TrackGroup(closedCaptionTrackGroupId, primaryGroupClosedCaptionTrackFormats[i]); + trackGroupInfos[closedCaptionTrackGroupIndex] = + TrackGroupInfo.embeddedClosedCaptionTrack(adaptationSetIndices, primaryTrackGroupIndex); } } return trackGroupCount; } - private static void buildManifestEventTrackGroupInfos(List eventStreams, - TrackGroup[] trackGroups, TrackGroupInfo[] trackGroupInfos, int existingTrackGroupCount) { + private static void buildManifestEventTrackGroupInfos( + List eventStreams, + TrackGroup[] trackGroups, + TrackGroupInfo[] trackGroupInfos, + int existingTrackGroupCount) { for (int i = 0; i < eventStreams.size(); i++) { EventStream eventStream = eventStreams.get(i); - Format format = Format.createSampleFormat(eventStream.id(), MimeTypes.APPLICATION_EMSG, null, - Format.NO_VALUE, null); - trackGroups[existingTrackGroupCount] = new TrackGroup(format); + Format format = + new Format.Builder() + .setId(eventStream.id()) + .setSampleMimeType(MimeTypes.APPLICATION_EMSG) + .build(); + String uniqueTrackGroupId = eventStream.id() + ":" + i; + trackGroups[existingTrackGroupCount] = new TrackGroup(uniqueTrackGroupId, format); trackGroupInfos[existingTrackGroupCount++] = TrackGroupInfo.mpdEventTrack(i); } } - private ChunkSampleStream buildSampleStream(TrackGroupInfo trackGroupInfo, - TrackSelection selection, long positionUs) { + private ChunkSampleStream buildSampleStream( + TrackGroupInfo trackGroupInfo, ExoTrackSelection selection, long positionUs) { int embeddedTrackCount = 0; boolean enableEventMessageTrack = trackGroupInfo.embeddedEventMessageTrackGroupIndex != C.INDEX_UNSET; @@ -722,11 +746,13 @@ private ChunkSampleStream buildSampleStream(TrackGroupInfo trac trackGroups.get(trackGroupInfo.embeddedEventMessageTrackGroupIndex); embeddedTrackCount++; } - boolean enableCea608Tracks = trackGroupInfo.embeddedCea608TrackGroupIndex != C.INDEX_UNSET; - TrackGroup embeddedCea608TrackGroup = null; - if (enableCea608Tracks) { - embeddedCea608TrackGroup = 
trackGroups.get(trackGroupInfo.embeddedCea608TrackGroupIndex); - embeddedTrackCount += embeddedCea608TrackGroup.length; + boolean enableClosedCaptionTrack = + trackGroupInfo.embeddedClosedCaptionTrackGroupIndex != C.INDEX_UNSET; + TrackGroup embeddedClosedCaptionTrackGroup = null; + if (enableClosedCaptionTrack) { + embeddedClosedCaptionTrackGroup = + trackGroups.get(trackGroupInfo.embeddedClosedCaptionTrackGroupIndex); + embeddedTrackCount += embeddedClosedCaptionTrackGroup.length; } Format[] embeddedTrackFormats = new Format[embeddedTrackCount]; @@ -737,12 +763,12 @@ private ChunkSampleStream buildSampleStream(TrackGroupInfo trac embeddedTrackTypes[embeddedTrackCount] = C.TRACK_TYPE_METADATA; embeddedTrackCount++; } - List embeddedCea608TrackFormats = new ArrayList<>(); - if (enableCea608Tracks) { - for (int i = 0; i < embeddedCea608TrackGroup.length; i++) { - embeddedTrackFormats[embeddedTrackCount] = embeddedCea608TrackGroup.getFormat(i); + List embeddedClosedCaptionTrackFormats = new ArrayList<>(); + if (enableClosedCaptionTrack) { + for (int i = 0; i < embeddedClosedCaptionTrackGroup.length; i++) { + embeddedTrackFormats[embeddedTrackCount] = embeddedClosedCaptionTrackGroup.getFormat(i); embeddedTrackTypes[embeddedTrackCount] = C.TRACK_TYPE_TEXT; - embeddedCea608TrackFormats.add(embeddedTrackFormats[embeddedTrackCount]); + embeddedClosedCaptionTrackFormats.add(embeddedTrackFormats[embeddedTrackCount]); embeddedTrackCount++; } } @@ -755,15 +781,17 @@ private ChunkSampleStream buildSampleStream(TrackGroupInfo trac chunkSourceFactory.createDashChunkSource( manifestLoaderErrorThrower, manifest, + baseUrlExclusionList, periodIndex, trackGroupInfo.adaptationSetIndices, selection, trackGroupInfo.trackType, elapsedRealtimeOffsetMs, enableEventMessageTrack, - embeddedCea608TrackFormats, + embeddedClosedCaptionTrackFormats, trackPlayerEmsgHandler, - transferListener); + transferListener, + playerId); ChunkSampleStream stream = new ChunkSampleStream<>( trackGroupInfo.trackType, @@ -774,8 +802,9 @@ private ChunkSampleStream buildSampleStream(TrackGroupInfo trac allocator, positionUs, drmSessionManager, + drmEventDispatcher, loadErrorHandlingPolicy, - eventDispatcher); + mediaSourceEventDispatcher); synchronized (this) { // The map is also accessed on the loading thread so synchronize access. 
trackEmsgHandlerBySampleStream.put(stream, trackPlayerEmsgHandler); @@ -783,6 +812,7 @@ private ChunkSampleStream buildSampleStream(TrackGroupInfo trac return stream; } + @Nullable private static Descriptor findAdaptationSetSwitchingProperty(List descriptors) { return findDescriptor(descriptors, "urn:mpeg:dash:adaptation-set-switching:2016"); } @@ -803,8 +833,8 @@ private static Descriptor findDescriptor(List descriptors, String sc return null; } - private static boolean hasEventMessageTrack(List adaptationSets, - int[] adaptationSetIndices) { + private static boolean hasEventMessageTrack( + List adaptationSets, int[] adaptationSetIndices) { for (int i : adaptationSetIndices) { List representations = adaptationSets.get(i).representations; for (int j = 0; j < representations.size(); j++) { @@ -817,7 +847,7 @@ private static boolean hasEventMessageTrack(List adaptationSets, return false; } - private static Format[] getCea608TrackFormats( + private static Format[] getClosedCaptionTrackFormats( List adaptationSets, int[] adaptationSetIndices) { for (int i : adaptationSetIndices) { AdaptationSet adaptationSet = adaptationSets.get(i); @@ -825,52 +855,52 @@ private static Format[] getCea608TrackFormats( for (int j = 0; j < descriptors.size(); j++) { Descriptor descriptor = descriptors.get(j); if ("urn:scte:dash:cc:cea-608:2015".equals(descriptor.schemeIdUri)) { - String value = descriptor.value; - if (value == null) { - // There are embedded CEA-608 tracks, but service information is not declared. - return new Format[] {buildCea608TrackFormat(adaptationSet.id)}; - } - String[] services = Util.split(value, ";"); - Format[] formats = new Format[services.length]; - for (int k = 0; k < services.length; k++) { - Matcher matcher = CEA608_SERVICE_DESCRIPTOR_REGEX.matcher(services[k]); - if (!matcher.matches()) { - // If we can't parse service information for all services, assume a single track. - return new Format[] {buildCea608TrackFormat(adaptationSet.id)}; - } - formats[k] = - buildCea608TrackFormat( - adaptationSet.id, - /* language= */ matcher.group(2), - /* accessibilityChannel= */ Integer.parseInt(matcher.group(1))); - } - return formats; + Format cea608Format = + new Format.Builder() + .setSampleMimeType(MimeTypes.APPLICATION_CEA608) + .setId(adaptationSet.id + ":cea608") + .build(); + return parseClosedCaptionDescriptor( + descriptor, CEA608_SERVICE_DESCRIPTOR_REGEX, cea608Format); + } else if ("urn:scte:dash:cc:cea-708:2015".equals(descriptor.schemeIdUri)) { + Format cea708Format = + new Format.Builder() + .setSampleMimeType(MimeTypes.APPLICATION_CEA708) + .setId(adaptationSet.id + ":cea708") + .build(); + return parseClosedCaptionDescriptor( + descriptor, CEA708_SERVICE_DESCRIPTOR_REGEX, cea708Format); } } } return new Format[0]; } - private static Format buildCea608TrackFormat(int adaptationSetId) { - return buildCea608TrackFormat( - adaptationSetId, /* language= */ null, /* accessibilityChannel= */ Format.NO_VALUE); - } - - private static Format buildCea608TrackFormat( - int adaptationSetId, String language, int accessibilityChannel) { - return Format.createTextSampleFormat( - adaptationSetId - + ":cea608" - + (accessibilityChannel != Format.NO_VALUE ? 
":" + accessibilityChannel : ""), - MimeTypes.APPLICATION_CEA608, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - /* selectionFlags= */ 0, - language, - accessibilityChannel, - /* drmInitData= */ null, - Format.OFFSET_SAMPLE_RELATIVE, - /* initializationData= */ null); + private static Format[] parseClosedCaptionDescriptor( + Descriptor descriptor, Pattern serviceDescriptorRegex, Format baseFormat) { + @Nullable String value = descriptor.value; + if (value == null) { + // There are embedded closed caption tracks, but service information is not declared. + return new Format[] {baseFormat}; + } + String[] services = Util.split(value, ";"); + Format[] formats = new Format[services.length]; + for (int i = 0; i < services.length; i++) { + Matcher matcher = serviceDescriptorRegex.matcher(services[i]); + if (!matcher.matches()) { + // If we can't parse service information for all services, assume a single track. + return new Format[] {baseFormat}; + } + int accessibilityChannel = Integer.parseInt(matcher.group(1)); + formats[i] = + baseFormat + .buildUpon() + .setId(baseFormat.id + ":" + accessibilityChannel) + .setAccessibilityChannel(accessibilityChannel) + .setLanguage(matcher.group(2)) + .build(); + } + return formats; } // We won't assign the array to a variable that erases the generic type, and then write into it. @@ -883,12 +913,13 @@ private static final class TrackGroupInfo { @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({CATEGORY_PRIMARY, CATEGORY_EMBEDDED, CATEGORY_MANIFEST_EVENTS}) public @interface TrackGroupCategory {} /** - * A normal track group that has its samples drawn from the stream. - * For example: a video Track Group or an audio Track Group. + * A normal track group that has its samples drawn from the stream. For example: a video Track + * Group or an audio Track Group. */ private static final int CATEGORY_PRIMARY = 0; @@ -899,39 +930,38 @@ private static final class TrackGroupInfo { private static final int CATEGORY_EMBEDDED = 1; /** - * A track group that has its samples listed explicitly in the DASH manifest file. - * For example: an EventStream track has its sample (Events) included directly in the DASH - * manifest file. + * A track group that has its samples listed explicitly in the DASH manifest file. For example: + * an EventStream track has its sample (Events) included directly in the DASH manifest file. 
*/ private static final int CATEGORY_MANIFEST_EVENTS = 2; public final int[] adaptationSetIndices; - public final int trackType; - @TrackGroupCategory public final int trackGroupCategory; + public final @C.TrackType int trackType; + public final @TrackGroupCategory int trackGroupCategory; public final int eventStreamGroupIndex; public final int primaryTrackGroupIndex; public final int embeddedEventMessageTrackGroupIndex; - public final int embeddedCea608TrackGroupIndex; + public final int embeddedClosedCaptionTrackGroupIndex; public static TrackGroupInfo primaryTrack( int trackType, int[] adaptationSetIndices, int primaryTrackGroupIndex, int embeddedEventMessageTrackGroupIndex, - int embeddedCea608TrackGroupIndex) { + int embeddedClosedCaptionTrackGroupIndex) { return new TrackGroupInfo( trackType, CATEGORY_PRIMARY, adaptationSetIndices, primaryTrackGroupIndex, embeddedEventMessageTrackGroupIndex, - embeddedCea608TrackGroupIndex, + embeddedClosedCaptionTrackGroupIndex, /* eventStreamGroupIndex= */ -1); } - public static TrackGroupInfo embeddedEmsgTrack(int[] adaptationSetIndices, - int primaryTrackGroupIndex) { + public static TrackGroupInfo embeddedEmsgTrack( + int[] adaptationSetIndices, int primaryTrackGroupIndex) { return new TrackGroupInfo( C.TRACK_TYPE_METADATA, CATEGORY_EMBEDDED, @@ -942,8 +972,8 @@ public static TrackGroupInfo embeddedEmsgTrack(int[] adaptationSetIndices, /* eventStreamGroupIndex= */ -1); } - public static TrackGroupInfo embeddedCea608Track(int[] adaptationSetIndices, - int primaryTrackGroupIndex) { + public static TrackGroupInfo embeddedClosedCaptionTrack( + int[] adaptationSetIndices, int primaryTrackGroupIndex) { return new TrackGroupInfo( C.TRACK_TYPE_TEXT, CATEGORY_EMBEDDED, @@ -966,21 +996,20 @@ public static TrackGroupInfo mpdEventTrack(int eventStreamIndex) { } private TrackGroupInfo( - int trackType, + @C.TrackType int trackType, @TrackGroupCategory int trackGroupCategory, int[] adaptationSetIndices, int primaryTrackGroupIndex, int embeddedEventMessageTrackGroupIndex, - int embeddedCea608TrackGroupIndex, + int embeddedClosedCaptionTrackGroupIndex, int eventStreamGroupIndex) { this.trackType = trackType; this.adaptationSetIndices = adaptationSetIndices; this.trackGroupCategory = trackGroupCategory; this.primaryTrackGroupIndex = primaryTrackGroupIndex; this.embeddedEventMessageTrackGroupIndex = embeddedEventMessageTrackGroupIndex; - this.embeddedCea608TrackGroupIndex = embeddedCea608TrackGroupIndex; + this.embeddedClosedCaptionTrackGroupIndex = embeddedClosedCaptionTrackGroupIndex; this.eventStreamGroupIndex = eventStreamGroupIndex; } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashMediaSource.java index 39cc03dd12..e875f6c928 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashMediaSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashMediaSource.java @@ -15,23 +15,37 @@ */ package com.google.android.exoplayer2.source.dash; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Util.constrainValue; +import static com.google.android.exoplayer2.util.Util.usToMs; +import static java.lang.Math.max; +import static java.lang.Math.min; + import android.net.Uri; import android.os.Handler; +import android.os.Looper; 
import android.os.SystemClock; import android.text.TextUtils; import android.util.SparseArray; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlayerLibraryInfo; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.Timeline; -import com.google.android.exoplayer2.drm.DrmSession; +import com.google.android.exoplayer2.drm.DefaultDrmSessionManagerProvider; +import com.google.android.exoplayer2.drm.DrmSessionEventListener; import com.google.android.exoplayer2.drm.DrmSessionManager; +import com.google.android.exoplayer2.drm.DrmSessionManagerProvider; import com.google.android.exoplayer2.offline.FilteringManifestParser; import com.google.android.exoplayer2.offline.StreamKey; import com.google.android.exoplayer2.source.BaseMediaSource; import com.google.android.exoplayer2.source.CompositeSequenceableLoaderFactory; import com.google.android.exoplayer2.source.DefaultCompositeSequenceableLoaderFactory; +import com.google.android.exoplayer2.source.LoadEventInfo; +import com.google.android.exoplayer2.source.MediaLoadData; import com.google.android.exoplayer2.source.MediaPeriod; import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.source.MediaSourceEventListener; @@ -42,11 +56,14 @@ import com.google.android.exoplayer2.source.dash.manifest.AdaptationSet; import com.google.android.exoplayer2.source.dash.manifest.DashManifest; import com.google.android.exoplayer2.source.dash.manifest.DashManifestParser; +import com.google.android.exoplayer2.source.dash.manifest.Period; +import com.google.android.exoplayer2.source.dash.manifest.Representation; import com.google.android.exoplayer2.source.dash.manifest.UtcTimingElement; import com.google.android.exoplayer2.upstream.Allocator; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DefaultLoadErrorHandlingPolicy; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy.LoadErrorInfo; import com.google.android.exoplayer2.upstream.Loader; import com.google.android.exoplayer2.upstream.Loader.LoadErrorAction; import com.google.android.exoplayer2.upstream.LoaderErrorThrower; @@ -54,12 +71,17 @@ import com.google.android.exoplayer2.upstream.TransferListener; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.SntpClient; import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Charsets; +import com.google.common.math.LongMath; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; -import java.nio.charset.Charset; +import java.math.RoundingMode; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.List; @@ -76,24 +98,30 @@ public final class DashMediaSource extends BaseMediaSource { } /** Factory for {@link DashMediaSource}s. */ + @SuppressWarnings("deprecation") // Implement deprecated type for backwards compatibility. 
public static final class Factory implements MediaSourceFactory { private final DashChunkSource.Factory chunkSourceFactory; @Nullable private final DataSource.Factory manifestDataSourceFactory; - private DrmSessionManager drmSessionManager; - @Nullable private ParsingLoadable.Parser manifestParser; - @Nullable private List streamKeys; + private DrmSessionManagerProvider drmSessionManagerProvider; private CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory; private LoadErrorHandlingPolicy loadErrorHandlingPolicy; - private long livePresentationDelayMs; - private boolean livePresentationDelayOverridesManifest; - private boolean isCreateCalled; - @Nullable private Object tag; + private long fallbackTargetLiveOffsetMs; + @Nullable private ParsingLoadable.Parser manifestParser; /** * Creates a new factory for {@link DashMediaSource}s. * + *

<p>The factory will use the following default components:
+   *
+   * <ul>
+   *   <li>{@link DefaultDashChunkSource.Factory}
+   *   <li>{@link DefaultDrmSessionManagerProvider}
+   *   <li>{@link DefaultLoadErrorHandlingPolicy}
+   *   <li>{@link DefaultCompositeSequenceableLoaderFactory}
+   * </ul>
      + * * @param dataSourceFactory A factory for {@link DataSource} instances that will be used to load * manifest and media data. */ @@ -104,102 +132,68 @@ public Factory(DataSource.Factory dataSourceFactory) { /** * Creates a new factory for {@link DashMediaSource}s. * + *

<p>The factory will use the following default components:
+   *
+   * <ul>
+   *   <li>{@link DefaultDrmSessionManagerProvider}
+   *   <li>{@link DefaultLoadErrorHandlingPolicy}
+   *   <li>{@link DefaultCompositeSequenceableLoaderFactory}
+   * </ul>
      + * * @param chunkSourceFactory A factory for {@link DashChunkSource} instances. * @param manifestDataSourceFactory A factory for {@link DataSource} instances that will be used * to load (and refresh) the manifest. May be {@code null} if the factory will only ever be * used to create create media sources with sideloaded manifests via {@link - * #createMediaSource(DashManifest, Handler, MediaSourceEventListener)}. + * #createMediaSource(DashManifest, MediaItem)}. */ public Factory( DashChunkSource.Factory chunkSourceFactory, @Nullable DataSource.Factory manifestDataSourceFactory) { - this.chunkSourceFactory = Assertions.checkNotNull(chunkSourceFactory); + this.chunkSourceFactory = checkNotNull(chunkSourceFactory); this.manifestDataSourceFactory = manifestDataSourceFactory; - drmSessionManager = DrmSessionManager.getDummyDrmSessionManager(); + drmSessionManagerProvider = new DefaultDrmSessionManagerProvider(); loadErrorHandlingPolicy = new DefaultLoadErrorHandlingPolicy(); - livePresentationDelayMs = DEFAULT_LIVE_PRESENTATION_DELAY_MS; + fallbackTargetLiveOffsetMs = DEFAULT_FALLBACK_TARGET_LIVE_OFFSET_MS; compositeSequenceableLoaderFactory = new DefaultCompositeSequenceableLoaderFactory(); } - /** - * Sets a tag for the media source which will be published in the {@link - * com.google.android.exoplayer2.Timeline} of the source as {@link - * com.google.android.exoplayer2.Timeline.Window#tag}. - * - * @param tag A tag for the media source. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - */ - public Factory setTag(@Nullable Object tag) { - Assertions.checkState(!isCreateCalled); - this.tag = tag; + @CanIgnoreReturnValue + @Override + public Factory setDrmSessionManagerProvider( + DrmSessionManagerProvider drmSessionManagerProvider) { + this.drmSessionManagerProvider = + checkNotNull( + drmSessionManagerProvider, + "MediaSource.Factory#setDrmSessionManagerProvider no longer handles null by" + + " instantiating a new DefaultDrmSessionManagerProvider. Explicitly construct" + + " and pass an instance in order to retain the old behavior."); return this; } - /** - * Sets the minimum number of times to retry if a loading error occurs. See {@link - * #setLoadErrorHandlingPolicy} for the default value. - * - *
<p>
      Calling this method is equivalent to calling {@link #setLoadErrorHandlingPolicy} with - * {@link DefaultLoadErrorHandlingPolicy#DefaultLoadErrorHandlingPolicy(int) - * DefaultLoadErrorHandlingPolicy(minLoadableRetryCount)} - * - * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - * @deprecated Use {@link #setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy)} instead. - */ - @Deprecated - public Factory setMinLoadableRetryCount(int minLoadableRetryCount) { - return setLoadErrorHandlingPolicy(new DefaultLoadErrorHandlingPolicy(minLoadableRetryCount)); - } - - /** - * Sets the {@link LoadErrorHandlingPolicy}. The default value is created by calling {@link - * DefaultLoadErrorHandlingPolicy#DefaultLoadErrorHandlingPolicy()}. - * - *
<p>
      Calling this method overrides any calls to {@link #setMinLoadableRetryCount(int)}. - * - * @param loadErrorHandlingPolicy A {@link LoadErrorHandlingPolicy}. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - */ + @CanIgnoreReturnValue + @Override public Factory setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy loadErrorHandlingPolicy) { - Assertions.checkState(!isCreateCalled); - this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; + this.loadErrorHandlingPolicy = + checkNotNull( + loadErrorHandlingPolicy, + "MediaSource.Factory#setLoadErrorHandlingPolicy no longer handles null by" + + " instantiating a new DefaultLoadErrorHandlingPolicy. Explicitly construct and" + + " pass an instance in order to retain the old behavior."); return this; } - /** @deprecated Use {@link #setLivePresentationDelayMs(long, boolean)}. */ - @Deprecated - @SuppressWarnings("deprecation") - public Factory setLivePresentationDelayMs(long livePresentationDelayMs) { - if (livePresentationDelayMs == DEFAULT_LIVE_PRESENTATION_DELAY_PREFER_MANIFEST_MS) { - return setLivePresentationDelayMs(DEFAULT_LIVE_PRESENTATION_DELAY_MS, false); - } else { - return setLivePresentationDelayMs(livePresentationDelayMs, true); - } - } - /** - * Sets the duration in milliseconds by which the default start position should precede the end - * of the live window for live playbacks. The {@code overridesManifest} parameter specifies - * whether the value is used in preference to one in the manifest, if present. The default value - * is {@link #DEFAULT_LIVE_PRESENTATION_DELAY_MS}, and by default {@code overridesManifest} is - * false. + * Sets the target {@link Player#getCurrentLiveOffset() offset for live streams} that is used if + * no value is defined in the {@link MediaItem} or the manifest. + * + *
<p>
      The default value is {@link #DEFAULT_FALLBACK_TARGET_LIVE_OFFSET_MS}. * - * @param livePresentationDelayMs For live playbacks, the duration in milliseconds by which the - * default start position should precede the end of the live window. - * @param overridesManifest Whether the value is used in preference to one in the manifest, if - * present. + * @param fallbackTargetLiveOffsetMs The fallback live target offset in milliseconds. * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. */ - public Factory setLivePresentationDelayMs( - long livePresentationDelayMs, boolean overridesManifest) { - Assertions.checkState(!isCreateCalled); - this.livePresentationDelayMs = livePresentationDelayMs; - this.livePresentationDelayOverridesManifest = overridesManifest; + @CanIgnoreReturnValue + public Factory setFallbackTargetLiveOffsetMs(long fallbackTargetLiveOffsetMs) { + this.fallbackTargetLiveOffsetMs = fallbackTargetLiveOffsetMs; return this; } @@ -208,12 +202,11 @@ public Factory setLivePresentationDelayMs( * * @param manifestParser A parser for loaded manifest data. * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. */ + @CanIgnoreReturnValue public Factory setManifestParser( - ParsingLoadable.Parser manifestParser) { - Assertions.checkState(!isCreateCalled); - this.manifestParser = Assertions.checkNotNull(manifestParser); + @Nullable ParsingLoadable.Parser manifestParser) { + this.manifestParser = manifestParser; return this; } @@ -226,13 +219,16 @@ public Factory setManifestParser( * SequenceableLoader}s for when this media source loads data from multiple streams (video, * audio etc...). * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. */ + @CanIgnoreReturnValue public Factory setCompositeSequenceableLoaderFactory( CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory) { - Assertions.checkState(!isCreateCalled); this.compositeSequenceableLoaderFactory = - Assertions.checkNotNull(compositeSequenceableLoaderFactory); + checkNotNull( + compositeSequenceableLoaderFactory, + "DashMediaSource.Factory#setCompositeSequenceableLoaderFactory no longer handles null" + + " by instantiating a new DefaultCompositeSequenceableLoaderFactory. Explicitly" + + " construct and pass an instance in order to retain the old behavior."); return this; } @@ -245,150 +241,115 @@ public Factory setCompositeSequenceableLoaderFactory( * @throws IllegalArgumentException If {@link DashManifest#dynamic} is true. */ public DashMediaSource createMediaSource(DashManifest manifest) { + return createMediaSource( + manifest, + new MediaItem.Builder() + .setUri(Uri.EMPTY) + .setMediaId(DEFAULT_MEDIA_ID) + .setMimeType(MimeTypes.APPLICATION_MPD) + .build()); + } + + /** + * Returns a new {@link DashMediaSource} using the current parameters and the specified + * sideloaded manifest. + * + * @param manifest The manifest. {@link DashManifest#dynamic} must be false. + * @param mediaItem The {@link MediaItem} to be included in the timeline. + * @return The new {@link DashMediaSource}. + * @throws IllegalArgumentException If {@link DashManifest#dynamic} is true. 
+ */ + public DashMediaSource createMediaSource(DashManifest manifest, MediaItem mediaItem) { Assertions.checkArgument(!manifest.dynamic); - isCreateCalled = true; - if (streamKeys != null && !streamKeys.isEmpty()) { - manifest = manifest.copy(streamKeys); + MediaItem.Builder mediaItemBuilder = + mediaItem.buildUpon().setMimeType(MimeTypes.APPLICATION_MPD); + if (mediaItem.localConfiguration == null) { + mediaItemBuilder.setUri(Uri.EMPTY); } + mediaItem = mediaItemBuilder.build(); return new DashMediaSource( + mediaItem, manifest, - /* manifestUri= */ null, /* manifestDataSourceFactory= */ null, /* manifestParser= */ null, chunkSourceFactory, compositeSequenceableLoaderFactory, - drmSessionManager, + drmSessionManagerProvider.get(mediaItem), loadErrorHandlingPolicy, - livePresentationDelayMs, - livePresentationDelayOverridesManifest, - tag); - } - - /** - * @deprecated Use {@link #createMediaSource(DashManifest)} and {@link - * #addEventListener(Handler, MediaSourceEventListener)} instead. - */ - @Deprecated - public DashMediaSource createMediaSource( - DashManifest manifest, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - DashMediaSource mediaSource = createMediaSource(manifest); - if (eventHandler != null && eventListener != null) { - mediaSource.addEventListener(eventHandler, eventListener); - } - return mediaSource; - } - - /** - * @deprecated Use {@link #createMediaSource(Uri)} and {@link #addEventListener(Handler, - * MediaSourceEventListener)} instead. - */ - @Deprecated - public DashMediaSource createMediaSource( - Uri manifestUri, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - DashMediaSource mediaSource = createMediaSource(manifestUri); - if (eventHandler != null && eventListener != null) { - mediaSource.addEventListener(eventHandler, eventListener); - } - return mediaSource; - } - - /** - * Sets the {@link DrmSessionManager} to use for acquiring {@link DrmSession DrmSessions}. The - * default value is {@link DrmSessionManager#DUMMY}. - * - * @param drmSessionManager The {@link DrmSessionManager}. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - */ - @Override - public Factory setDrmSessionManager(DrmSessionManager drmSessionManager) { - Assertions.checkState(!isCreateCalled); - this.drmSessionManager = - drmSessionManager != null - ? drmSessionManager - : DrmSessionManager.getDummyDrmSessionManager(); - return this; + fallbackTargetLiveOffsetMs); } /** * Returns a new {@link DashMediaSource} using the current parameters. * - * @param manifestUri The manifest {@link Uri}. + * @param mediaItem The media item of the dash stream. * @return The new {@link DashMediaSource}. + * @throws NullPointerException if {@link MediaItem#localConfiguration} is {@code null}. 
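A hedged sketch of how the sideloaded-manifest overload above could be used; the data source factory, chunk source factory and placeholder URI are assumptions, and a real caller would supply a DashManifest it has already downloaded and parsed:

    static DashMediaSource fromSideloadedManifest(DashManifest manifest) {
      // The overload requires a static manifest (DashManifest#dynamic == false).
      DataSource.Factory dataSourceFactory = new DefaultHttpDataSource.Factory();
      return new DashMediaSource.Factory(
              new DefaultDashChunkSource.Factory(dataSourceFactory),
              /* manifestDataSourceFactory= */ null)
          .createMediaSource(
              manifest, MediaItem.fromUri("https://example.com/placeholder.mpd"));
    }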
*/ @Override - public DashMediaSource createMediaSource(Uri manifestUri) { - isCreateCalled = true; + public DashMediaSource createMediaSource(MediaItem mediaItem) { + checkNotNull(mediaItem.localConfiguration); + @Nullable ParsingLoadable.Parser manifestParser = this.manifestParser; if (manifestParser == null) { manifestParser = new DashManifestParser(); } - if (streamKeys != null) { + List streamKeys = mediaItem.localConfiguration.streamKeys; + if (!streamKeys.isEmpty()) { manifestParser = new FilteringManifestParser<>(manifestParser, streamKeys); } + return new DashMediaSource( + mediaItem, /* manifest= */ null, - Assertions.checkNotNull(manifestUri), manifestDataSourceFactory, manifestParser, chunkSourceFactory, compositeSequenceableLoaderFactory, - drmSessionManager, + drmSessionManagerProvider.get(mediaItem), loadErrorHandlingPolicy, - livePresentationDelayMs, - livePresentationDelayOverridesManifest, - tag); - } - - @Override - public Factory setStreamKeys(List streamKeys) { - Assertions.checkState(!isCreateCalled); - this.streamKeys = streamKeys; - return this; + fallbackTargetLiveOffsetMs); } @Override - public int[] getSupportedTypes() { - return new int[] {C.TYPE_DASH}; + public @C.ContentType int[] getSupportedTypes() { + return new int[] {C.CONTENT_TYPE_DASH}; } } /** - * The default presentation delay for live streams. The presentation delay is the duration by - * which the default start position precedes the end of the live window. + * The default target {@link Player#getCurrentLiveOffset() offset for live streams} that is used + * if no value is defined in the {@link MediaItem} or the manifest. */ - public static final long DEFAULT_LIVE_PRESENTATION_DELAY_MS = 30000; - /** @deprecated Use {@link #DEFAULT_LIVE_PRESENTATION_DELAY_MS}. */ - @Deprecated - public static final long DEFAULT_LIVE_PRESENTATION_DELAY_FIXED_MS = - DEFAULT_LIVE_PRESENTATION_DELAY_MS; - /** @deprecated Use of this parameter is no longer necessary. */ - @Deprecated public static final long DEFAULT_LIVE_PRESENTATION_DELAY_PREFER_MANIFEST_MS = -1; + public static final long DEFAULT_FALLBACK_TARGET_LIVE_OFFSET_MS = 30_000; + /** + * @deprecated Use {@link #DEFAULT_FALLBACK_TARGET_LIVE_OFFSET_MS} instead. + */ + @Deprecated public static final long DEFAULT_LIVE_PRESENTATION_DELAY_MS = 30_000; + /** The media id used by media items of dash media sources without a manifest URI. */ + public static final String DEFAULT_MEDIA_ID = "DashMediaSource"; /** * The interval in milliseconds between invocations of {@link * MediaSourceCaller#onSourceInfoRefreshed(MediaSource, Timeline)} when the source's {@link * Timeline} is changing dynamically (for example, for incomplete live streams). */ - private static final int NOTIFY_MANIFEST_INTERVAL_MS = 5000; + private static final long DEFAULT_NOTIFY_MANIFEST_INTERVAL_MS = 5000; /** * The minimum default start position for live streams, relative to the start of the live window. 
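Because createMediaSource(MediaItem) now takes the DRM configuration and stream keys from the item itself (via drmSessionManagerProvider.get(mediaItem) and localConfiguration.streamKeys), a call site looks roughly as follows. The URIs are placeholders, and builder names not visible in this diff (MediaItem.DrmConfiguration.Builder, DefaultHttpDataSource.Factory) are assumed from the matching ExoPlayer release:

    MediaItem mediaItem =
        new MediaItem.Builder()
            .setUri("https://example.com/stream.mpd")             // placeholder
            .setMimeType(MimeTypes.APPLICATION_MPD)
            .setDrmConfiguration(
                new MediaItem.DrmConfiguration.Builder(C.WIDEVINE_UUID)
                    .setLicenseUri("https://example.com/license") // placeholder
                    .build())
            .build();
    DashMediaSource mediaSource =
        new DashMediaSource.Factory(new DefaultHttpDataSource.Factory())
            .createMediaSource(mediaItem);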
*/ - private static final long MIN_LIVE_DEFAULT_START_POSITION_US = 5000000; + private static final long MIN_LIVE_DEFAULT_START_POSITION_US = 5_000_000; private static final String TAG = "DashMediaSource"; + private final MediaItem mediaItem; private final boolean sideloadedManifest; private final DataSource.Factory manifestDataSourceFactory; private final DashChunkSource.Factory chunkSourceFactory; private final CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory; - private final DrmSessionManager drmSessionManager; + private final DrmSessionManager drmSessionManager; private final LoadErrorHandlingPolicy loadErrorHandlingPolicy; - private final long livePresentationDelayMs; - private final boolean livePresentationDelayOverridesManifest; + private final BaseUrlExclusionList baseUrlExclusionList; + private final long fallbackTargetLiveOffsetMs; private final EventDispatcher manifestEventDispatcher; private final ParsingLoadable.Parser manifestParser; private final ManifestCallback manifestCallback; @@ -398,7 +359,6 @@ public int[] getSupportedTypes() { private final Runnable simulateManifestRefreshRunnable; private final PlayerEmsgCallback playerEmsgCallback; private final LoaderErrorThrower manifestLoadErrorThrower; - @Nullable private final Object tag; private DataSource dataSource; private Loader loader; @@ -407,8 +367,9 @@ public int[] getSupportedTypes() { private IOException manifestFatalError; private Handler handler; - private Uri initialManifestUri; + private MediaItem.LiveConfiguration liveConfiguration; private Uri manifestUri; + private Uri initialManifestUri; private DashManifest manifest; private boolean manifestLoadPending; private long manifestLoadStartTimestampMs; @@ -420,210 +381,36 @@ public int[] getSupportedTypes() { private int firstPeriodId; - /** - * Constructs an instance to play a given {@link DashManifest}, which must be static. - * - * @param manifest The manifest. {@link DashManifest#dynamic} must be false. - * @param chunkSourceFactory A factory for {@link DashChunkSource} instances. - * @param eventHandler A handler for events. May be null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @deprecated Use {@link Factory} instead. - */ - @Deprecated - @SuppressWarnings("deprecation") - public DashMediaSource( - DashManifest manifest, - DashChunkSource.Factory chunkSourceFactory, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - this( - manifest, - chunkSourceFactory, - DefaultLoadErrorHandlingPolicy.DEFAULT_MIN_LOADABLE_RETRY_COUNT, - eventHandler, - eventListener); - } - - /** - * Constructs an instance to play a given {@link DashManifest}, which must be static. - * - * @param manifest The manifest. {@link DashManifest#dynamic} must be false. - * @param chunkSourceFactory A factory for {@link DashChunkSource} instances. - * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs. - * @param eventHandler A handler for events. May be null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @deprecated Use {@link Factory} instead. 
- */ - @Deprecated - public DashMediaSource( - DashManifest manifest, - DashChunkSource.Factory chunkSourceFactory, - int minLoadableRetryCount, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - this( - manifest, - /* manifestUri= */ null, - /* manifestDataSourceFactory= */ null, - /* manifestParser= */ null, - chunkSourceFactory, - new DefaultCompositeSequenceableLoaderFactory(), - DrmSessionManager.getDummyDrmSessionManager(), - new DefaultLoadErrorHandlingPolicy(minLoadableRetryCount), - DEFAULT_LIVE_PRESENTATION_DELAY_MS, - /* livePresentationDelayOverridesManifest= */ false, - /* tag= */ null); - if (eventHandler != null && eventListener != null) { - addEventListener(eventHandler, eventListener); - } - } - - /** - * Constructs an instance to play the manifest at a given {@link Uri}, which may be dynamic or - * static. - * - * @param manifestUri The manifest {@link Uri}. - * @param manifestDataSourceFactory A factory for {@link DataSource} instances that will be used - * to load (and refresh) the manifest. - * @param chunkSourceFactory A factory for {@link DashChunkSource} instances. - * @param eventHandler A handler for events. May be null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @deprecated Use {@link Factory} instead. - */ - @Deprecated - @SuppressWarnings("deprecation") - public DashMediaSource( - Uri manifestUri, - DataSource.Factory manifestDataSourceFactory, - DashChunkSource.Factory chunkSourceFactory, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - this( - manifestUri, - manifestDataSourceFactory, - chunkSourceFactory, - DefaultLoadErrorHandlingPolicy.DEFAULT_MIN_LOADABLE_RETRY_COUNT, - DEFAULT_LIVE_PRESENTATION_DELAY_PREFER_MANIFEST_MS, - eventHandler, - eventListener); - } - - /** - * Constructs an instance to play the manifest at a given {@link Uri}, which may be dynamic or - * static. - * - * @param manifestUri The manifest {@link Uri}. - * @param manifestDataSourceFactory A factory for {@link DataSource} instances that will be used - * to load (and refresh) the manifest. - * @param chunkSourceFactory A factory for {@link DashChunkSource} instances. - * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs. - * @param livePresentationDelayMs For live playbacks, the duration in milliseconds by which the - * default start position should precede the end of the live window. Use {@link - * #DEFAULT_LIVE_PRESENTATION_DELAY_PREFER_MANIFEST_MS} to use the value specified by the - * manifest, if present. - * @param eventHandler A handler for events. May be null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @deprecated Use {@link Factory} instead. 
- */ - @Deprecated - @SuppressWarnings("deprecation") - public DashMediaSource( - Uri manifestUri, - DataSource.Factory manifestDataSourceFactory, - DashChunkSource.Factory chunkSourceFactory, - int minLoadableRetryCount, - long livePresentationDelayMs, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - this( - manifestUri, - manifestDataSourceFactory, - new DashManifestParser(), - chunkSourceFactory, - minLoadableRetryCount, - livePresentationDelayMs, - eventHandler, - eventListener); - } - - /** - * Constructs an instance to play the manifest at a given {@link Uri}, which may be dynamic or - * static. - * - * @param manifestUri The manifest {@link Uri}. - * @param manifestDataSourceFactory A factory for {@link DataSource} instances that will be used - * to load (and refresh) the manifest. - * @param manifestParser A parser for loaded manifest data. - * @param chunkSourceFactory A factory for {@link DashChunkSource} instances. - * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs. - * @param livePresentationDelayMs For live playbacks, the duration in milliseconds by which the - * default start position should precede the end of the live window. Use {@link - * #DEFAULT_LIVE_PRESENTATION_DELAY_PREFER_MANIFEST_MS} to use the value specified by the - * manifest, if present. - * @param eventHandler A handler for events. May be null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @deprecated Use {@link Factory} instead. - */ - @Deprecated - @SuppressWarnings("deprecation") - public DashMediaSource( - Uri manifestUri, - DataSource.Factory manifestDataSourceFactory, - ParsingLoadable.Parser manifestParser, - DashChunkSource.Factory chunkSourceFactory, - int minLoadableRetryCount, - long livePresentationDelayMs, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - this( - /* manifest= */ null, - manifestUri, - manifestDataSourceFactory, - manifestParser, - chunkSourceFactory, - new DefaultCompositeSequenceableLoaderFactory(), - DrmSessionManager.getDummyDrmSessionManager(), - new DefaultLoadErrorHandlingPolicy(minLoadableRetryCount), - livePresentationDelayMs == DEFAULT_LIVE_PRESENTATION_DELAY_PREFER_MANIFEST_MS - ? 
DEFAULT_LIVE_PRESENTATION_DELAY_MS - : livePresentationDelayMs, - livePresentationDelayMs != DEFAULT_LIVE_PRESENTATION_DELAY_PREFER_MANIFEST_MS, - /* tag= */ null); - if (eventHandler != null && eventListener != null) { - addEventListener(eventHandler, eventListener); - } - } - private DashMediaSource( + MediaItem mediaItem, @Nullable DashManifest manifest, - @Nullable Uri manifestUri, @Nullable DataSource.Factory manifestDataSourceFactory, @Nullable ParsingLoadable.Parser manifestParser, DashChunkSource.Factory chunkSourceFactory, CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory, - DrmSessionManager drmSessionManager, + DrmSessionManager drmSessionManager, LoadErrorHandlingPolicy loadErrorHandlingPolicy, - long livePresentationDelayMs, - boolean livePresentationDelayOverridesManifest, - @Nullable Object tag) { - this.initialManifestUri = manifestUri; + long fallbackTargetLiveOffsetMs) { + this.mediaItem = mediaItem; + this.liveConfiguration = mediaItem.liveConfiguration; + this.manifestUri = checkNotNull(mediaItem.localConfiguration).uri; + this.initialManifestUri = mediaItem.localConfiguration.uri; this.manifest = manifest; - this.manifestUri = manifestUri; this.manifestDataSourceFactory = manifestDataSourceFactory; this.manifestParser = manifestParser; this.chunkSourceFactory = chunkSourceFactory; this.drmSessionManager = drmSessionManager; this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; - this.livePresentationDelayMs = livePresentationDelayMs; - this.livePresentationDelayOverridesManifest = livePresentationDelayOverridesManifest; + this.fallbackTargetLiveOffsetMs = fallbackTargetLiveOffsetMs; this.compositeSequenceableLoaderFactory = compositeSequenceableLoaderFactory; - this.tag = tag; + baseUrlExclusionList = new BaseUrlExclusionList(); sideloadedManifest = manifest != null; manifestEventDispatcher = createEventDispatcher(/* mediaPeriodId= */ null); manifestUriLock = new Object(); periodsById = new SparseArray<>(); playerEmsgCallback = new DefaultPlayerEmsgCallback(); expiredManifestPublishTimeUs = C.TIME_UNSET; + elapsedRealtimeOffsetMs = C.TIME_UNSET; if (sideloadedManifest) { Assertions.checkState(!manifest.dynamic); manifestCallback = null; @@ -653,21 +440,21 @@ public void replaceManifestUri(Uri manifestUri) { // MediaSource implementation. 
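The deleted constructors above configured livePresentationDelayMs directly; after this change the same intent is expressed per MediaItem, with the factory-level fallback used only when neither the item nor the manifest declares an offset. A hedged sketch, with a placeholder URI, offsets chosen only for illustration, and MediaItem.Builder#setLiveConfiguration assumed from the matching release:

    MediaItem liveItem =
        new MediaItem.Builder()
            .setUri("https://example.com/live.mpd") // placeholder
            .setLiveConfiguration(
                new MediaItem.LiveConfiguration.Builder().setTargetOffsetMs(30_000).build())
            .build();
    DashMediaSource liveSource =
        new DashMediaSource.Factory(new DefaultHttpDataSource.Factory())
            // Used only if neither the item nor the manifest defines a target offset.
            .setFallbackTargetLiveOffsetMs(20_000)
            .createMediaSource(liveItem);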
@Override - @Nullable - public Object getTag() { - return tag; + public MediaItem getMediaItem() { + return mediaItem; } @Override protected void prepareSourceInternal(@Nullable TransferListener mediaTransferListener) { this.mediaTransferListener = mediaTransferListener; drmSessionManager.prepare(); + drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), getPlayerId()); if (sideloadedManifest) { processManifest(false); } else { dataSource = manifestDataSourceFactory.createDataSource(); - loader = new Loader("Loader:DashMediaSource"); - handler = new Handler(); + loader = new Loader("DashMediaSource"); + handler = Util.createHandlerForCurrentLooper(); startLoadingManifest(); } } @@ -678,26 +465,29 @@ public void maybeThrowSourceInfoRefreshError() throws IOException { } @Override - public MediaPeriod createPeriod( - MediaPeriodId periodId, Allocator allocator, long startPositionUs) { - int periodIndex = (Integer) periodId.periodUid - firstPeriodId; - EventDispatcher periodEventDispatcher = - createEventDispatcher(periodId, manifest.getPeriod(periodIndex).startMs); + public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long startPositionUs) { + int periodIndex = (Integer) id.periodUid - firstPeriodId; + MediaSourceEventListener.EventDispatcher periodEventDispatcher = + createEventDispatcher(id, manifest.getPeriod(periodIndex).startMs); + DrmSessionEventListener.EventDispatcher drmEventDispatcher = createDrmEventDispatcher(id); DashMediaPeriod mediaPeriod = new DashMediaPeriod( firstPeriodId + periodIndex, manifest, + baseUrlExclusionList, periodIndex, chunkSourceFactory, mediaTransferListener, drmSessionManager, + drmEventDispatcher, loadErrorHandlingPolicy, periodEventDispatcher, elapsedRealtimeOffsetMs, manifestLoadErrorThrower, allocator, compositeSequenceableLoaderFactory, - playerEmsgCallback); + playerEmsgCallback, + getPlayerId()); periodsById.put(mediaPeriod.id, mediaPeriod); return mediaPeriod; } @@ -726,11 +516,12 @@ protected void releaseSourceInternal() { handler.removeCallbacksAndMessages(null); handler = null; } - elapsedRealtimeOffsetMs = 0; + elapsedRealtimeOffsetMs = C.TIME_UNSET; staleManifestReloadAttempt = 0; expiredManifestPublishTimeUs = C.TIME_UNSET; firstPeriodId = 0; periodsById.clear(); + baseUrlExclusionList.reset(); drmSessionManager.release(); } @@ -750,16 +541,19 @@ protected void releaseSourceInternal() { // Loadable callbacks. - /* package */ void onManifestLoadCompleted(ParsingLoadable loadable, - long elapsedRealtimeMs, long loadDurationMs) { - manifestEventDispatcher.loadCompleted( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), - loadable.type, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded()); + /* package */ void onManifestLoadCompleted( + ParsingLoadable loadable, long elapsedRealtimeMs, long loadDurationMs) { + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + manifestEventDispatcher.loadCompleted(loadEventInfo, loadable.type); DashManifest newManifest = loadable.getResult(); int oldPeriodCount = manifest == null ? 
0 : manifest.getPeriodCount(); @@ -826,8 +620,12 @@ protected void releaseSourceInternal() { } if (oldPeriodCount == 0) { - if (manifest.dynamic && manifest.utcTiming != null) { - resolveUtcTimingElement(manifest.utcTiming); + if (manifest.dynamic) { + if (manifest.utcTiming != null) { + resolveUtcTimingElement(manifest.utcTiming); + } else { + loadNtpTimeOffset(); + } } else { processManifest(true); } @@ -843,36 +641,44 @@ protected void releaseSourceInternal() { long loadDurationMs, IOException error, int errorCount) { - long retryDelayMs = - loadErrorHandlingPolicy.getRetryDelayMsFor( - C.DATA_TYPE_MANIFEST, loadDurationMs, error, errorCount); + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); + MediaLoadData mediaLoadData = new MediaLoadData(loadable.type); + LoadErrorInfo loadErrorInfo = + new LoadErrorInfo(loadEventInfo, mediaLoadData, error, errorCount); + long retryDelayMs = loadErrorHandlingPolicy.getRetryDelayMsFor(loadErrorInfo); LoadErrorAction loadErrorAction = retryDelayMs == C.TIME_UNSET ? Loader.DONT_RETRY_FATAL : Loader.createRetryAction(/* resetErrorCount= */ false, retryDelayMs); - manifestEventDispatcher.loadError( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), - loadable.type, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded(), - error, - !loadErrorAction.isRetry()); + boolean wasCanceled = !loadErrorAction.isRetry(); + manifestEventDispatcher.loadError(loadEventInfo, loadable.type, error, wasCanceled); + if (wasCanceled) { + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + } return loadErrorAction; } - /* package */ void onUtcTimestampLoadCompleted(ParsingLoadable loadable, - long elapsedRealtimeMs, long loadDurationMs) { - manifestEventDispatcher.loadCompleted( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), - loadable.type, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded()); + /* package */ void onUtcTimestampLoadCompleted( + ParsingLoadable loadable, long elapsedRealtimeMs, long loadDurationMs) { + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + manifestEventDispatcher.loadCompleted(loadEventInfo, loadable.type); onUtcTimestampResolved(loadable.getResult() - elapsedRealtimeMs); } @@ -882,29 +688,35 @@ protected void releaseSourceInternal() { long loadDurationMs, IOException error) { manifestEventDispatcher.loadError( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()), loadable.type, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded(), error, - true); + /* wasCanceled= */ true); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); onUtcTimestampResolutionError(error); return Loader.DONT_RETRY; } - /* package */ void onLoadCanceled(ParsingLoadable loadable, long elapsedRealtimeMs, - long loadDurationMs) { - manifestEventDispatcher.loadCanceled( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), - loadable.type, - 
elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded()); + /* package */ void onLoadCanceled( + ParsingLoadable loadable, long elapsedRealtimeMs, long loadDurationMs) { + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + manifestEventDispatcher.loadCanceled(loadEventInfo, loadable.type); } // Internal methods. @@ -920,6 +732,9 @@ private void resolveUtcTimingElement(UtcTimingElement timingElement) { } else if (Util.areEqual(scheme, "urn:mpeg:dash:utc:http-xsdate:2014") || Util.areEqual(scheme, "urn:mpeg:dash:utc:http-xsdate:2012")) { resolveUtcTimingElementHttp(timingElement, new XsDateTimeParser()); + } else if (Util.areEqual(scheme, "urn:mpeg:dash:utc:ntp:2014") + || Util.areEqual(scheme, "urn:mpeg:dash:utc:ntp:2012")) { + loadNtpTimeOffset(); } else { // Unsupported scheme. onUtcTimestampResolutionError(new IOException("Unsupported UTC timing scheme")); @@ -935,10 +750,29 @@ private void resolveUtcTimingElementDirect(UtcTimingElement timingElement) { } } - private void resolveUtcTimingElementHttp(UtcTimingElement timingElement, - ParsingLoadable.Parser parser) { - startLoading(new ParsingLoadable<>(dataSource, Uri.parse(timingElement.value), - C.DATA_TYPE_TIME_SYNCHRONIZATION, parser), new UtcTimestampCallback(), 1); + private void resolveUtcTimingElementHttp( + UtcTimingElement timingElement, ParsingLoadable.Parser parser) { + startLoading( + new ParsingLoadable<>( + dataSource, Uri.parse(timingElement.value), C.DATA_TYPE_TIME_SYNCHRONIZATION, parser), + new UtcTimestampCallback(), + 1); + } + + private void loadNtpTimeOffset() { + SntpClient.initialize( + loader, + new SntpClient.InitializationCallback() { + @Override + public void onInitialized() { + onUtcTimestampResolved(SntpClient.getElapsedRealtimeOffsetMs()); + } + + @Override + public void onInitializationFailed(IOException error) { + onUtcTimestampResolutionError(error); + } + }); } private void onUtcTimestampResolved(long elapsedRealtimeOffsetMs) { @@ -947,7 +781,7 @@ private void onUtcTimestampResolved(long elapsedRealtimeOffsetMs) { } private void onUtcTimestampResolutionError(IOException error) { - Log.e(TAG, "Failed to resolve UtcTiming element.", error); + Log.e(TAG, "Failed to resolve time offset.", error); // Be optimistic and continue in the hope that the device clock is correct. processManifest(true); } @@ -963,77 +797,59 @@ private void processManifest(boolean scheduleRefresh) { } } // Update the window. - boolean windowChangingImplicitly = false; + Period firstPeriod = manifest.getPeriod(0); int lastPeriodIndex = manifest.getPeriodCount() - 1; - PeriodSeekInfo firstPeriodSeekInfo = PeriodSeekInfo.createPeriodSeekInfo(manifest.getPeriod(0), - manifest.getPeriodDurationUs(0)); - PeriodSeekInfo lastPeriodSeekInfo = PeriodSeekInfo.createPeriodSeekInfo( - manifest.getPeriod(lastPeriodIndex), manifest.getPeriodDurationUs(lastPeriodIndex)); - // Get the period-relative start/end times. - long currentStartTimeUs = firstPeriodSeekInfo.availableStartTimeUs; - long currentEndTimeUs = lastPeriodSeekInfo.availableEndTimeUs; - if (manifest.dynamic && !lastPeriodSeekInfo.isIndexExplicit) { - // The manifest describes an incomplete live stream. Update the start/end times to reflect the - // live stream duration and the manifest's time shift buffer depth. 
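Both the removed PeriodSeekInfo path and the new window computation that follows clamp the available start time by the manifest's time shift buffer depth. With toy numbers (assumed, not from any real manifest) the effect is:

    // Two hours of the period have been produced, but the DVR window is only 30 minutes deep.
    long windowEndTimeInManifestUs = 7_200_000_000L;  // 2 h in microseconds
    long timeShiftBufferDepthUs = 1_800_000_000L;     // 30 min
    long windowStartTimeInManifestUs =
        Math.max(0L, windowEndTimeInManifestUs - timeShiftBufferDepthUs); // 5_400_000_000
    long windowDurationUs = windowEndTimeInManifestUs - windowStartTimeInManifestUs; // 30 min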
- long liveStreamDurationUs = getNowUnixTimeUs() - C.msToUs(manifest.availabilityStartTimeMs); - long liveStreamEndPositionInLastPeriodUs = liveStreamDurationUs - - C.msToUs(manifest.getPeriod(lastPeriodIndex).startMs); - currentEndTimeUs = Math.min(liveStreamEndPositionInLastPeriodUs, currentEndTimeUs); - if (manifest.timeShiftBufferDepthMs != C.TIME_UNSET) { - long timeShiftBufferDepthUs = C.msToUs(manifest.timeShiftBufferDepthMs); - long offsetInPeriodUs = currentEndTimeUs - timeShiftBufferDepthUs; - int periodIndex = lastPeriodIndex; - while (offsetInPeriodUs < 0 && periodIndex > 0) { - offsetInPeriodUs += manifest.getPeriodDurationUs(--periodIndex); - } - if (periodIndex == 0) { - currentStartTimeUs = Math.max(currentStartTimeUs, offsetInPeriodUs); - } else { - // The time shift buffer starts after the earliest period. - // TODO: Does this ever happen? - currentStartTimeUs = manifest.getPeriodDurationUs(0); - } - } - windowChangingImplicitly = true; - } - long windowDurationUs = currentEndTimeUs - currentStartTimeUs; - for (int i = 0; i < manifest.getPeriodCount() - 1; i++) { - windowDurationUs += manifest.getPeriodDurationUs(i); - } - long windowDefaultStartPositionUs = 0; + Period lastPeriod = manifest.getPeriod(lastPeriodIndex); + long lastPeriodDurationUs = manifest.getPeriodDurationUs(lastPeriodIndex); + long nowUnixTimeUs = Util.msToUs(Util.getNowUnixTimeMs(elapsedRealtimeOffsetMs)); + long windowStartTimeInManifestUs = + getAvailableStartTimeInManifestUs( + firstPeriod, manifest.getPeriodDurationUs(0), nowUnixTimeUs); + long windowEndTimeInManifestUs = + getAvailableEndTimeInManifestUs(lastPeriod, lastPeriodDurationUs, nowUnixTimeUs); + boolean windowChangingImplicitly = manifest.dynamic && !isIndexExplicit(lastPeriod); + if (windowChangingImplicitly && manifest.timeShiftBufferDepthMs != C.TIME_UNSET) { + // Update the available start time to reflect the manifest's time shift buffer depth. + long timeShiftBufferStartTimeInManifestUs = + windowEndTimeInManifestUs - Util.msToUs(manifest.timeShiftBufferDepthMs); + windowStartTimeInManifestUs = + max(windowStartTimeInManifestUs, timeShiftBufferStartTimeInManifestUs); + } + long windowDurationUs = windowEndTimeInManifestUs - windowStartTimeInManifestUs; + long windowStartUnixTimeMs = C.TIME_UNSET; + long windowDefaultPositionUs = 0; if (manifest.dynamic) { - long presentationDelayForManifestMs = livePresentationDelayMs; - if (!livePresentationDelayOverridesManifest - && manifest.suggestedPresentationDelayMs != C.TIME_UNSET) { - presentationDelayForManifestMs = manifest.suggestedPresentationDelayMs; + checkState(manifest.availabilityStartTimeMs != C.TIME_UNSET); + long nowInWindowUs = + nowUnixTimeUs + - Util.msToUs(manifest.availabilityStartTimeMs) + - windowStartTimeInManifestUs; + updateLiveConfiguration(nowInWindowUs, windowDurationUs); + windowStartUnixTimeMs = + manifest.availabilityStartTimeMs + Util.usToMs(windowStartTimeInManifestUs); + windowDefaultPositionUs = nowInWindowUs - Util.msToUs(liveConfiguration.targetOffsetMs); + long minimumWindowDefaultPositionUs = + min(MIN_LIVE_DEFAULT_START_POSITION_US, windowDurationUs / 2); + if (windowDefaultPositionUs < minimumWindowDefaultPositionUs) { + // The default position is too close to the start of the live window. Set it to the minimum + // default position provided the window is at least twice as big. Else set it to the middle + // of the window. 
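Numerically, the minimum-default-position rule described in the comment above works out as follows for a toy 8 second window (MIN_LIVE_DEFAULT_START_POSITION_US is the 5 second constant defined earlier in this file):

    long minLiveDefaultStartPositionUs = 5_000_000;   // MIN_LIVE_DEFAULT_START_POSITION_US
    long windowDurationUs = 8_000_000;                // toy value
    long minimumWindowDefaultPositionUs =
        Math.min(minLiveDefaultStartPositionUs, windowDurationUs / 2); // 4_000_000, mid-window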
+ windowDefaultPositionUs = minimumWindowDefaultPositionUs; } - // Snap the default position to the start of the segment containing it. - windowDefaultStartPositionUs = windowDurationUs - C.msToUs(presentationDelayForManifestMs); - if (windowDefaultStartPositionUs < MIN_LIVE_DEFAULT_START_POSITION_US) { - // The default start position is too close to the start of the live window. Set it to the - // minimum default start position provided the window is at least twice as big. Else set - // it to the middle of the window. - windowDefaultStartPositionUs = Math.min(MIN_LIVE_DEFAULT_START_POSITION_US, - windowDurationUs / 2); - } - } - long windowStartTimeMs = C.TIME_UNSET; - if (manifest.availabilityStartTimeMs != C.TIME_UNSET) { - windowStartTimeMs = - manifest.availabilityStartTimeMs - + manifest.getPeriod(0).startMs - + C.usToMs(currentStartTimeUs); } + long offsetInFirstPeriodUs = windowStartTimeInManifestUs - Util.msToUs(firstPeriod.startMs); DashTimeline timeline = new DashTimeline( manifest.availabilityStartTimeMs, - windowStartTimeMs, + windowStartUnixTimeMs, + elapsedRealtimeOffsetMs, firstPeriodId, - currentStartTimeUs, + offsetInFirstPeriodUs, windowDurationUs, - windowDefaultStartPositionUs, + windowDefaultPositionUs, manifest, - tag); + mediaItem, + manifest.dynamic ? liveConfiguration : null); refreshSourceInfo(timeline); if (!sideloadedManifest) { @@ -1041,7 +857,10 @@ private void processManifest(boolean scheduleRefresh) { handler.removeCallbacks(simulateManifestRefreshRunnable); // If the window is changing implicitly, post a simulated manifest refresh to update it. if (windowChangingImplicitly) { - handler.postDelayed(simulateManifestRefreshRunnable, NOTIFY_MANIFEST_INTERVAL_MS); + handler.postDelayed( + simulateManifestRefreshRunnable, + getIntervalUntilNextManifestRefreshMs( + manifest, Util.getNowUnixTimeMs(elapsedRealtimeOffsetMs))); } if (manifestLoadPending) { startLoadingManifest(); @@ -1058,13 +877,107 @@ private void processManifest(boolean scheduleRefresh) { minUpdatePeriodMs = 5000; } long nextLoadTimestampMs = manifestLoadStartTimestampMs + minUpdatePeriodMs; - long delayUntilNextLoadMs = - Math.max(0, nextLoadTimestampMs - SystemClock.elapsedRealtime()); + long delayUntilNextLoadMs = max(0, nextLoadTimestampMs - SystemClock.elapsedRealtime()); scheduleManifestRefresh(delayUntilNextLoadMs); } } } + private void updateLiveConfiguration(long nowInWindowUs, long windowDurationUs) { + // Default maximum offset: start of window. + long maxPossibleLiveOffsetMs = usToMs(nowInWindowUs); + long maxLiveOffsetMs = maxPossibleLiveOffsetMs; + // Override maximum offset with user or media defined values if they are smaller. + if (mediaItem.liveConfiguration.maxOffsetMs != C.TIME_UNSET) { + maxLiveOffsetMs = min(maxLiveOffsetMs, mediaItem.liveConfiguration.maxOffsetMs); + } else if (manifest.serviceDescription != null + && manifest.serviceDescription.maxOffsetMs != C.TIME_UNSET) { + maxLiveOffsetMs = min(maxLiveOffsetMs, manifest.serviceDescription.maxOffsetMs); + } + // Default minimum offset: end of window. + long minLiveOffsetMs = usToMs(nowInWindowUs - windowDurationUs); + if (minLiveOffsetMs < 0 && maxLiveOffsetMs > 0) { + // The current time is in the window, so assume all clocks are synchronized and set the + // minimum to a live offset of zero. + minLiveOffsetMs = 0; + } + if (manifest.minBufferTimeMs != C.TIME_UNSET) { + // Ensure to leave one GOP as minimum and don't exceed the maximum possible offset. 
+ minLiveOffsetMs = min(minLiveOffsetMs + manifest.minBufferTimeMs, maxPossibleLiveOffsetMs); + } + // Override minimum offset with user and media defined values if they are larger, but don't + // exceed the maximum possible offset. + if (mediaItem.liveConfiguration.minOffsetMs != C.TIME_UNSET) { + minLiveOffsetMs = + constrainValue( + mediaItem.liveConfiguration.minOffsetMs, minLiveOffsetMs, maxPossibleLiveOffsetMs); + } else if (manifest.serviceDescription != null + && manifest.serviceDescription.minOffsetMs != C.TIME_UNSET) { + minLiveOffsetMs = + constrainValue( + manifest.serviceDescription.minOffsetMs, minLiveOffsetMs, maxPossibleLiveOffsetMs); + } + if (minLiveOffsetMs > maxLiveOffsetMs) { + // The values can be set by different sources and may disagree. Prefer the maximum offset + // under the assumption that it is safer for playback. + maxLiveOffsetMs = minLiveOffsetMs; + } + long targetOffsetMs; + if (liveConfiguration.targetOffsetMs != C.TIME_UNSET) { + // Keep existing target offset even if the media configuration changes. + targetOffsetMs = liveConfiguration.targetOffsetMs; + } else if (manifest.serviceDescription != null + && manifest.serviceDescription.targetOffsetMs != C.TIME_UNSET) { + targetOffsetMs = manifest.serviceDescription.targetOffsetMs; + } else if (manifest.suggestedPresentationDelayMs != C.TIME_UNSET) { + targetOffsetMs = manifest.suggestedPresentationDelayMs; + } else { + targetOffsetMs = fallbackTargetLiveOffsetMs; + } + if (targetOffsetMs < minLiveOffsetMs) { + targetOffsetMs = minLiveOffsetMs; + } + if (targetOffsetMs > maxLiveOffsetMs) { + long safeDistanceFromWindowStartUs = + min(MIN_LIVE_DEFAULT_START_POSITION_US, windowDurationUs / 2); + long maxTargetOffsetForSafeDistanceToWindowStartMs = + usToMs(nowInWindowUs - safeDistanceFromWindowStartUs); + targetOffsetMs = + constrainValue( + maxTargetOffsetForSafeDistanceToWindowStartMs, minLiveOffsetMs, maxLiveOffsetMs); + } + float minPlaybackSpeed = C.RATE_UNSET; + if (mediaItem.liveConfiguration.minPlaybackSpeed != C.RATE_UNSET) { + minPlaybackSpeed = mediaItem.liveConfiguration.minPlaybackSpeed; + } else if (manifest.serviceDescription != null) { + minPlaybackSpeed = manifest.serviceDescription.minPlaybackSpeed; + } + float maxPlaybackSpeed = C.RATE_UNSET; + if (mediaItem.liveConfiguration.maxPlaybackSpeed != C.RATE_UNSET) { + maxPlaybackSpeed = mediaItem.liveConfiguration.maxPlaybackSpeed; + } else if (manifest.serviceDescription != null) { + maxPlaybackSpeed = manifest.serviceDescription.maxPlaybackSpeed; + } + if (minPlaybackSpeed == C.RATE_UNSET + && maxPlaybackSpeed == C.RATE_UNSET + && (manifest.serviceDescription == null + || manifest.serviceDescription.targetOffsetMs == C.TIME_UNSET)) { + // Force unit speed (instead of automatic adjustment with fallback speeds) if there are no + // specific speed limits defined by the media item or the manifest, and the manifest contains + // no low-latency target offset either. 
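To make the offset resolution in updateLiveConfiguration concrete: the chosen target is effectively clamped into [minLiveOffsetMs, maxLiveOffsetMs]. With toy values (assumed) and the same Util.constrainValue helper the method uses elsewhere:

    long minLiveOffsetMs = 4_000;          // e.g. raised by the manifest's minBufferTime
    long maxLiveOffsetMs = 60_000;         // bounded by the start of the live window
    long requestedTargetOffsetMs = 2_000;  // e.g. MediaItem.liveConfiguration.targetOffsetMs
    long targetOffsetMs =
        Util.constrainValue(requestedTargetOffsetMs, minLiveOffsetMs, maxLiveOffsetMs); // 4_000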
+ minPlaybackSpeed = 1f; + maxPlaybackSpeed = 1f; + } + liveConfiguration = + new MediaItem.LiveConfiguration.Builder() + .setTargetOffsetMs(targetOffsetMs) + .setMinOffsetMs(minLiveOffsetMs) + .setMaxOffsetMs(maxLiveOffsetMs) + .setMinPlaybackSpeed(minPlaybackSpeed) + .setMaxPlaybackSpeed(maxPlaybackSpeed) + .build(); + } + private void scheduleManifestRefresh(long delayUntilNextLoadMs) { handler.postDelayed(refreshManifestRunnable, delayUntilNextLoadMs); } @@ -1090,117 +1003,179 @@ private void startLoadingManifest() { } private long getManifestLoadRetryDelayMillis() { - return Math.min((staleManifestReloadAttempt - 1) * 1000, 5000); + return min((staleManifestReloadAttempt - 1) * 1000, 5000); } - private void startLoading(ParsingLoadable loadable, - Loader.Callback> callback, int minRetryCount) { + private void startLoading( + ParsingLoadable loadable, + Loader.Callback> callback, + int minRetryCount) { long elapsedRealtimeMs = loader.startLoading(loadable, callback, minRetryCount); - manifestEventDispatcher.loadStarted(loadable.dataSpec, loadable.type, elapsedRealtimeMs); + manifestEventDispatcher.loadStarted( + new LoadEventInfo(loadable.loadTaskId, loadable.dataSpec, elapsedRealtimeMs), + loadable.type); } - private long getNowUnixTimeUs() { - if (elapsedRealtimeOffsetMs != 0) { - return C.msToUs(SystemClock.elapsedRealtime() + elapsedRealtimeOffsetMs); - } else { - return C.msToUs(System.currentTimeMillis()); + private static long getIntervalUntilNextManifestRefreshMs( + DashManifest manifest, long nowUnixTimeMs) { + int periodIndex = manifest.getPeriodCount() - 1; + Period period = manifest.getPeriod(periodIndex); + long periodStartUs = Util.msToUs(period.startMs); + long periodDurationUs = manifest.getPeriodDurationUs(periodIndex); + long nowUnixTimeUs = Util.msToUs(nowUnixTimeMs); + long availabilityStartTimeUs = Util.msToUs(manifest.availabilityStartTimeMs); + long intervalUs = Util.msToUs(DEFAULT_NOTIFY_MANIFEST_INTERVAL_MS); + for (int i = 0; i < period.adaptationSets.size(); i++) { + List representations = period.adaptationSets.get(i).representations; + if (representations.isEmpty()) { + continue; + } + @Nullable DashSegmentIndex index = representations.get(0).getIndex(); + if (index != null) { + long nextSegmentShiftUnixTimeUs = + availabilityStartTimeUs + + periodStartUs + + index.getNextSegmentAvailableTimeUs(periodDurationUs, nowUnixTimeUs); + long requiredIntervalUs = nextSegmentShiftUnixTimeUs - nowUnixTimeUs; + // Avoid multiple refreshes within a very small amount of time. + if (requiredIntervalUs < intervalUs - 100_000 + || (requiredIntervalUs > intervalUs && requiredIntervalUs < intervalUs + 100_000)) { + intervalUs = requiredIntervalUs; + } + } } + // Round up to compensate for a potential loss in the us to ms conversion. 
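A quick check of the ceiling rounding applied when the refresh interval is converted from microseconds to milliseconds (Guava's LongMath, imported at the top of this file); the value is arbitrary:

    // 1,000,001 us must wait a full 1001 ms; flooring would schedule the refresh 1 us early.
    long intervalMs = LongMath.divide(1_000_001L, 1000, RoundingMode.CEILING); // == 1001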
+ return LongMath.divide(intervalUs, 1000, RoundingMode.CEILING); } - private static final class PeriodSeekInfo { - - public static PeriodSeekInfo createPeriodSeekInfo( - com.google.android.exoplayer2.source.dash.manifest.Period period, long durationUs) { - int adaptationSetCount = period.adaptationSets.size(); - long availableStartTimeUs = 0; - long availableEndTimeUs = Long.MAX_VALUE; - boolean isIndexExplicit = false; - boolean seenEmptyIndex = false; - - boolean haveAudioVideoAdaptationSets = false; - for (int i = 0; i < adaptationSetCount; i++) { - int type = period.adaptationSets.get(i).type; - if (type == C.TRACK_TYPE_AUDIO || type == C.TRACK_TYPE_VIDEO) { - haveAudioVideoAdaptationSets = true; - break; - } + private static long getAvailableStartTimeInManifestUs( + Period period, long periodDurationUs, long nowUnixTimeUs) { + long periodStartTimeInManifestUs = Util.msToUs(period.startMs); + long availableStartTimeInManifestUs = periodStartTimeInManifestUs; + boolean haveAudioVideoAdaptationSets = hasVideoOrAudioAdaptationSets(period); + for (int i = 0; i < period.adaptationSets.size(); i++) { + AdaptationSet adaptationSet = period.adaptationSets.get(i); + List representations = adaptationSet.representations; + // Exclude other adaptation sets from duration calculations, if we have at least one audio or + // video adaptation set. See: https://github.com/google/ExoPlayer/issues/4029. + boolean adaptationSetIsNotAudioVideo = + adaptationSet.type != C.TRACK_TYPE_AUDIO && adaptationSet.type != C.TRACK_TYPE_VIDEO; + if ((haveAudioVideoAdaptationSets && adaptationSetIsNotAudioVideo) + || representations.isEmpty()) { + continue; + } + @Nullable DashSegmentIndex index = representations.get(0).getIndex(); + if (index == null) { + return periodStartTimeInManifestUs; } + long availableSegmentCount = index.getAvailableSegmentCount(periodDurationUs, nowUnixTimeUs); + if (availableSegmentCount == 0) { + return periodStartTimeInManifestUs; + } + long firstAvailableSegmentNum = + index.getFirstAvailableSegmentNum(periodDurationUs, nowUnixTimeUs); + long adaptationSetAvailableStartTimeInManifestUs = + periodStartTimeInManifestUs + index.getTimeUs(firstAvailableSegmentNum); + availableStartTimeInManifestUs = + max(availableStartTimeInManifestUs, adaptationSetAvailableStartTimeInManifestUs); + } + return availableStartTimeInManifestUs; + } - for (int i = 0; i < adaptationSetCount; i++) { - AdaptationSet adaptationSet = period.adaptationSets.get(i); - // Exclude text adaptation sets from duration calculations, if we have at least one audio - // or video adaptation set. See: https://github.com/google/ExoPlayer/issues/4029 - if (haveAudioVideoAdaptationSets && adaptationSet.type == C.TRACK_TYPE_TEXT) { - continue; - } + private static long getAvailableEndTimeInManifestUs( + Period period, long periodDurationUs, long nowUnixTimeUs) { + long periodStartTimeInManifestUs = Util.msToUs(period.startMs); + long availableEndTimeInManifestUs = Long.MAX_VALUE; + boolean haveAudioVideoAdaptationSets = hasVideoOrAudioAdaptationSets(period); + for (int i = 0; i < period.adaptationSets.size(); i++) { + AdaptationSet adaptationSet = period.adaptationSets.get(i); + List representations = adaptationSet.representations; + // Exclude other adaptation sets from duration calculations, if we have at least one audio or + // video adaptation set. 
See: https://github.com/google/ExoPlayer/issues/4029 + boolean adaptationSetIsNotAudioVideo = + adaptationSet.type != C.TRACK_TYPE_AUDIO && adaptationSet.type != C.TRACK_TYPE_VIDEO; + if ((haveAudioVideoAdaptationSets && adaptationSetIsNotAudioVideo) + || representations.isEmpty()) { + continue; + } + @Nullable DashSegmentIndex index = representations.get(0).getIndex(); + if (index == null) { + return periodStartTimeInManifestUs + periodDurationUs; + } + long availableSegmentCount = index.getAvailableSegmentCount(periodDurationUs, nowUnixTimeUs); + if (availableSegmentCount == 0) { + return periodStartTimeInManifestUs; + } + long firstAvailableSegmentNum = + index.getFirstAvailableSegmentNum(periodDurationUs, nowUnixTimeUs); + long lastAvailableSegmentNum = firstAvailableSegmentNum + availableSegmentCount - 1; + long adaptationSetAvailableEndTimeInManifestUs = + periodStartTimeInManifestUs + + index.getTimeUs(lastAvailableSegmentNum) + + index.getDurationUs(lastAvailableSegmentNum, periodDurationUs); + availableEndTimeInManifestUs = + min(availableEndTimeInManifestUs, adaptationSetAvailableEndTimeInManifestUs); + } + return availableEndTimeInManifestUs; + } - DashSegmentIndex index = adaptationSet.representations.get(0).getIndex(); - if (index == null) { - return new PeriodSeekInfo(true, 0, durationUs); - } - isIndexExplicit |= index.isExplicit(); - int segmentCount = index.getSegmentCount(durationUs); - if (segmentCount == 0) { - seenEmptyIndex = true; - availableStartTimeUs = 0; - availableEndTimeUs = 0; - } else if (!seenEmptyIndex) { - long firstSegmentNum = index.getFirstSegmentNum(); - long adaptationSetAvailableStartTimeUs = index.getTimeUs(firstSegmentNum); - availableStartTimeUs = Math.max(availableStartTimeUs, adaptationSetAvailableStartTimeUs); - if (segmentCount != DashSegmentIndex.INDEX_UNBOUNDED) { - long lastSegmentNum = firstSegmentNum + segmentCount - 1; - long adaptationSetAvailableEndTimeUs = index.getTimeUs(lastSegmentNum) - + index.getDurationUs(lastSegmentNum, durationUs); - availableEndTimeUs = Math.min(availableEndTimeUs, adaptationSetAvailableEndTimeUs); - } - } + private static boolean isIndexExplicit(Period period) { + for (int i = 0; i < period.adaptationSets.size(); i++) { + @Nullable + DashSegmentIndex index = period.adaptationSets.get(i).representations.get(0).getIndex(); + if (index == null || index.isExplicit()) { + return true; } - return new PeriodSeekInfo(isIndexExplicit, availableStartTimeUs, availableEndTimeUs); } + return false; + } - public final boolean isIndexExplicit; - public final long availableStartTimeUs; - public final long availableEndTimeUs; - - private PeriodSeekInfo(boolean isIndexExplicit, long availableStartTimeUs, - long availableEndTimeUs) { - this.isIndexExplicit = isIndexExplicit; - this.availableStartTimeUs = availableStartTimeUs; - this.availableEndTimeUs = availableEndTimeUs; + private static boolean hasVideoOrAudioAdaptationSets(Period period) { + for (int i = 0; i < period.adaptationSets.size(); i++) { + int type = period.adaptationSets.get(i).type; + if (type == C.TRACK_TYPE_AUDIO || type == C.TRACK_TYPE_VIDEO) { + return true; + } } - + return false; } private static final class DashTimeline extends Timeline { private final long presentationStartTimeMs; private final long windowStartTimeMs; + private final long elapsedRealtimeEpochOffsetMs; private final int firstPeriodId; private final long offsetInFirstPeriodUs; private final long windowDurationUs; private final long windowDefaultStartPositionUs; private final DashManifest 
manifest; - @Nullable private final Object windowTag; + private final MediaItem mediaItem; + @Nullable private final MediaItem.LiveConfiguration liveConfiguration; public DashTimeline( long presentationStartTimeMs, long windowStartTimeMs, + long elapsedRealtimeEpochOffsetMs, int firstPeriodId, long offsetInFirstPeriodUs, long windowDurationUs, long windowDefaultStartPositionUs, DashManifest manifest, - @Nullable Object windowTag) { + MediaItem mediaItem, + @Nullable MediaItem.LiveConfiguration liveConfiguration) { + checkState(manifest.dynamic == (liveConfiguration != null)); this.presentationStartTimeMs = presentationStartTimeMs; this.windowStartTimeMs = windowStartTimeMs; + this.elapsedRealtimeEpochOffsetMs = elapsedRealtimeEpochOffsetMs; this.firstPeriodId = firstPeriodId; this.offsetInFirstPeriodUs = offsetInFirstPeriodUs; this.windowDurationUs = windowDurationUs; this.windowDefaultStartPositionUs = windowDefaultStartPositionUs; this.manifest = manifest; - this.windowTag = windowTag; + this.mediaItem = mediaItem; + this.liveConfiguration = liveConfiguration; } @Override @@ -1209,12 +1184,16 @@ public int getPeriodCount() { } @Override - public Period getPeriod(int periodIndex, Period period, boolean setIdentifiers) { + public Period getPeriod(int periodIndex, Period period, boolean setIds) { Assertions.checkIndex(periodIndex, 0, getPeriodCount()); - Object id = setIdentifiers ? manifest.getPeriod(periodIndex).id : null; - Object uid = setIdentifiers ? (firstPeriodId + periodIndex) : null; - return period.set(id, uid, 0, manifest.getPeriodDurationUs(periodIndex), - C.msToUs(manifest.getPeriod(periodIndex).startMs - manifest.getPeriod(0).startMs) + Object id = setIds ? manifest.getPeriod(periodIndex).id : null; + Object uid = setIds ? (firstPeriodId + periodIndex) : null; + return period.set( + id, + uid, + 0, + manifest.getPeriodDurationUs(periodIndex), + Util.msToUs(manifest.getPeriod(periodIndex).startMs - manifest.getPeriod(0).startMs) - offsetInFirstPeriodUs); } @@ -1226,17 +1205,18 @@ public int getWindowCount() { @Override public Window getWindow(int windowIndex, Window window, long defaultPositionProjectionUs) { Assertions.checkIndex(windowIndex, 0, 1); - long windowDefaultStartPositionUs = getAdjustedWindowDefaultStartPositionUs( - defaultPositionProjectionUs); + long windowDefaultStartPositionUs = + getAdjustedWindowDefaultStartPositionUs(defaultPositionProjectionUs); return window.set( Window.SINGLE_WINDOW_UID, - windowTag, + mediaItem, manifest, presentationStartTimeMs, windowStartTimeMs, + elapsedRealtimeEpochOffsetMs, /* isSeekable= */ true, /* isDynamic= */ isMovingLiveWindow(manifest), - /* isLive= */ manifest.dynamic, + liveConfiguration, windowDefaultStartPositionUs, windowDurationUs, /* firstPeriodIndex= */ 0, @@ -1285,14 +1265,16 @@ private long getAdjustedWindowDefaultStartPositionUs(long defaultPositionProject } // If there are multiple video adaptation sets with unaligned segments, the initial time may // not correspond to the start of a segment in both, but this is an edge case. - DashSegmentIndex snapIndex = period.adaptationSets.get(videoAdaptationSetIndex) - .representations.get(0).getIndex(); + @Nullable + DashSegmentIndex snapIndex = + period.adaptationSets.get(videoAdaptationSetIndex).representations.get(0).getIndex(); if (snapIndex == null || snapIndex.getSegmentCount(periodDurationUs) == 0) { // Video adaptation set does not include a non-empty index for snapping. 
return windowDefaultStartPositionUs; } long segmentNum = snapIndex.getSegmentNum(defaultStartPositionInPeriodUs, periodDurationUs); - return windowDefaultStartPositionUs + snapIndex.getTimeUs(segmentNum) + return windowDefaultStartPositionUs + + snapIndex.getTimeUs(segmentNum) - defaultStartPositionInPeriodUs; } @@ -1325,14 +1307,17 @@ public void onDashManifestPublishTimeExpired(long expiredManifestPublishTimeUs) private final class ManifestCallback implements Loader.Callback> { @Override - public void onLoadCompleted(ParsingLoadable loadable, - long elapsedRealtimeMs, long loadDurationMs) { + public void onLoadCompleted( + ParsingLoadable loadable, long elapsedRealtimeMs, long loadDurationMs) { onManifestLoadCompleted(loadable, elapsedRealtimeMs, loadDurationMs); } @Override - public void onLoadCanceled(ParsingLoadable loadable, - long elapsedRealtimeMs, long loadDurationMs, boolean released) { + public void onLoadCanceled( + ParsingLoadable loadable, + long elapsedRealtimeMs, + long loadDurationMs, + boolean released) { DashMediaSource.this.onLoadCanceled(loadable, elapsedRealtimeMs, loadDurationMs); } @@ -1345,20 +1330,22 @@ public LoadErrorAction onLoadError( int errorCount) { return onManifestLoadError(loadable, elapsedRealtimeMs, loadDurationMs, error, errorCount); } - } private final class UtcTimestampCallback implements Loader.Callback> { @Override - public void onLoadCompleted(ParsingLoadable loadable, long elapsedRealtimeMs, - long loadDurationMs) { + public void onLoadCompleted( + ParsingLoadable loadable, long elapsedRealtimeMs, long loadDurationMs) { onUtcTimestampLoadCompleted(loadable, elapsedRealtimeMs, loadDurationMs); } @Override - public void onLoadCanceled(ParsingLoadable loadable, long elapsedRealtimeMs, - long loadDurationMs, boolean released) { + public void onLoadCanceled( + ParsingLoadable loadable, + long elapsedRealtimeMs, + long loadDurationMs, + boolean released) { DashMediaSource.this.onLoadCanceled(loadable, elapsedRealtimeMs, loadDurationMs); } @@ -1371,7 +1358,6 @@ public LoadErrorAction onLoadError( int errorCount) { return onUtcTimestampLoadError(loadable, elapsedRealtimeMs, loadDurationMs, error); } - } private static final class XsDateTimeParser implements ParsingLoadable.Parser { @@ -1381,7 +1367,6 @@ public Long parse(Uri uri, InputStream inputStream) throws IOException { String firstLine = new BufferedReader(new InputStreamReader(inputStream)).readLine(); return Util.parseXsDateTime(firstLine); } - } /* package */ static final class Iso8601Parser implements ParsingLoadable.Parser { @@ -1392,12 +1377,12 @@ public Long parse(Uri uri, InputStream inputStream) throws IOException { @Override public Long parse(Uri uri, InputStream inputStream) throws IOException { String firstLine = - new BufferedReader(new InputStreamReader(inputStream, Charset.forName(C.UTF8_NAME))) - .readLine(); + new BufferedReader(new InputStreamReader(inputStream, Charsets.UTF_8)).readLine(); try { Matcher matcher = TIMESTAMP_WITH_TIMEZONE_PATTERN.matcher(firstLine); if (!matcher.matches()) { - throw new ParserException("Couldn't parse timestamp: " + firstLine); + throw ParserException.createForMalformedManifest( + "Couldn't parse timestamp: " + firstLine, /* cause= */ null); } // Parse the timestamp. 
String timestampWithoutTimezone = matcher.group(1); @@ -1418,10 +1403,9 @@ public Long parse(Uri uri, InputStream inputStream) throws IOException { } return timestampMs; } catch (ParseException e) { - throw new ParserException(e); + throw ParserException.createForMalformedManifest(/* message= */ null, /* cause= */ e); } } - } /** diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashSegmentIndex.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashSegmentIndex.java index 9d45bc726e..b62b5d844d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashSegmentIndex.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashSegmentIndex.java @@ -18,9 +18,7 @@ import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.source.dash.manifest.RangedUri; -/** - * Indexes the segments within a media stream. - */ +/** Indexes the segments within a media stream. */ public interface DashSegmentIndex { int INDEX_UNBOUNDED = -1; @@ -64,38 +62,66 @@ public interface DashSegmentIndex { */ RangedUri getSegmentUrl(long segmentNum); + /** Returns the segment number of the first defined segment in the index. */ + long getFirstSegmentNum(); + /** - * Returns the segment number of the first segment. + * Returns the segment number of the first available segment in the index. * - * @return The segment number of the first segment. + * @param periodDurationUs The duration of the enclosing period in microseconds, or {@link + * C#TIME_UNSET} if the period's duration is not yet known. + * @param nowUnixTimeUs The current time in milliseconds since the Unix epoch. + * @return The number of the first available segment. */ - long getFirstSegmentNum(); + long getFirstAvailableSegmentNum(long periodDurationUs, long nowUnixTimeUs); /** - * Returns the number of segments in the index, or {@link #INDEX_UNBOUNDED}. - *
<p> - * An unbounded index occurs if a dynamic manifest uses SegmentTemplate elements without a + * Returns the number of segments defined in the index, or {@link #INDEX_UNBOUNDED}. + *
<p>
      An unbounded index occurs if a dynamic manifest uses SegmentTemplate elements without a * SegmentTimeline element, and if the period duration is not yet known. In this case the caller - * must manually determine the window of currently available segments. + * can query the available segment using {@link #getFirstAvailableSegmentNum(long, long)} and + * {@link #getAvailableSegmentCount(long, long)}. * - * @param periodDurationUs The duration of the enclosing period in microseconds, or - * {@link C#TIME_UNSET} if the period's duration is not yet known. + * @param periodDurationUs The duration of the enclosing period in microseconds, or {@link + * C#TIME_UNSET} if the period's duration is not yet known. * @return The number of segments in the index, or {@link #INDEX_UNBOUNDED}. */ - int getSegmentCount(long periodDurationUs); + long getSegmentCount(long periodDurationUs); + + /** + * Returns the number of available segments in the index. + * + * @param periodDurationUs The duration of the enclosing period in microseconds, or {@link + * C#TIME_UNSET} if the period's duration is not yet known. + * @param nowUnixTimeUs The current time in milliseconds since the Unix epoch. + * @return The number of available segments in the index. + */ + long getAvailableSegmentCount(long periodDurationUs, long nowUnixTimeUs); + + /** + * Returns the time, in microseconds, at which a new segment becomes available, or {@link + * C#TIME_UNSET} if not applicable. + * + * @param periodDurationUs The duration of the enclosing period in microseconds, or {@link + * C#TIME_UNSET} if the period's duration is not yet known. + * @param nowUnixTimeUs The current time in milliseconds since the Unix epoch. + * @return The time, in microseconds, at which a new segment becomes available, or {@link + * C#TIME_UNSET} if not applicable. + */ + long getNextSegmentAvailableTimeUs(long periodDurationUs, long nowUnixTimeUs); /** * Returns true if segments are defined explicitly by the index. - *
<p> - * If true is returned, each segment is defined explicitly by the index data, and all of the + *
<p>If true is returned, each segment is defined explicitly by the index data, and all of the * listed segments are guaranteed to be available at the time when the index was obtained. - *
<p> - * If false is returned then segment information was derived from properties such as a fixed + *
<p>
      If false is returned then segment information was derived from properties such as a fixed * segment duration. If the presentation is dynamic, it's possible that only a subset of the * segments are available. * * @return Whether segments are defined explicitly by the index. */ boolean isExplicit(); - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashUtil.java index c9433b9e41..239532b383 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashUtil.java @@ -19,12 +19,12 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.extractor.ChunkIndex; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.mkv.MatroskaExtractor; import com.google.android.exoplayer2.extractor.mp4.FragmentedMp4Extractor; -import com.google.android.exoplayer2.source.chunk.ChunkExtractorWrapper; +import com.google.android.exoplayer2.source.chunk.BundledChunkExtractor; +import com.google.android.exoplayer2.source.chunk.ChunkExtractor; import com.google.android.exoplayer2.source.chunk.InitializationChunk; import com.google.android.exoplayer2.source.dash.manifest.DashManifest; import com.google.android.exoplayer2.source.dash.manifest.DashManifestParser; @@ -33,43 +33,76 @@ import com.google.android.exoplayer2.source.dash.manifest.Representation; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSpec; -import com.google.android.exoplayer2.upstream.HttpDataSource; import com.google.android.exoplayer2.upstream.ParsingLoadable; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.MimeTypes; import java.io.IOException; import java.util.List; -/** - * Utility methods for DASH streams. - */ +/** Utility methods for DASH streams. */ public final class DashUtil { + /** + * Builds a {@link DataSpec} for a given {@link RangedUri} belonging to {@link Representation}. + * + * @param representation The {@link Representation} to which the request belongs. + * @param baseUrl The base url with which to resolve the request URI. + * @param requestUri The {@link RangedUri} of the data to request. + * @param flags Flags to be set on the returned {@link DataSpec}. See {@link + * DataSpec.Builder#setFlags(int)}. + * @return The {@link DataSpec}. + */ + public static DataSpec buildDataSpec( + Representation representation, String baseUrl, RangedUri requestUri, int flags) { + return new DataSpec.Builder() + .setUri(requestUri.resolveUri(baseUrl)) + .setPosition(requestUri.start) + .setLength(requestUri.length) + .setKey(resolveCacheKey(representation, requestUri)) + .setFlags(flags) + .build(); + } + + /** + * Builds a {@link DataSpec} for a given {@link RangedUri} belonging to {@link Representation}. + * + *
<p>
      Uses the first base URL of the representation to build the data spec. + * + * @param representation The {@link Representation} to which the request belongs. + * @param requestUri The {@link RangedUri} of the data to request. + * @param flags Flags to be set on the returned {@link DataSpec}. See {@link + * DataSpec.Builder#setFlags(int)}. + * @return The {@link DataSpec}. + */ + public static DataSpec buildDataSpec( + Representation representation, RangedUri requestUri, int flags) { + return buildDataSpec(representation, representation.baseUrls.get(0).url, requestUri, flags); + } + /** * Loads a DASH manifest. * - * @param dataSource The {@link HttpDataSource} from which the manifest should be read. + * @param dataSource The {@link DataSource} from which the manifest should be read. * @param uri The {@link Uri} of the manifest to be read. * @return An instance of {@link DashManifest}. * @throws IOException Thrown when there is an error while loading. */ - public static DashManifest loadManifest(DataSource dataSource, Uri uri) - throws IOException { + public static DashManifest loadManifest(DataSource dataSource, Uri uri) throws IOException { return ParsingLoadable.load(dataSource, new DashManifestParser(), uri, C.DATA_TYPE_MANIFEST); } /** - * Loads {@link DrmInitData} for a given period in a DASH manifest. + * Loads a {@link Format} for acquiring keys for a given period in a DASH manifest. * - * @param dataSource The {@link HttpDataSource} from which data should be loaded. + * @param dataSource The {@link DataSource} from which data should be loaded. * @param period The {@link Period}. - * @return The loaded {@link DrmInitData}, or null if none is defined. + * @return The loaded {@link Format}, or null if none is defined. * @throws IOException Thrown when there is an error while loading. - * @throws InterruptedException Thrown if the thread was interrupted. */ @Nullable - public static DrmInitData loadDrmInitData(DataSource dataSource, Period period) - throws IOException, InterruptedException { - int primaryTrackType = C.TRACK_TYPE_VIDEO; + public static Format loadFormatWithDrmInitData(DataSource dataSource, Period period) + throws IOException { + @C.TrackType int primaryTrackType = C.TRACK_TYPE_VIDEO; Representation representation = getFirstRepresentation(period, primaryTrackType); if (representation == null) { primaryTrackType = C.TRACK_TYPE_AUDIO; @@ -79,30 +112,58 @@ public static DrmInitData loadDrmInitData(DataSource dataSource, Period period) } } Format manifestFormat = representation.format; + @Nullable Format sampleFormat = DashUtil.loadSampleFormat(dataSource, primaryTrackType, representation); return sampleFormat == null - ? manifestFormat.drmInitData - : sampleFormat.copyWithManifestFormatInfo(manifestFormat).drmInitData; + ? manifestFormat + : sampleFormat.withManifestFormatInfo(manifestFormat); + } + + /** + * Loads initialization data for the {@code representation} and returns the sample {@link Format}. + * + * @param dataSource The source from which the data should be loaded. + * @param trackType The type of the representation. Typically one of the {@link C + * com.google.android.exoplayer2.C} {@code TRACK_TYPE_*} constants. + * @param representation The representation which initialization chunk belongs to. + * @param baseUrlIndex The index of the base URL to be picked from the {@link + * Representation#baseUrls list of base URLs}. + * @return the sample {@link Format} of the given representation. + * @throws IOException Thrown when there is an error while loading. 
+ */ + @Nullable + public static Format loadSampleFormat( + DataSource dataSource, int trackType, Representation representation, int baseUrlIndex) + throws IOException { + if (representation.getInitializationUri() == null) { + return null; + } + ChunkExtractor chunkExtractor = newChunkExtractor(trackType, representation.format); + try { + loadInitializationData( + chunkExtractor, dataSource, representation, baseUrlIndex, /* loadIndex= */ false); + } finally { + chunkExtractor.release(); + } + return Assertions.checkStateNotNull(chunkExtractor.getSampleFormats())[0]; } /** * Loads initialization data for the {@code representation} and returns the sample {@link Format}. * + *
<p>
      Uses the first base URL for loading the format. + * * @param dataSource The source from which the data should be loaded. - * @param trackType The type of the representation. Typically one of the {@link + * @param trackType The type of the representation. Typically one of the {@link C * com.google.android.exoplayer2.C} {@code TRACK_TYPE_*} constants. * @param representation The representation which initialization chunk belongs to. * @return the sample {@link Format} of the given representation. * @throws IOException Thrown when there is an error while loading. - * @throws InterruptedException Thrown if the thread was interrupted. */ @Nullable public static Format loadSampleFormat( - DataSource dataSource, int trackType, Representation representation) - throws IOException, InterruptedException { - ChunkExtractorWrapper extractorWrapper = loadInitializationData(dataSource, trackType, - representation, false); - return extractorWrapper == null ? null : extractorWrapper.getSampleFormats()[0]; + DataSource dataSource, int trackType, Representation representation) throws IOException { + return loadSampleFormat(dataSource, trackType, representation, /* baseUrlIndex= */ 0); } /** @@ -110,89 +171,164 @@ public static Format loadSampleFormat( * ChunkIndex}. * * @param dataSource The source from which the data should be loaded. - * @param trackType The type of the representation. Typically one of the {@link + * @param trackType The type of the representation. Typically one of the {@link C * com.google.android.exoplayer2.C} {@code TRACK_TYPE_*} constants. * @param representation The representation which initialization chunk belongs to. + * @param baseUrlIndex The index of the base URL with which to resolve the request URI. * @return The {@link ChunkIndex} of the given representation, or null if no initialization or * index data exists. * @throws IOException Thrown when there is an error while loading. - * @throws InterruptedException Thrown if the thread was interrupted. */ @Nullable public static ChunkIndex loadChunkIndex( - DataSource dataSource, int trackType, Representation representation) - throws IOException, InterruptedException { - ChunkExtractorWrapper extractorWrapper = loadInitializationData(dataSource, trackType, - representation, true); - return extractorWrapper == null ? null : (ChunkIndex) extractorWrapper.getSeekMap(); + DataSource dataSource, int trackType, Representation representation, int baseUrlIndex) + throws IOException { + if (representation.getInitializationUri() == null) { + return null; + } + ChunkExtractor chunkExtractor = newChunkExtractor(trackType, representation.format); + try { + loadInitializationData( + chunkExtractor, dataSource, representation, baseUrlIndex, /* loadIndex= */ true); + } finally { + chunkExtractor.release(); + } + return chunkExtractor.getChunkIndex(); } /** - * Loads initialization data for the {@code representation} and optionally index data then returns - * a {@link ChunkExtractorWrapper} which contains the output. + * Loads initialization and index data for the {@code representation} and returns the {@link + * ChunkIndex}. + * + *
<p>
      Uses the first base URL for loading the index. * * @param dataSource The source from which the data should be loaded. - * @param trackType The type of the representation. Typically one of the {@link + * @param trackType The type of the representation. Typically one of the {@link C * com.google.android.exoplayer2.C} {@code TRACK_TYPE_*} constants. * @param representation The representation which initialization chunk belongs to. - * @param loadIndex Whether to load index data too. - * @return A {@link ChunkExtractorWrapper} for the {@code representation}, or null if no - * initialization or (if requested) index data exists. + * @return The {@link ChunkIndex} of the given representation, or null if no initialization or + * index data exists. * @throws IOException Thrown when there is an error while loading. - * @throws InterruptedException Thrown if the thread was interrupted. */ @Nullable - private static ChunkExtractorWrapper loadInitializationData( - DataSource dataSource, int trackType, Representation representation, boolean loadIndex) - throws IOException, InterruptedException { - RangedUri initializationUri = representation.getInitializationUri(); - if (initializationUri == null) { - return null; - } - ChunkExtractorWrapper extractorWrapper = newWrappedExtractor(trackType, representation.format); - RangedUri requestUri; + public static ChunkIndex loadChunkIndex( + DataSource dataSource, int trackType, Representation representation) throws IOException { + return loadChunkIndex(dataSource, trackType, representation, /* baseUrlIndex= */ 0); + } + + /** + * Loads initialization data for the {@code representation} and optionally index data then returns + * a {@link BundledChunkExtractor} which contains the output. + * + * @param chunkExtractor The {@link ChunkExtractor} to use. + * @param dataSource The source from which the data should be loaded. + * @param representation The representation which initialization chunk belongs to. + * @param baseUrlIndex The index of the base URL with which to resolve the request URI. + * @param loadIndex Whether to load index data too. + * @throws IOException Thrown when there is an error while loading. + */ + private static void loadInitializationData( + ChunkExtractor chunkExtractor, + DataSource dataSource, + Representation representation, + int baseUrlIndex, + boolean loadIndex) + throws IOException { + RangedUri initializationUri = Assertions.checkNotNull(representation.getInitializationUri()); + @Nullable RangedUri requestUri; if (loadIndex) { - RangedUri indexUri = representation.getIndexUri(); + @Nullable RangedUri indexUri = representation.getIndexUri(); if (indexUri == null) { - return null; + return; } // It's common for initialization and index data to be stored adjacently. Attempt to merge // the two requests together to request both at once. 
- requestUri = initializationUri.attemptMerge(indexUri, representation.baseUrl); + requestUri = + initializationUri.attemptMerge(indexUri, representation.baseUrls.get(baseUrlIndex).url); if (requestUri == null) { - loadInitializationData(dataSource, representation, extractorWrapper, initializationUri); + loadInitializationData( + dataSource, representation, baseUrlIndex, chunkExtractor, initializationUri); requestUri = indexUri; } } else { requestUri = initializationUri; } - loadInitializationData(dataSource, representation, extractorWrapper, requestUri); - return extractorWrapper; + loadInitializationData(dataSource, representation, baseUrlIndex, chunkExtractor, requestUri); } - private static void loadInitializationData(DataSource dataSource, - Representation representation, ChunkExtractorWrapper extractorWrapper, RangedUri requestUri) - throws IOException, InterruptedException { - DataSpec dataSpec = new DataSpec(requestUri.resolveUri(representation.baseUrl), - requestUri.start, requestUri.length, representation.getCacheKey()); - InitializationChunk initializationChunk = new InitializationChunk(dataSource, dataSpec, - representation.format, C.SELECTION_REASON_UNKNOWN, null /* trackSelectionData */, - extractorWrapper); + /** + * Loads initialization data for the {@code representation} and optionally index data then returns + * a {@link BundledChunkExtractor} which contains the output. + * + *
<p>
      Uses the first base URL for loading the initialization data. + * + * @param chunkExtractor The {@link ChunkExtractor} to use. + * @param dataSource The source from which the data should be loaded. + * @param representation The representation which initialization chunk belongs to. + * @param loadIndex Whether to load index data too. + * @throws IOException Thrown when there is an error while loading. + */ + public static void loadInitializationData( + ChunkExtractor chunkExtractor, + DataSource dataSource, + Representation representation, + boolean loadIndex) + throws IOException { + loadInitializationData( + chunkExtractor, dataSource, representation, /* baseUrlIndex= */ 0, loadIndex); + } + + private static void loadInitializationData( + DataSource dataSource, + Representation representation, + int baseUrlIndex, + ChunkExtractor chunkExtractor, + RangedUri requestUri) + throws IOException { + DataSpec dataSpec = + DashUtil.buildDataSpec( + representation, + representation.baseUrls.get(baseUrlIndex).url, + requestUri, + /* flags= */ 0); + InitializationChunk initializationChunk = + new InitializationChunk( + dataSource, + dataSpec, + representation.format, + C.SELECTION_REASON_UNKNOWN, + null /* trackSelectionData */, + chunkExtractor); initializationChunk.load(); } - private static ChunkExtractorWrapper newWrappedExtractor(int trackType, Format format) { + /** + * Resolves the cache key to be used when requesting the given ranged URI for the given {@link + * Representation}. + * + * @param representation The {@link Representation} to which the URI belongs to. + * @param rangedUri The URI for which to resolve the cache key. + * @return The cache key. + */ + public static String resolveCacheKey(Representation representation, RangedUri rangedUri) { + @Nullable String cacheKey = representation.getCacheKey(); + return cacheKey != null + ? cacheKey + : rangedUri.resolveUri(representation.baseUrls.get(0).url).toString(); + } + + private static ChunkExtractor newChunkExtractor(int trackType, Format format) { String mimeType = format.containerMimeType; boolean isWebm = mimeType != null && (mimeType.startsWith(MimeTypes.VIDEO_WEBM) || mimeType.startsWith(MimeTypes.AUDIO_WEBM)); Extractor extractor = isWebm ? new MatroskaExtractor() : new FragmentedMp4Extractor(); - return new ChunkExtractorWrapper(extractor, trackType, format); + return new BundledChunkExtractor(extractor, trackType, format); } @Nullable - private static Representation getFirstRepresentation(Period period, int type) { + private static Representation getFirstRepresentation(Period period, @C.TrackType int type) { int index = period.getAdaptationSetIndex(type); if (index == C.INDEX_UNSET) { return null; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashWrappingSegmentIndex.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashWrappingSegmentIndex.java index 3eca7892c4..e894e07c3d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashWrappingSegmentIndex.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DashWrappingSegmentIndex.java @@ -15,12 +15,13 @@ */ package com.google.android.exoplayer2.source.dash; +import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.extractor.ChunkIndex; import com.google.android.exoplayer2.source.dash.manifest.RangedUri; /** - * An implementation of {@link DashSegmentIndex} that wraps a {@link ChunkIndex} parsed from a - * media stream. 
+ * An implementation of {@link DashSegmentIndex} that wraps a {@link ChunkIndex} parsed from a media + * stream. */ public final class DashWrappingSegmentIndex implements DashSegmentIndex { @@ -42,10 +43,25 @@ public long getFirstSegmentNum() { } @Override - public int getSegmentCount(long periodDurationUs) { + public long getFirstAvailableSegmentNum(long periodDurationUs, long nowUnixTimeUs) { + return 0; + } + + @Override + public long getSegmentCount(long periodDurationUs) { + return chunkIndex.length; + } + + @Override + public long getAvailableSegmentCount(long periodDurationUs, long nowUnixTimeUs) { return chunkIndex.length; } + @Override + public long getNextSegmentAvailableTimeUs(long periodDurationUs, long nowUnixTimeUs) { + return C.TIME_UNSET; + } + @Override public long getTimeUs(long segmentNum) { return chunkIndex.timesUs[(int) segmentNum] - timeOffsetUs; @@ -71,5 +87,4 @@ public long getSegmentNum(long timeUs, long periodDurationUs) { public boolean isExplicit() { return true; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DefaultDashChunkSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DefaultDashChunkSource.java index cf0ffde411..0bdc49d76d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DefaultDashChunkSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/DefaultDashChunkSource.java @@ -15,6 +15,9 @@ */ package com.google.android.exoplayer2.source.dash; +import static java.lang.Math.max; +import static java.lang.Math.min; + import android.net.Uri; import android.os.SystemClock; import androidx.annotation.CheckResult; @@ -22,17 +25,13 @@ import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.SeekParameters; +import com.google.android.exoplayer2.analytics.PlayerId; import com.google.android.exoplayer2.extractor.ChunkIndex; -import com.google.android.exoplayer2.extractor.Extractor; -import com.google.android.exoplayer2.extractor.SeekMap; -import com.google.android.exoplayer2.extractor.TrackOutput; -import com.google.android.exoplayer2.extractor.mkv.MatroskaExtractor; -import com.google.android.exoplayer2.extractor.mp4.FragmentedMp4Extractor; -import com.google.android.exoplayer2.extractor.rawcc.RawCcExtractor; import com.google.android.exoplayer2.source.BehindLiveWindowException; import com.google.android.exoplayer2.source.chunk.BaseMediaChunkIterator; +import com.google.android.exoplayer2.source.chunk.BundledChunkExtractor; import com.google.android.exoplayer2.source.chunk.Chunk; -import com.google.android.exoplayer2.source.chunk.ChunkExtractorWrapper; +import com.google.android.exoplayer2.source.chunk.ChunkExtractor; import com.google.android.exoplayer2.source.chunk.ChunkHolder; import com.google.android.exoplayer2.source.chunk.ContainerMediaChunk; import com.google.android.exoplayer2.source.chunk.InitializationChunk; @@ -41,36 +40,60 @@ import com.google.android.exoplayer2.source.chunk.SingleSampleMediaChunk; import com.google.android.exoplayer2.source.dash.PlayerEmsgHandler.PlayerTrackEmsgHandler; import com.google.android.exoplayer2.source.dash.manifest.AdaptationSet; +import com.google.android.exoplayer2.source.dash.manifest.BaseUrl; import com.google.android.exoplayer2.source.dash.manifest.DashManifest; import com.google.android.exoplayer2.source.dash.manifest.RangedUri; import com.google.android.exoplayer2.source.dash.manifest.Representation; -import 
com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSpec; import com.google.android.exoplayer2.upstream.HttpDataSource.InvalidResponseCodeException; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; import com.google.android.exoplayer2.upstream.LoaderErrorThrower; import com.google.android.exoplayer2.upstream.TransferListener; -import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; import java.io.IOException; import java.util.ArrayList; import java.util.List; -/** - * A default {@link DashChunkSource} implementation. - */ +/** A default {@link DashChunkSource} implementation. */ public class DefaultDashChunkSource implements DashChunkSource { public static final class Factory implements DashChunkSource.Factory { private final DataSource.Factory dataSourceFactory; private final int maxSegmentsPerLoad; + private final ChunkExtractor.Factory chunkExtractorFactory; + /** + * Equivalent to {@link #Factory(ChunkExtractor.Factory, DataSource.Factory, int) new + * Factory(BundledChunkExtractor.FACTORY, dataSourceFactory, maxSegmentsPerLoad = 1)}. + */ public Factory(DataSource.Factory dataSourceFactory) { this(dataSourceFactory, /* maxSegmentsPerLoad= */ 1); } + /** + * Equivalent to {@link #Factory(ChunkExtractor.Factory, DataSource.Factory, int) new + * Factory(BundledChunkExtractor.FACTORY, dataSourceFactory, maxSegmentsPerLoad)}. + */ public Factory(DataSource.Factory dataSourceFactory, int maxSegmentsPerLoad) { + this(BundledChunkExtractor.FACTORY, dataSourceFactory, maxSegmentsPerLoad); + } + + /** + * Creates a new instance. + * + * @param chunkExtractorFactory Creates {@link ChunkExtractor} instances to use for extracting + * chunks. + * @param dataSourceFactory Creates the {@link DataSource} to use for downloading chunks. + * @param maxSegmentsPerLoad See {@link DefaultDashChunkSource#DefaultDashChunkSource}. 
+ */ + public Factory( + ChunkExtractor.Factory chunkExtractorFactory, + DataSource.Factory dataSourceFactory, + int maxSegmentsPerLoad) { + this.chunkExtractorFactory = chunkExtractorFactory; this.dataSourceFactory = dataSourceFactory; this.maxSegmentsPerLoad = maxSegmentsPerLoad; } @@ -79,22 +102,26 @@ public Factory(DataSource.Factory dataSourceFactory, int maxSegmentsPerLoad) { public DashChunkSource createDashChunkSource( LoaderErrorThrower manifestLoaderErrorThrower, DashManifest manifest, + BaseUrlExclusionList baseUrlExclusionList, int periodIndex, int[] adaptationSetIndices, - TrackSelection trackSelection, - int trackType, + ExoTrackSelection trackSelection, + @C.TrackType int trackType, long elapsedRealtimeOffsetMs, boolean enableEventMessageTrack, List closedCaptionFormats, @Nullable PlayerTrackEmsgHandler playerEmsgHandler, - @Nullable TransferListener transferListener) { + @Nullable TransferListener transferListener, + PlayerId playerId) { DataSource dataSource = dataSourceFactory.createDataSource(); if (transferListener != null) { dataSource.addTransferListener(transferListener); } return new DefaultDashChunkSource( + chunkExtractorFactory, manifestLoaderErrorThrower, manifest, + baseUrlExclusionList, periodIndex, adaptationSetIndices, trackSelection, @@ -104,14 +131,15 @@ public DashChunkSource createDashChunkSource( maxSegmentsPerLoad, enableEventMessageTrack, closedCaptionFormats, - playerEmsgHandler); + playerEmsgHandler, + playerId); } - } private final LoaderErrorThrower manifestLoaderErrorThrower; + private final BaseUrlExclusionList baseUrlExclusionList; private final int[] adaptationSetIndices; - private final int trackType; + private final @C.TrackType int trackType; private final DataSource dataSource; private final long elapsedRealtimeOffsetMs; private final int maxSegmentsPerLoad; @@ -119,24 +147,26 @@ public DashChunkSource createDashChunkSource( protected final RepresentationHolder[] representationHolders; - private TrackSelection trackSelection; + private ExoTrackSelection trackSelection; private DashManifest manifest; private int periodIndex; - private IOException fatalError; + @Nullable private IOException fatalError; private boolean missingLastSegment; - private long liveEdgeTimeUs; /** + * @param chunkExtractorFactory Creates {@link ChunkExtractor} instances to use for extracting + * chunks. * @param manifestLoaderErrorThrower Throws errors affecting loading of manifests. * @param manifest The initial manifest. + * @param baseUrlExclusionList The base URL exclusion list. * @param periodIndex The index of the period in the manifest. * @param adaptationSetIndices The indices of the adaptation sets in the period. * @param trackSelection The track selection. - * @param trackType The type of the tracks in the selection. + * @param trackType The {@link C.TrackType type} of the tracks in the selection. * @param dataSource A {@link DataSource} suitable for loading the media data. * @param elapsedRealtimeOffsetMs If known, an estimate of the instantaneous difference between * server-side unix time and {@link SystemClock#elapsedRealtime()} in milliseconds, specified - * as the server's unix time minus the local elapsed time. If unknown, set to 0. + * as the server's unix time minus the local elapsed time. Or {@link C#TIME_UNSET} if unknown. * @param maxSegmentsPerLoad The maximum number of segments to combine into a single request. Note * that segments will only be combined if their {@link Uri}s are the same and if their data * ranges are adjacent. 
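For context, a minimal sketch of how the new three-argument DefaultDashChunkSource.Factory introduced above might be wired into a DashMediaSource, assuming the ExoPlayer APIs present at this patch level; the class name and manifest URL are placeholders, not part of the patch:

import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.source.chunk.BundledChunkExtractor;
import com.google.android.exoplayer2.source.dash.DashMediaSource;
import com.google.android.exoplayer2.source.dash.DefaultDashChunkSource;
import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;

final class DashChunkSourceFactoryExample {
  static DashMediaSource buildMediaSource() {
    DefaultHttpDataSource.Factory dataSourceFactory = new DefaultHttpDataSource.Factory();
    // New constructor from this patch: explicit ChunkExtractor.Factory plus maxSegmentsPerLoad.
    // Segments are only combined into one request when their URIs match and ranges are adjacent.
    DefaultDashChunkSource.Factory chunkSourceFactory =
        new DefaultDashChunkSource.Factory(
            BundledChunkExtractor.FACTORY, dataSourceFactory, /* maxSegmentsPerLoad= */ 3);
    return new DashMediaSource.Factory(chunkSourceFactory, dataSourceFactory)
        .createMediaSource(MediaItem.fromUri("https://example.com/stream.mpd")); // placeholder URL
  }
}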
@@ -144,22 +174,27 @@ public DashChunkSource createDashChunkSource( * @param closedCaptionFormats The {@link Format Formats} of closed caption tracks to be output. * @param playerTrackEmsgHandler The {@link PlayerTrackEmsgHandler} instance to handle emsg * messages targeting the player. Maybe null if this is not necessary. + * @param playerId The {@link PlayerId} of the player using this chunk source. */ public DefaultDashChunkSource( + ChunkExtractor.Factory chunkExtractorFactory, LoaderErrorThrower manifestLoaderErrorThrower, DashManifest manifest, + BaseUrlExclusionList baseUrlExclusionList, int periodIndex, int[] adaptationSetIndices, - TrackSelection trackSelection, - int trackType, + ExoTrackSelection trackSelection, + @C.TrackType int trackType, DataSource dataSource, long elapsedRealtimeOffsetMs, int maxSegmentsPerLoad, boolean enableEventMessageTrack, List closedCaptionFormats, - @Nullable PlayerTrackEmsgHandler playerTrackEmsgHandler) { + @Nullable PlayerTrackEmsgHandler playerTrackEmsgHandler, + PlayerId playerId) { this.manifestLoaderErrorThrower = manifestLoaderErrorThrower; this.manifest = manifest; + this.baseUrlExclusionList = baseUrlExclusionList; this.adaptationSetIndices = adaptationSetIndices; this.trackSelection = trackSelection; this.trackType = trackType; @@ -170,20 +205,27 @@ public DefaultDashChunkSource( this.playerTrackEmsgHandler = playerTrackEmsgHandler; long periodDurationUs = manifest.getPeriodDurationUs(periodIndex); - liveEdgeTimeUs = C.TIME_UNSET; List representations = getRepresentations(); representationHolders = new RepresentationHolder[trackSelection.length()]; for (int i = 0; i < representationHolders.length; i++) { Representation representation = representations.get(trackSelection.getIndexInTrackGroup(i)); + @Nullable + BaseUrl selectedBaseUrl = baseUrlExclusionList.selectBaseUrl(representation.baseUrls); representationHolders[i] = new RepresentationHolder( periodDurationUs, - trackType, representation, - enableEventMessageTrack, - closedCaptionFormats, - playerTrackEmsgHandler); + selectedBaseUrl != null ? selectedBaseUrl : representation.baseUrls.get(0), + chunkExtractorFactory.createProgressiveMediaExtractor( + trackType, + representation.format, + enableEventMessageTrack, + closedCaptionFormats, + playerTrackEmsgHandler, + playerId), + /* segmentNumShift= */ 0, + representation.getIndex()); } } @@ -194,11 +236,15 @@ public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParame if (representationHolder.segmentIndex != null) { long segmentNum = representationHolder.getSegmentNum(positionUs); long firstSyncUs = representationHolder.getSegmentStartTimeUs(segmentNum); + long segmentCount = representationHolder.getSegmentCount(); long secondSyncUs = - firstSyncUs < positionUs && segmentNum < representationHolder.getSegmentCount() - 1 + firstSyncUs < positionUs + && (segmentCount == DashSegmentIndex.INDEX_UNBOUNDED + || segmentNum + < representationHolder.getFirstSegmentNum() + segmentCount - 1) ? representationHolder.getSegmentStartTimeUs(segmentNum + 1) : firstSyncUs; - return Util.resolveSeekPositionUs(positionUs, seekParameters, firstSyncUs, secondSyncUs); + return seekParameters.resolveSeekPositionUs(positionUs, firstSyncUs, secondSyncUs); } } // We don't have a segment index to adjust the seek position with yet. 
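The getAdjustedSeekPositionUs change above ends up handing two candidate sync points to SeekParameters#resolveSeekPositionUs: the start of the segment containing the seek position and the start of the following segment. A rough standalone illustration of that choice under closest-sync semantics (simplified; not the library's exact resolution logic, and the class and method names here are illustrative only):

final class SeekSnapExample {
  /**
   * firstSyncUs is the start of the segment containing positionUs, secondSyncUs the start of the
   * next segment (or firstSyncUs again when positionUs falls in the last segment). This sketch
   * simply snaps to whichever candidate is closer to the requested position.
   */
  static long snapToClosestSegmentStart(long positionUs, long firstSyncUs, long secondSyncUs) {
    return positionUs - firstSyncUs <= secondSyncUs - positionUs ? firstSyncUs : secondSyncUs;
  }

  public static void main(String[] args) {
    // Segment starts at 4s and 8s (microseconds); a seek to 6.5s snaps forward to 8s.
    System.out.println(snapToClosestSegmentStart(6_500_000L, 4_000_000L, 8_000_000L)); // 8000000
  }
}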
@@ -223,7 +269,7 @@ public void updateManifest(DashManifest newManifest, int newPeriodIndex) { } @Override - public void updateTrackSelection(TrackSelection trackSelection) { + public void updateTrackSelection(ExoTrackSelection trackSelection) { this.trackSelection = trackSelection; } @@ -244,6 +290,15 @@ public int getPreferredQueueSize(long playbackPositionUs, List queue) { + if (fatalError != null) { + return false; + } + return trackSelection.shouldCancelChunkLoad(playbackPositionUs, loadingChunk, queue); + } + @Override public void getNextChunk( long playbackPositionUs, @@ -255,10 +310,9 @@ public void getNextChunk( } long bufferedDurationUs = loadPositionUs - playbackPositionUs; - long timeToLiveEdgeUs = resolveTimeToLiveEdgeUs(playbackPositionUs); long presentationPositionUs = - C.msToUs(manifest.availabilityStartTimeMs) - + C.msToUs(manifest.getPeriod(periodIndex).startMs) + Util.msToUs(manifest.availabilityStartTimeMs) + + Util.msToUs(manifest.getPeriod(periodIndex).startMs) + loadPositionUs; if (playerTrackEmsgHandler != null @@ -267,7 +321,8 @@ public void getNextChunk( return; } - long nowUnixTimeUs = getNowUnixTimeUs(); + long nowUnixTimeUs = Util.msToUs(Util.getNowUnixTimeMs(elapsedRealtimeOffsetMs)); + long nowPeriodTimeUs = getNowPeriodTimeUs(nowUnixTimeUs); MediaChunk previous = queue.isEmpty() ? null : queue.get(queue.size() - 1); MediaChunkIterator[] chunkIterators = new MediaChunkIterator[trackSelection.length()]; for (int i = 0; i < chunkIterators.length; i++) { @@ -276,9 +331,9 @@ public void getNextChunk( chunkIterators[i] = MediaChunkIterator.EMPTY; } else { long firstAvailableSegmentNum = - representationHolder.getFirstAvailableSegmentNum(manifest, periodIndex, nowUnixTimeUs); + representationHolder.getFirstAvailableSegmentNum(nowUnixTimeUs); long lastAvailableSegmentNum = - representationHolder.getLastAvailableSegmentNum(manifest, periodIndex, nowUnixTimeUs); + representationHolder.getLastAvailableSegmentNum(nowUnixTimeUs); long segmentNum = getSegmentNum( representationHolder, @@ -289,24 +344,25 @@ public void getNextChunk( if (segmentNum < firstAvailableSegmentNum) { chunkIterators[i] = MediaChunkIterator.EMPTY; } else { + representationHolder = updateSelectedBaseUrl(/* trackIndex= */ i); chunkIterators[i] = new RepresentationSegmentIterator( - representationHolder, segmentNum, lastAvailableSegmentNum); + representationHolder, segmentNum, lastAvailableSegmentNum, nowPeriodTimeUs); } } } + long availableLiveDurationUs = getAvailableLiveDurationUs(nowUnixTimeUs, playbackPositionUs); trackSelection.updateSelectedTrack( - playbackPositionUs, bufferedDurationUs, timeToLiveEdgeUs, queue, chunkIterators); + playbackPositionUs, bufferedDurationUs, availableLiveDurationUs, queue, chunkIterators); RepresentationHolder representationHolder = - representationHolders[trackSelection.getSelectedIndex()]; - - if (representationHolder.extractorWrapper != null) { + updateSelectedBaseUrl(trackSelection.getSelectedIndex()); + if (representationHolder.chunkExtractor != null) { Representation selectedRepresentation = representationHolder.representation; - RangedUri pendingInitializationUri = null; - RangedUri pendingIndexUri = null; - if (representationHolder.extractorWrapper.getSampleFormats() == null) { + @Nullable RangedUri pendingInitializationUri = null; + @Nullable RangedUri pendingIndexUri = null; + if (representationHolder.chunkExtractor.getSampleFormats() == null) { pendingInitializationUri = selectedRepresentation.getInitializationUri(); } if 
(representationHolder.segmentIndex == null) { @@ -314,9 +370,15 @@ public void getNextChunk( } if (pendingInitializationUri != null || pendingIndexUri != null) { // We have initialization and/or index requests to make. - out.chunk = newInitializationChunk(representationHolder, dataSource, - trackSelection.getSelectedFormat(), trackSelection.getSelectionReason(), - trackSelection.getSelectionData(), pendingInitializationUri, pendingIndexUri); + out.chunk = + newInitializationChunk( + representationHolder, + dataSource, + trackSelection.getSelectedFormat(), + trackSelection.getSelectionReason(), + trackSelection.getSelectionData(), + pendingInitializationUri, + pendingIndexUri); return; } } @@ -330,13 +392,8 @@ public void getNextChunk( return; } - long firstAvailableSegmentNum = - representationHolder.getFirstAvailableSegmentNum(manifest, periodIndex, nowUnixTimeUs); - long lastAvailableSegmentNum = - representationHolder.getLastAvailableSegmentNum(manifest, periodIndex, nowUnixTimeUs); - - updateLiveEdgeTimeUs(representationHolder, lastAvailableSegmentNum); - + long firstAvailableSegmentNum = representationHolder.getFirstAvailableSegmentNum(nowUnixTimeUs); + long lastAvailableSegmentNum = representationHolder.getLastAvailableSegmentNum(nowUnixTimeUs); long segmentNum = getSegmentNum( representationHolder, @@ -363,8 +420,7 @@ public void getNextChunk( return; } - int maxSegmentCount = - (int) Math.min(maxSegmentsPerLoad, lastAvailableSegmentNum - segmentNum + 1); + int maxSegmentCount = (int) min(maxSegmentsPerLoad, lastAvailableSegmentNum - segmentNum + 1); if (periodDurationUs != C.TIME_UNSET) { while (maxSegmentCount > 1 && representationHolder.getSegmentStartTimeUs(segmentNum + maxSegmentCount - 1) @@ -386,7 +442,8 @@ public void getNextChunk( trackSelection.getSelectionData(), segmentNum, maxSegmentCount, - seekTimeUs); + seekTimeUs, + nowPeriodTimeUs); } @Override @@ -399,13 +456,12 @@ public void onChunkLoadCompleted(Chunk chunk) { // from the stream. If the manifest defines an index then the stream shouldn't, but in cases // where it does we should ignore it. 
if (representationHolder.segmentIndex == null) { - SeekMap seekMap = representationHolder.extractorWrapper.getSeekMap(); - if (seekMap != null) { + @Nullable ChunkIndex chunkIndex = representationHolder.chunkExtractor.getChunkIndex(); + if (chunkIndex != null) { representationHolders[trackIndex] = representationHolder.copyWithNewSegmentIndex( new DashWrappingSegmentIndex( - (ChunkIndex) seekMap, - representationHolder.representation.presentationTimeOffsetUs)); + chunkIndex, representationHolder.representation.presentationTimeOffsetUs)); } } } @@ -416,21 +472,24 @@ public void onChunkLoadCompleted(Chunk chunk) { @Override public boolean onChunkLoadError( - Chunk chunk, boolean cancelable, Exception e, long blacklistDurationMs) { + Chunk chunk, + boolean cancelable, + LoadErrorHandlingPolicy.LoadErrorInfo loadErrorInfo, + LoadErrorHandlingPolicy loadErrorHandlingPolicy) { if (!cancelable) { return false; } - if (playerTrackEmsgHandler != null - && playerTrackEmsgHandler.maybeRefreshManifestOnLoadingError(chunk)) { + if (playerTrackEmsgHandler != null && playerTrackEmsgHandler.onChunkLoadError(chunk)) { return true; } // Workaround for missing segment at the end of the period - if (!manifest.dynamic && chunk instanceof MediaChunk - && e instanceof InvalidResponseCodeException - && ((InvalidResponseCodeException) e).responseCode == 404) { + if (!manifest.dynamic + && chunk instanceof MediaChunk + && loadErrorInfo.exception instanceof InvalidResponseCodeException + && ((InvalidResponseCodeException) loadErrorInfo.exception).responseCode == 404) { RepresentationHolder representationHolder = representationHolders[trackSelection.indexOf(chunk.trackFormat)]; - int segmentCount = representationHolder.getSegmentCount(); + long segmentCount = representationHolder.getSegmentCount(); if (segmentCount != DashSegmentIndex.INDEX_UNBOUNDED && segmentCount != 0) { long lastAvailableSegmentNum = representationHolder.getFirstSegmentNum() + segmentCount - 1; if (((MediaChunk) chunk).getNextChunkIndex() > lastAvailableSegmentNum) { @@ -439,12 +498,76 @@ public boolean onChunkLoadError( } } } - return blacklistDurationMs != C.TIME_UNSET - && trackSelection.blacklist(trackSelection.indexOf(chunk.trackFormat), blacklistDurationMs); + + int trackIndex = trackSelection.indexOf(chunk.trackFormat); + RepresentationHolder representationHolder = representationHolders[trackIndex]; + @Nullable + BaseUrl newBaseUrl = + baseUrlExclusionList.selectBaseUrl(representationHolder.representation.baseUrls); + if (newBaseUrl != null && !representationHolder.selectedBaseUrl.equals(newBaseUrl)) { + // The base URL has changed since the failing chunk was created. Request a replacement chunk, + // which will use the new base URL. + return true; + } + + LoadErrorHandlingPolicy.FallbackOptions fallbackOptions = + createFallbackOptions(trackSelection, representationHolder.representation.baseUrls); + if (!fallbackOptions.isFallbackAvailable(LoadErrorHandlingPolicy.FALLBACK_TYPE_TRACK) + && !fallbackOptions.isFallbackAvailable(LoadErrorHandlingPolicy.FALLBACK_TYPE_LOCATION)) { + return false; + } + @Nullable + LoadErrorHandlingPolicy.FallbackSelection fallbackSelection = + loadErrorHandlingPolicy.getFallbackSelectionFor(fallbackOptions, loadErrorInfo); + if (fallbackSelection == null || !fallbackOptions.isFallbackAvailable(fallbackSelection.type)) { + // Policy indicated to not use any fallback or a fallback type that is not available. 
+ return false; + } + + boolean cancelLoad = false; + if (fallbackSelection.type == LoadErrorHandlingPolicy.FALLBACK_TYPE_TRACK) { + cancelLoad = + trackSelection.blacklist( + trackSelection.indexOf(chunk.trackFormat), fallbackSelection.exclusionDurationMs); + } else if (fallbackSelection.type == LoadErrorHandlingPolicy.FALLBACK_TYPE_LOCATION) { + baseUrlExclusionList.exclude( + representationHolder.selectedBaseUrl, fallbackSelection.exclusionDurationMs); + cancelLoad = true; + } + return cancelLoad; + } + + @Override + public void release() { + for (RepresentationHolder representationHolder : representationHolders) { + @Nullable ChunkExtractor chunkExtractor = representationHolder.chunkExtractor; + if (chunkExtractor != null) { + chunkExtractor.release(); + } + } } // Internal methods. + private LoadErrorHandlingPolicy.FallbackOptions createFallbackOptions( + ExoTrackSelection trackSelection, List baseUrls) { + long nowMs = SystemClock.elapsedRealtime(); + int numberOfTracks = trackSelection.length(); + int numberOfExcludedTracks = 0; + for (int i = 0; i < numberOfTracks; i++) { + if (trackSelection.isBlacklisted(i, nowMs)) { + numberOfExcludedTracks++; + } + } + int priorityCount = BaseUrlExclusionList.getPriorityCount(baseUrls); + return new LoadErrorHandlingPolicy.FallbackOptions( + /* numberOfLocations= */ priorityCount, + /* numberOfExcludedLocations= */ priorityCount + - baseUrlExclusionList.getPriorityCountAfterExclusion(baseUrls), + numberOfTracks, + numberOfExcludedTracks); + } + private long getSegmentNum( RepresentationHolder representationHolder, @Nullable MediaChunk previousChunk, @@ -468,76 +591,100 @@ private ArrayList getRepresentations() { return representations; } - private void updateLiveEdgeTimeUs( - RepresentationHolder representationHolder, long lastAvailableSegmentNum) { - liveEdgeTimeUs = manifest.dynamic - ? representationHolder.getSegmentEndTimeUs(lastAvailableSegmentNum) : C.TIME_UNSET; - } - - private long getNowUnixTimeUs() { - if (elapsedRealtimeOffsetMs != 0) { - return (SystemClock.elapsedRealtime() + elapsedRealtimeOffsetMs) * 1000; - } else { - return System.currentTimeMillis() * 1000; + private long getAvailableLiveDurationUs(long nowUnixTimeUs, long playbackPositionUs) { + if (!manifest.dynamic) { + return C.TIME_UNSET; } + long lastSegmentNum = representationHolders[0].getLastAvailableSegmentNum(nowUnixTimeUs); + long lastSegmentEndTimeUs = representationHolders[0].getSegmentEndTimeUs(lastSegmentNum); + long nowPeriodTimeUs = getNowPeriodTimeUs(nowUnixTimeUs); + long availabilityEndTimeUs = min(nowPeriodTimeUs, lastSegmentEndTimeUs); + return max(0, availabilityEndTimeUs - playbackPositionUs); } - private long resolveTimeToLiveEdgeUs(long playbackPositionUs) { - boolean resolveTimeToLiveEdgePossible = manifest.dynamic && liveEdgeTimeUs != C.TIME_UNSET; - return resolveTimeToLiveEdgePossible ? liveEdgeTimeUs - playbackPositionUs : C.TIME_UNSET; + private long getNowPeriodTimeUs(long nowUnixTimeUs) { + return manifest.availabilityStartTimeMs == C.TIME_UNSET + ? 
C.TIME_UNSET + : nowUnixTimeUs + - Util.msToUs( + manifest.availabilityStartTimeMs + manifest.getPeriod(periodIndex).startMs); } protected Chunk newInitializationChunk( RepresentationHolder representationHolder, DataSource dataSource, Format trackFormat, - int trackSelectionReason, - Object trackSelectionData, - RangedUri initializationUri, - RangedUri indexUri) { - RangedUri requestUri; - String baseUrl = representationHolder.representation.baseUrl; + @C.SelectionReason int trackSelectionReason, + @Nullable Object trackSelectionData, + @Nullable RangedUri initializationUri, + @Nullable RangedUri indexUri) { + Representation representation = representationHolder.representation; + @Nullable RangedUri requestUri; if (initializationUri != null) { // It's common for initialization and index data to be stored adjacently. Attempt to merge // the two requests together to request both at once. - requestUri = initializationUri.attemptMerge(indexUri, baseUrl); + requestUri = + initializationUri.attemptMerge(indexUri, representationHolder.selectedBaseUrl.url); if (requestUri == null) { requestUri = initializationUri; } } else { requestUri = indexUri; } - DataSpec dataSpec = new DataSpec(requestUri.resolveUri(baseUrl), requestUri.start, - requestUri.length, representationHolder.representation.getCacheKey()); - return new InitializationChunk(dataSource, dataSpec, trackFormat, - trackSelectionReason, trackSelectionData, representationHolder.extractorWrapper); + DataSpec dataSpec = + DashUtil.buildDataSpec( + representation, representationHolder.selectedBaseUrl.url, requestUri, /* flags= */ 0); + return new InitializationChunk( + dataSource, + dataSpec, + trackFormat, + trackSelectionReason, + trackSelectionData, + representationHolder.chunkExtractor); } protected Chunk newMediaChunk( RepresentationHolder representationHolder, DataSource dataSource, - int trackType, + @C.TrackType int trackType, Format trackFormat, - int trackSelectionReason, + @C.SelectionReason int trackSelectionReason, Object trackSelectionData, long firstSegmentNum, int maxSegmentCount, - long seekTimeUs) { + long seekTimeUs, + long nowPeriodTimeUs) { Representation representation = representationHolder.representation; long startTimeUs = representationHolder.getSegmentStartTimeUs(firstSegmentNum); RangedUri segmentUri = representationHolder.getSegmentUrl(firstSegmentNum); - String baseUrl = representation.baseUrl; - if (representationHolder.extractorWrapper == null) { + if (representationHolder.chunkExtractor == null) { long endTimeUs = representationHolder.getSegmentEndTimeUs(firstSegmentNum); - DataSpec dataSpec = new DataSpec(segmentUri.resolveUri(baseUrl), - segmentUri.start, segmentUri.length, representation.getCacheKey()); - return new SingleSampleMediaChunk(dataSource, dataSpec, trackFormat, trackSelectionReason, - trackSelectionData, startTimeUs, endTimeUs, firstSegmentNum, trackType, trackFormat); + int flags = + representationHolder.isSegmentAvailableAtFullNetworkSpeed( + firstSegmentNum, nowPeriodTimeUs) + ? 
0 + : DataSpec.FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED; + DataSpec dataSpec = + DashUtil.buildDataSpec( + representation, representationHolder.selectedBaseUrl.url, segmentUri, flags); + return new SingleSampleMediaChunk( + dataSource, + dataSpec, + trackFormat, + trackSelectionReason, + trackSelectionData, + startTimeUs, + endTimeUs, + firstSegmentNum, + trackType, + trackFormat); } else { int segmentCount = 1; for (int i = 1; i < maxSegmentCount; i++) { RangedUri nextSegmentUri = representationHolder.getSegmentUrl(firstSegmentNum + i); - RangedUri mergedSegmentUri = segmentUri.attemptMerge(nextSegmentUri, baseUrl); + @Nullable + RangedUri mergedSegmentUri = + segmentUri.attemptMerge(nextSegmentUri, representationHolder.selectedBaseUrl.url); if (mergedSegmentUri == null) { // Unable to merge segment fetches because the URIs do not merge. break; @@ -545,14 +692,20 @@ protected Chunk newMediaChunk( segmentUri = mergedSegmentUri; segmentCount++; } - long endTimeUs = representationHolder.getSegmentEndTimeUs(firstSegmentNum + segmentCount - 1); + long segmentNum = firstSegmentNum + segmentCount - 1; + long endTimeUs = representationHolder.getSegmentEndTimeUs(segmentNum); long periodDurationUs = representationHolder.periodDurationUs; long clippedEndTimeUs = periodDurationUs != C.TIME_UNSET && periodDurationUs <= endTimeUs ? periodDurationUs : C.TIME_UNSET; - DataSpec dataSpec = new DataSpec(segmentUri.resolveUri(baseUrl), - segmentUri.start, segmentUri.length, representation.getCacheKey()); + int flags = + representationHolder.isSegmentAvailableAtFullNetworkSpeed(segmentNum, nowPeriodTimeUs) + ? 0 + : DataSpec.FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED; + DataSpec dataSpec = + DashUtil.buildDataSpec( + representation, representationHolder.selectedBaseUrl.url, segmentUri, flags); long sampleOffsetUs = -representation.presentationTimeOffsetUs; return new ContainerMediaChunk( dataSource, @@ -567,16 +720,29 @@ protected Chunk newMediaChunk( firstSegmentNum, segmentCount, sampleOffsetUs, - representationHolder.extractorWrapper); + representationHolder.chunkExtractor); } } + private RepresentationHolder updateSelectedBaseUrl(int trackIndex) { + RepresentationHolder representationHolder = representationHolders[trackIndex]; + @Nullable + BaseUrl selectedBaseUrl = + baseUrlExclusionList.selectBaseUrl(representationHolder.representation.baseUrls); + if (selectedBaseUrl != null && !selectedBaseUrl.equals(representationHolder.selectedBaseUrl)) { + representationHolder = representationHolder.copyWithNewSelectedBaseUrl(selectedBaseUrl); + representationHolders[trackIndex] = representationHolder; + } + return representationHolder; + } + // Protected classes. /** {@link MediaChunkIterator} wrapping a {@link RepresentationHolder}. */ protected static final class RepresentationSegmentIterator extends BaseMediaChunkIterator { private final RepresentationHolder representationHolder; + private final long nowPeriodTimeUs; /** * Creates iterator. @@ -584,23 +750,33 @@ protected static final class RepresentationSegmentIterator extends BaseMediaChun * @param representation The {@link RepresentationHolder} to wrap. * @param firstAvailableSegmentNum The number of the first available segment. * @param lastAvailableSegmentNum The number of the last available segment. + * @param nowPeriodTimeUs The current time in microseconds since the start of the period used + * for calculating if segments are available at full network speed. 
*/ public RepresentationSegmentIterator( RepresentationHolder representation, long firstAvailableSegmentNum, - long lastAvailableSegmentNum) { + long lastAvailableSegmentNum, + long nowPeriodTimeUs) { super(/* fromIndex= */ firstAvailableSegmentNum, /* toIndex= */ lastAvailableSegmentNum); this.representationHolder = representation; + this.nowPeriodTimeUs = nowPeriodTimeUs; } @Override public DataSpec getDataSpec() { checkInBounds(); - Representation representation = representationHolder.representation; - RangedUri segmentUri = representationHolder.getSegmentUrl(getCurrentIndex()); - Uri resolvedUri = segmentUri.resolveUri(representation.baseUrl); - String cacheKey = representation.getCacheKey(); - return new DataSpec(resolvedUri, segmentUri.start, segmentUri.length, cacheKey); + long currentIndex = getCurrentIndex(); + RangedUri segmentUri = representationHolder.getSegmentUrl(currentIndex); + int flags = + representationHolder.isSegmentAvailableAtFullNetworkSpeed(currentIndex, nowPeriodTimeUs) + ? 0 + : DataSpec.FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED; + return DashUtil.buildDataSpec( + representationHolder.representation, + representationHolder.selectedBaseUrl.url, + segmentUri, + flags); } @Override @@ -619,44 +795,27 @@ public long getChunkEndTimeUs() { /** Holds information about a snapshot of a single {@link Representation}. */ protected static final class RepresentationHolder { - /* package */ final @Nullable ChunkExtractorWrapper extractorWrapper; + @Nullable /* package */ final ChunkExtractor chunkExtractor; public final Representation representation; + public final BaseUrl selectedBaseUrl; @Nullable public final DashSegmentIndex segmentIndex; private final long periodDurationUs; private final long segmentNumShift; /* package */ RepresentationHolder( - long periodDurationUs, - int trackType, - Representation representation, - boolean enableEventMessageTrack, - List closedCaptionFormats, - @Nullable TrackOutput playerEmsgTrackOutput) { - this( - periodDurationUs, - representation, - createExtractorWrapper( - trackType, - representation, - enableEventMessageTrack, - closedCaptionFormats, - playerEmsgTrackOutput), - /* segmentNumShift= */ 0, - representation.getIndex()); - } - - private RepresentationHolder( long periodDurationUs, Representation representation, - @Nullable ChunkExtractorWrapper extractorWrapper, + BaseUrl selectedBaseUrl, + @Nullable ChunkExtractor chunkExtractor, long segmentNumShift, @Nullable DashSegmentIndex segmentIndex) { this.periodDurationUs = periodDurationUs; this.representation = representation; + this.selectedBaseUrl = selectedBaseUrl; this.segmentNumShift = segmentNumShift; - this.extractorWrapper = extractorWrapper; + this.chunkExtractor = chunkExtractor; this.segmentIndex = segmentIndex; } @@ -664,26 +823,41 @@ private RepresentationHolder( /* package */ RepresentationHolder copyWithNewRepresentation( long newPeriodDurationUs, Representation newRepresentation) throws BehindLiveWindowException { - DashSegmentIndex oldIndex = representation.getIndex(); - DashSegmentIndex newIndex = newRepresentation.getIndex(); + @Nullable DashSegmentIndex oldIndex = representation.getIndex(); + @Nullable DashSegmentIndex newIndex = newRepresentation.getIndex(); if (oldIndex == null) { // Segment numbers cannot shift if the index isn't defined by the manifest. 
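The iterator and chunk-building hunks above thread a nowPeriodTimeUs value into every DataSpec so that segments whose end time still lies in the future are flagged as possibly not downloadable at full network speed. Below is a minimal standalone sketch of that decision only, using plain-Java stand-ins (TIME_UNSET, the flag constant and the method names are illustrative, not ExoPlayer's own definitions):

// Illustrative sketch: mirrors the flag decision added in this diff, not the ExoPlayer API.
final class SegmentSpeedFlagSketch {
  static final long TIME_UNSET = Long.MIN_VALUE + 1;           // stand-in for C.TIME_UNSET
  static final int FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED = 1;  // stand-in for the DataSpec flag

  // A segment counts as "available at full network speed" if there is no period clock yet,
  // or if the segment has already finished being produced (its end time is in the past).
  static boolean availableAtFullSpeed(long segmentEndTimeUs, long nowPeriodTimeUs, boolean explicitIndex) {
    if (explicitIndex) {
      return true; // explicit indices are assumed complete, as in the hunk above
    }
    return nowPeriodTimeUs == TIME_UNSET || segmentEndTimeUs <= nowPeriodTimeUs;
  }

  static int dataSpecFlags(long segmentEndTimeUs, long nowPeriodTimeUs, boolean explicitIndex) {
    return availableAtFullSpeed(segmentEndTimeUs, nowPeriodTimeUs, explicitIndex)
        ? 0
        : FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED;
  }

  public static void main(String[] args) {
    // Segment ending at 12s while the period clock reads 10s: still being produced, flag it.
    System.out.println(dataSpecFlags(12_000_000L, 10_000_000L, false)); // 1
    // Segment ending at 8s: fully available, no flag.
    System.out.println(dataSpecFlags(8_000_000L, 10_000_000L, false));  // 0
  }
}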
return new RepresentationHolder( - newPeriodDurationUs, newRepresentation, extractorWrapper, segmentNumShift, oldIndex); + newPeriodDurationUs, + newRepresentation, + selectedBaseUrl, + chunkExtractor, + segmentNumShift, + oldIndex); } if (!oldIndex.isExplicit()) { // Segment numbers cannot shift if the index isn't explicit. return new RepresentationHolder( - newPeriodDurationUs, newRepresentation, extractorWrapper, segmentNumShift, newIndex); + newPeriodDurationUs, + newRepresentation, + selectedBaseUrl, + chunkExtractor, + segmentNumShift, + newIndex); } - int oldIndexSegmentCount = oldIndex.getSegmentCount(newPeriodDurationUs); + long oldIndexSegmentCount = oldIndex.getSegmentCount(newPeriodDurationUs); if (oldIndexSegmentCount == 0) { // Segment numbers cannot shift if the old index was empty. return new RepresentationHolder( - newPeriodDurationUs, newRepresentation, extractorWrapper, segmentNumShift, newIndex); + newPeriodDurationUs, + newRepresentation, + selectedBaseUrl, + chunkExtractor, + segmentNumShift, + newIndex); } long oldIndexFirstSegmentNum = oldIndex.getFirstSegmentNum(); @@ -715,20 +889,46 @@ private RepresentationHolder( - newIndexFirstSegmentNum; } return new RepresentationHolder( - newPeriodDurationUs, newRepresentation, extractorWrapper, newSegmentNumShift, newIndex); + newPeriodDurationUs, + newRepresentation, + selectedBaseUrl, + chunkExtractor, + newSegmentNumShift, + newIndex); } @CheckResult /* package */ RepresentationHolder copyWithNewSegmentIndex(DashSegmentIndex segmentIndex) { return new RepresentationHolder( - periodDurationUs, representation, extractorWrapper, segmentNumShift, segmentIndex); + periodDurationUs, + representation, + selectedBaseUrl, + chunkExtractor, + segmentNumShift, + segmentIndex); + } + + @CheckResult + /* package */ RepresentationHolder copyWithNewSelectedBaseUrl(BaseUrl selectedBaseUrl) { + return new RepresentationHolder( + periodDurationUs, + representation, + selectedBaseUrl, + chunkExtractor, + segmentNumShift, + segmentIndex); } public long getFirstSegmentNum() { return segmentIndex.getFirstSegmentNum() + segmentNumShift; } - public int getSegmentCount() { + public long getFirstAvailableSegmentNum(long nowUnixTimeUs) { + return segmentIndex.getFirstAvailableSegmentNum(periodDurationUs, nowUnixTimeUs) + + segmentNumShift; + } + + public long getSegmentCount() { return segmentIndex.getSegmentCount(periodDurationUs); } @@ -749,76 +949,20 @@ public RangedUri getSegmentUrl(long segmentNum) { return segmentIndex.getSegmentUrl(segmentNum - segmentNumShift); } - public long getFirstAvailableSegmentNum( - DashManifest manifest, int periodIndex, long nowUnixTimeUs) { - if (getSegmentCount() == DashSegmentIndex.INDEX_UNBOUNDED - && manifest.timeShiftBufferDepthMs != C.TIME_UNSET) { - // The index is itself unbounded. We need to use the current time to calculate the range of - // available segments. 
- long liveEdgeTimeUs = nowUnixTimeUs - C.msToUs(manifest.availabilityStartTimeMs); - long periodStartUs = C.msToUs(manifest.getPeriod(periodIndex).startMs); - long liveEdgeTimeInPeriodUs = liveEdgeTimeUs - periodStartUs; - long bufferDepthUs = C.msToUs(manifest.timeShiftBufferDepthMs); - return Math.max( - getFirstSegmentNum(), getSegmentNum(liveEdgeTimeInPeriodUs - bufferDepthUs)); - } - return getFirstSegmentNum(); - } - - public long getLastAvailableSegmentNum( - DashManifest manifest, int periodIndex, long nowUnixTimeUs) { - int availableSegmentCount = getSegmentCount(); - if (availableSegmentCount == DashSegmentIndex.INDEX_UNBOUNDED) { - // The index is itself unbounded. We need to use the current time to calculate the range of - // available segments. - long liveEdgeTimeUs = nowUnixTimeUs - C.msToUs(manifest.availabilityStartTimeMs); - long periodStartUs = C.msToUs(manifest.getPeriod(periodIndex).startMs); - long liveEdgeTimeInPeriodUs = liveEdgeTimeUs - periodStartUs; - // getSegmentNum(liveEdgeTimeInPeriodUs) will not be completed yet, so subtract one to get - // the index of the last completed segment. - return getSegmentNum(liveEdgeTimeInPeriodUs) - 1; - } - return getFirstSegmentNum() + availableSegmentCount - 1; - } - - private static boolean mimeTypeIsWebm(String mimeType) { - return mimeType.startsWith(MimeTypes.VIDEO_WEBM) || mimeType.startsWith(MimeTypes.AUDIO_WEBM) - || mimeType.startsWith(MimeTypes.APPLICATION_WEBM); + public long getLastAvailableSegmentNum(long nowUnixTimeUs) { + return getFirstAvailableSegmentNum(nowUnixTimeUs) + + segmentIndex.getAvailableSegmentCount(periodDurationUs, nowUnixTimeUs) + - 1; } - private static boolean mimeTypeIsRawText(String mimeType) { - return MimeTypes.isText(mimeType) || MimeTypes.APPLICATION_TTML.equals(mimeType); - } - - private static @Nullable ChunkExtractorWrapper createExtractorWrapper( - int trackType, - Representation representation, - boolean enableEventMessageTrack, - List closedCaptionFormats, - @Nullable TrackOutput playerEmsgTrackOutput) { - String containerMimeType = representation.format.containerMimeType; - if (mimeTypeIsRawText(containerMimeType)) { - return null; - } - Extractor extractor; - if (MimeTypes.APPLICATION_RAWCC.equals(containerMimeType)) { - extractor = new RawCcExtractor(representation.format); - } else if (mimeTypeIsWebm(containerMimeType)) { - extractor = new MatroskaExtractor(MatroskaExtractor.FLAG_DISABLE_SEEK_FOR_CUES); - } else { - int flags = 0; - if (enableEventMessageTrack) { - flags |= FragmentedMp4Extractor.FLAG_ENABLE_EMSG_TRACK; - } - extractor = - new FragmentedMp4Extractor( - flags, - /* timestampAdjuster= */ null, - /* sideloadedTrack= */ null, - closedCaptionFormats, - playerEmsgTrackOutput); + public boolean isSegmentAvailableAtFullNetworkSpeed(long segmentNum, long nowPeriodTimeUs) { + if (segmentIndex.isExplicit()) { + // We don't support segment availability for explicit indices (internal ref: b/172894901). + // Hence, also assume all segments in explicit indices are always available at full network + // speed even if they end in the future. 
+ return true; } - return new ChunkExtractorWrapper(extractor, trackType, representation.format); + return nowPeriodTimeUs == C.TIME_UNSET || getSegmentEndTimeUs(segmentNum) <= nowPeriodTimeUs; } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/EventSampleStream.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/EventSampleStream.java index 6e67be6ec5..47b0eddd65 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/EventSampleStream.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/EventSampleStream.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.source.dash; +import static java.lang.Math.max; + import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.FormatHolder; @@ -27,8 +29,8 @@ import java.io.IOException; /** - * A {@link SampleStream} consisting of serialized {@link EventMessage}s read from an - * {@link EventStream}. + * A {@link SampleStream} consisting of serialized {@link EventMessage}s read from an {@link + * EventStream}. */ /* package */ final class EventSampleStream implements SampleStream { @@ -96,41 +98,41 @@ public void maybeThrowError() throws IOException { } @Override - public int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, - boolean formatRequired) { - if (formatRequired || !isFormatSentDownstream) { + public int readData( + FormatHolder formatHolder, DecoderInputBuffer buffer, @ReadFlags int readFlags) { + boolean noMoreEventsInStream = currentIndex == eventTimesUs.length; + if (noMoreEventsInStream && !eventStreamAppendable) { + buffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM); + return C.RESULT_BUFFER_READ; + } + if ((readFlags & FLAG_REQUIRE_FORMAT) != 0 || !isFormatSentDownstream) { formatHolder.format = upstreamFormat; isFormatSentDownstream = true; return C.RESULT_FORMAT_READ; } - if (currentIndex == eventTimesUs.length) { - if (!eventStreamAppendable) { - buffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM); - return C.RESULT_BUFFER_READ; - } else { - return C.RESULT_NOTHING_READ; - } + if (noMoreEventsInStream) { + // More events may be appended later. 
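The reworked EventSampleStream.readData above replaces the old boolean formatRequired parameter with a read-flags bitmask (require-format, peek, omit-sample-data). A simplified standalone sketch of those semantics follows; the flag values, names and return strings are stand-ins, and the end-of-stream branch for non-appendable streams is omitted:

// Illustrative sketch: a reduced reader honoring peek / omit-data read flags.
final class EventReaderSketch {
  static final int FLAG_REQUIRE_FORMAT = 1;         // caller wants the format delivered again
  static final int FLAG_PEEK = 1 << 1;              // read without advancing
  static final int FLAG_OMIT_SAMPLE_DATA = 1 << 2;  // metadata only, skip the payload

  private final long[] eventTimesUs = {0L, 2_000_000L, 4_000_000L};
  private int currentIndex;
  private boolean formatSent;

  /** Returns a description of what a real readData call would have produced. */
  String read(int readFlags) {
    if ((readFlags & FLAG_REQUIRE_FORMAT) != 0 || !formatSent) {
      formatSent = true;
      return "FORMAT_READ";
    }
    if (currentIndex == eventTimesUs.length) {
      return "NOTHING_READ (more events may still be appended)";
    }
    int sampleIndex = currentIndex;
    if ((readFlags & FLAG_PEEK) == 0) {
      currentIndex++; // only a non-peek read consumes the sample
    }
    boolean withData = (readFlags & FLAG_OMIT_SAMPLE_DATA) == 0;
    return "BUFFER_READ t=" + eventTimesUs[sampleIndex] + (withData ? " +payload" : " metadata-only");
  }

  public static void main(String[] args) {
    EventReaderSketch reader = new EventReaderSketch();
    System.out.println(reader.read(0));          // FORMAT_READ
    System.out.println(reader.read(FLAG_PEEK));  // BUFFER_READ t=0 +payload (not consumed)
    System.out.println(reader.read(0));          // BUFFER_READ t=0 +payload (now consumed)
  }
}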
+ return C.RESULT_NOTHING_READ; + } + int sampleIndex = currentIndex; + if ((readFlags & SampleStream.FLAG_PEEK) == 0) { + currentIndex++; } - int sampleIndex = currentIndex++; - byte[] serializedEvent = eventMessageEncoder.encode(eventStream.events[sampleIndex]); - if (serializedEvent != null) { + if ((readFlags & SampleStream.FLAG_OMIT_SAMPLE_DATA) == 0) { + byte[] serializedEvent = eventMessageEncoder.encode(eventStream.events[sampleIndex]); buffer.ensureSpaceForWrite(serializedEvent.length); buffer.data.put(serializedEvent); - buffer.timeUs = eventTimesUs[sampleIndex]; - buffer.setFlags(C.BUFFER_FLAG_KEY_FRAME); - return C.RESULT_BUFFER_READ; - } else { - return C.RESULT_NOTHING_READ; } + buffer.timeUs = eventTimesUs[sampleIndex]; + buffer.setFlags(C.BUFFER_FLAG_KEY_FRAME); + return C.RESULT_BUFFER_READ; } @Override public int skipData(long positionUs) { - int newIndex = - Math.max(currentIndex, Util.binarySearchCeil(eventTimesUs, positionUs, true, false)); + int newIndex = max(currentIndex, Util.binarySearchCeil(eventTimesUs, positionUs, true, false)); int skipped = newIndex - currentIndex; currentIndex = newIndex; return skipped; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/PlayerEmsgHandler.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/PlayerEmsgHandler.java index 187baad76b..f60bd0563e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/PlayerEmsgHandler.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/PlayerEmsgHandler.java @@ -24,8 +24,6 @@ import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.ParserException; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.TrackOutput; import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.metadata.MetadataInputBuffer; @@ -35,6 +33,7 @@ import com.google.android.exoplayer2.source.chunk.Chunk; import com.google.android.exoplayer2.source.dash.manifest.DashManifest; import com.google.android.exoplayer2.upstream.Allocator; +import com.google.android.exoplayer2.upstream.DataReader; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; import java.io.IOException; @@ -86,8 +85,7 @@ public interface PlayerEmsgCallback { private DashManifest manifest; private long expiredManifestPublishTimeUs; - private long lastLoadedChunkEndTimeUs; - private long lastLoadedChunkEndTimeBeforeRefreshUs; + private boolean chunkLoadedCompletedSinceLastManifestRefreshRequest; private boolean isWaitingForManifestRefresh; private boolean released; @@ -104,10 +102,8 @@ public PlayerEmsgHandler( this.allocator = allocator; manifestPublishTimeToExpiryTimeUs = new TreeMap<>(); - handler = Util.createHandler(/* callback= */ this); + handler = Util.createHandlerForCurrentLooper(/* callback= */ this); decoder = new EventMessageDecoder(); - lastLoadedChunkEndTimeUs = C.TIME_UNSET; - lastLoadedChunkEndTimeBeforeRefreshUs = C.TIME_UNSET; } /** @@ -122,6 +118,36 @@ public void updateManifest(DashManifest newManifest) { removePreviouslyExpiredManifestPublishTimeValues(); } + /** Returns a {@link TrackOutput} that emsg messages could be written to. 
*/ + public PlayerTrackEmsgHandler newPlayerTrackEmsgHandler() { + return new PlayerTrackEmsgHandler(allocator); + } + + /** Release this emsg handler. It should not be reused after this call. */ + public void release() { + released = true; + handler.removeCallbacksAndMessages(null); + } + + @Override + public boolean handleMessage(Message message) { + if (released) { + return true; + } + switch (message.what) { + case EMSG_MANIFEST_EXPIRED: + ManifestExpiryEventInfo messageObj = (ManifestExpiryEventInfo) message.obj; + handleManifestExpiredMessage( + messageObj.eventTimeUs, messageObj.manifestPublishTimeMsInEmsg); + return true; + default: + // Do nothing. + } + return false; + } + + // Internal methods. + /* package */ boolean maybeRefreshManifestBeforeLoadingNextChunk(long presentationPositionUs) { if (!manifest.dynamic) { return false; @@ -147,83 +173,27 @@ public void updateManifest(DashManifest newManifest) { return manifestRefreshNeeded; } - /** - * For live streaming with emsg event stream, forward seeking can seek pass the emsg messages that - * signals end-of-stream or Manifest expiry, which results in load error. In this case, we should - * notify the Dash media source to refresh its manifest. - * - * @param chunk The chunk whose load encountered the error. - * @return True if manifest refresh has been requested, false otherwise. - */ - /* package */ boolean maybeRefreshManifestOnLoadingError(Chunk chunk) { + /* package */ void onChunkLoadCompleted(Chunk chunk) { + chunkLoadedCompletedSinceLastManifestRefreshRequest = true; + } + + /* package */ boolean onChunkLoadError(boolean isForwardSeek) { if (!manifest.dynamic) { return false; } if (isWaitingForManifestRefresh) { return true; } - boolean isAfterForwardSeek = - lastLoadedChunkEndTimeUs != C.TIME_UNSET && lastLoadedChunkEndTimeUs < chunk.startTimeUs; - if (isAfterForwardSeek) { - // if we are after a forward seek, and the playback is dynamic with embedded emsg stream, - // there's a chance that we have seek over the emsg messages, in which case we should ask - // media source for a refresh. + if (isForwardSeek) { + // If a forward seek has occurred, there's a chance that the seek has skipped EMSGs signalling + // end-of-stream or manifest expiration. We must assume that the manifest might need to be + // refreshed. maybeNotifyDashManifestRefreshNeeded(); return true; } return false; } - /** - * Called when the a new chunk in the current media stream has been loaded. - * - * @param chunk The chunk whose load has been completed. - */ - /* package */ void onChunkLoadCompleted(Chunk chunk) { - if (lastLoadedChunkEndTimeUs != C.TIME_UNSET || chunk.endTimeUs > lastLoadedChunkEndTimeUs) { - lastLoadedChunkEndTimeUs = chunk.endTimeUs; - } - } - - /** - * Returns whether an event with given schemeIdUri and value is a DASH emsg event targeting the - * player. - */ - public static boolean isPlayerEmsgEvent(String schemeIdUri, String value) { - return "urn:mpeg:dash:event:2012".equals(schemeIdUri) - && ("1".equals(value) || "2".equals(value) || "3".equals(value)); - } - - /** Returns a {@link TrackOutput} that emsg messages could be written to. */ - public PlayerTrackEmsgHandler newPlayerTrackEmsgHandler() { - return new PlayerTrackEmsgHandler(allocator); - } - - /** Release this emsg handler. It should not be reused after this call. 
*/ - public void release() { - released = true; - handler.removeCallbacksAndMessages(null); - } - - @Override - public boolean handleMessage(Message message) { - if (released) { - return true; - } - switch (message.what) { - case (EMSG_MANIFEST_EXPIRED): - ManifestExpiryEventInfo messageObj = (ManifestExpiryEventInfo) message.obj; - handleManifestExpiredMessage( - messageObj.eventTimeUs, messageObj.manifestPublishTimeMsInEmsg); - return true; - default: - // Do nothing. - } - return false; - } - - // Internal methods. - private void handleManifestExpiredMessage(long eventTimeUs, long manifestPublishTimeMsInEmsg) { Long previousExpiryTimeUs = manifestPublishTimeToExpiryTimeUs.get(manifestPublishTimeMsInEmsg); if (previousExpiryTimeUs == null) { @@ -235,7 +205,8 @@ private void handleManifestExpiredMessage(long eventTimeUs, long manifestPublish } } - private @Nullable Map.Entry ceilingExpiryEntryForPublishTime(long publishTimeMs) { + @Nullable + private Map.Entry ceilingExpiryEntryForPublishTime(long publishTimeMs) { return manifestPublishTimeToExpiryTimeUs.ceilingEntry(publishTimeMs); } @@ -257,13 +228,12 @@ private void notifyManifestPublishTimeExpired() { /** Requests DASH media manifest to be refreshed if necessary. */ private void maybeNotifyDashManifestRefreshNeeded() { - if (lastLoadedChunkEndTimeBeforeRefreshUs != C.TIME_UNSET - && lastLoadedChunkEndTimeBeforeRefreshUs == lastLoadedChunkEndTimeUs) { - // Already requested manifest refresh. + if (!chunkLoadedCompletedSinceLastManifestRefreshRequest) { + // Don't request a refresh unless some progress has been made. return; } isWaitingForManifestRefresh = true; - lastLoadedChunkEndTimeBeforeRefreshUs = lastLoadedChunkEndTimeUs; + chunkLoadedCompletedSinceLastManifestRefreshRequest = false; playerEmsgCallback.onDashManifestRefreshRequested(); } @@ -276,6 +246,15 @@ private static long getManifestPublishTimeMsInEmsg(EventMessage eventMessage) { } } + /** + * Returns whether an event with given schemeIdUri and value is a DASH emsg event targeting the + * player. + */ + private static boolean isPlayerEmsgEvent(String schemeIdUri, String value) { + return "urn:mpeg:dash:event:2012".equals(schemeIdUri) + && ("1".equals(value) || "2".equals(value) || "3".equals(value)); + } + /** Handles emsg messages for a specific track for the player. 
*/ public final class PlayerTrackEmsgHandler implements TrackOutput { @@ -283,13 +262,13 @@ public final class PlayerTrackEmsgHandler implements TrackOutput { private final FormatHolder formatHolder; private final MetadataInputBuffer buffer; + private long maxLoadedChunkEndTimeUs; + /* package */ PlayerTrackEmsgHandler(Allocator allocator) { - this.sampleQueue = new SampleQueue( - allocator, - /* playbackLooper= */ handler.getLooper(), - DrmSessionManager.getDummyDrmSessionManager()); + this.sampleQueue = SampleQueue.createWithoutDrm(allocator); formatHolder = new FormatHolder(); buffer = new MetadataInputBuffer(); + maxLoadedChunkEndTimeUs = C.TIME_UNSET; } @Override @@ -298,20 +277,21 @@ public void format(Format format) { } @Override - public int sampleData(ExtractorInput input, int length, boolean allowEndOfInput) - throws IOException, InterruptedException { + public int sampleData( + DataReader input, int length, boolean allowEndOfInput, @SampleDataPart int sampleDataPart) + throws IOException { return sampleQueue.sampleData(input, length, allowEndOfInput); } @Override - public void sampleData(ParsableByteArray data, int length) { + public void sampleData(ParsableByteArray data, int length, @SampleDataPart int sampleDataPart) { sampleQueue.sampleData(data, length); } @Override public void sampleMetadata( - long timeUs, int flags, int size, int offset, @Nullable CryptoData encryptionData) { - sampleQueue.sampleMetadata(timeUs, flags, size, offset, encryptionData); + long timeUs, int flags, int size, int offset, @Nullable CryptoData cryptoData) { + sampleQueue.sampleMetadata(timeUs, flags, size, offset, cryptoData); parseAndDiscardSamples(); } @@ -328,24 +308,27 @@ public boolean maybeRefreshManifestBeforeLoadingNextChunk(long presentationPosit } /** - * Called when the a new chunk in the current media stream has been loaded. + * Called when a chunk load has been completed. * * @param chunk The chunk whose load has been completed. */ public void onChunkLoadCompleted(Chunk chunk) { + if (maxLoadedChunkEndTimeUs == C.TIME_UNSET || chunk.endTimeUs > maxLoadedChunkEndTimeUs) { + maxLoadedChunkEndTimeUs = chunk.endTimeUs; + } PlayerEmsgHandler.this.onChunkLoadCompleted(chunk); } /** - * For live streaming with emsg event stream, forward seeking can seek pass the emsg messages - * that signals end-of-stream or Manifest expiry, which results in load error. In this case, we - * should notify the Dash media source to refresh its manifest. + * Called when a chunk load has encountered an error. * - * @param chunk The chunk whose load encountered the error. - * @return True if manifest refresh has been requested, false otherwise. + * @param chunk The chunk whose load encountered an error. + * @return Whether a manifest refresh has been requested. */ - public boolean maybeRefreshManifestOnLoadingError(Chunk chunk) { - return PlayerEmsgHandler.this.maybeRefreshManifestOnLoadingError(chunk); + public boolean onChunkLoadError(Chunk chunk) { + boolean isAfterForwardSeek = + maxLoadedChunkEndTimeUs != C.TIME_UNSET && maxLoadedChunkEndTimeUs < chunk.startTimeUs; + return PlayerEmsgHandler.this.onChunkLoadError(isAfterForwardSeek); } /** Release this track emsg handler. It should not be reused after this call. 
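The PlayerEmsgHandler changes above drop the two end-time fields in favour of a per-track maxLoadedChunkEndTimeUs plus a single "progress since last refresh request" boolean. The sketch below condenses that gating into one class so the flow is easier to follow; it collapses the dynamic-manifest and already-waiting checks and reports only whether a refresh was actually requested, so the names and return value are illustrative rather than the library's behaviour:

// Illustrative sketch of the refresh gating introduced in this diff.
final class EmsgRefreshGateSketch {
  static final long TIME_UNSET = Long.MIN_VALUE + 1;

  private long maxLoadedChunkEndTimeUs = TIME_UNSET;
  private boolean chunkCompletedSinceLastRequest;

  void onChunkLoadCompleted(long chunkEndTimeUs) {
    if (maxLoadedChunkEndTimeUs == TIME_UNSET || chunkEndTimeUs > maxLoadedChunkEndTimeUs) {
      maxLoadedChunkEndTimeUs = chunkEndTimeUs;
    }
    chunkCompletedSinceLastRequest = true;
  }

  boolean onChunkLoadError(long failedChunkStartTimeUs) {
    // A failed chunk that starts after everything loaded so far implies a forward seek,
    // which may have skipped emsg samples signalling end-of-stream or manifest expiry.
    boolean isAfterForwardSeek =
        maxLoadedChunkEndTimeUs != TIME_UNSET && maxLoadedChunkEndTimeUs < failedChunkStartTimeUs;
    if (isAfterForwardSeek && chunkCompletedSinceLastRequest) {
      chunkCompletedSinceLastRequest = false; // don't ask again until more progress is made
      return true; // a manifest refresh would be requested here
    }
    return false;
  }

  public static void main(String[] args) {
    EmsgRefreshGateSketch gate = new EmsgRefreshGateSketch();
    gate.onChunkLoadCompleted(10_000_000L);
    System.out.println(gate.onChunkLoadError(20_000_000L)); // true: error after a forward seek
    System.out.println(gate.onChunkLoadError(20_000_000L)); // false: no progress since last request
  }
}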
*/ @@ -357,12 +340,15 @@ public void release() { private void parseAndDiscardSamples() { while (sampleQueue.isReady(/* loadingFinished= */ false)) { - MetadataInputBuffer inputBuffer = dequeueSample(); + @Nullable MetadataInputBuffer inputBuffer = dequeueSample(); if (inputBuffer == null) { continue; } long eventTimeUs = inputBuffer.timeUs; - Metadata metadata = decoder.decode(inputBuffer); + @Nullable Metadata metadata = decoder.decode(inputBuffer); + if (metadata == null) { + continue; + } EventMessage eventMessage = (EventMessage) metadata.get(0); if (isPlayerEmsgEvent(eventMessage.schemeIdUri, eventMessage.value)) { parsePlayerEmsgEvent(eventTimeUs, eventMessage); @@ -375,12 +361,7 @@ private void parseAndDiscardSamples() { private MetadataInputBuffer dequeueSample() { buffer.clear(); int result = - sampleQueue.read( - formatHolder, - buffer, - /* formatRequired= */ false, - /* loadingFinished= */ false, - /* decodeOnlyUntilUs= */ 0); + sampleQueue.read(formatHolder, buffer, /* readFlags= */ 0, /* loadingFinished= */ false); if (result == C.RESULT_BUFFER_READ) { buffer.flip(); return buffer; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/AdaptationSet.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/AdaptationSet.java index b0689eeb11..26c52697be 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/AdaptationSet.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/AdaptationSet.java @@ -15,17 +15,14 @@ */ package com.google.android.exoplayer2.source.dash.manifest; +import com.google.android.exoplayer2.C; import java.util.Collections; import java.util.List; -/** - * Represents a set of interchangeable encoded versions of a media content component. - */ +/** Represents a set of interchangeable encoded versions of a media content component. */ public class AdaptationSet { - /** - * Value of {@link #id} indicating no value is set.= - */ + /** Value of {@link #id} indicating no value is set.= */ public static final int ID_UNSET = -1; /** @@ -34,20 +31,13 @@ public class AdaptationSet { */ public final int id; - /** - * The type of the adaptation set. One of the {@link com.google.android.exoplayer2.C} - * {@code TRACK_TYPE_*} constants. - */ - public final int type; + /** The {@link C.TrackType track type} of the adaptation set. */ + public final @C.TrackType int type; - /** - * {@link Representation}s in the adaptation set. - */ + /** {@link Representation}s in the adaptation set. */ public final List representations; - /** - * Accessibility descriptors in the adaptation set. - */ + /** Accessibility descriptors in the adaptation set. */ public final List accessibilityDescriptors; /** Essential properties in the adaptation set. */ @@ -59,8 +49,7 @@ public class AdaptationSet { /** * @param id A non-negative identifier for the adaptation set that's unique in the scope of its * containing period, or {@link #ID_UNSET} if not specified. - * @param type The type of the adaptation set. One of the {@link com.google.android.exoplayer2.C} - * {@code TRACK_TYPE_*} constants. + * @param type The {@link C.TrackType track type} of the adaptation set. * @param representations {@link Representation}s in the adaptation set. * @param accessibilityDescriptors Accessibility descriptors in the adaptation set. * @param essentialProperties Essential properties in the adaptation set. 
@@ -68,7 +57,7 @@ public class AdaptationSet { */ public AdaptationSet( int id, - int type, + @C.TrackType int type, List representations, List accessibilityDescriptors, List essentialProperties, diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/BaseUrl.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/BaseUrl.java new file mode 100644 index 0000000000..5cf79e7fc4 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/BaseUrl.java @@ -0,0 +1,75 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.dash.manifest; + +import androidx.annotation.Nullable; +import com.google.common.base.Objects; + +/** A base URL, as defined by ISO 23009-1, 2nd edition, 5.6. and ETSI TS 103 285 V1.2.1, 10.8.2.1 */ +public final class BaseUrl { + + /** The default weight. */ + public static final int DEFAULT_WEIGHT = 1; + /** The default priority. */ + public static final int DEFAULT_DVB_PRIORITY = 1; + /** Constant representing an unset priority in a manifest that does not declare a DVB profile. */ + public static final int PRIORITY_UNSET = Integer.MIN_VALUE; + + /** The URL. */ + public final String url; + /** The service location. */ + public final String serviceLocation; + /** The priority. */ + public final int priority; + /** The weight. */ + public final int weight; + + /** + * Creates an instance with {@link #PRIORITY_UNSET an unset priority}, {@link #DEFAULT_WEIGHT + * default weight} and using the URL as the service location. + */ + public BaseUrl(String url) { + this(url, /* serviceLocation= */ url, PRIORITY_UNSET, DEFAULT_WEIGHT); + } + + /** Creates an instance. 
*/ + public BaseUrl(String url, String serviceLocation, int priority, int weight) { + this.url = url; + this.serviceLocation = serviceLocation; + this.priority = priority; + this.weight = weight; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (!(o instanceof BaseUrl)) { + return false; + } + BaseUrl baseUrl = (BaseUrl) o; + return priority == baseUrl.priority + && weight == baseUrl.weight + && Objects.equal(url, baseUrl.url) + && Objects.equal(serviceLocation, baseUrl.serviceLocation); + } + + @Override + public int hashCode() { + return Objects.hashCode(url, serviceLocation, priority, weight); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/DashManifest.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/DashManifest.java index c21af45d15..6bdbb0d6b0 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/DashManifest.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/DashManifest.java @@ -20,6 +20,7 @@ import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.offline.FilterableManifest; import com.google.android.exoplayer2.offline.StreamKey; +import com.google.android.exoplayer2.util.Util; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedList; @@ -42,14 +43,10 @@ public class DashManifest implements FilterableManifest { */ public final long durationMs; - /** - * The {@code minBufferTime} value in milliseconds, or {@link C#TIME_UNSET} if not present. - */ + /** The {@code minBufferTime} value in milliseconds, or {@link C#TIME_UNSET} if not present. */ public final long minBufferTimeMs; - /** - * Whether the manifest has value "dynamic" for the {@code type} attribute. - */ + /** Whether the manifest has value "dynamic" for the {@code type} attribute. */ public final boolean dynamic; /** @@ -59,8 +56,7 @@ public class DashManifest implements FilterableManifest { public final long minUpdatePeriodMs; /** - * The {@code timeShiftBufferDepth} value in milliseconds, or {@link C#TIME_UNSET} if not - * present. + * The {@code timeShiftBufferDepth} value in milliseconds, or {@link C#TIME_UNSET} if not present. */ public final long timeShiftBufferDepthMs; @@ -71,8 +67,8 @@ public class DashManifest implements FilterableManifest { public final long suggestedPresentationDelayMs; /** - * The {@code publishTime} value in milliseconds since epoch, or {@link C#TIME_UNSET} if - * not present. + * The {@code publishTime} value in milliseconds since epoch, or {@link C#TIME_UNSET} if not + * present. */ public final long publishTimeMs; @@ -82,6 +78,9 @@ public class DashManifest implements FilterableManifest { */ @Nullable public final UtcTimingElement utcTiming; + /** The {@link ServiceDescriptionElement}, or null if not present. */ + @Nullable public final ServiceDescriptionElement serviceDescription; + /** The location of this manifest, or null if not present. */ @Nullable public final Uri location; @@ -90,38 +89,6 @@ public class DashManifest implements FilterableManifest { private final List periods; - /** - * @deprecated Use {@link #DashManifest(long, long, long, boolean, long, long, long, long, - * ProgramInformation, UtcTimingElement, Uri, List)}. 
- */ - @Deprecated - public DashManifest( - long availabilityStartTimeMs, - long durationMs, - long minBufferTimeMs, - boolean dynamic, - long minUpdatePeriodMs, - long timeShiftBufferDepthMs, - long suggestedPresentationDelayMs, - long publishTimeMs, - @Nullable UtcTimingElement utcTiming, - @Nullable Uri location, - List periods) { - this( - availabilityStartTimeMs, - durationMs, - minBufferTimeMs, - dynamic, - minUpdatePeriodMs, - timeShiftBufferDepthMs, - suggestedPresentationDelayMs, - publishTimeMs, - /* programInformation= */ null, - utcTiming, - location, - periods); - } - public DashManifest( long availabilityStartTimeMs, long durationMs, @@ -133,6 +100,7 @@ public DashManifest( long publishTimeMs, @Nullable ProgramInformation programInformation, @Nullable UtcTimingElement utcTiming, + @Nullable ServiceDescriptionElement serviceDescription, @Nullable Uri location, List periods) { this.availabilityStartTimeMs = availabilityStartTimeMs; @@ -146,6 +114,7 @@ public DashManifest( this.programInformation = programInformation; this.utcTiming = utcTiming; this.location = location; + this.serviceDescription = serviceDescription; this.periods = periods == null ? Collections.emptyList() : periods; } @@ -164,7 +133,7 @@ public final long getPeriodDurationMs(int index) { } public final long getPeriodDurationUs(int index) { - return C.msToUs(getPeriodDurationMs(index)); + return Util.msToUs(getPeriodDurationMs(index)); } @Override @@ -186,8 +155,9 @@ public final DashManifest copy(List streamKeys) { Period period = getPeriod(periodIndex); ArrayList copyAdaptationSets = copyAdaptationSets(period.adaptationSets, keys); - Period copiedPeriod = new Period(period.id, period.startMs - shiftMs, copyAdaptationSets, - period.eventStreams); + Period copiedPeriod = + new Period( + period.id, period.startMs - shiftMs, copyAdaptationSets, period.eventStreams); copyPeriods.add(copiedPeriod); } } @@ -203,6 +173,7 @@ public final DashManifest copy(List streamKeys) { publishTimeMs, programInformation, utcTiming, + serviceDescription, location, copyPeriods); } @@ -219,7 +190,7 @@ private static ArrayList copyAdaptationSets( List representations = adaptationSet.representations; ArrayList copyRepresentations = new ArrayList<>(); do { - Representation representation = representations.get(key.trackIndex); + Representation representation = representations.get(key.streamIndex); copyRepresentations.add(representation); key = keys.poll(); } while (key.periodIndex == periodIndex && key.groupIndex == adaptationSetIndex); @@ -232,10 +203,9 @@ private static ArrayList copyAdaptationSets( adaptationSet.accessibilityDescriptors, adaptationSet.essentialProperties, adaptationSet.supplementalProperties)); - } while(key.periodIndex == periodIndex); + } while (key.periodIndex == periodIndex); // Add back the last key which doesn't belong to the period being processed keys.addFirst(key); return copyAdaptationSets; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/DashManifestParser.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/DashManifestParser.java index 6d25c50cf6..9235f8c830 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/DashManifestParser.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/DashManifestParser.java @@ -15,6 +15,10 @@ */ package com.google.android.exoplayer2.source.dash.manifest; +import static 
com.google.android.exoplayer2.source.dash.manifest.BaseUrl.DEFAULT_DVB_PRIORITY; +import static com.google.android.exoplayer2.source.dash.manifest.BaseUrl.DEFAULT_WEIGHT; +import static com.google.android.exoplayer2.source.dash.manifest.BaseUrl.PRIORITY_UNSET; + import android.net.Uri; import android.text.TextUtils; import android.util.Base64; @@ -39,11 +43,14 @@ import com.google.android.exoplayer2.util.UriUtil; import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.XmlPullParserUtil; +import com.google.common.base.Ascii; +import com.google.common.base.Charsets; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.UUID; import java.util.regex.Matcher; @@ -55,9 +62,7 @@ import org.xmlpull.v1.XmlPullParserFactory; import org.xmlpull.v1.XmlSerializer; -/** - * A parser of media presentation description files. - */ +/** A parser of media presentation description files. */ public class DashManifestParser extends DefaultHandler implements ParsingLoadable.Parser { @@ -69,6 +74,16 @@ public class DashManifestParser extends DefaultHandler private static final Pattern CEA_708_ACCESSIBILITY_PATTERN = Pattern.compile("([1-9]|[1-5][0-9]|6[0-3])=.*"); + /** + * Maps the value attribute of an AudioChannelConfiguration with schemeIdUri + * "urn:mpeg:mpegB:cicp:ChannelConfiguration", as defined by ISO 23001-8 clause 8.1, to a channel + * count. + */ + private static final int[] MPEG_CHANNEL_CONFIGURATION_MAPPING = + new int[] { + Format.NO_VALUE, 1, 2, 3, 4, 5, 6, 8, 2, 3, 4, 7, 8, 24, 8, 12, 10, 12, 14, 12, 14 + }; + private final XmlPullParserFactory xmlParserFactory; public DashManifestParser() { @@ -88,34 +103,47 @@ public DashManifest parse(Uri uri, InputStream inputStream) throws IOException { xpp.setInput(inputStream, null); int eventType = xpp.next(); if (eventType != XmlPullParser.START_TAG || !"MPD".equals(xpp.getName())) { - throw new ParserException( - "inputStream does not contain a valid media presentation description"); + throw ParserException.createForMalformedManifest( + "inputStream does not contain a valid media presentation description", + /* cause= */ null); } - return parseMediaPresentationDescription(xpp, uri.toString()); + return parseMediaPresentationDescription(xpp, uri); } catch (XmlPullParserException e) { - throw new ParserException(e); + throw ParserException.createForMalformedManifest(/* message= */ null, /* cause= */ e); } } - protected DashManifest parseMediaPresentationDescription(XmlPullParser xpp, - String baseUrl) throws XmlPullParserException, IOException { + protected DashManifest parseMediaPresentationDescription(XmlPullParser xpp, Uri documentBaseUri) + throws XmlPullParserException, IOException { + boolean dvbProfileDeclared = + isDvbProfileDeclared(parseProfiles(xpp, "profiles", new String[0])); long availabilityStartTime = parseDateTime(xpp, "availabilityStartTime", C.TIME_UNSET); long durationMs = parseDuration(xpp, "mediaPresentationDuration", C.TIME_UNSET); long minBufferTimeMs = parseDuration(xpp, "minBufferTime", C.TIME_UNSET); String typeString = xpp.getAttributeValue(null, "type"); boolean dynamic = "dynamic".equals(typeString); - long minUpdateTimeMs = dynamic ? parseDuration(xpp, "minimumUpdatePeriod", C.TIME_UNSET) - : C.TIME_UNSET; - long timeShiftBufferDepthMs = dynamic - ? 
parseDuration(xpp, "timeShiftBufferDepth", C.TIME_UNSET) : C.TIME_UNSET; - long suggestedPresentationDelayMs = dynamic - ? parseDuration(xpp, "suggestedPresentationDelay", C.TIME_UNSET) : C.TIME_UNSET; + long minUpdateTimeMs = + dynamic ? parseDuration(xpp, "minimumUpdatePeriod", C.TIME_UNSET) : C.TIME_UNSET; + long timeShiftBufferDepthMs = + dynamic ? parseDuration(xpp, "timeShiftBufferDepth", C.TIME_UNSET) : C.TIME_UNSET; + long suggestedPresentationDelayMs = + dynamic ? parseDuration(xpp, "suggestedPresentationDelay", C.TIME_UNSET) : C.TIME_UNSET; long publishTimeMs = parseDateTime(xpp, "publishTime", C.TIME_UNSET); ProgramInformation programInformation = null; UtcTimingElement utcTiming = null; Uri location = null; + ServiceDescriptionElement serviceDescription = null; + long baseUrlAvailabilityTimeOffsetUs = dynamic ? 0 : C.TIME_UNSET; + BaseUrl documentBaseUrl = + new BaseUrl( + documentBaseUri.toString(), + /* serviceLocation= */ documentBaseUri.toString(), + dvbProfileDeclared ? DEFAULT_DVB_PRIORITY : PRIORITY_UNSET, + DEFAULT_WEIGHT); + ArrayList parentBaseUrls = Lists.newArrayList(documentBaseUrl); List periods = new ArrayList<>(); + ArrayList baseUrls = new ArrayList<>(); long nextPeriodStartMs = dynamic ? C.TIME_UNSET : 0; boolean seenEarlyAccessPeriod = false; boolean seenFirstBaseUrl = false; @@ -123,17 +151,29 @@ protected DashManifest parseMediaPresentationDescription(XmlPullParser xpp, xpp.next(); if (XmlPullParserUtil.isStartTag(xpp, "BaseURL")) { if (!seenFirstBaseUrl) { - baseUrl = parseBaseUrl(xpp, baseUrl); + baseUrlAvailabilityTimeOffsetUs = + parseAvailabilityTimeOffsetUs(xpp, baseUrlAvailabilityTimeOffsetUs); seenFirstBaseUrl = true; } + baseUrls.addAll(parseBaseUrl(xpp, parentBaseUrls, dvbProfileDeclared)); } else if (XmlPullParserUtil.isStartTag(xpp, "ProgramInformation")) { programInformation = parseProgramInformation(xpp); } else if (XmlPullParserUtil.isStartTag(xpp, "UTCTiming")) { utcTiming = parseUtcTiming(xpp); } else if (XmlPullParserUtil.isStartTag(xpp, "Location")) { - location = Uri.parse(xpp.nextText()); + location = UriUtil.resolveToUri(documentBaseUri.toString(), xpp.nextText()); + } else if (XmlPullParserUtil.isStartTag(xpp, "ServiceDescription")) { + serviceDescription = parseServiceDescription(xpp); } else if (XmlPullParserUtil.isStartTag(xpp, "Period") && !seenEarlyAccessPeriod) { - Pair periodWithDurationMs = parsePeriod(xpp, baseUrl, nextPeriodStartMs); + Pair periodWithDurationMs = + parsePeriod( + xpp, + !baseUrls.isEmpty() ? baseUrls : parentBaseUrls, + nextPeriodStartMs, + baseUrlAvailabilityTimeOffsetUs, + availabilityStartTime, + timeShiftBufferDepthMs, + dvbProfileDeclared); Period period = periodWithDurationMs.first; if (period.startMs == C.TIME_UNSET) { if (dynamic) { @@ -141,12 +181,13 @@ protected DashManifest parseMediaPresentationDescription(XmlPullParser xpp, // early access. seenEarlyAccessPeriod = true; } else { - throw new ParserException("Unable to determine start of period " + periods.size()); + throw ParserException.createForMalformedManifest( + "Unable to determine start of period " + periods.size(), /* cause= */ null); } } else { long periodDurationMs = periodWithDurationMs.second; - nextPeriodStartMs = periodDurationMs == C.TIME_UNSET ? C.TIME_UNSET - : (period.startMs + periodDurationMs); + nextPeriodStartMs = + periodDurationMs == C.TIME_UNSET ? 
C.TIME_UNSET : (period.startMs + periodDurationMs); periods.add(period); } } else { @@ -159,12 +200,13 @@ protected DashManifest parseMediaPresentationDescription(XmlPullParser xpp, // If we know the end time of the final period, we can use it as the duration. durationMs = nextPeriodStartMs; } else if (!dynamic) { - throw new ParserException("Unable to determine duration of static manifest."); + throw ParserException.createForMalformedManifest( + "Unable to determine duration of static manifest.", /* cause= */ null); } } if (periods.isEmpty()) { - throw new ParserException("No periods found."); + throw ParserException.createForMalformedManifest("No periods found.", /* cause= */ null); } return buildMediaPresentationDescription( @@ -178,6 +220,7 @@ protected DashManifest parseMediaPresentationDescription(XmlPullParser xpp, publishTimeMs, programInformation, utcTiming, + serviceDescription, location, periods); } @@ -193,6 +236,7 @@ protected DashManifest buildMediaPresentationDescription( long publishTimeMs, @Nullable ProgramInformation programInformation, @Nullable UtcTimingElement utcTiming, + @Nullable ServiceDescriptionElement serviceDescription, @Nullable Uri location, List periods) { return new DashManifest( @@ -206,6 +250,7 @@ protected DashManifest buildMediaPresentationDescription( publishTimeMs, programInformation, utcTiming, + serviceDescription, location, periods); } @@ -220,33 +265,99 @@ protected UtcTimingElement buildUtcTimingElement(String schemeIdUri, String valu return new UtcTimingElement(schemeIdUri, value); } - protected Pair parsePeriod(XmlPullParser xpp, String baseUrl, long defaultStartMs) + protected ServiceDescriptionElement parseServiceDescription(XmlPullParser xpp) + throws XmlPullParserException, IOException { + long targetOffsetMs = C.TIME_UNSET; + long minOffsetMs = C.TIME_UNSET; + long maxOffsetMs = C.TIME_UNSET; + float minPlaybackSpeed = C.RATE_UNSET; + float maxPlaybackSpeed = C.RATE_UNSET; + do { + xpp.next(); + if (XmlPullParserUtil.isStartTag(xpp, "Latency")) { + targetOffsetMs = parseLong(xpp, "target", C.TIME_UNSET); + minOffsetMs = parseLong(xpp, "min", C.TIME_UNSET); + maxOffsetMs = parseLong(xpp, "max", C.TIME_UNSET); + } else if (XmlPullParserUtil.isStartTag(xpp, "PlaybackRate")) { + minPlaybackSpeed = parseFloat(xpp, "min", C.RATE_UNSET); + maxPlaybackSpeed = parseFloat(xpp, "max", C.RATE_UNSET); + } + } while (!XmlPullParserUtil.isEndTag(xpp, "ServiceDescription")); + return new ServiceDescriptionElement( + targetOffsetMs, minOffsetMs, maxOffsetMs, minPlaybackSpeed, maxPlaybackSpeed); + } + + protected Pair parsePeriod( + XmlPullParser xpp, + List parentBaseUrls, + long defaultStartMs, + long baseUrlAvailabilityTimeOffsetUs, + long availabilityStartTimeMs, + long timeShiftBufferDepthMs, + boolean dvbProfileDeclared) throws XmlPullParserException, IOException { @Nullable String id = xpp.getAttributeValue(null, "id"); long startMs = parseDuration(xpp, "start", defaultStartMs); + long periodStartUnixTimeMs = + availabilityStartTimeMs != C.TIME_UNSET ? 
availabilityStartTimeMs + startMs : C.TIME_UNSET; long durationMs = parseDuration(xpp, "duration", C.TIME_UNSET); @Nullable SegmentBase segmentBase = null; @Nullable Descriptor assetIdentifier = null; List adaptationSets = new ArrayList<>(); List eventStreams = new ArrayList<>(); + ArrayList baseUrls = new ArrayList<>(); boolean seenFirstBaseUrl = false; + long segmentBaseAvailabilityTimeOffsetUs = C.TIME_UNSET; do { xpp.next(); if (XmlPullParserUtil.isStartTag(xpp, "BaseURL")) { if (!seenFirstBaseUrl) { - baseUrl = parseBaseUrl(xpp, baseUrl); + baseUrlAvailabilityTimeOffsetUs = + parseAvailabilityTimeOffsetUs(xpp, baseUrlAvailabilityTimeOffsetUs); seenFirstBaseUrl = true; } + baseUrls.addAll(parseBaseUrl(xpp, parentBaseUrls, dvbProfileDeclared)); } else if (XmlPullParserUtil.isStartTag(xpp, "AdaptationSet")) { - adaptationSets.add(parseAdaptationSet(xpp, baseUrl, segmentBase, durationMs)); + adaptationSets.add( + parseAdaptationSet( + xpp, + !baseUrls.isEmpty() ? baseUrls : parentBaseUrls, + segmentBase, + durationMs, + baseUrlAvailabilityTimeOffsetUs, + segmentBaseAvailabilityTimeOffsetUs, + periodStartUnixTimeMs, + timeShiftBufferDepthMs, + dvbProfileDeclared)); } else if (XmlPullParserUtil.isStartTag(xpp, "EventStream")) { eventStreams.add(parseEventStream(xpp)); } else if (XmlPullParserUtil.isStartTag(xpp, "SegmentBase")) { - segmentBase = parseSegmentBase(xpp, null); + segmentBase = parseSegmentBase(xpp, /* parent= */ null); } else if (XmlPullParserUtil.isStartTag(xpp, "SegmentList")) { - segmentBase = parseSegmentList(xpp, null, durationMs); + segmentBaseAvailabilityTimeOffsetUs = + parseAvailabilityTimeOffsetUs(xpp, /* parentAvailabilityTimeOffsetUs= */ C.TIME_UNSET); + segmentBase = + parseSegmentList( + xpp, + /* parent= */ null, + periodStartUnixTimeMs, + durationMs, + baseUrlAvailabilityTimeOffsetUs, + segmentBaseAvailabilityTimeOffsetUs, + timeShiftBufferDepthMs); } else if (XmlPullParserUtil.isStartTag(xpp, "SegmentTemplate")) { - segmentBase = parseSegmentTemplate(xpp, null, Collections.emptyList(), durationMs); + segmentBaseAvailabilityTimeOffsetUs = + parseAvailabilityTimeOffsetUs(xpp, /* parentAvailabilityTimeOffsetUs= */ C.TIME_UNSET); + segmentBase = + parseSegmentTemplate( + xpp, + /* parent= */ null, + ImmutableList.of(), + periodStartUnixTimeMs, + durationMs, + baseUrlAvailabilityTimeOffsetUs, + segmentBaseAvailabilityTimeOffsetUs, + timeShiftBufferDepthMs); } else if (XmlPullParserUtil.isStartTag(xpp, "AssetIdentifier")) { assetIdentifier = parseDescriptor(xpp, "AssetIdentifier"); } else { @@ -270,10 +381,18 @@ protected Period buildPeriod( // AdaptationSet parsing. 
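The manifest-parser hunks above add a ServiceDescription element (Latency target/min/max, PlaybackRate min/max) for low-latency playback. The standalone snippet below parses such an element with XmlPullParser using the same element and attribute names as the hunk; it assumes the attributes are present (the real parser falls back to C.TIME_UNSET / C.RATE_UNSET via its parseLong/parseFloat helpers), and on a desktop JVM it needs an XmlPullParser implementation such as kxml2, which Android bundles.

// Illustrative sketch: extracting ServiceDescription values from a DASH MPD fragment.
import java.io.StringReader;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserFactory;

public final class ServiceDescriptionSketch {
  public static void main(String[] args) throws Exception {
    String xml =
        "<ServiceDescription id=\"0\">"
            + "<Latency target=\"3500\" min=\"2000\" max=\"8000\"/>"
            + "<PlaybackRate min=\"0.96\" max=\"1.04\"/>"
            + "</ServiceDescription>";
    XmlPullParser xpp = XmlPullParserFactory.newInstance().newPullParser();
    xpp.setInput(new StringReader(xml));
    long targetMs = -1, minMs = -1, maxMs = -1;
    float minRate = Float.NaN, maxRate = Float.NaN;
    int event = xpp.next(); // advance to the ServiceDescription start tag
    do {
      event = xpp.next();
      if (event == XmlPullParser.START_TAG && "Latency".equals(xpp.getName())) {
        targetMs = Long.parseLong(xpp.getAttributeValue(null, "target"));
        minMs = Long.parseLong(xpp.getAttributeValue(null, "min"));
        maxMs = Long.parseLong(xpp.getAttributeValue(null, "max"));
      } else if (event == XmlPullParser.START_TAG && "PlaybackRate".equals(xpp.getName())) {
        minRate = Float.parseFloat(xpp.getAttributeValue(null, "min"));
        maxRate = Float.parseFloat(xpp.getAttributeValue(null, "max"));
      }
    } while (!(event == XmlPullParser.END_TAG && "ServiceDescription".equals(xpp.getName())));
    System.out.printf("latency target=%dms min=%dms max=%dms rate=[%.2f..%.2f]%n",
        targetMs, minMs, maxMs, minRate, maxRate);
  }
}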
protected AdaptationSet parseAdaptationSet( - XmlPullParser xpp, String baseUrl, @Nullable SegmentBase segmentBase, long periodDurationMs) + XmlPullParser xpp, + List parentBaseUrls, + @Nullable SegmentBase segmentBase, + long periodDurationMs, + long baseUrlAvailabilityTimeOffsetUs, + long segmentBaseAvailabilityTimeOffsetUs, + long periodStartUnixTimeMs, + long timeShiftBufferDepthMs, + boolean dvbProfileDeclared) throws XmlPullParserException, IOException { int id = parseInt(xpp, "id", AdaptationSet.ID_UNSET); - int contentType = parseContentType(xpp); + @C.TrackType int contentType = parseContentType(xpp); String mimeType = xpp.getAttributeValue(null, "mimeType"); String codecs = xpp.getAttributeValue(null, "codecs"); @@ -292,15 +411,18 @@ protected AdaptationSet parseAdaptationSet( ArrayList essentialProperties = new ArrayList<>(); ArrayList supplementalProperties = new ArrayList<>(); List representationInfos = new ArrayList<>(); + ArrayList baseUrls = new ArrayList<>(); boolean seenFirstBaseUrl = false; do { xpp.next(); if (XmlPullParserUtil.isStartTag(xpp, "BaseURL")) { if (!seenFirstBaseUrl) { - baseUrl = parseBaseUrl(xpp, baseUrl); + baseUrlAvailabilityTimeOffsetUs = + parseAvailabilityTimeOffsetUs(xpp, baseUrlAvailabilityTimeOffsetUs); seenFirstBaseUrl = true; } + baseUrls.addAll(parseBaseUrl(xpp, parentBaseUrls, dvbProfileDeclared)); } else if (XmlPullParserUtil.isStartTag(xpp, "ContentProtection")) { Pair contentProtection = parseContentProtection(xpp); if (contentProtection.first != null) { @@ -326,7 +448,7 @@ protected AdaptationSet parseAdaptationSet( RepresentationInfo representationInfo = parseRepresentation( xpp, - baseUrl, + !baseUrls.isEmpty() ? baseUrls : parentBaseUrls, mimeType, codecs, width, @@ -340,18 +462,43 @@ protected AdaptationSet parseAdaptationSet( essentialProperties, supplementalProperties, segmentBase, - periodDurationMs); - contentType = checkContentTypeConsistency(contentType, - getContentType(representationInfo.format)); + periodStartUnixTimeMs, + periodDurationMs, + baseUrlAvailabilityTimeOffsetUs, + segmentBaseAvailabilityTimeOffsetUs, + timeShiftBufferDepthMs, + dvbProfileDeclared); + contentType = + checkContentTypeConsistency( + contentType, MimeTypes.getTrackType(representationInfo.format.sampleMimeType)); representationInfos.add(representationInfo); } else if (XmlPullParserUtil.isStartTag(xpp, "SegmentBase")) { segmentBase = parseSegmentBase(xpp, (SingleSegmentBase) segmentBase); } else if (XmlPullParserUtil.isStartTag(xpp, "SegmentList")) { - segmentBase = parseSegmentList(xpp, (SegmentList) segmentBase, periodDurationMs); + segmentBaseAvailabilityTimeOffsetUs = + parseAvailabilityTimeOffsetUs(xpp, segmentBaseAvailabilityTimeOffsetUs); + segmentBase = + parseSegmentList( + xpp, + (SegmentList) segmentBase, + periodStartUnixTimeMs, + periodDurationMs, + baseUrlAvailabilityTimeOffsetUs, + segmentBaseAvailabilityTimeOffsetUs, + timeShiftBufferDepthMs); } else if (XmlPullParserUtil.isStartTag(xpp, "SegmentTemplate")) { + segmentBaseAvailabilityTimeOffsetUs = + parseAvailabilityTimeOffsetUs(xpp, segmentBaseAvailabilityTimeOffsetUs); segmentBase = parseSegmentTemplate( - xpp, (SegmentTemplate) segmentBase, supplementalProperties, periodDurationMs); + xpp, + (SegmentTemplate) segmentBase, + supplementalProperties, + periodStartUnixTimeMs, + periodDurationMs, + baseUrlAvailabilityTimeOffsetUs, + segmentBaseAvailabilityTimeOffsetUs, + timeShiftBufferDepthMs); } else if (XmlPullParserUtil.isStartTag(xpp, "InbandEventStream")) { 
inbandEventStreams.add(parseDescriptor(xpp, "InbandEventStream")); } else if (XmlPullParserUtil.isStartTag(xpp, "Label")) { @@ -384,7 +531,7 @@ protected AdaptationSet parseAdaptationSet( protected AdaptationSet buildAdaptationSet( int id, - int contentType, + @C.TrackType int contentType, List representations, List accessibilityDescriptors, List essentialProperties, @@ -398,27 +545,19 @@ protected AdaptationSet buildAdaptationSet( supplementalProperties); } - protected int parseContentType(XmlPullParser xpp) { + protected @C.TrackType int parseContentType(XmlPullParser xpp) { String contentType = xpp.getAttributeValue(null, "contentType"); - return TextUtils.isEmpty(contentType) ? C.TRACK_TYPE_UNKNOWN - : MimeTypes.BASE_TYPE_AUDIO.equals(contentType) ? C.TRACK_TYPE_AUDIO - : MimeTypes.BASE_TYPE_VIDEO.equals(contentType) ? C.TRACK_TYPE_VIDEO - : MimeTypes.BASE_TYPE_TEXT.equals(contentType) ? C.TRACK_TYPE_TEXT - : C.TRACK_TYPE_UNKNOWN; - } - - protected int getContentType(Format format) { - String sampleMimeType = format.sampleMimeType; - if (TextUtils.isEmpty(sampleMimeType)) { - return C.TRACK_TYPE_UNKNOWN; - } else if (MimeTypes.isVideo(sampleMimeType)) { - return C.TRACK_TYPE_VIDEO; - } else if (MimeTypes.isAudio(sampleMimeType)) { - return C.TRACK_TYPE_AUDIO; - } else if (mimeTypeIsRawText(sampleMimeType)) { - return C.TRACK_TYPE_TEXT; - } - return C.TRACK_TYPE_UNKNOWN; + return TextUtils.isEmpty(contentType) + ? C.TRACK_TYPE_UNKNOWN + : MimeTypes.BASE_TYPE_AUDIO.equals(contentType) + ? C.TRACK_TYPE_AUDIO + : MimeTypes.BASE_TYPE_VIDEO.equals(contentType) + ? C.TRACK_TYPE_VIDEO + : MimeTypes.BASE_TYPE_TEXT.equals(contentType) + ? C.TRACK_TYPE_TEXT + : MimeTypes.BASE_TYPE_IMAGE.equals(contentType) + ? C.TRACK_TYPE_IMAGE + : C.TRACK_TYPE_UNKNOWN; } /** @@ -439,7 +578,7 @@ protected int getContentType(Format format) { String schemeIdUri = xpp.getAttributeValue(null, "schemeIdUri"); if (schemeIdUri != null) { - switch (Util.toLowerInvariant(schemeIdUri)) { + switch (Ascii.toLowerCase(schemeIdUri)) { case "urn:mpeg:dash:mp4protection:2011": schemeType = xpp.getAttributeValue(null, "value"); String defaultKid = XmlPullParserUtil.getAttributeValueIgnorePrefix(xpp, "default_KID"); @@ -460,6 +599,9 @@ protected int getContentType(Format format) { case "urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed": uuid = C.WIDEVINE_UUID; break; + case "urn:uuid:e2719d58-a985-b3c9-781a-b030af78d30e": + uuid = C.CLEARKEY_UUID; + break; default: break; } @@ -467,7 +609,9 @@ protected int getContentType(Format format) { do { xpp.next(); - if (XmlPullParserUtil.isStartTag(xpp, "ms:laurl")) { + if (XmlPullParserUtil.isStartTag(xpp, "clearkey:Laurl") && xpp.next() == XmlPullParser.TEXT) { + licenseServerUrl = xpp.getText(); + } else if (XmlPullParserUtil.isStartTag(xpp, "ms:laurl")) { licenseServerUrl = xpp.getAttributeValue(null, "licenseUrl"); } else if (data == null && XmlPullParserUtil.isStartTagIgnorePrefix(xpp, "pssh") @@ -512,7 +656,7 @@ protected void parseAdaptationSetChild(XmlPullParser xpp) protected RepresentationInfo parseRepresentation( XmlPullParser xpp, - String baseUrl, + List parentBaseUrls, @Nullable String adaptationSetMimeType, @Nullable String adaptationSetCodecs, int adaptationSetWidth, @@ -526,7 +670,12 @@ protected RepresentationInfo parseRepresentation( List adaptationSetEssentialProperties, List adaptationSetSupplementalProperties, @Nullable SegmentBase segmentBase, - long periodDurationMs) + long periodStartUnixTimeMs, + long periodDurationMs, + long baseUrlAvailabilityTimeOffsetUs, + long 
segmentBaseAvailabilityTimeOffsetUs, + long timeShiftBufferDepthMs, + boolean dvbProfileDeclared) throws XmlPullParserException, IOException { String id = xpp.getAttributeValue(null, "id"); int bandwidth = parseInt(xpp, "bandwidth", Format.NO_VALUE); @@ -544,28 +693,47 @@ protected RepresentationInfo parseRepresentation( ArrayList essentialProperties = new ArrayList<>(adaptationSetEssentialProperties); ArrayList supplementalProperties = new ArrayList<>(adaptationSetSupplementalProperties); + ArrayList baseUrls = new ArrayList<>(); boolean seenFirstBaseUrl = false; do { xpp.next(); if (XmlPullParserUtil.isStartTag(xpp, "BaseURL")) { if (!seenFirstBaseUrl) { - baseUrl = parseBaseUrl(xpp, baseUrl); + baseUrlAvailabilityTimeOffsetUs = + parseAvailabilityTimeOffsetUs(xpp, baseUrlAvailabilityTimeOffsetUs); seenFirstBaseUrl = true; } + baseUrls.addAll(parseBaseUrl(xpp, parentBaseUrls, dvbProfileDeclared)); } else if (XmlPullParserUtil.isStartTag(xpp, "AudioChannelConfiguration")) { audioChannels = parseAudioChannelConfiguration(xpp); } else if (XmlPullParserUtil.isStartTag(xpp, "SegmentBase")) { segmentBase = parseSegmentBase(xpp, (SingleSegmentBase) segmentBase); } else if (XmlPullParserUtil.isStartTag(xpp, "SegmentList")) { - segmentBase = parseSegmentList(xpp, (SegmentList) segmentBase, periodDurationMs); + segmentBaseAvailabilityTimeOffsetUs = + parseAvailabilityTimeOffsetUs(xpp, segmentBaseAvailabilityTimeOffsetUs); + segmentBase = + parseSegmentList( + xpp, + (SegmentList) segmentBase, + periodStartUnixTimeMs, + periodDurationMs, + baseUrlAvailabilityTimeOffsetUs, + segmentBaseAvailabilityTimeOffsetUs, + timeShiftBufferDepthMs); } else if (XmlPullParserUtil.isStartTag(xpp, "SegmentTemplate")) { + segmentBaseAvailabilityTimeOffsetUs = + parseAvailabilityTimeOffsetUs(xpp, segmentBaseAvailabilityTimeOffsetUs); segmentBase = parseSegmentTemplate( xpp, (SegmentTemplate) segmentBase, adaptationSetSupplementalProperties, - periodDurationMs); + periodStartUnixTimeMs, + periodDurationMs, + baseUrlAvailabilityTimeOffsetUs, + segmentBaseAvailabilityTimeOffsetUs, + timeShiftBufferDepthMs); } else if (XmlPullParserUtil.isStartTag(xpp, "ContentProtection")) { Pair contentProtection = parseContentProtection(xpp); if (contentProtection.first != null) { @@ -603,8 +771,16 @@ protected RepresentationInfo parseRepresentation( supplementalProperties); segmentBase = segmentBase != null ? segmentBase : new SingleSegmentBase(); - return new RepresentationInfo(format, baseUrl, segmentBase, drmSchemeType, drmSchemeDatas, - inbandEventStreams, Representation.REVISION_ID_DEFAULT); + return new RepresentationInfo( + format, + !baseUrls.isEmpty() ? 
baseUrls : parentBaseUrls, + segmentBase, + drmSchemeType, + drmSchemeDatas, + inbandEventStreams, + essentialProperties, + supplementalProperties, + Representation.REVISION_ID_DEFAULT); } protected Format buildFormat( @@ -622,78 +798,50 @@ protected Format buildFormat( @Nullable String codecs, List essentialProperties, List supplementalProperties) { - String sampleMimeType = getSampleMimeType(containerMimeType, codecs); + @Nullable String sampleMimeType = getSampleMimeType(containerMimeType, codecs); + if (MimeTypes.AUDIO_E_AC3.equals(sampleMimeType)) { + sampleMimeType = parseEac3SupplementalProperties(supplementalProperties); + if (MimeTypes.AUDIO_E_AC3_JOC.equals(sampleMimeType)) { + codecs = MimeTypes.CODEC_E_AC3_JOC; + } + } @C.SelectionFlags int selectionFlags = parseSelectionFlagsFromRoleDescriptors(roleDescriptors); @C.RoleFlags int roleFlags = parseRoleFlagsFromRoleDescriptors(roleDescriptors); roleFlags |= parseRoleFlagsFromAccessibilityDescriptors(accessibilityDescriptors); roleFlags |= parseRoleFlagsFromProperties(essentialProperties); roleFlags |= parseRoleFlagsFromProperties(supplementalProperties); - if (sampleMimeType != null) { - if (MimeTypes.AUDIO_E_AC3.equals(sampleMimeType)) { - sampleMimeType = parseEac3SupplementalProperties(supplementalProperties); - } - if (MimeTypes.isVideo(sampleMimeType)) { - return Format.createVideoContainerFormat( - id, - /* label= */ null, - containerMimeType, - sampleMimeType, - codecs, - /* metadata= */ null, - bitrate, - width, - height, - frameRate, - /* initializationData= */ null, - selectionFlags, - roleFlags); - } else if (MimeTypes.isAudio(sampleMimeType)) { - return Format.createAudioContainerFormat( - id, - /* label= */ null, - containerMimeType, - sampleMimeType, - codecs, - /* metadata= */ null, - bitrate, - audioChannels, - audioSamplingRate, - /* initializationData= */ null, - selectionFlags, - roleFlags, - language); - } else if (mimeTypeIsRawText(sampleMimeType)) { - int accessibilityChannel; - if (MimeTypes.APPLICATION_CEA608.equals(sampleMimeType)) { - accessibilityChannel = parseCea608AccessibilityChannel(accessibilityDescriptors); - } else if (MimeTypes.APPLICATION_CEA708.equals(sampleMimeType)) { - accessibilityChannel = parseCea708AccessibilityChannel(accessibilityDescriptors); - } else { - accessibilityChannel = Format.NO_VALUE; - } - return Format.createTextContainerFormat( - id, - /* label= */ null, - containerMimeType, - sampleMimeType, - codecs, - bitrate, - selectionFlags, - roleFlags, - language, - accessibilityChannel); + @Nullable Pair tileCounts = parseTileCountFromProperties(essentialProperties); + + Format.Builder formatBuilder = + new Format.Builder() + .setId(id) + .setContainerMimeType(containerMimeType) + .setSampleMimeType(sampleMimeType) + .setCodecs(codecs) + .setPeakBitrate(bitrate) + .setSelectionFlags(selectionFlags) + .setRoleFlags(roleFlags) + .setLanguage(language) + .setTileCountHorizontal(tileCounts != null ? tileCounts.first : Format.NO_VALUE) + .setTileCountVertical(tileCounts != null ? 
tileCounts.second : Format.NO_VALUE); + + if (MimeTypes.isVideo(sampleMimeType)) { + formatBuilder.setWidth(width).setHeight(height).setFrameRate(frameRate); + } else if (MimeTypes.isAudio(sampleMimeType)) { + formatBuilder.setChannelCount(audioChannels).setSampleRate(audioSamplingRate); + } else if (MimeTypes.isText(sampleMimeType)) { + int accessibilityChannel = Format.NO_VALUE; + if (MimeTypes.APPLICATION_CEA608.equals(sampleMimeType)) { + accessibilityChannel = parseCea608AccessibilityChannel(accessibilityDescriptors); + } else if (MimeTypes.APPLICATION_CEA708.equals(sampleMimeType)) { + accessibilityChannel = parseCea708AccessibilityChannel(accessibilityDescriptors); } + formatBuilder.setAccessibilityChannel(accessibilityChannel); + } else if (MimeTypes.isImage(sampleMimeType)) { + formatBuilder.setWidth(width).setHeight(height); } - return Format.createContainerFormat( - id, - /* label= */ null, - containerMimeType, - sampleMimeType, - codecs, - bitrate, - selectionFlags, - roleFlags, - language); + + return formatBuilder.build(); } protected Representation buildRepresentation( @@ -702,27 +850,32 @@ protected Representation buildRepresentation( @Nullable String extraDrmSchemeType, ArrayList extraDrmSchemeDatas, ArrayList extraInbandEventStreams) { - Format format = representationInfo.format; + Format.Builder formatBuilder = representationInfo.format.buildUpon(); if (label != null) { - format = format.copyWithLabel(label); + formatBuilder.setLabel(label); + } + @Nullable String drmSchemeType = representationInfo.drmSchemeType; + if (drmSchemeType == null) { + drmSchemeType = extraDrmSchemeType; } - String drmSchemeType = representationInfo.drmSchemeType != null - ? representationInfo.drmSchemeType : extraDrmSchemeType; ArrayList drmSchemeDatas = representationInfo.drmSchemeDatas; drmSchemeDatas.addAll(extraDrmSchemeDatas); if (!drmSchemeDatas.isEmpty()) { + fillInClearKeyInformation(drmSchemeDatas); filterRedundantIncompleteSchemeDatas(drmSchemeDatas); - DrmInitData drmInitData = new DrmInitData(drmSchemeType, drmSchemeDatas); - format = format.copyWithDrmInitData(drmInitData); + formatBuilder.setDrmInitData(new DrmInitData(drmSchemeType, drmSchemeDatas)); } ArrayList inbandEventStreams = representationInfo.inbandEventStreams; inbandEventStreams.addAll(extraInbandEventStreams); return Representation.newInstance( representationInfo.revisionId, - format, - representationInfo.baseUrl, + formatBuilder.build(), + representationInfo.baseUrls, representationInfo.segmentBase, - inbandEventStreams); + inbandEventStreams, + representationInfo.essentialProperties, + representationInfo.supplementalProperties, + /* cacheKey= */ null); } // SegmentBase, SegmentList and SegmentTemplate parsing. @@ -732,8 +885,9 @@ protected SingleSegmentBase parseSegmentBase( throws XmlPullParserException, IOException { long timescale = parseLong(xpp, "timescale", parent != null ? parent.timescale : 1); - long presentationTimeOffset = parseLong(xpp, "presentationTimeOffset", - parent != null ? parent.presentationTimeOffset : 0); + long presentationTimeOffset = + parseLong( + xpp, "presentationTimeOffset", parent != null ? parent.presentationTimeOffset : 0); long indexStart = parent != null ? parent.indexStart : 0; long indexLength = parent != null ? parent.indexLength : 0; @@ -744,7 +898,7 @@ protected SingleSegmentBase parseSegmentBase( indexLength = Long.parseLong(indexRange[1]) - indexStart + 1; } - RangedUri initialization = parent != null ? 
parent.initialization : null; + @Nullable RangedUri initialization = parent != null ? parent.initialization : null; do { xpp.next(); if (XmlPullParserUtil.isStartTag(xpp, "Initialization")) { @@ -754,25 +908,39 @@ protected SingleSegmentBase parseSegmentBase( } } while (!XmlPullParserUtil.isEndTag(xpp, "SegmentBase")); - return buildSingleSegmentBase(initialization, timescale, presentationTimeOffset, indexStart, - indexLength); + return buildSingleSegmentBase( + initialization, timescale, presentationTimeOffset, indexStart, indexLength); } - protected SingleSegmentBase buildSingleSegmentBase(RangedUri initialization, long timescale, - long presentationTimeOffset, long indexStart, long indexLength) { - return new SingleSegmentBase(initialization, timescale, presentationTimeOffset, indexStart, - indexLength); + protected SingleSegmentBase buildSingleSegmentBase( + RangedUri initialization, + long timescale, + long presentationTimeOffset, + long indexStart, + long indexLength) { + return new SingleSegmentBase( + initialization, timescale, presentationTimeOffset, indexStart, indexLength); } protected SegmentList parseSegmentList( - XmlPullParser xpp, @Nullable SegmentList parent, long periodDurationMs) + XmlPullParser xpp, + @Nullable SegmentList parent, + long periodStartUnixTimeMs, + long periodDurationMs, + long baseUrlAvailabilityTimeOffsetUs, + long segmentBaseAvailabilityTimeOffsetUs, + long timeShiftBufferDepthMs) throws XmlPullParserException, IOException { long timescale = parseLong(xpp, "timescale", parent != null ? parent.timescale : 1); - long presentationTimeOffset = parseLong(xpp, "presentationTimeOffset", - parent != null ? parent.presentationTimeOffset : 0); + long presentationTimeOffset = + parseLong( + xpp, "presentationTimeOffset", parent != null ? parent.presentationTimeOffset : 0); long duration = parseLong(xpp, "duration", parent != null ? parent.duration : C.TIME_UNSET); long startNumber = parseLong(xpp, "startNumber", parent != null ? parent.startNumber : 1); + long availabilityTimeOffsetUs = + getFinalAvailabilityTimeOffset( + baseUrlAvailabilityTimeOffsetUs, segmentBaseAvailabilityTimeOffsetUs); RangedUri initialization = null; List timeline = null; @@ -800,8 +968,17 @@ protected SegmentList parseSegmentList( segments = segments != null ? 
segments : parent.mediaSegments; } - return buildSegmentList(initialization, timescale, presentationTimeOffset, - startNumber, duration, timeline, segments); + return buildSegmentList( + initialization, + timescale, + presentationTimeOffset, + startNumber, + duration, + timeline, + availabilityTimeOffsetUs, + segments, + timeShiftBufferDepthMs, + periodStartUnixTimeMs); } protected SegmentList buildSegmentList( @@ -811,29 +988,50 @@ protected SegmentList buildSegmentList( long startNumber, long duration, @Nullable List timeline, - @Nullable List segments) { - return new SegmentList(initialization, timescale, presentationTimeOffset, - startNumber, duration, timeline, segments); + long availabilityTimeOffsetUs, + @Nullable List segments, + long timeShiftBufferDepthMs, + long periodStartUnixTimeMs) { + return new SegmentList( + initialization, + timescale, + presentationTimeOffset, + startNumber, + duration, + timeline, + availabilityTimeOffsetUs, + segments, + Util.msToUs(timeShiftBufferDepthMs), + Util.msToUs(periodStartUnixTimeMs)); } protected SegmentTemplate parseSegmentTemplate( XmlPullParser xpp, @Nullable SegmentTemplate parent, List adaptationSetSupplementalProperties, - long periodDurationMs) + long periodStartUnixTimeMs, + long periodDurationMs, + long baseUrlAvailabilityTimeOffsetUs, + long segmentBaseAvailabilityTimeOffsetUs, + long timeShiftBufferDepthMs) throws XmlPullParserException, IOException { long timescale = parseLong(xpp, "timescale", parent != null ? parent.timescale : 1); - long presentationTimeOffset = parseLong(xpp, "presentationTimeOffset", - parent != null ? parent.presentationTimeOffset : 0); + long presentationTimeOffset = + parseLong( + xpp, "presentationTimeOffset", parent != null ? parent.presentationTimeOffset : 0); long duration = parseLong(xpp, "duration", parent != null ? parent.duration : C.TIME_UNSET); long startNumber = parseLong(xpp, "startNumber", parent != null ? parent.startNumber : 1); long endNumber = parseLastSegmentNumberSupplementalProperty(adaptationSetSupplementalProperties); + long availabilityTimeOffsetUs = + getFinalAvailabilityTimeOffset( + baseUrlAvailabilityTimeOffsetUs, segmentBaseAvailabilityTimeOffsetUs); - UrlTemplate mediaTemplate = parseUrlTemplate(xpp, "media", - parent != null ? parent.mediaTemplate : null); - UrlTemplate initializationTemplate = parseUrlTemplate(xpp, "initialization", - parent != null ? parent.initializationTemplate : null); + UrlTemplate mediaTemplate = + parseUrlTemplate(xpp, "media", parent != null ? parent.mediaTemplate : null); + UrlTemplate initializationTemplate = + parseUrlTemplate( + xpp, "initialization", parent != null ? 
parent.initializationTemplate : null); RangedUri initialization = null; List timeline = null; @@ -862,8 +1060,11 @@ protected SegmentTemplate parseSegmentTemplate( endNumber, duration, timeline, + availabilityTimeOffsetUs, initializationTemplate, - mediaTemplate); + mediaTemplate, + timeShiftBufferDepthMs, + periodStartUnixTimeMs); } protected SegmentTemplate buildSegmentTemplate( @@ -874,8 +1075,11 @@ protected SegmentTemplate buildSegmentTemplate( long endNumber, long duration, List timeline, + long availabilityTimeOffsetUs, @Nullable UrlTemplate initializationTemplate, - @Nullable UrlTemplate mediaTemplate) { + @Nullable UrlTemplate mediaTemplate, + long timeShiftBufferDepthMs, + long periodStartUnixTimeMs) { return new SegmentTemplate( initialization, timescale, @@ -884,14 +1088,16 @@ protected SegmentTemplate buildSegmentTemplate( endNumber, duration, timeline, + availabilityTimeOffsetUs, initializationTemplate, - mediaTemplate); + mediaTemplate, + Util.msToUs(timeShiftBufferDepthMs), + Util.msToUs(periodStartUnixTimeMs)); } /** - * /** * Parses a single EventStream node in the manifest. - *
<p>
      + * * @param xpp The current xml parser. * @return The {@link EventStream} parsed from this EventStream node. * @throws XmlPullParserException If there is any error parsing this node. @@ -902,13 +1108,15 @@ protected EventStream parseEventStream(XmlPullParser xpp) String schemeIdUri = parseString(xpp, "schemeIdUri", ""); String value = parseString(xpp, "value", ""); long timescale = parseLong(xpp, "timescale", 1); + long presentationTimeOffset = parseLong(xpp, "presentationTimeOffset", 0); List> eventMessages = new ArrayList<>(); ByteArrayOutputStream scratchOutputStream = new ByteArrayOutputStream(512); do { xpp.next(); if (XmlPullParserUtil.isStartTag(xpp, "Event")) { Pair event = - parseEvent(xpp, schemeIdUri, value, timescale, scratchOutputStream); + parseEvent( + xpp, schemeIdUri, value, timescale, presentationTimeOffset, scratchOutputStream); eventMessages.add(event); } else { maybeSkipTag(xpp); @@ -925,8 +1133,12 @@ protected EventStream parseEventStream(XmlPullParser xpp) return buildEventStream(schemeIdUri, value, timescale, presentationTimesUs, events); } - protected EventStream buildEventStream(String schemeIdUri, String value, long timescale, - long[] presentationTimesUs, EventMessage[] events) { + protected EventStream buildEventStream( + String schemeIdUri, + String value, + long timescale, + long[] presentationTimesUs, + EventMessage[] events) { return new EventStream(schemeIdUri, value, timescale, presentationTimesUs, events); } @@ -937,6 +1149,7 @@ protected EventStream buildEventStream(String schemeIdUri, String value, long ti * @param schemeIdUri The schemeIdUri of the parent EventStream. * @param value The schemeIdUri of the parent EventStream. * @param timescale The timescale of the parent EventStream. + * @param presentationTimeOffset The unscaled presentation time offset of the parent EventStream. * @param scratchOutputStream A {@link ByteArrayOutputStream} that is used when parsing event * objects. * @return A pair containing the node's presentation timestamp in microseconds and the parsed @@ -949,14 +1162,16 @@ protected Pair parseEvent( String schemeIdUri, String value, long timescale, + long presentationTimeOffset, ByteArrayOutputStream scratchOutputStream) throws IOException, XmlPullParserException { long id = parseLong(xpp, "id", 0); long duration = parseLong(xpp, "duration", C.TIME_UNSET); long presentationTime = parseLong(xpp, "presentationTime", 0); long durationMs = Util.scaleLargeTimestamp(duration, C.MILLIS_PER_SECOND, timescale); - long presentationTimesUs = Util.scaleLargeTimestamp(presentationTime, C.MICROS_PER_SECOND, - timescale); + long presentationTimesUs = + Util.scaleLargeTimestamp( + presentationTime - presentationTimeOffset, C.MICROS_PER_SECOND, timescale); String messageData = parseString(xpp, "messageData", null); byte[] eventObject = parseEventObject(xpp, scratchOutputStream); return Pair.create( @@ -982,47 +1197,47 @@ protected byte[] parseEventObject(XmlPullParser xpp, ByteArrayOutputStream scrat throws XmlPullParserException, IOException { scratchOutputStream.reset(); XmlSerializer xmlSerializer = Xml.newSerializer(); - xmlSerializer.setOutput(scratchOutputStream, C.UTF8_NAME); + xmlSerializer.setOutput(scratchOutputStream, Charsets.UTF_8.name()); // Start reading everything between and , and serialize them into an Xml // byte array. 
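// --- Editorial sketch (not part of the patch) ---------------------------------------------
// The parseEvent change above shifts an Event's presentationTime by the EventStream-level
// presentationTimeOffset before scaling it from the stream's timescale to microseconds. A
// plain-arithmetic equivalent is sketched below; the class and method names are illustrative
// only, and the real parser uses Util.scaleLargeTimestamp instead of this expression so that
// large timestamps do not overflow.
final class EventTimeSketch {
  static long eventPresentationTimeUs(
      long presentationTime, long presentationTimeOffset, long timescale) {
    // Example: presentationTime = 90090, offset = 0, timescale = 90000 -> 1_001_000 us.
    return (presentationTime - presentationTimeOffset) * 1_000_000L / timescale;
  }
}
// -------------------------------------------------------------------------------------------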
xpp.nextToken(); while (!XmlPullParserUtil.isEndTag(xpp, "Event")) { switch (xpp.getEventType()) { - case (XmlPullParser.START_DOCUMENT): + case XmlPullParser.START_DOCUMENT: xmlSerializer.startDocument(null, false); break; - case (XmlPullParser.END_DOCUMENT): + case XmlPullParser.END_DOCUMENT: xmlSerializer.endDocument(); break; - case (XmlPullParser.START_TAG): + case XmlPullParser.START_TAG: xmlSerializer.startTag(xpp.getNamespace(), xpp.getName()); for (int i = 0; i < xpp.getAttributeCount(); i++) { - xmlSerializer.attribute(xpp.getAttributeNamespace(i), xpp.getAttributeName(i), - xpp.getAttributeValue(i)); + xmlSerializer.attribute( + xpp.getAttributeNamespace(i), xpp.getAttributeName(i), xpp.getAttributeValue(i)); } break; - case (XmlPullParser.END_TAG): + case XmlPullParser.END_TAG: xmlSerializer.endTag(xpp.getNamespace(), xpp.getName()); break; - case (XmlPullParser.TEXT): + case XmlPullParser.TEXT: xmlSerializer.text(xpp.getText()); break; - case (XmlPullParser.CDSECT): + case XmlPullParser.CDSECT: xmlSerializer.cdsect(xpp.getText()); break; - case (XmlPullParser.ENTITY_REF): + case XmlPullParser.ENTITY_REF: xmlSerializer.entityRef(xpp.getText()); break; - case (XmlPullParser.IGNORABLE_WHITESPACE): + case XmlPullParser.IGNORABLE_WHITESPACE: xmlSerializer.ignorableWhitespace(xpp.getText()); break; - case (XmlPullParser.PROCESSING_INSTRUCTION): + case XmlPullParser.PROCESSING_INSTRUCTION: xmlSerializer.processingInstruction(xpp.getText()); break; - case (XmlPullParser.COMMENT): + case XmlPullParser.COMMENT: xmlSerializer.comment(xpp.getText()); break; - case (XmlPullParser.DOCDECL): + case XmlPullParser.DOCDECL: xmlSerializer.docdecl(xpp.getText()); break; default: // fall out @@ -1131,8 +1346,8 @@ protected RangedUri parseSegmentUrl(XmlPullParser xpp) { return parseRangedUrl(xpp, "media", "mediaRange"); } - protected RangedUri parseRangedUrl(XmlPullParser xpp, String urlAttribute, - String rangeAttribute) { + protected RangedUri parseRangedUrl( + XmlPullParser xpp, String urlAttribute, String rangeAttribute) { String urlText = xpp.getAttributeValue(null, urlAttribute); long rangeStart = 0; long rangeLength = C.LENGTH_UNSET; @@ -1189,14 +1404,66 @@ protected String parseLabel(XmlPullParser xpp) throws XmlPullParserException, IO * Parses a BaseURL element. * * @param xpp The parser from which to read. - * @param parentBaseUrl A base URL for resolving the parsed URL. + * @param parentBaseUrls The parent base URLs for resolving the parsed URLs. + * @param dvbProfileDeclared Whether the dvb profile is declared. * @throws XmlPullParserException If an error occurs parsing the element. * @throws IOException If an error occurs reading the element. - * @return The parsed and resolved URL. + * @return The list of parsed and resolved URLs. */ - protected String parseBaseUrl(XmlPullParser xpp, String parentBaseUrl) + protected List parseBaseUrl( + XmlPullParser xpp, List parentBaseUrls, boolean dvbProfileDeclared) throws XmlPullParserException, IOException { - return UriUtil.resolve(parentBaseUrl, parseText(xpp, "BaseURL")); + @Nullable String priorityValue = xpp.getAttributeValue(null, "dvb:priority"); + int priority = + priorityValue != null + ? Integer.parseInt(priorityValue) + : (dvbProfileDeclared ? DEFAULT_DVB_PRIORITY : PRIORITY_UNSET); + @Nullable String weightValue = xpp.getAttributeValue(null, "dvb:weight"); + int weight = weightValue != null ? 
Integer.parseInt(weightValue) : DEFAULT_WEIGHT; + @Nullable String serviceLocation = xpp.getAttributeValue(null, "serviceLocation"); + String baseUrl = parseText(xpp, "BaseURL"); + if (UriUtil.isAbsolute(baseUrl)) { + if (serviceLocation == null) { + serviceLocation = baseUrl; + } + return Lists.newArrayList(new BaseUrl(baseUrl, serviceLocation, priority, weight)); + } + + List baseUrls = new ArrayList<>(); + for (int i = 0; i < parentBaseUrls.size(); i++) { + BaseUrl parentBaseUrl = parentBaseUrls.get(i); + String resolvedBaseUri = UriUtil.resolve(parentBaseUrl.url, baseUrl); + String resolvedServiceLocation = serviceLocation == null ? resolvedBaseUri : serviceLocation; + if (dvbProfileDeclared) { + // Inherit parent properties only if dvb profile is declared. + priority = parentBaseUrl.priority; + weight = parentBaseUrl.weight; + resolvedServiceLocation = parentBaseUrl.serviceLocation; + } + baseUrls.add(new BaseUrl(resolvedBaseUri, resolvedServiceLocation, priority, weight)); + } + return baseUrls; + } + + /** + * Parses the availabilityTimeOffset value and returns the parsed value or the parent value if it + * doesn't exist. + * + * @param xpp The parser from which to read. + * @param parentAvailabilityTimeOffsetUs The availability time offset of a parent element in + * microseconds. + * @return The parsed availabilityTimeOffset in microseconds. + */ + protected long parseAvailabilityTimeOffsetUs( + XmlPullParser xpp, long parentAvailabilityTimeOffsetUs) { + String value = xpp.getAttributeValue(/* namespace= */ null, "availabilityTimeOffset"); + if (value == null) { + return parentAvailabilityTimeOffsetUs; + } + if ("INF".equals(value)) { + return Long.MAX_VALUE; + } + return (long) (Float.parseFloat(value) * C.MICROS_PER_SECOND); } // AudioChannelConfiguration parsing. @@ -1204,13 +1471,29 @@ protected String parseBaseUrl(XmlPullParser xpp, String parentBaseUrl) protected int parseAudioChannelConfiguration(XmlPullParser xpp) throws XmlPullParserException, IOException { String schemeIdUri = parseString(xpp, "schemeIdUri", null); - int audioChannels = - "urn:mpeg:dash:23003:3:audio_channel_configuration:2011".equals(schemeIdUri) - ? parseInt(xpp, "value", Format.NO_VALUE) - : ("tag:dolby.com,2014:dash:audio_channel_configuration:2011".equals(schemeIdUri) - || "urn:dolby:dash:audio_channel_configuration:2011".equals(schemeIdUri) - ? parseDolbyChannelConfiguration(xpp) - : Format.NO_VALUE); + int audioChannels; + switch (schemeIdUri) { + case "urn:mpeg:dash:23003:3:audio_channel_configuration:2011": + audioChannels = parseInt(xpp, "value", Format.NO_VALUE); + break; + case "urn:mpeg:mpegB:cicp:ChannelConfiguration": + audioChannels = parseMpegChannelConfiguration(xpp); + break; + case "tag:dts.com,2014:dash:audio_channel_configuration:2012": + case "urn:dts:dash:audio_channel_configuration:2012": + audioChannels = parseDtsChannelConfiguration(xpp); + break; + case "tag:dts.com,2018:uhd:audio_channel_configuration": + audioChannels = parseDtsxChannelConfiguration(xpp); + break; + case "tag:dolby.com,2014:dash:audio_channel_configuration:2011": + case "urn:dolby:dash:audio_channel_configuration:2011": + audioChannels = parseDolbyChannelConfiguration(xpp); + break; + default: + audioChannels = Format.NO_VALUE; + break; + } do { xpp.next(); } while (!XmlPullParserUtil.isEndTag(xpp, "AudioChannelConfiguration")); @@ -1219,61 +1502,74 @@ protected int parseAudioChannelConfiguration(XmlPullParser xpp) // Selection flag parsing. 
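// --- Editorial sketch (not part of the patch) ---------------------------------------------
// parseAvailabilityTimeOffsetUs above collapses the optional availabilityTimeOffset attribute
// (a value in seconds, possibly "INF", possibly absent) into a microsecond offset, inheriting
// the parent element's value when the attribute is missing. A self-contained equivalent, with
// an illustrative constant standing in for C.MICROS_PER_SECOND:
final class AvailabilityTimeOffsetSketch {
  private static final long MICROS_PER_SECOND = 1_000_000L;

  static long toOffsetUs(String attributeValue, long parentOffsetUs) {
    if (attributeValue == null) {
      return parentOffsetUs; // Inherit from the parent BaseURL/SegmentBase element.
    }
    if ("INF".equals(attributeValue)) {
      return Long.MAX_VALUE; // All segments available immediately; later mapped to TIME_UNSET.
    }
    return (long) (Float.parseFloat(attributeValue) * MICROS_PER_SECOND);
  }
}
// Usage: toOffsetUs(null, 0) inherits 0; toOffsetUs("INF", 0) returns Long.MAX_VALUE.
// -------------------------------------------------------------------------------------------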
- protected int parseSelectionFlagsFromRoleDescriptors(List roleDescriptors) { + protected @C.SelectionFlags int parseSelectionFlagsFromRoleDescriptors( + List roleDescriptors) { + @C.SelectionFlags int result = 0; for (int i = 0; i < roleDescriptors.size(); i++) { Descriptor descriptor = roleDescriptors.get(i); - if ("urn:mpeg:dash:role:2011".equalsIgnoreCase(descriptor.schemeIdUri) - && "main".equals(descriptor.value)) { - return C.SELECTION_FLAG_DEFAULT; + if (Ascii.equalsIgnoreCase("urn:mpeg:dash:role:2011", descriptor.schemeIdUri)) { + result |= parseSelectionFlagsFromDashRoleScheme(descriptor.value); } } - return 0; + return result; + } + + protected @C.SelectionFlags int parseSelectionFlagsFromDashRoleScheme(@Nullable String value) { + if (value == null) { + return 0; + } + switch (value) { + case "forced_subtitle": + // Support both hyphen and underscore (https://github.com/google/ExoPlayer/issues/9727). + case "forced-subtitle": + return C.SELECTION_FLAG_FORCED; + default: + return 0; + } } // Role and Accessibility parsing. - @C.RoleFlags - protected int parseRoleFlagsFromRoleDescriptors(List roleDescriptors) { + protected @C.RoleFlags int parseRoleFlagsFromRoleDescriptors(List roleDescriptors) { @C.RoleFlags int result = 0; for (int i = 0; i < roleDescriptors.size(); i++) { Descriptor descriptor = roleDescriptors.get(i); - if ("urn:mpeg:dash:role:2011".equalsIgnoreCase(descriptor.schemeIdUri)) { - result |= parseDashRoleSchemeValue(descriptor.value); + if (Ascii.equalsIgnoreCase("urn:mpeg:dash:role:2011", descriptor.schemeIdUri)) { + result |= parseRoleFlagsFromDashRoleScheme(descriptor.value); } } return result; } - @C.RoleFlags - protected int parseRoleFlagsFromAccessibilityDescriptors( + protected @C.RoleFlags int parseRoleFlagsFromAccessibilityDescriptors( List accessibilityDescriptors) { @C.RoleFlags int result = 0; for (int i = 0; i < accessibilityDescriptors.size(); i++) { Descriptor descriptor = accessibilityDescriptors.get(i); - if ("urn:mpeg:dash:role:2011".equalsIgnoreCase(descriptor.schemeIdUri)) { - result |= parseDashRoleSchemeValue(descriptor.value); - } else if ("urn:tva:metadata:cs:AudioPurposeCS:2007" - .equalsIgnoreCase(descriptor.schemeIdUri)) { + if (Ascii.equalsIgnoreCase("urn:mpeg:dash:role:2011", descriptor.schemeIdUri)) { + result |= parseRoleFlagsFromDashRoleScheme(descriptor.value); + } else if (Ascii.equalsIgnoreCase( + "urn:tva:metadata:cs:AudioPurposeCS:2007", descriptor.schemeIdUri)) { result |= parseTvaAudioPurposeCsValue(descriptor.value); } } return result; } - @C.RoleFlags - protected int parseRoleFlagsFromProperties(List accessibilityDescriptors) { + protected @C.RoleFlags int parseRoleFlagsFromProperties( + List accessibilityDescriptors) { @C.RoleFlags int result = 0; for (int i = 0; i < accessibilityDescriptors.size(); i++) { Descriptor descriptor = accessibilityDescriptors.get(i); - if ("http://dashif.org/guidelines/trickmode".equalsIgnoreCase(descriptor.schemeIdUri)) { + if (Ascii.equalsIgnoreCase( + "http://dashif.org/guidelines/trickmode", descriptor.schemeIdUri)) { result |= C.ROLE_FLAG_TRICK_PLAY; } } return result; } - @C.RoleFlags - protected int parseDashRoleSchemeValue(@Nullable String value) { + protected @C.RoleFlags int parseRoleFlagsFromDashRoleScheme(@Nullable String value) { if (value == null) { return 0; } @@ -1292,6 +1588,9 @@ protected int parseDashRoleSchemeValue(@Nullable String value) { return C.ROLE_FLAG_EMERGENCY; case "caption": return C.ROLE_FLAG_CAPTION; + case "forced_subtitle": + // Support both hyphen and 
underscore (https://github.com/google/ExoPlayer/issues/9727). + case "forced-subtitle": case "subtitle": return C.ROLE_FLAG_SUBTITLE; case "sign": @@ -1305,8 +1604,7 @@ protected int parseDashRoleSchemeValue(@Nullable String value) { } } - @C.RoleFlags - protected int parseTvaAudioPurposeCsValue(@Nullable String value) { + protected @C.RoleFlags int parseTvaAudioPurposeCsValue(@Nullable String value) { if (value == null) { return 0; } @@ -1326,6 +1624,49 @@ protected int parseTvaAudioPurposeCsValue(@Nullable String value) { } } + protected String[] parseProfiles(XmlPullParser xpp, String attributeName, String[] defaultValue) { + @Nullable String attributeValue = xpp.getAttributeValue(/* namespace= */ null, attributeName); + if (attributeValue == null) { + return defaultValue; + } + return attributeValue.split(","); + } + + // Thumbnail tile information parsing + + /** + * Parses given descriptors for thumbnail tile information. + * + * @param essentialProperties List of descriptors that contain thumbnail tile information. + * @return A pair of Integer values, where the first is the count of horizontal tiles and the + * second is the count of vertical tiles, or null if no thumbnail tile information is found. + */ + @Nullable + protected Pair parseTileCountFromProperties( + List essentialProperties) { + for (int i = 0; i < essentialProperties.size(); i++) { + Descriptor descriptor = essentialProperties.get(i); + if ((Ascii.equalsIgnoreCase("http://dashif.org/thumbnail_tile", descriptor.schemeIdUri) + || Ascii.equalsIgnoreCase( + "http://dashif.org/guidelines/thumbnail_tile", descriptor.schemeIdUri)) + && descriptor.value != null) { + String size = descriptor.value; + String[] sizeSplit = Util.split(size, "x"); + if (sizeSplit.length != 2) { + continue; + } + try { + int tileCountHorizontal = Integer.parseInt(sizeSplit[0]); + int tileCountVertical = Integer.parseInt(sizeSplit[1]); + return Pair.create(tileCountHorizontal, tileCountVertical); + } catch (NumberFormatException e) { + // Ignore property if it's malformed. + } + } + } + return null; + } + // Utility methods. /** @@ -1351,9 +1692,7 @@ public static void maybeSkipTag(XmlPullParser xpp) throws IOException, XmlPullPa } } - /** - * Removes unnecessary {@link SchemeData}s with null {@link SchemeData#data}. - */ + /** Removes unnecessary {@link SchemeData}s with null {@link SchemeData#data}. */ private static void filterRedundantIncompleteSchemeDatas(ArrayList schemeDatas) { for (int i = schemeDatas.size() - 1; i >= 0; i--) { SchemeData schemeData = schemeDatas.get(i); @@ -1370,6 +1709,32 @@ private static void filterRedundantIncompleteSchemeDatas(ArrayList s } } + private static void fillInClearKeyInformation(ArrayList schemeDatas) { + // Find and remove ClearKey information. + @Nullable String clearKeyLicenseServerUrl = null; + for (int i = 0; i < schemeDatas.size(); i++) { + SchemeData schemeData = schemeDatas.get(i); + if (C.CLEARKEY_UUID.equals(schemeData.uuid) && schemeData.licenseServerUrl != null) { + clearKeyLicenseServerUrl = schemeData.licenseServerUrl; + schemeDatas.remove(i); + break; + } + } + if (clearKeyLicenseServerUrl == null) { + return; + } + // Fill in the ClearKey information into the existing PSSH schema data if applicable. 
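// (Editorial note, not part of the patch.) The loop that follows rewrites every Common PSSH
// SchemeData that has no licenseServerUrl of its own into a ClearKey SchemeData that keeps the
// original mimeType and pssh payload but carries the license URL taken from the clearkey:Laurl
// element that was extracted and removed just above.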
+ for (int i = 0; i < schemeDatas.size(); i++) { + SchemeData schemeData = schemeDatas.get(i); + if (C.COMMON_PSSH_UUID.equals(schemeData.uuid) && schemeData.licenseServerUrl == null) { + schemeDatas.set( + i, + new SchemeData( + C.CLEARKEY_UUID, clearKeyLicenseServerUrl, schemeData.mimeType, schemeData.data)); + } + } + } + /** * Derives a sample mimeType from a container mimeType and codecs attribute. * @@ -1384,43 +1749,19 @@ private static String getSampleMimeType( return MimeTypes.getAudioMediaMimeType(codecs); } else if (MimeTypes.isVideo(containerMimeType)) { return MimeTypes.getVideoMediaMimeType(codecs); - } else if (mimeTypeIsRawText(containerMimeType)) { + } else if (MimeTypes.isText(containerMimeType)) { + // Text types are raw formats. + return containerMimeType; + } else if (MimeTypes.isImage(containerMimeType)) { + // Image types are raw formats. return containerMimeType; } else if (MimeTypes.APPLICATION_MP4.equals(containerMimeType)) { - if (codecs != null) { - if (codecs.startsWith("stpp")) { - return MimeTypes.APPLICATION_TTML; - } else if (codecs.startsWith("wvtt")) { - return MimeTypes.APPLICATION_MP4VTT; - } - } - } else if (MimeTypes.APPLICATION_RAWCC.equals(containerMimeType)) { - if (codecs != null) { - if (codecs.contains("cea708")) { - return MimeTypes.APPLICATION_CEA708; - } else if (codecs.contains("eia608") || codecs.contains("cea608")) { - return MimeTypes.APPLICATION_CEA608; - } - } - return null; + @Nullable String mimeType = MimeTypes.getMediaMimeType(codecs); + return MimeTypes.TEXT_VTT.equals(mimeType) ? MimeTypes.APPLICATION_MP4VTT : mimeType; } return null; } - /** - * Returns whether a mimeType is a text sample mimeType. - * - * @param mimeType The mimeType. - * @return Whether the mimeType is a text sample mimeType. - */ - private static boolean mimeTypeIsRawText(@Nullable String mimeType) { - return MimeTypes.isText(mimeType) - || MimeTypes.APPLICATION_TTML.equals(mimeType) - || MimeTypes.APPLICATION_MP4VTT.equals(mimeType) - || MimeTypes.APPLICATION_CEA708.equals(mimeType) - || MimeTypes.APPLICATION_CEA608.equals(mimeType); - } - /** * Checks two languages for consistency, returning the consistent language, or throwing an {@link * IllegalStateException} if the languages are inconsistent. @@ -1447,15 +1788,16 @@ private static String checkLanguageConsistency( /** * Checks two adaptation set content types for consistency, returning the consistent type, or * throwing an {@link IllegalStateException} if the types are inconsistent. - *
<p>
- * Two types are consistent if they are equal, or if one is {@link C#TRACK_TYPE_UNKNOWN}. - * Where one of the types is {@link C#TRACK_TYPE_UNKNOWN}, the other is returned. + * + * <p>
      Two types are consistent if they are equal, or if one is {@link C#TRACK_TYPE_UNKNOWN}. Where + * one of the types is {@link C#TRACK_TYPE_UNKNOWN}, the other is returned. * * @param firstType The first type. * @param secondType The second type. * @return The consistent type. */ - private static int checkContentTypeConsistency(int firstType, int secondType) { + private static int checkContentTypeConsistency( + @C.TrackType int firstType, @C.TrackType int secondType) { if (firstType == C.TRACK_TYPE_UNKNOWN) { return secondType; } else if (secondType == C.TRACK_TYPE_UNKNOWN) { @@ -1486,8 +1828,7 @@ protected static Descriptor parseDescriptor(XmlPullParser xpp, String tag) return new Descriptor(schemeIdUri, value, id); } - protected static int parseCea608AccessibilityChannel( - List accessibilityDescriptors) { + protected static int parseCea608AccessibilityChannel(List accessibilityDescriptors) { for (int i = 0; i < accessibilityDescriptors.size(); i++) { Descriptor descriptor = accessibilityDescriptors.get(i); if ("urn:scte:dash:cc:cea-608:2015".equals(descriptor.schemeIdUri) @@ -1503,8 +1844,7 @@ protected static int parseCea608AccessibilityChannel( return Format.NO_VALUE; } - protected static int parseCea708AccessibilityChannel( - List accessibilityDescriptors) { + protected static int parseCea708AccessibilityChannel(List accessibilityDescriptors) { for (int i = 0; i < accessibilityDescriptors.size(); i++) { Descriptor descriptor = accessibilityDescriptors.get(i); if ("urn:scte:dash:cc:cea-708:2015".equals(descriptor.schemeIdUri) @@ -1595,15 +1935,68 @@ protected static long parseLong(XmlPullParser xpp, String name, long defaultValu return value == null ? defaultValue : Long.parseLong(value); } + protected static float parseFloat(XmlPullParser xpp, String name, float defaultValue) { + String value = xpp.getAttributeValue(null, name); + return value == null ? defaultValue : Float.parseFloat(value); + } + protected static String parseString(XmlPullParser xpp, String name, String defaultValue) { String value = xpp.getAttributeValue(null, name); return value == null ? defaultValue : value; } /** - * Parses the number of channels from the value attribute of an AudioElementConfiguration with - * schemeIdUri "tag:dolby.com,2014:dash:audio_channel_configuration:2011", as defined by table E.5 - * in ETSI TS 102 366, or the legacy schemeIdUri + * Parses the number of channels from the value attribute of an AudioChannelConfiguration with + * schemeIdUri "urn:mpeg:mpegB:cicp:ChannelConfiguration", as defined by ISO 23001-8 clause 8.1. + * + * @param xpp The parser from which to read. + * @return The parsed number of channels, or {@link Format#NO_VALUE} if the channel count could + * not be parsed. + */ + protected static int parseMpegChannelConfiguration(XmlPullParser xpp) { + int index = parseInt(xpp, "value", C.INDEX_UNSET); + return 0 <= index && index < MPEG_CHANNEL_CONFIGURATION_MAPPING.length + ? MPEG_CHANNEL_CONFIGURATION_MAPPING[index] + : Format.NO_VALUE; + } + + /** + * Parses the number of channels from the value attribute of an AudioChannelConfiguration with + * schemeIdUri "tag:dts.com,2014:dash:audio_channel_configuration:2012" as defined by Annex G + * (3.2) in ETSI TS 102 114 V1.6.1, or by the legacy schemeIdUri + * "urn:dts:dash:audio_channel_configuration:2012". + * + * @param xpp The parser from which to read. + * @return The parsed number of channels, or {@link Format#NO_VALUE} if the channel count could + * not be parsed. 
+ */ + protected static int parseDtsChannelConfiguration(XmlPullParser xpp) { + int channelCount = parseInt(xpp, "value", Format.NO_VALUE); + return 0 < channelCount && channelCount < 33 ? channelCount : Format.NO_VALUE; + } + + /** + * Parses the number of channels from the value attribute of an AudioChannelConfiguration with + * schemeIdUri "tag:dts.com,2018:uhd:audio_channel_configuration" as defined by table B-5 in ETSI + * TS 103 491 v1.2.1. + * + * @param xpp The parser from which to read. + * @return The parsed number of channels, or {@link Format#NO_VALUE} if the channel count could + * not be parsed. + */ + protected static int parseDtsxChannelConfiguration(XmlPullParser xpp) { + @Nullable String value = xpp.getAttributeValue(null, "value"); + if (value == null) { + return Format.NO_VALUE; + } + int channelCount = Integer.bitCount(Integer.parseInt(value, /* radix= */ 16)); + return channelCount == 0 ? Format.NO_VALUE : channelCount; + } + + /** + * Parses the number of channels from the value attribute of an AudioChannelConfiguration with + * schemeIdUri "tag:dolby.com,2014:dash:audio_channel_configuration:2011" as defined by table E.5 + * in ETSI TS 102 366, or by the legacy schemeIdUri * "urn:dolby:dash:audio_channel_configuration:2011". * * @param xpp The parser from which to read. @@ -1611,11 +2004,11 @@ protected static String parseString(XmlPullParser xpp, String name, String defau * not be parsed. */ protected static int parseDolbyChannelConfiguration(XmlPullParser xpp) { - String value = Util.toLowerInvariant(xpp.getAttributeValue(null, "value")); + @Nullable String value = xpp.getAttributeValue(null, "value"); if (value == null) { return Format.NO_VALUE; } - switch (value) { + switch (Ascii.toLowerCase(value)) { case "4000": return 1; case "a000": @@ -1633,42 +2026,69 @@ protected static long parseLastSegmentNumberSupplementalProperty( List supplementalProperties) { for (int i = 0; i < supplementalProperties.size(); i++) { Descriptor descriptor = supplementalProperties.get(i); - if ("http://dashif.org/guidelines/last-segment-number" - .equalsIgnoreCase(descriptor.schemeIdUri)) { + if (Ascii.equalsIgnoreCase( + "http://dashif.org/guidelines/last-segment-number", descriptor.schemeIdUri)) { return Long.parseLong(descriptor.value); } } return C.INDEX_UNSET; } + private static long getFinalAvailabilityTimeOffset( + long baseUrlAvailabilityTimeOffsetUs, long segmentBaseAvailabilityTimeOffsetUs) { + long availabilityTimeOffsetUs = segmentBaseAvailabilityTimeOffsetUs; + if (availabilityTimeOffsetUs == C.TIME_UNSET) { + // Fall back to BaseURL values if no SegmentBase specifies an offset. + availabilityTimeOffsetUs = baseUrlAvailabilityTimeOffsetUs; + } + if (availabilityTimeOffsetUs == Long.MAX_VALUE) { + // Replace INF value with TIME_UNSET to specify that all segments are available immediately. + availabilityTimeOffsetUs = C.TIME_UNSET; + } + return availabilityTimeOffsetUs; + } + + private boolean isDvbProfileDeclared(String[] profiles) { + for (String profile : profiles) { + if (profile.startsWith("urn:dvb:dash:profile:dvb-dash:")) { + return true; + } + } + return false; + } + /** A parsed Representation element. 
*/ protected static final class RepresentationInfo { public final Format format; - public final String baseUrl; + public final ImmutableList baseUrls; public final SegmentBase segmentBase; @Nullable public final String drmSchemeType; public final ArrayList drmSchemeDatas; public final ArrayList inbandEventStreams; public final long revisionId; + public final List essentialProperties; + public final List supplementalProperties; public RepresentationInfo( Format format, - String baseUrl, + List baseUrls, SegmentBase segmentBase, @Nullable String drmSchemeType, ArrayList drmSchemeDatas, ArrayList inbandEventStreams, + List essentialProperties, + List supplementalProperties, long revisionId) { this.format = format; - this.baseUrl = baseUrl; + this.baseUrls = ImmutableList.copyOf(baseUrls); this.segmentBase = segmentBase; this.drmSchemeType = drmSchemeType; this.drmSchemeDatas = drmSchemeDatas; this.inbandEventStreams = inbandEventStreams; + this.essentialProperties = essentialProperties; + this.supplementalProperties = supplementalProperties; this.revisionId = revisionId; } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/Descriptor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/Descriptor.java index d68690d363..23c8ee368e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/Descriptor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/Descriptor.java @@ -18,20 +18,14 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.util.Util; -/** - * A descriptor, as defined by ISO 23009-1, 2nd edition, 5.8.2. - */ +/** A descriptor, as defined by ISO 23009-1, 2nd edition, 5.8.2. */ public final class Descriptor { /** The scheme URI. */ public final String schemeIdUri; - /** - * The value, or null. - */ + /** The value, or null. */ @Nullable public final String value; - /** - * The identifier, or null. - */ + /** The identifier, or null. */ @Nullable public final String id; /** @@ -54,7 +48,8 @@ public boolean equals(@Nullable Object obj) { return false; } Descriptor other = (Descriptor) obj; - return Util.areEqual(schemeIdUri, other.schemeIdUri) && Util.areEqual(value, other.value) + return Util.areEqual(schemeIdUri, other.schemeIdUri) + && Util.areEqual(value, other.value) && Util.areEqual(id, other.id); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/EventStream.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/EventStream.java index 8a4e1ad058..ddc2fe5d1a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/EventStream.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/EventStream.java @@ -17,37 +17,29 @@ import com.google.android.exoplayer2.metadata.emsg.EventMessage; -/** - * A DASH in-MPD EventStream element, as defined by ISO/IEC 23009-1, 2nd edition, section 5.10. - */ +/** A DASH in-MPD EventStream element, as defined by ISO/IEC 23009-1, 2nd edition, section 5.10. */ public final class EventStream { - /** - * {@link EventMessage}s in the event stream. - */ + /** {@link EventMessage}s in the event stream. */ public final EventMessage[] events; - /** - * Presentation time of the events in microsecond, sorted in ascending order. - */ + /** Presentation time of the events in microsecond, sorted in ascending order. 
*/ public final long[] presentationTimesUs; - /** - * The scheme URI. - */ + /** The scheme URI. */ public final String schemeIdUri; - /** - * The value of the event stream. Use empty string if not defined in manifest. - */ + /** The value of the event stream. Use empty string if not defined in manifest. */ public final String value; - /** - * The timescale in units per seconds, as defined in the manifest. - */ + /** The timescale in units per seconds, as defined in the manifest. */ public final long timescale; - public EventStream(String schemeIdUri, String value, long timescale, long[] presentationTimesUs, + public EventStream( + String schemeIdUri, + String value, + long timescale, + long[] presentationTimesUs, EventMessage[] events) { this.schemeIdUri = schemeIdUri; this.value = value; @@ -56,11 +48,8 @@ public EventStream(String schemeIdUri, String value, long timescale, long[] pres this.events = events; } - /** - * A constructed id of this {@link EventStream}. Equal to {@code schemeIdUri + "/" + value}. - */ + /** A constructed id of this {@link EventStream}. Equal to {@code schemeIdUri + "/" + value}. */ public String id() { return schemeIdUri + "/" + value; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/Period.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/Period.java index b472aed50c..449803ea1e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/Period.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/Period.java @@ -20,29 +20,19 @@ import java.util.Collections; import java.util.List; -/** - * Encapsulates media content components over a contiguous period of time. - */ +/** Encapsulates media content components over a contiguous period of time. */ public class Period { - /** - * The period identifier, if one exists. - */ + /** The period identifier, if one exists. */ @Nullable public final String id; - /** - * The start time of the period in milliseconds. - */ + /** The start time of the period in milliseconds, relative to the start of the manifest. */ public final long startMs; - /** - * The adaptation sets belonging to the period. - */ + /** The adaptation sets belonging to the period. */ public final List adaptationSets; - /** - * The event stream belonging to the period. - */ + /** The event stream belonging to the period. */ public final List eventStreams; /** The asset identifier for this period, if one exists */ @@ -63,7 +53,10 @@ public Period(@Nullable String id, long startMs, List adaptationS * @param adaptationSets The adaptation sets belonging to the period. * @param eventStreams The {@link EventStream}s belonging to the period. 
*/ - public Period(@Nullable String id, long startMs, List adaptationSets, + public Period( + @Nullable String id, + long startMs, + List adaptationSets, List eventStreams) { this(id, startMs, adaptationSets, eventStreams, /* assetIdentifier= */ null); } @@ -104,5 +97,4 @@ public int getAdaptationSetIndex(int type) { } return C.INDEX_UNSET; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/ProgramInformation.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/ProgramInformation.java index ac264bd2b1..5c434adb4a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/ProgramInformation.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/ProgramInformation.java @@ -19,7 +19,7 @@ import com.google.android.exoplayer2.util.Util; /** A parsed program information element. */ -public class ProgramInformation { +public final class ProgramInformation { /** The title for the media presentation. */ @Nullable public final String title; @@ -53,7 +53,7 @@ public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (obj == null || getClass() != obj.getClass()) { + if (!(obj instanceof ProgramInformation)) { return false; } ProgramInformation other = (ProgramInformation) obj; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/RangedUri.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/RangedUri.java index bcd783f0cb..1637b2625a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/RangedUri.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/RangedUri.java @@ -20,19 +20,13 @@ import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.util.UriUtil; -/** - * Defines a range of data located at a reference uri. - */ +/** Defines a range of data located at a reference uri. */ public final class RangedUri { - /** - * The (zero based) index of the first byte of the range. - */ + /** The (zero based) index of the first byte of the range. */ public final long start; - /** - * The length of the range, or {@link C#LENGTH_UNSET} to indicate that the range is unbounded. - */ + /** The length of the range, or {@link C#LENGTH_UNSET} to indicate that the range is unbounded. */ public final long length; private final String referenceUri; @@ -92,10 +86,14 @@ public RangedUri attemptMerge(@Nullable RangedUri other, String baseUri) { if (other == null || !resolvedUri.equals(other.resolveUriString(baseUri))) { return null; } else if (length != C.LENGTH_UNSET && start + length == other.start) { - return new RangedUri(resolvedUri, start, + return new RangedUri( + resolvedUri, + start, other.length == C.LENGTH_UNSET ? C.LENGTH_UNSET : length + other.length); } else if (other.length != C.LENGTH_UNSET && other.start + other.length == start) { - return new RangedUri(resolvedUri, other.start, + return new RangedUri( + resolvedUri, + other.start, length == C.LENGTH_UNSET ? 
C.LENGTH_UNSET : other.length + length); } else { return null; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/Representation.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/Representation.java index 80ad15cd8f..62787cee4c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/Representation.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/Representation.java @@ -15,24 +15,24 @@ */ package com.google.android.exoplayer2.source.dash.manifest; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + import android.net.Uri; import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.source.dash.DashSegmentIndex; import com.google.android.exoplayer2.source.dash.manifest.SegmentBase.MultiSegmentBase; import com.google.android.exoplayer2.source.dash.manifest.SegmentBase.SingleSegmentBase; +import com.google.common.collect.ImmutableList; import java.util.Collections; import java.util.List; -/** - * A DASH representation. - */ +/** A DASH representation. */ public abstract class Representation { - /** - * A default value for {@link #revisionId}. - */ + /** A default value for {@link #revisionId}. */ public static final long REVISION_ID_DEFAULT = -1; /** @@ -42,20 +42,18 @@ public abstract class Representation { * often a suitable. */ public final long revisionId; - /** - * The format of the representation. - */ + /** The format of the representation. */ public final Format format; - /** - * The base URL of the representation. - */ - public final String baseUrl; - /** - * The offset of the presentation timestamps in the media stream relative to media time. - */ + /** The base URLs of the representation. */ + public final ImmutableList baseUrls; + /** The offset of the presentation timestamps in the media stream relative to media time. */ public final long presentationTimeOffsetUs; /** The in-band event streams in the representation. May be empty. */ public final List inbandEventStreams; + /** Essential properties in the representation. May be empty. */ + public final List essentialProperties; + /** Supplemental properties in the adaptation set. May be empty. */ + public final List supplementalProperties; private final RangedUri initializationUri; @@ -64,33 +62,21 @@ public abstract class Representation { * * @param revisionId Identifies the revision of the content. * @param format The format of the representation. - * @param baseUrl The base URL. + * @param baseUrls The list of base URLs of the representation. * @param segmentBase A segment base element for the representation. * @return The constructed instance. */ public static Representation newInstance( - long revisionId, Format format, String baseUrl, SegmentBase segmentBase) { - return newInstance(revisionId, format, baseUrl, segmentBase, /* inbandEventStreams= */ null); - } - - /** - * Constructs a new instance. - * - * @param revisionId Identifies the revision of the content. - * @param format The format of the representation. - * @param baseUrl The base URL. - * @param segmentBase A segment base element for the representation. - * @param inbandEventStreams The in-band event streams in the representation. May be null. - * @return The constructed instance. 
- */ - public static Representation newInstance( - long revisionId, - Format format, - String baseUrl, - SegmentBase segmentBase, - @Nullable List inbandEventStreams) { + long revisionId, Format format, List baseUrls, SegmentBase segmentBase) { return newInstance( - revisionId, format, baseUrl, segmentBase, inbandEventStreams, /* cacheKey= */ null); + revisionId, + format, + baseUrls, + segmentBase, + /* inbandEventStreams= */ null, + /* essentialProperties= */ ImmutableList.of(), + /* supplementalProperties= */ ImmutableList.of(), + /* cacheKey= */ null); } /** @@ -98,9 +84,11 @@ public static Representation newInstance( * * @param revisionId Identifies the revision of the content. * @param format The format of the representation. - * @param baseUrl The base URL of the representation. + * @param baseUrls The list of base URLs of the representation. * @param segmentBase A segment base element for the representation. * @param inbandEventStreams The in-band event streams in the representation. May be null. + * @param essentialProperties Essential properties in the representation. May be empty. + * @param supplementalProperties Supplemental properties in the representation. May be empty. * @param cacheKey An optional key to be returned from {@link #getCacheKey()}, or null. This * parameter is ignored if {@code segmentBase} consists of multiple segments. * @return The constructed instance. @@ -108,41 +96,56 @@ public static Representation newInstance( public static Representation newInstance( long revisionId, Format format, - String baseUrl, + List baseUrls, SegmentBase segmentBase, @Nullable List inbandEventStreams, + List essentialProperties, + List supplementalProperties, @Nullable String cacheKey) { if (segmentBase instanceof SingleSegmentBase) { return new SingleSegmentRepresentation( revisionId, format, - baseUrl, + baseUrls, (SingleSegmentBase) segmentBase, inbandEventStreams, + essentialProperties, + supplementalProperties, cacheKey, - C.LENGTH_UNSET); + /* contentLength= */ C.LENGTH_UNSET); } else if (segmentBase instanceof MultiSegmentBase) { return new MultiSegmentRepresentation( - revisionId, format, baseUrl, (MultiSegmentBase) segmentBase, inbandEventStreams); + revisionId, + format, + baseUrls, + (MultiSegmentBase) segmentBase, + inbandEventStreams, + essentialProperties, + supplementalProperties); } else { - throw new IllegalArgumentException("segmentBase must be of type SingleSegmentBase or " - + "MultiSegmentBase"); + throw new IllegalArgumentException( + "segmentBase must be of type SingleSegmentBase or " + "MultiSegmentBase"); } } private Representation( long revisionId, Format format, - String baseUrl, + List baseUrls, SegmentBase segmentBase, - @Nullable List inbandEventStreams) { + @Nullable List inbandEventStreams, + List essentialProperties, + List supplementalProperties) { + checkArgument(!baseUrls.isEmpty()); this.revisionId = revisionId; this.format = format; - this.baseUrl = baseUrl; + this.baseUrls = ImmutableList.copyOf(baseUrls); this.inbandEventStreams = inbandEventStreams == null ? Collections.emptyList() : Collections.unmodifiableList(inbandEventStreams); + this.essentialProperties = essentialProperties; + this.supplementalProperties = supplementalProperties; initializationUri = segmentBase.getInitialization(this); presentationTimeOffsetUs = segmentBase.getPresentationTimeOffsetUs(); } @@ -171,19 +174,12 @@ public RangedUri getInitializationUri() { @Nullable public abstract String getCacheKey(); - /** - * A DASH representation consisting of a single segment. 
- */ + /** A DASH representation consisting of a single segment. */ public static class SingleSegmentRepresentation extends Representation { - /** - * The uri of the single segment. - */ + /** The uri of the single segment. */ public final Uri uri; - - /** - * The content length, or {@link C#LENGTH_UNSET} if unknown. - */ + /** The content length, or {@link C#LENGTH_UNSET} if unknown. */ public final long contentLength; @Nullable private final String cacheKey; @@ -213,40 +209,60 @@ public static SingleSegmentRepresentation newInstance( List inbandEventStreams, @Nullable String cacheKey, long contentLength) { - RangedUri rangedUri = new RangedUri(null, initializationStart, - initializationEnd - initializationStart + 1); - SingleSegmentBase segmentBase = new SingleSegmentBase(rangedUri, 1, 0, indexStart, - indexEnd - indexStart + 1); + RangedUri rangedUri = + new RangedUri(null, initializationStart, initializationEnd - initializationStart + 1); + SingleSegmentBase segmentBase = + new SingleSegmentBase(rangedUri, 1, 0, indexStart, indexEnd - indexStart + 1); + ImmutableList baseUrls = ImmutableList.of(new BaseUrl(uri)); return new SingleSegmentRepresentation( - revisionId, format, uri, segmentBase, inbandEventStreams, cacheKey, contentLength); + revisionId, + format, + baseUrls, + segmentBase, + inbandEventStreams, + /* essentialProperties= */ ImmutableList.of(), + /* supplementalProperties= */ ImmutableList.of(), + cacheKey, + contentLength); } /** * @param revisionId Identifies the revision of the content. * @param format The format of the representation. - * @param baseUrl The base URL of the representation. + * @param baseUrls The base urls of the representation. * @param segmentBase The segment base underlying the representation. * @param inbandEventStreams The in-band event streams in the representation. May be null. + * @param essentialProperties Essential properties in the representation. May be empty. + * @param supplementalProperties Supplemental properties in the representation. May be empty. * @param cacheKey An optional key to be returned from {@link #getCacheKey()}, or null. * @param contentLength The content length, or {@link C#LENGTH_UNSET} if unknown. */ public SingleSegmentRepresentation( long revisionId, Format format, - String baseUrl, + List baseUrls, SingleSegmentBase segmentBase, @Nullable List inbandEventStreams, + List essentialProperties, + List supplementalProperties, @Nullable String cacheKey, long contentLength) { - super(revisionId, format, baseUrl, segmentBase, inbandEventStreams); - this.uri = Uri.parse(baseUrl); + super( + revisionId, + format, + baseUrls, + segmentBase, + inbandEventStreams, + essentialProperties, + supplementalProperties); + this.uri = Uri.parse(baseUrls.get(0).url); this.indexUri = segmentBase.getIndex(); this.cacheKey = cacheKey; this.contentLength = contentLength; // If we have an index uri then the index is defined externally, and we shouldn't return one // directly. If we don't, then we can't do better than an index defining a single segment. - segmentIndex = indexUri != null ? null - : new SingleSegmentIndex(new RangedUri(null, 0, contentLength)); + segmentIndex = + indexUri != null ? null : new SingleSegmentIndex(new RangedUri(null, 0, contentLength)); } @Override @@ -266,31 +282,41 @@ public DashSegmentIndex getIndex() { public String getCacheKey() { return cacheKey; } - } - /** - * A DASH representation consisting of multiple segments. - */ + /** A DASH representation consisting of multiple segments. 
*/ public static class MultiSegmentRepresentation extends Representation implements DashSegmentIndex { - private final MultiSegmentBase segmentBase; + @VisibleForTesting /* package */ final MultiSegmentBase segmentBase; /** + * Creates the multi-segment Representation. + * * @param revisionId Identifies the revision of the content. * @param format The format of the representation. - * @param baseUrl The base URL of the representation. + * @param baseUrls The base URLs of the representation. * @param segmentBase The segment base underlying the representation. * @param inbandEventStreams The in-band event streams in the representation. May be null. + * @param essentialProperties Essential properties in the representation. May be empty. + * @param supplementalProperties Supplemental properties in the representation. May be empty. */ public MultiSegmentRepresentation( long revisionId, Format format, - String baseUrl, + List baseUrls, MultiSegmentBase segmentBase, - @Nullable List inbandEventStreams) { - super(revisionId, format, baseUrl, segmentBase, inbandEventStreams); + @Nullable List inbandEventStreams, + List essentialProperties, + List supplementalProperties) { + super( + revisionId, + format, + baseUrls, + segmentBase, + inbandEventStreams, + essentialProperties, + supplementalProperties); this.segmentBase = segmentBase; } @@ -314,8 +340,8 @@ public String getCacheKey() { // DashSegmentIndex implementation. @Override - public RangedUri getSegmentUrl(long segmentIndex) { - return segmentBase.getSegmentUrl(this, segmentIndex); + public RangedUri getSegmentUrl(long segmentNum) { + return segmentBase.getSegmentUrl(this, segmentNum); } @Override @@ -324,13 +350,13 @@ public long getSegmentNum(long timeUs, long periodDurationUs) { } @Override - public long getTimeUs(long segmentIndex) { - return segmentBase.getSegmentTimeUs(segmentIndex); + public long getTimeUs(long segmentNum) { + return segmentBase.getSegmentTimeUs(segmentNum); } @Override - public long getDurationUs(long segmentIndex, long periodDurationUs) { - return segmentBase.getSegmentDurationUs(segmentIndex, periodDurationUs); + public long getDurationUs(long segmentNum, long periodDurationUs) { + return segmentBase.getSegmentDurationUs(segmentNum, periodDurationUs); } @Override @@ -339,15 +365,28 @@ public long getFirstSegmentNum() { } @Override - public int getSegmentCount(long periodDurationUs) { + public long getFirstAvailableSegmentNum(long periodDurationUs, long nowUnixTimeUs) { + return segmentBase.getFirstAvailableSegmentNum(periodDurationUs, nowUnixTimeUs); + } + + @Override + public long getSegmentCount(long periodDurationUs) { return segmentBase.getSegmentCount(periodDurationUs); } + @Override + public long getAvailableSegmentCount(long periodDurationUs, long nowUnixTimeUs) { + return segmentBase.getAvailableSegmentCount(periodDurationUs, nowUnixTimeUs); + } + + @Override + public long getNextSegmentAvailableTimeUs(long periodDurationUs, long nowUnixTimeUs) { + return segmentBase.getNextSegmentAvailableTimeUs(periodDurationUs, nowUnixTimeUs); + } + @Override public boolean isExplicit() { return segmentBase.isExplicit(); } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/SegmentBase.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/SegmentBase.java index db7c8d6471..754d5fc99d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/SegmentBase.java +++ 
b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/SegmentBase.java @@ -15,18 +15,24 @@ */ package com.google.android.exoplayer2.source.dash.manifest; +import static com.google.android.exoplayer2.source.dash.DashSegmentIndex.INDEX_UNBOUNDED; +import static java.lang.Math.max; +import static java.lang.Math.min; + import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.source.dash.DashSegmentIndex; import com.google.android.exoplayer2.util.Util; +import com.google.common.math.BigIntegerMath; +import java.math.BigInteger; +import java.math.RoundingMode; import java.util.List; -/** - * An approximate representation of a SegmentBase manifest element. - */ +/** An approximate representation of a SegmentBase manifest element. */ public abstract class SegmentBase { - /* package */ @Nullable final RangedUri initialization; + @Nullable /* package */ final RangedUri initialization; /* package */ final long timescale; /* package */ final long presentationTimeOffset; @@ -56,16 +62,12 @@ public RangedUri getInitialization(Representation representation) { return initialization; } - /** - * Returns the presentation time offset, in microseconds. - */ + /** Returns the presentation time offset, in microseconds. */ public long getPresentationTimeOffsetUs() { return Util.scaleLargeTimestamp(presentationTimeOffset, C.MICROS_PER_SECOND, timescale); } - /** - * A {@link SegmentBase} that defines a single segment. - */ + /** A {@link SegmentBase} that defines a single segment. */ public static class SingleSegmentBase extends SegmentBase { /* package */ final long indexStart; @@ -106,17 +108,25 @@ public RangedUri getIndex() { ? null : new RangedUri(/* referenceUri= */ null, indexStart, indexLength); } - } - /** - * A {@link SegmentBase} that consists of multiple segments. - */ + /** A {@link SegmentBase} that consists of multiple segments. */ public abstract static class MultiSegmentBase extends SegmentBase { /* package */ final long startNumber; /* package */ final long duration; - /* package */ @Nullable final List segmentTimeline; + @Nullable /* package */ final List segmentTimeline; + private final long timeShiftBufferDepthUs; + private final long periodStartUnixTimeUs; + + /** + * Offset to the current realtime at which segments become available, in microseconds, or {@link + * C#TIME_UNSET} if all segments are available immediately. + * + *
<p>
      Segments will be available once their end time ≤ currentRealTime + + * availabilityTimeOffset. + */ + @VisibleForTesting /* package */ final long availabilityTimeOffsetUs; /** * @param initialization A {@link RangedUri} corresponding to initialization data, if such data @@ -131,6 +141,11 @@ public abstract static class MultiSegmentBase extends SegmentBase { * @param segmentTimeline A segment timeline corresponding to the segments. If null, then * segments are assumed to be of fixed duration as specified by the {@code duration} * parameter. + * @param availabilityTimeOffsetUs The offset to the current realtime at which segments become + * available in microseconds, or {@link C#TIME_UNSET} if not applicable. + * @param timeShiftBufferDepthUs The time shift buffer depth in microseconds. + * @param periodStartUnixTimeUs The start of the enclosing period in microseconds since the Unix + * epoch. */ public MultiSegmentBase( @Nullable RangedUri initialization, @@ -138,14 +153,20 @@ public MultiSegmentBase( long presentationTimeOffset, long startNumber, long duration, - @Nullable List segmentTimeline) { + @Nullable List segmentTimeline, + long availabilityTimeOffsetUs, + long timeShiftBufferDepthUs, + long periodStartUnixTimeUs) { super(initialization, timescale, presentationTimeOffset); this.startNumber = startNumber; this.duration = duration; this.segmentTimeline = segmentTimeline; + this.availabilityTimeOffsetUs = availabilityTimeOffsetUs; + this.timeShiftBufferDepthUs = timeShiftBufferDepthUs; + this.periodStartUnixTimeUs = periodStartUnixTimeUs; } - /** @see DashSegmentIndex#getSegmentNum(long, long) */ + /** See {@link DashSegmentIndex#getSegmentNum(long, long)}. */ public long getSegmentNum(long timeUs, long periodDurationUs) { final long firstSegmentNum = getFirstSegmentNum(); final long segmentCount = getSegmentCount(periodDurationUs); @@ -157,9 +178,11 @@ public long getSegmentNum(long timeUs, long periodDurationUs) { long durationUs = (duration * C.MICROS_PER_SECOND) / timescale; long segmentNum = startNumber + timeUs / durationUs; // Ensure we stay within bounds. - return segmentNum < firstSegmentNum ? firstSegmentNum - : segmentCount == DashSegmentIndex.INDEX_UNBOUNDED ? segmentNum - : Math.min(segmentNum, firstSegmentNum + segmentCount - 1); + return segmentNum < firstSegmentNum + ? firstSegmentNum + : segmentCount == INDEX_UNBOUNDED + ? segmentNum + : min(segmentNum, firstSegmentNum + segmentCount - 1); } else { // The index cannot be unbounded. Identify the segment using binary search. long lowIndex = firstSegmentNum; @@ -179,21 +202,21 @@ public long getSegmentNum(long timeUs, long periodDurationUs) { } } - /** @see DashSegmentIndex#getDurationUs(long, long) */ + /** See {@link DashSegmentIndex#getDurationUs(long, long)}. */ public final long getSegmentDurationUs(long sequenceNumber, long periodDurationUs) { if (segmentTimeline != null) { long duration = segmentTimeline.get((int) (sequenceNumber - startNumber)).duration; return (duration * C.MICROS_PER_SECOND) / timescale; } else { - int segmentCount = getSegmentCount(periodDurationUs); - return segmentCount != DashSegmentIndex.INDEX_UNBOUNDED - && sequenceNumber == (getFirstSegmentNum() + segmentCount - 1) + long segmentCount = getSegmentCount(periodDurationUs); + return segmentCount != INDEX_UNBOUNDED + && sequenceNumber == (getFirstSegmentNum() + segmentCount - 1) ? 
(periodDurationUs - getSegmentTimeUs(sequenceNumber)) : ((duration * C.MICROS_PER_SECOND) / timescale); } } - /** @see DashSegmentIndex#getTimeUs(long) */ + /** See {@link DashSegmentIndex#getTimeUs(long)}. */ public final long getSegmentTimeUs(long sequenceNumber) { long unscaledSegmentTime; if (segmentTimeline != null) { @@ -210,35 +233,72 @@ public final long getSegmentTimeUs(long sequenceNumber) { * Returns a {@link RangedUri} defining the location of a segment for the given index in the * given representation. * - * @see DashSegmentIndex#getSegmentUrl(long) + *
<p>
      See {@link DashSegmentIndex#getSegmentUrl(long)}. */ public abstract RangedUri getSegmentUrl(Representation representation, long index); - /** @see DashSegmentIndex#getFirstSegmentNum() */ + /** See {@link DashSegmentIndex#getFirstSegmentNum()}. */ public long getFirstSegmentNum() { return startNumber; } - /** - * @see DashSegmentIndex#getSegmentCount(long) - */ - public abstract int getSegmentCount(long periodDurationUs); + /** See {@link DashSegmentIndex#getFirstAvailableSegmentNum(long, long)}. */ + public long getFirstAvailableSegmentNum(long periodDurationUs, long nowUnixTimeUs) { + long segmentCount = getSegmentCount(periodDurationUs); + if (segmentCount != INDEX_UNBOUNDED || timeShiftBufferDepthUs == C.TIME_UNSET) { + return getFirstSegmentNum(); + } + // The index is itself unbounded. We need to use the current time to calculate the range of + // available segments. + long liveEdgeTimeInPeriodUs = nowUnixTimeUs - periodStartUnixTimeUs; + long timeShiftBufferStartInPeriodUs = liveEdgeTimeInPeriodUs - timeShiftBufferDepthUs; + long timeShiftBufferStartSegmentNum = + getSegmentNum(timeShiftBufferStartInPeriodUs, periodDurationUs); + return max(getFirstSegmentNum(), timeShiftBufferStartSegmentNum); + } - /** - * @see DashSegmentIndex#isExplicit() - */ + /** See {@link DashSegmentIndex#getAvailableSegmentCount(long, long)}. */ + public long getAvailableSegmentCount(long periodDurationUs, long nowUnixTimeUs) { + long segmentCount = getSegmentCount(periodDurationUs); + if (segmentCount != INDEX_UNBOUNDED) { + return segmentCount; + } + // The index is itself unbounded. We need to use the current time to calculate the range of + // available segments. + long liveEdgeTimeInPeriodUs = nowUnixTimeUs - periodStartUnixTimeUs; + long availabilityTimeOffsetUs = liveEdgeTimeInPeriodUs + this.availabilityTimeOffsetUs; + // getSegmentNum(availabilityTimeOffsetUs) will not be completed yet. + long firstIncompleteSegmentNum = getSegmentNum(availabilityTimeOffsetUs, periodDurationUs); + long firstAvailableSegmentNum = getFirstAvailableSegmentNum(periodDurationUs, nowUnixTimeUs); + return (int) (firstIncompleteSegmentNum - firstAvailableSegmentNum); + } + + /** See {@link DashSegmentIndex#getNextSegmentAvailableTimeUs(long, long)}. */ + public long getNextSegmentAvailableTimeUs(long periodDurationUs, long nowUnixTimeUs) { + if (segmentTimeline != null) { + return C.TIME_UNSET; + } + long firstIncompleteSegmentNum = + getFirstAvailableSegmentNum(periodDurationUs, nowUnixTimeUs) + + getAvailableSegmentCount(periodDurationUs, nowUnixTimeUs); + return getSegmentTimeUs(firstIncompleteSegmentNum) + + getSegmentDurationUs(firstIncompleteSegmentNum, periodDurationUs) + - availabilityTimeOffsetUs; + } + + /** See {@link DashSegmentIndex#isExplicit()} */ public boolean isExplicit() { return segmentTimeline != null; } + /** See {@link DashSegmentIndex#getSegmentCount(long)}. */ + public abstract long getSegmentCount(long periodDurationUs); } - /** - * A {@link MultiSegmentBase} that uses a SegmentList to define its segments. - */ - public static class SegmentList extends MultiSegmentBase { + /** A {@link MultiSegmentBase} that uses a SegmentList to define its segments. 
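The availability methods introduced above derive the live window from wall-clock time: the first available segment is found by walking back timeShiftBufferDepthUs from the live edge, and the available count runs up to the first segment that has not yet completed at currentRealTime + availabilityTimeOffset. A self-contained sketch of that arithmetic for the simple fixed-duration case (startNumber 0, made-up timing values, not the class itself):

public final class LiveWindowSketch {
  public static void main(String[] args) {
    // Assumed values: 4 s segments, 60 s time-shift buffer, segments usable 2 s early.
    long segmentDurationUs = 4_000_000L;
    long timeShiftBufferDepthUs = 60_000_000L;
    long availabilityTimeOffsetUs = 2_000_000L;
    long periodStartUnixTimeUs = 1_600_000_000_000_000L;
    long nowUnixTimeUs = periodStartUnixTimeUs + 600_000_000L; // 10 minutes into the period.

    long liveEdgeTimeInPeriodUs = nowUnixTimeUs - periodStartUnixTimeUs;
    // Oldest segment still inside the time-shift buffer.
    long firstAvailableSegmentNum =
        Math.max(0L, (liveEdgeTimeInPeriodUs - timeShiftBufferDepthUs) / segmentDurationUs);
    // First segment that is not yet complete at the offset live edge.
    long firstIncompleteSegmentNum =
        (liveEdgeTimeInPeriodUs + availabilityTimeOffsetUs) / segmentDurationUs;
    long availableSegmentCount = firstIncompleteSegmentNum - firstAvailableSegmentNum;
    // Prints: segments 135..149 are available (15)
    System.out.println(
        "segments " + firstAvailableSegmentNum + ".." + (firstIncompleteSegmentNum - 1)
            + " are available (" + availableSegmentCount + ")");
  }
}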
*/ + public static final class SegmentList extends MultiSegmentBase { - /* package */ @Nullable final List mediaSegments; + @Nullable /* package */ final List mediaSegments; /** * @param initialization A {@link RangedUri} corresponding to initialization data, if such data @@ -253,7 +313,12 @@ public static class SegmentList extends MultiSegmentBase { * @param segmentTimeline A segment timeline corresponding to the segments. If null, then * segments are assumed to be of fixed duration as specified by the {@code duration} * parameter. + * @param availabilityTimeOffsetUs The offset to the current realtime at which segments become + * available in microseconds, or {@link C#TIME_UNSET} if not applicable. * @param mediaSegments A list of {@link RangedUri}s indicating the locations of the segments. + * @param timeShiftBufferDepthUs The time shift buffer depth in microseconds. + * @param periodStartUnixTimeUs The start of the enclosing period in microseconds since the Unix + * epoch. */ public SegmentList( RangedUri initialization, @@ -262,9 +327,20 @@ public SegmentList( long startNumber, long duration, @Nullable List segmentTimeline, - @Nullable List mediaSegments) { - super(initialization, timescale, presentationTimeOffset, startNumber, duration, - segmentTimeline); + long availabilityTimeOffsetUs, + @Nullable List mediaSegments, + long timeShiftBufferDepthUs, + long periodStartUnixTimeUs) { + super( + initialization, + timescale, + presentationTimeOffset, + startNumber, + duration, + segmentTimeline, + availabilityTimeOffsetUs, + timeShiftBufferDepthUs, + periodStartUnixTimeUs); this.mediaSegments = mediaSegments; } @@ -274,7 +350,7 @@ public RangedUri getSegmentUrl(Representation representation, long sequenceNumbe } @Override - public int getSegmentCount(long periodDurationUs) { + public long getSegmentCount(long periodDurationUs) { return mediaSegments.size(); } @@ -282,16 +358,13 @@ public int getSegmentCount(long periodDurationUs) { public boolean isExplicit() { return true; } - } - /** - * A {@link MultiSegmentBase} that uses a SegmentTemplate to define its segments. - */ - public static class SegmentTemplate extends MultiSegmentBase { + /** A {@link MultiSegmentBase} that uses a SegmentTemplate to define its segments. */ + public static final class SegmentTemplate extends MultiSegmentBase { - /* package */ @Nullable final UrlTemplate initializationTemplate; - /* package */ @Nullable final UrlTemplate mediaTemplate; + @Nullable /* package */ final UrlTemplate initializationTemplate; + @Nullable /* package */ final UrlTemplate mediaTemplate; /* package */ final long endNumber; /** @@ -311,10 +384,15 @@ public static class SegmentTemplate extends MultiSegmentBase { * @param segmentTimeline A segment timeline corresponding to the segments. If null, then * segments are assumed to be of fixed duration as specified by the {@code duration} * parameter. + * @param availabilityTimeOffsetUs The offset to the current realtime at which segments become + * available in microseconds, or {@link C#TIME_UNSET} if not applicable. * @param initializationTemplate A template defining the location of initialization data, if * such data exists. If non-null then the {@code initialization} parameter is ignored. If * null then {@code initialization} will be used. * @param mediaTemplate A template defining the location of each media segment. + * @param timeShiftBufferDepthUs The time shift buffer depth in microseconds. 
+ * @param periodStartUnixTimeUs The start of the enclosing period in microseconds since the Unix + * epoch. */ public SegmentTemplate( RangedUri initialization, @@ -324,15 +402,21 @@ public SegmentTemplate( long endNumber, long duration, @Nullable List segmentTimeline, + long availabilityTimeOffsetUs, @Nullable UrlTemplate initializationTemplate, - @Nullable UrlTemplate mediaTemplate) { + @Nullable UrlTemplate mediaTemplate, + long timeShiftBufferDepthUs, + long periodStartUnixTimeUs) { super( initialization, timescale, presentationTimeOffset, startNumber, duration, - segmentTimeline); + segmentTimeline, + availabilityTimeOffsetUs, + timeShiftBufferDepthUs, + periodStartUnixTimeUs); this.initializationTemplate = initializationTemplate; this.mediaTemplate = mediaTemplate; this.endNumber = endNumber; @@ -342,8 +426,9 @@ public SegmentTemplate( @Nullable public RangedUri getInitialization(Representation representation) { if (initializationTemplate != null) { - String urlString = initializationTemplate.buildUri(representation.format.id, 0, - representation.format.bitrate, 0); + String urlString = + initializationTemplate.buildUri( + representation.format.id, 0, representation.format.bitrate, 0); return new RangedUri(urlString, 0, C.LENGTH_UNSET); } else { return super.getInitialization(representation); @@ -358,30 +443,32 @@ public RangedUri getSegmentUrl(Representation representation, long sequenceNumbe } else { time = (sequenceNumber - startNumber) * duration; } - String uriString = mediaTemplate.buildUri(representation.format.id, sequenceNumber, - representation.format.bitrate, time); + String uriString = + mediaTemplate.buildUri( + representation.format.id, sequenceNumber, representation.format.bitrate, time); return new RangedUri(uriString, 0, C.LENGTH_UNSET); } @Override - public int getSegmentCount(long periodDurationUs) { + public long getSegmentCount(long periodDurationUs) { if (segmentTimeline != null) { return segmentTimeline.size(); } else if (endNumber != C.INDEX_UNSET) { - return (int) (endNumber - startNumber + 1); + return endNumber - startNumber + 1; } else if (periodDurationUs != C.TIME_UNSET) { - long durationUs = (duration * C.MICROS_PER_SECOND) / timescale; - return (int) Util.ceilDivide(periodDurationUs, durationUs); + BigInteger numerator = + BigInteger.valueOf(periodDurationUs).multiply(BigInteger.valueOf(timescale)); + BigInteger denominator = + BigInteger.valueOf(duration).multiply(BigInteger.valueOf(C.MICROS_PER_SECOND)); + return BigIntegerMath.divide(numerator, denominator, RoundingMode.CEILING).longValue(); } else { - return DashSegmentIndex.INDEX_UNBOUNDED; + return INDEX_UNBOUNDED; } } } - /** - * Represents a timeline segment from the MPD's SegmentTimeline list. - */ - public static class SegmentTimelineElement { + /** Represents a timeline segment from the MPD's SegmentTimeline list. 
*/ + public static final class SegmentTimelineElement { /* package */ final long startTime; /* package */ final long duration; @@ -414,5 +501,4 @@ public int hashCode() { return 31 * (int) startTime + (int) duration; } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/ServiceDescriptionElement.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/ServiceDescriptionElement.java new file mode 100644 index 0000000000..eec862f4f4 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/ServiceDescriptionElement.java @@ -0,0 +1,66 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.dash.manifest; + +import com.google.android.exoplayer2.C; + +/** Represents a service description element. */ +public final class ServiceDescriptionElement { + + /** The target live offset in milliseconds, or {@link C#TIME_UNSET} if undefined. */ + public final long targetOffsetMs; + /** The minimum live offset in milliseconds, or {@link C#TIME_UNSET} if undefined. */ + public final long minOffsetMs; + /** The maximum live offset in milliseconds, or {@link C#TIME_UNSET} if undefined. */ + public final long maxOffsetMs; + /** + * The minimum factor by which playback can be sped up for live speed adjustment, or {@link + * C#RATE_UNSET} if undefined. + */ + public final float minPlaybackSpeed; + /** + * The maximum factor by which playback can be sped up for live speed adjustment, or {@link + * C#RATE_UNSET} if undefined. + */ + public final float maxPlaybackSpeed; + + /** + * Creates a service description element. + * + * @param targetOffsetMs The target live offset in milliseconds, or {@link C#TIME_UNSET} if + * undefined. + * @param minOffsetMs The minimum live offset in milliseconds, or {@link C#TIME_UNSET} if + * undefined. + * @param maxOffsetMs The maximum live offset in milliseconds, or {@link C#TIME_UNSET} if + * undefined. + * @param minPlaybackSpeed The minimum factor by which playback can be sped up for live speed + * adjustment, or {@link C#RATE_UNSET} if undefined. + * @param maxPlaybackSpeed The maximum factor by which playback can be sped up for live speed + * adjustment, or {@link C#RATE_UNSET} if undefined. 
+ */ + public ServiceDescriptionElement( + long targetOffsetMs, + long minOffsetMs, + long maxOffsetMs, + float minPlaybackSpeed, + float maxPlaybackSpeed) { + this.targetOffsetMs = targetOffsetMs; + this.minOffsetMs = minOffsetMs; + this.maxOffsetMs = maxOffsetMs; + this.minPlaybackSpeed = minPlaybackSpeed; + this.maxPlaybackSpeed = maxPlaybackSpeed; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/SingleSegmentIndex.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/SingleSegmentIndex.java index a56a11fe50..a8c0254ad2 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/SingleSegmentIndex.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/SingleSegmentIndex.java @@ -15,11 +15,10 @@ */ package com.google.android.exoplayer2.source.dash.manifest; +import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.source.dash.DashSegmentIndex; -/** - * A {@link DashSegmentIndex} that defines a single segment. - */ +/** A {@link DashSegmentIndex} that defines a single segment. */ /* package */ final class SingleSegmentIndex implements DashSegmentIndex { private final RangedUri uri; @@ -57,13 +56,27 @@ public long getFirstSegmentNum() { } @Override - public int getSegmentCount(long periodDurationUs) { + public long getFirstAvailableSegmentNum(long periodDurationUs, long nowUnixTimeUs) { + return 0; + } + + @Override + public long getSegmentCount(long periodDurationUs) { return 1; } + @Override + public long getAvailableSegmentCount(long periodDurationUs, long nowUnixTimeUs) { + return 1; + } + + @Override + public long getNextSegmentAvailableTimeUs(long periodDurationUs, long nowUnixTimeUs) { + return C.TIME_UNSET; + } + @Override public boolean isExplicit() { return true; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/UrlTemplate.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/UrlTemplate.java index 7d13993655..a70fdb855c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/UrlTemplate.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/UrlTemplate.java @@ -19,8 +19,8 @@ /** * A template from which URLs can be built. - *
<p>
      - * URLs are built according to the substitution rules defined in ISO/IEC 23009-1:2014 5.3.9.4.4. + * + *
<p>
      URLs are built according to the substitution rules defined in ISO/IEC 23009-1:2014 5.3.9.4.4. */ public final class UrlTemplate { @@ -58,11 +58,9 @@ public static UrlTemplate compile(String template) { return new UrlTemplate(urlPieces, identifiers, identifierFormatTags, identifierCount); } - /** - * Internal constructor. Use {@link #compile(String)} to build instances of this class. - */ - private UrlTemplate(String[] urlPieces, int[] identifiers, String[] identifierFormatTags, - int identifierCount) { + /** Internal constructor. Use {@link #compile(String)} to build instances of this class. */ + private UrlTemplate( + String[] urlPieces, int[] identifiers, String[] identifierFormatTags, int identifierCount) { this.urlPieces = urlPieces; this.identifiers = identifiers; this.identifierFormatTags = identifierFormatTags; @@ -100,8 +98,8 @@ public String buildUri(String representationId, long segmentNumber, int bandwidt /** * Parses {@code template}, placing the decomposed components into the provided arrays. - *
<p>
      - * If the return value is N, {@code urlPieces} will contain (N+1) strings that must be + * + *
<p>
      If the return value is N, {@code urlPieces} will contain (N+1) strings that must be * interleaved with N arguments in order to construct a url. The N identifiers that correspond to * the required arguments, together with the tags that define their required formatting, are * returned in {@code identifiers} and {@code identifierFormatTags} respectively. @@ -113,8 +111,8 @@ public String buildUri(String representationId, long segmentNumber, int bandwidt * @return The number of identifiers in the template url. * @throws IllegalArgumentException If the template string is malformed. */ - private static int parseTemplate(String template, String[] urlPieces, int[] identifiers, - String[] identifierFormatTags) { + private static int parseTemplate( + String template, String[] urlPieces, int[] identifiers, String[] identifierFormatTags) { urlPieces[0] = ""; int templateIndex = 0; int identifierCount = 0; @@ -142,7 +140,7 @@ private static int parseTemplate(String template, String[] urlPieces, int[] iden // Allowed conversions are decimal integer (which is the only conversion allowed by the // DASH specification) and hexadecimal integer (due to existing content that uses it). // Else we assume that the conversion is missing, and that it should be decimal integer. - if (!formatTag.endsWith("d") && !formatTag.endsWith("x")) { + if (!formatTag.endsWith("d") && !formatTag.endsWith("x") && !formatTag.endsWith("X")) { formatTag += "d"; } identifier = identifier.substring(0, formatTagIndex); @@ -169,5 +167,4 @@ private static int parseTemplate(String template, String[] urlPieces, int[] iden } return identifierCount; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/UtcTimingElement.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/UtcTimingElement.java index 79e7452459..7deb92e66c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/UtcTimingElement.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/manifest/UtcTimingElement.java @@ -15,9 +15,7 @@ */ package com.google.android.exoplayer2.source.dash.manifest; -/** - * Represents a UTCTiming element. - */ +/** Represents a UTCTiming element. 
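The UrlTemplate changes above keep the ISO/IEC 23009-1 substitution behaviour and additionally accept an upper-case hexadecimal (%X) conversion in format tags. A short sketch of the public compile/buildUri API; the template string and argument values are invented for illustration:

import com.google.android.exoplayer2.source.dash.manifest.UrlTemplate;

public final class UrlTemplateSketch {
  public static void main(String[] args) {
    // $Number$ carries an explicit width; a plain identifier defaults to a %d conversion.
    UrlTemplate template =
        UrlTemplate.compile("video/$RepresentationID$/segment-$Number%05d$.m4s");
    String uri =
        template.buildUri(
            /* representationId= */ "rep0",
            /* segmentNumber= */ 42,
            /* bandwidth= */ 800_000,
            /* time= */ 0);
    System.out.println(uri); // video/rep0/segment-00042.m4s
  }
}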
*/ public final class UtcTimingElement { public final String schemeIdUri; @@ -32,5 +30,4 @@ public UtcTimingElement(String schemeIdUri, String value) { public String toString() { return schemeIdUri + ", " + value; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/offline/DashDownloader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/offline/DashDownloader.java index 2754a3341a..f397b5c2be 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/offline/DashDownloader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/dash/offline/DashDownloader.java @@ -15,14 +15,14 @@ */ package com.google.android.exoplayer2.source.dash.offline; -import android.net.Uri; +import static com.google.android.exoplayer2.util.Util.castNonNull; + import androidx.annotation.Nullable; -import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.extractor.ChunkIndex; import com.google.android.exoplayer2.offline.DownloadException; -import com.google.android.exoplayer2.offline.DownloaderConstructorHelper; import com.google.android.exoplayer2.offline.SegmentDownloader; -import com.google.android.exoplayer2.offline.StreamKey; +import com.google.android.exoplayer2.source.dash.BaseUrlExclusionList; import com.google.android.exoplayer2.source.dash.DashSegmentIndex; import com.google.android.exoplayer2.source.dash.DashUtil; import com.google.android.exoplayer2.source.dash.DashWrappingSegmentIndex; @@ -34,10 +34,15 @@ import com.google.android.exoplayer2.source.dash.manifest.Representation; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSpec; -import com.google.android.exoplayer2.upstream.ParsingLoadable; +import com.google.android.exoplayer2.upstream.ParsingLoadable.Parser; +import com.google.android.exoplayer2.upstream.cache.CacheDataSource; +import com.google.android.exoplayer2.util.RunnableFutureTask; +import com.google.android.exoplayer2.util.Util; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.concurrent.Executor; +import org.checkerframework.checker.nullness.compatqual.NullableType; /** * A downloader for DASH streams. @@ -46,128 +51,170 @@ * *
<pre>{@code
        * SimpleCache cache = new SimpleCache(downloadFolder, new NoOpCacheEvictor(), databaseProvider);
      - * DefaultHttpDataSourceFactory factory = new DefaultHttpDataSourceFactory("ExoPlayer", null);
      - * DownloaderConstructorHelper constructorHelper =
      - *     new DownloaderConstructorHelper(cache, factory);
      + * CacheDataSource.Factory cacheDataSourceFactory =
      + *     new CacheDataSource.Factory()
      + *         .setCache(cache)
      + *         .setUpstreamDataSourceFactory(new DefaultHttpDataSource.Factory());
        * // Create a downloader for the first representation of the first adaptation set of the first
        * // period.
        * DashDownloader dashDownloader =
        *     new DashDownloader(
      - *         manifestUrl, Collections.singletonList(new StreamKey(0, 0, 0)), constructorHelper);
      + *         new MediaItem.Builder()
      + *             .setUri(manifestUrl)
      + *             .setStreamKeys(Collections.singletonList(new StreamKey(0, 0, 0)))
      + *             .build(),
      + *         cacheDataSourceFactory);
        * // Perform the download.
        * dashDownloader.download(progressListener);
      - * // Access downloaded data using CacheDataSource
      - * CacheDataSource cacheDataSource =
      - *     new CacheDataSource(cache, factory.createDataSource(), CacheDataSource.FLAG_BLOCK_ON_CACHE);
      + * // Use the downloaded data for playback.
      + * DashMediaSource mediaSource =
      + *     new DashMediaSource.Factory(cacheDataSourceFactory).createMediaSource(mediaItem);
 * }</pre>
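The example above uses the two-argument constructor, which runs requests directly on the calling thread; the Executor-accepting constructor added in this hunk lets segment requests run in parallel. A possible call site, with the cache folder, database provider, manifest URL and thread count as assumed inputs:

import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.database.DatabaseProvider;
import com.google.android.exoplayer2.offline.StreamKey;
import com.google.android.exoplayer2.source.dash.offline.DashDownloader;
import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;
import com.google.android.exoplayer2.upstream.cache.CacheDataSource;
import com.google.android.exoplayer2.upstream.cache.NoOpCacheEvictor;
import com.google.android.exoplayer2.upstream.cache.SimpleCache;
import java.io.File;
import java.util.Collections;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public final class ParallelDashDownloadSketch {
  public static void run(File downloadFolder, DatabaseProvider databaseProvider, String manifestUrl)
      throws Exception {
    SimpleCache cache = new SimpleCache(downloadFolder, new NoOpCacheEvictor(), databaseProvider);
    CacheDataSource.Factory cacheDataSourceFactory =
        new CacheDataSource.Factory()
            .setCache(cache)
            .setUpstreamDataSourceFactory(new DefaultHttpDataSource.Factory());
    MediaItem mediaItem =
        new MediaItem.Builder()
            .setUri(manifestUrl)
            .setStreamKeys(Collections.singletonList(new StreamKey(0, 0, 0)))
            .build();
    // Six download threads; segments of the selected representation are fetched in parallel.
    ExecutorService executor = Executors.newFixedThreadPool(6);
    DashDownloader downloader = new DashDownloader(mediaItem, cacheDataSourceFactory, executor);
    downloader.download(
        (contentLength, bytesDownloaded, percentDownloaded) ->
            System.out.println("downloaded " + percentDownloaded + "%"));
    executor.shutdown();
  }
}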
      */ public final class DashDownloader extends SegmentDownloader { + private final BaseUrlExclusionList baseUrlExclusionList; + + /** + * Creates a new instance. + * + * @param mediaItem The {@link MediaItem} to be downloaded. + * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which the + * download will be written. + */ + public DashDownloader(MediaItem mediaItem, CacheDataSource.Factory cacheDataSourceFactory) { + this(mediaItem, cacheDataSourceFactory, Runnable::run); + } + /** - * @param manifestUri The {@link Uri} of the manifest to be downloaded. - * @param streamKeys Keys defining which representations in the manifest should be selected for - * download. If empty, all representations are downloaded. - * @param constructorHelper A {@link DownloaderConstructorHelper} instance. + * Creates a new instance. + * + * @param mediaItem The {@link MediaItem} to be downloaded. + * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which the + * download will be written. + * @param executor An {@link Executor} used to make requests for the media being downloaded. + * Providing an {@link Executor} that uses multiple threads will speed up the download by + * allowing parts of it to be executed in parallel. */ public DashDownloader( - Uri manifestUri, List streamKeys, DownloaderConstructorHelper constructorHelper) { - super(manifestUri, streamKeys, constructorHelper); + MediaItem mediaItem, CacheDataSource.Factory cacheDataSourceFactory, Executor executor) { + this(mediaItem, new DashManifestParser(), cacheDataSourceFactory, executor); } - @Override - protected DashManifest getManifest(DataSource dataSource, DataSpec dataSpec) throws IOException { - return ParsingLoadable.load( - dataSource, new DashManifestParser(), dataSpec, C.DATA_TYPE_MANIFEST); + /** + * Creates a new instance. + * + * @param mediaItem The {@link MediaItem} to be downloaded. + * @param manifestParser A parser for DASH manifests. + * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which the + * download will be written. + * @param executor An {@link Executor} used to make requests for the media being downloaded. + * Providing an {@link Executor} that uses multiple threads will speed up the download by + * allowing parts of it to be executed in parallel. 
+ */ + public DashDownloader( + MediaItem mediaItem, + Parser manifestParser, + CacheDataSource.Factory cacheDataSourceFactory, + Executor executor) { + super(mediaItem, manifestParser, cacheDataSourceFactory, executor); + baseUrlExclusionList = new BaseUrlExclusionList(); } @Override protected List getSegments( - DataSource dataSource, DashManifest manifest, boolean allowIncompleteList) - throws InterruptedException, IOException { + DataSource dataSource, DashManifest manifest, boolean removing) + throws IOException, InterruptedException { ArrayList segments = new ArrayList<>(); for (int i = 0; i < manifest.getPeriodCount(); i++) { Period period = manifest.getPeriod(i); - long periodStartUs = C.msToUs(period.startMs); + long periodStartUs = Util.msToUs(period.startMs); long periodDurationUs = manifest.getPeriodDurationUs(i); List adaptationSets = period.adaptationSets; for (int j = 0; j < adaptationSets.size(); j++) { addSegmentsForAdaptationSet( - dataSource, - adaptationSets.get(j), - periodStartUs, - periodDurationUs, - allowIncompleteList, - segments); + dataSource, adaptationSets.get(j), periodStartUs, periodDurationUs, removing, segments); } } return segments; } - private static void addSegmentsForAdaptationSet( + private void addSegmentsForAdaptationSet( DataSource dataSource, AdaptationSet adaptationSet, long periodStartUs, long periodDurationUs, - boolean allowIncompleteList, + boolean removing, ArrayList out) throws IOException, InterruptedException { for (int i = 0; i < adaptationSet.representations.size(); i++) { Representation representation = adaptationSet.representations.get(i); DashSegmentIndex index; try { - index = getSegmentIndex(dataSource, adaptationSet.type, representation); + index = getSegmentIndex(dataSource, adaptationSet.type, representation, removing); if (index == null) { // Loading succeeded but there was no index. throw new DownloadException("Missing segment index"); } } catch (IOException e) { - if (!allowIncompleteList) { + if (!removing) { throw e; } // Generating an incomplete segment list is allowed. Advance to the next representation. 
continue; } - int segmentCount = index.getSegmentCount(periodDurationUs); + long segmentCount = index.getSegmentCount(periodDurationUs); if (segmentCount == DashSegmentIndex.INDEX_UNBOUNDED) { throw new DownloadException("Unbounded segment index"); } - String baseUrl = representation.baseUrl; - RangedUri initializationUri = representation.getInitializationUri(); + String baseUrl = castNonNull(baseUrlExclusionList.selectBaseUrl(representation.baseUrls)).url; + @Nullable RangedUri initializationUri = representation.getInitializationUri(); if (initializationUri != null) { - addSegment(periodStartUs, baseUrl, initializationUri, out); + out.add(createSegment(representation, baseUrl, periodStartUs, initializationUri)); } - RangedUri indexUri = representation.getIndexUri(); + @Nullable RangedUri indexUri = representation.getIndexUri(); if (indexUri != null) { - addSegment(periodStartUs, baseUrl, indexUri, out); + out.add(createSegment(representation, baseUrl, periodStartUs, indexUri)); } long firstSegmentNum = index.getFirstSegmentNum(); long lastSegmentNum = firstSegmentNum + segmentCount - 1; for (long j = firstSegmentNum; j <= lastSegmentNum; j++) { - addSegment(periodStartUs + index.getTimeUs(j), baseUrl, index.getSegmentUrl(j), out); + out.add( + createSegment( + representation, + baseUrl, + periodStartUs + index.getTimeUs(j), + index.getSegmentUrl(j))); } } } - private static void addSegment( - long startTimeUs, String baseUrl, RangedUri rangedUri, ArrayList out) { - DataSpec dataSpec = - new DataSpec(rangedUri.resolveUri(baseUrl), rangedUri.start, rangedUri.length, null); - out.add(new Segment(startTimeUs, dataSpec)); + private Segment createSegment( + Representation representation, String baseUrl, long startTimeUs, RangedUri rangedUri) { + DataSpec dataSpec = DashUtil.buildDataSpec(representation, baseUrl, rangedUri, /* flags= */ 0); + return new Segment(startTimeUs, dataSpec); } - private static @Nullable DashSegmentIndex getSegmentIndex( - DataSource dataSource, int trackType, Representation representation) + @Nullable + private DashSegmentIndex getSegmentIndex( + DataSource dataSource, int trackType, Representation representation, boolean removing) throws IOException, InterruptedException { DashSegmentIndex index = representation.getIndex(); if (index != null) { return index; } - ChunkIndex seekMap = DashUtil.loadChunkIndex(dataSource, trackType, representation); + RunnableFutureTask<@NullableType ChunkIndex, IOException> runnable = + new RunnableFutureTask<@NullableType ChunkIndex, IOException>() { + @Override + protected @NullableType ChunkIndex doWork() throws IOException { + return DashUtil.loadChunkIndex(dataSource, trackType, representation); + } + }; + @Nullable ChunkIndex seekMap = execute(runnable, removing); return seekMap == null ? 
null : new DashWrappingSegmentIndex(seekMap, representation.presentationTimeOffsetUs); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/Aes128DataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/Aes128DataSource.java index fe70298dc8..617df277bf 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/Aes128DataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/Aes128DataSource.java @@ -66,6 +66,7 @@ public Aes128DataSource(DataSource upstream, byte[] encryptionKey, byte[] encryp @Override public final void addTransferListener(TransferListener transferListener) { + Assertions.checkNotNull(transferListener); upstream.addTransferListener(transferListener); } @@ -95,9 +96,9 @@ public final long open(DataSpec dataSpec) throws IOException { } @Override - public final int read(byte[] buffer, int offset, int readLength) throws IOException { + public final int read(byte[] buffer, int offset, int length) throws IOException { Assertions.checkNotNull(cipherInputStream); - int bytesRead = cipherInputStream.read(buffer, offset, readLength); + int bytesRead = cipherInputStream.read(buffer, offset, length); if (bytesRead < 0) { return C.RESULT_END_OF_INPUT; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/BundledHlsMediaChunkExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/BundledHlsMediaChunkExtractor.java new file mode 100644 index 0000000000..a7c5081f59 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/BundledHlsMediaChunkExtractor.java @@ -0,0 +1,110 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.hls; + +import androidx.annotation.VisibleForTesting; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.extractor.Extractor; +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.PositionHolder; +import com.google.android.exoplayer2.extractor.mp3.Mp3Extractor; +import com.google.android.exoplayer2.extractor.mp4.FragmentedMp4Extractor; +import com.google.android.exoplayer2.extractor.ts.Ac3Extractor; +import com.google.android.exoplayer2.extractor.ts.Ac4Extractor; +import com.google.android.exoplayer2.extractor.ts.AdtsExtractor; +import com.google.android.exoplayer2.extractor.ts.TsExtractor; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.TimestampAdjuster; +import java.io.IOException; + +/** + * {@link HlsMediaChunkExtractor} implementation that uses ExoPlayer app-bundled {@link Extractor + * Extractors}. 
+ */ +public final class BundledHlsMediaChunkExtractor implements HlsMediaChunkExtractor { + + private static final PositionHolder POSITION_HOLDER = new PositionHolder(); + + @VisibleForTesting /* package */ final Extractor extractor; + private final Format multivariantPlaylistFormat; + private final TimestampAdjuster timestampAdjuster; + + /** + * Creates a new instance. + * + * @param extractor The underlying {@link Extractor}. + * @param multivariantPlaylistFormat The {@link Format} obtained from the multivariant playlist. + * @param timestampAdjuster A {@link TimestampAdjuster} to adjust sample timestamps. + */ + public BundledHlsMediaChunkExtractor( + Extractor extractor, Format multivariantPlaylistFormat, TimestampAdjuster timestampAdjuster) { + this.extractor = extractor; + this.multivariantPlaylistFormat = multivariantPlaylistFormat; + this.timestampAdjuster = timestampAdjuster; + } + + @Override + public void init(ExtractorOutput extractorOutput) { + extractor.init(extractorOutput); + } + + @Override + public boolean read(ExtractorInput extractorInput) throws IOException { + return extractor.read(extractorInput, POSITION_HOLDER) == Extractor.RESULT_CONTINUE; + } + + @Override + public boolean isPackedAudioExtractor() { + return extractor instanceof AdtsExtractor + || extractor instanceof Ac3Extractor + || extractor instanceof Ac4Extractor + || extractor instanceof Mp3Extractor; + } + + @Override + public boolean isReusable() { + return extractor instanceof TsExtractor || extractor instanceof FragmentedMp4Extractor; + } + + @Override + public HlsMediaChunkExtractor recreate() { + Assertions.checkState(!isReusable()); + Extractor newExtractorInstance; + if (extractor instanceof WebvttExtractor) { + newExtractorInstance = + new WebvttExtractor(multivariantPlaylistFormat.language, timestampAdjuster); + } else if (extractor instanceof AdtsExtractor) { + newExtractorInstance = new AdtsExtractor(); + } else if (extractor instanceof Ac3Extractor) { + newExtractorInstance = new Ac3Extractor(); + } else if (extractor instanceof Ac4Extractor) { + newExtractorInstance = new Ac4Extractor(); + } else if (extractor instanceof Mp3Extractor) { + newExtractorInstance = new Mp3Extractor(); + } else { + throw new IllegalStateException( + "Unexpected extractor type for recreation: " + extractor.getClass().getSimpleName()); + } + return new BundledHlsMediaChunkExtractor( + newExtractorInstance, multivariantPlaylistFormat, timestampAdjuster); + } + + @Override + public void onTruncatedSegmentParsed() { + extractor.seek(/* position= */ 0, /* timeUs= */ 0); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/DefaultHlsDataSourceFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/DefaultHlsDataSourceFactory.java index b90dcb2139..7ab8097648 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/DefaultHlsDataSourceFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/DefaultHlsDataSourceFactory.java @@ -15,11 +15,10 @@ */ package com.google.android.exoplayer2.source.hls; +import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.upstream.DataSource; -/** - * Default implementation of {@link HlsDataSourceFactory}. - */ +/** Default implementation of {@link HlsDataSourceFactory}. 
*/ public final class DefaultHlsDataSourceFactory implements HlsDataSourceFactory { private final DataSource.Factory dataSourceFactory; @@ -32,8 +31,7 @@ public DefaultHlsDataSourceFactory(DataSource.Factory dataSourceFactory) { } @Override - public DataSource createDataSource(int dataType) { + public DataSource createDataSource(@C.DataType int dataType) { return dataSourceFactory.createDataSource(); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/DefaultHlsExtractorFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/DefaultHlsExtractorFactory.java index de4c425c7d..3bf1ae2ff8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/DefaultHlsExtractorFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/DefaultHlsExtractorFactory.java @@ -15,10 +15,14 @@ */ package com.google.android.exoplayer2.source.hls; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import android.annotation.SuppressLint; import android.net.Uri; import android.text.TextUtils; import androidx.annotation.Nullable; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.analytics.PlayerId; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.mp3.Mp3Extractor; @@ -29,32 +33,34 @@ import com.google.android.exoplayer2.extractor.ts.DefaultTsPayloadReaderFactory; import com.google.android.exoplayer2.extractor.ts.TsExtractor; import com.google.android.exoplayer2.metadata.Metadata; +import com.google.android.exoplayer2.util.FileTypes; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.TimestampAdjuster; +import com.google.common.primitives.Ints; import java.io.EOFException; import java.io.IOException; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; -/** - * Default {@link HlsExtractorFactory} implementation. - */ +/** Default {@link HlsExtractorFactory} implementation. */ public final class DefaultHlsExtractorFactory implements HlsExtractorFactory { - public static final String AAC_FILE_EXTENSION = ".aac"; - public static final String AC3_FILE_EXTENSION = ".ac3"; - public static final String EC3_FILE_EXTENSION = ".ec3"; - public static final String AC4_FILE_EXTENSION = ".ac4"; - public static final String MP3_FILE_EXTENSION = ".mp3"; - public static final String MP4_FILE_EXTENSION = ".mp4"; - public static final String M4_FILE_EXTENSION_PREFIX = ".m4"; - public static final String MP4_FILE_EXTENSION_PREFIX = ".mp4"; - public static final String CMF_FILE_EXTENSION_PREFIX = ".cmf"; - public static final String VTT_FILE_EXTENSION = ".vtt"; - public static final String WEBVTT_FILE_EXTENSION = ".webvtt"; + // Extractors order is optimized according to + // https://docs.google.com/document/d/1w2mKaWMxfz2Ei8-LdxqbPs1VLe_oudB-eryXXw9OvQQ. 
+ private static final int[] DEFAULT_EXTRACTOR_ORDER = + new int[] { + FileTypes.MP4, + FileTypes.WEBVTT, + FileTypes.TS, + FileTypes.ADTS, + FileTypes.AC3, + FileTypes.AC4, + FileTypes.MP3, + }; - @DefaultTsPayloadReaderFactory.Flags private final int payloadReaderFactoryFlags; + private final @DefaultTsPayloadReaderFactory.Flags int payloadReaderFactoryFlags; private final boolean exposeCea608WhenMissingDeclarations; /** @@ -73,8 +79,9 @@ public DefaultHlsExtractorFactory() { * DefaultTsPayloadReaderFactory} instances. Other flags may be added on top of {@code * payloadReaderFactoryFlags} when creating {@link DefaultTsPayloadReaderFactory}. * @param exposeCea608WhenMissingDeclarations Whether created {@link TsExtractor} instances should - * expose a CEA-608 track should the master playlist contain no Closed Captions declarations. - * If the master playlist contains any Closed Captions declarations, this flag is ignored. + * expose a CEA-608 track should the multivariant playlist contain no Closed Captions + * declarations. If the multivariant playlist contains any Closed Captions declarations, this + * flag is ignored. */ public DefaultHlsExtractorFactory( int payloadReaderFactoryFlags, boolean exposeCea608WhenMissingDeclarations) { @@ -83,137 +90,95 @@ public DefaultHlsExtractorFactory( } @Override - public Result createExtractor( - @Nullable Extractor previousExtractor, + public BundledHlsMediaChunkExtractor createExtractor( Uri uri, Format format, @Nullable List muxedCaptionFormats, TimestampAdjuster timestampAdjuster, Map> responseHeaders, - ExtractorInput extractorInput) - throws InterruptedException, IOException { - - if (previousExtractor != null) { - // A extractor has already been successfully used. Return one of the same type. - if (isReusable(previousExtractor)) { - return buildResult(previousExtractor); - } else { - Result result = - buildResultForSameExtractorType(previousExtractor, format, timestampAdjuster); - if (result == null) { - throw new IllegalArgumentException( - "Unexpected previousExtractor type: " + previousExtractor.getClass().getSimpleName()); - } - } - } - - // Try selecting the extractor by the file extension. - Extractor extractorByFileExtension = - createExtractorByFileExtension(uri, format, muxedCaptionFormats, timestampAdjuster); - extractorInput.resetPeekPosition(); - if (sniffQuietly(extractorByFileExtension, extractorInput)) { - return buildResult(extractorByFileExtension); - } - - // We need to manually sniff each known type, without retrying the one selected by file - // extension. - - if (!(extractorByFileExtension instanceof WebvttExtractor)) { - WebvttExtractor webvttExtractor = new WebvttExtractor(format.language, timestampAdjuster); - if (sniffQuietly(webvttExtractor, extractorInput)) { - return buildResult(webvttExtractor); - } - } + ExtractorInput sniffingExtractorInput, + PlayerId playerId) + throws IOException { + @FileTypes.Type + int formatInferredFileType = FileTypes.inferFileTypeFromMimeType(format.sampleMimeType); + @FileTypes.Type + int responseHeadersInferredFileType = + FileTypes.inferFileTypeFromResponseHeaders(responseHeaders); + @FileTypes.Type int uriInferredFileType = FileTypes.inferFileTypeFromUri(uri); - if (!(extractorByFileExtension instanceof AdtsExtractor)) { - AdtsExtractor adtsExtractor = new AdtsExtractor(); - if (sniffQuietly(adtsExtractor, extractorInput)) { - return buildResult(adtsExtractor); - } + // Defines the order in which to try the extractors. 
+ List fileTypeOrder = + new ArrayList<>(/* initialCapacity= */ DEFAULT_EXTRACTOR_ORDER.length); + addFileTypeIfValidAndNotPresent(formatInferredFileType, fileTypeOrder); + addFileTypeIfValidAndNotPresent(responseHeadersInferredFileType, fileTypeOrder); + addFileTypeIfValidAndNotPresent(uriInferredFileType, fileTypeOrder); + for (int fileType : DEFAULT_EXTRACTOR_ORDER) { + addFileTypeIfValidAndNotPresent(fileType, fileTypeOrder); } - if (!(extractorByFileExtension instanceof Ac3Extractor)) { - Ac3Extractor ac3Extractor = new Ac3Extractor(); - if (sniffQuietly(ac3Extractor, extractorInput)) { - return buildResult(ac3Extractor); + // Extractor to be used if the type is not recognized. + @Nullable Extractor fallBackExtractor = null; + sniffingExtractorInput.resetPeekPosition(); + for (int i = 0; i < fileTypeOrder.size(); i++) { + int fileType = fileTypeOrder.get(i); + Extractor extractor = + checkNotNull( + createExtractorByFileType(fileType, format, muxedCaptionFormats, timestampAdjuster)); + if (sniffQuietly(extractor, sniffingExtractorInput)) { + return new BundledHlsMediaChunkExtractor(extractor, format, timestampAdjuster); } - } - - if (!(extractorByFileExtension instanceof Ac4Extractor)) { - Ac4Extractor ac4Extractor = new Ac4Extractor(); - if (sniffQuietly(ac4Extractor, extractorInput)) { - return buildResult(ac4Extractor); + if (fallBackExtractor == null + && (fileType == formatInferredFileType + || fileType == responseHeadersInferredFileType + || fileType == uriInferredFileType + || fileType == FileTypes.TS)) { + // If sniffing fails, fallback to the file types inferred from context. If all else fails, + // fallback to Transport Stream. See https://github.com/google/ExoPlayer/issues/8219. + fallBackExtractor = extractor; } } - if (!(extractorByFileExtension instanceof Mp3Extractor)) { - Mp3Extractor mp3Extractor = - new Mp3Extractor(/* flags= */ 0, /* forcedFirstSampleTimestampUs= */ 0); - if (sniffQuietly(mp3Extractor, extractorInput)) { - return buildResult(mp3Extractor); - } - } - - if (!(extractorByFileExtension instanceof FragmentedMp4Extractor)) { - FragmentedMp4Extractor fragmentedMp4Extractor = - createFragmentedMp4Extractor(timestampAdjuster, format, muxedCaptionFormats); - if (sniffQuietly(fragmentedMp4Extractor, extractorInput)) { - return buildResult(fragmentedMp4Extractor); - } - } + return new BundledHlsMediaChunkExtractor( + checkNotNull(fallBackExtractor), format, timestampAdjuster); + } - if (!(extractorByFileExtension instanceof TsExtractor)) { - TsExtractor tsExtractor = - createTsExtractor( - payloadReaderFactoryFlags, - exposeCea608WhenMissingDeclarations, - format, - muxedCaptionFormats, - timestampAdjuster); - if (sniffQuietly(tsExtractor, extractorInput)) { - return buildResult(tsExtractor); - } + private static void addFileTypeIfValidAndNotPresent( + @FileTypes.Type int fileType, List fileTypes) { + if (Ints.indexOf(DEFAULT_EXTRACTOR_ORDER, fileType) == -1 || fileTypes.contains(fileType)) { + return; } - - // Fall back on the extractor created by file extension. - return buildResult(extractorByFileExtension); + fileTypes.add(fileType); } - private Extractor createExtractorByFileExtension( - Uri uri, + @SuppressLint("SwitchIntDef") // HLS only supports a small subset of the defined file types. 
+ @Nullable + private Extractor createExtractorByFileType( + @FileTypes.Type int fileType, Format format, @Nullable List muxedCaptionFormats, TimestampAdjuster timestampAdjuster) { - String lastPathSegment = uri.getLastPathSegment(); - if (lastPathSegment == null) { - lastPathSegment = ""; - } - if (MimeTypes.TEXT_VTT.equals(format.sampleMimeType) - || lastPathSegment.endsWith(WEBVTT_FILE_EXTENSION) - || lastPathSegment.endsWith(VTT_FILE_EXTENSION)) { - return new WebvttExtractor(format.language, timestampAdjuster); - } else if (lastPathSegment.endsWith(AAC_FILE_EXTENSION)) { - return new AdtsExtractor(); - } else if (lastPathSegment.endsWith(AC3_FILE_EXTENSION) - || lastPathSegment.endsWith(EC3_FILE_EXTENSION)) { - return new Ac3Extractor(); - } else if (lastPathSegment.endsWith(AC4_FILE_EXTENSION)) { - return new Ac4Extractor(); - } else if (lastPathSegment.endsWith(MP3_FILE_EXTENSION)) { - return new Mp3Extractor(/* flags= */ 0, /* forcedFirstSampleTimestampUs= */ 0); - } else if (lastPathSegment.endsWith(MP4_FILE_EXTENSION) - || lastPathSegment.startsWith(M4_FILE_EXTENSION_PREFIX, lastPathSegment.length() - 4) - || lastPathSegment.startsWith(MP4_FILE_EXTENSION_PREFIX, lastPathSegment.length() - 5) - || lastPathSegment.startsWith(CMF_FILE_EXTENSION_PREFIX, lastPathSegment.length() - 5)) { - return createFragmentedMp4Extractor(timestampAdjuster, format, muxedCaptionFormats); - } else { - // For any other file extension, we assume TS format. - return createTsExtractor( - payloadReaderFactoryFlags, - exposeCea608WhenMissingDeclarations, - format, - muxedCaptionFormats, - timestampAdjuster); + switch (fileType) { + case FileTypes.WEBVTT: + return new WebvttExtractor(format.language, timestampAdjuster); + case FileTypes.ADTS: + return new AdtsExtractor(); + case FileTypes.AC3: + return new Ac3Extractor(); + case FileTypes.AC4: + return new Ac4Extractor(); + case FileTypes.MP3: + return new Mp3Extractor(/* flags= */ 0, /* forcedFirstSampleTimestampUs= */ 0); + case FileTypes.MP4: + return createFragmentedMp4Extractor(timestampAdjuster, format, muxedCaptionFormats); + case FileTypes.TS: + return createTsExtractor( + payloadReaderFactoryFlags, + exposeCea608WhenMissingDeclarations, + format, + muxedCaptionFormats, + timestampAdjuster); + default: + return null; } } @@ -235,23 +200,19 @@ private static TsExtractor createTsExtractor( // closed caption track on channel 0. muxedCaptionFormats = Collections.singletonList( - Format.createTextSampleFormat( - /* id= */ null, - MimeTypes.APPLICATION_CEA608, - /* selectionFlags= */ 0, - /* language= */ null)); + new Format.Builder().setSampleMimeType(MimeTypes.APPLICATION_CEA608).build()); } else { muxedCaptionFormats = Collections.emptyList(); } - String codecs = format.codecs; + @Nullable String codecs = format.codecs; if (!TextUtils.isEmpty(codecs)) { // Sometimes AAC and H264 streams are declared in TS chunks even though they don't really // exist. If we know from the codec attribute that they don't exist, then we can // explicitly ignore them even if they're declared. 
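The codec check applied just below is deliberately coarse: if the CODECS attribute declares neither AAC nor H.264, the matching elementary streams can be flagged as ignorable up front. The standalone sketch below shows the same idea with simple RFC 6381 prefix matching; the flag values, class name and prefixes are simplified assumptions, and the real MimeTypes and DefaultTsPayloadReaderFactory helpers are more thorough.

    // Sketch only: simplified prefix matching, hypothetical flag values.
    final class CodecFlagSketch {

      static final int FLAG_IGNORE_AAC_STREAM = 1 << 0;
      static final int FLAG_IGNORE_H264_STREAM = 1 << 1;

      /** Returns extra flags to set when the CODECS attribute declares neither AAC nor H.264. */
      static int extraFlags(String codecsAttribute) {
        boolean hasAac = false;
        boolean hasH264 = false;
        for (String codec : codecsAttribute.split(",")) {
          String c = codec.trim();
          // Typical RFC 6381 entries: "mp4a.40.2" is AAC, "avc1.*" / "avc3.*" is H.264.
          hasAac |= c.startsWith("mp4a.40");
          hasH264 |= c.startsWith("avc1") || c.startsWith("avc3");
        }
        int flags = 0;
        if (!hasAac) {
          flags |= FLAG_IGNORE_AAC_STREAM;
        }
        if (!hasH264) {
          flags |= FLAG_IGNORE_H264_STREAM;
        }
        return flags;
      }

      public static void main(String[] args) {
        // "avc1.640028" alone declares no AAC, so the AAC elementary stream gets ignored.
        System.out.println(extraFlags("avc1.640028") == FLAG_IGNORE_AAC_STREAM); // true
      }
    }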
- if (!MimeTypes.AUDIO_AAC.equals(MimeTypes.getAudioMediaMimeType(codecs))) { + if (!MimeTypes.containsCodecsCorrespondingToMimeType(codecs, MimeTypes.AUDIO_AAC)) { payloadReaderFactoryFlags |= DefaultTsPayloadReaderFactory.FLAG_IGNORE_AAC_STREAM; } - if (!MimeTypes.VIDEO_H264.equals(MimeTypes.getVideoMediaMimeType(codecs))) { + if (!MimeTypes.containsCodecsCorrespondingToMimeType(codecs, MimeTypes.VIDEO_H264)) { payloadReaderFactoryFlags |= DefaultTsPayloadReaderFactory.FLAG_IGNORE_H264_STREAM; } } @@ -290,36 +251,8 @@ private static boolean isFmp4Variant(Format format) { return false; } - @Nullable - private static Result buildResultForSameExtractorType( - Extractor previousExtractor, Format format, TimestampAdjuster timestampAdjuster) { - if (previousExtractor instanceof WebvttExtractor) { - return buildResult(new WebvttExtractor(format.language, timestampAdjuster)); - } else if (previousExtractor instanceof AdtsExtractor) { - return buildResult(new AdtsExtractor()); - } else if (previousExtractor instanceof Ac3Extractor) { - return buildResult(new Ac3Extractor()); - } else if (previousExtractor instanceof Ac4Extractor) { - return buildResult(new Ac4Extractor()); - } else if (previousExtractor instanceof Mp3Extractor) { - return buildResult(new Mp3Extractor()); - } else { - return null; - } - } - - private static Result buildResult(Extractor extractor) { - return new Result( - extractor, - extractor instanceof AdtsExtractor - || extractor instanceof Ac3Extractor - || extractor instanceof Ac4Extractor - || extractor instanceof Mp3Extractor, - isReusable(extractor)); - } - private static boolean sniffQuietly(Extractor extractor, ExtractorInput input) - throws InterruptedException, IOException { + throws IOException { boolean result = false; try { result = extractor.sniff(input); @@ -330,9 +263,4 @@ private static boolean sniffQuietly(Extractor extractor, ExtractorInput input) } return result; } - - private static boolean isReusable(Extractor previousExtractor) { - return previousExtractor instanceof TsExtractor - || previousExtractor instanceof FragmentedMp4Extractor; - } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsChunkSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsChunkSource.java index 1a77715e71..5d5d398370 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsChunkSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsChunkSource.java @@ -15,11 +15,20 @@ */ package com.google.android.exoplayer2.source.hls; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.Math.max; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.net.Uri; import android.os.SystemClock; +import android.util.Pair; +import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.SeekParameters; +import com.google.android.exoplayer2.analytics.PlayerId; import com.google.android.exoplayer2.source.BehindLiveWindowException; import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.chunk.BaseMediaChunkIterator; @@ -31,25 +40,31 @@ import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist.Segment; import com.google.android.exoplayer2.source.hls.playlist.HlsPlaylistTracker; 
import com.google.android.exoplayer2.trackselection.BaseTrackSelection; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSpec; import com.google.android.exoplayer2.upstream.TransferListener; -import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.TimestampAdjuster; import com.google.android.exoplayer2.util.UriUtil; import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import com.google.common.primitives.Ints; import java.io.IOException; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; /** Source of Hls (possibly adaptive) chunks. */ /* package */ class HlsChunkSource { - /** - * Chunk holder that allows the scheduling of retries. - */ + /** Chunk holder that allows the scheduling of retries. */ public static final class HlsChunkHolder { public HlsChunkHolder() { @@ -59,25 +74,45 @@ public HlsChunkHolder() { /** The chunk to be loaded next. */ @Nullable public Chunk chunk; - /** - * Indicates that the end of the stream has been reached. - */ + /** Indicates that the end of the stream has been reached. */ public boolean endOfStream; /** Indicates that the chunk source is waiting for the referred playlist to be refreshed. */ @Nullable public Uri playlistUrl; - /** - * Clears the holder. - */ + /** Clears the holder. */ public void clear() { chunk = null; endOfStream = false; playlistUrl = null; } - } + /** + * Chunk publication state. One of {@link #CHUNK_PUBLICATION_STATE_PRELOAD}, {@link + * #CHUNK_PUBLICATION_STATE_PUBLISHED}, {@link #CHUNK_PUBLICATION_STATE_REMOVED}. + */ + @Documented + @Target(TYPE_USE) + @IntDef({ + CHUNK_PUBLICATION_STATE_PRELOAD, + CHUNK_PUBLICATION_STATE_PUBLISHED, + CHUNK_PUBLICATION_STATE_REMOVED + }) + @Retention(RetentionPolicy.SOURCE) + @interface ChunkPublicationState {} + + /** Indicates that the chunk is based on a preload hint. */ + public static final int CHUNK_PUBLICATION_STATE_PRELOAD = 0; + /** Indicates that the chunk is definitely published. */ + public static final int CHUNK_PUBLICATION_STATE_PUBLISHED = 1; + /** + * Indicates that the chunk has been removed from the playlist. + * + *
<p>
      See RFC 8216, Section 6.2.6 also. + */ + public static final int CHUNK_PUBLICATION_STATE_REMOVED = 2; + /** * The maximum number of keys that the key cache can hold. This value must be 2 or greater in * order to hold initialization segment and media segment keys simultaneously. @@ -94,6 +129,7 @@ public void clear() { private final TrackGroup trackGroup; @Nullable private final List muxedCaptionFormats; private final FullSegmentEncryptionKeyCache keyCache; + private final PlayerId playerId; private boolean isTimestampMaster; private byte[] scratchSpace; @@ -103,8 +139,8 @@ public void clear() { // Note: The track group in the selection is typically *not* equal to trackGroup. This is due to // the way in which HlsSampleStreamWrapper generates track groups. Use only index based methods - // in TrackSelection to avoid unexpected behavior. - private TrackSelection trackSelection; + // in ExoTrackSelection to avoid unexpected behavior. + private ExoTrackSelection trackSelection; private long liveEdgeInPeriodTimeUs; private boolean seenExpectedPlaylistError; @@ -123,7 +159,7 @@ public void clear() { * {@link HlsChunkSource}s are used for a single playback, they should all share the same * provider. * @param muxedCaptionFormats List of muxed caption {@link Format}s. Null if no closed caption - * information is available in the master playlist. + * information is available in the multivariant playlist. */ public HlsChunkSource( HlsExtractorFactory extractorFactory, @@ -133,13 +169,15 @@ public HlsChunkSource( HlsDataSourceFactory dataSourceFactory, @Nullable TransferListener mediaTransferListener, TimestampAdjusterProvider timestampAdjusterProvider, - @Nullable List muxedCaptionFormats) { + @Nullable List muxedCaptionFormats, + PlayerId playerId) { this.extractorFactory = extractorFactory; this.playlistTracker = playlistTracker; this.playlistUrls = playlistUrls; this.playlistFormats = playlistFormats; this.timestampAdjusterProvider = timestampAdjusterProvider; this.muxedCaptionFormats = muxedCaptionFormats; + this.playerId = playerId; keyCache = new FullSegmentEncryptionKeyCache(KEY_CACHE_SIZE); scratchSpace = Util.EMPTY_BYTE_ARRAY; liveEdgeInPeriodTimeUs = C.TIME_UNSET; @@ -149,11 +187,15 @@ public HlsChunkSource( } encryptionDataSource = dataSourceFactory.createDataSource(C.DATA_TYPE_DRM); trackGroup = new TrackGroup(playlistFormats); - int[] initialTrackSelection = new int[playlistUrls.length]; + // Use only non-trickplay variants for preparation. See [Internal ref: b/161529098]. + ArrayList initialTrackSelection = new ArrayList<>(); for (int i = 0; i < playlistUrls.length; i++) { - initialTrackSelection[i] = i; + if ((playlistFormats[i].roleFlags & C.ROLE_FLAG_TRICK_PLAY) == 0) { + initialTrackSelection.add(i); + } } - trackSelection = new InitializationTrackSelection(trackGroup, initialTrackSelection); + trackSelection = + new InitializationTrackSelection(trackGroup, Ints.toArray(initialTrackSelection)); } /** @@ -171,9 +213,7 @@ public void maybeThrowError() throws IOException { } } - /** - * Returns the track group exposed by the source. - */ + /** Returns the track group exposed by the source. */ public TrackGroup getTrackGroup() { return trackGroup; } @@ -181,20 +221,18 @@ public TrackGroup getTrackGroup() { /** * Sets the current track selection. * - * @param trackSelection The {@link TrackSelection}. + * @param trackSelection The {@link ExoTrackSelection}. 
*/ - public void setTrackSelection(TrackSelection trackSelection) { + public void setTrackSelection(ExoTrackSelection trackSelection) { this.trackSelection = trackSelection; } - /** Returns the current {@link TrackSelection}. */ - public TrackSelection getTrackSelection() { + /** Returns the current {@link ExoTrackSelection}. */ + public ExoTrackSelection getTrackSelection() { return trackSelection; } - /** - * Resets the source. - */ + /** Resets the source. */ public void reset() { fatalError = null; } @@ -209,6 +247,100 @@ public void setIsTimestampMaster(boolean isTimestampMaster) { this.isTimestampMaster = isTimestampMaster; } + /** + * Adjusts a seek position given the specified {@link SeekParameters}. + * + * @param positionUs The seek position in microseconds. + * @param seekParameters Parameters that control how the seek is performed. + * @return The adjusted seek position, in microseconds. + */ + public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParameters) { + int selectedIndex = trackSelection.getSelectedIndex(); + @Nullable + HlsMediaPlaylist mediaPlaylist = + selectedIndex < playlistUrls.length && selectedIndex != C.INDEX_UNSET + ? playlistTracker.getPlaylistSnapshot( + playlistUrls[trackSelection.getSelectedIndexInTrackGroup()], + /* isForPlayback= */ true) + : null; + + if (mediaPlaylist == null + || mediaPlaylist.segments.isEmpty() + || !mediaPlaylist.hasIndependentSegments) { + return positionUs; + } + + // Segments start with sync samples (i.e., EXT-X-INDEPENDENT-SEGMENTS is set) and the playlist + // is non-empty, so we can use segment start times as sync points. Note that in the rare case + // that (a) an adaptive quality switch occurs between the adjustment and the seek being + // performed, and (b) segment start times are not aligned across variants, it's possible that + // the adjusted position may not be at a sync point when it was intended to be. However, this is + // very much an edge case, and getting it wrong is worth it for getting the vast majority of + // cases right whilst keeping the implementation relatively simple. + long startOfPlaylistInPeriodUs = + mediaPlaylist.startTimeUs - playlistTracker.getInitialStartTimeUs(); + long relativePositionUs = positionUs - startOfPlaylistInPeriodUs; + int segmentIndex = + Util.binarySearchFloor( + mediaPlaylist.segments, + relativePositionUs, + /* inclusive= */ true, + /* stayInBounds= */ true); + long firstSyncUs = mediaPlaylist.segments.get(segmentIndex).relativeStartTimeUs; + long secondSyncUs = firstSyncUs; + if (segmentIndex != mediaPlaylist.segments.size() - 1) { + secondSyncUs = mediaPlaylist.segments.get(segmentIndex + 1).relativeStartTimeUs; + } + return seekParameters.resolveSeekPositionUs(relativePositionUs, firstSyncUs, secondSyncUs) + + startOfPlaylistInPeriodUs; + } + + /** + * Returns the publication state of the given chunk. + * + * @param mediaChunk The media chunk for which to evaluate the publication state. + * @return Whether the media chunk is {@link #CHUNK_PUBLICATION_STATE_PRELOAD a preload chunk}, + * has been {@link #CHUNK_PUBLICATION_STATE_REMOVED removed} or is definitely {@link + * #CHUNK_PUBLICATION_STATE_PUBLISHED published}. + */ + public @ChunkPublicationState int getChunkPublicationState(HlsMediaChunk mediaChunk) { + if (mediaChunk.partIndex == C.INDEX_UNSET) { + // Chunks based on full segments can't be removed and are always published. 
+ return CHUNK_PUBLICATION_STATE_PUBLISHED; + } + Uri playlistUrl = playlistUrls[trackGroup.indexOf(mediaChunk.trackFormat)]; + HlsMediaPlaylist mediaPlaylist = + checkNotNull(playlistTracker.getPlaylistSnapshot(playlistUrl, /* isForPlayback= */ false)); + int segmentIndexInPlaylist = (int) (mediaChunk.chunkIndex - mediaPlaylist.mediaSequence); + if (segmentIndexInPlaylist < 0) { + // The parent segment of the previous chunk is not in the current playlist anymore. + return CHUNK_PUBLICATION_STATE_PUBLISHED; + } + List partsInCurrentPlaylist = + segmentIndexInPlaylist < mediaPlaylist.segments.size() + ? mediaPlaylist.segments.get(segmentIndexInPlaylist).parts + : mediaPlaylist.trailingParts; + if (mediaChunk.partIndex >= partsInCurrentPlaylist.size()) { + // In case the part hinted in the previous playlist has been wrongly assigned to the then full + // but not yet terminated segment, we discard it regardless whether the URI is different or + // not. While this is theoretically possible and unspecified, it appears to be an edge case + // which we can avoid with a small inefficiency of discarding in vain. We could allow this + // here but, if the chunk is not discarded, it could create unpredictable problems later, + // because the media sequence in previous.chunkIndex does not match to the actual media + // sequence in the new playlist. + return CHUNK_PUBLICATION_STATE_REMOVED; + } + HlsMediaPlaylist.Part newPart = partsInCurrentPlaylist.get(mediaChunk.partIndex); + if (newPart.isPreload) { + // The playlist did not change and the part in the new playlist is still a preload hint. + return CHUNK_PUBLICATION_STATE_PRELOAD; + } + Uri newUri = Uri.parse(UriUtil.resolve(mediaPlaylist.baseUri, newPart.url)); + return Util.areEqual(newUri, mediaChunk.dataSpec.uri) + ? CHUNK_PUBLICATION_STATE_PUBLISHED + : CHUNK_PUBLICATION_STATE_REMOVED; + } + /** * Returns the next chunk to load. * @@ -234,7 +366,7 @@ public void getNextChunk( List queue, boolean allowEndOfStream, HlsChunkHolder out) { - HlsMediaChunk previous = queue.isEmpty() ? null : queue.get(queue.size() - 1); + @Nullable HlsMediaChunk previous = queue.isEmpty() ? null : Iterables.getLast(queue); int oldTrackIndex = previous == null ? C.INDEX_UNSET : trackGroup.indexOf(previous.trackFormat); long bufferedDurationUs = loadPositionUs - playbackPositionUs; long timeToLiveEdgeUs = resolveTimeToLiveEdgeUs(playbackPositionUs); @@ -246,9 +378,9 @@ public void getNextChunk( // buffered duration to time-to-live-edge to decide whether to switch. Therefore, we subtract // the duration of the last loaded segment from timeToLiveEdgeUs as well. long subtractedDurationUs = previous.getDurationUs(); - bufferedDurationUs = Math.max(0, bufferedDurationUs - subtractedDurationUs); + bufferedDurationUs = max(0, bufferedDurationUs - subtractedDurationUs); if (timeToLiveEdgeUs != C.TIME_UNSET) { - timeToLiveEdgeUs = Math.max(0, timeToLiveEdgeUs - subtractedDurationUs); + timeToLiveEdgeUs = max(0, timeToLiveEdgeUs - subtractedDurationUs); } } @@ -257,7 +389,6 @@ public void getNextChunk( trackSelection.updateSelectedTrack( playbackPositionUs, bufferedDurationUs, timeToLiveEdgeUs, queue, mediaChunkIterators); int selectedTrackIndex = trackSelection.getSelectedIndexInTrackGroup(); - boolean switchingTrack = oldTrackIndex != selectedTrackIndex; Uri selectedPlaylistUrl = playlistUrls[selectedTrackIndex]; if (!playlistTracker.isSnapshotValid(selectedPlaylistUrl)) { @@ -267,83 +398,109 @@ public void getNextChunk( // Retry when playlist is refreshed. 
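The publication-state logic above reduces to a small decision table over the part index and the refreshed playlist. The self-contained sketch below captures that table with plain booleans standing in for the playlist lookups; all names are illustrative and not part of the patch.

    // Sketch only: the playlist lookups are collapsed into boolean/int parameters.
    final class PublicationStateSketch {

      enum State { PRELOAD, PUBLISHED, REMOVED }

      static State classify(
          boolean isFullSegment,
          boolean parentSegmentStillInPlaylist,
          int partIndex,
          int partsAvailableForParent,
          boolean partIsStillPreloadHint,
          boolean partUriUnchanged) {
        if (isFullSegment || !parentSegmentStillInPlaylist) {
          // Full segments cannot be removed; segments that already left the window count as published.
          return State.PUBLISHED;
        }
        if (partIndex >= partsAvailableForParent) {
          // The hinted part no longer exists at that position, so discard it.
          return State.REMOVED;
        }
        if (partIsStillPreloadHint) {
          return State.PRELOAD;
        }
        return partUriUnchanged ? State.PUBLISHED : State.REMOVED;
      }

      public static void main(String[] args) {
        // A part whose URI changed in the refreshed playlist must be discarded.
        System.out.println(classify(false, true, 2, 4, false, false)); // REMOVED
      }
    }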
return; } - HlsMediaPlaylist mediaPlaylist = + @Nullable + HlsMediaPlaylist playlist = playlistTracker.getPlaylistSnapshot(selectedPlaylistUrl, /* isForPlayback= */ true); - // playlistTracker snapshot is valid (checked by if() above), so mediaPlaylist must be non-null. - Assertions.checkNotNull(mediaPlaylist); - independentSegments = mediaPlaylist.hasIndependentSegments; + // playlistTracker snapshot is valid (checked by if() above), so playlist must be non-null. + checkNotNull(playlist); + independentSegments = playlist.hasIndependentSegments; - updateLiveEdgeTimeUs(mediaPlaylist); + updateLiveEdgeTimeUs(playlist); // Select the chunk. - long startOfPlaylistInPeriodUs = - mediaPlaylist.startTimeUs - playlistTracker.getInitialStartTimeUs(); - long chunkMediaSequence = - getChunkMediaSequence( - previous, switchingTrack, mediaPlaylist, startOfPlaylistInPeriodUs, loadPositionUs); - if (chunkMediaSequence < mediaPlaylist.mediaSequence && previous != null && switchingTrack) { - // We try getting the next chunk without adapting in case that's the reason for falling - // behind the live window. - selectedTrackIndex = oldTrackIndex; - selectedPlaylistUrl = playlistUrls[selectedTrackIndex]; - mediaPlaylist = + long startOfPlaylistInPeriodUs = playlist.startTimeUs - playlistTracker.getInitialStartTimeUs(); + Pair nextMediaSequenceAndPartIndex = + getNextMediaSequenceAndPartIndex( + previous, switchingTrack, playlist, startOfPlaylistInPeriodUs, loadPositionUs); + long chunkMediaSequence = nextMediaSequenceAndPartIndex.first; + int partIndex = nextMediaSequenceAndPartIndex.second; + if (chunkMediaSequence < playlist.mediaSequence && previous != null && switchingTrack) { + // We try getting the next chunk without adapting in case that's the reason for falling + // behind the live window. + selectedTrackIndex = oldTrackIndex; + selectedPlaylistUrl = playlistUrls[selectedTrackIndex]; + playlist = playlistTracker.getPlaylistSnapshot(selectedPlaylistUrl, /* isForPlayback= */ true); - // playlistTracker snapshot is valid (checked by if() above), so mediaPlaylist must be - // non-null. - Assertions.checkNotNull(mediaPlaylist); - startOfPlaylistInPeriodUs = - mediaPlaylist.startTimeUs - playlistTracker.getInitialStartTimeUs(); - chunkMediaSequence = previous.getNextChunkIndex(); - } - - if (chunkMediaSequence < mediaPlaylist.mediaSequence) { + // playlistTracker snapshot is valid (checked by if() above), so playlist must be non-null. + checkNotNull(playlist); + startOfPlaylistInPeriodUs = playlist.startTimeUs - playlistTracker.getInitialStartTimeUs(); + // Get the next segment/part without switching tracks. 
+ Pair nextMediaSequenceAndPartIndexWithoutAdapting = + getNextMediaSequenceAndPartIndex( + previous, + /* switchingTrack= */ false, + playlist, + startOfPlaylistInPeriodUs, + loadPositionUs); + chunkMediaSequence = nextMediaSequenceAndPartIndexWithoutAdapting.first; + partIndex = nextMediaSequenceAndPartIndexWithoutAdapting.second; + } + + if (chunkMediaSequence < playlist.mediaSequence) { fatalError = new BehindLiveWindowException(); return; } - int segmentIndexInPlaylist = (int) (chunkMediaSequence - mediaPlaylist.mediaSequence); - int availableSegmentCount = mediaPlaylist.segments.size(); - if (segmentIndexInPlaylist >= availableSegmentCount) { - if (mediaPlaylist.hasEndTag) { - if (allowEndOfStream || availableSegmentCount == 0) { - out.endOfStream = true; - return; - } - segmentIndexInPlaylist = availableSegmentCount - 1; - } else /* Live */ { + @Nullable + SegmentBaseHolder segmentBaseHolder = + getNextSegmentHolder(playlist, chunkMediaSequence, partIndex); + if (segmentBaseHolder == null) { + if (!playlist.hasEndTag) { + // Reload the playlist in case of a live stream. out.playlistUrl = selectedPlaylistUrl; seenExpectedPlaylistError &= selectedPlaylistUrl.equals(expectedPlaylistUrl); expectedPlaylistUrl = selectedPlaylistUrl; return; + } else if (allowEndOfStream || playlist.segments.isEmpty()) { + out.endOfStream = true; + return; } + // Use the last segment available in case of a VOD stream. + segmentBaseHolder = + new SegmentBaseHolder( + Iterables.getLast(playlist.segments), + playlist.mediaSequence + playlist.segments.size() - 1, + /* partIndex= */ C.INDEX_UNSET); } - // We have a valid playlist snapshot, we can discard any playlist errors at this point. + + // We have a valid media segment, we can discard any playlist errors at this point. seenExpectedPlaylistError = false; expectedPlaylistUrl = null; - // Handle encryption. - HlsMediaPlaylist.Segment segment = mediaPlaylist.segments.get(segmentIndexInPlaylist); - - // Check if the segment or its initialization segment are fully encrypted. - Uri initSegmentKeyUri = getFullEncryptionKeyUri(mediaPlaylist, segment.initializationSegment); + // Check if the media segment or its initialization segment are fully encrypted. + @Nullable + Uri initSegmentKeyUri = + getFullEncryptionKeyUri(playlist, segmentBaseHolder.segmentBase.initializationSegment); out.chunk = maybeCreateEncryptionChunkFor(initSegmentKeyUri, selectedTrackIndex); if (out.chunk != null) { return; } - Uri mediaSegmentKeyUri = getFullEncryptionKeyUri(mediaPlaylist, segment); + @Nullable + Uri mediaSegmentKeyUri = getFullEncryptionKeyUri(playlist, segmentBaseHolder.segmentBase); out.chunk = maybeCreateEncryptionChunkFor(mediaSegmentKeyUri, selectedTrackIndex); if (out.chunk != null) { return; } + boolean shouldSpliceIn = + HlsMediaChunk.shouldSpliceIn( + previous, selectedPlaylistUrl, playlist, segmentBaseHolder, startOfPlaylistInPeriodUs); + if (shouldSpliceIn && segmentBaseHolder.isPreload) { + // We don't support discarding spliced-in segments [internal: b/159904763], but preload + // parts may need to be discarded if they are removed before becoming permanently published. + // Hence, don't allow this combination and instead wait with loading the next part until it + // becomes fully available (or the track selection selects another track). 
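When no segment holder can be produced for the requested media sequence, the branch above separates a live playlist that simply needs another refresh from a finished VOD playlist. A compact sketch of that branch follows, assuming only these three outcomes matter; the enum and method names are invented.

    // Sketch only: the three outcomes when the requested segment is past the playlist end.
    final class PlaylistEndSketch {

      enum Action { RELOAD_PLAYLIST, END_OF_STREAM, USE_LAST_SEGMENT }

      static Action onNoSegmentFound(boolean hasEndTag, boolean allowEndOfStream, int segmentCount) {
        if (!hasEndTag) {
          // Live stream: the segment may simply not be published yet, so refresh the playlist.
          return Action.RELOAD_PLAYLIST;
        }
        if (allowEndOfStream || segmentCount == 0) {
          return Action.END_OF_STREAM;
        }
        // Finished (VOD) playlist: clamp to the last available segment instead of ending playback.
        return Action.USE_LAST_SEGMENT;
      }

      public static void main(String[] args) {
        System.out.println(onNoSegmentFound(false, true, 10));  // RELOAD_PLAYLIST
        System.out.println(onNoSegmentFound(true, false, 10));  // USE_LAST_SEGMENT
      }
    }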
+ return; + } + out.chunk = HlsMediaChunk.createInstance( extractorFactory, mediaDataSource, playlistFormats[selectedTrackIndex], startOfPlaylistInPeriodUs, - mediaPlaylist, - segmentIndexInPlaylist, + playlist, + segmentBaseHolder, selectedPlaylistUrl, muxedCaptionFormats, trackSelection.getSelectionReason(), @@ -352,7 +509,44 @@ public void getNextChunk( timestampAdjusterProvider, previous, /* mediaSegmentKey= */ keyCache.get(mediaSegmentKeyUri), - /* initSegmentKey= */ keyCache.get(initSegmentKeyUri)); + /* initSegmentKey= */ keyCache.get(initSegmentKeyUri), + shouldSpliceIn, + playerId); + } + + @Nullable + private static SegmentBaseHolder getNextSegmentHolder( + HlsMediaPlaylist mediaPlaylist, long nextMediaSequence, int nextPartIndex) { + int segmentIndexInPlaylist = (int) (nextMediaSequence - mediaPlaylist.mediaSequence); + if (segmentIndexInPlaylist == mediaPlaylist.segments.size()) { + int index = nextPartIndex != C.INDEX_UNSET ? nextPartIndex : 0; + return index < mediaPlaylist.trailingParts.size() + ? new SegmentBaseHolder(mediaPlaylist.trailingParts.get(index), nextMediaSequence, index) + : null; + } + + Segment mediaSegment = mediaPlaylist.segments.get(segmentIndexInPlaylist); + if (nextPartIndex == C.INDEX_UNSET) { + return new SegmentBaseHolder(mediaSegment, nextMediaSequence, /* partIndex= */ C.INDEX_UNSET); + } + + if (nextPartIndex < mediaSegment.parts.size()) { + // The requested part is available in the requested segment. + return new SegmentBaseHolder( + mediaSegment.parts.get(nextPartIndex), nextMediaSequence, nextPartIndex); + } else if (segmentIndexInPlaylist + 1 < mediaPlaylist.segments.size()) { + // The first part of the next segment is requested, but we can use the next full segment. + return new SegmentBaseHolder( + mediaPlaylist.segments.get(segmentIndexInPlaylist + 1), + nextMediaSequence + 1, + /* partIndex= */ C.INDEX_UNSET); + } else if (!mediaPlaylist.trailingParts.isEmpty()) { + // The part index is rolling over to the first trailing part. + return new SegmentBaseHolder( + mediaPlaylist.trailingParts.get(0), nextMediaSequence + 1, /* partIndex= */ 0); + } + // End of stream. + return null; } /** @@ -365,34 +559,33 @@ public void onChunkLoadCompleted(Chunk chunk) { if (chunk instanceof EncryptionKeyChunk) { EncryptionKeyChunk encryptionKeyChunk = (EncryptionKeyChunk) chunk; scratchSpace = encryptionKeyChunk.getDataHolder(); - keyCache.put( - encryptionKeyChunk.dataSpec.uri, Assertions.checkNotNull(encryptionKeyChunk.getResult())); + keyCache.put(encryptionKeyChunk.dataSpec.uri, checkNotNull(encryptionKeyChunk.getResult())); } } /** - * Attempts to blacklist the track associated with the given chunk. Blacklisting will fail if the - * track is the only non-blacklisted track in the selection. + * Attempts to exclude the track associated with the given chunk. Exclusion will fail if the track + * is the only non-excluded track in the selection. * - * @param chunk The chunk whose load caused the blacklisting attempt. - * @param blacklistDurationMs The number of milliseconds for which the track selection should be - * blacklisted. - * @return Whether the blacklisting succeeded. + * @param chunk The chunk whose load caused the exclusion attempt. + * @param exclusionDurationMs The number of milliseconds for which the track selection should be + * excluded. + * @return Whether the exclusion succeeded. 
*/ - public boolean maybeBlacklistTrack(Chunk chunk, long blacklistDurationMs) { + public boolean maybeExcludeTrack(Chunk chunk, long exclusionDurationMs) { return trackSelection.blacklist( - trackSelection.indexOf(trackGroup.indexOf(chunk.trackFormat)), blacklistDurationMs); + trackSelection.indexOf(trackGroup.indexOf(chunk.trackFormat)), exclusionDurationMs); } /** * Called when a playlist load encounters an error. * * @param playlistUrl The {@link Uri} of the playlist whose load encountered an error. - * @param blacklistDurationMs The duration for which the playlist should be blacklisted. Or {@link - * C#TIME_UNSET} if the playlist should not be blacklisted. - * @return True if blacklisting did not encounter errors. False otherwise. + * @param exclusionDurationMs The duration for which the playlist should be excluded. Or {@link + * C#TIME_UNSET} if the playlist should not be excluded. + * @return True if excluding did not encounter errors. False otherwise. */ - public boolean onPlaylistError(Uri playlistUrl, long blacklistDurationMs) { + public boolean onPlaylistError(Uri playlistUrl, long exclusionDurationMs) { int trackGroupIndex = C.INDEX_UNSET; for (int i = 0; i < playlistUrls.length; i++) { if (playlistUrls[i].equals(playlistUrl)) { @@ -408,8 +601,9 @@ public boolean onPlaylistError(Uri playlistUrl, long blacklistDurationMs) { return true; } seenExpectedPlaylistError |= playlistUrl.equals(expectedPlaylistUrl); - return blacklistDurationMs == C.TIME_UNSET - || trackSelection.blacklist(trackSelectionIndex, blacklistDurationMs); + return exclusionDurationMs == C.TIME_UNSET + || (trackSelection.blacklist(trackSelectionIndex, exclusionDurationMs) + && playlistTracker.excludeMediaPlaylist(playlistUrl, exclusionDurationMs)); } /** @@ -430,31 +624,119 @@ public MediaChunkIterator[] createMediaChunkIterators( chunkIterators[i] = MediaChunkIterator.EMPTY; continue; } + @Nullable HlsMediaPlaylist playlist = playlistTracker.getPlaylistSnapshot(playlistUrl, /* isForPlayback= */ false); // Playlist snapshot is valid (checked by if() above) so playlist must be non-null. - Assertions.checkNotNull(playlist); + checkNotNull(playlist); long startOfPlaylistInPeriodUs = playlist.startTimeUs - playlistTracker.getInitialStartTimeUs(); boolean switchingTrack = trackIndex != oldTrackIndex; - long chunkMediaSequence = - getChunkMediaSequence( + Pair chunkMediaSequenceAndPartIndex = + getNextMediaSequenceAndPartIndex( previous, switchingTrack, playlist, startOfPlaylistInPeriodUs, loadPositionUs); - if (chunkMediaSequence < playlist.mediaSequence) { - chunkIterators[i] = MediaChunkIterator.EMPTY; - continue; - } - int chunkIndex = (int) (chunkMediaSequence - playlist.mediaSequence); + long chunkMediaSequence = chunkMediaSequenceAndPartIndex.first; + int partIndex = chunkMediaSequenceAndPartIndex.second; chunkIterators[i] = - new HlsMediaPlaylistSegmentIterator(playlist, startOfPlaylistInPeriodUs, chunkIndex); + new HlsMediaPlaylistSegmentIterator( + playlist.baseUri, + startOfPlaylistInPeriodUs, + getSegmentBaseList(playlist, chunkMediaSequence, partIndex)); } return chunkIterators; } + /** + * Evaluates whether {@link MediaChunk MediaChunks} should be removed from the back of the queue. + * + *
<p>
      Removing {@link MediaChunk MediaChunks} from the back of the queue can be useful if they + * could be replaced with chunks of a significantly higher quality (e.g. because the available + * bandwidth has substantially increased). + * + *
<p>
      Will only be called if no {@link MediaChunk} in the queue is currently loading. + * + * @param playbackPositionUs The current playback position, in microseconds. + * @param queue The queue of buffered {@link MediaChunk MediaChunks}. + * @return The preferred queue size. + */ + public int getPreferredQueueSize(long playbackPositionUs, List queue) { + if (fatalError != null || trackSelection.length() < 2) { + return queue.size(); + } + return trackSelection.evaluateQueueSize(playbackPositionUs, queue); + } + + /** + * Returns whether an ongoing load of a chunk should be canceled. + * + * @param playbackPositionUs The current playback position, in microseconds. + * @param loadingChunk The currently loading {@link Chunk}. + * @param queue The queue of buffered {@link MediaChunk MediaChunks}. + * @return Whether the ongoing load of {@code loadingChunk} should be canceled. + */ + public boolean shouldCancelLoad( + long playbackPositionUs, Chunk loadingChunk, List queue) { + if (fatalError != null) { + return false; + } + return trackSelection.shouldCancelChunkLoad(playbackPositionUs, loadingChunk, queue); + } + + // Package methods. + + /** + * Returns a list with all segment bases in the playlist starting from {@code mediaSequence} and + * {@code partIndex} in the given playlist. The list may be empty if the starting point is not in + * the playlist. + */ + @VisibleForTesting + /* package */ static List getSegmentBaseList( + HlsMediaPlaylist playlist, long mediaSequence, int partIndex) { + int firstSegmentIndexInPlaylist = (int) (mediaSequence - playlist.mediaSequence); + if (firstSegmentIndexInPlaylist < 0 || playlist.segments.size() < firstSegmentIndexInPlaylist) { + // The first media sequence is not in the playlist. + return ImmutableList.of(); + } + List segmentBases = new ArrayList<>(); + if (firstSegmentIndexInPlaylist < playlist.segments.size()) { + if (partIndex != C.INDEX_UNSET) { + // The iterator starts with a part that belongs to a segment. + Segment firstSegment = playlist.segments.get(firstSegmentIndexInPlaylist); + if (partIndex == 0) { + // Use the full segment instead of the first part. + segmentBases.add(firstSegment); + } else if (partIndex < firstSegment.parts.size()) { + // Add the parts from the first requested segment. + segmentBases.addAll(firstSegment.parts.subList(partIndex, firstSegment.parts.size())); + } + firstSegmentIndexInPlaylist++; + } + partIndex = 0; + // Add all remaining segments. + segmentBases.addAll( + playlist.segments.subList(firstSegmentIndexInPlaylist, playlist.segments.size())); + } + + if (playlist.partTargetDurationUs != C.TIME_UNSET) { + // That's a low latency playlist. + partIndex = partIndex == C.INDEX_UNSET ? 0 : partIndex; + if (partIndex < playlist.trailingParts.size()) { + segmentBases.addAll( + playlist.trailingParts.subList(partIndex, playlist.trailingParts.size())); + } + } + return Collections.unmodifiableList(segmentBases); + } + + /** Returns whether this chunk source obtains chunks for the playlist with the given url. */ + public boolean obtainsChunksForPlaylist(Uri playlistUrl) { + return Util.contains(playlistUrls, playlistUrl); + } + // Private methods. /** - * Returns the media sequence number of the segment to load next in {@code mediaPlaylist}. + * Returns the media sequence number and part index to load next in the {@code mediaPlaylist}. * * @param previous The last (at least partially) loaded segment. 
* @param switchingTrack Whether the segment to load is not preceded by a segment in the same @@ -463,9 +745,9 @@ public MediaChunkIterator[] createMediaChunkIterators( * @param startOfPlaylistInPeriodUs The start of {@code mediaPlaylist} relative to the period * start in microseconds. * @param loadPositionUs The current load position relative to the period start in microseconds. - * @return The media sequence of the segment to load. + * @return The media sequence and part index to load. */ - private long getChunkMediaSequence( + private Pair getNextMediaSequenceAndPartIndex( @Nullable HlsMediaChunk previous, boolean switchingTrack, HlsMediaPlaylist mediaPlaylist, @@ -477,19 +759,48 @@ private long getChunkMediaSequence( (previous == null || independentSegments) ? loadPositionUs : previous.startTimeUs; if (!mediaPlaylist.hasEndTag && targetPositionInPeriodUs >= endOfPlaylistInPeriodUs) { // If the playlist is too old to contain the chunk, we need to refresh it. - return mediaPlaylist.mediaSequence + mediaPlaylist.segments.size(); + return new Pair<>( + mediaPlaylist.mediaSequence + mediaPlaylist.segments.size(), + /* partIndex */ C.INDEX_UNSET); } long targetPositionInPlaylistUs = targetPositionInPeriodUs - startOfPlaylistInPeriodUs; - return Util.binarySearchFloor( + int segmentIndexInPlaylist = + Util.binarySearchFloor( mediaPlaylist.segments, /* value= */ targetPositionInPlaylistUs, /* inclusive= */ true, - /* stayInBounds= */ !playlistTracker.isLive() || previous == null) - + mediaPlaylist.mediaSequence; - } - // We ignore the case of previous not having loaded completely, in which case we load the next - // segment. - return previous.getNextChunkIndex(); + /* stayInBounds= */ !playlistTracker.isLive() || previous == null); + long mediaSequence = segmentIndexInPlaylist + mediaPlaylist.mediaSequence; + int partIndex = C.INDEX_UNSET; + if (segmentIndexInPlaylist >= 0) { + // In case we are inside the live window, we try to pick a part if available. + Segment segment = mediaPlaylist.segments.get(segmentIndexInPlaylist); + List parts = + targetPositionInPlaylistUs < segment.relativeStartTimeUs + segment.durationUs + ? segment.parts + : mediaPlaylist.trailingParts; + for (int i = 0; i < parts.size(); i++) { + HlsMediaPlaylist.Part part = parts.get(i); + if (targetPositionInPlaylistUs < part.relativeStartTimeUs + part.durationUs) { + if (part.isIndependent) { + partIndex = i; + // Increase media sequence by one if the part is a trailing part. + mediaSequence += parts == mediaPlaylist.trailingParts ? 1 : 0; + } + break; + } + } + } + return new Pair<>(mediaSequence, partIndex); + } + // If loading has not completed, we return the previous chunk again. + return (previous.isLoadCompleted() + ? new Pair<>( + previous.partIndex == C.INDEX_UNSET + ? previous.getNextChunkIndex() + : previous.chunkIndex, + previous.partIndex == C.INDEX_UNSET ? C.INDEX_UNSET : previous.partIndex + 1) + : new Pair<>(previous.chunkIndex, previous.partIndex)); } private long resolveTimeToLiveEdgeUs(long playbackPositionUs) { @@ -512,7 +823,7 @@ private Chunk maybeCreateEncryptionChunkFor(@Nullable Uri keyUri, int selectedTr return null; } - byte[] encryptionKey = keyCache.remove(keyUri); + @Nullable byte[] encryptionKey = keyCache.remove(keyUri); if (encryptionKey != null) { // The key was present in the key cache. We re-insert it to prevent it from being evicted by // the following key addition. 
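getNextMediaSequenceAndPartIndex above maps a target position to a media sequence number with a floor binary search over segment start times. The sketch below reproduces just that mapping using java.util.Arrays.binarySearch; the part-index refinement and live-edge handling are omitted, and the values in main are invented.

    import java.util.Arrays;

    // Sketch only: floor lookup from a target position to a media sequence number.
    final class MediaSequenceLookupSketch {

      /**
       * @param segmentStartTimesUs Relative segment start times, sorted ascending.
       * @param firstMediaSequence Media sequence number of the segment at index 0.
       * @param targetPositionUs Target position relative to the playlist start.
       */
      static long mediaSequenceForPosition(
          long[] segmentStartTimesUs, long firstMediaSequence, long targetPositionUs) {
        // Floor binary search: index of the last start time that is <= targetPositionUs.
        int index = Arrays.binarySearch(segmentStartTimesUs, targetPositionUs);
        if (index < 0) {
          index = -index - 2; // insertion point minus one
        }
        index = Math.max(index, 0); // clamp, loosely mirroring the stayInBounds behaviour above
        return firstMediaSequence + index;
      }

      public static void main(String[] args) {
        long[] starts = {0, 4_000_000, 8_000_000, 12_000_000};
        // 9.5s falls inside the third segment, so the media sequence is 102.
        System.out.println(mediaSequenceForPosition(starts, 100, 9_500_000)); // 102
      }
    }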
Note that removal of the key is necessary to affect the @@ -520,7 +831,8 @@ private Chunk maybeCreateEncryptionChunkFor(@Nullable Uri keyUri, int selectedTr keyCache.put(keyUri, encryptionKey); return null; } - DataSpec dataSpec = new DataSpec(keyUri, 0, C.LENGTH_UNSET, null, DataSpec.FLAG_ALLOW_GZIP); + DataSpec dataSpec = + new DataSpec.Builder().setUri(keyUri).setFlags(DataSpec.FLAG_ALLOW_GZIP).build(); return new EncryptionKeyChunk( encryptionDataSource, dataSpec, @@ -531,25 +843,47 @@ private Chunk maybeCreateEncryptionChunkFor(@Nullable Uri keyUri, int selectedTr } @Nullable - private static Uri getFullEncryptionKeyUri(HlsMediaPlaylist playlist, @Nullable Segment segment) { - if (segment == null || segment.fullSegmentEncryptionKeyUri == null) { + private static Uri getFullEncryptionKeyUri( + HlsMediaPlaylist playlist, @Nullable HlsMediaPlaylist.SegmentBase segmentBase) { + if (segmentBase == null || segmentBase.fullSegmentEncryptionKeyUri == null) { return null; } - return UriUtil.resolveToUri(playlist.baseUri, segment.fullSegmentEncryptionKeyUri); + return UriUtil.resolveToUri(playlist.baseUri, segmentBase.fullSegmentEncryptionKeyUri); + } + + // Package classes. + + /* package */ static final class SegmentBaseHolder { + + public final HlsMediaPlaylist.SegmentBase segmentBase; + public final long mediaSequence; + public final int partIndex; + public final boolean isPreload; + + /** Creates a new instance. */ + public SegmentBaseHolder( + HlsMediaPlaylist.SegmentBase segmentBase, long mediaSequence, int partIndex) { + this.segmentBase = segmentBase; + this.mediaSequence = mediaSequence; + this.partIndex = partIndex; + this.isPreload = + segmentBase instanceof HlsMediaPlaylist.Part + && ((HlsMediaPlaylist.Part) segmentBase).isPreload; + } } // Private classes. - /** - * A {@link TrackSelection} to use for initialization. - */ + /** A {@link ExoTrackSelection} to use for initialization. */ private static final class InitializationTrackSelection extends BaseTrackSelection { private int selectedIndex; public InitializationTrackSelection(TrackGroup group, int[] tracks) { super(group, tracks); - selectedIndex = indexOf(group.getFormat(0)); + // The initially selected index corresponds to the first EXT-X-STREAMINF tag in the + // multivariant playlist. + selectedIndex = indexOf(group.getFormat(tracks[0])); } @Override @@ -580,7 +914,7 @@ public int getSelectedIndex() { } @Override - public int getSelectionReason() { + public @C.SelectionReason int getSelectionReason() { return C.SELECTION_REASON_UNKNOWN; } @@ -589,7 +923,6 @@ public int getSelectionReason() { public Object getSelectionData() { return null; } - } private static final class EncryptionKeyChunk extends DataChunk { @@ -600,11 +933,17 @@ public EncryptionKeyChunk( DataSource dataSource, DataSpec dataSpec, Format trackFormat, - int trackSelectionReason, + @C.SelectionReason int trackSelectionReason, @Nullable Object trackSelectionData, byte[] scratchSpace) { - super(dataSource, dataSpec, C.DATA_TYPE_DRM, trackFormat, trackSelectionReason, - trackSelectionData, scratchSpace); + super( + dataSource, + dataSpec, + C.DATA_TYPE_DRM, + trackFormat, + trackSelectionReason, + trackSelectionData, + scratchSpace); } @Override @@ -617,52 +956,54 @@ protected void consume(byte[] data, int limit) { public byte[] getResult() { return result; } - } - /** {@link MediaChunkIterator} wrapping a {@link HlsMediaPlaylist}. 
*/ - private static final class HlsMediaPlaylistSegmentIterator extends BaseMediaChunkIterator { + @VisibleForTesting + /* package */ static final class HlsMediaPlaylistSegmentIterator extends BaseMediaChunkIterator { - private final HlsMediaPlaylist playlist; + private final List segmentBases; private final long startOfPlaylistInPeriodUs; + private final String playlistBaseUri; /** - * Creates iterator. + * Creates an iterator instance wrapping a list of {@link HlsMediaPlaylist.SegmentBase}. * - * @param playlist The {@link HlsMediaPlaylist} to wrap. + * @param playlistBaseUri The base URI of the {@link HlsMediaPlaylist}. * @param startOfPlaylistInPeriodUs The start time of the playlist in the period, in * microseconds. - * @param chunkIndex The index of the first available chunk in the playlist. + * @param segmentBases The list of {@link HlsMediaPlaylist.SegmentBase segment bases} to wrap. */ public HlsMediaPlaylistSegmentIterator( - HlsMediaPlaylist playlist, long startOfPlaylistInPeriodUs, int chunkIndex) { - super(/* fromIndex= */ chunkIndex, /* toIndex= */ playlist.segments.size() - 1); - this.playlist = playlist; + String playlistBaseUri, + long startOfPlaylistInPeriodUs, + List segmentBases) { + super(/* fromIndex= */ 0, segmentBases.size() - 1); + this.playlistBaseUri = playlistBaseUri; this.startOfPlaylistInPeriodUs = startOfPlaylistInPeriodUs; + this.segmentBases = segmentBases; } @Override public DataSpec getDataSpec() { checkInBounds(); - Segment segment = playlist.segments.get((int) getCurrentIndex()); - Uri chunkUri = UriUtil.resolveToUri(playlist.baseUri, segment.url); - return new DataSpec( - chunkUri, segment.byterangeOffset, segment.byterangeLength, /* key= */ null); + HlsMediaPlaylist.SegmentBase segmentBase = segmentBases.get((int) getCurrentIndex()); + Uri chunkUri = UriUtil.resolveToUri(playlistBaseUri, segmentBase.url); + return new DataSpec(chunkUri, segmentBase.byteRangeOffset, segmentBase.byteRangeLength); } @Override public long getChunkStartTimeUs() { checkInBounds(); - Segment segment = playlist.segments.get((int) getCurrentIndex()); - return startOfPlaylistInPeriodUs + segment.relativeStartTimeUs; + return startOfPlaylistInPeriodUs + + segmentBases.get((int) getCurrentIndex()).relativeStartTimeUs; } @Override public long getChunkEndTimeUs() { checkInBounds(); - Segment segment = playlist.segments.get((int) getCurrentIndex()); - long segmentStartTimeInPeriodUs = startOfPlaylistInPeriodUs + segment.relativeStartTimeUs; - return segmentStartTimeInPeriodUs + segment.durationUs; + HlsMediaPlaylist.SegmentBase segmentBase = segmentBases.get((int) getCurrentIndex()); + long segmentStartTimeInPeriodUs = startOfPlaylistInPeriodUs + segmentBase.relativeStartTimeUs; + return segmentStartTimeInPeriodUs + segmentBase.durationUs; } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsDataSourceFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsDataSourceFactory.java index 30e7af5a0b..f9dd9cf7a9 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsDataSourceFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsDataSourceFactory.java @@ -18,18 +18,14 @@ import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.upstream.DataSource; -/** - * Creates {@link DataSource}s for HLS playlists, encryption and media chunks. - */ +/** Creates {@link DataSource}s for HLS playlists, encryption and media chunks. 
*/ public interface HlsDataSourceFactory { /** * Creates a {@link DataSource} for the given data type. * - * @param dataType The data type for which the {@link DataSource} will be used. One of {@link C} - * {@code .DATA_TYPE_*} constants. + * @param dataType The {@link C.DataType} for which the {@link DataSource} will be used. * @return A {@link DataSource} for the given data type. */ - DataSource createDataSource(int dataType); - + DataSource createDataSource(@C.DataType int dataType); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsExtractorFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsExtractorFactory.java index ace04145ed..a137ece7a8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsExtractorFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsExtractorFactory.java @@ -18,6 +18,7 @@ import android.net.Uri; import androidx.annotation.Nullable; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.analytics.PlayerId; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.PositionHolder; @@ -26,67 +27,35 @@ import java.util.List; import java.util.Map; -/** - * Factory for HLS media chunk extractors. - */ +/** Factory for HLS media chunk extractors. */ public interface HlsExtractorFactory { - /** Holds an {@link Extractor} and associated parameters. */ - final class Result { - - /** The created extractor; */ - public final Extractor extractor; - /** Whether the segments for which {@link #extractor} is created are packed audio segments. */ - public final boolean isPackedAudioExtractor; - /** - * Whether {@link #extractor} may be reused for following continuous (no immediately preceding - * discontinuities) segments of the same variant. - */ - public final boolean isReusable; - - /** - * Creates a result. - * - * @param extractor See {@link #extractor}. - * @param isPackedAudioExtractor See {@link #isPackedAudioExtractor}. - * @param isReusable See {@link #isReusable}. - */ - public Result(Extractor extractor, boolean isPackedAudioExtractor, boolean isReusable) { - this.extractor = extractor; - this.isPackedAudioExtractor = isPackedAudioExtractor; - this.isReusable = isReusable; - } - } - HlsExtractorFactory DEFAULT = new DefaultHlsExtractorFactory(); /** * Creates an {@link Extractor} for extracting HLS media chunks. * - * @param previousExtractor A previously used {@link Extractor} which can be reused if the current - * chunk is a continuation of the previously extracted chunk, or null otherwise. It is the - * responsibility of implementers to only reuse extractors that are suited for reusage. * @param uri The URI of the media chunk. * @param format A {@link Format} associated with the chunk to extract. * @param muxedCaptionFormats List of muxed caption {@link Format}s. Null if no closed caption - * information is available in the master playlist. + * information is available in the multivariant playlist. * @param timestampAdjuster Adjuster corresponding to the provided discontinuity sequence number. * @param responseHeaders The HTTP response headers associated with the media segment or * initialization section to extract. * @param sniffingExtractorInput The first extractor input that will be passed to the returned * extractor's {@link Extractor#read(ExtractorInput, PositionHolder)}. 
Must only be used to * call {@link Extractor#sniff(ExtractorInput)}. - * @return A {@link Result}. - * @throws InterruptedException If the thread is interrupted while sniffing. + * @param playerId The {@link PlayerId} of the player using this extractors factory. + * @return An {@link HlsMediaChunkExtractor}. * @throws IOException If an I/O error is encountered while sniffing. */ - Result createExtractor( - @Nullable Extractor previousExtractor, + HlsMediaChunkExtractor createExtractor( Uri uri, Format format, @Nullable List muxedCaptionFormats, TimestampAdjuster timestampAdjuster, Map> responseHeaders, - ExtractorInput sniffingExtractorInput) - throws InterruptedException, IOException; + ExtractorInput sniffingExtractorInput, + PlayerId playerId) + throws IOException; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsManifest.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsManifest.java index 81d63fd4ad..bc74c0057a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsManifest.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsManifest.java @@ -17,28 +17,44 @@ import com.google.android.exoplayer2.source.hls.playlist.HlsMasterPlaylist; import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist; +import com.google.android.exoplayer2.source.hls.playlist.HlsMultivariantPlaylist; -/** - * Holds a master playlist along with a snapshot of one of its media playlists. - */ +/** Holds a multivariant playlist along with a snapshot of one of its media playlists. */ public final class HlsManifest { /** - * The master playlist of an HLS stream. + * @deprecated Use {@link #multivariantPlaylist} instead. */ + @Deprecated + @SuppressWarnings("deprecation") // Keeping deprecated field with deprecated class. public final HlsMasterPlaylist masterPlaylist; - /** - * A snapshot of a media playlist referred to by {@link #masterPlaylist}. - */ + /** The multivariant playlist of an HLS stream. */ + public final HlsMultivariantPlaylist multivariantPlaylist; + /** A snapshot of a media playlist referred to by {@link #multivariantPlaylist}. */ public final HlsMediaPlaylist mediaPlaylist; /** - * @param masterPlaylist The master playlist. + * @param multivariantPlaylist The multivariant playlist. * @param mediaPlaylist The media playlist. */ - HlsManifest(HlsMasterPlaylist masterPlaylist, HlsMediaPlaylist mediaPlaylist) { - this.masterPlaylist = masterPlaylist; + @SuppressWarnings("deprecation") // Intentionally creating deprecated hlsMasterPlaylist field. 
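The constructor that follows keeps the deprecated masterPlaylist field populated from the new multivariantPlaylist so existing callers keep compiling and seeing consistent data. A generic sketch of that compatibility pattern, with invented types, might look like this:

    // Sketch only: a renamed type kept available through a deprecated, derived field.
    final class RenamedFieldShimSketch {

      static class MultivariantStyle {
        final String baseUri;
        MultivariantStyle(String baseUri) { this.baseUri = baseUri; }
      }

      @Deprecated
      static class MasterStyle {
        final String baseUri;
        MasterStyle(String baseUri) { this.baseUri = baseUri; }
      }

      static final class Manifest {
        public final MultivariantStyle multivariant;
        /** @deprecated Kept in sync with {@link #multivariant} for old callers. */
        @Deprecated public final MasterStyle master;

        Manifest(MultivariantStyle multivariant) {
          this.multivariant = multivariant;
          // Derive the legacy view from the new object so the two never diverge.
          this.master = new MasterStyle(multivariant.baseUri);
        }
      }

      public static void main(String[] args) {
        Manifest m = new Manifest(new MultivariantStyle("https://example.invalid/playlist.m3u8"));
        System.out.println(m.master.baseUri.equals(m.multivariant.baseUri)); // true
      }
    }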
+ /* package */ HlsManifest( + HlsMultivariantPlaylist multivariantPlaylist, HlsMediaPlaylist mediaPlaylist) { + this.multivariantPlaylist = multivariantPlaylist; this.mediaPlaylist = mediaPlaylist; + this.masterPlaylist = + new HlsMasterPlaylist( + multivariantPlaylist.baseUri, + multivariantPlaylist.tags, + multivariantPlaylist.variants, + multivariantPlaylist.videos, + multivariantPlaylist.audios, + multivariantPlaylist.subtitles, + multivariantPlaylist.closedCaptions, + multivariantPlaylist.muxedAudioFormat, + multivariantPlaylist.muxedCaptionFormats, + multivariantPlaylist.hasIndependentSegments, + multivariantPlaylist.variableDefinitions, + multivariantPlaylist.sessionKeyDrmInitData); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsMediaChunk.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsMediaChunk.java index f9707a87fa..9bdc2b9079 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsMediaChunk.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsMediaChunk.java @@ -15,29 +15,33 @@ */ package com.google.android.exoplayer2.source.hls; +import static com.google.android.exoplayer2.upstream.DataSpec.FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED; + import android.net.Uri; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.analytics.PlayerId; import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.extractor.DefaultExtractorInput; -import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.extractor.ExtractorInput; -import com.google.android.exoplayer2.extractor.PositionHolder; import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.metadata.id3.Id3Decoder; import com.google.android.exoplayer2.metadata.id3.PrivFrame; import com.google.android.exoplayer2.source.chunk.MediaChunk; import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist; import com.google.android.exoplayer2.upstream.DataSource; +import com.google.android.exoplayer2.upstream.DataSourceUtil; import com.google.android.exoplayer2.upstream.DataSpec; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.TimestampAdjuster; import com.google.android.exoplayer2.util.UriUtil; -import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Ascii; +import com.google.common.collect.ImmutableList; import java.io.EOFException; import java.io.IOException; +import java.io.InterruptedIOException; import java.math.BigInteger; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; @@ -45,23 +49,22 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * An HLS {@link MediaChunk}. - */ +/** An HLS {@link MediaChunk}. */ /* package */ final class HlsMediaChunk extends MediaChunk { /** * Creates a new instance. * - * @param extractorFactory A {@link HlsExtractorFactory} from which the HLS media chunk extractor - * is obtained. + * @param extractorFactory A {@link HlsExtractorFactory} from which the {@link + * HlsMediaChunkExtractor} is obtained. * @param dataSource The source from which the data should be loaded. * @param format The chunk format. 
* @param startOfPlaylistInPeriodUs The position of the playlist in the period in microseconds. * @param mediaPlaylist The media playlist from which this chunk was obtained. + * @param segmentBaseHolder The segment holder. * @param playlistUrl The url of the playlist from which this chunk was obtained. * @param muxedCaptionFormats List of muxed caption {@link Format}s. Null if no closed caption - * information is available in the master playlist. + * information is available in the multivariant playlist. * @param trackSelectionReason See {@link #trackSelectionReason}. * @param trackSelectionData See {@link #trackSelectionData}. * @param isMasterTimestampSource True if the chunk can initialize the timestamp adjuster. @@ -71,6 +74,7 @@ * @param mediaSegmentKey The media segment decryption key, if fully encrypted. Null otherwise. * @param initSegmentKey The initialization segment decryption key, if fully encrypted. Null * otherwise. + * @param shouldSpliceIn Whether samples for this chunk should be spliced into existing samples. */ public static HlsMediaChunk createInstance( HlsExtractorFactory extractorFactory, @@ -78,25 +82,29 @@ public static HlsMediaChunk createInstance( Format format, long startOfPlaylistInPeriodUs, HlsMediaPlaylist mediaPlaylist, - int segmentIndexInPlaylist, + HlsChunkSource.SegmentBaseHolder segmentBaseHolder, Uri playlistUrl, @Nullable List muxedCaptionFormats, - int trackSelectionReason, + @C.SelectionReason int trackSelectionReason, @Nullable Object trackSelectionData, boolean isMasterTimestampSource, TimestampAdjusterProvider timestampAdjusterProvider, @Nullable HlsMediaChunk previousChunk, @Nullable byte[] mediaSegmentKey, - @Nullable byte[] initSegmentKey) { + @Nullable byte[] initSegmentKey, + boolean shouldSpliceIn, + PlayerId playerId) { // Media segment. - HlsMediaPlaylist.Segment mediaSegment = mediaPlaylist.segments.get(segmentIndexInPlaylist); + HlsMediaPlaylist.SegmentBase mediaSegment = segmentBaseHolder.segmentBase; DataSpec dataSpec = - new DataSpec( - UriUtil.resolveToUri(mediaPlaylist.baseUri, mediaSegment.url), - mediaSegment.byterangeOffset, - mediaSegment.byterangeLength, - /* key= */ null); + new DataSpec.Builder() + .setUri(UriUtil.resolveToUri(mediaPlaylist.baseUri, mediaSegment.url)) + .setPosition(mediaSegment.byteRangeOffset) + .setLength(mediaSegment.byteRangeLength) + .setFlags(segmentBaseHolder.isPreload ? FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED : 0) + .build(); boolean mediaSegmentEncrypted = mediaSegmentKey != null; + @Nullable byte[] mediaSegmentIv = mediaSegmentEncrypted ? getEncryptionIvArray(Assertions.checkNotNull(mediaSegment.encryptionIV)) @@ -107,20 +115,17 @@ public static HlsMediaChunk createInstance( HlsMediaPlaylist.Segment initSegment = mediaSegment.initializationSegment; DataSpec initDataSpec = null; boolean initSegmentEncrypted = false; - DataSource initDataSource = null; + @Nullable DataSource initDataSource = null; if (initSegment != null) { initSegmentEncrypted = initSegmentKey != null; + @Nullable byte[] initSegmentIv = initSegmentEncrypted ? 
getEncryptionIvArray(Assertions.checkNotNull(initSegment.encryptionIV)) : null; Uri initSegmentUri = UriUtil.resolveToUri(mediaPlaylist.baseUri, initSegment.url); initDataSpec = - new DataSpec( - initSegmentUri, - initSegment.byterangeOffset, - initSegment.byterangeLength, - /* key= */ null); + new DataSpec(initSegmentUri, initSegment.byteRangeOffset, initSegment.byteRangeLength); initDataSource = buildDataSource(dataSource, initSegmentKey, initSegmentIv); } @@ -129,27 +134,32 @@ public static HlsMediaChunk createInstance( int discontinuitySequenceNumber = mediaPlaylist.discontinuitySequence + mediaSegment.relativeDiscontinuitySequence; - Extractor previousExtractor = null; + @Nullable HlsMediaChunkExtractor previousExtractor = null; Id3Decoder id3Decoder; ParsableByteArray scratchId3Data; - boolean shouldSpliceIn; + if (previousChunk != null) { + boolean isSameInitData = + initDataSpec == previousChunk.initDataSpec + || (initDataSpec != null + && previousChunk.initDataSpec != null + && initDataSpec.uri.equals(previousChunk.initDataSpec.uri) + && initDataSpec.position == previousChunk.initDataSpec.position); + boolean isFollowingChunk = + playlistUrl.equals(previousChunk.playlistUrl) && previousChunk.loadCompleted; id3Decoder = previousChunk.id3Decoder; scratchId3Data = previousChunk.scratchId3Data; - shouldSpliceIn = - !playlistUrl.equals(previousChunk.playlistUrl) || !previousChunk.loadCompleted; previousExtractor = - previousChunk.isExtractorReusable + isSameInitData + && isFollowingChunk + && !previousChunk.extractorInvalidated && previousChunk.discontinuitySequenceNumber == discontinuitySequenceNumber - && !shouldSpliceIn ? previousChunk.extractor : null; } else { id3Decoder = new Id3Decoder(); scratchId3Data = new ParsableByteArray(Id3Decoder.ID3_HEADER_LENGTH); - shouldSpliceIn = false; } - return new HlsMediaChunk( extractorFactory, mediaDataSource, @@ -165,7 +175,9 @@ public static HlsMediaChunk createInstance( trackSelectionData, segmentStartTimeInPeriodUs, segmentEndTimeInPeriodUs, - /* chunkMediaSequence= */ mediaPlaylist.mediaSequence + segmentIndexInPlaylist, + segmentBaseHolder.mediaSequence, + segmentBaseHolder.partIndex, + /* isPublished= */ !segmentBaseHolder.isPreload, discontinuitySequenceNumber, mediaSegment.hasGapTag, isMasterTimestampSource, @@ -174,36 +186,72 @@ public static HlsMediaChunk createInstance( previousExtractor, id3Decoder, scratchId3Data, - shouldSpliceIn); + shouldSpliceIn, + playerId); + } + + /** + * Returns whether samples of a new HLS media chunk should be spliced into existing samples. + * + * @param previousChunk The previous existing media chunk, or null if the new chunk is the first + * in the queue. + * @param playlistUrl The URL of the playlist from which the new chunk will be obtained. + * @param mediaPlaylist The {@link HlsMediaPlaylist} containing the new chunk. + * @param segmentBaseHolder The {@link HlsChunkSource.SegmentBaseHolder} with information about + * the new chunk. + * @param startOfPlaylistInPeriodUs The start time of the playlist in the period, in microseconds. + * @return Whether samples of the new chunk should be spliced into existing samples. + */ + public static boolean shouldSpliceIn( + @Nullable HlsMediaChunk previousChunk, + Uri playlistUrl, + HlsMediaPlaylist mediaPlaylist, + HlsChunkSource.SegmentBaseHolder segmentBaseHolder, + long startOfPlaylistInPeriodUs) { + if (previousChunk == null) { + // First chunk doesn't require splicing. 
+ return false; + } + if (playlistUrl.equals(previousChunk.playlistUrl) && previousChunk.loadCompleted) { + // Continuing with the next chunk in the same playlist after fully loading the previous chunk + // (i.e. the load wasn't cancelled or failed) is always possible. + return false; + } + // Changing playlists or continuing after a chunk cancellation/failure requires independent, + // non-overlapping segments to avoid the splice. + long segmentStartTimeInPeriodUs = + startOfPlaylistInPeriodUs + segmentBaseHolder.segmentBase.relativeStartTimeUs; + return !isIndependent(segmentBaseHolder, mediaPlaylist) + || segmentStartTimeInPeriodUs < previousChunk.endTimeUs; } public static final String PRIV_TIMESTAMP_FRAME_OWNER = "com.apple.streaming.transportStreamTimestamp"; - private static final PositionHolder DUMMY_POSITION_HOLDER = new PositionHolder(); private static final AtomicInteger uidSource = new AtomicInteger(); - /** - * A unique identifier for the chunk. - */ + /** A unique identifier for the chunk. */ public final int uid; - /** - * The discontinuity sequence number of the chunk. - */ + /** The discontinuity sequence number of the chunk. */ public final int discontinuitySequenceNumber; /** The url of the playlist from which this chunk was obtained. */ public final Uri playlistUrl; + /** Whether samples for this chunk should be spliced into existing samples. */ + public final boolean shouldSpliceIn; + + /** The part index or {@link C#INDEX_UNSET} if the chunk is a full segment */ + public final int partIndex; + @Nullable private final DataSource initDataSource; @Nullable private final DataSpec initDataSpec; - @Nullable private final Extractor previousExtractor; + @Nullable private final HlsMediaChunkExtractor previousExtractor; private final boolean isMasterTimestampSource; private final boolean hasGapTag; private final TimestampAdjuster timestampAdjuster; - private final boolean shouldSpliceIn; private final HlsExtractorFactory extractorFactory; @Nullable private final List muxedCaptionFormats; @Nullable private final DrmInitData drmInitData; @@ -211,16 +259,19 @@ public static HlsMediaChunk createInstance( private final ParsableByteArray scratchId3Data; private final boolean mediaSegmentEncrypted; private final boolean initSegmentEncrypted; + private final PlayerId playerId; - @MonotonicNonNull private Extractor extractor; - private boolean isExtractorReusable; - @MonotonicNonNull private HlsSampleStreamWrapper output; + private @MonotonicNonNull HlsMediaChunkExtractor extractor; + private @MonotonicNonNull HlsSampleStreamWrapper output; // nextLoadPosition refers to the init segment if initDataLoadRequired is true. // Otherwise, nextLoadPosition refers to the media segment. 
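  // If a load is cancelled or fails partway through, nextLoadPosition records how many bytes have
  // already been fed to the extractor, so a retry can skip that prefix (by requesting only the
  // remainder for clear content, or by reading and discarding it for encrypted content) instead of
  // feeding duplicate data. See feedDataToExtractor below.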
private int nextLoadPosition; private boolean initDataLoadRequired; private volatile boolean loadCanceled; private boolean loadCompleted; + private ImmutableList sampleQueueFirstSampleIndices; + private boolean extractorInvalidated; + private boolean isPublished; private HlsMediaChunk( HlsExtractorFactory extractorFactory, @@ -233,20 +284,23 @@ private HlsMediaChunk( boolean initSegmentEncrypted, Uri playlistUrl, @Nullable List muxedCaptionFormats, - int trackSelectionReason, + @C.SelectionReason int trackSelectionReason, @Nullable Object trackSelectionData, long startTimeUs, long endTimeUs, long chunkMediaSequence, + int partIndex, + boolean isPublished, int discontinuitySequenceNumber, boolean hasGapTag, boolean isMasterTimestampSource, TimestampAdjuster timestampAdjuster, @Nullable DrmInitData drmInitData, - @Nullable Extractor previousExtractor, + @Nullable HlsMediaChunkExtractor previousExtractor, Id3Decoder id3Decoder, ParsableByteArray scratchId3Data, - boolean shouldSpliceIn) { + boolean shouldSpliceIn, + PlayerId playerId) { super( mediaDataSource, dataSpec, @@ -257,6 +311,8 @@ private HlsMediaChunk( endTimeUs, chunkMediaSequence); this.mediaSegmentEncrypted = mediaSegmentEncrypted; + this.partIndex = partIndex; + this.isPublished = isPublished; this.discontinuitySequenceNumber = discontinuitySequenceNumber; this.initDataSpec = initDataSpec; this.initDataSource = initDataSource; @@ -273,18 +329,43 @@ private HlsMediaChunk( this.id3Decoder = id3Decoder; this.scratchId3Data = scratchId3Data; this.shouldSpliceIn = shouldSpliceIn; + this.playerId = playerId; + sampleQueueFirstSampleIndices = ImmutableList.of(); uid = uidSource.getAndIncrement(); } /** - * Initializes the chunk for loading, setting the {@link HlsSampleStreamWrapper} that will receive - * samples as they are loaded. + * Initializes the chunk for loading. * - * @param output The output that will receive the loaded samples. + * @param output The {@link HlsSampleStreamWrapper} that will receive the loaded samples. + * @param sampleQueueWriteIndices The current write indices in the existing sample queues of the + * output. */ - public void init(HlsSampleStreamWrapper output) { + public void init(HlsSampleStreamWrapper output, ImmutableList sampleQueueWriteIndices) { this.output = output; - output.init(uid, shouldSpliceIn); + this.sampleQueueFirstSampleIndices = sampleQueueWriteIndices; + } + + /** + * Returns the first sample index of this chunk in the specified sample queue in the output. + * + *

      Must not be used if {@link #shouldSpliceIn} is true. + * + * @param sampleQueueIndex The index of the sample queue in the output. + * @return The first sample index of this chunk in the specified sample queue. + */ + public int getFirstSampleIndex(int sampleQueueIndex) { + Assertions.checkState(!shouldSpliceIn); + if (sampleQueueIndex >= sampleQueueFirstSampleIndices.size()) { + // The sample queue was created by this chunk or a later chunk. + return 0; + } + return sampleQueueFirstSampleIndices.get(sampleQueueIndex); + } + + /** Prevents the extractor from being reused by a following media chunk. */ + public void invalidateExtractor() { + extractorInvalidated = true; } @Override @@ -300,12 +381,11 @@ public void cancelLoad() { } @Override - public void load() throws IOException, InterruptedException { + public void load() throws IOException { // output == null means init() hasn't been called. Assertions.checkNotNull(output); - if (extractor == null && previousExtractor != null) { + if (extractor == null && previousExtractor != null && previousExtractor.isReusable()) { extractor = previousExtractor; - isExtractorReusable = true; initDataLoadRequired = false; } maybeLoadInitData(); @@ -313,34 +393,49 @@ public void load() throws IOException, InterruptedException { if (!hasGapTag) { loadMedia(); } - loadCompleted = true; + loadCompleted = !loadCanceled; } } + /** + * Whether the chunk is a published chunk as opposed to a preload hint that may change when the + * playlist updates. + */ + public boolean isPublished() { + return isPublished; + } + + /** + * Sets the publish flag of the media chunk to indicate that it is not based on a part that is a + * preload hint in the playlist. + */ + public void publish() { + isPublished = true; + } + // Internal methods. @RequiresNonNull("output") - private void maybeLoadInitData() throws IOException, InterruptedException { + private void maybeLoadInitData() throws IOException { if (!initDataLoadRequired) { return; } // initDataLoadRequired => initDataSource != null && initDataSpec != null Assertions.checkNotNull(initDataSource); Assertions.checkNotNull(initDataSpec); - feedDataToExtractor(initDataSource, initDataSpec, initSegmentEncrypted); + feedDataToExtractor( + initDataSource, + initDataSpec, + initSegmentEncrypted, + /* initializeTimestampAdjuster= */ false); nextLoadPosition = 0; initDataLoadRequired = false; } @RequiresNonNull("output") - private void loadMedia() throws IOException, InterruptedException { - if (!isMasterTimestampSource) { - timestampAdjuster.waitUntilInitialized(); - } else if (timestampAdjuster.getFirstSampleTimestampUs() == TimestampAdjuster.DO_NOT_OFFSET) { - // We're the master and we haven't set the desired first sample timestamp yet. - timestampAdjuster.setFirstSampleTimestampUs(startTimeUs); - } - feedDataToExtractor(dataSource, dataSpec, mediaSegmentEncrypted); + private void loadMedia() throws IOException { + feedDataToExtractor( + dataSource, dataSpec, mediaSegmentEncrypted, /* initializeTimestampAdjuster= */ true); } /** @@ -350,8 +445,11 @@ private void loadMedia() throws IOException, InterruptedException { */ @RequiresNonNull("output") private void feedDataToExtractor( - DataSource dataSource, DataSpec dataSpec, boolean dataIsEncrypted) - throws IOException, InterruptedException { + DataSource dataSource, + DataSpec dataSpec, + boolean dataIsEncrypted, + boolean initializeTimestampAdjuster) + throws IOException { // If we previously fed part of this chunk to the extractor, we need to skip it this time. 
For // encrypted content we need to skip the data by reading it through the source, so as to ensure // correct decryption of the remainder of the chunk. For clear content, we can request the @@ -366,47 +464,61 @@ private void feedDataToExtractor( skipLoadedBytes = false; } try { - ExtractorInput input = prepareExtraction(dataSource, loadDataSpec); + ExtractorInput input = + prepareExtraction(dataSource, loadDataSpec, initializeTimestampAdjuster); if (skipLoadedBytes) { input.skipFully(nextLoadPosition); } try { - int result = Extractor.RESULT_CONTINUE; - while (result == Extractor.RESULT_CONTINUE && !loadCanceled) { - result = extractor.read(input, DUMMY_POSITION_HOLDER); + while (!loadCanceled && extractor.read(input)) {} + } catch (EOFException e) { + if ((trackFormat.roleFlags & C.ROLE_FLAG_TRICK_PLAY) != 0) { + // See onTruncatedSegmentParsed's javadoc for more info on why we are swallowing the EOF + // exception for trick play tracks. + extractor.onTruncatedSegmentParsed(); + } else { + throw e; } } finally { - nextLoadPosition = (int) (input.getPosition() - dataSpec.absoluteStreamPosition); + nextLoadPosition = (int) (input.getPosition() - dataSpec.position); } } finally { - Util.closeQuietly(dataSource); + DataSourceUtil.closeQuietly(dataSource); } } @RequiresNonNull("output") @EnsuresNonNull("extractor") - private DefaultExtractorInput prepareExtraction(DataSource dataSource, DataSpec dataSpec) - throws IOException, InterruptedException { + private DefaultExtractorInput prepareExtraction( + DataSource dataSource, DataSpec dataSpec, boolean initializeTimestampAdjuster) + throws IOException { long bytesToRead = dataSource.open(dataSpec); + if (initializeTimestampAdjuster) { + try { + timestampAdjuster.sharedInitializeOrWait(isMasterTimestampSource, startTimeUs); + } catch (InterruptedException e) { + throw new InterruptedIOException(); + } + } DefaultExtractorInput extractorInput = - new DefaultExtractorInput(dataSource, dataSpec.absoluteStreamPosition, bytesToRead); + new DefaultExtractorInput(dataSource, dataSpec.position, bytesToRead); if (extractor == null) { long id3Timestamp = peekId3PrivTimestamp(extractorInput); extractorInput.resetPeekPosition(); - HlsExtractorFactory.Result result = - extractorFactory.createExtractor( - previousExtractor, - dataSpec.uri, - trackFormat, - muxedCaptionFormats, - timestampAdjuster, - dataSource.getResponseHeaders(), - extractorInput); - extractor = result.extractor; - isExtractorReusable = result.isReusable; - if (result.isPackedAudioExtractor) { + extractor = + previousExtractor != null + ? previousExtractor.recreate() + : extractorFactory.createExtractor( + dataSpec.uri, + trackFormat, + muxedCaptionFormats, + timestampAdjuster, + dataSource.getResponseHeaders(), + extractorInput, + playerId); + if (extractor.isPackedAudioExtractor()) { output.setSampleOffsetUs( id3Timestamp != C.TIME_UNSET ? timestampAdjuster.adjustTsTimestamp(id3Timestamp) @@ -424,24 +536,23 @@ private DefaultExtractorInput prepareExtraction(DataSource dataSource, DataSpec } /** - * Peek the presentation timestamp of the first sample in the chunk from an ID3 PRIV as defined - * in the HLS spec, version 20, Section 3.4. Returns {@link C#TIME_UNSET} if the frame is not - * found. This method only modifies the peek position. + * Peek the presentation timestamp of the first sample in the chunk from an ID3 PRIV as defined in + * the HLS spec, version 20, Section 3.4. Returns {@link C#TIME_UNSET} if the frame is not found. + * This method only modifies the peek position. 
* * @param input The {@link ExtractorInput} to obtain the PRIV frame from. * @return The parsed, adjusted timestamp in microseconds * @throws IOException If an error occurred peeking from the input. - * @throws InterruptedException If the thread was interrupted. */ - private long peekId3PrivTimestamp(ExtractorInput input) throws IOException, InterruptedException { + private long peekId3PrivTimestamp(ExtractorInput input) throws IOException { input.resetPeekPosition(); try { - input.peekFully(scratchId3Data.data, 0, Id3Decoder.ID3_HEADER_LENGTH); + scratchId3Data.reset(Id3Decoder.ID3_HEADER_LENGTH); + input.peekFully(scratchId3Data.getData(), 0, Id3Decoder.ID3_HEADER_LENGTH); } catch (EOFException e) { // The input isn't long enough for there to be any ID3 data. return C.TIME_UNSET; } - scratchId3Data.reset(Id3Decoder.ID3_HEADER_LENGTH); int id = scratchId3Data.readUnsignedInt24(); if (id != Id3Decoder.ID3_TAG) { return C.TIME_UNSET; @@ -450,12 +561,12 @@ private long peekId3PrivTimestamp(ExtractorInput input) throws IOException, Inte int id3Size = scratchId3Data.readSynchSafeInt(); int requiredCapacity = id3Size + Id3Decoder.ID3_HEADER_LENGTH; if (requiredCapacity > scratchId3Data.capacity()) { - byte[] data = scratchId3Data.data; + byte[] data = scratchId3Data.getData(); scratchId3Data.reset(requiredCapacity); - System.arraycopy(data, 0, scratchId3Data.data, 0, Id3Decoder.ID3_HEADER_LENGTH); + System.arraycopy(data, 0, scratchId3Data.getData(), 0, Id3Decoder.ID3_HEADER_LENGTH); } - input.peekFully(scratchId3Data.data, Id3Decoder.ID3_HEADER_LENGTH, id3Size); - Metadata metadata = id3Decoder.decode(scratchId3Data.data, id3Size); + input.peekFully(scratchId3Data.getData(), Id3Decoder.ID3_HEADER_LENGTH, id3Size); + Metadata metadata = id3Decoder.decode(scratchId3Data.getData(), id3Size); if (metadata == null) { return C.TIME_UNSET; } @@ -466,8 +577,9 @@ private long peekId3PrivTimestamp(ExtractorInput input) throws IOException, Inte PrivFrame privFrame = (PrivFrame) frame; if (PRIV_TIMESTAMP_FRAME_OWNER.equals(privFrame.owner)) { System.arraycopy( - privFrame.privateData, 0, scratchId3Data.data, 0, 8 /* timestamp size */); - scratchId3Data.reset(8); + privFrame.privateData, 0, scratchId3Data.getData(), 0, 8 /* timestamp size */); + scratchId3Data.setPosition(0); + scratchId3Data.setLimit(8); // The top 31 bits should be zeros, but explicitly zero them to wrap in the case that the // streaming provider forgot. See: https://github.com/google/ExoPlayer/pull/3495. 
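      // The PRIV payload carries a 33-bit MPEG-2 presentation timestamp (90 kHz clock) encoded as
      // an 8-octet big-endian number, so masking with 0x1FFFFFFFFL keeps exactly those 33 bits.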
return scratchId3Data.readLong() & 0x1FFFFFFFFL; @@ -481,7 +593,7 @@ private long peekId3PrivTimestamp(ExtractorInput input) throws IOException, Inte private static byte[] getEncryptionIvArray(String ivString) { String trimmedIv; - if (Util.toLowerInvariant(ivString).startsWith("0x")) { + if (Ascii.toLowerCase(ivString).startsWith("0x")) { trimmedIv = ivString.substring(2); } else { trimmedIv = ivString; @@ -516,4 +628,12 @@ private static DataSource buildDataSource( return dataSource; } + private static boolean isIndependent( + HlsChunkSource.SegmentBaseHolder segmentBaseHolder, HlsMediaPlaylist mediaPlaylist) { + if (segmentBaseHolder.segmentBase instanceof HlsMediaPlaylist.Part) { + return ((HlsMediaPlaylist.Part) segmentBaseHolder.segmentBase).isIndependent + || (segmentBaseHolder.partIndex == 0 && mediaPlaylist.hasIndependentSegments); + } + return mediaPlaylist.hasIndependentSegments; + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsMediaChunkExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsMediaChunkExtractor.java new file mode 100644 index 0000000000..084a3450ba --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsMediaChunkExtractor.java @@ -0,0 +1,73 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.hls; + +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import java.io.IOException; + +/** Extracts samples and track {@link Format Formats} from {@link HlsMediaChunk HlsMediaChunks}. */ +public interface HlsMediaChunkExtractor { + + /** + * Initializes the extractor with an {@link ExtractorOutput}. Called at most once. + * + * @param extractorOutput An {@link ExtractorOutput} to receive extracted data. + */ + void init(ExtractorOutput extractorOutput); + + /** + * Extracts data read from a provided {@link ExtractorInput}. Must not be called before {@link + * #init(ExtractorOutput)}. + * + *

+   * <p>A single call to this method will block until some progress has been made, but will not
+   * block for longer than this. Hence each call will consume only a small amount of input data.
+   *
+   * <p>When this method throws an {@link IOException}, extraction may continue by providing an
+   * {@link ExtractorInput} with an unchanged {@link ExtractorInput#getPosition() read position} to
+   * a subsequent call to this method.
+   *
+   * @param extractorInput The input to read from.
+   * @return Whether there is any data left to extract. Returns false if the end of input has been
+   *     reached.
+   * @throws IOException If an error occurred reading from or parsing the input.
+   */
+  boolean read(ExtractorInput extractorInput) throws IOException;
+
+  /** Returns whether this is a packed audio extractor, as defined in RFC 8216, Section 3.4. */
+  boolean isPackedAudioExtractor();
+
+  /** Returns whether this instance can be used for extracting multiple continuous segments. */
+  boolean isReusable();
+
+  /**
+   * Returns a new instance for extracting the same type of media as this one. Can only be called on
+   * instances that are not {@link #isReusable() reusable}.
+   */
+  HlsMediaChunkExtractor recreate();
+
+  /**
+   * Resets the sample parsing state.
+   *
+   * <p>
      Resetting the parsing state allows support for Fragmented MP4 EXT-X-I-FRAME-STREAM-INF + * segments. EXT-X-I-FRAME-STREAM-INF segments are truncated to include only a leading key frame. + * After parsing said keyframe, an extractor may reach an unexpected end of file. By resetting its + * state, we can continue feeding samples from the following segments to the extractor. See #7512 for context. + */ + void onTruncatedSegmentParsed(); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsMediaPeriod.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsMediaPeriod.java index 3b723af435..e215192ca3 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsMediaPeriod.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsMediaPeriod.java @@ -21,8 +21,10 @@ import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.SeekParameters; +import com.google.android.exoplayer2.analytics.PlayerId; import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.drm.DrmSession; +import com.google.android.exoplayer2.drm.DrmSessionEventListener; import com.google.android.exoplayer2.drm.DrmSessionManager; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.metadata.Metadata; @@ -34,11 +36,11 @@ import com.google.android.exoplayer2.source.SequenceableLoader; import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.TrackGroupArray; -import com.google.android.exoplayer2.source.hls.playlist.HlsMasterPlaylist; -import com.google.android.exoplayer2.source.hls.playlist.HlsMasterPlaylist.Rendition; -import com.google.android.exoplayer2.source.hls.playlist.HlsMasterPlaylist.Variant; +import com.google.android.exoplayer2.source.hls.playlist.HlsMultivariantPlaylist; +import com.google.android.exoplayer2.source.hls.playlist.HlsMultivariantPlaylist.Rendition; +import com.google.android.exoplayer2.source.hls.playlist.HlsMultivariantPlaylist.Variant; import com.google.android.exoplayer2.source.hls.playlist.HlsPlaylistTracker; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.upstream.Allocator; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; @@ -46,6 +48,7 @@ import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; +import com.google.common.primitives.Ints; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -57,17 +60,15 @@ import org.checkerframework.checker.nullness.compatqual.NullableType; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * A {@link MediaPeriod} that loads an HLS stream. - */ -public final class HlsMediaPeriod implements MediaPeriod, HlsSampleStreamWrapper.Callback, - HlsPlaylistTracker.PlaylistEventListener { +/** A {@link MediaPeriod} that loads an HLS stream. 
*/ +public final class HlsMediaPeriod implements MediaPeriod, HlsPlaylistTracker.PlaylistEventListener { private final HlsExtractorFactory extractorFactory; private final HlsPlaylistTracker playlistTracker; private final HlsDataSourceFactory dataSourceFactory; @Nullable private final TransferListener mediaTransferListener; - private final DrmSessionManager drmSessionManager; + private final DrmSessionManager drmSessionManager; + private final DrmSessionEventListener.EventDispatcher drmEventDispatcher; private final LoadErrorHandlingPolicy loadErrorHandlingPolicy; private final EventDispatcher eventDispatcher; private final Allocator allocator; @@ -77,16 +78,18 @@ public final class HlsMediaPeriod implements MediaPeriod, HlsSampleStreamWrapper private final boolean allowChunklessPreparation; private final @HlsMediaSource.MetadataType int metadataType; private final boolean useSessionKeys; + private final PlayerId playerId; + private final HlsSampleStreamWrapper.Callback sampleStreamWrapperCallback; - @Nullable private Callback callback; + @Nullable private MediaPeriod.Callback mediaPeriodCallback; private int pendingPrepareCount; private @MonotonicNonNull TrackGroupArray trackGroups; private HlsSampleStreamWrapper[] sampleStreamWrappers; private HlsSampleStreamWrapper[] enabledSampleStreamWrappers; // Maps sample stream wrappers to variant/rendition index by matching array positions. private int[][] manifestUrlIndicesPerWrapper; + private int audioVideoSampleStreamWrapperCount; private SequenceableLoader compositeSequenceableLoader; - private boolean notifiedReadingStarted; /** * Creates an HLS media period. @@ -112,19 +115,22 @@ public HlsMediaPeriod( HlsPlaylistTracker playlistTracker, HlsDataSourceFactory dataSourceFactory, @Nullable TransferListener mediaTransferListener, - DrmSessionManager drmSessionManager, + DrmSessionManager drmSessionManager, + DrmSessionEventListener.EventDispatcher drmEventDispatcher, LoadErrorHandlingPolicy loadErrorHandlingPolicy, EventDispatcher eventDispatcher, Allocator allocator, CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory, boolean allowChunklessPreparation, @HlsMediaSource.MetadataType int metadataType, - boolean useSessionKeys) { + boolean useSessionKeys, + PlayerId playerId) { this.extractorFactory = extractorFactory; this.playlistTracker = playlistTracker; this.dataSourceFactory = dataSourceFactory; this.mediaTransferListener = mediaTransferListener; this.drmSessionManager = drmSessionManager; + this.drmEventDispatcher = drmEventDispatcher; this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; this.eventDispatcher = eventDispatcher; this.allocator = allocator; @@ -132,6 +138,8 @@ public HlsMediaPeriod( this.allowChunklessPreparation = allowChunklessPreparation; this.metadataType = metadataType; this.useSessionKeys = useSessionKeys; + this.playerId = playerId; + sampleStreamWrapperCallback = new SampleStreamWrapperCallback(); compositeSequenceableLoader = compositeSequenceableLoaderFactory.createCompositeSequenceableLoader(); streamWrapperIndices = new IdentityHashMap<>(); @@ -139,7 +147,6 @@ public HlsMediaPeriod( sampleStreamWrappers = new HlsSampleStreamWrapper[0]; enabledSampleStreamWrappers = new HlsSampleStreamWrapper[0]; manifestUrlIndicesPerWrapper = new int[0][]; - eventDispatcher.mediaPeriodCreated(); } public void release() { @@ -147,13 +154,12 @@ public void release() { for (HlsSampleStreamWrapper sampleStreamWrapper : sampleStreamWrappers) { sampleStreamWrapper.release(); } - callback = null; - 
eventDispatcher.mediaPeriodReleased(); + mediaPeriodCallback = null; } @Override public void prepare(Callback callback, long positionUs) { - this.callback = callback; + this.mediaPeriodCallback = callback; playlistTracker.addListener(this); buildAndPrepareSampleStreamWrappers(positionUs); } @@ -171,17 +177,18 @@ public TrackGroupArray getTrackGroups() { return Assertions.checkNotNull(trackGroups); } - // TODO: When the master playlist does not de-duplicate variants by URL and allows Renditions with - // null URLs, this method must be updated to calculate stream keys that are compatible with those - // that may already be persisted for offline. + // TODO: When the multivariant playlist does not de-duplicate variants by URL and allows + // Renditions with null URLs, this method must be updated to calculate stream keys that are + // compatible with those that may already be persisted for offline. @Override - public List getStreamKeys(List trackSelections) { - // See HlsMasterPlaylist.copy for interpretation of StreamKeys. - HlsMasterPlaylist masterPlaylist = Assertions.checkNotNull(playlistTracker.getMasterPlaylist()); - boolean hasVariants = !masterPlaylist.variants.isEmpty(); + public List getStreamKeys(List trackSelections) { + // See HlsMultivariantPlaylist.copy for interpretation of StreamKeys. + HlsMultivariantPlaylist multivariantPlaylist = + Assertions.checkNotNull(playlistTracker.getMultivariantPlaylist()); + boolean hasVariants = !multivariantPlaylist.variants.isEmpty(); int audioWrapperOffset = hasVariants ? 1 : 0; // Subtitle sample stream wrappers are held last. - int subtitleWrapperOffset = sampleStreamWrappers.length - masterPlaylist.subtitles.size(); + int subtitleWrapperOffset = sampleStreamWrappers.length - multivariantPlaylist.subtitles.size(); TrackGroupArray mainWrapperTrackGroups; int mainWrapperPrimaryGroupIndex; @@ -200,7 +207,7 @@ public List getStreamKeys(List trackSelections) { List streamKeys = new ArrayList<>(); boolean needsPrimaryTrackGroupSelection = false; boolean hasPrimaryTrackGroupSelection = false; - for (TrackSelection trackSelection : trackSelections) { + for (ExoTrackSelection trackSelection : trackSelections) { TrackGroup trackSelectionGroup = trackSelection.getTrackGroup(); int mainWrapperTrackGroupIndex = mainWrapperTrackGroups.indexOf(trackSelectionGroup); if (mainWrapperTrackGroupIndex != C.INDEX_UNSET) { @@ -209,7 +216,8 @@ public List getStreamKeys(List trackSelections) { hasPrimaryTrackGroupSelection = true; for (int i = 0; i < trackSelection.length(); i++) { int variantIndex = mainWrapperVariantIndices[trackSelection.getIndexInTrackGroup(i)]; - streamKeys.add(new StreamKey(HlsMasterPlaylist.GROUP_INDEX_VARIANT, variantIndex)); + streamKeys.add( + new StreamKey(HlsMultivariantPlaylist.GROUP_INDEX_VARIANT, variantIndex)); } } else { // Embedded group in main wrapper. @@ -223,8 +231,8 @@ public List getStreamKeys(List trackSelections) { if (selectedTrackGroupIndex != C.INDEX_UNSET) { int groupIndexType = i < subtitleWrapperOffset - ? HlsMasterPlaylist.GROUP_INDEX_AUDIO - : HlsMasterPlaylist.GROUP_INDEX_SUBTITLE; + ? HlsMultivariantPlaylist.GROUP_INDEX_AUDIO + : HlsMultivariantPlaylist.GROUP_INDEX_SUBTITLE; int[] selectedWrapperUrlIndices = manifestUrlIndicesPerWrapper[i]; for (int trackIndex = 0; trackIndex < trackSelection.length(); trackIndex++) { int renditionIndex = @@ -240,23 +248,25 @@ public List getStreamKeys(List trackSelections) { // A track selection includes a variant-embedded track, but no variant is added yet. 
We use // the valid variant with the lowest bitrate to reduce overhead. int lowestBitrateIndex = mainWrapperVariantIndices[0]; - int lowestBitrate = masterPlaylist.variants.get(mainWrapperVariantIndices[0]).format.bitrate; + int lowestBitrate = + multivariantPlaylist.variants.get(mainWrapperVariantIndices[0]).format.bitrate; for (int i = 1; i < mainWrapperVariantIndices.length; i++) { int variantBitrate = - masterPlaylist.variants.get(mainWrapperVariantIndices[i]).format.bitrate; + multivariantPlaylist.variants.get(mainWrapperVariantIndices[i]).format.bitrate; if (variantBitrate < lowestBitrate) { lowestBitrate = variantBitrate; lowestBitrateIndex = mainWrapperVariantIndices[i]; } } - streamKeys.add(new StreamKey(HlsMasterPlaylist.GROUP_INDEX_VARIANT, lowestBitrateIndex)); + streamKeys.add( + new StreamKey(HlsMultivariantPlaylist.GROUP_INDEX_VARIANT, lowestBitrateIndex)); } return streamKeys; } @Override public long selectTracks( - @NullableType TrackSelection[] selections, + @NullableType ExoTrackSelection[] selections, boolean[] mayRetainStreamFlags, @NullableType SampleStream[] streams, boolean[] streamResetFlags, @@ -265,8 +275,8 @@ public long selectTracks( int[] streamChildIndices = new int[selections.length]; int[] selectionChildIndices = new int[selections.length]; for (int i = 0; i < selections.length; i++) { - streamChildIndices[i] = streams[i] == null ? C.INDEX_UNSET - : streamWrapperIndices.get(streams[i]); + streamChildIndices[i] = + streams[i] == null ? C.INDEX_UNSET : streamWrapperIndices.get(streams[i]); selectionChildIndices[i] = C.INDEX_UNSET; if (selections[i] != null) { TrackGroup trackGroup = selections[i].getTrackGroup(); @@ -284,7 +294,7 @@ public long selectTracks( // Select tracks for each child, copying the resulting streams back into a new streams array. SampleStream[] newStreams = new SampleStream[selections.length]; @NullableType SampleStream[] childStreams = new SampleStream[selections.length]; - @NullableType TrackSelection[] childSelections = new TrackSelection[selections.length]; + @NullableType ExoTrackSelection[] childSelections = new ExoTrackSelection[selections.length]; int newEnabledSampleStreamWrapperCount = 0; HlsSampleStreamWrapper[] newEnabledSampleStreamWrappers = new HlsSampleStreamWrapper[sampleStreamWrappers.length]; @@ -294,8 +304,14 @@ public long selectTracks( childSelections[j] = selectionChildIndices[j] == i ? selections[j] : null; } HlsSampleStreamWrapper sampleStreamWrapper = sampleStreamWrappers[i]; - boolean wasReset = sampleStreamWrapper.selectTracks(childSelections, mayRetainStreamFlags, - childStreams, streamResetFlags, positionUs, forceReset); + boolean wasReset = + sampleStreamWrapper.selectTracks( + childSelections, + mayRetainStreamFlags, + childStreams, + streamResetFlags, + positionUs, + forceReset); boolean wrapperEnabled = false; for (int j = 0; j < selections.length; j++) { SampleStream childStream = childStreams[j]; @@ -313,10 +329,12 @@ public long selectTracks( if (wrapperEnabled) { newEnabledSampleStreamWrappers[newEnabledSampleStreamWrapperCount] = sampleStreamWrapper; if (newEnabledSampleStreamWrapperCount++ == 0) { - // The first enabled wrapper is responsible for initializing timestamp adjusters. This - // way, if enabled, variants are responsible. Else audio renditions. Else text renditions. + // The first enabled wrapper is always allowed to initialize timestamp adjusters. Note + // that the first wrapper will correspond to a variant, or else an audio rendition, or + // else a text rendition, in that order. 
sampleStreamWrapper.setIsTimestampMaster(true); - if (wasReset || enabledSampleStreamWrappers.length == 0 + if (wasReset + || enabledSampleStreamWrappers.length == 0 || sampleStreamWrapper != enabledSampleStreamWrappers[0]) { // The wrapper responsible for initializing the timestamp adjusters was reset or // changed. We need to reset the timestamp adjuster provider and all other wrappers. @@ -324,7 +342,11 @@ public long selectTracks( forceReset = true; } } else { - sampleStreamWrapper.setIsTimestampMaster(false); + // Additional wrappers are also allowed to initialize timestamp adjusters if they contain + // audio or video, since they are expected to contain dense samples. Text wrappers are not + // permitted except in the case above in which no variant or audio rendition wrappers are + // enabled. + sampleStreamWrapper.setIsTimestampMaster(i < audioVideoSampleStreamWrapperCount); } } } @@ -376,10 +398,6 @@ public long getNextLoadPositionUs() { @Override public long readDiscontinuity() { - if (!notifiedReadingStarted) { - eventDispatcher.readingStarted(); - notifiedReadingStarted = true; - } return C.TIME_UNSET; } @@ -406,72 +424,52 @@ public long seekToUs(long positionUs) { @Override public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParameters) { - return positionUs; - } - - // HlsSampleStreamWrapper.Callback implementation. - - @Override - public void onPrepared() { - if (--pendingPrepareCount > 0) { - return; - } - - int totalTrackGroupCount = 0; - for (HlsSampleStreamWrapper sampleStreamWrapper : sampleStreamWrappers) { - totalTrackGroupCount += sampleStreamWrapper.getTrackGroups().length; - } - TrackGroup[] trackGroupArray = new TrackGroup[totalTrackGroupCount]; - int trackGroupIndex = 0; - for (HlsSampleStreamWrapper sampleStreamWrapper : sampleStreamWrappers) { - int wrapperTrackGroupCount = sampleStreamWrapper.getTrackGroups().length; - for (int j = 0; j < wrapperTrackGroupCount; j++) { - trackGroupArray[trackGroupIndex++] = sampleStreamWrapper.getTrackGroups().get(j); + long seekTargetUs = positionUs; + for (HlsSampleStreamWrapper sampleStreamWrapper : enabledSampleStreamWrappers) { + if (sampleStreamWrapper.isVideoSampleStream()) { + seekTargetUs = sampleStreamWrapper.getAdjustedSeekPositionUs(positionUs, seekParameters); + break; } } - trackGroups = new TrackGroupArray(trackGroupArray); - callback.onPrepared(this); + return seekTargetUs; } - @Override - public void onPlaylistRefreshRequired(Uri url) { - playlistTracker.refreshPlaylist(url); - } - - @Override - public void onContinueLoadingRequested(HlsSampleStreamWrapper sampleStreamWrapper) { - callback.onContinueLoadingRequested(this); - } + // HlsSampleStreamWrapper.Callback implementation. // PlaylistListener implementation. 
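The timestamp-master assignment above pairs with TimestampAdjuster.sharedInitializeOrWait(isMasterTimestampSource, startTimeUs), which prepareExtraction in HlsMediaChunk (earlier in this diff) calls before extraction begins. A minimal sketch of that contract, under a hypothetical class name and with simplified state rather than the library implementation:

  final class SharedTimestampAdjusterSketch {
    private boolean initialized;
    private long firstSampleTimestampUs;

    // A master wrapper seeds the shared offset from its chunk start time; non-master wrappers
    // block until some master has done so, so samples from all renditions are mapped onto the
    // same period timeline.
    synchronized void sharedInitializeOrWait(boolean isMaster, long startTimeUs)
        throws InterruptedException {
      if (isMaster && !initialized) {
        firstSampleTimestampUs = startTimeUs;
        initialized = true;
        notifyAll();
      }
      while (!initialized) {
        wait();
      }
    }
  }

This is why only audio/video wrappers are made timestamp masters: they carry dense samples and can seed the offset promptly, whereas a text-only wrapper might stall the others.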
@Override public void onPlaylistChanged() { - callback.onContinueLoadingRequested(this); + for (HlsSampleStreamWrapper streamWrapper : sampleStreamWrappers) { + streamWrapper.onPlaylistUpdated(); + } + mediaPeriodCallback.onContinueLoadingRequested(this); } @Override - public boolean onPlaylistError(Uri url, long blacklistDurationMs) { - boolean noBlacklistingFailure = true; + public boolean onPlaylistError( + Uri url, LoadErrorHandlingPolicy.LoadErrorInfo loadErrorInfo, boolean forceRetry) { + boolean exclusionSucceeded = true; for (HlsSampleStreamWrapper streamWrapper : sampleStreamWrappers) { - noBlacklistingFailure &= streamWrapper.onPlaylistError(url, blacklistDurationMs); + exclusionSucceeded &= streamWrapper.onPlaylistError(url, loadErrorInfo, forceRetry); } - callback.onContinueLoadingRequested(this); - return noBlacklistingFailure; + mediaPeriodCallback.onContinueLoadingRequested(this); + return exclusionSucceeded; } // Internal methods. private void buildAndPrepareSampleStreamWrappers(long positionUs) { - HlsMasterPlaylist masterPlaylist = Assertions.checkNotNull(playlistTracker.getMasterPlaylist()); + HlsMultivariantPlaylist multivariantPlaylist = + Assertions.checkNotNull(playlistTracker.getMultivariantPlaylist()); Map overridingDrmInitData = useSessionKeys - ? deriveOverridingDrmInitData(masterPlaylist.sessionKeyDrmInitData) + ? deriveOverridingDrmInitData(multivariantPlaylist.sessionKeyDrmInitData) : Collections.emptyMap(); - boolean hasVariants = !masterPlaylist.variants.isEmpty(); - List audioRenditions = masterPlaylist.audios; - List subtitleRenditions = masterPlaylist.subtitles; + boolean hasVariants = !multivariantPlaylist.variants.isEmpty(); + List audioRenditions = multivariantPlaylist.audios; + List subtitleRenditions = multivariantPlaylist.subtitles; pendingPrepareCount = 0; ArrayList sampleStreamWrappers = new ArrayList<>(); @@ -479,7 +477,7 @@ private void buildAndPrepareSampleStreamWrappers(long positionUs) { if (hasVariants) { buildAndPrepareMainSampleStreamWrapper( - masterPlaylist, + multivariantPlaylist, positionUs, sampleStreamWrappers, manifestUrlIndicesPerWrapper, @@ -495,11 +493,16 @@ private void buildAndPrepareSampleStreamWrappers(long positionUs) { manifestUrlIndicesPerWrapper, overridingDrmInitData); - // Subtitle stream wrappers. We can always use master playlist information to prepare these. + audioVideoSampleStreamWrapperCount = sampleStreamWrappers.size(); + + // Subtitle stream wrappers. We can always use multivariant playlist information to prepare + // these. 
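    // (Each subtitle rendition's format is declared in full by its EXT-X-MEDIA tag, so its track
    // group can be exposed without downloading any segment, unlike variants, whose CODECS
    // attribute may be absent or incomplete.)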
for (int i = 0; i < subtitleRenditions.size(); i++) { Rendition subtitleRendition = subtitleRenditions.get(i); + String sampleStreamWrapperUid = "subtitle:" + i + ":" + subtitleRendition.name; HlsSampleStreamWrapper sampleStreamWrapper = buildSampleStreamWrapper( + sampleStreamWrapperUid, C.TRACK_TYPE_TEXT, new Uri[] {subtitleRendition.url}, new Format[] {subtitleRendition.format}, @@ -509,16 +512,18 @@ private void buildAndPrepareSampleStreamWrappers(long positionUs) { positionUs); manifestUrlIndicesPerWrapper.add(new int[] {i}); sampleStreamWrappers.add(sampleStreamWrapper); - sampleStreamWrapper.prepareWithMasterPlaylistInfo( - new TrackGroup[] {new TrackGroup(subtitleRendition.format)}, + sampleStreamWrapper.prepareWithMultivariantPlaylistInfo( + new TrackGroup[] {new TrackGroup(sampleStreamWrapperUid, subtitleRendition.format)}, /* primaryTrackGroupIndex= */ 0); } this.sampleStreamWrappers = sampleStreamWrappers.toArray(new HlsSampleStreamWrapper[0]); this.manifestUrlIndicesPerWrapper = manifestUrlIndicesPerWrapper.toArray(new int[0][]); pendingPrepareCount = this.sampleStreamWrappers.length; - // Set timestamp master and trigger preparation (if not already prepared) - this.sampleStreamWrappers[0].setIsTimestampMaster(true); + // Set timestamp masters and trigger preparation (if not already prepared) + for (int i = 0; i < audioVideoSampleStreamWrapperCount; i++) { + this.sampleStreamWrappers[i].setIsTimestampMaster(true); + } for (HlsSampleStreamWrapper sampleStreamWrapper : this.sampleStreamWrappers) { sampleStreamWrapper.continuePreparing(); } @@ -530,8 +535,8 @@ private void buildAndPrepareSampleStreamWrappers(long positionUs) { * This method creates and starts preparation of the main {@link HlsSampleStreamWrapper}. * *

   * <p>The main sample stream wrapper is the first element of {@link #sampleStreamWrappers}. It
-  * provides {@link SampleStream}s for the variant urls in the master playlist. It may be adaptive
-  * and may contain multiple muxed tracks.
+  * provides {@link SampleStream}s for the variant urls in the multivariant playlist. It may be
+  * adaptive and may contain multiple muxed tracks.
   *
   * <p>If chunkless preparation is allowed, the media period will try preparation without segment
   * downloads. This is only possible if variants contain the CODECS attribute. If not, traditional
   * preparation is triggered.
   *
   * <ul>
   *   <li>A muxed audio track will be exposed if the codecs list contain an audio entry and the
-  *       master playlist either contains an EXT-X-MEDIA tag without the URI attribute or does not
-  *       contain any EXT-X-MEDIA tag.
-  *   <li>Closed captions will only be exposed if they are declared by the master playlist.
+  *       multivariant playlist either contains an EXT-X-MEDIA tag without the URI attribute or
+  *       does not contain any EXT-X-MEDIA tag.
+  *   <li>Closed captions will only be exposed if they are declared by the multivariant playlist.
   *   <li>An ID3 track is exposed preemptively, in case the segments contain an ID3 track.
   * </ul>
      * - * @param masterPlaylist The HLS master playlist. + * @param multivariantPlaylist The HLS multivariant playlist. * @param positionUs If preparation requires any chunk downloads, the position in microseconds at * which downloading should start. Ignored otherwise. * @param sampleStreamWrappers List to which the built main sample stream wrapper should be added. @@ -555,16 +560,16 @@ private void buildAndPrepareSampleStreamWrappers(long positionUs) { * (i.e. {@link DrmInitData#schemeType}). */ private void buildAndPrepareMainSampleStreamWrapper( - HlsMasterPlaylist masterPlaylist, + HlsMultivariantPlaylist multivariantPlaylist, long positionUs, List sampleStreamWrappers, List manifestUrlIndicesPerWrapper, Map overridingDrmInitData) { - int[] variantTypes = new int[masterPlaylist.variants.size()]; + int[] variantTypes = new int[multivariantPlaylist.variants.size()]; int videoVariantCount = 0; int audioVariantCount = 0; - for (int i = 0; i < masterPlaylist.variants.size(); i++) { - Variant variant = masterPlaylist.variants.get(i); + for (int i = 0; i < multivariantPlaylist.variants.size(); i++) { + Variant variant = multivariantPlaylist.variants.get(i); Format format = variant.format; if (format.height > 0 || Util.getCodecsOfType(format.codecs, C.TRACK_TYPE_VIDEO) != null) { variantTypes[i] = C.TRACK_TYPE_VIDEO; @@ -581,8 +586,8 @@ private void buildAndPrepareMainSampleStreamWrapper( int selectedVariantsCount = variantTypes.length; if (videoVariantCount > 0) { // We've identified some variants as definitely containing video. Assume variants within the - // master playlist are marked consistently, and hence that we have the full set. Filter out - // any other variants, which are likely to be audio only. + // multivariant playlist are marked consistently, and hence that we have the full set. Filter + // out any other variants, which are likely to be audio only. useVideoVariantsOnly = true; selectedVariantsCount = videoVariantCount; } else if (audioVariantCount < variantTypes.length) { @@ -595,83 +600,94 @@ private void buildAndPrepareMainSampleStreamWrapper( Format[] selectedPlaylistFormats = new Format[selectedVariantsCount]; int[] selectedVariantIndices = new int[selectedVariantsCount]; int outIndex = 0; - for (int i = 0; i < masterPlaylist.variants.size(); i++) { + for (int i = 0; i < multivariantPlaylist.variants.size(); i++) { if ((!useVideoVariantsOnly || variantTypes[i] == C.TRACK_TYPE_VIDEO) && (!useNonAudioVariantsOnly || variantTypes[i] != C.TRACK_TYPE_AUDIO)) { - Variant variant = masterPlaylist.variants.get(i); + Variant variant = multivariantPlaylist.variants.get(i); selectedPlaylistUrls[outIndex] = variant.url; selectedPlaylistFormats[outIndex] = variant.format; selectedVariantIndices[outIndex++] = i; } } String codecs = selectedPlaylistFormats[0].codecs; + int numberOfVideoCodecs = Util.getCodecCountOfType(codecs, C.TRACK_TYPE_VIDEO); + int numberOfAudioCodecs = Util.getCodecCountOfType(codecs, C.TRACK_TYPE_AUDIO); + boolean codecsStringAllowsChunklessPreparation = + (numberOfAudioCodecs == 1 + || (numberOfAudioCodecs == 0 && multivariantPlaylist.audios.isEmpty())) + && numberOfVideoCodecs <= 1 + && numberOfAudioCodecs + numberOfVideoCodecs > 0; + @C.TrackType + int trackType = + !useVideoVariantsOnly && numberOfAudioCodecs > 0 + ? 
C.TRACK_TYPE_AUDIO + : C.TRACK_TYPE_DEFAULT; + String sampleStreamWrapperUid = "main"; HlsSampleStreamWrapper sampleStreamWrapper = buildSampleStreamWrapper( - C.TRACK_TYPE_DEFAULT, + sampleStreamWrapperUid, + trackType, selectedPlaylistUrls, selectedPlaylistFormats, - masterPlaylist.muxedAudioFormat, - masterPlaylist.muxedCaptionFormats, + multivariantPlaylist.muxedAudioFormat, + multivariantPlaylist.muxedCaptionFormats, overridingDrmInitData, positionUs); sampleStreamWrappers.add(sampleStreamWrapper); manifestUrlIndicesPerWrapper.add(selectedVariantIndices); - if (allowChunklessPreparation && codecs != null) { - boolean variantsContainVideoCodecs = Util.getCodecsOfType(codecs, C.TRACK_TYPE_VIDEO) != null; - boolean variantsContainAudioCodecs = Util.getCodecsOfType(codecs, C.TRACK_TYPE_AUDIO) != null; + if (allowChunklessPreparation && codecsStringAllowsChunklessPreparation) { List muxedTrackGroups = new ArrayList<>(); - if (variantsContainVideoCodecs) { + if (numberOfVideoCodecs > 0) { Format[] videoFormats = new Format[selectedVariantsCount]; for (int i = 0; i < videoFormats.length; i++) { videoFormats[i] = deriveVideoFormat(selectedPlaylistFormats[i]); } - muxedTrackGroups.add(new TrackGroup(videoFormats)); + muxedTrackGroups.add(new TrackGroup(sampleStreamWrapperUid, videoFormats)); - if (variantsContainAudioCodecs - && (masterPlaylist.muxedAudioFormat != null || masterPlaylist.audios.isEmpty())) { + if (numberOfAudioCodecs > 0 + && (multivariantPlaylist.muxedAudioFormat != null + || multivariantPlaylist.audios.isEmpty())) { muxedTrackGroups.add( new TrackGroup( + /* id= */ sampleStreamWrapperUid + ":audio", deriveAudioFormat( selectedPlaylistFormats[0], - masterPlaylist.muxedAudioFormat, + multivariantPlaylist.muxedAudioFormat, /* isPrimaryTrackInVariant= */ false))); } - List ccFormats = masterPlaylist.muxedCaptionFormats; + List ccFormats = multivariantPlaylist.muxedCaptionFormats; if (ccFormats != null) { for (int i = 0; i < ccFormats.size(); i++) { - muxedTrackGroups.add(new TrackGroup(ccFormats.get(i))); + String ccId = sampleStreamWrapperUid + ":cc:" + i; + muxedTrackGroups.add(new TrackGroup(ccId, ccFormats.get(i))); } } - } else if (variantsContainAudioCodecs) { + } else /* numberOfAudioCodecs > 0 */ { // Variants only contain audio. Format[] audioFormats = new Format[selectedVariantsCount]; for (int i = 0; i < audioFormats.length; i++) { audioFormats[i] = deriveAudioFormat( /* variantFormat= */ selectedPlaylistFormats[i], - masterPlaylist.muxedAudioFormat, + multivariantPlaylist.muxedAudioFormat, /* isPrimaryTrackInVariant= */ true); } - muxedTrackGroups.add(new TrackGroup(audioFormats)); - } else { - // Variants contain codecs but no video or audio entries could be identified. 
- throw new IllegalArgumentException("Unexpected codecs attribute: " + codecs); + muxedTrackGroups.add(new TrackGroup(sampleStreamWrapperUid, audioFormats)); } TrackGroup id3TrackGroup = new TrackGroup( - Format.createSampleFormat( - /* id= */ "ID3", - MimeTypes.APPLICATION_ID3, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - /* drmInitData= */ null)); + /* id= */ sampleStreamWrapperUid + ":id3", + new Format.Builder() + .setId("ID3") + .setSampleMimeType(MimeTypes.APPLICATION_ID3) + .build()); muxedTrackGroups.add(id3TrackGroup); - sampleStreamWrapper.prepareWithMasterPlaylistInfo( + sampleStreamWrapper.prepareWithMultivariantPlaylistInfo( muxedTrackGroups.toArray(new TrackGroup[0]), /* primaryTrackGroupIndex= */ 0, - /* optionalTrackGroupsIndices= */ muxedTrackGroups.indexOf(id3TrackGroup)); + /* optionalTrackGroupsIndices...= */ muxedTrackGroups.indexOf(id3TrackGroup)); } } @@ -697,7 +713,7 @@ private void buildAndPrepareAudioSampleStreamWrappers( continue; } - boolean renditionsHaveCodecs = true; + boolean codecStringsAllowChunklessPreparation = true; scratchPlaylistUrls.clear(); scratchPlaylistFormats.clear(); scratchIndicesList.clear(); @@ -708,12 +724,15 @@ private void buildAndPrepareAudioSampleStreamWrappers( scratchIndicesList.add(renditionIndex); scratchPlaylistUrls.add(rendition.url); scratchPlaylistFormats.add(rendition.format); - renditionsHaveCodecs &= rendition.format.codecs != null; + codecStringsAllowChunklessPreparation &= + Util.getCodecCountOfType(rendition.format.codecs, C.TRACK_TYPE_AUDIO) == 1; } } + String sampleStreamWrapperUid = "audio:" + name; HlsSampleStreamWrapper sampleStreamWrapper = buildSampleStreamWrapper( + sampleStreamWrapperUid, C.TRACK_TYPE_AUDIO, scratchPlaylistUrls.toArray(Util.castNonNullTypeArray(new Uri[0])), scratchPlaylistFormats.toArray(new Format[0]), @@ -721,19 +740,21 @@ private void buildAndPrepareAudioSampleStreamWrappers( /* muxedCaptionFormats= */ Collections.emptyList(), overridingDrmInitData, positionUs); - manifestUrlsIndicesPerWrapper.add(Util.toArray(scratchIndicesList)); + manifestUrlsIndicesPerWrapper.add(Ints.toArray(scratchIndicesList)); sampleStreamWrappers.add(sampleStreamWrapper); - if (allowChunklessPreparation && renditionsHaveCodecs) { + if (allowChunklessPreparation && codecStringsAllowChunklessPreparation) { Format[] renditionFormats = scratchPlaylistFormats.toArray(new Format[0]); - sampleStreamWrapper.prepareWithMasterPlaylistInfo( - new TrackGroup[] {new TrackGroup(renditionFormats)}, /* primaryTrackGroupIndex= */ 0); + sampleStreamWrapper.prepareWithMultivariantPlaylistInfo( + new TrackGroup[] {new TrackGroup(sampleStreamWrapperUid, renditionFormats)}, + /* primaryTrackGroupIndex= */ 0); } } } private HlsSampleStreamWrapper buildSampleStreamWrapper( - int trackType, + String uid, + @C.TrackType int trackType, Uri[] playlistUrls, Format[] playlistFormats, @Nullable Format muxedAudioFormat, @@ -749,16 +770,19 @@ private HlsSampleStreamWrapper buildSampleStreamWrapper( dataSourceFactory, mediaTransferListener, timestampAdjusterProvider, - muxedCaptionFormats); + muxedCaptionFormats, + playerId); return new HlsSampleStreamWrapper( + uid, trackType, - /* callback= */ this, + /* callback= */ sampleStreamWrapperCallback, defaultChunkSource, overridingDrmInitData, allocator, positionUs, muxedAudioFormat, drmSessionManager, + drmEventDispatcher, loadErrorHandlingPolicy, eventDispatcher, metadataType); @@ -791,33 +815,34 @@ private static Map deriveOverridingDrmInitData( } private static Format 
deriveVideoFormat(Format variantFormat) { - String codecs = Util.getCodecsOfType(variantFormat.codecs, C.TRACK_TYPE_VIDEO); - String sampleMimeType = MimeTypes.getMediaMimeType(codecs); - return Format.createVideoContainerFormat( - variantFormat.id, - variantFormat.label, - variantFormat.containerMimeType, - sampleMimeType, - codecs, - variantFormat.metadata, - variantFormat.bitrate, - variantFormat.width, - variantFormat.height, - variantFormat.frameRate, - /* initializationData= */ null, - variantFormat.selectionFlags, - variantFormat.roleFlags); + @Nullable String codecs = Util.getCodecsOfType(variantFormat.codecs, C.TRACK_TYPE_VIDEO); + @Nullable String sampleMimeType = MimeTypes.getMediaMimeType(codecs); + return new Format.Builder() + .setId(variantFormat.id) + .setLabel(variantFormat.label) + .setContainerMimeType(variantFormat.containerMimeType) + .setSampleMimeType(sampleMimeType) + .setCodecs(codecs) + .setMetadata(variantFormat.metadata) + .setAverageBitrate(variantFormat.averageBitrate) + .setPeakBitrate(variantFormat.peakBitrate) + .setWidth(variantFormat.width) + .setHeight(variantFormat.height) + .setFrameRate(variantFormat.frameRate) + .setSelectionFlags(variantFormat.selectionFlags) + .setRoleFlags(variantFormat.roleFlags) + .build(); } private static Format deriveAudioFormat( Format variantFormat, @Nullable Format mediaTagFormat, boolean isPrimaryTrackInVariant) { - String codecs; - Metadata metadata; + @Nullable String codecs; + @Nullable Metadata metadata; int channelCount = Format.NO_VALUE; int selectionFlags = 0; int roleFlags = 0; - String language = null; - String label = null; + @Nullable String language = null; + @Nullable String label = null; if (mediaTagFormat != null) { codecs = mediaTagFormat.codecs; metadata = mediaTagFormat.metadata; @@ -837,22 +862,56 @@ private static Format deriveAudioFormat( label = variantFormat.label; } } - String sampleMimeType = MimeTypes.getMediaMimeType(codecs); - int bitrate = isPrimaryTrackInVariant ? variantFormat.bitrate : Format.NO_VALUE; - return Format.createAudioContainerFormat( - variantFormat.id, - label, - variantFormat.containerMimeType, - sampleMimeType, - codecs, - metadata, - bitrate, - channelCount, - /* sampleRate= */ Format.NO_VALUE, - /* initializationData= */ null, - selectionFlags, - roleFlags, - language); + @Nullable String sampleMimeType = MimeTypes.getMediaMimeType(codecs); + int averageBitrate = isPrimaryTrackInVariant ? variantFormat.averageBitrate : Format.NO_VALUE; + int peakBitrate = isPrimaryTrackInVariant ? 
variantFormat.peakBitrate : Format.NO_VALUE; + return new Format.Builder() + .setId(variantFormat.id) + .setLabel(label) + .setContainerMimeType(variantFormat.containerMimeType) + .setSampleMimeType(sampleMimeType) + .setCodecs(codecs) + .setMetadata(metadata) + .setAverageBitrate(averageBitrate) + .setPeakBitrate(peakBitrate) + .setChannelCount(channelCount) + .setSelectionFlags(selectionFlags) + .setRoleFlags(roleFlags) + .setLanguage(language) + .build(); } + private class SampleStreamWrapperCallback implements HlsSampleStreamWrapper.Callback { + @Override + public void onPrepared() { + if (--pendingPrepareCount > 0) { + return; + } + + int totalTrackGroupCount = 0; + for (HlsSampleStreamWrapper sampleStreamWrapper : sampleStreamWrappers) { + totalTrackGroupCount += sampleStreamWrapper.getTrackGroups().length; + } + TrackGroup[] trackGroupArray = new TrackGroup[totalTrackGroupCount]; + int trackGroupIndex = 0; + for (HlsSampleStreamWrapper sampleStreamWrapper : sampleStreamWrappers) { + int wrapperTrackGroupCount = sampleStreamWrapper.getTrackGroups().length; + for (int j = 0; j < wrapperTrackGroupCount; j++) { + trackGroupArray[trackGroupIndex++] = sampleStreamWrapper.getTrackGroups().get(j); + } + } + trackGroups = new TrackGroupArray(trackGroupArray); + mediaPeriodCallback.onPrepared(HlsMediaPeriod.this); + } + + @Override + public void onPlaylistRefreshRequired(Uri url) { + playlistTracker.refreshPlaylist(url); + } + + @Override + public void onContinueLoadingRequested(HlsSampleStreamWrapper sampleStreamWrapper) { + mediaPeriodCallback.onContinueLoadingRequested(HlsMediaPeriod.this); + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsMediaSource.java index cc2fe618fe..830b9fefd3 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsMediaSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsMediaSource.java @@ -15,16 +15,23 @@ */ package com.google.android.exoplayer2.source.hls; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.annotation.ElementType.TYPE_USE; import static java.lang.annotation.RetentionPolicy.SOURCE; -import android.net.Uri; -import android.os.Handler; +import android.os.Looper; +import android.os.SystemClock; import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlayerLibraryInfo; -import com.google.android.exoplayer2.drm.DrmSession; +import com.google.android.exoplayer2.MediaItem; +import com.google.android.exoplayer2.MediaItem.LiveConfiguration; +import com.google.android.exoplayer2.drm.DefaultDrmSessionManagerProvider; +import com.google.android.exoplayer2.drm.DrmSessionEventListener; import com.google.android.exoplayer2.drm.DrmSessionManager; +import com.google.android.exoplayer2.drm.DrmSessionManagerProvider; import com.google.android.exoplayer2.extractor.Extractor; import com.google.android.exoplayer2.offline.StreamKey; import com.google.android.exoplayer2.source.BaseMediaSource; @@ -33,7 +40,6 @@ import com.google.android.exoplayer2.source.MediaPeriod; import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.source.MediaSourceEventListener; -import com.google.android.exoplayer2.source.MediaSourceEventListener.EventDispatcher; import 
com.google.android.exoplayer2.source.MediaSourceFactory; import com.google.android.exoplayer2.source.SequenceableLoader; import com.google.android.exoplayer2.source.SinglePeriodTimeline; @@ -48,10 +54,12 @@ import com.google.android.exoplayer2.upstream.DefaultLoadErrorHandlingPolicy; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; import com.google.android.exoplayer2.upstream.TransferListener; -import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.IOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; +import java.lang.annotation.Target; import java.util.List; /** An HLS {@link MediaSource}. */ @@ -76,6 +84,7 @@ public final class HlsMediaSource extends BaseMediaSource */ @Documented @Retention(SOURCE) + @Target(TYPE_USE) @IntDef({METADATA_TYPE_ID3, METADATA_TYPE_EMSG}) public @interface MetadataType {} @@ -85,26 +94,36 @@ public final class HlsMediaSource extends BaseMediaSource public static final int METADATA_TYPE_EMSG = 3; /** Factory for {@link HlsMediaSource}s. */ + @SuppressWarnings("deprecation") // Implement deprecated type for backwards compatibility. public static final class Factory implements MediaSourceFactory { private final HlsDataSourceFactory hlsDataSourceFactory; private HlsExtractorFactory extractorFactory; private HlsPlaylistParserFactory playlistParserFactory; - @Nullable private List streamKeys; private HlsPlaylistTracker.Factory playlistTrackerFactory; private CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory; - private DrmSessionManager drmSessionManager; + private DrmSessionManagerProvider drmSessionManagerProvider; private LoadErrorHandlingPolicy loadErrorHandlingPolicy; private boolean allowChunklessPreparation; - @MetadataType private int metadataType; + private @MetadataType int metadataType; private boolean useSessionKeys; - private boolean isCreateCalled; - @Nullable private Object tag; + private long elapsedRealTimeOffsetMs; /** * Creates a new factory for {@link HlsMediaSource}s. * + *

<p>The factory will use the following default components:
+     *
+     * <ul>
+     *   <li>{@link DefaultDrmSessionManagerProvider}
+     *   <li>{@link DefaultHlsPlaylistParserFactory}
+     *   <li>{@link DefaultHlsPlaylistTracker#FACTORY}
+     *   <li>{@link HlsExtractorFactory#DEFAULT}
+     *   <li>{@link DefaultLoadErrorHandlingPolicy}
+     *   <li>{@link DefaultCompositeSequenceableLoaderFactory}
+     * </ul>
      + * * @param dataSourceFactory A data source factory that will be wrapped by a {@link * DefaultHlsDataSourceFactory} to create {@link DataSource}s for manifests, segments and * keys. @@ -116,33 +135,31 @@ public Factory(DataSource.Factory dataSourceFactory) { /** * Creates a new factory for {@link HlsMediaSource}s. * + *

<p>The factory will use the following default components:
+     *
+     * <ul>
+     *   <li>{@link DefaultDrmSessionManagerProvider}
+     *   <li>{@link DefaultHlsPlaylistParserFactory}
+     *   <li>{@link DefaultHlsPlaylistTracker#FACTORY}
+     *   <li>{@link HlsExtractorFactory#DEFAULT}
+     *   <li>{@link DefaultLoadErrorHandlingPolicy}
+     *   <li>{@link DefaultCompositeSequenceableLoaderFactory}
+     * </ul>
      + * * @param hlsDataSourceFactory An {@link HlsDataSourceFactory} for {@link DataSource}s for * manifests, segments and keys. */ public Factory(HlsDataSourceFactory hlsDataSourceFactory) { - this.hlsDataSourceFactory = Assertions.checkNotNull(hlsDataSourceFactory); + this.hlsDataSourceFactory = checkNotNull(hlsDataSourceFactory); + drmSessionManagerProvider = new DefaultDrmSessionManagerProvider(); playlistParserFactory = new DefaultHlsPlaylistParserFactory(); playlistTrackerFactory = DefaultHlsPlaylistTracker.FACTORY; extractorFactory = HlsExtractorFactory.DEFAULT; - drmSessionManager = DrmSessionManager.getDummyDrmSessionManager(); loadErrorHandlingPolicy = new DefaultLoadErrorHandlingPolicy(); compositeSequenceableLoaderFactory = new DefaultCompositeSequenceableLoaderFactory(); metadataType = METADATA_TYPE_ID3; - } - - /** - * Sets a tag for the media source which will be published in the {@link - * com.google.android.exoplayer2.Timeline} of the source as {@link - * com.google.android.exoplayer2.Timeline.Window#tag}. - * - * @param tag A tag for the media source. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - */ - public Factory setTag(@Nullable Object tag) { - Assertions.checkState(!isCreateCalled); - this.tag = tag; - return this; + elapsedRealTimeOffsetMs = C.TIME_UNSET; + allowChunklessPreparation = true; } /** @@ -152,107 +169,91 @@ public Factory setTag(@Nullable Object tag) { * @param extractorFactory An {@link HlsExtractorFactory} for {@link Extractor}s for the * segments. * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. */ - public Factory setExtractorFactory(HlsExtractorFactory extractorFactory) { - Assertions.checkState(!isCreateCalled); - this.extractorFactory = Assertions.checkNotNull(extractorFactory); + @CanIgnoreReturnValue + public Factory setExtractorFactory(@Nullable HlsExtractorFactory extractorFactory) { + this.extractorFactory = + extractorFactory != null ? extractorFactory : HlsExtractorFactory.DEFAULT; return this; } - /** - * Sets the {@link LoadErrorHandlingPolicy}. The default value is created by calling {@link - * DefaultLoadErrorHandlingPolicy#DefaultLoadErrorHandlingPolicy()}. - * - *

      Calling this method overrides any calls to {@link #setMinLoadableRetryCount(int)}. - * - * @param loadErrorHandlingPolicy A {@link LoadErrorHandlingPolicy}. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - */ + @CanIgnoreReturnValue + @Override public Factory setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy loadErrorHandlingPolicy) { - Assertions.checkState(!isCreateCalled); - this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; - return this; - } - - /** - * Sets the minimum number of times to retry if a loading error occurs. The default value is - * {@link DefaultLoadErrorHandlingPolicy#DEFAULT_MIN_LOADABLE_RETRY_COUNT}. - * - *

      Calling this method is equivalent to calling {@link #setLoadErrorHandlingPolicy} with - * {@link DefaultLoadErrorHandlingPolicy#DefaultLoadErrorHandlingPolicy(int) - * DefaultLoadErrorHandlingPolicy(minLoadableRetryCount)} - * - * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - * @deprecated Use {@link #setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy)} instead. - */ - @Deprecated - public Factory setMinLoadableRetryCount(int minLoadableRetryCount) { - Assertions.checkState(!isCreateCalled); - this.loadErrorHandlingPolicy = new DefaultLoadErrorHandlingPolicy(minLoadableRetryCount); + this.loadErrorHandlingPolicy = + checkNotNull( + loadErrorHandlingPolicy, + "MediaSource.Factory#setLoadErrorHandlingPolicy no longer handles null by" + + " instantiating a new DefaultLoadErrorHandlingPolicy. Explicitly construct and" + + " pass an instance in order to retain the old behavior."); return this; } /** - * Sets the factory from which playlist parsers will be obtained. The default value is a {@link - * DefaultHlsPlaylistParserFactory}. + * Sets the factory from which playlist parsers will be obtained. * * @param playlistParserFactory An {@link HlsPlaylistParserFactory}. * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. */ + @CanIgnoreReturnValue public Factory setPlaylistParserFactory(HlsPlaylistParserFactory playlistParserFactory) { - Assertions.checkState(!isCreateCalled); - this.playlistParserFactory = Assertions.checkNotNull(playlistParserFactory); + this.playlistParserFactory = + checkNotNull( + playlistParserFactory, + "HlsMediaSource.Factory#setPlaylistParserFactory no longer handles null by" + + " instantiating a new DefaultHlsPlaylistParserFactory. Explicitly" + + " construct and pass an instance in order to retain the old behavior."); return this; } /** - * Sets the {@link HlsPlaylistTracker} factory. The default value is {@link - * DefaultHlsPlaylistTracker#FACTORY}. + * Sets the {@link HlsPlaylistTracker} factory. * * @param playlistTrackerFactory A factory for {@link HlsPlaylistTracker} instances. * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. */ + @CanIgnoreReturnValue public Factory setPlaylistTrackerFactory(HlsPlaylistTracker.Factory playlistTrackerFactory) { - Assertions.checkState(!isCreateCalled); - this.playlistTrackerFactory = Assertions.checkNotNull(playlistTrackerFactory); + this.playlistTrackerFactory = + checkNotNull( + playlistTrackerFactory, + "HlsMediaSource.Factory#setPlaylistTrackerFactory no longer handles null by" + + " defaulting to DefaultHlsPlaylistTracker.FACTORY. Explicitly" + + " pass a reference to this instance in order to retain the old behavior."); return this; } /** * Sets the factory to create composite {@link SequenceableLoader}s for when this media source - * loads data from multiple streams (video, audio etc...). The default is an instance of {@link - * DefaultCompositeSequenceableLoaderFactory}. + * loads data from multiple streams (video, audio etc...). * * @param compositeSequenceableLoaderFactory A factory to create composite {@link * SequenceableLoader}s for when this media source loads data from multiple streams (video, * audio etc...). * @return This factory, for convenience. 
- * @throws IllegalStateException If one of the {@code create} methods has already been called. */ + @CanIgnoreReturnValue public Factory setCompositeSequenceableLoaderFactory( CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory) { - Assertions.checkState(!isCreateCalled); this.compositeSequenceableLoaderFactory = - Assertions.checkNotNull(compositeSequenceableLoaderFactory); + checkNotNull( + compositeSequenceableLoaderFactory, + "HlsMediaSource.Factory#setCompositeSequenceableLoaderFactory no longer handles null" + + " by instantiating a new DefaultCompositeSequenceableLoaderFactory. Explicitly" + + " construct and pass an instance in order to retain the old behavior."); return this; } /** * Sets whether chunkless preparation is allowed. If true, preparation without chunk downloads - * will be enabled for streams that provide sufficient information in their master playlist. + * will be enabled for streams that provide sufficient information in their multivariant + * playlist. * * @param allowChunklessPreparation Whether chunkless preparation is allowed. * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. */ + @CanIgnoreReturnValue public Factory setAllowChunklessPreparation(boolean allowChunklessPreparation) { - Assertions.checkState(!isCreateCalled); this.allowChunklessPreparation = allowChunklessPreparation; return this; } @@ -276,152 +277,151 @@ public Factory setAllowChunklessPreparation(boolean allowChunklessPreparation) { * @param metadataType The type of metadata to extract. * @return This factory, for convenience. */ + @CanIgnoreReturnValue public Factory setMetadataType(@MetadataType int metadataType) { - Assertions.checkState(!isCreateCalled); this.metadataType = metadataType; return this; } /** - * Sets whether to use #EXT-X-SESSION-KEY tags provided in the master playlist. If enabled, it's - * assumed that any single session key declared in the master playlist can be used to obtain all - * of the keys required for playback. For media where this is not true, this option should not - * be enabled. + * Sets whether to use #EXT-X-SESSION-KEY tags provided in the multivariant playlist. If + * enabled, it's assumed that any single session key declared in the multivariant playlist can + * be used to obtain all of the keys required for playback. For media where this is not true, + * this option should not be enabled. * * @param useSessionKeys Whether to use #EXT-X-SESSION-KEY tags. * @return This factory, for convenience. */ + @CanIgnoreReturnValue public Factory setUseSessionKeys(boolean useSessionKeys) { this.useSessionKeys = useSessionKeys; return this; } - /** - * @deprecated Use {@link #createMediaSource(Uri)} and {@link #addEventListener(Handler, - * MediaSourceEventListener)} instead. 
- */ - @Deprecated - public HlsMediaSource createMediaSource( - Uri playlistUri, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - HlsMediaSource mediaSource = createMediaSource(playlistUri); - if (eventHandler != null && eventListener != null) { - mediaSource.addEventListener(eventHandler, eventListener); - } - return mediaSource; + @CanIgnoreReturnValue + @Override + public Factory setDrmSessionManagerProvider( + DrmSessionManagerProvider drmSessionManagerProvider) { + this.drmSessionManagerProvider = + checkNotNull( + drmSessionManagerProvider, + "MediaSource.Factory#setDrmSessionManagerProvider no longer handles null by" + + " instantiating a new DefaultDrmSessionManagerProvider. Explicitly construct" + + " and pass an instance in order to retain the old behavior."); + return this; } /** - * Sets the {@link DrmSessionManager} to use for acquiring {@link DrmSession DrmSessions}. The - * default value is {@link DrmSessionManager#DUMMY}. + * Sets the offset between {@link SystemClock#elapsedRealtime()} and the time since the Unix + * epoch. By default, is it set to {@link C#TIME_UNSET}. * - * @param drmSessionManager The {@link DrmSessionManager}. + * @param elapsedRealTimeOffsetMs The offset between {@link SystemClock#elapsedRealtime()} and + * the time since the Unix epoch, in milliseconds. * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. */ - @Override - public Factory setDrmSessionManager(DrmSessionManager drmSessionManager) { - Assertions.checkState(!isCreateCalled); - this.drmSessionManager = - drmSessionManager != null - ? drmSessionManager - : DrmSessionManager.getDummyDrmSessionManager(); + @CanIgnoreReturnValue + @VisibleForTesting + /* package */ Factory setElapsedRealTimeOffsetMs(long elapsedRealTimeOffsetMs) { + this.elapsedRealTimeOffsetMs = elapsedRealTimeOffsetMs; return this; } /** * Returns a new {@link HlsMediaSource} using the current parameters. * + * @param mediaItem The {@link MediaItem}. * @return The new {@link HlsMediaSource}. + * @throws NullPointerException if {@link MediaItem#localConfiguration} is {@code null}. 
*/ @Override - public HlsMediaSource createMediaSource(Uri playlistUri) { - isCreateCalled = true; - if (streamKeys != null) { + public HlsMediaSource createMediaSource(MediaItem mediaItem) { + checkNotNull(mediaItem.localConfiguration); + HlsPlaylistParserFactory playlistParserFactory = this.playlistParserFactory; + List streamKeys = mediaItem.localConfiguration.streamKeys; + if (!streamKeys.isEmpty()) { playlistParserFactory = new FilteringHlsPlaylistParserFactory(playlistParserFactory, streamKeys); } + return new HlsMediaSource( - playlistUri, + mediaItem, hlsDataSourceFactory, extractorFactory, compositeSequenceableLoaderFactory, - drmSessionManager, + drmSessionManagerProvider.get(mediaItem), loadErrorHandlingPolicy, playlistTrackerFactory.createTracker( hlsDataSourceFactory, loadErrorHandlingPolicy, playlistParserFactory), + elapsedRealTimeOffsetMs, allowChunklessPreparation, metadataType, - useSessionKeys, - tag); - } - - @Override - public Factory setStreamKeys(List streamKeys) { - Assertions.checkState(!isCreateCalled); - this.streamKeys = streamKeys; - return this; + useSessionKeys); } @Override - public int[] getSupportedTypes() { - return new int[] {C.TYPE_HLS}; + public @C.ContentType int[] getSupportedTypes() { + return new int[] {C.CONTENT_TYPE_HLS}; } - } private final HlsExtractorFactory extractorFactory; - private final Uri manifestUri; + private final MediaItem.LocalConfiguration localConfiguration; private final HlsDataSourceFactory dataSourceFactory; private final CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory; - private final DrmSessionManager drmSessionManager; + private final DrmSessionManager drmSessionManager; private final LoadErrorHandlingPolicy loadErrorHandlingPolicy; private final boolean allowChunklessPreparation; private final @MetadataType int metadataType; private final boolean useSessionKeys; private final HlsPlaylistTracker playlistTracker; - @Nullable private final Object tag; + private final long elapsedRealTimeOffsetMs; + private final MediaItem mediaItem; + private MediaItem.LiveConfiguration liveConfiguration; @Nullable private TransferListener mediaTransferListener; private HlsMediaSource( - Uri manifestUri, + MediaItem mediaItem, HlsDataSourceFactory dataSourceFactory, HlsExtractorFactory extractorFactory, CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory, - DrmSessionManager drmSessionManager, + DrmSessionManager drmSessionManager, LoadErrorHandlingPolicy loadErrorHandlingPolicy, HlsPlaylistTracker playlistTracker, + long elapsedRealTimeOffsetMs, boolean allowChunklessPreparation, @MetadataType int metadataType, - boolean useSessionKeys, - @Nullable Object tag) { - this.manifestUri = manifestUri; + boolean useSessionKeys) { + this.localConfiguration = checkNotNull(mediaItem.localConfiguration); + this.mediaItem = mediaItem; + this.liveConfiguration = mediaItem.liveConfiguration; this.dataSourceFactory = dataSourceFactory; this.extractorFactory = extractorFactory; this.compositeSequenceableLoaderFactory = compositeSequenceableLoaderFactory; this.drmSessionManager = drmSessionManager; this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; this.playlistTracker = playlistTracker; + this.elapsedRealTimeOffsetMs = elapsedRealTimeOffsetMs; this.allowChunklessPreparation = allowChunklessPreparation; this.metadataType = metadataType; this.useSessionKeys = useSessionKeys; - this.tag = tag; } @Override - @Nullable - public Object getTag() { - return tag; + public MediaItem getMediaItem() { + return mediaItem; } 
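For context, a minimal usage sketch of the MediaItem-based factory API that this hunk migrates to. It is not part of the diff; the class name HlsSourceSample, the URL, and the 5-second offset are illustrative placeholders. The liveConfiguration.targetOffsetMs value set here is what feeds the target-offset branch of createTimelineForLive() further down in this file.

    import com.google.android.exoplayer2.MediaItem;
    import com.google.android.exoplayer2.source.hls.HlsMediaSource;
    import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;

    final class HlsSourceSample {
      static HlsMediaSource buildLiveSource() {
        // Describe the stream and its desired live offset via MediaItem.
        MediaItem mediaItem =
            new MediaItem.Builder()
                .setUri("https://example.com/live/playlist.m3u8") // placeholder URL
                .setLiveConfiguration(
                    new MediaItem.LiveConfiguration.Builder().setTargetOffsetMs(5_000).build())
                .build();
        // The factory now reads stream keys, DRM configuration and live settings from the item.
        return new HlsMediaSource.Factory(new DefaultHttpDataSource.Factory())
            .setAllowChunklessPreparation(true)
            .createMediaSource(mediaItem);
      }
    }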
@Override protected void prepareSourceInternal(@Nullable TransferListener mediaTransferListener) { this.mediaTransferListener = mediaTransferListener; drmSessionManager.prepare(); - EventDispatcher eventDispatcher = createEventDispatcher(/* mediaPeriodId= */ null); - playlistTracker.start(manifestUri, eventDispatcher, /* listener= */ this); + drmSessionManager.setPlayer( + /* playbackLooper= */ checkNotNull(Looper.myLooper()), getPlayerId()); + MediaSourceEventListener.EventDispatcher eventDispatcher = + createEventDispatcher(/* mediaPeriodId= */ null); + playlistTracker.start( + localConfiguration.uri, eventDispatcher, /* primaryPlaylistListener= */ this); } @Override @@ -431,20 +431,23 @@ public void maybeThrowSourceInfoRefreshError() throws IOException { @Override public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long startPositionUs) { - EventDispatcher eventDispatcher = createEventDispatcher(id); + MediaSourceEventListener.EventDispatcher mediaSourceEventDispatcher = createEventDispatcher(id); + DrmSessionEventListener.EventDispatcher drmEventDispatcher = createDrmEventDispatcher(id); return new HlsMediaPeriod( extractorFactory, playlistTracker, dataSourceFactory, mediaTransferListener, drmSessionManager, + drmEventDispatcher, loadErrorHandlingPolicy, - eventDispatcher, + mediaSourceEventDispatcher, allocator, compositeSequenceableLoaderFactory, allowChunklessPreparation, metadataType, - useSessionKeys); + useSessionKeys, + getPlayerId()); } @Override @@ -459,73 +462,212 @@ protected void releaseSourceInternal() { } @Override - public void onPrimaryPlaylistRefreshed(HlsMediaPlaylist playlist) { - SinglePeriodTimeline timeline; - long windowStartTimeMs = playlist.hasProgramDateTime ? C.usToMs(playlist.startTimeUs) - : C.TIME_UNSET; + public void onPrimaryPlaylistRefreshed(HlsMediaPlaylist mediaPlaylist) { + long windowStartTimeMs = + mediaPlaylist.hasProgramDateTime ? Util.usToMs(mediaPlaylist.startTimeUs) : C.TIME_UNSET; // For playlist types EVENT and VOD we know segments are never removed, so the presentation // started at the same time as the window. Otherwise, we don't know the presentation start time. long presentationStartTimeMs = - playlist.playlistType == HlsMediaPlaylist.PLAYLIST_TYPE_EVENT - || playlist.playlistType == HlsMediaPlaylist.PLAYLIST_TYPE_VOD + mediaPlaylist.playlistType == HlsMediaPlaylist.PLAYLIST_TYPE_EVENT + || mediaPlaylist.playlistType == HlsMediaPlaylist.PLAYLIST_TYPE_VOD ? windowStartTimeMs : C.TIME_UNSET; - long windowDefaultStartPositionUs = playlist.startOffsetUs; - // masterPlaylist is non-null because the first playlist has been fetched by now. + // The multivariant playlist is non-null because the first playlist has been fetched by now. HlsManifest manifest = - new HlsManifest(Assertions.checkNotNull(playlistTracker.getMasterPlaylist()), playlist); - if (playlistTracker.isLive()) { - long offsetFromInitialStartTimeUs = - playlist.startTimeUs - playlistTracker.getInitialStartTimeUs(); - long periodDurationUs = - playlist.hasEndTag ? offsetFromInitialStartTimeUs + playlist.durationUs : C.TIME_UNSET; - List segments = playlist.segments; - if (windowDefaultStartPositionUs == C.TIME_UNSET) { - windowDefaultStartPositionUs = 0; - if (!segments.isEmpty()) { - int defaultStartSegmentIndex = Math.max(0, segments.size() - 3); - // We attempt to set the default start position to be at least twice the target duration - // behind the live edge. 
- long minStartPositionUs = playlist.durationUs - playlist.targetDurationUs * 2; - while (defaultStartSegmentIndex > 0 - && segments.get(defaultStartSegmentIndex).relativeStartTimeUs > minStartPositionUs) { - defaultStartSegmentIndex--; - } - windowDefaultStartPositionUs = segments.get(defaultStartSegmentIndex).relativeStartTimeUs; - } + new HlsManifest(checkNotNull(playlistTracker.getMultivariantPlaylist()), mediaPlaylist); + SinglePeriodTimeline timeline = + playlistTracker.isLive() + ? createTimelineForLive( + mediaPlaylist, presentationStartTimeMs, windowStartTimeMs, manifest) + : createTimelineForOnDemand( + mediaPlaylist, presentationStartTimeMs, windowStartTimeMs, manifest); + refreshSourceInfo(timeline); + } + + private SinglePeriodTimeline createTimelineForLive( + HlsMediaPlaylist playlist, + long presentationStartTimeMs, + long windowStartTimeMs, + HlsManifest manifest) { + long offsetFromInitialStartTimeUs = + playlist.startTimeUs - playlistTracker.getInitialStartTimeUs(); + long periodDurationUs = + playlist.hasEndTag ? offsetFromInitialStartTimeUs + playlist.durationUs : C.TIME_UNSET; + long liveEdgeOffsetUs = getLiveEdgeOffsetUs(playlist); + long targetLiveOffsetUs; + if (liveConfiguration.targetOffsetMs != C.TIME_UNSET) { + // Media item has a defined target offset. + targetLiveOffsetUs = Util.msToUs(liveConfiguration.targetOffsetMs); + } else { + // Decide target offset from playlist. + targetLiveOffsetUs = getTargetLiveOffsetUs(playlist, liveEdgeOffsetUs); + } + // Ensure target live offset is within the live window and greater than the live edge offset. + targetLiveOffsetUs = + Util.constrainValue( + targetLiveOffsetUs, liveEdgeOffsetUs, playlist.durationUs + liveEdgeOffsetUs); + updateLiveConfiguration(playlist, targetLiveOffsetUs); + long windowDefaultStartPositionUs = + getLiveWindowDefaultStartPositionUs(playlist, liveEdgeOffsetUs); + boolean suppressPositionProjection = + playlist.playlistType == HlsMediaPlaylist.PLAYLIST_TYPE_EVENT + && playlist.hasPositiveStartOffset; + return new SinglePeriodTimeline( + presentationStartTimeMs, + windowStartTimeMs, + /* elapsedRealtimeEpochOffsetMs= */ C.TIME_UNSET, + periodDurationUs, + /* windowDurationUs= */ playlist.durationUs, + /* windowPositionInPeriodUs= */ offsetFromInitialStartTimeUs, + windowDefaultStartPositionUs, + /* isSeekable= */ true, + /* isDynamic= */ !playlist.hasEndTag, + suppressPositionProjection, + manifest, + mediaItem, + liveConfiguration); + } + + private SinglePeriodTimeline createTimelineForOnDemand( + HlsMediaPlaylist playlist, + long presentationStartTimeMs, + long windowStartTimeMs, + HlsManifest manifest) { + long windowDefaultStartPositionUs; + if (playlist.startOffsetUs == C.TIME_UNSET || playlist.segments.isEmpty()) { + windowDefaultStartPositionUs = 0; + } else { + if (playlist.preciseStart || playlist.startOffsetUs == playlist.durationUs) { + windowDefaultStartPositionUs = playlist.startOffsetUs; + } else { + windowDefaultStartPositionUs = + findClosestPrecedingSegment(playlist.segments, playlist.startOffsetUs) + .relativeStartTimeUs; } - timeline = - new SinglePeriodTimeline( - presentationStartTimeMs, - windowStartTimeMs, - periodDurationUs, - /* windowDurationUs= */ playlist.durationUs, - /* windowPositionInPeriodUs= */ offsetFromInitialStartTimeUs, - windowDefaultStartPositionUs, - /* isSeekable= */ true, - /* isDynamic= */ !playlist.hasEndTag, - /* isLive= */ true, - manifest, - tag); - } else /* not live */ { - if (windowDefaultStartPositionUs == C.TIME_UNSET) { - 
windowDefaultStartPositionUs = 0; + } + return new SinglePeriodTimeline( + presentationStartTimeMs, + windowStartTimeMs, + /* elapsedRealtimeEpochOffsetMs= */ C.TIME_UNSET, + /* periodDurationUs= */ playlist.durationUs, + /* windowDurationUs= */ playlist.durationUs, + /* windowPositionInPeriodUs= */ 0, + windowDefaultStartPositionUs, + /* isSeekable= */ true, + /* isDynamic= */ false, + /* suppressPositionProjection= */ true, + manifest, + mediaItem, + /* liveConfiguration= */ null); + } + + private long getLiveEdgeOffsetUs(HlsMediaPlaylist playlist) { + return playlist.hasProgramDateTime + ? Util.msToUs(Util.getNowUnixTimeMs(elapsedRealTimeOffsetMs)) - playlist.getEndTimeUs() + : 0; + } + + private long getLiveWindowDefaultStartPositionUs( + HlsMediaPlaylist playlist, long liveEdgeOffsetUs) { + long startPositionUs = + playlist.startOffsetUs != C.TIME_UNSET + ? playlist.startOffsetUs + : playlist.durationUs + + liveEdgeOffsetUs + - Util.msToUs(liveConfiguration.targetOffsetMs); + if (playlist.preciseStart) { + return startPositionUs; + } + @Nullable + HlsMediaPlaylist.Part part = + findClosestPrecedingIndependentPart(playlist.trailingParts, startPositionUs); + if (part != null) { + return part.relativeStartTimeUs; + } + if (playlist.segments.isEmpty()) { + return 0; + } + HlsMediaPlaylist.Segment segment = + findClosestPrecedingSegment(playlist.segments, startPositionUs); + part = findClosestPrecedingIndependentPart(segment.parts, startPositionUs); + if (part != null) { + return part.relativeStartTimeUs; + } + return segment.relativeStartTimeUs; + } + + private void updateLiveConfiguration(HlsMediaPlaylist playlist, long targetLiveOffsetUs) { + boolean disableSpeedAdjustment = + mediaItem.liveConfiguration.minPlaybackSpeed == C.RATE_UNSET + && mediaItem.liveConfiguration.maxPlaybackSpeed == C.RATE_UNSET + && playlist.serverControl.holdBackUs == C.TIME_UNSET + && playlist.serverControl.partHoldBackUs == C.TIME_UNSET; + liveConfiguration = + new LiveConfiguration.Builder() + .setTargetOffsetMs(Util.usToMs(targetLiveOffsetUs)) + .setMinPlaybackSpeed(disableSpeedAdjustment ? 1f : liveConfiguration.minPlaybackSpeed) + .setMaxPlaybackSpeed(disableSpeedAdjustment ? 1f : liveConfiguration.maxPlaybackSpeed) + .build(); + } + + /** + * Gets the target live offset, in microseconds, for a live playlist. + * + *

<p>The target offset is derived by checking the following in this order:
+   *
+   * <ol>
+   *   <li>The playlist defines a start offset.
+   *   <li>The playlist defines a part hold back in server control and has part duration.
+   *   <li>The playlist defines a hold back in server control.
+   *   <li>Fallback to {@code 3 x target duration}.
+   * </ol>
      + * + * @param playlist The playlist. + * @param liveEdgeOffsetUs The current live edge offset. + * @return The selected target live offset, in microseconds. + */ + private static long getTargetLiveOffsetUs(HlsMediaPlaylist playlist, long liveEdgeOffsetUs) { + HlsMediaPlaylist.ServerControl serverControl = playlist.serverControl; + long targetOffsetUs; + if (playlist.startOffsetUs != C.TIME_UNSET) { + targetOffsetUs = playlist.durationUs - playlist.startOffsetUs; + } else if (serverControl.partHoldBackUs != C.TIME_UNSET + && playlist.partTargetDurationUs != C.TIME_UNSET) { + // Select part hold back only if the playlist has a part target duration. + targetOffsetUs = serverControl.partHoldBackUs; + } else if (serverControl.holdBackUs != C.TIME_UNSET) { + targetOffsetUs = serverControl.holdBackUs; + } else { + // Fallback, see RFC 8216, Section 4.4.3.8. + targetOffsetUs = 3 * playlist.targetDurationUs; + } + return targetOffsetUs + liveEdgeOffsetUs; + } + + @Nullable + private static HlsMediaPlaylist.Part findClosestPrecedingIndependentPart( + List parts, long positionUs) { + @Nullable HlsMediaPlaylist.Part closestPart = null; + for (int i = 0; i < parts.size(); i++) { + HlsMediaPlaylist.Part part = parts.get(i); + if (part.relativeStartTimeUs <= positionUs && part.isIndependent) { + closestPart = part; + } else if (part.relativeStartTimeUs > positionUs) { + break; } - timeline = - new SinglePeriodTimeline( - presentationStartTimeMs, - windowStartTimeMs, - /* periodDurationUs= */ playlist.durationUs, - /* windowDurationUs= */ playlist.durationUs, - /* windowPositionInPeriodUs= */ 0, - windowDefaultStartPositionUs, - /* isSeekable= */ true, - /* isDynamic= */ false, - /* isLive= */ false, - manifest, - tag); } - refreshSourceInfo(timeline); + return closestPart; } + /** + * Gets the segment that contains {@code positionUs}, or the last segment if the position is + * beyond the segments list. + */ + private static HlsMediaPlaylist.Segment findClosestPrecedingSegment( + List segments, long positionUs) { + int segmentIndex = + Util.binarySearchFloor( + segments, positionUs, /* inclusive= */ true, /* stayInBounds= */ true); + return segments.get(segmentIndex); + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsSampleStream.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsSampleStream.java index c820038b80..a590607850 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsSampleStream.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsSampleStream.java @@ -22,9 +22,7 @@ import com.google.android.exoplayer2.util.Assertions; import java.io.IOException; -/** - * {@link SampleStream} for a particular sample queue in HLS. - */ +/** {@link SampleStream} for a particular sample queue in HLS. */ /* package */ final class HlsSampleStream implements SampleStream { private final int trackGroupIndex; @@ -70,13 +68,14 @@ public void maybeThrowError() throws IOException { } @Override - public int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, boolean requireFormat) { + public int readData( + FormatHolder formatHolder, DecoderInputBuffer buffer, @ReadFlags int readFlags) { if (sampleQueueIndex == HlsSampleStreamWrapper.SAMPLE_QUEUE_INDEX_NO_MAPPING_NON_FATAL) { buffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM); return C.RESULT_BUFFER_READ; } return hasValidSampleQueueIndex() - ? sampleStreamWrapper.readData(sampleQueueIndex, formatHolder, buffer, requireFormat) + ? 
sampleStreamWrapper.readData(sampleQueueIndex, formatHolder, buffer, readFlags) : C.RESULT_NOTHING_READ; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsSampleStreamWrapper.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsSampleStreamWrapper.java index c7116ba878..09221b2b0a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsSampleStreamWrapper.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsSampleStreamWrapper.java @@ -15,22 +15,28 @@ */ package com.google.android.exoplayer2.source.hls; +import static com.google.android.exoplayer2.source.hls.HlsChunkSource.CHUNK_PUBLICATION_STATE_PUBLISHED; +import static com.google.android.exoplayer2.source.hls.HlsChunkSource.CHUNK_PUBLICATION_STATE_REMOVED; +import static com.google.android.exoplayer2.trackselection.TrackSelectionUtil.createFallbackOptions; +import static java.lang.Math.max; +import static java.lang.Math.min; + import android.net.Uri; import android.os.Handler; -import android.os.Looper; import android.util.SparseIntArray; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.SeekParameters; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.drm.DrmSession; +import com.google.android.exoplayer2.drm.DrmSessionEventListener; import com.google.android.exoplayer2.drm.DrmSessionManager; import com.google.android.exoplayer2.extractor.DummyTrackOutput; import com.google.android.exoplayer2.extractor.Extractor; -import com.google.android.exoplayer2.extractor.ExtractorInput; import com.google.android.exoplayer2.extractor.ExtractorOutput; import com.google.android.exoplayer2.extractor.SeekMap; import com.google.android.exoplayer2.extractor.TrackOutput; @@ -38,18 +44,24 @@ import com.google.android.exoplayer2.metadata.emsg.EventMessage; import com.google.android.exoplayer2.metadata.emsg.EventMessageDecoder; import com.google.android.exoplayer2.metadata.id3.PrivFrame; -import com.google.android.exoplayer2.source.MediaSourceEventListener.EventDispatcher; +import com.google.android.exoplayer2.source.LoadEventInfo; +import com.google.android.exoplayer2.source.MediaLoadData; +import com.google.android.exoplayer2.source.MediaSourceEventListener; import com.google.android.exoplayer2.source.SampleQueue; import com.google.android.exoplayer2.source.SampleQueue.UpstreamFormatChangedListener; import com.google.android.exoplayer2.source.SampleStream; +import com.google.android.exoplayer2.source.SampleStream.ReadFlags; import com.google.android.exoplayer2.source.SequenceableLoader; import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.TrackGroupArray; import com.google.android.exoplayer2.source.chunk.Chunk; import com.google.android.exoplayer2.source.chunk.MediaChunkIterator; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.upstream.Allocator; +import com.google.android.exoplayer2.upstream.DataReader; +import com.google.android.exoplayer2.upstream.HttpDataSource; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; +import 
com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy.LoadErrorInfo; import com.google.android.exoplayer2.upstream.Loader; import com.google.android.exoplayer2.upstream.Loader.LoadErrorAction; import com.google.android.exoplayer2.util.Assertions; @@ -57,6 +69,8 @@ import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; import java.io.EOFException; import java.io.IOException; import java.util.ArrayList; @@ -72,22 +86,24 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; /** - * Loads {@link HlsMediaChunk}s obtained from a {@link HlsChunkSource}, and provides - * {@link SampleStream}s from which the loaded media can be consumed. + * Loads {@link HlsMediaChunk}s obtained from a {@link HlsChunkSource}, and provides {@link + * SampleStream}s from which the loaded media can be consumed. */ -/* package */ final class HlsSampleStreamWrapper implements Loader.Callback, - Loader.ReleaseCallback, SequenceableLoader, ExtractorOutput, UpstreamFormatChangedListener { - - /** - * A callback to be notified of events. - */ +/* package */ final class HlsSampleStreamWrapper + implements Loader.Callback, + Loader.ReleaseCallback, + SequenceableLoader, + ExtractorOutput, + UpstreamFormatChangedListener { + + /** A callback to be notified of events. */ public interface Callback extends SequenceableLoader.Callback { /** * Called when the wrapper has been prepared. * *

      Note: This method will be called on a later handler loop than the one on which either - * {@link #prepareWithMasterPlaylistInfo} or {@link #continuePreparing} are invoked. + * {@link #prepareWithMultivariantPlaylistInfo} or {@link #continuePreparing} are invoked. */ void onPrepared(); @@ -109,15 +125,17 @@ public interface Callback extends SequenceableLoader.Callback( Arrays.asList(C.TRACK_TYPE_AUDIO, C.TRACK_TYPE_VIDEO, C.TRACK_TYPE_METADATA))); - private final int trackType; + private final String uid; + private final @C.TrackType int trackType; private final Callback callback; private final HlsChunkSource chunkSource; private final Allocator allocator; @Nullable private final Format muxedAudioFormat; - private final DrmSessionManager drmSessionManager; + private final DrmSessionManager drmSessionManager; + private final DrmSessionEventListener.EventDispatcher drmEventDispatcher; private final LoadErrorHandlingPolicy loadErrorHandlingPolicy; private final Loader loader; - private final EventDispatcher eventDispatcher; + private final MediaSourceEventListener.EventDispatcher mediaSourceEventDispatcher; private final @HlsMediaSource.MetadataType int metadataType; private final HlsChunkSource.HlsChunkHolder nextChunkHolder; private final ArrayList mediaChunks; @@ -129,24 +147,25 @@ public interface Callback extends SequenceableLoader.Callback hlsSampleStreams; private final Map overridingDrmInitData; - private FormatAdjustingSampleQueue[] sampleQueues; + @Nullable private Chunk loadingChunk; + private HlsSampleQueue[] sampleQueues; private int[] sampleQueueTrackIds; private Set sampleQueueMappingDoneByType; private SparseIntArray sampleQueueIndicesByType; - @MonotonicNonNull private TrackOutput emsgUnwrappingTrackOutput; + private @MonotonicNonNull TrackOutput emsgUnwrappingTrackOutput; private int primarySampleQueueType; private int primarySampleQueueIndex; private boolean sampleQueuesBuilt; private boolean prepared; private int enabledTrackGroupCount; - @MonotonicNonNull private Format upstreamTrackFormat; + private @MonotonicNonNull Format upstreamTrackFormat; @Nullable private Format downstreamTrackFormat; private boolean released; // Tracks are complicated in HLS. See documentation of buildTracksFromSampleStreams for details. // Indexed by track (as exposed by this source). - @MonotonicNonNull private TrackGroupArray trackGroups; - @MonotonicNonNull private Set optionalTrackGroups; + private @MonotonicNonNull TrackGroupArray trackGroups; + private @MonotonicNonNull Set optionalTrackGroups; // Indexed by track group. 
private int @MonotonicNonNull [] trackGroupToSampleQueueIndex; private int primaryTrackGroupIndex; @@ -164,10 +183,12 @@ public interface Callback extends SequenceableLoader.Callback overridingDrmInitData, Allocator allocator, long positionUs, @Nullable Format muxedAudioFormat, - DrmSessionManager drmSessionManager, + DrmSessionManager drmSessionManager, + DrmSessionEventListener.EventDispatcher drmEventDispatcher, LoadErrorHandlingPolicy loadErrorHandlingPolicy, - EventDispatcher eventDispatcher, + MediaSourceEventListener.EventDispatcher mediaSourceEventDispatcher, @HlsMediaSource.MetadataType int metadataType) { + this.uid = uid; this.trackType = trackType; this.callback = callback; this.chunkSource = chunkSource; @@ -201,28 +228,29 @@ public HlsSampleStreamWrapper( this.allocator = allocator; this.muxedAudioFormat = muxedAudioFormat; this.drmSessionManager = drmSessionManager; + this.drmEventDispatcher = drmEventDispatcher; this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; - this.eventDispatcher = eventDispatcher; + this.mediaSourceEventDispatcher = mediaSourceEventDispatcher; this.metadataType = metadataType; loader = new Loader("Loader:HlsSampleStreamWrapper"); nextChunkHolder = new HlsChunkSource.HlsChunkHolder(); sampleQueueTrackIds = new int[0]; sampleQueueMappingDoneByType = new HashSet<>(MAPPABLE_TYPES.size()); sampleQueueIndicesByType = new SparseIntArray(MAPPABLE_TYPES.size()); - sampleQueues = new FormatAdjustingSampleQueue[0]; + sampleQueues = new HlsSampleQueue[0]; sampleQueueIsAudioVideoFlags = new boolean[0]; sampleQueuesEnabledStates = new boolean[0]; mediaChunks = new ArrayList<>(); readOnlyMediaChunks = Collections.unmodifiableList(mediaChunks); hlsSampleStreams = new ArrayList<>(); // Suppressions are needed because `this` is not initialized here. - @SuppressWarnings("nullness:methodref.receiver.bound.invalid") + @SuppressWarnings("nullness:methodref.receiver.bound") Runnable maybeFinishPrepareRunnable = this::maybeFinishPrepare; this.maybeFinishPrepareRunnable = maybeFinishPrepareRunnable; - @SuppressWarnings("nullness:methodref.receiver.bound.invalid") + @SuppressWarnings("nullness:methodref.receiver.bound") Runnable onTracksEndedRunnable = this::onTracksEnded; this.onTracksEndedRunnable = onTracksEndedRunnable; - handler = new Handler(); + handler = Util.createHandlerForCurrentLooper(); lastSeekPositionUs = positionUs; pendingResetPositionUs = positionUs; } @@ -234,7 +262,7 @@ public void continuePreparing() { } /** - * Prepares the sample stream wrapper with master playlist information. + * Prepares the sample stream wrapper with multivariant playlist information. * * @param trackGroups The {@link TrackGroup TrackGroups} to expose through {@link * #getTrackGroups()}. @@ -242,7 +270,7 @@ public void continuePreparing() { * @param optionalTrackGroupsIndices The indices of any {@code trackGroups} that should not * trigger a failure if not found in the media playlist's segments. */ - public void prepareWithMasterPlaylistInfo( + public void prepareWithMultivariantPlaylistInfo( TrackGroup[] trackGroups, int primaryTrackGroupIndex, int... 
optionalTrackGroupsIndices) { this.trackGroups = createTrackGroupArrayWithDrmInfo(trackGroups); optionalTrackGroups = new HashSet<>(); @@ -257,7 +285,8 @@ public void prepareWithMasterPlaylistInfo( public void maybeThrowPrepareError() throws IOException { maybeThrowError(); if (loadingFinished && !prepared) { - throw new ParserException("Loading finished before preparation is complete."); + throw ParserException.createForMalformedContainer( + "Loading finished before preparation is complete.", /* cause= */ null); } } @@ -314,7 +343,7 @@ public void unbindSampleQueue(int trackGroupIndex) { * part of the track selection. */ public boolean selectTracks( - @NullableType TrackSelection[] selections, + @NullableType ExoTrackSelection[] selections, boolean[] mayRetainStreamFlags, @NullableType SampleStream[] streams, boolean[] streamResetFlags, @@ -341,11 +370,11 @@ public boolean selectTracks( : positionUs != lastSeekPositionUs); // Get the old (i.e. current before the loop below executes) primary track selection. The new // primary selection will equal the old one unless it's changed in the loop. - TrackSelection oldPrimaryTrackSelection = chunkSource.getTrackSelection(); - TrackSelection primaryTrackSelection = oldPrimaryTrackSelection; + ExoTrackSelection oldPrimaryTrackSelection = chunkSource.getTrackSelection(); + ExoTrackSelection primaryTrackSelection = oldPrimaryTrackSelection; // Select new tracks. for (int i = 0; i < selections.length; i++) { - TrackSelection selection = selections[i]; + ExoTrackSelection selection = selections[i]; if (selection == null) { continue; } @@ -476,6 +505,12 @@ public boolean seekToUs(long positionUs, boolean forceReset) { loadingFinished = false; mediaChunks.clear(); if (loader.isLoading()) { + if (sampleQueuesBuilt) { + // Discard as much as we can synchronously. + for (SampleQueue sampleQueue : sampleQueues) { + sampleQueue.discardToEnd(); + } + } loader.cancelLoading(); } else { loader.clearFatalError(); @@ -484,6 +519,23 @@ public boolean seekToUs(long positionUs, boolean forceReset) { return true; } + /** Called when the playlist is updated. */ + public void onPlaylistUpdated() { + if (mediaChunks.isEmpty()) { + return; + } + HlsMediaChunk lastMediaChunk = Iterables.getLast(mediaChunks); + @HlsChunkSource.ChunkPublicationState + int chunkState = chunkSource.getChunkPublicationState(lastMediaChunk); + if (chunkState == CHUNK_PUBLICATION_STATE_PUBLISHED) { + lastMediaChunk.publish(); + } else if (chunkState == CHUNK_PUBLICATION_STATE_REMOVED + && !loadingFinished + && loader.isLoading()) { + loader.cancelLoading(); + } + } + public void release() { if (prepared) { // Discard as much as we can synchronously. We only do this if we're prepared, since otherwise @@ -509,8 +561,50 @@ public void setIsTimestampMaster(boolean isTimestampMaster) { chunkSource.setIsTimestampMaster(isTimestampMaster); } - public boolean onPlaylistError(Uri playlistUrl, long blacklistDurationMs) { - return chunkSource.onPlaylistError(playlistUrl, blacklistDurationMs); + /** + * Called if an error is encountered while loading a playlist. + * + * @param playlistUrl The {@link Uri} of the playlist whose load encountered an error. + * @param loadErrorInfo The load error info. + * @param forceRetry Whether retry should be forced without considering exclusion. + * @return True if excluding did not encounter errors. False otherwise. 
+ */ + public boolean onPlaylistError(Uri playlistUrl, LoadErrorInfo loadErrorInfo, boolean forceRetry) { + if (!chunkSource.obtainsChunksForPlaylist(playlistUrl)) { + // Return early if the chunk source doesn't deliver chunks for the failing playlist. + return true; + } + long exclusionDurationMs = C.TIME_UNSET; + if (!forceRetry) { + @Nullable + LoadErrorHandlingPolicy.FallbackSelection fallbackSelection = + loadErrorHandlingPolicy.getFallbackSelectionFor( + createFallbackOptions(chunkSource.getTrackSelection()), loadErrorInfo); + if (fallbackSelection != null + && fallbackSelection.type == LoadErrorHandlingPolicy.FALLBACK_TYPE_TRACK) { + exclusionDurationMs = fallbackSelection.exclusionDurationMs; + } + } + // We must call ChunkSource.onPlaylistError in any case to give the chunk source the chance to + // mark the playlist as failing. + return chunkSource.onPlaylistError(playlistUrl, exclusionDurationMs) + && exclusionDurationMs != C.TIME_UNSET; + } + + /** Returns whether the primary sample stream is {@link C#TRACK_TYPE_VIDEO}. */ + public boolean isVideoSampleStream() { + return primarySampleQueueType == C.TRACK_TYPE_VIDEO; + } + + /** + * Adjusts a seek position given the specified {@link SeekParameters}. + * + * @param positionUs The seek position in microseconds. + * @param seekParameters Parameters that control how the seek is performed. + * @return The adjusted seek position, in microseconds. + */ + public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParameters) { + return chunkSource.getAdjustedSeekPositionUs(positionUs, seekParameters); } // SampleStream implementation. @@ -529,8 +623,11 @@ public void maybeThrowError() throws IOException { chunkSource.maybeThrowError(); } - public int readData(int sampleQueueIndex, FormatHolder formatHolder, DecoderInputBuffer buffer, - boolean requireFormat) { + public int readData( + int sampleQueueIndex, + FormatHolder formatHolder, + DecoderInputBuffer buffer, + @ReadFlags int readFlags) { if (isPendingReset()) { return C.RESULT_NOTHING_READ; } @@ -546,16 +643,23 @@ && finishedReadingChunk(mediaChunks.get(discardToMediaChunkIndex))) { HlsMediaChunk currentChunk = mediaChunks.get(0); Format trackFormat = currentChunk.trackFormat; if (!trackFormat.equals(downstreamTrackFormat)) { - eventDispatcher.downstreamFormatChanged(trackType, trackFormat, - currentChunk.trackSelectionReason, currentChunk.trackSelectionData, + mediaSourceEventDispatcher.downstreamFormatChanged( + trackType, + trackFormat, + currentChunk.trackSelectionReason, + currentChunk.trackSelectionData, currentChunk.startTimeUs); } downstreamTrackFormat = trackFormat; } + if (!mediaChunks.isEmpty() && !mediaChunks.get(0).isPublished()) { + // Don't read into preload chunks until we can be sure they are permanently published. + return C.RESULT_NOTHING_READ; + } + int result = - sampleQueues[sampleQueueIndex].read( - formatHolder, buffer, requireFormat, loadingFinished, lastSeekPositionUs); + sampleQueues[sampleQueueIndex].read(formatHolder, buffer, readFlags, loadingFinished); if (result == C.RESULT_FORMAT_READ) { Format format = Assertions.checkNotNull(formatHolder.format); if (sampleQueueIndex == primarySampleQueueIndex) { @@ -569,7 +673,7 @@ && finishedReadingChunk(mediaChunks.get(discardToMediaChunkIndex))) { chunkIndex < mediaChunks.size() ? 
mediaChunks.get(chunkIndex).trackFormat : Assertions.checkNotNull(upstreamTrackFormat); - format = format.copyWithManifestFormatInfo(trackFormat); + format = format.withManifestFormatInfo(trackFormat); } formatHolder.format = format; } @@ -582,11 +686,18 @@ public int skipData(int sampleQueueIndex, long positionUs) { } SampleQueue sampleQueue = sampleQueues[sampleQueueIndex]; - if (loadingFinished && positionUs > sampleQueue.getLargestQueuedTimestampUs()) { - return sampleQueue.advanceToEnd(); - } else { - return sampleQueue.advanceTo(positionUs); + int skipCount = sampleQueue.getSkipCount(positionUs, loadingFinished); + + // Ensure we don't skip into preload chunks until we can be sure they are permanently published. + @Nullable HlsMediaChunk lastChunk = Iterables.getLast(mediaChunks, /* defaultValue= */ null); + if (lastChunk != null && !lastChunk.isPublished()) { + int readIndex = sampleQueue.getReadIndex(); + int firstSampleIndex = lastChunk.getFirstSampleIndex(sampleQueueIndex); + skipCount = min(skipCount, firstSampleIndex - readIndex); } + + sampleQueue.skip(skipCount); + return skipCount; } // SequenceableLoader implementation @@ -600,15 +711,16 @@ public long getBufferedPositionUs() { } else { long bufferedPositionUs = lastSeekPositionUs; HlsMediaChunk lastMediaChunk = getLastMediaChunk(); - HlsMediaChunk lastCompletedMediaChunk = lastMediaChunk.isLoadCompleted() ? lastMediaChunk - : mediaChunks.size() > 1 ? mediaChunks.get(mediaChunks.size() - 2) : null; + HlsMediaChunk lastCompletedMediaChunk = + lastMediaChunk.isLoadCompleted() + ? lastMediaChunk + : mediaChunks.size() > 1 ? mediaChunks.get(mediaChunks.size() - 2) : null; if (lastCompletedMediaChunk != null) { - bufferedPositionUs = Math.max(bufferedPositionUs, lastCompletedMediaChunk.endTimeUs); + bufferedPositionUs = max(bufferedPositionUs, lastCompletedMediaChunk.endTimeUs); } if (sampleQueuesBuilt) { for (SampleQueue sampleQueue : sampleQueues) { - bufferedPositionUs = - Math.max(bufferedPositionUs, sampleQueue.getLargestQueuedTimestampUs()); + bufferedPositionUs = max(bufferedPositionUs, sampleQueue.getLargestQueuedTimestampUs()); } } return bufferedPositionUs; @@ -635,14 +747,18 @@ public boolean continueLoading(long positionUs) { if (isPendingReset()) { chunkQueue = Collections.emptyList(); loadPositionUs = pendingResetPositionUs; + for (SampleQueue sampleQueue : sampleQueues) { + sampleQueue.setStartTimeUs(pendingResetPositionUs); + } } else { chunkQueue = readOnlyMediaChunks; HlsMediaChunk lastMediaChunk = getLastMediaChunk(); loadPositionUs = lastMediaChunk.isLoadCompleted() ? 
lastMediaChunk.endTimeUs - : Math.max(lastSeekPositionUs, lastMediaChunk.startTimeUs); + : max(lastSeekPositionUs, lastMediaChunk.startTimeUs); } + nextChunkHolder.clear(); chunkSource.getNextChunk( positionUs, loadPositionUs, @@ -650,9 +766,8 @@ public boolean continueLoading(long positionUs) { /* allowEndOfStream= */ prepared || !chunkQueue.isEmpty(), nextChunkHolder); boolean endOfStream = nextChunkHolder.endOfStream; - Chunk loadable = nextChunkHolder.chunk; - Uri playlistUrlToLoad = nextChunkHolder.playlistUrl; - nextChunkHolder.clear(); + @Nullable Chunk loadable = nextChunkHolder.chunk; + @Nullable Uri playlistUrlToLoad = nextChunkHolder.playlistUrl; if (endOfStream) { pendingResetPositionUs = C.TIME_UNSET; @@ -668,25 +783,21 @@ public boolean continueLoading(long positionUs) { } if (isMediaChunk(loadable)) { - pendingResetPositionUs = C.TIME_UNSET; - HlsMediaChunk mediaChunk = (HlsMediaChunk) loadable; - mediaChunk.init(this); - mediaChunks.add(mediaChunk); - upstreamTrackFormat = mediaChunk.trackFormat; + initMediaChunkLoad((HlsMediaChunk) loadable); } + loadingChunk = loadable; long elapsedRealtimeMs = loader.startLoading( loadable, this, loadErrorHandlingPolicy.getMinimumLoadableRetryCount(loadable.type)); - eventDispatcher.loadStarted( - loadable.dataSpec, + mediaSourceEventDispatcher.loadStarted( + new LoadEventInfo(loadable.loadTaskId, loadable.dataSpec, elapsedRealtimeMs), loadable.type, trackType, loadable.trackFormat, loadable.trackSelectionReason, loadable.trackSelectionData, loadable.startTimeUs, - loadable.endTimeUs, - elapsedRealtimeMs); + loadable.endTimeUs); return true; } @@ -697,28 +808,59 @@ public boolean isLoading() { @Override public void reevaluateBuffer(long positionUs) { - // Do nothing. + if (loader.hasFatalError() || isPendingReset()) { + return; + } + + if (loader.isLoading()) { + Assertions.checkNotNull(loadingChunk); + if (chunkSource.shouldCancelLoad(positionUs, loadingChunk, readOnlyMediaChunks)) { + loader.cancelLoading(); + } + return; + } + + int newQueueSize = readOnlyMediaChunks.size(); + while (newQueueSize > 0 + && chunkSource.getChunkPublicationState(readOnlyMediaChunks.get(newQueueSize - 1)) + == CHUNK_PUBLICATION_STATE_REMOVED) { + newQueueSize--; + } + if (newQueueSize < readOnlyMediaChunks.size()) { + discardUpstream(newQueueSize); + } + + int preferredQueueSize = chunkSource.getPreferredQueueSize(positionUs, readOnlyMediaChunks); + if (preferredQueueSize < mediaChunks.size()) { + discardUpstream(preferredQueueSize); + } } // Loader.Callback implementation. 
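The Loader.Callback hunks below route load errors through LoadErrorHandlingPolicy using the new LoadErrorInfo-based signatures. A minimal sketch of how a caller could plug in a custom policy via the factory's setLoadErrorHandlingPolicy; the class name QuickFailLoadErrorHandlingPolicy and the three-attempt threshold are hypothetical and not part of this diff.

    import com.google.android.exoplayer2.C;
    import com.google.android.exoplayer2.upstream.DefaultLoadErrorHandlingPolicy;
    import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy.LoadErrorInfo;

    final class QuickFailLoadErrorHandlingPolicy extends DefaultLoadErrorHandlingPolicy {
      @Override
      public long getRetryDelayMsFor(LoadErrorInfo loadErrorInfo) {
        // Back off linearly, but give up (C.TIME_UNSET -> fatal) after three attempts.
        return loadErrorInfo.errorCount > 3 ? C.TIME_UNSET : 1000L * loadErrorInfo.errorCount;
      }
    }

    // Wiring it in (assumes the factory and mediaItem from the sketch earlier in this file):
    // new HlsMediaSource.Factory(dataSourceFactory)
    //     .setLoadErrorHandlingPolicy(new QuickFailLoadErrorHandlingPolicy())
    //     .createMediaSource(mediaItem);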
@Override public void onLoadCompleted(Chunk loadable, long elapsedRealtimeMs, long loadDurationMs) { + loadingChunk = null; chunkSource.onChunkLoadCompleted(loadable); - eventDispatcher.loadCompleted( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + mediaSourceEventDispatcher.loadCompleted( + loadEventInfo, loadable.type, trackType, loadable.trackFormat, loadable.trackSelectionReason, loadable.trackSelectionData, loadable.startTimeUs, - loadable.endTimeUs, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded()); + loadable.endTimeUs); if (!prepared) { continueLoading(lastSeekPositionUs); } else { @@ -727,24 +869,32 @@ public void onLoadCompleted(Chunk loadable, long elapsedRealtimeMs, long loadDur } @Override - public void onLoadCanceled(Chunk loadable, long elapsedRealtimeMs, long loadDurationMs, - boolean released) { - eventDispatcher.loadCanceled( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), + public void onLoadCanceled( + Chunk loadable, long elapsedRealtimeMs, long loadDurationMs, boolean released) { + loadingChunk = null; + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + mediaSourceEventDispatcher.loadCanceled( + loadEventInfo, loadable.type, trackType, loadable.trackFormat, loadable.trackSelectionReason, loadable.trackSelectionData, loadable.startTimeUs, - loadable.endTimeUs, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded()); + loadable.endTimeUs); if (!released) { - resetSampleQueues(); + if (isPendingReset() || enabledTrackGroupCount == 0) { + resetSampleQueues(); + } if (enabledTrackGroupCount > 0) { callback.onContinueLoadingRequested(this); } @@ -758,41 +908,73 @@ public LoadErrorAction onLoadError( long loadDurationMs, IOException error, int errorCount) { - long bytesLoaded = loadable.bytesLoaded(); boolean isMediaChunk = isMediaChunk(loadable); - boolean blacklistSucceeded = false; + if (isMediaChunk + && !((HlsMediaChunk) loadable).isPublished() + && error instanceof HttpDataSource.InvalidResponseCodeException) { + int responseCode = ((HttpDataSource.InvalidResponseCodeException) error).responseCode; + if (responseCode == 410 || responseCode == 404) { + // According to RFC 8216, Section 6.2.6 a server should respond with an HTTP 404 (Not found) + // for requests of hinted parts that are replaced and not available anymore. We've seen test + // streams with HTTP 410 (Gone) also. 
+ return Loader.RETRY; + } + } + long bytesLoaded = loadable.bytesLoaded(); + boolean exclusionSucceeded = false; + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + bytesLoaded); + MediaLoadData mediaLoadData = + new MediaLoadData( + loadable.type, + trackType, + loadable.trackFormat, + loadable.trackSelectionReason, + loadable.trackSelectionData, + Util.usToMs(loadable.startTimeUs), + Util.usToMs(loadable.endTimeUs)); + LoadErrorInfo loadErrorInfo = + new LoadErrorInfo(loadEventInfo, mediaLoadData, error, errorCount); LoadErrorAction loadErrorAction; - - long blacklistDurationMs = - loadErrorHandlingPolicy.getBlacklistDurationMsFor( - loadable.type, loadDurationMs, error, errorCount); - if (blacklistDurationMs != C.TIME_UNSET) { - blacklistSucceeded = chunkSource.maybeBlacklistTrack(loadable, blacklistDurationMs); + @Nullable + LoadErrorHandlingPolicy.FallbackSelection fallbackSelection = + loadErrorHandlingPolicy.getFallbackSelectionFor( + createFallbackOptions(chunkSource.getTrackSelection()), loadErrorInfo); + if (fallbackSelection != null + && fallbackSelection.type == LoadErrorHandlingPolicy.FALLBACK_TYPE_TRACK) { + exclusionSucceeded = + chunkSource.maybeExcludeTrack(loadable, fallbackSelection.exclusionDurationMs); } - if (blacklistSucceeded) { + if (exclusionSucceeded) { if (isMediaChunk && bytesLoaded == 0) { HlsMediaChunk removed = mediaChunks.remove(mediaChunks.size() - 1); Assertions.checkState(removed == loadable); if (mediaChunks.isEmpty()) { pendingResetPositionUs = lastSeekPositionUs; + } else { + Iterables.getLast(mediaChunks).invalidateExtractor(); } } loadErrorAction = Loader.DONT_RETRY; - } else /* did not blacklist */ { - long retryDelayMs = - loadErrorHandlingPolicy.getRetryDelayMsFor( - loadable.type, loadDurationMs, error, errorCount); + } else /* did not exclude */ { + long retryDelayMs = loadErrorHandlingPolicy.getRetryDelayMsFor(loadErrorInfo); loadErrorAction = retryDelayMs != C.TIME_UNSET ? Loader.createRetryAction(/* resetErrorCount= */ false, retryDelayMs) : Loader.DONT_RETRY_FATAL; } - eventDispatcher.loadError( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), + boolean wasCanceled = !loadErrorAction.isRetry(); + mediaSourceEventDispatcher.loadError( + loadEventInfo, loadable.type, trackType, loadable.trackFormat, @@ -800,13 +982,14 @@ public LoadErrorAction onLoadError( loadable.trackSelectionData, loadable.startTimeUs, loadable.endTimeUs, - elapsedRealtimeMs, - loadDurationMs, - bytesLoaded, error, - /* wasCanceled= */ !loadErrorAction.isRetry()); + wasCanceled); + if (wasCanceled) { + loadingChunk = null; + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + } - if (blacklistSucceeded) { + if (exclusionSucceeded) { if (!prepared) { continueLoading(lastSeekPositionUs); } else { @@ -819,24 +1002,55 @@ public LoadErrorAction onLoadError( // Called by the consuming thread, but only when there is no loading thread. /** - * Initializes the wrapper for loading a chunk. + * Performs initialization for a media chunk that's about to start loading. * - * @param chunkUid The chunk's uid. - * @param shouldSpliceIn Whether the samples parsed from the chunk should be spliced into any - * samples already queued to the wrapper. + * @param chunk The media chunk that's about to start loading. 
*/ - public void init(int chunkUid, boolean shouldSpliceIn) { - this.chunkUid = chunkUid; + private void initMediaChunkLoad(HlsMediaChunk chunk) { + sourceChunk = chunk; + upstreamTrackFormat = chunk.trackFormat; + pendingResetPositionUs = C.TIME_UNSET; + mediaChunks.add(chunk); + ImmutableList.Builder sampleQueueWriteIndicesBuilder = ImmutableList.builder(); for (SampleQueue sampleQueue : sampleQueues) { - sampleQueue.sourceId(chunkUid); + sampleQueueWriteIndicesBuilder.add(sampleQueue.getWriteIndex()); } - if (shouldSpliceIn) { - for (SampleQueue sampleQueue : sampleQueues) { + chunk.init(/* output= */ this, sampleQueueWriteIndicesBuilder.build()); + for (HlsSampleQueue sampleQueue : sampleQueues) { + sampleQueue.setSourceChunk(chunk); + if (chunk.shouldSpliceIn) { sampleQueue.splice(); } } } + private void discardUpstream(int preferredQueueSize) { + Assertions.checkState(!loader.isLoading()); + + int newQueueSize = C.LENGTH_UNSET; + for (int i = preferredQueueSize; i < mediaChunks.size(); i++) { + if (canDiscardUpstreamMediaChunksFromIndex(i)) { + newQueueSize = i; + break; + } + } + if (newQueueSize == C.LENGTH_UNSET) { + return; + } + + long endTimeUs = getLastMediaChunk().endTimeUs; + HlsMediaChunk firstRemovedChunk = discardUpstreamMediaChunksFromIndex(newQueueSize); + if (mediaChunks.isEmpty()) { + pendingResetPositionUs = lastSeekPositionUs; + } else { + Iterables.getLast(mediaChunks).invalidateExtractor(); + } + loadingFinished = false; + + mediaSourceEventDispatcher.upstreamDiscarded( + primarySampleQueueType, firstRemovedChunk.startTimeUs, endTimeUs); + } + // ExtractorOutput implementation. Called by the loading thread. @Override @@ -856,7 +1070,7 @@ public TrackOutput track(int id, int type) { if (trackOutput == null) { if (tracksEnded) { - return createDummyTrackOutput(id, type); + return createFakeTrackOutput(id, type); } else { // The relevant SampleQueue hasn't been constructed yet - so construct it. trackOutput = createSampleQueue(id, type); @@ -885,7 +1099,7 @@ public TrackOutput track(int id, int type) { * * @param id The ID of the track. * @param type The type of the track, must be one of {@link #MAPPABLE_TYPES}. - * @return The the mapped {@link TrackOutput}, or null if it's not been created yet. + * @return The mapped {@link TrackOutput}, or null if it's not been created yet. */ @Nullable private TrackOutput getMappedTrackOutput(int id, int type) { @@ -900,28 +1114,27 @@ private TrackOutput getMappedTrackOutput(int id, int type) { } return sampleQueueTrackIds[sampleQueueIndex] == id ? 
sampleQueues[sampleQueueIndex] - : createDummyTrackOutput(id, type); + : createFakeTrackOutput(id, type); } private SampleQueue createSampleQueue(int id, int type) { int trackCount = sampleQueues.length; boolean isAudioVideo = type == C.TRACK_TYPE_AUDIO || type == C.TRACK_TYPE_VIDEO; - FormatAdjustingSampleQueue trackOutput = - new FormatAdjustingSampleQueue( - allocator, - /* playbackLooper= */ handler.getLooper(), - drmSessionManager, - overridingDrmInitData); + HlsSampleQueue sampleQueue = + new HlsSampleQueue(allocator, drmSessionManager, drmEventDispatcher, overridingDrmInitData); + sampleQueue.setStartTimeUs(lastSeekPositionUs); if (isAudioVideo) { - trackOutput.setDrmInitData(drmInitData); + sampleQueue.setDrmInitData(drmInitData); + } + sampleQueue.setSampleOffsetUs(sampleOffsetUs); + if (sourceChunk != null) { + sampleQueue.setSourceChunk(sourceChunk); } - trackOutput.setSampleOffsetUs(sampleOffsetUs); - trackOutput.sourceId(chunkUid); - trackOutput.setUpstreamFormatChangeListener(this); + sampleQueue.setUpstreamFormatChangeListener(this); sampleQueueTrackIds = Arrays.copyOf(sampleQueueTrackIds, trackCount + 1); sampleQueueTrackIds[trackCount] = id; - sampleQueues = Util.nullSafeArrayAppend(sampleQueues, trackOutput); + sampleQueues = Util.nullSafeArrayAppend(sampleQueues, sampleQueue); sampleQueueIsAudioVideoFlags = Arrays.copyOf(sampleQueueIsAudioVideoFlags, trackCount + 1); sampleQueueIsAudioVideoFlags[trackCount] = isAudioVideo; haveAudioVideoSampleQueues |= sampleQueueIsAudioVideoFlags[trackCount]; @@ -932,7 +1145,7 @@ private SampleQueue createSampleQueue(int id, int type) { primarySampleQueueType = type; } sampleQueuesEnabledStates = Arrays.copyOf(sampleQueuesEnabledStates, trackCount + 1); - return trackOutput; + return sampleQueue; } @Override @@ -1014,7 +1227,7 @@ public void setDrmInitData(@Nullable DrmInitData drmInitData) { private void updateSampleStreams(@NullableType SampleStream[] streams) { hlsSampleStreams.clear(); - for (SampleStream stream : streams) { + for (@Nullable SampleStream stream : streams) { if (stream != null) { hlsSampleStreams.add((HlsSampleStream) stream); } @@ -1032,6 +1245,38 @@ private boolean finishedReadingChunk(HlsMediaChunk chunk) { return true; } + private boolean canDiscardUpstreamMediaChunksFromIndex(int mediaChunkIndex) { + for (int i = mediaChunkIndex; i < mediaChunks.size(); i++) { + if (mediaChunks.get(i).shouldSpliceIn) { + // Discarding not possible because a spliced-in chunk potentially removed sample metadata + // from the previous chunks. + // TODO: Keep sample metadata to allow restoring these chunks [internal b/159904763]. + return false; + } + } + HlsMediaChunk mediaChunk = mediaChunks.get(mediaChunkIndex); + for (int i = 0; i < sampleQueues.length; i++) { + int discardFromIndex = mediaChunk.getFirstSampleIndex(/* sampleQueueIndex= */ i); + if (sampleQueues[i].getReadIndex() > discardFromIndex) { + // Discarding not possible because we already read from the chunk. + // TODO: Sparse tracks (e.g. ID3) may prevent discarding in almost all cases because it + // means that most chunks have been read from already. See [internal b/161126666]. 
+ return false; + } + } + return true; + } + + private HlsMediaChunk discardUpstreamMediaChunksFromIndex(int chunkIndex) { + HlsMediaChunk firstRemovedChunk = mediaChunks.get(chunkIndex); + Util.removeRange(mediaChunks, /* fromIndex= */ chunkIndex, /* toIndex= */ mediaChunks.size()); + for (int i = 0; i < sampleQueues.length; i++) { + int discardFromIndex = firstRemovedChunk.getFirstSampleIndex(/* sampleQueueIndex= */ i); + sampleQueues[i].discardUpstreamSamples(discardFromIndex); + } + return firstRemovedChunk; + } + private void resetSampleQueues() { for (SampleQueue sampleQueue : sampleQueues) { sampleQueue.reset(pendingResetUpstreamFormats); @@ -1054,8 +1299,8 @@ private void maybeFinishPrepare() { } } if (trackGroups != null) { - // The track groups were created with master playlist information. They only need to be mapped - // to a sample queue. + // The track groups were created with multivariant playlist information. They only need to be + // mapped to a sample queue. mapSampleQueuesToMatchTrackGroups(); } else { // Tracks are created using media segment information. @@ -1074,7 +1319,8 @@ private void mapSampleQueuesToMatchTrackGroups() { for (int i = 0; i < trackGroupCount; i++) { for (int queueIndex = 0; queueIndex < sampleQueues.length; queueIndex++) { SampleQueue sampleQueue = sampleQueues[queueIndex]; - if (formatsMatch(sampleQueue.getUpstreamFormat(), trackGroups.get(i).getFormat(0))) { + Format upstreamFormat = Assertions.checkStateNotNull(sampleQueue.getUpstreamFormat()); + if (formatsMatch(upstreamFormat, trackGroups.get(i).getFormat(0))) { trackGroupToSampleQueueIndex[i] = queueIndex; break; } @@ -1089,18 +1335,18 @@ private void mapSampleQueuesToMatchTrackGroups() { * Builds tracks that are exposed by this {@link HlsSampleStreamWrapper} instance, as well as * internal data-structures required for operation. * - *

<p>Tracks in HLS are complicated. A HLS master playlist contains a number of "variants". Each
-   * variant stream typically contains muxed video, audio and (possibly) additional audio, metadata
-   * and caption tracks. We wish to allow the user to select between an adaptive track that spans
-   * all variants, as well as each individual variant. If multiple audio tracks are present within
-   * each variant then we wish to allow the user to select between those also.
+   * <p>Tracks in HLS are complicated. A HLS multivariant playlist contains a number of "variants".
+   * Each variant stream typically contains muxed video, audio and (possibly) additional audio,
+   * metadata and caption tracks. We wish to allow the user to select between an adaptive track that
+   * spans all variants, as well as each individual variant. If multiple audio tracks are present
+   * within each variant then we wish to allow the user to select between those also.
    *
    * <p>To do this, tracks are constructed as follows. The {@link HlsChunkSource} exposes (N+1)
-   * tracks, where N is the number of variants defined in the HLS master playlist. These consist of
-   * one adaptive track defined to span all variants and a track for each individual variant. The
-   * adaptive track is initially selected. The extractor is then prepared to discover the tracks
-   * inside of each variant stream. The two sets of tracks are then combined by this method to
-   * create a third set, which is the set exposed by this {@link HlsSampleStreamWrapper}:
+   * tracks, where N is the number of variants defined in the HLS multivariant playlist. These
+   * consist of one adaptive track defined to span all variants and a track for each individual
+   * variant. The adaptive track is initially selected. The extractor is then prepared to discover
+   * the tracks inside of each variant stream. The two sets of tracks are then combined by this
+   * method to create a third set, which is the set exposed by this {@link HlsSampleStreamWrapper}:
    *
    * <ul>
    *   <li>
      • The extractor tracks are inspected to infer a "primary" track type. If a video track is @@ -1123,7 +1369,9 @@ private void buildTracksFromSampleStreams() { int primaryExtractorTrackIndex = C.INDEX_UNSET; int extractorTrackCount = sampleQueues.length; for (int i = 0; i < extractorTrackCount; i++) { - String sampleMimeType = sampleQueues[i].getUpstreamFormat().sampleMimeType; + @Nullable + String sampleMimeType = + Assertions.checkStateNotNull(sampleQueues[i].getUpstreamFormat()).sampleMimeType; int trackType; if (MimeTypes.isVideo(sampleMimeType)) { trackType = C.TRACK_TYPE_VIDEO; @@ -1158,25 +1406,36 @@ private void buildTracksFromSampleStreams() { // Construct the set of exposed track groups. TrackGroup[] trackGroups = new TrackGroup[extractorTrackCount]; for (int i = 0; i < extractorTrackCount; i++) { - Format sampleFormat = sampleQueues[i].getUpstreamFormat(); + Format sampleFormat = Assertions.checkStateNotNull(sampleQueues[i].getUpstreamFormat()); if (i == primaryExtractorTrackIndex) { Format[] formats = new Format[chunkSourceTrackCount]; - if (chunkSourceTrackCount == 1) { - formats[0] = sampleFormat.copyWithManifestFormatInfo(chunkSourceTrackGroup.getFormat(0)); - } else { - for (int j = 0; j < chunkSourceTrackCount; j++) { - formats[j] = deriveFormat(chunkSourceTrackGroup.getFormat(j), sampleFormat, true); + for (int j = 0; j < chunkSourceTrackCount; j++) { + Format playlistFormat = chunkSourceTrackGroup.getFormat(j); + if (primaryExtractorTrackType == C.TRACK_TYPE_AUDIO && muxedAudioFormat != null) { + playlistFormat = playlistFormat.withManifestFormatInfo(muxedAudioFormat); } + // If there's only a single variant (chunkSourceTrackCount == 1) then we can safely + // retain all fields from sampleFormat. Else we need to use deriveFormat to retain only + // the fields that will be the same for all variants. + formats[j] = + chunkSourceTrackCount == 1 + ? sampleFormat.withManifestFormatInfo(playlistFormat) + : deriveFormat(playlistFormat, sampleFormat, /* propagateBitrates= */ true); } - trackGroups[i] = new TrackGroup(formats); + trackGroups[i] = new TrackGroup(uid, formats); primaryTrackGroupIndex = i; } else { - Format trackFormat = + @Nullable + Format playlistFormat = primaryExtractorTrackType == C.TRACK_TYPE_VIDEO && MimeTypes.isAudio(sampleFormat.sampleMimeType) ? muxedAudioFormat : null; - trackGroups[i] = new TrackGroup(deriveFormat(trackFormat, sampleFormat, false)); + String muxedTrackGroupId = uid + ":muxed:" + (i < primaryExtractorTrackIndex ? 
i : i - 1); + trackGroups[i] = + new TrackGroup( + muxedTrackGroupId, + deriveFormat(playlistFormat, sampleFormat, /* propagateBitrates= */ false)); } } this.trackGroups = createTrackGroupArrayWithDrmInfo(trackGroups); @@ -1190,14 +1449,9 @@ private TrackGroupArray createTrackGroupArrayWithDrmInfo(TrackGroup[] trackGroup Format[] exposedFormats = new Format[trackGroup.length]; for (int j = 0; j < trackGroup.length; j++) { Format format = trackGroup.getFormat(j); - if (format.drmInitData != null) { - format = - format.copyWithExoMediaCryptoType( - drmSessionManager.getExoMediaCryptoType(format.drmInitData)); - } - exposedFormats[j] = format; + exposedFormats[j] = format.copyWithCryptoType(drmSessionManager.getCryptoType(format)); } - trackGroups[i] = new TrackGroup(exposedFormats); + trackGroups[i] = new TrackGroup(trackGroup.id, exposedFormats); } return new TrackGroupArray(trackGroups); } @@ -1265,43 +1519,80 @@ private static int getTrackTypeScore(int trackType) { } /** - * Derives a track sample format from the corresponding format in the master playlist, and a - * sample format that may have been obtained from a chunk belonging to a different track. + * Derives a track sample format from the corresponding format in the multivariant playlist, and a + * sample format that may have been obtained from a chunk belonging to a different track in the + * same track group. * - * @param playlistFormat The format information obtained from the master playlist. - * @param sampleFormat The format information obtained from the samples. - * @param propagateBitrate Whether the bitrate from the playlist format should be included in the - * derived format. + *

        Note: Since the sample format may have been obtained from a chunk belonging to a different + * track, it should not be used as a source for data that may vary between tracks. + * + * @param playlistFormat The format information obtained from the multivariant playlist. + * @param sampleFormat The format information obtained from samples within a chunk. The chunk may + * belong to a different track in the same track group. + * @param propagateBitrates Whether the bitrates from the playlist format should be included in + * the derived format. * @return The derived track format. */ private static Format deriveFormat( - @Nullable Format playlistFormat, Format sampleFormat, boolean propagateBitrate) { + @Nullable Format playlistFormat, Format sampleFormat, boolean propagateBitrates) { if (playlistFormat == null) { return sampleFormat; } - int bitrate = propagateBitrate ? playlistFormat.bitrate : Format.NO_VALUE; - int channelCount = - playlistFormat.channelCount != Format.NO_VALUE - ? playlistFormat.channelCount - : sampleFormat.channelCount; + int sampleTrackType = MimeTypes.getTrackType(sampleFormat.sampleMimeType); - String codecs = Util.getCodecsOfType(playlistFormat.codecs, sampleTrackType); - String mimeType = MimeTypes.getMediaMimeType(codecs); - if (mimeType == null) { - mimeType = sampleFormat.sampleMimeType; - } - return sampleFormat.copyWithContainerInfo( - playlistFormat.id, - playlistFormat.label, - mimeType, - codecs, - playlistFormat.metadata, - bitrate, - playlistFormat.width, - playlistFormat.height, - channelCount, - playlistFormat.selectionFlags, - playlistFormat.language); + @Nullable String sampleMimeType; + @Nullable String codecs; + if (Util.getCodecCountOfType(playlistFormat.codecs, sampleTrackType) == 1) { + // We can unequivocally map this track to a playlist variant because only one codec string + // matches this track's type. + codecs = Util.getCodecsOfType(playlistFormat.codecs, sampleTrackType); + sampleMimeType = MimeTypes.getMediaMimeType(codecs); + } else { + // The variant assigns more than one codec string to this track. We choose whichever codec + // string matches the sample mime type. This can happen when different languages are encoded + // using different codecs. + codecs = + MimeTypes.getCodecsCorrespondingToMimeType( + playlistFormat.codecs, sampleFormat.sampleMimeType); + sampleMimeType = sampleFormat.sampleMimeType; + } + + Format.Builder formatBuilder = + sampleFormat + .buildUpon() + .setId(playlistFormat.id) + .setLabel(playlistFormat.label) + .setLanguage(playlistFormat.language) + .setSelectionFlags(playlistFormat.selectionFlags) + .setRoleFlags(playlistFormat.roleFlags) + .setAverageBitrate(propagateBitrates ? playlistFormat.averageBitrate : Format.NO_VALUE) + .setPeakBitrate(propagateBitrates ? 
playlistFormat.peakBitrate : Format.NO_VALUE) + .setCodecs(codecs); + + if (sampleTrackType == C.TRACK_TYPE_VIDEO) { + formatBuilder + .setWidth(playlistFormat.width) + .setHeight(playlistFormat.height) + .setFrameRate(playlistFormat.frameRate); + } + + if (sampleMimeType != null) { + formatBuilder.setSampleMimeType(sampleMimeType); + } + + if (playlistFormat.channelCount != Format.NO_VALUE && sampleTrackType == C.TRACK_TYPE_AUDIO) { + formatBuilder.setChannelCount(playlistFormat.channelCount); + } + + if (playlistFormat.metadata != null) { + Metadata metadata = playlistFormat.metadata; + if (sampleFormat.metadata != null) { + metadata = sampleFormat.metadata.copyWithAppendedEntriesFrom(metadata); + } + formatBuilder.setMetadata(metadata); + } + + return formatBuilder.build(); } private static boolean isMediaChunk(Chunk chunk) { @@ -1309,8 +1600,8 @@ private static boolean isMediaChunk(Chunk chunk) { } private static boolean formatsMatch(Format manifestFormat, Format sampleFormat) { - String manifestFormatMimeType = manifestFormat.sampleMimeType; - String sampleFormatMimeType = sampleFormat.sampleMimeType; + @Nullable String manifestFormatMimeType = manifestFormat.sampleMimeType; + @Nullable String sampleFormatMimeType = sampleFormat.sampleMimeType; int manifestFormatTrackType = MimeTypes.getTrackType(manifestFormatMimeType); if (manifestFormatTrackType != C.TRACK_TYPE_TEXT) { return manifestFormatTrackType == MimeTypes.getTrackType(sampleFormatMimeType); @@ -1324,30 +1615,77 @@ private static boolean formatsMatch(Format manifestFormat, Format sampleFormat) return true; } - private static DummyTrackOutput createDummyTrackOutput(int id, int type) { + private static DummyTrackOutput createFakeTrackOutput(int id, int type) { Log.w(TAG, "Unmapped track with id " + id + " of type " + type); return new DummyTrackOutput(); } - private static final class FormatAdjustingSampleQueue extends SampleQueue { + /** + * A {@link SampleQueue} that adds HLS specific functionality: + * + *

<ul>
+   *   <li>Detection of spurious discontinuities, by checking sample timestamps against the range
+   *       expected for the currently loading chunk.
+   *   <li>Stripping private timestamp metadata from {@link Format Formats} to avoid an excessive
+   *       number of format switches in the queue.
+   *   <li>Overriding of {@link Format#drmInitData}.
+   * </ul>
        + */ + private static final class HlsSampleQueue extends SampleQueue { + + // TODO: Uncomment this to reject samples with unexpected timestamps. See + // https://github.com/google/ExoPlayer/issues/7030. + // /** + // * The fraction of the chunk duration from which timestamps of samples loaded from within a + // * chunk are allowed to deviate from the expected range. + // */ + // private static final double MAX_TIMESTAMP_DEVIATION_FRACTION = 0.5; + // + // /** + // * A minimum tolerance for sample timestamps in microseconds. Timestamps of samples loaded + // * from within a chunk are always allowed to deviate up to this amount from the expected + // * range. + // */ + // private static final long MIN_TIMESTAMP_DEVIATION_TOLERANCE_US = 4_000_000; + // + // @Nullable private HlsMediaChunk sourceChunk; + // private long sourceChunkLastSampleTimeUs; + // private long minAllowedSampleTimeUs; + // private long maxAllowedSampleTimeUs; private final Map overridingDrmInitData; @Nullable private DrmInitData drmInitData; - public FormatAdjustingSampleQueue( + private HlsSampleQueue( Allocator allocator, - Looper playbackLooper, - DrmSessionManager drmSessionManager, + DrmSessionManager drmSessionManager, + DrmSessionEventListener.EventDispatcher eventDispatcher, Map overridingDrmInitData) { - super(allocator, playbackLooper, drmSessionManager); + super(allocator, drmSessionManager, eventDispatcher); this.overridingDrmInitData = overridingDrmInitData; } + public void setSourceChunk(HlsMediaChunk chunk) { + sourceId(chunk.uid); + + // TODO: Uncomment this to reject samples with unexpected timestamps. See + // https://github.com/google/ExoPlayer/issues/7030. + // sourceChunk = chunk; + // sourceChunkLastSampleTimeUs = C.TIME_UNSET; + // long allowedDeviationUs = + // Math.max( + // (long) ((chunk.endTimeUs - chunk.startTimeUs) * MAX_TIMESTAMP_DEVIATION_FRACTION), + // MIN_TIMESTAMP_DEVIATION_TOLERANCE_US); + // minAllowedSampleTimeUs = chunk.startTimeUs - allowedDeviationUs; + // maxAllowedSampleTimeUs = chunk.endTimeUs + allowedDeviationUs; + } + public void setDrmInitData(@Nullable DrmInitData drmInitData) { this.drmInitData = drmInitData; invalidateUpstreamFormatAdjustment(); } + @SuppressWarnings("ReferenceEquality") @Override public Format getAdjustedUpstreamFormat(Format format) { @Nullable @@ -1359,8 +1697,11 @@ public Format getAdjustedUpstreamFormat(Format format) { drmInitData = overridingDrmInitData; } } - return super.getAdjustedUpstreamFormat( - format.copyWithAdjustments(drmInitData, getAdjustedMetadata(format.metadata))); + @Nullable Metadata metadata = getAdjustedMetadata(format.metadata); + if (drmInitData != format.drmInitData || metadata != format.metadata) { + format = format.buildUpon().setDrmInitData(drmInitData).setMetadata(metadata).build(); + } + return super.getAdjustedUpstreamFormat(format); } /** @@ -1399,24 +1740,38 @@ private Metadata getAdjustedMetadata(@Nullable Metadata metadata) { } return new Metadata(newMetadataEntries); } + + @Override + public void sampleMetadata( + long timeUs, + @C.BufferFlags int flags, + int size, + int offset, + @Nullable CryptoData cryptoData) { + // TODO: Uncomment this to reject samples with unexpected timestamps. See + // https://github.com/google/ExoPlayer/issues/7030. 
+ // if (timeUs < minAllowedSampleTimeUs || timeUs > maxAllowedSampleTimeUs) { + // Util.sneakyThrow( + // new UnexpectedSampleTimestampException( + // sourceChunk, sourceChunkLastSampleTimeUs, timeUs)); + // } + // sourceChunkLastSampleTimeUs = timeUs; + super.sampleMetadata(timeUs, flags, size, offset, cryptoData); + } } private static class EmsgUnwrappingTrackOutput implements TrackOutput { - private static final String TAG = "EmsgUnwrappingTrackOutput"; - - // TODO(ibaker): Create a Formats util class with common constants like this. + // TODO: Create a Formats util class with common constants like this. private static final Format ID3_FORMAT = - Format.createSampleFormat( - /* id= */ null, MimeTypes.APPLICATION_ID3, Format.OFFSET_SAMPLE_RELATIVE); + new Format.Builder().setSampleMimeType(MimeTypes.APPLICATION_ID3).build(); private static final Format EMSG_FORMAT = - Format.createSampleFormat( - /* id= */ null, MimeTypes.APPLICATION_EMSG, Format.OFFSET_SAMPLE_RELATIVE); + new Format.Builder().setSampleMimeType(MimeTypes.APPLICATION_EMSG).build(); private final EventMessageDecoder emsgDecoder; private final TrackOutput delegate; private final Format delegateFormat; - @MonotonicNonNull private Format format; + private @MonotonicNonNull Format format; private byte[] buffer; private int bufferPosition; @@ -1447,8 +1802,9 @@ public void format(Format format) { } @Override - public int sampleData(ExtractorInput input, int length, boolean allowEndOfInput) - throws IOException, InterruptedException { + public int sampleData( + DataReader input, int length, boolean allowEndOfInput, @SampleDataPart int sampleDataPart) + throws IOException { ensureBufferCapacity(bufferPosition + length); int numBytesRead = input.read(buffer, bufferPosition, length); if (numBytesRead == C.RESULT_END_OF_INPUT) { @@ -1463,9 +1819,9 @@ public int sampleData(ExtractorInput input, int length, boolean allowEndOfInput) } @Override - public void sampleData(ParsableByteArray buffer, int length) { + public void sampleData(ParsableByteArray data, int length, @SampleDataPart int sampleDataPart) { ensureBufferCapacity(bufferPosition + length); - buffer.readBytes(this.buffer, bufferPosition, length); + data.readBytes(this.buffer, bufferPosition, length); bufferPosition += length; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsTrackMetadataEntry.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsTrackMetadataEntry.java index f26a9b8e9a..9a9566b63e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsTrackMetadataEntry.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/HlsTrackMetadataEntry.java @@ -19,6 +19,7 @@ import android.os.Parcelable; import android.text.TextUtils; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.metadata.Metadata; import java.util.ArrayList; import java.util.Collections; @@ -30,8 +31,14 @@ public final class HlsTrackMetadataEntry implements Metadata.Entry { /** Holds attributes defined in an EXT-X-STREAM-INF tag. */ public static final class VariantInfo implements Parcelable { - /** The bitrate as declared by the EXT-X-STREAM-INF tag. */ - public final long bitrate; + /** + * The average bitrate as declared by the AVERAGE-BANDWIDTH attribute of the EXT-X-STREAM-INF + * tag, or {@link Format#NO_VALUE} if the attribute is not declared. 
+ */ + public final int averageBitrate; + + /** The peak bitrate as declared by the BANDWIDTH attribute of the EXT-X-STREAM-INF tag. */ + public final int peakBitrate; /** * The VIDEO value as defined in the EXT-X-STREAM-INF tag, or null if the VIDEO attribute is not @@ -60,19 +67,22 @@ public static final class VariantInfo implements Parcelable { /** * Creates an instance. * - * @param bitrate See {@link #bitrate}. + * @param averageBitrate See {@link #averageBitrate}. + * @param peakBitrate See {@link #peakBitrate}. * @param videoGroupId See {@link #videoGroupId}. * @param audioGroupId See {@link #audioGroupId}. * @param subtitleGroupId See {@link #subtitleGroupId}. * @param captionGroupId See {@link #captionGroupId}. */ public VariantInfo( - long bitrate, + int averageBitrate, + int peakBitrate, @Nullable String videoGroupId, @Nullable String audioGroupId, @Nullable String subtitleGroupId, @Nullable String captionGroupId) { - this.bitrate = bitrate; + this.averageBitrate = averageBitrate; + this.peakBitrate = peakBitrate; this.videoGroupId = videoGroupId; this.audioGroupId = audioGroupId; this.subtitleGroupId = subtitleGroupId; @@ -80,7 +90,8 @@ public VariantInfo( } /* package */ VariantInfo(Parcel in) { - bitrate = in.readLong(); + averageBitrate = in.readInt(); + peakBitrate = in.readInt(); videoGroupId = in.readString(); audioGroupId = in.readString(); subtitleGroupId = in.readString(); @@ -96,7 +107,8 @@ public boolean equals(@Nullable Object other) { return false; } VariantInfo that = (VariantInfo) other; - return bitrate == that.bitrate + return averageBitrate == that.averageBitrate + && peakBitrate == that.peakBitrate && TextUtils.equals(videoGroupId, that.videoGroupId) && TextUtils.equals(audioGroupId, that.audioGroupId) && TextUtils.equals(subtitleGroupId, that.subtitleGroupId) @@ -105,7 +117,8 @@ public boolean equals(@Nullable Object other) { @Override public int hashCode() { - int result = (int) (bitrate ^ (bitrate >>> 32)); + int result = averageBitrate; + result = 31 * result + peakBitrate; result = 31 * result + (videoGroupId != null ? videoGroupId.hashCode() : 0); result = 31 * result + (audioGroupId != null ? audioGroupId.hashCode() : 0); result = 31 * result + (subtitleGroupId != null ? subtitleGroupId.hashCode() : 0); @@ -122,7 +135,8 @@ public int describeContents() { @Override public void writeToParcel(Parcel dest, int flags) { - dest.writeLong(bitrate); + dest.writeInt(averageBitrate); + dest.writeInt(peakBitrate); dest.writeString(videoGroupId); dest.writeString(audioGroupId); dest.writeString(subtitleGroupId); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/MediaParserHlsMediaChunkExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/MediaParserHlsMediaChunkExtractor.java new file mode 100644 index 0000000000..5b7956eb0d --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/MediaParserHlsMediaChunkExtractor.java @@ -0,0 +1,302 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.hls; + +import static android.media.MediaParser.PARAMETER_TS_IGNORE_AAC_STREAM; +import static android.media.MediaParser.PARAMETER_TS_IGNORE_AVC_STREAM; +import static android.media.MediaParser.PARAMETER_TS_IGNORE_SPLICE_INFO_STREAM; +import static android.media.MediaParser.PARAMETER_TS_MODE; +import static com.google.android.exoplayer2.source.mediaparser.MediaParserUtil.PARAMETER_EAGERLY_EXPOSE_TRACK_TYPE; +import static com.google.android.exoplayer2.source.mediaparser.MediaParserUtil.PARAMETER_EXPOSE_CAPTION_FORMATS; +import static com.google.android.exoplayer2.source.mediaparser.MediaParserUtil.PARAMETER_IGNORE_TIMESTAMP_OFFSET; +import static com.google.android.exoplayer2.source.mediaparser.MediaParserUtil.PARAMETER_IN_BAND_CRYPTO_INFO; +import static com.google.android.exoplayer2.source.mediaparser.MediaParserUtil.PARAMETER_OVERRIDE_IN_BAND_CAPTION_DECLARATIONS; + +import android.annotation.SuppressLint; +import android.media.MediaFormat; +import android.media.MediaParser; +import android.media.MediaParser.OutputConsumer; +import android.media.MediaParser.SeekPoint; +import android.text.TextUtils; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.analytics.PlayerId; +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.source.mediaparser.InputReaderAdapterV30; +import com.google.android.exoplayer2.source.mediaparser.MediaParserUtil; +import com.google.android.exoplayer2.source.mediaparser.OutputConsumerAdapterV30; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.FileTypes; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import java.io.IOException; + +/** {@link HlsMediaChunkExtractor} implemented on top of the platform's {@link MediaParser}. */ +@RequiresApi(30) +public final class MediaParserHlsMediaChunkExtractor implements HlsMediaChunkExtractor { + + /** + * {@link HlsExtractorFactory} implementation that produces {@link + * MediaParserHlsMediaChunkExtractor} for all container formats except WebVTT, for which a {@link + * BundledHlsMediaChunkExtractor} is returned. + */ + public static final HlsExtractorFactory FACTORY = + (uri, + format, + muxedCaptionFormats, + timestampAdjuster, + responseHeaders, + sniffingExtractorInput, + playerId) -> { + if (FileTypes.inferFileTypeFromMimeType(format.sampleMimeType) == FileTypes.WEBVTT) { + // The segment contains WebVTT. MediaParser does not support WebVTT parsing, so we use the + // bundled extractor. + return new BundledHlsMediaChunkExtractor( + new WebvttExtractor(format.language, timestampAdjuster), format, timestampAdjuster); + } + + boolean overrideInBandCaptionDeclarations = muxedCaptionFormats != null; + ImmutableList.Builder muxedCaptionMediaFormatsBuilder = + ImmutableList.builder(); + if (muxedCaptionFormats != null) { + // The manifest contains captions declarations. We use those to determine which captions + // will be exposed by MediaParser. 
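
Illustrative sketch (not part of the upstream diff): the FACTORY defined above only takes effect when an application opts the HLS media source into MediaParser-based extraction. A minimal wiring example is sketched below, assuming the HlsMediaSource.Factory#setExtractorFactory setter available in this version; the class name, the playlistUrl parameter, and the fallback to DefaultHlsExtractorFactory below API 30 (the factory is annotated @RequiresApi(30)) are this sketch's own choices.

import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.source.hls.DefaultHlsExtractorFactory;
import com.google.android.exoplayer2.source.hls.HlsMediaSource;
import com.google.android.exoplayer2.source.hls.MediaParserHlsMediaChunkExtractor;
import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;
import com.google.android.exoplayer2.util.Util;

final class MediaParserHlsFactoryExample {

  /** Builds an HLS media source that uses MediaParser for extraction on API 30+. */
  static HlsMediaSource buildMediaSource(String playlistUrl) {
    return new HlsMediaSource.Factory(new DefaultHttpDataSource.Factory())
        .setExtractorFactory(
            Util.SDK_INT >= 30
                ? MediaParserHlsMediaChunkExtractor.FACTORY
                : new DefaultHlsExtractorFactory())
        .createMediaSource(MediaItem.fromUri(playlistUrl));
  }
}
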
+ for (int i = 0; i < muxedCaptionFormats.size(); i++) { + muxedCaptionMediaFormatsBuilder.add( + MediaParserUtil.toCaptionsMediaFormat(muxedCaptionFormats.get(i))); + } + } else { + // The manifest does not declare any captions in the stream. Imitate the default HLS + // extractor factory and declare a 608 track by default. + muxedCaptionMediaFormatsBuilder.add( + MediaParserUtil.toCaptionsMediaFormat( + new Format.Builder().setSampleMimeType(MimeTypes.APPLICATION_CEA608).build())); + } + + ImmutableList muxedCaptionMediaFormats = + muxedCaptionMediaFormatsBuilder.build(); + + // TODO: Factor out code for optimizing the sniffing order across both factories. + OutputConsumerAdapterV30 outputConsumerAdapter = new OutputConsumerAdapterV30(); + outputConsumerAdapter.setMuxedCaptionFormats( + muxedCaptionFormats != null ? muxedCaptionFormats : ImmutableList.of()); + outputConsumerAdapter.setTimestampAdjuster(timestampAdjuster); + MediaParser mediaParser = + createMediaParserInstance( + outputConsumerAdapter, + format, + overrideInBandCaptionDeclarations, + muxedCaptionMediaFormats, + playerId, + MediaParser.PARSER_NAME_FMP4, + MediaParser.PARSER_NAME_AC3, + MediaParser.PARSER_NAME_AC4, + MediaParser.PARSER_NAME_ADTS, + MediaParser.PARSER_NAME_MP3, + MediaParser.PARSER_NAME_TS); + + PeekingInputReader peekingInputReader = new PeekingInputReader(sniffingExtractorInput); + // The chunk extractor constructor requires an instance with a known parser name, so we + // advance once for MediaParser to sniff the content. + mediaParser.advance(peekingInputReader); + outputConsumerAdapter.setSelectedParserName(mediaParser.getParserName()); + + return new MediaParserHlsMediaChunkExtractor( + mediaParser, + outputConsumerAdapter, + format, + overrideInBandCaptionDeclarations, + muxedCaptionMediaFormats, + /* leadingBytesToSkip= */ peekingInputReader.totalPeekedBytes, + playerId); + }; + + private final OutputConsumerAdapterV30 outputConsumerAdapter; + private final InputReaderAdapterV30 inputReaderAdapter; + private final MediaParser mediaParser; + private final Format format; + private final boolean overrideInBandCaptionDeclarations; + private final ImmutableList muxedCaptionMediaFormats; + private final PlayerId playerId; + + private int pendingSkipBytes; + + /** + * Creates a new instance. + * + * @param mediaParser The {@link MediaParser} instance to use for extraction of segments. The + * provided instance must have completed sniffing, or must have been created by name. + * @param outputConsumerAdapter The {@link OutputConsumerAdapterV30} with which {@code + * mediaParser} was created. + * @param format The {@link Format} associated with the segment. + * @param overrideInBandCaptionDeclarations Whether to ignore any in-band caption track + * declarations in favor of using the {@code muxedCaptionMediaFormats} instead. If false, + * caption declarations found in the extracted media will be used, causing {@code + * muxedCaptionMediaFormats} to be ignored instead. + * @param muxedCaptionMediaFormats The list of in-band caption {@link MediaFormat MediaFormats} + * that {@link MediaParser} should expose. + * @param leadingBytesToSkip The number of bytes to skip from the start of the input before + * starting extraction. + * @param playerId The {@link PlayerId} of the player using this chunk extractor. 
+ */ + public MediaParserHlsMediaChunkExtractor( + MediaParser mediaParser, + OutputConsumerAdapterV30 outputConsumerAdapter, + Format format, + boolean overrideInBandCaptionDeclarations, + ImmutableList muxedCaptionMediaFormats, + int leadingBytesToSkip, + PlayerId playerId) { + this.mediaParser = mediaParser; + this.outputConsumerAdapter = outputConsumerAdapter; + this.overrideInBandCaptionDeclarations = overrideInBandCaptionDeclarations; + this.muxedCaptionMediaFormats = muxedCaptionMediaFormats; + this.format = format; + this.playerId = playerId; + pendingSkipBytes = leadingBytesToSkip; + inputReaderAdapter = new InputReaderAdapterV30(); + } + + // ChunkExtractor implementation. + + @Override + public void init(ExtractorOutput extractorOutput) { + outputConsumerAdapter.setExtractorOutput(extractorOutput); + } + + @Override + public boolean read(ExtractorInput extractorInput) throws IOException { + extractorInput.skipFully(pendingSkipBytes); + pendingSkipBytes = 0; + inputReaderAdapter.setDataReader(extractorInput, extractorInput.getLength()); + return mediaParser.advance(inputReaderAdapter); + } + + @Override + public boolean isPackedAudioExtractor() { + String parserName = mediaParser.getParserName(); + return MediaParser.PARSER_NAME_AC3.equals(parserName) + || MediaParser.PARSER_NAME_AC4.equals(parserName) + || MediaParser.PARSER_NAME_ADTS.equals(parserName) + || MediaParser.PARSER_NAME_MP3.equals(parserName); + } + + @Override + public boolean isReusable() { + String parserName = mediaParser.getParserName(); + return MediaParser.PARSER_NAME_FMP4.equals(parserName) + || MediaParser.PARSER_NAME_TS.equals(parserName); + } + + @Override + public HlsMediaChunkExtractor recreate() { + Assertions.checkState(!isReusable()); + return new MediaParserHlsMediaChunkExtractor( + createMediaParserInstance( + outputConsumerAdapter, + format, + overrideInBandCaptionDeclarations, + muxedCaptionMediaFormats, + playerId, + mediaParser.getParserName()), + outputConsumerAdapter, + format, + overrideInBandCaptionDeclarations, + muxedCaptionMediaFormats, + /* leadingBytesToSkip= */ 0, + playerId); + } + + @Override + public void onTruncatedSegmentParsed() { + mediaParser.seek(SeekPoint.START); + } + + // Allow constants that are not part of the public MediaParser API. + @SuppressLint({"WrongConstant"}) + private static MediaParser createMediaParserInstance( + OutputConsumer outputConsumer, + Format format, + boolean overrideInBandCaptionDeclarations, + ImmutableList muxedCaptionMediaFormats, + PlayerId playerId, + String... parserNames) { + MediaParser mediaParser = + parserNames.length == 1 + ? MediaParser.createByName(parserNames[0], outputConsumer) + : MediaParser.create(outputConsumer, parserNames); + mediaParser.setParameter(PARAMETER_EXPOSE_CAPTION_FORMATS, muxedCaptionMediaFormats); + mediaParser.setParameter( + PARAMETER_OVERRIDE_IN_BAND_CAPTION_DECLARATIONS, overrideInBandCaptionDeclarations); + mediaParser.setParameter(PARAMETER_IN_BAND_CRYPTO_INFO, true); + mediaParser.setParameter(PARAMETER_EAGERLY_EXPOSE_TRACK_TYPE, true); + mediaParser.setParameter(PARAMETER_IGNORE_TIMESTAMP_OFFSET, true); + mediaParser.setParameter(PARAMETER_TS_IGNORE_SPLICE_INFO_STREAM, true); + mediaParser.setParameter(PARAMETER_TS_MODE, "hls"); + @Nullable String codecs = format.codecs; + if (!TextUtils.isEmpty(codecs)) { + // Sometimes AAC and H264 streams are declared in TS chunks even though they don't really + // exist. 
If we know from the codec attribute that they don't exist, then we can + // explicitly ignore them even if they're declared. + if (!MimeTypes.AUDIO_AAC.equals(MimeTypes.getAudioMediaMimeType(codecs))) { + mediaParser.setParameter(PARAMETER_TS_IGNORE_AAC_STREAM, true); + } + if (!MimeTypes.VIDEO_H264.equals(MimeTypes.getVideoMediaMimeType(codecs))) { + mediaParser.setParameter(PARAMETER_TS_IGNORE_AVC_STREAM, true); + } + } + if (Util.SDK_INT >= 31) { + MediaParserUtil.setLogSessionIdOnMediaParser(mediaParser, playerId); + } + return mediaParser; + } + + private static final class PeekingInputReader implements MediaParser.SeekableInputReader { + + private final ExtractorInput extractorInput; + private int totalPeekedBytes; + + private PeekingInputReader(ExtractorInput extractorInput) { + this.extractorInput = extractorInput; + } + + @Override + public int read(byte[] buffer, int offset, int readLength) throws IOException { + int peekedBytes = extractorInput.peek(buffer, offset, readLength); + totalPeekedBytes += peekedBytes; + return peekedBytes; + } + + @Override + public long getPosition() { + return extractorInput.getPeekPosition(); + } + + @Override + public long getLength() { + return extractorInput.getLength(); + } + + @Override + public void seekToPosition(long position) { + // Seeking is not allowed when sniffing the content. + throw new UnsupportedOperationException(); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/SampleQueueMappingException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/SampleQueueMappingException.java index 38b18da50b..648c8630d8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/SampleQueueMappingException.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/SampleQueueMappingException.java @@ -23,7 +23,9 @@ /** Thrown when it is not possible to map a {@link TrackGroup} to a {@link SampleQueue}. */ public final class SampleQueueMappingException extends IOException { - /** @param mimeType The mime type of the track group whose mapping failed. */ + /** + * @param mimeType The mime type of the track group whose mapping failed. + */ public SampleQueueMappingException(@Nullable String mimeType) { super("Unable to bind a sample queue to TrackGroup with mime type " + mimeType + "."); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/TimestampAdjusterProvider.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/TimestampAdjusterProvider.java index 85a4276ea2..54de6425e6 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/TimestampAdjusterProvider.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/TimestampAdjusterProvider.java @@ -15,12 +15,13 @@ */ package com.google.android.exoplayer2.source.hls; +import static com.google.android.exoplayer2.util.TimestampAdjuster.MODE_SHARED; + import android.util.SparseArray; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.util.TimestampAdjuster; -/** - * Provides {@link TimestampAdjuster} instances for use during HLS playbacks. - */ +/** Provides {@link TimestampAdjuster} instances for use during HLS playbacks. 
*/ public final class TimestampAdjusterProvider { // TODO: Prevent this array from growing indefinitely large by removing adjusters that are no @@ -32,26 +33,23 @@ public TimestampAdjusterProvider() { } /** - * Returns a {@link TimestampAdjuster} suitable for adjusting the pts timestamps contained in - * a chunk with a given discontinuity sequence. + * Returns a {@link TimestampAdjuster} suitable for adjusting the pts timestamps contained in a + * chunk with a given discontinuity sequence. * * @param discontinuitySequence The chunk's discontinuity sequence. * @return A {@link TimestampAdjuster}. */ public TimestampAdjuster getAdjuster(int discontinuitySequence) { - TimestampAdjuster adjuster = timestampAdjusters.get(discontinuitySequence); + @Nullable TimestampAdjuster adjuster = timestampAdjusters.get(discontinuitySequence); if (adjuster == null) { - adjuster = new TimestampAdjuster(TimestampAdjuster.DO_NOT_OFFSET); + adjuster = new TimestampAdjuster(MODE_SHARED); timestampAdjusters.put(discontinuitySequence, adjuster); } return adjuster; } - /** - * Resets the provider. - */ + /** Resets the provider. */ public void reset() { timestampAdjusters.clear(); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/UnexpectedSampleTimestampException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/UnexpectedSampleTimestampException.java new file mode 100644 index 0000000000..331af0fa7d --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/UnexpectedSampleTimestampException.java @@ -0,0 +1,66 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.hls; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.source.SampleQueue; +import com.google.android.exoplayer2.source.chunk.MediaChunk; +import com.google.android.exoplayer2.util.Util; +import java.io.IOException; + +/** + * Thrown when an attempt is made to write a sample to a {@link SampleQueue} whose timestamp is + * inconsistent with the chunk from which it originates. + */ +/* package */ final class UnexpectedSampleTimestampException extends IOException { + + /** The {@link MediaChunk} that contained the rejected sample. */ + public final MediaChunk mediaChunk; + + /** + * The timestamp of the last sample that was loaded from {@link #mediaChunk} and successfully + * written to the {@link SampleQueue}, in microseconds. {@link C#TIME_UNSET} if the first sample + * in the chunk was rejected. + */ + public final long lastAcceptedSampleTimeUs; + + /** The timestamp of the rejected sample, in microseconds. */ + public final long rejectedSampleTimeUs; + + /** + * Constructs an instance. + * + * @param mediaChunk The {@link MediaChunk} with the unexpected sample timestamp. 
+ * @param lastAcceptedSampleTimeUs The timestamp of the last sample that was loaded from the chunk + * and successfully written to the {@link SampleQueue}, in microseconds. {@link C#TIME_UNSET} + * if the first sample in the chunk was rejected. + * @param rejectedSampleTimeUs The timestamp of the rejected sample, in microseconds. + */ + public UnexpectedSampleTimestampException( + MediaChunk mediaChunk, long lastAcceptedSampleTimeUs, long rejectedSampleTimeUs) { + super( + "Unexpected sample timestamp: " + + Util.usToMs(rejectedSampleTimeUs) + + " in chunk [" + + mediaChunk.startTimeUs + + ", " + + mediaChunk.endTimeUs + + "]"); + this.mediaChunk = mediaChunk; + this.lastAcceptedSampleTimeUs = lastAcceptedSampleTimeUs; + this.rejectedSampleTimeUs = rejectedSampleTimeUs; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/WebvttExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/WebvttExtractor.java index 285ec1b6a1..7a5771e208 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/WebvttExtractor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/WebvttExtractor.java @@ -72,7 +72,7 @@ public WebvttExtractor(@Nullable String language, TimestampAdjuster timestampAdj // Extractor implementation. @Override - public boolean sniff(ExtractorInput input) throws IOException, InterruptedException { + public boolean sniff(ExtractorInput input) throws IOException { // Check whether there is a header without BOM. input.peekFully( sampleData, /* offset= */ 0, /* length= */ HEADER_MIN_LENGTH, /* allowEndOfInput= */ false); @@ -108,16 +108,17 @@ public void release() { } @Override - public int read(ExtractorInput input, PositionHolder seekPosition) - throws IOException, InterruptedException { + public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { // output == null suggests init() hasn't been called Assertions.checkNotNull(output); int currentFileSize = (int) input.getLength(); // Increase the size of sampleData if necessary. if (sampleSize == sampleData.length) { - sampleData = Arrays.copyOf(sampleData, - (currentFileSize != C.LENGTH_UNSET ? currentFileSize : sampleData.length) * 3 / 2); + sampleData = + Arrays.copyOf( + sampleData, + (currentFileSize != C.LENGTH_UNSET ? currentFileSize : sampleData.length) * 3 / 2); } // Consume to the input. 
@@ -152,14 +153,20 @@ private void processSample() throws ParserException { if (line.startsWith("X-TIMESTAMP-MAP")) { Matcher localTimestampMatcher = LOCAL_TIMESTAMP.matcher(line); if (!localTimestampMatcher.find()) { - throw new ParserException("X-TIMESTAMP-MAP doesn't contain local timestamp: " + line); + throw ParserException.createForMalformedContainer( + "X-TIMESTAMP-MAP doesn't contain local timestamp: " + line, /* cause= */ null); } Matcher mediaTimestampMatcher = MEDIA_TIMESTAMP.matcher(line); if (!mediaTimestampMatcher.find()) { - throw new ParserException("X-TIMESTAMP-MAP doesn't contain media timestamp: " + line); + throw ParserException.createForMalformedContainer( + "X-TIMESTAMP-MAP doesn't contain media timestamp: " + line, /* cause= */ null); } - vttTimestampUs = WebvttParserUtil.parseTimestampUs(localTimestampMatcher.group(1)); - tsTimestampUs = TimestampAdjuster.ptsToUs(Long.parseLong(mediaTimestampMatcher.group(1))); + vttTimestampUs = + WebvttParserUtil.parseTimestampUs( + Assertions.checkNotNull(localTimestampMatcher.group(1))); + tsTimestampUs = + TimestampAdjuster.ptsToUs( + Long.parseLong(Assertions.checkNotNull(mediaTimestampMatcher.group(1)))); } } @@ -171,9 +178,11 @@ private void processSample() throws ParserException { return; } - long firstCueTimeUs = WebvttParserUtil.parseTimestampUs(cueHeaderMatcher.group(1)); - long sampleTimeUs = timestampAdjuster.adjustTsTimestamp( - TimestampAdjuster.usToPts(firstCueTimeUs + tsTimestampUs - vttTimestampUs)); + long firstCueTimeUs = + WebvttParserUtil.parseTimestampUs(Assertions.checkNotNull(cueHeaderMatcher.group(1))); + long sampleTimeUs = + timestampAdjuster.adjustTsTimestamp( + TimestampAdjuster.usToWrappedPts(firstCueTimeUs + tsTimestampUs - vttTimestampUs)); long subsampleOffsetUs = sampleTimeUs - firstCueTimeUs; // Output the track. 
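
Illustrative sketch (not part of the upstream diff): a worked example of the X-TIMESTAMP-MAP mapping computed above, assuming a hypothetical header of "X-TIMESTAMP-MAP=MPEGTS:900000,LOCAL:00:00:00.000". MPEG-TS timestamps tick at 90 kHz, so 900000 ticks correspond to 10 seconds; the class and the cue time are made up for illustration.

final class TimestampMapExample {
  public static void main(String[] args) {
    long tsTimestampUs = 900_000L * 1_000_000L / 90_000L; // MPEGTS:900000 at 90 kHz -> 10_000_000 us.
    long vttTimestampUs = 0;                              // LOCAL:00:00:00.000
    long firstCueTimeUs = 5_000_000;                      // A cue starting at 00:00:05.000.
    // The value handed to timestampAdjuster.adjustTsTimestamp(usToWrappedPts(...)) above:
    long unadjustedSampleTimeUs = firstCueTimeUs + tsTimestampUs - vttTimestampUs; // 15_000_000 us.
    System.out.println("unadjusted sample time (us): " + unadjustedSampleTimeUs);
  }
}
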
TrackOutput trackOutput = buildTrackOutput(subsampleOffsetUs); @@ -186,10 +195,13 @@ private void processSample() throws ParserException { @RequiresNonNull("output") private TrackOutput buildTrackOutput(long subsampleOffsetUs) { TrackOutput trackOutput = output.track(0, C.TRACK_TYPE_TEXT); - trackOutput.format(Format.createTextSampleFormat(null, MimeTypes.TEXT_VTT, null, - Format.NO_VALUE, 0, language, null, subsampleOffsetUs)); + trackOutput.format( + new Format.Builder() + .setSampleMimeType(MimeTypes.TEXT_VTT) + .setLanguage(language) + .setSubsampleOffsetUs(subsampleOffsetUs) + .build()); output.endTracks(); return trackOutput; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/offline/HlsDownloader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/offline/HlsDownloader.java index 6e6d0afd49..a22efa428a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/offline/HlsDownloader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/offline/HlsDownloader.java @@ -16,22 +16,23 @@ package com.google.android.exoplayer2.source.hls.offline; import android.net.Uri; -import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.offline.DownloaderConstructorHelper; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.offline.SegmentDownloader; -import com.google.android.exoplayer2.offline.StreamKey; -import com.google.android.exoplayer2.source.hls.playlist.HlsMasterPlaylist; import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist; +import com.google.android.exoplayer2.source.hls.playlist.HlsMultivariantPlaylist; import com.google.android.exoplayer2.source.hls.playlist.HlsPlaylist; import com.google.android.exoplayer2.source.hls.playlist.HlsPlaylistParser; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSpec; -import com.google.android.exoplayer2.upstream.ParsingLoadable; +import com.google.android.exoplayer2.upstream.ParsingLoadable.Parser; +import com.google.android.exoplayer2.upstream.cache.CacheDataSource; import com.google.android.exoplayer2.util.UriUtil; import java.io.IOException; import java.util.ArrayList; import java.util.HashSet; import java.util.List; +import java.util.concurrent.Executor; /** * A downloader for HLS streams. @@ -40,50 +41,84 @@ * *
        {@code
          * SimpleCache cache = new SimpleCache(downloadFolder, new NoOpCacheEvictor(), databaseProvider);
        - * DefaultHttpDataSourceFactory factory = new DefaultHttpDataSourceFactory("ExoPlayer", null);
        - * DownloaderConstructorHelper constructorHelper =
        - *     new DownloaderConstructorHelper(cache, factory);
        - * // Create a downloader for the first variant in a master playlist.
        + * CacheDataSource.Factory cacheDataSourceFactory =
        + *     new CacheDataSource.Factory()
        + *         .setCache(cache)
        + *         .setUpstreamDataSourceFactory(new DefaultHttpDataSource.Factory());
        + * // Create a downloader for the first variant in a multivariant playlist.
          * HlsDownloader hlsDownloader =
          *     new HlsDownloader(
        - *         playlistUri,
        - *         Collections.singletonList(new StreamKey(HlsMasterPlaylist.GROUP_INDEX_VARIANT, 0)),
        - *         constructorHelper);
        + *         new MediaItem.Builder()
        + *             .setUri(playlistUri)
        + *             .setStreamKeys(
        + *                 Collections.singletonList(
        + *                     new StreamKey(HlsMultivariantPlaylist.GROUP_INDEX_VARIANT, 0)))
        + *             .build(),
        + *         cacheDataSourceFactory);
          * // Perform the download.
          * hlsDownloader.download(progressListener);
        - * // Access downloaded data using CacheDataSource
        - * CacheDataSource cacheDataSource =
        - *     new CacheDataSource(cache, factory.createDataSource(), CacheDataSource.FLAG_BLOCK_ON_CACHE);
        + * // Use the downloaded data for playback.
        + * HlsMediaSource mediaSource =
        + *     new HlsMediaSource.Factory(cacheDataSourceFactory).createMediaSource(mediaItem);
        * }</pre>
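// --- Illustrative sketch (editor addition, not part of this diff) --------------------------------
// Exercises the new Executor-based constructor added to HlsDownloader in this change. The media
// item and cache factory are assumed to be built as in the Javadoc example above; all names below
// are placeholders, not part of the library.
private static void downloadInParallel(
    MediaItem mediaItem, CacheDataSource.Factory cacheDataSourceFactory)
    throws java.io.IOException, InterruptedException {
  // A multi-threaded executor allows independent media segments to be fetched concurrently.
  java.util.concurrent.ExecutorService executor =
      java.util.concurrent.Executors.newFixedThreadPool(4);
  HlsDownloader downloader = new HlsDownloader(mediaItem, cacheDataSourceFactory, executor);
  // Downloader.ProgressListener is a single-method interface, so a lambda is sufficient here.
  downloader.download(
      (contentLength, bytesDownloaded, percentDownloaded) ->
          System.out.println("HLS download progress: " + percentDownloaded + "%"));
  executor.shutdown();
}
// --------------------------------------------------------------------------------------------------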
        */ public final class HlsDownloader extends SegmentDownloader { /** - * @param playlistUri The {@link Uri} of the playlist to be downloaded. - * @param streamKeys Keys defining which renditions in the playlist should be selected for - * download. If empty, all renditions are downloaded. - * @param constructorHelper A {@link DownloaderConstructorHelper} instance. + * Creates a new instance. + * + * @param mediaItem The {@link MediaItem} to be downloaded. + * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which the + * download will be written. + */ + public HlsDownloader(MediaItem mediaItem, CacheDataSource.Factory cacheDataSourceFactory) { + this(mediaItem, cacheDataSourceFactory, Runnable::run); + } + + /** + * Creates a new instance. + * + * @param mediaItem The {@link MediaItem} to be downloaded. + * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which the + * download will be written. + * @param executor An {@link Executor} used to make requests for the media being downloaded. + * Providing an {@link Executor} that uses multiple threads will speed up the download by + * allowing parts of it to be executed in parallel. */ public HlsDownloader( - Uri playlistUri, List streamKeys, DownloaderConstructorHelper constructorHelper) { - super(playlistUri, streamKeys, constructorHelper); + MediaItem mediaItem, CacheDataSource.Factory cacheDataSourceFactory, Executor executor) { + this(mediaItem, new HlsPlaylistParser(), cacheDataSourceFactory, executor); } - @Override - protected HlsPlaylist getManifest(DataSource dataSource, DataSpec dataSpec) throws IOException { - return loadManifest(dataSource, dataSpec); + /** + * Creates a new instance. + * + * @param mediaItem The {@link MediaItem} to be downloaded. + * @param manifestParser A parser for HLS playlists. + * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which the + * download will be written. + * @param executor An {@link Executor} used to make requests for the media being downloaded. + * Providing an {@link Executor} that uses multiple threads will speed up the download by + * allowing parts of it to be executed in parallel. 
+ */ + public HlsDownloader( + MediaItem mediaItem, + Parser manifestParser, + CacheDataSource.Factory cacheDataSourceFactory, + Executor executor) { + super(mediaItem, manifestParser, cacheDataSourceFactory, executor); } @Override - protected List getSegments( - DataSource dataSource, HlsPlaylist playlist, boolean allowIncompleteList) throws IOException { + protected List getSegments(DataSource dataSource, HlsPlaylist manifest, boolean removing) + throws IOException, InterruptedException { ArrayList mediaPlaylistDataSpecs = new ArrayList<>(); - if (playlist instanceof HlsMasterPlaylist) { - HlsMasterPlaylist masterPlaylist = (HlsMasterPlaylist) playlist; - addMediaPlaylistDataSpecs(masterPlaylist.mediaPlaylistUrls, mediaPlaylistDataSpecs); + if (manifest instanceof HlsMultivariantPlaylist) { + HlsMultivariantPlaylist multivariantPlaylist = (HlsMultivariantPlaylist) manifest; + addMediaPlaylistDataSpecs(multivariantPlaylist.mediaPlaylistUrls, mediaPlaylistDataSpecs); } else { mediaPlaylistDataSpecs.add( - SegmentDownloader.getCompressibleDataSpec(Uri.parse(playlist.baseUri))); + SegmentDownloader.getCompressibleDataSpec(Uri.parse(manifest.baseUri))); } ArrayList segments = new ArrayList<>(); @@ -92,15 +127,15 @@ protected List getSegments( segments.add(new Segment(/* startTimeUs= */ 0, mediaPlaylistDataSpec)); HlsMediaPlaylist mediaPlaylist; try { - mediaPlaylist = (HlsMediaPlaylist) loadManifest(dataSource, mediaPlaylistDataSpec); + mediaPlaylist = (HlsMediaPlaylist) getManifest(dataSource, mediaPlaylistDataSpec, removing); } catch (IOException e) { - if (!allowIncompleteList) { + if (!removing) { throw e; } // Generating an incomplete segment list is allowed. Advance to the next media playlist. continue; } - HlsMediaPlaylist.Segment lastInitSegment = null; + @Nullable HlsMediaPlaylist.Segment lastInitSegment = null; List hlsSegments = mediaPlaylist.segments; for (int i = 0; i < hlsSegments.size(); i++) { HlsMediaPlaylist.Segment segment = hlsSegments.get(i); @@ -121,12 +156,6 @@ private void addMediaPlaylistDataSpecs(List mediaPlaylistUrls, List createPlaylistParser() { @Override public ParsingLoadable.Parser createPlaylistParser( - HlsMasterPlaylist masterPlaylist) { - return new HlsPlaylistParser(masterPlaylist); + HlsMultivariantPlaylist multivariantPlaylist, + @Nullable HlsMediaPlaylist previousMediaPlaylist) { + return new HlsPlaylistParser(multivariantPlaylist, previousMediaPlaylist); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/DefaultHlsPlaylistTracker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/DefaultHlsPlaylistTracker.java index f4fa2ad030..c9114855e9 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/DefaultHlsPlaylistTracker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/DefaultHlsPlaylistTracker.java @@ -15,26 +15,38 @@ */ package com.google.android.exoplayer2.source.hls.playlist; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.max; + import android.net.Uri; import android.os.Handler; import android.os.SystemClock; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.source.LoadEventInfo; +import com.google.android.exoplayer2.source.MediaLoadData; import 
com.google.android.exoplayer2.source.MediaSourceEventListener.EventDispatcher; import com.google.android.exoplayer2.source.hls.HlsDataSourceFactory; -import com.google.android.exoplayer2.source.hls.playlist.HlsMasterPlaylist.Variant; +import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist.Part; +import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist.RenditionReport; import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist.Segment; +import com.google.android.exoplayer2.source.hls.playlist.HlsMultivariantPlaylist.Variant; import com.google.android.exoplayer2.upstream.DataSource; +import com.google.android.exoplayer2.upstream.HttpDataSource; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy.LoadErrorInfo; import com.google.android.exoplayer2.upstream.Loader; import com.google.android.exoplayer2.upstream.Loader.LoadErrorAction; import com.google.android.exoplayer2.upstream.ParsingLoadable; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.Iterables; import java.io.IOException; -import java.util.ArrayList; import java.util.HashMap; import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; /** Default implementation for {@link HlsPlaylistTracker}. */ public final class DefaultHlsPlaylistTracker @@ -53,15 +65,14 @@ public final class DefaultHlsPlaylistTracker private final HlsPlaylistParserFactory playlistParserFactory; private final LoadErrorHandlingPolicy loadErrorHandlingPolicy; private final HashMap playlistBundles; - private final List listeners; + private final CopyOnWriteArrayList listeners; private final double playlistStuckTargetDurationCoefficient; - @Nullable private ParsingLoadable.Parser mediaPlaylistParser; @Nullable private EventDispatcher eventDispatcher; @Nullable private Loader initialPlaylistLoader; @Nullable private Handler playlistRefreshHandler; @Nullable private PrimaryPlaylistListener primaryPlaylistListener; - @Nullable private HlsMasterPlaylist masterPlaylist; + @Nullable private HlsMultivariantPlaylist multivariantPlaylist; @Nullable private Uri primaryMediaPlaylistUrl; @Nullable private HlsMediaPlaylist primaryMediaPlaylistSnapshot; private boolean isLive; @@ -105,7 +116,7 @@ public DefaultHlsPlaylistTracker( this.playlistParserFactory = playlistParserFactory; this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; this.playlistStuckTargetDurationCoefficient = playlistStuckTargetDurationCoefficient; - listeners = new ArrayList<>(); + listeners = new CopyOnWriteArrayList<>(); playlistBundles = new HashMap<>(); initialStartTimeUs = C.TIME_UNSET; } @@ -117,33 +128,36 @@ public void start( Uri initialPlaylistUri, EventDispatcher eventDispatcher, PrimaryPlaylistListener primaryPlaylistListener) { - this.playlistRefreshHandler = new Handler(); + this.playlistRefreshHandler = Util.createHandlerForCurrentLooper(); this.eventDispatcher = eventDispatcher; this.primaryPlaylistListener = primaryPlaylistListener; - ParsingLoadable masterPlaylistLoadable = + ParsingLoadable multivariantPlaylistLoadable = new ParsingLoadable<>( dataSourceFactory.createDataSource(C.DATA_TYPE_MANIFEST), initialPlaylistUri, C.DATA_TYPE_MANIFEST, playlistParserFactory.createPlaylistParser()); Assertions.checkState(initialPlaylistLoader == null); - initialPlaylistLoader = new Loader("DefaultHlsPlaylistTracker:MasterPlaylist"); + initialPlaylistLoader = new 
Loader("DefaultHlsPlaylistTracker:MultivariantPlaylist"); long elapsedRealtime = initialPlaylistLoader.startLoading( - masterPlaylistLoadable, + multivariantPlaylistLoadable, this, - loadErrorHandlingPolicy.getMinimumLoadableRetryCount(masterPlaylistLoadable.type)); + loadErrorHandlingPolicy.getMinimumLoadableRetryCount( + multivariantPlaylistLoadable.type)); eventDispatcher.loadStarted( - masterPlaylistLoadable.dataSpec, - masterPlaylistLoadable.type, - elapsedRealtime); + new LoadEventInfo( + multivariantPlaylistLoadable.loadTaskId, + multivariantPlaylistLoadable.dataSpec, + elapsedRealtime), + multivariantPlaylistLoadable.type); } @Override public void stop() { primaryMediaPlaylistUrl = null; primaryMediaPlaylistSnapshot = null; - masterPlaylist = null; + multivariantPlaylist = null; initialStartTimeUs = C.TIME_UNSET; initialPlaylistLoader.release(); initialPlaylistLoader = null; @@ -157,6 +171,7 @@ public void stop() { @Override public void addListener(PlaylistEventListener listener) { + checkNotNull(listener); listeners.add(listener); } @@ -167,14 +182,14 @@ public void removeListener(PlaylistEventListener listener) { @Override @Nullable - public HlsMasterPlaylist getMasterPlaylist() { - return masterPlaylist; + public HlsMultivariantPlaylist getMultivariantPlaylist() { + return multivariantPlaylist; } @Override @Nullable public HlsMediaPlaylist getPlaylistSnapshot(Uri url, boolean isForPlayback) { - HlsMediaPlaylist snapshot = playlistBundles.get(url).getPlaylistSnapshot(); + @Nullable HlsMediaPlaylist snapshot = playlistBundles.get(url).getPlaylistSnapshot(); if (snapshot != null && isForPlayback) { maybeSetPrimaryUrl(url); } @@ -216,38 +231,52 @@ public boolean isLive() { return isLive; } + @Override + public boolean excludeMediaPlaylist(Uri playlistUrl, long exclusionDurationMs) { + @Nullable MediaPlaylistBundle bundle = playlistBundles.get(playlistUrl); + if (bundle != null) { + return !bundle.excludePlaylist(exclusionDurationMs); + } + return false; + } + // Loader.Callback implementation. @Override public void onLoadCompleted( ParsingLoadable loadable, long elapsedRealtimeMs, long loadDurationMs) { HlsPlaylist result = loadable.getResult(); - HlsMasterPlaylist masterPlaylist; + HlsMultivariantPlaylist multivariantPlaylist; boolean isMediaPlaylist = result instanceof HlsMediaPlaylist; if (isMediaPlaylist) { - masterPlaylist = HlsMasterPlaylist.createSingleVariantMasterPlaylist(result.baseUri); - } else /* result instanceof HlsMasterPlaylist */ { - masterPlaylist = (HlsMasterPlaylist) result; - } - this.masterPlaylist = masterPlaylist; - mediaPlaylistParser = playlistParserFactory.createPlaylistParser(masterPlaylist); - primaryMediaPlaylistUrl = masterPlaylist.variants.get(0).url; - createBundles(masterPlaylist.mediaPlaylistUrls); + multivariantPlaylist = + HlsMultivariantPlaylist.createSingleVariantMultivariantPlaylist(result.baseUri); + } else /* result instanceof HlsMultivariantPlaylist */ { + multivariantPlaylist = (HlsMultivariantPlaylist) result; + } + this.multivariantPlaylist = multivariantPlaylist; + primaryMediaPlaylistUrl = multivariantPlaylist.variants.get(0).url; + // Add a temporary playlist listener for loading the first primary playlist. 
+ listeners.add(new FirstPrimaryMediaPlaylistListener()); + createBundles(multivariantPlaylist.mediaPlaylistUrls); + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); MediaPlaylistBundle primaryBundle = playlistBundles.get(primaryMediaPlaylistUrl); if (isMediaPlaylist) { // We don't need to load the playlist again. We can use the same result. - primaryBundle.processLoadedPlaylist((HlsMediaPlaylist) result, loadDurationMs); + primaryBundle.processLoadedPlaylist((HlsMediaPlaylist) result, loadEventInfo); } else { primaryBundle.loadPlaylist(); } - eventDispatcher.loadCompleted( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), - C.DATA_TYPE_MANIFEST, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded()); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + eventDispatcher.loadCompleted(loadEventInfo, C.DATA_TYPE_MANIFEST); } @Override @@ -256,14 +285,17 @@ public void onLoadCanceled( long elapsedRealtimeMs, long loadDurationMs, boolean released) { - eventDispatcher.loadCanceled( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), - C.DATA_TYPE_MANIFEST, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded()); + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + eventDispatcher.loadCanceled(loadEventInfo, C.DATA_TYPE_MANIFEST); } @Override @@ -273,20 +305,24 @@ public LoadErrorAction onLoadError( long loadDurationMs, IOException error, int errorCount) { + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); + MediaLoadData mediaLoadData = new MediaLoadData(loadable.type); long retryDelayMs = loadErrorHandlingPolicy.getRetryDelayMsFor( - loadable.type, loadDurationMs, error, errorCount); + new LoadErrorInfo(loadEventInfo, mediaLoadData, error, errorCount)); boolean isFatal = retryDelayMs == C.TIME_UNSET; - eventDispatcher.loadError( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), - C.DATA_TYPE_MANIFEST, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded(), - error, - isFatal); + eventDispatcher.loadError(loadEventInfo, loadable.type, error, isFatal); + if (isFatal) { + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + } return isFatal ? Loader.DONT_RETRY_FATAL : Loader.createRetryAction(/* resetErrorCount= */ false, retryDelayMs); @@ -295,14 +331,14 @@ public LoadErrorAction onLoadError( // Internal methods. 
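// --- Illustrative sketch (editor addition, not part of this diff) --------------------------------
// The tracker now hands the LoadErrorHandlingPolicy a single LoadErrorInfo (wrapping LoadEventInfo,
// MediaLoadData, the exception and the error count) instead of separate arguments. A custom policy
// written against the new signature might look roughly like this; the class name and retry values
// are assumptions for illustration only.
class ManifestAwareLoadErrorHandlingPolicy extends DefaultLoadErrorHandlingPolicy {
  @Override
  public long getRetryDelayMsFor(LoadErrorInfo loadErrorInfo) {
    // Retry playlist/manifest loads quickly for the first few attempts; otherwise defer to the
    // default behaviour.
    if (loadErrorInfo.mediaLoadData.dataType == C.DATA_TYPE_MANIFEST
        && loadErrorInfo.errorCount <= 3) {
      return 1_000;
    }
    return super.getRetryDelayMsFor(loadErrorInfo);
  }
}
// --------------------------------------------------------------------------------------------------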
private boolean maybeSelectNewPrimaryUrl() { - List variants = masterPlaylist.variants; + List variants = multivariantPlaylist.variants; int variantsSize = variants.size(); long currentTimeMs = SystemClock.elapsedRealtime(); for (int i = 0; i < variantsSize; i++) { - MediaPlaylistBundle bundle = playlistBundles.get(variants.get(i).url); - if (currentTimeMs > bundle.blacklistUntilMs) { + MediaPlaylistBundle bundle = checkNotNull(playlistBundles.get(variants.get(i).url)); + if (currentTimeMs > bundle.excludeUntilMs) { primaryMediaPlaylistUrl = bundle.playlistUrl; - bundle.loadPlaylist(); + bundle.loadPlaylistInternal(getRequestUriForPrimaryChange(primaryMediaPlaylistUrl)); return true; } } @@ -318,12 +354,44 @@ private void maybeSetPrimaryUrl(Uri url) { return; } primaryMediaPlaylistUrl = url; - playlistBundles.get(primaryMediaPlaylistUrl).loadPlaylist(); + MediaPlaylistBundle newPrimaryBundle = playlistBundles.get(primaryMediaPlaylistUrl); + @Nullable HlsMediaPlaylist newPrimarySnapshot = newPrimaryBundle.playlistSnapshot; + if (newPrimarySnapshot != null && newPrimarySnapshot.hasEndTag) { + primaryMediaPlaylistSnapshot = newPrimarySnapshot; + primaryPlaylistListener.onPrimaryPlaylistRefreshed(newPrimarySnapshot); + } else { + // The snapshot for the new primary media playlist URL may be stale. Defer updating the + // primary snapshot until after we've refreshed it. + newPrimaryBundle.loadPlaylistInternal(getRequestUriForPrimaryChange(url)); + } + } + + private Uri getRequestUriForPrimaryChange(Uri newPrimaryPlaylistUri) { + if (primaryMediaPlaylistSnapshot != null + && primaryMediaPlaylistSnapshot.serverControl.canBlockReload) { + @Nullable + RenditionReport renditionReport = + primaryMediaPlaylistSnapshot.renditionReports.get(newPrimaryPlaylistUri); + if (renditionReport != null) { + Uri.Builder uriBuilder = newPrimaryPlaylistUri.buildUpon(); + uriBuilder.appendQueryParameter( + MediaPlaylistBundle.BLOCK_MSN_PARAM, String.valueOf(renditionReport.lastMediaSequence)); + if (renditionReport.lastPartIndex != C.INDEX_UNSET) { + uriBuilder.appendQueryParameter( + MediaPlaylistBundle.BLOCK_PART_PARAM, String.valueOf(renditionReport.lastPartIndex)); + } + return uriBuilder.build(); + } + } + return newPrimaryPlaylistUri; } - /** Returns whether any of the variants in the master playlist have the specified playlist URL. */ + /** + * Returns whether any of the variants in the multivariant playlist have the specified playlist + * URL. 
+ */ private boolean isVariantUrl(Uri playlistUrl) { - List variants = masterPlaylist.variants; + List variants = multivariantPlaylist.variants; for (int i = 0; i < variants.size(); i++) { if (playlistUrl.equals(variants.get(i).url)) { return true; @@ -357,23 +425,22 @@ private void onPlaylistUpdated(Uri url, HlsMediaPlaylist newSnapshot) { primaryMediaPlaylistSnapshot = newSnapshot; primaryPlaylistListener.onPrimaryPlaylistRefreshed(newSnapshot); } - int listenersSize = listeners.size(); - for (int i = 0; i < listenersSize; i++) { - listeners.get(i).onPlaylistChanged(); + for (PlaylistEventListener listener : listeners) { + listener.onPlaylistChanged(); } } - private boolean notifyPlaylistError(Uri playlistUrl, long blacklistDurationMs) { - int listenersSize = listeners.size(); - boolean anyBlacklistingFailed = false; - for (int i = 0; i < listenersSize; i++) { - anyBlacklistingFailed |= !listeners.get(i).onPlaylistError(playlistUrl, blacklistDurationMs); + private boolean notifyPlaylistError( + Uri playlistUrl, LoadErrorInfo loadErrorInfo, boolean forceRetry) { + boolean anyExclusionFailed = false; + for (PlaylistEventListener listener : listeners) { + anyExclusionFailed |= !listener.onPlaylistError(playlistUrl, loadErrorInfo, forceRetry); } - return anyBlacklistingFailed; + return anyExclusionFailed; } private HlsMediaPlaylist getLatestPlaylistSnapshot( - HlsMediaPlaylist oldPlaylist, HlsMediaPlaylist loadedPlaylist) { + @Nullable HlsMediaPlaylist oldPlaylist, HlsMediaPlaylist loadedPlaylist) { if (!loadedPlaylist.isNewerThan(oldPlaylist)) { if (loadedPlaylist.hasEndTag) { // If the loaded playlist has an end tag but is not newer than the old playlist then we have @@ -391,7 +458,7 @@ private HlsMediaPlaylist getLatestPlaylistSnapshot( } private long getLoadedPlaylistStartTimeUs( - HlsMediaPlaylist oldPlaylist, HlsMediaPlaylist loadedPlaylist) { + @Nullable HlsMediaPlaylist oldPlaylist, HlsMediaPlaylist loadedPlaylist) { if (loadedPlaylist.hasProgramDateTime) { return loadedPlaylist.startTimeUs; } @@ -413,7 +480,7 @@ private long getLoadedPlaylistStartTimeUs( } private int getLoadedPlaylistDiscontinuitySequence( - HlsMediaPlaylist oldPlaylist, HlsMediaPlaylist loadedPlaylist) { + @Nullable HlsMediaPlaylist oldPlaylist, HlsMediaPlaylist loadedPlaylist) { if (loadedPlaylist.hasDiscontinuitySequence) { return loadedPlaylist.discontinuitySequence; } @@ -442,30 +509,28 @@ private static Segment getFirstOldOverlappingSegment( } /** Holds all information related to a specific Media Playlist. 
*/ - private final class MediaPlaylistBundle - implements Loader.Callback>, Runnable { + private final class MediaPlaylistBundle implements Loader.Callback> { + + private static final String BLOCK_MSN_PARAM = "_HLS_msn"; + private static final String BLOCK_PART_PARAM = "_HLS_part"; + private static final String SKIP_PARAM = "_HLS_skip"; private final Uri playlistUrl; private final Loader mediaPlaylistLoader; - private final ParsingLoadable mediaPlaylistLoadable; + private final DataSource mediaPlaylistDataSource; @Nullable private HlsMediaPlaylist playlistSnapshot; private long lastSnapshotLoadMs; private long lastSnapshotChangeMs; private long earliestNextLoadTimeMs; - private long blacklistUntilMs; + private long excludeUntilMs; private boolean loadPending; - private IOException playlistError; + @Nullable private IOException playlistError; public MediaPlaylistBundle(Uri playlistUrl) { this.playlistUrl = playlistUrl; mediaPlaylistLoader = new Loader("DefaultHlsPlaylistTracker:MediaPlaylist"); - mediaPlaylistLoadable = - new ParsingLoadable<>( - dataSourceFactory.createDataSource(C.DATA_TYPE_MANIFEST), - playlistUrl, - C.DATA_TYPE_MANIFEST, - mediaPlaylistParser); + mediaPlaylistDataSource = dataSourceFactory.createDataSource(C.DATA_TYPE_MANIFEST); } @Nullable @@ -478,30 +543,15 @@ public boolean isSnapshotValid() { return false; } long currentTimeMs = SystemClock.elapsedRealtime(); - long snapshotValidityDurationMs = Math.max(30000, C.usToMs(playlistSnapshot.durationUs)); + long snapshotValidityDurationMs = max(30000, Util.usToMs(playlistSnapshot.durationUs)); return playlistSnapshot.hasEndTag || playlistSnapshot.playlistType == HlsMediaPlaylist.PLAYLIST_TYPE_EVENT || playlistSnapshot.playlistType == HlsMediaPlaylist.PLAYLIST_TYPE_VOD || lastSnapshotLoadMs + snapshotValidityDurationMs > currentTimeMs; } - public void release() { - mediaPlaylistLoader.release(); - } - public void loadPlaylist() { - blacklistUntilMs = 0; - if (loadPending || mediaPlaylistLoader.isLoading() || mediaPlaylistLoader.hasFatalError()) { - // Load already pending, in progress, or a fatal error has been encountered. Do nothing. - return; - } - long currentTimeMs = SystemClock.elapsedRealtime(); - if (currentTimeMs < earliestNextLoadTimeMs) { - loadPending = true; - playlistRefreshHandler.postDelayed(this, earliestNextLoadTimeMs - currentTimeMs); - } else { - loadPlaylistImmediately(); - } + loadPlaylistInternal(playlistUrl); } public void maybeThrowPlaylistRefreshError() throws IOException { @@ -511,25 +561,36 @@ public void maybeThrowPlaylistRefreshError() throws IOException { } } + public void release() { + mediaPlaylistLoader.release(); + } + // Loader.Callback implementation. 
@Override public void onLoadCompleted( ParsingLoadable loadable, long elapsedRealtimeMs, long loadDurationMs) { - HlsPlaylist result = loadable.getResult(); + @Nullable HlsPlaylist result = loadable.getResult(); + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); if (result instanceof HlsMediaPlaylist) { - processLoadedPlaylist((HlsMediaPlaylist) result, loadDurationMs); - eventDispatcher.loadCompleted( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), - C.DATA_TYPE_MANIFEST, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded()); + processLoadedPlaylist((HlsMediaPlaylist) result, loadEventInfo); + eventDispatcher.loadCompleted(loadEventInfo, C.DATA_TYPE_MANIFEST); } else { - playlistError = new ParserException("Loaded playlist has unexpected type."); + playlistError = + ParserException.createForMalformedManifest( + "Loaded playlist has unexpected type.", /* cause= */ null); + eventDispatcher.loadError( + loadEventInfo, C.DATA_TYPE_MANIFEST, playlistError, /* wasCanceled= */ true); } + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); } @Override @@ -538,14 +599,17 @@ public void onLoadCanceled( long elapsedRealtimeMs, long loadDurationMs, boolean released) { - eventDispatcher.loadCanceled( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), - C.DATA_TYPE_MANIFEST, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded()); + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + eventDispatcher.loadCanceled(loadEventInfo, C.DATA_TYPE_MANIFEST); } @Override @@ -555,23 +619,41 @@ public LoadErrorAction onLoadError( long loadDurationMs, IOException error, int errorCount) { - LoadErrorAction loadErrorAction; - - long blacklistDurationMs = - loadErrorHandlingPolicy.getBlacklistDurationMsFor( - loadable.type, loadDurationMs, error, errorCount); - boolean shouldBlacklist = blacklistDurationMs != C.TIME_UNSET; - - boolean blacklistingFailed = - notifyPlaylistError(playlistUrl, blacklistDurationMs) || !shouldBlacklist; - if (shouldBlacklist) { - blacklistingFailed |= blacklistPlaylist(blacklistDurationMs); + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); + boolean isBlockingRequest = loadable.getUri().getQueryParameter(BLOCK_MSN_PARAM) != null; + boolean deltaUpdateFailed = error instanceof HlsPlaylistParser.DeltaUpdateException; + if (isBlockingRequest || deltaUpdateFailed) { + int responseCode = Integer.MAX_VALUE; + if (error instanceof HttpDataSource.InvalidResponseCodeException) { + responseCode = ((HttpDataSource.InvalidResponseCodeException) error).responseCode; + } + if (deltaUpdateFailed || responseCode == 400 || responseCode == 503) { + // Intercept failed delta updates and blocking requests producing a Bad Request (400) and + // Service Unavailable (503). In such cases, force a full, non-blocking request (see RFC + // 8216, section 6.2.5.2 and 6.3.7). 
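                // Illustrative note (editor addition, assumed values): a blocking reload such as
                // "media.m3u8?_HLS_msn=273&_HLS_part=2" that fails with 503 is retried immediately as a
                // plain "media.m3u8" request, i.e. with the _HLS_msn/_HLS_part delivery directives dropped.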
+ earliestNextLoadTimeMs = SystemClock.elapsedRealtime(); + loadPlaylist(); + castNonNull(eventDispatcher) + .loadError(loadEventInfo, loadable.type, error, /* wasCanceled= */ true); + return Loader.DONT_RETRY; + } } - - if (blacklistingFailed) { - long retryDelay = - loadErrorHandlingPolicy.getRetryDelayMsFor( - loadable.type, loadDurationMs, error, errorCount); + MediaLoadData mediaLoadData = new MediaLoadData(loadable.type); + LoadErrorInfo loadErrorInfo = + new LoadErrorInfo(loadEventInfo, mediaLoadData, error, errorCount); + boolean exclusionFailed = + notifyPlaylistError(playlistUrl, loadErrorInfo, /* forceRetry= */ false); + LoadErrorAction loadErrorAction; + if (exclusionFailed) { + long retryDelay = loadErrorHandlingPolicy.getRetryDelayMsFor(loadErrorInfo); loadErrorAction = retryDelay != C.TIME_UNSET ? Loader.createRetryAction(false, retryDelay) @@ -580,44 +662,59 @@ public LoadErrorAction onLoadError( loadErrorAction = Loader.DONT_RETRY; } - eventDispatcher.loadError( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), - C.DATA_TYPE_MANIFEST, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded(), - error, - /* wasCanceled= */ !loadErrorAction.isRetry()); - + boolean wasCanceled = !loadErrorAction.isRetry(); + eventDispatcher.loadError(loadEventInfo, loadable.type, error, wasCanceled); + if (wasCanceled) { + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + } return loadErrorAction; } - // Runnable implementation. + // Internal methods. - @Override - public void run() { - loadPending = false; - loadPlaylistImmediately(); + private void loadPlaylistInternal(Uri playlistRequestUri) { + excludeUntilMs = 0; + if (loadPending || mediaPlaylistLoader.isLoading() || mediaPlaylistLoader.hasFatalError()) { + // Load already pending, in progress, or a fatal error has been encountered. Do nothing. + return; + } + long currentTimeMs = SystemClock.elapsedRealtime(); + if (currentTimeMs < earliestNextLoadTimeMs) { + loadPending = true; + playlistRefreshHandler.postDelayed( + () -> { + loadPending = false; + loadPlaylistImmediately(playlistRequestUri); + }, + earliestNextLoadTimeMs - currentTimeMs); + } else { + loadPlaylistImmediately(playlistRequestUri); + } } - // Internal methods. 
- - private void loadPlaylistImmediately() { + private void loadPlaylistImmediately(Uri playlistRequestUri) { + ParsingLoadable.Parser mediaPlaylistParser = + playlistParserFactory.createPlaylistParser(multivariantPlaylist, playlistSnapshot); + ParsingLoadable mediaPlaylistLoadable = + new ParsingLoadable<>( + mediaPlaylistDataSource, + playlistRequestUri, + C.DATA_TYPE_MANIFEST, + mediaPlaylistParser); long elapsedRealtime = mediaPlaylistLoader.startLoading( mediaPlaylistLoadable, - this, + /* callback= */ this, loadErrorHandlingPolicy.getMinimumLoadableRetryCount(mediaPlaylistLoadable.type)); eventDispatcher.loadStarted( - mediaPlaylistLoadable.dataSpec, - mediaPlaylistLoadable.type, - elapsedRealtime); + new LoadEventInfo( + mediaPlaylistLoadable.loadTaskId, mediaPlaylistLoadable.dataSpec, elapsedRealtime), + mediaPlaylistLoadable.type); } - private void processLoadedPlaylist(HlsMediaPlaylist loadedPlaylist, long loadDurationMs) { - HlsMediaPlaylist oldPlaylist = playlistSnapshot; + private void processLoadedPlaylist( + HlsMediaPlaylist loadedPlaylist, LoadEventInfo loadEventInfo) { + @Nullable HlsMediaPlaylist oldPlaylist = playlistSnapshot; long currentTimeMs = SystemClock.elapsedRealtime(); lastSnapshotLoadMs = currentTimeMs; playlistSnapshot = getLatestPlaylistSnapshot(oldPlaylist, loadedPlaylist); @@ -626,53 +723,139 @@ private void processLoadedPlaylist(HlsMediaPlaylist loadedPlaylist, long loadDur lastSnapshotChangeMs = currentTimeMs; onPlaylistUpdated(playlistUrl, playlistSnapshot); } else if (!playlistSnapshot.hasEndTag) { + boolean forceRetry = false; + @Nullable IOException playlistError = null; if (loadedPlaylist.mediaSequence + loadedPlaylist.segments.size() < playlistSnapshot.mediaSequence) { // TODO: Allow customization of playlist resets handling. // The media sequence jumped backwards. The server has probably reset. We do not try - // blacklisting in this case. + // excluding in this case. + forceRetry = true; playlistError = new PlaylistResetException(playlistUrl); - notifyPlaylistError(playlistUrl, C.TIME_UNSET); } else if (currentTimeMs - lastSnapshotChangeMs - > C.usToMs(playlistSnapshot.targetDurationUs) + > Util.usToMs(playlistSnapshot.targetDurationUs) * playlistStuckTargetDurationCoefficient) { // TODO: Allow customization of stuck playlists handling. playlistError = new PlaylistStuckException(playlistUrl); - long blacklistDurationMs = - loadErrorHandlingPolicy.getBlacklistDurationMsFor( - C.DATA_TYPE_MANIFEST, loadDurationMs, playlistError, /* errorCount= */ 1); - notifyPlaylistError(playlistUrl, blacklistDurationMs); - if (blacklistDurationMs != C.TIME_UNSET) { - blacklistPlaylist(blacklistDurationMs); + } + if (playlistError != null) { + this.playlistError = playlistError; + notifyPlaylistError( + playlistUrl, + new LoadErrorInfo( + loadEventInfo, + new MediaLoadData(C.DATA_TYPE_MANIFEST), + playlistError, + /* errorCount= */ 1), + forceRetry); + } + } + long durationUntilNextLoadUs = 0L; + if (!playlistSnapshot.serverControl.canBlockReload) { + // If blocking requests are not supported, do not allow the playlist to load again within + // the target duration if we obtained a new snapshot, or half the target duration otherwise. + durationUntilNextLoadUs = + playlistSnapshot != oldPlaylist + ? 
playlistSnapshot.targetDurationUs + : (playlistSnapshot.targetDurationUs / 2); + } + earliestNextLoadTimeMs = currentTimeMs + Util.usToMs(durationUntilNextLoadUs); + // Schedule a load if this is the primary playlist or a playlist of a low-latency stream and + // it doesn't have an end tag. Else the next load will be scheduled when refreshPlaylist is + // called, or when this playlist becomes the primary. + boolean scheduleLoad = + playlistSnapshot.partTargetDurationUs != C.TIME_UNSET + || playlistUrl.equals(primaryMediaPlaylistUrl); + if (scheduleLoad && !playlistSnapshot.hasEndTag) { + loadPlaylistInternal(getMediaPlaylistUriForReload()); + } + } + + private Uri getMediaPlaylistUriForReload() { + if (playlistSnapshot == null + || (playlistSnapshot.serverControl.skipUntilUs == C.TIME_UNSET + && !playlistSnapshot.serverControl.canBlockReload)) { + return playlistUrl; + } + Uri.Builder uriBuilder = playlistUrl.buildUpon(); + if (playlistSnapshot.serverControl.canBlockReload) { + long targetMediaSequence = + playlistSnapshot.mediaSequence + playlistSnapshot.segments.size(); + uriBuilder.appendQueryParameter(BLOCK_MSN_PARAM, String.valueOf(targetMediaSequence)); + if (playlistSnapshot.partTargetDurationUs != C.TIME_UNSET) { + List trailingParts = playlistSnapshot.trailingParts; + int targetPartIndex = trailingParts.size(); + if (!trailingParts.isEmpty() && Iterables.getLast(trailingParts).isPreload) { + // Ignore the preload part. + targetPartIndex--; } + uriBuilder.appendQueryParameter(BLOCK_PART_PARAM, String.valueOf(targetPartIndex)); } } - // Do not allow the playlist to load again within the target duration if we obtained a new - // snapshot, or half the target duration otherwise. - earliestNextLoadTimeMs = - currentTimeMs - + C.usToMs( - playlistSnapshot != oldPlaylist - ? playlistSnapshot.targetDurationUs - : (playlistSnapshot.targetDurationUs / 2)); - // Schedule a load if this is the primary playlist and it doesn't have an end tag. Else the - // next load will be scheduled when refreshPlaylist is called, or when this playlist becomes - // the primary. - if (playlistUrl.equals(primaryMediaPlaylistUrl) && !playlistSnapshot.hasEndTag) { - loadPlaylist(); + if (playlistSnapshot.serverControl.skipUntilUs != C.TIME_UNSET) { + uriBuilder.appendQueryParameter( + SKIP_PARAM, playlistSnapshot.serverControl.canSkipDateRanges ? "v2" : "YES"); } + return uriBuilder.build(); } /** - * Blacklists the playlist. + * Excludes the playlist. * - * @param blacklistDurationMs The number of milliseconds for which the playlist should be - * blacklisted. - * @return Whether the playlist is the primary, despite being blacklisted. + * @param exclusionDurationMs The number of milliseconds for which the playlist should be + * excluded. + * @return Whether the playlist is the primary, despite being excluded. */ - private boolean blacklistPlaylist(long blacklistDurationMs) { - blacklistUntilMs = SystemClock.elapsedRealtime() + blacklistDurationMs; + private boolean excludePlaylist(long exclusionDurationMs) { + excludeUntilMs = SystemClock.elapsedRealtime() + exclusionDurationMs; return playlistUrl.equals(primaryMediaPlaylistUrl) && !maybeSelectNewPrimaryUrl(); } } + + /** + * Takes care of handling load errors of the first media playlist and applies exclusion according + * to the {@link LoadErrorHandlingPolicy} before the first media period has been created and + * prepared. 
+ */ + private class FirstPrimaryMediaPlaylistListener implements PlaylistEventListener { + + @Override + public void onPlaylistChanged() { + // Remove the temporary playlist listener that is waiting for the first playlist only. + listeners.remove(this); + } + + @Override + public boolean onPlaylistError(Uri url, LoadErrorInfo loadErrorInfo, boolean forceRetry) { + if (primaryMediaPlaylistSnapshot == null) { + long nowMs = SystemClock.elapsedRealtime(); + int variantExclusionCounter = 0; + List variants = castNonNull(multivariantPlaylist).variants; + for (int i = 0; i < variants.size(); i++) { + @Nullable + MediaPlaylistBundle mediaPlaylistBundle = playlistBundles.get(variants.get(i).url); + if (mediaPlaylistBundle != null && nowMs < mediaPlaylistBundle.excludeUntilMs) { + variantExclusionCounter++; + } + } + LoadErrorHandlingPolicy.FallbackOptions fallbackOptions = + new LoadErrorHandlingPolicy.FallbackOptions( + /* numberOfLocations= */ 1, + /* numberOfExcludedLocations= */ 0, + /* numberOfTracks= */ multivariantPlaylist.variants.size(), + /* numberOfExcludedTracks= */ variantExclusionCounter); + @Nullable + LoadErrorHandlingPolicy.FallbackSelection fallbackSelection = + loadErrorHandlingPolicy.getFallbackSelectionFor(fallbackOptions, loadErrorInfo); + if (fallbackSelection != null + && fallbackSelection.type == LoadErrorHandlingPolicy.FALLBACK_TYPE_TRACK) { + @Nullable MediaPlaylistBundle mediaPlaylistBundle = playlistBundles.get(url); + if (mediaPlaylistBundle != null) { + mediaPlaylistBundle.excludePlaylist(fallbackSelection.exclusionDurationMs); + } + } + } + return false; + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/FilteringHlsPlaylistParserFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/FilteringHlsPlaylistParserFactory.java index 2d7ad5a78a..e556ee099e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/FilteringHlsPlaylistParserFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/FilteringHlsPlaylistParserFactory.java @@ -15,6 +15,7 @@ */ package com.google.android.exoplayer2.source.hls.playlist; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.offline.FilteringManifestParser; import com.google.android.exoplayer2.offline.StreamKey; import com.google.android.exoplayer2.upstream.ParsingLoadable; @@ -48,8 +49,10 @@ public ParsingLoadable.Parser createPlaylistParser() { @Override public ParsingLoadable.Parser createPlaylistParser( - HlsMasterPlaylist masterPlaylist) { + HlsMultivariantPlaylist multivariantPlaylist, + @Nullable HlsMediaPlaylist previousMediaPlaylist) { return new FilteringManifestParser<>( - hlsPlaylistParserFactory.createPlaylistParser(masterPlaylist), streamKeys); + hlsPlaylistParserFactory.createPlaylistParser(multivariantPlaylist, previousMediaPlaylist), + streamKeys); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsMasterPlaylist.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsMasterPlaylist.java index f96c7dfa92..fadbea7fcf 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsMasterPlaylist.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsMasterPlaylist.java @@ -15,191 +15,24 @@ */ package com.google.android.exoplayer2.source.hls.playlist; -import android.net.Uri; import 
androidx.annotation.Nullable; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.drm.DrmInitData; -import com.google.android.exoplayer2.offline.StreamKey; -import com.google.android.exoplayer2.util.MimeTypes; -import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; -/** Represents an HLS master playlist. */ -public final class HlsMasterPlaylist extends HlsPlaylist { - - /** Represents an empty master playlist, from which no attributes can be inherited. */ - public static final HlsMasterPlaylist EMPTY = - new HlsMasterPlaylist( - /* baseUri= */ "", - /* tags= */ Collections.emptyList(), - /* variants= */ Collections.emptyList(), - /* videos= */ Collections.emptyList(), - /* audios= */ Collections.emptyList(), - /* subtitles= */ Collections.emptyList(), - /* closedCaptions= */ Collections.emptyList(), - /* muxedAudioFormat= */ null, - /* muxedCaptionFormats= */ Collections.emptyList(), - /* hasIndependentSegments= */ false, - /* variableDefinitions= */ Collections.emptyMap(), - /* sessionKeyDrmInitData= */ Collections.emptyList()); - - // These constants must not be changed because they are persisted in offline stream keys. - public static final int GROUP_INDEX_VARIANT = 0; - public static final int GROUP_INDEX_AUDIO = 1; - public static final int GROUP_INDEX_SUBTITLE = 2; - - /** A variant (i.e. an #EXT-X-STREAM-INF tag) in a master playlist. */ - public static final class Variant { - - /** The variant's url. */ - public final Uri url; - - /** Format information associated with this variant. */ - public final Format format; - - /** The video rendition group referenced by this variant, or {@code null}. */ - @Nullable public final String videoGroupId; - - /** The audio rendition group referenced by this variant, or {@code null}. */ - @Nullable public final String audioGroupId; - - /** The subtitle rendition group referenced by this variant, or {@code null}. */ - @Nullable public final String subtitleGroupId; - - /** The caption rendition group referenced by this variant, or {@code null}. */ - @Nullable public final String captionGroupId; - - /** - * @param url See {@link #url}. - * @param format See {@link #format}. - * @param videoGroupId See {@link #videoGroupId}. - * @param audioGroupId See {@link #audioGroupId}. - * @param subtitleGroupId See {@link #subtitleGroupId}. - * @param captionGroupId See {@link #captionGroupId}. - */ - public Variant( - Uri url, - Format format, - @Nullable String videoGroupId, - @Nullable String audioGroupId, - @Nullable String subtitleGroupId, - @Nullable String captionGroupId) { - this.url = url; - this.format = format; - this.videoGroupId = videoGroupId; - this.audioGroupId = audioGroupId; - this.subtitleGroupId = subtitleGroupId; - this.captionGroupId = captionGroupId; - } - - /** - * Creates a variant for a given media playlist url. - * - * @param url The media playlist url. - * @return The variant instance. - */ - public static Variant createMediaPlaylistVariantUrl(Uri url) { - Format format = - Format.createContainerFormat( - "0", - /* label= */ null, - MimeTypes.APPLICATION_M3U8, - /* sampleMimeType= */ null, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - /* selectionFlags= */ 0, - /* roleFlags= */ 0, - /* language= */ null); - return new Variant( - url, - format, - /* videoGroupId= */ null, - /* audioGroupId= */ null, - /* subtitleGroupId= */ null, - /* captionGroupId= */ null); - } - - /** Returns a copy of this instance with the given {@link Format}. 
*/ - public Variant copyWithFormat(Format format) { - return new Variant(url, format, videoGroupId, audioGroupId, subtitleGroupId, captionGroupId); - } - } - - /** A rendition (i.e. an #EXT-X-MEDIA tag) in a master playlist. */ - public static final class Rendition { - - /** The rendition's url, or null if the tag does not have a URI attribute. */ - @Nullable public final Uri url; - - /** Format information associated with this rendition. */ - public final Format format; - - /** The group to which this rendition belongs. */ - public final String groupId; - - /** The name of the rendition. */ - public final String name; - - /** - * @param url See {@link #url}. - * @param format See {@link #format}. - * @param groupId See {@link #groupId}. - * @param name See {@link #name}. - */ - public Rendition(@Nullable Uri url, Format format, String groupId, String name) { - this.url = url; - this.format = format; - this.groupId = groupId; - this.name = name; - } - - } - - /** All of the media playlist URLs referenced by the playlist. */ - public final List mediaPlaylistUrls; - /** The variants declared by the playlist. */ - public final List variants; - /** The video renditions declared by the playlist. */ - public final List videos; - /** The audio renditions declared by the playlist. */ - public final List audios; - /** The subtitle renditions declared by the playlist. */ - public final List subtitles; - /** The closed caption renditions declared by the playlist. */ - public final List closedCaptions; - - /** - * The format of the audio muxed in the variants. May be null if the playlist does not declare any - * muxed audio. - */ - @Nullable public final Format muxedAudioFormat; - /** - * The format of the closed captions declared by the playlist. May be empty if the playlist - * explicitly declares no captions are available, or null if the playlist does not declare any - * captions information. - */ - @Nullable public final List muxedCaptionFormats; - /** Contains variable definitions, as defined by the #EXT-X-DEFINE tag. */ - public final Map variableDefinitions; - /** DRM initialization data derived from #EXT-X-SESSION-KEY tags. */ - public final List sessionKeyDrmInitData; +/** + * @deprecated Use {@link HlsMultivariantPlaylist} instead. + */ +@Deprecated +public final class HlsMasterPlaylist extends HlsMultivariantPlaylist { /** - * @param baseUri See {@link #baseUri}. - * @param tags See {@link #tags}. - * @param variants See {@link #variants}. - * @param videos See {@link #videos}. - * @param audios See {@link #audios}. - * @param subtitles See {@link #subtitles}. - * @param closedCaptions See {@link #closedCaptions}. - * @param muxedAudioFormat See {@link #muxedAudioFormat}. - * @param muxedCaptionFormats See {@link #muxedCaptionFormats}. - * @param hasIndependentSegments See {@link #hasIndependentSegments}. - * @param variableDefinitions See {@link #variableDefinitions}. - * @param sessionKeyDrmInitData See {@link #sessionKeyDrmInitData}. + * Creates an HLS multivariant playlist. + * + * @deprecated Use {@link HlsMultivariantPlaylist#HlsMultivariantPlaylist} instead. 
*/ + @Deprecated public HlsMasterPlaylist( String baseUri, List tags, @@ -213,118 +46,18 @@ public HlsMasterPlaylist( boolean hasIndependentSegments, Map variableDefinitions, List sessionKeyDrmInitData) { - super(baseUri, tags, hasIndependentSegments); - this.mediaPlaylistUrls = - Collections.unmodifiableList( - getMediaPlaylistUrls(variants, videos, audios, subtitles, closedCaptions)); - this.variants = Collections.unmodifiableList(variants); - this.videos = Collections.unmodifiableList(videos); - this.audios = Collections.unmodifiableList(audios); - this.subtitles = Collections.unmodifiableList(subtitles); - this.closedCaptions = Collections.unmodifiableList(closedCaptions); - this.muxedAudioFormat = muxedAudioFormat; - this.muxedCaptionFormats = muxedCaptionFormats != null - ? Collections.unmodifiableList(muxedCaptionFormats) : null; - this.variableDefinitions = Collections.unmodifiableMap(variableDefinitions); - this.sessionKeyDrmInitData = Collections.unmodifiableList(sessionKeyDrmInitData); - } - - @Override - public HlsMasterPlaylist copy(List streamKeys) { - return new HlsMasterPlaylist( + super( baseUri, tags, - copyStreams(variants, GROUP_INDEX_VARIANT, streamKeys), - // TODO: Allow stream keys to specify video renditions to be retained. - /* videos= */ Collections.emptyList(), - copyStreams(audios, GROUP_INDEX_AUDIO, streamKeys), - copyStreams(subtitles, GROUP_INDEX_SUBTITLE, streamKeys), - // TODO: Update to retain all closed captions. - /* closedCaptions= */ Collections.emptyList(), + variants, + videos, + audios, + subtitles, + closedCaptions, muxedAudioFormat, muxedCaptionFormats, hasIndependentSegments, variableDefinitions, sessionKeyDrmInitData); } - - /** - * Creates a playlist with a single variant. - * - * @param variantUrl The url of the single variant. - * @return A master playlist with a single variant for the provided url. 
- */ - public static HlsMasterPlaylist createSingleVariantMasterPlaylist(String variantUrl) { - List variant = - Collections.singletonList(Variant.createMediaPlaylistVariantUrl(Uri.parse(variantUrl))); - return new HlsMasterPlaylist( - /* baseUri= */ "", - /* tags= */ Collections.emptyList(), - variant, - /* videos= */ Collections.emptyList(), - /* audios= */ Collections.emptyList(), - /* subtitles= */ Collections.emptyList(), - /* closedCaptions= */ Collections.emptyList(), - /* muxedAudioFormat= */ null, - /* muxedCaptionFormats= */ null, - /* hasIndependentSegments= */ false, - /* variableDefinitions= */ Collections.emptyMap(), - /* sessionKeyDrmInitData= */ Collections.emptyList()); - } - - private static List getMediaPlaylistUrls( - List variants, - List videos, - List audios, - List subtitles, - List closedCaptions) { - ArrayList mediaPlaylistUrls = new ArrayList<>(); - for (int i = 0; i < variants.size(); i++) { - Uri uri = variants.get(i).url; - if (!mediaPlaylistUrls.contains(uri)) { - mediaPlaylistUrls.add(uri); - } - } - addMediaPlaylistUrls(videos, mediaPlaylistUrls); - addMediaPlaylistUrls(audios, mediaPlaylistUrls); - addMediaPlaylistUrls(subtitles, mediaPlaylistUrls); - addMediaPlaylistUrls(closedCaptions, mediaPlaylistUrls); - return mediaPlaylistUrls; - } - - private static void addMediaPlaylistUrls(List renditions, List out) { - for (int i = 0; i < renditions.size(); i++) { - Uri uri = renditions.get(i).url; - if (uri != null && !out.contains(uri)) { - out.add(uri); - } - } - } - - private static List copyStreams( - List streams, int groupIndex, List streamKeys) { - List copiedStreams = new ArrayList<>(streamKeys.size()); - // TODO: - // 1. When variants with the same URL are not de-duplicated, duplicates must not increment - // trackIndex so as to avoid breaking stream keys that have been persisted for offline. All - // duplicates should be copied if the first variant is copied, or discarded otherwise. - // 2. When renditions with null URLs are permitted, they must not increment trackIndex so as to - // avoid breaking stream keys that have been persisted for offline. All renitions with null - // URLs should be copied. They may become unreachable if all variants that reference them are - // removed, but this is OK. - // 3. Renditions with URLs matching copied variants should always themselves be copied, even if - // the corresponding stream key is omitted. Else we're throwing away information for no gain. 
- for (int i = 0; i < streams.size(); i++) { - T stream = streams.get(i); - for (int j = 0; j < streamKeys.size(); j++) { - StreamKey streamKey = streamKeys.get(j); - if (streamKey.groupIndex == groupIndex && streamKey.trackIndex == i) { - copiedStreams.add(stream); - break; - } - } - } - return copiedStreams; - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsMediaPlaylist.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsMediaPlaylist.java index 58f500cf94..9a77f444ea 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsMediaPlaylist.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsMediaPlaylist.java @@ -15,85 +15,100 @@ */ package com.google.android.exoplayer2.source.hls.playlist; +import static java.lang.Math.max; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.net.Uri; import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.offline.StreamKey; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; -import java.util.Collections; +import java.lang.annotation.Target; +import java.util.ArrayList; import java.util.List; +import java.util.Map; /** Represents an HLS media playlist. */ public final class HlsMediaPlaylist extends HlsPlaylist { - /** Media segment reference. */ - @SuppressWarnings("ComparableType") - public static final class Segment implements Comparable { + /** Server control attributes. */ + public static final class ServerControl { /** - * The url of the segment. - */ - public final String url; - /** - * The media initialization section for this segment, as defined by #EXT-X-MAP. May be null if - * the media playlist does not define a media section for this segment. The same instance is - * used for all segments that share an EXT-X-MAP tag. - */ - @Nullable public final Segment initializationSegment; - /** The duration of the segment in microseconds, as defined by #EXTINF. */ - public final long durationUs; - /** The human readable title of the segment. */ - public final String title; - /** - * The number of #EXT-X-DISCONTINUITY tags in the playlist before the segment. - */ - public final int relativeDiscontinuitySequence; - /** - * The start time of the segment in microseconds, relative to the start of the playlist. - */ - public final long relativeStartTimeUs; - /** - * DRM initialization data for sample decryption, or null if the segment does not use CDM-DRM - * protection. + * The skip boundary for delta updates in microseconds, or {@link C#TIME_UNSET} if delta updates + * are not supported. */ - @Nullable public final DrmInitData drmInitData; + public final long skipUntilUs; /** - * The encryption identity key uri as defined by #EXT-X-KEY, or null if the segment does not use - * full segment encryption with identity key. + * Whether the playlist can produce delta updates that skip older #EXT-X-DATERANGE tags in + * addition to media segments. 
*/ - @Nullable public final String fullSegmentEncryptionKeyUri; + public final boolean canSkipDateRanges; /** - * The encryption initialization vector as defined by #EXT-X-KEY, or null if the segment is not - * encrypted. + * The server-recommended live offset in microseconds, or {@link C#TIME_UNSET} if none defined. */ - @Nullable public final String encryptionIV; + public final long holdBackUs; /** - * The segment's byte range offset, as defined by #EXT-X-BYTERANGE. + * The server-recommended live offset in microseconds in low-latency mode, or {@link + * C#TIME_UNSET} if none defined. */ - public final long byterangeOffset; + public final long partHoldBackUs; + /** Whether the server supports blocking playlist reload. */ + public final boolean canBlockReload; + /** - * The segment's byte range length, as defined by #EXT-X-BYTERANGE, or {@link C#LENGTH_UNSET} if - * no byte range is specified. + * Creates a new instance. + * + * @param skipUntilUs See {@link #skipUntilUs}. + * @param canSkipDateRanges See {@link #canSkipDateRanges}. + * @param holdBackUs See {@link #holdBackUs}. + * @param partHoldBackUs See {@link #partHoldBackUs}. + * @param canBlockReload See {@link #canBlockReload}. */ - public final long byterangeLength; + public ServerControl( + long skipUntilUs, + boolean canSkipDateRanges, + long holdBackUs, + long partHoldBackUs, + boolean canBlockReload) { + this.skipUntilUs = skipUntilUs; + this.canSkipDateRanges = canSkipDateRanges; + this.holdBackUs = holdBackUs; + this.partHoldBackUs = partHoldBackUs; + this.canBlockReload = canBlockReload; + } + } - /** Whether the segment is tagged with #EXT-X-GAP. */ - public final boolean hasGapTag; + /** Media segment reference. */ + @SuppressWarnings("ComparableType") + public static final class Segment extends SegmentBase { + + /** The human readable title of the segment. */ + public final String title; + /** The parts belonging to this segment. */ + public final List parts; /** + * Creates an instance to be used as init segment. + * * @param uri See {@link #url}. - * @param byterangeOffset See {@link #byterangeOffset}. - * @param byterangeLength See {@link #byterangeLength}. + * @param byteRangeOffset See {@link #byteRangeOffset}. + * @param byteRangeLength See {@link #byteRangeLength}. * @param fullSegmentEncryptionKeyUri See {@link #fullSegmentEncryptionKeyUri}. * @param encryptionIV See {@link #encryptionIV}. */ public Segment( String uri, - long byterangeOffset, - long byterangeLength, + long byteRangeOffset, + long byteRangeLength, @Nullable String fullSegmentEncryptionKeyUri, @Nullable String encryptionIV) { this( @@ -106,12 +121,15 @@ public Segment( /* drmInitData= */ null, fullSegmentEncryptionKeyUri, encryptionIV, - byterangeOffset, - byterangeLength, - /* hasGapTag= */ false); + byteRangeOffset, + byteRangeLength, + /* hasGapTag= */ false, + /* parts= */ ImmutableList.of()); } /** + * Creates an instance. + * * @param url See {@link #url}. * @param initializationSegment See {@link #initializationSegment}. * @param title See {@link #title}. @@ -121,9 +139,10 @@ public Segment( * @param drmInitData See {@link #drmInitData}. * @param fullSegmentEncryptionKeyUri See {@link #fullSegmentEncryptionKeyUri}. * @param encryptionIV See {@link #encryptionIV}. - * @param byterangeOffset See {@link #byterangeOffset}. - * @param byterangeLength See {@link #byterangeLength}. + * @param byteRangeOffset See {@link #byteRangeOffset}. + * @param byteRangeLength See {@link #byteRangeLength}. * @param hasGapTag See {@link #hasGapTag}. 
+ * @param parts See {@link #parts}. */ public Segment( String url, @@ -135,29 +154,231 @@ public Segment( @Nullable DrmInitData drmInitData, @Nullable String fullSegmentEncryptionKeyUri, @Nullable String encryptionIV, - long byterangeOffset, - long byterangeLength, + long byteRangeOffset, + long byteRangeLength, + boolean hasGapTag, + List parts) { + super( + url, + initializationSegment, + durationUs, + relativeDiscontinuitySequence, + relativeStartTimeUs, + drmInitData, + fullSegmentEncryptionKeyUri, + encryptionIV, + byteRangeOffset, + byteRangeLength, + hasGapTag); + this.title = title; + this.parts = ImmutableList.copyOf(parts); + } + + public Segment copyWith(long relativeStartTimeUs, int relativeDiscontinuitySequence) { + List updatedParts = new ArrayList<>(); + long relativePartStartTimeUs = relativeStartTimeUs; + for (int i = 0; i < parts.size(); i++) { + Part part = parts.get(i); + updatedParts.add(part.copyWith(relativePartStartTimeUs, relativeDiscontinuitySequence)); + relativePartStartTimeUs += part.durationUs; + } + return new Segment( + url, + initializationSegment, + title, + durationUs, + relativeDiscontinuitySequence, + relativeStartTimeUs, + drmInitData, + fullSegmentEncryptionKeyUri, + encryptionIV, + byteRangeOffset, + byteRangeLength, + hasGapTag, + updatedParts); + } + } + + /** A media part. */ + public static final class Part extends SegmentBase { + + /** Whether the part is independent. */ + public final boolean isIndependent; + /** Whether the part is a preloading part. */ + public final boolean isPreload; + + /** + * Creates an instance. + * + * @param url See {@link #url}. + * @param initializationSegment See {@link #initializationSegment}. + * @param durationUs See {@link #durationUs}. + * @param relativeDiscontinuitySequence See {@link #relativeDiscontinuitySequence}. + * @param relativeStartTimeUs See {@link #relativeStartTimeUs}. + * @param drmInitData See {@link #drmInitData}. + * @param fullSegmentEncryptionKeyUri See {@link #fullSegmentEncryptionKeyUri}. + * @param encryptionIV See {@link #encryptionIV}. + * @param byteRangeOffset See {@link #byteRangeOffset}. + * @param byteRangeLength See {@link #byteRangeLength}. + * @param hasGapTag See {@link #hasGapTag}. + * @param isIndependent See {@link #isIndependent}. + * @param isPreload See {@link #isPreload}. + */ + public Part( + String url, + @Nullable Segment initializationSegment, + long durationUs, + int relativeDiscontinuitySequence, + long relativeStartTimeUs, + @Nullable DrmInitData drmInitData, + @Nullable String fullSegmentEncryptionKeyUri, + @Nullable String encryptionIV, + long byteRangeOffset, + long byteRangeLength, + boolean hasGapTag, + boolean isIndependent, + boolean isPreload) { + super( + url, + initializationSegment, + durationUs, + relativeDiscontinuitySequence, + relativeStartTimeUs, + drmInitData, + fullSegmentEncryptionKeyUri, + encryptionIV, + byteRangeOffset, + byteRangeLength, + hasGapTag); + this.isIndependent = isIndependent; + this.isPreload = isPreload; + } + + public Part copyWith(long relativeStartTimeUs, int relativeDiscontinuitySequence) { + return new Part( + url, + initializationSegment, + durationUs, + relativeDiscontinuitySequence, + relativeStartTimeUs, + drmInitData, + fullSegmentEncryptionKeyUri, + encryptionIV, + byteRangeOffset, + byteRangeLength, + hasGapTag, + isIndependent, + isPreload); + } + } + + /** The base for a {@link Segment} or a {@link Part} required for playback. 
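Segment.copyWith above re-bases a copied segment and all of its parts onto a new relative start time, which the delta-update merge needs whenever the merged playlist starts at a later media sequence. A standalone sketch of the accumulation it performs, with illustrative durations:

public final class PartRebaseSketch {
  public static void main(String[] args) {
    // A 3s segment made of three 1s parts, re-based to start 14s into the merged playlist.
    long newSegmentStartUs = 14_000_000L;
    long[] partDurationsUs = {1_000_000L, 1_000_000L, 1_000_000L};

    // Same accumulation as Segment.copyWith: each part starts where the previous one ended.
    long relativePartStartTimeUs = newSegmentStartUs;
    for (int i = 0; i < partDurationsUs.length; i++) {
      System.out.println("part " + i + " now starts at " + relativePartStartTimeUs + " us");
      relativePartStartTimeUs += partDurationsUs[i];
    }
  }
}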
*/ + @SuppressWarnings("ComparableType") + public static class SegmentBase implements Comparable { + /** The url of the segment. */ + public final String url; + /** + * The media initialization section for this segment, as defined by #EXT-X-MAP. May be null if + * the media playlist does not define a media initialization section for this segment. The same + * instance is used for all segments that share an EXT-X-MAP tag. + */ + @Nullable public final Segment initializationSegment; + /** The duration of the segment in microseconds, as defined by #EXTINF or #EXT-X-PART. */ + public final long durationUs; + /** The number of #EXT-X-DISCONTINUITY tags in the playlist before the segment. */ + public final int relativeDiscontinuitySequence; + /** The start time of the segment in microseconds, relative to the start of the playlist. */ + public final long relativeStartTimeUs; + /** + * DRM initialization data for sample decryption, or null if the segment does not use CDM-DRM + * protection. + */ + @Nullable public final DrmInitData drmInitData; + /** + * The encryption identity key uri as defined by #EXT-X-KEY, or null if the segment does not use + * full segment encryption with identity key. + */ + @Nullable public final String fullSegmentEncryptionKeyUri; + /** + * The encryption initialization vector as defined by #EXT-X-KEY, or null if the segment is not + * encrypted. + */ + @Nullable public final String encryptionIV; + /** + * The segment's byte range offset, as defined by #EXT-X-BYTERANGE, #EXT-X-PART or + * #EXT-X-PRELOAD-HINT. + */ + public final long byteRangeOffset; + /** + * The segment's byte range length, as defined by #EXT-X-BYTERANGE, #EXT-X-PART or + * #EXT-X-PRELOAD-HINT, or {@link C#LENGTH_UNSET} if no byte range is specified or the byte + * range is open-ended. + */ + public final long byteRangeLength; + /** Whether the segment is marked as a gap. */ + public final boolean hasGapTag; + + private SegmentBase( + String url, + @Nullable Segment initializationSegment, + long durationUs, + int relativeDiscontinuitySequence, + long relativeStartTimeUs, + @Nullable DrmInitData drmInitData, + @Nullable String fullSegmentEncryptionKeyUri, + @Nullable String encryptionIV, + long byteRangeOffset, + long byteRangeLength, boolean hasGapTag) { this.url = url; this.initializationSegment = initializationSegment; - this.title = title; this.durationUs = durationUs; this.relativeDiscontinuitySequence = relativeDiscontinuitySequence; this.relativeStartTimeUs = relativeStartTimeUs; this.drmInitData = drmInitData; this.fullSegmentEncryptionKeyUri = fullSegmentEncryptionKeyUri; this.encryptionIV = encryptionIV; - this.byterangeOffset = byterangeOffset; - this.byterangeLength = byterangeLength; + this.byteRangeOffset = byteRangeOffset; + this.byteRangeLength = byteRangeLength; this.hasGapTag = hasGapTag; } @Override public int compareTo(Long relativeStartTimeUs) { return this.relativeStartTimeUs > relativeStartTimeUs - ? 1 : (this.relativeStartTimeUs < relativeStartTimeUs ? -1 : 0); + ? 1 + : (this.relativeStartTimeUs < relativeStartTimeUs ? -1 : 0); } + } + /** + * A rendition report for an alternative rendition defined in another media playlist. + * + *
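SegmentBase compares itself against a bare Long (hence the @SuppressWarnings("ComparableType") above); upstream the class is declared Comparable<Long>, although this flattened copy of the patch has lost the angle-bracketed type parameters. That choice lets callers binary-search a segment list by relative start time, assuming the list is in playlist order, as sketched here:

import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist;
import java.util.Collections;
import java.util.List;

public final class SegmentLookupSketch {
  /**
   * Returns the index of the segment that starts at or before relativeTimeUs, or -1 if the time
   * is before the first segment.
   */
  static int indexOfSegmentAt(List<HlsMediaPlaylist.Segment> segments, long relativeTimeUs) {
    // compareTo(Long) makes the sorted segment list searchable with a plain Long key.
    int index = Collections.binarySearch(segments, Long.valueOf(relativeTimeUs));
    return index >= 0 ? index : -(index + 2); // Insertion point minus one on a miss.
  }
}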
<p>
        See RFC 8216, section 4.4.5.1.4. + */ + public static final class RenditionReport { + /** The URI of the media playlist of the reported rendition. */ + public final Uri playlistUri; + /** The last media sequence that is in the playlist of the reported rendition. */ + public final long lastMediaSequence; + /** + * The last part index that is in the playlist of the reported rendition, or {@link + * C#INDEX_UNSET} if the rendition does not contain partial segments. + */ + public final int lastPartIndex; + + /** + * Creates a new instance. + * + * @param playlistUri See {@link #playlistUri}. + * @param lastMediaSequence See {@link #lastMediaSequence}. + * @param lastPartIndex See {@link #lastPartIndex}. + */ + public RenditionReport(Uri playlistUri, long lastMediaSequence, int lastPartIndex) { + this.playlistUri = playlistUri; + this.lastMediaSequence = lastMediaSequence; + this.lastPartIndex = lastPartIndex; + } } /** @@ -166,6 +387,7 @@ public int compareTo(Long relativeStartTimeUs) { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({PLAYLIST_TYPE_UNKNOWN, PLAYLIST_TYPE_VOD, PLAYLIST_TYPE_EVENT}) public @interface PlaylistType {} @@ -173,23 +395,28 @@ public int compareTo(Long relativeStartTimeUs) { public static final int PLAYLIST_TYPE_VOD = 1; public static final int PLAYLIST_TYPE_EVENT = 2; + /** The type of the playlist. See {@link PlaylistType}. */ + public final @PlaylistType int playlistType; /** - * The type of the playlist. See {@link PlaylistType}. + * The start offset in microseconds from the beginning of the playlist, as defined by + * #EXT-X-START, or {@link C#TIME_UNSET} if undefined. The value is guaranteed to be between 0 and + * {@link #durationUs}, inclusive. */ - @PlaylistType public final int playlistType; + public final long startOffsetUs; /** - * The start offset in microseconds, as defined by #EXT-X-START. + * Whether the {@link #startOffsetUs} was explicitly defined by #EXT-X-START as a positive value + * or zero. */ - public final long startOffsetUs; + public final boolean hasPositiveStartOffset; + /** Whether the start position should be precise, as defined by #EXT-X-START. */ + public final boolean preciseStart; /** * If {@link #hasProgramDateTime} is true, contains the datetime as microseconds since epoch. * Otherwise, contains the aggregated duration of removed segments up to this snapshot of the * playlist. */ public final long startTimeUs; - /** - * Whether the playlist contains the #EXT-X-DISCONTINUITY-SEQUENCE tag. - */ + /** Whether the playlist contains the #EXT-X-DISCONTINUITY-SEQUENCE tag. */ public final boolean hasDiscontinuitySequence; /** * The discontinuity sequence number of the first media segment in the playlist, as defined by @@ -201,35 +428,36 @@ public int compareTo(Long relativeStartTimeUs) { * #EXT-X-MEDIA-SEQUENCE. */ public final long mediaSequence; - /** - * The compatibility version, as defined by #EXT-X-VERSION. - */ + /** The compatibility version, as defined by #EXT-X-VERSION. */ public final int version; - /** - * The target duration in microseconds, as defined by #EXT-X-TARGETDURATION. - */ + /** The target duration in microseconds, as defined by #EXT-X-TARGETDURATION. */ public final long targetDurationUs; /** - * Whether the playlist contains the #EXT-X-ENDLIST tag. + * The target duration for segment parts, as defined by #EXT-X-PART-INF, or {@link C#TIME_UNSET} + * if undefined. */ + public final long partTargetDurationUs; + /** Whether the playlist contains the #EXT-X-ENDLIST tag. 
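Rendition reports give a player that is about to switch renditions the last media sequence and part index of the other playlist, so it can issue a blocking reload immediately instead of fetching a stale snapshot first. One plausible use, sketched with the _HLS_msn/_HLS_part query parameters used for blocking reloads; the "+1" targeting of the next segment and part is an interpretation, not something this patch defines.

import android.net.Uri;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist.RenditionReport;

public final class BlockingReloadUriSketch {
  /** Builds a blocking-reload URI for the reported rendition. */
  static Uri blockingReloadUri(RenditionReport report) {
    Uri.Builder builder =
        report.playlistUri
            .buildUpon()
            .appendQueryParameter("_HLS_msn", String.valueOf(report.lastMediaSequence + 1));
    if (report.lastPartIndex != C.INDEX_UNSET) {
      builder.appendQueryParameter("_HLS_part", String.valueOf(report.lastPartIndex + 1));
    }
    return builder.build();
  }
}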
*/ public final boolean hasEndTag; - /** - * Whether the playlist contains a #EXT-X-PROGRAM-DATE-TIME tag. - */ + /** Whether the playlist contains a #EXT-X-PROGRAM-DATE-TIME tag. */ public final boolean hasProgramDateTime; /** * Contains the CDM protection schemes used by segments in this playlist. Does not contain any key * acquisition data. Null if none of the segments in the playlist is CDM-encrypted. */ @Nullable public final DrmInitData protectionSchemes; - /** - * The list of segments in the playlist. - */ + /** The list of segments in the playlist. */ public final List segments; /** - * The total duration of the playlist in microseconds. + * The list of parts at the end of the playlist for which the segment is not in the playlist yet. */ + public final List trailingParts; + /** The rendition reports of alternative rendition playlists. */ + public final Map renditionReports; + /** The total duration of the playlist in microseconds. */ public final long durationUs; + /** The attributes of the #EXT-X-SERVER-CONTROL header. */ + public final ServerControl serverControl; /** * @param playlistType See {@link #playlistType}. @@ -244,46 +472,70 @@ public int compareTo(Long relativeStartTimeUs) { * @param targetDurationUs See {@link #targetDurationUs}. * @param hasIndependentSegments See {@link #hasIndependentSegments}. * @param hasEndTag See {@link #hasEndTag}. - * @param protectionSchemes See {@link #protectionSchemes}. * @param hasProgramDateTime See {@link #hasProgramDateTime}. + * @param protectionSchemes See {@link #protectionSchemes}. * @param segments See {@link #segments}. + * @param trailingParts See {@link #trailingParts}. + * @param serverControl See {@link #serverControl} + * @param renditionReports See {@link #renditionReports}. */ public HlsMediaPlaylist( @PlaylistType int playlistType, String baseUri, List tags, long startOffsetUs, + boolean preciseStart, long startTimeUs, boolean hasDiscontinuitySequence, int discontinuitySequence, long mediaSequence, int version, long targetDurationUs, + long partTargetDurationUs, boolean hasIndependentSegments, boolean hasEndTag, boolean hasProgramDateTime, @Nullable DrmInitData protectionSchemes, - List segments) { + List segments, + List trailingParts, + ServerControl serverControl, + Map renditionReports) { super(baseUri, tags, hasIndependentSegments); this.playlistType = playlistType; this.startTimeUs = startTimeUs; + this.preciseStart = preciseStart; this.hasDiscontinuitySequence = hasDiscontinuitySequence; this.discontinuitySequence = discontinuitySequence; this.mediaSequence = mediaSequence; this.version = version; this.targetDurationUs = targetDurationUs; + this.partTargetDurationUs = partTargetDurationUs; this.hasEndTag = hasEndTag; this.hasProgramDateTime = hasProgramDateTime; this.protectionSchemes = protectionSchemes; - this.segments = Collections.unmodifiableList(segments); - if (!segments.isEmpty()) { - Segment last = segments.get(segments.size() - 1); - durationUs = last.relativeStartTimeUs + last.durationUs; + this.segments = ImmutableList.copyOf(segments); + this.trailingParts = ImmutableList.copyOf(trailingParts); + this.renditionReports = ImmutableMap.copyOf(renditionReports); + if (!trailingParts.isEmpty()) { + Part lastPart = Iterables.getLast(trailingParts); + durationUs = lastPart.relativeStartTimeUs + lastPart.durationUs; + } else if (!segments.isEmpty()) { + Segment lastSegment = Iterables.getLast(segments); + durationUs = lastSegment.relativeStartTimeUs + lastSegment.durationUs; } else { durationUs = 0; } - 
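With trailing parts in play the live edge can sit inside a partial segment, so the constructor takes the playlist duration from the last trailing part when one exists and only falls back to the last full segment otherwise; the #EXT-X-START offset is then resolved against that duration, with negative values counted back from the end and everything clamped into [0, durationUs], as the RFC 8216 note added just below explains. A standalone sketch with illustrative numbers:

import static java.lang.Math.max;
import static java.lang.Math.min;

public final class DurationAndStartOffsetSketch {
  private static final long TIME_UNSET = Long.MIN_VALUE + 1; // Stand-in for C.TIME_UNSET.

  public static void main(String[] args) {
    // Last full segment: 24s..30s. Two 1s trailing parts follow it: 30s..31s and 31s..32s.
    long durationFromSegmentsUs = 24_000_000L + 6_000_000L; // Used only without trailing parts.
    long durationUs = 31_000_000L + 1_000_000L;             // Trailing parts win: 32s.

    // #EXT-X-START:TIME-OFFSET=-10 => 10s before the end, clamped into [0, durationUs].
    long rawStartOffsetUs = -10_000_000L;
    long startOffsetUs =
        rawStartOffsetUs == TIME_UNSET
            ? TIME_UNSET
            : rawStartOffsetUs >= 0
                ? min(durationUs, rawStartOffsetUs)
                : max(0, durationUs + rawStartOffsetUs);

    System.out.println(durationFromSegmentsUs + " " + durationUs + " " + startOffsetUs); // 30s 32s 22s.
  }
}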
this.startOffsetUs = startOffsetUs == C.TIME_UNSET ? C.TIME_UNSET - : startOffsetUs >= 0 ? startOffsetUs : durationUs + startOffsetUs; + // From RFC 8216, section 4.4.2.2: If startOffsetUs is negative, it indicates the offset from + // the end of the playlist. If the absolute value exceeds the duration of the playlist, it + // indicates the beginning (if negative) or the end (if positive) of the playlist. + this.startOffsetUs = + startOffsetUs == C.TIME_UNSET + ? C.TIME_UNSET + : startOffsetUs >= 0 + ? min(durationUs, startOffsetUs) + : max(0, durationUs + startOffsetUs); + this.hasPositiveStartOffset = startOffsetUs >= 0; + this.serverControl = serverControl; } @Override @@ -297,7 +549,7 @@ public HlsMediaPlaylist copy(List streamKeys) { * @param other The playlist to compare. * @return Whether this playlist is newer than {@code other}. */ - public boolean isNewerThan(HlsMediaPlaylist other) { + public boolean isNewerThan(@Nullable HlsMediaPlaylist other) { if (other == null || mediaSequence > other.mediaSequence) { return true; } @@ -305,15 +557,17 @@ public boolean isNewerThan(HlsMediaPlaylist other) { return false; } // The media sequences are equal. - int segmentCount = segments.size(); - int otherSegmentCount = other.segments.size(); - return segmentCount > otherSegmentCount - || (segmentCount == otherSegmentCount && hasEndTag && !other.hasEndTag); + int segmentCountDifference = segments.size() - other.segments.size(); + if (segmentCountDifference != 0) { + return segmentCountDifference > 0; + } + int partCount = trailingParts.size(); + int otherPartCount = other.trailingParts.size(); + return partCount > otherPartCount + || (partCount == otherPartCount && hasEndTag && !other.hasEndTag); } - /** - * Returns the result of adding the duration of the playlist to its start time. - */ + /** Returns the result of adding the duration of the playlist to its start time. */ public long getEndTimeUs() { return startTimeUs + durationUs; } @@ -333,17 +587,22 @@ public HlsMediaPlaylist copyWith(long startTimeUs, int discontinuitySequence) { baseUri, tags, startOffsetUs, + preciseStart, startTimeUs, /* hasDiscontinuitySequence= */ true, discontinuitySequence, mediaSequence, version, targetDurationUs, + partTargetDurationUs, hasIndependentSegments, hasEndTag, hasProgramDateTime, protectionSchemes, - segments); + segments, + trailingParts, + serverControl, + renditionReports); } /** @@ -359,17 +618,21 @@ public HlsMediaPlaylist copyWithEndTag() { baseUri, tags, startOffsetUs, + preciseStart, startTimeUs, hasDiscontinuitySequence, discontinuitySequence, mediaSequence, version, targetDurationUs, + partTargetDurationUs, hasIndependentSegments, /* hasEndTag= */ true, hasProgramDateTime, protectionSchemes, - segments); + segments, + trailingParts, + serverControl, + renditionReports); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsMultivariantPlaylist.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsMultivariantPlaylist.java new file mode 100644 index 0000000000..ae04015c2a --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsMultivariantPlaylist.java @@ -0,0 +1,320 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.hls.playlist; + +import android.net.Uri; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.drm.DrmInitData; +import com.google.android.exoplayer2.offline.StreamKey; +import com.google.android.exoplayer2.util.MimeTypes; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +/** Represents an HLS multivariant playlist. */ +// TODO(b/211458101): Make non-final once HlsMasterPlaylist is removed. +public class HlsMultivariantPlaylist extends HlsPlaylist { + + /** Represents an empty multivariant playlist, from which no attributes can be inherited. */ + public static final HlsMultivariantPlaylist EMPTY = + new HlsMultivariantPlaylist( + /* baseUri= */ "", + /* tags= */ Collections.emptyList(), + /* variants= */ Collections.emptyList(), + /* videos= */ Collections.emptyList(), + /* audios= */ Collections.emptyList(), + /* subtitles= */ Collections.emptyList(), + /* closedCaptions= */ Collections.emptyList(), + /* muxedAudioFormat= */ null, + /* muxedCaptionFormats= */ Collections.emptyList(), + /* hasIndependentSegments= */ false, + /* variableDefinitions= */ Collections.emptyMap(), + /* sessionKeyDrmInitData= */ Collections.emptyList()); + + // These constants must not be changed because they are persisted in offline stream keys. + public static final int GROUP_INDEX_VARIANT = 0; + public static final int GROUP_INDEX_AUDIO = 1; + public static final int GROUP_INDEX_SUBTITLE = 2; + + /** A variant (i.e. an #EXT-X-STREAM-INF tag) in a multivariant playlist. */ + public static final class Variant { + + /** The variant's url. */ + public final Uri url; + + /** Format information associated with this variant. */ + public final Format format; + + /** The video rendition group referenced by this variant, or {@code null}. */ + @Nullable public final String videoGroupId; + + /** The audio rendition group referenced by this variant, or {@code null}. */ + @Nullable public final String audioGroupId; + + /** The subtitle rendition group referenced by this variant, or {@code null}. */ + @Nullable public final String subtitleGroupId; + + /** The caption rendition group referenced by this variant, or {@code null}. */ + @Nullable public final String captionGroupId; + + /** + * @param url See {@link #url}. + * @param format See {@link #format}. + * @param videoGroupId See {@link #videoGroupId}. + * @param audioGroupId See {@link #audioGroupId}. + * @param subtitleGroupId See {@link #subtitleGroupId}. + * @param captionGroupId See {@link #captionGroupId}. + */ + public Variant( + Uri url, + Format format, + @Nullable String videoGroupId, + @Nullable String audioGroupId, + @Nullable String subtitleGroupId, + @Nullable String captionGroupId) { + this.url = url; + this.format = format; + this.videoGroupId = videoGroupId; + this.audioGroupId = audioGroupId; + this.subtitleGroupId = subtitleGroupId; + this.captionGroupId = captionGroupId; + } + + /** + * Creates a variant for a given media playlist url. 
+ * + * @param url The media playlist url. + * @return The variant instance. + */ + public static Variant createMediaPlaylistVariantUrl(Uri url) { + Format format = + new Format.Builder().setId("0").setContainerMimeType(MimeTypes.APPLICATION_M3U8).build(); + return new Variant( + url, + format, + /* videoGroupId= */ null, + /* audioGroupId= */ null, + /* subtitleGroupId= */ null, + /* captionGroupId= */ null); + } + + /** Returns a copy of this instance with the given {@link Format}. */ + public Variant copyWithFormat(Format format) { + return new Variant(url, format, videoGroupId, audioGroupId, subtitleGroupId, captionGroupId); + } + } + + /** A rendition (i.e. an #EXT-X-MEDIA tag) in a multivariant playlist. */ + public static final class Rendition { + + /** The rendition's url, or null if the tag does not have a URI attribute. */ + @Nullable public final Uri url; + + /** Format information associated with this rendition. */ + public final Format format; + + /** The group to which this rendition belongs. */ + public final String groupId; + + /** The name of the rendition. */ + public final String name; + + /** + * @param url See {@link #url}. + * @param format See {@link #format}. + * @param groupId See {@link #groupId}. + * @param name See {@link #name}. + */ + public Rendition(@Nullable Uri url, Format format, String groupId, String name) { + this.url = url; + this.format = format; + this.groupId = groupId; + this.name = name; + } + } + + /** All of the media playlist URLs referenced by the playlist. */ + public final List mediaPlaylistUrls; + /** The variants declared by the playlist. */ + public final List variants; + /** The video renditions declared by the playlist. */ + public final List videos; + /** The audio renditions declared by the playlist. */ + public final List audios; + /** The subtitle renditions declared by the playlist. */ + public final List subtitles; + /** The closed caption renditions declared by the playlist. */ + public final List closedCaptions; + + /** + * The format of the audio muxed in the variants. May be null if the playlist does not declare any + * muxed audio. + */ + @Nullable public final Format muxedAudioFormat; + /** + * The format of the closed captions declared by the playlist. May be empty if the playlist + * explicitly declares no captions are available, or null if the playlist does not declare any + * captions information. + */ + @Nullable public final List muxedCaptionFormats; + /** Contains variable definitions, as defined by the #EXT-X-DEFINE tag. */ + public final Map variableDefinitions; + /** DRM initialization data derived from #EXT-X-SESSION-KEY tags. */ + public final List sessionKeyDrmInitData; + + /** + * @param baseUri See {@link #baseUri}. + * @param tags See {@link #tags}. + * @param variants See {@link #variants}. + * @param videos See {@link #videos}. + * @param audios See {@link #audios}. + * @param subtitles See {@link #subtitles}. + * @param closedCaptions See {@link #closedCaptions}. + * @param muxedAudioFormat See {@link #muxedAudioFormat}. + * @param muxedCaptionFormats See {@link #muxedCaptionFormats}. + * @param hasIndependentSegments See {@link #hasIndependentSegments}. + * @param variableDefinitions See {@link #variableDefinitions}. + * @param sessionKeyDrmInitData See {@link #sessionKeyDrmInitData}. 
+ */ + public HlsMultivariantPlaylist( + String baseUri, + List tags, + List variants, + List videos, + List audios, + List subtitles, + List closedCaptions, + @Nullable Format muxedAudioFormat, + @Nullable List muxedCaptionFormats, + boolean hasIndependentSegments, + Map variableDefinitions, + List sessionKeyDrmInitData) { + super(baseUri, tags, hasIndependentSegments); + this.mediaPlaylistUrls = + Collections.unmodifiableList( + getMediaPlaylistUrls(variants, videos, audios, subtitles, closedCaptions)); + this.variants = Collections.unmodifiableList(variants); + this.videos = Collections.unmodifiableList(videos); + this.audios = Collections.unmodifiableList(audios); + this.subtitles = Collections.unmodifiableList(subtitles); + this.closedCaptions = Collections.unmodifiableList(closedCaptions); + this.muxedAudioFormat = muxedAudioFormat; + this.muxedCaptionFormats = + muxedCaptionFormats != null ? Collections.unmodifiableList(muxedCaptionFormats) : null; + this.variableDefinitions = Collections.unmodifiableMap(variableDefinitions); + this.sessionKeyDrmInitData = Collections.unmodifiableList(sessionKeyDrmInitData); + } + + @Override + public HlsMultivariantPlaylist copy(List streamKeys) { + return new HlsMultivariantPlaylist( + baseUri, + tags, + copyStreams(variants, GROUP_INDEX_VARIANT, streamKeys), + // TODO: Allow stream keys to specify video renditions to be retained. + /* videos= */ Collections.emptyList(), + copyStreams(audios, GROUP_INDEX_AUDIO, streamKeys), + copyStreams(subtitles, GROUP_INDEX_SUBTITLE, streamKeys), + // TODO: Update to retain all closed captions. + /* closedCaptions= */ Collections.emptyList(), + muxedAudioFormat, + muxedCaptionFormats, + hasIndependentSegments, + variableDefinitions, + sessionKeyDrmInitData); + } + + /** + * Creates a playlist with a single variant. + * + * @param variantUrl The url of the single variant. + * @return A multivariant playlist with a single variant for the provided url. 
+ */ + public static HlsMultivariantPlaylist createSingleVariantMultivariantPlaylist(String variantUrl) { + List variant = + Collections.singletonList(Variant.createMediaPlaylistVariantUrl(Uri.parse(variantUrl))); + return new HlsMultivariantPlaylist( + /* baseUri= */ "", + /* tags= */ Collections.emptyList(), + variant, + /* videos= */ Collections.emptyList(), + /* audios= */ Collections.emptyList(), + /* subtitles= */ Collections.emptyList(), + /* closedCaptions= */ Collections.emptyList(), + /* muxedAudioFormat= */ null, + /* muxedCaptionFormats= */ null, + /* hasIndependentSegments= */ false, + /* variableDefinitions= */ Collections.emptyMap(), + /* sessionKeyDrmInitData= */ Collections.emptyList()); + } + + private static List getMediaPlaylistUrls( + List variants, + List videos, + List audios, + List subtitles, + List closedCaptions) { + ArrayList mediaPlaylistUrls = new ArrayList<>(); + for (int i = 0; i < variants.size(); i++) { + Uri uri = variants.get(i).url; + if (!mediaPlaylistUrls.contains(uri)) { + mediaPlaylistUrls.add(uri); + } + } + addMediaPlaylistUrls(videos, mediaPlaylistUrls); + addMediaPlaylistUrls(audios, mediaPlaylistUrls); + addMediaPlaylistUrls(subtitles, mediaPlaylistUrls); + addMediaPlaylistUrls(closedCaptions, mediaPlaylistUrls); + return mediaPlaylistUrls; + } + + private static void addMediaPlaylistUrls(List renditions, List out) { + for (int i = 0; i < renditions.size(); i++) { + Uri uri = renditions.get(i).url; + if (uri != null && !out.contains(uri)) { + out.add(uri); + } + } + } + + private static List copyStreams( + List streams, int groupIndex, List streamKeys) { + List copiedStreams = new ArrayList<>(streamKeys.size()); + // TODO: + // 1. When variants with the same URL are not de-duplicated, duplicates must not increment + // trackIndex so as to avoid breaking stream keys that have been persisted for offline. All + // duplicates should be copied if the first variant is copied, or discarded otherwise. + // 2. When renditions with null URLs are permitted, they must not increment trackIndex so as to + // avoid breaking stream keys that have been persisted for offline. All renitions with null + // URLs should be copied. They may become unreachable if all variants that reference them are + // removed, but this is OK. + // 3. Renditions with URLs matching copied variants should always themselves be copied, even if + // the corresponding stream key is omitted. Else we're throwing away information for no gain. + for (int i = 0; i < streams.size(); i++) { + T stream = streams.get(i); + for (int j = 0; j < streamKeys.size(); j++) { + StreamKey streamKey = streamKeys.get(j); + if (streamKey.groupIndex == groupIndex && streamKey.streamIndex == i) { + copiedStreams.add(stream); + break; + } + } + } + return copiedStreams; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylist.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylist.java index 9cec1cd33b..17d31d4ceb 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylist.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylist.java @@ -22,13 +22,9 @@ /** Represents an HLS playlist. */ public abstract class HlsPlaylist implements FilterableManifest { - /** - * The base uri. Used to resolve relative paths. - */ + /** The base uri. Used to resolve relative paths. 
*/ public final String baseUri; - /** - * The list of tags in the playlist. - */ + /** The list of tags in the playlist. */ public final List tags; /** * Whether the media is formed of independent segments, as defined by the @@ -46,5 +42,4 @@ protected HlsPlaylist(String baseUri, List tags, boolean hasIndependentS this.tags = Collections.unmodifiableList(tags); this.hasIndependentSegments = hasIndependentSegments; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylistParser.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylistParser.java index 6bd447f746..39e5ff627e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylistParser.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylistParser.java @@ -15,6 +15,10 @@ */ package com.google.android.exoplayer2.source.hls.playlist; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Util.castNonNull; + import android.net.Uri; import android.text.TextUtils; import android.util.Base64; @@ -26,21 +30,25 @@ import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; import com.google.android.exoplayer2.extractor.mp4.PsshAtomUtil; import com.google.android.exoplayer2.metadata.Metadata; -import com.google.android.exoplayer2.source.UnrecognizedInputFormatException; import com.google.android.exoplayer2.source.hls.HlsTrackMetadataEntry; import com.google.android.exoplayer2.source.hls.HlsTrackMetadataEntry.VariantInfo; -import com.google.android.exoplayer2.source.hls.playlist.HlsMasterPlaylist.Rendition; -import com.google.android.exoplayer2.source.hls.playlist.HlsMasterPlaylist.Variant; +import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist.Part; +import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist.RenditionReport; import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist.Segment; +import com.google.android.exoplayer2.source.hls.playlist.HlsMultivariantPlaylist.Rendition; +import com.google.android.exoplayer2.source.hls.playlist.HlsMultivariantPlaylist.Variant; import com.google.android.exoplayer2.upstream.ParsingLoadable; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.UriUtil; import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.Iterables; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.math.BigDecimal; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collections; @@ -56,11 +64,14 @@ import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf; import org.checkerframework.checker.nullness.qual.PolyNull; -/** - * HLS playlists parsing logic. - */ +/** HLS playlists parsing logic. */ public final class HlsPlaylistParser implements ParsingLoadable.Parser { + /** Exception thrown when merging a delta update fails. 
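DeltaUpdateException (declared just above) is thrown when a delta playlist skips segments that the previously loaded snapshot no longer contains, so the merge cannot be completed; the expected reaction is to retry without requesting a delta. A sketch of that fallback, where PlaylistLoader and its two request methods are hypothetical stand-ins for whatever loading code drives the parser.

import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist;
import com.google.android.exoplayer2.source.hls.playlist.HlsPlaylistParser.DeltaUpdateException;
import java.io.IOException;

public final class DeltaFallbackSketch {

  /** Hypothetical loader abstraction: one delta request, one full request. */
  interface PlaylistLoader {
    HlsMediaPlaylist requestDelta() throws IOException;
    HlsMediaPlaylist requestFull() throws IOException;
  }

  static HlsMediaPlaylist loadWithFallback(PlaylistLoader loader) throws IOException {
    try {
      return loader.requestDelta();
    } catch (DeltaUpdateException e) {
      // The previous snapshot no longer covers the skipped range; re-request the full playlist.
      return loader.requestFull();
    }
  }
}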
*/ + public static final class DeltaUpdateException extends IOException {} + + private static final String LOG_TAG = "HlsPlaylistParser"; + private static final String PLAYLIST_HEADER = "#EXTM3U"; private static final String TAG_PREFIX = "#EXT"; @@ -68,7 +79,12 @@ public final class HlsPlaylistParser implements ParsingLoadable.Parser> urlToVariantInfos = new HashMap<>(); HashMap variableDefinitions = new HashMap<>(); ArrayList variants = new ArrayList<>(); @@ -281,6 +346,7 @@ private static HlsMasterPlaylist parseMasterPlaylist(LineIterator iterator, Stri // We expose all tags through the playlist. tags.add(line); } + boolean isIFrameOnlyVariant = line.startsWith(TAG_I_FRAME_STREAM_INF); if (line.startsWith(TAG_DEFINE)) { variableDefinitions.put( @@ -301,10 +367,10 @@ private static HlsMasterPlaylist parseMasterPlaylist(LineIterator iterator, Stri String scheme = parseEncryptionScheme(method); sessionKeyDrmInitData.add(new DrmInitData(scheme, schemeData)); } - } else if (line.startsWith(TAG_STREAM_INF)) { + } else if (line.startsWith(TAG_STREAM_INF) || isIFrameOnlyVariant) { noClosedCaptions |= line.contains(ATTR_CLOSED_CAPTIONS_NONE); - int bitrate = parseIntAttr(line, REGEX_BANDWIDTH); - // TODO: Plumb this into Format. + int roleFlags = isIFrameOnlyVariant ? C.ROLE_FLAG_TRICK_PLAY : 0; + int peakBitrate = parseIntAttr(line, REGEX_BANDWIDTH); int averageBitrate = parseOptionalIntAttr(line, REGEX_AVERAGE_BANDWIDTH, -1); String codecs = parseOptionalStringAttr(line, REGEX_CODECS, variableDefinitions); String resolutionString = @@ -312,7 +378,7 @@ private static HlsMasterPlaylist parseMasterPlaylist(LineIterator iterator, Stri int width; int height; if (resolutionString != null) { - String[] widthAndHeight = resolutionString.split("x"); + String[] widthAndHeight = Util.split(resolutionString, "x"); width = Integer.parseInt(widthAndHeight[0]); height = Integer.parseInt(widthAndHeight[1]); if (width <= 0 || height <= 0) { @@ -336,40 +402,48 @@ private static HlsMasterPlaylist parseMasterPlaylist(LineIterator iterator, Stri parseOptionalStringAttr(line, REGEX_SUBTITLES, variableDefinitions); String closedCaptionsGroupId = parseOptionalStringAttr(line, REGEX_CLOSED_CAPTIONS, variableDefinitions); - if (!iterator.hasNext()) { - throw new ParserException("#EXT-X-STREAM-INF tag must be followed by another line"); + Uri uri; + if (isIFrameOnlyVariant) { + uri = + UriUtil.resolveToUri(baseUri, parseStringAttr(line, REGEX_URI, variableDefinitions)); + } else if (!iterator.hasNext()) { + throw ParserException.createForMalformedManifest( + "#EXT-X-STREAM-INF must be followed by another line", /* cause= */ null); + } else { + // The following line contains #EXT-X-STREAM-INF's URI. + line = replaceVariableReferences(iterator.next(), variableDefinitions); + uri = UriUtil.resolveToUri(baseUri, line); } - line = - replaceVariableReferences( - iterator.next(), variableDefinitions); // #EXT-X-STREAM-INF's URI. 
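The variant branch now also accepts #EXT-X-I-FRAME-STREAM-INF (marking it with C.ROLE_FLAG_TRICK_PLAY) and records BANDWIDTH and AVERAGE-BANDWIDTH separately as peak and average bitrate. A simplified standalone sketch of pulling those attributes out of a tag line; the patterns below are stand-ins for the parser's own REGEX_* constants, not copies of them.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public final class StreamInfAttrSketch {
  // Anchored on ':' or ',' so BANDWIDTH does not also match inside AVERAGE-BANDWIDTH.
  private static final Pattern BANDWIDTH = Pattern.compile("[:,]BANDWIDTH=(\\d+)");
  private static final Pattern AVERAGE_BANDWIDTH = Pattern.compile("[:,]AVERAGE-BANDWIDTH=(\\d+)");
  private static final Pattern RESOLUTION = Pattern.compile("RESOLUTION=(\\d+)x(\\d+)");

  public static void main(String[] args) {
    String line =
        "#EXT-X-STREAM-INF:BANDWIDTH=2500000,AVERAGE-BANDWIDTH=2200000,"
            + "RESOLUTION=1280x720,CODECS=\"avc1.64001f,mp4a.40.2\"";

    int peakBitrate = matchInt(BANDWIDTH, line);
    int averageBitrate = matchInt(AVERAGE_BANDWIDTH, line);
    Matcher resolution = RESOLUTION.matcher(line);
    int width = resolution.find() ? Integer.parseInt(resolution.group(1)) : -1;
    int height = width != -1 ? Integer.parseInt(resolution.group(2)) : -1;

    System.out.println(peakBitrate + " peak, " + averageBitrate + " avg, " + width + "x" + height);
  }

  private static int matchInt(Pattern pattern, String line) {
    Matcher matcher = pattern.matcher(line);
    return matcher.find() ? Integer.parseInt(matcher.group(1)) : -1; // -1 as a NO_VALUE stand-in.
  }
}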
- Uri uri = UriUtil.resolveToUri(baseUri, line); + Format format = - Format.createVideoContainerFormat( - /* id= */ Integer.toString(variants.size()), - /* label= */ null, - /* containerMimeType= */ MimeTypes.APPLICATION_M3U8, - /* sampleMimeType= */ null, - codecs, - /* metadata= */ null, - bitrate, - width, - height, - frameRate, - /* initializationData= */ null, - /* selectionFlags= */ 0, - /* roleFlags= */ 0); + new Format.Builder() + .setId(variants.size()) + .setContainerMimeType(MimeTypes.APPLICATION_M3U8) + .setCodecs(codecs) + .setAverageBitrate(averageBitrate) + .setPeakBitrate(peakBitrate) + .setWidth(width) + .setHeight(height) + .setFrameRate(frameRate) + .setRoleFlags(roleFlags) + .build(); Variant variant = new Variant( uri, format, videoGroupId, audioGroupId, subtitlesGroupId, closedCaptionsGroupId); variants.add(variant); - ArrayList variantInfosForUrl = urlToVariantInfos.get(uri); + @Nullable ArrayList variantInfosForUrl = urlToVariantInfos.get(uri); if (variantInfosForUrl == null) { variantInfosForUrl = new ArrayList<>(); urlToVariantInfos.put(uri, variantInfosForUrl); } variantInfosForUrl.add( new VariantInfo( - bitrate, videoGroupId, audioGroupId, subtitlesGroupId, closedCaptionsGroupId)); + averageBitrate, + peakBitrate, + videoGroupId, + audioGroupId, + subtitlesGroupId, + closedCaptionsGroupId)); } } @@ -384,10 +458,10 @@ private static HlsMasterPlaylist parseMasterPlaylist(LineIterator iterator, Stri new HlsTrackMetadataEntry( /* groupId= */ null, /* name= */ null, - Assertions.checkNotNull(urlToVariantInfos.get(variant.url))); - deduplicatedVariants.add( - variant.copyWithFormat( - variant.format.copyWithMetadata(new Metadata(hlsMetadataEntry)))); + checkNotNull(urlToVariantInfos.get(variant.url))); + Metadata metadata = new Metadata(hlsMetadataEntry); + Format format = variant.format.buildUpon().setMetadata(metadata).build(); + deduplicatedVariants.add(variant.copyWithFormat(format)); } } @@ -395,141 +469,105 @@ private static HlsMasterPlaylist parseMasterPlaylist(LineIterator iterator, Stri line = mediaTags.get(i); String groupId = parseStringAttr(line, REGEX_GROUP_ID, variableDefinitions); String name = parseStringAttr(line, REGEX_NAME, variableDefinitions); - String referenceUri = parseOptionalStringAttr(line, REGEX_URI, variableDefinitions); - Uri uri = referenceUri == null ? null : UriUtil.resolveToUri(baseUri, referenceUri); - String language = parseOptionalStringAttr(line, REGEX_LANGUAGE, variableDefinitions); - @C.SelectionFlags int selectionFlags = parseSelectionFlags(line); - @C.RoleFlags int roleFlags = parseRoleFlags(line, variableDefinitions); - String formatId = groupId + ":" + name; - Format format; + Format.Builder formatBuilder = + new Format.Builder() + .setId(groupId + ":" + name) + .setLabel(name) + .setContainerMimeType(MimeTypes.APPLICATION_M3U8) + .setSelectionFlags(parseSelectionFlags(line)) + .setRoleFlags(parseRoleFlags(line, variableDefinitions)) + .setLanguage(parseOptionalStringAttr(line, REGEX_LANGUAGE, variableDefinitions)); + + @Nullable String referenceUri = parseOptionalStringAttr(line, REGEX_URI, variableDefinitions); + @Nullable Uri uri = referenceUri == null ? 
null : UriUtil.resolveToUri(baseUri, referenceUri); Metadata metadata = new Metadata(new HlsTrackMetadataEntry(groupId, name, Collections.emptyList())); switch (parseStringAttr(line, REGEX_TYPE, variableDefinitions)) { case TYPE_VIDEO: - Variant variant = getVariantWithVideoGroup(variants, groupId); - String codecs = null; - int width = Format.NO_VALUE; - int height = Format.NO_VALUE; - float frameRate = Format.NO_VALUE; + @Nullable Variant variant = getVariantWithVideoGroup(variants, groupId); if (variant != null) { Format variantFormat = variant.format; - codecs = Util.getCodecsOfType(variantFormat.codecs, C.TRACK_TYPE_VIDEO); - width = variantFormat.width; - height = variantFormat.height; - frameRate = variantFormat.frameRate; + @Nullable + String codecs = Util.getCodecsOfType(variantFormat.codecs, C.TRACK_TYPE_VIDEO); + formatBuilder + .setCodecs(codecs) + .setSampleMimeType(MimeTypes.getMediaMimeType(codecs)) + .setWidth(variantFormat.width) + .setHeight(variantFormat.height) + .setFrameRate(variantFormat.frameRate); } - String sampleMimeType = codecs != null ? MimeTypes.getMediaMimeType(codecs) : null; - format = - Format.createVideoContainerFormat( - /* id= */ formatId, - /* label= */ name, - /* containerMimeType= */ MimeTypes.APPLICATION_M3U8, - sampleMimeType, - codecs, - /* metadata= */ null, - /* bitrate= */ Format.NO_VALUE, - width, - height, - frameRate, - /* initializationData= */ null, - selectionFlags, - roleFlags) - .copyWithMetadata(metadata); if (uri == null) { // TODO: Remove this case and add a Rendition with a null uri to videos. } else { - videos.add(new Rendition(uri, format, groupId, name)); + formatBuilder.setMetadata(metadata); + videos.add(new Rendition(uri, formatBuilder.build(), groupId, name)); } break; case TYPE_AUDIO: + @Nullable String sampleMimeType = null; variant = getVariantWithAudioGroup(variants, groupId); - codecs = - variant != null - ? Util.getCodecsOfType(variant.format.codecs, C.TRACK_TYPE_AUDIO) - : null; - sampleMimeType = codecs != null ? MimeTypes.getMediaMimeType(codecs) : null; + if (variant != null) { + @Nullable + String codecs = Util.getCodecsOfType(variant.format.codecs, C.TRACK_TYPE_AUDIO); + formatBuilder.setCodecs(codecs); + sampleMimeType = MimeTypes.getMediaMimeType(codecs); + } + @Nullable String channelsString = parseOptionalStringAttr(line, REGEX_CHANNELS, variableDefinitions); - int channelCount = Format.NO_VALUE; if (channelsString != null) { - channelCount = Integer.parseInt(Util.splitAtFirst(channelsString, "/")[0]); + int channelCount = Integer.parseInt(Util.splitAtFirst(channelsString, "/")[0]); + formatBuilder.setChannelCount(channelCount); if (MimeTypes.AUDIO_E_AC3.equals(sampleMimeType) && channelsString.endsWith("/JOC")) { sampleMimeType = MimeTypes.AUDIO_E_AC3_JOC; + formatBuilder.setCodecs(MimeTypes.CODEC_E_AC3_JOC); } } - format = - Format.createAudioContainerFormat( - /* id= */ formatId, - /* label= */ name, - /* containerMimeType= */ MimeTypes.APPLICATION_M3U8, - sampleMimeType, - codecs, - /* metadata= */ null, - /* bitrate= */ Format.NO_VALUE, - channelCount, - /* sampleRate= */ Format.NO_VALUE, - /* initializationData= */ null, - selectionFlags, - roleFlags, - language); - if (uri == null) { + formatBuilder.setSampleMimeType(sampleMimeType); + if (uri != null) { + formatBuilder.setMetadata(metadata); + audios.add(new Rendition(uri, formatBuilder.build(), groupId, name)); + } else if (variant != null) { // TODO: Remove muxedAudioFormat and add a Rendition with a null uri to audios. 
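The audio branch above reads the CHANNELS attribute as "count[/JOC]": the number before the first slash becomes the channel count, and a /JOC suffix on an E-AC-3 rendition upgrades the sample MIME type to E-AC-3 JOC. A standalone sketch of that branch, with the MIME strings written out literally in place of the MimeTypes constants:

public final class ChannelsAttrSketch {
  public static void main(String[] args) {
    String sampleMimeType = "audio/eac3"; // MimeTypes.AUDIO_E_AC3, derived from the variant's codecs.
    String channelsString = "16/JOC";     // CHANNELS attribute value from #EXT-X-MEDIA.

    // Everything before the first '/' is the channel count.
    int channelCount = Integer.parseInt(channelsString.split("/", 2)[0]);

    // A trailing "/JOC" on an E-AC-3 rendition means E-AC-3 with joint object coding.
    if ("audio/eac3".equals(sampleMimeType) && channelsString.endsWith("/JOC")) {
      sampleMimeType = "audio/eac3-joc"; // MimeTypes.AUDIO_E_AC3_JOC
    }

    System.out.println(channelCount + " channels, " + sampleMimeType);
  }
}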
- muxedAudioFormat = format; - } else { - audios.add(new Rendition(uri, format.copyWithMetadata(metadata), groupId, name)); + muxedAudioFormat = formatBuilder.build(); } break; case TYPE_SUBTITLES: - codecs = null; sampleMimeType = null; variant = getVariantWithSubtitleGroup(variants, groupId); if (variant != null) { - codecs = Util.getCodecsOfType(variant.format.codecs, C.TRACK_TYPE_TEXT); + @Nullable + String codecs = Util.getCodecsOfType(variant.format.codecs, C.TRACK_TYPE_TEXT); + formatBuilder.setCodecs(codecs); sampleMimeType = MimeTypes.getMediaMimeType(codecs); } if (sampleMimeType == null) { sampleMimeType = MimeTypes.TEXT_VTT; } - format = - Format.createTextContainerFormat( - /* id= */ formatId, - /* label= */ name, - /* containerMimeType= */ MimeTypes.APPLICATION_M3U8, - sampleMimeType, - codecs, - /* bitrate= */ Format.NO_VALUE, - selectionFlags, - roleFlags, - language) - .copyWithMetadata(metadata); - subtitles.add(new Rendition(uri, format, groupId, name)); + formatBuilder.setSampleMimeType(sampleMimeType).setMetadata(metadata); + if (uri != null) { + subtitles.add(new Rendition(uri, formatBuilder.build(), groupId, name)); + } else { + Log.w(LOG_TAG, "EXT-X-MEDIA tag with missing mandatory URI attribute: skipping"); + } break; case TYPE_CLOSED_CAPTIONS: String instreamId = parseStringAttr(line, REGEX_INSTREAM_ID, variableDefinitions); - String mimeType; int accessibilityChannel; if (instreamId.startsWith("CC")) { - mimeType = MimeTypes.APPLICATION_CEA608; + sampleMimeType = MimeTypes.APPLICATION_CEA608; accessibilityChannel = Integer.parseInt(instreamId.substring(2)); } else /* starts with SERVICE */ { - mimeType = MimeTypes.APPLICATION_CEA708; + sampleMimeType = MimeTypes.APPLICATION_CEA708; accessibilityChannel = Integer.parseInt(instreamId.substring(7)); } if (muxedCaptionFormats == null) { muxedCaptionFormats = new ArrayList<>(); } - muxedCaptionFormats.add( - Format.createTextContainerFormat( - /* id= */ formatId, - /* label= */ name, - /* containerMimeType= */ null, - /* sampleMimeType= */ mimeType, - /* codecs= */ null, - /* bitrate= */ Format.NO_VALUE, - selectionFlags, - roleFlags, - language, - accessibilityChannel)); + formatBuilder + .setSampleMimeType(sampleMimeType) + .setAccessibilityChannel(accessibilityChannel); + muxedCaptionFormats.add(formatBuilder.build()); // TODO: Remove muxedCaptionFormats and add a Rendition with a null uri to closedCaptions. break; default: @@ -542,7 +580,7 @@ private static HlsMasterPlaylist parseMasterPlaylist(LineIterator iterator, Stri muxedCaptionFormats = Collections.emptyList(); } - return new HlsMasterPlaylist( + return new HlsMultivariantPlaylist( baseUri, tags, deduplicatedVariants, @@ -591,17 +629,26 @@ private static Variant getVariantWithSubtitleGroup(ArrayList variants, } private static HlsMediaPlaylist parseMediaPlaylist( - HlsMasterPlaylist masterPlaylist, LineIterator iterator, String baseUri) throws IOException { + HlsMultivariantPlaylist multivariantPlaylist, + @Nullable HlsMediaPlaylist previousMediaPlaylist, + LineIterator iterator, + String baseUri) + throws IOException { @HlsMediaPlaylist.PlaylistType int playlistType = HlsMediaPlaylist.PLAYLIST_TYPE_UNKNOWN; long startOffsetUs = C.TIME_UNSET; long mediaSequence = 0; int version = 1; // Default version == 1. 
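Closed-caption renditions carry an INSTREAM-ID of either "CCn" (CEA-608) or "SERVICEn" (CEA-708); the numeric suffix becomes the accessibility channel of the muxed caption format, exactly as the branch above does. A standalone sketch with literal MIME strings in place of the MimeTypes constants:

public final class InstreamIdSketch {
  public static void main(String[] args) {
    for (String instreamId : new String[] {"CC1", "SERVICE3"}) {
      String sampleMimeType;
      int accessibilityChannel;
      if (instreamId.startsWith("CC")) {
        sampleMimeType = "application/cea-608"; // MimeTypes.APPLICATION_CEA608
        accessibilityChannel = Integer.parseInt(instreamId.substring(2));
      } else { // Otherwise the id starts with "SERVICE".
        sampleMimeType = "application/cea-708"; // MimeTypes.APPLICATION_CEA708
        accessibilityChannel = Integer.parseInt(instreamId.substring("SERVICE".length()));
      }
      System.out.println(instreamId + " -> " + sampleMimeType + ", channel " + accessibilityChannel);
    }
  }
}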
long targetDurationUs = C.TIME_UNSET; - boolean hasIndependentSegmentsTag = masterPlaylist.hasIndependentSegments; + long partTargetDurationUs = C.TIME_UNSET; + boolean hasIndependentSegmentsTag = multivariantPlaylist.hasIndependentSegments; boolean hasEndTag = false; - Segment initializationSegment = null; + @Nullable Segment initializationSegment = null; HashMap variableDefinitions = new HashMap<>(); + HashMap urlToInferredInitSegment = new HashMap<>(); List segments = new ArrayList<>(); + List trailingParts = new ArrayList<>(); + @Nullable Part preloadPart = null; + List renditionReports = new ArrayList<>(); List tags = new ArrayList<>(); long segmentDurationUs = 0; @@ -611,17 +658,28 @@ private static HlsMediaPlaylist parseMediaPlaylist( int relativeDiscontinuitySequence = 0; long playlistStartTimeUs = 0; long segmentStartTimeUs = 0; + boolean preciseStart = false; long segmentByteRangeOffset = 0; long segmentByteRangeLength = C.LENGTH_UNSET; + long partStartTimeUs = 0; + long partByteRangeOffset = 0; + boolean isIFrameOnly = false; long segmentMediaSequence = 0; boolean hasGapTag = false; + HlsMediaPlaylist.ServerControl serverControl = + new HlsMediaPlaylist.ServerControl( + /* skipUntilUs= */ C.TIME_UNSET, + /* canSkipDateRanges= */ false, + /* holdBackUs= */ C.TIME_UNSET, + /* partHoldBackUs= */ C.TIME_UNSET, + /* canBlockReload= */ false); - DrmInitData playlistProtectionSchemes = null; - String fullSegmentEncryptionKeyUri = null; - String fullSegmentEncryptionIV = null; + @Nullable DrmInitData playlistProtectionSchemes = null; + @Nullable String fullSegmentEncryptionKeyUri = null; + @Nullable String fullSegmentEncryptionIV = null; TreeMap currentSchemeDatas = new TreeMap<>(); - String encryptionScheme = null; - DrmInitData cachedDrmInitData = null; + @Nullable String encryptionScheme = null; + @Nullable DrmInitData cachedDrmInitData = null; String line; while (iterator.hasNext()) { @@ -639,23 +697,37 @@ private static HlsMediaPlaylist parseMediaPlaylist( } else if ("EVENT".equals(playlistTypeString)) { playlistType = HlsMediaPlaylist.PLAYLIST_TYPE_EVENT; } + } else if (line.equals(TAG_IFRAME)) { + isIFrameOnly = true; } else if (line.startsWith(TAG_START)) { startOffsetUs = (long) (parseDoubleAttr(line, REGEX_TIME_OFFSET) * C.MICROS_PER_SECOND); + preciseStart = + parseOptionalBooleanAttribute(line, REGEX_PRECISE, /* defaultValue= */ false); + } else if (line.startsWith(TAG_SERVER_CONTROL)) { + serverControl = parseServerControl(line); + } else if (line.startsWith(TAG_PART_INF)) { + double partTargetDurationSeconds = parseDoubleAttr(line, REGEX_PART_TARGET_DURATION); + partTargetDurationUs = (long) (partTargetDurationSeconds * C.MICROS_PER_SECOND); } else if (line.startsWith(TAG_INIT_SEGMENT)) { String uri = parseStringAttr(line, REGEX_URI, variableDefinitions); String byteRange = parseOptionalStringAttr(line, REGEX_ATTR_BYTERANGE, variableDefinitions); if (byteRange != null) { - String[] splitByteRange = byteRange.split("@"); + String[] splitByteRange = Util.split(byteRange, "@"); segmentByteRangeLength = Long.parseLong(splitByteRange[0]); if (splitByteRange.length > 1) { segmentByteRangeOffset = Long.parseLong(splitByteRange[1]); } } + if (segmentByteRangeLength == C.LENGTH_UNSET) { + // The segment has no byte range defined. + segmentByteRangeOffset = 0; + } if (fullSegmentEncryptionKeyUri != null && fullSegmentEncryptionIV == null) { // See RFC 8216, Section 4.3.2.5. 
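Byte ranges in the playlist are "length[@offset]" strings; when the offset is omitted the range continues where the previous range for the same resource ended, which is why the parser keeps a running segmentByteRangeOffset next to segmentByteRangeLength. A standalone sketch of that bookkeeping:

public final class ByteRangeSketch {
  public static void main(String[] args) {
    long runningOffset = 0;
    // Three consecutive byte ranges; only the first carries an explicit offset.
    for (String byteRange : new String[] {"75232@0", "82112", "69864"}) {
      String[] split = byteRange.split("@", 2);
      long length = Long.parseLong(split[0]);
      if (split.length > 1) {
        runningOffset = Long.parseLong(split[1]); // An explicit offset resets the position.
      }
      System.out.println("bytes [" + runningOffset + ", " + (runningOffset + length) + ")");
      runningOffset += length; // The next omitted offset continues right after this range.
    }
  }
}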
- throw new ParserException( - "The encryption IV attribute must be present when an initialization segment is " - + "encrypted with METHOD=AES-128."); + throw ParserException.createForMalformedManifest( + "The encryption IV attribute must be present when an initialization segment is" + + " encrypted with METHOD=AES-128.", + /* cause= */ null); } initializationSegment = new Segment( @@ -664,7 +736,9 @@ private static HlsMediaPlaylist parseMediaPlaylist( segmentByteRangeLength, fullSegmentEncryptionKeyUri, fullSegmentEncryptionIV); - segmentByteRangeOffset = 0; + if (segmentByteRangeLength != C.LENGTH_UNSET) { + segmentByteRangeOffset += segmentByteRangeLength; + } segmentByteRangeLength = C.LENGTH_UNSET; } else if (line.startsWith(TAG_TARGET_DURATION)) { targetDurationUs = parseIntAttr(line, REGEX_TARGET_DURATION) * C.MICROS_PER_SECOND; @@ -676,11 +750,11 @@ private static HlsMediaPlaylist parseMediaPlaylist( } else if (line.startsWith(TAG_DEFINE)) { String importName = parseOptionalStringAttr(line, REGEX_IMPORT, variableDefinitions); if (importName != null) { - String value = masterPlaylist.variableDefinitions.get(importName); + String value = multivariantPlaylist.variableDefinitions.get(importName); if (value != null) { variableDefinitions.put(importName, value); } else { - // The master playlist does not declare the imported variable. Ignore. + // The multivariant playlist does not declare the imported variable. Ignore. } } else { variableDefinitions.put( @@ -688,9 +762,45 @@ private static HlsMediaPlaylist parseMediaPlaylist( parseStringAttr(line, REGEX_VALUE, variableDefinitions)); } } else if (line.startsWith(TAG_MEDIA_DURATION)) { - segmentDurationUs = - (long) (parseDoubleAttr(line, REGEX_MEDIA_DURATION) * C.MICROS_PER_SECOND); + segmentDurationUs = parseTimeSecondsToUs(line, REGEX_MEDIA_DURATION); segmentTitle = parseOptionalStringAttr(line, REGEX_MEDIA_TITLE, "", variableDefinitions); + } else if (line.startsWith(TAG_SKIP)) { + int skippedSegmentCount = parseIntAttr(line, REGEX_SKIPPED_SEGMENTS); + checkState(previousMediaPlaylist != null && segments.isEmpty()); + int startIndex = (int) (mediaSequence - castNonNull(previousMediaPlaylist).mediaSequence); + int endIndex = startIndex + skippedSegmentCount; + if (startIndex < 0 || endIndex > previousMediaPlaylist.segments.size()) { + // Throw to force a reload if not all segments are available in the previous playlist. + throw new DeltaUpdateException(); + } + for (int i = startIndex; i < endIndex; i++) { + Segment segment = previousMediaPlaylist.segments.get(i); + if (mediaSequence != previousMediaPlaylist.mediaSequence) { + // If the media sequences of the playlists are not the same, we need to recreate the + // object with the updated relative start time and the relative discontinuity + // sequence. With identical playlist media sequences these values do not change. 
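The #EXT-X-SKIP handling maps SKIPPED-SEGMENTS onto a window of the previously loaded snapshot: the segments at indices [mediaSequence - previous.mediaSequence, ... + SKIPPED-SEGMENTS) are copied over, and if that window is not fully covered the parser throws DeltaUpdateException to force a full reload. A small standalone sketch of the index arithmetic with illustrative numbers:

public final class SkipWindowSketch {
  /**
   * Returns {startIndex, endIndex} into the previous playlist's segments for a delta update, or
   * null if the previous snapshot no longer covers the skipped window.
   */
  static int[] skippedWindow(
      long newMediaSequence,
      long previousMediaSequence,
      int previousSegmentCount,
      int skippedSegmentCount) {
    int startIndex = (int) (newMediaSequence - previousMediaSequence);
    int endIndex = startIndex + skippedSegmentCount;
    if (startIndex < 0 || endIndex > previousSegmentCount) {
      return null; // The parser throws DeltaUpdateException in this case.
    }
    return new int[] {startIndex, endIndex};
  }

  public static void main(String[] args) {
    // Previous snapshot: media sequence 100, 12 segments. Delta update: sequence 104, skips 6.
    int[] window = skippedWindow(104, 100, 12, 6);
    System.out.println(window == null ? "reload full playlist" : window[0] + ".." + window[1]); // 4..10
  }
}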
+ int newRelativeDiscontinuitySequence = + previousMediaPlaylist.discontinuitySequence + - playlistDiscontinuitySequence + + segment.relativeDiscontinuitySequence; + segment = segment.copyWith(segmentStartTimeUs, newRelativeDiscontinuitySequence); + } + segments.add(segment); + segmentStartTimeUs += segment.durationUs; + partStartTimeUs = segmentStartTimeUs; + if (segment.byteRangeLength != C.LENGTH_UNSET) { + segmentByteRangeOffset = segment.byteRangeOffset + segment.byteRangeLength; + } + relativeDiscontinuitySequence = segment.relativeDiscontinuitySequence; + initializationSegment = segment.initializationSegment; + cachedDrmInitData = segment.drmInitData; + fullSegmentEncryptionKeyUri = segment.fullSegmentEncryptionKeyUri; + if (segment.encryptionIV == null + || !segment.encryptionIV.equals(Long.toHexString(segmentMediaSequence))) { + fullSegmentEncryptionIV = segment.encryptionIV; + } + segmentMediaSequence++; + } } else if (line.startsWith(TAG_KEY)) { String method = parseStringAttr(line, REGEX_METHOD, variableDefinitions); String keyFormat = @@ -723,7 +833,7 @@ private static HlsMediaPlaylist parseMediaPlaylist( } } else if (line.startsWith(TAG_BYTERANGE)) { String byteRange = parseStringAttr(line, REGEX_BYTERANGE, variableDefinitions); - String[] splitByteRange = byteRange.split("@"); + String[] splitByteRange = Util.split(byteRange, "@"); segmentByteRangeLength = Long.parseLong(splitByteRange[0]); if (splitByteRange.length > 1) { segmentByteRangeOffset = Long.parseLong(splitByteRange[1]); @@ -736,7 +846,7 @@ private static HlsMediaPlaylist parseMediaPlaylist( } else if (line.startsWith(TAG_PROGRAM_DATE_TIME)) { if (playlistStartTimeUs == 0) { long programDatetimeUs = - C.msToUs(Util.parseXsDateTime(line.substring(line.indexOf(':') + 1))); + Util.msToUs(Util.parseXsDateTime(line.substring(line.indexOf(':') + 1))); playlistStartTimeUs = programDatetimeUs - segmentStartTimeUs; } } else if (line.equals(TAG_GAP)) { @@ -745,37 +855,145 @@ private static HlsMediaPlaylist parseMediaPlaylist( hasIndependentSegmentsTag = true; } else if (line.equals(TAG_ENDLIST)) { hasEndTag = true; - } else if (!line.startsWith("#")) { - String segmentEncryptionIV; - if (fullSegmentEncryptionKeyUri == null) { - segmentEncryptionIV = null; - } else if (fullSegmentEncryptionIV != null) { - segmentEncryptionIV = fullSegmentEncryptionIV; - } else { - segmentEncryptionIV = Long.toHexString(segmentMediaSequence); + } else if (line.startsWith(TAG_RENDITION_REPORT)) { + long lastMediaSequence = parseOptionalLongAttr(line, REGEX_LAST_MSN, C.INDEX_UNSET); + int lastPartIndex = parseOptionalIntAttr(line, REGEX_LAST_PART, C.INDEX_UNSET); + String uri = parseStringAttr(line, REGEX_URI, variableDefinitions); + Uri playlistUri = Uri.parse(UriUtil.resolve(baseUri, uri)); + renditionReports.add(new RenditionReport(playlistUri, lastMediaSequence, lastPartIndex)); + } else if (line.startsWith(TAG_PRELOAD_HINT)) { + if (preloadPart != null) { + continue; } - + String type = parseStringAttr(line, REGEX_PRELOAD_HINT_TYPE, variableDefinitions); + if (!TYPE_PART.equals(type)) { + continue; + } + String url = parseStringAttr(line, REGEX_URI, variableDefinitions); + long byteRangeStart = + parseOptionalLongAttr(line, REGEX_BYTERANGE_START, /* defaultValue= */ C.LENGTH_UNSET); + long byteRangeLength = + parseOptionalLongAttr(line, REGEX_BYTERANGE_LENGTH, /* defaultValue= */ C.LENGTH_UNSET); + @Nullable + String segmentEncryptionIV = + getSegmentEncryptionIV( + segmentMediaSequence, fullSegmentEncryptionKeyUri, 
fullSegmentEncryptionIV); + if (cachedDrmInitData == null && !currentSchemeDatas.isEmpty()) { + SchemeData[] schemeDatas = currentSchemeDatas.values().toArray(new SchemeData[0]); + cachedDrmInitData = new DrmInitData(encryptionScheme, schemeDatas); + if (playlistProtectionSchemes == null) { + playlistProtectionSchemes = getPlaylistProtectionSchemes(encryptionScheme, schemeDatas); + } + } + if (byteRangeStart == C.LENGTH_UNSET || byteRangeLength != C.LENGTH_UNSET) { + // Skip preload part if it is an unbounded range request. + preloadPart = + new Part( + url, + initializationSegment, + /* durationUs= */ 0, + relativeDiscontinuitySequence, + partStartTimeUs, + cachedDrmInitData, + fullSegmentEncryptionKeyUri, + segmentEncryptionIV, + byteRangeStart != C.LENGTH_UNSET ? byteRangeStart : 0, + byteRangeLength, + /* hasGapTag= */ false, + /* isIndependent= */ false, + /* isPreload= */ true); + } + } else if (line.startsWith(TAG_PART)) { + @Nullable + String segmentEncryptionIV = + getSegmentEncryptionIV( + segmentMediaSequence, fullSegmentEncryptionKeyUri, fullSegmentEncryptionIV); + String url = parseStringAttr(line, REGEX_URI, variableDefinitions); + long partDurationUs = + (long) (parseDoubleAttr(line, REGEX_ATTR_DURATION) * C.MICROS_PER_SECOND); + boolean isIndependent = + parseOptionalBooleanAttribute(line, REGEX_INDEPENDENT, /* defaultValue= */ false); + // The first part of a segment is always independent if the segments are independent. + isIndependent |= hasIndependentSegmentsTag && trailingParts.isEmpty(); + boolean isGap = parseOptionalBooleanAttribute(line, REGEX_GAP, /* defaultValue= */ false); + @Nullable + String byteRange = parseOptionalStringAttr(line, REGEX_ATTR_BYTERANGE, variableDefinitions); + long partByteRangeLength = C.LENGTH_UNSET; + if (byteRange != null) { + String[] splitByteRange = Util.split(byteRange, "@"); + partByteRangeLength = Long.parseLong(splitByteRange[0]); + if (splitByteRange.length > 1) { + partByteRangeOffset = Long.parseLong(splitByteRange[1]); + } + } + if (partByteRangeLength == C.LENGTH_UNSET) { + partByteRangeOffset = 0; + } + if (cachedDrmInitData == null && !currentSchemeDatas.isEmpty()) { + SchemeData[] schemeDatas = currentSchemeDatas.values().toArray(new SchemeData[0]); + cachedDrmInitData = new DrmInitData(encryptionScheme, schemeDatas); + if (playlistProtectionSchemes == null) { + playlistProtectionSchemes = getPlaylistProtectionSchemes(encryptionScheme, schemeDatas); + } + } + trailingParts.add( + new Part( + url, + initializationSegment, + partDurationUs, + relativeDiscontinuitySequence, + partStartTimeUs, + cachedDrmInitData, + fullSegmentEncryptionKeyUri, + segmentEncryptionIV, + partByteRangeOffset, + partByteRangeLength, + isGap, + isIndependent, + /* isPreload= */ false)); + partStartTimeUs += partDurationUs; + if (partByteRangeLength != C.LENGTH_UNSET) { + partByteRangeOffset += partByteRangeLength; + } + } else if (!line.startsWith("#")) { + @Nullable + String segmentEncryptionIV = + getSegmentEncryptionIV( + segmentMediaSequence, fullSegmentEncryptionKeyUri, fullSegmentEncryptionIV); segmentMediaSequence++; + String segmentUri = replaceVariableReferences(line, variableDefinitions); + @Nullable Segment inferredInitSegment = urlToInferredInitSegment.get(segmentUri); if (segmentByteRangeLength == C.LENGTH_UNSET) { + // The segment has no byte range defined. 
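Whenever a segment or part is emitted, its AES-128 IV falls back to the current media sequence number when #EXT-X-KEY supplied a key URI but no explicit IV; that is what the getSegmentEncryptionIV calls above resolve, and the helper itself appears further down in this patch. A standalone sketch of the rule; the key URL is a placeholder.

public final class DefaultIvSketch {
  /** Explicit IV wins; otherwise the media sequence number, rendered in hex, is the IV. */
  static String segmentEncryptionIv(
      long segmentMediaSequence, String fullSegmentEncryptionKeyUri, String fullSegmentEncryptionIv) {
    if (fullSegmentEncryptionKeyUri == null) {
      return null; // The segment is not encrypted with an identity key.
    }
    return fullSegmentEncryptionIv != null
        ? fullSegmentEncryptionIv
        : Long.toHexString(segmentMediaSequence);
  }

  public static void main(String[] args) {
    // Placeholder key URI; only its presence matters here.
    System.out.println(segmentEncryptionIv(2680, "https://keys.example/placeholder", null)); // "a78"
  }
}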
segmentByteRangeOffset = 0; + } else if (isIFrameOnly && initializationSegment == null && inferredInitSegment == null) { + // The segment is a resource byte range without an initialization segment. + // As per RFC 8216, Section 4.3.3.6, we assume the initialization section exists in the + // bytes preceding the first segment in this segment's URL. + // We assume the implicit initialization segment is unencrypted, since there's no way for + // the playlist to provide an initialization vector for it. + inferredInitSegment = + new Segment( + segmentUri, + /* byteRangeOffset= */ 0, + segmentByteRangeOffset, + /* fullSegmentEncryptionKeyUri= */ null, + /* encryptionIV= */ null); + urlToInferredInitSegment.put(segmentUri, inferredInitSegment); } if (cachedDrmInitData == null && !currentSchemeDatas.isEmpty()) { SchemeData[] schemeDatas = currentSchemeDatas.values().toArray(new SchemeData[0]); cachedDrmInitData = new DrmInitData(encryptionScheme, schemeDatas); if (playlistProtectionSchemes == null) { - SchemeData[] playlistSchemeDatas = new SchemeData[schemeDatas.length]; - for (int i = 0; i < schemeDatas.length; i++) { - playlistSchemeDatas[i] = schemeDatas[i].copyWithData(null); - } - playlistProtectionSchemes = new DrmInitData(encryptionScheme, playlistSchemeDatas); + playlistProtectionSchemes = getPlaylistProtectionSchemes(encryptionScheme, schemeDatas); } } segments.add( new Segment( - replaceVariableReferences(line, variableDefinitions), - initializationSegment, + segmentUri, + initializationSegment != null ? initializationSegment : inferredInitSegment, segmentTitle, segmentDurationUs, relativeDiscontinuitySequence, @@ -785,10 +1003,13 @@ private static HlsMediaPlaylist parseMediaPlaylist( segmentEncryptionIV, segmentByteRangeOffset, segmentByteRangeLength, - hasGapTag)); + hasGapTag, + trailingParts)); segmentStartTimeUs += segmentDurationUs; + partStartTimeUs = segmentStartTimeUs; segmentDurationUs = 0; segmentTitle = ""; + trailingParts = new ArrayList<>(); if (segmentByteRangeLength != C.LENGTH_UNSET) { segmentByteRangeOffset += segmentByteRangeLength; } @@ -796,26 +1017,75 @@ private static HlsMediaPlaylist parseMediaPlaylist( hasGapTag = false; } } + + Map renditionReportMap = new HashMap<>(); + for (int i = 0; i < renditionReports.size(); i++) { + RenditionReport renditionReport = renditionReports.get(i); + long lastMediaSequence = renditionReport.lastMediaSequence; + if (lastMediaSequence == C.INDEX_UNSET) { + lastMediaSequence = mediaSequence + segments.size() - (trailingParts.isEmpty() ? 1 : 0); + } + int lastPartIndex = renditionReport.lastPartIndex; + if (lastPartIndex == C.INDEX_UNSET && partTargetDurationUs != C.TIME_UNSET) { + List lastParts = + trailingParts.isEmpty() ? 
Iterables.getLast(segments).parts : trailingParts; + lastPartIndex = lastParts.size() - 1; + } + renditionReportMap.put( + renditionReport.playlistUri, + new RenditionReport(renditionReport.playlistUri, lastMediaSequence, lastPartIndex)); + } + + if (preloadPart != null) { + trailingParts.add(preloadPart); + } + return new HlsMediaPlaylist( playlistType, baseUri, tags, startOffsetUs, + preciseStart, playlistStartTimeUs, hasDiscontinuitySequence, playlistDiscontinuitySequence, mediaSequence, version, targetDurationUs, + partTargetDurationUs, hasIndependentSegmentsTag, hasEndTag, /* hasProgramDateTime= */ playlistStartTimeUs != 0, playlistProtectionSchemes, - segments); + segments, + trailingParts, + serverControl, + renditionReportMap); + } + + private static DrmInitData getPlaylistProtectionSchemes( + @Nullable String encryptionScheme, SchemeData[] schemeDatas) { + SchemeData[] playlistSchemeDatas = new SchemeData[schemeDatas.length]; + for (int i = 0; i < schemeDatas.length; i++) { + playlistSchemeDatas[i] = schemeDatas[i].copyWithData(null); + } + return new DrmInitData(encryptionScheme, playlistSchemeDatas); } - @C.SelectionFlags - private static int parseSelectionFlags(String line) { + @Nullable + private static String getSegmentEncryptionIV( + long segmentMediaSequence, + @Nullable String fullSegmentEncryptionKeyUri, + @Nullable String fullSegmentEncryptionIV) { + if (fullSegmentEncryptionKeyUri == null) { + return null; + } else if (fullSegmentEncryptionIV != null) { + return fullSegmentEncryptionIV; + } + return Long.toHexString(segmentMediaSequence); + } + + private static @C.SelectionFlags int parseSelectionFlags(String line) { int flags = 0; if (parseOptionalBooleanAttribute(line, REGEX_DEFAULT, false)) { flags |= C.SELECTION_FLAG_DEFAULT; @@ -829,8 +1099,8 @@ private static int parseSelectionFlags(String line) { return flags; } - @C.RoleFlags - private static int parseRoleFlags(String line, Map variableDefinitions) { + private static @C.RoleFlags int parseRoleFlags( + String line, Map variableDefinitions) { String concatenatedCharacteristics = parseOptionalStringAttr(line, REGEX_CHARACTERISTICS, variableDefinitions); if (TextUtils.isEmpty(concatenatedCharacteristics)) { @@ -876,6 +1146,33 @@ private static SchemeData parseDrmSchemeData( return null; } + private static HlsMediaPlaylist.ServerControl parseServerControl(String line) { + double skipUntilSeconds = + parseOptionalDoubleAttr(line, REGEX_CAN_SKIP_UNTIL, /* defaultValue= */ C.TIME_UNSET); + long skipUntilUs = + skipUntilSeconds == C.TIME_UNSET + ? C.TIME_UNSET + : (long) (skipUntilSeconds * C.MICROS_PER_SECOND); + boolean canSkipDateRanges = + parseOptionalBooleanAttribute(line, REGEX_CAN_SKIP_DATE_RANGES, /* defaultValue= */ false); + double holdBackSeconds = + parseOptionalDoubleAttr(line, REGEX_HOLD_BACK, /* defaultValue= */ C.TIME_UNSET); + long holdBackUs = + holdBackSeconds == C.TIME_UNSET + ? C.TIME_UNSET + : (long) (holdBackSeconds * C.MICROS_PER_SECOND); + double partHoldBackSeconds = parseOptionalDoubleAttr(line, REGEX_PART_HOLD_BACK, C.TIME_UNSET); + long partHoldBackUs = + partHoldBackSeconds == C.TIME_UNSET + ? 
C.TIME_UNSET + : (long) (partHoldBackSeconds * C.MICROS_PER_SECOND); + boolean canBlockReload = + parseOptionalBooleanAttribute(line, REGEX_CAN_BLOCK_RELOAD, /* defaultValue= */ false); + + return new HlsMediaPlaylist.ServerControl( + skipUntilUs, canSkipDateRanges, holdBackUs, partHoldBackUs, canBlockReload); + } + private static String parseEncryptionScheme(String method) { return METHOD_SAMPLE_AES_CENC.equals(method) || METHOD_SAMPLE_AES_CTR.equals(method) ? C.CENC_TYPE_cenc @@ -889,7 +1186,7 @@ private static int parseIntAttr(String line, Pattern pattern) throws ParserExcep private static int parseOptionalIntAttr(String line, Pattern pattern, int defaultValue) { Matcher matcher = pattern.matcher(line); if (matcher.find()) { - return Integer.parseInt(matcher.group(1)); + return Integer.parseInt(checkNotNull(matcher.group(1))); } return defaultValue; } @@ -898,6 +1195,20 @@ private static long parseLongAttr(String line, Pattern pattern) throws ParserExc return Long.parseLong(parseStringAttr(line, pattern, Collections.emptyMap())); } + private static long parseOptionalLongAttr(String line, Pattern pattern, long defaultValue) { + Matcher matcher = pattern.matcher(line); + if (matcher.find()) { + return Long.parseLong(checkNotNull(matcher.group(1))); + } + return defaultValue; + } + + private static long parseTimeSecondsToUs(String line, Pattern pattern) throws ParserException { + String timeValueSeconds = parseStringAttr(line, pattern, Collections.emptyMap()); + BigDecimal timeValue = new BigDecimal(timeValueSeconds); + return timeValue.multiply(new BigDecimal(C.MICROS_PER_SECOND)).longValue(); + } + private static double parseDoubleAttr(String line, Pattern pattern) throws ParserException { return Double.parseDouble(parseStringAttr(line, pattern, Collections.emptyMap())); } @@ -909,11 +1220,13 @@ private static String parseStringAttr( if (value != null) { return value; } else { - throw new ParserException("Couldn't match " + pattern.pattern() + " in " + line); + throw ParserException.createForMalformedManifest( + "Couldn't match " + pattern.pattern() + " in " + line, /* cause= */ null); } } - private static @Nullable String parseOptionalStringAttr( + @Nullable + private static String parseOptionalStringAttr( String line, Pattern pattern, Map variableDefinitions) { return parseOptionalStringAttr(line, pattern, null, variableDefinitions); } @@ -924,12 +1237,20 @@ private static String parseStringAttr( @PolyNull String defaultValue, Map variableDefinitions) { Matcher matcher = pattern.matcher(line); - String value = matcher.find() ? matcher.group(1) : defaultValue; + @PolyNull String value = matcher.find() ? checkNotNull(matcher.group(1)) : defaultValue; return variableDefinitions.isEmpty() || value == null ? 
value : replaceVariableReferences(value, variableDefinitions); } + private static double parseOptionalDoubleAttr(String line, Pattern pattern, double defaultValue) { + Matcher matcher = pattern.matcher(line); + if (matcher.find()) { + return Double.parseDouble(checkNotNull(matcher.group(1))); + } + return defaultValue; + } + private static String replaceVariableReferences( String string, Map variableDefinitions) { Matcher matcher = REGEX_VARIABLE_REFERENCE.matcher(string); @@ -952,7 +1273,7 @@ private static boolean parseOptionalBooleanAttribute( String line, Pattern pattern, boolean defaultValue) { Matcher matcher = pattern.matcher(line); if (matcher.find()) { - return matcher.group(1).equals(BOOLEAN_TRUE); + return BOOLEAN_TRUE.equals(matcher.group(1)); } return defaultValue; } @@ -979,7 +1300,7 @@ public boolean hasNext() throws IOException { return true; } if (!extraLines.isEmpty()) { - next = Assertions.checkNotNull(extraLines.poll()); + next = checkNotNull(extraLines.poll()); return true; } while ((next = reader.readLine()) != null) { @@ -1001,7 +1322,5 @@ public String next() throws IOException { throw new NoSuchElementException(); } } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylistParserFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylistParserFactory.java index 814060bf7d..64fddad6e9 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylistParserFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylistParserFactory.java @@ -15,6 +15,7 @@ */ package com.google.android.exoplayer2.source.hls.playlist; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.upstream.ParsingLoadable; /** Factory for {@link HlsPlaylist} parsers. */ @@ -28,11 +29,16 @@ public interface HlsPlaylistParserFactory { /** * Returns a playlist parser for playlists that were referenced by the given {@link - * HlsMasterPlaylist}. Returned {@link HlsMediaPlaylist} instances may inherit attributes from - * {@code masterPlaylist}. + * HlsMultivariantPlaylist}. Returned {@link HlsMediaPlaylist} instances may inherit attributes + * from {@code multivariantPlaylist}. * - * @param masterPlaylist The master playlist that referenced any parsed media playlists. + * @param multivariantPlaylist The multivariant playlist that referenced any parsed media + * playlists. + * @param previousMediaPlaylist The previous media playlist or null if there is no previous media + * playlist. * @return A parser for HLS playlists. */ - ParsingLoadable.Parser createPlaylistParser(HlsMasterPlaylist masterPlaylist); + ParsingLoadable.Parser createPlaylistParser( + HlsMultivariantPlaylist multivariantPlaylist, + @Nullable HlsMediaPlaylist previousMediaPlaylist); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylistTracker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylistTracker.java index 96c9660db0..06f369d2a2 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylistTracker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/hls/playlist/HlsPlaylistTracker.java @@ -29,10 +29,10 @@ *
<p>
        The playlist tracker is responsible for exposing the seeking window, which is defined by the * segments that one of the playlists exposes. This playlist is called primary and needs to be * periodically refreshed in the case of live streams. Note that the primary playlist is one of the - * media playlists while the master playlist is an optional kind of playlist defined by the HLS - * specification (RFC 8216). + * media playlists while the multivariant playlist is an optional kind of playlist defined by the + * HLS specification (RFC 8216). * - *
<p>
        Playlist loads might encounter errors. The tracker may choose to blacklist them to ensure a + *
<p>
        Playlist loads might encounter errors. The tracker may choose to exclude them to ensure a * primary playlist is always available. */ public interface HlsPlaylistTracker { @@ -67,20 +67,19 @@ interface PrimaryPlaylistListener { /** Called on playlist loading events. */ interface PlaylistEventListener { - /** - * Called a playlist changes. - */ + /** Called a playlist changes. */ void onPlaylistChanged(); /** * Called if an error is encountered while loading a playlist. * * @param url The loaded url that caused the error. - * @param blacklistDurationMs The duration for which the playlist should be blacklisted. Or - * {@link C#TIME_UNSET} if the playlist should not be blacklisted. - * @return True if blacklisting did not encounter errors. False otherwise. + * @param loadErrorInfo The load error info. + * @param forceRetry Whether retry should be forced without considering exclusion. + * @return True if excluding did not encounter errors. False otherwise. */ - boolean onPlaylistError(Uri url, long blacklistDurationMs); + boolean onPlaylistError( + Uri url, LoadErrorHandlingPolicy.LoadErrorInfo loadErrorInfo, boolean forceRetry); } /** Thrown when a playlist is considered to be stuck due to a server side error. */ @@ -121,13 +120,15 @@ public PlaylistResetException(Uri url) { *
<p>
        Must be called from the playback thread. A tracker may be restarted after a {@link #stop()} * call. * - * @param initialPlaylistUri Uri of the HLS stream. Can point to a media playlist or a master - * playlist. + * @param initialPlaylistUri Uri of the HLS stream. Can point to a media playlist or a + * multivariant playlist. * @param eventDispatcher A dispatcher to notify of events. - * @param listener A callback for the primary playlist change events. + * @param primaryPlaylistListener A callback for the primary playlist change events. */ void start( - Uri initialPlaylistUri, EventDispatcher eventDispatcher, PrimaryPlaylistListener listener); + Uri initialPlaylistUri, + EventDispatcher eventDispatcher, + PrimaryPlaylistListener primaryPlaylistListener); /** * Stops the playlist tracker and releases any acquired resources. @@ -151,15 +152,15 @@ void start( void removeListener(PlaylistEventListener listener); /** - * Returns the master playlist. + * Returns the multivariant playlist. * - *
<p>
        If the uri passed to {@link #start} points to a media playlist, an {@link HlsMasterPlaylist} - * with a single variant for said media playlist is returned. + *
<p>
        If the uri passed to {@link #start} points to a media playlist, an {@link + * HlsMultivariantPlaylist} with a single variant for said media playlist is returned. * - * @return The master playlist. Null if the initial playlist has yet to be loaded. + * @return The multivariant playlist. Null if the initial playlist has yet to be loaded. */ @Nullable - HlsMasterPlaylist getMasterPlaylist(); + HlsMultivariantPlaylist getMultivariantPlaylist(); /** * Returns the most recent snapshot available of the playlist referenced by the provided {@link @@ -191,8 +192,8 @@ void start( boolean isSnapshotValid(Uri url); /** - * If the tracker is having trouble refreshing the master playlist or the primary playlist, this - * method throws the underlying error. Otherwise, does nothing. + * If the tracker is having trouble refreshing the multivariant playlist or the primary playlist, + * this method throws the underlying error. Otherwise, does nothing. * * @throws IOException The underlying error. */ @@ -208,10 +209,19 @@ void start( void maybeThrowPlaylistRefreshError(Uri url) throws IOException; /** - * Requests a playlist refresh and whitelists it. + * Excludes the given media playlist for the given duration, in milliseconds. + * + * @param playlistUrl The URL of the media playlist. + * @param exclusionDurationMs The duration for which to exclude the playlist. + * @return Whether exclusion was successful. + */ + boolean excludeMediaPlaylist(Uri playlistUrl, long exclusionDurationMs); + + /** + * Requests a playlist refresh and removes it from the exclusion list. * - *
<p>
        The playlist tracker may choose the delay the playlist refresh. The request is discarded if - * a refresh was already pending. + *
<p>
        The playlist tracker may choose to delay the playlist refresh. The request is discarded if a + * refresh was already pending. * * @param url The {@link Uri} of the playlist to be refreshed. */ diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/mediaparser/InputReaderAdapterV30.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/mediaparser/InputReaderAdapterV30.java new file mode 100644 index 0000000000..3a55645e96 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/mediaparser/InputReaderAdapterV30.java @@ -0,0 +1,87 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.mediaparser; + +import android.annotation.SuppressLint; +import android.media.MediaParser; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.upstream.DataReader; +import com.google.android.exoplayer2.util.Util; +import java.io.IOException; + +/** {@link MediaParser.SeekableInputReader} implementation wrapping a {@link DataReader}. */ +@RequiresApi(30) +@SuppressLint("Override") // TODO: Remove once the SDK becomes stable. +public final class InputReaderAdapterV30 implements MediaParser.SeekableInputReader { + + @Nullable private DataReader dataReader; + private long resourceLength; + private long currentPosition; + private long lastSeekPosition; + + /** + * Sets the wrapped {@link DataReader}. + * + * @param dataReader The {@link DataReader} to wrap. + * @param length The length of the resource from which {@code dataReader} reads. + */ + public void setDataReader(DataReader dataReader, long length) { + this.dataReader = dataReader; + resourceLength = length; + lastSeekPosition = C.POSITION_UNSET; + } + + /** Sets the absolute position in the resource from which the wrapped {@link DataReader} reads. */ + public void setCurrentPosition(long position) { + currentPosition = position; + } + + /** + * Returns the last value passed to {@link #seekToPosition(long)} and sets the stored value to + * {@link C#POSITION_UNSET}. + */ + public long getAndResetSeekPosition() { + long lastSeekPosition = this.lastSeekPosition; + this.lastSeekPosition = C.POSITION_UNSET; + return lastSeekPosition; + } + + // SeekableInputReader implementation. 
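For orientation, the adapter above is normally driven by a loader that owns both the MediaParser and the DataReader. A simplified sketch of that read/seek loop, not the actual ExoPlayer loader code; MediaParserLoopSketch, its parameters and the reopenAt callback are assumptions made for illustration:

import android.media.MediaParser;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.source.mediaparser.InputReaderAdapterV30;
import com.google.android.exoplayer2.upstream.DataReader;
import java.io.IOException;
import java.util.function.LongFunction;

// Sketch only (API 30+): drive a MediaParser from a DataReader via InputReaderAdapterV30.
final class MediaParserLoopSketch {
  static void parse(
      MediaParser.OutputConsumer outputConsumer,
      DataReader initialReader,
      long resourceLength,
      LongFunction<DataReader> reopenAt)
      throws IOException {
    InputReaderAdapterV30 inputReader = new InputReaderAdapterV30();
    inputReader.setDataReader(initialReader, resourceLength);
    MediaParser mediaParser = MediaParser.create(outputConsumer);
    try {
      boolean more = true;
      while (more) {
        more = mediaParser.advance(inputReader);
        long seekPosition = inputReader.getAndResetSeekPosition();
        if (seekPosition != C.POSITION_UNSET) {
          // The parser requested a seek: reopen the source at that position and tell the
          // adapter where subsequent reads will start.
          inputReader.setDataReader(reopenAt.apply(seekPosition), resourceLength);
          inputReader.setCurrentPosition(seekPosition);
        }
      }
    } finally {
      mediaParser.release();
    }
  }
}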
+ + @Override + public void seekToPosition(long position) { + lastSeekPosition = position; + } + + @Override + public int read(byte[] bytes, int offset, int readLength) throws IOException { + int bytesRead = Util.castNonNull(dataReader).read(bytes, offset, readLength); + currentPosition += bytesRead; + return bytesRead; + } + + @Override + public long getPosition() { + return currentPosition; + } + + @Override + public long getLength() { + return resourceLength; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/mediaparser/MediaParserUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/mediaparser/MediaParserUtil.java new file mode 100644 index 0000000000..30cf3460aa --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/mediaparser/MediaParserUtil.java @@ -0,0 +1,88 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.mediaparser; + +import android.media.MediaFormat; +import android.media.MediaParser; +import android.media.metrics.LogSessionId; +import androidx.annotation.DoNotInline; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.analytics.PlayerId; + +/** + * Miscellaneous constants and utility methods related to the {@link MediaParser} integration. + * + *
<p>
        For documentation on constants, please see the {@link MediaParser} documentation. + */ +public final class MediaParserUtil { + + public static final String PARAMETER_IN_BAND_CRYPTO_INFO = + "android.media.mediaparser.inBandCryptoInfo"; + public static final String PARAMETER_INCLUDE_SUPPLEMENTAL_DATA = + "android.media.mediaparser.includeSupplementalData"; + public static final String PARAMETER_EAGERLY_EXPOSE_TRACK_TYPE = + "android.media.mediaparser.eagerlyExposeTrackType"; + public static final String PARAMETER_EXPOSE_DUMMY_SEEK_MAP = + "android.media.mediaparser.exposeDummySeekMap"; + public static final String PARAMETER_EXPOSE_CHUNK_INDEX_AS_MEDIA_FORMAT = + "android.media.mediaParser.exposeChunkIndexAsMediaFormat"; + public static final String PARAMETER_OVERRIDE_IN_BAND_CAPTION_DECLARATIONS = + "android.media.mediaParser.overrideInBandCaptionDeclarations"; + public static final String PARAMETER_EXPOSE_CAPTION_FORMATS = + "android.media.mediaParser.exposeCaptionFormats"; + public static final String PARAMETER_IGNORE_TIMESTAMP_OFFSET = + "android.media.mediaparser.ignoreTimestampOffset"; + + private MediaParserUtil() {} + + /** + * Returns a {@link MediaFormat} with equivalent {@link MediaFormat#KEY_MIME} and {@link + * MediaFormat#KEY_CAPTION_SERVICE_NUMBER} to the given {@link Format}. + */ + public static MediaFormat toCaptionsMediaFormat(Format format) { + MediaFormat mediaFormat = new MediaFormat(); + mediaFormat.setString(MediaFormat.KEY_MIME, format.sampleMimeType); + if (format.accessibilityChannel != Format.NO_VALUE) { + mediaFormat.setInteger(MediaFormat.KEY_CAPTION_SERVICE_NUMBER, format.accessibilityChannel); + } + return mediaFormat; + } + + /** + * Calls {@link MediaParser#setLogSessionId(LogSessionId)}. + * + * @param mediaParser The {@link MediaParser} to call the method on. + * @param playerId The {@link PlayerId} to obtain the {@link LogSessionId} from. + */ + @RequiresApi(31) + public static void setLogSessionIdOnMediaParser(MediaParser mediaParser, PlayerId playerId) { + Api31.setLogSessionIdOnMediaParser(mediaParser, playerId); + } + + @RequiresApi(31) + private static final class Api31 { + private Api31() {} + + @DoNotInline + public static void setLogSessionIdOnMediaParser(MediaParser mediaParser, PlayerId playerId) { + LogSessionId logSessionId = playerId.getLogSessionId(); + if (!logSessionId.equals(LogSessionId.LOG_SESSION_ID_NONE)) { + mediaParser.setLogSessionId(logSessionId); + } + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/mediaparser/OutputConsumerAdapterV30.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/mediaparser/OutputConsumerAdapterV30.java new file mode 100644 index 0000000000..73b9a61433 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/mediaparser/OutputConsumerAdapterV30.java @@ -0,0 +1,661 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.source.mediaparser; + +import static android.media.MediaParser.PARSER_NAME_AC3; +import static android.media.MediaParser.PARSER_NAME_AC4; +import static android.media.MediaParser.PARSER_NAME_ADTS; +import static android.media.MediaParser.PARSER_NAME_AMR; +import static android.media.MediaParser.PARSER_NAME_FLAC; +import static android.media.MediaParser.PARSER_NAME_FLV; +import static android.media.MediaParser.PARSER_NAME_FMP4; +import static android.media.MediaParser.PARSER_NAME_MATROSKA; +import static android.media.MediaParser.PARSER_NAME_MP3; +import static android.media.MediaParser.PARSER_NAME_MP4; +import static android.media.MediaParser.PARSER_NAME_OGG; +import static android.media.MediaParser.PARSER_NAME_PS; +import static android.media.MediaParser.PARSER_NAME_TS; +import static android.media.MediaParser.PARSER_NAME_WAV; + +import android.annotation.SuppressLint; +import android.media.DrmInitData.SchemeInitData; +import android.media.MediaCodec; +import android.media.MediaCodec.CryptoInfo; +import android.media.MediaFormat; +import android.media.MediaParser; +import android.media.MediaParser.TrackData; +import android.util.Pair; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.C.SelectionFlags; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.drm.DrmInitData; +import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; +import com.google.android.exoplayer2.extractor.ChunkIndex; +import com.google.android.exoplayer2.extractor.DummyExtractorOutput; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.SeekMap; +import com.google.android.exoplayer2.extractor.SeekPoint; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.extractor.TrackOutput.CryptoData; +import com.google.android.exoplayer2.upstream.DataReader; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MediaFormatUtil; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.TimestampAdjuster; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.IntBuffer; +import java.nio.LongBuffer; +import java.util.ArrayList; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import org.checkerframework.checker.nullness.compatqual.NullableType; + +/** + * {@link MediaParser.OutputConsumer} implementation that redirects output to an {@link + * ExtractorOutput}. + */ +@RequiresApi(30) +@SuppressLint("Override") // TODO: Remove once the SDK becomes stable. 
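At a high level this adapter is used by constructing it, pointing it at an ExtractorOutput, and handing it to MediaParser as its OutputConsumer. A rough sketch; OutputConsumerWiringSketch and its extractorOutput parameter are invented for illustration, and fMP4 is an arbitrary parser choice:

import android.media.MediaParser;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.extractor.ExtractorOutput;
import com.google.android.exoplayer2.source.mediaparser.OutputConsumerAdapterV30;

// Sketch only: configure the adapter and hand it to MediaParser as the OutputConsumer.
final class OutputConsumerWiringSketch {
  static OutputConsumerAdapterV30 createFmp4Consumer(ExtractorOutput extractorOutput) {
    OutputConsumerAdapterV30 outputConsumer =
        new OutputConsumerAdapterV30(
            /* primaryTrackManifestFormat= */ null,
            /* primaryTrackType= */ C.TRACK_TYPE_NONE,
            /* expectDummySeekMap= */ false);
    outputConsumer.setExtractorOutput(extractorOutput);
    // Record the selected parser so the adapter can fill Format.containerMimeType.
    outputConsumer.setSelectedParserName(MediaParser.PARSER_NAME_FMP4);
    // The adapter is then passed to MediaParser, e.g.
    // MediaParser.createByName(MediaParser.PARSER_NAME_FMP4, outputConsumer).
    return outputConsumer;
  }
}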
+public final class OutputConsumerAdapterV30 implements MediaParser.OutputConsumer { + + private static final String TAG = "OConsumerAdapterV30"; + + private static final Pair SEEK_POINT_PAIR_START = + Pair.create(MediaParser.SeekPoint.START, MediaParser.SeekPoint.START); + private static final String MEDIA_FORMAT_KEY_TRACK_TYPE = "track-type-string"; + private static final String MEDIA_FORMAT_KEY_CHUNK_INDEX_SIZES = "chunk-index-int-sizes"; + private static final String MEDIA_FORMAT_KEY_CHUNK_INDEX_OFFSETS = "chunk-index-long-offsets"; + private static final String MEDIA_FORMAT_KEY_CHUNK_INDEX_DURATIONS = + "chunk-index-long-us-durations"; + private static final String MEDIA_FORMAT_KEY_CHUNK_INDEX_TIMES = "chunk-index-long-us-times"; + private static final Pattern REGEX_CRYPTO_INFO_PATTERN = + Pattern.compile("pattern \\(encrypt: (\\d+), skip: (\\d+)\\)"); + + private final ArrayList<@NullableType TrackOutput> trackOutputs; + private final ArrayList<@NullableType Format> trackFormats; + private final ArrayList<@NullableType CryptoInfo> lastReceivedCryptoInfos; + private final ArrayList<@NullableType CryptoData> lastOutputCryptoDatas; + private final DataReaderAdapter scratchDataReaderAdapter; + private final boolean expectDummySeekMap; + private final @C.TrackType int primaryTrackType; + @Nullable private final Format primaryTrackManifestFormat; + + private ExtractorOutput extractorOutput; + @Nullable private MediaParser.SeekMap dummySeekMap; + @Nullable private MediaParser.SeekMap lastSeekMap; + @Nullable private String containerMimeType; + @Nullable private ChunkIndex lastChunkIndex; + @Nullable private TimestampAdjuster timestampAdjuster; + private List muxedCaptionFormats; + private int primaryTrackIndex; + private long sampleTimestampUpperLimitFilterUs; + private boolean tracksFoundCalled; + private boolean tracksEnded; + private boolean seekingDisabled; + + /** + * Equivalent to {@link #OutputConsumerAdapterV30(Format, int, boolean) + * OutputConsumerAdapterV30(primaryTrackManifestFormat= null, primaryTrackType= C.TRACK_TYPE_NONE, + * expectDummySeekMap= false)} + */ + public OutputConsumerAdapterV30() { + this( + /* primaryTrackManifestFormat= */ null, + /* primaryTrackType= */ C.TRACK_TYPE_NONE, + /* expectDummySeekMap= */ false); + } + + /** + * Creates a new instance. + * + * @param primaryTrackManifestFormat The manifest-obtained format of the primary track, or null if + * not applicable. + * @param primaryTrackType The {@link C.TrackType type} of the primary track. {@link + * C#TRACK_TYPE_NONE} if there is no primary track. + * @param expectDummySeekMap Whether the output consumer should expect an initial dummy seek map + * which should be exposed through {@link #getDummySeekMap()}. + */ + public OutputConsumerAdapterV30( + @Nullable Format primaryTrackManifestFormat, + @C.TrackType int primaryTrackType, + boolean expectDummySeekMap) { + this.expectDummySeekMap = expectDummySeekMap; + this.primaryTrackManifestFormat = primaryTrackManifestFormat; + this.primaryTrackType = primaryTrackType; + trackOutputs = new ArrayList<>(); + trackFormats = new ArrayList<>(); + lastReceivedCryptoInfos = new ArrayList<>(); + lastOutputCryptoDatas = new ArrayList<>(); + scratchDataReaderAdapter = new DataReaderAdapter(); + extractorOutput = new DummyExtractorOutput(); + sampleTimestampUpperLimitFilterUs = C.TIME_UNSET; + muxedCaptionFormats = ImmutableList.of(); + } + + /** + * Sets an upper limit for sample timestamp filtering. + * + *
<p>
        When set, samples with timestamps greater than {@code sampleTimestampUpperLimitFilterUs} + * will be discarded. + * + * @param sampleTimestampUpperLimitFilterUs The maximum allowed sample timestamp, or {@link + * C#TIME_UNSET} to remove filtering. + */ + public void setSampleTimestampUpperLimitFilterUs(long sampleTimestampUpperLimitFilterUs) { + this.sampleTimestampUpperLimitFilterUs = sampleTimestampUpperLimitFilterUs; + } + + /** Sets a {@link TimestampAdjuster} for adjusting the timestamps of the output samples. */ + public void setTimestampAdjuster(TimestampAdjuster timestampAdjuster) { + this.timestampAdjuster = timestampAdjuster; + } + + /** + * Sets the {@link ExtractorOutput} to which {@link MediaParser MediaParser's} output is directed. + */ + public void setExtractorOutput(ExtractorOutput extractorOutput) { + this.extractorOutput = extractorOutput; + } + + /** Sets {@link Format} information associated to the caption tracks multiplexed in the media. */ + public void setMuxedCaptionFormats(List muxedCaptionFormats) { + this.muxedCaptionFormats = muxedCaptionFormats; + } + + /** Overrides future received {@link SeekMap SeekMaps} with non-seekable instances. */ + public void disableSeeking() { + seekingDisabled = true; + } + + /** + * Returns a dummy {@link MediaParser.SeekMap}, or null if not available. + * + *
<p>
        the dummy {@link MediaParser.SeekMap} returns a single {@link MediaParser.SeekPoint} whose + * {@link MediaParser.SeekPoint#timeMicros} matches the requested timestamp, and {@link + * MediaParser.SeekPoint#position} is 0. + */ + @Nullable + public MediaParser.SeekMap getDummySeekMap() { + return dummySeekMap; + } + + /** Returns the most recently output {@link ChunkIndex}, or null if none has been output. */ + @Nullable + public ChunkIndex getChunkIndex() { + return lastChunkIndex; + } + + /** + * Returns the {@link MediaParser.SeekPoint} instances corresponding to the given timestamp. + * + * @param seekTimeUs The timestamp in microseconds to retrieve {@link MediaParser.SeekPoint} + * instances for. + * @return The {@link MediaParser.SeekPoint} instances corresponding to the given timestamp. + */ + public Pair getSeekPoints(long seekTimeUs) { + return lastSeekMap != null ? lastSeekMap.getSeekPoints(seekTimeUs) : SEEK_POINT_PAIR_START; + } + + /** + * Defines the container mime type to propagate through {@link TrackOutput#format}. + * + * @param parserName The name of the selected parser. + */ + public void setSelectedParserName(String parserName) { + containerMimeType = getMimeType(parserName); + } + + /** + * Returns the last output format for each track, or null if not all the tracks have been + * identified. + */ + @Nullable + public Format[] getSampleFormats() { + if (!tracksFoundCalled) { + return null; + } + Format[] sampleFormats = new Format[trackFormats.size()]; + for (int i = 0; i < trackFormats.size(); i++) { + sampleFormats[i] = Assertions.checkNotNull(trackFormats.get(i)); + } + return sampleFormats; + } + + // MediaParser.OutputConsumer implementation. + + @Override + public void onTrackCountFound(int numberOfTracks) { + tracksFoundCalled = true; + maybeEndTracks(); + } + + @Override + public void onSeekMapFound(MediaParser.SeekMap seekMap) { + if (expectDummySeekMap && dummySeekMap == null) { + // This is a dummy seek map. + dummySeekMap = seekMap; + } else { + lastSeekMap = seekMap; + long durationUs = seekMap.getDurationMicros(); + extractorOutput.seekMap( + seekingDisabled + ? new SeekMap.Unseekable( + durationUs != MediaParser.SeekMap.UNKNOWN_DURATION ? durationUs : C.TIME_UNSET) + : new SeekMapAdapter(seekMap)); + } + } + + @Override + public void onTrackDataFound(int trackIndex, TrackData trackData) { + if (maybeObtainChunkIndex(trackData.mediaFormat)) { + // The MediaFormat contains a chunk index. It does not contain anything else. + return; + } + + ensureSpaceForTrackIndex(trackIndex); + @Nullable TrackOutput trackOutput = trackOutputs.get(trackIndex); + if (trackOutput == null) { + @Nullable + String trackTypeString = trackData.mediaFormat.getString(MEDIA_FORMAT_KEY_TRACK_TYPE); + int trackType = + toTrackTypeConstant( + trackTypeString != null + ? trackTypeString + : trackData.mediaFormat.getString(MediaFormat.KEY_MIME)); + if (trackType == primaryTrackType) { + primaryTrackIndex = trackIndex; + } + trackOutput = extractorOutput.track(trackIndex, trackType); + trackOutputs.set(trackIndex, trackOutput); + if (trackTypeString != null) { + // The MediaFormat includes the track type string, so it cannot include any other keys, as + // per the android.media.mediaparser.eagerlyExposeTrackType parameter documentation. + return; + } + } + Format format = toExoPlayerFormat(trackData); + trackOutput.format( + primaryTrackManifestFormat != null && trackIndex == primaryTrackIndex + ? 
format.withManifestFormatInfo(primaryTrackManifestFormat) + : format); + trackFormats.set(trackIndex, format); + maybeEndTracks(); + } + + @Override + public void onSampleDataFound(int trackIndex, MediaParser.InputReader sampleData) + throws IOException { + ensureSpaceForTrackIndex(trackIndex); + scratchDataReaderAdapter.input = sampleData; + TrackOutput trackOutput = trackOutputs.get(trackIndex); + if (trackOutput == null) { + trackOutput = extractorOutput.track(trackIndex, C.TRACK_TYPE_UNKNOWN); + trackOutputs.set(trackIndex, trackOutput); + } + trackOutput.sampleData( + scratchDataReaderAdapter, (int) sampleData.getLength(), /* allowEndOfInput= */ true); + } + + @Override + public void onSampleCompleted( + int trackIndex, + long timeUs, + int flags, + int size, + int offset, + @Nullable MediaCodec.CryptoInfo cryptoInfo) { + if (sampleTimestampUpperLimitFilterUs != C.TIME_UNSET + && timeUs >= sampleTimestampUpperLimitFilterUs) { + // Ignore this sample. + return; + } else if (timestampAdjuster != null) { + timeUs = timestampAdjuster.adjustSampleTimestamp(timeUs); + } + Assertions.checkNotNull(trackOutputs.get(trackIndex)) + .sampleMetadata(timeUs, flags, size, offset, toExoPlayerCryptoData(trackIndex, cryptoInfo)); + } + + // Private methods. + + private boolean maybeObtainChunkIndex(MediaFormat mediaFormat) { + @Nullable + ByteBuffer chunkIndexSizesByteBuffer = + mediaFormat.getByteBuffer(MEDIA_FORMAT_KEY_CHUNK_INDEX_SIZES); + if (chunkIndexSizesByteBuffer == null) { + return false; + } + IntBuffer chunkIndexSizes = chunkIndexSizesByteBuffer.asIntBuffer(); + LongBuffer chunkIndexOffsets = + Assertions.checkNotNull(mediaFormat.getByteBuffer(MEDIA_FORMAT_KEY_CHUNK_INDEX_OFFSETS)) + .asLongBuffer(); + LongBuffer chunkIndexDurationsUs = + Assertions.checkNotNull(mediaFormat.getByteBuffer(MEDIA_FORMAT_KEY_CHUNK_INDEX_DURATIONS)) + .asLongBuffer(); + LongBuffer chunkIndexTimesUs = + Assertions.checkNotNull(mediaFormat.getByteBuffer(MEDIA_FORMAT_KEY_CHUNK_INDEX_TIMES)) + .asLongBuffer(); + int[] sizes = new int[chunkIndexSizes.remaining()]; + long[] offsets = new long[chunkIndexOffsets.remaining()]; + long[] durationsUs = new long[chunkIndexDurationsUs.remaining()]; + long[] timesUs = new long[chunkIndexTimesUs.remaining()]; + chunkIndexSizes.get(sizes); + chunkIndexOffsets.get(offsets); + chunkIndexDurationsUs.get(durationsUs); + chunkIndexTimesUs.get(timesUs); + lastChunkIndex = new ChunkIndex(sizes, offsets, durationsUs, timesUs); + extractorOutput.seekMap(lastChunkIndex); + return true; + } + + private void ensureSpaceForTrackIndex(int trackIndex) { + for (int i = trackOutputs.size(); i <= trackIndex; i++) { + trackOutputs.add(null); + trackFormats.add(null); + lastReceivedCryptoInfos.add(null); + lastOutputCryptoDatas.add(null); + } + } + + @Nullable + private CryptoData toExoPlayerCryptoData(int trackIndex, @Nullable CryptoInfo cryptoInfo) { + if (cryptoInfo == null) { + return null; + } + + @Nullable CryptoInfo lastReceivedCryptoInfo = lastReceivedCryptoInfos.get(trackIndex); + CryptoData cryptoDataToOutput; + // MediaParser keeps identity and value equality aligned for efficient comparison. + if (lastReceivedCryptoInfo == cryptoInfo) { + // They match, we can reuse the last one we created. + cryptoDataToOutput = Assertions.checkNotNull(lastOutputCryptoDatas.get(trackIndex)); + } else { + // They don't match, we create a new CryptoData. + + // TODO: Access pattern encryption info directly once the Android SDK makes it visible. + // See [Internal ref: b/154248283]. 
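The regex fallback used here can be shown in isolation. The sample string below is hand-written to match the shape REGEX_CRYPTO_INFO_PATTERN expects; real CryptoInfo.toString() output carries additional fields, and CryptoPatternRegexSketch is an invented name:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Illustration of recovering the encryption pattern from a CryptoInfo#toString()-like string.
final class CryptoPatternRegexSketch {
  private static final Pattern REGEX_CRYPTO_INFO_PATTERN =
      Pattern.compile("pattern \\(encrypt: (\\d+), skip: (\\d+)\\)");

  public static void main(String[] args) {
    String sample = "... pattern (encrypt: 1, skip: 9)";
    Matcher matcher = REGEX_CRYPTO_INFO_PATTERN.matcher(sample);
    if (matcher.find()) {
      int encryptedBlocks = Integer.parseInt(matcher.group(1)); // 1
      int clearBlocks = Integer.parseInt(matcher.group(2)); // 9
      System.out.println("encrypt=" + encryptedBlocks + " skip=" + clearBlocks);
    }
  }
}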
+ int encryptedBlocks; + int clearBlocks; + try { + Matcher matcher = REGEX_CRYPTO_INFO_PATTERN.matcher(cryptoInfo.toString()); + matcher.find(); + encryptedBlocks = Integer.parseInt(Util.castNonNull(matcher.group(1))); + clearBlocks = Integer.parseInt(Util.castNonNull(matcher.group(2))); + } catch (RuntimeException e) { + // Should never happen. + Log.e(TAG, "Unexpected error while parsing CryptoInfo: " + cryptoInfo, e); + // Assume no-pattern encryption. + encryptedBlocks = 0; + clearBlocks = 0; + } + cryptoDataToOutput = + new CryptoData(cryptoInfo.mode, cryptoInfo.key, encryptedBlocks, clearBlocks); + lastReceivedCryptoInfos.set(trackIndex, cryptoInfo); + lastOutputCryptoDatas.set(trackIndex, cryptoDataToOutput); + } + return cryptoDataToOutput; + } + + private void maybeEndTracks() { + if (!tracksFoundCalled || tracksEnded) { + return; + } + int size = trackOutputs.size(); + for (int i = 0; i < size; i++) { + if (trackOutputs.get(i) == null) { + return; + } + } + extractorOutput.endTracks(); + tracksEnded = true; + } + + private static @C.TrackType int toTrackTypeConstant(@Nullable String string) { + if (string == null) { + return C.TRACK_TYPE_UNKNOWN; + } + switch (string) { + case "audio": + return C.TRACK_TYPE_AUDIO; + case "video": + return C.TRACK_TYPE_VIDEO; + case "text": + return C.TRACK_TYPE_TEXT; + case "metadata": + return C.TRACK_TYPE_METADATA; + case "unknown": + return C.TRACK_TYPE_UNKNOWN; + default: + // Must be a MIME type. + return MimeTypes.getTrackType(string); + } + } + + private Format toExoPlayerFormat(TrackData trackData) { + // TODO: Consider adding support for the following: + // format.id + // format.stereoMode + // format.projectionData + MediaFormat mediaFormat = trackData.mediaFormat; + @Nullable String mediaFormatMimeType = mediaFormat.getString(MediaFormat.KEY_MIME); + int mediaFormatAccessibilityChannel = + mediaFormat.getInteger( + MediaFormat.KEY_CAPTION_SERVICE_NUMBER, /* defaultValue= */ Format.NO_VALUE); + Format.Builder formatBuilder = + new Format.Builder() + .setDrmInitData( + toExoPlayerDrmInitData( + mediaFormat.getString("crypto-mode-fourcc"), trackData.drmInitData)) + .setContainerMimeType(containerMimeType) + .setPeakBitrate( + mediaFormat.getInteger( + MediaFormat.KEY_BIT_RATE, /* defaultValue= */ Format.NO_VALUE)) + .setChannelCount( + mediaFormat.getInteger( + MediaFormat.KEY_CHANNEL_COUNT, /* defaultValue= */ Format.NO_VALUE)) + .setColorInfo(MediaFormatUtil.getColorInfo(mediaFormat)) + .setSampleMimeType(mediaFormatMimeType) + .setCodecs(mediaFormat.getString(MediaFormat.KEY_CODECS_STRING)) + .setFrameRate( + mediaFormat.getFloat( + MediaFormat.KEY_FRAME_RATE, /* defaultValue= */ Format.NO_VALUE)) + .setWidth( + mediaFormat.getInteger(MediaFormat.KEY_WIDTH, /* defaultValue= */ Format.NO_VALUE)) + .setHeight( + mediaFormat.getInteger(MediaFormat.KEY_HEIGHT, /* defaultValue= */ Format.NO_VALUE)) + .setInitializationData(getInitializationData(mediaFormat)) + .setLanguage(mediaFormat.getString(MediaFormat.KEY_LANGUAGE)) + .setMaxInputSize( + mediaFormat.getInteger( + MediaFormat.KEY_MAX_INPUT_SIZE, /* defaultValue= */ Format.NO_VALUE)) + .setPcmEncoding( + mediaFormat.getInteger("exo-pcm-encoding", /* defaultValue= */ Format.NO_VALUE)) + .setRotationDegrees( + mediaFormat.getInteger(MediaFormat.KEY_ROTATION, /* defaultValue= */ 0)) + .setSampleRate( + mediaFormat.getInteger( + MediaFormat.KEY_SAMPLE_RATE, /* defaultValue= */ Format.NO_VALUE)) + .setSelectionFlags(getSelectionFlags(mediaFormat)) + .setEncoderDelay( + 
mediaFormat.getInteger(MediaFormat.KEY_ENCODER_DELAY, /* defaultValue= */ 0)) + .setEncoderPadding( + mediaFormat.getInteger(MediaFormat.KEY_ENCODER_PADDING, /* defaultValue= */ 0)) + .setPixelWidthHeightRatio( + mediaFormat.getFloat("pixel-width-height-ratio-float", /* defaultValue= */ 1f)) + .setSubsampleOffsetUs( + mediaFormat.getLong( + "subsample-offset-us-long", /* defaultValue= */ Format.OFFSET_SAMPLE_RELATIVE)) + .setAccessibilityChannel(mediaFormatAccessibilityChannel); + for (int i = 0; i < muxedCaptionFormats.size(); i++) { + Format muxedCaptionFormat = muxedCaptionFormats.get(i); + if (Util.areEqual(muxedCaptionFormat.sampleMimeType, mediaFormatMimeType) + && muxedCaptionFormat.accessibilityChannel == mediaFormatAccessibilityChannel) { + // The track's format matches this muxedCaptionFormat, so we apply the manifest format + // information to the track. + formatBuilder + .setLanguage(muxedCaptionFormat.language) + .setRoleFlags(muxedCaptionFormat.roleFlags) + .setSelectionFlags(muxedCaptionFormat.selectionFlags) + .setLabel(muxedCaptionFormat.label) + .setMetadata(muxedCaptionFormat.metadata); + break; + } + } + return formatBuilder.build(); + } + + @Nullable + private static DrmInitData toExoPlayerDrmInitData( + @Nullable String schemeType, @Nullable android.media.DrmInitData drmInitData) { + if (drmInitData == null) { + return null; + } + SchemeData[] schemeDatas = new SchemeData[drmInitData.getSchemeInitDataCount()]; + for (int i = 0; i < schemeDatas.length; i++) { + SchemeInitData schemeInitData = drmInitData.getSchemeInitDataAt(i); + schemeDatas[i] = + new SchemeData(schemeInitData.uuid, schemeInitData.mimeType, schemeInitData.data); + } + return new DrmInitData(schemeType, schemeDatas); + } + + private static @SelectionFlags int getSelectionFlags(MediaFormat mediaFormat) { + int selectionFlags = 0; + selectionFlags |= + getFlag( + mediaFormat, + /* key= */ MediaFormat.KEY_IS_AUTOSELECT, + /* returnValueIfPresent= */ C.SELECTION_FLAG_AUTOSELECT); + selectionFlags |= + getFlag( + mediaFormat, + /* key= */ MediaFormat.KEY_IS_DEFAULT, + /* returnValueIfPresent= */ C.SELECTION_FLAG_DEFAULT); + selectionFlags |= + getFlag( + mediaFormat, + /* key= */ MediaFormat.KEY_IS_FORCED_SUBTITLE, + /* returnValueIfPresent= */ C.SELECTION_FLAG_FORCED); + return selectionFlags; + } + + private static int getFlag(MediaFormat mediaFormat, String key, int returnValueIfPresent) { + return mediaFormat.getInteger(key, /* defaultValue= */ 0) != 0 ? 
returnValueIfPresent : 0; + } + + private static List getInitializationData(MediaFormat mediaFormat) { + ArrayList initData = new ArrayList<>(); + int i = 0; + while (true) { + @Nullable ByteBuffer byteBuffer = mediaFormat.getByteBuffer("csd-" + i++); + if (byteBuffer == null) { + break; + } + initData.add(MediaFormatUtil.getArray(byteBuffer)); + } + return initData; + } + + private static String getMimeType(String parserName) { + switch (parserName) { + case PARSER_NAME_MATROSKA: + return MimeTypes.VIDEO_WEBM; + case PARSER_NAME_FMP4: + case PARSER_NAME_MP4: + return MimeTypes.VIDEO_MP4; + case PARSER_NAME_MP3: + return MimeTypes.AUDIO_MPEG; + case PARSER_NAME_ADTS: + return MimeTypes.AUDIO_AAC; + case PARSER_NAME_AC3: + return MimeTypes.AUDIO_AC3; + case PARSER_NAME_TS: + return MimeTypes.VIDEO_MP2T; + case PARSER_NAME_FLV: + return MimeTypes.VIDEO_FLV; + case PARSER_NAME_OGG: + return MimeTypes.AUDIO_OGG; + case PARSER_NAME_PS: + return MimeTypes.VIDEO_PS; + case PARSER_NAME_WAV: + return MimeTypes.AUDIO_RAW; + case PARSER_NAME_AMR: + return MimeTypes.AUDIO_AMR; + case PARSER_NAME_AC4: + return MimeTypes.AUDIO_AC4; + case PARSER_NAME_FLAC: + return MimeTypes.AUDIO_FLAC; + default: + throw new IllegalArgumentException("Illegal parser name: " + parserName); + } + } + + private static final class SeekMapAdapter implements SeekMap { + + private final MediaParser.SeekMap adaptedSeekMap; + + public SeekMapAdapter(MediaParser.SeekMap adaptedSeekMap) { + this.adaptedSeekMap = adaptedSeekMap; + } + + @Override + public boolean isSeekable() { + return adaptedSeekMap.isSeekable(); + } + + @Override + public long getDurationUs() { + long durationMicros = adaptedSeekMap.getDurationMicros(); + return durationMicros != MediaParser.SeekMap.UNKNOWN_DURATION ? durationMicros : C.TIME_UNSET; + } + + @Override + @SuppressWarnings("ReferenceEquality") + public SeekPoints getSeekPoints(long timeUs) { + Pair seekPoints = + adaptedSeekMap.getSeekPoints(timeUs); + SeekPoints exoPlayerSeekPoints; + if (seekPoints.first == seekPoints.second) { + exoPlayerSeekPoints = new SeekPoints(asExoPlayerSeekPoint(seekPoints.first)); + } else { + exoPlayerSeekPoints = + new SeekPoints( + asExoPlayerSeekPoint(seekPoints.first), asExoPlayerSeekPoint(seekPoints.second)); + } + return exoPlayerSeekPoints; + } + + private static SeekPoint asExoPlayerSeekPoint(MediaParser.SeekPoint seekPoint) { + return new SeekPoint(seekPoint.timeMicros, seekPoint.position); + } + } + + private static final class DataReaderAdapter implements DataReader { + + @Nullable public MediaParser.InputReader input; + + @Override + public int read(byte[] buffer, int offset, int length) throws IOException { + return Util.castNonNull(input).read(buffer, offset, length); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/mediaparser/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/mediaparser/package-info.java new file mode 100644 index 0000000000..3eedf0c7a4 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/mediaparser/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.source.mediaparser; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/package-info.java new file mode 100644 index 0000000000..adb05a46f2 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.source; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/MediaDescription.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/MediaDescription.java new file mode 100644 index 0000000000..7ddc8a6b22 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/MediaDescription.java @@ -0,0 +1,383 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.source.rtsp.RtspMessageUtil.parseInt; +import static com.google.android.exoplayer2.source.rtsp.SessionDescription.ATTR_FMTP; +import static com.google.android.exoplayer2.source.rtsp.SessionDescription.ATTR_RTPMAP; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Util.castNonNull; + +import androidx.annotation.Nullable; +import androidx.annotation.StringDef; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableMap; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.util.HashMap; + +/** Represents one media description section in a SDP message. */ +/* package */ final class MediaDescription { + + /** Represents the mandatory RTPMAP attribute in MediaDescription. Reference RFC 2327 Page 22. */ + public static final class RtpMapAttribute { + + /** Parses the RTPMAP attribute value (with the part "a=rtpmap:" removed). */ + public static RtpMapAttribute parse(String rtpmapString) throws ParserException { + String[] rtpmapInfo = Util.splitAtFirst(rtpmapString, " "); + checkArgument(rtpmapInfo.length == 2); + int payloadType = parseInt(rtpmapInfo[0]); + + String[] mediaInfo = Util.split(rtpmapInfo[1].trim(), "/"); + checkArgument(mediaInfo.length >= 2); + int clockRate = parseInt(mediaInfo[1]); + int encodingParameters = C.INDEX_UNSET; + if (mediaInfo.length == 3) { + encodingParameters = parseInt(mediaInfo[2]); + } + return new RtpMapAttribute( + payloadType, /* mediaEncoding= */ mediaInfo[0], clockRate, encodingParameters); + } + + /** The assigned RTP payload type. */ + public final int payloadType; + /** The encoding method used in the RTP stream. */ + public final String mediaEncoding; + /** The clock rate used in the RTP stream. */ + public final int clockRate; + /** The optional encoding parameter. */ + public final int encodingParameters; + + private RtpMapAttribute( + int payloadType, String mediaEncoding, int clockRate, int encodingParameters) { + this.payloadType = payloadType; + this.mediaEncoding = mediaEncoding; + this.clockRate = clockRate; + this.encodingParameters = encodingParameters; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + RtpMapAttribute that = (RtpMapAttribute) o; + return payloadType == that.payloadType + && mediaEncoding.equals(that.mediaEncoding) + && clockRate == that.clockRate + && encodingParameters == that.encodingParameters; + } + + @Override + public int hashCode() { + int result = 7; + result = 31 * result + payloadType; + result = 31 * result + mediaEncoding.hashCode(); + result = 31 * result + clockRate; + result = 31 * result + encodingParameters; + return result; + } + } + + /** Builder class for {@link MediaDescription}. */ + public static final class Builder { + + /** + * RTPMAP attribute format: {@code //}. 
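As a worked example of the splitting rules implemented by RtpMapAttribute.parse above: for the SDP line "a=rtpmap:97 AMR/8000/1" the attribute value handed to parse is "97 AMR/8000/1", yielding payload type 97, encoding "AMR", clock rate 8000 and one channel; when the optional parameter is absent, as in "96 H264/90000", encodingParameters is C.INDEX_UNSET. A standalone sketch of the same splitting (RtpMapParseSketch is an invented name; the real class is package-private):

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Util;

// Sketch of the rtpmap splitting performed by RtpMapAttribute.parse.
final class RtpMapParseSketch {
  public static void main(String[] args) {
    String rtpmapValue = "97 AMR/8000/1";
    String[] rtpmapInfo = Util.splitAtFirst(rtpmapValue, " ");
    int payloadType = Integer.parseInt(rtpmapInfo[0]); // 97
    String[] mediaInfo = Util.split(rtpmapInfo[1].trim(), "/");
    String mediaEncoding = mediaInfo[0]; // "AMR"
    int clockRate = Integer.parseInt(mediaInfo[1]); // 8000
    int encodingParameters =
        mediaInfo.length == 3 ? Integer.parseInt(mediaInfo[2]) : C.INDEX_UNSET; // 1
    System.out.println(
        payloadType + " " + mediaEncoding + "/" + clockRate + "/" + encodingParameters);
  }
}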
+ */ + private static final String RTP_MAP_ATTR_AUDIO_FMT = "%d %s/%d/%d"; + + private static final int RTP_STATIC_PAYLOAD_TYPE_PCMU = 0; + private static final int RTP_STATIC_PAYLOAD_TYPE_PCMA = 8; + private static final int RTP_STATIC_PAYLOAD_TYPE_L16_STEREO = 10; + private static final int RTP_STATIC_PAYLOAD_TYPE_L16_MONO = 11; + + private final String mediaType; + private final int port; + private final String transportProtocol; + private final int payloadType; + private final HashMap attributes; + + private int bitrate; + @Nullable private String mediaTitle; + @Nullable private String connection; + @Nullable private String key; + + /** + * Creates a new instance. + * + * @param mediaType The media type. + * @param port The associated port number. + * @param transportProtocol The protocol used for data transport. + * @param payloadType The RTP payload type used for data transport. + */ + public Builder(String mediaType, int port, String transportProtocol, int payloadType) { + this.mediaType = mediaType; + this.port = port; + this.transportProtocol = transportProtocol; + this.payloadType = payloadType; + attributes = new HashMap<>(); + bitrate = Format.NO_VALUE; + } + + /** + * Sets {@link MediaDescription#mediaTitle}. The default is {@code null}. + * + * @param mediaTitle The assigned media title. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setMediaTitle(String mediaTitle) { + this.mediaTitle = mediaTitle; + return this; + } + + /** + * Sets {@link MediaDescription#connection}. The default is {@code null}. + * + * @param connection The connection parameter. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setConnection(String connection) { + this.connection = connection; + return this; + } + + /** + * Sets {@link MediaDescription#bitrate}. The default is {@link Format#NO_VALUE}. + * + * @param bitrate The estimated bitrate measured in bits per second. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setBitrate(int bitrate) { + this.bitrate = bitrate; + return this; + } + + /** + * Sets {@link MediaDescription#key}. The default is {@code null}. + * + * @param key The encryption parameter. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setKey(String key) { + this.key = key; + return this; + } + + /** + * Adds an attribute entry to {@link MediaDescription#attributes}. + * + *
+ * <p>
        Previously added attribute under the same name will be overwritten. + * + * @param attributeName The name of the attribute. + * @param attributeValue The value of the attribute, or "" if the attribute bears no value. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder addAttribute(String attributeName, String attributeValue) { + attributes.put(attributeName, attributeValue); + return this; + } + + /** + * Builds a new {@link MediaDescription} instance. + * + * @throws IllegalStateException When the rtpmap attribute (RFC 2327 Page 22) is not set, or + * cannot be parsed. + */ + public MediaDescription build() { + try { + RtpMapAttribute rtpMapAttribute = + attributes.containsKey(ATTR_RTPMAP) + ? RtpMapAttribute.parse(castNonNull(attributes.get(ATTR_RTPMAP))) + : RtpMapAttribute.parse(getRtpMapStringByPayloadType(payloadType)); + return new MediaDescription(this, ImmutableMap.copyOf(attributes), rtpMapAttribute); + } catch (ParserException e) { + throw new IllegalStateException(e); + } + } + + private static String getRtpMapStringByPayloadType(int rtpPayloadType) { + checkArgument(rtpPayloadType < 96); + + switch (rtpPayloadType) { + // See RFC3551 Section 6. + case RTP_STATIC_PAYLOAD_TYPE_PCMU: + return constructAudioRtpMap( + RTP_STATIC_PAYLOAD_TYPE_PCMU, + /* mediaEncoding= */ "PCMU", + /* clockRate= */ 8_000, + /* channelCount= */ 1); + case RTP_STATIC_PAYLOAD_TYPE_PCMA: + return constructAudioRtpMap( + RTP_STATIC_PAYLOAD_TYPE_PCMA, + /* mediaEncoding= */ "PCMA", + /* clockRate= */ 8_000, + /* channelCount= */ 1); + case RTP_STATIC_PAYLOAD_TYPE_L16_STEREO: + return constructAudioRtpMap( + RTP_STATIC_PAYLOAD_TYPE_L16_STEREO, + /* mediaEncoding= */ "L16", + /* clockRate= */ 44_100, + /* channelCount= */ 2); + case RTP_STATIC_PAYLOAD_TYPE_L16_MONO: + return constructAudioRtpMap( + RTP_STATIC_PAYLOAD_TYPE_L16_MONO, + /* mediaEncoding= */ "L16", + /* clockRate= */ 44_100, + /* channelCount= */ 1); + default: + throw new IllegalStateException("Unsupported static paylod type " + rtpPayloadType); + } + } + + private static String constructAudioRtpMap( + int payloadType, String mediaEncoding, int clockRate, int channelCount) { + return Util.formatInvariant( + RTP_MAP_ATTR_AUDIO_FMT, payloadType, mediaEncoding, clockRate, channelCount); + } + } + + /** The media types allowed in a SDP media description. */ + @Retention(RetentionPolicy.SOURCE) + @StringDef({MEDIA_TYPE_VIDEO, MEDIA_TYPE_AUDIO}) + @Documented + public @interface MediaType {} + /** Audio media type. */ + public static final String MEDIA_TYPE_AUDIO = "audio"; + /** Video media type. */ + public static final String MEDIA_TYPE_VIDEO = "video"; + /** Default RTP/AVP profile. */ + public static final String RTP_AVP_PROFILE = "RTP/AVP"; + + /** The {@link MediaType}. */ + @MediaType public final String mediaType; + /** The associated port number. */ + public final int port; + /** The protocol used for data transport. */ + public final String transportProtocol; + /** The assigned RTP payload type. */ + public final int payloadType; + /** The estimated connection bitrate in bits per second. */ + public final int bitrate; + /** The assigned media title. */ + @Nullable public final String mediaTitle; + /** The connection parameters. */ + @Nullable public final String connection; + /** The encryption parameter. */ + @Nullable public final String key; + /** The media-specific attributes. */ + public final ImmutableMap attributes; + /** The mandatory rtpmap attribute in the media description (RFC2327 Page 22). 
*/ + public final RtpMapAttribute rtpMapAttribute; + + /** Creates a new instance. */ + private MediaDescription( + Builder builder, ImmutableMap attributes, RtpMapAttribute rtpMapAttribute) { + this.mediaType = builder.mediaType; + this.port = builder.port; + this.transportProtocol = builder.transportProtocol; + this.payloadType = builder.payloadType; + this.mediaTitle = builder.mediaTitle; + this.connection = builder.connection; + this.bitrate = builder.bitrate; + this.key = builder.key; + this.attributes = attributes; + this.rtpMapAttribute = rtpMapAttribute; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + + if (o == null || getClass() != o.getClass()) { + return false; + } + + MediaDescription other = (MediaDescription) o; + return mediaType.equals(other.mediaType) + && port == other.port + && transportProtocol.equals(other.transportProtocol) + && payloadType == other.payloadType + && bitrate == other.bitrate + && attributes.equals(other.attributes) + && rtpMapAttribute.equals(other.rtpMapAttribute) + && Util.areEqual(mediaTitle, other.mediaTitle) + && Util.areEqual(connection, other.connection) + && Util.areEqual(key, other.key); + } + + @Override + public int hashCode() { + int result = 7; + result = 31 * result + mediaType.hashCode(); + result = 31 * result + port; + result = 31 * result + transportProtocol.hashCode(); + result = 31 * result + payloadType; + result = 31 * result + bitrate; + result = 31 * result + attributes.hashCode(); + result = 31 * result + rtpMapAttribute.hashCode(); + result = 31 * result + (mediaTitle == null ? 0 : mediaTitle.hashCode()); + result = 31 * result + (connection == null ? 0 : connection.hashCode()); + result = 31 * result + (key == null ? 0 : key.hashCode()); + return result; + } + + /** + * Returns the FMTP attribute as a map of FMTP parameter names to values; or an empty map if the + * {@link MediaDescription} does not contain any FMTP attribute. + * + *
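// Illustrative sketch, not part of the patch: how an SDP fmtp value of the shape handled by
// getFmtpParametersAsMap() splits into parameter names and values. The sample fmtp line and the
// class name below are made up for demonstration; the real parsing code follows in this file.
import java.util.LinkedHashMap;
import java.util.Map;

final class FmtpParsingSketch {
  public static void main(String[] args) {
    // "<format> <name>=<value>[; <name>=<value>]..." (see RFC 3640 Section 4.4.1).
    String fmtpValue = "96 profile-level-id=1;mode=AAC-hbr;sizelength=13";
    String[] components = fmtpValue.split(" ", /* limit= */ 2); // Separate format from parameters.
    Map<String, String> parameters = new LinkedHashMap<>();
    for (String parameter : components[1].split(";\\s?")) {
      String[] pair = parameter.split("=", /* limit= */ 2); // Values may themselves contain '='.
      parameters.put(pair[0], pair[1]);
    }
    System.out.println(parameters); // {profile-level-id=1, mode=AAC-hbr, sizelength=13}
  }
}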
+ * <p>
        FMTP format reference: RFC2327 Page 27. The spaces around the FMTP attribute delimiters are + * removed. + */ + public ImmutableMap getFmtpParametersAsMap() { + @Nullable String fmtpAttributeValue = attributes.get(ATTR_FMTP); + if (fmtpAttributeValue == null) { + return ImmutableMap.of(); + } + + // fmtp format: RFC2327 Page 27. + String[] fmtpComponents = Util.splitAtFirst(fmtpAttributeValue, " "); + checkArgument(fmtpComponents.length == 2, fmtpAttributeValue); + + // Format of the parameter: RFC3640 Section 4.4.1: + // =[; =]. + // Split with an explicit limit of 0 to handle an optional trailing semicolon. + String[] parameters = fmtpComponents[1].split(";\\s?", /* limit= */ 0); + ImmutableMap.Builder formatParametersBuilder = new ImmutableMap.Builder<>(); + for (String parameter : parameters) { + // The parameter values can bear equal signs, so splitAtFirst must be used. + String[] parameterPair = Util.splitAtFirst(parameter, "="); + formatParametersBuilder.put(parameterPair[0], parameterPair[1]); + } + return formatParametersBuilder.buildOrThrow(); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpDataChannel.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpDataChannel.java new file mode 100644 index 0000000000..ab39263102 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpDataChannel.java @@ -0,0 +1,60 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.source.rtsp.RtspMessageChannel.InterleavedBinaryDataListener; +import com.google.android.exoplayer2.upstream.DataSource; +import java.io.IOException; + +/** An RTP {@link DataSource}. */ +/* package */ interface RtpDataChannel extends DataSource { + + /** Creates {@link RtpDataChannel} for RTSP streams. */ + interface Factory { + + /** + * Creates a new {@link RtpDataChannel} instance for RTP data transfer. + * + * @param trackId The track ID. + * @throws IOException If the data channels failed to open. + */ + RtpDataChannel createAndOpenDataChannel(int trackId) throws IOException; + + /** Returns a fallback {@code Factory}, {@code null} when there is no fallback available. */ + @Nullable + default Factory createFallbackDataChannelFactory() { + return null; + } + } + + /** Returns the RTSP transport header for this {@link RtpDataChannel} */ + String getTransport(); + + /** + * Returns the receiving port or channel used by the underlying transport protocol, {@link + * C#INDEX_UNSET} if the data channel is not opened. + */ + int getLocalPort(); + + /** + * Returns a {@link InterleavedBinaryDataListener} if the implementation supports receiving RTP + * packets on a side-band protocol, for example RTP-over-RTSP; otherwise {@code null}. 
+ */ + @Nullable + InterleavedBinaryDataListener getInterleavedBinaryDataListener(); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpDataLoadable.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpDataLoadable.java new file mode 100644 index 0000000000..d2f0617966 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpDataLoadable.java @@ -0,0 +1,196 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import android.os.Handler; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.extractor.DefaultExtractorInput; +import com.google.android.exoplayer2.extractor.Extractor; +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.PositionHolder; +import com.google.android.exoplayer2.upstream.DataSourceUtil; +import com.google.android.exoplayer2.upstream.Loader; +import com.google.android.exoplayer2.util.Util; +import java.io.IOException; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * A {@link Loader.Loadable} that uses two {@link RtpDataChannel} instances to listen on incoming + * RTP and RTCP packets. + * + *
+ * <ul>
+ *   <li>When using UDP as RTP transport, the local RTP UDP port number is selected by the runtime
+ *       on opening the first {@link RtpDataChannel}; the second {@link RtpDataChannel} for RTCP
+ *       uses the port number that is the RTP UDP port number plus one.
+ *   <li>When using TCP as RTP transport, the first {@link RtpDataChannel} for RTP uses the {@link
+ *       #trackId} as its interleaved channel number; the second {@link RtpDataChannel} for RTCP
+ *       uses the interleaved channel number that is the RTP interleaved channel number plus one.
+ * </ul>
+ *
+ * <p>
        Pass a listener via the constructor to receive a callback when the RTSP transport is ready. + * {@link #load} will throw an {@link IOException} if either of the two data channels fails to open. + * + *
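// Illustrative sketch, not part of the patch: the UDP port pairing described in the list above,
// i.e. bind RTP on a runtime-selected port and RTCP on that port plus one, retrying with a new
// RTP port when the pair is unavailable. Class and method names are made up for demonstration.
import java.io.IOException;
import java.net.DatagramSocket;

final class UdpPortPairSketch {
  static DatagramSocket[] openRtpRtcpPair(int attempts) throws IOException {
    for (int i = 0; i < attempts; i++) {
      DatagramSocket rtpSocket = new DatagramSocket(/* port= */ 0); // Runtime picks a free port.
      try {
        // RTCP uses the RTP port number plus one.
        DatagramSocket rtcpSocket = new DatagramSocket(rtpSocket.getLocalPort() + 1);
        return new DatagramSocket[] {rtpSocket, rtcpSocket};
      } catch (IOException e) {
        rtpSocket.close(); // Pair not available; try again with another RTP port.
      }
    }
    throw new IOException("No free RTP/RTCP port pair found");
  }
}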
+ * <p>
        Received RTP packets' payloads will be extracted by an {@link RtpExtractor}, and will be + * written to the {@link ExtractorOutput} instance provided at construction. + */ +/* package */ final class RtpDataLoadable implements Loader.Loadable { + + /** Called on loadable events. */ + public interface EventListener { + /** + * Called when the transport information for receiving incoming RTP and RTCP packets is ready. + * + * @param transport The RTSP transport (RFC2326 Section 12.39) including the client data port + * and RTCP port. + * @param rtpDataChannel The {@link RtpDataChannel} associated with the transport. + */ + void onTransportReady(String transport, RtpDataChannel rtpDataChannel); + } + + /** The track ID associated with the Loadable. */ + public final int trackId; + /** The {@link RtspMediaTrack} to load. */ + public final RtspMediaTrack rtspMediaTrack; + + private final EventListener eventListener; + private final ExtractorOutput output; + private final Handler playbackThreadHandler; + private final RtpDataChannel.Factory rtpDataChannelFactory; + + private @MonotonicNonNull RtpExtractor extractor; + + private volatile boolean loadCancelled; + private volatile long pendingSeekPositionUs; + private volatile long nextRtpTimestamp; + + /** + * Creates an {@link RtpDataLoadable} that listens on incoming RTP traffic. + * + *
+ * <p>
        Caller of this constructor must be on playback thread. + * + * @param trackId The track ID associated with the Loadable. + * @param rtspMediaTrack The {@link RtspMediaTrack} to load. + * @param eventListener The {@link EventListener}. + * @param output A {@link ExtractorOutput} instance to which the received and extracted data will + * @param rtpDataChannelFactory A {@link RtpDataChannel.Factory} for {@link RtpDataChannel}. + */ + public RtpDataLoadable( + int trackId, + RtspMediaTrack rtspMediaTrack, + EventListener eventListener, + ExtractorOutput output, + RtpDataChannel.Factory rtpDataChannelFactory) { + this.trackId = trackId; + this.rtspMediaTrack = rtspMediaTrack; + this.eventListener = eventListener; + this.output = output; + this.playbackThreadHandler = Util.createHandlerForCurrentLooper(); + this.rtpDataChannelFactory = rtpDataChannelFactory; + pendingSeekPositionUs = C.TIME_UNSET; + } + + /** + * Sets the timestamp of an RTP packet to arrive. + * + * @param timestamp The timestamp of the RTP packet to arrive. Supply {@link C#TIME_UNSET} if its + * unavailable. + */ + public void setTimestamp(long timestamp) { + if (timestamp != C.TIME_UNSET) { + if (!checkNotNull(extractor).hasReadFirstRtpPacket()) { + extractor.setFirstTimestamp(timestamp); + } + } + } + + /** + * Sets the timestamp of an RTP packet to arrive. + * + * @param sequenceNumber The sequence number of the RTP packet to arrive. Supply {@link + * C#INDEX_UNSET} if its unavailable. + */ + public void setSequenceNumber(int sequenceNumber) { + if (!checkNotNull(extractor).hasReadFirstRtpPacket()) { + extractor.setFirstSequenceNumber(sequenceNumber); + } + } + + @Override + public void cancelLoad() { + loadCancelled = true; + } + + @Override + public void load() throws IOException { + @Nullable RtpDataChannel dataChannel = null; + try { + dataChannel = rtpDataChannelFactory.createAndOpenDataChannel(trackId); + String transport = dataChannel.getTransport(); + + RtpDataChannel finalDataChannel = dataChannel; + playbackThreadHandler.post(() -> eventListener.onTransportReady(transport, finalDataChannel)); + + // Sets up the extractor. + ExtractorInput extractorInput = + new DefaultExtractorInput( + checkNotNull(dataChannel), /* position= */ 0, /* length= */ C.LENGTH_UNSET); + extractor = new RtpExtractor(rtspMediaTrack.payloadFormat, trackId); + extractor.init(output); + + while (!loadCancelled) { + if (pendingSeekPositionUs != C.TIME_UNSET) { + extractor.seek(nextRtpTimestamp, pendingSeekPositionUs); + pendingSeekPositionUs = C.TIME_UNSET; + } + + @Extractor.ReadResult + int readResult = extractor.read(extractorInput, /* seekPosition= */ new PositionHolder()); + if (readResult == Extractor.RESULT_END_OF_INPUT) { + // Loading is finished. + break; + } + } + } finally { + DataSourceUtil.closeQuietly(dataChannel); + } + } + + /** + * Signals when performing an RTSP seek that involves RTSP message exchange. + * + *
+ * <p>
        {@link #seekToUs} must be called after the seek is successful. + */ + public void resetForSeek() { + checkNotNull(extractor).preSeek(); + } + + /** + * Sets the correct start position and RTP timestamp after a successful RTSP seek. + * + * @param positionUs The position in microseconds from the start, from which the server starts + * play. + * @param nextRtpTimestamp The first RTP packet's timestamp after the seek. + */ + public void seekToUs(long positionUs, long nextRtpTimestamp) { + pendingSeekPositionUs = positionUs; + this.nextRtpTimestamp = nextRtpTimestamp; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpExtractor.java new file mode 100644 index 0000000000..12e81635d1 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpExtractor.java @@ -0,0 +1,216 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import android.os.SystemClock; +import androidx.annotation.GuardedBy; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.extractor.Extractor; +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.PositionHolder; +import com.google.android.exoplayer2.extractor.SeekMap; +import com.google.android.exoplayer2.source.rtsp.reader.DefaultRtpPayloadReaderFactory; +import com.google.android.exoplayer2.source.rtsp.reader.RtpPayloadReader; +import com.google.android.exoplayer2.util.ParsableByteArray; +import java.io.IOException; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** Extracts data from RTP packets. 
*/ +/* package */ final class RtpExtractor implements Extractor { + + private final RtpPayloadReader payloadReader; + private final ParsableByteArray rtpPacketScratchBuffer; + private final ParsableByteArray rtpPacketDataBuffer; + private final int trackId; + private final Object lock; + private final RtpPacketReorderingQueue reorderingQueue; + + private @MonotonicNonNull ExtractorOutput output; + private boolean firstPacketRead; + private volatile long firstTimestamp; + private volatile int firstSequenceNumber; + + @GuardedBy("lock") + private boolean isSeekPending; + + @GuardedBy("lock") + private long nextRtpTimestamp; + + @GuardedBy("lock") + private long playbackStartTimeUs; + + public RtpExtractor(RtpPayloadFormat payloadFormat, int trackId) { + this.trackId = trackId; + + payloadReader = + checkNotNull(new DefaultRtpPayloadReaderFactory().createPayloadReader(payloadFormat)); + rtpPacketScratchBuffer = new ParsableByteArray(RtpPacket.MAX_SIZE); + rtpPacketDataBuffer = new ParsableByteArray(); + lock = new Object(); + reorderingQueue = new RtpPacketReorderingQueue(); + firstTimestamp = C.TIME_UNSET; + firstSequenceNumber = C.INDEX_UNSET; + nextRtpTimestamp = C.TIME_UNSET; + playbackStartTimeUs = C.TIME_UNSET; + } + + /** Sets the timestamp of the first RTP packet to arrive. */ + public void setFirstTimestamp(long firstTimestamp) { + this.firstTimestamp = firstTimestamp; + } + + /** Sets the sequence number of the first RTP packet to arrive. */ + public void setFirstSequenceNumber(int firstSequenceNumber) { + this.firstSequenceNumber = firstSequenceNumber; + } + + /** Returns whether the first RTP packet is processed. */ + public boolean hasReadFirstRtpPacket() { + return firstPacketRead; + } + + /** + * Signals when performing an RTSP seek that involves RTSP message exchange. + * + *
+ * <p>{@link #seek} must be called after a successful RTSP seek.
+ *
+ * <p>After this method is called, the incoming RTP packets are read from the {@link
+ * ExtractorInput}, but they are not further processed by the {@link RtpPayloadReader readers}.
+ *
+ * <p>
        The user must clear the {@link ExtractorOutput} after calling this method, to ensure no + * samples are written to {@link ExtractorOutput}. + */ + public void preSeek() { + synchronized (lock) { + isSeekPending = true; + } + } + + @Override + public boolean sniff(ExtractorInput input) { + throw new UnsupportedOperationException( + "RTP packets are transmitted in a packet stream do not support sniffing."); + } + + @Override + public void init(ExtractorOutput output) { + payloadReader.createTracks(output, trackId); + output.endTracks(); + // RTP does not embed duration or seek info. + output.seekMap(new SeekMap.Unseekable(C.TIME_UNSET)); + this.output = output; + } + + @Override + public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { + checkNotNull(output); // Asserts init is called. + + // Reads one RTP packet at a time. + int bytesRead = input.read(rtpPacketScratchBuffer.getData(), 0, RtpPacket.MAX_SIZE); + if (bytesRead == C.RESULT_END_OF_INPUT) { + return Extractor.RESULT_END_OF_INPUT; + } else if (bytesRead == 0) { + return Extractor.RESULT_CONTINUE; + } + + rtpPacketScratchBuffer.setPosition(0); + rtpPacketScratchBuffer.setLimit(bytesRead); + @Nullable RtpPacket packet = RtpPacket.parse(rtpPacketScratchBuffer); + if (packet == null) { + return RESULT_CONTINUE; + } + + long packetArrivalTimeMs = SystemClock.elapsedRealtime(); + long packetCutoffTimeMs = getCutoffTimeMs(packetArrivalTimeMs); + reorderingQueue.offer(packet, packetArrivalTimeMs); + @Nullable RtpPacket dequeuedPacket = reorderingQueue.poll(packetCutoffTimeMs); + if (dequeuedPacket == null) { + // No packet is available for reading. + return RESULT_CONTINUE; + } + packet = dequeuedPacket; + + if (!firstPacketRead) { + // firstTimestamp and firstSequenceNumber are transmitted over RTSP. There is no guarantee + // that they arrive before the RTP packets. We use whichever comes first. + if (firstTimestamp == C.TIME_UNSET) { + firstTimestamp = packet.timestamp; + } + if (firstSequenceNumber == C.INDEX_UNSET) { + firstSequenceNumber = packet.sequenceNumber; + } + payloadReader.onReceivingFirstPacket(firstTimestamp, firstSequenceNumber); + firstPacketRead = true; + } + + synchronized (lock) { + // Ignores the incoming packets while seek is pending. + if (isSeekPending) { + if (nextRtpTimestamp != C.TIME_UNSET && playbackStartTimeUs != C.TIME_UNSET) { + reorderingQueue.reset(); + payloadReader.seek(nextRtpTimestamp, playbackStartTimeUs); + isSeekPending = false; + nextRtpTimestamp = C.TIME_UNSET; + playbackStartTimeUs = C.TIME_UNSET; + } + } else { + do { + // Deplete the reordering queue as much as possible. + rtpPacketDataBuffer.reset(packet.payloadData); + payloadReader.consume( + rtpPacketDataBuffer, packet.timestamp, packet.sequenceNumber, packet.marker); + packet = reorderingQueue.poll(packetCutoffTimeMs); + } while (packet != null); + } + } + return RESULT_CONTINUE; + } + + @Override + public void seek(long nextRtpTimestamp, long playbackStartTimeUs) { + synchronized (lock) { + if (!isSeekPending) { + // Sets the isSeekPending flag, in the case preSeek() is not called, when seeking does not + // require RTSP message exchange. For example, playing back with non-zero start position. + isSeekPending = true; + } + this.nextRtpTimestamp = nextRtpTimestamp; + this.playbackStartTimeUs = playbackStartTimeUs; + } + } + + @Override + public void release() { + // Do nothing. + } + + /** + * Returns the cutoff time of waiting for an out-of-order packet. + * + *
+ * <p>
        Returns the cutoff time to pass to {@link RtpPacketReorderingQueue#poll(long)} based on the + * given RtpPacket arrival time. + */ + private static long getCutoffTimeMs(long packetArrivalTimeMs) { + // TODO(internal b/172331505) 30ms is roughly the time for one video frame. It is not rigorously + // chosen and will need fine tuning in the future. + return packetArrivalTimeMs - 30; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpPacket.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpPacket.java new file mode 100644 index 0000000000..21dd6f1a6b --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpPacket.java @@ -0,0 +1,344 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import com.google.common.math.IntMath; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.nio.ByteBuffer; + +/** + * Represents the header and the payload of an RTP packet. + * + *
+ * <p>Parsing of the header extension and CSRC is not supported at the moment.
+ *
+ * <p>Structure of an RTP header (RFC3550, Section 5.1).
+ *
+ * <pre>
        + *  0                   1                   2                   3
        + *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
        + * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        + * |V=2|P|X|  CC   |M|     PT      |       sequence number         |
        + * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        + * |                           timestamp                           |
        + * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        + * |           synchronization source (SSRC) identifier            |
        + * +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
        + * |            contributing source (CSRC) identifiers             |
        + * |                             ....                              |
        + * +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
        + * | Profile-specific extension ID |   Extension header length     |
        + * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        + * |                       Extension header                        |
        + * |                             ....                              |
        + * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        + *    3                   2                   1
        + *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
+ * </pre>
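// Illustrative sketch, not part of the patch: unpacking the fixed 12-byte header shown in the
// diagram above with plain bit operations (RFC 3550 Section 5.1). The sample header bytes are
// made up; the class name is for demonstration only.
import java.nio.ByteBuffer;

final class RtpHeaderSketch {
  public static void main(String[] args) {
    byte[] header = {
      (byte) 0x80, 0x60, 0x12, 0x34, // V=2, P=0, X=0, CC=0, M=0, PT=96, sequence number 0x1234.
      0x00, 0x00, 0x03, (byte) 0xE8, // Timestamp 1000.
      (byte) 0xDE, (byte) 0xAD, (byte) 0xBE, (byte) 0xEF // SSRC 0xDEADBEEF.
    };
    ByteBuffer buffer = ByteBuffer.wrap(header);
    int firstByte = buffer.get() & 0xFF;
    int version = firstByte >> 6;                    // Word 0, bits 0-1.
    boolean padding = ((firstByte >> 5) & 0x1) == 1; // Word 0, bit 2.
    boolean extension = ((firstByte >> 4) & 0x1) == 1;
    int csrcCount = firstByte & 0xF;                 // Word 0, bits 4-7.
    int secondByte = buffer.get() & 0xFF;
    boolean marker = (secondByte >> 7) == 1;         // Word 0, bit 8.
    int payloadType = secondByte & 0x7F;             // Word 0, bits 9-15.
    int sequenceNumber = buffer.getShort() & 0xFFFF; // Word 0, bits 16-31.
    long timestamp = buffer.getInt() & 0xFFFFFFFFL;  // Word 1.
    long ssrc = buffer.getInt() & 0xFFFFFFFFL;       // Word 2.
    System.out.printf(
        "V=%d P=%b X=%b CC=%d M=%b PT=%d seq=%d ts=%d ssrc=%08X%n",
        version, padding, extension, csrcCount, marker, payloadType, sequenceNumber, timestamp, ssrc);
  }
}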
        + */ +public final class RtpPacket { + + /** Builder class for an {@link RtpPacket} */ + public static final class Builder { + private boolean padding; + private boolean marker; + private byte payloadType; + private int sequenceNumber; + private long timestamp; + private int ssrc; + private byte[] csrc = EMPTY; + private byte[] payloadData = EMPTY; + + /** Sets the {@link RtpPacket#padding}. The default is false. */ + @CanIgnoreReturnValue + public Builder setPadding(boolean padding) { + this.padding = padding; + return this; + } + + /** Sets {@link RtpPacket#marker}. The default is false. */ + @CanIgnoreReturnValue + public Builder setMarker(boolean marker) { + this.marker = marker; + return this; + } + + /** Sets {@link RtpPacket#payloadType}. The default is 0. */ + @CanIgnoreReturnValue + public Builder setPayloadType(byte payloadType) { + this.payloadType = payloadType; + return this; + } + + /** Sets {@link RtpPacket#sequenceNumber}. The default is 0. */ + @CanIgnoreReturnValue + public Builder setSequenceNumber(int sequenceNumber) { + checkArgument(sequenceNumber >= MIN_SEQUENCE_NUMBER && sequenceNumber <= MAX_SEQUENCE_NUMBER); + this.sequenceNumber = sequenceNumber & 0xFFFF; + return this; + } + + /** Sets {@link RtpPacket#timestamp}. The default is 0. */ + @CanIgnoreReturnValue + public Builder setTimestamp(long timestamp) { + this.timestamp = timestamp; + return this; + } + + /** Sets {@link RtpPacket#ssrc}. The default is 0. */ + @CanIgnoreReturnValue + public Builder setSsrc(int ssrc) { + this.ssrc = ssrc; + return this; + } + + /** Sets {@link RtpPacket#csrc}. The default is an empty byte array. */ + @CanIgnoreReturnValue + public Builder setCsrc(byte[] csrc) { + checkNotNull(csrc); + this.csrc = csrc; + return this; + } + + /** Sets {@link RtpPacket#payloadData}. The default is an empty byte array. */ + @CanIgnoreReturnValue + public Builder setPayloadData(byte[] payloadData) { + checkNotNull(payloadData); + this.payloadData = payloadData; + return this; + } + + /** Builds the {@link RtpPacket}. */ + public RtpPacket build() { + return new RtpPacket(this); + } + } + + public static final int RTP_VERSION = 2; + + public static final int MAX_SIZE = 65507; + public static final int MIN_HEADER_SIZE = 12; + public static final int MIN_SEQUENCE_NUMBER = 0; + public static final int MAX_SEQUENCE_NUMBER = 0xFFFF; + public static final int CSRC_SIZE = 4; + + /** Returns the next sequence number of the {@code sequenceNumber}. */ + public static int getNextSequenceNumber(int sequenceNumber) { + return IntMath.mod(sequenceNumber + 1, MAX_SEQUENCE_NUMBER + 1); + } + + /** Returns the previous sequence number from the {@code sequenceNumber}. */ + public static int getPreviousSequenceNumber(int sequenceNumber) { + return IntMath.mod(sequenceNumber - 1, MAX_SEQUENCE_NUMBER + 1); + } + + private static final byte[] EMPTY = new byte[0]; + + /** The RTP version field (Word 0, bits 0-1), should always be 2. */ + public final byte version = RTP_VERSION; + /** The RTP padding bit (Word 0, bit 2). */ + public final boolean padding; + /** The RTP extension bit (Word 0, bit 3). */ + public final boolean extension; + /** The RTP CSRC count field (Word 0, bits 4-7). */ + public final byte csrcCount; + + /** The RTP marker bit (Word 0, bit 8). */ + public final boolean marker; + /** The RTP CSRC count field (Word 0, bits 9-15). */ + public final byte payloadType; + + /** The RTP sequence number field (Word 0, bits 16-31). 
*/ + public final int sequenceNumber; + + /** The RTP timestamp field (Word 1). */ + public final long timestamp; + + /** The RTP SSRC field (Word 2). */ + public final int ssrc; + + /** The RTP CSRC fields (Optional, up to 15 items). */ + public final byte[] csrc; + + public final byte[] payloadData; + + /** + * Creates an {@link RtpPacket} from a {@link ParsableByteArray}. + * + * @param packetBuffer The buffer that contains the RTP packet data. + * @return The built {@link RtpPacket}. + */ + @Nullable + public static RtpPacket parse(ParsableByteArray packetBuffer) { + if (packetBuffer.bytesLeft() < MIN_HEADER_SIZE) { + return null; + } + + // Word 0. + int firstByte = packetBuffer.readUnsignedByte(); + byte version = (byte) (firstByte >> 6); + boolean padding = ((firstByte >> 5) & 0x1) == 1; + byte csrcCount = (byte) (firstByte & 0xF); + + if (version != RTP_VERSION) { + return null; + } + + int secondByte = packetBuffer.readUnsignedByte(); + boolean marker = ((secondByte >> 7) & 0x1) == 1; + byte payloadType = (byte) (secondByte & 0x7F); + + int sequenceNumber = packetBuffer.readUnsignedShort(); + + // Word 1. + long timestamp = packetBuffer.readUnsignedInt(); + + // Word 2. + int ssrc = packetBuffer.readInt(); + + // CSRC. + byte[] csrc; + if (csrcCount > 0) { + csrc = new byte[csrcCount * CSRC_SIZE]; + for (int i = 0; i < csrcCount; i++) { + packetBuffer.readBytes(csrc, i * CSRC_SIZE, CSRC_SIZE); + } + } else { + csrc = EMPTY; + } + + // Everything else will be RTP payload. + byte[] payloadData = new byte[packetBuffer.bytesLeft()]; + packetBuffer.readBytes(payloadData, 0, packetBuffer.bytesLeft()); + + Builder builder = new Builder(); + return builder + .setPadding(padding) + .setMarker(marker) + .setPayloadType(payloadType) + .setSequenceNumber(sequenceNumber) + .setTimestamp(timestamp) + .setSsrc(ssrc) + .setCsrc(csrc) + .setPayloadData(payloadData) + .build(); + } + + /** + * Creates an {@link RtpPacket} from a byte array. + * + * @param buffer The buffer that contains the RTP packet data. + * @param length The length of the RTP packet. + * @return The built {@link RtpPacket}. + */ + @Nullable + public static RtpPacket parse(byte[] buffer, int length) { + return parse(new ParsableByteArray(buffer, length)); + } + + private RtpPacket(Builder builder) { + this.padding = builder.padding; + this.extension = false; + this.marker = builder.marker; + this.payloadType = builder.payloadType; + this.sequenceNumber = builder.sequenceNumber; + this.timestamp = builder.timestamp; + this.ssrc = builder.ssrc; + this.csrc = builder.csrc; + this.csrcCount = (byte) (this.csrc.length / CSRC_SIZE); + this.payloadData = builder.payloadData; + } + + /** + * Writes the data in an RTP packet to a target buffer. + * + *
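// Illustrative sketch, not part of the patch: a round trip through the Builder, writeToBuffer()
// and parse() defined in this file. Field values are made up; assumes the snippet lives in (or
// has access to) the com.google.android.exoplayer2.source.rtsp package.
final class RtpPacketRoundTripSketch {
  static RtpPacket roundTrip() {
    RtpPacket packet =
        new RtpPacket.Builder()
            .setPayloadType((byte) 96)
            .setSequenceNumber(1)
            .setTimestamp(3600)
            .setMarker(true)
            .setPayloadData(new byte[] {1, 2, 3})
            .build();
    byte[] buffer = new byte[RtpPacket.MAX_SIZE];
    int written = packet.writeToBuffer(buffer, /* offset= */ 0, /* length= */ buffer.length);
    // parse() reads the serialized header and payload back; equals() compares type, sequence
    // number, marker, timestamp and SSRC.
    return RtpPacket.parse(buffer, written);
  }
}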
+ * <p>
        The size of the target buffer and the length argument should be big enough so that the + * entire RTP packet could fit. That is, if there is not enough space to store the entire RTP + * packet, no bytes will be written. The maximum size of an RTP packet is defined as {@link + * RtpPacket#MAX_SIZE}. + * + * @param target A target byte buffer to which the packet data is copied. + * @param offset The offset into the target array at which to write. + * @param length The maximum number of bytes that can be written. + * @return The number of bytes written, or {@link C#LENGTH_UNSET} if there is not enough space to + * write the packet. + */ + public int writeToBuffer(byte[] target, int offset, int length) { + int packetLength = MIN_HEADER_SIZE + (CSRC_SIZE * csrcCount) + payloadData.length; + if (length < packetLength || target.length - offset < packetLength) { + return C.LENGTH_UNSET; + } + + ByteBuffer buffer = ByteBuffer.wrap(target, offset, length); + byte firstByte = + (byte) + ((version << 6) + | ((padding ? 1 : 0) << 5) + | ((extension ? 1 : 0) << 4) + | (csrcCount & 0xF)); + byte secondByte = (byte) (((marker ? 1 : 0) << 7) | (payloadType & 0x7F)); + buffer + .put(firstByte) + .put(secondByte) + .putShort((short) sequenceNumber) + .putInt((int) timestamp) + .putInt(ssrc) + .put(csrc) + .put(payloadData); + return packetLength; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + RtpPacket rtpPacket = (RtpPacket) o; + return payloadType == rtpPacket.payloadType + && sequenceNumber == rtpPacket.sequenceNumber + && marker == rtpPacket.marker + && timestamp == rtpPacket.timestamp + && ssrc == rtpPacket.ssrc; + } + + @Override + public int hashCode() { + int result = 17; + result = 31 * result + payloadType; + result = 31 * result + sequenceNumber; + result = 31 * result + (marker ? 1 : 0); + result = 31 * result + (int) (timestamp ^ (timestamp >>> 32)); + result = 31 * result + ssrc; + return result; + } + + @Override + public String toString() { + return Util.formatInvariant( + "RtpPacket(payloadType=%d, seq=%d, timestamp=%d, ssrc=%x, marker=%b)", + payloadType, sequenceNumber, timestamp, ssrc, marker); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpPacketReorderingQueue.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpPacketReorderingQueue.java new file mode 100644 index 0000000000..582206f6ef --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpPacketReorderingQueue.java @@ -0,0 +1,196 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.android.exoplayer2.source.rtsp; + +import static java.lang.Math.abs; +import static java.lang.Math.max; +import static java.lang.Math.min; + +import androidx.annotation.GuardedBy; +import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; +import com.google.android.exoplayer2.C; +import java.util.TreeSet; + +/** + * Orders RTP packets by their sequence numbers to correct the possible alternation in packet + * ordering, introduced by UDP transport. + */ +/* package */ final class RtpPacketReorderingQueue { + /** The maximum sequence number discontinuity allowed without resetting the re-ordering buffer. */ + @VisibleForTesting /* package */ static final int MAX_SEQUENCE_LEAP_ALLOWED = 1000; + + /** Queue size threshold for resetting the queue. 5000 packets equate about 7MB in buffer size. */ + private static final int QUEUE_SIZE_THRESHOLD_FOR_RESET = 5000; + + // Use set to eliminate duplicating packets. + @GuardedBy("this") + private final TreeSet packetQueue; + + @GuardedBy("this") + private int lastReceivedSequenceNumber; + + @GuardedBy("this") + private int lastDequeuedSequenceNumber; + + @GuardedBy("this") + private boolean started; + + /** Creates an instance. */ + public RtpPacketReorderingQueue() { + packetQueue = + new TreeSet<>( + (packetContainer1, packetContainer2) -> + calculateSequenceNumberShift( + packetContainer1.packet.sequenceNumber, + packetContainer2.packet.sequenceNumber)); + + reset(); + } + + public synchronized void reset() { + packetQueue.clear(); + started = false; + lastDequeuedSequenceNumber = C.INDEX_UNSET; + lastReceivedSequenceNumber = C.INDEX_UNSET; + } + + /** + * Offer one packet to the reordering queue. + * + *
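// Illustrative sketch, not part of the patch: the modulo-2^16 sequence number arithmetic that the
// reordering logic relies on (the patch itself uses Guava's IntMath.mod in RtpPacket). The class
// name is made up; Math.floorMod keeps results in [0, 65535] across the wrap-around point.
final class SequenceNumberSketch {
  private static final int MODULO = 1 << 16; // RTP sequence numbers are unsigned 16-bit values.

  static int next(int sequenceNumber) {
    return Math.floorMod(sequenceNumber + 1, MODULO); // 65535 -> 0
  }

  static int previous(int sequenceNumber) {
    return Math.floorMod(sequenceNumber - 1, MODULO); // 0 -> 65535
  }

  public static void main(String[] args) {
    System.out.println(next(65535));  // 0
    System.out.println(previous(0));  // 65535
    System.out.println(next(1234));   // 1235
  }
}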
+ * <p>A packet will not be added to the queue if a logically preceding packet has already been
+ * dequeued.
+ *
+ * <p>
        If a packet creates a shift in sequence number that is at least {@link + * #MAX_SEQUENCE_LEAP_ALLOWED} compared to the last offered packet, the queue is emptied and then + * the packet is added. + * + * @param packet The packet to add. + * @param receivedTimestampMs The timestamp in milliseconds, at which the packet was received. + * @return Returns {@code false} if the packet was dropped because it was outside the expected + * range of accepted packets, otherwise {@code true} (on duplicated packets, this method + * returns {@code true}). + */ + public synchronized boolean offer(RtpPacket packet, long receivedTimestampMs) { + if (packetQueue.size() >= QUEUE_SIZE_THRESHOLD_FOR_RESET) { + throw new IllegalStateException( + "Queue size limit of " + QUEUE_SIZE_THRESHOLD_FOR_RESET + " reached."); + } + + int packetSequenceNumber = packet.sequenceNumber; + if (!started) { + reset(); + lastDequeuedSequenceNumber = RtpPacket.getPreviousSequenceNumber(packetSequenceNumber); + started = true; + addToQueue(new RtpPacketContainer(packet, receivedTimestampMs)); + return true; + } + + int expectedSequenceNumber = RtpPacket.getNextSequenceNumber(lastReceivedSequenceNumber); + // A positive shift means the packet succeeds the last received packet. + int sequenceNumberShift = + calculateSequenceNumberShift(packetSequenceNumber, expectedSequenceNumber); + if (abs(sequenceNumberShift) < MAX_SEQUENCE_LEAP_ALLOWED) { + if (calculateSequenceNumberShift(packetSequenceNumber, lastDequeuedSequenceNumber) > 0) { + // Add the packet in the queue only if a succeeding packet has not been dequeued already. + addToQueue(new RtpPacketContainer(packet, receivedTimestampMs)); + return true; + } + } else { + // Discard all previous received packets and start subsequent receiving from here. + lastDequeuedSequenceNumber = RtpPacket.getPreviousSequenceNumber(packetSequenceNumber); + packetQueue.clear(); + addToQueue(new RtpPacketContainer(packet, receivedTimestampMs)); + return true; + } + return false; + } + + /** + * Polls an {@link RtpPacket} from the queue. + * + * @param cutoffTimestampMs A cutoff timestamp in milliseconds used to determine if the head of + * the queue should be dequeued, even if it's not the next packet in sequence. + * @return Returns a packet if the packet at the queue head is the next packet in sequence; or its + * {@link #offer received} timestamp is before {@code cutoffTimestampMs}. Otherwise {@code + * null}. + */ + @Nullable + public synchronized RtpPacket poll(long cutoffTimestampMs) { + if (packetQueue.isEmpty()) { + return null; + } + + RtpPacketContainer packetContainer = packetQueue.first(); + int packetSequenceNumber = packetContainer.packet.sequenceNumber; + + if (packetSequenceNumber == RtpPacket.getNextSequenceNumber(lastDequeuedSequenceNumber) + || cutoffTimestampMs >= packetContainer.receivedTimestampMs) { + packetQueue.pollFirst(); + lastDequeuedSequenceNumber = packetSequenceNumber; + return packetContainer.packet; + } + + return null; + } + + // Internals. + + private synchronized void addToQueue(RtpPacketContainer packet) { + lastReceivedSequenceNumber = packet.packet.sequenceNumber; + packetQueue.add(packet); + } + + private static final class RtpPacketContainer { + public final RtpPacket packet; + public final long receivedTimestampMs; + + /** Creates an instance. 
*/ + public RtpPacketContainer(RtpPacket packet, long receivedTimestampMs) { + this.packet = packet; + this.receivedTimestampMs = receivedTimestampMs; + } + } + + /** + * Calculates the sequence number shift, accounting for wrapping around. + * + * @param sequenceNumber The currently received sequence number. + * @param previousSequenceNumber The previous sequence number to compare against. + * @return The shift in the sequence numbers. A positive shift indicates that {@code + * sequenceNumber} is logically after {@code previousSequenceNumber}, whereas a negative shift + * means that {@code sequenceNumber} is logically before {@code previousSequenceNumber}. + */ + private static int calculateSequenceNumberShift(int sequenceNumber, int previousSequenceNumber) { + int sequenceShift = sequenceNumber - previousSequenceNumber; + if (abs(sequenceShift) > MAX_SEQUENCE_LEAP_ALLOWED) { + int shift = + min(sequenceNumber, previousSequenceNumber) + - max(sequenceNumber, previousSequenceNumber) + + RtpPacket.MAX_SEQUENCE_NUMBER; + // Check whether this is actually an wrap-over. For example, it is a wrap around if receiving + // 65500 (prevSequenceNumber) after 1 (sequenceNumber); but it is not when prevSequenceNumber + // is 30000. + if (shift < MAX_SEQUENCE_LEAP_ALLOWED) { + return sequenceNumber < previousSequenceNumber + ? /* receiving 65000 (curr) then 1 (prev) */ shift + : /* receiving 1 (curr) then 65500 (prev) */ -shift; + } + } + return sequenceShift; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpPayloadFormat.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpPayloadFormat.java new file mode 100644 index 0000000000..e50556f3cc --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpPayloadFormat.java @@ -0,0 +1,200 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.common.base.Ascii; +import com.google.common.collect.ImmutableMap; +import java.util.Map; + +/** + * Represents the payload format used in RTP. + * + *
+ * <p>In RTSP playback, the format information is always present in the {@link SessionDescription}
+ * enclosed in the response of a DESCRIBE request. Within each track's {@link MediaDescription}, it
+ * is the attributes FMTP and RTPMAP that allow us to recreate the media format.
+ *
+ * <p>
        This class wraps around the {@link Format} class, in addition to the instance fields that are + * specific to RTP. + */ +public final class RtpPayloadFormat { + + public static final String RTP_MEDIA_AC3 = "AC3"; + public static final String RTP_MEDIA_AMR = "AMR"; + public static final String RTP_MEDIA_AMR_WB = "AMR-WB"; + public static final String RTP_MEDIA_MPEG4_GENERIC = "MPEG4-GENERIC"; + public static final String RTP_MEDIA_MPEG4_LATM_AUDIO = "MP4A-LATM"; + public static final String RTP_MEDIA_MPEG4_VIDEO = "MP4V-ES"; + public static final String RTP_MEDIA_H263_1998 = "H263-1998"; + public static final String RTP_MEDIA_H263_2000 = "H263-2000"; + public static final String RTP_MEDIA_H264 = "H264"; + public static final String RTP_MEDIA_H265 = "H265"; + public static final String RTP_MEDIA_OPUS = "OPUS"; + public static final String RTP_MEDIA_PCM_L8 = "L8"; + public static final String RTP_MEDIA_PCM_L16 = "L16"; + public static final String RTP_MEDIA_PCMA = "PCMA"; + public static final String RTP_MEDIA_PCMU = "PCMU"; + public static final String RTP_MEDIA_VP8 = "VP8"; + public static final String RTP_MEDIA_VP9 = "VP9"; + + /** Returns whether the format of a {@link MediaDescription} is supported. */ + /* package */ static boolean isFormatSupported(MediaDescription mediaDescription) { + switch (Ascii.toUpperCase(mediaDescription.rtpMapAttribute.mediaEncoding)) { + case RTP_MEDIA_AC3: + case RTP_MEDIA_AMR: + case RTP_MEDIA_AMR_WB: + case RTP_MEDIA_H263_1998: + case RTP_MEDIA_H263_2000: + case RTP_MEDIA_H264: + case RTP_MEDIA_H265: + case RTP_MEDIA_MPEG4_GENERIC: + case RTP_MEDIA_MPEG4_LATM_AUDIO: + case RTP_MEDIA_MPEG4_VIDEO: + case RTP_MEDIA_OPUS: + case RTP_MEDIA_PCM_L8: + case RTP_MEDIA_PCM_L16: + case RTP_MEDIA_PCMA: + case RTP_MEDIA_PCMU: + case RTP_MEDIA_VP8: + case RTP_MEDIA_VP9: + return true; + default: + return false; + } + } + + /** + * Gets the MIME type that is associated with the RTP media type. + * + *
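// Illustrative sketch, not part of the patch: resolving an rtpmap media encoding to an ExoPlayer
// MIME type with the helper defined below; "H264" is a sample value, also used as the example in
// the Javadoc that follows.
import com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat;

final class MimeTypeLookupSketch {
  public static void main(String[] args) {
    System.out.println(RtpPayloadFormat.getMimeTypeFromRtpMediaType("H264")); // video/avc
  }
}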
+ * <p>
        For instance, RTP media type "H264" maps to {@link MimeTypes#VIDEO_H264}. + * + * @throws IllegalArgumentException When the media type is not supported/recognized. + */ + public static String getMimeTypeFromRtpMediaType(String mediaType) { + switch (Ascii.toUpperCase(mediaType)) { + case RTP_MEDIA_AC3: + return MimeTypes.AUDIO_AC3; + case RTP_MEDIA_AMR: + return MimeTypes.AUDIO_AMR_NB; + case RTP_MEDIA_AMR_WB: + return MimeTypes.AUDIO_AMR_WB; + case RTP_MEDIA_MPEG4_GENERIC: + case RTP_MEDIA_MPEG4_LATM_AUDIO: + return MimeTypes.AUDIO_AAC; + case RTP_MEDIA_OPUS: + return MimeTypes.AUDIO_OPUS; + case RTP_MEDIA_PCM_L8: + case RTP_MEDIA_PCM_L16: + return MimeTypes.AUDIO_RAW; + case RTP_MEDIA_PCMA: + return MimeTypes.AUDIO_ALAW; + case RTP_MEDIA_PCMU: + return MimeTypes.AUDIO_MLAW; + case RTP_MEDIA_H263_1998: + case RTP_MEDIA_H263_2000: + return MimeTypes.VIDEO_H263; + case RTP_MEDIA_H264: + return MimeTypes.VIDEO_H264; + case RTP_MEDIA_H265: + return MimeTypes.VIDEO_H265; + case RTP_MEDIA_MPEG4_VIDEO: + return MimeTypes.VIDEO_MP4V; + case RTP_MEDIA_VP8: + return MimeTypes.VIDEO_VP8; + case RTP_MEDIA_VP9: + return MimeTypes.VIDEO_VP9; + default: + throw new IllegalArgumentException(mediaType); + } + } + + /** Returns the PCM encoding type for {@code mediaEncoding}. */ + public static @C.PcmEncoding int getRawPcmEncodingType(String mediaEncoding) { + checkArgument( + mediaEncoding.equals(RTP_MEDIA_PCM_L8) || mediaEncoding.equals(RTP_MEDIA_PCM_L16)); + return mediaEncoding.equals(RtpPayloadFormat.RTP_MEDIA_PCM_L8) + ? C.ENCODING_PCM_8BIT + : C.ENCODING_PCM_16BIT_BIG_ENDIAN; + } + + /** The payload type associated with this format. */ + public final int rtpPayloadType; + /** The clock rate in Hertz, associated with the format. */ + public final int clockRate; + /** The {@link Format} of this RTP payload. */ + public final Format format; + /** The format parameters, mapped from the SDP FMTP attribute (RFC2327 Page 22). */ + public final ImmutableMap fmtpParameters; + /** The RTP media encoding. */ + public final String mediaEncoding; + + /** + * Creates a new instance. + * + * @param format The associated {@link Format media format}. + * @param rtpPayloadType The assigned RTP payload type, from the RTPMAP attribute in {@link + * MediaDescription}. + * @param clockRate The associated clock rate in hertz. + * @param fmtpParameters The format parameters, from the SDP FMTP attribute (RFC2327 Page 22), + * empty if unset. The keys and values are specified in the RFCs for specific formats. For + * instance, RFC3640 Section 4.1 defines keys like profile-level-id and config. + * @param mediaEncoding The RTP media encoding. 
+ */ + public RtpPayloadFormat( + Format format, + int rtpPayloadType, + int clockRate, + Map fmtpParameters, + String mediaEncoding) { + this.rtpPayloadType = rtpPayloadType; + this.clockRate = clockRate; + this.format = format; + this.fmtpParameters = ImmutableMap.copyOf(fmtpParameters); + this.mediaEncoding = mediaEncoding; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + RtpPayloadFormat that = (RtpPayloadFormat) o; + return rtpPayloadType == that.rtpPayloadType + && clockRate == that.clockRate + && format.equals(that.format) + && fmtpParameters.equals(that.fmtpParameters) + && mediaEncoding.equals(that.mediaEncoding); + } + + @Override + public int hashCode() { + int result = 7; + result = 31 * result + rtpPayloadType; + result = 31 * result + clockRate; + result = 31 * result + format.hashCode(); + result = 31 * result + fmtpParameters.hashCode(); + result = 31 * result + mediaEncoding.hashCode(); + return result; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpUtils.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpUtils.java new file mode 100644 index 0000000000..d37163f024 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtpUtils.java @@ -0,0 +1,34 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp; + +import android.net.Uri; +import com.google.android.exoplayer2.upstream.DataSpec; +import com.google.android.exoplayer2.util.Util; + +/** Utility methods for RTP. */ +public final class RtpUtils { + + private static final String RTP_ANY_INCOMING_IPV4 = "rtp://0.0.0.0"; + + /** Returns the {@link DataSpec} with the {@link Uri} for incoming RTP connection. */ + public static DataSpec getIncomingRtpDataSpec(int portNumber) { + return new DataSpec( + Uri.parse(Util.formatInvariant("%s:%d", RTP_ANY_INCOMING_IPV4, portNumber))); + } + + private RtpUtils() {} +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspAuthenticationInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspAuthenticationInfo.java new file mode 100644 index 0000000000..97b5d18ee7 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspAuthenticationInfo.java @@ -0,0 +1,162 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp; + +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.net.Uri; +import android.util.Base64; +import androidx.annotation.IntDef; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.source.rtsp.RtspMessageUtil.RtspAuthUserInfo; +import com.google.android.exoplayer2.util.Util; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; + +/** Wraps RTSP authentication information. */ +/* package */ final class RtspAuthenticationInfo { + + /** The supported authentication methods. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({BASIC, DIGEST}) + @interface AuthenticationMechanism {} + + /** HTTP basic authentication (RFC2068 Section 11.1). */ + public static final int BASIC = 1; + /** HTTP digest authentication (RFC2069). */ + public static final int DIGEST = 2; + + /** Basic authorization header format, see RFC7617. */ + private static final String BASIC_AUTHORIZATION_HEADER_FORMAT = "Basic %s"; + + /** Digest authorization header format, see RFC7616. */ + private static final String DIGEST_AUTHORIZATION_HEADER_FORMAT = + "Digest username=\"%s\", realm=\"%s\", nonce=\"%s\", uri=\"%s\", response=\"%s\""; + + private static final String DIGEST_AUTHORIZATION_HEADER_FORMAT_WITH_OPAQUE = + "Digest username=\"%s\", realm=\"%s\", nonce=\"%s\", uri=\"%s\", response=\"%s\"," + + " opaque=\"%s\""; + + private static final String ALGORITHM = "MD5"; + + /** The authentication mechanism. */ + public final @AuthenticationMechanism int authenticationMechanism; + /** The authentication realm. */ + public final String realm; + /** The nonce used in digest authentication; empty if using {@link #BASIC} authentication. */ + public final String nonce; + /** The opaque used in digest authentication; empty if using {@link #BASIC} authentication. */ + public final String opaque; + + /** + * Creates a new instance. + * + * @param authenticationMechanism The authentication mechanism, as defined by {@link + * AuthenticationMechanism}. + * @param realm The authentication realm. + * @param nonce The nonce in digest authentication; empty if using {@link #BASIC} authentication. + * @param opaque The opaque in digest authentication; empty if using {@link #BASIC} + * authentication. + */ + public RtspAuthenticationInfo( + @AuthenticationMechanism int authenticationMechanism, + String realm, + String nonce, + String opaque) { + this.authenticationMechanism = authenticationMechanism; + this.realm = realm; + this.nonce = nonce; + this.opaque = opaque; + } + + /** + * Gets the string value for {@link RtspHeaders#AUTHORIZATION} header. + * + * @param authUserInfo The {@link RtspAuthUserInfo} for authentication. + * @param uri The request {@link Uri}. + * @param requestMethod The request method, defined in {@link RtspRequest.Method}. 
+ * @return The string value for {@link RtspHeaders#AUTHORIZATION} header. + * @throws ParserException If the MD5 algorithm is not supported by {@link MessageDigest}. + */ + public String getAuthorizationHeaderValue( + RtspAuthUserInfo authUserInfo, Uri uri, @RtspRequest.Method int requestMethod) + throws ParserException { + switch (authenticationMechanism) { + case BASIC: + return getBasicAuthorizationHeaderValue(authUserInfo); + case DIGEST: + return getDigestAuthorizationHeaderValue(authUserInfo, uri, requestMethod); + default: + throw ParserException.createForManifestWithUnsupportedFeature( + /* message= */ null, new UnsupportedOperationException()); + } + } + + private String getBasicAuthorizationHeaderValue(RtspAuthUserInfo authUserInfo) { + return Util.formatInvariant( + BASIC_AUTHORIZATION_HEADER_FORMAT, + Base64.encodeToString( + RtspMessageUtil.getStringBytes(authUserInfo.username + ":" + authUserInfo.password), + Base64.DEFAULT)); + } + + private String getDigestAuthorizationHeaderValue( + RtspAuthUserInfo authUserInfo, Uri uri, @RtspRequest.Method int requestMethod) + throws ParserException { + try { + MessageDigest md = MessageDigest.getInstance(ALGORITHM); + String methodName = RtspMessageUtil.toMethodString(requestMethod); + // From RFC2069 Section 2.1.2: + // response-digest = H( H(A1) ":" unquoted nonce-value ":" H(A2) ) + // A1 = unquoted username-value ":" unquoted realm-value ":" password + // A2 = Method ":" request-uri + // H(x) = MD5(x) + + String hashA1 = + Util.toHexString( + md.digest( + RtspMessageUtil.getStringBytes( + authUserInfo.username + ":" + realm + ":" + authUserInfo.password))); + String hashA2 = + Util.toHexString(md.digest(RtspMessageUtil.getStringBytes(methodName + ":" + uri))); + String response = + Util.toHexString( + md.digest(RtspMessageUtil.getStringBytes(hashA1 + ":" + nonce + ":" + hashA2))); + + if (opaque.isEmpty()) { + return Util.formatInvariant( + DIGEST_AUTHORIZATION_HEADER_FORMAT, authUserInfo.username, realm, nonce, uri, response); + } else { + return Util.formatInvariant( + DIGEST_AUTHORIZATION_HEADER_FORMAT_WITH_OPAQUE, + authUserInfo.username, + realm, + nonce, + uri, + response, + opaque); + } + } catch (NoSuchAlgorithmException e) { + throw ParserException.createForManifestWithUnsupportedFeature(/* message= */ null, e); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspClient.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspClient.java new file mode 100644 index 0000000000..5ff812bd82 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspClient.java @@ -0,0 +1,776 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
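For reference, the RFC 2069 arithmetic that getDigestAuthorizationHeaderValue above implements, pulled out as a standalone sketch. The toHex helper stands in for Util.toHexString so the snippet is self-contained, and any credential values passed in would be made-up test data; the BASIC variant above is simply Base64("username:password").

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    final class DigestSketch {
      // response = MD5( MD5(user ":" realm ":" password) ":" nonce ":" MD5(method ":" uri) )
      static String digestResponse(
          String user, String realm, String password, String method, String uri, String nonce)
          throws NoSuchAlgorithmException {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        String ha1 =
            toHex(md5.digest((user + ":" + realm + ":" + password).getBytes(StandardCharsets.UTF_8)));
        String ha2 = toHex(md5.digest((method + ":" + uri).getBytes(StandardCharsets.UTF_8)));
        return toHex(md5.digest((ha1 + ":" + nonce + ":" + ha2).getBytes(StandardCharsets.UTF_8)));
      }

      // Lower-case hex, as digest authentication expects; stands in for Util.toHexString here.
      private static String toHex(byte[] bytes) {
        StringBuilder sb = new StringBuilder(bytes.length * 2);
        for (byte b : bytes) {
          sb.append(String.format("%02x", b));
        }
        return sb.toString();
      }
    }

The value returned is what ends up quoted as response="..." in the Authorization header built above.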
+ */ +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.source.rtsp.RtspMessageChannel.DEFAULT_RTSP_PORT; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_ANNOUNCE; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_DESCRIBE; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_GET_PARAMETER; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_OPTIONS; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_PAUSE; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_PLAY; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_PLAY_NOTIFY; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_RECORD; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_REDIRECT; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_SETUP; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_SET_PARAMETER; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_TEARDOWN; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_UNSET; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static com.google.common.base.Strings.nullToEmpty; +import static java.lang.Math.max; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.net.Uri; +import android.os.Handler; +import android.os.Looper; +import android.util.SparseArray; +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.source.rtsp.RtspMediaPeriod.RtpLoadInfo; +import com.google.android.exoplayer2.source.rtsp.RtspMediaSource.RtspPlaybackException; +import com.google.android.exoplayer2.source.rtsp.RtspMessageChannel.InterleavedBinaryDataListener; +import com.google.android.exoplayer2.source.rtsp.RtspMessageUtil.RtspAuthUserInfo; +import com.google.android.exoplayer2.source.rtsp.RtspMessageUtil.RtspSessionHeader; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Joiner; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.collect.Multimap; +import java.io.Closeable; +import java.io.IOException; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.net.Socket; +import java.util.ArrayDeque; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.net.SocketFactory; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** The RTSP client. */ +/* package */ final class RtspClient implements Closeable { + + /** + * The RTSP session state (RFC2326, Section A.1). One of {@link #RTSP_STATE_UNINITIALIZED}, {@link + * #RTSP_STATE_INIT}, {@link #RTSP_STATE_READY}, or {@link #RTSP_STATE_PLAYING}. 
+ */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({RTSP_STATE_UNINITIALIZED, RTSP_STATE_INIT, RTSP_STATE_READY, RTSP_STATE_PLAYING}) + public @interface RtspState {} + /** RTSP uninitialized state, the state before sending any SETUP request. */ + public static final int RTSP_STATE_UNINITIALIZED = -1; + /** RTSP initial state, the state after sending SETUP REQUEST. */ + public static final int RTSP_STATE_INIT = 0; + /** RTSP ready state, the state after receiving SETUP, or PAUSE response. */ + public static final int RTSP_STATE_READY = 1; + /** RTSP playing state, the state after receiving PLAY response. */ + public static final int RTSP_STATE_PLAYING = 2; + + private static final String TAG = "RtspClient"; + private static final long DEFAULT_RTSP_KEEP_ALIVE_INTERVAL_MS = 30_000; + + /** A listener for session information update. */ + public interface SessionInfoListener { + /** Called when the session information is available. */ + void onSessionTimelineUpdated(RtspSessionTiming timing, ImmutableList tracks); + /** + * Called when failed to get session information from the RTSP server, or when error happened + * during updating the session timeline. + */ + void onSessionTimelineRequestFailed(String message, @Nullable Throwable cause); + } + + /** A listener for playback events. */ + public interface PlaybackEventListener { + /** Called when setup is completed and playback can start. */ + void onRtspSetupCompleted(); + + /** + * Called when a PLAY request is acknowledged by the server and playback can start. + * + * @param startPositionUs The server-supplied start position in microseconds. + * @param trackTimingList The list of {@link RtspTrackTiming} for the playing tracks. + */ + void onPlaybackStarted(long startPositionUs, ImmutableList trackTimingList); + + /** Called when errors are encountered during playback. */ + void onPlaybackError(RtspPlaybackException error); + } + + private final SessionInfoListener sessionInfoListener; + private final PlaybackEventListener playbackEventListener; + private final String userAgent; + private final SocketFactory socketFactory; + private final boolean debugLoggingEnabled; + private final ArrayDeque pendingSetupRtpLoadInfos; + // TODO(b/172331505) Add a timeout monitor for pending requests. + private final SparseArray pendingRequests; + private final MessageSender messageSender; + + /** RTSP session URI. */ + private Uri uri; + + private RtspMessageChannel messageChannel; + @Nullable private RtspAuthUserInfo rtspAuthUserInfo; + @Nullable private String sessionId; + @Nullable private KeepAliveMonitor keepAliveMonitor; + @Nullable private RtspAuthenticationInfo rtspAuthenticationInfo; + private @RtspState int rtspState; + private boolean hasUpdatedTimelineAndTracks; + private boolean receivedAuthorizationRequest; + private boolean hasPendingPauseRequest; + private long pendingSeekPositionUs; + + /** + * Creates a new instance. + * + *
        The constructor must be called on the playback thread. The thread is also where {@link + * SessionInfoListener} and {@link PlaybackEventListener} events are sent. User must {@link + * #start} the client, and {@link #close} it when done. + * + *
        Note: all method invocations must be made from the playback thread. + * + * @param sessionInfoListener The {@link SessionInfoListener}. + * @param playbackEventListener The {@link PlaybackEventListener}. + * @param userAgent The user agent. + * @param uri The RTSP playback URI. + * @param socketFactory A socket factory for the RTSP connection. + * @param debugLoggingEnabled Whether to log RTSP messages. + */ + public RtspClient( + SessionInfoListener sessionInfoListener, + PlaybackEventListener playbackEventListener, + String userAgent, + Uri uri, + SocketFactory socketFactory, + boolean debugLoggingEnabled) { + this.sessionInfoListener = sessionInfoListener; + this.playbackEventListener = playbackEventListener; + this.userAgent = userAgent; + this.socketFactory = socketFactory; + this.debugLoggingEnabled = debugLoggingEnabled; + this.pendingSetupRtpLoadInfos = new ArrayDeque<>(); + this.pendingRequests = new SparseArray<>(); + this.messageSender = new MessageSender(); + this.uri = RtspMessageUtil.removeUserInfo(uri); + this.messageChannel = new RtspMessageChannel(new MessageListener()); + this.rtspAuthUserInfo = RtspMessageUtil.parseUserInfo(uri); + this.pendingSeekPositionUs = C.TIME_UNSET; + this.rtspState = RTSP_STATE_UNINITIALIZED; + } + + /** + * Starts the client and sends an OPTIONS request. + * + *
        Calls {@link #close()} if {@link IOException} is thrown when opening a connection to the + * supplied {@link Uri}. + * + * @throws IOException When failed to open a connection to the supplied {@link Uri}. + */ + public void start() throws IOException { + try { + messageChannel.open(getSocket(uri)); + } catch (IOException e) { + Util.closeQuietly(messageChannel); + throw e; + } + messageSender.sendOptionsRequest(uri, sessionId); + } + + /** Returns the current {@link RtspState RTSP state}. */ + public @RtspState int getState() { + return rtspState; + } + + /** + * Triggers RTSP SETUP requests after track selection. + * + *
        All selected tracks (represented by {@link RtpLoadInfo}) must have valid transport. + * + * @param loadInfos A list of selected tracks represented by {@link RtpLoadInfo}. + */ + public void setupSelectedTracks(List loadInfos) { + pendingSetupRtpLoadInfos.addAll(loadInfos); + continueSetupRtspTrack(); + } + + /** + * Starts RTSP playback by sending RTSP PLAY request. + * + * @param offsetMs The playback offset in milliseconds, with respect to the stream start position. + */ + public void startPlayback(long offsetMs) { + messageSender.sendPlayRequest(uri, offsetMs, checkNotNull(sessionId)); + } + + /** + * Seeks to a specific time using RTSP. + * + *
        Call this method only when in-buffer seek is not feasible. An RTSP PAUSE, and an RTSP PLAY + * request will be sent out to perform a seek on the server side. + * + * @param positionUs The seek time measured in microseconds. + */ + public void seekToUs(long positionUs) { + // RTSP state is PLAYING after sending out a PAUSE, before receiving the PAUSE response. Sends + // out PAUSE only when state PLAYING and no PAUSE is sent. + if (rtspState == RTSP_STATE_PLAYING && !hasPendingPauseRequest) { + messageSender.sendPauseRequest(uri, checkNotNull(sessionId)); + } + pendingSeekPositionUs = positionUs; + } + + @Override + public void close() throws IOException { + if (keepAliveMonitor != null) { + // Playback has started. We have to stop the periodic keep alive and send a TEARDOWN so that + // the RTSP server stops sending RTP packets and frees up resources. + keepAliveMonitor.close(); + keepAliveMonitor = null; + messageSender.sendTeardownRequest(uri, checkNotNull(sessionId)); + } + messageChannel.close(); + } + + /** + * Sets up a new playback session using TCP as RTP lower transport. + * + *
        This mode is also known as "RTP-over-RTSP". + */ + public void retryWithRtpTcp() { + try { + close(); + messageChannel = new RtspMessageChannel(new MessageListener()); + messageChannel.open(getSocket(uri)); + sessionId = null; + receivedAuthorizationRequest = false; + rtspAuthenticationInfo = null; + } catch (IOException e) { + playbackEventListener.onPlaybackError(new RtspPlaybackException(e)); + } + } + + /** Registers an {@link InterleavedBinaryDataListener} to receive RTSP interleaved data. */ + public void registerInterleavedDataChannel( + int channel, InterleavedBinaryDataListener interleavedBinaryDataListener) { + messageChannel.registerInterleavedBinaryDataListener(channel, interleavedBinaryDataListener); + } + + private void continueSetupRtspTrack() { + @Nullable RtpLoadInfo loadInfo = pendingSetupRtpLoadInfos.pollFirst(); + if (loadInfo == null) { + playbackEventListener.onRtspSetupCompleted(); + return; + } + messageSender.sendSetupRequest(loadInfo.getTrackUri(), loadInfo.getTransport(), sessionId); + } + + private void maybeLogMessage(List message) { + if (debugLoggingEnabled) { + Log.d(TAG, Joiner.on("\n").join(message)); + } + } + + /** Returns a {@link Socket} that is connected to the {@code uri}. */ + private Socket getSocket(Uri uri) throws IOException { + checkArgument(uri.getHost() != null); + int rtspPort = uri.getPort() > 0 ? uri.getPort() : DEFAULT_RTSP_PORT; + return socketFactory.createSocket(checkNotNull(uri.getHost()), rtspPort); + } + + private void dispatchRtspError(Throwable error) { + RtspPlaybackException playbackException = + error instanceof RtspPlaybackException + ? (RtspPlaybackException) error + : new RtspPlaybackException(error); + + if (hasUpdatedTimelineAndTracks) { + // Playback event listener must be non-null after timeline has been updated. + playbackEventListener.onPlaybackError(playbackException); + } else { + sessionInfoListener.onSessionTimelineRequestFailed(nullToEmpty(error.getMessage()), error); + } + } + + /** + * Returns whether the RTSP server supports the DESCRIBE method. + * + *
        The DESCRIBE method is marked "recommended to implement" in RFC2326 Section 10. We assume + * the server supports DESCRIBE, if the OPTIONS response does not include a PUBLIC header. + * + * @param serverSupportedMethods A list of RTSP methods (as defined in RFC2326 Section 10, encoded + * as {@link RtspRequest.Method}) that are supported by the RTSP server. + */ + private static boolean serverSupportsDescribe(List serverSupportedMethods) { + return serverSupportedMethods.isEmpty() || serverSupportedMethods.contains(METHOD_DESCRIBE); + } + + /** + * Gets the included {@link RtspMediaTrack RtspMediaTracks} from a {@link SessionDescription}. + * + * @param sessionDescription The {@link SessionDescription}. + * @param uri The RTSP playback URI. + */ + private static ImmutableList buildTrackList( + SessionDescription sessionDescription, Uri uri) { + ImmutableList.Builder trackListBuilder = new ImmutableList.Builder<>(); + for (int i = 0; i < sessionDescription.mediaDescriptionList.size(); i++) { + MediaDescription mediaDescription = sessionDescription.mediaDescriptionList.get(i); + // Includes tracks with supported formats only. + if (RtpPayloadFormat.isFormatSupported(mediaDescription)) { + trackListBuilder.add(new RtspMediaTrack(mediaDescription, uri)); + } + } + return trackListBuilder.build(); + } + + private final class MessageSender { + + private int cSeq; + private @MonotonicNonNull RtspRequest lastRequest; + + public void sendOptionsRequest(Uri uri, @Nullable String sessionId) { + sendRequest( + getRequestWithCommonHeaders( + METHOD_OPTIONS, sessionId, /* additionalHeaders= */ ImmutableMap.of(), uri)); + } + + public void sendDescribeRequest(Uri uri, @Nullable String sessionId) { + sendRequest( + getRequestWithCommonHeaders( + METHOD_DESCRIBE, sessionId, /* additionalHeaders= */ ImmutableMap.of(), uri)); + } + + public void sendSetupRequest(Uri trackUri, String transport, @Nullable String sessionId) { + rtspState = RTSP_STATE_INIT; + sendRequest( + getRequestWithCommonHeaders( + METHOD_SETUP, + sessionId, + /* additionalHeaders= */ ImmutableMap.of(RtspHeaders.TRANSPORT, transport), + trackUri)); + } + + public void sendPlayRequest(Uri uri, long offsetMs, String sessionId) { + checkState(rtspState == RTSP_STATE_READY || rtspState == RTSP_STATE_PLAYING); + sendRequest( + getRequestWithCommonHeaders( + METHOD_PLAY, + sessionId, + /* additionalHeaders= */ ImmutableMap.of( + RtspHeaders.RANGE, RtspSessionTiming.getOffsetStartTimeTiming(offsetMs)), + uri)); + } + + public void sendTeardownRequest(Uri uri, String sessionId) { + if (rtspState == RTSP_STATE_UNINITIALIZED || rtspState == RTSP_STATE_INIT) { + // No need to perform session teardown before a session is set up, where the state is + // RTSP_STATE_READY or RTSP_STATE_PLAYING. 
+ return; + } + + rtspState = RTSP_STATE_INIT; + sendRequest( + getRequestWithCommonHeaders( + METHOD_TEARDOWN, sessionId, /* additionalHeaders= */ ImmutableMap.of(), uri)); + } + + public void sendPauseRequest(Uri uri, String sessionId) { + checkState(rtspState == RTSP_STATE_PLAYING); + sendRequest( + getRequestWithCommonHeaders( + METHOD_PAUSE, sessionId, /* additionalHeaders= */ ImmutableMap.of(), uri)); + hasPendingPauseRequest = true; + } + + public void retryLastRequest() { + checkStateNotNull(lastRequest); + + Multimap headersMultiMap = lastRequest.headers.asMultiMap(); + Map lastRequestHeaders = new HashMap<>(); + for (String headerName : headersMultiMap.keySet()) { + if (headerName.equals(RtspHeaders.CSEQ) + || headerName.equals(RtspHeaders.USER_AGENT) + || headerName.equals(RtspHeaders.SESSION) + || headerName.equals(RtspHeaders.AUTHORIZATION)) { + // Clear session-specific header values. + continue; + } + // Only include the header value that is written most recently. + lastRequestHeaders.put(headerName, Iterables.getLast(headersMultiMap.get(headerName))); + } + + sendRequest( + getRequestWithCommonHeaders( + lastRequest.method, sessionId, lastRequestHeaders, lastRequest.uri)); + } + + public void sendMethodNotAllowedResponse(int cSeq) { + // RTSP status code 405: Method Not Allowed (RFC2326 Section 7.1.1). + sendResponse( + new RtspResponse( + /* status= */ 405, new RtspHeaders.Builder(userAgent, sessionId, cSeq).build())); + + // The server could send a cSeq that is larger than the current stored cSeq. To maintain a + // monotonically increasing cSeq number, this.cSeq needs to be reset to server's cSeq + 1. + this.cSeq = max(this.cSeq, cSeq + 1); + } + + private RtspRequest getRequestWithCommonHeaders( + @RtspRequest.Method int method, + @Nullable String sessionId, + Map additionalHeaders, + Uri uri) { + RtspHeaders.Builder headersBuilder = new RtspHeaders.Builder(userAgent, sessionId, cSeq++); + + if (rtspAuthenticationInfo != null) { + checkStateNotNull(rtspAuthUserInfo); + try { + headersBuilder.add( + RtspHeaders.AUTHORIZATION, + rtspAuthenticationInfo.getAuthorizationHeaderValue(rtspAuthUserInfo, uri, method)); + } catch (ParserException e) { + dispatchRtspError(new RtspPlaybackException(e)); + } + } + + headersBuilder.addAll(additionalHeaders); + return new RtspRequest(uri, method, headersBuilder.build(), /* messageBody= */ ""); + } + + private void sendRequest(RtspRequest request) { + int cSeq = Integer.parseInt(checkNotNull(request.headers.get(RtspHeaders.CSEQ))); + checkState(pendingRequests.get(cSeq) == null); + pendingRequests.append(cSeq, request); + List message = RtspMessageUtil.serializeRequest(request); + maybeLogMessage(message); + messageChannel.send(message); + lastRequest = request; + } + + private void sendResponse(RtspResponse response) { + List message = RtspMessageUtil.serializeResponse(response); + maybeLogMessage(message); + messageChannel.send(message); + } + } + + private final class MessageListener implements RtspMessageChannel.MessageListener { + + private final Handler messageHandler; + + /** + * Creates a new instance. + * + *
        The constructor must be called on a {@link Looper} thread, on which all the received RTSP + * messages are processed. + */ + public MessageListener() { + messageHandler = Util.createHandlerForCurrentLooper(); + } + + @Override + public void onRtspMessageReceived(List message) { + messageHandler.post(() -> handleRtspMessage(message)); + } + + private void handleRtspMessage(List message) { + maybeLogMessage(message); + + if (RtspMessageUtil.isRtspResponse(message)) { + handleRtspResponse(message); + } else { + handleRtspRequest(message); + } + } + + private void handleRtspRequest(List message) { + // Handling RTSP requests on the client is optional (RFC2326 Section 10). Decline all + // requests with 'Method Not Allowed'. + messageSender.sendMethodNotAllowedResponse( + Integer.parseInt( + checkNotNull(RtspMessageUtil.parseRequest(message).headers.get(RtspHeaders.CSEQ)))); + } + + private void handleRtspResponse(List message) { + RtspResponse response = RtspMessageUtil.parseResponse(message); + + int cSeq = Integer.parseInt(checkNotNull(response.headers.get(RtspHeaders.CSEQ))); + + @Nullable RtspRequest matchingRequest = pendingRequests.get(cSeq); + if (matchingRequest == null) { + return; + } else { + pendingRequests.remove(cSeq); + } + + @RtspRequest.Method int requestMethod = matchingRequest.method; + + try { + switch (response.status) { + case 200: + break; + case 301: + case 302: + // Redirection request. + if (rtspState != RTSP_STATE_UNINITIALIZED) { + rtspState = RTSP_STATE_INIT; + } + @Nullable String redirectionUriString = response.headers.get(RtspHeaders.LOCATION); + if (redirectionUriString == null) { + sessionInfoListener.onSessionTimelineRequestFailed( + "Redirection without new location.", /* cause= */ null); + } else { + Uri redirectionUri = Uri.parse(redirectionUriString); + RtspClient.this.uri = RtspMessageUtil.removeUserInfo(redirectionUri); + RtspClient.this.rtspAuthUserInfo = RtspMessageUtil.parseUserInfo(redirectionUri); + messageSender.sendDescribeRequest(RtspClient.this.uri, RtspClient.this.sessionId); + } + return; + case 401: + if (rtspAuthUserInfo != null && !receivedAuthorizationRequest) { + // Unauthorized. + ImmutableList wwwAuthenticateHeaders = + response.headers.values(RtspHeaders.WWW_AUTHENTICATE); + if (wwwAuthenticateHeaders.isEmpty()) { + throw ParserException.createForMalformedManifest( + "Missing WWW-Authenticate header in a 401 response.", /* cause= */ null); + } + + for (int i = 0; i < wwwAuthenticateHeaders.size(); i++) { + rtspAuthenticationInfo = + RtspMessageUtil.parseWwwAuthenticateHeader(wwwAuthenticateHeaders.get(i)); + if (rtspAuthenticationInfo.authenticationMechanism + == RtspAuthenticationInfo.DIGEST) { + // Prefers DIGEST when RTSP servers sends both BASIC and DIGEST auth info. + break; + } + } + + messageSender.retryLastRequest(); + receivedAuthorizationRequest = true; + return; + } + // fall through: if unauthorized and no userInfo present, or previous authentication + // unsuccessful. 
+ default: + dispatchRtspError( + new RtspPlaybackException( + RtspMessageUtil.toMethodString(requestMethod) + " " + response.status)); + return; + } + + switch (requestMethod) { + case METHOD_OPTIONS: + onOptionsResponseReceived( + new RtspOptionsResponse( + response.status, + RtspMessageUtil.parsePublicHeader(response.headers.get(RtspHeaders.PUBLIC)))); + break; + + case METHOD_DESCRIBE: + onDescribeResponseReceived( + new RtspDescribeResponse( + response.status, SessionDescriptionParser.parse(response.messageBody))); + break; + + case METHOD_SETUP: + @Nullable String sessionHeaderString = response.headers.get(RtspHeaders.SESSION); + @Nullable String transportHeaderString = response.headers.get(RtspHeaders.TRANSPORT); + if (sessionHeaderString == null || transportHeaderString == null) { + throw ParserException.createForMalformedManifest( + "Missing mandatory session or transport header", /* cause= */ null); + } + + RtspSessionHeader sessionHeader = + RtspMessageUtil.parseSessionHeader(sessionHeaderString); + onSetupResponseReceived( + new RtspSetupResponse(response.status, sessionHeader, transportHeaderString)); + break; + + case METHOD_PLAY: + // Range header is optional for a PLAY response (RFC2326 Section 12). + @Nullable String startTimingString = response.headers.get(RtspHeaders.RANGE); + RtspSessionTiming timing = + startTimingString == null + ? RtspSessionTiming.DEFAULT + : RtspSessionTiming.parseTiming(startTimingString); + + ImmutableList trackTimingList; + try { + @Nullable String rtpInfoString = response.headers.get(RtspHeaders.RTP_INFO); + trackTimingList = + rtpInfoString == null + ? ImmutableList.of() + : RtspTrackTiming.parseTrackTiming(rtpInfoString, uri); + } catch (ParserException e) { + trackTimingList = ImmutableList.of(); + } + + onPlayResponseReceived(new RtspPlayResponse(response.status, timing, trackTimingList)); + break; + + case METHOD_PAUSE: + onPauseResponseReceived(); + break; + + case METHOD_GET_PARAMETER: + case METHOD_TEARDOWN: + case METHOD_PLAY_NOTIFY: + case METHOD_RECORD: + case METHOD_REDIRECT: + case METHOD_ANNOUNCE: + case METHOD_SET_PARAMETER: + break; + case METHOD_UNSET: + default: + throw new IllegalStateException(); + } + } catch (ParserException e) { + dispatchRtspError(new RtspPlaybackException(e)); + } + } + + // Response handlers must only be called only on 200 (OK) responses. + + private void onOptionsResponseReceived(RtspOptionsResponse response) { + if (keepAliveMonitor != null) { + // Ignores the OPTIONS requests that are sent to keep RTSP connection alive. 
+ return; + } + + if (serverSupportsDescribe(response.supportedMethods)) { + messageSender.sendDescribeRequest(uri, sessionId); + } else { + sessionInfoListener.onSessionTimelineRequestFailed( + "DESCRIBE not supported.", /* cause= */ null); + } + } + + private void onDescribeResponseReceived(RtspDescribeResponse response) { + RtspSessionTiming sessionTiming = RtspSessionTiming.DEFAULT; + @Nullable + String sessionRangeAttributeString = + response.sessionDescription.attributes.get(SessionDescription.ATTR_RANGE); + if (sessionRangeAttributeString != null) { + try { + sessionTiming = RtspSessionTiming.parseTiming(sessionRangeAttributeString); + } catch (ParserException e) { + sessionInfoListener.onSessionTimelineRequestFailed("SDP format error.", /* cause= */ e); + return; + } + } + + ImmutableList tracks = buildTrackList(response.sessionDescription, uri); + if (tracks.isEmpty()) { + sessionInfoListener.onSessionTimelineRequestFailed("No playable track.", /* cause= */ null); + return; + } + + sessionInfoListener.onSessionTimelineUpdated(sessionTiming, tracks); + hasUpdatedTimelineAndTracks = true; + } + + private void onSetupResponseReceived(RtspSetupResponse response) { + checkState(rtspState != RTSP_STATE_UNINITIALIZED); + + rtspState = RTSP_STATE_READY; + sessionId = response.sessionHeader.sessionId; + continueSetupRtspTrack(); + } + + private void onPlayResponseReceived(RtspPlayResponse response) { + checkState(rtspState == RTSP_STATE_READY); + + rtspState = RTSP_STATE_PLAYING; + if (keepAliveMonitor == null) { + keepAliveMonitor = new KeepAliveMonitor(DEFAULT_RTSP_KEEP_ALIVE_INTERVAL_MS); + keepAliveMonitor.start(); + } + + pendingSeekPositionUs = C.TIME_UNSET; + // onPlaybackStarted could initiate another seek request, which will set + // pendingSeekPositionUs. + playbackEventListener.onPlaybackStarted( + Util.msToUs(response.sessionTiming.startTimeMs), response.trackTimingList); + } + + private void onPauseResponseReceived() { + checkState(rtspState == RTSP_STATE_PLAYING); + + rtspState = RTSP_STATE_READY; + hasPendingPauseRequest = false; + if (pendingSeekPositionUs != C.TIME_UNSET) { + startPlayback(Util.usToMs(pendingSeekPositionUs)); + } + } + } + + /** Sends periodic OPTIONS requests to keep RTSP connection alive. */ + private final class KeepAliveMonitor implements Runnable, Closeable { + + private final Handler keepAliveHandler; + private final long intervalMs; + private boolean isStarted; + + /** + * Creates a new instance. + * + *
        Constructor must be invoked on the playback thread. + * + * @param intervalMs The time between consecutive RTSP keep-alive requests, in milliseconds. + */ + public KeepAliveMonitor(long intervalMs) { + this.intervalMs = intervalMs; + keepAliveHandler = Util.createHandlerForCurrentLooper(); + } + + /** Starts Keep-alive. */ + public void start() { + if (isStarted) { + return; + } + + isStarted = true; + keepAliveHandler.postDelayed(this, intervalMs); + } + + @Override + public void run() { + messageSender.sendOptionsRequest(uri, sessionId); + keepAliveHandler.postDelayed(this, intervalMs); + } + + @Override + public void close() { + isStarted = false; + keepAliveHandler.removeCallbacks(this); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspDescribeResponse.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspDescribeResponse.java new file mode 100644 index 0000000000..04d8273ca8 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspDescribeResponse.java @@ -0,0 +1,35 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp; + +/** Represents an RTSP DESCRIBE response. */ +/* package */ final class RtspDescribeResponse { + /** The response's status code. */ + public final int status; + /** The {@link SessionDescription} (see RFC2327) in the DESCRIBE response. */ + public final SessionDescription sessionDescription; + + /** + * Creates a new instance. + * + * @param status The response's status code. + * @param sessionDescription The {@link SessionDescription} in the DESCRIBE response. + */ + public RtspDescribeResponse(int status, SessionDescription sessionDescription) { + this.status = status; + this.sessionDescription = sessionDescription; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspHeaders.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspHeaders.java new file mode 100644 index 0000000000..cd651b5db0 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspHeaders.java @@ -0,0 +1,294 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.android.exoplayer2.source.rtsp; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Ascii; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableListMultimap; +import com.google.common.collect.Iterables; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.util.List; +import java.util.Map; + +/** + * RTSP message headers. + * + *
        {@link Builder} must be used to construct an instance. Use {@link #get} to query header values + * with case-insensitive header names. The extra spaces around header names and values are trimmed. + * Contrary to HTTP, RTSP does not allow ambiguous/arbitrary header names (RFC 2326 Section 12). + */ +/* package */ final class RtspHeaders { + + public static final String ACCEPT = "Accept"; + public static final String ALLOW = "Allow"; + public static final String AUTHORIZATION = "Authorization"; + public static final String BANDWIDTH = "Bandwidth"; + public static final String BLOCKSIZE = "Blocksize"; + public static final String CACHE_CONTROL = "Cache-Control"; + public static final String CONNECTION = "Connection"; + public static final String CONTENT_BASE = "Content-Base"; + public static final String CONTENT_ENCODING = "Content-Encoding"; + public static final String CONTENT_LANGUAGE = "Content-Language"; + public static final String CONTENT_LENGTH = "Content-Length"; + public static final String CONTENT_LOCATION = "Content-Location"; + public static final String CONTENT_TYPE = "Content-Type"; + public static final String CSEQ = "CSeq"; + public static final String DATE = "Date"; + public static final String EXPIRES = "Expires"; + public static final String LOCATION = "Location"; + public static final String PROXY_AUTHENTICATE = "Proxy-Authenticate"; + public static final String PROXY_REQUIRE = "Proxy-Require"; + public static final String PUBLIC = "Public"; + public static final String RANGE = "Range"; + public static final String RTP_INFO = "RTP-Info"; + public static final String RTCP_INTERVAL = "RTCP-Interval"; + public static final String SCALE = "Scale"; + public static final String SESSION = "Session"; + public static final String SPEED = "Speed"; + public static final String SUPPORTED = "Supported"; + public static final String TIMESTAMP = "Timestamp"; + public static final String TRANSPORT = "Transport"; + public static final String USER_AGENT = "User-Agent"; + public static final String VIA = "Via"; + public static final String WWW_AUTHENTICATE = "WWW-Authenticate"; + + /** An empty header object. */ + public static final RtspHeaders EMPTY = new RtspHeaders.Builder().build(); + + /** Builds {@link RtspHeaders} instances. */ + public static final class Builder { + private final ImmutableListMultimap.Builder namesAndValuesBuilder; + + /** Creates a new instance. */ + public Builder() { + namesAndValuesBuilder = new ImmutableListMultimap.Builder<>(); + } + + /** + * Creates a new instance with common header values. + * + * @param userAgent The user agent string. + * @param sessionId The RTSP session ID; use {@code null} when the session is not yet set up. + * @param cSeq The RTSP cSeq sequence number. + */ + public Builder(String userAgent, @Nullable String sessionId, int cSeq) { + this(); + + add(USER_AGENT, userAgent); + add(CSEQ, String.valueOf(cSeq)); + if (sessionId != null) { + add(SESSION, sessionId); + } + } + + /** + * Creates a new instance to build upon the provided {@link RtspHeaders}. + * + * @param namesAndValuesBuilder A {@link ImmutableListMultimap.Builder} that this builder builds + * upon. + */ + private Builder(ImmutableListMultimap.Builder namesAndValuesBuilder) { + this.namesAndValuesBuilder = namesAndValuesBuilder; + } + + /** + * Adds a header name and header value pair. + * + * @param headerName The name of the header. + * @param headerValue The value of the header. + * @return This builder. 
+ */ + @CanIgnoreReturnValue + public Builder add(String headerName, String headerValue) { + namesAndValuesBuilder.put(convertToStandardHeaderName(headerName.trim()), headerValue.trim()); + return this; + } + + /** + * Adds a list of headers. + * + * @param headers The list of headers, each item must following the format <headerName>: + * <headerValue> + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder addAll(List headers) { + for (int i = 0; i < headers.size(); i++) { + String[] header = Util.splitAtFirst(headers.get(i), ":\\s?"); + if (header.length == 2) { + add(header[0], header[1]); + } + } + return this; + } + + /** + * Adds multiple headers in a map. + * + * @param headers The map of headers, where the keys are the header names and the values are the + * header values. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder addAll(Map headers) { + for (Map.Entry header : headers.entrySet()) { + add(header.getKey(), header.getValue()); + } + return this; + } + + /** + * Builds a new {@link RtspHeaders} instance. + * + * @return The newly built {@link RtspHeaders} instance. + */ + public RtspHeaders build() { + return new RtspHeaders(this); + } + } + + private final ImmutableListMultimap namesAndValues; + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof RtspHeaders)) { + return false; + } + RtspHeaders headers = (RtspHeaders) obj; + return namesAndValues.equals(headers.namesAndValues); + } + + @Override + public int hashCode() { + return namesAndValues.hashCode(); + } + + /** Returns a {@link Builder} initialized with the values of this instance. */ + public Builder buildUpon() { + ImmutableListMultimap.Builder namesAndValuesBuilder = + new ImmutableListMultimap.Builder<>(); + namesAndValuesBuilder.putAll(namesAndValues); + return new Builder(namesAndValuesBuilder); + } + + /** + * Returns a map that associates header names to the list of values associated with the + * corresponding header name. + */ + public ImmutableListMultimap asMultiMap() { + return namesAndValues; + } + + /** + * Returns the most recent header value mapped to the argument, {@code null} if the header name is + * not recorded. + */ + @Nullable + public String get(String headerName) { + ImmutableList headerValues = values(headerName); + if (headerValues.isEmpty()) { + return null; + } + return Iterables.getLast(headerValues); + } + + /** + * Returns a list of header values mapped to the argument, in the addition order. The returned + * list is empty if the header name is not recorded. 
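A small usage sketch for the builder and the case-insensitive lookup described above. RtspHeaders is package-private, so this assumes same-package access, and the transport value is made up:

    RtspHeaders headers =
        new RtspHeaders.Builder(/* userAgent= */ "ExoPlayerDemo/1.0", /* sessionId= */ null, /* cSeq= */ 2)
            .add("transport", " RTP/AVP;unicast;client_port=65000-65001 ")
            .build();

    headers.get(RtspHeaders.CSEQ); // "2"
    headers.get("TRANSPORT");      // "RTP/AVP;unicast;client_port=65000-65001" (name matched
                                   // case-insensitively, surrounding spaces trimmed)
    headers.get("Blocksize");      // null, the header was never added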
+ */ + public ImmutableList values(String headerName) { + return namesAndValues.get(convertToStandardHeaderName(headerName)); + } + + private RtspHeaders(Builder builder) { + this.namesAndValues = builder.namesAndValuesBuilder.build(); + } + + private static String convertToStandardHeaderName(String messageHeaderName) { + if (Ascii.equalsIgnoreCase(messageHeaderName, ACCEPT)) { + return ACCEPT; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, ALLOW)) { + return ALLOW; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, AUTHORIZATION)) { + return AUTHORIZATION; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, BANDWIDTH)) { + return BANDWIDTH; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, BLOCKSIZE)) { + return BLOCKSIZE; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, CACHE_CONTROL)) { + return CACHE_CONTROL; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, CONNECTION)) { + return CONNECTION; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, CONTENT_BASE)) { + return CONTENT_BASE; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, CONTENT_ENCODING)) { + return CONTENT_ENCODING; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, CONTENT_LANGUAGE)) { + return CONTENT_LANGUAGE; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, CONTENT_LENGTH)) { + return CONTENT_LENGTH; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, CONTENT_LOCATION)) { + return CONTENT_LOCATION; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, CONTENT_TYPE)) { + return CONTENT_TYPE; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, CSEQ)) { + return CSEQ; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, DATE)) { + return DATE; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, EXPIRES)) { + return EXPIRES; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, LOCATION)) { + return LOCATION; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, PROXY_AUTHENTICATE)) { + return PROXY_AUTHENTICATE; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, PROXY_REQUIRE)) { + return PROXY_REQUIRE; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, PUBLIC)) { + return PUBLIC; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, RANGE)) { + return RANGE; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, RTP_INFO)) { + return RTP_INFO; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, RTCP_INTERVAL)) { + return RTCP_INTERVAL; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, SCALE)) { + return SCALE; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, SESSION)) { + return SESSION; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, SPEED)) { + return SPEED; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, SUPPORTED)) { + return SUPPORTED; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, TIMESTAMP)) { + return TIMESTAMP; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, TRANSPORT)) { + return TRANSPORT; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, USER_AGENT)) { + return USER_AGENT; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, VIA)) { + return VIA; + } else if (Ascii.equalsIgnoreCase(messageHeaderName, WWW_AUTHENTICATE)) { + return WWW_AUTHENTICATE; + } + return messageHeaderName; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspMediaPeriod.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspMediaPeriod.java new file mode 100644 index 0000000000..7e8eab668e --- /dev/null +++ 
b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspMediaPeriod.java @@ -0,0 +1,892 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static java.lang.Math.min; + +import android.net.Uri; +import android.os.Handler; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.FormatHolder; +import com.google.android.exoplayer2.SeekParameters; +import com.google.android.exoplayer2.decoder.DecoderInputBuffer; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.SeekMap; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.offline.StreamKey; +import com.google.android.exoplayer2.source.MediaPeriod; +import com.google.android.exoplayer2.source.SampleQueue; +import com.google.android.exoplayer2.source.SampleQueue.UpstreamFormatChangedListener; +import com.google.android.exoplayer2.source.SampleStream; +import com.google.android.exoplayer2.source.SampleStream.ReadDataResult; +import com.google.android.exoplayer2.source.SampleStream.ReadFlags; +import com.google.android.exoplayer2.source.TrackGroup; +import com.google.android.exoplayer2.source.TrackGroupArray; +import com.google.android.exoplayer2.source.rtsp.RtspClient.PlaybackEventListener; +import com.google.android.exoplayer2.source.rtsp.RtspClient.SessionInfoListener; +import com.google.android.exoplayer2.source.rtsp.RtspMediaSource.RtspPlaybackException; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; +import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.upstream.Allocator; +import com.google.android.exoplayer2.upstream.Loader; +import com.google.android.exoplayer2.upstream.Loader.Loadable; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.net.BindException; +import java.util.ArrayList; +import java.util.List; +import javax.net.SocketFactory; +import org.checkerframework.checker.nullness.compatqual.NullableType; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** A {@link MediaPeriod} that loads an RTSP stream. */ +/* package */ final class RtspMediaPeriod implements MediaPeriod { + + /** Listener for information about the period. */ + interface Listener { + + /** Called when the {@link RtspSessionTiming} is available. */ + void onSourceInfoRefreshed(RtspSessionTiming timing); + + /** Called when the RTSP server does not support seeking. 
*/ + default void onSeekingUnsupported() {} + } + + /** The maximum times to retry if the underlying data channel failed to bind. */ + private static final int PORT_BINDING_MAX_RETRY_COUNT = 3; + + private final Allocator allocator; + private final Handler handler; + private final InternalListener internalListener; + private final RtspClient rtspClient; + private final List rtspLoaderWrappers; + private final List selectedLoadInfos; + private final Listener listener; + private final RtpDataChannel.Factory rtpDataChannelFactory; + + private @MonotonicNonNull Callback callback; + private @MonotonicNonNull ImmutableList trackGroups; + @Nullable private IOException preparationError; + @Nullable private RtspPlaybackException playbackException; + + private long requestedSeekPositionUs; + private long pendingSeekPositionUs; + private long pendingSeekPositionUsForTcpRetry; + private boolean loadingFinished; + private boolean notifyDiscontinuity; + private boolean released; + private boolean prepared; + private boolean trackSelected; + private int portBindingRetryCount; + private boolean isUsingRtpTcp; + + /** + * Creates an RTSP media period. + * + * @param allocator An {@link Allocator} from which to obtain media buffer allocations. + * @param rtpDataChannelFactory A {@link RtpDataChannel.Factory} for {@link RtpDataChannel}. + * @param uri The RTSP playback {@link Uri}. + * @param listener A {@link Listener} to receive session information updates. + * @param userAgent The user agent. + * @param socketFactory A socket factory for {@link RtspClient}'s connection. + * @param debugLoggingEnabled Whether to log RTSP messages. + */ + public RtspMediaPeriod( + Allocator allocator, + RtpDataChannel.Factory rtpDataChannelFactory, + Uri uri, + Listener listener, + String userAgent, + SocketFactory socketFactory, + boolean debugLoggingEnabled) { + this.allocator = allocator; + this.rtpDataChannelFactory = rtpDataChannelFactory; + this.listener = listener; + + handler = Util.createHandlerForCurrentLooper(); + internalListener = new InternalListener(); + rtspClient = + new RtspClient( + /* sessionInfoListener= */ internalListener, + /* playbackEventListener= */ internalListener, + /* userAgent= */ userAgent, + /* uri= */ uri, + socketFactory, + debugLoggingEnabled); + rtspLoaderWrappers = new ArrayList<>(); + selectedLoadInfos = new ArrayList<>(); + + pendingSeekPositionUs = C.TIME_UNSET; + requestedSeekPositionUs = C.TIME_UNSET; + pendingSeekPositionUsForTcpRetry = C.TIME_UNSET; + } + + /** Releases the {@link RtspMediaPeriod}. 
*/ + public void release() { + for (int i = 0; i < rtspLoaderWrappers.size(); i++) { + rtspLoaderWrappers.get(i).release(); + } + Util.closeQuietly(rtspClient); + released = true; + } + + @Override + public void prepare(Callback callback, long positionUs) { + this.callback = callback; + + try { + rtspClient.start(); + } catch (IOException e) { + preparationError = e; + Util.closeQuietly(rtspClient); + } + } + + @Override + public void maybeThrowPrepareError() throws IOException { + if (preparationError != null) { + throw preparationError; + } + } + + @Override + public TrackGroupArray getTrackGroups() { + checkState(prepared); + return new TrackGroupArray(checkNotNull(trackGroups).toArray(new TrackGroup[0])); + } + + @Override + public ImmutableList getStreamKeys(List trackSelections) { + return ImmutableList.of(); + } + + @Override + public long selectTracks( + @NullableType ExoTrackSelection[] selections, + boolean[] mayRetainStreamFlags, + @NullableType SampleStream[] streams, + boolean[] streamResetFlags, + long positionUs) { + + // Deselect old tracks. + // Input array streams contains the streams selected in the previous track selection. + for (int i = 0; i < selections.length; i++) { + if (streams[i] != null && (selections[i] == null || !mayRetainStreamFlags[i])) { + streams[i] = null; + } + } + + // Select new tracks. + selectedLoadInfos.clear(); + for (int i = 0; i < selections.length; i++) { + TrackSelection selection = selections[i]; + if (selection == null) { + continue; + } + + TrackGroup trackGroup = selection.getTrackGroup(); + int trackGroupIndex = checkNotNull(trackGroups).indexOf(trackGroup); + selectedLoadInfos.add(checkNotNull(rtspLoaderWrappers.get(trackGroupIndex)).loadInfo); + + // Find the sampleStreamWrapper that contains this track group. + if (trackGroups.contains(trackGroup)) { + if (streams[i] == null) { + streams[i] = new SampleStreamImpl(trackGroupIndex); + // Update flag for newly created SampleStream. + streamResetFlags[i] = true; + } + } + } + + // Cancel non-selected loadables. + for (int i = 0; i < rtspLoaderWrappers.size(); i++) { + RtspLoaderWrapper loadControl = rtspLoaderWrappers.get(i); + if (!selectedLoadInfos.contains(loadControl.loadInfo)) { + loadControl.cancelLoad(); + } + } + + trackSelected = true; + if (positionUs != 0) { + // Track selection is performed only once in RTSP streams. + requestedSeekPositionUs = positionUs; + pendingSeekPositionUs = positionUs; + pendingSeekPositionUsForTcpRetry = positionUs; + } + maybeSetupTracks(); + return positionUs; + } + + @Override + public void discardBuffer(long positionUs, boolean toKeyframe) { + if (isSeekPending()) { + return; + } + + for (int i = 0; i < rtspLoaderWrappers.size(); i++) { + RtspLoaderWrapper loaderWrapper = rtspLoaderWrappers.get(i); + if (!loaderWrapper.canceled) { + loaderWrapper.sampleQueue.discardTo(positionUs, toKeyframe, /* stopAtReadPosition= */ true); + } + } + } + + @Override + public long readDiscontinuity() { + // Discontinuity only happens in RTSP when seeking an unexpectedly un-seekable RTSP server (a + // server that doesn't include the required RTP-Info header in its PLAY responses). This only + // applies to seeks made before receiving the first RTSP PLAY response. The playback can only + // start from time zero in this case. + if (notifyDiscontinuity) { + notifyDiscontinuity = false; + return 0; + } + return C.TIME_UNSET; + } + + @Override + public long seekToUs(long positionUs) { + // Handles all RTSP seeking cases: + // 1. 
Seek before the first RTP/UDP packet is received. The seek position is cached to be used + // after retrying playback with RTP/TCP. + // 2a. Normal RTSP seek: if no additional seek is requested after the first seek. Request RTSP + // PAUSE and then PLAY at the seek position. + // 2b. If additional seek is requested after the first seek, records the new seek position, + // 2b.1. If RTSP PLAY (for the first seek) is already sent, the new seek position is used to + // initiate another seek upon receiving PLAY response by invoking this method again. + // 2b.2. If RTSP PLAY (for the first seek) has not been sent, the new seek position will be + // used in the following PLAY request. + + // TODO(internal: b/213153670) Handle dropped seek position. + if (getBufferedPositionUs() == 0 && !isUsingRtpTcp) { + // Stores the seek position for later, if no RTP packet is received when using UDP. + pendingSeekPositionUsForTcpRetry = positionUs; + return positionUs; + } + + discardBuffer(positionUs, /* toKeyframe= */ false); + requestedSeekPositionUs = positionUs; + + if (isSeekPending()) { + switch (rtspClient.getState()) { + case RtspClient.RTSP_STATE_READY: + // PLAY request is sent, yet to receive the response. requestedSeekPositionUs stores the + // new position to do another seek upon receiving the PLAY response. + return positionUs; + case RtspClient.RTSP_STATE_PLAYING: + // Pending PAUSE response, updates client with the newest seek position for the following + // PLAY request. + pendingSeekPositionUs = positionUs; + rtspClient.seekToUs(pendingSeekPositionUs); + return positionUs; + case RtspClient.RTSP_STATE_UNINITIALIZED: + case RtspClient.RTSP_STATE_INIT: + default: + // Never happens. + throw new IllegalStateException(); + } + } + + if (seekInsideBufferUs(positionUs)) { + return positionUs; + } + + pendingSeekPositionUs = positionUs; + rtspClient.seekToUs(positionUs); + for (int i = 0; i < rtspLoaderWrappers.size(); i++) { + rtspLoaderWrappers.get(i).seekTo(positionUs); + } + return positionUs; + } + + @Override + public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParameters) { + return positionUs; + } + + @Override + public long getBufferedPositionUs() { + if (loadingFinished || rtspLoaderWrappers.isEmpty()) { + return C.TIME_END_OF_SOURCE; + } + + if (requestedSeekPositionUs != C.TIME_UNSET) { + return requestedSeekPositionUs; + } + + boolean allLoaderWrappersAreCanceled = true; + long bufferedPositionUs = Long.MAX_VALUE; + for (int i = 0; i < rtspLoaderWrappers.size(); i++) { + RtspLoaderWrapper loaderWrapper = rtspLoaderWrappers.get(i); + if (!loaderWrapper.canceled) { + bufferedPositionUs = min(bufferedPositionUs, loaderWrapper.getBufferedPositionUs()); + allLoaderWrappersAreCanceled = false; + } + } + + return allLoaderWrappersAreCanceled || bufferedPositionUs == Long.MIN_VALUE + ? 0 + : bufferedPositionUs; + } + + @Override + public long getNextLoadPositionUs() { + return getBufferedPositionUs(); + } + + @Override + public boolean continueLoading(long positionUs) { + return isLoading(); + } + + @Override + public boolean isLoading() { + return !loadingFinished; + } + + @Override + public void reevaluateBuffer(long positionUs) { + // Do nothing. + } + + // SampleStream methods. 
+ + /* package */ boolean isReady(int trackGroupIndex) { + return !suppressRead() && rtspLoaderWrappers.get(trackGroupIndex).isSampleQueueReady(); + } + + @ReadDataResult + /* package */ int readData( + int sampleQueueIndex, + FormatHolder formatHolder, + DecoderInputBuffer buffer, + @ReadFlags int readFlags) { + if (suppressRead()) { + return C.RESULT_NOTHING_READ; + } + return rtspLoaderWrappers.get(sampleQueueIndex).read(formatHolder, buffer, readFlags); + } + + /* package */ int skipData(int sampleQueueIndex, long positionUs) { + if (suppressRead()) { + return C.RESULT_NOTHING_READ; + } + return rtspLoaderWrappers.get(sampleQueueIndex).skipData(positionUs); + } + + private boolean suppressRead() { + return notifyDiscontinuity; + } + + // Internal methods. + + @Nullable + private RtpDataLoadable getLoadableByTrackUri(Uri trackUri) { + for (int i = 0; i < rtspLoaderWrappers.size(); i++) { + if (!rtspLoaderWrappers.get(i).canceled) { + RtpLoadInfo loadInfo = rtspLoaderWrappers.get(i).loadInfo; + if (loadInfo.getTrackUri().equals(trackUri)) { + return loadInfo.loadable; + } + } + } + return null; + } + + private boolean isSeekPending() { + return pendingSeekPositionUs != C.TIME_UNSET; + } + + private void maybeFinishPrepare() { + if (released || prepared) { + return; + } + + // Make sure all sample queues have got format assigned. + for (int i = 0; i < rtspLoaderWrappers.size(); i++) { + if (rtspLoaderWrappers.get(i).sampleQueue.getUpstreamFormat() == null) { + return; + } + } + + prepared = true; + trackGroups = buildTrackGroups(ImmutableList.copyOf(rtspLoaderWrappers)); + checkNotNull(callback).onPrepared(/* mediaPeriod= */ this); + } + + /** + * Attempts to seek to the specified position within the sample queues. + * + * @param positionUs The seek position in microseconds. + * @return Whether the in-buffer seek was successful for all loading RTSP tracks. + */ + private boolean seekInsideBufferUs(long positionUs) { + for (int i = 0; i < rtspLoaderWrappers.size(); i++) { + SampleQueue sampleQueue = rtspLoaderWrappers.get(i).sampleQueue; + if (!sampleQueue.seekTo(positionUs, /* allowTimeBeyondBuffer= */ false)) { + return false; + } + } + return true; + } + + private void maybeSetupTracks() { + boolean transportReady = true; + for (int i = 0; i < selectedLoadInfos.size(); i++) { + transportReady &= selectedLoadInfos.get(i).isTransportReady(); + } + + if (transportReady && trackSelected) { + rtspClient.setupSelectedTracks(selectedLoadInfos); + } + } + + private void updateLoadingFinished() { + loadingFinished = true; + for (int i = 0; i < rtspLoaderWrappers.size(); i++) { + loadingFinished &= rtspLoaderWrappers.get(i).canceled; + } + } + + private static ImmutableList buildTrackGroups( + ImmutableList rtspLoaderWrappers) { + ImmutableList.Builder listBuilder = new ImmutableList.Builder<>(); + SampleQueue sampleQueue; + for (int i = 0; i < rtspLoaderWrappers.size(); i++) { + sampleQueue = rtspLoaderWrappers.get(i).sampleQueue; + listBuilder.add( + new TrackGroup( + /* id= */ Integer.toString(i), checkNotNull(sampleQueue.getUpstreamFormat()))); + } + return listBuilder.build(); + } + + private final class InternalListener + implements ExtractorOutput, + Loader.Callback, + UpstreamFormatChangedListener, + SessionInfoListener, + PlaybackEventListener { + + // ExtractorOutput implementation. 
+ + @Override + public TrackOutput track(int id, int type) { + return checkNotNull(rtspLoaderWrappers.get(id)).sampleQueue; + } + + @Override + public void endTracks() { + handler.post(RtspMediaPeriod.this::maybeFinishPrepare); + } + + @Override + public void seekMap(SeekMap seekMap) { + // RTSP does not support seek map. + } + + // Loadable.Callback implementation. + + @Override + public void onLoadCompleted( + RtpDataLoadable loadable, long elapsedRealtimeMs, long loadDurationMs) { + if (getBufferedPositionUs() == 0) { + if (!isUsingRtpTcp) { + // Retry playback with TCP if no sample has been received so far, and we are not already + // using TCP. Retrying will setup new loadables, so will not retry with the current + // loadables. + retryWithRtpTcp(); + isUsingRtpTcp = true; + } + return; + } + + // Cancel the loader wrapper associated with the completed loadable. + for (int i = 0; i < rtspLoaderWrappers.size(); i++) { + RtspLoaderWrapper loaderWrapper = rtspLoaderWrappers.get(i); + if (loaderWrapper.loadInfo.loadable == loadable) { + loaderWrapper.cancelLoad(); + break; + } + } + } + + @Override + public void onLoadCanceled( + RtpDataLoadable loadable, long elapsedRealtimeMs, long loadDurationMs, boolean released) {} + + @Override + public Loader.LoadErrorAction onLoadError( + RtpDataLoadable loadable, + long elapsedRealtimeMs, + long loadDurationMs, + IOException error, + int errorCount) { + if (!prepared) { + preparationError = error; + } else { + if (error.getCause() instanceof BindException) { + // Allow for retry on RTP port open failure by catching BindException. Two ports are + // opened for each RTP stream, the first port number is auto assigned by the system, while + // the second is manually selected. It is thus possible that the second port fails to + // bind. Failing is more likely when running in a server-side testing environment, it is + // less likely on real devices. + if (portBindingRetryCount++ < PORT_BINDING_MAX_RETRY_COUNT) { + return Loader.RETRY; + } + } else { + playbackException = + new RtspPlaybackException( + /* message= */ loadable.rtspMediaTrack.uri.toString(), error); + } + } + return Loader.DONT_RETRY; + } + + // SampleQueue.UpstreamFormatChangedListener implementation. + + @Override + public void onUpstreamFormatChanged(Format format) { + handler.post(RtspMediaPeriod.this::maybeFinishPrepare); + } + + // RtspClient.PlaybackEventListener implementation. + + @Override + public void onRtspSetupCompleted() { + long offsetMs = 0; + if (pendingSeekPositionUs != C.TIME_UNSET) { + offsetMs = Util.usToMs(pendingSeekPositionUs); + } else if (pendingSeekPositionUsForTcpRetry != C.TIME_UNSET) { + offsetMs = Util.usToMs(pendingSeekPositionUsForTcpRetry); + } + rtspClient.startPlayback(offsetMs); + } + + @Override + public void onPlaybackStarted( + long startPositionUs, ImmutableList trackTimingList) { + + // Validate that the trackTimingList contains timings for the selected tracks, and notify the + // listener. 
+ ArrayList trackUrisWithTiming = new ArrayList<>(trackTimingList.size()); + for (int i = 0; i < trackTimingList.size(); i++) { + trackUrisWithTiming.add(checkNotNull(trackTimingList.get(i).uri.getPath())); + } + for (int i = 0; i < selectedLoadInfos.size(); i++) { + RtpLoadInfo loadInfo = selectedLoadInfos.get(i); + if (!trackUrisWithTiming.contains(loadInfo.getTrackUri().getPath())) { + listener.onSeekingUnsupported(); + if (isSeekPending()) { + notifyDiscontinuity = true; + pendingSeekPositionUs = C.TIME_UNSET; + requestedSeekPositionUs = C.TIME_UNSET; + pendingSeekPositionUsForTcpRetry = C.TIME_UNSET; + } + } + } + + for (int i = 0; i < trackTimingList.size(); i++) { + RtspTrackTiming trackTiming = trackTimingList.get(i); + @Nullable RtpDataLoadable dataLoadable = getLoadableByTrackUri(trackTiming.uri); + if (dataLoadable == null) { + continue; + } + + dataLoadable.setTimestamp(trackTiming.rtpTimestamp); + dataLoadable.setSequenceNumber(trackTiming.sequenceNumber); + + if (isSeekPending() && pendingSeekPositionUs == requestedSeekPositionUs) { + // Seek loadable only when all pending seeks are processed, or SampleQueues will report + // inconsistent bufferedPosition. + // Seeks to the start position when the initial seek position is set. + dataLoadable.seekToUs(startPositionUs, trackTiming.rtpTimestamp); + } + } + + if (isSeekPending()) { + if (pendingSeekPositionUs == requestedSeekPositionUs) { + // No seek request was made after the current pending seek. + pendingSeekPositionUs = C.TIME_UNSET; + requestedSeekPositionUs = C.TIME_UNSET; + } else { + // Resets pendingSeekPositionUs to perform a fresh RTSP seek. + pendingSeekPositionUs = C.TIME_UNSET; + seekToUs(requestedSeekPositionUs); + } + } else if (pendingSeekPositionUsForTcpRetry != C.TIME_UNSET && isUsingRtpTcp) { + seekToUs(pendingSeekPositionUsForTcpRetry); + pendingSeekPositionUsForTcpRetry = C.TIME_UNSET; + } + } + + @Override + public void onPlaybackError(RtspPlaybackException error) { + playbackException = error; + } + + @Override + public void onSessionTimelineUpdated( + RtspSessionTiming timing, ImmutableList tracks) { + for (int i = 0; i < tracks.size(); i++) { + RtspMediaTrack rtspMediaTrack = tracks.get(i); + RtspLoaderWrapper loaderWrapper = + new RtspLoaderWrapper(rtspMediaTrack, /* trackId= */ i, rtpDataChannelFactory); + rtspLoaderWrappers.add(loaderWrapper); + loaderWrapper.startLoading(); + } + + listener.onSourceInfoRefreshed(timing); + } + + @Override + public void onSessionTimelineRequestFailed(String message, @Nullable Throwable cause) { + preparationError = cause == null ? new IOException(message) : new IOException(message, cause); + } + } + + private void retryWithRtpTcp() { + rtspClient.retryWithRtpTcp(); + + @Nullable + RtpDataChannel.Factory fallbackRtpDataChannelFactory = + rtpDataChannelFactory.createFallbackDataChannelFactory(); + if (fallbackRtpDataChannelFactory == null) { + playbackException = + new RtspPlaybackException("No fallback data channel factory for TCP retry"); + return; + } + + ArrayList newLoaderWrappers = new ArrayList<>(rtspLoaderWrappers.size()); + ArrayList newSelectedLoadInfos = new ArrayList<>(selectedLoadInfos.size()); + + // newLoaderWrappers' elements and orders must match those of rtspLoaderWrappers'. 
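The comment above states the key invariant for the TCP retry: track ids are list indices, so canceled wrappers must keep their slots. A generic sketch of that index-preserving rebuild, not part of the patch and using illustrative names:

import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;
import java.util.function.Predicate;

final class IndexPreservingRebuildSketch {
  /** Rebuilds a list so that every element keeps its index; active entries are recreated. */
  static <T> List<T> rebuild(List<T> old, Predicate<T> isActive, Function<T, T> recreate) {
    List<T> rebuilt = new ArrayList<>(old.size());
    for (T element : old) {
      rebuilt.add(isActive.test(element) ? recreate.apply(element) : element);
    }
    return rebuilt;
  }
}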
+ for (int i = 0; i < rtspLoaderWrappers.size(); i++) { + RtspLoaderWrapper loaderWrapper = rtspLoaderWrappers.get(i); + + if (!loaderWrapper.canceled) { + RtspLoaderWrapper newLoaderWrapper = + new RtspLoaderWrapper( + loaderWrapper.loadInfo.mediaTrack, /* trackId= */ i, fallbackRtpDataChannelFactory); + newLoaderWrappers.add(newLoaderWrapper); + newLoaderWrapper.startLoading(); + if (selectedLoadInfos.contains(loaderWrapper.loadInfo)) { + newSelectedLoadInfos.add(newLoaderWrapper.loadInfo); + } + } else { + newLoaderWrappers.add(loaderWrapper); + } + } + + // Switch to new LoaderWrappers. + ImmutableList oldRtspLoaderWrappers = + ImmutableList.copyOf(rtspLoaderWrappers); + rtspLoaderWrappers.clear(); + rtspLoaderWrappers.addAll(newLoaderWrappers); + selectedLoadInfos.clear(); + selectedLoadInfos.addAll(newSelectedLoadInfos); + + // Cancel old loadable wrappers after switching, so that buffered position is always read from + // active sample queues. + for (int i = 0; i < oldRtspLoaderWrappers.size(); i++) { + oldRtspLoaderWrappers.get(i).cancelLoad(); + } + } + + private final class SampleStreamImpl implements SampleStream { + private final int track; + + public SampleStreamImpl(int track) { + this.track = track; + } + + @Override + public boolean isReady() { + return RtspMediaPeriod.this.isReady(track); + } + + @Override + public void maybeThrowError() throws RtspPlaybackException { + if (playbackException != null) { + throw playbackException; + } + } + + @Override + public int readData( + FormatHolder formatHolder, DecoderInputBuffer buffer, @ReadFlags int readFlags) { + return RtspMediaPeriod.this.readData(track, formatHolder, buffer, readFlags); + } + + @Override + public int skipData(long positionUs) { + return RtspMediaPeriod.this.skipData(track, positionUs); + } + } + + /** Manages the loading of an RTSP track. */ + private final class RtspLoaderWrapper { + /** The {@link RtpLoadInfo} of the RTSP track to load. */ + public final RtpLoadInfo loadInfo; + + private final Loader loader; + private final SampleQueue sampleQueue; + private boolean canceled; + private boolean released; + + /** + * Creates a new instance. + * + *

        Instances must be {@link #release() released} after loadings conclude. + */ + public RtspLoaderWrapper( + RtspMediaTrack mediaTrack, int trackId, RtpDataChannel.Factory rtpDataChannelFactory) { + loadInfo = new RtpLoadInfo(mediaTrack, trackId, rtpDataChannelFactory); + loader = new Loader("ExoPlayer:RtspMediaPeriod:RtspLoaderWrapper " + trackId); + sampleQueue = SampleQueue.createWithoutDrm(allocator); + sampleQueue.setUpstreamFormatChangeListener(internalListener); + } + + /** + * Returns the largest buffered position in microseconds; or {@link Long#MIN_VALUE} if no sample + * has been queued. + */ + public long getBufferedPositionUs() { + return sampleQueue.getLargestQueuedTimestampUs(); + } + + /** Starts loading. */ + public void startLoading() { + loader.startLoading( + loadInfo.loadable, /* callback= */ internalListener, /* defaultMinRetryCount= */ 0); + } + + public boolean isSampleQueueReady() { + return sampleQueue.isReady(/* loadingFinished= */ canceled); + } + + public @ReadDataResult int read( + FormatHolder formatHolder, DecoderInputBuffer buffer, @ReadFlags int readFlags) { + return sampleQueue.read(formatHolder, buffer, readFlags, /* loadingFinished= */ canceled); + } + + public int skipData(long positionUs) { + int skipCount = sampleQueue.getSkipCount(positionUs, /* allowEndOfQueue= */ canceled); + sampleQueue.skip(skipCount); + return skipCount; + } + + /** Cancels loading. */ + public void cancelLoad() { + if (!canceled) { + loadInfo.loadable.cancelLoad(); + canceled = true; + + // Update loadingFinished every time loading is canceled. + updateLoadingFinished(); + } + } + + /** Resets the {@link Loadable} and {@link SampleQueue} to prepare for an RTSP seek. */ + public void seekTo(long positionUs) { + if (!canceled) { + loadInfo.loadable.resetForSeek(); + sampleQueue.reset(); + sampleQueue.setStartTimeUs(positionUs); + } + } + + /** Releases the instance. */ + public void release() { + if (released) { + return; + } + loader.release(); + sampleQueue.release(); + released = true; + } + } + + /** Groups the info needed for loading one RTSP track in RTP. */ + /* package */ final class RtpLoadInfo { + /** The {@link RtspMediaTrack}. */ + public final RtspMediaTrack mediaTrack; + + private final RtpDataLoadable loadable; + + @Nullable private String transport; + + /** Creates a new instance. */ + public RtpLoadInfo( + RtspMediaTrack mediaTrack, int trackId, RtpDataChannel.Factory rtpDataChannelFactory) { + this.mediaTrack = mediaTrack; + + // This listener runs on the playback thread, posted by the Loader thread. + RtpDataLoadable.EventListener transportEventListener = + (transport, rtpDataChannel) -> { + RtpLoadInfo.this.transport = transport; + + @Nullable + RtspMessageChannel.InterleavedBinaryDataListener interleavedBinaryDataListener = + rtpDataChannel.getInterleavedBinaryDataListener(); + if (interleavedBinaryDataListener != null) { + rtspClient.registerInterleavedDataChannel( + rtpDataChannel.getLocalPort(), interleavedBinaryDataListener); + isUsingRtpTcp = true; + } + maybeSetupTracks(); + }; + + this.loadable = + new RtpDataLoadable( + trackId, + mediaTrack, + /* eventListener= */ transportEventListener, + /* output= */ internalListener, + rtpDataChannelFactory); + } + + /** + * Returns whether RTP transport is ready. Call {@link #getTransport()} only after transport is + * ready. + */ + public boolean isTransportReady() { + return transport != null; + } + + /** + * Gets the transport string for RTP loading. 
+ * + * @throws IllegalStateException When transport for this RTP stream is not set. + */ + public String getTransport() { + checkStateNotNull(transport); + return transport; + } + + /** Gets the {@link Uri} for the loading RTSP track. */ + public Uri getTrackUri() { + return loadable.rtspMediaTrack.uri; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspMediaSource.java new file mode 100644 index 0000000000..747bb36daa --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspMediaSource.java @@ -0,0 +1,320 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import android.net.Uri; +import androidx.annotation.IntRange; +import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ExoPlayerLibraryInfo; +import com.google.android.exoplayer2.MediaItem; +import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.drm.DrmSessionManagerProvider; +import com.google.android.exoplayer2.source.BaseMediaSource; +import com.google.android.exoplayer2.source.ForwardingTimeline; +import com.google.android.exoplayer2.source.MediaPeriod; +import com.google.android.exoplayer2.source.MediaSource; +import com.google.android.exoplayer2.source.MediaSourceFactory; +import com.google.android.exoplayer2.source.SinglePeriodTimeline; +import com.google.android.exoplayer2.upstream.Allocator; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; +import com.google.android.exoplayer2.upstream.TransferListener; +import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.io.IOException; +import javax.net.SocketFactory; + +/** An Rtsp {@link MediaSource} */ +public final class RtspMediaSource extends BaseMediaSource { + + static { + ExoPlayerLibraryInfo.registerModule("goog.exo.rtsp"); + } + + /** The default value for {@link Factory#setTimeoutMs}. */ + public static final long DEFAULT_TIMEOUT_MS = 8000; + + /** + * Factory for {@link RtspMediaSource} + * + *

        <p>This factory doesn't support the following methods from {@link MediaSourceFactory}: + * + * <ul> + *   <li>{@link #setDrmSessionManagerProvider(DrmSessionManagerProvider)} + *   <li>{@link #setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy)} + * </ul>
        + */ + @SuppressWarnings("deprecation") // Implement deprecated type for backwards compatibility. + public static final class Factory implements MediaSourceFactory { + + private long timeoutMs; + private String userAgent; + private SocketFactory socketFactory; + private boolean forceUseRtpTcp; + private boolean debugLoggingEnabled; + + public Factory() { + timeoutMs = DEFAULT_TIMEOUT_MS; + userAgent = ExoPlayerLibraryInfo.VERSION_SLASHY; + socketFactory = SocketFactory.getDefault(); + } + + /** + * Sets whether to force using TCP as the default RTP transport. + * + *

        The default value is {@code false}, the source will first try streaming RTSP with UDP. If + * no data is received on the UDP channel (for instance, when streaming behind a NAT) for a + * while, the source will switch to streaming using TCP. If this value is set to {@code true}, + * the source will always use TCP for streaming. + * + * @param forceUseRtpTcp Whether force to use TCP for streaming. + * @return This Factory, for convenience. + */ + @CanIgnoreReturnValue + public Factory setForceUseRtpTcp(boolean forceUseRtpTcp) { + this.forceUseRtpTcp = forceUseRtpTcp; + return this; + } + + /** + * Sets the user agent, the default value is {@link ExoPlayerLibraryInfo#VERSION_SLASHY}. + * + * @param userAgent The user agent. + * @return This Factory, for convenience. + */ + @CanIgnoreReturnValue + public Factory setUserAgent(String userAgent) { + this.userAgent = userAgent; + return this; + } + + /** + * Sets a socket factory for {@link RtspClient}'s connection, the default value is {@link + * SocketFactory#getDefault()}. + * + * @param socketFactory A socket factory. + * @return This Factory, for convenience. + */ + @CanIgnoreReturnValue + public Factory setSocketFactory(SocketFactory socketFactory) { + this.socketFactory = socketFactory; + return this; + } + + /** + * Sets whether to log RTSP messages, the default value is {@code false}. + * + *

        This option presents a privacy risk, since it may expose sensitive information such as + * user's credentials. + * + * @param debugLoggingEnabled Whether to log RTSP messages. + * @return This Factory, for convenience. + */ + @CanIgnoreReturnValue + public Factory setDebugLoggingEnabled(boolean debugLoggingEnabled) { + this.debugLoggingEnabled = debugLoggingEnabled; + return this; + } + + /** + * Sets the timeout in milliseconds, the default value is {@link #DEFAULT_TIMEOUT_MS}. + * + *

        A positive number of milliseconds to wait before lack of received RTP packets is treated + * as the end of input. + * + * @param timeoutMs The timeout measured in milliseconds. + * @return This Factory, for convenience. + */ + @CanIgnoreReturnValue + public Factory setTimeoutMs(@IntRange(from = 1) long timeoutMs) { + checkArgument(timeoutMs > 0); + this.timeoutMs = timeoutMs; + return this; + } + + /** Does nothing. {@link RtspMediaSource} does not support DRM. */ + @Override + public Factory setDrmSessionManagerProvider(DrmSessionManagerProvider drmSessionManager) { + return this; + } + + /** Does nothing. {@link RtspMediaSource} does not support error handling policies. */ + @Override + public Factory setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy loadErrorHandlingPolicy) { + // TODO(internal b/172331505): Implement support. + return this; + } + + @Override + public @C.ContentType int[] getSupportedTypes() { + return new int[] {C.CONTENT_TYPE_RTSP}; + } + + /** + * Returns a new {@link RtspMediaSource} using the current parameters. + * + * @param mediaItem The {@link MediaItem}. + * @return The new {@link RtspMediaSource}. + * @throws NullPointerException if {@link MediaItem#localConfiguration} is {@code null}. + */ + @Override + public RtspMediaSource createMediaSource(MediaItem mediaItem) { + checkNotNull(mediaItem.localConfiguration); + return new RtspMediaSource( + mediaItem, + forceUseRtpTcp + ? new TransferRtpDataChannelFactory(timeoutMs) + : new UdpDataSourceRtpDataChannelFactory(timeoutMs), + userAgent, + socketFactory, + debugLoggingEnabled); + } + } + + /** Thrown when an exception or error is encountered during loading an RTSP stream. */ + public static final class RtspPlaybackException extends IOException { + public RtspPlaybackException(String message) { + super(message); + } + + public RtspPlaybackException(Throwable e) { + super(e); + } + + public RtspPlaybackException(String message, Throwable e) { + super(message, e); + } + } + + private final MediaItem mediaItem; + private final RtpDataChannel.Factory rtpDataChannelFactory; + private final String userAgent; + private final Uri uri; + private final SocketFactory socketFactory; + private final boolean debugLoggingEnabled; + + private long timelineDurationUs; + private boolean timelineIsSeekable; + private boolean timelineIsLive; + private boolean timelineIsPlaceholder; + + @VisibleForTesting + /* package */ RtspMediaSource( + MediaItem mediaItem, + RtpDataChannel.Factory rtpDataChannelFactory, + String userAgent, + SocketFactory socketFactory, + boolean debugLoggingEnabled) { + this.mediaItem = mediaItem; + this.rtpDataChannelFactory = rtpDataChannelFactory; + this.userAgent = userAgent; + this.uri = checkNotNull(this.mediaItem.localConfiguration).uri; + this.socketFactory = socketFactory; + this.debugLoggingEnabled = debugLoggingEnabled; + this.timelineDurationUs = C.TIME_UNSET; + this.timelineIsPlaceholder = true; + } + + @Override + protected void prepareSourceInternal(@Nullable TransferListener mediaTransferListener) { + notifySourceInfoRefreshed(); + } + + @Override + protected void releaseSourceInternal() { + // Do nothing. + } + + @Override + public MediaItem getMediaItem() { + return mediaItem; + } + + @Override + public void maybeThrowSourceInfoRefreshError() { + // Do nothing. 
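For readers skimming the file, a typical way to wire the new source into a player might look like the sketch below; it is not part of the patch, the RTSP URI is a placeholder, and the player is assumed to be a regular com.google.android.exoplayer2.ExoPlayer instance:

import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.rtsp.RtspMediaSource;

final class RtspUsageSketch {
  static void startPlayback(ExoPlayer player) {
    MediaItem mediaItem = MediaItem.fromUri("rtsp://example.com/stream"); // placeholder URI
    MediaSource mediaSource =
        new RtspMediaSource.Factory()
            .setForceUseRtpTcp(false) // try UDP first; the source retries over TCP on its own
            .setTimeoutMs(8_000)
            .createMediaSource(mediaItem);
    player.setMediaSource(mediaSource);
    player.prepare();
    player.play();
  }
}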
+ } + + @Override + public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long startPositionUs) { + return new RtspMediaPeriod( + allocator, + rtpDataChannelFactory, + uri, + new RtspMediaPeriod.Listener() { + @Override + public void onSourceInfoRefreshed(RtspSessionTiming timing) { + timelineDurationUs = Util.msToUs(timing.getDurationMs()); + timelineIsSeekable = !timing.isLive(); + timelineIsLive = timing.isLive(); + timelineIsPlaceholder = false; + notifySourceInfoRefreshed(); + } + + @Override + public void onSeekingUnsupported() { + timelineIsSeekable = false; + notifySourceInfoRefreshed(); + } + }, + userAgent, + socketFactory, + debugLoggingEnabled); + } + + @Override + public void releasePeriod(MediaPeriod mediaPeriod) { + ((RtspMediaPeriod) mediaPeriod).release(); + } + + // Internal methods. + + private void notifySourceInfoRefreshed() { + Timeline timeline = + new SinglePeriodTimeline( + timelineDurationUs, + timelineIsSeekable, + /* isDynamic= */ false, + /* useLiveConfiguration= */ timelineIsLive, + /* manifest= */ null, + mediaItem); + if (timelineIsPlaceholder) { + timeline = + new ForwardingTimeline(timeline) { + @Override + public Window getWindow( + int windowIndex, Window window, long defaultPositionProjectionUs) { + super.getWindow(windowIndex, window, defaultPositionProjectionUs); + window.isPlaceholder = true; + return window; + } + + @Override + public Period getPeriod(int periodIndex, Period period, boolean setIds) { + super.getPeriod(periodIndex, period, setIds); + period.isPlaceholder = true; + return period; + } + }; + } + refreshSourceInfo(timeline); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspMediaTrack.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspMediaTrack.java new file mode 100644 index 0000000000..d28d3e20a9 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspMediaTrack.java @@ -0,0 +1,469 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.source.rtsp.MediaDescription.MEDIA_TYPE_AUDIO; +import static com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat.getMimeTypeFromRtpMediaType; +import static com.google.android.exoplayer2.source.rtsp.SessionDescription.ATTR_CONTROL; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.NalUnitUtil.NAL_START_CODE; +import static com.google.android.exoplayer2.util.Util.castNonNull; + +import android.net.Uri; +import android.util.Base64; +import android.util.Pair; +import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.audio.AacUtil; +import com.google.android.exoplayer2.util.CodecSpecificDataUtil; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.NalUnitUtil; +import com.google.android.exoplayer2.util.ParsableBitArray; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; + +/** Represents a media track in an RTSP playback. */ +/* package */ final class RtspMediaTrack { + // Format specific parameter names. + private static final String PARAMETER_PROFILE_LEVEL_ID = "profile-level-id"; + private static final String PARAMETER_SPROP_PARAMS = "sprop-parameter-sets"; + + private static final String PARAMETER_AMR_OCTET_ALIGN = "octet-align"; + private static final String PARAMETER_AMR_INTERLEAVING = "interleaving"; + private static final String PARAMETER_H265_SPROP_SPS = "sprop-sps"; + private static final String PARAMETER_H265_SPROP_PPS = "sprop-pps"; + private static final String PARAMETER_H265_SPROP_VPS = "sprop-vps"; + private static final String PARAMETER_H265_SPROP_MAX_DON_DIFF = "sprop-max-don-diff"; + private static final String PARAMETER_MP4A_CONFIG = "config"; + private static final String PARAMETER_MP4A_C_PRESENT = "cpresent"; + + /** Prefix for the RFC6381 codecs string for AAC formats. */ + private static final String AAC_CODECS_PREFIX = "mp4a.40."; + /** Prefix for the RFC6381 codecs string for AVC formats. */ + private static final String H264_CODECS_PREFIX = "avc1."; + /** Prefix for the RFC6416 codecs string for MPEG4V-ES formats. */ + private static final String MPEG4_CODECS_PREFIX = "mp4v."; + + private static final String GENERIC_CONTROL_ATTR = "*"; + /** + * Default height for MP4V. + * + *

        RFC6416 does not mandate codec specific data (like width and height) in the fmtp attribute. + * These values are taken from Android's software MP4V decoder. + */ + private static final int DEFAULT_MP4V_WIDTH = 352; + + /** + * Default height for MP4V. + * + *

        RFC6416 does not mandate codec specific data (like width and height) in the fmtp attribute. + * These values are taken from Android's software MP4V decoder. + */ + private static final int DEFAULT_MP4V_HEIGHT = 288; + + /** + * Default width for VP8. + * + *

        RFC7741 never uses codec specific data (like width and height) in the fmtp attribute. These + * values are taken from Android's + * software VP8 decoder. + */ + private static final int DEFAULT_VP8_WIDTH = 320; + /** + * Default height for VP8. + * + *

        RFC7741 never uses codec specific data (like width and height) in the fmtp attribute. These + * values are taken from Android's + * software VP8 decoder. + */ + private static final int DEFAULT_VP8_HEIGHT = 240; + + /** RFC7587 Section 6.1 Sampling rate for OPUS is fixed at 48KHz. */ + private static final int OPUS_CLOCK_RATE = 48_000; + + /** + * Default width for VP9. + * + *

        VP9 RFC (this draft + * RFC) never uses codec specific data (like width and height) in the fmtp attribute. These + * values are taken from Android's + * software VP9 decoder. + */ + private static final int DEFAULT_VP9_WIDTH = 320; + /** + * Default height for VP9. + * + *

        VP9 RFC (this draft + * RFC) never uses codec specific data (like width and height) in the fmtp attribute. These + * values are taken from Android's + * software VP9 decoder. + */ + private static final int DEFAULT_VP9_HEIGHT = 240; + + /** + * Default width for H263. + * + *

        RFC4629 does not mandate codec specific data (like width and height) in the fmtp attribute. + * These values are taken from Android's software H263 decoder. + */ + private static final int DEFAULT_H263_WIDTH = 352; + /** + * Default height for H263. + * + *

        RFC4629 does not mandate codec specific data (like width and height) in the fmtp attribute. + * These values are taken from Android's software H263 decoder. + */ + private static final int DEFAULT_H263_HEIGHT = 288; + + /** The track's associated {@link RtpPayloadFormat}. */ + public final RtpPayloadFormat payloadFormat; + /** The track's URI. */ + public final Uri uri; + + /** + * Creates a new instance from a {@link MediaDescription}. + * + * @param mediaDescription The {@link MediaDescription} of this track. + * @param sessionUri The {@link Uri} of the RTSP playback session. + */ + public RtspMediaTrack(MediaDescription mediaDescription, Uri sessionUri) { + checkArgument(mediaDescription.attributes.containsKey(ATTR_CONTROL)); + payloadFormat = generatePayloadFormat(mediaDescription); + uri = extractTrackUri(sessionUri, castNonNull(mediaDescription.attributes.get(ATTR_CONTROL))); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + RtspMediaTrack that = (RtspMediaTrack) o; + return payloadFormat.equals(that.payloadFormat) && uri.equals(that.uri); + } + + @Override + public int hashCode() { + int result = 7; + result = 31 * result + payloadFormat.hashCode(); + result = 31 * result + uri.hashCode(); + return result; + } + + @VisibleForTesting + /* package */ static RtpPayloadFormat generatePayloadFormat(MediaDescription mediaDescription) { + Format.Builder formatBuilder = new Format.Builder(); + + if (mediaDescription.bitrate > 0) { + formatBuilder.setAverageBitrate(mediaDescription.bitrate); + } + + int rtpPayloadType = mediaDescription.rtpMapAttribute.payloadType; + String mediaEncoding = mediaDescription.rtpMapAttribute.mediaEncoding; + + String mimeType = getMimeTypeFromRtpMediaType(mediaEncoding); + formatBuilder.setSampleMimeType(mimeType); + + int clockRate = mediaDescription.rtpMapAttribute.clockRate; + int channelCount = C.INDEX_UNSET; + if (MEDIA_TYPE_AUDIO.equals(mediaDescription.mediaType)) { + channelCount = + inferChannelCount(mediaDescription.rtpMapAttribute.encodingParameters, mimeType); + formatBuilder.setSampleRate(clockRate).setChannelCount(channelCount); + } + + ImmutableMap fmtpParameters = mediaDescription.getFmtpParametersAsMap(); + switch (mimeType) { + case MimeTypes.AUDIO_AAC: + checkArgument(channelCount != C.INDEX_UNSET); + checkArgument(!fmtpParameters.isEmpty()); + if (mediaEncoding.equals(RtpPayloadFormat.RTP_MEDIA_MPEG4_LATM_AUDIO)) { + // cpresent is defined in RFC3016 Section 5.3. cpresent=0 means the config fmtp parameter + // must exist. + checkArgument( + fmtpParameters.containsKey(PARAMETER_MP4A_C_PRESENT) + && fmtpParameters.get(PARAMETER_MP4A_C_PRESENT).equals("0"), + "Only supports cpresent=0 in AAC audio."); + @Nullable String config = fmtpParameters.get(PARAMETER_MP4A_CONFIG); + checkNotNull(config, "AAC audio stream must include config fmtp parameter"); + // config is a hex string. 
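The config fmtp value checked below is a hex string carrying the AAC StreamMuxConfig; a standalone sketch of the decode step that the even-length check guards (illustrative only, the real code uses Util.getBytesFromHexString):

final class HexConfigSketch {
  static byte[] decodeHex(String hex) {
    if (hex.length() % 2 != 0) {
      throw new IllegalArgumentException("Malformed config: " + hex);
    }
    byte[] out = new byte[hex.length() / 2];
    for (int i = 0; i < out.length; i++) {
      out[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
    }
    return out;
  }
}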
+ checkArgument(config.length() % 2 == 0, "Malformat MPEG4 config: " + config); + AacUtil.Config aacConfig = parseAacStreamMuxConfig(config); + formatBuilder + .setSampleRate(aacConfig.sampleRateHz) + .setChannelCount(aacConfig.channelCount) + .setCodecs(aacConfig.codecs); + } + processAacFmtpAttribute(formatBuilder, fmtpParameters, channelCount, clockRate); + break; + case MimeTypes.AUDIO_AMR_NB: + case MimeTypes.AUDIO_AMR_WB: + checkArgument(channelCount == 1, "Multi channel AMR is not currently supported."); + checkArgument( + !fmtpParameters.isEmpty(), + "fmtp parameters must include " + PARAMETER_AMR_OCTET_ALIGN + "."); + checkArgument( + fmtpParameters.containsKey(PARAMETER_AMR_OCTET_ALIGN), + "Only octet aligned mode is currently supported."); + checkArgument( + !fmtpParameters.containsKey(PARAMETER_AMR_INTERLEAVING), + "Interleaving mode is not currently supported."); + break; + case MimeTypes.AUDIO_OPUS: + checkArgument(channelCount != C.INDEX_UNSET); + // RFC7587 Section 6.1: the RTP timestamp is incremented with a 48000 Hz clock rate + // for all modes of Opus and all sampling rates. + checkArgument(clockRate == OPUS_CLOCK_RATE, "Invalid OPUS clock rate."); + break; + case MimeTypes.VIDEO_MP4V: + checkArgument(!fmtpParameters.isEmpty()); + processMPEG4FmtpAttribute(formatBuilder, fmtpParameters); + break; + case MimeTypes.VIDEO_H263: + // H263 never uses fmtp width and height attributes (RFC4629 Section 8.2), setting default + // width and height. + formatBuilder.setWidth(DEFAULT_H263_WIDTH).setHeight(DEFAULT_H263_HEIGHT); + break; + case MimeTypes.VIDEO_H264: + checkArgument(!fmtpParameters.isEmpty()); + processH264FmtpAttribute(formatBuilder, fmtpParameters); + break; + case MimeTypes.VIDEO_H265: + checkArgument(!fmtpParameters.isEmpty()); + processH265FmtpAttribute(formatBuilder, fmtpParameters); + break; + case MimeTypes.VIDEO_VP8: + // VP8 never uses fmtp width and height attributes (RFC7741 Section 6.2), setting default + // width and height. + formatBuilder.setWidth(DEFAULT_VP8_WIDTH).setHeight(DEFAULT_VP8_HEIGHT); + break; + case MimeTypes.VIDEO_VP9: + // VP9 never uses fmtp width and height attributes, setting default width and height. + formatBuilder.setWidth(DEFAULT_VP9_WIDTH).setHeight(DEFAULT_VP9_HEIGHT); + break; + case MimeTypes.AUDIO_RAW: + formatBuilder.setPcmEncoding(RtpPayloadFormat.getRawPcmEncodingType(mediaEncoding)); + break; + case MimeTypes.AUDIO_AC3: + case MimeTypes.AUDIO_ALAW: + case MimeTypes.AUDIO_MLAW: + // Does not require a fmtp attribute. Fall through. + default: + // Do nothing. + } + + checkArgument(clockRate > 0); + return new RtpPayloadFormat( + formatBuilder.build(), rtpPayloadType, clockRate, fmtpParameters, mediaEncoding); + } + + private static int inferChannelCount(int encodingParameter, String mimeType) { + if (encodingParameter != C.INDEX_UNSET) { + // The encoding parameter specifies the number of channels in audio streams when + // present. If omitted, the number of channels is one. This parameter has no significance in + // video streams. (RFC2327 Page 22). + return encodingParameter; + } + + if (mimeType.equals(MimeTypes.AUDIO_AC3)) { + // If RTPMAP attribute does not include channel count for AC3, default to 6. 
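In SDP rtpmap attributes the optional third field is the channel count, for example L16/44100/2 declares 2 channels while AC3/48000 omits it; the surrounding method then falls back to 6 channels for AC-3 and 1 otherwise. A tiny sketch of that rule, not part of the patch:

final class RtpMapChannelCountSketch {
  /** encodingAttribute is the part after the payload type, e.g. "L16/44100/2" or "AC3/48000". */
  static int channelCount(String encodingAttribute, boolean isAc3) {
    String[] parts = encodingAttribute.split("/");
    if (parts.length >= 3) {
      return Integer.parseInt(parts[2]); // explicit encoding-parameters field
    }
    return isAc3 ? 6 : 1; // mirrors the fallback in the surrounding inferChannelCount method
  }
}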
+ return 6; + } + + return 1; + } + + private static void processAacFmtpAttribute( + Format.Builder formatBuilder, + ImmutableMap fmtpAttributes, + int channelCount, + int sampleRate) { + checkArgument(fmtpAttributes.containsKey(PARAMETER_PROFILE_LEVEL_ID)); + String profileLevel = checkNotNull(fmtpAttributes.get(PARAMETER_PROFILE_LEVEL_ID)); + formatBuilder.setCodecs(AAC_CODECS_PREFIX + profileLevel); + formatBuilder.setInitializationData( + ImmutableList.of( + // Clock rate equals to sample rate in RTP. + AacUtil.buildAacLcAudioSpecificConfig(sampleRate, channelCount))); + } + + /** + * Returns the {@link AacUtil.Config} by parsing the MPEG4 Audio Stream Mux configuration. + * + *

        fmtp attribute {@code config} includes the MPEG4 Audio Stream Mux configuration + * (ISO/IEC14496-3, Chapter 1.7.3). + */ + private static AacUtil.Config parseAacStreamMuxConfig(String streamMuxConfig) { + ParsableBitArray config = new ParsableBitArray(Util.getBytesFromHexString(streamMuxConfig)); + checkArgument(config.readBits(1) == 0, "Only supports audio mux version 0."); + checkArgument(config.readBits(1) == 1, "Only supports allStreamsSameTimeFraming."); + config.skipBits(6); + checkArgument(config.readBits(4) == 0, "Only supports one program."); + checkArgument(config.readBits(3) == 0, "Only supports one numLayer."); + try { + return AacUtil.parseAudioSpecificConfig(config, false); + } catch (ParserException e) { + throw new IllegalArgumentException(e); + } + } + + private static void processMPEG4FmtpAttribute( + Format.Builder formatBuilder, ImmutableMap fmtpAttributes) { + @Nullable String configInput = fmtpAttributes.get(PARAMETER_MP4A_CONFIG); + if (configInput != null) { + byte[] configBuffer = Util.getBytesFromHexString(configInput); + formatBuilder.setInitializationData(ImmutableList.of(configBuffer)); + Pair resolution = + CodecSpecificDataUtil.getVideoResolutionFromMpeg4VideoConfig(configBuffer); + formatBuilder.setWidth(resolution.first).setHeight(resolution.second); + } else { + // set the default width and height + formatBuilder.setWidth(DEFAULT_MP4V_WIDTH).setHeight(DEFAULT_MP4V_HEIGHT); + } + @Nullable String profileLevel = fmtpAttributes.get(PARAMETER_PROFILE_LEVEL_ID); + formatBuilder.setCodecs(MPEG4_CODECS_PREFIX + (profileLevel == null ? "1" : profileLevel)); + } + + /** Returns H264/H265 initialization data from the RTP parameter set. */ + private static byte[] getInitializationDataFromParameterSet(String parameterSet) { + byte[] decodedParameterNalData = Base64.decode(parameterSet, Base64.DEFAULT); + byte[] decodedParameterNalUnit = + new byte[decodedParameterNalData.length + NAL_START_CODE.length]; + System.arraycopy( + NAL_START_CODE, + /* srcPos= */ 0, + decodedParameterNalUnit, + /* destPos= */ 0, + NAL_START_CODE.length); + System.arraycopy( + decodedParameterNalData, + /* srcPos= */ 0, + decodedParameterNalUnit, + /* destPos= */ NAL_START_CODE.length, + decodedParameterNalData.length); + return decodedParameterNalUnit; + } + + private static void processH264FmtpAttribute( + Format.Builder formatBuilder, ImmutableMap fmtpAttributes) { + checkArgument(fmtpAttributes.containsKey(PARAMETER_SPROP_PARAMS)); + String spropParameterSets = checkNotNull(fmtpAttributes.get(PARAMETER_SPROP_PARAMS)); + String[] parameterSets = Util.split(spropParameterSets, ","); + checkArgument(parameterSets.length == 2); + ImmutableList initializationData = + ImmutableList.of( + getInitializationDataFromParameterSet(parameterSets[0]), + getInitializationDataFromParameterSet(parameterSets[1])); + formatBuilder.setInitializationData(initializationData); + + // Process SPS (Sequence Parameter Set). 
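To make the sprop handling above concrete: each comma-separated sprop value is Base64, and the decoded parameter set is prefixed with the four-byte Annex B start code before being used as decoder initialization data. A standalone sketch, not part of the patch, with a made-up sprop string:

import java.util.Base64;

final class SpropParameterSetSketch {
  private static final byte[] NAL_START_CODE = {0, 0, 0, 1};

  static byte[] toAnnexBNalUnit(String base64ParameterSet) {
    byte[] nal = Base64.getDecoder().decode(base64ParameterSet);
    byte[] out = new byte[NAL_START_CODE.length + nal.length];
    System.arraycopy(NAL_START_CODE, 0, out, 0, NAL_START_CODE.length);
    System.arraycopy(nal, 0, out, NAL_START_CODE.length, nal.length);
    return out;
  }

  public static void main(String[] args) {
    String sprop = "Z0LAHtkDxWhAAAADAEAAAAwDxYuS,aMuMsg=="; // made-up "SPS,PPS" value
    for (String parameterSet : sprop.split(",")) {
      System.out.println(toAnnexBNalUnit(parameterSet).length + " bytes");
    }
  }
}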
+ byte[] spsNalDataWithStartCode = initializationData.get(0); + NalUnitUtil.SpsData spsData = + NalUnitUtil.parseSpsNalUnit( + spsNalDataWithStartCode, NAL_START_CODE.length, spsNalDataWithStartCode.length); + formatBuilder.setPixelWidthHeightRatio(spsData.pixelWidthHeightRatio); + formatBuilder.setHeight(spsData.height); + formatBuilder.setWidth(spsData.width); + + @Nullable String profileLevel = fmtpAttributes.get(PARAMETER_PROFILE_LEVEL_ID); + if (profileLevel != null) { + formatBuilder.setCodecs(H264_CODECS_PREFIX + profileLevel); + } else { + formatBuilder.setCodecs( + CodecSpecificDataUtil.buildAvcCodecString( + spsData.profileIdc, spsData.constraintsFlagsAndReservedZero2Bits, spsData.levelIdc)); + } + } + + private static void processH265FmtpAttribute( + Format.Builder formatBuilder, ImmutableMap fmtpAttributes) { + if (fmtpAttributes.containsKey(PARAMETER_H265_SPROP_MAX_DON_DIFF)) { + int maxDonDiff = + Integer.parseInt(checkNotNull(fmtpAttributes.get(PARAMETER_H265_SPROP_MAX_DON_DIFF))); + checkArgument( + maxDonDiff == 0, "non-zero sprop-max-don-diff " + maxDonDiff + " is not supported"); + } + + checkArgument(fmtpAttributes.containsKey(PARAMETER_H265_SPROP_VPS)); + String spropVPS = checkNotNull(fmtpAttributes.get(PARAMETER_H265_SPROP_VPS)); + checkArgument(fmtpAttributes.containsKey(PARAMETER_H265_SPROP_SPS)); + String spropSPS = checkNotNull(fmtpAttributes.get(PARAMETER_H265_SPROP_SPS)); + checkArgument(fmtpAttributes.containsKey(PARAMETER_H265_SPROP_PPS)); + String spropPPS = checkNotNull(fmtpAttributes.get(PARAMETER_H265_SPROP_PPS)); + ImmutableList initializationData = + ImmutableList.of( + getInitializationDataFromParameterSet(spropVPS), + getInitializationDataFromParameterSet(spropSPS), + getInitializationDataFromParameterSet(spropPPS)); + formatBuilder.setInitializationData(initializationData); + + // Process the SPS (Sequence Parameter Set). + byte[] spsNalDataWithStartCode = initializationData.get(1); + NalUnitUtil.H265SpsData spsData = + NalUnitUtil.parseH265SpsNalUnit( + spsNalDataWithStartCode, NAL_START_CODE.length, spsNalDataWithStartCode.length); + formatBuilder.setPixelWidthHeightRatio(spsData.pixelWidthHeightRatio); + formatBuilder.setHeight(spsData.height).setWidth(spsData.width); + + formatBuilder.setCodecs( + CodecSpecificDataUtil.buildHevcCodecString( + spsData.generalProfileSpace, + spsData.generalTierFlag, + spsData.generalProfileIdc, + spsData.generalProfileCompatibilityFlags, + spsData.constraintBytes, + spsData.generalLevelIdc)); + } + + /** + * Extracts the track URI. + * + *

        The processing logic is specified in RFC2326 Section C.1.1. + * + * @param sessionUri The session URI. + * @param controlAttributeString The control attribute from the track's {@link MediaDescription}. + * @return The extracted track URI. + */ + private static Uri extractTrackUri(Uri sessionUri, String controlAttributeString) { + Uri controlAttributeUri = Uri.parse(controlAttributeString); + if (controlAttributeUri.isAbsolute()) { + return controlAttributeUri; + } else if (controlAttributeString.equals(GENERIC_CONTROL_ATTR)) { + return sessionUri; + } else { + return sessionUri.buildUpon().appendEncodedPath(controlAttributeString).build(); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspMessageChannel.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspMessageChannel.java new file mode 100644 index 0000000000..8aa97986e9 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspMessageChannel.java @@ -0,0 +1,510 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.source.rtsp.RtspMessageUtil.isRtspStartLine; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.os.Handler; +import android.os.HandlerThread; +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.upstream.Loader; +import com.google.android.exoplayer2.upstream.Loader.LoadErrorAction; +import com.google.android.exoplayer2.upstream.Loader.Loadable; +import com.google.common.base.Ascii; +import com.google.common.base.Charsets; +import com.google.common.collect.ImmutableList; +import com.google.common.primitives.Ints; +import java.io.ByteArrayOutputStream; +import java.io.Closeable; +import java.io.DataInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.net.Socket; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** Sends and receives RTSP messages. */ +/* package */ final class RtspMessageChannel implements Closeable { + + /** RTSP uses UTF-8 (RFC2326 Section 1.1). 
*/ + public static final Charset CHARSET = Charsets.UTF_8; + + private static final String TAG = "RtspMessageChannel"; + + /** A listener for received RTSP messages and possible failures. */ + public interface MessageListener { + + /** + * Called when an RTSP message is received. + * + * @param message The non-empty list of received lines, with line terminators removed. + */ + void onRtspMessageReceived(List message); + + /** + * Called when failed to send an RTSP message. + * + * @param message The list of lines making up the RTSP message that is failed to send. + * @param e The thrown {@link Exception}. + */ + default void onSendingFailed(List message, Exception e) {} + + /** + * Called when failed to receive an RTSP message. + * + * @param e The thrown {@link Exception}. + */ + default void onReceivingFailed(Exception e) {} + } + + /** A listener for received interleaved binary data from RTSP. */ + public interface InterleavedBinaryDataListener { + + /** + * Called when interleaved binary data is received on RTSP. + * + * @param data The received binary data. The byte array will not be reused by {@link + * RtspMessageChannel}, and will always be full. + */ + void onInterleavedBinaryDataReceived(byte[] data); + } + + /** + * The IANA-registered default port for RTSP. See here + */ + public static final int DEFAULT_RTSP_PORT = 554; + + private final MessageListener messageListener; + private final Loader receiverLoader; + private final Map interleavedBinaryDataListeners; + private @MonotonicNonNull Sender sender; + private @MonotonicNonNull Socket socket; + + private volatile boolean closed; + + /** + * Constructs a new instance. + * + *

        A connected {@link Socket} must be provided in {@link #open} in order to send and receive + * RTSP messages. {@link #close} must be called when done, which would also close the socket. + * + *

        {@link MessageListener} and {@link InterleavedBinaryDataListener} implementations must not + * make assumptions about which thread called their listener methods; and must be thread-safe. + * + *

        Note: all method invocations must be made from the thread on which this class is created. + * + * @param messageListener The {@link MessageListener} to receive events. + */ + public RtspMessageChannel(MessageListener messageListener) { + this.messageListener = messageListener; + this.receiverLoader = new Loader("ExoPlayer:RtspMessageChannel:ReceiverLoader"); + this.interleavedBinaryDataListeners = Collections.synchronizedMap(new HashMap<>()); + } + + /** + * Opens the message channel to send and receive RTSP messages. + * + *

        Note: If an {@link IOException} is thrown, callers must still call {@link #close()} to + * ensure that any partial effects of the invocation are cleaned up. + * + * @param socket A connected {@link Socket}. + */ + public void open(Socket socket) throws IOException { + this.socket = socket; + sender = new Sender(socket.getOutputStream()); + + receiverLoader.startLoading( + new Receiver(socket.getInputStream()), + new LoaderCallbackImpl(), + /* defaultMinRetryCount= */ 0); + } + + /** + * Closes the RTSP message channel. + * + *

        The closed instance must not be re-opened again. The {@link MessageListener} will not + * receive further messages after closing. + * + * @throws IOException If an error occurs closing the message channel. + */ + @Override + public void close() throws IOException { + // TODO(internal b/172331505) Make sure most resources are closed before throwing, and close() + // can be called again to close the resources that are still open. + if (closed) { + return; + } + try { + if (sender != null) { + sender.close(); + } + receiverLoader.release(); + + if (socket != null) { + socket.close(); + } + } finally { + closed = true; + } + } + + /** + * Sends a serialized RTSP message. + * + * @param message The list of strings representing the serialized RTSP message. + */ + public void send(List message) { + checkStateNotNull(sender); + sender.send(message); + } + + /** + * Registers an {@link InterleavedBinaryDataListener} to receive RTSP interleaved data. + * + *

        The listener method {@link InterleavedBinaryDataListener#onInterleavedBinaryDataReceived} is + * called on {@link RtspMessageChannel}'s internal thread for receiving RTSP messages. + */ + public void registerInterleavedBinaryDataListener( + int channel, InterleavedBinaryDataListener listener) { + interleavedBinaryDataListeners.put(channel, listener); + } + + private final class Sender implements Closeable { + + private final OutputStream outputStream; + private final HandlerThread senderThread; + private final Handler senderThreadHandler; + + /** + * Creates a new instance. + * + * @param outputStream The {@link OutputStream} of the opened RTSP {@link Socket}, to which the + * request is sent. The caller needs to close the {@link OutputStream}. + */ + public Sender(OutputStream outputStream) { + this.outputStream = outputStream; + this.senderThread = new HandlerThread("ExoPlayer:RtspMessageChannel:Sender"); + this.senderThread.start(); + this.senderThreadHandler = new Handler(this.senderThread.getLooper()); + } + + /** + * Sends out RTSP messages that are in the forms of lists of strings. + * + *

        If {@link Exception} is thrown while sending, the message {@link + * MessageListener#onSendingFailed} is dispatched to the thread that created the {@link + * RtspMessageChannel}. + * + * @param message The must of strings representing the serialized RTSP message. + */ + public void send(List message) { + byte[] data = RtspMessageUtil.convertMessageToByteArray(message); + senderThreadHandler.post( + () -> { + try { + outputStream.write(data); + } catch (Exception e) { + if (!closed) { + messageListener.onSendingFailed(message, e); + } + } + }); + } + + @Override + public void close() { + senderThreadHandler.post(senderThread::quit); + try { + // Waits until all the messages posted to the sender thread are handled. + senderThread.join(); + } catch (InterruptedException e) { + senderThread.interrupt(); + } + } + } + + /** A {@link Loadable} for receiving RTSP responses. */ + private final class Receiver implements Loadable { + + /** ASCII dollar encapsulates the RTP packets in interleaved mode (RFC2326 Section 10.12). */ + private static final byte INTERLEAVED_MESSAGE_MARKER = '$'; + + private final DataInputStream dataInputStream; + private final MessageParser messageParser; + private volatile boolean loadCanceled; + + /** + * Creates a new instance. + * + * @param inputStream The {@link InputStream} of the opened RTSP {@link Socket}, from which the + * {@link RtspResponse RtspResponses} are received. The caller needs to close the {@link + * InputStream}. + */ + public Receiver(InputStream inputStream) { + dataInputStream = new DataInputStream(inputStream); + messageParser = new MessageParser(); + } + + @Override + public void cancelLoad() { + loadCanceled = true; + } + + @Override + public void load() throws IOException { + while (!loadCanceled) { + byte firstByte = dataInputStream.readByte(); + if (firstByte == INTERLEAVED_MESSAGE_MARKER) { + handleInterleavedBinaryData(); + } else { + handleRtspMessage(firstByte); + } + } + } + + /** Handles an entire RTSP message. */ + private void handleRtspMessage(byte firstByte) throws IOException { + if (!closed) { + messageListener.onRtspMessageReceived(messageParser.parseNext(firstByte, dataInputStream)); + } + } + + private void handleInterleavedBinaryData() throws IOException { + int channel = dataInputStream.readUnsignedByte(); + int size = dataInputStream.readUnsignedShort(); + byte[] data = new byte[size]; + dataInputStream.readFully(data, /* off= */ 0, size); + + @Nullable + InterleavedBinaryDataListener listener = interleavedBinaryDataListeners.get(channel); + if (listener != null && !closed) { + listener.onInterleavedBinaryDataReceived(data); + } + } + } + + private final class LoaderCallbackImpl implements Loader.Callback { + @Override + public void onLoadCompleted(Receiver loadable, long elapsedRealtimeMs, long loadDurationMs) {} + + @Override + public void onLoadCanceled( + Receiver loadable, long elapsedRealtimeMs, long loadDurationMs, boolean released) {} + + @Override + public LoadErrorAction onLoadError( + Receiver loadable, + long elapsedRealtimeMs, + long loadDurationMs, + IOException error, + int errorCount) { + if (!closed) { + messageListener.onReceivingFailed(error); + } + return Loader.DONT_RETRY; + } + } + + /** Processes RTSP messages line-by-line. 
*/ + private static final class MessageParser { + + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({STATE_READING_FIRST_LINE, STATE_READING_HEADER, STATE_READING_BODY}) + @interface ReadingState {} + + private static final int STATE_READING_FIRST_LINE = 1; + private static final int STATE_READING_HEADER = 2; + private static final int STATE_READING_BODY = 3; + + private final List messageLines; + + private @ReadingState int state; + private long messageBodyLength; + + /** Creates a new instance. */ + public MessageParser() { + messageLines = new ArrayList<>(); + state = STATE_READING_FIRST_LINE; + } + + /** + * Receives and parses an entire RTSP message. + * + * @param firstByte The first byte received for the RTSP message. + * @param dataInputStream The {@link DataInputStream} on which RTSP messages are received. + * @return An {@link ImmutableList} of the lines that make up an RTSP message. + */ + public ImmutableList parseNext(byte firstByte, DataInputStream dataInputStream) + throws IOException { + @Nullable + ImmutableList parsedMessageLines = + addMessageLine(parseNextLine(firstByte, dataInputStream)); + + while (parsedMessageLines == null) { + if (state == STATE_READING_BODY) { + if (messageBodyLength > 0) { + // Message body's format is not regulated under RTSP, so it could use LF (instead of + // RTSP's CRLF) as line ending. The length of the message body is included in the RTSP + // Content-Length header. + // Assume the message body length is within a 32-bit integer. + int messageBodyLengthInt = Ints.checkedCast(messageBodyLength); + checkState(messageBodyLengthInt != C.LENGTH_UNSET); + byte[] messageBodyBytes = new byte[messageBodyLengthInt]; + dataInputStream.readFully(messageBodyBytes, /* off= */ 0, messageBodyLengthInt); + parsedMessageLines = addMessageBody(messageBodyBytes); + } else { + throw new IllegalStateException("Expects a greater than zero Content-Length."); + } + } else { + parsedMessageLines = + addMessageLine(parseNextLine(dataInputStream.readByte(), dataInputStream)); + } + } + return parsedMessageLines; + } + + /** Returns the byte representation of a complete RTSP line, with CRLF line terminator. */ + private static byte[] parseNextLine(byte firstByte, DataInputStream dataInputStream) + throws IOException { + ByteArrayOutputStream messageByteStream = new ByteArrayOutputStream(); + + byte[] peekedBytes = new byte[2]; + peekedBytes[0] = firstByte; + peekedBytes[1] = dataInputStream.readByte(); + messageByteStream.write(peekedBytes); + + while (peekedBytes[0] != Ascii.CR || peekedBytes[1] != Ascii.LF) { + // Shift the CRLF buffer. + peekedBytes[0] = peekedBytes[1]; + peekedBytes[1] = dataInputStream.readByte(); + messageByteStream.write(peekedBytes[1]); + } + + return messageByteStream.toByteArray(); + } + + /** + * Returns a list of completed RTSP message lines, without the CRLF line terminators; or {@code + * null} if the message is not yet complete. + */ + @Nullable + private ImmutableList addMessageLine(byte[] lineBytes) throws ParserException { + // Trim CRLF. RTSP lists are terminated by a CRLF. 
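The framing rule the parser implements, in one small runnable sketch that is not part of the patch: header lines are CRLF-terminated, an empty line closes the header section, and if a Content-Length header was present exactly that many bytes of body follow.

final class RtspFramingSketch {
  public static void main(String[] args) {
    String response =
        "RTSP/1.0 200 OK\r\n"
            + "CSeq: 2\r\n"
            + "Content-Length: 15\r\n"
            + "\r\n"
            + "v=0\r\no=- 0 0 IN"; // 15 bytes of truncated, illustrative SDP body
    int bodyStart = response.indexOf("\r\n\r\n") + 4;
    System.out.println("body = [" + response.substring(bodyStart) + "]");
  }
}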
+ checkArgument( + lineBytes.length >= 2 + && lineBytes[lineBytes.length - 2] == Ascii.CR + && lineBytes[lineBytes.length - 1] == Ascii.LF); + String line = + new String(lineBytes, /* offset= */ 0, /* length= */ lineBytes.length - 2, CHARSET); + messageLines.add(line); + + switch (state) { + case STATE_READING_FIRST_LINE: + if (isRtspStartLine(line)) { + state = STATE_READING_HEADER; + } + break; + + case STATE_READING_HEADER: + // Check if the line contains RTSP Content-Length header. + long contentLength = RtspMessageUtil.parseContentLengthHeader(line); + if (contentLength != C.LENGTH_UNSET) { + messageBodyLength = contentLength; + } + + if (line.isEmpty()) { + // An empty line signals the end of the header section. + if (messageBodyLength > 0) { + state = STATE_READING_BODY; + } else { + ImmutableList linesToReturn = ImmutableList.copyOf(messageLines); + reset(); + return linesToReturn; + } + } + break; + + case STATE_READING_BODY: + // Message body must be handled by addMessageBody(). + + default: + throw new IllegalStateException(); + } + return null; + } + + /** Returns a list of completed RTSP message lines, without the line terminators. */ + private ImmutableList addMessageBody(byte[] messageBodyBytes) { + checkState(state == STATE_READING_BODY); + + String messageBody; + if (messageBodyBytes.length > 0 + && messageBodyBytes[messageBodyBytes.length - 1] == Ascii.LF) { + if (messageBodyBytes.length > 1 + && messageBodyBytes[messageBodyBytes.length - 2] == Ascii.CR) { + // Line ends with CRLF. + messageBody = + new String( + messageBodyBytes, + /* offset= */ 0, + /* length= */ messageBodyBytes.length - 2, + CHARSET); + } else { + // Line ends with LF. + messageBody = + new String( + messageBodyBytes, + /* offset= */ 0, + /* length= */ messageBodyBytes.length - 1, + CHARSET); + } + } else { + throw new IllegalArgumentException("Message body is empty or does not end with a LF."); + } + + messageLines.add(messageBody); + ImmutableList linesToReturn = ImmutableList.copyOf(messageLines); + reset(); + return linesToReturn; + } + + private void reset() { + messageLines.clear(); + state = STATE_READING_FIRST_LINE; + messageBodyLength = 0; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspMessageUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspMessageUtil.java new file mode 100644 index 0000000000..76b4beae9c --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspMessageUtil.java @@ -0,0 +1,523 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_ANNOUNCE; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_DESCRIBE; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_GET_PARAMETER; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_OPTIONS; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_PAUSE; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_PLAY; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_PLAY_NOTIFY; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_RECORD; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_REDIRECT; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_SETUP; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_SET_PARAMETER; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_TEARDOWN; +import static com.google.android.exoplayer2.source.rtsp.RtspRequest.METHOD_UNSET; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.common.base.Strings.nullToEmpty; +import static java.util.regex.Pattern.CASE_INSENSITIVE; + +import android.net.Uri; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Ascii; +import com.google.common.base.Joiner; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableListMultimap; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** Utility methods for RTSP messages. */ +/* package */ final class RtspMessageUtil { + /** Represents a RTSP Session header (RFC2326 Section 12.37). */ + public static final class RtspSessionHeader { + /** The session ID. */ + public final String sessionId; + /** + * The session timeout, measured in milliseconds, {@link #DEFAULT_RTSP_TIMEOUT_MS} if not + * specified in the Session header. + */ + public final long timeoutMs; + + /** Creates a new instance. */ + public RtspSessionHeader(String sessionId, long timeoutMs) { + this.sessionId = sessionId; + this.timeoutMs = timeoutMs; + } + } + + /** Wraps username and password for authentication purposes. */ + public static final class RtspAuthUserInfo { + /** The username. */ + public final String username; + /** The password. */ + public final String password; + + /** Creates a new instance. */ + public RtspAuthUserInfo(String username, String password) { + this.username = username; + this.password = password; + } + } + + /** The default timeout, in milliseconds, defined for RTSP (RFC2326 Section 12.37). */ + public static final long DEFAULT_RTSP_TIMEOUT_MS = 60_000; + + // Status line pattern, see RFC2326 Section 6.1. + private static final Pattern REQUEST_LINE_PATTERN = Pattern.compile("([A-Z_]+) (.*) RTSP/1\\.0"); + + // Status line pattern, see RFC2326 Section 7.1. + private static final Pattern STATUS_LINE_PATTERN = Pattern.compile("RTSP/1\\.0 (\\d+) (.+)"); + + // Content length header pattern, see RFC2326 Section 12.14. 
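+  // Example: the header line "Content-Length: 472" yields a parsed length of 472 bytes.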
+ private static final Pattern CONTENT_LENGTH_HEADER_PATTERN = + Pattern.compile("Content-Length:\\s?(\\d+)", CASE_INSENSITIVE); + + // Session header pattern, see RFC2326 Sections 3.4 and 12.37. + private static final Pattern SESSION_HEADER_PATTERN = + Pattern.compile("([\\w$\\-_.+]+)(?:;\\s?timeout=(\\d+))?"); + + // WWW-Authenticate header pattern, see RFC2068 Sections 14.46 and RFC2069. + private static final Pattern WWW_AUTHENTICATION_HEADER_DIGEST_PATTERN = + Pattern.compile( + "Digest realm=\"([^\"\\x00-\\x08\\x0A-\\x1f\\x7f]+)\"" + + ",\\s?(?:domain=\"(.+)\"" + + ",\\s?)?nonce=\"([^\"\\x00-\\x08\\x0A-\\x1f\\x7f]+)\"" + + "(?:,\\s?opaque=\"([^\"\\x00-\\x08\\x0A-\\x1f\\x7f]+)\")?"); + // WWW-Authenticate header pattern, see RFC2068 Section 11.1 and RFC2069. + private static final Pattern WWW_AUTHENTICATION_HEADER_BASIC_PATTERN = + Pattern.compile("Basic realm=\"([^\"\\x00-\\x08\\x0A-\\x1f\\x7f]+)\""); + + private static final String RTSP_VERSION = "RTSP/1.0"; + private static final String LF = new String(new byte[] {Ascii.LF}); + private static final String CRLF = new String(new byte[] {Ascii.CR, Ascii.LF}); + + /** + * Serializes an {@link RtspRequest} to an {@link ImmutableList} of strings. + * + *

        The {@link RtspRequest} must include the {@link RtspHeaders#CSEQ} header, or this method + * throws {@link IllegalArgumentException}. + * + * @param request The {@link RtspRequest}. + * @return A list of the lines of the {@link RtspRequest}, without line terminators (CRLF). + */ + public static ImmutableList serializeRequest(RtspRequest request) { + checkArgument(request.headers.get(RtspHeaders.CSEQ) != null); + + ImmutableList.Builder builder = new ImmutableList.Builder<>(); + // Request line. + builder.add( + Util.formatInvariant( + "%s %s %s", toMethodString(request.method), request.uri, RTSP_VERSION)); + + ImmutableListMultimap headers = request.headers.asMultiMap(); + for (String headerName : headers.keySet()) { + ImmutableList headerValuesForName = headers.get(headerName); + for (int i = 0; i < headerValuesForName.size(); i++) { + builder.add(Util.formatInvariant("%s: %s", headerName, headerValuesForName.get(i))); + } + } + // Empty line after headers. + builder.add(""); + builder.add(request.messageBody); + return builder.build(); + } + + /** + * Serializes an {@link RtspResponse} to an {@link ImmutableList} of strings. + * + *

        The {@link RtspResponse} must include the {@link RtspHeaders#CSEQ} header, or this method + * throws {@link IllegalArgumentException}. + * + * @param response The {@link RtspResponse}. + * @return A list of the lines of the {@link RtspResponse}, without line terminators (CRLF). + */ + public static ImmutableList serializeResponse(RtspResponse response) { + checkArgument(response.headers.get(RtspHeaders.CSEQ) != null); + + ImmutableList.Builder builder = new ImmutableList.Builder<>(); + // Request line. + builder.add( + Util.formatInvariant( + "%s %s %s", RTSP_VERSION, response.status, getRtspStatusReasonPhrase(response.status))); + + ImmutableListMultimap headers = response.headers.asMultiMap(); + for (String headerName : headers.keySet()) { + ImmutableList headerValuesForName = headers.get(headerName); + for (int i = 0; i < headerValuesForName.size(); i++) { + builder.add(Util.formatInvariant("%s: %s", headerName, headerValuesForName.get(i))); + } + } + // Empty line after headers. + builder.add(""); + builder.add(response.messageBody); + return builder.build(); + } + + /** + * Converts an RTSP message to a byte array. + * + * @param message The non-empty list of the lines of an RTSP message, with line terminators + * removed. + */ + public static byte[] convertMessageToByteArray(List message) { + return Joiner.on(CRLF).join(message).getBytes(RtspMessageChannel.CHARSET); + } + + /** Removes the user info from the supplied {@link Uri}. */ + public static Uri removeUserInfo(Uri uri) { + if (uri.getUserInfo() == null) { + return uri; + } + + // The Uri must include a "@" if the user info is non-null. + String authorityWithUserInfo = checkNotNull(uri.getAuthority()); + checkArgument(authorityWithUserInfo.contains("@")); + String authority = Util.split(authorityWithUserInfo, "@")[1]; + return uri.buildUpon().encodedAuthority(authority).build(); + } + + /** + * Parses the user info encapsulated in the RTSP {@link Uri}. + * + * @param uri The {@link Uri}. + * @return The extracted {@link RtspAuthUserInfo}, {@code null} if the argument {@link Uri} does + * not contain userinfo, or it's not properly formatted. + */ + @Nullable + public static RtspAuthUserInfo parseUserInfo(Uri uri) { + @Nullable String userInfo = uri.getUserInfo(); + if (userInfo == null) { + return null; + } + if (userInfo.contains(":")) { + String[] userInfoStrings = Util.splitAtFirst(userInfo, ":"); + return new RtspAuthUserInfo(userInfoStrings[0], userInfoStrings[1]); + } + return null; + } + + /** Returns the byte array representation of a string, using RTSP's character encoding. */ + public static byte[] getStringBytes(String s) { + return s.getBytes(RtspMessageChannel.CHARSET); + } + + /** Returns the corresponding String representation of the {@link RtspRequest.Method} argument. 
*/ + public static String toMethodString(@RtspRequest.Method int method) { + switch (method) { + case METHOD_ANNOUNCE: + return "ANNOUNCE"; + case METHOD_DESCRIBE: + return "DESCRIBE"; + case METHOD_GET_PARAMETER: + return "GET_PARAMETER"; + case METHOD_OPTIONS: + return "OPTIONS"; + case METHOD_PAUSE: + return "PAUSE"; + case METHOD_PLAY: + return "PLAY"; + case METHOD_PLAY_NOTIFY: + return "PLAY_NOTIFY"; + case METHOD_RECORD: + return "RECORD"; + case METHOD_REDIRECT: + return "REDIRECT"; + case METHOD_SETUP: + return "SETUP"; + case METHOD_SET_PARAMETER: + return "SET_PARAMETER"; + case METHOD_TEARDOWN: + return "TEARDOWN"; + case METHOD_UNSET: + default: + throw new IllegalStateException(); + } + } + + private static @RtspRequest.Method int parseMethodString(String method) { + switch (method) { + case "ANNOUNCE": + return METHOD_ANNOUNCE; + case "DESCRIBE": + return METHOD_DESCRIBE; + case "GET_PARAMETER": + return METHOD_GET_PARAMETER; + case "OPTIONS": + return METHOD_OPTIONS; + case "PAUSE": + return METHOD_PAUSE; + case "PLAY": + return METHOD_PLAY; + case "PLAY_NOTIFY": + return METHOD_PLAY_NOTIFY; + case "RECORD": + return METHOD_RECORD; + case "REDIRECT": + return METHOD_REDIRECT; + case "SETUP": + return METHOD_SETUP; + case "SET_PARAMETER": + return METHOD_SET_PARAMETER; + case "TEARDOWN": + return METHOD_TEARDOWN; + default: + throw new IllegalArgumentException(); + } + } + + /** + * Parses lines of a received RTSP response into an {@link RtspResponse} instance. + * + * @param lines The non-empty list of received lines, with line terminators removed. + * @return The parsed {@link RtspResponse} object. + */ + public static RtspResponse parseResponse(List lines) { + Matcher statusLineMatcher = STATUS_LINE_PATTERN.matcher(lines.get(0)); + checkArgument(statusLineMatcher.matches()); + + int statusCode = Integer.parseInt(checkNotNull(statusLineMatcher.group(1))); + // An empty line marks the boundary between header and body. + int messageBodyOffset = lines.indexOf(""); + checkArgument(messageBodyOffset > 0); + + List headerLines = lines.subList(1, messageBodyOffset); + RtspHeaders headers = new RtspHeaders.Builder().addAll(headerLines).build(); + + String messageBody = Joiner.on(CRLF).join(lines.subList(messageBodyOffset + 1, lines.size())); + return new RtspResponse(statusCode, headers, messageBody); + } + + /** + * Parses lines of a received RTSP request into an {@link RtspRequest} instance. + * + * @param lines The non-empty list of received lines, with line terminators removed. + * @return The parsed {@link RtspRequest} object. + */ + public static RtspRequest parseRequest(List lines) { + Matcher requestMatcher = REQUEST_LINE_PATTERN.matcher(lines.get(0)); + checkArgument(requestMatcher.matches()); + + @RtspRequest.Method int method = parseMethodString(checkNotNull(requestMatcher.group(1))); + Uri requestUri = Uri.parse(checkNotNull(requestMatcher.group(2))); + // An empty line marks the boundary between header and body. + int messageBodyOffset = lines.indexOf(""); + checkArgument(messageBodyOffset > 0); + + List headerLines = lines.subList(1, messageBodyOffset); + RtspHeaders headers = new RtspHeaders.Builder().addAll(headerLines).build(); + + String messageBody = Joiner.on(CRLF).join(lines.subList(messageBodyOffset + 1, lines.size())); + return new RtspRequest(requestUri, method, headers, messageBody); + } + + /** Returns whether the line is a valid RTSP start line. 
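+ * For example, the request line {@code "OPTIONS rtsp://example.com/media RTSP/1.0"} and the
+ * status line {@code "RTSP/1.0 200 OK"} are both valid start lines.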
*/ + public static boolean isRtspStartLine(String line) { + return REQUEST_LINE_PATTERN.matcher(line).matches() + || STATUS_LINE_PATTERN.matcher(line).matches(); + } + + /** + * Returns whether the RTSP message is an RTSP response. + * + * @param lines The non-empty list of received lines, with line terminators removed. + * @return Whether the lines represent an RTSP response. + */ + public static boolean isRtspResponse(List lines) { + return STATUS_LINE_PATTERN.matcher(lines.get(0)).matches(); + } + + /** Returns the lines in an RTSP message body split by the line terminator used in body. */ + public static String[] splitRtspMessageBody(String body) { + return Util.split(body, body.contains(CRLF) ? CRLF : LF); + } + + /** + * Returns the length in bytes if the line contains a Content-Length header, otherwise {@link + * C#LENGTH_UNSET}. + * + * @throws ParserException If Content-Length cannot be parsed to an integer. + */ + public static long parseContentLengthHeader(String line) throws ParserException { + try { + Matcher matcher = CONTENT_LENGTH_HEADER_PATTERN.matcher(line); + if (matcher.find()) { + return Long.parseLong(checkNotNull(matcher.group(1))); + } else { + return C.LENGTH_UNSET; + } + } catch (NumberFormatException e) { + throw ParserException.createForMalformedManifest(line, e); + } + } + + /** + * Parses the RTSP PUBLIC header into a list of RTSP methods. + * + * @param publicHeader The PUBLIC header content, null if not available. + * @return The list of supported RTSP methods, encoded in {@link RtspRequest.Method}, or an empty + * list if the PUBLIC header is null. + */ + public static ImmutableList parsePublicHeader(@Nullable String publicHeader) { + if (publicHeader == null) { + return ImmutableList.of(); + } + + ImmutableList.Builder methodListBuilder = new ImmutableList.Builder<>(); + for (String method : Util.split(publicHeader, ",\\s?")) { + methodListBuilder.add(parseMethodString(method)); + } + return methodListBuilder.build(); + } + + /** + * Parses a Session header in an RTSP message to {@link RtspSessionHeader}. + * + *

+ * <p>The format of the Session header is
+ *
+ * <pre>
+ * Session: session-id[;timeout=delta-seconds]
+ * </pre>
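+ * <p>For example, the header value {@code "610a63df;timeout=90"} is parsed to session ID
+ * {@code "610a63df"} with a timeout of 90 seconds; if no timeout parameter is present, {@link
+ * #DEFAULT_RTSP_TIMEOUT_MS} is used.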
        + * + * @param headerValue The string represent the content without the header name (Session: ). + * @return The parsed {@link RtspSessionHeader}. + * @throws ParserException When the input header value does not follow the Session header format. + */ + public static RtspSessionHeader parseSessionHeader(String headerValue) throws ParserException { + Matcher matcher = SESSION_HEADER_PATTERN.matcher(headerValue); + if (!matcher.matches()) { + throw ParserException.createForMalformedManifest(headerValue, /* cause= */ null); + } + + String sessionId = checkNotNull(matcher.group(1)); + // Optional parameter timeout. + long timeoutMs = DEFAULT_RTSP_TIMEOUT_MS; + @Nullable String timeoutString; + if ((timeoutString = matcher.group(2)) != null) { + try { + timeoutMs = Integer.parseInt(timeoutString) * C.MILLIS_PER_SECOND; + } catch (NumberFormatException e) { + throw ParserException.createForMalformedManifest(headerValue, e); + } + } + + return new RtspSessionHeader(sessionId, timeoutMs); + } + + /** + * Parses a WWW-Authenticate header. + * + *
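+ * <p>For example, the header value {@code Digest realm="RTSP Server", nonce="318e289e4f1c8f41"}
+ * is parsed as a digest challenge with that realm and nonce and an empty opaque value.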

        Reference RFC2068 Section 14.46 for WWW-Authenticate header. Only digest and basic + * authentication mechanisms are supported. + * + * @param headerValue The string representation of the content, without the header name + * (WWW-Authenticate: ). + * @return The parsed {@link RtspAuthenticationInfo}. + * @throws ParserException When the input header value does not follow the WWW-Authenticate header + * format, or is not using either Basic or Digest mechanisms. + */ + public static RtspAuthenticationInfo parseWwwAuthenticateHeader(String headerValue) + throws ParserException { + Matcher matcher = WWW_AUTHENTICATION_HEADER_DIGEST_PATTERN.matcher(headerValue); + if (matcher.find()) { + return new RtspAuthenticationInfo( + RtspAuthenticationInfo.DIGEST, + /* realm= */ checkNotNull(matcher.group(1)), + /* nonce= */ checkNotNull(matcher.group(3)), + /* opaque= */ nullToEmpty(matcher.group(4))); + } + matcher = WWW_AUTHENTICATION_HEADER_BASIC_PATTERN.matcher(headerValue); + if (matcher.matches()) { + return new RtspAuthenticationInfo( + RtspAuthenticationInfo.BASIC, + /* realm= */ checkNotNull(matcher.group(1)), + /* nonce= */ "", + /* opaque= */ ""); + } + throw ParserException.createForMalformedManifest( + "Invalid WWW-Authenticate header " + headerValue, /* cause= */ null); + } + + /** + * Throws {@link ParserException#createForMalformedManifest ParserException} if {@code expression} + * evaluates to false. + * + * @param expression The expression to evaluate. + * @param message The error message. + * @throws ParserException If {@code expression} is false. + */ + public static void checkManifestExpression(boolean expression, @Nullable String message) + throws ParserException { + if (!expression) { + throw ParserException.createForMalformedManifest(message, /* cause= */ null); + } + } + + private static String getRtspStatusReasonPhrase(int statusCode) { + switch (statusCode) { + case 200: + return "OK"; + case 301: + return "Move Permanently"; + case 302: + return "Move Temporarily"; + case 400: + return "Bad Request"; + case 401: + return "Unauthorized"; + case 404: + return "Not Found"; + case 405: + return "Method Not Allowed"; + case 454: + return "Session Not Found"; + case 455: + return "Method Not Valid In This State"; + case 456: + return "Header Field Not Valid"; + case 457: + return "Invalid Range"; + case 461: + return "Unsupported Transport"; + case 500: + return "Internal Server Error"; + case 505: + return "RTSP Version Not Supported"; + default: + throw new IllegalArgumentException(); + } + } + + /** + * Parses the string argument as an integer, wraps the potential {@link NumberFormatException} in + * {@link ParserException}. + */ + public static int parseInt(String intString) throws ParserException { + try { + return Integer.parseInt(intString); + } catch (NumberFormatException e) { + throw ParserException.createForMalformedManifest(intString, e); + } + } + + private RtspMessageUtil() {} +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspOptionsResponse.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspOptionsResponse.java new file mode 100644 index 0000000000..6a3403c797 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspOptionsResponse.java @@ -0,0 +1,42 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp; + +import com.google.common.collect.ImmutableList; +import java.util.List; + +/** Represents an RTSP OPTIONS response. */ +/* package */ final class RtspOptionsResponse { + /** The response's status code. */ + public final int status; + /** + * A list of methods supported by the RTSP server, encoded as {@link RtspRequest.Method}; or an + * empty list if the server does not disclose the supported methods. + */ + public final ImmutableList supportedMethods; + + /** + * Creates a new instance. + * + * @param status The response's status code. + * @param supportedMethods A list of methods supported by the RTSP server, encoded as {@link + * RtspRequest.Method}; or an empty list if such information is not available. + */ + public RtspOptionsResponse(int status, List supportedMethods) { + this.status = status; + this.supportedMethods = ImmutableList.copyOf(supportedMethods); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspPlayResponse.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspPlayResponse.java new file mode 100644 index 0000000000..26d0344111 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspPlayResponse.java @@ -0,0 +1,45 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp; + +import com.google.common.collect.ImmutableList; +import java.util.List; + +/** Represents an RTSP PLAY response. */ +/* package */ final class RtspPlayResponse { + /** The response's status code. */ + public final int status; + /** The playback start timing, {@link RtspSessionTiming#DEFAULT} if not present. */ + public final RtspSessionTiming sessionTiming; + /** The list of {@link RtspTrackTiming} representing the {@link RtspHeaders#RTP_INFO} header. */ + public final ImmutableList trackTimingList; + + /** + * Creates a new instance. + * + * @param status The response's status code. + * @param sessionTiming The {@link RtspSessionTiming}, pass {@link RtspSessionTiming#DEFAULT} if + * not present. + * @param trackTimingList The list of {@link RtspTrackTiming} representing the {@link + * RtspHeaders#RTP_INFO} header. 
+ */ + public RtspPlayResponse( + int status, RtspSessionTiming sessionTiming, List trackTimingList) { + this.status = status; + this.sessionTiming = sessionTiming; + this.trackTimingList = ImmutableList.copyOf(trackTimingList); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspRequest.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspRequest.java new file mode 100644 index 0000000000..7526926565 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspRequest.java @@ -0,0 +1,109 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.source.rtsp; + +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.net.Uri; +import androidx.annotation.IntDef; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** Represents an RTSP request. */ +/* package */ final class RtspRequest { + /** + * RTSP request methods, as defined in RFC2326 Section 10. + * + *

+ * <p>The possible values are:
+ *
+ * <ul>
+ *   <li>{@link #METHOD_UNSET}
+ *   <li>{@link #METHOD_ANNOUNCE}
+ *   <li>{@link #METHOD_DESCRIBE}
+ *   <li>{@link #METHOD_GET_PARAMETER}
+ *   <li>{@link #METHOD_OPTIONS}
+ *   <li>{@link #METHOD_PAUSE}
+ *   <li>{@link #METHOD_PLAY}
+ *   <li>{@link #METHOD_PLAY_NOTIFY}
+ *   <li>{@link #METHOD_RECORD}
+ *   <li>{@link #METHOD_REDIRECT}
+ *   <li>{@link #METHOD_SETUP}
+ *   <li>{@link #METHOD_SET_PARAMETER}
+ *   <li>{@link #METHOD_TEARDOWN}
+ * </ul>
        + */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef( + value = { + METHOD_UNSET, + METHOD_ANNOUNCE, + METHOD_DESCRIBE, + METHOD_GET_PARAMETER, + METHOD_OPTIONS, + METHOD_PAUSE, + METHOD_PLAY, + METHOD_PLAY_NOTIFY, + METHOD_RECORD, + METHOD_REDIRECT, + METHOD_SETUP, + METHOD_SET_PARAMETER, + METHOD_TEARDOWN + }) + public @interface Method {} + + public static final int METHOD_UNSET = 0; + public static final int METHOD_ANNOUNCE = 1; + public static final int METHOD_DESCRIBE = 2; + public static final int METHOD_GET_PARAMETER = 3; + public static final int METHOD_OPTIONS = 4; + public static final int METHOD_PAUSE = 5; + public static final int METHOD_PLAY = 6; + public static final int METHOD_PLAY_NOTIFY = 7; + public static final int METHOD_RECORD = 8; + public static final int METHOD_REDIRECT = 9; + public static final int METHOD_SETUP = 10; + public static final int METHOD_SET_PARAMETER = 11; + public static final int METHOD_TEARDOWN = 12; + + /** The {@link Uri} to which this request is sent. */ + public final Uri uri; + /** The request method, as defined in {@link Method}. */ + public final @Method int method; + /** The headers of this request. */ + public final RtspHeaders headers; + /** The body of this RTSP message, or empty string if absent. */ + public final String messageBody; + + /** + * Creates a new instance. + * + * @param uri The {@link Uri} to which this request is sent. + * @param method The request method, as defined in {@link Method}. + * @param headers The headers of this request. + * @param messageBody The body of this RTSP message, or empty string if absent. + */ + public RtspRequest(Uri uri, @Method int method, RtspHeaders headers, String messageBody) { + this.uri = uri; + this.method = method; + this.headers = headers; + this.messageBody = messageBody; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspResponse.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspResponse.java new file mode 100644 index 0000000000..ec15fa7556 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspResponse.java @@ -0,0 +1,51 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.source.rtsp; + +/** Represents an RTSP Response. */ +/* package */ final class RtspResponse { + + /** The status code of this response, as defined in RFC 2326 section 11. */ + public final int status; + /** The headers of this response. */ + public final RtspHeaders headers; + /** The body of this RTSP message, or empty string if absent. */ + public final String messageBody; + + /** + * Creates a new instance. + * + * @param status The status code of this response, as defined in RFC 2326 section 11. + * @param headers The headers of this response. + * @param messageBody The body of this RTSP message, or empty string if absent. 
+ */ + public RtspResponse(int status, RtspHeaders headers, String messageBody) { + this.status = status; + this.headers = headers; + this.messageBody = messageBody; + } + + /** + * Creates a new instance with an empty {@link #messageBody}. + * + * @param status The status code of this response, as defined in RFC 2326 section 11. + * @param headers The headers of this response. + */ + public RtspResponse(int status, RtspHeaders headers) { + this(status, headers, /* messageBody= */ ""); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspSessionTiming.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspSessionTiming.java new file mode 100644 index 0000000000..711c60b365 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspSessionTiming.java @@ -0,0 +1,107 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.source.rtsp.RtspMessageUtil.checkManifestExpression; +import static com.google.android.exoplayer2.util.Util.castNonNull; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.util.Util; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Represent the timing (RTSP Normal Playback Time format) of an RTSP session. + * + *
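+ * <p>For example, {@code npt=0-34.56} describes a range starting at 0s and stopping at 34.56s,
+ * and {@code npt=0-} describes a start at 0s with no stop time (a live presentation).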

        Currently only NPT is supported. See RFC2326 Section 3.6 for detail of NPT. + */ +/* package */ final class RtspSessionTiming { + /** The default session timing starting from 0.000 and indefinite length, effectively live. */ + public static final RtspSessionTiming DEFAULT = + new RtspSessionTiming(/* startTimeMs= */ 0, /* stopTimeMs= */ C.TIME_UNSET); + + // We only support npt=xxx-[xxx], but not npt=-xxx. See RFC2326 Section 3.6. + // Supports both npt= and npt: identifier. + private static final Pattern NPT_RANGE_PATTERN = + Pattern.compile("npt[:=]([.\\d]+|now)\\s?-\\s?([.\\d]+)?"); + private static final String START_TIMING_NTP_FORMAT = "npt=%.3f-"; + + private static final long LIVE_START_TIME = 0; + + /** Parses an SDP range attribute (RFC2326 Section 3.6). */ + public static RtspSessionTiming parseTiming(String sdpRangeAttribute) throws ParserException { + long startTimeMs; + long stopTimeMs; + Matcher matcher = NPT_RANGE_PATTERN.matcher(sdpRangeAttribute); + checkManifestExpression(matcher.matches(), /* message= */ sdpRangeAttribute); + + @Nullable String startTimeString = matcher.group(1); + checkManifestExpression(startTimeString != null, /* message= */ sdpRangeAttribute); + if (castNonNull(startTimeString).equals("now")) { + startTimeMs = LIVE_START_TIME; + } else { + startTimeMs = (long) (Float.parseFloat(startTimeString) * C.MILLIS_PER_SECOND); + } + + @Nullable String stopTimeString = matcher.group(2); + if (stopTimeString != null) { + try { + stopTimeMs = (long) (Float.parseFloat(stopTimeString) * C.MILLIS_PER_SECOND); + } catch (NumberFormatException e) { + throw ParserException.createForMalformedManifest(stopTimeString, e); + } + checkManifestExpression(stopTimeMs >= startTimeMs, /* message= */ sdpRangeAttribute); + } else { + stopTimeMs = C.TIME_UNSET; + } + + return new RtspSessionTiming(startTimeMs, stopTimeMs); + } + + /** Gets a Range RTSP header for an RTSP PLAY request. */ + public static String getOffsetStartTimeTiming(long offsetStartTimeMs) { + double offsetStartTimeSec = (double) offsetStartTimeMs / C.MILLIS_PER_SECOND; + return Util.formatInvariant(START_TIMING_NTP_FORMAT, offsetStartTimeSec); + } + + /** + * The start time of this session, in milliseconds. When playing a live session, the start time is + * always zero. + */ + public final long startTimeMs; + /** + * The stop time of the session, in milliseconds, or {@link C#TIME_UNSET} when the stop time is + * not set, for example when playing a live session. + */ + public final long stopTimeMs; + + private RtspSessionTiming(long startTimeMs, long stopTimeMs) { + this.startTimeMs = startTimeMs; + this.stopTimeMs = stopTimeMs; + } + + /** Tests whether the timing is live. */ + public boolean isLive() { + return stopTimeMs == C.TIME_UNSET; + } + + /** Gets the session duration in milliseconds. */ + public long getDurationMs() { + return stopTimeMs - startTimeMs; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspSetupResponse.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspSetupResponse.java new file mode 100644 index 0000000000..5f4cf51b4b --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspSetupResponse.java @@ -0,0 +1,41 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp; + +/** Represents an RTSP SETUP response. */ +/* package */ final class RtspSetupResponse { + + /** The response's status code. */ + public final int status; + /** The Session header (RFC2326 Section 12.37). */ + public final RtspMessageUtil.RtspSessionHeader sessionHeader; + /** The Transport header (RFC2326 Section 12.39). */ + public final String transport; + + /** + * Creates a new instance. + * + * @param status The response's status code. + * @param sessionHeader The {@link RtspMessageUtil.RtspSessionHeader}. + * @param transport The transport header included in the RTSP SETUP response. + */ + public RtspSetupResponse( + int status, RtspMessageUtil.RtspSessionHeader sessionHeader, String transport) { + this.status = status; + this.sessionHeader = sessionHeader; + this.transport = transport; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspTrackTiming.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspTrackTiming.java new file mode 100644 index 0000000000..cf0fd5f691 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/RtspTrackTiming.java @@ -0,0 +1,167 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import android.net.Uri; +import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.util.UriUtil; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; + +/** + * Represents an RTSP track's timing info, included as {@link RtspHeaders#RTP_INFO} in an RTSP PLAY + * response (RFC2326 Section 12.33). + */ +/* package */ final class RtspTrackTiming { + + /** + * Parses the RTP-Info header into a list of {@link RtspTrackTiming RtspTrackTimings}. + * + *

+ * <p>The syntax of the RTP-Info (RFC2326 Section 12.33):
+ *
+ * <pre>
+ *   RTP-Info        = "RTP-Info" ":" 1#stream-url 1*parameter
+ *   stream-url      = "url" "=" url
+ *   parameter       = ";" "seq" "=" 1*DIGIT
+ *                   | ";" "rtptime" "=" 1*DIGIT
+ * </pre>
+ *
+ * <p>Examples from RFC2326:
+ *
+ * <pre>
+ *   RTP-Info:url=rtsp://foo.com/bar.file; seq=232433;rtptime=972948234
+ *   RTP-Info:url=rtsp://foo.com/bar.avi/streamid=0;seq=45102,
+ *            url=rtsp://foo.com/bar.avi/streamid=1;seq=30211
+ * </pre>
        + * + * @param rtpInfoString The value of the RTP-Info header, with header name (RTP-Info) removed. + * @param sessionUri The session URI, must include an {@code rtsp} scheme. + * @return A list of parsed {@link RtspTrackTiming}. + * @throws ParserException If parsing failed. + */ + public static ImmutableList parseTrackTiming( + String rtpInfoString, Uri sessionUri) throws ParserException { + + ImmutableList.Builder listBuilder = new ImmutableList.Builder<>(); + for (String perTrackTimingString : Util.split(rtpInfoString, ",")) { + long rtpTime = C.TIME_UNSET; + int sequenceNumber = C.INDEX_UNSET; + @Nullable Uri uri = null; + + for (String attributePair : Util.split(perTrackTimingString, ";")) { + try { + String[] attributes = Util.splitAtFirst(attributePair, "="); + String attributeName = attributes[0]; + String attributeValue = attributes[1]; + + switch (attributeName) { + case "url": + uri = resolveUri(/* urlString= */ attributeValue, sessionUri); + break; + case "seq": + sequenceNumber = Integer.parseInt(attributeValue); + break; + case "rtptime": + rtpTime = Long.parseLong(attributeValue); + break; + default: + throw ParserException.createForMalformedManifest(attributeName, /* cause= */ null); + } + } catch (Exception e) { + throw ParserException.createForMalformedManifest(attributePair, e); + } + } + + if (uri == null + || uri.getScheme() == null // Checks if the URI is a URL. + || (sequenceNumber == C.INDEX_UNSET && rtpTime == C.TIME_UNSET)) { + throw ParserException.createForMalformedManifest(perTrackTimingString, /* cause= */ null); + } + + listBuilder.add(new RtspTrackTiming(rtpTime, sequenceNumber, uri)); + } + return listBuilder.build(); + } + + /** + * Resolves the input string to always be an absolute URL with RTP-Info headers + * + *

+ * <p>Handles the case where servers do not send an absolute URL in RTP-Info headers. This method
+ * takes the RTP-Info header's url string and returns the correctly formatted {@link Uri url} for
+ * this track. The input url string could be
+ *
+ * <ul>
+ *   <li>A correctly formatted URL, like "{@code rtsp://foo.bar/video}".
+ *   <li>A correct URI that is missing the scheme, like "{@code foo.bar/video}".
+ *   <li>A path to the resource, like "{@code video}" or "{@code /video}".
+ * </ul>
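+ * <p>For example, with session URI {@code rtsp://foo.bar/video}, the input {@code "track1"}
+ * resolves to {@code rtsp://foo.bar/video/track1}.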
        + * + * @param urlString The URL included in the RTP-Info header, without the {@code url=} identifier. + * @param sessionUri The session URI, must include an {@code rtsp} scheme, or {@link + * IllegalArgumentException} is thrown. + * @return The formatted URL. + */ + @VisibleForTesting + /* package */ static Uri resolveUri(String urlString, Uri sessionUri) { + checkArgument(checkNotNull(sessionUri.getScheme()).equals("rtsp")); + + Uri uri = Uri.parse(urlString); + if (uri.isAbsolute()) { + return uri; + } + + // The urlString is at least missing the scheme. + uri = Uri.parse("rtsp://" + urlString); + String sessionUriString = sessionUri.toString(); + + String host = checkNotNull(uri.getHost()); + if (host.equals(sessionUri.getHost())) { + // Handles the case that the urlString is only missing the scheme. + return uri; + } + + return sessionUriString.endsWith("/") + ? UriUtil.resolveToUri(sessionUriString, urlString) + : UriUtil.resolveToUri(sessionUriString + "/", urlString); + } + + /** + * The timestamp of the next RTP packet, {@link C#TIME_UNSET} if not present. + * + *

        Cannot be {@link C#TIME_UNSET} if {@link #sequenceNumber} is {@link C#INDEX_UNSET}. + */ + public final long rtpTimestamp; + /** + * The sequence number of the next RTP packet, {@link C#INDEX_UNSET} if not present. + * + *

        Cannot be {@link C#INDEX_UNSET} if {@link #rtpTimestamp} is {@link C#TIME_UNSET}. + */ + public final int sequenceNumber; + /** The {@link Uri} that identifies a matching {@link RtspMediaTrack}. */ + public final Uri uri; + + private RtspTrackTiming(long rtpTimestamp, int sequenceNumber, Uri uri) { + this.rtpTimestamp = rtpTimestamp; + this.sequenceNumber = sequenceNumber; + this.uri = uri; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/SessionDescription.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/SessionDescription.java new file mode 100644 index 0000000000..665aba9545 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/SessionDescription.java @@ -0,0 +1,324 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.util.Util.castNonNull; + +import android.net.Uri; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.util.HashMap; + +/** + * Records all the information in a SDP message. + * + *

        SDP messages encapsulate information on the media play back session, including session + * configuration information, formats of each playable track, etc. SDP is defined in RFC4566. + */ +/* package */ final class SessionDescription { + + /** Builder class for {@link SessionDescription}. */ + public static final class Builder { + private final HashMap attributes; + private final ImmutableList.Builder mediaDescriptionListBuilder; + private int bitrate; + @Nullable private String sessionName; + @Nullable private String origin; + @Nullable private String timing; + @Nullable private Uri uri; + @Nullable private String connection; + @Nullable private String key; + @Nullable private String sessionInfo; + @Nullable private String emailAddress; + @Nullable private String phoneNumber; + + /** Creates a new instance. */ + public Builder() { + attributes = new HashMap<>(); + mediaDescriptionListBuilder = new ImmutableList.Builder<>(); + bitrate = Format.NO_VALUE; + } + + /** + * Sets {@link SessionDescription#sessionName}. + * + *

        This property must be set before calling {@link #build()}. + * + * @param sessionName The {@link SessionDescription#sessionName}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setSessionName(String sessionName) { + this.sessionName = sessionName; + return this; + } + + /** + * Sets {@link SessionDescription#sessionInfo}. The default is {@code null}. + * + * @param sessionInfo The {@link SessionDescription#sessionInfo}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setSessionInfo(String sessionInfo) { + this.sessionInfo = sessionInfo; + return this; + } + + /** + * Sets {@link SessionDescription#uri}. The default is {@code null}. + * + * @param uri The {@link SessionDescription#uri}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setUri(Uri uri) { + this.uri = uri; + return this; + } + + /** + * Sets {@link SessionDescription#origin}. + * + *

        This property must be set before calling {@link #build()}. + * + * @param origin The {@link SessionDescription#origin}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setOrigin(String origin) { + this.origin = origin; + return this; + } + + /** + * Sets {@link SessionDescription#connection}. The default is {@code null}. + * + * @param connection The {@link SessionDescription#connection}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setConnection(String connection) { + this.connection = connection; + return this; + } + + /** + * Sets {@link SessionDescription#bitrate}. The default is {@link Format#NO_VALUE}. + * + * @param bitrate The {@link SessionDescription#bitrate} in bits per second. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setBitrate(int bitrate) { + this.bitrate = bitrate; + return this; + } + + /** + * Sets {@link SessionDescription#timing}. + * + *

        This property must be set before calling {@link #build()}. + * + * @param timing The {@link SessionDescription#timing}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setTiming(String timing) { + this.timing = timing; + return this; + } + + /** + * Sets {@link SessionDescription#key}. The default is {@code null}. + * + * @param key The {@link SessionDescription#key}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setKey(String key) { + this.key = key; + return this; + } + + /** + * Sets {@link SessionDescription#emailAddress}. The default is {@code null}. + * + * @param emailAddress The {@link SessionDescription#emailAddress}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setEmailAddress(String emailAddress) { + this.emailAddress = emailAddress; + return this; + } + + /** + * Sets {@link SessionDescription#phoneNumber}. The default is {@code null}. + * + * @param phoneNumber The {@link SessionDescription#phoneNumber}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPhoneNumber(String phoneNumber) { + this.phoneNumber = phoneNumber; + return this; + } + + /** + * Adds one attribute to {@link SessionDescription#attributes}. + * + * @param attributeName The name of the attribute. + * @param attributeValue The value of the attribute. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder addAttribute(String attributeName, String attributeValue) { + attributes.put(attributeName, attributeValue); + return this; + } + + /** + * Adds one {@link MediaDescription} to the {@link SessionDescription#mediaDescriptionList}. + * + * @param mediaDescription The {@link MediaDescription}. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder addMediaDescription(MediaDescription mediaDescription) { + mediaDescriptionListBuilder.add(mediaDescription); + return this; + } + + /** + * Builds a new {@link SessionDescription} instance. + * + * @return The newly built {@link SessionDescription} instance. + */ + public SessionDescription build() { + return new SessionDescription(this); + } + } + + /** The only supported SDP version, will be checked against every SDP message received. */ + public static final String SUPPORTED_SDP_VERSION = "0"; + /** The control attribute name. */ + public static final String ATTR_CONTROL = "control"; + /** The format property attribute name. */ + public static final String ATTR_FMTP = "fmtp"; + /** The length property attribute name. */ + public static final String ATTR_LENGTH = "length"; + /** The range property attribute name. */ + public static final String ATTR_RANGE = "range"; + /** The RTP format mapping property attribute name. */ + public static final String ATTR_RTPMAP = "rtpmap"; + /** The tool property attribute name. */ + public static final String ATTR_TOOL = "tool"; + /** The type property attribute name. */ + public static final String ATTR_TYPE = "type"; + + /** + * All the session attributes, mapped from attribute name to value. The value is {@code ""} if not + * present. + */ + public final ImmutableMap attributes; + /** + * The {@link MediaDescription MediaDescriptions} for each media track included in the session. + */ + public final ImmutableList mediaDescriptionList; + /** The name of a session. */ + @Nullable public final String sessionName; + /** The origin sender info. */ + @Nullable public final String origin; + /** The timing info. 
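+ * This is the value of the SDP {@code t=} line.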
*/ + @Nullable public final String timing; + /** The estimated bitrate in bits per seconds. */ + public final int bitrate; + /** The uri of a linked content. */ + @Nullable public final Uri uri; + /** The connection info. */ + @Nullable public final String connection; + /** The encryption method and key info. */ + @Nullable public final String key; + /** The email info. */ + @Nullable public final String emailAddress; + /** The phone number info. */ + @Nullable public final String phoneNumber; + /** The session info, a detailed description of the session. */ + @Nullable public final String sessionInfo; + + /** Creates a new instance. */ + private SessionDescription(Builder builder) { + this.attributes = ImmutableMap.copyOf(builder.attributes); + this.mediaDescriptionList = builder.mediaDescriptionListBuilder.build(); + this.sessionName = castNonNull(builder.sessionName); + this.origin = castNonNull(builder.origin); + this.timing = castNonNull(builder.timing); + this.uri = builder.uri; + this.connection = builder.connection; + this.bitrate = builder.bitrate; + this.key = builder.key; + this.emailAddress = builder.emailAddress; + this.phoneNumber = builder.phoneNumber; + this.sessionInfo = builder.sessionInfo; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SessionDescription that = (SessionDescription) o; + return bitrate == that.bitrate + && attributes.equals(that.attributes) + && mediaDescriptionList.equals(that.mediaDescriptionList) + && Util.areEqual(origin, that.origin) + && Util.areEqual(sessionName, that.sessionName) + && Util.areEqual(timing, that.timing) + && Util.areEqual(sessionInfo, that.sessionInfo) + && Util.areEqual(uri, that.uri) + && Util.areEqual(emailAddress, that.emailAddress) + && Util.areEqual(phoneNumber, that.phoneNumber) + && Util.areEqual(connection, that.connection) + && Util.areEqual(key, that.key); + } + + @Override + public int hashCode() { + int result = 7; + result = 31 * result + attributes.hashCode(); + result = 31 * result + mediaDescriptionList.hashCode(); + result = 31 * result + (origin == null ? 0 : origin.hashCode()); + result = 31 * result + (sessionName == null ? 0 : sessionName.hashCode()); + result = 31 * result + (timing == null ? 0 : timing.hashCode()); + result = 31 * result + bitrate; + result = 31 * result + (sessionInfo == null ? 0 : sessionInfo.hashCode()); + result = 31 * result + (uri == null ? 0 : uri.hashCode()); + result = 31 * result + (emailAddress == null ? 0 : emailAddress.hashCode()); + result = 31 * result + (phoneNumber == null ? 0 : phoneNumber.hashCode()); + result = 31 * result + (connection == null ? 0 : connection.hashCode()); + result = 31 * result + (key == null ? 0 : key.hashCode()); + return result; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/SessionDescriptionParser.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/SessionDescriptionParser.java new file mode 100644 index 0000000000..d85dd216db --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/SessionDescriptionParser.java @@ -0,0 +1,237 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.source.rtsp.SessionDescription.SUPPORTED_SDP_VERSION; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.common.base.Strings.nullToEmpty; + +import android.net.Uri; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.util.Util; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** Parses a String based SDP message into {@link SessionDescription}. */ +/* package */ final class SessionDescriptionParser { + // SDP line always starts with an one letter tag, followed by an equal sign. The information + // under the given tag follows an optional space. + private static final Pattern SDP_LINE_PATTERN = Pattern.compile("([a-z])=\\s?(.+)"); + // Matches an attribute line (with a= sdp tag removed. Example: range:npt=0-50.0). + // Attribute can also be a flag, i.e. without a value, like recvonly. Reference RFC4566 Section 9 + // Page 43, under "token-char". + private static final Pattern ATTRIBUTE_PATTERN = + Pattern.compile( + "([\\x21\\x23-\\x27\\x2a\\x2b\\x2d\\x2e\\x30-\\x39\\x41-\\x5a\\x5e-\\x7e]+)(?::(.*))?"); + // SDP media description line: + // For instance: audio 0 RTP/AVP 97 + private static final Pattern MEDIA_DESCRIPTION_PATTERN = + Pattern.compile("(\\S+)\\s(\\S+)\\s(\\S+)\\s(\\S+)"); + + private static final String VERSION_TYPE = "v"; + private static final String ORIGIN_TYPE = "o"; + private static final String SESSION_TYPE = "s"; + private static final String INFORMATION_TYPE = "i"; + private static final String URI_TYPE = "u"; + private static final String EMAIL_TYPE = "e"; + private static final String PHONE_NUMBER_TYPE = "p"; + private static final String CONNECTION_TYPE = "c"; + private static final String BANDWIDTH_TYPE = "b"; + private static final String TIMING_TYPE = "t"; + private static final String KEY_TYPE = "k"; + private static final String ATTRIBUTE_TYPE = "a"; + private static final String MEDIA_TYPE = "m"; + private static final String REPEAT_TYPE = "r"; + private static final String ZONE_TYPE = "z"; + + /** + * Parses a String based SDP message into {@link SessionDescription}. + * + * @throws ParserException On SDP message line that cannot be parsed, or when one or more of the + * mandatory SDP fields {@link SessionDescription#timing}, {@link SessionDescription#origin} + * and {@link SessionDescription#sessionName} are not set. + */ + public static SessionDescription parse(String sdpString) throws ParserException { + SessionDescription.Builder sessionDescriptionBuilder = new SessionDescription.Builder(); + @Nullable MediaDescription.Builder mediaDescriptionBuilder = null; + + // Lines are separated by an CRLF. 
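+    // An illustrative (made-up) SDP body, showing the kinds of lines handled below:
+    //   v=0
+    //   o=- 1606 1606 IN IP4 127.0.0.1
+    //   s=Example media presentation
+    //   t=0 0
+    //   m=audio 0 RTP/AVP 97
+    //   a=rtpmap:97 MPEG4-GENERIC/44100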
+ for (String line : RtspMessageUtil.splitRtspMessageBody(sdpString)) { + if ("".equals(line)) { + continue; + } + + Matcher matcher = SDP_LINE_PATTERN.matcher(line); + if (!matcher.matches()) { + throw ParserException.createForMalformedManifest( + "Malformed SDP line: " + line, /* cause= */ null); + } + + String sdpType = checkNotNull(matcher.group(1)); + String sdpValue = checkNotNull(matcher.group(2)); + + switch (sdpType) { + case VERSION_TYPE: + if (!SUPPORTED_SDP_VERSION.equals(sdpValue)) { + throw ParserException.createForMalformedManifest( + String.format("SDP version %s is not supported.", sdpValue), /* cause= */ null); + } + break; + + case ORIGIN_TYPE: + sessionDescriptionBuilder.setOrigin(sdpValue); + break; + + case SESSION_TYPE: + sessionDescriptionBuilder.setSessionName(sdpValue); + break; + + case INFORMATION_TYPE: + if (mediaDescriptionBuilder == null) { + sessionDescriptionBuilder.setSessionInfo(sdpValue); + } else { + mediaDescriptionBuilder.setMediaTitle(sdpValue); + } + break; + + case URI_TYPE: + sessionDescriptionBuilder.setUri(Uri.parse(sdpValue)); + break; + + case EMAIL_TYPE: + sessionDescriptionBuilder.setEmailAddress(sdpValue); + break; + + case PHONE_NUMBER_TYPE: + sessionDescriptionBuilder.setPhoneNumber(sdpValue); + break; + + case CONNECTION_TYPE: + if (mediaDescriptionBuilder == null) { + sessionDescriptionBuilder.setConnection(sdpValue); + } else { + mediaDescriptionBuilder.setConnection(sdpValue); + } + break; + + case BANDWIDTH_TYPE: + String[] bandwidthComponents = Util.split(sdpValue, ":\\s?"); + checkArgument(bandwidthComponents.length == 2); + int bitrateKbps = Integer.parseInt(bandwidthComponents[1]); + + // Converting kilobits per second to bits per second. + if (mediaDescriptionBuilder == null) { + sessionDescriptionBuilder.setBitrate(bitrateKbps * 1000); + } else { + mediaDescriptionBuilder.setBitrate(bitrateKbps * 1000); + } + break; + + case TIMING_TYPE: + sessionDescriptionBuilder.setTiming(sdpValue); + break; + + case KEY_TYPE: + if (mediaDescriptionBuilder == null) { + sessionDescriptionBuilder.setKey(sdpValue); + } else { + mediaDescriptionBuilder.setKey(sdpValue); + } + break; + + case ATTRIBUTE_TYPE: + matcher = ATTRIBUTE_PATTERN.matcher(sdpValue); + if (!matcher.matches()) { + throw ParserException.createForMalformedManifest( + "Malformed Attribute line: " + line, /* cause= */ null); + } + + String attributeName = checkNotNull(matcher.group(1)); + // The second catching group is optional and thus could be null. + String attributeValue = nullToEmpty(matcher.group(2)); + + if (mediaDescriptionBuilder == null) { + sessionDescriptionBuilder.addAttribute(attributeName, attributeValue); + } else { + mediaDescriptionBuilder.addAttribute(attributeName, attributeValue); + } + break; + + case MEDIA_TYPE: + if (mediaDescriptionBuilder != null) { + addMediaDescriptionToSession(sessionDescriptionBuilder, mediaDescriptionBuilder); + } + mediaDescriptionBuilder = parseMediaDescriptionLine(sdpValue); + break; + case REPEAT_TYPE: + case ZONE_TYPE: + default: + // Not handled. 
+ } + } + + if (mediaDescriptionBuilder != null) { + addMediaDescriptionToSession(sessionDescriptionBuilder, mediaDescriptionBuilder); + } + + try { + return sessionDescriptionBuilder.build(); + } catch (IllegalArgumentException | IllegalStateException e) { + throw ParserException.createForMalformedManifest(/* message= */ null, e); + } + } + + private static void addMediaDescriptionToSession( + SessionDescription.Builder sessionDescriptionBuilder, + MediaDescription.Builder mediaDescriptionBuilder) + throws ParserException { + try { + sessionDescriptionBuilder.addMediaDescription(mediaDescriptionBuilder.build()); + } catch (IllegalArgumentException | IllegalStateException e) { + throw ParserException.createForMalformedManifest(/* message= */ null, e); + } + } + + private static MediaDescription.Builder parseMediaDescriptionLine(String line) + throws ParserException { + Matcher matcher = MEDIA_DESCRIPTION_PATTERN.matcher(line); + if (!matcher.matches()) { + throw ParserException.createForMalformedManifest( + "Malformed SDP media description line: " + line, /* cause= */ null); + } + String mediaType = checkNotNull(matcher.group(1)); + String portString = checkNotNull(matcher.group(2)); + String transportProtocol = checkNotNull(matcher.group(3)); + String payloadTypeString = checkNotNull(matcher.group(4)); + + try { + return new MediaDescription.Builder( + mediaType, + Integer.parseInt(portString), + transportProtocol, + Integer.parseInt(payloadTypeString)); + } catch (NumberFormatException e) { + throw ParserException.createForMalformedManifest( + "Malformed SDP media description line: " + line, e); + } + } + + /** Prevents initialization. */ + private SessionDescriptionParser() {} +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/TransferRtpDataChannel.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/TransferRtpDataChannel.java new file mode 100644 index 0000000000..c96a3ea725 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/TransferRtpDataChannel.java @@ -0,0 +1,129 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static java.lang.Math.min; +import static java.util.concurrent.TimeUnit.MILLISECONDS; + +import android.net.Uri; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.source.rtsp.RtspMessageChannel.InterleavedBinaryDataListener; +import com.google.android.exoplayer2.upstream.BaseDataSource; +import com.google.android.exoplayer2.upstream.DataSpec; +import com.google.android.exoplayer2.util.Util; +import java.util.Arrays; +import java.util.concurrent.LinkedBlockingQueue; + +/** An {@link RtpDataChannel} that transfers received data in-memory. 
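Packets delivered through {@link RtspMessageChannel.InterleavedBinaryDataListener} are queued in memory until {@link #read} drains them, which is how interleaved (RTP-over-RTSP) data reaches the extractor.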
*/ +/* package */ final class TransferRtpDataChannel extends BaseDataSource + implements RtpDataChannel, RtspMessageChannel.InterleavedBinaryDataListener { + + private static final String DEFAULT_TCP_TRANSPORT_FORMAT = + "RTP/AVP/TCP;unicast;interleaved=%d-%d"; + + private final LinkedBlockingQueue packetQueue; + private final long pollTimeoutMs; + + private byte[] unreadData; + private int channelNumber; + + /** + * Creates a new instance. + * + * @param pollTimeoutMs The number of milliseconds which {@link #read} waits for a packet to be + * available. After the time has expired, {@link C#RESULT_END_OF_INPUT} is returned. + */ + public TransferRtpDataChannel(long pollTimeoutMs) { + super(/* isNetwork= */ true); + this.pollTimeoutMs = pollTimeoutMs; + packetQueue = new LinkedBlockingQueue<>(); + unreadData = new byte[0]; + channelNumber = C.INDEX_UNSET; + } + + @Override + public String getTransport() { + checkState(channelNumber != C.INDEX_UNSET); // Assert open() is called. + return Util.formatInvariant(DEFAULT_TCP_TRANSPORT_FORMAT, channelNumber, channelNumber + 1); + } + + @Override + public int getLocalPort() { + return channelNumber; + } + + @Override + public InterleavedBinaryDataListener getInterleavedBinaryDataListener() { + return this; + } + + @Override + public long open(DataSpec dataSpec) { + this.channelNumber = dataSpec.uri.getPort(); + return C.LENGTH_UNSET; + } + + @Override + public void close() {} + + @Nullable + @Override + public Uri getUri() { + return null; + } + + @Override + public int read(byte[] buffer, int offset, int length) { + if (length == 0) { + return 0; + } + + int bytesRead = 0; + int bytesToRead = min(length, unreadData.length); + System.arraycopy(unreadData, /* srcPos= */ 0, buffer, offset, bytesToRead); + bytesRead += bytesToRead; + unreadData = Arrays.copyOfRange(unreadData, bytesToRead, unreadData.length); + + if (bytesRead == length) { + return bytesRead; + } + + @Nullable byte[] data; + try { + data = packetQueue.poll(pollTimeoutMs, MILLISECONDS); + if (data == null) { + return C.RESULT_END_OF_INPUT; + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + return C.RESULT_END_OF_INPUT; + } + + bytesToRead = min(length - bytesRead, data.length); + System.arraycopy(data, /* srcPos= */ 0, buffer, offset + bytesRead, bytesToRead); + if (bytesToRead < data.length) { + unreadData = Arrays.copyOfRange(data, bytesToRead, data.length); + } + return bytesRead + bytesToRead; + } + + @Override + public void onInterleavedBinaryDataReceived(byte[] data) { + packetQueue.add(data); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/TransferRtpDataChannelFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/TransferRtpDataChannelFactory.java new file mode 100644 index 0000000000..e3829aab58 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/TransferRtpDataChannelFactory.java @@ -0,0 +1,41 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp; + +/** Factory for {@link TransferRtpDataChannel}. */ +/* package */ final class TransferRtpDataChannelFactory implements RtpDataChannel.Factory { + + private static final int INTERLEAVED_CHANNELS_PER_TRACK = 2; + + private final long timeoutMs; + + /** + * Creates a new instance. + * + * @param timeoutMs A positive number of milliseconds to wait before lack of received RTP packets + * is treated as the end of input. + */ + public TransferRtpDataChannelFactory(long timeoutMs) { + this.timeoutMs = timeoutMs; + } + + @Override + public RtpDataChannel createAndOpenDataChannel(int trackId) { + TransferRtpDataChannel dataChannel = new TransferRtpDataChannel(timeoutMs); + dataChannel.open(RtpUtils.getIncomingRtpDataSpec(trackId * INTERLEAVED_CHANNELS_PER_TRACK)); + return dataChannel; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/UdpDataSourceRtpDataChannel.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/UdpDataSourceRtpDataChannel.java new file mode 100644 index 0000000000..44331f524b --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/UdpDataSourceRtpDataChannel.java @@ -0,0 +1,113 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkState; + +import android.net.Uri; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.upstream.DataSpec; +import com.google.android.exoplayer2.upstream.TransferListener; +import com.google.android.exoplayer2.upstream.UdpDataSource; +import com.google.android.exoplayer2.util.Util; +import com.google.common.primitives.Ints; +import java.io.IOException; + +/** An {@link RtpDataChannel} for UDP transport. */ +/* package */ final class UdpDataSourceRtpDataChannel implements RtpDataChannel { + + private static final String DEFAULT_UDP_TRANSPORT_FORMAT = "RTP/AVP;unicast;client_port=%d-%d"; + + private final UdpDataSource dataSource; + + /** The associated RTCP channel; {@code null} if the current channel is an RTCP channel. */ + @Nullable private UdpDataSourceRtpDataChannel rtcpChannel; + + /** + * Creates a new instance. + * + * @param socketTimeoutMs The timeout for {@link #read} in milliseconds. 
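+ *     A timeout while waiting for a packet is surfaced by {@link #read} as {@link C#RESULT_END_OF_INPUT} rather than as an error.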
+ */ + public UdpDataSourceRtpDataChannel(long socketTimeoutMs) { + dataSource = + new UdpDataSource(UdpDataSource.DEFAULT_MAX_PACKET_SIZE, Ints.checkedCast(socketTimeoutMs)); + } + + @Override + public String getTransport() { + int dataPortNumber = getLocalPort(); + checkState(dataPortNumber != C.INDEX_UNSET); // Assert open() is called. + return Util.formatInvariant(DEFAULT_UDP_TRANSPORT_FORMAT, dataPortNumber, dataPortNumber + 1); + } + + @Override + public int getLocalPort() { + int port = dataSource.getLocalPort(); + return port == UdpDataSource.UDP_PORT_UNSET ? C.INDEX_UNSET : port; + } + + @Nullable + @Override + public RtspMessageChannel.InterleavedBinaryDataListener getInterleavedBinaryDataListener() { + return null; + } + + @Override + public void addTransferListener(TransferListener transferListener) { + dataSource.addTransferListener(transferListener); + } + + @Override + public long open(DataSpec dataSpec) throws IOException { + return dataSource.open(dataSpec); + } + + @Nullable + @Override + public Uri getUri() { + return dataSource.getUri(); + } + + @Override + public void close() { + dataSource.close(); + + if (rtcpChannel != null) { + rtcpChannel.close(); + } + } + + @Override + public int read(byte[] buffer, int offset, int length) throws IOException { + try { + return dataSource.read(buffer, offset, length); + } catch (UdpDataSource.UdpDataSourceException e) { + if (e.reason == PlaybackException.ERROR_CODE_IO_NETWORK_CONNECTION_TIMEOUT) { + return C.RESULT_END_OF_INPUT; + } else { + throw e; + } + } + } + + public void setRtcpChannel(UdpDataSourceRtpDataChannel rtcpChannel) { + checkArgument(this != rtcpChannel); + this.rtcpChannel = rtcpChannel; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/UdpDataSourceRtpDataChannelFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/UdpDataSourceRtpDataChannelFactory.java new file mode 100644 index 0000000000..b4c0b4c5ed --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/UdpDataSourceRtpDataChannelFactory.java @@ -0,0 +1,73 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp; + +import com.google.android.exoplayer2.upstream.DataSourceUtil; +import java.io.IOException; + +/** Factory for {@link UdpDataSourceRtpDataChannel}. */ +/* package */ final class UdpDataSourceRtpDataChannelFactory implements RtpDataChannel.Factory { + + private final long socketTimeoutMs; + + /** + * Creates a new instance. + * + * @param socketTimeoutMs A positive number of milliseconds to wait before lack of received RTP + * packets is treated as the end of input. 
+ */ + public UdpDataSourceRtpDataChannelFactory(long socketTimeoutMs) { + this.socketTimeoutMs = socketTimeoutMs; + } + + @Override + public RtpDataChannel createAndOpenDataChannel(int trackId) throws IOException { + UdpDataSourceRtpDataChannel firstChannel = new UdpDataSourceRtpDataChannel(socketTimeoutMs); + UdpDataSourceRtpDataChannel secondChannel = new UdpDataSourceRtpDataChannel(socketTimeoutMs); + + try { + // From RFC3550 Section 11: "For UDP and similar protocols, RTP SHOULD use an even destination + // port number and the corresponding RTCP stream SHOULD use the next higher (odd) destination + // port number". Some RTSP servers are strict about this rule. We open a data channel first, + // and depending its port number, open the next data channel with a port number that is either + // the higher or the lower. + + // Using port zero will cause the system to generate a port. + firstChannel.open(RtpUtils.getIncomingRtpDataSpec(/* portNumber= */ 0)); + int firstPort = firstChannel.getLocalPort(); + boolean isFirstPortEven = firstPort % 2 == 0; + int portToOpen = isFirstPortEven ? firstPort + 1 : firstPort - 1; + secondChannel.open(RtpUtils.getIncomingRtpDataSpec(/* portNumber= */ portToOpen)); + + if (isFirstPortEven) { + firstChannel.setRtcpChannel(secondChannel); + return firstChannel; + } else { + secondChannel.setRtcpChannel(firstChannel); + return secondChannel; + } + } catch (IOException e) { + DataSourceUtil.closeQuietly(firstChannel); + DataSourceUtil.closeQuietly(secondChannel); + throw e; + } + } + + @Override + public RtpDataChannel.Factory createFallbackDataChannelFactory() { + return new TransferRtpDataChannelFactory(/* timeoutMs= */ socketTimeoutMs); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/package-info.java new file mode 100644 index 0000000000..3b5e2fefa0 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.source.rtsp; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/DefaultRtpPayloadReaderFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/DefaultRtpPayloadReaderFactory.java new file mode 100644 index 0000000000..710eba8cc9 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/DefaultRtpPayloadReaderFactory.java @@ -0,0 +1,67 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.source.rtsp.reader; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat; +import com.google.android.exoplayer2.util.MimeTypes; + +/** Default {@link RtpPayloadReader.Factory} implementation. */ +/* package */ public final class DefaultRtpPayloadReaderFactory + implements RtpPayloadReader.Factory { + + @Override + @Nullable + public RtpPayloadReader createPayloadReader(RtpPayloadFormat payloadFormat) { + switch (checkNotNull(payloadFormat.format.sampleMimeType)) { + case MimeTypes.AUDIO_AC3: + return new RtpAc3Reader(payloadFormat); + case MimeTypes.AUDIO_AAC: + if (payloadFormat.mediaEncoding.equals(RtpPayloadFormat.RTP_MEDIA_MPEG4_LATM_AUDIO)) { + return new RtpMp4aReader(payloadFormat); + } else { + return new RtpAacReader(payloadFormat); + } + case MimeTypes.AUDIO_AMR_NB: + case MimeTypes.AUDIO_AMR_WB: + return new RtpAmrReader(payloadFormat); + case MimeTypes.AUDIO_OPUS: + return new RtpOpusReader(payloadFormat); + case MimeTypes.AUDIO_RAW: + case MimeTypes.AUDIO_ALAW: + case MimeTypes.AUDIO_MLAW: + return new RtpPcmReader(payloadFormat); + case MimeTypes.VIDEO_H263: + return new RtpH263Reader(payloadFormat); + case MimeTypes.VIDEO_H264: + return new RtpH264Reader(payloadFormat); + case MimeTypes.VIDEO_H265: + return new RtpH265Reader(payloadFormat); + case MimeTypes.VIDEO_MP4V: + return new RtpMpeg4Reader(payloadFormat); + case MimeTypes.VIDEO_VP8: + return new RtpVp8Reader(payloadFormat); + case MimeTypes.VIDEO_VP9: + return new RtpVp9Reader(payloadFormat); + default: + // No supported reader, returning null. + } + return null; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpAacReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpAacReader.java new file mode 100644 index 0000000000..82ed9ea17c --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpAacReader.java @@ -0,0 +1,157 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.source.rtsp.reader; + +import static com.google.android.exoplayer2.source.rtsp.reader.RtpReaderUtils.toSampleTimeUs; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat; +import com.google.android.exoplayer2.util.ParsableBitArray; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Ascii; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Parses a AAC byte stream carried on RTP packets and extracts individual samples. Interleaving + * mode is not supported. + */ +/* package */ final class RtpAacReader implements RtpPayloadReader { + + /** AAC low bit rate mode, RFC3640 Section 3.3.5. */ + private static final String AAC_LOW_BITRATE_MODE = "AAC-lbr"; + /** AAC high bit rate mode, RFC3640 Section 3.3.6. */ + private static final String AAC_HIGH_BITRATE_MODE = "AAC-hbr"; + + private static final String TAG = "RtpAacReader"; + + private final RtpPayloadFormat payloadFormat; + private final ParsableBitArray auHeaderScratchBit; + private final int sampleRate; + private final int auSizeFieldBitSize; + private final int auIndexFieldBitSize; + private final int numBitsInAuHeader; + + private long firstReceivedTimestamp; + private @MonotonicNonNull TrackOutput trackOutput; + private long startTimeOffsetUs; + + public RtpAacReader(RtpPayloadFormat payloadFormat) { + this.payloadFormat = payloadFormat; + this.auHeaderScratchBit = new ParsableBitArray(); + this.sampleRate = this.payloadFormat.clockRate; + + // mode attribute is mandatory. See RFC3640 Section 4.1. + String mode = checkNotNull(payloadFormat.fmtpParameters.get("mode")); + if (Ascii.equalsIgnoreCase(mode, AAC_HIGH_BITRATE_MODE)) { + auSizeFieldBitSize = 13; + auIndexFieldBitSize = 3; + } else if (Ascii.equalsIgnoreCase(mode, AAC_LOW_BITRATE_MODE)) { + auSizeFieldBitSize = 6; + auIndexFieldBitSize = 2; + } else { + throw new UnsupportedOperationException("AAC mode not supported"); + } + // TODO(b/172331505) Add support for other AU-Header fields, like CTS-flag, CTS-delta, etc. + numBitsInAuHeader = auIndexFieldBitSize + auSizeFieldBitSize; + } + + // RtpPayloadReader implementation. + + @Override + public void createTracks(ExtractorOutput extractorOutput, int trackId) { + trackOutput = extractorOutput.track(trackId, C.TRACK_TYPE_AUDIO); + trackOutput.format(payloadFormat.format); + } + + @Override + public void onReceivingFirstPacket(long timestamp, int sequenceNumber) { + this.firstReceivedTimestamp = timestamp; + } + + @Override + public void consume( + ParsableByteArray data, long timestamp, int sequenceNumber, boolean rtpMarker) { + /* + AAC as RTP payload (RFC3640): + +---------+-----------+-----------+---------------+ + | RTP | AU Header | Auxiliary | Access Unit | + | Header | Section | Section | Data Section | + +---------+-----------+-----------+---------------+ + <----------RTP Packet Payload-----------> + + Access Unit(AU) Header section + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+ + |AU-headers-length|AU-header|AU-header| |AU-header|padding| + |in bits | (1) | (2) | | (n) | bits | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. 
-+-+-+-+-+-+-+-+-+-+ + + The 16-bit AU-headers-length is mandatory in the AAC-lbr and AAC-hbr modes that we support. + */ + checkNotNull(trackOutput); + // Reads AU-header-length that specifies the length in bits of the immediately following + // AU-headers, excluding the padding. + int auHeadersBitLength = data.readShort(); + int auHeaderCount = auHeadersBitLength / numBitsInAuHeader; + long sampleTimeUs = + toSampleTimeUs(startTimeOffsetUs, timestamp, firstReceivedTimestamp, sampleRate); + + // Points to the start of the AU-headers (right past the AU-headers-length). + auHeaderScratchBit.reset(data); + if (auHeaderCount == 1) { + // Reads the first AU-Header that contains AU-Size and AU-Index/AU-Index-delta. + int auSize = auHeaderScratchBit.readBits(auSizeFieldBitSize); + auHeaderScratchBit.skipBits(auIndexFieldBitSize); + + // Outputs all the received data, whether fragmented or not. + trackOutput.sampleData(data, data.bytesLeft()); + if (rtpMarker) { + outputSampleMetadata(trackOutput, sampleTimeUs, auSize); + } + } else { + // Skips the AU-headers section to the data section, accounts for the possible padding bits. + data.skipBytes((auHeadersBitLength + 7) / 8); + for (int i = 0; i < auHeaderCount; i++) { + int auSize = auHeaderScratchBit.readBits(auSizeFieldBitSize); + auHeaderScratchBit.skipBits(auIndexFieldBitSize); + + trackOutput.sampleData(data, auSize); + outputSampleMetadata(trackOutput, sampleTimeUs, auSize); + // The sample time of the of the i-th AU (RFC3640 Page 17): + // (timestamp-of-the-first-AU) + i * (access-unit-duration) + sampleTimeUs += + Util.scaleLargeTimestamp( + auHeaderCount, /* multiplier= */ C.MICROS_PER_SECOND, /* divisor= */ sampleRate); + } + } + } + + @Override + public void seek(long nextRtpTimestamp, long timeUs) { + firstReceivedTimestamp = nextRtpTimestamp; + startTimeOffsetUs = timeUs; + } + + // Internal methods. + + private static void outputSampleMetadata(TrackOutput trackOutput, long sampleTimeUs, int size) { + trackOutput.sampleMetadata( + sampleTimeUs, C.BUFFER_FLAG_KEY_FRAME, size, /* offset= */ 0, /* cryptoData= */ null); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpAc3Reader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpAc3Reader.java new file mode 100644 index 0000000000..aa301e8198 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpAc3Reader.java @@ -0,0 +1,209 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.source.rtsp.reader; + +import static com.google.android.exoplayer2.source.rtsp.reader.RtpReaderUtils.toSampleTimeUs; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Util.castNonNull; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.audio.Ac3Util; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat; +import com.google.android.exoplayer2.util.ParsableBitArray; +import com.google.android.exoplayer2.util.ParsableByteArray; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** Parses an AC3 byte stream carried on RTP packets, and extracts AC3 frames. */ +/* package */ public final class RtpAc3Reader implements RtpPayloadReader { + + /** AC3 frame types defined in RFC4184 Section 4.1.1. */ + private static final int AC3_FRAME_TYPE_COMPLETE_FRAME = 0; + /** Initial fragment of frame which includes the first 5/8ths of the frame. */ + private static final int AC3_FRAME_TYPE_INITIAL_FRAGMENT_A = 1; + /** Initial fragment of frame which does not include the first 5/8ths of the frame. */ + private static final int AC3_FRAME_TYPE_INITIAL_FRAGMENT_B = 2; + + private static final int AC3_FRAME_TYPE_NON_INITIAL_FRAGMENT = 3; + + /** AC3 payload header size in bytes. */ + private static final int AC3_PAYLOAD_HEADER_SIZE = 2; + + private final RtpPayloadFormat payloadFormat; + private final ParsableBitArray scratchBitBuffer; + + private @MonotonicNonNull TrackOutput trackOutput; + private int numBytesPendingMetadataOutput; + private long firstReceivedTimestamp; + private long sampleTimeUsOfFramePendingMetadataOutput; + private long startTimeOffsetUs; + + public RtpAc3Reader(RtpPayloadFormat payloadFormat) { + this.payloadFormat = payloadFormat; + scratchBitBuffer = new ParsableBitArray(); + firstReceivedTimestamp = C.TIME_UNSET; + } + + @Override + public void createTracks(ExtractorOutput extractorOutput, int trackId) { + trackOutput = extractorOutput.track(trackId, C.TRACK_TYPE_AUDIO); + trackOutput.format(payloadFormat.format); + } + + @Override + public void onReceivingFirstPacket(long timestamp, int sequenceNumber) { + checkState(firstReceivedTimestamp == C.TIME_UNSET); + firstReceivedTimestamp = timestamp; + } + + @Override + public void consume( + ParsableByteArray data, long timestamp, int sequenceNumber, boolean rtpMarker) { + /* + AC-3 payload as an RTP payload (RFC4184). + +-+-+-+-+-+-+-+-+-+-+-+-+-+- .. +-+-+-+-+-+-+-+ + | Payload | Frame | Frame | | Frame | + | Header | (1) | (2) | | (n) | + +-+-+-+-+-+-+-+-+-+-+-+-+-+- .. +-+-+-+-+-+-+-+ + + The payload header: + 0 1 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | MBZ | FT| NF | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + FT: frame type. + NF: number of frames/fragments. + */ + int frameType = data.readUnsignedByte() & 0x3; + int numOfFrames = data.readUnsignedByte() & 0xFF; + + long sampleTimeUs = + toSampleTimeUs( + startTimeOffsetUs, timestamp, firstReceivedTimestamp, payloadFormat.clockRate); + + switch (frameType) { + case AC3_FRAME_TYPE_COMPLETE_FRAME: + maybeOutputSampleMetadata(); + if (numOfFrames == 1) { + // Single AC3 frame in one RTP packet. 
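+ // For example, payload header bytes 0x00 0x01 decode to FT=0 (complete frame) and NF=1, so the
+ // remainder of the packet is exactly one AC-3 syncframe.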
+ processSingleFramePacket(data, sampleTimeUs); + } else { + // Multiple AC3 frames in one RTP packet. + processMultiFramePacket(data, numOfFrames, sampleTimeUs); + } + break; + + case AC3_FRAME_TYPE_INITIAL_FRAGMENT_A: + case AC3_FRAME_TYPE_INITIAL_FRAGMENT_B: + maybeOutputSampleMetadata(); + // Falls through. + case AC3_FRAME_TYPE_NON_INITIAL_FRAGMENT: + // The content of an AC3 frame is split into multiple RTP packets. + processFragmentedPacket(data, rtpMarker, frameType, sampleTimeUs); + break; + + default: + throw new IllegalArgumentException(String.valueOf(frameType)); + } + } + + @Override + public void seek(long nextRtpTimestamp, long timeUs) { + firstReceivedTimestamp = nextRtpTimestamp; + startTimeOffsetUs = timeUs; + } + + private void processSingleFramePacket(ParsableByteArray data, long sampleTimeUs) { + int frameSize = data.bytesLeft(); + checkNotNull(trackOutput).sampleData(data, frameSize); + castNonNull(trackOutput) + .sampleMetadata( + /* timeUs= */ sampleTimeUs, + /* flags= */ C.BUFFER_FLAG_KEY_FRAME, + /* size= */ frameSize, + /* offset= */ 0, + /* cryptoData= */ null); + } + + private void processMultiFramePacket(ParsableByteArray data, int numOfFrames, long sampleTimeUs) { + // The size of each frame must be obtained by reading AC3 sync frame. + scratchBitBuffer.reset(data.getData()); + // Move the read location after the AC3 payload header. + scratchBitBuffer.skipBytes(AC3_PAYLOAD_HEADER_SIZE); + + for (int i = 0; i < numOfFrames; i++) { + Ac3Util.SyncFrameInfo frameInfo = Ac3Util.parseAc3SyncframeInfo(scratchBitBuffer); + + checkNotNull(trackOutput).sampleData(data, frameInfo.frameSize); + castNonNull(trackOutput) + .sampleMetadata( + /* timeUs= */ sampleTimeUs, + /* flags= */ C.BUFFER_FLAG_KEY_FRAME, + /* size= */ frameInfo.frameSize, + /* offset= */ 0, + /* cryptoData= */ null); + + sampleTimeUs += (frameInfo.sampleCount / frameInfo.sampleRate) * C.MICROS_PER_SECOND; + // Advance the position by the number of bytes read. + scratchBitBuffer.skipBytes(frameInfo.frameSize); + } + } + + private void processFragmentedPacket( + ParsableByteArray data, boolean isFrameBoundary, int frameType, long sampleTimeUs) { + int bytesToWrite = data.bytesLeft(); + checkNotNull(trackOutput).sampleData(data, bytesToWrite); + numBytesPendingMetadataOutput += bytesToWrite; + sampleTimeUsOfFramePendingMetadataOutput = sampleTimeUs; + + if (isFrameBoundary && frameType == AC3_FRAME_TYPE_NON_INITIAL_FRAGMENT) { + // Last RTP packet in the series of fragmentation packets. + outputSampleMetadataForFragmentedPackets(); + } + } + + /** + * Checks and outputs sample metadata, if the last packet of a series of fragmented packets is + * lost. + * + *

<p>Call this method only when receiving an initial packet, i.e. on packets with type
+ *
+ * <ul>
+ * <li>{@link #AC3_FRAME_TYPE_COMPLETE_FRAME},
+ * <li>{@link #AC3_FRAME_TYPE_INITIAL_FRAGMENT_A}, or
+ * <li>{@link #AC3_FRAME_TYPE_INITIAL_FRAGMENT_B}.
+ * </ul>
        + */ + private void maybeOutputSampleMetadata() { + if (numBytesPendingMetadataOutput > 0) { + outputSampleMetadataForFragmentedPackets(); + } + } + + private void outputSampleMetadataForFragmentedPackets() { + castNonNull(trackOutput) + .sampleMetadata( + /* timeUs= */ sampleTimeUsOfFramePendingMetadataOutput, + /* flags= */ C.BUFFER_FLAG_KEY_FRAME, + /* size= */ numBytesPendingMetadataOutput, + /* offset= */ 0, + /* cryptoData= */ null); + numBytesPendingMetadataOutput = 0; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpAmrReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpAmrReader.java new file mode 100644 index 0000000000..35ffd79f90 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpAmrReader.java @@ -0,0 +1,187 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp.reader; + +import static com.google.android.exoplayer2.source.rtsp.reader.RtpReaderUtils.toSampleTimeUs; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.source.rtsp.RtpPacket; +import com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Parses an AMR byte stream carried on RTP packets and extracts individual samples. Interleaving + * mode is not supported. Refer to RFC4867 for more details. + */ +/* package */ final class RtpAmrReader implements RtpPayloadReader { + private static final String TAG = "RtpAmrReader"; + /** + * The frame size in bytes, including header (1 byte), for each of the 16 frame types for AMR-NB + * (narrow band). AMR-NB supports eight narrow band speech encoding modes with bit rates between + * 4.75 and 12.2 kbps defined in RFC4867 Section 3.1. Refer to table 1a in 3GPP TS 26.101 for the + * mapping definition. 
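+ * For example, frame type 7 (12.2 kbps GSM-EFR) maps to 32 bytes: a 31-byte octet-aligned speech frame plus the 1-byte table-of-contents entry.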
+ */ + private static final int[] AMR_NB_FRAME_TYPE_INDEX_TO_FRAME_SIZE = { + 13, // 4.75kbps + 14, // 5.15kbps + 16, // 5.90kbps + 18, // 6.70kbps PDC-EFR + 20, // 7.40kbps TDMA-EFR + 21, // 7.95kbps + 27, // 10.2kbps + 32, // 12.2kbps GSM-EFR + 6, // AMR SID + 7, // GSM-EFR SID + 6, // TDMA-EFR SID + 6, // PDC-EFR SID + 1, // Future use + 1, // Future use + 1, // Future use + 1 // No data + }; + + /** + * The frame size in bytes, including header (1 byte), for each of the 16 frame types for AMR-WB + * (wide band). AMR-WB supports nine wide band speech encoding modes with bit rates between 6.6 to + * 23.85 kbps defined in RFC4867 Section 3.2. Refer to table 1a in 3GPP TS 26.201. for the mapping + * definition. + */ + private static final int[] AMR_WB_FRAME_TYPE_INDEX_TO_FRAME_SIZE = { + 18, // 6.60kbps + 24, // 8.85kbps + 33, // 12.65kbps + 37, // 14.25kbps + 41, // 15.85kbps + 47, // 18.25kbps + 51, // 19.85kbps + 59, // 23.05kbps + 61, // 23.85kbps + 6, // AMR-WB SID + 1, // Future use + 1, // Future use + 1, // Future use + 1, // Future use + 1, // speech lost + 1 // No data + }; + + private final RtpPayloadFormat payloadFormat; + private final boolean isWideBand; + private final int sampleRate; + + private @MonotonicNonNull TrackOutput trackOutput; + private long firstReceivedTimestamp; + private long startTimeOffsetUs; + private int previousSequenceNumber; + + public RtpAmrReader(RtpPayloadFormat payloadFormat) { + this.payloadFormat = payloadFormat; + this.isWideBand = + MimeTypes.AUDIO_AMR_WB.equals(checkNotNull(payloadFormat.format.sampleMimeType)); + this.sampleRate = payloadFormat.clockRate; + this.firstReceivedTimestamp = C.TIME_UNSET; + this.previousSequenceNumber = C.INDEX_UNSET; + // Start time offset must be 0 before the first seek. + this.startTimeOffsetUs = 0; + } + + // RtpPayloadReader implementation. + + @Override + public void createTracks(ExtractorOutput extractorOutput, int trackId) { + trackOutput = extractorOutput.track(trackId, C.TRACK_TYPE_AUDIO); + trackOutput.format(payloadFormat.format); + } + + @Override + public void onReceivingFirstPacket(long timestamp, int sequenceNumber) { + this.firstReceivedTimestamp = timestamp; + } + + @Override + public void consume( + ParsableByteArray data, long timestamp, int sequenceNumber, boolean rtpMarker) { + checkStateNotNull(trackOutput); + // Check that this packet is in the sequence of the previous packet. + if (previousSequenceNumber != C.INDEX_UNSET) { + int expectedSequenceNumber = RtpPacket.getNextSequenceNumber(previousSequenceNumber); + if (sequenceNumber != expectedSequenceNumber) { + Log.w( + TAG, + Util.formatInvariant( + "Received RTP packet with unexpected sequence number. Expected: %d; received: %d.", + expectedSequenceNumber, sequenceNumber)); + } + } + // + // AMR as RTP payload (RFC4867 Section 4.2). + // + // +----------------+-------------------+---------------- + // | payload header | table of contents | speech data ... + // +----------------+-------------------+---------------- + // + // Payload header (RFC4867 Section 4.4.1). + // + // The header won't contain ILL and ILP, as interleaving is not currently supported. + // +-+-+-+-+-+-+-+- - - - - - - - + // | CMR |R|R|R|R| ILL | ILP | + // +-+-+-+-+-+-+-+- - - - - - - - + // + // Skip CMR and reserved bits. + data.skipBytes(1); + // Loop over sampleSize to send multiple frames along with appropriate timestamp when compound + // payload support is added. 
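+ // Each octet-aligned table-of-contents entry is F (1 bit), FT (4 bits), Q (1 bit) plus 2 padding
+ // bits (RFC4867 Section 4.4.2), so FT is obtained by shifting right by 3 and masking 4 bits.
+ // For example, a ToC octet of 0x3C yields FT=7, i.e. a 32-byte 12.2 kbps AMR-NB frame.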
+ int frameType = (data.peekUnsignedByte() >> 3) & 0x0f; + int frameSize = getFrameSize(frameType, isWideBand); + int sampleSize = data.bytesLeft(); + checkArgument(sampleSize == frameSize, "compound payload not supported currently"); + trackOutput.sampleData(data, sampleSize); + long sampleTimeUs = + toSampleTimeUs(startTimeOffsetUs, timestamp, firstReceivedTimestamp, sampleRate); + trackOutput.sampleMetadata( + sampleTimeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, /* offset= */ 0, /* cryptoData= */ null); + + previousSequenceNumber = sequenceNumber; + } + + @Override + public void seek(long nextRtpTimestamp, long timeUs) { + firstReceivedTimestamp = nextRtpTimestamp; + startTimeOffsetUs = timeUs; + } + + // Internal methods. + + public static int getFrameSize(int frameType, boolean isWideBand) { + checkArgument( + // Valid frame types are defined in RFC4867 Section 4.3.1. + (frameType >= 0 && frameType <= 8) || frameType == 15, + "Illegal AMR " + (isWideBand ? "WB" : "NB") + " frame type " + frameType); + + return isWideBand + ? AMR_WB_FRAME_TYPE_INDEX_TO_FRAME_SIZE[frameType] + : AMR_NB_FRAME_TYPE_INDEX_TO_FRAME_SIZE[frameType]; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpH263Reader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpH263Reader.java new file mode 100644 index 0000000000..51a612da42 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpH263Reader.java @@ -0,0 +1,247 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp.reader; + +import static com.google.android.exoplayer2.source.rtsp.reader.RtpReaderUtils.toSampleTimeUs; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.source.rtsp.RtpPacket; +import com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Parses a H263 byte stream carried on RTP packets, and extracts H263 frames as defined in RFC4629. + */ +/* package */ final class RtpH263Reader implements RtpPayloadReader { + private static final String TAG = "RtpH263Reader"; + + private static final int MEDIA_CLOCK_FREQUENCY = 90_000; + + /** I-frame VOP unit type. */ + private static final int I_VOP = 0; + + /** Picture start code, P=1, V=0, PLEN=0. Refer to RFC4629 Section 6.1. 
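With P=1 the two leading zero bytes of the picture start code are not transmitted; the reader writes them back into the buffer before passing the payload to the track output.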
*/ + private static final int PICTURE_START_CODE = 128; + + private final RtpPayloadFormat payloadFormat; + + private @MonotonicNonNull TrackOutput trackOutput; + + /** + * First received RTP timestamp. All RTP timestamps are dimension-less, the time base is defined + * by {@link #MEDIA_CLOCK_FREQUENCY}. + */ + private long firstReceivedTimestamp; + + /** The combined size of a sample that is fragmented into multiple RTP packets. */ + private int fragmentedSampleSizeBytes; + + private int previousSequenceNumber; + + private int width; + private int height; + private boolean isKeyFrame; + private boolean isOutputFormatSet; + private long startTimeOffsetUs; + private long fragmentedSampleTimeUs; + /** + * Whether the first packet of a H263 frame is received, it mark the start of a H263 partition. A + * H263 frame can be split into multiple RTP packets. + */ + private boolean gotFirstPacketOfH263Frame; + + /** Creates an instance. */ + public RtpH263Reader(RtpPayloadFormat payloadFormat) { + this.payloadFormat = payloadFormat; + firstReceivedTimestamp = C.TIME_UNSET; + previousSequenceNumber = C.INDEX_UNSET; + } + + @Override + public void createTracks(ExtractorOutput extractorOutput, int trackId) { + trackOutput = extractorOutput.track(trackId, C.TRACK_TYPE_VIDEO); + trackOutput.format(payloadFormat.format); + } + + @Override + public void onReceivingFirstPacket(long timestamp, int sequenceNumber) { + checkState(firstReceivedTimestamp == C.TIME_UNSET); + firstReceivedTimestamp = timestamp; + } + + @Override + public void consume( + ParsableByteArray data, long timestamp, int sequenceNumber, boolean rtpMarker) { + checkStateNotNull(trackOutput); + + // H263 Header Payload Header, RFC4629 Section 5.1. + // 0 1 + // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | RR |P|V| PLEN |PEBIT| + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + int currentPosition = data.getPosition(); + int header = data.readUnsignedShort(); + boolean pBitIsSet = (header & 0x400) > 0; + + // Check if optional V (Video Redundancy Coding), PLEN or PEBIT is present, RFC4629 Section 5.1. + if ((header & 0x200) != 0 || (header & 0x1F8) != 0 || (header & 0x7) != 0) { + Log.w( + TAG, + "Dropping packet: video reduncancy coding is not supported, packet header VRC, or PLEN or" + + " PEBIT is non-zero"); + return; + } + + if (pBitIsSet) { + if (gotFirstPacketOfH263Frame && fragmentedSampleSizeBytes > 0) { + // Received new H263 fragment, output data of previous fragment to decoder. + outputSampleMetadataForFragmentedPackets(); + } + gotFirstPacketOfH263Frame = true; + + int payloadStartCode = data.peekUnsignedByte() & 0xFC; + // Packets that begin with a Picture Start Code(100000). Refer RFC4629 Section 6.1. + if (payloadStartCode < PICTURE_START_CODE) { + Log.w(TAG, "Picture start Code (PSC) missing, dropping packet."); + return; + } + // Setting first two bytes of the start code. Refer RFC4629 Section 6.1.1. + data.getData()[currentPosition] = 0; + data.getData()[currentPosition + 1] = 0; + data.setPosition(currentPosition); + } else if (gotFirstPacketOfH263Frame) { + // Check that this packet is in the sequence of the previous packet. + int expectedSequenceNumber = RtpPacket.getNextSequenceNumber(previousSequenceNumber); + if (sequenceNumber < expectedSequenceNumber) { + Log.w( + TAG, + Util.formatInvariant( + "Received RTP packet with unexpected sequence number. Expected: %d; received: %d." 
+ + " Dropping packet.", + expectedSequenceNumber, sequenceNumber)); + return; + } + } else { + Log.w( + TAG, + "First payload octet of the H263 packet is not the beginning of a new H263 partition," + + " Dropping current packet."); + return; + } + + if (fragmentedSampleSizeBytes == 0) { + parseVopHeader(data, isOutputFormatSet); + if (!isOutputFormatSet && isKeyFrame) { + if (width != payloadFormat.format.width || height != payloadFormat.format.height) { + trackOutput.format( + payloadFormat.format.buildUpon().setWidth(width).setHeight(height).build()); + } + isOutputFormatSet = true; + } + } + int fragmentSize = data.bytesLeft(); + // Write the video sample. + trackOutput.sampleData(data, fragmentSize); + fragmentedSampleSizeBytes += fragmentSize; + fragmentedSampleTimeUs = + toSampleTimeUs(startTimeOffsetUs, timestamp, firstReceivedTimestamp, MEDIA_CLOCK_FREQUENCY); + + if (rtpMarker) { + outputSampleMetadataForFragmentedPackets(); + } + previousSequenceNumber = sequenceNumber; + } + + @Override + public void seek(long nextRtpTimestamp, long timeUs) { + firstReceivedTimestamp = nextRtpTimestamp; + fragmentedSampleSizeBytes = 0; + startTimeOffsetUs = timeUs; + } + + /** + * Parses and set VOP Coding type and resolution. The {@linkplain ParsableByteArray#getPosition() + * position} is preserved. + */ + private void parseVopHeader(ParsableByteArray data, boolean gotResolution) { + // Picture Segment Packets (RFC4629 Section 6.1). + // Search for SHORT_VIDEO_START_MARKER (0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0). + int currentPosition = data.getPosition(); + + /* + * Parse short video header. + * + * These values are taken from Android's software H263 decoder. + */ + long shortVideoHeader = data.readUnsignedInt(); + if (((shortVideoHeader >> 10) & 0x3F) == 0x20) { + int header = data.peekUnsignedByte(); + int vopType = ((header >> 1) & 0x1); + if (!gotResolution && vopType == I_VOP) { + /* + * Parse resolution from source format. + * + * These values are taken from Android's software H263 decoder. + */ + int sourceFormat = ((header >> 2) & 0x07); + if (sourceFormat == 1) { + width = 128; + height = 96; + } else { + width = 176 << (sourceFormat - 2); + height = 144 << (sourceFormat - 2); + } + } + data.setPosition(currentPosition); + isKeyFrame = vopType == I_VOP; + return; + } + data.setPosition(currentPosition); + isKeyFrame = false; + } + + /** + * Outputs sample metadata of the received fragmented packets. + * + *

        Call this method only after receiving an end of a H263 partition. + */ + private void outputSampleMetadataForFragmentedPackets() { + checkNotNull(trackOutput) + .sampleMetadata( + fragmentedSampleTimeUs, + isKeyFrame ? C.BUFFER_FLAG_KEY_FRAME : 0, + fragmentedSampleSizeBytes, + /* offset= */ 0, + /* cryptoData= */ null); + fragmentedSampleSizeBytes = 0; + fragmentedSampleTimeUs = C.TIME_UNSET; + isKeyFrame = false; + gotFirstPacketOfH263Frame = false; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpH264Reader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpH264Reader.java new file mode 100644 index 0000000000..35b1b5d781 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpH264Reader.java @@ -0,0 +1,296 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp.reader; + +import static com.google.android.exoplayer2.source.rtsp.reader.RtpReaderUtils.toSampleTimeUs; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.source.rtsp.RtpPacket; +import com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.NalUnitUtil; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; + +/** Parses an H264 byte stream carried on RTP packets, and extracts H264 Access Units. */ +/* package */ final class RtpH264Reader implements RtpPayloadReader { + private static final String TAG = "RtpH264Reader"; + + private static final int MEDIA_CLOCK_FREQUENCY = 90_000; + + /** Offset of payload data within a FU type A payload. */ + private static final int FU_PAYLOAD_OFFSET = 2; + + /** Single Time Aggregation Packet type A. */ + private static final int RTP_PACKET_TYPE_STAP_A = 24; + /** Fragmentation Unit type A. */ + private static final int RTP_PACKET_TYPE_FU_A = 28; + + /** IDR NAL unit type. */ + private static final int NAL_UNIT_TYPE_IDR = 5; + + /** Scratch for Fragmentation Unit RTP packets. 
*/ + private final ParsableByteArray fuScratchBuffer; + + private final ParsableByteArray nalStartCodeArray = + new ParsableByteArray(NalUnitUtil.NAL_START_CODE); + + private final RtpPayloadFormat payloadFormat; + + private @MonotonicNonNull TrackOutput trackOutput; + private @C.BufferFlags int bufferFlags; + + private long firstReceivedTimestamp; + private int previousSequenceNumber; + /** The combined size of a sample that is fragmented into multiple RTP packets. */ + private int fragmentedSampleSizeBytes; + + private long startTimeOffsetUs; + + /** Creates an instance. */ + public RtpH264Reader(RtpPayloadFormat payloadFormat) { + this.payloadFormat = payloadFormat; + fuScratchBuffer = new ParsableByteArray(); + firstReceivedTimestamp = C.TIME_UNSET; + previousSequenceNumber = C.INDEX_UNSET; + } + + @Override + public void createTracks(ExtractorOutput extractorOutput, int trackId) { + trackOutput = extractorOutput.track(trackId, C.TRACK_TYPE_VIDEO); + + castNonNull(trackOutput).format(payloadFormat.format); + } + + @Override + public void onReceivingFirstPacket(long timestamp, int sequenceNumber) {} + + @Override + public void consume(ParsableByteArray data, long timestamp, int sequenceNumber, boolean rtpMarker) + throws ParserException { + + int rtpH264PacketMode; + try { + // RFC6184 Section 5.6, 5.7 and 5.8. + rtpH264PacketMode = data.getData()[0] & 0x1F; + } catch (IndexOutOfBoundsException e) { + throw ParserException.createForMalformedManifest(/* message= */ null, e); + } + + checkStateNotNull(trackOutput); + if (rtpH264PacketMode > 0 && rtpH264PacketMode < 24) { + processSingleNalUnitPacket(data); + } else if (rtpH264PacketMode == RTP_PACKET_TYPE_STAP_A) { + processSingleTimeAggregationPacket(data); + } else if (rtpH264PacketMode == RTP_PACKET_TYPE_FU_A) { + processFragmentationUnitPacket(data, sequenceNumber); + } else { + throw ParserException.createForMalformedManifest( + String.format("RTP H264 packetization mode [%d] not supported.", rtpH264PacketMode), + /* cause= */ null); + } + + if (rtpMarker) { + if (firstReceivedTimestamp == C.TIME_UNSET) { + firstReceivedTimestamp = timestamp; + } + + long timeUs = + toSampleTimeUs( + startTimeOffsetUs, timestamp, firstReceivedTimestamp, MEDIA_CLOCK_FREQUENCY); + trackOutput.sampleMetadata( + timeUs, bufferFlags, fragmentedSampleSizeBytes, /* offset= */ 0, /* cryptoData= */ null); + fragmentedSampleSizeBytes = 0; + } + + previousSequenceNumber = sequenceNumber; + } + + @Override + public void seek(long nextRtpTimestamp, long timeUs) { + firstReceivedTimestamp = nextRtpTimestamp; + fragmentedSampleSizeBytes = 0; + startTimeOffsetUs = timeUs; + } + + // Internal methods. + + /** + * Processes Single NAL Unit packet (RFC6184 Section 5.6). + * + *

        Outputs the single NAL Unit (with start code prepended) to {@link #trackOutput}. Sets {@link + * #bufferFlags} and {@link #fragmentedSampleSizeBytes} accordingly. + */ + @RequiresNonNull("trackOutput") + private void processSingleNalUnitPacket(ParsableByteArray data) { + // Example of a Single Nal Unit packet + // 0 1 2 3 + // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // |F|NRI| Type | | + // +-+-+-+-+-+-+-+-+ | + // | | + // | Bytes 2..n of a single NAL unit | + // | | + // | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | :...OPTIONAL RTP padding | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + + int numBytesInData = data.bytesLeft(); + fragmentedSampleSizeBytes += writeStartCode(); + trackOutput.sampleData(data, numBytesInData); + fragmentedSampleSizeBytes += numBytesInData; + + int nalHeaderType = data.getData()[0] & 0x1F; + bufferFlags = getBufferFlagsFromNalType(nalHeaderType); + } + + /** + * Processes STAP Type A packet (RFC6184 Section 5.7). + * + *

        Outputs the received aggregation units (with start code prepended) to {@link #trackOutput}. + * Sets {@link #bufferFlags} and {@link #fragmentedSampleSizeBytes} accordingly. + */ + @RequiresNonNull("trackOutput") + private void processSingleTimeAggregationPacket(ParsableByteArray data) { + // Example of an STAP-A packet. + // 0 1 2 3 + // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | RTP Header | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // |STAP-A NAL HDR | NALU 1 Size | NALU 1 HDR | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | NALU 1 Data | + // : : + // + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | | NALU 2 Size | NALU 2 HDR | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | NALU 2 Data | + // : : + // | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | :...OPTIONAL RTP padding | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + + // Skips STAP-A NAL HDR that has the NAL format |F|NRI|Type|, but with Type replaced by the + // STAP-A type id (RTP_PACKET_TYPE_STAP_A). + data.readUnsignedByte(); + + // Gets all NAL units until the remaining bytes are only enough to store an RTP padding. + int nalUnitLength; + while (data.bytesLeft() > 4) { + nalUnitLength = data.readUnsignedShort(); + fragmentedSampleSizeBytes += writeStartCode(); + trackOutput.sampleData(data, nalUnitLength); + fragmentedSampleSizeBytes += nalUnitLength; + } + + // Treat Aggregated NAL units as non key frames. + bufferFlags = 0; + } + + /** + * Processes Fragmentation Unit Type A packet (RFC6184 Section 5.8). + * + *

        This method will be invoked multiple times to receive a single frame that is broken down + * into a series of fragmentation units in multiple RTP packets. + * + *
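+ * <p>Illustrative example (sizes assumed, not taken from this patch): a 10 kB coded slice NAL unit carried over a link with a ~1400-byte payload limit arrives as roughly eight FU-A packets; only the first fragment has the S (start) bit set and only the last has the E (end) bit set in the FU header.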

        Outputs the received fragmentation units (with start code prepended) to {@link + * #trackOutput}. Sets {@link #bufferFlags} and {@link #fragmentedSampleSizeBytes} accordingly. + */ + @RequiresNonNull("trackOutput") + private void processFragmentationUnitPacket(ParsableByteArray data, int packetSequenceNumber) { + // FU-A mode packet layout. + // 0 1 2 3 + // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | FU indicator | FU header | | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | + // | | + // | FU payload | + // | | + // | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | :...OPTIONAL RTP padding | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // + // FU Indicator FU Header + // 0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7 + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // |F|NRI| Type |S|E|R| Type | + // +---------------+---------------+ + // Indicator: Upper 3 bits are the same as NALU header, Type = 28 (FU-A type). + // Header: Start/End/Reserved/Type. Type is same as NALU type. + int fuIndicator = data.getData()[0]; + int fuHeader = data.getData()[1]; + int nalHeader = (fuIndicator & 0xE0) | (fuHeader & 0x1F); + boolean isFirstFuPacket = (fuHeader & 0x80) > 0; + boolean isLastFuPacket = (fuHeader & 0x40) > 0; + + if (isFirstFuPacket) { + // Prepends starter code. + fragmentedSampleSizeBytes += writeStartCode(); + + // The bytes needed is 1 (NALU header) + payload size. The original data array has size 2 (FU + // indicator/header) + payload size. Thus setting the correct header and set position to 1. + data.getData()[1] = (byte) nalHeader; + fuScratchBuffer.reset(data.getData()); + fuScratchBuffer.setPosition(1); + } else { + // Check that this packet is in the sequence of the previous packet. + int expectedSequenceNumber = RtpPacket.getNextSequenceNumber(previousSequenceNumber); + if (packetSequenceNumber != expectedSequenceNumber) { + Log.w( + TAG, + Util.formatInvariant( + "Received RTP packet with unexpected sequence number. Expected: %d; received: %d." + + " Dropping packet.", + expectedSequenceNumber, packetSequenceNumber)); + return; + } + + // Setting position to ignore FU indicator and header. + fuScratchBuffer.reset(data.getData()); + fuScratchBuffer.setPosition(FU_PAYLOAD_OFFSET); + } + + int fragmentSize = fuScratchBuffer.bytesLeft(); + trackOutput.sampleData(fuScratchBuffer, fragmentSize); + fragmentedSampleSizeBytes += fragmentSize; + + if (isLastFuPacket) { + bufferFlags = getBufferFlagsFromNalType(nalHeader & 0x1F); + } + } + + private int writeStartCode() { + nalStartCodeArray.setPosition(/* position= */ 0); + int bytesWritten = nalStartCodeArray.bytesLeft(); + checkNotNull(trackOutput).sampleData(nalStartCodeArray, bytesWritten); + return bytesWritten; + } + + private static @C.BufferFlags int getBufferFlagsFromNalType(int nalType) { + return nalType == NAL_UNIT_TYPE_IDR ? 
C.BUFFER_FLAG_KEY_FRAME : 0; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpH265Reader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpH265Reader.java new file mode 100644 index 0000000000..84222e08bb --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpH265Reader.java @@ -0,0 +1,263 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp.reader; + +import static com.google.android.exoplayer2.source.rtsp.reader.RtpReaderUtils.toSampleTimeUs; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.source.rtsp.RtpPacket; +import com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.NalUnitUtil; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; + +/** + * Parses an H265 byte stream carried on RTP packets, and extracts H265 Access Units. Refer to + * RFC7798 for more details. + */ +/* package */ final class RtpH265Reader implements RtpPayloadReader { + + private static final String TAG = "RtpH265Reader"; + private static final int MEDIA_CLOCK_FREQUENCY = 90_000; + /** Offset of payload data within a FU payload. */ + private static final int FU_PAYLOAD_OFFSET = 3; + /** Aggregation Packet. RFC7798 Section 4.4.2. */ + private static final int RTP_PACKET_TYPE_AP = 48; + /** Fragmentation Unit. RFC7798 Section 4.4.3. */ + private static final int RTP_PACKET_TYPE_FU = 49; + /** IDR NAL unit types. */ + private static final int NAL_IDR_W_RADL = 19; + + private static final int NAL_IDR_N_LP = 20; + + /** Scratch for Fragmentation Unit RTP packets. */ + private final ParsableByteArray fuScratchBuffer; + + private final ParsableByteArray nalStartCodeArray; + private final RtpPayloadFormat payloadFormat; + + private @MonotonicNonNull TrackOutput trackOutput; + private @C.BufferFlags int bufferFlags; + private long firstReceivedTimestamp; + private int previousSequenceNumber; + /** The combined size of a sample that is fragmented into multiple RTP packets. */ + private int fragmentedSampleSizeBytes; + + private long startTimeOffsetUs; + + /** Creates an instance. 
*/ + public RtpH265Reader(RtpPayloadFormat payloadFormat) { + this.fuScratchBuffer = new ParsableByteArray(); + this.nalStartCodeArray = new ParsableByteArray(NalUnitUtil.NAL_START_CODE); + this.payloadFormat = payloadFormat; + firstReceivedTimestamp = C.TIME_UNSET; + previousSequenceNumber = C.INDEX_UNSET; + } + + @Override + public void createTracks(ExtractorOutput extractorOutput, int trackId) { + trackOutput = extractorOutput.track(trackId, C.TRACK_TYPE_VIDEO); + trackOutput.format(payloadFormat.format); + } + + @Override + public void onReceivingFirstPacket(long timestamp, int sequenceNumber) {} + + @Override + public void consume(ParsableByteArray data, long timestamp, int sequenceNumber, boolean rtpMarker) + throws ParserException { + if (data.getData().length == 0) { + throw ParserException.createForMalformedManifest("Empty RTP data packet.", /* cause= */ null); + } + // NAL Unit Header.type (RFC7798 Section 1.1.4). + int payloadType = (data.getData()[0] >> 1) & 0x3F; + + checkStateNotNull(trackOutput); + if (payloadType >= 0 && payloadType < RTP_PACKET_TYPE_AP) { + processSingleNalUnitPacket(data); + } else if (payloadType == RTP_PACKET_TYPE_AP) { + // TODO: Support AggregationPacket mode. + throw new UnsupportedOperationException("need to implement processAggregationPacket"); + } else if (payloadType == RTP_PACKET_TYPE_FU) { + processFragmentationUnitPacket(data, sequenceNumber); + } else { + throw ParserException.createForMalformedManifest( + String.format("RTP H265 payload type [%d] not supported.", payloadType), + /* cause= */ null); + } + + if (rtpMarker) { + if (firstReceivedTimestamp == C.TIME_UNSET) { + firstReceivedTimestamp = timestamp; + } + + long timeUs = + toSampleTimeUs( + startTimeOffsetUs, timestamp, firstReceivedTimestamp, MEDIA_CLOCK_FREQUENCY); + trackOutput.sampleMetadata( + timeUs, bufferFlags, fragmentedSampleSizeBytes, /* offset= */ 0, /* cryptoData= */ null); + fragmentedSampleSizeBytes = 0; + } + + previousSequenceNumber = sequenceNumber; + } + + @Override + public void seek(long nextRtpTimestamp, long timeUs) { + firstReceivedTimestamp = nextRtpTimestamp; + fragmentedSampleSizeBytes = 0; + startTimeOffsetUs = timeUs; + } + + // Internal methods. + + /** + * Processes Single NAL Unit packet (RFC7798 Section 4.4.1). + * + *

        Outputs the single NAL Unit (with start code prepended) to {@link #trackOutput}. Sets {@link + * #bufferFlags} and {@link #fragmentedSampleSizeBytes} accordingly. + */ + @RequiresNonNull("trackOutput") + private void processSingleNalUnitPacket(ParsableByteArray data) { + // The structure a single NAL unit packet. + // 0 1 2 3 + // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | PayloadHdr | DONL (conditional) | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | | + // | NAL unit payload data | + // | | + // | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | :...OPTIONAL RTP padding | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + + int numBytesInData = data.bytesLeft(); + fragmentedSampleSizeBytes += writeStartCode(); + trackOutput.sampleData(data, numBytesInData); + fragmentedSampleSizeBytes += numBytesInData; + + int nalHeaderType = (data.getData()[0] >> 1) & 0x3F; + bufferFlags = getBufferFlagsFromNalType(nalHeaderType); + } + + /** + * Processes Fragmentation Unit packet (RFC7798 Section 4.4.3). + * + *

        This method will be invoked multiple times to receive a single frame that is broken down + * into a series of fragmentation units in multiple RTP packets. + * + *

        Outputs the received fragmentation units (with start code prepended) to {@link + * #trackOutput}. Sets {@link #bufferFlags} and {@link #fragmentedSampleSizeBytes} accordingly. + */ + @RequiresNonNull("trackOutput") + private void processFragmentationUnitPacket(ParsableByteArray data, int packetSequenceNumber) + throws ParserException { + // The structure of an FU packet. + // 0 1 2 3 + // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | PayloadHdr (Type=49) | FU header | DONL (cond) | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-| + // | DONL (cond) | | + // |-+-+-+-+-+-+-+-+ | + // | FU payload | + // | | + // | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | :...OPTIONAL RTP padding | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // + // FU header. + // +---------------+ + // |0|1|2|3|4|5|6|7| + // +-+-+-+-+-+-+-+-+ + // |S|E| FuType | + // +---------------+ + // + // Structure of the PayloadHdr and HEVC NAL unit header, RFC7798 Section 1.1.4. + // +---------------+---------------+ + // |0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7| + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // |F| Type | LayerId | TID | + // +-------------+-----------------+ + if (data.getData().length < 3) { + throw ParserException.createForMalformedManifest("Malformed FU header.", /* cause= */ null); + } + int tid = (data.getData()[1] & 0x7); + int fuHeader = data.getData()[2]; + int nalUnitType = fuHeader & 0x3F; + boolean isFirstFuPacket = (fuHeader & 0x80) > 0; + boolean isLastFuPacket = (fuHeader & 0x40) > 0; + + if (isFirstFuPacket) { + // Prepends starter code. + fragmentedSampleSizeBytes += writeStartCode(); + + // Convert RTP header into HEVC NAL Unit header accoding to RFC7798 Section 1.1.4. + // RTP byte 0: ignored. + // RTP byte 1: repurposed as HEVC HALU byte 0, copy NALU type. + // RTP Byte 2: repurposed as HEVC HALU byte 1, layerId required to be zero, copying only tid. + // Set data position from byte 1 as byte 0 is ignored. + data.getData()[1] = (byte) ((nalUnitType << 1) & 0x7F); + data.getData()[2] = (byte) tid; + fuScratchBuffer.reset(data.getData()); + fuScratchBuffer.setPosition(1); + } else { + // Check that this packet is in the sequence of the previous packet. + int expectedSequenceNumber = (previousSequenceNumber + 1) % RtpPacket.MAX_SEQUENCE_NUMBER; + if (packetSequenceNumber != expectedSequenceNumber) { + Log.w( + TAG, + Util.formatInvariant( + "Received RTP packet with unexpected sequence number. Expected: %d; received: %d." + + " Dropping packet.", + expectedSequenceNumber, packetSequenceNumber)); + return; + } + + // Setting position to ignore payload and FU header. + fuScratchBuffer.reset(data.getData()); + fuScratchBuffer.setPosition(FU_PAYLOAD_OFFSET); + } + + int fragmentSize = fuScratchBuffer.bytesLeft(); + trackOutput.sampleData(fuScratchBuffer, fragmentSize); + fragmentedSampleSizeBytes += fragmentSize; + + if (isLastFuPacket) { + bufferFlags = getBufferFlagsFromNalType(nalUnitType); + } + } + + private int writeStartCode() { + nalStartCodeArray.setPosition(/* position= */ 0); + int bytesWritten = nalStartCodeArray.bytesLeft(); + checkNotNull(trackOutput).sampleData(nalStartCodeArray, bytesWritten); + return bytesWritten; + } + + private static @C.BufferFlags int getBufferFlagsFromNalType(int nalType) { + return (nalType == NAL_IDR_W_RADL || nalType == NAL_IDR_N_LP) ? 
C.BUFFER_FLAG_KEY_FRAME : 0; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpMp4aReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpMp4aReader.java new file mode 100644 index 0000000000..d59e506275 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpMp4aReader.java @@ -0,0 +1,180 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp.reader; + +import static com.google.android.exoplayer2.source.rtsp.reader.RtpReaderUtils.toSampleTimeUs; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.source.rtsp.RtpPacket; +import com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat; +import com.google.android.exoplayer2.util.ParsableBitArray; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableMap; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Parses an MP4A-LATM byte stream carried on RTP packets, and extracts MP4A-LATM Access Units. + * + *
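+ * <p>Illustrative note (example bytes assumed): with the byte-wise length coding used here (PayloadLengthInfo in ISO/IEC14496-3), each subframe length is read as successive bytes where 0xFF means add 255 and keep reading, so the sequence 0xFF 0xFF 0x20 encodes a 255 + 255 + 32 = 542 byte subframe.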

        Refer to RFC3016 for more details. The LATM byte stream format is defined in ISO/IEC14496-3. + */ +/* package */ final class RtpMp4aReader implements RtpPayloadReader { + private static final String TAG = "RtpMp4aReader"; + + private static final String PARAMETER_MP4A_CONFIG = "config"; + + private final RtpPayloadFormat payloadFormat; + private final int numberOfSubframes; + private @MonotonicNonNull TrackOutput trackOutput; + private long firstReceivedTimestamp; + private int previousSequenceNumber; + /** The combined size of a sample that is fragmented into multiple subFrames. */ + private int fragmentedSampleSizeBytes; + + private long startTimeOffsetUs; + private long fragmentedSampleTimeUs; + + /** + * Creates an instance. + * + * @throws IllegalArgumentException If {@link RtpPayloadFormat payloadFormat} is malformed. + */ + public RtpMp4aReader(RtpPayloadFormat payloadFormat) { + this.payloadFormat = payloadFormat; + try { + numberOfSubframes = getNumOfSubframesFromMpeg4AudioConfig(payloadFormat.fmtpParameters); + } catch (ParserException e) { + throw new IllegalArgumentException(e); + } + firstReceivedTimestamp = C.TIME_UNSET; + previousSequenceNumber = C.INDEX_UNSET; + fragmentedSampleSizeBytes = 0; + // The start time offset must be 0 until the first seek. + startTimeOffsetUs = 0; + fragmentedSampleTimeUs = C.TIME_UNSET; + } + + @Override + public void createTracks(ExtractorOutput extractorOutput, int trackId) { + trackOutput = extractorOutput.track(trackId, C.TRACK_TYPE_VIDEO); + castNonNull(trackOutput).format(payloadFormat.format); + } + + @Override + public void onReceivingFirstPacket(long timestamp, int sequenceNumber) { + checkState(firstReceivedTimestamp == C.TIME_UNSET); + firstReceivedTimestamp = timestamp; + } + + @Override + public void consume( + ParsableByteArray data, long timestamp, int sequenceNumber, boolean rtpMarker) { + checkStateNotNull(trackOutput); + + int expectedSequenceNumber = RtpPacket.getNextSequenceNumber(previousSequenceNumber); + if (fragmentedSampleSizeBytes > 0 && expectedSequenceNumber < sequenceNumber) { + outputSampleMetadataForFragmentedPackets(); + } + + for (int subFrameIndex = 0; subFrameIndex < numberOfSubframes; subFrameIndex++) { + int sampleLength = 0; + // Implements PayloadLengthInfo() in ISO/IEC14496-3 Chapter 1.7.3.1, it only supports one + // program and one layer. Each subframe starts with a variable length encoding. + while (data.getPosition() < data.limit()) { + int payloadMuxLength = data.readUnsignedByte(); + sampleLength += payloadMuxLength; + if (payloadMuxLength != 0xff) { + break; + } + } + + trackOutput.sampleData(data, sampleLength); + fragmentedSampleSizeBytes += sampleLength; + } + fragmentedSampleTimeUs = + toSampleTimeUs( + startTimeOffsetUs, timestamp, firstReceivedTimestamp, payloadFormat.clockRate); + if (rtpMarker) { + outputSampleMetadataForFragmentedPackets(); + } + previousSequenceNumber = sequenceNumber; + } + + @Override + public void seek(long nextRtpTimestamp, long timeUs) { + firstReceivedTimestamp = nextRtpTimestamp; + fragmentedSampleSizeBytes = 0; + startTimeOffsetUs = timeUs; + } + + // Internal methods. + + /** + * Parses an MPEG-4 Audio Stream Mux configuration, as defined in ISO/IEC14496-3. + * + *

        FMTP attribute {@code config} contains the MPEG-4 Audio Stream Mux configuration. + * + * @param fmtpAttributes The format parameters, mapped from the SDP FMTP attribute. + * @return The number of subframes that is carried in each RTP packet. + */ + private static int getNumOfSubframesFromMpeg4AudioConfig( + ImmutableMap fmtpAttributes) throws ParserException { + @Nullable String configInput = fmtpAttributes.get(PARAMETER_MP4A_CONFIG); + int numberOfSubframes = 0; + if (configInput != null && configInput.length() % 2 == 0) { + byte[] configBuffer = Util.getBytesFromHexString(configInput); + ParsableBitArray scratchBits = new ParsableBitArray(configBuffer); + int audioMuxVersion = scratchBits.readBits(1); + if (audioMuxVersion == 0) { + checkArgument(scratchBits.readBits(1) == 1, "Only supports allStreamsSameTimeFraming."); + numberOfSubframes = scratchBits.readBits(6); + checkArgument(scratchBits.readBits(4) == 0, "Only suppors one program."); + checkArgument(scratchBits.readBits(3) == 0, "Only suppors one layer."); + } else { + throw ParserException.createForMalformedDataOfUnknownType( + "unsupported audio mux version: " + audioMuxVersion, null); + } + } + // ISO/IEC14496-3 Chapter 1.7.3.2.3: The minimum value is 0 indicating 1 subframe. + return numberOfSubframes + 1; + } + + /** + * Outputs sample metadata. + * + *

        Call this method only after receiving the end of an MPEG4 partition. + */ + private void outputSampleMetadataForFragmentedPackets() { + checkNotNull(trackOutput) + .sampleMetadata( + fragmentedSampleTimeUs, + C.BUFFER_FLAG_KEY_FRAME, + fragmentedSampleSizeBytes, + /* offset= */ 0, + /* cryptoData= */ null); + fragmentedSampleSizeBytes = 0; + fragmentedSampleTimeUs = C.TIME_UNSET; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpMpeg4Reader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpMpeg4Reader.java new file mode 100644 index 0000000000..b00d0e8aae --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpMpeg4Reader.java @@ -0,0 +1,141 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp.reader; + +import static com.google.android.exoplayer2.source.rtsp.reader.RtpReaderUtils.toSampleTimeUs; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.source.rtsp.RtpPacket; +import com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import com.google.common.primitives.Bytes; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Parses an MPEG4 byte stream carried on RTP packets, and extracts MPEG4 Access Units. Refer to + * RFC6416 for more details. + */ +/* package */ final class RtpMpeg4Reader implements RtpPayloadReader { + private static final String TAG = "RtpMpeg4Reader"; + + private static final int MEDIA_CLOCK_FREQUENCY = 90_000; + + /** VOP (Video Object Plane) unit type. */ + private static final int I_VOP = 0; + + private final RtpPayloadFormat payloadFormat; + private @MonotonicNonNull TrackOutput trackOutput; + private @C.BufferFlags int bufferFlags; + + /** + * First received RTP timestamp. All RTP timestamps are dimension-less, the time base is defined + * by {@link #MEDIA_CLOCK_FREQUENCY}. + */ + private long firstReceivedTimestamp; + + private int previousSequenceNumber; + private long startTimeOffsetUs; + private int sampleLength; + + /** Creates an instance. 
*/ + public RtpMpeg4Reader(RtpPayloadFormat payloadFormat) { + this.payloadFormat = payloadFormat; + firstReceivedTimestamp = C.TIME_UNSET; + previousSequenceNumber = C.INDEX_UNSET; + } + + @Override + public void createTracks(ExtractorOutput extractorOutput, int trackId) { + trackOutput = extractorOutput.track(trackId, C.TRACK_TYPE_VIDEO); + castNonNull(trackOutput).format(payloadFormat.format); + } + + @Override + public void onReceivingFirstPacket(long timestamp, int sequenceNumber) {} + + @Override + public void consume( + ParsableByteArray data, long timestamp, int sequenceNumber, boolean rtpMarker) { + checkStateNotNull(trackOutput); + // Check that this packet is in the sequence of the previous packet. + if (previousSequenceNumber != C.INDEX_UNSET) { + int expectedSequenceNumber = RtpPacket.getNextSequenceNumber(previousSequenceNumber); + if (sequenceNumber != expectedSequenceNumber) { + Log.w( + TAG, + Util.formatInvariant( + "Received RTP packet with unexpected sequence number. Expected: %d; received: %d." + + " Dropping packet.", + expectedSequenceNumber, sequenceNumber)); + } + } + + // Parse VOP Type and get the buffer flags + int limit = data.bytesLeft(); + trackOutput.sampleData(data, limit); + if (sampleLength == 0) { + bufferFlags = getBufferFlagsFromVop(data); + } + sampleLength += limit; + + // RTP marker indicates the last packet carrying a VOP. + if (rtpMarker) { + if (firstReceivedTimestamp == C.TIME_UNSET) { + firstReceivedTimestamp = timestamp; + } + + long timeUs = + toSampleTimeUs( + startTimeOffsetUs, timestamp, firstReceivedTimestamp, MEDIA_CLOCK_FREQUENCY); + trackOutput.sampleMetadata(timeUs, bufferFlags, sampleLength, 0, null); + sampleLength = 0; + } + previousSequenceNumber = sequenceNumber; + } + + @Override + public void seek(long nextRtpTimestamp, long timeUs) { + firstReceivedTimestamp = nextRtpTimestamp; + startTimeOffsetUs = timeUs; + sampleLength = 0; + } + + // Internal methods. + + /** + * Returns VOP (Video Object Plane) Coding type. + * + *
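+ * <p>Illustrative example (byte value assumed): if the byte following the 00 00 01 B6 start code is 0x3A, its two most significant bits are 00, so the VOP is an I-VOP and the sample is flagged as a key frame.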

        Sets {@link #bufferFlags} according to the VOP Coding type. + */ + private static @C.BufferFlags int getBufferFlagsFromVop(ParsableByteArray data) { + // search for VOP_START_CODE (00 00 01 B6) + byte[] inputData = data.getData(); + byte[] startCode = new byte[] {0x0, 0x0, 0x1, (byte) 0xB6}; + int vopStartCodePos = Bytes.indexOf(inputData, startCode); + if (vopStartCodePos != -1) { + data.setPosition(vopStartCodePos + 4); + int vopType = data.peekUnsignedByte() >> 6; + return vopType == I_VOP ? C.BUFFER_FLAG_KEY_FRAME : 0; + } + return 0; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpOpusReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpOpusReader.java new file mode 100644 index 0000000000..af1da2cb77 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpOpusReader.java @@ -0,0 +1,150 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp.reader; + +import static com.google.android.exoplayer2.source.rtsp.reader.RtpReaderUtils.toSampleTimeUs; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.audio.OpusUtil; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.source.rtsp.RtpPacket; +import com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import java.util.List; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Parses an OPUS byte stream carried on RTP packets and extracts individual samples. Refer to + * RFC7845 for more details. + */ +/* package */ final class RtpOpusReader implements RtpPayloadReader { + private static final String TAG = "RtpOpusReader"; + /* Opus uses a fixed 48KHz media clock RFC7845 Section 4. */ + private static final int MEDIA_CLOCK_FREQUENCY = 48_000; + + private final RtpPayloadFormat payloadFormat; + + private @MonotonicNonNull TrackOutput trackOutput; + + /** + * First received RTP timestamp. All RTP timestamps are dimension-less, the time base is defined + * by {@link #MEDIA_CLOCK_FREQUENCY}. + */ + private long firstReceivedTimestamp; + + private long startTimeOffsetUs; + private int previousSequenceNumber; + private boolean foundOpusIDHeader; + private boolean foundOpusCommentHeader; + + /** Creates an instance. 
*/ + public RtpOpusReader(RtpPayloadFormat payloadFormat) { + this.payloadFormat = payloadFormat; + this.firstReceivedTimestamp = C.INDEX_UNSET; + this.previousSequenceNumber = C.INDEX_UNSET; + } + + // RtpPayloadReader implementation. + + @Override + public void createTracks(ExtractorOutput extractorOutput, int trackId) { + trackOutput = extractorOutput.track(trackId, C.TRACK_TYPE_AUDIO); + trackOutput.format(payloadFormat.format); + } + + @Override + public void onReceivingFirstPacket(long timestamp, int sequenceNumber) { + this.firstReceivedTimestamp = timestamp; + } + + @Override + public void consume( + ParsableByteArray data, long timestamp, int sequenceNumber, boolean rtpMarker) { + checkStateNotNull(trackOutput); + + /* RFC7845 Section 3. + * +---------+ +----------------+ +--------------------+ +----- + * |ID Header| | Comment Header | |Audio Data Packet 1 | | ... + * +---------+ +----------------+ +--------------------+ +----- + */ + if (!foundOpusIDHeader) { + validateOpusIdHeader(data); + List initializationData = OpusUtil.buildInitializationData(data.getData()); + Format.Builder formatBuilder = payloadFormat.format.buildUpon(); + formatBuilder.setInitializationData(initializationData); + trackOutput.format(formatBuilder.build()); + foundOpusIDHeader = true; + } else if (!foundOpusCommentHeader) { + // Comment Header RFC7845 Section 5.2. + int sampleSize = data.limit(); + checkArgument(sampleSize >= 8, "Comment Header has insufficient data"); + String header = data.readString(8); + checkArgument(header.equals("OpusTags"), "Comment Header should follow ID Header"); + foundOpusCommentHeader = true; + } else { + // Check that this packet is in the sequence of the previous packet. + int expectedSequenceNumber = RtpPacket.getNextSequenceNumber(previousSequenceNumber); + if (sequenceNumber != expectedSequenceNumber) { + Log.w( + TAG, + Util.formatInvariant( + "Received RTP packet with unexpected sequence number. Expected: %d; received: %d.", + expectedSequenceNumber, sequenceNumber)); + } + + // sending opus data. + int size = data.bytesLeft(); + trackOutput.sampleData(data, size); + long timeUs = + toSampleTimeUs( + startTimeOffsetUs, timestamp, firstReceivedTimestamp, MEDIA_CLOCK_FREQUENCY); + trackOutput.sampleMetadata( + timeUs, C.BUFFER_FLAG_KEY_FRAME, size, /* offset*/ 0, /* cryptoData*/ null); + } + previousSequenceNumber = sequenceNumber; + } + + @Override + public void seek(long nextRtpTimestamp, long timeUs) { + firstReceivedTimestamp = nextRtpTimestamp; + startTimeOffsetUs = timeUs; + } + + // Internal methods. + + /** + * Validates the OPUS ID Header at {@code data}'s current position, throws {@link + * IllegalArgumentException} if the header is invalid. + * + *
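+ * <p>For reference (RFC7845 Section 5.1): the ID header is at least 19 bytes ("OpusHead" magic 8, version 1, channel count 1, pre-skip 2, input sample rate 4, output gain 2, mapping family 1), which is why the size check below requires more than 18 bytes.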

        {@code data}'s position does not change after returning. + */ + private static void validateOpusIdHeader(ParsableByteArray data) { + int currPosition = data.getPosition(); + int sampleSize = data.limit(); + checkArgument(sampleSize > 18, "ID Header has insufficient data"); + String header = data.readString(8); + // Identification header RFC7845 Section 5.1. + checkArgument(header.equals("OpusHead"), "ID Header missing"); + checkArgument(data.readUnsignedByte() == 1, "version number must always be 1"); + data.setPosition(currPosition); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpPayloadReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpPayloadReader.java new file mode 100644 index 0000000000..fef22be090 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpPayloadReader.java @@ -0,0 +1,84 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.source.rtsp.reader; + +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat; +import com.google.android.exoplayer2.util.ParsableByteArray; +import org.checkerframework.checker.nullness.qual.Nullable; + +/** Extracts media samples from the payload of received RTP packets. */ +/* package */ public interface RtpPayloadReader { + + /** Factory of {@link RtpPayloadReader} instances. */ + interface Factory { + + /** + * Returns a {@link RtpPayloadReader} for a given {@link RtpPayloadFormat}. + * + * @param payloadFormat The {@link RtpPayloadFormat} of the RTP stream. + * @return A {@link RtpPayloadReader} for the packet stream, or {@code null} if the stream + * format is not supported. + */ + @Nullable RtpPayloadReader createPayloadReader(RtpPayloadFormat payloadFormat); + } + + /** + * Initializes the reader by providing its output and track id. + * + * @param extractorOutput The {@link ExtractorOutput} instance that receives the extracted data. + * @param trackId The track identifier to set on the format. + */ + void createTracks(ExtractorOutput extractorOutput, int trackId); + + /** + * This method should be called on reading the first packet in a stream of incoming packets. + * + * @param timestamp The timestamp associated with the first received RTP packet. This number has + * no unit, the duration conveyed by it depends on the frequency of the media that the RTP + * packet is carrying. + * @param sequenceNumber The sequence associated with the first received RTP packet. + */ + void onReceivingFirstPacket(long timestamp, int sequenceNumber); + + /** + * Consumes the payload from the an RTP packet. + * + * @param data The RTP payload to consume. + * @param timestamp The timestamp of the RTP packet that transmitted the data. 
This number has no + * unit, the duration conveyed by it depends on the frequency of the media that the RTP packet + * is carrying. + * @param sequenceNumber The sequence number of the RTP packet. + * @param rtpMarker The marker bit of the RTP packet. The interpretation of this bit is specific + * to each payload format. + * @throws ParserException If the data could not be parsed. + */ + void consume(ParsableByteArray data, long timestamp, int sequenceNumber, boolean rtpMarker) + throws ParserException; + + /** + * Seeks the reader. + * + *

        This method must only be invoked after the PLAY request for seeking is acknowledged by the + * RTSP server. + * + * @param nextRtpTimestamp The timestamp of the first packet to arrive after seek. + * @param timeUs The server acknowledged seek time in microseconds. + */ + void seek(long nextRtpTimestamp, long timeUs); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpPcmReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpPcmReader.java new file mode 100644 index 0000000000..5887fd6bfd --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpPcmReader.java @@ -0,0 +1,96 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp.reader; + +import static com.google.android.exoplayer2.source.rtsp.reader.RtpReaderUtils.toSampleTimeUs; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import android.util.Log; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.source.rtsp.RtpPacket; +import com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Parses byte stream carried on RTP packets, and extracts PCM frames. Refer to RFC3551 for more + * details. + */ +/* package */ public final class RtpPcmReader implements RtpPayloadReader { + + private static final String TAG = "RtpPcmReader"; + private final RtpPayloadFormat payloadFormat; + + private @MonotonicNonNull TrackOutput trackOutput; + private long firstReceivedTimestamp; + private long startTimeOffsetUs; + private int previousSequenceNumber; + + public RtpPcmReader(RtpPayloadFormat payloadFormat) { + this.payloadFormat = payloadFormat; + firstReceivedTimestamp = C.TIME_UNSET; + // Start time offset must be 0 before the first seek. 
+ startTimeOffsetUs = 0; + previousSequenceNumber = C.INDEX_UNSET; + } + + @Override + public void createTracks(ExtractorOutput extractorOutput, int trackId) { + trackOutput = extractorOutput.track(trackId, C.TRACK_TYPE_AUDIO); + trackOutput.format(payloadFormat.format); + } + + @Override + public void onReceivingFirstPacket(long timestamp, int sequenceNumber) { + firstReceivedTimestamp = timestamp; + } + + @Override + public void consume( + ParsableByteArray data, long timestamp, int sequenceNumber, boolean rtpMarker) { + checkNotNull(trackOutput); + if (previousSequenceNumber != C.INDEX_UNSET) { + int expectedSequenceNumber = RtpPacket.getNextSequenceNumber(previousSequenceNumber); + if (sequenceNumber != expectedSequenceNumber) { + Log.w( + TAG, + Util.formatInvariant( + "Received RTP packet with unexpected sequence number. Expected: %d; received: %d.", + expectedSequenceNumber, sequenceNumber)); + } + } + + long sampleTimeUs = + toSampleTimeUs( + startTimeOffsetUs, timestamp, firstReceivedTimestamp, payloadFormat.clockRate); + int size = data.bytesLeft(); + trackOutput.sampleData(data, size); + trackOutput.sampleMetadata( + sampleTimeUs, C.BUFFER_FLAG_KEY_FRAME, size, /* offset= */ 0, /* cryptoData= */ null); + + previousSequenceNumber = sequenceNumber; + } + + @Override + public void seek(long nextRtpTimestamp, long timeUs) { + // TODO(b/198620566) Rename firstReceivedTimestamp to timestampBase for all RtpPayloadReaders. + firstReceivedTimestamp = nextRtpTimestamp; + startTimeOffsetUs = timeUs; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpReaderUtils.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpReaderUtils.java new file mode 100644 index 0000000000..48aa4100d7 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpReaderUtils.java @@ -0,0 +1,46 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp.reader; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.util.Util; + +/** Utility methods for {@link RtpPayloadReader}s. */ +/* package */ class RtpReaderUtils { + + /** + * Converts RTP timestamp and media frequency to sample presentation time, in microseconds + * + * @param startTimeOffsetUs The offset of the RTP timebase, in microseconds. + * @param rtpTimestamp The RTP timestamp to convert. + * @param firstReceivedRtpTimestamp The first received RTP timestamp. + * @param mediaFrequency The media frequency. + * @return The calculated sample presentation time, in microseconds. 
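+ *     For example (illustrative values): with a 90 kHz media clock, an RTP timestamp 45_000 ticks after {@code firstReceivedRtpTimestamp} corresponds to {@code startTimeOffsetUs} + 500_000 microseconds.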
+ */ + public static long toSampleTimeUs( + long startTimeOffsetUs, + long rtpTimestamp, + long firstReceivedRtpTimestamp, + int mediaFrequency) { + return startTimeOffsetUs + + Util.scaleLargeTimestamp( + rtpTimestamp - firstReceivedRtpTimestamp, + /* multiplier= */ C.MICROS_PER_SECOND, + /* divisor= */ mediaFrequency); + } + + private RtpReaderUtils() {} +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpVp8Reader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpVp8Reader.java new file mode 100644 index 0000000000..bb2ba750cb --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpVp8Reader.java @@ -0,0 +1,221 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp.reader; + +import static com.google.android.exoplayer2.source.rtsp.reader.RtpReaderUtils.toSampleTimeUs; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.source.rtsp.RtpPacket; +import com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Parses a VP8 byte stream carried on RTP packets, and extracts VP8 individual video frames as + * defined in RFC7741. + */ +/* package */ final class RtpVp8Reader implements RtpPayloadReader { + private static final String TAG = "RtpVP8Reader"; + + /** VP8 uses a 90 KHz media clock (RFC7741 Section 4.1). */ + private static final int MEDIA_CLOCK_FREQUENCY = 90_000; + + private final RtpPayloadFormat payloadFormat; + + private @MonotonicNonNull TrackOutput trackOutput; + + /** + * First received RTP timestamp. All RTP timestamps are dimension-less, the time base is defined + * by {@link #MEDIA_CLOCK_FREQUENCY}. + */ + private long firstReceivedTimestamp; + + private int previousSequenceNumber; + /** The combined size of a sample that is fragmented into multiple RTP packets. */ + private int fragmentedSampleSizeBytes; + + private long fragmentedSampleTimeUs; + + private long startTimeOffsetUs; + /** + * Whether the first packet of one VP8 frame is received. A VP8 frame can be split into two RTP + * packets. + */ + private boolean gotFirstPacketOfVp8Frame; + + private boolean isKeyFrame; + private boolean isOutputFormatSet; + + /** Creates an instance. 
*/ + public RtpVp8Reader(RtpPayloadFormat payloadFormat) { + this.payloadFormat = payloadFormat; + firstReceivedTimestamp = C.TIME_UNSET; + previousSequenceNumber = C.INDEX_UNSET; + fragmentedSampleSizeBytes = C.LENGTH_UNSET; + fragmentedSampleTimeUs = C.TIME_UNSET; + // The start time offset must be 0 until the first seek. + startTimeOffsetUs = 0; + } + + @Override + public void createTracks(ExtractorOutput extractorOutput, int trackId) { + trackOutput = extractorOutput.track(trackId, C.TRACK_TYPE_VIDEO); + trackOutput.format(payloadFormat.format); + } + + @Override + public void onReceivingFirstPacket(long timestamp, int sequenceNumber) { + checkState(firstReceivedTimestamp == C.TIME_UNSET); + firstReceivedTimestamp = timestamp; + } + + @Override + public void consume( + ParsableByteArray data, long timestamp, int sequenceNumber, boolean rtpMarker) { + checkStateNotNull(trackOutput); + + boolean isValidVP8Descriptor = validateVp8Descriptor(data, sequenceNumber); + if (isValidVP8Descriptor) { + // VP8 Payload Header is defined in RFC7741 Section 4.3. + if (fragmentedSampleSizeBytes == C.LENGTH_UNSET && gotFirstPacketOfVp8Frame) { + isKeyFrame = (data.peekUnsignedByte() & 0x01) == 0; + } + if (!isOutputFormatSet) { + // Parsing frame data to get width and height, RFC6386 Section 19.1. + int currPosition = data.getPosition(); + // Skips the frame_tag and start_code. + data.setPosition(currPosition + 6); + // RFC6386 Section 19.1 specifically uses little endian. + int width = data.readLittleEndianUnsignedShort() & 0x3fff; + int height = data.readLittleEndianUnsignedShort() & 0x3fff; + data.setPosition(currPosition); + + if (width != payloadFormat.format.width || height != payloadFormat.format.height) { + trackOutput.format( + payloadFormat.format.buildUpon().setWidth(width).setHeight(height).build()); + } + isOutputFormatSet = true; + } + + int fragmentSize = data.bytesLeft(); + trackOutput.sampleData(data, fragmentSize); + if (fragmentedSampleSizeBytes == C.LENGTH_UNSET) { + fragmentedSampleSizeBytes = fragmentSize; + } else { + fragmentedSampleSizeBytes += fragmentSize; + } + + fragmentedSampleTimeUs = + toSampleTimeUs( + startTimeOffsetUs, timestamp, firstReceivedTimestamp, MEDIA_CLOCK_FREQUENCY); + + if (rtpMarker) { + outputSampleMetadataForFragmentedPackets(); + } + previousSequenceNumber = sequenceNumber; + } + } + + @Override + public void seek(long nextRtpTimestamp, long timeUs) { + firstReceivedTimestamp = nextRtpTimestamp; + fragmentedSampleSizeBytes = C.LENGTH_UNSET; + startTimeOffsetUs = timeUs; + } + + /** + * Returns {@code true} and sets the {@link ParsableByteArray#getPosition() payload.position} to + * the end of the descriptor, if a valid VP8 descriptor is present. + */ + private boolean validateVp8Descriptor(ParsableByteArray payload, int packetSequenceNumber) { + // VP8 Payload Descriptor is defined in RFC7741 Section 4.2. + int header = payload.readUnsignedByte(); + // TODO(b/198620566) Consider using ParsableBitArray. + // For start of VP8 partition S=1 and PID=0 as per RFC7741 Section 4.2. + if ((header & 0x10) == 0x10 && (header & 0x07) == 0) { + if (gotFirstPacketOfVp8Frame && fragmentedSampleSizeBytes > 0) { + // Received new VP8 fragment, output data of previous fragment to decoder. + outputSampleMetadataForFragmentedPackets(); + } + gotFirstPacketOfVp8Frame = true; + } else if (gotFirstPacketOfVp8Frame) { + // Check that this packet is in the sequence of the previous packet. 
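+ // RTP sequence numbers are 16-bit values that wrap around (RFC 3550), hence the helper instead of plain addition.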
+ int expectedSequenceNumber = RtpPacket.getNextSequenceNumber(previousSequenceNumber); + if (packetSequenceNumber < expectedSequenceNumber) { + Log.w( + TAG, + Util.formatInvariant( + "Received RTP packet with unexpected sequence number. Expected: %d; received: %d." + + " Dropping packet.", + expectedSequenceNumber, packetSequenceNumber)); + return false; + } + } else { + Log.w(TAG, "RTP packet is not the start of a new VP8 partition, skipping."); + return false; + } + + // Check if optional X header is present. + if ((header & 0x80) != 0) { + int xHeader = payload.readUnsignedByte(); + + // Check if optional I header is present. + if ((xHeader & 0x80) != 0) { + int iHeader = payload.readUnsignedByte(); + // Check if I header's M bit is present. + if ((iHeader & 0x80) != 0) { + payload.skipBytes(1); + } + } + + // Check if optional L header is present. + if ((xHeader & 0x40) != 0) { + payload.skipBytes(1); + } + + // Check if optional T or K header(s) is present. + if ((xHeader & 0x20) != 0 || (xHeader & 0x10) != 0) { + payload.skipBytes(1); + } + } + return true; + } + + /** + * Outputs sample metadata of the received fragmented packets. + * + *

        Call this method only after receiving an end of a VP8 partition. + */ + private void outputSampleMetadataForFragmentedPackets() { + checkNotNull(trackOutput) + .sampleMetadata( + fragmentedSampleTimeUs, + isKeyFrame ? C.BUFFER_FLAG_KEY_FRAME : 0, + fragmentedSampleSizeBytes, + /* offset= */ 0, + /* cryptoData= */ null); + fragmentedSampleSizeBytes = C.LENGTH_UNSET; + fragmentedSampleTimeUs = C.TIME_UNSET; + gotFirstPacketOfVp8Frame = false; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpVp9Reader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpVp9Reader.java new file mode 100644 index 0000000000..c0c4f31007 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/RtpVp9Reader.java @@ -0,0 +1,277 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.source.rtsp.reader; + +import static com.google.android.exoplayer2.source.rtsp.reader.RtpReaderUtils.toSampleTimeUs; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.source.rtsp.RtpPacket; +import com.google.android.exoplayer2.source.rtsp.RtpPayloadFormat; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Parses a VP9 byte stream carried on RTP packets, and extracts VP9 Access Units. Refer to this draft RFC for more + * details. + */ +/* package */ final class RtpVp9Reader implements RtpPayloadReader { + + private static final String TAG = "RtpVp9Reader"; + + private static final int MEDIA_CLOCK_FREQUENCY = 90_000; + private static final int SCALABILITY_STRUCTURE_SIZE = 4; + + private final RtpPayloadFormat payloadFormat; + + private @MonotonicNonNull TrackOutput trackOutput; + + /** + * First received RTP timestamp. All RTP timestamps are dimension-less, the time base is defined + * by {@link #MEDIA_CLOCK_FREQUENCY}. + */ + private long firstReceivedTimestamp; + + private long startTimeOffsetUs; + private int previousSequenceNumber; + /** The combined size of a sample that is fragmented into multiple RTP packets. */ + private int fragmentedSampleSizeBytes; + + private long fragmentedSampleTimeUs; + + private int width; + private int height; + /** + * Whether the first packet of a VP9 frame is received, it mark the start of a VP9 partition. 
A + * VP9 frame can be split into multiple RTP packets. + */ + private boolean gotFirstPacketOfVp9Frame; + + private boolean reportedOutputFormat; + private boolean isKeyFrame; + + /** Creates an instance. */ + public RtpVp9Reader(RtpPayloadFormat payloadFormat) { + this.payloadFormat = payloadFormat; + firstReceivedTimestamp = C.TIME_UNSET; + fragmentedSampleSizeBytes = C.LENGTH_UNSET; + fragmentedSampleTimeUs = C.TIME_UNSET; + // The start time offset must be 0 until the first seek. + startTimeOffsetUs = 0; + previousSequenceNumber = C.INDEX_UNSET; + width = C.LENGTH_UNSET; + height = C.LENGTH_UNSET; + } + + @Override + public void createTracks(ExtractorOutput extractorOutput, int trackId) { + trackOutput = extractorOutput.track(trackId, C.TRACK_TYPE_VIDEO); + trackOutput.format(payloadFormat.format); + } + + @Override + public void onReceivingFirstPacket(long timestamp, int sequenceNumber) { + checkState(firstReceivedTimestamp == C.TIME_UNSET); + firstReceivedTimestamp = timestamp; + } + + @Override + public void consume( + ParsableByteArray data, long timestamp, int sequenceNumber, boolean rtpMarker) { + checkStateNotNull(trackOutput); + + if (validateVp9Descriptor(data, sequenceNumber)) { + if (fragmentedSampleSizeBytes == C.LENGTH_UNSET && gotFirstPacketOfVp9Frame) { + // Parsing the frame_type in VP9 uncompressed header, 0 - key frame, 1 - inter frame. + // Refer to VP9 Bitstream superframe and uncompressed header, Section 4.1. + isKeyFrame = (data.peekUnsignedByte() & 0x04) == 0; + } + + if (!reportedOutputFormat && width != C.LENGTH_UNSET && height != C.LENGTH_UNSET) { + if (width != payloadFormat.format.width || height != payloadFormat.format.height) { + trackOutput.format( + payloadFormat.format.buildUpon().setWidth(width).setHeight(height).build()); + } + reportedOutputFormat = true; + } + + int currentFragmentSizeBytes = data.bytesLeft(); + // Write the video sample. + trackOutput.sampleData(data, currentFragmentSizeBytes); + if (fragmentedSampleSizeBytes == C.LENGTH_UNSET) { + fragmentedSampleSizeBytes = currentFragmentSizeBytes; + } else { + fragmentedSampleSizeBytes += currentFragmentSizeBytes; + } + fragmentedSampleTimeUs = + toSampleTimeUs( + startTimeOffsetUs, timestamp, firstReceivedTimestamp, MEDIA_CLOCK_FREQUENCY); + + if (rtpMarker) { + outputSampleMetadataForFragmentedPackets(); + } + previousSequenceNumber = sequenceNumber; + } + } + + @Override + public void seek(long nextRtpTimestamp, long timeUs) { + firstReceivedTimestamp = nextRtpTimestamp; + fragmentedSampleSizeBytes = C.LENGTH_UNSET; + startTimeOffsetUs = timeUs; + } + + // Internal methods. + /** + * Returns {@code true} and sets the {@link ParsableByteArray#getPosition() payload.position} to + * the end of the descriptor, if a valid VP9 descriptor is present. + */ + private boolean validateVp9Descriptor(ParsableByteArray payload, int packetSequenceNumber) { + // VP9 Payload Descriptor, Section 4.2 + // 0 1 2 3 4 5 6 7 + // +-+-+-+-+-+-+-+-+ + // |I|P|L|F|B|E|V|Z| (REQUIRED) + // +-+-+-+-+-+-+-+-+ + // I: |M| PICTURE ID | (RECOMMENDED) + // +-+-+-+-+-+-+-+-+ + // M: | EXTENDED PID | (RECOMMENDED) + // +-+-+-+-+-+-+-+-+ + // L: | TID |U| SID |D| (Conditionally RECOMMENDED) + // +-+-+-+-+-+-+-+-+ + // | TL0PICIDX | (Conditionally REQUIRED) + // +-+-+-+-+-+-+-+-+ + // V: | SS | + // | .. 
| + // +-+-+-+-+-+-+-+-+ + + int header = payload.readUnsignedByte(); + if ((header & 0x08) == 0x08) { + if (gotFirstPacketOfVp9Frame && fragmentedSampleSizeBytes > 0) { + // Received new VP9 fragment, output data of previous fragment to decoder. + outputSampleMetadataForFragmentedPackets(); + } + gotFirstPacketOfVp9Frame = true; + } else if (gotFirstPacketOfVp9Frame) { + // Check that this packet is in the sequence of the previous packet. + int expectedSequenceNumber = RtpPacket.getNextSequenceNumber(previousSequenceNumber); + if (packetSequenceNumber < expectedSequenceNumber) { + Log.w( + TAG, + Util.formatInvariant( + "Received RTP packet with unexpected sequence number. Expected: %d; received: %d." + + " Dropping packet.", + expectedSequenceNumber, packetSequenceNumber)); + return false; + } + } else { + Log.w( + TAG, + "First payload octet of the RTP packet is not the beginning of a new VP9 partition," + + " Dropping current packet."); + return false; + } + + // Check if optional I header is present. + if ((header & 0x80) != 0) { + int optionalHeader = payload.readUnsignedByte(); + // Check M for 15 bits PictureID. + if ((optionalHeader & 0x80) != 0) { + if (payload.bytesLeft() < 1) { + return false; + } + } + } + + // Flexible-mode is not implemented. + checkArgument((header & 0x10) == 0, "VP9 flexible mode is not supported."); + + // Check if the optional L header is present. + if ((header & 0x20) != 0) { + payload.skipBytes(1); + if (payload.bytesLeft() < 1) { + return false; + } + // Check if TL0PICIDX header present (non-flexible mode). + if ((header & 0x10) == 0) { + payload.skipBytes(1); + } + } + + // Check if the optional V header is present, Refer to RFC Section 4.2.1. + if ((header & 0x02) != 0) { + int scalabilityStructure = payload.readUnsignedByte(); + int spatialLayersCount = (scalabilityStructure >> 5) & 0x7; + + // Check Y bit. + if ((scalabilityStructure & 0x10) != 0) { + int scalabilityStructureCount = spatialLayersCount + 1; + if (payload.bytesLeft() < scalabilityStructureCount * SCALABILITY_STRUCTURE_SIZE) { + return false; + } + for (int index = 0; index < scalabilityStructureCount; index++) { + width = payload.readUnsignedShort(); + height = payload.readUnsignedShort(); + } + } + + // Checks G bit, skips all additional temporal layers. + if ((scalabilityStructure & 0x08) != 0) { + // Reads N_G. + int numOfPicInPictureGroup = payload.readUnsignedByte(); + if (payload.bytesLeft() < numOfPicInPictureGroup) { + return false; + } + + for (int picIndex = 0; picIndex < numOfPicInPictureGroup; picIndex++) { + int picture = payload.readUnsignedShort(); + int referenceIndices = (picture & 0x0C) >> 2; + if (payload.bytesLeft() < referenceIndices) { + return false; + } + // Ignore Reference indices. + payload.skipBytes(referenceIndices); + } + } + } + return true; + } + + /** + * Outputs sample metadata of the received fragmented packets. + * + *
        Call this method only after receiving an end of a VP9 partition. + */ + private void outputSampleMetadataForFragmentedPackets() { + checkNotNull(trackOutput) + .sampleMetadata( + fragmentedSampleTimeUs, + isKeyFrame ? C.BUFFER_FLAG_KEY_FRAME : 0, + fragmentedSampleSizeBytes, + /* offset= */ 0, + /* cryptoData= */ null); + fragmentedSampleSizeBytes = C.LENGTH_UNSET; + fragmentedSampleTimeUs = C.TIME_UNSET; + gotFirstPacketOfVp9Frame = false; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/package-info.java new file mode 100644 index 0000000000..aca87817d6 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/rtsp/reader/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.source.rtsp.reader; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/DefaultSsChunkSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/DefaultSsChunkSource.java index d005dac8da..f329cabfcf 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/DefaultSsChunkSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/DefaultSsChunkSource.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.source.smoothstreaming; +import static com.google.android.exoplayer2.trackselection.TrackSelectionUtil.createFallbackOptions; + import android.net.Uri; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; @@ -25,26 +27,27 @@ import com.google.android.exoplayer2.extractor.mp4.TrackEncryptionBox; import com.google.android.exoplayer2.source.BehindLiveWindowException; import com.google.android.exoplayer2.source.chunk.BaseMediaChunkIterator; +import com.google.android.exoplayer2.source.chunk.BundledChunkExtractor; import com.google.android.exoplayer2.source.chunk.Chunk; -import com.google.android.exoplayer2.source.chunk.ChunkExtractorWrapper; +import com.google.android.exoplayer2.source.chunk.ChunkExtractor; import com.google.android.exoplayer2.source.chunk.ChunkHolder; import com.google.android.exoplayer2.source.chunk.ContainerMediaChunk; import com.google.android.exoplayer2.source.chunk.MediaChunk; import com.google.android.exoplayer2.source.chunk.MediaChunkIterator; import com.google.android.exoplayer2.source.smoothstreaming.manifest.SsManifest; import com.google.android.exoplayer2.source.smoothstreaming.manifest.SsManifest.StreamElement; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.upstream.DataSource; import 
com.google.android.exoplayer2.upstream.DataSpec; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy.FallbackSelection; import com.google.android.exoplayer2.upstream.LoaderErrorThrower; import com.google.android.exoplayer2.upstream.TransferListener; -import com.google.android.exoplayer2.util.Util; +import com.google.android.exoplayer2.util.Assertions; import java.io.IOException; import java.util.List; -/** - * A default {@link SsChunkSource} implementation. - */ +/** A default {@link SsChunkSource} implementation. */ public class DefaultSsChunkSource implements SsChunkSource { public static final class Factory implements SsChunkSource.Factory { @@ -59,29 +62,28 @@ public Factory(DataSource.Factory dataSourceFactory) { public SsChunkSource createChunkSource( LoaderErrorThrower manifestLoaderErrorThrower, SsManifest manifest, - int elementIndex, - TrackSelection trackSelection, + int streamElementIndex, + ExoTrackSelection trackSelection, @Nullable TransferListener transferListener) { DataSource dataSource = dataSourceFactory.createDataSource(); if (transferListener != null) { dataSource.addTransferListener(transferListener); } return new DefaultSsChunkSource( - manifestLoaderErrorThrower, manifest, elementIndex, trackSelection, dataSource); + manifestLoaderErrorThrower, manifest, streamElementIndex, trackSelection, dataSource); } - } private final LoaderErrorThrower manifestLoaderErrorThrower; private final int streamElementIndex; - private final ChunkExtractorWrapper[] extractorWrappers; + private final ChunkExtractor[] chunkExtractors; private final DataSource dataSource; - private TrackSelection trackSelection; + private ExoTrackSelection trackSelection; private SsManifest manifest; private int currentManifestChunkOffset; - private IOException fatalError; + @Nullable private IOException fatalError; /** * @param manifestLoaderErrorThrower Throws errors affecting loading of manifests. @@ -94,7 +96,7 @@ public DefaultSsChunkSource( LoaderErrorThrower manifestLoaderErrorThrower, SsManifest manifest, int streamElementIndex, - TrackSelection trackSelection, + ExoTrackSelection trackSelection, DataSource dataSource) { this.manifestLoaderErrorThrower = manifestLoaderErrorThrower; this.manifest = manifest; @@ -103,23 +105,36 @@ public DefaultSsChunkSource( this.dataSource = dataSource; StreamElement streamElement = manifest.streamElements[streamElementIndex]; - extractorWrappers = new ChunkExtractorWrapper[trackSelection.length()]; - for (int i = 0; i < extractorWrappers.length; i++) { + chunkExtractors = new ChunkExtractor[trackSelection.length()]; + for (int i = 0; i < chunkExtractors.length; i++) { int manifestTrackIndex = trackSelection.getIndexInTrackGroup(i); Format format = streamElement.formats[manifestTrackIndex]; + @Nullable TrackEncryptionBox[] trackEncryptionBoxes = - format.drmInitData != null ? manifest.protectionElement.trackEncryptionBoxes : null; + format.drmInitData != null + ? Assertions.checkNotNull(manifest.protectionElement).trackEncryptionBoxes + : null; int nalUnitLengthFieldLength = streamElement.type == C.TRACK_TYPE_VIDEO ? 
4 : 0; - Track track = new Track(manifestTrackIndex, streamElement.type, streamElement.timescale, - C.TIME_UNSET, manifest.durationUs, format, Track.TRANSFORMATION_NONE, - trackEncryptionBoxes, nalUnitLengthFieldLength, null, null); + Track track = + new Track( + manifestTrackIndex, + streamElement.type, + streamElement.timescale, + C.TIME_UNSET, + manifest.durationUs, + format, + Track.TRANSFORMATION_NONE, + trackEncryptionBoxes, + nalUnitLengthFieldLength, + null, + null); FragmentedMp4Extractor extractor = new FragmentedMp4Extractor( FragmentedMp4Extractor.FLAG_WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME | FragmentedMp4Extractor.FLAG_WORKAROUND_IGNORE_TFDT_BOX, /* timestampAdjuster= */ null, track); - extractorWrappers[i] = new ChunkExtractorWrapper(extractor, streamElement.type, format); + chunkExtractors[i] = new BundledChunkExtractor(extractor, streamElement.type, format); } } @@ -132,7 +147,7 @@ public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParame firstSyncUs < positionUs && chunkIndex < streamElement.chunkCount - 1 ? streamElement.getStartTimeUs(chunkIndex + 1) : firstSyncUs; - return Util.resolveSeekPositionUs(positionUs, seekParameters, firstSyncUs, secondSyncUs); + return seekParameters.resolveSeekPositionUs(positionUs, firstSyncUs, secondSyncUs); } @Override @@ -144,8 +159,9 @@ public void updateManifest(SsManifest newManifest) { // There's no overlap between the old and new elements because at least one is empty. currentManifestChunkOffset += currentElementChunkCount; } else { - long currentElementEndTimeUs = currentElement.getStartTimeUs(currentElementChunkCount - 1) - + currentElement.getChunkDurationUs(currentElementChunkCount - 1); + long currentElementEndTimeUs = + currentElement.getStartTimeUs(currentElementChunkCount - 1) + + currentElement.getChunkDurationUs(currentElementChunkCount - 1); long newElementStartTimeUs = newElement.getStartTimeUs(0); if (currentElementEndTimeUs <= newElementStartTimeUs) { // There's no overlap between the old and new elements. 
@@ -159,7 +175,7 @@ public void updateManifest(SsManifest newManifest) { } @Override - public void updateTrackSelection(TrackSelection trackSelection) { + public void updateTrackSelection(ExoTrackSelection trackSelection) { this.trackSelection = trackSelection; } @@ -182,6 +198,15 @@ public int getPreferredQueueSize(long playbackPositionUs, List queue) { + if (fatalError != null) { + return false; + } + return trackSelection.shouldCancelChunkLoad(playbackPositionUs, loadingChunk, queue); + } + @Override public final void getNextChunk( long playbackPositionUs, @@ -235,7 +260,7 @@ public final void getNextChunk( int currentAbsoluteChunkIndex = chunkIndex + currentManifestChunkOffset; int trackSelectionIndex = trackSelection.getSelectedIndex(); - ChunkExtractorWrapper extractorWrapper = extractorWrappers[trackSelectionIndex]; + ChunkExtractor chunkExtractor = chunkExtractors[trackSelectionIndex]; int manifestTrackIndex = trackSelection.getIndexInTrackGroup(trackSelectionIndex); Uri uri = streamElement.buildRequestUri(manifestTrackIndex, chunkIndex); @@ -245,14 +270,13 @@ public final void getNextChunk( trackSelection.getSelectedFormat(), dataSource, uri, - null, currentAbsoluteChunkIndex, chunkStartTimeUs, chunkEndTimeUs, chunkSeekTimeUs, trackSelection.getSelectionReason(), trackSelection.getSelectionData(), - extractorWrapper); + chunkExtractor); } @Override @@ -262,10 +286,26 @@ public void onChunkLoadCompleted(Chunk chunk) { @Override public boolean onChunkLoadError( - Chunk chunk, boolean cancelable, Exception e, long blacklistDurationMs) { + Chunk chunk, + boolean cancelable, + LoadErrorHandlingPolicy.LoadErrorInfo loadErrorInfo, + LoadErrorHandlingPolicy loadErrorHandlingPolicy) { + @Nullable + FallbackSelection fallbackSelection = + loadErrorHandlingPolicy.getFallbackSelectionFor( + createFallbackOptions(trackSelection), loadErrorInfo); return cancelable - && blacklistDurationMs != C.TIME_UNSET - && trackSelection.blacklist(trackSelection.indexOf(chunk.trackFormat), blacklistDurationMs); + && fallbackSelection != null + && fallbackSelection.type == LoadErrorHandlingPolicy.FALLBACK_TYPE_TRACK + && trackSelection.blacklist( + trackSelection.indexOf(chunk.trackFormat), fallbackSelection.exclusionDurationMs); + } + + @Override + public void release() { + for (ChunkExtractor chunkExtractor : chunkExtractors) { + chunkExtractor.release(); + } } // Private methods. @@ -274,15 +314,14 @@ private static MediaChunk newMediaChunk( Format format, DataSource dataSource, Uri uri, - String cacheKey, int chunkIndex, long chunkStartTimeUs, long chunkEndTimeUs, long chunkSeekTimeUs, - int trackSelectionReason, - Object trackSelectionData, - ChunkExtractorWrapper extractorWrapper) { - DataSpec dataSpec = new DataSpec(uri, 0, C.LENGTH_UNSET, cacheKey); + @C.SelectionReason int trackSelectionReason, + @Nullable Object trackSelectionData, + ChunkExtractor chunkExtractor) { + DataSpec dataSpec = new DataSpec(uri); // In SmoothStreaming each chunk contains sample timestamps relative to the start of the chunk. // To convert them the absolute timestamps, we need to set sampleOffsetUs to chunkStartTimeUs. 
long sampleOffsetUs = chunkStartTimeUs; @@ -299,7 +338,7 @@ private static MediaChunk newMediaChunk( chunkIndex, /* chunkCount= */ 1, sampleOffsetUs, - extractorWrapper); + chunkExtractor); } private long resolveTimeToLiveEdgeUs(long playbackPositionUs) { @@ -309,8 +348,9 @@ private long resolveTimeToLiveEdgeUs(long playbackPositionUs) { StreamElement currentElement = manifest.streamElements[streamElementIndex]; int lastChunkIndex = currentElement.chunkCount - 1; - long lastChunkEndTimeUs = currentElement.getStartTimeUs(lastChunkIndex) - + currentElement.getChunkDurationUs(lastChunkIndex); + long lastChunkEndTimeUs = + currentElement.getStartTimeUs(lastChunkIndex) + + currentElement.getChunkDurationUs(lastChunkIndex); return lastChunkEndTimeUs - playbackPositionUs; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/SsChunkSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/SsChunkSource.java index 111393140e..875b1379c9 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/SsChunkSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/SsChunkSource.java @@ -18,13 +18,11 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.source.chunk.ChunkSource; import com.google.android.exoplayer2.source.smoothstreaming.manifest.SsManifest; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.upstream.LoaderErrorThrower; import com.google.android.exoplayer2.upstream.TransferListener; -/** - * A {@link ChunkSource} for SmoothStreaming. - */ +/** A {@link ChunkSource} for SmoothStreaming. */ public interface SsChunkSource extends ChunkSource { /** Factory for {@link SsChunkSource}s. */ @@ -45,7 +43,7 @@ SsChunkSource createChunkSource( LoaderErrorThrower manifestLoaderErrorThrower, SsManifest manifest, int streamElementIndex, - TrackSelection trackSelection, + ExoTrackSelection trackSelection, @Nullable TransferListener transferListener); } @@ -61,5 +59,5 @@ SsChunkSource createChunkSource( * * @param trackSelection The new track selection instance. Must be equivalent to the previous one. 
*/ - void updateTrackSelection(TrackSelection trackSelection); + void updateTrackSelection(ExoTrackSelection trackSelection); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/SsMediaPeriod.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/SsMediaPeriod.java index f7940fed1b..d7e5d0bfd7 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/SsMediaPeriod.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/SsMediaPeriod.java @@ -19,18 +19,19 @@ import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.SeekParameters; +import com.google.android.exoplayer2.drm.DrmSessionEventListener; import com.google.android.exoplayer2.drm.DrmSessionManager; import com.google.android.exoplayer2.offline.StreamKey; import com.google.android.exoplayer2.source.CompositeSequenceableLoaderFactory; import com.google.android.exoplayer2.source.MediaPeriod; -import com.google.android.exoplayer2.source.MediaSourceEventListener.EventDispatcher; +import com.google.android.exoplayer2.source.MediaSourceEventListener; import com.google.android.exoplayer2.source.SampleStream; import com.google.android.exoplayer2.source.SequenceableLoader; import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.TrackGroupArray; import com.google.android.exoplayer2.source.chunk.ChunkSampleStream; import com.google.android.exoplayer2.source.smoothstreaming.manifest.SsManifest; -import com.google.android.exoplayer2.trackselection.TrackSelection; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection; import com.google.android.exoplayer2.upstream.Allocator; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; import com.google.android.exoplayer2.upstream.LoaderErrorThrower; @@ -47,9 +48,10 @@ private final SsChunkSource.Factory chunkSourceFactory; @Nullable private final TransferListener transferListener; private final LoaderErrorThrower manifestLoaderErrorThrower; - private final DrmSessionManager drmSessionManager; + private final DrmSessionManager drmSessionManager; + private final DrmSessionEventListener.EventDispatcher drmEventDispatcher; private final LoadErrorHandlingPolicy loadErrorHandlingPolicy; - private final EventDispatcher eventDispatcher; + private final MediaSourceEventListener.EventDispatcher mediaSourceEventDispatcher; private final Allocator allocator; private final TrackGroupArray trackGroups; private final CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory; @@ -58,16 +60,16 @@ private SsManifest manifest; private ChunkSampleStream[] sampleStreams; private SequenceableLoader compositeSequenceableLoader; - private boolean notifiedReadingStarted; public SsMediaPeriod( SsManifest manifest, SsChunkSource.Factory chunkSourceFactory, @Nullable TransferListener transferListener, CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory, - DrmSessionManager drmSessionManager, + DrmSessionManager drmSessionManager, + DrmSessionEventListener.EventDispatcher drmEventDispatcher, LoadErrorHandlingPolicy loadErrorHandlingPolicy, - EventDispatcher eventDispatcher, + MediaSourceEventListener.EventDispatcher mediaSourceEventDispatcher, LoaderErrorThrower manifestLoaderErrorThrower, Allocator allocator) { this.manifest = manifest; @@ -75,15 +77,15 @@ public SsMediaPeriod( this.transferListener = transferListener; 
this.manifestLoaderErrorThrower = manifestLoaderErrorThrower; this.drmSessionManager = drmSessionManager; + this.drmEventDispatcher = drmEventDispatcher; this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; - this.eventDispatcher = eventDispatcher; + this.mediaSourceEventDispatcher = mediaSourceEventDispatcher; this.allocator = allocator; this.compositeSequenceableLoaderFactory = compositeSequenceableLoaderFactory; trackGroups = buildTrackGroups(manifest, drmSessionManager); sampleStreams = newSampleStreamArray(0); compositeSequenceableLoader = compositeSequenceableLoaderFactory.createCompositeSequenceableLoader(sampleStreams); - eventDispatcher.mediaPeriodCreated(); } public void updateManifest(SsManifest manifest) { @@ -99,7 +101,6 @@ public void release() { sampleStream.release(); } callback = null; - eventDispatcher.mediaPeriodReleased(); } // MediaPeriod implementation. @@ -122,7 +123,7 @@ public TrackGroupArray getTrackGroups() { @Override public long selectTracks( - @NullableType TrackSelection[] selections, + @NullableType ExoTrackSelection[] selections, boolean[] mayRetainStreamFlags, @NullableType SampleStream[] streams, boolean[] streamResetFlags, @@ -155,10 +156,10 @@ public long selectTracks( } @Override - public List getStreamKeys(List trackSelections) { + public List getStreamKeys(List trackSelections) { List streamKeys = new ArrayList<>(); for (int selectionIndex = 0; selectionIndex < trackSelections.size(); selectionIndex++) { - TrackSelection trackSelection = trackSelections.get(selectionIndex); + ExoTrackSelection trackSelection = trackSelections.get(selectionIndex); int streamElementIndex = trackGroups.indexOf(trackSelection.getTrackGroup()); for (int i = 0; i < trackSelection.length(); i++) { streamKeys.add(new StreamKey(streamElementIndex, trackSelection.getIndexInTrackGroup(i))); @@ -196,10 +197,6 @@ public long getNextLoadPositionUs() { @Override public long readDiscontinuity() { - if (!notifiedReadingStarted) { - eventDispatcher.readingStarted(); - notifiedReadingStarted = true; - } return C.TIME_UNSET; } @@ -235,16 +232,12 @@ public void onContinueLoadingRequested(ChunkSampleStream sampleSt // Private methods. 
- private ChunkSampleStream buildSampleStream(TrackSelection selection, - long positionUs) { + private ChunkSampleStream buildSampleStream( + ExoTrackSelection selection, long positionUs) { int streamElementIndex = trackGroups.indexOf(selection.getTrackGroup()); SsChunkSource chunkSource = chunkSourceFactory.createChunkSource( - manifestLoaderErrorThrower, - manifest, - streamElementIndex, - selection, - transferListener); + manifestLoaderErrorThrower, manifest, streamElementIndex, selection, transferListener); return new ChunkSampleStream<>( manifest.streamElements[streamElementIndex].type, null, @@ -254,12 +247,13 @@ private ChunkSampleStream buildSampleStream(TrackSelection select allocator, positionUs, drmSessionManager, + drmEventDispatcher, loadErrorHandlingPolicy, - eventDispatcher); + mediaSourceEventDispatcher); } private static TrackGroupArray buildTrackGroups( - SsManifest manifest, DrmSessionManager drmSessionManager) { + SsManifest manifest, DrmSessionManager drmSessionManager) { TrackGroup[] trackGroups = new TrackGroup[manifest.streamElements.length]; for (int i = 0; i < manifest.streamElements.length; i++) { Format[] manifestFormats = manifest.streamElements[i].formats; @@ -267,12 +261,9 @@ private static TrackGroupArray buildTrackGroups( for (int j = 0; j < manifestFormats.length; j++) { Format manifestFormat = manifestFormats[j]; exposedFormats[j] = - manifestFormat.drmInitData != null - ? manifestFormat.copyWithExoMediaCryptoType( - drmSessionManager.getExoMediaCryptoType(manifestFormat.drmInitData)) - : manifestFormat; + manifestFormat.copyWithCryptoType(drmSessionManager.getCryptoType(manifestFormat)); } - trackGroups[i] = new TrackGroup(exposedFormats); + trackGroups[i] = new TrackGroup(/* id= */ Integer.toString(i), exposedFormats); } return new TrackGroupArray(trackGroups); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/SsMediaSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/SsMediaSource.java index 89dd8039ef..eebab34190 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/SsMediaSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/SsMediaSource.java @@ -15,20 +15,30 @@ */ package com.google.android.exoplayer2.source.smoothstreaming; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.Math.max; +import static java.lang.Math.min; + import android.net.Uri; import android.os.Handler; +import android.os.Looper; import android.os.SystemClock; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlayerLibraryInfo; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.Timeline; -import com.google.android.exoplayer2.drm.DrmSession; +import com.google.android.exoplayer2.drm.DefaultDrmSessionManagerProvider; +import com.google.android.exoplayer2.drm.DrmSessionEventListener; import com.google.android.exoplayer2.drm.DrmSessionManager; +import com.google.android.exoplayer2.drm.DrmSessionManagerProvider; import com.google.android.exoplayer2.offline.FilteringManifestParser; import com.google.android.exoplayer2.offline.StreamKey; import com.google.android.exoplayer2.source.BaseMediaSource; import com.google.android.exoplayer2.source.CompositeSequenceableLoaderFactory; import com.google.android.exoplayer2.source.DefaultCompositeSequenceableLoaderFactory; +import 
com.google.android.exoplayer2.source.LoadEventInfo; +import com.google.android.exoplayer2.source.MediaLoadData; import com.google.android.exoplayer2.source.MediaPeriod; import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.source.MediaSourceEventListener; @@ -39,17 +49,21 @@ import com.google.android.exoplayer2.source.smoothstreaming.manifest.SsManifest; import com.google.android.exoplayer2.source.smoothstreaming.manifest.SsManifest.StreamElement; import com.google.android.exoplayer2.source.smoothstreaming.manifest.SsManifestParser; -import com.google.android.exoplayer2.source.smoothstreaming.manifest.SsUtil; import com.google.android.exoplayer2.upstream.Allocator; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DefaultLoadErrorHandlingPolicy; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy.LoadErrorInfo; import com.google.android.exoplayer2.upstream.Loader; import com.google.android.exoplayer2.upstream.Loader.LoadErrorAction; import com.google.android.exoplayer2.upstream.LoaderErrorThrower; import com.google.android.exoplayer2.upstream.ParsingLoadable; import com.google.android.exoplayer2.upstream.TransferListener; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -63,23 +77,30 @@ public final class SsMediaSource extends BaseMediaSource } /** Factory for {@link SsMediaSource}. */ + @SuppressWarnings("deprecation") // Implement deprecated type for backwards compatibility. public static final class Factory implements MediaSourceFactory { private final SsChunkSource.Factory chunkSourceFactory; @Nullable private final DataSource.Factory manifestDataSourceFactory; - @Nullable private ParsingLoadable.Parser manifestParser; - @Nullable private List streamKeys; private CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory; - private DrmSessionManager drmSessionManager; + private DrmSessionManagerProvider drmSessionManagerProvider; private LoadErrorHandlingPolicy loadErrorHandlingPolicy; private long livePresentationDelayMs; - private boolean isCreateCalled; - @Nullable private Object tag; + @Nullable private ParsingLoadable.Parser manifestParser; /** * Creates a new factory for {@link SsMediaSource}s. * + *
<p>The factory will use the following default components:
+ *
+ * <ul>
+ *   <li>{@link DefaultSsChunkSource.Factory}
+ *   <li>{@link DefaultDrmSessionManagerProvider}
+ *   <li>{@link DefaultLoadErrorHandlingPolicy}
+ *   <li>{@link DefaultCompositeSequenceableLoaderFactory}
+ * </ul>
        + * * @param dataSourceFactory A factory for {@link DataSource} instances that will be used to load * manifest and media data. */ @@ -88,70 +109,41 @@ public Factory(DataSource.Factory dataSourceFactory) { } /** - * Creates a new factory for {@link SsMediaSource}s. + * Creates a new factory for {@link SsMediaSource}s. The factory will use the following default + * components: + * + *
<ul>
+ *   <li>{@link DefaultDrmSessionManagerProvider}
+ *   <li>{@link DefaultLoadErrorHandlingPolicy}
+ *   <li>{@link DefaultCompositeSequenceableLoaderFactory}
+ * </ul>
        * * @param chunkSourceFactory A factory for {@link SsChunkSource} instances. * @param manifestDataSourceFactory A factory for {@link DataSource} instances that will be used * to load (and refresh) the manifest. May be {@code null} if the factory will only ever be * used to create create media sources with sideloaded manifests via {@link - * #createMediaSource(SsManifest, Handler, MediaSourceEventListener)}. + * #createMediaSource(SsManifest, MediaItem)}. */ public Factory( SsChunkSource.Factory chunkSourceFactory, @Nullable DataSource.Factory manifestDataSourceFactory) { - this.chunkSourceFactory = Assertions.checkNotNull(chunkSourceFactory); + this.chunkSourceFactory = checkNotNull(chunkSourceFactory); this.manifestDataSourceFactory = manifestDataSourceFactory; - drmSessionManager = DrmSessionManager.getDummyDrmSessionManager(); + drmSessionManagerProvider = new DefaultDrmSessionManagerProvider(); loadErrorHandlingPolicy = new DefaultLoadErrorHandlingPolicy(); livePresentationDelayMs = DEFAULT_LIVE_PRESENTATION_DELAY_MS; compositeSequenceableLoaderFactory = new DefaultCompositeSequenceableLoaderFactory(); } - /** - * Sets a tag for the media source which will be published in the {@link Timeline} of the source - * as {@link Timeline.Window#tag}. - * - * @param tag A tag for the media source. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - */ - public Factory setTag(@Nullable Object tag) { - Assertions.checkState(!isCreateCalled); - this.tag = tag; - return this; - } - - /** - * Sets the minimum number of times to retry if a loading error occurs. See {@link - * #setLoadErrorHandlingPolicy} for the default value. - * - *
        Calling this method is equivalent to calling {@link #setLoadErrorHandlingPolicy} with - * {@link DefaultLoadErrorHandlingPolicy#DefaultLoadErrorHandlingPolicy(int) - * DefaultLoadErrorHandlingPolicy(minLoadableRetryCount)} - * - * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - * @deprecated Use {@link #setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy)} instead. - */ - @Deprecated - public Factory setMinLoadableRetryCount(int minLoadableRetryCount) { - return setLoadErrorHandlingPolicy(new DefaultLoadErrorHandlingPolicy(minLoadableRetryCount)); - } - - /** - * Sets the {@link LoadErrorHandlingPolicy}. The default value is created by calling {@link - * DefaultLoadErrorHandlingPolicy#DefaultLoadErrorHandlingPolicy()}. - * - *
        Calling this method overrides any calls to {@link #setMinLoadableRetryCount(int)}. - * - * @param loadErrorHandlingPolicy A {@link LoadErrorHandlingPolicy}. - * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. - */ + @CanIgnoreReturnValue + @Override public Factory setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy loadErrorHandlingPolicy) { - Assertions.checkState(!isCreateCalled); - this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; + this.loadErrorHandlingPolicy = + checkNotNull( + loadErrorHandlingPolicy, + "MediaSource.Factory#setLoadErrorHandlingPolicy no longer handles null by" + + " instantiating a new DefaultLoadErrorHandlingPolicy. Explicitly construct and" + + " pass an instance in order to retain the old behavior."); return this; } @@ -163,10 +155,9 @@ public Factory setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy loadErrorHandl * @param livePresentationDelayMs For live playbacks, the duration in milliseconds by which the * default start position should precede the end of the live window. * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. */ + @CanIgnoreReturnValue public Factory setLivePresentationDelayMs(long livePresentationDelayMs) { - Assertions.checkState(!isCreateCalled); this.livePresentationDelayMs = livePresentationDelayMs; return this; } @@ -176,30 +167,45 @@ public Factory setLivePresentationDelayMs(long livePresentationDelayMs) { * * @param manifestParser A parser for loaded manifest data. * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. */ - public Factory setManifestParser(ParsingLoadable.Parser manifestParser) { - Assertions.checkState(!isCreateCalled); - this.manifestParser = Assertions.checkNotNull(manifestParser); + @CanIgnoreReturnValue + public Factory setManifestParser( + @Nullable ParsingLoadable.Parser manifestParser) { + this.manifestParser = manifestParser; return this; } /** * Sets the factory to create composite {@link SequenceableLoader}s for when this media source - * loads data from multiple streams (video, audio etc.). The default is an instance of {@link - * DefaultCompositeSequenceableLoaderFactory}. + * loads data from multiple streams (video, audio etc.). * * @param compositeSequenceableLoaderFactory A factory to create composite {@link * SequenceableLoader}s for when this media source loads data from multiple streams (video, * audio etc.). * @return This factory, for convenience. - * @throws IllegalStateException If one of the {@code create} methods has already been called. */ + @CanIgnoreReturnValue public Factory setCompositeSequenceableLoaderFactory( CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory) { - Assertions.checkState(!isCreateCalled); this.compositeSequenceableLoaderFactory = - Assertions.checkNotNull(compositeSequenceableLoaderFactory); + checkNotNull( + compositeSequenceableLoaderFactory, + "SsMediaSource.Factory#setCompositeSequenceableLoaderFactory no longer handles null" + + " by instantiating a new DefaultCompositeSequenceableLoaderFactory. 
Explicitly" + + " construct and pass an instance in order to retain the old behavior."); + return this; + } + + @CanIgnoreReturnValue + @Override + public Factory setDrmSessionManagerProvider( + DrmSessionManagerProvider drmSessionManagerProvider) { + this.drmSessionManagerProvider = + checkNotNull( + drmSessionManagerProvider, + "MediaSource.Factory#setDrmSessionManagerProvider no longer handles null by" + + " instantiating a new DefaultDrmSessionManagerProvider. Explicitly construct" + + " and pass an instance in order to retain the old behavior."); return this; } @@ -212,143 +218,109 @@ public Factory setCompositeSequenceableLoaderFactory( * @throws IllegalArgumentException If {@link SsManifest#isLive} is true. */ public SsMediaSource createMediaSource(SsManifest manifest) { + return createMediaSource(manifest, MediaItem.fromUri(Uri.EMPTY)); + } + + /** + * Returns a new {@link SsMediaSource} using the current parameters and the specified sideloaded + * manifest. + * + * @param manifest The manifest. {@link SsManifest#isLive} must be false. + * @param mediaItem The {@link MediaItem} to be included in the timeline. + * @return The new {@link SsMediaSource}. + * @throws IllegalArgumentException If {@link SsManifest#isLive} is true. + */ + public SsMediaSource createMediaSource(SsManifest manifest, MediaItem mediaItem) { Assertions.checkArgument(!manifest.isLive); - isCreateCalled = true; - if (streamKeys != null && !streamKeys.isEmpty()) { + List streamKeys = + mediaItem.localConfiguration != null + ? mediaItem.localConfiguration.streamKeys + : ImmutableList.of(); + if (!streamKeys.isEmpty()) { manifest = manifest.copy(streamKeys); } + boolean hasUri = mediaItem.localConfiguration != null; + mediaItem = + mediaItem + .buildUpon() + .setMimeType(MimeTypes.APPLICATION_SS) + .setUri(hasUri ? mediaItem.localConfiguration.uri : Uri.EMPTY) + .build(); return new SsMediaSource( + mediaItem, manifest, - /* manifestUri= */ null, /* manifestDataSourceFactory= */ null, /* manifestParser= */ null, chunkSourceFactory, compositeSequenceableLoaderFactory, - drmSessionManager, + drmSessionManagerProvider.get(mediaItem), loadErrorHandlingPolicy, - livePresentationDelayMs, - tag); - } - - /** - * @deprecated Use {@link #createMediaSource(SsManifest)} and {@link #addEventListener(Handler, - * MediaSourceEventListener)} instead. - */ - @Deprecated - public SsMediaSource createMediaSource( - SsManifest manifest, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - SsMediaSource mediaSource = createMediaSource(manifest); - if (eventHandler != null && eventListener != null) { - mediaSource.addEventListener(eventHandler, eventListener); - } - return mediaSource; - } - - /** - * @deprecated Use {@link #createMediaSource(Uri)} and {@link #addEventListener(Handler, - * MediaSourceEventListener)} instead. - */ - @Deprecated - public SsMediaSource createMediaSource( - Uri manifestUri, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - SsMediaSource mediaSource = createMediaSource(manifestUri); - if (eventHandler != null && eventListener != null) { - mediaSource.addEventListener(eventHandler, eventListener); - } - return mediaSource; - } - - /** - * Sets the {@link DrmSessionManager} to use for acquiring {@link DrmSession DrmSessions}. The - * default value is {@link DrmSessionManager#DUMMY}. - * - * @param drmSessionManager The {@link DrmSessionManager}. - * @return This factory, for convenience. 
- * @throws IllegalStateException If one of the {@code create} methods has already been called. - */ - @Override - public Factory setDrmSessionManager(DrmSessionManager drmSessionManager) { - Assertions.checkState(!isCreateCalled); - this.drmSessionManager = - drmSessionManager != null - ? drmSessionManager - : DrmSessionManager.getDummyDrmSessionManager(); - return this; + livePresentationDelayMs); } /** * Returns a new {@link SsMediaSource} using the current parameters. * - * @param manifestUri The manifest {@link Uri}. + * @param mediaItem The {@link MediaItem}. * @return The new {@link SsMediaSource}. + * @throws NullPointerException if {@link MediaItem#localConfiguration} is {@code null}. */ @Override - public SsMediaSource createMediaSource(Uri manifestUri) { - isCreateCalled = true; + public SsMediaSource createMediaSource(MediaItem mediaItem) { + checkNotNull(mediaItem.localConfiguration); + @Nullable ParsingLoadable.Parser manifestParser = this.manifestParser; if (manifestParser == null) { manifestParser = new SsManifestParser(); } - if (streamKeys != null) { + List streamKeys = mediaItem.localConfiguration.streamKeys; + if (!streamKeys.isEmpty()) { manifestParser = new FilteringManifestParser<>(manifestParser, streamKeys); } + return new SsMediaSource( + mediaItem, /* manifest= */ null, - Assertions.checkNotNull(manifestUri), manifestDataSourceFactory, manifestParser, chunkSourceFactory, compositeSequenceableLoaderFactory, - drmSessionManager, + drmSessionManagerProvider.get(mediaItem), loadErrorHandlingPolicy, - livePresentationDelayMs, - tag); + livePresentationDelayMs); } @Override - public Factory setStreamKeys(List streamKeys) { - Assertions.checkState(!isCreateCalled); - this.streamKeys = streamKeys; - return this; + public @C.ContentType int[] getSupportedTypes() { + return new int[] {C.CONTENT_TYPE_SS}; } - - @Override - public int[] getSupportedTypes() { - return new int[] {C.TYPE_SS}; - } - } /** * The default presentation delay for live streams. The presentation delay is the duration by * which the default start position precedes the end of the live window. */ - public static final long DEFAULT_LIVE_PRESENTATION_DELAY_MS = 30000; + public static final long DEFAULT_LIVE_PRESENTATION_DELAY_MS = 30_000; - /** - * The minimum period between manifest refreshes. - */ + /** The minimum period between manifest refreshes. */ private static final int MINIMUM_MANIFEST_REFRESH_PERIOD_MS = 5000; /** * The minimum default start position for live streams, relative to the start of the live window. 
*/ - private static final long MIN_LIVE_DEFAULT_START_POSITION_US = 5000000; + private static final long MIN_LIVE_DEFAULT_START_POSITION_US = 5_000_000; private final boolean sideloadedManifest; private final Uri manifestUri; + private final MediaItem.LocalConfiguration localConfiguration; + private final MediaItem mediaItem; private final DataSource.Factory manifestDataSourceFactory; private final SsChunkSource.Factory chunkSourceFactory; private final CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory; - private final DrmSessionManager drmSessionManager; + private final DrmSessionManager drmSessionManager; private final LoadErrorHandlingPolicy loadErrorHandlingPolicy; private final long livePresentationDelayMs; private final EventDispatcher manifestEventDispatcher; private final ParsingLoadable.Parser manifestParser; private final ArrayList mediaPeriods; - @Nullable private final Object tag; private DataSource manifestDataSource; private Loader manifestLoader; @@ -360,178 +332,24 @@ public int[] getSupportedTypes() { private Handler manifestRefreshHandler; - /** - * Constructs an instance to play a given {@link SsManifest}, which must not be live. - * - * @param manifest The manifest. {@link SsManifest#isLive} must be false. - * @param chunkSourceFactory A factory for {@link SsChunkSource} instances. - * @param eventHandler A handler for events. May be null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @deprecated Use {@link Factory} instead. - */ - @Deprecated - @SuppressWarnings("deprecation") - public SsMediaSource( - SsManifest manifest, - SsChunkSource.Factory chunkSourceFactory, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - this( - manifest, - chunkSourceFactory, - DefaultLoadErrorHandlingPolicy.DEFAULT_MIN_LOADABLE_RETRY_COUNT, - eventHandler, - eventListener); - } - - /** - * Constructs an instance to play a given {@link SsManifest}, which must not be live. - * - * @param manifest The manifest. {@link SsManifest#isLive} must be false. - * @param chunkSourceFactory A factory for {@link SsChunkSource} instances. - * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs. - * @param eventHandler A handler for events. May be null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @deprecated Use {@link Factory} instead. - */ - @Deprecated - public SsMediaSource( - SsManifest manifest, - SsChunkSource.Factory chunkSourceFactory, - int minLoadableRetryCount, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - this( - manifest, - /* manifestUri= */ null, - /* manifestDataSourceFactory= */ null, - /* manifestParser= */ null, - chunkSourceFactory, - new DefaultCompositeSequenceableLoaderFactory(), - DrmSessionManager.getDummyDrmSessionManager(), - new DefaultLoadErrorHandlingPolicy(minLoadableRetryCount), - DEFAULT_LIVE_PRESENTATION_DELAY_MS, - /* tag= */ null); - if (eventHandler != null && eventListener != null) { - addEventListener(eventHandler, eventListener); - } - } - - /** - * Constructs an instance to play the manifest at a given {@link Uri}, which may be live or - * on-demand. - * - * @param manifestUri The manifest {@link Uri}. 
- * @param manifestDataSourceFactory A factory for {@link DataSource} instances that will be used - * to load (and refresh) the manifest. - * @param chunkSourceFactory A factory for {@link SsChunkSource} instances. - * @param eventHandler A handler for events. May be null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @deprecated Use {@link Factory} instead. - */ - @Deprecated - @SuppressWarnings("deprecation") - public SsMediaSource( - Uri manifestUri, - DataSource.Factory manifestDataSourceFactory, - SsChunkSource.Factory chunkSourceFactory, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - this( - manifestUri, - manifestDataSourceFactory, - chunkSourceFactory, - DefaultLoadErrorHandlingPolicy.DEFAULT_MIN_LOADABLE_RETRY_COUNT, - DEFAULT_LIVE_PRESENTATION_DELAY_MS, - eventHandler, - eventListener); - } - - /** - * Constructs an instance to play the manifest at a given {@link Uri}, which may be live or - * on-demand. - * - * @param manifestUri The manifest {@link Uri}. - * @param manifestDataSourceFactory A factory for {@link DataSource} instances that will be used - * to load (and refresh) the manifest. - * @param chunkSourceFactory A factory for {@link SsChunkSource} instances. - * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs. - * @param livePresentationDelayMs For live playbacks, the duration in milliseconds by which the - * default start position should precede the end of the live window. - * @param eventHandler A handler for events. May be null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @deprecated Use {@link Factory} instead. - */ - @Deprecated - @SuppressWarnings("deprecation") - public SsMediaSource( - Uri manifestUri, - DataSource.Factory manifestDataSourceFactory, - SsChunkSource.Factory chunkSourceFactory, - int minLoadableRetryCount, - long livePresentationDelayMs, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - this(manifestUri, manifestDataSourceFactory, new SsManifestParser(), chunkSourceFactory, - minLoadableRetryCount, livePresentationDelayMs, eventHandler, eventListener); - } - - /** - * Constructs an instance to play the manifest at a given {@link Uri}, which may be live or - * on-demand. - * - * @param manifestUri The manifest {@link Uri}. - * @param manifestDataSourceFactory A factory for {@link DataSource} instances that will be used - * to load (and refresh) the manifest. - * @param manifestParser A parser for loaded manifest data. - * @param chunkSourceFactory A factory for {@link SsChunkSource} instances. - * @param minLoadableRetryCount The minimum number of times to retry if a loading error occurs. - * @param livePresentationDelayMs For live playbacks, the duration in milliseconds by which the - * default start position should precede the end of the live window. - * @param eventHandler A handler for events. May be null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @deprecated Use {@link Factory} instead. 
- */ - @Deprecated - public SsMediaSource( - Uri manifestUri, - DataSource.Factory manifestDataSourceFactory, - ParsingLoadable.Parser manifestParser, - SsChunkSource.Factory chunkSourceFactory, - int minLoadableRetryCount, - long livePresentationDelayMs, - @Nullable Handler eventHandler, - @Nullable MediaSourceEventListener eventListener) { - this( - /* manifest= */ null, - manifestUri, - manifestDataSourceFactory, - manifestParser, - chunkSourceFactory, - new DefaultCompositeSequenceableLoaderFactory(), - DrmSessionManager.getDummyDrmSessionManager(), - new DefaultLoadErrorHandlingPolicy(minLoadableRetryCount), - livePresentationDelayMs, - /* tag= */ null); - if (eventHandler != null && eventListener != null) { - addEventListener(eventHandler, eventListener); - } - } - private SsMediaSource( + MediaItem mediaItem, @Nullable SsManifest manifest, - @Nullable Uri manifestUri, @Nullable DataSource.Factory manifestDataSourceFactory, @Nullable ParsingLoadable.Parser manifestParser, SsChunkSource.Factory chunkSourceFactory, CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory, - DrmSessionManager drmSessionManager, + DrmSessionManager drmSessionManager, LoadErrorHandlingPolicy loadErrorHandlingPolicy, - long livePresentationDelayMs, - @Nullable Object tag) { + long livePresentationDelayMs) { Assertions.checkState(manifest == null || !manifest.isLive); + this.mediaItem = mediaItem; + localConfiguration = checkNotNull(mediaItem.localConfiguration); this.manifest = manifest; - this.manifestUri = manifestUri == null ? null : SsUtil.fixManifestUri(manifestUri); + this.manifestUri = + localConfiguration.uri.equals(Uri.EMPTY) + ? null + : Util.fixSmoothStreamingIsmManifestUri(localConfiguration.uri); this.manifestDataSourceFactory = manifestDataSourceFactory; this.manifestParser = manifestParser; this.chunkSourceFactory = chunkSourceFactory; @@ -540,7 +358,6 @@ private SsMediaSource( this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; this.livePresentationDelayMs = livePresentationDelayMs; this.manifestEventDispatcher = createEventDispatcher(/* mediaPeriodId= */ null); - this.tag = tag; sideloadedManifest = manifest != null; mediaPeriods = new ArrayList<>(); } @@ -548,23 +365,23 @@ private SsMediaSource( // MediaSource implementation. 
@Override - @Nullable - public Object getTag() { - return tag; + public MediaItem getMediaItem() { + return mediaItem; } @Override protected void prepareSourceInternal(@Nullable TransferListener mediaTransferListener) { this.mediaTransferListener = mediaTransferListener; drmSessionManager.prepare(); + drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), getPlayerId()); if (sideloadedManifest) { manifestLoaderErrorThrower = new LoaderErrorThrower.Dummy(); processManifest(); } else { manifestDataSource = manifestDataSourceFactory.createDataSource(); - manifestLoader = new Loader("Loader:Manifest"); + manifestLoader = new Loader("SsMediaSource"); manifestLoaderErrorThrower = manifestLoader; - manifestRefreshHandler = new Handler(); + manifestRefreshHandler = Util.createHandlerForCurrentLooper(); startLoadingManifest(); } } @@ -576,7 +393,8 @@ public void maybeThrowSourceInfoRefreshError() throws IOException { @Override public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long startPositionUs) { - EventDispatcher eventDispatcher = createEventDispatcher(id); + MediaSourceEventListener.EventDispatcher mediaSourceEventDispatcher = createEventDispatcher(id); + DrmSessionEventListener.EventDispatcher drmEventDispatcher = createDrmEventDispatcher(id); SsMediaPeriod period = new SsMediaPeriod( manifest, @@ -584,8 +402,9 @@ public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long star mediaTransferListener, compositeSequenceableLoaderFactory, drmSessionManager, + drmEventDispatcher, loadErrorHandlingPolicy, - eventDispatcher, + mediaSourceEventDispatcher, manifestLoaderErrorThrower, allocator); mediaPeriods.add(period); @@ -593,9 +412,9 @@ public MediaPeriod createPeriod(MediaPeriodId id, Allocator allocator, long star } @Override - public void releasePeriod(MediaPeriod period) { - ((SsMediaPeriod) period).release(); - mediaPeriods.remove(period); + public void releasePeriod(MediaPeriod mediaPeriod) { + ((SsMediaPeriod) mediaPeriod).release(); + mediaPeriods.remove(mediaPeriod); } @Override @@ -617,16 +436,19 @@ protected void releaseSourceInternal() { // Loader.Callback implementation @Override - public void onLoadCompleted(ParsingLoadable loadable, long elapsedRealtimeMs, - long loadDurationMs) { - manifestEventDispatcher.loadCompleted( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), - loadable.type, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded()); + public void onLoadCompleted( + ParsingLoadable loadable, long elapsedRealtimeMs, long loadDurationMs) { + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + manifestEventDispatcher.loadCompleted(loadEventInfo, loadable.type); manifest = loadable.getResult(); manifestLoadStartTimestamp = elapsedRealtimeMs - loadDurationMs; processManifest(); @@ -634,16 +456,22 @@ public void onLoadCompleted(ParsingLoadable loadable, long elapsedRe } @Override - public void onLoadCanceled(ParsingLoadable loadable, long elapsedRealtimeMs, - long loadDurationMs, boolean released) { - manifestEventDispatcher.loadCanceled( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), - loadable.type, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded()); + public void onLoadCanceled( + ParsingLoadable loadable, + long 
elapsedRealtimeMs, + long loadDurationMs, + boolean released) { + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + manifestEventDispatcher.loadCanceled(loadEventInfo, loadable.type); } @Override @@ -653,23 +481,28 @@ public LoadErrorAction onLoadError( long loadDurationMs, IOException error, int errorCount) { + LoadEventInfo loadEventInfo = + new LoadEventInfo( + loadable.loadTaskId, + loadable.dataSpec, + loadable.getUri(), + loadable.getResponseHeaders(), + elapsedRealtimeMs, + loadDurationMs, + loadable.bytesLoaded()); + MediaLoadData mediaLoadData = new MediaLoadData(loadable.type); long retryDelayMs = loadErrorHandlingPolicy.getRetryDelayMsFor( - C.DATA_TYPE_MANIFEST, loadDurationMs, error, errorCount); + new LoadErrorInfo(loadEventInfo, mediaLoadData, error, errorCount)); LoadErrorAction loadErrorAction = retryDelayMs == C.TIME_UNSET ? Loader.DONT_RETRY_FATAL : Loader.createRetryAction(/* resetErrorCount= */ false, retryDelayMs); - manifestEventDispatcher.loadError( - loadable.dataSpec, - loadable.getUri(), - loadable.getResponseHeaders(), - loadable.type, - elapsedRealtimeMs, - loadDurationMs, - loadable.bytesLoaded(), - error, - !loadErrorAction.isRetry()); + boolean wasCanceled = !loadErrorAction.isRetry(); + manifestEventDispatcher.loadError(loadEventInfo, loadable.type, error, wasCanceled); + if (wasCanceled) { + loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); + } return loadErrorAction; } @@ -684,9 +517,12 @@ private void processManifest() { long endTimeUs = Long.MIN_VALUE; for (StreamElement element : manifest.streamElements) { if (element.chunkCount > 0) { - startTimeUs = Math.min(startTimeUs, element.getStartTimeUs(0)); - endTimeUs = Math.max(endTimeUs, element.getStartTimeUs(element.chunkCount - 1) - + element.getChunkDurationUs(element.chunkCount - 1)); + startTimeUs = min(startTimeUs, element.getStartTimeUs(0)); + endTimeUs = + max( + endTimeUs, + element.getStartTimeUs(element.chunkCount - 1) + + element.getChunkDurationUs(element.chunkCount - 1)); } } @@ -701,20 +537,20 @@ private void processManifest() { /* windowDefaultStartPositionUs= */ 0, /* isSeekable= */ true, /* isDynamic= */ manifest.isLive, - /* isLive= */ manifest.isLive, + /* useLiveConfiguration= */ manifest.isLive, manifest, - tag); + mediaItem); } else if (manifest.isLive) { if (manifest.dvrWindowLengthUs != C.TIME_UNSET && manifest.dvrWindowLengthUs > 0) { - startTimeUs = Math.max(startTimeUs, endTimeUs - manifest.dvrWindowLengthUs); + startTimeUs = max(startTimeUs, endTimeUs - manifest.dvrWindowLengthUs); } long durationUs = endTimeUs - startTimeUs; - long defaultStartPositionUs = durationUs - C.msToUs(livePresentationDelayMs); + long defaultStartPositionUs = durationUs - Util.msToUs(livePresentationDelayMs); if (defaultStartPositionUs < MIN_LIVE_DEFAULT_START_POSITION_US) { // The default start position is too close to the start of the live window. Set it to the // minimum default start position provided the window is at least twice as big. Else set // it to the middle of the window. 
- defaultStartPositionUs = Math.min(MIN_LIVE_DEFAULT_START_POSITION_US, durationUs / 2); + defaultStartPositionUs = min(MIN_LIVE_DEFAULT_START_POSITION_US, durationUs / 2); } timeline = new SinglePeriodTimeline( @@ -724,12 +560,12 @@ private void processManifest() { defaultStartPositionUs, /* isSeekable= */ true, /* isDynamic= */ true, - /* isLive= */ true, + /* useLiveConfiguration= */ true, manifest, - tag); + mediaItem); } else { - long durationUs = manifest.durationUs != C.TIME_UNSET ? manifest.durationUs - : endTimeUs - startTimeUs; + long durationUs = + manifest.durationUs != C.TIME_UNSET ? manifest.durationUs : endTimeUs - startTimeUs; timeline = new SinglePeriodTimeline( startTimeUs + durationUs, @@ -738,9 +574,9 @@ private void processManifest() { /* windowDefaultStartPositionUs= */ 0, /* isSeekable= */ true, /* isDynamic= */ false, - /* isLive= */ false, + /* useLiveConfiguration= */ false, manifest, - tag); + mediaItem); } refreshSourceInfo(timeline); } @@ -750,7 +586,7 @@ private void scheduleManifestRefresh() { return; } long nextLoadTimestamp = manifestLoadStartTimestamp + MINIMUM_MANIFEST_REFRESH_PERIOD_MS; - long delayUntilNextLoad = Math.max(0, nextLoadTimestamp - SystemClock.elapsedRealtime()); + long delayUntilNextLoad = max(0, nextLoadTimestamp - SystemClock.elapsedRealtime()); manifestRefreshHandler.postDelayed(this::startLoadingManifest, delayUntilNextLoad); } @@ -758,12 +594,14 @@ private void startLoadingManifest() { if (manifestLoader.hasFatalError()) { return; } - ParsingLoadable loadable = new ParsingLoadable<>(manifestDataSource, - manifestUri, C.DATA_TYPE_MANIFEST, manifestParser); + ParsingLoadable loadable = + new ParsingLoadable<>( + manifestDataSource, manifestUri, C.DATA_TYPE_MANIFEST, manifestParser); long elapsedRealtimeMs = manifestLoader.startLoading( loadable, this, loadErrorHandlingPolicy.getMinimumLoadableRetryCount(loadable.type)); - manifestEventDispatcher.loadStarted(loadable.dataSpec, loadable.type, elapsedRealtimeMs); + manifestEventDispatcher.loadStarted( + new LoadEventInfo(loadable.loadTaskId, loadable.dataSpec, elapsedRealtimeMs), + loadable.type); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/manifest/SsManifest.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/manifest/SsManifest.java index b91bfc8f67..d7acee8b97 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/manifest/SsManifest.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/manifest/SsManifest.java @@ -52,9 +52,7 @@ public ProtectionElement(UUID uuid, byte[] data, TrackEncryptionBox[] trackEncry } } - /** - * Represents a StreamIndex element. - */ + /** Represents a StreamIndex element. 
*/ public static class StreamElement { private static final String URL_PLACEHOLDER_START_TIME_1 = "{start time}"; @@ -62,7 +60,7 @@ public static class StreamElement { private static final String URL_PLACEHOLDER_BITRATE_1 = "{bitrate}"; private static final String URL_PLACEHOLDER_BITRATE_2 = "{Bitrate}"; - public final int type; + public final @C.TrackType int type; public final String subType; public final long timescale; public final String name; @@ -84,7 +82,7 @@ public static class StreamElement { public StreamElement( String baseUri, String chunkTemplate, - int type, + @C.TrackType int type, String subType, long timescale, String name, @@ -117,7 +115,7 @@ public StreamElement( private StreamElement( String baseUri, String chunkTemplate, - int type, + @C.TrackType int type, String subType, long timescale, String name, @@ -156,9 +154,22 @@ private StreamElement( * @throws IndexOutOfBoundsException If a key has an invalid index. */ public StreamElement copy(Format[] formats) { - return new StreamElement(baseUri, chunkTemplate, type, subType, timescale, name, maxWidth, - maxHeight, displayWidth, displayHeight, language, formats, chunkStartTimes, - chunkStartTimesUs, lastChunkDurationUs); + return new StreamElement( + baseUri, + chunkTemplate, + type, + subType, + timescale, + name, + maxWidth, + maxHeight, + displayWidth, + displayHeight, + language, + formats, + chunkStartTimes, + chunkStartTimesUs, + lastChunkDurationUs); } /** @@ -188,7 +199,8 @@ public long getStartTimeUs(int chunkIndex) { * @return The duration of the chunk, in microseconds. */ public long getChunkDurationUs(int chunkIndex) { - return (chunkIndex == chunkCount - 1) ? lastChunkDurationUs + return (chunkIndex == chunkCount - 1) + ? lastChunkDurationUs : chunkStartTimesUs[chunkIndex + 1] - chunkStartTimesUs[chunkIndex]; } @@ -205,11 +217,12 @@ public Uri buildRequestUri(int track, int chunkIndex) { Assertions.checkState(chunkIndex < chunkStartTimes.size()); String bitrateString = Integer.toString(formats[track].bitrate); String startTimeString = chunkStartTimes.get(chunkIndex).toString(); - String chunkUrl = chunkTemplate - .replace(URL_PLACEHOLDER_BITRATE_1, bitrateString) - .replace(URL_PLACEHOLDER_BITRATE_2, bitrateString) - .replace(URL_PLACEHOLDER_START_TIME_1, startTimeString) - .replace(URL_PLACEHOLDER_START_TIME_2, startTimeString); + String chunkUrl = + chunkTemplate + .replace(URL_PLACEHOLDER_BITRATE_1, bitrateString) + .replace(URL_PLACEHOLDER_BITRATE_2, bitrateString) + .replace(URL_PLACEHOLDER_START_TIME_1, startTimeString) + .replace(URL_PLACEHOLDER_START_TIME_2, startTimeString); return UriUtil.resolveToUri(baseUri, chunkUrl); } } @@ -326,7 +339,7 @@ public final SsManifest copy(List streamKeys) { copiedFormats.clear(); } currentStreamElement = streamElement; - copiedFormats.add(streamElement.formats[key.trackIndex]); + copiedFormats.add(streamElement.formats[key.streamIndex]); } if (currentStreamElement != null) { // Add the last stream element. 
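[Editor's note] The StreamElement.buildRequestUri change in the hunk above only reflows the placeholder substitution; behaviour is unchanged. As a standalone sketch of that substitution (the template, bitrate and start time below are hypothetical; the real code also handles a second start-time placeholder spelling and resolves the result against the element's base URI via UriUtil.resolveToUri):

    public final class ChunkUrlSketch {
      public static void main(String[] args) {
        // Hypothetical values; a real manifest supplies the template and chunk start times.
        String chunkTemplate = "QualityLevels({bitrate})/Fragments(video={start time})";
        String bitrateString = Integer.toString(1_500_000);
        String startTimeString = Long.toString(120_000_000L); // in manifest timescale units

        // Mirrors the replace chain in StreamElement.buildRequestUri shown above.
        String chunkUrl =
            chunkTemplate
                .replace("{bitrate}", bitrateString)
                .replace("{Bitrate}", bitrateString)
                .replace("{start time}", startTimeString);

        System.out.println(chunkUrl);
        // Prints: QualityLevels(1500000)/Fragments(video=120000000)
      }
    }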
diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/manifest/SsManifestParser.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/manifest/SsManifestParser.java index d395e95fd9..0b07f3a248 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/manifest/SsManifestParser.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/manifest/SsManifestParser.java @@ -23,6 +23,7 @@ import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.audio.AacUtil; import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; import com.google.android.exoplayer2.extractor.mp4.PsshAtomUtil; @@ -49,8 +50,8 @@ /** * Parses SmoothStreaming client manifests. * - * @see - * IIS Smooth Streaming Client Manifest Format + * @see IIS Smooth + * Streaming Client Manifest Format */ public class SsManifestParser implements ParsingLoadable.Parser { @@ -73,24 +74,23 @@ public SsManifest parse(Uri uri, InputStream inputStream) throws IOException { new SmoothStreamingMediaParser(null, uri.toString()); return (SsManifest) smoothStreamingMediaParser.parse(xmlParser); } catch (XmlPullParserException e) { - throw new ParserException(e); + throw ParserException.createForMalformedManifest(/* message= */ null, /* cause= */ e); } } - /** - * Thrown if a required field is missing. - */ + /** Thrown if a required field is missing. */ public static class MissingFieldException extends ParserException { public MissingFieldException(String fieldName) { - super("Missing required field: " + fieldName); + super( + "Missing required field: " + fieldName, + /* cause= */ null, + /* contentIsMalformed= */ true, + C.DATA_TYPE_MANIFEST); } - } - /** - * A base class for parsers that parse components of a smooth streaming manifest. - */ + /** A base class for parsers that parse components of a smooth streaming manifest. */ private abstract static class ElementParser { private final String baseUri; @@ -192,7 +192,7 @@ protected final void putNormalizedAttribute(String key, @Nullable Object value) * provided name, the parent element parser will be queried, and so on up the chain. * * @param key The name of the attribute. - * @return The stashed value, or null if the attribute was not be found. + * @return The stashed value, or null if the attribute was not found. 
*/ @Nullable protected final Object getNormalizedAttribute(String key) { @@ -263,7 +263,7 @@ protected final int parseInt(XmlPullParser parser, String key, int defaultValue) try { return Integer.parseInt(value); } catch (NumberFormatException e) { - throw new ParserException(e); + throw ParserException.createForMalformedManifest(/* message= */ null, /* cause= */ e); } } else { return defaultValue; @@ -276,7 +276,7 @@ protected final int parseRequiredInt(XmlPullParser parser, String key) throws Pa try { return Integer.parseInt(value); } catch (NumberFormatException e) { - throw new ParserException(e); + throw ParserException.createForMalformedManifest(/* message= */ null, /* cause= */ e); } } else { throw new MissingFieldException(key); @@ -290,7 +290,7 @@ protected final long parseLong(XmlPullParser parser, String key, long defaultVal try { return Long.parseLong(value); } catch (NumberFormatException e) { - throw new ParserException(e); + throw ParserException.createForMalformedManifest(/* message= */ null, /* cause= */ e); } } else { return defaultValue; @@ -304,7 +304,7 @@ protected final long parseRequiredLong(XmlPullParser parser, String key) try { return Long.parseLong(value); } catch (NumberFormatException e) { - throw new ParserException(e); + throw ParserException.createForMalformedManifest(/* message= */ null, /* cause= */ e); } } else { throw new MissingFieldException(key); @@ -319,7 +319,6 @@ protected final boolean parseBoolean(XmlPullParser parser, String key, boolean d return defaultValue; } } - } private static class SmoothStreamingMediaParser extends ElementParser { @@ -379,22 +378,31 @@ public Object build() { StreamElement[] streamElementArray = new StreamElement[streamElements.size()]; streamElements.toArray(streamElementArray); if (protectionElement != null) { - DrmInitData drmInitData = new DrmInitData(new SchemeData(protectionElement.uuid, - MimeTypes.VIDEO_MP4, protectionElement.data)); + DrmInitData drmInitData = + new DrmInitData( + new SchemeData( + protectionElement.uuid, MimeTypes.VIDEO_MP4, protectionElement.data)); for (StreamElement streamElement : streamElementArray) { int type = streamElement.type; if (type == C.TRACK_TYPE_VIDEO || type == C.TRACK_TYPE_AUDIO) { Format[] formats = streamElement.formats; for (int i = 0; i < formats.length; i++) { - formats[i] = formats[i].copyWithDrmInitData(drmInitData); + formats[i] = formats[i].buildUpon().setDrmInitData(drmInitData).build(); } } } } - return new SsManifest(majorVersion, minorVersion, timescale, duration, dvrWindowLength, - lookAheadCount, isLive, protectionElement, streamElementArray); + return new SsManifest( + majorVersion, + minorVersion, + timescale, + duration, + dvrWindowLength, + lookAheadCount, + isLive, + protectionElement, + streamElementArray); } - } private static class ProtectionParser extends ElementParser { @@ -564,7 +572,8 @@ private void parseStreamFragmentStartTag(XmlPullParser parser) throws ParserExce startTime = startTimes.get(chunkIndex - 1) + lastChunkDuration; } else { // We don't have the start time, and we're unable to infer it. - throw new ParserException("Unable to infer start time"); + throw ParserException.createForMalformedManifest( + "Unable to infer start time", /* cause= */ null); } } chunkIndex++; @@ -573,7 +582,8 @@ private void parseStreamFragmentStartTag(XmlPullParser parser) throws ParserExce // Handle repeated chunks. 
long repeatCount = parseLong(parser, KEY_FRAGMENT_REPEAT_COUNT, 1L); if (repeatCount > 1 && lastChunkDuration == C.TIME_UNSET) { - throw new ParserException("Repeated chunk with unspecified duration"); + throw ParserException.createForMalformedManifest( + "Repeated chunk with unspecified duration", /* cause= */ null); } for (int i = 1; i < repeatCount; i++) { chunkIndex++; @@ -591,6 +601,7 @@ private void parseStreamElementStartTag(XmlPullParser parser) throws ParserExcep } putNormalizedAttribute(KEY_SUB_TYPE, subType); name = parser.getAttributeValue(null, KEY_NAME); + putNormalizedAttribute(KEY_NAME, name); url = parseRequiredString(parser, KEY_URL); maxWidth = parseInt(parser, KEY_MAX_WIDTH, Format.NO_VALUE); maxHeight = parseInt(parser, KEY_MAX_HEIGHT, Format.NO_VALUE); @@ -615,7 +626,8 @@ private int parseType(XmlPullParser parser) throws ParserException { } else if (KEY_TYPE_TEXT.equalsIgnoreCase(value)) { return C.TRACK_TYPE_TEXT; } else { - throw new ParserException("Invalid key value[" + value + "]"); + throw ParserException.createForMalformedManifest( + "Invalid key value[" + value + "]", /* cause= */ null); } } throw new MissingFieldException(KEY_TYPE); @@ -632,10 +644,22 @@ public void addChild(Object child) { public Object build() { Format[] formatArray = new Format[formats.size()]; formats.toArray(formatArray); - return new StreamElement(baseUri, url, type, subType, timescale, name, maxWidth, maxHeight, - displayWidth, displayHeight, language, formatArray, startTimes, lastChunkDuration); + return new StreamElement( + baseUri, + url, + type, + subType, + timescale, + name, + maxWidth, + maxHeight, + displayWidth, + displayHeight, + language, + formatArray, + startTimes, + lastChunkDuration); } - } private static class QualityLevelParser extends ElementParser { @@ -663,96 +687,65 @@ public QualityLevelParser(ElementParser parent, String baseUri) { @Override public void parseStartTag(XmlPullParser parser) throws ParserException { - int type = (Integer) getNormalizedAttribute(KEY_TYPE); - String id = parser.getAttributeValue(null, KEY_INDEX); - String name = (String) getNormalizedAttribute(KEY_NAME); - int bitrate = parseRequiredInt(parser, KEY_BITRATE); - String sampleMimeType = fourCCToMimeType(parseRequiredString(parser, KEY_FOUR_CC)); + Format.Builder formatBuilder = new Format.Builder(); + @Nullable String sampleMimeType = fourCCToMimeType(parseRequiredString(parser, KEY_FOUR_CC)); + int type = (Integer) getNormalizedAttribute(KEY_TYPE); if (type == C.TRACK_TYPE_VIDEO) { - int width = parseRequiredInt(parser, KEY_MAX_WIDTH); - int height = parseRequiredInt(parser, KEY_MAX_HEIGHT); - List codecSpecificData = buildCodecSpecificData( - parser.getAttributeValue(null, KEY_CODEC_PRIVATE_DATA)); - format = - Format.createVideoContainerFormat( - id, - name, - MimeTypes.VIDEO_MP4, - sampleMimeType, - /* codecs= */ null, - /* metadata= */ null, - bitrate, - width, - height, - /* frameRate= */ Format.NO_VALUE, - codecSpecificData, - /* selectionFlags= */ 0, - /* roleFlags= */ 0); + List codecSpecificData = + buildCodecSpecificData(parser.getAttributeValue(null, KEY_CODEC_PRIVATE_DATA)); + formatBuilder + .setContainerMimeType(MimeTypes.VIDEO_MP4) + .setWidth(parseRequiredInt(parser, KEY_MAX_WIDTH)) + .setHeight(parseRequiredInt(parser, KEY_MAX_HEIGHT)) + .setInitializationData(codecSpecificData); } else if (type == C.TRACK_TYPE_AUDIO) { - sampleMimeType = sampleMimeType == null ? 
MimeTypes.AUDIO_AAC : sampleMimeType; - int channels = parseRequiredInt(parser, KEY_CHANNELS); - int samplingRate = parseRequiredInt(parser, KEY_SAMPLING_RATE); - List codecSpecificData = buildCodecSpecificData( - parser.getAttributeValue(null, KEY_CODEC_PRIVATE_DATA)); + if (sampleMimeType == null) { + // If we don't know the MIME type, assume AAC. + sampleMimeType = MimeTypes.AUDIO_AAC; + } + int channelCount = parseRequiredInt(parser, KEY_CHANNELS); + int sampleRate = parseRequiredInt(parser, KEY_SAMPLING_RATE); + List codecSpecificData = + buildCodecSpecificData(parser.getAttributeValue(null, KEY_CODEC_PRIVATE_DATA)); if (codecSpecificData.isEmpty() && MimeTypes.AUDIO_AAC.equals(sampleMimeType)) { - codecSpecificData = Collections.singletonList( - CodecSpecificDataUtil.buildAacLcAudioSpecificConfig(samplingRate, channels)); + codecSpecificData = + Collections.singletonList( + AacUtil.buildAacLcAudioSpecificConfig(sampleRate, channelCount)); } - String language = (String) getNormalizedAttribute(KEY_LANGUAGE); - format = - Format.createAudioContainerFormat( - id, - name, - MimeTypes.AUDIO_MP4, - sampleMimeType, - /* codecs= */ null, - /* metadata= */ null, - bitrate, - channels, - samplingRate, - codecSpecificData, - /* selectionFlags= */ 0, - /* roleFlags= */ 0, - language); + formatBuilder + .setContainerMimeType(MimeTypes.AUDIO_MP4) + .setChannelCount(channelCount) + .setSampleRate(sampleRate) + .setInitializationData(codecSpecificData); } else if (type == C.TRACK_TYPE_TEXT) { - String subType = (String) getNormalizedAttribute(KEY_SUB_TYPE); @C.RoleFlags int roleFlags = 0; - switch (subType) { - case "CAPT": - roleFlags = C.ROLE_FLAG_CAPTION; - break; - case "DESC": - roleFlags = C.ROLE_FLAG_DESCRIBES_MUSIC_AND_SOUND; - break; - default: - break; + @Nullable String subType = (String) getNormalizedAttribute(KEY_SUB_TYPE); + if (subType != null) { + switch (subType) { + case "CAPT": + roleFlags = C.ROLE_FLAG_CAPTION; + break; + case "DESC": + roleFlags = C.ROLE_FLAG_DESCRIBES_MUSIC_AND_SOUND; + break; + default: + break; + } } - String language = (String) getNormalizedAttribute(KEY_LANGUAGE); - format = - Format.createTextContainerFormat( - id, - name, - MimeTypes.APPLICATION_MP4, - sampleMimeType, - /* codecs= */ null, - bitrate, - /* selectionFlags= */ 0, - roleFlags, - language); + formatBuilder.setContainerMimeType(MimeTypes.APPLICATION_MP4).setRoleFlags(roleFlags); } else { - format = - Format.createContainerFormat( - id, - name, - MimeTypes.APPLICATION_MP4, - sampleMimeType, - /* codecs= */ null, - bitrate, - /* selectionFlags= */ 0, - /* roleFlags= */ 0, - /* language= */ null); + formatBuilder.setContainerMimeType(MimeTypes.APPLICATION_MP4); } + + format = + formatBuilder + .setId(parser.getAttributeValue(null, KEY_INDEX)) + .setLabel((String) getNormalizedAttribute(KEY_NAME)) + .setSampleMimeType(sampleMimeType) + .setAverageBitrate(parseRequiredInt(parser, KEY_BITRATE)) + .setLanguage((String) getNormalizedAttribute(KEY_LANGUAGE)) + .build(); } @Override @@ -764,7 +757,7 @@ private static List buildCodecSpecificData(String codecSpecificDataStrin ArrayList csd = new ArrayList<>(); if (!TextUtils.isEmpty(codecSpecificDataString)) { byte[] codecPrivateData = Util.getBytesFromHexString(codecSpecificDataString); - byte[][] split = CodecSpecificDataUtil.splitNalUnits(codecPrivateData); + @Nullable byte[][] split = CodecSpecificDataUtil.splitNalUnits(codecPrivateData); if (split == null) { csd.add(codecPrivateData); } else { @@ -774,12 +767,17 @@ private static List 
buildCodecSpecificData(String codecSpecificDataStrin return csd; } + @Nullable private static String fourCCToMimeType(String fourCC) { - if (fourCC.equalsIgnoreCase("H264") || fourCC.equalsIgnoreCase("X264") - || fourCC.equalsIgnoreCase("AVC1") || fourCC.equalsIgnoreCase("DAVC")) { + if (fourCC.equalsIgnoreCase("H264") + || fourCC.equalsIgnoreCase("X264") + || fourCC.equalsIgnoreCase("AVC1") + || fourCC.equalsIgnoreCase("DAVC")) { return MimeTypes.VIDEO_H264; - } else if (fourCC.equalsIgnoreCase("AAC") || fourCC.equalsIgnoreCase("AACL") - || fourCC.equalsIgnoreCase("AACH") || fourCC.equalsIgnoreCase("AACP")) { + } else if (fourCC.equalsIgnoreCase("AAC") + || fourCC.equalsIgnoreCase("AACL") + || fourCC.equalsIgnoreCase("AACH") + || fourCC.equalsIgnoreCase("AACP")) { return MimeTypes.AUDIO_AAC; } else if (fourCC.equalsIgnoreCase("TTML") || fourCC.equalsIgnoreCase("DFXP")) { return MimeTypes.APPLICATION_TTML; @@ -798,7 +796,5 @@ private static String fourCCToMimeType(String fourCC) { } return null; } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/manifest/SsUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/manifest/SsUtil.java deleted file mode 100644 index b54b2abc74..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/manifest/SsUtil.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (C) 2018 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.source.smoothstreaming.manifest; - -import android.net.Uri; -import com.google.android.exoplayer2.util.Util; - -/** SmoothStreaming related utility methods. */ -public final class SsUtil { - - /** Returns a fixed SmoothStreaming client manifest {@link Uri}. 
*/ - public static Uri fixManifestUri(Uri manifestUri) { - String lastPathSegment = manifestUri.getLastPathSegment(); - if (lastPathSegment != null - && Util.toLowerInvariant(lastPathSegment).matches("manifest(\\(.+\\))?")) { - return manifestUri; - } - return Uri.withAppendedPath(manifestUri, "Manifest"); - } - - private SsUtil() {} -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/offline/SsDownloader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/offline/SsDownloader.java index 1331fe4617..695cbad321 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/offline/SsDownloader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/offline/SsDownloader.java @@ -15,21 +15,21 @@ */ package com.google.android.exoplayer2.source.smoothstreaming.offline; -import android.net.Uri; -import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.offline.DownloaderConstructorHelper; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.offline.SegmentDownloader; -import com.google.android.exoplayer2.offline.StreamKey; import com.google.android.exoplayer2.source.smoothstreaming.manifest.SsManifest; import com.google.android.exoplayer2.source.smoothstreaming.manifest.SsManifest.StreamElement; import com.google.android.exoplayer2.source.smoothstreaming.manifest.SsManifestParser; -import com.google.android.exoplayer2.source.smoothstreaming.manifest.SsUtil; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSpec; -import com.google.android.exoplayer2.upstream.ParsingLoadable; -import java.io.IOException; +import com.google.android.exoplayer2.upstream.ParsingLoadable.Parser; +import com.google.android.exoplayer2.upstream.cache.CacheDataSource; +import com.google.android.exoplayer2.util.Util; import java.util.ArrayList; import java.util.List; +import java.util.concurrent.Executor; /** * A downloader for SmoothStreaming streams. @@ -38,43 +38,84 @@ * *

 * <pre>{@code
          * SimpleCache cache = new SimpleCache(downloadFolder, new NoOpCacheEvictor(), databaseProvider);
        - * DefaultHttpDataSourceFactory factory = new DefaultHttpDataSourceFactory("ExoPlayer", null);
        - * DownloaderConstructorHelper constructorHelper =
        - *     new DownloaderConstructorHelper(cache, factory);
        + * CacheDataSource.Factory cacheDataSourceFactory =
        + *     new CacheDataSource.Factory()
        + *         .setCache(cache)
        + *         .setUpstreamDataSourceFactory(new DefaultHttpDataSource.Factory());
          * // Create a downloader for the first track of the first stream element.
          * SsDownloader ssDownloader =
          *     new SsDownloader(
        - *         manifestUrl,
        - *         Collections.singletonList(new StreamKey(0, 0)),
        - *         constructorHelper);
        + *         new MediaItem.Builder()
        + *             .setUri(manifestUri)
        + *             .setStreamKeys(Collections.singletonList(new StreamKey(0, 0)))
        + *             .build(),
        + *         cacheDataSourceFactory);
          * // Perform the download.
          * ssDownloader.download(progressListener);
        - * // Access downloaded data using CacheDataSource
        - * CacheDataSource cacheDataSource =
        - *     new CacheDataSource(cache, factory.createDataSource(), CacheDataSource.FLAG_BLOCK_ON_CACHE);
        + * // Use the downloaded data for playback.
        + * SsMediaSource mediaSource =
        + *     new SsMediaSource.Factory(cacheDataSourceFactory).createMediaSource(mediaItem);
 * }</pre>
        */ public final class SsDownloader extends SegmentDownloader { /** - * @param manifestUri The {@link Uri} of the manifest to be downloaded. - * @param streamKeys Keys defining which streams in the manifest should be selected for download. - * If empty, all streams are downloaded. - * @param constructorHelper A {@link DownloaderConstructorHelper} instance. + * Creates an instance. + * + * @param mediaItem The {@link MediaItem} to be downloaded. + * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which the + * download will be written. + */ + public SsDownloader(MediaItem mediaItem, CacheDataSource.Factory cacheDataSourceFactory) { + this(mediaItem, cacheDataSourceFactory, Runnable::run); + } + + /** + * Creates an instance. + * + * @param mediaItem The {@link MediaItem} to be downloaded. + * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which the + * download will be written. + * @param executor An {@link Executor} used to make requests for the media being downloaded. + * Providing an {@link Executor} that uses multiple threads will speed up the download by + * allowing parts of it to be executed in parallel. */ public SsDownloader( - Uri manifestUri, List streamKeys, DownloaderConstructorHelper constructorHelper) { - super(SsUtil.fixManifestUri(manifestUri), streamKeys, constructorHelper); + MediaItem mediaItem, CacheDataSource.Factory cacheDataSourceFactory, Executor executor) { + this( + mediaItem + .buildUpon() + .setUri( + Util.fixSmoothStreamingIsmManifestUri( + checkNotNull(mediaItem.localConfiguration).uri)) + .build(), + new SsManifestParser(), + cacheDataSourceFactory, + executor); } - @Override - protected SsManifest getManifest(DataSource dataSource, DataSpec dataSpec) throws IOException { - return ParsingLoadable.load(dataSource, new SsManifestParser(), dataSpec, C.DATA_TYPE_MANIFEST); + /** + * Creates a new instance. + * + * @param mediaItem The {@link MediaItem} to be downloaded. + * @param manifestParser A parser for SmoothStreaming manifests. + * @param cacheDataSourceFactory A {@link CacheDataSource.Factory} for the cache into which the + * download will be written. + * @param executor An {@link Executor} used to make requests for the media being downloaded. + * Providing an {@link Executor} that uses multiple threads will speed up the download by + * allowing parts of it to be executed in parallel. 
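[Editor's note] To illustrate the Executor parameter described in the javadoc above, a minimal sketch that follows the usage example in the class javadoc; the thread count and the single StreamKey are arbitrary illustrative choices, not part of this change:

    import android.net.Uri;
    import com.google.android.exoplayer2.MediaItem;
    import com.google.android.exoplayer2.offline.StreamKey;
    import com.google.android.exoplayer2.source.smoothstreaming.offline.SsDownloader;
    import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;
    import com.google.android.exoplayer2.upstream.cache.CacheDataSource;
    import com.google.android.exoplayer2.upstream.cache.SimpleCache;
    import java.util.Collections;
    import java.util.concurrent.Executors;

    final class ParallelSsDownloadSketch {
      static SsDownloader create(SimpleCache cache, Uri manifestUri) {
        CacheDataSource.Factory cacheDataSourceFactory =
            new CacheDataSource.Factory()
                .setCache(cache)
                .setUpstreamDataSourceFactory(new DefaultHttpDataSource.Factory());
        MediaItem mediaItem =
            new MediaItem.Builder()
                .setUri(manifestUri)
                .setStreamKeys(Collections.singletonList(new StreamKey(0, 0)))
                .build();
        // Runnable::run (used by the two-argument constructor) downloads sequentially;
        // a fixed thread pool lets segment requests run in parallel.
        return new SsDownloader(
            mediaItem, cacheDataSourceFactory, Executors.newFixedThreadPool(4));
      }
    }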
+ */ + public SsDownloader( + MediaItem mediaItem, + Parser manifestParser, + CacheDataSource.Factory cacheDataSourceFactory, + Executor executor) { + super(mediaItem, manifestParser, cacheDataSourceFactory, executor); } @Override protected List getSegments( - DataSource dataSource, SsManifest manifest, boolean allowIncompleteList) { + DataSource dataSource, SsManifest manifest, boolean removing) { ArrayList segments = new ArrayList<>(); for (StreamElement streamElement : manifest.streamElements) { for (int i = 0; i < streamElement.formats.length; i++) { @@ -88,5 +129,4 @@ protected List getSegments( } return segments; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/CaptionStyleCompat.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/CaptionStyleCompat.java deleted file mode 100644 index 51aec3638f..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/CaptionStyleCompat.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.text; - -import android.annotation.TargetApi; -import android.graphics.Color; -import android.graphics.Typeface; -import android.view.accessibility.CaptioningManager; -import android.view.accessibility.CaptioningManager.CaptionStyle; -import androidx.annotation.IntDef; -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.util.Util; -import java.lang.annotation.Documented; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; - -/** - * A compatibility wrapper for {@link CaptionStyle}. - */ -public final class CaptionStyleCompat { - - /** - * The type of edge, which may be none. One of {@link #EDGE_TYPE_NONE}, {@link - * #EDGE_TYPE_OUTLINE}, {@link #EDGE_TYPE_DROP_SHADOW}, {@link #EDGE_TYPE_RAISED} or {@link - * #EDGE_TYPE_DEPRESSED}. - */ - @Documented - @Retention(RetentionPolicy.SOURCE) - @IntDef({ - EDGE_TYPE_NONE, - EDGE_TYPE_OUTLINE, - EDGE_TYPE_DROP_SHADOW, - EDGE_TYPE_RAISED, - EDGE_TYPE_DEPRESSED - }) - public @interface EdgeType {} - /** - * Edge type value specifying no character edges. - */ - public static final int EDGE_TYPE_NONE = 0; - /** - * Edge type value specifying uniformly outlined character edges. - */ - public static final int EDGE_TYPE_OUTLINE = 1; - /** - * Edge type value specifying drop-shadowed character edges. - */ - public static final int EDGE_TYPE_DROP_SHADOW = 2; - /** - * Edge type value specifying raised bevel character edges. - */ - public static final int EDGE_TYPE_RAISED = 3; - /** - * Edge type value specifying depressed bevel character edges. - */ - public static final int EDGE_TYPE_DEPRESSED = 4; - - /** - * Use color setting specified by the track and fallback to default caption style. - */ - public static final int USE_TRACK_COLOR_SETTINGS = 1; - - /** Default caption style. 
*/ - public static final CaptionStyleCompat DEFAULT = - new CaptionStyleCompat( - Color.WHITE, - Color.BLACK, - Color.TRANSPARENT, - EDGE_TYPE_NONE, - Color.WHITE, - /* typeface= */ null); - - /** - * The preferred foreground color. - */ - public final int foregroundColor; - - /** - * The preferred background color. - */ - public final int backgroundColor; - - /** - * The preferred window color. - */ - public final int windowColor; - - /** - * The preferred edge type. One of: - *
- * <ul>
- * <li>{@link #EDGE_TYPE_NONE}
- * <li>{@link #EDGE_TYPE_OUTLINE}
- * <li>{@link #EDGE_TYPE_DROP_SHADOW}
- * <li>{@link #EDGE_TYPE_RAISED}
- * <li>{@link #EDGE_TYPE_DEPRESSED}
- * </ul>
        - */ - @EdgeType public final int edgeType; - - /** - * The preferred edge color, if using an edge type other than {@link #EDGE_TYPE_NONE}. - */ - public final int edgeColor; - - /** The preferred typeface, or {@code null} if unspecified. */ - @Nullable public final Typeface typeface; - - /** - * Creates a {@link CaptionStyleCompat} equivalent to a provided {@link CaptionStyle}. - * - * @param captionStyle A {@link CaptionStyle}. - * @return The equivalent {@link CaptionStyleCompat}. - */ - @TargetApi(19) - public static CaptionStyleCompat createFromCaptionStyle( - CaptioningManager.CaptionStyle captionStyle) { - if (Util.SDK_INT >= 21) { - return createFromCaptionStyleV21(captionStyle); - } else { - // Note - Any caller must be on at least API level 19 or greater (because CaptionStyle did - // not exist in earlier API levels). - return createFromCaptionStyleV19(captionStyle); - } - } - - /** - * @param foregroundColor See {@link #foregroundColor}. - * @param backgroundColor See {@link #backgroundColor}. - * @param windowColor See {@link #windowColor}. - * @param edgeType See {@link #edgeType}. - * @param edgeColor See {@link #edgeColor}. - * @param typeface See {@link #typeface}. - */ - public CaptionStyleCompat( - int foregroundColor, - int backgroundColor, - int windowColor, - @EdgeType int edgeType, - int edgeColor, - @Nullable Typeface typeface) { - this.foregroundColor = foregroundColor; - this.backgroundColor = backgroundColor; - this.windowColor = windowColor; - this.edgeType = edgeType; - this.edgeColor = edgeColor; - this.typeface = typeface; - } - - @TargetApi(19) - @SuppressWarnings("ResourceType") - private static CaptionStyleCompat createFromCaptionStyleV19( - CaptioningManager.CaptionStyle captionStyle) { - return new CaptionStyleCompat( - captionStyle.foregroundColor, captionStyle.backgroundColor, Color.TRANSPARENT, - captionStyle.edgeType, captionStyle.edgeColor, captionStyle.getTypeface()); - } - - @TargetApi(21) - @SuppressWarnings("ResourceType") - private static CaptionStyleCompat createFromCaptionStyleV21( - CaptioningManager.CaptionStyle captionStyle) { - return new CaptionStyleCompat( - captionStyle.hasForegroundColor() ? captionStyle.foregroundColor : DEFAULT.foregroundColor, - captionStyle.hasBackgroundColor() ? captionStyle.backgroundColor : DEFAULT.backgroundColor, - captionStyle.hasWindowColor() ? captionStyle.windowColor : DEFAULT.windowColor, - captionStyle.hasEdgeType() ? captionStyle.edgeType : DEFAULT.edgeType, - captionStyle.hasEdgeColor() ? 
captionStyle.edgeColor : DEFAULT.edgeColor, - captionStyle.getTypeface()); - } - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/Cue.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/Cue.java index 946af76e53..d96fb263f6 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/Cue.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/Cue.java @@ -15,22 +15,42 @@ */ package com.google.android.exoplayer2.text; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.graphics.Bitmap; import android.graphics.Color; +import android.os.Bundle; +import android.text.Layout; import android.text.Layout.Alignment; +import android.text.Spanned; +import android.text.SpannedString; +import android.text.TextUtils; +import androidx.annotation.ColorInt; import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Bundleable; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Objects; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.checkerframework.dataflow.qual.Pure; -/** - * Contains information about a specific cue, including textual content and formatting data. - */ -public class Cue { +/** Contains information about a specific cue, including textual content and formatting data. */ +// This class shouldn't be sub-classed. If a subtitle format needs additional fields, either they +// should be generic enough to be added here, or the format-specific decoder should pass the +// information around in a sidecar object. +public final class Cue implements Bundleable { /** The empty cue. */ - public static final Cue EMPTY = new Cue(""); + public static final Cue EMPTY = new Cue.Builder().setText("").build(); /** An unset position, width or size. */ // Note: We deliberately don't use Float.MIN_VALUE because it's positive & very close to zero. @@ -40,14 +60,15 @@ public class Cue { * The type of anchor, which may be unset. One of {@link #TYPE_UNSET}, {@link #ANCHOR_TYPE_START}, * {@link #ANCHOR_TYPE_MIDDLE} or {@link #ANCHOR_TYPE_END}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({TYPE_UNSET, ANCHOR_TYPE_START, ANCHOR_TYPE_MIDDLE, ANCHOR_TYPE_END}) public @interface AnchorType {} - /** - * An unset anchor or line type value. - */ + /** An unset anchor, line, text size or vertical type value. */ public static final int TYPE_UNSET = Integer.MIN_VALUE; /** @@ -56,9 +77,7 @@ public class Cue { */ public static final int ANCHOR_TYPE_START = 0; - /** - * Anchors the middle of the cue box. - */ + /** Anchors the middle of the cue box. */ public static final int ANCHOR_TYPE_MIDDLE = 1; /** @@ -71,19 +90,18 @@ public class Cue { * The type of line, which may be unset. 
One of {@link #TYPE_UNSET}, {@link #LINE_TYPE_FRACTION} * or {@link #LINE_TYPE_NUMBER}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({TYPE_UNSET, LINE_TYPE_FRACTION, LINE_TYPE_NUMBER}) public @interface LineType {} - /** - * Value for {@link #lineType} when {@link #line} is a fractional position. - */ + /** Value for {@link #lineType} when {@link #line} is a fractional position. */ public static final int LINE_TYPE_FRACTION = 0; - /** - * Value for {@link #lineType} when {@link #line} is a line number. - */ + /** Value for {@link #lineType} when {@link #line} is a line number. */ public static final int LINE_TYPE_NUMBER = 1; /** @@ -91,8 +109,11 @@ public class Cue { * {@link #TEXT_SIZE_TYPE_FRACTIONAL}, {@link #TEXT_SIZE_TYPE_FRACTIONAL_IGNORE_PADDING} or {@link * #TEXT_SIZE_TYPE_ABSOLUTE}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({ TYPE_UNSET, TEXT_SIZE_TYPE_FRACTIONAL, @@ -110,6 +131,28 @@ public class Cue { /** Text size is measured in number of pixels. */ public static final int TEXT_SIZE_TYPE_ABSOLUTE = 2; + /** + * The type of vertical layout for this cue, which may be unset (i.e. horizontal). One of {@link + * #TYPE_UNSET}, {@link #VERTICAL_TYPE_RL} or {@link #VERTICAL_TYPE_LR}. + */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef({ + TYPE_UNSET, + VERTICAL_TYPE_RL, + VERTICAL_TYPE_LR, + }) + public @interface VerticalType {} + + /** Vertical right-to-left (e.g. for Japanese). */ + public static final int VERTICAL_TYPE_RL = 1; + + /** Vertical left-to-right (e.g. for Mongolian). */ + public static final int VERTICAL_TYPE_LR = 2; + /** * The cue text, or null if this is an image cue. Note the {@link CharSequence} may be decorated * with styling spans. @@ -119,48 +162,67 @@ public class Cue { /** The alignment of the cue text within the cue box, or null if the alignment is undefined. */ @Nullable public final Alignment textAlignment; + /** + * The alignment of multiple lines of text relative to the longest line, or null if the alignment + * is undefined. + */ + @Nullable public final Alignment multiRowAlignment; + /** The cue image, or null if this is a text cue. */ @Nullable public final Bitmap bitmap; /** - * The position of the {@link #lineAnchor} of the cue box within the viewport in the direction - * orthogonal to the writing direction, or {@link #DIMEN_UNSET}. When set, the interpretation of - * the value depends on the value of {@link #lineType}. - *

        - * For horizontal text and {@link #lineType} equal to {@link #LINE_TYPE_FRACTION}, this is the - * fractional vertical position relative to the top of the viewport. + * The position of the cue box within the viewport in the direction orthogonal to the writing + * direction (determined by {@link #verticalType}), or {@link #DIMEN_UNSET}. When set, the + * interpretation of the value depends on the value of {@link #lineType}. + * + *

+ * <p>The measurement direction depends on {@link #verticalType}:
+ *
+ * <ul>
+ *   <li>For {@link #TYPE_UNSET} (i.e. horizontal), this is the vertical position relative to the
+ *       top of the viewport.
+ *   <li>For {@link #VERTICAL_TYPE_LR} this is the horizontal position relative to the left of the
+ *       viewport.
+ *   <li>For {@link #VERTICAL_TYPE_RL} this is the horizontal position relative to the right of
+ *       the viewport.
+ * </ul>
        */ public final float line; /** * The type of the {@link #line} value. * - *

        {@link #LINE_TYPE_FRACTION} indicates that {@link #line} is a fractional position within the - * viewport. - * - *

        {@link #LINE_TYPE_NUMBER} indicates that {@link #line} is a line number, where the size of - * each line is taken to be the size of the first line of the cue. When {@link #line} is greater - * than or equal to 0 lines count from the start of the viewport, with 0 indicating zero offset - * from the start edge. When {@link #line} is negative lines count from the end of the viewport, - * with -1 indicating zero offset from the end edge. For horizontal text the line spacing is the - * height of the first line of the cue, and the start and end of the viewport are the top and - * bottom respectively. - * - *

        Note that it's particularly important to consider the effect of {@link #lineAnchor} when - * using {@link #LINE_TYPE_NUMBER}. {@code (line == 0 && lineAnchor == ANCHOR_TYPE_START)} - * positions a (potentially multi-line) cue at the very top of the viewport. {@code (line == -1 && - * lineAnchor == ANCHOR_TYPE_END)} positions a (potentially multi-line) cue at the very bottom of - * the viewport. {@code (line == 0 && lineAnchor == ANCHOR_TYPE_END)} and {@code (line == -1 && - * lineAnchor == ANCHOR_TYPE_START)} position cues entirely outside of the viewport. {@code (line - * == 1 && lineAnchor == ANCHOR_TYPE_END)} positions a cue so that only the last line is visible - * at the top of the viewport. {@code (line == -2 && lineAnchor == ANCHOR_TYPE_START)} position a - * cue so that only its first line is visible at the bottom of the viewport. + *

+ * <ul>
+ *   <li>{@link #LINE_TYPE_FRACTION} indicates that {@link #line} is a fractional position within
+ *       the viewport (measured to the part of the cue box determined by {@link #lineAnchor}).
+ *   <li>{@link #LINE_TYPE_NUMBER} indicates that {@link #line} is a viewport line number. The
+ *       viewport is divided into lines (each equal in size to the first line of the cue box). The
+ *       cue box is positioned to align with the viewport lines as follows:
+ *       <ul>
+ *         <li>{@link #lineAnchor}) is ignored.
+ *         <li>When {@code line} is greater than or equal to 0 the first line in the cue box is
+ *             aligned with a viewport line, with 0 meaning the first line of the viewport.
+ *         <li>When {@code line} is negative the last line in the cue box is aligned with a
+ *             viewport line, with -1 meaning the last line of the viewport.
+ *         <li>For horizontal text the start and end of the viewport are the top and bottom
+ *             respectively.
+ *       </ul>
+ * </ul>
        */ public final @LineType int lineType; /** - * The cue box anchor positioned by {@link #line}. One of {@link #ANCHOR_TYPE_START}, {@link - * #ANCHOR_TYPE_MIDDLE}, {@link #ANCHOR_TYPE_END} and {@link #TYPE_UNSET}. + * The cue box anchor positioned by {@link #line} when {@link #lineType} is {@link + * #LINE_TYPE_FRACTION}. + * + *

+ * <p>One of:
+ *
+ * <ul>
+ *   <li>{@link #ANCHOR_TYPE_START}
+ *   <li>{@link #ANCHOR_TYPE_MIDDLE}
+ *   <li>{@link #ANCHOR_TYPE_END}
+ *   <li>{@link #TYPE_UNSET}
+ * </ul>
+ *

        For the normal case of horizontal text, {@link #ANCHOR_TYPE_START}, {@link * #ANCHOR_TYPE_MIDDLE} and {@link #ANCHOR_TYPE_END} correspond to the top, middle and bottom of @@ -171,10 +233,16 @@ public class Cue { /** * The fractional position of the {@link #positionAnchor} of the cue box within the viewport in * the direction orthogonal to {@link #line}, or {@link #DIMEN_UNSET}. - *

        - * For horizontal text, this is the horizontal position relative to the left of the viewport. Note - * that positioning is relative to the left of the viewport even in the case of right-to-left - * text. + * + *

+ * <p>The measurement direction depends on {@link #verticalType}.
+ *
+ * <ul>
+ *   <li>For {@link #TYPE_UNSET} (i.e. horizontal), this is the horizontal position relative to
+ *       the left of the viewport. Note that positioning is relative to the left of the viewport
+ *       even in the case of right-to-left text.
+ *   <li>For {@link #VERTICAL_TYPE_LR} and {@link #VERTICAL_TYPE_RL} (i.e. vertical), this is the
+ *       vertical position relative to the top of the viewport.
+ * </ul>
        */ public final float position; @@ -201,14 +269,10 @@ public class Cue { */ public final float bitmapHeight; - /** - * Specifies whether or not the {@link #windowColor} property is set. - */ + /** Specifies whether or not the {@link #windowColor} property is set. */ public final boolean windowColorSet; - /** - * The fill color of the window. - */ + /** The fill color of the window. */ public final int windowColor; /** @@ -224,53 +288,26 @@ public class Cue { public final float textSize; /** - * Creates an image cue. - * - * @param bitmap See {@link #bitmap}. - * @param horizontalPosition The position of the horizontal anchor within the viewport, expressed - * as a fraction of the viewport width. - * @param horizontalPositionAnchor The horizontal anchor. One of {@link #ANCHOR_TYPE_START}, - * {@link #ANCHOR_TYPE_MIDDLE}, {@link #ANCHOR_TYPE_END} and {@link #TYPE_UNSET}. - * @param verticalPosition The position of the vertical anchor within the viewport, expressed as a - * fraction of the viewport height. - * @param verticalPositionAnchor The vertical anchor. One of {@link #ANCHOR_TYPE_START}, {@link - * #ANCHOR_TYPE_MIDDLE}, {@link #ANCHOR_TYPE_END} and {@link #TYPE_UNSET}. - * @param width The width of the cue as a fraction of the viewport width. - * @param height The height of the cue as a fraction of the viewport height, or {@link - * #DIMEN_UNSET} if the bitmap should be displayed at its natural height for the specified - * {@code width}. + * The vertical formatting of this Cue, or {@link #TYPE_UNSET} if the cue has no vertical setting + * (and so should be horizontal). */ - public Cue( - Bitmap bitmap, - float horizontalPosition, - @AnchorType int horizontalPositionAnchor, - float verticalPosition, - @AnchorType int verticalPositionAnchor, - float width, - float height) { - this( - /* text= */ null, - /* textAlignment= */ null, - bitmap, - verticalPosition, - /* lineType= */ LINE_TYPE_FRACTION, - verticalPositionAnchor, - horizontalPosition, - horizontalPositionAnchor, - /* textSizeType= */ TYPE_UNSET, - /* textSize= */ DIMEN_UNSET, - width, - height, - /* windowColorSet= */ false, - /* windowColor= */ Color.BLACK); - } + public final @VerticalType int verticalType; + + /** + * The shear angle in degrees to be applied to this Cue, expressed in graphics coordinates. This + * results in a skew transform for the block along the inline progression axis. + */ + public final float shearDegrees; /** * Creates a text cue whose {@link #textAlignment} is null, whose type parameters are set to * {@link #TYPE_UNSET} and whose dimension parameters are set to {@link #DIMEN_UNSET}. * * @param text See {@link #text}. + * @deprecated Use {@link Builder}. */ + @SuppressWarnings("deprecation") + @Deprecated public Cue(CharSequence text) { this( text, @@ -294,7 +331,10 @@ public Cue(CharSequence text) { * @param position See {@link #position}. * @param positionAnchor See {@link #positionAnchor}. * @param size See {@link #size}. + * @deprecated Use {@link Builder}. */ + @SuppressWarnings("deprecation") + @Deprecated public Cue( CharSequence text, @Nullable Alignment textAlignment, @@ -330,7 +370,9 @@ public Cue( * @param size See {@link #size}. * @param textSizeType See {@link #textSizeType}. * @param textSize See {@link #textSize}. + * @deprecated Use {@link Builder}. 
*/ + @Deprecated public Cue( CharSequence text, @Nullable Alignment textAlignment, @@ -345,6 +387,7 @@ public Cue( this( text, textAlignment, + /* multiRowAlignment= */ null, /* bitmap= */ null, line, lineType, @@ -356,7 +399,9 @@ public Cue( size, /* bitmapHeight= */ DIMEN_UNSET, /* windowColorSet= */ false, - /* windowColor= */ Color.BLACK); + /* windowColor= */ Color.BLACK, + /* verticalType= */ TYPE_UNSET, + /* shearDegrees= */ 0f); } /** @@ -372,7 +417,9 @@ public Cue( * @param size See {@link #size}. * @param windowColorSet See {@link #windowColorSet}. * @param windowColor See {@link #windowColor}. + * @deprecated Use {@link Builder}. */ + @Deprecated public Cue( CharSequence text, @Nullable Alignment textAlignment, @@ -387,6 +434,7 @@ public Cue( this( text, textAlignment, + /* multiRowAlignment= */ null, /* bitmap= */ null, line, lineType, @@ -398,12 +446,15 @@ public Cue( size, /* bitmapHeight= */ DIMEN_UNSET, windowColorSet, - windowColor); + windowColor, + /* verticalType= */ TYPE_UNSET, + /* shearDegrees= */ 0f); } private Cue( @Nullable CharSequence text, @Nullable Alignment textAlignment, + @Nullable Alignment multiRowAlignment, @Nullable Bitmap bitmap, float line, @LineType int lineType, @@ -415,9 +466,24 @@ private Cue( float size, float bitmapHeight, boolean windowColorSet, - int windowColor) { - this.text = text; + int windowColor, + @VerticalType int verticalType, + float shearDegrees) { + // Exactly one of text or bitmap should be set. + if (text == null) { + Assertions.checkNotNull(bitmap); + } else { + Assertions.checkArgument(bitmap == null); + } + if (text instanceof Spanned) { + this.text = SpannedString.valueOf(text); + } else if (text != null) { + this.text = text.toString(); + } else { + this.text = null; + } this.textAlignment = textAlignment; + this.multiRowAlignment = multiRowAlignment; this.bitmap = bitmap; this.line = line; this.lineType = lineType; @@ -430,6 +496,576 @@ private Cue( this.windowColor = windowColor; this.textSizeType = textSizeType; this.textSize = textSize; + this.verticalType = verticalType; + this.shearDegrees = shearDegrees; + } + + /** Returns a new {@link Cue.Builder} initialized with the same values as this Cue. */ + public Builder buildUpon() { + return new Cue.Builder(this); + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + Cue that = (Cue) obj; + return TextUtils.equals(text, that.text) + && textAlignment == that.textAlignment + && multiRowAlignment == that.multiRowAlignment + && (bitmap == null + ? that.bitmap == null + : (that.bitmap != null && bitmap.sameAs(that.bitmap))) + && line == that.line + && lineType == that.lineType + && lineAnchor == that.lineAnchor + && position == that.position + && positionAnchor == that.positionAnchor + && size == that.size + && bitmapHeight == that.bitmapHeight + && windowColorSet == that.windowColorSet + && windowColor == that.windowColor + && textSizeType == that.textSizeType + && textSize == that.textSize + && verticalType == that.verticalType + && shearDegrees == that.shearDegrees; } + @Override + public int hashCode() { + return Objects.hashCode( + text, + textAlignment, + multiRowAlignment, + bitmap, + line, + lineType, + lineAnchor, + position, + positionAnchor, + size, + bitmapHeight, + windowColorSet, + windowColor, + textSizeType, + textSize, + verticalType, + shearDegrees); + } + + /** A builder for {@link Cue} objects. 
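[Editor's note] Since every public Cue constructor is now deprecated in favour of the Builder introduced below, a minimal migration sketch using only the setters visible in this diff; the positioning values are illustrative only:

    import com.google.android.exoplayer2.text.Cue;

    final class CueBuilderSketch {
      // Roughly what the old multi-argument text constructors expressed, via the new Builder.
      static Cue bottomCentredCue(CharSequence text) {
        return new Cue.Builder()
            .setText(text)
            .setLine(0.9f, Cue.LINE_TYPE_FRACTION)      // fraction of the viewport height
            .setLineAnchor(Cue.ANCHOR_TYPE_END)         // anchor the bottom of the cue box
            .setPosition(0.5f)                          // horizontally centred...
            .setPositionAnchor(Cue.ANCHOR_TYPE_MIDDLE)  // ...relative to the middle of the box
            .setSize(0.8f)                              // cue box spans 80% of the viewport width
            .build();                                   // build() requires text or bitmap to be set
      }
    }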
*/ + public static final class Builder { + @Nullable private CharSequence text; + @Nullable private Bitmap bitmap; + @Nullable private Alignment textAlignment; + @Nullable private Alignment multiRowAlignment; + private float line; + private @LineType int lineType; + private @AnchorType int lineAnchor; + private float position; + private @AnchorType int positionAnchor; + private @TextSizeType int textSizeType; + private float textSize; + private float size; + private float bitmapHeight; + private boolean windowColorSet; + @ColorInt private int windowColor; + private @VerticalType int verticalType; + private float shearDegrees; + + public Builder() { + text = null; + bitmap = null; + textAlignment = null; + multiRowAlignment = null; + line = DIMEN_UNSET; + lineType = TYPE_UNSET; + lineAnchor = TYPE_UNSET; + position = DIMEN_UNSET; + positionAnchor = TYPE_UNSET; + textSizeType = TYPE_UNSET; + textSize = DIMEN_UNSET; + size = DIMEN_UNSET; + bitmapHeight = DIMEN_UNSET; + windowColorSet = false; + windowColor = Color.BLACK; + verticalType = TYPE_UNSET; + } + + private Builder(Cue cue) { + text = cue.text; + bitmap = cue.bitmap; + textAlignment = cue.textAlignment; + multiRowAlignment = cue.multiRowAlignment; + line = cue.line; + lineType = cue.lineType; + lineAnchor = cue.lineAnchor; + position = cue.position; + positionAnchor = cue.positionAnchor; + textSizeType = cue.textSizeType; + textSize = cue.textSize; + size = cue.size; + bitmapHeight = cue.bitmapHeight; + windowColorSet = cue.windowColorSet; + windowColor = cue.windowColor; + verticalType = cue.verticalType; + shearDegrees = cue.shearDegrees; + } + + /** + * Sets the cue text. + * + *

        Note that {@code text} may be decorated with styling spans. + * + * @see Cue#text + */ + @CanIgnoreReturnValue + public Builder setText(CharSequence text) { + this.text = text; + return this; + } + + /** + * Gets the cue text. + * + * @see Cue#text + */ + @Pure + @Nullable + public CharSequence getText() { + return text; + } + + /** + * Sets the cue image. + * + * @see Cue#bitmap + */ + @CanIgnoreReturnValue + public Builder setBitmap(Bitmap bitmap) { + this.bitmap = bitmap; + return this; + } + + /** + * Gets the cue image. + * + * @see Cue#bitmap + */ + @Pure + @Nullable + public Bitmap getBitmap() { + return bitmap; + } + + /** + * Sets the alignment of the cue text within the cue box. + * + *

        Passing null means the alignment is undefined. + * + * @see Cue#textAlignment + */ + @CanIgnoreReturnValue + public Builder setTextAlignment(@Nullable Layout.Alignment textAlignment) { + this.textAlignment = textAlignment; + return this; + } + + /** + * Gets the alignment of the cue text within the cue box, or null if the alignment is undefined. + * + * @see Cue#textAlignment + */ + @Pure + @Nullable + public Alignment getTextAlignment() { + return textAlignment; + } + + /** + * Sets the multi-row alignment of the cue. + * + *

        Passing null means the alignment is undefined. + * + * @see Cue#multiRowAlignment + */ + @CanIgnoreReturnValue + public Builder setMultiRowAlignment(@Nullable Layout.Alignment multiRowAlignment) { + this.multiRowAlignment = multiRowAlignment; + return this; + } + + /** + * Sets the position of the cue box within the viewport in the direction orthogonal to the + * writing direction. + * + * @see Cue#line + * @see Cue#lineType + */ + @CanIgnoreReturnValue + public Builder setLine(float line, @LineType int lineType) { + this.line = line; + this.lineType = lineType; + return this; + } + + /** + * Gets the position of the {@code lineAnchor} of the cue box within the viewport in the + * direction orthogonal to the writing direction. + * + * @see Cue#line + */ + @Pure + public float getLine() { + return line; + } + + /** + * Gets the type of the value of {@link #getLine()}. + * + * @see Cue#lineType + */ + @Pure + public @LineType int getLineType() { + return lineType; + } + + /** + * Sets the cue box anchor positioned by {@link #setLine(float, int) line}. + * + * @see Cue#lineAnchor + */ + @CanIgnoreReturnValue + public Builder setLineAnchor(@AnchorType int lineAnchor) { + this.lineAnchor = lineAnchor; + return this; + } + + /** + * Gets the cue box anchor positioned by {@link #setLine(float, int) line}. + * + * @see Cue#lineAnchor + */ + @Pure + public @AnchorType int getLineAnchor() { + return lineAnchor; + } + + /** + * Sets the fractional position of the {@link #setPositionAnchor(int) positionAnchor} of the cue + * box within the viewport in the direction orthogonal to {@link #setLine(float, int) line}. + * + * @see Cue#position + */ + @CanIgnoreReturnValue + public Builder setPosition(float position) { + this.position = position; + return this; + } + + /** + * Gets the fractional position of the {@link #setPositionAnchor(int) positionAnchor} of the cue + * box within the viewport in the direction orthogonal to {@link #setLine(float, int) line}. + * + * @see Cue#position + */ + @Pure + public float getPosition() { + return position; + } + + /** + * Sets the cue box anchor positioned by {@link #setPosition(float) position}. + * + * @see Cue#positionAnchor + */ + @CanIgnoreReturnValue + public Builder setPositionAnchor(@AnchorType int positionAnchor) { + this.positionAnchor = positionAnchor; + return this; + } + + /** + * Gets the cue box anchor positioned by {@link #setPosition(float) position}. + * + * @see Cue#positionAnchor + */ + @Pure + public @AnchorType int getPositionAnchor() { + return positionAnchor; + } + + /** + * Sets the default text size and type for this cue's text. + * + * @see Cue#textSize + * @see Cue#textSizeType + */ + @CanIgnoreReturnValue + public Builder setTextSize(float textSize, @TextSizeType int textSizeType) { + this.textSize = textSize; + this.textSizeType = textSizeType; + return this; + } + + /** + * Gets the default text size type for this cue's text. + * + * @see Cue#textSizeType + */ + @Pure + public @TextSizeType int getTextSizeType() { + return textSizeType; + } + + /** + * Gets the default text size for this cue's text. + * + * @see Cue#textSize + */ + @Pure + public float getTextSize() { + return textSize; + } + + /** + * Sets the size of the cue box in the writing direction specified as a fraction of the viewport + * size in that direction. 
+ * + * @see Cue#size + */ + @CanIgnoreReturnValue + public Builder setSize(float size) { + this.size = size; + return this; + } + + /** + * Gets the size of the cue box in the writing direction specified as a fraction of the viewport + * size in that direction. + * + * @see Cue#size + */ + @Pure + public float getSize() { + return size; + } + + /** + * Sets the bitmap height as a fraction of the viewport size. + * + * @see Cue#bitmapHeight + */ + @CanIgnoreReturnValue + public Builder setBitmapHeight(float bitmapHeight) { + this.bitmapHeight = bitmapHeight; + return this; + } + + /** + * Gets the bitmap height as a fraction of the viewport size. + * + * @see Cue#bitmapHeight + */ + @Pure + public float getBitmapHeight() { + return bitmapHeight; + } + + /** + * Sets the fill color of the window. + * + *
<p>
        Also sets {@link Cue#windowColorSet} to true. + * + * @see Cue#windowColor + * @see Cue#windowColorSet + */ + @CanIgnoreReturnValue + public Builder setWindowColor(@ColorInt int windowColor) { + this.windowColor = windowColor; + this.windowColorSet = true; + return this; + } + + /** Sets {@link Cue#windowColorSet} to false. */ + @CanIgnoreReturnValue + public Builder clearWindowColor() { + this.windowColorSet = false; + return this; + } + + /** + * Returns true if the fill color of the window is set. + * + * @see Cue#windowColorSet + */ + public boolean isWindowColorSet() { + return windowColorSet; + } + + /** + * Gets the fill color of the window. + * + * @see Cue#windowColor + */ + @Pure + @ColorInt + public int getWindowColor() { + return windowColor; + } + + /** + * Sets the vertical formatting for this Cue. + * + * @see Cue#verticalType + */ + @CanIgnoreReturnValue + public Builder setVerticalType(@VerticalType int verticalType) { + this.verticalType = verticalType; + return this; + } + + /** Sets the shear angle for this Cue. */ + @CanIgnoreReturnValue + public Builder setShearDegrees(float shearDegrees) { + this.shearDegrees = shearDegrees; + return this; + } + + /** + * Gets the vertical formatting for this Cue. + * + * @see Cue#verticalType + */ + @Pure + public @VerticalType int getVerticalType() { + return verticalType; + } + + /** Build the cue. */ + public Cue build() { + return new Cue( + text, + textAlignment, + multiRowAlignment, + bitmap, + line, + lineType, + lineAnchor, + position, + positionAnchor, + textSizeType, + textSize, + size, + bitmapHeight, + windowColorSet, + windowColor, + verticalType, + shearDegrees); + } + } + + // Bundleable implementation. + + private static final String FIELD_TEXT = Util.intToStringMaxRadix(0); + private static final String FIELD_TEXT_ALIGNMENT = Util.intToStringMaxRadix(1); + private static final String FIELD_MULTI_ROW_ALIGNMENT = Util.intToStringMaxRadix(2); + private static final String FIELD_BITMAP = Util.intToStringMaxRadix(3); + private static final String FIELD_LINE = Util.intToStringMaxRadix(4); + private static final String FIELD_LINE_TYPE = Util.intToStringMaxRadix(5); + private static final String FIELD_LINE_ANCHOR = Util.intToStringMaxRadix(6); + private static final String FIELD_POSITION = Util.intToStringMaxRadix(7); + private static final String FIELD_POSITION_ANCHOR = Util.intToStringMaxRadix(8); + private static final String FIELD_TEXT_SIZE_TYPE = Util.intToStringMaxRadix(9); + private static final String FIELD_TEXT_SIZE = Util.intToStringMaxRadix(10); + private static final String FIELD_SIZE = Util.intToStringMaxRadix(11); + private static final String FIELD_BITMAP_HEIGHT = Util.intToStringMaxRadix(12); + private static final String FIELD_WINDOW_COLOR = Util.intToStringMaxRadix(13); + private static final String FIELD_WINDOW_COLOR_SET = Util.intToStringMaxRadix(14); + private static final String FIELD_VERTICAL_TYPE = Util.intToStringMaxRadix(15); + private static final String FIELD_SHEAR_DEGREES = Util.intToStringMaxRadix(16); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putCharSequence(FIELD_TEXT, text); + bundle.putSerializable(FIELD_TEXT_ALIGNMENT, textAlignment); + bundle.putSerializable(FIELD_MULTI_ROW_ALIGNMENT, multiRowAlignment); + bundle.putParcelable(FIELD_BITMAP, bitmap); + bundle.putFloat(FIELD_LINE, line); + bundle.putInt(FIELD_LINE_TYPE, lineType); + bundle.putInt(FIELD_LINE_ANCHOR, lineAnchor); + bundle.putFloat(FIELD_POSITION, position); + 
bundle.putInt(FIELD_POSITION_ANCHOR, positionAnchor); + bundle.putInt(FIELD_TEXT_SIZE_TYPE, textSizeType); + bundle.putFloat(FIELD_TEXT_SIZE, textSize); + bundle.putFloat(FIELD_SIZE, size); + bundle.putFloat(FIELD_BITMAP_HEIGHT, bitmapHeight); + bundle.putBoolean(FIELD_WINDOW_COLOR_SET, windowColorSet); + bundle.putInt(FIELD_WINDOW_COLOR, windowColor); + bundle.putInt(FIELD_VERTICAL_TYPE, verticalType); + bundle.putFloat(FIELD_SHEAR_DEGREES, shearDegrees); + return bundle; + } + + public static final Creator CREATOR = Cue::fromBundle; + + private static final Cue fromBundle(Bundle bundle) { + Builder builder = new Builder(); + @Nullable CharSequence text = bundle.getCharSequence(FIELD_TEXT); + if (text != null) { + builder.setText(text); + } + @Nullable Alignment textAlignment = (Alignment) bundle.getSerializable(FIELD_TEXT_ALIGNMENT); + if (textAlignment != null) { + builder.setTextAlignment(textAlignment); + } + @Nullable + Alignment multiRowAlignment = (Alignment) bundle.getSerializable(FIELD_MULTI_ROW_ALIGNMENT); + if (multiRowAlignment != null) { + builder.setMultiRowAlignment(multiRowAlignment); + } + @Nullable Bitmap bitmap = bundle.getParcelable(FIELD_BITMAP); + if (bitmap != null) { + builder.setBitmap(bitmap); + } + if (bundle.containsKey(FIELD_LINE) && bundle.containsKey(FIELD_LINE_TYPE)) { + builder.setLine(bundle.getFloat(FIELD_LINE), bundle.getInt(FIELD_LINE_TYPE)); + } + if (bundle.containsKey(FIELD_LINE_ANCHOR)) { + builder.setLineAnchor(bundle.getInt(FIELD_LINE_ANCHOR)); + } + if (bundle.containsKey(FIELD_POSITION)) { + builder.setPosition(bundle.getFloat(FIELD_POSITION)); + } + if (bundle.containsKey(FIELD_POSITION_ANCHOR)) { + builder.setPositionAnchor(bundle.getInt(FIELD_POSITION_ANCHOR)); + } + if (bundle.containsKey(FIELD_TEXT_SIZE) && bundle.containsKey(FIELD_TEXT_SIZE_TYPE)) { + builder.setTextSize(bundle.getFloat(FIELD_TEXT_SIZE), bundle.getInt(FIELD_TEXT_SIZE_TYPE)); + } + if (bundle.containsKey(FIELD_SIZE)) { + builder.setSize(bundle.getFloat(FIELD_SIZE)); + } + if (bundle.containsKey(FIELD_BITMAP_HEIGHT)) { + builder.setBitmapHeight(bundle.getFloat(FIELD_BITMAP_HEIGHT)); + } + if (bundle.containsKey(FIELD_WINDOW_COLOR)) { + builder.setWindowColor(bundle.getInt(FIELD_WINDOW_COLOR)); + } + if (!bundle.getBoolean(FIELD_WINDOW_COLOR_SET, /* defaultValue= */ false)) { + builder.clearWindowColor(); + } + if (bundle.containsKey(FIELD_VERTICAL_TYPE)) { + builder.setVerticalType(bundle.getInt(FIELD_VERTICAL_TYPE)); + } + if (bundle.containsKey(FIELD_SHEAR_DEGREES)) { + builder.setShearDegrees(bundle.getFloat(FIELD_SHEAR_DEGREES)); + } + return builder.build(); + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/CueDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/CueDecoder.java new file mode 100644 index 0000000000..78e7107bb6 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/CueDecoder.java @@ -0,0 +1,48 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.text; + +import android.os.Bundle; +import android.os.Parcel; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.BundleableUtil; +import com.google.common.collect.ImmutableList; +import java.util.ArrayList; + +/** Decodes data encoded by {@link CueEncoder}. */ +public final class CueDecoder { + + // key under which list of cues is saved in the bundle + static final String BUNDLED_CUES = "c"; + + /** + * Decodes byte array into list of {@link Cue} objects. + * + * @param bytes byte array produced by {@link CueEncoder} + * @return decoded list of {@link Cue} objects. + */ + public ImmutableList decode(byte[] bytes) { + Parcel parcel = Parcel.obtain(); + parcel.unmarshall(bytes, 0, bytes.length); + parcel.setDataPosition(0); + Bundle bundle = parcel.readBundle(Bundle.class.getClassLoader()); + parcel.recycle(); + ArrayList bundledCues = + Assertions.checkNotNull(bundle.getParcelableArrayList(BUNDLED_CUES)); + + return BundleableUtil.fromBundleList(Cue.CREATOR, bundledCues); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/CueEncoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/CueEncoder.java new file mode 100644 index 0000000000..4f7d3c4a22 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/CueEncoder.java @@ -0,0 +1,44 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.text; + +import android.os.Bundle; +import android.os.Parcel; +import com.google.android.exoplayer2.util.BundleableUtil; +import java.util.ArrayList; +import java.util.List; + +/** Encodes data that can be decoded by {@link CueDecoder}. */ +public final class CueEncoder { + /** + * Encodes an {@link List} of {@link Cue} to a byte array that can be decoded by {@link + * CueDecoder}. + * + * @param cues Cues to be encoded. + * @return The serialized byte array. 
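For reference, here is a minimal sketch of how the new CueEncoder/CueDecoder pair and the Cue.Builder are intended to fit together, assuming the classes land exactly as added in this diff. The helper class and the particular line/position values are illustrative only; the anchor and line-type constants come from the existing Cue class.

import com.google.android.exoplayer2.text.Cue;
import com.google.android.exoplayer2.text.CueDecoder;
import com.google.android.exoplayer2.text.CueEncoder;
import com.google.common.collect.ImmutableList;
import java.util.List;

final class CueRoundTripExample {

  /** Builds a simple text cue, serializes it with CueEncoder and reads it back. */
  static List<Cue> roundTrip(CharSequence text) {
    Cue cue =
        new Cue.Builder()
            .setText(text)
            // Sample positioning values, chosen only for illustration.
            .setLine(0.9f, Cue.LINE_TYPE_FRACTION)
            .setLineAnchor(Cue.ANCHOR_TYPE_END)
            .setPosition(0.5f)
            .setPositionAnchor(Cue.ANCHOR_TYPE_MIDDLE)
            .setSize(0.8f)
            .build();

    // CueEncoder writes the cues into a Bundle and marshalls it through a Parcel.
    byte[] encoded = new CueEncoder().encode(ImmutableList.of(cue));
    // CueDecoder reverses the process; bitmap-less cues come back field for field.
    return new CueDecoder().decode(encoded);
  }

  private CueRoundTripExample() {}
}

Because CueDecoder unmarshalls exactly what CueEncoder wrote, the two must stay in lockstep; that is why both live in the same package and share the BUNDLED_CUES key.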
+ */ + public byte[] encode(List cues) { + ArrayList bundledCues = BundleableUtil.toBundleArrayList(cues); + Bundle allCuesBundle = new Bundle(); + allCuesBundle.putParcelableArrayList(CueDecoder.BUNDLED_CUES, bundledCues); + Parcel parcel = Parcel.obtain(); + parcel.writeBundle(allCuesBundle); + byte[] bytes = parcel.marshall(); + parcel.recycle(); + + return bytes; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/CueGroup.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/CueGroup.java new file mode 100644 index 0000000000..32672539b7 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/CueGroup.java @@ -0,0 +1,98 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.text; + +import android.graphics.Bitmap; +import android.os.Bundle; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Bundleable; +import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.util.BundleableUtil; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import java.util.ArrayList; +import java.util.List; + +/** Class to represent the state of active {@link Cue Cues} at a particular time. */ +public final class CueGroup implements Bundleable { + + /** An empty group with no {@link Cue Cues} and presentation time of zero. */ + public static final CueGroup EMPTY_TIME_ZERO = + new CueGroup(ImmutableList.of(), /* presentationTimeUs= */ 0); + + /** + * The cues in this group. + * + *
<p>
        This list is in ascending order of priority. If any of the cue boxes overlap when displayed, + * the {@link Cue} nearer the end of the list should be shown on top. + * + *
<p>
This list may be empty if the group represents a state with no cues. + */ + public final ImmutableList<Cue> cues; + /** + * The presentation time of the {@link #cues}, in microseconds. + * + *
<p>
        This time is an offset from the start of the current {@link Timeline.Period}. + */ + public final long presentationTimeUs; + + /** Creates a CueGroup. */ + public CueGroup(List cues, long presentationTimeUs) { + this.cues = ImmutableList.copyOf(cues); + this.presentationTimeUs = presentationTimeUs; + } + + // Bundleable implementation. + + private static final String FIELD_CUES = Util.intToStringMaxRadix(0); + private static final String FIELD_PRESENTATION_TIME_US = Util.intToStringMaxRadix(1); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putParcelableArrayList( + FIELD_CUES, BundleableUtil.toBundleArrayList(filterOutBitmapCues(cues))); + bundle.putLong(FIELD_PRESENTATION_TIME_US, presentationTimeUs); + return bundle; + } + + public static final Creator CREATOR = CueGroup::fromBundle; + + private static final CueGroup fromBundle(Bundle bundle) { + @Nullable ArrayList cueBundles = bundle.getParcelableArrayList(FIELD_CUES); + List cues = + cueBundles == null + ? ImmutableList.of() + : BundleableUtil.fromBundleList(Cue.CREATOR, cueBundles); + long presentationTimeUs = bundle.getLong(FIELD_PRESENTATION_TIME_US); + return new CueGroup(cues, presentationTimeUs); + } + + /** + * Filters out {@link Cue} objects containing {@link Bitmap}. It is used when transferring cues + * between processes to prevent transferring too much data. + */ + private static ImmutableList filterOutBitmapCues(List cues) { + ImmutableList.Builder builder = ImmutableList.builder(); + for (int i = 0; i < cues.size(); i++) { + if (cues.get(i).bitmap != null) { + continue; + } + builder.add(cues.get(i)); + } + return builder.build(); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ExoplayerCuesDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ExoplayerCuesDecoder.java new file mode 100644 index 0000000000..937bb52287 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ExoplayerCuesDecoder.java @@ -0,0 +1,175 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
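As a quick illustration of the new CueGroup type, the sketch below (a hypothetical helper, not part of the diff) builds a group and copies it through its Bundleable implementation; note that toBundle() filters out bitmap cues, which matters when cue data is sent across processes.

import android.os.Bundle;
import com.google.android.exoplayer2.text.Cue;
import com.google.android.exoplayer2.text.CueGroup;
import com.google.common.collect.ImmutableList;

final class CueGroupBundleExample {

  /** Wraps a single cue in a CueGroup and copies it through its Bundleable implementation. */
  static CueGroup copyViaBundle(Cue textCue, long presentationTimeUs) {
    CueGroup group = new CueGroup(ImmutableList.of(textCue), presentationTimeUs);
    // toBundle() deliberately drops cues that carry a Bitmap to keep IPC payloads small,
    // so only text cues survive this round trip.
    Bundle bundle = group.toBundle();
    return CueGroup.CREATOR.fromBundle(bundle);
  }

  private CueGroupBundleExample() {}
}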
+ */ +package com.google.android.exoplayer2.text; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static java.lang.annotation.ElementType.TYPE_USE; + +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.common.collect.ImmutableList; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.util.ArrayDeque; +import java.util.Deque; +import java.util.List; + +/** + * A {@link SubtitleDecoder} that decodes subtitle samples of type {@link + * MimeTypes#TEXT_EXOPLAYER_CUES} + */ +public final class ExoplayerCuesDecoder implements SubtitleDecoder { + @Documented + @Target(TYPE_USE) + @IntDef(value = {INPUT_BUFFER_AVAILABLE, INPUT_BUFFER_DEQUEUED, INPUT_BUFFER_QUEUED}) + @Retention(RetentionPolicy.SOURCE) + private @interface InputBufferState {} + + private static final int INPUT_BUFFER_AVAILABLE = 0; + private static final int INPUT_BUFFER_DEQUEUED = 1; + private static final int INPUT_BUFFER_QUEUED = 2; + + private static final int OUTPUT_BUFFERS_COUNT = 2; + + private final CueDecoder cueDecoder; + private final SubtitleInputBuffer inputBuffer; + private final Deque availableOutputBuffers; + + private @InputBufferState int inputBufferState; + private boolean released; + + public ExoplayerCuesDecoder() { + cueDecoder = new CueDecoder(); + inputBuffer = new SubtitleInputBuffer(); + availableOutputBuffers = new ArrayDeque<>(); + for (int i = 0; i < OUTPUT_BUFFERS_COUNT; i++) { + availableOutputBuffers.addFirst( + new SubtitleOutputBuffer() { + @Override + public void release() { + ExoplayerCuesDecoder.this.releaseOutputBuffer(this); + } + }); + } + inputBufferState = INPUT_BUFFER_AVAILABLE; + } + + @Override + public String getName() { + return "ExoplayerCuesDecoder"; + } + + @Nullable + @Override + public SubtitleInputBuffer dequeueInputBuffer() throws SubtitleDecoderException { + checkState(!released); + if (inputBufferState != INPUT_BUFFER_AVAILABLE) { + return null; + } + inputBufferState = INPUT_BUFFER_DEQUEUED; + return inputBuffer; + } + + @Override + public void queueInputBuffer(SubtitleInputBuffer inputBuffer) throws SubtitleDecoderException { + checkState(!released); + checkState(inputBufferState == INPUT_BUFFER_DEQUEUED); + checkArgument(this.inputBuffer == inputBuffer); + inputBufferState = INPUT_BUFFER_QUEUED; + } + + @Nullable + @Override + public SubtitleOutputBuffer dequeueOutputBuffer() throws SubtitleDecoderException { + checkState(!released); + if (inputBufferState != INPUT_BUFFER_QUEUED || availableOutputBuffers.isEmpty()) { + return null; + } + SubtitleOutputBuffer outputBuffer = availableOutputBuffers.removeFirst(); + if (inputBuffer.isEndOfStream()) { + outputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM); + } else { + SingleEventSubtitle subtitle = + new SingleEventSubtitle( + inputBuffer.timeUs, cueDecoder.decode(checkNotNull(inputBuffer.data).array())); + outputBuffer.setContent(inputBuffer.timeUs, subtitle, /* subsampleOffsetUs=*/ 0); + } + inputBuffer.clear(); + inputBufferState = INPUT_BUFFER_AVAILABLE; + return outputBuffer; + } + + @Override + public void flush() { + checkState(!released); + inputBuffer.clear(); + inputBufferState = 
INPUT_BUFFER_AVAILABLE; + } + + @Override + public void release() { + released = true; + } + + @Override + public void setPositionUs(long positionUs) { + // Do nothing + } + + private void releaseOutputBuffer(SubtitleOutputBuffer outputBuffer) { + checkState(availableOutputBuffers.size() < OUTPUT_BUFFERS_COUNT); + checkArgument(!availableOutputBuffers.contains(outputBuffer)); + outputBuffer.clear(); + availableOutputBuffers.addFirst(outputBuffer); + } + + private static final class SingleEventSubtitle implements Subtitle { + private final long timeUs; + private final ImmutableList cues; + + public SingleEventSubtitle(long timeUs, ImmutableList cues) { + this.timeUs = timeUs; + this.cues = cues; + } + + @Override + public int getNextEventTimeIndex(long timeUs) { + return this.timeUs > timeUs ? 0 : C.INDEX_UNSET; + } + + @Override + public int getEventTimeCount() { + return 1; + } + + @Override + public long getEventTime(int index) { + checkArgument(index == 0); + return timeUs; + } + + @Override + public List getCues(long timeUs) { + return (timeUs >= this.timeUs) ? cues : ImmutableList.of(); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SimpleSubtitleDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SimpleSubtitleDecoder.java index 8a1aea179a..d3f19355ed 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SimpleSubtitleDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SimpleSubtitleDecoder.java @@ -21,17 +21,17 @@ import com.google.android.exoplayer2.util.Assertions; import java.nio.ByteBuffer; -/** - * Base class for subtitle parsers that use their own decode thread. - */ -public abstract class SimpleSubtitleDecoder extends - SimpleDecoder implements - SubtitleDecoder { +/** Base class for subtitle parsers that use their own decode thread. */ +public abstract class SimpleSubtitleDecoder + extends SimpleDecoder + implements SubtitleDecoder { private final String name; - /** @param name The name of the decoder. */ - @SuppressWarnings("initialization:method.invocation.invalid") + /** + * @param name The name of the decoder. + */ + @SuppressWarnings("nullness:method.invocation") protected SimpleSubtitleDecoder(String name) { super(new SubtitleInputBuffer[2], new SubtitleOutputBuffer[2]); this.name = name; @@ -44,7 +44,7 @@ public final String getName() { } @Override - public void setPositionUs(long timeUs) { + public void setPositionUs(long positionUs) { // Do nothing } @@ -55,7 +55,12 @@ protected final SubtitleInputBuffer createInputBuffer() { @Override protected final SubtitleOutputBuffer createOutputBuffer() { - return new SimpleSubtitleOutputBuffer(this); + return new SubtitleOutputBuffer() { + @Override + public void release() { + SimpleSubtitleDecoder.this.releaseOutputBuffer(this); + } + }; } @Override @@ -63,11 +68,6 @@ protected final SubtitleDecoderException createUnexpectedDecodeException(Throwab return new SubtitleDecoderException("Unexpected decode error", error); } - @Override - protected final void releaseOutputBuffer(SubtitleOutputBuffer buffer) { - super.releaseOutputBuffer(buffer); - } - @SuppressWarnings("ByteBufferBackingArray") @Override @Nullable @@ -89,12 +89,11 @@ protected final SubtitleDecoderException decode( * Decodes data into a {@link Subtitle}. * * @param data An array holding the data to be decoded, starting at position 0. - * @param size The size of the data to be decoded. 
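The buffer handshake of the new ExoplayerCuesDecoder is easy to get wrong, so here is a rough usage sketch under the assumption that the decoder behaves as added above (a single reusable input buffer and a pool of two output buffers). The helper class, the sample cue and the timestamp are illustrative only.

import com.google.android.exoplayer2.text.Cue;
import com.google.android.exoplayer2.text.CueEncoder;
import com.google.android.exoplayer2.text.ExoplayerCuesDecoder;
import com.google.android.exoplayer2.text.SubtitleDecoderException;
import com.google.android.exoplayer2.text.SubtitleInputBuffer;
import com.google.android.exoplayer2.text.SubtitleOutputBuffer;
import com.google.android.exoplayer2.util.Assertions;
import com.google.common.collect.ImmutableList;
import java.util.List;

final class ExoplayerCuesDecoderExample {

  /** Feeds one encoded-cues sample through the decoder and returns the decoded cues. */
  static List<Cue> decodeOneSample(Cue cue, long timeUs) throws SubtitleDecoderException {
    ExoplayerCuesDecoder decoder = new ExoplayerCuesDecoder();
    try {
      // The decoder exposes one reusable input buffer; a fresh decoder always hands it out.
      SubtitleInputBuffer input = Assertions.checkNotNull(decoder.dequeueInputBuffer());
      byte[] encoded = new CueEncoder().encode(ImmutableList.of(cue));
      input.ensureSpaceForWrite(encoded.length);
      Assertions.checkNotNull(input.data).put(encoded);
      input.timeUs = timeUs;
      decoder.queueInputBuffer(input);

      // Two output buffers are pooled; release() hands a buffer back to the decoder.
      SubtitleOutputBuffer output = Assertions.checkNotNull(decoder.dequeueOutputBuffer());
      List<Cue> cues = output.getCues(timeUs);
      output.release();
      return cues;
    } finally {
      decoder.release();
    }
  }

  private ExoplayerCuesDecoderExample() {}
}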
+ * @param length The number of bytes from {@code data} to be decoded. * @param reset Whether the decoder must be reset before decoding. * @return The decoded {@link Subtitle}. * @throws SubtitleDecoderException If a decoding error occurs. */ - protected abstract Subtitle decode(byte[] data, int size, boolean reset) + protected abstract Subtitle decode(byte[] data, int length, boolean reset) throws SubtitleDecoderException; - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SimpleSubtitleOutputBuffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SimpleSubtitleOutputBuffer.java deleted file mode 100644 index b2c25631f4..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SimpleSubtitleOutputBuffer.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.text; - -/** - * A {@link SubtitleOutputBuffer} for decoders that extend {@link SimpleSubtitleDecoder}. - */ -/* package */ final class SimpleSubtitleOutputBuffer extends SubtitleOutputBuffer { - - private final SimpleSubtitleDecoder owner; - - /** - * @param owner The decoder that owns this buffer. - */ - public SimpleSubtitleOutputBuffer(SimpleSubtitleDecoder owner) { - super(); - this.owner = owner; - } - - @Override - public final void release() { - owner.releaseOutputBuffer(this); - } - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/Subtitle.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/Subtitle.java index 4dc5f61fb5..6581939d07 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/Subtitle.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/Subtitle.java @@ -18,9 +18,7 @@ import com.google.android.exoplayer2.C; import java.util.List; -/** - * A subtitle consisting of timed {@link Cue}s. - */ +/** A subtitle consisting of timed {@link Cue}s. */ public interface Subtitle { /** @@ -55,5 +53,4 @@ public interface Subtitle { * @return A list of cues that should be displayed, possibly empty. */ List getCues(long timeUs); - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleDecoder.java index 2b080c6564..e02ac36e5c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleDecoder.java @@ -17,19 +17,16 @@ import com.google.android.exoplayer2.decoder.Decoder; -/** - * Decodes {@link Subtitle}s from {@link SubtitleInputBuffer}s. - */ -public interface SubtitleDecoder extends - Decoder { +/** Decodes {@link Subtitle}s from {@link SubtitleInputBuffer}s. */ +public interface SubtitleDecoder + extends Decoder { /** * Informs the decoder of the current playback position. - *
<p>
- * Must be called prior to each attempt to dequeue output buffers from the decoder.
+ *
+ * <p>
        Must be called prior to each attempt to dequeue output buffers from the decoder. * * @param positionUs The current playback position in microseconds. */ void setPositionUs(long positionUs); - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleDecoderException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleDecoderException.java index b235706370..0d5e8c8305 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleDecoderException.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleDecoderException.java @@ -15,10 +15,11 @@ */ package com.google.android.exoplayer2.text; -/** - * Thrown when an error occurs decoding subtitle data. - */ -public class SubtitleDecoderException extends Exception { +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.decoder.DecoderException; + +/** Thrown when an error occurs decoding subtitle data. */ +public class SubtitleDecoderException extends DecoderException { /** * @param message The detail message for this exception. @@ -27,17 +28,18 @@ public SubtitleDecoderException(String message) { super(message); } - /** @param cause The cause of this exception. */ - public SubtitleDecoderException(Exception cause) { + /** + * @param cause The cause of this exception, or {@code null}. + */ + public SubtitleDecoderException(@Nullable Throwable cause) { super(cause); } /** * @param message The detail message for this exception. - * @param cause The cause of this exception. + * @param cause The cause of this exception, or {@code null}. */ - public SubtitleDecoderException(String message, Throwable cause) { + public SubtitleDecoderException(String message, @Nullable Throwable cause) { super(message, cause); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleDecoderFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleDecoderFactory.java index 927ee8be5e..06ce3079c7 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleDecoderFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleDecoderFactory.java @@ -29,9 +29,7 @@ import com.google.android.exoplayer2.text.webvtt.WebvttDecoder; import com.google.android.exoplayer2.util.MimeTypes; -/** - * A factory for {@link SubtitleDecoder} instances. - */ +/** A factory for {@link SubtitleDecoder} instances. */ public interface SubtitleDecoderFactory { /** @@ -68,6 +66,7 @@ public interface SubtitleDecoderFactory { *
<li>Cea708 ({@link Cea708Decoder})
 *   <li>DVB ({@link DvbDecoder})
 *   <li>PGS ({@link PgsDecoder})
+ *   <li>Exoplayer Cues ({@link ExoplayerCuesDecoder})
 *   </ul>
      */ SubtitleDecoderFactory DEFAULT = @@ -86,7 +85,8 @@ public boolean supportsFormat(Format format) { || MimeTypes.APPLICATION_MP4CEA608.equals(mimeType) || MimeTypes.APPLICATION_CEA708.equals(mimeType) || MimeTypes.APPLICATION_DVBSUBS.equals(mimeType) - || MimeTypes.APPLICATION_PGS.equals(mimeType); + || MimeTypes.APPLICATION_PGS.equals(mimeType) + || MimeTypes.TEXT_EXOPLAYER_CUES.equals(mimeType); } @Override @@ -108,13 +108,18 @@ public SubtitleDecoder createDecoder(Format format) { return new Tx3gDecoder(format.initializationData); case MimeTypes.APPLICATION_CEA608: case MimeTypes.APPLICATION_MP4CEA608: - return new Cea608Decoder(mimeType, format.accessibilityChannel); + return new Cea608Decoder( + mimeType, + format.accessibilityChannel, + Cea608Decoder.MIN_DATA_CHANNEL_TIMEOUT_MS); case MimeTypes.APPLICATION_CEA708: return new Cea708Decoder(format.accessibilityChannel, format.initializationData); case MimeTypes.APPLICATION_DVBSUBS: return new DvbDecoder(format.initializationData); case MimeTypes.APPLICATION_PGS: return new PgsDecoder(); + case MimeTypes.TEXT_EXOPLAYER_CUES: + return new ExoplayerCuesDecoder(); default: break; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleExtractor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleExtractor.java new file mode 100644 index 0000000000..95447b00d3 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleExtractor.java @@ -0,0 +1,265 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
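A short sketch of what the SubtitleDecoderFactory change enables: the DEFAULT factory now reports support for MimeTypes.TEXT_EXOPLAYER_CUES and returns an ExoplayerCuesDecoder for such formats. The helper class below is illustrative only.

import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.text.SubtitleDecoder;
import com.google.android.exoplayer2.text.SubtitleDecoderFactory;
import com.google.android.exoplayer2.util.MimeTypes;

final class ExoplayerCuesFactoryExample {

  static SubtitleDecoder createDecoderForEncodedCues() {
    Format format =
        new Format.Builder().setSampleMimeType(MimeTypes.TEXT_EXOPLAYER_CUES).build();
    SubtitleDecoderFactory factory = SubtitleDecoderFactory.DEFAULT;
    if (!factory.supportsFormat(format)) {
      throw new IllegalStateException("TEXT_EXOPLAYER_CUES should be supported after this change");
    }
    // With this change the DEFAULT factory hands back an ExoplayerCuesDecoder for this MIME type.
    return factory.createDecoder(format);
  }

  private ExoplayerCuesFactoryExample() {}
}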
+ */ +package com.google.android.exoplayer2.text; + +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static java.lang.annotation.ElementType.TYPE_USE; + +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.extractor.Extractor; +import com.google.android.exoplayer2.extractor.ExtractorInput; +import com.google.android.exoplayer2.extractor.ExtractorOutput; +import com.google.android.exoplayer2.extractor.IndexSeekMap; +import com.google.android.exoplayer2.extractor.PositionHolder; +import com.google.android.exoplayer2.extractor.TrackOutput; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import com.google.common.primitives.Ints; +import java.io.IOException; +import java.io.InterruptedIOException; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.util.ArrayList; +import java.util.List; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** Generic extractor for extracting subtitles from various subtitle formats. */ +public class SubtitleExtractor implements Extractor { + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + STATE_CREATED, + STATE_INITIALIZED, + STATE_EXTRACTING, + STATE_SEEKING, + STATE_FINISHED, + STATE_RELEASED + }) + private @interface State {} + + /** The extractor has been created. */ + private static final int STATE_CREATED = 0; + /** The extractor has been initialized. */ + private static final int STATE_INITIALIZED = 1; + /** The extractor is reading from the input and writing to the output. */ + private static final int STATE_EXTRACTING = 2; + /** The extractor has received a seek() operation after it has already finished extracting. */ + private static final int STATE_SEEKING = 3; + /** The extractor has finished extracting the input. */ + private static final int STATE_FINISHED = 4; + /** The extractor has been released. */ + private static final int STATE_RELEASED = 5; + + private static final int DEFAULT_BUFFER_SIZE = 1024; + + private final SubtitleDecoder subtitleDecoder; + private final CueEncoder cueEncoder; + private final ParsableByteArray subtitleData; + private final Format format; + private final List timestamps; + private final List samples; + + private @MonotonicNonNull ExtractorOutput extractorOutput; + private @MonotonicNonNull TrackOutput trackOutput; + private int bytesRead; + private @State int state; + private long seekTimeUs; + + /** + * @param subtitleDecoder The decoder used for decoding the subtitle data. The extractor will + * release the decoder in {@link SubtitleExtractor#release()}. + * @param format Format that describes subtitle data. 
+ */ + public SubtitleExtractor(SubtitleDecoder subtitleDecoder, Format format) { + this.subtitleDecoder = subtitleDecoder; + cueEncoder = new CueEncoder(); + subtitleData = new ParsableByteArray(); + this.format = + format + .buildUpon() + .setSampleMimeType(MimeTypes.TEXT_EXOPLAYER_CUES) + .setCodecs(format.sampleMimeType) + .build(); + timestamps = new ArrayList<>(); + samples = new ArrayList<>(); + state = STATE_CREATED; + seekTimeUs = C.TIME_UNSET; + } + + @Override + public boolean sniff(ExtractorInput input) throws IOException { + // TODO: Implement sniff() according to the Extractor interface documentation. For now sniff() + // can safely return true because we plan to use this class in an ExtractorFactory that returns + // exactly one Extractor implementation. + return true; + } + + @Override + public void init(ExtractorOutput output) { + checkState(state == STATE_CREATED); + extractorOutput = output; + trackOutput = extractorOutput.track(/* id= */ 0, C.TRACK_TYPE_TEXT); + extractorOutput.endTracks(); + extractorOutput.seekMap( + new IndexSeekMap( + /* positions= */ new long[] {0}, + /* timesUs= */ new long[] {0}, + /* durationUs= */ C.TIME_UNSET)); + trackOutput.format(format); + state = STATE_INITIALIZED; + } + + @Override + public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException { + checkState(state != STATE_CREATED && state != STATE_RELEASED); + if (state == STATE_INITIALIZED) { + subtitleData.reset( + input.getLength() != C.LENGTH_UNSET + ? Ints.checkedCast(input.getLength()) + : DEFAULT_BUFFER_SIZE); + bytesRead = 0; + state = STATE_EXTRACTING; + } + if (state == STATE_EXTRACTING) { + boolean inputFinished = readFromInput(input); + if (inputFinished) { + decode(); + writeToOutput(); + state = STATE_FINISHED; + } + } + if (state == STATE_SEEKING) { + boolean inputFinished = skipInput(input); + if (inputFinished) { + writeToOutput(); + state = STATE_FINISHED; + } + } + if (state == STATE_FINISHED) { + return RESULT_END_OF_INPUT; + } + return RESULT_CONTINUE; + } + + @Override + public void seek(long position, long timeUs) { + checkState(state != STATE_CREATED && state != STATE_RELEASED); + seekTimeUs = timeUs; + if (state == STATE_EXTRACTING) { + state = STATE_INITIALIZED; + } + if (state == STATE_FINISHED) { + state = STATE_SEEKING; + } + } + + /** Releases the extractor's resources, including the {@link SubtitleDecoder}. */ + @Override + public void release() { + if (state == STATE_RELEASED) { + return; + } + subtitleDecoder.release(); + state = STATE_RELEASED; + } + + /** Returns whether the input has been fully skipped. */ + private boolean skipInput(ExtractorInput input) throws IOException { + return input.skip( + input.getLength() != C.LENGTH_UNSET + ? Ints.checkedCast(input.getLength()) + : DEFAULT_BUFFER_SIZE) + == C.RESULT_END_OF_INPUT; + } + + /** Returns whether reading has been finished. */ + private boolean readFromInput(ExtractorInput input) throws IOException { + if (subtitleData.capacity() == bytesRead) { + subtitleData.ensureCapacity(bytesRead + DEFAULT_BUFFER_SIZE); + } + int readResult = + input.read(subtitleData.getData(), bytesRead, subtitleData.capacity() - bytesRead); + if (readResult != C.RESULT_END_OF_INPUT) { + bytesRead += readResult; + } + long inputLength = input.getLength(); + return (inputLength != C.LENGTH_UNSET && bytesRead == inputLength) + || readResult == C.RESULT_END_OF_INPUT; + } + + /** Decodes the subtitle data and stores the samples in the memory of the extractor. 
*/ + private void decode() throws IOException { + try { + @Nullable SubtitleInputBuffer inputBuffer = subtitleDecoder.dequeueInputBuffer(); + while (inputBuffer == null) { + Thread.sleep(5); + inputBuffer = subtitleDecoder.dequeueInputBuffer(); + } + inputBuffer.ensureSpaceForWrite(bytesRead); + inputBuffer.data.put(subtitleData.getData(), /* offset= */ 0, bytesRead); + inputBuffer.data.limit(bytesRead); + subtitleDecoder.queueInputBuffer(inputBuffer); + @Nullable SubtitleOutputBuffer outputBuffer = subtitleDecoder.dequeueOutputBuffer(); + while (outputBuffer == null) { + Thread.sleep(5); + outputBuffer = subtitleDecoder.dequeueOutputBuffer(); + } + for (int i = 0; i < outputBuffer.getEventTimeCount(); i++) { + List cues = outputBuffer.getCues(outputBuffer.getEventTime(i)); + byte[] cuesSample = cueEncoder.encode(cues); + timestamps.add(outputBuffer.getEventTime(i)); + samples.add(new ParsableByteArray(cuesSample)); + } + outputBuffer.release(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new InterruptedIOException(); + } catch (SubtitleDecoderException e) { + throw ParserException.createForMalformedContainer("SubtitleDecoder failed.", e); + } + } + + private void writeToOutput() { + checkStateNotNull(this.trackOutput); + checkState(timestamps.size() == samples.size()); + int index = + seekTimeUs == C.TIME_UNSET + ? 0 + : Util.binarySearchFloor( + timestamps, seekTimeUs, /* inclusive= */ true, /* stayInBounds= */ true); + for (int i = index; i < samples.size(); i++) { + ParsableByteArray sample = samples.get(i); + sample.setPosition(0); + int size = sample.getData().length; + trackOutput.sampleData(sample, size); + trackOutput.sampleMetadata( + /* timeUs= */ timestamps.get(i), + /* flags= */ C.BUFFER_FLAG_KEY_FRAME, + /* size= */ size, + /* offset= */ 0, + /* cryptoData= */ null); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleInputBuffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleInputBuffer.java index 9866517a58..1e80a62e11 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleInputBuffer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleInputBuffer.java @@ -22,13 +22,12 @@ public class SubtitleInputBuffer extends DecoderInputBuffer { /** - * An offset that must be added to the subtitle's event times after it's been decoded, or - * {@link Format#OFFSET_SAMPLE_RELATIVE} if {@link #timeUs} should be added. + * An offset that must be added to the subtitle's event times after it's been decoded, or {@link + * Format#OFFSET_SAMPLE_RELATIVE} if {@link #timeUs} should be added. 
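For context, a sketch of how the new SubtitleExtractor is intended to be wired up: it wraps a conventional SubtitleDecoder (a WebvttDecoder here, chosen purely as an example) together with the original subtitle Format, and re-emits the decoded events as TEXT_EXOPLAYER_CUES samples. The helper below is illustrative and not part of the diff.

import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.extractor.Extractor;
import com.google.android.exoplayer2.text.SubtitleExtractor;
import com.google.android.exoplayer2.text.webvtt.WebvttDecoder;
import com.google.android.exoplayer2.util.MimeTypes;

final class SubtitleExtractorExample {

  /** Wraps a WebVTT decoder so a sideloaded file is re-emitted as TEXT_EXOPLAYER_CUES samples. */
  static Extractor createWebvttExtractor(String language) {
    Format webvttFormat =
        new Format.Builder()
            .setSampleMimeType(MimeTypes.TEXT_VTT)
            .setLanguage(language)
            .build();
    // The extractor decodes the whole input with the wrapped decoder, encodes every event's
    // cues with CueEncoder, and releases the decoder when release() is called.
    return new SubtitleExtractor(new WebvttDecoder(), webvttFormat);
  }

  private SubtitleExtractorExample() {}
}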
*/ public long subsampleOffsetUs; public SubtitleInputBuffer() { super(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_NORMAL); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleOutputBuffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleOutputBuffer.java index 1dcdecf95f..898551b84d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleOutputBuffer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/SubtitleOutputBuffer.java @@ -17,14 +17,12 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.decoder.OutputBuffer; +import com.google.android.exoplayer2.decoder.DecoderOutputBuffer; import com.google.android.exoplayer2.util.Assertions; import java.util.List; -/** - * Base class for {@link SubtitleDecoder} output buffers. - */ -public abstract class SubtitleOutputBuffer extends OutputBuffer implements Subtitle { +/** Base class for {@link SubtitleDecoder} output buffers. */ +public abstract class SubtitleOutputBuffer extends DecoderOutputBuffer implements Subtitle { @Nullable private Subtitle subtitle; private long subsampleOffsetUs; @@ -35,14 +33,14 @@ public abstract class SubtitleOutputBuffer extends OutputBuffer implements Subti * * @param timeUs The time of the start of the subtitle in microseconds. * @param subtitle The subtitle. - * @param subsampleOffsetUs An offset that must be added to the subtitle's event times, or - * {@link Format#OFFSET_SAMPLE_RELATIVE} if {@code timeUs} should be added. + * @param subsampleOffsetUs An offset that must be added to the subtitle's event times, or {@link + * Format#OFFSET_SAMPLE_RELATIVE} if {@code timeUs} should be added. */ public void setContent(long timeUs, Subtitle subtitle, long subsampleOffsetUs) { this.timeUs = timeUs; this.subtitle = subtitle; - this.subsampleOffsetUs = subsampleOffsetUs == Format.OFFSET_SAMPLE_RELATIVE ? this.timeUs - : subsampleOffsetUs; + this.subsampleOffsetUs = + subsampleOffsetUs == Format.OFFSET_SAMPLE_RELATIVE ? this.timeUs : subsampleOffsetUs; } @Override @@ -65,13 +63,9 @@ public List getCues(long timeUs) { return Assertions.checkNotNull(subtitle).getCues(timeUs - subsampleOffsetUs); } - @Override - public abstract void release(); - @Override public void clear() { super.clear(); subtitle = null; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/TextOutput.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/TextOutput.java index aa3b4e5557..35dbcfed57 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/TextOutput.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/TextOutput.java @@ -17,15 +17,25 @@ import java.util.List; -/** - * Receives text output. - */ +/** Receives text output. */ public interface TextOutput { /** - * Called when there is a change in the {@link Cue}s. + * Called when there is a change in the {@link Cue Cues}. + * + *
<p>
Both {@link #onCues(List)} and {@link #onCues(CueGroup)} are called when there is a change + * in the cues. You should only implement one or the other. + * + * @deprecated Use {@link #onCues(CueGroup)} instead. + */ + @Deprecated + default void onCues(List<Cue> cues) {} + + /** + * Called when there is a change in the {@link CueGroup}. * - * @param cues The {@link Cue}s. May be empty. + *
<p>
      Both {@link #onCues(List)} and {@link #onCues(CueGroup)} are called when there is a change + * in the cues. You should only implement one or the other. */ - void onCues(List cues); + void onCues(CueGroup cueGroup); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/TextRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/TextRenderer.java index 46c26db122..862463d33b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/TextRenderer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/TextRenderer.java @@ -15,6 +15,10 @@ */ package com.google.android.exoplayer2.text; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.os.Handler; import android.os.Handler.Callback; import android.os.Looper; @@ -26,22 +30,24 @@ import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.RendererCapabilities; -import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.source.SampleStream.ReadDataResult; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; -import java.util.Collections; -import java.util.List; +import java.lang.annotation.Target; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; +import org.checkerframework.dataflow.qual.SideEffectFree; /** * A renderer for text. - *
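Because onCues(List) is now deprecated in favour of onCues(CueGroup) and has a default no-op body, a new TextOutput implementation only needs the CueGroup overload. A minimal sketch follows; the class name and the logging behaviour are illustrative.

import com.google.android.exoplayer2.text.Cue;
import com.google.android.exoplayer2.text.CueGroup;
import com.google.android.exoplayer2.text.TextOutput;
import com.google.android.exoplayer2.util.Log;

final class LoggingTextOutput implements TextOutput {
  private static final String TAG = "LoggingTextOutput";

  @Override
  public void onCues(CueGroup cueGroup) {
    // TextRenderer invokes both overloads; the deprecated List variant now has a default
    // no-op body, so a new implementation only needs to override this method.
    for (Cue cue : cueGroup.cues) {
      Log.d(TAG, "cue @" + cueGroup.presentationTimeUs + "us: " + cue.text);
    }
  }
}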
<p>
- * {@link Subtitle}s are decoded from sample data using {@link SubtitleDecoder} instances obtained
- * from a {@link SubtitleDecoderFactory}. The actual rendering of the subtitle {@link Cue}s is
- * delegated to a {@link TextOutput}.
+ *
+ * <p>
      {@link Subtitle}s are decoded from sample data using {@link SubtitleDecoder} instances + * obtained from a {@link SubtitleDecoderFactory}. The actual rendering of the subtitle {@link Cue}s + * is delegated to a {@link TextOutput}. */ public final class TextRenderer extends BaseRenderer implements Callback { @@ -49,15 +55,14 @@ public final class TextRenderer extends BaseRenderer implements Callback { @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ REPLACEMENT_STATE_NONE, REPLACEMENT_STATE_SIGNAL_END_OF_STREAM, REPLACEMENT_STATE_WAIT_END_OF_STREAM }) private @interface ReplacementState {} - /** - * The decoder does not need to be replaced. - */ + /** The decoder does not need to be replaced. */ private static final int REPLACEMENT_STATE_NONE = 0; /** * The decoder needs to be replaced, but we haven't yet signaled an end of stream to the existing @@ -81,13 +86,17 @@ public final class TextRenderer extends BaseRenderer implements Callback { private boolean inputStreamEnded; private boolean outputStreamEnded; - @ReplacementState private int decoderReplacementState; + private boolean waitingForKeyFrame; + private @ReplacementState int decoderReplacementState; @Nullable private Format streamFormat; @Nullable private SubtitleDecoder decoder; @Nullable private SubtitleInputBuffer nextInputBuffer; @Nullable private SubtitleOutputBuffer subtitle; @Nullable private SubtitleOutputBuffer nextSubtitle; private int nextSubtitleEventIndex; + private long finalStreamEndPositionUs; + private long outputStreamOffsetUs; + private long lastRendererPositionUs; /** * @param output The output. @@ -113,53 +122,92 @@ public TextRenderer(TextOutput output, @Nullable Looper outputLooper) { public TextRenderer( TextOutput output, @Nullable Looper outputLooper, SubtitleDecoderFactory decoderFactory) { super(C.TRACK_TYPE_TEXT); - this.output = Assertions.checkNotNull(output); + this.output = checkNotNull(output); this.outputHandler = outputLooper == null ? null : Util.createHandler(outputLooper, /* callback= */ this); this.decoderFactory = decoderFactory; formatHolder = new FormatHolder(); + finalStreamEndPositionUs = C.TIME_UNSET; + outputStreamOffsetUs = C.TIME_UNSET; + lastRendererPositionUs = C.TIME_UNSET; + } + + @Override + public String getName() { + return TAG; } @Override - @Capabilities - public int supportsFormat(Format format) { + public @Capabilities int supportsFormat(Format format) { if (decoderFactory.supportsFormat(format)) { return RendererCapabilities.create( - supportsFormatDrm(null, format.drmInitData) ? FORMAT_HANDLED : FORMAT_UNSUPPORTED_DRM); + format.cryptoType == C.CRYPTO_TYPE_NONE ? C.FORMAT_HANDLED : C.FORMAT_UNSUPPORTED_DRM); } else if (MimeTypes.isText(format.sampleMimeType)) { - return RendererCapabilities.create(FORMAT_UNSUPPORTED_SUBTYPE); + return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_SUBTYPE); } else { - return RendererCapabilities.create(FORMAT_UNSUPPORTED_TYPE); + return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE); } } + /** + * Sets the position at which to stop rendering the current stream. + * + *
<p>
      Must be called after {@link #setCurrentStreamFinal()}. + * + * @param streamEndPositionUs The position to stop rendering at or {@link C#LENGTH_UNSET} to + * render until the end of the current stream. + */ + // TODO(internal b/181312195): Remove this when it's no longer needed once subtitles are decoded + // on the loading side of SampleQueue. + public void setFinalStreamEndPositionUs(long streamEndPositionUs) { + checkState(isCurrentStreamFinal()); + this.finalStreamEndPositionUs = streamEndPositionUs; + } + @Override - protected void onStreamChanged(Format[] formats, long offsetUs) { + protected void onStreamChanged(Format[] formats, long startPositionUs, long offsetUs) { + outputStreamOffsetUs = offsetUs; streamFormat = formats[0]; if (decoder != null) { decoderReplacementState = REPLACEMENT_STATE_SIGNAL_END_OF_STREAM; } else { - decoder = decoderFactory.createDecoder(streamFormat); + initDecoder(); } } @Override protected void onPositionReset(long positionUs, boolean joining) { + lastRendererPositionUs = positionUs; + clearOutput(); inputStreamEnded = false; outputStreamEnded = false; - resetOutputAndDecoder(); + finalStreamEndPositionUs = C.TIME_UNSET; + if (decoderReplacementState != REPLACEMENT_STATE_NONE) { + replaceDecoder(); + } else { + releaseBuffers(); + checkNotNull(decoder).flush(); + } } @Override public void render(long positionUs, long elapsedRealtimeUs) { + lastRendererPositionUs = positionUs; + if (isCurrentStreamFinal() + && finalStreamEndPositionUs != C.TIME_UNSET + && positionUs >= finalStreamEndPositionUs) { + releaseBuffers(); + outputStreamEnded = true; + } + if (outputStreamEnded) { return; } if (nextSubtitle == null) { - decoder.setPositionUs(positionUs); + checkNotNull(decoder).setPositionUs(positionUs); try { - nextSubtitle = decoder.dequeueOutputBuffer(); + nextSubtitle = checkNotNull(decoder).dequeueOutputBuffer(); } catch (SubtitleDecoderException e) { handleDecoderError(e); return; @@ -181,8 +229,8 @@ public void render(long positionUs, long elapsedRealtimeUs) { textRendererNeedsUpdate = true; } } - if (nextSubtitle != null) { + SubtitleOutputBuffer nextSubtitle = this.nextSubtitle; if (nextSubtitle.isEndOfStream()) { if (!textRendererNeedsUpdate && getNextEventTime() == Long.MAX_VALUE) { if (decoderReplacementState == REPLACEMENT_STATE_WAIT_END_OF_STREAM) { @@ -197,16 +245,20 @@ public void render(long positionUs, long elapsedRealtimeUs) { if (subtitle != null) { subtitle.release(); } + nextSubtitleEventIndex = nextSubtitle.getNextEventTimeIndex(positionUs); subtitle = nextSubtitle; - nextSubtitle = null; - nextSubtitleEventIndex = subtitle.getNextEventTimeIndex(positionUs); + this.nextSubtitle = null; textRendererNeedsUpdate = true; } } if (textRendererNeedsUpdate) { + // If textRendererNeedsUpdate then subtitle must be non-null. + checkNotNull(subtitle); // textRendererNeedsUpdate is set and we're playing. Update the renderer. 
- updateOutput(subtitle.getCues(positionUs)); + long presentationTimeUs = getPresentationTimeUs(getCurrentEventTimeUs(positionUs)); + CueGroup cueGroup = new CueGroup(subtitle.getCues(positionUs), presentationTimeUs); + updateOutput(cueGroup); } if (decoderReplacementState == REPLACEMENT_STATE_WAIT_END_OF_STREAM) { @@ -215,44 +267,57 @@ public void render(long positionUs, long elapsedRealtimeUs) { try { while (!inputStreamEnded) { + @Nullable SubtitleInputBuffer nextInputBuffer = this.nextInputBuffer; if (nextInputBuffer == null) { - nextInputBuffer = decoder.dequeueInputBuffer(); + nextInputBuffer = checkNotNull(decoder).dequeueInputBuffer(); if (nextInputBuffer == null) { return; } + this.nextInputBuffer = nextInputBuffer; } if (decoderReplacementState == REPLACEMENT_STATE_SIGNAL_END_OF_STREAM) { nextInputBuffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM); - decoder.queueInputBuffer(nextInputBuffer); - nextInputBuffer = null; + checkNotNull(decoder).queueInputBuffer(nextInputBuffer); + this.nextInputBuffer = null; decoderReplacementState = REPLACEMENT_STATE_WAIT_END_OF_STREAM; return; } // Try and read the next subtitle from the source. - int result = readSource(formatHolder, nextInputBuffer, false); + @ReadDataResult int result = readSource(formatHolder, nextInputBuffer, /* readFlags= */ 0); if (result == C.RESULT_BUFFER_READ) { if (nextInputBuffer.isEndOfStream()) { inputStreamEnded = true; + waitingForKeyFrame = false; } else { - nextInputBuffer.subsampleOffsetUs = formatHolder.format.subsampleOffsetUs; + @Nullable Format format = formatHolder.format; + if (format == null) { + // We haven't received a format yet. + return; + } + nextInputBuffer.subsampleOffsetUs = format.subsampleOffsetUs; nextInputBuffer.flip(); + waitingForKeyFrame &= !nextInputBuffer.isKeyFrame(); + } + if (!waitingForKeyFrame) { + checkNotNull(decoder).queueInputBuffer(nextInputBuffer); + this.nextInputBuffer = null; } - decoder.queueInputBuffer(nextInputBuffer); - nextInputBuffer = null; } else if (result == C.RESULT_NOTHING_READ) { return; } } } catch (SubtitleDecoderException e) { handleDecoderError(e); - return; } } @Override protected void onDisabled() { streamFormat = null; + finalStreamEndPositionUs = C.TIME_UNSET; clearOutput(); + outputStreamOffsetUs = C.TIME_UNSET; + lastRendererPositionUs = C.TIME_UNSET; releaseDecoder(); } @@ -283,48 +348,58 @@ private void releaseBuffers() { private void releaseDecoder() { releaseBuffers(); - decoder.release(); + checkNotNull(decoder).release(); decoder = null; decoderReplacementState = REPLACEMENT_STATE_NONE; } + private void initDecoder() { + waitingForKeyFrame = true; + decoder = decoderFactory.createDecoder(checkNotNull(streamFormat)); + } + private void replaceDecoder() { releaseDecoder(); - decoder = decoderFactory.createDecoder(streamFormat); + initDecoder(); } private long getNextEventTime() { - return nextSubtitleEventIndex == C.INDEX_UNSET - || nextSubtitleEventIndex >= subtitle.getEventTimeCount() - ? Long.MAX_VALUE : subtitle.getEventTime(nextSubtitleEventIndex); + if (nextSubtitleEventIndex == C.INDEX_UNSET) { + return Long.MAX_VALUE; + } + checkNotNull(subtitle); + return nextSubtitleEventIndex >= subtitle.getEventTimeCount() + ? 
Long.MAX_VALUE + : subtitle.getEventTime(nextSubtitleEventIndex); } - private void updateOutput(List cues) { + private void updateOutput(CueGroup cueGroup) { if (outputHandler != null) { - outputHandler.obtainMessage(MSG_UPDATE_OUTPUT, cues).sendToTarget(); + outputHandler.obtainMessage(MSG_UPDATE_OUTPUT, cueGroup).sendToTarget(); } else { - invokeUpdateOutputInternal(cues); + invokeUpdateOutputInternal(cueGroup); } } private void clearOutput() { - updateOutput(Collections.emptyList()); + updateOutput(new CueGroup(ImmutableList.of(), getPresentationTimeUs(lastRendererPositionUs))); } - @SuppressWarnings("unchecked") @Override public boolean handleMessage(Message msg) { switch (msg.what) { case MSG_UPDATE_OUTPUT: - invokeUpdateOutputInternal((List) msg.obj); + invokeUpdateOutputInternal((CueGroup) msg.obj); return true; default: throw new IllegalStateException(); } } - private void invokeUpdateOutputInternal(List cues) { - output.onCues(cues); + @SuppressWarnings("deprecation") // We need to call both onCues method for backward compatibility. + private void invokeUpdateOutputInternal(CueGroup cueGroup) { + output.onCues(cueGroup.cues); + output.onCues(cueGroup); } /** @@ -335,16 +410,28 @@ private void invokeUpdateOutputInternal(List cues) { */ private void handleDecoderError(SubtitleDecoderException e) { Log.e(TAG, "Subtitle decoding failed. streamFormat=" + streamFormat, e); - resetOutputAndDecoder(); + clearOutput(); + replaceDecoder(); } - private void resetOutputAndDecoder() { - clearOutput(); - if (decoderReplacementState != REPLACEMENT_STATE_NONE) { - replaceDecoder(); - } else { - releaseBuffers(); - decoder.flush(); + @RequiresNonNull("subtitle") + @SideEffectFree + private long getCurrentEventTimeUs(long positionUs) { + int nextEventTimeIndex = subtitle.getNextEventTimeIndex(positionUs); + if (nextEventTimeIndex == 0 || subtitle.getEventTimeCount() == 0) { + return subtitle.timeUs; } + + return nextEventTimeIndex == C.INDEX_UNSET + ? 
subtitle.getEventTime(subtitle.getEventTimeCount() - 1) + : subtitle.getEventTime(nextEventTimeIndex - 1); + } + + @SideEffectFree + private long getPresentationTimeUs(long positionUs) { + checkState(positionUs != C.TIME_UNSET); + checkState(outputStreamOffsetUs != C.TIME_UNSET); + + return positionUs - outputStreamOffsetUs; } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/Cea608Decoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/Cea608Decoder.java index 5a14063aa1..61d3e512f0 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/Cea608Decoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/Cea608Decoder.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.text.cea; +import static java.lang.Math.min; + import android.graphics.Color; import android.graphics.Typeface; import android.text.Layout.Alignment; @@ -24,23 +26,35 @@ import android.text.style.ForegroundColorSpan; import android.text.style.StyleSpan; import android.text.style.UnderlineSpan; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.text.Cue; import com.google.android.exoplayer2.text.Subtitle; import com.google.android.exoplayer2.text.SubtitleDecoder; +import com.google.android.exoplayer2.text.SubtitleDecoderException; import com.google.android.exoplayer2.text.SubtitleInputBuffer; +import com.google.android.exoplayer2.text.SubtitleOutputBuffer; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import org.checkerframework.checker.nullness.compatqual.NullableType; -/** - * A {@link SubtitleDecoder} for CEA-608 (also known as "line 21 captions" and "EIA-608"). - */ +/** A {@link SubtitleDecoder} for CEA-608 (also known as "line 21 captions" and "EIA-608"). */ public final class Cea608Decoder extends CeaDecoder { + /** + * The minimum value for the {@code validDataChannelTimeoutMs} constructor parameter permitted by + * ANSI/CTA-608-E R-2014 Annex C.9. + */ + public static final long MIN_DATA_CHANNEL_TIMEOUT_MS = 16_000; + private static final String TAG = "Cea608Decoder"; private static final int CC_VALID_FLAG = 0x04; @@ -127,72 +141,148 @@ public final class Cea608Decoder extends CeaDecoder { private static final byte CTRL_END_OF_CAPTION = 0x2F; // Basic North American 608 CC char set, mostly ASCII. Indexed by (char-0x20). - private static final int[] BASIC_CHARACTER_SET = new int[] { - 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, // ! " # $ % & ' - 0x28, 0x29, // ( ) - 0xE1, // 2A: 225 'á' "Latin small letter A with acute" - 0x2B, 0x2C, 0x2D, 0x2E, 0x2F, // + , - . / - 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, // 0 1 2 3 4 5 6 7 - 0x38, 0x39, 0x3A, 0x3B, 0x3C, 0x3D, 0x3E, 0x3F, // 8 9 : ; < = > ? 
- 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, // @ A B C D E F G - 0x48, 0x49, 0x4A, 0x4B, 0x4C, 0x4D, 0x4E, 0x4F, // H I J K L M N O - 0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, // P Q R S T U V W - 0x58, 0x59, 0x5A, 0x5B, // X Y Z [ - 0xE9, // 5C: 233 'é' "Latin small letter E with acute" - 0x5D, // ] - 0xED, // 5E: 237 'í' "Latin small letter I with acute" - 0xF3, // 5F: 243 'ó' "Latin small letter O with acute" - 0xFA, // 60: 250 'ú' "Latin small letter U with acute" - 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, // a b c d e f g - 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, // h i j k l m n o - 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, // p q r s t u v w - 0x78, 0x79, 0x7A, // x y z - 0xE7, // 7B: 231 'ç' "Latin small letter C with cedilla" - 0xF7, // 7C: 247 '÷' "Division sign" - 0xD1, // 7D: 209 'Ñ' "Latin capital letter N with tilde" - 0xF1, // 7E: 241 'ñ' "Latin small letter N with tilde" - 0x25A0 // 7F: "Black Square" (NB: 2588 = Full Block) - }; + private static final int[] BASIC_CHARACTER_SET = + new int[] { + 0x20, + 0x21, + 0x22, + 0x23, + 0x24, + 0x25, + 0x26, + 0x27, // ! " # $ % & ' + 0x28, + 0x29, // ( ) + 0xE1, // 2A: 225 'á' "Latin small letter A with acute" + 0x2B, + 0x2C, + 0x2D, + 0x2E, + 0x2F, // + , - . / + 0x30, + 0x31, + 0x32, + 0x33, + 0x34, + 0x35, + 0x36, + 0x37, // 0 1 2 3 4 5 6 7 + 0x38, + 0x39, + 0x3A, + 0x3B, + 0x3C, + 0x3D, + 0x3E, + 0x3F, // 8 9 : ; < = > ? + 0x40, + 0x41, + 0x42, + 0x43, + 0x44, + 0x45, + 0x46, + 0x47, // @ A B C D E F G + 0x48, + 0x49, + 0x4A, + 0x4B, + 0x4C, + 0x4D, + 0x4E, + 0x4F, // H I J K L M N O + 0x50, + 0x51, + 0x52, + 0x53, + 0x54, + 0x55, + 0x56, + 0x57, // P Q R S T U V W + 0x58, + 0x59, + 0x5A, + 0x5B, // X Y Z [ + 0xE9, // 5C: 233 'é' "Latin small letter E with acute" + 0x5D, // ] + 0xED, // 5E: 237 'í' "Latin small letter I with acute" + 0xF3, // 5F: 243 'ó' "Latin small letter O with acute" + 0xFA, // 60: 250 'ú' "Latin small letter U with acute" + 0x61, + 0x62, + 0x63, + 0x64, + 0x65, + 0x66, + 0x67, // a b c d e f g + 0x68, + 0x69, + 0x6A, + 0x6B, + 0x6C, + 0x6D, + 0x6E, + 0x6F, // h i j k l m n o + 0x70, + 0x71, + 0x72, + 0x73, + 0x74, + 0x75, + 0x76, + 0x77, // p q r s t u v w + 0x78, + 0x79, + 0x7A, // x y z + 0xE7, // 7B: 231 'ç' "Latin small letter C with cedilla" + 0xF7, // 7C: 247 '÷' "Division sign" + 0xD1, // 7D: 209 'Ñ' "Latin capital letter N with tilde" + 0xF1, // 7E: 241 'ñ' "Latin small letter N with tilde" + 0x25A0 // 7F: "Black Square" (NB: 2588 = Full Block) + }; // Special North American 608 CC char set. 
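For orientation, the basic character table above is indexed by (char - 0x20), as its leading comment notes. The following is a minimal lookup sketch only; the helper name and the '\uFFFD' fallback for out-of-range bytes are assumptions, not part of this change.

  // Sketch: map a CEA-608 basic-set byte to its Unicode code point (table indexed by byte - 0x20).
  private static char basicChar(int ccByte) {
    int index = ccByte - 0x20;
    if (index < 0 || index >= BASIC_CHARACTER_SET.length) {
      return '\uFFFD'; // Assumed fallback; the decoder only routes printable bytes (>= 0x20) here.
    }
    return (char) BASIC_CHARACTER_SET[index];
  }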
- private static final int[] SPECIAL_CHARACTER_SET = new int[] { - 0xAE, // 30: 174 '®' "Registered Sign" - registered trademark symbol - 0xB0, // 31: 176 '°' "Degree Sign" - 0xBD, // 32: 189 '½' "Vulgar Fraction One Half" (1/2 symbol) - 0xBF, // 33: 191 '¿' "Inverted Question Mark" - 0x2122, // 34: "Trade Mark Sign" (tm superscript) - 0xA2, // 35: 162 '¢' "Cent Sign" - 0xA3, // 36: 163 '£' "Pound Sign" - pounds sterling - 0x266A, // 37: "Eighth Note" - music note - 0xE0, // 38: 224 'à' "Latin small letter A with grave" - 0x20, // 39: TRANSPARENT SPACE - for now use ordinary space - 0xE8, // 3A: 232 'è' "Latin small letter E with grave" - 0xE2, // 3B: 226 'â' "Latin small letter A with circumflex" - 0xEA, // 3C: 234 'ê' "Latin small letter E with circumflex" - 0xEE, // 3D: 238 'î' "Latin small letter I with circumflex" - 0xF4, // 3E: 244 'ô' "Latin small letter O with circumflex" - 0xFB // 3F: 251 'û' "Latin small letter U with circumflex" - }; + private static final int[] SPECIAL_CHARACTER_SET = + new int[] { + 0xAE, // 30: 174 '®' "Registered Sign" - registered trademark symbol + 0xB0, // 31: 176 '°' "Degree Sign" + 0xBD, // 32: 189 '½' "Vulgar Fraction One Half" (1/2 symbol) + 0xBF, // 33: 191 '¿' "Inverted Question Mark" + 0x2122, // 34: "Trade Mark Sign" (tm superscript) + 0xA2, // 35: 162 '¢' "Cent Sign" + 0xA3, // 36: 163 '£' "Pound Sign" - pounds sterling + 0x266A, // 37: "Eighth Note" - music note + 0xE0, // 38: 224 'à' "Latin small letter A with grave" + 0x20, // 39: TRANSPARENT SPACE - for now use ordinary space + 0xE8, // 3A: 232 'è' "Latin small letter E with grave" + 0xE2, // 3B: 226 'â' "Latin small letter A with circumflex" + 0xEA, // 3C: 234 'ê' "Latin small letter E with circumflex" + 0xEE, // 3D: 238 'î' "Latin small letter I with circumflex" + 0xF4, // 3E: 244 'ô' "Latin small letter O with circumflex" + 0xFB // 3F: 251 'û' "Latin small letter U with circumflex" + }; // Extended Spanish/Miscellaneous and French char set. - private static final int[] SPECIAL_ES_FR_CHARACTER_SET = new int[] { - // Spanish and misc. - 0xC1, 0xC9, 0xD3, 0xDA, 0xDC, 0xFC, 0x2018, 0xA1, - 0x2A, 0x27, 0x2014, 0xA9, 0x2120, 0x2022, 0x201C, 0x201D, - // French. - 0xC0, 0xC2, 0xC7, 0xC8, 0xCA, 0xCB, 0xEB, 0xCE, - 0xCF, 0xEF, 0xD4, 0xD9, 0xF9, 0xDB, 0xAB, 0xBB - }; + private static final int[] SPECIAL_ES_FR_CHARACTER_SET = + new int[] { + // Spanish and misc. + 0xC1, 0xC9, 0xD3, 0xDA, 0xDC, 0xFC, 0x2018, 0xA1, + 0x2A, 0x27, 0x2014, 0xA9, 0x2120, 0x2022, 0x201C, 0x201D, + // French. + 0xC0, 0xC2, 0xC7, 0xC8, 0xCA, 0xCB, 0xEB, 0xCE, + 0xCF, 0xEF, 0xD4, 0xD9, 0xF9, 0xDB, 0xAB, 0xBB + }; - //Extended Portuguese and German/Danish char set. - private static final int[] SPECIAL_PT_DE_CHARACTER_SET = new int[] { - // Portuguese. - 0xC3, 0xE3, 0xCD, 0xCC, 0xEC, 0xD2, 0xF2, 0xD5, - 0xF5, 0x7B, 0x7D, 0x5C, 0x5E, 0x5F, 0x7C, 0x7E, - // German/Danish. - 0xC4, 0xE4, 0xD6, 0xF6, 0xDF, 0xA5, 0xA4, 0x2502, - 0xC5, 0xE5, 0xD8, 0xF8, 0x250C, 0x2510, 0x2514, 0x2518 - }; + // Extended Portuguese and German/Danish char set. + private static final int[] SPECIAL_PT_DE_CHARACTER_SET = + new int[] { + // Portuguese. + 0xC3, 0xE3, 0xCD, 0xCC, 0xEC, 0xD2, 0xF2, 0xD5, + 0xF5, 0x7B, 0x7D, 0x5C, 0x5E, 0x5F, 0x7C, 0x7E, + // German/Danish. 
+ 0xC4, 0xE4, 0xD6, 0xF6, 0xDF, 0xA5, 0xA4, 0x2502, + 0xC5, 0xE5, 0xD8, 0xF8, 0x250C, 0x2510, 0x2514, 0x2518 + }; private static final boolean[] ODD_PARITY_BYTE_TABLE = { false, true, true, false, true, false, false, true, // 0 @@ -233,11 +323,12 @@ public final class Cea608Decoder extends CeaDecoder { private final int packetLength; private final int selectedField; private final int selectedChannel; + private final long validDataChannelTimeoutUs; private final ArrayList cueBuilders; private CueBuilder currentCueBuilder; - private List cues; - private List lastCues; + @Nullable private List cues; + @Nullable private List lastCues; private int captionMode; private int captionRowCount; @@ -253,11 +344,25 @@ public final class Cea608Decoder extends CeaDecoder { // service bytes and drops the rest. private boolean isInCaptionService; - public Cea608Decoder(String mimeType, int accessibilityChannel) { + private long lastCueUpdateUs; + + /** + * Constructs an instance. + * + * @param mimeType The MIME type of the CEA-608 data. + * @param accessibilityChannel The Accessibility channel, or {@link Format#NO_VALUE} if unknown. + * @param validDataChannelTimeoutMs The timeout (in milliseconds) permitted by ANSI/CTA-608-E + * R-2014 Annex C.9 to clear "stuck" captions where no removal control code is received. The + * timeout should be at least {@link #MIN_DATA_CHANNEL_TIMEOUT_MS} or {@link C#TIME_UNSET} for + * no timeout. + */ + public Cea608Decoder(String mimeType, int accessibilityChannel, long validDataChannelTimeoutMs) { ccData = new ParsableByteArray(); cueBuilders = new ArrayList<>(); currentCueBuilder = new CueBuilder(CC_MODE_UNKNOWN, DEFAULT_CAPTIONS_ROW_COUNT); currentChannel = NTSC_CC_CHANNEL_1; + this.validDataChannelTimeoutUs = + validDataChannelTimeoutMs > 0 ? validDataChannelTimeoutMs * 1000 : C.TIME_UNSET; packetLength = MimeTypes.APPLICATION_MP4CEA608.equals(mimeType) ? 
2 : 3; switch (accessibilityChannel) { case 1: @@ -285,6 +390,7 @@ public Cea608Decoder(String mimeType, int accessibilityChannel) { setCaptionMode(CC_MODE_UNKNOWN); resetCueBuilders(); isInCaptionService = true; + lastCueUpdateUs = C.TIME_UNSET; } @Override @@ -306,6 +412,7 @@ public void flush() { repeatableControlCc2 = 0; currentChannel = NTSC_CC_CHANNEL_1; isInCaptionService = true; + lastCueUpdateUs = C.TIME_UNSET; } @Override @@ -313,6 +420,26 @@ public void release() { // Do nothing } + @Nullable + @Override + public SubtitleOutputBuffer dequeueOutputBuffer() throws SubtitleDecoderException { + SubtitleOutputBuffer outputBuffer = super.dequeueOutputBuffer(); + if (outputBuffer != null) { + return outputBuffer; + } + if (shouldClearStuckCaptions()) { + outputBuffer = getAvailableOutputBuffer(); + if (outputBuffer != null) { + cues = Collections.emptyList(); + lastCueUpdateUs = C.TIME_UNSET; + Subtitle subtitle = createSubtitle(); + outputBuffer.setContent(getPositionUs(), subtitle, Format.OFFSET_SAMPLE_RELATIVE); + return outputBuffer; + } + } + return null; + } + @Override protected boolean isNewSubtitleDataAvailable() { return cues != lastCues; @@ -321,17 +448,18 @@ protected boolean isNewSubtitleDataAvailable() { @Override protected Subtitle createSubtitle() { lastCues = cues; - return new CeaSubtitle(cues); + return new CeaSubtitle(Assertions.checkNotNull(cues)); } @SuppressWarnings("ByteBufferBackingArray") @Override protected void decode(SubtitleInputBuffer inputBuffer) { - ccData.reset(inputBuffer.data.array(), inputBuffer.data.limit()); + ByteBuffer subtitleData = Assertions.checkNotNull(inputBuffer.data); + ccData.reset(subtitleData.array(), subtitleData.limit()); boolean captionDataProcessed = false; while (ccData.bytesLeft() >= packetLength) { - byte ccHeader = packetLength == 2 ? CC_IMPLICIT_DATA_HEADER - : (byte) ccData.readUnsignedByte(); + int ccHeader = packetLength == 2 ? CC_IMPLICIT_DATA_HEADER : ccData.readUnsignedByte(); + int ccByte1 = ccData.readUnsignedByte(); int ccByte2 = ccData.readUnsignedByte(); @@ -418,6 +546,7 @@ protected void decode(SubtitleInputBuffer inputBuffer) { if (captionDataProcessed) { if (captionMode == CC_MODE_ROLL_UP || captionMode == CC_MODE_PAINT_ON) { cues = getDisplayCues(); + lastCueUpdateUs = getPositionUs(); } } } @@ -572,22 +701,23 @@ private List getDisplayCues() { // preference, then middle alignment, then end alignment. @Cue.AnchorType int positionAnchor = Cue.ANCHOR_TYPE_END; int cueBuilderCount = cueBuilders.size(); - List cueBuilderCues = new ArrayList<>(cueBuilderCount); + List<@NullableType Cue> cueBuilderCues = new ArrayList<>(cueBuilderCount); for (int i = 0; i < cueBuilderCount; i++) { - Cue cue = cueBuilders.get(i).build(/* forcedPositionAnchor= */ Cue.TYPE_UNSET); + @Nullable Cue cue = cueBuilders.get(i).build(/* forcedPositionAnchor= */ Cue.TYPE_UNSET); cueBuilderCues.add(cue); if (cue != null) { - positionAnchor = Math.min(positionAnchor, cue.positionAnchor); + positionAnchor = min(positionAnchor, cue.positionAnchor); } } // Skip null cues and rebuild any that don't have the preferred alignment. List displayCues = new ArrayList<>(cueBuilderCount); for (int i = 0; i < cueBuilderCount; i++) { - Cue cue = cueBuilderCues.get(i); + @Nullable Cue cue = cueBuilderCues.get(i); if (cue != null) { if (cue.positionAnchor != positionAnchor) { - cue = cueBuilders.get(i).build(positionAnchor); + // The last time we built this cue it was non-null, it will be non-null this time too. 
+ cue = Assertions.checkNotNull(cueBuilders.get(i).build(positionAnchor)); } displayCues.add(cue); } @@ -614,7 +744,8 @@ private void setCaptionMode(int captionMode) { // Clear the working memory. resetCueBuilders(); - if (oldCaptionMode == CC_MODE_PAINT_ON || captionMode == CC_MODE_ROLL_UP + if (oldCaptionMode == CC_MODE_PAINT_ON + || captionMode == CC_MODE_ROLL_UP || captionMode == CC_MODE_UNKNOWN) { // When switching from paint-on or to roll-up or unknown, we also need to clear the caption. cues = Collections.emptyList(); @@ -741,11 +872,11 @@ private static boolean isXdsControlCode(byte cc1) { } private static boolean isServiceSwitchCommand(byte cc1) { - // cc1 - 0|0|0|1|C|1|0|0 - return (cc1 & 0xF7) == 0x14; + // cc1 - 0|0|0|1|C|1|0|F + return (cc1 & 0xF6) == 0x14; } - private static class CueBuilder { + private static final class CueBuilder { // 608 captions define a 15 row by 32 column screen grid. These constants convert from 608 // positions to normalized screen position. @@ -767,7 +898,7 @@ public CueBuilder(int captionMode, int captionRowCount) { rolledUpCaptions = new ArrayList<>(); captionStringBuilder = new StringBuilder(); reset(captionMode); - setCaptionRowCount(captionRowCount); + this.captionRowCount = captionRowCount; } public void reset(int captionMode) { @@ -816,28 +947,36 @@ public void backspace() { } public void append(char text) { - captionStringBuilder.append(text); + // Don't accept more than 32 chars. We'll trim further, considering indent & tabOffset, in + // build(). + if (captionStringBuilder.length() < SCREEN_CHARWIDTH) { + captionStringBuilder.append(text); + } } public void rollUp() { rolledUpCaptions.add(buildCurrentLine()); captionStringBuilder.setLength(0); cueStyles.clear(); - int numRows = Math.min(captionRowCount, row); + int numRows = min(captionRowCount, row); while (rolledUpCaptions.size() >= numRows) { rolledUpCaptions.remove(0); } } + @Nullable public Cue build(@Cue.AnchorType int forcedPositionAnchor) { + // The number of empty columns before the start of the text, in the range [0-31]. + int startPadding = indent + tabOffset; + int maxTextLength = SCREEN_CHARWIDTH - startPadding; SpannableStringBuilder cueString = new SpannableStringBuilder(); // Add any rolled up captions, separated by new lines. for (int i = 0; i < rolledUpCaptions.size(); i++) { - cueString.append(rolledUpCaptions.get(i)); + cueString.append(Util.truncateAscii(rolledUpCaptions.get(i), maxTextLength)); cueString.append('\n'); } // Add the current line. - cueString.append(buildCurrentLine()); + cueString.append(Util.truncateAscii(buildCurrentLine(), maxTextLength)); if (cueString.length() == 0) { // The cue is empty. @@ -845,8 +984,6 @@ public Cue build(@Cue.AnchorType int forcedPositionAnchor) { } int positionAnchor; - // The number of empty columns before the start of the text, in the range [0-31]. - int startPadding = indent + tabOffset; // The number of empty columns after the end of the text, in the same range. int endPadding = SCREEN_CHARWIDTH - startPadding - cueString.length(); int startEndPaddingDelta = startPadding - endPadding; @@ -884,31 +1021,29 @@ public Cue build(@Cue.AnchorType int forcedPositionAnchor) { break; } - int lineAnchor; int line; - // Note: Row indices are in the range [1-15]. - if (captionMode == CC_MODE_ROLL_UP || row > (BASE_ROW / 2)) { - lineAnchor = Cue.ANCHOR_TYPE_END; + // Note: Row indices are in the range [1-15], Cue.line counts from 0 (top) and -1 (bottom). + if (row > (BASE_ROW / 2)) { line = row - BASE_ROW; // Two line adjustments. 
The first is because line indices from the bottom of the window // start from -1 rather than 0. The second is a blank row to act as the safe area. line -= 2; } else { - lineAnchor = Cue.ANCHOR_TYPE_START; - // Line indices from the top of the window start from 0, but we want a blank row to act as - // the safe area. As a result no adjustment is necessary. - line = row; + // The `row` of roll-up cues positions the bottom line (even for cues shown in the top + // half of the screen), so we need to consider the number of rows in this cue. In + // non-roll-up, we don't need any further adjustments because we leave the first line + // (cue.line=0) blank to act as the safe area, so positioning row=1 at Cue.line=1 is + // correct. + line = captionMode == CC_MODE_ROLL_UP ? row - (captionRowCount - 1) : row; } - return new Cue( - cueString, - Alignment.ALIGN_NORMAL, - line, - Cue.LINE_TYPE_NUMBER, - lineAnchor, - position, - positionAnchor, - Cue.DIMEN_UNSET); + return new Cue.Builder() + .setText(cueString) + .setTextAlignment(Alignment.ALIGN_NORMAL) + .setLine(line, Cue.LINE_TYPE_NUMBER) + .setPosition(position) + .setPositionAnchor(positionAnchor) + .build(); } private SpannableString buildCurrentLine() { @@ -1006,9 +1141,15 @@ public CueStyle(int style, boolean underline, int start) { this.underline = underline; this.start = start; } - } - } + /** See ANSI/CTA-608-E R-2014 Annex C.9 for Caption Erase Logic. */ + private boolean shouldClearStuckCaptions() { + if (validDataChannelTimeoutUs == C.TIME_UNSET || lastCueUpdateUs == C.TIME_UNSET) { + return false; + } + long elapsedUs = getPositionUs() - lastCueUpdateUs; + return elapsedUs >= validDataChannelTimeoutUs; + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/Cea708Cue.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/Cea708Cue.java deleted file mode 100644 index e04094a8dc..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/Cea708Cue.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.text.cea; - -import android.text.Layout.Alignment; -import androidx.annotation.NonNull; -import com.google.android.exoplayer2.text.Cue; - -/** - * A {@link Cue} for CEA-708. - */ -/* package */ final class Cea708Cue extends Cue implements Comparable { - - /** - * The priority of the cue box. - */ - public final int priority; - - /** - * @param text See {@link #text}. - * @param textAlignment See {@link #textAlignment}. - * @param line See {@link #line}. - * @param lineType See {@link #lineType}. - * @param lineAnchor See {@link #lineAnchor}. - * @param position See {@link #position}. - * @param positionAnchor See {@link #positionAnchor}. - * @param size See {@link #size}. - * @param windowColorSet See {@link #windowColorSet}. - * @param windowColor See {@link #windowColor}. - * @param priority See (@link #priority}. 
- */ - public Cea708Cue(CharSequence text, Alignment textAlignment, float line, @LineType int lineType, - @AnchorType int lineAnchor, float position, @AnchorType int positionAnchor, float size, - boolean windowColorSet, int windowColor, int priority) { - super(text, textAlignment, line, lineType, lineAnchor, position, positionAnchor, size, - windowColorSet, windowColor); - this.priority = priority; - } - - @Override - public int compareTo(@NonNull Cea708Cue other) { - if (other.priority < priority) { - return -1; - } else if (other.priority > priority) { - return 1; - } - return 0; - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/Cea708Decoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/Cea708Decoder.java index 4391bc0bf0..e9a7be7015 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/Cea708Decoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/Cea708Decoder.java @@ -34,16 +34,18 @@ import com.google.android.exoplayer2.text.SubtitleDecoder; import com.google.android.exoplayer2.text.SubtitleInputBuffer; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.CodecSpecificDataUtil; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.ParsableBitArray; import com.google.android.exoplayer2.util.ParsableByteArray; +import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.List; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; -/** - * A {@link SubtitleDecoder} for CEA-708 (also known as "EIA-708"). - */ +/** A {@link SubtitleDecoder} for CEA-708 (also known as "EIA-708"). 
*/ public final class Cea708Decoder extends CeaDecoder { private static final String TAG = "Cea708Decoder"; @@ -55,62 +57,62 @@ public final class Cea708Decoder extends CeaDecoder { private static final int CC_VALID_FLAG = 0x04; // Base Commands - private static final int GROUP_C0_END = 0x1F; // Miscellaneous Control Codes - private static final int GROUP_G0_END = 0x7F; // ASCII Printable Characters - private static final int GROUP_C1_END = 0x9F; // Captioning Command Control Codes - private static final int GROUP_G1_END = 0xFF; // ISO 8859-1 LATIN-1 Character Set + private static final int GROUP_C0_END = 0x1F; // Miscellaneous Control Codes + private static final int GROUP_G0_END = 0x7F; // ASCII Printable Characters + private static final int GROUP_C1_END = 0x9F; // Captioning Command Control Codes + private static final int GROUP_G1_END = 0xFF; // ISO 8859-1 LATIN-1 Character Set // Extended Commands - private static final int GROUP_C2_END = 0x1F; // Extended Control Code Set 1 - private static final int GROUP_G2_END = 0x7F; // Extended Miscellaneous Characters - private static final int GROUP_C3_END = 0x9F; // Extended Control Code Set 2 - private static final int GROUP_G3_END = 0xFF; // Future Expansion + private static final int GROUP_C2_END = 0x1F; // Extended Control Code Set 1 + private static final int GROUP_G2_END = 0x7F; // Extended Miscellaneous Characters + private static final int GROUP_C3_END = 0x9F; // Extended Control Code Set 2 + private static final int GROUP_G3_END = 0xFF; // Future Expansion // Group C0 Commands - private static final int COMMAND_NUL = 0x00; // Nul - private static final int COMMAND_ETX = 0x03; // EndOfText - private static final int COMMAND_BS = 0x08; // Backspace - private static final int COMMAND_FF = 0x0C; // FormFeed (Flush) - private static final int COMMAND_CR = 0x0D; // CarriageReturn - private static final int COMMAND_HCR = 0x0E; // ClearLine - private static final int COMMAND_EXT1 = 0x10; // Extended Control Code Flag + private static final int COMMAND_NUL = 0x00; // Nul + private static final int COMMAND_ETX = 0x03; // EndOfText + private static final int COMMAND_BS = 0x08; // Backspace + private static final int COMMAND_FF = 0x0C; // FormFeed (Flush) + private static final int COMMAND_CR = 0x0D; // CarriageReturn + private static final int COMMAND_HCR = 0x0E; // ClearLine + private static final int COMMAND_EXT1 = 0x10; // Extended Control Code Flag private static final int COMMAND_EXT1_START = 0x11; private static final int COMMAND_EXT1_END = 0x17; private static final int COMMAND_P16_START = 0x18; private static final int COMMAND_P16_END = 0x1F; // Group C1 Commands - private static final int COMMAND_CW0 = 0x80; // SetCurrentWindow to 0 - private static final int COMMAND_CW1 = 0x81; // SetCurrentWindow to 1 - private static final int COMMAND_CW2 = 0x82; // SetCurrentWindow to 2 - private static final int COMMAND_CW3 = 0x83; // SetCurrentWindow to 3 - private static final int COMMAND_CW4 = 0x84; // SetCurrentWindow to 4 - private static final int COMMAND_CW5 = 0x85; // SetCurrentWindow to 5 - private static final int COMMAND_CW6 = 0x86; // SetCurrentWindow to 6 - private static final int COMMAND_CW7 = 0x87; // SetCurrentWindow to 7 - private static final int COMMAND_CLW = 0x88; // ClearWindows (+1 byte) - private static final int COMMAND_DSW = 0x89; // DisplayWindows (+1 byte) - private static final int COMMAND_HDW = 0x8A; // HideWindows (+1 byte) - private static final int COMMAND_TGW = 0x8B; // ToggleWindows (+1 byte) - private static 
final int COMMAND_DLW = 0x8C; // DeleteWindows (+1 byte) - private static final int COMMAND_DLY = 0x8D; // Delay (+1 byte) - private static final int COMMAND_DLC = 0x8E; // DelayCancel - private static final int COMMAND_RST = 0x8F; // Reset - private static final int COMMAND_SPA = 0x90; // SetPenAttributes (+2 bytes) - private static final int COMMAND_SPC = 0x91; // SetPenColor (+3 bytes) - private static final int COMMAND_SPL = 0x92; // SetPenLocation (+2 bytes) - private static final int COMMAND_SWA = 0x97; // SetWindowAttributes (+4 bytes) - private static final int COMMAND_DF0 = 0x98; // DefineWindow 0 (+6 bytes) - private static final int COMMAND_DF1 = 0x99; // DefineWindow 1 (+6 bytes) - private static final int COMMAND_DF2 = 0x9A; // DefineWindow 2 (+6 bytes) - private static final int COMMAND_DF3 = 0x9B; // DefineWindow 3 (+6 bytes) + private static final int COMMAND_CW0 = 0x80; // SetCurrentWindow to 0 + private static final int COMMAND_CW1 = 0x81; // SetCurrentWindow to 1 + private static final int COMMAND_CW2 = 0x82; // SetCurrentWindow to 2 + private static final int COMMAND_CW3 = 0x83; // SetCurrentWindow to 3 + private static final int COMMAND_CW4 = 0x84; // SetCurrentWindow to 4 + private static final int COMMAND_CW5 = 0x85; // SetCurrentWindow to 5 + private static final int COMMAND_CW6 = 0x86; // SetCurrentWindow to 6 + private static final int COMMAND_CW7 = 0x87; // SetCurrentWindow to 7 + private static final int COMMAND_CLW = 0x88; // ClearWindows (+1 byte) + private static final int COMMAND_DSW = 0x89; // DisplayWindows (+1 byte) + private static final int COMMAND_HDW = 0x8A; // HideWindows (+1 byte) + private static final int COMMAND_TGW = 0x8B; // ToggleWindows (+1 byte) + private static final int COMMAND_DLW = 0x8C; // DeleteWindows (+1 byte) + private static final int COMMAND_DLY = 0x8D; // Delay (+1 byte) + private static final int COMMAND_DLC = 0x8E; // DelayCancel + private static final int COMMAND_RST = 0x8F; // Reset + private static final int COMMAND_SPA = 0x90; // SetPenAttributes (+2 bytes) + private static final int COMMAND_SPC = 0x91; // SetPenColor (+3 bytes) + private static final int COMMAND_SPL = 0x92; // SetPenLocation (+2 bytes) + private static final int COMMAND_SWA = 0x97; // SetWindowAttributes (+4 bytes) + private static final int COMMAND_DF0 = 0x98; // DefineWindow 0 (+6 bytes) + private static final int COMMAND_DF1 = 0x99; // DefineWindow 1 (+6 bytes) + private static final int COMMAND_DF2 = 0x9A; // DefineWindow 2 (+6 bytes) + private static final int COMMAND_DF3 = 0x9B; // DefineWindow 3 (+6 bytes) private static final int COMMAND_DF4 = 0x9C; // DefineWindow 4 (+6 bytes) - private static final int COMMAND_DF5 = 0x9D; // DefineWindow 5 (+6 bytes) - private static final int COMMAND_DF6 = 0x9E; // DefineWindow 6 (+6 bytes) - private static final int COMMAND_DF7 = 0x9F; // DefineWindow 7 (+6 bytes) + private static final int COMMAND_DF5 = 0x9D; // DefineWindow 5 (+6 bytes) + private static final int COMMAND_DF6 = 0x9E; // DefineWindow 6 (+6 bytes) + private static final int COMMAND_DF7 = 0x9F; // DefineWindow 7 (+6 bytes) // G0 Table Special Chars - private static final int CHARACTER_MN = 0x7F; // MusicNote + private static final int CHARACTER_MN = 0x7F; // MusicNote // G2 Table Special Chars private static final int CHARACTER_TSP = 0x20; @@ -141,31 +143,37 @@ public final class Cea708Decoder extends CeaDecoder { private static final int CHARACTER_UPPER_LEFT_BORDER = 0x7F; private final ParsableByteArray ccData; - private final ParsableBitArray 
serviceBlockPacket; + private final ParsableBitArray captionChannelPacketData; + private int previousSequenceNumber; + // TODO: Use isWideAspectRatio in decoding. + @SuppressWarnings({"unused", "FieldCanBeLocal"}) + private final boolean isWideAspectRatio; private final int selectedServiceNumber; - private final CueBuilder[] cueBuilders; + private final CueInfoBuilder[] cueInfoBuilders; - private CueBuilder currentCueBuilder; - private List cues; - private List lastCues; + private CueInfoBuilder currentCueInfoBuilder; + @Nullable private List cues; + @Nullable private List lastCues; - private DtvCcPacket currentDtvCcPacket; + @Nullable private DtvCcPacket currentDtvCcPacket; private int currentWindow; - // TODO: Retrieve isWideAspectRatio from initializationData and use it. public Cea708Decoder(int accessibilityChannel, @Nullable List initializationData) { ccData = new ParsableByteArray(); - serviceBlockPacket = new ParsableBitArray(); + captionChannelPacketData = new ParsableBitArray(); + previousSequenceNumber = C.INDEX_UNSET; selectedServiceNumber = accessibilityChannel == Format.NO_VALUE ? 1 : accessibilityChannel; + isWideAspectRatio = + initializationData != null + && CodecSpecificDataUtil.parseCea708InitializationData(initializationData); - cueBuilders = new CueBuilder[NUM_WINDOWS]; + cueInfoBuilders = new CueInfoBuilder[NUM_WINDOWS]; for (int i = 0; i < NUM_WINDOWS; i++) { - cueBuilders[i] = new CueBuilder(); + cueInfoBuilders[i] = new CueInfoBuilder(); } - currentCueBuilder = cueBuilders[0]; - resetCueBuilders(); + currentCueInfoBuilder = cueInfoBuilders[0]; } @Override @@ -179,7 +187,7 @@ public void flush() { cues = null; lastCues = null; currentWindow = 0; - currentCueBuilder = cueBuilders[currentWindow]; + currentCueInfoBuilder = cueInfoBuilders[currentWindow]; resetCueBuilders(); currentDtvCcPacket = null; } @@ -192,15 +200,16 @@ protected boolean isNewSubtitleDataAvailable() { @Override protected Subtitle createSubtitle() { lastCues = cues; - return new CeaSubtitle(cues); + return new CeaSubtitle(Assertions.checkNotNull(cues)); } @Override protected void decode(SubtitleInputBuffer inputBuffer) { // Subtitle input buffers are non-direct and the position is zero, so calling array() is safe. + ByteBuffer subtitleData = Assertions.checkNotNull(inputBuffer.data); @SuppressWarnings("ByteBufferBackingArray") - byte[] inputBufferData = inputBuffer.data.array(); - ccData.reset(inputBufferData, inputBuffer.data.limit()); + byte[] inputBufferData = subtitleData.array(); + ccData.reset(inputBufferData, subtitleData.limit()); while (ccData.bytesLeft() >= 3) { int ccTypeAndValid = (ccData.readUnsignedByte() & 0x07); @@ -223,6 +232,18 @@ protected void decode(SubtitleInputBuffer inputBuffer) { finalizeCurrentPacket(); int sequenceNumber = (ccData1 & 0xC0) >> 6; // first 2 bits + if (previousSequenceNumber != C.INDEX_UNSET + && sequenceNumber != (previousSequenceNumber + 1) % 4) { + resetCueBuilders(); + Log.w( + TAG, + "Sequence number discontinuity. 
previous=" + + previousSequenceNumber + + " current=" + + sequenceNumber); + } + previousSequenceNumber = sequenceNumber; + int packetSize = ccData1 & 0x3F; // last 6 bits if (packetSize == 0) { packetSize = 64; @@ -259,37 +280,21 @@ private void finalizeCurrentPacket() { currentDtvCcPacket = null; } + @RequiresNonNull("currentDtvCcPacket") private void processCurrentPacket() { if (currentDtvCcPacket.currentIndex != (currentDtvCcPacket.packetSize * 2 - 1)) { - Log.w(TAG, "DtvCcPacket ended prematurely; size is " + (currentDtvCcPacket.packetSize * 2 - 1) - + ", but current index is " + currentDtvCcPacket.currentIndex + " (sequence number " - + currentDtvCcPacket.sequenceNumber + "); ignoring packet"); - return; - } - - serviceBlockPacket.reset(currentDtvCcPacket.packetData, currentDtvCcPacket.currentIndex); - - int serviceNumber = serviceBlockPacket.readBits(3); - int blockSize = serviceBlockPacket.readBits(5); - if (serviceNumber == 7) { - // extended service numbers - serviceBlockPacket.skipBits(2); - serviceNumber = serviceBlockPacket.readBits(6); - if (serviceNumber < 7) { - Log.w(TAG, "Invalid extended service number: " + serviceNumber); - } - } - - // Ignore packets in which blockSize is 0 - if (blockSize == 0) { - if (serviceNumber != 0) { - Log.w(TAG, "serviceNumber is non-zero (" + serviceNumber + ") when blockSize is 0"); - } - return; - } - - if (serviceNumber != selectedServiceNumber) { - return; + Log.d( + TAG, + "DtvCcPacket ended prematurely; size is " + + (currentDtvCcPacket.packetSize * 2 - 1) + + ", but current index is " + + currentDtvCcPacket.currentIndex + + " (sequence number " + + currentDtvCcPacket.sequenceNumber + + ");"); + // We've received cc_type=0x03 (packet start) before receiving packetSize byte pairs of data. + // This might indicate a byte pair has been lost, but we'll still attempt to process the data + // we have received. } // The cues should be updated if we receive a C0 ETX command, any C1 command, or if after @@ -297,39 +302,78 @@ private void processCurrentPacket() { // 8.10.4 for more details. boolean cuesNeedUpdate = false; - while (serviceBlockPacket.bitsLeft() > 0) { - int command = serviceBlockPacket.readBits(8); - if (command != COMMAND_EXT1) { - if (command <= GROUP_C0_END) { - handleC0Command(command); - // If the C0 command was an ETX command, the cues are updated in handleC0Command. - } else if (command <= GROUP_G0_END) { - handleG0Character(command); - cuesNeedUpdate = true; - } else if (command <= GROUP_C1_END) { - handleC1Command(command); - cuesNeedUpdate = true; - } else if (command <= GROUP_G1_END) { - handleG1Character(command); - cuesNeedUpdate = true; - } else { - Log.w(TAG, "Invalid base command: " + command); + // Streams with multiple embedded CC tracks (different language tracks) can be delivered + // in the same frame packet, so captionChannelPacketData can contain service blocks with + // different service numbers. + // + // We iterate over the full buffer until we find a null service block or until the buffer is + // exhausted. On each iteration we process a single service block. If the block has a service + // number different to the currently selected service, then we skip it and continue with the + // next service block. 
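For reference, the sequence-number check added to decode() above wraps modulo 4 (the field is only two bits), so 3 followed by 0 is a valid continuation, while any other jump logs a discontinuity and resets the cue builders. A minimal sketch of that rule; the helper name is illustrative and not part of this change.

  // Returns whether `current` directly follows `previous` in the 2-bit DTVCC sequence space.
  private static boolean isContiguousSequenceNumber(int previous, int current) {
    return previous == C.INDEX_UNSET || current == (previous + 1) % 4;
  }
  // e.g. isContiguousSequenceNumber(3, 0) == true (wrap-around);
  //      isContiguousSequenceNumber(1, 3) == false (a byte pair was likely dropped).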
+ captionChannelPacketData.reset(currentDtvCcPacket.packetData, currentDtvCcPacket.currentIndex); + while (captionChannelPacketData.bitsLeft() > 0) { + // Parse the Standard Service Block Header (see CEA-708B 6.2.1) + int serviceNumber = captionChannelPacketData.readBits(3); + int blockSize = captionChannelPacketData.readBits(5); + if (serviceNumber == 7) { + // Parse the Extended Service Block Header (see CEA-708B 6.2.2) + captionChannelPacketData.skipBits(2); + serviceNumber = captionChannelPacketData.readBits(6); + if (serviceNumber < 7) { + Log.w(TAG, "Invalid extended service number: " + serviceNumber); } - } else { - // Read the extended command - command = serviceBlockPacket.readBits(8); - if (command <= GROUP_C2_END) { - handleC2Command(command); - } else if (command <= GROUP_G2_END) { - handleG2Character(command); - cuesNeedUpdate = true; - } else if (command <= GROUP_C3_END) { - handleC3Command(command); - } else if (command <= GROUP_G3_END) { - handleG3Character(command); - cuesNeedUpdate = true; + } + + // Ignore packets with the Null Service Block Header (see CEA-708B 6.2.3) + if (blockSize == 0) { + if (serviceNumber != 0) { + Log.w(TAG, "serviceNumber is non-zero (" + serviceNumber + ") when blockSize is 0"); + } + break; + } + + if (serviceNumber != selectedServiceNumber) { + captionChannelPacketData.skipBytes(blockSize); + continue; + } + + // Process only the information for the current service block (there could be + // more data in the buffer, but it is not part of the current service block). + int endBlockPosition = captionChannelPacketData.getPosition() + (blockSize * 8); + while (captionChannelPacketData.getPosition() < endBlockPosition) { + int command = captionChannelPacketData.readBits(8); + if (command != COMMAND_EXT1) { + if (command <= GROUP_C0_END) { + handleC0Command(command); + // If the C0 command was an ETX command, the cues are updated in handleC0Command. + } else if (command <= GROUP_G0_END) { + handleG0Character(command); + cuesNeedUpdate = true; + } else if (command <= GROUP_C1_END) { + handleC1Command(command); + cuesNeedUpdate = true; + } else if (command <= GROUP_G1_END) { + handleG1Character(command); + cuesNeedUpdate = true; + } else { + Log.w(TAG, "Invalid base command: " + command); + } } else { - Log.w(TAG, "Invalid extended command: " + command); + // Read the extended command + command = captionChannelPacketData.readBits(8); + if (command <= GROUP_C2_END) { + handleC2Command(command); + } else if (command <= GROUP_G2_END) { + handleG2Character(command); + cuesNeedUpdate = true; + } else if (command <= GROUP_C3_END) { + handleC3Command(command); + } else if (command <= GROUP_G3_END) { + handleG3Character(command); + cuesNeedUpdate = true; + } else { + Log.w(TAG, "Invalid extended command: " + command); + } } } } @@ -348,13 +392,13 @@ private void handleC0Command(int command) { cues = getDisplayCues(); break; case COMMAND_BS: - currentCueBuilder.backspace(); + currentCueInfoBuilder.backspace(); break; case COMMAND_FF: resetCueBuilders(); break; case COMMAND_CR: - currentCueBuilder.append('\n'); + currentCueInfoBuilder.append('\n'); break; case COMMAND_HCR: // TODO: Add support for this command. 
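A hedged usage sketch for the constructor earlier in this file's diff: when accessibilityChannel is Format.NO_VALUE the decoder falls back to service number 1, otherwise the given channel decides which service blocks survive the skip logic above. The concrete values below are illustrative only.

  // Illustrative only: decode the secondary caption service (service number 2).
  Cea708Decoder secondaryDecoder =
      new Cea708Decoder(/* accessibilityChannel= */ 2, /* initializationData= */ null);

  // Unknown channel: Format.NO_VALUE selects the primary caption service (service number 1).
  Cea708Decoder primaryDecoder =
      new Cea708Decoder(/* accessibilityChannel= */ Format.NO_VALUE, /* initializationData= */ null);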
@@ -362,10 +406,10 @@ private void handleC0Command(int command) { default: if (command >= COMMAND_EXT1_START && command <= COMMAND_EXT1_END) { Log.w(TAG, "Currently unsupported COMMAND_EXT1 Command: " + command); - serviceBlockPacket.skipBits(8); + captionChannelPacketData.skipBits(8); } else if (command >= COMMAND_P16_START && command <= COMMAND_P16_END) { Log.w(TAG, "Currently unsupported COMMAND_P16 Command: " + command); - serviceBlockPacket.skipBits(16); + captionChannelPacketData.skipBits(16); } else { Log.w(TAG, "Invalid C0 command: " + command); } @@ -386,48 +430,48 @@ private void handleC1Command(int command) { window = (command - COMMAND_CW0); if (currentWindow != window) { currentWindow = window; - currentCueBuilder = cueBuilders[window]; + currentCueInfoBuilder = cueInfoBuilders[window]; } break; case COMMAND_CLW: for (int i = 1; i <= NUM_WINDOWS; i++) { - if (serviceBlockPacket.readBit()) { - cueBuilders[NUM_WINDOWS - i].clear(); + if (captionChannelPacketData.readBit()) { + cueInfoBuilders[NUM_WINDOWS - i].clear(); } } break; case COMMAND_DSW: for (int i = 1; i <= NUM_WINDOWS; i++) { - if (serviceBlockPacket.readBit()) { - cueBuilders[NUM_WINDOWS - i].setVisibility(true); + if (captionChannelPacketData.readBit()) { + cueInfoBuilders[NUM_WINDOWS - i].setVisibility(true); } } break; case COMMAND_HDW: for (int i = 1; i <= NUM_WINDOWS; i++) { - if (serviceBlockPacket.readBit()) { - cueBuilders[NUM_WINDOWS - i].setVisibility(false); + if (captionChannelPacketData.readBit()) { + cueInfoBuilders[NUM_WINDOWS - i].setVisibility(false); } } break; case COMMAND_TGW: for (int i = 1; i <= NUM_WINDOWS; i++) { - if (serviceBlockPacket.readBit()) { - CueBuilder cueBuilder = cueBuilders[NUM_WINDOWS - i]; - cueBuilder.setVisibility(!cueBuilder.isVisible()); + if (captionChannelPacketData.readBit()) { + CueInfoBuilder cueInfoBuilder = cueInfoBuilders[NUM_WINDOWS - i]; + cueInfoBuilder.setVisibility(!cueInfoBuilder.isVisible()); } } break; case COMMAND_DLW: for (int i = 1; i <= NUM_WINDOWS; i++) { - if (serviceBlockPacket.readBit()) { - cueBuilders[NUM_WINDOWS - i].reset(); + if (captionChannelPacketData.readBit()) { + cueInfoBuilders[NUM_WINDOWS - i].reset(); } } break; case COMMAND_DLY: // TODO: Add support for delay commands. - serviceBlockPacket.skipBits(8); + captionChannelPacketData.skipBits(8); break; case COMMAND_DLC: // TODO: Add support for delay commands. 
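The window-mask commands handled just above (CLW, DSW, HDW, TGW and DLW) each read one byte bit by bit; the most significant bit arrives first and addresses the highest-numbered window, which is why the loops index cueInfoBuilders[NUM_WINDOWS - i]. A small sketch of that mapping, with an assumed helper name.

  // Reads the one-byte window bitmap used by CLW/DSW/HDW/TGW/DLW; element w is true when
  // window w is addressed. The first bit read corresponds to window NUM_WINDOWS - 1.
  private boolean[] readWindowBitmap() {
    boolean[] affected = new boolean[NUM_WINDOWS];
    for (int i = 1; i <= NUM_WINDOWS; i++) {
      affected[NUM_WINDOWS - i] = captionChannelPacketData.readBit();
    }
    return affected;
  }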
@@ -436,33 +480,33 @@ private void handleC1Command(int command) { resetCueBuilders(); break; case COMMAND_SPA: - if (!currentCueBuilder.isDefined()) { + if (!currentCueInfoBuilder.isDefined()) { // ignore this command if the current window/cue isn't defined - serviceBlockPacket.skipBits(16); + captionChannelPacketData.skipBits(16); } else { handleSetPenAttributes(); } break; case COMMAND_SPC: - if (!currentCueBuilder.isDefined()) { + if (!currentCueInfoBuilder.isDefined()) { // ignore this command if the current window/cue isn't defined - serviceBlockPacket.skipBits(24); + captionChannelPacketData.skipBits(24); } else { handleSetPenColor(); } break; case COMMAND_SPL: - if (!currentCueBuilder.isDefined()) { + if (!currentCueInfoBuilder.isDefined()) { // ignore this command if the current window/cue isn't defined - serviceBlockPacket.skipBits(16); + captionChannelPacketData.skipBits(16); } else { handleSetPenLocation(); } break; case COMMAND_SWA: - if (!currentCueBuilder.isDefined()) { + if (!currentCueInfoBuilder.isDefined()) { // ignore this command if the current window/cue isn't defined - serviceBlockPacket.skipBits(32); + captionChannelPacketData.skipBits(32); } else { handleSetWindowAttributes(); } @@ -480,7 +524,7 @@ private void handleC1Command(int command) { // We also set the current window to the newly defined window. if (currentWindow != window) { currentWindow = window; - currentCueBuilder = cueBuilders[window]; + currentCueInfoBuilder = cueInfoBuilders[window]; } break; default: @@ -493,121 +537,121 @@ private void handleC2Command(int command) { if (command <= 0x07) { // Do nothing. } else if (command <= 0x0F) { - serviceBlockPacket.skipBits(8); + captionChannelPacketData.skipBits(8); } else if (command <= 0x17) { - serviceBlockPacket.skipBits(16); + captionChannelPacketData.skipBits(16); } else if (command <= 0x1F) { - serviceBlockPacket.skipBits(24); + captionChannelPacketData.skipBits(24); } } private void handleC3Command(int command) { // C3 Table doesn't contain any commands in CEA-708-B, but we do need to skip bytes if (command <= 0x87) { - serviceBlockPacket.skipBits(32); + captionChannelPacketData.skipBits(32); } else if (command <= 0x8F) { - serviceBlockPacket.skipBits(40); + captionChannelPacketData.skipBits(40); } else if (command <= 0x9F) { // 90-9F are variable length codes; the first byte defines the header with the first // 2 bits specifying the type and the last 6 bits specifying the remaining length of the // command in bytes - serviceBlockPacket.skipBits(2); - int length = serviceBlockPacket.readBits(6); - serviceBlockPacket.skipBits(8 * length); + captionChannelPacketData.skipBits(2); + int length = captionChannelPacketData.readBits(6); + captionChannelPacketData.skipBits(8 * length); } } private void handleG0Character(int characterCode) { if (characterCode == CHARACTER_MN) { - currentCueBuilder.append('\u266B'); + currentCueInfoBuilder.append('\u266B'); } else { - currentCueBuilder.append((char) (characterCode & 0xFF)); + currentCueInfoBuilder.append((char) (characterCode & 0xFF)); } } private void handleG1Character(int characterCode) { - currentCueBuilder.append((char) (characterCode & 0xFF)); + currentCueInfoBuilder.append((char) (characterCode & 0xFF)); } private void handleG2Character(int characterCode) { switch (characterCode) { case CHARACTER_TSP: - currentCueBuilder.append('\u0020'); + currentCueInfoBuilder.append('\u0020'); break; case CHARACTER_NBTSP: - currentCueBuilder.append('\u00A0'); + currentCueInfoBuilder.append('\u00A0'); break; case 
CHARACTER_ELLIPSIS: - currentCueBuilder.append('\u2026'); + currentCueInfoBuilder.append('\u2026'); break; case CHARACTER_BIG_CARONS: - currentCueBuilder.append('\u0160'); + currentCueInfoBuilder.append('\u0160'); break; case CHARACTER_BIG_OE: - currentCueBuilder.append('\u0152'); + currentCueInfoBuilder.append('\u0152'); break; case CHARACTER_SOLID_BLOCK: - currentCueBuilder.append('\u2588'); + currentCueInfoBuilder.append('\u2588'); break; case CHARACTER_OPEN_SINGLE_QUOTE: - currentCueBuilder.append('\u2018'); + currentCueInfoBuilder.append('\u2018'); break; case CHARACTER_CLOSE_SINGLE_QUOTE: - currentCueBuilder.append('\u2019'); + currentCueInfoBuilder.append('\u2019'); break; case CHARACTER_OPEN_DOUBLE_QUOTE: - currentCueBuilder.append('\u201C'); + currentCueInfoBuilder.append('\u201C'); break; case CHARACTER_CLOSE_DOUBLE_QUOTE: - currentCueBuilder.append('\u201D'); + currentCueInfoBuilder.append('\u201D'); break; case CHARACTER_BOLD_BULLET: - currentCueBuilder.append('\u2022'); + currentCueInfoBuilder.append('\u2022'); break; case CHARACTER_TM: - currentCueBuilder.append('\u2122'); + currentCueInfoBuilder.append('\u2122'); break; case CHARACTER_SMALL_CARONS: - currentCueBuilder.append('\u0161'); + currentCueInfoBuilder.append('\u0161'); break; case CHARACTER_SMALL_OE: - currentCueBuilder.append('\u0153'); + currentCueInfoBuilder.append('\u0153'); break; case CHARACTER_SM: - currentCueBuilder.append('\u2120'); + currentCueInfoBuilder.append('\u2120'); break; case CHARACTER_DIAERESIS_Y: - currentCueBuilder.append('\u0178'); + currentCueInfoBuilder.append('\u0178'); break; case CHARACTER_ONE_EIGHTH: - currentCueBuilder.append('\u215B'); + currentCueInfoBuilder.append('\u215B'); break; case CHARACTER_THREE_EIGHTHS: - currentCueBuilder.append('\u215C'); + currentCueInfoBuilder.append('\u215C'); break; case CHARACTER_FIVE_EIGHTHS: - currentCueBuilder.append('\u215D'); + currentCueInfoBuilder.append('\u215D'); break; case CHARACTER_SEVEN_EIGHTHS: - currentCueBuilder.append('\u215E'); + currentCueInfoBuilder.append('\u215E'); break; case CHARACTER_VERTICAL_BORDER: - currentCueBuilder.append('\u2502'); + currentCueInfoBuilder.append('\u2502'); break; case CHARACTER_UPPER_RIGHT_BORDER: - currentCueBuilder.append('\u2510'); + currentCueInfoBuilder.append('\u2510'); break; case CHARACTER_LOWER_LEFT_BORDER: - currentCueBuilder.append('\u2514'); + currentCueInfoBuilder.append('\u2514'); break; case CHARACTER_HORIZONTAL_BORDER: - currentCueBuilder.append('\u2500'); + currentCueInfoBuilder.append('\u2500'); break; case CHARACTER_LOWER_RIGHT_BORDER: - currentCueBuilder.append('\u2518'); + currentCueInfoBuilder.append('\u2518'); break; case CHARACTER_UPPER_LEFT_BORDER: - currentCueBuilder.append('\u250C'); + currentCueInfoBuilder.append('\u250C'); break; default: Log.w(TAG, "Invalid G2 character: " + characterCode); @@ -618,142 +662,166 @@ private void handleG2Character(int characterCode) { private void handleG3Character(int characterCode) { if (characterCode == 0xA0) { - currentCueBuilder.append('\u33C4'); + currentCueInfoBuilder.append('\u33C4'); } else { Log.w(TAG, "Invalid G3 character: " + characterCode); // Substitute any unsupported G3 character with an underscore as per CEA-708 specification. 
- currentCueBuilder.append('_'); + currentCueInfoBuilder.append('_'); } } private void handleSetPenAttributes() { // the SetPenAttributes command contains 2 bytes of data // first byte - int textTag = serviceBlockPacket.readBits(4); - int offset = serviceBlockPacket.readBits(2); - int penSize = serviceBlockPacket.readBits(2); + int textTag = captionChannelPacketData.readBits(4); + int offset = captionChannelPacketData.readBits(2); + int penSize = captionChannelPacketData.readBits(2); // second byte - boolean italicsToggle = serviceBlockPacket.readBit(); - boolean underlineToggle = serviceBlockPacket.readBit(); - int edgeType = serviceBlockPacket.readBits(3); - int fontStyle = serviceBlockPacket.readBits(3); + boolean italicsToggle = captionChannelPacketData.readBit(); + boolean underlineToggle = captionChannelPacketData.readBit(); + int edgeType = captionChannelPacketData.readBits(3); + int fontStyle = captionChannelPacketData.readBits(3); - currentCueBuilder.setPenAttributes(textTag, offset, penSize, italicsToggle, underlineToggle, - edgeType, fontStyle); + currentCueInfoBuilder.setPenAttributes( + textTag, offset, penSize, italicsToggle, underlineToggle, edgeType, fontStyle); } private void handleSetPenColor() { // the SetPenColor command contains 3 bytes of data // first byte - int foregroundO = serviceBlockPacket.readBits(2); - int foregroundR = serviceBlockPacket.readBits(2); - int foregroundG = serviceBlockPacket.readBits(2); - int foregroundB = serviceBlockPacket.readBits(2); - int foregroundColor = CueBuilder.getArgbColorFromCeaColor(foregroundR, foregroundG, foregroundB, - foregroundO); + int foregroundO = captionChannelPacketData.readBits(2); + int foregroundR = captionChannelPacketData.readBits(2); + int foregroundG = captionChannelPacketData.readBits(2); + int foregroundB = captionChannelPacketData.readBits(2); + int foregroundColor = + CueInfoBuilder.getArgbColorFromCeaColor(foregroundR, foregroundG, foregroundB, foregroundO); // second byte - int backgroundO = serviceBlockPacket.readBits(2); - int backgroundR = serviceBlockPacket.readBits(2); - int backgroundG = serviceBlockPacket.readBits(2); - int backgroundB = serviceBlockPacket.readBits(2); - int backgroundColor = CueBuilder.getArgbColorFromCeaColor(backgroundR, backgroundG, backgroundB, - backgroundO); + int backgroundO = captionChannelPacketData.readBits(2); + int backgroundR = captionChannelPacketData.readBits(2); + int backgroundG = captionChannelPacketData.readBits(2); + int backgroundB = captionChannelPacketData.readBits(2); + int backgroundColor = + CueInfoBuilder.getArgbColorFromCeaColor(backgroundR, backgroundG, backgroundB, backgroundO); // third byte - serviceBlockPacket.skipBits(2); // null padding - int edgeR = serviceBlockPacket.readBits(2); - int edgeG = serviceBlockPacket.readBits(2); - int edgeB = serviceBlockPacket.readBits(2); - int edgeColor = CueBuilder.getArgbColorFromCeaColor(edgeR, edgeG, edgeB); + captionChannelPacketData.skipBits(2); // null padding + int edgeR = captionChannelPacketData.readBits(2); + int edgeG = captionChannelPacketData.readBits(2); + int edgeB = captionChannelPacketData.readBits(2); + int edgeColor = CueInfoBuilder.getArgbColorFromCeaColor(edgeR, edgeG, edgeB); - currentCueBuilder.setPenColor(foregroundColor, backgroundColor, edgeColor); + currentCueInfoBuilder.setPenColor(foregroundColor, backgroundColor, edgeColor); } private void handleSetPenLocation() { // the SetPenLocation command contains 2 bytes of data // first byte - serviceBlockPacket.skipBits(4); - int row = 
serviceBlockPacket.readBits(4); + captionChannelPacketData.skipBits(4); + int row = captionChannelPacketData.readBits(4); // second byte - serviceBlockPacket.skipBits(2); - int column = serviceBlockPacket.readBits(6); + captionChannelPacketData.skipBits(2); + int column = captionChannelPacketData.readBits(6); - currentCueBuilder.setPenLocation(row, column); + currentCueInfoBuilder.setPenLocation(row, column); } private void handleSetWindowAttributes() { // the SetWindowAttributes command contains 4 bytes of data // first byte - int fillO = serviceBlockPacket.readBits(2); - int fillR = serviceBlockPacket.readBits(2); - int fillG = serviceBlockPacket.readBits(2); - int fillB = serviceBlockPacket.readBits(2); - int fillColor = CueBuilder.getArgbColorFromCeaColor(fillR, fillG, fillB, fillO); + int fillO = captionChannelPacketData.readBits(2); + int fillR = captionChannelPacketData.readBits(2); + int fillG = captionChannelPacketData.readBits(2); + int fillB = captionChannelPacketData.readBits(2); + int fillColor = CueInfoBuilder.getArgbColorFromCeaColor(fillR, fillG, fillB, fillO); // second byte - int borderType = serviceBlockPacket.readBits(2); // only the lower 2 bits of borderType - int borderR = serviceBlockPacket.readBits(2); - int borderG = serviceBlockPacket.readBits(2); - int borderB = serviceBlockPacket.readBits(2); - int borderColor = CueBuilder.getArgbColorFromCeaColor(borderR, borderG, borderB); + int borderType = captionChannelPacketData.readBits(2); // only the lower 2 bits of borderType + int borderR = captionChannelPacketData.readBits(2); + int borderG = captionChannelPacketData.readBits(2); + int borderB = captionChannelPacketData.readBits(2); + int borderColor = CueInfoBuilder.getArgbColorFromCeaColor(borderR, borderG, borderB); // third byte - if (serviceBlockPacket.readBit()) { + if (captionChannelPacketData.readBit()) { borderType |= 0x04; // set the top bit of the 3-bit borderType } - boolean wordWrapToggle = serviceBlockPacket.readBit(); - int printDirection = serviceBlockPacket.readBits(2); - int scrollDirection = serviceBlockPacket.readBits(2); - int justification = serviceBlockPacket.readBits(2); + boolean wordWrapToggle = captionChannelPacketData.readBit(); + int printDirection = captionChannelPacketData.readBits(2); + int scrollDirection = captionChannelPacketData.readBits(2); + int justification = captionChannelPacketData.readBits(2); // fourth byte // Note that we don't intend to support display effects - serviceBlockPacket.skipBits(8); // effectSpeed(4), effectDirection(2), displayEffect(2) - - currentCueBuilder.setWindowAttributes(fillColor, borderColor, wordWrapToggle, borderType, - printDirection, scrollDirection, justification); + captionChannelPacketData.skipBits(8); // effectSpeed(4), effectDirection(2), displayEffect(2) + + currentCueInfoBuilder.setWindowAttributes( + fillColor, + borderColor, + wordWrapToggle, + borderType, + printDirection, + scrollDirection, + justification); } private void handleDefineWindow(int window) { - CueBuilder cueBuilder = cueBuilders[window]; + CueInfoBuilder cueInfoBuilder = cueInfoBuilders[window]; // the DefineWindow command contains 6 bytes of data // first byte - serviceBlockPacket.skipBits(2); // null padding - boolean visible = serviceBlockPacket.readBit(); - boolean rowLock = serviceBlockPacket.readBit(); - boolean columnLock = serviceBlockPacket.readBit(); - int priority = serviceBlockPacket.readBits(3); + captionChannelPacketData.skipBits(2); // null padding + boolean visible = captionChannelPacketData.readBit(); + 
boolean rowLock = captionChannelPacketData.readBit(); + boolean columnLock = captionChannelPacketData.readBit(); + int priority = captionChannelPacketData.readBits(3); // second byte - boolean relativePositioning = serviceBlockPacket.readBit(); - int verticalAnchor = serviceBlockPacket.readBits(7); + boolean relativePositioning = captionChannelPacketData.readBit(); + int verticalAnchor = captionChannelPacketData.readBits(7); // third byte - int horizontalAnchor = serviceBlockPacket.readBits(8); + int horizontalAnchor = captionChannelPacketData.readBits(8); // fourth byte - int anchorId = serviceBlockPacket.readBits(4); - int rowCount = serviceBlockPacket.readBits(4); + int anchorId = captionChannelPacketData.readBits(4); + int rowCount = captionChannelPacketData.readBits(4); // fifth byte - serviceBlockPacket.skipBits(2); // null padding - int columnCount = serviceBlockPacket.readBits(6); + captionChannelPacketData.skipBits(2); // null padding + int columnCount = captionChannelPacketData.readBits(6); // sixth byte - serviceBlockPacket.skipBits(2); // null padding - int windowStyle = serviceBlockPacket.readBits(3); - int penStyle = serviceBlockPacket.readBits(3); - - cueBuilder.defineWindow(visible, rowLock, columnLock, priority, relativePositioning, - verticalAnchor, horizontalAnchor, rowCount, columnCount, anchorId, windowStyle, penStyle); + captionChannelPacketData.skipBits(2); // null padding + int windowStyle = captionChannelPacketData.readBits(3); + int penStyle = captionChannelPacketData.readBits(3); + + cueInfoBuilder.defineWindow( + visible, + rowLock, + columnLock, + priority, + relativePositioning, + verticalAnchor, + horizontalAnchor, + rowCount, + columnCount, + anchorId, + windowStyle, + penStyle); } private List getDisplayCues() { - List displayCues = new ArrayList<>(); + List displayCueInfos = new ArrayList<>(); for (int i = 0; i < NUM_WINDOWS; i++) { - if (!cueBuilders[i].isEmpty() && cueBuilders[i].isVisible()) { - displayCues.add(cueBuilders[i].build()); + if (!cueInfoBuilders[i].isEmpty() && cueInfoBuilders[i].isVisible()) { + @Nullable Cea708CueInfo cueInfo = cueInfoBuilders[i].build(); + if (cueInfo != null) { + displayCueInfos.add(cueInfo); + } } } - Collections.sort(displayCues); + Collections.sort(displayCueInfos, Cea708CueInfo.LEAST_IMPORTANT_FIRST); + List displayCues = new ArrayList<>(displayCueInfos.size()); + for (int i = 0; i < displayCueInfos.size(); i++) { + displayCues.add(displayCueInfos.get(i).cue); + } return Collections.unmodifiableList(displayCues); } private void resetCueBuilders() { for (int i = 0; i < NUM_WINDOWS; i++) { - cueBuilders[i].reset(); + cueInfoBuilders[i].reset(); } } @@ -771,12 +839,11 @@ public DtvCcPacket(int sequenceNumber, int packetSize) { packetData = new byte[2 * packetSize - 1]; currentIndex = 0; } - } - // TODO: There is a lot of overlap between Cea708Decoder.CueBuilder and Cea608Decoder.CueBuilder - // which could be refactored into a separate class. - private static final class CueBuilder { + // TODO: There is a lot of overlap between Cea708Decoder.CueInfoBuilder and + // Cea608Decoder.CueBuilder which could be refactored into a separate class. + private static final class CueInfoBuilder { private static final int RELATIVE_CUE_SIZE = 99; private static final int VERTICAL_SIZE = 74; @@ -818,45 +885,64 @@ private static final class CueBuilder { private static final int PEN_OFFSET_NORMAL = 1; // The window style properties are specified in the CEA-708 specification. 
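For context on the sort in getDisplayCues() above: the removed Cea708Cue.compareTo ordered cues so that larger CEA-708 priority values (less important windows) came first, and Cea708CueInfo.LEAST_IMPORTANT_FIRST is assumed to preserve that ordering. A sketch of an equivalent comparator, assuming Cea708CueInfo exposes the window priority as an int field.

  // Equivalent ordering to the old compareTo: descending by priority value, so the most
  // important window (lowest priority value) ends up last in the display list.
  Comparator<Cea708CueInfo> leastImportantFirst =
      (a, b) -> Integer.compare(b.priority, a.priority);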
- private static final int[] WINDOW_STYLE_JUSTIFICATION = new int[] { - JUSTIFICATION_LEFT, JUSTIFICATION_LEFT, JUSTIFICATION_LEFT, - JUSTIFICATION_LEFT, JUSTIFICATION_LEFT, JUSTIFICATION_CENTER, - JUSTIFICATION_LEFT - }; - private static final int[] WINDOW_STYLE_PRINT_DIRECTION = new int[] { - DIRECTION_LEFT_TO_RIGHT, DIRECTION_LEFT_TO_RIGHT, DIRECTION_LEFT_TO_RIGHT, - DIRECTION_LEFT_TO_RIGHT, DIRECTION_LEFT_TO_RIGHT, DIRECTION_LEFT_TO_RIGHT, - DIRECTION_TOP_TO_BOTTOM - }; - private static final int[] WINDOW_STYLE_SCROLL_DIRECTION = new int[] { - DIRECTION_BOTTOM_TO_TOP, DIRECTION_BOTTOM_TO_TOP, DIRECTION_BOTTOM_TO_TOP, - DIRECTION_BOTTOM_TO_TOP, DIRECTION_BOTTOM_TO_TOP, DIRECTION_BOTTOM_TO_TOP, - DIRECTION_RIGHT_TO_LEFT - }; - private static final boolean[] WINDOW_STYLE_WORD_WRAP = new boolean[] { - false, false, false, true, true, true, false - }; - private static final int[] WINDOW_STYLE_FILL = new int[] { - COLOR_SOLID_BLACK, COLOR_TRANSPARENT, COLOR_SOLID_BLACK, COLOR_SOLID_BLACK, - COLOR_TRANSPARENT, COLOR_SOLID_BLACK, COLOR_SOLID_BLACK - }; + private static final int[] WINDOW_STYLE_JUSTIFICATION = + new int[] { + JUSTIFICATION_LEFT, JUSTIFICATION_LEFT, JUSTIFICATION_LEFT, + JUSTIFICATION_LEFT, JUSTIFICATION_LEFT, JUSTIFICATION_CENTER, + JUSTIFICATION_LEFT + }; + private static final int[] WINDOW_STYLE_PRINT_DIRECTION = + new int[] { + DIRECTION_LEFT_TO_RIGHT, DIRECTION_LEFT_TO_RIGHT, DIRECTION_LEFT_TO_RIGHT, + DIRECTION_LEFT_TO_RIGHT, DIRECTION_LEFT_TO_RIGHT, DIRECTION_LEFT_TO_RIGHT, + DIRECTION_TOP_TO_BOTTOM + }; + private static final int[] WINDOW_STYLE_SCROLL_DIRECTION = + new int[] { + DIRECTION_BOTTOM_TO_TOP, DIRECTION_BOTTOM_TO_TOP, DIRECTION_BOTTOM_TO_TOP, + DIRECTION_BOTTOM_TO_TOP, DIRECTION_BOTTOM_TO_TOP, DIRECTION_BOTTOM_TO_TOP, + DIRECTION_RIGHT_TO_LEFT + }; + private static final boolean[] WINDOW_STYLE_WORD_WRAP = + new boolean[] {false, false, false, true, true, true, false}; + private static final int[] WINDOW_STYLE_FILL = + new int[] { + COLOR_SOLID_BLACK, + COLOR_TRANSPARENT, + COLOR_SOLID_BLACK, + COLOR_SOLID_BLACK, + COLOR_TRANSPARENT, + COLOR_SOLID_BLACK, + COLOR_SOLID_BLACK + }; // The pen style properties are specified in the CEA-708 specification. 
- private static final int[] PEN_STYLE_FONT_STYLE = new int[] { - PEN_FONT_STYLE_DEFAULT, PEN_FONT_STYLE_MONOSPACED_WITH_SERIFS, - PEN_FONT_STYLE_PROPORTIONALLY_SPACED_WITH_SERIFS, PEN_FONT_STYLE_MONOSPACED_WITHOUT_SERIFS, - PEN_FONT_STYLE_PROPORTIONALLY_SPACED_WITHOUT_SERIFS, - PEN_FONT_STYLE_MONOSPACED_WITHOUT_SERIFS, - PEN_FONT_STYLE_PROPORTIONALLY_SPACED_WITHOUT_SERIFS - }; - private static final int[] PEN_STYLE_EDGE_TYPE = new int[] { - BORDER_AND_EDGE_TYPE_NONE, BORDER_AND_EDGE_TYPE_NONE, BORDER_AND_EDGE_TYPE_NONE, - BORDER_AND_EDGE_TYPE_NONE, BORDER_AND_EDGE_TYPE_NONE, BORDER_AND_EDGE_TYPE_UNIFORM, - BORDER_AND_EDGE_TYPE_UNIFORM - }; - private static final int[] PEN_STYLE_BACKGROUND = new int[] { - COLOR_SOLID_BLACK, COLOR_SOLID_BLACK, COLOR_SOLID_BLACK, COLOR_SOLID_BLACK, - COLOR_SOLID_BLACK, COLOR_TRANSPARENT, COLOR_TRANSPARENT}; + private static final int[] PEN_STYLE_FONT_STYLE = + new int[] { + PEN_FONT_STYLE_DEFAULT, + PEN_FONT_STYLE_MONOSPACED_WITH_SERIFS, + PEN_FONT_STYLE_PROPORTIONALLY_SPACED_WITH_SERIFS, + PEN_FONT_STYLE_MONOSPACED_WITHOUT_SERIFS, + PEN_FONT_STYLE_PROPORTIONALLY_SPACED_WITHOUT_SERIFS, + PEN_FONT_STYLE_MONOSPACED_WITHOUT_SERIFS, + PEN_FONT_STYLE_PROPORTIONALLY_SPACED_WITHOUT_SERIFS + }; + private static final int[] PEN_STYLE_EDGE_TYPE = + new int[] { + BORDER_AND_EDGE_TYPE_NONE, BORDER_AND_EDGE_TYPE_NONE, BORDER_AND_EDGE_TYPE_NONE, + BORDER_AND_EDGE_TYPE_NONE, BORDER_AND_EDGE_TYPE_NONE, BORDER_AND_EDGE_TYPE_UNIFORM, + BORDER_AND_EDGE_TYPE_UNIFORM + }; + private static final int[] PEN_STYLE_BACKGROUND = + new int[] { + COLOR_SOLID_BLACK, + COLOR_SOLID_BLACK, + COLOR_SOLID_BLACK, + COLOR_SOLID_BLACK, + COLOR_SOLID_BLACK, + COLOR_TRANSPARENT, + COLOR_TRANSPARENT + }; private final List rolledUpCaptions; private final SpannableStringBuilder captionStringBuilder; @@ -885,7 +971,7 @@ private static final class CueBuilder { private int backgroundColor; private int row; - public CueBuilder() { + public CueInfoBuilder() { rolledUpCaptions = new ArrayList<>(); captionStringBuilder = new SpannableStringBuilder(); reset(); @@ -938,9 +1024,19 @@ public boolean isVisible() { return visible; } - public void defineWindow(boolean visible, boolean rowLock, boolean columnLock, int priority, - boolean relativePositioning, int verticalAnchor, int horizontalAnchor, int rowCount, - int columnCount, int anchorId, int windowStyleId, int penStyleId) { + public void defineWindow( + boolean visible, + boolean rowLock, + boolean columnLock, + int priority, + boolean relativePositioning, + int verticalAnchor, + int horizontalAnchor, + int rowCount, + int columnCount, + int anchorId, + int windowStyleId, + int penStyleId) { this.defined = true; this.visible = visible; this.rowLock = rowLock; @@ -968,8 +1064,11 @@ public void defineWindow(boolean visible, boolean rowLock, boolean columnLock, i // windowStyleId is 1-based. int windowStyleIdIndex = windowStyleId - 1; // Note that Border type and border color are the same for all window styles. 
- setWindowAttributes(WINDOW_STYLE_FILL[windowStyleIdIndex], COLOR_TRANSPARENT, - WINDOW_STYLE_WORD_WRAP[windowStyleIdIndex], BORDER_AND_EDGE_TYPE_NONE, + setWindowAttributes( + WINDOW_STYLE_FILL[windowStyleIdIndex], + COLOR_TRANSPARENT, + WINDOW_STYLE_WORD_WRAP[windowStyleIdIndex], + BORDER_AND_EDGE_TYPE_NONE, WINDOW_STYLE_PRINT_DIRECTION[windowStyleIdIndex], WINDOW_STYLE_SCROLL_DIRECTION[windowStyleIdIndex], WINDOW_STYLE_JUSTIFICATION[windowStyleIdIndex]); @@ -981,34 +1080,53 @@ public void defineWindow(boolean visible, boolean rowLock, boolean columnLock, i int penStyleIdIndex = penStyleId - 1; // Note that pen size, offset, italics, underline, foreground color, and foreground // opacity are the same for all pen styles. - setPenAttributes(0, PEN_OFFSET_NORMAL, PEN_SIZE_STANDARD, false, false, - PEN_STYLE_EDGE_TYPE[penStyleIdIndex], PEN_STYLE_FONT_STYLE[penStyleIdIndex]); + setPenAttributes( + 0, + PEN_OFFSET_NORMAL, + PEN_SIZE_STANDARD, + false, + false, + PEN_STYLE_EDGE_TYPE[penStyleIdIndex], + PEN_STYLE_FONT_STYLE[penStyleIdIndex]); setPenColor(COLOR_SOLID_WHITE, PEN_STYLE_BACKGROUND[penStyleIdIndex], COLOR_SOLID_BLACK); } } - - public void setWindowAttributes(int fillColor, int borderColor, boolean wordWrapToggle, - int borderType, int printDirection, int scrollDirection, int justification) { + public void setWindowAttributes( + int fillColor, + int borderColor, + boolean wordWrapToggle, + int borderType, + int printDirection, + int scrollDirection, + int justification) { this.windowFillColor = fillColor; // TODO: Add support for border color and types. // TODO: Add support for word wrap. // TODO: Add support for other scroll directions. // TODO: Add support for other print directions. this.justification = justification; - } - public void setPenAttributes(int textTag, int offset, int penSize, boolean italicsToggle, - boolean underlineToggle, int edgeType, int fontStyle) { + public void setPenAttributes( + int textTag, + int offset, + int penSize, + boolean italicsToggle, + boolean underlineToggle, + int edgeType, + int fontStyle) { // TODO: Add support for text tags. // TODO: Add support for other offsets. // TODO: Add support for other pen sizes. 
if (italicsStartPosition != C.POSITION_UNSET) { if (!italicsToggle) { - captionStringBuilder.setSpan(new StyleSpan(Typeface.ITALIC), italicsStartPosition, - captionStringBuilder.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + captionStringBuilder.setSpan( + new StyleSpan(Typeface.ITALIC), + italicsStartPosition, + captionStringBuilder.length(), + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); italicsStartPosition = C.POSITION_UNSET; } } else if (italicsToggle) { @@ -1017,8 +1135,11 @@ public void setPenAttributes(int textTag, int offset, int penSize, boolean itali if (underlineStartPosition != C.POSITION_UNSET) { if (!underlineToggle) { - captionStringBuilder.setSpan(new UnderlineSpan(), underlineStartPosition, - captionStringBuilder.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + captionStringBuilder.setSpan( + new UnderlineSpan(), + underlineStartPosition, + captionStringBuilder.length(), + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); underlineStartPosition = C.POSITION_UNSET; } } else if (underlineToggle) { @@ -1032,8 +1153,10 @@ public void setPenAttributes(int textTag, int offset, int penSize, boolean itali public void setPenColor(int foregroundColor, int backgroundColor, int edgeColor) { if (foregroundColorStartPosition != C.POSITION_UNSET) { if (this.foregroundColor != foregroundColor) { - captionStringBuilder.setSpan(new ForegroundColorSpan(this.foregroundColor), - foregroundColorStartPosition, captionStringBuilder.length(), + captionStringBuilder.setSpan( + new ForegroundColorSpan(this.foregroundColor), + foregroundColorStartPosition, + captionStringBuilder.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } } @@ -1044,8 +1167,10 @@ public void setPenColor(int foregroundColor, int backgroundColor, int edgeColor) if (backgroundColorStartPosition != C.POSITION_UNSET) { if (this.backgroundColor != backgroundColor) { - captionStringBuilder.setSpan(new BackgroundColorSpan(this.backgroundColor), - backgroundColorStartPosition, captionStringBuilder.length(), + captionStringBuilder.setSpan( + new BackgroundColorSpan(this.backgroundColor), + backgroundColorStartPosition, + captionStringBuilder.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } } @@ -1111,30 +1236,43 @@ public SpannableString buildSpannableString() { if (length > 0) { if (italicsStartPosition != C.POSITION_UNSET) { - spannableStringBuilder.setSpan(new StyleSpan(Typeface.ITALIC), italicsStartPosition, - length, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + spannableStringBuilder.setSpan( + new StyleSpan(Typeface.ITALIC), + italicsStartPosition, + length, + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } if (underlineStartPosition != C.POSITION_UNSET) { - spannableStringBuilder.setSpan(new UnderlineSpan(), underlineStartPosition, - length, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + spannableStringBuilder.setSpan( + new UnderlineSpan(), + underlineStartPosition, + length, + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } if (foregroundColorStartPosition != C.POSITION_UNSET) { - spannableStringBuilder.setSpan(new ForegroundColorSpan(foregroundColor), - foregroundColorStartPosition, length, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + spannableStringBuilder.setSpan( + new ForegroundColorSpan(foregroundColor), + foregroundColorStartPosition, + length, + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } if (backgroundColorStartPosition != C.POSITION_UNSET) { - spannableStringBuilder.setSpan(new BackgroundColorSpan(backgroundColor), - backgroundColorStartPosition, length, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + spannableStringBuilder.setSpan( + new BackgroundColorSpan(backgroundColor), + backgroundColorStartPosition, + 
length, + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } } return new SpannableString(spannableStringBuilder); } - public Cea708Cue build() { + @Nullable + public Cea708CueInfo build() { if (isEmpty()) { // The cue is empty. return null; @@ -1190,18 +1328,18 @@ public Cea708Cue build() { // | | // 6-----7-----8 @AnchorType int verticalAnchorType; - if (anchorId % 3 == 0) { + if (anchorId / 3 == 0) { verticalAnchorType = Cue.ANCHOR_TYPE_START; - } else if (anchorId % 3 == 1) { + } else if (anchorId / 3 == 1) { verticalAnchorType = Cue.ANCHOR_TYPE_MIDDLE; } else { verticalAnchorType = Cue.ANCHOR_TYPE_END; } // TODO: Add support for right-to-left languages (i.e. where start is on the right). @AnchorType int horizontalAnchorType; - if (anchorId / 3 == 0) { + if (anchorId % 3 == 0) { horizontalAnchorType = Cue.ANCHOR_TYPE_START; - } else if (anchorId / 3 == 1) { + } else if (anchorId % 3 == 1) { horizontalAnchorType = Cue.ANCHOR_TYPE_MIDDLE; } else { horizontalAnchorType = Cue.ANCHOR_TYPE_END; @@ -1209,8 +1347,17 @@ public Cea708Cue build() { boolean windowColorSet = (windowFillColor != COLOR_SOLID_BLACK); - return new Cea708Cue(cueString, alignment, line, Cue.LINE_TYPE_FRACTION, verticalAnchorType, - position, horizontalAnchorType, Cue.DIMEN_UNSET, windowColorSet, windowFillColor, + return new Cea708CueInfo( + cueString, + alignment, + line, + Cue.LINE_TYPE_FRACTION, + verticalAnchorType, + position, + horizontalAnchorType, + Cue.DIMEN_UNSET, + windowColorSet, + windowFillColor, priority); } @@ -1244,12 +1391,70 @@ public static int getArgbColorFromCeaColor(int red, int green, int blue, int opa // TODO: Add support for the Alternative Minimum Color List or the full 64 RGB combinations. // Return values based on the Minimum Color List - return Color.argb(alpha, - (red > 1 ? 255 : 0), - (green > 1 ? 255 : 0), - (blue > 1 ? 255 : 0)); + return Color.argb(alpha, (red > 1 ? 255 : 0), (green > 1 ? 255 : 0), (blue > 1 ? 255 : 0)); } - } + /** A {@link Cue} for CEA-708. */ + private static final class Cea708CueInfo { + + /** + * Sorts cue infos in order of ascending {@link Cea708CueInfo#priority} (which is descending by + * numeric value). + */ + private static final Comparator LEAST_IMPORTANT_FIRST = + (thisInfo, thatInfo) -> Integer.compare(thatInfo.priority, thisInfo.priority); + + public final Cue cue; + + /** + * The priority of the cue box. Low values are higher priority. + * + *
<p>If cue boxes overlap, higher priority cue boxes are drawn on top. + * + * <p>
      See 8.4.2 of the CEA-708B spec. + */ + public final int priority; + + /** + * @param text See {@link Cue#text}. + * @param textAlignment See {@link Cue#textAlignment}. + * @param line See {@link Cue#line}. + * @param lineType See {@link Cue#lineType}. + * @param lineAnchor See {@link Cue#lineAnchor}. + * @param position See {@link Cue#position}. + * @param positionAnchor See {@link Cue#positionAnchor}. + * @param size See {@link Cue#size}. + * @param windowColorSet See {@link Cue#windowColorSet}. + * @param windowColor See {@link Cue#windowColor}. + * @param priority See {@link #priority}. + */ + public Cea708CueInfo( + CharSequence text, + Alignment textAlignment, + float line, + @Cue.LineType int lineType, + @AnchorType int lineAnchor, + float position, + @AnchorType int positionAnchor, + float size, + boolean windowColorSet, + int windowColor, + int priority) { + Cue.Builder cueBuilder = + new Cue.Builder() + .setText(text) + .setTextAlignment(textAlignment) + .setLine(line, lineType) + .setLineAnchor(lineAnchor) + .setPosition(position) + .setPositionAnchor(positionAnchor) + .setSize(size); + if (windowColorSet) { + cueBuilder.setWindowColor(windowColor); + } + this.cue = cueBuilder.build(); + this.priority = priority; + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/Cea708InitializationData.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/Cea708InitializationData.java deleted file mode 100644 index 10bed14adc..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/Cea708InitializationData.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (C) 2018 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.text.cea; - -import java.util.Collections; -import java.util.List; - -/** Initialization data for CEA-708 decoders. */ -public final class Cea708InitializationData { - - /** - * Whether the closed caption service is formatted for displays with 16:9 aspect ratio. If false, - * the closed caption service is formatted for 4:3 displays. - */ - public final boolean isWideAspectRatio; - - private Cea708InitializationData(List initializationData) { - isWideAspectRatio = initializationData.get(0)[0] != 0; - } - - /** - * Returns an object representation of CEA-708 initialization data - * - * @param initializationData Binary CEA-708 initialization data. - * @return The object representation. - */ - public static Cea708InitializationData fromData(List initializationData) { - return new Cea708InitializationData(initializationData); - } - - /** - * Builds binary CEA-708 initialization data. - * - * @param isWideAspectRatio Whether the closed caption service is formatted for displays with 16:9 - * aspect ratio. - * @return Binary CEA-708 initializaton data. - */ - public static List buildData(boolean isWideAspectRatio) { - return Collections.singletonList(new byte[] {(byte) (isWideAspectRatio ? 
1 : 0)}); - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/CeaDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/CeaDecoder.java index ce9da9f5d5..4db023cc9c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/CeaDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/CeaDecoder.java @@ -15,7 +15,7 @@ */ package com.google.android.exoplayer2.text.cea; -import androidx.annotation.NonNull; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.text.Subtitle; @@ -24,12 +24,11 @@ import com.google.android.exoplayer2.text.SubtitleInputBuffer; import com.google.android.exoplayer2.text.SubtitleOutputBuffer; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; import java.util.ArrayDeque; import java.util.PriorityQueue; -/** - * Base class for subtitle parsers for CEA captions. - */ +/** Base class for subtitle parsers for CEA captions. */ /* package */ abstract class CeaDecoder implements SubtitleDecoder { private static final int NUM_INPUT_BUFFERS = 10; @@ -39,10 +38,11 @@ private final ArrayDeque availableOutputBuffers; private final PriorityQueue queuedInputBuffers; - private CeaInputBuffer dequeuedInputBuffer; + @Nullable private CeaInputBuffer dequeuedInputBuffer; private long playbackPositionUs; private long queuedInputBufferCount; + @SuppressWarnings("nullness:methodref.receiver.bound") public CeaDecoder() { availableInputBuffers = new ArrayDeque<>(); for (int i = 0; i < NUM_INPUT_BUFFERS; i++) { @@ -50,7 +50,7 @@ public CeaDecoder() { } availableOutputBuffers = new ArrayDeque<>(); for (int i = 0; i < NUM_OUTPUT_BUFFERS; i++) { - availableOutputBuffers.add(new CeaOutputBuffer()); + availableOutputBuffers.add(new CeaOutputBuffer(this::releaseOutputBuffer)); } queuedInputBuffers = new PriorityQueue<>(); } @@ -64,6 +64,7 @@ public void setPositionUs(long positionUs) { } @Override + @Nullable public SubtitleInputBuffer dequeueInputBuffer() throws SubtitleDecoderException { Assertions.checkState(dequeuedInputBuffer == null); if (availableInputBuffers.isEmpty()) { @@ -76,33 +77,32 @@ public SubtitleInputBuffer dequeueInputBuffer() throws SubtitleDecoderException @Override public void queueInputBuffer(SubtitleInputBuffer inputBuffer) throws SubtitleDecoderException { Assertions.checkArgument(inputBuffer == dequeuedInputBuffer); - if (inputBuffer.isDecodeOnly()) { - // We can drop this buffer early (i.e. before it would be decoded) as the CEA formats allow - // for decoding to begin mid-stream. - releaseInputBuffer(dequeuedInputBuffer); + CeaInputBuffer ceaInputBuffer = (CeaInputBuffer) inputBuffer; + if (ceaInputBuffer.isDecodeOnly()) { + // We can start decoding anywhere in CEA formats, so discarding on the input side is fine. 
+ releaseInputBuffer(ceaInputBuffer); } else { - dequeuedInputBuffer.queuedInputBufferCount = queuedInputBufferCount++; - queuedInputBuffers.add(dequeuedInputBuffer); + ceaInputBuffer.queuedInputBufferCount = queuedInputBufferCount++; + queuedInputBuffers.add(ceaInputBuffer); } dequeuedInputBuffer = null; } @Override + @Nullable public SubtitleOutputBuffer dequeueOutputBuffer() throws SubtitleDecoderException { if (availableOutputBuffers.isEmpty()) { return null; } - // iterate through all available input buffers whose timestamps are less than or equal - // to the current playback position; processing input buffers for future content should - // be deferred until they would be applicable + // Process input buffers up to the current playback position. Processing of input buffers for + // future content is deferred. while (!queuedInputBuffers.isEmpty() - && queuedInputBuffers.peek().timeUs <= playbackPositionUs) { - CeaInputBuffer inputBuffer = queuedInputBuffers.poll(); + && Util.castNonNull(queuedInputBuffers.peek()).timeUs <= playbackPositionUs) { + CeaInputBuffer inputBuffer = Util.castNonNull(queuedInputBuffers.poll()); - // If the input buffer indicates we've reached the end of the stream, we can - // return immediately with an output buffer propagating that if (inputBuffer.isEndOfStream()) { - SubtitleOutputBuffer outputBuffer = availableOutputBuffers.pollFirst(); + // availableOutputBuffers.isEmpty() is checked at the top of the method, so this is safe. + SubtitleOutputBuffer outputBuffer = Util.castNonNull(availableOutputBuffers.pollFirst()); outputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM); releaseInputBuffer(inputBuffer); return outputBuffer; @@ -110,22 +110,17 @@ public SubtitleOutputBuffer dequeueOutputBuffer() throws SubtitleDecoderExceptio decode(inputBuffer); - // check if we have any caption updates to report if (isNewSubtitleDataAvailable()) { - // Even if the subtitle is decode-only; we need to generate it to consume the data so it - // isn't accidentally prepended to the next subtitle Subtitle subtitle = createSubtitle(); - if (!inputBuffer.isDecodeOnly()) { - SubtitleOutputBuffer outputBuffer = availableOutputBuffers.pollFirst(); - outputBuffer.setContent(inputBuffer.timeUs, subtitle, Format.OFFSET_SAMPLE_RELATIVE); - releaseInputBuffer(inputBuffer); - return outputBuffer; - } + // availableOutputBuffers.isEmpty() is checked at the top of the method, so this is safe. + SubtitleOutputBuffer outputBuffer = Util.castNonNull(availableOutputBuffers.pollFirst()); + outputBuffer.setContent(inputBuffer.timeUs, subtitle, Format.OFFSET_SAMPLE_RELATIVE); + releaseInputBuffer(inputBuffer); + return outputBuffer; } releaseInputBuffer(inputBuffer); } - return null; } @@ -144,7 +139,7 @@ public void flush() { queuedInputBufferCount = 0; playbackPositionUs = 0; while (!queuedInputBuffers.isEmpty()) { - releaseInputBuffer(queuedInputBuffers.poll()); + releaseInputBuffer(Util.castNonNull(queuedInputBuffers.poll())); } if (dequeuedInputBuffer != null) { releaseInputBuffer(dequeuedInputBuffer); @@ -154,17 +149,13 @@ public void flush() { @Override public void release() { - // Do nothing + // Do nothing. } - /** - * Returns whether there is data available to create a new {@link Subtitle}. - */ + /** Returns whether there is data available to create a new {@link Subtitle}. */ protected abstract boolean isNewSubtitleDataAvailable(); - /** - * Creates a {@link Subtitle} from the available data. - */ + /** Creates a {@link Subtitle} from the available data. 
*/ protected abstract Subtitle createSubtitle(); /** @@ -173,13 +164,22 @@ public void release() { */ protected abstract void decode(SubtitleInputBuffer inputBuffer); + @Nullable + protected final SubtitleOutputBuffer getAvailableOutputBuffer() { + return availableOutputBuffers.pollFirst(); + } + + protected final long getPositionUs() { + return playbackPositionUs; + } + private static final class CeaInputBuffer extends SubtitleInputBuffer implements Comparable { private long queuedInputBufferCount; @Override - public int compareTo(@NonNull CeaInputBuffer other) { + public int compareTo(CeaInputBuffer other) { if (isEndOfStream() != other.isEndOfStream()) { return isEndOfStream() ? 1 : -1; } @@ -194,11 +194,17 @@ public int compareTo(@NonNull CeaInputBuffer other) { } } - private final class CeaOutputBuffer extends SubtitleOutputBuffer { + private static final class CeaOutputBuffer extends SubtitleOutputBuffer { + + private Owner owner; + + public CeaOutputBuffer(Owner owner) { + this.owner = owner; + } @Override public final void release() { - releaseOutputBuffer(this); + owner.releaseOutputBuffer(this); } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/CeaSubtitle.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/CeaSubtitle.java index 738f251e27..5650bebb46 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/CeaSubtitle.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/cea/CeaSubtitle.java @@ -22,9 +22,7 @@ import java.util.Collections; import java.util.List; -/** - * A representation of a CEA subtitle. - */ +/** A representation of a CEA subtitle. */ /* package */ final class CeaSubtitle implements Subtitle { private final List cues; @@ -56,5 +54,4 @@ public long getEventTime(int index) { public List getCues(long timeUs) { return timeUs >= 0 ? cues : Collections.emptyList(); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/dvb/DvbDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/dvb/DvbDecoder.java index 22ce893fce..0e98a2e7eb 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/dvb/DvbDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/dvb/DvbDecoder.java @@ -26,9 +26,9 @@ public final class DvbDecoder extends SimpleSubtitleDecoder { private final DvbParser parser; /** - * @param initializationData The initialization data for the decoder. The initialization data - * must consist of a single byte array containing 5 bytes: flag_pes_stripped (1), - * composition_page (2), ancillary_page (2). + * @param initializationData The initialization data for the decoder. The initialization data must + * consist of a single byte array containing 5 bytes: flag_pes_stripped (1), composition_page + * (2), ancillary_page (2). 
*/ public DvbDecoder(List initializationData) { super("DvbDecoder"); @@ -45,5 +45,4 @@ protected Subtitle decode(byte[] data, int length, boolean reset) { } return new DvbSubtitle(parser.decode(data, length)); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/dvb/DvbParser.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/dvb/DvbParser.java index 8382d9d9d0..1d5ec8454c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/dvb/DvbParser.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/dvb/DvbParser.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.text.dvb; +import static java.lang.Math.min; + import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Color; @@ -32,9 +34,7 @@ import java.util.List; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * Parses {@link Cue}s from a DVB subtitle bitstream. - */ +/** Parses {@link Cue}s from a DVB subtitle bitstream. */ /* package */ final class DvbParser { private static final String TAG = "DvbParser"; @@ -70,15 +70,14 @@ private static final int DATA_TYPE_END_LINE = 0xF0; // Clut mapping tables, as defined by ETSI EN 300 743 10.4, 10.5, 10.6 - private static final byte[] defaultMap2To4 = { - (byte) 0x00, (byte) 0x07, (byte) 0x08, (byte) 0x0F}; - private static final byte[] defaultMap2To8 = { - (byte) 0x00, (byte) 0x77, (byte) 0x88, (byte) 0xFF}; + private static final byte[] defaultMap2To4 = {(byte) 0x00, (byte) 0x07, (byte) 0x08, (byte) 0x0F}; + private static final byte[] defaultMap2To8 = {(byte) 0x00, (byte) 0x77, (byte) 0x88, (byte) 0xFF}; private static final byte[] defaultMap4To8 = { - (byte) 0x00, (byte) 0x11, (byte) 0x22, (byte) 0x33, - (byte) 0x44, (byte) 0x55, (byte) 0x66, (byte) 0x77, - (byte) 0x88, (byte) 0x99, (byte) 0xAA, (byte) 0xBB, - (byte) 0xCC, (byte) 0xDD, (byte) 0xEE, (byte) 0xFF}; + (byte) 0x00, (byte) 0x11, (byte) 0x22, (byte) 0x33, + (byte) 0x44, (byte) 0x55, (byte) 0x66, (byte) 0x77, + (byte) 0x88, (byte) 0x99, (byte) 0xAA, (byte) 0xBB, + (byte) 0xCC, (byte) 0xDD, (byte) 0xEE, (byte) 0xFF + }; private final Paint defaultPaint; private final Paint fillRegionPaint; @@ -87,7 +86,7 @@ private final ClutDefinition defaultClutDefinition; private final SubtitleService subtitleService; - @MonotonicNonNull private Bitmap bitmap; + private @MonotonicNonNull Bitmap bitmap; /** * Construct an instance for the given subtitle and ancillary page ids. @@ -106,14 +105,16 @@ public DvbParser(int subtitlePageId, int ancillaryPageId) { fillRegionPaint.setPathEffect(null); canvas = new Canvas(); defaultDisplayDefinition = new DisplayDefinition(719, 575, 0, 719, 0, 575); - defaultClutDefinition = new ClutDefinition(0, generateDefault2BitClutEntries(), - generateDefault4BitClutEntries(), generateDefault8BitClutEntries()); + defaultClutDefinition = + new ClutDefinition( + 0, + generateDefault2BitClutEntries(), + generateDefault4BitClutEntries(), + generateDefault8BitClutEntries()); subtitleService = new SubtitleService(subtitlePageId, ancillaryPageId); } - /** - * Resets the parser. - */ + /** Resets the parser. */ public void reset() { subtitleService.reset(); } @@ -139,12 +140,16 @@ public List decode(byte[] data, int limit) { } // Update the canvas bitmap if necessary. - DisplayDefinition displayDefinition = subtitleService.displayDefinition != null - ? 
subtitleService.displayDefinition : defaultDisplayDefinition; - if (bitmap == null || displayDefinition.width + 1 != bitmap.getWidth() + DisplayDefinition displayDefinition = + subtitleService.displayDefinition != null + ? subtitleService.displayDefinition + : defaultDisplayDefinition; + if (bitmap == null + || displayDefinition.width + 1 != bitmap.getWidth() || displayDefinition.height + 1 != bitmap.getHeight()) { - bitmap = Bitmap.createBitmap(displayDefinition.width + 1, displayDefinition.height + 1, - Bitmap.Config.ARGB_8888); + bitmap = + Bitmap.createBitmap( + displayDefinition.width + 1, displayDefinition.height + 1, Bitmap.Config.ARGB_8888); canvas.setBitmap(bitmap); } @@ -159,14 +164,18 @@ public List decode(byte[] data, int limit) { RegionComposition regionComposition = subtitleService.regions.get(regionId); // Clip drawing to the current region and display definition window. - int baseHorizontalAddress = pageRegion.horizontalAddress - + displayDefinition.horizontalPositionMinimum; - int baseVerticalAddress = pageRegion.verticalAddress - + displayDefinition.verticalPositionMinimum; - int clipRight = Math.min(baseHorizontalAddress + regionComposition.width, - displayDefinition.horizontalPositionMaximum); - int clipBottom = Math.min(baseVerticalAddress + regionComposition.height, - displayDefinition.verticalPositionMaximum); + int baseHorizontalAddress = + pageRegion.horizontalAddress + displayDefinition.horizontalPositionMinimum; + int baseVerticalAddress = + pageRegion.verticalAddress + displayDefinition.verticalPositionMinimum; + int clipRight = + min( + baseHorizontalAddress + regionComposition.width, + displayDefinition.horizontalPositionMaximum); + int clipBottom = + min( + baseVerticalAddress + regionComposition.height, + displayDefinition.verticalPositionMaximum); canvas.clipRect(baseHorizontalAddress, baseVerticalAddress, clipRight, clipBottom); ClutDefinition clutDefinition = subtitleService.cluts.get(regionComposition.clutId); if (clutDefinition == null) { @@ -186,9 +195,14 @@ public List decode(byte[] data, int limit) { } if (objectData != null) { @Nullable Paint paint = objectData.nonModifyingColorFlag ? 
null : defaultPaint; - paintPixelDataSubBlocks(objectData, clutDefinition, regionComposition.depth, + paintPixelDataSubBlocks( + objectData, + clutDefinition, + regionComposition.depth, baseHorizontalAddress + regionObject.horizontalPosition, - baseVerticalAddress + regionObject.verticalPosition, paint, canvas); + baseVerticalAddress + regionObject.verticalPosition, + paint, + canvas); } } @@ -202,18 +216,31 @@ public List decode(byte[] data, int limit) { color = clutDefinition.clutEntries2Bit[regionComposition.pixelCode2Bit]; } fillRegionPaint.setColor(color); - canvas.drawRect(baseHorizontalAddress, baseVerticalAddress, + canvas.drawRect( + baseHorizontalAddress, + baseVerticalAddress, baseHorizontalAddress + regionComposition.width, baseVerticalAddress + regionComposition.height, fillRegionPaint); } - Bitmap cueBitmap = Bitmap.createBitmap(bitmap, baseHorizontalAddress, baseVerticalAddress, - regionComposition.width, regionComposition.height); - cues.add(new Cue(cueBitmap, (float) baseHorizontalAddress / displayDefinition.width, - Cue.ANCHOR_TYPE_START, (float) baseVerticalAddress / displayDefinition.height, - Cue.ANCHOR_TYPE_START, (float) regionComposition.width / displayDefinition.width, - (float) regionComposition.height / displayDefinition.height)); + cues.add( + new Cue.Builder() + .setBitmap( + Bitmap.createBitmap( + bitmap, + baseHorizontalAddress, + baseVerticalAddress, + regionComposition.width, + regionComposition.height)) + .setPosition((float) baseHorizontalAddress / displayDefinition.width) + .setPositionAnchor(Cue.ANCHOR_TYPE_START) + .setLine( + (float) baseVerticalAddress / displayDefinition.height, Cue.LINE_TYPE_FRACTION) + .setLineAnchor(Cue.ANCHOR_TYPE_START) + .setSize((float) regionComposition.width / displayDefinition.width) + .setBitmapHeight((float) regionComposition.height / displayDefinition.height) + .build()); canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR); // Restore clean clipping state. @@ -227,8 +254,8 @@ public List decode(byte[] data, int limit) { /** * Parses a subtitling segment, as defined by ETSI EN 300 743 7.2 - *
- * The {@link SubtitleService} is updated with the parsed segment data. + * + * <p>
      The {@link SubtitleService} is updated with the parsed segment data. */ private static void parseSubtitlingSegment(ParsableBitArray data, SubtitleService service) { int segmentType = data.readBits(8); @@ -304,9 +331,7 @@ private static void parseSubtitlingSegment(ParsableBitArray data, SubtitleServic data.skipBytes(dataFieldLimit - data.getBytePosition()); } - /** - * Parses a display definition segment, as defined by ETSI EN 300 743 7.2.1. - */ + /** Parses a display definition segment, as defined by ETSI EN 300 743 7.2.1. */ private static DisplayDefinition parseDisplayDefinition(ParsableBitArray data) { data.skipBits(4); // dds_version_number (4). boolean displayWindowFlag = data.readBit(); @@ -330,13 +355,16 @@ private static DisplayDefinition parseDisplayDefinition(ParsableBitArray data) { verticalPositionMaximum = height; } - return new DisplayDefinition(width, height, horizontalPositionMinimum, - horizontalPositionMaximum, verticalPositionMinimum, verticalPositionMaximum); + return new DisplayDefinition( + width, + height, + horizontalPositionMinimum, + horizontalPositionMaximum, + verticalPositionMinimum, + verticalPositionMaximum); } - /** - * Parses a page composition segment, as defined by ETSI EN 300 743 7.2.2. - */ + /** Parses a page composition segment, as defined by ETSI EN 300 743 7.2.2. */ private static PageComposition parsePageComposition(ParsableBitArray data, int length) { int timeoutSecs = data.readBits(8); int version = data.readBits(4); @@ -357,9 +385,7 @@ private static PageComposition parsePageComposition(ParsableBitArray data, int l return new PageComposition(timeoutSecs, version, state, regions); } - /** - * Parses a region composition segment, as defined by ETSI EN 300 743 7.2.3. - */ + /** Parses a region composition segment, as defined by ETSI EN 300 743 7.2.3. */ private static RegionComposition parseRegionComposition(ParsableBitArray data, int length) { int id = data.readBits(8); data.skipBits(4); // Skip region_version_number @@ -395,18 +421,32 @@ private static RegionComposition parseRegionComposition(ParsableBitArray data, i remainingLength -= 2; } - regionObjects.put(objectId, new RegionObject(objectType, objectProvider, - objectHorizontalPosition, objectVerticalPosition, foregroundPixelCode, - backgroundPixelCode)); + regionObjects.put( + objectId, + new RegionObject( + objectType, + objectProvider, + objectHorizontalPosition, + objectVerticalPosition, + foregroundPixelCode, + backgroundPixelCode)); } - return new RegionComposition(id, fillFlag, width, height, levelOfCompatibility, depth, clutId, - pixelCode8Bit, pixelCode4Bit, pixelCode2Bit, regionObjects); + return new RegionComposition( + id, + fillFlag, + width, + height, + levelOfCompatibility, + depth, + clutId, + pixelCode8Bit, + pixelCode4Bit, + pixelCode2Bit, + regionObjects); } - /** - * Parses a CLUT definition segment, as defined by ETSI EN 300 743 7.2.4. - */ + /** Parses a CLUT definition segment, as defined by ETSI EN 300 743 7.2.4. 
*/ private static ClutDefinition parseClutDefinition(ParsableBitArray data, int length) { int clutId = data.readBits(8); data.skipBits(8); // Skip clut_version_number (4), reserved (4) @@ -458,8 +498,12 @@ private static ClutDefinition parseClutDefinition(ParsableBitArray data, int len int r = (int) (y + (1.40200 * (cr - 128))); int g = (int) (y - (0.34414 * (cb - 128)) - (0.71414 * (cr - 128))); int b = (int) (y + (1.77200 * (cb - 128))); - clutEntries[entryId] = getColor(a, Util.constrainValue(r, 0, 255), - Util.constrainValue(g, 0, 255), Util.constrainValue(b, 0, 255)); + clutEntries[entryId] = + getColor( + a, + Util.constrainValue(r, 0, 255), + Util.constrainValue(g, 0, 255), + Util.constrainValue(b, 0, 255)); } return new ClutDefinition(clutId, clutEntries2Bit, clutEntries4Bit, clutEntries8Bit); @@ -477,8 +521,8 @@ private static ObjectData parseObjectData(ParsableBitArray data) { boolean nonModifyingColorFlag = data.readBit(); data.skipBits(1); // Skip reserved. - @Nullable byte[] topFieldData = null; - @Nullable byte[] bottomFieldData = null; + byte[] topFieldData = Util.EMPTY_BYTE_ARRAY; + byte[] bottomFieldData = Util.EMPTY_BYTE_ARRAY; if (objectCodingMethod == OBJECT_CODING_STRING) { int numberOfCodes = data.readBits(8); @@ -516,17 +560,19 @@ private static int[] generateDefault4BitClutEntries() { entries[0] = 0x00000000; for (int i = 1; i < entries.length; i++) { if (i < 8) { - entries[i] = getColor( - 0xFF, - ((i & 0x01) != 0 ? 0xFF : 0x00), - ((i & 0x02) != 0 ? 0xFF : 0x00), - ((i & 0x04) != 0 ? 0xFF : 0x00)); + entries[i] = + getColor( + 0xFF, + ((i & 0x01) != 0 ? 0xFF : 0x00), + ((i & 0x02) != 0 ? 0xFF : 0x00), + ((i & 0x04) != 0 ? 0xFF : 0x00)); } else { - entries[i] = getColor( - 0xFF, - ((i & 0x01) != 0 ? 0x7F : 0x00), - ((i & 0x02) != 0 ? 0x7F : 0x00), - ((i & 0x04) != 0 ? 0x7F : 0x00)); + entries[i] = + getColor( + 0xFF, + ((i & 0x01) != 0 ? 0x7F : 0x00), + ((i & 0x02) != 0 ? 0x7F : 0x00), + ((i & 0x04) != 0 ? 0x7F : 0x00)); } } return entries; @@ -537,40 +583,45 @@ private static int[] generateDefault8BitClutEntries() { entries[0] = 0x00000000; for (int i = 0; i < entries.length; i++) { if (i < 8) { - entries[i] = getColor( - 0x3F, - ((i & 0x01) != 0 ? 0xFF : 0x00), - ((i & 0x02) != 0 ? 0xFF : 0x00), - ((i & 0x04) != 0 ? 0xFF : 0x00)); + entries[i] = + getColor( + 0x3F, + ((i & 0x01) != 0 ? 0xFF : 0x00), + ((i & 0x02) != 0 ? 0xFF : 0x00), + ((i & 0x04) != 0 ? 0xFF : 0x00)); } else { switch (i & 0x88) { case 0x00: - entries[i] = getColor( - 0xFF, - (((i & 0x01) != 0 ? 0x55 : 0x00) + ((i & 0x10) != 0 ? 0xAA : 0x00)), - (((i & 0x02) != 0 ? 0x55 : 0x00) + ((i & 0x20) != 0 ? 0xAA : 0x00)), - (((i & 0x04) != 0 ? 0x55 : 0x00) + ((i & 0x40) != 0 ? 0xAA : 0x00))); + entries[i] = + getColor( + 0xFF, + (((i & 0x01) != 0 ? 0x55 : 0x00) + ((i & 0x10) != 0 ? 0xAA : 0x00)), + (((i & 0x02) != 0 ? 0x55 : 0x00) + ((i & 0x20) != 0 ? 0xAA : 0x00)), + (((i & 0x04) != 0 ? 0x55 : 0x00) + ((i & 0x40) != 0 ? 0xAA : 0x00))); break; case 0x08: - entries[i] = getColor( - 0x7F, - (((i & 0x01) != 0 ? 0x55 : 0x00) + ((i & 0x10) != 0 ? 0xAA : 0x00)), - (((i & 0x02) != 0 ? 0x55 : 0x00) + ((i & 0x20) != 0 ? 0xAA : 0x00)), - (((i & 0x04) != 0 ? 0x55 : 0x00) + ((i & 0x40) != 0 ? 0xAA : 0x00))); + entries[i] = + getColor( + 0x7F, + (((i & 0x01) != 0 ? 0x55 : 0x00) + ((i & 0x10) != 0 ? 0xAA : 0x00)), + (((i & 0x02) != 0 ? 0x55 : 0x00) + ((i & 0x20) != 0 ? 0xAA : 0x00)), + (((i & 0x04) != 0 ? 0x55 : 0x00) + ((i & 0x40) != 0 ? 
0xAA : 0x00))); break; case 0x80: - entries[i] = getColor( - 0xFF, - (127 + ((i & 0x01) != 0 ? 0x2B : 0x00) + ((i & 0x10) != 0 ? 0x55 : 0x00)), - (127 + ((i & 0x02) != 0 ? 0x2B : 0x00) + ((i & 0x20) != 0 ? 0x55 : 0x00)), - (127 + ((i & 0x04) != 0 ? 0x2B : 0x00) + ((i & 0x40) != 0 ? 0x55 : 0x00))); + entries[i] = + getColor( + 0xFF, + (127 + ((i & 0x01) != 0 ? 0x2B : 0x00) + ((i & 0x10) != 0 ? 0x55 : 0x00)), + (127 + ((i & 0x02) != 0 ? 0x2B : 0x00) + ((i & 0x20) != 0 ? 0x55 : 0x00)), + (127 + ((i & 0x04) != 0 ? 0x2B : 0x00) + ((i & 0x40) != 0 ? 0x55 : 0x00))); break; case 0x88: - entries[i] = getColor( - 0xFF, - (((i & 0x01) != 0 ? 0x2B : 0x00) + ((i & 0x10) != 0 ? 0x55 : 0x00)), - (((i & 0x02) != 0 ? 0x2B : 0x00) + ((i & 0x20) != 0 ? 0x55 : 0x00)), - (((i & 0x04) != 0 ? 0x2B : 0x00) + ((i & 0x40) != 0 ? 0x55 : 0x00))); + entries[i] = + getColor( + 0xFF, + (((i & 0x01) != 0 ? 0x2B : 0x00) + ((i & 0x10) != 0 ? 0x55 : 0x00)), + (((i & 0x02) != 0 ? 0x2B : 0x00) + ((i & 0x20) != 0 ? 0x55 : 0x00)), + (((i & 0x04) != 0 ? 0x2B : 0x00) + ((i & 0x40) != 0 ? 0x55 : 0x00))); break; } } @@ -601,10 +652,22 @@ private static void paintPixelDataSubBlocks( } else { clutEntries = clutDefinition.clutEntries2Bit; } - paintPixelDataSubBlock(objectData.topFieldData, clutEntries, regionDepth, horizontalAddress, - verticalAddress, paint, canvas); - paintPixelDataSubBlock(objectData.bottomFieldData, clutEntries, regionDepth, horizontalAddress, - verticalAddress + 1, paint, canvas); + paintPixelDataSubBlock( + objectData.topFieldData, + clutEntries, + regionDepth, + horizontalAddress, + verticalAddress, + paint, + canvas); + paintPixelDataSubBlock( + objectData.bottomFieldData, + clutEntries, + regionDepth, + horizontalAddress, + verticalAddress + 1, + paint, + canvas); } /** Draws a pixel data sub-block, as defined by ETSI EN 300 743 7.2.5.1, into a canvas. */ @@ -635,8 +698,9 @@ private static void paintPixelDataSubBlock( } else { clutMapTable2ToX = null; } - column = paint2BitPixelCodeString(data, clutEntries, clutMapTable2ToX, column, line, - paint, canvas); + column = + paint2BitPixelCodeString( + data, clutEntries, clutMapTable2ToX, column, line, paint, canvas); data.byteAlign(); break; case DATA_TYPE_4BP_CODE_STRING: @@ -646,8 +710,9 @@ private static void paintPixelDataSubBlock( } else { clutMapTable4ToX = null; } - column = paint4BitPixelCodeString(data, clutEntries, clutMapTable4ToX, column, line, - paint, canvas); + column = + paint4BitPixelCodeString( + data, clutEntries, clutMapTable4ToX, column, line, paint, canvas); data.byteAlign(); break; case DATA_TYPE_8BP_CODE_STRING: @@ -837,9 +902,7 @@ private static byte[] buildClutMapTable(int length, int bitsPerEntry, ParsableBi // Private inner classes. - /** - * The subtitle service definition. - */ + /** The subtitle service definition. */ private static final class SubtitleService { public final int subtitlePageId; @@ -873,13 +936,12 @@ public void reset() { displayDefinition = null; pageComposition = null; } - } /** * Contains the geometry and active area of the subtitle service. - *
- * See ETSI EN 300 743 7.2.1 + * + * <p>
      See ETSI EN 300 743 7.2.1 */ private static final class DisplayDefinition { @@ -891,8 +953,13 @@ private static final class DisplayDefinition { public final int verticalPositionMinimum; public final int verticalPositionMaximum; - public DisplayDefinition(int width, int height, int horizontalPositionMinimum, - int horizontalPositionMaximum, int verticalPositionMinimum, int verticalPositionMaximum) { + public DisplayDefinition( + int width, + int height, + int horizontalPositionMinimum, + int horizontalPositionMaximum, + int verticalPositionMinimum, + int verticalPositionMaximum) { this.width = width; this.height = height; this.horizontalPositionMinimum = horizontalPositionMinimum; @@ -900,13 +967,12 @@ public DisplayDefinition(int width, int height, int horizontalPositionMinimum, this.verticalPositionMinimum = verticalPositionMinimum; this.verticalPositionMaximum = verticalPositionMaximum; } - } /** - * The page is the definition and arrangement of regions in the screen. - *
- * See ETSI EN 300 743 7.2.2 + * The page is the definition and arrangement of regions in the screen. + * + * <p>
      See ETSI EN 300 743 7.2.2 */ private static final class PageComposition { @@ -915,20 +981,19 @@ private static final class PageComposition { public final int state; public final SparseArray regions; - public PageComposition(int timeoutSecs, int version, int state, - SparseArray regions) { + public PageComposition( + int timeoutSecs, int version, int state, SparseArray regions) { this.timeOutSecs = timeoutSecs; this.version = version; this.state = state; this.regions = regions; } - } /** * A region within a {@link PageComposition}. - *
- * See ETSI EN 300 743 7.2.2 + * + * <p>
      See ETSI EN 300 743 7.2.2 */ private static final class PageRegion { @@ -939,13 +1004,12 @@ public PageRegion(int horizontalAddress, int verticalAddress) { this.horizontalAddress = horizontalAddress; this.verticalAddress = verticalAddress; } - } /** * An area of the page composed of a list of objects and a CLUT. - *
- * See ETSI EN 300 743 7.2.3 + * + * <p>
      See ETSI EN 300 743 7.2.3 */ private static final class RegionComposition { @@ -961,9 +1025,18 @@ private static final class RegionComposition { public final int pixelCode2Bit; public final SparseArray regionObjects; - public RegionComposition(int id, boolean fillFlag, int width, int height, - int levelOfCompatibility, int depth, int clutId, int pixelCode8Bit, int pixelCode4Bit, - int pixelCode2Bit, SparseArray regionObjects) { + public RegionComposition( + int id, + boolean fillFlag, + int width, + int height, + int levelOfCompatibility, + int depth, + int clutId, + int pixelCode8Bit, + int pixelCode4Bit, + int pixelCode2Bit, + SparseArray regionObjects) { this.id = id; this.fillFlag = fillFlag; this.width = width; @@ -983,13 +1056,12 @@ public void mergeFrom(RegionComposition otherRegionComposition) { regionObjects.put(otherRegionObjects.keyAt(i), otherRegionObjects.valueAt(i)); } } - } /** * An object within a {@link RegionComposition}. - *
- * See ETSI EN 300 743 7.2.3 + * + * <p>
      See ETSI EN 300 743 7.2.3 */ private static final class RegionObject { @@ -1000,8 +1072,13 @@ private static final class RegionObject { public final int foregroundPixelCode; // TODO: Use this or remove it. public final int backgroundPixelCode; // TODO: Use this or remove it. - public RegionObject(int type, int provider, int horizontalPosition, - int verticalPosition, int foregroundPixelCode, int backgroundPixelCode) { + public RegionObject( + int type, + int provider, + int horizontalPosition, + int verticalPosition, + int foregroundPixelCode, + int backgroundPixelCode) { this.type = type; this.provider = provider; this.horizontalPosition = horizontalPosition; @@ -1009,13 +1086,12 @@ public RegionObject(int type, int provider, int horizontalPosition, this.foregroundPixelCode = foregroundPixelCode; this.backgroundPixelCode = backgroundPixelCode; } - } /** * CLUT family definition containing the color tables for the three bit depths defined - *
- * See ETSI EN 300 743 7.2.4 + * + * <p>
      See ETSI EN 300 743 7.2.4 */ private static final class ClutDefinition { @@ -1024,20 +1100,19 @@ private static final class ClutDefinition { public final int[] clutEntries4Bit; public final int[] clutEntries8Bit; - public ClutDefinition(int id, int[] clutEntries2Bit, int[] clutEntries4Bit, - int[] clutEntries8bit) { + public ClutDefinition( + int id, int[] clutEntries2Bit, int[] clutEntries4Bit, int[] clutEntries8bit) { this.id = id; this.clutEntries2Bit = clutEntries2Bit; this.clutEntries4Bit = clutEntries4Bit; this.clutEntries8Bit = clutEntries8bit; } - } /** * The textual or graphical representation of an object. - *
- * See ETSI EN 300 743 7.2.5 + * + * <p>
      See ETSI EN 300 743 7.2.5 */ private static final class ObjectData { @@ -1046,14 +1121,12 @@ private static final class ObjectData { public final byte[] topFieldData; public final byte[] bottomFieldData; - public ObjectData(int id, boolean nonModifyingColorFlag, byte[] topFieldData, - byte[] bottomFieldData) { + public ObjectData( + int id, boolean nonModifyingColorFlag, byte[] topFieldData, byte[] bottomFieldData) { this.id = id; this.nonModifyingColorFlag = nonModifyingColorFlag; this.topFieldData = topFieldData; this.bottomFieldData = bottomFieldData; } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/dvb/DvbSubtitle.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/dvb/DvbSubtitle.java index 75728359c7..de9cf45369 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/dvb/DvbSubtitle.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/dvb/DvbSubtitle.java @@ -20,9 +20,7 @@ import com.google.android.exoplayer2.text.Subtitle; import java.util.List; -/** - * A representation of a DVB subtitle. - */ +/** A representation of a DVB subtitle. */ /* package */ final class DvbSubtitle implements Subtitle { private final List cues; @@ -50,5 +48,4 @@ public long getEventTime(int index) { public List getCues(long timeUs) { return cues; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/pgs/PgsDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/pgs/PgsDecoder.java index 9ef3556c8f..5aa3768df9 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/pgs/PgsDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/pgs/PgsDecoder.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.text.pgs; +import static java.lang.Math.min; + import android.graphics.Bitmap; import androidx.annotation.Nullable; import com.google.android.exoplayer2.text.Cue; @@ -52,8 +54,9 @@ public PgsDecoder() { } @Override - protected Subtitle decode(byte[] data, int size, boolean reset) throws SubtitleDecoderException { - buffer.reset(data, size); + protected Subtitle decode(byte[] data, int length, boolean reset) + throws SubtitleDecoderException { + buffer.reset(data, length); maybeInflateData(buffer); cueBuilder.reset(); ArrayList cues = new ArrayList<>(); @@ -72,7 +75,7 @@ private void maybeInflateData(ParsableByteArray buffer) { inflater = new Inflater(); } if (Util.inflate(buffer, inflatedBuffer, inflater)) { - buffer.reset(inflatedBuffer.data, inflatedBuffer.limit()); + buffer.reset(inflatedBuffer.getData(), inflatedBuffer.limit()); } // else assume data is not compressed. } } @@ -132,7 +135,7 @@ public CueBuilder() { private void parsePaletteSection(ParsableByteArray buffer, int sectionLength) { if ((sectionLength % 5) != 2) { - // Section must be two bytes followed by a whole number of (index, y, cb, cr, a) entries. + // Section must be two bytes then a whole number of (index, Y, Cr, Cb, alpha) entries. 
return; } buffer.skipBytes(2); @@ -182,8 +185,8 @@ private void parseBitmapSection(ParsableByteArray buffer, int sectionLength) { int position = bitmapData.getPosition(); int limit = bitmapData.limit(); if (position < limit && sectionLength > 0) { - int bytesToRead = Math.min(sectionLength, limit - position); - buffer.readBytes(bitmapData.data, position, bytesToRead); + int bytesToRead = min(sectionLength, limit - position); + buffer.readBytes(bitmapData.getData(), position, bytesToRead); bitmapData.setPosition(position + bytesToRead); } } @@ -235,14 +238,15 @@ public Cue build() { Bitmap bitmap = Bitmap.createBitmap(argbBitmapData, bitmapWidth, bitmapHeight, Bitmap.Config.ARGB_8888); // Build the cue. - return new Cue( - bitmap, - (float) bitmapX / planeWidth, - Cue.ANCHOR_TYPE_START, - (float) bitmapY / planeHeight, - Cue.ANCHOR_TYPE_START, - (float) bitmapWidth / planeWidth, - (float) bitmapHeight / planeHeight); + return new Cue.Builder() + .setBitmap(bitmap) + .setPosition((float) bitmapX / planeWidth) + .setPositionAnchor(Cue.ANCHOR_TYPE_START) + .setLine((float) bitmapY / planeHeight, Cue.LINE_TYPE_FRACTION) + .setLineAnchor(Cue.ANCHOR_TYPE_START) + .setSize((float) bitmapWidth / planeWidth) + .setBitmapHeight((float) bitmapHeight / planeHeight) + .build(); } public void reset() { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/HorizontalTextInVerticalContextSpan.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/HorizontalTextInVerticalContextSpan.java new file mode 100644 index 0000000000..85dd5aad9e --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/HorizontalTextInVerticalContextSpan.java @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package com.google.android.exoplayer2.text.span; + +/** + * A styling span for horizontal text in a vertical context. + * + *
<p>This is used in vertical text to write some characters in a horizontal orientation, known in + * Japanese as tate-chu-yoko. + * + * <p>
      More information on tate-chu-yoko and span styling. + */ +// NOTE: There's no Android layout support for this, so this span currently doesn't extend any +// styling superclasses (e.g. MetricAffectingSpan). The only way to render this styling is to +// extract the spans and do the layout manually. +public final class HorizontalTextInVerticalContextSpan implements LanguageFeatureSpan {} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/LanguageFeatureSpan.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/LanguageFeatureSpan.java new file mode 100644 index 0000000000..704eb000d8 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/LanguageFeatureSpan.java @@ -0,0 +1,19 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.text.span; + +/** Marker interface for span classes that carry language features rather than style information. */ +public interface LanguageFeatureSpan {} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/RubySpan.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/RubySpan.java new file mode 100644 index 0000000000..cc25ce391a --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/RubySpan.java @@ -0,0 +1,45 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package com.google.android.exoplayer2.text.span; + +/** + * A styling span for ruby text. + * + *

      The text covered by this span is known as the "base text", and the ruby text is stored in + * {@link #rubyText}. + * + *
      More information on ruby characters + * and span styling. + */ +// NOTE: There's no Android layout support for rubies, so this span currently doesn't extend any +// styling superclasses (e.g. MetricAffectingSpan). The only way to render these rubies is to +// extract the spans and do the layout manually. +// TODO: Consider adding support for parenthetical text to be used when rendering doesn't support +// rubies (e.g. HTML tag). +public final class RubySpan implements LanguageFeatureSpan { + + /** The ruby text, i.e. the smaller explanatory characters. */ + public final String rubyText; + + /** The position of the ruby text relative to the base text. */ + public final @TextAnnotation.Position int position; + + public RubySpan(String rubyText, @TextAnnotation.Position int position) { + this.rubyText = rubyText; + this.position = position; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/SpanUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/SpanUtil.java new file mode 100644 index 0000000000..d215f368a4 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/SpanUtil.java @@ -0,0 +1,55 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.text.span; + +import android.text.Spannable; +import android.text.style.ForegroundColorSpan; + +/** + * Utility methods for Android span + * styling. + */ +public final class SpanUtil { + + /** + * Adds {@code span} to {@code spannable} between {@code start} and {@code end}, removing any + * existing spans of the same type and with the same indices and flags. + * + *
      This is useful for types of spans that don't make sense to duplicate and where the + * evaluation order might have an unexpected impact on the final text, e.g. {@link + * ForegroundColorSpan}. + * + * @param spannable The {@link Spannable} to add {@code span} to. + * @param span The span object to be added. + * @param start The start index to add the new span at. + * @param end The end index to add the new span at. + * @param spanFlags The flags to pass to {@link Spannable#setSpan(Object, int, int, int)}. + */ + public static void addOrReplaceSpan( + Spannable spannable, Object span, int start, int end, int spanFlags) { + Object[] existingSpans = spannable.getSpans(start, end, span.getClass()); + for (Object existingSpan : existingSpans) { + if (spannable.getSpanStart(existingSpan) == start + && spannable.getSpanEnd(existingSpan) == end + && spannable.getSpanFlags(existingSpan) == spanFlags) { + spannable.removeSpan(existingSpan); + } + } + spannable.setSpan(span, start, end, spanFlags); + } + + private SpanUtil() {} +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/TextAnnotation.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/TextAnnotation.java new file mode 100644 index 0000000000..81eeff99f4 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/TextAnnotation.java @@ -0,0 +1,65 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.text.span; + +import static java.lang.annotation.ElementType.TYPE_USE; +import static java.lang.annotation.RetentionPolicy.SOURCE; + +import androidx.annotation.IntDef; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.Target; + +/** Properties of a text annotation (i.e. ruby, text emphasis marks). */ +public final class TextAnnotation { + /** The text annotation position is unknown. */ + public static final int POSITION_UNKNOWN = -1; + + /** + * For horizontal text, the text annotation should be positioned above the base text. + * + *
      For vertical text it should be positioned to the right, same as CSS's ruby-position. + */ + public static final int POSITION_BEFORE = 1; + + /** + * For horizontal text, the text annotation should be positioned below the base text. + * + *
      For vertical text it should be positioned to the left, same as CSS's ruby-position. + */ + public static final int POSITION_AFTER = 2; + + /** + * The possible positions of the annotation text relative to the base text. + * + *
      One of: + * + *
        + *
      • {@link #POSITION_UNKNOWN} + *
      • {@link #POSITION_BEFORE} + *
      • {@link #POSITION_AFTER} + *
      + */ + @Documented + @Retention(SOURCE) + @Target(TYPE_USE) + @IntDef({POSITION_UNKNOWN, POSITION_BEFORE, POSITION_AFTER}) + public @interface Position {} + + private TextAnnotation() {} +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/TextEmphasisSpan.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/TextEmphasisSpan.java new file mode 100644 index 0000000000..46368d3b81 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/TextEmphasisSpan.java @@ -0,0 +1,98 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.text.span; + +import static java.lang.annotation.ElementType.TYPE_USE; +import static java.lang.annotation.RetentionPolicy.SOURCE; + +import androidx.annotation.IntDef; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.Target; + +/** + * A styling span for text emphasis marks. + * + *
      These are pronunciation aids such as Japanese boutens which can be + * rendered using the + * text-emphasis CSS property. + */ +// NOTE: There's no Android layout support for text emphasis, so this span currently doesn't extend +// any styling superclasses (e.g. MetricAffectingSpan). The only way to render this emphasis is to +// extract the spans and do the layout manually. +public final class TextEmphasisSpan implements LanguageFeatureSpan { + + /** + * The possible mark shapes that can be used. + * + *
      One of: + * + *
        + *
      • {@link #MARK_SHAPE_NONE} + *
      • {@link #MARK_SHAPE_CIRCLE} + *
      • {@link #MARK_SHAPE_DOT} + *
      • {@link #MARK_SHAPE_SESAME} + *
      + */ + @Documented + @Retention(SOURCE) + @Target(TYPE_USE) + @IntDef({MARK_SHAPE_NONE, MARK_SHAPE_CIRCLE, MARK_SHAPE_DOT, MARK_SHAPE_SESAME}) + public @interface MarkShape {} + + public static final int MARK_SHAPE_NONE = 0; + public static final int MARK_SHAPE_CIRCLE = 1; + public static final int MARK_SHAPE_DOT = 2; + public static final int MARK_SHAPE_SESAME = 3; + + /** + * The possible mark fills that can be used. + * + *
      One of: + * + *
        + *
      • {@link #MARK_FILL_UNKNOWN} + *
      • {@link #MARK_FILL_FILLED} + *
      • {@link #MARK_FILL_OPEN} + *
      + */ + @Documented + @Retention(SOURCE) + @Target(TYPE_USE) + @IntDef({MARK_FILL_UNKNOWN, MARK_FILL_FILLED, MARK_FILL_OPEN}) + public @interface MarkFill {} + + public static final int MARK_FILL_UNKNOWN = 0; + public static final int MARK_FILL_FILLED = 1; + public static final int MARK_FILL_OPEN = 2; + + /** The mark shape used for text emphasis. */ + public @MarkShape int markShape; + + /** The mark fill for the text emphasis mark. */ + public @MarkShape int markFill; + + /** The position of the text emphasis relative to the base text. */ + public final @TextAnnotation.Position int position; + + public TextEmphasisSpan( + @MarkShape int shape, @MarkFill int fill, @TextAnnotation.Position int position) { + this.markShape = shape; + this.markFill = fill; + this.position = position; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/package-info.java new file mode 100644 index 0000000000..87876b1054 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/span/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +@NonNullApi +package com.google.android.exoplayer2.text.span; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaDecoder.java index eef9d2eec1..241300ea2d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaDecoder.java @@ -15,9 +15,17 @@ */ package com.google.android.exoplayer2.text.ssa; +import static com.google.android.exoplayer2.text.Cue.LINE_TYPE_FRACTION; import static com.google.android.exoplayer2.util.Util.castNonNull; +import android.graphics.Typeface; import android.text.Layout; +import android.text.SpannableString; +import android.text.style.BackgroundColorSpan; +import android.text.style.ForegroundColorSpan; +import android.text.style.StrikethroughSpan; +import android.text.style.StyleSpan; +import android.text.style.UnderlineSpan; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.text.Cue; @@ -27,6 +35,7 @@ import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Ascii; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; @@ -99,15 +108,15 @@ public SsaDecoder(@Nullable List initializationData) { } @Override - protected Subtitle decode(byte[] bytes, int length, boolean reset) { + protected Subtitle decode(byte[] data, int length, boolean reset) { List> cues = new ArrayList<>(); List cueTimesUs = new ArrayList<>(); - ParsableByteArray data = new ParsableByteArray(bytes, length); + ParsableByteArray parsableData = new ParsableByteArray(data, length); if (!haveInitializationData) { - parseHeader(data); + parseHeader(parsableData); } - parseEventBody(data, cues, cueTimesUs); + parseEventBody(parsableData, cues, cueTimesUs); return new SsaSubtitle(cues, cueTimesUs); } @@ -139,7 +148,7 @@ private void parseHeader(ParsableByteArray data) { * starts with {@code [} (i.e. the title of the next section). * * @param data A {@link ParsableByteArray} with {@link ParsableByteArray#getPosition() position} - * set to the beginning of of the first line after {@code [Script Info]}. + * set to the beginning of the first line after {@code [Script Info]}. */ private void parseScriptInfo(ParsableByteArray data) { @Nullable String currentLine; @@ -149,7 +158,7 @@ private void parseScriptInfo(ParsableByteArray data) { if (infoNameAndValue.length != 2) { continue; } - switch (Util.toLowerInvariant(infoNameAndValue[0].trim())) { + switch (Ascii.toLowerCase(infoNameAndValue[0].trim())) { case "playresx": try { screenWidth = Float.parseFloat(infoNameAndValue[1].trim()); @@ -175,7 +184,7 @@ private void parseScriptInfo(ParsableByteArray data) { * starts with {@code [} (i.e. the title of the next section). * * @param data A {@link ParsableByteArray} with {@link ParsableByteArray#getPosition()} pointing - * at the beginning of of the first line after {@code [V4+ Styles]}. + * at the beginning of the first line after {@code [V4+ Styles]}. 
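* (Illustrative note, not part of the patch: a {@code [V4+ Styles]} section typically opens with a {@code Format:} line naming the style fields, followed by one {@code Style:} line per style; {@code Format.fromFormatLine} and {@code SsaStyle.fromStyleLine} below consume exactly that pair of line types.)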
*/ private static Map parseStyles(ParsableByteArray data) { Map styles = new LinkedHashMap<>(); @@ -262,8 +271,9 @@ private void parseDialogueLine( SsaStyle.Overrides styleOverrides = SsaStyle.Overrides.parseFromDialogue(rawText); String text = SsaStyle.Overrides.stripStyleOverrides(rawText) - .replaceAll("\\\\N", "\n") - .replaceAll("\\\\n", "\n"); + .replace("\\N", "\n") + .replace("\\n", "\n") + .replace("\\h", "\u00A0"); Cue cue = createCue(text, style, styleOverrides, screenWidth, screenHeight); int startTimeIndex = addCuePlacerholderByTime(startTimeUs, cueTimesUs, cues); @@ -299,6 +309,63 @@ private static Cue createCue( SsaStyle.Overrides styleOverrides, float screenWidth, float screenHeight) { + SpannableString spannableText = new SpannableString(text); + Cue.Builder cue = new Cue.Builder().setText(spannableText); + + if (style != null) { + if (style.primaryColor != null) { + spannableText.setSpan( + new ForegroundColorSpan(style.primaryColor), + /* start= */ 0, + /* end= */ spannableText.length(), + SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE); + } + if (style.borderStyle == SsaStyle.SSA_BORDER_STYLE_BOX && style.outlineColor != null) { + spannableText.setSpan( + new BackgroundColorSpan(style.outlineColor), + /* start= */ 0, + /* end= */ spannableText.length(), + SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE); + } + if (style.fontSize != Cue.DIMEN_UNSET && screenHeight != Cue.DIMEN_UNSET) { + cue.setTextSize( + style.fontSize / screenHeight, Cue.TEXT_SIZE_TYPE_FRACTIONAL_IGNORE_PADDING); + } + if (style.bold && style.italic) { + spannableText.setSpan( + new StyleSpan(Typeface.BOLD_ITALIC), + /* start= */ 0, + /* end= */ spannableText.length(), + SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE); + } else if (style.bold) { + spannableText.setSpan( + new StyleSpan(Typeface.BOLD), + /* start= */ 0, + /* end= */ spannableText.length(), + SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE); + } else if (style.italic) { + spannableText.setSpan( + new StyleSpan(Typeface.ITALIC), + /* start= */ 0, + /* end= */ spannableText.length(), + SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE); + } + if (style.underline) { + spannableText.setSpan( + new UnderlineSpan(), + /* start= */ 0, + /* end= */ spannableText.length(), + SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE); + } + if (style.strikeout) { + spannableText.setSpan( + new StrikethroughSpan(), + /* start= */ 0, + /* end= */ spannableText.length(), + SpannableString.SPAN_EXCLUSIVE_EXCLUSIVE); + } + } + @SsaStyle.SsaAlignment int alignment; if (styleOverrides.alignment != SsaStyle.SSA_ALIGNMENT_UNKNOWN) { alignment = styleOverrides.alignment; @@ -307,31 +374,22 @@ private static Cue createCue( } else { alignment = SsaStyle.SSA_ALIGNMENT_UNKNOWN; } - @Cue.AnchorType int positionAnchor = toPositionAnchor(alignment); - @Cue.AnchorType int lineAnchor = toLineAnchor(alignment); + cue.setTextAlignment(toTextAlignment(alignment)) + .setPositionAnchor(toPositionAnchor(alignment)) + .setLineAnchor(toLineAnchor(alignment)); - float position; - float line; if (styleOverrides.position != null && screenHeight != Cue.DIMEN_UNSET && screenWidth != Cue.DIMEN_UNSET) { - position = styleOverrides.position.x / screenWidth; - line = styleOverrides.position.y / screenHeight; + cue.setPosition(styleOverrides.position.x / screenWidth); + cue.setLine(styleOverrides.position.y / screenHeight, LINE_TYPE_FRACTION); } else { // TODO: Read the MarginL, MarginR and MarginV values from the Style & Dialogue lines. 
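For reference, the constructor-to-builder changes above can be sketched in isolation. This is a minimal illustration using only Cue.Builder setters that already appear in this diff; the class name, method name and the 0.5f/0.95f fractions are invented for the example and are not taken from the decoder:

import android.text.Layout;
import com.google.android.exoplayer2.text.Cue;

final class CueBuilderSketch {
  // Builds a conventional bottom-centre cue, mirroring the anchor/line semantics that
  // SsaDecoder.createCue and SubripDecoder.buildCue rely on in this patch.
  static Cue bottomCentre(CharSequence text) {
    return new Cue.Builder()
        .setText(text)
        .setTextAlignment(Layout.Alignment.ALIGN_CENTER)
        .setPositionAnchor(Cue.ANCHOR_TYPE_MIDDLE)
        .setPosition(0.5f)                       // horizontally centred (illustrative value)
        .setLineAnchor(Cue.ANCHOR_TYPE_END)
        .setLine(0.95f, Cue.LINE_TYPE_FRACTION)  // near the bottom edge (illustrative value)
        .build();
  }
}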
- position = computeDefaultLineOrPosition(positionAnchor); - line = computeDefaultLineOrPosition(lineAnchor); + cue.setPosition(computeDefaultLineOrPosition(cue.getPositionAnchor())); + cue.setLine(computeDefaultLineOrPosition(cue.getLineAnchor()), LINE_TYPE_FRACTION); } - return new Cue( - text, - toTextAlignment(alignment), - line, - Cue.LINE_TYPE_FRACTION, - lineAnchor, - position, - positionAnchor, - /* size= */ Cue.DIMEN_UNSET); + return cue.build(); } @Nullable @@ -357,8 +415,7 @@ private static Layout.Alignment toTextAlignment(@SsaStyle.SsaAlignment int align } } - @Cue.AnchorType - private static int toLineAnchor(@SsaStyle.SsaAlignment int alignment) { + private static @Cue.AnchorType int toLineAnchor(@SsaStyle.SsaAlignment int alignment) { switch (alignment) { case SsaStyle.SSA_ALIGNMENT_BOTTOM_LEFT: case SsaStyle.SSA_ALIGNMENT_BOTTOM_CENTER: @@ -380,8 +437,7 @@ private static int toLineAnchor(@SsaStyle.SsaAlignment int alignment) { } } - @Cue.AnchorType - private static int toPositionAnchor(@SsaStyle.SsaAlignment int alignment) { + private static @Cue.AnchorType int toPositionAnchor(@SsaStyle.SsaAlignment int alignment) { switch (alignment) { case SsaStyle.SSA_ALIGNMENT_BOTTOM_LEFT: case SsaStyle.SSA_ALIGNMENT_MIDDLE_LEFT: diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaDialogueFormat.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaDialogueFormat.java index 03c025cd94..82f3dd642c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaDialogueFormat.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaDialogueFormat.java @@ -22,7 +22,7 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Ascii; /** * Represents a {@code Format:} line from the {@code [Events]} section @@ -61,7 +61,7 @@ public static SsaDialogueFormat fromFormatLine(String formatLine) { Assertions.checkArgument(formatLine.startsWith(FORMAT_LINE_PREFIX)); String[] keys = TextUtils.split(formatLine.substring(FORMAT_LINE_PREFIX.length()), ","); for (int i = 0; i < keys.length; i++) { - switch (Util.toLowerInvariant(keys[i].trim())) { + switch (Ascii.toLowerCase(keys[i].trim())) { case "start": startTimeIndex = i; break; @@ -76,7 +76,9 @@ public static SsaDialogueFormat fromFormatLine(String formatLine) { break; } } - return (startTimeIndex != C.INDEX_UNSET && endTimeIndex != C.INDEX_UNSET) + return (startTimeIndex != C.INDEX_UNSET + && endTimeIndex != C.INDEX_UNSET + && textIndex != C.INDEX_UNSET) ? 
new SsaDialogueFormat(startTimeIndex, endTimeIndex, styleIndex, textIndex, keys.length) : null; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaStyle.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaStyle.java index fd2cb036b7..1e6868bb29 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaStyle.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaStyle.java @@ -17,18 +17,26 @@ package com.google.android.exoplayer2.text.ssa; import static com.google.android.exoplayer2.text.ssa.SsaDecoder.STYLE_LINE_PREFIX; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static java.lang.annotation.ElementType.TYPE_USE; import static java.lang.annotation.RetentionPolicy.SOURCE; +import android.graphics.Color; import android.graphics.PointF; import android.text.TextUtils; +import androidx.annotation.ColorInt; import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.text.Cue; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Ascii; +import com.google.common.primitives.Ints; import java.lang.annotation.Documented; import java.lang.annotation.Retention; +import java.lang.annotation.Target; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -55,6 +63,7 @@ *
    • {@link #SSA_ALIGNMENT_TOP_RIGHT} * */ + @Target(TYPE_USE) @IntDef({ SSA_ALIGNMENT_UNKNOWN, SSA_ALIGNMENT_BOTTOM_LEFT, @@ -83,17 +92,69 @@ public static final int SSA_ALIGNMENT_TOP_CENTER = 8; public static final int SSA_ALIGNMENT_TOP_RIGHT = 9; + /** + * The SSA/ASS BorderStyle. + * + *
      Allowed values: + * + *
        + *
      • {@link #SSA_BORDER_STYLE_UNKNOWN} + *
      • {@link #SSA_BORDER_STYLE_OUTLINE} + *
      • {@link #SSA_BORDER_STYLE_BOX} + *
      + */ + @Target(TYPE_USE) + @IntDef({ + SSA_BORDER_STYLE_UNKNOWN, + SSA_BORDER_STYLE_OUTLINE, + SSA_BORDER_STYLE_BOX, + }) + @Documented + @Retention(SOURCE) + public @interface SsaBorderStyle {} + + // The numbering follows the ASS (v4+) spec. + public static final int SSA_BORDER_STYLE_UNKNOWN = -1; + public static final int SSA_BORDER_STYLE_OUTLINE = 1; + public static final int SSA_BORDER_STYLE_BOX = 3; + public final String name; - @SsaAlignment public final int alignment; + public final @SsaAlignment int alignment; + @Nullable @ColorInt public final Integer primaryColor; + @Nullable @ColorInt public final Integer outlineColor; + public final float fontSize; + public final boolean bold; + public final boolean italic; + public final boolean underline; + public final boolean strikeout; + public final @SsaBorderStyle int borderStyle; - private SsaStyle(String name, @SsaAlignment int alignment) { + private SsaStyle( + String name, + @SsaAlignment int alignment, + @Nullable @ColorInt Integer primaryColor, + @Nullable @ColorInt Integer outlineColor, + float fontSize, + boolean bold, + boolean italic, + boolean underline, + boolean strikeout, + @SsaBorderStyle int borderStyle) { this.name = name; this.alignment = alignment; + this.primaryColor = primaryColor; + this.outlineColor = outlineColor; + this.fontSize = fontSize; + this.bold = bold; + this.italic = italic; + this.underline = underline; + this.strikeout = strikeout; + this.borderStyle = borderStyle; } @Nullable public static SsaStyle fromStyleLine(String styleLine, Format format) { - Assertions.checkArgument(styleLine.startsWith(STYLE_LINE_PREFIX)); + checkArgument(styleLine.startsWith(STYLE_LINE_PREFIX)); String[] styleValues = TextUtils.split(styleLine.substring(STYLE_LINE_PREFIX.length()), ","); if (styleValues.length != format.length) { Log.w( @@ -105,15 +166,37 @@ public static SsaStyle fromStyleLine(String styleLine, Format format) { } try { return new SsaStyle( - styleValues[format.nameIndex].trim(), parseAlignment(styleValues[format.alignmentIndex])); + styleValues[format.nameIndex].trim(), + format.alignmentIndex != C.INDEX_UNSET + ? parseAlignment(styleValues[format.alignmentIndex].trim()) + : SSA_ALIGNMENT_UNKNOWN, + format.primaryColorIndex != C.INDEX_UNSET + ? parseColor(styleValues[format.primaryColorIndex].trim()) + : null, + format.outlineColorIndex != C.INDEX_UNSET + ? parseColor(styleValues[format.outlineColorIndex].trim()) + : null, + format.fontSizeIndex != C.INDEX_UNSET + ? parseFontSize(styleValues[format.fontSizeIndex].trim()) + : Cue.DIMEN_UNSET, + format.boldIndex != C.INDEX_UNSET + && parseBooleanValue(styleValues[format.boldIndex].trim()), + format.italicIndex != C.INDEX_UNSET + && parseBooleanValue(styleValues[format.italicIndex].trim()), + format.underlineIndex != C.INDEX_UNSET + && parseBooleanValue(styleValues[format.underlineIndex].trim()), + format.strikeoutIndex != C.INDEX_UNSET + && parseBooleanValue(styleValues[format.strikeoutIndex].trim()), + format.borderStyleIndex != C.INDEX_UNSET + ? 
parseBorderStyle(styleValues[format.borderStyleIndex].trim()) + : SSA_BORDER_STYLE_UNKNOWN); } catch (RuntimeException e) { Log.w(TAG, "Skipping malformed 'Style:' line: '" + styleLine + "'", e); return null; } } - @SsaAlignment - private static int parseAlignment(String alignmentStr) { + private static @SsaAlignment int parseAlignment(String alignmentStr) { try { @SsaAlignment int alignment = Integer.parseInt(alignmentStr.trim()); if (isValidAlignment(alignment)) { @@ -144,6 +227,87 @@ private static boolean isValidAlignment(@SsaAlignment int alignment) { } } + private static @SsaBorderStyle int parseBorderStyle(String borderStyleStr) { + try { + @SsaBorderStyle int borderStyle = Integer.parseInt(borderStyleStr.trim()); + if (isValidBorderStyle(borderStyle)) { + return borderStyle; + } + } catch (NumberFormatException e) { + // Swallow the exception and return UNKNOWN below. + } + Log.w(TAG, "Ignoring unknown BorderStyle: " + borderStyleStr); + return SSA_BORDER_STYLE_UNKNOWN; + } + + private static boolean isValidBorderStyle(@SsaBorderStyle int alignment) { + switch (alignment) { + case SSA_BORDER_STYLE_OUTLINE: + case SSA_BORDER_STYLE_BOX: + return true; + case SSA_BORDER_STYLE_UNKNOWN: + default: + return false; + } + } + + /** + * Parses a SSA V4+ color expression. + * + *
      A SSA V4+ color can be represented in hex {@code ("&HAABBGGRR")} or in 64-bit decimal format + * (byte order AABBGGRR). In both cases the alpha channel's value needs to be inverted because in + * SSA the 0xFF alpha value means transparent and 0x00 means opaque which is the opposite from the + * Android {@link ColorInt} representation. + * + * @param ssaColorExpression A SSA V4+ color expression. + * @return The parsed color value, or null if parsing failed. + */ + @Nullable + @ColorInt + public static Integer parseColor(String ssaColorExpression) { + // We use a long because the value is an unsigned 32-bit number, so can be larger than + // Integer.MAX_VALUE. + long abgr; + try { + abgr = + ssaColorExpression.startsWith("&H") + // Parse color from hex format (&HAABBGGRR). + ? Long.parseLong(ssaColorExpression.substring(2), /* radix= */ 16) + // Parse color from decimal format (bytes order AABBGGRR). + : Long.parseLong(ssaColorExpression); + // Ensure only the bottom 4 bytes of abgr are set. + checkArgument(abgr <= 0xFFFFFFFFL); + } catch (IllegalArgumentException e) { + Log.w(TAG, "Failed to parse color expression: '" + ssaColorExpression + "'", e); + return null; + } + // Convert ABGR to ARGB. + int a = Ints.checkedCast(((abgr >> 24) & 0xFF) ^ 0xFF); // Flip alpha. + int b = Ints.checkedCast((abgr >> 16) & 0xFF); + int g = Ints.checkedCast((abgr >> 8) & 0xFF); + int r = Ints.checkedCast(abgr & 0xFF); + return Color.argb(a, r, g, b); + } + + private static float parseFontSize(String fontSize) { + try { + return Float.parseFloat(fontSize); + } catch (NumberFormatException e) { + Log.w(TAG, "Failed to parse font size: '" + fontSize + "'", e); + return Cue.DIMEN_UNSET; + } + } + + private static boolean parseBooleanValue(String booleanValue) { + try { + int value = Integer.parseInt(booleanValue); + return value == 1 || value == -1; + } catch (NumberFormatException e) { + Log.w(TAG, "Failed to parse boolean value: '" + booleanValue + "'", e); + return false; + } + } + /** * Represents a {@code Format:} line from the {@code [V4+ Styles]} section * @@ -154,11 +318,38 @@ private static boolean isValidAlignment(@SsaAlignment int alignment) { public final int nameIndex; public final int alignmentIndex; + public final int primaryColorIndex; + public final int outlineColorIndex; + public final int fontSizeIndex; + public final int boldIndex; + public final int italicIndex; + public final int underlineIndex; + public final int strikeoutIndex; + public final int borderStyleIndex; public final int length; - private Format(int nameIndex, int alignmentIndex, int length) { + private Format( + int nameIndex, + int alignmentIndex, + int primaryColorIndex, + int outlineColorIndex, + int fontSizeIndex, + int boldIndex, + int italicIndex, + int underlineIndex, + int strikeoutIndex, + int borderStyleIndex, + int length) { this.nameIndex = nameIndex; this.alignmentIndex = alignmentIndex; + this.primaryColorIndex = primaryColorIndex; + this.outlineColorIndex = outlineColorIndex; + this.fontSizeIndex = fontSizeIndex; + this.boldIndex = boldIndex; + this.italicIndex = italicIndex; + this.underlineIndex = underlineIndex; + this.strikeoutIndex = strikeoutIndex; + this.borderStyleIndex = borderStyleIndex; this.length = length; } @@ -171,19 +362,64 @@ private Format(int nameIndex, int alignmentIndex, int length) { public static Format fromFormatLine(String styleFormatLine) { int nameIndex = C.INDEX_UNSET; int alignmentIndex = C.INDEX_UNSET; + int primaryColorIndex = C.INDEX_UNSET; + int outlineColorIndex = 
C.INDEX_UNSET; + int fontSizeIndex = C.INDEX_UNSET; + int boldIndex = C.INDEX_UNSET; + int italicIndex = C.INDEX_UNSET; + int underlineIndex = C.INDEX_UNSET; + int strikeoutIndex = C.INDEX_UNSET; + int borderStyleIndex = C.INDEX_UNSET; String[] keys = TextUtils.split(styleFormatLine.substring(SsaDecoder.FORMAT_LINE_PREFIX.length()), ","); for (int i = 0; i < keys.length; i++) { - switch (Util.toLowerInvariant(keys[i].trim())) { + switch (Ascii.toLowerCase(keys[i].trim())) { case "name": nameIndex = i; break; case "alignment": alignmentIndex = i; break; + case "primarycolour": + primaryColorIndex = i; + break; + case "outlinecolour": + outlineColorIndex = i; + break; + case "fontsize": + fontSizeIndex = i; + break; + case "bold": + boldIndex = i; + break; + case "italic": + italicIndex = i; + break; + case "underline": + underlineIndex = i; + break; + case "strikeout": + strikeoutIndex = i; + break; + case "borderstyle": + borderStyleIndex = i; + break; } } - return nameIndex != C.INDEX_UNSET ? new Format(nameIndex, alignmentIndex, keys.length) : null; + return nameIndex != C.INDEX_UNSET + ? new Format( + nameIndex, + alignmentIndex, + primaryColorIndex, + outlineColorIndex, + fontSizeIndex, + boldIndex, + italicIndex, + underlineIndex, + strikeoutIndex, + borderStyleIndex, + keys.length) + : null; } } @@ -214,7 +450,7 @@ public static Format fromFormatLine(String styleFormatLine) { /** Matches "\anx" and returns x in group 1 */ private static final Pattern ALIGNMENT_OVERRIDE_PATTERN = Pattern.compile("\\\\an(\\d+)"); - @SsaAlignment public final int alignment; + public final @SsaAlignment int alignment; @Nullable public final PointF position; private Overrides(@SsaAlignment int alignment, @Nullable PointF position) { @@ -227,7 +463,7 @@ public static Overrides parseFromDialogue(String text) { PointF position = null; Matcher matcher = BRACES_PATTERN.matcher(text); while (matcher.find()) { - String braceContents = matcher.group(1); + String braceContents = Assertions.checkNotNull(matcher.group(1)); try { PointF parsedPosition = parsePosition(braceContents); if (parsedPosition != null) { @@ -292,10 +528,11 @@ private static PointF parsePosition(String styleOverride) { Float.parseFloat(Assertions.checkNotNull(y).trim())); } - @SsaAlignment - private static int parseAlignmentOverride(String braceContents) { + private static @SsaAlignment int parseAlignmentOverride(String braceContents) { Matcher matcher = ALIGNMENT_OVERRIDE_PATTERN.matcher(braceContents); - return matcher.find() ? parseAlignment(matcher.group(1)) : SSA_ALIGNMENT_UNKNOWN; + return matcher.find() + ? parseAlignment(Assertions.checkNotNull(matcher.group(1))) + : SSA_ALIGNMENT_UNKNOWN; } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaSubtitle.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaSubtitle.java index 4093f7974d..79da60c34e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaSubtitle.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ssa/SsaSubtitle.java @@ -23,9 +23,7 @@ import java.util.Collections; import java.util.List; -/** - * A representation of an SSA/ASS subtitle. - */ +/** A representation of an SSA/ASS subtitle. 
*/ /* package */ final class SsaSubtitle implements Subtitle { private final List> cues; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/subrip/SubripDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/subrip/SubripDecoder.java index cef7e3f53f..0c7883b0a5 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/subrip/SubripDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/subrip/SubripDecoder.java @@ -22,16 +22,17 @@ import com.google.android.exoplayer2.text.Cue; import com.google.android.exoplayer2.text.SimpleSubtitleDecoder; import com.google.android.exoplayer2.text.Subtitle; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.LongArray; import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.common.base.Charsets; +import java.nio.charset.Charset; import java.util.ArrayList; import java.util.regex.Matcher; import java.util.regex.Pattern; -/** - * A {@link SimpleSubtitleDecoder} for SubRip. - */ +/** A {@link SimpleSubtitleDecoder} for SubRip. */ public final class SubripDecoder extends SimpleSubtitleDecoder { // Fractional positions for use when alignment tags are present. @@ -71,19 +72,20 @@ public SubripDecoder() { } @Override - protected Subtitle decode(byte[] bytes, int length, boolean reset) { + protected Subtitle decode(byte[] data, int length, boolean reset) { ArrayList cues = new ArrayList<>(); LongArray cueTimesUs = new LongArray(); - ParsableByteArray subripData = new ParsableByteArray(bytes, length); + ParsableByteArray subripData = new ParsableByteArray(data, length); + Charset charset = detectUtfCharset(subripData); @Nullable String currentLine; - while ((currentLine = subripData.readLine()) != null) { + while ((currentLine = subripData.readLine(charset)) != null) { if (currentLine.length() == 0) { // Skip blank lines. continue; } - // Parse the index line as a sanity check. + // Parse and check the index line. try { Integer.parseInt(currentLine); } catch (NumberFormatException e) { @@ -92,7 +94,7 @@ protected Subtitle decode(byte[] bytes, int length, boolean reset) { } // Read and parse the timing line. - currentLine = subripData.readLine(); + currentLine = subripData.readLine(charset); if (currentLine == null) { Log.w(TAG, "Unexpected end"); break; @@ -110,13 +112,13 @@ protected Subtitle decode(byte[] bytes, int length, boolean reset) { // Read and parse the text and tags. textBuilder.setLength(0); tags.clear(); - currentLine = subripData.readLine(); + currentLine = subripData.readLine(charset); while (!TextUtils.isEmpty(currentLine)) { if (textBuilder.length() > 0) { textBuilder.append("
<br>
      "); } textBuilder.append(processLine(currentLine, tags)); - currentLine = subripData.readLine(); + currentLine = subripData.readLine(charset); } Spanned text = Html.fromHtml(textBuilder.toString()); @@ -134,12 +136,20 @@ protected Subtitle decode(byte[] bytes, int length, boolean reset) { cues.add(Cue.EMPTY); } - Cue[] cuesArray = new Cue[cues.size()]; - cues.toArray(cuesArray); + Cue[] cuesArray = cues.toArray(new Cue[0]); long[] cueTimesUsArray = cueTimesUs.toArray(); return new SubripSubtitle(cuesArray, cueTimesUsArray); } + /** + * Determine UTF encoding of the byte array from a byte order mark (BOM), defaulting to UTF-8 if + * no BOM is found. + */ + private Charset detectUtfCharset(ParsableByteArray data) { + @Nullable Charset charset = data.readUtfCharsetFromBom(); + return charset != null ? charset : Charsets.UTF_8; + } + /** * Trims and removes tags from the given line. The removed tags are added to {@code tags}. * @@ -173,68 +183,62 @@ private String processLine(String line, ArrayList tags) { * @return Built cue */ private Cue buildCue(Spanned text, @Nullable String alignmentTag) { + Cue.Builder cue = new Cue.Builder().setText(text); if (alignmentTag == null) { - return new Cue(text); + return cue.build(); } // Horizontal alignment. - @Cue.AnchorType int positionAnchor; switch (alignmentTag) { case ALIGN_BOTTOM_LEFT: case ALIGN_MID_LEFT: case ALIGN_TOP_LEFT: - positionAnchor = Cue.ANCHOR_TYPE_START; + cue.setPositionAnchor(Cue.ANCHOR_TYPE_START); break; case ALIGN_BOTTOM_RIGHT: case ALIGN_MID_RIGHT: case ALIGN_TOP_RIGHT: - positionAnchor = Cue.ANCHOR_TYPE_END; + cue.setPositionAnchor(Cue.ANCHOR_TYPE_END); break; case ALIGN_BOTTOM_MID: case ALIGN_MID_MID: case ALIGN_TOP_MID: default: - positionAnchor = Cue.ANCHOR_TYPE_MIDDLE; + cue.setPositionAnchor(Cue.ANCHOR_TYPE_MIDDLE); break; } // Vertical alignment. - @Cue.AnchorType int lineAnchor; switch (alignmentTag) { case ALIGN_BOTTOM_LEFT: case ALIGN_BOTTOM_MID: case ALIGN_BOTTOM_RIGHT: - lineAnchor = Cue.ANCHOR_TYPE_END; + cue.setLineAnchor(Cue.ANCHOR_TYPE_END); break; case ALIGN_TOP_LEFT: case ALIGN_TOP_MID: case ALIGN_TOP_RIGHT: - lineAnchor = Cue.ANCHOR_TYPE_START; + cue.setLineAnchor(Cue.ANCHOR_TYPE_START); break; case ALIGN_MID_LEFT: case ALIGN_MID_MID: case ALIGN_MID_RIGHT: default: - lineAnchor = Cue.ANCHOR_TYPE_MIDDLE; + cue.setLineAnchor(Cue.ANCHOR_TYPE_MIDDLE); break; } - return new Cue( - text, - /* textAlignment= */ null, - getFractionalPositionForAnchorType(lineAnchor), - Cue.LINE_TYPE_FRACTION, - lineAnchor, - getFractionalPositionForAnchorType(positionAnchor), - positionAnchor, - Cue.DIMEN_UNSET); + return cue.setPosition(getFractionalPositionForAnchorType(cue.getPositionAnchor())) + .setLine(getFractionalPositionForAnchorType(cue.getLineAnchor()), Cue.LINE_TYPE_FRACTION) + .build(); } private static long parseTimecode(Matcher matcher, int groupOffset) { @Nullable String hours = matcher.group(groupOffset + 1); long timestampMs = hours != null ? 
Long.parseLong(hours) * 60 * 60 * 1000 : 0; - timestampMs += Long.parseLong(matcher.group(groupOffset + 2)) * 60 * 1000; - timestampMs += Long.parseLong(matcher.group(groupOffset + 3)) * 1000; + timestampMs += + Long.parseLong(Assertions.checkNotNull(matcher.group(groupOffset + 2))) * 60 * 1000; + timestampMs += Long.parseLong(Assertions.checkNotNull(matcher.group(groupOffset + 3))) * 1000; @Nullable String millis = matcher.group(groupOffset + 4); if (millis != null) { timestampMs += Long.parseLong(millis); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/subrip/SubripSubtitle.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/subrip/SubripSubtitle.java index 01ed1711a9..631a638408 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/subrip/SubripSubtitle.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/subrip/SubripSubtitle.java @@ -23,9 +23,7 @@ import java.util.Collections; import java.util.List; -/** - * A representation of a SubRip subtitle. - */ +/** A representation of a SubRip subtitle. */ /* package */ final class SubripSubtitle implements Subtitle { private final Cue[] cues; @@ -68,5 +66,4 @@ public List getCues(long timeUs) { return Collections.singletonList(cues[index]); } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/DeleteTextSpan.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/DeleteTextSpan.java new file mode 100644 index 0000000000..be41c3957c --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/DeleteTextSpan.java @@ -0,0 +1,30 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package com.google.android.exoplayer2.text.ttml; + +import android.text.Spanned; + +/** + * A span used to mark a section of text for later deletion. + * + *
      This is deliberately package-private because it's not generally supported by Android and + * results in surprising behaviour when simply calling {@link Spanned#toString} (i.e. the text isn't + * deleted). + * + *
      This span is explicitly handled in {@code TtmlNode#cleanUpText}. + */ +/* package */ final class DeleteTextSpan {} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TextEmphasis.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TextEmphasis.java new file mode 100644 index 0000000000..bd0a5a834a --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TextEmphasis.java @@ -0,0 +1,222 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.text.ttml; + +import static java.lang.annotation.ElementType.TYPE_USE; +import static java.lang.annotation.RetentionPolicy.SOURCE; + +import android.text.TextUtils; +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.text.Cue; +import com.google.android.exoplayer2.text.span.TextAnnotation; +import com.google.android.exoplayer2.text.span.TextEmphasisSpan; +import com.google.common.base.Ascii; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; +import com.google.common.collect.Sets; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.Target; +import java.util.Set; +import java.util.regex.Pattern; + +/** + * Represents a + * tts:textEmphasis attribute. + */ +/* package */ final class TextEmphasis { + + @Documented + @Retention(SOURCE) + @Target(TYPE_USE) + @IntDef({ + TextEmphasisSpan.MARK_SHAPE_NONE, + TextEmphasisSpan.MARK_SHAPE_CIRCLE, + TextEmphasisSpan.MARK_SHAPE_DOT, + TextEmphasisSpan.MARK_SHAPE_SESAME, + MARK_SHAPE_AUTO + }) + @interface MarkShape {} + + /** + * The "auto" mark shape is only defined in TTML and is resolved to a concrete shape when building + * the {@link Cue}. Hence, it is not defined in {@link TextEmphasisSpan.MarkShape}. + */ + public static final int MARK_SHAPE_AUTO = -1; + + @Documented + @Retention(SOURCE) + @Target(TYPE_USE) + @IntDef({ + TextAnnotation.POSITION_UNKNOWN, + TextAnnotation.POSITION_BEFORE, + TextAnnotation.POSITION_AFTER, + POSITION_OUTSIDE + }) + public @interface Position {} + + /** + * The "outside" position is only defined in TTML and is resolved before outputting a {@link Cue} + * object. Hence, it is not defined in {@link TextAnnotation.Position}. 
+ */ + public static final int POSITION_OUTSIDE = -2; + + private static final Pattern WHITESPACE_PATTERN = Pattern.compile("\\s+"); + + private static final ImmutableSet SINGLE_STYLE_VALUES = + ImmutableSet.of(TtmlNode.TEXT_EMPHASIS_AUTO, TtmlNode.TEXT_EMPHASIS_NONE); + + private static final ImmutableSet MARK_SHAPE_VALUES = + ImmutableSet.of( + TtmlNode.TEXT_EMPHASIS_MARK_DOT, + TtmlNode.TEXT_EMPHASIS_MARK_SESAME, + TtmlNode.TEXT_EMPHASIS_MARK_CIRCLE); + + private static final ImmutableSet MARK_FILL_VALUES = + ImmutableSet.of(TtmlNode.TEXT_EMPHASIS_MARK_FILLED, TtmlNode.TEXT_EMPHASIS_MARK_OPEN); + + private static final ImmutableSet POSITION_VALUES = + ImmutableSet.of( + TtmlNode.ANNOTATION_POSITION_AFTER, + TtmlNode.ANNOTATION_POSITION_BEFORE, + TtmlNode.ANNOTATION_POSITION_OUTSIDE); + + /** The text emphasis mark shape. */ + public final @MarkShape int markShape; + + /** The fill style of the text emphasis mark. */ + public final @TextEmphasisSpan.MarkFill int markFill; + + /** The position of the text emphasis relative to the base text. */ + public final @Position int position; + + private TextEmphasis( + @MarkShape int markShape, + @TextEmphasisSpan.MarkFill int markFill, + @TextAnnotation.Position int position) { + this.markShape = markShape; + this.markFill = markFill; + this.position = position; + } + + /** + * Parses a TTML + * tts:textEmphasis attribute. Returns null if parsing fails. + * + *
      The parser searches for {@code emphasis-style} and {@code emphasis-position} independently. + * If a valid style is not found, the default style is used. If a valid position is not found, the + * default position is used. + * + *
      Not implemented: + * + *
        + *
      • {@code emphasis-color} + *
      • Quoted string {@code emphasis-style} + *
      + */ + @Nullable + public static TextEmphasis parse(@Nullable String value) { + if (value == null) { + return null; + } + + String parsingValue = Ascii.toLowerCase(value.trim()); + if (parsingValue.isEmpty()) { + return null; + } + + return parseWords(ImmutableSet.copyOf(TextUtils.split(parsingValue, WHITESPACE_PATTERN))); + } + + private static TextEmphasis parseWords(ImmutableSet nodes) { + Set matchingPositions = Sets.intersection(POSITION_VALUES, nodes); + // If no emphasis position is specified, then the emphasis position must be interpreted as if + // a position of outside were specified: + // https://www.w3.org/TR/2018/REC-ttml2-20181108/#style-attribute-textEmphasis + @Position int position; + switch (Iterables.getFirst(matchingPositions, TtmlNode.ANNOTATION_POSITION_OUTSIDE)) { + case TtmlNode.ANNOTATION_POSITION_AFTER: + position = TextAnnotation.POSITION_AFTER; + break; + case TtmlNode.ANNOTATION_POSITION_OUTSIDE: + position = POSITION_OUTSIDE; + break; + case TtmlNode.ANNOTATION_POSITION_BEFORE: + default: + // If an implementation does not recognize or otherwise distinguish an annotation position + // value, then it must be interpreted as if a position of 'before' were specified: + // https://www.w3.org/TR/2018/REC-ttml2-20181108/#style-attribute-textEmphasis + position = TextAnnotation.POSITION_BEFORE; + } + + Set matchingSingleStyles = Sets.intersection(SINGLE_STYLE_VALUES, nodes); + if (!matchingSingleStyles.isEmpty()) { + // If "none" or "auto" are found in the description, ignore the other style (fill, shape) + // attributes. + @MarkShape int markShape; + switch (matchingSingleStyles.iterator().next()) { + case TtmlNode.TEXT_EMPHASIS_NONE: + markShape = TextEmphasisSpan.MARK_SHAPE_NONE; + break; + case TtmlNode.TEXT_EMPHASIS_AUTO: + default: + markShape = MARK_SHAPE_AUTO; + } + // markFill is ignored when markShape is NONE or AUTO + return new TextEmphasis(markShape, TextEmphasisSpan.MARK_FILL_UNKNOWN, position); + } + + Set matchingFills = Sets.intersection(MARK_FILL_VALUES, nodes); + Set matchingShapes = Sets.intersection(MARK_SHAPE_VALUES, nodes); + if (matchingFills.isEmpty() && matchingShapes.isEmpty()) { + // If an implementation does not recognize or otherwise distinguish an emphasis style value, + // then it must be interpreted as if a style of auto were specified; as such, an + // implementation that supports text emphasis marks must minimally support the auto value. + // https://www.w3.org/TR/ttml2/#style-value-emphasis-style. + // + // markFill is ignored when markShape is NONE or AUTO. 
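// Illustration (hypothetical attribute values, not from the patch): "filled dot after"
// resolves to MARK_SHAPE_DOT / MARK_FILL_FILLED / POSITION_AFTER; "open" alone resolves to
// MARK_SHAPE_CIRCLE / MARK_FILL_OPEN / POSITION_OUTSIDE (shape defaults to circle, position
// to outside); a bare position such as "before" carries no style words at all, so it reaches
// this branch and yields MARK_SHAPE_AUTO / MARK_FILL_UNKNOWN / POSITION_BEFORE.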
+ return new TextEmphasis(MARK_SHAPE_AUTO, TextEmphasisSpan.MARK_FILL_UNKNOWN, position); + } + + @TextEmphasisSpan.MarkFill int markFill; + switch (Iterables.getFirst(matchingFills, TtmlNode.TEXT_EMPHASIS_MARK_FILLED)) { + case TtmlNode.TEXT_EMPHASIS_MARK_OPEN: + markFill = TextEmphasisSpan.MARK_FILL_OPEN; + break; + case TtmlNode.TEXT_EMPHASIS_MARK_FILLED: + default: + markFill = TextEmphasisSpan.MARK_FILL_FILLED; + } + + @MarkShape int markShape; + switch (Iterables.getFirst(matchingShapes, TtmlNode.TEXT_EMPHASIS_MARK_CIRCLE)) { + case TtmlNode.TEXT_EMPHASIS_MARK_DOT: + markShape = TextEmphasisSpan.MARK_SHAPE_DOT; + break; + case TtmlNode.TEXT_EMPHASIS_MARK_SESAME: + markShape = TextEmphasisSpan.MARK_SHAPE_SESAME; + break; + case TtmlNode.TEXT_EMPHASIS_MARK_CIRCLE: + default: + markShape = TextEmphasisSpan.MARK_SHAPE_CIRCLE; + } + + return new TextEmphasis(markShape, markFill, position); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlDecoder.java index 6dabcdd904..272fa7734f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlDecoder.java @@ -15,16 +15,23 @@ */ package com.google.android.exoplayer2.text.ttml; +import static java.lang.Math.max; +import static java.lang.Math.min; + import android.text.Layout; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.text.Cue; import com.google.android.exoplayer2.text.SimpleSubtitleDecoder; import com.google.android.exoplayer2.text.Subtitle; import com.google.android.exoplayer2.text.SubtitleDecoderException; +import com.google.android.exoplayer2.text.span.TextAnnotation; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.ColorParser; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.XmlPullParserUtil; +import com.google.common.base.Ascii; import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.ArrayDeque; @@ -32,6 +39,7 @@ import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.checkerframework.checker.nullness.qual.PolyNull; import org.xmlpull.v1.XmlPullParser; import org.xmlpull.v1.XmlPullParserException; import org.xmlpull.v1.XmlPullParserFactory; @@ -72,12 +80,14 @@ public final class TtmlDecoder extends SimpleSubtitleDecoder { private static final String ATTR_IMAGE = "backgroundImage"; private static final Pattern CLOCK_TIME = - Pattern.compile("^([0-9][0-9]+):([0-9][0-9]):([0-9][0-9])" - + "(?:(\\.[0-9]+)|:([0-9][0-9])(?:\\.([0-9]+))?)?$"); + Pattern.compile( + "^([0-9][0-9]+):([0-9][0-9]):([0-9][0-9])" + + "(?:(\\.[0-9]+)|:([0-9][0-9])(?:\\.([0-9]+))?)?$"); private static final Pattern OFFSET_TIME = Pattern.compile("^([0-9]+(?:\\.[0-9]+)?)(h|m|s|ms|f|t)$"); private static final Pattern FONT_SIZE = Pattern.compile("^(([0-9]*.)?[0-9]+)(px|em|%)$"); - private static final Pattern PERCENTAGE_COORDINATES = + static final Pattern SIGNED_PERCENTAGE = Pattern.compile("^([-+]?\\d+\\.?\\d*?)%$"); + static final Pattern PERCENTAGE_COORDINATES = Pattern.compile("^(\\d+\\.?\\d*?)% (\\d+\\.?\\d*?)%$"); private static final Pattern PIXEL_COORDINATES = Pattern.compile("^(\\d+\\.?\\d*?)px (\\d+\\.?\\d*?)px$"); @@ -103,25 +113,25 @@ public 
TtmlDecoder() { } @Override - protected Subtitle decode(byte[] bytes, int length, boolean reset) + protected Subtitle decode(byte[] data, int length, boolean reset) throws SubtitleDecoderException { try { XmlPullParser xmlParser = xmlParserFactory.newPullParser(); Map globalStyles = new HashMap<>(); Map regionMap = new HashMap<>(); Map imageMap = new HashMap<>(); - regionMap.put(TtmlNode.ANONYMOUS_REGION_ID, new TtmlRegion(null)); - ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes, 0, length); + regionMap.put(TtmlNode.ANONYMOUS_REGION_ID, new TtmlRegion(TtmlNode.ANONYMOUS_REGION_ID)); + ByteArrayInputStream inputStream = new ByteArrayInputStream(data, 0, length); xmlParser.setInput(inputStream, null); - TtmlSubtitle ttmlSubtitle = null; + @Nullable TtmlSubtitle ttmlSubtitle = null; ArrayDeque nodeStack = new ArrayDeque<>(); int unsupportedNodeDepth = 0; int eventType = xmlParser.getEventType(); FrameAndTickRate frameAndTickRate = DEFAULT_FRAME_AND_TICK_RATE; CellResolution cellResolution = DEFAULT_CELL_RESOLUTION; - TtsExtent ttsExtent = null; + @Nullable TtsExtent ttsExtent = null; while (eventType != XmlPullParser.END_DOCUMENT) { - TtmlNode parent = nodeStack.peek(); + @Nullable TtmlNode parent = nodeStack.peek(); if (unsupportedNodeDepth == 0) { String name = xmlParser.getName(); if (eventType == XmlPullParser.START_TAG) { @@ -149,10 +159,12 @@ protected Subtitle decode(byte[] bytes, int length, boolean reset) } } } else if (eventType == XmlPullParser.TEXT) { - parent.addChild(TtmlNode.buildTextNode(xmlParser.getText())); + Assertions.checkNotNull(parent).addChild(TtmlNode.buildTextNode(xmlParser.getText())); } else if (eventType == XmlPullParser.END_TAG) { if (xmlParser.getName().equals(TtmlNode.TAG_TT)) { - ttmlSubtitle = new TtmlSubtitle(nodeStack.peek(), globalStyles, regionMap, imageMap); + ttmlSubtitle = + new TtmlSubtitle( + Assertions.checkNotNull(nodeStack.peek()), globalStyles, regionMap, imageMap); } nodeStack.pop(); } @@ -166,7 +178,11 @@ protected Subtitle decode(byte[] bytes, int length, boolean reset) xmlParser.next(); eventType = xmlParser.getEventType(); } - return ttmlSubtitle; + if (ttmlSubtitle != null) { + return ttmlSubtitle; + } else { + throw new SubtitleDecoderException("No TTML subtitles found"); + } } catch (XmlPullParserException xppe) { throw new SubtitleDecoderException("Unable to decode source", xppe); } catch (IOException e) { @@ -174,7 +190,7 @@ protected Subtitle decode(byte[] bytes, int length, boolean reset) } } - private FrameAndTickRate parseFrameAndTickRates(XmlPullParser xmlParser) + private static FrameAndTickRate parseFrameAndTickRates(XmlPullParser xmlParser) throws SubtitleDecoderException { int frameRate = DEFAULT_FRAME_RATE; String frameRateString = xmlParser.getAttributeValue(TTP, "frameRate"); @@ -208,8 +224,8 @@ private FrameAndTickRate parseFrameAndTickRates(XmlPullParser xmlParser) return new FrameAndTickRate(frameRate * frameRateMultiplier, subFrameRate, tickRate); } - private CellResolution parseCellResolution(XmlPullParser xmlParser, CellResolution defaultValue) - throws SubtitleDecoderException { + private static CellResolution parseCellResolution( + XmlPullParser xmlParser, CellResolution defaultValue) throws SubtitleDecoderException { String cellResolution = xmlParser.getAttributeValue(TTP, "cellResolution"); if (cellResolution == null) { return defaultValue; @@ -221,8 +237,8 @@ private CellResolution parseCellResolution(XmlPullParser xmlParser, CellResoluti return defaultValue; } try { - int columns = 
Integer.parseInt(cellResolutionMatcher.group(1)); - int rows = Integer.parseInt(cellResolutionMatcher.group(2)); + int columns = Integer.parseInt(Assertions.checkNotNull(cellResolutionMatcher.group(1))); + int rows = Integer.parseInt(Assertions.checkNotNull(cellResolutionMatcher.group(2))); if (columns == 0 || rows == 0) { throw new SubtitleDecoderException("Invalid cell resolution " + columns + " " + rows); } @@ -233,7 +249,9 @@ private CellResolution parseCellResolution(XmlPullParser xmlParser, CellResoluti } } - private TtsExtent parseTtsExtent(XmlPullParser xmlParser) { + @Nullable + private static TtsExtent parseTtsExtent(XmlPullParser xmlParser) { + @Nullable String ttsExtent = XmlPullParserUtil.getAttributeValue(xmlParser, TtmlNode.ATTR_TTS_EXTENT); if (ttsExtent == null) { return null; @@ -245,8 +263,8 @@ private TtsExtent parseTtsExtent(XmlPullParser xmlParser) { return null; } try { - int width = Integer.parseInt(extentMatcher.group(1)); - int height = Integer.parseInt(extentMatcher.group(2)); + int width = Integer.parseInt(Assertions.checkNotNull(extentMatcher.group(1))); + int height = Integer.parseInt(Assertions.checkNotNull(extentMatcher.group(2))); return new TtsExtent(width, height); } catch (NumberFormatException e) { Log.w(TAG, "Ignoring malformed tts extent: " + ttsExtent); @@ -254,28 +272,30 @@ private TtsExtent parseTtsExtent(XmlPullParser xmlParser) { } } - private Map parseHeader( + private static Map parseHeader( XmlPullParser xmlParser, Map globalStyles, CellResolution cellResolution, - TtsExtent ttsExtent, + @Nullable TtsExtent ttsExtent, Map globalRegions, Map imageMap) throws IOException, XmlPullParserException { do { xmlParser.next(); if (XmlPullParserUtil.isStartTag(xmlParser, TtmlNode.TAG_STYLE)) { - String parentStyleId = XmlPullParserUtil.getAttributeValue(xmlParser, ATTR_STYLE); + @Nullable String parentStyleId = XmlPullParserUtil.getAttributeValue(xmlParser, ATTR_STYLE); TtmlStyle style = parseStyleAttributes(xmlParser, new TtmlStyle()); if (parentStyleId != null) { for (String id : parseStyleIds(parentStyleId)) { style.chain(globalStyles.get(id)); } } - if (style.getId() != null) { - globalStyles.put(style.getId(), style); + String styleId = style.getId(); + if (styleId != null) { + globalStyles.put(styleId, style); } } else if (XmlPullParserUtil.isStartTag(xmlParser, TtmlNode.TAG_REGION)) { + @Nullable TtmlRegion ttmlRegion = parseRegionAttributes(xmlParser, cellResolution, ttsExtent); if (ttmlRegion != null) { globalRegions.put(ttmlRegion.id, ttmlRegion); @@ -287,12 +307,12 @@ private Map parseHeader( return globalStyles; } - private void parseMetadata(XmlPullParser xmlParser, Map imageMap) + private static void parseMetadata(XmlPullParser xmlParser, Map imageMap) throws IOException, XmlPullParserException { do { xmlParser.next(); if (XmlPullParserUtil.isStartTag(xmlParser, TtmlNode.TAG_IMAGE)) { - String id = XmlPullParserUtil.getAttributeValue(xmlParser, "id"); + @Nullable String id = XmlPullParserUtil.getAttributeValue(xmlParser, "id"); if (id != null) { String encodedBitmapData = xmlParser.nextText(); imageMap.put(id, encodedBitmapData); @@ -309,9 +329,10 @@ private void parseMetadata(XmlPullParser xmlParser, Map imageMap * fractions. In case of missing tts:extent the pixel defined regions can't be parsed, and null is * returned. 
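* (Illustrative example, not from the patch: a region declared with {@code tts:origin="10% 80%"} and {@code tts:extent="80% 10%"} maps straight to fractional position/line/width/height values of 0.1, 0.8, 0.8 and 0.1, before any {@code tts:displayAlign} adjustment below.)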
*/ - private TtmlRegion parseRegionAttributes( - XmlPullParser xmlParser, CellResolution cellResolution, TtsExtent ttsExtent) { - String regionId = XmlPullParserUtil.getAttributeValue(xmlParser, TtmlNode.ATTR_ID); + @Nullable + private static TtmlRegion parseRegionAttributes( + XmlPullParser xmlParser, CellResolution cellResolution, @Nullable TtsExtent ttsExtent) { + @Nullable String regionId = XmlPullParserUtil.getAttributeValue(xmlParser, TtmlNode.ATTR_ID); if (regionId == null) { return null; } @@ -319,14 +340,16 @@ private TtmlRegion parseRegionAttributes( float position; float line; + @Nullable String regionOrigin = XmlPullParserUtil.getAttributeValue(xmlParser, TtmlNode.ATTR_TTS_ORIGIN); if (regionOrigin != null) { Matcher originPercentageMatcher = PERCENTAGE_COORDINATES.matcher(regionOrigin); Matcher originPixelMatcher = PIXEL_COORDINATES.matcher(regionOrigin); if (originPercentageMatcher.matches()) { try { - position = Float.parseFloat(originPercentageMatcher.group(1)) / 100f; - line = Float.parseFloat(originPercentageMatcher.group(2)) / 100f; + position = + Float.parseFloat(Assertions.checkNotNull(originPercentageMatcher.group(1))) / 100f; + line = Float.parseFloat(Assertions.checkNotNull(originPercentageMatcher.group(2))) / 100f; } catch (NumberFormatException e) { Log.w(TAG, "Ignoring region with malformed origin: " + regionOrigin); return null; @@ -337,8 +360,8 @@ private TtmlRegion parseRegionAttributes( return null; } try { - int width = Integer.parseInt(originPixelMatcher.group(1)); - int height = Integer.parseInt(originPixelMatcher.group(2)); + int width = Integer.parseInt(Assertions.checkNotNull(originPixelMatcher.group(1))); + int height = Integer.parseInt(Assertions.checkNotNull(originPixelMatcher.group(2))); // Convert pixel values to fractions. position = width / (float) ttsExtent.width; line = height / (float) ttsExtent.height; @@ -362,14 +385,17 @@ private TtmlRegion parseRegionAttributes( float width; float height; + @Nullable String regionExtent = XmlPullParserUtil.getAttributeValue(xmlParser, TtmlNode.ATTR_TTS_EXTENT); if (regionExtent != null) { Matcher extentPercentageMatcher = PERCENTAGE_COORDINATES.matcher(regionExtent); Matcher extentPixelMatcher = PIXEL_COORDINATES.matcher(regionExtent); if (extentPercentageMatcher.matches()) { try { - width = Float.parseFloat(extentPercentageMatcher.group(1)) / 100f; - height = Float.parseFloat(extentPercentageMatcher.group(2)) / 100f; + width = + Float.parseFloat(Assertions.checkNotNull(extentPercentageMatcher.group(1))) / 100f; + height = + Float.parseFloat(Assertions.checkNotNull(extentPercentageMatcher.group(2))) / 100f; } catch (NumberFormatException e) { Log.w(TAG, "Ignoring region with malformed extent: " + regionOrigin); return null; @@ -380,8 +406,8 @@ private TtmlRegion parseRegionAttributes( return null; } try { - int extentWidth = Integer.parseInt(extentPixelMatcher.group(1)); - int extentHeight = Integer.parseInt(extentPixelMatcher.group(2)); + int extentWidth = Integer.parseInt(Assertions.checkNotNull(extentPixelMatcher.group(1))); + int extentHeight = Integer.parseInt(Assertions.checkNotNull(extentPixelMatcher.group(2))); // Convert pixel values to fractions. 
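          // Illustrative example (assumed values, not from the patch): with a root tts:extent of
          // "640px 480px" (ttsExtent.width = 640, ttsExtent.height = 480) and a region tts:extent
          // of "320px 96px", the fractions below come out as width = 320 / 640 = 0.5 and
          // height = 96 / 480 = 0.2.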
width = extentWidth / (float) ttsExtent.width; height = extentHeight / (float) ttsExtent.height; @@ -404,10 +430,11 @@ private TtmlRegion parseRegionAttributes( } @Cue.AnchorType int lineAnchor = Cue.ANCHOR_TYPE_START; - String displayAlign = XmlPullParserUtil.getAttributeValue(xmlParser, - TtmlNode.ATTR_TTS_DISPLAY_ALIGN); + @Nullable + String displayAlign = + XmlPullParserUtil.getAttributeValue(xmlParser, TtmlNode.ATTR_TTS_DISPLAY_ALIGN); if (displayAlign != null) { - switch (Util.toLowerInvariant(displayAlign)) { + switch (Ascii.toLowerCase(displayAlign)) { case "center": lineAnchor = Cue.ANCHOR_TYPE_MIDDLE; line += height / 2; @@ -423,6 +450,26 @@ private TtmlRegion parseRegionAttributes( } float regionTextHeight = 1.0f / cellResolution.rows; + + @Cue.VerticalType int verticalType = Cue.TYPE_UNSET; + @Nullable + String writingDirection = + XmlPullParserUtil.getAttributeValue(xmlParser, TtmlNode.ATTR_TTS_WRITING_MODE); + if (writingDirection != null) { + switch (Ascii.toLowerCase(writingDirection)) { + // TODO: Support horizontal RTL modes. + case TtmlNode.VERTICAL: + case TtmlNode.VERTICAL_LR: + verticalType = Cue.VERTICAL_TYPE_LR; + break; + case TtmlNode.VERTICAL_RL: + verticalType = Cue.VERTICAL_TYPE_RL; + break; + default: + // ignore + break; + } + } return new TtmlRegion( regionId, position, @@ -432,15 +479,17 @@ private TtmlRegion parseRegionAttributes( width, height, /* textSizeType= */ Cue.TEXT_SIZE_TYPE_FRACTIONAL_IGNORE_PADDING, - /* textSize= */ regionTextHeight); + /* textSize= */ regionTextHeight, + verticalType); } - private String[] parseStyleIds(String parentStyleIds) { + private static String[] parseStyleIds(String parentStyleIds) { parentStyleIds = parentStyleIds.trim(); return parentStyleIds.isEmpty() ? new String[0] : Util.split(parentStyleIds, "\\s+"); } - private TtmlStyle parseStyleAttributes(XmlPullParser parser, TtmlStyle style) { + private static @PolyNull TtmlStyle parseStyleAttributes( + XmlPullParser parser, @PolyNull TtmlStyle style) { int attributeCount = parser.getAttributeCount(); for (int i = 0; i < attributeCount; i++) { String attributeValue = parser.getAttributeValue(i); @@ -478,34 +527,66 @@ private TtmlStyle parseStyleAttributes(XmlPullParser parser, TtmlStyle style) { } break; case TtmlNode.ATTR_TTS_FONT_WEIGHT: - style = createIfNull(style).setBold( - TtmlNode.BOLD.equalsIgnoreCase(attributeValue)); + style = createIfNull(style).setBold(TtmlNode.BOLD.equalsIgnoreCase(attributeValue)); break; case TtmlNode.ATTR_TTS_FONT_STYLE: - style = createIfNull(style).setItalic( - TtmlNode.ITALIC.equalsIgnoreCase(attributeValue)); + style = createIfNull(style).setItalic(TtmlNode.ITALIC.equalsIgnoreCase(attributeValue)); break; case TtmlNode.ATTR_TTS_TEXT_ALIGN: - switch (Util.toLowerInvariant(attributeValue)) { - case TtmlNode.LEFT: - style = createIfNull(style).setTextAlign(Layout.Alignment.ALIGN_NORMAL); + style = createIfNull(style).setTextAlign(parseAlignment(attributeValue)); + break; + case TtmlNode.ATTR_EBUTTS_MULTI_ROW_ALIGN: + style = createIfNull(style).setMultiRowAlign(parseAlignment(attributeValue)); + break; + case TtmlNode.ATTR_TTS_TEXT_COMBINE: + switch (Ascii.toLowerCase(attributeValue)) { + case TtmlNode.COMBINE_NONE: + style = createIfNull(style).setTextCombine(false); + break; + case TtmlNode.COMBINE_ALL: + style = createIfNull(style).setTextCombine(true); + break; + default: + // ignore break; - case TtmlNode.START: - style = createIfNull(style).setTextAlign(Layout.Alignment.ALIGN_NORMAL); + } + break; + case TtmlNode.ATTR_TTS_RUBY: + 
switch (Ascii.toLowerCase(attributeValue)) { + case TtmlNode.RUBY_CONTAINER: + style = createIfNull(style).setRubyType(TtmlStyle.RUBY_TYPE_CONTAINER); + break; + case TtmlNode.RUBY_BASE: + case TtmlNode.RUBY_BASE_CONTAINER: + style = createIfNull(style).setRubyType(TtmlStyle.RUBY_TYPE_BASE); break; - case TtmlNode.RIGHT: - style = createIfNull(style).setTextAlign(Layout.Alignment.ALIGN_OPPOSITE); + case TtmlNode.RUBY_TEXT: + case TtmlNode.RUBY_TEXT_CONTAINER: + style = createIfNull(style).setRubyType(TtmlStyle.RUBY_TYPE_TEXT); break; - case TtmlNode.END: - style = createIfNull(style).setTextAlign(Layout.Alignment.ALIGN_OPPOSITE); + case TtmlNode.RUBY_DELIMITER: + style = createIfNull(style).setRubyType(TtmlStyle.RUBY_TYPE_DELIMITER); break; - case TtmlNode.CENTER: - style = createIfNull(style).setTextAlign(Layout.Alignment.ALIGN_CENTER); + default: + // ignore + break; + } + break; + case TtmlNode.ATTR_TTS_RUBY_POSITION: + switch (Ascii.toLowerCase(attributeValue)) { + case TtmlNode.ANNOTATION_POSITION_BEFORE: + style = createIfNull(style).setRubyPosition(TextAnnotation.POSITION_BEFORE); + break; + case TtmlNode.ANNOTATION_POSITION_AFTER: + style = createIfNull(style).setRubyPosition(TextAnnotation.POSITION_AFTER); + break; + default: + // ignore break; } break; case TtmlNode.ATTR_TTS_TEXT_DECORATION: - switch (Util.toLowerInvariant(attributeValue)) { + switch (Ascii.toLowerCase(attributeValue)) { case TtmlNode.LINETHROUGH: style = createIfNull(style).setLinethrough(true); break; @@ -520,6 +601,12 @@ private TtmlStyle parseStyleAttributes(XmlPullParser parser, TtmlStyle style) { break; } break; + case TtmlNode.ATTR_TTS_TEXT_EMPHASIS: + style = createIfNull(style).setTextEmphasis(TextEmphasis.parse(attributeValue)); + break; + case TtmlNode.ATTR_TTS_SHEAR: + style = createIfNull(style).setShearPercentage(parseShear(attributeValue)); + break; default: // ignore break; @@ -528,21 +615,40 @@ private TtmlStyle parseStyleAttributes(XmlPullParser parser, TtmlStyle style) { return style; } - private TtmlStyle createIfNull(TtmlStyle style) { + private static TtmlStyle createIfNull(@Nullable TtmlStyle style) { return style == null ? 
new TtmlStyle() : style; } - private TtmlNode parseNode(XmlPullParser parser, TtmlNode parent, - Map regionMap, FrameAndTickRate frameAndTickRate) + @Nullable + private static Layout.Alignment parseAlignment(String alignment) { + switch (Ascii.toLowerCase(alignment)) { + case TtmlNode.LEFT: + case TtmlNode.START: + return Layout.Alignment.ALIGN_NORMAL; + case TtmlNode.RIGHT: + case TtmlNode.END: + return Layout.Alignment.ALIGN_OPPOSITE; + case TtmlNode.CENTER: + return Layout.Alignment.ALIGN_CENTER; + default: + return null; + } + } + + private static TtmlNode parseNode( + XmlPullParser parser, + @Nullable TtmlNode parent, + Map regionMap, + FrameAndTickRate frameAndTickRate) throws SubtitleDecoderException { long duration = C.TIME_UNSET; long startTime = C.TIME_UNSET; long endTime = C.TIME_UNSET; String regionId = TtmlNode.ANONYMOUS_REGION_ID; - String imageId = null; - String[] styleIds = null; + @Nullable String imageId = null; + @Nullable String[] styleIds = null; int attributeCount = parser.getAttributeCount(); - TtmlStyle style = parseStyleAttributes(parser, null); + @Nullable TtmlStyle style = parseStyleAttributes(parser, null); for (int i = 0; i < attributeCount; i++) { String attr = parser.getAttributeName(i); String value = parser.getAttributeValue(i); @@ -599,8 +705,9 @@ private TtmlNode parseNode(XmlPullParser parser, TtmlNode parent, endTime = parent.endTimeUs; } } + return TtmlNode.buildNode( - parser.getName(), startTime, endTime, style, styleIds, regionId, imageId); + parser.getName(), startTime, endTime, style, styleIds, regionId, imageId, parent); } private static boolean isSupportedTag(String tag) { @@ -621,23 +728,25 @@ private static boolean isSupportedTag(String tag) { || tag.equals(TtmlNode.TAG_INFORMATION); } - private static void parseFontSize(String expression, TtmlStyle out) throws - SubtitleDecoderException { + private static void parseFontSize(String expression, TtmlStyle out) + throws SubtitleDecoderException { String[] expressions = Util.split(expression, "\\s+"); Matcher matcher; if (expressions.length == 1) { matcher = FONT_SIZE.matcher(expression); - } else if (expressions.length == 2){ + } else if (expressions.length == 2) { matcher = FONT_SIZE.matcher(expressions[1]); - Log.w(TAG, "Multiple values in fontSize attribute. Picking the second value for vertical font" - + " size and ignoring the first."); + Log.w( + TAG, + "Multiple values in fontSize attribute. Picking the second value for vertical font" + + " size and ignoring the first."); } else { - throw new SubtitleDecoderException("Invalid number of entries for fontSize: " - + expressions.length + "."); + throw new SubtitleDecoderException( + "Invalid number of entries for fontSize: " + expressions.length + "."); } if (matcher.matches()) { - String unit = matcher.group(3); + String unit = Assertions.checkNotNull(matcher.group(3)); switch (unit) { case "px": out.setFontSizeUnit(TtmlStyle.FONT_SIZE_UNIT_PIXEL); @@ -651,17 +760,42 @@ private static void parseFontSize(String expression, TtmlStyle out) throws default: throw new SubtitleDecoderException("Invalid unit for fontSize: '" + unit + "'."); } - out.setFontSize(Float.valueOf(matcher.group(1))); + out.setFontSize(Float.parseFloat(Assertions.checkNotNull(matcher.group(1)))); } else { throw new SubtitleDecoderException("Invalid expression for fontSize: '" + expression + "'."); } } + /** + * Returns the parsed shear percentage (between -100.0 and +100.0 inclusive), or {@link + * TtmlStyle#UNSPECIFIED_SHEAR} if parsing failed. 
+ */ + private static float parseShear(String expression) { + Matcher matcher = SIGNED_PERCENTAGE.matcher(expression); + if (!matcher.matches()) { + Log.w(TAG, "Invalid value for shear: " + expression); + return TtmlStyle.UNSPECIFIED_SHEAR; + } + try { + String percentage = Assertions.checkNotNull(matcher.group(1)); + float value = Float.parseFloat(percentage); + // https://www.w3.org/TR/2018/REC-ttml2-20181108/#semantics-style-procedures-shear + // If the absolute value of the specified percentage is greater than 100%, then it must be + // interpreted as if 100% were specified with the appropriate sign. + value = max(-100f, value); + value = min(100f, value); + return value; + } catch (NumberFormatException e) { + Log.w(TAG, "Failed to parse shear: " + expression, e); + return TtmlStyle.UNSPECIFIED_SHEAR; + } + } + /** * Parses a time expression, returning the parsed timestamp. - *

- * For the format of a time expression, see:
- * timeExpression
+ *
+ * <p>

      For the format of a time expression, see: timeExpression * * @param time A string that includes the time expression. * @param frameAndTickRate The effective frame and tick rates of the stream. @@ -672,29 +806,31 @@ private static long parseTimeExpression(String time, FrameAndTickRate frameAndTi throws SubtitleDecoderException { Matcher matcher = CLOCK_TIME.matcher(time); if (matcher.matches()) { - String hours = matcher.group(1); + String hours = Assertions.checkNotNull(matcher.group(1)); double durationSeconds = Long.parseLong(hours) * 3600; - String minutes = matcher.group(2); + String minutes = Assertions.checkNotNull(matcher.group(2)); durationSeconds += Long.parseLong(minutes) * 60; - String seconds = matcher.group(3); + String seconds = Assertions.checkNotNull(matcher.group(3)); durationSeconds += Long.parseLong(seconds); - String fraction = matcher.group(4); + @Nullable String fraction = matcher.group(4); durationSeconds += (fraction != null) ? Double.parseDouble(fraction) : 0; - String frames = matcher.group(5); - durationSeconds += (frames != null) - ? Long.parseLong(frames) / frameAndTickRate.effectiveFrameRate : 0; - String subframes = matcher.group(6); - durationSeconds += (subframes != null) - ? ((double) Long.parseLong(subframes)) / frameAndTickRate.subFrameRate - / frameAndTickRate.effectiveFrameRate - : 0; + @Nullable String frames = matcher.group(5); + durationSeconds += + (frames != null) ? Long.parseLong(frames) / frameAndTickRate.effectiveFrameRate : 0; + @Nullable String subframes = matcher.group(6); + durationSeconds += + (subframes != null) + ? ((double) Long.parseLong(subframes)) + / frameAndTickRate.subFrameRate + / frameAndTickRate.effectiveFrameRate + : 0; return (long) (durationSeconds * C.MICROS_PER_SECOND); } matcher = OFFSET_TIME.matcher(time); if (matcher.matches()) { - String timeValue = matcher.group(1); + String timeValue = Assertions.checkNotNull(matcher.group(1)); double offsetSeconds = Double.parseDouble(timeValue); - String unit = matcher.group(2); + String unit = Assertions.checkNotNull(matcher.group(2)); switch (unit) { case "h": offsetSeconds *= 3600; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlNode.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlNode.java index 3365749e1a..0677ef1e27 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlNode.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlNode.java @@ -28,13 +28,11 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import java.util.TreeMap; import java.util.TreeSet; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * A package internal representation of TTML node. - */ +/** A package internal representation of TTML node. 
*/ /* package */ final class TtmlNode { public static final String TAG_TT = "tt"; @@ -64,9 +62,30 @@ public static final String ATTR_TTS_FONT_FAMILY = "fontFamily"; public static final String ATTR_TTS_FONT_WEIGHT = "fontWeight"; public static final String ATTR_TTS_COLOR = "color"; + public static final String ATTR_TTS_RUBY = "ruby"; + public static final String ATTR_TTS_RUBY_POSITION = "rubyPosition"; public static final String ATTR_TTS_TEXT_DECORATION = "textDecoration"; public static final String ATTR_TTS_TEXT_ALIGN = "textAlign"; - + public static final String ATTR_TTS_TEXT_COMBINE = "textCombine"; + public static final String ATTR_TTS_TEXT_EMPHASIS = "textEmphasis"; + public static final String ATTR_TTS_WRITING_MODE = "writingMode"; + public static final String ATTR_TTS_SHEAR = "shear"; + public static final String ATTR_EBUTTS_MULTI_ROW_ALIGN = "multiRowAlign"; + + // Values for ruby + public static final String RUBY_CONTAINER = "container"; + public static final String RUBY_BASE = "base"; + public static final String RUBY_BASE_CONTAINER = "baseContainer"; + public static final String RUBY_TEXT = "text"; + public static final String RUBY_TEXT_CONTAINER = "textContainer"; + public static final String RUBY_DELIMITER = "delimiter"; + + // Values for text annotation (i.e. ruby, text emphasis) position + public static final String ANNOTATION_POSITION_BEFORE = "before"; + public static final String ANNOTATION_POSITION_AFTER = "after"; + public static final String ANNOTATION_POSITION_OUTSIDE = "outside"; + + // Values for textDecoration public static final String LINETHROUGH = "linethrough"; public static final String NO_LINETHROUGH = "nolinethrough"; public static final String UNDERLINE = "underline"; @@ -74,12 +93,31 @@ public static final String ITALIC = "italic"; public static final String BOLD = "bold"; + // Values for textAlign public static final String LEFT = "left"; public static final String CENTER = "center"; public static final String RIGHT = "right"; public static final String START = "start"; public static final String END = "end"; + // Values for textCombine + public static final String COMBINE_NONE = "none"; + public static final String COMBINE_ALL = "all"; + + // Values for writingMode + public static final String VERTICAL = "tb"; + public static final String VERTICAL_LR = "tblr"; + public static final String VERTICAL_RL = "tbrl"; + + // Values for textEmphasis + public static final String TEXT_EMPHASIS_NONE = "none"; + public static final String TEXT_EMPHASIS_AUTO = "auto"; + public static final String TEXT_EMPHASIS_MARK_DOT = "dot"; + public static final String TEXT_EMPHASIS_MARK_SESAME = "sesame"; + public static final String TEXT_EMPHASIS_MARK_CIRCLE = "circle"; + public static final String TEXT_EMPHASIS_MARK_FILLED = "filled"; + public static final String TEXT_EMPHASIS_MARK_OPEN = "open"; + @Nullable public final String tag; @Nullable public final String text; public final boolean isTextNode; @@ -89,11 +127,12 @@ @Nullable private final String[] styleIds; public final String regionId; @Nullable public final String imageId; + @Nullable public final TtmlNode parent; private final HashMap nodeStartsByRegion; private final HashMap nodeEndsByRegion; - private List children; + private @MonotonicNonNull List children; public static TtmlNode buildTextNode(String text) { return new TtmlNode( @@ -104,7 +143,8 @@ public static TtmlNode buildTextNode(String text) { /* style= */ null, /* styleIds= */ null, ANONYMOUS_REGION_ID, - /* imageId= */ null); + /* imageId= */ null, + /* 
parent= */ null); } public static TtmlNode buildNode( @@ -114,9 +154,10 @@ public static TtmlNode buildNode( @Nullable TtmlStyle style, @Nullable String[] styleIds, String regionId, - @Nullable String imageId) { + @Nullable String imageId, + @Nullable TtmlNode parent) { return new TtmlNode( - tag, /* text= */ null, startTimeUs, endTimeUs, style, styleIds, regionId, imageId); + tag, /* text= */ null, startTimeUs, endTimeUs, style, styleIds, regionId, imageId, parent); } private TtmlNode( @@ -127,7 +168,8 @@ private TtmlNode( @Nullable TtmlStyle style, @Nullable String[] styleIds, String regionId, - @Nullable String imageId) { + @Nullable String imageId, + @Nullable TtmlNode parent) { this.tag = tag; this.text = text; this.imageId = imageId; @@ -137,6 +179,7 @@ private TtmlNode( this.startTimeUs = startTimeUs; this.endTimeUs = endTimeUs; this.regionId = Assertions.checkNotNull(regionId); + this.parent = parent; nodeStartsByRegion = new HashMap<>(); nodeEndsByRegion = new HashMap<>(); } @@ -196,6 +239,7 @@ private void getEventTimes(TreeSet out, boolean descendsPNode) { } } + @Nullable public String[] getStyleIds() { return styleIds; } @@ -209,15 +253,15 @@ public List getCues( List> regionImageOutputs = new ArrayList<>(); traverseForImage(timeUs, regionId, regionImageOutputs); - TreeMap regionTextOutputs = new TreeMap<>(); + TreeMap regionTextOutputs = new TreeMap<>(); traverseForText(timeUs, false, regionId, regionTextOutputs); - traverseForStyle(timeUs, globalStyles, regionTextOutputs); + traverseForStyle(timeUs, globalStyles, regionMap, regionId, regionTextOutputs); List cues = new ArrayList<>(); // Create image based cues. for (Pair regionImagePair : regionImageOutputs) { - String encodedBitmapData = imageMap.get(regionImagePair.second); + @Nullable String encodedBitmapData = imageMap.get(regionImagePair.second); if (encodedBitmapData == null) { // Image reference points to an invalid image. Do nothing. continue; @@ -225,34 +269,33 @@ public List getCues( byte[] bitmapData = Base64.decode(encodedBitmapData, Base64.DEFAULT); Bitmap bitmap = BitmapFactory.decodeByteArray(bitmapData, /* offset= */ 0, bitmapData.length); - TtmlRegion region = regionMap.get(regionImagePair.first); + TtmlRegion region = Assertions.checkNotNull(regionMap.get(regionImagePair.first)); cues.add( - new Cue( - bitmap, - region.position, - Cue.ANCHOR_TYPE_START, - region.line, - region.lineAnchor, - region.width, - region.height)); + new Cue.Builder() + .setBitmap(bitmap) + .setPosition(region.position) + .setPositionAnchor(Cue.ANCHOR_TYPE_START) + .setLine(region.line, Cue.LINE_TYPE_FRACTION) + .setLineAnchor(region.lineAnchor) + .setSize(region.width) + .setBitmapHeight(region.height) + .setVerticalType(region.verticalType) + .build()); } // Create text based cues. 
- for (Entry entry : regionTextOutputs.entrySet()) { - TtmlRegion region = regionMap.get(entry.getKey()); - cues.add( - new Cue( - cleanUpText(entry.getValue()), - /* textAlignment= */ null, - region.line, - region.lineType, - region.lineAnchor, - region.position, - /* positionAnchor= */ Cue.TYPE_UNSET, - region.width, - region.textSizeType, - region.textSize)); + for (Map.Entry entry : regionTextOutputs.entrySet()) { + TtmlRegion region = Assertions.checkNotNull(regionMap.get(entry.getKey())); + Cue.Builder regionOutput = entry.getValue(); + cleanUpText((SpannableStringBuilder) Assertions.checkNotNull(regionOutput.getText())); + regionOutput.setLine(region.line, region.lineType); + regionOutput.setLineAnchor(region.lineAnchor); + regionOutput.setPosition(region.position); + regionOutput.setSize(region.width); + regionOutput.setTextSize(region.textSize, region.textSizeType); + regionOutput.setVerticalType(region.verticalType); + cues.add(regionOutput.build()); } return cues; @@ -274,7 +317,7 @@ private void traverseForText( long timeUs, boolean descendsPNode, String inheritedRegion, - Map regionOutputs) { + Map regionOutputs) { nodeStartsByRegion.clear(); nodeEndsByRegion.clear(); if (TAG_METADATA.equals(tag)) { @@ -285,75 +328,115 @@ private void traverseForText( String resolvedRegionId = ANONYMOUS_REGION_ID.equals(regionId) ? inheritedRegion : regionId; if (isTextNode && descendsPNode) { - getRegionOutput(resolvedRegionId, regionOutputs).append(text); + getRegionOutputText(resolvedRegionId, regionOutputs).append(Assertions.checkNotNull(text)); } else if (TAG_BR.equals(tag) && descendsPNode) { - getRegionOutput(resolvedRegionId, regionOutputs).append('\n'); + getRegionOutputText(resolvedRegionId, regionOutputs).append('\n'); } else if (isActive(timeUs)) { // This is a container node, which can contain zero or more children. 
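      // For illustration (assumed lengths): if the Cue.Builder text for region "r1" is 5
      // characters long before this node's children are appended and 12 characters long
      // afterwards, nodeStartsByRegion stores 5 and nodeEndsByRegion stores 12, and
      // traverseForStyle later applies this node's resolved style to characters [5, 12) of that
      // region's text.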
- for (Entry entry : regionOutputs.entrySet()) { - nodeStartsByRegion.put(entry.getKey(), entry.getValue().length()); + for (Map.Entry entry : regionOutputs.entrySet()) { + nodeStartsByRegion.put( + entry.getKey(), Assertions.checkNotNull(entry.getValue().getText()).length()); } boolean isPNode = TAG_P.equals(tag); for (int i = 0; i < getChildCount(); i++) { - getChild(i).traverseForText(timeUs, descendsPNode || isPNode, resolvedRegionId, - regionOutputs); + getChild(i) + .traverseForText(timeUs, descendsPNode || isPNode, resolvedRegionId, regionOutputs); } if (isPNode) { - TtmlRenderUtil.endParagraph(getRegionOutput(resolvedRegionId, regionOutputs)); + TtmlRenderUtil.endParagraph(getRegionOutputText(resolvedRegionId, regionOutputs)); } - for (Entry entry : regionOutputs.entrySet()) { - nodeEndsByRegion.put(entry.getKey(), entry.getValue().length()); + for (Map.Entry entry : regionOutputs.entrySet()) { + nodeEndsByRegion.put( + entry.getKey(), Assertions.checkNotNull(entry.getValue().getText()).length()); } } } - private static SpannableStringBuilder getRegionOutput( - String resolvedRegionId, Map regionOutputs) { + private static SpannableStringBuilder getRegionOutputText( + String resolvedRegionId, Map regionOutputs) { if (!regionOutputs.containsKey(resolvedRegionId)) { - regionOutputs.put(resolvedRegionId, new SpannableStringBuilder()); + Cue.Builder regionOutput = new Cue.Builder(); + regionOutput.setText(new SpannableStringBuilder()); + regionOutputs.put(resolvedRegionId, regionOutput); } - return regionOutputs.get(resolvedRegionId); + return (SpannableStringBuilder) + Assertions.checkNotNull(regionOutputs.get(resolvedRegionId).getText()); } private void traverseForStyle( long timeUs, Map globalStyles, - Map regionOutputs) { + Map regionMaps, + String inheritedRegion, + Map regionOutputs) { if (!isActive(timeUs)) { return; } - for (Entry entry : nodeEndsByRegion.entrySet()) { + String resolvedRegionId = ANONYMOUS_REGION_ID.equals(regionId) ? inheritedRegion : regionId; + for (Map.Entry entry : nodeEndsByRegion.entrySet()) { String regionId = entry.getKey(); int start = nodeStartsByRegion.containsKey(regionId) ? 
nodeStartsByRegion.get(regionId) : 0; int end = entry.getValue(); if (start != end) { - SpannableStringBuilder regionOutput = regionOutputs.get(regionId); - applyStyleToOutput(globalStyles, regionOutput, start, end); + Cue.Builder regionOutput = Assertions.checkNotNull(regionOutputs.get(regionId)); + @Cue.VerticalType + int verticalType = Assertions.checkNotNull(regionMaps.get(resolvedRegionId)).verticalType; + applyStyleToOutput(globalStyles, regionOutput, start, end, verticalType); } } for (int i = 0; i < getChildCount(); ++i) { - getChild(i).traverseForStyle(timeUs, globalStyles, regionOutputs); + getChild(i) + .traverseForStyle(timeUs, globalStyles, regionMaps, resolvedRegionId, regionOutputs); } } private void applyStyleToOutput( Map globalStyles, - SpannableStringBuilder regionOutput, + Cue.Builder regionOutput, int start, - int end) { - TtmlStyle resolvedStyle = TtmlRenderUtil.resolveStyle(style, styleIds, globalStyles); + int end, + @Cue.VerticalType int verticalType) { + @Nullable TtmlStyle resolvedStyle = TtmlRenderUtil.resolveStyle(style, styleIds, globalStyles); + @Nullable SpannableStringBuilder text = (SpannableStringBuilder) regionOutput.getText(); + if (text == null) { + text = new SpannableStringBuilder(); + regionOutput.setText(text); + } if (resolvedStyle != null) { - TtmlRenderUtil.applyStylesToSpan(regionOutput, start, end, resolvedStyle); + TtmlRenderUtil.applyStylesToSpan( + text, start, end, resolvedStyle, parent, globalStyles, verticalType); + if (TAG_P.equals(tag)) { + if (resolvedStyle.getShearPercentage() != TtmlStyle.UNSPECIFIED_SHEAR) { + // Shear style should only be applied to P nodes + // https://www.w3.org/TR/2018/REC-ttml2-20181108/#style-attribute-shear + // The spec doesn't specify the coordinate system to use for block shear + // however the spec shows examples of how different values are expected to be rendered. + // See: https://www.w3.org/TR/2018/REC-ttml2-20181108/#style-attribute-shear + // https://www.w3.org/TR/2018/REC-ttml2-20181108/#style-attribute-fontShear + // This maps the shear percentage to shear angle in graphics coordinates + regionOutput.setShearDegrees((resolvedStyle.getShearPercentage() * -90) / 100); + } + if (resolvedStyle.getTextAlign() != null) { + regionOutput.setTextAlignment(resolvedStyle.getTextAlign()); + } + if (resolvedStyle.getMultiRowAlign() != null) { + regionOutput.setMultiRowAlignment(resolvedStyle.getMultiRowAlign()); + } + } } } - private SpannableStringBuilder cleanUpText(SpannableStringBuilder builder) { + private static void cleanUpText(SpannableStringBuilder builder) { // Having joined the text elements, we need to do some final cleanup on the result. - // 1. Collapse multiple consecutive spaces into a single space. - int builderLength = builder.length(); - for (int i = 0; i < builderLength; i++) { + // Remove any text covered by a DeleteTextSpan (e.g. ruby text). + DeleteTextSpan[] deleteTextSpans = builder.getSpans(0, builder.length(), DeleteTextSpan.class); + for (DeleteTextSpan deleteTextSpan : deleteTextSpans) { + builder.replace(builder.getSpanStart(deleteTextSpan), builder.getSpanEnd(deleteTextSpan), ""); + } + // Collapse multiple consecutive spaces into a single space. 
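      // Illustrative trace (assumed input): "  Foo   bar \n baz \n" becomes " Foo bar \n baz \n"
      // after collapsing space runs, then "Foo bar \nbaz \n" once the leading space and spaces
      // after '\n' are dropped, then "Foo bar\nbaz\n" once spaces before '\n' are dropped, and
      // finally "Foo bar\nbaz" after the trailing newline is trimmed.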
+ for (int i = 0; i < builder.length(); i++) { if (builder.charAt(i) == ' ') { int j = i + 1; while (j < builder.length() && builder.charAt(j) == ' ') { @@ -362,38 +445,30 @@ private SpannableStringBuilder cleanUpText(SpannableStringBuilder builder) { int spacesToDelete = j - (i + 1); if (spacesToDelete > 0) { builder.delete(i, i + spacesToDelete); - builderLength -= spacesToDelete; } } } - // 2. Remove any spaces from the start of each line. - if (builderLength > 0 && builder.charAt(0) == ' ') { + // Remove any spaces from the start of each line. + if (builder.length() > 0 && builder.charAt(0) == ' ') { builder.delete(0, 1); - builderLength--; } - for (int i = 0; i < builderLength - 1; i++) { + for (int i = 0; i < builder.length() - 1; i++) { if (builder.charAt(i) == '\n' && builder.charAt(i + 1) == ' ') { builder.delete(i + 1, i + 2); - builderLength--; } } - // 3. Remove any spaces from the end of each line. - if (builderLength > 0 && builder.charAt(builderLength - 1) == ' ') { - builder.delete(builderLength - 1, builderLength); - builderLength--; + // Remove any spaces from the end of each line. + if (builder.length() > 0 && builder.charAt(builder.length() - 1) == ' ') { + builder.delete(builder.length() - 1, builder.length()); } - for (int i = 0; i < builderLength - 1; i++) { + for (int i = 0; i < builder.length() - 1; i++) { if (builder.charAt(i) == ' ' && builder.charAt(i + 1) == '\n') { builder.delete(i, i + 1); - builderLength--; } } - // 4. Trim a trailing newline, if there is one. - if (builderLength > 0 && builder.charAt(builderLength - 1) == '\n') { - builder.delete(builderLength - 1, builderLength); - /*builderLength--;*/ + // Trim a trailing newline, if there is one. + if (builder.length() > 0 && builder.charAt(builder.length() - 1) == '\n') { + builder.delete(builder.length() - 1, builder.length()); } - return builder; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlRegion.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlRegion.java index 3cbc25d4b2..57b5e1ec3a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlRegion.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlRegion.java @@ -17,9 +17,7 @@ import com.google.android.exoplayer2.text.Cue; -/** - * Represents a TTML Region. - */ +/** Represents a TTML Region. 
*/ /* package */ final class TtmlRegion { public final String id; @@ -31,6 +29,7 @@ public final float height; public final @Cue.TextSizeType int textSizeType; public final float textSize; + public final @Cue.VerticalType int verticalType; public TtmlRegion(String id) { this( @@ -42,7 +41,8 @@ public TtmlRegion(String id) { /* width= */ Cue.DIMEN_UNSET, /* height= */ Cue.DIMEN_UNSET, /* textSizeType= */ Cue.TYPE_UNSET, - /* textSize= */ Cue.DIMEN_UNSET); + /* textSize= */ Cue.DIMEN_UNSET, + /* verticalType= */ Cue.TYPE_UNSET); } public TtmlRegion( @@ -54,7 +54,8 @@ public TtmlRegion( float width, float height, int textSizeType, - float textSize) { + float textSize, + @Cue.VerticalType int verticalType) { this.id = id; this.position = position; this.line = line; @@ -64,6 +65,6 @@ public TtmlRegion( this.height = height; this.textSizeType = textSizeType; this.textSize = textSize; + this.verticalType = verticalType; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlRenderUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlRenderUtil.java index 21333081c6..8d2724622e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlRenderUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlRenderUtil.java @@ -15,11 +15,12 @@ */ package com.google.android.exoplayer2.text.ttml; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + import android.text.Spannable; import android.text.SpannableStringBuilder; import android.text.Spanned; import android.text.style.AbsoluteSizeSpan; -import android.text.style.AlignmentSpan; import android.text.style.BackgroundColorSpan; import android.text.style.ForegroundColorSpan; import android.text.style.RelativeSizeSpan; @@ -27,48 +28,70 @@ import android.text.style.StyleSpan; import android.text.style.TypefaceSpan; import android.text.style.UnderlineSpan; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.text.Cue; +import com.google.android.exoplayer2.text.span.HorizontalTextInVerticalContextSpan; +import com.google.android.exoplayer2.text.span.RubySpan; +import com.google.android.exoplayer2.text.span.SpanUtil; +import com.google.android.exoplayer2.text.span.TextAnnotation; +import com.google.android.exoplayer2.text.span.TextEmphasisSpan; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.Util; +import java.util.ArrayDeque; +import java.util.Deque; import java.util.Map; -/** - * Package internal utility class to render styled TtmlNodes. - */ +/** Package internal utility class to render styled TtmlNodes. */ /* package */ final class TtmlRenderUtil { - public static TtmlStyle resolveStyle(TtmlStyle style, String[] styleIds, - Map globalStyles) { - if (style == null && styleIds == null) { - // No styles at all. - return null; - } else if (style == null && styleIds.length == 1) { - // Only one single referential style present. - return globalStyles.get(styleIds[0]); - } else if (style == null && styleIds.length > 1) { - // Only multiple referential styles present. - TtmlStyle chainedStyle = new TtmlStyle(); - for (String id : styleIds) { - chainedStyle.chain(globalStyles.get(id)); + private static final String TAG = "TtmlRenderUtil"; + + @Nullable + public static TtmlStyle resolveStyle( + @Nullable TtmlStyle style, @Nullable String[] styleIds, Map globalStyles) { + if (style == null) { + if (styleIds == null) { + // No styles at all. 
+ return null; + } else if (styleIds.length == 1) { + // Only one single referential style present. + return globalStyles.get(styleIds[0]); + } else if (styleIds.length > 1) { + // Only multiple referential styles present. + TtmlStyle chainedStyle = new TtmlStyle(); + for (String id : styleIds) { + chainedStyle.chain(globalStyles.get(id)); + } + return chainedStyle; } - return chainedStyle; - } else if (style != null && styleIds != null && styleIds.length == 1) { - // Merge a single referential style into inline style. - return style.chain(globalStyles.get(styleIds[0])); - } else if (style != null && styleIds != null && styleIds.length > 1) { - // Merge multiple referential styles into inline style. - for (String id : styleIds) { - style.chain(globalStyles.get(id)); + } else /* style != null */ { + if (styleIds != null && styleIds.length == 1) { + // Merge a single referential style into inline style. + return style.chain(globalStyles.get(styleIds[0])); + } else if (styleIds != null && styleIds.length > 1) { + // Merge multiple referential styles into inline style. + for (String id : styleIds) { + style.chain(globalStyles.get(id)); + } + return style; } - return style; } // Only inline styles available. return style; } - public static void applyStylesToSpan(SpannableStringBuilder builder, - int start, int end, TtmlStyle style) { + public static void applyStylesToSpan( + Spannable builder, + int start, + int end, + TtmlStyle style, + @Nullable TtmlNode parent, + Map globalStyles, + @Cue.VerticalType int verticalType) { if (style.getStyle() != TtmlStyle.UNSPECIFIED) { - builder.setSpan(new StyleSpan(style.getStyle()), start, end, - Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + builder.setSpan( + new StyleSpan(style.getStyle()), start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } if (style.isLinethrough()) { builder.setSpan(new StrikethroughSpan(), start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); @@ -77,32 +100,150 @@ public static void applyStylesToSpan(SpannableStringBuilder builder, builder.setSpan(new UnderlineSpan(), start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } if (style.hasFontColor()) { - builder.setSpan(new ForegroundColorSpan(style.getFontColor()), start, end, - Spannable.SPAN_EXCLUSIVE_EXCLUSIVE); + SpanUtil.addOrReplaceSpan( + builder, + new ForegroundColorSpan(style.getFontColor()), + start, + end, + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } if (style.hasBackgroundColor()) { - builder.setSpan(new BackgroundColorSpan(style.getBackgroundColor()), start, end, - Spannable.SPAN_EXCLUSIVE_EXCLUSIVE); + SpanUtil.addOrReplaceSpan( + builder, + new BackgroundColorSpan(style.getBackgroundColor()), + start, + end, + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } if (style.getFontFamily() != null) { - builder.setSpan(new TypefaceSpan(style.getFontFamily()), start, end, + SpanUtil.addOrReplaceSpan( + builder, + new TypefaceSpan(style.getFontFamily()), + start, + end, + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + } + if (style.getTextEmphasis() != null) { + TextEmphasis textEmphasis = checkNotNull(style.getTextEmphasis()); + @TextEmphasisSpan.MarkShape int markShape; + @TextEmphasisSpan.MarkFill int markFill; + if (textEmphasis.markShape == TextEmphasis.MARK_SHAPE_AUTO) { + // If a vertical writing mode applies, then 'auto' is equivalent to 'filled sesame'; + // otherwise, it's equivalent to 'filled circle': + // https://www.w3.org/TR/ttml2/#style-value-emphasis-style + markShape = + (verticalType == Cue.VERTICAL_TYPE_LR || verticalType == Cue.VERTICAL_TYPE_RL) + ? 
TextEmphasisSpan.MARK_SHAPE_SESAME + : TextEmphasisSpan.MARK_SHAPE_CIRCLE; + markFill = TextEmphasisSpan.MARK_FILL_FILLED; + } else { + markShape = textEmphasis.markShape; + markFill = textEmphasis.markFill; + } + + @TextEmphasis.Position int position; + if (textEmphasis.position == TextEmphasis.POSITION_OUTSIDE) { + // 'outside' is not supported by TextEmphasisSpan, so treat it as 'before': + // https://www.w3.org/TR/ttml2/#style-value-annotation-position + position = TextAnnotation.POSITION_BEFORE; + } else { + position = textEmphasis.position; + } + + SpanUtil.addOrReplaceSpan( + builder, + new TextEmphasisSpan(markShape, markFill, position), + start, + end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } - if (style.getTextAlign() != null) { - builder.setSpan(new AlignmentSpan.Standard(style.getTextAlign()), start, end, + switch (style.getRubyType()) { + case TtmlStyle.RUBY_TYPE_BASE: + // look for the sibling RUBY_TEXT and add it as span between start & end. + @Nullable TtmlNode containerNode = findRubyContainerNode(parent, globalStyles); + if (containerNode == null) { + // No matching container node + break; + } + @Nullable TtmlNode textNode = findRubyTextNode(containerNode, globalStyles); + if (textNode == null) { + // no matching text node + break; + } + String rubyText; + if (textNode.getChildCount() == 1 && textNode.getChild(0).text != null) { + rubyText = Util.castNonNull(textNode.getChild(0).text); + } else { + Log.i(TAG, "Skipping rubyText node without exactly one text child."); + break; + } + + @Nullable + TtmlStyle textStyle = resolveStyle(textNode.style, textNode.getStyleIds(), globalStyles); + + // Use position from ruby text node if defined. + @TextAnnotation.Position + int rubyPosition = + textStyle != null ? textStyle.getRubyPosition() : TextAnnotation.POSITION_UNKNOWN; + + if (rubyPosition == TextAnnotation.POSITION_UNKNOWN) { + // If ruby position is not defined, use position info from container node. + @Nullable + TtmlStyle containerStyle = + resolveStyle(containerNode.style, containerNode.getStyleIds(), globalStyles); + rubyPosition = containerStyle != null ? containerStyle.getRubyPosition() : rubyPosition; + } + + builder.setSpan( + new RubySpan(rubyText, rubyPosition), start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + break; + case TtmlStyle.RUBY_TYPE_DELIMITER: + // TODO: Add support for this when RubySpan supports parenthetical text. For now, just + // fall through and delete the text. + case TtmlStyle.RUBY_TYPE_TEXT: + // We can't just remove the text directly from `builder` here because TtmlNode has fixed + // ideas of where every node starts and ends (nodeStartsByRegion and nodeEndsByRegion) so + // all these indices become invalid if we mutate the underlying string at this point. + // Instead we add a special span that's then handled in TtmlNode#cleanUpText. 
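        // For example (assumed markup): given a base span "漢字" followed by a ruby text span
        // "かんじ", the RUBY_TYPE_BASE branch above sets RubySpan("かんじ", position) over the base
        // characters, while this branch wraps "かんじ" itself in a DeleteTextSpan so that
        // TtmlNode#cleanUpText strips it from the final cue text, leaving only the annotated base.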
+ builder.setSpan(new DeleteTextSpan(), start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + break; + case TtmlStyle.RUBY_TYPE_CONTAINER: + case TtmlStyle.UNSPECIFIED: + default: + // Do nothing + break; + } + if (style.getTextCombine()) { + SpanUtil.addOrReplaceSpan( + builder, + new HorizontalTextInVerticalContextSpan(), + start, + end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } switch (style.getFontSizeUnit()) { case TtmlStyle.FONT_SIZE_UNIT_PIXEL: - builder.setSpan(new AbsoluteSizeSpan((int) style.getFontSize(), true), start, end, + SpanUtil.addOrReplaceSpan( + builder, + new AbsoluteSizeSpan((int) style.getFontSize(), true), + start, + end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); break; case TtmlStyle.FONT_SIZE_UNIT_EM: - builder.setSpan(new RelativeSizeSpan(style.getFontSize()), start, end, + SpanUtil.addOrReplaceSpan( + builder, + new RelativeSizeSpan(style.getFontSize()), + start, + end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); break; case TtmlStyle.FONT_SIZE_UNIT_PERCENT: - builder.setSpan(new RelativeSizeSpan(style.getFontSize() / 100), start, end, + SpanUtil.addOrReplaceSpan( + builder, + new RelativeSizeSpan(style.getFontSize() / 100), + start, + end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); break; case TtmlStyle.UNSPECIFIED: @@ -111,6 +252,39 @@ public static void applyStylesToSpan(SpannableStringBuilder builder, } } + @Nullable + private static TtmlNode findRubyTextNode( + TtmlNode rubyContainerNode, Map globalStyles) { + Deque childNodesStack = new ArrayDeque<>(); + childNodesStack.push(rubyContainerNode); + while (!childNodesStack.isEmpty()) { + TtmlNode childNode = childNodesStack.pop(); + @Nullable + TtmlStyle style = resolveStyle(childNode.style, childNode.getStyleIds(), globalStyles); + if (style != null && style.getRubyType() == TtmlStyle.RUBY_TYPE_TEXT) { + return childNode; + } + for (int i = childNode.getChildCount() - 1; i >= 0; i--) { + childNodesStack.push(childNode.getChild(i)); + } + } + + return null; + } + + @Nullable + private static TtmlNode findRubyContainerNode( + @Nullable TtmlNode node, Map globalStyles) { + while (node != null) { + @Nullable TtmlStyle style = resolveStyle(node.style, node.getStyleIds(), globalStyles); + if (style != null && style.getRubyType() == TtmlStyle.RUBY_TYPE_CONTAINER) { + return node; + } + node = node.parent; + } + return null; + } + /** * Called when the end of a paragraph is encountered. Adds a newline if there are one or more * non-space characters since the previous newline. 
@@ -147,5 +321,4 @@ public static void applyStylesToSpan(SpannableStringBuilder builder, } private TtmlRenderUtil() {} - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlStyle.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlStyle.java index e90b099173..a3c5d76681 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlStyle.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlStyle.java @@ -15,23 +15,28 @@ */ package com.google.android.exoplayer2.text.ttml; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.graphics.Typeface; import android.text.Layout; import androidx.annotation.IntDef; -import com.google.android.exoplayer2.util.Assertions; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.text.span.TextAnnotation; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; -/** - * Style object of a TtmlNode - */ +/** Style object of a TtmlNode */ /* package */ final class TtmlStyle { public static final int UNSPECIFIED = -1; + public static final float UNSPECIFIED_SHEAR = Float.MAX_VALUE; @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef( flag = true, value = {UNSPECIFIED, STYLE_NORMAL, STYLE_BOLD, STYLE_ITALIC, STYLE_BOLD_ITALIC}) @@ -44,6 +49,7 @@ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({UNSPECIFIED, FONT_SIZE_UNIT_PIXEL, FONT_SIZE_UNIT_EM, FONT_SIZE_UNIT_PERCENT}) public @interface FontSizeUnit {} @@ -53,26 +59,43 @@ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({UNSPECIFIED, OFF, ON}) private @interface OptionalBoolean {} private static final int OFF = 0; private static final int ON = 1; - private String fontFamily; + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({UNSPECIFIED, RUBY_TYPE_CONTAINER, RUBY_TYPE_BASE, RUBY_TYPE_TEXT, RUBY_TYPE_DELIMITER}) + public @interface RubyType {} + + public static final int RUBY_TYPE_CONTAINER = 1; + public static final int RUBY_TYPE_BASE = 2; + public static final int RUBY_TYPE_TEXT = 3; + public static final int RUBY_TYPE_DELIMITER = 4; + + @Nullable private String fontFamily; private int fontColor; private boolean hasFontColor; private int backgroundColor; private boolean hasBackgroundColor; - @OptionalBoolean private int linethrough; - @OptionalBoolean private int underline; - @OptionalBoolean private int bold; - @OptionalBoolean private int italic; - @FontSizeUnit private int fontSizeUnit; + private @OptionalBoolean int linethrough; + private @OptionalBoolean int underline; + private @OptionalBoolean int bold; + private @OptionalBoolean int italic; + private @FontSizeUnit int fontSizeUnit; private float fontSize; - private String id; - private TtmlStyle inheritableStyle; - private Layout.Alignment textAlign; + @Nullable private String id; + private @RubyType int rubyType; + private @TextAnnotation.Position int rubyPosition; + @Nullable private Layout.Alignment textAlign; + @Nullable private Layout.Alignment multiRowAlign; + private @OptionalBoolean int textCombine; + @Nullable private TextEmphasis textEmphasis; + private float shearPercentage; public TtmlStyle() { linethrough = UNSPECIFIED; @@ -80,6 +103,10 @@ public TtmlStyle() { bold = UNSPECIFIED; italic = UNSPECIFIED; fontSizeUnit = 
UNSPECIFIED; + rubyType = UNSPECIFIED; + rubyPosition = TextAnnotation.POSITION_UNKNOWN; + textCombine = UNSPECIFIED; + shearPercentage = UNSPECIFIED_SHEAR; } /** @@ -88,20 +115,19 @@ public TtmlStyle() { * @return {@link #UNSPECIFIED}, {@link #STYLE_NORMAL}, {@link #STYLE_BOLD}, {@link #STYLE_BOLD} * or {@link #STYLE_BOLD_ITALIC}. */ - @StyleFlags public int getStyle() { + public @StyleFlags int getStyle() { if (bold == UNSPECIFIED && italic == UNSPECIFIED) { return UNSPECIFIED; } - return (bold == ON ? STYLE_BOLD : STYLE_NORMAL) - | (italic == ON ? STYLE_ITALIC : STYLE_NORMAL); + return (bold == ON ? STYLE_BOLD : STYLE_NORMAL) | (italic == ON ? STYLE_ITALIC : STYLE_NORMAL); } public boolean isLinethrough() { return linethrough == ON; } + @CanIgnoreReturnValue public TtmlStyle setLinethrough(boolean linethrough) { - Assertions.checkState(inheritableStyle == null); this.linethrough = linethrough ? ON : OFF; return this; } @@ -110,30 +136,31 @@ public boolean isUnderline() { return underline == ON; } + @CanIgnoreReturnValue public TtmlStyle setUnderline(boolean underline) { - Assertions.checkState(inheritableStyle == null); this.underline = underline ? ON : OFF; return this; } + @CanIgnoreReturnValue public TtmlStyle setBold(boolean bold) { - Assertions.checkState(inheritableStyle == null); this.bold = bold ? ON : OFF; return this; } + @CanIgnoreReturnValue public TtmlStyle setItalic(boolean italic) { - Assertions.checkState(inheritableStyle == null); this.italic = italic ? ON : OFF; return this; } + @Nullable public String getFontFamily() { return fontFamily; } - public TtmlStyle setFontFamily(String fontFamily) { - Assertions.checkState(inheritableStyle == null); + @CanIgnoreReturnValue + public TtmlStyle setFontFamily(@Nullable String fontFamily) { this.fontFamily = fontFamily; return this; } @@ -145,8 +172,8 @@ public int getFontColor() { return fontColor; } + @CanIgnoreReturnValue public TtmlStyle setFontColor(int fontColor) { - Assertions.checkState(inheritableStyle == null); this.fontColor = fontColor; hasFontColor = true; return this; @@ -163,6 +190,7 @@ public int getBackgroundColor() { return backgroundColor; } + @CanIgnoreReturnValue public TtmlStyle setBackgroundColor(int backgroundColor) { this.backgroundColor = backgroundColor; hasBackgroundColor = true; @@ -173,28 +201,41 @@ public boolean hasBackgroundColor() { return hasBackgroundColor; } + @CanIgnoreReturnValue + public TtmlStyle setShearPercentage(float shearPercentage) { + this.shearPercentage = shearPercentage; + return this; + } + + public float getShearPercentage() { + return shearPercentage; + } + /** - * Inherits from an ancestor style. Properties like tts:backgroundColor which - * are not inheritable are not inherited as well as properties which are already set locally - * are never overridden. + * Chains this style to referential style. Local properties which are already set are never + * overridden. * - * @param ancestor the ancestor style to inherit from + * @param ancestor the referential style to inherit from */ - public TtmlStyle inherit(TtmlStyle ancestor) { - return inherit(ancestor, false); + @CanIgnoreReturnValue + public TtmlStyle chain(@Nullable TtmlStyle ancestor) { + return inherit(ancestor, true); } /** - * Chains this style to referential style. Local properties which are already set - * are never overridden. + * Inherits from an ancestor style. 
Properties like tts:backgroundColor which are not + * inheritable are not inherited as well as properties which are already set locally are never + * overridden. * - * @param ancestor the referential style to inherit from + * @param ancestor the ancestor style to inherit from */ - public TtmlStyle chain(TtmlStyle ancestor) { - return inherit(ancestor, true); + @CanIgnoreReturnValue + public TtmlStyle inherit(@Nullable TtmlStyle ancestor) { + return inherit(ancestor, false); } - private TtmlStyle inherit(TtmlStyle ancestor, boolean chaining) { + @CanIgnoreReturnValue + private TtmlStyle inherit(@Nullable TtmlStyle ancestor, boolean chaining) { if (ancestor != null) { if (!hasFontColor && ancestor.hasFontColor) { setFontColor(ancestor.fontColor); @@ -205,7 +246,7 @@ private TtmlStyle inherit(TtmlStyle ancestor, boolean chaining) { if (italic == UNSPECIFIED) { italic = ancestor.italic; } - if (fontFamily == null) { + if (fontFamily == null && ancestor.fontFamily != null) { fontFamily = ancestor.fontFamily; } if (linethrough == UNSPECIFIED) { @@ -214,55 +255,131 @@ private TtmlStyle inherit(TtmlStyle ancestor, boolean chaining) { if (underline == UNSPECIFIED) { underline = ancestor.underline; } - if (textAlign == null) { + if (rubyPosition == TextAnnotation.POSITION_UNKNOWN) { + rubyPosition = ancestor.rubyPosition; + } + if (textAlign == null && ancestor.textAlign != null) { textAlign = ancestor.textAlign; } + if (multiRowAlign == null && ancestor.multiRowAlign != null) { + multiRowAlign = ancestor.multiRowAlign; + } + if (textCombine == UNSPECIFIED) { + textCombine = ancestor.textCombine; + } if (fontSizeUnit == UNSPECIFIED) { fontSizeUnit = ancestor.fontSizeUnit; fontSize = ancestor.fontSize; } + if (textEmphasis == null) { + textEmphasis = ancestor.textEmphasis; + } + if (shearPercentage == UNSPECIFIED_SHEAR) { + shearPercentage = ancestor.shearPercentage; + } // attributes not inherited as of http://www.w3.org/TR/ttml1/ if (chaining && !hasBackgroundColor && ancestor.hasBackgroundColor) { setBackgroundColor(ancestor.backgroundColor); } + if (chaining && rubyType == UNSPECIFIED && ancestor.rubyType != UNSPECIFIED) { + rubyType = ancestor.rubyType; + } } return this; } - public TtmlStyle setId(String id) { + @CanIgnoreReturnValue + public TtmlStyle setId(@Nullable String id) { this.id = id; return this; } + @Nullable public String getId() { return id; } + @CanIgnoreReturnValue + public TtmlStyle setRubyType(@RubyType int rubyType) { + this.rubyType = rubyType; + return this; + } + + public @RubyType int getRubyType() { + return rubyType; + } + + @CanIgnoreReturnValue + public TtmlStyle setRubyPosition(@TextAnnotation.Position int position) { + this.rubyPosition = position; + return this; + } + + public @TextAnnotation.Position int getRubyPosition() { + return rubyPosition; + } + + @Nullable public Layout.Alignment getTextAlign() { return textAlign; } - public TtmlStyle setTextAlign(Layout.Alignment textAlign) { + @CanIgnoreReturnValue + public TtmlStyle setTextAlign(@Nullable Layout.Alignment textAlign) { this.textAlign = textAlign; return this; } + @Nullable + public Layout.Alignment getMultiRowAlign() { + return multiRowAlign; + } + + @CanIgnoreReturnValue + public TtmlStyle setMultiRowAlign(@Nullable Layout.Alignment multiRowAlign) { + this.multiRowAlign = multiRowAlign; + return this; + } + + /** Returns true if the source entity has {@code tts:textCombine=all}. 
*/ + public boolean getTextCombine() { + return textCombine == ON; + } + + @CanIgnoreReturnValue + public TtmlStyle setTextCombine(boolean combine) { + this.textCombine = combine ? ON : OFF; + return this; + } + + @Nullable + public TextEmphasis getTextEmphasis() { + return textEmphasis; + } + + @CanIgnoreReturnValue + public TtmlStyle setTextEmphasis(@Nullable TextEmphasis textEmphasis) { + this.textEmphasis = textEmphasis; + return this; + } + + @CanIgnoreReturnValue public TtmlStyle setFontSize(float fontSize) { this.fontSize = fontSize; return this; } + @CanIgnoreReturnValue public TtmlStyle setFontSizeUnit(int fontSizeUnit) { this.fontSizeUnit = fontSizeUnit; return this; } - @FontSizeUnit public int getFontSizeUnit() { + public @FontSizeUnit int getFontSizeUnit() { return fontSizeUnit; } public float getFontSize() { return fontSize; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlSubtitle.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlSubtitle.java index 6a52338a94..68c11a9a25 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlSubtitle.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/ttml/TtmlSubtitle.java @@ -24,9 +24,7 @@ import java.util.List; import java.util.Map; -/** - * A representation of a TTML subtitle. - */ +/** A representation of a TTML subtitle. */ /* package */ final class TtmlSubtitle implements Subtitle { private final TtmlNode root; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/tx3g/Tx3gDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/tx3g/Tx3gDecoder.java index c8f2979c58..eab76a929d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/tx3g/Tx3gDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/tx3g/Tx3gDecoder.java @@ -15,6 +15,9 @@ */ package com.google.android.exoplayer2.text.tx3g; +import static com.google.android.exoplayer2.text.Cue.ANCHOR_TYPE_START; +import static com.google.android.exoplayer2.text.Cue.LINE_TYPE_FRACTION; + import android.graphics.Color; import android.graphics.Typeface; import android.text.SpannableStringBuilder; @@ -23,25 +26,27 @@ import android.text.style.StyleSpan; import android.text.style.TypefaceSpan; import android.text.style.UnderlineSpan; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.text.Cue; import com.google.android.exoplayer2.text.SimpleSubtitleDecoder; import com.google.android.exoplayer2.text.Subtitle; import com.google.android.exoplayer2.text.SubtitleDecoderException; +import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Charsets; import java.nio.charset.Charset; import java.util.List; /** * A {@link SimpleSubtitleDecoder} for tx3g. - *

- * Currently supports parsing of a single text track with embedded styles.
+ *
+ * <p>
      Currently supports parsing of a single text track with embedded styles. */ public final class Tx3gDecoder extends SimpleSubtitleDecoder { - private static final char BOM_UTF16_BE = '\uFEFF'; - private static final char BOM_UTF16_LE = '\uFFFE'; + private static final String TAG = "Tx3gDecoder"; private static final int TYPE_STYL = 0x7374796c; private static final int TYPE_TBOX = 0x74626f78; @@ -49,7 +54,6 @@ public final class Tx3gDecoder extends SimpleSubtitleDecoder { private static final int SIZE_ATOM_HEADER = 8; private static final int SIZE_SHORT = 2; - private static final int SIZE_BOM_UTF16 = 2; private static final int SIZE_STYLE_RECORD = 12; private static final int FONT_FACE_BOLD = 0x0001; @@ -66,12 +70,12 @@ public final class Tx3gDecoder extends SimpleSubtitleDecoder { private final ParsableByteArray parsableByteArray; - private boolean customVerticalPlacement; - private int defaultFontFace; - private int defaultColorRgba; - private String defaultFontFamily; - private float defaultVerticalPlacement; - private int calculatedVideoTrackHeight; + private final boolean customVerticalPlacement; + private final int defaultFontFace; + private final int defaultColorRgba; + private final String defaultFontFamily; + private final float defaultVerticalPlacement; + private final int calculatedVideoTrackHeight; /** * Sets up a new {@link Tx3gDecoder} with default values. @@ -82,25 +86,27 @@ public Tx3gDecoder(List initializationData) { super("Tx3gDecoder"); parsableByteArray = new ParsableByteArray(); - if (initializationData != null && initializationData.size() == 1 + if (initializationData.size() == 1 && (initializationData.get(0).length == 48 || initializationData.get(0).length == 53)) { byte[] initializationBytes = initializationData.get(0); defaultFontFace = initializationBytes[24]; - defaultColorRgba = ((initializationBytes[26] & 0xFF) << 24) - | ((initializationBytes[27] & 0xFF) << 16) - | ((initializationBytes[28] & 0xFF) << 8) - | (initializationBytes[29] & 0xFF); + defaultColorRgba = + ((initializationBytes[26] & 0xFF) << 24) + | ((initializationBytes[27] & 0xFF) << 16) + | ((initializationBytes[28] & 0xFF) << 8) + | (initializationBytes[29] & 0xFF); String fontFamily = Util.fromUtf8Bytes(initializationBytes, 43, initializationBytes.length - 43); defaultFontFamily = TX3G_SERIF.equals(fontFamily) ? 
C.SERIF_NAME : C.SANS_SERIF_NAME; - //font size (initializationBytes[25]) is 5% of video height + // font size (initializationBytes[25]) is 5% of video height calculatedVideoTrackHeight = 20 * initializationBytes[25]; customVerticalPlacement = (initializationBytes[0] & 0x20) != 0; if (customVerticalPlacement) { - int requestedVerticalPlacement = ((initializationBytes[10] & 0xFF) << 8) - | (initializationBytes[11] & 0xFF); - defaultVerticalPlacement = (float) requestedVerticalPlacement / calculatedVideoTrackHeight; - defaultVerticalPlacement = Util.constrainValue(defaultVerticalPlacement, 0.0f, 0.95f); + int requestedVerticalPlacement = + ((initializationBytes[10] & 0xFF) << 8) | (initializationBytes[11] & 0xFF); + defaultVerticalPlacement = + Util.constrainValue( + (float) requestedVerticalPlacement / calculatedVideoTrackHeight, 0.0f, 0.95f); } else { defaultVerticalPlacement = DEFAULT_VERTICAL_PLACEMENT; } @@ -110,25 +116,24 @@ public Tx3gDecoder(List initializationData) { defaultFontFamily = DEFAULT_FONT_FAMILY; customVerticalPlacement = false; defaultVerticalPlacement = DEFAULT_VERTICAL_PLACEMENT; + calculatedVideoTrackHeight = C.LENGTH_UNSET; } } @Override - protected Subtitle decode(byte[] bytes, int length, boolean reset) + protected Subtitle decode(byte[] data, int length, boolean reset) throws SubtitleDecoderException { - parsableByteArray.reset(bytes, length); + parsableByteArray.reset(data, length); String cueTextString = readSubtitleText(parsableByteArray); if (cueTextString.isEmpty()) { return Tx3gSubtitle.EMPTY; } // Attach default styles. SpannableStringBuilder cueText = new SpannableStringBuilder(cueTextString); - attachFontFace(cueText, defaultFontFace, DEFAULT_FONT_FACE, 0, cueText.length(), - SPAN_PRIORITY_LOW); - attachColor(cueText, defaultColorRgba, DEFAULT_COLOR, 0, cueText.length(), - SPAN_PRIORITY_LOW); - attachFontFamily(cueText, defaultFontFamily, DEFAULT_FONT_FAMILY, 0, cueText.length(), - SPAN_PRIORITY_LOW); + attachFontFace( + cueText, defaultFontFace, DEFAULT_FONT_FACE, 0, cueText.length(), SPAN_PRIORITY_LOW); + attachColor(cueText, defaultColorRgba, DEFAULT_COLOR, 0, cueText.length(), SPAN_PRIORITY_LOW); + attachFontFamily(cueText, defaultFontFamily, 0, cueText.length()); float verticalPlacement = defaultVerticalPlacement; // Find and attach additional styles. 
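      // Layout note for the loop below (per the tx3g timed-text format assumed by this decoder,
      // not spelled out in the diff): a 'styl' atom carries a 2-byte record count followed by
      // 12-byte style records (start char 2, end char 2, font id 2, face flags 1, font size 1,
      // RGBA color 4), which matches SIZE_STYLE_RECORD and the reads in applyStyleRecord.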
while (parsableByteArray.bytesLeft() >= SIZE_ATOM_HEADER) { @@ -150,15 +155,11 @@ protected Subtitle decode(byte[] bytes, int length, boolean reset) parsableByteArray.setPosition(position + atomSize); } return new Tx3gSubtitle( - new Cue( - cueText, - /* textAlignment= */ null, - verticalPlacement, - Cue.LINE_TYPE_FRACTION, - Cue.ANCHOR_TYPE_START, - Cue.DIMEN_UNSET, - Cue.TYPE_UNSET, - Cue.DIMEN_UNSET)); + new Cue.Builder() + .setText(cueText) + .setLine(verticalPlacement, LINE_TYPE_FRACTION) + .setLineAnchor(ANCHOR_TYPE_START) + .build()); } private static String readSubtitleText(ParsableByteArray parsableByteArray) @@ -168,17 +169,15 @@ private static String readSubtitleText(ParsableByteArray parsableByteArray) if (textLength == 0) { return ""; } - if (parsableByteArray.bytesLeft() >= SIZE_BOM_UTF16) { - char firstChar = parsableByteArray.peekChar(); - if (firstChar == BOM_UTF16_BE || firstChar == BOM_UTF16_LE) { - return parsableByteArray.readString(textLength, Charset.forName(C.UTF16_NAME)); - } - } - return parsableByteArray.readString(textLength, Charset.forName(C.UTF8_NAME)); + int textStartPosition = parsableByteArray.getPosition(); + @Nullable Charset charset = parsableByteArray.readUtfCharsetFromBom(); + int bomSize = parsableByteArray.getPosition() - textStartPosition; + return parsableByteArray.readString( + textLength - bomSize, charset != null ? charset : Charsets.UTF_8); } - private void applyStyleRecord(ParsableByteArray parsableByteArray, - SpannableStringBuilder cueText) throws SubtitleDecoderException { + private void applyStyleRecord(ParsableByteArray parsableByteArray, SpannableStringBuilder cueText) + throws SubtitleDecoderException { assertTrue(parsableByteArray.bytesLeft() >= SIZE_STYLE_RECORD); int start = parsableByteArray.readUnsignedShort(); int end = parsableByteArray.readUnsignedShort(); @@ -186,12 +185,27 @@ private void applyStyleRecord(ParsableByteArray parsableByteArray, int fontFace = parsableByteArray.readUnsignedByte(); parsableByteArray.skipBytes(1); // font size int colorRgba = parsableByteArray.readInt(); + + if (end > cueText.length()) { + Log.w( + TAG, "Truncating styl end (" + end + ") to cueText.length() (" + cueText.length() + ")."); + end = cueText.length(); + } + if (start >= end) { + Log.w(TAG, "Ignoring styl with start (" + start + ") >= end (" + end + ")."); + return; + } attachFontFace(cueText, fontFace, defaultFontFace, start, end, SPAN_PRIORITY_HIGH); attachColor(cueText, colorRgba, defaultColorRgba, start, end, SPAN_PRIORITY_HIGH); } - private static void attachFontFace(SpannableStringBuilder cueText, int fontFace, - int defaultFontFace, int start, int end, int spanPriority) { + private static void attachFontFace( + SpannableStringBuilder cueText, + int fontFace, + int defaultFontFace, + int start, + int end, + int spanPriority) { if (fontFace != defaultFontFace) { final int flags = Spanned.SPAN_EXCLUSIVE_EXCLUSIVE | spanPriority; boolean isBold = (fontFace & FONT_FACE_BOLD) != 0; @@ -215,21 +229,32 @@ private static void attachFontFace(SpannableStringBuilder cueText, int fontFace, } } - private static void attachColor(SpannableStringBuilder cueText, int colorRgba, - int defaultColorRgba, int start, int end, int spanPriority) { + private static void attachColor( + SpannableStringBuilder cueText, + int colorRgba, + int defaultColorRgba, + int start, + int end, + int spanPriority) { if (colorRgba != defaultColorRgba) { int colorArgb = ((colorRgba & 0xFF) << 24) | (colorRgba >>> 8); - cueText.setSpan(new ForegroundColorSpan(colorArgb), 
start, end, + cueText.setSpan( + new ForegroundColorSpan(colorArgb), + start, + end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE | spanPriority); } } @SuppressWarnings("ReferenceEquality") - private static void attachFontFamily(SpannableStringBuilder cueText, String fontFamily, - String defaultFontFamily, int start, int end, int spanPriority) { - if (fontFamily != defaultFontFamily) { - cueText.setSpan(new TypefaceSpan(fontFamily), start, end, - Spanned.SPAN_EXCLUSIVE_EXCLUSIVE | spanPriority); + private static void attachFontFamily( + SpannableStringBuilder cueText, String fontFamily, int start, int end) { + if (fontFamily != Tx3gDecoder.DEFAULT_FONT_FAMILY) { + cueText.setSpan( + new TypefaceSpan(fontFamily), + start, + end, + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE | Tx3gDecoder.SPAN_PRIORITY_LOW); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/tx3g/Tx3gSubtitle.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/tx3g/Tx3gSubtitle.java index adb1190ce4..100e080056 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/tx3g/Tx3gSubtitle.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/tx3g/Tx3gSubtitle.java @@ -22,9 +22,7 @@ import java.util.Collections; import java.util.List; -/** - * A representation of a tx3g subtitle. - */ +/** A representation of a tx3g subtitle. */ /* package */ final class Tx3gSubtitle implements Subtitle { public static final Tx3gSubtitle EMPTY = new Tx3gSubtitle(); @@ -59,5 +57,4 @@ public long getEventTime(int index) { public List getCues(long timeUs) { return timeUs >= 0 ? cues : Collections.emptyList(); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/CssParser.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/CssParser.java deleted file mode 100644 index 9a5ac40a05..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/CssParser.java +++ /dev/null @@ -1,347 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.text.webvtt; - -import android.text.TextUtils; -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.util.ColorParser; -import com.google.android.exoplayer2.util.ParsableByteArray; -import com.google.android.exoplayer2.util.Util; -import java.util.ArrayList; -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Provides a CSS parser for STYLE blocks in Webvtt files. Supports only a subset of the CSS - * features. 
- */ -/* package */ final class CssParser { - - private static final String PROPERTY_BGCOLOR = "background-color"; - private static final String PROPERTY_FONT_FAMILY = "font-family"; - private static final String PROPERTY_FONT_WEIGHT = "font-weight"; - private static final String PROPERTY_TEXT_DECORATION = "text-decoration"; - private static final String VALUE_BOLD = "bold"; - private static final String VALUE_UNDERLINE = "underline"; - private static final String RULE_START = "{"; - private static final String RULE_END = "}"; - private static final String PROPERTY_FONT_STYLE = "font-style"; - private static final String VALUE_ITALIC = "italic"; - - private static final Pattern VOICE_NAME_PATTERN = Pattern.compile("\\[voice=\"([^\"]*)\"\\]"); - - // Temporary utility data structures. - private final ParsableByteArray styleInput; - private final StringBuilder stringBuilder; - - public CssParser() { - styleInput = new ParsableByteArray(); - stringBuilder = new StringBuilder(); - } - - /** - * Takes a CSS style block and consumes up to the first empty line. Attempts to parse the contents - * of the style block and returns a list of {@link WebvttCssStyle} instances if successful. If - * parsing fails, it returns a list including only the styles which have been successfully parsed - * up to the style rule which was malformed. - * - * @param input The input from which the style block should be read. - * @return A list of {@link WebvttCssStyle}s that represents the parsed block, or a list - * containing the styles up to the parsing failure. - */ - public List parseBlock(ParsableByteArray input) { - stringBuilder.setLength(0); - int initialInputPosition = input.getPosition(); - skipStyleBlock(input); - styleInput.reset(input.data, input.getPosition()); - styleInput.setPosition(initialInputPosition); - - List styles = new ArrayList<>(); - String selector; - while ((selector = parseSelector(styleInput, stringBuilder)) != null) { - if (!RULE_START.equals(parseNextToken(styleInput, stringBuilder))) { - return styles; - } - WebvttCssStyle style = new WebvttCssStyle(); - applySelectorToStyle(style, selector); - String token = null; - boolean blockEndFound = false; - while (!blockEndFound) { - int position = styleInput.getPosition(); - token = parseNextToken(styleInput, stringBuilder); - blockEndFound = token == null || RULE_END.equals(token); - if (!blockEndFound) { - styleInput.setPosition(position); - parseStyleDeclaration(styleInput, style, stringBuilder); - } - } - // Check that the style rule ended correctly. - if (RULE_END.equals(token)) { - styles.add(style); - } - } - return styles; - } - - /** - * Returns a string containing the selector. The input is expected to have the form {@code - * ::cue(tag#id.class1.class2[voice="someone"]}, where every element is optional. - * - * @param input From which the selector is obtained. - * @return A string containing the target, empty string if the selector is universal (targets all - * cues) or null if an error was encountered. 
- */ - @Nullable - private static String parseSelector(ParsableByteArray input, StringBuilder stringBuilder) { - skipWhitespaceAndComments(input); - if (input.bytesLeft() < 5) { - return null; - } - String cueSelector = input.readString(5); - if (!"::cue".equals(cueSelector)) { - return null; - } - int position = input.getPosition(); - String token = parseNextToken(input, stringBuilder); - if (token == null) { - return null; - } - if (RULE_START.equals(token)) { - input.setPosition(position); - return ""; - } - String target = null; - if ("(".equals(token)) { - target = readCueTarget(input); - } - token = parseNextToken(input, stringBuilder); - if (!")".equals(token)) { - return null; - } - return target; - } - - /** - * Reads the contents of ::cue() and returns it as a string. - */ - private static String readCueTarget(ParsableByteArray input) { - int position = input.getPosition(); - int limit = input.limit(); - boolean cueTargetEndFound = false; - while (position < limit && !cueTargetEndFound) { - char c = (char) input.data[position++]; - cueTargetEndFound = c == ')'; - } - return input.readString(--position - input.getPosition()).trim(); - // --offset to return ')' to the input. - } - - private static void parseStyleDeclaration(ParsableByteArray input, WebvttCssStyle style, - StringBuilder stringBuilder) { - skipWhitespaceAndComments(input); - String property = parseIdentifier(input, stringBuilder); - if ("".equals(property)) { - return; - } - if (!":".equals(parseNextToken(input, stringBuilder))) { - return; - } - skipWhitespaceAndComments(input); - String value = parsePropertyValue(input, stringBuilder); - if (value == null || "".equals(value)) { - return; - } - int position = input.getPosition(); - String token = parseNextToken(input, stringBuilder); - if (";".equals(token)) { - // The style declaration is well formed. - } else if (RULE_END.equals(token)) { - // The style declaration is well formed and we can go on, but the closing bracket had to be - // fed back. - input.setPosition(position); - } else { - // The style declaration is not well formed. - return; - } - // At this point we have a presumably valid declaration, we need to parse it and fill the style. - if ("color".equals(property)) { - style.setFontColor(ColorParser.parseCssColor(value)); - } else if (PROPERTY_BGCOLOR.equals(property)) { - style.setBackgroundColor(ColorParser.parseCssColor(value)); - } else if (PROPERTY_TEXT_DECORATION.equals(property)) { - if (VALUE_UNDERLINE.equals(value)) { - style.setUnderline(true); - } - } else if (PROPERTY_FONT_FAMILY.equals(property)) { - style.setFontFamily(value); - } else if (PROPERTY_FONT_WEIGHT.equals(property)) { - if (VALUE_BOLD.equals(value)) { - style.setBold(true); - } - } else if (PROPERTY_FONT_STYLE.equals(property)) { - if (VALUE_ITALIC.equals(value)) { - style.setItalic(true); - } - } - // TODO: Fill remaining supported styles. - } - - // Visible for testing. - /* package */ static void skipWhitespaceAndComments(ParsableByteArray input) { - boolean skipping = true; - while (input.bytesLeft() > 0 && skipping) { - skipping = maybeSkipWhitespace(input) || maybeSkipComment(input); - } - } - - // Visible for testing. - @Nullable - /* package */ static String parseNextToken(ParsableByteArray input, StringBuilder stringBuilder) { - skipWhitespaceAndComments(input); - if (input.bytesLeft() == 0) { - return null; - } - String identifier = parseIdentifier(input, stringBuilder); - if (!"".equals(identifier)) { - return identifier; - } - // We found a delimiter. 
- return "" + (char) input.readUnsignedByte(); - } - - private static boolean maybeSkipWhitespace(ParsableByteArray input) { - switch(peekCharAtPosition(input, input.getPosition())) { - case '\t': - case '\r': - case '\n': - case '\f': - case ' ': - input.skipBytes(1); - return true; - default: - return false; - } - } - - // Visible for testing. - /* package */ static void skipStyleBlock(ParsableByteArray input) { - // The style block cannot contain empty lines, so we assume the input ends when a empty line - // is found. - String line; - do { - line = input.readLine(); - } while (!TextUtils.isEmpty(line)); - } - - private static char peekCharAtPosition(ParsableByteArray input, int position) { - return (char) input.data[position]; - } - - @Nullable - private static String parsePropertyValue(ParsableByteArray input, StringBuilder stringBuilder) { - StringBuilder expressionBuilder = new StringBuilder(); - String token; - int position; - boolean expressionEndFound = false; - // TODO: Add support for "Strings in quotes with spaces". - while (!expressionEndFound) { - position = input.getPosition(); - token = parseNextToken(input, stringBuilder); - if (token == null) { - // Syntax error. - return null; - } - if (RULE_END.equals(token) || ";".equals(token)) { - input.setPosition(position); - expressionEndFound = true; - } else { - expressionBuilder.append(token); - } - } - return expressionBuilder.toString(); - } - - private static boolean maybeSkipComment(ParsableByteArray input) { - int position = input.getPosition(); - int limit = input.limit(); - byte[] data = input.data; - if (position + 2 <= limit && data[position++] == '/' && data[position++] == '*') { - while (position + 1 < limit) { - char skippedChar = (char) data[position++]; - if (skippedChar == '*') { - if (((char) data[position]) == '/') { - position++; - limit = position; - } - } - } - input.skipBytes(limit - input.getPosition()); - return true; - } - return false; - } - - private static String parseIdentifier(ParsableByteArray input, StringBuilder stringBuilder) { - stringBuilder.setLength(0); - int position = input.getPosition(); - int limit = input.limit(); - boolean identifierEndFound = false; - while (position < limit && !identifierEndFound) { - char c = (char) input.data[position]; - if ((c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || c == '#' - || c == '-' || c == '.' || c == '_') { - position++; - stringBuilder.append(c); - } else { - identifierEndFound = true; - } - } - input.skipBytes(position - input.getPosition()); - return stringBuilder.toString(); - } - - /** - * Sets the target of a {@link WebvttCssStyle} by splitting a selector of the form - * {@code ::cue(tag#id.class1.class2[voice="someone"]}, where every element is optional. - */ - private void applySelectorToStyle(WebvttCssStyle style, String selector) { - if ("".equals(selector)) { - return; // Universal selector. 
- } - int voiceStartIndex = selector.indexOf('['); - if (voiceStartIndex != -1) { - Matcher matcher = VOICE_NAME_PATTERN.matcher(selector.substring(voiceStartIndex)); - if (matcher.matches()) { - style.setTargetVoice(matcher.group(1)); - } - selector = selector.substring(0, voiceStartIndex); - } - String[] classDivision = Util.split(selector, "\\."); - String tagAndIdDivision = classDivision[0]; - int idPrefixIndex = tagAndIdDivision.indexOf('#'); - if (idPrefixIndex != -1) { - style.setTargetTagName(tagAndIdDivision.substring(0, idPrefixIndex)); - style.setTargetId(tagAndIdDivision.substring(idPrefixIndex + 1)); // We discard the '#'. - } else { - style.setTargetTagName(tagAndIdDivision); - } - if (classDivision.length > 1) { - style.setTargetClasses(Util.nullSafeArrayCopyOfRange(classDivision, 1, classDivision.length)); - } - } - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/Mp4WebvttDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/Mp4WebvttDecoder.java index 8b255ac2bd..4ceb431d44 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/Mp4WebvttDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/Mp4WebvttDecoder.java @@ -15,6 +15,7 @@ */ package com.google.android.exoplayer2.text.webvtt; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.text.Cue; import com.google.android.exoplayer2.text.SimpleSubtitleDecoder; import com.google.android.exoplayer2.text.Subtitle; @@ -41,20 +42,18 @@ public final class Mp4WebvttDecoder extends SimpleSubtitleDecoder { private static final int TYPE_vttc = 0x76747463; private final ParsableByteArray sampleData; - private final WebvttCue.Builder builder; public Mp4WebvttDecoder() { super("Mp4WebvttDecoder"); sampleData = new ParsableByteArray(); - builder = new WebvttCue.Builder(); } @Override - protected Subtitle decode(byte[] bytes, int length, boolean reset) + protected Subtitle decode(byte[] data, int length, boolean reset) throws SubtitleDecoderException { // Webvtt in Mp4 samples have boxes inside of them, so we have to do a traditional box parsing: // first 4 bytes size and then 4 bytes type. - sampleData.reset(bytes, length); + sampleData.reset(data, length); List resultingCueList = new ArrayList<>(); while (sampleData.bytesLeft() > 0) { if (sampleData.bytesLeft() < BOX_HEADER_SIZE) { @@ -63,7 +62,7 @@ protected Subtitle decode(byte[] bytes, int length, boolean reset) int boxSize = sampleData.readInt(); int boxType = sampleData.readInt(); if (boxType == TYPE_vttc) { - resultingCueList.add(parseVttCueBox(sampleData, builder, boxSize - BOX_HEADER_SIZE)); + resultingCueList.add(parseVttCueBox(sampleData, boxSize - BOX_HEADER_SIZE)); } else { // Peers of the VTTCueBox are still not supported and are skipped. 
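        // Illustrative numbers (hypothetical, not from the diff): a box declaring boxSize == 30
        // has an 8-byte header (4-byte size plus 4-byte type), so 30 - BOX_HEADER_SIZE == 22
        // payload bytes are skipped here when the box type is unsupported.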
sampleData.skipBytes(boxSize - BOX_HEADER_SIZE); @@ -72,9 +71,10 @@ protected Subtitle decode(byte[] bytes, int length, boolean reset) return new Mp4WebvttSubtitle(resultingCueList); } - private static Cue parseVttCueBox(ParsableByteArray sampleData, WebvttCue.Builder builder, - int remainingCueBoxBytes) throws SubtitleDecoderException { - builder.reset(); + private static Cue parseVttCueBox(ParsableByteArray sampleData, int remainingCueBoxBytes) + throws SubtitleDecoderException { + @Nullable Cue.Builder cueBuilder = null; + @Nullable CharSequence cueText = null; while (remainingCueBoxBytes > 0) { if (remainingCueBoxBytes < BOX_HEADER_SIZE) { throw new SubtitleDecoderException("Incomplete vtt cue box header found."); @@ -84,18 +84,24 @@ private static Cue parseVttCueBox(ParsableByteArray sampleData, WebvttCue.Builde remainingCueBoxBytes -= BOX_HEADER_SIZE; int payloadLength = boxSize - BOX_HEADER_SIZE; String boxPayload = - Util.fromUtf8Bytes(sampleData.data, sampleData.getPosition(), payloadLength); + Util.fromUtf8Bytes(sampleData.getData(), sampleData.getPosition(), payloadLength); sampleData.skipBytes(payloadLength); remainingCueBoxBytes -= payloadLength; if (boxType == TYPE_sttg) { - WebvttCueParser.parseCueSettingsList(boxPayload, builder); + cueBuilder = WebvttCueParser.parseCueSettingsList(boxPayload); } else if (boxType == TYPE_payl) { - WebvttCueParser.parseCueText(null, boxPayload.trim(), builder, Collections.emptyList()); + cueText = + WebvttCueParser.parseCueText( + /* id= */ null, boxPayload.trim(), /* styles= */ Collections.emptyList()); } else { // Other VTTCueBox children are still not supported and are ignored. } } - return builder.build(); + if (cueText == null) { + cueText = ""; + } + return cueBuilder != null + ? cueBuilder.setText(cueText).build() + : WebvttCueParser.newCueForText(cueText); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/Mp4WebvttSubtitle.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/Mp4WebvttSubtitle.java index c87c88133c..00c8ecdc48 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/Mp4WebvttSubtitle.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/Mp4WebvttSubtitle.java @@ -22,9 +22,7 @@ import java.util.Collections; import java.util.List; -/** - * Representation of a Webvtt subtitle embedded in a MP4 container file. - */ +/** Representation of a Webvtt subtitle embedded in a MP4 container file. */ /* package */ final class Mp4WebvttSubtitle implements Subtitle { private final List cues; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCssParser.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCssParser.java new file mode 100644 index 0000000000..6fd236e98f --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCssParser.java @@ -0,0 +1,400 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.text.webvtt; + +import android.text.TextUtils; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.text.span.TextAnnotation; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.ColorParser; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.ParsableByteArray; +import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Ascii; +import java.util.ArrayList; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Provides a CSS parser for STYLE blocks in Webvtt files. Supports only a subset of the CSS + * features. + */ +/* package */ final class WebvttCssParser { + + private static final String TAG = "WebvttCssParser"; + + private static final String RULE_START = "{"; + private static final String RULE_END = "}"; + private static final String PROPERTY_COLOR = "color"; + private static final String PROPERTY_BGCOLOR = "background-color"; + private static final String PROPERTY_FONT_FAMILY = "font-family"; + private static final String PROPERTY_FONT_WEIGHT = "font-weight"; + private static final String PROPERTY_FONT_SIZE = "font-size"; + private static final String PROPERTY_RUBY_POSITION = "ruby-position"; + private static final String VALUE_OVER = "over"; + private static final String VALUE_UNDER = "under"; + private static final String PROPERTY_TEXT_COMBINE_UPRIGHT = "text-combine-upright"; + private static final String VALUE_ALL = "all"; + private static final String VALUE_DIGITS = "digits"; + private static final String PROPERTY_TEXT_DECORATION = "text-decoration"; + private static final String VALUE_BOLD = "bold"; + private static final String VALUE_UNDERLINE = "underline"; + private static final String PROPERTY_FONT_STYLE = "font-style"; + private static final String VALUE_ITALIC = "italic"; + + private static final Pattern VOICE_NAME_PATTERN = Pattern.compile("\\[voice=\"([^\"]*)\"\\]"); + private static final Pattern FONT_SIZE_PATTERN = + Pattern.compile("^((?:[0-9]*\\.)?[0-9]+)(px|em|%)$"); + + // Temporary utility data structures. + private final ParsableByteArray styleInput; + private final StringBuilder stringBuilder; + + public WebvttCssParser() { + styleInput = new ParsableByteArray(); + stringBuilder = new StringBuilder(); + } + + /** + * Takes a CSS style block and consumes up to the first empty line. Attempts to parse the contents + * of the style block and returns a list of {@link WebvttCssStyle} instances if successful. If + * parsing fails, it returns a list including only the styles which have been successfully parsed + * up to the style rule which was malformed. + * + * @param input The input from which the style block should be read. + * @return A list of {@link WebvttCssStyle}s that represents the parsed block, or a list + * containing the styles up to the parsing failure. 
+ */ + public List parseBlock(ParsableByteArray input) { + stringBuilder.setLength(0); + int initialInputPosition = input.getPosition(); + skipStyleBlock(input); + styleInput.reset(input.getData(), input.getPosition()); + styleInput.setPosition(initialInputPosition); + + List styles = new ArrayList<>(); + String selector; + while ((selector = parseSelector(styleInput, stringBuilder)) != null) { + if (!RULE_START.equals(parseNextToken(styleInput, stringBuilder))) { + return styles; + } + WebvttCssStyle style = new WebvttCssStyle(); + applySelectorToStyle(style, selector); + String token = null; + boolean blockEndFound = false; + while (!blockEndFound) { + int position = styleInput.getPosition(); + token = parseNextToken(styleInput, stringBuilder); + blockEndFound = token == null || RULE_END.equals(token); + if (!blockEndFound) { + styleInput.setPosition(position); + parseStyleDeclaration(styleInput, style, stringBuilder); + } + } + // Check that the style rule ended correctly. + if (RULE_END.equals(token)) { + styles.add(style); + } + } + return styles; + } + + /** + * Returns a string containing the selector. The input is expected to have the form {@code + * ::cue(tag#id.class1.class2[voice="someone"]}, where every element is optional. + * + * @param input From which the selector is obtained. + * @return A string containing the target, empty string if the selector is universal (targets all + * cues) or null if an error was encountered. + */ + @Nullable + private static String parseSelector(ParsableByteArray input, StringBuilder stringBuilder) { + skipWhitespaceAndComments(input); + if (input.bytesLeft() < 5) { + return null; + } + String cueSelector = input.readString(5); + if (!"::cue".equals(cueSelector)) { + return null; + } + int position = input.getPosition(); + String token = parseNextToken(input, stringBuilder); + if (token == null) { + return null; + } + if (RULE_START.equals(token)) { + input.setPosition(position); + return ""; + } + String target = null; + if ("(".equals(token)) { + target = readCueTarget(input); + } + token = parseNextToken(input, stringBuilder); + if (!")".equals(token)) { + return null; + } + return target; + } + + /** Reads the contents of ::cue() and returns it as a string. */ + private static String readCueTarget(ParsableByteArray input) { + int position = input.getPosition(); + int limit = input.limit(); + boolean cueTargetEndFound = false; + while (position < limit && !cueTargetEndFound) { + char c = (char) input.getData()[position++]; + cueTargetEndFound = c == ')'; + } + return input.readString(--position - input.getPosition()).trim(); + // --offset to return ')' to the input. + } + + private static void parseStyleDeclaration( + ParsableByteArray input, WebvttCssStyle style, StringBuilder stringBuilder) { + skipWhitespaceAndComments(input); + String property = parseIdentifier(input, stringBuilder); + if ("".equals(property)) { + return; + } + if (!":".equals(parseNextToken(input, stringBuilder))) { + return; + } + skipWhitespaceAndComments(input); + String value = parsePropertyValue(input, stringBuilder); + if (value == null || "".equals(value)) { + return; + } + int position = input.getPosition(); + String token = parseNextToken(input, stringBuilder); + if (";".equals(token)) { + // The style declaration is well formed. + } else if (RULE_END.equals(token)) { + // The style declaration is well formed and we can go on, but the closing bracket had to be + // fed back. + input.setPosition(position); + } else { + // The style declaration is not well formed. 
+ return; + } + // At this point we have a presumably valid declaration, we need to parse it and fill the style. + if (PROPERTY_COLOR.equals(property)) { + style.setFontColor(ColorParser.parseCssColor(value)); + } else if (PROPERTY_BGCOLOR.equals(property)) { + style.setBackgroundColor(ColorParser.parseCssColor(value)); + } else if (PROPERTY_RUBY_POSITION.equals(property)) { + if (VALUE_OVER.equals(value)) { + style.setRubyPosition(TextAnnotation.POSITION_BEFORE); + } else if (VALUE_UNDER.equals(value)) { + style.setRubyPosition(TextAnnotation.POSITION_AFTER); + } + } else if (PROPERTY_TEXT_COMBINE_UPRIGHT.equals(property)) { + style.setCombineUpright(VALUE_ALL.equals(value) || value.startsWith(VALUE_DIGITS)); + } else if (PROPERTY_TEXT_DECORATION.equals(property)) { + if (VALUE_UNDERLINE.equals(value)) { + style.setUnderline(true); + } + } else if (PROPERTY_FONT_FAMILY.equals(property)) { + style.setFontFamily(value); + } else if (PROPERTY_FONT_WEIGHT.equals(property)) { + if (VALUE_BOLD.equals(value)) { + style.setBold(true); + } + } else if (PROPERTY_FONT_STYLE.equals(property)) { + if (VALUE_ITALIC.equals(value)) { + style.setItalic(true); + } + } else if (PROPERTY_FONT_SIZE.equals(property)) { + parseFontSize(value, style); + } + // TODO: Fill remaining supported styles. + } + + // Visible for testing. + /* package */ static void skipWhitespaceAndComments(ParsableByteArray input) { + boolean skipping = true; + while (input.bytesLeft() > 0 && skipping) { + skipping = maybeSkipWhitespace(input) || maybeSkipComment(input); + } + } + + // Visible for testing. + @Nullable + /* package */ static String parseNextToken(ParsableByteArray input, StringBuilder stringBuilder) { + skipWhitespaceAndComments(input); + if (input.bytesLeft() == 0) { + return null; + } + String identifier = parseIdentifier(input, stringBuilder); + if (!"".equals(identifier)) { + return identifier; + } + // We found a delimiter. + return "" + (char) input.readUnsignedByte(); + } + + private static boolean maybeSkipWhitespace(ParsableByteArray input) { + switch (peekCharAtPosition(input, input.getPosition())) { + case '\t': + case '\r': + case '\n': + case '\f': + case ' ': + input.skipBytes(1); + return true; + default: + return false; + } + } + + // Visible for testing. + /* package */ static void skipStyleBlock(ParsableByteArray input) { + // The style block cannot contain empty lines, so we assume the input ends when a empty line + // is found. + String line; + do { + line = input.readLine(); + } while (!TextUtils.isEmpty(line)); + } + + private static char peekCharAtPosition(ParsableByteArray input, int position) { + return (char) input.getData()[position]; + } + + @Nullable + private static String parsePropertyValue(ParsableByteArray input, StringBuilder stringBuilder) { + StringBuilder expressionBuilder = new StringBuilder(); + String token; + int position; + boolean expressionEndFound = false; + // TODO: Add support for "Strings in quotes with spaces". + while (!expressionEndFound) { + position = input.getPosition(); + token = parseNextToken(input, stringBuilder); + if (token == null) { + // Syntax error. 
+ return null; + } + if (RULE_END.equals(token) || ";".equals(token)) { + input.setPosition(position); + expressionEndFound = true; + } else { + expressionBuilder.append(token); + } + } + return expressionBuilder.toString(); + } + + private static boolean maybeSkipComment(ParsableByteArray input) { + int position = input.getPosition(); + int limit = input.limit(); + byte[] data = input.getData(); + if (position + 2 <= limit && data[position++] == '/' && data[position++] == '*') { + while (position + 1 < limit) { + char skippedChar = (char) data[position++]; + if (skippedChar == '*') { + if (((char) data[position]) == '/') { + position++; + limit = position; + } + } + } + input.skipBytes(limit - input.getPosition()); + return true; + } + return false; + } + + private static String parseIdentifier(ParsableByteArray input, StringBuilder stringBuilder) { + stringBuilder.setLength(0); + int position = input.getPosition(); + int limit = input.limit(); + boolean identifierEndFound = false; + while (position < limit && !identifierEndFound) { + char c = (char) input.getData()[position]; + if ((c >= 'A' && c <= 'Z') + || (c >= 'a' && c <= 'z') + || (c >= '0' && c <= '9') + || c == '#' + || c == '-' + || c == '.' + || c == '_') { + position++; + stringBuilder.append(c); + } else { + identifierEndFound = true; + } + } + input.skipBytes(position - input.getPosition()); + return stringBuilder.toString(); + } + + private static void parseFontSize(String fontSize, WebvttCssStyle style) { + Matcher matcher = FONT_SIZE_PATTERN.matcher(Ascii.toLowerCase(fontSize)); + if (!matcher.matches()) { + Log.w(TAG, "Invalid font-size: '" + fontSize + "'."); + return; + } + String unit = Assertions.checkNotNull(matcher.group(2)); + switch (unit) { + case "px": + style.setFontSizeUnit(WebvttCssStyle.FONT_SIZE_UNIT_PIXEL); + break; + case "em": + style.setFontSizeUnit(WebvttCssStyle.FONT_SIZE_UNIT_EM); + break; + case "%": + style.setFontSizeUnit(WebvttCssStyle.FONT_SIZE_UNIT_PERCENT); + break; + default: + // this line should never be reached because when the fontSize matches the FONT_SIZE_PATTERN + // unit must be one of: px, em, % + throw new IllegalStateException(); + } + style.setFontSize(Float.parseFloat(Assertions.checkNotNull(matcher.group(1)))); + } + + /** + * Sets the target of a {@link WebvttCssStyle} by splitting a selector of the form {@code + * ::cue(tag#id.class1.class2[voice="someone"]}, where every element is optional. + */ + private void applySelectorToStyle(WebvttCssStyle style, String selector) { + if ("".equals(selector)) { + return; // Universal selector. + } + int voiceStartIndex = selector.indexOf('['); + if (voiceStartIndex != -1) { + Matcher matcher = VOICE_NAME_PATTERN.matcher(selector.substring(voiceStartIndex)); + if (matcher.matches()) { + style.setTargetVoice(Assertions.checkNotNull(matcher.group(1))); + } + selector = selector.substring(0, voiceStartIndex); + } + String[] classDivision = Util.split(selector, "\\."); + String tagAndIdDivision = classDivision[0]; + int idPrefixIndex = tagAndIdDivision.indexOf('#'); + if (idPrefixIndex != -1) { + style.setTargetTagName(tagAndIdDivision.substring(0, idPrefixIndex)); + style.setTargetId(tagAndIdDivision.substring(idPrefixIndex + 1)); // We discard the '#'. 
+ } else { + style.setTargetTagName(tagAndIdDivision); + } + if (classDivision.length > 1) { + style.setTargetClasses(Util.nullSafeArrayCopyOfRange(classDivision, 1, classDivision.length)); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCssStyle.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCssStyle.java index 97c0acb1ec..2fbd266e3c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCssStyle.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCssStyle.java @@ -15,19 +15,24 @@ */ package com.google.android.exoplayer2.text.webvtt; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.graphics.Typeface; -import android.text.Layout; import android.text.TextUtils; +import androidx.annotation.ColorInt; import androidx.annotation.IntDef; import androidx.annotation.Nullable; -import com.google.android.exoplayer2.util.Util; +import com.google.android.exoplayer2.text.span.TextAnnotation; +import com.google.common.base.Ascii; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.Arrays; import java.util.Collections; -import java.util.List; -import org.checkerframework.checker.nullness.qual.EnsuresNonNull; +import java.util.HashSet; +import java.util.Set; /** * Style object of a Css style block in a Webvtt file. @@ -45,6 +50,7 @@ public final class WebvttCssStyle { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef( flag = true, value = {UNSPECIFIED, STYLE_NORMAL, STYLE_BOLD, STYLE_ITALIC, STYLE_BOLD_ITALIC}) @@ -61,6 +67,7 @@ public final class WebvttCssStyle { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({UNSPECIFIED, FONT_SIZE_UNIT_PIXEL, FONT_SIZE_UNIT_EM, FONT_SIZE_UNIT_PERCENT}) public @interface FontSizeUnit {} @@ -70,6 +77,7 @@ public final class WebvttCssStyle { @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({UNSPECIFIED, OFF, ON}) private @interface OptionalBoolean {} @@ -79,35 +87,28 @@ public final class WebvttCssStyle { // Selector properties. private String targetId; private String targetTag; - private List targetClasses; + private Set targetClasses; private String targetVoice; // Style properties. @Nullable private String fontFamily; - private int fontColor; + @ColorInt private int fontColor; private boolean hasFontColor; private int backgroundColor; private boolean hasBackgroundColor; - @OptionalBoolean private int linethrough; - @OptionalBoolean private int underline; - @OptionalBoolean private int bold; - @OptionalBoolean private int italic; - @FontSizeUnit private int fontSizeUnit; + private @OptionalBoolean int linethrough; + private @OptionalBoolean int underline; + private @OptionalBoolean int bold; + private @OptionalBoolean int italic; + private @FontSizeUnit int fontSizeUnit; private float fontSize; - @Nullable private Layout.Alignment textAlign; + private @TextAnnotation.Position int rubyPosition; + private boolean combineUpright; - // Calling reset() is forbidden because `this` isn't initialized. This can be safely suppressed - // because reset() only assigns fields, it doesn't read any. 
- @SuppressWarnings("nullness:method.invocation.invalid") public WebvttCssStyle() { - reset(); - } - - @EnsuresNonNull({"targetId", "targetTag", "targetClasses", "targetVoice"}) - public void reset() { targetId = ""; targetTag = ""; - targetClasses = Collections.emptyList(); + targetClasses = Collections.emptySet(); targetVoice = ""; fontFamily = null; hasFontColor = false; @@ -117,11 +118,12 @@ public void reset() { bold = UNSPECIFIED; italic = UNSPECIFIED; fontSizeUnit = UNSPECIFIED; - textAlign = null; + rubyPosition = TextAnnotation.POSITION_UNKNOWN; + combineUpright = false; } public void setTargetId(String targetId) { - this.targetId = targetId; + this.targetId = targetId; } public void setTargetTagName(String targetTag) { @@ -129,7 +131,7 @@ public void setTargetTagName(String targetTag) { } public void setTargetClasses(String[] targetClasses) { - this.targetClasses = Arrays.asList(targetClasses); + this.targetClasses = new HashSet<>(Arrays.asList(targetClasses)); } public void setTargetVoice(String targetVoice) { @@ -139,24 +141,27 @@ public void setTargetVoice(String targetVoice) { /** * Returns a value in a score system compliant with the CSS Specificity rules. * - * @see CSS Cascading - *

- *     <p>The score works as follows:
- *     <ul>
- *       <li>Id match adds 0x40000000 to the score.
- *       <li>Each class and voice match adds 4 to the score.
- *       <li>Tag matching adds 2 to the score.
- *       <li>Universal selector matching scores 1.
- *     </ul>
+ *     <p>The score works as follows:
+ *
+ *     <ul>
+ *       <li>Id match adds 0x40000000 to the score.
+ *       <li>Each class and voice match adds 4 to the score.
+ *       <li>Tag matching adds 2 to the score.
+ *       <li>Universal selector matching scores 1.
+ *     </ul>
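A quick worked illustration of the scoring above, using a hypothetical selector and element (names are illustrative only, not taken from the diff):

// ::cue(b.loud) scored against a <b> element with classes {loud, whisper}, no id, no voice:
int idScore = 0;          // the selector has no id, so no 0x40000000 contribution
int tagScore = 2;         // tag "b" matches
int voiceScore = 0;       // no voice annotation to match
int classScore = 1 * 4;   // one selector class ("loud") is present on the element
int total = idScore + tagScore + voiceScore + classScore; // == 6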
      * * @param id The id of the cue if present, {@code null} otherwise. * @param tag Name of the tag, {@code null} if it refers to the entire cue. * @param classes An array containing the classes the tag belongs to. Must not be null. * @param voice Annotated voice if present, {@code null} otherwise. * @return The score of the match, zero if there is no match. + * @see CSS Cascading */ public int getSpecificityScore( - @Nullable String id, @Nullable String tag, String[] classes, @Nullable String voice) { - if (targetId.isEmpty() && targetTag.isEmpty() && targetClasses.isEmpty() + @Nullable String id, @Nullable String tag, Set classes, @Nullable String voice) { + if (targetId.isEmpty() + && targetTag.isEmpty() + && targetClasses.isEmpty() && targetVoice.isEmpty()) { // The selector is universal. It matches with the minimum score if and only if the given // element is a whole cue. @@ -166,7 +171,7 @@ public int getSpecificityScore( score = updateScoreForMatch(score, targetId, id, 0x40000000); score = updateScoreForMatch(score, targetTag, tag, 2); score = updateScoreForMatch(score, targetVoice, voice, 4); - if (score == -1 || !Arrays.asList(classes).containsAll(targetClasses)) { + if (score == -1 || !classes.containsAll(targetClasses)) { return 0; } else { score += targetClasses.size() * 4; @@ -180,18 +185,18 @@ public int getSpecificityScore( * @return {@link #UNSPECIFIED}, {@link #STYLE_NORMAL}, {@link #STYLE_BOLD}, {@link #STYLE_BOLD} * or {@link #STYLE_BOLD_ITALIC}. */ - @StyleFlags public int getStyle() { + public @StyleFlags int getStyle() { if (bold == UNSPECIFIED && italic == UNSPECIFIED) { return UNSPECIFIED; } - return (bold == ON ? STYLE_BOLD : STYLE_NORMAL) - | (italic == ON ? STYLE_ITALIC : STYLE_NORMAL); + return (bold == ON ? STYLE_BOLD : STYLE_NORMAL) | (italic == ON ? STYLE_ITALIC : STYLE_NORMAL); } public boolean isLinethrough() { return linethrough == ON; } + @CanIgnoreReturnValue public WebvttCssStyle setLinethrough(boolean linethrough) { this.linethrough = linethrough ? ON : OFF; return this; @@ -201,15 +206,19 @@ public boolean isUnderline() { return underline == ON; } + @CanIgnoreReturnValue public WebvttCssStyle setUnderline(boolean underline) { this.underline = underline ? ON : OFF; return this; } + + @CanIgnoreReturnValue public WebvttCssStyle setBold(boolean bold) { this.bold = bold ? ON : OFF; return this; } + @CanIgnoreReturnValue public WebvttCssStyle setItalic(boolean italic) { this.italic = italic ? ON : OFF; return this; @@ -220,8 +229,9 @@ public String getFontFamily() { return fontFamily; } + @CanIgnoreReturnValue public WebvttCssStyle setFontFamily(@Nullable String fontFamily) { - this.fontFamily = Util.toLowerInvariant(fontFamily); + this.fontFamily = fontFamily == null ? 
null : Ascii.toLowerCase(fontFamily); return this; } @@ -232,6 +242,7 @@ public int getFontColor() { return fontColor; } + @CanIgnoreReturnValue public WebvttCssStyle setFontColor(int color) { this.fontColor = color; hasFontColor = true; @@ -249,6 +260,7 @@ public int getBackgroundColor() { return backgroundColor; } + @CanIgnoreReturnValue public WebvttCssStyle setBackgroundColor(int backgroundColor) { this.backgroundColor = backgroundColor; hasBackgroundColor = true; @@ -259,27 +271,19 @@ public boolean hasBackgroundColor() { return hasBackgroundColor; } - @Nullable - public Layout.Alignment getTextAlign() { - return textAlign; - } - - public WebvttCssStyle setTextAlign(@Nullable Layout.Alignment textAlign) { - this.textAlign = textAlign; - return this; - } - + @CanIgnoreReturnValue public WebvttCssStyle setFontSize(float fontSize) { this.fontSize = fontSize; return this; } - public WebvttCssStyle setFontSizeUnit(short unit) { + @CanIgnoreReturnValue + public WebvttCssStyle setFontSizeUnit(@FontSizeUnit int unit) { this.fontSizeUnit = unit; return this; } - @FontSizeUnit public int getFontSizeUnit() { + public @FontSizeUnit int getFontSizeUnit() { return fontSizeUnit; } @@ -287,35 +291,24 @@ public float getFontSize() { return fontSize; } - public void cascadeFrom(WebvttCssStyle style) { - if (style.hasFontColor) { - setFontColor(style.fontColor); - } - if (style.bold != UNSPECIFIED) { - bold = style.bold; - } - if (style.italic != UNSPECIFIED) { - italic = style.italic; - } - if (style.fontFamily != null) { - fontFamily = style.fontFamily; - } - if (linethrough == UNSPECIFIED) { - linethrough = style.linethrough; - } - if (underline == UNSPECIFIED) { - underline = style.underline; - } - if (textAlign == null) { - textAlign = style.textAlign; - } - if (fontSizeUnit == UNSPECIFIED) { - fontSizeUnit = style.fontSizeUnit; - fontSize = style.fontSize; - } - if (style.hasBackgroundColor) { - setBackgroundColor(style.backgroundColor); - } + @CanIgnoreReturnValue + public WebvttCssStyle setRubyPosition(@TextAnnotation.Position int rubyPosition) { + this.rubyPosition = rubyPosition; + return this; + } + + public @TextAnnotation.Position int getRubyPosition() { + return rubyPosition; + } + + @CanIgnoreReturnValue + public WebvttCssStyle setCombineUpright(boolean enabled) { + this.combineUpright = enabled; + return this; + } + + public boolean getCombineUpright() { + return combineUpright; } private static int updateScoreForMatch( @@ -325,5 +318,4 @@ private static int updateScoreForMatch( } return target.equals(actual) ? currentScore + score : -1; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCue.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCue.java deleted file mode 100644 index 55e568efa1..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCue.java +++ /dev/null @@ -1,319 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.text.webvtt; - -import static java.lang.annotation.RetentionPolicy.SOURCE; - -import android.text.Layout.Alignment; -import androidx.annotation.IntDef; -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.text.Cue; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.Log; -import java.lang.annotation.Documented; -import java.lang.annotation.Retention; - -/** A representation of a WebVTT cue. */ -public final class WebvttCue extends Cue { - - private static final float DEFAULT_POSITION = 0.5f; - - public final long startTime; - public final long endTime; - - private WebvttCue( - long startTime, - long endTime, - CharSequence text, - @Nullable Alignment textAlignment, - float line, - @Cue.LineType int lineType, - @Cue.AnchorType int lineAnchor, - float position, - @Cue.AnchorType int positionAnchor, - float width) { - super(text, textAlignment, line, lineType, lineAnchor, position, positionAnchor, width); - this.startTime = startTime; - this.endTime = endTime; - } - - /** - * Returns whether or not this cue should be placed in the default position and rolled-up with - * the other "normal" cues. - * - * @return Whether this cue should be placed in the default position. - */ - public boolean isNormalCue() { - return (line == DIMEN_UNSET && position == DEFAULT_POSITION); - } - - /** Builder for WebVTT cues. */ - @SuppressWarnings("hiding") - public static class Builder { - - /** - * Valid values for {@link #setTextAlignment(int)}. - * - *

- * <p>We use a custom list (and not {@link Alignment} directly) in order to include both {@code
- * START}/{@code LEFT} and {@code END}/{@code RIGHT}. The distinction is important for {@link
- * #derivePosition(int)}.
- *
- * <p>
      These correspond to the valid values for the 'align' cue setting in the WebVTT spec. - */ - @Documented - @Retention(SOURCE) - @IntDef({ - TEXT_ALIGNMENT_START, - TEXT_ALIGNMENT_CENTER, - TEXT_ALIGNMENT_END, - TEXT_ALIGNMENT_LEFT, - TEXT_ALIGNMENT_RIGHT - }) - public @interface TextAlignment {} - /** - * See WebVTT's align:start. - */ - public static final int TEXT_ALIGNMENT_START = 1; - - /** - * See WebVTT's align:center. - */ - public static final int TEXT_ALIGNMENT_CENTER = 2; - - /** - * See WebVTT's align:end. - */ - public static final int TEXT_ALIGNMENT_END = 3; - - /** - * See WebVTT's align:left. - */ - public static final int TEXT_ALIGNMENT_LEFT = 4; - - /** - * See WebVTT's align:right. - */ - public static final int TEXT_ALIGNMENT_RIGHT = 5; - - private static final String TAG = "WebvttCueBuilder"; - - private long startTime; - private long endTime; - @Nullable private CharSequence text; - @TextAlignment private int textAlignment; - private float line; - // Equivalent to WebVTT's snap-to-lines flag: - // https://www.w3.org/TR/webvtt1/#webvtt-cue-snap-to-lines-flag - @LineType private int lineType; - @AnchorType private int lineAnchor; - private float position; - @AnchorType private int positionAnchor; - private float width; - - // Initialization methods - - // Calling reset() is forbidden because `this` isn't initialized. This can be safely - // suppressed because reset() only assigns fields, it doesn't read any. - @SuppressWarnings("nullness:method.invocation.invalid") - public Builder() { - reset(); - } - - public void reset() { - startTime = 0; - endTime = 0; - text = null; - // Default: https://www.w3.org/TR/webvtt1/#webvtt-cue-text-alignment - textAlignment = TEXT_ALIGNMENT_CENTER; - line = Cue.DIMEN_UNSET; - // Defaults to NUMBER (true): https://www.w3.org/TR/webvtt1/#webvtt-cue-snap-to-lines-flag - lineType = Cue.LINE_TYPE_NUMBER; - // Default: https://www.w3.org/TR/webvtt1/#webvtt-cue-line-alignment - lineAnchor = Cue.ANCHOR_TYPE_START; - position = Cue.DIMEN_UNSET; - positionAnchor = Cue.TYPE_UNSET; - // Default: https://www.w3.org/TR/webvtt1/#webvtt-cue-size - width = 1.0f; - } - - // Construction methods. 
- - public WebvttCue build() { - line = computeLine(line, lineType); - - if (position == Cue.DIMEN_UNSET) { - position = derivePosition(textAlignment); - } - - if (positionAnchor == Cue.TYPE_UNSET) { - positionAnchor = derivePositionAnchor(textAlignment); - } - - width = Math.min(width, deriveMaxSize(positionAnchor, position)); - - return new WebvttCue( - startTime, - endTime, - Assertions.checkNotNull(text), - convertTextAlignment(textAlignment), - line, - lineType, - lineAnchor, - position, - positionAnchor, - width); - } - - public Builder setStartTime(long time) { - startTime = time; - return this; - } - - public Builder setEndTime(long time) { - endTime = time; - return this; - } - - public Builder setText(CharSequence text) { - this.text = text; - return this; - } - - public Builder setTextAlignment(@TextAlignment int textAlignment) { - this.textAlignment = textAlignment; - return this; - } - - public Builder setLine(float line) { - this.line = line; - return this; - } - - public Builder setLineType(@LineType int lineType) { - this.lineType = lineType; - return this; - } - - public Builder setLineAnchor(@AnchorType int lineAnchor) { - this.lineAnchor = lineAnchor; - return this; - } - - public Builder setPosition(float position) { - this.position = position; - return this; - } - - public Builder setPositionAnchor(@AnchorType int positionAnchor) { - this.positionAnchor = positionAnchor; - return this; - } - - public Builder setWidth(float width) { - this.width = width; - return this; - } - - // https://www.w3.org/TR/webvtt1/#webvtt-cue-line - private static float computeLine(float line, @LineType int lineType) { - if (line != Cue.DIMEN_UNSET - && lineType == Cue.LINE_TYPE_FRACTION - && (line < 0.0f || line > 1.0f)) { - return 1.0f; // Step 1 - } else if (line != Cue.DIMEN_UNSET) { - // Step 2: Do nothing, line is already correct. - return line; - } else if (lineType == Cue.LINE_TYPE_FRACTION) { - return 1.0f; // Step 3 - } else { - // Steps 4 - 10 (stacking multiple simultaneous cues) are handled by WebvttSubtitle#getCues - // and WebvttCue#isNormalCue. 
- return DIMEN_UNSET; - } - } - - // https://www.w3.org/TR/webvtt1/#webvtt-cue-position - private static float derivePosition(@TextAlignment int textAlignment) { - switch (textAlignment) { - case TEXT_ALIGNMENT_LEFT: - return 0.0f; - case TEXT_ALIGNMENT_RIGHT: - return 1.0f; - case TEXT_ALIGNMENT_START: - case TEXT_ALIGNMENT_CENTER: - case TEXT_ALIGNMENT_END: - default: - return DEFAULT_POSITION; - } - } - - // https://www.w3.org/TR/webvtt1/#webvtt-cue-position-alignment - @AnchorType - private static int derivePositionAnchor(@TextAlignment int textAlignment) { - switch (textAlignment) { - case TEXT_ALIGNMENT_LEFT: - case TEXT_ALIGNMENT_START: - return Cue.ANCHOR_TYPE_START; - case TEXT_ALIGNMENT_RIGHT: - case TEXT_ALIGNMENT_END: - return Cue.ANCHOR_TYPE_END; - case TEXT_ALIGNMENT_CENTER: - default: - return Cue.ANCHOR_TYPE_MIDDLE; - } - } - - @Nullable - private static Alignment convertTextAlignment(@TextAlignment int textAlignment) { - switch (textAlignment) { - case TEXT_ALIGNMENT_START: - case TEXT_ALIGNMENT_LEFT: - return Alignment.ALIGN_NORMAL; - case TEXT_ALIGNMENT_CENTER: - return Alignment.ALIGN_CENTER; - case TEXT_ALIGNMENT_END: - case TEXT_ALIGNMENT_RIGHT: - return Alignment.ALIGN_OPPOSITE; - default: - Log.w(TAG, "Unknown textAlignment: " + textAlignment); - return null; - } - } - - // Step 2 here: https://www.w3.org/TR/webvtt1/#processing-cue-settings - private static float deriveMaxSize(@AnchorType int positionAnchor, float position) { - switch (positionAnchor) { - case Cue.ANCHOR_TYPE_START: - return 1.0f - position; - case Cue.ANCHOR_TYPE_END: - return position; - case Cue.ANCHOR_TYPE_MIDDLE: - if (position <= 0.5f) { - return position * 2; - } else { - return (1.0f - position) * 2; - } - case Cue.TYPE_UNSET: - default: - throw new IllegalStateException(String.valueOf(positionAnchor)); - } - } - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCueInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCueInfo.java new file mode 100644 index 0000000000..5c8ce58d33 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCueInfo.java @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.text.webvtt; + +import com.google.android.exoplayer2.text.Cue; + +/** A representation of a WebVTT cue. 
*/ +public final class WebvttCueInfo { + + public final Cue cue; + public final long startTimeUs; + public final long endTimeUs; + + public WebvttCueInfo(Cue cue, long startTimeUs, long endTimeUs) { + this.cue = cue; + this.startTimeUs = startTimeUs; + this.endTimeUs = endTimeUs; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCueParser.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCueParser.java index b6ddf89dc3..0a726cff8a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCueParser.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttCueParser.java @@ -15,14 +15,18 @@ */ package com.google.android.exoplayer2.text.webvtt; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; +import static java.lang.annotation.RetentionPolicy.SOURCE; + +import android.graphics.Color; import android.graphics.Typeface; import android.text.Layout; -import android.text.Spannable; import android.text.SpannableStringBuilder; import android.text.Spanned; +import android.text.SpannedString; import android.text.TextUtils; import android.text.style.AbsoluteSizeSpan; -import android.text.style.AlignmentSpan; import android.text.style.BackgroundColorSpan; import android.text.style.ForegroundColorSpan; import android.text.style.RelativeSizeSpan; @@ -30,26 +34,88 @@ import android.text.style.StyleSpan; import android.text.style.TypefaceSpan; import android.text.style.UnderlineSpan; -import androidx.annotation.NonNull; +import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.text.Cue; +import com.google.android.exoplayer2.text.span.HorizontalTextInVerticalContextSpan; +import com.google.android.exoplayer2.text.span.RubySpan; +import com.google.android.exoplayer2.text.span.SpanUtil; +import com.google.android.exoplayer2.text.span.TextAnnotation; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.Target; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; /** Parser for WebVTT cues. (https://w3c.github.io/webvtt/#cues) */ public final class WebvttCueParser { - public static final Pattern CUE_HEADER_PATTERN = Pattern - .compile("^(\\S+)\\s+-->\\s+(\\S+)(.*)?$"); + /** + * Valid values for {@link WebvttCueInfoBuilder#textAlignment}. + * + *
<p>
      We use a custom list (and not {@link Layout.Alignment} directly) in order to include both + * {@code START}/{@code LEFT} and {@code END}/{@code RIGHT}. The distinction is important for + * {@link WebvttCueInfoBuilder#derivePosition(int)}. + * + *
<p>
      These correspond to the valid values for the 'align' cue setting in the WebVTT spec. + */ + @Documented + @Retention(SOURCE) + @Target(TYPE_USE) + @IntDef({ + TEXT_ALIGNMENT_START, + TEXT_ALIGNMENT_CENTER, + TEXT_ALIGNMENT_END, + TEXT_ALIGNMENT_LEFT, + TEXT_ALIGNMENT_RIGHT + }) + private @interface TextAlignment {} + + /** + * See WebVTT's align:start. + */ + private static final int TEXT_ALIGNMENT_START = 1; + + /** + * See WebVTT's align:center. + */ + private static final int TEXT_ALIGNMENT_CENTER = 2; + /** + * See WebVTT's align:end. + */ + private static final int TEXT_ALIGNMENT_END = 3; + + /** + * See WebVTT's align:left. + */ + private static final int TEXT_ALIGNMENT_LEFT = 4; + + /** + * See WebVTT's align:right. + */ + private static final int TEXT_ALIGNMENT_RIGHT = 5; + + public static final Pattern CUE_HEADER_PATTERN = + Pattern.compile("^(\\S+)\\s+-->\\s+(\\S+)(.*)?$"); private static final Pattern CUE_SETTING_PATTERN = Pattern.compile("(\\S+?):(\\S+)"); private static final char CHAR_LESS_THAN = '<'; @@ -65,101 +131,123 @@ public final class WebvttCueParser { private static final String ENTITY_NON_BREAK_SPACE = "nbsp"; private static final String TAG_BOLD = "b"; + private static final String TAG_CLASS = "c"; private static final String TAG_ITALIC = "i"; + private static final String TAG_LANG = "lang"; + private static final String TAG_RUBY = "ruby"; + private static final String TAG_RUBY_TEXT = "rt"; private static final String TAG_UNDERLINE = "u"; - private static final String TAG_CLASS = "c"; private static final String TAG_VOICE = "v"; - private static final String TAG_LANG = "lang"; private static final int STYLE_BOLD = Typeface.BOLD; private static final int STYLE_ITALIC = Typeface.ITALIC; + /* package */ static final float DEFAULT_POSITION = 0.5f; + private static final String TAG = "WebvttCueParser"; - private final StringBuilder textBuilder; + /** + * See WebVTT's default text + * colors. + */ + private static final Map DEFAULT_TEXT_COLORS; + + static { + Map defaultColors = new HashMap<>(); + defaultColors.put("white", Color.rgb(255, 255, 255)); + defaultColors.put("lime", Color.rgb(0, 255, 0)); + defaultColors.put("cyan", Color.rgb(0, 255, 255)); + defaultColors.put("red", Color.rgb(255, 0, 0)); + defaultColors.put("yellow", Color.rgb(255, 255, 0)); + defaultColors.put("magenta", Color.rgb(255, 0, 255)); + defaultColors.put("blue", Color.rgb(0, 0, 255)); + defaultColors.put("black", Color.rgb(0, 0, 0)); + DEFAULT_TEXT_COLORS = Collections.unmodifiableMap(defaultColors); + } - public WebvttCueParser() { - textBuilder = new StringBuilder(); + /** + * See WebVTT's default text + * background colors. + */ + private static final Map DEFAULT_BACKGROUND_COLORS; + + static { + Map defaultBackgroundColors = new HashMap<>(); + defaultBackgroundColors.put("bg_white", Color.rgb(255, 255, 255)); + defaultBackgroundColors.put("bg_lime", Color.rgb(0, 255, 0)); + defaultBackgroundColors.put("bg_cyan", Color.rgb(0, 255, 255)); + defaultBackgroundColors.put("bg_red", Color.rgb(255, 0, 0)); + defaultBackgroundColors.put("bg_yellow", Color.rgb(255, 255, 0)); + defaultBackgroundColors.put("bg_magenta", Color.rgb(255, 0, 255)); + defaultBackgroundColors.put("bg_blue", Color.rgb(0, 0, 255)); + defaultBackgroundColors.put("bg_black", Color.rgb(0, 0, 0)); + DEFAULT_BACKGROUND_COLORS = Collections.unmodifiableMap(defaultBackgroundColors); } /** * Parses the next valid WebVTT cue in a parsable array, including timestamps, settings and text. 
* * @param webvttData Parsable WebVTT file data. - * @param builder Builder for WebVTT Cues (output parameter). * @param styles List of styles defined by the CSS style blocks preceding the cues. - * @return Whether a valid Cue was found. + * @return The parsed cue info, or null if no valid cue was found. */ - public boolean parseCue( - ParsableByteArray webvttData, WebvttCue.Builder builder, List styles) { + @Nullable + public static WebvttCueInfo parseCue(ParsableByteArray webvttData, List styles) { @Nullable String firstLine = webvttData.readLine(); if (firstLine == null) { - return false; + return null; } Matcher cueHeaderMatcher = WebvttCueParser.CUE_HEADER_PATTERN.matcher(firstLine); if (cueHeaderMatcher.matches()) { // We have found the timestamps in the first line. No id present. - return parseCue(null, cueHeaderMatcher, webvttData, builder, textBuilder, styles); + return parseCue(null, cueHeaderMatcher, webvttData, styles); } // The first line is not the timestamps, but could be the cue id. @Nullable String secondLine = webvttData.readLine(); if (secondLine == null) { - return false; + return null; } cueHeaderMatcher = WebvttCueParser.CUE_HEADER_PATTERN.matcher(secondLine); if (cueHeaderMatcher.matches()) { // We can do the rest of the parsing, including the id. - return parseCue(firstLine.trim(), cueHeaderMatcher, webvttData, builder, textBuilder, - styles); + return parseCue(firstLine.trim(), cueHeaderMatcher, webvttData, styles); } - return false; + return null; } /** * Parses a string containing a list of cue settings. * * @param cueSettingsList String containing the settings for a given cue. - * @param builder The {@link WebvttCue.Builder} where incremental construction takes place. + * @return The cue settings parsed into a {@link Cue.Builder}. */ - /* package */ static void parseCueSettingsList(String cueSettingsList, - WebvttCue.Builder builder) { - // Parse the cue settings list. - Matcher cueSettingMatcher = CUE_SETTING_PATTERN.matcher(cueSettingsList); - while (cueSettingMatcher.find()) { - String name = cueSettingMatcher.group(1); - String value = cueSettingMatcher.group(2); - try { - if ("line".equals(name)) { - parseLineAttribute(value, builder); - } else if ("align".equals(name)) { - builder.setTextAlignment(parseTextAlignment(value)); - } else if ("position".equals(name)) { - parsePositionAttribute(value, builder); - } else if ("size".equals(name)) { - builder.setWidth(WebvttParserUtil.parsePercentage(value)); - } else { - Log.w(TAG, "Unknown cue setting " + name + ":" + value); - } - } catch (NumberFormatException e) { - Log.w(TAG, "Skipping bad cue setting: " + cueSettingMatcher.group()); - } - } + /* package */ static Cue.Builder parseCueSettingsList(String cueSettingsList) { + WebvttCueInfoBuilder builder = new WebvttCueInfoBuilder(); + parseCueSettingsList(cueSettingsList, builder); + return builder.toCueBuilder(); + } + + /** Create a new {@link Cue} containing {@code text} and with WebVTT default values. */ + /* package */ static Cue newCueForText(CharSequence text) { + WebvttCueInfoBuilder infoBuilder = new WebvttCueInfoBuilder(); + infoBuilder.text = text; + return infoBuilder.toCueBuilder().build(); } /** - * Parses the text payload of a WebVTT Cue and applies modifications on {@link WebvttCue.Builder}. + * Parses the text payload of a WebVTT Cue and returns it as a styled {@link SpannedString}. * - * @param id Id of the cue, {@code null} if it is not present. + * @param id ID of the cue, {@code null} if it is not present. 
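// Illustrative sketch of how the new static parseCue API can be called; the sample cue text and
// the empty style list are assumptions, not values taken from this change. The old builder round
// trip is replaced by one call that returns an immutable WebvttCueInfo, or null on a bad cue.
byte[] cueBytes =
    "00:00:01.000 --> 00:00:02.500 align:left\nHello <b>world</b>\n\n"
        .getBytes(java.nio.charset.StandardCharsets.UTF_8);
WebvttCueInfo cueInfo =
    WebvttCueParser.parseCue(new ParsableByteArray(cueBytes), /* styles= */ new ArrayList<>());
if (cueInfo != null) {
  long durationUs = cueInfo.endTimeUs - cueInfo.startTimeUs; // 1_500_000 for this sample.
  CharSequence styledText = cueInfo.cue.text;                // "Hello world", with "world" bold.
}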
* @param markup The markup text to be parsed. * @param styles List of styles defined by the CSS style blocks preceding the cues. - * @param builder Output builder. + * @return The styled cue text. */ - /* package */ static void parseCueText( - @Nullable String id, String markup, WebvttCue.Builder builder, List styles) { + /* package */ static SpannedString parseCueText( + @Nullable String id, String markup, List styles) { SpannableStringBuilder spannedText = new SpannableStringBuilder(); ArrayDeque startTagStack = new ArrayDeque<>(); - List scratchStyleMatches = new ArrayList<>(); int pos = 0; + List nestedElements = new ArrayList<>(); while (pos < markup.length()) { char curr = markup.charAt(pos); switch (curr) { @@ -172,8 +260,8 @@ public boolean parseCue( boolean isClosingTag = markup.charAt(ltPos + 1) == CHAR_SLASH; pos = findEndOfTag(markup, ltPos + 1); boolean isVoidTag = markup.charAt(pos - 2) == CHAR_SLASH; - String fullTagExpression = markup.substring(ltPos + (isClosingTag ? 2 : 1), - isVoidTag ? pos - 2 : pos - 1); + String fullTagExpression = + markup.substring(ltPos + (isClosingTag ? 2 : 1), isVoidTag ? pos - 2 : pos - 1); if (fullTagExpression.trim().isEmpty()) { continue; } @@ -188,8 +276,13 @@ public boolean parseCue( break; } startTag = startTagStack.pop(); - applySpansForTag(id, startTag, spannedText, styles, scratchStyleMatches); - } while(!startTag.name.equals(tagName)); + applySpansForTag(id, startTag, nestedElements, spannedText, styles); + if (!startTagStack.isEmpty()) { + nestedElements.add(new Element(startTag, spannedText.length())); + } else { + nestedElements.clear(); + } + } while (!startTag.name.equals(tagName)); } else if (!isVoidTag) { startTagStack.push(StartTag.buildStartTag(fullTagExpression, spannedText.length())); } @@ -197,9 +290,12 @@ public boolean parseCue( case CHAR_AMPERSAND: int semiColonEndIndex = markup.indexOf(CHAR_SEMI_COLON, pos + 1); int spaceEndIndex = markup.indexOf(CHAR_SPACE, pos + 1); - int entityEndIndex = semiColonEndIndex == -1 ? spaceEndIndex - : (spaceEndIndex == -1 ? semiColonEndIndex - : Math.min(semiColonEndIndex, spaceEndIndex)); + int entityEndIndex = + semiColonEndIndex == -1 + ? spaceEndIndex + : (spaceEndIndex == -1 + ? semiColonEndIndex + : min(semiColonEndIndex, spaceEndIndex)); if (entityEndIndex != -1) { applyEntity(markup.substring(pos + 1, entityEndIndex), spannedText); if (entityEndIndex == spaceEndIndex) { @@ -219,33 +315,41 @@ public boolean parseCue( } // apply unclosed tags while (!startTagStack.isEmpty()) { - applySpansForTag(id, startTagStack.pop(), spannedText, styles, scratchStyleMatches); - } - applySpansForTag(id, StartTag.buildWholeCueVirtualTag(), spannedText, styles, - scratchStyleMatches); - builder.setText(spannedText); + applySpansForTag(id, startTagStack.pop(), nestedElements, spannedText, styles); + } + applySpansForTag( + id, + StartTag.buildWholeCueVirtualTag(), + /* nestedElements= */ Collections.emptyList(), + spannedText, + styles); + return SpannedString.valueOf(spannedText); } - private static boolean parseCue( + // Internal methods + + @Nullable + private static WebvttCueInfo parseCue( @Nullable String id, Matcher cueHeaderMatcher, ParsableByteArray webvttData, - WebvttCue.Builder builder, - StringBuilder textBuilder, List styles) { + WebvttCueInfoBuilder builder = new WebvttCueInfoBuilder(); try { // Parse the cue start and end times. 
- builder.setStartTime(WebvttParserUtil.parseTimestampUs(cueHeaderMatcher.group(1))) - .setEndTime(WebvttParserUtil.parseTimestampUs(cueHeaderMatcher.group(2))); + builder.startTimeUs = + WebvttParserUtil.parseTimestampUs(Assertions.checkNotNull(cueHeaderMatcher.group(1))); + builder.endTimeUs = + WebvttParserUtil.parseTimestampUs(Assertions.checkNotNull(cueHeaderMatcher.group(2))); } catch (NumberFormatException e) { Log.w(TAG, "Skipping cue with bad header: " + cueHeaderMatcher.group()); - return false; + return null; } - parseCueSettingsList(cueHeaderMatcher.group(3), builder); + parseCueSettingsList(Assertions.checkNotNull(cueHeaderMatcher.group(3)), builder); // Parse the cue text. - textBuilder.setLength(0); + StringBuilder textBuilder = new StringBuilder(); for (String line = webvttData.readLine(); !TextUtils.isEmpty(line); line = webvttData.readLine()) { @@ -254,48 +358,85 @@ private static boolean parseCue( } textBuilder.append(line.trim()); } - parseCueText(id, textBuilder.toString(), builder, styles); - return true; + builder.text = parseCueText(id, textBuilder.toString(), styles); + return builder.build(); } - // Internal methods + private static void parseCueSettingsList(String cueSettingsList, WebvttCueInfoBuilder builder) { + // Parse the cue settings list. + Matcher cueSettingMatcher = CUE_SETTING_PATTERN.matcher(cueSettingsList); + + while (cueSettingMatcher.find()) { + String name = Assertions.checkNotNull(cueSettingMatcher.group(1)); + String value = Assertions.checkNotNull(cueSettingMatcher.group(2)); + try { + if ("line".equals(name)) { + parseLineAttribute(value, builder); + } else if ("align".equals(name)) { + builder.textAlignment = parseTextAlignment(value); + } else if ("position".equals(name)) { + parsePositionAttribute(value, builder); + } else if ("size".equals(name)) { + builder.size = WebvttParserUtil.parsePercentage(value); + } else if ("vertical".equals(name)) { + builder.verticalType = parseVerticalAttribute(value); + } else { + Log.w(TAG, "Unknown cue setting " + name + ":" + value); + } + } catch (NumberFormatException e) { + Log.w(TAG, "Skipping bad cue setting: " + cueSettingMatcher.group()); + } + } + } - private static void parseLineAttribute(String s, WebvttCue.Builder builder) { + private static void parseLineAttribute(String s, WebvttCueInfoBuilder builder) { int commaIndex = s.indexOf(','); if (commaIndex != -1) { - builder.setLineAnchor(parsePositionAnchor(s.substring(commaIndex + 1))); + builder.lineAnchor = parseLineAnchor(s.substring(commaIndex + 1)); s = s.substring(0, commaIndex); } if (s.endsWith("%")) { - builder.setLine(WebvttParserUtil.parsePercentage(s)).setLineType(Cue.LINE_TYPE_FRACTION); + builder.line = WebvttParserUtil.parsePercentage(s); + builder.lineType = Cue.LINE_TYPE_FRACTION; } else { - int lineNumber = Integer.parseInt(s); - if (lineNumber < 0) { - // WebVTT defines line -1 as last visible row when lineAnchor is ANCHOR_TYPE_START, where-as - // Cue defines it to be the first row that's not visible. 
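// Illustrative sketch of the settings parsing; parseCueSettingsList is package-private, so a
// call like this only compiles from inside the webvtt package (e.g. a test). The sample settings
// string is an assumption. Setting by setting it folds into WebvttCueInfoBuilder as:
//   line:80%,center -> line=0.8f, lineType=LINE_TYPE_FRACTION, lineAnchor=ANCHOR_TYPE_MIDDLE
//   align:left      -> textAlignment=TEXT_ALIGNMENT_LEFT (position later derived as 0.0f)
//   size:35%        -> size=0.35f
//   vertical:rl     -> verticalType=VERTICAL_TYPE_RL (newly recognised by this version)
Cue cue =
    WebvttCueParser.parseCueSettingsList("line:80%,center align:left size:35% vertical:rl")
        .setText("sample")
        .build();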
- lineNumber--; - } - builder.setLine(lineNumber).setLineType(Cue.LINE_TYPE_NUMBER); + builder.line = Integer.parseInt(s); + builder.lineType = Cue.LINE_TYPE_NUMBER; } } - private static void parsePositionAttribute(String s, WebvttCue.Builder builder) { + private static @Cue.AnchorType int parseLineAnchor(String s) { + switch (s) { + case "start": + return Cue.ANCHOR_TYPE_START; + case "center": + case "middle": + return Cue.ANCHOR_TYPE_MIDDLE; + case "end": + return Cue.ANCHOR_TYPE_END; + default: + Log.w(TAG, "Invalid anchor value: " + s); + return Cue.TYPE_UNSET; + } + } + + private static void parsePositionAttribute(String s, WebvttCueInfoBuilder builder) { int commaIndex = s.indexOf(','); if (commaIndex != -1) { - builder.setPositionAnchor(parsePositionAnchor(s.substring(commaIndex + 1))); + builder.positionAnchor = parsePositionAnchor(s.substring(commaIndex + 1)); s = s.substring(0, commaIndex); } - builder.setPosition(WebvttParserUtil.parsePercentage(s)); + builder.position = WebvttParserUtil.parsePercentage(s); } - @Cue.AnchorType - private static int parsePositionAnchor(String s) { + private static @Cue.AnchorType int parsePositionAnchor(String s) { switch (s) { + case "line-left": case "start": return Cue.ANCHOR_TYPE_START; case "center": case "middle": return Cue.ANCHOR_TYPE_MIDDLE; + case "line-right": case "end": return Cue.ANCHOR_TYPE_END; default: @@ -304,24 +445,35 @@ private static int parsePositionAnchor(String s) { } } - @WebvttCue.Builder.TextAlignment - private static int parseTextAlignment(String s) { + private static @Cue.VerticalType int parseVerticalAttribute(String s) { + switch (s) { + case "rl": + return Cue.VERTICAL_TYPE_RL; + case "lr": + return Cue.VERTICAL_TYPE_LR; + default: + Log.w(TAG, "Invalid 'vertical' value: " + s); + return Cue.TYPE_UNSET; + } + } + + private static @TextAlignment int parseTextAlignment(String s) { switch (s) { case "start": - return WebvttCue.Builder.TEXT_ALIGNMENT_START; + return TEXT_ALIGNMENT_START; case "left": - return WebvttCue.Builder.TEXT_ALIGNMENT_LEFT; + return TEXT_ALIGNMENT_LEFT; case "center": case "middle": - return WebvttCue.Builder.TEXT_ALIGNMENT_CENTER; + return TEXT_ALIGNMENT_CENTER; case "end": - return WebvttCue.Builder.TEXT_ALIGNMENT_END; + return TEXT_ALIGNMENT_END; case "right": - return WebvttCue.Builder.TEXT_ALIGNMENT_RIGHT; + return TEXT_ALIGNMENT_RIGHT; default: Log.w(TAG, "Invalid alignment value: " + s); // Default value: https://www.w3.org/TR/webvtt1/#webvtt-cue-text-alignment - return WebvttCue.Builder.TEXT_ALIGNMENT_CENTER; + return TEXT_ALIGNMENT_CENTER; } } @@ -363,6 +515,8 @@ private static boolean isSupportedTag(String tagName) { case TAG_CLASS: case TAG_ITALIC: case TAG_LANG: + case TAG_RUBY: + case TAG_RUBY_TEXT: case TAG_UNDERLINE: case TAG_VOICE: return true; @@ -374,24 +528,28 @@ private static boolean isSupportedTag(String tagName) { private static void applySpansForTag( @Nullable String cueId, StartTag startTag, + List nestedElements, SpannableStringBuilder text, - List styles, - List scratchStyleMatches) { + List styles) { int start = startTag.position; int end = text.length(); - switch(startTag.name) { + + switch (startTag.name) { case TAG_BOLD: - text.setSpan(new StyleSpan(STYLE_BOLD), start, end, - Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + text.setSpan(new StyleSpan(STYLE_BOLD), start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); break; case TAG_ITALIC: - text.setSpan(new StyleSpan(STYLE_ITALIC), start, end, - Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + text.setSpan(new StyleSpan(STYLE_ITALIC), start, 
end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + break; + case TAG_RUBY: + applyRubySpans(text, cueId, startTag, nestedElements, styles); break; case TAG_UNDERLINE: text.setSpan(new UnderlineSpan(), start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); break; case TAG_CLASS: + applyDefaultColors(text, startTag.classes, start, end); + break; case TAG_LANG: case TAG_VOICE: case "": // Case of the "whole cue" virtual tag. @@ -399,21 +557,113 @@ private static void applySpansForTag( default: return; } - scratchStyleMatches.clear(); - getApplicableStyles(styles, cueId, startTag, scratchStyleMatches); - int styleMatchesCount = scratchStyleMatches.size(); - for (int i = 0; i < styleMatchesCount; i++) { - applyStyleToText(text, scratchStyleMatches.get(i).style, start, end); + + List applicableStyles = getApplicableStyles(styles, cueId, startTag); + for (int i = 0; i < applicableStyles.size(); i++) { + applyStyleToText(text, applicableStyles.get(i).style, start, end); + } + } + + private static void applyRubySpans( + SpannableStringBuilder text, + @Nullable String cueId, + StartTag startTag, + List nestedElements, + List styles) { + @TextAnnotation.Position int rubyTagPosition = getRubyPosition(styles, cueId, startTag); + List sortedNestedElements = new ArrayList<>(nestedElements.size()); + sortedNestedElements.addAll(nestedElements); + Collections.sort(sortedNestedElements, Element.BY_START_POSITION_ASC); + int deletedCharCount = 0; + int lastRubyTextEnd = startTag.position; + for (int i = 0; i < sortedNestedElements.size(); i++) { + if (!TAG_RUBY_TEXT.equals(sortedNestedElements.get(i).startTag.name)) { + continue; + } + Element rubyTextElement = sortedNestedElements.get(i); + // Use the element's ruby-position if set, otherwise the element's and otherwise + // default to OVER. + @TextAnnotation.Position + int rubyPosition = + firstKnownRubyPosition( + getRubyPosition(styles, cueId, rubyTextElement.startTag), + rubyTagPosition, + TextAnnotation.POSITION_BEFORE); + // Move the rubyText from spannedText into the RubySpan. + int adjustedRubyTextStart = rubyTextElement.startTag.position - deletedCharCount; + int adjustedRubyTextEnd = rubyTextElement.endPosition - deletedCharCount; + CharSequence rubyText = text.subSequence(adjustedRubyTextStart, adjustedRubyTextEnd); + text.delete(adjustedRubyTextStart, adjustedRubyTextEnd); + text.setSpan( + new RubySpan(rubyText.toString(), rubyPosition), + lastRubyTextEnd, + adjustedRubyTextStart, + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + deletedCharCount += rubyText.length(); + // The ruby text has been deleted, so new-start == old-end. 
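// Illustrative sketch of the new tag handling; parseCueText is package-private, so these calls
// only compile from inside the webvtt package. The markup strings are assumptions.
SpannedString ruby =
    WebvttCueParser.parseCueText(
        /* id= */ null, "<ruby>base<rt>annotation</rt></ruby>", Collections.emptyList());
// ruby.toString() is "base"; "annotation" has been removed from the cue text and attached over
// "base" as a RubySpan with TextAnnotation.POSITION_BEFORE (rendered above the base text),
// unless a matching CSS block specifies a different ruby-position.
SpannedString colored =
    WebvttCueParser.parseCueText(
        /* id= */ null, "<c.yellow.bg_blue>warning</c>", Collections.emptyList());
// "warning" carries a yellow ForegroundColorSpan and a blue BackgroundColorSpan, taken from the
// DEFAULT_TEXT_COLORS / DEFAULT_BACKGROUND_COLORS maps added above.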
+ lastRubyTextEnd = adjustedRubyTextStart; + } + } + + private static @TextAnnotation.Position int getRubyPosition( + List styles, @Nullable String cueId, StartTag startTag) { + List styleMatches = getApplicableStyles(styles, cueId, startTag); + for (int i = 0; i < styleMatches.size(); i++) { + WebvttCssStyle style = styleMatches.get(i).style; + if (style.getRubyPosition() != TextAnnotation.POSITION_UNKNOWN) { + return style.getRubyPosition(); + } } + return TextAnnotation.POSITION_UNKNOWN; } - private static void applyStyleToText(SpannableStringBuilder spannedText, WebvttCssStyle style, - int start, int end) { + private static @TextAnnotation.Position int firstKnownRubyPosition( + @TextAnnotation.Position int position1, + @TextAnnotation.Position int position2, + @TextAnnotation.Position int position3) { + if (position1 != TextAnnotation.POSITION_UNKNOWN) { + return position1; + } + if (position2 != TextAnnotation.POSITION_UNKNOWN) { + return position2; + } + if (position3 != TextAnnotation.POSITION_UNKNOWN) { + return position3; + } + throw new IllegalArgumentException(); + } + + /** + * Adds {@link ForegroundColorSpan}s and {@link BackgroundColorSpan}s to {@code text} for entries + * in {@code classes} that match WebVTT's default text colors or default text background + * colors. + */ + private static void applyDefaultColors( + SpannableStringBuilder text, Set classes, int start, int end) { + for (String className : classes) { + if (DEFAULT_TEXT_COLORS.containsKey(className)) { + int color = DEFAULT_TEXT_COLORS.get(className); + text.setSpan(new ForegroundColorSpan(color), start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + } else if (DEFAULT_BACKGROUND_COLORS.containsKey(className)) { + int color = DEFAULT_BACKGROUND_COLORS.get(className); + text.setSpan(new BackgroundColorSpan(color), start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + } + } + } + + private static void applyStyleToText( + SpannableStringBuilder spannedText, WebvttCssStyle style, int start, int end) { if (style == null) { return; } if (style.getStyle() != WebvttCssStyle.UNSPECIFIED) { - spannedText.setSpan(new StyleSpan(style.getStyle()), start, end, + SpanUtil.addOrReplaceSpan( + spannedText, + new StyleSpan(style.getStyle()), + start, + end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } if (style.isLinethrough()) { @@ -423,39 +673,62 @@ private static void applyStyleToText(SpannableStringBuilder spannedText, WebvttC spannedText.setSpan(new UnderlineSpan(), start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } if (style.hasFontColor()) { - spannedText.setSpan(new ForegroundColorSpan(style.getFontColor()), start, end, - Spannable.SPAN_EXCLUSIVE_EXCLUSIVE); + SpanUtil.addOrReplaceSpan( + spannedText, + new ForegroundColorSpan(style.getFontColor()), + start, + end, + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } if (style.hasBackgroundColor()) { - spannedText.setSpan(new BackgroundColorSpan(style.getBackgroundColor()), start, end, - Spannable.SPAN_EXCLUSIVE_EXCLUSIVE); + SpanUtil.addOrReplaceSpan( + spannedText, + new BackgroundColorSpan(style.getBackgroundColor()), + start, + end, + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } if (style.getFontFamily() != null) { - spannedText.setSpan(new TypefaceSpan(style.getFontFamily()), start, end, + SpanUtil.addOrReplaceSpan( + spannedText, + new TypefaceSpan(style.getFontFamily()), + start, + end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } - Layout.Alignment textAlign = style.getTextAlign(); - if (textAlign != null) { - spannedText.setSpan( - new AlignmentSpan.Standard(textAlign), start, end, 
Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); - } switch (style.getFontSizeUnit()) { case WebvttCssStyle.FONT_SIZE_UNIT_PIXEL: - spannedText.setSpan(new AbsoluteSizeSpan((int) style.getFontSize(), true), start, end, + SpanUtil.addOrReplaceSpan( + spannedText, + new AbsoluteSizeSpan((int) style.getFontSize(), true), + start, + end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); break; case WebvttCssStyle.FONT_SIZE_UNIT_EM: - spannedText.setSpan(new RelativeSizeSpan(style.getFontSize()), start, end, + SpanUtil.addOrReplaceSpan( + spannedText, + new RelativeSizeSpan(style.getFontSize()), + start, + end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); break; case WebvttCssStyle.FONT_SIZE_UNIT_PERCENT: - spannedText.setSpan(new RelativeSizeSpan(style.getFontSize() / 100), start, end, + SpanUtil.addOrReplaceSpan( + spannedText, + new RelativeSizeSpan(style.getFontSize() / 100), + start, + end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); break; case WebvttCssStyle.UNSPECIFIED: // Do nothing. break; } + if (style.getCombineUpright()) { + spannedText.setSpan( + new HorizontalTextInVerticalContextSpan(), start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + } } /** @@ -470,20 +743,165 @@ private static String getTagName(String tagExpression) { return Util.splitAtFirst(tagExpression, "[ \\.]")[0]; } - private static void getApplicableStyles( - List declaredStyles, - @Nullable String id, - StartTag tag, - List output) { - int styleCount = declaredStyles.size(); - for (int i = 0; i < styleCount; i++) { + private static List getApplicableStyles( + List declaredStyles, @Nullable String id, StartTag tag) { + List applicableStyles = new ArrayList<>(); + for (int i = 0; i < declaredStyles.size(); i++) { WebvttCssStyle style = declaredStyles.get(i); int score = style.getSpecificityScore(id, tag.name, tag.classes, tag.voice); if (score > 0) { - output.add(new StyleMatch(score, style)); + applicableStyles.add(new StyleMatch(score, style)); + } + } + Collections.sort(applicableStyles); + return applicableStyles; + } + + private static final class WebvttCueInfoBuilder { + + public long startTimeUs; + public long endTimeUs; + public @MonotonicNonNull CharSequence text; + public @TextAlignment int textAlignment; + public float line; + // Equivalent to WebVTT's snap-to-lines flag: + // https://www.w3.org/TR/webvtt1/#webvtt-cue-snap-to-lines-flag + public @Cue.LineType int lineType; + public @Cue.AnchorType int lineAnchor; + public float position; + public @Cue.AnchorType int positionAnchor; + public float size; + public @Cue.VerticalType int verticalType; + + public WebvttCueInfoBuilder() { + startTimeUs = 0; + endTimeUs = 0; + // Default: https://www.w3.org/TR/webvtt1/#webvtt-cue-text-alignment + textAlignment = TEXT_ALIGNMENT_CENTER; + line = Cue.DIMEN_UNSET; + // Defaults to NUMBER (true): https://www.w3.org/TR/webvtt1/#webvtt-cue-snap-to-lines-flag + lineType = Cue.LINE_TYPE_NUMBER; + // Default: https://www.w3.org/TR/webvtt1/#webvtt-cue-line-alignment + lineAnchor = Cue.ANCHOR_TYPE_START; + position = Cue.DIMEN_UNSET; + positionAnchor = Cue.TYPE_UNSET; + // Default: https://www.w3.org/TR/webvtt1/#webvtt-cue-size + size = 1.0f; + verticalType = Cue.TYPE_UNSET; + } + + public WebvttCueInfo build() { + return new WebvttCueInfo(toCueBuilder().build(), startTimeUs, endTimeUs); + } + + public Cue.Builder toCueBuilder() { + float position = + this.position != Cue.DIMEN_UNSET ? this.position : derivePosition(textAlignment); + @Cue.AnchorType + int positionAnchor = + this.positionAnchor != Cue.TYPE_UNSET + ? 
this.positionAnchor + : derivePositionAnchor(textAlignment); + Cue.Builder cueBuilder = + new Cue.Builder() + .setTextAlignment(convertTextAlignment(textAlignment)) + .setLine(computeLine(line, lineType), lineType) + .setLineAnchor(lineAnchor) + .setPosition(position) + .setPositionAnchor(positionAnchor) + .setSize(min(size, deriveMaxSize(positionAnchor, position))) + .setVerticalType(verticalType); + + if (text != null) { + cueBuilder.setText(text); + } + + return cueBuilder; + } + + // https://www.w3.org/TR/webvtt1/#webvtt-cue-line + private static float computeLine(float line, @Cue.LineType int lineType) { + if (line != Cue.DIMEN_UNSET + && lineType == Cue.LINE_TYPE_FRACTION + && (line < 0.0f || line > 1.0f)) { + return 1.0f; // Step 1 + } else if (line != Cue.DIMEN_UNSET) { + // Step 2: Do nothing, line is already correct. + return line; + } else if (lineType == Cue.LINE_TYPE_FRACTION) { + return 1.0f; // Step 3 + } else { + // Steps 4 - 10 (stacking multiple simultaneous cues) are handled by + // WebvttSubtitle.getCues(long) and WebvttSubtitle.isNormal(Cue). + return Cue.DIMEN_UNSET; + } + } + + // https://www.w3.org/TR/webvtt1/#webvtt-cue-position + private static float derivePosition(@TextAlignment int textAlignment) { + switch (textAlignment) { + case TEXT_ALIGNMENT_LEFT: + return 0.0f; + case TEXT_ALIGNMENT_RIGHT: + return 1.0f; + case TEXT_ALIGNMENT_START: + case TEXT_ALIGNMENT_CENTER: + case TEXT_ALIGNMENT_END: + default: + return DEFAULT_POSITION; + } + } + + // https://www.w3.org/TR/webvtt1/#webvtt-cue-position-alignment + private static @Cue.AnchorType int derivePositionAnchor(@TextAlignment int textAlignment) { + switch (textAlignment) { + case TEXT_ALIGNMENT_LEFT: + case TEXT_ALIGNMENT_START: + return Cue.ANCHOR_TYPE_START; + case TEXT_ALIGNMENT_RIGHT: + case TEXT_ALIGNMENT_END: + return Cue.ANCHOR_TYPE_END; + case TEXT_ALIGNMENT_CENTER: + default: + return Cue.ANCHOR_TYPE_MIDDLE; + } + } + + @Nullable + private static Layout.Alignment convertTextAlignment(@TextAlignment int textAlignment) { + switch (textAlignment) { + case TEXT_ALIGNMENT_START: + case TEXT_ALIGNMENT_LEFT: + return Layout.Alignment.ALIGN_NORMAL; + case TEXT_ALIGNMENT_CENTER: + return Layout.Alignment.ALIGN_CENTER; + case TEXT_ALIGNMENT_END: + case TEXT_ALIGNMENT_RIGHT: + return Layout.Alignment.ALIGN_OPPOSITE; + default: + Log.w(TAG, "Unknown textAlignment: " + textAlignment); + return null; + } + } + + // Step 2 here: https://www.w3.org/TR/webvtt1/#processing-cue-settings + private static float deriveMaxSize(@Cue.AnchorType int positionAnchor, float position) { + switch (positionAnchor) { + case Cue.ANCHOR_TYPE_START: + return 1.0f - position; + case Cue.ANCHOR_TYPE_END: + return position; + case Cue.ANCHOR_TYPE_MIDDLE: + if (position <= 0.5f) { + return position * 2; + } else { + return (1.0f - position) * 2; + } + case Cue.TYPE_UNSET: + default: + throw new IllegalStateException(String.valueOf(positionAnchor)); } } - Collections.sort(output); } private static final class StyleMatch implements Comparable { @@ -497,22 +915,19 @@ public StyleMatch(int score, WebvttCssStyle style) { } @Override - public int compareTo(@NonNull StyleMatch another) { - return this.score - another.score; + public int compareTo(StyleMatch another) { + return Integer.compare(this.score, another.score); } - } private static final class StartTag { - private static final String[] NO_CLASSES = new String[0]; - public final String name; public final int position; public final String voice; - public final String[] classes; + public 
final Set classes; - private StartTag(String name, int position, String voice, String[] classes) { + private StartTag(String name, int position, String voice, Set classes) { this.position = position; this.name = name; this.voice = voice; @@ -532,19 +947,37 @@ public static StartTag buildStartTag(String fullTagExpression, int position) { } String[] nameAndClasses = Util.split(fullTagExpression, "\\."); String name = nameAndClasses[0]; - String[] classes; - if (nameAndClasses.length > 1) { - classes = Util.nullSafeArrayCopyOfRange(nameAndClasses, 1, nameAndClasses.length); - } else { - classes = NO_CLASSES; + Set classes = new HashSet<>(); + for (int i = 1; i < nameAndClasses.length; i++) { + classes.add(nameAndClasses[i]); } return new StartTag(name, position, voice, classes); } public static StartTag buildWholeCueVirtualTag() { - return new StartTag("", 0, "", new String[0]); + return new StartTag( + /* name= */ "", + /* position= */ 0, + /* voice= */ "", + /* classes= */ Collections.emptySet()); } - } + /** Information about a complete element (i.e. start tag and end position). */ + private static class Element { + private static final Comparator BY_START_POSITION_ASC = + (e1, e2) -> Integer.compare(e1.startTag.position, e2.startTag.position); + + private final StartTag startTag; + /** + * The position of the end of this element's text in the un-marked-up cue text (i.e. the + * corollary to {@link StartTag#position}). + */ + private final int endPosition; + + private Element(StartTag startTag, int endPosition) { + this.startTag = startTag; + this.endPosition = endPosition; + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttDecoder.java index 9b356f0988..2e3a7fdafc 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttDecoder.java @@ -16,6 +16,7 @@ package com.google.android.exoplayer2.text.webvtt; import android.text.TextUtils; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.text.SimpleSubtitleDecoder; import com.google.android.exoplayer2.text.Subtitle; @@ -26,7 +27,7 @@ /** * A {@link SimpleSubtitleDecoder} for WebVTT. - *
<p>
      + * * @see WebVTT specification */ public final class WebvttDecoder extends SimpleSubtitleDecoder { @@ -40,28 +41,20 @@ public final class WebvttDecoder extends SimpleSubtitleDecoder { private static final String COMMENT_START = "NOTE"; private static final String STYLE_START = "STYLE"; - private final WebvttCueParser cueParser; private final ParsableByteArray parsableWebvttData; - private final WebvttCue.Builder webvttCueBuilder; - private final CssParser cssParser; - private final List definedStyles; + private final WebvttCssParser cssParser; public WebvttDecoder() { super("WebvttDecoder"); - cueParser = new WebvttCueParser(); parsableWebvttData = new ParsableByteArray(); - webvttCueBuilder = new WebvttCue.Builder(); - cssParser = new CssParser(); - definedStyles = new ArrayList<>(); + cssParser = new WebvttCssParser(); } @Override - protected Subtitle decode(byte[] bytes, int length, boolean reset) + protected Subtitle decode(byte[] data, int length, boolean reset) throws SubtitleDecoderException { - parsableWebvttData.reset(bytes, length); - // Initialization for consistent starting state. - webvttCueBuilder.reset(); - definedStyles.clear(); + parsableWebvttData.reset(data, length); + List definedStyles = new ArrayList<>(); // Validate the first line of the header, and skip the remainder. try { @@ -72,24 +65,25 @@ protected Subtitle decode(byte[] bytes, int length, boolean reset) while (!TextUtils.isEmpty(parsableWebvttData.readLine())) {} int event; - ArrayList subtitles = new ArrayList<>(); + List cueInfos = new ArrayList<>(); while ((event = getNextEvent(parsableWebvttData)) != EVENT_END_OF_FILE) { if (event == EVENT_COMMENT) { skipComment(parsableWebvttData); } else if (event == EVENT_STYLE_BLOCK) { - if (!subtitles.isEmpty()) { + if (!cueInfos.isEmpty()) { throw new SubtitleDecoderException("A style block was found after the first cue."); } parsableWebvttData.readLine(); // Consume the "STYLE" header. definedStyles.addAll(cssParser.parseBlock(parsableWebvttData)); } else if (event == EVENT_CUE) { - if (cueParser.parseCue(parsableWebvttData, webvttCueBuilder, definedStyles)) { - subtitles.add(webvttCueBuilder.build()); - webvttCueBuilder.reset(); + @Nullable + WebvttCueInfo cueInfo = WebvttCueParser.parseCue(parsableWebvttData, definedStyles); + if (cueInfo != null) { + cueInfos.add(cueInfo); } } } - return new WebvttSubtitle(subtitles); + return new WebvttSubtitle(cueInfos); } /** @@ -121,5 +115,4 @@ private static int getNextEvent(ParsableByteArray parsableWebvttData) { private static void skipComment(ParsableByteArray parsableWebvttData) { while (!TextUtils.isEmpty(parsableWebvttData.readLine())) {} } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttParserUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttParserUtil.java index 9075083111..52f0cdb8d1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttParserUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttParserUtil.java @@ -22,9 +22,7 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -/** - * Utility methods for parsing WebVTT data. - */ +/** Utility methods for parsing WebVTT data. 
*/ public final class WebvttParserUtil { private static final Pattern COMMENT = Pattern.compile("^NOTE([ \t].*)?$"); @@ -42,7 +40,8 @@ public static void validateWebvttHeaderLine(ParsableByteArray input) throws Pars int startPosition = input.getPosition(); if (!isWebvttHeaderLine(input)) { input.setPosition(startPosition); - throw new ParserException("Expected WEBVTT. Got " + input.readLine()); + throw ParserException.createForMalformedContainer( + "Expected WEBVTT. Got " + input.readLine(), /* cause= */ null); } } @@ -115,5 +114,4 @@ public static Matcher findNextCueHeader(ParsableByteArray input) { } return null; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttSubtitle.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttSubtitle.java index 2833ff2d0b..301f4ea19a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttSubtitle.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/text/webvtt/WebvttSubtitle.java @@ -15,7 +15,6 @@ */ package com.google.android.exoplayer2.text.webvtt; -import android.text.SpannableStringBuilder; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.text.Cue; import com.google.android.exoplayer2.text.Subtitle; @@ -23,30 +22,25 @@ import com.google.android.exoplayer2.util.Util; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; -/** - * A representation of a WebVTT subtitle. - */ +/** A representation of a WebVTT subtitle. */ /* package */ final class WebvttSubtitle implements Subtitle { - private final List cues; - private final int numCues; + private final List cueInfos; private final long[] cueTimesUs; private final long[] sortedCueTimesUs; - /** - * @param cues A list of the cues in this subtitle. - */ - public WebvttSubtitle(List cues) { - this.cues = cues; - numCues = cues.size(); - cueTimesUs = new long[2 * numCues]; - for (int cueIndex = 0; cueIndex < numCues; cueIndex++) { - WebvttCue cue = cues.get(cueIndex); + /** Constructs a new WebvttSubtitle from a list of {@link WebvttCueInfo}s. 
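// Illustrative sketch of the WebvttSubtitle constructed below; WebvttSubtitle and newCueForText
// are package-private, so this only compiles from inside the webvtt package. The cue texts and
// times are assumptions.
WebvttSubtitle subtitle =
    new WebvttSubtitle(
        Arrays.asList(
            new WebvttCueInfo(WebvttCueParser.newCueForText("first"), 1_000_000, 2_000_000),
            new WebvttCueInfo(WebvttCueParser.newCueForText("second"), 1_500_000, 3_000_000)));
// At 1_600_000us both cues are active. Neither sets 'line', so getCues returns copies with
// snap-to-lines numbers counted from the bottom: the earlier cue gets -1, the later one -2.
List<Cue> activeCues = subtitle.getCues(1_600_000);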
*/ + public WebvttSubtitle(List cueInfos) { + this.cueInfos = Collections.unmodifiableList(new ArrayList<>(cueInfos)); + cueTimesUs = new long[2 * cueInfos.size()]; + for (int cueIndex = 0; cueIndex < cueInfos.size(); cueIndex++) { + WebvttCueInfo cueInfo = cueInfos.get(cueIndex); int arrayIndex = cueIndex * 2; - cueTimesUs[arrayIndex] = cue.startTime; - cueTimesUs[arrayIndex + 1] = cue.endTime; + cueTimesUs[arrayIndex] = cueInfo.startTimeUs; + cueTimesUs[arrayIndex + 1] = cueInfo.endTimeUs; } sortedCueTimesUs = Arrays.copyOf(cueTimesUs, cueTimesUs.length); Arrays.sort(sortedCueTimesUs); @@ -72,44 +66,25 @@ public long getEventTime(int index) { @Override public List getCues(long timeUs) { - List list = new ArrayList<>(); - WebvttCue firstNormalCue = null; - SpannableStringBuilder normalCueTextBuilder = null; - - for (int i = 0; i < numCues; i++) { + List currentCues = new ArrayList<>(); + List cuesWithUnsetLine = new ArrayList<>(); + for (int i = 0; i < cueInfos.size(); i++) { if ((cueTimesUs[i * 2] <= timeUs) && (timeUs < cueTimesUs[i * 2 + 1])) { - WebvttCue cue = cues.get(i); - // TODO(ibaker): Replace this with a closer implementation of the WebVTT spec (keeping - // individual cues, but tweaking their `line` value): - // https://www.w3.org/TR/webvtt1/#cue-computed-line - if (cue.isNormalCue()) { - // we want to merge all of the normal cues into a single cue to ensure they are drawn - // correctly (i.e. don't overlap) and to emulate roll-up, but only if there are multiple - // normal cues, otherwise we can just append the single normal cue - if (firstNormalCue == null) { - firstNormalCue = cue; - } else if (normalCueTextBuilder == null) { - normalCueTextBuilder = new SpannableStringBuilder(); - normalCueTextBuilder - .append(Assertions.checkNotNull(firstNormalCue.text)) - .append("\n") - .append(Assertions.checkNotNull(cue.text)); - } else { - normalCueTextBuilder.append("\n").append(Assertions.checkNotNull(cue.text)); - } + WebvttCueInfo cueInfo = cueInfos.get(i); + if (cueInfo.cue.line == Cue.DIMEN_UNSET) { + cuesWithUnsetLine.add(cueInfo); } else { - list.add(cue); + currentCues.add(cueInfo.cue); } } } - if (normalCueTextBuilder != null) { - // there were multiple normal cues, so create a new cue with all of the text - list.add(new WebvttCue.Builder().setText(normalCueTextBuilder).build()); - } else if (firstNormalCue != null) { - // there was only a single normal cue, so just add it to the list - list.add(firstNormalCue); + // Steps 4 - 10 of https://www.w3.org/TR/webvtt1/#cue-computed-line + // (steps 1 - 3 are handled by WebvttCueParser#computeLine(float, int)) + Collections.sort(cuesWithUnsetLine, (c1, c2) -> Long.compare(c1.startTimeUs, c2.startTimeUs)); + for (int i = 0; i < cuesWithUnsetLine.size(); i++) { + Cue cue = cuesWithUnsetLine.get(i).cue; + currentCues.add(cue.buildUpon().setLine((float) (-1 - i), Cue.LINE_TYPE_NUMBER).build()); } - return list; + return currentCues; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/AdaptiveTrackSelection.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/AdaptiveTrackSelection.java index 3e8cdd1ca4..eefc253274 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/AdaptiveTrackSelection.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/AdaptiveTrackSelection.java @@ -15,37 +15,49 @@ */ package com.google.android.exoplayer2.trackselection; +import static java.lang.Math.max; +import static 
java.lang.Math.min; + +import androidx.annotation.CallSuper; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.SimpleExoPlayer; +import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.chunk.MediaChunk; import com.google.android.exoplayer2.source.chunk.MediaChunkIterator; import com.google.android.exoplayer2.upstream.BandwidthMeter; -import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Clock; +import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import com.google.common.collect.Multimap; +import com.google.common.collect.MultimapBuilder; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import org.checkerframework.checker.nullness.compatqual.NullableType; /** - * A bandwidth based adaptive {@link TrackSelection}, whose selected track is updated to be the one - * of highest quality given the current network conditions and the state of the buffer. + * A bandwidth based adaptive {@link ExoTrackSelection}, whose selected track is updated to be the + * one of highest quality given the current network conditions and the state of the buffer. */ public class AdaptiveTrackSelection extends BaseTrackSelection { + private static final String TAG = "AdaptiveTrackSelection"; + /** Factory for {@link AdaptiveTrackSelection} instances. */ - public static class Factory implements TrackSelection.Factory { + public static class Factory implements ExoTrackSelection.Factory { - @Nullable private final BandwidthMeter bandwidthMeter; private final int minDurationForQualityIncreaseMs; private final int maxDurationForQualityDecreaseMs; private final int minDurationToRetainAfterDiscardMs; + private final int maxWidthToDiscard; + private final int maxHeightToDiscard; private final float bandwidthFraction; private final float bufferedFractionToLiveEdgeForQualityIncrease; - private final long minTimeBetweenBufferReevaluationMs; private final Clock clock; /** Creates an adaptive track selection factory with default parameters. */ @@ -54,28 +66,7 @@ public Factory() { DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS, DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS, DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS, - DEFAULT_BANDWIDTH_FRACTION, - DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE, - DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS, - Clock.DEFAULT); - } - - /** - * @deprecated Use {@link #Factory()} instead. Custom bandwidth meter should be directly passed - * to the player in {@link SimpleExoPlayer.Builder}. - */ - @Deprecated - @SuppressWarnings("deprecation") - public Factory(BandwidthMeter bandwidthMeter) { - this( - bandwidthMeter, - DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS, - DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS, - DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS, - DEFAULT_BANDWIDTH_FRACTION, - DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE, - DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS, - Clock.DEFAULT); + DEFAULT_BANDWIDTH_FRACTION); } /** @@ -85,10 +76,11 @@ public Factory(BandwidthMeter bandwidthMeter) { * selected track to switch to one of higher quality. 
* @param maxDurationForQualityDecreaseMs The maximum duration of buffered data required for the * selected track to switch to one of lower quality. - * @param minDurationToRetainAfterDiscardMs When switching to a track of significantly higher - * quality, the selection may indicate that media already buffered at the lower quality can - * be discarded to speed up the switch. This is the minimum duration of media that must be - * retained at the lower quality. + * @param minDurationToRetainAfterDiscardMs When switching to a video track of higher quality, + * the selection may indicate that media already buffered at the lower quality can be + * discarded to speed up the switch. This is the minimum duration of media that must be + * retained at the lower quality. It must be at least {@code + * minDurationForQualityIncreaseMs}. * @param bandwidthFraction The fraction of the available bandwidth that the selection should * consider available for use. Setting to a value less than 1 is recommended to account for * inaccuracies in the bandwidth estimator. @@ -102,32 +94,48 @@ public Factory( minDurationForQualityIncreaseMs, maxDurationForQualityDecreaseMs, minDurationToRetainAfterDiscardMs, + DEFAULT_MAX_WIDTH_TO_DISCARD, + DEFAULT_MAX_HEIGHT_TO_DISCARD, bandwidthFraction, DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE, - DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS, Clock.DEFAULT); } /** - * @deprecated Use {@link #Factory(int, int, int, float)} instead. Custom bandwidth meter should - * be directly passed to the player in {@link SimpleExoPlayer.Builder}. + * Creates an adaptive track selection factory. + * + * @param minDurationForQualityIncreaseMs The minimum duration of buffered data required for the + * selected track to switch to one of higher quality. + * @param maxDurationForQualityDecreaseMs The maximum duration of buffered data required for the + * selected track to switch to one of lower quality. + * @param minDurationToRetainAfterDiscardMs When switching to a video track of higher quality, + * the selection may indicate that media already buffered at the lower quality can be + * discarded to speed up the switch. This is the minimum duration of media that must be + * retained at the lower quality. It must be at least {@code + * minDurationForQualityIncreaseMs}. + * @param maxWidthToDiscard The maximum video width that the selector may discard from the + * buffer to speed up switching to a higher quality. + * @param maxHeightToDiscard The maximum video height that the selector may discard from the + * buffer to speed up switching to a higher quality. + * @param bandwidthFraction The fraction of the available bandwidth that the selection should + * consider available for use. Setting to a value less than 1 is recommended to account for + * inaccuracies in the bandwidth estimator. */ - @Deprecated - @SuppressWarnings("deprecation") public Factory( - BandwidthMeter bandwidthMeter, int minDurationForQualityIncreaseMs, int maxDurationForQualityDecreaseMs, int minDurationToRetainAfterDiscardMs, + int maxWidthToDiscard, + int maxHeightToDiscard, float bandwidthFraction) { this( - bandwidthMeter, minDurationForQualityIncreaseMs, maxDurationForQualityDecreaseMs, minDurationToRetainAfterDiscardMs, + maxWidthToDiscard, + maxHeightToDiscard, bandwidthFraction, DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE, - DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS, Clock.DEFAULT); } @@ -138,10 +146,11 @@ public Factory( * selected track to switch to one of higher quality. 
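// Illustrative sketch of the updated Factory: the two new discard bounds let the selector drop
// already-buffered video no larger than the given dimensions when stepping up in quality. The
// numeric values below are assumptions, not defaults taken from this change.
AdaptiveTrackSelection.Factory trackSelectionFactory =
    new AdaptiveTrackSelection.Factory(
        /* minDurationForQualityIncreaseMs= */ 10_000,
        /* maxDurationForQualityDecreaseMs= */ 25_000,
        /* minDurationToRetainAfterDiscardMs= */ 25_000,
        /* maxWidthToDiscard= */ 1279,
        /* maxHeightToDiscard= */ 719,
        /* bandwidthFraction= */ 0.7f);
// This overload keeps the default live-edge buffered fraction and Clock.DEFAULT, and expects
// minDurationToRetainAfterDiscardMs to be at least minDurationForQualityIncreaseMs.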
* @param maxDurationForQualityDecreaseMs The maximum duration of buffered data required for the * selected track to switch to one of lower quality. - * @param minDurationToRetainAfterDiscardMs When switching to a track of significantly higher - * quality, the selection may indicate that media already buffered at the lower quality can - * be discarded to speed up the switch. This is the minimum duration of media that must be - * retained at the lower quality. + * @param minDurationToRetainAfterDiscardMs When switching to a video track of higher quality, + * the selection may indicate that media already buffered at the lower quality can be + * discarded to speed up the switch. This is the minimum duration of media that must be + * retained at the lower quality. It must be at least {@code + * minDurationForQualityIncreaseMs}. * @param bandwidthFraction The fraction of the available bandwidth that the selection should * consider available for use. Setting to a value less than 1 is recommended to account for * inaccuracies in the bandwidth estimator. @@ -151,106 +160,99 @@ public Factory( * applied when the playback position is closer to the live edge than {@code * minDurationForQualityIncreaseMs}, which would otherwise prevent switching to a higher * quality from happening. - * @param minTimeBetweenBufferReevaluationMs The track selection may periodically reevaluate its - * buffer and discard some chunks of lower quality to improve the playback quality if - * network conditions have changed. This is the minimum duration between 2 consecutive - * buffer reevaluation calls. * @param clock A {@link Clock}. */ - @SuppressWarnings("deprecation") public Factory( int minDurationForQualityIncreaseMs, int maxDurationForQualityDecreaseMs, int minDurationToRetainAfterDiscardMs, float bandwidthFraction, float bufferedFractionToLiveEdgeForQualityIncrease, - long minTimeBetweenBufferReevaluationMs, Clock clock) { this( - /* bandwidthMeter= */ null, minDurationForQualityIncreaseMs, maxDurationForQualityDecreaseMs, minDurationToRetainAfterDiscardMs, + DEFAULT_MAX_WIDTH_TO_DISCARD, + DEFAULT_MAX_HEIGHT_TO_DISCARD, bandwidthFraction, bufferedFractionToLiveEdgeForQualityIncrease, - minTimeBetweenBufferReevaluationMs, clock); } /** - * @deprecated Use {@link #Factory(int, int, int, float, float, long, Clock)} instead. Custom - * bandwidth meter should be directly passed to the player in {@link - * SimpleExoPlayer.Builder}. + * Creates an adaptive track selection factory. + * + * @param minDurationForQualityIncreaseMs The minimum duration of buffered data required for the + * selected track to switch to one of higher quality. + * @param maxDurationForQualityDecreaseMs The maximum duration of buffered data required for the + * selected track to switch to one of lower quality. + * @param minDurationToRetainAfterDiscardMs When switching to a video track of higher quality, + * the selection may indicate that media already buffered at the lower quality can be + * discarded to speed up the switch. This is the minimum duration of media that must be + * retained at the lower quality. It must be at least {@code + * minDurationForQualityIncreaseMs}. + * @param maxWidthToDiscard The maximum video width that the selector may discard from the + * buffer to speed up switching to a higher quality. + * @param maxHeightToDiscard The maximum video height that the selector may discard from the + * buffer to speed up switching to a higher quality. 
+ * @param bandwidthFraction The fraction of the available bandwidth that the selection should + * consider available for use. Setting to a value less than 1 is recommended to account for + * inaccuracies in the bandwidth estimator. + * @param bufferedFractionToLiveEdgeForQualityIncrease For live streaming, the fraction of the + * duration from current playback position to the live edge that has to be buffered before + * the selected track can be switched to one of higher quality. This parameter is only + * applied when the playback position is closer to the live edge than {@code + * minDurationForQualityIncreaseMs}, which would otherwise prevent switching to a higher + * quality from happening. + * @param clock A {@link Clock}. */ - @Deprecated public Factory( - @Nullable BandwidthMeter bandwidthMeter, int minDurationForQualityIncreaseMs, int maxDurationForQualityDecreaseMs, int minDurationToRetainAfterDiscardMs, + int maxWidthToDiscard, + int maxHeightToDiscard, float bandwidthFraction, float bufferedFractionToLiveEdgeForQualityIncrease, - long minTimeBetweenBufferReevaluationMs, Clock clock) { - this.bandwidthMeter = bandwidthMeter; this.minDurationForQualityIncreaseMs = minDurationForQualityIncreaseMs; this.maxDurationForQualityDecreaseMs = maxDurationForQualityDecreaseMs; this.minDurationToRetainAfterDiscardMs = minDurationToRetainAfterDiscardMs; + this.maxWidthToDiscard = maxWidthToDiscard; + this.maxHeightToDiscard = maxHeightToDiscard; this.bandwidthFraction = bandwidthFraction; this.bufferedFractionToLiveEdgeForQualityIncrease = bufferedFractionToLiveEdgeForQualityIncrease; - this.minTimeBetweenBufferReevaluationMs = minTimeBetweenBufferReevaluationMs; this.clock = clock; } @Override - public final @NullableType TrackSelection[] createTrackSelections( - @NullableType Definition[] definitions, BandwidthMeter bandwidthMeter) { - if (this.bandwidthMeter != null) { - bandwidthMeter = this.bandwidthMeter; - } - TrackSelection[] selections = new TrackSelection[definitions.length]; - int totalFixedBandwidth = 0; - for (int i = 0; i < definitions.length; i++) { - Definition definition = definitions[i]; - if (definition != null && definition.tracks.length == 1) { - // Make fixed selections first to know their total bandwidth. 
- selections[i] = - new FixedTrackSelection( - definition.group, definition.tracks[0], definition.reason, definition.data); - int trackBitrate = definition.group.getFormat(definition.tracks[0]).bitrate; - if (trackBitrate != Format.NO_VALUE) { - totalFixedBandwidth += trackBitrate; - } - } - } - List adaptiveSelections = new ArrayList<>(); + public final @NullableType ExoTrackSelection[] createTrackSelections( + @NullableType Definition[] definitions, + BandwidthMeter bandwidthMeter, + MediaPeriodId mediaPeriodId, + Timeline timeline) { + ImmutableList> adaptationCheckpoints = + getAdaptationCheckpoints(definitions); + ExoTrackSelection[] selections = new ExoTrackSelection[definitions.length]; for (int i = 0; i < definitions.length; i++) { - Definition definition = definitions[i]; - if (definition != null && definition.tracks.length > 1) { - AdaptiveTrackSelection adaptiveSelection = - createAdaptiveTrackSelection( - definition.group, bandwidthMeter, definition.tracks, totalFixedBandwidth); - adaptiveSelections.add(adaptiveSelection); - selections[i] = adaptiveSelection; - } - } - if (adaptiveSelections.size() > 1) { - long[][] adaptiveTrackBitrates = new long[adaptiveSelections.size()][]; - for (int i = 0; i < adaptiveSelections.size(); i++) { - AdaptiveTrackSelection adaptiveSelection = adaptiveSelections.get(i); - adaptiveTrackBitrates[i] = new long[adaptiveSelection.length()]; - for (int j = 0; j < adaptiveSelection.length(); j++) { - adaptiveTrackBitrates[i][j] = - adaptiveSelection.getFormat(adaptiveSelection.length() - j - 1).bitrate; - } - } - long[][][] bandwidthCheckpoints = getAllocationCheckpoints(adaptiveTrackBitrates); - for (int i = 0; i < adaptiveSelections.size(); i++) { - adaptiveSelections - .get(i) - .experimental_setBandwidthAllocationCheckpoints(bandwidthCheckpoints[i]); + @Nullable Definition definition = definitions[i]; + if (definition == null || definition.tracks.length == 0) { + continue; } + selections[i] = + definition.tracks.length == 1 + ? new FixedTrackSelection( + definition.group, + /* track= */ definition.tracks[0], + /* type= */ definition.type) + : createAdaptiveTrackSelection( + definition.group, + definition.tracks, + definition.type, + bandwidthMeter, + adaptationCheckpoints.get(i)); } return selections; } @@ -259,49 +261,62 @@ public Factory( * Creates a single adaptive selection for the given group, bandwidth meter and tracks. * * @param group The {@link TrackGroup}. - * @param bandwidthMeter A {@link BandwidthMeter} which can be used to select tracks. * @param tracks The indices of the selected tracks in the track group. - * @param totalFixedTrackBandwidth The total bandwidth used by all non-adaptive tracks, in bits - * per second. + * @param type The type that will be returned from {@link TrackSelection#getType()}. + * @param bandwidthMeter A {@link BandwidthMeter} which can be used to select tracks. + * @param adaptationCheckpoints The {@link AdaptationCheckpoint checkpoints} that can be used to + * calculate available bandwidth for this selection. * @return An {@link AdaptiveTrackSelection} for the specified tracks. 
*/ protected AdaptiveTrackSelection createAdaptiveTrackSelection( TrackGroup group, - BandwidthMeter bandwidthMeter, int[] tracks, - int totalFixedTrackBandwidth) { + int type, + BandwidthMeter bandwidthMeter, + ImmutableList adaptationCheckpoints) { return new AdaptiveTrackSelection( group, tracks, - new DefaultBandwidthProvider(bandwidthMeter, bandwidthFraction, totalFixedTrackBandwidth), + type, + bandwidthMeter, minDurationForQualityIncreaseMs, maxDurationForQualityDecreaseMs, minDurationToRetainAfterDiscardMs, + maxWidthToDiscard, + maxHeightToDiscard, + bandwidthFraction, bufferedFractionToLiveEdgeForQualityIncrease, - minTimeBetweenBufferReevaluationMs, + adaptationCheckpoints, clock); } } - public static final int DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS = 10000; - public static final int DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS = 25000; - public static final int DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS = 25000; + public static final int DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS = 10_000; + public static final int DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS = 25_000; + public static final int DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS = 25_000; + public static final int DEFAULT_MAX_WIDTH_TO_DISCARD = 1279; + public static final int DEFAULT_MAX_HEIGHT_TO_DISCARD = 719; public static final float DEFAULT_BANDWIDTH_FRACTION = 0.7f; public static final float DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE = 0.75f; - public static final long DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS = 2000; - private final BandwidthProvider bandwidthProvider; + private static final long MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS = 1000; + + private final BandwidthMeter bandwidthMeter; private final long minDurationForQualityIncreaseUs; private final long maxDurationForQualityDecreaseUs; private final long minDurationToRetainAfterDiscardUs; + private final int maxWidthToDiscard; + private final int maxHeightToDiscard; + private final float bandwidthFraction; private final float bufferedFractionToLiveEdgeForQualityIncrease; - private final long minTimeBetweenBufferReevaluationMs; + private final ImmutableList adaptationCheckpoints; private final Clock clock; private float playbackSpeed; private int selectedIndex; - private int reason; + private @C.SelectionReason int reason; private long lastBufferEvaluationMs; + @Nullable private MediaChunk lastBufferEvaluationMediaChunk; /** * @param group The {@link TrackGroup}. @@ -309,19 +324,20 @@ protected AdaptiveTrackSelection createAdaptiveTrackSelection( * empty. May be in any order. * @param bandwidthMeter Provides an estimate of the currently available bandwidth. */ - public AdaptiveTrackSelection(TrackGroup group, int[] tracks, - BandwidthMeter bandwidthMeter) { + public AdaptiveTrackSelection(TrackGroup group, int[] tracks, BandwidthMeter bandwidthMeter) { this( group, tracks, + TrackSelection.TYPE_UNSET, bandwidthMeter, - /* reservedBandwidth= */ 0, DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS, DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS, DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS, + DEFAULT_MAX_WIDTH_TO_DISCARD, + DEFAULT_MAX_HEIGHT_TO_DISCARD, DEFAULT_BANDWIDTH_FRACTION, DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE, - DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS, + /* adaptationCheckpoints= */ ImmutableList.of(), Clock.DEFAULT); } @@ -329,17 +345,20 @@ public AdaptiveTrackSelection(TrackGroup group, int[] tracks, * @param group The {@link TrackGroup}. 
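Because createAdaptiveTrackSelection stays a protected hook after this refactor, here is a hedged sketch of a Factory subclass that overrides it, for example to observe which groups become adaptive. The class name and logging are hypothetical; the override assumes the no-argument Factory constructor declared earlier in this class and the usual imports (TrackGroup, BandwidthMeter, Guava's ImmutableList).

    public final class LoggingAdaptiveFactory extends AdaptiveTrackSelection.Factory {
      @Override
      protected AdaptiveTrackSelection createAdaptiveTrackSelection(
          TrackGroup group,
          int[] tracks,
          int type,
          BandwidthMeter bandwidthMeter,
          ImmutableList<AdaptiveTrackSelection.AdaptationCheckpoint> adaptationCheckpoints) {
        // Purely illustrative side effect; delegate to the default behaviour.
        android.util.Log.d("TrackSelection", "adaptive group with " + tracks.length + " tracks");
        return super.createAdaptiveTrackSelection(
            group, tracks, type, bandwidthMeter, adaptationCheckpoints);
      }
    }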
* @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be * empty. May be in any order. + * @param type The type that will be returned from {@link TrackSelection#getType()}. * @param bandwidthMeter Provides an estimate of the currently available bandwidth. - * @param reservedBandwidth The reserved bandwidth, which shouldn't be considered available for - * use, in bits per second. * @param minDurationForQualityIncreaseMs The minimum duration of buffered data required for the * selected track to switch to one of higher quality. * @param maxDurationForQualityDecreaseMs The maximum duration of buffered data required for the * selected track to switch to one of lower quality. - * @param minDurationToRetainAfterDiscardMs When switching to a track of significantly higher - * quality, the selection may indicate that media already buffered at the lower quality can be - * discarded to speed up the switch. This is the minimum duration of media that must be - * retained at the lower quality. + * @param minDurationToRetainAfterDiscardMs When switching to a video track of higher quality, the + * selection may indicate that media already buffered at the lower quality can be discarded to + * speed up the switch. This is the minimum duration of media that must be retained at the + * lower quality. It must be at least {@code minDurationForQualityIncreaseMs}. + * @param maxWidthToDiscard The maximum video width that the selector may discard from the buffer + * to speed up switching to a higher quality. + * @param maxHeightToDiscard The maximum video height that the selector may discard from the + * buffer to speed up switching to a higher quality. * @param bandwidthFraction The fraction of the available bandwidth that the selection should * consider available for use. Setting to a value less than 1 is recommended to account for * inaccuracies in the bandwidth estimator. @@ -349,73 +368,60 @@ public AdaptiveTrackSelection(TrackGroup group, int[] tracks, * when the playback position is closer to the live edge than {@code * minDurationForQualityIncreaseMs}, which would otherwise prevent switching to a higher * quality from happening. - * @param minTimeBetweenBufferReevaluationMs The track selection may periodically reevaluate its - * buffer and discard some chunks of lower quality to improve the playback quality if network - * condition has changed. This is the minimum duration between 2 consecutive buffer - * reevaluation calls. + * @param adaptationCheckpoints The {@link AdaptationCheckpoint checkpoints} that can be used to + * calculate available bandwidth for this selection. + * @param clock The {@link Clock}. 
*/ - public AdaptiveTrackSelection( + protected AdaptiveTrackSelection( TrackGroup group, int[] tracks, + @Type int type, BandwidthMeter bandwidthMeter, - long reservedBandwidth, long minDurationForQualityIncreaseMs, long maxDurationForQualityDecreaseMs, long minDurationToRetainAfterDiscardMs, + int maxWidthToDiscard, + int maxHeightToDiscard, float bandwidthFraction, float bufferedFractionToLiveEdgeForQualityIncrease, - long minTimeBetweenBufferReevaluationMs, + List adaptationCheckpoints, Clock clock) { - this( - group, - tracks, - new DefaultBandwidthProvider(bandwidthMeter, bandwidthFraction, reservedBandwidth), - minDurationForQualityIncreaseMs, - maxDurationForQualityDecreaseMs, - minDurationToRetainAfterDiscardMs, - bufferedFractionToLiveEdgeForQualityIncrease, - minTimeBetweenBufferReevaluationMs, - clock); - } - - private AdaptiveTrackSelection( - TrackGroup group, - int[] tracks, - BandwidthProvider bandwidthProvider, - long minDurationForQualityIncreaseMs, - long maxDurationForQualityDecreaseMs, - long minDurationToRetainAfterDiscardMs, - float bufferedFractionToLiveEdgeForQualityIncrease, - long minTimeBetweenBufferReevaluationMs, - Clock clock) { - super(group, tracks); - this.bandwidthProvider = bandwidthProvider; + super(group, tracks, type); + if (minDurationToRetainAfterDiscardMs < minDurationForQualityIncreaseMs) { + Log.w( + TAG, + "Adjusting minDurationToRetainAfterDiscardMs to be at least" + + " minDurationForQualityIncreaseMs"); + minDurationToRetainAfterDiscardMs = minDurationForQualityIncreaseMs; + } + this.bandwidthMeter = bandwidthMeter; this.minDurationForQualityIncreaseUs = minDurationForQualityIncreaseMs * 1000L; this.maxDurationForQualityDecreaseUs = maxDurationForQualityDecreaseMs * 1000L; this.minDurationToRetainAfterDiscardUs = minDurationToRetainAfterDiscardMs * 1000L; + this.maxWidthToDiscard = maxWidthToDiscard; + this.maxHeightToDiscard = maxHeightToDiscard; + this.bandwidthFraction = bandwidthFraction; this.bufferedFractionToLiveEdgeForQualityIncrease = bufferedFractionToLiveEdgeForQualityIncrease; - this.minTimeBetweenBufferReevaluationMs = minTimeBetweenBufferReevaluationMs; + this.adaptationCheckpoints = ImmutableList.copyOf(adaptationCheckpoints); this.clock = clock; playbackSpeed = 1f; reason = C.SELECTION_REASON_UNKNOWN; lastBufferEvaluationMs = C.TIME_UNSET; } - /** - * Sets checkpoints to determine the allocation bandwidth based on the total bandwidth. - * - * @param allocationCheckpoints List of checkpoints. Each element must be a long[2], with [0] - * being the total bandwidth and [1] being the allocated bandwidth. - */ - public void experimental_setBandwidthAllocationCheckpoints(long[][] allocationCheckpoints) { - ((DefaultBandwidthProvider) bandwidthProvider) - .experimental_setBandwidthAllocationCheckpoints(allocationCheckpoints); - } - + @CallSuper @Override public void enable() { lastBufferEvaluationMs = C.TIME_UNSET; + lastBufferEvaluationMediaChunk = null; + } + + @CallSuper + @Override + public void disable() { + // Avoid keeping a reference to a MediaChunk in case it prevents garbage collection. 
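One behavioural nuance of the constructor guard added above: an inconsistent configuration is repaired with a warning instead of being rejected. A minimal sketch of the effect, with illustrative values:

    // Passing a retain duration below the quality-increase duration no longer yields an
    // inconsistent selection; it is clamped up and a warning is logged.
    long minDurationForQualityIncreaseMs = 10_000;
    long minDurationToRetainAfterDiscardMs = 5_000; // too small
    if (minDurationToRetainAfterDiscardMs < minDurationForQualityIncreaseMs) {
      minDurationToRetainAfterDiscardMs = minDurationForQualityIncreaseMs; // effectively 10_000
    }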
+ lastBufferEvaluationMediaChunk = null; } @Override @@ -431,41 +437,46 @@ public void updateSelectedTrack( List queue, MediaChunkIterator[] mediaChunkIterators) { long nowMs = clock.elapsedRealtime(); + long chunkDurationUs = getNextChunkDurationUs(mediaChunkIterators, queue); // Make initial selection if (reason == C.SELECTION_REASON_UNKNOWN) { reason = C.SELECTION_REASON_INITIAL; - selectedIndex = determineIdealSelectedIndex(nowMs); + selectedIndex = determineIdealSelectedIndex(nowMs, chunkDurationUs); return; } - // Stash the current selection, then make a new one. - int currentSelectedIndex = selectedIndex; - selectedIndex = determineIdealSelectedIndex(nowMs); - if (selectedIndex == currentSelectedIndex) { - return; + int previousSelectedIndex = selectedIndex; + @C.SelectionReason int previousReason = reason; + int formatIndexOfPreviousChunk = + queue.isEmpty() ? C.INDEX_UNSET : indexOf(Iterables.getLast(queue).trackFormat); + if (formatIndexOfPreviousChunk != C.INDEX_UNSET) { + previousSelectedIndex = formatIndexOfPreviousChunk; + previousReason = Iterables.getLast(queue).trackSelectionReason; } - - if (!isBlacklisted(currentSelectedIndex, nowMs)) { - // Revert back to the current selection if conditions are not suitable for switching. - Format currentFormat = getFormat(currentSelectedIndex); - Format selectedFormat = getFormat(selectedIndex); + int newSelectedIndex = determineIdealSelectedIndex(nowMs, chunkDurationUs); + if (!isBlacklisted(previousSelectedIndex, nowMs)) { + // Revert back to the previous selection if conditions are not suitable for switching. + Format currentFormat = getFormat(previousSelectedIndex); + Format selectedFormat = getFormat(newSelectedIndex); + long minDurationForQualityIncreaseUs = + minDurationForQualityIncreaseUs(availableDurationUs, chunkDurationUs); if (selectedFormat.bitrate > currentFormat.bitrate - && bufferedDurationUs < minDurationForQualityIncreaseUs(availableDurationUs)) { + && bufferedDurationUs < minDurationForQualityIncreaseUs) { // The selected track is a higher quality, but we have insufficient buffer to safely switch // up. Defer switching up for now. - selectedIndex = currentSelectedIndex; + newSelectedIndex = previousSelectedIndex; } else if (selectedFormat.bitrate < currentFormat.bitrate && bufferedDurationUs >= maxDurationForQualityDecreaseUs) { // The selected track is a lower quality, but we have sufficient buffer to defer switching // down for now. - selectedIndex = currentSelectedIndex; + newSelectedIndex = previousSelectedIndex; } } // If we adapted, update the trigger. - if (selectedIndex != currentSelectedIndex) { - reason = C.SELECTION_REASON_ADAPTIVE; - } + reason = + newSelectedIndex == previousSelectedIndex ? previousReason : C.SELECTION_REASON_ADAPTIVE; + selectedIndex = newSelectedIndex; } @Override @@ -474,7 +485,7 @@ public int getSelectedIndex() { } @Override - public int getSelectionReason() { + public @C.SelectionReason int getSelectionReason() { return reason; } @@ -487,15 +498,15 @@ public Object getSelectionData() { @Override public int evaluateQueueSize(long playbackPositionUs, List queue) { long nowMs = clock.elapsedRealtime(); - if (!shouldEvaluateQueueSize(nowMs)) { + if (!shouldEvaluateQueueSize(nowMs, queue)) { return queue.size(); } - lastBufferEvaluationMs = nowMs; + lastBufferEvaluationMediaChunk = queue.isEmpty() ? 
null : Iterables.getLast(queue); + if (queue.isEmpty()) { return 0; } - int queueSize = queue.size(); MediaChunk lastChunk = queue.get(queueSize - 1); long playoutBufferedDurationBeforeLastChunkUs = @@ -505,11 +516,11 @@ public int evaluateQueueSize(long playbackPositionUs, List if (playoutBufferedDurationBeforeLastChunkUs < minDurationToRetainAfterDiscardUs) { return queueSize; } - int idealSelectedIndex = determineIdealSelectedIndex(nowMs); + int idealSelectedIndex = determineIdealSelectedIndex(nowMs, getLastChunkDurationUs(queue)); Format idealFormat = getFormat(idealSelectedIndex); - // If the chunks contain video, discard from the first SD chunk beyond - // minDurationToRetainAfterDiscardUs whose resolution and bitrate are both lower than the ideal - // track. + // If chunks contain video, discard from the first chunk after minDurationToRetainAfterDiscardUs + // whose resolution and bitrate are both lower than the ideal track, and whose width and height + // are less than or equal to maxWidthToDiscard and maxHeightToDiscard respectively. for (int i = 0; i < queueSize; i++) { MediaChunk chunk = queue.get(i); Format format = chunk.trackFormat; @@ -518,8 +529,10 @@ public int evaluateQueueSize(long playbackPositionUs, List Util.getPlayoutDurationForMediaDuration(mediaDurationBeforeThisChunkUs, playbackSpeed); if (playoutDurationBeforeThisChunkUs >= minDurationToRetainAfterDiscardUs && format.bitrate < idealFormat.bitrate - && format.height != Format.NO_VALUE && format.height < 720 - && format.width != Format.NO_VALUE && format.width < 1280 + && format.height != Format.NO_VALUE + && format.height <= maxHeightToDiscard + && format.width != Format.NO_VALUE + && format.width <= maxWidthToDiscard && format.height < idealFormat.height) { return i; } @@ -533,14 +546,12 @@ public int evaluateQueueSize(long playbackPositionUs, List * @param format The {@link Format} of the candidate track. * @param trackBitrate The estimated bitrate of the track. May differ from {@link Format#bitrate} * if a more accurate estimate of the current track bitrate is available. - * @param playbackSpeed The current playback speed. * @param effectiveBitrate The bitrate available to this selection. * @return Whether this {@link Format} can be selected. */ @SuppressWarnings("unused") - protected boolean canSelectFormat( - Format format, int trackBitrate, float playbackSpeed, long effectiveBitrate) { - return Math.round(trackBitrate * playbackSpeed) <= effectiveBitrate; + protected boolean canSelectFormat(Format format, int trackBitrate, long effectiveBitrate) { + return trackBitrate <= effectiveBitrate; } /** @@ -548,11 +559,13 @@ protected boolean canSelectFormat( * performed. * * @param nowMs The current value of {@link Clock#elapsedRealtime()}. + * @param queue The queue of buffered {@link MediaChunk MediaChunks}. Must not be modified. * @return Whether an evaluation should be performed. */ - protected boolean shouldEvaluateQueueSize(long nowMs) { + protected boolean shouldEvaluateQueueSize(long nowMs, List queue) { return lastBufferEvaluationMs == C.TIME_UNSET - || nowMs - lastBufferEvaluationMs >= minTimeBetweenBufferReevaluationMs; + || nowMs - lastBufferEvaluationMs >= MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS + || (!queue.isEmpty() && !Iterables.getLast(queue).equals(lastBufferEvaluationMediaChunk)); } /** @@ -569,193 +582,270 @@ protected long getMinDurationToRetainAfterDiscardUs() { * Computes the ideal selected index ignoring buffer health. 
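The reworked discard rule in evaluateQueueSize above reads most easily as a single predicate. A restated sketch with the default thresholds noted; the variable names are local to this sketch.

    // A buffered chunk beyond the retain window may be discarded only if it is clearly worse
    // than the ideal track and small enough (by default width <= 1279 and height <= 719).
    boolean discardable =
        playoutDurationBeforeChunkUs >= minDurationToRetainAfterDiscardUs
            && chunkFormat.bitrate < idealFormat.bitrate
            && chunkFormat.height != Format.NO_VALUE
            && chunkFormat.height <= maxHeightToDiscard // 719 by default
            && chunkFormat.width != Format.NO_VALUE
            && chunkFormat.width <= maxWidthToDiscard   // 1279 by default
            && chunkFormat.height < idealFormat.height;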
* * @param nowMs The current time in the timebase of {@link Clock#elapsedRealtime()}, or {@link - * Long#MIN_VALUE} to ignore blacklisting. + * Long#MIN_VALUE} to ignore track exclusion. + * @param chunkDurationUs The duration of a media chunk in microseconds, or {@link C#TIME_UNSET} + * if unknown. */ - private int determineIdealSelectedIndex(long nowMs) { - long effectiveBitrate = bandwidthProvider.getAllocatedBandwidth(); - int lowestBitrateNonBlacklistedIndex = 0; + private int determineIdealSelectedIndex(long nowMs, long chunkDurationUs) { + long effectiveBitrate = getAllocatedBandwidth(chunkDurationUs); + int lowestBitrateAllowedIndex = 0; for (int i = 0; i < length; i++) { if (nowMs == Long.MIN_VALUE || !isBlacklisted(i, nowMs)) { Format format = getFormat(i); - if (canSelectFormat(format, format.bitrate, playbackSpeed, effectiveBitrate)) { + if (canSelectFormat(format, format.bitrate, effectiveBitrate)) { return i; } else { - lowestBitrateNonBlacklistedIndex = i; + lowestBitrateAllowedIndex = i; } } } - return lowestBitrateNonBlacklistedIndex; + return lowestBitrateAllowedIndex; } - private long minDurationForQualityIncreaseUs(long availableDurationUs) { - boolean isAvailableDurationTooShort = availableDurationUs != C.TIME_UNSET - && availableDurationUs <= minDurationForQualityIncreaseUs; - return isAvailableDurationTooShort - ? (long) (availableDurationUs * bufferedFractionToLiveEdgeForQualityIncrease) - : minDurationForQualityIncreaseUs; + private long minDurationForQualityIncreaseUs(long availableDurationUs, long chunkDurationUs) { + if (availableDurationUs == C.TIME_UNSET) { + // We are not in a live stream. Use the configured value. + return minDurationForQualityIncreaseUs; + } + if (chunkDurationUs != C.TIME_UNSET) { + // We are currently selecting a new live chunk. Even under perfect conditions, the buffered + // duration can't include the last chunk duration yet because we are still selecting a track + // for this or a previous chunk. Hence, we subtract one chunk duration from the total + // available live duration to ensure we only compare the buffered duration against what is + // actually achievable. + availableDurationUs -= chunkDurationUs; + } + long adjustedMinDurationForQualityIncreaseUs = + (long) (availableDurationUs * bufferedFractionToLiveEdgeForQualityIncrease); + return min(adjustedMinDurationForQualityIncreaseUs, minDurationForQualityIncreaseUs); } - /** Provides the allocated bandwidth. */ - private interface BandwidthProvider { - - /** Returns the allocated bitrate. */ - long getAllocatedBandwidth(); + /** + * Returns a best estimate of the duration of the next chunk, in microseconds, or {@link + * C#TIME_UNSET} if an estimate could not be determined. + */ + private long getNextChunkDurationUs( + MediaChunkIterator[] mediaChunkIterators, List queue) { + // Try to get the next chunk duration for the currently selected format. + if (selectedIndex < mediaChunkIterators.length && mediaChunkIterators[selectedIndex].next()) { + MediaChunkIterator iterator = mediaChunkIterators[selectedIndex]; + return iterator.getChunkEndTimeUs() - iterator.getChunkStartTimeUs(); + } + // Try to get the next chunk duration for another format, on the assumption that chunks + // belonging to different formats are likely to have identical or similar durations. 
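A worked example of the live up-switch threshold computed by minDurationForQualityIncreaseUs above; all numbers are illustrative.

    long availableDurationUs = 8_000_000; // distance to the live edge
    long chunkDurationUs = 2_000_000;     // estimated duration of the chunk being selected
    float liveFraction = 0.75f;           // bufferedFractionToLiveEdgeForQualityIncrease
    long configuredUs = 10_000_000;       // minDurationForQualityIncreaseUs
    // One chunk duration is subtracted because it cannot be in the buffer yet while the
    // track for that chunk is still being selected.
    long adjustedUs = (long) ((availableDurationUs - chunkDurationUs) * liveFraction); // 4_500_000
    long thresholdUs = Math.min(adjustedUs, configuredUs);                             // 4_500_000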
+ for (MediaChunkIterator iterator : mediaChunkIterators) { + if (iterator.next()) { + return iterator.getChunkEndTimeUs() - iterator.getChunkStartTimeUs(); + } + } + // Try to get chunk duration for last chunk in the queue, on the assumption that the next chunk + // is likely to have a similar duration. + return getLastChunkDurationUs(queue); } - private static final class DefaultBandwidthProvider implements BandwidthProvider { - - private final BandwidthMeter bandwidthMeter; - private final float bandwidthFraction; - private final long reservedBandwidth; - - @Nullable private long[][] allocationCheckpoints; - - /* package */ - // the constructor does not initialize fields: allocationCheckpoints - @SuppressWarnings("nullness:initialization.fields.uninitialized") - DefaultBandwidthProvider( - BandwidthMeter bandwidthMeter, float bandwidthFraction, long reservedBandwidth) { - this.bandwidthMeter = bandwidthMeter; - this.bandwidthFraction = bandwidthFraction; - this.reservedBandwidth = reservedBandwidth; + /** + * Returns the duration of the last chunk in the queue, in microseconds, or {@link C#TIME_UNSET} + * if the queue is empty or if the last chunk has an undefined start or end time. + */ + private long getLastChunkDurationUs(List queue) { + if (queue.isEmpty()) { + return C.TIME_UNSET; } + MediaChunk lastChunk = Iterables.getLast(queue); + return lastChunk.startTimeUs != C.TIME_UNSET && lastChunk.endTimeUs != C.TIME_UNSET + ? lastChunk.endTimeUs - lastChunk.startTimeUs + : C.TIME_UNSET; + } - // unboxing a possibly-null reference allocationCheckpoints[nextIndex][0] - @SuppressWarnings("nullness:unboxing.of.nullable") - @Override - public long getAllocatedBandwidth() { - long totalBandwidth = (long) (bandwidthMeter.getBitrateEstimate() * bandwidthFraction); - long allocatableBandwidth = Math.max(0L, totalBandwidth - reservedBandwidth); - if (allocationCheckpoints == null) { - return allocatableBandwidth; - } - int nextIndex = 1; - while (nextIndex < allocationCheckpoints.length - 1 - && allocationCheckpoints[nextIndex][0] < allocatableBandwidth) { - nextIndex++; - } - long[] previous = allocationCheckpoints[nextIndex - 1]; - long[] next = allocationCheckpoints[nextIndex]; - float fractionBetweenCheckpoints = - (float) (allocatableBandwidth - previous[0]) / (next[0] - previous[0]); - return previous[1] + (long) (fractionBetweenCheckpoints * (next[1] - previous[1])); + private long getAllocatedBandwidth(long chunkDurationUs) { + long totalBandwidth = getTotalAllocatableBandwidth(chunkDurationUs); + if (adaptationCheckpoints.isEmpty()) { + return totalBandwidth; + } + int nextIndex = 1; + while (nextIndex < adaptationCheckpoints.size() - 1 + && adaptationCheckpoints.get(nextIndex).totalBandwidth < totalBandwidth) { + nextIndex++; } + AdaptationCheckpoint previous = adaptationCheckpoints.get(nextIndex - 1); + AdaptationCheckpoint next = adaptationCheckpoints.get(nextIndex); + float fractionBetweenCheckpoints = + (float) (totalBandwidth - previous.totalBandwidth) + / (next.totalBandwidth - previous.totalBandwidth); + return previous.allocatedBandwidth + + (long) + (fractionBetweenCheckpoints * (next.allocatedBandwidth - previous.allocatedBandwidth)); + } - /* package */ void experimental_setBandwidthAllocationCheckpoints( - long[][] allocationCheckpoints) { - Assertions.checkArgument(allocationCheckpoints.length >= 2); - this.allocationCheckpoints = allocationCheckpoints; + private long getTotalAllocatableBandwidth(long chunkDurationUs) { + long cautiousBandwidthEstimate = + (long) 
(bandwidthMeter.getBitrateEstimate() * bandwidthFraction); + long timeToFirstByteEstimateUs = bandwidthMeter.getTimeToFirstByteEstimateUs(); + if (timeToFirstByteEstimateUs == C.TIME_UNSET || chunkDurationUs == C.TIME_UNSET) { + return (long) (cautiousBandwidthEstimate / playbackSpeed); } + float availableTimeToLoadUs = + max(chunkDurationUs / playbackSpeed - timeToFirstByteEstimateUs, 0); + return (long) (cautiousBandwidthEstimate * availableTimeToLoadUs / chunkDurationUs); } /** - * Returns allocation checkpoints for allocating bandwidth between multiple adaptive track - * selections. + * Returns adaptation checkpoints for allocating bandwidth for adaptive track selections. * - * @param trackBitrates Array of [selectionIndex][trackIndex] -> trackBitrate. - * @return Array of allocation checkpoints [selectionIndex][checkpointIndex][2] with [0]=total - * bandwidth at checkpoint and [1]=allocated bandwidth at checkpoint. + * @param definitions Array of track selection {@link Definition definitions}. Elements may be + * null. + * @return List of {@link AdaptationCheckpoint checkpoints} for each adaptive {@link Definition} + * with more than one selected track. */ - private static long[][][] getAllocationCheckpoints(long[][] trackBitrates) { - // Algorithm: - // 1. Use log bitrates to treat all resolution update steps equally. - // 2. Distribute switch points for each selection equally in the same [0.0-1.0] range. - // 3. Switch up one format at a time in the order of the switch points. - double[][] logBitrates = getLogArrayValues(trackBitrates); - double[][] switchPoints = getSwitchPoints(logBitrates); - - // There will be (count(switch point) + 3) checkpoints: - // [0] = all zero, [1] = minimum bitrates, [2-(end-1)] = up-switch points, - // [end] = extra point to set slope for additional bitrate. - int checkpointCount = countArrayElements(switchPoints) + 3; - long[][][] checkpoints = new long[logBitrates.length][checkpointCount][2]; - int[] currentSelection = new int[logBitrates.length]; - setCheckpointValues(checkpoints, /* checkpointIndex= */ 1, trackBitrates, currentSelection); - for (int checkpointIndex = 2; checkpointIndex < checkpointCount - 1; checkpointIndex++) { - int nextUpdateIndex = 0; - double nextUpdateSwitchPoint = Double.MAX_VALUE; - for (int i = 0; i < logBitrates.length; i++) { - if (currentSelection[i] + 1 == logBitrates[i].length) { - continue; - } - double switchPoint = switchPoints[i][currentSelection[i]]; - if (switchPoint < nextUpdateSwitchPoint) { - nextUpdateSwitchPoint = switchPoint; - nextUpdateIndex = i; - } + private static ImmutableList> getAdaptationCheckpoints( + @NullableType Definition[] definitions) { + List> checkPointBuilders = + new ArrayList<>(); + for (int i = 0; i < definitions.length; i++) { + if (definitions[i] != null && definitions[i].tracks.length > 1) { + ImmutableList.Builder builder = ImmutableList.builder(); + // Add initial all-zero checkpoint. + builder.add(new AdaptationCheckpoint(/* totalBandwidth= */ 0, /* allocatedBandwidth= */ 0)); + checkPointBuilders.add(builder); + } else { + checkPointBuilders.add(null); + } + } + // Add minimum bitrate selection checkpoint. + long[][] trackBitrates = getSortedTrackBitrates(definitions); + int[] currentTrackIndices = new int[trackBitrates.length]; + long[] currentTrackBitrates = new long[trackBitrates.length]; + for (int i = 0; i < trackBitrates.length; i++) { + currentTrackBitrates[i] = trackBitrates[i].length == 0 ? 
0 : trackBitrates[i][0]; + } + addCheckpoint(checkPointBuilders, currentTrackBitrates); + // Iterate through all adaptive checkpoints. + ImmutableList switchOrder = getSwitchOrder(trackBitrates); + for (int i = 0; i < switchOrder.size(); i++) { + int switchIndex = switchOrder.get(i); + int newTrackIndex = ++currentTrackIndices[switchIndex]; + currentTrackBitrates[switchIndex] = trackBitrates[switchIndex][newTrackIndex]; + addCheckpoint(checkPointBuilders, currentTrackBitrates); + } + // Add final checkpoint to extrapolate additional bandwidth for adaptive selections. + for (int i = 0; i < definitions.length; i++) { + if (checkPointBuilders.get(i) != null) { + currentTrackBitrates[i] *= 2; } - currentSelection[nextUpdateIndex]++; - setCheckpointValues(checkpoints, checkpointIndex, trackBitrates, currentSelection); } - for (long[][] points : checkpoints) { - points[checkpointCount - 1][0] = 2 * points[checkpointCount - 2][0]; - points[checkpointCount - 1][1] = 2 * points[checkpointCount - 2][1]; + addCheckpoint(checkPointBuilders, currentTrackBitrates); + ImmutableList.Builder> output = ImmutableList.builder(); + for (int i = 0; i < checkPointBuilders.size(); i++) { + @Nullable ImmutableList.Builder builder = checkPointBuilders.get(i); + output.add(builder == null ? ImmutableList.of() : builder.build()); } - return checkpoints; + return output.build(); } - /** Converts all input values to Math.log(value). */ - private static double[][] getLogArrayValues(long[][] values) { - double[][] logValues = new double[values.length][]; - for (int i = 0; i < values.length; i++) { - logValues[i] = new double[values[i].length]; - for (int j = 0; j < values[i].length; j++) { - logValues[i][j] = values[i][j] == Format.NO_VALUE ? 0 : Math.log(values[i][j]); + /** Returns sorted track bitrates for all selected tracks. */ + private static long[][] getSortedTrackBitrates(@NullableType Definition[] definitions) { + long[][] trackBitrates = new long[definitions.length][]; + for (int i = 0; i < definitions.length; i++) { + @Nullable Definition definition = definitions[i]; + if (definition == null) { + trackBitrates[i] = new long[0]; + continue; } + trackBitrates[i] = new long[definition.tracks.length]; + for (int j = 0; j < definition.tracks.length; j++) { + long bitrate = definition.group.getFormat(definition.tracks[j]).bitrate; + trackBitrates[i][j] = bitrate == Format.NO_VALUE ? 0 : bitrate; + } + Arrays.sort(trackBitrates[i]); } - return logValues; + return trackBitrates; } /** - * Returns idealized switch points for each switch between consecutive track selection bitrates. + * Returns order of track indices in which the respective track should be switched up. * - * @param logBitrates Log bitrates with [selectionCount][formatCount]. - * @return Linearly distributed switch points in the range of [0.0-1.0]. + * @param trackBitrates Sorted tracks bitrates for each selection. + * @return List of track indices indicating in which order tracks should be switched up. */ - private static double[][] getSwitchPoints(double[][] logBitrates) { - double[][] switchPoints = new double[logBitrates.length][]; - for (int i = 0; i < logBitrates.length; i++) { - switchPoints[i] = new double[logBitrates[i].length - 1]; - if (switchPoints[i].length == 0) { + private static ImmutableList getSwitchOrder(long[][] trackBitrates) { + // Algorithm: + // 1. Use log bitrates to treat all bitrate update steps equally. + // 2. Distribute switch points for each selection equally in the same [0.0-1.0] range. + // 3. 
Switch up one format at a time in the order of the switch points. + Multimap switchPoints = MultimapBuilder.treeKeys().arrayListValues().build(); + for (int i = 0; i < trackBitrates.length; i++) { + if (trackBitrates[i].length <= 1) { continue; } - double totalBitrateDiff = logBitrates[i][logBitrates[i].length - 1] - logBitrates[i][0]; - for (int j = 0; j < logBitrates[i].length - 1; j++) { - double switchBitrate = 0.5 * (logBitrates[i][j] + logBitrates[i][j + 1]); - switchPoints[i][j] = - totalBitrateDiff == 0.0 ? 1.0 : (switchBitrate - logBitrates[i][0]) / totalBitrateDiff; + double[] logBitrates = new double[trackBitrates[i].length]; + for (int j = 0; j < trackBitrates[i].length; j++) { + logBitrates[j] = + trackBitrates[i][j] == Format.NO_VALUE ? 0 : Math.log((double) trackBitrates[i][j]); + } + double totalBitrateDiff = logBitrates[logBitrates.length - 1] - logBitrates[0]; + for (int j = 0; j < logBitrates.length - 1; j++) { + double switchBitrate = 0.5 * (logBitrates[j] + logBitrates[j + 1]); + double switchPoint = + totalBitrateDiff == 0.0 ? 1.0 : (switchBitrate - logBitrates[0]) / totalBitrateDiff; + switchPoints.put(switchPoint, i); } } - return switchPoints; - } - - /** Returns total number of elements in a 2D array. */ - private static int countArrayElements(double[][] array) { - int count = 0; - for (double[] subArray : array) { - count += subArray.length; - } - return count; + return ImmutableList.copyOf(switchPoints.values()); } /** - * Sets checkpoint bitrates. + * Add a checkpoint to the builders. * - * @param checkpoints Output checkpoints with [selectionIndex][checkpointIndex][2] where [0]=Total - * bitrate and [1]=Allocated bitrate. - * @param checkpointIndex The checkpoint index. - * @param trackBitrates The track bitrates with [selectionIndex][trackIndex]. - * @param selectedTracks The indices of selected tracks for each selection for this checkpoint. + * @param checkPointBuilders Builders for adaptation checkpoints. May have null elements. + * @param checkpointBitrates The bitrates of each track at this checkpoint. */ - private static void setCheckpointValues( - long[][][] checkpoints, int checkpointIndex, long[][] trackBitrates, int[] selectedTracks) { + private static void addCheckpoint( + List> checkPointBuilders, + long[] checkpointBitrates) { + // Total bitrate includes all fixed tracks. long totalBitrate = 0; - for (int i = 0; i < checkpoints.length; i++) { - checkpoints[i][checkpointIndex][1] = trackBitrates[i][selectedTracks[i]]; - totalBitrate += checkpoints[i][checkpointIndex][1]; + for (int i = 0; i < checkpointBitrates.length; i++) { + totalBitrate += checkpointBitrates[i]; + } + for (int i = 0; i < checkPointBuilders.size(); i++) { + @Nullable ImmutableList.Builder builder = checkPointBuilders.get(i); + if (builder == null) { + continue; + } + builder.add( + new AdaptationCheckpoint( + /* totalBandwidth= */ totalBitrate, /* allocatedBandwidth= */ checkpointBitrates[i])); } - for (long[][] points : checkpoints) { - points[checkpointIndex][0] = totalBitrate; + } + + /** Checkpoint to determine allocated bandwidth. */ + public static final class AdaptationCheckpoint { + + /** Total bandwidth in bits per second at which this checkpoint applies. */ + public final long totalBandwidth; + /** Allocated bandwidth at this checkpoint in bits per second. 
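The AdaptationCheckpoint value class introduced here drives a piecewise-linear mapping from total bandwidth to the share allocated to one adaptive selection (see getAllocatedBandwidth above). A small worked example with made-up checkpoints:

    // Checkpoints as (totalBandwidth -> allocatedBandwidth), in bits per second:
    // (0 -> 0), (1_000_000 -> 500_000), (4_000_000 -> 2_000_000)
    long total = 2_500_000; // current total allocatable bandwidth
    // 2_500_000 lies halfway between the second and third checkpoints:
    float f = (2_500_000f - 1_000_000f) / (4_000_000f - 1_000_000f); // 0.5
    long allocated = 500_000 + (long) (f * (2_000_000 - 500_000));   // 1_250_000 bits per second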
*/ + public final long allocatedBandwidth; + + public AdaptationCheckpoint(long totalBandwidth, long allocatedBandwidth) { + this.totalBandwidth = totalBandwidth; + this.allocatedBandwidth = allocatedBandwidth; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (!(o instanceof AdaptationCheckpoint)) { + return false; + } + AdaptationCheckpoint that = (AdaptationCheckpoint) o; + return totalBandwidth == that.totalBandwidth && allocatedBandwidth == that.allocatedBandwidth; + } + + @Override + public int hashCode() { + return 31 * (int) totalBandwidth + (int) allocatedBandwidth; } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/BaseTrackSelection.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/BaseTrackSelection.java index dc0b3f6747..d0ddf18b8f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/BaseTrackSelection.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/BaseTrackSelection.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.trackselection; +import static java.lang.Math.max; + import android.os.SystemClock; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; @@ -24,35 +26,24 @@ import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; import java.util.Arrays; -import java.util.Comparator; import java.util.List; -/** - * An abstract base class suitable for most {@link TrackSelection} implementations. - */ -public abstract class BaseTrackSelection implements TrackSelection { +/** An abstract base class suitable for most {@link ExoTrackSelection} implementations. */ +public abstract class BaseTrackSelection implements ExoTrackSelection { - /** - * The selected {@link TrackGroup}. - */ + /** The selected {@link TrackGroup}. */ protected final TrackGroup group; - /** - * The number of selected tracks within the {@link TrackGroup}. Always greater than zero. - */ + /** The number of selected tracks within the {@link TrackGroup}. Always greater than zero. */ protected final int length; - /** - * The indices of the selected tracks in {@link #group}, in order of decreasing bandwidth. - */ + /** The indices of the selected tracks in {@link #group}, in order of decreasing bandwidth. */ protected final int[] tracks; - /** - * The {@link Format}s of the selected tracks, in order of decreasing bandwidth. - */ + /** The type of the selection. */ + private final @Type int type; + /** The {@link Format}s of the selected tracks, in order of decreasing bandwidth. */ private final Format[] formats; - /** - * Selected track blacklist timestamps, in order of decreasing bandwidth. - */ - private final long[] blacklistUntilTimes; + /** Selected track exclusion timestamps, in order of decreasing bandwidth. */ + private final long[] excludeUntilTimes; // Lazily initialized hashcode. private int hashCode; @@ -63,7 +54,18 @@ public abstract class BaseTrackSelection implements TrackSelection { * null or empty. May be in any order. */ public BaseTrackSelection(TrackGroup group, int... tracks) { + this(group, tracks, TrackSelection.TYPE_UNSET); + } + + /** + * @param group The {@link TrackGroup}. Must not be null. + * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be + * null or empty. May be in any order. + * @param type The type that will be returned from {@link TrackSelection#getType()}. 
+ */ + public BaseTrackSelection(TrackGroup group, int[] tracks, @Type int type) { Assertions.checkState(tracks.length > 0); + this.type = type; this.group = Assertions.checkNotNull(group); this.length = tracks.length; // Set the formats, sorted in order of decreasing bandwidth. @@ -71,23 +73,21 @@ public BaseTrackSelection(TrackGroup group, int... tracks) { for (int i = 0; i < tracks.length; i++) { formats[i] = group.getFormat(tracks[i]); } - Arrays.sort(formats, new DecreasingBandwidthComparator()); + // Sort in order of decreasing bandwidth. + Arrays.sort(formats, (a, b) -> b.bitrate - a.bitrate); // Set the format indices in the same order. this.tracks = new int[length]; for (int i = 0; i < length; i++) { this.tracks[i] = group.indexOf(formats[i]); } - blacklistUntilTimes = new long[length]; + excludeUntilTimes = new long[length]; } - @Override - public void enable() { - // Do nothing. - } + // TrackSelection implementation. @Override - public void disable() { - // Do nothing. + public final int getType() { + return type; } @Override @@ -131,6 +131,8 @@ public final int indexOf(int indexInTrackGroup) { return C.INDEX_UNSET; } + // ExoTrackSelection specific methods. + @Override public final Format getSelectedFormat() { return formats[getSelectedIndex()]; @@ -141,6 +143,16 @@ public final int getSelectedIndexInTrackGroup() { return tracks[getSelectedIndex()]; } + @Override + public void enable() { + // Do nothing. + } + + @Override + public void disable() { + // Do nothing. + } + @Override public void onPlaybackSpeed(float playbackSpeed) { // Do nothing. @@ -152,30 +164,25 @@ public int evaluateQueueSize(long playbackPositionUs, List } @Override - public final boolean blacklist(int index, long blacklistDurationMs) { + public boolean blacklist(int index, long exclusionDurationMs) { long nowMs = SystemClock.elapsedRealtime(); - boolean canBlacklist = isBlacklisted(index, nowMs); - for (int i = 0; i < length && !canBlacklist; i++) { - canBlacklist = i != index && !isBlacklisted(i, nowMs); + boolean canExclude = isBlacklisted(index, nowMs); + for (int i = 0; i < length && !canExclude; i++) { + canExclude = i != index && !isBlacklisted(i, nowMs); } - if (!canBlacklist) { + if (!canExclude) { return false; } - blacklistUntilTimes[index] = - Math.max( - blacklistUntilTimes[index], - Util.addWithOverflowDefault(nowMs, blacklistDurationMs, Long.MAX_VALUE)); + excludeUntilTimes[index] = + max( + excludeUntilTimes[index], + Util.addWithOverflowDefault(nowMs, exclusionDurationMs, Long.MAX_VALUE)); return true; } - /** - * Returns whether the track at the specified index in the selection is blacklisted. - * - * @param index The index of the track in the selection. - * @param nowMs The current time in the timebase of {@link SystemClock#elapsedRealtime()}. - */ - protected final boolean isBlacklisted(int index, long nowMs) { - return blacklistUntilTimes[index] > nowMs; + @Override + public boolean isBlacklisted(int index, long nowMs) { + return excludeUntilTimes[index] > nowMs; } // Object overrides. @@ -201,17 +208,4 @@ public boolean equals(@Nullable Object obj) { BaseTrackSelection other = (BaseTrackSelection) obj; return group == other.group && Arrays.equals(tracks, other.tracks); } - - /** - * Sorts {@link Format} objects in order of decreasing bandwidth. 
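The blacklist bookkeeping in BaseTrackSelection is renamed to exclusion above; a hedged restatement of what blacklist(index, exclusionDurationMs) now does, with an illustrative duration:

    // Exclusion only succeeds if at least one other track would remain selectable; it then
    // extends the exclusion window, clamping on overflow.
    long nowMs = SystemClock.elapsedRealtime();
    long exclusionDurationMs = 60_000; // illustrative
    excludeUntilTimes[index] =
        Math.max(
            excludeUntilTimes[index],
            Util.addWithOverflowDefault(nowMs, exclusionDurationMs, Long.MAX_VALUE));
    // isBlacklisted(index, nowMs) then simply checks excludeUntilTimes[index] > nowMs.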
- */ - private static final class DecreasingBandwidthComparator implements Comparator { - - @Override - public int compare(Format a, Format b) { - return b.bitrate - a.bitrate; - } - - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/BufferSizeAdaptationBuilder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/BufferSizeAdaptationBuilder.java deleted file mode 100644 index b850a08aeb..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/BufferSizeAdaptationBuilder.java +++ /dev/null @@ -1,494 +0,0 @@ -/* - * Copyright (C) 2018 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.trackselection; - -import android.util.Pair; -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.DefaultLoadControl; -import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.LoadControl; -import com.google.android.exoplayer2.source.TrackGroup; -import com.google.android.exoplayer2.source.chunk.MediaChunk; -import com.google.android.exoplayer2.source.chunk.MediaChunkIterator; -import com.google.android.exoplayer2.trackselection.TrackSelection.Definition; -import com.google.android.exoplayer2.upstream.BandwidthMeter; -import com.google.android.exoplayer2.upstream.DefaultAllocator; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.Clock; -import java.util.List; -import org.checkerframework.checker.nullness.compatqual.NullableType; - -/** - * Builder for a {@link TrackSelection.Factory} and {@link LoadControl} that implement buffer size - * based track adaptation. - */ -public final class BufferSizeAdaptationBuilder { - - /** Dynamic filter for formats, which is applied when selecting a new track. */ - public interface DynamicFormatFilter { - - /** Filter which allows all formats. */ - DynamicFormatFilter NO_FILTER = (format, trackBitrate, isInitialSelection) -> true; - - /** - * Called when updating the selected track to determine whether a candidate track is allowed. If - * no format is allowed or eligible, the lowest quality format will be used. - * - * @param format The {@link Format} of the candidate track. - * @param trackBitrate The estimated bitrate of the track. May differ from {@link - * Format#bitrate} if a more accurate estimate of the current track bitrate is available. - * @param isInitialSelection Whether this is for the initial track selection. - */ - boolean isFormatAllowed(Format format, int trackBitrate, boolean isInitialSelection); - } - - /** - * The default minimum duration of media that the player will attempt to ensure is buffered at all - * times, in milliseconds. - */ - public static final int DEFAULT_MIN_BUFFER_MS = 15000; - - /** - * The default maximum duration of media that the player will attempt to buffer, in milliseconds. 
- */ - public static final int DEFAULT_MAX_BUFFER_MS = 50000; - - /** - * The default duration of media that must be buffered for playback to start or resume following a - * user action such as a seek, in milliseconds. - */ - public static final int DEFAULT_BUFFER_FOR_PLAYBACK_MS = - DefaultLoadControl.DEFAULT_BUFFER_FOR_PLAYBACK_MS; - - /** - * The default duration of media that must be buffered for playback to resume after a rebuffer, in - * milliseconds. A rebuffer is defined to be caused by buffer depletion rather than a user action. - */ - public static final int DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS = - DefaultLoadControl.DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS; - - /** - * The default offset the current duration of buffered media must deviate from the ideal duration - * of buffered media for the currently selected format, before the selected format is changed. - */ - public static final int DEFAULT_HYSTERESIS_BUFFER_MS = 5000; - - /** - * During start-up phase, the default fraction of the available bandwidth that the selection - * should consider available for use. Setting to a value less than 1 is recommended to account for - * inaccuracies in the bandwidth estimator. - */ - public static final float DEFAULT_START_UP_BANDWIDTH_FRACTION = - AdaptiveTrackSelection.DEFAULT_BANDWIDTH_FRACTION; - - /** - * During start-up phase, the default minimum duration of buffered media required for the selected - * track to switch to one of higher quality based on measured bandwidth. - */ - public static final int DEFAULT_START_UP_MIN_BUFFER_FOR_QUALITY_INCREASE_MS = - AdaptiveTrackSelection.DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS; - - @Nullable private DefaultAllocator allocator; - private Clock clock; - private int minBufferMs; - private int maxBufferMs; - private int bufferForPlaybackMs; - private int bufferForPlaybackAfterRebufferMs; - private int hysteresisBufferMs; - private float startUpBandwidthFraction; - private int startUpMinBufferForQualityIncreaseMs; - private DynamicFormatFilter dynamicFormatFilter; - private boolean buildCalled; - - /** Creates builder with default values. */ - public BufferSizeAdaptationBuilder() { - clock = Clock.DEFAULT; - minBufferMs = DEFAULT_MIN_BUFFER_MS; - maxBufferMs = DEFAULT_MAX_BUFFER_MS; - bufferForPlaybackMs = DEFAULT_BUFFER_FOR_PLAYBACK_MS; - bufferForPlaybackAfterRebufferMs = DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS; - hysteresisBufferMs = DEFAULT_HYSTERESIS_BUFFER_MS; - startUpBandwidthFraction = DEFAULT_START_UP_BANDWIDTH_FRACTION; - startUpMinBufferForQualityIncreaseMs = DEFAULT_START_UP_MIN_BUFFER_FOR_QUALITY_INCREASE_MS; - dynamicFormatFilter = DynamicFormatFilter.NO_FILTER; - } - - /** - * Set the clock to use. Should only be set for testing purposes. - * - * @param clock The {@link Clock}. - * @return This builder, for convenience. - * @throws IllegalStateException If {@link #buildPlayerComponents()} has already been called. - */ - public BufferSizeAdaptationBuilder setClock(Clock clock) { - Assertions.checkState(!buildCalled); - this.clock = clock; - return this; - } - - /** - * Sets the {@link DefaultAllocator} used by the loader. - * - * @param allocator The {@link DefaultAllocator}. - * @return This builder, for convenience. - * @throws IllegalStateException If {@link #buildPlayerComponents()} has already been called. 
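For context on what callers lose with this deletion: the builder produced a paired TrackSelection.Factory and LoadControl that had to be used together. A rough sketch of its former usage, reconstructed from the deleted file; the buffer values are illustrative.

    Pair<TrackSelection.Factory, LoadControl> components =
        new BufferSizeAdaptationBuilder()
            .setBufferDurationsMs(
                /* minBufferMs= */ 15_000,
                /* maxBufferMs= */ 50_000,
                /* bufferForPlaybackMs= */ 2_500,
                /* bufferForPlaybackAfterRebufferMs= */ 5_000)
            .setHysteresisBufferMs(5_000)
            .buildPlayerComponents();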
- */ - public BufferSizeAdaptationBuilder setAllocator(DefaultAllocator allocator) { - Assertions.checkState(!buildCalled); - this.allocator = allocator; - return this; - } - - /** - * Sets the buffer duration parameters. - * - * @param minBufferMs The minimum duration of media that the player will attempt to ensure is - * buffered at all times, in milliseconds. - * @param maxBufferMs The maximum duration of media that the player will attempt to buffer, in - * milliseconds. - * @param bufferForPlaybackMs The duration of media that must be buffered for playback to start or - * resume following a user action such as a seek, in milliseconds. - * @param bufferForPlaybackAfterRebufferMs The default duration of media that must be buffered for - * playback to resume after a rebuffer, in milliseconds. A rebuffer is defined to be caused by - * buffer depletion rather than a user action. - * @return This builder, for convenience. - * @throws IllegalStateException If {@link #buildPlayerComponents()} has already been called. - */ - public BufferSizeAdaptationBuilder setBufferDurationsMs( - int minBufferMs, - int maxBufferMs, - int bufferForPlaybackMs, - int bufferForPlaybackAfterRebufferMs) { - Assertions.checkState(!buildCalled); - this.minBufferMs = minBufferMs; - this.maxBufferMs = maxBufferMs; - this.bufferForPlaybackMs = bufferForPlaybackMs; - this.bufferForPlaybackAfterRebufferMs = bufferForPlaybackAfterRebufferMs; - return this; - } - - /** - * Sets the hysteresis buffer used to prevent repeated format switching. - * - * @param hysteresisBufferMs The offset the current duration of buffered media must deviate from - * the ideal duration of buffered media for the currently selected format, before the selected - * format is changed. This value must be smaller than {@code maxBufferMs - minBufferMs}. - * @return This builder, for convenience. - * @throws IllegalStateException If {@link #buildPlayerComponents()} has already been called. - */ - public BufferSizeAdaptationBuilder setHysteresisBufferMs(int hysteresisBufferMs) { - Assertions.checkState(!buildCalled); - this.hysteresisBufferMs = hysteresisBufferMs; - return this; - } - - /** - * Sets track selection parameters used during the start-up phase before the selection can be made - * purely on based on buffer size. During the start-up phase the selection is based on the current - * bandwidth estimate. - * - * @param bandwidthFraction The fraction of the available bandwidth that the selection should - * consider available for use. Setting to a value less than 1 is recommended to account for - * inaccuracies in the bandwidth estimator. - * @param minBufferForQualityIncreaseMs The minimum duration of buffered media required for the - * selected track to switch to one of higher quality. - * @return This builder, for convenience. - * @throws IllegalStateException If {@link #buildPlayerComponents()} has already been called. - */ - public BufferSizeAdaptationBuilder setStartUpTrackSelectionParameters( - float bandwidthFraction, int minBufferForQualityIncreaseMs) { - Assertions.checkState(!buildCalled); - this.startUpBandwidthFraction = bandwidthFraction; - this.startUpMinBufferForQualityIncreaseMs = minBufferForQualityIncreaseMs; - return this; - } - - /** - * Sets the {@link DynamicFormatFilter} to use when updating the selected track. - * - * @param dynamicFormatFilter The {@link DynamicFormatFilter}. - * @return This builder, for convenience. - * @throws IllegalStateException If {@link #buildPlayerComponents()} has already been called. 
- */ - public BufferSizeAdaptationBuilder setDynamicFormatFilter( - DynamicFormatFilter dynamicFormatFilter) { - Assertions.checkState(!buildCalled); - this.dynamicFormatFilter = dynamicFormatFilter; - return this; - } - - /** - * Builds player components for buffer size based track adaptation. - * - * @return A pair of a {@link TrackSelection.Factory} and a {@link LoadControl}, which should be - * used to construct the player. - */ - public Pair buildPlayerComponents() { - Assertions.checkArgument(hysteresisBufferMs < maxBufferMs - minBufferMs); - Assertions.checkState(!buildCalled); - buildCalled = true; - - DefaultLoadControl.Builder loadControlBuilder = - new DefaultLoadControl.Builder() - .setTargetBufferBytes(/* targetBufferBytes = */ Integer.MAX_VALUE) - .setBufferDurationsMs( - /* minBufferMs= */ maxBufferMs, - maxBufferMs, - bufferForPlaybackMs, - bufferForPlaybackAfterRebufferMs); - if (allocator != null) { - loadControlBuilder.setAllocator(allocator); - } - - TrackSelection.Factory trackSelectionFactory = - new TrackSelection.Factory() { - @Override - public @NullableType TrackSelection[] createTrackSelections( - @NullableType Definition[] definitions, BandwidthMeter bandwidthMeter) { - return TrackSelectionUtil.createTrackSelectionsForDefinitions( - definitions, - definition -> - new BufferSizeAdaptiveTrackSelection( - definition.group, - definition.tracks, - bandwidthMeter, - minBufferMs, - maxBufferMs, - hysteresisBufferMs, - startUpBandwidthFraction, - startUpMinBufferForQualityIncreaseMs, - dynamicFormatFilter, - clock)); - } - }; - - return Pair.create(trackSelectionFactory, loadControlBuilder.createDefaultLoadControl()); - } - - private static final class BufferSizeAdaptiveTrackSelection extends BaseTrackSelection { - - private static final int BITRATE_BLACKLISTED = Format.NO_VALUE; - - private final BandwidthMeter bandwidthMeter; - private final Clock clock; - private final DynamicFormatFilter dynamicFormatFilter; - private final int[] formatBitrates; - private final long minBufferUs; - private final long maxBufferUs; - private final long hysteresisBufferUs; - private final float startUpBandwidthFraction; - private final long startUpMinBufferForQualityIncreaseUs; - private final int minBitrate; - private final int maxBitrate; - private final double bitrateToBufferFunctionSlope; - private final double bitrateToBufferFunctionIntercept; - - private boolean isInSteadyState; - private int selectedIndex; - private int selectionReason; - private float playbackSpeed; - - private BufferSizeAdaptiveTrackSelection( - TrackGroup trackGroup, - int[] tracks, - BandwidthMeter bandwidthMeter, - int minBufferMs, - int maxBufferMs, - int hysteresisBufferMs, - float startUpBandwidthFraction, - int startUpMinBufferForQualityIncreaseMs, - DynamicFormatFilter dynamicFormatFilter, - Clock clock) { - super(trackGroup, tracks); - this.bandwidthMeter = bandwidthMeter; - this.minBufferUs = C.msToUs(minBufferMs); - this.maxBufferUs = C.msToUs(maxBufferMs); - this.hysteresisBufferUs = C.msToUs(hysteresisBufferMs); - this.startUpBandwidthFraction = startUpBandwidthFraction; - this.startUpMinBufferForQualityIncreaseUs = C.msToUs(startUpMinBufferForQualityIncreaseMs); - this.dynamicFormatFilter = dynamicFormatFilter; - this.clock = clock; - - formatBitrates = new int[length]; - maxBitrate = getFormat(/* index= */ 0).bitrate; - minBitrate = getFormat(/* index= */ length - 1).bitrate; - selectionReason = C.SELECTION_REASON_UNKNOWN; - playbackSpeed = 1.0f; - - // We use a log-linear function to map from 
bitrate to buffer size: - // buffer = slope * ln(bitrate) + intercept, - // with buffer(minBitrate) = minBuffer and buffer(maxBitrate) = maxBuffer - hysteresisBuffer. - bitrateToBufferFunctionSlope = - (maxBufferUs - hysteresisBufferUs - minBufferUs) - / Math.log((double) maxBitrate / minBitrate); - bitrateToBufferFunctionIntercept = - minBufferUs - bitrateToBufferFunctionSlope * Math.log(minBitrate); - } - - @Override - public void onPlaybackSpeed(float playbackSpeed) { - this.playbackSpeed = playbackSpeed; - } - - @Override - public void onDiscontinuity() { - isInSteadyState = false; - } - - @Override - public int getSelectedIndex() { - return selectedIndex; - } - - @Override - public int getSelectionReason() { - return selectionReason; - } - - @Override - @Nullable - public Object getSelectionData() { - return null; - } - - @Override - public void updateSelectedTrack( - long playbackPositionUs, - long bufferedDurationUs, - long availableDurationUs, - List queue, - MediaChunkIterator[] mediaChunkIterators) { - updateFormatBitrates(/* nowMs= */ clock.elapsedRealtime()); - - // Make initial selection - if (selectionReason == C.SELECTION_REASON_UNKNOWN) { - selectionReason = C.SELECTION_REASON_INITIAL; - selectedIndex = selectIdealIndexUsingBandwidth(/* isInitialSelection= */ true); - return; - } - - long bufferUs = getCurrentPeriodBufferedDurationUs(playbackPositionUs, bufferedDurationUs); - int oldSelectedIndex = selectedIndex; - if (isInSteadyState) { - selectIndexSteadyState(bufferUs); - } else { - selectIndexStartUpPhase(bufferUs); - } - if (selectedIndex != oldSelectedIndex) { - selectionReason = C.SELECTION_REASON_ADAPTIVE; - } - } - - // Steady state. - - private void selectIndexSteadyState(long bufferUs) { - if (isOutsideHysteresis(bufferUs)) { - selectedIndex = selectIdealIndexUsingBufferSize(bufferUs); - } - } - - private boolean isOutsideHysteresis(long bufferUs) { - if (formatBitrates[selectedIndex] == BITRATE_BLACKLISTED) { - return true; - } - long targetBufferForCurrentBitrateUs = - getTargetBufferForBitrateUs(formatBitrates[selectedIndex]); - long bufferDiffUs = bufferUs - targetBufferForCurrentBitrateUs; - return Math.abs(bufferDiffUs) > hysteresisBufferUs; - } - - private int selectIdealIndexUsingBufferSize(long bufferUs) { - int lowestBitrateNonBlacklistedIndex = 0; - for (int i = 0; i < formatBitrates.length; i++) { - if (formatBitrates[i] != BITRATE_BLACKLISTED) { - if (getTargetBufferForBitrateUs(formatBitrates[i]) <= bufferUs - && dynamicFormatFilter.isFormatAllowed( - getFormat(i), formatBitrates[i], /* isInitialSelection= */ false)) { - return i; - } - lowestBitrateNonBlacklistedIndex = i; - } - } - return lowestBitrateNonBlacklistedIndex; - } - - // Startup. - - private void selectIndexStartUpPhase(long bufferUs) { - int startUpSelectedIndex = selectIdealIndexUsingBandwidth(/* isInitialSelection= */ false); - int steadyStateSelectedIndex = selectIdealIndexUsingBufferSize(bufferUs); - if (steadyStateSelectedIndex <= selectedIndex) { - // Switch to steady state if we have enough buffer to maintain current selection. - selectedIndex = steadyStateSelectedIndex; - isInSteadyState = true; - } else { - if (bufferUs < startUpMinBufferForQualityIncreaseUs - && startUpSelectedIndex < selectedIndex - && formatBitrates[selectedIndex] != BITRATE_BLACKLISTED) { - // Switching up from a non-blacklisted track is only allowed if we have enough buffer. 
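A small worked example of the log-linear bitrate-to-buffer mapping described in the constructor comment of this class; all numbers are illustrative:

long minBufferUs = 15_000_000L;        // target buffer at the lowest bitrate
long maxBufferUs = 50_000_000L;        // cap on the buffer target
long hysteresisBufferUs = 5_000_000L;  // dead zone that suppresses rapid switching
int minBitrate = 500_000;
int maxBitrate = 4_000_000;

double slope =
    (maxBufferUs - hysteresisBufferUs - minBufferUs) / Math.log((double) maxBitrate / minBitrate);
double intercept = minBufferUs - slope * Math.log(minBitrate);

// buffer(minBitrate) == minBufferUs, buffer(maxBitrate) == maxBufferUs - hysteresisBufferUs,
// and an intermediate 1.5 Mbps track maps to roughly 31 seconds of target buffer.
long targetBufferUs = (long) (slope * Math.log(1_500_000) + intercept);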
- return; - } - selectedIndex = startUpSelectedIndex; - } - } - - private int selectIdealIndexUsingBandwidth(boolean isInitialSelection) { - long effectiveBitrate = - (long) (bandwidthMeter.getBitrateEstimate() * startUpBandwidthFraction); - int lowestBitrateNonBlacklistedIndex = 0; - for (int i = 0; i < formatBitrates.length; i++) { - if (formatBitrates[i] != BITRATE_BLACKLISTED) { - if (Math.round(formatBitrates[i] * playbackSpeed) <= effectiveBitrate - && dynamicFormatFilter.isFormatAllowed( - getFormat(i), formatBitrates[i], isInitialSelection)) { - return i; - } - lowestBitrateNonBlacklistedIndex = i; - } - } - return lowestBitrateNonBlacklistedIndex; - } - - // Utility methods. - - private void updateFormatBitrates(long nowMs) { - for (int i = 0; i < length; i++) { - if (nowMs == Long.MIN_VALUE || !isBlacklisted(i, nowMs)) { - formatBitrates[i] = getFormat(i).bitrate; - } else { - formatBitrates[i] = BITRATE_BLACKLISTED; - } - } - } - - private long getTargetBufferForBitrateUs(int bitrate) { - if (bitrate <= minBitrate) { - return minBufferUs; - } - if (bitrate >= maxBitrate) { - return maxBufferUs - hysteresisBufferUs; - } - return (int) - (bitrateToBufferFunctionSlope * Math.log(bitrate) + bitrateToBufferFunctionIntercept); - } - - private static long getCurrentPeriodBufferedDurationUs( - long playbackPositionUs, long bufferedDurationUs) { - return playbackPositionUs >= 0 ? bufferedDurationUs : playbackPositionUs + bufferedDurationUs; - } - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/DefaultTrackSelector.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/DefaultTrackSelector.java index 5330894dab..786f789857 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/DefaultTrackSelector.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/DefaultTrackSelector.java @@ -15,181 +15,113 @@ */ package com.google.android.exoplayer2.trackselection; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.annotation.ElementType.TYPE_USE; +import static java.util.Collections.max; + +import android.annotation.SuppressLint; import android.content.Context; import android.graphics.Point; -import android.os.Parcel; -import android.os.Parcelable; +import android.media.AudioFormat; +import android.media.AudioManager; +import android.media.Spatializer; +import android.os.Bundle; +import android.os.Handler; +import android.os.Looper; import android.text.TextUtils; import android.util.Pair; import android.util.SparseArray; import android.util.SparseBooleanArray; +import androidx.annotation.GuardedBy; +import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.Bundleable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.C.FormatSupport; +import com.google.android.exoplayer2.C.RoleFlags; import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.Renderer; import com.google.android.exoplayer2.RendererCapabilities; import com.google.android.exoplayer2.RendererCapabilities.AdaptiveSupport; import com.google.android.exoplayer2.RendererCapabilities.Capabilities; -import 
com.google.android.exoplayer2.RendererCapabilities.FormatSupport; import com.google.android.exoplayer2.RendererConfiguration; +import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.audio.AudioAttributes; +import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.TrackGroupArray; -import com.google.android.exoplayer2.upstream.BandwidthMeter; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.BundleableUtil; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Predicate; +import com.google.common.collect.ComparisonChain; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Ordering; +import com.google.common.primitives.Ints; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.ArrayList; import java.util.Arrays; +import java.util.Comparator; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.concurrent.atomic.AtomicReference; -import org.checkerframework.checker.initialization.qual.UnderInitialization; +import java.util.Set; import org.checkerframework.checker.nullness.compatqual.NullableType; /** - * A default {@link TrackSelector} suitable for most use cases. Track selections are made according - * to configurable {@link Parameters}, which can be set by calling {@link - * #setParameters(Parameters)}. - * - *

- * <h3>Modifying parameters</h3>

      + * A default {@link TrackSelector} suitable for most use cases. * - * To modify only some aspects of the parameters currently used by a selector, it's possible to - * obtain a {@link ParametersBuilder} initialized with the current {@link Parameters}. The desired - * modifications can be made on the builder, and the resulting {@link Parameters} can then be built - * and set on the selector. For example the following code modifies the parameters to restrict video - * track selections to SD, and to select a German audio track if there is one: - * - *
- * <pre>{@code
      - * // Build on the current parameters.
      - * Parameters currentParameters = trackSelector.getParameters();
      - * // Build the resulting parameters.
      - * Parameters newParameters = currentParameters
      - *     .buildUpon()
      - *     .setMaxVideoSizeSd()
      - *     .setPreferredAudioLanguage("deu")
      - *     .build();
      - * // Set the new parameters.
      - * trackSelector.setParameters(newParameters);
- * }</pre>
      + *

+ * <h2>Modifying parameters</h2>

      * - * Convenience methods and chaining allow this to be written more concisely as: + * Track selection parameters should be modified by obtaining a {@link + * TrackSelectionParameters.Builder} initialized with the current {@link TrackSelectionParameters} + * from the player. The desired modifications can be made on the builder, and the resulting {@link + * TrackSelectionParameters} can then be built and set on the player: * *
 * <pre>{@code
      - * trackSelector.setParameters(
      - *     trackSelector
      - *         .buildUponParameters()
      + * player.setTrackSelectionParameters(
      + *     player.getTrackSelectionParameters()
      + *         .buildUpon()
        *         .setMaxVideoSizeSd()
      - *         .setPreferredAudioLanguage("deu"));
- * }</pre>
      - * - * Selection {@link Parameters} support many different options, some of which are described below. - * - *

- * <h3>Selecting specific tracks</h3>

      - * - * Track selection overrides can be used to select specific tracks. To specify an override for a - * renderer, it's first necessary to obtain the tracks that have been mapped to it: - * - *
- * <pre>{@code
      - * MappedTrackInfo mappedTrackInfo = trackSelector.getCurrentMappedTrackInfo();
      - * TrackGroupArray rendererTrackGroups = mappedTrackInfo == null ? null
      - *     : mappedTrackInfo.getTrackGroups(rendererIndex);
- * }</pre>
      - * - * If {@code rendererTrackGroups} is null then there aren't any currently mapped tracks, and so - * setting an override isn't possible. Note that a {@link Player.EventListener} registered on the - * player can be used to determine when the current tracks (and therefore the mapping) changes. If - * {@code rendererTrackGroups} is non-null then an override can be set. The next step is to query - * the properties of the available tracks to determine the {@code groupIndex} and the {@code - * trackIndices} within the group it that should be selected. The override can then be specified - * using {@link ParametersBuilder#setSelectionOverride}: - * - *
- * <pre>{@code
      - * SelectionOverride selectionOverride = new SelectionOverride(groupIndex, trackIndices);
      - * trackSelector.setParameters(
      - *     trackSelector
      - *         .buildUponParameters()
      - *         .setSelectionOverride(rendererIndex, rendererTrackGroups, selectionOverride));
      + *         .setPreferredAudioLanguage("de")
      + *         .build());
 * }</pre>
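To make the now-deprecated override workflow above concrete, here is a hedged sketch of the "determine the groupIndex and trackIndices" step that the removed Javadoc describes but does not show. The variables rendererTrackGroups, rendererIndex and trackSelector come from the example above; the 720p criterion is illustrative only:

int groupIndex = C.INDEX_UNSET;
int trackIndex = C.INDEX_UNSET;
for (int i = 0; i < rendererTrackGroups.length && groupIndex == C.INDEX_UNSET; i++) {
  TrackGroup group = rendererTrackGroups.get(i);
  for (int j = 0; j < group.length; j++) {
    Format format = group.getFormat(j);
    // Pick the first track that is SD or below; any other criterion works the same way.
    if (format.height != Format.NO_VALUE && format.height <= 720) {
      groupIndex = i;
      trackIndex = j;
      break;
    }
  }
}
if (groupIndex != C.INDEX_UNSET) {
  SelectionOverride override = new SelectionOverride(groupIndex, trackIndex);
  trackSelector.setParameters(
      trackSelector
          .buildUponParameters()
          .setSelectionOverride(rendererIndex, rendererTrackGroups, override));
}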
      * - *

- * <h3>Constraint based track selection</h3>

      - * - * Whilst track selection overrides make it possible to select specific tracks, the recommended way - * of controlling which tracks are selected is by specifying constraints. For example consider the - * case of wanting to restrict video track selections to SD, and preferring German audio tracks. - * Track selection overrides could be used to select specific tracks meeting these criteria, however - * a simpler and more flexible approach is to specify these constraints directly: + * Some specialized parameters are only available in the extended {@link Parameters} class, which + * can be retrieved and modified in a similar way by calling methods directly on this class: * *
 * <pre>{@code
      - * trackSelector.setParameters(
      - *     trackSelector
      - *         .buildUponParameters()
      - *         .setMaxVideoSizeSd()
      - *         .setPreferredAudioLanguage("deu"));
      + * defaultTrackSelector.setParameters(
      + *     defaultTrackSelector.getParameters()
      + *         .buildUpon()
      + *         .setTunnelingEnabled(true)
      + *         .build());
 * }</pre>
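As a supplement to the example above, a sketch of how the selector might be constructed with a Context, handed to the player, and later given tighter extended Parameters. The ExoPlayer.Builder wiring is an assumption about the surrounding app code rather than anything defined in this class:

DefaultTrackSelector trackSelector = new DefaultTrackSelector(context);
ExoPlayer player = new ExoPlayer.Builder(context).setTrackSelector(trackSelector).build();

// Later: adjust selector-specific options that TrackSelectionParameters does not expose.
trackSelector.setParameters(
    trackSelector
        .getParameters()
        .buildUpon()
        .setTunnelingEnabled(true)
        .setExceedRendererCapabilitiesIfNecessary(false)
        .build());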
      - * - * There are several benefits to using constraint based track selection instead of specific track - * overrides: - * - *
- * <ul>
- *   <li>You can specify constraints before knowing what tracks the media provides. This can
- *       simplify track selection code (e.g. you don't have to listen for changes in the available
- *       tracks before configuring the selector).
- *   <li>Constraints can be applied consistently across all periods in a complex piece of media,
- *       even if those periods contain different tracks. In contrast, a specific track override is
- *       only applied to periods whose tracks match those for which the override was set.
- * </ul>
- *

- * <h3>Disabling renderers</h3>

      - * - * Renderers can be disabled using {@link ParametersBuilder#setRendererDisabled}. Disabling a - * renderer differs from setting a {@code null} override because the renderer is disabled - * unconditionally, whereas a {@code null} override is applied only when the track groups available - * to the renderer match the {@link TrackGroupArray} for which it was specified. - * - *

- * <h3>Tunneling</h3>

      - * - * Tunneled playback can be enabled in cases where the combination of renderers and selected tracks - * support it. Tunneled playback is enabled by passing an audio session ID to {@link - * ParametersBuilder#setTunnelingAudioSessionId(int)}. */ public class DefaultTrackSelector extends MappingTrackSelector { + private static final String TAG = "DefaultTrackSelector"; + private static final String AUDIO_CHANNEL_COUNT_CONSTRAINTS_WARN_MESSAGE = + "Audio channel count constraints cannot be applied without reference to Context. Build the" + + " track selector instance with one of the non-deprecated constructors that take a" + + " Context argument."; + /** - * A builder for {@link Parameters}. See the {@link Parameters} documentation for explanations of - * the parameters that can be configured using this builder. + * @deprecated Use {@link Parameters.Builder} instead. */ + @Deprecated public static final class ParametersBuilder extends TrackSelectionParameters.Builder { - // Video - private int maxVideoWidth; - private int maxVideoHeight; - private int maxVideoFrameRate; - private int maxVideoBitrate; - private boolean exceedVideoConstraintsIfNecessary; - private boolean allowVideoMixedMimeTypeAdaptiveness; - private boolean allowVideoNonSeamlessAdaptiveness; - private int viewportWidth; - private int viewportHeight; - private boolean viewportOrientationMayChange; - // Audio - private int maxAudioChannelCount; - private int maxAudioBitrate; - private boolean exceedAudioConstraintsIfNecessary; - private boolean allowAudioMixedMimeTypeAdaptiveness; - private boolean allowAudioMixedSampleRateAdaptiveness; - private boolean allowAudioMixedChannelCountAdaptiveness; - // General - private boolean forceLowestBitrate; - private boolean forceHighestSupportedBitrate; - private boolean exceedRendererCapabilitiesIfNecessary; - private int tunnelingAudioSessionId; - - private final SparseArray> - selectionOverrides; - private final SparseBooleanArray rendererDisabledFlags; + private final Parameters.Builder delegate; /** * @deprecated {@link Context} constraints will not be set using this constructor. Use {@link @@ -198,10 +130,7 @@ public static final class ParametersBuilder extends TrackSelectionParameters.Bui @Deprecated @SuppressWarnings({"deprecation"}) public ParametersBuilder() { - super(); - setInitialValuesWithoutContext(); - selectionOverrides = new SparseArray<>(); - rendererDisabledFlags = new SparseBooleanArray(); + delegate = new Parameters.Builder(); } /** @@ -209,116 +138,89 @@ public ParametersBuilder() { * * @param context Any context. */ - public ParametersBuilder(Context context) { - super(context); - setInitialValuesWithoutContext(); - selectionOverrides = new SparseArray<>(); - rendererDisabledFlags = new SparseBooleanArray(); - setViewportSizeToPhysicalDisplaySize(context, /* viewportOrientationMayChange= */ true); + delegate = new Parameters.Builder(context); } - /** - * @param initialValues The {@link Parameters} from which the initial values of the builder are - * obtained. 
- */ - private ParametersBuilder(Parameters initialValues) { - super(initialValues); - // Video - maxVideoWidth = initialValues.maxVideoWidth; - maxVideoHeight = initialValues.maxVideoHeight; - maxVideoFrameRate = initialValues.maxVideoFrameRate; - maxVideoBitrate = initialValues.maxVideoBitrate; - exceedVideoConstraintsIfNecessary = initialValues.exceedVideoConstraintsIfNecessary; - allowVideoMixedMimeTypeAdaptiveness = initialValues.allowVideoMixedMimeTypeAdaptiveness; - allowVideoNonSeamlessAdaptiveness = initialValues.allowVideoNonSeamlessAdaptiveness; - viewportWidth = initialValues.viewportWidth; - viewportHeight = initialValues.viewportHeight; - viewportOrientationMayChange = initialValues.viewportOrientationMayChange; - // Audio - maxAudioChannelCount = initialValues.maxAudioChannelCount; - maxAudioBitrate = initialValues.maxAudioBitrate; - exceedAudioConstraintsIfNecessary = initialValues.exceedAudioConstraintsIfNecessary; - allowAudioMixedMimeTypeAdaptiveness = initialValues.allowAudioMixedMimeTypeAdaptiveness; - allowAudioMixedSampleRateAdaptiveness = initialValues.allowAudioMixedSampleRateAdaptiveness; - allowAudioMixedChannelCountAdaptiveness = - initialValues.allowAudioMixedChannelCountAdaptiveness; - // General - forceLowestBitrate = initialValues.forceLowestBitrate; - forceHighestSupportedBitrate = initialValues.forceHighestSupportedBitrate; - exceedRendererCapabilitiesIfNecessary = initialValues.exceedRendererCapabilitiesIfNecessary; - tunnelingAudioSessionId = initialValues.tunnelingAudioSessionId; - // Overrides - selectionOverrides = cloneSelectionOverrides(initialValues.selectionOverrides); - rendererDisabledFlags = initialValues.rendererDisabledFlags.clone(); + @CanIgnoreReturnValue + @Override + protected ParametersBuilder set(TrackSelectionParameters parameters) { + delegate.set(parameters); + return this; } // Video - /** - * Equivalent to {@link #setMaxVideoSize setMaxVideoSize(1279, 719)}. - * - * @return This builder. - */ - public ParametersBuilder setMaxVideoSizeSd() { - return setMaxVideoSize(1279, 719); + @CanIgnoreReturnValue + @Override + public DefaultTrackSelector.ParametersBuilder setMaxVideoSizeSd() { + delegate.setMaxVideoSizeSd(); + return this; } - /** - * Equivalent to {@link #setMaxVideoSize setMaxVideoSize(Integer.MAX_VALUE, Integer.MAX_VALUE)}. - * - * @return This builder. - */ - public ParametersBuilder clearVideoSizeConstraints() { - return setMaxVideoSize(Integer.MAX_VALUE, Integer.MAX_VALUE); + @CanIgnoreReturnValue + @Override + public DefaultTrackSelector.ParametersBuilder clearVideoSizeConstraints() { + delegate.clearVideoSizeConstraints(); + return this; } - /** - * Sets the maximum allowed video width and height. - * - * @param maxVideoWidth Maximum allowed video width in pixels. - * @param maxVideoHeight Maximum allowed video height in pixels. - * @return This builder. - */ - public ParametersBuilder setMaxVideoSize(int maxVideoWidth, int maxVideoHeight) { - this.maxVideoWidth = maxVideoWidth; - this.maxVideoHeight = maxVideoHeight; + @CanIgnoreReturnValue + @Override + public DefaultTrackSelector.ParametersBuilder setMaxVideoSize( + int maxVideoWidth, int maxVideoHeight) { + delegate.setMaxVideoSize(maxVideoWidth, maxVideoHeight); return this; } - /** - * Sets the maximum allowed video frame rate. - * - * @param maxVideoFrameRate Maximum allowed video frame rate in hertz. - * @return This builder. 
- */ - public ParametersBuilder setMaxVideoFrameRate(int maxVideoFrameRate) { - this.maxVideoFrameRate = maxVideoFrameRate; + @CanIgnoreReturnValue + @Override + public DefaultTrackSelector.ParametersBuilder setMaxVideoFrameRate(int maxVideoFrameRate) { + delegate.setMaxVideoFrameRate(maxVideoFrameRate); return this; } - /** - * Sets the maximum allowed video bitrate. - * - * @param maxVideoBitrate Maximum allowed video bitrate in bits per second. - * @return This builder. - */ - public ParametersBuilder setMaxVideoBitrate(int maxVideoBitrate) { - this.maxVideoBitrate = maxVideoBitrate; + @CanIgnoreReturnValue + @Override + public DefaultTrackSelector.ParametersBuilder setMaxVideoBitrate(int maxVideoBitrate) { + delegate.setMaxVideoBitrate(maxVideoBitrate); + return this; + } + + @CanIgnoreReturnValue + @Override + public DefaultTrackSelector.ParametersBuilder setMinVideoSize( + int minVideoWidth, int minVideoHeight) { + delegate.setMinVideoSize(minVideoWidth, minVideoHeight); + return this; + } + + @CanIgnoreReturnValue + @Override + public DefaultTrackSelector.ParametersBuilder setMinVideoFrameRate(int minVideoFrameRate) { + delegate.setMinVideoFrameRate(minVideoFrameRate); + return this; + } + + @CanIgnoreReturnValue + @Override + public DefaultTrackSelector.ParametersBuilder setMinVideoBitrate(int minVideoBitrate) { + delegate.setMinVideoBitrate(minVideoBitrate); return this; } /** - * Sets whether to exceed the {@link #setMaxVideoSize(int, int)} and {@link - * #setMaxAudioBitrate(int)} constraints when no selection can be made otherwise. + * Sets whether to exceed the {@link #setMaxVideoBitrate}, {@link #setMaxVideoSize(int, int)} + * and {@link #setMaxVideoFrameRate} constraints when no selection can be made otherwise. * * @param exceedVideoConstraintsIfNecessary Whether to exceed video constraints when no * selection can be made otherwise. * @return This builder. */ + @CanIgnoreReturnValue public ParametersBuilder setExceedVideoConstraintsIfNecessary( boolean exceedVideoConstraintsIfNecessary) { - this.exceedVideoConstraintsIfNecessary = exceedVideoConstraintsIfNecessary; + delegate.setExceedVideoConstraintsIfNecessary(exceedVideoConstraintsIfNecessary); return this; } @@ -333,9 +235,10 @@ public ParametersBuilder setExceedVideoConstraintsIfNecessary( * containing mixed MIME types. * @return This builder. */ + @CanIgnoreReturnValue public ParametersBuilder setAllowVideoMixedMimeTypeAdaptiveness( boolean allowVideoMixedMimeTypeAdaptiveness) { - this.allowVideoMixedMimeTypeAdaptiveness = allowVideoMixedMimeTypeAdaptiveness; + delegate.setAllowVideoMixedMimeTypeAdaptiveness(allowVideoMixedMimeTypeAdaptiveness); return this; } @@ -347,83 +250,109 @@ public ParametersBuilder setAllowVideoMixedMimeTypeAdaptiveness( * adaptation may not be completely seamless. * @return This builder. */ + @CanIgnoreReturnValue public ParametersBuilder setAllowVideoNonSeamlessAdaptiveness( boolean allowVideoNonSeamlessAdaptiveness) { - this.allowVideoNonSeamlessAdaptiveness = allowVideoNonSeamlessAdaptiveness; + delegate.setAllowVideoNonSeamlessAdaptiveness(allowVideoNonSeamlessAdaptiveness); return this; } /** - * Equivalent to calling {@link #setViewportSize(int, int, boolean)} with the viewport size - * obtained from {@link Util#getCurrentDisplayModeSize(Context)}. + * Sets whether to allow adaptive video selections with mixed levels of {@link + * RendererCapabilities.DecoderSupport} and {@link + * RendererCapabilities.HardwareAccelerationSupport}. * - * @param context Any context. 
- * @param viewportOrientationMayChange Whether the viewport orientation may change during - * playback. + * @param allowVideoMixedDecoderSupportAdaptiveness Whether to allow adaptive video selections + * with mixed levels of decoder and hardware acceleration support. * @return This builder. */ + @CanIgnoreReturnValue + public ParametersBuilder setAllowVideoMixedDecoderSupportAdaptiveness( + boolean allowVideoMixedDecoderSupportAdaptiveness) { + delegate.setAllowVideoMixedDecoderSupportAdaptiveness( + allowVideoMixedDecoderSupportAdaptiveness); + return this; + } + + @CanIgnoreReturnValue + @Override public ParametersBuilder setViewportSizeToPhysicalDisplaySize( Context context, boolean viewportOrientationMayChange) { - // Assume the viewport is fullscreen. - Point viewportSize = Util.getCurrentDisplayModeSize(context); - return setViewportSize(viewportSize.x, viewportSize.y, viewportOrientationMayChange); + delegate.setViewportSizeToPhysicalDisplaySize(context, viewportOrientationMayChange); + return this; } - /** - * Equivalent to {@link #setViewportSize setViewportSize(Integer.MAX_VALUE, Integer.MAX_VALUE, - * true)}. - * - * @return This builder. - */ + @CanIgnoreReturnValue + @Override public ParametersBuilder clearViewportSizeConstraints() { - return setViewportSize(Integer.MAX_VALUE, Integer.MAX_VALUE, true); + delegate.clearViewportSizeConstraints(); + return this; } - /** - * Sets the viewport size to constrain adaptive video selections so that only tracks suitable - * for the viewport are selected. - * - * @param viewportWidth Viewport width in pixels. - * @param viewportHeight Viewport height in pixels. - * @param viewportOrientationMayChange Whether the viewport orientation may change during - * playback. - * @return This builder. - */ + @CanIgnoreReturnValue + @Override public ParametersBuilder setViewportSize( int viewportWidth, int viewportHeight, boolean viewportOrientationMayChange) { - this.viewportWidth = viewportWidth; - this.viewportHeight = viewportHeight; - this.viewportOrientationMayChange = viewportOrientationMayChange; + delegate.setViewportSize(viewportWidth, viewportHeight, viewportOrientationMayChange); + return this; + } + + @CanIgnoreReturnValue + @Override + public ParametersBuilder setPreferredVideoMimeType(@Nullable String mimeType) { + delegate.setPreferredVideoMimeType(mimeType); + return this; + } + + @CanIgnoreReturnValue + @Override + public ParametersBuilder setPreferredVideoMimeTypes(String... mimeTypes) { + delegate.setPreferredVideoMimeTypes(mimeTypes); + return this; + } + + @CanIgnoreReturnValue + @Override + public DefaultTrackSelector.ParametersBuilder setPreferredVideoRoleFlags( + @RoleFlags int preferredVideoRoleFlags) { + delegate.setPreferredVideoRoleFlags(preferredVideoRoleFlags); return this; } // Audio + @CanIgnoreReturnValue @Override public ParametersBuilder setPreferredAudioLanguage(@Nullable String preferredAudioLanguage) { - super.setPreferredAudioLanguage(preferredAudioLanguage); + delegate.setPreferredAudioLanguage(preferredAudioLanguage); return this; } - /** - * Sets the maximum allowed audio channel count. - * - * @param maxAudioChannelCount Maximum allowed audio channel count. - * @return This builder. - */ + @CanIgnoreReturnValue + @Override + public ParametersBuilder setPreferredAudioLanguages(String... 
preferredAudioLanguages) { + delegate.setPreferredAudioLanguages(preferredAudioLanguages); + return this; + } + + @CanIgnoreReturnValue + @Override + public ParametersBuilder setPreferredAudioRoleFlags(@C.RoleFlags int preferredAudioRoleFlags) { + delegate.setPreferredAudioRoleFlags(preferredAudioRoleFlags); + return this; + } + + @CanIgnoreReturnValue + @Override public ParametersBuilder setMaxAudioChannelCount(int maxAudioChannelCount) { - this.maxAudioChannelCount = maxAudioChannelCount; + delegate.setMaxAudioChannelCount(maxAudioChannelCount); return this; } - /** - * Sets the maximum allowed audio bitrate. - * - * @param maxAudioBitrate Maximum allowed audio bitrate in bits per second. - * @return This builder. - */ + @CanIgnoreReturnValue + @Override public ParametersBuilder setMaxAudioBitrate(int maxAudioBitrate) { - this.maxAudioBitrate = maxAudioBitrate; + delegate.setMaxAudioBitrate(maxAudioBitrate); return this; } @@ -435,9 +364,10 @@ public ParametersBuilder setMaxAudioBitrate(int maxAudioBitrate) { * selection can be made otherwise. * @return This builder. */ + @CanIgnoreReturnValue public ParametersBuilder setExceedAudioConstraintsIfNecessary( boolean exceedAudioConstraintsIfNecessary) { - this.exceedAudioConstraintsIfNecessary = exceedAudioConstraintsIfNecessary; + delegate.setExceedAudioConstraintsIfNecessary(exceedAudioConstraintsIfNecessary); return this; } @@ -450,9 +380,10 @@ public ParametersBuilder setExceedAudioConstraintsIfNecessary( * containing mixed MIME types. * @return This builder. */ + @CanIgnoreReturnValue public ParametersBuilder setAllowAudioMixedMimeTypeAdaptiveness( boolean allowAudioMixedMimeTypeAdaptiveness) { - this.allowAudioMixedMimeTypeAdaptiveness = allowAudioMixedMimeTypeAdaptiveness; + delegate.setAllowAudioMixedMimeTypeAdaptiveness(allowAudioMixedMimeTypeAdaptiveness); return this; } @@ -465,9 +396,10 @@ public ParametersBuilder setAllowAudioMixedMimeTypeAdaptiveness( * containing mixed sample rates. * @return This builder. */ + @CanIgnoreReturnValue public ParametersBuilder setAllowAudioMixedSampleRateAdaptiveness( boolean allowAudioMixedSampleRateAdaptiveness) { - this.allowAudioMixedSampleRateAdaptiveness = allowAudioMixedSampleRateAdaptiveness; + delegate.setAllowAudioMixedSampleRateAdaptiveness(allowAudioMixedSampleRateAdaptiveness); return this; } @@ -480,90 +412,170 @@ public ParametersBuilder setAllowAudioMixedSampleRateAdaptiveness( * containing mixed channel counts. * @return This builder. */ + @CanIgnoreReturnValue public ParametersBuilder setAllowAudioMixedChannelCountAdaptiveness( boolean allowAudioMixedChannelCountAdaptiveness) { - this.allowAudioMixedChannelCountAdaptiveness = allowAudioMixedChannelCountAdaptiveness; + delegate.setAllowAudioMixedChannelCountAdaptiveness(allowAudioMixedChannelCountAdaptiveness); + return this; + } + + /** + * Sets whether to allow adaptive audio selections with mixed levels of {@link + * RendererCapabilities.DecoderSupport} and {@link + * RendererCapabilities.HardwareAccelerationSupport}. + * + * @param allowAudioMixedDecoderSupportAdaptiveness Whether to allow adaptive audio selections + * with mixed levels of decoder and hardware acceleration support. + * @return This builder. 
+ */ + @CanIgnoreReturnValue + public ParametersBuilder setAllowAudioMixedDecoderSupportAdaptiveness( + boolean allowAudioMixedDecoderSupportAdaptiveness) { + delegate.setAllowAudioMixedDecoderSupportAdaptiveness( + allowAudioMixedDecoderSupportAdaptiveness); + return this; + } + + @CanIgnoreReturnValue + @Override + public ParametersBuilder setPreferredAudioMimeType(@Nullable String mimeType) { + delegate.setPreferredAudioMimeType(mimeType); + return this; + } + + @CanIgnoreReturnValue + @Override + public ParametersBuilder setPreferredAudioMimeTypes(String... mimeTypes) { + delegate.setPreferredAudioMimeTypes(mimeTypes); return this; } // Text + @CanIgnoreReturnValue @Override public ParametersBuilder setPreferredTextLanguageAndRoleFlagsToCaptioningManagerSettings( Context context) { - super.setPreferredTextLanguageAndRoleFlagsToCaptioningManagerSettings(context); + delegate.setPreferredTextLanguageAndRoleFlagsToCaptioningManagerSettings(context); return this; } + @CanIgnoreReturnValue @Override public ParametersBuilder setPreferredTextLanguage(@Nullable String preferredTextLanguage) { - super.setPreferredTextLanguage(preferredTextLanguage); + delegate.setPreferredTextLanguage(preferredTextLanguage); + return this; + } + + @CanIgnoreReturnValue + @Override + public ParametersBuilder setPreferredTextLanguages(String... preferredTextLanguages) { + delegate.setPreferredTextLanguages(preferredTextLanguages); return this; } + @CanIgnoreReturnValue @Override public ParametersBuilder setPreferredTextRoleFlags(@C.RoleFlags int preferredTextRoleFlags) { - super.setPreferredTextRoleFlags(preferredTextRoleFlags); + delegate.setPreferredTextRoleFlags(preferredTextRoleFlags); + return this; + } + + @CanIgnoreReturnValue + @Override + public ParametersBuilder setIgnoredTextSelectionFlags( + @C.SelectionFlags int ignoredTextSelectionFlags) { + delegate.setIgnoredTextSelectionFlags(ignoredTextSelectionFlags); return this; } + @CanIgnoreReturnValue @Override public ParametersBuilder setSelectUndeterminedTextLanguage( boolean selectUndeterminedTextLanguage) { - super.setSelectUndeterminedTextLanguage(selectUndeterminedTextLanguage); + delegate.setSelectUndeterminedTextLanguage(selectUndeterminedTextLanguage); return this; } - @Override + /** + * @deprecated Use {@link #setIgnoredTextSelectionFlags}. + */ + @CanIgnoreReturnValue + @Deprecated public ParametersBuilder setDisabledTextTrackSelectionFlags( @C.SelectionFlags int disabledTextTrackSelectionFlags) { - super.setDisabledTextTrackSelectionFlags(disabledTextTrackSelectionFlags); + delegate.setDisabledTextTrackSelectionFlags(disabledTextTrackSelectionFlags); return this; } // General - /** - * Sets whether to force selection of the single lowest bitrate audio and video tracks that - * comply with all other constraints. - * - * @param forceLowestBitrate Whether to force selection of the single lowest bitrate audio and - * video tracks. - * @return This builder. - */ + @CanIgnoreReturnValue + @Override public ParametersBuilder setForceLowestBitrate(boolean forceLowestBitrate) { - this.forceLowestBitrate = forceLowestBitrate; + delegate.setForceLowestBitrate(forceLowestBitrate); return this; } - /** - * Sets whether to force selection of the highest bitrate audio and video tracks that comply - * with all other constraints. - * - * @param forceHighestSupportedBitrate Whether to force selection of the highest bitrate audio - * and video tracks. - * @return This builder. 
- */ + @CanIgnoreReturnValue + @Override public ParametersBuilder setForceHighestSupportedBitrate(boolean forceHighestSupportedBitrate) { - this.forceHighestSupportedBitrate = forceHighestSupportedBitrate; + delegate.setForceHighestSupportedBitrate(forceHighestSupportedBitrate); + return this; + } + + @CanIgnoreReturnValue + @Override + public ParametersBuilder addOverride(TrackSelectionOverride override) { + delegate.addOverride(override); + return this; + } + + @CanIgnoreReturnValue + @Override + public ParametersBuilder clearOverride(TrackGroup trackGroup) { + delegate.clearOverride(trackGroup); + return this; + } + + @CanIgnoreReturnValue + @Override + public ParametersBuilder setOverrideForType(TrackSelectionOverride override) { + delegate.setOverrideForType(override); + return this; + } + + @CanIgnoreReturnValue + @Override + public ParametersBuilder clearOverridesOfType(@C.TrackType int trackType) { + delegate.clearOverridesOfType(trackType); + return this; + } + + @CanIgnoreReturnValue + @Override + public ParametersBuilder clearOverrides() { + delegate.clearOverrides(); return this; } /** - * @deprecated Use {@link #setAllowVideoMixedMimeTypeAdaptiveness(boolean)} and {@link - * #setAllowAudioMixedMimeTypeAdaptiveness(boolean)}. + * @deprecated Use {@link #setTrackTypeDisabled(int, boolean)}. */ + @CanIgnoreReturnValue + @Override @Deprecated - public ParametersBuilder setAllowMixedMimeAdaptiveness(boolean allowMixedMimeAdaptiveness) { - setAllowAudioMixedMimeTypeAdaptiveness(allowMixedMimeAdaptiveness); - setAllowVideoMixedMimeTypeAdaptiveness(allowMixedMimeAdaptiveness); + @SuppressWarnings("deprecation") + public ParametersBuilder setDisabledTrackTypes(Set<@C.TrackType Integer> disabledTrackTypes) { + delegate.setDisabledTrackTypes(disabledTrackTypes); return this; } - /** @deprecated Use {@link #setAllowVideoNonSeamlessAdaptiveness(boolean)} */ - @Deprecated - public ParametersBuilder setAllowNonSeamlessAdaptiveness(boolean allowNonSeamlessAdaptiveness) { - return setAllowVideoNonSeamlessAdaptiveness(allowNonSeamlessAdaptiveness); + @CanIgnoreReturnValue + @Override + public ParametersBuilder setTrackTypeDisabled(@C.TrackType int trackType, boolean disabled) { + delegate.setTrackTypeDisabled(trackType, disabled); + return this; } /** @@ -578,27 +590,43 @@ public ParametersBuilder setAllowNonSeamlessAdaptiveness(boolean allowNonSeamles * selection can be made otherwise. * @return This builder. */ + @CanIgnoreReturnValue public ParametersBuilder setExceedRendererCapabilitiesIfNecessary( boolean exceedRendererCapabilitiesIfNecessary) { - this.exceedRendererCapabilitiesIfNecessary = exceedRendererCapabilitiesIfNecessary; + delegate.setExceedRendererCapabilitiesIfNecessary(exceedRendererCapabilitiesIfNecessary); return this; } /** - * Sets the audio session id to use when tunneling. - * - *

- * <p>Enables or disables tunneling. To enable tunneling, pass an audio session id to use when
- * in tunneling mode. Session ids can be generated using {@link
- * C#generateAudioSessionIdV21(Context)}. To disable tunneling pass {@link
- * C#AUDIO_SESSION_ID_UNSET}. Tunneling will only be activated if it's both enabled and
+ * Sets whether to enable tunneling if possible. Tunneling will only be enabled if it's
  * supported by the audio and video renderers for the selected tracks.
  *
- * @param tunnelingAudioSessionId The audio session id to use when tunneling, or {@link
- *     C#AUDIO_SESSION_ID_UNSET} to disable tunneling.
+ * <p>
      Tunneling is known to have many device specific issues and limitations. Manual testing is + * strongly recommended to check that the media plays correctly when this option is enabled. See + * [#9661](https://github.com/google/ExoPlayer/issues/9661), + * [#9133](https://github.com/google/ExoPlayer/issues/9133), + * [#9317](https://github.com/google/ExoPlayer/issues/9317), + * [#9502](https://github.com/google/ExoPlayer/issues/9502). + * + * @param tunnelingEnabled Whether to enable tunneling if possible. * @return This builder. */ - public ParametersBuilder setTunnelingAudioSessionId(int tunnelingAudioSessionId) { - this.tunnelingAudioSessionId = tunnelingAudioSessionId; + @CanIgnoreReturnValue + public ParametersBuilder setTunnelingEnabled(boolean tunnelingEnabled) { + delegate.setTunnelingEnabled(tunnelingEnabled); + return this; + } + + /** + * Sets whether multiple adaptive selections with more than one track are allowed. + * + * @param allowMultipleAdaptiveSelections Whether multiple adaptive selections are allowed. + * @return This builder. + */ + @CanIgnoreReturnValue + public ParametersBuilder setAllowMultipleAdaptiveSelections( + boolean allowMultipleAdaptiveSelections) { + delegate.setAllowMultipleAdaptiveSelections(allowMultipleAdaptiveSelections); return this; } @@ -612,17 +640,9 @@ public ParametersBuilder setTunnelingAudioSessionId(int tunnelingAudioSessionId) * @param disabled Whether the renderer is disabled. * @return This builder. */ - public final ParametersBuilder setRendererDisabled(int rendererIndex, boolean disabled) { - if (rendererDisabledFlags.get(rendererIndex) == disabled) { - // The disabled flag is unchanged. - return this; - } - // Only true values are placed in the array to make it easier to check for equality. - if (disabled) { - rendererDisabledFlags.put(rendererIndex, true); - } else { - rendererDisabledFlags.delete(rendererIndex); - } + @CanIgnoreReturnValue + public ParametersBuilder setRendererDisabled(int rendererIndex, boolean disabled) { + delegate.setRendererDisabled(rendererIndex, disabled); return this; } @@ -648,20 +668,13 @@ public final ParametersBuilder setRendererDisabled(int rendererIndex, boolean di * @param groups The {@link TrackGroupArray} for which the override should be applied. * @param override The override. * @return This builder. + * @deprecated Use {@link TrackSelectionParameters.Builder#addOverride(TrackSelectionOverride)}. */ - public final ParametersBuilder setSelectionOverride( + @CanIgnoreReturnValue + @Deprecated + public ParametersBuilder setSelectionOverride( int rendererIndex, TrackGroupArray groups, @Nullable SelectionOverride override) { - Map overrides = - selectionOverrides.get(rendererIndex); - if (overrides == null) { - overrides = new HashMap<>(); - selectionOverrides.put(rendererIndex, overrides); - } - if (overrides.containsKey(groups) && Util.areEqual(overrides.get(groups), override)) { - // The override is unchanged. - return this; - } - overrides.put(groups, override); + delegate.setSelectionOverride(rendererIndex, groups, override); return this; } @@ -671,19 +684,12 @@ public final ParametersBuilder setSelectionOverride( * @param rendererIndex The renderer index. * @param groups The {@link TrackGroupArray} for which the override should be cleared. * @return This builder. + * @deprecated Use {@link TrackSelectionParameters.Builder#clearOverride(TrackGroup)}. 
*/ - public final ParametersBuilder clearSelectionOverride( - int rendererIndex, TrackGroupArray groups) { - Map overrides = - selectionOverrides.get(rendererIndex); - if (overrides == null || !overrides.containsKey(groups)) { - // Nothing to clear. - return this; - } - overrides.remove(groups); - if (overrides.isEmpty()) { - selectionOverrides.remove(rendererIndex); - } + @CanIgnoreReturnValue + @Deprecated + public ParametersBuilder clearSelectionOverride(int rendererIndex, TrackGroupArray groups) { + delegate.clearSelectionOverride(rendererIndex, groups); return this; } @@ -692,15 +698,12 @@ public final ParametersBuilder clearSelectionOverride( * * @param rendererIndex The renderer index. * @return This builder. + * @deprecated Use {@link TrackSelectionParameters.Builder#clearOverridesOfType(int)}. */ - public final ParametersBuilder clearSelectionOverrides(int rendererIndex) { - Map overrides = - selectionOverrides.get(rendererIndex); - if (overrides == null || overrides.isEmpty()) { - // Nothing to clear. - return this; - } - selectionOverrides.remove(rendererIndex); + @CanIgnoreReturnValue + @Deprecated + public ParametersBuilder clearSelectionOverrides(int rendererIndex) { + delegate.clearSelectionOverrides(rendererIndex); return this; } @@ -708,164 +711,926 @@ public final ParametersBuilder clearSelectionOverrides(int rendererIndex) { * Clears all track selection overrides for all renderers. * * @return This builder. + * @deprecated Use {@link TrackSelectionParameters.Builder#clearOverrides()}. */ - public final ParametersBuilder clearSelectionOverrides() { - if (selectionOverrides.size() == 0) { - // Nothing to clear. - return this; - } - selectionOverrides.clear(); + @CanIgnoreReturnValue + @Deprecated + public ParametersBuilder clearSelectionOverrides() { + delegate.clearSelectionOverrides(); return this; } - /** - * Builds a {@link Parameters} instance with the selected values. - */ + /** Builds a {@link Parameters} instance with the selected values. 
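The deprecation notes above all point at the TrackSelectionParameters override API. A hedged sketch of that replacement flow, using the player-level Tracks and TrackSelectionOverride types as they exist in the 2.18-era API; adapt the names to the actual player code:

for (Tracks.Group group : player.getCurrentTracks().getGroups()) {
  // Override to the first supported track of the first video group; the criterion is illustrative.
  if (group.getType() == C.TRACK_TYPE_VIDEO && group.isTrackSupported(/* trackIndex= */ 0)) {
    player.setTrackSelectionParameters(
        player
            .getTrackSelectionParameters()
            .buildUpon()
            .addOverride(
                new TrackSelectionOverride(group.getMediaTrackGroup(), /* trackIndex= */ 0))
            .build());
    break;
  }
}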
*/ + @Override public Parameters build() { - return new Parameters( - // Video - maxVideoWidth, - maxVideoHeight, - maxVideoFrameRate, - maxVideoBitrate, - exceedVideoConstraintsIfNecessary, - allowVideoMixedMimeTypeAdaptiveness, - allowVideoNonSeamlessAdaptiveness, - viewportWidth, - viewportHeight, - viewportOrientationMayChange, - // Audio - preferredAudioLanguage, - maxAudioChannelCount, - maxAudioBitrate, - exceedAudioConstraintsIfNecessary, - allowAudioMixedMimeTypeAdaptiveness, - allowAudioMixedSampleRateAdaptiveness, - allowAudioMixedChannelCountAdaptiveness, - // Text - preferredTextLanguage, - preferredTextRoleFlags, - selectUndeterminedTextLanguage, - disabledTextTrackSelectionFlags, - // General - forceLowestBitrate, - forceHighestSupportedBitrate, - exceedRendererCapabilitiesIfNecessary, - tunnelingAudioSessionId, - selectionOverrides, - rendererDisabledFlags); - } - - private void setInitialValuesWithoutContext(@UnderInitialization ParametersBuilder this) { - // Video - maxVideoWidth = Integer.MAX_VALUE; - maxVideoHeight = Integer.MAX_VALUE; - maxVideoFrameRate = Integer.MAX_VALUE; - maxVideoBitrate = Integer.MAX_VALUE; - exceedVideoConstraintsIfNecessary = true; - allowVideoMixedMimeTypeAdaptiveness = false; - allowVideoNonSeamlessAdaptiveness = true; - viewportWidth = Integer.MAX_VALUE; - viewportHeight = Integer.MAX_VALUE; - viewportOrientationMayChange = true; - // Audio - maxAudioChannelCount = Integer.MAX_VALUE; - maxAudioBitrate = Integer.MAX_VALUE; - exceedAudioConstraintsIfNecessary = true; - allowAudioMixedMimeTypeAdaptiveness = false; - allowAudioMixedSampleRateAdaptiveness = false; - allowAudioMixedChannelCountAdaptiveness = false; - // General - forceLowestBitrate = false; - forceHighestSupportedBitrate = false; - exceedRendererCapabilitiesIfNecessary = true; - tunnelingAudioSessionId = C.AUDIO_SESSION_ID_UNSET; - } - - private static SparseArray> - cloneSelectionOverrides( - SparseArray> selectionOverrides) { - SparseArray> clone = - new SparseArray<>(); - for (int i = 0; i < selectionOverrides.size(); i++) { - clone.put(selectionOverrides.keyAt(i), new HashMap<>(selectionOverrides.valueAt(i))); - } - return clone; + return delegate.build(); } } /** - * Extends {@link TrackSelectionParameters} by adding fields that are specific to {@link - * DefaultTrackSelector}. + * Extends {@link Parameters} by adding fields that are specific to {@link DefaultTrackSelector}. */ - public static final class Parameters extends TrackSelectionParameters { - - /** - * An instance with default values, except those obtained from the {@link Context}. - * - *

- * <p>If possible, use {@link #getDefaults(Context)} instead.
- *

- * <p>This instance will not have the following settings:
- *

- * <ul>
- *   <li>{@link ParametersBuilder#setViewportSizeToPhysicalDisplaySize(Context, boolean)
- *       Viewport constraints} configured for the primary display.
- *   <li>{@link
- *       ParametersBuilder#setPreferredTextLanguageAndRoleFlagsToCaptioningManagerSettings(Context)
- *       Preferred text language and role flags} configured to the accessibility settings of
- *       {@link android.view.accessibility.CaptioningManager}.
- * </ul>
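A two-line sketch of the Context-aware path that the note above recommends over DEFAULT_WITHOUT_CONTEXT; the selector variable is assumed from the surrounding app code:

DefaultTrackSelector.Parameters defaults = DefaultTrackSelector.Parameters.getDefaults(context);
trackSelector.setParameters(defaults.buildUpon().setMaxVideoSizeSd().build());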
      - */ - @SuppressWarnings("deprecation") - public static final Parameters DEFAULT_WITHOUT_CONTEXT = new ParametersBuilder().build(); + public static final class Parameters extends TrackSelectionParameters implements Bundleable { /** - * @deprecated This instance does not have {@link Context} constraints configured. Use {@link - * #getDefaults(Context)} instead. + * A builder for {@link Parameters}. See the {@link Parameters} documentation for explanations + * of the parameters that can be configured using this builder. */ - @Deprecated public static final Parameters DEFAULT_WITHOUT_VIEWPORT = DEFAULT_WITHOUT_CONTEXT; + public static final class Builder extends TrackSelectionParameters.Builder { - /** - * @deprecated This instance does not have {@link Context} constraints configured. Use {@link - * #getDefaults(Context)} instead. - */ - @Deprecated - public static final Parameters DEFAULT = DEFAULT_WITHOUT_CONTEXT; + // Video + private boolean exceedVideoConstraintsIfNecessary; + private boolean allowVideoMixedMimeTypeAdaptiveness; + private boolean allowVideoNonSeamlessAdaptiveness; + private boolean allowVideoMixedDecoderSupportAdaptiveness; + // Audio + private boolean exceedAudioConstraintsIfNecessary; + private boolean allowAudioMixedMimeTypeAdaptiveness; + private boolean allowAudioMixedSampleRateAdaptiveness; + private boolean allowAudioMixedChannelCountAdaptiveness; + private boolean allowAudioMixedDecoderSupportAdaptiveness; + private boolean constrainAudioChannelCountToDeviceCapabilities; + // General + private boolean exceedRendererCapabilitiesIfNecessary; + private boolean tunnelingEnabled; + private boolean allowMultipleAdaptiveSelections; + // Overrides + private final SparseArray> + selectionOverrides; + private final SparseBooleanArray rendererDisabledFlags; + + /** + * @deprecated {@link Context} constraints will not be set using this constructor. Use {@link + * #Builder(Context)} instead. + */ + @Deprecated + @SuppressWarnings({"deprecation"}) + public Builder() { + super(); + selectionOverrides = new SparseArray<>(); + rendererDisabledFlags = new SparseBooleanArray(); + init(); + } - /** Returns an instance configured with default values. */ - public static Parameters getDefaults(Context context) { - return new ParametersBuilder(context).build(); + /** + * Creates a builder with default initial values. + * + * @param context Any context. + */ + public Builder(Context context) { + super(context); + selectionOverrides = new SparseArray<>(); + rendererDisabledFlags = new SparseBooleanArray(); + init(); + } + + /** + * @param initialValues The {@link Parameters} from which the initial values of the builder + * are obtained. 
+ */ + private Builder(Parameters initialValues) { + super(initialValues); + // Video + exceedVideoConstraintsIfNecessary = initialValues.exceedVideoConstraintsIfNecessary; + allowVideoMixedMimeTypeAdaptiveness = initialValues.allowVideoMixedMimeTypeAdaptiveness; + allowVideoNonSeamlessAdaptiveness = initialValues.allowVideoNonSeamlessAdaptiveness; + allowVideoMixedDecoderSupportAdaptiveness = + initialValues.allowVideoMixedDecoderSupportAdaptiveness; + // Audio + exceedAudioConstraintsIfNecessary = initialValues.exceedAudioConstraintsIfNecessary; + allowAudioMixedMimeTypeAdaptiveness = initialValues.allowAudioMixedMimeTypeAdaptiveness; + allowAudioMixedSampleRateAdaptiveness = initialValues.allowAudioMixedSampleRateAdaptiveness; + allowAudioMixedChannelCountAdaptiveness = + initialValues.allowAudioMixedChannelCountAdaptiveness; + allowAudioMixedDecoderSupportAdaptiveness = + initialValues.allowAudioMixedDecoderSupportAdaptiveness; + constrainAudioChannelCountToDeviceCapabilities = + initialValues.constrainAudioChannelCountToDeviceCapabilities; + // General + exceedRendererCapabilitiesIfNecessary = initialValues.exceedRendererCapabilitiesIfNecessary; + tunnelingEnabled = initialValues.tunnelingEnabled; + allowMultipleAdaptiveSelections = initialValues.allowMultipleAdaptiveSelections; + // Overrides + selectionOverrides = cloneSelectionOverrides(initialValues.selectionOverrides); + rendererDisabledFlags = initialValues.rendererDisabledFlags.clone(); + } + + @SuppressWarnings("method.invocation") // Only setter are invoked. + private Builder(Bundle bundle) { + super(bundle); + init(); + Parameters defaultValue = Parameters.DEFAULT_WITHOUT_CONTEXT; + // Video + setExceedVideoConstraintsIfNecessary( + bundle.getBoolean( + Parameters.FIELD_EXCEED_VIDEO_CONSTRAINTS_IF_NECESSARY, + defaultValue.exceedVideoConstraintsIfNecessary)); + setAllowVideoMixedMimeTypeAdaptiveness( + bundle.getBoolean( + Parameters.FIELD_ALLOW_VIDEO_MIXED_MIME_TYPE_ADAPTIVENESS, + defaultValue.allowVideoMixedMimeTypeAdaptiveness)); + setAllowVideoNonSeamlessAdaptiveness( + bundle.getBoolean( + Parameters.FIELD_ALLOW_VIDEO_NON_SEAMLESS_ADAPTIVENESS, + defaultValue.allowVideoNonSeamlessAdaptiveness)); + setAllowVideoMixedDecoderSupportAdaptiveness( + bundle.getBoolean( + Parameters.FIELD_ALLOW_VIDEO_MIXED_DECODER_SUPPORT_ADAPTIVENESS, + defaultValue.allowVideoMixedDecoderSupportAdaptiveness)); + // Audio + setExceedAudioConstraintsIfNecessary( + bundle.getBoolean( + Parameters.FIELD_EXCEED_AUDIO_CONSTRAINTS_IF_NECESSARY, + defaultValue.exceedAudioConstraintsIfNecessary)); + setAllowAudioMixedMimeTypeAdaptiveness( + bundle.getBoolean( + Parameters.FIELD_ALLOW_AUDIO_MIXED_MIME_TYPE_ADAPTIVENESS, + defaultValue.allowAudioMixedMimeTypeAdaptiveness)); + setAllowAudioMixedSampleRateAdaptiveness( + bundle.getBoolean( + Parameters.FIELD_ALLOW_AUDIO_MIXED_SAMPLE_RATE_ADAPTIVENESS, + defaultValue.allowAudioMixedSampleRateAdaptiveness)); + setAllowAudioMixedChannelCountAdaptiveness( + bundle.getBoolean( + Parameters.FIELD_ALLOW_AUDIO_MIXED_CHANNEL_COUNT_ADAPTIVENESS, + defaultValue.allowAudioMixedChannelCountAdaptiveness)); + setAllowAudioMixedDecoderSupportAdaptiveness( + bundle.getBoolean( + Parameters.FIELD_ALLOW_AUDIO_MIXED_DECODER_SUPPORT_ADAPTIVENESS, + defaultValue.allowAudioMixedDecoderSupportAdaptiveness)); + setConstrainAudioChannelCountToDeviceCapabilities( + bundle.getBoolean( + Parameters.FIELD_CONSTRAIN_AUDIO_CHANNEL_COUNT_TO_DEVICE_CAPABILITIES, + defaultValue.constrainAudioChannelCountToDeviceCapabilities)); + // 
General + setExceedRendererCapabilitiesIfNecessary( + bundle.getBoolean( + Parameters.FIELD_EXCEED_RENDERER_CAPABILITIES_IF_NECESSARY, + defaultValue.exceedRendererCapabilitiesIfNecessary)); + setTunnelingEnabled( + bundle.getBoolean(Parameters.FIELD_TUNNELING_ENABLED, defaultValue.tunnelingEnabled)); + setAllowMultipleAdaptiveSelections( + bundle.getBoolean( + Parameters.FIELD_ALLOW_MULTIPLE_ADAPTIVE_SELECTIONS, + defaultValue.allowMultipleAdaptiveSelections)); + // Overrides + selectionOverrides = new SparseArray<>(); + setSelectionOverridesFromBundle(bundle); + rendererDisabledFlags = + makeSparseBooleanArrayFromTrueKeys( + bundle.getIntArray(Parameters.FIELD_RENDERER_DISABLED_INDICES)); + } + + @CanIgnoreReturnValue + @Override + protected Builder set(TrackSelectionParameters parameters) { + super.set(parameters); + return this; + } + + // Video + + @CanIgnoreReturnValue + @Override + public Builder setMaxVideoSizeSd() { + super.setMaxVideoSizeSd(); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder clearVideoSizeConstraints() { + super.clearVideoSizeConstraints(); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setMaxVideoSize(int maxVideoWidth, int maxVideoHeight) { + super.setMaxVideoSize(maxVideoWidth, maxVideoHeight); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setMaxVideoFrameRate(int maxVideoFrameRate) { + super.setMaxVideoFrameRate(maxVideoFrameRate); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setMaxVideoBitrate(int maxVideoBitrate) { + super.setMaxVideoBitrate(maxVideoBitrate); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setMinVideoSize(int minVideoWidth, int minVideoHeight) { + super.setMinVideoSize(minVideoWidth, minVideoHeight); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setMinVideoFrameRate(int minVideoFrameRate) { + super.setMinVideoFrameRate(minVideoFrameRate); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setMinVideoBitrate(int minVideoBitrate) { + super.setMinVideoBitrate(minVideoBitrate); + return this; + } + + /** + * Sets whether to exceed the {@link #setMaxVideoBitrate}, {@link #setMaxVideoSize(int, int)} + * and {@link #setMaxVideoFrameRate} constraints when no selection can be made otherwise. + * + * @param exceedVideoConstraintsIfNecessary Whether to exceed video constraints when no + * selection can be made otherwise. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setExceedVideoConstraintsIfNecessary( + boolean exceedVideoConstraintsIfNecessary) { + this.exceedVideoConstraintsIfNecessary = exceedVideoConstraintsIfNecessary; + return this; + } + + /** + * Sets whether to allow adaptive video selections containing mixed MIME types. + * + *

      Adaptations between different MIME types may not be completely seamless, in which case + * {@link #setAllowVideoNonSeamlessAdaptiveness(boolean)} also needs to be {@code true} for + * mixed MIME type selections to be made. + * + * @param allowVideoMixedMimeTypeAdaptiveness Whether to allow adaptive video selections + * containing mixed MIME types. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setAllowVideoMixedMimeTypeAdaptiveness( + boolean allowVideoMixedMimeTypeAdaptiveness) { + this.allowVideoMixedMimeTypeAdaptiveness = allowVideoMixedMimeTypeAdaptiveness; + return this; + } + + /** + * Sets whether to allow adaptive video selections where adaptation may not be completely + * seamless. + * + * @param allowVideoNonSeamlessAdaptiveness Whether to allow adaptive video selections where + * adaptation may not be completely seamless. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setAllowVideoNonSeamlessAdaptiveness( + boolean allowVideoNonSeamlessAdaptiveness) { + this.allowVideoNonSeamlessAdaptiveness = allowVideoNonSeamlessAdaptiveness; + return this; + } + + /** + * Sets whether to allow adaptive video selections with mixed levels of {@link + * RendererCapabilities.DecoderSupport} and {@link + * RendererCapabilities.HardwareAccelerationSupport}. + * + * @param allowVideoMixedDecoderSupportAdaptiveness Whether to allow adaptive video selections + * with mixed levels of decoder and hardware acceleration support. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setAllowVideoMixedDecoderSupportAdaptiveness( + boolean allowVideoMixedDecoderSupportAdaptiveness) { + this.allowVideoMixedDecoderSupportAdaptiveness = allowVideoMixedDecoderSupportAdaptiveness; + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setViewportSizeToPhysicalDisplaySize( + Context context, boolean viewportOrientationMayChange) { + super.setViewportSizeToPhysicalDisplaySize(context, viewportOrientationMayChange); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder clearViewportSizeConstraints() { + super.clearViewportSizeConstraints(); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setViewportSize( + int viewportWidth, int viewportHeight, boolean viewportOrientationMayChange) { + super.setViewportSize(viewportWidth, viewportHeight, viewportOrientationMayChange); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setPreferredVideoMimeType(@Nullable String mimeType) { + super.setPreferredVideoMimeType(mimeType); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setPreferredVideoMimeTypes(String... mimeTypes) { + super.setPreferredVideoMimeTypes(mimeTypes); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setPreferredVideoRoleFlags(@RoleFlags int preferredVideoRoleFlags) { + super.setPreferredVideoRoleFlags(preferredVideoRoleFlags); + return this; + } + + // Audio + + @CanIgnoreReturnValue + @Override + public Builder setPreferredAudioLanguage(@Nullable String preferredAudioLanguage) { + super.setPreferredAudioLanguage(preferredAudioLanguage); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setPreferredAudioLanguages(String... 
preferredAudioLanguages) { + super.setPreferredAudioLanguages(preferredAudioLanguages); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setPreferredAudioRoleFlags(@C.RoleFlags int preferredAudioRoleFlags) { + super.setPreferredAudioRoleFlags(preferredAudioRoleFlags); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setMaxAudioChannelCount(int maxAudioChannelCount) { + super.setMaxAudioChannelCount(maxAudioChannelCount); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setMaxAudioBitrate(int maxAudioBitrate) { + super.setMaxAudioBitrate(maxAudioBitrate); + return this; + } + + /** + * Sets whether to exceed the {@link #setMaxAudioChannelCount(int)} and {@link + * #setMaxAudioBitrate(int)} constraints when no selection can be made otherwise. + * + * @param exceedAudioConstraintsIfNecessary Whether to exceed audio constraints when no + * selection can be made otherwise. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setExceedAudioConstraintsIfNecessary( + boolean exceedAudioConstraintsIfNecessary) { + this.exceedAudioConstraintsIfNecessary = exceedAudioConstraintsIfNecessary; + return this; + } + + /** + * Sets whether to allow adaptive audio selections containing mixed MIME types. + * + *

<p>Adaptations between different MIME types may not be completely seamless.
+       *
+       * @param allowAudioMixedMimeTypeAdaptiveness Whether to allow adaptive audio selections
+       *     containing mixed MIME types.
+       * @return This builder.
+       */
+      @CanIgnoreReturnValue
+      public Builder setAllowAudioMixedMimeTypeAdaptiveness(
+          boolean allowAudioMixedMimeTypeAdaptiveness) {
+        this.allowAudioMixedMimeTypeAdaptiveness = allowAudioMixedMimeTypeAdaptiveness;
+        return this;
+      }
+
+      /**
+       * Sets whether to allow adaptive audio selections containing mixed sample rates.
+       *
+       * <p>Adaptations between different sample rates may not be completely seamless.
+       *
+       * @param allowAudioMixedSampleRateAdaptiveness Whether to allow adaptive audio selections
+       *     containing mixed sample rates.
+       * @return This builder.
+       */
+      @CanIgnoreReturnValue
+      public Builder setAllowAudioMixedSampleRateAdaptiveness(
+          boolean allowAudioMixedSampleRateAdaptiveness) {
+        this.allowAudioMixedSampleRateAdaptiveness = allowAudioMixedSampleRateAdaptiveness;
+        return this;
+      }
+
+      /**
+       * Sets whether to allow adaptive audio selections containing mixed channel counts.
+       *
+       * <p>Adaptations between different channel counts may not be completely seamless.
+       *
+       * @param allowAudioMixedChannelCountAdaptiveness Whether to allow adaptive audio selections
+       *     containing mixed channel counts.
+       * @return This builder.
+       */
+      @CanIgnoreReturnValue
+      public Builder setAllowAudioMixedChannelCountAdaptiveness(
+          boolean allowAudioMixedChannelCountAdaptiveness) {
+        this.allowAudioMixedChannelCountAdaptiveness = allowAudioMixedChannelCountAdaptiveness;
+        return this;
+      }
+
+      /**
+       * Sets whether to allow adaptive audio selections with mixed levels of {@link
+       * RendererCapabilities.DecoderSupport} and {@link
+       * RendererCapabilities.HardwareAccelerationSupport}.
+       *
+       * @param allowAudioMixedDecoderSupportAdaptiveness Whether to allow adaptive audio selections
+       *     with mixed levels of decoder and hardware acceleration support.
+       * @return This builder.
+       */
+      @CanIgnoreReturnValue
+      public Builder setAllowAudioMixedDecoderSupportAdaptiveness(
+          boolean allowAudioMixedDecoderSupportAdaptiveness) {
+        this.allowAudioMixedDecoderSupportAdaptiveness = allowAudioMixedDecoderSupportAdaptiveness;
+        return this;
+      }
+
+      @CanIgnoreReturnValue
+      @Override
+      public Builder setPreferredAudioMimeType(@Nullable String mimeType) {
+        super.setPreferredAudioMimeType(mimeType);
+        return this;
+      }
+
+      @CanIgnoreReturnValue
+      @Override
+      public Builder setPreferredAudioMimeTypes(String... mimeTypes) {
+        super.setPreferredAudioMimeTypes(mimeTypes);
+        return this;
+      }
+
+      /**
+       * Whether to only select audio tracks with channel counts that don't exceed the device's
+       * output capabilities. The default value is {@code true}.
+       *
+       * <p>When enabled, the track selector will prefer stereo/mono audio tracks over multichannel
+       * if the audio cannot be spatialized or the device is outputting stereo audio. For example,
+       * on a mobile device that outputs non-spatialized audio to its speakers. Dolby surround sound
+       * formats are excluded from these constraints because some Dolby decoders are known to
+       * spatialize multichannel audio on Android OS versions that don't support the {@link
+       * Spatializer} API.
+       *
+       * <p>For devices with Android 12L+ that support {@linkplain Spatializer audio
+       * spatialization}, when this is enabled the track selector will trigger a new track selection
+       * everytime a change in {@linkplain Spatializer.OnSpatializerStateChangedListener
+       * spatialization properties} is detected.
+       *
+       * <p>
      The constraints do not apply on devices with {@code + * television} UI mode. + * + *

      The constraints do not apply when the track selector is created without a reference to a + * {@link Context} via the deprecated {@link + * DefaultTrackSelector#DefaultTrackSelector(TrackSelectionParameters, + * ExoTrackSelection.Factory)} constructor. + */ + @CanIgnoreReturnValue + public Builder setConstrainAudioChannelCountToDeviceCapabilities(boolean enabled) { + constrainAudioChannelCountToDeviceCapabilities = enabled; + return this; + } + + // Text + + @CanIgnoreReturnValue + @Override + public Builder setPreferredTextLanguageAndRoleFlagsToCaptioningManagerSettings( + Context context) { + super.setPreferredTextLanguageAndRoleFlagsToCaptioningManagerSettings(context); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setPreferredTextLanguage(@Nullable String preferredTextLanguage) { + super.setPreferredTextLanguage(preferredTextLanguage); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setPreferredTextLanguages(String... preferredTextLanguages) { + super.setPreferredTextLanguages(preferredTextLanguages); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setPreferredTextRoleFlags(@C.RoleFlags int preferredTextRoleFlags) { + super.setPreferredTextRoleFlags(preferredTextRoleFlags); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setIgnoredTextSelectionFlags(@C.SelectionFlags int ignoredTextSelectionFlags) { + super.setIgnoredTextSelectionFlags(ignoredTextSelectionFlags); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setSelectUndeterminedTextLanguage(boolean selectUndeterminedTextLanguage) { + super.setSelectUndeterminedTextLanguage(selectUndeterminedTextLanguage); + return this; + } + + /** + * @deprecated Use {@link #setIgnoredTextSelectionFlags}. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setDisabledTextTrackSelectionFlags( + @C.SelectionFlags int disabledTextTrackSelectionFlags) { + return setIgnoredTextSelectionFlags(disabledTextTrackSelectionFlags); + } + + // General + + @CanIgnoreReturnValue + @Override + public Builder setForceLowestBitrate(boolean forceLowestBitrate) { + super.setForceLowestBitrate(forceLowestBitrate); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setForceHighestSupportedBitrate(boolean forceHighestSupportedBitrate) { + super.setForceHighestSupportedBitrate(forceHighestSupportedBitrate); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder addOverride(TrackSelectionOverride override) { + super.addOverride(override); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder clearOverride(TrackGroup trackGroup) { + super.clearOverride(trackGroup); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setOverrideForType(TrackSelectionOverride override) { + super.setOverrideForType(override); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder clearOverridesOfType(@C.TrackType int trackType) { + super.clearOverridesOfType(trackType); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder clearOverrides() { + super.clearOverrides(); + return this; + } + + /** + * @deprecated Use {@link #setTrackTypeDisabled(int, boolean)}. 
+ */ + @CanIgnoreReturnValue + @Override + @Deprecated + @SuppressWarnings("deprecation") + public Builder setDisabledTrackTypes(Set<@C.TrackType Integer> disabledTrackTypes) { + super.setDisabledTrackTypes(disabledTrackTypes); + return this; + } + + @CanIgnoreReturnValue + @Override + public Builder setTrackTypeDisabled(@C.TrackType int trackType, boolean disabled) { + super.setTrackTypeDisabled(trackType, disabled); + return this; + } + + /** + * Sets whether to exceed renderer capabilities when no selection can be made otherwise. + * + *

<p>This parameter applies when all of the tracks available for a renderer exceed the
+       * renderer's reported capabilities. If the parameter is {@code true} then the lowest quality
+       * track will still be selected. Playback may succeed if the renderer has under-reported its
+       * true capabilities. If {@code false} then no track will be selected.
+       *
+       * @param exceedRendererCapabilitiesIfNecessary Whether to exceed renderer capabilities when
+       *     no selection can be made otherwise.
+       * @return This builder.
+       */
+      @CanIgnoreReturnValue
+      public Builder setExceedRendererCapabilitiesIfNecessary(
+          boolean exceedRendererCapabilitiesIfNecessary) {
+        this.exceedRendererCapabilitiesIfNecessary = exceedRendererCapabilitiesIfNecessary;
+        return this;
+      }
+
+      /**
+       * Sets whether to enable tunneling if possible. Tunneling will only be enabled if it's
+       * supported by the audio and video renderers for the selected tracks.
+       *
+       * <p>
      Tunneling is known to have many device specific issues and limitations. Manual testing + * is strongly recommended to check that the media plays correctly when this option is + * enabled. See [#9661](https://github.com/google/ExoPlayer/issues/9661), + * [#9133](https://github.com/google/ExoPlayer/issues/9133), + * [#9317](https://github.com/google/ExoPlayer/issues/9317), + * [#9502](https://github.com/google/ExoPlayer/issues/9502). + * + * @param tunnelingEnabled Whether to enable tunneling if possible. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setTunnelingEnabled(boolean tunnelingEnabled) { + this.tunnelingEnabled = tunnelingEnabled; + return this; + } + + /** + * Sets whether multiple adaptive selections with more than one track are allowed. + * + * @param allowMultipleAdaptiveSelections Whether multiple adaptive selections are allowed. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setAllowMultipleAdaptiveSelections(boolean allowMultipleAdaptiveSelections) { + this.allowMultipleAdaptiveSelections = allowMultipleAdaptiveSelections; + return this; + } + + // Overrides + + /** + * Sets whether the renderer at the specified index is disabled. Disabling a renderer prevents + * the selector from selecting any tracks for it. + * + * @param rendererIndex The renderer index. + * @param disabled Whether the renderer is disabled. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setRendererDisabled(int rendererIndex, boolean disabled) { + if (rendererDisabledFlags.get(rendererIndex) == disabled) { + // The disabled flag is unchanged. + return this; + } + // Only true values are placed in the array to make it easier to check for equality. + if (disabled) { + rendererDisabledFlags.put(rendererIndex, true); + } else { + rendererDisabledFlags.delete(rendererIndex); + } + return this; + } + + /** + * Overrides the track selection for the renderer at the specified index. + * + *

<p>When the {@link TrackGroupArray} mapped to the renderer matches the one provided, the
+       * override is applied. When the {@link TrackGroupArray} does not match, the override has no
+       * effect. The override replaces any previous override for the specified {@link
+       * TrackGroupArray} for the specified {@link Renderer}.
+       *
+       * <p>Passing a {@code null} override will cause the renderer to be disabled when the {@link
+       * TrackGroupArray} mapped to it matches the one provided. When the {@link TrackGroupArray}
+       * does not match a {@code null} override has no effect. Hence a {@code null} override differs
+       * from disabling the renderer using {@link #setRendererDisabled(int, boolean)} because the
+       * renderer is disabled conditionally on the {@link TrackGroupArray} mapped to it, where-as
+       * {@link #setRendererDisabled(int, boolean)} disables the renderer unconditionally.
+       *
+       * <p>
      To remove overrides use {@link #clearSelectionOverride(int, TrackGroupArray)}, {@link + * #clearSelectionOverrides(int)} or {@link #clearSelectionOverrides()}. + * + * @param rendererIndex The renderer index. + * @param groups The {@link TrackGroupArray} for which the override should be applied. + * @param override The override. + * @return This builder. + * @deprecated Use {@link + * TrackSelectionParameters.Builder#addOverride(TrackSelectionOverride)}. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder setSelectionOverride( + int rendererIndex, TrackGroupArray groups, @Nullable SelectionOverride override) { + Map overrides = + selectionOverrides.get(rendererIndex); + if (overrides == null) { + overrides = new HashMap<>(); + selectionOverrides.put(rendererIndex, overrides); + } + if (overrides.containsKey(groups) && Util.areEqual(overrides.get(groups), override)) { + // The override is unchanged. + return this; + } + overrides.put(groups, override); + return this; + } + + /** + * Clears a track selection override for the specified renderer and {@link TrackGroupArray}. + * + * @param rendererIndex The renderer index. + * @param groups The {@link TrackGroupArray} for which the override should be cleared. + * @return This builder. + * @deprecated Use {@link TrackSelectionParameters.Builder#clearOverride(TrackGroup)}. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder clearSelectionOverride(int rendererIndex, TrackGroupArray groups) { + Map overrides = + selectionOverrides.get(rendererIndex); + if (overrides == null || !overrides.containsKey(groups)) { + // Nothing to clear. + return this; + } + overrides.remove(groups); + if (overrides.isEmpty()) { + selectionOverrides.remove(rendererIndex); + } + return this; + } + + /** + * Clears all track selection overrides for the specified renderer. + * + * @param rendererIndex The renderer index. + * @return This builder. + * @deprecated Use {@link TrackSelectionParameters.Builder#clearOverridesOfType(int)}. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder clearSelectionOverrides(int rendererIndex) { + Map overrides = + selectionOverrides.get(rendererIndex); + if (overrides == null || overrides.isEmpty()) { + // Nothing to clear. + return this; + } + selectionOverrides.remove(rendererIndex); + return this; + } + + /** + * Clears all track selection overrides for all renderers. + * + * @return This builder. + * @deprecated Use {@link TrackSelectionParameters.Builder#clearOverrides()}. + */ + @CanIgnoreReturnValue + @Deprecated + public Builder clearSelectionOverrides() { + if (selectionOverrides.size() == 0) { + // Nothing to clear. + return this; + } + selectionOverrides.clear(); + return this; + } + + /** Builds a {@link Parameters} instance with the selected values. 
*/ + @Override + public Parameters build() { + return new Parameters(this); + } + + private void init(Builder this) { + // Video + exceedVideoConstraintsIfNecessary = true; + allowVideoMixedMimeTypeAdaptiveness = false; + allowVideoNonSeamlessAdaptiveness = true; + allowVideoMixedDecoderSupportAdaptiveness = false; + // Audio + exceedAudioConstraintsIfNecessary = true; + allowAudioMixedMimeTypeAdaptiveness = false; + allowAudioMixedSampleRateAdaptiveness = false; + allowAudioMixedChannelCountAdaptiveness = false; + allowAudioMixedDecoderSupportAdaptiveness = false; + constrainAudioChannelCountToDeviceCapabilities = true; + // General + exceedRendererCapabilitiesIfNecessary = true; + tunnelingEnabled = false; + allowMultipleAdaptiveSelections = true; + } + + private static SparseArray> + cloneSelectionOverrides( + SparseArray> + selectionOverrides) { + SparseArray> clone = + new SparseArray<>(); + for (int i = 0; i < selectionOverrides.size(); i++) { + clone.put(selectionOverrides.keyAt(i), new HashMap<>(selectionOverrides.valueAt(i))); + } + return clone; + } + + private void setSelectionOverridesFromBundle(Bundle bundle) { + @Nullable + int[] rendererIndices = + bundle.getIntArray(Parameters.FIELD_SELECTION_OVERRIDES_RENDERER_INDICES); + @Nullable + ArrayList trackGroupArrayBundles = + bundle.getParcelableArrayList(Parameters.FIELD_SELECTION_OVERRIDES_TRACK_GROUP_ARRAYS); + List trackGroupArrays = + trackGroupArrayBundles == null + ? ImmutableList.of() + : BundleableUtil.fromBundleList(TrackGroupArray.CREATOR, trackGroupArrayBundles); + @Nullable + SparseArray selectionOverrideBundles = + bundle.getSparseParcelableArray(Parameters.FIELD_SELECTION_OVERRIDES); + SparseArray selectionOverrides = + selectionOverrideBundles == null + ? new SparseArray<>() + : BundleableUtil.fromBundleSparseArray( + SelectionOverride.CREATOR, selectionOverrideBundles); + + if (rendererIndices == null || rendererIndices.length != trackGroupArrays.size()) { + return; // Incorrect format, ignore all overrides. + } + for (int i = 0; i < rendererIndices.length; i++) { + int rendererIndex = rendererIndices[i]; + TrackGroupArray groups = trackGroupArrays.get(i); + @Nullable SelectionOverride selectionOverride = selectionOverrides.get(i); + setSelectionOverride(rendererIndex, groups, selectionOverride); + } + } + + private SparseBooleanArray makeSparseBooleanArrayFromTrueKeys(@Nullable int[] trueKeys) { + if (trueKeys == null) { + return new SparseBooleanArray(); + } + SparseBooleanArray sparseBooleanArray = new SparseBooleanArray(trueKeys.length); + for (int trueKey : trueKeys) { + sparseBooleanArray.append(trueKey, true); + } + return sparseBooleanArray; + } } - // Video /** - * Maximum allowed video width in pixels. The default value is {@link Integer#MAX_VALUE} (i.e. - * no constraint). + * An instance with default values, except those obtained from the {@link Context}. * - *

      To constrain adaptive video track selections to be suitable for a given viewport (the - * region of the display within which video will be played), use ({@link #viewportWidth}, {@link - * #viewportHeight} and {@link #viewportOrientationMayChange}) instead. - */ - public final int maxVideoWidth; - /** - * Maximum allowed video height in pixels. The default value is {@link Integer#MAX_VALUE} (i.e. - * no constraint). + *

      If possible, use {@link #getDefaults(Context)} instead. * - *

      To constrain adaptive video track selections to be suitable for a given viewport (the - * region of the display within which video will be played), use ({@link #viewportWidth}, {@link - * #viewportHeight} and {@link #viewportOrientationMayChange}) instead. - */ - public final int maxVideoHeight; - /** - * Maximum allowed video frame rate in hertz. The default value is {@link Integer#MAX_VALUE} - * (i.e. no constraint). + *

<p>This instance will not have the following settings:
+     *
+     * <ul>
+     *   <li>{@linkplain Builder#setViewportSizeToPhysicalDisplaySize(Context, boolean) Viewport
+     *       constraints} configured for the primary display.
+     *   <li>{@linkplain
+     *       Builder#setPreferredTextLanguageAndRoleFlagsToCaptioningManagerSettings(Context)
+     *       Preferred text language and role flags} configured to the accessibility settings of
+     *       {@link android.view.accessibility.CaptioningManager}.
+     * </ul>
      */ - public final int maxVideoFrameRate; + @SuppressWarnings("deprecation") + public static final Parameters DEFAULT_WITHOUT_CONTEXT = new Builder().build(); /** - * Maximum allowed video bitrate in bits per second. The default value is {@link - * Integer#MAX_VALUE} (i.e. no constraint). + * @deprecated This instance is not configured using {@link Context} constraints. Use {@link + * #getDefaults(Context)} instead. */ - public final int maxVideoBitrate; + @Deprecated public static final Parameters DEFAULT = DEFAULT_WITHOUT_CONTEXT; + + /** Returns an instance configured with default values. */ + public static Parameters getDefaults(Context context) { + return new Parameters.Builder(context).build(); + } + + // Video + /** * Whether to exceed the {@link #maxVideoWidth}, {@link #maxVideoHeight} and {@link * #maxVideoBitrate} constraints when no selection can be made otherwise. The default value is @@ -885,34 +1650,14 @@ public static Parameters getDefaults(Context context) { */ public final boolean allowVideoNonSeamlessAdaptiveness; /** - * Viewport width in pixels. Constrains video track selections for adaptive content so that only - * tracks suitable for the viewport are selected. The default value is the physical width of the - * primary display, in pixels. - */ - public final int viewportWidth; - /** - * Viewport height in pixels. Constrains video track selections for adaptive content so that - * only tracks suitable for the viewport are selected. The default value is the physical height - * of the primary display, in pixels. + * Whether to allow adaptive video selections with mixed levels of {@link + * RendererCapabilities.DecoderSupport} and {@link + * RendererCapabilities.HardwareAccelerationSupport}. */ - public final int viewportHeight; - /** - * Whether the viewport orientation may change during playback. Constrains video track - * selections for adaptive content so that only tracks suitable for the viewport are selected. - * The default value is {@code true}. - */ - public final boolean viewportOrientationMayChange; + public final boolean allowVideoMixedDecoderSupportAdaptiveness; + // Audio - /** - * Maximum allowed audio channel count. The default value is {@link Integer#MAX_VALUE} (i.e. no - * constraint). - */ - public final int maxAudioChannelCount; - /** - * Maximum allowed audio bitrate in bits per second. The default value is {@link - * Integer#MAX_VALUE} (i.e. no constraint). - */ - public final int maxAudioBitrate; + /** * Whether to exceed the {@link #maxAudioChannelCount} and {@link #maxAudioBitrate} constraints * when no selection can be made otherwise. The default value is {@code true}. @@ -934,25 +1679,20 @@ public static Parameters getDefaults(Context context) { * false}. */ public final boolean allowAudioMixedChannelCountAdaptiveness; - - // General - /** - * Whether to force selection of the single lowest bitrate audio and video tracks that comply - * with all other constraints. The default value is {@code false}. - */ - public final boolean forceLowestBitrate; /** - * Whether to force selection of the highest bitrate audio and video tracks that comply with all - * other constraints. The default value is {@code false}. + * Whether to allow adaptive audio selections with mixed levels of {@link + * RendererCapabilities.DecoderSupport} and {@link + * RendererCapabilities.HardwareAccelerationSupport}. 
*/ - public final boolean forceHighestSupportedBitrate; + public final boolean allowAudioMixedDecoderSupportAdaptiveness; /** - * @deprecated Use {@link #allowVideoMixedMimeTypeAdaptiveness} and {@link - * #allowAudioMixedMimeTypeAdaptiveness}. + * Whether to constrain audio track selection so that the selected track's channel count does + * not exceed the device's output capabilities. The default value is {@code true}. */ - @Deprecated public final boolean allowMixedMimeAdaptiveness; - /** @deprecated Use {@link #allowVideoNonSeamlessAdaptiveness}. */ - @Deprecated public final boolean allowNonSeamlessAdaptiveness; + public final boolean constrainAudioChannelCountToDeviceCapabilities; + + // General + /** * Whether to exceed renderer capabilities when no selection can be made otherwise. * @@ -963,120 +1703,45 @@ public static Parameters getDefaults(Context context) { * {@code true}. */ public final boolean exceedRendererCapabilitiesIfNecessary; + /** Whether to enable tunneling if possible. */ + public final boolean tunnelingEnabled; /** - * The audio session id to use when tunneling, or {@link C#AUDIO_SESSION_ID_UNSET} if tunneling - * is disabled. The default value is {@link C#AUDIO_SESSION_ID_UNSET} (i.e. tunneling is - * disabled). + * Whether multiple adaptive selections with more than one track are allowed. The default value + * is {@code true}. + * + *

      Note that tracks are only eligible for adaptation if they define a bitrate, the renderers + * support the tracks and allow adaptation between them, and they are not excluded based on + * other track selection parameters. */ - public final int tunnelingAudioSessionId; + public final boolean allowMultipleAdaptiveSelections; // Overrides private final SparseArray> selectionOverrides; private final SparseBooleanArray rendererDisabledFlags; - /* package */ Parameters( - // Video - int maxVideoWidth, - int maxVideoHeight, - int maxVideoFrameRate, - int maxVideoBitrate, - boolean exceedVideoConstraintsIfNecessary, - boolean allowVideoMixedMimeTypeAdaptiveness, - boolean allowVideoNonSeamlessAdaptiveness, - int viewportWidth, - int viewportHeight, - boolean viewportOrientationMayChange, - // Audio - @Nullable String preferredAudioLanguage, - int maxAudioChannelCount, - int maxAudioBitrate, - boolean exceedAudioConstraintsIfNecessary, - boolean allowAudioMixedMimeTypeAdaptiveness, - boolean allowAudioMixedSampleRateAdaptiveness, - boolean allowAudioMixedChannelCountAdaptiveness, - // Text - @Nullable String preferredTextLanguage, - @C.RoleFlags int preferredTextRoleFlags, - boolean selectUndeterminedTextLanguage, - @C.SelectionFlags int disabledTextTrackSelectionFlags, - // General - boolean forceLowestBitrate, - boolean forceHighestSupportedBitrate, - boolean exceedRendererCapabilitiesIfNecessary, - int tunnelingAudioSessionId, - // Overrides - SparseArray> selectionOverrides, - SparseBooleanArray rendererDisabledFlags) { - super( - preferredAudioLanguage, - preferredTextLanguage, - preferredTextRoleFlags, - selectUndeterminedTextLanguage, - disabledTextTrackSelectionFlags); + private Parameters(Builder builder) { + super(builder); // Video - this.maxVideoWidth = maxVideoWidth; - this.maxVideoHeight = maxVideoHeight; - this.maxVideoFrameRate = maxVideoFrameRate; - this.maxVideoBitrate = maxVideoBitrate; - this.exceedVideoConstraintsIfNecessary = exceedVideoConstraintsIfNecessary; - this.allowVideoMixedMimeTypeAdaptiveness = allowVideoMixedMimeTypeAdaptiveness; - this.allowVideoNonSeamlessAdaptiveness = allowVideoNonSeamlessAdaptiveness; - this.viewportWidth = viewportWidth; - this.viewportHeight = viewportHeight; - this.viewportOrientationMayChange = viewportOrientationMayChange; + exceedVideoConstraintsIfNecessary = builder.exceedVideoConstraintsIfNecessary; + allowVideoMixedMimeTypeAdaptiveness = builder.allowVideoMixedMimeTypeAdaptiveness; + allowVideoNonSeamlessAdaptiveness = builder.allowVideoNonSeamlessAdaptiveness; + allowVideoMixedDecoderSupportAdaptiveness = builder.allowVideoMixedDecoderSupportAdaptiveness; // Audio - this.maxAudioChannelCount = maxAudioChannelCount; - this.maxAudioBitrate = maxAudioBitrate; - this.exceedAudioConstraintsIfNecessary = exceedAudioConstraintsIfNecessary; - this.allowAudioMixedMimeTypeAdaptiveness = allowAudioMixedMimeTypeAdaptiveness; - this.allowAudioMixedSampleRateAdaptiveness = allowAudioMixedSampleRateAdaptiveness; - this.allowAudioMixedChannelCountAdaptiveness = allowAudioMixedChannelCountAdaptiveness; + exceedAudioConstraintsIfNecessary = builder.exceedAudioConstraintsIfNecessary; + allowAudioMixedMimeTypeAdaptiveness = builder.allowAudioMixedMimeTypeAdaptiveness; + allowAudioMixedSampleRateAdaptiveness = builder.allowAudioMixedSampleRateAdaptiveness; + allowAudioMixedChannelCountAdaptiveness = builder.allowAudioMixedChannelCountAdaptiveness; + allowAudioMixedDecoderSupportAdaptiveness = builder.allowAudioMixedDecoderSupportAdaptiveness; + 
constrainAudioChannelCountToDeviceCapabilities = + builder.constrainAudioChannelCountToDeviceCapabilities; // General - this.forceLowestBitrate = forceLowestBitrate; - this.forceHighestSupportedBitrate = forceHighestSupportedBitrate; - this.exceedRendererCapabilitiesIfNecessary = exceedRendererCapabilitiesIfNecessary; - this.tunnelingAudioSessionId = tunnelingAudioSessionId; - // Deprecated fields. - this.allowMixedMimeAdaptiveness = allowVideoMixedMimeTypeAdaptiveness; - this.allowNonSeamlessAdaptiveness = allowVideoNonSeamlessAdaptiveness; + exceedRendererCapabilitiesIfNecessary = builder.exceedRendererCapabilitiesIfNecessary; + tunnelingEnabled = builder.tunnelingEnabled; + allowMultipleAdaptiveSelections = builder.allowMultipleAdaptiveSelections; // Overrides - this.selectionOverrides = selectionOverrides; - this.rendererDisabledFlags = rendererDisabledFlags; - } - - /* package */ - Parameters(Parcel in) { - super(in); - // Video - this.maxVideoWidth = in.readInt(); - this.maxVideoHeight = in.readInt(); - this.maxVideoFrameRate = in.readInt(); - this.maxVideoBitrate = in.readInt(); - this.exceedVideoConstraintsIfNecessary = Util.readBoolean(in); - this.allowVideoMixedMimeTypeAdaptiveness = Util.readBoolean(in); - this.allowVideoNonSeamlessAdaptiveness = Util.readBoolean(in); - this.viewportWidth = in.readInt(); - this.viewportHeight = in.readInt(); - this.viewportOrientationMayChange = Util.readBoolean(in); - // Audio - this.maxAudioChannelCount = in.readInt(); - this.maxAudioBitrate = in.readInt(); - this.exceedAudioConstraintsIfNecessary = Util.readBoolean(in); - this.allowAudioMixedMimeTypeAdaptiveness = Util.readBoolean(in); - this.allowAudioMixedSampleRateAdaptiveness = Util.readBoolean(in); - this.allowAudioMixedChannelCountAdaptiveness = Util.readBoolean(in); - // General - this.forceLowestBitrate = Util.readBoolean(in); - this.forceHighestSupportedBitrate = Util.readBoolean(in); - this.exceedRendererCapabilitiesIfNecessary = Util.readBoolean(in); - this.tunnelingAudioSessionId = in.readInt(); - // Overrides - this.selectionOverrides = readSelectionOverrides(in); - this.rendererDisabledFlags = Util.castNonNull(in.readSparseBooleanArray()); - // Deprecated fields. - this.allowMixedMimeAdaptiveness = allowVideoMixedMimeTypeAdaptiveness; - this.allowNonSeamlessAdaptiveness = allowVideoNonSeamlessAdaptiveness; + selectionOverrides = builder.selectionOverrides; + rendererDisabledFlags = builder.rendererDisabledFlags; } /** @@ -1085,7 +1750,7 @@ public static Parameters getDefaults(Context context) { * @param rendererIndex The renderer index. * @return Whether the renderer is disabled. */ - public final boolean getRendererDisabled(int rendererIndex) { + public boolean getRendererDisabled(int rendererIndex) { return rendererDisabledFlags.get(rendererIndex); } @@ -1095,8 +1760,13 @@ public final boolean getRendererDisabled(int rendererIndex) { * @param rendererIndex The renderer index. * @param groups The {@link TrackGroupArray}. * @return Whether there is an override. + * @deprecated Only works to retrieve the overrides set with the deprecated {@link + * Builder#setSelectionOverride(int, TrackGroupArray, SelectionOverride)}. Use {@link + * TrackSelectionParameters#overrides} instead. 
*/ - public final boolean hasSelectionOverride(int rendererIndex, TrackGroupArray groups) { + @Deprecated + public boolean hasSelectionOverride(int rendererIndex, TrackGroupArray groups) { + @Nullable Map overrides = selectionOverrides.get(rendererIndex); return overrides != null && overrides.containsKey(groups); @@ -1108,20 +1778,27 @@ public final boolean hasSelectionOverride(int rendererIndex, TrackGroupArray gro * @param rendererIndex The renderer index. * @param groups The {@link TrackGroupArray}. * @return The override, or null if no override exists. + * @deprecated Only works to retrieve the overrides set with the deprecated {@link + * Builder#setSelectionOverride(int, TrackGroupArray, SelectionOverride)}. Use {@link + * TrackSelectionParameters#overrides} instead. */ + @Deprecated @Nullable - public final SelectionOverride getSelectionOverride(int rendererIndex, TrackGroupArray groups) { + public SelectionOverride getSelectionOverride(int rendererIndex, TrackGroupArray groups) { + @Nullable Map overrides = selectionOverrides.get(rendererIndex); return overrides != null ? overrides.get(groups) : null; } - /** Creates a new {@link ParametersBuilder}, copying the initial values from this instance. */ + /** Creates a new {@link Parameters.Builder}, copying the initial values from this instance. */ @Override - public ParametersBuilder buildUpon() { - return new ParametersBuilder(this); + public Parameters.Builder buildUpon() { + return new Parameters.Builder(this); } + @SuppressWarnings( + "EqualsGetClass") // Class extends TrackSelectionParameters for backwards compatibility. @Override public boolean equals(@Nullable Object obj) { if (this == obj) { @@ -1131,31 +1808,27 @@ public boolean equals(@Nullable Object obj) { return false; } Parameters other = (Parameters) obj; - return super.equals(obj) + return super.equals(other) // Video - && maxVideoWidth == other.maxVideoWidth - && maxVideoHeight == other.maxVideoHeight - && maxVideoFrameRate == other.maxVideoFrameRate - && maxVideoBitrate == other.maxVideoBitrate && exceedVideoConstraintsIfNecessary == other.exceedVideoConstraintsIfNecessary && allowVideoMixedMimeTypeAdaptiveness == other.allowVideoMixedMimeTypeAdaptiveness && allowVideoNonSeamlessAdaptiveness == other.allowVideoNonSeamlessAdaptiveness - && viewportOrientationMayChange == other.viewportOrientationMayChange - && viewportWidth == other.viewportWidth - && viewportHeight == other.viewportHeight + && allowVideoMixedDecoderSupportAdaptiveness + == other.allowVideoMixedDecoderSupportAdaptiveness // Audio - && maxAudioChannelCount == other.maxAudioChannelCount - && maxAudioBitrate == other.maxAudioBitrate && exceedAudioConstraintsIfNecessary == other.exceedAudioConstraintsIfNecessary && allowAudioMixedMimeTypeAdaptiveness == other.allowAudioMixedMimeTypeAdaptiveness && allowAudioMixedSampleRateAdaptiveness == other.allowAudioMixedSampleRateAdaptiveness && allowAudioMixedChannelCountAdaptiveness == other.allowAudioMixedChannelCountAdaptiveness + && allowAudioMixedDecoderSupportAdaptiveness + == other.allowAudioMixedDecoderSupportAdaptiveness + && constrainAudioChannelCountToDeviceCapabilities + == other.constrainAudioChannelCountToDeviceCapabilities // General - && forceLowestBitrate == other.forceLowestBitrate - && forceHighestSupportedBitrate == other.forceHighestSupportedBitrate && exceedRendererCapabilitiesIfNecessary == other.exceedRendererCapabilitiesIfNecessary - && tunnelingAudioSessionId == other.tunnelingAudioSessionId + && tunnelingEnabled == other.tunnelingEnabled + 
&& allowMultipleAdaptiveSelections == other.allowMultipleAdaptiveSelections // Overrides && areRendererDisabledFlagsEqual(rendererDisabledFlags, other.rendererDisabledFlags) && areSelectionOverridesEqual(selectionOverrides, other.selectionOverrides); @@ -1163,128 +1836,152 @@ && areRendererDisabledFlagsEqual(rendererDisabledFlags, other.rendererDisabledFl @Override public int hashCode() { - int result = super.hashCode(); + int result = 1; + result = 31 * result + super.hashCode(); // Video - result = 31 * result + maxVideoWidth; - result = 31 * result + maxVideoHeight; - result = 31 * result + maxVideoFrameRate; - result = 31 * result + maxVideoBitrate; result = 31 * result + (exceedVideoConstraintsIfNecessary ? 1 : 0); result = 31 * result + (allowVideoMixedMimeTypeAdaptiveness ? 1 : 0); result = 31 * result + (allowVideoNonSeamlessAdaptiveness ? 1 : 0); - result = 31 * result + (viewportOrientationMayChange ? 1 : 0); - result = 31 * result + viewportWidth; - result = 31 * result + viewportHeight; + result = 31 * result + (allowVideoMixedDecoderSupportAdaptiveness ? 1 : 0); // Audio - result = 31 * result + maxAudioChannelCount; - result = 31 * result + maxAudioBitrate; result = 31 * result + (exceedAudioConstraintsIfNecessary ? 1 : 0); result = 31 * result + (allowAudioMixedMimeTypeAdaptiveness ? 1 : 0); result = 31 * result + (allowAudioMixedSampleRateAdaptiveness ? 1 : 0); result = 31 * result + (allowAudioMixedChannelCountAdaptiveness ? 1 : 0); + result = 31 * result + (allowAudioMixedDecoderSupportAdaptiveness ? 1 : 0); + result = 31 * result + (constrainAudioChannelCountToDeviceCapabilities ? 1 : 0); // General - result = 31 * result + (forceLowestBitrate ? 1 : 0); - result = 31 * result + (forceHighestSupportedBitrate ? 1 : 0); result = 31 * result + (exceedRendererCapabilitiesIfNecessary ? 1 : 0); - result = 31 * result + tunnelingAudioSessionId; + result = 31 * result + (tunnelingEnabled ? 1 : 0); + result = 31 * result + (allowMultipleAdaptiveSelections ? 1 : 0); // Overrides (omitted from hashCode). return result; } - // Parcelable implementation. + // Bundleable implementation. 
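The hunks above swap the Parcelable plumbing for the Bundleable one; a minimal round-trip sketch of that path may help, assuming the bundled com.google.android.exoplayer2 packages and a caller-supplied Context. The class name ParametersBundleRoundTrip and the two toggled flags are illustrative only, and since each getBoolean in the Builder(Bundle) constructor falls back to a default value, bundles missing the newer FIELD_* keys declared just below still restore cleanly.

import android.content.Context;
import android.os.Bundle;

import com.google.android.exoplayer2.trackselection.DefaultTrackSelector.Parameters;

final class ParametersBundleRoundTrip {
  static Parameters roundTrip(Context context) {
    Parameters original =
        Parameters.getDefaults(context)
            .buildUpon()
            .setTunnelingEnabled(true)
            .setExceedRendererCapabilitiesIfNecessary(false)
            .build();
    // toBundle() writes the string FIELD_* keys; CREATOR rebuilds via Parameters.Builder(Bundle).
    Bundle bundle = original.toBundle();
    Parameters restored = Parameters.CREATOR.fromBundle(bundle);
    // equals() still compares the legacy selection overrides; only hashCode() omits them.
    if (!restored.equals(original)) {
      throw new IllegalStateException("Parameters did not survive the Bundle round trip");
    }
    return restored;
  }
}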
+ + private static final String FIELD_EXCEED_VIDEO_CONSTRAINTS_IF_NECESSARY = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE); + private static final String FIELD_ALLOW_VIDEO_MIXED_MIME_TYPE_ADAPTIVENESS = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 1); + private static final String FIELD_ALLOW_VIDEO_NON_SEAMLESS_ADAPTIVENESS = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 2); + private static final String FIELD_EXCEED_AUDIO_CONSTRAINTS_IF_NECESSARY = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 3); + private static final String FIELD_ALLOW_AUDIO_MIXED_MIME_TYPE_ADAPTIVENESS = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 4); + private static final String FIELD_ALLOW_AUDIO_MIXED_SAMPLE_RATE_ADAPTIVENESS = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 5); + private static final String FIELD_ALLOW_AUDIO_MIXED_CHANNEL_COUNT_ADAPTIVENESS = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 6); + private static final String FIELD_EXCEED_RENDERER_CAPABILITIES_IF_NECESSARY = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 7); + private static final String FIELD_TUNNELING_ENABLED = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 8); + private static final String FIELD_ALLOW_MULTIPLE_ADAPTIVE_SELECTIONS = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 9); + private static final String FIELD_SELECTION_OVERRIDES_RENDERER_INDICES = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 10); + private static final String FIELD_SELECTION_OVERRIDES_TRACK_GROUP_ARRAYS = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 11); + private static final String FIELD_SELECTION_OVERRIDES = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 12); + private static final String FIELD_RENDERER_DISABLED_INDICES = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 13); + private static final String FIELD_ALLOW_VIDEO_MIXED_DECODER_SUPPORT_ADAPTIVENESS = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 14); + private static final String FIELD_ALLOW_AUDIO_MIXED_DECODER_SUPPORT_ADAPTIVENESS = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 15); + private static final String FIELD_CONSTRAIN_AUDIO_CHANNEL_COUNT_TO_DEVICE_CAPABILITIES = + Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 16); @Override - public int describeContents() { - return 0; - } + public Bundle toBundle() { + Bundle bundle = super.toBundle(); - @Override - public void writeToParcel(Parcel dest, int flags) { - super.writeToParcel(dest, flags); // Video - dest.writeInt(maxVideoWidth); - dest.writeInt(maxVideoHeight); - dest.writeInt(maxVideoFrameRate); - dest.writeInt(maxVideoBitrate); - Util.writeBoolean(dest, exceedVideoConstraintsIfNecessary); - Util.writeBoolean(dest, allowVideoMixedMimeTypeAdaptiveness); - Util.writeBoolean(dest, allowVideoNonSeamlessAdaptiveness); - dest.writeInt(viewportWidth); - dest.writeInt(viewportHeight); - Util.writeBoolean(dest, viewportOrientationMayChange); + bundle.putBoolean( + FIELD_EXCEED_VIDEO_CONSTRAINTS_IF_NECESSARY, exceedVideoConstraintsIfNecessary); + bundle.putBoolean( + FIELD_ALLOW_VIDEO_MIXED_MIME_TYPE_ADAPTIVENESS, allowVideoMixedMimeTypeAdaptiveness); + bundle.putBoolean( + FIELD_ALLOW_VIDEO_NON_SEAMLESS_ADAPTIVENESS, allowVideoNonSeamlessAdaptiveness); + bundle.putBoolean( + FIELD_ALLOW_VIDEO_MIXED_DECODER_SUPPORT_ADAPTIVENESS, + allowVideoMixedDecoderSupportAdaptiveness); // Audio - dest.writeInt(maxAudioChannelCount); - dest.writeInt(maxAudioBitrate); - Util.writeBoolean(dest, exceedAudioConstraintsIfNecessary); - Util.writeBoolean(dest, allowAudioMixedMimeTypeAdaptiveness); - 
Util.writeBoolean(dest, allowAudioMixedSampleRateAdaptiveness); - Util.writeBoolean(dest, allowAudioMixedChannelCountAdaptiveness); + bundle.putBoolean( + FIELD_EXCEED_AUDIO_CONSTRAINTS_IF_NECESSARY, exceedAudioConstraintsIfNecessary); + bundle.putBoolean( + FIELD_ALLOW_AUDIO_MIXED_MIME_TYPE_ADAPTIVENESS, allowAudioMixedMimeTypeAdaptiveness); + bundle.putBoolean( + FIELD_ALLOW_AUDIO_MIXED_SAMPLE_RATE_ADAPTIVENESS, allowAudioMixedSampleRateAdaptiveness); + bundle.putBoolean( + FIELD_ALLOW_AUDIO_MIXED_CHANNEL_COUNT_ADAPTIVENESS, + allowAudioMixedChannelCountAdaptiveness); + bundle.putBoolean( + FIELD_ALLOW_AUDIO_MIXED_DECODER_SUPPORT_ADAPTIVENESS, + allowAudioMixedDecoderSupportAdaptiveness); + bundle.putBoolean( + FIELD_CONSTRAIN_AUDIO_CHANNEL_COUNT_TO_DEVICE_CAPABILITIES, + constrainAudioChannelCountToDeviceCapabilities); // General - Util.writeBoolean(dest, forceLowestBitrate); - Util.writeBoolean(dest, forceHighestSupportedBitrate); - Util.writeBoolean(dest, exceedRendererCapabilitiesIfNecessary); - dest.writeInt(tunnelingAudioSessionId); - // Overrides - writeSelectionOverridesToParcel(dest, selectionOverrides); - dest.writeSparseBooleanArray(rendererDisabledFlags); - } + bundle.putBoolean( + FIELD_EXCEED_RENDERER_CAPABILITIES_IF_NECESSARY, exceedRendererCapabilitiesIfNecessary); + bundle.putBoolean(FIELD_TUNNELING_ENABLED, tunnelingEnabled); + bundle.putBoolean(FIELD_ALLOW_MULTIPLE_ADAPTIVE_SELECTIONS, allowMultipleAdaptiveSelections); - public static final Parcelable.Creator CREATOR = - new Parcelable.Creator() { + putSelectionOverridesToBundle(bundle, selectionOverrides); + // Only true values are put into rendererDisabledFlags. + bundle.putIntArray( + FIELD_RENDERER_DISABLED_INDICES, getKeysFromSparseBooleanArray(rendererDisabledFlags)); - @Override - public Parameters createFromParcel(Parcel in) { - return new Parameters(in); - } + return bundle; + } - @Override - public Parameters[] newArray(int size) { - return new Parameters[size]; - } - }; + /** Object that can restore {@code Parameters} from a {@link Bundle}. */ + public static final Creator CREATOR = + bundle -> new Parameters.Builder(bundle).build(); - // Static utility methods. + /** + * Bundles selection overrides in 3 arrays of equal length. Each triplet of matching indices is: + * the selection override (stored in a sparse array as they can be null), the trackGroupArray of + * that override, the rendererIndex of that override. 
+ */ + private static void putSelectionOverridesToBundle( + Bundle bundle, + SparseArray> selectionOverrides) { + ArrayList rendererIndices = new ArrayList<>(); + ArrayList trackGroupArrays = new ArrayList<>(); + SparseArray selections = new SparseArray<>(); - private static SparseArray> - readSelectionOverrides(Parcel in) { - int renderersWithOverridesCount = in.readInt(); - SparseArray> selectionOverrides = - new SparseArray<>(renderersWithOverridesCount); - for (int i = 0; i < renderersWithOverridesCount; i++) { - int rendererIndex = in.readInt(); - int overrideCount = in.readInt(); - Map overrides = - new HashMap<>(overrideCount); - for (int j = 0; j < overrideCount; j++) { - TrackGroupArray trackGroups = - Assertions.checkNotNull(in.readParcelable(TrackGroupArray.class.getClassLoader())); - @Nullable - SelectionOverride override = in.readParcelable(SelectionOverride.class.getClassLoader()); - overrides.put(trackGroups, override); + for (int i = 0; i < selectionOverrides.size(); i++) { + int rendererIndex = selectionOverrides.keyAt(i); + for (Map.Entry override : + selectionOverrides.valueAt(i).entrySet()) { + @Nullable SelectionOverride selection = override.getValue(); + if (selection != null) { + selections.put(trackGroupArrays.size(), selection); + } + trackGroupArrays.add(override.getKey()); + rendererIndices.add(rendererIndex); } - selectionOverrides.put(rendererIndex, overrides); + bundle.putIntArray( + FIELD_SELECTION_OVERRIDES_RENDERER_INDICES, Ints.toArray(rendererIndices)); + bundle.putParcelableArrayList( + FIELD_SELECTION_OVERRIDES_TRACK_GROUP_ARRAYS, + BundleableUtil.toBundleArrayList(trackGroupArrays)); + bundle.putSparseParcelableArray( + FIELD_SELECTION_OVERRIDES, BundleableUtil.toBundleSparseArray(selections)); } - return selectionOverrides; } - private static void writeSelectionOverridesToParcel( - Parcel dest, - SparseArray> selectionOverrides) { - int renderersWithOverridesCount = selectionOverrides.size(); - dest.writeInt(renderersWithOverridesCount); - for (int i = 0; i < renderersWithOverridesCount; i++) { - int rendererIndex = selectionOverrides.keyAt(i); - Map overrides = - selectionOverrides.valueAt(i); - int overrideCount = overrides.size(); - dest.writeInt(rendererIndex); - dest.writeInt(overrideCount); - for (Map.Entry override : - overrides.entrySet()) { - dest.writeParcelable(override.getKey(), /* parcelableFlags= */ 0); - dest.writeParcelable(override.getValue(), /* parcelableFlags= */ 0); - } + private static int[] getKeysFromSparseBooleanArray(SparseBooleanArray sparseBooleanArray) { + int[] keys = new int[sparseBooleanArray.size()]; + for (int i = 0; i < sparseBooleanArray.size(); i++) { + keys[i] = sparseBooleanArray.keyAt(i); } + return keys; } private static boolean areRendererDisabledFlagsEqual( @@ -1339,46 +2036,38 @@ private static boolean areSelectionOverridesEqual( } /** A track selection override. */ - public static final class SelectionOverride implements Parcelable { + public static final class SelectionOverride implements Bundleable { public final int groupIndex; public final int[] tracks; public final int length; - public final int reason; - public final int data; + public final @TrackSelection.Type int type; /** + * Constructs a {@code SelectionOverride} to override tracks of a group. + * * @param groupIndex The overriding track group index. * @param tracks The overriding track indices within the track group. */ public SelectionOverride(int groupIndex, int... 
tracks) { - this(groupIndex, tracks, C.SELECTION_REASON_MANUAL, /* data= */ 0); + this(groupIndex, tracks, TrackSelection.TYPE_UNSET); } /** + * Constructs a {@code SelectionOverride} of the given type to override tracks of a group. + * * @param groupIndex The overriding track group index. * @param tracks The overriding track indices within the track group. - * @param reason The reason for the override. One of the {@link C} SELECTION_REASON_ constants. - * @param data Optional data associated with this override. + * @param type The type that will be returned from {@link TrackSelection#getType()}. */ - public SelectionOverride(int groupIndex, int[] tracks, int reason, int data) { + public SelectionOverride(int groupIndex, int[] tracks, @TrackSelection.Type int type) { this.groupIndex = groupIndex; this.tracks = Arrays.copyOf(tracks, tracks.length); this.length = tracks.length; - this.reason = reason; - this.data = data; + this.type = type; Arrays.sort(this.tracks); } - /* package */ SelectionOverride(Parcel in) { - groupIndex = in.readInt(); - length = in.readByte(); - tracks = new int[length]; - in.readIntArray(tracks); - reason = in.readInt(); - data = in.readInt(); - } - /** Returns whether this override contains the specified track index. */ public boolean containsTrack(int track) { for (int overrideTrack : tracks) { @@ -1392,8 +2081,7 @@ public boolean containsTrack(int track) { @Override public int hashCode() { int hash = 31 * groupIndex + Arrays.hashCode(tracks); - hash = 31 * hash + reason; - return 31 * hash + data; + return 31 * hash + type; } @Override @@ -1407,252 +2095,319 @@ public boolean equals(@Nullable Object obj) { SelectionOverride other = (SelectionOverride) obj; return groupIndex == other.groupIndex && Arrays.equals(tracks, other.tracks) - && reason == other.reason - && data == other.data; + && type == other.type; } - // Parcelable implementation. + // Bundleable implementation. - @Override - public int describeContents() { - return 0; - } + private static final String FIELD_GROUP_INDEX = Util.intToStringMaxRadix(0); + private static final String FIELD_TRACKS = Util.intToStringMaxRadix(1); + private static final String FIELD_TRACK_TYPE = Util.intToStringMaxRadix(2); @Override - public void writeToParcel(Parcel dest, int flags) { - dest.writeInt(groupIndex); - dest.writeInt(tracks.length); - dest.writeIntArray(tracks); - dest.writeInt(reason); - dest.writeInt(data); - } - - public static final Parcelable.Creator CREATOR = - new Parcelable.Creator() { - - @Override - public SelectionOverride createFromParcel(Parcel in) { - return new SelectionOverride(in); - } - - @Override - public SelectionOverride[] newArray(int size) { - return new SelectionOverride[size]; - } + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putInt(FIELD_GROUP_INDEX, groupIndex); + bundle.putIntArray(FIELD_TRACKS, tracks); + bundle.putInt(FIELD_TRACK_TYPE, type); + return bundle; + } + + /** Object that can restore {@code SelectionOverride} from a {@link Bundle}. */ + public static final Creator CREATOR = + bundle -> { + int groupIndex = bundle.getInt(FIELD_GROUP_INDEX, -1); + @Nullable int[] tracks = bundle.getIntArray(FIELD_TRACKS); + int trackType = bundle.getInt(FIELD_TRACK_TYPE, -1); + Assertions.checkArgument(groupIndex >= 0 && trackType >= 0); + Assertions.checkNotNull(tracks); + return new SelectionOverride(groupIndex, tracks, trackType); }; } + /** + * The extent to which tracks are eligible for selection. 
One of {@link + * #SELECTION_ELIGIBILITY_NO}, {@link #SELECTION_ELIGIBILITY_FIXED} or {@link + * #SELECTION_ELIGIBILITY_ADAPTIVE}. + */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({SELECTION_ELIGIBILITY_NO, SELECTION_ELIGIBILITY_FIXED, SELECTION_ELIGIBILITY_ADAPTIVE}) + protected @interface SelectionEligibility {} + + /** Track is not eligible for selection. */ + protected static final int SELECTION_ELIGIBILITY_NO = 0; + /** Track is eligible for a fixed selection with one track. */ + protected static final int SELECTION_ELIGIBILITY_FIXED = 1; + /** + * Track is eligible for both a fixed selection and as part of an adaptive selection with multiple + * tracks. + */ + protected static final int SELECTION_ELIGIBILITY_ADAPTIVE = 2; + /** * If a dimension (i.e. width or height) of a video is greater or equal to this fraction of the * corresponding viewport dimension, then the video is considered as filling the viewport (in that * dimension). */ private static final float FRACTION_TO_CONSIDER_FULLSCREEN = 0.98f; - private static final int[] NO_TRACKS = new int[0]; - private static final int WITHIN_RENDERER_CAPABILITIES_BONUS = 1000; - private final TrackSelection.Factory trackSelectionFactory; - private final AtomicReference parametersReference; + /** Ordering of two format values. A known value is considered greater than Format#NO_VALUE. */ + private static final Ordering FORMAT_VALUE_ORDERING = + Ordering.from( + (first, second) -> + first == Format.NO_VALUE + ? (second == Format.NO_VALUE ? 0 : -1) + : (second == Format.NO_VALUE ? 1 : (first - second))); + /** Ordering where all elements are equal. */ + private static final Ordering NO_ORDER = Ordering.from((first, second) -> 0); + + private final Object lock; + @Nullable public final Context context; + private final ExoTrackSelection.Factory trackSelectionFactory; + private final boolean deviceIsTV; + + @GuardedBy("lock") + private Parameters parameters; + + @GuardedBy("lock") + @Nullable + private SpatializerWrapperV32 spatializer; - private boolean allowMultipleAdaptiveSelections; + @GuardedBy("lock") + private AudioAttributes audioAttributes; - /** @deprecated Use {@link #DefaultTrackSelector(Context)} instead. */ + /** + * @deprecated Use {@link #DefaultTrackSelector(Context)} instead. + */ @Deprecated - @SuppressWarnings("deprecation") public DefaultTrackSelector() { - this(new AdaptiveTrackSelection.Factory()); + this(Parameters.DEFAULT_WITHOUT_CONTEXT, new AdaptiveTrackSelection.Factory()); } /** - * @deprecated Use {@link #DefaultTrackSelector(Context)} instead. The bandwidth meter should be - * passed directly to the player in {@link - * com.google.android.exoplayer2.SimpleExoPlayer.Builder}. + * @param context Any {@link Context}. */ - @Deprecated - @SuppressWarnings("deprecation") - public DefaultTrackSelector(BandwidthMeter bandwidthMeter) { - this(new AdaptiveTrackSelection.Factory(bandwidthMeter)); - } - - /** @deprecated Use {@link #DefaultTrackSelector(Context, TrackSelection.Factory)}. */ - @Deprecated - public DefaultTrackSelector(TrackSelection.Factory trackSelectionFactory) { - this(Parameters.DEFAULT_WITHOUT_CONTEXT, trackSelectionFactory); - } - - /** @param context Any {@link Context}. */ public DefaultTrackSelector(Context context) { this(context, new AdaptiveTrackSelection.Factory()); } /** * @param context Any {@link Context}. - * @param trackSelectionFactory A factory for {@link TrackSelection}s. + * @param trackSelectionFactory A factory for {@link ExoTrackSelection}s. 
*/ - public DefaultTrackSelector(Context context, TrackSelection.Factory trackSelectionFactory) { - this(Parameters.getDefaults(context), trackSelectionFactory); + public DefaultTrackSelector(Context context, ExoTrackSelection.Factory trackSelectionFactory) { + this(context, Parameters.getDefaults(context), trackSelectionFactory); } /** - * @param parameters Initial {@link Parameters}. - * @param trackSelectionFactory A factory for {@link TrackSelection}s. + * @param context Any {@link Context}. + * @param parameters Initial {@link TrackSelectionParameters}. */ - public DefaultTrackSelector(Parameters parameters, TrackSelection.Factory trackSelectionFactory) { - this.trackSelectionFactory = trackSelectionFactory; - parametersReference = new AtomicReference<>(parameters); + public DefaultTrackSelector(Context context, TrackSelectionParameters parameters) { + this(context, parameters, new AdaptiveTrackSelection.Factory()); } /** - * Atomically sets the provided parameters for track selection. - * - * @param parameters The parameters for track selection. + * @deprecated Use {@link #DefaultTrackSelector(Context, TrackSelectionParameters, + * ExoTrackSelection.Factory)} */ - public void setParameters(Parameters parameters) { - Assertions.checkNotNull(parameters); - if (!parametersReference.getAndSet(parameters).equals(parameters)) { - invalidate(); - } + @Deprecated + public DefaultTrackSelector( + TrackSelectionParameters parameters, ExoTrackSelection.Factory trackSelectionFactory) { + this(parameters, trackSelectionFactory, /* context= */ null); } /** - * Atomically sets the provided parameters for track selection. - * - * @param parametersBuilder A builder from which to obtain the parameters for track selection. + * @param context Any {@link Context}. + * @param parameters Initial {@link TrackSelectionParameters}. + * @param trackSelectionFactory A factory for {@link ExoTrackSelection}s. */ - public void setParameters(ParametersBuilder parametersBuilder) { - setParameters(parametersBuilder.build()); + public DefaultTrackSelector( + Context context, + TrackSelectionParameters parameters, + ExoTrackSelection.Factory trackSelectionFactory) { + this(parameters, trackSelectionFactory, context); } /** - * Gets the current selection parameters. + * Exists for backwards compatibility so that the deprecated constructor {@link + * #DefaultTrackSelector(TrackSelectionParameters, ExoTrackSelection.Factory)} can initialize + * {@code context} with {@code null} while we don't have a public constructor with a {@code + * Nullable context}. * - * @return The current selection parameters. + * @param context Any {@link Context}. + * @param parameters Initial {@link TrackSelectionParameters}. + * @param trackSelectionFactory A factory for {@link ExoTrackSelection}s. */ - public Parameters getParameters() { - return parametersReference.get(); - } - - /** Returns a new {@link ParametersBuilder} initialized with the current selection parameters. */ - public ParametersBuilder buildUponParameters() { - return getParameters().buildUpon(); - } - - /** @deprecated Use {@link ParametersBuilder#setRendererDisabled(int, boolean)}. */ - @Deprecated - public final void setRendererDisabled(int rendererIndex, boolean disabled) { - setParameters(buildUponParameters().setRendererDisabled(rendererIndex, disabled)); + private DefaultTrackSelector( + TrackSelectionParameters parameters, + ExoTrackSelection.Factory trackSelectionFactory, + @Nullable Context context) { + this.lock = new Object(); + this.context = context != null ? 
context.getApplicationContext() : null; + this.trackSelectionFactory = trackSelectionFactory; + if (parameters instanceof Parameters) { + this.parameters = (Parameters) parameters; + } else { + Parameters defaultParameters = + context == null ? Parameters.DEFAULT_WITHOUT_CONTEXT : Parameters.getDefaults(context); + this.parameters = defaultParameters.buildUpon().set(parameters).build(); + } + this.audioAttributes = AudioAttributes.DEFAULT; + this.deviceIsTV = context != null && Util.isTv(context); + if (!deviceIsTV && context != null && Util.SDK_INT >= 32) { + spatializer = SpatializerWrapperV32.tryCreateInstance(context); + } + if (this.parameters.constrainAudioChannelCountToDeviceCapabilities && context == null) { + Log.w(TAG, AUDIO_CHANNEL_COUNT_CONSTRAINTS_WARN_MESSAGE); + } } - /** @deprecated Use {@link Parameters#getRendererDisabled(int)}. */ - @Deprecated - public final boolean getRendererDisabled(int rendererIndex) { - return getParameters().getRendererDisabled(rendererIndex); + @Override + public void release() { + synchronized (lock) { + if (Util.SDK_INT >= 32 && spatializer != null) { + spatializer.release(); + } + } + super.release(); } - /** - * @deprecated Use {@link ParametersBuilder#setSelectionOverride(int, TrackGroupArray, - * SelectionOverride)}. - */ - @Deprecated - public final void setSelectionOverride( - int rendererIndex, TrackGroupArray groups, @Nullable SelectionOverride override) { - setParameters(buildUponParameters().setSelectionOverride(rendererIndex, groups, override)); + @Override + public Parameters getParameters() { + synchronized (lock) { + return parameters; + } } - /** @deprecated Use {@link Parameters#hasSelectionOverride(int, TrackGroupArray)}. */ - @Deprecated - public final boolean hasSelectionOverride(int rendererIndex, TrackGroupArray groups) { - return getParameters().hasSelectionOverride(rendererIndex, groups); + @Override + public boolean isSetParametersSupported() { + return true; } - /** @deprecated Use {@link Parameters#getSelectionOverride(int, TrackGroupArray)}. */ - @Deprecated - @Nullable - public final SelectionOverride getSelectionOverride(int rendererIndex, TrackGroupArray groups) { - return getParameters().getSelectionOverride(rendererIndex, groups); + @Override + public void setParameters(TrackSelectionParameters parameters) { + if (parameters instanceof Parameters) { + setParametersInternal((Parameters) parameters); + } + // Only add the fields of `TrackSelectionParameters` to `parameters`. + Parameters mergedParameters = new Parameters.Builder(getParameters()).set(parameters).build(); + setParametersInternal(mergedParameters); } - /** @deprecated Use {@link ParametersBuilder#clearSelectionOverride(int, TrackGroupArray)}. */ - @Deprecated - public final void clearSelectionOverride(int rendererIndex, TrackGroupArray groups) { - setParameters(buildUponParameters().clearSelectionOverride(rendererIndex, groups)); + @Override + public void setAudioAttributes(AudioAttributes audioAttributes) { + boolean audioAttributesChanged; + synchronized (lock) { + audioAttributesChanged = !this.audioAttributes.equals(audioAttributes); + this.audioAttributes = audioAttributes; + } + if (audioAttributesChanged) { + maybeInvalidateForAudioChannelCountConstraints(); + } } - /** @deprecated Use {@link ParametersBuilder#clearSelectionOverrides(int)}. */ + /** + * @deprecated Use {@link #setParameters(Parameters.Builder)} instead. 
+ */ @Deprecated - public final void clearSelectionOverrides(int rendererIndex) { - setParameters(buildUponParameters().clearSelectionOverrides(rendererIndex)); + @SuppressWarnings("deprecation") // Allow setting the deprecated builder + public void setParameters(ParametersBuilder parametersBuilder) { + setParametersInternal(parametersBuilder.build()); } - /** @deprecated Use {@link ParametersBuilder#clearSelectionOverrides()}. */ - @Deprecated - public final void clearSelectionOverrides() { - setParameters(buildUponParameters().clearSelectionOverrides()); + /** + * Atomically sets the provided parameters for track selection. + * + * @param parametersBuilder A builder from which to obtain the parameters for track selection. + */ + public void setParameters(Parameters.Builder parametersBuilder) { + setParametersInternal(parametersBuilder.build()); } - /** @deprecated Use {@link ParametersBuilder#setTunnelingAudioSessionId(int)}. */ - @Deprecated - public void setTunnelingAudioSessionId(int tunnelingAudioSessionId) { - setParameters(buildUponParameters().setTunnelingAudioSessionId(tunnelingAudioSessionId)); + /** Returns a new {@link Parameters.Builder} initialized with the current selection parameters. */ + public Parameters.Builder buildUponParameters() { + return getParameters().buildUpon(); } /** - * Allows the creation of multiple adaptive track selections. + * Atomically sets the provided {@link Parameters} for track selection. * - *

      This method is experimental, and will be renamed or removed in a future release. + * @param parameters The parameters for track selection. */ - public void experimental_allowMultipleAdaptiveSelections() { - this.allowMultipleAdaptiveSelections = true; + private void setParametersInternal(Parameters parameters) { + Assertions.checkNotNull(parameters); + boolean parametersChanged; + synchronized (lock) { + parametersChanged = !this.parameters.equals(parameters); + this.parameters = parameters; + } + + if (parametersChanged) { + if (parameters.constrainAudioChannelCountToDeviceCapabilities && context == null) { + Log.w(TAG, AUDIO_CHANNEL_COUNT_CONSTRAINTS_WARN_MESSAGE); + } + invalidate(); + } } // MappingTrackSelector implementation. @Override - protected final Pair<@NullableType RendererConfiguration[], @NullableType TrackSelection[]> + protected final Pair<@NullableType RendererConfiguration[], @NullableType ExoTrackSelection[]> selectTracks( MappedTrackInfo mappedTrackInfo, @Capabilities int[][][] rendererFormatSupports, - @AdaptiveSupport int[] rendererMixedMimeTypeAdaptationSupports) + @AdaptiveSupport int[] rendererMixedMimeTypeAdaptationSupport, + MediaPeriodId mediaPeriodId, + Timeline timeline) throws ExoPlaybackException { - Parameters params = parametersReference.get(); + Parameters parameters; + synchronized (lock) { + parameters = this.parameters; + if (parameters.constrainAudioChannelCountToDeviceCapabilities + && Util.SDK_INT >= 32 + && spatializer != null) { + // Initialize the spatializer now so we can get a reference to the playback looper with + // Looper.myLooper(). + spatializer.ensureInitialized(this, checkStateNotNull(Looper.myLooper())); + } + } int rendererCount = mappedTrackInfo.getRendererCount(); - TrackSelection.@NullableType Definition[] definitions = + ExoTrackSelection.@NullableType Definition[] definitions = selectAllTracks( mappedTrackInfo, rendererFormatSupports, - rendererMixedMimeTypeAdaptationSupports, - params); + rendererMixedMimeTypeAdaptationSupport, + parameters); + + applyTrackSelectionOverrides(mappedTrackInfo, parameters, definitions); + applyLegacyRendererOverrides(mappedTrackInfo, parameters, definitions); - // Apply track disabling and overriding. + // Disable renderers if needed. for (int i = 0; i < rendererCount; i++) { - if (params.getRendererDisabled(i)) { + @C.TrackType int rendererType = mappedTrackInfo.getRendererType(i); + if (parameters.getRendererDisabled(i) + || parameters.disabledTrackTypes.contains(rendererType)) { definitions[i] = null; - continue; - } - TrackGroupArray rendererTrackGroups = mappedTrackInfo.getTrackGroups(i); - if (params.hasSelectionOverride(i, rendererTrackGroups)) { - SelectionOverride override = params.getSelectionOverride(i, rendererTrackGroups); - definitions[i] = - override == null - ? null - : new TrackSelection.Definition( - rendererTrackGroups.get(override.groupIndex), - override.tracks, - override.reason, - override.data); } } @NullableType - TrackSelection[] rendererTrackSelections = - trackSelectionFactory.createTrackSelections(definitions, getBandwidthMeter()); + ExoTrackSelection[] rendererTrackSelections = + trackSelectionFactory.createTrackSelections( + definitions, getBandwidthMeter(), mediaPeriodId, timeline); // Initialize the renderer configurations to the default configuration for all renderers with // selections, and null otherwise. 
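// Illustrative sketch (not part of the patch): the "compare under lock, invalidate outside the
// lock" pattern used by the new setParametersInternal() and setAudioAttributes() shown above.
// SettingsHolder and onChanged are hypothetical names; only the locking/notification shape
// mirrors the diff. The original diff continues below.
final class SettingsHolder<T> {
  private final Object lock = new Object();
  private T value;

  SettingsHolder(T initialValue) {
    this.value = initialValue;
  }

  void set(T newValue, Runnable onChanged) {
    boolean changed;
    synchronized (lock) {
      changed = !value.equals(newValue);
      value = newValue;
    }
    if (changed) {
      onChanged.run(); // e.g. DefaultTrackSelector.invalidate()
    }
  }
}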
- @NullableType RendererConfiguration[] rendererConfigurations = - new RendererConfiguration[rendererCount]; + @NullableType + RendererConfiguration[] rendererConfigurations = new RendererConfiguration[rendererCount]; for (int i = 0; i < rendererCount; i++) { - boolean forceRendererDisabled = params.getRendererDisabled(i); + @C.TrackType int rendererType = mappedTrackInfo.getRendererType(i); + boolean forceRendererDisabled = + parameters.getRendererDisabled(i) || parameters.disabledTrackTypes.contains(rendererType); boolean rendererEnabled = !forceRendererDisabled && (mappedTrackInfo.getRendererType(i) == C.TRACK_TYPE_NONE @@ -1661,12 +2416,10 @@ public void experimental_allowMultipleAdaptiveSelections() { } // Configure audio and video renderers to use tunneling if appropriate. - maybeConfigureRenderersForTunneling( - mappedTrackInfo, - rendererFormatSupports, - rendererConfigurations, - rendererTrackSelections, - params.tunnelingAudioSessionId); + if (parameters.tunnelingEnabled) { + maybeConfigureRenderersForTunneling( + mappedTrackInfo, rendererFormatSupports, rendererConfigurations, rendererTrackSelections); + } return Pair.create(rendererConfigurations, rendererTrackSelections); } @@ -1674,8 +2427,9 @@ public void experimental_allowMultipleAdaptiveSelections() { // Track selection prior to overrides and disabled flags being applied. /** - * Called from {@link #selectTracks(MappedTrackInfo, int[][][], int[])} to make a track selection - * for each renderer, prior to overrides and disabled flags being applied. + * Called from {@link #selectTracks(MappedTrackInfo, int[][][], int[], MediaPeriodId, Timeline)} + * to make a track selection for each renderer, prior to overrides and disabled flags being + * applied. * *

      The implementation should not account for overrides and disabled flags. Track selections * generated by this method will be overridden to account for these properties. @@ -1685,104 +2439,62 @@ public void experimental_allowMultipleAdaptiveSelections() { * renderer, track group and track (in that order). * @param rendererMixedMimeTypeAdaptationSupports The {@link AdaptiveSupport} for mixed MIME type * adaptation for the renderer. - * @return The {@link TrackSelection.Definition}s for the renderers. A null entry indicates no + * @return The {@link ExoTrackSelection.Definition}s for the renderers. A null entry indicates no * selection was made. * @throws ExoPlaybackException If an error occurs while selecting the tracks. */ - protected TrackSelection.@NullableType Definition[] selectAllTracks( + protected ExoTrackSelection.@NullableType Definition[] selectAllTracks( MappedTrackInfo mappedTrackInfo, @Capabilities int[][][] rendererFormatSupports, @AdaptiveSupport int[] rendererMixedMimeTypeAdaptationSupports, Parameters params) throws ExoPlaybackException { int rendererCount = mappedTrackInfo.getRendererCount(); - TrackSelection.@NullableType Definition[] definitions = - new TrackSelection.Definition[rendererCount]; + ExoTrackSelection.@NullableType Definition[] definitions = + new ExoTrackSelection.Definition[rendererCount]; - boolean seenVideoRendererWithMappedTracks = false; - boolean selectedVideoTracks = false; - for (int i = 0; i < rendererCount; i++) { - if (C.TRACK_TYPE_VIDEO == mappedTrackInfo.getRendererType(i)) { - if (!selectedVideoTracks) { - definitions[i] = - selectVideoTrack( - mappedTrackInfo.getTrackGroups(i), - rendererFormatSupports[i], - rendererMixedMimeTypeAdaptationSupports[i], - params, - /* enableAdaptiveTrackSelection= */ true); - selectedVideoTracks = definitions[i] != null; - } - seenVideoRendererWithMappedTracks |= mappedTrackInfo.getTrackGroups(i).length > 0; - } + @Nullable + Pair selectedVideo = + selectVideoTrack( + mappedTrackInfo, + rendererFormatSupports, + rendererMixedMimeTypeAdaptationSupports, + params); + if (selectedVideo != null) { + definitions[selectedVideo.second] = selectedVideo.first; } - AudioTrackScore selectedAudioTrackScore = null; - String selectedAudioLanguage = null; - int selectedAudioRendererIndex = C.INDEX_UNSET; - for (int i = 0; i < rendererCount; i++) { - if (C.TRACK_TYPE_AUDIO == mappedTrackInfo.getRendererType(i)) { - boolean enableAdaptiveTrackSelection = - allowMultipleAdaptiveSelections || !seenVideoRendererWithMappedTracks; - Pair audioSelection = - selectAudioTrack( - mappedTrackInfo.getTrackGroups(i), - rendererFormatSupports[i], - rendererMixedMimeTypeAdaptationSupports[i], - params, - enableAdaptiveTrackSelection); - if (audioSelection != null - && (selectedAudioTrackScore == null - || audioSelection.second.compareTo(selectedAudioTrackScore) > 0)) { - if (selectedAudioRendererIndex != C.INDEX_UNSET) { - // We've already made a selection for another audio renderer, but it had a lower - // score. Clear the selection for that renderer. - definitions[selectedAudioRendererIndex] = null; - } - TrackSelection.Definition definition = audioSelection.first; - definitions[i] = definition; - // We assume that audio tracks in the same group have matching language. 
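// Illustrative sketch (not part of the patch): the pattern used by the removed per-renderer
// audio/text selection loops above - keep the highest-scoring candidate seen so far and clear
// the definition recorded for a previously chosen renderer when a better-scoring one is found.
// The generic S stands in for AudioTrackScore/TextTrackScore and Object stands in for
// TrackSelection.Definition. The original diff continues below.
final class BestRendererPicker {
  /** Returns the index of the best-scoring renderer, or -1 if no renderer has a candidate. */
  static <S extends Comparable<S>> int pickBest(
      S[] scores, Object[] candidates, Object[] definitions) {
    int selectedIndex = -1;
    S selectedScore = null;
    for (int i = 0; i < scores.length; i++) {
      if (scores[i] == null) {
        continue; // no candidate selection for this renderer
      }
      if (selectedScore == null || scores[i].compareTo(selectedScore) > 0) {
        if (selectedIndex != -1) {
          definitions[selectedIndex] = null; // clear the lower-scoring earlier selection
        }
        definitions[i] = candidates[i];
        selectedScore = scores[i];
        selectedIndex = i;
      }
    }
    return selectedIndex;
  }
}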
- selectedAudioLanguage = definition.group.getFormat(definition.tracks[0]).language; - selectedAudioTrackScore = audioSelection.second; - selectedAudioRendererIndex = i; - } - } + @Nullable + Pair selectedAudio = + selectAudioTrack( + mappedTrackInfo, + rendererFormatSupports, + rendererMixedMimeTypeAdaptationSupports, + params); + if (selectedAudio != null) { + definitions[selectedAudio.second] = selectedAudio.first; + } + + @Nullable + String selectedAudioLanguage = + selectedAudio == null + ? null + : selectedAudio.first.group.getFormat(selectedAudio.first.tracks[0]).language; + @Nullable + Pair selectedText = + selectTextTrack(mappedTrackInfo, rendererFormatSupports, params, selectedAudioLanguage); + if (selectedText != null) { + definitions[selectedText.second] = selectedText.first; } - TextTrackScore selectedTextTrackScore = null; - int selectedTextRendererIndex = C.INDEX_UNSET; for (int i = 0; i < rendererCount; i++) { int trackType = mappedTrackInfo.getRendererType(i); - switch (trackType) { - case C.TRACK_TYPE_VIDEO: - case C.TRACK_TYPE_AUDIO: - // Already done. Do nothing. - break; - case C.TRACK_TYPE_TEXT: - Pair textSelection = - selectTextTrack( - mappedTrackInfo.getTrackGroups(i), - rendererFormatSupports[i], - params, - selectedAudioLanguage); - if (textSelection != null - && (selectedTextTrackScore == null - || textSelection.second.compareTo(selectedTextTrackScore) > 0)) { - if (selectedTextRendererIndex != C.INDEX_UNSET) { - // We've already made a selection for another text renderer, but it had a lower score. - // Clear the selection for that renderer. - definitions[selectedTextRendererIndex] = null; - } - definitions[i] = textSelection.first; - selectedTextTrackScore = textSelection.second; - selectedTextRendererIndex = i; - } - break; - default: - definitions[i] = - selectOtherTrack( - trackType, mappedTrackInfo.getTrackGroups(i), rendererFormatSupports[i], params); - break; + if (trackType != C.TRACK_TYPE_VIDEO + && trackType != C.TRACK_TYPE_AUDIO + && trackType != C.TRACK_TYPE_TEXT) { + definitions[i] = + selectOtherTrack( + trackType, mappedTrackInfo.getTrackGroups(i), rendererFormatSupports[i], params); } } @@ -1793,555 +2505,180 @@ public void experimental_allowMultipleAdaptiveSelections() { /** * Called by {@link #selectAllTracks(MappedTrackInfo, int[][][], int[], Parameters)} to create a - * {@link TrackSelection} for a video renderer. + * {@link ExoTrackSelection.Definition} for a video track selection. * - * @param groups The {@link TrackGroupArray} mapped to the renderer. - * @param formatSupports The {@link Capabilities} for each mapped track, indexed by renderer, - * track group and track (in that order). - * @param mixedMimeTypeAdaptationSupports The {@link AdaptiveSupport} for mixed MIME type - * adaptation for the renderer. + * @param mappedTrackInfo Mapped track information. + * @param rendererFormatSupports The {@link Capabilities} for each mapped track, indexed by + * renderer, track group and track (in that order). + * @param mixedMimeTypeSupports The {@link AdaptiveSupport} for mixed MIME type adaptation for the + * renderer. * @param params The selector's current constraint parameters. - * @param enableAdaptiveTrackSelection Whether adaptive track selection is allowed. - * @return The {@link TrackSelection.Definition} for the renderer, or null if no selection was - * made. + * @return A pair of the selected {@link ExoTrackSelection.Definition} and the corresponding + * renderer index, or null if no selection was made. 
* @throws ExoPlaybackException If an error occurs while selecting the tracks. */ @Nullable - protected TrackSelection.Definition selectVideoTrack( - TrackGroupArray groups, - @Capabilities int[][] formatSupports, - @AdaptiveSupport int mixedMimeTypeAdaptationSupports, - Parameters params, - boolean enableAdaptiveTrackSelection) + protected Pair selectVideoTrack( + MappedTrackInfo mappedTrackInfo, + @Capabilities int[][][] rendererFormatSupports, + @AdaptiveSupport int[] mixedMimeTypeSupports, + Parameters params) throws ExoPlaybackException { - TrackSelection.Definition definition = null; - if (!params.forceHighestSupportedBitrate - && !params.forceLowestBitrate - && enableAdaptiveTrackSelection) { - definition = - selectAdaptiveVideoTrack(groups, formatSupports, mixedMimeTypeAdaptationSupports, params); - } - if (definition == null) { - definition = selectFixedVideoTrack(groups, formatSupports, params); - } - return definition; - } - - @Nullable - private static TrackSelection.Definition selectAdaptiveVideoTrack( - TrackGroupArray groups, - @Capabilities int[][] formatSupport, - @AdaptiveSupport int mixedMimeTypeAdaptationSupports, - Parameters params) { - int requiredAdaptiveSupport = - params.allowVideoNonSeamlessAdaptiveness - ? (RendererCapabilities.ADAPTIVE_NOT_SEAMLESS | RendererCapabilities.ADAPTIVE_SEAMLESS) - : RendererCapabilities.ADAPTIVE_SEAMLESS; - boolean allowMixedMimeTypes = - params.allowVideoMixedMimeTypeAdaptiveness - && (mixedMimeTypeAdaptationSupports & requiredAdaptiveSupport) != 0; - for (int i = 0; i < groups.length; i++) { - TrackGroup group = groups.get(i); - int[] adaptiveTracks = - getAdaptiveVideoTracksForGroup( - group, - formatSupport[i], - allowMixedMimeTypes, - requiredAdaptiveSupport, - params.maxVideoWidth, - params.maxVideoHeight, - params.maxVideoFrameRate, - params.maxVideoBitrate, - params.viewportWidth, - params.viewportHeight, - params.viewportOrientationMayChange); - if (adaptiveTracks.length > 0) { - return new TrackSelection.Definition(group, adaptiveTracks); - } - } - return null; - } - - private static int[] getAdaptiveVideoTracksForGroup( - TrackGroup group, - @Capabilities int[] formatSupport, - boolean allowMixedMimeTypes, - int requiredAdaptiveSupport, - int maxVideoWidth, - int maxVideoHeight, - int maxVideoFrameRate, - int maxVideoBitrate, - int viewportWidth, - int viewportHeight, - boolean viewportOrientationMayChange) { - if (group.length < 2) { - return NO_TRACKS; - } - - List selectedTrackIndices = getViewportFilteredTrackIndices(group, viewportWidth, - viewportHeight, viewportOrientationMayChange); - if (selectedTrackIndices.size() < 2) { - return NO_TRACKS; - } - - String selectedMimeType = null; - if (!allowMixedMimeTypes) { - // Select the mime type for which we have the most adaptive tracks. 
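// Illustrative sketch (not part of the patch): the idea behind the comment above - when mixed
// MIME type adaptation is not allowed, pick the sample MIME type that yields the most adaptive
// candidates. This is a simplification: the removed code that follows counts only tracks that
// also pass the capability and size/bitrate checks. The original diff continues below.
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class MimeTypePicker {
  static String mimeTypeWithMostTracks(List<String> candidateMimeTypes) {
    Map<String, Integer> counts = new HashMap<>();
    String selected = null;
    int selectedCount = 0;
    for (String mimeType : candidateMimeTypes) {
      int count = counts.merge(mimeType, 1, Integer::sum);
      if (count > selectedCount) {
        selected = mimeType;
        selectedCount = count;
      }
    }
    return selected; // null when there are no candidates
  }
}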
- HashSet<@NullableType String> seenMimeTypes = new HashSet<>(); - int selectedMimeTypeTrackCount = 0; - for (int i = 0; i < selectedTrackIndices.size(); i++) { - int trackIndex = selectedTrackIndices.get(i); - String sampleMimeType = group.getFormat(trackIndex).sampleMimeType; - if (seenMimeTypes.add(sampleMimeType)) { - int countForMimeType = - getAdaptiveVideoTrackCountForMimeType( - group, - formatSupport, - requiredAdaptiveSupport, - sampleMimeType, - maxVideoWidth, - maxVideoHeight, - maxVideoFrameRate, - maxVideoBitrate, - selectedTrackIndices); - if (countForMimeType > selectedMimeTypeTrackCount) { - selectedMimeType = sampleMimeType; - selectedMimeTypeTrackCount = countForMimeType; - } - } - } - } - - // Filter by the selected mime type. - filterAdaptiveVideoTrackCountForMimeType( - group, - formatSupport, - requiredAdaptiveSupport, - selectedMimeType, - maxVideoWidth, - maxVideoHeight, - maxVideoFrameRate, - maxVideoBitrate, - selectedTrackIndices); - - return selectedTrackIndices.size() < 2 ? NO_TRACKS : Util.toArray(selectedTrackIndices); - } - - private static int getAdaptiveVideoTrackCountForMimeType( - TrackGroup group, - @Capabilities int[] formatSupport, - int requiredAdaptiveSupport, - @Nullable String mimeType, - int maxVideoWidth, - int maxVideoHeight, - int maxVideoFrameRate, - int maxVideoBitrate, - List selectedTrackIndices) { - int adaptiveTrackCount = 0; - for (int i = 0; i < selectedTrackIndices.size(); i++) { - int trackIndex = selectedTrackIndices.get(i); - if (isSupportedAdaptiveVideoTrack( - group.getFormat(trackIndex), - mimeType, - formatSupport[trackIndex], - requiredAdaptiveSupport, - maxVideoWidth, - maxVideoHeight, - maxVideoFrameRate, - maxVideoBitrate)) { - adaptiveTrackCount++; - } - } - return adaptiveTrackCount; - } - - private static void filterAdaptiveVideoTrackCountForMimeType( - TrackGroup group, - @Capabilities int[] formatSupport, - int requiredAdaptiveSupport, - @Nullable String mimeType, - int maxVideoWidth, - int maxVideoHeight, - int maxVideoFrameRate, - int maxVideoBitrate, - List selectedTrackIndices) { - for (int i = selectedTrackIndices.size() - 1; i >= 0; i--) { - int trackIndex = selectedTrackIndices.get(i); - if (!isSupportedAdaptiveVideoTrack( - group.getFormat(trackIndex), - mimeType, - formatSupport[trackIndex], - requiredAdaptiveSupport, - maxVideoWidth, - maxVideoHeight, - maxVideoFrameRate, - maxVideoBitrate)) { - selectedTrackIndices.remove(i); - } - } - } - - private static boolean isSupportedAdaptiveVideoTrack( - Format format, - @Nullable String mimeType, - @Capabilities int formatSupport, - int requiredAdaptiveSupport, - int maxVideoWidth, - int maxVideoHeight, - int maxVideoFrameRate, - int maxVideoBitrate) { - if ((format.roleFlags & C.ROLE_FLAG_TRICK_PLAY) != 0) { - // Ignore trick-play tracks for now. 
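// Illustrative sketch (not part of the patch): the constraint test used by the removed
// isSupportedAdaptiveVideoTrack() whose body continues below - an unknown value
// (Format.NO_VALUE, i.e. -1) is always treated as within the maximum constraint. The method
// names here are hypothetical stand-ins.
final class ConstraintCheck {
  static final int NO_VALUE = -1; // mirrors Format.NO_VALUE

  static boolean withinMax(int value, int max) {
    return value == NO_VALUE || value <= max;
  }

  static boolean withinVideoConstraints(
      int width, int height, float frameRate, int bitrate,
      int maxWidth, int maxHeight, float maxFrameRate, int maxBitrate) {
    return withinMax(width, maxWidth)
        && withinMax(height, maxHeight)
        && (frameRate == NO_VALUE || frameRate <= maxFrameRate)
        && withinMax(bitrate, maxBitrate);
  }
}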
- return false; - } - return isSupported(formatSupport, false) - && ((formatSupport & requiredAdaptiveSupport) != 0) - && (mimeType == null || Util.areEqual(format.sampleMimeType, mimeType)) - && (format.width == Format.NO_VALUE || format.width <= maxVideoWidth) - && (format.height == Format.NO_VALUE || format.height <= maxVideoHeight) - && (format.frameRate == Format.NO_VALUE || format.frameRate <= maxVideoFrameRate) - && (format.bitrate == Format.NO_VALUE || format.bitrate <= maxVideoBitrate); - } - - @Nullable - private static TrackSelection.Definition selectFixedVideoTrack( - TrackGroupArray groups, @Capabilities int[][] formatSupports, Parameters params) { - TrackGroup selectedGroup = null; - int selectedTrackIndex = 0; - int selectedTrackScore = 0; - int selectedBitrate = Format.NO_VALUE; - int selectedPixelCount = Format.NO_VALUE; - for (int groupIndex = 0; groupIndex < groups.length; groupIndex++) { - TrackGroup trackGroup = groups.get(groupIndex); - List selectedTrackIndices = getViewportFilteredTrackIndices(trackGroup, - params.viewportWidth, params.viewportHeight, params.viewportOrientationMayChange); - @Capabilities int[] trackFormatSupport = formatSupports[groupIndex]; - for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) { - Format format = trackGroup.getFormat(trackIndex); - if ((format.roleFlags & C.ROLE_FLAG_TRICK_PLAY) != 0) { - // Ignore trick-play tracks for now. - continue; - } - if (isSupported(trackFormatSupport[trackIndex], - params.exceedRendererCapabilitiesIfNecessary)) { - boolean isWithinConstraints = - selectedTrackIndices.contains(trackIndex) - && (format.width == Format.NO_VALUE || format.width <= params.maxVideoWidth) - && (format.height == Format.NO_VALUE || format.height <= params.maxVideoHeight) - && (format.frameRate == Format.NO_VALUE - || format.frameRate <= params.maxVideoFrameRate) - && (format.bitrate == Format.NO_VALUE - || format.bitrate <= params.maxVideoBitrate); - if (!isWithinConstraints && !params.exceedVideoConstraintsIfNecessary) { - // Track should not be selected. - continue; - } - int trackScore = isWithinConstraints ? 2 : 1; - boolean isWithinCapabilities = isSupported(trackFormatSupport[trackIndex], false); - if (isWithinCapabilities) { - trackScore += WITHIN_RENDERER_CAPABILITIES_BONUS; - } - boolean selectTrack = trackScore > selectedTrackScore; - if (trackScore == selectedTrackScore) { - int bitrateComparison = compareFormatValues(format.bitrate, selectedBitrate); - if (params.forceLowestBitrate && bitrateComparison != 0) { - // Use bitrate as a tie breaker, preferring the lower bitrate. - selectTrack = bitrateComparison < 0; - } else { - // Use the pixel count as a tie breaker (or bitrate if pixel counts are tied). If - // we're within constraints prefer a higher pixel count (or bitrate), else prefer a - // lower count (or bitrate). If still tied then prefer the first track (i.e. the one - // that's already selected). - int formatPixelCount = format.getPixelCount(); - int comparisonResult = formatPixelCount != selectedPixelCount - ? compareFormatValues(formatPixelCount, selectedPixelCount) - : compareFormatValues(format.bitrate, selectedBitrate); - selectTrack = isWithinCapabilities && isWithinConstraints - ? comparisonResult > 0 : comparisonResult < 0; - } - } - if (selectTrack) { - selectedGroup = trackGroup; - selectedTrackIndex = trackIndex; - selectedTrackScore = trackScore; - selectedBitrate = format.bitrate; - selectedPixelCount = format.getPixelCount(); - } - } - } - } - return selectedGroup == null - ? 
null - : new TrackSelection.Definition(selectedGroup, selectedTrackIndex); + return selectTracksForType( + C.TRACK_TYPE_VIDEO, + mappedTrackInfo, + rendererFormatSupports, + (int rendererIndex, TrackGroup group, @Capabilities int[] support) -> + VideoTrackInfo.createForTrackGroup( + rendererIndex, group, params, support, mixedMimeTypeSupports[rendererIndex]), + VideoTrackInfo::compareSelections); } // Audio track selection implementation. /** * Called by {@link #selectAllTracks(MappedTrackInfo, int[][][], int[], Parameters)} to create a - * {@link TrackSelection} for an audio renderer. + * {@link ExoTrackSelection.Definition} for an audio track selection. * - * @param groups The {@link TrackGroupArray} mapped to the renderer. - * @param formatSupports The {@link Capabilities} for each mapped track, indexed by renderer, - * track group and track (in that order). - * @param mixedMimeTypeAdaptationSupports The {@link AdaptiveSupport} for mixed MIME type + * @param mappedTrackInfo Mapped track information. + * @param rendererFormatSupports The {@link Capabilities} for each mapped track, indexed by + * renderer, track group and track (in that order). + * @param rendererMixedMimeTypeAdaptationSupports The {@link AdaptiveSupport} for mixed MIME type * adaptation for the renderer. * @param params The selector's current constraint parameters. - * @param enableAdaptiveTrackSelection Whether adaptive track selection is allowed. - * @return The {@link TrackSelection.Definition} and corresponding {@link AudioTrackScore}, or - * null if no selection was made. + * @return A pair of the selected {@link ExoTrackSelection.Definition} and the corresponding + * renderer index, or null if no selection was made. * @throws ExoPlaybackException If an error occurs while selecting the tracks. */ - @SuppressWarnings("unused") @Nullable - protected Pair selectAudioTrack( - TrackGroupArray groups, - @Capabilities int[][] formatSupports, - @AdaptiveSupport int mixedMimeTypeAdaptationSupports, - Parameters params, - boolean enableAdaptiveTrackSelection) + protected Pair selectAudioTrack( + MappedTrackInfo mappedTrackInfo, + @Capabilities int[][][] rendererFormatSupports, + @AdaptiveSupport int[] rendererMixedMimeTypeAdaptationSupports, + Parameters params) throws ExoPlaybackException { - int selectedTrackIndex = C.INDEX_UNSET; - int selectedGroupIndex = C.INDEX_UNSET; - AudioTrackScore selectedTrackScore = null; - for (int groupIndex = 0; groupIndex < groups.length; groupIndex++) { - TrackGroup trackGroup = groups.get(groupIndex); - @Capabilities int[] trackFormatSupport = formatSupports[groupIndex]; - for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) { - if (isSupported(trackFormatSupport[trackIndex], - params.exceedRendererCapabilitiesIfNecessary)) { - Format format = trackGroup.getFormat(trackIndex); - AudioTrackScore trackScore = - new AudioTrackScore(format, params, trackFormatSupport[trackIndex]); - if (!trackScore.isWithinConstraints && !params.exceedAudioConstraintsIfNecessary) { - // Track should not be selected. 
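// Illustrative sketch (not part of the patch): the scoring used by the removed
// selectFixedVideoTrack() above (and, in simpler form, by the old selectOtherTrack()) - a track
// within the parameter constraints scores 2, otherwise 1, and a large bonus is added when it is
// also within renderer capabilities, so capability support always outranks constraint
// satisfaction. The constant mirrors the removed WITHIN_RENDERER_CAPABILITIES_BONUS field.
final class FixedVideoScore {
  private static final int WITHIN_RENDERER_CAPABILITIES_BONUS = 1000;

  static int score(boolean isWithinConstraints, boolean isWithinCapabilities) {
    int trackScore = isWithinConstraints ? 2 : 1;
    if (isWithinCapabilities) {
      trackScore += WITHIN_RENDERER_CAPABILITIES_BONUS;
    }
    return trackScore;
  }
}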
- continue; - } - if (selectedTrackScore == null || trackScore.compareTo(selectedTrackScore) > 0) { - selectedGroupIndex = groupIndex; - selectedTrackIndex = trackIndex; - selectedTrackScore = trackScore; - } - } - } - } - - if (selectedGroupIndex == C.INDEX_UNSET) { - return null; - } - - TrackGroup selectedGroup = groups.get(selectedGroupIndex); - - TrackSelection.Definition definition = null; - if (!params.forceHighestSupportedBitrate - && !params.forceLowestBitrate - && enableAdaptiveTrackSelection) { - // If the group of the track with the highest score allows it, try to enable adaptation. - int[] adaptiveTracks = - getAdaptiveAudioTracks( - selectedGroup, - formatSupports[selectedGroupIndex], - params.maxAudioBitrate, - params.allowAudioMixedMimeTypeAdaptiveness, - params.allowAudioMixedSampleRateAdaptiveness, - params.allowAudioMixedChannelCountAdaptiveness); - if (adaptiveTracks.length > 0) { - definition = new TrackSelection.Definition(selectedGroup, adaptiveTracks); + boolean hasVideoRendererWithMappedTracks = false; + for (int i = 0; i < mappedTrackInfo.getRendererCount(); i++) { + if (C.TRACK_TYPE_VIDEO == mappedTrackInfo.getRendererType(i) + && mappedTrackInfo.getTrackGroups(i).length > 0) { + hasVideoRendererWithMappedTracks = true; + break; } } - if (definition == null) { - // We didn't make an adaptive selection, so make a fixed one instead. - definition = new TrackSelection.Definition(selectedGroup, selectedTrackIndex); - } - - return Pair.create(definition, Assertions.checkNotNull(selectedTrackScore)); - } - - private static int[] getAdaptiveAudioTracks( - TrackGroup group, - @Capabilities int[] formatSupport, - int maxAudioBitrate, - boolean allowMixedMimeTypeAdaptiveness, - boolean allowMixedSampleRateAdaptiveness, - boolean allowAudioMixedChannelCountAdaptiveness) { - int selectedConfigurationTrackCount = 0; - AudioConfigurationTuple selectedConfiguration = null; - HashSet seenConfigurationTuples = new HashSet<>(); - for (int i = 0; i < group.length; i++) { - Format format = group.getFormat(i); - AudioConfigurationTuple configuration = - new AudioConfigurationTuple( - format.channelCount, format.sampleRate, format.sampleMimeType); - if (seenConfigurationTuples.add(configuration)) { - int configurationCount = - getAdaptiveAudioTrackCount( + boolean hasVideoRendererWithMappedTracksFinal = hasVideoRendererWithMappedTracks; + return selectTracksForType( + C.TRACK_TYPE_AUDIO, + mappedTrackInfo, + rendererFormatSupports, + (int rendererIndex, TrackGroup group, @Capabilities int[] support) -> + AudioTrackInfo.createForTrackGroup( + rendererIndex, group, - formatSupport, - configuration, - maxAudioBitrate, - allowMixedMimeTypeAdaptiveness, - allowMixedSampleRateAdaptiveness, - allowAudioMixedChannelCountAdaptiveness); - if (configurationCount > selectedConfigurationTrackCount) { - selectedConfiguration = configuration; - selectedConfigurationTrackCount = configurationCount; - } - } - } - - if (selectedConfigurationTrackCount > 1) { - Assertions.checkNotNull(selectedConfiguration); - int[] adaptiveIndices = new int[selectedConfigurationTrackCount]; - int index = 0; - for (int i = 0; i < group.length; i++) { - Format format = group.getFormat(i); - if (isSupportedAdaptiveAudioTrack( - format, - formatSupport[i], - selectedConfiguration, - maxAudioBitrate, - allowMixedMimeTypeAdaptiveness, - allowMixedSampleRateAdaptiveness, - allowAudioMixedChannelCountAdaptiveness)) { - adaptiveIndices[index++] = i; - } - } - return adaptiveIndices; - } - return NO_TRACKS; - } - - private static 
int getAdaptiveAudioTrackCount( - TrackGroup group, - @Capabilities int[] formatSupport, - AudioConfigurationTuple configuration, - int maxAudioBitrate, - boolean allowMixedMimeTypeAdaptiveness, - boolean allowMixedSampleRateAdaptiveness, - boolean allowAudioMixedChannelCountAdaptiveness) { - int count = 0; - for (int i = 0; i < group.length; i++) { - if (isSupportedAdaptiveAudioTrack( - group.getFormat(i), - formatSupport[i], - configuration, - maxAudioBitrate, - allowMixedMimeTypeAdaptiveness, - allowMixedSampleRateAdaptiveness, - allowAudioMixedChannelCountAdaptiveness)) { - count++; - } - } - return count; + params, + support, + hasVideoRendererWithMappedTracksFinal, + this::isAudioFormatWithinAudioChannelCountConstraints), + AudioTrackInfo::compareSelections); } - private static boolean isSupportedAdaptiveAudioTrack( - Format format, - @Capabilities int formatSupport, - AudioConfigurationTuple configuration, - int maxAudioBitrate, - boolean allowMixedMimeTypeAdaptiveness, - boolean allowMixedSampleRateAdaptiveness, - boolean allowAudioMixedChannelCountAdaptiveness) { - return isSupported(formatSupport, false) - && (format.bitrate == Format.NO_VALUE || format.bitrate <= maxAudioBitrate) - && (allowAudioMixedChannelCountAdaptiveness - || (format.channelCount != Format.NO_VALUE - && format.channelCount == configuration.channelCount)) - && (allowMixedMimeTypeAdaptiveness - || (format.sampleMimeType != null - && TextUtils.equals(format.sampleMimeType, configuration.mimeType))) - && (allowMixedSampleRateAdaptiveness - || (format.sampleRate != Format.NO_VALUE - && format.sampleRate == configuration.sampleRate)); + /** + * Returns whether an audio format is within the audio channel count constraints. + * + *

+   * This method returns {@code true} if one of the following holds:
+   *
+   *   • Audio channel count constraints are not applicable (all formats are considered within
+   *     constraints).
+   *   • The device has a {@code television} UI mode.
+   *   • {@code format} has up to 2 channels.
+   *   • The device does not support audio spatialization and the format is {@linkplain
+   *     #isDolbyAudio(Format) a Dolby one}.
+   *   • Audio spatialization is applicable and {@code format} can be spatialized.
      + */ + private boolean isAudioFormatWithinAudioChannelCountConstraints(Format format) { + synchronized (lock) { + return !parameters.constrainAudioChannelCountToDeviceCapabilities + || deviceIsTV + || format.channelCount <= 2 + || (isDolbyAudio(format) + && (Util.SDK_INT < 32 + || spatializer == null + || !spatializer.isSpatializationSupported())) + || (Util.SDK_INT >= 32 + && spatializer != null + && spatializer.isSpatializationSupported() + && spatializer.isAvailable() + && spatializer.isEnabled() + && spatializer.canBeSpatialized(audioAttributes, format)); + } } // Text track selection implementation. /** * Called by {@link #selectAllTracks(MappedTrackInfo, int[][][], int[], Parameters)} to create a - * {@link TrackSelection} for a text renderer. + * {@link ExoTrackSelection.Definition} for a text track selection. * - * @param groups The {@link TrackGroupArray} mapped to the renderer. - * @param formatSupport The {@link Capabilities} for each mapped track, indexed by renderer, track - * group and track (in that order). + * @param mappedTrackInfo Mapped track information. + * @param rendererFormatSupports The {@link Capabilities} for each mapped track, indexed by + * renderer, track group and track (in that order). * @param params The selector's current constraint parameters. * @param selectedAudioLanguage The language of the selected audio track. May be null if the - * selected text track declares no language or no text track was selected. - * @return The {@link TrackSelection.Definition} and corresponding {@link TextTrackScore}, or null - * if no selection was made. + * selected audio track declares no language or no audio track was selected. + * @return A pair of the selected {@link ExoTrackSelection.Definition} and the corresponding + * renderer index, or null if no selection was made. * @throws ExoPlaybackException If an error occurs while selecting the tracks. */ @Nullable - protected Pair selectTextTrack( - TrackGroupArray groups, - @Capabilities int[][] formatSupport, + protected Pair selectTextTrack( + MappedTrackInfo mappedTrackInfo, + @Capabilities int[][][] rendererFormatSupports, Parameters params, @Nullable String selectedAudioLanguage) throws ExoPlaybackException { - TrackGroup selectedGroup = null; - int selectedTrackIndex = C.INDEX_UNSET; - TextTrackScore selectedTrackScore = null; - for (int groupIndex = 0; groupIndex < groups.length; groupIndex++) { - TrackGroup trackGroup = groups.get(groupIndex); - @Capabilities int[] trackFormatSupport = formatSupport[groupIndex]; - for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) { - if (isSupported(trackFormatSupport[trackIndex], - params.exceedRendererCapabilitiesIfNecessary)) { - Format format = trackGroup.getFormat(trackIndex); - TextTrackScore trackScore = - new TextTrackScore( - format, params, trackFormatSupport[trackIndex], selectedAudioLanguage); - if (trackScore.isWithinConstraints - && (selectedTrackScore == null || trackScore.compareTo(selectedTrackScore) > 0)) { - selectedGroup = trackGroup; - selectedTrackIndex = trackIndex; - selectedTrackScore = trackScore; - } - } - } - } - return selectedGroup == null - ? 
null - : Pair.create( - new TrackSelection.Definition(selectedGroup, selectedTrackIndex), - Assertions.checkNotNull(selectedTrackScore)); + return selectTracksForType( + C.TRACK_TYPE_TEXT, + mappedTrackInfo, + rendererFormatSupports, + (int rendererIndex, TrackGroup group, @Capabilities int[] support) -> + TextTrackInfo.createForTrackGroup( + rendererIndex, group, params, support, selectedAudioLanguage), + TextTrackInfo::compareSelections); } - // General track selection methods. + // Generic track selection methods. /** * Called by {@link #selectAllTracks(MappedTrackInfo, int[][][], int[], Parameters)} to create a - * {@link TrackSelection} for a renderer whose type is neither video, audio or text. + * {@link ExoTrackSelection} for a renderer whose type is neither video, audio or text. * * @param trackType The type of the renderer. * @param groups The {@link TrackGroupArray} mapped to the renderer. - * @param formatSupport The {@link Capabilities} for each mapped track, indexed by renderer, track - * group and track (in that order). + * @param formatSupport The {@link Capabilities} for each mapped track, indexed by track group and + * track (in that order). * @param params The selector's current constraint parameters. - * @return The {@link TrackSelection} for the renderer, or null if no selection was made. + * @return The {@link ExoTrackSelection} for the renderer, or null if no selection was made. * @throws ExoPlaybackException If an error occurs while selecting the tracks. */ @Nullable - protected TrackSelection.Definition selectOtherTrack( + protected ExoTrackSelection.Definition selectOtherTrack( int trackType, TrackGroupArray groups, @Capabilities int[][] formatSupport, Parameters params) throws ExoPlaybackException { - TrackGroup selectedGroup = null; + @Nullable TrackGroup selectedGroup = null; int selectedTrackIndex = 0; - int selectedTrackScore = 0; + @Nullable OtherTrackScore selectedTrackScore = null; for (int groupIndex = 0; groupIndex < groups.length; groupIndex++) { TrackGroup trackGroup = groups.get(groupIndex); @Capabilities int[] trackFormatSupport = formatSupport[groupIndex]; for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) { - if (isSupported(trackFormatSupport[trackIndex], - params.exceedRendererCapabilitiesIfNecessary)) { + if (isSupported( + trackFormatSupport[trackIndex], params.exceedRendererCapabilitiesIfNecessary)) { Format format = trackGroup.getFormat(trackIndex); - boolean isDefault = (format.selectionFlags & C.SELECTION_FLAG_DEFAULT) != 0; - int trackScore = isDefault ? 2 : 1; - if (isSupported(trackFormatSupport[trackIndex], false)) { - trackScore += WITHIN_RENDERER_CAPABILITIES_BONUS; - } - if (trackScore > selectedTrackScore) { + OtherTrackScore trackScore = new OtherTrackScore(format, trackFormatSupport[trackIndex]); + if (selectedTrackScore == null || trackScore.compareTo(selectedTrackScore) > 0) { selectedGroup = trackGroup; selectedTrackIndex = trackIndex; selectedTrackScore = trackScore; @@ -2351,15 +2688,176 @@ protected TrackSelection.Definition selectOtherTrack( } return selectedGroup == null ? 
null - : new TrackSelection.Definition(selectedGroup, selectedTrackIndex); + : new ExoTrackSelection.Definition(selectedGroup, selectedTrackIndex); + } + + @Nullable + private > Pair selectTracksForType( + @C.TrackType int trackType, + MappedTrackInfo mappedTrackInfo, + @Capabilities int[][][] formatSupport, + TrackInfo.Factory trackInfoFactory, + Comparator> selectionComparator) { + ArrayList> possibleSelections = new ArrayList<>(); + int rendererCount = mappedTrackInfo.getRendererCount(); + for (int rendererIndex = 0; rendererIndex < rendererCount; rendererIndex++) { + if (trackType == mappedTrackInfo.getRendererType(rendererIndex)) { + TrackGroupArray groups = mappedTrackInfo.getTrackGroups(rendererIndex); + for (int groupIndex = 0; groupIndex < groups.length; groupIndex++) { + TrackGroup trackGroup = groups.get(groupIndex); + @Capabilities int[] groupSupport = formatSupport[rendererIndex][groupIndex]; + List trackInfos = trackInfoFactory.create(rendererIndex, trackGroup, groupSupport); + boolean[] usedTrackInSelection = new boolean[trackGroup.length]; + for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) { + T trackInfo = trackInfos.get(trackIndex); + @SelectionEligibility int eligibility = trackInfo.getSelectionEligibility(); + if (usedTrackInSelection[trackIndex] || eligibility == SELECTION_ELIGIBILITY_NO) { + continue; + } + List selection; + if (eligibility == SELECTION_ELIGIBILITY_FIXED) { + selection = ImmutableList.of(trackInfo); + } else { + selection = new ArrayList<>(); + selection.add(trackInfo); + for (int i = trackIndex + 1; i < trackGroup.length; i++) { + T otherTrackInfo = trackInfos.get(i); + if (otherTrackInfo.getSelectionEligibility() == SELECTION_ELIGIBILITY_ADAPTIVE) { + if (trackInfo.isCompatibleForAdaptationWith(otherTrackInfo)) { + selection.add(otherTrackInfo); + usedTrackInSelection[i] = true; + } + } + } + } + possibleSelections.add(selection); + } + } + } + } + if (possibleSelections.isEmpty()) { + return null; + } + List bestSelection = max(possibleSelections, selectionComparator); + int[] trackIndices = new int[bestSelection.size()]; + for (int i = 0; i < bestSelection.size(); i++) { + trackIndices[i] = bestSelection.get(i).trackIndex; + } + T firstTrackInfo = bestSelection.get(0); + return Pair.create( + new ExoTrackSelection.Definition(firstTrackInfo.trackGroup, trackIndices), + firstTrackInfo.rendererIndex); + } + + private void maybeInvalidateForAudioChannelCountConstraints() { + boolean shouldInvalidate; + synchronized (lock) { + shouldInvalidate = + parameters.constrainAudioChannelCountToDeviceCapabilities + && !deviceIsTV + && Util.SDK_INT >= 32 + && spatializer != null + && spatializer.isSpatializationSupported(); + } + if (shouldInvalidate) { + invalidate(); + } } // Utility methods. + private static void applyTrackSelectionOverrides( + MappedTrackInfo mappedTrackInfo, + TrackSelectionParameters params, + ExoTrackSelection.@NullableType Definition[] outDefinitions) { + int rendererCount = mappedTrackInfo.getRendererCount(); + + // Determine overrides to apply. + HashMap<@C.TrackType Integer, TrackSelectionOverride> overridesByType = new HashMap<>(); + for (int rendererIndex = 0; rendererIndex < rendererCount; rendererIndex++) { + collectTrackSelectionOverrides( + mappedTrackInfo.getTrackGroups(rendererIndex), params, overridesByType); + } + collectTrackSelectionOverrides( + mappedTrackInfo.getUnmappedTrackGroups(), params, overridesByType); + + // Apply the overrides. 
+ for (int rendererIndex = 0; rendererIndex < rendererCount; rendererIndex++) { + @C.TrackType int trackType = mappedTrackInfo.getRendererType(rendererIndex); + @Nullable TrackSelectionOverride overrideForType = overridesByType.get(trackType); + if (overrideForType == null) { + continue; + } + // If the override is non-empty and applies to this renderer, then apply it. Else we don't + // want the renderer to be enabled at all, so clear any existing selection. + @Nullable ExoTrackSelection.Definition selection; + if (!overrideForType.trackIndices.isEmpty() + && mappedTrackInfo.getTrackGroups(rendererIndex).indexOf(overrideForType.mediaTrackGroup) + != -1) { + selection = + new ExoTrackSelection.Definition( + overrideForType.mediaTrackGroup, Ints.toArray(overrideForType.trackIndices)); + } else { + selection = null; + } + outDefinitions[rendererIndex] = selection; + } + } + + /** + * Adds {@link TrackSelectionOverride TrackSelectionOverrides} in {@code params} to {@code + * overridesByType} if they apply to tracks in {@code trackGroups}. If there's an existing + * override for a track type, it is replaced only if the existing override is empty and the one + * being considered is not. + */ + private static void collectTrackSelectionOverrides( + TrackGroupArray trackGroups, + TrackSelectionParameters params, + Map<@C.TrackType Integer, TrackSelectionOverride> overridesByType) { + for (int trackGroupIndex = 0; trackGroupIndex < trackGroups.length; trackGroupIndex++) { + TrackGroup trackGroup = trackGroups.get(trackGroupIndex); + @Nullable TrackSelectionOverride override = params.overrides.get(trackGroup); + if (override == null) { + continue; + } + @Nullable TrackSelectionOverride existingOverride = overridesByType.get(override.getType()); + // Only replace an existing override if it's empty and the one being considered is not. + if (existingOverride == null + || (existingOverride.trackIndices.isEmpty() && !override.trackIndices.isEmpty())) { + overridesByType.put(override.getType(), override); + } + } + } + + @SuppressWarnings("deprecation") // Calling legacy hasSelectionOverride and getSelectionOverride + private static void applyLegacyRendererOverrides( + MappedTrackInfo mappedTrackInfo, + Parameters params, + ExoTrackSelection.@NullableType Definition[] outDefinitions) { + int rendererCount = mappedTrackInfo.getRendererCount(); + for (int rendererIndex = 0; rendererIndex < rendererCount; rendererIndex++) { + TrackGroupArray trackGroups = mappedTrackInfo.getTrackGroups(rendererIndex); + if (!params.hasSelectionOverride(rendererIndex, trackGroups)) { + continue; + } + @Nullable + SelectionOverride override = params.getSelectionOverride(rendererIndex, trackGroups); + @Nullable ExoTrackSelection.Definition selection; + if (override != null && override.tracks.length != 0) { + selection = + new ExoTrackSelection.Definition( + trackGroups.get(override.groupIndex), override.tracks, override.type); + } else { + selection = null; + } + outDefinitions[rendererIndex] = selection; + } + } + /** - * Determines whether tunneling should be enabled, replacing {@link RendererConfiguration}s in - * {@code rendererConfigurations} with configurations that enable tunneling on the appropriate - * renderers if so. + * Determines whether tunneling can be enabled, replacing {@link RendererConfiguration}s in {@code + * rendererConfigurations} with configurations that enable tunneling on the appropriate renderers + * if so. * * @param mappedTrackInfo Mapped track information. 
* @param renderererFormatSupports The {@link Capabilities} for each mapped track, indexed by @@ -2367,18 +2865,12 @@ protected TrackSelection.Definition selectOtherTrack( * @param rendererConfigurations The renderer configurations. Configurations may be replaced with * ones that enable tunneling as a result of this call. * @param trackSelections The renderer track selections. - * @param tunnelingAudioSessionId The audio session id to use when tunneling, or {@link - * C#AUDIO_SESSION_ID_UNSET} if tunneling should not be enabled. */ private static void maybeConfigureRenderersForTunneling( MappedTrackInfo mappedTrackInfo, @Capabilities int[][][] renderererFormatSupports, @NullableType RendererConfiguration[] rendererConfigurations, - @NullableType TrackSelection[] trackSelections, - int tunnelingAudioSessionId) { - if (tunnelingAudioSessionId == C.AUDIO_SESSION_ID_UNSET) { - return; - } + @NullableType ExoTrackSelection[] trackSelections) { // Check whether we can enable tunneling. To enable tunneling we require exactly one audio and // one video renderer to support tunneling and have a selection. int tunnelingAudioRendererIndex = -1; @@ -2386,7 +2878,7 @@ private static void maybeConfigureRenderersForTunneling( boolean enableTunneling = true; for (int i = 0; i < mappedTrackInfo.getRendererCount(); i++) { int rendererType = mappedTrackInfo.getRendererType(i); - TrackSelection trackSelection = trackSelections[i]; + ExoTrackSelection trackSelection = trackSelections[i]; if ((rendererType == C.TRACK_TYPE_AUDIO || rendererType == C.TRACK_TYPE_VIDEO) && trackSelection != null) { if (rendererSupportsTunneling( @@ -2412,30 +2904,32 @@ private static void maybeConfigureRenderersForTunneling( enableTunneling &= tunnelingAudioRendererIndex != -1 && tunnelingVideoRendererIndex != -1; if (enableTunneling) { RendererConfiguration tunnelingRendererConfiguration = - new RendererConfiguration(tunnelingAudioSessionId); + new RendererConfiguration(/* tunneling= */ true); rendererConfigurations[tunnelingAudioRendererIndex] = tunnelingRendererConfiguration; rendererConfigurations[tunnelingVideoRendererIndex] = tunnelingRendererConfiguration; } } /** - * Returns whether a renderer supports tunneling for a {@link TrackSelection}. + * Returns whether a renderer supports tunneling for a {@link ExoTrackSelection}. * - * @param formatSupports The {@link Capabilities} for each track, indexed by group index and track + * @param formatSupport The {@link Capabilities} for each track, indexed by group index and track * index (in that order). * @param trackGroups The {@link TrackGroupArray}s for the renderer. * @param selection The track selection. - * @return Whether the renderer supports tunneling for the {@link TrackSelection}. + * @return Whether the renderer supports tunneling for the {@link ExoTrackSelection}. 
*/ private static boolean rendererSupportsTunneling( - @Capabilities int[][] formatSupports, TrackGroupArray trackGroups, TrackSelection selection) { + @Capabilities int[][] formatSupport, + TrackGroupArray trackGroups, + ExoTrackSelection selection) { if (selection == null) { return false; } int trackGroupIndex = trackGroups.indexOf(selection.getTrackGroup()); for (int i = 0; i < selection.length(); i++) { @Capabilities - int trackFormatSupport = formatSupports[trackGroupIndex][selection.getIndexInTrackGroup(i)]; + int trackFormatSupport = formatSupport[trackGroupIndex][selection.getIndexInTrackGroup(i)]; if (RendererCapabilities.getTunnelingSupport(trackFormatSupport) != RendererCapabilities.TUNNELING_SUPPORTED) { return false; @@ -2444,38 +2938,23 @@ private static boolean rendererSupportsTunneling( return true; } - /** - * Compares two format values for order. A known value is considered greater than {@link - * Format#NO_VALUE}. - * - * @param first The first value. - * @param second The second value. - * @return A negative integer if the first value is less than the second. Zero if they are equal. - * A positive integer if the first value is greater than the second. - */ - private static int compareFormatValues(int first, int second) { - return first == Format.NO_VALUE - ? (second == Format.NO_VALUE ? 0 : -1) - : (second == Format.NO_VALUE ? 1 : (first - second)); - } - /** * Returns true if the {@link FormatSupport} in the given {@link Capabilities} is {@link - * RendererCapabilities#FORMAT_HANDLED} or if {@code allowExceedsCapabilities} is set and the - * format support is {@link RendererCapabilities#FORMAT_EXCEEDS_CAPABILITIES}. + * C#FORMAT_HANDLED} or if {@code allowExceedsCapabilities} is set and the format support is + * {@link C#FORMAT_EXCEEDS_CAPABILITIES}. * * @param formatSupport {@link Capabilities}. * @param allowExceedsCapabilities Whether to return true if {@link FormatSupport} is {@link - * RendererCapabilities#FORMAT_EXCEEDS_CAPABILITIES}. - * @return True if {@link FormatSupport} is {@link RendererCapabilities#FORMAT_HANDLED}, or if - * {@code allowExceedsCapabilities} is set and the format support is {@link - * RendererCapabilities#FORMAT_EXCEEDS_CAPABILITIES}. + * C#FORMAT_EXCEEDS_CAPABILITIES}. + * @return True if {@link FormatSupport} is {@link C#FORMAT_HANDLED}, or if {@code + * allowExceedsCapabilities} is set and the format support is {@link + * C#FORMAT_EXCEEDS_CAPABILITIES}. */ protected static boolean isSupported( @Capabilities int formatSupport, boolean allowExceedsCapabilities) { @FormatSupport int maskedSupport = RendererCapabilities.getFormatSupport(formatSupport); - return maskedSupport == RendererCapabilities.FORMAT_HANDLED || (allowExceedsCapabilities - && maskedSupport == RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES); + return maskedSupport == C.FORMAT_HANDLED + || (allowExceedsCapabilities && maskedSupport == C.FORMAT_EXCEEDS_CAPABILITIES); } /** @@ -2528,28 +3007,20 @@ protected static int getFormatLanguageScore( return 0; } - private static List getViewportFilteredTrackIndices(TrackGroup group, int viewportWidth, - int viewportHeight, boolean orientationMayChange) { - // Initially include all indices. 
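// Illustrative sketch (not part of the patch): the rule implemented by
// maybeConfigureRenderersForTunneling() above - tunneling is only enabled when exactly one audio
// renderer and exactly one video renderer have a track selection, and both of those selections
// support tunneling. The boolean arrays are simplifications of the MappedTrackInfo and
// Capabilities checks in the diff; indices correspond to renderers.
final class TunnelingRule {
  /** Returns {audioRenderer, videoRenderer} when tunneling should be enabled, or null otherwise. */
  static int[] tunnelingRenderers(
      boolean[] isAudio, boolean[] isVideo, boolean[] hasSelection, boolean[] selectionSupportsTunneling) {
    int audioIndex = -1;
    int videoIndex = -1;
    for (int i = 0; i < hasSelection.length; i++) {
      if (!hasSelection[i] || (!isAudio[i] && !isVideo[i])) {
        continue;
      }
      if (!selectionSupportsTunneling[i]) {
        return null; // a selected audio/video renderer cannot tunnel
      }
      if (isAudio[i]) {
        if (audioIndex != -1) {
          return null; // more than one audio renderer with a selection
        }
        audioIndex = i;
      } else {
        if (videoIndex != -1) {
          return null; // more than one video renderer with a selection
        }
        videoIndex = i;
      }
    }
    return (audioIndex != -1 && videoIndex != -1) ? new int[] {audioIndex, videoIndex} : null;
  }
}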
- ArrayList selectedTrackIndices = new ArrayList<>(group.length); - for (int i = 0; i < group.length; i++) { - selectedTrackIndices.add(i); - } - + private static int getMaxVideoPixelsToRetainForViewport( + TrackGroup group, int viewportWidth, int viewportHeight, boolean orientationMayChange) { if (viewportWidth == Integer.MAX_VALUE || viewportHeight == Integer.MAX_VALUE) { - // Viewport dimensions not set. Return the full set of indices. - return selectedTrackIndices; + return Integer.MAX_VALUE; } - int maxVideoPixelsToRetain = Integer.MAX_VALUE; for (int i = 0; i < group.length; i++) { Format format = group.getFormat(i); // Keep track of the number of pixels of the selected format whose resolution is the // smallest to exceed the maximum size at which it can be displayed within the viewport. - // We'll discard formats of higher resolution. if (format.width > 0 && format.height > 0) { - Point maxVideoSizeInViewport = getMaxVideoSizeInViewport(orientationMayChange, - viewportWidth, viewportHeight, format.width, format.height); + Point maxVideoSizeInViewport = + getMaxVideoSizeInViewport( + orientationMayChange, viewportWidth, viewportHeight, format.width, format.height); int videoPixels = format.width * format.height; if (format.width >= (int) (maxVideoSizeInViewport.x * FRACTION_TO_CONSIDER_FULLSCREEN) && format.height >= (int) (maxVideoSizeInViewport.y * FRACTION_TO_CONSIDER_FULLSCREEN) @@ -2558,29 +3029,19 @@ private static List getViewportFilteredTrackIndices(TrackGroup group, i } } } - - // Filter out formats that exceed maxVideoPixelsToRetain. These formats have an unnecessarily - // high resolution given the size at which the video will be displayed within the viewport. Also - // filter out formats with unknown dimensions, since we have some whose dimensions are known. - if (maxVideoPixelsToRetain != Integer.MAX_VALUE) { - for (int i = selectedTrackIndices.size() - 1; i >= 0; i--) { - Format format = group.getFormat(selectedTrackIndices.get(i)); - int pixelCount = format.getPixelCount(); - if (pixelCount == Format.NO_VALUE || pixelCount > maxVideoPixelsToRetain) { - selectedTrackIndices.remove(i); - } - } - } - - return selectedTrackIndices; + return maxVideoPixelsToRetain; } /** * Given viewport dimensions and video dimensions, computes the maximum size of the video as it * will be rendered to fit inside of the viewport. */ - private static Point getMaxVideoSizeInViewport(boolean orientationMayChange, int viewportWidth, - int viewportHeight, int videoWidth, int videoHeight) { + private static Point getMaxVideoSizeInViewport( + boolean orientationMayChange, + int viewportWidth, + int viewportHeight, + int videoWidth, + int videoHeight) { if (orientationMayChange && (videoWidth > videoHeight) != (viewportWidth > viewportHeight)) { // Rotation is allowed, and the video will be larger in the rotated viewport. int tempViewportWidth = viewportWidth; @@ -2597,47 +3058,370 @@ private static Point getMaxVideoSizeInViewport(boolean orientationMayChange, int } } + private static int getRoleFlagMatchScore(int trackRoleFlags, int preferredRoleFlags) { + if (trackRoleFlags != 0 && trackRoleFlags == preferredRoleFlags) { + // Prefer perfect match over partial matches. + return Integer.MAX_VALUE; + } + return Integer.bitCount(trackRoleFlags & preferredRoleFlags); + } + /** - * Compares two integers in a safe way avoiding potential overflow. - * - * @param first The first value. - * @param second The second value. - * @return A negative integer if the first value is less than the second. 
Zero if they are equal. - * A positive integer if the first value is greater than the second. + * Returns preference score for primary, hardware-accelerated video codecs, with higher score + * being preferred. */ - private static int compareInts(int first, int second) { - return first > second ? 1 : (second > first ? -1 : 0); + private static int getVideoCodecPreferenceScore(@Nullable String mimeType) { + if (mimeType == null) { + return 0; + } + switch (mimeType) { + case MimeTypes.VIDEO_DOLBY_VISION: + return 5; + case MimeTypes.VIDEO_AV1: + return 4; + case MimeTypes.VIDEO_H265: + return 3; + case MimeTypes.VIDEO_VP9: + return 2; + case MimeTypes.VIDEO_H264: + return 1; + default: + return 0; + } + } + + private static boolean isDolbyAudio(Format format) { + if (format.sampleMimeType == null) { + return false; + } + switch (format.sampleMimeType) { + case MimeTypes.AUDIO_AC3: + case MimeTypes.AUDIO_E_AC3: + case MimeTypes.AUDIO_E_AC3_JOC: + case MimeTypes.AUDIO_AC4: + return true; + default: + return false; + } } - /** Represents how well an audio track matches the selection {@link Parameters}. */ - protected static final class AudioTrackScore implements Comparable { + /** Base class for track selection information of a {@link Format}. */ + private abstract static class TrackInfo> { + /** Factory for {@link TrackInfo} implementations for a given {@link TrackGroup}. */ + public interface Factory> { + List create(int rendererIndex, TrackGroup trackGroup, @Capabilities int[] formatSupports); + } + + public final int rendererIndex; + public final TrackGroup trackGroup; + public final int trackIndex; + public final Format format; + + public TrackInfo(int rendererIndex, TrackGroup trackGroup, int trackIndex) { + this.rendererIndex = rendererIndex; + this.trackGroup = trackGroup; + this.trackIndex = trackIndex; + this.format = trackGroup.getFormat(trackIndex); + } + + /** Returns to what extent the track is {@link SelectionEligibility eligible for selection}. */ + public abstract @SelectionEligibility int getSelectionEligibility(); /** - * Whether the provided format is within the parameter constraints. If {@code false}, the format - * should not be selected. + * Returns whether this track is compatible for an adaptive selection with the specified other + * track. 
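A small standalone illustration of getRoleFlagMatchScore as introduced above: an exact role-flag match trumps any partial overlap, which is otherwise scored by the number of shared flag bits. The flag values below are illustrative placeholders, not C.ROLE_FLAG_* constants.

// Editorial sketch, not part of the patch.
final class RoleFlagScoreSketch {

  static int roleFlagMatchScore(int trackRoleFlags, int preferredRoleFlags) {
    if (trackRoleFlags != 0 && trackRoleFlags == preferredRoleFlags) {
      return Integer.MAX_VALUE; // An exact match always beats partial overlaps.
    }
    return Integer.bitCount(trackRoleFlags & preferredRoleFlags); // Count the shared flags.
  }

  public static void main(String[] args) {
    int mainFlag = 1, alternateFlag = 1 << 1, commentaryFlag = 1 << 3;
    int preferred = mainFlag | commentaryFlag;
    System.out.println(roleFlagMatchScore(mainFlag, preferred));                  // 1
    System.out.println(roleFlagMatchScore(mainFlag | commentaryFlag, preferred)); // 2147483647
    System.out.println(roleFlagMatchScore(alternateFlag, preferred));             // 0
  }
}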
*/ - public final boolean isWithinConstraints; + public abstract boolean isCompatibleForAdaptationWith(T otherTrack); + } + + private static final class VideoTrackInfo extends TrackInfo { + + public static ImmutableList createForTrackGroup( + int rendererIndex, + TrackGroup trackGroup, + Parameters params, + @Capabilities int[] formatSupport, + @AdaptiveSupport int mixedMimeTypeAdaptionSupport) { + int maxPixelsToRetainForViewport = + getMaxVideoPixelsToRetainForViewport( + trackGroup, + params.viewportWidth, + params.viewportHeight, + params.viewportOrientationMayChange); + ImmutableList.Builder listBuilder = ImmutableList.builder(); + for (int i = 0; i < trackGroup.length; i++) { + int pixelCount = trackGroup.getFormat(i).getPixelCount(); + boolean isSuitableForViewport = + maxPixelsToRetainForViewport == Integer.MAX_VALUE + || (pixelCount != Format.NO_VALUE && pixelCount <= maxPixelsToRetainForViewport); + listBuilder.add( + new VideoTrackInfo( + rendererIndex, + trackGroup, + /* trackIndex= */ i, + params, + formatSupport[i], + mixedMimeTypeAdaptionSupport, + isSuitableForViewport)); + } + return listBuilder.build(); + } + + private final boolean isWithinMaxConstraints; + private final Parameters parameters; + private final boolean isWithinMinConstraints; + private final boolean isWithinRendererCapabilities; + private final int bitrate; + private final int pixelCount; + private final int preferredMimeTypeMatchIndex; + private final int preferredRoleFlagsScore; + private final boolean hasMainOrNoRoleFlag; + private final boolean allowMixedMimeTypes; + private final @SelectionEligibility int selectionEligibility; + private final boolean usesPrimaryDecoder; + private final boolean usesHardwareAcceleration; + private final int codecPreferenceScore; + + public VideoTrackInfo( + int rendererIndex, + TrackGroup trackGroup, + int trackIndex, + Parameters parameters, + @Capabilities int formatSupport, + @AdaptiveSupport int mixedMimeTypeAdaptationSupport, + boolean isSuitableForViewport) { + super(rendererIndex, trackGroup, trackIndex); + this.parameters = parameters; + @SuppressLint("WrongConstant") + int requiredAdaptiveSupport = + parameters.allowVideoNonSeamlessAdaptiveness + ? 
(RendererCapabilities.ADAPTIVE_NOT_SEAMLESS + | RendererCapabilities.ADAPTIVE_SEAMLESS) + : RendererCapabilities.ADAPTIVE_SEAMLESS; + allowMixedMimeTypes = + parameters.allowVideoMixedMimeTypeAdaptiveness + && (mixedMimeTypeAdaptationSupport & requiredAdaptiveSupport) != 0; + isWithinMaxConstraints = + isSuitableForViewport + && (format.width == Format.NO_VALUE || format.width <= parameters.maxVideoWidth) + && (format.height == Format.NO_VALUE || format.height <= parameters.maxVideoHeight) + && (format.frameRate == Format.NO_VALUE + || format.frameRate <= parameters.maxVideoFrameRate) + && (format.bitrate == Format.NO_VALUE + || format.bitrate <= parameters.maxVideoBitrate); + isWithinMinConstraints = + isSuitableForViewport + && (format.width == Format.NO_VALUE || format.width >= parameters.minVideoWidth) + && (format.height == Format.NO_VALUE || format.height >= parameters.minVideoHeight) + && (format.frameRate == Format.NO_VALUE + || format.frameRate >= parameters.minVideoFrameRate) + && (format.bitrate == Format.NO_VALUE + || format.bitrate >= parameters.minVideoBitrate); + isWithinRendererCapabilities = + isSupported(formatSupport, /* allowExceedsCapabilities= */ false); + bitrate = format.bitrate; + pixelCount = format.getPixelCount(); + preferredRoleFlagsScore = + getRoleFlagMatchScore(format.roleFlags, parameters.preferredVideoRoleFlags); + hasMainOrNoRoleFlag = format.roleFlags == 0 || (format.roleFlags & C.ROLE_FLAG_MAIN) != 0; + int bestMimeTypeMatchIndex = Integer.MAX_VALUE; + for (int i = 0; i < parameters.preferredVideoMimeTypes.size(); i++) { + if (format.sampleMimeType != null + && format.sampleMimeType.equals(parameters.preferredVideoMimeTypes.get(i))) { + bestMimeTypeMatchIndex = i; + break; + } + } + preferredMimeTypeMatchIndex = bestMimeTypeMatchIndex; + usesPrimaryDecoder = + RendererCapabilities.getDecoderSupport(formatSupport) + == RendererCapabilities.DECODER_SUPPORT_PRIMARY; + usesHardwareAcceleration = + RendererCapabilities.getHardwareAccelerationSupport(formatSupport) + == RendererCapabilities.HARDWARE_ACCELERATION_SUPPORTED; + codecPreferenceScore = getVideoCodecPreferenceScore(format.sampleMimeType); + selectionEligibility = evaluateSelectionEligibility(formatSupport, requiredAdaptiveSupport); + } + @Override + public @SelectionEligibility int getSelectionEligibility() { + return selectionEligibility; + } + + @Override + public boolean isCompatibleForAdaptationWith(VideoTrackInfo otherTrack) { + return (allowMixedMimeTypes + || Util.areEqual(format.sampleMimeType, otherTrack.format.sampleMimeType)) + && (parameters.allowVideoMixedDecoderSupportAdaptiveness + || (this.usesPrimaryDecoder == otherTrack.usesPrimaryDecoder + && this.usesHardwareAcceleration == otherTrack.usesHardwareAcceleration)); + } + + private @SelectionEligibility int evaluateSelectionEligibility( + @Capabilities int rendererSupport, @AdaptiveSupport int requiredAdaptiveSupport) { + if ((format.roleFlags & C.ROLE_FLAG_TRICK_PLAY) != 0) { + // Ignore trick-play tracks for now. 
+ return SELECTION_ELIGIBILITY_NO; + } + if (!isSupported(rendererSupport, parameters.exceedRendererCapabilitiesIfNecessary)) { + return SELECTION_ELIGIBILITY_NO; + } + if (!isWithinMaxConstraints && !parameters.exceedVideoConstraintsIfNecessary) { + return SELECTION_ELIGIBILITY_NO; + } + return isSupported(rendererSupport, /* allowExceedsCapabilities= */ false) + && isWithinMinConstraints + && isWithinMaxConstraints + && format.bitrate != Format.NO_VALUE + && !parameters.forceHighestSupportedBitrate + && !parameters.forceLowestBitrate + && ((rendererSupport & requiredAdaptiveSupport) != 0) + ? SELECTION_ELIGIBILITY_ADAPTIVE + : SELECTION_ELIGIBILITY_FIXED; + } + + private static int compareNonQualityPreferences(VideoTrackInfo info1, VideoTrackInfo info2) { + ComparisonChain chain = + ComparisonChain.start() + .compareFalseFirst( + info1.isWithinRendererCapabilities, info2.isWithinRendererCapabilities) + // 1. Compare match with specific content preferences set by the parameters. + .compare(info1.preferredRoleFlagsScore, info2.preferredRoleFlagsScore) + // 2. Compare match with implicit content preferences set by the media. + .compareFalseFirst(info1.hasMainOrNoRoleFlag, info2.hasMainOrNoRoleFlag) + // 3. Compare match with technical preferences set by the parameters. + .compareFalseFirst(info1.isWithinMaxConstraints, info2.isWithinMaxConstraints) + .compareFalseFirst(info1.isWithinMinConstraints, info2.isWithinMinConstraints) + .compare( + info1.preferredMimeTypeMatchIndex, + info2.preferredMimeTypeMatchIndex, + Ordering.natural().reverse()) + // 4. Compare match with renderer capability preferences. + .compareFalseFirst(info1.usesPrimaryDecoder, info2.usesPrimaryDecoder) + .compareFalseFirst(info1.usesHardwareAcceleration, info2.usesHardwareAcceleration); + if (info1.usesPrimaryDecoder && info1.usesHardwareAcceleration) { + chain = chain.compare(info1.codecPreferenceScore, info2.codecPreferenceScore); + } + return chain.result(); + } + + private static int compareQualityPreferences(VideoTrackInfo info1, VideoTrackInfo info2) { + // The preferred ordering by video quality depends on the constraints: + // - Not within renderer capabilities: Prefer lower quality because it's more likely to play. + // - Within min and max constraints: Prefer higher quality. + // - Within max constraints only: Prefer higher quality because it gets us closest to + // satisfying the violated min constraints. + // - Within min constraints only: Prefer lower quality because it gets us closest to + // satisfying the violated max constraints. + // - Outside min and max constraints: Arbitrarily prefer lower quality. + Ordering qualityOrdering = + info1.isWithinMaxConstraints && info1.isWithinRendererCapabilities + ? FORMAT_VALUE_ORDERING + : FORMAT_VALUE_ORDERING.reverse(); + return ComparisonChain.start() + .compare( + info1.bitrate, + info2.bitrate, + info1.parameters.forceLowestBitrate ? FORMAT_VALUE_ORDERING.reverse() : NO_ORDER) + .compare(info1.pixelCount, info2.pixelCount, qualityOrdering) + .compare(info1.bitrate, info2.bitrate, qualityOrdering) + .result(); + } + + public static int compareSelections(List infos1, List infos2) { + return ComparisonChain.start() + // Compare non-quality preferences of the best individual track with each other. + .compare( + max(infos1, VideoTrackInfo::compareNonQualityPreferences), + max(infos2, VideoTrackInfo::compareNonQualityPreferences), + VideoTrackInfo::compareNonQualityPreferences) + // Prefer selections with more formats (all non-quality preferences being equal). 
+ .compare(infos1.size(), infos2.size()) + // Prefer selections with the best individual track quality. + .compare( + max(infos1, VideoTrackInfo::compareQualityPreferences), + max(infos2, VideoTrackInfo::compareQualityPreferences), + VideoTrackInfo::compareQualityPreferences) + .result(); + } + } + + private static final class AudioTrackInfo extends TrackInfo + implements Comparable { + + public static ImmutableList createForTrackGroup( + int rendererIndex, + TrackGroup trackGroup, + Parameters params, + @Capabilities int[] formatSupport, + boolean hasMappedVideoTracks, + Predicate withinAudioChannelCountConstraints) { + ImmutableList.Builder listBuilder = ImmutableList.builder(); + for (int i = 0; i < trackGroup.length; i++) { + listBuilder.add( + new AudioTrackInfo( + rendererIndex, + trackGroup, + /* trackIndex= */ i, + params, + formatSupport[i], + hasMappedVideoTracks, + withinAudioChannelCountConstraints)); + } + return listBuilder.build(); + } + + private final @SelectionEligibility int selectionEligibility; + private final boolean isWithinConstraints; @Nullable private final String language; private final Parameters parameters; private final boolean isWithinRendererCapabilities; private final int preferredLanguageScore; + private final int preferredLanguageIndex; + private final int preferredRoleFlagsScore; + private final boolean hasMainOrNoRoleFlag; private final int localeLanguageMatchIndex; private final int localeLanguageScore; private final boolean isDefaultSelectionFlag; private final int channelCount; private final int sampleRate; private final int bitrate; - - public AudioTrackScore(Format format, Parameters parameters, @Capabilities int formatSupport) { + private final int preferredMimeTypeMatchIndex; + private final boolean usesPrimaryDecoder; + private final boolean usesHardwareAcceleration; + + public AudioTrackInfo( + int rendererIndex, + TrackGroup trackGroup, + int trackIndex, + Parameters parameters, + @Capabilities int formatSupport, + boolean hasMappedVideoTracks, + Predicate withinAudioChannelCountConstraints) { + super(rendererIndex, trackGroup, trackIndex); this.parameters = parameters; this.language = normalizeUndeterminedLanguageToNull(format.language); - isWithinRendererCapabilities = isSupported(formatSupport, false); - preferredLanguageScore = - getFormatLanguageScore( - format, - parameters.preferredAudioLanguage, - /* allowUndeterminedFormatLanguage= */ false); + isWithinRendererCapabilities = + isSupported(formatSupport, /* allowExceedsCapabilities= */ false); + int bestLanguageScore = 0; + int bestLanguageIndex = Integer.MAX_VALUE; + for (int i = 0; i < parameters.preferredAudioLanguages.size(); i++) { + int score = + getFormatLanguageScore( + format, + parameters.preferredAudioLanguages.get(i), + /* allowUndeterminedFormatLanguage= */ false); + if (score > 0) { + bestLanguageIndex = i; + bestLanguageScore = score; + break; + } + } + preferredLanguageIndex = bestLanguageIndex; + preferredLanguageScore = bestLanguageScore; + preferredRoleFlagsScore = + getRoleFlagMatchScore(format.roleFlags, parameters.preferredAudioRoleFlags); + hasMainOrNoRoleFlag = format.roleFlags == 0 || (format.roleFlags & C.ROLE_FLAG_MAIN) != 0; isDefaultSelectionFlag = (format.selectionFlags & C.SELECTION_FLAG_DEFAULT) != 0; channelCount = format.channelCount; sampleRate = format.sampleRate; @@ -2645,191 +3429,370 @@ public AudioTrackScore(Format format, Parameters parameters, @Capabilities int f isWithinConstraints = (format.bitrate == Format.NO_VALUE || format.bitrate <= 
parameters.maxAudioBitrate) && (format.channelCount == Format.NO_VALUE - || format.channelCount <= parameters.maxAudioChannelCount); + || format.channelCount <= parameters.maxAudioChannelCount) + && withinAudioChannelCountConstraints.apply(format); String[] localeLanguages = Util.getSystemLanguageCodes(); - int bestMatchIndex = Integer.MAX_VALUE; - int bestMatchScore = 0; + int bestLocaleMatchIndex = Integer.MAX_VALUE; + int bestLocaleMatchScore = 0; for (int i = 0; i < localeLanguages.length; i++) { int score = getFormatLanguageScore( format, localeLanguages[i], /* allowUndeterminedFormatLanguage= */ false); if (score > 0) { - bestMatchIndex = i; - bestMatchScore = score; + bestLocaleMatchIndex = i; + bestLocaleMatchScore = score; break; } } - localeLanguageMatchIndex = bestMatchIndex; - localeLanguageScore = bestMatchScore; - } - - /** - * Compares this score with another. - * - * @param other The other score to compare to. - * @return A positive integer if this score is better than the other. Zero if they are equal. A - * negative integer if this score is worse than the other. - */ - @Override - public int compareTo(AudioTrackScore other) { - if (this.isWithinRendererCapabilities != other.isWithinRendererCapabilities) { - return this.isWithinRendererCapabilities ? 1 : -1; - } - if (this.preferredLanguageScore != other.preferredLanguageScore) { - return compareInts(this.preferredLanguageScore, other.preferredLanguageScore); - } - if (this.isWithinConstraints != other.isWithinConstraints) { - return this.isWithinConstraints ? 1 : -1; - } - if (parameters.forceLowestBitrate) { - int bitrateComparison = compareFormatValues(bitrate, other.bitrate); - if (bitrateComparison != 0) { - return bitrateComparison > 0 ? -1 : 1; + localeLanguageMatchIndex = bestLocaleMatchIndex; + localeLanguageScore = bestLocaleMatchScore; + int bestMimeTypeMatchIndex = Integer.MAX_VALUE; + for (int i = 0; i < parameters.preferredAudioMimeTypes.size(); i++) { + if (format.sampleMimeType != null + && format.sampleMimeType.equals(parameters.preferredAudioMimeTypes.get(i))) { + bestMimeTypeMatchIndex = i; + break; } } - if (this.isDefaultSelectionFlag != other.isDefaultSelectionFlag) { - return this.isDefaultSelectionFlag ? 1 : -1; - } - if (this.localeLanguageMatchIndex != other.localeLanguageMatchIndex) { - return -compareInts(this.localeLanguageMatchIndex, other.localeLanguageMatchIndex); - } - if (this.localeLanguageScore != other.localeLanguageScore) { - return compareInts(this.localeLanguageScore, other.localeLanguageScore); - } - // If the formats are within constraints and renderer capabilities then prefer higher values - // of channel count, sample rate and bit rate in that order. Otherwise, prefer lower values. - int resultSign = isWithinConstraints && isWithinRendererCapabilities ? 1 : -1; - if (this.channelCount != other.channelCount) { - return resultSign * compareInts(this.channelCount, other.channelCount); - } - if (this.sampleRate != other.sampleRate) { - return resultSign * compareInts(this.sampleRate, other.sampleRate); - } - if (Util.areEqual(this.language, other.language)) { - // Only compare bit rates of tracks with the same or unknown language. 
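The hand-rolled compareTo above gives way to Guava's ComparisonChain in the rewritten scorer below. Here is a minimal standalone sketch of that pattern with invented field values; note that the real code plugs in a no-op NO_ORDER ordering, not natural ordering, for the bitrate step when forceLowestBitrate is unset.

// Editorial sketch, not part of the patch.
import com.google.common.collect.ComparisonChain;
import com.google.common.collect.Ordering;

final class ComparisonChainSketch {

  static int compareTracks(
      boolean withinCaps1, int languageScore1, int bitrate1,
      boolean withinCaps2, int languageScore2, int bitrate2,
      boolean forceLowestBitrate) {
    return ComparisonChain.start()
        // compareFalseFirst: the track that IS within capabilities compares as greater (preferred).
        .compareFalseFirst(withinCaps1, withinCaps2)
        .compare(languageScore1, languageScore2)
        .compare(
            bitrate1,
            bitrate2,
            forceLowestBitrate
                ? Ordering.<Integer>natural().reverse()
                : Ordering.<Integer>natural())
        .result();
  }

  public static void main(String[] args) {
    // Equal capabilities and language score, so the bitrate step decides; with
    // forceLowestBitrate the 128 kbps track compares as greater, i.e. preferred.
    System.out.println(compareTracks(true, 4, 128_000, true, 4, 256_000, true) > 0); // true
  }
}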
- return resultSign * compareInts(this.bitrate, other.bitrate); - } - return 0; + preferredMimeTypeMatchIndex = bestMimeTypeMatchIndex; + usesPrimaryDecoder = + RendererCapabilities.getDecoderSupport(formatSupport) + == RendererCapabilities.DECODER_SUPPORT_PRIMARY; + usesHardwareAcceleration = + RendererCapabilities.getHardwareAccelerationSupport(formatSupport) + == RendererCapabilities.HARDWARE_ACCELERATION_SUPPORTED; + selectionEligibility = evaluateSelectionEligibility(formatSupport, hasMappedVideoTracks); } - } - private static final class AudioConfigurationTuple { - - public final int channelCount; - public final int sampleRate; - @Nullable public final String mimeType; + @Override + public @SelectionEligibility int getSelectionEligibility() { + return selectionEligibility; + } - public AudioConfigurationTuple(int channelCount, int sampleRate, @Nullable String mimeType) { - this.channelCount = channelCount; - this.sampleRate = sampleRate; - this.mimeType = mimeType; + @Override + public boolean isCompatibleForAdaptationWith(AudioTrackInfo otherTrack) { + return (parameters.allowAudioMixedChannelCountAdaptiveness + || (format.channelCount != Format.NO_VALUE + && format.channelCount == otherTrack.format.channelCount)) + && (parameters.allowAudioMixedMimeTypeAdaptiveness + || (format.sampleMimeType != null + && TextUtils.equals(format.sampleMimeType, otherTrack.format.sampleMimeType))) + && (parameters.allowAudioMixedSampleRateAdaptiveness + || (format.sampleRate != Format.NO_VALUE + && format.sampleRate == otherTrack.format.sampleRate)) + && (parameters.allowAudioMixedDecoderSupportAdaptiveness + || (this.usesPrimaryDecoder == otherTrack.usesPrimaryDecoder + && this.usesHardwareAcceleration == otherTrack.usesHardwareAcceleration)); } @Override - public boolean equals(@Nullable Object obj) { - if (this == obj) { - return true; + public int compareTo(AudioTrackInfo other) { + // If the formats are within constraints and renderer capabilities then prefer higher values + // of channel count, sample rate and bit rate in that order. Otherwise, prefer lower values. + Ordering qualityOrdering = + isWithinConstraints && isWithinRendererCapabilities + ? FORMAT_VALUE_ORDERING + : FORMAT_VALUE_ORDERING.reverse(); + return ComparisonChain.start() + .compareFalseFirst(this.isWithinRendererCapabilities, other.isWithinRendererCapabilities) + // 1. Compare match with specific content preferences set by the parameters. + .compare( + this.preferredLanguageIndex, + other.preferredLanguageIndex, + Ordering.natural().reverse()) + .compare(this.preferredLanguageScore, other.preferredLanguageScore) + .compare(this.preferredRoleFlagsScore, other.preferredRoleFlagsScore) + // 2. Compare match with implicit content preferences set by the media or the system. + .compareFalseFirst(this.isDefaultSelectionFlag, other.isDefaultSelectionFlag) + .compareFalseFirst(this.hasMainOrNoRoleFlag, other.hasMainOrNoRoleFlag) + .compare( + this.localeLanguageMatchIndex, + other.localeLanguageMatchIndex, + Ordering.natural().reverse()) + .compare(this.localeLanguageScore, other.localeLanguageScore) + // 3. Compare match with technical preferences set by the parameters. + .compareFalseFirst(this.isWithinConstraints, other.isWithinConstraints) + .compare( + this.preferredMimeTypeMatchIndex, + other.preferredMimeTypeMatchIndex, + Ordering.natural().reverse()) + .compare( + this.bitrate, + other.bitrate, + parameters.forceLowestBitrate ? FORMAT_VALUE_ORDERING.reverse() : NO_ORDER) + // 4. 
Compare match with renderer capability preferences. + .compareFalseFirst(this.usesPrimaryDecoder, other.usesPrimaryDecoder) + .compareFalseFirst(this.usesHardwareAcceleration, other.usesHardwareAcceleration) + // 5. Compare technical quality. + .compare(this.channelCount, other.channelCount, qualityOrdering) + .compare(this.sampleRate, other.sampleRate, qualityOrdering) + .compare( + this.bitrate, + other.bitrate, + // Only compare bit rates of tracks with matching language information. + Util.areEqual(this.language, other.language) ? qualityOrdering : NO_ORDER) + .result(); + } + + private @SelectionEligibility int evaluateSelectionEligibility( + @Capabilities int rendererSupport, boolean hasMappedVideoTracks) { + if (!isSupported(rendererSupport, parameters.exceedRendererCapabilitiesIfNecessary)) { + return SELECTION_ELIGIBILITY_NO; } - if (obj == null || getClass() != obj.getClass()) { - return false; + if (!isWithinConstraints && !parameters.exceedAudioConstraintsIfNecessary) { + return SELECTION_ELIGIBILITY_NO; } - AudioConfigurationTuple other = (AudioConfigurationTuple) obj; - return channelCount == other.channelCount && sampleRate == other.sampleRate - && TextUtils.equals(mimeType, other.mimeType); + return isSupported(rendererSupport, /* allowExceedsCapabilities= */ false) + && isWithinConstraints + && format.bitrate != Format.NO_VALUE + && !parameters.forceHighestSupportedBitrate + && !parameters.forceLowestBitrate + && (parameters.allowMultipleAdaptiveSelections || !hasMappedVideoTracks) + ? SELECTION_ELIGIBILITY_ADAPTIVE + : SELECTION_ELIGIBILITY_FIXED; } - @Override - public int hashCode() { - int result = channelCount; - result = 31 * result + sampleRate; - result = 31 * result + (mimeType != null ? mimeType.hashCode() : 0); - return result; + public static int compareSelections(List infos1, List infos2) { + // Compare best tracks of each selection with each other. + return max(infos1).compareTo(max(infos2)); } - } - /** Represents how well a text track matches the selection {@link Parameters}. */ - protected static final class TextTrackScore implements Comparable { + private static final class TextTrackInfo extends TrackInfo + implements Comparable { - /** - * Whether the provided format is within the parameter constraints. If {@code false}, the format - * should not be selected. 
- */ - public final boolean isWithinConstraints; + public static ImmutableList createForTrackGroup( + int rendererIndex, + TrackGroup trackGroup, + Parameters params, + @Capabilities int[] formatSupport, + @Nullable String selectedAudioLanguage) { + ImmutableList.Builder listBuilder = ImmutableList.builder(); + for (int i = 0; i < trackGroup.length; i++) { + listBuilder.add( + new TextTrackInfo( + rendererIndex, + trackGroup, + /* trackIndex= */ i, + params, + formatSupport[i], + selectedAudioLanguage)); + } + return listBuilder.build(); + } + private final @SelectionEligibility int selectionEligibility; private final boolean isWithinRendererCapabilities; private final boolean isDefault; - private final boolean hasPreferredIsForcedFlag; + private final boolean isForced; + private final int preferredLanguageIndex; private final int preferredLanguageScore; private final int preferredRoleFlagsScore; private final int selectedAudioLanguageScore; private final boolean hasCaptionRoleFlags; - public TextTrackScore( - Format format, + public TextTrackInfo( + int rendererIndex, + TrackGroup trackGroup, + int trackIndex, Parameters parameters, @Capabilities int trackFormatSupport, @Nullable String selectedAudioLanguage) { + super(rendererIndex, trackGroup, trackIndex); isWithinRendererCapabilities = isSupported(trackFormatSupport, /* allowExceedsCapabilities= */ false); - int maskedSelectionFlags = - format.selectionFlags & ~parameters.disabledTextTrackSelectionFlags; + int maskedSelectionFlags = format.selectionFlags & ~parameters.ignoredTextSelectionFlags; isDefault = (maskedSelectionFlags & C.SELECTION_FLAG_DEFAULT) != 0; - boolean isForced = (maskedSelectionFlags & C.SELECTION_FLAG_FORCED) != 0; - preferredLanguageScore = - getFormatLanguageScore( - format, parameters.preferredTextLanguage, parameters.selectUndeterminedTextLanguage); + isForced = (maskedSelectionFlags & C.SELECTION_FLAG_FORCED) != 0; + int bestLanguageIndex = Integer.MAX_VALUE; + int bestLanguageScore = 0; + // Compare against empty (unset) language if no preference is given to allow the selection of + // a text track with undetermined language. + ImmutableList preferredLanguages = + parameters.preferredTextLanguages.isEmpty() + ? ImmutableList.of("") + : parameters.preferredTextLanguages; + for (int i = 0; i < preferredLanguages.size(); i++) { + int score = + getFormatLanguageScore( + format, preferredLanguages.get(i), parameters.selectUndeterminedTextLanguage); + if (score > 0) { + bestLanguageIndex = i; + bestLanguageScore = score; + break; + } + } + preferredLanguageIndex = bestLanguageIndex; + preferredLanguageScore = bestLanguageScore; preferredRoleFlagsScore = - Integer.bitCount(format.roleFlags & parameters.preferredTextRoleFlags); + getRoleFlagMatchScore(format.roleFlags, parameters.preferredTextRoleFlags); hasCaptionRoleFlags = (format.roleFlags & (C.ROLE_FLAG_CAPTION | C.ROLE_FLAG_DESCRIBES_MUSIC_AND_SOUND)) != 0; - // Prefer non-forced to forced if a preferred text language has been matched. Where both are - // provided the non-forced track will usually contain the forced subtitles as a subset. - // Otherwise, prefer a forced track. 
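A standalone sketch of the preferred-language scan used here for text (and above for audio): the first preference that matches at all wins, its index in the preference list becomes a later tie-breaker, and when no preference is set the empty language is tried so tracks with undetermined language stay selectable. languageScore is a simplified stand-in for getFormatLanguageScore; the preference list and track language are invented.

// Editorial sketch, not part of the patch.
import java.util.List;

final class LanguagePreferenceSketch {

  static int languageScore(String trackLanguage, String preferredLanguage) {
    if (preferredLanguage.isEmpty()) {
      return trackLanguage.isEmpty() ? 1 : 0; // Empty preference only matches an unset language.
    }
    return trackLanguage.equals(preferredLanguage) ? 4 : 0; // Simplified: exact matches only.
  }

  public static void main(String[] args) {
    List<String> preferredTextLanguages = List.of("de", "en");
    String trackLanguage = "en";

    // Fall back to the empty (unset) language when no preference is given, as the constructor does.
    List<String> preferences =
        preferredTextLanguages.isEmpty() ? List.of("") : preferredTextLanguages;
    int bestIndex = Integer.MAX_VALUE;
    int bestScore = 0;
    for (int i = 0; i < preferences.size(); i++) {
      int score = languageScore(trackLanguage, preferences.get(i));
      if (score > 0) {
        bestIndex = i; // A lower index (earlier preference) later wins ties against other tracks.
        bestScore = score;
        break;
      }
    }
    System.out.println(bestIndex + " " + bestScore); // 1 4: "en" is the second preference.
  }
}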
- hasPreferredIsForcedFlag = - (preferredLanguageScore > 0 && !isForced) || (preferredLanguageScore == 0 && isForced); boolean selectedAudioLanguageUndetermined = normalizeUndeterminedLanguageToNull(selectedAudioLanguage) == null; selectedAudioLanguageScore = getFormatLanguageScore(format, selectedAudioLanguage, selectedAudioLanguageUndetermined); - isWithinConstraints = + boolean isWithinConstraints = preferredLanguageScore > 0 - || (parameters.preferredTextLanguage == null && preferredRoleFlagsScore > 0) + || (parameters.preferredTextLanguages.isEmpty() && preferredRoleFlagsScore > 0) || isDefault || (isForced && selectedAudioLanguageScore > 0); + selectionEligibility = + isSupported(trackFormatSupport, parameters.exceedRendererCapabilitiesIfNecessary) + && isWithinConstraints + ? SELECTION_ELIGIBILITY_FIXED + : SELECTION_ELIGIBILITY_NO; } - /** - * Compares this score with another. - * - * @param other The other score to compare to. - * @return A positive integer if this score is better than the other. Zero if they are equal. A - * negative integer if this score is worse than the other. - */ @Override - public int compareTo(TextTrackScore other) { - if (this.isWithinRendererCapabilities != other.isWithinRendererCapabilities) { - return this.isWithinRendererCapabilities ? 1 : -1; - } - if (this.preferredLanguageScore != other.preferredLanguageScore) { - return compareInts(this.preferredLanguageScore, other.preferredLanguageScore); - } - if (this.preferredRoleFlagsScore != other.preferredRoleFlagsScore) { - return compareInts(this.preferredRoleFlagsScore, other.preferredRoleFlagsScore); - } - if (this.isDefault != other.isDefault) { - return this.isDefault ? 1 : -1; + public @SelectionEligibility int getSelectionEligibility() { + return selectionEligibility; + } + + @Override + public boolean isCompatibleForAdaptationWith(TextTrackInfo otherTrack) { + return false; + } + + @Override + public int compareTo(TextTrackInfo other) { + ComparisonChain chain = + ComparisonChain.start() + .compareFalseFirst( + this.isWithinRendererCapabilities, other.isWithinRendererCapabilities) + // 1. Compare match with specific content preferences set by the parameters. + .compare( + this.preferredLanguageIndex, + other.preferredLanguageIndex, + Ordering.natural().reverse()) + .compare(this.preferredLanguageScore, other.preferredLanguageScore) + .compare(this.preferredRoleFlagsScore, other.preferredRoleFlagsScore) + // 2. Compare match with implicit content preferences set by the media. + .compareFalseFirst(this.isDefault, other.isDefault) + .compare( + this.isForced, + other.isForced, + // Prefer non-forced to forced if a preferred text language has been matched. + // Where both are provided the non-forced track will usually contain the forced + // subtitles as a subset. Otherwise, prefer a forced track. + preferredLanguageScore == 0 ? Ordering.natural() : Ordering.natural().reverse()) + .compare(this.selectedAudioLanguageScore, other.selectedAudioLanguageScore); + if (preferredRoleFlagsScore == 0) { + chain = chain.compareTrueFirst(this.hasCaptionRoleFlags, other.hasCaptionRoleFlags); } - if (this.hasPreferredIsForcedFlag != other.hasPreferredIsForcedFlag) { - return this.hasPreferredIsForcedFlag ? 
1 : -1; + return chain.result(); + } + + public static int compareSelections(List infos1, List infos2) { + return infos1.get(0).compareTo(infos2.get(0)); + } + } + + private static final class OtherTrackScore implements Comparable { + + private final boolean isDefault; + private final boolean isWithinRendererCapabilities; + + public OtherTrackScore(Format format, @Capabilities int trackFormatSupport) { + isDefault = (format.selectionFlags & C.SELECTION_FLAG_DEFAULT) != 0; + isWithinRendererCapabilities = + isSupported(trackFormatSupport, /* allowExceedsCapabilities= */ false); + } + + @Override + public int compareTo(OtherTrackScore other) { + return ComparisonChain.start() + .compareFalseFirst(this.isWithinRendererCapabilities, other.isWithinRendererCapabilities) + .compareFalseFirst(this.isDefault, other.isDefault) + .result(); + } + } + + /** + * Wraps the {@link Spatializer} in order to encapsulate its APIs within an inner class, to avoid + * runtime linking on devices with {@code API < 32}. + */ + @RequiresApi(32) + private static class SpatializerWrapperV32 { + + private final Spatializer spatializer; + private final boolean spatializationSupported; + + @Nullable private Handler handler; + @Nullable private Spatializer.OnSpatializerStateChangedListener listener; + + @Nullable + public static SpatializerWrapperV32 tryCreateInstance(Context context) { + @Nullable + AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); + return audioManager == null ? null : new SpatializerWrapperV32(audioManager.getSpatializer()); + } + + private SpatializerWrapperV32(Spatializer spatializer) { + this.spatializer = spatializer; + this.spatializationSupported = + spatializer.getImmersiveAudioLevel() != Spatializer.SPATIALIZER_IMMERSIVE_LEVEL_NONE; + } + + public void ensureInitialized(DefaultTrackSelector defaultTrackSelector, Looper looper) { + if (listener != null || handler != null) { + return; } - if (this.selectedAudioLanguageScore != other.selectedAudioLanguageScore) { - return compareInts(this.selectedAudioLanguageScore, other.selectedAudioLanguageScore); + this.listener = + new Spatializer.OnSpatializerStateChangedListener() { + @Override + public void onSpatializerEnabledChanged(Spatializer spatializer, boolean enabled) { + defaultTrackSelector.maybeInvalidateForAudioChannelCountConstraints(); + } + + @Override + public void onSpatializerAvailableChanged(Spatializer spatializer, boolean available) { + defaultTrackSelector.maybeInvalidateForAudioChannelCountConstraints(); + } + }; + this.handler = new Handler(looper); + spatializer.addOnSpatializerStateChangedListener(handler::post, listener); + } + + public boolean isSpatializationSupported() { + return spatializationSupported; + } + + public boolean isAvailable() { + return spatializer.isAvailable(); + } + + public boolean isEnabled() { + return spatializer.isEnabled(); + } + + public boolean canBeSpatialized(AudioAttributes audioAttributes, Format format) { + // For E-AC3 JOC, the format is object based. When the channel count is 16, this maps to 12 + // linear channels and the rest are used for objects. See + // https://github.com/google/ExoPlayer/pull/10322#discussion_r895265881 + int linearChannelCount = + MimeTypes.AUDIO_E_AC3_JOC.equals(format.sampleMimeType) && format.channelCount == 16 + ? 
12 + : format.channelCount; + AudioFormat.Builder builder = + new AudioFormat.Builder() + .setEncoding(AudioFormat.ENCODING_PCM_16BIT) + .setChannelMask(Util.getAudioTrackChannelConfig(linearChannelCount)); + if (format.sampleRate != Format.NO_VALUE) { + builder.setSampleRate(format.sampleRate); } - if (preferredRoleFlagsScore == 0 && this.hasCaptionRoleFlags != other.hasCaptionRoleFlags) { - return this.hasCaptionRoleFlags ? -1 : 1; + return spatializer.canBeSpatialized( + audioAttributes.getAudioAttributesV21().audioAttributes, builder.build()); + } + + public void release() { + if (listener == null || handler == null) { + return; } - return 0; + spatializer.removeOnSpatializerStateChangedListener(listener); + castNonNull(handler).removeCallbacksAndMessages(/* token= */ null); + handler = null; + listener = null; } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/ExoTrackSelection.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/ExoTrackSelection.java new file mode 100644 index 0000000000..2908bb5fdb --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/ExoTrackSelection.java @@ -0,0 +1,291 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.trackselection; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; +import com.google.android.exoplayer2.source.TrackGroup; +import com.google.android.exoplayer2.source.chunk.Chunk; +import com.google.android.exoplayer2.source.chunk.MediaChunk; +import com.google.android.exoplayer2.source.chunk.MediaChunkIterator; +import com.google.android.exoplayer2.upstream.BandwidthMeter; +import com.google.android.exoplayer2.util.Log; +import java.util.List; +import org.checkerframework.checker.nullness.compatqual.NullableType; + +/** + * A {@link TrackSelection} that can change the individually selected track as a result of calling + * {@link #updateSelectedTrack(long, long, long, List, MediaChunkIterator[])} or {@link + * #evaluateQueueSize(long, List)}. This only happens between calls to {@link #enable()} and {@link + * #disable()}. + */ +public interface ExoTrackSelection extends TrackSelection { + + /** Contains of a subset of selected tracks belonging to a {@link TrackGroup}. */ + final class Definition { + /** The {@link TrackGroup} which tracks belong to. */ + public final TrackGroup group; + /** The indices of the selected tracks in {@link #group}. */ + public final int[] tracks; + /** The type that will be returned from {@link TrackSelection#getType()}. */ + public final @Type int type; + + private static final String TAG = "ETSDefinition"; + + /** + * @param group The {@link TrackGroup}. Must not be null. 
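Related to the SpatializerWrapperV32 added just above, a hypothetical sketch of querying the platform Spatializer directly for a 5.1 PCM stream. It uses only the Android 12L (API 32) Spatializer and AudioFormat APIs referenced in this patch; the parameters are illustrative.

// Editorial sketch, not part of the patch.
import android.content.Context;
import android.media.AudioAttributes;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.Spatializer;
import androidx.annotation.RequiresApi;

final class SpatializerQuerySketch {

  @RequiresApi(32)
  static boolean canSpatializeFiveDotOnePcm(Context context) {
    AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    if (audioManager == null) {
      return false;
    }
    Spatializer spatializer = audioManager.getSpatializer();
    if (spatializer.getImmersiveAudioLevel() == Spatializer.SPATIALIZER_IMMERSIVE_LEVEL_NONE) {
      return false; // The device has no spatializer effect at all.
    }
    AudioAttributes attributes =
        new AudioAttributes.Builder().setUsage(AudioAttributes.USAGE_MEDIA).build();
    AudioFormat format =
        new AudioFormat.Builder()
            .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
            .setChannelMask(AudioFormat.CHANNEL_OUT_5POINT1)
            .setSampleRate(48_000)
            .build();
    return spatializer.isAvailable()
        && spatializer.isEnabled()
        && spatializer.canBeSpatialized(attributes, format);
  }
}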
+ * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be + * null or empty. May be in any order. + */ + public Definition(TrackGroup group, int... tracks) { + this(group, tracks, TrackSelection.TYPE_UNSET); + } + + /** + * @param group The {@link TrackGroup}. Must not be null. + * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be + * null or empty. May be in any order. + * @param type The type that will be returned from {@link TrackSelection#getType()}. + */ + public Definition(TrackGroup group, int[] tracks, @Type int type) { + if (tracks.length == 0) { + // TODO: Turn this into an assertion. + Log.e(TAG, "Empty tracks are not allowed", new IllegalArgumentException()); + } + this.group = group; + this.tracks = tracks; + this.type = type; + } + } + + /** Factory for {@link ExoTrackSelection} instances. */ + interface Factory { + + /** + * Creates track selections for the provided {@link Definition Definitions}. + * + *
      Implementations that create at most one adaptive track selection may use {@link + * TrackSelectionUtil#createTrackSelectionsForDefinitions}. + * + * @param definitions A {@link Definition} array. May include null values. + * @param bandwidthMeter A {@link BandwidthMeter} which can be used to select tracks. + * @param mediaPeriodId The {@link MediaPeriodId} of the period for which tracks are to be + * selected. + * @param timeline The {@link Timeline} holding the period for which tracks are to be selected. + * @return The created selections. Must have the same length as {@code definitions} and may + * include null values. + */ + @NullableType + ExoTrackSelection[] createTrackSelections( + @NullableType Definition[] definitions, + BandwidthMeter bandwidthMeter, + MediaPeriodId mediaPeriodId, + Timeline timeline); + } + + /** + * Enables the track selection. Dynamic changes via {@link #updateSelectedTrack(long, long, long, + * List, MediaChunkIterator[])}, {@link #evaluateQueueSize(long, List)} or {@link + * #shouldCancelChunkLoad(long, Chunk, List)} will only happen after this call. + * + *
      This method may not be called when the track selection is already enabled. + */ + void enable(); + + /** + * Disables this track selection. No further dynamic changes via {@link #updateSelectedTrack(long, + * long, long, List, MediaChunkIterator[])}, {@link #evaluateQueueSize(long, List)} or {@link + * #shouldCancelChunkLoad(long, Chunk, List)} will happen after this call. + * + *
      This method may only be called when the track selection is already enabled. + */ + void disable(); + + // Individual selected track. + + /** Returns the {@link Format} of the individual selected track. */ + Format getSelectedFormat(); + + /** Returns the index in the track group of the individual selected track. */ + int getSelectedIndexInTrackGroup(); + + /** Returns the index of the selected track. */ + int getSelectedIndex(); + + /** Returns the reason for the current track selection. */ + @C.SelectionReason + int getSelectionReason(); + + /** Returns optional data associated with the current track selection. */ + @Nullable + Object getSelectionData(); + + // Adaptation. + + /** + * Called to notify the selection of the current playback speed. The playback speed may affect + * adaptive track selection. + * + * @param playbackSpeed The factor by which playback is sped up. + */ + void onPlaybackSpeed(float playbackSpeed); + + /** + * Called to notify the selection of a position discontinuity. + * + *
      This happens when the playback position jumps, e.g., as a result of a seek being performed. + */ + default void onDiscontinuity() {} + + /** + * Called to notify when a rebuffer occurred. + * + *
      A rebuffer is defined to be caused by buffer depletion rather than a user action. Hence this + * method is not called during initial buffering or when buffering as a result of a seek + * operation. + */ + default void onRebuffer() {} + + /** + * Called to notify when the playback is paused or resumed. + * + * @param playWhenReady Whether playback will proceed when ready. + */ + default void onPlayWhenReadyChanged(boolean playWhenReady) {} + + /** + * Updates the selected track for sources that load media in discrete {@link MediaChunk}s. + * + *
      This method will only be called when the selection is enabled. + * + * @param playbackPositionUs The current playback position in microseconds. If playback of the + * period to which this track selection belongs has not yet started, the value will be the + * starting position in the period minus the duration of any media in previous periods still + * to be played. + * @param bufferedDurationUs The duration of media currently buffered from the current playback + * position, in microseconds. Note that the next load position can be calculated as {@code + * (playbackPositionUs + bufferedDurationUs)}. + * @param availableDurationUs The duration of media available for buffering from the current + * playback position, in microseconds, or {@link C#TIME_UNSET} if media can be buffered to the + * end of the current period. Note that if not set to {@link C#TIME_UNSET}, the position up to + * which media is available for buffering can be calculated as {@code (playbackPositionUs + + * availableDurationUs)}. + * @param queue The queue of already buffered {@link MediaChunk}s. Must not be modified. + * @param mediaChunkIterators An array of {@link MediaChunkIterator}s providing information about + * the sequence of upcoming media chunks for each track in the selection. All iterators start + * from the media chunk which will be loaded next if the respective track is selected. Note + * that this information may not be available for all tracks, and so some iterators may be + * empty. + */ + void updateSelectedTrack( + long playbackPositionUs, + long bufferedDurationUs, + long availableDurationUs, + List queue, + MediaChunkIterator[] mediaChunkIterators); + + /** + * Returns the number of chunks that should be retained in the queue. + * + *
      May be called by sources that load media in discrete {@link MediaChunk MediaChunks} and + * support discarding of buffered chunks. + * + *
      To avoid excessive re-buffering, implementations should normally return the size of the + * queue. An example of a case where a smaller value may be returned is if network conditions have + * improved dramatically, allowing chunks to be discarded and re-buffered in a track of + * significantly higher quality. Discarding chunks may allow faster switching to a higher quality + * track in this case. + * + *
      Note that even if the source supports discarding of buffered chunks, the actual number of + * discarded chunks is not guaranteed. The source will call {@link #updateSelectedTrack(long, + * long, long, List, MediaChunkIterator[])} with the updated queue of chunks before loading a new + * chunk to allow switching to another quality. + * + *
      This method will only be called when the selection is enabled and none of the {@link + * MediaChunk MediaChunks} in the queue are currently loading. + * + * @param playbackPositionUs The current playback position in microseconds. If playback of the + * period to which this track selection belongs has not yet started, the value will be the + * starting position in the period minus the duration of any media in previous periods still + * to be played. + * @param queue The queue of buffered {@link MediaChunk MediaChunks}. Must not be modified. + * @return The number of chunks to retain in the queue. + */ + int evaluateQueueSize(long playbackPositionUs, List queue); + + /** + * Returns whether an ongoing load of a chunk should be canceled. + * + *
      May be called by sources that load media in discrete {@link MediaChunk MediaChunks} and + * support canceling the ongoing chunk load. The ongoing chunk load is either the last {@link + * MediaChunk} in the queue or another type of {@link Chunk}, for example, if the source loads + * initialization or encryption data. + * + *
      To avoid excessive re-buffering, implementations should normally return {@code false}. An + * example where {@code true} might be returned is if a load of a high quality chunk gets stuck + * and canceling this load in favor of a lower quality alternative may avoid a rebuffer. + * + *
      The source will call {@link #evaluateQueueSize(long, List)} after the cancelation finishes + * to allow discarding of chunks, and {@link #updateSelectedTrack(long, long, long, List, + * MediaChunkIterator[])} before loading a new chunk to allow switching to another quality. + * + *
      This method will only be called when the selection is enabled. + * + * @param playbackPositionUs The current playback position in microseconds. If playback of the + * period to which this track selection belongs has not yet started, the value will be the + * starting position in the period minus the duration of any media in previous periods still + * to be played. + * @param loadingChunk The currently loading {@link Chunk} that will be canceled if this method + * returns {@code true}. + * @param queue The queue of buffered {@link MediaChunk MediaChunks}, including the {@code + * loadingChunk} if it's a {@link MediaChunk}. Must not be modified. + * @return Whether the ongoing load of {@code loadingChunk} should be canceled. + */ + default boolean shouldCancelChunkLoad( + long playbackPositionUs, Chunk loadingChunk, List queue) { + return false; + } + + /** + * Attempts to exclude the track at the specified index in the selection, making it ineligible for + * selection by calls to {@link #updateSelectedTrack(long, long, long, List, + * MediaChunkIterator[])} for the specified period of time. + * + *
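To make the contract of updateSelectedTrack, evaluateQueueSize and shouldCancelChunkLoad concrete, here is a hypothetical, deliberately conservative ExoTrackSelection built on BaseTrackSelection: it never switches tracks, retains the whole buffered queue and never cancels an ongoing load, which is the default behaviour the documentation above recommends. The class name is invented and the BaseTrackSelection(TrackGroup, int...) constructor is assumed from this ExoPlayer version.

// Editorial sketch, not part of the patch.
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.source.TrackGroup;
import com.google.android.exoplayer2.source.chunk.Chunk;
import com.google.android.exoplayer2.source.chunk.MediaChunk;
import com.google.android.exoplayer2.source.chunk.MediaChunkIterator;
import com.google.android.exoplayer2.trackselection.BaseTrackSelection;
import java.util.List;

final class ConservativeTrackSelection extends BaseTrackSelection {

  public ConservativeTrackSelection(TrackGroup group, int... tracks) {
    super(group, tracks);
  }

  @Override
  public void updateSelectedTrack(
      long playbackPositionUs,
      long bufferedDurationUs,
      long availableDurationUs,
      List<? extends MediaChunk> queue,
      MediaChunkIterator[] mediaChunkIterators) {
    // A real adaptive selection would re-evaluate the selected track here; this sketch never does.
  }

  @Override
  public int getSelectedIndex() {
    return 0; // Always report the first track of the selection.
  }

  @Override
  public @C.SelectionReason int getSelectionReason() {
    return C.SELECTION_REASON_UNKNOWN;
  }

  @Override
  @Nullable
  public Object getSelectionData() {
    return null;
  }

  @Override
  public int evaluateQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) {
    return queue.size(); // Retain everything that is already buffered.
  }

  @Override
  public boolean shouldCancelChunkLoad(
      long playbackPositionUs, Chunk loadingChunk, List<? extends MediaChunk> queue) {
    return false; // Let the ongoing load finish.
  }
}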
      Exclusion will fail if all other tracks are currently excluded. If excluding the currently + * selected track, note that it will remain selected until the next call to {@link + * #updateSelectedTrack(long, long, long, List, MediaChunkIterator[])}. + * + *
      This method will only be called when the selection is enabled. + * + * @param index The index of the track in the selection. + * @param exclusionDurationMs The duration of time for which the track should be excluded, in + * milliseconds. + * @return Whether exclusion was successful. + */ + boolean blacklist(int index, long exclusionDurationMs); + + /** + * Returns whether the track at the specified index in the selection is excluded. + * + * @param index The index of the track in the selection. + * @param nowMs The current time in the timebase of {@link + * android.os.SystemClock#elapsedRealtime()}. + */ + boolean isBlacklisted(int index, long nowMs); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/FixedTrackSelection.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/FixedTrackSelection.java index fefad00cbd..178fba1960 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/FixedTrackSelection.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/FixedTrackSelection.java @@ -20,70 +20,45 @@ import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.chunk.MediaChunk; import com.google.android.exoplayer2.source.chunk.MediaChunkIterator; -import com.google.android.exoplayer2.upstream.BandwidthMeter; import java.util.List; -import org.checkerframework.checker.nullness.compatqual.NullableType; -/** - * A {@link TrackSelection} consisting of a single track. - */ +/** A {@link TrackSelection} consisting of a single track. */ public final class FixedTrackSelection extends BaseTrackSelection { + private final @C.SelectionReason int reason; + @Nullable private final Object data; + /** - * @deprecated Don't use as adaptive track selection factory as it will throw when multiple tracks - * are selected. If you would like to disable adaptive selection in {@link - * DefaultTrackSelector}, enable the {@link - * DefaultTrackSelector.Parameters#forceHighestSupportedBitrate} flag instead. + * @param group The {@link TrackGroup}. Must not be null. + * @param track The index of the selected track within the {@link TrackGroup}. */ - @Deprecated - public static final class Factory implements TrackSelection.Factory { - - private final int reason; - @Nullable private final Object data; - - public Factory() { - this.reason = C.SELECTION_REASON_UNKNOWN; - this.data = null; - } - - /** - * @param reason A reason for the track selection. - * @param data Optional data associated with the track selection. - */ - public Factory(int reason, @Nullable Object data) { - this.reason = reason; - this.data = data; - } - - @Override - public @NullableType TrackSelection[] createTrackSelections( - @NullableType Definition[] definitions, BandwidthMeter bandwidthMeter) { - return TrackSelectionUtil.createTrackSelectionsForDefinitions( - definitions, - definition -> - new FixedTrackSelection(definition.group, definition.tracks[0], reason, data)); - } + public FixedTrackSelection(TrackGroup group, int track) { + this(group, /* track= */ track, /* type= */ TrackSelection.TYPE_UNSET); } - private final int reason; - @Nullable private final Object data; - /** * @param group The {@link TrackGroup}. Must not be null. * @param track The index of the selected track within the {@link TrackGroup}. + * @param type The type that will be returned from {@link TrackSelection#getType()}. 
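A hypothetical usage sketch of the reworked FixedTrackSelection constructors: pinning playback to one track of a TrackGroup. Format.Builder and the TrackGroup(Format...) constructor are assumed from this ExoPlayer version; the format values are invented.

// Editorial sketch, not part of the patch.
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.source.TrackGroup;
import com.google.android.exoplayer2.trackselection.FixedTrackSelection;
import com.google.android.exoplayer2.util.MimeTypes;

final class FixedSelectionSketch {

  static FixedTrackSelection pinTo720p() {
    Format sd =
        new Format.Builder()
            .setSampleMimeType(MimeTypes.VIDEO_H264)
            .setWidth(640)
            .setHeight(360)
            .build();
    Format hd =
        new Format.Builder()
            .setSampleMimeType(MimeTypes.VIDEO_H264)
            .setWidth(1280)
            .setHeight(720)
            .build();
    TrackGroup group = new TrackGroup(sd, hd);
    // Index 1 selects the 720p format; a FixedTrackSelection never adapts away from it.
    return new FixedTrackSelection(group, /* track= */ 1);
  }
}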
*/ - public FixedTrackSelection(TrackGroup group, int track) { - this(group, track, C.SELECTION_REASON_UNKNOWN, null); + public FixedTrackSelection(TrackGroup group, int track, @Type int type) { + this(group, track, type, C.SELECTION_REASON_UNKNOWN, /* data= */ null); } /** * @param group The {@link TrackGroup}. Must not be null. * @param track The index of the selected track within the {@link TrackGroup}. + * @param type The type that will be returned from {@link TrackSelection#getType()}. * @param reason A reason for the track selection. * @param data Optional data associated with the track selection. */ - public FixedTrackSelection(TrackGroup group, int track, int reason, @Nullable Object data) { - super(group, track); + public FixedTrackSelection( + TrackGroup group, + int track, + @Type int type, + @C.SelectionReason int reason, + @Nullable Object data) { + super(group, /* tracks= */ new int[] {track}, type); this.reason = reason; this.data = data; } @@ -104,7 +79,7 @@ public int getSelectedIndex() { } @Override - public int getSelectionReason() { + public @C.SelectionReason int getSelectionReason() { return reason; } @@ -113,5 +88,4 @@ public int getSelectionReason() { public Object getSelectionData() { return data; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/MappingTrackSelector.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/MappingTrackSelector.java index f6ba1f259e..eb252ddf05 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/MappingTrackSelector.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/MappingTrackSelector.java @@ -15,39 +15,47 @@ */ package com.google.android.exoplayer2.trackselection; +import static java.lang.Math.max; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.util.Pair; import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.C.FormatSupport; import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.Renderer; import com.google.android.exoplayer2.RendererCapabilities; import com.google.android.exoplayer2.RendererCapabilities.AdaptiveSupport; import com.google.android.exoplayer2.RendererCapabilities.Capabilities; -import com.google.android.exoplayer2.RendererCapabilities.FormatSupport; import com.google.android.exoplayer2.RendererConfiguration; import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.Tracks; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.TrackGroupArray; -import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.Arrays; import org.checkerframework.checker.nullness.compatqual.NullableType; /** * Base class for {@link TrackSelector}s that first establish a mapping 
between {@link TrackGroup}s - * and {@link Renderer}s, and then from that mapping create a {@link TrackSelection} for each + * and {@link Renderer}s, and then from that mapping create a {@link ExoTrackSelection} for each * renderer. */ public abstract class MappingTrackSelector extends TrackSelector { - /** - * Provides mapped track information for each renderer. - */ + /** Provides mapped track information for each renderer. */ public static final class MappedTrackInfo { /** @@ -55,51 +63,52 @@ public static final class MappedTrackInfo { * {@link #RENDERER_SUPPORT_NO_TRACKS}, {@link #RENDERER_SUPPORT_UNSUPPORTED_TRACKS}, {@link * #RENDERER_SUPPORT_EXCEEDS_CAPABILITIES_TRACKS} or {@link #RENDERER_SUPPORT_PLAYABLE_TRACKS}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({ RENDERER_SUPPORT_NO_TRACKS, RENDERER_SUPPORT_UNSUPPORTED_TRACKS, RENDERER_SUPPORT_EXCEEDS_CAPABILITIES_TRACKS, RENDERER_SUPPORT_PLAYABLE_TRACKS }) - @interface RendererSupport {} + public @interface RendererSupport {} /** The renderer does not have any associated tracks. */ public static final int RENDERER_SUPPORT_NO_TRACKS = 0; /** * The renderer has tracks mapped to it, but all are unsupported. In other words, {@link - * #getTrackSupport(int, int, int)} returns {@link RendererCapabilities#FORMAT_UNSUPPORTED_DRM}, - * {@link RendererCapabilities#FORMAT_UNSUPPORTED_SUBTYPE} or {@link - * RendererCapabilities#FORMAT_UNSUPPORTED_TYPE} for all tracks mapped to the renderer. + * #getTrackSupport(int, int, int)} returns {@link C#FORMAT_UNSUPPORTED_DRM}, {@link + * C#FORMAT_UNSUPPORTED_SUBTYPE} or {@link C#FORMAT_UNSUPPORTED_TYPE} for all tracks mapped to + * the renderer. */ public static final int RENDERER_SUPPORT_UNSUPPORTED_TRACKS = 1; /** * The renderer has tracks mapped to it and at least one is of a supported type, but all such * tracks exceed the renderer's capabilities. In other words, {@link #getTrackSupport(int, int, - * int)} returns {@link RendererCapabilities#FORMAT_EXCEEDS_CAPABILITIES} for at least one - * track mapped to the renderer, but does not return {@link - * RendererCapabilities#FORMAT_HANDLED} for any tracks mapped to the renderer. + * int)} returns {@link C#FORMAT_EXCEEDS_CAPABILITIES} for at least one track mapped to the + * renderer, but does not return {@link C#FORMAT_HANDLED} for any tracks mapped to the renderer. */ public static final int RENDERER_SUPPORT_EXCEEDS_CAPABILITIES_TRACKS = 2; /** * The renderer has tracks mapped to it, and at least one such track is playable. In other - * words, {@link #getTrackSupport(int, int, int)} returns {@link - * RendererCapabilities#FORMAT_HANDLED} for at least one track mapped to the renderer. + * words, {@link #getTrackSupport(int, int, int)} returns {@link C#FORMAT_HANDLED} for at least + * one track mapped to the renderer. */ public static final int RENDERER_SUPPORT_PLAYABLE_TRACKS = 3; - /** @deprecated Use {@link #getRendererCount()}. 
*/ - @Deprecated public final int length; - private final int rendererCount; - private final int[] rendererTrackTypes; + private final String[] rendererNames; + private final @C.TrackType int[] rendererTrackTypes; private final TrackGroupArray[] rendererTrackGroups; - @AdaptiveSupport private final int[] rendererMixedMimeTypeAdaptiveSupports; - @Capabilities private final int[][][] rendererFormatSupports; + private final @AdaptiveSupport int[] rendererMixedMimeTypeAdaptiveSupports; + private final @Capabilities int[][][] rendererFormatSupports; private final TrackGroupArray unmappedTrackGroups; /** - * @param rendererTrackTypes The track type handled by each renderer. + * @param rendererNames The name of each renderer. + * @param rendererTrackTypes The {@link C.TrackType track type} handled by each renderer. * @param rendererTrackGroups The {@link TrackGroup}s mapped to each renderer. * @param rendererMixedMimeTypeAdaptiveSupports The {@link AdaptiveSupport} for mixed MIME type * adaptation for the renderer. @@ -107,20 +116,21 @@ public static final class MappedTrackInfo { * renderer, track group and track (in that order). * @param unmappedTrackGroups {@link TrackGroup}s not mapped to any renderer. */ - @SuppressWarnings("deprecation") + @VisibleForTesting /* package */ MappedTrackInfo( - int[] rendererTrackTypes, + String[] rendererNames, + @C.TrackType int[] rendererTrackTypes, TrackGroupArray[] rendererTrackGroups, @AdaptiveSupport int[] rendererMixedMimeTypeAdaptiveSupports, @Capabilities int[][][] rendererFormatSupports, TrackGroupArray unmappedTrackGroups) { + this.rendererNames = rendererNames; this.rendererTrackTypes = rendererTrackTypes; this.rendererTrackGroups = rendererTrackGroups; this.rendererFormatSupports = rendererFormatSupports; this.rendererMixedMimeTypeAdaptiveSupports = rendererMixedMimeTypeAdaptiveSupports; this.unmappedTrackGroups = unmappedTrackGroups; this.rendererCount = rendererTrackTypes.length; - this.length = rendererCount; } /** Returns the number of renderers. */ @@ -128,14 +138,25 @@ public int getRendererCount() { return rendererCount; } + /** + * Returns the name of the renderer at a given index. + * + * @see Renderer#getName() + * @param rendererIndex The renderer index. + * @return The name of the renderer. + */ + public String getRendererName(int rendererIndex) { + return rendererNames[rendererIndex]; + } + /** * Returns the track type that the renderer at a given index handles. * * @see Renderer#getTrackType() * @param rendererIndex The renderer index. - * @return One of the {@code TRACK_TYPE_*} constants defined in {@link C}. + * @return The {@link C.TrackType} of the renderer. */ - public int getRendererType(int rendererIndex) { + public @C.TrackType int getRendererType(int rendererIndex) { return rendererTrackTypes[rendererIndex]; } @@ -155,65 +176,61 @@ public TrackGroupArray getTrackGroups(int rendererIndex) { * @param rendererIndex The renderer index. * @return The {@link RendererSupport}. 
*/ - @RendererSupport - public int getRendererSupport(int rendererIndex) { + public @RendererSupport int getRendererSupport(int rendererIndex) { @RendererSupport int bestRendererSupport = RENDERER_SUPPORT_NO_TRACKS; @Capabilities int[][] rendererFormatSupport = rendererFormatSupports[rendererIndex]; for (@Capabilities int[] trackGroupFormatSupport : rendererFormatSupport) { for (@Capabilities int trackFormatSupport : trackGroupFormatSupport) { int trackRendererSupport; switch (RendererCapabilities.getFormatSupport(trackFormatSupport)) { - case RendererCapabilities.FORMAT_HANDLED: + case C.FORMAT_HANDLED: return RENDERER_SUPPORT_PLAYABLE_TRACKS; - case RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES: + case C.FORMAT_EXCEEDS_CAPABILITIES: trackRendererSupport = RENDERER_SUPPORT_EXCEEDS_CAPABILITIES_TRACKS; break; - case RendererCapabilities.FORMAT_UNSUPPORTED_TYPE: - case RendererCapabilities.FORMAT_UNSUPPORTED_SUBTYPE: - case RendererCapabilities.FORMAT_UNSUPPORTED_DRM: + case C.FORMAT_UNSUPPORTED_TYPE: + case C.FORMAT_UNSUPPORTED_SUBTYPE: + case C.FORMAT_UNSUPPORTED_DRM: trackRendererSupport = RENDERER_SUPPORT_UNSUPPORTED_TRACKS; break; default: throw new IllegalStateException(); } - bestRendererSupport = Math.max(bestRendererSupport, trackRendererSupport); + bestRendererSupport = max(bestRendererSupport, trackRendererSupport); } } return bestRendererSupport; } - /** @deprecated Use {@link #getTypeSupport(int)}. */ - @Deprecated - @RendererSupport - public int getTrackTypeRendererSupport(int trackType) { - return getTypeSupport(trackType); - } - /** * Returns the extent to which tracks of a specified type are supported. This is the best level * of support obtained from {@link #getRendererSupport(int)} for all renderers that handle the * specified type. If no such renderers exist then {@link #RENDERER_SUPPORT_NO_TRACKS} is * returned. * - * @param trackType The track type. One of the {@link C} {@code TRACK_TYPE_*} constants. + * @param trackType The {@link C.TrackType track type}. * @return The {@link RendererSupport}. */ - @RendererSupport - public int getTypeSupport(int trackType) { + public @RendererSupport int getTypeSupport(@C.TrackType int trackType) { @RendererSupport int bestRendererSupport = RENDERER_SUPPORT_NO_TRACKS; for (int i = 0; i < rendererCount; i++) { if (rendererTrackTypes[i] == trackType) { - bestRendererSupport = Math.max(bestRendererSupport, getRendererSupport(i)); + bestRendererSupport = max(bestRendererSupport, getRendererSupport(i)); } } return bestRendererSupport; } - /** @deprecated Use {@link #getTrackSupport(int, int, int)}. */ - @Deprecated - @FormatSupport - public int getTrackFormatSupport(int rendererIndex, int groupIndex, int trackIndex) { - return getTrackSupport(rendererIndex, groupIndex, trackIndex); + /** + * Returns the {@link Capabilities} of the renderer for an individual track. + * + * @param rendererIndex The renderer index. + * @param groupIndex The index of the track group to which the track belongs. + * @param trackIndex The index of the track within the track group. + * @return The {@link Capabilities}. + */ + public @Capabilities int getCapabilities(int rendererIndex, int groupIndex, int trackIndex) { + return rendererFormatSupports[rendererIndex][groupIndex][trackIndex]; } /** @@ -224,24 +241,21 @@ public int getTrackFormatSupport(int rendererIndex, int groupIndex, int trackInd * @param trackIndex The index of the track within the track group. * @return The {@link FormatSupport}. 
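
As a rough illustration of these MappedTrackInfo accessors (a sketch, not part of the patch; it assumes a non-null MappedTrackInfo obtained elsewhere, for example from getCurrentMappedTrackInfo()):

    import android.util.Log;
    import com.google.android.exoplayer2.trackselection.MappingTrackSelector.MappedTrackInfo;

    final class TrackSupportDebug {
      private TrackSupportDebug() {}

      /** Logs, for each renderer, whether it has at least one playable mapped track. */
      static void logRendererSupport(MappedTrackInfo info) {
        for (int renderer = 0; renderer < info.getRendererCount(); renderer++) {
          boolean playable =
              info.getRendererSupport(renderer) == MappedTrackInfo.RENDERER_SUPPORT_PLAYABLE_TRACKS;
          Log.d("TrackSupport", info.getRendererName(renderer) + " playable=" + playable);
          for (int group = 0; group < info.getTrackGroups(renderer).length; group++) {
            for (int track = 0; track < info.getTrackGroups(renderer).get(group).length; track++) {
              // One of C.FORMAT_HANDLED, C.FORMAT_EXCEEDS_CAPABILITIES or C.FORMAT_UNSUPPORTED_*.
              int support = info.getTrackSupport(renderer, group, track);
              Log.d("TrackSupport", "  group=" + group + " track=" + track + " support=" + support);
            }
          }
        }
      }
    }
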
*/ - @FormatSupport - public int getTrackSupport(int rendererIndex, int groupIndex, int trackIndex) { + public @FormatSupport int getTrackSupport(int rendererIndex, int groupIndex, int trackIndex) { return RendererCapabilities.getFormatSupport( - rendererFormatSupports[rendererIndex][groupIndex][trackIndex]); + getCapabilities(rendererIndex, groupIndex, trackIndex)); } /** * Returns the extent to which a renderer supports adaptation between supported tracks in a * specified {@link TrackGroup}. * - *

      Tracks for which {@link #getTrackSupport(int, int, int)} returns {@link - * RendererCapabilities#FORMAT_HANDLED} are always considered. Tracks for which {@link - * #getTrackSupport(int, int, int)} returns {@link - * RendererCapabilities#FORMAT_EXCEEDS_CAPABILITIES} are also considered if {@code + *

      Tracks for which {@link #getTrackSupport(int, int, int)} returns {@link C#FORMAT_HANDLED} + * are always considered. Tracks for which {@link #getTrackSupport(int, int, int)} returns + * {@link C#FORMAT_EXCEEDS_CAPABILITIES} are also considered if {@code * includeCapabilitiesExceededTracks} is set to {@code true}. Tracks for which {@link - * #getTrackSupport(int, int, int)} returns {@link RendererCapabilities#FORMAT_UNSUPPORTED_DRM}, - * {@link RendererCapabilities#FORMAT_UNSUPPORTED_TYPE} or {@link - * RendererCapabilities#FORMAT_UNSUPPORTED_SUBTYPE} are never considered. + * #getTrackSupport(int, int, int)} returns {@link C#FORMAT_UNSUPPORTED_DRM}, {@link + * C#FORMAT_UNSUPPORTED_TYPE} or {@link C#FORMAT_UNSUPPORTED_SUBTYPE} are never considered. * * @param rendererIndex The renderer index. * @param groupIndex The index of the track group. @@ -249,8 +263,7 @@ public int getTrackSupport(int rendererIndex, int groupIndex, int trackIndex) { * renderer are included when determining support. * @return The {@link AdaptiveSupport}. */ - @AdaptiveSupport - public int getAdaptiveSupport( + public @AdaptiveSupport int getAdaptiveSupport( int rendererIndex, int groupIndex, boolean includeCapabilitiesExceededTracks) { int trackCount = rendererTrackGroups[rendererIndex].get(groupIndex).length; // Iterate over the tracks in the group, recording the indices of those to consider. @@ -258,9 +271,9 @@ public int getAdaptiveSupport( int trackIndexCount = 0; for (int i = 0; i < trackCount; i++) { @FormatSupport int fixedSupport = getTrackSupport(rendererIndex, groupIndex, i); - if (fixedSupport == RendererCapabilities.FORMAT_HANDLED + if (fixedSupport == C.FORMAT_HANDLED || (includeCapabilitiesExceededTracks - && fixedSupport == RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES)) { + && fixedSupport == C.FORMAT_EXCEEDS_CAPABILITIES)) { trackIndices[trackIndexCount++] = i; } } @@ -276,14 +289,15 @@ public int getAdaptiveSupport( * @param groupIndex The index of the track group. * @return The {@link AdaptiveSupport}. */ - @AdaptiveSupport - public int getAdaptiveSupport(int rendererIndex, int groupIndex, int[] trackIndices) { + public @AdaptiveSupport int getAdaptiveSupport( + int rendererIndex, int groupIndex, int[] trackIndices) { int handledTrackCount = 0; @AdaptiveSupport int adaptiveSupport = RendererCapabilities.ADAPTIVE_SEAMLESS; boolean multipleMimeTypes = false; String firstSampleMimeType = null; for (int i = 0; i < trackIndices.length; i++) { int trackIndex = trackIndices[i]; + @Nullable String sampleMimeType = rendererTrackGroups[rendererIndex].get(groupIndex).getFormat(trackIndex).sampleMimeType; if (handledTrackCount++ == 0) { @@ -292,27 +306,20 @@ public int getAdaptiveSupport(int rendererIndex, int groupIndex, int[] trackIndi multipleMimeTypes |= !Util.areEqual(firstSampleMimeType, sampleMimeType); } adaptiveSupport = - Math.min( + min( adaptiveSupport, RendererCapabilities.getAdaptiveSupport( rendererFormatSupports[rendererIndex][groupIndex][i])); } return multipleMimeTypes - ? Math.min(adaptiveSupport, rendererMixedMimeTypeAdaptiveSupports[rendererIndex]) + ? min(adaptiveSupport, rendererMixedMimeTypeAdaptiveSupports[rendererIndex]) : adaptiveSupport; } - /** @deprecated Use {@link #getUnmappedTrackGroups()}. */ - @Deprecated - public TrackGroupArray getUnassociatedTrackGroups() { - return getUnmappedTrackGroups(); - } - /** Returns {@link TrackGroup}s not mapped to any renderer. 
*/ public TrackGroupArray getUnmappedTrackGroups() { return unmappedTrackGroups; } - } @Nullable private MappedTrackInfo currentMappedTrackInfo; @@ -321,14 +328,15 @@ public TrackGroupArray getUnmappedTrackGroups() { * Returns the mapping information for the currently active track selection, or null if no * selection is currently active. */ - public final @Nullable MappedTrackInfo getCurrentMappedTrackInfo() { + @Nullable + public final MappedTrackInfo getCurrentMappedTrackInfo() { return currentMappedTrackInfo; } // TrackSelector implementation. @Override - public final void onSelectionActivated(Object info) { + public final void onSelectionActivated(@Nullable Object info) { currentMappedTrackInfo = (MappedTrackInfo) info; } @@ -360,8 +368,7 @@ public final TrackSelectorResult selectTracks( for (int groupIndex = 0; groupIndex < trackGroups.length; groupIndex++) { TrackGroup group = trackGroups.get(groupIndex); // Associate the group to a preferred renderer. - boolean preferUnassociatedRenderer = - MimeTypes.getTrackType(group.getFormat(0).sampleMimeType) == C.TRACK_TYPE_METADATA; + boolean preferUnassociatedRenderer = group.type == C.TRACK_TYPE_METADATA; int rendererIndex = findRenderer( rendererCapabilities, group, rendererTrackGroupCounts, preferUnassociatedRenderer); @@ -380,6 +387,7 @@ public final TrackSelectorResult selectTracks( // Create a track group array for each renderer, and trim each rendererFormatSupports entry. TrackGroupArray[] rendererTrackGroupArrays = new TrackGroupArray[rendererCapabilities.length]; + String[] rendererNames = new String[rendererCapabilities.length]; int[] rendererTrackTypes = new int[rendererCapabilities.length]; for (int i = 0; i < rendererCapabilities.length; i++) { int rendererTrackGroupCount = rendererTrackGroupCounts[i]; @@ -388,6 +396,7 @@ public final TrackSelectorResult selectTracks( Util.nullSafeArrayCopy(rendererTrackGroups[i], rendererTrackGroupCount)); rendererFormatSupports[i] = Util.nullSafeArrayCopy(rendererFormatSupports[i], rendererTrackGroupCount); + rendererNames[i] = rendererCapabilities[i].getName(); rendererTrackTypes[i] = rendererCapabilities[i].getTrackType(); } @@ -401,47 +410,58 @@ public final TrackSelectorResult selectTracks( // Package up the track information and selections. MappedTrackInfo mappedTrackInfo = new MappedTrackInfo( + rendererNames, rendererTrackTypes, rendererTrackGroupArrays, rendererMixedMimeTypeAdaptationSupports, rendererFormatSupports, unmappedTrackGroupArray); - Pair<@NullableType RendererConfiguration[], @NullableType TrackSelection[]> result = + Pair<@NullableType RendererConfiguration[], @NullableType ExoTrackSelection[]> result = selectTracks( - mappedTrackInfo, rendererFormatSupports, rendererMixedMimeTypeAdaptationSupports); - return new TrackSelectorResult(result.first, result.second, mappedTrackInfo); + mappedTrackInfo, + rendererFormatSupports, + rendererMixedMimeTypeAdaptationSupports, + periodId, + timeline); + + Tracks tracks = TrackSelectionUtil.buildTracks(mappedTrackInfo, result.second); + + return new TrackSelectorResult(result.first, result.second, tracks, mappedTrackInfo); } /** * Given mapped track information, returns a track selection and configuration for each renderer. * * @param mappedTrackInfo Mapped track information. - * @param rendererFormatSupports The {@link Capabilities} for ach mapped track, indexed by + * @param rendererFormatSupports The {@link Capabilities} for each mapped track, indexed by * renderer, track group and track (in that order). 
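
For example, adaptive capability for the first mapped group of renderer 0 could be probed as below (sketch only; it assumes the selector passed in is the MappingTrackSelector currently driving playback):

    import com.google.android.exoplayer2.RendererCapabilities;
    import com.google.android.exoplayer2.trackselection.MappingTrackSelector;
    import com.google.android.exoplayer2.trackselection.MappingTrackSelector.MappedTrackInfo;

    final class AdaptiveSupportCheck {
      private AdaptiveSupportCheck() {}

      /** Returns whether renderer 0 can switch seamlessly within its first mapped track group. */
      static boolean canAdaptSeamlessly(MappingTrackSelector trackSelector) {
        MappedTrackInfo info = trackSelector.getCurrentMappedTrackInfo();
        if (info == null
            || info.getRendererCount() == 0
            || info.getTrackGroups(/* rendererIndex= */ 0).length == 0) {
          return false;
        }
        // Only tracks reported as C.FORMAT_HANDLED are considered here.
        int adaptiveSupport =
            info.getAdaptiveSupport(
                /* rendererIndex= */ 0,
                /* groupIndex= */ 0,
                /* includeCapabilitiesExceededTracks= */ false);
        return adaptiveSupport == RendererCapabilities.ADAPTIVE_SEAMLESS;
      }
    }
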
* @param rendererMixedMimeTypeAdaptationSupport The {@link AdaptiveSupport} for mixed MIME type * adaptation for the renderer. + * @param mediaPeriodId The {@link MediaPeriodId} of the period for which tracks are to be + * selected. + * @param timeline The {@link Timeline} holding the period for which tracks are to be selected. * @return A pair consisting of the track selections and configurations for each renderer. A null * configuration indicates the renderer should be disabled, in which case the track selection * will also be null. A track selection may also be null for a non-disabled renderer if {@link * RendererCapabilities#getTrackType()} is {@link C#TRACK_TYPE_NONE}. * @throws ExoPlaybackException If an error occurs while selecting the tracks. */ - protected abstract Pair<@NullableType RendererConfiguration[], @NullableType TrackSelection[]> + protected abstract Pair<@NullableType RendererConfiguration[], @NullableType ExoTrackSelection[]> selectTracks( MappedTrackInfo mappedTrackInfo, @Capabilities int[][][] rendererFormatSupports, - @AdaptiveSupport int[] rendererMixedMimeTypeAdaptationSupport) + @AdaptiveSupport int[] rendererMixedMimeTypeAdaptationSupport, + MediaPeriodId mediaPeriodId, + Timeline timeline) throws ExoPlaybackException; /** * Finds the renderer to which the provided {@link TrackGroup} should be mapped. * *

      A {@link TrackGroup} is mapped to the renderer that reports the highest of (listed in - * decreasing order of support) {@link RendererCapabilities#FORMAT_HANDLED}, {@link - * RendererCapabilities#FORMAT_EXCEEDS_CAPABILITIES}, {@link - * RendererCapabilities#FORMAT_UNSUPPORTED_DRM} and {@link - * RendererCapabilities#FORMAT_UNSUPPORTED_SUBTYPE}. + * decreasing order of support) {@link C#FORMAT_HANDLED}, {@link C#FORMAT_EXCEEDS_CAPABILITIES}, + * {@link C#FORMAT_UNSUPPORTED_DRM} and {@link C#FORMAT_UNSUPPORTED_SUBTYPE}. * *

      In the case that two or more renderers report the same level of support, the assignment * depends on {@code preferUnassociatedRenderer}. @@ -454,9 +474,9 @@ public final TrackSelectorResult selectTracks( * available renderers have already mapped track groups. * * - *

      If all renderers report {@link RendererCapabilities#FORMAT_UNSUPPORTED_TYPE} for all of the - * tracks in the group, then {@code renderers.length} is returned to indicate that the group was - * not mapped to any renderer. + *

      If all renderers report {@link C#FORMAT_UNSUPPORTED_TYPE} for all of the tracks in the + * group, then {@code renderers.length} is returned to indicate that the group was not mapped to + * any renderer. * * @param rendererCapabilities The {@link RendererCapabilities} of the renderers. * @param group The track group to map to a renderer. @@ -474,17 +494,17 @@ private static int findRenderer( boolean preferUnassociatedRenderer) throws ExoPlaybackException { int bestRendererIndex = rendererCapabilities.length; - @FormatSupport int bestFormatSupportLevel = RendererCapabilities.FORMAT_UNSUPPORTED_TYPE; + @FormatSupport int bestFormatSupportLevel = C.FORMAT_UNSUPPORTED_TYPE; boolean bestRendererIsUnassociated = true; for (int rendererIndex = 0; rendererIndex < rendererCapabilities.length; rendererIndex++) { RendererCapabilities rendererCapability = rendererCapabilities[rendererIndex]; - @FormatSupport int formatSupportLevel = RendererCapabilities.FORMAT_UNSUPPORTED_TYPE; + @FormatSupport int formatSupportLevel = C.FORMAT_UNSUPPORTED_TYPE; for (int trackIndex = 0; trackIndex < group.length; trackIndex++) { @FormatSupport int trackFormatSupportLevel = RendererCapabilities.getFormatSupport( rendererCapability.supportsFormat(group.getFormat(trackIndex))); - formatSupportLevel = Math.max(formatSupportLevel, trackFormatSupportLevel); + formatSupportLevel = max(formatSupportLevel, trackFormatSupportLevel); } boolean rendererIsUnassociated = rendererTrackGroupCounts[rendererIndex] == 0; if (formatSupportLevel > bestFormatSupportLevel @@ -509,9 +529,8 @@ private static int findRenderer( * @return An array containing {@link Capabilities} for each track in the group. * @throws ExoPlaybackException If an error occurs determining the format support. */ - @Capabilities - private static int[] getFormatSupport(RendererCapabilities rendererCapabilities, TrackGroup group) - throws ExoPlaybackException { + private static @Capabilities int[] getFormatSupport( + RendererCapabilities rendererCapabilities, TrackGroup group) throws ExoPlaybackException { @Capabilities int[] formatSupport = new int[group.length]; for (int i = 0; i < group.length; i++) { formatSupport[i] = rendererCapabilities.supportsFormat(group.getFormat(i)); @@ -528,8 +547,7 @@ private static int[] getFormatSupport(RendererCapabilities rendererCapabilities, * renderer. * @throws ExoPlaybackException If an error occurs determining the adaptation support. 
*/ - @AdaptiveSupport - private static int[] getMixedMimeTypeAdaptationSupports( + private static @AdaptiveSupport int[] getMixedMimeTypeAdaptationSupports( RendererCapabilities[] rendererCapabilities) throws ExoPlaybackException { @AdaptiveSupport int[] mixedMimeTypeAdaptationSupport = new int[rendererCapabilities.length]; for (int i = 0; i < mixedMimeTypeAdaptationSupport.length; i++) { @@ -537,5 +555,4 @@ private static int[] getMixedMimeTypeAdaptationSupports( } return mixedMimeTypeAdaptationSupport; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/RandomTrackSelection.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/RandomTrackSelection.java index f35e7ec755..f2c13ba9c0 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/RandomTrackSelection.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/RandomTrackSelection.java @@ -18,6 +18,8 @@ import android.os.SystemClock; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.chunk.MediaChunk; import com.google.android.exoplayer2.source.chunk.MediaChunkIterator; @@ -26,15 +28,11 @@ import java.util.Random; import org.checkerframework.checker.nullness.compatqual.NullableType; -/** - * A {@link TrackSelection} whose selected track is updated randomly. - */ +/** An {@link ExoTrackSelection} whose selected track is updated randomly. */ public final class RandomTrackSelection extends BaseTrackSelection { - /** - * Factory for {@link RandomTrackSelection} instances. - */ - public static final class Factory implements TrackSelection.Factory { + /** Factory for {@link RandomTrackSelection} instances. */ + public static final class Factory implements ExoTrackSelection.Factory { private final Random random; @@ -50,11 +48,16 @@ public Factory(int seed) { } @Override - public @NullableType TrackSelection[] createTrackSelections( - @NullableType Definition[] definitions, BandwidthMeter bandwidthMeter) { + public @NullableType ExoTrackSelection[] createTrackSelections( + @NullableType Definition[] definitions, + BandwidthMeter bandwidthMeter, + MediaPeriodId mediaPeriodId, + Timeline timeline) { return TrackSelectionUtil.createTrackSelectionsForDefinitions( definitions, - definition -> new RandomTrackSelection(definition.group, definition.tracks, random)); + definition -> + new RandomTrackSelection( + definition.group, definition.tracks, definition.type, random)); } } @@ -62,35 +65,14 @@ public Factory(int seed) { private int selectedIndex; - /** - * @param group The {@link TrackGroup}. Must not be null. - * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be - * null or empty. May be in any order. - */ - public RandomTrackSelection(TrackGroup group, int... tracks) { - super(group, tracks); - random = new Random(); - selectedIndex = random.nextInt(length); - } - - /** - * @param group The {@link TrackGroup}. Must not be null. - * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be - * null or empty. May be in any order. - * @param seed A seed for the {@link Random} instance used to update the selected track. 
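
Because the factory now threads a selection type into the constructor shown just below, constructing a selection directly looks roughly like this (sketch only; the Formats are placeholder values, and in practice the Factory is handed to a track selector rather than building the selection by hand):

    import com.google.android.exoplayer2.Format;
    import com.google.android.exoplayer2.source.TrackGroup;
    import com.google.android.exoplayer2.trackselection.RandomTrackSelection;
    import com.google.android.exoplayer2.trackselection.TrackSelection;
    import com.google.android.exoplayer2.util.MimeTypes;
    import java.util.Random;

    final class RandomSelectionExample {
      private RandomSelectionExample() {}

      static RandomTrackSelection createForTwoTracks() {
        Format low =
            new Format.Builder()
                .setSampleMimeType(MimeTypes.VIDEO_H264)
                .setAverageBitrate(800_000)
                .build();
        Format high =
            new Format.Builder()
                .setSampleMimeType(MimeTypes.VIDEO_H264)
                .setAverageBitrate(2_400_000)
                .build();
        TrackGroup group = new TrackGroup(low, high);
        // Both tracks are eligible; TYPE_UNSET because this app defines no custom selection type.
        return new RandomTrackSelection(
            group, new int[] {0, 1}, TrackSelection.TYPE_UNSET, new Random(/* seed= */ 42));
      }
    }
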
- */ - public RandomTrackSelection(TrackGroup group, int[] tracks, long seed) { - this(group, tracks, new Random(seed)); - } - /** * @param group The {@link TrackGroup}. Must not be null. * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be * null or empty. May be in any order. * @param random A source of random numbers. */ - public RandomTrackSelection(TrackGroup group, int[] tracks, Random random) { - super(group, tracks); + public RandomTrackSelection(TrackGroup group, int[] tracks, int type, Random random) { + super(group, tracks, type); this.random = random; selectedIndex = random.nextInt(length); } @@ -102,21 +84,21 @@ public void updateSelectedTrack( long availableDurationUs, List queue, MediaChunkIterator[] mediaChunkIterators) { - // Count the number of non-blacklisted formats. + // Count the number of allowed formats. long nowMs = SystemClock.elapsedRealtime(); - int nonBlacklistedFormatCount = 0; + int allowedFormatCount = 0; for (int i = 0; i < length; i++) { if (!isBlacklisted(i, nowMs)) { - nonBlacklistedFormatCount++; + allowedFormatCount++; } } - selectedIndex = random.nextInt(nonBlacklistedFormatCount); - if (nonBlacklistedFormatCount != length) { - // Adjust the format index to account for blacklisted formats. - nonBlacklistedFormatCount = 0; + selectedIndex = random.nextInt(allowedFormatCount); + if (allowedFormatCount != length) { + // Adjust the format index to account for excluded formats. + allowedFormatCount = 0; for (int i = 0; i < length; i++) { - if (!isBlacklisted(i, nowMs) && selectedIndex == nonBlacklistedFormatCount++) { + if (!isBlacklisted(i, nowMs) && selectedIndex == allowedFormatCount++) { selectedIndex = i; return; } @@ -139,5 +121,4 @@ public int getSelectionReason() { public Object getSelectionData() { return null; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelection.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelection.java index ad1a6ef1f2..493d41c90c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelection.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelection.java @@ -15,110 +15,58 @@ */ package com.google.android.exoplayer2.trackselection; -import androidx.annotation.Nullable; +import static java.lang.annotation.ElementType.TYPE_USE; + +import androidx.annotation.IntDef; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.source.TrackGroup; -import com.google.android.exoplayer2.source.chunk.MediaChunk; -import com.google.android.exoplayer2.source.chunk.MediaChunkIterator; -import com.google.android.exoplayer2.upstream.BandwidthMeter; -import java.util.List; -import org.checkerframework.checker.nullness.compatqual.NullableType; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** * A track selection consisting of a static subset of selected tracks belonging to a {@link - * TrackGroup}, and a possibly varying individual selected track from the subset. + * TrackGroup}. * - *

      Tracks belonging to the subset are exposed in decreasing bandwidth order. The individual - * selected track may change dynamically as a result of calling {@link #updateSelectedTrack(long, - * long, long, List, MediaChunkIterator[])} or {@link #evaluateQueueSize(long, List)}. This only - * happens between calls to {@link #enable()} and {@link #disable()}. + *

      Tracks belonging to the subset are exposed in decreasing bandwidth order. */ public interface TrackSelection { - /** Contains of a subset of selected tracks belonging to a {@link TrackGroup}. */ - final class Definition { - /** The {@link TrackGroup} which tracks belong to. */ - public final TrackGroup group; - /** The indices of the selected tracks in {@link #group}. */ - public final int[] tracks; - /** The track selection reason. One of the {@link C} SELECTION_REASON_ constants. */ - public final int reason; - /** Optional data associated with this selection of tracks. */ - @Nullable public final Object data; - - /** - * @param group The {@link TrackGroup}. Must not be null. - * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be - * null or empty. May be in any order. - */ - public Definition(TrackGroup group, int... tracks) { - this(group, tracks, C.SELECTION_REASON_UNKNOWN, /* data= */ null); - } - - /** - * @param group The {@link TrackGroup}. Must not be null. - * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be - * @param reason The track selection reason. One of the {@link C} SELECTION_REASON_ constants. - * @param data Optional data associated with this selection of tracks. - */ - public Definition(TrackGroup group, int[] tracks, int reason, @Nullable Object data) { - this.group = group; - this.tracks = tracks; - this.reason = reason; - this.data = data; - } - } - /** - * Factory for {@link TrackSelection} instances. + * Represents a type track selection. Either {@link #TYPE_UNSET} or an app-defined value (see + * {@link #TYPE_CUSTOM_BASE}). */ - interface Factory { - - /** - * Creates track selections for the provided {@link Definition Definitions}. - * - *

      Implementations that create at most one adaptive track selection may use {@link - * TrackSelectionUtil#createTrackSelectionsForDefinitions}. - * - * @param definitions A {@link Definition} array. May include null values. - * @param bandwidthMeter A {@link BandwidthMeter} which can be used to select tracks. - * @return The created selections. Must have the same length as {@code definitions} and may - * include null values. - */ - @NullableType - TrackSelection[] createTrackSelections( - @NullableType Definition[] definitions, BandwidthMeter bandwidthMeter); - } + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef( + open = true, + value = {TYPE_UNSET}) + @interface Type {} + /** An unspecified track selection type. */ + int TYPE_UNSET = 0; + /** The first value that can be used for application specific track selection types. */ + int TYPE_CUSTOM_BASE = 10000; /** - * Enables the track selection. Dynamic changes via {@link #updateSelectedTrack(long, long, long, - * List, MediaChunkIterator[])} or {@link #evaluateQueueSize(long, List)} will only happen after - * this call. + * Returns an integer specifying the type of the selection, or {@link #TYPE_UNSET} if not + * specified. * - *

      This method may not be called when the track selection is already enabled. + *

      Track selection types are specific to individual applications, but should be defined + * starting from {@link #TYPE_CUSTOM_BASE} to ensure they don't conflict with any types that may + * be added to the library in the future. */ - void enable(); + @Type + int getType(); - /** - * Disables this track selection. No further dynamic changes via {@link #updateSelectedTrack(long, - * long, long, List, MediaChunkIterator[])} or {@link #evaluateQueueSize(long, List)} will happen - * after this call. - * - *
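
As an illustration of the TYPE_CUSTOM_BASE range described above, an application that tags its own selections might define a constant like this (the constant name is hypothetical, sketch only):

    import com.google.android.exoplayer2.trackselection.TrackSelection;

    final class SelectionTypes {
      private SelectionTypes() {}

      /** Hypothetical app-defined type, offset from TYPE_CUSTOM_BASE to leave room for others. */
      static final int TYPE_OFFLINE_PREVIEW = TrackSelection.TYPE_CUSTOM_BASE + 1;

      static boolean isOfflinePreview(TrackSelection selection) {
        return selection.getType() == TYPE_OFFLINE_PREVIEW;
      }
    }
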

      This method may only be called when the track selection is already enabled. - */ - void disable(); - - /** - * Returns the {@link TrackGroup} to which the selected tracks belong. - */ + /** Returns the {@link TrackGroup} to which the selected tracks belong. */ TrackGroup getTrackGroup(); // Static subset of selected tracks. - /** - * Returns the number of tracks in the selection. - */ + /** Returns the number of tracks in the selection. */ int length(); /** @@ -156,114 +104,4 @@ TrackSelection[] createTrackSelections( * index is not part of the selection. */ int indexOf(int indexInTrackGroup); - - // Individual selected track. - - /** - * Returns the {@link Format} of the individual selected track. - */ - Format getSelectedFormat(); - - /** - * Returns the index in the track group of the individual selected track. - */ - int getSelectedIndexInTrackGroup(); - - /** - * Returns the index of the selected track. - */ - int getSelectedIndex(); - - /** - * Returns the reason for the current track selection. - */ - int getSelectionReason(); - - /** Returns optional data associated with the current track selection. */ - @Nullable Object getSelectionData(); - - // Adaptation. - - /** - * Called to notify the selection of the current playback speed. The playback speed may affect - * adaptive track selection. - * - * @param speed The playback speed. - */ - void onPlaybackSpeed(float speed); - - /** - * Called to notify the selection of a position discontinuity. - * - *

      This happens when the playback position jumps, e.g., as a result of a seek being performed. - */ - default void onDiscontinuity() {} - - /** - * Updates the selected track for sources that load media in discrete {@link MediaChunk}s. - * - *

      This method may only be called when the selection is enabled. - * - * @param playbackPositionUs The current playback position in microseconds. If playback of the - * period to which this track selection belongs has not yet started, the value will be the - * starting position in the period minus the duration of any media in previous periods still - * to be played. - * @param bufferedDurationUs The duration of media currently buffered from the current playback - * position, in microseconds. Note that the next load position can be calculated as {@code - * (playbackPositionUs + bufferedDurationUs)}. - * @param availableDurationUs The duration of media available for buffering from the current - * playback position, in microseconds, or {@link C#TIME_UNSET} if media can be buffered to the - * end of the current period. Note that if not set to {@link C#TIME_UNSET}, the position up to - * which media is available for buffering can be calculated as {@code (playbackPositionUs + - * availableDurationUs)}. - * @param queue The queue of already buffered {@link MediaChunk}s. Must not be modified. - * @param mediaChunkIterators An array of {@link MediaChunkIterator}s providing information about - * the sequence of upcoming media chunks for each track in the selection. All iterators start - * from the media chunk which will be loaded next if the respective track is selected. Note - * that this information may not be available for all tracks, and so some iterators may be - * empty. - */ - void updateSelectedTrack( - long playbackPositionUs, - long bufferedDurationUs, - long availableDurationUs, - List queue, - MediaChunkIterator[] mediaChunkIterators); - - /** - * May be called periodically by sources that load media in discrete {@link MediaChunk}s and - * support discarding of buffered chunks in order to re-buffer using a different selected track. - * Returns the number of chunks that should be retained in the queue. - *

      - * To avoid excessive re-buffering, implementations should normally return the size of the queue. - * An example of a case where a smaller value may be returned is if network conditions have - * improved dramatically, allowing chunks to be discarded and re-buffered in a track of - * significantly higher quality. Discarding chunks may allow faster switching to a higher quality - * track in this case. This method may only be called when the selection is enabled. - * - * @param playbackPositionUs The current playback position in microseconds. If playback of the - * period to which this track selection belongs has not yet started, the value will be the - * starting position in the period minus the duration of any media in previous periods still - * to be played. - * @param queue The queue of buffered {@link MediaChunk}s. Must not be modified. - * @return The number of chunks to retain in the queue. - */ - int evaluateQueueSize(long playbackPositionUs, List queue); - - /** - * Attempts to blacklist the track at the specified index in the selection, making it ineligible - * for selection by calls to {@link #updateSelectedTrack(long, long, long, List, - * MediaChunkIterator[])} for the specified period of time. Blacklisting will fail if all other - * tracks are currently blacklisted. If blacklisting the currently selected track, note that it - * will remain selected until the next call to {@link #updateSelectedTrack(long, long, long, List, - * MediaChunkIterator[])}. - * - *

      This method may only be called when the selection is enabled. - * - * @param index The index of the track in the selection. - * @param blacklistDurationMs The duration of time for which the track should be blacklisted, in - * milliseconds. - * @return Whether blacklisting was successful. - */ - boolean blacklist(int index, long blacklistDurationMs); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectionArray.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectionArray.java index fc20e863ba..0c02f68218 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectionArray.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectionArray.java @@ -30,7 +30,9 @@ public final class TrackSelectionArray { // Lazily initialized hashcode. private int hashCode; - /** @param trackSelections The selections. Must not be null, but may contain null elements. */ + /** + * @param trackSelections The selections. Must not be null, but may contain null elements. + */ public TrackSelectionArray(@NullableType TrackSelection... trackSelections) { this.trackSelections = trackSelections; this.length = trackSelections.length; @@ -73,5 +75,4 @@ public boolean equals(@Nullable Object obj) { TrackSelectionArray other = (TrackSelectionArray) obj; return Arrays.equals(trackSelections, other.trackSelections); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectionOverride.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectionOverride.java new file mode 100644 index 0000000000..26acbdde9b --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectionOverride.java @@ -0,0 +1,125 @@ +/* + * Copyright (C) 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.trackselection; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.util.Collections.max; +import static java.util.Collections.min; + +import android.os.Bundle; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Bundleable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.source.TrackGroup; +import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import com.google.common.primitives.Ints; +import java.util.List; + +/** + * A track selection override, consisting of a {@link TrackGroup} and the indices of the tracks + * within the group that should be selected. + * + *

      A track selection override is applied during playback if the media being played contains a + * {@link TrackGroup} equal to the one in the override. If a {@link TrackSelectionParameters} + * contains only one override of a given track type that applies to the media, this override will be + * used to control the track selection for that type. If multiple overrides of a given track type + * apply then the player will apply only one of them. + * + *

      If {@link #trackIndices} is empty then the override specifies that no tracks should be + * selected. Adding an empty override to a {@link TrackSelectionParameters} is similar to {@link + * TrackSelectionParameters.Builder#setTrackTypeDisabled disabling a track type}, except that an + * empty override will only be applied if the media being played contains a {@link TrackGroup} equal + * to the one in the override. Conversely, disabling a track type will prevent selection of tracks + * of that type for all media. + */ +public final class TrackSelectionOverride implements Bundleable { + + /** The media {@link TrackGroup} whose {@link #trackIndices} are forced to be selected. */ + public final TrackGroup mediaTrackGroup; + /** The indices of tracks in a {@link TrackGroup} to be selected. */ + public final ImmutableList trackIndices; + + private static final String FIELD_TRACK_GROUP = Util.intToStringMaxRadix(0); + private static final String FIELD_TRACKS = Util.intToStringMaxRadix(1); + + /** + * Constructs an instance to force {@code trackIndex} in {@code trackGroup} to be selected. + * + * @param mediaTrackGroup The media {@link TrackGroup} for which to override the track selection. + * @param trackIndex The index of the track in the {@link TrackGroup} to select. + */ + public TrackSelectionOverride(TrackGroup mediaTrackGroup, int trackIndex) { + this(mediaTrackGroup, ImmutableList.of(trackIndex)); + } + + /** + * Constructs an instance to force {@code trackIndices} in {@code trackGroup} to be selected. + * + * @param mediaTrackGroup The media {@link TrackGroup} for which to override the track selection. + * @param trackIndices The indices of the tracks in the {@link TrackGroup} to select. + */ + public TrackSelectionOverride(TrackGroup mediaTrackGroup, List trackIndices) { + if (!trackIndices.isEmpty()) { + if (min(trackIndices) < 0 || max(trackIndices) >= mediaTrackGroup.length) { + throw new IndexOutOfBoundsException(); + } + } + this.mediaTrackGroup = mediaTrackGroup; + this.trackIndices = ImmutableList.copyOf(trackIndices); + } + + /** Returns the {@link C.TrackType} of the overridden track group. */ + public @C.TrackType int getType() { + return mediaTrackGroup.type; + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + TrackSelectionOverride that = (TrackSelectionOverride) obj; + return mediaTrackGroup.equals(that.mediaTrackGroup) && trackIndices.equals(that.trackIndices); + } + + @Override + public int hashCode() { + return mediaTrackGroup.hashCode() + 31 * trackIndices.hashCode(); + } + + // Bundleable implementation + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putBundle(FIELD_TRACK_GROUP, mediaTrackGroup.toBundle()); + bundle.putIntArray(FIELD_TRACKS, Ints.toArray(trackIndices)); + return bundle; + } + + /** Object that can restore {@code TrackSelectionOverride} from a {@link Bundle}. 
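
A typical way to apply such an override is sketched below; Player#getCurrentTracks and TrackSelectionParameters.Builder#setOverrideForType are assumed from the surrounding API and are not part of this hunk:

    import com.google.android.exoplayer2.C;
    import com.google.android.exoplayer2.Player;
    import com.google.android.exoplayer2.Tracks;
    import com.google.android.exoplayer2.trackselection.TrackSelectionOverride;

    final class AudioOverrideExample {
      private AudioOverrideExample() {}

      /** Forces the first track of the first audio group, if the media exposes one. */
      static void forceFirstAudioTrack(Player player) {
        for (Tracks.Group group : player.getCurrentTracks().getGroups()) {
          if (group.getType() == C.TRACK_TYPE_AUDIO) {
            player.setTrackSelectionParameters(
                player.getTrackSelectionParameters()
                    .buildUpon()
                    .setOverrideForType(
                        new TrackSelectionOverride(group.getMediaTrackGroup(), /* trackIndex= */ 0))
                    .build());
            return;
          }
        }
      }
    }
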
*/ + public static final Creator CREATOR = + bundle -> { + Bundle trackGroupBundle = checkNotNull(bundle.getBundle(FIELD_TRACK_GROUP)); + TrackGroup mediaTrackGroup = TrackGroup.CREATOR.fromBundle(trackGroupBundle); + int[] tracks = checkNotNull(bundle.getIntArray(FIELD_TRACKS)); + return new TrackSelectionOverride(mediaTrackGroup, Ints.asList(tracks)); + }; +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectionParameters.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectionParameters.java index 6e10171f08..bbcfde8d2a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectionParameters.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectionParameters.java @@ -15,69 +15,463 @@ */ package com.google.android.exoplayer2.trackselection; -import android.annotation.TargetApi; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.BundleableUtil.toBundleArrayList; +import static com.google.common.base.MoreObjects.firstNonNull; + import android.content.Context; +import android.graphics.Point; +import android.os.Bundle; import android.os.Looper; -import android.os.Parcel; -import android.os.Parcelable; -import android.text.TextUtils; import android.view.accessibility.CaptioningManager; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.Bundleable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Player; +import com.google.android.exoplayer2.source.TrackGroup; +import com.google.android.exoplayer2.util.BundleableUtil; import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.primitives.Ints; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; import java.util.Locale; +import java.util.Set; +import org.checkerframework.checker.initialization.qual.UnknownInitialization; +import org.checkerframework.checker.nullness.qual.EnsuresNonNull; -/** Constraint parameters for track selection. */ -public class TrackSelectionParameters implements Parcelable { +/** + * Parameters for controlling track selection. + * + *

      Parameters can be queried and set on a {@link Player}. For example the following code modifies + * the parameters to restrict video track selections to SD, and to select a German audio track if + * there is one: + * + *

      <pre>{@code
      + * // Build on the current parameters.
      + * TrackSelectionParameters currentParameters = player.getTrackSelectionParameters();
      + * // Build the resulting parameters.
      + * TrackSelectionParameters newParameters = currentParameters
      + *     .buildUpon()
      + *     .setMaxVideoSizeSd()
      + *     .setPreferredAudioLanguage("deu")
      + *     .build();
      + * // Set the new parameters.
      + * player.setTrackSelectionParameters(newParameters);
      + * }</pre>
      + */ +public class TrackSelectionParameters implements Bundleable { /** * A builder for {@link TrackSelectionParameters}. See the {@link TrackSelectionParameters} * documentation for explanations of the parameters that can be configured using this builder. */ public static class Builder { + // Video + private int maxVideoWidth; + private int maxVideoHeight; + private int maxVideoFrameRate; + private int maxVideoBitrate; + private int minVideoWidth; + private int minVideoHeight; + private int minVideoFrameRate; + private int minVideoBitrate; + private int viewportWidth; + private int viewportHeight; + private boolean viewportOrientationMayChange; + private ImmutableList preferredVideoMimeTypes; + private @C.RoleFlags int preferredVideoRoleFlags; + // Audio + private ImmutableList preferredAudioLanguages; + private @C.RoleFlags int preferredAudioRoleFlags; + private int maxAudioChannelCount; + private int maxAudioBitrate; + private ImmutableList preferredAudioMimeTypes; + // Text + private ImmutableList preferredTextLanguages; + private @C.RoleFlags int preferredTextRoleFlags; + private @C.SelectionFlags int ignoredTextSelectionFlags; + private boolean selectUndeterminedTextLanguage; + // General + private boolean forceLowestBitrate; + private boolean forceHighestSupportedBitrate; + private HashMap overrides; + private HashSet<@C.TrackType Integer> disabledTrackTypes; - @Nullable /* package */ String preferredAudioLanguage; - @Nullable /* package */ String preferredTextLanguage; - @C.RoleFlags /* package */ int preferredTextRoleFlags; - /* package */ boolean selectUndeterminedTextLanguage; - @C.SelectionFlags /* package */ int disabledTextTrackSelectionFlags; + /** + * @deprecated {@link Context} constraints will not be set using this constructor. Use {@link + * #Builder(Context)} instead. + */ + @Deprecated + public Builder() { + // Video + maxVideoWidth = Integer.MAX_VALUE; + maxVideoHeight = Integer.MAX_VALUE; + maxVideoFrameRate = Integer.MAX_VALUE; + maxVideoBitrate = Integer.MAX_VALUE; + viewportWidth = Integer.MAX_VALUE; + viewportHeight = Integer.MAX_VALUE; + viewportOrientationMayChange = true; + preferredVideoMimeTypes = ImmutableList.of(); + preferredVideoRoleFlags = 0; + // Audio + preferredAudioLanguages = ImmutableList.of(); + preferredAudioRoleFlags = 0; + maxAudioChannelCount = Integer.MAX_VALUE; + maxAudioBitrate = Integer.MAX_VALUE; + preferredAudioMimeTypes = ImmutableList.of(); + // Text + preferredTextLanguages = ImmutableList.of(); + preferredTextRoleFlags = 0; + ignoredTextSelectionFlags = 0; + selectUndeterminedTextLanguage = false; + // General + forceLowestBitrate = false; + forceHighestSupportedBitrate = false; + overrides = new HashMap<>(); + disabledTrackTypes = new HashSet<>(); + } /** * Creates a builder with default initial values. * * @param context Any context. */ - @SuppressWarnings({"deprecation", "initialization:method.invocation.invalid"}) + @SuppressWarnings({"deprecation", "method.invocation"}) // Methods invoked are setter only. public Builder(Context context) { this(); setPreferredTextLanguageAndRoleFlagsToCaptioningManagerSettings(context); + setViewportSizeToPhysicalDisplaySize(context, /* viewportOrientationMayChange= */ true); } + /** Creates a builder with the initial values specified in {@code initialValues}. */ + protected Builder(TrackSelectionParameters initialValues) { + init(initialValues); + } + + /** Creates a builder with the initial values specified in {@code bundle}. 
*/ + protected Builder(Bundle bundle) { + // Video + maxVideoWidth = bundle.getInt(FIELD_MAX_VIDEO_WIDTH, DEFAULT_WITHOUT_CONTEXT.maxVideoWidth); + maxVideoHeight = + bundle.getInt(FIELD_MAX_VIDEO_HEIGHT, DEFAULT_WITHOUT_CONTEXT.maxVideoHeight); + maxVideoFrameRate = + bundle.getInt(FIELD_MAX_VIDEO_FRAMERATE, DEFAULT_WITHOUT_CONTEXT.maxVideoFrameRate); + maxVideoBitrate = + bundle.getInt(FIELD_MAX_VIDEO_BITRATE, DEFAULT_WITHOUT_CONTEXT.maxVideoBitrate); + minVideoWidth = bundle.getInt(FIELD_MIN_VIDEO_WIDTH, DEFAULT_WITHOUT_CONTEXT.minVideoWidth); + minVideoHeight = + bundle.getInt(FIELD_MIN_VIDEO_HEIGHT, DEFAULT_WITHOUT_CONTEXT.minVideoHeight); + minVideoFrameRate = + bundle.getInt(FIELD_MIN_VIDEO_FRAMERATE, DEFAULT_WITHOUT_CONTEXT.minVideoFrameRate); + minVideoBitrate = + bundle.getInt(FIELD_MIN_VIDEO_BITRATE, DEFAULT_WITHOUT_CONTEXT.minVideoBitrate); + viewportWidth = bundle.getInt(FIELD_VIEWPORT_WIDTH, DEFAULT_WITHOUT_CONTEXT.viewportWidth); + viewportHeight = bundle.getInt(FIELD_VIEWPORT_HEIGHT, DEFAULT_WITHOUT_CONTEXT.viewportHeight); + viewportOrientationMayChange = + bundle.getBoolean( + FIELD_VIEWPORT_ORIENTATION_MAY_CHANGE, + DEFAULT_WITHOUT_CONTEXT.viewportOrientationMayChange); + preferredVideoMimeTypes = + ImmutableList.copyOf( + firstNonNull(bundle.getStringArray(FIELD_PREFERRED_VIDEO_MIMETYPES), new String[0])); + preferredVideoRoleFlags = + bundle.getInt( + FIELD_PREFERRED_VIDEO_ROLE_FLAGS, DEFAULT_WITHOUT_CONTEXT.preferredVideoRoleFlags); + // Audio + String[] preferredAudioLanguages1 = + firstNonNull(bundle.getStringArray(FIELD_PREFERRED_AUDIO_LANGUAGES), new String[0]); + preferredAudioLanguages = normalizeLanguageCodes(preferredAudioLanguages1); + preferredAudioRoleFlags = + bundle.getInt( + FIELD_PREFERRED_AUDIO_ROLE_FLAGS, DEFAULT_WITHOUT_CONTEXT.preferredAudioRoleFlags); + maxAudioChannelCount = + bundle.getInt( + FIELD_MAX_AUDIO_CHANNEL_COUNT, DEFAULT_WITHOUT_CONTEXT.maxAudioChannelCount); + maxAudioBitrate = + bundle.getInt(FIELD_MAX_AUDIO_BITRATE, DEFAULT_WITHOUT_CONTEXT.maxAudioBitrate); + preferredAudioMimeTypes = + ImmutableList.copyOf( + firstNonNull(bundle.getStringArray(FIELD_PREFERRED_AUDIO_MIME_TYPES), new String[0])); + // Text + preferredTextLanguages = + normalizeLanguageCodes( + firstNonNull(bundle.getStringArray(FIELD_PREFERRED_TEXT_LANGUAGES), new String[0])); + preferredTextRoleFlags = + bundle.getInt( + FIELD_PREFERRED_TEXT_ROLE_FLAGS, DEFAULT_WITHOUT_CONTEXT.preferredTextRoleFlags); + ignoredTextSelectionFlags = + bundle.getInt( + FIELD_IGNORED_TEXT_SELECTION_FLAGS, + DEFAULT_WITHOUT_CONTEXT.ignoredTextSelectionFlags); + selectUndeterminedTextLanguage = + bundle.getBoolean( + FIELD_SELECT_UNDETERMINED_TEXT_LANGUAGE, + DEFAULT_WITHOUT_CONTEXT.selectUndeterminedTextLanguage); + // General + forceLowestBitrate = + bundle.getBoolean(FIELD_FORCE_LOWEST_BITRATE, DEFAULT_WITHOUT_CONTEXT.forceLowestBitrate); + forceHighestSupportedBitrate = + bundle.getBoolean( + FIELD_FORCE_HIGHEST_SUPPORTED_BITRATE, + DEFAULT_WITHOUT_CONTEXT.forceHighestSupportedBitrate); + @Nullable + List overrideBundleList = bundle.getParcelableArrayList(FIELD_SELECTION_OVERRIDES); + List overrideList = + overrideBundleList == null + ? 
ImmutableList.of() + : BundleableUtil.fromBundleList(TrackSelectionOverride.CREATOR, overrideBundleList); + overrides = new HashMap<>(); + for (int i = 0; i < overrideList.size(); i++) { + TrackSelectionOverride override = overrideList.get(i); + overrides.put(override.mediaTrackGroup, override); + } + int[] disabledTrackTypeArray = + firstNonNull(bundle.getIntArray(FIELD_DISABLED_TRACK_TYPE), new int[0]); + disabledTrackTypes = new HashSet<>(); + for (@C.TrackType int disabledTrackType : disabledTrackTypeArray) { + disabledTrackTypes.add(disabledTrackType); + } + } + + /** Overrides the value of the builder with the value of {@link TrackSelectionParameters}. */ + @EnsuresNonNull({ + "preferredVideoMimeTypes", + "preferredAudioLanguages", + "preferredAudioMimeTypes", + "preferredTextLanguages", + "overrides", + "disabledTrackTypes", + }) + private void init(@UnknownInitialization Builder this, TrackSelectionParameters parameters) { + // Video + maxVideoWidth = parameters.maxVideoWidth; + maxVideoHeight = parameters.maxVideoHeight; + maxVideoFrameRate = parameters.maxVideoFrameRate; + maxVideoBitrate = parameters.maxVideoBitrate; + minVideoWidth = parameters.minVideoWidth; + minVideoHeight = parameters.minVideoHeight; + minVideoFrameRate = parameters.minVideoFrameRate; + minVideoBitrate = parameters.minVideoBitrate; + viewportWidth = parameters.viewportWidth; + viewportHeight = parameters.viewportHeight; + viewportOrientationMayChange = parameters.viewportOrientationMayChange; + preferredVideoMimeTypes = parameters.preferredVideoMimeTypes; + preferredVideoRoleFlags = parameters.preferredVideoRoleFlags; + // Audio + preferredAudioLanguages = parameters.preferredAudioLanguages; + preferredAudioRoleFlags = parameters.preferredAudioRoleFlags; + maxAudioChannelCount = parameters.maxAudioChannelCount; + maxAudioBitrate = parameters.maxAudioBitrate; + preferredAudioMimeTypes = parameters.preferredAudioMimeTypes; + // Text + preferredTextLanguages = parameters.preferredTextLanguages; + preferredTextRoleFlags = parameters.preferredTextRoleFlags; + ignoredTextSelectionFlags = parameters.ignoredTextSelectionFlags; + selectUndeterminedTextLanguage = parameters.selectUndeterminedTextLanguage; + // General + forceLowestBitrate = parameters.forceLowestBitrate; + forceHighestSupportedBitrate = parameters.forceHighestSupportedBitrate; + disabledTrackTypes = new HashSet<>(parameters.disabledTrackTypes); + overrides = new HashMap<>(parameters.overrides); + } + + /** Overrides the value of the builder with the value of {@link TrackSelectionParameters}. */ + @CanIgnoreReturnValue + protected Builder set(TrackSelectionParameters parameters) { + init(parameters); + return this; + } + + // Video + /** - * @deprecated {@link Context} constraints will not be set when using this constructor. Use - * {@link #Builder(Context)} instead. + * Equivalent to {@link #setMaxVideoSize setMaxVideoSize(1279, 719)}. + * + * @return This builder. */ - @Deprecated - public Builder() { - preferredAudioLanguage = null; - preferredTextLanguage = null; - preferredTextRoleFlags = 0; - selectUndeterminedTextLanguage = false; - disabledTextTrackSelectionFlags = 0; + @CanIgnoreReturnValue + public Builder setMaxVideoSizeSd() { + return setMaxVideoSize(1279, 719); + } + + /** + * Equivalent to {@link #setMaxVideoSize setMaxVideoSize(Integer.MAX_VALUE, Integer.MAX_VALUE)}. + * + * @return This builder. 
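
Combined with toBundle() and the CREATOR declared elsewhere in this class (not visible in this hunk), the bundle-based builder above supports a persistence round trip along these lines (sketch only):

    import android.content.Context;
    import android.os.Bundle;
    import com.google.android.exoplayer2.trackselection.TrackSelectionParameters;

    final class ParametersPersistenceExample {
      private ParametersPersistenceExample() {}

      static TrackSelectionParameters roundTrip(Context context) {
        TrackSelectionParameters parameters =
            new TrackSelectionParameters.Builder(context)
                .setMaxVideoSizeSd()
                .setPreferredAudioLanguages("deu", "eng")
                .build();
        // Serialize (e.g. into saved instance state) and restore.
        Bundle bundle = parameters.toBundle();
        return TrackSelectionParameters.CREATOR.fromBundle(bundle);
      }
    }
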
+ */ + @CanIgnoreReturnValue + public Builder clearVideoSizeConstraints() { + return setMaxVideoSize(Integer.MAX_VALUE, Integer.MAX_VALUE); + } + + /** + * Sets the maximum allowed video width and height. + * + * @param maxVideoWidth Maximum allowed video width in pixels. + * @param maxVideoHeight Maximum allowed video height in pixels. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setMaxVideoSize(int maxVideoWidth, int maxVideoHeight) { + this.maxVideoWidth = maxVideoWidth; + this.maxVideoHeight = maxVideoHeight; + return this; + } + + /** + * Sets the maximum allowed video frame rate. + * + * @param maxVideoFrameRate Maximum allowed video frame rate in hertz. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setMaxVideoFrameRate(int maxVideoFrameRate) { + this.maxVideoFrameRate = maxVideoFrameRate; + return this; + } + + /** + * Sets the maximum allowed video bitrate. + * + * @param maxVideoBitrate Maximum allowed video bitrate in bits per second. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setMaxVideoBitrate(int maxVideoBitrate) { + this.maxVideoBitrate = maxVideoBitrate; + return this; + } + + /** + * Sets the minimum allowed video width and height. + * + * @param minVideoWidth Minimum allowed video width in pixels. + * @param minVideoHeight Minimum allowed video height in pixels. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setMinVideoSize(int minVideoWidth, int minVideoHeight) { + this.minVideoWidth = minVideoWidth; + this.minVideoHeight = minVideoHeight; + return this; + } + + /** + * Sets the minimum allowed video frame rate. + * + * @param minVideoFrameRate Minimum allowed video frame rate in hertz. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setMinVideoFrameRate(int minVideoFrameRate) { + this.minVideoFrameRate = minVideoFrameRate; + return this; + } + + /** + * Sets the minimum allowed video bitrate. + * + * @param minVideoBitrate Minimum allowed video bitrate in bits per second. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setMinVideoBitrate(int minVideoBitrate) { + this.minVideoBitrate = minVideoBitrate; + return this; } /** - * @param initialValues The {@link TrackSelectionParameters} from which the initial values of - * the builder are obtained. + * Equivalent to calling {@link #setViewportSize(int, int, boolean)} with the viewport size + * obtained from {@link Util#getCurrentDisplayModeSize(Context)}. + * + * @param context Any context. + * @param viewportOrientationMayChange Whether the viewport orientation may change during + * playback. + * @return This builder. */ - /* package */ Builder(TrackSelectionParameters initialValues) { - preferredAudioLanguage = initialValues.preferredAudioLanguage; - preferredTextLanguage = initialValues.preferredTextLanguage; - preferredTextRoleFlags = initialValues.preferredTextRoleFlags; - selectUndeterminedTextLanguage = initialValues.selectUndeterminedTextLanguage; - disabledTextTrackSelectionFlags = initialValues.disabledTextTrackSelectionFlags; + @CanIgnoreReturnValue + public Builder setViewportSizeToPhysicalDisplaySize( + Context context, boolean viewportOrientationMayChange) { + // Assume the viewport is fullscreen. 
+ Point viewportSize = Util.getCurrentDisplayModeSize(context); + return setViewportSize(viewportSize.x, viewportSize.y, viewportOrientationMayChange); } + /** + * Equivalent to {@link #setViewportSize setViewportSize(Integer.MAX_VALUE, Integer.MAX_VALUE, + * true)}. + * + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder clearViewportSizeConstraints() { + return setViewportSize(Integer.MAX_VALUE, Integer.MAX_VALUE, true); + } + + /** + * Sets the viewport size to constrain adaptive video selections so that only tracks suitable + * for the viewport are selected. + * + * @param viewportWidth Viewport width in pixels. + * @param viewportHeight Viewport height in pixels. + * @param viewportOrientationMayChange Whether the viewport orientation may change during + * playback. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setViewportSize( + int viewportWidth, int viewportHeight, boolean viewportOrientationMayChange) { + this.viewportWidth = viewportWidth; + this.viewportHeight = viewportHeight; + this.viewportOrientationMayChange = viewportOrientationMayChange; + return this; + } + + /** + * Sets the preferred sample MIME type for video tracks. + * + * @param mimeType The preferred MIME type for video tracks, or {@code null} to clear a + * previously set preference. + * @return This builder. + */ + public Builder setPreferredVideoMimeType(@Nullable String mimeType) { + return mimeType == null ? setPreferredVideoMimeTypes() : setPreferredVideoMimeTypes(mimeType); + } + + /** + * Sets the preferred sample MIME types for video tracks. + * + * @param mimeTypes The preferred MIME types for video tracks in order of preference, or an + * empty list for no preference. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPreferredVideoMimeTypes(String... mimeTypes) { + preferredVideoMimeTypes = ImmutableList.copyOf(mimeTypes); + return this; + } + + /** + * Sets the preferred {@link C.RoleFlags} for video tracks. + * + * @param preferredVideoRoleFlags Preferred video role flags. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPreferredVideoRoleFlags(@C.RoleFlags int preferredVideoRoleFlags) { + this.preferredVideoRoleFlags = preferredVideoRoleFlags; + return this; + } + + // Audio + /** * Sets the preferred language for audio and forced text tracks. * @@ -86,10 +480,87 @@ public Builder() { * @return This builder. */ public Builder setPreferredAudioLanguage(@Nullable String preferredAudioLanguage) { - this.preferredAudioLanguage = preferredAudioLanguage; + return preferredAudioLanguage == null + ? setPreferredAudioLanguages() + : setPreferredAudioLanguages(preferredAudioLanguage); + } + + /** + * Sets the preferred languages for audio and forced text tracks. + * + * @param preferredAudioLanguages Preferred audio languages as IETF BCP 47 conformant tags in + * order of preference, or an empty array to select the default track, or the first track if + * there's no default. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPreferredAudioLanguages(String... preferredAudioLanguages) { + this.preferredAudioLanguages = normalizeLanguageCodes(preferredAudioLanguages); + return this; + } + + /** + * Sets the preferred {@link C.RoleFlags} for audio tracks. + * + * @param preferredAudioRoleFlags Preferred audio role flags. + * @return This builder. 
+ */ + @CanIgnoreReturnValue + public Builder setPreferredAudioRoleFlags(@C.RoleFlags int preferredAudioRoleFlags) { + this.preferredAudioRoleFlags = preferredAudioRoleFlags; + return this; + } + + /** + * Sets the maximum allowed audio channel count. + * + * @param maxAudioChannelCount Maximum allowed audio channel count. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setMaxAudioChannelCount(int maxAudioChannelCount) { + this.maxAudioChannelCount = maxAudioChannelCount; + return this; + } + + /** + * Sets the maximum allowed audio bitrate. + * + * @param maxAudioBitrate Maximum allowed audio bitrate in bits per second. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setMaxAudioBitrate(int maxAudioBitrate) { + this.maxAudioBitrate = maxAudioBitrate; return this; } + /** + * Sets the preferred sample MIME type for audio tracks. + * + * @param mimeType The preferred MIME type for audio tracks, or {@code null} to clear a + * previously set preference. + * @return This builder. + */ + public Builder setPreferredAudioMimeType(@Nullable String mimeType) { + return mimeType == null ? setPreferredAudioMimeTypes() : setPreferredAudioMimeTypes(mimeType); + } + + /** + * Sets the preferred sample MIME types for audio tracks. + * + * @param mimeTypes The preferred MIME types for audio tracks in order of preference, or an + * empty list for no preference. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPreferredAudioMimeTypes(String... mimeTypes) { + preferredAudioMimeTypes = ImmutableList.copyOf(mimeTypes); + return this; + } + + // Text + /** * Sets the preferred language and role flags for text tracks based on the accessibility * settings of {@link CaptioningManager}. @@ -99,6 +570,7 @@ public Builder setPreferredAudioLanguage(@Nullable String preferredAudioLanguage * @param context A {@link Context}. * @return This builder. */ + @CanIgnoreReturnValue public Builder setPreferredTextLanguageAndRoleFlagsToCaptioningManagerSettings( Context context) { if (Util.SDK_INT >= 19) { @@ -115,7 +587,22 @@ public Builder setPreferredTextLanguageAndRoleFlagsToCaptioningManagerSettings( * @return This builder. */ public Builder setPreferredTextLanguage(@Nullable String preferredTextLanguage) { - this.preferredTextLanguage = preferredTextLanguage; + return preferredTextLanguage == null + ? setPreferredTextLanguages() + : setPreferredTextLanguages(preferredTextLanguage); + } + + /** + * Sets the preferred languages for text tracks. + * + * @param preferredTextLanguages Preferred text languages as IETF BCP 47 conformant tags in + * order of preference, or an empty array to select the default track if there is one, or no + * track otherwise. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setPreferredTextLanguages(String... preferredTextLanguages) { + this.preferredTextLanguages = normalizeLanguageCodes(preferredTextLanguages); return this; } @@ -125,51 +612,152 @@ public Builder setPreferredTextLanguage(@Nullable String preferredTextLanguage) * @param preferredTextRoleFlags Preferred text role flags. * @return This builder. */ + @CanIgnoreReturnValue public Builder setPreferredTextRoleFlags(@C.RoleFlags int preferredTextRoleFlags) { this.preferredTextRoleFlags = preferredTextRoleFlags; return this; } + /** + * Sets a bitmask of selection flags that are ignored for text track selections. + * + * @param ignoredTextSelectionFlags A bitmask of {@link C.SelectionFlags} that are ignored for + * text track selections. 
+ * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setIgnoredTextSelectionFlags(@C.SelectionFlags int ignoredTextSelectionFlags) { + this.ignoredTextSelectionFlags = ignoredTextSelectionFlags; + return this; + } + /** * Sets whether a text track with undetermined language should be selected if no track with - * {@link #setPreferredTextLanguage(String)} is available, or if the preferred language is - * unset. + * {@link #setPreferredTextLanguages(String...) a preferred language} is available, or if the + * preferred language is unset. * * @param selectUndeterminedTextLanguage Whether a text track with undetermined language should * be selected if no preferred language track is available. * @return This builder. */ + @CanIgnoreReturnValue public Builder setSelectUndeterminedTextLanguage(boolean selectUndeterminedTextLanguage) { this.selectUndeterminedTextLanguage = selectUndeterminedTextLanguage; return this; } + // General + + /** + * Sets whether to force selection of the single lowest bitrate audio and video tracks that + * comply with all other constraints. + * + * @param forceLowestBitrate Whether to force selection of the single lowest bitrate audio and + * video tracks. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setForceLowestBitrate(boolean forceLowestBitrate) { + this.forceLowestBitrate = forceLowestBitrate; + return this; + } + + /** + * Sets whether to force selection of the highest bitrate audio and video tracks that comply + * with all other constraints. + * + * @param forceHighestSupportedBitrate Whether to force selection of the highest bitrate audio + * and video tracks. + * @return This builder. + */ + @CanIgnoreReturnValue + public Builder setForceHighestSupportedBitrate(boolean forceHighestSupportedBitrate) { + this.forceHighestSupportedBitrate = forceHighestSupportedBitrate; + return this; + } + + /** Adds an override, replacing any override for the same {@link TrackGroup}. */ + @CanIgnoreReturnValue + public Builder addOverride(TrackSelectionOverride override) { + overrides.put(override.mediaTrackGroup, override); + return this; + } + + /** Sets an override, replacing all existing overrides with the same track type. */ + @CanIgnoreReturnValue + public Builder setOverrideForType(TrackSelectionOverride override) { + clearOverridesOfType(override.getType()); + overrides.put(override.mediaTrackGroup, override); + return this; + } + + /** Removes the override for the provided media {@link TrackGroup}, if there is one. */ + @CanIgnoreReturnValue + public Builder clearOverride(TrackGroup mediaTrackGroup) { + overrides.remove(mediaTrackGroup); + return this; + } + + /** Removes all overrides of the provided track type. */ + @CanIgnoreReturnValue + public Builder clearOverridesOfType(@C.TrackType int trackType) { + Iterator it = overrides.values().iterator(); + while (it.hasNext()) { + TrackSelectionOverride override = it.next(); + if (override.getType() == trackType) { + it.remove(); + } + } + return this; + } + + /** Removes all overrides. */ + @CanIgnoreReturnValue + public Builder clearOverrides() { + overrides.clear(); + return this; + } + + /** + * Sets the disabled track types, preventing all tracks of those types from being selected for + * playback. Any previously disabled track types are cleared. + * + * @param disabledTrackTypes The track types to disable. + * @return This builder. + * @deprecated Use {@link #setTrackTypeDisabled(int, boolean)}. 
+ */ + @CanIgnoreReturnValue + @Deprecated + public Builder setDisabledTrackTypes(Set<@C.TrackType Integer> disabledTrackTypes) { + this.disabledTrackTypes.clear(); + this.disabledTrackTypes.addAll(disabledTrackTypes); + return this; + } + /** - * Sets a bitmask of selection flags that are disabled for text track selections. + * Sets whether a track type is disabled. If disabled, no tracks of the specified type will be + * selected for playback. * - * @param disabledTextTrackSelectionFlags A bitmask of {@link C.SelectionFlags} that are - * disabled for text track selections. + * @param trackType The track type. + * @param disabled Whether the track type should be disabled. * @return This builder. */ - public Builder setDisabledTextTrackSelectionFlags( - @C.SelectionFlags int disabledTextTrackSelectionFlags) { - this.disabledTextTrackSelectionFlags = disabledTextTrackSelectionFlags; + @CanIgnoreReturnValue + public Builder setTrackTypeDisabled(@C.TrackType int trackType, boolean disabled) { + if (disabled) { + disabledTrackTypes.add(trackType); + } else { + disabledTrackTypes.remove(trackType); + } return this; } /** Builds a {@link TrackSelectionParameters} instance with the selected values. */ public TrackSelectionParameters build() { - return new TrackSelectionParameters( - // Audio - preferredAudioLanguage, - // Text - preferredTextLanguage, - preferredTextRoleFlags, - selectUndeterminedTextLanguage, - disabledTextTrackSelectionFlags); + return new TrackSelectionParameters(this); } - @TargetApi(19) + @RequiresApi(19) private void setPreferredTextLanguageAndRoleFlagsToCaptioningManagerSettingsV19( Context context) { if (Util.SDK_INT < 23 && Looper.myLooper() == null) { @@ -185,8 +773,16 @@ private void setPreferredTextLanguageAndRoleFlagsToCaptioningManagerSettingsV19( preferredTextRoleFlags = C.ROLE_FLAG_CAPTION | C.ROLE_FLAG_DESCRIBES_MUSIC_AND_SOUND; Locale preferredLocale = captioningManager.getLocale(); if (preferredLocale != null) { - preferredTextLanguage = Util.getLocaleLanguageTag(preferredLocale); + preferredTextLanguages = ImmutableList.of(Util.getLocaleLanguageTag(preferredLocale)); + } + } + + private static ImmutableList normalizeLanguageCodes(String[] preferredTextLanguages) { + ImmutableList.Builder listBuilder = ImmutableList.builder(); + for (String language : checkNotNull(preferredTextLanguages)) { + listBuilder.add(Util.normalizeLanguageCode(checkNotNull(language))); } + return listBuilder.build(); } } @@ -198,6 +794,8 @@ private void setPreferredTextLanguageAndRoleFlagsToCaptioningManagerSettingsV19( *
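As a quick orientation for the Builder hunks above (not part of the diff): the old single-value setters give way to list-based preferences, per-group overrides and a per-type disable switch. A minimal usage sketch, assuming the bundled Player exposes get/setTrackSelectionParameters and that TrackSelectionOverride lives in this trackselection package; the helper class and the `player`/`textGroup` names are placeholders.

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.source.TrackGroup;
import com.google.android.exoplayer2.trackselection.TrackSelectionOverride;
import com.google.android.exoplayer2.trackselection.TrackSelectionParameters;

final class TrackSelectionParametersExample {

  /** Constrains video to SD, prefers German then English audio, and pins one text group. */
  static void applyConstraints(Player player, TrackGroup textGroup) {
    TrackSelectionParameters parameters =
        player.getTrackSelectionParameters()
            .buildUpon()
            .setMaxVideoSizeSd() // shorthand for setMaxVideoSize(1279, 719)
            .setPreferredAudioLanguages("de", "en")
            .addOverride(new TrackSelectionOverride(textGroup, /* trackIndex= */ 0))
            .build();
    player.setTrackSelectionParameters(parameters);
  }

  /** Disables or re-enables all text tracks via the new per-type switch. */
  static void setTextEnabled(Player player, boolean enabled) {
    player.setTrackSelectionParameters(
        player.getTrackSelectionParameters()
            .buildUpon()
            .setTrackTypeDisabled(C.TRACK_TYPE_TEXT, /* disabled= */ !enabled)
            .build());
  }

  private TrackSelectionParametersExample() {}
}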

 * This instance will not have the following settings:
 *
+ *   • {@link Builder#setViewportSizeToPhysicalDisplaySize(Context, boolean) Viewport
+ *       constraints} configured for the primary display.
      • {@link Builder#setPreferredTextLanguageAndRoleFlagsToCaptioningManagerSettings(Context) * Preferred text language and role flags} configured to the accessibility settings of * {@link CaptioningManager}. @@ -205,7 +803,6 @@ private void setPreferredTextLanguageAndRoleFlagsToCaptioningManagerSettingsV19( */ @SuppressWarnings("deprecation") public static final TrackSelectionParameters DEFAULT_WITHOUT_CONTEXT = new Builder().build(); - /** * @deprecated This instance is not configured using {@link Context} constraints. Use {@link * #getDefaults(Context)} instead. @@ -217,58 +814,179 @@ public static TrackSelectionParameters getDefaults(Context context) { return new Builder(context).build(); } + // Video + /** + * Maximum allowed video width in pixels. The default value is {@link Integer#MAX_VALUE} (i.e. no + * constraint). + * + *

        To constrain adaptive video track selections to be suitable for a given viewport (the region + * of the display within which video will be played), use ({@link #viewportWidth}, {@link + * #viewportHeight} and {@link #viewportOrientationMayChange}) instead. + */ + public final int maxVideoWidth; + /** + * Maximum allowed video height in pixels. The default value is {@link Integer#MAX_VALUE} (i.e. no + * constraint). + * + *

        To constrain adaptive video track selections to be suitable for a given viewport (the region + * of the display within which video will be played), use ({@link #viewportWidth}, {@link + * #viewportHeight} and {@link #viewportOrientationMayChange}) instead. + */ + public final int maxVideoHeight; + /** + * Maximum allowed video frame rate in hertz. The default value is {@link Integer#MAX_VALUE} (i.e. + * no constraint). + */ + public final int maxVideoFrameRate; /** - * The preferred language for audio and forced text tracks as an IETF BCP 47 conformant tag. - * {@code null} selects the default track, or the first track if there's no default. The default - * value is {@code null}. + * Maximum allowed video bitrate in bits per second. The default value is {@link + * Integer#MAX_VALUE} (i.e. no constraint). */ - @Nullable public final String preferredAudioLanguage; + public final int maxVideoBitrate; + /** Minimum allowed video width in pixels. The default value is 0 (i.e. no constraint). */ + public final int minVideoWidth; + /** Minimum allowed video height in pixels. The default value is 0 (i.e. no constraint). */ + public final int minVideoHeight; + /** Minimum allowed video frame rate in hertz. The default value is 0 (i.e. no constraint). */ + public final int minVideoFrameRate; /** - * The preferred language for text tracks as an IETF BCP 47 conformant tag. {@code null} selects - * the default track if there is one, or no track otherwise. The default value is {@code null}, or - * the language of the accessibility {@link CaptioningManager} if enabled. + * Minimum allowed video bitrate in bits per second. The default value is 0 (i.e. no constraint). */ - @Nullable public final String preferredTextLanguage; + public final int minVideoBitrate; + /** + * Viewport width in pixels. Constrains video track selections for adaptive content so that only + * tracks suitable for the viewport are selected. The default value is the physical width of the + * primary display, in pixels. + */ + public final int viewportWidth; + /** + * Viewport height in pixels. Constrains video track selections for adaptive content so that only + * tracks suitable for the viewport are selected. The default value is the physical height of the + * primary display, in pixels. + */ + public final int viewportHeight; + /** + * Whether the viewport orientation may change during playback. Constrains video track selections + * for adaptive content so that only tracks suitable for the viewport are selected. The default + * value is {@code true}. + */ + public final boolean viewportOrientationMayChange; + /** + * The preferred sample MIME types for video tracks in order of preference, or an empty list for + * no preference. The default is an empty list. + */ + public final ImmutableList preferredVideoMimeTypes; + /** + * The preferred {@link C.RoleFlags} for video tracks. {@code 0} selects the default track if + * there is one, or the first track if there's no default. The default value is {@code 0}. + */ + public final @C.RoleFlags int preferredVideoRoleFlags; + // Audio + /** + * The preferred languages for audio and forced text tracks as IETF BCP 47 conformant tags in + * order of preference. An empty list selects the default track, or the first track if there's no + * default. The default value is an empty list. + */ + public final ImmutableList preferredAudioLanguages; + /** + * The preferred {@link C.RoleFlags} for audio tracks. 
{@code 0} selects the default track if + * there is one, or the first track if there's no default. The default value is {@code 0}. + */ + public final @C.RoleFlags int preferredAudioRoleFlags; + /** + * Maximum allowed audio channel count. The default value is {@link Integer#MAX_VALUE} (i.e. no + * constraint). + */ + public final int maxAudioChannelCount; + /** + * Maximum allowed audio bitrate in bits per second. The default value is {@link + * Integer#MAX_VALUE} (i.e. no constraint). + */ + public final int maxAudioBitrate; + /** + * The preferred sample MIME types for audio tracks in order of preference, or an empty list for + * no preference. The default is an empty list. + */ + public final ImmutableList preferredAudioMimeTypes; + // Text + /** + * The preferred languages for text tracks as IETF BCP 47 conformant tags in order of preference. + * An empty list selects the default track if there is one, or no track otherwise. The default + * value is an empty list, or the language of the accessibility {@link CaptioningManager} if + * enabled. + */ + public final ImmutableList preferredTextLanguages; /** * The preferred {@link C.RoleFlags} for text tracks. {@code 0} selects the default track if there * is one, or no track otherwise. The default value is {@code 0}, or {@link C#ROLE_FLAG_SUBTITLE} * | {@link C#ROLE_FLAG_DESCRIBES_MUSIC_AND_SOUND} if the accessibility {@link CaptioningManager} * is enabled. */ - @C.RoleFlags public final int preferredTextRoleFlags; + public final @C.RoleFlags int preferredTextRoleFlags; + /** + * Bitmask of selection flags that are ignored for text track selections. See {@link + * C.SelectionFlags}. The default value is {@code 0} (i.e., no flags are ignored). + */ + public final @C.SelectionFlags int ignoredTextSelectionFlags; /** * Whether a text track with undetermined language should be selected if no track with {@link - * #preferredTextLanguage} is available, or if {@link #preferredTextLanguage} is unset. The + * #preferredTextLanguages} is available, or if {@link #preferredTextLanguages} is unset. The * default value is {@code false}. */ public final boolean selectUndeterminedTextLanguage; + // General + /** + * Whether to force selection of the single lowest bitrate audio and video tracks that comply with + * all other constraints. The default value is {@code false}. + */ + public final boolean forceLowestBitrate; /** - * Bitmask of selection flags that are disabled for text track selections. See {@link - * C.SelectionFlags}. The default value is {@code 0} (i.e. no flags). + * Whether to force selection of the highest bitrate audio and video tracks that comply with all + * other constraints. The default value is {@code false}. */ - @C.SelectionFlags public final int disabledTextTrackSelectionFlags; - - /* package */ TrackSelectionParameters( - @Nullable String preferredAudioLanguage, - @Nullable String preferredTextLanguage, - @C.RoleFlags int preferredTextRoleFlags, - boolean selectUndeterminedTextLanguage, - @C.SelectionFlags int disabledTextTrackSelectionFlags) { + public final boolean forceHighestSupportedBitrate; + + /** Overrides to force selection of specific tracks. */ + public final ImmutableMap overrides; + + /** + * The track types that are disabled. No track of a disabled type will be selected, thus no track + * type contained in the set will be played. The default value is that no track type is disabled + * (empty set). 
+ */ + public final ImmutableSet<@C.TrackType Integer> disabledTrackTypes; + + protected TrackSelectionParameters(Builder builder) { + // Video + this.maxVideoWidth = builder.maxVideoWidth; + this.maxVideoHeight = builder.maxVideoHeight; + this.maxVideoFrameRate = builder.maxVideoFrameRate; + this.maxVideoBitrate = builder.maxVideoBitrate; + this.minVideoWidth = builder.minVideoWidth; + this.minVideoHeight = builder.minVideoHeight; + this.minVideoFrameRate = builder.minVideoFrameRate; + this.minVideoBitrate = builder.minVideoBitrate; + this.viewportWidth = builder.viewportWidth; + this.viewportHeight = builder.viewportHeight; + this.viewportOrientationMayChange = builder.viewportOrientationMayChange; + this.preferredVideoMimeTypes = builder.preferredVideoMimeTypes; + this.preferredVideoRoleFlags = builder.preferredVideoRoleFlags; // Audio - this.preferredAudioLanguage = Util.normalizeLanguageCode(preferredAudioLanguage); + this.preferredAudioLanguages = builder.preferredAudioLanguages; + this.preferredAudioRoleFlags = builder.preferredAudioRoleFlags; + this.maxAudioChannelCount = builder.maxAudioChannelCount; + this.maxAudioBitrate = builder.maxAudioBitrate; + this.preferredAudioMimeTypes = builder.preferredAudioMimeTypes; // Text - this.preferredTextLanguage = Util.normalizeLanguageCode(preferredTextLanguage); - this.preferredTextRoleFlags = preferredTextRoleFlags; - this.selectUndeterminedTextLanguage = selectUndeterminedTextLanguage; - this.disabledTextTrackSelectionFlags = disabledTextTrackSelectionFlags; - } - - /* package */ TrackSelectionParameters(Parcel in) { - this.preferredAudioLanguage = in.readString(); - this.preferredTextLanguage = in.readString(); - this.preferredTextRoleFlags = in.readInt(); - this.selectUndeterminedTextLanguage = Util.readBoolean(in); - this.disabledTextTrackSelectionFlags = in.readInt(); + this.preferredTextLanguages = builder.preferredTextLanguages; + this.preferredTextRoleFlags = builder.preferredTextRoleFlags; + this.ignoredTextSelectionFlags = builder.ignoredTextSelectionFlags; + this.selectUndeterminedTextLanguage = builder.selectUndeterminedTextLanguage; + // General + this.forceLowestBitrate = builder.forceLowestBitrate; + this.forceHighestSupportedBitrate = builder.forceHighestSupportedBitrate; + this.overrides = ImmutableMap.copyOf(builder.overrides); + this.disabledTrackTypes = ImmutableSet.copyOf(builder.disabledTrackTypes); } /** Creates a new {@link Builder}, copying the initial values from this instance. 
*/ @@ -286,51 +1004,164 @@ public boolean equals(@Nullable Object obj) { return false; } TrackSelectionParameters other = (TrackSelectionParameters) obj; - return TextUtils.equals(preferredAudioLanguage, other.preferredAudioLanguage) - && TextUtils.equals(preferredTextLanguage, other.preferredTextLanguage) + // Video + return maxVideoWidth == other.maxVideoWidth + && maxVideoHeight == other.maxVideoHeight + && maxVideoFrameRate == other.maxVideoFrameRate + && maxVideoBitrate == other.maxVideoBitrate + && minVideoWidth == other.minVideoWidth + && minVideoHeight == other.minVideoHeight + && minVideoFrameRate == other.minVideoFrameRate + && minVideoBitrate == other.minVideoBitrate + && viewportOrientationMayChange == other.viewportOrientationMayChange + && viewportWidth == other.viewportWidth + && viewportHeight == other.viewportHeight + && preferredVideoMimeTypes.equals(other.preferredVideoMimeTypes) + && preferredVideoRoleFlags == other.preferredVideoRoleFlags + // Audio + && preferredAudioLanguages.equals(other.preferredAudioLanguages) + && preferredAudioRoleFlags == other.preferredAudioRoleFlags + && maxAudioChannelCount == other.maxAudioChannelCount + && maxAudioBitrate == other.maxAudioBitrate + && preferredAudioMimeTypes.equals(other.preferredAudioMimeTypes) + // Text + && preferredTextLanguages.equals(other.preferredTextLanguages) && preferredTextRoleFlags == other.preferredTextRoleFlags + && ignoredTextSelectionFlags == other.ignoredTextSelectionFlags && selectUndeterminedTextLanguage == other.selectUndeterminedTextLanguage - && disabledTextTrackSelectionFlags == other.disabledTextTrackSelectionFlags; + // General + && forceLowestBitrate == other.forceLowestBitrate + && forceHighestSupportedBitrate == other.forceHighestSupportedBitrate + && overrides.equals(other.overrides) + && disabledTrackTypes.equals(other.disabledTrackTypes); } @Override public int hashCode() { int result = 1; - result = 31 * result + (preferredAudioLanguage == null ? 0 : preferredAudioLanguage.hashCode()); - result = 31 * result + (preferredTextLanguage == null ? 0 : preferredTextLanguage.hashCode()); + // Video + result = 31 * result + maxVideoWidth; + result = 31 * result + maxVideoHeight; + result = 31 * result + maxVideoFrameRate; + result = 31 * result + maxVideoBitrate; + result = 31 * result + minVideoWidth; + result = 31 * result + minVideoHeight; + result = 31 * result + minVideoFrameRate; + result = 31 * result + minVideoBitrate; + result = 31 * result + (viewportOrientationMayChange ? 1 : 0); + result = 31 * result + viewportWidth; + result = 31 * result + viewportHeight; + result = 31 * result + preferredVideoMimeTypes.hashCode(); + result = 31 * result + preferredVideoRoleFlags; + // Audio + result = 31 * result + preferredAudioLanguages.hashCode(); + result = 31 * result + preferredAudioRoleFlags; + result = 31 * result + maxAudioChannelCount; + result = 31 * result + maxAudioBitrate; + result = 31 * result + preferredAudioMimeTypes.hashCode(); + // Text + result = 31 * result + preferredTextLanguages.hashCode(); result = 31 * result + preferredTextRoleFlags; + result = 31 * result + ignoredTextSelectionFlags; result = 31 * result + (selectUndeterminedTextLanguage ? 1 : 0); - result = 31 * result + disabledTextTrackSelectionFlags; + // General + result = 31 * result + (forceLowestBitrate ? 1 : 0); + result = 31 * result + (forceHighestSupportedBitrate ? 
1 : 0); + result = 31 * result + overrides.hashCode(); + result = 31 * result + disabledTrackTypes.hashCode(); return result; } - // Parcelable implementation. + // Bundleable implementation + + private static final String FIELD_PREFERRED_AUDIO_LANGUAGES = Util.intToStringMaxRadix(1); + private static final String FIELD_PREFERRED_AUDIO_ROLE_FLAGS = Util.intToStringMaxRadix(2); + private static final String FIELD_PREFERRED_TEXT_LANGUAGES = Util.intToStringMaxRadix(3); + private static final String FIELD_PREFERRED_TEXT_ROLE_FLAGS = Util.intToStringMaxRadix(4); + private static final String FIELD_SELECT_UNDETERMINED_TEXT_LANGUAGE = Util.intToStringMaxRadix(5); + private static final String FIELD_MAX_VIDEO_WIDTH = Util.intToStringMaxRadix(6); + private static final String FIELD_MAX_VIDEO_HEIGHT = Util.intToStringMaxRadix(7); + private static final String FIELD_MAX_VIDEO_FRAMERATE = Util.intToStringMaxRadix(8); + private static final String FIELD_MAX_VIDEO_BITRATE = Util.intToStringMaxRadix(9); + private static final String FIELD_MIN_VIDEO_WIDTH = Util.intToStringMaxRadix(10); + private static final String FIELD_MIN_VIDEO_HEIGHT = Util.intToStringMaxRadix(11); + private static final String FIELD_MIN_VIDEO_FRAMERATE = Util.intToStringMaxRadix(12); + private static final String FIELD_MIN_VIDEO_BITRATE = Util.intToStringMaxRadix(13); + private static final String FIELD_VIEWPORT_WIDTH = Util.intToStringMaxRadix(14); + private static final String FIELD_VIEWPORT_HEIGHT = Util.intToStringMaxRadix(15); + private static final String FIELD_VIEWPORT_ORIENTATION_MAY_CHANGE = Util.intToStringMaxRadix(16); + private static final String FIELD_PREFERRED_VIDEO_MIMETYPES = Util.intToStringMaxRadix(17); + private static final String FIELD_MAX_AUDIO_CHANNEL_COUNT = Util.intToStringMaxRadix(18); + private static final String FIELD_MAX_AUDIO_BITRATE = Util.intToStringMaxRadix(19); + private static final String FIELD_PREFERRED_AUDIO_MIME_TYPES = Util.intToStringMaxRadix(20); + private static final String FIELD_FORCE_LOWEST_BITRATE = Util.intToStringMaxRadix(21); + private static final String FIELD_FORCE_HIGHEST_SUPPORTED_BITRATE = Util.intToStringMaxRadix(22); + private static final String FIELD_SELECTION_OVERRIDES = Util.intToStringMaxRadix(23); + private static final String FIELD_DISABLED_TRACK_TYPE = Util.intToStringMaxRadix(24); + private static final String FIELD_PREFERRED_VIDEO_ROLE_FLAGS = Util.intToStringMaxRadix(25); + private static final String FIELD_IGNORED_TEXT_SELECTION_FLAGS = Util.intToStringMaxRadix(26); + + /** + * Defines a minimum field ID value for subclasses to use when implementing {@link #toBundle()} + * and {@link Bundleable.Creator}. + * + *

        Subclasses should obtain keys for their {@link Bundle} representation by applying a + * non-negative offset on this constant and passing the result to {@link + * Util#intToStringMaxRadix(int)}. + */ + protected static final int FIELD_CUSTOM_ID_BASE = 1000; @Override - public int describeContents() { - return 0; + public Bundle toBundle() { + Bundle bundle = new Bundle(); + + // Video + bundle.putInt(FIELD_MAX_VIDEO_WIDTH, maxVideoWidth); + bundle.putInt(FIELD_MAX_VIDEO_HEIGHT, maxVideoHeight); + bundle.putInt(FIELD_MAX_VIDEO_FRAMERATE, maxVideoFrameRate); + bundle.putInt(FIELD_MAX_VIDEO_BITRATE, maxVideoBitrate); + bundle.putInt(FIELD_MIN_VIDEO_WIDTH, minVideoWidth); + bundle.putInt(FIELD_MIN_VIDEO_HEIGHT, minVideoHeight); + bundle.putInt(FIELD_MIN_VIDEO_FRAMERATE, minVideoFrameRate); + bundle.putInt(FIELD_MIN_VIDEO_BITRATE, minVideoBitrate); + bundle.putInt(FIELD_VIEWPORT_WIDTH, viewportWidth); + bundle.putInt(FIELD_VIEWPORT_HEIGHT, viewportHeight); + bundle.putBoolean(FIELD_VIEWPORT_ORIENTATION_MAY_CHANGE, viewportOrientationMayChange); + bundle.putStringArray( + FIELD_PREFERRED_VIDEO_MIMETYPES, preferredVideoMimeTypes.toArray(new String[0])); + bundle.putInt(FIELD_PREFERRED_VIDEO_ROLE_FLAGS, preferredVideoRoleFlags); + // Audio + bundle.putStringArray( + FIELD_PREFERRED_AUDIO_LANGUAGES, preferredAudioLanguages.toArray(new String[0])); + bundle.putInt(FIELD_PREFERRED_AUDIO_ROLE_FLAGS, preferredAudioRoleFlags); + bundle.putInt(FIELD_MAX_AUDIO_CHANNEL_COUNT, maxAudioChannelCount); + bundle.putInt(FIELD_MAX_AUDIO_BITRATE, maxAudioBitrate); + bundle.putStringArray( + FIELD_PREFERRED_AUDIO_MIME_TYPES, preferredAudioMimeTypes.toArray(new String[0])); + // Text + bundle.putStringArray( + FIELD_PREFERRED_TEXT_LANGUAGES, preferredTextLanguages.toArray(new String[0])); + bundle.putInt(FIELD_PREFERRED_TEXT_ROLE_FLAGS, preferredTextRoleFlags); + bundle.putInt(FIELD_IGNORED_TEXT_SELECTION_FLAGS, ignoredTextSelectionFlags); + bundle.putBoolean(FIELD_SELECT_UNDETERMINED_TEXT_LANGUAGE, selectUndeterminedTextLanguage); + // General + bundle.putBoolean(FIELD_FORCE_LOWEST_BITRATE, forceLowestBitrate); + bundle.putBoolean(FIELD_FORCE_HIGHEST_SUPPORTED_BITRATE, forceHighestSupportedBitrate); + bundle.putParcelableArrayList(FIELD_SELECTION_OVERRIDES, toBundleArrayList(overrides.values())); + bundle.putIntArray(FIELD_DISABLED_TRACK_TYPE, Ints.toArray(disabledTrackTypes)); + + return bundle; } - @Override - public void writeToParcel(Parcel dest, int flags) { - dest.writeString(preferredAudioLanguage); - dest.writeString(preferredTextLanguage); - dest.writeInt(preferredTextRoleFlags); - Util.writeBoolean(dest, selectUndeterminedTextLanguage); - dest.writeInt(disabledTextTrackSelectionFlags); + /** Construct an instance from a {@link Bundle} produced by {@link #toBundle()}. */ + public static TrackSelectionParameters fromBundle(Bundle bundle) { + return new Builder(bundle).build(); } + /** + * @deprecated Use {@link #fromBundle(Bundle)} instead. 
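The Parcelable plumbing removed in this hunk is replaced by Bundleable-style toBundle()/fromBundle(). An illustrative round trip an app (or a subclass using FIELD_CUSTOM_ID_BASE offsets for its own keys) might rely on; the helper class is invented for the sketch.

import android.os.Bundle;
import com.google.android.exoplayer2.trackselection.TrackSelectionParameters;

final class ParametersBundling {

  /** Serializes the parameters, e.g. into saved instance state. */
  static Bundle save(TrackSelectionParameters parameters) {
    return parameters.toBundle();
  }

  /** Restores parameters previously written by {@link #save}. */
  static TrackSelectionParameters restore(Bundle bundle) {
    return TrackSelectionParameters.fromBundle(bundle);
  }

  private ParametersBundling() {}
}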
+ */ + @Deprecated public static final Creator CREATOR = - new Creator() { - - @Override - public TrackSelectionParameters createFromParcel(Parcel in) { - return new TrackSelectionParameters(in); - } - - @Override - public TrackSelectionParameters[] newArray(int size) { - return new TrackSelectionParameters[size]; - } - }; + TrackSelectionParameters::fromBundle; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectionUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectionUtil.java index 0f2748b1ac..8ade709c84 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectionUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectionUtil.java @@ -15,10 +15,19 @@ */ package com.google.android.exoplayer2.trackselection; +import android.os.SystemClock; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.RendererCapabilities; +import com.google.android.exoplayer2.Tracks; +import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.TrackGroupArray; import com.google.android.exoplayer2.trackselection.DefaultTrackSelector.SelectionOverride; -import com.google.android.exoplayer2.trackselection.TrackSelection.Definition; +import com.google.android.exoplayer2.trackselection.ExoTrackSelection.Definition; +import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; +import com.google.common.collect.ImmutableList; +import java.util.Arrays; +import java.util.List; import org.checkerframework.checker.nullness.compatqual.NullableType; /** Track selection related utility methods. */ @@ -35,7 +44,7 @@ public interface AdaptiveTrackSelectionFactory { * @param trackSelectionDefinition A {@link Definition} for the track selection. * @return The created track selection. */ - TrackSelection createAdaptiveTrackSelection(Definition trackSelectionDefinition); + ExoTrackSelection createAdaptiveTrackSelection(Definition trackSelectionDefinition); } /** @@ -48,10 +57,10 @@ public interface AdaptiveTrackSelectionFactory { * @return The array of created track selection. For null entries in {@code definitions} returns * null values. 
*/ - public static @NullableType TrackSelection[] createTrackSelectionsForDefinitions( + public static @NullableType ExoTrackSelection[] createTrackSelectionsForDefinitions( @NullableType Definition[] definitions, AdaptiveTrackSelectionFactory adaptiveTrackSelectionFactory) { - TrackSelection[] selections = new TrackSelection[definitions.length]; + ExoTrackSelection[] selections = new ExoTrackSelection[definitions.length]; boolean createdAdaptiveTrackSelection = false; for (int i = 0; i < definitions.length; i++) { Definition definition = definitions[i]; @@ -64,7 +73,7 @@ public interface AdaptiveTrackSelectionFactory { } else { selections[i] = new FixedTrackSelection( - definition.group, definition.tracks[0], definition.reason, definition.data); + definition.group, definition.tracks[0], /* type= */ definition.type); } } return selections; @@ -87,7 +96,7 @@ public static DefaultTrackSelector.Parameters updateParametersWithOverride( TrackGroupArray trackGroupArray, boolean isDisabled, @Nullable SelectionOverride override) { - DefaultTrackSelector.ParametersBuilder builder = + DefaultTrackSelector.Parameters.Builder builder = parameters .buildUpon() .clearSelectionOverrides(rendererIndex) @@ -97,4 +106,105 @@ public static DefaultTrackSelector.Parameters updateParametersWithOverride( } return builder.build(); } + + /** + * Returns the {@link LoadErrorHandlingPolicy.FallbackOptions} with the tracks of the given {@link + * ExoTrackSelection} and with a single location option indicating that there are no alternative + * locations available. + * + * @param trackSelection The track selection to get the number of total and excluded tracks. + * @return The {@link LoadErrorHandlingPolicy.FallbackOptions} for the given track selection. + */ + public static LoadErrorHandlingPolicy.FallbackOptions createFallbackOptions( + ExoTrackSelection trackSelection) { + long nowMs = SystemClock.elapsedRealtime(); + int numberOfTracks = trackSelection.length(); + int numberOfExcludedTracks = 0; + for (int i = 0; i < numberOfTracks; i++) { + if (trackSelection.isBlacklisted(i, nowMs)) { + numberOfExcludedTracks++; + } + } + return new LoadErrorHandlingPolicy.FallbackOptions( + /* numberOfLocations= */ 1, + /* numberOfExcludedLocations= */ 0, + numberOfTracks, + numberOfExcludedTracks); + } + + /** + * Returns {@link Tracks} built from {@link MappingTrackSelector.MappedTrackInfo} and {@link + * TrackSelection TrackSelections} for each renderer. + * + * @param mappedTrackInfo The {@link MappingTrackSelector.MappedTrackInfo} + * @param selections The track selections, indexed by renderer. A null entry indicates that a + * renderer does not have any selected tracks. + * @return The corresponding {@link Tracks}. + */ + @SuppressWarnings({"unchecked", "rawtypes"}) // Initialization of array of Lists. + public static Tracks buildTracks( + MappingTrackSelector.MappedTrackInfo mappedTrackInfo, + @NullableType TrackSelection[] selections) { + List[] listSelections = new List[selections.length]; + for (int i = 0; i < selections.length; i++) { + @Nullable TrackSelection selection = selections[i]; + listSelections[i] = selection != null ? ImmutableList.of(selection) : ImmutableList.of(); + } + return buildTracks(mappedTrackInfo, listSelections); + } + + /** + * Returns {@link Tracks} built from {@link MappingTrackSelector.MappedTrackInfo} and {@link + * TrackSelection TrackSelections} for each renderer. 
+ * + * @param mappedTrackInfo The {@link MappingTrackSelector.MappedTrackInfo} + * @param selections The track selections, indexed by renderer. Null entries are not permitted. An + * empty list indicates that a renderer does not have any selected tracks. + * @return The corresponding {@link Tracks}. + */ + public static Tracks buildTracks( + MappingTrackSelector.MappedTrackInfo mappedTrackInfo, + List[] selections) { + ImmutableList.Builder trackGroups = new ImmutableList.Builder<>(); + for (int rendererIndex = 0; + rendererIndex < mappedTrackInfo.getRendererCount(); + rendererIndex++) { + TrackGroupArray trackGroupArray = mappedTrackInfo.getTrackGroups(rendererIndex); + List rendererTrackSelections = selections[rendererIndex]; + for (int groupIndex = 0; groupIndex < trackGroupArray.length; groupIndex++) { + TrackGroup trackGroup = trackGroupArray.get(groupIndex); + boolean adaptiveSupported = + mappedTrackInfo.getAdaptiveSupport( + rendererIndex, groupIndex, /* includeCapabilitiesExceededTracks= */ false) + != RendererCapabilities.ADAPTIVE_NOT_SUPPORTED; + @C.FormatSupport int[] trackSupport = new int[trackGroup.length]; + boolean[] selected = new boolean[trackGroup.length]; + for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) { + trackSupport[trackIndex] = + mappedTrackInfo.getTrackSupport(rendererIndex, groupIndex, trackIndex); + boolean isTrackSelected = false; + for (int i = 0; i < rendererTrackSelections.size(); i++) { + TrackSelection trackSelection = rendererTrackSelections.get(i); + if (trackSelection.getTrackGroup().equals(trackGroup) + && trackSelection.indexOf(trackIndex) != C.INDEX_UNSET) { + isTrackSelected = true; + break; + } + } + selected[trackIndex] = isTrackSelected; + } + trackGroups.add(new Tracks.Group(trackGroup, adaptiveSupported, trackSupport, selected)); + } + } + TrackGroupArray unmappedTrackGroups = mappedTrackInfo.getUnmappedTrackGroups(); + for (int groupIndex = 0; groupIndex < unmappedTrackGroups.length; groupIndex++) { + TrackGroup trackGroup = unmappedTrackGroups.get(groupIndex); + @C.FormatSupport int[] trackSupport = new int[trackGroup.length]; + Arrays.fill(trackSupport, C.FORMAT_UNSUPPORTED_TYPE); + boolean[] selected = new boolean[trackGroup.length]; + trackGroups.add( + new Tracks.Group(trackGroup, /* adaptiveSupported= */ false, trackSupport, selected)); + } + return new Tracks(trackGroups.build()); + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelector.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelector.java index d48c140ac8..06f32480d7 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelector.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelector.java @@ -15,24 +15,28 @@ */ package com.google.android.exoplayer2.trackselection; +import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; + +import androidx.annotation.CallSuper; import androidx.annotation.Nullable; import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.ExoPlayer; +import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.Renderer; import com.google.android.exoplayer2.RendererCapabilities; import com.google.android.exoplayer2.RendererConfiguration; import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.audio.AudioAttributes; import 
com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; import com.google.android.exoplayer2.source.TrackGroupArray; import com.google.android.exoplayer2.upstream.BandwidthMeter; -import com.google.android.exoplayer2.util.Assertions; /** * The component of an {@link ExoPlayer} responsible for selecting tracks to be consumed by each of * the player's {@link Renderer}s. The {@link DefaultTrackSelector} implementation should be * suitable for most use cases. * - *

<h3>Interactions with the player</h3>
+ * <h2>Interactions with the player</h2>
        * * The following interactions occur between the player and its track selector during playback. * @@ -61,9 +65,13 @@ * prefer audio tracks in a particular language. This will trigger the player to make new * track selections. Note that the player will have to re-buffer in the case that the new * track selection for the currently playing period differs from the one that was invalidated. + * Implementing subclasses can trigger invalidation by calling {@link #invalidate()}, which + * will call {@link InvalidationListener#onTrackSelectionsInvalidated()}. + *
      • When the player is {@linkplain Player#release() released}, it will release the track + * selector by calling {@link #release()}. *
 *
- * <h3>Renderer configuration</h3>
+ * <h2>Renderer configuration</h2>
      * * The {@link TrackSelectorResult} returned by {@link #selectTracks(RendererCapabilities[], * TrackGroupArray, MediaPeriodId, Timeline)} contains not only {@link TrackSelection}s for each @@ -75,7 +83,7 @@ * configure renderers in a particular way if certain tracks are selected. Hence it makes sense to * determine the track selection and corresponding renderer configurations in a single step. * - *

<h3>Threading model</h3>
+ * <h2>Threading model</h2>
      * * All calls made by the player into the track selector are on the player's internal playback * thread. The track selector may call {@link InvalidationListener#onTrackSelectionsInvalidated()} @@ -83,9 +91,7 @@ */ public abstract class TrackSelector { - /** - * Notified when selections previously made by a {@link TrackSelector} are no longer valid. - */ + /** Notified when selections previously made by a {@link TrackSelector} are no longer valid. */ public interface InvalidationListener { /** @@ -93,7 +99,6 @@ public interface InvalidationListener { * longer valid. May be called from any thread. */ void onTrackSelectionsInvalidated(); - } @Nullable private InvalidationListener listener; @@ -106,11 +111,22 @@ public interface InvalidationListener { * it has previously made are no longer valid. * @param bandwidthMeter A bandwidth meter which can be used by track selections to select tracks. */ - public final void init(InvalidationListener listener, BandwidthMeter bandwidthMeter) { + @CallSuper + public void init(InvalidationListener listener, BandwidthMeter bandwidthMeter) { this.listener = listener; this.bandwidthMeter = bandwidthMeter; } + /** + * Called by the player to release the selector. The selector cannot be used until {@link + * #init(InvalidationListener, BandwidthMeter)} is called again. + */ + @CallSuper + public void release() { + listener = null; + bandwidthMeter = null; + } + /** * Called by the player to perform a track selection. * @@ -135,7 +151,38 @@ public abstract TrackSelectorResult selectTracks( * * @param info The value of {@link TrackSelectorResult#info} in the activated selection. */ - public abstract void onSelectionActivated(Object info); + public abstract void onSelectionActivated(@Nullable Object info); + + /** Returns the current parameters for track selection. */ + public TrackSelectionParameters getParameters() { + return TrackSelectionParameters.DEFAULT_WITHOUT_CONTEXT; + } + + /** + * Called by the player to provide parameters for track selection. + * + *

      Only supported if {@link #isSetParametersSupported()} returns true. + * + * @param parameters The parameters for track selection. + */ + public void setParameters(TrackSelectionParameters parameters) { + // Default implementation doesn't support this method. + } + + /** + * Returns if this {@code TrackSelector} supports {@link + * #setParameters(TrackSelectionParameters)}. + * + *

      The same value is always returned for a given {@code TrackSelector} instance. + */ + public boolean isSetParametersSupported() { + return false; + } + + /** Called by the player to set the {@link AudioAttributes} that will be used for playback. */ + public void setAudioAttributes(AudioAttributes audioAttributes) { + // Default implementation is no-op. + } /** * Calls {@link InvalidationListener#onTrackSelectionsInvalidated()} to invalidate all previously @@ -149,9 +196,10 @@ protected final void invalidate() { /** * Returns a bandwidth meter which can be used by track selections to select tracks. Must only be - * called after {@link #init(InvalidationListener, BandwidthMeter)} has been called. + * called when the track selector is {@linkplain #init(InvalidationListener, BandwidthMeter) + * initialized}. */ protected final BandwidthMeter getBandwidthMeter() { - return Assertions.checkNotNull(bandwidthMeter); + return checkStateNotNull(bandwidthMeter); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectorResult.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectorResult.java index 9228f3af62..6fab2c7968 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectorResult.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/trackselection/TrackSelectorResult.java @@ -17,12 +17,11 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.RendererConfiguration; +import com.google.android.exoplayer2.Tracks; import com.google.android.exoplayer2.util.Util; import org.checkerframework.checker.nullness.compatqual.NullableType; -/** - * The result of a {@link TrackSelector} operation. - */ +/** The result of a {@link TrackSelector} operation. */ public final class TrackSelectorResult { /** The number of selections in the result. Greater than or equal to zero. */ @@ -32,29 +31,51 @@ public final class TrackSelectorResult { * renderer should be disabled. */ public final @NullableType RendererConfiguration[] rendererConfigurations; - /** - * A {@link TrackSelectionArray} containing the track selection for each renderer. - */ - public final TrackSelectionArray selections; + /** A {@link ExoTrackSelection} array containing the track selection for each renderer. */ + public final @NullableType ExoTrackSelection[] selections; + /** Describe the tracks and which one were selected. */ + public final Tracks tracks; /** * An opaque object that will be returned to {@link TrackSelector#onSelectionActivated(Object)} * should the selections be activated. */ - public final Object info; + @Nullable public final Object info; /** * @param rendererConfigurations A {@link RendererConfiguration} for each renderer. A null entry * indicates the corresponding renderer should be disabled. - * @param selections A {@link TrackSelectionArray} containing the selection for each renderer. + * @param selections A {@link ExoTrackSelection} array containing the selection for each renderer. * @param info An opaque object that will be returned to {@link - * TrackSelector#onSelectionActivated(Object)} should the selection be activated. + * TrackSelector#onSelectionActivated(Object)} should the selection be activated. May be + * {@code null}. + * @deprecated Use {@link #TrackSelectorResult(RendererConfiguration[], ExoTrackSelection[], + * Tracks, Object)}. 
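Tying the TrackSelector changes together, here is a hedged sketch of a trivial subclass against the updated contract: the selectTracks signature shown above, the now-nullable onSelectionActivated argument, and the TrackSelectorResult constructor that takes a Tracks value. The class is illustrative only (it disables every renderer) and is not part of the diff.

import androidx.annotation.Nullable;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.RendererConfiguration;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.Tracks;
import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.trackselection.ExoTrackSelection;
import com.google.android.exoplayer2.trackselection.TrackSelector;
import com.google.android.exoplayer2.trackselection.TrackSelectorResult;

/** Illustration only: a selector that disables every renderer. */
final class NoOpTrackSelector extends TrackSelector {

  @Override
  public TrackSelectorResult selectTracks(
      RendererCapabilities[] rendererCapabilities,
      TrackGroupArray trackGroups,
      MediaPeriodId periodId,
      Timeline timeline)
      throws ExoPlaybackException {
    // Null configurations disable the renderers; null selections mean no tracks are chosen.
    RendererConfiguration[] configurations =
        new RendererConfiguration[rendererCapabilities.length];
    ExoTrackSelection[] selections = new ExoTrackSelection[rendererCapabilities.length];
    return new TrackSelectorResult(configurations, selections, Tracks.EMPTY, /* info= */ null);
  }

  @Override
  public void onSelectionActivated(@Nullable Object info) {
    // Nothing to keep track of for this selector.
  }
}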
*/ + @Deprecated public TrackSelectorResult( @NullableType RendererConfiguration[] rendererConfigurations, - @NullableType TrackSelection[] selections, - Object info) { + @NullableType ExoTrackSelection[] selections, + @Nullable Object info) { + this(rendererConfigurations, selections, Tracks.EMPTY, info); + } + + /** + * @param rendererConfigurations A {@link RendererConfiguration} for each renderer. A null entry + * indicates the corresponding renderer should be disabled. + * @param selections A {@link ExoTrackSelection} array containing the selection for each renderer. + * @param tracks Description of the available tracks and which one were selected. + * @param info An opaque object that will be returned to {@link + * TrackSelector#onSelectionActivated(Object)} should the selection be activated. May be + * {@code null}. + */ + public TrackSelectorResult( + @NullableType RendererConfiguration[] rendererConfigurations, + @NullableType ExoTrackSelection[] selections, + Tracks tracks, + @Nullable Object info) { this.rendererConfigurations = rendererConfigurations; - this.selections = new TrackSelectionArray(selections); + this.selections = selections.clone(); + this.tracks = tracks; this.info = info; length = rendererConfigurations.length; } @@ -99,7 +120,6 @@ public boolean isEquivalent(@Nullable TrackSelectorResult other, int index) { return false; } return Util.areEqual(rendererConfigurations[index], other.rendererConfigurations[index]) - && Util.areEqual(selections.get(index), other.selections.get(index)); + && Util.areEqual(selections[index], other.selections[index]); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ui/AdOverlayInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ui/AdOverlayInfo.java new file mode 100644 index 0000000000..fa1102854f --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ui/AdOverlayInfo.java @@ -0,0 +1,118 @@ +/* + * Copyright (C) 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.ui; + +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.view.View; +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** Provides information about an overlay view shown on top of an ad view group. */ +public final class AdOverlayInfo { + + /** + * The purpose of the overlay. One of {@link #PURPOSE_CONTROLS}, {@link #PURPOSE_CLOSE_AD}, {@link + * #PURPOSE_OTHER} or {@link #PURPOSE_NOT_VISIBLE}. 
+ */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) + @IntDef({PURPOSE_CONTROLS, PURPOSE_CLOSE_AD, PURPOSE_OTHER, PURPOSE_NOT_VISIBLE}) + public @interface Purpose {} + /** Purpose for playback controls overlaying the player. */ + public static final int PURPOSE_CONTROLS = 1; + /** Purpose for ad close buttons overlaying the player. */ + public static final int PURPOSE_CLOSE_AD = 2; + /** Purpose for other overlays. */ + public static final int PURPOSE_OTHER = 3; + /** Purpose for overlays that are not visible. */ + public static final int PURPOSE_NOT_VISIBLE = 4; + + /** A builder for {@link AdOverlayInfo} instances. */ + public static final class Builder { + + private final View view; + private final @Purpose int purpose; + + @Nullable private String detailedReason; + + /** + * Creates a new builder. + * + * @param view The view that is overlaying the player. + * @param purpose The purpose of the view. + */ + public Builder(View view, @Purpose int purpose) { + this.view = view; + this.purpose = purpose; + } + + /** + * Sets an optional, detailed reason that the view is on top of the player. + * + * @return This builder, for convenience. + */ + @CanIgnoreReturnValue + public Builder setDetailedReason(@Nullable String detailedReason) { + this.detailedReason = detailedReason; + return this; + } + + /** Returns a new {@link AdOverlayInfo} instance with the current builder values. */ + // Using deprecated constructor while it still exists. + @SuppressWarnings("deprecation") + public AdOverlayInfo build() { + return new AdOverlayInfo(view, purpose, detailedReason); + } + } + + /** The overlay view. */ + public final View view; + /** The purpose of the overlay view. */ + public final @Purpose int purpose; + /** An optional, detailed reason that the overlay view is needed. */ + @Nullable public final String reasonDetail; + + /** + * @deprecated Use {@link Builder} instead. + */ + @Deprecated + public AdOverlayInfo(View view, @Purpose int purpose) { + this(view, purpose, /* detailedReason= */ null); + } + + /** + * @deprecated Use {@link Builder} instead. + */ + @Deprecated + public AdOverlayInfo(View view, @Purpose int purpose, @Nullable String detailedReason) { + this.view = view; + this.purpose = purpose; + this.reasonDetail = detailedReason; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ui/AdViewProvider.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ui/AdViewProvider.java new file mode 100644 index 0000000000..dd6fa84184 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ui/AdViewProvider.java @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
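AdOverlayInfo above and the AdViewProvider interface added in the next new file are meant to be used together so ads loaders can measure viewability: the provider returns the ad container plus overlay descriptions for any controls drawn on top of it. A possible implementation sketch; the class and field names are invented for illustration.

import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.ui.AdOverlayInfo;
import com.google.android.exoplayer2.ui.AdViewProvider;
import com.google.common.collect.ImmutableList;
import java.util.List;

/** Reports the ad container plus a pause button that should not count against viewability. */
final class SimpleAdViewProvider implements AdViewProvider {

  private final ViewGroup adContainer;
  private final View pauseButton;

  SimpleAdViewProvider(ViewGroup adContainer, View pauseButton) {
    this.adContainer = adContainer;
    this.pauseButton = pauseButton;
  }

  @Nullable
  @Override
  public ViewGroup getAdViewGroup() {
    return adContainer;
  }

  @Override
  public List<AdOverlayInfo> getAdOverlayInfos() {
    return ImmutableList.of(
        new AdOverlayInfo.Builder(pauseButton, AdOverlayInfo.PURPOSE_CONTROLS)
            .setDetailedReason("Transient pause/resume control")
            .build());
  }
}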
+ */ +package com.google.android.exoplayer2.ui; + +import android.view.ViewGroup; +import androidx.annotation.Nullable; +import com.google.common.collect.ImmutableList; +import java.util.List; + +/** Provides information about views for the ad playback UI. */ +public interface AdViewProvider { + + /** + * Returns the {@link ViewGroup} on top of the player that will show any ad UI, or {@code null} if + * playing audio-only ads. Any views on top of the returned view group must be described by {@link + * AdOverlayInfo AdOverlayInfos} returned by {@link #getAdOverlayInfos()}, for accurate + * viewability measurement. + */ + @Nullable + ViewGroup getAdViewGroup(); + + /** + * Returns a list of {@link AdOverlayInfo} instances describing views that are on top of the ad + * view group, but that are essential for controlling playback and should be excluded from ad + * viewability measurements. + * + *
<p>
      Each view must be either a fully transparent overlay (for capturing touch events), or a + * small piece of transient UI that is essential to the user experience of playback (such as a + * button to pause/resume playback or a transient full-screen or cast button). For more + * information see the documentation for your ads loader. + */ + default List getAdOverlayInfos() { + return ImmutableList.of(); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ui/AspectRatioFrameLayout.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ui/AspectRatioFrameLayout.java old mode 100755 new mode 100644 diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/ui/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ui/package-info.java new file mode 100644 index 0000000000..85903f4659 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/ui/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.ui; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/Allocation.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/Allocation.java index f5aa81f325..7e815d259f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/Allocation.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/Allocation.java @@ -17,9 +17,9 @@ /** * An allocation within a byte array. - *
<p>
      - * The allocation's length is obtained by calling {@link Allocator#getIndividualAllocationLength()} - * on the {@link Allocator} from which it was obtained. + * + *
<p>
      The allocation's length is obtained by calling {@link + * Allocator#getIndividualAllocationLength()} on the {@link Allocator} from which it was obtained. */ public final class Allocation { @@ -29,9 +29,7 @@ public final class Allocation { */ public final byte[] data; - /** - * The offset of the allocated space in {@link #data}. - */ + /** The offset of the allocated space in {@link #data}. */ public final int offset; /** @@ -42,5 +40,4 @@ public Allocation(byte[] data, int offset) { this.data = data; this.offset = offset; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/Allocator.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/Allocator.java index 17b7dfd6e9..aae5dc9715 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/Allocator.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/Allocator.java @@ -15,15 +15,26 @@ */ package com.google.android.exoplayer2.upstream; -/** - * A source of allocations. - */ +import androidx.annotation.Nullable; + +/** A source of allocations. */ public interface Allocator { + /** A node in a chain of {@link Allocation Allocations}. */ + interface AllocationNode { + + /** Returns the {@link Allocation} associated to this chain node. */ + Allocation getAllocation(); + + /** Returns the next chain node, or {@code null} if this is the last node in the chain. */ + @Nullable + AllocationNode next(); + } + /** * Obtain an {@link Allocation}. - *
<p>
      - * When the caller has finished with the {@link Allocation}, it should be returned by calling + * + *
<p>
      When the caller has finished with the {@link Allocation}, it should be returned by calling * {@link #release(Allocation)}. * * @return The {@link Allocation}. @@ -38,26 +49,22 @@ public interface Allocator { void release(Allocation allocation); /** - * Releases an array of {@link Allocation}s back to the allocator. + * Releases all {@link Allocation Allocations} in the chain starting at the given {@link + * AllocationNode}. * - * @param allocations The array of {@link Allocation}s being released. + *
<p>
      Implementations must not make memory allocations. */ - void release(Allocation[] allocations); + void release(AllocationNode allocationNode); /** - * Hints to the allocator that it should make a best effort to release any excess - * {@link Allocation}s. + * Hints to the allocator that it should make a best effort to release any excess {@link + * Allocation Allocations}. */ void trim(); - /** - * Returns the total number of bytes currently allocated. - */ + /** Returns the total number of bytes currently allocated. */ int getTotalBytesAllocated(); - /** - * Returns the length of each individual {@link Allocation}. - */ + /** Returns the length of each individual {@link Allocation}. */ int getIndividualAllocationLength(); - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/AssetDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/AssetDataSource.java index 3c92b039cc..9df0dd87fa 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/AssetDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/AssetDataSource.java @@ -16,29 +16,43 @@ package com.google.android.exoplayer2.upstream; import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.min; import android.content.Context; import android.content.res.AssetManager; import android.net.Uri; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.PlaybackException; import com.google.android.exoplayer2.util.Assertions; -import java.io.EOFException; +import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; /** A {@link DataSource} for reading from a local asset. */ public final class AssetDataSource extends BaseDataSource { - /** - * Thrown when an {@link IOException} is encountered reading a local asset. - */ - public static final class AssetDataSourceException extends IOException { + /** Thrown when an {@link IOException} is encountered reading a local asset. */ + public static final class AssetDataSourceException extends DataSourceException { + /** + * @deprecated Use {@link #AssetDataSourceException(Throwable, int)}. + */ + @Deprecated public AssetDataSourceException(IOException cause) { - super(cause); + super(cause, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } + /** + * Creates a new instance. + * + * @param cause The error cause. + * @param errorCode See {@link PlaybackException.ErrorCode}. + */ + public AssetDataSourceException( + @Nullable Throwable cause, @PlaybackException.ErrorCode int errorCode) { + super(cause, errorCode); + } } private final AssetManager assetManager; @@ -48,7 +62,9 @@ public AssetDataSourceException(IOException cause) { private long bytesRemaining; private boolean opened; - /** @param context A context. */ + /** + * @param context A context. + */ public AssetDataSource(Context context) { super(/* isNetwork= */ false); this.assetManager = context.getAssets(); @@ -70,7 +86,8 @@ public long open(DataSpec dataSpec) throws AssetDataSourceException { if (skipped < dataSpec.position) { // assetManager.open() returns an AssetInputStream, whose skip() implementation only skips // fewer bytes than requested if the skip is beyond the end of the asset's data. 
- throw new EOFException(); + throw new AssetDataSourceException( + /* cause= */ null, PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE); } if (dataSpec.length != C.LENGTH_UNSET) { bytesRemaining = dataSpec.length; @@ -83,8 +100,14 @@ public long open(DataSpec dataSpec) throws AssetDataSourceException { bytesRemaining = C.LENGTH_UNSET; } } + } catch (AssetDataSourceException e) { + throw e; } catch (IOException e) { - throw new AssetDataSourceException(e); + throw new AssetDataSourceException( + e, + e instanceof FileNotFoundException + ? PlaybackException.ERROR_CODE_IO_FILE_NOT_FOUND + : PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } opened = true; @@ -93,8 +116,8 @@ public long open(DataSpec dataSpec) throws AssetDataSourceException { } @Override - public int read(byte[] buffer, int offset, int readLength) throws AssetDataSourceException { - if (readLength == 0) { + public int read(byte[] buffer, int offset, int length) throws AssetDataSourceException { + if (length == 0) { return 0; } else if (bytesRemaining == 0) { return C.RESULT_END_OF_INPUT; @@ -102,18 +125,14 @@ public int read(byte[] buffer, int offset, int readLength) throws AssetDataSourc int bytesRead; try { - int bytesToRead = bytesRemaining == C.LENGTH_UNSET ? readLength - : (int) Math.min(bytesRemaining, readLength); + int bytesToRead = + bytesRemaining == C.LENGTH_UNSET ? length : (int) min(bytesRemaining, length); bytesRead = castNonNull(inputStream).read(buffer, offset, bytesToRead); } catch (IOException e) { - throw new AssetDataSourceException(e); + throw new AssetDataSourceException(e, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } if (bytesRead == -1) { - if (bytesRemaining != C.LENGTH_UNSET) { - // End of stream reached having not read sufficient data. - throw new AssetDataSourceException(new EOFException()); - } return C.RESULT_END_OF_INPUT; } if (bytesRemaining != C.LENGTH_UNSET) { @@ -137,7 +156,7 @@ public void close() throws AssetDataSourceException { inputStream.close(); } } catch (IOException e) { - throw new AssetDataSourceException(e); + throw new AssetDataSourceException(e, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } finally { inputStream = null; if (opened) { @@ -146,5 +165,4 @@ public void close() throws AssetDataSourceException { } } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/BandwidthMeter.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/BandwidthMeter.java index 853a9af526..44e0bb4d4b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/BandwidthMeter.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/BandwidthMeter.java @@ -17,15 +17,14 @@ import android.os.Handler; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.util.Assertions; +import java.util.concurrent.CopyOnWriteArrayList; -/** - * Provides estimates of the currently available bandwidth. - */ +/** Provides estimates of the currently available bandwidth. */ public interface BandwidthMeter { - /** - * A listener of {@link BandwidthMeter} events. - */ + /** A listener of {@link BandwidthMeter} events. */ interface EventListener { /** @@ -33,7 +32,7 @@ interface EventListener { * changed. * *
<p>
      Note: The estimated bitrate is typically derived from more information than just {@code - * bytes} and {@code elapsedMs}. + * bytesTransferred} and {@code elapsedMs}. * * @param elapsedMs The time taken to transfer {@code bytesTransferred}, in milliseconds. This * is at most the elapsed time since the last callback, but may be less if there were @@ -42,11 +41,76 @@ interface EventListener { * @param bitrateEstimate The estimated bitrate in bits/sec. */ void onBandwidthSample(int elapsedMs, long bytesTransferred, long bitrateEstimate); + + /** Event dispatcher which allows listener registration. */ + final class EventDispatcher { + + private final CopyOnWriteArrayList listeners; + + /** Creates an event dispatcher. */ + public EventDispatcher() { + listeners = new CopyOnWriteArrayList<>(); + } + + /** Adds a listener to the event dispatcher. */ + public void addListener(Handler eventHandler, BandwidthMeter.EventListener eventListener) { + Assertions.checkNotNull(eventHandler); + Assertions.checkNotNull(eventListener); + removeListener(eventListener); + listeners.add(new HandlerAndListener(eventHandler, eventListener)); + } + + /** Removes a listener from the event dispatcher. */ + public void removeListener(BandwidthMeter.EventListener eventListener) { + for (HandlerAndListener handlerAndListener : listeners) { + if (handlerAndListener.listener == eventListener) { + handlerAndListener.release(); + listeners.remove(handlerAndListener); + } + } + } + + public void bandwidthSample(int elapsedMs, long bytesTransferred, long bitrateEstimate) { + for (HandlerAndListener handlerAndListener : listeners) { + if (!handlerAndListener.released) { + handlerAndListener.handler.post( + () -> + handlerAndListener.listener.onBandwidthSample( + elapsedMs, bytesTransferred, bitrateEstimate)); + } + } + } + + private static final class HandlerAndListener { + + private final Handler handler; + private final BandwidthMeter.EventListener listener; + + private boolean released; + + public HandlerAndListener(Handler handler, BandwidthMeter.EventListener eventListener) { + this.handler = handler; + this.listener = eventListener; + } + + public void release() { + released = true; + } + } + } } /** Returns the estimated bitrate. */ long getBitrateEstimate(); + /** + * Returns the estimated time to first byte, in microseconds, or {@link C#TIME_UNSET} if no + * estimate is available. + */ + default long getTimeToFirstByteEstimateUs() { + return C.TIME_UNSET; + } + /** * Returns the {@link TransferListener} that this instance uses to gather bandwidth information * from data transfers. May be null if the implementation does not listen to data transfers. 
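For context on the EventDispatcher that the BandwidthMeter change above introduces, the sketch below shows how a meter implementation might register listeners and fan out samples. It is illustrative only: the class and method names are hypothetical, and only the dispatcher API used here (addListener and bandwidthSample) comes from the patch.

// Illustrative sketch, not part of the upstream patch: fan out bandwidth samples through the
// EventDispatcher nested in BandwidthMeter.EventListener. Class and method names are hypothetical.
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import com.google.android.exoplayer2.upstream.BandwidthMeter;

final class BandwidthSampleFanOut {

  private final BandwidthMeter.EventListener.EventDispatcher eventDispatcher =
      new BandwidthMeter.EventListener.EventDispatcher();

  // Registers a listener whose onBandwidthSample callbacks are posted to the main thread.
  void register() {
    Handler mainHandler = new Handler(Looper.getMainLooper());
    eventDispatcher.addListener(
        mainHandler,
        (elapsedMs, bytesTransferred, bitrateEstimate) ->
            Log.d("Bandwidth", bitrateEstimate + " bps over " + elapsedMs + " ms"));
  }

  // A meter implementation would call this when a measurement window completes; the dispatcher
  // posts the sample to every registered listener on the handler it was registered with.
  void publish(int elapsedMs, long bytesTransferred, long bitrateEstimate) {
    eventDispatcher.bandwidthSample(elapsedMs, bytesTransferred, bitrateEstimate);
  }
}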
diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/BaseDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/BaseDataSource.java index 80687db31f..ce6243eda0 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/BaseDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/BaseDataSource.java @@ -15,6 +15,7 @@ */ package com.google.android.exoplayer2.upstream; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; import static com.google.android.exoplayer2.util.Util.castNonNull; import androidx.annotation.Nullable; @@ -47,6 +48,7 @@ protected BaseDataSource(boolean isNetwork) { @Override public final void addTransferListener(TransferListener transferListener) { + checkNotNull(transferListener); if (!listeners.contains(transferListener)) { listeners.add(transferListener); listenerCount++; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ByteArrayDataSink.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ByteArrayDataSink.java index 2ba6ab4c69..9471483419 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ByteArrayDataSink.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ByteArrayDataSink.java @@ -24,9 +24,7 @@ import java.io.IOException; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; -/** - * A {@link DataSink} for writing to a byte array. - */ +/** A {@link DataSink} for writing to a byte array. */ public final class ByteArrayDataSink implements DataSink { private @MonotonicNonNull ByteArrayOutputStream stream; @@ -59,5 +57,4 @@ public void write(byte[] buffer, int offset, int length) { public byte[] getData() { return stream == null ? null : stream.toByteArray(); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ByteArrayDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ByteArrayDataSource.java index ed5ba9064b..bd0751d26a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ByteArrayDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ByteArrayDataSource.java @@ -15,9 +15,12 @@ */ package com.google.android.exoplayer2.upstream; +import static java.lang.Math.min; + import android.net.Uri; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.PlaybackException; import com.google.android.exoplayer2.util.Assertions; import java.io.IOException; @@ -45,32 +48,33 @@ public ByteArrayDataSource(byte[] data) { public long open(DataSpec dataSpec) throws IOException { uri = dataSpec.uri; transferInitializing(dataSpec); + if (dataSpec.position > data.length) { + throw new DataSourceException(PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE); + } readPosition = (int) dataSpec.position; - bytesRemaining = (int) ((dataSpec.length == C.LENGTH_UNSET) - ? 
(data.length - dataSpec.position) : dataSpec.length); - if (bytesRemaining <= 0 || readPosition + bytesRemaining > data.length) { - throw new IOException("Unsatisfiable range: [" + readPosition + ", " + dataSpec.length - + "], length: " + data.length); + bytesRemaining = data.length - (int) dataSpec.position; + if (dataSpec.length != C.LENGTH_UNSET) { + bytesRemaining = (int) min(bytesRemaining, dataSpec.length); } opened = true; transferStarted(dataSpec); - return bytesRemaining; + return dataSpec.length != C.LENGTH_UNSET ? dataSpec.length : bytesRemaining; } @Override - public int read(byte[] buffer, int offset, int readLength) { - if (readLength == 0) { + public int read(byte[] buffer, int offset, int length) { + if (length == 0) { return 0; } else if (bytesRemaining == 0) { return C.RESULT_END_OF_INPUT; } - readLength = Math.min(readLength, bytesRemaining); - System.arraycopy(data, readPosition, buffer, offset, readLength); - readPosition += readLength; - bytesRemaining -= readLength; - bytesTransferred(readLength); - return readLength; + length = min(length, bytesRemaining); + System.arraycopy(data, readPosition, buffer, offset, length); + readPosition += length; + bytesRemaining -= length; + bytesTransferred(length); + return length; } @Override @@ -87,5 +91,4 @@ public void close() { } uri = null; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CachedRegionTracker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/CachedRegionTracker.java similarity index 82% rename from TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CachedRegionTracker.java rename to TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/CachedRegionTracker.java index fb2d4f694f..047287b98e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CachedRegionTracker.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/CachedRegionTracker.java @@ -13,10 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.google.android.exoplayer2.upstream.cache; +package com.google.android.exoplayer2.upstream; -import androidx.annotation.NonNull; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.extractor.ChunkIndex; +import com.google.android.exoplayer2.upstream.cache.Cache; +import com.google.android.exoplayer2.upstream.cache.CacheSpan; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Util; import java.util.Arrays; @@ -25,8 +27,8 @@ import java.util.TreeSet; /** - * Utility class for efficiently tracking regions of data that are stored in a {@link Cache} - * for a given cache key. + * Utility class for efficiently tracking regions of data that are stored in a {@link Cache} for a + * given cache key. */ public final class CachedRegionTracker implements Cache.Listener { @@ -66,19 +68,20 @@ public void release() { } /** - * When provided with a byte offset, this method locates the cached region within which the - * offset falls, and returns the approximate end position in milliseconds of that region. If the - * byte offset does not fall within a cached region then {@link #NOT_CACHED} is returned. - * If the cached region extends to the end of the stream, {@link #CACHED_TO_END} is returned. 
+ * When provided with a byte offset, this method locates the cached region within which the offset + * falls, and returns the approximate end position in milliseconds of that region. If the byte + * offset does not fall within a cached region then {@link #NOT_CACHED} is returned. If the cached + * region extends to the end of the stream, {@link #CACHED_TO_END} is returned. * * @param byteOffset The byte offset in the underlying stream. - * @return The end position of the corresponding cache region, {@link #NOT_CACHED}, or - * {@link #CACHED_TO_END}. + * @return The end position of the corresponding cache region, {@link #NOT_CACHED}, or {@link + * #CACHED_TO_END}. */ public synchronized int getRegionEndTimeMs(long byteOffset) { lookupRegion.startOffset = byteOffset; - Region floorRegion = regions.floor(lookupRegion); - if (floorRegion == null || byteOffset > floorRegion.endOffset + @Nullable Region floorRegion = regions.floor(lookupRegion); + if (floorRegion == null + || byteOffset > floorRegion.endOffset || floorRegion.endOffsetIndex == -1) { return NOT_CACHED; } @@ -87,8 +90,9 @@ public synchronized int getRegionEndTimeMs(long byteOffset) { && floorRegion.endOffset == (chunkIndex.offsets[index] + chunkIndex.sizes[index])) { return CACHED_TO_END; } - long segmentFractionUs = (chunkIndex.durationsUs[index] - * (floorRegion.endOffset - chunkIndex.offsets[index])) / chunkIndex.sizes[index]; + long segmentFractionUs = + (chunkIndex.durationsUs[index] * (floorRegion.endOffset - chunkIndex.offsets[index])) + / chunkIndex.sizes[index]; return (int) ((chunkIndex.timesUs[index] + segmentFractionUs) / 1000); } @@ -102,7 +106,7 @@ public synchronized void onSpanRemoved(Cache cache, CacheSpan span) { Region removedRegion = new Region(span.position, span.position + span.length); // Look up a region this span falls into. - Region floorRegion = regions.floor(removedRegion); + @Nullable Region floorRegion = regions.floor(removedRegion); if (floorRegion == null) { Log.e(TAG, "Removed a span we were not aware of"); return; @@ -134,8 +138,8 @@ public void onSpanTouched(Cache cache, CacheSpan oldSpan, CacheSpan newSpan) { private void mergeSpan(CacheSpan span) { Region newRegion = new Region(span.position, span.position + span.length); - Region floorRegion = regions.floor(newRegion); - Region ceilingRegion = regions.ceiling(newRegion); + @Nullable Region floorRegion = regions.floor(newRegion); + @Nullable Region ceilingRegion = regions.ceiling(newRegion); boolean floorConnects = regionsConnect(floorRegion, newRegion); boolean ceilingConnects = regionsConnect(newRegion, ceilingRegion); @@ -168,19 +172,15 @@ private void mergeSpan(CacheSpan span) { } } - private boolean regionsConnect(Region lower, Region upper) { + private boolean regionsConnect(@Nullable Region lower, @Nullable Region upper) { return lower != null && upper != null && lower.endOffset == upper.startOffset; } private static class Region implements Comparable { - /** - * The first byte of the region (inclusive). - */ + /** The first byte of the region (inclusive). */ public long startOffset; - /** - * End offset of the region (exclusive). - */ + /** End offset of the region (exclusive). */ public long endOffset; /** * The index in chunkIndex that contains the end offset. 
May be -1 if the end offset comes @@ -195,10 +195,8 @@ public Region(long position, long endOffset) { } @Override - public int compareTo(@NonNull Region another) { + public int compareTo(Region another) { return Util.compareLong(startOffset, another.startOffset); } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ContentDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ContentDataSource.java index baaa677127..15f340c865 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ContentDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ContentDataSource.java @@ -16,14 +16,17 @@ package com.google.android.exoplayer2.upstream; import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.min; import android.content.ContentResolver; import android.content.Context; import android.content.res.AssetFileDescriptor; import android.net.Uri; +import android.os.Bundle; +import android.provider.MediaStore; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; -import java.io.EOFException; +import com.google.android.exoplayer2.PlaybackException; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; @@ -32,15 +35,22 @@ /** A {@link DataSource} for reading from a content URI. */ public final class ContentDataSource extends BaseDataSource { - /** - * Thrown when an {@link IOException} is encountered reading from a content URI. - */ - public static class ContentDataSourceException extends IOException { + /** Thrown when an {@link IOException} is encountered reading from a content URI. */ + public static class ContentDataSourceException extends DataSourceException { + /** + * @deprecated Use {@link #ContentDataSourceException(IOException, int)}. + */ + @Deprecated public ContentDataSourceException(IOException cause) { - super(cause); + this(cause, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } + /** Creates a new instance. */ + public ContentDataSourceException( + @Nullable IOException cause, @PlaybackException.ErrorCode int errorCode) { + super(cause, errorCode); + } } private final ContentResolver resolver; @@ -60,54 +70,103 @@ public ContentDataSource(Context context) { } @Override + @SuppressWarnings("InlinedApi") // We are inlining EXTRA_ACCEPT_ORIGINAL_MEDIA_FORMAT. public long open(DataSpec dataSpec) throws ContentDataSourceException { try { Uri uri = dataSpec.uri; this.uri = uri; transferInitializing(dataSpec); - AssetFileDescriptor assetFileDescriptor = resolver.openAssetFileDescriptor(uri, "r"); + + AssetFileDescriptor assetFileDescriptor; + if ("content".equals(dataSpec.uri.getScheme())) { + Bundle providerOptions = new Bundle(); + // We don't want compatible media transcoding. + providerOptions.putBoolean(MediaStore.EXTRA_ACCEPT_ORIGINAL_MEDIA_FORMAT, true); + assetFileDescriptor = + resolver.openTypedAssetFileDescriptor(uri, /* mimeType= */ "*/*", providerOptions); + } else { + // This path supports file URIs, although support may be removed in the future. See + // [Internal ref: b/195384732]. + assetFileDescriptor = resolver.openAssetFileDescriptor(uri, "r"); + } this.assetFileDescriptor = assetFileDescriptor; if (assetFileDescriptor == null) { - throw new FileNotFoundException("Could not open file descriptor for: " + uri); + // assetFileDescriptor may be null if the provider recently crashed. 
+ throw new ContentDataSourceException( + new IOException("Could not open file descriptor for: " + uri), + PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } + + long assetFileDescriptorLength = assetFileDescriptor.getLength(); FileInputStream inputStream = new FileInputStream(assetFileDescriptor.getFileDescriptor()); this.inputStream = inputStream; - long assetStartOffset = assetFileDescriptor.getStartOffset(); - long skipped = inputStream.skip(assetStartOffset + dataSpec.position) - assetStartOffset; + // We can't rely only on the "skipped < dataSpec.position" check below to detect whether the + // position is beyond the end of the asset being read. This is because the file may contain + // multiple assets, and there's nothing to prevent InputStream.skip() from succeeding by + // skipping into the data of the next asset. Hence we also need to check against the asset + // length explicitly, which is guaranteed to be set unless the asset extends to the end of the + // file. + if (assetFileDescriptorLength != AssetFileDescriptor.UNKNOWN_LENGTH + && dataSpec.position > assetFileDescriptorLength) { + throw new ContentDataSourceException( + /* cause= */ null, PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE); + } + long assetFileDescriptorOffset = assetFileDescriptor.getStartOffset(); + long skipped = + inputStream.skip(assetFileDescriptorOffset + dataSpec.position) + - assetFileDescriptorOffset; if (skipped != dataSpec.position) { // We expect the skip to be satisfied in full. If it isn't then we're probably trying to - // skip beyond the end of the data. - throw new EOFException(); + // read beyond the end of the last resource in the file. + throw new ContentDataSourceException( + /* cause= */ null, PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE); } - if (dataSpec.length != C.LENGTH_UNSET) { - bytesRemaining = dataSpec.length; - } else { - long assetFileDescriptorLength = assetFileDescriptor.getLength(); - if (assetFileDescriptorLength == AssetFileDescriptor.UNKNOWN_LENGTH) { - // The asset must extend to the end of the file. If FileInputStream.getChannel().size() - // returns 0 then the remaining length cannot be determined. - FileChannel channel = inputStream.getChannel(); - long channelSize = channel.size(); - bytesRemaining = channelSize == 0 ? C.LENGTH_UNSET : channelSize - channel.position(); + if (assetFileDescriptorLength == AssetFileDescriptor.UNKNOWN_LENGTH) { + // The asset must extend to the end of the file. We can try and resolve the length with + // FileInputStream.getChannel().size(). + FileChannel channel = inputStream.getChannel(); + long channelSize = channel.size(); + if (channelSize == 0) { + bytesRemaining = C.LENGTH_UNSET; } else { - bytesRemaining = assetFileDescriptorLength - skipped; + bytesRemaining = channelSize - channel.position(); + if (bytesRemaining < 0) { + // The skip above was satisfied in full, but skipped beyond the end of the file. + throw new ContentDataSourceException( + /* cause= */ null, PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE); + } + } + } else { + bytesRemaining = assetFileDescriptorLength - skipped; + if (bytesRemaining < 0) { + throw new ContentDataSourceException( + /* cause= */ null, PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE); } } + } catch (ContentDataSourceException e) { + throw e; } catch (IOException e) { - throw new ContentDataSourceException(e); + throw new ContentDataSourceException( + e, + e instanceof FileNotFoundException + ? 
PlaybackException.ERROR_CODE_IO_FILE_NOT_FOUND + : PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } + if (dataSpec.length != C.LENGTH_UNSET) { + bytesRemaining = + bytesRemaining == C.LENGTH_UNSET ? dataSpec.length : min(bytesRemaining, dataSpec.length); + } opened = true; transferStarted(dataSpec); - - return bytesRemaining; + return dataSpec.length != C.LENGTH_UNSET ? dataSpec.length : bytesRemaining; } @Override - public int read(byte[] buffer, int offset, int readLength) throws ContentDataSourceException { - if (readLength == 0) { + public int read(byte[] buffer, int offset, int length) throws ContentDataSourceException { + if (length == 0) { return 0; } else if (bytesRemaining == 0) { return C.RESULT_END_OF_INPUT; @@ -115,18 +174,14 @@ public int read(byte[] buffer, int offset, int readLength) throws ContentDataSou int bytesRead; try { - int bytesToRead = bytesRemaining == C.LENGTH_UNSET ? readLength - : (int) Math.min(bytesRemaining, readLength); + int bytesToRead = + bytesRemaining == C.LENGTH_UNSET ? length : (int) min(bytesRemaining, length); bytesRead = castNonNull(inputStream).read(buffer, offset, bytesToRead); } catch (IOException e) { - throw new ContentDataSourceException(e); + throw new ContentDataSourceException(e, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } if (bytesRead == -1) { - if (bytesRemaining != C.LENGTH_UNSET) { - // End of stream reached having not read sufficient data. - throw new ContentDataSourceException(new EOFException()); - } return C.RESULT_END_OF_INPUT; } if (bytesRemaining != C.LENGTH_UNSET) { @@ -151,7 +206,7 @@ public void close() throws ContentDataSourceException { inputStream.close(); } } catch (IOException e) { - throw new ContentDataSourceException(e); + throw new ContentDataSourceException(e, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } finally { inputStream = null; try { @@ -159,7 +214,7 @@ public void close() throws ContentDataSourceException { assetFileDescriptor.close(); } } catch (IOException e) { - throw new ContentDataSourceException(e); + throw new ContentDataSourceException(e, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } finally { assetFileDescriptor = null; if (opened) { @@ -169,5 +224,4 @@ public void close() throws ContentDataSourceException { } } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataReader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataReader.java new file mode 100644 index 0000000000..4b96a3ddfb --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataReader.java @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.upstream; + +import com.google.android.exoplayer2.C; +import java.io.IOException; + +/** Reads bytes from a data stream. */ +public interface DataReader { + /** + * Reads up to {@code length} bytes of data from the input. + * + *
<p>
      If {@code readLength} is zero then 0 is returned. Otherwise, if no data is available because + * the end of the opened range has been reached, then {@link C#RESULT_END_OF_INPUT} is returned. + * Otherwise, the call will block until at least one byte of data has been read and the number of + * bytes read is returned. + * + * @param buffer A target array into which data should be written. + * @param offset The offset into the target array at which to write. + * @param length The maximum number of bytes to read from the input. + * @return The number of bytes read, or {@link C#RESULT_END_OF_INPUT} if the input has ended. This + * may be less than {@code length} because the end of the input (or available data) was + * reached, the method was interrupted, or the operation was aborted early for another reason. + * @throws IOException If an error occurs reading from the input. + */ + int read(byte[] buffer, int offset, int length) throws IOException; +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSchemeDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSchemeDataSource.java index e592c3bec3..786d497730 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSchemeDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSchemeDataSource.java @@ -16,13 +16,17 @@ package com.google.android.exoplayer2.upstream; import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.min; import android.net.Uri; import android.util.Base64; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Charsets; import java.io.IOException; import java.net.URLDecoder; @@ -33,11 +37,9 @@ public final class DataSchemeDataSource extends BaseDataSource { @Nullable private DataSpec dataSpec; @Nullable private byte[] data; - private int endPosition; private int readPosition; + private int bytesRemaining; - // the constructor does not initialize fields: data - @SuppressWarnings("nullness:initialization.fields.uninitialized") public DataSchemeDataSource() { super(/* isNetwork= */ false); } @@ -46,51 +48,53 @@ public DataSchemeDataSource() { public long open(DataSpec dataSpec) throws IOException { transferInitializing(dataSpec); this.dataSpec = dataSpec; - readPosition = (int) dataSpec.position; Uri uri = dataSpec.uri; String scheme = uri.getScheme(); - if (!SCHEME_DATA.equals(scheme)) { - throw new ParserException("Unsupported scheme: " + scheme); - } + Assertions.checkArgument(SCHEME_DATA.equals(scheme), "Unsupported scheme: " + scheme); String[] uriParts = Util.split(uri.getSchemeSpecificPart(), ","); if (uriParts.length != 2) { - throw new ParserException("Unexpected URI format: " + uri); + throw ParserException.createForMalformedDataOfUnknownType( + "Unexpected URI format: " + uri, /* cause= */ null); } String dataString = uriParts[1]; if (uriParts[0].contains(";base64")) { try { - data = Base64.decode(dataString, 0); + data = Base64.decode(dataString, /* flags= */ Base64.DEFAULT); } catch (IllegalArgumentException e) { - throw new ParserException("Error while parsing Base64 encoded string: " + dataString, e); + throw ParserException.createForMalformedDataOfUnknownType( + "Error while 
parsing Base64 encoded string: " + dataString, e); } } else { // TODO: Add support for other charsets. - data = Util.getUtf8Bytes(URLDecoder.decode(dataString, C.ASCII_NAME)); + data = Util.getUtf8Bytes(URLDecoder.decode(dataString, Charsets.US_ASCII.name())); } - endPosition = - dataSpec.length != C.LENGTH_UNSET ? (int) dataSpec.length + readPosition : data.length; - if (endPosition > data.length || readPosition > endPosition) { + if (dataSpec.position > data.length) { data = null; - throw new DataSourceException(DataSourceException.POSITION_OUT_OF_RANGE); + throw new DataSourceException(PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE); + } + readPosition = (int) dataSpec.position; + bytesRemaining = data.length - readPosition; + if (dataSpec.length != C.LENGTH_UNSET) { + bytesRemaining = (int) min(bytesRemaining, dataSpec.length); } transferStarted(dataSpec); - return (long) endPosition - readPosition; + return dataSpec.length != C.LENGTH_UNSET ? dataSpec.length : bytesRemaining; } @Override - public int read(byte[] buffer, int offset, int readLength) { - if (readLength == 0) { + public int read(byte[] buffer, int offset, int length) { + if (length == 0) { return 0; } - int remainingBytes = endPosition - readPosition; - if (remainingBytes == 0) { + if (bytesRemaining == 0) { return C.RESULT_END_OF_INPUT; } - readLength = Math.min(readLength, remainingBytes); - System.arraycopy(castNonNull(data), readPosition, buffer, offset, readLength); - readPosition += readLength; - bytesTransferred(readLength); - return readLength; + length = min(length, bytesRemaining); + System.arraycopy(castNonNull(data), readPosition, buffer, offset, length); + readPosition += length; + bytesRemaining -= length; + bytesTransferred(length); + return length; } @Override diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSink.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSink.java index 4973bb71e8..af644a8c48 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSink.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSink.java @@ -17,21 +17,14 @@ import java.io.IOException; -/** - * A component to which streams of data can be written. - */ +/** A component to which streams of data can be written. */ public interface DataSink { - /** - * A factory for {@link DataSink} instances. - */ + /** A factory for {@link DataSink} instances. */ interface Factory { - /** - * Creates a {@link DataSink} instance. - */ + /** Creates a {@link DataSink} instance. */ DataSink createDataSink(); - } /** diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSource.java index 204b9d4d66..c6ac0a261a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSource.java @@ -23,19 +23,13 @@ import java.util.List; import java.util.Map; -/** - * A component from which streams of data can be read. - */ -public interface DataSource { +/** Reads data from URI-identified resources. */ +public interface DataSource extends DataReader { - /** - * A factory for {@link DataSource} instances. - */ + /** A factory for {@link DataSource} instances. */ interface Factory { - /** - * Creates a {@link DataSource} instance. - */ + /** Creates a {@link DataSource} instance. 
*/ DataSource createDataSource(); } @@ -47,40 +41,36 @@ interface Factory { void addTransferListener(TransferListener transferListener); /** - * Opens the source to read the specified data. - *
<p>
      - * Note: If an {@link IOException} is thrown, callers must still call {@link #close()} to ensure - * that any partial effects of the invocation are cleaned up. + * Opens the source to read the specified data. If an {@link IOException} is thrown, callers must + * still call {@link #close()} to ensure that any partial effects of the invocation are cleaned + * up. + * + *
<p>
      The following edge case behaviors apply: + * + *

        + *
      • If the {@link DataSpec#position requested position} is within the resource, but the + * {@link DataSpec#length requested length} extends beyond the end of the resource, then + * {@link #open} will succeed and data from the requested position to the end of the + * resource will be made available through {@link #read}. + *
      • If the {@link DataSpec#position requested position} is equal to the length of the + * resource, then {@link #open} will succeed, and {@link #read} will immediately return + * {@link C#RESULT_END_OF_INPUT}. + *
      • If the {@link DataSpec#position requested position} is greater than the length of the + * resource, then {@link #open} will throw an {@link IOException} for which {@link + * DataSourceException#isCausedByPositionOutOfRange} will be {@code true}. + *
      * * @param dataSpec Defines the data to be read. * @throws IOException If an error occurs opening the source. {@link DataSourceException} can be * thrown or used as a cause of the thrown exception to specify the reason of the error. * @return The number of bytes that can be read from the opened source. For unbounded requests - * (i.e. requests where {@link DataSpec#length} equals {@link C#LENGTH_UNSET}) this value - * is the resolved length of the request, or {@link C#LENGTH_UNSET} if the length is still + * (i.e., requests where {@link DataSpec#length} equals {@link C#LENGTH_UNSET}) this value is + * the resolved length of the request, or {@link C#LENGTH_UNSET} if the length is still * unresolved. For all other requests, the value returned will be equal to the request's * {@link DataSpec#length}. */ long open(DataSpec dataSpec) throws IOException; - /** - * Reads up to {@code readLength} bytes of data and stores them into {@code buffer}, starting at - * index {@code offset}. - * - *
<p>
      If {@code readLength} is zero then 0 is returned. Otherwise, if no data is available because - * the end of the opened range has been reached, then {@link C#RESULT_END_OF_INPUT} is returned. - * Otherwise, the call will block until at least one byte of data has been read and the number of - * bytes read is returned. - * - * @param buffer The buffer into which the read data should be stored. - * @param offset The start offset into {@code buffer} at which data should be written. - * @param readLength The maximum number of bytes to read. - * @return The number of bytes read, or {@link C#RESULT_END_OF_INPUT} if no data is available - * because the end of the opened range has been reached. - * @throws IOException If an error occurs reading from the source. - */ - int read(byte[] buffer, int offset, int readLength) throws IOException; - /** * When the source is open, returns the {@link Uri} from which data is being read. The returned * {@link Uri} will be identical to the one passed {@link #open(DataSpec)} in the {@link DataSpec} @@ -89,21 +79,22 @@ interface Factory { * * @return The {@link Uri} from which data is being read, or null if the source is not open. */ - @Nullable Uri getUri(); + @Nullable + Uri getUri(); /** * When the source is open, returns the response headers associated with the last {@link #open} * call. Otherwise, returns an empty map. + * + *
<p>
      Key look-up in the returned map is case-insensitive. */ default Map> getResponseHeaders() { return Collections.emptyMap(); } /** - * Closes the source. - *
<p>
      - * Note: This method must be called even if the corresponding call to {@link #open(DataSpec)} - * threw an {@link IOException}. See {@link #open(DataSpec)} for more details. + * Closes the source. This method must be called even if the corresponding call to {@link + * #open(DataSpec)} threw an {@link IOException}. * * @throws IOException If an error occurs closing the source. */ diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSourceException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSourceException.java index e6b3ae2707..1ed9d47dd3 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSourceException.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSourceException.java @@ -15,27 +15,95 @@ */ package com.google.android.exoplayer2.upstream; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.PlaybackException; import java.io.IOException; -/** - * Used to specify reason of a DataSource error. - */ -public final class DataSourceException extends IOException { +/** Used to specify reason of a DataSource error. */ +public class DataSourceException extends IOException { + + /** + * Returns whether the given {@link IOException} was caused by a {@link DataSourceException} whose + * {@link #reason} is {@link PlaybackException#ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE} in its + * cause stack. + */ + public static boolean isCausedByPositionOutOfRange(IOException e) { + @Nullable Throwable cause = e; + while (cause != null) { + if (cause instanceof DataSourceException) { + int reason = ((DataSourceException) cause).reason; + if (reason == PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE) { + return true; + } + } + cause = cause.getCause(); + } + return false; + } + + /** + * Indicates that the {@link DataSpec#position starting position} of the request was outside the + * bounds of the data. + * + * @deprecated Use {@link PlaybackException#ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE}. + */ + @Deprecated + public static final int POSITION_OUT_OF_RANGE = + PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE; - public static final int POSITION_OUT_OF_RANGE = 0; + /** + * The reason of this {@link DataSourceException}, should be one of the {@code ERROR_CODE_IO_*} in + * {@link PlaybackException.ErrorCode}. + */ + public final @PlaybackException.ErrorCode int reason; + + /** + * Constructs a DataSourceException. + * + * @param reason Reason of the error, should be one of the {@code ERROR_CODE_IO_*} in {@link + * PlaybackException.ErrorCode}. + */ + public DataSourceException(@PlaybackException.ErrorCode int reason) { + this.reason = reason; + } /** - * The reason of this {@link DataSourceException}. It can only be {@link #POSITION_OUT_OF_RANGE}. + * Constructs a DataSourceException. + * + * @param cause The error cause. + * @param reason Reason of the error, should be one of the {@code ERROR_CODE_IO_*} in {@link + * PlaybackException.ErrorCode}. */ - public final int reason; + public DataSourceException(@Nullable Throwable cause, @PlaybackException.ErrorCode int reason) { + super(cause); + this.reason = reason; + } /** * Constructs a DataSourceException. * - * @param reason Reason of the error. It can only be {@link #POSITION_OUT_OF_RANGE}. + * @param message The error message. + * @param reason Reason of the error, should be one of the {@code ERROR_CODE_IO_*} in {@link + * PlaybackException.ErrorCode}. 
*/ - public DataSourceException(int reason) { + public DataSourceException(@Nullable String message, @PlaybackException.ErrorCode int reason) { + super(message); this.reason = reason; } + /** + * Constructs a DataSourceException. + * + * @param message The error message. + * @param cause The error cause. + * @param reason Reason of the error, should be one of the {@code ERROR_CODE_IO_*} in {@link + * PlaybackException.ErrorCode}. + */ + public DataSourceException( + @Nullable String message, + @Nullable Throwable cause, + @PlaybackException.ErrorCode int reason) { + super(message, cause); + this.reason = reason; + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSourceInputStream.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSourceInputStream.java index 6c4e77a90a..ddc68d137a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSourceInputStream.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSourceInputStream.java @@ -15,7 +15,6 @@ */ package com.google.android.exoplayer2.upstream; -import androidx.annotation.NonNull; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.util.Assertions; import java.io.IOException; @@ -45,17 +44,15 @@ public DataSourceInputStream(DataSource dataSource, DataSpec dataSpec) { singleByteArray = new byte[1]; } - /** - * Returns the total number of bytes that have been read or skipped. - */ + /** Returns the total number of bytes that have been read or skipped. */ public long bytesRead() { return totalBytesRead; } /** * Optional call to open the underlying {@link DataSource}. - *
<p>
      - * Calling this method does nothing if the {@link DataSource} is already open. Calling this + * + *
<p>
      Calling this method does nothing if the {@link DataSource} is already open. Calling this * method is optional, since the read and skip methods will automatically open the underlying * {@link DataSource} if it's not open already. * @@ -72,12 +69,12 @@ public int read() throws IOException { } @Override - public int read(@NonNull byte[] buffer) throws IOException { + public int read(byte[] buffer) throws IOException { return read(buffer, 0, buffer.length); } @Override - public int read(@NonNull byte[] buffer, int offset, int length) throws IOException { + public int read(byte[] buffer, int offset, int length) throws IOException { Assertions.checkState(!closed); checkOpened(); int bytesRead = dataSource.read(buffer, offset, length); @@ -103,5 +100,4 @@ private void checkOpened() throws IOException { opened = true; } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSourceUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSourceUtil.java new file mode 100644 index 0000000000..b0a31d9b7b --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSourceUtil.java @@ -0,0 +1,90 @@ +/* + * Copyright (C) 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.upstream; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import java.io.IOException; +import java.util.Arrays; + +/** Utility methods for {@link DataSource}. */ +public final class DataSourceUtil { + + private DataSourceUtil() {} + + /** + * Reads data from the specified opened {@link DataSource} until it ends, and returns a byte array + * containing the read data. + * + * @param dataSource The source from which to read. + * @return The concatenation of all read data. + * @throws IOException If an error occurs reading from the source. + */ + public static byte[] readToEnd(DataSource dataSource) throws IOException { + byte[] data = new byte[1024]; + int position = 0; + int bytesRead = 0; + while (bytesRead != C.RESULT_END_OF_INPUT) { + if (position == data.length) { + data = Arrays.copyOf(data, data.length * 2); + } + bytesRead = dataSource.read(data, position, data.length - position); + if (bytesRead != C.RESULT_END_OF_INPUT) { + position += bytesRead; + } + } + return Arrays.copyOf(data, position); + } + + /** + * Reads {@code length} bytes from the specified opened {@link DataSource}, and returns a byte + * array containing the read data. + * + * @param dataSource The source from which to read. + * @return The read data. + * @throws IOException If an error occurs reading from the source. + * @throws IllegalStateException If the end of the source was reached before {@code length} bytes + * could be read. 
+ */ + public static byte[] readExactly(DataSource dataSource, int length) throws IOException { + byte[] data = new byte[length]; + int position = 0; + while (position < length) { + int bytesRead = dataSource.read(data, position, data.length - position); + if (bytesRead == C.RESULT_END_OF_INPUT) { + throw new IllegalStateException( + "Not enough data could be read: " + position + " < " + length); + } + position += bytesRead; + } + return data; + } + + /** + * Closes a {@link DataSource}, suppressing any {@link IOException} that may occur. + * + * @param dataSource The {@link DataSource} to close. + */ + public static void closeQuietly(@Nullable DataSource dataSource) { + try { + if (dataSource != null) { + dataSource.close(); + } + } catch (IOException e) { + // Ignore. + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSpec.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSpec.java index acf5550427..7e5dd4c302 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSpec.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DataSpec.java @@ -15,34 +15,247 @@ */ package com.google.android.exoplayer2.upstream; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.net.Uri; import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.ExoPlayerLibraryInfo; import com.google.android.exoplayer2.util.Assertions; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; -import java.util.Arrays; +import java.lang.annotation.Target; import java.util.Collections; import java.util.HashMap; import java.util.Map; -/** - * Defines a region of data. - */ +/** Defines a region of data in a resource. */ public final class DataSpec { + static { + ExoPlayerLibraryInfo.registerModule("goog.exo.datasource"); + } + + /** + * Builds {@link DataSpec} instances. + * + *
<p>
      Use DataSpec#buildUpon() to obtain a builder representing an existing {@link DataSpec}. + */ + public static final class Builder { + + @Nullable private Uri uri; + private long uriPositionOffset; + private @HttpMethod int httpMethod; + @Nullable private byte[] httpBody; + private Map httpRequestHeaders; + private long position; + private long length; + @Nullable private String key; + private @Flags int flags; + @Nullable private Object customData; + + /** Creates a new instance with default values. */ + public Builder() { + httpMethod = HTTP_METHOD_GET; + httpRequestHeaders = Collections.emptyMap(); + length = C.LENGTH_UNSET; + } + + /** + * Creates a new instance to build upon the provided {@link DataSpec}. + * + * @param dataSpec The {@link DataSpec} to build upon. + */ + private Builder(DataSpec dataSpec) { + uri = dataSpec.uri; + uriPositionOffset = dataSpec.uriPositionOffset; + httpMethod = dataSpec.httpMethod; + httpBody = dataSpec.httpBody; + httpRequestHeaders = dataSpec.httpRequestHeaders; + position = dataSpec.position; + length = dataSpec.length; + key = dataSpec.key; + flags = dataSpec.flags; + customData = dataSpec.customData; + } + + /** + * Sets {@link DataSpec#uri}. + * + * @param uriString The {@link DataSpec#uri}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setUri(String uriString) { + this.uri = Uri.parse(uriString); + return this; + } + + /** + * Sets {@link DataSpec#uri}. + * + * @param uri The {@link DataSpec#uri}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setUri(Uri uri) { + this.uri = uri; + return this; + } + + /** + * Sets the {@link DataSpec#uriPositionOffset}. The default value is 0. + * + * @param uriPositionOffset The {@link DataSpec#uriPositionOffset}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setUriPositionOffset(long uriPositionOffset) { + this.uriPositionOffset = uriPositionOffset; + return this; + } + + /** + * Sets {@link DataSpec#httpMethod}. The default value is {@link #HTTP_METHOD_GET}. + * + * @param httpMethod The {@link DataSpec#httpMethod}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setHttpMethod(@HttpMethod int httpMethod) { + this.httpMethod = httpMethod; + return this; + } + + /** + * Sets {@link DataSpec#httpBody}. The default value is {@code null}. + * + * @param httpBody The {@link DataSpec#httpBody}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setHttpBody(@Nullable byte[] httpBody) { + this.httpBody = httpBody; + return this; + } + + /** + * Sets the {@link DataSpec#httpRequestHeaders}. The default value is an empty map. + * + *

      Note: {@code Range}, {@code Accept-Encoding} and {@code User-Agent} should not be set with + * this method, since they are set directly by {@link HttpDataSource} implementations. See + * {@link DataSpec#httpRequestHeaders} for more details. + * + * @param httpRequestHeaders The {@link DataSpec#httpRequestHeaders}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setHttpRequestHeaders(Map httpRequestHeaders) { + this.httpRequestHeaders = httpRequestHeaders; + return this; + } + + /** + * Sets the {@link DataSpec#position}. The default value is 0. + * + * @param position The {@link DataSpec#position}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setPosition(long position) { + this.position = position; + return this; + } + + /** + * Sets the {@link DataSpec#length}. The default value is {@link C#LENGTH_UNSET}. + * + * @param length The {@link DataSpec#length}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setLength(long length) { + this.length = length; + return this; + } + + /** + * Sets the {@link DataSpec#key}. The default value is {@code null}. + * + * @param key The {@link DataSpec#key}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setKey(@Nullable String key) { + this.key = key; + return this; + } + + /** + * Sets the {@link DataSpec#flags}. The default value is 0. + * + * @param flags The {@link DataSpec#flags}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setFlags(@Flags int flags) { + this.flags = flags; + return this; + } + + /** + * Sets the {@link DataSpec#customData}. The default value is {@code null}. + * + * @param customData The {@link DataSpec#customData}. + * @return The builder. + */ + @CanIgnoreReturnValue + public Builder setCustomData(@Nullable Object customData) { + this.customData = customData; + return this; + } + + /** + * Builds a {@link DataSpec} with the builder's current values. + * + * @return The build {@link DataSpec}. + * @throws IllegalStateException If {@link #setUri} has not been called. + */ + public DataSpec build() { + Assertions.checkStateNotNull(uri, "The uri must be set."); + return new DataSpec( + uri, + uriPositionOffset, + httpMethod, + httpBody, + httpRequestHeaders, + position, + length, + key, + flags, + customData); + } + } + /** * The flags that apply to any request for data. Possible flag values are {@link - * #FLAG_ALLOW_GZIP}, {@link #FLAG_DONT_CACHE_IF_LENGTH_UNKNOWN} and {@link - * #FLAG_ALLOW_CACHE_FRAGMENTATION}. + * #FLAG_ALLOW_GZIP}, {@link #FLAG_DONT_CACHE_IF_LENGTH_UNKNOWN}, {@link + * #FLAG_ALLOW_CACHE_FRAGMENTATION}, and {@link #FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef( flag = true, - value = {FLAG_ALLOW_GZIP, FLAG_DONT_CACHE_IF_LENGTH_UNKNOWN, FLAG_ALLOW_CACHE_FRAGMENTATION}) + value = { + FLAG_ALLOW_GZIP, + FLAG_DONT_CACHE_IF_LENGTH_UNKNOWN, + FLAG_ALLOW_CACHE_FRAGMENTATION, + FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED + }) public @interface Flags {} /** * Allows an underlying network stack to request that the server use gzip compression. @@ -57,138 +270,238 @@ public final class DataSpec { */ public static final int FLAG_ALLOW_GZIP = 1; /** Prevents caching if the length cannot be resolved when the {@link DataSource} is opened. 
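// Editor's sketch, not part of the patch: how the Builder added above replaces the deprecated
// constructors further down. The URI, offsets, cache key and flags are illustrative values.
DataSpec dataSpec =
    new DataSpec.Builder()
        .setUri("https://example.invalid/media.mp4")
        .setPosition(1024)
        .setLength(4096)
        .setKey("cacheKey")
        .setFlags(DataSpec.FLAG_ALLOW_GZIP)
        .build();
// buildUpon() returns a Builder pre-populated with this instance's values.
DataSpec withoutGzip = dataSpec.buildUpon().setFlags(0).build();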
*/ - public static final int FLAG_DONT_CACHE_IF_LENGTH_UNKNOWN = 1 << 1; // 2 + public static final int FLAG_DONT_CACHE_IF_LENGTH_UNKNOWN = 1 << 1; /** * Allows fragmentation of this request into multiple cache files, meaning a cache eviction policy * will be able to evict individual fragments of the data. Depending on the cache implementation, * setting this flag may also enable more concurrent access to the data (e.g. reading one fragment * whilst writing another). */ - public static final int FLAG_ALLOW_CACHE_FRAGMENTATION = 1 << 2; // 4 + public static final int FLAG_ALLOW_CACHE_FRAGMENTATION = 1 << 2; + /** + * Indicates there are known external factors that might prevent the data from being loaded at + * full network speed (e.g. server throttling or unfinished live media chunks). + */ + public static final int FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED = 1 << 3; /** - * The set of HTTP methods that are supported by ExoPlayer {@link HttpDataSource}s. One of {@link - * #HTTP_METHOD_GET}, {@link #HTTP_METHOD_POST} or {@link #HTTP_METHOD_HEAD}. + * HTTP methods supported by ExoPlayer {@link HttpDataSource}s. One of {@link #HTTP_METHOD_GET}, + * {@link #HTTP_METHOD_POST} or {@link #HTTP_METHOD_HEAD}. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({HTTP_METHOD_GET, HTTP_METHOD_POST, HTTP_METHOD_HEAD}) public @interface HttpMethod {} - + /** HTTP GET method. */ public static final int HTTP_METHOD_GET = 1; + /** HTTP POST method. */ public static final int HTTP_METHOD_POST = 2; + /** HTTP HEAD method. */ public static final int HTTP_METHOD_HEAD = 3; /** - * The source from which data should be read. + * Returns an uppercase HTTP method name (e.g., "GET", "POST", "HEAD") corresponding to the given + * {@link HttpMethod}. */ + public static String getStringForHttpMethod(@HttpMethod int httpMethod) { + switch (httpMethod) { + case HTTP_METHOD_GET: + return "GET"; + case HTTP_METHOD_POST: + return "POST"; + case HTTP_METHOD_HEAD: + return "HEAD"; + default: + // Never happens. + throw new IllegalStateException(); + } + } + + /** A {@link Uri} from which data belonging to the resource can be read. */ public final Uri uri; /** - * The HTTP method, which will be used by {@link HttpDataSource} when requesting this DataSpec. - * This value will be ignored by non-http {@link DataSource}s. + * The offset of the data located at {@link #uri} within the resource. + * + *
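// Editor's sketch, not part of the patch: the flags form a bitmask, so values combine with '|'
// and are queried via DataSpec#isFlagSet (unchanged further down in this file).
int flags = DataSpec.FLAG_ALLOW_GZIP | DataSpec.FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED;
DataSpec liveChunk =
    new DataSpec.Builder().setUri("https://example.invalid/live/chunk.ts").setFlags(flags).build();
// DefaultBandwidthMeter (later in this diff) ignores transfers whose DataSpec carries the new
// FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED flag when sampling bandwidth.
boolean excludedFromSampling = liveChunk.isFlagSet(DataSpec.FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED);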

      Equal to 0 unless {@link #uri} provides access to a subset of the resource. As an example, + * consider a resource that can be requested over the network and is 1000 bytes long. If {@link + * #uri} points to a local file that contains just bytes [200-300], then this field will be set to + * {@code 200}. + * + *
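// Editor's sketch, not part of the patch, restating the numeric example above: the full resource
// is 1000 bytes long and the uri exposes only bytes [200, 300] of it.
long uriPositionOffset = 200; // where the uri's data starts within the full resource
long position = 50;           // read position relative to the data at the uri
long absolutePositionInResource = uriPositionOffset + position; // 250, i.e. the deprecated absoluteStreamPosition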

      This field can be ignored except for in specific circumstances where the absolute position + * in the resource is required in a {@link DataSource} chain. One example is when a {@link + * DataSource} needs to decrypt the content as it's read. In this case the absolute position in + * the resource is typically needed to correctly initialize the decryption algorithm. + */ + public final long uriPositionOffset; + + /** + * The HTTP method to use when requesting the data. This value will be ignored by non-HTTP {@link + * DataSource} implementations. */ public final @HttpMethod int httpMethod; /** - * The HTTP request body, null otherwise. If the body is non-null, then httpBody.length will be - * non-zero. + * The HTTP request body, null otherwise. If the body is non-null, then {@code httpBody.length} + * will be non-zero. */ @Nullable public final byte[] httpBody; - /** Immutable map containing the headers to use in HTTP requests. */ + /** + * Additional HTTP headers to use when requesting the data. + * + *

      Note: This map is for additional headers specific to the data being requested. It does not + * include headers that are set directly by {@link HttpDataSource} implementations. In particular, + * this means the following headers are not included: + * + *

+   * <ul>
+   *   <li>{@code Range}: {@link HttpDataSource} implementations derive the {@code Range} header
+   *       from {@link #position} and {@link #length}.
+   *   <li>{@code Accept-Encoding}: {@link HttpDataSource} implementations derive the {@code
+   *       Accept-Encoding} header based on whether {@link #flags} includes {@link
+   *       #FLAG_ALLOW_GZIP}.
+   *   <li>{@code User-Agent}: {@link HttpDataSource} implementations set the {@code User-Agent}
+   *       header directly.
+   *   <li>Other headers set at the {@link HttpDataSource} layer. I.e., headers set using {@link
+   *       HttpDataSource#setRequestProperty(String, String)}, and using {@link
+   *       HttpDataSource.Factory#setDefaultRequestProperties(Map)}.
+   * </ul>
+   */ public final Map<String, String> httpRequestHeaders; - /** The absolute position of the data in the full stream. */ - public final long absoluteStreamPosition; /** - * The position of the data when read from {@link #uri}. - *

      - * Always equal to {@link #absoluteStreamPosition} unless the {@link #uri} defines the location - * of a subset of the underlying data. + * The absolute position of the data in the resource. + * + * @deprecated Use {@link #position} except for specific use cases where the absolute position + * within the resource is required within a {@link DataSource} chain. Where the absolute + * position is required, use {@code uriPositionOffset + position}. */ + @Deprecated public final long absoluteStreamPosition; + + /** The position of the data when read from {@link #uri}. */ public final long position; - /** - * The length of the data, or {@link C#LENGTH_UNSET}. - */ + + /** The length of the data, or {@link C#LENGTH_UNSET}. */ public final long length; + /** - * A key that uniquely identifies the original stream. Used for cache indexing. May be null if the - * data spec is not intended to be used in conjunction with a cache. + * A key that uniquely identifies the resource. Used for cache indexing. May be null if the data + * spec is not intended to be used in conjunction with a cache. */ @Nullable public final String key; + /** Request {@link Flags flags}. */ public final @Flags int flags; /** - * Construct a data spec for the given uri and with {@link #key} set to null. + * Application specific data. + * + *

      This field is intended for advanced use cases in which applications require the ability to + * attach custom data to {@link DataSpec} instances. The custom data should be immutable. + */ + @Nullable public final Object customData; + + /** + * Constructs an instance. * * @param uri {@link #uri}. */ public DataSpec(Uri uri) { - this(uri, 0); + this(uri, /* position= */ 0, /* length= */ C.LENGTH_UNSET); } /** - * Construct a data spec for the given uri and with {@link #key} set to null. + * Constructs an instance. * * @param uri {@link #uri}. + * @param position {@link #position}. + * @param length {@link #length}. + */ + public DataSpec(Uri uri, long position, long length) { + this( + uri, + /* uriPositionOffset= */ 0, + HTTP_METHOD_GET, + /* httpBody= */ null, + /* httpRequestHeaders= */ Collections.emptyMap(), + position, + length, + /* key= */ null, + /* flags= */ 0, + /* customData= */ null); + } + + /** + * Constructs an instance. + * + * @deprecated Use {@link Builder}. + * @param uri {@link #uri}. * @param flags {@link #flags}. */ + @SuppressWarnings("deprecation") + @Deprecated public DataSpec(Uri uri, @Flags int flags) { - this(uri, 0, C.LENGTH_UNSET, null, flags); + this(uri, /* position= */ 0, C.LENGTH_UNSET, /* key= */ null, flags); } /** - * Construct a data spec where {@link #position} equals {@link #absoluteStreamPosition}. + * Constructs an instance. * + * @deprecated Use {@link Builder}. * @param uri {@link #uri}. - * @param absoluteStreamPosition {@link #absoluteStreamPosition}, equal to {@link #position}. + * @param position {@link #position}. * @param length {@link #length}. * @param key {@link #key}. */ - public DataSpec(Uri uri, long absoluteStreamPosition, long length, @Nullable String key) { - this(uri, absoluteStreamPosition, absoluteStreamPosition, length, key, 0); + @SuppressWarnings("deprecation") + @Deprecated + public DataSpec(Uri uri, long position, long length, @Nullable String key) { + this(uri, position, position, length, key, /* flags= */ 0); } /** - * Construct a data spec where {@link #position} equals {@link #absoluteStreamPosition}. + * Constructs an instance. * + * @deprecated Use {@link Builder}. * @param uri {@link #uri}. - * @param absoluteStreamPosition {@link #absoluteStreamPosition}, equal to {@link #position}. + * @param position {@link #position}. * @param length {@link #length}. * @param key {@link #key}. * @param flags {@link #flags}. */ - public DataSpec( - Uri uri, long absoluteStreamPosition, long length, @Nullable String key, @Flags int flags) { - this(uri, absoluteStreamPosition, absoluteStreamPosition, length, key, flags); + @SuppressWarnings("deprecation") + @Deprecated + public DataSpec(Uri uri, long position, long length, @Nullable String key, @Flags int flags) { + this(uri, position, position, length, key, flags); } /** - * Construct a data spec where {@link #position} equals {@link #absoluteStreamPosition} and has - * request headers. + * Constructs an instance. * + * @deprecated Use {@link Builder}. * @param uri {@link #uri}. - * @param absoluteStreamPosition {@link #absoluteStreamPosition}, equal to {@link #position}. + * @param position {@link #position}, equal to {@link #position}. * @param length {@link #length}. * @param key {@link #key}. * @param flags {@link #flags}. 
* @param httpRequestHeaders {@link #httpRequestHeaders} */ + @SuppressWarnings("deprecation") + @Deprecated public DataSpec( Uri uri, - long absoluteStreamPosition, + long position, long length, @Nullable String key, @Flags int flags, Map httpRequestHeaders) { this( uri, - inferHttpMethod(null), - null, - absoluteStreamPosition, - absoluteStreamPosition, + HTTP_METHOD_GET, + /* httpBody= */ null, + position, + position, length, key, flags, @@ -196,15 +509,18 @@ public DataSpec( } /** - * Construct a data spec where {@link #position} may differ from {@link #absoluteStreamPosition}. + * Constructs an instance where {@link #uriPositionOffset} may be non-zero. * + * @deprecated Use {@link Builder}. * @param uri {@link #uri}. - * @param absoluteStreamPosition {@link #absoluteStreamPosition}. + * @param absoluteStreamPosition The sum of {@link #uriPositionOffset} and {@link #position}. * @param position {@link #position}. * @param length {@link #length}. * @param key {@link #key}. * @param flags {@link #flags}. */ + @SuppressWarnings("deprecation") + @Deprecated public DataSpec( Uri uri, long absoluteStreamPosition, @@ -212,23 +528,28 @@ public DataSpec( long length, @Nullable String key, @Flags int flags) { - this(uri, null, absoluteStreamPosition, position, length, key, flags); + this(uri, /* postBody= */ null, absoluteStreamPosition, position, length, key, flags); } /** - * Construct a data spec by inferring the {@link #httpMethod} based on the {@code postBody} - * parameter. If postBody is non-null, then httpMethod is set to {@link #HTTP_METHOD_POST}. If - * postBody is null, then httpMethod is set to {@link #HTTP_METHOD_GET}. + * Construct a instance where {@link #uriPositionOffset} may be non-zero. The {@link #httpMethod} + * is inferred from {@code postBody}. If {@code postBody} is non-null then {@link #httpMethod} is + * set to {@link #HTTP_METHOD_POST}. If {@code postBody} is null then {@link #httpMethod} is set + * to {@link #HTTP_METHOD_GET}. * + * @deprecated Use {@link Builder}. Note that the httpMethod must be set explicitly for the + * Builder. * @param uri {@link #uri}. * @param postBody {@link #httpBody} The body of the HTTP request, which is also used to infer the * {@link #httpMethod}. - * @param absoluteStreamPosition {@link #absoluteStreamPosition}. + * @param absoluteStreamPosition The sum of {@link #uriPositionOffset} and {@link #position}. * @param position {@link #position}. * @param length {@link #length}. * @param key {@link #key}. * @param flags {@link #flags}. */ + @SuppressWarnings("deprecation") + @Deprecated public DataSpec( Uri uri, @Nullable byte[] postBody, @@ -239,7 +560,7 @@ public DataSpec( @Flags int flags) { this( uri, - /* httpMethod= */ inferHttpMethod(postBody), + /* httpMethod= */ postBody != null ? HTTP_METHOD_POST : HTTP_METHOD_GET, /* httpBody= */ postBody, absoluteStreamPosition, position, @@ -249,17 +570,20 @@ public DataSpec( } /** - * Construct a data spec where {@link #position} may differ from {@link #absoluteStreamPosition}. + * Construct a instance where {@link #uriPositionOffset} may be non-zero. * + * @deprecated Use {@link Builder}. * @param uri {@link #uri}. * @param httpMethod {@link #httpMethod}. * @param httpBody {@link #httpBody}. - * @param absoluteStreamPosition {@link #absoluteStreamPosition}. + * @param absoluteStreamPosition The sum of {@link #uriPositionOffset} and {@link #position}. * @param position {@link #position}. * @param length {@link #length}. * @param key {@link #key}. * @param flags {@link #flags}. 
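// Editor's sketch, not part of the patch: the migration the @deprecated tags above point to.
// A legacy call such as
//     new DataSpec(uri, /* position= */ 100, /* length= */ 200, "cacheKey", DataSpec.FLAG_ALLOW_GZIP)
// becomes (with "uri" assumed to be an android.net.Uri already in scope):
DataSpec migrated =
    new DataSpec.Builder()
        .setUri(uri)
        .setPosition(100)
        .setLength(200)
        .setKey("cacheKey")
        .setFlags(DataSpec.FLAG_ALLOW_GZIP)
        .build();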
*/ + @SuppressWarnings("deprecation") + @Deprecated public DataSpec( Uri uri, @HttpMethod int httpMethod, @@ -282,18 +606,20 @@ public DataSpec( } /** - * Construct a data spec with request parameters to be used as HTTP headers inside HTTP requests. + * Construct a instance where {@link #uriPositionOffset} may be non-zero. * + * @deprecated Use {@link Builder}. * @param uri {@link #uri}. * @param httpMethod {@link #httpMethod}. * @param httpBody {@link #httpBody}. - * @param absoluteStreamPosition {@link #absoluteStreamPosition}. + * @param absoluteStreamPosition The sum of {@link #uriPositionOffset} and {@link #position}. * @param position {@link #position}. * @param length {@link #length}. * @param key {@link #key}. * @param flags {@link #flags}. * @param httpRequestHeaders {@link #httpRequestHeaders}. */ + @Deprecated public DataSpec( Uri uri, @HttpMethod int httpMethod, @@ -304,18 +630,47 @@ public DataSpec( @Nullable String key, @Flags int flags, Map httpRequestHeaders) { - Assertions.checkArgument(absoluteStreamPosition >= 0); + this( + uri, + /* uriPositionOffset= */ absoluteStreamPosition - position, + httpMethod, + httpBody, + httpRequestHeaders, + position, + length, + key, + flags, + /* customData= */ null); + } + + @SuppressWarnings("deprecation") + private DataSpec( + Uri uri, + long uriPositionOffset, + @HttpMethod int httpMethod, + @Nullable byte[] httpBody, + Map httpRequestHeaders, + long position, + long length, + @Nullable String key, + @Flags int flags, + @Nullable Object customData) { + // TODO: Replace this assertion with a stricter one checking "uriPositionOffset >= 0", after + // validating there are no violations in ExoPlayer and 1P apps. + Assertions.checkArgument(uriPositionOffset + position >= 0); Assertions.checkArgument(position >= 0); Assertions.checkArgument(length > 0 || length == C.LENGTH_UNSET); this.uri = uri; + this.uriPositionOffset = uriPositionOffset; this.httpMethod = httpMethod; - this.httpBody = (httpBody != null && httpBody.length != 0) ? httpBody : null; - this.absoluteStreamPosition = absoluteStreamPosition; + this.httpBody = httpBody != null && httpBody.length != 0 ? httpBody : null; + this.httpRequestHeaders = Collections.unmodifiableMap(new HashMap<>(httpRequestHeaders)); this.position = position; + this.absoluteStreamPosition = uriPositionOffset + position; this.length = length; this.key = key; this.flags = flags; - this.httpRequestHeaders = Collections.unmodifiableMap(new HashMap<>(httpRequestHeaders)); + this.customData = customData; } /** @@ -327,50 +682,17 @@ public boolean isFlagSet(@Flags int flag) { return (this.flags & flag) == flag; } - @Override - public String toString() { - return "DataSpec[" - + getHttpMethodString() - + " " - + uri - + ", " - + Arrays.toString(httpBody) - + ", " - + absoluteStreamPosition - + ", " - + position - + ", " - + length - + ", " - + key - + ", " - + flags - + "]"; - } - /** - * Returns an uppercase HTTP method name (e.g., "GET", "POST", "HEAD") corresponding to the {@link - * #httpMethod}. + * Returns the uppercase HTTP method name (e.g., "GET", "POST", "HEAD") corresponding to the + * {@link #httpMethod}. */ public final String getHttpMethodString() { return getStringForHttpMethod(httpMethod); } - /** - * Returns an uppercase HTTP method name (e.g., "GET", "POST", "HEAD") corresponding to the {@code - * httpMethod}. 
- */ - public static String getStringForHttpMethod(@HttpMethod int httpMethod) { - switch (httpMethod) { - case HTTP_METHOD_GET: - return "GET"; - case HTTP_METHOD_POST: - return "POST"; - case HTTP_METHOD_HEAD: - return "HEAD"; - default: - throw new AssertionError(httpMethod); - } + /** Returns a {@link DataSpec.Builder} initialized with the values of this instance. */ + public DataSpec.Builder buildUpon() { + return new Builder(this); } /** @@ -397,14 +719,15 @@ public DataSpec subrange(long offset, long length) { } else { return new DataSpec( uri, + uriPositionOffset, httpMethod, httpBody, - absoluteStreamPosition + offset, + httpRequestHeaders, position + offset, length, key, flags, - httpRequestHeaders); + customData); } } @@ -417,62 +740,76 @@ public DataSpec subrange(long offset, long length) { public DataSpec withUri(Uri uri) { return new DataSpec( uri, + uriPositionOffset, httpMethod, httpBody, - absoluteStreamPosition, + httpRequestHeaders, position, length, key, flags, - httpRequestHeaders); + customData); } /** - * Returns a copy of this data spec with the specified request headers. + * Returns a copy of this data spec with the specified HTTP request headers. Headers already in + * the data spec are not copied to the new instance. * - * @param requestHeaders The HTTP request headers. - * @return The copied data spec with the specified request headers. + * @param httpRequestHeaders The HTTP request headers. + * @return The copied data spec with the specified HTTP request headers. */ - public DataSpec withRequestHeaders(Map requestHeaders) { + public DataSpec withRequestHeaders(Map httpRequestHeaders) { return new DataSpec( uri, + uriPositionOffset, httpMethod, httpBody, - absoluteStreamPosition, + httpRequestHeaders, position, length, key, flags, - requestHeaders); + customData); } /** - * Returns a copy this data spec with additional request headers. - * - *
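// Editor's sketch, not part of the patch: the copy helpers above (subrange, withUri,
// withRequestHeaders, withAdditionalHeaders) now also preserve the new uriPositionOffset and
// customData fields in the copy. "dataSpec" is assumed to be the instance built earlier.
DataSpec first100Bytes = dataSpec.subrange(/* offset= */ 0, /* length= */ 100);
DataSpec mirrored = dataSpec.withUri(Uri.parse("https://mirror.example.invalid/media.mp4"));
DataSpec withAuth =
    dataSpec.withAdditionalHeaders(Collections.singletonMap("Authorization", "Bearer <token>"));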

      Note: Values in {@code requestHeaders} will overwrite values with the same header key that - * were previously set in this instance's {@code #httpRequestHeaders}. + * Returns a copy this data spec with additional HTTP request headers. Headers in {@code + * additionalHttpRequestHeaders} will overwrite any headers already in the data spec that have the + * same keys. * - * @param requestHeaders The additional HTTP request headers. - * @return The copied data with the additional HTTP request headers. + * @param additionalHttpRequestHeaders The additional HTTP request headers. + * @return The copied data spec with the additional HTTP request headers. */ - public DataSpec withAdditionalHeaders(Map requestHeaders) { - Map totalHeaders = new HashMap<>(this.httpRequestHeaders); - totalHeaders.putAll(requestHeaders); - + public DataSpec withAdditionalHeaders(Map additionalHttpRequestHeaders) { + Map httpRequestHeaders = new HashMap<>(this.httpRequestHeaders); + httpRequestHeaders.putAll(additionalHttpRequestHeaders); return new DataSpec( uri, + uriPositionOffset, httpMethod, httpBody, - absoluteStreamPosition, + httpRequestHeaders, position, length, key, flags, - totalHeaders); + customData); } - @HttpMethod - private static int inferHttpMethod(@Nullable byte[] postBody) { - return postBody != null ? HTTP_METHOD_POST : HTTP_METHOD_GET; + @Override + public String toString() { + return "DataSpec[" + + getHttpMethodString() + + " " + + uri + + ", " + + position + + ", " + + length + + ", " + + key + + ", " + + flags + + "]"; } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultAllocator.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultAllocator.java index 71e2d8d19f..7caaf4d3f8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultAllocator.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultAllocator.java @@ -15,26 +15,27 @@ */ package com.google.android.exoplayer2.upstream; +import static java.lang.Math.max; + +import androidx.annotation.Nullable; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; import java.util.Arrays; +import org.checkerframework.checker.nullness.compatqual.NullableType; -/** - * Default implementation of {@link Allocator}. - */ +/** Default implementation of {@link Allocator}. */ public final class DefaultAllocator implements Allocator { private static final int AVAILABLE_EXTRA_CAPACITY = 100; private final boolean trimOnReset; private final int individualAllocationSize; - private final byte[] initialAllocationBlock; - private final Allocation[] singleAllocationReleaseHolder; + @Nullable private final byte[] initialAllocationBlock; private int targetBufferSize; private int allocatedCount; private int availableCount; - private Allocation[] availableAllocations; + private @NullableType Allocation[] availableAllocations; /** * Constructs an instance without creating any {@link Allocation}s up front. @@ -49,16 +50,16 @@ public DefaultAllocator(boolean trimOnReset, int individualAllocationSize) { /** * Constructs an instance with some {@link Allocation}s created up front. - *

      - * Note: {@link Allocation}s created up front will never be discarded by {@link #trim()}. + * + *

      Note: {@link Allocation}s created up front will never be discarded by {@link #trim()}. * * @param trimOnReset Whether memory is freed when the allocator is reset. Should be true unless * the allocator will be re-used by multiple player instances. * @param individualAllocationSize The length of each individual {@link Allocation}. * @param initialAllocationCount The number of allocations to create up front. */ - public DefaultAllocator(boolean trimOnReset, int individualAllocationSize, - int initialAllocationCount) { + public DefaultAllocator( + boolean trimOnReset, int individualAllocationSize, int initialAllocationCount) { Assertions.checkArgument(individualAllocationSize > 0); Assertions.checkArgument(initialAllocationCount >= 0); this.trimOnReset = trimOnReset; @@ -74,7 +75,6 @@ public DefaultAllocator(boolean trimOnReset, int individualAllocationSize, } else { initialAllocationBlock = null; } - singleAllocationReleaseHolder = new Allocation[1]; } public synchronized void reset() { @@ -96,30 +96,35 @@ public synchronized Allocation allocate() { allocatedCount++; Allocation allocation; if (availableCount > 0) { - allocation = availableAllocations[--availableCount]; + allocation = Assertions.checkNotNull(availableAllocations[--availableCount]); availableAllocations[availableCount] = null; } else { allocation = new Allocation(new byte[individualAllocationSize], 0); + if (allocatedCount > availableAllocations.length) { + // Make availableAllocations be large enough to contain all allocations made by this + // allocator so that release() does not need to grow the availableAllocations array. See + // [Internal ref: b/209801945]. + availableAllocations = Arrays.copyOf(availableAllocations, availableAllocations.length * 2); + } } return allocation; } @Override public synchronized void release(Allocation allocation) { - singleAllocationReleaseHolder[0] = allocation; - release(singleAllocationReleaseHolder); + availableAllocations[availableCount++] = allocation; + allocatedCount--; + // Wake up threads waiting for the allocated size to drop. + notifyAll(); } @Override - public synchronized void release(Allocation[] allocations) { - if (availableCount + allocations.length >= availableAllocations.length) { - availableAllocations = Arrays.copyOf(availableAllocations, - Math.max(availableAllocations.length * 2, availableCount + allocations.length)); + public synchronized void release(@Nullable AllocationNode allocationNode) { + while (allocationNode != null) { + availableAllocations[availableCount++] = allocationNode.getAllocation(); + allocatedCount--; + allocationNode = allocationNode.next(); } - for (Allocation allocation : allocations) { - availableAllocations[availableCount++] = allocation; - } - allocatedCount -= allocations.length; // Wake up threads waiting for the allocated size to drop. notifyAll(); } @@ -127,7 +132,7 @@ public synchronized void release(Allocation[] allocations) { @Override public synchronized void trim() { int targetAllocationCount = Util.ceilDivide(targetBufferSize, individualAllocationSize); - int targetAvailableCount = Math.max(0, targetAllocationCount - allocatedCount); + int targetAvailableCount = max(0, targetAllocationCount - allocatedCount); if (targetAvailableCount >= availableCount) { // We're already at or below the target. 
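// Editor's sketch, not part of the patch: the allocate/release contract exercised by the changes
// above. The 64 KiB individual allocation size is an illustrative value.
DefaultAllocator allocator =
    new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 64 * 1024);
Allocation allocation = allocator.allocate();            // reuses a pooled block or creates a new one
int allocatedBytes = allocator.getTotalBytesAllocated(); // 65536 after the call above
allocator.release(allocation);                           // returns the block to the available pool
allocator.trim();                                        // discards pooled blocks above the target buffer size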
return; @@ -140,11 +145,11 @@ public synchronized void trim() { int lowIndex = 0; int highIndex = availableCount - 1; while (lowIndex <= highIndex) { - Allocation lowAllocation = availableAllocations[lowIndex]; + Allocation lowAllocation = Assertions.checkNotNull(availableAllocations[lowIndex]); if (lowAllocation.data == initialAllocationBlock) { lowIndex++; } else { - Allocation highAllocation = availableAllocations[highIndex]; + Allocation highAllocation = Assertions.checkNotNull(availableAllocations[highIndex]); if (highAllocation.data != initialAllocationBlock) { highIndex--; } else { @@ -154,7 +159,7 @@ public synchronized void trim() { } } // lowIndex is the index of the first allocation not backed by an initial block. - targetAvailableCount = Math.max(targetAvailableCount, lowIndex); + targetAvailableCount = max(targetAvailableCount, lowIndex); if (targetAvailableCount >= availableCount) { // We're already at or below the target. return; @@ -175,5 +180,4 @@ public synchronized int getTotalBytesAllocated() { public int getIndividualAllocationLength() { return individualAllocationSize; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultBandwidthMeter.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultBandwidthMeter.java index 6cbc17d3e0..7803f316e2 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultBandwidthMeter.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultBandwidthMeter.java @@ -15,27 +15,21 @@ */ package com.google.android.exoplayer2.upstream; -import android.content.BroadcastReceiver; import android.content.Context; -import android.content.Intent; -import android.content.IntentFilter; -import android.net.ConnectivityManager; import android.os.Handler; -import android.os.Looper; -import android.util.SparseArray; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.upstream.BandwidthMeter.EventListener.EventDispatcher; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Clock; -import com.google.android.exoplayer2.util.EventDispatcher; -import com.google.android.exoplayer2.util.SlidingPercentile; +import com.google.android.exoplayer2.util.NetworkTypeObserver; import com.google.android.exoplayer2.util.Util; -import java.lang.ref.WeakReference; -import java.util.ArrayList; -import java.util.Collections; +import com.google.common.base.Ascii; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.util.HashMap; import java.util.Map; -import org.checkerframework.checker.nullness.qual.MonotonicNonNull; /** * Estimates bandwidth by listening to data transfers. @@ -47,28 +41,29 @@ */ public final class DefaultBandwidthMeter implements BandwidthMeter, TransferListener { - /** - * Country groups used to determine the default initial bitrate estimate. The group assignment for - * each country is an array of group indices for [Wifi, 2G, 3G, 4G]. - */ - public static final Map DEFAULT_INITIAL_BITRATE_COUNTRY_GROUPS = - createInitialBitrateCountryGroupAssignment(); - /** Default initial Wifi bitrate estimate in bits per second. 
*/ - public static final long[] DEFAULT_INITIAL_BITRATE_ESTIMATES_WIFI = - new long[] {5_800_000, 3_500_000, 1_900_000, 1_000_000, 520_000}; + public static final ImmutableList DEFAULT_INITIAL_BITRATE_ESTIMATES_WIFI = + ImmutableList.of(4_400_000L, 3_200_000L, 2_300_000L, 1_600_000L, 810_000L); /** Default initial 2G bitrate estimates in bits per second. */ - public static final long[] DEFAULT_INITIAL_BITRATE_ESTIMATES_2G = - new long[] {204_000, 154_000, 139_000, 122_000, 102_000}; + public static final ImmutableList DEFAULT_INITIAL_BITRATE_ESTIMATES_2G = + ImmutableList.of(1_400_000L, 990_000L, 730_000L, 510_000L, 230_000L); /** Default initial 3G bitrate estimates in bits per second. */ - public static final long[] DEFAULT_INITIAL_BITRATE_ESTIMATES_3G = - new long[] {2_200_000, 1_150_000, 810_000, 640_000, 450_000}; + public static final ImmutableList DEFAULT_INITIAL_BITRATE_ESTIMATES_3G = + ImmutableList.of(2_100_000L, 1_400_000L, 1_000_000L, 890_000L, 640_000L); /** Default initial 4G bitrate estimates in bits per second. */ - public static final long[] DEFAULT_INITIAL_BITRATE_ESTIMATES_4G = - new long[] {4_900_000, 2_300_000, 1_500_000, 970_000, 540_000}; + public static final ImmutableList DEFAULT_INITIAL_BITRATE_ESTIMATES_4G = + ImmutableList.of(2_600_000L, 1_700_000L, 1_300_000L, 1_000_000L, 700_000L); + + /** Default initial 5G-NSA bitrate estimates in bits per second. */ + public static final ImmutableList DEFAULT_INITIAL_BITRATE_ESTIMATES_5G_NSA = + ImmutableList.of(5_700_000L, 3_700_000L, 2_300_000L, 1_700_000L, 990_000L); + + /** Default initial 5G-SA bitrate estimates in bits per second. */ + public static final ImmutableList DEFAULT_INITIAL_BITRATE_ESTIMATES_5G_SA = + ImmutableList.of(2_800_000L, 1_800_000L, 1_400_000L, 1_100_000L, 870_000L); /** * Default initial bitrate estimate used when the device is offline or the network type cannot be @@ -79,6 +74,37 @@ public final class DefaultBandwidthMeter implements BandwidthMeter, TransferList /** Default maximum weight for the sliding window. */ public static final int DEFAULT_SLIDING_WINDOW_MAX_WEIGHT = 2000; + /** + * Index for the Wifi group index in the array returned by {@link + * #getInitialBitrateCountryGroupAssignment}. + */ + private static final int COUNTRY_GROUP_INDEX_WIFI = 0; + /** + * Index for the 2G group index in the array returned by {@link + * #getInitialBitrateCountryGroupAssignment}. + */ + private static final int COUNTRY_GROUP_INDEX_2G = 1; + /** + * Index for the 3G group index in the array returned by {@link + * #getInitialBitrateCountryGroupAssignment}. + */ + private static final int COUNTRY_GROUP_INDEX_3G = 2; + /** + * Index for the 4G group index in the array returned by {@link + * #getInitialBitrateCountryGroupAssignment}. + */ + private static final int COUNTRY_GROUP_INDEX_4G = 3; + /** + * Index for the 5G-NSA group index in the array returned by {@link + * #getInitialBitrateCountryGroupAssignment}. + */ + private static final int COUNTRY_GROUP_INDEX_5G_NSA = 4; + /** + * Index for the 5G-SA group index in the array returned by {@link + * #getInitialBitrateCountryGroupAssignment}. + */ + private static final int COUNTRY_GROUP_INDEX_5G_SA = 5; + @Nullable private static DefaultBandwidthMeter singletonInstance; /** Builder for a bandwidth meter. 
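// Editor's sketch, not part of the patch: how the COUNTRY_GROUP_INDEX_* constants above index a
// country group assignment. The assignment array below is an illustrative value.
int[] groupIndices = {0, 1, 3, 2, 2, 2};
long wifiDefault = DEFAULT_INITIAL_BITRATE_ESTIMATES_WIFI.get(groupIndices[COUNTRY_GROUP_INDEX_WIFI]); // index 0 -> 4_400_000 bps
long fourGDefault = DEFAULT_INITIAL_BITRATE_ESTIMATES_4G.get(groupIndices[COUNTRY_GROUP_INDEX_4G]);    // index 2 -> 1_300_000 bps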
*/ @@ -86,7 +112,7 @@ public static final class Builder { @Nullable private final Context context; - private SparseArray initialBitrateEstimates; + private Map initialBitrateEstimates; private int slidingWindowMaxWeight; private Clock clock; private boolean resetOnNetworkTypeChange; @@ -111,6 +137,7 @@ public Builder(Context context) { * @param slidingWindowMaxWeight The maximum weight for the sliding window. * @return This builder. */ + @CanIgnoreReturnValue public Builder setSlidingWindowMaxWeight(int slidingWindowMaxWeight) { this.slidingWindowMaxWeight = slidingWindowMaxWeight; return this; @@ -123,9 +150,10 @@ public Builder setSlidingWindowMaxWeight(int slidingWindowMaxWeight) { * @param initialBitrateEstimate The initial bitrate estimate in bits per second. * @return This builder. */ + @CanIgnoreReturnValue public Builder setInitialBitrateEstimate(long initialBitrateEstimate) { - for (int i = 0; i < initialBitrateEstimates.size(); i++) { - initialBitrateEstimates.setValueAt(i, initialBitrateEstimate); + for (Integer networkType : initialBitrateEstimates.keySet()) { + setInitialBitrateEstimate(networkType, initialBitrateEstimate); } return this; } @@ -138,6 +166,7 @@ public Builder setInitialBitrateEstimate(long initialBitrateEstimate) { * @param initialBitrateEstimate The initial bitrate estimate in bits per second. * @return This builder. */ + @CanIgnoreReturnValue public Builder setInitialBitrateEstimate( @C.NetworkType int networkType, long initialBitrateEstimate) { initialBitrateEstimates.put(networkType, initialBitrateEstimate); @@ -152,9 +181,10 @@ public Builder setInitialBitrateEstimate( * estimates should be used. * @return This builder. */ + @CanIgnoreReturnValue public Builder setInitialBitrateEstimate(String countryCode) { initialBitrateEstimates = - getInitialBitrateEstimatesForCountry(Util.toUpperInvariant(countryCode)); + getInitialBitrateEstimatesForCountry(Ascii.toUpperCase(countryCode)); return this; } @@ -165,6 +195,7 @@ public Builder setInitialBitrateEstimate(String countryCode) { * @param clock The clock used to estimate bandwidth from data transfers. * @return This builder. */ + @CanIgnoreReturnValue public Builder setClock(Clock clock) { this.clock = clock; return this; @@ -176,6 +207,7 @@ public Builder setClock(Clock clock) { * @param resetOnNetworkTypeChange Whether to reset if the network type changes. * @return This builder. */ + @CanIgnoreReturnValue public Builder setResetOnNetworkTypeChange(boolean resetOnNetworkTypeChange) { this.resetOnNetworkTypeChange = resetOnNetworkTypeChange; return this; @@ -195,27 +227,34 @@ public DefaultBandwidthMeter build() { resetOnNetworkTypeChange); } - private static SparseArray getInitialBitrateEstimatesForCountry(String countryCode) { - int[] groupIndices = getCountryGroupIndices(countryCode); - SparseArray result = new SparseArray<>(/* initialCapacity= */ 6); - result.append(C.NETWORK_TYPE_UNKNOWN, DEFAULT_INITIAL_BITRATE_ESTIMATE); - result.append(C.NETWORK_TYPE_WIFI, DEFAULT_INITIAL_BITRATE_ESTIMATES_WIFI[groupIndices[0]]); - result.append(C.NETWORK_TYPE_2G, DEFAULT_INITIAL_BITRATE_ESTIMATES_2G[groupIndices[1]]); - result.append(C.NETWORK_TYPE_3G, DEFAULT_INITIAL_BITRATE_ESTIMATES_3G[groupIndices[2]]); - result.append(C.NETWORK_TYPE_4G, DEFAULT_INITIAL_BITRATE_ESTIMATES_4G[groupIndices[3]]); - // Assume default Wifi and 4G bitrate for Ethernet and 5G, respectively, to prevent using the - // slower fallback. 
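// Editor's sketch, not part of the patch: typical construction through the Builder above.
// "context" is assumed to be an android.content.Context supplied by the caller.
DefaultBandwidthMeter bandwidthMeter =
    new DefaultBandwidthMeter.Builder(context)
        .setInitialBitrateEstimate(C.NETWORK_TYPE_WIFI, 4_400_000L)
        .setResetOnNetworkTypeChange(true)
        .build();
long estimateBitsPerSecond = bandwidthMeter.getBitrateEstimate();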
- result.append( - C.NETWORK_TYPE_ETHERNET, DEFAULT_INITIAL_BITRATE_ESTIMATES_WIFI[groupIndices[0]]); - result.append(C.NETWORK_TYPE_5G, DEFAULT_INITIAL_BITRATE_ESTIMATES_4G[groupIndices[3]]); + private static Map getInitialBitrateEstimatesForCountry(String countryCode) { + int[] groupIndices = getInitialBitrateCountryGroupAssignment(countryCode); + Map result = new HashMap<>(/* initialCapacity= */ 8); + result.put(C.NETWORK_TYPE_UNKNOWN, DEFAULT_INITIAL_BITRATE_ESTIMATE); + result.put( + C.NETWORK_TYPE_WIFI, + DEFAULT_INITIAL_BITRATE_ESTIMATES_WIFI.get(groupIndices[COUNTRY_GROUP_INDEX_WIFI])); + result.put( + C.NETWORK_TYPE_2G, + DEFAULT_INITIAL_BITRATE_ESTIMATES_2G.get(groupIndices[COUNTRY_GROUP_INDEX_2G])); + result.put( + C.NETWORK_TYPE_3G, + DEFAULT_INITIAL_BITRATE_ESTIMATES_3G.get(groupIndices[COUNTRY_GROUP_INDEX_3G])); + result.put( + C.NETWORK_TYPE_4G, + DEFAULT_INITIAL_BITRATE_ESTIMATES_4G.get(groupIndices[COUNTRY_GROUP_INDEX_4G])); + result.put( + C.NETWORK_TYPE_5G_NSA, + DEFAULT_INITIAL_BITRATE_ESTIMATES_5G_NSA.get(groupIndices[COUNTRY_GROUP_INDEX_5G_NSA])); + result.put( + C.NETWORK_TYPE_5G_SA, + DEFAULT_INITIAL_BITRATE_ESTIMATES_5G_SA.get(groupIndices[COUNTRY_GROUP_INDEX_5G_SA])); + // Assume default Wifi speed for Ethernet to prevent using the slower fallback. + result.put( + C.NETWORK_TYPE_ETHERNET, + DEFAULT_INITIAL_BITRATE_ESTIMATES_WIFI.get(groupIndices[COUNTRY_GROUP_INDEX_WIFI])); return result; } - - private static int[] getCountryGroupIndices(String countryCode) { - int[] groupIndices = DEFAULT_INITIAL_BITRATE_COUNTRY_GROUPS.get(countryCode); - // Assume median group if not found. - return groupIndices == null ? new int[] {2, 2, 2, 2} : groupIndices; - } } /** @@ -234,31 +273,33 @@ public static synchronized DefaultBandwidthMeter getSingletonInstance(Context co private static final int ELAPSED_MILLIS_FOR_ESTIMATE = 2000; private static final int BYTES_TRANSFERRED_FOR_ESTIMATE = 512 * 1024; - @Nullable private final Context context; - private final SparseArray initialBitrateEstimates; - private final EventDispatcher eventDispatcher; + private final ImmutableMap initialBitrateEstimates; + private final EventDispatcher eventDispatcher; private final SlidingPercentile slidingPercentile; private final Clock clock; + private final boolean resetOnNetworkTypeChange; private int streamCount; private long sampleStartTimeMs; private long sampleBytesTransferred; - @C.NetworkType private int networkType; + private @C.NetworkType int networkType; private long totalElapsedTimeMs; private long totalBytesTransferred; private long bitrateEstimate; private long lastReportedBitrateEstimate; private boolean networkTypeOverrideSet; - @C.NetworkType private int networkTypeOverride; + private @C.NetworkType int networkTypeOverride; - /** @deprecated Use {@link Builder} instead. */ + /** + * @deprecated Use {@link Builder} instead. + */ @Deprecated public DefaultBandwidthMeter() { this( /* context= */ null, - /* initialBitrateEstimates= */ new SparseArray<>(), + /* initialBitrateEstimates= */ ImmutableMap.of(), DEFAULT_SLIDING_WINDOW_MAX_WEIGHT, Clock.DEFAULT, /* resetOnNetworkTypeChange= */ false); @@ -266,23 +307,23 @@ public DefaultBandwidthMeter() { private DefaultBandwidthMeter( @Nullable Context context, - SparseArray initialBitrateEstimates, + Map initialBitrateEstimates, int maxWeight, Clock clock, boolean resetOnNetworkTypeChange) { - this.context = context == null ? 
null : context.getApplicationContext(); - this.initialBitrateEstimates = initialBitrateEstimates; - this.eventDispatcher = new EventDispatcher<>(); + this.initialBitrateEstimates = ImmutableMap.copyOf(initialBitrateEstimates); + this.eventDispatcher = new EventDispatcher(); this.slidingPercentile = new SlidingPercentile(maxWeight); this.clock = clock; - // Set the initial network type and bitrate estimate - networkType = context == null ? C.NETWORK_TYPE_UNKNOWN : Util.getNetworkType(context); - bitrateEstimate = getInitialBitrateEstimateForNetworkType(networkType); - // Register to receive connectivity actions if possible. - if (context != null && resetOnNetworkTypeChange) { - ConnectivityActionReceiver connectivityActionReceiver = - ConnectivityActionReceiver.getInstance(context); - connectivityActionReceiver.register(/* bandwidthMeter= */ this); + this.resetOnNetworkTypeChange = resetOnNetworkTypeChange; + if (context != null) { + NetworkTypeObserver networkTypeObserver = NetworkTypeObserver.getInstance(context); + networkType = networkTypeObserver.getNetworkType(); + bitrateEstimate = getInitialBitrateEstimateForNetworkType(networkType); + networkTypeObserver.register(/* listener= */ this::onNetworkTypeChanged); + } else { + networkType = C.NETWORK_TYPE_UNKNOWN; + bitrateEstimate = getInitialBitrateEstimateForNetworkType(C.NETWORK_TYPE_UNKNOWN); } } @@ -297,7 +338,7 @@ private DefaultBandwidthMeter( public synchronized void setNetworkTypeOverride(@C.NetworkType int networkType) { networkTypeOverride = networkType; networkTypeOverrideSet = true; - onConnectivityAction(); + onNetworkTypeChanged(networkType); } @Override @@ -306,13 +347,14 @@ public synchronized long getBitrateEstimate() { } @Override - @Nullable public TransferListener getTransferListener() { return this; } @Override public void addEventListener(Handler eventHandler, EventListener eventListener) { + Assertions.checkNotNull(eventHandler); + Assertions.checkNotNull(eventListener); eventDispatcher.addListener(eventHandler, eventListener); } @@ -329,7 +371,7 @@ public void onTransferInitializing(DataSource source, DataSpec dataSpec, boolean @Override public synchronized void onTransferStart( DataSource source, DataSpec dataSpec, boolean isNetwork) { - if (!isNetwork) { + if (!isTransferAtFullNetworkSpeed(dataSpec, isNetwork)) { return; } if (streamCount == 0) { @@ -340,16 +382,16 @@ public synchronized void onTransferStart( @Override public synchronized void onBytesTransferred( - DataSource source, DataSpec dataSpec, boolean isNetwork, int bytes) { - if (!isNetwork) { + DataSource source, DataSpec dataSpec, boolean isNetwork, int bytesTransferred) { + if (!isTransferAtFullNetworkSpeed(dataSpec, isNetwork)) { return; } - sampleBytesTransferred += bytes; + sampleBytesTransferred += bytesTransferred; } @Override public synchronized void onTransferEnd(DataSource source, DataSpec dataSpec, boolean isNetwork) { - if (!isNetwork) { + if (!isTransferAtFullNetworkSpeed(dataSpec, isNetwork)) { return; } Assertions.checkState(streamCount > 0); @@ -371,11 +413,15 @@ public synchronized void onTransferEnd(DataSource source, DataSpec dataSpec, boo streamCount--; } - private synchronized void onConnectivityAction() { - int networkType = - networkTypeOverrideSet - ? networkTypeOverride - : (context == null ? 
C.NETWORK_TYPE_UNKNOWN : Util.getNetworkType(context)); + private synchronized void onNetworkTypeChanged(@C.NetworkType int networkType) { + if (this.networkType != C.NETWORK_TYPE_UNKNOWN && !resetOnNetworkTypeChange) { + // Reset on network change disabled. Ignore all updates except the initial one. + return; + } + + if (networkTypeOverrideSet) { + networkType = networkTypeOverride; + } if (this.networkType == networkType) { return; } @@ -408,8 +454,7 @@ private void maybeNotifyBandwidthSample( return; } lastReportedBitrateEstimate = bitrateEstimate; - eventDispatcher.dispatch( - listener -> listener.onBandwidthSample(elapsedMs, bytesTransferred, bitrateEstimate)); + eventDispatcher.bandwidthSample(elapsedMs, bytesTransferred, bitrateEstimate); } private long getInitialBitrateEstimateForNetworkType(@C.NetworkType int networkType) { @@ -423,311 +468,422 @@ private long getInitialBitrateEstimateForNetworkType(@C.NetworkType int networkT return initialBitrateEstimate; } - /* - * Note: This class only holds a weak reference to DefaultBandwidthMeter instances. It should not - * be made non-static, since doing so adds a strong reference (i.e. DefaultBandwidthMeter.this). - */ - private static class ConnectivityActionReceiver extends BroadcastReceiver { - - private static @MonotonicNonNull ConnectivityActionReceiver staticInstance; - - private final Handler mainHandler; - private final ArrayList> bandwidthMeters; - - public static synchronized ConnectivityActionReceiver getInstance(Context context) { - if (staticInstance == null) { - staticInstance = new ConnectivityActionReceiver(); - IntentFilter filter = new IntentFilter(); - filter.addAction(ConnectivityManager.CONNECTIVITY_ACTION); - context.registerReceiver(staticInstance, filter); - } - return staticInstance; - } - - private ConnectivityActionReceiver() { - mainHandler = new Handler(Looper.getMainLooper()); - bandwidthMeters = new ArrayList<>(); - } - - public synchronized void register(DefaultBandwidthMeter bandwidthMeter) { - removeClearedReferences(); - bandwidthMeters.add(new WeakReference<>(bandwidthMeter)); - // Simulate an initial update on the main thread (like the sticky broadcast we'd receive if - // we were to register a separate broadcast receiver for each bandwidth meter). 
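// Editor's sketch, not part of the patch: the TransferListener callbacks that feed the estimator.
// "source" and "dataSpec" are assumed to exist; the byte count is illustrative.
bandwidthMeter.onTransferStart(source, dataSpec, /* isNetwork= */ true);
bandwidthMeter.onBytesTransferred(source, dataSpec, /* isNetwork= */ true, /* bytesTransferred= */ 512 * 1024);
bandwidthMeter.onTransferEnd(source, dataSpec, /* isNetwork= */ true);
// A sample is only recorded when isTransferAtFullNetworkSpeed(...) holds, i.e. the transfer is a
// network transfer and its DataSpec does not carry FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED.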
- mainHandler.post(() -> updateBandwidthMeter(bandwidthMeter)); - } - - @Override - public synchronized void onReceive(Context context, Intent intent) { - if (isInitialStickyBroadcast()) { - return; - } - removeClearedReferences(); - for (int i = 0; i < bandwidthMeters.size(); i++) { - WeakReference bandwidthMeterReference = bandwidthMeters.get(i); - DefaultBandwidthMeter bandwidthMeter = bandwidthMeterReference.get(); - if (bandwidthMeter != null) { - updateBandwidthMeter(bandwidthMeter); - } - } - } - - private void updateBandwidthMeter(DefaultBandwidthMeter bandwidthMeter) { - bandwidthMeter.onConnectivityAction(); - } - - private void removeClearedReferences() { - for (int i = bandwidthMeters.size() - 1; i >= 0; i--) { - WeakReference bandwidthMeterReference = bandwidthMeters.get(i); - DefaultBandwidthMeter bandwidthMeter = bandwidthMeterReference.get(); - if (bandwidthMeter == null) { - bandwidthMeters.remove(i); - } - } - } + private static boolean isTransferAtFullNetworkSpeed(DataSpec dataSpec, boolean isNetwork) { + return isNetwork && !dataSpec.isFlagSet(DataSpec.FLAG_MIGHT_NOT_USE_FULL_NETWORK_SPEED); } - private static Map createInitialBitrateCountryGroupAssignment() { - HashMap countryGroupAssignment = new HashMap<>(); - countryGroupAssignment.put("AD", new int[] {0, 2, 0, 0}); - countryGroupAssignment.put("AE", new int[] {2, 4, 4, 4}); - countryGroupAssignment.put("AF", new int[] {4, 4, 3, 3}); - countryGroupAssignment.put("AG", new int[] {4, 2, 2, 3}); - countryGroupAssignment.put("AI", new int[] {0, 3, 2, 4}); - countryGroupAssignment.put("AL", new int[] {1, 2, 0, 1}); - countryGroupAssignment.put("AM", new int[] {2, 2, 1, 2}); - countryGroupAssignment.put("AO", new int[] {3, 4, 3, 1}); - countryGroupAssignment.put("AQ", new int[] {4, 2, 2, 2}); - countryGroupAssignment.put("AR", new int[] {2, 3, 1, 2}); - countryGroupAssignment.put("AS", new int[] {2, 2, 4, 2}); - countryGroupAssignment.put("AT", new int[] {0, 3, 0, 0}); - countryGroupAssignment.put("AU", new int[] {0, 2, 0, 1}); - countryGroupAssignment.put("AW", new int[] {1, 1, 2, 4}); - countryGroupAssignment.put("AX", new int[] {0, 1, 0, 0}); - countryGroupAssignment.put("AZ", new int[] {3, 3, 3, 3}); - countryGroupAssignment.put("BA", new int[] {1, 1, 0, 1}); - countryGroupAssignment.put("BB", new int[] {0, 3, 0, 0}); - countryGroupAssignment.put("BD", new int[] {2, 0, 4, 3}); - countryGroupAssignment.put("BE", new int[] {0, 1, 2, 3}); - countryGroupAssignment.put("BF", new int[] {4, 4, 4, 1}); - countryGroupAssignment.put("BG", new int[] {0, 1, 0, 0}); - countryGroupAssignment.put("BH", new int[] {1, 0, 3, 4}); - countryGroupAssignment.put("BI", new int[] {4, 4, 4, 4}); - countryGroupAssignment.put("BJ", new int[] {4, 4, 3, 4}); - countryGroupAssignment.put("BL", new int[] {1, 0, 4, 3}); - countryGroupAssignment.put("BM", new int[] {0, 1, 0, 0}); - countryGroupAssignment.put("BN", new int[] {4, 0, 2, 4}); - countryGroupAssignment.put("BO", new int[] {1, 3, 3, 3}); - countryGroupAssignment.put("BQ", new int[] {1, 0, 1, 0}); - countryGroupAssignment.put("BR", new int[] {2, 4, 3, 1}); - countryGroupAssignment.put("BS", new int[] {3, 1, 1, 3}); - countryGroupAssignment.put("BT", new int[] {3, 0, 3, 1}); - countryGroupAssignment.put("BW", new int[] {3, 4, 3, 3}); - countryGroupAssignment.put("BY", new int[] {0, 1, 1, 1}); - countryGroupAssignment.put("BZ", new int[] {1, 3, 2, 1}); - countryGroupAssignment.put("CA", new int[] {0, 3, 2, 2}); - countryGroupAssignment.put("CD", new int[] {3, 4, 2, 2}); - 
countryGroupAssignment.put("CF", new int[] {4, 3, 2, 2}); - countryGroupAssignment.put("CG", new int[] {3, 4, 1, 1}); - countryGroupAssignment.put("CH", new int[] {0, 0, 0, 0}); - countryGroupAssignment.put("CI", new int[] {3, 4, 3, 3}); - countryGroupAssignment.put("CK", new int[] {2, 0, 1, 0}); - countryGroupAssignment.put("CL", new int[] {1, 2, 2, 3}); - countryGroupAssignment.put("CM", new int[] {3, 4, 3, 2}); - countryGroupAssignment.put("CN", new int[] {1, 0, 1, 1}); - countryGroupAssignment.put("CO", new int[] {2, 3, 3, 2}); - countryGroupAssignment.put("CR", new int[] {2, 2, 4, 4}); - countryGroupAssignment.put("CU", new int[] {4, 4, 2, 1}); - countryGroupAssignment.put("CV", new int[] {2, 3, 3, 2}); - countryGroupAssignment.put("CW", new int[] {1, 1, 0, 0}); - countryGroupAssignment.put("CY", new int[] {1, 1, 0, 0}); - countryGroupAssignment.put("CZ", new int[] {0, 1, 0, 0}); - countryGroupAssignment.put("DE", new int[] {0, 1, 2, 3}); - countryGroupAssignment.put("DJ", new int[] {4, 2, 4, 4}); - countryGroupAssignment.put("DK", new int[] {0, 0, 1, 0}); - countryGroupAssignment.put("DM", new int[] {1, 1, 0, 2}); - countryGroupAssignment.put("DO", new int[] {3, 3, 4, 4}); - countryGroupAssignment.put("DZ", new int[] {3, 3, 4, 4}); - countryGroupAssignment.put("EC", new int[] {2, 3, 4, 2}); - countryGroupAssignment.put("EE", new int[] {0, 0, 0, 0}); - countryGroupAssignment.put("EG", new int[] {3, 4, 2, 1}); - countryGroupAssignment.put("EH", new int[] {2, 0, 3, 1}); - countryGroupAssignment.put("ER", new int[] {4, 2, 4, 4}); - countryGroupAssignment.put("ES", new int[] {0, 1, 1, 1}); - countryGroupAssignment.put("ET", new int[] {4, 4, 4, 1}); - countryGroupAssignment.put("FI", new int[] {0, 0, 1, 0}); - countryGroupAssignment.put("FJ", new int[] {3, 0, 4, 4}); - countryGroupAssignment.put("FK", new int[] {2, 2, 2, 1}); - countryGroupAssignment.put("FM", new int[] {3, 2, 4, 1}); - countryGroupAssignment.put("FO", new int[] {1, 1, 0, 0}); - countryGroupAssignment.put("FR", new int[] {1, 1, 1, 1}); - countryGroupAssignment.put("GA", new int[] {3, 2, 2, 2}); - countryGroupAssignment.put("GB", new int[] {0, 1, 1, 1}); - countryGroupAssignment.put("GD", new int[] {1, 1, 3, 1}); - countryGroupAssignment.put("GE", new int[] {1, 0, 1, 4}); - countryGroupAssignment.put("GF", new int[] {2, 0, 1, 3}); - countryGroupAssignment.put("GG", new int[] {1, 0, 0, 0}); - countryGroupAssignment.put("GH", new int[] {3, 3, 3, 3}); - countryGroupAssignment.put("GI", new int[] {4, 4, 0, 0}); - countryGroupAssignment.put("GL", new int[] {2, 1, 1, 2}); - countryGroupAssignment.put("GM", new int[] {4, 3, 2, 4}); - countryGroupAssignment.put("GN", new int[] {3, 4, 4, 2}); - countryGroupAssignment.put("GP", new int[] {2, 1, 3, 4}); - countryGroupAssignment.put("GQ", new int[] {4, 4, 4, 0}); - countryGroupAssignment.put("GR", new int[] {1, 1, 0, 1}); - countryGroupAssignment.put("GT", new int[] {3, 2, 2, 2}); - countryGroupAssignment.put("GU", new int[] {1, 0, 2, 2}); - countryGroupAssignment.put("GW", new int[] {3, 4, 4, 3}); - countryGroupAssignment.put("GY", new int[] {3, 2, 1, 1}); - countryGroupAssignment.put("HK", new int[] {0, 2, 3, 4}); - countryGroupAssignment.put("HN", new int[] {3, 1, 3, 3}); - countryGroupAssignment.put("HR", new int[] {1, 1, 0, 1}); - countryGroupAssignment.put("HT", new int[] {4, 4, 4, 4}); - countryGroupAssignment.put("HU", new int[] {0, 1, 0, 0}); - countryGroupAssignment.put("ID", new int[] {2, 2, 2, 3}); - countryGroupAssignment.put("IE", new int[] {1, 0, 1, 1}); - 
countryGroupAssignment.put("IL", new int[] {1, 0, 2, 3}); - countryGroupAssignment.put("IM", new int[] {0, 0, 0, 1}); - countryGroupAssignment.put("IN", new int[] {2, 2, 4, 3}); - countryGroupAssignment.put("IO", new int[] {4, 4, 2, 3}); - countryGroupAssignment.put("IQ", new int[] {3, 3, 4, 2}); - countryGroupAssignment.put("IR", new int[] {3, 0, 2, 1}); - countryGroupAssignment.put("IS", new int[] {0, 1, 0, 0}); - countryGroupAssignment.put("IT", new int[] {1, 1, 1, 2}); - countryGroupAssignment.put("JE", new int[] {1, 0, 0, 1}); - countryGroupAssignment.put("JM", new int[] {3, 3, 3, 4}); - countryGroupAssignment.put("JO", new int[] {1, 2, 1, 1}); - countryGroupAssignment.put("JP", new int[] {0, 2, 0, 0}); - countryGroupAssignment.put("KE", new int[] {3, 4, 3, 3}); - countryGroupAssignment.put("KG", new int[] {2, 0, 2, 2}); - countryGroupAssignment.put("KH", new int[] {1, 0, 4, 3}); - countryGroupAssignment.put("KI", new int[] {4, 4, 4, 0}); - countryGroupAssignment.put("KM", new int[] {4, 3, 2, 4}); - countryGroupAssignment.put("KN", new int[] {1, 0, 2, 4}); - countryGroupAssignment.put("KP", new int[] {4, 2, 0, 2}); - countryGroupAssignment.put("KR", new int[] {0, 1, 0, 1}); - countryGroupAssignment.put("KW", new int[] {2, 3, 1, 2}); - countryGroupAssignment.put("KY", new int[] {3, 1, 2, 3}); - countryGroupAssignment.put("KZ", new int[] {1, 2, 2, 2}); - countryGroupAssignment.put("LA", new int[] {2, 2, 1, 1}); - countryGroupAssignment.put("LB", new int[] {3, 2, 0, 0}); - countryGroupAssignment.put("LC", new int[] {1, 1, 0, 0}); - countryGroupAssignment.put("LI", new int[] {0, 0, 1, 1}); - countryGroupAssignment.put("LK", new int[] {2, 0, 2, 3}); - countryGroupAssignment.put("LR", new int[] {3, 4, 4, 2}); - countryGroupAssignment.put("LS", new int[] {3, 3, 2, 2}); - countryGroupAssignment.put("LT", new int[] {0, 0, 0, 0}); - countryGroupAssignment.put("LU", new int[] {0, 0, 0, 0}); - countryGroupAssignment.put("LV", new int[] {0, 0, 0, 0}); - countryGroupAssignment.put("LY", new int[] {3, 3, 4, 3}); - countryGroupAssignment.put("MA", new int[] {3, 2, 3, 2}); - countryGroupAssignment.put("MC", new int[] {0, 4, 0, 0}); - countryGroupAssignment.put("MD", new int[] {1, 1, 0, 0}); - countryGroupAssignment.put("ME", new int[] {1, 3, 1, 2}); - countryGroupAssignment.put("MF", new int[] {2, 3, 1, 1}); - countryGroupAssignment.put("MG", new int[] {3, 4, 2, 3}); - countryGroupAssignment.put("MH", new int[] {4, 0, 2, 4}); - countryGroupAssignment.put("MK", new int[] {1, 0, 0, 0}); - countryGroupAssignment.put("ML", new int[] {4, 4, 2, 0}); - countryGroupAssignment.put("MM", new int[] {3, 3, 2, 2}); - countryGroupAssignment.put("MN", new int[] {2, 3, 1, 1}); - countryGroupAssignment.put("MO", new int[] {0, 0, 4, 4}); - countryGroupAssignment.put("MP", new int[] {0, 2, 1, 2}); - countryGroupAssignment.put("MQ", new int[] {2, 1, 1, 3}); - countryGroupAssignment.put("MR", new int[] {4, 2, 4, 4}); - countryGroupAssignment.put("MS", new int[] {1, 4, 3, 4}); - countryGroupAssignment.put("MT", new int[] {0, 0, 0, 0}); - countryGroupAssignment.put("MU", new int[] {2, 2, 4, 4}); - countryGroupAssignment.put("MV", new int[] {4, 3, 2, 4}); - countryGroupAssignment.put("MW", new int[] {3, 1, 1, 1}); - countryGroupAssignment.put("MX", new int[] {2, 4, 3, 3}); - countryGroupAssignment.put("MY", new int[] {2, 1, 3, 3}); - countryGroupAssignment.put("MZ", new int[] {3, 3, 3, 3}); - countryGroupAssignment.put("NA", new int[] {4, 3, 3, 3}); - countryGroupAssignment.put("NC", new int[] {2, 0, 4, 4}); - 
countryGroupAssignment.put("NE", new int[] {4, 4, 4, 4}); - countryGroupAssignment.put("NF", new int[] {1, 2, 2, 0}); - countryGroupAssignment.put("NG", new int[] {3, 3, 2, 2}); - countryGroupAssignment.put("NI", new int[] {3, 2, 4, 3}); - countryGroupAssignment.put("NL", new int[] {0, 2, 3, 2}); - countryGroupAssignment.put("NO", new int[] {0, 2, 1, 0}); - countryGroupAssignment.put("NP", new int[] {2, 2, 2, 2}); - countryGroupAssignment.put("NR", new int[] {4, 0, 3, 2}); - countryGroupAssignment.put("NZ", new int[] {0, 0, 1, 2}); - countryGroupAssignment.put("OM", new int[] {2, 3, 0, 2}); - countryGroupAssignment.put("PA", new int[] {1, 3, 3, 3}); - countryGroupAssignment.put("PE", new int[] {2, 4, 4, 4}); - countryGroupAssignment.put("PF", new int[] {2, 1, 1, 1}); - countryGroupAssignment.put("PG", new int[] {4, 3, 3, 2}); - countryGroupAssignment.put("PH", new int[] {3, 0, 3, 4}); - countryGroupAssignment.put("PK", new int[] {3, 2, 3, 2}); - countryGroupAssignment.put("PL", new int[] {1, 0, 1, 2}); - countryGroupAssignment.put("PM", new int[] {0, 2, 2, 0}); - countryGroupAssignment.put("PR", new int[] {2, 2, 2, 2}); - countryGroupAssignment.put("PS", new int[] {3, 3, 1, 4}); - countryGroupAssignment.put("PT", new int[] {1, 1, 0, 0}); - countryGroupAssignment.put("PW", new int[] {1, 1, 3, 0}); - countryGroupAssignment.put("PY", new int[] {2, 0, 3, 3}); - countryGroupAssignment.put("QA", new int[] {2, 3, 1, 1}); - countryGroupAssignment.put("RE", new int[] {1, 0, 2, 2}); - countryGroupAssignment.put("RO", new int[] {0, 1, 1, 2}); - countryGroupAssignment.put("RS", new int[] {1, 2, 0, 0}); - countryGroupAssignment.put("RU", new int[] {0, 1, 0, 1}); - countryGroupAssignment.put("RW", new int[] {4, 4, 4, 4}); - countryGroupAssignment.put("SA", new int[] {2, 2, 2, 1}); - countryGroupAssignment.put("SB", new int[] {4, 4, 4, 1}); - countryGroupAssignment.put("SC", new int[] {4, 2, 0, 1}); - countryGroupAssignment.put("SD", new int[] {4, 4, 4, 4}); - countryGroupAssignment.put("SE", new int[] {0, 1, 0, 0}); - countryGroupAssignment.put("SG", new int[] {1, 0, 3, 3}); - countryGroupAssignment.put("SH", new int[] {4, 2, 2, 2}); - countryGroupAssignment.put("SI", new int[] {0, 1, 0, 0}); - countryGroupAssignment.put("SJ", new int[] {2, 2, 2, 4}); - countryGroupAssignment.put("SK", new int[] {0, 1, 0, 0}); - countryGroupAssignment.put("SL", new int[] {4, 3, 3, 1}); - countryGroupAssignment.put("SM", new int[] {0, 0, 1, 2}); - countryGroupAssignment.put("SN", new int[] {4, 4, 4, 3}); - countryGroupAssignment.put("SO", new int[] {3, 4, 3, 4}); - countryGroupAssignment.put("SR", new int[] {2, 2, 2, 1}); - countryGroupAssignment.put("SS", new int[] {4, 4, 4, 4}); - countryGroupAssignment.put("ST", new int[] {2, 3, 1, 2}); - countryGroupAssignment.put("SV", new int[] {2, 2, 4, 4}); - countryGroupAssignment.put("SX", new int[] {2, 4, 1, 0}); - countryGroupAssignment.put("SY", new int[] {4, 3, 1, 1}); - countryGroupAssignment.put("SZ", new int[] {4, 4, 3, 4}); - countryGroupAssignment.put("TC", new int[] {1, 2, 1, 0}); - countryGroupAssignment.put("TD", new int[] {4, 4, 4, 3}); - countryGroupAssignment.put("TG", new int[] {3, 2, 1, 0}); - countryGroupAssignment.put("TH", new int[] {1, 3, 3, 3}); - countryGroupAssignment.put("TJ", new int[] {4, 4, 4, 4}); - countryGroupAssignment.put("TL", new int[] {4, 2, 4, 4}); - countryGroupAssignment.put("TM", new int[] {4, 2, 2, 2}); - countryGroupAssignment.put("TN", new int[] {2, 1, 1, 1}); - countryGroupAssignment.put("TO", new int[] {4, 3, 4, 4}); - 
countryGroupAssignment.put("TR", new int[] {1, 2, 1, 1}); - countryGroupAssignment.put("TT", new int[] {1, 3, 2, 4}); - countryGroupAssignment.put("TV", new int[] {4, 2, 3, 4}); - countryGroupAssignment.put("TW", new int[] {0, 0, 0, 0}); - countryGroupAssignment.put("TZ", new int[] {3, 4, 3, 3}); - countryGroupAssignment.put("UA", new int[] {0, 3, 1, 1}); - countryGroupAssignment.put("UG", new int[] {3, 2, 2, 3}); - countryGroupAssignment.put("US", new int[] {0, 1, 2, 2}); - countryGroupAssignment.put("UY", new int[] {2, 1, 2, 2}); - countryGroupAssignment.put("UZ", new int[] {2, 2, 3, 2}); - countryGroupAssignment.put("VA", new int[] {0, 2, 2, 2}); - countryGroupAssignment.put("VC", new int[] {2, 3, 0, 2}); - countryGroupAssignment.put("VE", new int[] {4, 4, 4, 4}); - countryGroupAssignment.put("VG", new int[] {3, 1, 2, 4}); - countryGroupAssignment.put("VI", new int[] {1, 4, 4, 3}); - countryGroupAssignment.put("VN", new int[] {0, 1, 3, 4}); - countryGroupAssignment.put("VU", new int[] {4, 0, 3, 3}); - countryGroupAssignment.put("WS", new int[] {3, 2, 4, 3}); - countryGroupAssignment.put("XK", new int[] {1, 2, 1, 0}); - countryGroupAssignment.put("YE", new int[] {4, 4, 4, 3}); - countryGroupAssignment.put("YT", new int[] {2, 2, 2, 3}); - countryGroupAssignment.put("ZA", new int[] {2, 3, 2, 2}); - countryGroupAssignment.put("ZM", new int[] {3, 2, 3, 3}); - countryGroupAssignment.put("ZW", new int[] {3, 3, 2, 3}); - return Collections.unmodifiableMap(countryGroupAssignment); + /** + * Returns initial bitrate group assignments for a {@code country}. The initial bitrate is a list + * of indices for [Wifi, 2G, 3G, 4G, 5G_NSA, 5G_SA]. + */ + private static int[] getInitialBitrateCountryGroupAssignment(String country) { + switch (country) { + case "AD": + case "CW": + return new int[] {2, 2, 0, 0, 2, 2}; + case "AE": + return new int[] {1, 4, 3, 4, 4, 2}; + case "AG": + return new int[] {2, 4, 3, 4, 2, 2}; + case "AL": + return new int[] {1, 1, 1, 3, 2, 2}; + case "AM": + return new int[] {2, 3, 2, 3, 2, 2}; + case "AO": + return new int[] {4, 4, 4, 3, 2, 2}; + case "AS": + return new int[] {2, 2, 3, 3, 2, 2}; + case "AT": + return new int[] {1, 2, 1, 4, 1, 4}; + case "AU": + return new int[] {0, 2, 1, 1, 3, 0}; + case "BE": + return new int[] {0, 1, 4, 4, 3, 2}; + case "BH": + return new int[] {1, 3, 1, 4, 4, 2}; + case "BJ": + return new int[] {4, 4, 2, 3, 2, 2}; + case "BN": + return new int[] {3, 2, 0, 1, 2, 2}; + case "BO": + return new int[] {1, 2, 3, 2, 2, 2}; + case "BR": + return new int[] {1, 1, 2, 1, 1, 0}; + case "BW": + return new int[] {3, 2, 1, 0, 2, 2}; + case "BY": + return new int[] {1, 1, 2, 3, 2, 2}; + case "CA": + return new int[] {0, 2, 3, 3, 3, 3}; + case "CH": + return new int[] {0, 0, 0, 0, 0, 3}; + case "BZ": + case "CK": + return new int[] {2, 2, 2, 1, 2, 2}; + case "CL": + return new int[] {1, 1, 2, 1, 3, 2}; + case "CM": + return new int[] {4, 3, 3, 4, 2, 2}; + case "CN": + return new int[] {2, 0, 4, 3, 3, 1}; + case "CO": + return new int[] {2, 3, 4, 2, 2, 2}; + case "CR": + return new int[] {2, 4, 4, 4, 2, 2}; + case "CV": + return new int[] {2, 3, 0, 1, 2, 2}; + case "CZ": + return new int[] {0, 0, 2, 0, 1, 2}; + case "DE": + return new int[] {0, 1, 3, 2, 2, 2}; + case "DO": + return new int[] {3, 4, 4, 4, 4, 2}; + case "AZ": + case "BF": + case "DZ": + return new int[] {3, 3, 4, 4, 2, 2}; + case "EC": + return new int[] {1, 3, 2, 1, 2, 2}; + case "CI": + case "EG": + return new int[] {3, 4, 3, 3, 2, 2}; + case "FI": + return new int[] {0, 0, 0, 2, 0, 2}; + case 
"FJ": + return new int[] {3, 1, 2, 3, 2, 2}; + case "FM": + return new int[] {4, 2, 3, 0, 2, 2}; + case "AI": + case "BB": + case "BM": + case "BQ": + case "DM": + case "FO": + return new int[] {0, 2, 0, 0, 2, 2}; + case "FR": + return new int[] {1, 1, 2, 1, 1, 2}; + case "GB": + return new int[] {0, 1, 1, 2, 1, 2}; + case "GE": + return new int[] {1, 0, 0, 2, 2, 2}; + case "GG": + return new int[] {0, 2, 1, 0, 2, 2}; + case "CG": + case "GH": + return new int[] {3, 3, 3, 3, 2, 2}; + case "GM": + return new int[] {4, 3, 2, 4, 2, 2}; + case "GN": + return new int[] {4, 4, 4, 2, 2, 2}; + case "GP": + return new int[] {3, 1, 1, 3, 2, 2}; + case "GQ": + return new int[] {4, 4, 3, 3, 2, 2}; + case "GT": + return new int[] {2, 2, 2, 1, 1, 2}; + case "AW": + case "GU": + return new int[] {1, 2, 4, 4, 2, 2}; + case "GW": + return new int[] {4, 4, 2, 2, 2, 2}; + case "GY": + return new int[] {3, 0, 1, 1, 2, 2}; + case "HK": + return new int[] {0, 1, 1, 3, 2, 0}; + case "HN": + return new int[] {3, 3, 2, 2, 2, 2}; + case "ID": + return new int[] {3, 1, 1, 2, 3, 2}; + case "BA": + case "IE": + return new int[] {1, 1, 1, 1, 2, 2}; + case "IL": + return new int[] {1, 2, 2, 3, 4, 2}; + case "IM": + return new int[] {0, 2, 0, 1, 2, 2}; + case "IN": + return new int[] {1, 1, 2, 1, 2, 1}; + case "IR": + return new int[] {4, 2, 3, 3, 4, 2}; + case "IS": + return new int[] {0, 0, 1, 0, 0, 2}; + case "IT": + return new int[] {0, 0, 1, 1, 1, 2}; + case "GI": + case "JE": + return new int[] {1, 2, 0, 1, 2, 2}; + case "JM": + return new int[] {2, 4, 2, 1, 2, 2}; + case "JO": + return new int[] {2, 0, 1, 1, 2, 2}; + case "JP": + return new int[] {0, 3, 3, 3, 4, 4}; + case "KE": + return new int[] {3, 2, 2, 1, 2, 2}; + case "KH": + return new int[] {1, 0, 4, 2, 2, 2}; + case "CU": + case "KI": + return new int[] {4, 2, 4, 3, 2, 2}; + case "CD": + case "KM": + return new int[] {4, 3, 3, 2, 2, 2}; + case "KR": + return new int[] {0, 2, 2, 4, 4, 4}; + case "KW": + return new int[] {1, 0, 1, 0, 0, 2}; + case "BD": + case "KZ": + return new int[] {2, 1, 2, 2, 2, 2}; + case "LA": + return new int[] {1, 2, 1, 3, 2, 2}; + case "BS": + case "LB": + return new int[] {3, 2, 1, 2, 2, 2}; + case "LK": + return new int[] {3, 2, 3, 4, 4, 2}; + case "LR": + return new int[] {3, 4, 3, 4, 2, 2}; + case "LU": + return new int[] {1, 1, 4, 2, 0, 2}; + case "CY": + case "HR": + case "LV": + return new int[] {1, 0, 0, 0, 0, 2}; + case "MA": + return new int[] {3, 3, 2, 1, 2, 2}; + case "MC": + return new int[] {0, 2, 2, 0, 2, 2}; + case "MD": + return new int[] {1, 0, 0, 0, 2, 2}; + case "ME": + return new int[] {2, 0, 0, 1, 1, 2}; + case "MH": + return new int[] {4, 2, 1, 3, 2, 2}; + case "MK": + return new int[] {2, 0, 0, 1, 3, 2}; + case "MM": + return new int[] {2, 2, 2, 3, 4, 2}; + case "MN": + return new int[] {2, 0, 1, 2, 2, 2}; + case "MO": + return new int[] {0, 2, 4, 4, 4, 2}; + case "KG": + case "MQ": + return new int[] {2, 1, 1, 2, 2, 2}; + case "MR": + return new int[] {4, 2, 3, 4, 2, 2}; + case "DK": + case "EE": + case "HU": + case "LT": + case "MT": + return new int[] {0, 0, 0, 0, 0, 2}; + case "MV": + return new int[] {3, 4, 1, 3, 3, 2}; + case "MW": + return new int[] {4, 2, 3, 3, 2, 2}; + case "MX": + return new int[] {3, 4, 4, 4, 2, 2}; + case "MY": + return new int[] {1, 0, 4, 1, 2, 2}; + case "NA": + return new int[] {3, 4, 3, 2, 2, 2}; + case "NC": + return new int[] {3, 2, 3, 4, 2, 2}; + case "NG": + return new int[] {3, 4, 2, 1, 2, 2}; + case "NI": + return new int[] {2, 3, 4, 3, 2, 2}; + case "NL": + return new 
int[] {0, 2, 3, 3, 0, 4}; + case "NO": + return new int[] {0, 1, 2, 1, 1, 2}; + case "NP": + return new int[] {2, 1, 4, 3, 2, 2}; + case "NR": + return new int[] {4, 0, 3, 2, 2, 2}; + case "NU": + return new int[] {4, 2, 2, 1, 2, 2}; + case "NZ": + return new int[] {1, 0, 2, 2, 4, 2}; + case "OM": + return new int[] {2, 3, 1, 3, 4, 2}; + case "PA": + return new int[] {2, 3, 3, 3, 2, 2}; + case "PE": + return new int[] {1, 2, 4, 4, 3, 2}; + case "AF": + case "PG": + return new int[] {4, 3, 3, 3, 2, 2}; + case "PH": + return new int[] {2, 1, 3, 2, 2, 0}; + case "PL": + return new int[] {2, 1, 2, 2, 4, 2}; + case "PR": + return new int[] {2, 0, 2, 0, 2, 1}; + case "PS": + return new int[] {3, 4, 1, 4, 2, 2}; + case "PT": + return new int[] {1, 0, 0, 0, 1, 2}; + case "PW": + return new int[] {2, 2, 4, 2, 2, 2}; + case "BL": + case "MF": + case "PY": + return new int[] {1, 2, 2, 2, 2, 2}; + case "QA": + return new int[] {1, 4, 4, 4, 4, 2}; + case "RE": + return new int[] {1, 2, 2, 3, 1, 2}; + case "RO": + return new int[] {0, 0, 1, 2, 1, 2}; + case "RS": + return new int[] {2, 0, 0, 0, 2, 2}; + case "RU": + return new int[] {1, 0, 0, 0, 3, 3}; + case "RW": + return new int[] {3, 3, 1, 0, 2, 2}; + case "MU": + case "SA": + return new int[] {3, 1, 1, 2, 2, 2}; + case "CF": + case "SB": + return new int[] {4, 2, 4, 2, 2, 2}; + case "SC": + return new int[] {4, 3, 1, 1, 2, 2}; + case "SD": + return new int[] {4, 3, 4, 2, 2, 2}; + case "SE": + return new int[] {0, 1, 1, 1, 0, 2}; + case "SG": + return new int[] {2, 3, 3, 3, 3, 3}; + case "AQ": + case "ER": + case "SH": + return new int[] {4, 2, 2, 2, 2, 2}; + case "BG": + case "ES": + case "GR": + case "SI": + return new int[] {0, 0, 0, 0, 1, 2}; + case "IQ": + case "SJ": + return new int[] {3, 2, 2, 2, 2, 2}; + case "SK": + return new int[] {1, 1, 1, 1, 3, 2}; + case "GF": + case "PK": + case "SL": + return new int[] {3, 2, 3, 3, 2, 2}; + case "ET": + case "SN": + return new int[] {4, 4, 3, 2, 2, 2}; + case "SO": + return new int[] {3, 2, 2, 4, 4, 2}; + case "SR": + return new int[] {2, 4, 3, 0, 2, 2}; + case "ST": + return new int[] {2, 2, 1, 2, 2, 2}; + case "PF": + case "SV": + return new int[] {2, 3, 3, 1, 2, 2}; + case "SZ": + return new int[] {4, 4, 3, 4, 2, 2}; + case "TC": + return new int[] {2, 2, 1, 3, 2, 2}; + case "GA": + case "TG": + return new int[] {3, 4, 1, 0, 2, 2}; + case "TH": + return new int[] {0, 1, 2, 1, 2, 2}; + case "DJ": + case "SY": + case "TJ": + return new int[] {4, 3, 4, 4, 2, 2}; + case "GL": + case "TK": + return new int[] {2, 2, 2, 4, 2, 2}; + case "TL": + return new int[] {4, 2, 4, 4, 2, 2}; + case "SS": + case "TM": + return new int[] {4, 2, 2, 3, 2, 2}; + case "TR": + return new int[] {1, 0, 0, 1, 3, 2}; + case "TT": + return new int[] {1, 4, 0, 0, 2, 2}; + case "TW": + return new int[] {0, 2, 0, 0, 0, 0}; + case "ML": + case "TZ": + return new int[] {3, 4, 2, 2, 2, 2}; + case "UA": + return new int[] {0, 1, 1, 2, 4, 2}; + case "LS": + case "UG": + return new int[] {3, 3, 3, 2, 2, 2}; + case "US": + return new int[] {1, 1, 4, 1, 3, 1}; + case "TN": + case "UY": + return new int[] {2, 1, 1, 1, 2, 2}; + case "UZ": + return new int[] {2, 2, 3, 4, 3, 2}; + case "AX": + case "CX": + case "LI": + case "MP": + case "MS": + case "PM": + case "SM": + case "VA": + return new int[] {0, 2, 2, 2, 2, 2}; + case "GD": + case "KN": + case "KY": + case "LC": + case "SX": + case "VC": + return new int[] {1, 2, 0, 0, 2, 2}; + case "VG": + return new int[] {2, 2, 0, 1, 2, 2}; + case "VI": + return new int[] {0, 2, 1, 2, 2, 2}; + 
case "VN": + return new int[] {0, 0, 1, 2, 2, 1}; + case "VU": + return new int[] {4, 3, 3, 1, 2, 2}; + case "IO": + case "TV": + case "WF": + return new int[] {4, 2, 2, 4, 2, 2}; + case "BT": + case "MZ": + case "WS": + return new int[] {3, 1, 2, 1, 2, 2}; + case "XK": + return new int[] {1, 2, 1, 1, 2, 2}; + case "BI": + case "HT": + case "MG": + case "NE": + case "TD": + case "VE": + case "YE": + return new int[] {4, 4, 4, 4, 2, 2}; + case "YT": + return new int[] {2, 3, 3, 4, 2, 2}; + case "ZA": + return new int[] {2, 3, 2, 1, 2, 2}; + case "ZM": + return new int[] {4, 4, 4, 3, 3, 2}; + case "LY": + case "TO": + case "ZW": + return new int[] {3, 2, 4, 3, 2, 2}; + default: + return new int[] {2, 2, 2, 2, 2, 2}; + } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultDataSource.java index 98026c4677..5720ef7ac1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultDataSource.java @@ -15,12 +15,14 @@ */ package com.google.android.exoplayer2.upstream; +import android.content.ContentResolver; import android.content.Context; import android.net.Uri; import androidx.annotation.Nullable; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -31,33 +33,98 @@ * A {@link DataSource} that supports multiple URI schemes. The supported schemes are: * *

        - *
      • file: For fetching data from a local file (e.g. file:///path/to/media/media.mp4, or just - * /path/to/media/media.mp4 because the implementation assumes that a URI without a scheme is - * a local file URI). - *
      • asset: For fetching data from an asset in the application's apk (e.g. asset:///media.mp4). - *
      • rawresource: For fetching data from a raw resource in the application's apk (e.g. - * rawresource:///resourceId, where rawResourceId is the integer identifier of the raw - * resource). - *
      • content: For fetching data from a content URI (e.g. content://authority/path/123). - *
      • rtmp: For fetching data over RTMP. Only supported if the project using ExoPlayer has an - * explicit dependency on ExoPlayer's RTMP extension. - *
      • data: For parsing data inlined in the URI as defined in RFC 2397. - *
      • udp: For fetching data over UDP (e.g. udp://something.com/media). - *
      • http(s): For fetching data over HTTP and HTTPS (e.g. https://www.something.com/media.mp4), - * if constructed using {@link #DefaultDataSource(Context, String, boolean)}, or any other - * schemes supported by a base data source if constructed using {@link - * #DefaultDataSource(Context, DataSource)}. + *
      • {@code file}: For fetching data from a local file (e.g. {@code + * file:///path/to/media/media.mp4}, or just {@code /path/to/media/media.mp4} because the + * implementation assumes that a URI without a scheme is a local file URI). + *
      • {@code asset}: For fetching data from an asset in the application's APK (e.g. {@code + * asset:///media.mp4}). + *
      • {@code rawresource}: For fetching data from a raw resource in the application's APK (e.g. + * {@code rawresource:///rawResourceId}, where {@code rawResourceId} is the integer identifier of + * the raw resource). + *
      • {@code android.resource}: For fetching data in the application's APK (e.g. {@code + * android.resource:///resourceId} or {@code android.resource://resourceType/resourceName}). + * See {@link RawResourceDataSource} for more information about the URI form. + *
      • {@code content}: For fetching data from a content URI (e.g. {@code + * content://authority/path/123}). + *
      • {@code rtmp}: For fetching data over RTMP. Only supported if the project using ExoPlayer + * has an explicit dependency on ExoPlayer's RTMP extension. + *
      • {@code data}: For parsing data inlined in the URI as defined in RFC 2397. + *
      • {@code udp}: For fetching data over UDP (e.g. {@code udp://something.com/media}). + *
      • {@code http(s)}: For fetching data over HTTP and HTTPS (e.g. {@code + * https://www.something.com/media.mp4}), if constructed using {@link + * #DefaultDataSource(Context, String, boolean)}, or any other schemes supported by a base + * data source if constructed using {@link #DefaultDataSource(Context, DataSource)}. *
      */ public final class DefaultDataSource implements DataSource { + /** {@link DataSource.Factory} for {@link DefaultDataSource} instances. */ + public static final class Factory implements DataSource.Factory { + + private final Context context; + private final DataSource.Factory baseDataSourceFactory; + @Nullable private TransferListener transferListener; + + /** + * Creates an instance. + * + * @param context A context. + */ + public Factory(Context context) { + this(context, new DefaultHttpDataSource.Factory()); + } + + /** + * Creates an instance. + * + * @param context A context. + * @param baseDataSourceFactory The {@link DataSource.Factory} to be used to create base {@link + * DataSource DataSources} for {@link DefaultDataSource} instances. The base {@link + * DataSource} is normally an {@link HttpDataSource}, and is responsible for fetching data + * over HTTP and HTTPS, as well as any other URI schemes not otherwise supported by {@link + * DefaultDataSource}. + */ + public Factory(Context context, DataSource.Factory baseDataSourceFactory) { + this.context = context.getApplicationContext(); + this.baseDataSourceFactory = baseDataSourceFactory; + } + + /** + * Sets the {@link TransferListener} that will be used. + * + *

      The default is {@code null}. + * + *

      See {@link DataSource#addTransferListener(TransferListener)}. + * + * @param transferListener The listener that will be used. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setTransferListener(@Nullable TransferListener transferListener) { + this.transferListener = transferListener; + return this; + } + + @Override + public DefaultDataSource createDataSource() { + DefaultDataSource dataSource = + new DefaultDataSource(context, baseDataSourceFactory.createDataSource()); + if (transferListener != null) { + dataSource.addTransferListener(transferListener); + } + return dataSource; + } + } + private static final String TAG = "DefaultDataSource"; private static final String SCHEME_ASSET = "asset"; private static final String SCHEME_CONTENT = "content"; private static final String SCHEME_RTMP = "rtmp"; private static final String SCHEME_UDP = "udp"; + private static final String SCHEME_DATA = DataSchemeDataSource.SCHEME_DATA; private static final String SCHEME_RAW = RawResourceDataSource.RAW_RESOURCE_SCHEME; + private static final String SCHEME_ANDROID_RESOURCE = ContentResolver.SCHEME_ANDROID_RESOURCE; private final Context context; private final List transferListeners; @@ -78,11 +145,27 @@ public final class DefaultDataSource implements DataSource { * Constructs a new instance, optionally configured to follow cross-protocol redirects. * * @param context A context. - * @param userAgent The User-Agent to use when requesting remote data. + */ + public DefaultDataSource(Context context, boolean allowCrossProtocolRedirects) { + this( + context, + /* userAgent= */ null, + DefaultHttpDataSource.DEFAULT_CONNECT_TIMEOUT_MILLIS, + DefaultHttpDataSource.DEFAULT_READ_TIMEOUT_MILLIS, + allowCrossProtocolRedirects); + } + + /** + * Constructs a new instance, optionally configured to follow cross-protocol redirects. + * + * @param context A context. + * @param userAgent The user agent that will be used when requesting remote data, or {@code null} + * to use the default user agent of the underlying platform. * @param allowCrossProtocolRedirects Whether cross-protocol redirects (i.e. redirects from HTTP * to HTTPS and vice versa) are enabled when fetching remote data. */ - public DefaultDataSource(Context context, String userAgent, boolean allowCrossProtocolRedirects) { + public DefaultDataSource( + Context context, @Nullable String userAgent, boolean allowCrossProtocolRedirects) { this( context, userAgent, @@ -95,7 +178,8 @@ public DefaultDataSource(Context context, String userAgent, boolean allowCrossPr * Constructs a new instance, optionally configured to follow cross-protocol redirects. * * @param context A context. - * @param userAgent The User-Agent to use when requesting remote data. + * @param userAgent The user agent that will be used when requesting remote data, or {@code null} + * to use the default user agent of the underlying platform. * @param connectTimeoutMillis The connection timeout that should be used when requesting remote * data, in milliseconds. A timeout of zero is interpreted as an infinite timeout. 
* @param readTimeoutMillis The read timeout that should be used when requesting remote data, in @@ -105,18 +189,18 @@ public DefaultDataSource(Context context, String userAgent, boolean allowCrossPr */ public DefaultDataSource( Context context, - String userAgent, + @Nullable String userAgent, int connectTimeoutMillis, int readTimeoutMillis, boolean allowCrossProtocolRedirects) { this( context, - new DefaultHttpDataSource( - userAgent, - connectTimeoutMillis, - readTimeoutMillis, - allowCrossProtocolRedirects, - /* defaultRequestProperties= */ null)); + new DefaultHttpDataSource.Factory() + .setUserAgent(userAgent) + .setConnectTimeoutMs(connectTimeoutMillis) + .setReadTimeoutMs(readTimeoutMillis) + .setAllowCrossProtocolRedirects(allowCrossProtocolRedirects) + .createDataSource()); } /** @@ -135,6 +219,7 @@ public DefaultDataSource(Context context, DataSource baseDataSource) { @Override public void addTransferListener(TransferListener transferListener) { + Assertions.checkNotNull(transferListener); baseDataSource.addTransferListener(transferListener); transferListeners.add(transferListener); maybeAddListenerToDataSource(fileDataSource, transferListener); @@ -166,9 +251,9 @@ public long open(DataSpec dataSpec) throws IOException { dataSource = getRtmpDataSource(); } else if (SCHEME_UDP.equals(scheme)) { dataSource = getUdpDataSource(); - } else if (DataSchemeDataSource.SCHEME_DATA.equals(scheme)) { + } else if (SCHEME_DATA.equals(scheme)) { dataSource = getDataSchemeDataSource(); - } else if (SCHEME_RAW.equals(scheme)) { + } else if (SCHEME_RAW.equals(scheme) || SCHEME_ANDROID_RESOURCE.equals(scheme)) { dataSource = getRawResourceDataSource(); } else { dataSource = baseDataSource; @@ -178,8 +263,8 @@ public long open(DataSpec dataSpec) throws IOException { } @Override - public int read(byte[] buffer, int offset, int readLength) throws IOException { - return Assertions.checkNotNull(dataSource).read(buffer, offset, readLength); + public int read(byte[] buffer, int offset, int length) throws IOException { + return Assertions.checkNotNull(dataSource).read(buffer, offset, length); } @Override @@ -239,10 +324,8 @@ private DataSource getContentDataSource() { private DataSource getRtmpDataSource() { if (rtmpDataSource == null) { try { - // LINT.IfChange Class clazz = Class.forName("com.google.android.exoplayer2.ext.rtmp.RtmpDataSource"); rtmpDataSource = (DataSource) clazz.getConstructor().newInstance(); - // LINT.ThenChange(../../../../../../../../proguard-rules.txt) addListenersToDataSource(rtmpDataSource); } catch (ClassNotFoundException e) { // Expected if the app was built without the RTMP extension. diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultDataSourceFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultDataSourceFactory.java index 6b1131a3bd..492a0b97b2 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultDataSourceFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultDataSourceFactory.java @@ -20,9 +20,9 @@ import com.google.android.exoplayer2.upstream.DataSource.Factory; /** - * A {@link Factory} that produces {@link DefaultDataSource} instances that delegate to - * {@link DefaultHttpDataSource}s for non-file/asset/content URIs. + * @deprecated Use {@link DefaultDataSource.Factory} instead. 
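As context for the deprecation note above, a minimal migration sketch: code that previously built a DefaultDataSourceFactory with a user agent can construct the same chain through DefaultDataSource.Factory and DefaultHttpDataSource.Factory, both introduced in this diff. The user-agent string and the redirect setting are placeholder choices, not values prescribed by the change.

import android.content.Context;
import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.upstream.DefaultDataSource;
import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;

final class DataSourceFactoryMigrationSketch {

  /**
   * Sketch of a replacement for code that previously did
   * {@code new DefaultDataSourceFactory(context, "my-user-agent")}: configure the HTTP layer
   * explicitly, then wrap it in DefaultDataSource.Factory. The user agent is a placeholder.
   */
  static DataSource.Factory newStyleFactory(Context context) {
    DefaultHttpDataSource.Factory httpFactory =
        new DefaultHttpDataSource.Factory()
            .setUserAgent("my-user-agent") // or null to keep the platform default
            .setAllowCrossProtocolRedirects(true);
    return new DefaultDataSource.Factory(context, httpFactory);
  }
}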
*/ +@Deprecated public final class DefaultDataSourceFactory implements Factory { private final Context context; @@ -30,24 +30,41 @@ public final class DefaultDataSourceFactory implements Factory { private final DataSource.Factory baseDataSourceFactory; /** + * Creates an instance. + * * @param context A context. - * @param userAgent The User-Agent string that should be used. */ - public DefaultDataSourceFactory(Context context, String userAgent) { + public DefaultDataSourceFactory(Context context) { + this(context, /* userAgent= */ (String) null, /* listener= */ null); + } + + /** + * Creates an instance. + * + * @param context A context. + * @param userAgent The user agent that will be used when requesting remote data, or {@code null} + * to use the default user agent of the underlying platform. + */ + public DefaultDataSourceFactory(Context context, @Nullable String userAgent) { this(context, userAgent, /* listener= */ null); } /** + * Creates an instance. + * * @param context A context. - * @param userAgent The User-Agent string that should be used. + * @param userAgent The user agent that will be used when requesting remote data, or {@code null} + * to use the default user agent of the underlying platform. * @param listener An optional listener. */ public DefaultDataSourceFactory( - Context context, String userAgent, @Nullable TransferListener listener) { - this(context, listener, new DefaultHttpDataSourceFactory(userAgent, listener)); + Context context, @Nullable String userAgent, @Nullable TransferListener listener) { + this(context, listener, new DefaultHttpDataSource.Factory().setUserAgent(userAgent)); } /** + * Creates an instance. + * * @param context A context. * @param baseDataSourceFactory A {@link Factory} to be used to create a base {@link DataSource} * for {@link DefaultDataSource}. @@ -58,6 +75,8 @@ public DefaultDataSourceFactory(Context context, DataSource.Factory baseDataSour } /** + * Creates an instance. + * * @param context A context. * @param listener An optional listener. 
* @param baseDataSourceFactory A {@link Factory} to be used to create a base {@link DataSource} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultHttpDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultHttpDataSource.java index ec11ad2348..757de37b8e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultHttpDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultHttpDataSource.java @@ -15,54 +15,202 @@ */ package com.google.android.exoplayer2.upstream; +import static com.google.android.exoplayer2.upstream.HttpUtil.buildRangeRequestHeader; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.min; + import android.net.Uri; -import android.text.TextUtils; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.PlaybackException; import com.google.android.exoplayer2.upstream.DataSpec.HttpMethod; -import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; -import com.google.android.exoplayer2.util.Predicate; import com.google.android.exoplayer2.util.Util; -import java.io.EOFException; +import com.google.common.base.Predicate; +import com.google.common.collect.ForwardingMap; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Sets; +import com.google.common.net.HttpHeaders; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.IOException; import java.io.InputStream; import java.io.InterruptedIOException; import java.io.OutputStream; import java.lang.reflect.Method; import java.net.HttpURLConnection; +import java.net.MalformedURLException; import java.net.NoRouteToHostException; -import java.net.ProtocolException; import java.net.URL; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.atomic.AtomicReference; -import java.util.regex.Matcher; -import java.util.regex.Pattern; +import java.util.Set; import java.util.zip.GZIPInputStream; /** * An {@link HttpDataSource} that uses Android's {@link HttpURLConnection}. * *

      By default this implementation will not follow cross-protocol redirects (i.e. redirects from - * HTTP to HTTPS or vice versa). Cross-protocol redirects can be enabled by using the {@link - * #DefaultHttpDataSource(String, int, int, boolean, RequestProperties)} constructor and passing - * {@code true} for the {@code allowCrossProtocolRedirects} argument. + * HTTP to HTTPS or vice versa). Cross-protocol redirects can be enabled by passing {@code true} to + * {@link DefaultHttpDataSource.Factory#setAllowCrossProtocolRedirects(boolean)}. * *

      Note: HTTP request headers will be set using all parameters passed via (in order of decreasing - * priority) the {@code dataSpec}, {@link #setRequestProperty} and the default parameters used to - * construct the instance. + * priority) the {@code dataSpec}, {@link #setRequestProperty} and the default properties that can + * be passed to {@link HttpDataSource.Factory#setDefaultRequestProperties(Map)}. */ public class DefaultHttpDataSource extends BaseDataSource implements HttpDataSource { + /** {@link DataSource.Factory} for {@link DefaultHttpDataSource} instances. */ + public static final class Factory implements HttpDataSource.Factory { + + private final RequestProperties defaultRequestProperties; + + @Nullable private TransferListener transferListener; + @Nullable private Predicate contentTypePredicate; + @Nullable private String userAgent; + private int connectTimeoutMs; + private int readTimeoutMs; + private boolean allowCrossProtocolRedirects; + private boolean keepPostFor302Redirects; + + /** Creates an instance. */ + public Factory() { + defaultRequestProperties = new RequestProperties(); + connectTimeoutMs = DEFAULT_CONNECT_TIMEOUT_MILLIS; + readTimeoutMs = DEFAULT_READ_TIMEOUT_MILLIS; + } + + @CanIgnoreReturnValue + @Override + public final Factory setDefaultRequestProperties(Map defaultRequestProperties) { + this.defaultRequestProperties.clearAndSet(defaultRequestProperties); + return this; + } + + /** + * Sets the user agent that will be used. + * + *

      The default is {@code null}, which causes the default user agent of the underlying + * platform to be used. + * + * @param userAgent The user agent that will be used, or {@code null} to use the default user + * agent of the underlying platform. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setUserAgent(@Nullable String userAgent) { + this.userAgent = userAgent; + return this; + } + + /** + * Sets the connect timeout, in milliseconds. + * + *

      The default is {@link DefaultHttpDataSource#DEFAULT_CONNECT_TIMEOUT_MILLIS}. + * + * @param connectTimeoutMs The connect timeout, in milliseconds, that will be used. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setConnectTimeoutMs(int connectTimeoutMs) { + this.connectTimeoutMs = connectTimeoutMs; + return this; + } + + /** + * Sets the read timeout, in milliseconds. + * + *

      The default is {@link DefaultHttpDataSource#DEFAULT_READ_TIMEOUT_MILLIS}. + * + * @param readTimeoutMs The read timeout, in milliseconds, that will be used. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setReadTimeoutMs(int readTimeoutMs) { + this.readTimeoutMs = readTimeoutMs; + return this; + } + + /** + * Sets whether to allow cross protocol redirects. + * + *

      The default is {@code false}. + * + * @param allowCrossProtocolRedirects Whether to allow cross protocol redirects. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setAllowCrossProtocolRedirects(boolean allowCrossProtocolRedirects) { + this.allowCrossProtocolRedirects = allowCrossProtocolRedirects; + return this; + } + + /** + * Sets a content type {@link Predicate}. If a content type is rejected by the predicate then a + * {@link HttpDataSource.InvalidContentTypeException} is thrown from {@link + * DefaultHttpDataSource#open(DataSpec)}. + * + *

      The default is {@code null}. + * + * @param contentTypePredicate The content type {@link Predicate}, or {@code null} to clear a + * predicate that was previously set. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setContentTypePredicate(@Nullable Predicate contentTypePredicate) { + this.contentTypePredicate = contentTypePredicate; + return this; + } + + /** + * Sets the {@link TransferListener} that will be used. + * + *

      The default is {@code null}. + * + *

      See {@link DataSource#addTransferListener(TransferListener)}. + * + * @param transferListener The listener that will be used. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setTransferListener(@Nullable TransferListener transferListener) { + this.transferListener = transferListener; + return this; + } + + /** + * Sets whether we should keep the POST method and body when we have HTTP 302 redirects for a + * POST request. + */ + @CanIgnoreReturnValue + public Factory setKeepPostFor302Redirects(boolean keepPostFor302Redirects) { + this.keepPostFor302Redirects = keepPostFor302Redirects; + return this; + } + + @Override + public DefaultHttpDataSource createDataSource() { + DefaultHttpDataSource dataSource = + new DefaultHttpDataSource( + userAgent, + connectTimeoutMs, + readTimeoutMs, + allowCrossProtocolRedirects, + defaultRequestProperties, + contentTypePredicate, + keepPostFor302Redirects); + if (transferListener != null) { + dataSource.addTransferListener(transferListener); + } + return dataSource; + } + } + /** The default connection timeout, in milliseconds. */ public static final int DEFAULT_CONNECT_TIMEOUT_MILLIS = 8 * 1000; - /** - * The default read timeout, in milliseconds. - */ + /** The default read timeout, in milliseconds. */ public static final int DEFAULT_READ_TIMEOUT_MILLIS = 8 * 1000; private static final String TAG = "DefaultHttpDataSource"; @@ -70,16 +218,14 @@ public class DefaultHttpDataSource extends BaseDataSource implements HttpDataSou private static final int HTTP_STATUS_TEMPORARY_REDIRECT = 307; private static final int HTTP_STATUS_PERMANENT_REDIRECT = 308; private static final long MAX_BYTES_TO_DRAIN = 2048; - private static final Pattern CONTENT_RANGE_HEADER = - Pattern.compile("^bytes (\\d+)-(\\d+)/(\\d+)$"); - private static final AtomicReference skipBufferReference = new AtomicReference<>(); private final boolean allowCrossProtocolRedirects; private final int connectTimeoutMillis; private final int readTimeoutMillis; - private final String userAgent; + @Nullable private final String userAgent; @Nullable private final RequestProperties defaultRequestProperties; private final RequestProperties requestProperties; + private final boolean keepPostFor302Redirects; @Nullable private Predicate contentTypePredicate; @Nullable private DataSpec dataSpec; @@ -87,100 +233,36 @@ public class DefaultHttpDataSource extends BaseDataSource implements HttpDataSou @Nullable private InputStream inputStream; private boolean opened; private int responseCode; - - private long bytesToSkip; private long bytesToRead; - - private long bytesSkipped; private long bytesRead; - /** @param userAgent The User-Agent string that should be used. */ - public DefaultHttpDataSource(String userAgent) { - this(userAgent, DEFAULT_CONNECT_TIMEOUT_MILLIS, DEFAULT_READ_TIMEOUT_MILLIS); - } - /** - * @param userAgent The User-Agent string that should be used. - * @param connectTimeoutMillis The connection timeout, in milliseconds. A timeout of zero is - * interpreted as an infinite timeout. - * @param readTimeoutMillis The read timeout, in milliseconds. A timeout of zero is interpreted as - * an infinite timeout. + * @deprecated Use {@link DefaultHttpDataSource.Factory} instead. 
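The deprecated DefaultHttpDataSource constructors in this hunk all point at the new Factory; the sketch below shows one way the old five-argument configuration could be expressed through it. The header name, header value, and user agent are placeholders, and the timeouts simply reuse the class defaults.

import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;
import com.google.android.exoplayer2.upstream.HttpDataSource;
import java.util.Collections;
import java.util.Map;

final class HttpFactoryConfigSketch {

  /**
   * Roughly equivalent to the now-deprecated five-argument constructor
   * (userAgent, connectTimeoutMillis, readTimeoutMillis, allowCrossProtocolRedirects,
   * defaultRequestProperties), expressed via the new Factory. Values are placeholders.
   */
  static HttpDataSource.Factory configuredFactory() {
    Map<String, String> defaultHeaders =
        Collections.singletonMap("X-Example-Header", "example-value"); // hypothetical header
    return new DefaultHttpDataSource.Factory()
        .setUserAgent("my-user-agent")
        .setConnectTimeoutMs(DefaultHttpDataSource.DEFAULT_CONNECT_TIMEOUT_MILLIS)
        .setReadTimeoutMs(DefaultHttpDataSource.DEFAULT_READ_TIMEOUT_MILLIS)
        .setAllowCrossProtocolRedirects(false)
        .setDefaultRequestProperties(defaultHeaders);
  }
}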
*/ - public DefaultHttpDataSource(String userAgent, int connectTimeoutMillis, int readTimeoutMillis) { - this( - userAgent, - connectTimeoutMillis, - readTimeoutMillis, - /* allowCrossProtocolRedirects= */ false, - /* defaultRequestProperties= */ null); - } - - /** - * @param userAgent The User-Agent string that should be used. - * @param connectTimeoutMillis The connection timeout, in milliseconds. A timeout of zero is - * interpreted as an infinite timeout. Pass {@link #DEFAULT_CONNECT_TIMEOUT_MILLIS} to use the - * default value. - * @param readTimeoutMillis The read timeout, in milliseconds. A timeout of zero is interpreted as - * an infinite timeout. Pass {@link #DEFAULT_READ_TIMEOUT_MILLIS} to use the default value. - * @param allowCrossProtocolRedirects Whether cross-protocol redirects (i.e. redirects from HTTP - * to HTTPS and vice versa) are enabled. - * @param defaultRequestProperties The default request properties to be sent to the server as HTTP - * headers or {@code null} if not required. - */ - public DefaultHttpDataSource( - String userAgent, - int connectTimeoutMillis, - int readTimeoutMillis, - boolean allowCrossProtocolRedirects, - @Nullable RequestProperties defaultRequestProperties) { - super(/* isNetwork= */ true); - this.userAgent = Assertions.checkNotEmpty(userAgent); - this.requestProperties = new RequestProperties(); - this.connectTimeoutMillis = connectTimeoutMillis; - this.readTimeoutMillis = readTimeoutMillis; - this.allowCrossProtocolRedirects = allowCrossProtocolRedirects; - this.defaultRequestProperties = defaultRequestProperties; + @SuppressWarnings("deprecation") + @Deprecated + public DefaultHttpDataSource() { + this(/* userAgent= */ null, DEFAULT_CONNECT_TIMEOUT_MILLIS, DEFAULT_READ_TIMEOUT_MILLIS); } /** - * @param userAgent The User-Agent string that should be used. - * @param contentTypePredicate An optional {@link Predicate}. If a content type is rejected by the - * predicate then a {@link HttpDataSource.InvalidContentTypeException} is thrown from {@link - * #open(DataSpec)}. - * @deprecated Use {@link #DefaultHttpDataSource(String)} and {@link - * #setContentTypePredicate(Predicate)}. + * @deprecated Use {@link DefaultHttpDataSource.Factory} instead. */ + @SuppressWarnings("deprecation") @Deprecated - public DefaultHttpDataSource(String userAgent, @Nullable Predicate contentTypePredicate) { - this( - userAgent, - contentTypePredicate, - DEFAULT_CONNECT_TIMEOUT_MILLIS, - DEFAULT_READ_TIMEOUT_MILLIS); + public DefaultHttpDataSource(@Nullable String userAgent) { + this(userAgent, DEFAULT_CONNECT_TIMEOUT_MILLIS, DEFAULT_READ_TIMEOUT_MILLIS); } /** - * @param userAgent The User-Agent string that should be used. - * @param contentTypePredicate An optional {@link Predicate}. If a content type is rejected by the - * predicate then a {@link HttpDataSource.InvalidContentTypeException} is thrown from {@link - * #open(DataSpec)}. - * @param connectTimeoutMillis The connection timeout, in milliseconds. A timeout of zero is - * interpreted as an infinite timeout. - * @param readTimeoutMillis The read timeout, in milliseconds. A timeout of zero is interpreted as - * an infinite timeout. - * @deprecated Use {@link #DefaultHttpDataSource(String, int, int)} and {@link - * #setContentTypePredicate(Predicate)}. + * @deprecated Use {@link DefaultHttpDataSource.Factory} instead. 
*/ @SuppressWarnings("deprecation") @Deprecated public DefaultHttpDataSource( - String userAgent, - @Nullable Predicate contentTypePredicate, - int connectTimeoutMillis, - int readTimeoutMillis) { + @Nullable String userAgent, int connectTimeoutMillis, int readTimeoutMillis) { this( userAgent, - contentTypePredicate, connectTimeoutMillis, readTimeoutMillis, /* allowCrossProtocolRedirects= */ false, @@ -188,47 +270,49 @@ public DefaultHttpDataSource( } /** - * @param userAgent The User-Agent string that should be used. - * @param contentTypePredicate An optional {@link Predicate}. If a content type is rejected by the - * predicate then a {@link HttpDataSource.InvalidContentTypeException} is thrown from {@link - * #open(DataSpec)}. - * @param connectTimeoutMillis The connection timeout, in milliseconds. A timeout of zero is - * interpreted as an infinite timeout. Pass {@link #DEFAULT_CONNECT_TIMEOUT_MILLIS} to use the - * default value. - * @param readTimeoutMillis The read timeout, in milliseconds. A timeout of zero is interpreted as - * an infinite timeout. Pass {@link #DEFAULT_READ_TIMEOUT_MILLIS} to use the default value. - * @param allowCrossProtocolRedirects Whether cross-protocol redirects (i.e. redirects from HTTP - * to HTTPS and vice versa) are enabled. - * @param defaultRequestProperties The default request properties to be sent to the server as HTTP - * headers or {@code null} if not required. - * @deprecated Use {@link #DefaultHttpDataSource(String, int, int, boolean, RequestProperties)} - * and {@link #setContentTypePredicate(Predicate)}. + * @deprecated Use {@link DefaultHttpDataSource.Factory} instead. */ @Deprecated public DefaultHttpDataSource( - String userAgent, - @Nullable Predicate contentTypePredicate, + @Nullable String userAgent, int connectTimeoutMillis, int readTimeoutMillis, boolean allowCrossProtocolRedirects, @Nullable RequestProperties defaultRequestProperties) { + this( + userAgent, + connectTimeoutMillis, + readTimeoutMillis, + allowCrossProtocolRedirects, + defaultRequestProperties, + /* contentTypePredicate= */ null, + /* keepPostFor302Redirects= */ false); + } + + private DefaultHttpDataSource( + @Nullable String userAgent, + int connectTimeoutMillis, + int readTimeoutMillis, + boolean allowCrossProtocolRedirects, + @Nullable RequestProperties defaultRequestProperties, + @Nullable Predicate contentTypePredicate, + boolean keepPostFor302Redirects) { super(/* isNetwork= */ true); - this.userAgent = Assertions.checkNotEmpty(userAgent); - this.contentTypePredicate = contentTypePredicate; - this.requestProperties = new RequestProperties(); + this.userAgent = userAgent; this.connectTimeoutMillis = connectTimeoutMillis; this.readTimeoutMillis = readTimeoutMillis; this.allowCrossProtocolRedirects = allowCrossProtocolRedirects; this.defaultRequestProperties = defaultRequestProperties; + this.contentTypePredicate = contentTypePredicate; + this.requestProperties = new RequestProperties(); + this.keepPostFor302Redirects = keepPostFor302Redirects; } /** - * Sets a content type {@link Predicate}. If a content type is rejected by the predicate then a - * {@link HttpDataSource.InvalidContentTypeException} is thrown from {@link #open(DataSpec)}. - * - * @param contentTypePredicate The content type {@link Predicate}, or {@code null} to clear a - * predicate that was previously set. + * @deprecated Use {@link DefaultHttpDataSource.Factory#setContentTypePredicate(Predicate)} + * instead. 
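For the predicate migration referenced in the deprecation note above (Factory#setContentTypePredicate), a small sketch: a Guava Predicate that accepts only certain MIME types, so that open(DataSpec) fails with InvalidContentTypeException for anything else. The video/-only rule is an arbitrary example, not something mandated by the diff.

import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;
import com.google.common.base.Predicate;

final class ContentTypePredicateSketch {

  /**
   * Rejects unexpected content types up front: open(DataSpec) throws
   * InvalidContentTypeException when the predicate returns false.
   * The accepted type prefix below is a placeholder.
   */
  static DefaultHttpDataSource.Factory videoOnlyFactory() {
    Predicate<String> videoOnly =
        contentType -> contentType != null && contentType.startsWith("video/");
    return new DefaultHttpDataSource.Factory().setContentTypePredicate(videoOnly);
  }
}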
*/ + @Deprecated public void setContentTypePredicate(@Nullable Predicate contentTypePredicate) { this.contentTypePredicate = contentTypePredicate; } @@ -246,19 +330,30 @@ public int getResponseCode() { @Override public Map> getResponseHeaders() { - return connection == null ? Collections.emptyMap() : connection.getHeaderFields(); + if (connection == null) { + return ImmutableMap.of(); + } + // connection.getHeaderFields() always contains a null key with a value like + // ["HTTP/1.1 200 OK"]. The response code is available from HttpURLConnection#getResponseCode() + // and the HTTP version is fixed when establishing the connection. + // DataSource#getResponseHeaders() doesn't allow null keys in the returned map, so we need to + // remove it. + // connection.getHeaderFields() returns a special unmodifiable case-insensitive Map + // so we can't just remove the null key or make a copy without the null key. Instead we wrap it + // in a ForwardingMap subclass that ignores and filters out null keys in the read methods. + return new NullFilteringHeadersMap(connection.getHeaderFields()); } @Override public void setRequestProperty(String name, String value) { - Assertions.checkNotNull(name); - Assertions.checkNotNull(value); + checkNotNull(name); + checkNotNull(value); requestProperties.set(name, value); } @Override public void clearRequestProperty(String name) { - Assertions.checkNotNull(name); + checkNotNull(name); requestProperties.remove(name); } @@ -267,47 +362,61 @@ public void clearAllRequestProperties() { requestProperties.clear(); } - /** - * Opens the source to read the specified data. - */ + /** Opens the source to read the specified data. */ @Override public long open(DataSpec dataSpec) throws HttpDataSourceException { this.dataSpec = dataSpec; - this.bytesRead = 0; - this.bytesSkipped = 0; + bytesRead = 0; + bytesToRead = 0; transferInitializing(dataSpec); - try { - connection = makeConnection(dataSpec); - } catch (IOException e) { - throw new HttpDataSourceException( - "Unable to connect", e, dataSpec, HttpDataSourceException.TYPE_OPEN); - } String responseMessage; + HttpURLConnection connection; try { + this.connection = makeConnection(dataSpec); + connection = this.connection; responseCode = connection.getResponseCode(); responseMessage = connection.getResponseMessage(); } catch (IOException e) { closeConnectionQuietly(); - throw new HttpDataSourceException( - "Unable to connect", e, dataSpec, HttpDataSourceException.TYPE_OPEN); + throw HttpDataSourceException.createForIOException( + e, dataSpec, HttpDataSourceException.TYPE_OPEN); } // Check for a valid response code. if (responseCode < 200 || responseCode > 299) { Map> headers = connection.getHeaderFields(); - closeConnectionQuietly(); - InvalidResponseCodeException exception = - new InvalidResponseCodeException(responseCode, responseMessage, headers, dataSpec); if (responseCode == 416) { - exception.initCause(new DataSourceException(DataSourceException.POSITION_OUT_OF_RANGE)); + long documentSize = + HttpUtil.getDocumentSize(connection.getHeaderField(HttpHeaders.CONTENT_RANGE)); + if (dataSpec.position == documentSize) { + opened = true; + transferStarted(dataSpec); + return dataSpec.length != C.LENGTH_UNSET ? dataSpec.length : 0; + } + } + + @Nullable InputStream errorStream = connection.getErrorStream(); + byte[] errorResponseBody; + try { + errorResponseBody = + errorStream != null ? 
Util.toByteArray(errorStream) : Util.EMPTY_BYTE_ARRAY; + } catch (IOException e) { + errorResponseBody = Util.EMPTY_BYTE_ARRAY; } - throw exception; + closeConnectionQuietly(); + @Nullable + IOException cause = + responseCode == 416 + ? new DataSourceException(PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE) + : null; + throw new InvalidResponseCodeException( + responseCode, responseMessage, cause, headers, dataSpec, errorResponseBody); } // Check for a valid content type. String contentType = connection.getContentType(); - if (contentTypePredicate != null && !contentTypePredicate.evaluate(contentType)) { + if (contentTypePredicate != null && !contentTypePredicate.apply(contentType)) { closeConnectionQuietly(); throw new InvalidContentTypeException(contentType, dataSpec); } @@ -315,7 +424,7 @@ public long open(DataSpec dataSpec) throws HttpDataSourceException { // If we requested a range starting from a non-zero position and received a 200 rather than a // 206, then the server does not support partial requests. We'll need to manually skip to the // requested position. - bytesToSkip = responseCode == 200 && dataSpec.position != 0 ? dataSpec.position : 0; + long bytesToSkip = responseCode == 200 && dataSpec.position != 0 ? dataSpec.position : 0; // Determine the length of the data to be read, after skipping. boolean isCompressed = isCompressed(connection); @@ -323,9 +432,12 @@ public long open(DataSpec dataSpec) throws HttpDataSourceException { if (dataSpec.length != C.LENGTH_UNSET) { bytesToRead = dataSpec.length; } else { - long contentLength = getContentLength(connection); - bytesToRead = contentLength != C.LENGTH_UNSET ? (contentLength - bytesToSkip) - : C.LENGTH_UNSET; + long contentLength = + HttpUtil.getContentLength( + connection.getHeaderField(HttpHeaders.CONTENT_LENGTH), + connection.getHeaderField(HttpHeaders.CONTENT_RANGE)); + bytesToRead = + contentLength != C.LENGTH_UNSET ? (contentLength - bytesToSkip) : C.LENGTH_UNSET; } } else { // Gzip is enabled. 
If the server opts to use gzip then the content length in the response @@ -341,34 +453,60 @@ public long open(DataSpec dataSpec) throws HttpDataSourceException { } } catch (IOException e) { closeConnectionQuietly(); - throw new HttpDataSourceException(e, dataSpec, HttpDataSourceException.TYPE_OPEN); + throw new HttpDataSourceException( + e, + dataSpec, + PlaybackException.ERROR_CODE_IO_UNSPECIFIED, + HttpDataSourceException.TYPE_OPEN); } opened = true; transferStarted(dataSpec); + try { + skipFully(bytesToSkip, dataSpec); + } catch (IOException e) { + closeConnectionQuietly(); + + if (e instanceof HttpDataSourceException) { + throw (HttpDataSourceException) e; + } + throw new HttpDataSourceException( + e, + dataSpec, + PlaybackException.ERROR_CODE_IO_UNSPECIFIED, + HttpDataSourceException.TYPE_OPEN); + } + return bytesToRead; } @Override - public int read(byte[] buffer, int offset, int readLength) throws HttpDataSourceException { + public int read(byte[] buffer, int offset, int length) throws HttpDataSourceException { try { - skipInternal(); - return readInternal(buffer, offset, readLength); + return readInternal(buffer, offset, length); } catch (IOException e) { - throw new HttpDataSourceException(e, dataSpec, HttpDataSourceException.TYPE_READ); + throw HttpDataSourceException.createForIOException( + e, castNonNull(dataSpec), HttpDataSourceException.TYPE_READ); } } @Override public void close() throws HttpDataSourceException { try { + @Nullable InputStream inputStream = this.inputStream; if (inputStream != null) { - maybeTerminateInputStream(connection, bytesRemaining()); + long bytesRemaining = + bytesToRead == C.LENGTH_UNSET ? C.LENGTH_UNSET : bytesToRead - bytesRead; + maybeTerminateInputStream(connection, bytesRemaining); try { inputStream.close(); } catch (IOException e) { - throw new HttpDataSourceException(e, dataSpec, HttpDataSourceException.TYPE_CLOSE); + throw new HttpDataSourceException( + e, + castNonNull(dataSpec), + PlaybackException.ERROR_CODE_IO_UNSPECIFIED, + HttpDataSourceException.TYPE_CLOSE); } } } finally { @@ -381,59 +519,16 @@ public void close() throws HttpDataSourceException { } } - /** - * Returns the current connection, or null if the source is not currently opened. - * - * @return The current open connection, or null. - */ - protected final @Nullable HttpURLConnection getConnection() { - return connection; - } - - /** - * Returns the number of bytes that have been skipped since the most recent call to - * {@link #open(DataSpec)}. - * - * @return The number of bytes skipped. - */ - protected final long bytesSkipped() { - return bytesSkipped; - } - - /** - * Returns the number of bytes that have been read since the most recent call to - * {@link #open(DataSpec)}. - * - * @return The number of bytes read. - */ - protected final long bytesRead() { - return bytesRead; - } - - /** - * Returns the number of bytes that are still to be read for the current {@link DataSpec}. - *

      - * If the total length of the data being read is known, then this length minus {@code bytesRead()} - * is returned. If the total length is unknown, {@link C#LENGTH_UNSET} is returned. - * - * @return The remaining length, or {@link C#LENGTH_UNSET}. - */ - protected final long bytesRemaining() { - return bytesToRead == C.LENGTH_UNSET ? bytesToRead : bytesToRead - bytesRead; - } - - /** - * Establishes a connection, following redirects to do so where permitted. - */ + /** Establishes a connection, following redirects to do so where permitted. */ private HttpURLConnection makeConnection(DataSpec dataSpec) throws IOException { URL url = new URL(dataSpec.uri.toString()); @HttpMethod int httpMethod = dataSpec.httpMethod; - byte[] httpBody = dataSpec.httpBody; + @Nullable byte[] httpBody = dataSpec.httpBody; long position = dataSpec.position; long length = dataSpec.length; boolean allowGzip = dataSpec.isFlagSet(DataSpec.FLAG_ALLOW_GZIP); - if (!allowCrossProtocolRedirects) { + if (!allowCrossProtocolRedirects && !keepPostFor302Redirects) { // HttpURLConnection disallows cross-protocol redirects, but otherwise performs redirection // automatically. This is the behavior we want, so use it. return makeConnection( @@ -447,7 +542,8 @@ private HttpURLConnection makeConnection(DataSpec dataSpec) throws IOException { dataSpec.httpRequestHeaders); } - // We need to handle redirects ourselves to allow cross-protocol redirects. + // We need to handle redirects ourselves to allow cross-protocol redirects or to keep the POST + // request method for 302. int redirectCount = 0; while (redirectCount++ <= MAX_REDIRECTS) { HttpURLConnection connection = @@ -470,24 +566,32 @@ private HttpURLConnection makeConnection(DataSpec dataSpec) throws IOException { || responseCode == HTTP_STATUS_TEMPORARY_REDIRECT || responseCode == HTTP_STATUS_PERMANENT_REDIRECT)) { connection.disconnect(); - url = handleRedirect(url, location); + url = handleRedirect(url, location, dataSpec); } else if (httpMethod == DataSpec.HTTP_METHOD_POST && (responseCode == HttpURLConnection.HTTP_MULT_CHOICE || responseCode == HttpURLConnection.HTTP_MOVED_PERM || responseCode == HttpURLConnection.HTTP_MOVED_TEMP || responseCode == HttpURLConnection.HTTP_SEE_OTHER)) { - // POST request follows the redirect and is transformed into a GET request. connection.disconnect(); - httpMethod = DataSpec.HTTP_METHOD_GET; - httpBody = null; - url = handleRedirect(url, location); + boolean shouldKeepPost = + keepPostFor302Redirects && responseCode == HttpURLConnection.HTTP_MOVED_TEMP; + if (!shouldKeepPost) { + // POST request follows the redirect and is transformed into a GET request. + httpMethod = DataSpec.HTTP_METHOD_GET; + httpBody = null; + } + url = handleRedirect(url, location, dataSpec); } else { return connection; } } // If we get here we've been redirected more times than are permitted. - throw new NoRouteToHostException("Too many redirects: " + redirectCount); + throw new HttpDataSourceException( + new NoRouteToHostException("Too many redirects: " + redirectCount), + dataSpec, + PlaybackException.ERROR_CODE_IO_NETWORK_CONNECTION_FAILED, + HttpDataSourceException.TYPE_OPEN); } /** @@ -495,7 +599,7 @@ private HttpURLConnection makeConnection(DataSpec dataSpec) throws IOException { * * @param url The url to connect to. * @param httpMethod The http method. - * @param httpBody The body data. + * @param httpBody The body data, or {@code null} if not required. * @param position The byte offset of the requested data. 
* @param length The length of the requested data, or {@link C#LENGTH_UNSET}. * @param allowGzip Whether to allow the use of gzip. @@ -505,7 +609,7 @@ private HttpURLConnection makeConnection(DataSpec dataSpec) throws IOException { private HttpURLConnection makeConnection( URL url, @HttpMethod int httpMethod, - byte[] httpBody, + @Nullable byte[] httpBody, long position, long length, boolean allowGzip, @@ -527,19 +631,18 @@ private HttpURLConnection makeConnection( connection.setRequestProperty(property.getKey(), property.getValue()); } - if (!(position == 0 && length == C.LENGTH_UNSET)) { - String rangeRequest = "bytes=" + position + "-"; - if (length != C.LENGTH_UNSET) { - rangeRequest += (position + length - 1); - } - connection.setRequestProperty("Range", rangeRequest); + @Nullable String rangeHeader = buildRangeRequestHeader(position, length); + if (rangeHeader != null) { + connection.setRequestProperty(HttpHeaders.RANGE, rangeHeader); + } + if (userAgent != null) { + connection.setRequestProperty(HttpHeaders.USER_AGENT, userAgent); } - connection.setRequestProperty("User-Agent", userAgent); - connection.setRequestProperty("Accept-Encoding", allowGzip ? "gzip" : "identity"); + connection.setRequestProperty(HttpHeaders.ACCEPT_ENCODING, allowGzip ? "gzip" : "identity"); connection.setInstanceFollowRedirects(followRedirects); connection.setDoOutput(httpBody != null); connection.setRequestMethod(DataSpec.getStringForHttpMethod(httpMethod)); - + if (httpBody != null) { connection.setFixedLengthStreamingMode(httpBody.length); connection.connect(); @@ -562,117 +665,95 @@ private HttpURLConnection makeConnection( * Handles a redirect. * * @param originalUrl The original URL. - * @param location The Location header in the response. + * @param location The Location header in the response. May be {@code null}. + * @param dataSpec The {@link DataSpec}. * @return The next URL. - * @throws IOException If redirection isn't possible. + * @throws HttpDataSourceException If redirection isn't possible. */ - private static URL handleRedirect(URL originalUrl, String location) throws IOException { + private URL handleRedirect(URL originalUrl, @Nullable String location, DataSpec dataSpec) + throws HttpDataSourceException { if (location == null) { - throw new ProtocolException("Null location redirect"); + throw new HttpDataSourceException( + "Null location redirect", + dataSpec, + PlaybackException.ERROR_CODE_IO_NETWORK_CONNECTION_FAILED, + HttpDataSourceException.TYPE_OPEN); } // Form the new url. - URL url = new URL(originalUrl, location); + URL url; + try { + url = new URL(originalUrl, location); + } catch (MalformedURLException e) { + throw new HttpDataSourceException( + e, + dataSpec, + PlaybackException.ERROR_CODE_IO_NETWORK_CONNECTION_FAILED, + HttpDataSourceException.TYPE_OPEN); + } + // Check that the protocol of the new url is supported. String protocol = url.getProtocol(); if (!"https".equals(protocol) && !"http".equals(protocol)) { - throw new ProtocolException("Unsupported protocol redirect: " + protocol); - } - // Currently this method is only called if allowCrossProtocolRedirects is true, and so the code - // below isn't required. If we ever decide to handle redirects ourselves when cross-protocol - // redirects are disabled, we'll need to uncomment this block of code. 
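// [Editor's aside: illustrative sketch only, not part of this patch; assumes java.net.URL,
// java.net.ProtocolException and java.io.IOException are imported.] handleRedirect resolves the
// Location header against the original URL via java.net.URL, so relative and absolute redirect
// targets both work, and comparing protocols is what enforces the allowCrossProtocolRedirects
// setting. Reduced to a standalone helper, the resolution step looks roughly like this:
static URL resolveRedirect(URL original, String location, boolean allowCrossProtocol)
    throws IOException {
  URL resolved = new URL(original, location); // Handles "seg2.ts", "/a/b.ts" or a full URL.
  String protocol = resolved.getProtocol();
  if (!"https".equals(protocol) && !"http".equals(protocol)) {
    throw new ProtocolException("Unsupported protocol redirect: " + protocol);
  }
  if (!allowCrossProtocol && !protocol.equals(original.getProtocol())) {
    throw new ProtocolException(
        "Disallowed cross-protocol redirect (" + original.getProtocol() + " to " + protocol + ")");
  }
  return resolved;
}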
- // if (!allowCrossProtocolRedirects && !protocol.equals(originalUrl.getProtocol())) { - // throw new ProtocolException("Disallowed cross-protocol redirect (" - // + originalUrl.getProtocol() + " to " + protocol + ")"); - // } - return url; - } - - /** - * Attempts to extract the length of the content from the response headers of an open connection. - * - * @param connection The open connection. - * @return The extracted length, or {@link C#LENGTH_UNSET}. - */ - private static long getContentLength(HttpURLConnection connection) { - long contentLength = C.LENGTH_UNSET; - String contentLengthHeader = connection.getHeaderField("Content-Length"); - if (!TextUtils.isEmpty(contentLengthHeader)) { - try { - contentLength = Long.parseLong(contentLengthHeader); - } catch (NumberFormatException e) { - Log.e(TAG, "Unexpected Content-Length [" + contentLengthHeader + "]"); - } + throw new HttpDataSourceException( + "Unsupported protocol redirect: " + protocol, + dataSpec, + PlaybackException.ERROR_CODE_IO_NETWORK_CONNECTION_FAILED, + HttpDataSourceException.TYPE_OPEN); } - String contentRangeHeader = connection.getHeaderField("Content-Range"); - if (!TextUtils.isEmpty(contentRangeHeader)) { - Matcher matcher = CONTENT_RANGE_HEADER.matcher(contentRangeHeader); - if (matcher.find()) { - try { - long contentLengthFromRange = - Long.parseLong(matcher.group(2)) - Long.parseLong(matcher.group(1)) + 1; - if (contentLength < 0) { - // Some proxy servers strip the Content-Length header. Fall back to the length - // calculated here in this case. - contentLength = contentLengthFromRange; - } else if (contentLength != contentLengthFromRange) { - // If there is a discrepancy between the Content-Length and Content-Range headers, - // assume the one with the larger value is correct. We have seen cases where carrier - // change one of them to reduce the size of a request, but it is unlikely anybody would - // increase it. - Log.w(TAG, "Inconsistent headers [" + contentLengthHeader + "] [" + contentRangeHeader - + "]"); - contentLength = Math.max(contentLength, contentLengthFromRange); - } - } catch (NumberFormatException e) { - Log.e(TAG, "Unexpected Content-Range [" + contentRangeHeader + "]"); - } - } + if (!allowCrossProtocolRedirects && !protocol.equals(originalUrl.getProtocol())) { + throw new HttpDataSourceException( + "Disallowed cross-protocol redirect (" + + originalUrl.getProtocol() + + " to " + + protocol + + ")", + dataSpec, + PlaybackException.ERROR_CODE_IO_NETWORK_CONNECTION_FAILED, + HttpDataSourceException.TYPE_OPEN); } - return contentLength; + return url; } /** - * Skips any bytes that need skipping. Else does nothing. - *
<p>
      - * This implementation is based roughly on {@code libcore.io.Streams.skipByReading()}. + * Attempts to skip the specified number of bytes in full. * - * @throws InterruptedIOException If the thread is interrupted during the operation. - * @throws EOFException If the end of the input stream is reached before the bytes are skipped. + * @param bytesToSkip The number of bytes to skip. + * @param dataSpec The {@link DataSpec}. + * @throws IOException If the thread is interrupted during the operation, or if the data ended + * before skipping the specified number of bytes. */ - private void skipInternal() throws IOException { - if (bytesSkipped == bytesToSkip) { + private void skipFully(long bytesToSkip, DataSpec dataSpec) throws IOException { + if (bytesToSkip == 0) { return; } - - // Acquire the shared skip buffer. - byte[] skipBuffer = skipBufferReference.getAndSet(null); - if (skipBuffer == null) { - skipBuffer = new byte[4096]; - } - - while (bytesSkipped != bytesToSkip) { - int readLength = (int) Math.min(bytesToSkip - bytesSkipped, skipBuffer.length); - int read = inputStream.read(skipBuffer, 0, readLength); + byte[] skipBuffer = new byte[4096]; + while (bytesToSkip > 0) { + int readLength = (int) min(bytesToSkip, skipBuffer.length); + int read = castNonNull(inputStream).read(skipBuffer, 0, readLength); if (Thread.currentThread().isInterrupted()) { - throw new InterruptedIOException(); + throw new HttpDataSourceException( + new InterruptedIOException(), + dataSpec, + PlaybackException.ERROR_CODE_IO_UNSPECIFIED, + HttpDataSourceException.TYPE_OPEN); } if (read == -1) { - throw new EOFException(); + throw new HttpDataSourceException( + dataSpec, + PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE, + HttpDataSourceException.TYPE_OPEN); } - bytesSkipped += read; + bytesToSkip -= read; bytesTransferred(read); } - - // Release the shared skip buffer. - skipBufferReference.set(skipBuffer); } /** - * Reads up to {@code length} bytes of data and stores them into {@code buffer}, starting at - * index {@code offset}. - *
<p>
      - * This method blocks until at least one byte of data can be read, the end of the opened range is - * detected, or an exception is thrown. + * Reads up to {@code length} bytes of data and stores them into {@code buffer}, starting at index + * {@code offset}. + * + *
<p>
      This method blocks until at least one byte of data can be read, the end of the opened range + * is detected, or an exception is thrown. * * @param buffer The buffer into which the read data should be stored. * @param offset The start offset into {@code buffer} at which data should be written. @@ -690,15 +771,11 @@ private int readInternal(byte[] buffer, int offset, int readLength) throws IOExc if (bytesRemaining == 0) { return C.RESULT_END_OF_INPUT; } - readLength = (int) Math.min(readLength, bytesRemaining); + readLength = (int) min(readLength, bytesRemaining); } - int read = inputStream.read(buffer, offset, readLength); + int read = castNonNull(inputStream).read(buffer, offset, readLength); if (read == -1) { - if (bytesToRead != C.LENGTH_UNSET) { - // End of stream reached having not read sufficient data. - throw new EOFException(); - } return C.RESULT_END_OF_INPUT; } @@ -718,8 +795,9 @@ private int readInternal(byte[] buffer, int offset, int readLength) throws IOExc * @param bytesRemaining The number of bytes remaining to be read from the input stream if its * length is known. {@link C#LENGTH_UNSET} otherwise. */ - private static void maybeTerminateInputStream(HttpURLConnection connection, long bytesRemaining) { - if (Util.SDK_INT != 19 && Util.SDK_INT != 20) { + private static void maybeTerminateInputStream( + @Nullable HttpURLConnection connection, long bytesRemaining) { + if (connection == null || Util.SDK_INT < 19 || Util.SDK_INT > 20) { return; } @@ -740,7 +818,8 @@ private static void maybeTerminateInputStream(HttpURLConnection connection, long || "com.android.okhttp.internal.http.HttpTransport$FixedLengthInputStream" .equals(className)) { Class superclass = inputStream.getClass().getSuperclass(); - Method unexpectedEndOfInput = superclass.getDeclaredMethod("unexpectedEndOfInput"); + Method unexpectedEndOfInput = + checkNotNull(superclass).getDeclaredMethod("unexpectedEndOfInput"); unexpectedEndOfInput.setAccessible(true); unexpectedEndOfInput.invoke(inputStream); } @@ -751,10 +830,7 @@ private static void maybeTerminateInputStream(HttpURLConnection connection, long } } - - /** - * Closes the current connection quietly, if there is one. - */ + /** Closes the current connection quietly, if there is one. */ private void closeConnectionQuietly() { if (connection != null) { try { @@ -770,4 +846,64 @@ private static boolean isCompressed(HttpURLConnection connection) { String contentEncoding = connection.getHeaderField("Content-Encoding"); return "gzip".equalsIgnoreCase(contentEncoding); } + + private static class NullFilteringHeadersMap extends ForwardingMap> { + + private final Map> headers; + + public NullFilteringHeadersMap(Map> headers) { + this.headers = headers; + } + + @Override + protected Map> delegate() { + return headers; + } + + @Override + public boolean containsKey(@Nullable Object key) { + return key != null && super.containsKey(key); + } + + @Nullable + @Override + public List get(@Nullable Object key) { + return key == null ? null : super.get(key); + } + + @Override + public Set keySet() { + return Sets.filter(super.keySet(), key -> key != null); + } + + @Override + public Set>> entrySet() { + return Sets.filter(super.entrySet(), entry -> entry.getKey() != null); + } + + @Override + public int size() { + return super.size() - (super.containsKey(null) ? 
1 : 0); + } + + @Override + public boolean isEmpty() { + return super.isEmpty() || (super.size() == 1 && super.containsKey(null)); + } + + @Override + public boolean containsValue(@Nullable Object value) { + return super.standardContainsValue(value); + } + + @Override + public boolean equals(@Nullable Object object) { + return object != null && super.standardEquals(object); + } + + @Override + public int hashCode() { + return super.standardHashCode(); + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultLoadErrorHandlingPolicy.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultLoadErrorHandlingPolicy.java index 435f4bf578..422142d119 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultLoadErrorHandlingPolicy.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DefaultLoadErrorHandlingPolicy.java @@ -15,8 +15,12 @@ */ package com.google.android.exoplayer2.upstream; +import static java.lang.Math.min; + +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.upstream.HttpDataSource.CleartextNotPermittedException; import com.google.android.exoplayer2.upstream.HttpDataSource.InvalidResponseCodeException; import com.google.android.exoplayer2.upstream.Loader.UnexpectedLoaderException; import java.io.FileNotFoundException; @@ -32,8 +36,14 @@ public class DefaultLoadErrorHandlingPolicy implements LoadErrorHandlingPolicy { * streams. */ public static final int DEFAULT_MIN_LOADABLE_RETRY_COUNT_PROGRESSIVE_LIVE = 6; - /** The default duration for which a track is blacklisted in milliseconds. */ - public static final long DEFAULT_TRACK_BLACKLIST_MS = 60000; + /** The default duration for which a track is excluded in milliseconds. */ + public static final long DEFAULT_TRACK_EXCLUSION_MS = 60_000; + /** + * @deprecated Use {@link #DEFAULT_TRACK_EXCLUSION_MS} instead. + */ + @Deprecated public static final long DEFAULT_TRACK_BLACKLIST_MS = DEFAULT_TRACK_EXCLUSION_MS; + /** The default duration for which a location is excluded in milliseconds. */ + public static final long DEFAULT_LOCATION_EXCLUSION_MS = 5 * 60_000; private static final int DEFAULT_BEHAVIOR_MIN_LOADABLE_RETRY_COUNT = -1; @@ -61,36 +71,52 @@ public DefaultLoadErrorHandlingPolicy(int minimumLoadableRetryCount) { } /** - * Blacklists resources whose load error was an {@link InvalidResponseCodeException} with response - * code HTTP 404 or 410. The duration of the blacklisting is {@link #DEFAULT_TRACK_BLACKLIST_MS}. + * Returns whether a loader should fall back to using another resource on encountering an error, + * and if so the duration for which the failing resource should be excluded. + * + *
<ul>
+ *   <li>This policy will only specify a fallback if {@link #isEligibleForFallback} returns {@code
+ *       true} for the error.
+ *   <li>This policy will always specify a location fallback rather than a track fallback if both
+ *       {@link FallbackOptions#isFallbackAvailable(int) are available}.
+ *   <li>When a fallback is specified, the duration for which the failing resource will be
+ *       excluded is {@link #DEFAULT_LOCATION_EXCLUSION_MS} or {@link
+ *       #DEFAULT_TRACK_EXCLUSION_MS}, depending on the fallback type.
+ * </ul>
      */ @Override - public long getBlacklistDurationMsFor( - int dataType, long loadDurationMs, IOException exception, int errorCount) { - if (exception instanceof InvalidResponseCodeException) { - int responseCode = ((InvalidResponseCodeException) exception).responseCode; - return responseCode == 404 // HTTP 404 Not Found. - || responseCode == 410 // HTTP 410 Gone. - || responseCode == 416 // HTTP 416 Range Not Satisfiable. - ? DEFAULT_TRACK_BLACKLIST_MS - : C.TIME_UNSET; + @Nullable + public FallbackSelection getFallbackSelectionFor( + FallbackOptions fallbackOptions, LoadErrorInfo loadErrorInfo) { + if (!isEligibleForFallback(loadErrorInfo.exception)) { + return null; } - return C.TIME_UNSET; + // Prefer location fallbacks to track fallbacks, when both are available. + if (fallbackOptions.isFallbackAvailable(FALLBACK_TYPE_LOCATION)) { + return new FallbackSelection(FALLBACK_TYPE_LOCATION, DEFAULT_LOCATION_EXCLUSION_MS); + } else if (fallbackOptions.isFallbackAvailable(FALLBACK_TYPE_TRACK)) { + return new FallbackSelection(FALLBACK_TYPE_TRACK, DEFAULT_TRACK_EXCLUSION_MS); + } + return null; } /** * Retries for any exception that is not a subclass of {@link ParserException}, {@link - * FileNotFoundException} or {@link UnexpectedLoaderException}. The retry delay is calculated as - * {@code Math.min((errorCount - 1) * 1000, 5000)}. + * FileNotFoundException}, {@link CleartextNotPermittedException} or {@link + * UnexpectedLoaderException}, and for which {@link + * DataSourceException#isCausedByPositionOutOfRange} returns {@code false}. The retry delay is + * calculated as {@code Math.min((errorCount - 1) * 1000, 5000)}. */ @Override - public long getRetryDelayMsFor( - int dataType, long loadDurationMs, IOException exception, int errorCount) { + public long getRetryDelayMsFor(LoadErrorInfo loadErrorInfo) { + IOException exception = loadErrorInfo.exception; return exception instanceof ParserException || exception instanceof FileNotFoundException + || exception instanceof CleartextNotPermittedException || exception instanceof UnexpectedLoaderException + || DataSourceException.isCausedByPositionOutOfRange(exception) ? C.TIME_UNSET - : Math.min((errorCount - 1) * 1000, 5000); + : min((loadErrorInfo.errorCount - 1) * 1000, 5000); } /** @@ -107,4 +133,19 @@ public int getMinimumLoadableRetryCount(int dataType) { return minimumLoadableRetryCount; } } + + /** Returns whether an error should trigger a fallback if possible. */ + protected boolean isEligibleForFallback(IOException exception) { + if (!(exception instanceof InvalidResponseCodeException)) { + return false; + } + InvalidResponseCodeException invalidResponseCodeException = + (InvalidResponseCodeException) exception; + return invalidResponseCodeException.responseCode == 403 // HTTP 403 Forbidden. + || invalidResponseCodeException.responseCode == 404 // HTTP 404 Not Found. + || invalidResponseCodeException.responseCode == 410 // HTTP 410 Gone. + || invalidResponseCodeException.responseCode == 416 // HTTP 416 Range Not Satisfiable. + || invalidResponseCodeException.responseCode == 500 // HTTP 500 Internal Server Error. + || invalidResponseCodeException.responseCode == 503; // HTTP 503 Service Unavailable. 
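// [Editor's aside: hypothetical example, not part of this patch; assumes the
// com.google.android.exoplayer2.upstream classes above are imported.] Because
// isEligibleForFallback is protected, an application policy can widen or narrow the set of
// errors that trigger a fallback without reimplementing getFallbackSelectionFor. For instance,
// also treating HTTP 429 as eligible (the 429 handling is purely illustrative):
public final class RateLimitAwareLoadErrorHandlingPolicy extends DefaultLoadErrorHandlingPolicy {
  @Override
  protected boolean isEligibleForFallback(java.io.IOException exception) {
    if (exception instanceof HttpDataSource.InvalidResponseCodeException
        && ((HttpDataSource.InvalidResponseCodeException) exception).responseCode == 429) {
      return true; // Also fall back on HTTP 429 Too Many Requests.
    }
    return super.isEligibleForFallback(exception);
  }
}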
+ } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DummyDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DummyDataSource.java deleted file mode 100644 index 4124a2531f..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/DummyDataSource.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (C) 2017 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.upstream; - -import android.net.Uri; -import androidx.annotation.Nullable; -import java.io.IOException; - -/** - * A dummy DataSource which provides no data. {@link #open(DataSpec)} throws {@link IOException}. - */ -public final class DummyDataSource implements DataSource { - - public static final DummyDataSource INSTANCE = new DummyDataSource(); - - /** A factory that produces {@link DummyDataSource}. */ - public static final Factory FACTORY = DummyDataSource::new; - - private DummyDataSource() {} - - @Override - public void addTransferListener(TransferListener transferListener) { - // Do nothing. - } - - @Override - public long open(DataSpec dataSpec) throws IOException { - throw new IOException("Dummy source"); - } - - @Override - public int read(byte[] buffer, int offset, int readLength) { - throw new UnsupportedOperationException(); - } - - @Override - @Nullable - public Uri getUri() { - return null; - } - - @Override - public void close() { - // do nothing. - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/FileDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/FileDataSource.java index 2661469efd..1b8ef9749d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/FileDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/FileDataSource.java @@ -16,13 +16,20 @@ package com.google.android.exoplayer2.upstream; import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.min; import android.net.Uri; +import android.system.ErrnoException; +import android.system.OsConstants; import android.text.TextUtils; +import androidx.annotation.DoNotInline; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.PlaybackException; import com.google.android.exoplayer2.util.Assertions; -import java.io.EOFException; +import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.FileNotFoundException; import java.io.IOException; import java.io.RandomAccessFile; @@ -31,14 +38,35 @@ public final class FileDataSource extends BaseDataSource { /** Thrown when a {@link FileDataSource} encounters an error reading a file. 
*/ - public static class FileDataSourceException extends IOException { + public static class FileDataSourceException extends DataSourceException { - public FileDataSourceException(IOException cause) { - super(cause); + /** + * @deprecated Use {@link #FileDataSourceException(Throwable, int)} + */ + @Deprecated + public FileDataSourceException(Exception cause) { + super(cause, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } + /** + * @deprecated Use {@link #FileDataSourceException(String, Throwable, int)} + */ + @Deprecated public FileDataSourceException(String message, IOException cause) { - super(message, cause); + super(message, cause, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); + } + + /** Creates a {@code FileDataSourceException}. */ + public FileDataSourceException(Throwable cause, @PlaybackException.ErrorCode int errorCode) { + super(cause, errorCode); + } + + /** Creates a {@code FileDataSourceException}. */ + public FileDataSourceException( + @Nullable String message, + @Nullable Throwable cause, + @PlaybackException.ErrorCode int errorCode) { + super(message, cause, errorCode); } } @@ -53,6 +81,7 @@ public static final class Factory implements DataSource.Factory { * @param listener The {@link TransferListener}. * @return This factory. */ + @CanIgnoreReturnValue public Factory setListener(@Nullable TransferListener listener) { this.listener = listener; return this; @@ -79,22 +108,22 @@ public FileDataSource() { @Override public long open(DataSpec dataSpec) throws FileDataSourceException { + Uri uri = dataSpec.uri; + this.uri = uri; + transferInitializing(dataSpec); + this.file = openLocalFile(uri); try { - Uri uri = dataSpec.uri; - this.uri = uri; - - transferInitializing(dataSpec); - - this.file = openLocalFile(uri); - file.seek(dataSpec.position); - bytesRemaining = dataSpec.length == C.LENGTH_UNSET ? file.length() - dataSpec.position - : dataSpec.length; - if (bytesRemaining < 0) { - throw new EOFException(); - } + bytesRemaining = + dataSpec.length == C.LENGTH_UNSET ? file.length() - dataSpec.position : dataSpec.length; } catch (IOException e) { - throw new FileDataSourceException(e); + throw new FileDataSourceException(e, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); + } + if (bytesRemaining < 0) { + throw new FileDataSourceException( + /* message= */ null, + /* cause= */ null, + PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE); } opened = true; @@ -103,36 +132,18 @@ public long open(DataSpec dataSpec) throws FileDataSourceException { return bytesRemaining; } - private static RandomAccessFile openLocalFile(Uri uri) throws FileDataSourceException { - try { - return new RandomAccessFile(Assertions.checkNotNull(uri.getPath()), "r"); - } catch (FileNotFoundException e) { - if (!TextUtils.isEmpty(uri.getQuery()) || !TextUtils.isEmpty(uri.getFragment())) { - throw new FileDataSourceException( - String.format( - "uri has query and/or fragment, which are not supported. Did you call Uri.parse()" - + " on a string containing '?' or '#'? Use Uri.fromFile(new File(path)) to" - + " avoid this. 
path=%s,query=%s,fragment=%s", - uri.getPath(), uri.getQuery(), uri.getFragment()), - e); - } - throw new FileDataSourceException(e); - } - } - @Override - public int read(byte[] buffer, int offset, int readLength) throws FileDataSourceException { - if (readLength == 0) { + public int read(byte[] buffer, int offset, int length) throws FileDataSourceException { + if (length == 0) { return 0; } else if (bytesRemaining == 0) { return C.RESULT_END_OF_INPUT; } else { int bytesRead; try { - bytesRead = - castNonNull(file).read(buffer, offset, (int) Math.min(bytesRemaining, readLength)); + bytesRead = castNonNull(file).read(buffer, offset, (int) min(bytesRemaining, length)); } catch (IOException e) { - throw new FileDataSourceException(e); + throw new FileDataSourceException(e, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } if (bytesRead > 0) { @@ -158,7 +169,7 @@ public void close() throws FileDataSourceException { file.close(); } } catch (IOException e) { - throw new FileDataSourceException(e); + throw new FileDataSourceException(e, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } finally { file = null; if (opened) { @@ -168,4 +179,40 @@ public void close() throws FileDataSourceException { } } + private static RandomAccessFile openLocalFile(Uri uri) throws FileDataSourceException { + try { + return new RandomAccessFile(Assertions.checkNotNull(uri.getPath()), "r"); + } catch (FileNotFoundException e) { + if (!TextUtils.isEmpty(uri.getQuery()) || !TextUtils.isEmpty(uri.getFragment())) { + throw new FileDataSourceException( + String.format( + "uri has query and/or fragment, which are not supported. Did you call Uri.parse()" + + " on a string containing '?' or '#'? Use Uri.fromFile(new File(path)) to" + + " avoid this. path=%s,query=%s,fragment=%s", + uri.getPath(), uri.getQuery(), uri.getFragment()), + e, + PlaybackException.ERROR_CODE_FAILED_RUNTIME_CHECK); + } + + // TODO(internal b/193503588): Add tests to ensure the correct error codes are assigned under + // different SDK versions. + throw new FileDataSourceException( + e, + Util.SDK_INT >= 21 && Api21.isPermissionError(e.getCause()) + ? PlaybackException.ERROR_CODE_IO_NO_PERMISSION + : PlaybackException.ERROR_CODE_IO_FILE_NOT_FOUND); + } catch (SecurityException e) { + throw new FileDataSourceException(e, PlaybackException.ERROR_CODE_IO_NO_PERMISSION); + } catch (RuntimeException e) { + throw new FileDataSourceException(e, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); + } + } + + @RequiresApi(21) + private static final class Api21 { + @DoNotInline + private static boolean isPermissionError(@Nullable Throwable e) { + return e instanceof ErrnoException && ((ErrnoException) e).errno == OsConstants.EACCES; + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/FileDataSourceFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/FileDataSourceFactory.java deleted file mode 100644 index 004a68fdaf..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/FileDataSourceFactory.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.upstream; - -import androidx.annotation.Nullable; - -/** @deprecated Use {@link FileDataSource.Factory}. */ -@Deprecated -public final class FileDataSourceFactory implements DataSource.Factory { - - private final FileDataSource.Factory wrappedFactory; - - public FileDataSourceFactory() { - this(/* listener= */ null); - } - - public FileDataSourceFactory(@Nullable TransferListener listener) { - wrappedFactory = new FileDataSource.Factory().setListener(listener); - } - - @Override - public FileDataSource createDataSource() { - return wrappedFactory.createDataSource(); - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/HttpDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/HttpDataSource.java index 63cad8786b..7de2702664 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/HttpDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/HttpDataSource.java @@ -15,81 +15,61 @@ */ package com.google.android.exoplayer2.upstream; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.text.TextUtils; import androidx.annotation.IntDef; import androidx.annotation.Nullable; -import com.google.android.exoplayer2.util.Predicate; +import com.google.android.exoplayer2.PlaybackException; import com.google.android.exoplayer2.util.Util; +import com.google.common.base.Ascii; +import com.google.common.base.Predicate; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.IOException; +import java.io.InterruptedIOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.net.SocketTimeoutException; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -/** - * An HTTP {@link DataSource}. - */ +/** An HTTP {@link DataSource}. */ public interface HttpDataSource extends DataSource { - /** - * A factory for {@link HttpDataSource} instances. - */ + /** A factory for {@link HttpDataSource} instances. */ interface Factory extends DataSource.Factory { @Override HttpDataSource createDataSource(); /** - * Gets the default request properties used by all {@link HttpDataSource}s created by the - * factory. Changes to the properties will be reflected in any future requests made by - * {@link HttpDataSource}s created by the factory. - * - * @return The default request properties of the factory. - */ - RequestProperties getDefaultRequestProperties(); - - /** - * Sets a default request header for {@link HttpDataSource} instances created by the factory. - * - * @deprecated Use {@link #getDefaultRequestProperties} instead. - * @param name The name of the header field. - * @param value The value of the field. - */ - @Deprecated - void setDefaultRequestProperty(String name, String value); - - /** - * Clears a default request header for {@link HttpDataSource} instances created by the factory. 
+ * Sets the default request headers for {@link HttpDataSource} instances created by the factory. * - * @deprecated Use {@link #getDefaultRequestProperties} instead. - * @param name The name of the header field. - */ - @Deprecated - void clearDefaultRequestProperty(String name); - - /** - * Clears all default request headers for all {@link HttpDataSource} instances created by the - * factory. + *
<p>
      The new request properties will be used for future requests made by {@link HttpDataSource + * HttpDataSources} created by the factory, including instances that have already been created. + * Modifying the {@code defaultRequestProperties} map after a call to this method will have no + * effect, and so it's necessary to call this method again each time the request properties need + * to be updated. * - * @deprecated Use {@link #getDefaultRequestProperties} instead. + * @param defaultRequestProperties The default request properties. + * @return This factory. */ - @Deprecated - void clearAllDefaultRequestProperties(); - + Factory setDefaultRequestProperties(Map defaultRequestProperties); } /** - * Stores HTTP request properties (aka HTTP headers) and provides methods to modify the headers - * in a thread safe way to avoid the potential of creating snapshots of an inconsistent or - * unintended state. + * Stores HTTP request properties (aka HTTP headers) and provides methods to modify the headers in + * a thread safe way to avoid the potential of creating snapshots of an inconsistent or unintended + * state. */ final class RequestProperties { private final Map requestProperties; - private Map requestPropertiesSnapshot; + @Nullable private Map requestPropertiesSnapshot; public RequestProperties() { requestProperties = new HashMap<>(); @@ -140,9 +120,7 @@ public synchronized void remove(String name) { requestProperties.remove(name); } - /** - * Clears all request properties. - */ + /** Clears all request properties. */ public synchronized void clear() { requestPropertiesSnapshot = null; requestProperties.clear(); @@ -159,12 +137,9 @@ public synchronized Map getSnapshot() { } return requestPropertiesSnapshot; } - } - /** - * Base implementation of {@link Factory} that sets default request properties. - */ + /** Base implementation of {@link Factory} that sets default request properties. */ abstract class BaseFactory implements Factory { private final RequestProperties defaultRequestProperties; @@ -178,30 +153,11 @@ public final HttpDataSource createDataSource() { return createDataSourceInternal(defaultRequestProperties); } + @CanIgnoreReturnValue @Override - public final RequestProperties getDefaultRequestProperties() { - return defaultRequestProperties; - } - - /** @deprecated Use {@link #getDefaultRequestProperties} instead. */ - @Deprecated - @Override - public final void setDefaultRequestProperty(String name, String value) { - defaultRequestProperties.set(name, value); - } - - /** @deprecated Use {@link #getDefaultRequestProperties} instead. */ - @Deprecated - @Override - public final void clearDefaultRequestProperty(String name) { - defaultRequestProperties.remove(name); - } - - /** @deprecated Use {@link #getDefaultRequestProperties} instead. */ - @Deprecated - @Override - public final void clearAllDefaultRequestProperties() { - defaultRequestProperties.clear(); + public final Factory setDefaultRequestProperties(Map defaultRequestProperties) { + this.defaultRequestProperties.clearAndSet(defaultRequestProperties); + return this; } /** @@ -211,81 +167,228 @@ public final void clearAllDefaultRequestProperties() { * {@link HttpDataSource} instance. * @return A {@link HttpDataSource} instance. */ - protected abstract HttpDataSource createDataSourceInternal(RequestProperties - defaultRequestProperties); - + protected abstract HttpDataSource createDataSourceInternal( + RequestProperties defaultRequestProperties); } /** A {@link Predicate} that rejects content types often used for pay-walls. 
*/ Predicate REJECT_PAYWALL_TYPES = contentType -> { - contentType = Util.toLowerInvariant(contentType); + if (contentType == null) { + return false; + } + contentType = Ascii.toLowerCase(contentType); return !TextUtils.isEmpty(contentType) && (!contentType.contains("text") || contentType.contains("text/vtt")) && !contentType.contains("html") && !contentType.contains("xml"); }; - /** - * Thrown when an error is encountered when trying to read from a {@link HttpDataSource}. - */ - class HttpDataSourceException extends IOException { + /** Thrown when an error is encountered when trying to read from a {@link HttpDataSource}. */ + class HttpDataSourceException extends DataSourceException { + /** + * The type of operation that produced the error. One of {@link #TYPE_READ}, {@link #TYPE_OPEN} + * {@link #TYPE_CLOSE}. + */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({TYPE_OPEN, TYPE_READ, TYPE_CLOSE}) public @interface Type {} + /** The error occurred reading data from a {@code HttpDataSource}. */ public static final int TYPE_OPEN = 1; + /** The error occurred in opening a {@code HttpDataSource}. */ public static final int TYPE_READ = 2; + /** The error occurred in closing a {@code HttpDataSource}. */ public static final int TYPE_CLOSE = 3; - @Type public final int type; - /** - * The {@link DataSpec} associated with the current connection. + * Returns a {@code HttpDataSourceException} whose error code is assigned according to the cause + * and type. */ + public static HttpDataSourceException createForIOException( + IOException cause, DataSpec dataSpec, @Type int type) { + @PlaybackException.ErrorCode int errorCode; + @Nullable String message = cause.getMessage(); + if (cause instanceof SocketTimeoutException) { + errorCode = PlaybackException.ERROR_CODE_IO_NETWORK_CONNECTION_TIMEOUT; + } else if (cause instanceof InterruptedIOException) { + // An interruption means the operation is being cancelled, in which case this exception + // should not cause the player to fail. If it does, it likely means that the owner of the + // operation is failing to swallow the interruption, which makes us enter an invalid state. + errorCode = PlaybackException.ERROR_CODE_FAILED_RUNTIME_CHECK; + } else if (message != null + && Ascii.toLowerCase(message).matches("cleartext.*not permitted.*")) { + errorCode = PlaybackException.ERROR_CODE_IO_CLEARTEXT_NOT_PERMITTED; + } else { + errorCode = PlaybackException.ERROR_CODE_IO_NETWORK_CONNECTION_FAILED; + } + return errorCode == PlaybackException.ERROR_CODE_IO_CLEARTEXT_NOT_PERMITTED + ? new CleartextNotPermittedException(cause, dataSpec) + : new HttpDataSourceException(cause, dataSpec, errorCode, type); + } + + /** The {@link DataSpec} associated with the current connection. */ public final DataSpec dataSpec; + public final @Type int type; + + /** + * @deprecated Use {@link #HttpDataSourceException(DataSpec, int, int) + * HttpDataSourceException(DataSpec, PlaybackException.ERROR_CODE_IO_UNSPECIFIED, int)}. + */ + @Deprecated public HttpDataSourceException(DataSpec dataSpec, @Type int type) { - super(); + this(dataSpec, PlaybackException.ERROR_CODE_IO_UNSPECIFIED, type); + } + + /** + * Constructs an HttpDataSourceException. + * + * @param dataSpec The {@link DataSpec}. + * @param errorCode Reason of the error, should be one of the {@code ERROR_CODE_IO_*} in {@link + * PlaybackException.ErrorCode}. + * @param type See {@link Type}. 
+ */ + public HttpDataSourceException( + DataSpec dataSpec, @PlaybackException.ErrorCode int errorCode, @Type int type) { + super(assignErrorCode(errorCode, type)); this.dataSpec = dataSpec; this.type = type; } + /** + * @deprecated Use {@link #HttpDataSourceException(String, DataSpec, int, int) + * HttpDataSourceException(String, DataSpec, PlaybackException.ERROR_CODE_IO_UNSPECIFIED, + * int)}. + */ + @Deprecated public HttpDataSourceException(String message, DataSpec dataSpec, @Type int type) { - super(message); + this(message, dataSpec, PlaybackException.ERROR_CODE_IO_UNSPECIFIED, type); + } + + /** + * Constructs an HttpDataSourceException. + * + * @param message The error message. + * @param dataSpec The {@link DataSpec}. + * @param errorCode Reason of the error, should be one of the {@code ERROR_CODE_IO_*} in {@link + * PlaybackException.ErrorCode}. + * @param type See {@link Type}. + */ + public HttpDataSourceException( + String message, + DataSpec dataSpec, + @PlaybackException.ErrorCode int errorCode, + @Type int type) { + super(message, assignErrorCode(errorCode, type)); this.dataSpec = dataSpec; this.type = type; } + /** + * @deprecated Use {@link #HttpDataSourceException(IOException, DataSpec, int, int) + * HttpDataSourceException(IOException, DataSpec, + * PlaybackException.ERROR_CODE_IO_UNSPECIFIED, int)}. + */ + @Deprecated public HttpDataSourceException(IOException cause, DataSpec dataSpec, @Type int type) { - super(cause); + this(cause, dataSpec, PlaybackException.ERROR_CODE_IO_UNSPECIFIED, type); + } + + /** + * Constructs an HttpDataSourceException. + * + * @param cause The error cause. + * @param dataSpec The {@link DataSpec}. + * @param errorCode Reason of the error, should be one of the {@code ERROR_CODE_IO_*} in {@link + * PlaybackException.ErrorCode}. + * @param type See {@link Type}. + */ + public HttpDataSourceException( + IOException cause, + DataSpec dataSpec, + @PlaybackException.ErrorCode int errorCode, + @Type int type) { + super(cause, assignErrorCode(errorCode, type)); this.dataSpec = dataSpec; this.type = type; } - public HttpDataSourceException(String message, IOException cause, DataSpec dataSpec, + /** + * @deprecated Use {@link #HttpDataSourceException(String, IOException, DataSpec, int, int) + * HttpDataSourceException(String, IOException, DataSpec, + * PlaybackException.ERROR_CODE_IO_UNSPECIFIED, int)}. + */ + @Deprecated + public HttpDataSourceException( + String message, IOException cause, DataSpec dataSpec, @Type int type) { + this(message, cause, dataSpec, PlaybackException.ERROR_CODE_IO_UNSPECIFIED, type); + } + + /** + * Constructs an HttpDataSourceException. + * + * @param message The error message. + * @param cause The error cause. + * @param dataSpec The {@link DataSpec}. + * @param errorCode Reason of the error, should be one of the {@code ERROR_CODE_IO_*} in {@link + * PlaybackException.ErrorCode}. + * @param type See {@link Type}. + */ + public HttpDataSourceException( + String message, + @Nullable IOException cause, + DataSpec dataSpec, + @PlaybackException.ErrorCode int errorCode, @Type int type) { - super(message, cause); + super(message, cause, assignErrorCode(errorCode, type)); this.dataSpec = dataSpec; this.type = type; } + private static @PlaybackException.ErrorCode int assignErrorCode( + @PlaybackException.ErrorCode int errorCode, @Type int type) { + return errorCode == PlaybackException.ERROR_CODE_IO_UNSPECIFIED && type == TYPE_OPEN + ? 
PlaybackException.ERROR_CODE_IO_NETWORK_CONNECTION_FAILED + : errorCode; + } } /** - * Thrown when the content type is invalid. + * Thrown when cleartext HTTP traffic is not permitted. For more information including how to + * enable cleartext traffic, see the corresponding troubleshooting + * topic. */ + final class CleartextNotPermittedException extends HttpDataSourceException { + + public CleartextNotPermittedException(IOException cause, DataSpec dataSpec) { + super( + "Cleartext HTTP traffic not permitted. See" + + " https://exoplayer.dev/issues/cleartext-not-permitted", + cause, + dataSpec, + PlaybackException.ERROR_CODE_IO_CLEARTEXT_NOT_PERMITTED, + TYPE_OPEN); + } + } + + /** Thrown when the content type is invalid. */ final class InvalidContentTypeException extends HttpDataSourceException { public final String contentType; public InvalidContentTypeException(String contentType, DataSpec dataSpec) { - super("Invalid content type: " + contentType, dataSpec, TYPE_OPEN); + super( + "Invalid content type: " + contentType, + dataSpec, + PlaybackException.ERROR_CODE_IO_INVALID_HTTP_CONTENT_TYPE, + TYPE_OPEN); this.contentType = contentType; } - } /** @@ -293,37 +396,71 @@ public InvalidContentTypeException(String contentType, DataSpec dataSpec) { */ final class InvalidResponseCodeException extends HttpDataSourceException { - /** - * The response code that was outside of the 2xx range. - */ + /** The response code that was outside of the 2xx range. */ public final int responseCode; /** The http status message. */ @Nullable public final String responseMessage; - /** - * An unmodifiable map of the response header fields and values. - */ + /** An unmodifiable map of the response header fields and values. */ public final Map> headerFields; - /** @deprecated Use {@link #InvalidResponseCodeException(int, String, Map, DataSpec)}. */ + /** The response body. */ + public final byte[] responseBody; + + /** + * @deprecated Use {@link #InvalidResponseCodeException(int, String, IOException, Map, DataSpec, + * byte[])}. + */ @Deprecated public InvalidResponseCodeException( int responseCode, Map> headerFields, DataSpec dataSpec) { - this(responseCode, /* responseMessage= */ null, headerFields, dataSpec); + this( + responseCode, + /* responseMessage= */ null, + /* cause= */ null, + headerFields, + dataSpec, + /* responseBody= */ Util.EMPTY_BYTE_ARRAY); } + /** + * @deprecated Use {@link #InvalidResponseCodeException(int, String, IOException, Map, DataSpec, + * byte[])}. 
+ */ + @Deprecated public InvalidResponseCodeException( int responseCode, @Nullable String responseMessage, Map> headerFields, DataSpec dataSpec) { - super("Response code: " + responseCode, dataSpec, TYPE_OPEN); + this( + responseCode, + responseMessage, + /* cause= */ null, + headerFields, + dataSpec, + /* responseBody= */ Util.EMPTY_BYTE_ARRAY); + } + + public InvalidResponseCodeException( + int responseCode, + @Nullable String responseMessage, + @Nullable IOException cause, + Map> headerFields, + DataSpec dataSpec, + byte[] responseBody) { + super( + "Response code: " + responseCode, + cause, + dataSpec, + PlaybackException.ERROR_CODE_IO_BAD_HTTP_STATUS, + TYPE_OPEN); this.responseCode = responseCode; this.responseMessage = responseMessage; this.headerFields = headerFields; + this.responseBody = responseBody; } - } /** @@ -340,7 +477,7 @@ public InvalidResponseCodeException( void close() throws HttpDataSourceException; @Override - int read(byte[] buffer, int offset, int readLength) throws HttpDataSourceException; + int read(byte[] buffer, int offset, int length) throws HttpDataSourceException; /** * Sets the value of a request header. The value will be used for subsequent connections @@ -363,9 +500,7 @@ public InvalidResponseCodeException( */ void clearRequestProperty(String name); - /** - * Clears all request headers that were set by {@link #setRequestProperty(String, String)}. - */ + /** Clears all request headers that were set by {@link #setRequestProperty(String, String)}. */ void clearAllRequestProperties(); /** diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/HttpUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/HttpUtil.java new file mode 100644 index 0000000000..ac433009a7 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/HttpUtil.java @@ -0,0 +1,129 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.upstream; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.Math.max; + +import android.text.TextUtils; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.util.Log; +import com.google.common.net.HttpHeaders; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** Utility methods for HTTP. */ +public final class HttpUtil { + + private static final String TAG = "HttpUtil"; + private static final Pattern CONTENT_RANGE_WITH_START_AND_END = + Pattern.compile("bytes (\\d+)-(\\d+)/(?:\\d+|\\*)"); + private static final Pattern CONTENT_RANGE_WITH_SIZE = + Pattern.compile("bytes (?:(?:\\d+-\\d+)|\\*)/(\\d+)"); + + /** Class only contains static methods. */ + private HttpUtil() {} + + /** + * Builds a {@link HttpHeaders#RANGE Range header} for the given position and length. + * + * @param position The request position. 
+ * @param length The request length, or {@link C#LENGTH_UNSET} if the request is unbounded. + * @return The corresponding range header, or {@code null} if a header is unnecessary because the + * whole resource is being requested. + */ + @Nullable + public static String buildRangeRequestHeader(long position, long length) { + if (position == 0 && length == C.LENGTH_UNSET) { + return null; + } + StringBuilder rangeValue = new StringBuilder(); + rangeValue.append("bytes="); + rangeValue.append(position); + rangeValue.append("-"); + if (length != C.LENGTH_UNSET) { + rangeValue.append(position + length - 1); + } + return rangeValue.toString(); + } + + /** + * Attempts to parse the document size from a {@link HttpHeaders#CONTENT_RANGE Content-Range + * header}. + * + * @param contentRangeHeader The {@link HttpHeaders#CONTENT_RANGE Content-Range header}, or {@code + * null} if not set. + * @return The document size, or {@link C#LENGTH_UNSET} if it could not be determined. + */ + public static long getDocumentSize(@Nullable String contentRangeHeader) { + if (TextUtils.isEmpty(contentRangeHeader)) { + return C.LENGTH_UNSET; + } + Matcher matcher = CONTENT_RANGE_WITH_SIZE.matcher(contentRangeHeader); + return matcher.matches() ? Long.parseLong(checkNotNull(matcher.group(1))) : C.LENGTH_UNSET; + } + + /** + * Attempts to parse the length of a response body from the corresponding response headers. + * + * @param contentLengthHeader The {@link HttpHeaders#CONTENT_LENGTH Content-Length header}, or + * {@code null} if not set. + * @param contentRangeHeader The {@link HttpHeaders#CONTENT_RANGE Content-Range header}, or {@code + * null} if not set. + * @return The length of the response body, or {@link C#LENGTH_UNSET} if it could not be + * determined. + */ + public static long getContentLength( + @Nullable String contentLengthHeader, @Nullable String contentRangeHeader) { + long contentLength = C.LENGTH_UNSET; + if (!TextUtils.isEmpty(contentLengthHeader)) { + try { + contentLength = Long.parseLong(contentLengthHeader); + } catch (NumberFormatException e) { + Log.e(TAG, "Unexpected Content-Length [" + contentLengthHeader + "]"); + } + } + if (!TextUtils.isEmpty(contentRangeHeader)) { + Matcher matcher = CONTENT_RANGE_WITH_START_AND_END.matcher(contentRangeHeader); + if (matcher.matches()) { + try { + long contentLengthFromRange = + Long.parseLong(checkNotNull(matcher.group(2))) + - Long.parseLong(checkNotNull(matcher.group(1))) + + 1; + if (contentLength < 0) { + // Some proxy servers strip the Content-Length header. Fall back to the length + // calculated here in this case. + contentLength = contentLengthFromRange; + } else if (contentLength != contentLengthFromRange) { + // If there is a discrepancy between the Content-Length and Content-Range headers, + // assume the one with the larger value is correct. We have seen cases where carrier + // change one of them to reduce the size of a request, but it is unlikely anybody would + // increase it. 
+ Log.w( + TAG, + "Inconsistent headers [" + contentLengthHeader + "] [" + contentRangeHeader + "]"); + contentLength = max(contentLength, contentLengthFromRange); + } + } catch (NumberFormatException e) { + Log.e(TAG, "Unexpected Content-Range [" + contentRangeHeader + "]"); + } + } + } + return contentLength; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/LoadErrorHandlingPolicy.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/LoadErrorHandlingPolicy.java index 293d1e7510..0a5032b021 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/LoadErrorHandlingPolicy.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/LoadErrorHandlingPolicy.java @@ -15,72 +15,183 @@ */ package com.google.android.exoplayer2.upstream; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static java.lang.annotation.ElementType.TYPE_USE; + +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.source.LoadEventInfo; +import com.google.android.exoplayer2.source.MediaLoadData; import com.google.android.exoplayer2.upstream.Loader.Callback; import com.google.android.exoplayer2.upstream.Loader.Loadable; import java.io.IOException; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** - * Defines how errors encountered by {@link Loader Loaders} are handled. + * A policy that defines how load errors are handled. * - *
<p>
      Loader clients may blacklist a resource when a load error occurs. Blacklisting works around - * load errors by loading an alternative resource. Clients do not try blacklisting when a resource - * does not have an alternative. When a resource does have valid alternatives, {@link - * #getBlacklistDurationMsFor(int, long, IOException, int)} defines whether the resource should be - * blacklisted. Blacklisting will succeed if any of the alternatives is not in the black list. + *
<p>
      Some loaders are able to choose between a number of alternate resources. Such loaders will + * call {@link #getFallbackSelectionFor(FallbackOptions, LoadErrorInfo)} when a load error occurs. + * The {@link FallbackSelection} returned by the policy defines whether the loader should fall back + * to using another resource, and if so the duration for which the failing resource should be + * excluded. * - *
<p>
      When blacklisting does not take place, {@link #getRetryDelayMsFor(int, long, IOException, - * int)} defines whether the load is retried. Errors whose load is not retried are propagated. Load - * errors whose load is retried are propagated according to {@link - * #getMinimumLoadableRetryCount(int)}. + *
<p>
      When fallback does not take place, a loader will call {@link + * #getRetryDelayMsFor(LoadErrorInfo)}. The value returned by the policy defines whether the failed + * load can be retried, and if so the duration to wait before retrying. If the policy indicates that + * a load error should not be retried, it will be considered fatal by the loader. The loader may + * also consider load errors that can be retried fatal if at least {@link + * #getMinimumLoadableRetryCount(int)} retries have been attempted. * *
<p>
      Methods are invoked on the playback thread. */ public interface LoadErrorHandlingPolicy { + /** Fallback type. One of {@link #FALLBACK_TYPE_LOCATION} or {@link #FALLBACK_TYPE_TRACK}. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({FALLBACK_TYPE_LOCATION, FALLBACK_TYPE_TRACK}) + @interface FallbackType {} + + /** + * Fallback to the same resource at a different location (i.e., a different URL through which the + * exact same data can be requested). + */ + int FALLBACK_TYPE_LOCATION = 1; + /** + * Fallback to a different track (i.e., a different representation of the same content; for + * example the same video encoded at a different bitrate or resolution). + */ + int FALLBACK_TYPE_TRACK = 2; + + /** Holds information about a load task error. */ + final class LoadErrorInfo { + + /** The {@link LoadEventInfo} associated with the load that encountered an error. */ + public final LoadEventInfo loadEventInfo; + /** {@link MediaLoadData} associated with the load that encountered an error. */ + public final MediaLoadData mediaLoadData; + /** The exception associated to the load error. */ + public final IOException exception; + /** The number of errors this load task has encountered, including this one. */ + public final int errorCount; + + /** Creates an instance with the given values. */ + public LoadErrorInfo( + LoadEventInfo loadEventInfo, + MediaLoadData mediaLoadData, + IOException exception, + int errorCount) { + this.loadEventInfo = loadEventInfo; + this.mediaLoadData = mediaLoadData; + this.exception = exception; + this.errorCount = errorCount; + } + } + + /** Holds information about the available fallback options. */ + final class FallbackOptions { + /** The number of available locations. */ + public final int numberOfLocations; + /** The number of locations that are already excluded. */ + public final int numberOfExcludedLocations; + /** The number of tracks. */ + public final int numberOfTracks; + /** The number of tracks that are already excluded. */ + public final int numberOfExcludedTracks; + + /** Creates an instance. */ + public FallbackOptions( + int numberOfLocations, + int numberOfExcludedLocations, + int numberOfTracks, + int numberOfExcludedTracks) { + this.numberOfLocations = numberOfLocations; + this.numberOfExcludedLocations = numberOfExcludedLocations; + this.numberOfTracks = numberOfTracks; + this.numberOfExcludedTracks = numberOfExcludedTracks; + } + + /** Returns whether a fallback is available for the given {@link FallbackType fallback type}. */ + public boolean isFallbackAvailable(@FallbackType int type) { + return type == FALLBACK_TYPE_LOCATION + ? numberOfLocations - numberOfExcludedLocations > 1 + : numberOfTracks - numberOfExcludedTracks > 1; + } + } + + /** A selected fallback option. */ + final class FallbackSelection { + /** The type of fallback. */ + public final @FallbackType int type; + /** The duration for which the failing resource should be excluded, in milliseconds. */ + public final long exclusionDurationMs; + + /** + * Creates an instance. + * + * @param type The type of fallback. + * @param exclusionDurationMs The duration for which the failing resource should be excluded, in + * milliseconds. Must be non-negative. 
+ */ + public FallbackSelection(@FallbackType int type, long exclusionDurationMs) { + checkArgument(exclusionDurationMs >= 0); + this.type = type; + this.exclusionDurationMs = exclusionDurationMs; + } + } + /** - * Returns the number of milliseconds for which a resource associated to a provided load error - * should be blacklisted, or {@link C#TIME_UNSET} if the resource should not be blacklisted. + * Returns whether a loader should fall back to using another resource on encountering an error, + * and if so the duration for which the failing resource should be excluded. * - * @param dataType One of the {@link C C.DATA_TYPE_*} constants indicating the type of data to - * load. - * @param loadDurationMs The duration in milliseconds of the load from the start of the first load - * attempt up to the point at which the error occurred. - * @param exception The load error. - * @param errorCount The number of errors this load has encountered, including this one. - * @return The blacklist duration in milliseconds, or {@link C#TIME_UNSET} if the resource should - * not be blacklisted. + *

      If the returned {@link FallbackSelection#type fallback type} was not {@link + * FallbackOptions#isFallbackAvailable(int) advertised as available}, then the loader will not + * fall back. + * + * @param fallbackOptions The available fallback options. + * @param loadErrorInfo A {@link LoadErrorInfo} holding information about the load error. + * @return The selected fallback, or {@code null} if the calling loader should not fall back. */ - long getBlacklistDurationMsFor( - int dataType, long loadDurationMs, IOException exception, int errorCount); + @Nullable + FallbackSelection getFallbackSelectionFor( + FallbackOptions fallbackOptions, LoadErrorInfo loadErrorInfo); /** - * Returns the number of milliseconds to wait before attempting the load again, or {@link - * C#TIME_UNSET} if the error is fatal and should not be retried. + * Returns whether a loader can retry on encountering an error, and if so the duration to wait + * before retrying. A return value of {@link C#TIME_UNSET} indicates that the error is fatal and + * should not be retried. + * + *

      For loads that can be retried, loaders may ignore the retry delay returned by this method in + * order to wait for a specific event before retrying. * - *

      {@link Loader} clients may ignore the retry delay returned by this method in order to wait - * for a specific event before retrying. However, the load is retried if and only if this method - * does not return {@link C#TIME_UNSET}. + * @param loadErrorInfo A {@link LoadErrorInfo} holding information about the load error. + * @return The duration to wait before retrying in milliseconds, or {@link C#TIME_UNSET} if the + * error is fatal and should not be retried. + */ + long getRetryDelayMsFor(LoadErrorInfo loadErrorInfo); + + /** + * Called once {@code loadTaskId} will not be associated with any more load errors. * - * @param dataType One of the {@link C C.DATA_TYPE_*} constants indicating the type of data to - * load. - * @param loadDurationMs The duration in milliseconds of the load from the start of the first load - * attempt up to the point at which the error occurred. - * @param exception The load error. - * @param errorCount The number of errors this load has encountered, including this one. - * @return The number of milliseconds to wait before attempting the load again, or {@link - * C#TIME_UNSET} if the error is fatal and should not be retried. + *

      Implementations should clean up any resources associated with {@code loadTaskId} when this + * method is called. */ - long getRetryDelayMsFor(int dataType, long loadDurationMs, IOException exception, int errorCount); + default void onLoadTaskConcluded(long loadTaskId) {} /** - * Returns the minimum number of times to retry a load in the case of a load error, before - * propagating the error. + * Returns the minimum number of times to retry a load before a load error that can be retried may + * be considered fatal. * - * @param dataType One of the {@link C C.DATA_TYPE_*} constants indicating the type of data to - * load. - * @return The minimum number of times to retry a load in the case of a load error, before - * propagating the error. + * @param dataType One of the {@link C C.DATA_TYPE_*} constants indicating the type of data being + * loaded. + * @return The minimum number of times to retry a load before a load error that can be retried may + * be considered fatal. * @see Loader#startLoading(Loadable, Callback, int) */ int getMinimumLoadableRetryCount(int dataType); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/Loader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/Loader.java index 5b7846f5ce..84402b8c2e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/Loader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/Loader.java @@ -15,6 +15,9 @@ */ package com.google.android.exoplayer2.upstream; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.annotation.SuppressLint; import android.os.Handler; import android.os.Looper; @@ -31,28 +34,22 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicBoolean; -/** - * Manages the background loading of {@link Loadable}s. - */ +/** Manages the background loading of {@link Loadable}s. */ public final class Loader implements LoaderErrorThrower { - /** - * Thrown when an unexpected exception or error is encountered during loading. - */ + /** Thrown when an unexpected exception or error is encountered during loading. */ public static final class UnexpectedLoaderException extends IOException { public UnexpectedLoaderException(Throwable cause) { super("Unexpected " + cause.getClass().getSimpleName() + ": " + cause.getMessage(), cause); } - } - /** - * An object that can be loaded using a {@link Loader}. - */ + /** An object that can be loaded using a {@link Loader}. */ public interface Loadable { /** @@ -79,15 +76,11 @@ public interface Loadable { * Performs the load, returning on completion or cancellation. * * @throws IOException If the input could not be loaded. - * @throws InterruptedException If the thread was interrupted. */ - void load() throws IOException, InterruptedException; - + void load() throws IOException; } - /** - * A callback to be notified of {@link Loader} events. - */ + /** A callback to be notified of {@link Loader} events. */ public interface Callback { /** @@ -140,21 +133,19 @@ LoadErrorAction onLoadError( T loadable, long elapsedRealtimeMs, long loadDurationMs, IOException error, int errorCount); } - /** - * A callback to be notified when a {@link Loader} has finished being released. 
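For orientation, a custom policy built against the reworked LoadErrorHandlingPolicy interface above might look roughly like the sketch below. The class name, the 60-second exclusion and the HTTP-404 handling are illustrative choices, not defaults taken from the patch.

    import androidx.annotation.Nullable;
    import com.google.android.exoplayer2.C;
    import com.google.android.exoplayer2.upstream.HttpDataSource;
    import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy;

    /** Illustrative policy: prefer switching location, back off retries, treat 404 as fatal. */
    final class SketchLoadErrorHandlingPolicy implements LoadErrorHandlingPolicy {

      @Override
      @Nullable
      public FallbackSelection getFallbackSelectionFor(
          FallbackOptions fallbackOptions, LoadErrorInfo loadErrorInfo) {
        // Only fall back to a type that the options advertise as available.
        if (fallbackOptions.isFallbackAvailable(FALLBACK_TYPE_LOCATION)) {
          return new FallbackSelection(FALLBACK_TYPE_LOCATION, /* exclusionDurationMs= */ 60_000);
        }
        return null; // No fallback; the loader will consult getRetryDelayMsFor instead.
      }

      @Override
      public long getRetryDelayMsFor(LoadErrorInfo loadErrorInfo) {
        // HTTP 404 is unlikely to recover, so report it as fatal (C.TIME_UNSET).
        if (loadErrorInfo.exception instanceof HttpDataSource.InvalidResponseCodeException
            && ((HttpDataSource.InvalidResponseCodeException) loadErrorInfo.exception).responseCode
                == 404) {
          return C.TIME_UNSET;
        }
        // Linear backoff capped at 5 seconds, mirroring the loader's own default backoff.
        return Math.min((loadErrorInfo.errorCount - 1) * 1000L, 5000);
      }

      @Override
      public int getMinimumLoadableRetryCount(int dataType) {
        return 3;
      }
    }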
- */ + /** A callback to be notified when a {@link Loader} has finished being released. */ public interface ReleaseCallback { - /** - * Called when the {@link Loader} has finished being released. - */ + /** Called when the {@link Loader} has finished being released. */ void onLoaderReleased(); - } + private static final String THREAD_NAME_PREFIX = "ExoPlayer:Loader:"; + /** Types of action that can be taken in response to a load error. */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ ACTION_TYPE_RETRY, ACTION_TYPE_RETRY_AND_RESET_ERROR_COUNT, @@ -210,10 +201,12 @@ public boolean isRetry() { @Nullable private IOException fatalError; /** - * @param threadName A name for the loader's thread. + * @param threadNameSuffix A name suffix for the loader's thread. This should be the name of the + * component using the loader. */ - public Loader(String threadName) { - this.downloadExecutorService = Util.newSingleThreadExecutor(threadName); + public Loader(String threadNameSuffix) { + this.downloadExecutorService = + Util.newSingleThreadExecutor(THREAD_NAME_PREFIX + threadNameSuffix); } /** @@ -312,8 +305,8 @@ public void maybeThrowError(int minRetryCount) throws IOException { if (fatalError != null) { throw fatalError; } else if (currentTask != null) { - currentTask.maybeThrowError(minRetryCount == Integer.MIN_VALUE - ? currentTask.defaultMinRetryCount : minRetryCount); + currentTask.maybeThrowError( + minRetryCount == Integer.MIN_VALUE ? currentTask.defaultMinRetryCount : minRetryCount); } } @@ -342,8 +335,12 @@ private final class LoadTask extends Handler implements Runn private boolean canceled; private volatile boolean released; - public LoadTask(Looper looper, T loadable, Loader.Callback callback, - int defaultMinRetryCount, long startTimeMs) { + public LoadTask( + Looper looper, + T loadable, + Loader.Callback callback, + int defaultMinRetryCount, + long startTimeMs) { super(looper); this.loadable = loadable; this.callback = callback; @@ -429,32 +426,26 @@ public void run() { if (!released) { obtainMessage(MSG_IO_EXCEPTION, e).sendToTarget(); } - } catch (InterruptedException e) { - // The load was canceled. - Assertions.checkState(canceled); - if (!released) { - sendEmptyMessage(MSG_FINISH); - } } catch (Exception e) { // This should never happen, but handle it anyway. - Log.e(TAG, "Unexpected exception loading stream", e); if (!released) { + Log.e(TAG, "Unexpected exception loading stream", e); obtainMessage(MSG_IO_EXCEPTION, new UnexpectedLoaderException(e)).sendToTarget(); } } catch (OutOfMemoryError e) { // This can occur if a stream is malformed in a way that causes an extractor to think it // needs to allocate a large amount of memory. We don't want the process to die in this // case, but we do want the playback to fail. - Log.e(TAG, "OutOfMemory error loading stream", e); if (!released) { + Log.e(TAG, "OutOfMemory error loading stream", e); obtainMessage(MSG_IO_EXCEPTION, new UnexpectedLoaderException(e)).sendToTarget(); } } catch (Error e) { - // We'd hope that the platform would kill the process if an Error is thrown here, but the - // executor may catch the error (b/20616433). Throw it here, but also pass and throw it from - // the handler thread so that the process dies even if the executor behaves in this way. - Log.e(TAG, "Unexpected error loading stream", e); + // We'd hope that the platform would shut down the process if an Error is thrown here, but + // the executor may catch the error (b/20616433). 
Throw it here, but also pass and throw it + // from the handler thread so the process dies even if the executor behaves in this way. if (!released) { + Log.e(TAG, "Unexpected error loading stream", e); obtainMessage(MSG_FATAL_ERROR, e).sendToTarget(); } throw e; @@ -524,9 +515,8 @@ private void finish() { } private long getRetryDelayMillis() { - return Math.min((errorCount - 1) * 1000, 5000); + return min((errorCount - 1) * 1000, 5000); } - } private static final class ReleaseTask implements Runnable { @@ -541,7 +531,5 @@ public ReleaseTask(ReleaseCallback callback) { public void run() { callback.onLoaderReleased(); } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/LoaderErrorThrower.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/LoaderErrorThrower.java index 4f9e9fa5e6..eea3da8556 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/LoaderErrorThrower.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/LoaderErrorThrower.java @@ -18,9 +18,7 @@ import com.google.android.exoplayer2.upstream.Loader.Loadable; import java.io.IOException; -/** - * Conditionally throws errors affecting a {@link Loader}. - */ +/** Conditionally throws errors affecting a {@link Loader}. */ public interface LoaderErrorThrower { /** @@ -34,8 +32,8 @@ public interface LoaderErrorThrower { /** * Throws a fatal error, or a non-fatal error if loading is currently backed off and the current - * {@link Loadable} has incurred a number of errors greater than the specified minimum number - * of retries. Else does nothing. + * {@link Loadable} has incurred a number of errors greater than the specified minimum number of + * retries. Else does nothing. * * @param minRetryCount A minimum retry count that must be exceeded for a non-fatal error to be * thrown. Should be non-negative. @@ -43,21 +41,17 @@ public interface LoaderErrorThrower { */ void maybeThrowError(int minRetryCount) throws IOException; - /** - * A {@link LoaderErrorThrower} that never throws. - */ + /** A {@link LoaderErrorThrower} that never throws. */ final class Dummy implements LoaderErrorThrower { @Override - public void maybeThrowError() throws IOException { + public void maybeThrowError() { // Do nothing. } @Override - public void maybeThrowError(int minRetryCount) throws IOException { + public void maybeThrowError(int minRetryCount) { // Do nothing. } - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ParsingLoadable.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ParsingLoadable.java index edec849b88..12f686532e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ParsingLoadable.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ParsingLoadable.java @@ -19,6 +19,7 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.source.LoadEventInfo; import com.google.android.exoplayer2.upstream.Loader.Loadable; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; @@ -34,9 +35,7 @@ */ public final class ParsingLoadable implements Loadable { - /** - * Parses an object from loaded data. - */ + /** Parses an object from loaded data. 
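Since Loadable.load() no longer declares InterruptedException, cancellation is observed through a flag rather than thread interrupts. A rough sketch of a client under that contract; all names here are hypothetical:

    import com.google.android.exoplayer2.upstream.Loader;
    import java.io.IOException;

    final class SketchLoaderClient implements Loader.Callback<SketchLoaderClient.SketchLoadable> {

      static final class SketchLoadable implements Loader.Loadable {
        private volatile boolean canceled;

        @Override
        public void cancelLoad() {
          canceled = true;
        }

        @Override
        public void load() throws IOException {
          // Blocking I/O goes here, checking `canceled` between chunks instead of
          // relying on InterruptedException, which load() no longer declares.
          if (canceled) {
            return;
          }
          // ... read data ...
        }
      }

      // The loader thread is now named "ExoPlayer:Loader:" + the suffix passed here.
      private final Loader loader = new Loader(/* threadNameSuffix= */ "SketchLoaderClient");

      /** Must be called from a thread that has a Looper (e.g. the playback thread). */
      public void start() {
        loader.startLoading(new SketchLoadable(), /* callback= */ this, /* defaultMinRetryCount= */ 3);
      }

      @Override
      public void onLoadCompleted(SketchLoadable loadable, long elapsedRealtimeMs, long loadDurationMs) {}

      @Override
      public void onLoadCanceled(
          SketchLoadable loadable, long elapsedRealtimeMs, long loadDurationMs, boolean released) {}

      @Override
      public Loader.LoadErrorAction onLoadError(
          SketchLoadable loadable,
          long elapsedRealtimeMs,
          long loadDurationMs,
          IOException error,
          int errorCount) {
        // Give up after a few attempts; otherwise let the loader retry with its default backoff.
        return errorCount > 3 ? Loader.DONT_RETRY : Loader.RETRY;
      }
    }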
*/ public interface Parser { /** @@ -49,7 +48,6 @@ public interface Parser { * @throws IOException If an error occurs reading data from the stream. */ T parse(Uri uri, InputStream inputStream) throws IOException; - } /** @@ -87,9 +85,9 @@ public static T load( return Assertions.checkNotNull(loadable.getResult()); } - /** - * The {@link DataSpec} that defines the data to be loaded. - */ + /** Identifies the load task for this loadable. */ + public final long loadTaskId; + /** The {@link DataSpec} that defines the data to be loaded. */ public final DataSpec dataSpec; /** * The type of the data. One of the {@code DATA_TYPE_*} constants defined in {@link C}. For @@ -100,7 +98,7 @@ public static T load( private final StatsDataSource dataSource; private final Parser parser; - private volatile @Nullable T result; + @Nullable private volatile T result; /** * @param dataSource A {@link DataSource} to use when loading the data. @@ -109,7 +107,11 @@ public static T load( * @param parser Parses the object from the response. */ public ParsingLoadable(DataSource dataSource, Uri uri, int type, Parser parser) { - this(dataSource, new DataSpec(uri, DataSpec.FLAG_ALLOW_GZIP), type, parser); + this( + dataSource, + new DataSpec.Builder().setUri(uri).setFlags(DataSpec.FLAG_ALLOW_GZIP).build(), + type, + parser); } /** @@ -118,16 +120,18 @@ public ParsingLoadable(DataSource dataSource, Uri uri, int type, Parser parser) { + public ParsingLoadable( + DataSource dataSource, DataSpec dataSpec, int type, Parser parser) { this.dataSource = new StatsDataSource(dataSource); this.dataSpec = dataSpec; this.type = type; this.parser = parser; + loadTaskId = LoadEventInfo.getNewId(); } /** Returns the loaded object, or null if an object has not been loaded. */ - public final @Nullable T getResult() { + @Nullable + public final T getResult() { return result; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/PlaceholderDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/PlaceholderDataSource.java new file mode 100644 index 0000000000..b5b71f2b93 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/PlaceholderDataSource.java @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.upstream; + +import android.net.Uri; +import androidx.annotation.Nullable; +import java.io.IOException; + +/** A DataSource which provides no data. {@link #open(DataSpec)} throws {@link IOException}. */ +public final class PlaceholderDataSource implements DataSource { + + public static final PlaceholderDataSource INSTANCE = new PlaceholderDataSource(); + + /** A factory that produces {@link PlaceholderDataSource}. */ + public static final Factory FACTORY = PlaceholderDataSource::new; + + private PlaceholderDataSource() {} + + @Override + public void addTransferListener(TransferListener transferListener) { + // Do nothing. 
+ } + + @Override + public long open(DataSpec dataSpec) throws IOException { + throw new IOException("PlaceholderDataSource cannot be opened"); + } + + @Override + public int read(byte[] buffer, int offset, int length) { + throw new UnsupportedOperationException(); + } + + @Override + @Nullable + public Uri getUri() { + return null; + } + + @Override + public void close() { + // do nothing. + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/PriorityDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/PriorityDataSource.java index 767b6d78a3..c8dde48192 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/PriorityDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/PriorityDataSource.java @@ -24,19 +24,51 @@ import java.util.Map; /** - * A {@link DataSource} that can be used as part of a task registered with a - * {@link PriorityTaskManager}. - *
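The new PlaceholderDataSource above is intended for call sites that require a DataSource.Factory but will never actually read data; a minimal usage sketch (the wrapper class is hypothetical):

    import com.google.android.exoplayer2.upstream.DataSource;
    import com.google.android.exoplayer2.upstream.PlaceholderDataSource;

    final class PlaceholderUsageSketch {
      /** Supplies a factory for components that demand one but are never expected to load. */
      static DataSource.Factory noDataFactory() {
        // Any attempt to open a source created by this factory throws IOException,
        // so it must only be wired into paths that never load data.
        return PlaceholderDataSource.FACTORY;
      }
    }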

      - * Calls to {@link #open(DataSpec)} and {@link #read(byte[], int, int)} are allowed to proceed only - * if there are no higher priority tasks registered to the {@link PriorityTaskManager}. If there - * exists a higher priority task then {@link PriorityTaskManager.PriorityTooLowException} is thrown. - *

      - * Instances of this class are intended to be used as parts of (possibly larger) tasks that are + * A {@link DataSource} that can be used as part of a task registered with a {@link + * PriorityTaskManager}. + * + *

      Calls to {@link #open(DataSpec)} and {@link #read(byte[], int, int)} are allowed to proceed + * only if there are no higher priority tasks registered to the {@link PriorityTaskManager}. If + * there exists a higher priority task then {@link PriorityTaskManager.PriorityTooLowException} is + * thrown. + * + *

      Instances of this class are intended to be used as parts of (possibly larger) tasks that are * registered with the {@link PriorityTaskManager}, and hence do not register as tasks * themselves. */ public final class PriorityDataSource implements DataSource { + /** {@link DataSource.Factory} for {@link PriorityDataSource} instances. */ + public static final class Factory implements DataSource.Factory { + + private final DataSource.Factory upstreamFactory; + private final PriorityTaskManager priorityTaskManager; + private final int priority; + + /** + * Creates an instance. + * + * @param upstreamFactory A {@link DataSource.Factory} that provides upstream {@link DataSource + * DataSources} for {@link PriorityDataSource} instances created by the factory. + * @param priorityTaskManager The {@link PriorityTaskManager} to which tasks using {@link + * PriorityDataSource} instances created by this factory will be registered. + * @param priority The priority of the tasks using {@link PriorityDataSource} instances created + * by this factory. + */ + public Factory( + DataSource.Factory upstreamFactory, PriorityTaskManager priorityTaskManager, int priority) { + this.upstreamFactory = upstreamFactory; + this.priorityTaskManager = priorityTaskManager; + this.priority = priority; + } + + @Override + public PriorityDataSource createDataSource() { + return new PriorityDataSource( + upstreamFactory.createDataSource(), priorityTaskManager, priority); + } + } + private final DataSource upstream; private final PriorityTaskManager priorityTaskManager; private final int priority; @@ -46,8 +78,8 @@ public final class PriorityDataSource implements DataSource { * @param priorityTaskManager The priority manager to which the task is registered. * @param priority The priority of the task. 
*/ - public PriorityDataSource(DataSource upstream, PriorityTaskManager priorityTaskManager, - int priority) { + public PriorityDataSource( + DataSource upstream, PriorityTaskManager priorityTaskManager, int priority) { this.upstream = Assertions.checkNotNull(upstream); this.priorityTaskManager = Assertions.checkNotNull(priorityTaskManager); this.priority = priority; @@ -55,6 +87,7 @@ public PriorityDataSource(DataSource upstream, PriorityTaskManager priorityTaskM @Override public void addTransferListener(TransferListener transferListener) { + Assertions.checkNotNull(transferListener); upstream.addTransferListener(transferListener); } @@ -65,9 +98,9 @@ public long open(DataSpec dataSpec) throws IOException { } @Override - public int read(byte[] buffer, int offset, int max) throws IOException { + public int read(byte[] buffer, int offset, int length) throws IOException { priorityTaskManager.proceedOrThrow(priority); - return upstream.read(buffer, offset, max); + return upstream.read(buffer, offset, length); } @Override @@ -85,5 +118,4 @@ public Map> getResponseHeaders() { public void close() throws IOException { upstream.close(); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/PriorityDataSourceFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/PriorityDataSourceFactory.java index daad41a9a6..2bc2d19dbe 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/PriorityDataSourceFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/PriorityDataSourceFactory.java @@ -19,8 +19,9 @@ import com.google.android.exoplayer2.util.PriorityTaskManager; /** - * A {@link DataSource.Factory} that produces {@link PriorityDataSource} instances. + * @deprecated Use {@link PriorityDataSource.Factory}. */ +@Deprecated public final class PriorityDataSourceFactory implements Factory { private final Factory upstreamFactory; @@ -33,8 +34,8 @@ public final class PriorityDataSourceFactory implements Factory { * @param priorityTaskManager The priority manager to which PriorityDataSource task is registered. * @param priority The priority of PriorityDataSource task. 
*/ - public PriorityDataSourceFactory(Factory upstreamFactory, PriorityTaskManager priorityTaskManager, - int priority) { + public PriorityDataSourceFactory( + Factory upstreamFactory, PriorityTaskManager priorityTaskManager, int priority) { this.upstreamFactory = upstreamFactory; this.priorityTaskManager = priorityTaskManager; this.priority = priority; @@ -42,8 +43,7 @@ public PriorityDataSourceFactory(Factory upstreamFactory, PriorityTaskManager pr @Override public PriorityDataSource createDataSource() { - return new PriorityDataSource(upstreamFactory.createDataSource(), priorityTaskManager, - priority); + return new PriorityDataSource( + upstreamFactory.createDataSource(), priorityTaskManager, priority); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/RawResourceDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/RawResourceDataSource.java index fbfd698610..652353d3a2 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/RawResourceDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/RawResourceDataSource.java @@ -16,7 +16,9 @@ package com.google.android.exoplayer2.upstream; import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.min; +import android.content.ContentResolver; import android.content.Context; import android.content.res.AssetFileDescriptor; import android.content.res.Resources; @@ -24,31 +26,58 @@ import android.text.TextUtils; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.PlaybackException; import com.google.android.exoplayer2.util.Assertions; import java.io.EOFException; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; +import java.nio.channels.FileChannel; /** * A {@link DataSource} for reading a raw resource inside the APK. * - *
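With PriorityDataSourceFactory deprecated in favour of the nested PriorityDataSource.Factory above, the replacement wiring looks roughly like this sketch; the HTTP upstream factory (assumed available from stock ExoPlayer) and the playback priority are illustrative choices:

    import com.google.android.exoplayer2.C;
    import com.google.android.exoplayer2.upstream.DataSource;
    import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;
    import com.google.android.exoplayer2.upstream.PriorityDataSource;
    import com.google.android.exoplayer2.util.PriorityTaskManager;

    final class PrioritySketch {
      /** Reads through sources from this factory proceed only while no higher-priority task is active. */
      static DataSource.Factory playbackPriorityFactory(PriorityTaskManager priorityTaskManager) {
        return new PriorityDataSource.Factory(
            new DefaultHttpDataSource.Factory(), priorityTaskManager, C.PRIORITY_PLAYBACK);
      }
    }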

      URIs supported by this source are of the form {@code rawresource:///rawResourceId}, where - * rawResourceId is the integer identifier of a raw resource. {@link #buildRawResourceUri(int)} can - * be used to build {@link Uri}s in this format. + *

      URIs supported by this source are of one of the forms: + * + *

        + *
      • {@code rawresource:///id}, where {@code id} is the integer identifier of a raw resource. + *
      • {@code android.resource:///id}, where {@code id} is the integer identifier of a raw + * resource. + *
      • {@code android.resource://[package]/[type/]name}, where {@code package} is the name of the + * package in which the resource is located, {@code type} is the resource type and {@code + * name} is the resource name. The package and the type are optional. Their default value is + * the package of this application and "raw", respectively. Using the two other forms is more + * efficient. + *
      + * + *

      {@link #buildRawResourceUri(int)} can be used to build supported {@link Uri}s. */ public final class RawResourceDataSource extends BaseDataSource { - /** - * Thrown when an {@link IOException} is encountered reading from a raw resource. - */ - public static class RawResourceDataSourceException extends IOException { + /** Thrown when an {@link IOException} is encountered reading from a raw resource. */ + public static class RawResourceDataSourceException extends DataSourceException { + /** + * @deprecated Use {@link #RawResourceDataSourceException(String, Throwable, int)}. + */ + @Deprecated public RawResourceDataSourceException(String message) { - super(message); + super(message, /* cause= */ null, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } - public RawResourceDataSourceException(IOException e) { - super(e); + /** + * @deprecated Use {@link #RawResourceDataSourceException(String, Throwable, int)}. + */ + @Deprecated + public RawResourceDataSourceException(Throwable cause) { + super(cause, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); + } + + /** Creates a new instance. */ + public RawResourceDataSourceException( + @Nullable String message, + @Nullable Throwable cause, + @PlaybackException.ErrorCode int errorCode) { + super(message, cause, errorCode); } } @@ -66,6 +95,7 @@ public static Uri buildRawResourceUri(int rawResourceId) { public static final String RAW_RESOURCE_SCHEME = "rawresource"; private final Resources resources; + private final String packageName; @Nullable private Uri uri; @Nullable private AssetFileDescriptor assetFileDescriptor; @@ -79,61 +109,142 @@ public static Uri buildRawResourceUri(int rawResourceId) { public RawResourceDataSource(Context context) { super(/* isNetwork= */ false); this.resources = context.getResources(); + this.packageName = context.getPackageName(); } @Override public long open(DataSpec dataSpec) throws RawResourceDataSourceException { - try { - Uri uri = dataSpec.uri; - this.uri = uri; - if (!TextUtils.equals(RAW_RESOURCE_SCHEME, uri.getScheme())) { - throw new RawResourceDataSourceException("URI must use scheme " + RAW_RESOURCE_SCHEME); - } + Uri uri = dataSpec.uri; + this.uri = uri; - int resourceId; + int resourceId; + if (TextUtils.equals(RAW_RESOURCE_SCHEME, uri.getScheme()) + || (TextUtils.equals(ContentResolver.SCHEME_ANDROID_RESOURCE, uri.getScheme()) + && uri.getPathSegments().size() == 1 + && Assertions.checkNotNull(uri.getLastPathSegment()).matches("\\d+"))) { try { resourceId = Integer.parseInt(Assertions.checkNotNull(uri.getLastPathSegment())); } catch (NumberFormatException e) { - throw new RawResourceDataSourceException("Resource identifier must be an integer."); + throw new RawResourceDataSourceException( + "Resource identifier must be an integer.", + /* cause= */ null, + PlaybackException.ERROR_CODE_FAILED_RUNTIME_CHECK); } - - transferInitializing(dataSpec); - AssetFileDescriptor assetFileDescriptor = resources.openRawResourceFd(resourceId); - this.assetFileDescriptor = assetFileDescriptor; - if (assetFileDescriptor == null) { - throw new RawResourceDataSourceException("Resource is compressed: " + uri); + } else if (TextUtils.equals(ContentResolver.SCHEME_ANDROID_RESOURCE, uri.getScheme())) { + String path = Assertions.checkNotNull(uri.getPath()); + if (path.startsWith("/")) { + path = path.substring(1); + } + @Nullable String host = uri.getHost(); + String resourceName = (TextUtils.isEmpty(host) ? 
"" : (host + ":")) + path; + resourceId = + resources.getIdentifier( + resourceName, /* defType= */ "raw", /* defPackage= */ packageName); + if (resourceId == 0) { + throw new RawResourceDataSourceException( + "Resource not found.", + /* cause= */ null, + PlaybackException.ERROR_CODE_IO_FILE_NOT_FOUND); } - FileInputStream inputStream = new FileInputStream(assetFileDescriptor.getFileDescriptor()); - this.inputStream = inputStream; + } else { + throw new RawResourceDataSourceException( + "URI must either use scheme " + + RAW_RESOURCE_SCHEME + + " or " + + ContentResolver.SCHEME_ANDROID_RESOURCE, + /* cause= */ null, + PlaybackException.ERROR_CODE_FAILED_RUNTIME_CHECK); + } + + transferInitializing(dataSpec); - inputStream.skip(assetFileDescriptor.getStartOffset()); - long skipped = inputStream.skip(dataSpec.position); - if (skipped < dataSpec.position) { + AssetFileDescriptor assetFileDescriptor; + try { + assetFileDescriptor = resources.openRawResourceFd(resourceId); + } catch (Resources.NotFoundException e) { + throw new RawResourceDataSourceException( + /* message= */ null, e, PlaybackException.ERROR_CODE_IO_FILE_NOT_FOUND); + } + + this.assetFileDescriptor = assetFileDescriptor; + if (assetFileDescriptor == null) { + throw new RawResourceDataSourceException( + "Resource is compressed: " + uri, + /* cause= */ null, + PlaybackException.ERROR_CODE_IO_UNSPECIFIED); + } + + long assetFileDescriptorLength = assetFileDescriptor.getLength(); + FileInputStream inputStream = new FileInputStream(assetFileDescriptor.getFileDescriptor()); + this.inputStream = inputStream; + + try { + // We can't rely only on the "skipped < dataSpec.position" check below to detect whether the + // position is beyond the end of the resource being read. This is because the file will + // typically contain multiple resources, and there's nothing to prevent InputStream.skip() + // from succeeding by skipping into the data of the next resource. Hence we also need to check + // against the resource length explicitly, which is guaranteed to be set unless the resource + // extends to the end of the file. + if (assetFileDescriptorLength != AssetFileDescriptor.UNKNOWN_LENGTH + && dataSpec.position > assetFileDescriptorLength) { + throw new RawResourceDataSourceException( + /* message= */ null, + /* cause= */ null, + PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE); + } + long assetFileDescriptorOffset = assetFileDescriptor.getStartOffset(); + long skipped = + inputStream.skip(assetFileDescriptorOffset + dataSpec.position) + - assetFileDescriptorOffset; + if (skipped != dataSpec.position) { // We expect the skip to be satisfied in full. If it isn't then we're probably trying to - // skip beyond the end of the data. - throw new EOFException(); + // read beyond the end of the last resource in the file. + throw new RawResourceDataSourceException( + /* message= */ null, + /* cause= */ null, + PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE); } - if (dataSpec.length != C.LENGTH_UNSET) { - bytesRemaining = dataSpec.length; + if (assetFileDescriptorLength == AssetFileDescriptor.UNKNOWN_LENGTH) { + // The asset must extend to the end of the file. We can try and resolve the length with + // FileInputStream.getChannel().size(). + FileChannel channel = inputStream.getChannel(); + if (channel.size() == 0) { + bytesRemaining = C.LENGTH_UNSET; + } else { + bytesRemaining = channel.size() - channel.position(); + if (bytesRemaining < 0) { + // The skip above was satisfied in full, but skipped beyond the end of the file. 
+ throw new RawResourceDataSourceException( + /* message= */ null, + /* cause= */ null, + PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE); + } + } } else { - long assetFileDescriptorLength = assetFileDescriptor.getLength(); - // If the length is UNKNOWN_LENGTH then the asset extends to the end of the file. - bytesRemaining = assetFileDescriptorLength == AssetFileDescriptor.UNKNOWN_LENGTH - ? C.LENGTH_UNSET : (assetFileDescriptorLength - dataSpec.position); + bytesRemaining = assetFileDescriptorLength - skipped; + if (bytesRemaining < 0) { + throw new DataSourceException(PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE); + } } + } catch (RawResourceDataSourceException e) { + throw e; } catch (IOException e) { - throw new RawResourceDataSourceException(e); + throw new RawResourceDataSourceException( + /* message= */ null, e, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } + if (dataSpec.length != C.LENGTH_UNSET) { + bytesRemaining = + bytesRemaining == C.LENGTH_UNSET ? dataSpec.length : min(bytesRemaining, dataSpec.length); + } opened = true; transferStarted(dataSpec); - - return bytesRemaining; + return dataSpec.length != C.LENGTH_UNSET ? dataSpec.length : bytesRemaining; } @Override - public int read(byte[] buffer, int offset, int readLength) throws RawResourceDataSourceException { - if (readLength == 0) { + public int read(byte[] buffer, int offset, int length) throws RawResourceDataSourceException { + if (length == 0) { return 0; } else if (bytesRemaining == 0) { return C.RESULT_END_OF_INPUT; @@ -141,17 +252,21 @@ public int read(byte[] buffer, int offset, int readLength) throws RawResourceDat int bytesRead; try { - int bytesToRead = bytesRemaining == C.LENGTH_UNSET ? readLength - : (int) Math.min(bytesRemaining, readLength); + int bytesToRead = + bytesRemaining == C.LENGTH_UNSET ? length : (int) min(bytesRemaining, length); bytesRead = castNonNull(inputStream).read(buffer, offset, bytesToRead); } catch (IOException e) { - throw new RawResourceDataSourceException(e); + throw new RawResourceDataSourceException( + /* message= */ null, e, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } if (bytesRead == -1) { if (bytesRemaining != C.LENGTH_UNSET) { // End of stream reached having not read sufficient data. 
- throw new RawResourceDataSourceException(new EOFException()); + throw new RawResourceDataSourceException( + "End of stream reached having not read sufficient data.", + new EOFException(), + PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } return C.RESULT_END_OF_INPUT; } @@ -177,7 +292,8 @@ public void close() throws RawResourceDataSourceException { inputStream.close(); } } catch (IOException e) { - throw new RawResourceDataSourceException(e); + throw new RawResourceDataSourceException( + /* message= */ null, e, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } finally { inputStream = null; try { @@ -185,7 +301,8 @@ public void close() throws RawResourceDataSourceException { assetFileDescriptor.close(); } } catch (IOException e) { - throw new RawResourceDataSourceException(e); + throw new RawResourceDataSourceException( + /* message= */ null, e, PlaybackException.ERROR_CODE_IO_UNSPECIFIED); } finally { assetFileDescriptor = null; if (opened) { @@ -195,5 +312,4 @@ public void close() throws RawResourceDataSourceException { } } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ResolvingDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ResolvingDataSource.java index 412f866e99..56301245e6 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ResolvingDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/ResolvingDataSource.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.upstream; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + import android.net.Uri; import androidx.annotation.Nullable; import java.io.IOException; @@ -95,6 +97,7 @@ public ResolvingDataSource(DataSource upstreamDataSource, Resolver resolver) { @Override public void addTransferListener(TransferListener transferListener) { + checkNotNull(transferListener); upstreamDataSource.addTransferListener(transferListener); } @@ -106,14 +109,14 @@ public long open(DataSpec dataSpec) throws IOException { } @Override - public int read(byte[] buffer, int offset, int readLength) throws IOException { - return upstreamDataSource.read(buffer, offset, readLength); + public int read(byte[] buffer, int offset, int length) throws IOException { + return upstreamDataSource.read(buffer, offset, length); } - @Nullable @Override + @Nullable public Uri getUri() { - Uri reportedUri = upstreamDataSource.getUri(); + @Nullable Uri reportedUri = upstreamDataSource.getUri(); return reportedUri == null ? null : resolver.resolveReportedUri(reportedUri); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SlidingPercentile.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/SlidingPercentile.java similarity index 89% rename from TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SlidingPercentile.java rename to TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/SlidingPercentile.java index c9c21023c3..198aa0057e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SlidingPercentile.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/SlidingPercentile.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.google.android.exoplayer2.util; +package com.google.android.exoplayer2.upstream; import java.util.ArrayList; import java.util.Collections; @@ -24,10 +24,10 @@ * configured. 
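Tying the RawResourceDataSource changes above together, a raw resource can now be opened through any of the supported URI forms; a rough sketch (the helper class is hypothetical):

    import android.content.Context;
    import android.net.Uri;
    import com.google.android.exoplayer2.upstream.DataSpec;
    import com.google.android.exoplayer2.upstream.RawResourceDataSource;
    import java.io.IOException;

    final class RawResourceSketch {
      /** Opens a raw resource and returns the length reported by open(), which may be C.LENGTH_UNSET. */
      static long openRawResource(Context context, int rawResourceId) throws IOException {
        // rawresource:///<id>; android.resource:///<id> and
        // android.resource://<package>/raw/<name> are also accepted after this change.
        Uri uri = RawResourceDataSource.buildRawResourceUri(rawResourceId);
        RawResourceDataSource dataSource = new RawResourceDataSource(context);
        try {
          return dataSource.open(new DataSpec.Builder().setUri(uri).build());
        } finally {
          dataSource.close();
        }
      }
    }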
Once the total weight of the values reaches the maximum weight, the oldest value is * reduced in weight until it reaches zero and is removed. This maintains a constant total weight, * equal to the maximum allowed, at the steady state. - *

      - * This class can be used for bandwidth estimation based on a sliding window of past transfer rate - * observations. This is an alternative to sliding mean and exponential averaging which suffer from - * susceptibility to outliers and slow adaptation to step functions. + * + *

      This class can be used for bandwidth estimation based on a sliding window of past transfer + * rate observations. This is an alternative to sliding mean and exponential averaging which suffer + * from susceptibility to outliers and slow adaptation to step functions. * * @see Wiki: Moving average * @see Wiki: Selection algorithm @@ -82,8 +82,8 @@ public void reset() { public void addSample(int weight, float value) { ensureSortedByIndex(); - Sample newSample = recycledSampleCount > 0 ? recycledSamples[--recycledSampleCount] - : new Sample(); + Sample newSample = + recycledSampleCount > 0 ? recycledSamples[--recycledSampleCount] : new Sample(); newSample.index = nextSampleIndex++; newSample.weight = weight; newSample.value = value; @@ -127,9 +127,7 @@ public float getPercentile(float percentile) { return samples.isEmpty() ? Float.NaN : samples.get(samples.size() - 1).value; } - /** - * Sorts the samples by index. - */ + /** Sorts the samples by index. */ private void ensureSortedByIndex() { if (currentSortOrder != SORT_ORDER_BY_INDEX) { Collections.sort(samples, INDEX_COMPARATOR); @@ -137,9 +135,7 @@ private void ensureSortedByIndex() { } } - /** - * Sorts the samples by value. - */ + /** Sorts the samples by value. */ private void ensureSortedByValue() { if (currentSortOrder != SORT_ORDER_BY_VALUE) { Collections.sort(samples, VALUE_COMPARATOR); @@ -152,7 +148,5 @@ private static class Sample { public int index; public int weight; public float value; - } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/StatsDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/StatsDataSource.java index 6cdc381ba2..325865b26a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/StatsDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/StatsDataSource.java @@ -72,6 +72,7 @@ public Map> getLastResponseHeaders() { @Override public void addTransferListener(TransferListener transferListener) { + Assertions.checkNotNull(transferListener); dataSource.addTransferListener(transferListener); } @@ -87,8 +88,8 @@ public long open(DataSpec dataSpec) throws IOException { } @Override - public int read(byte[] buffer, int offset, int readLength) throws IOException { - int bytesRead = dataSource.read(buffer, offset, readLength); + public int read(byte[] buffer, int offset, int length) throws IOException { + int bytesRead = dataSource.read(buffer, offset, length); if (bytesRead != C.RESULT_END_OF_INPUT) { this.bytesRead += bytesRead; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/TeeDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/TeeDataSource.java index f56f19a6ca..53e99cc346 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/TeeDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/TeeDataSource.java @@ -23,9 +23,7 @@ import java.util.List; import java.util.Map; -/** - * Tees data into a {@link DataSink} as the data is read. - */ +/** Tees data into a {@link DataSink} as the data is read. 
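As the SlidingPercentile javadoc above suggests, the class underpins bandwidth estimation; a minimal sketch of feeding weighted throughput samples and reading the median. The wrapper class and the sqrt-of-bytes weighting are illustrative, loosely following what ExoPlayer's bandwidth meter does:

    import com.google.android.exoplayer2.upstream.SlidingPercentile;

    final class BitrateMedianSketch {
      // Maximum total sample weight kept in the sliding window; 2000 is an illustrative value.
      private final SlidingPercentile percentile = new SlidingPercentile(/* maxWeight= */ 2000);

      void onTransferEnd(long bytesTransferred, long durationMs) {
        if (durationMs > 0) {
          float bitsPerSecond = (bytesTransferred * 8000f) / durationMs;
          // Weight longer transfers more heavily so short bursts don't dominate the estimate.
          percentile.addSample((int) Math.sqrt((double) bytesTransferred), bitsPerSecond);
        }
      }

      float medianBitrateEstimate() {
        // Returns Float.NaN until at least one sample has been added.
        return percentile.getPercentile(0.5f);
      }
    }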
*/ public final class TeeDataSource implements DataSource { private final DataSource upstream; @@ -45,6 +43,7 @@ public TeeDataSource(DataSource upstream, DataSink dataSink) { @Override public void addTransferListener(TransferListener transferListener) { + Assertions.checkNotNull(transferListener); upstream.addTransferListener(transferListener); } @@ -64,11 +63,11 @@ public long open(DataSpec dataSpec) throws IOException { } @Override - public int read(byte[] buffer, int offset, int max) throws IOException { + public int read(byte[] buffer, int offset, int length) throws IOException { if (bytesRemaining == 0) { return C.RESULT_END_OF_INPUT; } - int bytesRead = upstream.read(buffer, offset, max); + int bytesRead = upstream.read(buffer, offset, length); if (bytesRead > 0) { // TODO: Consider continuing even if writes to the sink fail. dataSink.write(buffer, offset, bytesRead); @@ -101,5 +100,4 @@ public void close() throws IOException { } } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/TimeToFirstByteEstimator.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/TimeToFirstByteEstimator.java new file mode 100644 index 0000000000..7a8b38018a --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/TimeToFirstByteEstimator.java @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.upstream; + +import com.google.android.exoplayer2.C; + +/** Provides an estimate of the time to first byte of a transfer. */ +public interface TimeToFirstByteEstimator { + /** + * Returns the estimated time to first byte of the response body, in microseconds, or {@link + * C#TIME_UNSET} if no estimate is available. + */ + long getTimeToFirstByteEstimateUs(); + + /** Resets the estimator. */ + void reset(); + + /** + * Called when a transfer is being initialized. + * + * @param dataSpec Describes the data for which the transfer is initialized. + */ + void onTransferInitializing(DataSpec dataSpec); + + /** + * Called when a transfer starts. + * + * @param dataSpec Describes the data being transferred. + */ + void onTransferStart(DataSpec dataSpec); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/TransferListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/TransferListener.java index a8971e71a4..806efca73f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/TransferListener.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/TransferListener.java @@ -61,7 +61,7 @@ public interface TransferListener { * @param source The source performing the transfer. * @param dataSpec Describes the data being transferred. * @param isNetwork Whether the data is transferred through a network. 
- * @param bytesTransferred The number of bytes transferred since the previous call to this method + * @param bytesTransferred The number of bytes transferred since the previous call to this method. */ void onBytesTransferred( DataSource source, DataSpec dataSpec, boolean isNetwork, int bytesTransferred); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/UdpDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/UdpDataSource.java index 4d9b375334..19ea9726f1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/UdpDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/UdpDataSource.java @@ -15,39 +15,47 @@ */ package com.google.android.exoplayer2.upstream; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.Math.min; + import android.net.Uri; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.PlaybackException; import java.io.IOException; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.MulticastSocket; -import java.net.SocketException; +import java.net.SocketTimeoutException; /** A UDP {@link DataSource}. */ public final class UdpDataSource extends BaseDataSource { - /** - * Thrown when an error is encountered when trying to read from a {@link UdpDataSource}. - */ - public static final class UdpDataSourceException extends IOException { - - public UdpDataSourceException(IOException cause) { - super(cause); + /** Thrown when an error is encountered when trying to read from a {@link UdpDataSource}. */ + public static final class UdpDataSourceException extends DataSourceException { + + /** + * Creates a {@code UdpDataSourceException}. + * + * @param cause The error cause. + * @param errorCode Reason of the error, should be one of the {@code ERROR_CODE_IO_*} in {@link + * PlaybackException.ErrorCode}. + */ + public UdpDataSourceException(Throwable cause, @PlaybackException.ErrorCode int errorCode) { + super(cause, errorCode); } - } - /** - * The default maximum datagram packet size, in bytes. - */ + /** The default maximum datagram packet size, in bytes. */ public static final int DEFAULT_MAX_PACKET_SIZE = 2000; /** The default socket timeout, in milliseconds. 
*/ public static final int DEFAULT_SOCKET_TIMEOUT_MILLIS = 8 * 1000; + public static final int UDP_PORT_UNSET = -1; + private final int socketTimeoutMillis; private final byte[] packetBuffer; private final DatagramPacket packet; @@ -56,7 +64,6 @@ public UdpDataSourceException(IOException cause) { @Nullable private DatagramSocket socket; @Nullable private MulticastSocket multicastSocket; @Nullable private InetAddress address; - @Nullable private InetSocketAddress socketAddress; private boolean opened; private int packetRemaining; @@ -91,12 +98,12 @@ public UdpDataSource(int maxPacketSize, int socketTimeoutMillis) { @Override public long open(DataSpec dataSpec) throws UdpDataSourceException { uri = dataSpec.uri; - String host = uri.getHost(); + String host = checkNotNull(uri.getHost()); int port = uri.getPort(); transferInitializing(dataSpec); try { address = InetAddress.getByName(host); - socketAddress = new InetSocketAddress(address, port); + InetSocketAddress socketAddress = new InetSocketAddress(address, port); if (address.isMulticastAddress()) { multicastSocket = new MulticastSocket(socketAddress); multicastSocket.joinGroup(address); @@ -104,14 +111,12 @@ public long open(DataSpec dataSpec) throws UdpDataSourceException { } else { socket = new DatagramSocket(socketAddress); } - } catch (IOException e) { - throw new UdpDataSourceException(e); - } - - try { socket.setSoTimeout(socketTimeoutMillis); - } catch (SocketException e) { - throw new UdpDataSourceException(e); + } catch (SecurityException e) { + throw new UdpDataSourceException(e, PlaybackException.ERROR_CODE_IO_NO_PERMISSION); + } catch (IOException e) { + throw new UdpDataSourceException( + e, PlaybackException.ERROR_CODE_IO_NETWORK_CONNECTION_FAILED); } opened = true; @@ -120,24 +125,28 @@ public long open(DataSpec dataSpec) throws UdpDataSourceException { } @Override - public int read(byte[] buffer, int offset, int readLength) throws UdpDataSourceException { - if (readLength == 0) { + public int read(byte[] buffer, int offset, int length) throws UdpDataSourceException { + if (length == 0) { return 0; } if (packetRemaining == 0) { // We've read all of the data from the current packet. Get another. try { - socket.receive(packet); + checkNotNull(socket).receive(packet); + } catch (SocketTimeoutException e) { + throw new UdpDataSourceException( + e, PlaybackException.ERROR_CODE_IO_NETWORK_CONNECTION_TIMEOUT); } catch (IOException e) { - throw new UdpDataSourceException(e); + throw new UdpDataSourceException( + e, PlaybackException.ERROR_CODE_IO_NETWORK_CONNECTION_FAILED); } packetRemaining = packet.getLength(); bytesTransferred(packetRemaining); } int packetOffset = packet.getLength() - packetRemaining; - int bytesToRead = Math.min(packetRemaining, readLength); + int bytesToRead = min(packetRemaining, length); System.arraycopy(packetBuffer, packetOffset, buffer, offset, bytesToRead); packetRemaining -= bytesToRead; return bytesToRead; @@ -154,7 +163,7 @@ public void close() { uri = null; if (multicastSocket != null) { try { - multicastSocket.leaveGroup(address); + multicastSocket.leaveGroup(checkNotNull(address)); } catch (IOException e) { // Do nothing. 
} @@ -165,7 +174,6 @@ public void close() { socket = null; } address = null; - socketAddress = null; packetRemaining = 0; if (opened) { opened = false; @@ -173,4 +181,14 @@ public void close() { } } + /** + * Returns the local port number opened for the UDP connection, or {@link #UDP_PORT_UNSET} if no + * connection is open + */ + public int getLocalPort() { + if (socket == null) { + return UDP_PORT_UNSET; + } + return socket.getLocalPort(); + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/Cache.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/Cache.java index 1d504159e6..cdce51aa1c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/Cache.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/Cache.java @@ -24,13 +24,24 @@ import java.util.Set; /** - * An interface for cache. + * A cache that supports partial caching of resources. + * + *
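The new UdpDataSource.getLocalPort() above can be queried once a datagram socket is open; a rough sketch (the helper class and the multicast address are hypothetical):

    import android.net.Uri;
    import com.google.android.exoplayer2.upstream.DataSpec;
    import com.google.android.exoplayer2.upstream.UdpDataSource;
    import java.io.IOException;

    final class UdpPortSketch {
      /** Opens the source and returns the local port chosen for the connection. */
      static int openAndReportPort(UdpDataSource dataSource) throws IOException {
        // getLocalPort() returns UDP_PORT_UNSET until a socket has been opened.
        dataSource.open(new DataSpec.Builder().setUri(Uri.parse("udp://239.0.0.1:10000")).build());
        return dataSource.getLocalPort();
      }
    }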

      Terminology

      + * + *
        + *
      • A resource is a complete piece of logical data, for example a complete media file. + *
      • A cache key uniquely identifies a resource. URIs are often suitable for use as + * cache keys, however this is not always the case. URIs are not suitable when caching + * resources obtained from a service that generates multiple URIs for the same underlying + * resource, for example because the service uses expiring URIs as a form of access control. + *
      • A cache span is a byte range within a resource, which may or may not be cached. A + * cache span that's not cached is called a hole span. A cache span that is cached + * corresponds to a single underlying file in the cache. + *
      */ public interface Cache { - /** - * Listener of {@link Cache} events. - */ + /** Listener of {@link Cache} events. */ interface Listener { /** @@ -64,9 +75,7 @@ interface Listener { void onSpanTouched(Cache cache, CacheSpan oldSpan, CacheSpan newSpan); } - /** - * Thrown when an error is encountered when writing data. - */ + /** Thrown when an error is encountered when writing data. */ class CacheException extends IOException { public CacheException(String message) { @@ -108,57 +117,49 @@ public CacheException(String message, Throwable cause) { void release(); /** - * Registers a listener to listen for changes to a given key. + * Registers a listener to listen for changes to a given resource. * *

      No guarantees are made about the thread or threads on which the listener is called, but it * is guaranteed that listener methods will be called in a serial fashion (i.e. one at a time) and * in the same order as events occurred. * - * @param key The key to listen to. + * @param key The cache key of the resource. * @param listener The listener to add. - * @return The current spans for the key. + * @return The current spans for the resource. */ NavigableSet addListener(String key, Listener listener); /** * Unregisters a listener. * - * @param key The key to stop listening to. + * @param key The cache key of the resource. * @param listener The listener to remove. */ void removeListener(String key, Listener listener); /** - * Returns the cached spans for a given cache key. + * Returns the cached spans for a given resource. * - * @param key The key for which spans should be returned. + * @param key The cache key of the resource. * @return The spans for the key. */ NavigableSet getCachedSpans(String key); - /** - * Returns all keys in the cache. - * - * @return All the keys in the cache. - */ + /** Returns the cache keys of all of the resources that are at least partially cached. */ Set getKeys(); - /** - * Returns the total disk space in bytes used by the cache. - * - * @return The total disk space in bytes. - */ + /** Returns the total disk space in bytes used by the cache. */ long getCacheSpace(); /** - * A caller should invoke this method when they require data from a given position for a given - * key. + * A caller should invoke this method when they require data starting from a given position in a + * given resource. * *

      If there is a cache entry that overlaps the position, then the returned {@link CacheSpan} * defines the file in which the data is stored. {@link CacheSpan#isCached} is true. The caller * may read from the cache file, but does not acquire any locks. * - *

      If there is no cache entry overlapping {@code offset}, then the returned {@link CacheSpan} + *

      If there is no cache entry overlapping {@code position}, then the returned {@link CacheSpan} * defines a hole in the cache starting at {@code position} into which the caller may write as it * obtains the data from some other source. The returned {@link CacheSpan} serves as a lock. * Whilst the caller holds the lock it may write data into the hole. It may split data into @@ -168,38 +169,47 @@ public CacheException(String message, Throwable cause) { * *

      This method may be slow and shouldn't normally be called on the main thread. * - * @param key The key of the data being requested. - * @param position The position of the data being requested. + * @param key The cache key of the resource. + * @param position The starting position in the resource from which data is required. + * @param length The length of the data being requested, or {@link C#LENGTH_UNSET} if unbounded. + * The length is ignored if there is a cache entry that overlaps the position. Else, it + * defines the maximum length of the hole {@link CacheSpan} that's returned. Cache + * implementations may support parallel writes into non-overlapping holes, and so passing the + * actual required length should be preferred to passing {@link C#LENGTH_UNSET} when possible. * @return The {@link CacheSpan}. * @throws InterruptedException If the thread was interrupted. * @throws CacheException If an error is encountered. */ @WorkerThread - CacheSpan startReadWrite(String key, long position) throws InterruptedException, CacheException; + CacheSpan startReadWrite(String key, long position, long length) + throws InterruptedException, CacheException; /** - * Same as {@link #startReadWrite(String, long)}. However, if the cache entry is locked, then - * instead of blocking, this method will return null as the {@link CacheSpan}. + * Same as {@link #startReadWrite(String, long, long)}. However, if the cache entry is locked, + * then instead of blocking, this method will return null as the {@link CacheSpan}. * *

      This method may be slow and shouldn't normally be called on the main thread. * - * @param key The key of the data being requested. - * @param position The position of the data being requested. + * @param key The cache key of the resource. + * @param position The starting position in the resource from which data is required. + * @param length The length of the data being requested, or {@link C#LENGTH_UNSET} if unbounded. + * The length is ignored if there is a cache entry that overlaps the position. Else, it + * defines the range of data locked by the returned {@link CacheSpan}. * @return The {@link CacheSpan}. Or null if the cache entry is locked. * @throws CacheException If an error is encountered. */ @WorkerThread @Nullable - CacheSpan startReadWriteNonBlocking(String key, long position) throws CacheException; + CacheSpan startReadWriteNonBlocking(String key, long position, long length) throws CacheException; /** * Obtains a cache file into which data can be written. Must only be called when holding a - * corresponding hole {@link CacheSpan} obtained from {@link #startReadWrite(String, long)}. + * corresponding hole {@link CacheSpan} obtained from {@link #startReadWrite(String, long, long)}. * *
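To make the non-blocking contract above concrete, here is a rough sketch of a caller reacting to the three possible outcomes. It is illustrative only: the names cache, key and position are assumed to exist in the caller's scope, the requested length is an arbitrary example value, and handling of the checked Cache.CacheException is omitted.

    // Sketch only: cache, key and position are assumed names, not part of this change.
    @Nullable CacheSpan span =
        cache.startReadWriteNonBlocking(key, position, /* length= */ 512 * 1024);
    if (span == null) {
      // Another caller holds a lock overlapping this region; skip caching for now.
    } else if (span.isCached) {
      // span.file holds the cached bytes starting at span.position; read it without extra locks.
    } else {
      try {
        // span is a hole lock covering at most the requested length; fill it from upstream here.
      } finally {
        cache.releaseHoleSpan(span);
      }
    }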

      This method may be slow and shouldn't normally be called on the main thread. * - * @param key The cache key for the data. - * @param position The starting position of the data. + * @param key The cache key of the resource being written. + * @param position The starting position in the resource from which data will be written. * @param length The length of the data being written, or {@link C#LENGTH_UNSET} if unknown. Used * only to ensure that there is enough space in the cache. * @return The file into which data should be written. @@ -210,7 +220,7 @@ public CacheException(String message, Throwable cause) { /** * Commits a file into the cache. Must only be called when holding a corresponding hole {@link - * CacheSpan} obtained from {@link #startReadWrite(String, long)}. + * CacheSpan} obtained from {@link #startReadWrite(String, long, long)}. * *

      This method may be slow and shouldn't normally be called on the main thread. * @@ -222,53 +232,75 @@ public CacheException(String message, Throwable cause) { void commitFile(File file, long length) throws CacheException; /** - * Releases a {@link CacheSpan} obtained from {@link #startReadWrite(String, long)} which + * Releases a {@link CacheSpan} obtained from {@link #startReadWrite(String, long, long)} which * corresponded to a hole in the cache. * * @param holeSpan The {@link CacheSpan} being released. */ void releaseHoleSpan(CacheSpan holeSpan); + /** + * Removes all {@link CacheSpan CacheSpans} for a resource, deleting the underlying files. + * + * @param key The cache key of the resource being removed. + */ + @WorkerThread + void removeResource(String key); + /** * Removes a cached {@link CacheSpan} from the cache, deleting the underlying file. * *
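Putting the hole-span write path above together (startReadWrite, then startFile, commitFile and finally releaseHoleSpan), the following is a minimal sketch of caching one already-downloaded block. It is not part of the change itself; the class and method names are hypothetical and error handling is reduced to the checked exceptions.

    import com.google.android.exoplayer2.upstream.cache.Cache;
    import com.google.android.exoplayer2.upstream.cache.CacheSpan;
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;

    final class CacheWriteSketch {
      // Hypothetical helper: caches data[] at position within the resource identified by key.
      static void writeBlock(Cache cache, String key, long position, byte[] data)
          throws InterruptedException, IOException {
        // Blocks while another caller holds a lock overlapping the requested region.
        CacheSpan span = cache.startReadWrite(key, position, data.length);
        if (!span.isHoleSpan()) {
          return; // The region is already cached; span.file could be read instead.
        }
        try {
          File file = cache.startFile(key, position, data.length);
          try (FileOutputStream output = new FileOutputStream(file)) {
            output.write(data);
          }
          cache.commitFile(file, data.length);
        } finally {
          cache.releaseHoleSpan(span); // Always give the hole lock back.
        }
      }
    }

This mirrors what CacheDataSink does internally; application code will normally go through CacheDataSink and CacheDataSource rather than driving these Cache methods directly.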

      This method may be slow and shouldn't normally be called on the main thread. * * @param span The {@link CacheSpan} to remove. - * @throws CacheException If an error is encountered. */ @WorkerThread - void removeSpan(CacheSpan span) throws CacheException; + void removeSpan(CacheSpan span); /** - * Queries if a range is entirely available in the cache. + * Returns whether the specified range of data in a resource is fully cached. * - * @param key The cache key for the data. - * @param position The starting position of the data. + * @param key The cache key of the resource. + * @param position The starting position of the data in the resource. * @param length The length of the data. * @return true if the data is available in the Cache otherwise false; */ boolean isCached(String key, long position, long length); /** - * Returns the length of the cached data block starting from the {@code position} to the block end - * up to {@code length} bytes. If the {@code position} isn't cached then -(the length of the gap - * to the next cached data up to {@code length} bytes) is returned. + * Returns the length of continuously cached data starting from {@code position}, up to a maximum + * of {@code maxLength}, of a resource. If {@code position} isn't cached then {@code -holeLength} + * is returned, where {@code holeLength} is the length of continuously uncached data starting from + * {@code position}, up to a maximum of {@code maxLength}. * - * @param key The cache key for the data. - * @param position The starting position of the data. - * @param length The maximum length of the data to be returned. - * @return The length of the cached or not cached data block length. + * @param key The cache key of the resource. + * @param position The starting position of the data in the resource. + * @param length The maximum length of the data or hole to be returned. {@link C#LENGTH_UNSET} is + * permitted, and is equivalent to passing {@link Long#MAX_VALUE}. + * @return The length of the continuously cached data, or {@code -holeLength} if {@code position} + * isn't cached. */ long getCachedLength(String key, long position, long length); /** - * Applies {@code mutations} to the {@link ContentMetadata} for the given key. A new {@link - * CachedContent} is added if there isn't one already with the given key. + * Returns the total number of cached bytes between {@code position} (inclusive) and {@code + * (position + length)} (exclusive) of a resource. + * + * @param key The cache key of the resource. + * @param position The starting position of the data in the resource. + * @param length The length of the data to check. {@link C#LENGTH_UNSET} is permitted, and is + * equivalent to passing {@link Long#MAX_VALUE}. + * @return The total number of cached bytes. + */ + long getCachedBytes(String key, long position, long length); + + /** + * Applies {@code mutations} to the {@link ContentMetadata} for the given resource. A new {@link + * CachedContent} is added if there isn't one already for the resource. * *
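As an illustration of the getCachedLength sign convention and of the new getCachedBytes method documented above, the sketch below walks a byte range and logs cached runs and holes. The names cache and key are assumed to exist, and the output format is arbitrary.

    // Sketch: report cached runs and holes in [position, position + length) of a resource.
    static void logCoverage(Cache cache, String key, long position, long length) {
      // Aggregate answer in a single call.
      long totalCached = cache.getCachedBytes(key, position, length);
      System.out.println("cached bytes in range: " + totalCached);
      // Walk the range run by run using the +run / -hole convention.
      long pos = position;
      long remaining = length;
      while (remaining > 0) {
        long run = cache.getCachedLength(key, pos, remaining);
        if (run > 0) {
          System.out.println("cached [" + pos + ", " + (pos + run) + ")");
        } else {
          run = -run; // A negative return value encodes the length of the hole.
          System.out.println("hole   [" + pos + ", " + (pos + run) + ")");
        }
        pos += run;
        remaining -= run;
      }
    }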

      This method may be slow and shouldn't normally be called on the main thread. * - * @param key The cache key for the data. + * @param key The cache key of the resource. * @param mutations Contains mutations to be applied to the metadata. * @throws CacheException If an error is encountered. */ @@ -277,10 +309,10 @@ void applyContentMetadataMutations(String key, ContentMetadataMutations mutation throws CacheException; /** - * Returns a {@link ContentMetadata} for the given key. + * Returns a {@link ContentMetadata} for the given resource. * - * @param key The cache key for the data. - * @return A {@link ContentMetadata} for the given key. + * @param key The cache key of the resource. + * @return The {@link ContentMetadata} for the resource. */ ContentMetadata getContentMetadata(String key); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheDataSink.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheDataSink.java index 22ed3892ec..8811faee28 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheDataSink.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheDataSink.java @@ -15,18 +15,24 @@ */ package com.google.android.exoplayer2.upstream.cache; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.min; + +import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.upstream.DataSink; import com.google.android.exoplayer2.upstream.DataSpec; import com.google.android.exoplayer2.upstream.cache.Cache.CacheException; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; -import com.google.android.exoplayer2.util.ReusableBufferedOutputStream; import com.google.android.exoplayer2.util.Util; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; /** * Writes data into a cache. @@ -37,6 +43,81 @@ */ public final class CacheDataSink implements DataSink { + /** {@link DataSink.Factory} for {@link CacheDataSink} instances. */ + public static final class Factory implements DataSink.Factory { + + private @MonotonicNonNull Cache cache; + private long fragmentSize; + private int bufferSize; + + /** Creates an instance. */ + public Factory() { + fragmentSize = CacheDataSink.DEFAULT_FRAGMENT_SIZE; + bufferSize = CacheDataSink.DEFAULT_BUFFER_SIZE; + } + + /** + * Sets the cache to which data will be written. + * + *

      Must be called before the factory is used. + * + * @param cache The cache to which data will be written. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setCache(Cache cache) { + this.cache = cache; + return this; + } + + /** + * Sets the cache file fragment size. For requests that should be fragmented into multiple cache + * files, this is the maximum size of a cache file in bytes. If set to {@link C#LENGTH_UNSET} + * then no fragmentation will occur. Using a small value allows for finer-grained cache eviction + * policies, at the cost of increased overhead both on the cache implementation and the file + * system. Values under {@code (2 * 1024 * 1024)} are not recommended. + * + *

      The default value is {@link CacheDataSink#DEFAULT_FRAGMENT_SIZE}. + * + * @param fragmentSize The fragment size in bytes, or {@link C#LENGTH_UNSET} to disable + * fragmentation. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setFragmentSize(long fragmentSize) { + this.fragmentSize = fragmentSize; + return this; + } + + /** + * Sets the size of an in-memory buffer used when writing to a cache file. A zero or negative + * value disables buffering. + * + *
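A minimal sketch of the new fluent construction path that replaces the deleted CacheDataSinkFactory. The Cache instance named cache is assumed to already exist, and the fragment and buffer sizes shown are simply the defaults spelled out explicitly.

    // Sketch: cache is an existing com.google.android.exoplayer2.upstream.cache.Cache instance.
    DataSink.Factory cacheWriteSinkFactory =
        new CacheDataSink.Factory()
            .setCache(cache) // Must be set before the factory is used.
            .setFragmentSize(CacheDataSink.DEFAULT_FRAGMENT_SIZE)
            .setBufferSize(CacheDataSink.DEFAULT_BUFFER_SIZE);
    DataSink cacheWriteSink = cacheWriteSinkFactory.createDataSink();

In most setups the factory is handed to CacheDataSource.Factory#setCacheWriteDataSinkFactory rather than used to create sinks directly.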

      The default value is {@link CacheDataSink#DEFAULT_BUFFER_SIZE}. + * + * @param bufferSize The buffer size in bytes. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setBufferSize(int bufferSize) { + this.bufferSize = bufferSize; + return this; + } + + @Override + public DataSink createDataSink() { + return new CacheDataSink(checkNotNull(cache), fragmentSize, bufferSize); + } + } + + /** Thrown when an {@link IOException} is encountered when writing data to the sink. */ + public static final class CacheDataSinkException extends CacheException { + + public CacheDataSinkException(IOException cause) { + super(cause); + } + } + /** Default {@code fragmentSize} recommended for caching use cases. */ public static final long DEFAULT_FRAGMENT_SIZE = 5 * 1024 * 1024; /** Default buffer size in bytes. */ @@ -49,24 +130,13 @@ public final class CacheDataSink implements DataSink { private final long fragmentSize; private final int bufferSize; - private DataSpec dataSpec; + @Nullable private DataSpec dataSpec; private long dataSpecFragmentSize; - private File file; - private OutputStream outputStream; + @Nullable private File file; + @Nullable private OutputStream outputStream; private long outputStreamBytesWritten; private long dataSpecBytesWritten; - private ReusableBufferedOutputStream bufferedOutputStream; - - /** - * Thrown when IOException is encountered when writing data into sink. - */ - public static class CacheDataSinkException extends CacheException { - - public CacheDataSinkException(IOException cause) { - super(cause); - } - - } + private @MonotonicNonNull ReusableBufferedOutputStream bufferedOutputStream; /** * Constructs an instance using {@link #DEFAULT_BUFFER_SIZE}. @@ -103,13 +173,14 @@ public CacheDataSink(Cache cache, long fragmentSize, int bufferSize) { + MIN_RECOMMENDED_FRAGMENT_SIZE + ". This may cause poor cache performance."); } - this.cache = Assertions.checkNotNull(cache); + this.cache = checkNotNull(cache); this.fragmentSize = fragmentSize == C.LENGTH_UNSET ? Long.MAX_VALUE : fragmentSize; this.bufferSize = bufferSize; } @Override public void open(DataSpec dataSpec) throws CacheDataSinkException { + checkNotNull(dataSpec.key); if (dataSpec.length == C.LENGTH_UNSET && dataSpec.isFlagSet(DataSpec.FLAG_DONT_CACHE_IF_LENGTH_UNKNOWN)) { this.dataSpec = null; @@ -120,7 +191,7 @@ public void open(DataSpec dataSpec) throws CacheDataSinkException { dataSpec.isFlagSet(DataSpec.FLAG_ALLOW_CACHE_FRAGMENTATION) ? 
fragmentSize : Long.MAX_VALUE; dataSpecBytesWritten = 0; try { - openNextOutputStream(); + openNextOutputStream(dataSpec); } catch (IOException e) { throw new CacheDataSinkException(e); } @@ -128,6 +199,7 @@ public void open(DataSpec dataSpec) throws CacheDataSinkException { @Override public void write(byte[] buffer, int offset, int length) throws CacheDataSinkException { + @Nullable DataSpec dataSpec = this.dataSpec; if (dataSpec == null) { return; } @@ -136,11 +208,11 @@ public void write(byte[] buffer, int offset, int length) throws CacheDataSinkExc while (bytesWritten < length) { if (outputStreamBytesWritten == dataSpecFragmentSize) { closeCurrentOutputStream(); - openNextOutputStream(); + openNextOutputStream(dataSpec); } int bytesToWrite = - (int) Math.min(length - bytesWritten, dataSpecFragmentSize - outputStreamBytesWritten); - outputStream.write(buffer, offset + bytesWritten, bytesToWrite); + (int) min(length - bytesWritten, dataSpecFragmentSize - outputStreamBytesWritten); + castNonNull(outputStream).write(buffer, offset + bytesWritten, bytesToWrite); bytesWritten += bytesToWrite; outputStreamBytesWritten += bytesToWrite; dataSpecBytesWritten += bytesToWrite; @@ -162,19 +234,19 @@ public void close() throws CacheDataSinkException { } } - private void openNextOutputStream() throws IOException { + private void openNextOutputStream(DataSpec dataSpec) throws IOException { long length = dataSpec.length == C.LENGTH_UNSET ? C.LENGTH_UNSET - : Math.min(dataSpec.length - dataSpecBytesWritten, dataSpecFragmentSize); + : min(dataSpec.length - dataSpecBytesWritten, dataSpecFragmentSize); file = cache.startFile( - dataSpec.key, dataSpec.absoluteStreamPosition + dataSpecBytesWritten, length); + castNonNull(dataSpec.key), dataSpec.position + dataSpecBytesWritten, length); FileOutputStream underlyingFileOutputStream = new FileOutputStream(file); if (bufferSize > 0) { if (bufferedOutputStream == null) { - bufferedOutputStream = new ReusableBufferedOutputStream(underlyingFileOutputStream, - bufferSize); + bufferedOutputStream = + new ReusableBufferedOutputStream(underlyingFileOutputStream, bufferSize); } else { bufferedOutputStream.reset(underlyingFileOutputStream); } @@ -197,7 +269,7 @@ private void closeCurrentOutputStream() throws IOException { } finally { Util.closeQuietly(outputStream); outputStream = null; - File fileToCommit = file; + File fileToCommit = castNonNull(file); file = null; if (success) { cache.commitFile(fileToCommit, outputStreamBytesWritten); @@ -206,5 +278,4 @@ private void closeCurrentOutputStream() throws IOException { } } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheDataSinkFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheDataSinkFactory.java deleted file mode 100644 index ce9735badd..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheDataSinkFactory.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.upstream.cache; - -import com.google.android.exoplayer2.upstream.DataSink; - -/** - * A {@link DataSink.Factory} that produces {@link CacheDataSink}. - */ -public final class CacheDataSinkFactory implements DataSink.Factory { - - private final Cache cache; - private final long fragmentSize; - private final int bufferSize; - - /** @see CacheDataSink#CacheDataSink(Cache, long) */ - public CacheDataSinkFactory(Cache cache, long fragmentSize) { - this(cache, fragmentSize, CacheDataSink.DEFAULT_BUFFER_SIZE); - } - - /** @see CacheDataSink#CacheDataSink(Cache, long, int) */ - public CacheDataSinkFactory(Cache cache, long fragmentSize, int bufferSize) { - this.cache = cache; - this.fragmentSize = fragmentSize; - this.bufferSize = bufferSize; - } - - @Override - public DataSink createDataSink() { - return new CacheDataSink(cache, fragmentSize, bufferSize); - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheDataSource.java index 94ec2c6dff..713c0c6f8d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheDataSource.java @@ -15,28 +15,43 @@ */ package com.google.android.exoplayer2.upstream.cache; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.min; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.net.Uri; import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.PlaybackException; import com.google.android.exoplayer2.upstream.DataSink; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSourceException; import com.google.android.exoplayer2.upstream.DataSpec; -import com.google.android.exoplayer2.upstream.DataSpec.HttpMethod; import com.google.android.exoplayer2.upstream.FileDataSource; +import com.google.android.exoplayer2.upstream.PlaceholderDataSource; +import com.google.android.exoplayer2.upstream.PriorityDataSource; import com.google.android.exoplayer2.upstream.TeeDataSource; import com.google.android.exoplayer2.upstream.TransferListener; import com.google.android.exoplayer2.upstream.cache.Cache.CacheException; import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.PriorityTaskManager; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.IOException; import java.io.InterruptedIOException; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.util.Collections; import java.util.List; import java.util.Map; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; /** * A {@link DataSource} that reads and writes a {@link Cache}. 
Requests are fulfilled from the cache @@ -45,6 +60,283 @@ */ public final class CacheDataSource implements DataSource { + /** {@link DataSource.Factory} for {@link CacheDataSource} instances. */ + public static final class Factory implements DataSource.Factory { + + private @MonotonicNonNull Cache cache; + private DataSource.Factory cacheReadDataSourceFactory; + @Nullable private DataSink.Factory cacheWriteDataSinkFactory; + private CacheKeyFactory cacheKeyFactory; + private boolean cacheIsReadOnly; + @Nullable private DataSource.Factory upstreamDataSourceFactory; + @Nullable private PriorityTaskManager upstreamPriorityTaskManager; + private int upstreamPriority; + private @CacheDataSource.Flags int flags; + @Nullable private CacheDataSource.EventListener eventListener; + + public Factory() { + cacheReadDataSourceFactory = new FileDataSource.Factory(); + cacheKeyFactory = CacheKeyFactory.DEFAULT; + } + + /** + * Sets the cache that will be used. + * + *

      Must be called before the factory is used. + * + * @param cache The cache that will be used. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setCache(Cache cache) { + this.cache = cache; + return this; + } + + /** + * Returns the cache that will be used, or {@code null} if {@link #setCache} has yet to be + * called. + */ + @Nullable + public Cache getCache() { + return cache; + } + + /** + * Sets the {@link DataSource.Factory} for {@link DataSource DataSources} for reading from the + * cache. + * + *

      The default is a {@link FileDataSource.Factory} in its default configuration. + * + * @param cacheReadDataSourceFactory The {@link DataSource.Factory} for reading from the cache. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setCacheReadDataSourceFactory(DataSource.Factory cacheReadDataSourceFactory) { + this.cacheReadDataSourceFactory = cacheReadDataSourceFactory; + return this; + } + + /** + * Sets the {@link DataSink.Factory} for generating {@link DataSink DataSinks} for writing data + * to the cache. Passing {@code null} causes the cache to be read-only. + * + *

      The default is a {@link CacheDataSink.Factory} in its default configuration. + * + * @param cacheWriteDataSinkFactory The {@link DataSink.Factory} for generating {@link DataSink + * DataSinks} for writing data to the cache, or {@code null} to disable writing. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setCacheWriteDataSinkFactory( + @Nullable DataSink.Factory cacheWriteDataSinkFactory) { + this.cacheWriteDataSinkFactory = cacheWriteDataSinkFactory; + this.cacheIsReadOnly = cacheWriteDataSinkFactory == null; + return this; + } + + /** + * Sets the {@link CacheKeyFactory}. + * + *

      The default is {@link CacheKeyFactory#DEFAULT}. + * + * @param cacheKeyFactory The {@link CacheKeyFactory}. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setCacheKeyFactory(CacheKeyFactory cacheKeyFactory) { + this.cacheKeyFactory = cacheKeyFactory; + return this; + } + + /** Returns the {@link CacheKeyFactory} that will be used. */ + public CacheKeyFactory getCacheKeyFactory() { + return cacheKeyFactory; + } + + /** + * Sets the {@link DataSource.Factory} for upstream {@link DataSource DataSources}, which are + * used to read data in the case of a cache miss. + * + *
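Taken together, these setters replace the deleted CacheDataSourceFactory. A hedged sketch of typical wiring follows; cache is an assumed existing Cache, and DefaultHttpDataSource.Factory stands in for whatever upstream DataSource.Factory the application actually uses.

    // Sketch only: cache already exists; the upstream factory is just an example choice.
    DataSource.Factory cacheDataSourceFactory =
        new CacheDataSource.Factory()
            .setCache(cache) // Must be set before the factory is used.
            .setUpstreamDataSourceFactory(new DefaultHttpDataSource.Factory());
    // Each created source reads through the cache and, on a miss, fetches from upstream
    // while writing the data back via a default CacheDataSink.Factory.
    DataSource dataSource = cacheDataSourceFactory.createDataSource();

Calling setCacheWriteDataSinkFactory(null) on the same builder would make the cache read-only, and omitting setUpstreamDataSourceFactory causes cache misses to fail.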

      The default is {@code null}, and so this method must be called before the factory is used + * in order for data to be read from upstream in the case of a cache miss. + * + * @param upstreamDataSourceFactory The upstream {@link DataSource} for reading data not in the + * cache, or {@code null} to cause failure in the case of a cache miss. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setUpstreamDataSourceFactory( + @Nullable DataSource.Factory upstreamDataSourceFactory) { + this.upstreamDataSourceFactory = upstreamDataSourceFactory; + return this; + } + + /** + * Sets an optional {@link PriorityTaskManager} to use when requesting data from upstream. + * + *

      If set, reads from the upstream {@link DataSource} will only be allowed to proceed if + * there are no higher priority tasks registered to the {@link PriorityTaskManager}. If there + * exists a higher priority task then {@link PriorityTaskManager.PriorityTooLowException} will + * be thrown instead. + * + *

      Note that requests to {@link CacheDataSource} instances are intended to be used as parts + * of (possibly larger) tasks that are registered with the {@link PriorityTaskManager}, and + * hence {@link CacheDataSource} does not register a task by itself. This must be done + * by the surrounding code that uses the {@link CacheDataSource} instances. + * + *

      The default is {@code null}. + * + * @param upstreamPriorityTaskManager The upstream {@link PriorityTaskManager}. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setUpstreamPriorityTaskManager( + @Nullable PriorityTaskManager upstreamPriorityTaskManager) { + this.upstreamPriorityTaskManager = upstreamPriorityTaskManager; + return this; + } + + /** + * Returns the {@link PriorityTaskManager} that will be used when requesting data from upstream, + * or {@code null} if there is none. + */ + @Nullable + public PriorityTaskManager getUpstreamPriorityTaskManager() { + return upstreamPriorityTaskManager; + } + + /** + * Sets the priority to use when requesting data from upstream. The priority is only used if a + * {@link PriorityTaskManager} is set by calling {@link #setUpstreamPriorityTaskManager}. + * + *
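Since CacheDataSource does not register a task by itself, the surrounding code is expected to bracket its work with the PriorityTaskManager, roughly as in this sketch. The names cache and upstreamFactory are assumed to exist, and the playback priority is just the default value made explicit.

    PriorityTaskManager priorityTaskManager = new PriorityTaskManager();
    CacheDataSource dataSource =
        new CacheDataSource.Factory()
            .setCache(cache)
            .setUpstreamDataSourceFactory(upstreamFactory)
            .setUpstreamPriorityTaskManager(priorityTaskManager)
            .setUpstreamPriority(C.PRIORITY_PLAYBACK)
            .createDataSource();
    priorityTaskManager.add(C.PRIORITY_PLAYBACK); // Register the surrounding task.
    try {
      // Use dataSource here; upstream reads proceed only while no higher-priority task is
      // registered, otherwise PriorityTaskManager.PriorityTooLowException is thrown.
    } finally {
      priorityTaskManager.remove(C.PRIORITY_PLAYBACK);
    }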

      The default is {@link C#PRIORITY_PLAYBACK}. + * + * @param upstreamPriority The priority to use when requesting data from upstream. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setUpstreamPriority(int upstreamPriority) { + this.upstreamPriority = upstreamPriority; + return this; + } + + /** + * Sets the {@link CacheDataSource.Flags}. + * + *
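The flags form a bitmask, so several behaviors can be combined in a single call, for example (factory being an assumed CacheDataSource.Factory from the surrounding code):

    factory.setFlags(
        CacheDataSource.FLAG_IGNORE_CACHE_ON_ERROR
            | CacheDataSource.FLAG_IGNORE_CACHE_FOR_UNSET_LENGTH_REQUESTS);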

      The default is {@code 0}. + * + * @param flags The {@link CacheDataSource.Flags}. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setFlags(@CacheDataSource.Flags int flags) { + this.flags = flags; + return this; + } + + /** + * Sets the {@link EventListener} to which events are delivered. + * + *

      The default is {@code null}. + * + * @param eventListener The {@link EventListener}. + * @return This factory. + */ + @CanIgnoreReturnValue + public Factory setEventListener(@Nullable EventListener eventListener) { + this.eventListener = eventListener; + return this; + } + + @Override + public CacheDataSource createDataSource() { + return createDataSourceInternal( + upstreamDataSourceFactory != null ? upstreamDataSourceFactory.createDataSource() : null, + flags, + upstreamPriority); + } + + /** + * Returns an instance suitable for downloading content. The created instance is equivalent to + * one that would be created by {@link #createDataSource()}, except: + * + *

        + *
      • The {@link #FLAG_BLOCK_ON_CACHE} is always set. + *
      • The task priority is overridden to be {@link C#PRIORITY_DOWNLOAD}. + *
      + * + * @return An instance suitable for downloading content. + */ + public CacheDataSource createDataSourceForDownloading() { + return createDataSourceInternal( + upstreamDataSourceFactory != null ? upstreamDataSourceFactory.createDataSource() : null, + flags | FLAG_BLOCK_ON_CACHE, + C.PRIORITY_DOWNLOAD); + } + + /** + * Returns an instance suitable for reading cached content as part of removing a download. The + * created instance is equivalent to one that would be created by {@link #createDataSource()}, + * except: + * + *
        + *
      • The upstream is overridden to be {@code null}, since when removing content we don't + * want to request anything that's not already cached. + *
      • The {@link #FLAG_BLOCK_ON_CACHE} is always set. + *
      • The task priority is overridden to be {@link C#PRIORITY_DOWNLOAD}. + *
      + * + * @return An instance suitable for reading cached content as part of removing a download. + */ + public CacheDataSource createDataSourceForRemovingDownload() { + return createDataSourceInternal( + /* upstreamDataSource= */ null, flags | FLAG_BLOCK_ON_CACHE, C.PRIORITY_DOWNLOAD); + } + + private CacheDataSource createDataSourceInternal( + @Nullable DataSource upstreamDataSource, @Flags int flags, int upstreamPriority) { + Cache cache = checkNotNull(this.cache); + @Nullable DataSink cacheWriteDataSink; + if (cacheIsReadOnly || upstreamDataSource == null) { + cacheWriteDataSink = null; + } else if (cacheWriteDataSinkFactory != null) { + cacheWriteDataSink = cacheWriteDataSinkFactory.createDataSink(); + } else { + cacheWriteDataSink = new CacheDataSink.Factory().setCache(cache).createDataSink(); + } + return new CacheDataSource( + cache, + upstreamDataSource, + cacheReadDataSourceFactory.createDataSource(), + cacheWriteDataSink, + cacheKeyFactory, + flags, + upstreamPriorityTaskManager, + upstreamPriority, + eventListener); + } + } + + /** Listener of {@link CacheDataSource} events. */ + public interface EventListener { + + /** + * Called when bytes have been read from the cache. + * + * @param cacheSizeBytes Current cache size in bytes. + * @param cachedBytesRead Total bytes read from the cache since this method was last called. + */ + void onCachedBytesRead(long cacheSizeBytes, long cachedBytesRead); + + /** + * Called when the current request ignores cache. + * + * @param reason Reason cache is bypassed. + */ + void onCacheIgnored(@CacheIgnoredReason int reason); + } + /** * Flags controlling the CacheDataSource's behavior. Possible flag values are {@link * #FLAG_BLOCK_ON_CACHE}, {@link #FLAG_IGNORE_CACHE_ON_ERROR} and {@link @@ -52,6 +344,7 @@ public final class CacheDataSource implements DataSource { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef( flag = true, value = { @@ -67,9 +360,9 @@ public final class CacheDataSource implements DataSource { public static final int FLAG_BLOCK_ON_CACHE = 1; /** - * A flag indicating whether the cache is bypassed following any cache related error. If set - * then cache related exceptions may be thrown for one cycle of open, read and close calls. - * Subsequent cycles of these calls will then bypass the cache. + * A flag indicating whether the cache is bypassed following any cache related error. If set then + * cache related exceptions may be thrown for one cycle of open, read and close calls. Subsequent + * cycles of these calls will then bypass the cache. */ public static final int FLAG_IGNORE_CACHE_ON_ERROR = 1 << 1; // 2 @@ -83,8 +376,11 @@ public final class CacheDataSource implements DataSource { * Reasons the cache may be ignored. One of {@link #CACHE_IGNORED_REASON_ERROR} or {@link * #CACHE_IGNORED_REASON_UNSET_LENGTH}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef({CACHE_IGNORED_REASON_ERROR, CACHE_IGNORED_REASON_UNSET_LENGTH}) public @interface CacheIgnoredReason {} @@ -97,27 +393,6 @@ public final class CacheDataSource implements DataSource { /** Cache ignored due to a request with an unset length. */ public static final int CACHE_IGNORED_REASON_UNSET_LENGTH = 1; - /** - * Listener of {@link CacheDataSource} events. 
- */ - public interface EventListener { - - /** - * Called when bytes have been read from the cache. - * - * @param cacheSizeBytes Current cache size in bytes. - * @param cachedBytesRead Total bytes read from the cache since this method was last called. - */ - void onCachedBytesRead(long cacheSizeBytes, long cachedBytesRead); - - /** - * Called when the current request ignores cache. - * - * @param reason Reason cache is bypassed. - */ - void onCacheIgnored(@CacheIgnoredReason int reason); - } - /** Minimum number of bytes to read before checking cache for availability. */ private static final long MIN_READ_BEFORE_CHECKING_CACHE = 100 * 1024; @@ -132,15 +407,11 @@ public interface EventListener { private final boolean ignoreCacheOnError; private final boolean ignoreCacheForUnsetLengthRequests; - @Nullable private DataSource currentDataSource; - private boolean currentDataSpecLengthUnset; - @Nullable private Uri uri; @Nullable private Uri actualUri; - @HttpMethod private int httpMethod; - @Nullable private byte[] httpBody; - private Map httpRequestHeaders = Collections.emptyMap(); - @DataSpec.Flags private int flags; - @Nullable private String key; + @Nullable private DataSpec requestDataSpec; + @Nullable private DataSpec currentDataSpec; + @Nullable private DataSource currentDataSource; + private long currentDataSourceBytesRead; private long readPosition; private long bytesRemaining; @Nullable private CacheSpan currentHoleSpan; @@ -154,10 +425,11 @@ public interface EventListener { * reading and writing the cache. * * @param cache The cache. - * @param upstream A {@link DataSource} for reading data not in the cache. + * @param upstreamDataSource A {@link DataSource} for reading data not in the cache. If null, + * reading will fail if a cache miss occurs. */ - public CacheDataSource(Cache cache, DataSource upstream) { - this(cache, upstream, /* flags= */ 0); + public CacheDataSource(Cache cache, @Nullable DataSource upstreamDataSource) { + this(cache, upstreamDataSource, /* flags= */ 0); } /** @@ -165,14 +437,15 @@ public CacheDataSource(Cache cache, DataSource upstream) { * reading and writing the cache. * * @param cache The cache. - * @param upstream A {@link DataSource} for reading data not in the cache. + * @param upstreamDataSource A {@link DataSource} for reading data not in the cache. If null, + * reading will fail if a cache miss occurs. * @param flags A combination of {@link #FLAG_BLOCK_ON_CACHE}, {@link #FLAG_IGNORE_CACHE_ON_ERROR} * and {@link #FLAG_IGNORE_CACHE_FOR_UNSET_LENGTH_REQUESTS}, or 0. */ - public CacheDataSource(Cache cache, DataSource upstream, @Flags int flags) { + public CacheDataSource(Cache cache, @Nullable DataSource upstreamDataSource, @Flags int flags) { this( cache, - upstream, + upstreamDataSource, new FileDataSource(), new CacheDataSink(cache, CacheDataSink.DEFAULT_FRAGMENT_SIZE), flags, @@ -185,7 +458,8 @@ public CacheDataSource(Cache cache, DataSource upstream, @Flags int flags) { * before it is written to disk. * * @param cache The cache. - * @param upstream A {@link DataSource} for reading data not in the cache. + * @param upstreamDataSource A {@link DataSource} for reading data not in the cache. If null, + * reading will fail if a cache miss occurs. * @param cacheReadDataSource A {@link DataSource} for reading data from the cache. * @param cacheWriteDataSink A {@link DataSink} for writing data to the cache. If null, cache is * accessed read-only. 
@@ -195,14 +469,14 @@ public CacheDataSource(Cache cache, DataSource upstream, @Flags int flags) { */ public CacheDataSource( Cache cache, - DataSource upstream, + @Nullable DataSource upstreamDataSource, DataSource cacheReadDataSource, @Nullable DataSink cacheWriteDataSink, @Flags int flags, @Nullable EventListener eventListener) { this( cache, - upstream, + upstreamDataSource, cacheReadDataSource, cacheWriteDataSink, flags, @@ -216,7 +490,8 @@ public CacheDataSource( * before it is written to disk. * * @param cache The cache. - * @param upstream A {@link DataSource} for reading data not in the cache. + * @param upstreamDataSource A {@link DataSource} for reading data not in the cache. If null, + * reading will fail if a cache miss occurs. * @param cacheReadDataSource A {@link DataSource} for reading data from the cache. * @param cacheWriteDataSink A {@link DataSink} for writing data to the cache. If null, cache is * accessed read-only. @@ -227,31 +502,72 @@ public CacheDataSource( */ public CacheDataSource( Cache cache, - DataSource upstream, + @Nullable DataSource upstreamDataSource, DataSource cacheReadDataSource, @Nullable DataSink cacheWriteDataSink, @Flags int flags, @Nullable EventListener eventListener, @Nullable CacheKeyFactory cacheKeyFactory) { + this( + cache, + upstreamDataSource, + cacheReadDataSource, + cacheWriteDataSink, + cacheKeyFactory, + flags, + /* upstreamPriorityTaskManager= */ null, + /* upstreamPriority= */ C.PRIORITY_PLAYBACK, + eventListener); + } + + private CacheDataSource( + Cache cache, + @Nullable DataSource upstreamDataSource, + DataSource cacheReadDataSource, + @Nullable DataSink cacheWriteDataSink, + @Nullable CacheKeyFactory cacheKeyFactory, + @Flags int flags, + @Nullable PriorityTaskManager upstreamPriorityTaskManager, + int upstreamPriority, + @Nullable EventListener eventListener) { this.cache = cache; this.cacheReadDataSource = cacheReadDataSource; - this.cacheKeyFactory = - cacheKeyFactory != null ? cacheKeyFactory : CacheUtil.DEFAULT_CACHE_KEY_FACTORY; + this.cacheKeyFactory = cacheKeyFactory != null ? cacheKeyFactory : CacheKeyFactory.DEFAULT; this.blockOnCache = (flags & FLAG_BLOCK_ON_CACHE) != 0; this.ignoreCacheOnError = (flags & FLAG_IGNORE_CACHE_ON_ERROR) != 0; this.ignoreCacheForUnsetLengthRequests = (flags & FLAG_IGNORE_CACHE_FOR_UNSET_LENGTH_REQUESTS) != 0; - this.upstreamDataSource = upstream; - if (cacheWriteDataSink != null) { - this.cacheWriteDataSource = new TeeDataSource(upstream, cacheWriteDataSink); + if (upstreamDataSource != null) { + if (upstreamPriorityTaskManager != null) { + upstreamDataSource = + new PriorityDataSource( + upstreamDataSource, upstreamPriorityTaskManager, upstreamPriority); + } + this.upstreamDataSource = upstreamDataSource; + this.cacheWriteDataSource = + cacheWriteDataSink != null + ? new TeeDataSource(upstreamDataSource, cacheWriteDataSink) + : null; } else { + this.upstreamDataSource = PlaceholderDataSource.INSTANCE; this.cacheWriteDataSource = null; } this.eventListener = eventListener; } + /** Returns the {@link Cache} used by this instance. */ + public Cache getCache() { + return cache; + } + + /** Returns the {@link CacheKeyFactory} used by this instance. 
*/ + public CacheKeyFactory getCacheKeyFactory() { + return cacheKeyFactory; + } + @Override public void addTransferListener(TransferListener transferListener) { + checkNotNull(transferListener); cacheReadDataSource.addTransferListener(transferListener); upstreamDataSource.addTransferListener(transferListener); } @@ -259,13 +575,10 @@ public void addTransferListener(TransferListener transferListener) { @Override public long open(DataSpec dataSpec) throws IOException { try { - key = cacheKeyFactory.buildCacheKey(dataSpec); - uri = dataSpec.uri; - actualUri = getRedirectedUriOrDefault(cache, key, /* defaultUri= */ uri); - httpMethod = dataSpec.httpMethod; - httpBody = dataSpec.httpBody; - httpRequestHeaders = dataSpec.httpRequestHeaders; - flags = dataSpec.flags; + String key = cacheKeyFactory.buildCacheKey(dataSpec); + DataSpec requestDataSpec = dataSpec.buildUpon().setKey(key).build(); + this.requestDataSpec = requestDataSpec; + actualUri = getRedirectedUriOrDefault(cache, key, /* defaultUri= */ requestDataSpec.uri); readPosition = dataSpec.position; int reason = shouldIgnoreCacheForRequest(dataSpec); @@ -274,19 +587,28 @@ public long open(DataSpec dataSpec) throws IOException { notifyCacheIgnored(reason); } - if (dataSpec.length != C.LENGTH_UNSET || currentRequestIgnoresCache) { - bytesRemaining = dataSpec.length; + if (currentRequestIgnoresCache) { + bytesRemaining = C.LENGTH_UNSET; } else { bytesRemaining = ContentMetadata.getContentLength(cache.getContentMetadata(key)); if (bytesRemaining != C.LENGTH_UNSET) { bytesRemaining -= dataSpec.position; - if (bytesRemaining <= 0) { - throw new DataSourceException(DataSourceException.POSITION_OUT_OF_RANGE); + if (bytesRemaining < 0) { + throw new DataSourceException( + PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE); } } } - openNextSource(false); - return bytesRemaining; + if (dataSpec.length != C.LENGTH_UNSET) { + bytesRemaining = + bytesRemaining == C.LENGTH_UNSET + ? dataSpec.length + : min(bytesRemaining, dataSpec.length); + } + if (bytesRemaining > 0 || bytesRemaining == C.LENGTH_UNSET) { + openNextSource(requestDataSpec, false); + } + return dataSpec.length != C.LENGTH_UNSET ? 
dataSpec.length : bytesRemaining; } catch (Throwable e) { handleBeforeThrow(e); throw e; @@ -294,41 +616,42 @@ public long open(DataSpec dataSpec) throws IOException { } @Override - public int read(byte[] buffer, int offset, int readLength) throws IOException { - if (readLength == 0) { + public int read(byte[] buffer, int offset, int length) throws IOException { + if (length == 0) { return 0; } if (bytesRemaining == 0) { return C.RESULT_END_OF_INPUT; } + DataSpec requestDataSpec = checkNotNull(this.requestDataSpec); + DataSpec currentDataSpec = checkNotNull(this.currentDataSpec); try { if (readPosition >= checkCachePosition) { - openNextSource(true); + openNextSource(requestDataSpec, true); } - int bytesRead = currentDataSource.read(buffer, offset, readLength); + int bytesRead = checkNotNull(currentDataSource).read(buffer, offset, length); if (bytesRead != C.RESULT_END_OF_INPUT) { if (isReadingFromCache()) { totalCachedBytesRead += bytesRead; } readPosition += bytesRead; + currentDataSourceBytesRead += bytesRead; if (bytesRemaining != C.LENGTH_UNSET) { bytesRemaining -= bytesRead; } - } else if (currentDataSpecLengthUnset) { - setNoBytesRemainingAndMaybeStoreLength(); + } else if (isReadingFromUpstream() + && (currentDataSpec.length == C.LENGTH_UNSET + || currentDataSourceBytesRead < currentDataSpec.length)) { + // We've encountered RESULT_END_OF_INPUT from the upstream DataSource at a position not + // imposed by the current DataSpec. This must mean that we've reached the end of the + // resource. + setNoBytesRemainingAndMaybeStoreLength(castNonNull(requestDataSpec.key)); } else if (bytesRemaining > 0 || bytesRemaining == C.LENGTH_UNSET) { closeCurrentSource(); - openNextSource(false); - return read(buffer, offset, readLength); + openNextSource(requestDataSpec, false); + return read(buffer, offset, length); } return bytesRead; - } catch (IOException e) { - if (currentDataSpecLengthUnset && CacheUtil.isCausedByPositionOutOfRange(e)) { - setNoBytesRemainingAndMaybeStoreLength(); - return C.RESULT_END_OF_INPUT; - } - handleBeforeThrow(e); - throw e; } catch (Throwable e) { handleBeforeThrow(e); throw e; @@ -351,14 +674,9 @@ public Map> getResponseHeaders() { @Override public void close() throws IOException { - uri = null; + requestDataSpec = null; actualUri = null; - httpMethod = DataSpec.HTTP_METHOD_GET; - httpBody = null; - httpRequestHeaders = Collections.emptyMap(); - flags = 0; readPosition = 0; - key = null; notifyBytesRead(); try { closeCurrentSource(); @@ -379,22 +697,24 @@ public void close() throws IOException { * opened if it's possible to switch to reading from or writing to the cache. If a switch isn't * possible then the current source is left unchanged. * + * @param requestDataSpec The original {@link DataSpec} to build upon for the next source. * @param checkCache If true tries to switch to reading from or writing to cache instead of * reading from {@link #upstreamDataSource}, which is the currently open source. 
*/ - private void openNextSource(boolean checkCache) throws IOException { - CacheSpan nextSpan; + private void openNextSource(DataSpec requestDataSpec, boolean checkCache) throws IOException { + @Nullable CacheSpan nextSpan; + String key = castNonNull(requestDataSpec.key); if (currentRequestIgnoresCache) { nextSpan = null; } else if (blockOnCache) { try { - nextSpan = cache.startReadWrite(key, readPosition); + nextSpan = cache.startReadWrite(key, readPosition, bytesRemaining); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new InterruptedIOException(); } } else { - nextSpan = cache.startReadWriteNonBlocking(key, readPosition); + nextSpan = cache.startReadWriteNonBlocking(key, readPosition, bytesRemaining); } DataSpec nextDataSpec; @@ -404,27 +724,24 @@ private void openNextSource(boolean checkCache) throws IOException { // from upstream. nextDataSource = upstreamDataSource; nextDataSpec = - new DataSpec( - uri, - httpMethod, - httpBody, - readPosition, - readPosition, - bytesRemaining, - key, - flags, - httpRequestHeaders); + requestDataSpec.buildUpon().setPosition(readPosition).setLength(bytesRemaining).build(); } else if (nextSpan.isCached) { - // Data is cached, read from cache. - Uri fileUri = Uri.fromFile(nextSpan.file); - long filePosition = readPosition - nextSpan.position; - long length = nextSpan.length - filePosition; + // Data is cached in a span file starting at nextSpan.position. + Uri fileUri = Uri.fromFile(castNonNull(nextSpan.file)); + long filePositionOffset = nextSpan.position; + long positionInFile = readPosition - filePositionOffset; + long length = nextSpan.length - positionInFile; if (bytesRemaining != C.LENGTH_UNSET) { - length = Math.min(length, bytesRemaining); + length = min(length, bytesRemaining); } - // Deliberately skip the HTTP-related parameters since we're reading from the cache, not - // making an HTTP request. - nextDataSpec = new DataSpec(fileUri, readPosition, filePosition, length, key, flags); + nextDataSpec = + requestDataSpec + .buildUpon() + .setUri(fileUri) + .setUriPositionOffset(filePositionOffset) + .setPosition(positionInFile) + .setLength(length) + .build(); nextDataSource = cacheReadDataSource; } else { // Data is not cached, and data is not locked, read from upstream with cache backing. @@ -434,20 +751,11 @@ private void openNextSource(boolean checkCache) throws IOException { } else { length = nextSpan.length; if (bytesRemaining != C.LENGTH_UNSET) { - length = Math.min(length, bytesRemaining); + length = min(length, bytesRemaining); } } nextDataSpec = - new DataSpec( - uri, - httpMethod, - httpBody, - readPosition, - readPosition, - length, - key, - flags, - httpRequestHeaders); + requestDataSpec.buildUpon().setPosition(readPosition).setLength(length).build(); if (cacheWriteDataSource != null) { nextDataSource = cacheWriteDataSource; } else { @@ -471,7 +779,7 @@ private void openNextSource(boolean checkCache) throws IOException { try { closeCurrentSource(); } catch (Throwable e) { - if (nextSpan.isHoleSpan()) { + if (castNonNull(nextSpan).isHoleSpan()) { // Release the hole span before throwing, else we'll hold it forever. 
cache.releaseHoleSpan(nextSpan); } @@ -483,18 +791,19 @@ private void openNextSource(boolean checkCache) throws IOException { currentHoleSpan = nextSpan; } currentDataSource = nextDataSource; - currentDataSpecLengthUnset = nextDataSpec.length == C.LENGTH_UNSET; + currentDataSpec = nextDataSpec; + currentDataSourceBytesRead = 0; long resolvedLength = nextDataSource.open(nextDataSpec); // Update bytesRemaining, actualUri and (if writing to cache) the cache metadata. ContentMetadataMutations mutations = new ContentMetadataMutations(); - if (currentDataSpecLengthUnset && resolvedLength != C.LENGTH_UNSET) { + if (nextDataSpec.length == C.LENGTH_UNSET && resolvedLength != C.LENGTH_UNSET) { bytesRemaining = resolvedLength; ContentMetadataMutations.setContentLength(mutations, readPosition + bytesRemaining); } if (isReadingFromUpstream()) { - actualUri = currentDataSource.getUri(); - boolean isRedirected = !uri.equals(actualUri); + actualUri = nextDataSource.getUri(); + boolean isRedirected = !requestDataSpec.uri.equals(actualUri); ContentMetadataMutations.setRedirectedUri(mutations, isRedirected ? actualUri : null); } if (isWritingToCache()) { @@ -502,7 +811,7 @@ private void openNextSource(boolean checkCache) throws IOException { } } - private void setNoBytesRemainingAndMaybeStoreLength() throws IOException { + private void setNoBytesRemainingAndMaybeStoreLength(String key) throws IOException { bytesRemaining = 0; if (isWritingToCache()) { ContentMetadataMutations mutations = new ContentMetadataMutations(); @@ -512,7 +821,7 @@ private void setNoBytesRemainingAndMaybeStoreLength() throws IOException { } private static Uri getRedirectedUriOrDefault(Cache cache, String key, Uri defaultUri) { - Uri redirectedUri = ContentMetadata.getRedirectedUri(cache.getContentMetadata(key)); + @Nullable Uri redirectedUri = ContentMetadata.getRedirectedUri(cache.getContentMetadata(key)); return redirectedUri != null ? redirectedUri : defaultUri; } @@ -539,8 +848,8 @@ private void closeCurrentSource() throws IOException { try { currentDataSource.close(); } finally { + currentDataSpec = null; currentDataSource = null; - currentDataSpecLengthUnset = false; if (currentHoleSpan != null) { cache.releaseHoleSpan(currentHoleSpan); currentHoleSpan = null; @@ -576,5 +885,4 @@ private void notifyBytesRead() { totalCachedBytesRead = 0; } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheDataSourceFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheDataSourceFactory.java deleted file mode 100644 index 21758bdceb..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheDataSourceFactory.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.google.android.exoplayer2.upstream.cache; - -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.upstream.DataSink; -import com.google.android.exoplayer2.upstream.DataSource; -import com.google.android.exoplayer2.upstream.FileDataSource; - -/** A {@link DataSource.Factory} that produces {@link CacheDataSource}. */ -public final class CacheDataSourceFactory implements DataSource.Factory { - - private final Cache cache; - private final DataSource.Factory upstreamFactory; - private final DataSource.Factory cacheReadDataSourceFactory; - @CacheDataSource.Flags private final int flags; - @Nullable private final DataSink.Factory cacheWriteDataSinkFactory; - @Nullable private final CacheDataSource.EventListener eventListener; - @Nullable private final CacheKeyFactory cacheKeyFactory; - - /** - * Constructs a factory which creates {@link CacheDataSource} instances with default {@link - * DataSource} and {@link DataSink} instances for reading and writing the cache. - * - * @param cache The cache. - * @param upstreamFactory A {@link DataSource.Factory} for creating upstream {@link DataSource}s - * for reading data not in the cache. - */ - public CacheDataSourceFactory(Cache cache, DataSource.Factory upstreamFactory) { - this(cache, upstreamFactory, /* flags= */ 0); - } - - /** @see CacheDataSource#CacheDataSource(Cache, DataSource, int) */ - public CacheDataSourceFactory( - Cache cache, DataSource.Factory upstreamFactory, @CacheDataSource.Flags int flags) { - this( - cache, - upstreamFactory, - new FileDataSource.Factory(), - new CacheDataSinkFactory(cache, CacheDataSink.DEFAULT_FRAGMENT_SIZE), - flags, - /* eventListener= */ null); - } - - /** - * @see CacheDataSource#CacheDataSource(Cache, DataSource, DataSource, DataSink, int, - * CacheDataSource.EventListener) - */ - public CacheDataSourceFactory( - Cache cache, - DataSource.Factory upstreamFactory, - DataSource.Factory cacheReadDataSourceFactory, - @Nullable DataSink.Factory cacheWriteDataSinkFactory, - @CacheDataSource.Flags int flags, - @Nullable CacheDataSource.EventListener eventListener) { - this( - cache, - upstreamFactory, - cacheReadDataSourceFactory, - cacheWriteDataSinkFactory, - flags, - eventListener, - /* cacheKeyFactory= */ null); - } - - /** - * @see CacheDataSource#CacheDataSource(Cache, DataSource, DataSource, DataSink, int, - * CacheDataSource.EventListener, CacheKeyFactory) - */ - public CacheDataSourceFactory( - Cache cache, - DataSource.Factory upstreamFactory, - DataSource.Factory cacheReadDataSourceFactory, - @Nullable DataSink.Factory cacheWriteDataSinkFactory, - @CacheDataSource.Flags int flags, - @Nullable CacheDataSource.EventListener eventListener, - @Nullable CacheKeyFactory cacheKeyFactory) { - this.cache = cache; - this.upstreamFactory = upstreamFactory; - this.cacheReadDataSourceFactory = cacheReadDataSourceFactory; - this.cacheWriteDataSinkFactory = cacheWriteDataSinkFactory; - this.flags = flags; - this.eventListener = eventListener; - this.cacheKeyFactory = cacheKeyFactory; - } - - @Override - public CacheDataSource createDataSource() { - return new CacheDataSource( - cache, - upstreamFactory.createDataSource(), - cacheReadDataSourceFactory.createDataSource(), - cacheWriteDataSinkFactory == null ? 
null : cacheWriteDataSinkFactory.createDataSink(), - flags, - eventListener, - cacheKeyFactory); - } - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheEvictor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheEvictor.java index 6ebfe01df4..6e59c4ecdc 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheEvictor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheEvictor.java @@ -30,9 +30,7 @@ public interface CacheEvictor extends Cache.Listener { */ boolean requiresCacheSpanTouches(); - /** - * Called when cache has been initialized. - */ + /** Called when cache has been initialized. */ void onCacheInitialized(); /** diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheFileMetadataIndex.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheFileMetadataIndex.java index e288a5258e..1e641d0de3 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheFileMetadataIndex.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheFileMetadataIndex.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.upstream.cache; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + import android.content.ContentValues; import android.database.Cursor; import android.database.SQLException; @@ -92,7 +94,9 @@ public static void delete(DatabaseProvider databaseProvider, long uid) } } - /** @param databaseProvider Provides the database in which the index is stored. */ + /** + * @param databaseProvider Provides the database in which the index is stored. + */ public CacheFileMetadataIndex(DatabaseProvider databaseProvider) { this.databaseProvider = databaseProvider; } @@ -146,7 +150,7 @@ public Map getAll() throws DatabaseIOException { try (Cursor cursor = getCursor()) { Map fileMetadata = new HashMap<>(cursor.getCount()); while (cursor.moveToNext()) { - String name = cursor.getString(COLUMN_INDEX_NAME); + String name = checkNotNull(cursor.getString(COLUMN_INDEX_NAME)); long length = cursor.getLong(COLUMN_INDEX_LENGTH); long lastTouchTimestamp = cursor.getLong(COLUMN_INDEX_LAST_TOUCH_TIMESTAMP); fileMetadata.put(name, new CacheFileMetadata(length, lastTouchTimestamp)); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheKeyFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheKeyFactory.java index bfa404c074..69e9b73fdd 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheKeyFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheKeyFactory.java @@ -20,10 +20,20 @@ /** Factory for cache keys. */ public interface CacheKeyFactory { + /** Default {@link CacheKeyFactory}. */ + CacheKeyFactory DEFAULT = + (dataSpec) -> dataSpec.key != null ? dataSpec.key : dataSpec.uri.toString(); + /** - * Returns a cache key for the given {@link DataSpec}. + * Returns the cache key of the resource containing the data defined by a {@link DataSpec}. + * + *
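To illustrate the contract stated here and expanded on in the note that follows: a cache key must identify the whole resource, never a sub-range of it. A hypothetical factory that keys resources by their URI with the query stripped, under the assumption that query parameters only carry volatile CDN tokens, could look like this sketch:

    // Sketch: assumes query parameters never distinguish different resources.
    CacheKeyFactory queryStrippingKeyFactory =
        dataSpec ->
            dataSpec.key != null
                ? dataSpec.key
                // Never derive the key from dataSpec.position or dataSpec.length.
                : dataSpec.uri.buildUpon().clearQuery().build().toString();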

      Note that since the returned cache key corresponds to the whole resource, implementations + * must not return different cache keys for {@link DataSpec DataSpecs} that define different + * ranges of the same resource. As a result, implementations should not use fields such as {@link + * DataSpec#position} and {@link DataSpec#length}. * - * @param dataSpec The data being cached. + * @param dataSpec The {@link DataSpec}. + * @return The cache key of the resource. */ String buildCacheKey(DataSpec dataSpec); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheSpan.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheSpan.java index 609e933c9d..ceb8ee7410 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheSpan.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheSpan.java @@ -15,31 +15,22 @@ */ package com.google.android.exoplayer2.upstream.cache; -import androidx.annotation.NonNull; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import java.io.File; -/** - * Defines a span of data that may or may not be cached (as indicated by {@link #isCached}). - */ +/** Defines a span of data that may or may not be cached (as indicated by {@link #isCached}). */ public class CacheSpan implements Comparable { - /** - * The cache key that uniquely identifies the original stream. - */ + /** The cache key that uniquely identifies the resource. */ public final String key; - /** - * The position of the {@link CacheSpan} in the original stream. - */ + /** The position of the {@link CacheSpan} in the resource. */ public final long position; /** * The length of the {@link CacheSpan}, or {@link C#LENGTH_UNSET} if this is an open-ended hole. */ public final long length; - /** - * Whether the {@link CacheSpan} is cached. - */ + /** Whether the {@link CacheSpan} is cached. */ public final boolean isCached; /** The file corresponding to this {@link CacheSpan}, or null if {@link #isCached} is false. */ @Nullable public final File file; @@ -50,8 +41,8 @@ public class CacheSpan implements Comparable { * Creates a hole CacheSpan which isn't cached, has no last touch timestamp and no file * associated. * - * @param key The cache key that uniquely identifies the original stream. - * @param position The position of the {@link CacheSpan} in the original stream. + * @param key The cache key that uniquely identifies the resource. + * @param position The position of the {@link CacheSpan} in the resource. * @param length The length of the {@link CacheSpan}, or {@link C#LENGTH_UNSET} if this is an * open-ended hole. */ @@ -62,8 +53,8 @@ public CacheSpan(String key, long position, long length) { /** * Creates a CacheSpan. * - * @param key The cache key that uniquely identifies the original stream. - * @param position The position of the {@link CacheSpan} in the original stream. + * @param key The cache key that uniquely identifies the resource. + * @param position The position of the {@link CacheSpan} in the resource. * @param length The length of the {@link CacheSpan}, or {@link C#LENGTH_UNSET} if this is an * open-ended hole. * @param lastTouchTimestamp The last touch timestamp, or {@link C#TIME_UNSET} if {@link @@ -80,22 +71,18 @@ public CacheSpan( this.lastTouchTimestamp = lastTouchTimestamp; } - /** - * Returns whether this is an open-ended {@link CacheSpan}. - */ + /** Returns whether this is an open-ended {@link CacheSpan}. 
*/ public boolean isOpenEnded() { return length == C.LENGTH_UNSET; } - /** - * Returns whether this is a hole {@link CacheSpan}. - */ + /** Returns whether this is a hole {@link CacheSpan}. */ public boolean isHoleSpan() { return !isCached; } @Override - public int compareTo(@NonNull CacheSpan another) { + public int compareTo(CacheSpan another) { if (!key.equals(another.key)) { return key.compareTo(another.key); } @@ -103,4 +90,8 @@ public int compareTo(@NonNull CacheSpan another) { return startOffsetDiff == 0 ? 0 : ((startOffsetDiff < 0) ? -1 : 1); } + @Override + public String toString() { + return "[" + position + ", " + length + "]"; + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheUtil.java deleted file mode 100644 index 9f1fc54462..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheUtil.java +++ /dev/null @@ -1,432 +0,0 @@ -/* - * Copyright (C) 2017 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.upstream.cache; - -import android.net.Uri; -import android.util.Pair; -import androidx.annotation.Nullable; -import androidx.annotation.WorkerThread; -import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.upstream.DataSource; -import com.google.android.exoplayer2.upstream.DataSourceException; -import com.google.android.exoplayer2.upstream.DataSpec; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.PriorityTaskManager; -import com.google.android.exoplayer2.util.Util; -import java.io.EOFException; -import java.io.IOException; -import java.util.NavigableSet; -import java.util.concurrent.atomic.AtomicBoolean; - -/** - * Caching related utility methods. - */ -public final class CacheUtil { - - /** Receives progress updates during cache operations. */ - public interface ProgressListener { - - /** - * Called when progress is made during a cache operation. - * - * @param requestLength The length of the content being cached in bytes, or {@link - * C#LENGTH_UNSET} if unknown. - * @param bytesCached The number of bytes that are cached. - * @param newBytesCached The number of bytes that have been newly cached since the last progress - * update. - */ - void onProgress(long requestLength, long bytesCached, long newBytesCached); - } - - /** Default buffer size to be used while caching. */ - public static final int DEFAULT_BUFFER_SIZE_BYTES = 128 * 1024; - - /** Default {@link CacheKeyFactory}. */ - public static final CacheKeyFactory DEFAULT_CACHE_KEY_FACTORY = - (dataSpec) -> dataSpec.key != null ? dataSpec.key : generateKey(dataSpec.uri); - - /** - * Generates a cache key out of the given {@link Uri}. - * - * @param uri Uri of a content which the requested key is for. 
- */ - public static String generateKey(Uri uri) { - return uri.toString(); - } - - /** - * Queries the cache to obtain the request length and the number of bytes already cached for a - * given {@link DataSpec}. - * - * @param dataSpec Defines the data to be checked. - * @param cache A {@link Cache} which has the data. - * @param cacheKeyFactory An optional factory for cache keys. - * @return A pair containing the request length and the number of bytes that are already cached. - */ - public static Pair getCached( - DataSpec dataSpec, Cache cache, @Nullable CacheKeyFactory cacheKeyFactory) { - String key = buildCacheKey(dataSpec, cacheKeyFactory); - long position = dataSpec.absoluteStreamPosition; - long requestLength = getRequestLength(dataSpec, cache, key); - long bytesAlreadyCached = 0; - long bytesLeft = requestLength; - while (bytesLeft != 0) { - long blockLength = - cache.getCachedLength( - key, position, bytesLeft != C.LENGTH_UNSET ? bytesLeft : Long.MAX_VALUE); - if (blockLength > 0) { - bytesAlreadyCached += blockLength; - } else { - blockLength = -blockLength; - if (blockLength == Long.MAX_VALUE) { - break; - } - } - position += blockLength; - bytesLeft -= bytesLeft == C.LENGTH_UNSET ? 0 : blockLength; - } - return Pair.create(requestLength, bytesAlreadyCached); - } - - /** - * Caches the data defined by {@code dataSpec}, skipping already cached data. Caching stops early - * if the end of the input is reached. - * - *
<p>
      This method may be slow and shouldn't normally be called on the main thread. - * - * @param dataSpec Defines the data to be cached. - * @param cache A {@link Cache} to store the data. - * @param upstream A {@link DataSource} for reading data not in the cache. - * @param progressListener A listener to receive progress updates, or {@code null}. - * @param isCanceled An optional flag that will interrupt caching if set to true. - * @throws IOException If an error occurs reading from the source. - * @throws InterruptedException If the thread was interrupted directly or via {@code isCanceled}. - */ - @WorkerThread - public static void cache( - DataSpec dataSpec, - Cache cache, - DataSource upstream, - @Nullable ProgressListener progressListener, - @Nullable AtomicBoolean isCanceled) - throws IOException, InterruptedException { - cache( - dataSpec, - cache, - /* cacheKeyFactory= */ null, - new CacheDataSource(cache, upstream), - new byte[DEFAULT_BUFFER_SIZE_BYTES], - /* priorityTaskManager= */ null, - /* priority= */ 0, - progressListener, - isCanceled, - /* enableEOFException= */ false); - } - - /** - * Caches the data defined by {@code dataSpec}, skipping already cached data. Caching stops early - * if end of input is reached and {@code enableEOFException} is false. - * - *
<p>
      If a {@link PriorityTaskManager} is provided, it's used to pause and resume caching - * depending on {@code priority} and the priority of other tasks registered to the - * PriorityTaskManager. Please note that it's the responsibility of the calling code to call - * {@link PriorityTaskManager#add} to register with the manager before calling this method, and to - * call {@link PriorityTaskManager#remove} afterwards to unregister. - * - *
<p>
      This method may be slow and shouldn't normally be called on the main thread. - * - * @param dataSpec Defines the data to be cached. - * @param cache A {@link Cache} to store the data. - * @param cacheKeyFactory An optional factory for cache keys. - * @param dataSource A {@link CacheDataSource} that works on the {@code cache}. - * @param buffer The buffer to be used while caching. - * @param priorityTaskManager If not null it's used to check whether it is allowed to proceed with - * caching. - * @param priority The priority of this task. Used with {@code priorityTaskManager}. - * @param progressListener A listener to receive progress updates, or {@code null}. - * @param isCanceled An optional flag that will interrupt caching if set to true. - * @param enableEOFException Whether to throw an {@link EOFException} if end of input has been - * reached unexpectedly. - * @throws IOException If an error occurs reading from the source. - * @throws InterruptedException If the thread was interrupted directly or via {@code isCanceled}. - */ - @WorkerThread - public static void cache( - DataSpec dataSpec, - Cache cache, - @Nullable CacheKeyFactory cacheKeyFactory, - CacheDataSource dataSource, - byte[] buffer, - @Nullable PriorityTaskManager priorityTaskManager, - int priority, - @Nullable ProgressListener progressListener, - @Nullable AtomicBoolean isCanceled, - boolean enableEOFException) - throws IOException, InterruptedException { - Assertions.checkNotNull(dataSource); - Assertions.checkNotNull(buffer); - - String key = buildCacheKey(dataSpec, cacheKeyFactory); - long bytesLeft; - ProgressNotifier progressNotifier = null; - if (progressListener != null) { - progressNotifier = new ProgressNotifier(progressListener); - Pair lengthAndBytesAlreadyCached = getCached(dataSpec, cache, cacheKeyFactory); - progressNotifier.init(lengthAndBytesAlreadyCached.first, lengthAndBytesAlreadyCached.second); - bytesLeft = lengthAndBytesAlreadyCached.first; - } else { - bytesLeft = getRequestLength(dataSpec, cache, key); - } - - long position = dataSpec.absoluteStreamPosition; - boolean lengthUnset = bytesLeft == C.LENGTH_UNSET; - while (bytesLeft != 0) { - throwExceptionIfInterruptedOrCancelled(isCanceled); - long blockLength = - cache.getCachedLength(key, position, lengthUnset ? Long.MAX_VALUE : bytesLeft); - if (blockLength > 0) { - // Skip already cached data. - } else { - // There is a hole in the cache which is at least "-blockLength" long. - blockLength = -blockLength; - long length = blockLength == Long.MAX_VALUE ? C.LENGTH_UNSET : blockLength; - boolean isLastBlock = length == bytesLeft; - long read = - readAndDiscard( - dataSpec, - position, - length, - dataSource, - buffer, - priorityTaskManager, - priority, - progressNotifier, - isLastBlock, - isCanceled); - if (read < blockLength) { - // Reached to the end of the data. - if (enableEOFException && !lengthUnset) { - throw new EOFException(); - } - break; - } - } - position += blockLength; - if (!lengthUnset) { - bytesLeft -= blockLength; - } - } - } - - private static long getRequestLength(DataSpec dataSpec, Cache cache, String key) { - if (dataSpec.length != C.LENGTH_UNSET) { - return dataSpec.length; - } else { - long contentLength = ContentMetadata.getContentLength(cache.getContentMetadata(key)); - return contentLength == C.LENGTH_UNSET - ? C.LENGTH_UNSET - : contentLength - dataSpec.absoluteStreamPosition; - } - } - - /** - * Reads and discards all data specified by the {@code dataSpec}. - * - * @param dataSpec Defines the data to be read. 
{@code absoluteStreamPosition} and {@code length} - * fields are overwritten by the following parameters. - * @param absoluteStreamPosition The absolute position of the data to be read. - * @param length Length of the data to be read, or {@link C#LENGTH_UNSET} if it is unknown. - * @param dataSource The {@link DataSource} to read the data from. - * @param buffer The buffer to be used while downloading. - * @param priorityTaskManager If not null it's used to check whether it is allowed to proceed with - * caching. - * @param priority The priority of this task. - * @param progressNotifier A notifier through which to report progress updates, or {@code null}. - * @param isLastBlock Whether this read block is the last block of the content. - * @param isCanceled An optional flag that will interrupt caching if set to true. - * @return Number of read bytes, or 0 if no data is available because the end of the opened range - * has been reached. - */ - private static long readAndDiscard( - DataSpec dataSpec, - long absoluteStreamPosition, - long length, - DataSource dataSource, - byte[] buffer, - @Nullable PriorityTaskManager priorityTaskManager, - int priority, - @Nullable ProgressNotifier progressNotifier, - boolean isLastBlock, - @Nullable AtomicBoolean isCanceled) - throws IOException, InterruptedException { - long positionOffset = absoluteStreamPosition - dataSpec.absoluteStreamPosition; - long initialPositionOffset = positionOffset; - long endOffset = length != C.LENGTH_UNSET ? positionOffset + length : C.POSITION_UNSET; - while (true) { - if (priorityTaskManager != null) { - // Wait for any other thread with higher priority to finish its job. - priorityTaskManager.proceed(priority); - } - throwExceptionIfInterruptedOrCancelled(isCanceled); - try { - long resolvedLength = C.LENGTH_UNSET; - boolean isDataSourceOpen = false; - if (endOffset != C.POSITION_UNSET) { - // If a specific length is given, first try to open the data source for that length to - // avoid more data then required to be requested. If the given length exceeds the end of - // input we will get a "position out of range" error. In that case try to open the source - // again with unset length. - try { - resolvedLength = - dataSource.open(dataSpec.subrange(positionOffset, endOffset - positionOffset)); - isDataSourceOpen = true; - } catch (IOException exception) { - if (!isLastBlock || !isCausedByPositionOutOfRange(exception)) { - throw exception; - } - Util.closeQuietly(dataSource); - } - } - if (!isDataSourceOpen) { - resolvedLength = dataSource.open(dataSpec.subrange(positionOffset, C.LENGTH_UNSET)); - } - if (isLastBlock && progressNotifier != null && resolvedLength != C.LENGTH_UNSET) { - progressNotifier.onRequestLengthResolved(positionOffset + resolvedLength); - } - while (positionOffset != endOffset) { - throwExceptionIfInterruptedOrCancelled(isCanceled); - int bytesRead = - dataSource.read( - buffer, - 0, - endOffset != C.POSITION_UNSET - ? 
(int) Math.min(buffer.length, endOffset - positionOffset) - : buffer.length); - if (bytesRead == C.RESULT_END_OF_INPUT) { - if (progressNotifier != null) { - progressNotifier.onRequestLengthResolved(positionOffset); - } - break; - } - positionOffset += bytesRead; - if (progressNotifier != null) { - progressNotifier.onBytesCached(bytesRead); - } - } - return positionOffset - initialPositionOffset; - } catch (PriorityTaskManager.PriorityTooLowException exception) { - // catch and try again - } finally { - Util.closeQuietly(dataSource); - } - } - } - - /** - * Removes all of the data specified by the {@code dataSpec}. - * - *
<p>
      This methods blocks until the operation is complete. - * - * @param dataSpec Defines the data to be removed. - * @param cache A {@link Cache} to store the data. - * @param cacheKeyFactory An optional factory for cache keys. - */ - @WorkerThread - public static void remove( - DataSpec dataSpec, Cache cache, @Nullable CacheKeyFactory cacheKeyFactory) { - remove(cache, buildCacheKey(dataSpec, cacheKeyFactory)); - } - - /** - * Removes all of the data specified by the {@code key}. - * - *
<p>
      This methods blocks until the operation is complete. - * - * @param cache A {@link Cache} to store the data. - * @param key The key whose data should be removed. - */ - @WorkerThread - public static void remove(Cache cache, String key) { - NavigableSet cachedSpans = cache.getCachedSpans(key); - for (CacheSpan cachedSpan : cachedSpans) { - try { - cache.removeSpan(cachedSpan); - } catch (Cache.CacheException e) { - // Do nothing. - } - } - } - - /* package */ static boolean isCausedByPositionOutOfRange(IOException e) { - Throwable cause = e; - while (cause != null) { - if (cause instanceof DataSourceException) { - int reason = ((DataSourceException) cause).reason; - if (reason == DataSourceException.POSITION_OUT_OF_RANGE) { - return true; - } - } - cause = cause.getCause(); - } - return false; - } - - private static String buildCacheKey( - DataSpec dataSpec, @Nullable CacheKeyFactory cacheKeyFactory) { - return (cacheKeyFactory != null ? cacheKeyFactory : DEFAULT_CACHE_KEY_FACTORY) - .buildCacheKey(dataSpec); - } - - private static void throwExceptionIfInterruptedOrCancelled(@Nullable AtomicBoolean isCanceled) - throws InterruptedException { - if (Thread.interrupted() || (isCanceled != null && isCanceled.get())) { - throw new InterruptedException(); - } - } - - private CacheUtil() {} - - private static final class ProgressNotifier { - /** The listener to notify when progress is made. */ - private final ProgressListener listener; - /** The length of the content being cached in bytes, or {@link C#LENGTH_UNSET} if unknown. */ - private long requestLength; - /** The number of bytes that are cached. */ - private long bytesCached; - - public ProgressNotifier(ProgressListener listener) { - this.listener = listener; - } - - public void init(long requestLength, long bytesCached) { - this.requestLength = requestLength; - this.bytesCached = bytesCached; - listener.onProgress(requestLength, bytesCached, /* newBytesCached= */ 0); - } - - public void onRequestLengthResolved(long requestLength) { - if (this.requestLength == C.LENGTH_UNSET && requestLength != C.LENGTH_UNSET) { - this.requestLength = requestLength; - listener.onProgress(requestLength, bytesCached, /* newBytesCached= */ 0); - } - } - - public void onBytesCached(long newBytesCached) { - bytesCached += newBytesCached; - listener.onProgress(requestLength, bytesCached, newBytesCached); - } - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheWriter.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheWriter.java new file mode 100644 index 0000000000..5fa6638981 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CacheWriter.java @@ -0,0 +1,234 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.upstream.cache; + +import androidx.annotation.Nullable; +import androidx.annotation.WorkerThread; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.upstream.DataSourceUtil; +import com.google.android.exoplayer2.upstream.DataSpec; +import com.google.android.exoplayer2.util.PriorityTaskManager; +import com.google.android.exoplayer2.util.PriorityTaskManager.PriorityTooLowException; +import java.io.IOException; +import java.io.InterruptedIOException; + +/** Caching related utility methods. */ +public final class CacheWriter { + + /** Receives progress updates during cache operations. */ + public interface ProgressListener { + + /** + * Called when progress is made during a cache operation. + * + * @param requestLength The length of the content being cached in bytes, or {@link + * C#LENGTH_UNSET} if unknown. + * @param bytesCached The number of bytes that are cached. + * @param newBytesCached The number of bytes that have been newly cached since the last progress + * update. + */ + void onProgress(long requestLength, long bytesCached, long newBytesCached); + } + + /** Default buffer size to be used while caching. */ + public static final int DEFAULT_BUFFER_SIZE_BYTES = 128 * 1024; + + private final CacheDataSource dataSource; + private final Cache cache; + private final DataSpec dataSpec; + private final String cacheKey; + private final byte[] temporaryBuffer; + @Nullable private final ProgressListener progressListener; + + private long nextPosition; + private long endPosition; + private long bytesCached; + + private volatile boolean isCanceled; + + /** + * @param dataSource A {@link CacheDataSource} that writes to the target cache. + * @param dataSpec Defines the data to be written. + * @param temporaryBuffer A temporary buffer to be used during caching, or {@code null} if the + * writer should instantiate its own internal temporary buffer. + * @param progressListener An optional progress listener. + */ + public CacheWriter( + CacheDataSource dataSource, + DataSpec dataSpec, + @Nullable byte[] temporaryBuffer, + @Nullable ProgressListener progressListener) { + this.dataSource = dataSource; + this.cache = dataSource.getCache(); + this.dataSpec = dataSpec; + this.temporaryBuffer = + temporaryBuffer == null ? new byte[DEFAULT_BUFFER_SIZE_BYTES] : temporaryBuffer; + this.progressListener = progressListener; + cacheKey = dataSource.getCacheKeyFactory().buildCacheKey(dataSpec); + nextPosition = dataSpec.position; + } + + /** + * Cancels this writer's caching operation. {@link #cache} checks for cancelation frequently + * during execution, and throws an {@link InterruptedIOException} if it sees that the caching + * operation has been canceled. + */ + public void cancel() { + isCanceled = true; + } + + /** + * Caches the requested data, skipping any that's already cached. + * + *
<p>
      If the {@link CacheDataSource} used by the writer has a {@link PriorityTaskManager}, then + * it's the responsibility of the caller to call {@link PriorityTaskManager#add} to register with + * the manager before calling this method, and to call {@link PriorityTaskManager#remove} + * afterwards to unregister. {@link PriorityTooLowException} will be thrown if the priority + * required by the {@link CacheDataSource} is not high enough for progress to be made. + * + *
<p>
      This method may be slow and shouldn't normally be called on the main thread. + * + * @throws IOException If an error occurs reading the data, or writing the data into the cache, or + * if the operation is canceled. If canceled, an {@link InterruptedIOException} is thrown. The + * method may be called again to continue the operation from where the error occurred. + */ + @WorkerThread + public void cache() throws IOException { + throwIfCanceled(); + + bytesCached = cache.getCachedBytes(cacheKey, dataSpec.position, dataSpec.length); + if (dataSpec.length != C.LENGTH_UNSET) { + endPosition = dataSpec.position + dataSpec.length; + } else { + long contentLength = ContentMetadata.getContentLength(cache.getContentMetadata(cacheKey)); + endPosition = contentLength == C.LENGTH_UNSET ? C.POSITION_UNSET : contentLength; + } + if (progressListener != null) { + progressListener.onProgress(getLength(), bytesCached, /* newBytesCached= */ 0); + } + + while (endPosition == C.POSITION_UNSET || nextPosition < endPosition) { + throwIfCanceled(); + long maxRemainingLength = + endPosition == C.POSITION_UNSET ? Long.MAX_VALUE : endPosition - nextPosition; + long blockLength = cache.getCachedLength(cacheKey, nextPosition, maxRemainingLength); + if (blockLength > 0) { + nextPosition += blockLength; + } else { + // There's a hole of length -blockLength. + blockLength = -blockLength; + long nextRequestLength = blockLength == Long.MAX_VALUE ? C.LENGTH_UNSET : blockLength; + nextPosition += readBlockToCache(nextPosition, nextRequestLength); + } + } + } + + /** + * Reads the specified block of data, writing it into the cache. + * + * @param position The starting position of the block. + * @param length The length of the block, or {@link C#LENGTH_UNSET} if unbounded. + * @return The number of bytes read. + * @throws IOException If an error occurs reading the data or writing it to the cache. + */ + private long readBlockToCache(long position, long length) throws IOException { + boolean isLastBlock = position + length == endPosition || length == C.LENGTH_UNSET; + + long resolvedLength = C.LENGTH_UNSET; + boolean isDataSourceOpen = false; + if (length != C.LENGTH_UNSET) { + // If the length is specified, try to open the data source with a bounded request to avoid + // the underlying network stack requesting more data than required. + DataSpec boundedDataSpec = + dataSpec.buildUpon().setPosition(position).setLength(length).build(); + try { + resolvedLength = dataSource.open(boundedDataSpec); + isDataSourceOpen = true; + } catch (IOException e) { + DataSourceUtil.closeQuietly(dataSource); + } + } + + if (!isDataSourceOpen) { + // Either the length was unspecified, or we allow short content and our attempt to open the + // DataSource with the specified length failed. 
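(Illustrative aside, not part of the new file: callers migrating off the deleted CacheUtil.cache(...) would drive CacheWriter roughly as sketched below. PreloadTask and the cache/upstream/dataSpec parameters are assumed to exist on the caller's side, and CacheDataSource(cache, upstream) is the pre-existing convenience constructor.)

import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.upstream.DataSpec;
import com.google.android.exoplayer2.upstream.cache.Cache;
import com.google.android.exoplayer2.upstream.cache.CacheDataSource;
import com.google.android.exoplayer2.upstream.cache.CacheWriter;
import java.io.IOException;
import java.io.InterruptedIOException;

final class PreloadTask {
  // Blocking, so run it on a worker thread, mirroring the @WorkerThread contract of cache().
  static void preload(Cache cache, DataSource upstream, DataSpec dataSpec) throws IOException {
    CacheWriter writer =
        new CacheWriter(
            new CacheDataSource(cache, upstream),
            dataSpec,
            /* temporaryBuffer= */ null,
            (requestLength, bytesCached, newBytesCached) -> {
              // Progress callback; requestLength may stay unresolved (LENGTH_UNSET) for a while.
            });
    try {
      writer.cache();
    } catch (InterruptedIOException e) {
      // Thrown when cancel() is called from another thread; cache() may be invoked again to resume.
    }
  }
}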
+ throwIfCanceled(); + DataSpec unboundedDataSpec = + dataSpec.buildUpon().setPosition(position).setLength(C.LENGTH_UNSET).build(); + try { + resolvedLength = dataSource.open(unboundedDataSpec); + } catch (IOException e) { + DataSourceUtil.closeQuietly(dataSource); + throw e; + } + } + + int totalBytesRead = 0; + try { + if (isLastBlock && resolvedLength != C.LENGTH_UNSET) { + onRequestEndPosition(position + resolvedLength); + } + int bytesRead = 0; + while (bytesRead != C.RESULT_END_OF_INPUT) { + throwIfCanceled(); + bytesRead = dataSource.read(temporaryBuffer, /* offset= */ 0, temporaryBuffer.length); + if (bytesRead != C.RESULT_END_OF_INPUT) { + onNewBytesCached(bytesRead); + totalBytesRead += bytesRead; + } + } + if (isLastBlock) { + onRequestEndPosition(position + totalBytesRead); + } + } catch (IOException e) { + DataSourceUtil.closeQuietly(dataSource); + throw e; + } + + // Util.closeQuietly(dataSource) is not used here because it's important that an exception is + // thrown if DataSource.close fails. This is because there's no way of knowing whether the block + // was successfully cached in this case. + dataSource.close(); + return totalBytesRead; + } + + private void onRequestEndPosition(long endPosition) { + if (this.endPosition == endPosition) { + return; + } + this.endPosition = endPosition; + if (progressListener != null) { + progressListener.onProgress(getLength(), bytesCached, /* newBytesCached= */ 0); + } + } + + private void onNewBytesCached(long newBytesCached) { + bytesCached += newBytesCached; + if (progressListener != null) { + progressListener.onProgress(getLength(), bytesCached, newBytesCached); + } + } + + private long getLength() { + return endPosition == C.POSITION_UNSET ? C.LENGTH_UNSET : endPosition - dataSpec.position; + } + + private void throwIfCanceled() throws InterruptedIOException { + if (isCanceled) { + throw new InterruptedIOException(); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CachedContent.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CachedContent.java index b6a55c8da4..4c3f58f2c6 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CachedContent.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CachedContent.java @@ -16,34 +16,40 @@ package com.google.android.exoplayer2.upstream.cache; import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; import static com.google.android.exoplayer2.util.Assertions.checkState; +import static java.lang.Math.max; +import static java.lang.Math.min; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.util.Log; import java.io.File; +import java.util.ArrayList; import java.util.TreeSet; -/** Defines the cached content for a single stream. */ +/** Defines the cached content for a single resource. */ /* package */ final class CachedContent { private static final String TAG = "CachedContent"; - /** The cache file id that uniquely identifies the original stream. */ + /** The cache id that uniquely identifies the resource. */ public final int id; - /** The cache key that uniquely identifies the original stream. */ + /** The cache key that uniquely identifies the resource. */ public final String key; /** The cached spans of this content. */ private final TreeSet cachedSpans; + /** Currently locked ranges. 
*/ + private final ArrayList lockedRanges; + /** Metadata values. */ private DefaultContentMetadata metadata; - /** Whether the content is locked. */ - private boolean locked; /** * Creates a CachedContent. * - * @param id The cache file id. - * @param key The cache stream key. + * @param id The cache id of the resource. + * @param key The cache key of the resource. */ public CachedContent(int id, String key) { this(id, key, DefaultContentMetadata.EMPTY); @@ -53,7 +59,8 @@ public CachedContent(int id, String key, DefaultContentMetadata metadata) { this.id = id; this.key = key; this.metadata = metadata; - this.cachedSpans = new TreeSet<>(); + cachedSpans = new TreeSet<>(); + lockedRanges = new ArrayList<>(); } /** Returns the metadata. */ @@ -72,14 +79,58 @@ public boolean applyMetadataMutations(ContentMetadataMutations mutations) { return !metadata.equals(oldMetadata); } - /** Returns whether the content is locked. */ - public boolean isLocked() { - return locked; + /** Returns whether the entire resource is fully unlocked. */ + public boolean isFullyUnlocked() { + return lockedRanges.isEmpty(); } - /** Sets the locked state of the content. */ - public void setLocked(boolean locked) { - this.locked = locked; + /** + * Returns whether the specified range of the resource is fully locked by a single lock. + * + * @param position The position of the range. + * @param length The length of the range, or {@link C#LENGTH_UNSET} if unbounded. + * @return Whether the range is fully locked by a single lock. + */ + public boolean isFullyLocked(long position, long length) { + for (int i = 0; i < lockedRanges.size(); i++) { + if (lockedRanges.get(i).contains(position, length)) { + return true; + } + } + return false; + } + + /** + * Attempts to lock the specified range of the resource. + * + * @param position The position of the range. + * @param length The length of the range, or {@link C#LENGTH_UNSET} if unbounded. + * @return Whether the range was successfully locked. + */ + public boolean lockRange(long position, long length) { + for (int i = 0; i < lockedRanges.size(); i++) { + if (lockedRanges.get(i).intersects(position, length)) { + return false; + } + } + lockedRanges.add(new Range(position, length)); + return true; + } + + /** + * Unlocks the currently locked range starting at the specified position. + * + * @param position The starting position of the locked range. + * @throws IllegalStateException If there was no locked range starting at the specified position. + */ + public void unlockRange(long position) { + for (int i = 0; i < lockedRanges.size(); i++) { + if (lockedRanges.get(i).position == position) { + lockedRanges.remove(i); + return; + } + } + throw new IllegalStateException(); } /** Adds the given {@link SimpleCacheSpan} which contains a part of the content. */ @@ -93,36 +144,45 @@ public TreeSet getSpans() { } /** - * Returns the span containing the position. If there isn't one, it returns a hole span - * which defines the maximum extents of the hole in the cache. + * Returns the cache span corresponding to the provided range. See {@link + * Cache#startReadWrite(String, long, long)} for detailed descriptions of the returned spans. + * + * @param position The position of the span being requested. + * @param length The length of the span, or {@link C#LENGTH_UNSET} if unbounded. + * @return The corresponding cache {@link SimpleCacheSpan}. 
*/ - public SimpleCacheSpan getSpan(long position) { + public SimpleCacheSpan getSpan(long position, long length) { SimpleCacheSpan lookupSpan = SimpleCacheSpan.createLookup(key, position); SimpleCacheSpan floorSpan = cachedSpans.floor(lookupSpan); if (floorSpan != null && floorSpan.position + floorSpan.length > position) { return floorSpan; } SimpleCacheSpan ceilSpan = cachedSpans.ceiling(lookupSpan); - return ceilSpan == null ? SimpleCacheSpan.createOpenHole(key, position) - : SimpleCacheSpan.createClosedHole(key, position, ceilSpan.position - position); + if (ceilSpan != null) { + long holeLength = ceilSpan.position - position; + length = length == C.LENGTH_UNSET ? holeLength : min(holeLength, length); + } + return SimpleCacheSpan.createHole(key, position, length); } /** - * Returns the length of the cached data block starting from the {@code position} to the block end - * up to {@code length} bytes. If the {@code position} isn't cached then -(the length of the gap - * to the next cached data up to {@code length} bytes) is returned. + * Returns the length of continuously cached data starting from {@code position}, up to a maximum + * of {@code maxLength}. If {@code position} isn't cached, then {@code -holeLength} is returned, + * where {@code holeLength} is the length of continuously un-cached data starting from {@code + * position}, up to a maximum of {@code maxLength}. * * @param position The starting position of the data. - * @param length The maximum length of the data to be returned. - * @return the length of the cached or not cached data block length. + * @param length The maximum length of the data or hole to be returned. + * @return The length of continuously cached data, or {@code -holeLength} if {@code position} + * isn't cached. */ public long getCachedBytesLength(long position, long length) { checkArgument(position >= 0); checkArgument(length >= 0); - SimpleCacheSpan span = getSpan(position); + SimpleCacheSpan span = getSpan(position, length); if (span.isHoleSpan()) { // We don't have a span covering the start of the queried region. - return -Math.min(span.isOpenEnded() ? Long.MAX_VALUE : span.length, length); + return -min(span.isOpenEnded() ? Long.MAX_VALUE : span.length, length); } long queryEndPosition = position + length; if (queryEndPosition < 0) { @@ -138,14 +198,14 @@ public long getCachedBytesLength(long position, long length) { } // We expect currentEndPosition to always equal (next.position + next.length), but // perform a max check anyway to guard against the existence of overlapping spans. - currentEndPosition = Math.max(currentEndPosition, next.position + next.length); + currentEndPosition = max(currentEndPosition, next.position + next.length); if (currentEndPosition >= queryEndPosition) { // We've found spans covering the queried region. 
break; } } } - return Math.min(currentEndPosition - position, length); + return min(currentEndPosition - position, length); } /** @@ -160,9 +220,9 @@ public long getCachedBytesLength(long position, long length) { public SimpleCacheSpan setLastTouchTimestamp( SimpleCacheSpan cacheSpan, long lastTouchTimestamp, boolean updateFile) { checkState(cachedSpans.remove(cacheSpan)); - File file = cacheSpan.file; + File file = checkNotNull(cacheSpan.file); if (updateFile) { - File directory = file.getParentFile(); + File directory = checkNotNull(file.getParentFile()); long position = cacheSpan.position; File newFile = SimpleCacheSpan.getCacheFile(directory, id, position, lastTouchTimestamp); if (file.renameTo(newFile)) { @@ -185,7 +245,9 @@ public boolean isEmpty() { /** Removes the given span from cache. */ public boolean removeSpan(CacheSpan span) { if (cachedSpans.remove(span)) { - span.file.delete(); + if (span.file != null) { + span.file.delete(); + } return true; } return false; @@ -213,4 +275,51 @@ public boolean equals(@Nullable Object o) { && cachedSpans.equals(that.cachedSpans) && metadata.equals(that.metadata); } + + private static final class Range { + + /** The starting position of the range. */ + public final long position; + /** The length of the range, or {@link C#LENGTH_UNSET} if unbounded. */ + public final long length; + + public Range(long position, long length) { + this.position = position; + this.length = length; + } + + /** + * Returns whether this range fully contains the range specified by {@code otherPosition} and + * {@code otherLength}. + * + * @param otherPosition The position of the range to check. + * @param otherLength The length of the range to check, or {@link C#LENGTH_UNSET} if unbounded. + * @return Whether this range fully contains the specified range. + */ + public boolean contains(long otherPosition, long otherLength) { + if (length == C.LENGTH_UNSET) { + return otherPosition >= position; + } else if (otherLength == C.LENGTH_UNSET) { + return false; + } else { + return position <= otherPosition && (otherPosition + otherLength) <= (position + length); + } + } + + /** + * Returns whether this range intersects with the range specified by {@code otherPosition} and + * {@code otherLength}. + * + * @param otherPosition The position of the range to check. + * @param otherLength The length of the range to check, or {@link C#LENGTH_UNSET} if unbounded. + * @return Whether this range intersects with the specified range. 
+ */ + public boolean intersects(long otherPosition, long otherLength) { + if (position <= otherPosition) { + return length == C.LENGTH_UNSET || position + length > otherPosition; + } else { + return otherLength == C.LENGTH_UNSET || otherPosition + otherLength > position; + } + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CachedContentIndex.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CachedContentIndex.java index 7e09025ddd..e8ba423a0a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CachedContentIndex.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/CachedContentIndex.java @@ -15,6 +15,11 @@ */ package com.google.android.exoplayer2.upstream.cache; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.min; + import android.annotation.SuppressLint; import android.content.ContentValues; import android.database.Cursor; @@ -31,8 +36,8 @@ import com.google.android.exoplayer2.database.VersionTable; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.AtomicFile; -import com.google.android.exoplayer2.util.ReusableBufferedOutputStream; import com.google.android.exoplayer2.util.Util; +import com.google.common.collect.ImmutableSet; import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -45,11 +50,12 @@ import java.security.InvalidAlgorithmParameterException; import java.security.InvalidKeyException; import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.Map; -import java.util.Random; import java.util.Set; import javax.crypto.Cipher; import javax.crypto.CipherInputStream; @@ -58,6 +64,7 @@ import javax.crypto.spec.IvParameterSpec; import javax.crypto.spec.SecretKeySpec; import org.checkerframework.checker.nullness.compatqual.NullableType; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; /** Maintains the index of cached content. */ /* package */ class CachedContentIndex { @@ -74,7 +81,7 @@ * *
<p>
      [1] (key1, id1) is removed from the in-memory index ... the index is not stored to disk ... * [2] id1 is reused for a different key2 ... the index is not stored to disk ... [3] A file for - * key2 is partially written using a path corresponding to id1 ... the process is killed before + * key2 is partially written using a path corresponding to id1 ... the process is shut down before * the index is stored to disk ... [4] The index is read from disk, causing the partially written * file to be incorrectly associated to key1 * @@ -152,13 +159,15 @@ public CachedContentIndex( @Nullable byte[] legacyStorageSecretKey, boolean legacyStorageEncrypt, boolean preferLegacyStorage) { - Assertions.checkState(databaseProvider != null || legacyStorageDir != null); + checkState(databaseProvider != null || legacyStorageDir != null); keyToContent = new HashMap<>(); idToKey = new SparseArray<>(); removedIds = new SparseBooleanArray(); newIds = new SparseBooleanArray(); + @Nullable Storage databaseStorage = databaseProvider != null ? new DatabaseStorage(databaseProvider) : null; + @Nullable Storage legacyStorage = legacyStorageDir != null ? new LegacyStorage( @@ -167,7 +176,7 @@ public CachedContentIndex( legacyStorageEncrypt) : null; if (databaseStorage == null || (legacyStorage != null && preferLegacyStorage)) { - storage = legacyStorage; + storage = castNonNull(legacyStorage); previousStorage = databaseStorage; } else { storage = databaseStorage; @@ -223,30 +232,35 @@ public void store() throws IOException { } /** - * Adds the given key to the index if it isn't there already. + * Adds a resource to the index, if it's not there already. * - * @param key The cache key that uniquely identifies the original stream. - * @return A new or existing CachedContent instance with the given key. + * @param key The cache key of the resource. + * @return The new or existing {@link CachedContent} corresponding to the resource. */ public CachedContent getOrAdd(String key) { - CachedContent cachedContent = keyToContent.get(key); + @Nullable CachedContent cachedContent = keyToContent.get(key); return cachedContent == null ? addNew(key) : cachedContent; } - /** Returns a CachedContent instance with the given key or null if there isn't one. */ + /** + * Returns the {@link CachedContent} for a resource, or {@code null} if the resource is not + * present in the index. + * + * @param key The cache key of the resource. + */ + @Nullable public CachedContent get(String key) { return keyToContent.get(key); } /** - * Returns a Collection of all CachedContent instances in the index. The collection is backed by - * the {@code keyToContent} map, so changes to the map are reflected in the collection, and - * vice-versa. If the map is modified while an iteration over the collection is in progress - * (except through the iterator's own remove operation), the results of the iteration are - * undefined. + * Returns a read only collection of all {@link CachedContent CachedContents} in the index. + * + *
<p>
      Subsequent changes to the index are reflected in the returned collection. If the index is + * modified whilst iterating over the collection, the result of the iteration is undefined. */ public Collection getAll() { - return keyToContent.values(); + return Collections.unmodifiableCollection(keyToContent.values()); } /** Returns an existing or new id assigned to the given key. */ @@ -254,15 +268,20 @@ public int assignIdForKey(String key) { return getOrAdd(key).id; } - /** Returns the key which has the given id assigned. */ + /** Returns the key which has the given id assigned, or {@code null} if no such key exists. */ + @Nullable public String getKeyForId(int id) { return idToKey.get(id); } - /** Removes {@link CachedContent} with the given key from index if it's empty and not locked. */ + /** + * Removes a resource if its {@link CachedContent} is both empty and unlocked. + * + * @param key The cache key of the resource. + */ public void maybeRemove(String key) { - CachedContent cachedContent = keyToContent.get(key); - if (cachedContent != null && cachedContent.isEmpty() && !cachedContent.isLocked()) { + @Nullable CachedContent cachedContent = keyToContent.get(key); + if (cachedContent != null && cachedContent.isEmpty() && cachedContent.isFullyUnlocked()) { keyToContent.remove(key); int id = cachedContent.id; boolean neverStored = newIds.get(id); @@ -280,11 +299,10 @@ public void maybeRemove(String key) { } } - /** Removes empty and not locked {@link CachedContent} instances from index. */ + /** Removes all resources whose {@link CachedContent CachedContents} are empty and unlocked. */ public void removeEmpty() { - String[] keys = new String[keyToContent.size()]; - keyToContent.keySet().toArray(keys); - for (String key : keys) { + // Create a copy of the keys as the underlying map is modified by maybeRemove(key). + for (String key : ImmutableSet.copyOf(keyToContent.keySet())) { maybeRemove(key); } } @@ -312,7 +330,7 @@ public void applyContentMetadataMutations(String key, ContentMetadataMutations m /** Returns a {@link ContentMetadata} for the given key. */ public ContentMetadata getContentMetadata(String key) { - CachedContent cachedContent = get(key); + @Nullable CachedContent cachedContent = get(key); return cachedContent != null ? cachedContent.getMetadata() : DefaultContentMetadata.EMPTY; } @@ -345,7 +363,7 @@ private static Cipher getCipher() throws NoSuchPaddingException, NoSuchAlgorithm * returns the smallest unused non-negative integer. */ @VisibleForTesting - /* package */ static int getNewId(SparseArray idToKey) { + /* package */ static int getNewId(SparseArray<@NullableType String> idToKey) { int size = idToKey.size(); int id = size == 0 ? 0 : (idToKey.keyAt(size - 1) + 1); if (id < 0) { // In case if we pass max int value. @@ -380,13 +398,13 @@ private static DefaultContentMetadata readContentMetadata(DataInputStream input) // large) valueSize was read. In such cases the implementation below is expected to throw // IOException from one of the readFully calls, due to the end of the input being reached. 
int bytesRead = 0; - int nextBytesToRead = Math.min(valueSize, INCREMENTAL_METADATA_READ_LENGTH); + int nextBytesToRead = min(valueSize, INCREMENTAL_METADATA_READ_LENGTH); byte[] value = Util.EMPTY_BYTE_ARRAY; while (bytesRead != valueSize) { value = Arrays.copyOf(value, bytesRead + nextBytesToRead); input.readFully(value, bytesRead, nextBytesToRead); bytesRead += nextBytesToRead; - nextBytesToRead = Math.min(valueSize - bytesRead, INCREMENTAL_METADATA_READ_LENGTH); + nextBytesToRead = min(valueSize - bytesRead, INCREMENTAL_METADATA_READ_LENGTH); } metadata.put(name, value); } @@ -492,15 +510,16 @@ private static class LegacyStorage implements Storage { private final boolean encrypt; @Nullable private final Cipher cipher; @Nullable private final SecretKeySpec secretKeySpec; - @Nullable private final Random random; + @Nullable private final SecureRandom random; private final AtomicFile atomicFile; private boolean changed; @Nullable private ReusableBufferedOutputStream bufferedOutputStream; public LegacyStorage(File file, @Nullable byte[] secretKey, boolean encrypt) { - Cipher cipher = null; - SecretKeySpec secretKeySpec = null; + checkState(secretKey != null || !encrypt); + @Nullable Cipher cipher = null; + @Nullable SecretKeySpec secretKeySpec = null; if (secretKey != null) { Assertions.checkArgument(secretKey.length == 16); try { @@ -515,7 +534,7 @@ public LegacyStorage(File file, @Nullable byte[] secretKey, boolean encrypt) { this.encrypt = encrypt; this.cipher = cipher; this.secretKeySpec = secretKeySpec; - random = encrypt ? new Random() : null; + random = encrypt ? new SecureRandom() : null; atomicFile = new AtomicFile(file); } @@ -537,7 +556,7 @@ public void delete() { @Override public void load( HashMap content, SparseArray<@NullableType String> idToKey) { - Assertions.checkState(!changed); + checkState(!changed); if (!readFile(content, idToKey)) { content.clear(); idToKey.clear(); @@ -575,7 +594,7 @@ private boolean readFile( return true; } - DataInputStream input = null; + @Nullable DataInputStream input = null; try { InputStream inputStream = new BufferedInputStream(atomicFile.openRead()); input = new DataInputStream(inputStream); @@ -593,7 +612,7 @@ private boolean readFile( input.readFully(initializationVector); IvParameterSpec ivParameterSpec = new IvParameterSpec(initializationVector); try { - cipher.init(Cipher.DECRYPT_MODE, secretKeySpec, ivParameterSpec); + cipher.init(Cipher.DECRYPT_MODE, castNonNull(secretKeySpec), ivParameterSpec); } catch (InvalidKeyException | InvalidAlgorithmParameterException e) { throw new IllegalStateException(e); } @@ -626,7 +645,7 @@ private boolean readFile( } private void writeFile(HashMap content) throws IOException { - DataOutputStream output = null; + @Nullable DataOutputStream output = null; try { OutputStream outputStream = atomicFile.startWrite(); if (bufferedOutputStream == null) { @@ -634,6 +653,7 @@ private void writeFile(HashMap content) throws IOExceptio } else { bufferedOutputStream.reset(outputStream); } + ReusableBufferedOutputStream bufferedOutputStream = this.bufferedOutputStream; output = new DataOutputStream(bufferedOutputStream); output.writeInt(VERSION); @@ -642,11 +662,12 @@ private void writeFile(HashMap content) throws IOExceptio if (encrypt) { byte[] initializationVector = new byte[16]; - random.nextBytes(initializationVector); + castNonNull(random).nextBytes(initializationVector); output.write(initializationVector); IvParameterSpec ivParameterSpec = new IvParameterSpec(initializationVector); try { - 
cipher.init(Cipher.ENCRYPT_MODE, secretKeySpec, ivParameterSpec); + castNonNull(cipher) + .init(Cipher.ENCRYPT_MODE, castNonNull(secretKeySpec), ivParameterSpec); } catch (InvalidKeyException | InvalidAlgorithmParameterException e) { throw new IllegalStateException(e); // Should never happen. } @@ -749,16 +770,17 @@ private static final class DatabaseStorage implements Storage { + " BLOB NOT NULL)"; private final DatabaseProvider databaseProvider; - private final SparseArray pendingUpdates; + private final SparseArray<@NullableType CachedContent> pendingUpdates; - private String hexUid; - private String tableName; + private @MonotonicNonNull String hexUid; + private @MonotonicNonNull String tableName; public static void delete(DatabaseProvider databaseProvider, long uid) throws DatabaseIOException { delete(databaseProvider, Long.toHexString(uid)); } + @SuppressWarnings("nullness:initialization.fields.uninitialized") public DatabaseStorage(DatabaseProvider databaseProvider) { this.databaseProvider = databaseProvider; pendingUpdates = new SparseArray<>(); @@ -775,26 +797,26 @@ public boolean exists() throws DatabaseIOException { return VersionTable.getVersion( databaseProvider.getReadableDatabase(), VersionTable.FEATURE_CACHE_CONTENT_METADATA, - hexUid) + checkNotNull(hexUid)) != VersionTable.VERSION_UNSET; } @Override public void delete() throws DatabaseIOException { - delete(databaseProvider, hexUid); + delete(databaseProvider, checkNotNull(hexUid)); } @Override public void load( HashMap content, SparseArray<@NullableType String> idToKey) throws IOException { - Assertions.checkState(pendingUpdates.size() == 0); + checkState(pendingUpdates.size() == 0); try { int version = VersionTable.getVersion( databaseProvider.getReadableDatabase(), VersionTable.FEATURE_CACHE_CONTENT_METADATA, - hexUid); + checkNotNull(hexUid)); if (version != TABLE_VERSION) { SQLiteDatabase writableDatabase = databaseProvider.getWritableDatabase(); writableDatabase.beginTransactionNonExclusive(); @@ -809,7 +831,7 @@ public void load( try (Cursor cursor = getCursor()) { while (cursor.moveToNext()) { int id = cursor.getInt(COLUMN_INDEX_ID); - String key = cursor.getString(COLUMN_INDEX_KEY); + String key = checkNotNull(cursor.getString(COLUMN_INDEX_KEY)); byte[] metadataBytes = cursor.getBlob(COLUMN_INDEX_METADATA); ByteArrayInputStream inputStream = new ByteArrayInputStream(metadataBytes); @@ -858,7 +880,7 @@ public void storeIncremental(HashMap content) throws IOEx writableDatabase.beginTransactionNonExclusive(); try { for (int i = 0; i < pendingUpdates.size(); i++) { - CachedContent cachedContent = pendingUpdates.valueAt(i); + @Nullable CachedContent cachedContent = pendingUpdates.valueAt(i); if (cachedContent == null) { deleteRow(writableDatabase, pendingUpdates.keyAt(i)); } else { @@ -893,7 +915,7 @@ private Cursor getCursor() { return databaseProvider .getReadableDatabase() .query( - tableName, + checkNotNull(tableName), COLUMNS, /* selection= */ null, /* selectionArgs= */ null, @@ -904,13 +926,17 @@ private Cursor getCursor() { private void initializeTable(SQLiteDatabase writableDatabase) throws DatabaseIOException { VersionTable.setVersion( - writableDatabase, VersionTable.FEATURE_CACHE_CONTENT_METADATA, hexUid, TABLE_VERSION); - dropTable(writableDatabase, tableName); + writableDatabase, + VersionTable.FEATURE_CACHE_CONTENT_METADATA, + checkNotNull(hexUid), + TABLE_VERSION); + dropTable(writableDatabase, checkNotNull(tableName)); writableDatabase.execSQL("CREATE TABLE " + tableName + " " + TABLE_SCHEMA); } 
private void deleteRow(SQLiteDatabase writableDatabase, int key) { - writableDatabase.delete(tableName, WHERE_ID_EQUALS, new String[] {Integer.toString(key)}); + writableDatabase.delete( + checkNotNull(tableName), WHERE_ID_EQUALS, new String[] {Integer.toString(key)}); } private void addOrUpdateRow(SQLiteDatabase writableDatabase, CachedContent cachedContent) @@ -923,7 +949,7 @@ private void addOrUpdateRow(SQLiteDatabase writableDatabase, CachedContent cache values.put(COLUMN_ID, cachedContent.id); values.put(COLUMN_KEY, cachedContent.key); values.put(COLUMN_METADATA, data); - writableDatabase.replaceOrThrow(tableName, /* nullColumnHack= */ null, values); + writableDatabase.replaceOrThrow(checkNotNull(tableName), /* nullColumnHack= */ null, values); } private static void delete(DatabaseProvider databaseProvider, String hexUid) diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/ContentMetadata.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/ContentMetadata.java index 4cc6e6b860..3a68458866 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/ContentMetadata.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/ContentMetadata.java @@ -19,9 +19,7 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; -/** - * Interface for an immutable snapshot of keyed metadata. - */ +/** Interface for an immutable snapshot of keyed metadata. */ public interface ContentMetadata { /** @@ -81,7 +79,7 @@ static long getContentLength(ContentMetadata contentMetadata) { */ @Nullable static Uri getRedirectedUri(ContentMetadata contentMetadata) { - String redirectedUri = contentMetadata.get(KEY_REDIRECTED_URI, (String) null); + @Nullable String redirectedUri = contentMetadata.get(KEY_REDIRECTED_URI, (String) null); return redirectedUri == null ? null : Uri.parse(redirectedUri); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/ContentMetadataMutations.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/ContentMetadataMutations.java index 5715b8fbd4..579988a1a6 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/ContentMetadataMutations.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/ContentMetadataMutations.java @@ -19,6 +19,7 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.util.Assertions; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -73,36 +74,37 @@ public ContentMetadataMutations() { } /** - * Adds a mutation to set a metadata value. Passing {@code null} as {@code name} or {@code value} - * isn't allowed. + * Adds a mutation to set a metadata value. * * @param name The name of the metadata value. * @param value The value to be set. * @return This instance, for convenience. */ + @CanIgnoreReturnValue public ContentMetadataMutations set(String name, String value) { return checkAndSet(name, value); } /** - * Adds a mutation to set a metadata value. Passing {@code null} as {@code name} isn't allowed. + * Adds a mutation to set a metadata value. * * @param name The name of the metadata value. * @param value The value to be set. * @return This instance, for convenience. 
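(Illustrative aside, not part of the patch: with @CanIgnoreReturnValue on these setters, the fluent chained style below is the intended usage. The cache and cacheKey parameters are assumed to exist on the caller's side, and Cache#applyContentMetadataMutations is the pre-existing API for applying the mutations.)

import com.google.android.exoplayer2.upstream.cache.Cache;
import com.google.android.exoplayer2.upstream.cache.ContentMetadata;
import com.google.android.exoplayer2.upstream.cache.ContentMetadataMutations;

final class MetadataExample {
  static void annotate(Cache cache, String cacheKey) throws Cache.CacheException {
    // Each setter returns "this", so mutations can be built in one expression and applied together.
    ContentMetadataMutations mutations =
        new ContentMetadataMutations()
            .set(ContentMetadata.KEY_CONTENT_LENGTH, 1024L)
            .remove(ContentMetadata.KEY_REDIRECTED_URI);
    cache.applyContentMetadataMutations(cacheKey, mutations);
  }
}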
*/ + @CanIgnoreReturnValue public ContentMetadataMutations set(String name, long value) { return checkAndSet(name, value); } /** - * Adds a mutation to set a metadata value. Passing {@code null} as {@code name} or {@code value} - * isn't allowed. + * Adds a mutation to set a metadata value. * * @param name The name of the metadata value. * @param value The value to be set. * @return This instance, for convenience. */ + @CanIgnoreReturnValue public ContentMetadataMutations set(String name, byte[] value) { return checkAndSet(name, Arrays.copyOf(value, value.length)); } @@ -113,6 +115,7 @@ public ContentMetadataMutations set(String name, byte[] value) { * @param name The name of the metadata value. * @return This instance, for convenience. */ + @CanIgnoreReturnValue public ContentMetadataMutations remove(String name) { removedValues.add(name); editedValues.remove(name); @@ -124,7 +127,7 @@ public List getRemovedValues() { return Collections.unmodifiableList(new ArrayList<>(removedValues)); } - /** Returns a map of metadata name, value pairs to be set. Values are copied. */ + /** Returns a map of metadata name, value pairs to be set. Values are copied. */ public Map getEditedValues() { HashMap hashMap = new HashMap<>(editedValues); for (Entry entry : hashMap.entrySet()) { @@ -137,6 +140,7 @@ public Map getEditedValues() { return Collections.unmodifiableMap(hashMap); } + @CanIgnoreReturnValue private ContentMetadataMutations checkAndSet(String name, Object value) { editedValues.put(Assertions.checkNotNull(name), Assertions.checkNotNull(value)); removedValues.remove(name); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/DefaultContentMetadata.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/DefaultContentMetadata.java index 1f07af938a..4518add73f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/DefaultContentMetadata.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/DefaultContentMetadata.java @@ -16,9 +16,8 @@ package com.google.android.exoplayer2.upstream.cache; import androidx.annotation.Nullable; -import com.google.android.exoplayer2.C; +import com.google.common.base.Charsets; import java.nio.ByteBuffer; -import java.nio.charset.Charset; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -42,7 +41,9 @@ public DefaultContentMetadata() { this(Collections.emptyMap()); } - /** @param metadata The metadata entries in their raw byte array form. */ + /** + * @param metadata The metadata entries in their raw byte array form. 
+ */ public DefaultContentMetadata(Map metadata) { this.metadata = Collections.unmodifiableMap(metadata); } @@ -66,9 +67,9 @@ public Set> entrySet() { @Override @Nullable - public final byte[] get(String name, @Nullable byte[] defaultValue) { - if (metadata.containsKey(name)) { - byte[] bytes = metadata.get(name); + public final byte[] get(String key, @Nullable byte[] defaultValue) { + @Nullable byte[] bytes = metadata.get(key); + if (bytes != null) { return Arrays.copyOf(bytes, bytes.length); } else { return defaultValue; @@ -77,19 +78,19 @@ public final byte[] get(String name, @Nullable byte[] defaultValue) { @Override @Nullable - public final String get(String name, @Nullable String defaultValue) { - if (metadata.containsKey(name)) { - byte[] bytes = metadata.get(name); - return new String(bytes, Charset.forName(C.UTF8_NAME)); + public final String get(String key, @Nullable String defaultValue) { + @Nullable byte[] bytes = metadata.get(key); + if (bytes != null) { + return new String(bytes, Charsets.UTF_8); } else { return defaultValue; } } @Override - public final long get(String name, long defaultValue) { - if (metadata.containsKey(name)) { - byte[] bytes = metadata.get(name); + public final long get(String key, long defaultValue) { + @Nullable byte[] bytes = metadata.get(key); + if (bytes != null) { return ByteBuffer.wrap(bytes).getLong(); } else { return defaultValue; @@ -97,8 +98,8 @@ public final long get(String name, long defaultValue) { } @Override - public final boolean contains(String name) { - return metadata.containsKey(name); + public final boolean contains(String key) { + return metadata.containsKey(key); } @Override @@ -130,7 +131,7 @@ private static boolean isMetadataEqual(Map first, Map entry : first.entrySet()) { byte[] value = entry.getValue(); - byte[] otherValue = second.get(entry.getKey()); + @Nullable byte[] otherValue = second.get(entry.getKey()); if (!Arrays.equals(value, otherValue)) { return false; } @@ -153,8 +154,8 @@ private static void removeValues(HashMap metadata, List } private static void addValues(HashMap metadata, Map values) { - for (String name : values.keySet()) { - metadata.put(name, getBytes(values.get(name))); + for (Entry entry : values.entrySet()) { + metadata.put(entry.getKey(), getBytes(entry.getValue())); } } @@ -162,12 +163,11 @@ private static byte[] getBytes(Object value) { if (value instanceof Long) { return ByteBuffer.allocate(8).putLong((Long) value).array(); } else if (value instanceof String) { - return ((String) value).getBytes(Charset.forName(C.UTF8_NAME)); + return ((String) value).getBytes(Charsets.UTF_8); } else if (value instanceof byte[]) { return (byte[]) value; } else { throw new IllegalArgumentException(); } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/LeastRecentlyUsedCacheEvictor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/LeastRecentlyUsedCacheEvictor.java index c88e2643d8..fb461813ae 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/LeastRecentlyUsedCacheEvictor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/LeastRecentlyUsedCacheEvictor.java @@ -16,7 +16,6 @@ package com.google.android.exoplayer2.upstream.cache; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.upstream.cache.Cache.CacheException; import java.util.TreeSet; /** Evicts least recently used cache files first. 
*/ @@ -70,11 +69,7 @@ public void onSpanTouched(Cache cache, CacheSpan oldSpan, CacheSpan newSpan) { private void evictCache(Cache cache, long requiredSpace) { while (currentSize + requiredSpace > maxBytes && !leastRecentlyUsed.isEmpty()) { - try { - cache.removeSpan(leastRecentlyUsed.first()); - } catch (CacheException e) { - // do nothing. - } + cache.removeSpan(leastRecentlyUsed.first()); } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/NoOpCacheEvictor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/NoOpCacheEvictor.java index da89dc1cb3..a4113de0df 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/NoOpCacheEvictor.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/NoOpCacheEvictor.java @@ -15,12 +15,11 @@ */ package com.google.android.exoplayer2.upstream.cache; - /** * Evictor that doesn't ever evict cache files. * - * Warning: Using this evictor might have unforeseeable consequences if cache - * size is not managed elsewhere. + *
      Warning: Using this evictor might have unforeseeable consequences if cache size is not managed + * elsewhere. */ public final class NoOpCacheEvictor implements CacheEvictor { @@ -35,7 +34,7 @@ public void onCacheInitialized() { } @Override - public void onStartFile(Cache cache, String key, long position, long maxLength) { + public void onStartFile(Cache cache, String key, long position, long length) { // Do nothing. } @@ -53,5 +52,4 @@ public void onSpanRemoved(Cache cache, CacheSpan span) { public void onSpanTouched(Cache cache, CacheSpan oldSpan, CacheSpan newSpan) { // Do nothing. } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ReusableBufferedOutputStream.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/ReusableBufferedOutputStream.java similarity index 88% rename from TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ReusableBufferedOutputStream.java rename to TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/ReusableBufferedOutputStream.java index 1db3d2c1f4..b952fdc1b9 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ReusableBufferedOutputStream.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/ReusableBufferedOutputStream.java @@ -13,8 +13,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.google.android.exoplayer2.util; +package com.google.android.exoplayer2.upstream.cache; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Util; import java.io.BufferedOutputStream; import java.io.IOException; import java.io.OutputStream; @@ -23,7 +25,7 @@ * This is a subclass of {@link BufferedOutputStream} with a {@link #reset(OutputStream)} method * that allows an instance to be re-used with another underlying output stream. */ -public final class ReusableBufferedOutputStream extends BufferedOutputStream { +/* package */ final class ReusableBufferedOutputStream extends BufferedOutputStream { private boolean closed; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/SimpleCache.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/SimpleCache.java index a4fade25e0..0f1da88115 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/SimpleCache.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/SimpleCache.java @@ -16,7 +16,6 @@ package com.google.android.exoplayer2.upstream.cache; import android.os.ConditionVariable; -import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.WorkerThread; import com.google.android.exoplayer2.C; @@ -135,6 +134,7 @@ public static void delete(File cacheDir, @Nullable DatabaseProvider databaseProv * @deprecated Use a constructor that takes a {@link DatabaseProvider} for improved performance. */ @Deprecated + @SuppressWarnings("deprecation") public SimpleCache(File cacheDir, CacheEvictor evictor) { this(cacheDir, evictor, null, false); } @@ -260,7 +260,7 @@ public SimpleCache( // Start cache initialization. 
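    // The work started below runs on a dedicated background thread; the change that follows only renames that thread so it is easier to identify while debugging.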
final ConditionVariable conditionVariable = new ConditionVariable(); - new Thread("SimpleCache.initialize()") { + new Thread("ExoPlayer:SimpleCacheInit") { @Override public void run() { synchronized (SimpleCache.this) { @@ -309,6 +309,8 @@ public synchronized void release() { @Override public synchronized NavigableSet addListener(String key, Listener listener) { Assertions.checkState(!released); + Assertions.checkNotNull(key); + Assertions.checkNotNull(listener); ArrayList listenersForKey = listeners.get(key); if (listenersForKey == null) { listenersForKey = new ArrayList<>(); @@ -332,7 +334,6 @@ public synchronized void removeListener(String key, Listener listener) { } } - @NonNull @Override public synchronized NavigableSet getCachedSpans(String key) { Assertions.checkState(!released); @@ -355,13 +356,13 @@ public synchronized long getCacheSpace() { } @Override - public synchronized CacheSpan startReadWrite(String key, long position) + public synchronized CacheSpan startReadWrite(String key, long position, long length) throws InterruptedException, CacheException { Assertions.checkState(!released); checkInitialization(); while (true) { - CacheSpan span = startReadWriteNonBlocking(key, position); + CacheSpan span = startReadWriteNonBlocking(key, position, length); if (span != null) { return span; } else { @@ -377,12 +378,12 @@ public synchronized CacheSpan startReadWrite(String key, long position) @Override @Nullable - public synchronized CacheSpan startReadWriteNonBlocking(String key, long position) + public synchronized CacheSpan startReadWriteNonBlocking(String key, long position, long length) throws CacheException { Assertions.checkState(!released); checkInitialization(); - SimpleCacheSpan span = getSpan(key, position); + SimpleCacheSpan span = getSpan(key, position, length); if (span.isCached) { // Read case. @@ -390,9 +391,8 @@ public synchronized CacheSpan startReadWriteNonBlocking(String key, long positio } CachedContent cachedContent = contentIndex.getOrAdd(key); - if (!cachedContent.isLocked()) { + if (cachedContent.lockRange(position, span.length)) { // Write case. - cachedContent.setLocked(true); return span; } @@ -407,20 +407,22 @@ public synchronized File startFile(String key, long position, long length) throw CachedContent cachedContent = contentIndex.get(key); Assertions.checkNotNull(cachedContent); - Assertions.checkState(cachedContent.isLocked()); + Assertions.checkState(cachedContent.isFullyLocked(position, length)); if (!cacheDir.exists()) { - // For some reason the cache directory doesn't exist. Make a best effort to create it. - cacheDir.mkdirs(); + // The cache directory has been deleted from underneath us. Recreate it, and remove in-memory + // spans corresponding to cache files that no longer exist. + createCacheDirectories(cacheDir); removeStaleSpans(); } evictor.onStartFile(this, key, position, length); // Randomly distribute files into subdirectories with a uniform distribution. 
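    // (A subdirectory named after a random index in [0, SUBDIRECTORY_COUNT) is chosen and created on demand, so no single directory ends up holding every cache file.)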
- File fileDir = new File(cacheDir, Integer.toString(random.nextInt(SUBDIRECTORY_COUNT))); - if (!fileDir.exists()) { - fileDir.mkdir(); + File cacheSubDir = new File(cacheDir, Integer.toString(random.nextInt(SUBDIRECTORY_COUNT))); + if (!cacheSubDir.exists()) { + createCacheDirectories(cacheSubDir); } long lastTouchTimestamp = System.currentTimeMillis(); - return SimpleCacheSpan.getCacheFile(fileDir, cachedContent.id, position, lastTouchTimestamp); + return SimpleCacheSpan.getCacheFile( + cacheSubDir, cachedContent.id, position, lastTouchTimestamp); } @Override @@ -437,7 +439,7 @@ public synchronized void commitFile(File file, long length) throws CacheExceptio SimpleCacheSpan span = Assertions.checkNotNull(SimpleCacheSpan.createCacheEntry(file, length, contentIndex)); CachedContent cachedContent = Assertions.checkNotNull(contentIndex.get(span.key)); - Assertions.checkState(cachedContent.isLocked()); + Assertions.checkState(cachedContent.isFullyLocked(span.position, span.length)); // Check if the span conflicts with the set content length long contentLength = ContentMetadata.getContentLength(cachedContent.getMetadata()); @@ -465,14 +467,20 @@ public synchronized void commitFile(File file, long length) throws CacheExceptio @Override public synchronized void releaseHoleSpan(CacheSpan holeSpan) { Assertions.checkState(!released); - CachedContent cachedContent = contentIndex.get(holeSpan.key); - Assertions.checkNotNull(cachedContent); - Assertions.checkState(cachedContent.isLocked()); - cachedContent.setLocked(false); + CachedContent cachedContent = Assertions.checkNotNull(contentIndex.get(holeSpan.key)); + cachedContent.unlockRange(holeSpan.position); contentIndex.maybeRemove(cachedContent.key); notifyAll(); } + @Override + public synchronized void removeResource(String key) { + Assertions.checkState(!released); + for (CacheSpan span : getCachedSpans(key)) { + removeSpanInternal(span); + } + } + @Override public synchronized void removeSpan(CacheSpan span) { Assertions.checkState(!released); @@ -482,17 +490,43 @@ public synchronized void removeSpan(CacheSpan span) { @Override public synchronized boolean isCached(String key, long position, long length) { Assertions.checkState(!released); - CachedContent cachedContent = contentIndex.get(key); + @Nullable CachedContent cachedContent = contentIndex.get(key); return cachedContent != null && cachedContent.getCachedBytesLength(position, length) >= length; } @Override public synchronized long getCachedLength(String key, long position, long length) { Assertions.checkState(!released); - CachedContent cachedContent = contentIndex.get(key); + if (length == C.LENGTH_UNSET) { + length = Long.MAX_VALUE; + } + @Nullable CachedContent cachedContent = contentIndex.get(key); return cachedContent != null ? cachedContent.getCachedBytesLength(position, length) : -length; } + @Override + public synchronized long getCachedBytes(String key, long position, long length) { + long endPosition = length == C.LENGTH_UNSET ? Long.MAX_VALUE : position + length; + if (endPosition < 0) { + // The calculation rolled over (length is probably Long.MAX_VALUE). + endPosition = Long.MAX_VALUE; + } + long currentPosition = position; + long cachedBytes = 0; + while (currentPosition < endPosition) { + long maxRemainingLength = endPosition - currentPosition; + long blockLength = getCachedLength(key, currentPosition, maxRemainingLength); + if (blockLength > 0) { + cachedBytes += blockLength; + } else { + // There's a hole of length -blockLength. 
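+        // Negate it to get the hole's length and skip past it without adding to cachedBytes.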
+ blockLength = -blockLength; + } + currentPosition += blockLength; + } + return cachedBytes; + } + @Override public synchronized void applyContentMetadataMutations( String key, ContentMetadataMutations mutations) throws CacheException { @@ -516,15 +550,15 @@ public synchronized ContentMetadata getContentMetadata(String key) { /** Ensures that the cache's in-memory representation has been initialized. */ private void initialize() { if (!cacheDir.exists()) { - if (!cacheDir.mkdirs()) { - String message = "Failed to create cache directory: " + cacheDir; - Log.e(TAG, message); - initializationException = new CacheException(message); + try { + createCacheDirectories(cacheDir); + } catch (CacheException e) { + initializationException = e; return; } } - File[] files = cacheDir.listFiles(); + @Nullable File[] files = cacheDir.listFiles(); if (files == null) { String message = "Failed to list cache directory files: " + cacheDir; Log.e(TAG, message); @@ -605,11 +639,13 @@ private void loadDirectory( } long length = C.LENGTH_UNSET; long lastTouchTimestamp = C.TIME_UNSET; + @Nullable CacheFileMetadata metadata = fileMetadata != null ? fileMetadata.remove(fileName) : null; if (metadata != null) { length = metadata.length; lastTouchTimestamp = metadata.lastTouchTimestamp; } + @Nullable SimpleCacheSpan span = SimpleCacheSpan.createCacheEntry(file, length, lastTouchTimestamp, contentIndex); if (span != null) { @@ -655,23 +691,21 @@ private SimpleCacheSpan touchSpan(String key, SimpleCacheSpan span) { } /** - * Returns the cache span corresponding to the provided lookup span. - * - *
      If the lookup position is contained by an existing entry in the cache, then the returned - * span defines the file in which the data is stored. If the lookup position is not contained by - * an existing entry, then the returned span defines the maximum extents of the hole in the cache. + * Returns the cache span corresponding to the provided key and range. See {@link + * Cache#startReadWrite(String, long, long)} for detailed descriptions of the returned spans. * * @param key The key of the span being requested. * @param position The position of the span being requested. + * @param length The length of the span, or {@link C#LENGTH_UNSET} if unbounded. * @return The corresponding cache {@link SimpleCacheSpan}. */ - private SimpleCacheSpan getSpan(String key, long position) { - CachedContent cachedContent = contentIndex.get(key); + private SimpleCacheSpan getSpan(String key, long position, long length) { + @Nullable CachedContent cachedContent = contentIndex.get(key); if (cachedContent == null) { - return SimpleCacheSpan.createOpenHole(key, position); + return SimpleCacheSpan.createHole(key, position, length); } while (true) { - SimpleCacheSpan span = cachedContent.getSpan(position); + SimpleCacheSpan span = cachedContent.getSpan(position, length); if (span.isCached && span.file.length() != span.length) { // The file has been modified or deleted underneath us. It's likely that other files will // have been modified too, so scan the whole in-memory representation. @@ -694,7 +728,7 @@ private void addSpan(SimpleCacheSpan span) { } private void removeSpanInternal(CacheSpan span) { - CachedContent cachedContent = contentIndex.get(span.key); + @Nullable CachedContent cachedContent = contentIndex.get(span.key); if (cachedContent == null || !cachedContent.removeSpan(span)) { return; } @@ -732,7 +766,7 @@ private void removeStaleSpans() { } private void notifySpanRemoved(CacheSpan span) { - ArrayList keyListeners = listeners.get(span.key); + @Nullable ArrayList keyListeners = listeners.get(span.key); if (keyListeners != null) { for (int i = keyListeners.size() - 1; i >= 0; i--) { keyListeners.get(i).onSpanRemoved(this, span); @@ -742,7 +776,7 @@ private void notifySpanRemoved(CacheSpan span) { } private void notifySpanAdded(SimpleCacheSpan span) { - ArrayList keyListeners = listeners.get(span.key); + @Nullable ArrayList keyListeners = listeners.get(span.key); if (keyListeners != null) { for (int i = keyListeners.size() - 1; i >= 0; i--) { keyListeners.get(i).onSpanAdded(this, span); @@ -752,7 +786,7 @@ private void notifySpanAdded(SimpleCacheSpan span) { } private void notifySpanTouched(SimpleCacheSpan oldSpan, CacheSpan newSpan) { - ArrayList keyListeners = listeners.get(oldSpan.key); + @Nullable ArrayList keyListeners = listeners.get(oldSpan.key); if (keyListeners != null) { for (int i = keyListeners.size() - 1; i >= 0; i--) { keyListeners.get(i).onSpanTouched(this, oldSpan, newSpan); @@ -802,6 +836,15 @@ private static long parseUid(String fileName) { return Long.parseLong(fileName.substring(0, fileName.indexOf('.')), /* radix= */ 16); } + private static void createCacheDirectories(File cacheDir) throws CacheException { + // If mkdirs() returns false, double check that the directory doesn't exist before throwing. 
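+    // (mkdirs() can also return false when another thread or process created the directory first, which is why isDirectory() is re-checked before throwing.)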
+ if (!cacheDir.mkdirs() && !cacheDir.isDirectory()) { + String message = "Failed to create cache directory: " + cacheDir; + Log.e(TAG, message); + throw new CacheException(message); + } + } + private static synchronized boolean lockFolder(File cacheDir) { return lockedCacheDirs.add(cacheDir.getAbsoluteFile()); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/SimpleCacheSpan.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/SimpleCacheSpan.java index 5f6ea338e6..c9a70761b5 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/SimpleCacheSpan.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/SimpleCacheSpan.java @@ -23,18 +23,18 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -/** This class stores span metadata in filename. */ +/** A {@link CacheSpan} that encodes metadata into the names of the underlying cache files. */ /* package */ final class SimpleCacheSpan extends CacheSpan { /* package */ static final String COMMON_SUFFIX = ".exo"; private static final String SUFFIX = ".v3" + COMMON_SUFFIX; - private static final Pattern CACHE_FILE_PATTERN_V1 = Pattern.compile( - "^(.+)\\.(\\d+)\\.(\\d+)\\.v1\\.exo$", Pattern.DOTALL); - private static final Pattern CACHE_FILE_PATTERN_V2 = Pattern.compile( - "^(.+)\\.(\\d+)\\.(\\d+)\\.v2\\.exo$", Pattern.DOTALL); - private static final Pattern CACHE_FILE_PATTERN_V3 = Pattern.compile( - "^(\\d+)\\.(\\d+)\\.(\\d+)\\.v3\\.exo$", Pattern.DOTALL); + private static final Pattern CACHE_FILE_PATTERN_V1 = + Pattern.compile("^(.+)\\.(\\d+)\\.(\\d+)\\.v1\\.exo$", Pattern.DOTALL); + private static final Pattern CACHE_FILE_PATTERN_V2 = + Pattern.compile("^(.+)\\.(\\d+)\\.(\\d+)\\.v2\\.exo$", Pattern.DOTALL); + private static final Pattern CACHE_FILE_PATTERN_V3 = + Pattern.compile("^(\\d+)\\.(\\d+)\\.(\\d+)\\.v3\\.exo$", Pattern.DOTALL); /** * Returns a new {@link File} instance from {@code cacheDir}, {@code id}, {@code position}, {@code @@ -42,7 +42,7 @@ * * @param cacheDir The parent abstract pathname. * @param id The cache file id. - * @param position The position of the stored data in the original stream. + * @param position The position of the stored data in the resource. * @param timestamp The file timestamp. * @return The cache file. */ @@ -53,8 +53,8 @@ public static File getCacheFile(File cacheDir, int id, long position, long times /** * Creates a lookup span. * - * @param key The cache key. - * @param position The position of the {@link CacheSpan} in the original stream. + * @param key The cache key of the resource. + * @param position The position of the span in the resource. * @return The span. */ public static SimpleCacheSpan createLookup(String key, long position) { @@ -62,25 +62,14 @@ public static SimpleCacheSpan createLookup(String key, long position) { } /** - * Creates an open hole span. + * Creates a hole span. * - * @param key The cache key. - * @param position The position of the {@link CacheSpan} in the original stream. - * @return The span. - */ - public static SimpleCacheSpan createOpenHole(String key, long position) { - return new SimpleCacheSpan(key, position, C.LENGTH_UNSET, C.TIME_UNSET, null); - } - - /** - * Creates a closed hole span. - * - * @param key The cache key. - * @param position The position of the {@link CacheSpan} in the original stream. - * @param length The length of the {@link CacheSpan}. - * @return The span. + * @param key The cache key of the resource. 
+ * @param position The position of the span in the resource. + * @param length The length of the span, or {@link C#LENGTH_UNSET} if unbounded. + * @return The hole span. */ - public static SimpleCacheSpan createClosedHole(String key, long position, long length) { + public static SimpleCacheSpan createHole(String key, long position, long length) { return new SimpleCacheSpan(key, position, length, C.TIME_UNSET, null); } @@ -91,6 +80,7 @@ public static SimpleCacheSpan createClosedHole(String key, long position, long l * @param length The length of the cache file in bytes, or {@link C#LENGTH_UNSET} to query the * underlying file system. Querying the underlying file system can be expensive, so callers * that already know the length of the file should pass it explicitly. + * @param index The cached content index. * @return The span, or null if the file name is not correctly formatted, or if the id is not * present in the content index, or if the length is 0. */ @@ -108,6 +98,7 @@ public static SimpleCacheSpan createCacheEntry(File file, long length, CachedCon * that already know the length of the file should pass it explicitly. * @param lastTouchTimestamp The last touch timestamp, or {@link C#TIME_UNSET} to use the file * timestamp. + * @param index The cached content index. * @return The span, or null if the file name is not correctly formatted, or if the id is not * present in the content index, or if the length is 0. */ @@ -129,8 +120,8 @@ public static SimpleCacheSpan createCacheEntry( return null; } - int id = Integer.parseInt(matcher.group(1)); - String key = index.getKeyForId(id); + int id = Integer.parseInt(Assertions.checkNotNull(matcher.group(1))); + @Nullable String key = index.getKeyForId(id); if (key == null) { return null; } @@ -142,9 +133,9 @@ public static SimpleCacheSpan createCacheEntry( return null; } - long position = Long.parseLong(matcher.group(2)); + long position = Long.parseLong(Assertions.checkNotNull(matcher.group(2))); if (lastTouchTimestamp == C.TIME_UNSET) { - lastTouchTimestamp = Long.parseLong(matcher.group(3)); + lastTouchTimestamp = Long.parseLong(Assertions.checkNotNull(matcher.group(3))); } return new SimpleCacheSpan(key, position, length, lastTouchTimestamp, file); } @@ -153,34 +144,34 @@ public static SimpleCacheSpan createCacheEntry( * Upgrades the cache file if it is created by an earlier version of {@link SimpleCache}. * * @param file The cache file. - * @param index Cached content index. + * @param index The cached content index. * @return Upgraded cache file or {@code null} if the file name is not correctly formatted or the * file can not be renamed. */ @Nullable private static File upgradeFile(File file, CachedContentIndex index) { - String key; + @Nullable String key = null; String filename = file.getName(); Matcher matcher = CACHE_FILE_PATTERN_V2.matcher(filename); if (matcher.matches()) { - key = Util.unescapeFileName(matcher.group(1)); - if (key == null) { - return null; - } + key = Util.unescapeFileName(Assertions.checkNotNull(matcher.group(1))); } else { matcher = CACHE_FILE_PATTERN_V1.matcher(filename); - if (!matcher.matches()) { - return null; + if (matcher.matches()) { + key = Assertions.checkNotNull(matcher.group(1)); // Keys were not escaped in version 1. } - key = matcher.group(1); // Keys were not escaped in version 1. 
+ } + + if (key == null) { + return null; } File newCacheFile = getCacheFile( Assertions.checkStateNotNull(file.getParentFile()), index.assignIdForKey(key), - Long.parseLong(matcher.group(2)), - Long.parseLong(matcher.group(3))); + Long.parseLong(Assertions.checkNotNull(matcher.group(2))), + Long.parseLong(Assertions.checkNotNull(matcher.group(3)))); if (!file.renameTo(newCacheFile)) { return null; } @@ -188,13 +179,12 @@ private static File upgradeFile(File file, CachedContentIndex index) { } /** - * @param key The cache key. - * @param position The position of the {@link CacheSpan} in the original stream. - * @param length The length of the {@link CacheSpan}, or {@link C#LENGTH_UNSET} if this is an - * open-ended hole. + * @param key The cache key of the resource. + * @param position The position of the span in the resource. + * @param length The length of the span, or {@link C#LENGTH_UNSET} if this is an open-ended hole. * @param lastTouchTimestamp The last touch timestamp, or {@link C#TIME_UNSET} if {@link * #isCached} is false. - * @param file The file corresponding to this {@link CacheSpan}, or null if it's a hole. + * @param file The file corresponding to this span, or null if it's a hole. */ private SimpleCacheSpan( String key, long position, long length, long lastTouchTimestamp, @Nullable File file) { @@ -213,5 +203,4 @@ public SimpleCacheSpan copyWithFileAndLastTouchTimestamp(File file, long lastTou Assertions.checkState(isCached); return new SimpleCacheSpan(key, position, length, lastTouchTimestamp, file); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/package-info.java new file mode 100644 index 0000000000..bb6cf77458 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/cache/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.upstream.cache; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/AesCipherDataSink.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/AesCipherDataSink.java index d9b3ff0069..4e5b9f2b8e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/AesCipherDataSink.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/AesCipherDataSink.java @@ -16,6 +16,7 @@ package com.google.android.exoplayer2.upstream.crypto; import static com.google.android.exoplayer2.util.Util.castNonNull; +import static java.lang.Math.min; import androidx.annotation.Nullable; import com.google.android.exoplayer2.upstream.DataSink; @@ -23,9 +24,7 @@ import java.io.IOException; import javax.crypto.Cipher; -/** - * A wrapping {@link DataSink} that encrypts the data being consumed. 
- */ +/** A wrapping {@link DataSink} that encrypts the data being consumed. */ public final class AesCipherDataSink implements DataSink { private final DataSink wrappedDataSink; @@ -67,24 +66,27 @@ public AesCipherDataSink(byte[] secretKey, DataSink wrappedDataSink, @Nullable b @Override public void open(DataSpec dataSpec) throws IOException { wrappedDataSink.open(dataSpec); - long nonce = CryptoUtil.getFNV64Hash(dataSpec.key); - cipher = new AesFlushingCipher(Cipher.ENCRYPT_MODE, secretKey, nonce, - dataSpec.absoluteStreamPosition); + cipher = + new AesFlushingCipher( + Cipher.ENCRYPT_MODE, + secretKey, + dataSpec.key, + dataSpec.uriPositionOffset + dataSpec.position); } @Override - public void write(byte[] data, int offset, int length) throws IOException { + public void write(byte[] buffer, int offset, int length) throws IOException { if (scratch == null) { // In-place mode. Writes over the input data. - castNonNull(cipher).updateInPlace(data, offset, length); - wrappedDataSink.write(data, offset, length); + castNonNull(cipher).updateInPlace(buffer, offset, length); + wrappedDataSink.write(buffer, offset, length); } else { // Use scratch space. The original data remains intact. int bytesProcessed = 0; while (bytesProcessed < length) { - int bytesToProcess = Math.min(length - bytesProcessed, scratch.length); + int bytesToProcess = min(length - bytesProcessed, scratch.length); castNonNull(cipher) - .update(data, offset + bytesProcessed, bytesToProcess, scratch, /* outOffset= */ 0); + .update(buffer, offset + bytesProcessed, bytesToProcess, scratch, /* outOffset= */ 0); wrappedDataSink.write(scratch, /* offset= */ 0, bytesToProcess); bytesProcessed += bytesToProcess; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/AesCipherDataSource.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/AesCipherDataSource.java index 0910c63c19..98ec914fa0 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/AesCipherDataSource.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/AesCipherDataSource.java @@ -15,6 +15,7 @@ */ package com.google.android.exoplayer2.upstream.crypto; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; import static com.google.android.exoplayer2.util.Util.castNonNull; import android.net.Uri; @@ -28,9 +29,7 @@ import java.util.Map; import javax.crypto.Cipher; -/** - * A {@link DataSource} that decrypts the data read from an upstream source. - */ +/** A {@link DataSource} that decrypts the data read from an upstream source. 
*/ public final class AesCipherDataSource implements DataSource { private final DataSource upstream; @@ -45,28 +44,32 @@ public AesCipherDataSource(byte[] secretKey, DataSource upstream) { @Override public void addTransferListener(TransferListener transferListener) { + checkNotNull(transferListener); upstream.addTransferListener(transferListener); } @Override public long open(DataSpec dataSpec) throws IOException { long dataLength = upstream.open(dataSpec); - long nonce = CryptoUtil.getFNV64Hash(dataSpec.key); - cipher = new AesFlushingCipher(Cipher.DECRYPT_MODE, secretKey, nonce, - dataSpec.absoluteStreamPosition); + cipher = + new AesFlushingCipher( + Cipher.DECRYPT_MODE, + secretKey, + dataSpec.key, + dataSpec.uriPositionOffset + dataSpec.position); return dataLength; } @Override - public int read(byte[] data, int offset, int readLength) throws IOException { - if (readLength == 0) { + public int read(byte[] buffer, int offset, int length) throws IOException { + if (length == 0) { return 0; } - int read = upstream.read(data, offset, readLength); + int read = upstream.read(buffer, offset, length); if (read == C.RESULT_END_OF_INPUT) { return C.RESULT_END_OF_INPUT; } - castNonNull(cipher).updateInPlace(data, offset, read); + castNonNull(cipher).updateInPlace(buffer, offset, read); return read; } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/AesFlushingCipher.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/AesFlushingCipher.java index 1721b1d8b7..96cb13604e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/AesFlushingCipher.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/AesFlushingCipher.java @@ -15,6 +15,7 @@ */ package com.google.android.exoplayer2.upstream.crypto; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; import java.nio.ByteBuffer; @@ -30,8 +31,8 @@ /** * A flushing variant of a AES/CTR/NoPadding {@link Cipher}. * - * Unlike a regular {@link Cipher}, the update methods of this class are guaranteed to process all - * of the bytes input (and hence output the same number of bytes). + *
      Unlike a regular {@link Cipher}, the update methods of this class are guaranteed to process + * all of the bytes input (and hence output the same number of bytes). */ public final class AesFlushingCipher { @@ -42,6 +43,10 @@ public final class AesFlushingCipher { private int pendingXorBytes; + public AesFlushingCipher(int mode, byte[] secretKey, @Nullable String nonce, long offset) { + this(mode, secretKey, getFNV64Hash(nonce), offset); + } + public AesFlushingCipher(int mode, byte[] secretKey, long nonce, long offset) { try { cipher = Cipher.getInstance("AES/CTR/NoPadding"); @@ -57,7 +62,9 @@ public AesFlushingCipher(int mode, byte[] secretKey, long nonce, long offset) { if (startPadding != 0) { updateInPlace(new byte[startPadding], 0, startPadding); } - } catch (NoSuchAlgorithmException | NoSuchPaddingException | InvalidKeyException + } catch (NoSuchAlgorithmException + | NoSuchPaddingException + | InvalidKeyException | InvalidAlgorithmParameterException e) { // Should never happen. throw new RuntimeException(e); @@ -120,4 +127,22 @@ private byte[] getInitializationVector(long nonce, long counter) { return ByteBuffer.allocate(16).putLong(nonce).putLong(counter).array(); } + /** + * Returns the hash value of the input as a long using the 64 bit FNV-1a hash function. The hash + * values produced by this function are less likely to collide than those produced by {@link + * #hashCode()}. + */ + private static long getFNV64Hash(@Nullable String input) { + if (input == null) { + return 0; + } + + long hash = 0; + for (int i = 0; i < input.length(); i++) { + hash ^= input.charAt(i); + // This is equivalent to hash *= 0x100000001b3 (the FNV magic prime number). + hash += (hash << 1) + (hash << 4) + (hash << 5) + (hash << 7) + (hash << 8) + (hash << 40); + } + return hash; + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/CryptoUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/CryptoUtil.java deleted file mode 100644 index 3418f46ed0..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/CryptoUtil.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.upstream.crypto; - -import androidx.annotation.Nullable; - -/** - * Utility functions for the crypto package. - */ -/* package */ final class CryptoUtil { - - private CryptoUtil() {} - - /** - * Returns the hash value of the input as a long using the 64 bit FNV-1a hash function. The hash - * values produced by this function are less likely to collide than those produced by {@link - * #hashCode()}. - */ - public static long getFNV64Hash(@Nullable String input) { - if (input == null) { - return 0; - } - - long hash = 0; - for (int i = 0; i < input.length(); i++) { - hash ^= input.charAt(i); - // This is equivalent to hash *= 0x100000001b3 (the FNV magic prime number). 
- hash += (hash << 1) + (hash << 4) + (hash << 5) + (hash << 7) + (hash << 8) + (hash << 40); - } - return hash; - } - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/package-info.java new file mode 100644 index 0000000000..9c4005e815 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/crypto/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.upstream.crypto; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/package-info.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/package-info.java new file mode 100644 index 0000000000..1fb49d4b96 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/upstream/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +@NonNullApi +package com.google.android.exoplayer2.upstream; + +import com.google.android.exoplayer2.util.NonNullApi; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Assertions.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Assertions.java index 0f3bbfa14d..64496358cf 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Assertions.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Assertions.java @@ -20,10 +20,9 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.ExoPlayerLibraryInfo; import org.checkerframework.checker.nullness.qual.EnsuresNonNull; +import org.checkerframework.dataflow.qual.Pure; -/** - * Provides methods for asserting the truth of expressions and properties. - */ +/** Provides methods for asserting the truth of expressions and properties. */ public final class Assertions { private Assertions() {} @@ -34,6 +33,7 @@ private Assertions() {} * @param expression The expression to evaluate. * @throws IllegalArgumentException If {@code expression} is false. 
*/ + @Pure public static void checkArgument(boolean expression) { if (ExoPlayerLibraryInfo.ASSERTIONS_ENABLED && !expression) { throw new IllegalArgumentException(); @@ -48,6 +48,7 @@ public static void checkArgument(boolean expression) { * to a {@link String} using {@link String#valueOf(Object)}. * @throws IllegalArgumentException If {@code expression} is false. */ + @Pure public static void checkArgument(boolean expression, Object errorMessage) { if (ExoPlayerLibraryInfo.ASSERTIONS_ENABLED && !expression) { throw new IllegalArgumentException(String.valueOf(errorMessage)); @@ -63,6 +64,7 @@ public static void checkArgument(boolean expression, Object errorMessage) { * @return The {@code index} that was validated. * @throws IndexOutOfBoundsException If {@code index} falls outside the specified bounds. */ + @Pure public static int checkIndex(int index, int start, int limit) { if (index < start || index >= limit) { throw new IndexOutOfBoundsException(); @@ -76,6 +78,7 @@ public static int checkIndex(int index, int start, int limit) { * @param expression The expression to evaluate. * @throws IllegalStateException If {@code expression} is false. */ + @Pure public static void checkState(boolean expression) { if (ExoPlayerLibraryInfo.ASSERTIONS_ENABLED && !expression) { throw new IllegalStateException(); @@ -90,6 +93,7 @@ public static void checkState(boolean expression) { * to a {@link String} using {@link String#valueOf(Object)}. * @throws IllegalStateException If {@code expression} is false. */ + @Pure public static void checkState(boolean expression, Object errorMessage) { if (ExoPlayerLibraryInfo.ASSERTIONS_ENABLED && !expression) { throw new IllegalStateException(String.valueOf(errorMessage)); @@ -104,8 +108,9 @@ public static void checkState(boolean expression, Object errorMessage) { * @return The non-null reference that was validated. * @throws IllegalStateException If {@code reference} is null. */ - @SuppressWarnings({"contracts.postcondition.not.satisfied", "return.type.incompatible"}) + @SuppressWarnings({"nullness:contracts.postcondition", "nullness:return"}) @EnsuresNonNull({"#1"}) + @Pure public static T checkStateNotNull(@Nullable T reference) { if (ExoPlayerLibraryInfo.ASSERTIONS_ENABLED && reference == null) { throw new IllegalStateException(); @@ -123,8 +128,9 @@ public static T checkStateNotNull(@Nullable T reference) { * @return The non-null reference that was validated. * @throws IllegalStateException If {@code reference} is null. */ - @SuppressWarnings({"contracts.postcondition.not.satisfied", "return.type.incompatible"}) + @SuppressWarnings({"nullness:contracts.postcondition", "nullness:return"}) @EnsuresNonNull({"#1"}) + @Pure public static T checkStateNotNull(@Nullable T reference, Object errorMessage) { if (ExoPlayerLibraryInfo.ASSERTIONS_ENABLED && reference == null) { throw new IllegalStateException(String.valueOf(errorMessage)); @@ -140,8 +146,9 @@ public static T checkStateNotNull(@Nullable T reference, Object errorMessage * @return The non-null reference that was validated. * @throws NullPointerException If {@code reference} is null. 
*/ - @SuppressWarnings({"contracts.postcondition.not.satisfied", "return.type.incompatible"}) + @SuppressWarnings({"nullness:contracts.postcondition", "nullness:return"}) @EnsuresNonNull({"#1"}) + @Pure public static T checkNotNull(@Nullable T reference) { if (ExoPlayerLibraryInfo.ASSERTIONS_ENABLED && reference == null) { throw new NullPointerException(); @@ -159,8 +166,9 @@ public static T checkNotNull(@Nullable T reference) { * @return The non-null reference that was validated. * @throws NullPointerException If {@code reference} is null. */ - @SuppressWarnings({"contracts.postcondition.not.satisfied", "return.type.incompatible"}) + @SuppressWarnings({"nullness:contracts.postcondition", "nullness:return"}) @EnsuresNonNull({"#1"}) + @Pure public static T checkNotNull(@Nullable T reference, Object errorMessage) { if (ExoPlayerLibraryInfo.ASSERTIONS_ENABLED && reference == null) { throw new NullPointerException(String.valueOf(errorMessage)); @@ -175,8 +183,9 @@ public static T checkNotNull(@Nullable T reference, Object errorMessage) { * @return The non-null, non-empty string that was validated. * @throws IllegalArgumentException If {@code string} is null or 0-length. */ - @SuppressWarnings({"contracts.postcondition.not.satisfied", "return.type.incompatible"}) + @SuppressWarnings({"nullness:contracts.postcondition", "nullness:return"}) @EnsuresNonNull({"#1"}) + @Pure public static String checkNotEmpty(@Nullable String string) { if (ExoPlayerLibraryInfo.ASSERTIONS_ENABLED && TextUtils.isEmpty(string)) { throw new IllegalArgumentException(); @@ -193,8 +202,9 @@ public static String checkNotEmpty(@Nullable String string) { * @return The non-null, non-empty string that was validated. * @throws IllegalArgumentException If {@code string} is null or 0-length. */ - @SuppressWarnings({"contracts.postcondition.not.satisfied", "return.type.incompatible"}) + @SuppressWarnings({"nullness:contracts.postcondition", "nullness:return"}) @EnsuresNonNull({"#1"}) + @Pure public static String checkNotEmpty(@Nullable String string, Object errorMessage) { if (ExoPlayerLibraryInfo.ASSERTIONS_ENABLED && TextUtils.isEmpty(string)) { throw new IllegalArgumentException(String.valueOf(errorMessage)); @@ -208,10 +218,10 @@ public static String checkNotEmpty(@Nullable String string, Object errorMessage) * * @throws IllegalStateException If the calling thread is not the application's main thread. */ + @Pure public static void checkMainThread() { if (ExoPlayerLibraryInfo.ASSERTIONS_ENABLED && Looper.myLooper() != Looper.getMainLooper()) { throw new IllegalStateException("Not in applications main thread"); } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/AtomicFile.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/AtomicFile.java index fa40f0f012..2e3b00044c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/AtomicFile.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/AtomicFile.java @@ -65,8 +65,8 @@ public void delete() { /** * Start a new write operation on the file. This returns an {@link OutputStream} to which you can * write the new file data. If the whole data is written successfully you must call - * {@link #endWrite(OutputStream)}. On failure you should call {@link OutputStream#close()} - * only to free up resources used by it. + * {@link #endWrite(OutputStream)}. On failure you should call {@link OutputStream#close()} only + * to free up resources used by it. * *
      Example usage: * diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/BundleUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/BundleUtil.java new file mode 100644 index 0000000000..fcf07a8901 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/BundleUtil.java @@ -0,0 +1,111 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import android.os.Bundle; +import android.os.IBinder; +import androidx.annotation.Nullable; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; + +/** Utilities for {@link Bundle}. */ +public final class BundleUtil { + + private static final String TAG = "BundleUtil"; + + @Nullable private static Method getIBinderMethod; + @Nullable private static Method putIBinderMethod; + + /** + * Gets an {@link IBinder} inside a {@link Bundle} for all Android versions. + * + * @param bundle The bundle to get the {@link IBinder}. + * @param key The key to use while getting the {@link IBinder}. + * @return The {@link IBinder} that was obtained. + */ + @Nullable + public static IBinder getBinder(Bundle bundle, @Nullable String key) { + if (Util.SDK_INT >= 18) { + return bundle.getBinder(key); + } else { + return getBinderByReflection(bundle, key); + } + } + + /** + * Puts an {@link IBinder} inside a {@link Bundle} for all Android versions. + * + * @param bundle The bundle to insert the {@link IBinder}. + * @param key The key to use while putting the {@link IBinder}. + * @param binder The {@link IBinder} to put. + */ + public static void putBinder(Bundle bundle, @Nullable String key, @Nullable IBinder binder) { + if (Util.SDK_INT >= 18) { + bundle.putBinder(key, binder); + } else { + putBinderByReflection(bundle, key, binder); + } + } + + // Method.invoke may take null "key". + @SuppressWarnings("nullness:argument") + @Nullable + private static IBinder getBinderByReflection(Bundle bundle, @Nullable String key) { + @Nullable Method getIBinder = getIBinderMethod; + if (getIBinder == null) { + try { + getIBinderMethod = Bundle.class.getMethod("getIBinder", String.class); + getIBinderMethod.setAccessible(true); + } catch (NoSuchMethodException e) { + Log.i(TAG, "Failed to retrieve getIBinder method", e); + return null; + } + getIBinder = getIBinderMethod; + } + + try { + return (IBinder) getIBinder.invoke(bundle, key); + } catch (InvocationTargetException | IllegalAccessException | IllegalArgumentException e) { + Log.i(TAG, "Failed to invoke getIBinder via reflection", e); + return null; + } + } + + // Method.invoke may take null "key" and "binder". 
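+  // Reflection is only reached on API levels below 18, where Bundle does not expose getIBinder/putIBinder publicly.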
+ @SuppressWarnings("nullness:argument") + private static void putBinderByReflection( + Bundle bundle, @Nullable String key, @Nullable IBinder binder) { + @Nullable Method putIBinder = putIBinderMethod; + if (putIBinder == null) { + try { + putIBinderMethod = Bundle.class.getMethod("putIBinder", String.class, IBinder.class); + putIBinderMethod.setAccessible(true); + } catch (NoSuchMethodException e) { + Log.i(TAG, "Failed to retrieve putIBinder method", e); + return; + } + putIBinder = putIBinderMethod; + } + + try { + putIBinder.invoke(bundle, key, binder); + } catch (InvocationTargetException | IllegalAccessException | IllegalArgumentException e) { + Log.i(TAG, "Failed to invoke putIBinder via reflection", e); + } + } + + private BundleUtil() {} +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/BundleableUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/BundleableUtil.java new file mode 100644 index 0000000000..295ff13292 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/BundleableUtil.java @@ -0,0 +1,109 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Util.castNonNull; + +import android.os.Bundle; +import android.util.SparseArray; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Bundleable; +import com.google.common.collect.ImmutableList; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +/** Utilities for {@link Bundleable}. */ +public final class BundleableUtil { + + /** Converts a list of {@link Bundleable} to a list {@link Bundle}. */ + public static ImmutableList toBundleList(List bundleableList) { + ImmutableList.Builder builder = ImmutableList.builder(); + for (int i = 0; i < bundleableList.size(); i++) { + Bundleable bundleable = bundleableList.get(i); + builder.add(bundleable.toBundle()); + } + return builder.build(); + } + + /** Converts a list of {@link Bundle} to a list of {@link Bundleable}. */ + public static ImmutableList fromBundleList( + Bundleable.Creator creator, List bundleList) { + ImmutableList.Builder builder = ImmutableList.builder(); + for (int i = 0; i < bundleList.size(); i++) { + Bundle bundle = checkNotNull(bundleList.get(i)); // Fail fast during parsing. + T bundleable = creator.fromBundle(bundle); + builder.add(bundleable); + } + return builder.build(); + } + + /** + * Converts a collection of {@link Bundleable} to an {@link ArrayList} of {@link Bundle} so that + * the returned list can be put to {@link Bundle} using {@link Bundle#putParcelableArrayList} + * conveniently. 
+ */ + public static ArrayList toBundleArrayList( + Collection bundleables) { + ArrayList arrayList = new ArrayList<>(bundleables.size()); + for (T element : bundleables) { + arrayList.add(element.toBundle()); + } + return arrayList; + } + + /** + * Converts a {@link SparseArray} of {@link Bundle} to a {@link SparseArray} of {@link + * Bundleable}. + */ + public static SparseArray fromBundleSparseArray( + Bundleable.Creator creator, SparseArray bundleSparseArray) { + SparseArray result = new SparseArray<>(bundleSparseArray.size()); + for (int i = 0; i < bundleSparseArray.size(); i++) { + result.put(bundleSparseArray.keyAt(i), creator.fromBundle(bundleSparseArray.valueAt(i))); + } + return result; + } + + /** + * Converts a {@link SparseArray} of {@link Bundleable} to an {@link SparseArray} of {@link + * Bundle} so that the returned {@link SparseArray} can be put to {@link Bundle} using {@link + * Bundle#putSparseParcelableArray} conveniently. + */ + public static SparseArray toBundleSparseArray( + SparseArray bundleableSparseArray) { + SparseArray sparseArray = new SparseArray<>(bundleableSparseArray.size()); + for (int i = 0; i < bundleableSparseArray.size(); i++) { + sparseArray.put(bundleableSparseArray.keyAt(i), bundleableSparseArray.valueAt(i).toBundle()); + } + return sparseArray; + } + + /** + * Sets the application class loader to the given {@link Bundle} if no class loader is present. + * + *
      This assumes that all classes unparceled from {@code bundle} are sharing the class loader of + * {@code BundleableUtils}. + */ + public static void ensureClassLoader(@Nullable Bundle bundle) { + if (bundle != null) { + bundle.setClassLoader(castNonNull(BundleableUtil.class.getClassLoader())); + } + } + + private BundleableUtil() {} +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Clock.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Clock.java index ffb8236bd1..68945f61f1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Clock.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Clock.java @@ -25,9 +25,7 @@ */ public interface Clock { - /** - * Default {@link Clock} to use for all non-test cases. - */ + /** Default {@link Clock} to use for all non-test cases. */ Clock DEFAULT = new SystemClock(); /** @@ -37,15 +35,16 @@ public interface Clock { */ long currentTimeMillis(); - /** @see android.os.SystemClock#elapsedRealtime() */ + /** + * @see android.os.SystemClock#elapsedRealtime() + */ long elapsedRealtime(); - /** @see android.os.SystemClock#uptimeMillis() */ + /** + * @see android.os.SystemClock#uptimeMillis() + */ long uptimeMillis(); - /** @see android.os.SystemClock#sleep(long) */ - void sleep(long sleepTimeMs); - /** * Creates a {@link HandlerWrapper} using a specified looper and a specified callback for handling * messages. @@ -53,4 +52,12 @@ public interface Clock { * @see Handler#Handler(Looper, Handler.Callback) */ HandlerWrapper createHandler(Looper looper, @Nullable Handler.Callback callback); + + /** + * Notifies the clock that the current thread is about to be blocked and won't return until a + * condition on another thread becomes true. + * + *
      Should be a no-op for all non-test cases. + */ + void onThreadBlocked(); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/CodecSpecificDataUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/CodecSpecificDataUtil.java index 3372f23971..d85fd4e0d2 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/CodecSpecificDataUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/CodecSpecificDataUtil.java @@ -15,210 +15,148 @@ */ package com.google.android.exoplayer2.util; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + import android.util.Pair; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.ParserException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; -/** - * Provides static utility methods for manipulating various types of codec specific data. - */ +/** Provides utilities for handling various types of codec-specific data. */ public final class CodecSpecificDataUtil { private static final byte[] NAL_START_CODE = new byte[] {0, 0, 0, 1}; + private static final String[] HEVC_GENERAL_PROFILE_SPACE_STRINGS = + new String[] {"", "A", "B", "C"}; - private static final int AUDIO_SPECIFIC_CONFIG_FREQUENCY_INDEX_ARBITRARY = 0xF; - - private static final int[] AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE = new int[] { - 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350 - }; + // MP4V-ES + private static final int VISUAL_OBJECT_LAYER = 1; + private static final int VISUAL_OBJECT_LAYER_START = 0x20; + private static final int EXTENDED_PAR = 0x0F; + private static final int RECTANGULAR = 0x00; - private static final int AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID = -1; /** - * In the channel configurations below, indicates a single channel element; (A, B) indicates a - * channel pair element; and [A] indicates a low-frequency effects element. - * The speaker mapping short forms used are: - * - FC: front center - * - BC: back center - * - FL/FR: front left/right - * - FCL/FCR: front center left/right - * - FTL/FTR: front top left/right - * - SL/SR: back surround left/right - * - BL/BR: back left/right - * - LFE: low frequency effects - */ - private static final int[] AUDIO_SPECIFIC_CONFIG_CHANNEL_COUNT_TABLE = - new int[] { - 0, - 1, /* mono: */ - 2, /* stereo: (FL, FR) */ - 3, /* 3.0: , (FL, FR) */ - 4, /* 4.0: , (FL, FR), */ - 5, /* 5.0 back: , (FL, FR), (SL, SR) */ - 6, /* 5.1 back: , (FL, FR), (SL, SR), , [LFE] */ - 8, /* 7.1 wide back: , (FCL, FCR), (FL, FR), (SL, SR), [LFE] */ - AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID, - AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID, - AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID, - 7, /* 6.1: , (FL, FR), (SL, SR), , [LFE] */ - 8, /* 7.1: , (FL, FR), (SL, SR), (BL, BR), [LFE] */ - AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID, - 8, /* 7.1 top: , (FL, FR), (SL, SR), [LFE], (FTL, FTR) */ - AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID - }; - - // Advanced Audio Coding Low-Complexity profile. - private static final int AUDIO_OBJECT_TYPE_AAC_LC = 2; - // Spectral Band Replication. - private static final int AUDIO_OBJECT_TYPE_SBR = 5; - // Error Resilient Bit-Sliced Arithmetic Coding. - private static final int AUDIO_OBJECT_TYPE_ER_BSAC = 22; - // Parametric Stereo. 
- private static final int AUDIO_OBJECT_TYPE_PS = 29; - // Escape code for extended audio object types. - private static final int AUDIO_OBJECT_TYPE_ESCAPE = 31; - - private CodecSpecificDataUtil() {} - - /** - * Parses an AAC AudioSpecificConfig, as defined in ISO 14496-3 1.6.2.1 + * Parses an ALAC AudioSpecificConfig (i.e. an ALACSpecificConfig). * * @param audioSpecificConfig A byte array containing the AudioSpecificConfig to parse. * @return A pair consisting of the sample rate in Hz and the channel count. - * @throws ParserException If the AudioSpecificConfig cannot be parsed as it's not supported. */ - public static Pair parseAacAudioSpecificConfig(byte[] audioSpecificConfig) - throws ParserException { - return parseAacAudioSpecificConfig(new ParsableBitArray(audioSpecificConfig), false); + public static Pair parseAlacAudioSpecificConfig(byte[] audioSpecificConfig) { + ParsableByteArray byteArray = new ParsableByteArray(audioSpecificConfig); + byteArray.setPosition(9); + int channelCount = byteArray.readUnsignedByte(); + byteArray.setPosition(20); + int sampleRate = byteArray.readUnsignedIntToInt(); + return Pair.create(sampleRate, channelCount); } /** - * Parses an AAC AudioSpecificConfig, as defined in ISO 14496-3 1.6.2.1 + * Returns initialization data for formats with MIME type {@link MimeTypes#APPLICATION_CEA708}. * - * @param bitArray A {@link ParsableBitArray} containing the AudioSpecificConfig to parse. The - * position is advanced to the end of the AudioSpecificConfig. - * @param forceReadToEnd Whether the entire AudioSpecificConfig should be read. Required for - * knowing the length of the configuration payload. - * @return A pair consisting of the sample rate in Hz and the channel count. - * @throws ParserException If the AudioSpecificConfig cannot be parsed as it's not supported. + * @param isWideAspectRatio Whether the CEA-708 closed caption service is formatted for displays + * with 16:9 aspect ratio. + * @return Initialization data for formats with MIME type {@link MimeTypes#APPLICATION_CEA708}. */ - public static Pair parseAacAudioSpecificConfig( - ParsableBitArray bitArray, boolean forceReadToEnd) throws ParserException { - int audioObjectType = getAacAudioObjectType(bitArray); - int sampleRate = getAacSamplingFrequency(bitArray); - int channelConfiguration = bitArray.readBits(4); - if (audioObjectType == AUDIO_OBJECT_TYPE_SBR || audioObjectType == AUDIO_OBJECT_TYPE_PS) { - // For an AAC bitstream using spectral band replication (SBR) or parametric stereo (PS) with - // explicit signaling, we return the extension sampling frequency as the sample rate of the - // content; this is identical to the sample rate of the decoded output but may differ from - // the sample rate set above. - // Use the extensionSamplingFrequencyIndex. - sampleRate = getAacSamplingFrequency(bitArray); - audioObjectType = getAacAudioObjectType(bitArray); - if (audioObjectType == AUDIO_OBJECT_TYPE_ER_BSAC) { - // Use the extensionChannelConfiguration. - channelConfiguration = bitArray.readBits(4); - } - } + public static List buildCea708InitializationData(boolean isWideAspectRatio) { + return Collections.singletonList(isWideAspectRatio ? 
new byte[] {1} : new byte[] {0}); + } - if (forceReadToEnd) { - switch (audioObjectType) { - case 1: - case 2: - case 3: - case 4: - case 6: - case 7: - case 17: - case 19: - case 20: - case 21: - case 22: - case 23: - parseGaSpecificConfig(bitArray, audioObjectType, channelConfiguration); - break; - default: - throw new ParserException("Unsupported audio object type: " + audioObjectType); - } - switch (audioObjectType) { - case 17: - case 19: - case 20: - case 21: - case 22: - case 23: - int epConfig = bitArray.readBits(2); - if (epConfig == 2 || epConfig == 3) { - throw new ParserException("Unsupported epConfig: " + epConfig); - } - break; - } - } - // For supported containers, bits_to_decode() is always 0. - int channelCount = AUDIO_SPECIFIC_CONFIG_CHANNEL_COUNT_TABLE[channelConfiguration]; - Assertions.checkArgument(channelCount != AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID); - return Pair.create(sampleRate, channelCount); + /** + * Returns whether the CEA-708 closed caption service with the given initialization data is + * formatted for displays with 16:9 aspect ratio. + * + * @param initializationData The initialization data to parse. + * @return Whether the CEA-708 closed caption service is formatted for displays with 16:9 aspect + * ratio. + */ + public static boolean parseCea708InitializationData(List initializationData) { + return initializationData.size() == 1 + && initializationData.get(0).length == 1 + && initializationData.get(0)[0] == 1; } /** - * Builds a simple HE-AAC LC AudioSpecificConfig, as defined in ISO 14496-3 1.6.2.1 + * Parses an MPEG-4 Visual configuration information, as defined in ISO/IEC14496-2. * - * @param sampleRate The sample rate in Hz. - * @param channelCount The channel count. - * @return The AudioSpecificConfig. + * @param videoSpecificConfig A byte array containing the MPEG-4 Visual configuration information + * to parse. + * @return A pair of the video's width and height. 
*/ - public static byte[] buildAacLcAudioSpecificConfig(int sampleRate, int channelCount) { - int sampleRateIndex = C.INDEX_UNSET; - for (int i = 0; i < AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE.length; ++i) { - if (sampleRate == AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE[i]) { - sampleRateIndex = i; + public static Pair getVideoResolutionFromMpeg4VideoConfig( + byte[] videoSpecificConfig) { + int offset = 0; + boolean foundVOL = false; + ParsableByteArray scratchBytes = new ParsableByteArray(videoSpecificConfig); + while (offset + 3 < videoSpecificConfig.length) { + if (scratchBytes.readUnsignedInt24() != VISUAL_OBJECT_LAYER + || (videoSpecificConfig[offset + 3] & 0xF0) != VISUAL_OBJECT_LAYER_START) { + scratchBytes.setPosition(scratchBytes.getPosition() - 2); + offset++; + continue; } + foundVOL = true; + break; + } + + checkArgument(foundVOL, "Invalid input: VOL not found."); + + ParsableBitArray scratchBits = new ParsableBitArray(videoSpecificConfig); + // Skip the start codecs from the bitstream + scratchBits.skipBits((offset + 4) * 8); + scratchBits.skipBits(1); // random_accessible_vol + scratchBits.skipBits(8); // video_object_type_indication + + if (scratchBits.readBit()) { // object_layer_identifier + scratchBits.skipBits(4); // video_object_layer_verid + scratchBits.skipBits(3); // video_object_layer_priority } - int channelConfig = C.INDEX_UNSET; - for (int i = 0; i < AUDIO_SPECIFIC_CONFIG_CHANNEL_COUNT_TABLE.length; ++i) { - if (channelCount == AUDIO_SPECIFIC_CONFIG_CHANNEL_COUNT_TABLE[i]) { - channelConfig = i; + + int aspectRatioInfo = scratchBits.readBits(4); + if (aspectRatioInfo == EXTENDED_PAR) { + scratchBits.skipBits(8); // par_width + scratchBits.skipBits(8); // par_height + } + + if (scratchBits.readBit()) { // vol_control_parameters + scratchBits.skipBits(2); // chroma_format + scratchBits.skipBits(1); // low_delay + if (scratchBits.readBit()) { // vbv_parameters + scratchBits.skipBits(79); } } - if (sampleRate == C.INDEX_UNSET || channelConfig == C.INDEX_UNSET) { - throw new IllegalArgumentException( - "Invalid sample rate or number of channels: " + sampleRate + ", " + channelCount); + + int videoObjectLayerShape = scratchBits.readBits(2); + checkArgument( + videoObjectLayerShape == RECTANGULAR, + "Only supports rectangular video object layer shape."); + + checkArgument(scratchBits.readBit()); // marker_bit + int vopTimeIncrementResolution = scratchBits.readBits(16); + checkArgument(scratchBits.readBit()); // marker_bit + + if (scratchBits.readBit()) { // fixed_vop_rate + checkArgument(vopTimeIncrementResolution > 0); + vopTimeIncrementResolution--; + int numBitsToSkip = 0; + while (vopTimeIncrementResolution > 0) { + numBitsToSkip++; + vopTimeIncrementResolution >>= 1; + } + scratchBits.skipBits(numBitsToSkip); // fixed_vop_time_increment } - return buildAacAudioSpecificConfig(AUDIO_OBJECT_TYPE_AAC_LC, sampleRateIndex, channelConfig); - } - /** - * Builds a simple AudioSpecificConfig, as defined in ISO 14496-3 1.6.2.1 - * - * @param audioObjectType The audio object type. - * @param sampleRateIndex The sample rate index. - * @param channelConfig The channel configuration. - * @return The AudioSpecificConfig. 
- */ - public static byte[] buildAacAudioSpecificConfig(int audioObjectType, int sampleRateIndex, - int channelConfig) { - byte[] specificConfig = new byte[2]; - specificConfig[0] = (byte) (((audioObjectType << 3) & 0xF8) | ((sampleRateIndex >> 1) & 0x07)); - specificConfig[1] = (byte) (((sampleRateIndex << 7) & 0x80) | ((channelConfig << 3) & 0x78)); - return specificConfig; - } + checkArgument(scratchBits.readBit()); // marker_bit + int videoObjectLayerWidth = scratchBits.readBits(13); + checkArgument(scratchBits.readBit()); // marker_bit + int videoObjectLayerHeight = scratchBits.readBits(13); + checkArgument(scratchBits.readBit()); // marker_bit - /** - * Parses an ALAC AudioSpecificConfig (i.e. an ALACSpecificConfig). - * - * @param audioSpecificConfig A byte array containing the AudioSpecificConfig to parse. - * @return A pair consisting of the sample rate in Hz and the channel count. - */ - public static Pair parseAlacAudioSpecificConfig(byte[] audioSpecificConfig) { - ParsableByteArray byteArray = new ParsableByteArray(audioSpecificConfig); - byteArray.setPosition(9); - int channelCount = byteArray.readUnsignedByte(); - byteArray.setPosition(20); - int sampleRate = byteArray.readUnsignedIntToInt(); - return Pair.create(sampleRate, channelCount); + scratchBits.skipBits(1); // interlaced + + return Pair.create(videoObjectLayerWidth, videoObjectLayerHeight); } /** @@ -236,6 +174,34 @@ public static String buildAvcCodecString( "avc1.%02X%02X%02X", profileIdc, constraintsFlagsAndReservedZero2Bits, levelIdc); } + /** Builds an RFC 6381 HEVC codec string using the provided parameters. */ + public static String buildHevcCodecString( + int generalProfileSpace, + boolean generalTierFlag, + int generalProfileIdc, + int generalProfileCompatibilityFlags, + int[] constraintBytes, + int generalLevelIdc) { + StringBuilder builder = + new StringBuilder( + Util.formatInvariant( + "hvc1.%s%d.%X.%c%d", + HEVC_GENERAL_PROFILE_SPACE_STRINGS[generalProfileSpace], + generalProfileIdc, + generalProfileCompatibilityFlags, + generalTierFlag ? 'H' : 'L', + generalLevelIdc)); + // Omit trailing zero bytes. + int trailingZeroIndex = constraintBytes.length; + while (trailingZeroIndex > 0 && constraintBytes[trailingZeroIndex - 1] == 0) { + trailingZeroIndex--; + } + for (int i = 0; i < trailingZeroIndex; i++) { + builder.append(String.format(".%02X", constraintBytes[i])); + } + return builder.toString(); + } + /** * Constructs a NAL unit consisting of the NAL start code followed by the specified data. * @@ -262,7 +228,8 @@ public static byte[] buildNalUnit(byte[] data, int offset, int length) { * @return The individual NAL units, or null if the input did not consist of NAL start code * delimited units. */ - public static @Nullable byte[][] splitNalUnits(byte[] data) { + @Nullable + public static byte[][] splitNalUnits(byte[] data) { if (!isNalStartCode(data, 0)) { // data does not consist of NAL start code delimited units. return null; @@ -320,65 +287,5 @@ private static boolean isNalStartCode(byte[] data, int index) { return true; } - /** - * Returns the AAC audio object type as specified in 14496-3 (2005) Table 1.14. - * - * @param bitArray The bit array containing the audio specific configuration. - * @return The audio object type. 
- */ - private static int getAacAudioObjectType(ParsableBitArray bitArray) { - int audioObjectType = bitArray.readBits(5); - if (audioObjectType == AUDIO_OBJECT_TYPE_ESCAPE) { - audioObjectType = 32 + bitArray.readBits(6); - } - return audioObjectType; - } - - /** - * Returns the AAC sampling frequency (or extension sampling frequency) as specified in 14496-3 - * (2005) Table 1.13. - * - * @param bitArray The bit array containing the audio specific configuration. - * @return The sampling frequency. - */ - private static int getAacSamplingFrequency(ParsableBitArray bitArray) { - int samplingFrequency; - int frequencyIndex = bitArray.readBits(4); - if (frequencyIndex == AUDIO_SPECIFIC_CONFIG_FREQUENCY_INDEX_ARBITRARY) { - samplingFrequency = bitArray.readBits(24); - } else { - Assertions.checkArgument(frequencyIndex < 13); - samplingFrequency = AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE[frequencyIndex]; - } - return samplingFrequency; - } - - private static void parseGaSpecificConfig(ParsableBitArray bitArray, int audioObjectType, - int channelConfiguration) { - bitArray.skipBits(1); // frameLengthFlag. - boolean dependsOnCoreDecoder = bitArray.readBit(); - if (dependsOnCoreDecoder) { - bitArray.skipBits(14); // coreCoderDelay. - } - boolean extensionFlag = bitArray.readBit(); - if (channelConfiguration == 0) { - throw new UnsupportedOperationException(); // TODO: Implement programConfigElement(); - } - if (audioObjectType == 6 || audioObjectType == 20) { - bitArray.skipBits(3); // layerNr. - } - if (extensionFlag) { - if (audioObjectType == 22) { - bitArray.skipBits(16); // numOfSubFrame (5), layer_length(11). - } - if (audioObjectType == 17 || audioObjectType == 19 || audioObjectType == 20 - || audioObjectType == 23) { - // aacSectionDataResilienceFlag, aacScalefactorDataResilienceFlag, - // aacSpectralDataResilienceFlag. - bitArray.skipBits(3); - } - bitArray.skipBits(1); // extensionFlag3. 
- } - } - + private CodecSpecificDataUtil() {} } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ColorParser.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ColorParser.java index 54f52e0a14..1200fcb70b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ColorParser.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ColorParser.java @@ -15,7 +15,10 @@ */ package com.google.android.exoplayer2.util; +import android.graphics.Color; import android.text.TextUtils; +import androidx.annotation.ColorInt; +import com.google.common.base.Ascii; import java.util.HashMap; import java.util.Map; import java.util.regex.Matcher; @@ -32,14 +35,14 @@ public final class ColorParser { private static final String RGB = "rgb"; private static final String RGBA = "rgba"; - private static final Pattern RGB_PATTERN = Pattern.compile( - "^rgb\\((\\d{1,3}),(\\d{1,3}),(\\d{1,3})\\)$"); + private static final Pattern RGB_PATTERN = + Pattern.compile("^rgb\\((\\d{1,3}),(\\d{1,3}),(\\d{1,3})\\)$"); - private static final Pattern RGBA_PATTERN_INT_ALPHA = Pattern.compile( - "^rgba\\((\\d{1,3}),(\\d{1,3}),(\\d{1,3}),(\\d{1,3})\\)$"); + private static final Pattern RGBA_PATTERN_INT_ALPHA = + Pattern.compile("^rgba\\((\\d{1,3}),(\\d{1,3}),(\\d{1,3}),(\\d{1,3})\\)$"); - private static final Pattern RGBA_PATTERN_FLOAT_ALPHA = Pattern.compile( - "^rgba\\((\\d{1,3}),(\\d{1,3}),(\\d{1,3}),(\\d*\\.?\\d*?)\\)$"); + private static final Pattern RGBA_PATTERN_FLOAT_ALPHA = + Pattern.compile("^rgba\\((\\d{1,3}),(\\d{1,3}),(\\d{1,3}),(\\d*\\.?\\d*?)\\)$"); private static final Map COLOR_MAP; @@ -49,6 +52,7 @@ public final class ColorParser { * @param colorExpression The color expression. * @return The parsed ARGB color. */ + @ColorInt public static int parseTtmlColor(String colorExpression) { return parseColorInternal(colorExpression, false); } @@ -59,10 +63,12 @@ public static int parseTtmlColor(String colorExpression) { * @param colorExpression The color expression. * @return The parsed ARGB color. */ + @ColorInt public static int parseCssColor(String colorExpression) { return parseColorInternal(colorExpression, true); } + @ColorInt private static int parseColorInternal(String colorExpression, boolean alphaHasFloatFormat) { Assertions.checkArgument(!TextUtils.isEmpty(colorExpression)); colorExpression = colorExpression.replace(" ", ""); @@ -80,29 +86,29 @@ private static int parseColorInternal(String colorExpression, boolean alphaHasFl } return color; } else if (colorExpression.startsWith(RGBA)) { - Matcher matcher = (alphaHasFloatFormat ? RGBA_PATTERN_FLOAT_ALPHA : RGBA_PATTERN_INT_ALPHA) - .matcher(colorExpression); + Matcher matcher = + (alphaHasFloatFormat ? RGBA_PATTERN_FLOAT_ALPHA : RGBA_PATTERN_INT_ALPHA) + .matcher(colorExpression); if (matcher.matches()) { - return argb( - alphaHasFloatFormat ? (int) (255 * Float.parseFloat(matcher.group(4))) - : Integer.parseInt(matcher.group(4), 10), - Integer.parseInt(matcher.group(1), 10), - Integer.parseInt(matcher.group(2), 10), - Integer.parseInt(matcher.group(3), 10) - ); + return Color.argb( + alphaHasFloatFormat + ? 
(int) (255 * Float.parseFloat(Assertions.checkNotNull(matcher.group(4)))) + : Integer.parseInt(Assertions.checkNotNull(matcher.group(4)), 10), + Integer.parseInt(Assertions.checkNotNull(matcher.group(1)), 10), + Integer.parseInt(Assertions.checkNotNull(matcher.group(2)), 10), + Integer.parseInt(Assertions.checkNotNull(matcher.group(3)), 10)); } } else if (colorExpression.startsWith(RGB)) { Matcher matcher = RGB_PATTERN.matcher(colorExpression); if (matcher.matches()) { - return rgb( - Integer.parseInt(matcher.group(1), 10), - Integer.parseInt(matcher.group(2), 10), - Integer.parseInt(matcher.group(3), 10) - ); + return Color.rgb( + Integer.parseInt(Assertions.checkNotNull(matcher.group(1)), 10), + Integer.parseInt(Assertions.checkNotNull(matcher.group(2)), 10), + Integer.parseInt(Assertions.checkNotNull(matcher.group(3)), 10)); } } else { // we use our own color map - Integer color = COLOR_MAP.get(Util.toLowerInvariant(colorExpression)); + Integer color = COLOR_MAP.get(Ascii.toLowerCase(colorExpression)); if (color != null) { return color; } @@ -110,14 +116,6 @@ private static int parseColorInternal(String colorExpression, boolean alphaHasFl throw new IllegalArgumentException(); } - private static int argb(int alpha, int red, int green, int blue) { - return (alpha << 24) | (red << 16) | (green << 8) | blue; - } - - private static int rgb(int red, int green, int blue) { - return argb(0xFF, red, green, blue); - } - static { COLOR_MAP = new HashMap<>(); COLOR_MAP.put("aliceblue", 0xFFF0F8FF); diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ConditionVariable.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ConditionVariable.java index 69782ab1e8..bbdfd23d8c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ConditionVariable.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ConditionVariable.java @@ -29,7 +29,7 @@ *
    • {@link #open()} and {@link #close()} return whether they changed the variable's state. * */ -public final class ConditionVariable { +public class ConditionVariable { private final Clock clock; private boolean isOpen; @@ -40,7 +40,7 @@ public ConditionVariable() { } /** - * Creates an instance. + * Creates an instance, which starts closed. * * @param clock The {@link Clock} whose {@link Clock#elapsedRealtime()} method is used to * determine when {@link #block(long)} should time out. @@ -111,6 +111,26 @@ public synchronized boolean block(long timeoutMs) throws InterruptedException { return isOpen; } + /** + * Blocks until the condition is open. Unlike {@link #block}, this method will continue to block + * if the calling thread is interrupted. If the calling thread was interrupted then its {@link + * Thread#isInterrupted() interrupted status} will be set when the method returns. + */ + public synchronized void blockUninterruptible() { + boolean wasInterrupted = false; + while (!isOpen) { + try { + wait(); + } catch (InterruptedException e) { + wasInterrupted = true; + } + } + if (wasInterrupted) { + // Restore the interrupted status. + Thread.currentThread().interrupt(); + } + } + /** Returns whether the condition is opened. */ public synchronized boolean isOpen() { return isOpen; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Consumer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Consumer.java new file mode 100644 index 0000000000..8e982fc646 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Consumer.java @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +/** + * Represents an operation that accepts a single input argument and returns no result. Unlike most + * other functional interfaces, Consumer is expected to operate via side-effects. + */ +public interface Consumer { + + /** Performs this operation on the given argument. */ + void accept(T t); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/CopyOnWriteMultiset.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/CopyOnWriteMultiset.java new file mode 100644 index 0000000000..c473e2206b --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/CopyOnWriteMultiset.java @@ -0,0 +1,148 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package com.google.android.exoplayer2.util; + +import androidx.annotation.GuardedBy; +import androidx.annotation.Nullable; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * An unordered collection of elements that allows duplicates, but also allows access to a set of + * unique elements. + * + *
      This class is thread-safe using the same method as {@link + * java.util.concurrent.CopyOnWriteArrayList}. Mutation methods cause the underlying data to be + * copied. {@link #elementSet()} and {@link #iterator()} return snapshots that are unaffected by + * subsequent mutations. + * + *
      Iterating directly on this class reveals duplicate elements. Unique elements can be accessed + * via {@link #elementSet()}. Iteration order for both of these is not defined. + * + * @param The type of element being stored. + */ +// Intentionally extending @NonNull-by-default Object to disallow @Nullable E types. +@SuppressWarnings("TypeParameterExplicitlyExtendsObject") +public final class CopyOnWriteMultiset implements Iterable { + + private final Object lock; + + @GuardedBy("lock") + private final Map elementCounts; + + @GuardedBy("lock") + private Set elementSet; + + @GuardedBy("lock") + private List elements; + + public CopyOnWriteMultiset() { + lock = new Object(); + elementCounts = new HashMap<>(); + elementSet = Collections.emptySet(); + elements = Collections.emptyList(); + } + + /** + * Adds {@code element} to the multiset. + * + * @param element The element to be added. + */ + public void add(E element) { + synchronized (lock) { + List elements = new ArrayList<>(this.elements); + elements.add(element); + this.elements = Collections.unmodifiableList(elements); + + @Nullable Integer count = elementCounts.get(element); + if (count == null) { + Set elementSet = new HashSet<>(this.elementSet); + elementSet.add(element); + this.elementSet = Collections.unmodifiableSet(elementSet); + } + elementCounts.put(element, count != null ? count + 1 : 1); + } + } + + /** + * Removes {@code element} from the multiset. + * + * @param element The element to be removed. + */ + public void remove(E element) { + synchronized (lock) { + @Nullable Integer count = elementCounts.get(element); + if (count == null) { + return; + } + + List elements = new ArrayList<>(this.elements); + elements.remove(element); + this.elements = Collections.unmodifiableList(elements); + + if (count == 1) { + elementCounts.remove(element); + Set elementSet = new HashSet<>(this.elementSet); + elementSet.remove(element); + this.elementSet = Collections.unmodifiableSet(elementSet); + } else { + elementCounts.put(element, count - 1); + } + } + } + + /** + * Returns a snapshot of the unique elements currently in this multiset. + * + *
      Changes to the underlying multiset are not reflected in the returned value. + * + * @return An unmodifiable set containing the unique elements in this multiset. + */ + public Set elementSet() { + synchronized (lock) { + return elementSet; + } + } + + /** + * Returns an iterator over a snapshot of all the elements currently in this multiset (including + * duplicates). + * + *
      Changes to the underlying multiset are not reflected in the returned value. + * + * @return An unmodifiable iterator over all the elements in this multiset (including duplicates). + */ + @Override + public Iterator iterator() { + synchronized (lock) { + return elements.iterator(); + } + } + + /** Returns the number of occurrences of an element in this multiset. */ + public int count(E element) { + synchronized (lock) { + return elementCounts.containsKey(element) ? elementCounts.get(element) : 0; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/DebugTextViewHelper.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/DebugTextViewHelper.java new file mode 100644 index 0000000000..3461ee9e48 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/DebugTextViewHelper.java @@ -0,0 +1,227 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import android.annotation.SuppressLint; +import android.os.Looper; +import android.widget.TextView; +import com.google.android.exoplayer2.ExoPlayer; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.Player; +import com.google.android.exoplayer2.decoder.DecoderCounters; +import java.util.Locale; + +/** + * A helper class for periodically updating a {@link TextView} with debug information obtained from + * an {@link ExoPlayer}. + */ +public class DebugTextViewHelper { + + private static final int REFRESH_INTERVAL_MS = 1000; + + private final ExoPlayer player; + private final TextView textView; + private final Updater updater; + + private boolean started; + + /** + * @param player The {@link ExoPlayer} from which debug information should be obtained. Only + * players which are accessed on the main thread are supported ({@code + * player.getApplicationLooper() == Looper.getMainLooper()}). + * @param textView The {@link TextView} that should be updated to display the information. + */ + public DebugTextViewHelper(ExoPlayer player, TextView textView) { + Assertions.checkArgument(player.getApplicationLooper() == Looper.getMainLooper()); + this.player = player; + this.textView = textView; + this.updater = new Updater(); + } + + /** + * Starts periodic updates of the {@link TextView}. Must be called from the application's main + * thread. + */ + public final void start() { + if (started) { + return; + } + started = true; + player.addListener(updater); + updateAndPost(); + } + + /** + * Stops periodic updates of the {@link TextView}. Must be called from the application's main + * thread. + */ + public final void stop() { + if (!started) { + return; + } + started = false; + player.removeListener(updater); + textView.removeCallbacks(updater); + } + + // Protected methods. 
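  // Illustrative sketch, not part of this patch: typical wiring of this helper from UI code.
  // The method name and parameter names below are hypothetical; only the constructor, start()
  // and stop() come from this class, and the player must be accessed on the main Looper.
  private static DebugTextViewHelper attachDebugOverlay(ExoPlayer mainThreadPlayer, TextView overlay) {
    DebugTextViewHelper helper = new DebugTextViewHelper(mainThreadPlayer, overlay);
    helper.start(); // refreshes the overlay every REFRESH_INTERVAL_MS (1000 ms) via updateAndPost()
    return helper;  // callers invoke helper.stop() when the overlay is dismissed
  }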
+ + @SuppressLint("SetTextI18n") + protected final void updateAndPost() { + textView.setText(getDebugString()); + textView.removeCallbacks(updater); + textView.postDelayed(updater, REFRESH_INTERVAL_MS); + } + + /** Returns the debugging information string to be shown by the target {@link TextView}. */ + protected String getDebugString() { + return getPlayerStateString() + getVideoString() + getAudioString(); + } + + /** Returns a string containing player state debugging information. */ + protected String getPlayerStateString() { + String playbackStateString; + switch (player.getPlaybackState()) { + case Player.STATE_BUFFERING: + playbackStateString = "buffering"; + break; + case Player.STATE_ENDED: + playbackStateString = "ended"; + break; + case Player.STATE_IDLE: + playbackStateString = "idle"; + break; + case Player.STATE_READY: + playbackStateString = "ready"; + break; + default: + playbackStateString = "unknown"; + break; + } + return String.format( + "playWhenReady:%s playbackState:%s item:%s", + player.getPlayWhenReady(), playbackStateString, player.getCurrentMediaItemIndex()); + } + + /** Returns a string containing video debugging information. */ + protected String getVideoString() { + Format format = player.getVideoFormat(); + DecoderCounters decoderCounters = player.getVideoDecoderCounters(); + if (format == null || decoderCounters == null) { + return ""; + } + return "\n" + + format.sampleMimeType + + "(id:" + + format.id + + " r:" + + format.width + + "x" + + format.height + + getPixelAspectRatioString(format.pixelWidthHeightRatio) + + getDecoderCountersBufferCountString(decoderCounters) + + " vfpo: " + + getVideoFrameProcessingOffsetAverageString( + decoderCounters.totalVideoFrameProcessingOffsetUs, + decoderCounters.videoFrameProcessingOffsetCount) + + ")"; + } + + /** Returns a string containing audio debugging information. */ + protected String getAudioString() { + Format format = player.getAudioFormat(); + DecoderCounters decoderCounters = player.getAudioDecoderCounters(); + if (format == null || decoderCounters == null) { + return ""; + } + return "\n" + + format.sampleMimeType + + "(id:" + + format.id + + " hz:" + + format.sampleRate + + " ch:" + + format.channelCount + + getDecoderCountersBufferCountString(decoderCounters) + + ")"; + } + + private static String getDecoderCountersBufferCountString(DecoderCounters counters) { + if (counters == null) { + return ""; + } + counters.ensureUpdated(); + return " sib:" + + counters.skippedInputBufferCount + + " sb:" + + counters.skippedOutputBufferCount + + " rb:" + + counters.renderedOutputBufferCount + + " db:" + + counters.droppedBufferCount + + " mcdb:" + + counters.maxConsecutiveDroppedBufferCount + + " dk:" + + counters.droppedToKeyframeCount; + } + + private static String getPixelAspectRatioString(float pixelAspectRatio) { + return pixelAspectRatio == Format.NO_VALUE || pixelAspectRatio == 1f + ? "" + : (" par:" + String.format(Locale.US, "%.02f", pixelAspectRatio)); + } + + private static String getVideoFrameProcessingOffsetAverageString( + long totalOffsetUs, int frameCount) { + if (frameCount == 0) { + return "N/A"; + } else { + long averageUs = (long) ((double) totalOffsetUs / frameCount); + return String.valueOf(averageUs); + } + } + + private final class Updater implements Player.Listener, Runnable { + + // Player.Listener implementation. 
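    // Annotation, not part of this patch: Updater plays two roles. As a Player.Listener it reacts
    // to playback-state, play-when-ready and position-discontinuity changes by calling
    // updateAndPost(); as a Runnable it is re-posted by updateAndPost() via
    // textView.postDelayed(updater, REFRESH_INTERVAL_MS), which keeps the overlay refreshing
    // roughly once per second while started.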
+ + @Override + public void onPlaybackStateChanged(@Player.State int playbackState) { + updateAndPost(); + } + + @Override + public void onPlayWhenReadyChanged( + boolean playWhenReady, @Player.PlayWhenReadyChangeReason int reason) { + updateAndPost(); + } + + @Override + public void onPositionDiscontinuity( + Player.PositionInfo oldPosition, + Player.PositionInfo newPosition, + @Player.DiscontinuityReason int reason) { + updateAndPost(); + } + + // Runnable implementation. + + @Override + public void run() { + updateAndPost(); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/DebugViewProvider.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/DebugViewProvider.java new file mode 100644 index 0000000000..fad33aa6a6 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/DebugViewProvider.java @@ -0,0 +1,35 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import android.view.SurfaceView; +import androidx.annotation.Nullable; + +/** Provider for views to show diagnostic information during a transformation, for debugging. */ +public interface DebugViewProvider { + + /** Debug view provider that doesn't show any debug info. */ + DebugViewProvider NONE = (int width, int height) -> null; + + /** + * Returns a new surface view to show a preview of transformer output with the given width/height + * in pixels, or {@code null} if no debug information should be shown. + * + *
      This method may be called on an arbitrary thread. + */ + @Nullable + SurfaceView getDebugPreviewSurfaceView(int width, int height); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/EGLSurfaceTexture.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/EGLSurfaceTexture.java index e72e72c3c4..7855a4ca3e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/EGLSurfaceTexture.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/EGLSurfaceTexture.java @@ -15,7 +15,8 @@ */ package com.google.android.exoplayer2.util; -import android.annotation.TargetApi; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.graphics.SurfaceTexture; import android.opengl.EGL14; import android.opengl.EGLConfig; @@ -26,12 +27,14 @@ import android.os.Handler; import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** Generates a {@link SurfaceTexture} using EGL/GLES functions. */ -@TargetApi(17) +@RequiresApi(17) public final class EGLSurfaceTexture implements SurfaceTexture.OnFrameAvailableListener, Runnable { /** Listener to be called when the texture image on {@link SurfaceTexture} has been updated. */ @@ -46,6 +49,7 @@ public interface TextureImageListener { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({SECURE_MODE_NONE, SECURE_MODE_SURFACELESS_CONTEXT, SECURE_MODE_PROTECTED_PBUFFER}) public @interface SecureMode {} @@ -74,13 +78,6 @@ public interface TextureImageListener { private static final int EGL_PROTECTED_CONTENT_EXT = 0x32C0; - /** A runtime exception to be thrown if some EGL operations failed. */ - public static final class GlException extends RuntimeException { - private GlException(String msg) { - super(msg); - } - } - private final Handler handler; private final int[] textureIdHolder; @Nullable private final TextureImageListener callback; @@ -120,7 +117,7 @@ public EGLSurfaceTexture(Handler handler, @Nullable TextureImageListener callbac * * @param secureMode The {@link SecureMode} to be used for EGL surface. */ - public void init(@SecureMode int secureMode) { + public void init(@SecureMode int secureMode) throws GlUtil.GlException { display = getDefaultDisplay(); EGLConfig config = chooseEGLConfig(display); context = createEGLContext(display, config, secureMode); @@ -131,7 +128,7 @@ public void init(@SecureMode int secureMode) { } /** Releases all allocated resources. 
*/ - @SuppressWarnings({"nullness:argument.type.incompatible"}) + @SuppressWarnings("nullness:argument") public void release() { handler.removeCallbacks(this); try { @@ -201,22 +198,18 @@ private void dispatchOnFrameAvailable() { } } - private static EGLDisplay getDefaultDisplay() { + private static EGLDisplay getDefaultDisplay() throws GlUtil.GlException { EGLDisplay display = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY); - if (display == null) { - throw new GlException("eglGetDisplay failed"); - } + GlUtil.checkGlException(display != null, "eglGetDisplay failed"); int[] version = new int[2]; boolean eglInitialized = EGL14.eglInitialize(display, version, /* majorOffset= */ 0, version, /* minorOffset= */ 1); - if (!eglInitialized) { - throw new GlException("eglInitialize failed"); - } + GlUtil.checkGlException(eglInitialized, "eglInitialize failed"); return display; } - private static EGLConfig chooseEGLConfig(EGLDisplay display) { + private static EGLConfig chooseEGLConfig(EGLDisplay display) throws GlUtil.GlException { EGLConfig[] configs = new EGLConfig[1]; int[] numConfigs = new int[1]; boolean success = @@ -229,18 +222,17 @@ private static EGLConfig chooseEGLConfig(EGLDisplay display) { /* config_size= */ 1, numConfigs, /* num_configOffset= */ 0); - if (!success || numConfigs[0] <= 0 || configs[0] == null) { - throw new GlException( - Util.formatInvariant( - /* format= */ "eglChooseConfig failed: success=%b, numConfigs[0]=%d, configs[0]=%s", - success, numConfigs[0], configs[0])); - } + GlUtil.checkGlException( + success && numConfigs[0] > 0 && configs[0] != null, + Util.formatInvariant( + /* format= */ "eglChooseConfig failed: success=%b, numConfigs[0]=%d, configs[0]=%s", + success, numConfigs[0], configs[0])); return configs[0]; } private static EGLContext createEGLContext( - EGLDisplay display, EGLConfig config, @SecureMode int secureMode) { + EGLDisplay display, EGLConfig config, @SecureMode int secureMode) throws GlUtil.GlException { int[] glAttributes; if (secureMode == SECURE_MODE_NONE) { glAttributes = new int[] {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE}; @@ -257,14 +249,13 @@ private static EGLContext createEGLContext( EGLContext context = EGL14.eglCreateContext( display, config, android.opengl.EGL14.EGL_NO_CONTEXT, glAttributes, 0); - if (context == null) { - throw new GlException("eglCreateContext failed"); - } + GlUtil.checkGlException(context != null, "eglCreateContext failed"); return context; } private static EGLSurface createEGLSurface( - EGLDisplay display, EGLConfig config, EGLContext context, @SecureMode int secureMode) { + EGLDisplay display, EGLConfig config, EGLContext context, @SecureMode int secureMode) + throws GlUtil.GlException { EGLSurface surface; if (secureMode == SECURE_MODE_SURFACELESS_CONTEXT) { surface = EGL14.EGL_NO_SURFACE; @@ -292,20 +283,16 @@ private static EGLSurface createEGLSurface( }; } surface = EGL14.eglCreatePbufferSurface(display, config, pbufferAttributes, /* offset= */ 0); - if (surface == null) { - throw new GlException("eglCreatePbufferSurface failed"); - } + GlUtil.checkGlException(surface != null, "eglCreatePbufferSurface failed"); } boolean eglMadeCurrent = EGL14.eglMakeCurrent(display, /* draw= */ surface, /* read= */ surface, context); - if (!eglMadeCurrent) { - throw new GlException("eglMakeCurrent failed"); - } + GlUtil.checkGlException(eglMadeCurrent, "eglMakeCurrent failed"); return surface; } - private static void generateTextureIds(int[] textureIdHolder) { + private static void generateTextureIds(int[] 
textureIdHolder) throws GlUtil.GlException { GLES20.glGenTextures(/* n= */ 1, textureIdHolder, /* offset= */ 0); GlUtil.checkGlError(); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Effect.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Effect.java new file mode 100644 index 0000000000..eeda686dce --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Effect.java @@ -0,0 +1,20 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.util; + +/** Marker interface for a video frame effect. */ +public interface Effect {} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/EventDispatcher.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/EventDispatcher.java deleted file mode 100644 index 07f278c808..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/EventDispatcher.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright (C) 2018 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.util; - -import android.os.Handler; -import java.util.concurrent.CopyOnWriteArrayList; - -/** - * Event dispatcher which allows listener registration. - * - * @param The type of listener. - */ -public final class EventDispatcher { - - /** Functional interface to send an event. */ - public interface Event { - - /** - * Sends the event to a listener. - * - * @param listener The listener to send the event to. - */ - void sendTo(T listener); - } - - /** The list of listeners and handlers. */ - private final CopyOnWriteArrayList> listeners; - - /** Creates an event dispatcher. */ - public EventDispatcher() { - listeners = new CopyOnWriteArrayList<>(); - } - - /** Adds a listener to the event dispatcher. */ - public void addListener(Handler handler, T eventListener) { - Assertions.checkArgument(handler != null && eventListener != null); - removeListener(eventListener); - listeners.add(new HandlerAndListener<>(handler, eventListener)); - } - - /** Removes a listener from the event dispatcher. 
*/ - public void removeListener(T eventListener) { - for (HandlerAndListener handlerAndListener : listeners) { - if (handlerAndListener.listener == eventListener) { - handlerAndListener.release(); - listeners.remove(handlerAndListener); - } - } - } - - /** - * Dispatches an event to all registered listeners. - * - * @param event The {@link Event}. - */ - public void dispatch(Event event) { - for (HandlerAndListener handlerAndListener : listeners) { - handlerAndListener.dispatch(event); - } - } - - private static final class HandlerAndListener { - - private final Handler handler; - private final T listener; - - private boolean released; - - public HandlerAndListener(Handler handler, T eventListener) { - this.handler = handler; - this.listener = eventListener; - } - - public void release() { - released = true; - } - - public void dispatch(Event event) { - handler.post( - () -> { - if (!released) { - event.sendTo(listener); - } - }); - } - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/EventLogger.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/EventLogger.java index 9d145caee5..8f1630724c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/EventLogger.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/EventLogger.java @@ -15,31 +15,32 @@ */ package com.google.android.exoplayer2.util; +import static com.google.android.exoplayer2.util.Util.getFormatSupportString; +import static java.lang.Math.min; + import android.os.SystemClock; import android.text.TextUtils; -import android.view.Surface; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.MediaItem; +import com.google.android.exoplayer2.PlaybackException; import com.google.android.exoplayer2.PlaybackParameters; import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.Player.PlaybackSuppressionReason; -import com.google.android.exoplayer2.RendererCapabilities; -import com.google.android.exoplayer2.RendererCapabilities.AdaptiveSupport; import com.google.android.exoplayer2.Timeline; +import com.google.android.exoplayer2.Tracks; import com.google.android.exoplayer2.analytics.AnalyticsListener; import com.google.android.exoplayer2.audio.AudioAttributes; import com.google.android.exoplayer2.decoder.DecoderCounters; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation; +import com.google.android.exoplayer2.drm.DrmSession; import com.google.android.exoplayer2.metadata.Metadata; -import com.google.android.exoplayer2.source.MediaSourceEventListener.LoadEventInfo; -import com.google.android.exoplayer2.source.MediaSourceEventListener.MediaLoadData; -import com.google.android.exoplayer2.source.TrackGroup; -import com.google.android.exoplayer2.source.TrackGroupArray; +import com.google.android.exoplayer2.source.LoadEventInfo; +import com.google.android.exoplayer2.source.MediaLoadData; import com.google.android.exoplayer2.trackselection.MappingTrackSelector; -import com.google.android.exoplayer2.trackselection.MappingTrackSelector.MappedTrackInfo; -import com.google.android.exoplayer2.trackselection.TrackSelection; -import com.google.android.exoplayer2.trackselection.TrackSelectionArray; +import com.google.android.exoplayer2.video.VideoSize; +import com.google.common.collect.ImmutableList; import java.io.IOException; import java.text.NumberFormat; 
import java.util.Locale; @@ -51,6 +52,7 @@ public class EventLogger implements AnalyticsListener { private static final String DEFAULT_TAG = "EventLogger"; private static final int MAX_TIMELINE_ITEM_LINES = 3; private static final NumberFormat TIME_FORMAT; + static { TIME_FORMAT = NumberFormat.getInstance(Locale.US); TIME_FORMAT.setMinimumFractionDigits(2); @@ -58,48 +60,70 @@ public class EventLogger implements AnalyticsListener { TIME_FORMAT.setGroupingUsed(false); } - @Nullable private final MappingTrackSelector trackSelector; private final String tag; private final Timeline.Window window; private final Timeline.Period period; private final long startTimeMs; + /** Creates an instance. */ + public EventLogger() { + this(DEFAULT_TAG); + } + + /** + * Creates an instance. + * + * @param tag The tag used for logging. + */ + public EventLogger(String tag) { + this.tag = tag; + window = new Timeline.Window(); + period = new Timeline.Period(); + startTimeMs = SystemClock.elapsedRealtime(); + } + /** - * Creates event logger. + * Creates an instance. * - * @param trackSelector The mapping track selector used by the player. May be null if detailed - * logging of track mapping is not required. + * @param trackSelector This parameter is ignored. + * @deprecated Use {@link EventLogger()} */ + @Deprecated public EventLogger(@Nullable MappingTrackSelector trackSelector) { - this(trackSelector, DEFAULT_TAG); + this(DEFAULT_TAG); } /** - * Creates event logger. + * Creates an instance. * - * @param trackSelector The mapping track selector used by the player. May be null if detailed - * logging of track mapping is not required. + * @param trackSelector This parameter is ignored. * @param tag The tag used for logging. + * @deprecated Use {@link EventLogger(String)} */ + @Deprecated public EventLogger(@Nullable MappingTrackSelector trackSelector, String tag) { - this.trackSelector = trackSelector; - this.tag = tag; - window = new Timeline.Window(); - period = new Timeline.Period(); - startTimeMs = SystemClock.elapsedRealtime(); + this(tag); } // AnalyticsListener @Override - public void onLoadingChanged(EventTime eventTime, boolean isLoading) { + public void onIsLoadingChanged(EventTime eventTime, boolean isLoading) { logd(eventTime, "loading", Boolean.toString(isLoading)); } @Override - public void onPlayerStateChanged( - EventTime eventTime, boolean playWhenReady, @Player.State int state) { - logd(eventTime, "state", playWhenReady + ", " + getStateString(state)); + public void onPlaybackStateChanged(EventTime eventTime, @Player.State int state) { + logd(eventTime, "state", getStateString(state)); + } + + @Override + public void onPlayWhenReadyChanged( + EventTime eventTime, boolean playWhenReady, @Player.PlayWhenReadyChangeReason int reason) { + logd( + eventTime, + "playWhenReady", + playWhenReady + ", " + getPlayWhenReadyChangeReasonString(reason)); } @Override @@ -127,24 +151,56 @@ public void onShuffleModeChanged(EventTime eventTime, boolean shuffleModeEnabled } @Override - public void onPositionDiscontinuity(EventTime eventTime, @Player.DiscontinuityReason int reason) { - logd(eventTime, "positionDiscontinuity", getDiscontinuityReasonString(reason)); - } - - @Override - public void onSeekStarted(EventTime eventTime) { - logd(eventTime, "seekStarted"); + public void onPositionDiscontinuity( + EventTime eventTime, + Player.PositionInfo oldPosition, + Player.PositionInfo newPosition, + @Player.DiscontinuityReason int reason) { + StringBuilder builder = new StringBuilder(); + builder + 
.append("reason=") + .append(getDiscontinuityReasonString(reason)) + .append(", PositionInfo:old [") + .append("mediaItem=") + .append(oldPosition.mediaItemIndex) + .append(", period=") + .append(oldPosition.periodIndex) + .append(", pos=") + .append(oldPosition.positionMs); + if (oldPosition.adGroupIndex != C.INDEX_UNSET) { + builder + .append(", contentPos=") + .append(oldPosition.contentPositionMs) + .append(", adGroup=") + .append(oldPosition.adGroupIndex) + .append(", ad=") + .append(oldPosition.adIndexInAdGroup); + } + builder + .append("], PositionInfo:new [") + .append("mediaItem=") + .append(newPosition.mediaItemIndex) + .append(", period=") + .append(newPosition.periodIndex) + .append(", pos=") + .append(newPosition.positionMs); + if (newPosition.adGroupIndex != C.INDEX_UNSET) { + builder + .append(", contentPos=") + .append(newPosition.contentPositionMs) + .append(", adGroup=") + .append(newPosition.adGroupIndex) + .append(", ad=") + .append(newPosition.adIndexInAdGroup); + } + builder.append("]"); + logd(eventTime, "positionDiscontinuity", builder.toString()); } @Override public void onPlaybackParametersChanged( EventTime eventTime, PlaybackParameters playbackParameters) { - logd( - eventTime, - "playbackParameters", - Util.formatInvariant( - "speed=%.2f, pitch=%.2f, skipSilence=%s", - playbackParameters.speed, playbackParameters.pitch, playbackParameters.skipSilence)); + logd(eventTime, "playbackParameters", playbackParameters.toString()); } @Override @@ -160,22 +216,22 @@ public void onTimelineChanged(EventTime eventTime, @Player.TimelineChangeReason + windowCount + ", reason=" + getTimelineChangeReasonString(reason)); - for (int i = 0; i < Math.min(periodCount, MAX_TIMELINE_ITEM_LINES); i++) { + for (int i = 0; i < min(periodCount, MAX_TIMELINE_ITEM_LINES); i++) { eventTime.timeline.getPeriod(i, period); logd(" " + "period [" + getTimeString(period.getDurationMs()) + "]"); } if (periodCount > MAX_TIMELINE_ITEM_LINES) { logd(" ..."); } - for (int i = 0; i < Math.min(windowCount, MAX_TIMELINE_ITEM_LINES); i++) { + for (int i = 0; i < min(windowCount, MAX_TIMELINE_ITEM_LINES); i++) { eventTime.timeline.getWindow(i, window); logd( " " + "window [" + getTimeString(window.getDurationMs()) - + ", " + + ", seekable=" + window.isSeekable - + ", " + + ", dynamic=" + window.isDynamic + "]"); } @@ -186,101 +242,64 @@ public void onTimelineChanged(EventTime eventTime, @Player.TimelineChangeReason } @Override - public void onPlayerError(EventTime eventTime, ExoPlaybackException e) { - loge(eventTime, "playerFailed", e); + public void onMediaItemTransition( + EventTime eventTime, @Nullable MediaItem mediaItem, int reason) { + logd( + "mediaItem [" + + getEventTimeString(eventTime) + + ", reason=" + + getMediaItemTransitionReasonString(reason) + + "]"); } @Override - public void onTracksChanged( - EventTime eventTime, TrackGroupArray ignored, TrackSelectionArray trackSelections) { - MappedTrackInfo mappedTrackInfo = - trackSelector != null ? trackSelector.getCurrentMappedTrackInfo() : null; - if (mappedTrackInfo == null) { - logd(eventTime, "tracks", "[]"); - return; - } + public void onPlayerError(EventTime eventTime, PlaybackException error) { + loge(eventTime, "playerFailed", error); + } + + @Override + public void onTracksChanged(EventTime eventTime, Tracks tracks) { logd("tracks [" + getEventTimeString(eventTime)); // Log tracks associated to renderers. 
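      // Annotation, not part of this patch: these AnalyticsListener callbacks only fire once the
      // logger has been registered on the player, e.g. player.addAnalyticsListener(new EventLogger());
      // output lines are prefixed with the tag passed to EventLogger(String) or the default
      // "EventLogger" tag.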
- int rendererCount = mappedTrackInfo.getRendererCount(); - for (int rendererIndex = 0; rendererIndex < rendererCount; rendererIndex++) { - TrackGroupArray rendererTrackGroups = mappedTrackInfo.getTrackGroups(rendererIndex); - TrackSelection trackSelection = trackSelections.get(rendererIndex); - if (rendererTrackGroups.length > 0) { - logd(" Renderer:" + rendererIndex + " ["); - for (int groupIndex = 0; groupIndex < rendererTrackGroups.length; groupIndex++) { - TrackGroup trackGroup = rendererTrackGroups.get(groupIndex); - String adaptiveSupport = - getAdaptiveSupportString( - trackGroup.length, - mappedTrackInfo.getAdaptiveSupport( - rendererIndex, groupIndex, /* includeCapabilitiesExceededTracks= */ false)); - logd(" Group:" + groupIndex + ", adaptive_supported=" + adaptiveSupport + " ["); - for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) { - String status = getTrackStatusString(trackSelection, trackGroup, trackIndex); - String formatSupport = - RendererCapabilities.getFormatSupportString( - mappedTrackInfo.getTrackSupport(rendererIndex, groupIndex, trackIndex)); - logd( - " " - + status - + " Track:" - + trackIndex - + ", " - + Format.toLogString(trackGroup.getFormat(trackIndex)) - + ", supported=" - + formatSupport); - } - logd(" ]"); - } - // Log metadata for at most one of the tracks selected for the renderer. - if (trackSelection != null) { - for (int selectionIndex = 0; selectionIndex < trackSelection.length(); selectionIndex++) { - Metadata metadata = trackSelection.getFormat(selectionIndex).metadata; - if (metadata != null) { - logd(" Metadata ["); - printMetadata(metadata, " "); - logd(" ]"); - break; - } - } - } - logd(" ]"); + ImmutableList trackGroups = tracks.getGroups(); + for (int groupIndex = 0; groupIndex < trackGroups.size(); groupIndex++) { + Tracks.Group trackGroup = trackGroups.get(groupIndex); + logd(" group ["); + for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) { + String status = getTrackStatusString(trackGroup.isTrackSelected(trackIndex)); + String formatSupport = getFormatSupportString(trackGroup.getTrackSupport(trackIndex)); + logd( + " " + + status + + " Track:" + + trackIndex + + ", " + + Format.toLogString(trackGroup.getTrackFormat(trackIndex)) + + ", supported=" + + formatSupport); } + logd(" ]"); } - // Log tracks not associated with a renderer. - TrackGroupArray unassociatedTrackGroups = mappedTrackInfo.getUnmappedTrackGroups(); - if (unassociatedTrackGroups.length > 0) { - logd(" Renderer:None ["); - for (int groupIndex = 0; groupIndex < unassociatedTrackGroups.length; groupIndex++) { - logd(" Group:" + groupIndex + " ["); - TrackGroup trackGroup = unassociatedTrackGroups.get(groupIndex); - for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) { - String status = getTrackStatusString(false); - String formatSupport = - RendererCapabilities.getFormatSupportString( - RendererCapabilities.FORMAT_UNSUPPORTED_TYPE); - logd( - " " - + status - + " Track:" - + trackIndex - + ", " - + Format.toLogString(trackGroup.getFormat(trackIndex)) - + ", supported=" - + formatSupport); + // TODO: Replace this with an override of onMediaMetadataChanged. + // Log metadata for at most one of the selected tracks. 
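For orientation only (an illustrative sketch, not part of this patch): the hunk above replaces the MappedTrackInfo/TrackSelectionArray dump with the newer Tracks API, and since the MappingTrackSelector constructors are now deprecated no-ops, the logger is attached with the tag-only constructors. Assuming this ExoPlayer version exposes getCurrentTracks(), application code reads the same track information roughly like this; `context`, `player` and the tag are placeholders:

    // Sketch only; not defined by this patch.
    ExoPlayer player = new ExoPlayer.Builder(context).build();
    player.addAnalyticsListener(new EventLogger("NagramEventLog")); // new EventLogger() uses the default tag

    Tracks tracks = player.getCurrentTracks();
    for (Tracks.Group group : tracks.getGroups()) {
      for (int i = 0; i < group.length; i++) {
        String status = group.isTrackSelected(i) ? "[X]" : "[ ]";
        Log.d("Tracks", status + " " + Format.toLogString(group.getTrackFormat(i)));
      }
    }
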
+ boolean loggedMetadata = false; + for (int groupIndex = 0; !loggedMetadata && groupIndex < trackGroups.size(); groupIndex++) { + Tracks.Group trackGroup = trackGroups.get(groupIndex); + for (int trackIndex = 0; !loggedMetadata && trackIndex < trackGroup.length; trackIndex++) { + if (trackGroup.isTrackSelected(trackIndex)) { + @Nullable Metadata metadata = trackGroup.getTrackFormat(trackIndex).metadata; + if (metadata != null && metadata.length() > 0) { + logd(" Metadata ["); + printMetadata(metadata, " "); + logd(" ]"); + loggedMetadata = true; + } } - logd(" ]"); } - logd(" ]"); } logd("]"); } - @Override - public void onSeekProcessed(EventTime eventTime) { - logd(eventTime, "seekProcessed"); - } - @Override public void onMetadata(EventTime eventTime, Metadata metadata) { logd("metadata [" + getEventTimeString(eventTime)); @@ -289,12 +308,44 @@ public void onMetadata(EventTime eventTime, Metadata metadata) { } @Override - public void onDecoderEnabled(EventTime eventTime, int trackType, DecoderCounters counters) { - logd(eventTime, "decoderEnabled", Util.getTrackTypeString(trackType)); + public void onAudioEnabled(EventTime eventTime, DecoderCounters decoderCounters) { + logd(eventTime, "audioEnabled"); + } + + @Override + public void onAudioDecoderInitialized( + EventTime eventTime, String decoderName, long initializationDurationMs) { + logd(eventTime, "audioDecoderInitialized", decoderName); + } + + @Override + public void onAudioInputFormatChanged( + EventTime eventTime, Format format, @Nullable DecoderReuseEvaluation decoderReuseEvaluation) { + logd(eventTime, "audioInputFormat", Format.toLogString(format)); + } + + @Override + public void onAudioUnderrun( + EventTime eventTime, int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) { + loge( + eventTime, + "audioTrackUnderrun", + bufferSize + ", " + bufferSizeMs + ", " + elapsedSinceLastFeedMs, + /* throwable= */ null); + } + + @Override + public void onAudioDecoderReleased(EventTime eventTime, String decoderName) { + logd(eventTime, "audioDecoderReleased", decoderName); } @Override - public void onAudioSessionId(EventTime eventTime, int audioSessionId) { + public void onAudioDisabled(EventTime eventTime, DecoderCounters decoderCounters) { + logd(eventTime, "audioDisabled"); + } + + @Override + public void onAudioSessionIdChanged(EventTime eventTime, int audioSessionId) { logd(eventTime, "audioSessionId", Integer.toString(audioSessionId)); } @@ -313,67 +364,55 @@ public void onAudioAttributesChanged(EventTime eventTime, AudioAttributes audioA } @Override - public void onVolumeChanged(EventTime eventTime, float volume) { - logd(eventTime, "volume", Float.toString(volume)); + public void onSkipSilenceEnabledChanged(EventTime eventTime, boolean skipSilenceEnabled) { + logd(eventTime, "skipSilenceEnabled", Boolean.toString(skipSilenceEnabled)); } @Override - public void onDecoderInitialized( - EventTime eventTime, int trackType, String decoderName, long initializationDurationMs) { - logd(eventTime, "decoderInitialized", Util.getTrackTypeString(trackType) + ", " + decoderName); + public void onVolumeChanged(EventTime eventTime, float volume) { + logd(eventTime, "volume", Float.toString(volume)); } @Override - public void onDecoderInputFormatChanged(EventTime eventTime, int trackType, Format format) { - logd( - eventTime, - "decoderInputFormat", - Util.getTrackTypeString(trackType) + ", " + Format.toLogString(format)); + public void onVideoEnabled(EventTime eventTime, DecoderCounters decoderCounters) { + logd(eventTime, 
"videoEnabled"); } @Override - public void onDecoderDisabled(EventTime eventTime, int trackType, DecoderCounters counters) { - logd(eventTime, "decoderDisabled", Util.getTrackTypeString(trackType)); + public void onVideoDecoderInitialized( + EventTime eventTime, String decoderName, long initializationDurationMs) { + logd(eventTime, "videoDecoderInitialized", decoderName); } @Override - public void onAudioUnderrun( - EventTime eventTime, int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) { - loge( - eventTime, - "audioTrackUnderrun", - bufferSize + ", " + bufferSizeMs + ", " + elapsedSinceLastFeedMs + "]", - null); + public void onVideoInputFormatChanged( + EventTime eventTime, Format format, @Nullable DecoderReuseEvaluation decoderReuseEvaluation) { + logd(eventTime, "videoInputFormat", Format.toLogString(format)); } @Override - public void onDroppedVideoFrames(EventTime eventTime, int count, long elapsedMs) { - logd(eventTime, "droppedFrames", Integer.toString(count)); + public void onDroppedVideoFrames(EventTime eventTime, int droppedFrames, long elapsedMs) { + logd(eventTime, "droppedFrames", Integer.toString(droppedFrames)); } @Override - public void onVideoSizeChanged( - EventTime eventTime, - int width, - int height, - int unappliedRotationDegrees, - float pixelWidthHeightRatio) { - logd(eventTime, "videoSize", width + ", " + height); + public void onVideoDecoderReleased(EventTime eventTime, String decoderName) { + logd(eventTime, "videoDecoderReleased", decoderName); } @Override - public void onRenderedFirstFrame(EventTime eventTime, @Nullable Surface surface) { - logd(eventTime, "renderedFirstFrame", String.valueOf(surface)); + public void onVideoDisabled(EventTime eventTime, DecoderCounters decoderCounters) { + logd(eventTime, "videoDisabled"); } @Override - public void onMediaPeriodCreated(EventTime eventTime) { - logd(eventTime, "mediaPeriodCreated"); + public void onRenderedFirstFrame(EventTime eventTime, Object output, long renderTimeMs) { + logd(eventTime, "renderedFirstFrame", String.valueOf(output)); } @Override - public void onMediaPeriodReleased(EventTime eventTime) { - logd(eventTime, "mediaPeriodReleased"); + public void onVideoSizeChanged(EventTime eventTime, VideoSize videoSize) { + logd(eventTime, "videoSize", videoSize.width + ", " + videoSize.height); } @Override @@ -404,11 +443,6 @@ public void onLoadCompleted( // Do nothing. 
} - @Override - public void onReadingStarted(EventTime eventTime) { - logd(eventTime, "mediaPeriodReadingStarted"); - } - @Override public void onBandwidthEstimate( EventTime eventTime, int totalLoadTimeMs, long totalBytesLoaded, long bitrateEstimate) { @@ -431,13 +465,13 @@ public void onDownstreamFormatChanged(EventTime eventTime, MediaLoadData mediaLo } @Override - public void onDrmSessionAcquired(EventTime eventTime) { - logd(eventTime, "drmSessionAcquired"); + public void onDrmSessionAcquired(EventTime eventTime, @DrmSession.State int state) { + logd(eventTime, "drmSessionAcquired", "state=" + state); } @Override - public void onDrmSessionManagerError(EventTime eventTime, Exception e) { - printInternalError(eventTime, "drmSessionManagerError", e); + public void onDrmSessionManagerError(EventTime eventTime, Exception error) { + printInternalError(eventTime, "drmSessionManagerError", error); } @Override @@ -516,6 +550,9 @@ private String getEventString( @Nullable String eventDescription, @Nullable Throwable throwable) { String eventString = eventName + " [" + getEventTimeString(eventTime); + if (throwable instanceof PlaybackException) { + eventString += ", errorCode=" + ((PlaybackException) throwable).getErrorCodeName(); + } if (eventDescription != null) { eventString += ", " + eventDescription; } @@ -540,7 +577,7 @@ private String getEventTimeString(EventTime eventTime) { return "eventTime=" + getTimeString(eventTime.realtimeMs - startTimeMs) + ", mediaPos=" - + getTimeString(eventTime.currentPlaybackPositionMs) + + getTimeString(eventTime.eventPlaybackPositionMs) + ", " + windowPeriodString; } @@ -564,34 +601,8 @@ private static String getStateString(int state) { } } - private static String getAdaptiveSupportString( - int trackCount, @AdaptiveSupport int adaptiveSupport) { - if (trackCount < 2) { - return "N/A"; - } - switch (adaptiveSupport) { - case RendererCapabilities.ADAPTIVE_SEAMLESS: - return "YES"; - case RendererCapabilities.ADAPTIVE_NOT_SEAMLESS: - return "YES_NOT_SEAMLESS"; - case RendererCapabilities.ADAPTIVE_NOT_SUPPORTED: - return "NO"; - default: - throw new IllegalStateException(); - } - } - - // Suppressing reference equality warning because the track group stored in the track selection - // must point to the exact track group object to be considered part of it. - @SuppressWarnings("ReferenceEquality") - private static String getTrackStatusString( - @Nullable TrackSelection selection, TrackGroup group, int trackIndex) { - return getTrackStatusString(selection != null && selection.getTrackGroup() == group - && selection.indexOf(trackIndex) != C.INDEX_UNSET); - } - - private static String getTrackStatusString(boolean enabled) { - return enabled ? "[X]" : "[ ]"; + private static String getTrackStatusString(boolean selected) { + return selected ? 
"[X]" : "[ ]"; } private static String getRepeatModeString(@Player.RepeatMode int repeatMode) { @@ -609,14 +620,16 @@ private static String getRepeatModeString(@Player.RepeatMode int repeatMode) { private static String getDiscontinuityReasonString(@Player.DiscontinuityReason int reason) { switch (reason) { - case Player.DISCONTINUITY_REASON_PERIOD_TRANSITION: - return "PERIOD_TRANSITION"; + case Player.DISCONTINUITY_REASON_AUTO_TRANSITION: + return "AUTO_TRANSITION"; case Player.DISCONTINUITY_REASON_SEEK: return "SEEK"; case Player.DISCONTINUITY_REASON_SEEK_ADJUSTMENT: return "SEEK_ADJUSTMENT"; - case Player.DISCONTINUITY_REASON_AD_INSERTION: - return "AD_INSERTION"; + case Player.DISCONTINUITY_REASON_REMOVE: + return "REMOVE"; + case Player.DISCONTINUITY_REASON_SKIP: + return "SKIP"; case Player.DISCONTINUITY_REASON_INTERNAL: return "INTERNAL"; default: @@ -626,12 +639,26 @@ private static String getDiscontinuityReasonString(@Player.DiscontinuityReason i private static String getTimelineChangeReasonString(@Player.TimelineChangeReason int reason) { switch (reason) { - case Player.TIMELINE_CHANGE_REASON_PREPARED: - return "PREPARED"; - case Player.TIMELINE_CHANGE_REASON_RESET: - return "RESET"; - case Player.TIMELINE_CHANGE_REASON_DYNAMIC: - return "DYNAMIC"; + case Player.TIMELINE_CHANGE_REASON_SOURCE_UPDATE: + return "SOURCE_UPDATE"; + case Player.TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED: + return "PLAYLIST_CHANGED"; + default: + return "?"; + } + } + + private static String getMediaItemTransitionReasonString( + @Player.MediaItemTransitionReason int reason) { + switch (reason) { + case Player.MEDIA_ITEM_TRANSITION_REASON_AUTO: + return "AUTO"; + case Player.MEDIA_ITEM_TRANSITION_REASON_PLAYLIST_CHANGED: + return "PLAYLIST_CHANGED"; + case Player.MEDIA_ITEM_TRANSITION_REASON_REPEAT: + return "REPEAT"; + case Player.MEDIA_ITEM_TRANSITION_REASON_SEEK: + return "SEEK"; default: return "?"; } @@ -648,4 +675,22 @@ private static String getPlaybackSuppressionReasonString( return "?"; } } + + private static String getPlayWhenReadyChangeReasonString( + @Player.PlayWhenReadyChangeReason int reason) { + switch (reason) { + case Player.PLAY_WHEN_READY_CHANGE_REASON_AUDIO_BECOMING_NOISY: + return "AUDIO_BECOMING_NOISY"; + case Player.PLAY_WHEN_READY_CHANGE_REASON_AUDIO_FOCUS_LOSS: + return "AUDIO_FOCUS_LOSS"; + case Player.PLAY_WHEN_READY_CHANGE_REASON_REMOTE: + return "REMOTE"; + case Player.PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST: + return "USER_REQUEST"; + case Player.PLAY_WHEN_READY_CHANGE_REASON_END_OF_MEDIA_ITEM: + return "END_OF_MEDIA_ITEM"; + default: + return "?"; + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FileTypes.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FileTypes.java new file mode 100644 index 0000000000..e1a5716879 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FileTypes.java @@ -0,0 +1,256 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import static com.google.android.exoplayer2.util.MimeTypes.normalizeMimeType; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.net.Uri; +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.util.List; +import java.util.Map; + +/** Defines common file type constants and helper methods. */ +public final class FileTypes { + + /** + * File types. One of {@link #UNKNOWN}, {@link #AC3}, {@link #AC4}, {@link #ADTS}, {@link #AMR}, + * {@link #FLAC}, {@link #FLV}, {@link #MATROSKA}, {@link #MP3}, {@link #MP4}, {@link #OGG}, + * {@link #PS}, {@link #TS}, {@link #WAV}, {@link #WEBVTT}, {@link #JPEG} and {@link #MIDI}. + */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + UNKNOWN, AC3, AC4, ADTS, AMR, FLAC, FLV, MATROSKA, MP3, MP4, OGG, PS, TS, WAV, WEBVTT, JPEG, + MIDI, AVI + }) + public @interface Type {} + /** Unknown file type. */ + public static final int UNKNOWN = -1; + /** File type for the AC-3 and E-AC-3 formats. */ + public static final int AC3 = 0; + /** File type for the AC-4 format. */ + public static final int AC4 = 1; + /** File type for the ADTS format. */ + public static final int ADTS = 2; + /** File type for the AMR format. */ + public static final int AMR = 3; + /** File type for the FLAC format. */ + public static final int FLAC = 4; + /** File type for the FLV format. */ + public static final int FLV = 5; + /** File type for the Matroska and WebM formats. */ + public static final int MATROSKA = 6; + /** File type for the MP3 format. */ + public static final int MP3 = 7; + /** File type for the MP4 format. */ + public static final int MP4 = 8; + /** File type for the Ogg format. */ + public static final int OGG = 9; + /** File type for the MPEG-PS format. */ + public static final int PS = 10; + /** File type for the MPEG-TS format. */ + public static final int TS = 11; + /** File type for the WAV format. */ + public static final int WAV = 12; + /** File type for the WebVTT format. */ + public static final int WEBVTT = 13; + /** File type for the JPEG format. */ + public static final int JPEG = 14; + /** File type for the MIDI format. */ + public static final int MIDI = 15; + /** File type for the AVI format. 
*/ + public static final int AVI = 16; + + @VisibleForTesting /* package */ static final String HEADER_CONTENT_TYPE = "Content-Type"; + + private static final String EXTENSION_AC3 = ".ac3"; + private static final String EXTENSION_EC3 = ".ec3"; + private static final String EXTENSION_AC4 = ".ac4"; + private static final String EXTENSION_ADTS = ".adts"; + private static final String EXTENSION_AAC = ".aac"; + private static final String EXTENSION_AMR = ".amr"; + private static final String EXTENSION_FLAC = ".flac"; + private static final String EXTENSION_FLV = ".flv"; + private static final String EXTENSION_MID = ".mid"; + private static final String EXTENSION_MIDI = ".midi"; + private static final String EXTENSION_SMF = ".smf"; + private static final String EXTENSION_PREFIX_MK = ".mk"; + private static final String EXTENSION_WEBM = ".webm"; + private static final String EXTENSION_PREFIX_OG = ".og"; + private static final String EXTENSION_OPUS = ".opus"; + private static final String EXTENSION_MP3 = ".mp3"; + private static final String EXTENSION_MP4 = ".mp4"; + private static final String EXTENSION_PREFIX_M4 = ".m4"; + private static final String EXTENSION_PREFIX_MP4 = ".mp4"; + private static final String EXTENSION_PREFIX_CMF = ".cmf"; + private static final String EXTENSION_PS = ".ps"; + private static final String EXTENSION_MPEG = ".mpeg"; + private static final String EXTENSION_MPG = ".mpg"; + private static final String EXTENSION_M2P = ".m2p"; + private static final String EXTENSION_TS = ".ts"; + private static final String EXTENSION_PREFIX_TS = ".ts"; + private static final String EXTENSION_WAV = ".wav"; + private static final String EXTENSION_WAVE = ".wave"; + private static final String EXTENSION_VTT = ".vtt"; + private static final String EXTENSION_WEBVTT = ".webvtt"; + private static final String EXTENSION_JPG = ".jpg"; + private static final String EXTENSION_JPEG = ".jpeg"; + private static final String EXTENSION_AVI = ".avi"; + + private FileTypes() {} + + /** Returns the {@link Type} corresponding to the response headers provided. */ + public static @FileTypes.Type int inferFileTypeFromResponseHeaders( + Map> responseHeaders) { + @Nullable List contentTypes = responseHeaders.get(HEADER_CONTENT_TYPE); + @Nullable + String mimeType = contentTypes == null || contentTypes.isEmpty() ? null : contentTypes.get(0); + return inferFileTypeFromMimeType(mimeType); + } + + /** + * Returns the {@link Type} corresponding to the MIME type provided. + * + *
      Returns {@link #UNKNOWN} if the mime type is {@code null}. + */ + public static @FileTypes.Type int inferFileTypeFromMimeType(@Nullable String mimeType) { + if (mimeType == null) { + return FileTypes.UNKNOWN; + } + mimeType = normalizeMimeType(mimeType); + switch (mimeType) { + case MimeTypes.AUDIO_AC3: + case MimeTypes.AUDIO_E_AC3: + case MimeTypes.AUDIO_E_AC3_JOC: + return FileTypes.AC3; + case MimeTypes.AUDIO_AC4: + return FileTypes.AC4; + case MimeTypes.AUDIO_AMR: + case MimeTypes.AUDIO_AMR_NB: + case MimeTypes.AUDIO_AMR_WB: + return FileTypes.AMR; + case MimeTypes.AUDIO_FLAC: + return FileTypes.FLAC; + case MimeTypes.VIDEO_FLV: + return FileTypes.FLV; + case MimeTypes.AUDIO_MIDI: + return FileTypes.MIDI; + case MimeTypes.VIDEO_MATROSKA: + case MimeTypes.AUDIO_MATROSKA: + case MimeTypes.VIDEO_WEBM: + case MimeTypes.AUDIO_WEBM: + case MimeTypes.APPLICATION_WEBM: + return FileTypes.MATROSKA; + case MimeTypes.AUDIO_MPEG: + return FileTypes.MP3; + case MimeTypes.VIDEO_MP4: + case MimeTypes.AUDIO_MP4: + case MimeTypes.APPLICATION_MP4: + return FileTypes.MP4; + case MimeTypes.AUDIO_OGG: + return FileTypes.OGG; + case MimeTypes.VIDEO_PS: + return FileTypes.PS; + case MimeTypes.VIDEO_MP2T: + return FileTypes.TS; + case MimeTypes.AUDIO_WAV: + return FileTypes.WAV; + case MimeTypes.TEXT_VTT: + return FileTypes.WEBVTT; + case MimeTypes.IMAGE_JPEG: + return FileTypes.JPEG; + case MimeTypes.VIDEO_AVI: + return FileTypes.AVI; + default: + return FileTypes.UNKNOWN; + } + } + + /** Returns the {@link Type} corresponding to the {@link Uri} provided. */ + public static @FileTypes.Type int inferFileTypeFromUri(Uri uri) { + @Nullable String filename = uri.getLastPathSegment(); + if (filename == null) { + return FileTypes.UNKNOWN; + } else if (filename.endsWith(EXTENSION_AC3) || filename.endsWith(EXTENSION_EC3)) { + return FileTypes.AC3; + } else if (filename.endsWith(EXTENSION_AC4)) { + return FileTypes.AC4; + } else if (filename.endsWith(EXTENSION_ADTS) || filename.endsWith(EXTENSION_AAC)) { + return FileTypes.ADTS; + } else if (filename.endsWith(EXTENSION_AMR)) { + return FileTypes.AMR; + } else if (filename.endsWith(EXTENSION_FLAC)) { + return FileTypes.FLAC; + } else if (filename.endsWith(EXTENSION_FLV)) { + return FileTypes.FLV; + } else if (filename.endsWith(EXTENSION_MID) + || filename.endsWith(EXTENSION_MIDI) + || filename.endsWith(EXTENSION_SMF)) { + return FileTypes.MIDI; + } else if (filename.startsWith( + EXTENSION_PREFIX_MK, + /* toffset= */ filename.length() - (EXTENSION_PREFIX_MK.length() + 1)) + || filename.endsWith(EXTENSION_WEBM)) { + return FileTypes.MATROSKA; + } else if (filename.endsWith(EXTENSION_MP3)) { + return FileTypes.MP3; + } else if (filename.endsWith(EXTENSION_MP4) + || filename.startsWith( + EXTENSION_PREFIX_M4, + /* toffset= */ filename.length() - (EXTENSION_PREFIX_M4.length() + 1)) + || filename.startsWith( + EXTENSION_PREFIX_MP4, + /* toffset= */ filename.length() - (EXTENSION_PREFIX_MP4.length() + 1)) + || filename.startsWith( + EXTENSION_PREFIX_CMF, + /* toffset= */ filename.length() - (EXTENSION_PREFIX_CMF.length() + 1))) { + return FileTypes.MP4; + } else if (filename.startsWith( + EXTENSION_PREFIX_OG, + /* toffset= */ filename.length() - (EXTENSION_PREFIX_OG.length() + 1)) + || filename.endsWith(EXTENSION_OPUS)) { + return FileTypes.OGG; + } else if (filename.endsWith(EXTENSION_PS) + || filename.endsWith(EXTENSION_MPEG) + || filename.endsWith(EXTENSION_MPG) + || filename.endsWith(EXTENSION_M2P)) { + return FileTypes.PS; + } else if 
(filename.endsWith(EXTENSION_TS) + || filename.startsWith( + EXTENSION_PREFIX_TS, + /* toffset= */ filename.length() - (EXTENSION_PREFIX_TS.length() + 1))) { + return FileTypes.TS; + } else if (filename.endsWith(EXTENSION_WAV) || filename.endsWith(EXTENSION_WAVE)) { + return FileTypes.WAV; + } else if (filename.endsWith(EXTENSION_VTT) || filename.endsWith(EXTENSION_WEBVTT)) { + return FileTypes.WEBVTT; + } else if (filename.endsWith(EXTENSION_JPG) || filename.endsWith(EXTENSION_JPEG)) { + return FileTypes.JPEG; + } else if (filename.endsWith(EXTENSION_AVI)) { + return FileTypes.AVI; + } else { + return FileTypes.UNKNOWN; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FlagSet.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FlagSet.java new file mode 100644 index 0000000000..4afff25dd8 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FlagSet.java @@ -0,0 +1,251 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import static com.google.android.exoplayer2.util.Assertions.checkIndex; +import static com.google.android.exoplayer2.util.Assertions.checkState; + +import android.util.SparseBooleanArray; +import androidx.annotation.Nullable; +import com.google.errorprone.annotations.CanIgnoreReturnValue; + +/** + * A set of integer flags. + * + *
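The FileTypes helper completed above is pure classification: it maps HTTP response headers, MIME types, or URIs onto the container constants declared at the top of the file, falling back to UNKNOWN when nothing matches. A short usage sketch (illustrative only; the URL and MIME strings are made up):

    // Sketch only; inputs are hypothetical.
    @FileTypes.Type int fromUri = FileTypes.inferFileTypeFromUri(Uri.parse("https://example.com/clip.mp4")); // MP4
    @FileTypes.Type int fromMime = FileTypes.inferFileTypeFromMimeType("audio/mpeg");                        // MP3
    @FileTypes.Type int fallback = FileTypes.inferFileTypeFromMimeType(null);                                // UNKNOWN
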
      Intended for usages where the number of flags may exceed 32 and can no longer be represented + * by an IntDef. + * + *
      Instances are immutable. + */ +public final class FlagSet { + + /** A builder for {@link FlagSet} instances. */ + public static final class Builder { + + private final SparseBooleanArray flags; + + private boolean buildCalled; + + /** Creates a builder. */ + public Builder() { + flags = new SparseBooleanArray(); + } + + /** + * Adds a flag. + * + * @param flag A flag. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder add(int flag) { + checkState(!buildCalled); + flags.append(flag, /* value= */ true); + return this; + } + + /** + * Adds a flag if the provided condition is true. Does nothing otherwise. + * + * @param flag A flag. + * @param condition A condition. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder addIf(int flag, boolean condition) { + if (condition) { + return add(flag); + } + return this; + } + + /** + * Adds flags. + * + * @param flags The flags to add. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder addAll(int... flags) { + for (int flag : flags) { + add(flag); + } + return this; + } + + /** + * Adds {@link FlagSet flags}. + * + * @param flags The set of flags to add. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder addAll(FlagSet flags) { + for (int i = 0; i < flags.size(); i++) { + add(flags.get(i)); + } + return this; + } + + /** + * Removes a flag. + * + * @param flag A flag. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder remove(int flag) { + checkState(!buildCalled); + flags.delete(flag); + return this; + } + + /** + * Removes a flag if the provided condition is true. Does nothing otherwise. + * + * @param flag A flag. + * @param condition A condition. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder removeIf(int flag, boolean condition) { + if (condition) { + return remove(flag); + } + return this; + } + + /** + * Removes flags. + * + * @param flags The flags to remove. + * @return This builder. + * @throws IllegalStateException If {@link #build()} has already been called. + */ + @CanIgnoreReturnValue + public Builder removeAll(int... flags) { + for (int flag : flags) { + remove(flag); + } + return this; + } + + /** + * Builds an {@link FlagSet} instance. + * + * @throws IllegalStateException If this method has already been called. + */ + public FlagSet build() { + checkState(!buildCalled); + buildCalled = true; + return new FlagSet(flags); + } + } + + // A SparseBooleanArray is used instead of a Set to avoid auto-boxing the flag values. + private final SparseBooleanArray flags; + + private FlagSet(SparseBooleanArray flags) { + this.flags = flags; + } + + /** + * Returns whether the set contains the given flag. + * + * @param flag The flag. + * @return Whether the set contains the flag. + */ + public boolean contains(int flag) { + return flags.get(flag); + } + + /** + * Returns whether the set contains at least one of the given flags. + * + * @param flags The flags. + * @return Whether the set contains at least one of the flags. 
+ */ + public boolean containsAny(int... flags) { + for (int flag : flags) { + if (contains(flag)) { + return true; + } + } + return false; + } + + /** Returns the number of flags in this set. */ + public int size() { + return flags.size(); + } + + /** + * Returns the flag at the given index. + * + * @param index The index. Must be between 0 (inclusive) and {@link #size()} (exclusive). + * @return The flag at the given index. + * @throws IndexOutOfBoundsException If index is outside the allowed range. + */ + public int get(int index) { + checkIndex(index, /* start= */ 0, /* limit= */ size()); + return flags.keyAt(index); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (!(o instanceof FlagSet)) { + return false; + } + FlagSet that = (FlagSet) o; + if (Util.SDK_INT < 24) { + // SparseBooleanArray.equals() is not implemented on API levels below 24. + if (size() != that.size()) { + return false; + } + for (int i = 0; i < size(); i++) { + if (get(i) != that.get(i)) { + return false; + } + } + return true; + } else { + return flags.equals(that.flags); + } + } + + @Override + public int hashCode() { + if (Util.SDK_INT < 24) { + // SparseBooleanArray.hashCode() is not implemented on API levels below 24. + int hashCode = size(); + for (int i = 0; i < size(); i++) { + hashCode = 31 * hashCode + get(i); + } + return hashCode; + } else { + return flags.hashCode(); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FrameInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FrameInfo.java new file mode 100644 index 0000000000..865bfec67e --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FrameInfo.java @@ -0,0 +1,57 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +/** Value class specifying information about a decoded video frame. */ +public class FrameInfo { + /** The width of the frame, in pixels. */ + public final int width; + /** The height of the frame, in pixels. */ + public final int height; + /** The ratio of width over height for each pixel. */ + public final float pixelWidthHeightRatio; + /** + * An offset in microseconds that is part of the input timestamps and should be ignored for + * processing but added back to the output timestamps. + * + *
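FlagSet, completed earlier in this hunk, is an immutable set of int flags backed by a SparseBooleanArray, intended for event-flag sets too large for an IntDef and passed around without boxing. A usage sketch (not from the patch; the flag constants and `condition` are invented for illustration):

    // Sketch only; EVENT_A, EVENT_B, EVENT_C and `condition` are hypothetical.
    FlagSet flags = new FlagSet.Builder()
        .add(EVENT_A)
        .addIf(EVENT_B, condition)
        .build();                       // the builder is single-use; calling build() twice throws
    boolean hasA = flags.contains(EVENT_A);
    boolean hasAny = flags.containsAny(EVENT_B, EVENT_C);
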
      The offset stays constant within a stream but changes in between streams to ensure that + * frame timestamps are always monotonically increasing. + */ + public final long streamOffsetUs; + + // TODO(b/227624622): Add color space information for HDR. + + /** + * Creates a new instance. + * + * @param width The width of the frame, in pixels. + * @param height The height of the frame, in pixels. + * @param pixelWidthHeightRatio The ratio of width over height for each pixel. + * @param streamOffsetUs An offset in microseconds that is part of the input timestamps and should + * be ignored for processing but added back to the output timestamps. + */ + public FrameInfo(int width, int height, float pixelWidthHeightRatio, long streamOffsetUs) { + checkArgument(width > 0, "width must be positive, but is: " + width); + checkArgument(height > 0, "height must be positive, but is: " + height); + + this.width = width; + this.height = height; + this.pixelWidthHeightRatio = pixelWidthHeightRatio; + this.streamOffsetUs = streamOffsetUs; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FrameProcessingException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FrameProcessingException.java new file mode 100644 index 0000000000..524b6dfd9d --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FrameProcessingException.java @@ -0,0 +1,110 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import com.google.android.exoplayer2.C; + +/** Thrown when an exception occurs while applying effects to video frames. */ +public final class FrameProcessingException extends Exception { + + /** + * Wraps the given exception in a {@code FrameProcessingException} if it is not already a {@code + * FrameProcessingException} and returns the exception otherwise. + */ + public static FrameProcessingException from(Exception exception) { + return from(exception, /* presentationTimeUs= */ C.TIME_UNSET); + } + + /** + * Wraps the given exception in a {@code FrameProcessingException} with the given timestamp if it + * is not already a {@code FrameProcessingException} and returns the exception otherwise. + */ + public static FrameProcessingException from(Exception exception, long presentationTimeUs) { + if (exception instanceof FrameProcessingException) { + return (FrameProcessingException) exception; + } else { + return new FrameProcessingException(exception, presentationTimeUs); + } + } + + /** + * The microsecond timestamp of the frame being processed while the exception occurred or {@link + * C#TIME_UNSET} if unknown. + */ + public final long presentationTimeUs; + + /** + * Creates an instance. + * + * @param message The detail message for this exception. + */ + public FrameProcessingException(String message) { + this(message, /* presentationTimeUs= */ C.TIME_UNSET); + } + + /** + * Creates an instance. 
+ * + * @param message The detail message for this exception. + * @param presentationTimeUs The timestamp of the frame for which the exception occurred. + */ + public FrameProcessingException(String message, long presentationTimeUs) { + super(message); + this.presentationTimeUs = presentationTimeUs; + } + + /** + * Creates an instance. + * + * @param message The detail message for this exception. + * @param cause The cause of this exception. + */ + public FrameProcessingException(String message, Throwable cause) { + this(message, cause, /* presentationTimeUs= */ C.TIME_UNSET); + } + + /** + * Creates an instance. + * + * @param message The detail message for this exception. + * @param cause The cause of this exception. + * @param presentationTimeUs The timestamp of the frame for which the exception occurred. + */ + public FrameProcessingException(String message, Throwable cause, long presentationTimeUs) { + super(message, cause); + this.presentationTimeUs = presentationTimeUs; + } + + /** + * Creates an instance. + * + * @param cause The cause of this exception. + */ + public FrameProcessingException(Throwable cause) { + this(cause, /* presentationTimeUs= */ C.TIME_UNSET); + } + + /** + * Creates an instance. + * + * @param cause The cause of this exception. + * @param presentationTimeUs The timestamp of the frame for which the exception occurred. + */ + public FrameProcessingException(Throwable cause, long presentationTimeUs) { + super(cause); + this.presentationTimeUs = presentationTimeUs; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FrameProcessor.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FrameProcessor.java new file mode 100644 index 0000000000..23ba3e7ab4 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/FrameProcessor.java @@ -0,0 +1,204 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import android.content.Context; +import android.opengl.EGLExt; +import android.view.Surface; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.video.ColorInfo; +import java.util.List; + +/** + * Interface for a frame processor that applies changes to individual video frames. + * + *
      The changes are specified by {@link Effect} instances passed to {@link Factory#create}. + * + *
      Manages its input {@link Surface}, which can be accessed via {@link #getInputSurface()}. The + * output {@link Surface} must be set by the caller using {@link + * #setOutputSurfaceInfo(SurfaceInfo)}. + * + *
      The caller must {@linkplain #registerInputFrame() register} input frames before rendering them + * to the input {@link Surface}. + */ +public interface FrameProcessor { + // TODO(b/243036513): Allow effects to be replaced. + + /** A factory for {@link FrameProcessor} instances. */ + interface Factory { + /** + * Creates a new {@link FrameProcessor} instance. + * + * @param context A {@link Context}. + * @param listener A {@link Listener}. + * @param effects The {@link Effect} instances to apply to each frame. + * @param debugViewProvider A {@link DebugViewProvider}. + * @param colorInfo The {@link ColorInfo} for input and output frames. + * @param releaseFramesAutomatically If {@code true}, the {@link FrameProcessor} will render + * output frames to the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface} + * automatically as {@link FrameProcessor} is done processing them. If {@code false}, the + * {@link FrameProcessor} will block until {@link #releaseOutputFrame(long)} is called, to + * render or drop the frame. + * @return A new instance. + * @throws FrameProcessingException If a problem occurs while creating the {@link + * FrameProcessor}. + */ + FrameProcessor create( + Context context, + Listener listener, + List effects, + DebugViewProvider debugViewProvider, + ColorInfo colorInfo, + boolean releaseFramesAutomatically) + throws FrameProcessingException; + } + + /** + * Listener for asynchronous frame processing events. + * + *
      All listener methods must be called from the same thread. + */ + interface Listener { + + /** + * Called when the output size changes. + * + *
      The output size is the frame size in pixels after applying all {@linkplain Effect + * effects}. + * + *
      The output size may differ from the size specified using {@link + * #setOutputSurfaceInfo(SurfaceInfo)}. + */ + void onOutputSizeChanged(int width, int height); + + /** + * Called when an output frame with the given {@code presentationTimeUs} becomes available. + * + * @param presentationTimeUs The presentation time of the frame, in microseconds. + */ + void onOutputFrameAvailable(long presentationTimeUs); + + /** + * Called when an exception occurs during asynchronous frame processing. + * + *
      If an error occurred, consuming and producing further frames will not work as expected and + * the {@link FrameProcessor} should be released. + */ + void onFrameProcessingError(FrameProcessingException exception); + + /** Called after the {@link FrameProcessor} has produced its final output frame. */ + void onFrameProcessingEnded(); + } + + /** + * Indicates the frame should be released immediately after {@link #releaseOutputFrame(long)} is + * invoked. + */ + long RELEASE_OUTPUT_FRAME_IMMEDIATELY = -1; + + /** Indicates the frame should be dropped after {@link #releaseOutputFrame(long)} is invoked. */ + long DROP_OUTPUT_FRAME = -2; + + /** Returns the input {@link Surface}, where {@link FrameProcessor} consumes input frames from. */ + Surface getInputSurface(); + + /** + * Sets information about the input frames. + * + *
      The new input information is applied from the next frame {@linkplain #registerInputFrame() + * registered} onwards. + * + *
      Pixels are expanded using the {@link FrameInfo#pixelWidthHeightRatio} so that the output + * frames' pixels have a ratio of 1. + * + *
      The caller should update {@link FrameInfo#streamOffsetUs} when switching input streams to + * ensure that frame timestamps are always monotonically increasing. + */ + void setInputFrameInfo(FrameInfo inputFrameInfo); + + /** + * Informs the {@code FrameProcessor} that a frame will be queued to its input surface. + * + *
      Must be called before rendering a frame to the frame processor's input surface. + * + * @throws IllegalStateException If called after {@link #signalEndOfInput()} or before {@link + * #setInputFrameInfo(FrameInfo)}. + */ + void registerInputFrame(); + + /** + * Returns the number of input frames that have been {@linkplain #registerInputFrame() registered} + * but not processed off the {@linkplain #getInputSurface() input surface} yet. + */ + int getPendingInputFrameCount(); + + /** + * Sets the output surface and supporting information. When output frames are released and not + * dropped, they will be rendered to this output {@link SurfaceInfo}. + * + *
      The new output {@link SurfaceInfo} is applied from the next output frame rendered onwards. + * If the output {@link SurfaceInfo} is {@code null}, the {@code FrameProcessor} will stop + * rendering pending frames and resume rendering once a non-null {@link SurfaceInfo} is set. + * + *
      If the dimensions given in {@link SurfaceInfo} do not match the {@linkplain + * Listener#onOutputSizeChanged(int,int) output size after applying the final effect} the frames + * are resized before rendering to the surface and letter/pillar-boxing is applied. + * + *
      The caller is responsible for tracking the lifecycle of the {@link SurfaceInfo#surface} + * including calling this method with a new surface if it is destroyed. When this method returns, + * the previous output surface is no longer being used and can safely be released by the caller. + */ + void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo); + + /** + * Releases the oldest unreleased output frame that has become {@linkplain + * Listener#onOutputFrameAvailable(long) available} at the given {@code releaseTimeNs}. + * + *
      This will either render the output frame to the {@linkplain #setOutputSurfaceInfo output + * surface}, or drop the frame, per {@code releaseTimeNs}. + * + *
      This method must only be called if {@code releaseFramesAutomatically} was set to {@code + * false} using the {@link Factory} and should be called exactly once for each frame that becomes + * {@linkplain Listener#onOutputFrameAvailable(long) available}. + * + *
      The {@code releaseTimeNs} may be passed to {@link EGLExt#eglPresentationTimeANDROID} + * depending on the implementation. + * + * @param releaseTimeNs The release time to use for the frame, in nanoseconds. The release time + * can be before or after the current system time. Use {@link #DROP_OUTPUT_FRAME} to drop the + * frame, or {@link #RELEASE_OUTPUT_FRAME_IMMEDIATELY} to release the frame immediately. + */ + void releaseOutputFrame(long releaseTimeNs); + + /** + * Informs the {@code FrameProcessor} that no further input frames should be accepted. + * + * @throws IllegalStateException If called more than once. + */ + void signalEndOfInput(); + + /** + * Releases all resources. + * + *
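When releaseFramesAutomatically is false, the releaseOutputFrame contract above means every frame reported through Listener.onOutputFrameAvailable must be answered by exactly one releaseOutputFrame call. A hedged sketch of that hand-off inside a listener implementation (`frameProcessor` and `isTooLate` are assumed, not from the patch):

    // Sketch only; `frameProcessor` is a field assigned elsewhere, `isTooLate` a hypothetical scheduling helper.
    @Override
    public void onOutputFrameAvailable(long presentationTimeUs) {
      if (isTooLate(presentationTimeUs)) {
        frameProcessor.releaseOutputFrame(FrameProcessor.DROP_OUTPUT_FRAME);                 // drop late frames
      } else {
        frameProcessor.releaseOutputFrame(FrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY);  // render right away
      }
    }
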
      If the frame processor is released before it has {@linkplain + * Listener#onFrameProcessingEnded() ended}, it will attempt to cancel processing any input frames + * that have already become available. Input frames that become available after release are + * ignored. + * + *
      This method blocks until all resources are released or releasing times out. + */ + void release(); +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/GlProgram.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/GlProgram.java new file mode 100644 index 0000000000..18a1cc60bc --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/GlProgram.java @@ -0,0 +1,438 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import android.content.Context; +import android.opengl.GLES11Ext; +import android.opengl.GLES20; +import androidx.annotation.Nullable; +import java.io.IOException; +import java.io.InputStream; +import java.nio.Buffer; +import java.util.HashMap; +import java.util.Map; + +/** + * Represents a GLSL shader program. + * + *
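Taken together, the FrameProcessor interface completed above defines a create / feed / drain lifecycle. A rough end-to-end sketch under stated assumptions: `factory`, `listener`, `effects`, `debugViewProvider`, `colorInfo`, `outputSurfaceInfo` and the frame dimensions are all placeholders, not defined by this patch.

    // Sketch only; every lower-case identifier below is an assumed placeholder.
    FrameProcessor frameProcessor =
        factory.create(context, listener, effects, debugViewProvider, colorInfo,
            /* releaseFramesAutomatically= */ true);
    frameProcessor.setOutputSurfaceInfo(outputSurfaceInfo);
    frameProcessor.setInputFrameInfo(
        new FrameInfo(/* width= */ 1280, /* height= */ 720,
            /* pixelWidthHeightRatio= */ 1f, /* streamOffsetUs= */ 0));
    frameProcessor.registerInputFrame();            // once per frame, before drawing to the input surface
    // ... render the frame to frameProcessor.getInputSurface() ...
    frameProcessor.signalEndOfInput();
    frameProcessor.release();
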
      After constructing a program, keep a reference for its lifetime and call {@link #delete()} (or + * release the current GL context) when it's no longer needed. + */ +public final class GlProgram { + + // https://www.khronos.org/registry/OpenGL/extensions/EXT/EXT_YUV_target.txt + private static final int GL_SAMPLER_EXTERNAL_2D_Y2Y_EXT = 0x8BE7; + /** The identifier of a compiled and linked GLSL shader program. */ + private final int programId; + + private final Attribute[] attributes; + private final Uniform[] uniforms; + private final Map attributeByName; + private final Map uniformByName; + + /** + * Compiles a GL shader program from vertex and fragment shader GLSL GLES20 code. + * + * @param context The {@link Context}. + * @param vertexShaderFilePath The path to a vertex shader program. + * @param fragmentShaderFilePath The path to a fragment shader program. + * @throws IOException When failing to read shader files. + */ + public GlProgram(Context context, String vertexShaderFilePath, String fragmentShaderFilePath) + throws IOException, GlUtil.GlException { + this(loadAsset(context, vertexShaderFilePath), loadAsset(context, fragmentShaderFilePath)); + } + + /** + * Loads a file from the assets folder. + * + * @param context The {@link Context}. + * @param assetPath The path to the file to load, from the assets folder. + * @return The content of the file to load. + * @throws IOException If the file couldn't be read. + */ + public static String loadAsset(Context context, String assetPath) throws IOException { + @Nullable InputStream inputStream = null; + try { + inputStream = context.getAssets().open(assetPath); + return Util.fromUtf8Bytes(Util.toByteArray(inputStream)); + } finally { + Util.closeQuietly(inputStream); + } + } + + /** + * Creates a GL shader program from vertex and fragment shader GLSL GLES20 code. + * + *
      This involves slow steps, like compiling, linking, and switching the GL program, so do not + * call this in fast rendering loops. + * + * @param vertexShaderGlsl The vertex shader program. + * @param fragmentShaderGlsl The fragment shader program. + */ + public GlProgram(String vertexShaderGlsl, String fragmentShaderGlsl) throws GlUtil.GlException { + programId = GLES20.glCreateProgram(); + GlUtil.checkGlError(); + + // Add the vertex and fragment shaders. + addShader(programId, GLES20.GL_VERTEX_SHADER, vertexShaderGlsl); + addShader(programId, GLES20.GL_FRAGMENT_SHADER, fragmentShaderGlsl); + + // Link and use the program, and enumerate attributes/uniforms. + GLES20.glLinkProgram(programId); + int[] linkStatus = new int[] {GLES20.GL_FALSE}; + GLES20.glGetProgramiv(programId, GLES20.GL_LINK_STATUS, linkStatus, /* offset= */ 0); + GlUtil.checkGlException( + linkStatus[0] == GLES20.GL_TRUE, + "Unable to link shader program: \n" + GLES20.glGetProgramInfoLog(programId)); + GLES20.glUseProgram(programId); + attributeByName = new HashMap<>(); + int[] attributeCount = new int[1]; + GLES20.glGetProgramiv(programId, GLES20.GL_ACTIVE_ATTRIBUTES, attributeCount, /* offset= */ 0); + attributes = new Attribute[attributeCount[0]]; + for (int i = 0; i < attributeCount[0]; i++) { + Attribute attribute = Attribute.create(programId, i); + attributes[i] = attribute; + attributeByName.put(attribute.name, attribute); + } + uniformByName = new HashMap<>(); + int[] uniformCount = new int[1]; + GLES20.glGetProgramiv(programId, GLES20.GL_ACTIVE_UNIFORMS, uniformCount, /* offset= */ 0); + uniforms = new Uniform[uniformCount[0]]; + for (int i = 0; i < uniformCount[0]; i++) { + Uniform uniform = Uniform.create(programId, i); + uniforms[i] = uniform; + uniformByName.put(uniform.name, uniform); + } + GlUtil.checkGlError(); + } + + private static void addShader(int programId, int type, String glsl) throws GlUtil.GlException { + int shader = GLES20.glCreateShader(type); + GLES20.glShaderSource(shader, glsl); + GLES20.glCompileShader(shader); + + int[] result = new int[] {GLES20.GL_FALSE}; + GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, /* offset= */ 0); + GlUtil.checkGlException( + result[0] == GLES20.GL_TRUE, GLES20.glGetShaderInfoLog(shader) + ", source: " + glsl); + + GLES20.glAttachShader(programId, shader); + GLES20.glDeleteShader(shader); + GlUtil.checkGlError(); + } + + private static int getAttributeLocation(int programId, String attributeName) { + return GLES20.glGetAttribLocation(programId, attributeName); + } + + /** Returns the location of an {@link Attribute}. */ + private int getAttributeLocation(String attributeName) { + return getAttributeLocation(programId, attributeName); + } + + private static int getUniformLocation(int programId, String uniformName) { + return GLES20.glGetUniformLocation(programId, uniformName); + } + + /** Returns the location of a {@link Uniform}. */ + public int getUniformLocation(String uniformName) { + return getUniformLocation(programId, uniformName); + } + + /** + * Uses the program. + * + *
      Call this in the rendering loop to switch between different programs. + */ + public void use() throws GlUtil.GlException { + GLES20.glUseProgram(programId); + GlUtil.checkGlError(); + } + + /** Deletes the program. Deleted programs cannot be used again. */ + public void delete() throws GlUtil.GlException { + GLES20.glDeleteProgram(programId); + GlUtil.checkGlError(); + } + + /** + * Returns the location of an {@link Attribute}, which has been enabled as a vertex attribute + * array. + */ + public int getAttributeArrayLocationAndEnable(String attributeName) throws GlUtil.GlException { + int location = getAttributeLocation(attributeName); + GLES20.glEnableVertexAttribArray(location); + GlUtil.checkGlError(); + return location; + } + + /** Sets a float buffer type attribute. */ + public void setBufferAttribute(String name, float[] values, int size) { + checkNotNull(attributeByName.get(name)).setBuffer(values, size); + } + + /** + * Sets a texture sampler type uniform. + * + * @param name The uniform's name. + * @param texId The texture identifier. + * @param texUnitIndex The texture unit index. Use a different index (0, 1, 2, ...) for each + * texture sampler in the program. + */ + public void setSamplerTexIdUniform(String name, int texId, int texUnitIndex) { + checkNotNull(uniformByName.get(name)).setSamplerTexId(texId, texUnitIndex); + } + + /** Sets an {@code int} type uniform. */ + public void setIntUniform(String name, int value) { + checkNotNull(uniformByName.get(name)).setInt(value); + } + + /** Sets a {@code float} type uniform. */ + public void setFloatUniform(String name, float value) { + checkNotNull(uniformByName.get(name)).setFloat(value); + } + + /** Sets a {@code float[]} type uniform. */ + public void setFloatsUniform(String name, float[] value) { + checkNotNull(uniformByName.get(name)).setFloats(value); + } + + /** Binds all attributes and uniforms in the program. */ + public void bindAttributesAndUniforms() throws GlUtil.GlException { + for (Attribute attribute : attributes) { + attribute.bind(); + } + for (Uniform uniform : uniforms) { + uniform.bind(); + } + } + + /** Returns the length of the null-terminated C string in {@code cString}. */ + private static int getCStringLength(byte[] cString) { + for (int i = 0; i < cString.length; ++i) { + if (cString[i] == '\0') { + return i; + } + } + return cString.length; + } + + /** + * GL attribute, which can be attached to a buffer with {@link Attribute#setBuffer(float[], int)}. + */ + private static final class Attribute { + + /* Returns the attribute at the given index in the program. */ + public static Attribute create(int programId, int index) { + int[] length = new int[1]; + GLES20.glGetProgramiv( + programId, GLES20.GL_ACTIVE_ATTRIBUTE_MAX_LENGTH, length, /* offset= */ 0); + byte[] nameBytes = new byte[length[0]]; + + GLES20.glGetActiveAttrib( + programId, + index, + length[0], + /* unusedLength */ new int[1], + /* lengthOffset= */ 0, + /* unusedSize */ new int[1], + /* sizeOffset= */ 0, + /* unusedType */ new int[1], + /* typeOffset= */ 0, + nameBytes, + /* nameOffset= */ 0); + String name = new String(nameBytes, /* offset= */ 0, getCStringLength(nameBytes)); + int location = getAttributeLocation(programId, name); + + return new Attribute(name, index, location); + } + + /** The name of the attribute in the GLSL sources. 
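GlProgram, added above, hides shader compilation, linking and attribute/uniform bookkeeping behind a small API. A rough render-loop sketch (illustrative only: the GLSL sources, the attribute/uniform names, `positions`, `transformationMatrix` and `texId` are assumptions, a current GL context is required, and GlUtil.GlException handling is omitted for brevity):

    // Sketch only; shader sources and names are hypothetical.
    GlProgram program = new GlProgram(vertexShaderGlsl, fragmentShaderGlsl);
    program.setBufferAttribute("aFramePosition", positions, /* size= */ 4);
    program.setSamplerTexIdUniform("uTexSampler", texId, /* texUnitIndex= */ 0);
    program.setFloatsUniform("uTransformationMatrix", transformationMatrix);

    program.use();                        // per draw call
    program.bindAttributesAndUniforms();
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);

    program.delete();                     // once the program is no longer needed
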
*/ + public final String name; + + private final int index; + private final int location; + + @Nullable private Buffer buffer; + private int size; + + private Attribute(String name, int index, int location) { + this.name = name; + this.index = index; + this.location = location; + } + + /** + * Configures {@link #bind()} to attach vertices in {@code buffer} (each of size {@code size} + * elements) to this {@link Attribute}. + * + * @param buffer Buffer to bind to this attribute. + * @param size Number of elements per vertex. + */ + public void setBuffer(float[] buffer, int size) { + this.buffer = GlUtil.createBuffer(buffer); + this.size = size; + } + + /** + * Sets the vertex attribute to whatever was attached via {@link #setBuffer(float[], int)}. + * + *
      Should be called before each drawing call. + */ + public void bind() throws GlUtil.GlException { + Buffer buffer = checkNotNull(this.buffer, "call setBuffer before bind"); + GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, /* buffer= */ 0); + GLES20.glVertexAttribPointer( + location, size, GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0, buffer); + GLES20.glEnableVertexAttribArray(index); + GlUtil.checkGlError(); + } + } + + /** + * GL uniform, which can be attached to a sampler using {@link Uniform#setSamplerTexId(int, int)}. + */ + private static final class Uniform { + + /** Returns the uniform at the given index in the program. */ + public static Uniform create(int programId, int index) { + int[] length = new int[1]; + GLES20.glGetProgramiv( + programId, GLES20.GL_ACTIVE_UNIFORM_MAX_LENGTH, length, /* offset= */ 0); + + int[] type = new int[1]; + byte[] nameBytes = new byte[length[0]]; + + GLES20.glGetActiveUniform( + programId, + index, + length[0], + /* unusedLength */ new int[1], + /* lengthOffset= */ 0, + /* unusedSize */ new int[1], + /*sizeOffset= */ 0, + type, + /* typeOffset= */ 0, + nameBytes, + /* nameOffset= */ 0); + String name = new String(nameBytes, /* offset= */ 0, getCStringLength(nameBytes)); + int location = getUniformLocation(programId, name); + + return new Uniform(name, location, type[0]); + } + + /** The name of the uniform in the GLSL sources. */ + public final String name; + + private final int location; + private final int type; + private final float[] floatValue; + + private int intValue; + private int texIdValue; + private int texUnitIndex; + + private Uniform(String name, int location, int type) { + this.name = name; + this.location = location; + this.type = type; + this.floatValue = new float[16]; + } + + /** + * Configures {@link #bind()} to use the specified {@code texId} for this sampler uniform. + * + * @param texId The GL texture identifier from which to sample. + * @param texUnitIndex The GL texture unit index. + */ + public void setSamplerTexId(int texId, int texUnitIndex) { + this.texIdValue = texId; + this.texUnitIndex = texUnitIndex; + } + /** Configures {@link #bind()} to use the specified {@code int} {@code value}. */ + public void setInt(int value) { + this.intValue = value; + } + + /** Configures {@link #bind()} to use the specified {@code float} {@code value}. */ + public void setFloat(float value) { + this.floatValue[0] = value; + } + + /** Configures {@link #bind()} to use the specified {@code float[]} {@code value}. */ + public void setFloats(float[] value) { + System.arraycopy(value, /* srcPos= */ 0, this.floatValue, /* destPos= */ 0, value.length); + } + + /** + * Sets the uniform to whatever value was passed via {@link #setSamplerTexId(int, int)}, {@link + * #setFloat(float)} or {@link #setFloats(float[])}. + * + *
      Should be called before each drawing call. + */ + public void bind() throws GlUtil.GlException { + switch (type) { + case GLES20.GL_INT: + GLES20.glUniform1i(location, intValue); + break; + case GLES20.GL_FLOAT: + GLES20.glUniform1fv(location, /* count= */ 1, floatValue, /* offset= */ 0); + GlUtil.checkGlError(); + break; + case GLES20.GL_FLOAT_VEC2: + GLES20.glUniform2fv(location, /* count= */ 1, floatValue, /* offset= */ 0); + GlUtil.checkGlError(); + break; + case GLES20.GL_FLOAT_VEC3: + GLES20.glUniform3fv(location, /* count= */ 1, floatValue, /* offset= */ 0); + GlUtil.checkGlError(); + break; + case GLES20.GL_FLOAT_MAT3: + GLES20.glUniformMatrix3fv( + location, /* count= */ 1, /* transpose= */ false, floatValue, /* offset= */ 0); + GlUtil.checkGlError(); + break; + case GLES20.GL_FLOAT_MAT4: + GLES20.glUniformMatrix4fv( + location, /* count= */ 1, /* transpose= */ false, floatValue, /* offset= */ 0); + GlUtil.checkGlError(); + break; + case GLES20.GL_SAMPLER_2D: + case GLES11Ext.GL_SAMPLER_EXTERNAL_OES: + case GL_SAMPLER_EXTERNAL_2D_Y2Y_EXT: + if (texIdValue == 0) { + throw new IllegalStateException("No call to setSamplerTexId() before bind."); + } + GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + texUnitIndex); + GlUtil.checkGlError(); + GlUtil.bindTexture( + type == GLES20.GL_SAMPLER_2D + ? GLES20.GL_TEXTURE_2D + : GLES11Ext.GL_TEXTURE_EXTERNAL_OES, + texIdValue); + GLES20.glUniform1i(location, texUnitIndex); + GlUtil.checkGlError(); + break; + default: + throw new IllegalStateException("Unexpected uniform type: " + type); + } + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/GlUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/GlUtil.java index cc4866118d..ca0968e823 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/GlUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/GlUtil.java @@ -16,199 +16,135 @@ package com.google.android.exoplayer2.util; import static android.opengl.GLU.gluErrorString; +import static com.google.android.exoplayer2.util.Assertions.checkArgument; +import static com.google.android.exoplayer2.util.Assertions.checkState; -import android.annotation.TargetApi; import android.content.Context; import android.content.pm.PackageManager; import android.opengl.EGL14; +import android.opengl.EGLConfig; +import android.opengl.EGLContext; import android.opengl.EGLDisplay; +import android.opengl.EGLSurface; import android.opengl.GLES11Ext; import android.opengl.GLES20; -import android.text.TextUtils; +import android.opengl.GLES30; +import android.opengl.Matrix; +import androidx.annotation.DoNotInline; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.ExoPlayerLibraryInfo; -import java.nio.Buffer; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.FloatBuffer; -import java.nio.IntBuffer; +import java.util.Arrays; +import java.util.List; import javax.microedition.khronos.egl.EGL10; -/** GL utilities. */ +/** OpenGL ES utilities. */ +@SuppressWarnings("InlinedApi") // GLES constants are used safely based on the API version. public final class GlUtil { - /** - * GL attribute, which can be attached to a buffer with {@link Attribute#setBuffer(float[], int)}. - */ - public static final class Attribute { - - /** The name of the attribute in the GLSL sources. 
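// Usage sketch (illustrative, not part of the patch above): drawing a full-screen quad with
// the new GlProgram wrapper. The shader sources, the attribute/uniform names
// ("aFramePosition", "uTexSampler", "uTransformationMatrix") and the method name are
// assumptions; only the GlProgram/GlUtil/GLES20 calls come from this file, and a current EGL
// context plus the String-based GlProgram constructor defined earlier in the class are assumed.
private static void drawTexturedQuad(
    String vertexShaderGlsl, String fragmentShaderGlsl, int texId) throws GlUtil.GlException {
  GlProgram program = new GlProgram(vertexShaderGlsl, fragmentShaderGlsl);
  program.use();
  // Four vertices in normalized device coordinates, 4 floats per vertex.
  program.setBufferAttribute(
      "aFramePosition",
      GlUtil.getNormalizedCoordinateBounds(),
      GlUtil.HOMOGENEOUS_COORDINATE_VECTOR_SIZE);
  program.setSamplerTexIdUniform("uTexSampler", texId, /* texUnitIndex= */ 0);
  program.setFloatsUniform("uTransformationMatrix", GlUtil.create4x4IdentityMatrix());
  program.bindAttributesAndUniforms();
  android.opengl.GLES20.glDrawArrays(
      android.opengl.GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
  GlUtil.checkGlError();
  program.delete();
}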
*/ - public final String name; - - private final int index; - private final int location; - - @Nullable private Buffer buffer; - private int size; - - /** - * Creates a new GL attribute. - * - * @param program The identifier of a compiled and linked GLSL shader program. - * @param index The index of the attribute. After this instance has been constructed, the name - * of the attribute is available via the {@link #name} field. - */ - public Attribute(int program, int index) { - int[] len = new int[1]; - GLES20.glGetProgramiv(program, GLES20.GL_ACTIVE_ATTRIBUTE_MAX_LENGTH, len, 0); - - int[] type = new int[1]; - int[] size = new int[1]; - byte[] nameBytes = new byte[len[0]]; - int[] ignore = new int[1]; - - GLES20.glGetActiveAttrib(program, index, len[0], ignore, 0, size, 0, type, 0, nameBytes, 0); - name = new String(nameBytes, 0, strlen(nameBytes)); - location = GLES20.glGetAttribLocation(program, name); - this.index = index; - } - - /** - * Configures {@link #bind()} to attach vertices in {@code buffer} (each of size {@code size} - * elements) to this {@link Attribute}. - * - * @param buffer Buffer to bind to this attribute. - * @param size Number of elements per vertex. - */ - public void setBuffer(float[] buffer, int size) { - this.buffer = createBuffer(buffer); - this.size = size; - } - - /** - * Sets the vertex attribute to whatever was attached via {@link #setBuffer(float[], int)}. - * - *
      Should be called before each drawing call. - */ - public void bind() { - Buffer buffer = Assertions.checkNotNull(this.buffer, "call setBuffer before bind"); - GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0); - GLES20.glVertexAttribPointer( - location, - size, // count - GLES20.GL_FLOAT, // type - false, // normalize - 0, // stride - buffer); - GLES20.glEnableVertexAttribArray(index); - checkGlError(); + /** Thrown when an OpenGL error occurs. */ + public static final class GlException extends Exception { + /** Creates an instance with the specified error message. */ + public GlException(String message) { + super(message); } } - /** - * GL uniform, which can be attached to a sampler using {@link Uniform#setSamplerTexId(int, int)}. - */ - public static final class Uniform { - - /** The name of the uniform in the GLSL sources. */ - public final String name; - - private final int location; - private final int type; - private final float[] value; - - private int texId; - private int unit; - - /** - * Creates a new GL uniform. - * - * @param program The identifier of a compiled and linked GLSL shader program. - * @param index The index of the uniform. After this instance has been constructed, the name of - * the uniform is available via the {@link #name} field. - */ - public Uniform(int program, int index) { - int[] len = new int[1]; - GLES20.glGetProgramiv(program, GLES20.GL_ACTIVE_UNIFORM_MAX_LENGTH, len, 0); - - int[] type = new int[1]; - int[] size = new int[1]; - byte[] name = new byte[len[0]]; - int[] ignore = new int[1]; - - GLES20.glGetActiveUniform(program, index, len[0], ignore, 0, size, 0, type, 0, name, 0); - this.name = new String(name, 0, strlen(name)); - location = GLES20.glGetUniformLocation(program, this.name); - this.type = type[0]; - - value = new float[1]; - } + /** Number of elements in a 3d homogeneous coordinate vector describing a vertex. */ + public static final int HOMOGENEOUS_COORDINATE_VECTOR_SIZE = 4; + + /** Length of the normalized device coordinate (NDC) space, which spans from -1 to 1. */ + public static final float LENGTH_NDC = 2f; + + public static final int[] EGL_CONFIG_ATTRIBUTES_RGBA_8888 = + new int[] { + EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT, + EGL14.EGL_RED_SIZE, /* redSize= */ 8, + EGL14.EGL_GREEN_SIZE, /* greenSize= */ 8, + EGL14.EGL_BLUE_SIZE, /* blueSize= */ 8, + EGL14.EGL_ALPHA_SIZE, /* alphaSize= */ 8, + EGL14.EGL_DEPTH_SIZE, /* depthSize= */ 0, + EGL14.EGL_STENCIL_SIZE, /* stencilSize= */ 0, + EGL14.EGL_NONE + }; + public static final int[] EGL_CONFIG_ATTRIBUTES_RGBA_1010102 = + new int[] { + EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT, + EGL14.EGL_RED_SIZE, /* redSize= */ 10, + EGL14.EGL_GREEN_SIZE, /* greenSize= */ 10, + EGL14.EGL_BLUE_SIZE, /* blueSize= */ 10, + EGL14.EGL_ALPHA_SIZE, /* alphaSize= */ 2, + EGL14.EGL_DEPTH_SIZE, /* depthSize= */ 0, + EGL14.EGL_STENCIL_SIZE, /* stencilSize= */ 0, + EGL14.EGL_NONE + }; + + // https://www.khronos.org/registry/EGL/extensions/EXT/EGL_EXT_protected_content.txt + private static final String EXTENSION_PROTECTED_CONTENT = "EGL_EXT_protected_content"; + // https://www.khronos.org/registry/EGL/extensions/KHR/EGL_KHR_surfaceless_context.txt + private static final String EXTENSION_SURFACELESS_CONTEXT = "EGL_KHR_surfaceless_context"; + // https://www.khronos.org/registry/OpenGL/extensions/EXT/EXT_YUV_target.txt + private static final String EXTENSION_YUV_TARGET = "GL_EXT_YUV_target"; - /** - * Configures {@link #bind()} to use the specified {@code texId} for this sampler uniform. 
- * - * @param texId The GL texture identifier from which to sample. - * @param unit The GL texture unit index. - */ - public void setSamplerTexId(int texId, int unit) { - this.texId = texId; - this.unit = unit; - } + private static final int[] EGL_WINDOW_SURFACE_ATTRIBUTES_NONE = new int[] {EGL14.EGL_NONE}; - /** Configures {@link #bind()} to use the specified float {@code value} for this uniform. */ - public void setFloat(float value) { - this.value[0] = value; - } + /** Class only contains static methods. */ + private GlUtil() {} - /** - * Sets the uniform to whatever value was passed via {@link #setSamplerTexId(int, int)} or - * {@link #setFloat(float)}. - * - *
      Should be called before each drawing call. - */ - public void bind() { - if (type == GLES20.GL_FLOAT) { - GLES20.glUniform1fv(location, 1, value, 0); - checkGlError(); - return; - } + /** Bounds of normalized device coordinates, commonly used for defining viewport boundaries. */ + public static float[] getNormalizedCoordinateBounds() { + return new float[] { + -1, -1, 0, 1, + 1, -1, 0, 1, + -1, 1, 0, 1, + 1, 1, 0, 1 + }; + } - if (texId == 0) { - throw new IllegalStateException("call setSamplerTexId before bind"); - } - GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + unit); - if (type == GLES11Ext.GL_SAMPLER_EXTERNAL_OES) { - GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texId); - } else if (type == GLES20.GL_SAMPLER_2D) { - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texId); - } else { - throw new IllegalStateException("unexpected uniform type: " + type); - } - GLES20.glUniform1i(location, unit); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); - GLES20.glTexParameteri( - GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameteri( - GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); - checkGlError(); - } + /** Typical bounds used for sampling from textures. */ + public static float[] getTextureCoordinateBounds() { + return new float[] { + 0, 0, 0, 1, + 1, 0, 0, 1, + 0, 1, 0, 1, + 1, 1, 0, 1 + }; } - private static final String TAG = "GlUtil"; + /** Creates a 4x4 identity matrix. */ + public static float[] create4x4IdentityMatrix() { + float[] matrix = new float[16]; + setToIdentity(matrix); + return matrix; + } - private static final String EXTENSION_PROTECTED_CONTENT = "EGL_EXT_protected_content"; - private static final String EXTENSION_SURFACELESS_CONTEXT = "EGL_KHR_surfaceless_context"; + /** Sets the input {@code matrix} to an identity matrix. */ + public static void setToIdentity(float[] matrix) { + Matrix.setIdentityM(matrix, /* smOffset= */ 0); + } - /** Class only contains static methods. */ - private GlUtil() {} + /** Flattens the list of 4 element NDC coordinate vectors into a buffer. */ + public static float[] createVertexBuffer(List vertexList) { + float[] vertexBuffer = new float[HOMOGENEOUS_COORDINATE_VECTOR_SIZE * vertexList.size()]; + for (int i = 0; i < vertexList.size(); i++) { + System.arraycopy( + /* src= */ vertexList.get(i), + /* srcPos= */ 0, + /* dest= */ vertexBuffer, + /* destPos= */ HOMOGENEOUS_COORDINATE_VECTOR_SIZE * i, + /* length= */ HOMOGENEOUS_COORDINATE_VECTOR_SIZE); + } + return vertexBuffer; + } /** - * Returns whether creating a GL context with {@value EXTENSION_PROTECTED_CONTENT} is possible. If - * {@code true}, the device supports a protected output path for DRM content when using GL. + * Returns whether creating a GL context with {@value #EXTENSION_PROTECTED_CONTENT} is possible. + * + *
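// Worked example (illustrative, not part of the patch): createVertexBuffer flattens a list of
// 4-element homogeneous vectors into a single float[]. The two input vertices below are
// arbitrary values chosen for the illustration, and the parameter type is assumed to be
// List<float[]>.
java.util.List<float[]> vertices =
    java.util.Arrays.asList(
        new float[] {-1f, -1f, 0f, 1f}, // bottom-left corner in NDC
        new float[] {1f, 1f, 0f, 1f}); // top-right corner in NDC
float[] flattened = GlUtil.createVertexBuffer(vertices);
// flattened == {-1, -1, 0, 1, 1, 1, 0, 1}, i.e. HOMOGENEOUS_COORDINATE_VECTOR_SIZE * 2 floats.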
      If {@code true}, the device supports a protected output path for DRM content when using GL. */ - @TargetApi(24) public static boolean isProtectedContentExtensionSupported(Context context) { if (Util.SDK_INT < 24) { return false; @@ -234,9 +170,12 @@ public static boolean isProtectedContentExtensionSupported(Context context) { } /** - * Returns whether creating a GL context with {@value EXTENSION_SURFACELESS_CONTEXT} is possible. + * Returns whether the {@value #EXTENSION_SURFACELESS_CONTEXT} extension is supported. + * + *
      This extension allows passing {@link EGL14#EGL_NO_SURFACE} for both the write and read + * surfaces in a call to {@link EGL14#eglMakeCurrent(EGLDisplay, EGLSurface, EGLSurface, + * EGLContext)}. */ - @TargetApi(17) public static boolean isSurfacelessContextExtensionSupported() { if (Util.SDK_INT < 17) { return false; @@ -247,87 +186,287 @@ public static boolean isSurfacelessContextExtensionSupported() { } /** - * If there is an OpenGl error, logs the error and if {@link - * ExoPlayerLibraryInfo#GL_ASSERTIONS_ENABLED} is true throws a {@link RuntimeException}. + * Returns whether the {@value #EXTENSION_YUV_TARGET} extension is supported. + * + *
      This extension allows sampling raw YUV values from an external texture, which is required + * for HDR. */ - public static void checkGlError() { - int lastError = GLES20.GL_NO_ERROR; - int error; - while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) { - Log.e(TAG, "glError " + gluErrorString(error)); - lastError = error; + public static boolean isYuvTargetExtensionSupported() { + if (Util.SDK_INT < 17) { + return false; } - if (ExoPlayerLibraryInfo.GL_ASSERTIONS_ENABLED && lastError != GLES20.GL_NO_ERROR) { - throw new RuntimeException("glError " + gluErrorString(lastError)); + + @Nullable String glExtensions; + if (Util.areEqual(EGL14.eglGetCurrentContext(), EGL14.EGL_NO_CONTEXT)) { + // Create a placeholder context and make it current to allow calling GLES20.glGetString(). + try { + EGLDisplay eglDisplay = createEglDisplay(); + EGLContext eglContext = createEglContext(eglDisplay); + focusPlaceholderEglSurface(eglContext, eglDisplay); + glExtensions = GLES20.glGetString(GLES20.GL_EXTENSIONS); + destroyEglContext(eglDisplay, eglContext); + } catch (GlException e) { + return false; + } + } else { + glExtensions = GLES20.glGetString(GLES20.GL_EXTENSIONS); } + + return glExtensions != null && glExtensions.contains(EXTENSION_YUV_TARGET); + } + + /** Returns an initialized default {@link EGLDisplay}. */ + @RequiresApi(17) + public static EGLDisplay createEglDisplay() throws GlException { + return Api17.createEglDisplay(); } /** - * Builds a GL shader program from vertex and fragment shader code. + * Creates a new {@link EGLContext} for the specified {@link EGLDisplay}. + * + *
      Configures the {@link EGLContext} with {@link #EGL_CONFIG_ATTRIBUTES_RGBA_8888} and OpenGL + * ES 2.0. * - * @param vertexCode GLES20 vertex shader program as arrays of strings. Strings are joined by - * adding a new line character in between each of them. - * @param fragmentCode GLES20 fragment shader program as arrays of strings. Strings are joined by - * adding a new line character in between each of them. - * @return GLES20 program id. + * @param eglDisplay The {@link EGLDisplay} to create an {@link EGLContext} for. */ - public static int compileProgram(String[] vertexCode, String[] fragmentCode) { - return compileProgram(TextUtils.join("\n", vertexCode), TextUtils.join("\n", fragmentCode)); + @RequiresApi(17) + public static EGLContext createEglContext(EGLDisplay eglDisplay) throws GlException { + return createEglContext(eglDisplay, EGL_CONFIG_ATTRIBUTES_RGBA_8888); } /** - * Builds a GL shader program from vertex and fragment shader code. + * Creates a new {@link EGLContext} for the specified {@link EGLDisplay}. * - * @param vertexCode GLES20 vertex shader program. - * @param fragmentCode GLES20 fragment shader program. - * @return GLES20 program id. + * @param eglDisplay The {@link EGLDisplay} to create an {@link EGLContext} for. + * @param configAttributes The attributes to configure EGL with. Accepts either {@link + * #EGL_CONFIG_ATTRIBUTES_RGBA_1010102}, which will request OpenGL ES 3.0, or {@link + * #EGL_CONFIG_ATTRIBUTES_RGBA_8888}, which will request OpenGL ES 2.0. */ - public static int compileProgram(String vertexCode, String fragmentCode) { - int program = GLES20.glCreateProgram(); - checkGlError(); + @RequiresApi(17) + public static EGLContext createEglContext(EGLDisplay eglDisplay, int[] configAttributes) + throws GlException { + checkArgument( + Arrays.equals(configAttributes, EGL_CONFIG_ATTRIBUTES_RGBA_8888) + || Arrays.equals(configAttributes, EGL_CONFIG_ATTRIBUTES_RGBA_1010102)); + return Api17.createEglContext( + eglDisplay, + /* version= */ Arrays.equals(configAttributes, EGL_CONFIG_ATTRIBUTES_RGBA_1010102) ? 3 : 2, + configAttributes); + } - // Add the vertex and fragment shaders. - addShader(GLES20.GL_VERTEX_SHADER, vertexCode, program); - addShader(GLES20.GL_FRAGMENT_SHADER, fragmentCode, program); + /** + * Returns a new {@link EGLSurface} wrapping the specified {@code surface}. + * + *
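// Usage sketch (illustrative, not part of the patch): the expected create/focus/teardown flow
// for these EGL helpers. The method name, the output surface parameter and the 1280x720
// viewport are assumptions; the GlUtil and EGL14 calls are the ones added or referenced in
// this file. Needs android.opengl.EGL14/EGLContext/EGLDisplay/EGLSurface and
// androidx.annotation.RequiresApi imports.
@RequiresApi(17)
private static void renderOneFrame(Object outputSurface) throws GlUtil.GlException {
  EGLDisplay eglDisplay = GlUtil.createEglDisplay();
  EGLContext eglContext = GlUtil.createEglContext(eglDisplay); // RGBA_8888, OpenGL ES 2.0.
  EGLSurface eglSurface = GlUtil.getEglSurface(eglDisplay, outputSurface);
  try {
    GlUtil.focusEglSurface(
        eglDisplay, eglContext, eglSurface, /* width= */ 1280, /* height= */ 720);
    GlUtil.clearOutputFrame();
    // ... issue draw calls here, e.g. via a GlProgram ...
    EGL14.eglSwapBuffers(eglDisplay, eglSurface);
  } finally {
    GlUtil.destroyEglContext(eglDisplay, eglContext);
  }
}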
      The {@link EGLSurface} will configure with {@link #EGL_CONFIG_ATTRIBUTES_RGBA_8888} and + * OpenGL ES 2.0. + * + * @param eglDisplay The {@link EGLDisplay} to attach the surface to. + * @param surface The surface to wrap; must be a surface, surface texture or surface holder. + */ + @RequiresApi(17) + public static EGLSurface getEglSurface(EGLDisplay eglDisplay, Object surface) throws GlException { + return Api17.getEglSurface( + eglDisplay, surface, EGL_CONFIG_ATTRIBUTES_RGBA_8888, EGL_WINDOW_SURFACE_ATTRIBUTES_NONE); + } - // Link and check for errors. - GLES20.glLinkProgram(program); - int[] linkStatus = new int[] {GLES20.GL_FALSE}; - GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); - if (linkStatus[0] != GLES20.GL_TRUE) { - throwGlError("Unable to link shader program: \n" + GLES20.glGetProgramInfoLog(program)); - } - checkGlError(); + /** + * Returns a new {@link EGLSurface} wrapping the specified {@code surface}. + * + * @param eglDisplay The {@link EGLDisplay} to attach the surface to. + * @param surface The surface to wrap; must be a surface, surface texture or surface holder. + * @param configAttributes The attributes to configure EGL with. Accepts {@link + * #EGL_CONFIG_ATTRIBUTES_RGBA_1010102} and {@link #EGL_CONFIG_ATTRIBUTES_RGBA_8888}. + */ + @RequiresApi(17) + public static EGLSurface getEglSurface( + EGLDisplay eglDisplay, Object surface, int[] configAttributes) throws GlException { + return Api17.getEglSurface( + eglDisplay, surface, configAttributes, EGL_WINDOW_SURFACE_ATTRIBUTES_NONE); + } - return program; + /** + * Creates a new {@link EGLSurface} wrapping a pixel buffer. + * + * @param eglDisplay The {@link EGLDisplay} to attach the surface to. + * @param width The width of the pixel buffer. + * @param height The height of the pixel buffer. + * @param configAttributes EGL configuration attributes. Valid arguments include {@link + * #EGL_CONFIG_ATTRIBUTES_RGBA_8888} and {@link #EGL_CONFIG_ATTRIBUTES_RGBA_1010102}. + */ + @RequiresApi(17) + private static EGLSurface createPbufferSurface( + EGLDisplay eglDisplay, int width, int height, int[] configAttributes) throws GlException { + int[] pbufferAttributes = + new int[] { + EGL14.EGL_WIDTH, width, + EGL14.EGL_HEIGHT, height, + EGL14.EGL_NONE + }; + return Api17.createEglPbufferSurface(eglDisplay, configAttributes, pbufferAttributes); } - /** Returns the {@link Attribute}s in the specified {@code program}. */ - public static Attribute[] getAttributes(int program) { - int[] attributeCount = new int[1]; - GLES20.glGetProgramiv(program, GLES20.GL_ACTIVE_ATTRIBUTES, attributeCount, 0); - if (attributeCount[0] != 2) { - throw new IllegalStateException("expected two attributes"); + /** + * Creates and focuses a placeholder {@link EGLSurface}. + * + *
      This makes a {@link EGLContext} current when reading and writing to a surface is not + * required, configured with {@link #EGL_CONFIG_ATTRIBUTES_RGBA_8888}. + * + * @param eglContext The {@link EGLContext} to make current. + * @param eglDisplay The {@link EGLDisplay} to attach the surface to. + * @return {@link EGL14#EGL_NO_SURFACE} if supported and a 1x1 pixel buffer surface otherwise. + */ + @RequiresApi(17) + public static EGLSurface focusPlaceholderEglSurface(EGLContext eglContext, EGLDisplay eglDisplay) + throws GlException { + return createFocusedPlaceholderEglSurface( + eglContext, eglDisplay, EGL_CONFIG_ATTRIBUTES_RGBA_8888); + } + + /** + * Creates and focuses a placeholder {@link EGLSurface}. + * + *
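// Usage sketch (illustrative, not part of the patch): the off-screen pattern that
// focusPlaceholderEglSurface enables, mirroring how isYuvTargetExtensionSupported() above
// uses it. No real output surface is needed; a 1x1 pbuffer is only created when the
// surfaceless-context extension is missing. The method name is an assumption.
@RequiresApi(17)
private static String queryGlExtensionsOffscreen() throws GlUtil.GlException {
  EGLDisplay eglDisplay = GlUtil.createEglDisplay();
  EGLContext eglContext = GlUtil.createEglContext(eglDisplay);
  try {
    GlUtil.focusPlaceholderEglSurface(eglContext, eglDisplay);
    return GLES20.glGetString(GLES20.GL_EXTENSIONS);
  } finally {
    GlUtil.destroyEglContext(eglDisplay, eglContext);
  }
}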
      This makes a {@link EGLContext} current when reading and writing to a surface is not + * required. + * + * @param eglContext The {@link EGLContext} to make current. + * @param eglDisplay The {@link EGLDisplay} to attach the surface to. + * @param configAttributes The attributes to configure EGL with. Accepts {@link + * #EGL_CONFIG_ATTRIBUTES_RGBA_1010102} and {@link #EGL_CONFIG_ATTRIBUTES_RGBA_8888}. + * @return A placeholder {@link EGLSurface} that has been focused to allow rendering to take + * place, or {@link EGL14#EGL_NO_SURFACE} if the current context supports rendering without a + * surface. + */ + @RequiresApi(17) + public static EGLSurface createFocusedPlaceholderEglSurface( + EGLContext eglContext, EGLDisplay eglDisplay, int[] configAttributes) throws GlException { + EGLSurface eglSurface = + isSurfacelessContextExtensionSupported() + ? EGL14.EGL_NO_SURFACE + : createPbufferSurface(eglDisplay, /* width= */ 1, /* height= */ 1, configAttributes); + + focusEglSurface(eglDisplay, eglContext, eglSurface, /* width= */ 1, /* height= */ 1); + return eglSurface; + } + + /** + * Collects all OpenGL errors that occurred since this method was last called and throws a {@link + * GlException} with the combined error message. + */ + public static void checkGlError() throws GlException { + StringBuilder errorMessageBuilder = new StringBuilder(); + boolean foundError = false; + int error; + while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) { + if (foundError) { + errorMessageBuilder.append('\n'); + } + errorMessageBuilder.append("glError: ").append(gluErrorString(error)); + foundError = true; + } + if (foundError) { + throw new GlException(errorMessageBuilder.toString()); } + } - Attribute[] attributes = new Attribute[attributeCount[0]]; - for (int i = 0; i < attributeCount[0]; i++) { - attributes[i] = new Attribute(program, i); + /** + * Asserts the texture size is valid. + * + * @param width The width for a texture. + * @param height The height for a texture. + * @throws GlException If the texture width or height is invalid. + */ + private static void assertValidTextureSize(int width, int height) throws GlException { + // TODO(b/201293185): Consider handling adjustments for sizes > GL_MAX_TEXTURE_SIZE + // (ex. downscaling appropriately) in a texture processor instead of asserting incorrect + // values. + // For valid GL sizes, see: + // https://www.khronos.org/registry/OpenGL-Refpages/es2.0/xhtml/glTexImage2D.xml + int[] maxTextureSizeBuffer = new int[1]; + GLES20.glGetIntegerv(GLES20.GL_MAX_TEXTURE_SIZE, maxTextureSizeBuffer, 0); + int maxTextureSize = maxTextureSizeBuffer[0]; + checkState( + maxTextureSize > 0, + "Create a OpenGL context first or run the GL methods on an OpenGL thread."); + + if (width < 0 || height < 0) { + throw new GlException("width or height is less than 0"); + } + if (width > maxTextureSize || height > maxTextureSize) { + throw new GlException( + "width or height is greater than GL_MAX_TEXTURE_SIZE " + maxTextureSize); } - return attributes; } - /** Returns the {@link Uniform}s in the specified {@code program}. */ - public static Uniform[] getUniforms(int program) { - int[] uniformCount = new int[1]; - GLES20.glGetProgramiv(program, GLES20.GL_ACTIVE_UNIFORMS, uniformCount, 0); + /** Fills the pixels in the current output render target with (r=0, g=0, b=0, a=0). 
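// Call-site sketch (illustrative, not part of the patch): checkGlError() now throws the
// checked GlUtil.GlException, with every pending GL error folded into one message, instead of
// logging and optionally throwing a RuntimeException as the removed code did. A hypothetical
// caller therefore wraps GL work in a try/catch; the glDisable call and the "GlExample" tag
// are placeholders.
try {
  GLES20.glDisable(GLES20.GL_BLEND);
  GlUtil.checkGlError();
} catch (GlUtil.GlException e) {
  Log.e("GlExample", "GL error while disabling blending", e);
}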
*/ + public static void clearOutputFrame() throws GlException { + GLES20.glClearColor(/* red= */ 0, /* green= */ 0, /* blue= */ 0, /* alpha= */ 0); + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + GlUtil.checkGlError(); + } - Uniform[] uniforms = new Uniform[uniformCount[0]]; - for (int i = 0; i < uniformCount[0]; i++) { - uniforms[i] = new Uniform(program, i); - } + /** + * Makes the specified {@code eglSurface} the render target, using a viewport of {@code width} by + * {@code height} pixels. + */ + @RequiresApi(17) + public static void focusEglSurface( + EGLDisplay eglDisplay, EGLContext eglContext, EGLSurface eglSurface, int width, int height) + throws GlException { + Api17.focusRenderTarget( + eglDisplay, eglContext, eglSurface, /* framebuffer= */ 0, width, height); + } - return uniforms; + /** + * Makes the specified {@code framebuffer} the render target, using a viewport of {@code width} by + * {@code height} pixels. + */ + @RequiresApi(17) + public static void focusFramebuffer( + EGLDisplay eglDisplay, + EGLContext eglContext, + EGLSurface eglSurface, + int framebuffer, + int width, + int height) + throws GlException { + Api17.focusRenderTarget(eglDisplay, eglContext, eglSurface, framebuffer, width, height); + } + + /** + * Makes the specified {@code framebuffer} the render target, using a viewport of {@code width} by + * {@code height} pixels. + * + *

      The caller must ensure that there is a current OpenGL context before calling this method. + * + * @param framebuffer The identifier of the framebuffer object to bind as the output render + * target. + * @param width The viewport width, in pixels. + * @param height The viewport height, in pixels. + */ + @RequiresApi(17) + public static void focusFramebufferUsingCurrentContext(int framebuffer, int width, int height) + throws GlException { + Api17.focusFramebufferUsingCurrentContext(framebuffer, width, height); + } + + /** + * Deletes a GL texture. + * + * @param textureId The ID of the texture to delete. + */ + public static void deleteTexture(int textureId) throws GlException { + GLES20.glDeleteTextures(/* n= */ 1, new int[] {textureId}, /* offset= */ 0); + checkGlError(); + } + + /** + * Destroys the {@link EGLContext} identified by the provided {@link EGLDisplay} and {@link + * EGLContext}. + */ + @RequiresApi(17) + public static void destroyEglContext( + @Nullable EGLDisplay eglDisplay, @Nullable EGLContext eglContext) throws GlException { + Api17.destroyEglContext(eglDisplay, eglContext); } /** @@ -344,7 +483,7 @@ public static FloatBuffer createBuffer(float[] data) { * * @param capacity The new buffer's capacity, in floats. */ - public static FloatBuffer createBuffer(int capacity) { + private static FloatBuffer createBuffer(int capacity) { ByteBuffer byteBuffer = ByteBuffer.allocateDirect(capacity * C.BYTES_PER_FLOAT); return byteBuffer.order(ByteOrder.nativeOrder()).asFloatBuffer(); } @@ -353,52 +492,268 @@ public static FloatBuffer createBuffer(int capacity) { * Creates a GL_TEXTURE_EXTERNAL_OES with default configuration of GL_LINEAR filtering and * GL_CLAMP_TO_EDGE wrapping. */ - public static int createExternalTexture() { + public static int createExternalTexture() throws GlException { + int texId = generateTexture(); + bindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texId); + return texId; + } + + /** + * Allocates a new RGBA texture with the specified dimensions and color component precision. + * + * @param width The width of the new texture in pixels. + * @param height The height of the new texture in pixels. + * @param useHighPrecisionColorComponents If {@code false}, uses 8-bit unsigned bytes. If {@code + * true}, use 16-bit (half-precision) floating-point. + * @throws GlException If the texture allocation fails. + * @return The texture identifier for the newly-allocated texture. + */ + public static int createTexture(int width, int height, boolean useHighPrecisionColorComponents) + throws GlException { + // TODO(227624622): Implement a pixel test that confirms 16f has less posterization. + if (useHighPrecisionColorComponents) { + checkState(Util.SDK_INT >= 18, "GLES30 extensions are not supported below API 18."); + return createTexture(width, height, GLES30.GL_RGBA16F, GLES30.GL_HALF_FLOAT); + } + return createTexture(width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE); + } + + /** + * Allocates a new RGBA texture with the specified dimensions and color component precision. + * + * @param width The width of the new texture in pixels. + * @param height The height of the new texture in pixels. + * @param internalFormat The number of color components in the texture, as well as their format. + * @param type The data type of the pixel data. + * @throws GlException If the texture allocation fails. + * @return The texture identifier for the newly-allocated texture. 
+ */ + private static int createTexture(int width, int height, int internalFormat, int type) + throws GlException { + assertValidTextureSize(width, height); + int texId = generateTexture(); + bindTexture(GLES20.GL_TEXTURE_2D, texId); + ByteBuffer byteBuffer = ByteBuffer.allocateDirect(width * height * 4); + GLES20.glTexImage2D( + GLES20.GL_TEXTURE_2D, + /* level= */ 0, + internalFormat, + width, + height, + /* border= */ 0, + GLES20.GL_RGBA, + type, + byteBuffer); + checkGlError(); + return texId; + } + + /** Returns a new GL texture identifier. */ + private static int generateTexture() throws GlException { + checkGlException( + !Util.areEqual(EGL14.eglGetCurrentContext(), EGL14.EGL_NO_CONTEXT), "No current context"); + int[] texId = new int[1]; - GLES20.glGenTextures(1, IntBuffer.wrap(texId)); - GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texId[0]); - GLES20.glTexParameteri( - GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); - GLES20.glTexParameteri( - GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); - GLES20.glTexParameteri( - GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameteri( - GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glGenTextures(/* n= */ 1, texId, /* offset= */ 0); checkGlError(); return texId[0]; } - private static void addShader(int type, String source, int program) { - int shader = GLES20.glCreateShader(type); - GLES20.glShaderSource(shader, source); - GLES20.glCompileShader(shader); + /** + * Binds the texture of the given type with default configuration of GL_LINEAR filtering and + * GL_CLAMP_TO_EDGE wrapping. + * + * @param textureTarget The target to which the texture is bound, e.g. {@link + * GLES20#GL_TEXTURE_2D} for a two-dimensional texture or {@link + * GLES11Ext#GL_TEXTURE_EXTERNAL_OES} for an external texture. + * @param texId The texture identifier. + */ + public static void bindTexture(int textureTarget, int texId) throws GlException { + GLES20.glBindTexture(textureTarget, texId); + checkGlError(); + GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); + checkGlError(); + GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); + checkGlError(); + GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + checkGlError(); + GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + checkGlError(); + } - int[] result = new int[] {GLES20.GL_FALSE}; - GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0); - if (result[0] != GLES20.GL_TRUE) { - throwGlError(GLES20.glGetShaderInfoLog(shader) + ", source: " + source); - } + /** + * Returns a new framebuffer for the texture. + * + * @param texId The identifier of the texture to attach to the framebuffer. 
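// Usage sketch (illustrative, not part of the patch): rendering into an intermediate texture
// with the texture and framebuffer helpers in this class. The method name and the 640x360
// size are assumptions; the EGL objects are expected to come from the create/focus helpers
// above, and the caller later releases the texture with GlUtil.deleteTexture(texId).
@RequiresApi(17)
private static int renderToTexture(
    EGLDisplay eglDisplay, EGLContext eglContext, EGLSurface eglSurface)
    throws GlUtil.GlException {
  int width = 640;
  int height = 360;
  int texId = GlUtil.createTexture(width, height, /* useHighPrecisionColorComponents= */ false);
  int fboId = GlUtil.createFboForTexture(texId);
  GlUtil.focusFramebuffer(eglDisplay, eglContext, eglSurface, fboId, width, height);
  GlUtil.clearOutputFrame();
  // ... draw into the texture here, then sample it from a later pass ...
  return texId;
}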
+ */ + public static int createFboForTexture(int texId) throws GlException { + checkGlException( + !Util.areEqual(EGL14.eglGetCurrentContext(), EGL14.EGL_NO_CONTEXT), "No current context"); - GLES20.glAttachShader(program, shader); - GLES20.glDeleteShader(shader); + int[] fboId = new int[1]; + GLES20.glGenFramebuffers(/* n= */ 1, fboId, /* offset= */ 0); checkGlError(); + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId[0]); + checkGlError(); + GLES20.glFramebufferTexture2D( + GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, texId, 0); + checkGlError(); + return fboId[0]; } - private static void throwGlError(String errorMsg) { - Log.e(TAG, errorMsg); - if (ExoPlayerLibraryInfo.GL_ASSERTIONS_ENABLED) { - throw new RuntimeException(errorMsg); + /** + * Throws a {@link GlException} with the given message if {@code expression} evaluates to {@code + * false}. + */ + public static void checkGlException(boolean expression, String errorMessage) throws GlException { + if (!expression) { + throw new GlException(errorMessage); } } - /** Returns the length of the null-terminated string in {@code strVal}. */ - private static int strlen(byte[] strVal) { - for (int i = 0; i < strVal.length; ++i) { - if (strVal[i] == '\0') { - return i; + private static void checkEglException(String errorMessage) throws GlException { + int error = EGL14.eglGetError(); + checkGlException(error == EGL14.EGL_SUCCESS, errorMessage + ", error code: " + error); + } + + @RequiresApi(17) + private static final class Api17 { + private Api17() {} + + @DoNotInline + public static EGLDisplay createEglDisplay() throws GlException { + EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY); + checkGlException(!eglDisplay.equals(EGL14.EGL_NO_DISPLAY), "No EGL display."); + checkGlException( + EGL14.eglInitialize( + eglDisplay, + /* unusedMajor */ new int[1], + /* majorOffset= */ 0, + /* unusedMinor */ new int[1], + /* minorOffset= */ 0), + "Error in eglInitialize."); + checkGlError(); + return eglDisplay; + } + + @DoNotInline + public static EGLContext createEglContext( + EGLDisplay eglDisplay, int version, int[] configAttributes) throws GlException { + int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, version, EGL14.EGL_NONE}; + EGLContext eglContext = + EGL14.eglCreateContext( + eglDisplay, + getEglConfig(eglDisplay, configAttributes), + EGL14.EGL_NO_CONTEXT, + contextAttributes, + /* offset= */ 0); + if (eglContext == null) { + EGL14.eglTerminate(eglDisplay); + throw new GlException( + "eglCreateContext() failed to create a valid context. 
The device may not support EGL" + + " version " + + version); + } + checkGlError(); + return eglContext; + } + + @DoNotInline + public static EGLSurface getEglSurface( + EGLDisplay eglDisplay, + Object surface, + int[] configAttributes, + int[] windowSurfaceAttributes) + throws GlException { + EGLSurface eglSurface = + EGL14.eglCreateWindowSurface( + eglDisplay, + getEglConfig(eglDisplay, configAttributes), + surface, + windowSurfaceAttributes, + /* offset= */ 0); + checkEglException("Error creating surface"); + return eglSurface; + } + + @DoNotInline + public static EGLSurface createEglPbufferSurface( + EGLDisplay eglDisplay, int[] configAttributes, int[] pbufferAttributes) throws GlException { + EGLSurface eglSurface = + EGL14.eglCreatePbufferSurface( + eglDisplay, + getEglConfig(eglDisplay, configAttributes), + pbufferAttributes, + /* offset= */ 0); + checkEglException("Error creating surface"); + return eglSurface; + } + + @DoNotInline + public static void focusRenderTarget( + EGLDisplay eglDisplay, + EGLContext eglContext, + EGLSurface eglSurface, + int framebuffer, + int width, + int height) + throws GlException { + EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext); + checkEglException("Error making context current"); + focusFramebufferUsingCurrentContext(framebuffer, width, height); + } + + @DoNotInline + public static void focusFramebufferUsingCurrentContext(int framebuffer, int width, int height) + throws GlException { + checkGlException( + !Util.areEqual(EGL14.eglGetCurrentContext(), EGL14.EGL_NO_CONTEXT), "No current context"); + + int[] boundFramebuffer = new int[1]; + GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, boundFramebuffer, /* offset= */ 0); + if (boundFramebuffer[0] != framebuffer) { + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebuffer); + } + checkGlError(); + GLES20.glViewport(/* x= */ 0, /* y= */ 0, width, height); + checkGlError(); + } + + @DoNotInline + public static void destroyEglContext( + @Nullable EGLDisplay eglDisplay, @Nullable EGLContext eglContext) throws GlException { + if (eglDisplay == null) { + return; + } + EGL14.eglMakeCurrent( + eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT); + checkEglException("Error releasing context"); + if (eglContext != null) { + EGL14.eglDestroyContext(eglDisplay, eglContext); + checkEglException("Error destroying context"); + } + EGL14.eglReleaseThread(); + checkEglException("Error releasing thread"); + EGL14.eglTerminate(eglDisplay); + checkEglException("Error terminating display"); + } + + @DoNotInline + private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] attributes) + throws GlException { + EGLConfig[] eglConfigs = new EGLConfig[1]; + if (!EGL14.eglChooseConfig( + eglDisplay, + attributes, + /* attrib_listOffset= */ 0, + eglConfigs, + /* configsOffset= */ 0, + /* config_size= */ 1, + /* unusedNumConfig */ new int[1], + /* num_configOffset= */ 0)) { + throw new GlException("eglChooseConfig failed."); } + return eglConfigs[0]; } - return strVal.length; } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/HandlerWrapper.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/HandlerWrapper.java index 5b85b26c3f..8247447d93 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/HandlerWrapper.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/HandlerWrapper.java @@ -17,7 +17,6 @@ import android.os.Handler; import android.os.Looper; -import android.os.Message; 
import androidx.annotation.Nullable; /** @@ -26,36 +25,58 @@ */ public interface HandlerWrapper { - /** @see Handler#getLooper() */ + /** A message obtained from the handler. */ + interface Message { + + /** See {@link android.os.Message#sendToTarget()}. */ + void sendToTarget(); + + /** See {@link android.os.Message#getTarget()}. */ + HandlerWrapper getTarget(); + } + + /** See {@link Handler#getLooper()}. */ Looper getLooper(); - /** @see Handler#obtainMessage(int) */ + /** See {@link Handler#hasMessages(int)}. */ + boolean hasMessages(int what); + + /** See {@link Handler#obtainMessage(int)}. */ Message obtainMessage(int what); - /** @see Handler#obtainMessage(int, Object) */ + /** See {@link Handler#obtainMessage(int, Object)}. */ Message obtainMessage(int what, @Nullable Object obj); - /** @see Handler#obtainMessage(int, int, int) */ + /** See {@link Handler#obtainMessage(int, int, int)}. */ Message obtainMessage(int what, int arg1, int arg2); - /** @see Handler#obtainMessage(int, int, int, Object) */ + /** See {@link Handler#obtainMessage(int, int, int, Object)}. */ Message obtainMessage(int what, int arg1, int arg2, @Nullable Object obj); - /** @see Handler#sendEmptyMessage(int) */ + /** See {@link Handler#sendMessageAtFrontOfQueue(android.os.Message)}. */ + boolean sendMessageAtFrontOfQueue(Message message); + + /** See {@link Handler#sendEmptyMessage(int)}. */ boolean sendEmptyMessage(int what); - /** @see Handler#sendEmptyMessageAtTime(int, long) */ + /** See {@link Handler#sendEmptyMessageDelayed(int, long)}. */ + boolean sendEmptyMessageDelayed(int what, int delayMs); + + /** See {@link Handler#sendEmptyMessageAtTime(int, long)}. */ boolean sendEmptyMessageAtTime(int what, long uptimeMs); - /** @see Handler#removeMessages(int) */ + /** See {@link Handler#removeMessages(int)}. */ void removeMessages(int what); - /** @see Handler#removeCallbacksAndMessages(Object) */ + /** See {@link Handler#removeCallbacksAndMessages(Object)}. */ void removeCallbacksAndMessages(@Nullable Object token); - /** @see Handler#post(Runnable) */ + /** See {@link Handler#post(Runnable)}. */ boolean post(Runnable runnable); - /** @see Handler#postDelayed(Runnable, long) */ + /** See {@link Handler#postDelayed(Runnable, long)}. */ boolean postDelayed(Runnable runnable, long delayMs); + + /** See {@link android.os.Handler#postAtFrontOfQueue(Runnable)}. */ + boolean postAtFrontOfQueue(Runnable runnable); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/LibraryLoader.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/LibraryLoader.java index 7ee88d8f0f..e8c6ce3576 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/LibraryLoader.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/LibraryLoader.java @@ -17,10 +17,8 @@ import java.util.Arrays; -/** - * Configurable loader for native libraries. - */ -public final class LibraryLoader { +/** Configurable loader for native libraries. */ +public abstract class LibraryLoader { private static final String TAG = "LibraryLoader"; @@ -36,17 +34,15 @@ public LibraryLoader(String... libraries) { } /** - * Overrides the names of the libraries to load. Must be called before any call to - * {@link #isAvailable()}. + * Overrides the names of the libraries to load. Must be called before any call to {@link + * #isAvailable()}. */ public synchronized void setLibraries(String... 
libraries) { Assertions.checkState(!loadAttempted, "Cannot set libraries after loading"); nativeLibraries = libraries; } - /** - * Returns whether the underlying libraries are available, loading them if necessary. - */ + /** Returns whether the underlying libraries are available, loading them if necessary. */ public synchronized boolean isAvailable() { if (loadAttempted) { return isAvailable; @@ -54,7 +50,7 @@ public synchronized boolean isAvailable() { loadAttempted = true; try { for (String lib : nativeLibraries) { - System.loadLibrary(lib); + loadLibrary(lib); } isAvailable = true; } catch (UnsatisfiedLinkError exception) { @@ -65,4 +61,16 @@ public synchronized boolean isAvailable() { return isAvailable; } + /** + * Should be implemented to call {@code System.loadLibrary(name)}. + * + *
      It's necessary for each subclass to implement this method because {@link + * System#loadLibrary(String)} uses reflection to obtain the calling class, which is then used to + * obtain the class loader to use when loading the native library. If this class were to implement + * the method directly, and if a subclass were to have a different class loader, then loading of + * the native library would fail. + * + * @param name The name of the library to load. + */ + protected abstract void loadLibrary(String name); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ListenerSet.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ListenerSet.java new file mode 100644 index 0000000000..e1a6ec6504 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ListenerSet.java @@ -0,0 +1,361 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import static com.google.android.exoplayer2.util.Assertions.checkState; + +import android.os.Looper; +import android.os.Message; +import androidx.annotation.CheckResult; +import androidx.annotation.GuardedBy; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import java.util.ArrayDeque; +import java.util.concurrent.CopyOnWriteArraySet; +import org.checkerframework.checker.nullness.qual.NonNull; + +/** + * A set of listeners. + * + *
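// Usage sketch (illustrative, not part of the patch): LibraryLoader is now abstract, so each
// caller supplies the System.loadLibrary call from its own class loader, as the javadoc above
// explains. "ffmpegJNI" is a placeholder library name.
private static final LibraryLoader LOADER =
    new LibraryLoader("ffmpegJNI") {
      @Override
      protected void loadLibrary(String name) {
        System.loadLibrary(name);
      }
    };

// Typical call site: returns false instead of throwing if loading fails.
public static boolean isNativeLibraryAvailable() {
  return LOADER.isAvailable();
}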
      Events are guaranteed to arrive in the order in which they happened even if a new event is + * triggered recursively from another listener. + * + *
      Events are also guaranteed to be only sent to the listeners registered at the time the event + * was enqueued and haven't been removed since. + * + *
      All methods must be called on the {@link Looper} passed to the constructor unless indicated + * otherwise. + * + * @param The listener type. + */ +public final class ListenerSet { + + /** + * An event sent to a listener. + * + * @param The listener type. + */ + public interface Event { + + /** Invokes the event notification on the given listener. */ + void invoke(T listener); + } + + /** + * An event sent to a listener when all other events sent during one {@link Looper} message queue + * iteration were handled by the listener. + * + * @param The listener type. + */ + public interface IterationFinishedEvent { + + /** + * Invokes the iteration finished event. + * + * @param listener The listener to invoke the event on. + * @param eventFlags The combined event {@link FlagSet flags} of all events sent in this + * iteration. + */ + void invoke(T listener, FlagSet eventFlags); + } + + private static final int MSG_ITERATION_FINISHED = 0; + + private final Clock clock; + private final HandlerWrapper handler; + private final IterationFinishedEvent iterationFinishedEvent; + private final CopyOnWriteArraySet> listeners; + private final ArrayDeque flushingEvents; + private final ArrayDeque queuedEvents; + private final Object releasedLock; + + @GuardedBy("releasedLock") + private boolean released; + + private boolean throwsWhenUsingWrongThread; + + /** + * Creates a new listener set. + * + * @param looper A {@link Looper} used to call listeners on. The same {@link Looper} must be used + * to call all other methods of this class unless indicated otherwise. + * @param clock A {@link Clock}. + * @param iterationFinishedEvent An {@link IterationFinishedEvent} sent when all other events sent + * during one {@link Looper} message queue iteration were handled by the listeners. + */ + public ListenerSet(Looper looper, Clock clock, IterationFinishedEvent iterationFinishedEvent) { + this(/* listeners= */ new CopyOnWriteArraySet<>(), looper, clock, iterationFinishedEvent); + } + + private ListenerSet( + CopyOnWriteArraySet> listeners, + Looper looper, + Clock clock, + IterationFinishedEvent iterationFinishedEvent) { + this.clock = clock; + this.listeners = listeners; + this.iterationFinishedEvent = iterationFinishedEvent; + releasedLock = new Object(); + flushingEvents = new ArrayDeque<>(); + queuedEvents = new ArrayDeque<>(); + // It's safe to use "this" because we don't send a message before exiting the constructor. + @SuppressWarnings("nullness:methodref.receiver.bound") + HandlerWrapper handler = clock.createHandler(looper, this::handleMessage); + this.handler = handler; + throwsWhenUsingWrongThread = true; + } + + /** + * Copies the listener set. + * + *
      This method can be called from any thread. + * + * @param looper The new {@link Looper} for the copied listener set. + * @param iterationFinishedEvent The new {@link IterationFinishedEvent} sent when all other events + * sent during one {@link Looper} message queue iteration were handled by the listeners. + * @return The copied listener set. + */ + @CheckResult + public ListenerSet copy(Looper looper, IterationFinishedEvent iterationFinishedEvent) { + return copy(looper, clock, iterationFinishedEvent); + } + + /** + * Copies the listener set. + * + *
      This method can be called from any thread. + * + * @param looper The new {@link Looper} for the copied listener set. + * @param clock The new {@link Clock} for the copied listener set. + * @param iterationFinishedEvent The new {@link IterationFinishedEvent} sent when all other events + * sent during one {@link Looper} message queue iteration were handled by the listeners. + * @return The copied listener set. + */ + @CheckResult + public ListenerSet copy( + Looper looper, Clock clock, IterationFinishedEvent iterationFinishedEvent) { + return new ListenerSet<>(listeners, looper, clock, iterationFinishedEvent); + } + + /** + * Adds a listener to the set. + * + *
      If a listener is already present, it will not be added again. + * + *
      This method can be called from any thread. + * + * @param listener The listener to be added. + */ + public void add(T listener) { + Assertions.checkNotNull(listener); + synchronized (releasedLock) { + if (released) { + return; + } + listeners.add(new ListenerHolder<>(listener)); + } + } + + /** + * Removes a listener from the set. + * + *
      If the listener is not present, nothing happens. + * + * @param listener The listener to be removed. + */ + public void remove(T listener) { + verifyCurrentThread(); + for (ListenerHolder listenerHolder : listeners) { + if (listenerHolder.listener.equals(listener)) { + listenerHolder.release(iterationFinishedEvent); + listeners.remove(listenerHolder); + } + } + } + + /** Removes all listeners from the set. */ + public void clear() { + verifyCurrentThread(); + listeners.clear(); + } + + /** Returns the number of added listeners. */ + public int size() { + verifyCurrentThread(); + return listeners.size(); + } + + /** + * Adds an event that is sent to the listeners when {@link #flushEvents} is called. + * + * @param eventFlag An integer indicating the type of the event, or {@link C#INDEX_UNSET} to + * report this event without flag. + * @param event The event. + */ + public void queueEvent(int eventFlag, Event event) { + verifyCurrentThread(); + CopyOnWriteArraySet> listenerSnapshot = new CopyOnWriteArraySet<>(listeners); + queuedEvents.add( + () -> { + for (ListenerHolder holder : listenerSnapshot) { + holder.invoke(eventFlag, event); + } + }); + } + + /** Notifies listeners of events previously enqueued with {@link #queueEvent(int, Event)}. */ + public void flushEvents() { + verifyCurrentThread(); + if (queuedEvents.isEmpty()) { + return; + } + if (!handler.hasMessages(MSG_ITERATION_FINISHED)) { + handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_ITERATION_FINISHED)); + } + boolean recursiveFlushInProgress = !flushingEvents.isEmpty(); + flushingEvents.addAll(queuedEvents); + queuedEvents.clear(); + if (recursiveFlushInProgress) { + // Recursive call to flush. Let the outer call handle the flush queue. + return; + } + while (!flushingEvents.isEmpty()) { + flushingEvents.peekFirst().run(); + flushingEvents.removeFirst(); + } + } + + /** + * {@link #queueEvent(int, Event) Queues} a single event and immediately {@link #flushEvents() + * flushes} the event queue to notify all listeners. + * + * @param eventFlag An integer flag indicating the type of the event, or {@link C#INDEX_UNSET} to + * report this event without flag. + * @param event The event. + */ + public void sendEvent(int eventFlag, Event event) { + queueEvent(eventFlag, event); + flushEvents(); + } + + /** + * Releases the set of listeners immediately. + * + *
      This will ensure no events are sent to any listener after this method has been called. + */ + public void release() { + verifyCurrentThread(); + synchronized (releasedLock) { + released = true; + } + for (ListenerHolder listenerHolder : listeners) { + listenerHolder.release(iterationFinishedEvent); + } + listeners.clear(); + } + + /** + * Sets whether methods throw when using the wrong thread. + * + *
      Do not use this method unless to support legacy use cases. + * + * @param throwsWhenUsingWrongThread Whether to throw when using the wrong thread. + * @deprecated Do not use this method and ensure all calls are made from the correct thread. + */ + @Deprecated + public void setThrowsWhenUsingWrongThread(boolean throwsWhenUsingWrongThread) { + this.throwsWhenUsingWrongThread = throwsWhenUsingWrongThread; + } + + private boolean handleMessage(Message message) { + for (ListenerHolder holder : listeners) { + holder.iterationFinished(iterationFinishedEvent); + if (handler.hasMessages(MSG_ITERATION_FINISHED)) { + // The invocation above triggered new events (and thus scheduled a new message). We need + // to stop here because this new message will take care of informing every listener about + // the new update (including the ones already called here). + break; + } + } + return true; + } + + private void verifyCurrentThread() { + if (!throwsWhenUsingWrongThread) { + return; + } + checkState(Thread.currentThread() == handler.getLooper().getThread()); + } + + private static final class ListenerHolder { + + public final T listener; + + private FlagSet.Builder flagsBuilder; + private boolean needsIterationFinishedEvent; + private boolean released; + + public ListenerHolder(T listener) { + this.listener = listener; + this.flagsBuilder = new FlagSet.Builder(); + } + + public void release(IterationFinishedEvent event) { + released = true; + if (needsIterationFinishedEvent) { + needsIterationFinishedEvent = false; + event.invoke(listener, flagsBuilder.build()); + } + } + + public void invoke(int eventFlag, Event event) { + if (!released) { + if (eventFlag != C.INDEX_UNSET) { + flagsBuilder.add(eventFlag); + } + needsIterationFinishedEvent = true; + event.invoke(listener); + } + } + + public void iterationFinished(IterationFinishedEvent event) { + if (!released && needsIterationFinishedEvent) { + // Reset flags before invoking the listener to ensure we keep all new flags that are set by + // recursive events triggered from this callback. + FlagSet flagsToNotify = flagsBuilder.build(); + flagsBuilder = new FlagSet.Builder(); + needsIterationFinishedEvent = false; + event.invoke(listener, flagsToNotify); + } + } + + @Override + public boolean equals(@Nullable Object other) { + if (this == other) { + return true; + } + if (other == null || getClass() != other.getClass()) { + return false; + } + return listener.equals(((ListenerHolder) other).listener); + } + + @Override + public int hashCode() { + return listener.hashCode(); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Log.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Log.java index e5e6f88d4d..dbb3cf2502 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Log.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Log.java @@ -15,15 +15,24 @@ */ package com.google.android.exoplayer2.util; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.text.TextUtils; +import androidx.annotation.GuardedBy; import androidx.annotation.IntDef; import androidx.annotation.Nullable; +import androidx.annotation.Size; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; import java.net.UnknownHostException; +import org.checkerframework.dataflow.qual.Pure; -/** Wrapper around {@link android.util.Log} which allows to set the log level. 
*/ +/** + * Wrapper around {@link android.util.Log} which allows to set the log level and to specify a custom + * log output. + */ public final class Log { /** @@ -32,8 +41,9 @@ public final class Log { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({LOG_LEVEL_ALL, LOG_LEVEL_INFO, LOG_LEVEL_WARNING, LOG_LEVEL_ERROR, LOG_LEVEL_OFF}) - @interface LogLevel {} + public @interface LogLevel {} /** Log level to log all messages. */ public static final int LOG_LEVEL_ALL = 0; /** Log level to only log informative, warning and error messages. */ @@ -45,19 +55,89 @@ public final class Log { /** Log level to disable all logging. */ public static final int LOG_LEVEL_OFF = Integer.MAX_VALUE; + /** + * Interface for a logger that can output messages with a tag. + * + *

      Use {@link #DEFAULT} to output to {@link android.util.Log}. + */ + public interface Logger { + + /** The default instance logging to {@link android.util.Log}. */ + Logger DEFAULT = + new Logger() { + @Override + public void d(String tag, String message) { + android.util.Log.d(tag, message); + } + + @Override + public void i(String tag, String message) { + android.util.Log.i(tag, message); + } + + @Override + public void w(String tag, String message) { + android.util.Log.w(tag, message); + } + + @Override + public void e(String tag, String message) { + android.util.Log.e(tag, message); + } + }; + + /** + * Logs a debug-level message. + * + * @param tag The tag of the message. + * @param message The message. + */ + void d(String tag, String message); + + /** + * Logs an information-level message. + * + * @param tag The tag of the message. + * @param message The message. + */ + void i(String tag, String message); + + /** + * Logs a warning-level message. + * + * @param tag The tag of the message. + * @param message The message. + */ + void w(String tag, String message); + + /** + * Logs an error-level message. + * + * @param tag The tag of the message. + * @param message The message. + */ + void e(String tag, String message); + } + + private static final Object lock = new Object(); + + @GuardedBy("lock") private static int logLevel = LOG_LEVEL_ALL; + + @GuardedBy("lock") private static boolean logStackTraces = true; + @GuardedBy("lock") + private static Logger logger = Logger.DEFAULT; + private Log() {} /** Returns current {@link LogLevel} for ExoPlayer logcat logging. */ + @Pure public static @LogLevel int getLogLevel() { - return logLevel; - } - - /** Returns whether stack traces of {@link Throwable}s will be logged to logcat. */ - public boolean getLogStackTraces() { - return logStackTraces; + synchronized (lock) { + return logLevel; + } } /** @@ -66,7 +146,9 @@ public boolean getLogStackTraces() { * @param logLevel The new {@link LogLevel}. */ public static void setLogLevel(@LogLevel int logLevel) { - Log.logLevel = logLevel; + synchronized (lock) { + Log.logLevel = logLevel; + } } /** @@ -76,54 +158,99 @@ public static void setLogLevel(@LogLevel int logLevel) { * @param logStackTraces Whether stack traces will be logged. */ public static void setLogStackTraces(boolean logStackTraces) { - Log.logStackTraces = logStackTraces; + synchronized (lock) { + Log.logStackTraces = logStackTraces; + } + } + + /** + * Sets a custom {@link Logger} as the output. + * + * @param logger The {@link Logger}. 
+ */ + public static void setLogger(Logger logger) { + synchronized (lock) { + Log.logger = logger; + } } - /** @see android.util.Log#d(String, String) */ - public static void d(String tag, String message) { - if (logLevel == LOG_LEVEL_ALL) { - android.util.Log.d(tag, message); + /** + * @see android.util.Log#d(String, String) + */ + @Pure + public static void d(@Size(max = 23) String tag, String message) { + synchronized (lock) { + if (logLevel == LOG_LEVEL_ALL) { + logger.d(tag, message); + } } } - /** @see android.util.Log#d(String, String, Throwable) */ - public static void d(String tag, String message, @Nullable Throwable throwable) { + /** + * @see android.util.Log#d(String, String, Throwable) + */ + @Pure + public static void d(@Size(max = 23) String tag, String message, @Nullable Throwable throwable) { d(tag, appendThrowableString(message, throwable)); } - /** @see android.util.Log#i(String, String) */ - public static void i(String tag, String message) { - if (logLevel <= LOG_LEVEL_INFO) { - android.util.Log.i(tag, message); + /** + * @see android.util.Log#i(String, String) + */ + @Pure + public static void i(@Size(max = 23) String tag, String message) { + synchronized (lock) { + if (logLevel <= LOG_LEVEL_INFO) { + logger.i(tag, message); + } } } - /** @see android.util.Log#i(String, String, Throwable) */ - public static void i(String tag, String message, @Nullable Throwable throwable) { + /** + * @see android.util.Log#i(String, String, Throwable) + */ + @Pure + public static void i(@Size(max = 23) String tag, String message, @Nullable Throwable throwable) { i(tag, appendThrowableString(message, throwable)); } - /** @see android.util.Log#w(String, String) */ - public static void w(String tag, String message) { - if (logLevel <= LOG_LEVEL_WARNING) { - android.util.Log.w(tag, message); + /** + * @see android.util.Log#w(String, String) + */ + @Pure + public static void w(@Size(max = 23) String tag, String message) { + synchronized (lock) { + if (logLevel <= LOG_LEVEL_WARNING) { + logger.w(tag, message); + } } } - /** @see android.util.Log#w(String, String, Throwable) */ - public static void w(String tag, String message, @Nullable Throwable throwable) { + /** + * @see android.util.Log#w(String, String, Throwable) + */ + @Pure + public static void w(@Size(max = 23) String tag, String message, @Nullable Throwable throwable) { w(tag, appendThrowableString(message, throwable)); } - /** @see android.util.Log#e(String, String) */ - public static void e(String tag, String message) { - if (logLevel <= LOG_LEVEL_ERROR) { - android.util.Log.e(tag, message); + /** + * @see android.util.Log#e(String, String) + */ + @Pure + public static void e(@Size(max = 23) String tag, String message) { + synchronized (lock) { + if (logLevel <= LOG_LEVEL_ERROR) { + logger.e(tag, message); + } } } - /** @see android.util.Log#e(String, String, Throwable) */ - public static void e(String tag, String message, @Nullable Throwable throwable) { + /** + * @see android.util.Log#e(String, String, Throwable) + */ + @Pure + public static void e(@Size(max = 23) String tag, String message, @Nullable Throwable throwable) { e(tag, appendThrowableString(message, throwable)); } @@ -139,24 +266,29 @@ public static void e(String tag, String message, @Nullable Throwable throwable) * @return The string representation of the {@link Throwable}. 
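Because the Logger hook above is the main extension point this change adds, a short usage sketch follows. Only Log.Logger, Logger.DEFAULT, Log.setLogger and Log.setLogLevel come from this file; the RingBufferLogger class and its 200-line buffer are assumptions made for the example.

import com.google.android.exoplayer2.util.Log;
import java.util.ArrayDeque;

/** Keeps the last 200 log lines in memory while still forwarding to logcat. */
final class RingBufferLogger implements Log.Logger {

  private final ArrayDeque<String> buffer = new ArrayDeque<>();

  @Override
  public synchronized void d(String tag, String message) {
    record("D", tag, message);
    Log.Logger.DEFAULT.d(tag, message);
  }

  @Override
  public synchronized void i(String tag, String message) {
    record("I", tag, message);
    Log.Logger.DEFAULT.i(tag, message);
  }

  @Override
  public synchronized void w(String tag, String message) {
    record("W", tag, message);
    Log.Logger.DEFAULT.w(tag, message);
  }

  @Override
  public synchronized void e(String tag, String message) {
    record("E", tag, message);
    Log.Logger.DEFAULT.e(tag, message);
  }

  private void record(String level, String tag, String message) {
    if (buffer.size() == 200) {
      buffer.removeFirst();
    }
    buffer.addLast(level + "/" + tag + ": " + message);
  }
}

// Installed once, e.g. early in application start-up:
//   Log.setLogger(new RingBufferLogger());
//   Log.setLogLevel(Log.LOG_LEVEL_WARNING); // optional: drop debug/info noise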
*/ @Nullable + @Pure public static String getThrowableString(@Nullable Throwable throwable) { - if (throwable == null) { - return null; - } else if (isCausedByUnknownHostException(throwable)) { - // UnknownHostException implies the device doesn't have network connectivity. - // UnknownHostException.getMessage() may return a string that's more verbose than desired for - // logging an expected failure mode. Conversely, android.util.Log.getStackTraceString has - // special handling to return the empty string, which can result in logging that doesn't - // indicate the failure mode at all. Hence we special case this exception to always return a - // concise but useful message. - return "UnknownHostException (no network)"; - } else if (!logStackTraces) { - return throwable.getMessage(); - } else { - return android.util.Log.getStackTraceString(throwable).trim().replace("\t", " "); + synchronized (lock) { + if (throwable == null) { + return null; + } else if (isCausedByUnknownHostException(throwable)) { + // UnknownHostException implies the device doesn't have network connectivity. + // UnknownHostException.getMessage() may return a string that's more verbose than desired + // for + // logging an expected failure mode. Conversely, android.util.Log.getStackTraceString has + // special handling to return the empty string, which can result in logging that doesn't + // indicate the failure mode at all. Hence we special case this exception to always return a + // concise but useful message. + return "UnknownHostException (no network)"; + } else if (!logStackTraces) { + return throwable.getMessage(); + } else { + return android.util.Log.getStackTraceString(throwable).trim().replace("\t", " "); + } } } + @Pure private static String appendThrowableString(String message, @Nullable Throwable throwable) { @Nullable String throwableString = getThrowableString(throwable); if (!TextUtils.isEmpty(throwableString)) { @@ -165,6 +297,7 @@ private static String appendThrowableString(String message, @Nullable Throwable return message; } + @Pure private static boolean isCausedByUnknownHostException(@Nullable Throwable throwable) { while (throwable != null) { if (throwable instanceof UnknownHostException) { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/LongArray.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/LongArray.java index 6d9725ad3d..d83ec0c1d1 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/LongArray.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/LongArray.java @@ -17,9 +17,7 @@ import java.util.Arrays; -/** - * An append-only, auto-growing {@code long[]}. - */ +/** An append-only, auto-growing {@code long[]}. */ public final class LongArray { private static final int DEFAULT_INITIAL_CAPACITY = 32; @@ -65,9 +63,7 @@ public long get(int index) { return values[index]; } - /** - * Returns the current size of the array. - */ + /** Returns the current size of the array. 
*/ public int size() { return size; } @@ -80,5 +76,4 @@ public int size() { public long[] toArray() { return Arrays.copyOf(values, size); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/MediaClock.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/MediaClock.java index e9f08a35c9..baa4802372 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/MediaClock.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/MediaClock.java @@ -17,27 +17,20 @@ import com.google.android.exoplayer2.PlaybackParameters; -/** - * Tracks the progression of media time. - */ +/** Tracks the progression of media time. */ public interface MediaClock { - /** - * Returns the current media position in microseconds. - */ + /** Returns the current media position in microseconds. */ long getPositionUs(); /** - * Attempts to set the playback parameters. The media clock may override these parameters if they - * are not supported. + * Attempts to set the playback parameters. The media clock may override the speed if changing the + * playback parameters is not supported. * * @param playbackParameters The playback parameters to attempt to set. */ void setPlaybackParameters(PlaybackParameters playbackParameters); - /** - * Returns the active playback parameters. - */ + /** Returns the active playback parameters. */ PlaybackParameters getPlaybackParameters(); - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/MediaFormatUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/MediaFormatUtil.java new file mode 100644 index 0000000000..758f535dd8 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/MediaFormatUtil.java @@ -0,0 +1,331 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import static com.google.android.exoplayer2.util.Util.SDK_INT; + +import android.annotation.SuppressLint; +import android.media.AudioFormat; +import android.media.MediaFormat; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.video.ColorInfo; +import java.nio.ByteBuffer; +import java.util.List; + +/** Helper class containing utility methods for managing {@link MediaFormat} instances. */ +public final class MediaFormatUtil { + + /** + * Custom {@link MediaFormat} key associated with a float representing the ratio between a pixel's + * width and height. + */ + // The constant value must not be changed, because it's also set by the framework MediaParser API. + public static final String KEY_PIXEL_WIDTH_HEIGHT_RATIO_FLOAT = + "exo-pixel-width-height-ratio-float"; + + /** + * Custom {@link MediaFormat} key associated with an integer representing the PCM encoding. + * + *

      Equivalent to {@link MediaFormat#KEY_PCM_ENCODING}, except it allows additional values + * defined by {@link C.PcmEncoding}, including {@link C#ENCODING_PCM_16BIT_BIG_ENDIAN}, {@link + * C#ENCODING_PCM_24BIT}, and {@link C#ENCODING_PCM_32BIT}. + */ + // The constant value must not be changed, because it's also set by the framework MediaParser API. + public static final String KEY_PCM_ENCODING_EXTENDED = "exo-pcm-encoding-int"; + + /** + * The {@link MediaFormat} key for the maximum bitrate in bits per second. + * + *

      The associated value is an integer. + * + *

      The key string constant is the same as {@code MediaFormat#KEY_MAX_BITRATE}. Values for it + * are already returned by the framework MediaExtractor; the key is a hidden field in {@code + * MediaFormat} though, which is why it's being replicated here. + */ + // The constant value must not be changed, because it's also set by the framework MediaParser and + // MediaExtractor APIs. + public static final String KEY_MAX_BIT_RATE = "max-bitrate"; + + private static final int MAX_POWER_OF_TWO_INT = 1 << 30; + + /** + * Returns a {@link MediaFormat} representing the given ExoPlayer {@link Format}. + * + *
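As a rough sketch of how a consumer might read these custom keys back out of a MediaFormat produced by createMediaFormatFromFormat (the MediaFormatKeys helper and its method names are hypothetical; only the two key constants and Format.NO_VALUE come from the library):

import android.media.MediaFormat;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.MediaFormatUtil;

// Hypothetical read-side helpers mirroring the "maybeSet" write-side guards.
final class MediaFormatKeys {

  static int getExtendedPcmEncoding(MediaFormat mediaFormat) {
    // Fall back to Format.NO_VALUE when the key is absent.
    return mediaFormat.containsKey(MediaFormatUtil.KEY_PCM_ENCODING_EXTENDED)
        ? mediaFormat.getInteger(MediaFormatUtil.KEY_PCM_ENCODING_EXTENDED)
        : Format.NO_VALUE;
  }

  static int getMaxBitrate(MediaFormat mediaFormat) {
    return mediaFormat.containsKey(MediaFormatUtil.KEY_MAX_BIT_RATE)
        ? mediaFormat.getInteger(MediaFormatUtil.KEY_MAX_BIT_RATE)
        : Format.NO_VALUE;
  }
}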

      May include the following custom keys: + * + *

        + *
      • {@link #KEY_PIXEL_WIDTH_HEIGHT_RATIO_FLOAT}. + *
      • {@link #KEY_PCM_ENCODING_EXTENDED}. + *
      + */ + @SuppressLint("InlinedApi") // Inlined MediaFormat keys. + public static MediaFormat createMediaFormatFromFormat(Format format) { + MediaFormat result = new MediaFormat(); + maybeSetInteger(result, MediaFormat.KEY_BIT_RATE, format.bitrate); + maybeSetInteger(result, KEY_MAX_BIT_RATE, format.peakBitrate); + maybeSetInteger(result, MediaFormat.KEY_CHANNEL_COUNT, format.channelCount); + + maybeSetColorInfo(result, format.colorInfo); + + maybeSetString(result, MediaFormat.KEY_MIME, format.sampleMimeType); + maybeSetString(result, MediaFormat.KEY_CODECS_STRING, format.codecs); + maybeSetFloat(result, MediaFormat.KEY_FRAME_RATE, format.frameRate); + maybeSetInteger(result, MediaFormat.KEY_WIDTH, format.width); + maybeSetInteger(result, MediaFormat.KEY_HEIGHT, format.height); + + setCsdBuffers(result, format.initializationData); + maybeSetPcmEncoding(result, format.pcmEncoding); + maybeSetString(result, MediaFormat.KEY_LANGUAGE, format.language); + maybeSetInteger(result, MediaFormat.KEY_MAX_INPUT_SIZE, format.maxInputSize); + maybeSetInteger(result, MediaFormat.KEY_SAMPLE_RATE, format.sampleRate); + maybeSetInteger(result, MediaFormat.KEY_CAPTION_SERVICE_NUMBER, format.accessibilityChannel); + result.setInteger(MediaFormat.KEY_ROTATION, format.rotationDegrees); + + int selectionFlags = format.selectionFlags; + setBooleanAsInt( + result, MediaFormat.KEY_IS_AUTOSELECT, selectionFlags & C.SELECTION_FLAG_AUTOSELECT); + setBooleanAsInt(result, MediaFormat.KEY_IS_DEFAULT, selectionFlags & C.SELECTION_FLAG_DEFAULT); + setBooleanAsInt( + result, MediaFormat.KEY_IS_FORCED_SUBTITLE, selectionFlags & C.SELECTION_FLAG_FORCED); + + result.setInteger(MediaFormat.KEY_ENCODER_DELAY, format.encoderDelay); + result.setInteger(MediaFormat.KEY_ENCODER_PADDING, format.encoderPadding); + + maybeSetPixelAspectRatio(result, format.pixelWidthHeightRatio); + return result; + } + + /** + * Sets a {@link MediaFormat} {@link String} value. Does nothing if {@code value} is null. + * + * @param format The {@link MediaFormat} being configured. + * @param key The key to set. + * @param value The value to set. + */ + public static void maybeSetString(MediaFormat format, String key, @Nullable String value) { + if (value != null) { + format.setString(key, value); + } + } + + /** + * Sets a {@link MediaFormat}'s codec specific data buffers. + * + * @param format The {@link MediaFormat} being configured. + * @param csdBuffers The csd buffers to set. + */ + public static void setCsdBuffers(MediaFormat format, List csdBuffers) { + for (int i = 0; i < csdBuffers.size(); i++) { + format.setByteBuffer("csd-" + i, ByteBuffer.wrap(csdBuffers.get(i))); + } + } + + /** + * Sets a {@link MediaFormat} integer value. Does nothing if {@code value} is {@link + * Format#NO_VALUE}. + * + * @param format The {@link MediaFormat} being configured. + * @param key The key to set. + * @param value The value to set. + */ + public static void maybeSetInteger(MediaFormat format, String key, int value) { + if (value != Format.NO_VALUE) { + format.setInteger(key, value); + } + } + + /** + * Sets a {@link MediaFormat} float value. Does nothing if {@code value} is {@link + * Format#NO_VALUE}. + * + * @param format The {@link MediaFormat} being configured. + * @param key The key to set. + * @param value The value to set. + */ + public static void maybeSetFloat(MediaFormat format, String key, float value) { + if (value != Format.NO_VALUE) { + format.setFloat(key, value); + } + } + + /** + * Sets a {@link MediaFormat} {@link ByteBuffer} value. 
Does nothing if {@code value} is null. + * + * @param format The {@link MediaFormat} being configured. + * @param key The key to set. + * @param value The byte array that will be wrapped to obtain the value. + */ + public static void maybeSetByteBuffer(MediaFormat format, String key, @Nullable byte[] value) { + if (value != null) { + format.setByteBuffer(key, ByteBuffer.wrap(value)); + } + } + + /** + * Sets a {@link MediaFormat}'s color information. Does nothing if {@code colorInfo} is null. + * + * @param format The {@link MediaFormat} being configured. + * @param colorInfo The color info to set. + */ + @SuppressWarnings("InlinedApi") + public static void maybeSetColorInfo(MediaFormat format, @Nullable ColorInfo colorInfo) { + if (colorInfo != null) { + maybeSetInteger(format, MediaFormat.KEY_COLOR_TRANSFER, colorInfo.colorTransfer); + maybeSetInteger(format, MediaFormat.KEY_COLOR_STANDARD, colorInfo.colorSpace); + maybeSetInteger(format, MediaFormat.KEY_COLOR_RANGE, colorInfo.colorRange); + maybeSetByteBuffer(format, MediaFormat.KEY_HDR_STATIC_INFO, colorInfo.hdrStaticInfo); + } + } + + /** + * Creates and returns a {@code ColorInfo}, if a valid instance is described in the {@link + * MediaFormat}. + */ + @Nullable + public static ColorInfo getColorInfo(MediaFormat mediaFormat) { + if (SDK_INT < 29) { + return null; + } + int colorSpace = + mediaFormat.getInteger(MediaFormat.KEY_COLOR_STANDARD, /* defaultValue= */ Format.NO_VALUE); + int colorRange = + mediaFormat.getInteger(MediaFormat.KEY_COLOR_RANGE, /* defaultValue= */ Format.NO_VALUE); + int colorTransfer = + mediaFormat.getInteger(MediaFormat.KEY_COLOR_TRANSFER, /* defaultValue= */ Format.NO_VALUE); + @Nullable + ByteBuffer hdrStaticInfoByteBuffer = mediaFormat.getByteBuffer(MediaFormat.KEY_HDR_STATIC_INFO); + @Nullable + byte[] hdrStaticInfo = + hdrStaticInfoByteBuffer != null ? getArray(hdrStaticInfoByteBuffer) : null; + // Some devices may produce invalid values from MediaFormat#getInteger. + // See b/239435670 for more information. + if (!isValidColorSpace(colorSpace)) { + colorSpace = Format.NO_VALUE; + } + if (!isValidColorRange(colorRange)) { + colorRange = Format.NO_VALUE; + } + if (!isValidColorTransfer(colorTransfer)) { + colorTransfer = Format.NO_VALUE; + } + + if (colorSpace != Format.NO_VALUE + || colorRange != Format.NO_VALUE + || colorTransfer != Format.NO_VALUE + || hdrStaticInfo != null) { + return new ColorInfo(colorSpace, colorRange, colorTransfer, hdrStaticInfo); + } + return null; + } + + public static byte[] getArray(ByteBuffer byteBuffer) { + byte[] array = new byte[byteBuffer.remaining()]; + byteBuffer.get(array); + return array; + } + + // Internal methods. + + private static void setBooleanAsInt(MediaFormat format, String key, int value) { + format.setInteger(key, value != 0 ? 1 : 0); + } + + // Inlined MediaFormat.KEY_PIXEL_ASPECT_RATIO_WIDTH and MediaFormat.KEY_PIXEL_ASPECT_RATIO_HEIGHT. + @SuppressLint("InlinedApi") + private static void maybeSetPixelAspectRatio( + MediaFormat mediaFormat, float pixelWidthHeightRatio) { + mediaFormat.setFloat(KEY_PIXEL_WIDTH_HEIGHT_RATIO_FLOAT, pixelWidthHeightRatio); + int pixelAspectRatioWidth = 1; + int pixelAspectRatioHeight = 1; + // ExoPlayer extractors output the pixel aspect ratio as a float. Do our best to recreate the + // pixel aspect ratio width and height by using a large power of two factor. 
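// Illustrative arithmetic with an assumed input value: for pixelWidthHeightRatio = 0.75f the
// branch below picks pixelAspectRatioHeight = 1 << 30 = 1073741824 and
// pixelAspectRatioWidth = (int) (0.75f * 1073741824) = 805306368, so the 3:4 ratio is
// preserved essentially losslessly even though MediaFormat only accepts integer width/height.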
+ if (pixelWidthHeightRatio < 1.0f) { + pixelAspectRatioHeight = MAX_POWER_OF_TWO_INT; + pixelAspectRatioWidth = (int) (pixelWidthHeightRatio * pixelAspectRatioHeight); + } else if (pixelWidthHeightRatio > 1.0f) { + pixelAspectRatioWidth = MAX_POWER_OF_TWO_INT; + pixelAspectRatioHeight = (int) (pixelAspectRatioWidth / pixelWidthHeightRatio); + } + mediaFormat.setInteger(MediaFormat.KEY_PIXEL_ASPECT_RATIO_WIDTH, pixelAspectRatioWidth); + mediaFormat.setInteger(MediaFormat.KEY_PIXEL_ASPECT_RATIO_HEIGHT, pixelAspectRatioHeight); + } + + @SuppressLint("InlinedApi") // Inlined KEY_PCM_ENCODING. + private static void maybeSetPcmEncoding( + MediaFormat mediaFormat, @C.PcmEncoding int exoPcmEncoding) { + if (exoPcmEncoding == Format.NO_VALUE) { + return; + } + int mediaFormatPcmEncoding; + maybeSetInteger(mediaFormat, KEY_PCM_ENCODING_EXTENDED, exoPcmEncoding); + switch (exoPcmEncoding) { + case C.ENCODING_PCM_8BIT: + mediaFormatPcmEncoding = AudioFormat.ENCODING_PCM_8BIT; + break; + case C.ENCODING_PCM_16BIT: + mediaFormatPcmEncoding = AudioFormat.ENCODING_PCM_16BIT; + break; + case C.ENCODING_PCM_FLOAT: + mediaFormatPcmEncoding = AudioFormat.ENCODING_PCM_FLOAT; + break; + case C.ENCODING_PCM_24BIT: + mediaFormatPcmEncoding = AudioFormat.ENCODING_PCM_24BIT_PACKED; + break; + case C.ENCODING_PCM_32BIT: + mediaFormatPcmEncoding = AudioFormat.ENCODING_PCM_32BIT; + break; + case C.ENCODING_INVALID: + mediaFormatPcmEncoding = AudioFormat.ENCODING_INVALID; + break; + case Format.NO_VALUE: + case C.ENCODING_PCM_16BIT_BIG_ENDIAN: + default: + // No matching value. Do nothing. + return; + } + mediaFormat.setInteger(MediaFormat.KEY_PCM_ENCODING, mediaFormatPcmEncoding); + } + + /** Whether this is a valid {@link C.ColorSpace} instance. */ + private static boolean isValidColorSpace(int colorSpace) { + // LINT.IfChange(color_space) + return colorSpace == C.COLOR_SPACE_BT601 + || colorSpace == C.COLOR_SPACE_BT709 + || colorSpace == C.COLOR_SPACE_BT2020 + || colorSpace == Format.NO_VALUE; + } + + /** Whether this is a valid {@link C.ColorRange} instance. */ + private static boolean isValidColorRange(int colorRange) { + // LINT.IfChange(color_range) + return colorRange == C.COLOR_RANGE_LIMITED + || colorRange == C.COLOR_RANGE_FULL + || colorRange == Format.NO_VALUE; + } + + /** Whether this is a valid {@link C.ColorTransfer} instance. */ + private static boolean isValidColorTransfer(int colorTransfer) { + // LINT.IfChange(color_transfer) + return colorTransfer == C.COLOR_TRANSFER_SDR + || colorTransfer == C.COLOR_TRANSFER_ST2084 + || colorTransfer == C.COLOR_TRANSFER_HLG + || colorTransfer == Format.NO_VALUE; + } + + private MediaFormatUtil() {} +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/MimeTypes.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/MimeTypes.java index b1480806f6..7a19de56d2 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/MimeTypes.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/MimeTypes.java @@ -17,20 +17,26 @@ import android.text.TextUtils; import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.C; +import com.google.common.base.Ascii; import java.util.ArrayList; +import java.util.regex.Matcher; +import java.util.regex.Pattern; -/** - * Defines common MIME types and helper methods. - */ +/** Defines common MIME types and helper methods. 
*/ public final class MimeTypes { public static final String BASE_TYPE_VIDEO = "video"; public static final String BASE_TYPE_AUDIO = "audio"; public static final String BASE_TYPE_TEXT = "text"; + public static final String BASE_TYPE_IMAGE = "image"; public static final String BASE_TYPE_APPLICATION = "application"; + // video/ MIME types + public static final String VIDEO_MP4 = BASE_TYPE_VIDEO + "/mp4"; + public static final String VIDEO_MATROSKA = BASE_TYPE_VIDEO + "/x-matroska"; public static final String VIDEO_WEBM = BASE_TYPE_VIDEO + "/webm"; public static final String VIDEO_H263 = BASE_TYPE_VIDEO + "/3gpp"; public static final String VIDEO_H264 = BASE_TYPE_VIDEO + "/avc"; @@ -38,20 +44,33 @@ public final class MimeTypes { public static final String VIDEO_VP8 = BASE_TYPE_VIDEO + "/x-vnd.on2.vp8"; public static final String VIDEO_VP9 = BASE_TYPE_VIDEO + "/x-vnd.on2.vp9"; public static final String VIDEO_AV1 = BASE_TYPE_VIDEO + "/av01"; + public static final String VIDEO_MP2T = BASE_TYPE_VIDEO + "/mp2t"; public static final String VIDEO_MP4V = BASE_TYPE_VIDEO + "/mp4v-es"; public static final String VIDEO_MPEG = BASE_TYPE_VIDEO + "/mpeg"; + public static final String VIDEO_PS = BASE_TYPE_VIDEO + "/mp2p"; public static final String VIDEO_MPEG2 = BASE_TYPE_VIDEO + "/mpeg2"; public static final String VIDEO_VC1 = BASE_TYPE_VIDEO + "/wvc1"; public static final String VIDEO_DIVX = BASE_TYPE_VIDEO + "/divx"; + public static final String VIDEO_FLV = BASE_TYPE_VIDEO + "/x-flv"; public static final String VIDEO_DOLBY_VISION = BASE_TYPE_VIDEO + "/dolby-vision"; + public static final String VIDEO_OGG = BASE_TYPE_VIDEO + "/ogg"; + public static final String VIDEO_AVI = BASE_TYPE_VIDEO + "/x-msvideo"; + public static final String VIDEO_MJPEG = BASE_TYPE_VIDEO + "/mjpeg"; + public static final String VIDEO_MP42 = BASE_TYPE_VIDEO + "/mp42"; + public static final String VIDEO_MP43 = BASE_TYPE_VIDEO + "/mp43"; public static final String VIDEO_UNKNOWN = BASE_TYPE_VIDEO + "/x-unknown"; + // audio/ MIME types + public static final String AUDIO_MP4 = BASE_TYPE_AUDIO + "/mp4"; public static final String AUDIO_AAC = BASE_TYPE_AUDIO + "/mp4a-latm"; + public static final String AUDIO_MATROSKA = BASE_TYPE_AUDIO + "/x-matroska"; public static final String AUDIO_WEBM = BASE_TYPE_AUDIO + "/webm"; public static final String AUDIO_MPEG = BASE_TYPE_AUDIO + "/mpeg"; public static final String AUDIO_MPEG_L1 = BASE_TYPE_AUDIO + "/mpeg-L1"; public static final String AUDIO_MPEG_L2 = BASE_TYPE_AUDIO + "/mpeg-L2"; + public static final String AUDIO_MPEGH_MHA1 = BASE_TYPE_AUDIO + "/mha1"; + public static final String AUDIO_MPEGH_MHM1 = BASE_TYPE_AUDIO + "/mhm1"; public static final String AUDIO_RAW = BASE_TYPE_AUDIO + "/raw"; public static final String AUDIO_ALAW = BASE_TYPE_AUDIO + "/g711-alaw"; public static final String AUDIO_MLAW = BASE_TYPE_AUDIO + "/g711-mlaw"; @@ -63,22 +82,40 @@ public final class MimeTypes { public static final String AUDIO_DTS = BASE_TYPE_AUDIO + "/vnd.dts"; public static final String AUDIO_DTS_HD = BASE_TYPE_AUDIO + "/vnd.dts.hd"; public static final String AUDIO_DTS_EXPRESS = BASE_TYPE_AUDIO + "/vnd.dts.hd;profile=lbr"; + public static final String AUDIO_DTS_X = BASE_TYPE_AUDIO + "/vnd.dts.uhd;profile=p2"; public static final String AUDIO_VORBIS = BASE_TYPE_AUDIO + "/vorbis"; public static final String AUDIO_OPUS = BASE_TYPE_AUDIO + "/opus"; + public static final String AUDIO_AMR = BASE_TYPE_AUDIO + "/amr"; public static final String AUDIO_AMR_NB = BASE_TYPE_AUDIO + "/3gpp"; public static final 
String AUDIO_AMR_WB = BASE_TYPE_AUDIO + "/amr-wb"; public static final String AUDIO_FLAC = BASE_TYPE_AUDIO + "/flac"; public static final String AUDIO_ALAC = BASE_TYPE_AUDIO + "/alac"; public static final String AUDIO_MSGSM = BASE_TYPE_AUDIO + "/gsm"; + public static final String AUDIO_OGG = BASE_TYPE_AUDIO + "/ogg"; public static final String AUDIO_WAV = BASE_TYPE_AUDIO + "/wav"; public static final String AUDIO_X_WAV = BASE_TYPE_AUDIO + "/x-wav"; + public static final String AUDIO_MIDI = BASE_TYPE_AUDIO + "/midi"; + + public static final String AUDIO_EXOPLAYER_MIDI = BASE_TYPE_AUDIO + "/x-exoplayer-midi"; + public static final String AUDIO_UNKNOWN = BASE_TYPE_AUDIO + "/x-unknown"; + // text/ MIME types + public static final String TEXT_VTT = BASE_TYPE_TEXT + "/vtt"; public static final String TEXT_SSA = BASE_TYPE_TEXT + "/x-ssa"; + public static final String TEXT_EXOPLAYER_CUES = BASE_TYPE_TEXT + "/x-exoplayer-cues"; + + public static final String TEXT_UNKNOWN = BASE_TYPE_TEXT + "/x-unknown"; + + // application/ MIME types + public static final String APPLICATION_MP4 = BASE_TYPE_APPLICATION + "/mp4"; public static final String APPLICATION_WEBM = BASE_TYPE_APPLICATION + "/webm"; + + public static final String APPLICATION_MATROSKA = BASE_TYPE_APPLICATION + "/x-matroska"; + public static final String APPLICATION_MPD = BASE_TYPE_APPLICATION + "/dash+xml"; public static final String APPLICATION_M3U8 = BASE_TYPE_APPLICATION + "/x-mpegURL"; public static final String APPLICATION_SS = BASE_TYPE_APPLICATION + "/vnd.ms-sstr+xml"; @@ -94,25 +131,45 @@ public final class MimeTypes { public static final String APPLICATION_VOBSUB = BASE_TYPE_APPLICATION + "/vobsub"; public static final String APPLICATION_PGS = BASE_TYPE_APPLICATION + "/pgs"; public static final String APPLICATION_SCTE35 = BASE_TYPE_APPLICATION + "/x-scte35"; + public static final String APPLICATION_CAMERA_MOTION = BASE_TYPE_APPLICATION + "/x-camera-motion"; + public static final String APPLICATION_EMSG = BASE_TYPE_APPLICATION + "/x-emsg"; public static final String APPLICATION_DVBSUBS = BASE_TYPE_APPLICATION + "/dvbsubs"; public static final String APPLICATION_EXIF = BASE_TYPE_APPLICATION + "/x-exif"; public static final String APPLICATION_ICY = BASE_TYPE_APPLICATION + "/x-icy"; + public static final String APPLICATION_AIT = BASE_TYPE_APPLICATION + "/vnd.dvb.ait"; + public static final String APPLICATION_RTSP = BASE_TYPE_APPLICATION + "/x-rtsp"; + + // image/ MIME types + + public static final String IMAGE_JPEG = BASE_TYPE_IMAGE + "/jpeg"; + + /** + * A non-standard codec string for E-AC3-JOC. Use of this constant allows for disambiguation + * between regular E-AC3 ("ec-3") and E-AC3-JOC ("ec+3") streams from the codec string alone. The + * standard is to use "ec-3" for both, as per the MP4RA + * registered codec types. + */ + public static final String CODEC_E_AC3_JOC = "ec+3"; private static final ArrayList customMimeTypes = new ArrayList<>(); + private static final Pattern MP4A_RFC_6381_CODEC_PATTERN = + Pattern.compile("^mp4a\\.([a-zA-Z0-9]{2})(?:\\.([0-9]{1,2}))?$"); + /** * Registers a custom MIME type. Most applications do not need to call this method, as handling of * standard MIME types is built in. These built-in MIME types take precedence over any registered * via this method. If this method is used, it must be called before creating any player(s). * * @param mimeType The custom MIME type to register. - * @param codecPrefix The RFC 6381-style codec string prefix associated with the MIME type. 
- * @param trackType The {@link C}{@code .TRACK_TYPE_*} constant associated with the MIME type. - * This value is ignored if the top-level type of {@code mimeType} is audio, video or text. + * @param codecPrefix The RFC 6381 codec string prefix associated with the MIME type. + * @param trackType The {@link C.TrackType track type} associated with the MIME type. This value + * is ignored if the top-level type of {@code mimeType} is audio, video or text. */ - public static void registerCustomMimeType(String mimeType, String codecPrefix, int trackType) { + public static void registerCustomMimeType( + String mimeType, String codecPrefix, @C.TrackType int trackType) { CustomMimeType customMimeType = new CustomMimeType(mimeType, codecPrefix, trackType); int customMimeTypeCount = customMimeTypes.size(); for (int i = 0; i < customMimeTypeCount; i++) { @@ -134,26 +191,41 @@ public static boolean isVideo(@Nullable String mimeType) { return BASE_TYPE_VIDEO.equals(getTopLevelType(mimeType)); } - /** Returns whether the given string is a text MIME type. */ + /** + * Returns whether the given string is a text MIME type, including known text types that use + * "application" as their base type. + */ public static boolean isText(@Nullable String mimeType) { - return BASE_TYPE_TEXT.equals(getTopLevelType(mimeType)); + return BASE_TYPE_TEXT.equals(getTopLevelType(mimeType)) + || APPLICATION_CEA608.equals(mimeType) + || APPLICATION_CEA708.equals(mimeType) + || APPLICATION_MP4CEA608.equals(mimeType) + || APPLICATION_SUBRIP.equals(mimeType) + || APPLICATION_TTML.equals(mimeType) + || APPLICATION_TX3G.equals(mimeType) + || APPLICATION_MP4VTT.equals(mimeType) + || APPLICATION_RAWCC.equals(mimeType) + || APPLICATION_VOBSUB.equals(mimeType) + || APPLICATION_PGS.equals(mimeType) + || APPLICATION_DVBSUBS.equals(mimeType); } - /** Returns whether the given string is an application MIME type. */ - public static boolean isApplication(@Nullable String mimeType) { - return BASE_TYPE_APPLICATION.equals(getTopLevelType(mimeType)); + /** Returns whether the given string is an image MIME type. */ + public static boolean isImage(@Nullable String mimeType) { + return BASE_TYPE_IMAGE.equals(getTopLevelType(mimeType)); } /** - * Returns true if it is known that all samples in a stream of the given sample MIME type are + * Returns true if it is known that all samples in a stream of the given MIME type and codec are * guaranteed to be sync samples (i.e., {@link C#BUFFER_FLAG_KEY_FRAME} is guaranteed to be set on * every sample). * - * @param mimeType The sample MIME type. - * @return True if it is known that all samples in a stream of the given sample MIME type are - * guaranteed to be sync samples. False otherwise, including if {@code null} is passed. + * @param mimeType The MIME type of the stream. + * @param codec The RFC 6381 codec string of the stream, or {@code null} if unknown. + * @return Whether it is known that all samples in the stream are guaranteed to be sync samples. 
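To make the codec-dependent behaviour concrete, here is a small illustrative check; the demo class and the specific codec strings are assumptions, while the MimeTypes and C constants and methods are the ones from this file.

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.MimeTypes;

final class AacSyncSampleCheck {
  public static void main(String[] args) {
    // AAC-LC (audio OTI 2): every sample is a sync sample, so seeking can land on any of them.
    boolean lc = MimeTypes.allSamplesAreSyncSamples(MimeTypes.AUDIO_AAC, "mp4a.40.2");   // true

    // xHE-AAC (audio OTI 42): frames may depend on earlier ones, so this returns false.
    boolean xhe = MimeTypes.allSamplesAreSyncSamples(MimeTypes.AUDIO_AAC, "mp4a.40.42"); // false

    // The same codec string also disambiguates the C.Encoding value.
    int encoding = MimeTypes.getEncoding(MimeTypes.AUDIO_AAC, "mp4a.40.2"); // C.ENCODING_AAC_LC

    System.out.println(lc + " " + xhe + " " + (encoding == C.ENCODING_AAC_LC));
  }
}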
*/ - public static boolean allSamplesAreSyncSamples(@Nullable String mimeType) { + public static boolean allSamplesAreSyncSamples( + @Nullable String mimeType, @Nullable String codec) { if (mimeType == null) { return false; } @@ -168,22 +240,34 @@ public static boolean allSamplesAreSyncSamples(@Nullable String mimeType) { case AUDIO_RAW: case AUDIO_ALAW: case AUDIO_MLAW: - case AUDIO_OPUS: case AUDIO_FLAC: case AUDIO_AC3: case AUDIO_E_AC3: case AUDIO_E_AC3_JOC: return true; + case AUDIO_AAC: + if (codec == null) { + return false; + } + @Nullable Mp4aObjectType objectType = getObjectTypeFromMp4aRFC6381CodecString(codec); + if (objectType == null) { + return false; + } + @C.Encoding int encoding = objectType.getEncoding(); + // xHE-AAC is an exception in which it's not true that all samples will be sync samples. + // Also return false for ENCODING_INVALID, which indicates we weren't able to parse the + // encoding from the codec string. + return encoding != C.ENCODING_INVALID && encoding != C.ENCODING_AAC_XHE; default: return false; } } /** - * Derives a video sample mimeType from a codecs attribute. + * Returns the first video MIME type derived from an RFC 6381 codecs string. * - * @param codecs The codecs attribute. - * @return The derived video mimeType, or null if it could not be derived. + * @param codecs An RFC 6381 codecs string. + * @return The first derived video MIME type, or {@code null}. */ @Nullable public static String getVideoMediaMimeType(@Nullable String codecs) { @@ -201,10 +285,54 @@ public static String getVideoMediaMimeType(@Nullable String codecs) { } /** - * Derives a audio sample mimeType from a codecs attribute. + * Returns whether the given {@code codecs} string contains a codec which corresponds to the given + * {@code mimeType}. * - * @param codecs The codecs attribute. - * @return The derived audio mimeType, or null if it could not be derived. + * @param codecs An RFC 6381 codecs string. + * @param mimeType A MIME type to look for. + * @return Whether the given {@code codecs} string contains a codec which corresponds to the given + * {@code mimeType}. + */ + public static boolean containsCodecsCorrespondingToMimeType( + @Nullable String codecs, String mimeType) { + return getCodecsCorrespondingToMimeType(codecs, mimeType) != null; + } + + /** + * Returns a subsequence of {@code codecs} containing the codec strings that correspond to the + * given {@code mimeType}. Returns null if {@code mimeType} is null, {@code codecs} is null, or + * {@code codecs} does not contain a codec that corresponds to {@code mimeType}. + * + * @param codecs An RFC 6381 codecs string. + * @param mimeType A MIME type to look for. + * @return A subsequence of {@code codecs} containing the codec strings that correspond to the + * given {@code mimeType}. Returns null if {@code mimeType} is null, {@code codecs} is null, + * or {@code codecs} does not contain a codec that corresponds to {@code mimeType}. + */ + @Nullable + public static String getCodecsCorrespondingToMimeType( + @Nullable String codecs, @Nullable String mimeType) { + if (codecs == null || mimeType == null) { + return null; + } + String[] codecList = Util.splitCodecs(codecs); + StringBuilder builder = new StringBuilder(); + for (String codec : codecList) { + if (mimeType.equals(getMediaMimeType(codec))) { + if (builder.length() > 0) { + builder.append(","); + } + builder.append(codec); + } + } + return builder.length() > 0 ? 
builder.toString() : null; + } + + /** + * Returns the first audio MIME type derived from an RFC 6381 codecs string. + * + * @param codecs An RFC 6381 codecs string. + * @return The first derived audio MIME type, or {@code null}. */ @Nullable public static String getAudioMediaMimeType(@Nullable String codecs) { @@ -222,17 +350,39 @@ public static String getAudioMediaMimeType(@Nullable String codecs) { } /** - * Derives a mimeType from a codec identifier, as defined in RFC 6381. + * Returns the first text MIME type derived from an RFC 6381 codecs string. + * + * @param codecs An RFC 6381 codecs string. + * @return The first derived text MIME type, or {@code null}. + */ + @Nullable + public static String getTextMediaMimeType(@Nullable String codecs) { + if (codecs == null) { + return null; + } + String[] codecList = Util.splitCodecs(codecs); + for (String codec : codecList) { + @Nullable String mimeType = getMediaMimeType(codec); + if (mimeType != null && isText(mimeType)) { + return mimeType; + } + } + return null; + } + + /** + * Returns the MIME type corresponding to an RFC 6381 codec string, or {@code null} if it could + * not be determined. * - * @param codec The codec identifier to derive. - * @return The mimeType, or null if it could not be derived. + * @param codec An RFC 6381 codec string. + * @return The corresponding MIME type, or {@code null} if it could not be determined. */ @Nullable public static String getMediaMimeType(@Nullable String codec) { if (codec == null) { return null; } - codec = Util.toLowerInvariant(codec.trim()); + codec = Ascii.toLowerCase(codec.trim()); if (codec.startsWith("avc1") || codec.startsWith("avc3")) { return MimeTypes.VIDEO_H264; } else if (codec.startsWith("hev1") || codec.startsWith("hvc1")) { @@ -251,30 +401,32 @@ public static String getMediaMimeType(@Nullable String codec) { } else if (codec.startsWith("mp4a")) { @Nullable String mimeType = null; if (codec.startsWith("mp4a.")) { - String objectTypeString = codec.substring(5); // remove the 'mp4a.' prefix - if (objectTypeString.length() >= 2) { - try { - String objectTypeHexString = Util.toUpperInvariant(objectTypeString.substring(0, 2)); - int objectTypeInt = Integer.parseInt(objectTypeHexString, 16); - mimeType = getMimeTypeFromMp4ObjectType(objectTypeInt); - } catch (NumberFormatException ignored) { - // Ignored. - } + @Nullable Mp4aObjectType objectType = getObjectTypeFromMp4aRFC6381CodecString(codec); + if (objectType != null) { + mimeType = getMimeTypeFromMp4ObjectType(objectType.objectTypeIndication); } } return mimeType == null ? 
MimeTypes.AUDIO_AAC : mimeType; + } else if (codec.startsWith("mha1")) { + return MimeTypes.AUDIO_MPEGH_MHA1; + } else if (codec.startsWith("mhm1")) { + return MimeTypes.AUDIO_MPEGH_MHM1; } else if (codec.startsWith("ac-3") || codec.startsWith("dac3")) { return MimeTypes.AUDIO_AC3; } else if (codec.startsWith("ec-3") || codec.startsWith("dec3")) { return MimeTypes.AUDIO_E_AC3; - } else if (codec.startsWith("ec+3")) { + } else if (codec.startsWith(CODEC_E_AC3_JOC)) { return MimeTypes.AUDIO_E_AC3_JOC; } else if (codec.startsWith("ac-4") || codec.startsWith("dac4")) { return MimeTypes.AUDIO_AC4; - } else if (codec.startsWith("dtsc") || codec.startsWith("dtse")) { + } else if (codec.startsWith("dtsc")) { return MimeTypes.AUDIO_DTS; + } else if (codec.startsWith("dtse")) { + return MimeTypes.AUDIO_DTS_EXPRESS; } else if (codec.startsWith("dtsh") || codec.startsWith("dtsl")) { return MimeTypes.AUDIO_DTS_HD; + } else if (codec.startsWith("dtsx")) { + return MimeTypes.AUDIO_DTS_X; } else if (codec.startsWith("opus")) { return MimeTypes.AUDIO_OPUS; } else if (codec.startsWith("vorbis")) { @@ -285,17 +437,21 @@ public static String getMediaMimeType(@Nullable String codec) { return MimeTypes.APPLICATION_TTML; } else if (codec.startsWith("wvtt")) { return MimeTypes.TEXT_VTT; + } else if (codec.contains("cea708")) { + return MimeTypes.APPLICATION_CEA708; + } else if (codec.contains("eia608") || codec.contains("cea608")) { + return MimeTypes.APPLICATION_CEA608; } else { return getCustomMimeTypeForCodec(codec); } } /** - * Derives a mimeType from MP4 object type identifier, as defined in RFC 6381 and - * https://mp4ra.org/#/object_types. + * Returns the MIME type corresponding to an MP4 object type identifier, as defined in RFC 6381 + * and https://mp4ra.org/#/object_types. * - * @param objectType The objectType identifier to derive. - * @return The mimeType, or null if it could not be derived. + * @param objectType An MP4 object type identifier. + * @return The corresponding MIME type, or {@code null} if it could not be determined. */ @Nullable public static String getMimeTypeFromMp4ObjectType(int objectType) { @@ -347,27 +503,24 @@ public static String getMimeTypeFromMp4ObjectType(int objectType) { } /** - * Returns the {@link C}{@code .TRACK_TYPE_*} constant that corresponds to a specified MIME type. - * {@link C#TRACK_TYPE_UNKNOWN} if the MIME type is not known or the mapping cannot be - * established. + * Returns the {@link C.TrackType track type} constant corresponding to a specified MIME type, + * which may be {@link C#TRACK_TYPE_UNKNOWN} if it could not be determined. * - * @param mimeType The MIME type. - * @return The {@link C}{@code .TRACK_TYPE_*} constant that corresponds to a specified MIME type. + * @param mimeType A MIME type. + * @return The corresponding {@link C.TrackType track type}, which may be {@link + * C#TRACK_TYPE_UNKNOWN} if it could not be determined. 
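A short, self-contained sketch of the codecs-attribute helpers touched in this hunk; the sample codecs string is invented, the MimeTypes/C constants and methods are real.

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.MimeTypes;

final class CodecsAttributeDemo {
  public static void main(String[] args) {
    String codecs = "avc1.640028,mp4a.40.2";

    // Derive per-track MIME types from the combined RFC 6381 codecs attribute.
    String video = MimeTypes.getVideoMediaMimeType(codecs); // "video/avc"
    String audio = MimeTypes.getAudioMediaMimeType(codecs); // "audio/mp4a-latm"

    // Keep only the codec entries matching a given sample MIME type.
    String aacCodecs =
        MimeTypes.getCodecsCorrespondingToMimeType(codecs, MimeTypes.AUDIO_AAC); // "mp4a.40.2"

    // With the reworked isText(), CEA-608 now maps straight to the text track type.
    boolean isTextTrack =
        MimeTypes.getTrackType(MimeTypes.APPLICATION_CEA608) == C.TRACK_TYPE_TEXT; // true

    System.out.println(video + " " + audio + " " + aacCodecs + " " + isTextTrack);
  }
}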
*/ - public static int getTrackType(@Nullable String mimeType) { + public static @C.TrackType int getTrackType(@Nullable String mimeType) { if (TextUtils.isEmpty(mimeType)) { return C.TRACK_TYPE_UNKNOWN; } else if (isAudio(mimeType)) { return C.TRACK_TYPE_AUDIO; } else if (isVideo(mimeType)) { return C.TRACK_TYPE_VIDEO; - } else if (isText(mimeType) || APPLICATION_CEA608.equals(mimeType) - || APPLICATION_CEA708.equals(mimeType) || APPLICATION_MP4CEA608.equals(mimeType) - || APPLICATION_SUBRIP.equals(mimeType) || APPLICATION_TTML.equals(mimeType) - || APPLICATION_TX3G.equals(mimeType) || APPLICATION_MP4VTT.equals(mimeType) - || APPLICATION_RAWCC.equals(mimeType) || APPLICATION_VOBSUB.equals(mimeType) - || APPLICATION_PGS.equals(mimeType) || APPLICATION_DVBSUBS.equals(mimeType)) { + } else if (isText(mimeType)) { return C.TRACK_TYPE_TEXT; + } else if (isImage(mimeType)) { + return C.TRACK_TYPE_IMAGE; } else if (APPLICATION_ID3.equals(mimeType) || APPLICATION_EMSG.equals(mimeType) || APPLICATION_SCTE35.equals(mimeType)) { @@ -380,17 +533,27 @@ public static int getTrackType(@Nullable String mimeType) { } /** - * Returns the {@link C}{@code .ENCODING_*} constant that corresponds to specified MIME type, if - * it is an encoded (non-PCM) audio format, or {@link C#ENCODING_INVALID} otherwise. + * Returns the {@link C.Encoding} constant corresponding to the specified audio MIME type and RFC + * 6381 codec string, or {@link C#ENCODING_INVALID} if the corresponding {@link C.Encoding} cannot + * be determined. * - * @param mimeType The MIME type. - * @return The {@link C}{@code .ENCODING_*} constant that corresponds to a specified MIME type, or - * {@link C#ENCODING_INVALID}. + * @param mimeType A MIME type. + * @param codec An RFC 6381 codec string, or {@code null} if unknown or not applicable. + * @return The corresponding {@link C.Encoding}, or {@link C#ENCODING_INVALID}. */ - public static @C.Encoding int getEncoding(String mimeType) { + public static @C.Encoding int getEncoding(String mimeType, @Nullable String codec) { switch (mimeType) { case MimeTypes.AUDIO_MPEG: return C.ENCODING_MP3; + case MimeTypes.AUDIO_AAC: + if (codec == null) { + return C.ENCODING_INVALID; + } + @Nullable Mp4aObjectType objectType = getObjectTypeFromMp4aRFC6381CodecString(codec); + if (objectType == null) { + return C.ENCODING_INVALID; + } + return objectType.getEncoding(); case MimeTypes.AUDIO_AC3: return C.ENCODING_AC3; case MimeTypes.AUDIO_E_AC3: @@ -405,6 +568,8 @@ public static int getTrackType(@Nullable String mimeType) { return C.ENCODING_DTS_HD; case MimeTypes.AUDIO_TRUEHD: return C.ENCODING_DOLBY_TRUEHD; + case MimeTypes.AUDIO_OPUS: + return C.ENCODING_OPUS; default: return C.ENCODING_INVALID; } @@ -413,13 +578,46 @@ public static int getTrackType(@Nullable String mimeType) { /** * Equivalent to {@code getTrackType(getMediaMimeType(codec))}. * - * @param codec The codec. - * @return The {@link C}{@code .TRACK_TYPE_*} constant that corresponds to a specified codec. + * @param codec An RFC 6381 codec string. + * @return The corresponding {@link C.TrackType track type}, which may be {@link + * C#TRACK_TYPE_UNKNOWN} if it could not be determined. */ - public static int getTrackTypeOfCodec(String codec) { + public static @C.TrackType int getTrackTypeOfCodec(String codec) { return getTrackType(getMediaMimeType(codec)); } + /** + * Normalizes the MIME type provided so that equivalent MIME types are uniquely represented. + * + * @param mimeType A MIME type to normalize. 
+ * @return The normalized MIME type, or the argument MIME type if its normalized form is unknown. + */ + public static String normalizeMimeType(String mimeType) { + switch (mimeType) { + case BASE_TYPE_AUDIO + "/x-flac": + return AUDIO_FLAC; + case BASE_TYPE_AUDIO + "/mp3": + return AUDIO_MPEG; + case BASE_TYPE_AUDIO + "/x-wav": + return AUDIO_WAV; + default: + return mimeType; + } + } + + /** Returns whether the given {@code mimeType} is a Matroska MIME type, including WebM. */ + public static boolean isMatroska(@Nullable String mimeType) { + if (mimeType == null) { + return false; + } + return mimeType.startsWith(MimeTypes.VIDEO_WEBM) + || mimeType.startsWith(MimeTypes.AUDIO_WEBM) + || mimeType.startsWith(MimeTypes.APPLICATION_WEBM) + || mimeType.startsWith(MimeTypes.VIDEO_MATROSKA) + || mimeType.startsWith(MimeTypes.AUDIO_MATROSKA) + || mimeType.startsWith(MimeTypes.APPLICATION_MATROSKA); + } + /** * Returns the top-level type of {@code mimeType}, or null if {@code mimeType} is null or does not * contain a forward slash character ({@code '/'}). @@ -448,7 +646,7 @@ private static String getCustomMimeTypeForCodec(String codec) { return null; } - private static int getTrackTypeForCustomMimeType(String mimeType) { + private static @C.TrackType int getTrackTypeForCustomMimeType(String mimeType) { int customMimeTypeCount = customMimeTypes.size(); for (int i = 0; i < customMimeTypeCount; i++) { CustomMimeType customMimeType = customMimeTypes.get(i); @@ -463,12 +661,86 @@ private MimeTypes() { // Prevent instantiation. } + /** + * Returns the {@link Mp4aObjectType} of an RFC 6381 MP4 audio codec string. + * + *

      Per https://mp4ra.org/#/object_types and https://tools.ietf.org/html/rfc6381#section-3.3, an + * MP4 codec string has the form: + * + *

+   * <pre>
+   *         ~~~~~~~~~~~~~~ Object Type Indication (OTI) byte in hex
+   *    mp4a.[a-zA-Z0-9]{2}(.[0-9]{1,2})?
+   *                         ~~~~~~~~~~ audio OTI, decimal. Only for certain OTI.
+   * </pre>
      + * + * For example, mp4a.40.2 has an OTI of 0x40 and an audio OTI of 2. + * + * @param codec An RFC 6381 MP4 audio codec string. + * @return The {@link Mp4aObjectType}, or {@code null} if the input was invalid. + */ + @VisibleForTesting + @Nullable + /* package */ static Mp4aObjectType getObjectTypeFromMp4aRFC6381CodecString(String codec) { + Matcher matcher = MP4A_RFC_6381_CODEC_PATTERN.matcher(codec); + if (!matcher.matches()) { + return null; + } + String objectTypeIndicationHex = Assertions.checkNotNull(matcher.group(1)); + @Nullable String audioObjectTypeIndicationDec = matcher.group(2); + int objectTypeIndication; + int audioObjectTypeIndication = 0; + try { + objectTypeIndication = Integer.parseInt(objectTypeIndicationHex, 16); + if (audioObjectTypeIndicationDec != null) { + audioObjectTypeIndication = Integer.parseInt(audioObjectTypeIndicationDec); + } + } catch (NumberFormatException e) { + return null; + } + return new Mp4aObjectType(objectTypeIndication, audioObjectTypeIndication); + } + + /** An MP4A Object Type Indication (OTI) and its optional audio OTI is defined by RFC 6381. */ + @VisibleForTesting + /* package */ static final class Mp4aObjectType { + /** The Object Type Indication of the MP4A codec. */ + public final int objectTypeIndication; + /** The Audio Object Type Indication of the MP4A codec, or 0 if it is absent. */ + public final int audioObjectTypeIndication; + + public Mp4aObjectType(int objectTypeIndication, int audioObjectTypeIndication) { + this.objectTypeIndication = objectTypeIndication; + this.audioObjectTypeIndication = audioObjectTypeIndication; + } + + /** Returns the encoding for {@link #audioObjectTypeIndication}. */ + public @C.Encoding int getEncoding() { + // See AUDIO_OBJECT_TYPE_AAC_* constants in AacUtil. + switch (audioObjectTypeIndication) { + case 2: + return C.ENCODING_AAC_LC; + case 5: + return C.ENCODING_AAC_HE_V1; + case 29: + return C.ENCODING_AAC_HE_V2; + case 42: + return C.ENCODING_AAC_XHE; + case 23: + return C.ENCODING_AAC_ELD; + case 22: + return C.ENCODING_AAC_ER_BSAC; + default: + return C.ENCODING_INVALID; + } + } + } + private static final class CustomMimeType { public final String mimeType; public final String codecPrefix; - public final int trackType; + public final @C.TrackType int trackType; - public CustomMimeType(String mimeType, String codecPrefix, int trackType) { + public CustomMimeType(String mimeType, String codecPrefix, @C.TrackType int trackType) { this.mimeType = mimeType; this.codecPrefix = codecPrefix; this.trackType = trackType; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/NalUnitUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/NalUnitUtil.java index 05585d5301..b2232b0a5e 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/NalUnitUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/NalUnitUtil.java @@ -15,28 +15,44 @@ */ package com.google.android.exoplayer2.util; +import static java.lang.Math.min; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; import java.nio.ByteBuffer; import java.util.Arrays; -/** - * Utility methods for handling H.264/AVC and H.265/HEVC NAL units. - */ +/** Utility methods for handling H.264/AVC and H.265/HEVC NAL units. */ public final class NalUnitUtil { private static final String TAG = "NalUnitUtil"; - /** - * Holds data parsed from a sequence parameter set NAL unit. - */ + /** Coded slice of a non-IDR picture. 
*/ + public static final int NAL_UNIT_TYPE_NON_IDR = 1; + /** Coded slice data partition A. */ + public static final int NAL_UNIT_TYPE_PARTITION_A = 2; + /** Coded slice of an IDR picture. */ + public static final int NAL_UNIT_TYPE_IDR = 5; + /** Supplemental enhancement information. */ + public static final int NAL_UNIT_TYPE_SEI = 6; + /** Sequence parameter set. */ + public static final int NAL_UNIT_TYPE_SPS = 7; + /** Picture parameter set. */ + public static final int NAL_UNIT_TYPE_PPS = 8; + /** Access unit delimiter. */ + public static final int NAL_UNIT_TYPE_AUD = 9; + + /** Holds data parsed from a H.264 sequence parameter set NAL unit. */ public static final class SpsData { public final int profileIdc; public final int constraintsFlagsAndReservedZero2Bits; public final int levelIdc; public final int seqParameterSetId; + public final int maxNumRefFrames; public final int width; public final int height; - public final float pixelWidthAspectRatio; + public final float pixelWidthHeightRatio; public final boolean separateColorPlaneFlag; public final boolean frameMbsOnlyFlag; public final int frameNumLength; @@ -49,9 +65,10 @@ public SpsData( int constraintsFlagsAndReservedZero2Bits, int levelIdc, int seqParameterSetId, + int maxNumRefFrames, int width, int height, - float pixelWidthAspectRatio, + float pixelWidthHeightRatio, boolean separateColorPlaneFlag, boolean frameMbsOnlyFlag, int frameNumLength, @@ -62,9 +79,10 @@ public SpsData( this.constraintsFlagsAndReservedZero2Bits = constraintsFlagsAndReservedZero2Bits; this.levelIdc = levelIdc; this.seqParameterSetId = seqParameterSetId; + this.maxNumRefFrames = maxNumRefFrames; this.width = width; this.height = height; - this.pixelWidthAspectRatio = pixelWidthAspectRatio; + this.pixelWidthHeightRatio = pixelWidthHeightRatio; this.separateColorPlaneFlag = separateColorPlaneFlag; this.frameMbsOnlyFlag = frameMbsOnlyFlag; this.frameNumLength = frameNumLength; @@ -72,25 +90,61 @@ public SpsData( this.picOrderCntLsbLength = picOrderCntLsbLength; this.deltaPicOrderAlwaysZeroFlag = deltaPicOrderAlwaysZeroFlag; } + } + /** Holds data parsed from a H.265 sequence parameter set NAL unit. */ + public static final class H265SpsData { + + public final int generalProfileSpace; + public final boolean generalTierFlag; + public final int generalProfileIdc; + public final int generalProfileCompatibilityFlags; + public final int[] constraintBytes; + public final int generalLevelIdc; + public final int seqParameterSetId; + public final int width; + public final int height; + public final float pixelWidthHeightRatio; + + public H265SpsData( + int generalProfileSpace, + boolean generalTierFlag, + int generalProfileIdc, + int generalProfileCompatibilityFlags, + int[] constraintBytes, + int generalLevelIdc, + int seqParameterSetId, + int width, + int height, + float pixelWidthHeightRatio) { + this.generalProfileSpace = generalProfileSpace; + this.generalTierFlag = generalTierFlag; + this.generalProfileIdc = generalProfileIdc; + this.generalProfileCompatibilityFlags = generalProfileCompatibilityFlags; + this.constraintBytes = constraintBytes; + this.generalLevelIdc = generalLevelIdc; + this.seqParameterSetId = seqParameterSetId; + this.width = width; + this.height = height; + this.pixelWidthHeightRatio = pixelWidthHeightRatio; + } } - /** - * Holds data parsed from a picture parameter set NAL unit. - */ + /** Holds data parsed from a picture parameter set NAL unit. 
*/ public static final class PpsData { public final int picParameterSetId; public final int seqParameterSetId; public final boolean bottomFieldPicOrderInFramePresentFlag; - public PpsData(int picParameterSetId, int seqParameterSetId, + public PpsData( + int picParameterSetId, + int seqParameterSetId, boolean bottomFieldPicOrderInFramePresentFlag) { this.picParameterSetId = picParameterSetId; this.seqParameterSetId = seqParameterSetId; this.bottomFieldPicOrderInFramePresentFlag = bottomFieldPicOrderInFramePresentFlag; } - } /** Four initial bytes that must prefix NAL units for decoding. */ @@ -99,25 +153,26 @@ public PpsData(int picParameterSetId, int seqParameterSetId, /** Value for aspect_ratio_idc indicating an extended aspect ratio, in H.264 and H.265 SPSs. */ public static final int EXTENDED_SAR = 0xFF; /** Aspect ratios indexed by aspect_ratio_idc, in H.264 and H.265 SPSs. */ - public static final float[] ASPECT_RATIO_IDC_VALUES = new float[] { - 1f /* Unspecified. Assume square */, - 1f, - 12f / 11f, - 10f / 11f, - 16f / 11f, - 40f / 33f, - 24f / 11f, - 20f / 11f, - 32f / 11f, - 80f / 33f, - 18f / 11f, - 15f / 11f, - 64f / 33f, - 160f / 99f, - 4f / 3f, - 3f / 2f, - 2f - }; + public static final float[] ASPECT_RATIO_IDC_VALUES = + new float[] { + 1f /* Unspecified. Assume square */, + 1f, + 12f / 11f, + 10f / 11f, + 16f / 11f, + 40f / 33f, + 24f / 11f, + 20f / 11f, + 32f / 11f, + 80f / 33f, + 18f / 11f, + 15f / 11f, + 64f / 33f, + 160f / 99f, + 4f / 3f, + 3f / 2f, + 2f + }; private static final int H264_NAL_UNIT_TYPE_SEI = 6; // Supplemental enhancement information private static final int H264_NAL_UNIT_TYPE_SPS = 7; // Sequence parameter set @@ -132,10 +187,10 @@ public PpsData(int picParameterSetId, int seqParameterSetId, private static int[] scratchEscapePositions = new int[10]; /** - * Unescapes {@code data} up to the specified limit, replacing occurrences of [0, 0, 3] with - * [0, 0]. The unescaped data is returned in-place, with the return value indicating its length. - *
<p>
      - * Executions of this method are mutually exclusive, so it should not be called with very large + * Unescapes {@code data} up to the specified limit, replacing occurrences of [0, 0, 3] with [0, + * 0]. The unescaped data is returned in-place, with the return value indicating its length. + * + *
<p>
      Executions of this method are mutually exclusive, so it should not be called with very large * buffers. * * @param data The data to unescape. @@ -151,8 +206,8 @@ public static int unescapeStream(byte[] data, int limit) { if (position < limit) { if (scratchEscapePositions.length <= scratchEscapeCount) { // Grow scratchEscapePositions to hold a larger number of positions. - scratchEscapePositions = Arrays.copyOf(scratchEscapePositions, - scratchEscapePositions.length * 2); + scratchEscapePositions = + Arrays.copyOf(scratchEscapePositions, scratchEscapePositions.length * 2); } scratchEscapePositions[scratchEscapeCount++] = position; position += 3; @@ -181,9 +236,9 @@ public static int unescapeStream(byte[] data, int limit) { /** * Discards data from the buffer up to the first SPS, where {@code data.position()} is interpreted * as the length of the buffer. - *
<p>
      - * When the method returns, {@code data.position()} will contain the new length of the buffer. If - * the buffer is not empty it is guaranteed to start with an SPS. + * + *
<p>
      When the method returns, {@code data.position()} will contain the new length of the buffer. + * If the buffer is not empty it is guaranteed to start with an SPS. * * @param data Buffer containing start code delimited NAL units. */ @@ -219,15 +274,16 @@ public static void discardToSps(ByteBuffer data) { * Returns whether the NAL unit with the specified header contains supplemental enhancement * information. * - * @param mimeType The sample MIME type. + * @param mimeType The sample MIME type, or {@code null} if unknown. * @param nalUnitHeaderFirstByte The first byte of nal_unit(). - * @return Whether the NAL unit with the specified header is an SEI NAL unit. + * @return Whether the NAL unit with the specified header is an SEI NAL unit. False is returned if + * the {@code MimeType} is {@code null}. */ - public static boolean isNalUnitSei(String mimeType, byte nalUnitHeaderFirstByte) { + public static boolean isNalUnitSei(@Nullable String mimeType, byte nalUnitHeaderFirstByte) { return (MimeTypes.VIDEO_H264.equals(mimeType) - && (nalUnitHeaderFirstByte & 0x1F) == H264_NAL_UNIT_TYPE_SEI) + && (nalUnitHeaderFirstByte & 0x1F) == H264_NAL_UNIT_TYPE_SEI) || (MimeTypes.VIDEO_H265.equals(mimeType) - && ((nalUnitHeaderFirstByte & 0x7E) >> 1) == H265_NAL_UNIT_TYPE_PREFIX_SEI); + && ((nalUnitHeaderFirstByte & 0x7E) >> 1) == H265_NAL_UNIT_TYPE_PREFIX_SEI); } /** @@ -255,7 +311,7 @@ public static int getH265NalUnitType(byte[] data, int offset) { } /** - * Parses an SPS NAL unit using the syntax defined in ITU-T Recommendation H.264 (2013) subsection + * Parses a SPS NAL unit using the syntax defined in ITU-T Recommendation H.264 (2013) subsection * 7.3.2.1.1. * * @param nalData A buffer containing escaped SPS data. @@ -264,8 +320,20 @@ public static int getH265NalUnitType(byte[] data, int offset) { * @return A parsed representation of the SPS data. */ public static SpsData parseSpsNalUnit(byte[] nalData, int nalOffset, int nalLimit) { + return parseSpsNalUnitPayload(nalData, nalOffset + 1, nalLimit); + } + + /** + * Parses a SPS NAL unit payload (excluding the NAL unit header) using the syntax defined in ITU-T + * Recommendation H.264 (2013) subsection 7.3.2.1.1. + * + * @param nalData A buffer containing escaped SPS data. + * @param nalOffset The offset of the NAL unit payload in {@code nalData}. + * @param nalLimit The limit of the NAL unit in {@code nalData}. + * @return A parsed representation of the SPS data. 
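For callers the new split only moves the offset: parseSpsNalUnit() expects the one-byte NAL header at nalOffset and delegates with nalOffset + 1, while parseSpsNalUnitPayload() starts directly at the payload. A hedged usage sketch, assuming spsNal holds exactly one escaped H.264 SPS NAL unit with its header byte but without a start code:

```java
import com.google.android.exoplayer2.util.NalUnitUtil;
import com.google.android.exoplayer2.util.NalUnitUtil.SpsData;

final class SpsSketch {
  static void describe(byte[] spsNal) {
    SpsData sps = NalUnitUtil.parseSpsNalUnit(spsNal, /* nalOffset= */ 0, spsNal.length);
    // Equivalent, skipping the header byte ourselves:
    // SpsData sps = NalUnitUtil.parseSpsNalUnitPayload(spsNal, 1, spsNal.length);
    int displayWidth = (int) (sps.width * sps.pixelWidthHeightRatio); // renamed SAR field
    System.out.println(
        "profile=" + sps.profileIdc
            + " level=" + sps.levelIdc
            + " coded=" + sps.width + "x" + sps.height
            + " displayWidth~=" + displayWidth
            + " maxNumRefFrames=" + sps.maxNumRefFrames); // newly exposed field
  }
}
```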
+ */ + public static SpsData parseSpsNalUnitPayload(byte[] nalData, int nalOffset, int nalLimit) { ParsableNalUnitBitArray data = new ParsableNalUnitBitArray(nalData, nalOffset, nalLimit); - data.skipBits(8); // nal_unit int profileIdc = data.readBits(8); int constraintsFlagsAndReservedZero2Bits = data.readBits(8); int levelIdc = data.readBits(8); @@ -273,9 +341,16 @@ public static SpsData parseSpsNalUnit(byte[] nalData, int nalOffset, int nalLimi int chromaFormatIdc = 1; // Default is 4:2:0 boolean separateColorPlaneFlag = false; - if (profileIdc == 100 || profileIdc == 110 || profileIdc == 122 || profileIdc == 244 - || profileIdc == 44 || profileIdc == 83 || profileIdc == 86 || profileIdc == 118 - || profileIdc == 128 || profileIdc == 138) { + if (profileIdc == 100 + || profileIdc == 110 + || profileIdc == 122 + || profileIdc == 244 + || profileIdc == 44 + || profileIdc == 83 + || profileIdc == 86 + || profileIdc == 118 + || profileIdc == 128 + || profileIdc == 138) { chromaFormatIdc = data.readUnsignedExpGolombCodedInt(); if (chromaFormatIdc == 3) { separateColorPlaneFlag = data.readBit(); @@ -311,7 +386,7 @@ public static SpsData parseSpsNalUnit(byte[] nalData, int nalOffset, int nalLimi data.readUnsignedExpGolombCodedInt(); // offset_for_ref_frame[i] } } - data.readUnsignedExpGolombCodedInt(); // max_num_ref_frames + int maxNumRefFrames = data.readUnsignedExpGolombCodedInt(); // max_num_ref_frames data.skipBit(); // gaps_in_frame_num_value_allowed_flag int picWidthInMbs = data.readUnsignedExpGolombCodedInt() + 1; @@ -371,6 +446,7 @@ public static SpsData parseSpsNalUnit(byte[] nalData, int nalOffset, int nalLimi constraintsFlagsAndReservedZero2Bits, levelIdc, seqParameterSetId, + maxNumRefFrames, frameWidth, frameHeight, pixelWidthHeightRatio, @@ -382,6 +458,168 @@ public static SpsData parseSpsNalUnit(byte[] nalData, int nalOffset, int nalLimi deltaPicOrderAlwaysZeroFlag); } + /** + * Parses a H.265 SPS NAL unit using the syntax defined in ITU-T Recommendation H.265 (2019) + * subsection 7.3.2.2.1. + * + * @param nalData A buffer containing escaped SPS data. + * @param nalOffset The offset of the NAL unit header in {@code nalData}. + * @param nalLimit The limit of the NAL unit in {@code nalData}. + * @return A parsed representation of the SPS data. + */ + public static H265SpsData parseH265SpsNalUnit(byte[] nalData, int nalOffset, int nalLimit) { + return parseH265SpsNalUnitPayload(nalData, nalOffset + 2, nalLimit); + } + + /** + * Parses a H.265 SPS NAL unit payload (excluding the NAL unit header) using the syntax defined in + * ITU-T Recommendation H.265 (2019) subsection 7.3.2.2.1. + * + * @param nalData A buffer containing escaped SPS data. + * @param nalOffset The offset of the NAL unit payload in {@code nalData}. + * @param nalLimit The limit of the NAL unit in {@code nalData}. + * @return A parsed representation of the SPS data. 
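The H.265 entry point mirrors this, except that the NAL unit header is two bytes long, hence the nalOffset + 2 delegation above. A sketch under the assumption that hevcSps holds one escaped H.265 SPS NAL unit, header included:

```java
import com.google.android.exoplayer2.util.NalUnitUtil;
import com.google.android.exoplayer2.util.NalUnitUtil.H265SpsData;

final class H265SpsSketch {
  static void describe(byte[] hevcSps) {
    H265SpsData sps =
        NalUnitUtil.parseH265SpsNalUnit(hevcSps, /* nalOffset= */ 0, hevcSps.length);
    System.out.println(
        "profile=" + sps.generalProfileIdc
            + (sps.generalTierFlag ? " (high tier)" : " (main tier)")
            + " level=" + sps.generalLevelIdc
            + " size=" + sps.width + "x" + sps.height
            + " pixelWidthHeightRatio=" + sps.pixelWidthHeightRatio);
  }
}
```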
+ */ + public static H265SpsData parseH265SpsNalUnitPayload( + byte[] nalData, int nalOffset, int nalLimit) { + ParsableNalUnitBitArray data = new ParsableNalUnitBitArray(nalData, nalOffset, nalLimit); + data.skipBits(4); // sps_video_parameter_set_id + int maxSubLayersMinus1 = data.readBits(3); + data.skipBit(); // sps_temporal_id_nesting_flag + int generalProfileSpace = data.readBits(2); + boolean generalTierFlag = data.readBit(); + int generalProfileIdc = data.readBits(5); + int generalProfileCompatibilityFlags = 0; + for (int i = 0; i < 32; i++) { + if (data.readBit()) { + generalProfileCompatibilityFlags |= (1 << i); + } + } + int[] constraintBytes = new int[6]; + for (int i = 0; i < constraintBytes.length; ++i) { + constraintBytes[i] = data.readBits(8); + } + int generalLevelIdc = data.readBits(8); + int toSkip = 0; + for (int i = 0; i < maxSubLayersMinus1; i++) { + if (data.readBit()) { // sub_layer_profile_present_flag[i] + toSkip += 89; + } + if (data.readBit()) { // sub_layer_level_present_flag[i] + toSkip += 8; + } + } + data.skipBits(toSkip); + if (maxSubLayersMinus1 > 0) { + data.skipBits(2 * (8 - maxSubLayersMinus1)); + } + int seqParameterSetId = data.readUnsignedExpGolombCodedInt(); + int chromaFormatIdc = data.readUnsignedExpGolombCodedInt(); + if (chromaFormatIdc == 3) { + data.skipBit(); // separate_colour_plane_flag + } + int frameWidth = data.readUnsignedExpGolombCodedInt(); + int frameHeight = data.readUnsignedExpGolombCodedInt(); + if (data.readBit()) { // conformance_window_flag + int confWinLeftOffset = data.readUnsignedExpGolombCodedInt(); + int confWinRightOffset = data.readUnsignedExpGolombCodedInt(); + int confWinTopOffset = data.readUnsignedExpGolombCodedInt(); + int confWinBottomOffset = data.readUnsignedExpGolombCodedInt(); + // H.265/HEVC (2014) Table 6-1 + int subWidthC = chromaFormatIdc == 1 || chromaFormatIdc == 2 ? 2 : 1; + int subHeightC = chromaFormatIdc == 1 ? 2 : 1; + frameWidth -= subWidthC * (confWinLeftOffset + confWinRightOffset); + frameHeight -= subHeightC * (confWinTopOffset + confWinBottomOffset); + } + data.readUnsignedExpGolombCodedInt(); // bit_depth_luma_minus8 + data.readUnsignedExpGolombCodedInt(); // bit_depth_chroma_minus8 + int log2MaxPicOrderCntLsbMinus4 = data.readUnsignedExpGolombCodedInt(); + // for (i = sps_sub_layer_ordering_info_present_flag ? 0 : sps_max_sub_layers_minus1; ...) + for (int i = data.readBit() ? 
0 : maxSubLayersMinus1; i <= maxSubLayersMinus1; i++) { + data.readUnsignedExpGolombCodedInt(); // sps_max_dec_pic_buffering_minus1[i] + data.readUnsignedExpGolombCodedInt(); // sps_max_num_reorder_pics[i] + data.readUnsignedExpGolombCodedInt(); // sps_max_latency_increase_plus1[i] + } + data.readUnsignedExpGolombCodedInt(); // log2_min_luma_coding_block_size_minus3 + data.readUnsignedExpGolombCodedInt(); // log2_diff_max_min_luma_coding_block_size + data.readUnsignedExpGolombCodedInt(); // log2_min_luma_transform_block_size_minus2 + data.readUnsignedExpGolombCodedInt(); // log2_diff_max_min_luma_transform_block_size + data.readUnsignedExpGolombCodedInt(); // max_transform_hierarchy_depth_inter + data.readUnsignedExpGolombCodedInt(); // max_transform_hierarchy_depth_intra + // if (scaling_list_enabled_flag) { if (sps_scaling_list_data_present_flag) {...}} + boolean scalingListEnabled = data.readBit(); + if (scalingListEnabled && data.readBit()) { + skipH265ScalingList(data); + } + data.skipBits(2); // amp_enabled_flag (1), sample_adaptive_offset_enabled_flag (1) + if (data.readBit()) { // pcm_enabled_flag + // pcm_sample_bit_depth_luma_minus1 (4), pcm_sample_bit_depth_chroma_minus1 (4) + data.skipBits(8); + data.readUnsignedExpGolombCodedInt(); // log2_min_pcm_luma_coding_block_size_minus3 + data.readUnsignedExpGolombCodedInt(); // log2_diff_max_min_pcm_luma_coding_block_size + data.skipBit(); // pcm_loop_filter_disabled_flag + } + skipShortTermReferencePictureSets(data); + if (data.readBit()) { // long_term_ref_pics_present_flag + // num_long_term_ref_pics_sps + for (int i = 0; i < data.readUnsignedExpGolombCodedInt(); i++) { + int ltRefPicPocLsbSpsLength = log2MaxPicOrderCntLsbMinus4 + 4; + // lt_ref_pic_poc_lsb_sps[i], used_by_curr_pic_lt_sps_flag[i] + data.skipBits(ltRefPicPocLsbSpsLength + 1); + } + } + data.skipBits(2); // sps_temporal_mvp_enabled_flag, strong_intra_smoothing_enabled_flag + float pixelWidthHeightRatio = 1; + if (data.readBit()) { // vui_parameters_present_flag + if (data.readBit()) { // aspect_ratio_info_present_flag + int aspectRatioIdc = data.readBits(8); + if (aspectRatioIdc == NalUnitUtil.EXTENDED_SAR) { + int sarWidth = data.readBits(16); + int sarHeight = data.readBits(16); + if (sarWidth != 0 && sarHeight != 0) { + pixelWidthHeightRatio = (float) sarWidth / sarHeight; + } + } else if (aspectRatioIdc < NalUnitUtil.ASPECT_RATIO_IDC_VALUES.length) { + pixelWidthHeightRatio = NalUnitUtil.ASPECT_RATIO_IDC_VALUES[aspectRatioIdc]; + } else { + Log.w(TAG, "Unexpected aspect_ratio_idc value: " + aspectRatioIdc); + } + } + if (data.readBit()) { // overscan_info_present_flag + data.skipBit(); // overscan_appropriate_flag + } + if (data.readBit()) { // video_signal_type_present_flag + data.skipBits(4); // video_format, video_full_range_flag + if (data.readBit()) { // colour_description_present_flag + // colour_primaries, transfer_characteristics, matrix_coeffs + data.skipBits(24); + } + } + if (data.readBit()) { // chroma_loc_info_present_flag + data.readUnsignedExpGolombCodedInt(); // chroma_sample_loc_type_top_field + data.readUnsignedExpGolombCodedInt(); // chroma_sample_loc_type_bottom_field + } + data.skipBit(); // neutral_chroma_indication_flag + if (data.readBit()) { // field_seq_flag + // field_seq_flag equal to 1 indicates that the coded video sequence conveys pictures that + // represent fields, which means that frame height is double the picture height. 
+ frameHeight *= 2; + } + } + + return new H265SpsData( + generalProfileSpace, + generalTierFlag, + generalProfileIdc, + generalProfileCompatibilityFlags, + constraintBytes, + generalLevelIdc, + seqParameterSetId, + frameWidth, + frameHeight, + pixelWidthHeightRatio); + } + /** * Parses a PPS NAL unit using the syntax defined in ITU-T Recommendation H.264 (2013) subsection * 7.3.2.2. @@ -392,8 +630,20 @@ public static SpsData parseSpsNalUnit(byte[] nalData, int nalOffset, int nalLimi * @return A parsed representation of the PPS data. */ public static PpsData parsePpsNalUnit(byte[] nalData, int nalOffset, int nalLimit) { + return parsePpsNalUnitPayload(nalData, nalOffset + 1, nalLimit); + } + + /** + * Parses a PPS NAL unit payload (excluding the NAL unit header) using the syntax defined in ITU-T + * Recommendation H.264 (2013) subsection 7.3.2.2. + * + * @param nalData A buffer containing escaped PPS data. + * @param nalOffset The offset of the NAL unit payload in {@code nalData}. + * @param nalLimit The limit of the NAL unit in {@code nalData}. + * @return A parsed representation of the PPS data. + */ + public static PpsData parsePpsNalUnitPayload(byte[] nalData, int nalOffset, int nalLimit) { ParsableNalUnitBitArray data = new ParsableNalUnitBitArray(nalData, nalOffset, nalLimit); - data.skipBits(8); // nal_unit int picParameterSetId = data.readUnsignedExpGolombCodedInt(); int seqParameterSetId = data.readUnsignedExpGolombCodedInt(); data.skipBit(); // entropy_coding_mode_flag @@ -403,16 +653,16 @@ public static PpsData parsePpsNalUnit(byte[] nalData, int nalOffset, int nalLimi /** * Finds the first NAL unit in {@code data}. - *
<p>
      - * If {@code prefixFlags} is null then the first three bytes of a NAL unit must be entirely + * + *
<p>
      If {@code prefixFlags} is null then the first three bytes of a NAL unit must be entirely * contained within the part of the array being searched in order for it to be found. - *
<p>
      - * When {@code prefixFlags} is non-null, this method supports finding NAL units whose first four - * bytes span {@code data} arrays passed to successive calls. To use this feature, pass the same - * {@code prefixFlags} parameter to successive calls. State maintained in this parameter enables - * the detection of such NAL units. Note that when using this feature, the return value may be 3, - * 2 or 1 less than {@code startOffset}, to indicate a NAL unit starting 3, 2 or 1 bytes before - * the first byte in the current array. + * + *
<p>
      When {@code prefixFlags} is non-null, this method supports finding NAL units whose first + * four bytes span {@code data} arrays passed to successive calls. To use this feature, pass the + * same {@code prefixFlags} parameter to successive calls. State maintained in this parameter + * enables the detection of such NAL units. Note that when using this feature, the return value + * may be 3, 2 or 1 less than {@code startOffset}, to indicate a NAL unit starting 3, 2 or 1 bytes + * before the first byte in the current array. * * @param data The data to search. * @param startOffset The offset (inclusive) in the data to start the search. @@ -422,8 +672,8 @@ public static PpsData parsePpsNalUnit(byte[] nalData, int nalOffset, int nalLimi * must be at least 3 elements long. * @return The offset of the NAL unit, or {@code endOffset} if a NAL unit was not found. */ - public static int findNalUnit(byte[] data, int startOffset, int endOffset, - boolean[] prefixFlags) { + public static int findNalUnit( + byte[] data, int startOffset, int endOffset, boolean[] prefixFlags) { int length = endOffset - startOffset; Assertions.checkState(length >= 0); @@ -431,18 +681,18 @@ public static int findNalUnit(byte[] data, int startOffset, int endOffset, return endOffset; } - if (prefixFlags != null) { - if (prefixFlags[0]) { - clearPrefixFlags(prefixFlags); - return startOffset - 3; - } else if (length > 1 && prefixFlags[1] && data[startOffset] == 1) { - clearPrefixFlags(prefixFlags); - return startOffset - 2; - } else if (length > 2 && prefixFlags[2] && data[startOffset] == 0 - && data[startOffset + 1] == 1) { - clearPrefixFlags(prefixFlags); - return startOffset - 1; - } + if (prefixFlags[0]) { + clearPrefixFlags(prefixFlags); + return startOffset - 3; + } else if (length > 1 && prefixFlags[1] && data[startOffset] == 1) { + clearPrefixFlags(prefixFlags); + return startOffset - 2; + } else if (length > 2 + && prefixFlags[2] + && data[startOffset] == 0 + && data[startOffset + 1] == 1) { + clearPrefixFlags(prefixFlags); + return startOffset - 1; } int limit = endOffset - 1; @@ -453,9 +703,7 @@ public static int findNalUnit(byte[] data, int startOffset, int endOffset, // There isn't a NAL prefix here, or at the next two positions. Do nothing and let the // loop advance the index by three. } else if (data[i - 2] == 0 && data[i - 1] == 0 && data[i] == 1) { - if (prefixFlags != null) { - clearPrefixFlags(prefixFlags); - } + clearPrefixFlags(prefixFlags); return i - 2; } else { // There isn't a NAL prefix here, but there might be at the next position. We should @@ -464,18 +712,20 @@ public static int findNalUnit(byte[] data, int startOffset, int endOffset, } } - if (prefixFlags != null) { - // True if the last three bytes in the data seen so far are {0,0,1}. - prefixFlags[0] = length > 2 - ? (data[endOffset - 3] == 0 && data[endOffset - 2] == 0 && data[endOffset - 1] == 1) - : length == 2 ? (prefixFlags[2] && data[endOffset - 2] == 0 && data[endOffset - 1] == 1) - : (prefixFlags[1] && data[endOffset - 1] == 1); - // True if the last two bytes in the data seen so far are {0,0}. - prefixFlags[1] = length > 1 ? data[endOffset - 2] == 0 && data[endOffset - 1] == 0 - : prefixFlags[2] && data[endOffset - 1] == 0; - // True if the last byte in the data seen so far is {0}. - prefixFlags[2] = data[endOffset - 1] == 0; - } + // True if the last three bytes in the data seen so far are {0,0,1}. + prefixFlags[0] = + length > 2 + ? 
(data[endOffset - 3] == 0 && data[endOffset - 2] == 0 && data[endOffset - 1] == 1) + : length == 2 + ? (prefixFlags[2] && data[endOffset - 2] == 0 && data[endOffset - 1] == 1) + : (prefixFlags[1] && data[endOffset - 1] == 1); + // True if the last two bytes in the data seen so far are {0,0}. + prefixFlags[1] = + length > 1 + ? data[endOffset - 2] == 0 && data[endOffset - 1] == 0 + : prefixFlags[2] && data[endOffset - 1] == 0; + // True if the last byte in the data seen so far is {0}. + prefixFlags[2] = data[endOffset - 1] == 0; return endOffset; } @@ -512,8 +762,129 @@ private static void skipScalingList(ParsableNalUnitBitArray bitArray, int size) } } + private static void skipH265ScalingList(ParsableNalUnitBitArray bitArray) { + for (int sizeId = 0; sizeId < 4; sizeId++) { + for (int matrixId = 0; matrixId < 6; matrixId += sizeId == 3 ? 3 : 1) { + if (!bitArray.readBit()) { // scaling_list_pred_mode_flag[sizeId][matrixId] + // scaling_list_pred_matrix_id_delta[sizeId][matrixId] + bitArray.readUnsignedExpGolombCodedInt(); + } else { + int coefNum = min(64, 1 << (4 + (sizeId << 1))); + if (sizeId > 1) { + // scaling_list_dc_coef_minus8[sizeId - 2][matrixId] + bitArray.readSignedExpGolombCodedInt(); + } + for (int i = 0; i < coefNum; i++) { + bitArray.readSignedExpGolombCodedInt(); // scaling_list_delta_coef + } + } + } + } + } + + /** + * Skips any short term reference picture sets contained in a SPS. + * + *
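The prefixFlags contract described above is what lets a single three-element array be threaded through successive findNalUnit() calls so that a start code split across two reads is still detected; note that the reshaped body now indexes prefixFlags unconditionally, so this sketch always passes a real array. The chunked input is hypothetical:

```java
import com.google.android.exoplayer2.util.NalUnitUtil;

final class FindNalUnitSketch {
  // Scans consecutive buffers for NAL unit start codes, reusing one prefixFlags
  // array so that a 0x000001 prefix spanning a chunk boundary is still found.
  static void scan(byte[][] chunks) {
    boolean[] prefixFlags = new boolean[3];
    for (byte[] chunk : chunks) {
      int offset = 0;
      while (offset < chunk.length) {
        int nalStart = NalUnitUtil.findNalUnit(chunk, offset, chunk.length, prefixFlags);
        if (nalStart == chunk.length) {
          break; // Not found here; partial-prefix state is carried in prefixFlags.
        }
        // nalStart may be up to 3 less than offset if the start code began in the
        // previous chunk.
        System.out.println("NAL unit starts at " + nalStart);
        offset = nalStart + 3; // Resume scanning just after the 0x000001 prefix.
      }
    }
  }
}
```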
<p>
      Note: The st_ref_pic_set parsing in this method is simplified for the case where they're + * contained in a SPS, and would need generalizing for use elsewhere. + */ + private static void skipShortTermReferencePictureSets(ParsableNalUnitBitArray bitArray) { + int numShortTermRefPicSets = bitArray.readUnsignedExpGolombCodedInt(); + // As this method applies in a SPS, each short term reference picture set only accesses data + // from the previous one. This is because RefRpsIdx = stRpsIdx - (delta_idx_minus1 + 1), and + // delta_idx_minus1 is always zero in a SPS. Hence we just keep track of variables from the + // previous one as we iterate. + int previousNumNegativePics = C.INDEX_UNSET; + int previousNumPositivePics = C.INDEX_UNSET; + int[] previousDeltaPocS0 = new int[0]; + int[] previousDeltaPocS1 = new int[0]; + for (int stRpsIdx = 0; stRpsIdx < numShortTermRefPicSets; stRpsIdx++) { + int numNegativePics; + int numPositivePics; + int[] deltaPocS0; + int[] deltaPocS1; + + boolean interRefPicSetPredictionFlag = stRpsIdx != 0 && bitArray.readBit(); + if (interRefPicSetPredictionFlag) { + int previousNumDeltaPocs = previousNumNegativePics + previousNumPositivePics; + + int deltaRpsSign = bitArray.readBit() ? 1 : 0; + int absDeltaRps = bitArray.readUnsignedExpGolombCodedInt() + 1; + int deltaRps = (1 - 2 * deltaRpsSign) * absDeltaRps; + + boolean[] useDeltaFlags = new boolean[previousNumDeltaPocs + 1]; + for (int j = 0; j <= previousNumDeltaPocs; j++) { + if (!bitArray.readBit()) { // used_by_curr_pic_flag[j] + useDeltaFlags[j] = bitArray.readBit(); + } else { + // When use_delta_flag[j] is not present, its value is 1. + useDeltaFlags[j] = true; + } + } + + // Derive numNegativePics, numPositivePics, deltaPocS0 and deltaPocS1 as per Rec. ITU-T + // H.265 v6 (06/2019) Section 7.4.8 + int i = 0; + deltaPocS0 = new int[previousNumDeltaPocs + 1]; + deltaPocS1 = new int[previousNumDeltaPocs + 1]; + for (int j = previousNumPositivePics - 1; j >= 0; j--) { + int dPoc = previousDeltaPocS1[j] + deltaRps; + if (dPoc < 0 && useDeltaFlags[previousNumNegativePics + j]) { + deltaPocS0[i++] = dPoc; + } + } + if (deltaRps < 0 && useDeltaFlags[previousNumDeltaPocs]) { + deltaPocS0[i++] = deltaRps; + } + for (int j = 0; j < previousNumNegativePics; j++) { + int dPoc = previousDeltaPocS0[j] + deltaRps; + if (dPoc < 0 && useDeltaFlags[j]) { + deltaPocS0[i++] = dPoc; + } + } + numNegativePics = i; + deltaPocS0 = Arrays.copyOf(deltaPocS0, numNegativePics); + + i = 0; + for (int j = previousNumNegativePics - 1; j >= 0; j--) { + int dPoc = previousDeltaPocS0[j] + deltaRps; + if (dPoc > 0 && useDeltaFlags[j]) { + deltaPocS1[i++] = dPoc; + } + } + if (deltaRps > 0 && useDeltaFlags[previousNumDeltaPocs]) { + deltaPocS1[i++] = deltaRps; + } + for (int j = 0; j < previousNumPositivePics; j++) { + int dPoc = previousDeltaPocS1[j] + deltaRps; + if (dPoc > 0 && useDeltaFlags[previousNumNegativePics + j]) { + deltaPocS1[i++] = dPoc; + } + } + numPositivePics = i; + deltaPocS1 = Arrays.copyOf(deltaPocS1, numPositivePics); + } else { + numNegativePics = bitArray.readUnsignedExpGolombCodedInt(); + numPositivePics = bitArray.readUnsignedExpGolombCodedInt(); + deltaPocS0 = new int[numNegativePics]; + for (int i = 0; i < numNegativePics; i++) { + deltaPocS0[i] = bitArray.readUnsignedExpGolombCodedInt() + 1; + bitArray.skipBit(); // used_by_curr_pic_s0_flag[i] + } + deltaPocS1 = new int[numPositivePics]; + for (int i = 0; i < numPositivePics; i++) { + deltaPocS1[i] = bitArray.readUnsignedExpGolombCodedInt() + 1; + 
bitArray.skipBit(); // used_by_curr_pic_s1_flag[i] + } + } + previousNumNegativePics = numNegativePics; + previousNumPositivePics = numPositivePics; + previousDeltaPocS0 = deltaPocS0; + previousDeltaPocS1 = deltaPocS1; + } + } + private NalUnitUtil() { // Prevent instantiation. } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/NetworkTypeObserver.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/NetworkTypeObserver.java new file mode 100644 index 0000000000..4112e6bb6b --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/NetworkTypeObserver.java @@ -0,0 +1,263 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import android.content.BroadcastReceiver; +import android.content.Context; +import android.content.Intent; +import android.content.IntentFilter; +import android.net.ConnectivityManager; +import android.net.NetworkInfo; +import android.os.Handler; +import android.os.Looper; +import android.telephony.TelephonyCallback; +import android.telephony.TelephonyCallback.DisplayInfoListener; +import android.telephony.TelephonyDisplayInfo; +import android.telephony.TelephonyManager; +import androidx.annotation.GuardedBy; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import androidx.annotation.VisibleForTesting; +import com.google.android.exoplayer2.C; +import java.lang.ref.WeakReference; +import java.util.concurrent.CopyOnWriteArrayList; + +/** + * Observer for network type changes. + * + *
<p>
      {@link #register Registered} listeners are informed at registration and whenever the network + * type changes. + * + *
<p>
      The current network type can also be {@link #getNetworkType queried} without registration. + */ +public final class NetworkTypeObserver { + + /** A listener for network type changes. */ + public interface Listener { + + /** + * Called when the network type changed or when the listener is first registered. + * + *
<p>
      This method is always called on the main thread. + */ + void onNetworkTypeChanged(@C.NetworkType int networkType); + } + + @Nullable private static NetworkTypeObserver staticInstance; + + private final Handler mainHandler; + // This class needs to hold weak references as it doesn't require listeners to unregister. + private final CopyOnWriteArrayList> listeners; + private final Object networkTypeLock; + + @GuardedBy("networkTypeLock") + private @C.NetworkType int networkType; + + /** + * Returns a network type observer instance. + * + * @param context A {@link Context}. + */ + public static synchronized NetworkTypeObserver getInstance(Context context) { + if (staticInstance == null) { + staticInstance = new NetworkTypeObserver(context); + } + return staticInstance; + } + + /** Resets the network type observer for tests. */ + @VisibleForTesting + public static synchronized void resetForTests() { + staticInstance = null; + } + + private NetworkTypeObserver(Context context) { + mainHandler = new Handler(Looper.getMainLooper()); + listeners = new CopyOnWriteArrayList<>(); + networkTypeLock = new Object(); + networkType = C.NETWORK_TYPE_UNKNOWN; + IntentFilter filter = new IntentFilter(); + filter.addAction(ConnectivityManager.CONNECTIVITY_ACTION); + Util.registerReceiverNotExported(context, new Receiver(), filter); + } + + /** + * Registers a listener. + * + *
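getInstance() and register() above, together with getNetworkType() just below, are the pieces most callers of this new observer need. A small usage sketch (the listener body is a placeholder):

```java
import android.content.Context;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.NetworkTypeObserver;

final class NetworkTypeSketch {
  static void observe(Context context) {
    NetworkTypeObserver observer = NetworkTypeObserver.getInstance(context);
    // The listener also fires once on the main thread right after registration.
    observer.register(
        networkType -> {
          if (networkType == C.NETWORK_TYPE_OFFLINE) {
            // e.g. pause background prefetching (placeholder reaction).
          }
        });
    // One-off query without registering:
    int current = observer.getNetworkType();
    System.out.println("current network type: " + current);
  }
}
```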
<p>
      The current network type will be reported to the listener after registration. + * + * @param listener The {@link Listener}. + */ + public void register(Listener listener) { + removeClearedReferences(); + listeners.add(new WeakReference<>(listener)); + // Simulate an initial update on the main thread (like the sticky broadcast we'd receive if + // we were to register a separate broadcast receiver for each listener). + mainHandler.post(() -> listener.onNetworkTypeChanged(getNetworkType())); + } + + /** Returns the current network type. */ + public @C.NetworkType int getNetworkType() { + synchronized (networkTypeLock) { + return networkType; + } + } + + private void removeClearedReferences() { + for (WeakReference listenerReference : listeners) { + if (listenerReference.get() == null) { + listeners.remove(listenerReference); + } + } + } + + private void updateNetworkType(@C.NetworkType int networkType) { + synchronized (networkTypeLock) { + if (this.networkType == networkType) { + return; + } + this.networkType = networkType; + } + for (WeakReference listenerReference : listeners) { + @Nullable Listener listener = listenerReference.get(); + if (listener != null) { + listener.onNetworkTypeChanged(networkType); + } else { + listeners.remove(listenerReference); + } + } + } + + private static @C.NetworkType int getNetworkTypeFromConnectivityManager(Context context) { + NetworkInfo networkInfo; + @Nullable + ConnectivityManager connectivityManager = + (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE); + if (connectivityManager == null) { + return C.NETWORK_TYPE_UNKNOWN; + } + try { + networkInfo = connectivityManager.getActiveNetworkInfo(); + } catch (SecurityException e) { + // Expected if permission was revoked. + return C.NETWORK_TYPE_UNKNOWN; + } + if (networkInfo == null || !networkInfo.isConnected()) { + return C.NETWORK_TYPE_OFFLINE; + } + switch (networkInfo.getType()) { + case ConnectivityManager.TYPE_WIFI: + return C.NETWORK_TYPE_WIFI; + case ConnectivityManager.TYPE_WIMAX: + return C.NETWORK_TYPE_4G; + case ConnectivityManager.TYPE_MOBILE: + case ConnectivityManager.TYPE_MOBILE_DUN: + case ConnectivityManager.TYPE_MOBILE_HIPRI: + return getMobileNetworkType(networkInfo); + case ConnectivityManager.TYPE_ETHERNET: + return C.NETWORK_TYPE_ETHERNET; + default: + return C.NETWORK_TYPE_OTHER; + } + } + + private static @C.NetworkType int getMobileNetworkType(NetworkInfo networkInfo) { + switch (networkInfo.getSubtype()) { + case TelephonyManager.NETWORK_TYPE_EDGE: + case TelephonyManager.NETWORK_TYPE_GPRS: + return C.NETWORK_TYPE_2G; + case TelephonyManager.NETWORK_TYPE_1xRTT: + case TelephonyManager.NETWORK_TYPE_CDMA: + case TelephonyManager.NETWORK_TYPE_EVDO_0: + case TelephonyManager.NETWORK_TYPE_EVDO_A: + case TelephonyManager.NETWORK_TYPE_EVDO_B: + case TelephonyManager.NETWORK_TYPE_HSDPA: + case TelephonyManager.NETWORK_TYPE_HSPA: + case TelephonyManager.NETWORK_TYPE_HSUPA: + case TelephonyManager.NETWORK_TYPE_IDEN: + case TelephonyManager.NETWORK_TYPE_UMTS: + case TelephonyManager.NETWORK_TYPE_EHRPD: + case TelephonyManager.NETWORK_TYPE_HSPAP: + case TelephonyManager.NETWORK_TYPE_TD_SCDMA: + return C.NETWORK_TYPE_3G; + case TelephonyManager.NETWORK_TYPE_LTE: + return C.NETWORK_TYPE_4G; + case TelephonyManager.NETWORK_TYPE_NR: + return Util.SDK_INT >= 29 ? 
C.NETWORK_TYPE_5G_SA : C.NETWORK_TYPE_UNKNOWN; + case TelephonyManager.NETWORK_TYPE_IWLAN: + return C.NETWORK_TYPE_WIFI; + case TelephonyManager.NETWORK_TYPE_GSM: + case TelephonyManager.NETWORK_TYPE_UNKNOWN: + default: // Future mobile network types. + return C.NETWORK_TYPE_CELLULAR_UNKNOWN; + } + } + + private final class Receiver extends BroadcastReceiver { + + @Override + public void onReceive(Context context, Intent intent) { + @C.NetworkType int networkType = getNetworkTypeFromConnectivityManager(context); + if (Util.SDK_INT >= 31 && networkType == C.NETWORK_TYPE_4G) { + // Delay update of the network type to check whether this is actually 5G-NSA. + Api31.disambiguate4gAnd5gNsa(context, /* instance= */ NetworkTypeObserver.this); + } else { + updateNetworkType(networkType); + } + } + } + + @RequiresApi(31) + private static final class Api31 { + + public static void disambiguate4gAnd5gNsa(Context context, NetworkTypeObserver instance) { + try { + TelephonyManager telephonyManager = + checkNotNull((TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE)); + DisplayInfoCallback callback = new DisplayInfoCallback(instance); + telephonyManager.registerTelephonyCallback(context.getMainExecutor(), callback); + // We are only interested in the initial response with the current state, so unregister + // the listener immediately. + telephonyManager.unregisterTelephonyCallback(callback); + } catch (RuntimeException e) { + // Ignore problems with listener registration and keep reporting as 4G. + instance.updateNetworkType(C.NETWORK_TYPE_4G); + } + } + + private static final class DisplayInfoCallback extends TelephonyCallback + implements DisplayInfoListener { + + private final NetworkTypeObserver instance; + + public DisplayInfoCallback(NetworkTypeObserver instance) { + this.instance = instance; + } + + @Override + public void onDisplayInfoChanged(TelephonyDisplayInfo telephonyDisplayInfo) { + int overrideNetworkType = telephonyDisplayInfo.getOverrideNetworkType(); + boolean is5gNsa = + overrideNetworkType == TelephonyDisplayInfo.OVERRIDE_NETWORK_TYPE_NR_NSA + || overrideNetworkType == TelephonyDisplayInfo.OVERRIDE_NETWORK_TYPE_NR_NSA_MMWAVE + || overrideNetworkType == TelephonyDisplayInfo.OVERRIDE_NETWORK_TYPE_NR_ADVANCED; + instance.updateNetworkType(is5gNsa ? 
C.NETWORK_TYPE_5G_NSA : C.NETWORK_TYPE_4G); + } + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/NonNullApi.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/NonNullApi.java index 694351fc7f..7762e76585 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/NonNullApi.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/NonNullApi.java @@ -15,6 +15,7 @@ */ package com.google.android.exoplayer2.util; +import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/NotificationUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/NotificationUtil.java index 756494f9d0..54c797a105 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/NotificationUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/NotificationUtil.java @@ -15,6 +15,9 @@ */ package com.google.android.exoplayer2.util; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static java.lang.annotation.ElementType.TYPE_USE; + import android.annotation.SuppressLint; import android.app.Notification; import android.app.NotificationChannel; @@ -27,6 +30,7 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** Utility methods for displaying {@link Notification Notifications}. */ @SuppressLint("InlinedApi") @@ -39,6 +43,7 @@ public final class NotificationUtil { */ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({ IMPORTANCE_UNSPECIFIED, IMPORTANCE_NONE, @@ -48,27 +53,31 @@ public final class NotificationUtil { IMPORTANCE_HIGH }) public @interface Importance {} - /** @see NotificationManager#IMPORTANCE_UNSPECIFIED */ + /** + * @see NotificationManager#IMPORTANCE_UNSPECIFIED + */ public static final int IMPORTANCE_UNSPECIFIED = NotificationManager.IMPORTANCE_UNSPECIFIED; - /** @see NotificationManager#IMPORTANCE_NONE */ + /** + * @see NotificationManager#IMPORTANCE_NONE + */ public static final int IMPORTANCE_NONE = NotificationManager.IMPORTANCE_NONE; - /** @see NotificationManager#IMPORTANCE_MIN */ + /** + * @see NotificationManager#IMPORTANCE_MIN + */ public static final int IMPORTANCE_MIN = NotificationManager.IMPORTANCE_MIN; - /** @see NotificationManager#IMPORTANCE_LOW */ + /** + * @see NotificationManager#IMPORTANCE_LOW + */ public static final int IMPORTANCE_LOW = NotificationManager.IMPORTANCE_LOW; - /** @see NotificationManager#IMPORTANCE_DEFAULT */ + /** + * @see NotificationManager#IMPORTANCE_DEFAULT + */ public static final int IMPORTANCE_DEFAULT = NotificationManager.IMPORTANCE_DEFAULT; - /** @see NotificationManager#IMPORTANCE_HIGH */ + /** + * @see NotificationManager#IMPORTANCE_HIGH + */ public static final int IMPORTANCE_HIGH = NotificationManager.IMPORTANCE_HIGH; - /** @deprecated Use {@link #createNotificationChannel(Context, String, int, int, int)}. */ - @Deprecated - public static void createNotificationChannel( - Context context, String id, @StringRes int nameResourceId, @Importance int importance) { - createNotificationChannel( - context, id, nameResourceId, /* descriptionResourceId= */ 0, importance); - } - /** * Creates a notification channel that notifications can be posted to. 
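With the deprecated four-argument overload removed just above, callers are left with the five-argument createNotificationChannel() plus setNotification() shown in the hunks around this point. A sketch (channel id and notification id are placeholders; a descriptionResourceId of 0 means no description):

```java
import android.app.Notification;
import android.content.Context;
import com.google.android.exoplayer2.util.NotificationUtil;

final class NotificationSketch {
  static void show(Context context, Notification notification, int channelNameResId) {
    NotificationUtil.createNotificationChannel(
        context,
        /* id= */ "downloads",              // placeholder channel id
        /* nameResourceId= */ channelNameResId,
        /* descriptionResourceId= */ 0,
        NotificationUtil.IMPORTANCE_LOW);
    NotificationUtil.setNotification(context, /* id= */ 1001, notification);
    // Passing null for the notification later cancels it:
    // NotificationUtil.setNotification(context, 1001, null);
  }
}
```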
See {@link * NotificationChannel} and {@link @@ -99,7 +108,8 @@ public static void createNotificationChannel( @Importance int importance) { if (Util.SDK_INT >= 26) { NotificationManager notificationManager = - (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE); + checkNotNull( + (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE)); NotificationChannel channel = new NotificationChannel(id, context.getString(nameResourceId), importance); if (descriptionResourceId != 0) { @@ -122,7 +132,7 @@ public static void createNotificationChannel( */ public static void setNotification(Context context, int id, @Nullable Notification notification) { NotificationManager notificationManager = - (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE); + checkNotNull((NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE)); if (notification != null) { notificationManager.notify(id, notification); } else { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ParsableBitArray.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ParsableBitArray.java index fc1bc653c6..7efaa42c7a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ParsableBitArray.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ParsableBitArray.java @@ -15,9 +15,12 @@ */ package com.google.android.exoplayer2.util; -/** - * Wraps a byte array, providing methods that allow it to be read as a bitstream. - */ +import static java.lang.Math.min; + +import com.google.common.base.Charsets; +import java.nio.charset.Charset; + +/** Wraps a byte array, providing methods that allow it to be read as a bitstream. */ public final class ParsableBitArray { public byte[] data; @@ -69,7 +72,7 @@ public void reset(byte[] data) { * @param parsableByteArray The {@link ParsableByteArray}. */ public void reset(ParsableByteArray parsableByteArray) { - reset(parsableByteArray.data, parsableByteArray.limit()); + reset(parsableByteArray.getData(), parsableByteArray.limit()); setPosition(parsableByteArray.getPosition() * 8); } @@ -86,16 +89,12 @@ public void reset(byte[] data, int limit) { byteLimit = limit; } - /** - * Returns the number of bits yet to be read. - */ + /** Returns the number of bits yet to be read. */ public int bitsLeft() { return (byteLimit - byteOffset) * 8 - bitOffset; } - /** - * Returns the current bit offset. - */ + /** Returns the current bit offset. */ public int getPosition() { return byteOffset * 8 + bitOffset; } @@ -121,9 +120,7 @@ public void setPosition(int position) { assertValidOffset(); } - /** - * Skips a single bit. - */ + /** Skips a single bit. */ public void skipBit() { if (++bitOffset == 8) { bitOffset = 0; @@ -277,6 +274,31 @@ public void skipBytes(int length) { assertValidOffset(); } + /** + * Reads the next {@code length} bytes as a UTF-8 string. Must only be called when the position is + * byte aligned. + * + * @param length The number of bytes to read. + * @return The string encoded by the bytes in UTF-8. + */ + public String readBytesAsString(int length) { + return readBytesAsString(length, Charsets.UTF_8); + } + + /** + * Reads the next {@code length} bytes as a string encoded in {@link Charset}. Must only be called + * when the position is byte aligned. + * + * @param length The number of bytes to read. + * @param charset The character set of the encoded characters. + * @return The string encoded by the bytes in the specified character set. 
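The readBytesAsString() additions documented here must only be used at byte-aligned positions, which suits bitstreams that mix flag bits with fixed-length text fields. A contrived sketch (the 4-bit header layout is invented; input is assumed to hold at least four bytes):

```java
import com.google.android.exoplayer2.util.ParsableBitArray;

final class BitArrayStringSketch {
  static String readVersionedTag(byte[] input) {
    ParsableBitArray bits = new ParsableBitArray(input);
    int version = bits.readBits(4); // hypothetical 4-bit version field
    bits.skipBits(4);               // realign to a byte boundary before reading text
    String tag = bits.readBytesAsString(3); // next 3 bytes as UTF-8 (default overload)
    return version + ":" + tag;
  }
}
```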
+ */ + public String readBytesAsString(int length, Charset charset) { + byte[] bytes = new byte[length]; + readBytes(bytes, 0, length); + return new String(bytes, charset); + } + /** * Overwrites {@code numBits} from this array using the {@code numBits} least significant bits * from {@code value}. Bits are written in order from most significant to least significant. The @@ -291,7 +313,7 @@ public void putInt(int value, int numBits) { if (numBits < 32) { value &= (1 << numBits) - 1; } - int firstByteReadSize = Math.min(8 - bitOffset, numBits); + int firstByteReadSize = min(8 - bitOffset, numBits); int firstByteRightPaddingSize = 8 - bitOffset - firstByteReadSize; int firstByteBitmask = (0xFF00 >> bitOffset) | ((1 << firstByteRightPaddingSize) - 1); data[byteOffset] = (byte) (data[byteOffset] & firstByteBitmask); @@ -316,8 +338,7 @@ public void putInt(int value, int numBits) { private void assertValidOffset() { // It is fine for position to be at the end of the array, but no further. - Assertions.checkState(byteOffset >= 0 - && (byteOffset < byteLimit || (byteOffset == byteLimit && bitOffset == 0))); + Assertions.checkState( + byteOffset >= 0 && (byteOffset < byteLimit || (byteOffset == byteLimit && bitOffset == 0))); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ParsableByteArray.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ParsableByteArray.java index 67686ad64f..29c2aa5153 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ParsableByteArray.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ParsableByteArray.java @@ -16,9 +16,13 @@ package com.google.android.exoplayer2.util; import androidx.annotation.Nullable; -import com.google.android.exoplayer2.C; +import com.google.common.base.Charsets; +import com.google.common.collect.ImmutableSet; +import com.google.common.primitives.Chars; +import com.google.common.primitives.UnsignedBytes; import java.nio.ByteBuffer; import java.nio.charset.Charset; +import java.util.Arrays; /** * Wraps a byte array, providing a set of methods for parsing data from it. Numerical values are @@ -26,9 +30,15 @@ */ public final class ParsableByteArray { - public byte[] data; + private static final char[] CR_AND_LF = {'\r', '\n'}; + private static final char[] LF = {'\n'}; + private static final ImmutableSet SUPPORTED_CHARSETS_FOR_READLINE = + ImmutableSet.of( + Charsets.US_ASCII, Charsets.UTF_8, Charsets.UTF_16, Charsets.UTF_16BE, Charsets.UTF_16LE); + private byte[] data; private int position; + // TODO(internal b/147657250): Enforce this limit on all read methods. private int limit; /** Creates a new instance that initially has no backing data. */ @@ -67,15 +77,9 @@ public ParsableByteArray(byte[] data, int limit) { this.limit = limit; } - /** Sets the position and limit to zero. */ - public void reset() { - position = 0; - limit = 0; - } - /** - * Resets the position to zero and the limit to the specified value. If the limit exceeds the - * capacity, {@code data} is replaced with a new array of sufficient size. + * Resets the position to zero and the limit to the specified value. This might replace or wipe + * the {@link #getData() underlying array}, potentially invalidating any local references. * * @param limit The limit to set. */ @@ -106,15 +110,26 @@ public void reset(byte[] data, int limit) { } /** - * Returns the number of bytes yet to be read. + * Ensures the backing array is at least {@code requiredCapacity} long. + * + *
<p>
      {@link #getPosition() position}, {@link #limit() limit}, and all data in the underlying + * array (including that beyond {@link #limit()}) are preserved. + * + *
<p>
      This might replace or wipe the {@link #getData() underlying array}, potentially invalidating + * any local references. */ + public void ensureCapacity(int requiredCapacity) { + if (requiredCapacity > capacity()) { + data = Arrays.copyOf(data, requiredCapacity); + } + } + + /** Returns the number of bytes yet to be read. */ public int bytesLeft() { return limit - position; } - /** - * Returns the limit. - */ + /** Returns the limit. */ public int limit() { return limit; } @@ -129,20 +144,11 @@ public void setLimit(int limit) { this.limit = limit; } - /** - * Returns the current offset in the array, in bytes. - */ + /** Returns the current offset in the array, in bytes. */ public int getPosition() { return position; } - /** - * Returns the capacity of the array, which may be larger than the limit. - */ - public int capacity() { - return data.length; - } - /** * Sets the reading offset in the array. * @@ -156,6 +162,23 @@ public void setPosition(int position) { this.position = position; } + /** + * Returns the underlying array. + * + *
<p>
      Changes to this array are reflected in the results of the {@code read...()} methods. + * + *
<p>
      This reference must be assumed to become invalid when {@link #reset} or {@link + * #ensureCapacity} are called (because the array might get reallocated). + */ + public byte[] getData() { + return data; + } + + /** Returns the capacity of the array, which may be larger than the limit. */ + public int capacity() { + return data.length; + } + /** * Moves the reading offset by {@code bytes}. * @@ -168,8 +191,8 @@ public void skipBytes(int bytes) { } /** - * Reads the next {@code length} bytes into {@code bitArray}, and resets the position of - * {@code bitArray} to zero. + * Reads the next {@code length} bytes into {@code bitArray}, and resets the position of {@code + * bitArray} to zero. * * @param bitArray The {@link ParsableBitArray} into which the bytes should be read. * @param length The number of bytes to write. @@ -204,97 +227,70 @@ public void readBytes(ByteBuffer buffer, int length) { position += length; } - /** - * Peeks at the next byte as an unsigned value. - */ + /** Peeks at the next byte as an unsigned value. */ public int peekUnsignedByte() { return (data[position] & 0xFF); } - /** - * Peeks at the next char. - */ + /** Peeks at the next char. */ public char peekChar() { - return (char) ((data[position] & 0xFF) << 8 - | (data[position + 1] & 0xFF)); + return (char) ((data[position] & 0xFF) << 8 | (data[position + 1] & 0xFF)); } - /** - * Reads the next byte as an unsigned value. - */ + /** Reads the next byte as an unsigned value. */ public int readUnsignedByte() { return (data[position++] & 0xFF); } - /** - * Reads the next two bytes as an unsigned value. - */ + /** Reads the next two bytes as an unsigned value. */ public int readUnsignedShort() { - return (data[position++] & 0xFF) << 8 - | (data[position++] & 0xFF); + return (data[position++] & 0xFF) << 8 | (data[position++] & 0xFF); } - /** - * Reads the next two bytes as an unsigned value. - */ + /** Reads the next two bytes as an unsigned value. */ public int readLittleEndianUnsignedShort() { return (data[position++] & 0xFF) | (data[position++] & 0xFF) << 8; } - /** - * Reads the next two bytes as a signed value. - */ + /** Reads the next two bytes as a signed value. */ public short readShort() { - return (short) ((data[position++] & 0xFF) << 8 - | (data[position++] & 0xFF)); + return (short) ((data[position++] & 0xFF) << 8 | (data[position++] & 0xFF)); } - /** - * Reads the next two bytes as a signed value. - */ + /** Reads the next two bytes as a signed value. */ public short readLittleEndianShort() { return (short) ((data[position++] & 0xFF) | (data[position++] & 0xFF) << 8); } - /** - * Reads the next three bytes as an unsigned value. - */ + /** Reads the next three bytes as an unsigned value. */ public int readUnsignedInt24() { return (data[position++] & 0xFF) << 16 | (data[position++] & 0xFF) << 8 | (data[position++] & 0xFF); } - /** - * Reads the next three bytes as a signed value. - */ + /** Reads the next three bytes as a signed value. */ public int readInt24() { return ((data[position++] & 0xFF) << 24) >> 8 | (data[position++] & 0xFF) << 8 | (data[position++] & 0xFF); } - /** - * Reads the next three bytes as a signed value in little endian order. - */ + /** Reads the next three bytes as a signed value in little endian order. */ public int readLittleEndianInt24() { return (data[position++] & 0xFF) | (data[position++] & 0xFF) << 8 | (data[position++] & 0xFF) << 16; } - /** - * Reads the next three bytes as an unsigned value in little endian order. 
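The getData()/ensureCapacity() pair introduced above replaces direct access to the formerly public data field, and the key rule is spelled out in the javadoc: re-fetch the array after any call that may reallocate it. A sketch of the intended scratch-buffer pattern (sizes and contents are arbitrary):

```java
import com.google.android.exoplayer2.util.ParsableByteArray;

final class ScratchBufferSketch {
  private final ParsableByteArray scratch = new ParsableByteArray();

  // Copies `length` bytes of `source` into the scratch buffer, leaving it
  // positioned at 0 with limit == length, ready for the read...() methods.
  ParsableByteArray fill(byte[] source, int length) {
    scratch.reset(/* limit= */ length); // may replace the backing array
    byte[] backing = scratch.getData(); // so fetch the reference only afterwards
    System.arraycopy(source, 0, backing, 0, length);
    return scratch;
  }
}
```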
- */ + /** Reads the next three bytes as an unsigned value in little endian order. */ public int readLittleEndianUnsignedInt24() { return (data[position++] & 0xFF) | (data[position++] & 0xFF) << 8 | (data[position++] & 0xFF) << 16; } - /** - * Reads the next four bytes as an unsigned value. - */ + /** Reads the next four bytes as an unsigned value. */ public long readUnsignedInt() { return (data[position++] & 0xFFL) << 24 | (data[position++] & 0xFFL) << 16 @@ -302,9 +298,7 @@ public long readUnsignedInt() { | (data[position++] & 0xFFL); } - /** - * Reads the next four bytes as an unsigned value in little endian order. - */ + /** Reads the next four bytes as an unsigned value in little endian order. */ public long readLittleEndianUnsignedInt() { return (data[position++] & 0xFFL) | (data[position++] & 0xFFL) << 8 @@ -312,9 +306,7 @@ public long readLittleEndianUnsignedInt() { | (data[position++] & 0xFFL) << 24; } - /** - * Reads the next four bytes as a signed value - */ + /** Reads the next four bytes as a signed value */ public int readInt() { return (data[position++] & 0xFF) << 24 | (data[position++] & 0xFF) << 16 @@ -322,9 +314,7 @@ public int readInt() { | (data[position++] & 0xFF); } - /** - * Reads the next four bytes as a signed value in little endian order. - */ + /** Reads the next four bytes as a signed value in little endian order. */ public int readLittleEndianInt() { return (data[position++] & 0xFF) | (data[position++] & 0xFF) << 8 @@ -332,9 +322,7 @@ public int readLittleEndianInt() { | (data[position++] & 0xFF) << 24; } - /** - * Reads the next eight bytes as a signed value. - */ + /** Reads the next eight bytes as a signed value. */ public long readLong() { return (data[position++] & 0xFFL) << 56 | (data[position++] & 0xFFL) << 48 @@ -346,9 +334,7 @@ public long readLong() { | (data[position++] & 0xFFL); } - /** - * Reads the next eight bytes as a signed value in little endian order. - */ + /** Reads the next eight bytes as a signed value in little endian order. */ public long readLittleEndianLong() { return (data[position++] & 0xFFL) | (data[position++] & 0xFFL) << 8 @@ -360,20 +346,17 @@ public long readLittleEndianLong() { | (data[position++] & 0xFFL) << 56; } - /** - * Reads the next four bytes, returning the integer portion of the fixed point 16.16 integer. - */ + /** Reads the next four bytes, returning the integer portion of the fixed point 16.16 integer. */ public int readUnsignedFixedPoint1616() { - int result = (data[position++] & 0xFF) << 8 - | (data[position++] & 0xFF); + int result = (data[position++] & 0xFF) << 8 | (data[position++] & 0xFF); position += 2; // Skip the non-integer portion. return result; } /** * Reads a Synchsafe integer. - *
<p>
      - * Synchsafe integers keep the highest bit of every byte zeroed. A 32 bit synchsafe integer can + * + *
<p>
      Synchsafe integers keep the highest bit of every byte zeroed. A 32 bit synchsafe integer can * store 28 bits of information. * * @return The parsed value. @@ -426,16 +409,12 @@ public long readUnsignedLongToLong() { return result; } - /** - * Reads the next four bytes as a 32-bit floating point value. - */ + /** Reads the next four bytes as a 32-bit floating point value. */ public float readFloat() { return Float.intBitsToFloat(readInt()); } - /** - * Reads the next eight bytes as a 64-bit floating point value. - */ + /** Reads the next eight bytes as a 64-bit floating point value. */ public double readDouble() { return Double.longBitsToDouble(readLong()); } @@ -447,7 +426,7 @@ public double readDouble() { * @return The string encoded by the bytes. */ public String readString(int length) { - return readString(length, Charset.forName(C.UTF8_NAME)); + return readString(length, Charsets.UTF_8); } /** @@ -492,11 +471,22 @@ public String readNullTerminatedString(int length) { */ @Nullable public String readNullTerminatedString() { + return readDelimiterTerminatedString('\0'); + } + + /** + * Reads up to the next delimiter byte (or the limit) as UTF-8 characters. + * + * @return The string not including any terminating delimiter byte, or null if the end of the data + * has already been reached. + */ + @Nullable + public String readDelimiterTerminatedString(char delimiter) { if (bytesLeft() == 0) { return null; } int stringLimit = position; - while (stringLimit < limit && data[stringLimit] != 0) { + while (stringLimit < limit && data[stringLimit] != delimiter) { stringLimit++; } String string = Util.fromUtf8Bytes(data, position, stringLimit - position); @@ -508,43 +498,47 @@ public String readNullTerminatedString() { } /** - * Reads a line of text. + * Reads a line of text in UTF-8. + * + *
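Before the readLine() changes below, note that readNullTerminatedString() has become just the '\0' case of the more general readDelimiterTerminatedString(char) added above. A small sketch over an invented "key:value" record:

```java
import com.google.android.exoplayer2.util.ParsableByteArray;
import com.google.common.base.Charsets;

final class DelimiterSketch {
  static void dump(String record) { // e.g. "key:value"
    byte[] bytes = record.getBytes(Charsets.UTF_8);
    ParsableByteArray array = new ParsableByteArray(bytes, bytes.length);
    String key = array.readDelimiterTerminatedString(':'); // "key"; the ':' itself is skipped
    String value = array.readNullTerminatedString();       // "value"; '\0' case, here reads to the limit
    System.out.println(key + " -> " + value);
  }
}
```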
<p>
      Equivalent to passing {@link Charsets#UTF_8} to {@link #readLine(Charset)}. + */ + @Nullable + public String readLine() { + return readLine(Charsets.UTF_8); + } + + /** + * Reads a line of text in {@code charset}. * *
<p>
      A line is considered to be terminated by any one of a carriage return ('\r'), a line feed - * ('\n'), or a carriage return followed immediately by a line feed ('\r\n'). The system's default - * charset (UTF-8) is used. This method discards leading UTF-8 byte order marks, if present. + * ('\n'), or a carriage return followed immediately by a line feed ('\r\n'). This method discards + * leading UTF byte order marks (BOM), if present. * + *
<p>
      The {@linkplain #getPosition() position} is advanced to start of the next line (i.e. any + * line terminators are skipped). + * + * @param charset The charset used to interpret the bytes as a {@link String}. * @return The line not including any line-termination characters, or null if the end of the data * has already been reached. + * @throws IllegalArgumentException if charset is not supported. Only US_ASCII, UTF-8, UTF-16, + * UTF-16BE, and UTF-16LE are supported. */ @Nullable - public String readLine() { + public String readLine(Charset charset) { + Assertions.checkArgument( + SUPPORTED_CHARSETS_FOR_READLINE.contains(charset), "Unsupported charset: " + charset); if (bytesLeft() == 0) { return null; } - int lineLimit = position; - while (lineLimit < limit && !Util.isLinebreak(data[lineLimit])) { - lineLimit++; + if (!charset.equals(Charsets.US_ASCII)) { + readUtfCharsetFromBom(); // Skip BOM if present } - if (lineLimit - position >= 3 && data[position] == (byte) 0xEF - && data[position + 1] == (byte) 0xBB && data[position + 2] == (byte) 0xBF) { - // There's a UTF-8 byte order mark at the start of the line. Discard it. - position += 3; - } - String line = Util.fromUtf8Bytes(data, position, lineLimit - position); - position = lineLimit; + int lineLimit = findNextLineTerminator(charset); + String line = readString(lineLimit - position, charset); if (position == limit) { return line; } - if (data[position] == '\r') { - position++; - if (position == limit) { - return line; - } - } - if (data[position] == '\n') { - position++; - } + skipLineTerminator(charset); return line; } @@ -583,4 +577,98 @@ public long readUtf8EncodedLong() { return value; } + /** + * Reads a UTF byte order mark (BOM) and returns the UTF {@link Charset} it represents. Returns + * {@code null} without advancing {@link #getPosition() position} if no BOM is found. + */ + @Nullable + public Charset readUtfCharsetFromBom() { + if (bytesLeft() >= 3 + && data[position] == (byte) 0xEF + && data[position + 1] == (byte) 0xBB + && data[position + 2] == (byte) 0xBF) { + position += 3; + return Charsets.UTF_8; + } else if (bytesLeft() >= 2) { + if (data[position] == (byte) 0xFE && data[position + 1] == (byte) 0xFF) { + position += 2; + return Charsets.UTF_16BE; + } else if (data[position] == (byte) 0xFF && data[position + 1] == (byte) 0xFE) { + position += 2; + return Charsets.UTF_16LE; + } + } + return null; + } + + /** + * Returns the index of the next occurrence of '\n' or '\r', or {@link #limit} if none is found. 
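readLine() now just forwards to readLine(Charset), which validates the charset, skips a leading byte order mark via readUtfCharsetFromBom(), and consumes the '\r', '\n' or '\r\n' terminator before returning. A sketch over a UTF-8 buffer (the sample text is arbitrary):

```java
import com.google.android.exoplayer2.util.ParsableByteArray;
import com.google.common.base.Charsets;

final class ReadLineSketch {
  static void printLines() {
    byte[] bytes = "first\r\nsecond\nlast".getBytes(Charsets.UTF_8);
    ParsableByteArray array = new ParsableByteArray(bytes, bytes.length);
    String line;
    while ((line = array.readLine(Charsets.UTF_8)) != null) {
      System.out.println(line); // "first", "second", "last"
    }
    // Charsets other than US-ASCII, UTF-8, UTF-16, UTF-16BE and UTF-16LE throw
    // IllegalArgumentException.
  }
}
```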
+ */ + private int findNextLineTerminator(Charset charset) { + int stride; + if (charset.equals(Charsets.UTF_8) || charset.equals(Charsets.US_ASCII)) { + stride = 1; + } else if (charset.equals(Charsets.UTF_16) + || charset.equals(Charsets.UTF_16LE) + || charset.equals(Charsets.UTF_16BE)) { + stride = 2; + } else { + throw new IllegalArgumentException("Unsupported charset: " + charset); + } + for (int i = position; i < limit - (stride - 1); i += stride) { + if ((charset.equals(Charsets.UTF_8) || charset.equals(Charsets.US_ASCII)) + && Util.isLinebreak(data[i])) { + return i; + } else if ((charset.equals(Charsets.UTF_16) || charset.equals(Charsets.UTF_16BE)) + && data[i] == 0x00 + && Util.isLinebreak(data[i + 1])) { + return i; + } else if (charset.equals(Charsets.UTF_16LE) + && data[i + 1] == 0x00 + && Util.isLinebreak(data[i])) { + return i; + } + } + return limit; + } + + private void skipLineTerminator(Charset charset) { + if (readCharacterIfInList(charset, CR_AND_LF) == '\r') { + readCharacterIfInList(charset, LF); + } + } + + /** + * Peeks at the character at {@link #position} (as decoded by {@code charset}), returns it and + * advances {@link #position} past it if it's in {@code chars}, otherwise returns {@code 0} + * without advancing {@link #position}. Returns {@code 0} if {@link #bytesLeft()} doesn't allow + * reading a whole character in {@code charset}. + * + *

      Only supports characters in {@code chars} that occupy a single code unit (i.e. one byte for + * UTF-8 and two bytes for UTF-16). + */ + private char readCharacterIfInList(Charset charset, char[] chars) { + char character; + int characterSize; + if ((charset.equals(Charsets.UTF_8) || charset.equals(Charsets.US_ASCII)) && bytesLeft() >= 1) { + character = Chars.checkedCast(UnsignedBytes.toInt(data[position])); + characterSize = 1; + } else if ((charset.equals(Charsets.UTF_16) || charset.equals(Charsets.UTF_16BE)) + && bytesLeft() >= 2) { + character = Chars.fromBytes(data[position], data[position + 1]); + characterSize = 2; + } else if (charset.equals(Charsets.UTF_16LE) && bytesLeft() >= 2) { + character = Chars.fromBytes(data[position + 1], data[position]); + characterSize = 2; + } else { + return 0; + } + + if (Chars.contains(chars, character)) { + position += characterSize; + return Chars.checkedCast(character); + } else { + return 0; + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ParsableNalUnitBitArray.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ParsableNalUnitBitArray.java index 6d34a4190e..33db40b91d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ParsableNalUnitBitArray.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/ParsableNalUnitBitArray.java @@ -15,11 +15,12 @@ */ package com.google.android.exoplayer2.util; + /** * Wraps a byte array, providing methods that allow it to be read as a NAL unit bitstream. - *

      - * Whenever the byte sequence [0, 0, 3] appears in the wrapped byte array, it is treated as [0, 0] - * for all reading/skipping operations, which makes the bitstream appear to be unescaped. + * + *

      Whenever the byte sequence [0, 0, 3] appears in the wrapped byte array, it is treated as [0, + * 0] for all reading/skipping operations, which makes the bitstream appear to be unescaped. */ public final class ParsableNalUnitBitArray { @@ -35,7 +36,7 @@ public final class ParsableNalUnitBitArray { * @param offset The byte offset in {@code data} to start reading from. * @param limit The byte offset of the end of the bitstream in {@code data}. */ - @SuppressWarnings({"initialization.fields.uninitialized", "method.invocation.invalid"}) + @SuppressWarnings({"initialization.fields.uninitialized", "nullness:method.invocation"}) public ParsableNalUnitBitArray(byte[] data, int offset, int limit) { reset(data, offset, limit); } @@ -55,9 +56,7 @@ public void reset(byte[] data, int offset, int limit) { assertValidOffset(); } - /** - * Skips a single bit. - */ + /** Skips a single bit. */ public void skipBit() { if (++bitOffset == 8) { bitOffset = 0; @@ -198,14 +197,16 @@ private int readExpGolombCodeNum() { } private boolean shouldSkipByte(int offset) { - return 2 <= offset && offset < byteLimit && data[offset] == (byte) 0x03 - && data[offset - 2] == (byte) 0x00 && data[offset - 1] == (byte) 0x00; + return 2 <= offset + && offset < byteLimit + && data[offset] == (byte) 0x03 + && data[offset - 2] == (byte) 0x00 + && data[offset - 1] == (byte) 0x00; } private void assertValidOffset() { // It is fine for position to be at the end of the array, but no further. - Assertions.checkState(byteOffset >= 0 - && (byteOffset < byteLimit || (byteOffset == byteLimit && bitOffset == 0))); + Assertions.checkState( + byteOffset >= 0 && (byteOffset < byteLimit || (byteOffset == byteLimit && bitOffset == 0))); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Predicate.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Predicate.java deleted file mode 100644 index b582cf3f7c..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Predicate.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.util; - -/** - * Determines a true or false value for a given input. - * - * @param The input type of the predicate. - */ -public interface Predicate { - - /** - * Evaluates an input. - * - * @param input The input to evaluate. - * @return The evaluated result. 
- */ - boolean evaluate(T input); - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/PriorityTaskManager.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/PriorityTaskManager.java index 2ebda60821..e380d84e35 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/PriorityTaskManager.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/PriorityTaskManager.java @@ -15,29 +15,28 @@ */ package com.google.android.exoplayer2.util; +import static java.lang.Math.max; + import java.io.IOException; import java.util.Collections; import java.util.PriorityQueue; /** * Allows tasks with associated priorities to control how they proceed relative to one another. - *

      - * A task should call {@link #add(int)} to register with the manager and {@link #remove(int)} to + * + *

      A task should call {@link #add(int)} to register with the manager and {@link #remove(int)} to * unregister. A registered task will prevent tasks of lower priority from proceeding, and should * call {@link #proceed(int)}, {@link #proceedNonBlocking(int)} or {@link #proceedOrThrow(int)} each * time it wishes to check whether it is itself allowed to proceed. */ public final class PriorityTaskManager { - /** - * Thrown when task attempts to proceed when another registered task has a higher priority. - */ + /** Thrown when task attempts to proceed when another registered task has a higher priority. */ public static class PriorityTooLowException extends IOException { public PriorityTooLowException(int priority, int highestPriority) { super("Priority too low [priority=" + priority + ", highest=" + highestPriority + "]"); } - } private final Object lock = new Object(); @@ -59,7 +58,7 @@ public PriorityTaskManager() { public void add(int priority) { synchronized (lock) { queue.add(priority); - highestPriority = Math.max(highestPriority, priority); + highestPriority = max(highestPriority, priority); } } @@ -115,5 +114,4 @@ public void remove(int priority) { lock.notifyAll(); } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/RepeatModeUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/RepeatModeUtil.java index 3485877bc4..68ddc0e445 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/RepeatModeUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/RepeatModeUtil.java @@ -15,40 +15,42 @@ */ package com.google.android.exoplayer2.util; +import static java.lang.annotation.ElementType.FIELD; +import static java.lang.annotation.ElementType.LOCAL_VARIABLE; +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.ElementType.PARAMETER; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import com.google.android.exoplayer2.Player; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; -/** - * Util class for repeat mode handling. - */ +/** Util class for repeat mode handling. */ public final class RepeatModeUtil { - // LINT.IfChange /** * Set of repeat toggle modes. Can be combined using bit-wise operations. Possible flag values are * {@link #REPEAT_TOGGLE_MODE_NONE}, {@link #REPEAT_TOGGLE_MODE_ONE} and {@link * #REPEAT_TOGGLE_MODE_ALL}. */ + // @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility + // with Kotlin usages from before TYPE_USE was added. @Documented @Retention(RetentionPolicy.SOURCE) + @Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE}) @IntDef( flag = true, value = {REPEAT_TOGGLE_MODE_NONE, REPEAT_TOGGLE_MODE_ONE, REPEAT_TOGGLE_MODE_ALL}) public @interface RepeatToggleModes {} - /** - * All repeat mode buttons disabled. - */ + /** All repeat mode buttons disabled. */ public static final int REPEAT_TOGGLE_MODE_NONE = 0; - /** - * "Repeat One" button enabled. - */ + /** "Repeat One" button enabled. */ public static final int REPEAT_TOGGLE_MODE_ONE = 1; /** "Repeat All" button enabled. */ public static final int REPEAT_TOGGLE_MODE_ALL = 1 << 1; // 2 - // LINT.ThenChange(../../../../../../../../../ui/src/main/res/values/attrs.xml) private RepeatModeUtil() { // Prevent instantiation. 
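The add/proceed/remove contract described in the PriorityTaskManager javadoc above is easier to picture with a hedged sketch of a low-priority loop that yields to higher-priority work; the use of C.PRIORITY_DOWNLOAD and the two placeholder helpers are assumptions made for illustration.

```java
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.PriorityTaskManager;

final class PriorityTaskExample {
  // Illustrative: a background download loop that defers to playback.
  static void downloadAll(PriorityTaskManager priorityTaskManager) throws InterruptedException {
    priorityTaskManager.add(C.PRIORITY_DOWNLOAD); // Register before doing any work.
    try {
      while (hasMoreChunks()) {
        // Blocks while any registered task has a higher priority (e.g. C.PRIORITY_PLAYBACK).
        priorityTaskManager.proceed(C.PRIORITY_DOWNLOAD);
        downloadNextChunk();
      }
    } finally {
      priorityTaskManager.remove(C.PRIORITY_DOWNLOAD); // Always unregister, even on failure.
    }
  }

  private static boolean hasMoreChunks() { return false; } // Placeholder.
  private static void downloadNextChunk() {} // Placeholder.
}
```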
@@ -61,8 +63,8 @@ private RepeatModeUtil() { * @param enabledModes Bitmask of enabled modes. * @return The next repeat mode. */ - public static @Player.RepeatMode int getNextRepeatMode(@Player.RepeatMode int currentMode, - int enabledModes) { + public static @Player.RepeatMode int getNextRepeatMode( + @Player.RepeatMode int currentMode, int enabledModes) { for (int offset = 1; offset <= 2; offset++) { @Player.RepeatMode int proposedMode = (currentMode + offset) % 3; if (isRepeatModeEnabled(proposedMode, enabledModes)) { @@ -91,5 +93,4 @@ public static boolean isRepeatModeEnabled(@Player.RepeatMode int repeatMode, int return false; } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/RunnableFutureTask.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/RunnableFutureTask.java new file mode 100644 index 0000000000..ac785295b9 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/RunnableFutureTask.java @@ -0,0 +1,174 @@ +/* + * Copyright 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import static java.util.concurrent.TimeUnit.MILLISECONDS; + +import androidx.annotation.Nullable; +import java.util.concurrent.CancellationException; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.RunnableFuture; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +/** + * A {@link RunnableFuture} that supports additional uninterruptible operations to query whether + * execution has started and finished. + * + * @param The type of the result. + * @param The type of any {@link ExecutionException} cause. + */ +public abstract class RunnableFutureTask implements RunnableFuture { + + private final ConditionVariable started; + private final ConditionVariable finished; + private final Object cancelLock; + + @Nullable private Exception exception; + @Nullable private R result; + + @Nullable private Thread workThread; + private boolean canceled; + + protected RunnableFutureTask() { + started = new ConditionVariable(); + finished = new ConditionVariable(); + cancelLock = new Object(); + } + + /** Blocks until the task has started, or has been canceled without having been started. */ + public final void blockUntilStarted() { + started.blockUninterruptible(); + } + + /** Blocks until the task has finished, or has been canceled without having been started. */ + public final void blockUntilFinished() { + finished.blockUninterruptible(); + } + + // Future implementation. 
+ + @Override + @UnknownNull + public final R get() throws ExecutionException, InterruptedException { + finished.block(); + return getResult(); + } + + @Override + @UnknownNull + public final R get(long timeout, TimeUnit unit) + throws ExecutionException, InterruptedException, TimeoutException { + long timeoutMs = MILLISECONDS.convert(timeout, unit); + if (!finished.block(timeoutMs)) { + throw new TimeoutException(); + } + return getResult(); + } + + @Override + public final boolean cancel(boolean interruptIfRunning) { + synchronized (cancelLock) { + if (canceled || finished.isOpen()) { + return false; + } + canceled = true; + cancelWork(); + @Nullable Thread workThread = this.workThread; + if (workThread != null) { + if (interruptIfRunning) { + workThread.interrupt(); + } + } else { + started.open(); + finished.open(); + } + return true; + } + } + + @Override + public final boolean isDone() { + return finished.isOpen(); + } + + @Override + public final boolean isCancelled() { + return canceled; + } + + // Runnable implementation. + + @Override + public final void run() { + synchronized (cancelLock) { + if (canceled) { + return; + } + workThread = Thread.currentThread(); + } + started.open(); + try { + result = doWork(); + } catch (Exception e) { + // Must be an instance of E or RuntimeException. + exception = e; + } finally { + synchronized (cancelLock) { + finished.open(); + workThread = null; + // Clear the interrupted flag if set, to avoid it leaking into any subsequent tasks executed + // using the calling thread. + Thread.interrupted(); + } + } + } + + // Internal methods. + + /** + * Performs the work or computation. + * + * @return The computed result. + * @throws E If an error occurred. + */ + @UnknownNull + protected abstract R doWork() throws E; + + /** + * Cancels any work being done by {@link #doWork()}. If {@link #doWork()} is currently executing + * then the thread on which it's executing may be interrupted immediately after this method + * returns. + * + *

      The default implementation does nothing. + */ + protected void cancelWork() { + // Do nothing. + } + + // The return value is guaranteed to be non-null if and only if R is a non-null type, but there's + // no way to assert this. Suppress the warning instead. + @SuppressWarnings("nullness:return") + @UnknownNull + private R getResult() throws ExecutionException { + if (canceled) { + throw new CancellationException(); + } else if (exception != null) { + throw new ExecutionException(exception); + } + return result; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Size.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Size.java new file mode 100644 index 0000000000..bbd8e25893 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Size.java @@ -0,0 +1,87 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.android.exoplayer2.util; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; + +/** Immutable class for describing width and height dimensions in pixels. */ +public final class Size { + + /** A static instance to represent an unknown size value. */ + public static final Size UNKNOWN = + new Size(/* width= */ C.LENGTH_UNSET, /* height= */ C.LENGTH_UNSET); + + /* A static instance to represent a size of zero height and width. */ + public static final Size ZERO = new Size(/* width= */ 0, /* height= */ 0); + + private final int width; + private final int height; + + /** + * Creates a new immutable Size instance. + * + * @param width The width of the size, in pixels, or {@link C#LENGTH_UNSET} if unknown. + * @param height The height of the size, in pixels, or {@link C#LENGTH_UNSET} if unknown. + * @throws IllegalArgumentException if an invalid {@code width} or {@code height} is specified. + */ + public Size(int width, int height) { + checkArgument( + (width == C.LENGTH_UNSET || width >= 0) && (height == C.LENGTH_UNSET || height >= 0)); + + this.width = width; + this.height = height; + } + + /** Returns the width of the size (in pixels), or {@link C#LENGTH_UNSET} if unknown. */ + public int getWidth() { + return width; + } + + /** Returns the height of the size (in pixels), or {@link C#LENGTH_UNSET} if unknown. 
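To make the lifecycle of the new RunnableFutureTask concrete, here is a hedged sketch of a trivial subclass, assuming the upstream result and exception type parameters; the class names and the constant result are illustrative.

```java
import com.google.android.exoplayer2.util.RunnableFutureTask;
import java.io.IOException;
import java.util.concurrent.ExecutionException;

final class RunnableFutureTaskExample {
  // Illustrative subclass: doWork() returns a constant instead of doing blocking I/O.
  private static final class LoadTask extends RunnableFutureTask<String, IOException> {
    @Override
    protected String doWork() throws IOException {
      return "loaded";
    }
  }

  public static void main(String[] args) throws ExecutionException, InterruptedException {
    LoadTask task = new LoadTask();
    new Thread(task).start(); // A RunnableFuture can be handed to a Thread or an Executor.
    task.blockUntilFinished(); // Uninterruptible wait provided by RunnableFutureTask.
    System.out.println(task.get()); // "loaded"
  }
}
```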
*/ + public int getHeight() { + return height; + } + + @Override + public boolean equals(@Nullable Object obj) { + if (obj == null) { + return false; + } + if (this == obj) { + return true; + } + if (obj instanceof Size) { + Size other = (Size) obj; + return width == other.width && height == other.height; + } + return false; + } + + @Override + public String toString() { + return width + "x" + height; + } + + @Override + public int hashCode() { + // assuming most sizes are <2^16, doing a rotate will give us perfect hashing + return height ^ ((width << (Integer.SIZE / 2)) | (width >>> (Integer.SIZE / 2))); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SntpClient.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SntpClient.java new file mode 100644 index 0000000000..03336fdeba --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SntpClient.java @@ -0,0 +1,347 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import android.os.SystemClock; +import androidx.annotation.GuardedBy; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.upstream.Loader; +import com.google.android.exoplayer2.upstream.Loader.LoadErrorAction; +import com.google.android.exoplayer2.upstream.Loader.Loadable; +import java.io.IOException; +import java.net.DatagramPacket; +import java.net.DatagramSocket; +import java.net.InetAddress; +import java.util.Arrays; +import java.util.ConcurrentModificationException; + +/** + * Static utility to retrieve the device time offset using SNTP. + * + *

      Based on the Android + * framework SntpClient. + */ +public final class SntpClient { + + /** The default NTP host address used to retrieve {@link #getElapsedRealtimeOffsetMs()}. */ + public static final String DEFAULT_NTP_HOST = "time.android.com"; + + /** Callback for calls to {@link #initialize(Loader, InitializationCallback)}. */ + public interface InitializationCallback { + + /** Called when the device time offset has been initialized. */ + void onInitialized(); + + /** + * Called when the device time offset failed to initialize. + * + * @param error The error that caused the initialization failure. + */ + void onInitializationFailed(IOException error); + } + + private static final int TIMEOUT_MS = 10_000; + + private static final int ORIGINATE_TIME_OFFSET = 24; + private static final int RECEIVE_TIME_OFFSET = 32; + private static final int TRANSMIT_TIME_OFFSET = 40; + private static final int NTP_PACKET_SIZE = 48; + + private static final int NTP_PORT = 123; + private static final int NTP_MODE_CLIENT = 3; + private static final int NTP_MODE_SERVER = 4; + private static final int NTP_MODE_BROADCAST = 5; + private static final int NTP_VERSION = 3; + + private static final int NTP_LEAP_NOSYNC = 3; + private static final int NTP_STRATUM_DEATH = 0; + private static final int NTP_STRATUM_MAX = 15; + + private static final long OFFSET_1900_TO_1970 = ((365L * 70L) + 17L) * 24L * 60L * 60L; + + private static final Object loaderLock = new Object(); + private static final Object valueLock = new Object(); + + @GuardedBy("valueLock") + private static boolean isInitialized; + + @GuardedBy("valueLock") + private static long elapsedRealtimeOffsetMs; + + @GuardedBy("valueLock") + private static String ntpHost = DEFAULT_NTP_HOST; + + private SntpClient() {} + + /** Returns the NTP host address used to retrieve {@link #getElapsedRealtimeOffsetMs()}. */ + public static String getNtpHost() { + synchronized (valueLock) { + return ntpHost; + } + } + + /** + * Sets the NTP host address used to retrieve {@link #getElapsedRealtimeOffsetMs()}. + * + *

      The default is {@link #DEFAULT_NTP_HOST}. + * + *

      If the new host address is different from the previous one, the NTP client will be {@link + * #isInitialized()} uninitialized} again. + * + * @param ntpHost The NTP host address. + */ + public static void setNtpHost(String ntpHost) { + synchronized (valueLock) { + if (!SntpClient.ntpHost.equals(ntpHost)) { + SntpClient.ntpHost = ntpHost; + isInitialized = false; + } + } + } + + /** + * Returns whether the device time offset has already been loaded. + * + *

      If {@code false}, use {@link #initialize(Loader, InitializationCallback)} to start the + * initialization. + */ + public static boolean isInitialized() { + synchronized (valueLock) { + return isInitialized; + } + } + + /** + * Returns the offset between {@link SystemClock#elapsedRealtime()} and the NTP server time in + * milliseconds, or {@link C#TIME_UNSET} if {@link #isInitialized()} returns false. + * + *

      The offset is calculated as {@code ntpServerTime - deviceElapsedRealTime}. + */ + public static long getElapsedRealtimeOffsetMs() { + synchronized (valueLock) { + return isInitialized ? elapsedRealtimeOffsetMs : C.TIME_UNSET; + } + } + + /** + * Starts loading the device time offset. + * + * @param loader A {@link Loader} to use for loading the time offset, or null to create a new one. + * @param callback An optional {@link InitializationCallback} to be notified when the time offset + * has been initialized or initialization failed. + */ + public static void initialize( + @Nullable Loader loader, @Nullable InitializationCallback callback) { + if (isInitialized()) { + if (callback != null) { + callback.onInitialized(); + } + return; + } + if (loader == null) { + loader = new Loader("SntpClient"); + } + loader.startLoading( + new NtpTimeLoadable(), new NtpTimeCallback(callback), /* defaultMinRetryCount= */ 1); + } + + private static long loadNtpTimeOffsetMs() throws IOException { + InetAddress address = InetAddress.getByName(getNtpHost()); + try (DatagramSocket socket = new DatagramSocket()) { + socket.setSoTimeout(TIMEOUT_MS); + byte[] buffer = new byte[NTP_PACKET_SIZE]; + DatagramPacket request = new DatagramPacket(buffer, buffer.length, address, NTP_PORT); + + // Set mode = 3 (client) and version = 3. Mode is in low 3 bits of the first byte and Version + // is in bits 3-5 of the first byte. + buffer[0] = NTP_MODE_CLIENT | (NTP_VERSION << 3); + + // Get current time and write it to the request packet. + long requestTime = System.currentTimeMillis(); + long requestTicks = SystemClock.elapsedRealtime(); + writeTimestamp(buffer, TRANSMIT_TIME_OFFSET, requestTime); + + socket.send(request); + + // Read the response. + DatagramPacket response = new DatagramPacket(buffer, buffer.length); + socket.receive(response); + final long responseTicks = SystemClock.elapsedRealtime(); + final long responseTime = requestTime + (responseTicks - requestTicks); + + // Extract the results. + final byte leap = (byte) ((buffer[0] >> 6) & 0x3); + final byte mode = (byte) (buffer[0] & 0x7); + final int stratum = (int) (buffer[1] & 0xff); + final long originateTime = readTimestamp(buffer, ORIGINATE_TIME_OFFSET); + final long receiveTime = readTimestamp(buffer, RECEIVE_TIME_OFFSET); + final long transmitTime = readTimestamp(buffer, TRANSMIT_TIME_OFFSET); + + // Check server reply validity according to RFC. + checkValidServerReply(leap, mode, stratum, transmitTime); + + // receiveTime = originateTime + transit + skew + // responseTime = transmitTime + transit - skew + // clockOffset = ((receiveTime - originateTime) + (transmitTime - responseTime))/2 + // = ((originateTime + transit + skew - originateTime) + + // (transmitTime - (transmitTime + transit - skew)))/2 + // = ((transit + skew) + (transmitTime - transmitTime - transit + skew))/2 + // = (transit + skew - transit + skew)/2 + // = (2 * skew)/2 = skew + long clockOffset = ((receiveTime - originateTime) + (transmitTime - responseTime)) / 2; + + // Save our results using the times on this side of the network latency (i.e. response rather + // than request time) + long ntpTime = responseTime + clockOffset; + long ntpTimeReference = responseTicks; + + return ntpTime - ntpTimeReference; + } + } + + private static long readTimestamp(byte[] buffer, int offset) { + long seconds = read32(buffer, offset); + long fraction = read32(buffer, offset + 4); + // Special case: zero means zero. 
+ if (seconds == 0 && fraction == 0) { + return 0; + } + return ((seconds - OFFSET_1900_TO_1970) * 1000) + ((fraction * 1000L) / 0x100000000L); + } + + private static void writeTimestamp(byte[] buffer, int offset, long time) { + // Special case: zero means zero. + if (time == 0) { + Arrays.fill(buffer, offset, offset + 8, (byte) 0x00); + return; + } + + long seconds = time / 1000L; + long milliseconds = time - seconds * 1000L; + seconds += OFFSET_1900_TO_1970; + + // Write seconds in big endian format. + buffer[offset++] = (byte) (seconds >> 24); + buffer[offset++] = (byte) (seconds >> 16); + buffer[offset++] = (byte) (seconds >> 8); + buffer[offset++] = (byte) (seconds >> 0); + + long fraction = milliseconds * 0x100000000L / 1000L; + // Write fraction in big endian format. + buffer[offset++] = (byte) (fraction >> 24); + buffer[offset++] = (byte) (fraction >> 16); + buffer[offset++] = (byte) (fraction >> 8); + // Low order bits should be random data. + buffer[offset++] = (byte) (Math.random() * 255.0); + } + + private static long read32(byte[] buffer, int offset) { + byte b0 = buffer[offset]; + byte b1 = buffer[offset + 1]; + byte b2 = buffer[offset + 2]; + byte b3 = buffer[offset + 3]; + + // Convert signed bytes to unsigned values. + int i0 = ((b0 & 0x80) == 0x80 ? (b0 & 0x7F) + 0x80 : b0); + int i1 = ((b1 & 0x80) == 0x80 ? (b1 & 0x7F) + 0x80 : b1); + int i2 = ((b2 & 0x80) == 0x80 ? (b2 & 0x7F) + 0x80 : b2); + int i3 = ((b3 & 0x80) == 0x80 ? (b3 & 0x7F) + 0x80 : b3); + + return ((long) i0 << 24) + ((long) i1 << 16) + ((long) i2 << 8) + (long) i3; + } + + private static void checkValidServerReply(byte leap, byte mode, int stratum, long transmitTime) + throws IOException { + if (leap == NTP_LEAP_NOSYNC) { + throw new IOException("SNTP: Unsynchronized server"); + } + if ((mode != NTP_MODE_SERVER) && (mode != NTP_MODE_BROADCAST)) { + throw new IOException("SNTP: Untrusted mode: " + mode); + } + if ((stratum == NTP_STRATUM_DEATH) || (stratum > NTP_STRATUM_MAX)) { + throw new IOException("SNTP: Untrusted stratum: " + stratum); + } + if (transmitTime == 0) { + throw new IOException("SNTP: Zero transmitTime"); + } + } + + private static final class NtpTimeLoadable implements Loadable { + + @Override + public void cancelLoad() {} + + @Override + public void load() throws IOException { + // Synchronized to prevent redundant parallel requests. + synchronized (loaderLock) { + synchronized (valueLock) { + if (isInitialized) { + return; + } + } + long offsetMs = loadNtpTimeOffsetMs(); + synchronized (valueLock) { + elapsedRealtimeOffsetMs = offsetMs; + isInitialized = true; + } + } + } + } + + private static final class NtpTimeCallback implements Loader.Callback { + + @Nullable private final InitializationCallback callback; + + public NtpTimeCallback(@Nullable InitializationCallback callback) { + this.callback = callback; + } + + @Override + public void onLoadCompleted(Loadable loadable, long elapsedRealtimeMs, long loadDurationMs) { + if (callback != null) { + if (!SntpClient.isInitialized()) { + // This may happen in the unlikely edge case of someone calling setNtpHost between the end + // of the load method and this callback. + callback.onInitializationFailed(new IOException(new ConcurrentModificationException())); + } else { + callback.onInitialized(); + } + } + } + + @Override + public void onLoadCanceled( + Loadable loadable, long elapsedRealtimeMs, long loadDurationMs, boolean released) { + // Ignore. 
+ } + + @Override + public LoadErrorAction onLoadError( + Loadable loadable, + long elapsedRealtimeMs, + long loadDurationMs, + IOException error, + int errorCount) { + if (callback != null) { + callback.onInitializationFailed(error); + } + return Loader.DONT_RETRY; + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/StandaloneMediaClock.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/StandaloneMediaClock.java index e5f9aa645f..46806cb65a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/StandaloneMediaClock.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/StandaloneMediaClock.java @@ -15,7 +15,6 @@ */ package com.google.android.exoplayer2.util; -import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.PlaybackParameters; /** @@ -38,12 +37,10 @@ public final class StandaloneMediaClock implements MediaClock { */ public StandaloneMediaClock(Clock clock) { this.clock = clock; - this.playbackParameters = PlaybackParameters.DEFAULT; + playbackParameters = PlaybackParameters.DEFAULT; } - /** - * Starts the clock. Does nothing if the clock is already started. - */ + /** Starts the clock. Does nothing if the clock is already started. */ public void start() { if (!started) { baseElapsedMs = clock.elapsedRealtime(); @@ -51,9 +48,7 @@ public void start() { } } - /** - * Stops the clock. Does nothing if the clock is already stopped. - */ + /** Stops the clock. Does nothing if the clock is already stopped. */ public void stop() { if (started) { resetPosition(getPositionUs()); @@ -79,8 +74,10 @@ public long getPositionUs() { if (started) { long elapsedSinceBaseMs = clock.elapsedRealtime() - baseElapsedMs; if (playbackParameters.speed == 1f) { - positionUs += C.msToUs(elapsedSinceBaseMs); + positionUs += Util.msToUs(elapsedSinceBaseMs); } else { + // Add the media time in microseconds that will elapse in elapsedSinceBaseMs milliseconds of + // wallclock time positionUs += playbackParameters.getMediaTimeUsForPlayoutTimeMs(elapsedSinceBaseMs); } } @@ -100,5 +97,4 @@ public void setPlaybackParameters(PlaybackParameters playbackParameters) { public PlaybackParameters getPlaybackParameters() { return playbackParameters; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SurfaceInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SurfaceInfo.java new file mode 100644 index 0000000000..3ee936554d --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SurfaceInfo.java @@ -0,0 +1,81 @@ +/* + * Copyright 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import static com.google.android.exoplayer2.util.Assertions.checkArgument; + +import android.view.Surface; +import androidx.annotation.Nullable; + +/** Immutable value class for a {@link Surface} and supporting information. 
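Since the SntpClient added above exposes its result only through a static offset, a short hedged sketch of the intended call pattern may help; the log tags are illustrative, and passing a null loader relies on the client creating its own Loader as shown in initialize().

```java
import android.os.SystemClock;
import android.util.Log;
import com.google.android.exoplayer2.util.SntpClient;
import java.io.IOException;

final class SntpExample {
  // Illustrative: log the NTP-corrected wall-clock time once the offset has been loaded.
  static void logNtpTime() {
    SntpClient.initialize(
        /* loader= */ null, // A new Loader is created internally when null is passed.
        new SntpClient.InitializationCallback() {
          @Override
          public void onInitialized() {
            long ntpNowMs =
                SystemClock.elapsedRealtime() + SntpClient.getElapsedRealtimeOffsetMs();
            Log.d("SntpExample", "NTP time (ms since epoch): " + ntpNowMs);
          }

          @Override
          public void onInitializationFailed(IOException error) {
            Log.w("SntpExample", "SNTP lookup failed", error);
          }
        });
  }
}
```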
*/ +public final class SurfaceInfo { + + /** The {@link Surface}. */ + public final Surface surface; + /** The width of frames rendered to the {@link #surface}, in pixels. */ + public final int width; + /** The height of frames rendered to the {@link #surface}, in pixels. */ + public final int height; + /** + * A counter-clockwise rotation to apply to frames before rendering them to the {@link #surface}. + * + *

      Must be 0, 90, 180, or 270 degrees. Default is 0. + */ + public final int orientationDegrees; + + /** Creates a new instance. */ + public SurfaceInfo(Surface surface, int width, int height) { + this(surface, width, height, /* orientationDegrees= */ 0); + } + + /** Creates a new instance. */ + public SurfaceInfo(Surface surface, int width, int height, int orientationDegrees) { + checkArgument( + orientationDegrees == 0 + || orientationDegrees == 90 + || orientationDegrees == 180 + || orientationDegrees == 270, + "orientationDegrees must be 0, 90, 180, or 270"); + this.surface = surface; + this.width = width; + this.height = height; + this.orientationDegrees = orientationDegrees; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (!(o instanceof SurfaceInfo)) { + return false; + } + SurfaceInfo that = (SurfaceInfo) o; + return width == that.width + && height == that.height + && orientationDegrees == that.orientationDegrees + && surface.equals(that.surface); + } + + @Override + public int hashCode() { + int result = surface.hashCode(); + result = 31 * result + width; + result = 31 * result + height; + result = 31 * result + orientationDegrees; + return result; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SystemClock.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SystemClock.java index 89e1c60d7a..c3b31aa5c9 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SystemClock.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SystemClock.java @@ -44,12 +44,12 @@ public long uptimeMillis() { } @Override - public void sleep(long sleepTimeMs) { - android.os.SystemClock.sleep(sleepTimeMs); + public HandlerWrapper createHandler(Looper looper, @Nullable Callback callback) { + return new SystemHandlerWrapper(new Handler(looper, callback)); } @Override - public HandlerWrapper createHandler(Looper looper, @Nullable Callback callback) { - return new SystemHandlerWrapper(new Handler(looper, callback)); + public void onThreadBlocked() { + // Do nothing. } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SystemHandlerWrapper.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SystemHandlerWrapper.java index 1fbea2ed7e..d7d01858c8 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SystemHandlerWrapper.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/SystemHandlerWrapper.java @@ -15,13 +15,24 @@ */ package com.google.android.exoplayer2.util; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import android.os.Handler; import android.os.Looper; -import android.os.Message; +import androidx.annotation.GuardedBy; import androidx.annotation.Nullable; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.util.ArrayList; +import java.util.List; /** The standard implementation of {@link HandlerWrapper}. 
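A minimal, hedged example of constructing the new SurfaceInfo value class; the resolution and rotation values are arbitrary.

```java
import android.view.Surface;
import com.google.android.exoplayer2.util.SurfaceInfo;

final class SurfaceInfoExample {
  // Illustrative: describe a 1080p output surface whose frames are pre-rotated by 90 degrees.
  // Any orientation other than 0, 90, 180 or 270 makes the constructor throw.
  static SurfaceInfo describeOutput(Surface outputSurface) {
    return new SurfaceInfo(
        outputSurface, /* width= */ 1920, /* height= */ 1080, /* orientationDegrees= */ 90);
  }
}
```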
*/ /* package */ final class SystemHandlerWrapper implements HandlerWrapper { + private static final int MAX_POOL_SIZE = 50; + + @GuardedBy("messagePool") + private static final List messagePool = new ArrayList<>(MAX_POOL_SIZE); + private final android.os.Handler handler; public SystemHandlerWrapper(android.os.Handler handler) { @@ -33,24 +44,36 @@ public Looper getLooper() { return handler.getLooper(); } + @Override + public boolean hasMessages(int what) { + return handler.hasMessages(what); + } + @Override public Message obtainMessage(int what) { - return handler.obtainMessage(what); + return obtainSystemMessage().setMessage(handler.obtainMessage(what), /* handler= */ this); } @Override public Message obtainMessage(int what, @Nullable Object obj) { - return handler.obtainMessage(what, obj); + return obtainSystemMessage().setMessage(handler.obtainMessage(what, obj), /* handler= */ this); } @Override public Message obtainMessage(int what, int arg1, int arg2) { - return handler.obtainMessage(what, arg1, arg2); + return obtainSystemMessage() + .setMessage(handler.obtainMessage(what, arg1, arg2), /* handler= */ this); } @Override public Message obtainMessage(int what, int arg1, int arg2, @Nullable Object obj) { - return handler.obtainMessage(what, arg1, arg2, obj); + return obtainSystemMessage() + .setMessage(handler.obtainMessage(what, arg1, arg2, obj), /* handler= */ this); + } + + @Override + public boolean sendMessageAtFrontOfQueue(Message message) { + return ((SystemMessage) message).sendAtFrontOfQueue(handler); } @Override @@ -58,6 +81,11 @@ public boolean sendEmptyMessage(int what) { return handler.sendEmptyMessage(what); } + @Override + public boolean sendEmptyMessageDelayed(int what, int delayMs) { + return handler.sendEmptyMessageDelayed(what, delayMs); + } + @Override public boolean sendEmptyMessageAtTime(int what, long uptimeMs) { return handler.sendEmptyMessageAtTime(what, uptimeMs); @@ -82,4 +110,61 @@ public boolean post(Runnable runnable) { public boolean postDelayed(Runnable runnable, long delayMs) { return handler.postDelayed(runnable, delayMs); } + + @Override + public boolean postAtFrontOfQueue(Runnable runnable) { + return handler.postAtFrontOfQueue(runnable); + } + + private static SystemMessage obtainSystemMessage() { + synchronized (messagePool) { + return messagePool.isEmpty() + ? 
new SystemMessage() + : messagePool.remove(messagePool.size() - 1); + } + } + + private static void recycleMessage(SystemMessage message) { + synchronized (messagePool) { + if (messagePool.size() < MAX_POOL_SIZE) { + messagePool.add(message); + } + } + } + + private static final class SystemMessage implements Message { + + @Nullable private android.os.Message message; + @Nullable private SystemHandlerWrapper handler; + + @CanIgnoreReturnValue + public SystemMessage setMessage(android.os.Message message, SystemHandlerWrapper handler) { + this.message = message; + this.handler = handler; + return this; + } + + public boolean sendAtFrontOfQueue(Handler handler) { + boolean success = handler.sendMessageAtFrontOfQueue(checkNotNull(message)); + recycle(); + return success; + } + + @Override + public void sendToTarget() { + checkNotNull(message).sendToTarget(); + recycle(); + } + + @Override + public HandlerWrapper getTarget() { + return checkNotNull(handler); + } + + private void recycle() { + message = null; + handler = null; + recycleMessage(this); + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/TimedValueQueue.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/TimedValueQueue.java index da5d9bafeb..d49b37224c 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/TimedValueQueue.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/TimedValueQueue.java @@ -62,6 +62,12 @@ public synchronized int size() { return size; } + /** Removes and returns the first value in the queue, or null if the queue is empty. */ + @Nullable + public synchronized V pollFirst() { + return size == 0 ? null : popFirst(); + } + /** * Returns the value with the greatest timestamp which is less than or equal to the given * timestamp. Removes all older values and the returned one from the buffer. @@ -71,7 +77,8 @@ public synchronized int size() { * timestamp or null if there is no such value. * @see #poll(long) */ - public synchronized @Nullable V pollFloor(long timestamp) { + @Nullable + public synchronized V pollFloor(long timestamp) { return poll(timestamp, /* onlyOlder= */ true); } @@ -83,7 +90,8 @@ public synchronized int size() { * @return The value with the closest timestamp or null if the buffer is empty. 
* @see #pollFloor(long) */ - public synchronized @Nullable V poll(long timestamp) { + @Nullable + public synchronized V poll(long timestamp) { return poll(timestamp, /* onlyOlder= */ false); } @@ -99,7 +107,7 @@ public synchronized int size() { */ @Nullable private V poll(long timestamp, boolean onlyOlder) { - V value = null; + @Nullable V value = null; long previousTimeDiff = Long.MAX_VALUE; while (size > 0) { long timeDiff = timestamp - timestamps[first]; @@ -107,14 +115,21 @@ private V poll(long timestamp, boolean onlyOlder) { break; } previousTimeDiff = timeDiff; - value = values[first]; - values[first] = null; - first = (first + 1) % values.length; - size--; + value = popFirst(); } return value; } + @Nullable + private V popFirst() { + Assertions.checkState(size > 0); + @Nullable V value = values[first]; + values[first] = null; + first = (first + 1) % values.length; + size--; + return value; + } + private void clearBufferOnTimeDiscontinuity(long timestamp) { if (size > 0) { int last = (first + size - 1) % values.length; @@ -131,7 +146,7 @@ private void doubleCapacityIfFull() { } int newCapacity = capacity * 2; long[] newTimestamps = new long[newCapacity]; - V[] newValues = newArray(newCapacity); + @NullableType V[] newValues = newArray(newCapacity); // Reset the loop starting index to 0 while coping to the new buffer. // First copy the values from 'first' index to the end of original array. int length = capacity - first; @@ -155,7 +170,7 @@ private void addUnchecked(long timestamp, V value) { } @SuppressWarnings("unchecked") - private static V[] newArray(int length) { + private static @NullableType V[] newArray(int length) { return (V[]) new Object[length]; } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/TimestampAdjuster.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/TimestampAdjuster.java index 439374a086..8d7b709abd 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/TimestampAdjuster.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/TimestampAdjuster.java @@ -15,19 +15,36 @@ */ package com.google.android.exoplayer2.util; +import androidx.annotation.GuardedBy; import com.google.android.exoplayer2.C; /** - * Offsets timestamps according to an initial sample timestamp offset. MPEG-2 TS timestamps scaling - * and adjustment is supported, taking into account timestamp rollover. + * Adjusts and offsets sample timestamps. MPEG-2 TS timestamps scaling and adjustment is supported, + * taking into account timestamp rollover. */ public final class TimestampAdjuster { /** * A special {@code firstSampleTimestampUs} value indicating that presentation timestamps should - * not be offset. + * not be offset. In this mode: + * + *

        + *
      • {@link #getFirstSampleTimestampUs()} will always return {@link C#TIME_UNSET}. + *
      • The only timestamp adjustment performed is to account for MPEG-2 TS timestamp rollover. + *
      + */ + public static final long MODE_NO_OFFSET = Long.MAX_VALUE; + + /** + * A special {@code firstSampleTimestampUs} value indicating that the adjuster will be shared by + * multiple threads. In this mode: + * + *
        + *
      • {@link #getFirstSampleTimestampUs()} will always return {@link C#TIME_UNSET}. + *
      • Calling threads must call {@link #sharedInitializeOrWait} prior to adjusting timestamps. + *
      */ - public static final long DO_NOT_OFFSET = Long.MAX_VALUE; + public static final long MODE_SHARED = Long.MAX_VALUE - 1; /** * The value one greater than the largest representable (33 bit) MPEG-2 TS 90 kHz clock @@ -35,69 +52,111 @@ public final class TimestampAdjuster { */ private static final long MAX_PTS_PLUS_ONE = 0x200000000L; + @GuardedBy("this") private long firstSampleTimestampUs; + + @GuardedBy("this") private long timestampOffsetUs; - // Volatile to allow isInitialized to be called on a different thread to adjustSampleTimestamp. - private volatile long lastSampleTimestampUs; + @GuardedBy("this") + private long lastUnadjustedTimestampUs; + + /** + * Next sample timestamps for calling threads in shared mode when {@link #timestampOffsetUs} has + * not yet been set. + */ + // incompatible type argument for type parameter T of ThreadLocal. + @SuppressWarnings("nullness:type.argument.type.incompatible") + private final ThreadLocal nextSampleTimestampUs; /** - * @param firstSampleTimestampUs See {@link #setFirstSampleTimestampUs(long)}. + * @param firstSampleTimestampUs The desired value of the first adjusted sample timestamp in + * microseconds, or {@link #MODE_NO_OFFSET} if timestamps should not be offset, or {@link + * #MODE_SHARED} if the adjuster will be used in shared mode. */ + // incompatible types in assignment. + @SuppressWarnings("nullness:assignment.type.incompatible") public TimestampAdjuster(long firstSampleTimestampUs) { - lastSampleTimestampUs = C.TIME_UNSET; - setFirstSampleTimestampUs(firstSampleTimestampUs); + nextSampleTimestampUs = new ThreadLocal<>(); + reset(firstSampleTimestampUs); } /** - * Sets the desired result of the first call to {@link #adjustSampleTimestamp(long)}. Can only be - * called before any timestamps have been adjusted. + * For shared timestamp adjusters, performs necessary initialization actions for a caller. * - * @param firstSampleTimestampUs The first adjusted sample timestamp in microseconds, or - * {@link #DO_NOT_OFFSET} if presentation timestamps should not be offset. + *
        + *
      • If the adjuster has already established a {@link #getTimestampOffsetUs timestamp offset} + * then this method is a no-op. + *
      • If {@code canInitialize} is {@code true} and the adjuster has not yet established a + * timestamp offset, then the adjuster records the desired first sample timestamp for the + * calling thread and returns to allow the caller to proceed. If the timestamp offset has + * still not been established when the caller attempts to adjust its first timestamp, then + * the recorded timestamp is used to set it. + *
      • If {@code canInitialize} is {@code false} and the adjuster has not yet established a + * timestamp offset, then the call blocks until the timestamp offset is set. + *
      + * + * @param canInitialize Whether the caller is able to initialize the adjuster, if needed. + * @param nextSampleTimestampUs The desired timestamp for the next sample loaded by the calling + * thread, in microseconds. Only used if {@code canInitialize} is {@code true}. + * @throws InterruptedException If the thread is interrupted whilst blocked waiting for + * initialization to complete. */ - public synchronized void setFirstSampleTimestampUs(long firstSampleTimestampUs) { - Assertions.checkState(lastSampleTimestampUs == C.TIME_UNSET); - this.firstSampleTimestampUs = firstSampleTimestampUs; + public synchronized void sharedInitializeOrWait(boolean canInitialize, long nextSampleTimestampUs) + throws InterruptedException { + Assertions.checkState(firstSampleTimestampUs == MODE_SHARED); + if (timestampOffsetUs != C.TIME_UNSET) { + // Already initialized. + return; + } else if (canInitialize) { + this.nextSampleTimestampUs.set(nextSampleTimestampUs); + } else { + // Wait for another calling thread to complete initialization. + while (timestampOffsetUs == C.TIME_UNSET) { + wait(); + } + } } - /** Returns the last value passed to {@link #setFirstSampleTimestampUs(long)}. */ - public long getFirstSampleTimestampUs() { - return firstSampleTimestampUs; + /** + * Returns the value of the first adjusted sample timestamp in microseconds, or {@link + * C#TIME_UNSET} if timestamps will not be offset or if the adjuster is in shared mode. + */ + public synchronized long getFirstSampleTimestampUs() { + return firstSampleTimestampUs == MODE_NO_OFFSET || firstSampleTimestampUs == MODE_SHARED + ? C.TIME_UNSET + : firstSampleTimestampUs; } /** - * Returns the last value obtained from {@link #adjustSampleTimestamp}. If {@link - * #adjustSampleTimestamp} has not been called, returns the result of calling {@link - * #getFirstSampleTimestampUs()}. If this value is {@link #DO_NOT_OFFSET}, returns {@link - * C#TIME_UNSET}. + * Returns the last adjusted timestamp, in microseconds. If no timestamps have been adjusted yet + * then the result of {@link #getFirstSampleTimestampUs()} is returned. */ - public long getLastAdjustedTimestampUs() { - return lastSampleTimestampUs != C.TIME_UNSET - ? (lastSampleTimestampUs + timestampOffsetUs) - : firstSampleTimestampUs != DO_NOT_OFFSET ? firstSampleTimestampUs : C.TIME_UNSET; + public synchronized long getLastAdjustedTimestampUs() { + return lastUnadjustedTimestampUs != C.TIME_UNSET + ? lastUnadjustedTimestampUs + timestampOffsetUs + : getFirstSampleTimestampUs(); } /** - * Returns the offset between the input of {@link #adjustSampleTimestamp(long)} and its output. - * If {@link #DO_NOT_OFFSET} was provided to the constructor, 0 is returned. If the timestamp - * adjuster is yet not initialized, {@link C#TIME_UNSET} is returned. - * - * @return The offset between {@link #adjustSampleTimestamp(long)}'s input and output. - * {@link C#TIME_UNSET} if the adjuster is not yet initialized and 0 if timestamps should not - * be offset. + * Returns the offset between the input of {@link #adjustSampleTimestamp(long)} and its output, or + * {@link C#TIME_UNSET} if the offset has not yet been determined. */ - public long getTimestampOffsetUs() { - return firstSampleTimestampUs == DO_NOT_OFFSET - ? 0 - : lastSampleTimestampUs == C.TIME_UNSET ? C.TIME_UNSET : timestampOffsetUs; + public synchronized long getTimestampOffsetUs() { + return timestampOffsetUs; } /** - * Resets the instance to its initial state. + * Resets the instance. 
+ * + * @param firstSampleTimestampUs The desired value of the first adjusted sample timestamp after + * this reset in microseconds, or {@link #MODE_NO_OFFSET} if timestamps should not be offset, + * or {@link #MODE_SHARED} if the adjuster will be used in shared mode. */ - public void reset() { - lastSampleTimestampUs = C.TIME_UNSET; + public synchronized void reset(long firstSampleTimestampUs) { + this.firstSampleTimestampUs = firstSampleTimestampUs; + timestampOffsetUs = firstSampleTimestampUs == MODE_NO_OFFSET ? 0 : C.TIME_UNSET; + lastUnadjustedTimestampUs = C.TIME_UNSET; } /** @@ -106,14 +165,14 @@ public void reset() { * @param pts90Khz A 90 kHz clock MPEG-2 TS presentation timestamp. * @return The adjusted timestamp in microseconds. */ - public long adjustTsTimestamp(long pts90Khz) { + public synchronized long adjustTsTimestamp(long pts90Khz) { if (pts90Khz == C.TIME_UNSET) { return C.TIME_UNSET; } - if (lastSampleTimestampUs != C.TIME_UNSET) { + if (lastUnadjustedTimestampUs != C.TIME_UNSET) { // The wrap count for the current PTS may be closestWrapCount or (closestWrapCount - 1), // and we need to snap to the one closest to lastSampleTimestampUs. - long lastPts = usToPts(lastSampleTimestampUs); + long lastPts = usToNonWrappedPts(lastUnadjustedTimestampUs); long closestWrapCount = (lastPts + (MAX_PTS_PLUS_ONE / 2)) / MAX_PTS_PLUS_ONE; long ptsWrapBelow = pts90Khz + (MAX_PTS_PLUS_ONE * (closestWrapCount - 1)); long ptsWrapAbove = pts90Khz + (MAX_PTS_PLUS_ONE * closestWrapCount); @@ -131,38 +190,23 @@ public long adjustTsTimestamp(long pts90Khz) { * @param timeUs The timestamp to adjust in microseconds. * @return The adjusted timestamp in microseconds. */ - public long adjustSampleTimestamp(long timeUs) { + public synchronized long adjustSampleTimestamp(long timeUs) { if (timeUs == C.TIME_UNSET) { return C.TIME_UNSET; } - // Record the adjusted PTS to adjust for wraparound next time. - if (lastSampleTimestampUs != C.TIME_UNSET) { - lastSampleTimestampUs = timeUs; - } else { - if (firstSampleTimestampUs != DO_NOT_OFFSET) { - // Calculate the timestamp offset. - timestampOffsetUs = firstSampleTimestampUs - timeUs; - } - synchronized (this) { - lastSampleTimestampUs = timeUs; - // Notify threads waiting for this adjuster to be initialized. - notifyAll(); - } + if (timestampOffsetUs == C.TIME_UNSET) { + long desiredSampleTimestampUs = + firstSampleTimestampUs == MODE_SHARED + ? Assertions.checkNotNull(nextSampleTimestampUs.get()) + : firstSampleTimestampUs; + timestampOffsetUs = desiredSampleTimestampUs - timeUs; + // Notify threads waiting for the timestamp offset to be determined. + notifyAll(); } + lastUnadjustedTimestampUs = timeUs; return timeUs + timestampOffsetUs; } - /** - * Blocks the calling thread until this adjuster is initialized. - * - * @throws InterruptedException If the thread was interrupted. - */ - public synchronized void waitUntilInitialized() throws InterruptedException { - while (lastSampleTimestampUs == C.TIME_UNSET) { - wait(); - } - } - /** * Converts a 90 kHz clock timestamp to a timestamp in microseconds. * @@ -173,14 +217,27 @@ public static long ptsToUs(long pts) { return (pts * C.MICROS_PER_SECOND) / 90000; } + /** + * Converts a timestamp in microseconds to a 90 kHz clock timestamp, performing wraparound to keep + * the result within 33-bits. + * + * @param us A value in microseconds. + * @return The corresponding value as a 90 kHz clock timestamp, wrapped to 33 bits. 
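As a concrete check on the 90 kHz conversions above, a hedged sketch with illustrative values; the class name and the commented results are not part of the patch.

```java
import com.google.android.exoplayer2.util.TimestampAdjuster;

final class PtsConversionExample {
  public static void main(String[] args) {
    // 90 kHz clock: 90 ticks per millisecond, 90,000 per second.
    long pts = 900_000; // 10 seconds of ticks.
    System.out.println(TimestampAdjuster.ptsToUs(pts)); // 10_000_000 microseconds.

    // Past roughly 26.5 hours the non-wrapped value exceeds the 33-bit MPEG-TS clock;
    // usToWrappedPts folds it back into range.
    long largeUs = 100_000_000_000L; // ~27.8 hours in microseconds.
    System.out.println(TimestampAdjuster.usToNonWrappedPts(largeUs)); // 9_000_000_000
    System.out.println(TimestampAdjuster.usToWrappedPts(largeUs)); // 410_065_408 (mod 2^33)
  }
}
```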
+ */ + public static long usToWrappedPts(long us) { + return usToNonWrappedPts(us) % MAX_PTS_PLUS_ONE; + } + /** * Converts a timestamp in microseconds to a 90 kHz clock timestamp. * + *

      Does not perform any wraparound. To get a 90 kHz timestamp suitable for use with MPEG-TS, + * use {@link #usToWrappedPts(long)}. + * * @param us A value in microseconds. * @return The corresponding value as a 90 kHz clock timestamp. */ - public static long usToPts(long us) { + public static long usToNonWrappedPts(long us) { return (us * 90000) / C.MICROS_PER_SECOND; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/TraceUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/TraceUtil.java index 8fb409c04a..87d63fcd06 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/TraceUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/TraceUtil.java @@ -15,12 +15,10 @@ */ package com.google.android.exoplayer2.util; -import android.annotation.TargetApi; +import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.ExoPlayerLibraryInfo; -/** - * Calls through to {@link android.os.Trace} methods on supported API levels. - */ +/** Calls through to {@link android.os.Trace} methods on supported API levels. */ public final class TraceUtil { private TraceUtil() {} @@ -49,14 +47,13 @@ public static void endSection() { } } - @TargetApi(18) + @RequiresApi(18) private static void beginSectionV18(String sectionName) { android.os.Trace.beginSection(sectionName); } - @TargetApi(18) + @RequiresApi(18) private static void endSectionV18() { android.os.Trace.endSection(); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/UnknownNull.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/UnknownNull.java new file mode 100644 index 0000000000..0ccad43a12 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/UnknownNull.java @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.util; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import javax.annotation.Nonnull; +import javax.annotation.meta.TypeQualifierDefault; +import javax.annotation.meta.When; + +/** + * Annotation for specifying unknown nullness. Useful for clearing the effects of an automatically + * propagated {@link Nonnull} annotation. 
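The TraceUtil hunk above only swaps annotations, but a hedged usage sketch of the class may still help; the section name and the Runnable parameter are illustrative, and the calls are no-ops below API 18 or when library tracing is disabled.

```java
import com.google.android.exoplayer2.util.TraceUtil;

final class TraceExample {
  // Illustrative: bracket an expensive operation with a systrace section.
  static void decodeFrame(Runnable decodeWork) {
    TraceUtil.beginSection("decodeFrame");
    try {
      decodeWork.run();
    } finally {
      TraceUtil.endSection(); // Always end the section, even if decoding throws.
    }
  }
}
```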
+ */ +@Nonnull(when = When.UNKNOWN) +@TypeQualifierDefault(ElementType.TYPE_USE) +@Retention(RetentionPolicy.CLASS) +public @interface UnknownNull {} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/UriUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/UriUtil.java index 90be8660c6..40bd4de36b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/UriUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/UriUtil.java @@ -19,44 +19,40 @@ import android.text.TextUtils; import androidx.annotation.Nullable; -/** - * Utility methods for manipulating URIs. - */ +/** Utility methods for manipulating URIs. */ public final class UriUtil { - /** - * The length of arrays returned by {@link #getUriIndices(String)}. - */ + /** The length of arrays returned by {@link #getUriIndices(String)}. */ private static final int INDEX_COUNT = 4; /** * An index into an array returned by {@link #getUriIndices(String)}. - *
<p>
      - * The value at this position in the array is the index of the ':' after the scheme. Equals -1 if - * the URI is a relative reference (no scheme). The hier-part starts at (schemeColon + 1), + * + *
<p>
      The value at this position in the array is the index of the ':' after the scheme. Equals -1 + * if the URI is a relative reference (no scheme). The hier-part starts at (schemeColon + 1), * including when the URI has no scheme. */ private static final int SCHEME_COLON = 0; /** * An index into an array returned by {@link #getUriIndices(String)}. - *
<p>
      - * The value at this position in the array is the index of the path part. Equals (schemeColon + 1) - * if no authority part, (schemeColon + 3) if the authority part consists of just "//", and + * + *
<p>
      The value at this position in the array is the index of the path part. Equals (schemeColon + + * 1) if no authority part, (schemeColon + 3) if the authority part consists of just "//", and * (query) if no path part. The characters starting at this index can be "//" only if the * authority part is non-empty (in this case the double-slash means the first segment is empty). */ private static final int PATH = 1; /** * An index into an array returned by {@link #getUriIndices(String)}. - *
<p>
      - * The value at this position in the array is the index of the query part, including the '?' + * + *
<p>
      The value at this position in the array is the index of the query part, including the '?' * before the query. Equals fragment if no query part, and (fragment - 1) if the query part is a * single '?' with no data. */ private static final int QUERY = 2; /** * An index into an array returned by {@link #getUriIndices(String)}. - *
<p>
      - * The value at this position in the array is the index of the fragment part, including the '#' + * + *
<p>
      The value at this position in the array is the index of the fragment part, including the '#' * before the fragment. Equal to the length of the URI if no fragment part, and (length - 1) if * the fragment part is a single '#' with no data. */ @@ -144,12 +140,17 @@ public static String resolve(@Nullable String baseUri, @Nullable String referenc } } + /** Returns true if the URI is starting with a scheme component, false otherwise. */ + public static boolean isAbsolute(@Nullable String uri) { + return uri != null && getUriIndices(uri)[SCHEME_COLON] != -1; + } + /** - * Removes query parameter from an Uri, if present. + * Removes query parameter from a URI, if present. * - * @param uri The uri. + * @param uri The URI. * @param queryParameterName The name of the query parameter. - * @return The uri without the query parameter. + * @return The URI without the query parameter. */ public static Uri removeQueryParameter(Uri uri, String queryParameterName) { Uri.Builder builder = uri.buildUpon(); @@ -200,7 +201,8 @@ private static String removeDotSegments(StringBuilder uri, int offset, int limit uri.delete(segmentStart, nextSegmentStart); limit -= nextSegmentStart - segmentStart; i = segmentStart; - } else if (i == segmentStart + 2 && uri.charAt(segmentStart) == '.' + } else if (i == segmentStart + 2 + && uri.charAt(segmentStart) == '.' && uri.charAt(segmentStart + 1) == '.') { // Given "abc/def/../ghi", remove "def/../" to get "abc/ghi". int prevSegmentStart = uri.lastIndexOf("/", segmentStart - 2) + 1; @@ -256,9 +258,10 @@ private static int[] getUriIndices(String uriString) { // Determine hier-part structure: hier-part = "//" authority path / path // This block can also cope with schemeIndex == -1. - boolean hasAuthority = schemeIndex + 2 < queryIndex - && uriString.charAt(schemeIndex + 1) == '/' - && uriString.charAt(schemeIndex + 2) == '/'; + boolean hasAuthority = + schemeIndex + 2 < queryIndex + && uriString.charAt(schemeIndex + 1) == '/' + && uriString.charAt(schemeIndex + 2) == '/'; int pathIndex; if (hasAuthority) { pathIndex = uriString.indexOf('/', schemeIndex + 3); // find first '/' after "://" @@ -275,5 +278,4 @@ private static int[] getUriIndices(String uriString) { indices[FRAGMENT] = fragmentIndex; return indices; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Util.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Util.java index a7a46b163d..ed042272a2 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Util.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/Util.java @@ -16,47 +16,77 @@ package com.google.android.exoplayer2.util; import static android.content.Context.UI_MODE_SERVICE; +import static com.google.android.exoplayer2.Player.COMMAND_SEEK_BACK; +import static com.google.android.exoplayer2.Player.COMMAND_SEEK_FORWARD; +import static com.google.android.exoplayer2.Player.COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM; +import static com.google.android.exoplayer2.Player.COMMAND_SEEK_TO_DEFAULT_POSITION; +import static com.google.android.exoplayer2.Player.COMMAND_SEEK_TO_MEDIA_ITEM; +import static com.google.android.exoplayer2.Player.COMMAND_SEEK_TO_NEXT; +import static com.google.android.exoplayer2.Player.COMMAND_SEEK_TO_NEXT_MEDIA_ITEM; +import static com.google.android.exoplayer2.Player.COMMAND_SEEK_TO_PREVIOUS; +import static com.google.android.exoplayer2.Player.COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import 
static java.lang.Math.abs; +import static java.lang.Math.max; +import static java.lang.Math.min; import android.Manifest.permission; import android.annotation.SuppressLint; -import android.annotation.TargetApi; import android.app.Activity; import android.app.UiModeManager; +import android.content.BroadcastReceiver; import android.content.ComponentName; import android.content.Context; import android.content.Intent; +import android.content.IntentFilter; import android.content.pm.PackageInfo; import android.content.pm.PackageManager; import android.content.pm.PackageManager.NameNotFoundException; import android.content.res.Configuration; import android.content.res.Resources; +import android.database.DatabaseUtils; +import android.database.sqlite.SQLiteDatabase; import android.graphics.Point; +import android.graphics.drawable.Drawable; +import android.hardware.display.DisplayManager; import android.media.AudioFormat; -import android.net.ConnectivityManager; -import android.net.NetworkInfo; +import android.media.AudioManager; +import android.media.MediaDrm; import android.net.Uri; import android.os.Build; import android.os.Handler; import android.os.Looper; import android.os.Parcel; +import android.os.SystemClock; +import android.provider.MediaStore; import android.security.NetworkSecurityPolicy; import android.telephony.TelephonyManager; import android.text.TextUtils; +import android.util.Base64; +import android.util.SparseLongArray; import android.view.Display; import android.view.SurfaceView; import android.view.WindowManager; +import androidx.annotation.DoNotInline; +import androidx.annotation.DrawableRes; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.C.ContentType; import com.google.android.exoplayer2.ExoPlayerLibraryInfo; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.ParserException; -import com.google.android.exoplayer2.Renderer; -import com.google.android.exoplayer2.RendererCapabilities; -import com.google.android.exoplayer2.RenderersFactory; -import com.google.android.exoplayer2.SeekParameters; -import com.google.android.exoplayer2.audio.AudioRendererEventListener; -import com.google.android.exoplayer2.upstream.DataSource; -import com.google.android.exoplayer2.video.VideoRendererEventListener; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.Player; +import com.google.android.exoplayer2.Player.Commands; +import com.google.common.base.Ascii; +import com.google.common.base.Charsets; +import com.google.common.util.concurrent.AsyncFunction; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.util.concurrent.MoreExecutors; +import com.google.common.util.concurrent.SettableFuture; import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.File; @@ -64,7 +94,9 @@ import java.io.InputStream; import java.lang.reflect.Method; import java.math.BigDecimal; -import java.nio.charset.Charset; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.ArrayDeque; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; @@ -74,27 +106,29 @@ import java.util.List; import java.util.Locale; import java.util.MissingResourceException; +import java.util.NoSuchElementException; import java.util.TimeZone; import java.util.UUID; 
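For illustration, a minimal sketch of the call-site migration these new imports imply (Guava's Ascii and Charsets plus static Math.min/max standing in for the Util helpers removed later in this file):
// Sketch only: the replacement idioms used throughout the rest of this file.
String lower = Ascii.toLowerCase("M3U8");       // previously Util.toLowerInvariant("M3U8")
byte[] utf8 = "abc".getBytes(Charsets.UTF_8);   // previously Charset.forName(C.UTF8_NAME)
long clamped = max(0L, min(7L, 5L));            // previously Math.max(0, Math.min(7L, 5L)); yields 5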
+import java.util.concurrent.CancellationException; +import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.zip.DataFormatException; +import java.util.zip.GZIPOutputStream; import java.util.zip.Inflater; import org.checkerframework.checker.initialization.qual.UnknownInitialization; import org.checkerframework.checker.nullness.compatqual.NullableType; import org.checkerframework.checker.nullness.qual.EnsuresNonNull; import org.checkerframework.checker.nullness.qual.PolyNull; -/** - * Miscellaneous utility methods. - */ +/** Miscellaneous utility methods. */ public final class Util { /** - * Like {@link android.os.Build.VERSION#SDK_INT}, but in a place where it can be conveniently - * overridden for local testing. + * Like {@link Build.VERSION#SDK_INT}, but in a place where it can be conveniently overridden for + * local testing. */ public static final int SDK_INT = Build.VERSION.SDK_INT; @@ -116,25 +150,31 @@ public final class Util { */ public static final String MODEL = Build.MODEL; - /** - * A concise description of the device that it can be useful to log for debugging purposes. - */ - public static final String DEVICE_DEBUG_INFO = DEVICE + ", " + MODEL + ", " + MANUFACTURER + ", " - + SDK_INT; + /** A concise description of the device that it can be useful to log for debugging purposes. */ + public static final String DEVICE_DEBUG_INFO = + DEVICE + ", " + MODEL + ", " + MANUFACTURER + ", " + SDK_INT; /** An empty byte array. */ public static final byte[] EMPTY_BYTE_ARRAY = new byte[0]; private static final String TAG = "Util"; - private static final Pattern XS_DATE_TIME_PATTERN = Pattern.compile( - "(\\d\\d\\d\\d)\\-(\\d\\d)\\-(\\d\\d)[Tt]" - + "(\\d\\d):(\\d\\d):(\\d\\d)([\\.,](\\d+))?" - + "([Zz]|((\\+|\\-)(\\d?\\d):?(\\d\\d)))?"); + private static final Pattern XS_DATE_TIME_PATTERN = + Pattern.compile( + "(\\d\\d\\d\\d)\\-(\\d\\d)\\-(\\d\\d)[Tt]" + + "(\\d\\d):(\\d\\d):(\\d\\d)([\\.,](\\d+))?" + + "([Zz]|((\\+|\\-)(\\d?\\d):?(\\d\\d)))?"); private static final Pattern XS_DURATION_PATTERN = - Pattern.compile("^(-)?P(([0-9]*)Y)?(([0-9]*)M)?(([0-9]*)D)?" - + "(T(([0-9]*)H)?(([0-9]*)M)?(([0-9.]*)S)?)?$"); + Pattern.compile( + "^(-)?P(([0-9]*)Y)?(([0-9]*)M)?(([0-9]*)D)?" + + "(T(([0-9]*)H)?(([0-9]*)M)?(([0-9.]*)S)?)?$"); private static final Pattern ESCAPED_CHARACTER_PATTERN = Pattern.compile("%([A-Fa-f0-9]{2})"); + // https://docs.microsoft.com/en-us/azure/media-services/previous/media-services-deliver-content-overview#URLs + private static final Pattern ISM_PATH_PATTERN = + Pattern.compile("(?:.*\\.)?isml?(?:/(manifest(.*))?)?", Pattern.CASE_INSENSITIVE); + private static final String ISM_HLS_FORMAT_EXTENSION = "format=m3u8-aapl"; + private static final String ISM_DASH_FORMAT_EXTENSION = "format=mpd-time-csf"; + // Replacement map of ISO language codes used for normalization. @Nullable private static HashMap languageTagReplacementMap; @@ -158,6 +198,52 @@ public static byte[] toByteArray(InputStream inputStream) throws IOException { return outputStream.toByteArray(); } + /** + * Registers a {@link BroadcastReceiver} that's not intended to receive broadcasts from other + * apps. This will be enforced by specifying {@link Context#RECEIVER_NOT_EXPORTED} if {@link + * #SDK_INT} is 33 or above. + * + * @param context The context on which {@link Context#registerReceiver} will be called. 
+ * @param receiver The {@link BroadcastReceiver} to register. This value may be null. + * @param filter Selects the Intent broadcasts to be received. + * @return The first sticky intent found that matches {@code filter}, or null if there are none. + */ + @Nullable + public static Intent registerReceiverNotExported( + Context context, @Nullable BroadcastReceiver receiver, IntentFilter filter) { + if (SDK_INT < 33) { + return context.registerReceiver(receiver, filter); + } else { + return context.registerReceiver(receiver, filter, Context.RECEIVER_NOT_EXPORTED); + } + } + + /** + * Registers a {@link BroadcastReceiver} that's not intended to receive broadcasts from other + * apps. This will be enforced by specifying {@link Context#RECEIVER_NOT_EXPORTED} if {@link + * #SDK_INT} is 33 or above. + * + * @param context The context on which {@link Context#registerReceiver} will be called. + * @param receiver The {@link BroadcastReceiver} to register. This value may be null. + * @param filter Selects the Intent broadcasts to be received. + * @param handler Handler identifying the thread that will receive the Intent. + * @return The first sticky intent found that matches {@code filter}, or null if there are none. + */ + @Nullable + public static Intent registerReceiverNotExported( + Context context, BroadcastReceiver receiver, IntentFilter filter, Handler handler) { + if (SDK_INT < 33) { + return context.registerReceiver(receiver, filter, /* broadcastPermission= */ null, handler); + } else { + return context.registerReceiver( + receiver, + filter, + /* broadcastPermission= */ null, + handler, + Context.RECEIVER_NOT_EXPORTED); + } + } + /** * Calls {@link Context#startForegroundService(Intent)} if {@link #SDK_INT} is 26 or higher, or * {@link Context#startService(Intent)} otherwise. @@ -168,7 +254,7 @@ public static byte[] toByteArray(InputStream inputStream) throws IOException { */ @Nullable public static ComponentName startForegroundService(Context context, Intent intent) { - if (Util.SDK_INT >= 26) { + if (SDK_INT >= 26) { return context.startForegroundService(intent); } else { return context.startService(intent); @@ -183,44 +269,94 @@ public static ComponentName startForegroundService(Context context, Intent inten * @param uris {@link Uri}s that may require {@link permission#READ_EXTERNAL_STORAGE} to read. * @return Whether a permission request was made. */ - @TargetApi(23) public static boolean maybeRequestReadExternalStoragePermission(Activity activity, Uri... uris) { - if (Util.SDK_INT < 23) { + if (SDK_INT < 23) { return false; } for (Uri uri : uris) { - if (isLocalFileUri(uri)) { - if (activity.checkSelfPermission(permission.READ_EXTERNAL_STORAGE) - != PackageManager.PERMISSION_GRANTED) { - activity.requestPermissions(new String[] {permission.READ_EXTERNAL_STORAGE}, 0); + if (maybeRequestReadExternalStoragePermission(activity, uri)) { + return true; + } + } + return false; + } + + /** + * Checks whether it's necessary to request the {@link permission#READ_EXTERNAL_STORAGE} + * permission for the specified {@link MediaItem media items}, requesting the permission if + * necessary. + * + * @param activity The host activity for checking and requesting the permission. + * @param mediaItems {@link MediaItem Media items}s that may require {@link + * permission#READ_EXTERNAL_STORAGE} to read. + * @return Whether a permission request was made. + */ + public static boolean maybeRequestReadExternalStoragePermission( + Activity activity, MediaItem... 
mediaItems) { + if (SDK_INT < 23) { + return false; + } + for (MediaItem mediaItem : mediaItems) { + if (mediaItem.localConfiguration == null) { + continue; + } + if (maybeRequestReadExternalStoragePermission(activity, mediaItem.localConfiguration.uri)) { + return true; + } + List subtitleConfigs = + mediaItem.localConfiguration.subtitleConfigurations; + for (int i = 0; i < subtitleConfigs.size(); i++) { + if (maybeRequestReadExternalStoragePermission(activity, subtitleConfigs.get(i).uri)) { return true; } - break; } } return false; } + private static boolean maybeRequestReadExternalStoragePermission(Activity activity, Uri uri) { + return SDK_INT >= 23 + && (isLocalFileUri(uri) || isMediaStoreExternalContentUri(uri)) + && requestExternalStoragePermission(activity); + } + + private static boolean isMediaStoreExternalContentUri(Uri uri) { + if (!"content".equals(uri.getScheme()) || !MediaStore.AUTHORITY.equals(uri.getAuthority())) { + return false; + } + List pathSegments = uri.getPathSegments(); + if (pathSegments.isEmpty()) { + return false; + } + String firstPathSegment = pathSegments.get(0); + return MediaStore.VOLUME_EXTERNAL.equals(firstPathSegment) + || MediaStore.VOLUME_EXTERNAL_PRIMARY.equals(firstPathSegment); + } + /** - * Returns whether it may be possible to load the given URIs based on the network security - * policy's cleartext traffic permissions. + * Returns whether it may be possible to load the URIs of the given media items based on the + * network security policy's cleartext traffic permissions. * - * @param uris A list of URIs that will be loaded. - * @return Whether it may be possible to load the given URIs. + * @param mediaItems A list of {@link MediaItem media items}. + * @return Whether it may be possible to load the URIs of the given media items. */ - @TargetApi(24) - public static boolean checkCleartextTrafficPermitted(Uri... uris) { - if (Util.SDK_INT < 24) { + public static boolean checkCleartextTrafficPermitted(MediaItem... mediaItems) { + if (SDK_INT < 24) { // We assume cleartext traffic is permitted. return true; } - for (Uri uri : uris) { - if ("http".equals(uri.getScheme()) - && !NetworkSecurityPolicy.getInstance() - .isCleartextTrafficPermitted(Assertions.checkNotNull(uri.getHost()))) { - // The security policy prevents cleartext traffic. + for (MediaItem mediaItem : mediaItems) { + if (mediaItem.localConfiguration == null) { + continue; + } + if (isTrafficRestricted(mediaItem.localConfiguration.uri)) { return false; } + for (int i = 0; i < mediaItem.localConfiguration.subtitleConfigurations.size(); i++) { + if (isTrafficRestricted(mediaItem.localConfiguration.subtitleConfigurations.get(i).uri)) { + return false; + } + } } return true; } @@ -291,14 +427,14 @@ public static void removeRange(List list, int fromIndex, int toIndex) { * *
<p>
      Use {@link Assertions#checkNotNull(Object)} to throw if the value is null. */ - @SuppressWarnings({"contracts.postcondition.not.satisfied", "return.type.incompatible"}) + @SuppressWarnings({"nullness:contracts.postcondition", "nullness:return"}) @EnsuresNonNull("#1") public static T castNonNull(@Nullable T value) { return value; } /** Casts a nullable type array to a non-null type array without runtime null check. */ - @SuppressWarnings({"contracts.postcondition.not.satisfied", "return.type.incompatible"}) + @SuppressWarnings({"nullness:contracts.postcondition", "nullness:return"}) @EnsuresNonNull("#1") public static T[] castNonNullTypeArray(@NullableType T[] value) { return value; @@ -312,7 +448,7 @@ public static T[] castNonNullTypeArray(@NullableType T[] value) { * @param length The output array length. Must be less or equal to the length of the input array. * @return The copied array. */ - @SuppressWarnings({"nullness:argument.type.incompatible", "nullness:return.type.incompatible"}) + @SuppressWarnings({"nullness:argument", "nullness:return"}) public static T[] nullSafeArrayCopy(T[] input, int length) { Assertions.checkArgument(length <= input.length); return Arrays.copyOf(input, length); @@ -326,7 +462,7 @@ public static T[] nullSafeArrayCopy(T[] input, int length) { * @param to The end of the range to be copied, exclusive. * @return The copied array. */ - @SuppressWarnings({"nullness:argument.type.incompatible", "nullness:return.type.incompatible"}) + @SuppressWarnings({"nullness:argument", "nullness:return"}) public static T[] nullSafeArrayCopyOfRange(T[] input, int from, int to) { Assertions.checkArgument(0 <= from); Assertions.checkArgument(to <= input.length); @@ -353,7 +489,7 @@ public static T[] nullSafeArrayAppend(T[] original, T newElement) { * @param second The second array. * @return The concatenated result. */ - @SuppressWarnings({"nullness:assignment.type.incompatible"}) + @SuppressWarnings("nullness:assignment") public static T[] nullSafeArrayConcatenation(T[] first, T[] second) { T[] concatenation = Arrays.copyOf(first, first.length + second.length); System.arraycopy( @@ -364,44 +500,209 @@ public static T[] nullSafeArrayConcatenation(T[] first, T[] second) { /* length= */ second.length); return concatenation; } + + /** + * Copies the contents of {@code list} into {@code array}. + * + *
<p>
      {@code list.size()} must be the same as {@code array.length} to ensure the contents can be + * copied into {@code array} without leaving any nulls at the end. + * + * @param list The list to copy items from. + * @param array The array to copy items to. + */ + @SuppressWarnings("nullness:toArray.nullable.elements.not.newarray") + public static void nullSafeListToArray(List list, T[] array) { + Assertions.checkState(list.size() == array.length); + list.toArray(array); + } + + /** + * Creates a {@link Handler} on the current {@link Looper} thread. + * + * @throws IllegalStateException If the current thread doesn't have a {@link Looper}. + */ + public static Handler createHandlerForCurrentLooper() { + return createHandlerForCurrentLooper(/* callback= */ null); + } + + /** + * Creates a {@link Handler} with the specified {@link Handler.Callback} on the current {@link + * Looper} thread. + * + *
<p>
      The method accepts partially initialized objects as callback under the assumption that the + * Handler won't be used to send messages until the callback is fully initialized. + * + * @param callback A {@link Handler.Callback}. May be a partially initialized class, or null if no + * callback is required. + * @return A {@link Handler} with the specified callback on the current {@link Looper} thread. + * @throws IllegalStateException If the current thread doesn't have a {@link Looper}. + */ + public static Handler createHandlerForCurrentLooper( + @Nullable Handler.@UnknownInitialization Callback callback) { + return createHandler(Assertions.checkStateNotNull(Looper.myLooper()), callback); + } + + /** + * Creates a {@link Handler} on the current {@link Looper} thread. + * + *
<p>
      If the current thread doesn't have a {@link Looper}, the application's main thread {@link + * Looper} is used. + */ + public static Handler createHandlerForCurrentOrMainLooper() { + return createHandlerForCurrentOrMainLooper(/* callback= */ null); + } + /** * Creates a {@link Handler} with the specified {@link Handler.Callback} on the current {@link - * Looper} thread. The method accepts partially initialized objects as callback under the - * assumption that the Handler won't be used to send messages until the callback is fully - * initialized. + * Looper} thread. + * + *
<p>
      The method accepts partially initialized objects as callback under the assumption that the + * Handler won't be used to send messages until the callback is fully initialized. * *
<p>
      If the current thread doesn't have a {@link Looper}, the application's main thread {@link * Looper} is used. * - * @param callback A {@link Handler.Callback}. May be a partially initialized class. + * @param callback A {@link Handler.Callback}. May be a partially initialized class, or null if no + * callback is required. * @return A {@link Handler} with the specified callback on the current {@link Looper} thread. */ - public static Handler createHandler(Handler.@UnknownInitialization Callback callback) { - return createHandler(getLooper(), callback); + public static Handler createHandlerForCurrentOrMainLooper( + @Nullable Handler.@UnknownInitialization Callback callback) { + return createHandler(getCurrentOrMainLooper(), callback); } /** * Creates a {@link Handler} with the specified {@link Handler.Callback} on the specified {@link - * Looper} thread. The method accepts partially initialized objects as callback under the - * assumption that the Handler won't be used to send messages until the callback is fully - * initialized. + * Looper} thread. + * + *
<p>
      The method accepts partially initialized objects as callback under the assumption that the + * Handler won't be used to send messages until the callback is fully initialized. * * @param looper A {@link Looper} to run the callback on. - * @param callback A {@link Handler.Callback}. May be a partially initialized class. + * @param callback A {@link Handler.Callback}. May be a partially initialized class, or null if no + * callback is required. * @return A {@link Handler} with the specified callback on the current {@link Looper} thread. */ - @SuppressWarnings({"nullness:argument.type.incompatible", "nullness:return.type.incompatible"}) + @SuppressWarnings({"nullness:argument", "nullness:return"}) public static Handler createHandler( - Looper looper, Handler.@UnknownInitialization Callback callback) { + Looper looper, @Nullable Handler.@UnknownInitialization Callback callback) { return new Handler(looper, callback); } + /** + * Posts the {@link Runnable} if the calling thread differs with the {@link Looper} of the {@link + * Handler}. Otherwise, runs the {@link Runnable} directly. + * + * @param handler The handler to which the {@link Runnable} will be posted. + * @param runnable The runnable to either post or run. + * @return {@code true} if the {@link Runnable} was successfully posted to the {@link Handler} or + * run. {@code false} otherwise. + */ + public static boolean postOrRun(Handler handler, Runnable runnable) { + Looper looper = handler.getLooper(); + if (!looper.getThread().isAlive()) { + return false; + } + if (handler.getLooper() == Looper.myLooper()) { + runnable.run(); + return true; + } else { + return handler.post(runnable); + } + } + + /** + * Posts the {@link Runnable} if the calling thread differs with the {@link Looper} of the {@link + * Handler}. Otherwise, runs the {@link Runnable} directly. Also returns a {@link + * ListenableFuture} for when the {@link Runnable} has run. + * + * @param handler The handler to which the {@link Runnable} will be posted. + * @param runnable The runnable to either post or run. + * @param successValue The value to set in the {@link ListenableFuture} once the runnable + * completes. + * @param The type of {@code successValue}. + * @return A {@link ListenableFuture} for when the {@link Runnable} has run. + */ + public static ListenableFuture postOrRunWithCompletion( + Handler handler, Runnable runnable, T successValue) { + SettableFuture outputFuture = SettableFuture.create(); + postOrRun( + handler, + () -> { + try { + if (outputFuture.isCancelled()) { + return; + } + runnable.run(); + outputFuture.set(successValue); + } catch (Throwable e) { + outputFuture.setException(e); + } + }); + return outputFuture; + } + + /** + * Asynchronously transforms the result of a {@link ListenableFuture}. + * + *
<p>
      The transformation function is called using a {@linkplain MoreExecutors#directExecutor() + * direct executor}. + * + *
<p>
      The returned Future attempts to keep its cancellation state in sync with that of the input + * future and that of the future returned by the transform function. That is, if the returned + * Future is cancelled, it will attempt to cancel the other two, and if either of the other two is + * cancelled, the returned Future will also be cancelled. All forwarded cancellations will not + * attempt to interrupt. + * + * @param future The input {@link ListenableFuture}. + * @param transformFunction The function transforming the result of the input future. + * @param The result type of the input future. + * @param The result type of the transformation function. + * @return A {@link ListenableFuture} for the transformed result. + */ + public static ListenableFuture transformFutureAsync( + ListenableFuture future, AsyncFunction transformFunction) { + // This is a simplified copy of Guava's Futures.transformAsync. + SettableFuture outputFuture = SettableFuture.create(); + outputFuture.addListener( + () -> { + if (outputFuture.isCancelled()) { + future.cancel(/* mayInterruptIfRunning= */ false); + } + }, + MoreExecutors.directExecutor()); + future.addListener( + () -> { + U inputFutureResult; + try { + inputFutureResult = Futures.getDone(future); + } catch (CancellationException cancellationException) { + outputFuture.cancel(/* mayInterruptIfRunning= */ false); + return; + } catch (ExecutionException exception) { + @Nullable Throwable cause = exception.getCause(); + outputFuture.setException(cause == null ? exception : cause); + return; + } catch (RuntimeException | Error error) { + outputFuture.setException(error); + return; + } + try { + outputFuture.setFuture(transformFunction.apply(inputFutureResult)); + } catch (Throwable exception) { + outputFuture.setException(exception); + } + }, + MoreExecutors.directExecutor()); + return outputFuture; + } + /** * Returns the {@link Looper} associated with the current thread, or the {@link Looper} of the * application's main thread if the current thread doesn't have a {@link Looper}. */ - public static Looper getLooper() { - Looper myLooper = Looper.myLooper(); + public static Looper getCurrentOrMainLooper() { + @Nullable Looper myLooper = Looper.myLooper(); return myLooper != null ? myLooper : Looper.getMainLooper(); } @@ -411,25 +712,10 @@ public static Looper getLooper() { * @param threadName The name of the thread. * @return The executor. */ - public static ExecutorService newSingleThreadExecutor(final String threadName) { + public static ExecutorService newSingleThreadExecutor(String threadName) { return Executors.newSingleThreadExecutor(runnable -> new Thread(runnable, threadName)); } - /** - * Closes a {@link DataSource}, suppressing any {@link IOException} that may occur. - * - * @param dataSource The {@link DataSource} to close. - */ - public static void closeQuietly(@Nullable DataSource dataSource) { - try { - if (dataSource != null) { - dataSource.close(); - } - } catch (IOException e) { - // Ignore. - } - } - /** * Closes a {@link Closeable}, suppressing any {@link IOException} that may occur. Both {@link * java.io.OutputStream} and {@link InputStream} are {@code Closeable}. @@ -497,12 +783,12 @@ public static String getLocaleLanguageTag(Locale locale) { // Locale data (especially for API < 21) may produce tags with '_' instead of the // standard-conformant '-'. 
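// Illustrative aside, not from the upstream change: at this point a platform tag such as "zh_TW"
// has just become "zh-TW"; the Ascii.toLowerCase call below yields "zh-tw" before the
// replacement-map lookup, while an empty result or C.LANGUAGE_UNDETERMINED ("und") falls back
// to the original value.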
String normalizedTag = language.replace('_', '-'); - if (normalizedTag.isEmpty() || "und".equals(normalizedTag)) { + if (normalizedTag.isEmpty() || normalizedTag.equals(C.LANGUAGE_UNDETERMINED)) { // Tag isn't valid, keep using the original. normalizedTag = language; } - normalizedTag = Util.toLowerInvariant(normalizedTag); - String mainLanguage = Util.splitAtFirst(normalizedTag, "-")[0]; + normalizedTag = Ascii.toLowerCase(normalizedTag); + String mainLanguage = splitAtFirst(normalizedTag, "-")[0]; if (languageTagReplacementMap == null) { languageTagReplacementMap = createIsoLanguageReplacementMap(); } @@ -513,7 +799,7 @@ public static String getLocaleLanguageTag(Locale locale) { mainLanguage = replacedLanguage; } if ("no".equals(mainLanguage) || "i".equals(mainLanguage) || "zh".equals(mainLanguage)) { - normalizedTag = maybeReplaceGrandfatheredLanguageTags(normalizedTag); + normalizedTag = maybeReplaceLegacyLanguageTags(normalizedTag); } return normalizedTag; } @@ -525,7 +811,7 @@ public static String getLocaleLanguageTag(Locale locale) { * @return The string. */ public static String fromUtf8Bytes(byte[] bytes) { - return new String(bytes, Charset.forName(C.UTF8_NAME)); + return new String(bytes, Charsets.UTF_8); } /** @@ -537,7 +823,7 @@ public static String fromUtf8Bytes(byte[] bytes) { * @return The string. */ public static String fromUtf8Bytes(byte[] bytes, int offset, int length) { - return new String(bytes, offset, length, Charset.forName(C.UTF8_NAME)); + return new String(bytes, offset, length, Charsets.UTF_8); } /** @@ -547,7 +833,7 @@ public static String fromUtf8Bytes(byte[] bytes, int offset, int length) { * @return The code points encoding using UTF-8. */ public static byte[] getUtf8Bytes(String value) { - return value.getBytes(Charset.forName(C.UTF8_NAME)); + return value.getBytes(Charsets.UTF_8); } /** @@ -587,26 +873,6 @@ public static boolean isLinebreak(int c) { return c == '\n' || c == '\r'; } - /** - * Converts text to lower case using {@link Locale#US}. - * - * @param text The text to convert. - * @return The lower case text, or null if {@code text} is null. - */ - public static @PolyNull String toLowerInvariant(@PolyNull String text) { - return text == null ? text : text.toLowerCase(Locale.US); - } - - /** - * Converts text to upper case using {@link Locale#US}. - * - * @param text The text to convert. - * @return The upper case text, or null if {@code text} is null. - */ - public static @PolyNull String toUpperInvariant(@PolyNull String text) { - return text == null ? text : text.toUpperCase(Locale.US); - } - /** * Formats a string using {@link Locale#US}. * @@ -647,7 +913,7 @@ public static long ceilDivide(long numerator, long denominator) { * @return The constrained value {@code Math.max(min, Math.min(value, max))}. */ public static int constrainValue(int value, int min, int max) { - return Math.max(min, Math.min(value, max)); + return max(min, min(value, max)); } /** @@ -659,7 +925,7 @@ public static int constrainValue(int value, int min, int max) { * @return The constrained value {@code Math.max(min, Math.min(value, max))}. */ public static long constrainValue(long value, long min, long max) { - return Math.max(min, Math.min(value, max)); + return max(min, min(value, max)); } /** @@ -671,7 +937,7 @@ public static long constrainValue(long value, long min, long max) { * @return The constrained value {@code Math.max(min, Math.min(value, max))}. 
*/ public static float constrainValue(float value, float min, float max) { - return Math.max(min, Math.min(value, max)); + return max(min, min(value, max)); } /** @@ -773,14 +1039,14 @@ public static int binarySearchFloor( index++; } } - return stayInBounds ? Math.max(0, index) : index; + return stayInBounds ? max(0, index) : index; } /** * Returns the index of the largest element in {@code array} that is less than (or optionally * equal to) a specified {@code value}. - *
<p>
      - * The search is performed using a binary search algorithm, so the array must be sorted. If the + * + *
<p>
      The search is performed using a binary search algorithm, so the array must be sorted. If the * array contains multiple elements equal to {@code value} and {@code inclusive} is true, the * index of the first one will be returned. * @@ -794,8 +1060,8 @@ public static int binarySearchFloor( * @return The index of the largest element in {@code array} that is less than (or optionally * equal to) {@code value}. */ - public static int binarySearchFloor(long[] array, long value, boolean inclusive, - boolean stayInBounds) { + public static int binarySearchFloor( + long[] array, long value, boolean inclusive, boolean stayInBounds) { int index = Arrays.binarySearch(array, value); if (index < 0) { index = -(index + 2); @@ -805,7 +1071,7 @@ public static int binarySearchFloor(long[] array, long value, boolean inclusive, index++; } } - return stayInBounds ? Math.max(0, index) : index; + return stayInBounds ? max(0, index) : index; } /** @@ -841,7 +1107,48 @@ public static > int binarySearchFloor( index++; } } - return stayInBounds ? Math.max(0, index) : index; + return stayInBounds ? max(0, index) : index; + } + + /** + * Returns the index of the largest element in {@code longArray} that is less than (or optionally + * equal to) a specified {@code value}. + * + *
<p>
      The search is performed using a binary search algorithm, so the array must be sorted. If the + * array contains multiple elements equal to {@code value} and {@code inclusive} is true, the + * index of the first one will be returned. + * + * @param longArray The array to search. + * @param value The value being searched for. + * @param inclusive If the value is present in the array, whether to return the corresponding + * index. If false then the returned index corresponds to the largest element strictly less + * than the value. + * @param stayInBounds If true, then 0 will be returned in the case that the value is smaller than + * the smallest element in the array. If false then -1 will be returned. + * @return The index of the largest element in {@code array} that is less than (or optionally + * equal to) {@code value}. + */ + public static int binarySearchFloor( + LongArray longArray, long value, boolean inclusive, boolean stayInBounds) { + int lowIndex = 0; + int highIndex = longArray.size() - 1; + + while (lowIndex <= highIndex) { + int midIndex = (lowIndex + highIndex) >>> 1; + if (longArray.get(midIndex) < value) { + lowIndex = midIndex + 1; + } else { + highIndex = midIndex - 1; + } + } + + if (inclusive && highIndex + 1 < longArray.size() && longArray.get(highIndex + 1) == value) { + highIndex++; + } else if (stayInBounds && highIndex == -1) { + highIndex = 0; + } + + return highIndex; } /** @@ -874,7 +1181,7 @@ public static int binarySearchCeil( index--; } } - return stayInBounds ? Math.min(array.length - 1, index) : index; + return stayInBounds ? min(array.length - 1, index) : index; } /** @@ -907,7 +1214,7 @@ public static int binarySearchCeil( index--; } } - return stayInBounds ? Math.min(array.length - 1, index) : index; + return stayInBounds ? min(array.length - 1, index) : index; } /** @@ -945,7 +1252,7 @@ public static > int binarySearchCeil( index--; } } - return stayInBounds ? Math.min(list.size() - 1, index) : index; + return stayInBounds ? min(list.size() - 1, index) : index; } /** @@ -960,6 +1267,66 @@ public static int compareLong(long left, long right) { return left < right ? -1 : left == right ? 0 : 1; } + /** + * Returns the minimum value in the given {@link SparseLongArray}. + * + * @param sparseLongArray The {@link SparseLongArray}. + * @return The minimum value. + * @throws NoSuchElementException If the array is empty. + */ + @RequiresApi(18) + public static long minValue(SparseLongArray sparseLongArray) { + if (sparseLongArray.size() == 0) { + throw new NoSuchElementException(); + } + long min = Long.MAX_VALUE; + for (int i = 0; i < sparseLongArray.size(); i++) { + min = min(min, sparseLongArray.valueAt(i)); + } + return min; + } + + /** + * Returns the maximum value in the given {@link SparseLongArray}. + * + * @param sparseLongArray The {@link SparseLongArray}. + * @return The maximum value. + * @throws NoSuchElementException If the array is empty. + */ + @RequiresApi(18) + public static long maxValue(SparseLongArray sparseLongArray) { + if (sparseLongArray.size() == 0) { + throw new NoSuchElementException(); + } + long max = Long.MIN_VALUE; + for (int i = 0; i < sparseLongArray.size(); i++) { + max = max(max, sparseLongArray.valueAt(i)); + } + return max; + } + + /** + * Converts a time in microseconds to the corresponding time in milliseconds, preserving {@link + * C#TIME_UNSET} and {@link C#TIME_END_OF_SOURCE} values. + * + * @param timeUs The time in microseconds. + * @return The corresponding time in milliseconds. 
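For illustration, a minimal sketch of the time-unit helpers defined just below; the point is that the C.TIME_UNSET and C.TIME_END_OF_SOURCE sentinels pass through unchanged:
// Sketch only.
long positionUs = Util.msToUs(1_500);       // 1_500_000
long positionMs = Util.usToMs(positionUs);  // 1_500
long unset = Util.usToMs(C.TIME_UNSET);     // still C.TIME_UNSET, not divided by 1000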
+ */ + public static long usToMs(long timeUs) { + return (timeUs == C.TIME_UNSET || timeUs == C.TIME_END_OF_SOURCE) ? timeUs : (timeUs / 1000); + } + + /** + * Converts a time in milliseconds to the corresponding time in microseconds, preserving {@link + * C#TIME_UNSET} values and {@link C#TIME_END_OF_SOURCE} values. + * + * @param timeMs The time in milliseconds. + * @return The corresponding time in microseconds. + */ + public static long msToUs(long timeMs) { + return (timeMs == C.TIME_UNSET || timeMs == C.TIME_END_OF_SOURCE) ? timeMs : (timeMs * 1000); + } + /** * Parses an xs:duration attribute value, returning the parsed duration in milliseconds. * @@ -992,17 +1359,21 @@ public static long parseXsDuration(String value) { } /** - * Parses an xs:dateTime attribute value, returning the parsed timestamp in milliseconds since - * the epoch. + * Parses an xs:dateTime attribute value, returning the parsed timestamp in milliseconds since the + * epoch. * * @param value The attribute value to decode. * @return The parsed timestamp in milliseconds since the epoch. * @throws ParserException if an error occurs parsing the dateTime attribute value. */ + // incompatible types in argument. + // dereference of possibly-null reference matcher.group(9) + @SuppressWarnings({"nullness:argument", "nullness:dereference.of.nullable"}) public static long parseXsDateTime(String value) throws ParserException { Matcher matcher = XS_DATE_TIME_PATTERN.matcher(value); if (!matcher.matches()) { - throw new ParserException("Invalid date/time format: " + value); + throw ParserException.createForMalformedContainer( + "Invalid date/time format: " + value, /* cause= */ null); } int timezoneShift; @@ -1012,8 +1383,8 @@ public static long parseXsDateTime(String value) throws ParserException { } else if (matcher.group(9).equalsIgnoreCase("Z")) { timezoneShift = 0; } else { - timezoneShift = ((Integer.parseInt(matcher.group(12)) * 60 - + Integer.parseInt(matcher.group(13)))); + timezoneShift = + ((Integer.parseInt(matcher.group(12)) * 60 + Integer.parseInt(matcher.group(13)))); if ("-".equals(matcher.group(11))) { timezoneShift *= -1; } @@ -1023,12 +1394,13 @@ public static long parseXsDateTime(String value) throws ParserException { dateTime.clear(); // Note: The month value is 0-based, hence the -1 on group(2) - dateTime.set(Integer.parseInt(matcher.group(1)), - Integer.parseInt(matcher.group(2)) - 1, - Integer.parseInt(matcher.group(3)), - Integer.parseInt(matcher.group(4)), - Integer.parseInt(matcher.group(5)), - Integer.parseInt(matcher.group(6))); + dateTime.set( + Integer.parseInt(matcher.group(1)), + Integer.parseInt(matcher.group(2)) - 1, + Integer.parseInt(matcher.group(3)), + Integer.parseInt(matcher.group(4)), + Integer.parseInt(matcher.group(5)), + Integer.parseInt(matcher.group(6))); if (!TextUtils.isEmpty(matcher.group(8))) { final BigDecimal bd = new BigDecimal("0." + matcher.group(8)); // we care only for milliseconds, so movePointRight(3) @@ -1037,7 +1409,7 @@ public static long parseXsDateTime(String value) throws ParserException { long time = dateTime.getTimeInMillis(); if (timezoneShift != 0) { - time -= timezoneShift * 60000; + time -= timezoneShift * 60000L; } return time; @@ -1045,9 +1417,9 @@ public static long parseXsDateTime(String value) throws ParserException { /** * Scales a large timestamp. - *
<p>
      - * Logically, scaling consists of a multiplication followed by a division. The actual operations - * performed are designed to minimize the probability of overflow. + * + *
<p>
      Logically, scaling consists of a multiplication followed by a division. The actual + * operations performed are designed to minimize the probability of overflow. * * @param timestamp The timestamp to scale. * @param multiplier The multiplier. @@ -1126,7 +1498,7 @@ public static void scaleLargeTimestampsInPlace(long[] timestamps, long multiplie * Returns the duration of media that will elapse in {@code playoutDuration}. * * @param playoutDuration The duration to scale. - * @param speed The playback speed. + * @param speed The factor by which playback is sped up. * @return The scaled duration, in the same units as {@code playoutDuration}. */ public static long getMediaDurationForPlayoutDuration(long playoutDuration, float speed) { @@ -1149,62 +1521,6 @@ public static long getPlayoutDurationForMediaDuration(long mediaDuration, float return Math.round((double) mediaDuration / speed); } - /** - * Resolves a seek given the requested seek position, a {@link SeekParameters} and two candidate - * sync points. - * - * @param positionUs The requested seek position, in microseocnds. - * @param seekParameters The {@link SeekParameters}. - * @param firstSyncUs The first candidate seek point, in micrseconds. - * @param secondSyncUs The second candidate seek point, in microseconds. May equal {@code - * firstSyncUs} if there's only one candidate. - * @return The resolved seek position, in microseconds. - */ - public static long resolveSeekPositionUs( - long positionUs, SeekParameters seekParameters, long firstSyncUs, long secondSyncUs) { - if (SeekParameters.EXACT.equals(seekParameters)) { - return positionUs; - } - long minPositionUs = - subtractWithOverflowDefault(positionUs, seekParameters.toleranceBeforeUs, Long.MIN_VALUE); - long maxPositionUs = - addWithOverflowDefault(positionUs, seekParameters.toleranceAfterUs, Long.MAX_VALUE); - boolean firstSyncPositionValid = minPositionUs <= firstSyncUs && firstSyncUs <= maxPositionUs; - boolean secondSyncPositionValid = - minPositionUs <= secondSyncUs && secondSyncUs <= maxPositionUs; - if (firstSyncPositionValid && secondSyncPositionValid) { - if (Math.abs(firstSyncUs - positionUs) <= Math.abs(secondSyncUs - positionUs)) { - return firstSyncUs; - } else { - return secondSyncUs; - } - } else if (firstSyncPositionValid) { - return firstSyncUs; - } else if (secondSyncPositionValid) { - return secondSyncUs; - } else { - return minPositionUs; - } - } - - /** - * Converts a list of integers to a primitive array. - * - * @param list A list of integers. - * @return The list in array form, or null if the input list was null. - */ - public static int @PolyNull [] toArray(@PolyNull List list) { - if (list == null) { - return null; - } - int length = list.size(); - int[] intArray = new int[length]; - for (int i = 0; i < length; i++) { - intArray[i] = list.get(i); - } - return intArray; - } - /** * Returns the integer equal to the big-endian concatenation of the characters in {@code string} * as bytes. The string must be no more than four characters long. @@ -1234,7 +1550,7 @@ public static long toUnsignedLong(int x) { } /** - * Return the long that is composed of the bits of the 2 specified integers. + * Returns the long that is composed of the bits of the 2 specified integers. * * @param mostSignificantBits The 32 most significant bits of the long to return. * @param leastSignificantBits The 32 least significant bits of the long to return. 
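For illustration, a minimal sketch relating media time to playout time with the speed-scaling helpers touched in the hunk above (assuming the media-duration helper multiplies by the speed factor, as its javadoc describes):
// Sketch only: 10 s of media elapses in 5 s of wall-clock time at 2x speed.
long mediaDurationUs = 10_000_000L;
long playoutUs = Util.getPlayoutDurationForMediaDuration(mediaDurationUs, /* speed= */ 2f); // 5_000_000
long roundTripUs = Util.getMediaDurationForPlayoutDuration(playoutUs, /* speed= */ 2f);     // 10_000_000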
@@ -1245,6 +1561,24 @@ public static long toLong(int mostSignificantBits, int leastSignificantBits) { return (toUnsignedLong(mostSignificantBits) << 32) | toUnsignedLong(leastSignificantBits); } + /** + * Truncates a sequence of ASCII characters to a maximum length. + * + *
<p>
      This preserves span styling in the {@link CharSequence}. If that's not important, use {@link + * Ascii#truncate(CharSequence, int, String)}. + * + *
<p>
      Note: This is not safe to use in general on Unicode text because it may separate + * characters from combining characters or split up surrogate pairs. + * + * @param sequence The character sequence to truncate. + * @param maxLength The max length to truncate to. + * @return {@code sequence} directly if {@code sequence.length() <= maxLength}, otherwise {@code + * sequence.subsequence(0, maxLength}. + */ + public static CharSequence truncateAscii(CharSequence sequence, int maxLength) { + return sequence.length() <= maxLength ? sequence : sequence.subSequence(0, maxLength); + } + /** * Returns a byte array containing values parsed from the hex string provided. * @@ -1255,12 +1589,30 @@ public static byte[] getBytesFromHexString(String hexString) { byte[] data = new byte[hexString.length() / 2]; for (int i = 0; i < data.length; i++) { int stringOffset = i * 2; - data[i] = (byte) ((Character.digit(hexString.charAt(stringOffset), 16) << 4) - + Character.digit(hexString.charAt(stringOffset + 1), 16)); + data[i] = + (byte) + ((Character.digit(hexString.charAt(stringOffset), 16) << 4) + + Character.digit(hexString.charAt(stringOffset + 1), 16)); } return data; } + /** + * Returns a string containing a lower-case hex representation of the bytes provided. + * + * @param bytes The byte data to convert to hex. + * @return A String containing the hex representation of {@code bytes}. + */ + public static String toHexString(byte[] bytes) { + StringBuilder result = new StringBuilder(bytes.length * 2); + for (int i = 0; i < bytes.length; i++) { + result + .append(Character.forDigit((bytes[i] >> 4) & 0xF, 16)) + .append(Character.forDigit(bytes[i] & 0xF, 16)); + } + return result.toString(); + } + /** * Returns a string with comma delimited simple names of each object's class. * @@ -1294,8 +1646,25 @@ public static String getUserAgent(Context context, String applicationName) { } catch (NameNotFoundException e) { versionName = "?"; } - return applicationName + "/" + versionName + " (Linux;Android " + Build.VERSION.RELEASE - + ") " + ExoPlayerLibraryInfo.VERSION_SLASHY; + return applicationName + + "/" + + versionName + + " (Linux;Android " + + Build.VERSION.RELEASE + + ") " + + ExoPlayerLibraryInfo.VERSION_SLASHY; + } + + /** Returns the number of codec strings in {@code codecs} whose type matches {@code trackType}. */ + public static int getCodecCountOfType(@Nullable String codecs, @C.TrackType int trackType) { + String[] codecArray = splitCodecs(codecs); + int count = 0; + for (String codec : codecArray) { + if (trackType == MimeTypes.getTrackTypeOfCodec(codec)) { + count++; + } + } + return count; } /** @@ -1303,11 +1672,12 @@ public static String getUserAgent(Context context, String applicationName) { * trackType}. * * @param codecs A codec sequence string, as defined in RFC 6381. - * @param trackType One of {@link C}{@code .TRACK_TYPE_*}. + * @param trackType The {@link C.TrackType track type}. * @return A copy of {@code codecs} without the codecs whose track type doesn't match {@code - * trackType}. If this ends up empty, or {@code codecs} is null, return null. + * trackType}. If this ends up empty, or {@code codecs} is null, returns null. 
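For illustration, a minimal sketch of the codec-string helpers in this hunk, assuming MimeTypes classifies "avc1" codecs as video and "mp4a" codecs as audio:
// Sketch only: filtering an RFC 6381 codecs attribute by track type.
String codecs = "avc1.64001F,mp4a.40.2";
int audioCount = Util.getCodecCountOfType(codecs, C.TRACK_TYPE_AUDIO); // 1
String audioOnly = Util.getCodecsOfType(codecs, C.TRACK_TYPE_AUDIO);   // "mp4a.40.2"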
*/ - public static @Nullable String getCodecsOfType(@Nullable String codecs, int trackType) { + @Nullable + public static String getCodecsOfType(@Nullable String codecs, @C.TrackType int trackType) { String[] codecArray = splitCodecs(codecs); if (codecArray.length == 0) { return null; @@ -1337,17 +1707,32 @@ public static String[] splitCodecs(@Nullable String codecs) { return split(codecs.trim(), "(\\s*,\\s*)"); } + /** + * Gets a PCM {@link Format} with the specified parameters. + * + * @param pcmEncoding The {@link C.PcmEncoding}. + * @param channels The number of channels, or {@link Format#NO_VALUE} if unknown. + * @param sampleRate The sample rate in Hz, or {@link Format#NO_VALUE} if unknown. + * @return The PCM format. + */ + public static Format getPcmFormat(@C.PcmEncoding int pcmEncoding, int channels, int sampleRate) { + return new Format.Builder() + .setSampleMimeType(MimeTypes.AUDIO_RAW) + .setChannelCount(channels) + .setSampleRate(sampleRate) + .setPcmEncoding(pcmEncoding) + .build(); + } + /** * Converts a sample bit depth to a corresponding PCM encoding constant. * * @param bitDepth The bit depth. Supported values are 8, 16, 24 and 32. - * @return The corresponding encoding. One of {@link C#ENCODING_PCM_8BIT}, - * {@link C#ENCODING_PCM_16BIT}, {@link C#ENCODING_PCM_24BIT} and - * {@link C#ENCODING_PCM_32BIT}. If the bit depth is unsupported then - * {@link C#ENCODING_INVALID} is returned. + * @return The corresponding encoding. One of {@link C#ENCODING_PCM_8BIT}, {@link + * C#ENCODING_PCM_16BIT}, {@link C#ENCODING_PCM_24BIT} and {@link C#ENCODING_PCM_32BIT}. If + * the bit depth is unsupported then {@link C#ENCODING_INVALID} is returned. */ - @C.PcmEncoding - public static int getPcmEncoding(int bitDepth) { + public static @C.PcmEncoding int getPcmEncoding(int bitDepth) { switch (bitDepth) { case 8: return C.ENCODING_PCM_8BIT; @@ -1391,12 +1776,13 @@ public static boolean isEncodingHighResolutionPcm(@C.PcmEncoding int encoding) { /** * Returns the audio track channel configuration for the given channel count, or {@link - * AudioFormat#CHANNEL_INVALID} if output is not poossible. + * AudioFormat#CHANNEL_INVALID} if output is not possible. * * @param channelCount The number of channels in the input audio. * @return The channel configuration or {@link AudioFormat#CHANNEL_INVALID} if output is not * possible. */ + @SuppressLint("InlinedApi") // Inlined AudioFormat constants. public static int getAudioTrackChannelConfig(int channelCount) { switch (channelCount) { case 1: @@ -1414,17 +1800,9 @@ public static int getAudioTrackChannelConfig(int channelCount) { case 7: return AudioFormat.CHANNEL_OUT_5POINT1 | AudioFormat.CHANNEL_OUT_BACK_CENTER; case 8: - if (Util.SDK_INT >= 23) { - return AudioFormat.CHANNEL_OUT_7POINT1_SURROUND; - } else if (Util.SDK_INT >= 21) { - // Equal to AudioFormat.CHANNEL_OUT_7POINT1_SURROUND, which is hidden before Android M. - return AudioFormat.CHANNEL_OUT_5POINT1 - | AudioFormat.CHANNEL_OUT_SIDE_LEFT - | AudioFormat.CHANNEL_OUT_SIDE_RIGHT; - } else { - // 8 ch output is not supported before Android L. - return AudioFormat.CHANNEL_INVALID; - } + return AudioFormat.CHANNEL_OUT_7POINT1_SURROUND; + case 12: + return AudioFormat.CHANNEL_OUT_7POINT1POINT4; default: return AudioFormat.CHANNEL_INVALID; } @@ -1456,11 +1834,8 @@ public static int getPcmFrameSize(@C.PcmEncoding int pcmEncoding, int channelCou } } - /** - * Returns the {@link C.AudioUsage} corresponding to the specified {@link C.StreamType}. 
- */ - @C.AudioUsage - public static int getAudioUsageForStreamType(@C.StreamType int streamType) { + /** Returns the {@link C.AudioUsage} corresponding to the specified {@link C.StreamType}. */ + public static @C.AudioUsage int getAudioUsageForStreamType(@C.StreamType int streamType) { switch (streamType) { case C.STREAM_TYPE_ALARM: return C.USAGE_ALARM; @@ -1474,39 +1849,32 @@ public static int getAudioUsageForStreamType(@C.StreamType int streamType) { return C.USAGE_ASSISTANCE_SONIFICATION; case C.STREAM_TYPE_VOICE_CALL: return C.USAGE_VOICE_COMMUNICATION; - case C.STREAM_TYPE_USE_DEFAULT: case C.STREAM_TYPE_MUSIC: default: return C.USAGE_MEDIA; } } - /** - * Returns the {@link C.AudioContentType} corresponding to the specified {@link C.StreamType}. - */ - @C.AudioContentType - public static int getAudioContentTypeForStreamType(@C.StreamType int streamType) { + /** Returns the {@link C.AudioContentType} corresponding to the specified {@link C.StreamType}. */ + public static @C.AudioContentType int getAudioContentTypeForStreamType( + @C.StreamType int streamType) { switch (streamType) { case C.STREAM_TYPE_ALARM: case C.STREAM_TYPE_DTMF: case C.STREAM_TYPE_NOTIFICATION: case C.STREAM_TYPE_RING: case C.STREAM_TYPE_SYSTEM: - return C.CONTENT_TYPE_SONIFICATION; + return C.AUDIO_CONTENT_TYPE_SONIFICATION; case C.STREAM_TYPE_VOICE_CALL: - return C.CONTENT_TYPE_SPEECH; - case C.STREAM_TYPE_USE_DEFAULT: + return C.AUDIO_CONTENT_TYPE_SPEECH; case C.STREAM_TYPE_MUSIC: default: - return C.CONTENT_TYPE_MUSIC; + return C.AUDIO_CONTENT_TYPE_MUSIC; } } - /** - * Returns the {@link C.StreamType} corresponding to the specified {@link C.AudioUsage}. - */ - @C.StreamType - public static int getStreamTypeForAudioUsage(@C.AudioUsage int usage) { + /** Returns the {@link C.StreamType} corresponding to the specified {@link C.AudioUsage}. */ + public static @C.StreamType int getStreamTypeForAudioUsage(@C.AudioUsage int usage) { switch (usage) { case C.USAGE_MEDIA: case C.USAGE_GAME: @@ -1536,6 +1904,19 @@ public static int getStreamTypeForAudioUsage(@C.AudioUsage int usage) { } } + /** + * Returns a newly generated audio session identifier, or {@link AudioManager#ERROR} if an error + * occurred in which case audio playback may fail. + * + * @see AudioManager#generateAudioSessionId() + */ + @RequiresApi(21) + public static int generateAudioSessionIdV21(Context context) { + @Nullable + AudioManager audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE)); + return audioManager == null ? AudioManager.ERROR : audioManager.generateAudioSessionId(); + } + /** * Derives a DRM {@link UUID} from {@code drmScheme}. * @@ -1543,8 +1924,9 @@ public static int getStreamTypeForAudioUsage(@C.AudioUsage int usage) { * "clearkey"}. * @return The derived {@link UUID}, or {@code null} if one could not be derived. */ - public static @Nullable UUID getDrmUuid(String drmScheme) { - switch (toLowerInvariant(drmScheme)) { + @Nullable + public static UUID getDrmUuid(String drmScheme) { + switch (Ascii.toLowerCase(drmScheme)) { case "widevine": return C.WIDEVINE_UUID; case "playready": @@ -1561,51 +1943,193 @@ public static int getStreamTypeForAudioUsage(@C.AudioUsage int usage) { } /** - * Makes a best guess to infer the type from a {@link Uri}. - * - * @param uri The {@link Uri}. - * @param overrideExtension If not null, used to infer the type. - * @return The content type. + * Returns a {@link PlaybackException.ErrorCode} value that corresponds to the provided {@link + * MediaDrm.ErrorCodes} value. 
Returns {@link PlaybackException#ERROR_CODE_DRM_SYSTEM_ERROR} if + * the provided error code isn't recognised. + */ + public static @PlaybackException.ErrorCode int getErrorCodeForMediaDrmErrorCode( + int mediaDrmErrorCode) { + switch (mediaDrmErrorCode) { + case MediaDrm.ErrorCodes.ERROR_PROVISIONING_CONFIG: + case MediaDrm.ErrorCodes.ERROR_PROVISIONING_PARSE: + case MediaDrm.ErrorCodes.ERROR_PROVISIONING_REQUEST_REJECTED: + case MediaDrm.ErrorCodes.ERROR_PROVISIONING_CERTIFICATE: + case MediaDrm.ErrorCodes.ERROR_PROVISIONING_RETRY: + return PlaybackException.ERROR_CODE_DRM_PROVISIONING_FAILED; + case MediaDrm.ErrorCodes.ERROR_LICENSE_PARSE: + case MediaDrm.ErrorCodes.ERROR_LICENSE_RELEASE: + case MediaDrm.ErrorCodes.ERROR_LICENSE_REQUEST_REJECTED: + case MediaDrm.ErrorCodes.ERROR_LICENSE_RESTORE: + case MediaDrm.ErrorCodes.ERROR_LICENSE_STATE: + case MediaDrm.ErrorCodes.ERROR_CERTIFICATE_MALFORMED: + return PlaybackException.ERROR_CODE_DRM_LICENSE_ACQUISITION_FAILED; + case MediaDrm.ErrorCodes.ERROR_LICENSE_POLICY: + case MediaDrm.ErrorCodes.ERROR_INSUFFICIENT_OUTPUT_PROTECTION: + case MediaDrm.ErrorCodes.ERROR_INSUFFICIENT_SECURITY: + case MediaDrm.ErrorCodes.ERROR_KEY_EXPIRED: + case MediaDrm.ErrorCodes.ERROR_KEY_NOT_LOADED: + return PlaybackException.ERROR_CODE_DRM_DISALLOWED_OPERATION; + case MediaDrm.ErrorCodes.ERROR_INIT_DATA: + case MediaDrm.ErrorCodes.ERROR_FRAME_TOO_LARGE: + return PlaybackException.ERROR_CODE_DRM_CONTENT_ERROR; + default: + return PlaybackException.ERROR_CODE_DRM_SYSTEM_ERROR; + } + } + + /** + * @deprecated Use {@link #inferContentTypeForExtension(String)} when {@code overrideExtension} is + * non-empty, and {@link #inferContentType(Uri)} otherwise. */ - @C.ContentType - public static int inferContentType(Uri uri, @Nullable String overrideExtension) { + @Deprecated + public static @ContentType int inferContentType(Uri uri, @Nullable String overrideExtension) { return TextUtils.isEmpty(overrideExtension) ? inferContentType(uri) - : inferContentType("." + overrideExtension); + : inferContentTypeForExtension(overrideExtension); } /** - * Makes a best guess to infer the type from a {@link Uri}. + * Makes a best guess to infer the {@link ContentType} from a {@link Uri}. * * @param uri The {@link Uri}. * @return The content type. */ - @C.ContentType - public static int inferContentType(Uri uri) { - String path = uri.getPath(); - return path == null ? C.TYPE_OTHER : inferContentType(path); + public static @ContentType int inferContentType(Uri uri) { + @Nullable String scheme = uri.getScheme(); + if (scheme != null && Ascii.equalsIgnoreCase("rtsp", scheme)) { + return C.CONTENT_TYPE_RTSP; + } + + @Nullable String lastPathSegment = uri.getLastPathSegment(); + if (lastPathSegment == null) { + return C.CONTENT_TYPE_OTHER; + } + int lastDotIndex = lastPathSegment.lastIndexOf('.'); + if (lastDotIndex >= 0) { + @C.ContentType + int contentType = inferContentTypeForExtension(lastPathSegment.substring(lastDotIndex + 1)); + if (contentType != C.CONTENT_TYPE_OTHER) { + // If contentType is TYPE_SS that indicates the extension is .ism or .isml and shows the ISM + // URI is missing the "/manifest" suffix, which contains the information used to + // disambiguate between Smooth Streaming, HLS and DASH below - so we can just return TYPE_SS + // here without further checks. 
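
A sketch of how the new MediaDrm error-code mapping might be used when reporting DRM failures; the wrapper class is hypothetical, and referencing MediaDrm.ErrorCodes requires compiling against API 31 or newer:

    import android.media.MediaDrm;
    import com.google.android.exoplayer2.PlaybackException;
    import com.google.android.exoplayer2.util.Util;

    final class DrmErrorMappingSketch {
      /** Translates a framework MediaDrm error code into ExoPlayer's PlaybackException space. */
      static @PlaybackException.ErrorCode int toPlaybackErrorCode(int mediaDrmErrorCode) {
        return Util.getErrorCodeForMediaDrmErrorCode(mediaDrmErrorCode);
      }

      public static void main(String[] args) {
        // ERROR_KEY_EXPIRED falls into the "disallowed operation" bucket in the switch above.
        int code = toPlaybackErrorCode(MediaDrm.ErrorCodes.ERROR_KEY_EXPIRED);
        System.out.println(code == PlaybackException.ERROR_CODE_DRM_DISALLOWED_OPERATION); // true
      }
    }
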
+ return contentType; + } + } + + Matcher ismMatcher = ISM_PATH_PATTERN.matcher(checkNotNull(uri.getPath())); + if (ismMatcher.matches()) { + @Nullable String extensions = ismMatcher.group(2); + if (extensions != null) { + if (extensions.contains(ISM_DASH_FORMAT_EXTENSION)) { + return C.CONTENT_TYPE_DASH; + } else if (extensions.contains(ISM_HLS_FORMAT_EXTENSION)) { + return C.CONTENT_TYPE_HLS; + } + } + return C.CONTENT_TYPE_SS; + } + + return C.CONTENT_TYPE_OTHER; + } + + /** + * @deprecated Use {@link Uri#parse(String)} and {@link #inferContentType(Uri)} for full file + * paths or {@link #inferContentTypeForExtension(String)} for extensions. + */ + @Deprecated + public static @ContentType int inferContentType(String fileName) { + return inferContentType(Uri.parse("file:///" + fileName)); + } + + /** + * Makes a best guess to infer the {@link ContentType} from a file extension. + * + * @param fileExtension The extension of the file (excluding the '.'). + * @return The content type. + */ + public static @ContentType int inferContentTypeForExtension(String fileExtension) { + fileExtension = Ascii.toLowerCase(fileExtension); + switch (fileExtension) { + case "mpd": + return C.CONTENT_TYPE_DASH; + case "m3u8": + return C.CONTENT_TYPE_HLS; + case "ism": + case "isml": + return C.TYPE_SS; + default: + return C.CONTENT_TYPE_OTHER; + } } /** - * Makes a best guess to infer the type from a file name. + * Makes a best guess to infer the {@link ContentType} from a {@link Uri} and optional MIME type. * - * @param fileName Name of the file. It can include the path of the file. + * @param uri The {@link Uri}. + * @param mimeType If MIME type, or {@code null}. * @return The content type. */ - @C.ContentType - public static int inferContentType(String fileName) { - fileName = toLowerInvariant(fileName); - if (fileName.endsWith(".mpd")) { - return C.TYPE_DASH; - } else if (fileName.endsWith(".m3u8")) { - return C.TYPE_HLS; - } else if (fileName.matches(".*\\.ism(l)?(/manifest(\\(.+\\))?)?")) { - return C.TYPE_SS; - } else { - return C.TYPE_OTHER; + public static @ContentType int inferContentTypeForUriAndMimeType( + Uri uri, @Nullable String mimeType) { + if (mimeType == null) { + return inferContentType(uri); + } + switch (mimeType) { + case MimeTypes.APPLICATION_MPD: + return C.CONTENT_TYPE_DASH; + case MimeTypes.APPLICATION_M3U8: + return C.CONTENT_TYPE_HLS; + case MimeTypes.APPLICATION_SS: + return C.CONTENT_TYPE_SS; + case MimeTypes.APPLICATION_RTSP: + return C.CONTENT_TYPE_RTSP; + default: + return C.CONTENT_TYPE_OTHER; + } + } + + /** + * Returns the MIME type corresponding to the given adaptive {@link ContentType}, or {@code null} + * if the content type is not adaptive. + */ + @Nullable + public static String getAdaptiveMimeTypeForContentType(@ContentType int contentType) { + switch (contentType) { + case C.CONTENT_TYPE_DASH: + return MimeTypes.APPLICATION_MPD; + case C.CONTENT_TYPE_HLS: + return MimeTypes.APPLICATION_M3U8; + case C.CONTENT_TYPE_SS: + return MimeTypes.APPLICATION_SS; + case C.CONTENT_TYPE_RTSP: + case C.CONTENT_TYPE_OTHER: + default: + return null; } } + /** + * If the provided URI is an ISM Presentation URI, returns the URI with "Manifest" appended to its + * path (i.e., the corresponding default manifest URI). Else returns the provided URI without + * modification. See [MS-SSTR] v20180912, section 2.2.1. + * + * @param uri The original URI. + * @return The fixed URI. 
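
The reworked inference helpers compose as follows; a minimal sketch assuming the Util, C and MimeTypes classes from this package, with made-up example URLs:

    import android.net.Uri;
    import com.google.android.exoplayer2.C;
    import com.google.android.exoplayer2.util.MimeTypes;
    import com.google.android.exoplayer2.util.Util;

    final class ContentTypeSketch {
      public static void main(String[] args) {
        // Extension-based guess: ".mpd" maps to DASH.
        int dash = Util.inferContentType(Uri.parse("https://example.com/stream.mpd"));
        // rtsp:// schemes are now recognised before any extension matching.
        int rtsp = Util.inferContentType(Uri.parse("rtsp://example.com/live"));
        // An explicit MIME type takes precedence over the URI when both are available.
        int hls =
            Util.inferContentTypeForUriAndMimeType(
                Uri.parse("https://example.com/master"), MimeTypes.APPLICATION_M3U8);

        System.out.println(dash == C.CONTENT_TYPE_DASH); // true
        System.out.println(rtsp == C.CONTENT_TYPE_RTSP); // true
        System.out.println(hls == C.CONTENT_TYPE_HLS);   // true
        // Back to a manifest MIME type for adaptive content ("application/dash+xml" here).
        System.out.println(Util.getAdaptiveMimeTypeForContentType(dash));
      }
    }
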
+ */ + public static Uri fixSmoothStreamingIsmManifestUri(Uri uri) { + @Nullable String path = uri.getPath(); + if (path == null) { + return uri; + } + Matcher ismMatcher = ISM_PATH_PATTERN.matcher(path); + if (ismMatcher.matches() && ismMatcher.group(1) == null) { + // Add missing "Manifest" suffix. + return Uri.withAppendedPath(uri, "Manifest"); + } + return uri; + } + /** * Returns the specified millisecond time formatted as a string. * @@ -1618,23 +2142,26 @@ public static String getStringForTime(StringBuilder builder, Formatter formatter if (timeMs == C.TIME_UNSET) { timeMs = 0; } + String prefix = timeMs < 0 ? "-" : ""; + timeMs = abs(timeMs); long totalSeconds = (timeMs + 500) / 1000; long seconds = totalSeconds % 60; long minutes = (totalSeconds / 60) % 60; long hours = totalSeconds / 3600; builder.setLength(0); - return hours > 0 ? formatter.format("%d:%02d:%02d", hours, minutes, seconds).toString() - : formatter.format("%02d:%02d", minutes, seconds).toString(); + return hours > 0 + ? formatter.format("%s%d:%02d:%02d", prefix, hours, minutes, seconds).toString() + : formatter.format("%s%02d:%02d", prefix, minutes, seconds).toString(); } /** * Escapes a string so that it's safe for use as a file or directory name on at least FAT32 * filesystems. FAT32 is the most restrictive of all filesystems still commonly used today. * - *

      For simplicity, this only handles common characters known to be illegal on FAT32: - * <, >, :, ", /, \, |, ?, and *. % is also escaped since it is used as the escape - * character. Escaping is performed in a consistent way so that no collisions occur and - * {@link #unescapeFileName(String)} can be used to retrieve the original file name. + *

      For simplicity, this only handles common characters known to be illegal on FAT32: <, + * >, :, ", /, \, |, ?, and *. % is also escaped since it is used as the escape character. + * Escaping is performed in a consistent way so that no collisions occur and {@link + * #unescapeFileName(String)} can be used to retrieve the original file name. * * @param fileName File name to be escaped. * @return An escaped file name which will be safe for use on at least FAT32 filesystems. @@ -1695,7 +2222,8 @@ private static boolean shouldEscapeCharacter(char c) { * @return The original value of the file name before it was escaped, or null if the escaped * fileName seems invalid. */ - public static @Nullable String unescapeFileName(String fileName) { + @Nullable + public static String unescapeFileName(String fileName) { int length = fileName.length(); int percentCharacterCount = 0; for (int i = 0; i < length; i++) { @@ -1712,7 +2240,7 @@ private static boolean shouldEscapeCharacter(char c) { Matcher matcher = ESCAPED_CHARACTER_PATTERN.matcher(fileName); int startOfNotEscaped = 0; while (percentCharacterCount > 0 && matcher.find()) { - char unescapedCharacter = (char) Integer.parseInt(matcher.group(1), 16); + char unescapedCharacter = (char) Integer.parseInt(checkNotNull(matcher.group(1)), 16); builder.append(fileName, startOfNotEscaped, matcher.start()).append(unescapedCharacter); startOfNotEscaped = matcher.end(); percentCharacterCount--; @@ -1726,9 +2254,15 @@ private static boolean shouldEscapeCharacter(char c) { return builder.toString(); } + /** Returns a data URI with the specified MIME type and data. */ + public static Uri getDataUriForString(String mimeType, String data) { + return Uri.parse( + "data:" + mimeType + ";base64," + Base64.encodeToString(data.getBytes(), Base64.NO_WRAP)); + } + /** - * A hacky method that always throws {@code t} even if {@code t} is a checked exception, - * and is not declared to be thrown. + * A hacky method that always throws {@code t} even if {@code t} is a checked exception, and is + * not declared to be thrown. */ public static void sneakyThrow(Throwable t) { sneakyThrowInternal(t); @@ -1760,7 +2294,7 @@ public static File createTempDirectory(Context context, String prefix) throws IO /** Creates a new empty file in the directory returned by {@link Context#getCacheDir()}. */ public static File createTempFile(Context context, String prefix) throws IOException { - return File.createTempFile(prefix, null, context.getCacheDir()); + return File.createTempFile(prefix, null, checkNotNull(context.getCacheDir())); } /** @@ -1775,8 +2309,9 @@ public static File createTempFile(Context context, String prefix) throws IOExcep */ public static int crc32(byte[] bytes, int start, int end, int initialValue) { for (int i = start; i < end; i++) { - initialValue = (initialValue << 8) - ^ CRC32_BYTES_MSBF[((initialValue >>> 24) ^ (bytes[i] & 0xFF)) & 0xFF]; + initialValue = + (initialValue << 8) + ^ CRC32_BYTES_MSBF[((initialValue >>> 24) ^ (bytes[i] & 0xFF)) & 0xFF]; } return initialValue; } @@ -1798,47 +2333,32 @@ public static int crc8(byte[] bytes, int start, int end, int initialValue) { return initialValue; } + /** Compresses {@code input} using gzip and returns the result in a newly allocated byte array. 
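
A short sketch of the string helpers touched in this hunk: the FAT32-safe escape/unescape round trip and the (now sign-aware) getStringForTime. The class name and sample values are illustrative only:

    import com.google.android.exoplayer2.util.Util;
    import java.util.Formatter;
    import java.util.Locale;

    final class StringHelpersSketch {
      public static void main(String[] args) {
        // Escaping is reversible: unescapeFileName returns the original name, or null if malformed.
        String escaped = Util.escapeFileName("what?.mkv");
        System.out.println(escaped + " -> " + Util.unescapeFileName(escaped));

        // getStringForTime reuses one StringBuilder/Formatter pair; negative times keep a "-" prefix.
        StringBuilder builder = new StringBuilder();
        Formatter formatter = new Formatter(builder, Locale.getDefault());
        System.out.println(Util.getStringForTime(builder, formatter, 125_500)); // 02:06
        System.out.println(Util.getStringForTime(builder, formatter, -5_000));  // -00:05
      }
    }
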
*/ + public static byte[] gzip(byte[] input) { + ByteArrayOutputStream output = new ByteArrayOutputStream(); + try (GZIPOutputStream os = new GZIPOutputStream(output)) { + os.write(input); + } catch (IOException e) { + // A ByteArrayOutputStream wrapped in a GZipOutputStream should never throw IOException since + // no I/O is happening. + throw new IllegalStateException(e); + } + return output.toByteArray(); + } + /** - * Returns the {@link C.NetworkType} of the current network connection. + * Absolute get method for reading an int value in {@link ByteOrder#BIG_ENDIAN} in a {@link + * ByteBuffer}. Same as {@link ByteBuffer#getInt(int)} except the buffer's order as returned by + * {@link ByteBuffer#order()} is ignored and {@link ByteOrder#BIG_ENDIAN} is used instead. * - * @param context A context to access the connectivity manager. - * @return The {@link C.NetworkType} of the current network connection. + * @param buffer The buffer from which to read an int in big endian. + * @param index The index from which the bytes will be read. + * @return The int value at the given index with the buffer bytes ordered most significant to + * least significant. */ - @C.NetworkType - public static int getNetworkType(Context context) { - if (context == null) { - // Note: This is for backward compatibility only (context used to be @Nullable). - return C.NETWORK_TYPE_UNKNOWN; - } - NetworkInfo networkInfo; - ConnectivityManager connectivityManager = - (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE); - if (connectivityManager == null) { - return C.NETWORK_TYPE_UNKNOWN; - } - try { - networkInfo = connectivityManager.getActiveNetworkInfo(); - } catch (SecurityException e) { - // Expected if permission was revoked. - return C.NETWORK_TYPE_UNKNOWN; - } - if (networkInfo == null || !networkInfo.isConnected()) { - return C.NETWORK_TYPE_OFFLINE; - } - switch (networkInfo.getType()) { - case ConnectivityManager.TYPE_WIFI: - return C.NETWORK_TYPE_WIFI; - case ConnectivityManager.TYPE_WIMAX: - return C.NETWORK_TYPE_4G; - case ConnectivityManager.TYPE_MOBILE: - case ConnectivityManager.TYPE_MOBILE_DUN: - case ConnectivityManager.TYPE_MOBILE_HIPRI: - return getMobileNetworkType(networkInfo); - case ConnectivityManager.TYPE_ETHERNET: - return C.NETWORK_TYPE_ETHERNET; - default: // VPN, Bluetooth, Dummy. - return C.NETWORK_TYPE_OTHER; - } + public static int getBigEndianInt(ByteBuffer buffer, int index) { + int value = buffer.getInt(index); + return buffer.order() == ByteOrder.BIG_ENDIAN ? value : Integer.reverseBytes(value); } /** @@ -1850,16 +2370,17 @@ public static int getNetworkType(Context context) { */ public static String getCountryCode(@Nullable Context context) { if (context != null) { + @Nullable TelephonyManager telephonyManager = (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE); if (telephonyManager != null) { String countryCode = telephonyManager.getNetworkCountryIso(); if (!TextUtils.isEmpty(countryCode)) { - return toUpperInvariant(countryCode); + return Ascii.toUpperCase(countryCode); } } } - return toUpperInvariant(Locale.getDefault().getCountry()); + return Ascii.toUpperCase(Locale.getDefault().getCountry()); } /** @@ -1874,6 +2395,11 @@ public static String[] getSystemLanguageCodes() { return systemLocales; } + /** Returns the default {@link Locale.Category#DISPLAY DISPLAY} {@link Locale}. */ + public static Locale getDefaultDisplayLocale() { + return SDK_INT >= 24 ? 
Locale.getDefault(Locale.Category.DISPLAY) : Locale.getDefault(); + } + /** * Uncompresses the data in {@code input}. * @@ -1891,27 +2417,27 @@ public static boolean inflate( if (input.bytesLeft() <= 0) { return false; } - byte[] outputData = output.data; - if (outputData.length < input.bytesLeft()) { - outputData = new byte[2 * input.bytesLeft()]; + if (output.capacity() < input.bytesLeft()) { + output.ensureCapacity(2 * input.bytesLeft()); } if (inflater == null) { inflater = new Inflater(); } - inflater.setInput(input.data, input.getPosition(), input.bytesLeft()); + inflater.setInput(input.getData(), input.getPosition(), input.bytesLeft()); try { int outputSize = 0; while (true) { - outputSize += inflater.inflate(outputData, outputSize, outputData.length - outputSize); + outputSize += + inflater.inflate(output.getData(), outputSize, output.capacity() - outputSize); if (inflater.finished()) { - output.reset(outputData, outputSize); + output.setLimit(outputSize); return true; } if (inflater.needsDictionary() || inflater.needsInput()) { return false; } - if (outputSize == outputData.length) { - outputData = Arrays.copyOf(outputData, outputData.length * 2); + if (outputSize == output.capacity()) { + output.ensureCapacity(output.capacity() * 2); } } } catch (DataFormatException e) { @@ -1929,12 +2455,24 @@ public static boolean inflate( */ public static boolean isTv(Context context) { // See https://developer.android.com/training/tv/start/hardware.html#runtime-check. + @Nullable UiModeManager uiModeManager = (UiModeManager) context.getApplicationContext().getSystemService(UI_MODE_SERVICE); return uiModeManager != null && uiModeManager.getCurrentModeType() == Configuration.UI_MODE_TYPE_TELEVISION; } + /** + * Returns whether the app is running on an automotive device. + * + * @param context Any context. + * @return Whether the app is running on an automotive device. + */ + public static boolean isAutomotive(Context context) { + return SDK_INT >= 23 + && context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUTOMOTIVE); + } + /** * Gets the size of the current mode of the default display, in pixels. * @@ -1948,8 +2486,23 @@ public static boolean isTv(Context context) { * @return The size of the current mode, in pixels. */ public static Point getCurrentDisplayModeSize(Context context) { - WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); - return getCurrentDisplayModeSize(context, windowManager.getDefaultDisplay()); + @Nullable Display defaultDisplay = null; + if (SDK_INT >= 17) { + @Nullable + DisplayManager displayManager = + (DisplayManager) context.getSystemService(Context.DISPLAY_SERVICE); + // We don't expect displayManager to ever be null, so this check is just precautionary. + // Consider removing it when the library minSdkVersion is increased to 17 or higher. + if (displayManager != null) { + defaultDisplay = displayManager.getDisplay(Display.DEFAULT_DISPLAY); + } + } + if (defaultDisplay == null) { + WindowManager windowManager = + checkNotNull((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)); + defaultDisplay = windowManager.getDefaultDisplay(); + } + return getCurrentDisplayModeSize(context, defaultDisplay); } /** @@ -1966,23 +2519,23 @@ public static Point getCurrentDisplayModeSize(Context context) { * @return The size of the current mode, in pixels. 
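
The inflate() rewrite above goes through ParsableByteArray's accessors instead of its raw array. A round-trip sketch; note the asymmetry that Util.gzip emits gzip-framed data while Util.inflate expects zlib/DEFLATE input such as java.util.zip.Deflater produces (the sample string is arbitrary):

    import com.google.android.exoplayer2.util.ParsableByteArray;
    import com.google.android.exoplayer2.util.Util;
    import java.nio.charset.StandardCharsets;
    import java.util.zip.Deflater;

    final class InflateSketch {
      public static void main(String[] args) {
        byte[] original = "hello exoplayer".getBytes(StandardCharsets.UTF_8);

        // Produce zlib-wrapped data, which Util.inflate's default Inflater understands.
        Deflater deflater = new Deflater();
        deflater.setInput(original);
        deflater.finish();
        byte[] compressed = new byte[256];
        int compressedSize = deflater.deflate(compressed);
        deflater.end();

        ParsableByteArray input = new ParsableByteArray(compressed, compressedSize);
        ParsableByteArray output = new ParsableByteArray();
        if (Util.inflate(input, output, /* inflater= */ null)) {
          System.out.println(new String(output.getData(), 0, output.limit(), StandardCharsets.UTF_8));
        }
      }
    }
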
*/ public static Point getCurrentDisplayModeSize(Context context, Display display) { - if (Util.SDK_INT <= 29 && display.getDisplayId() == Display.DEFAULT_DISPLAY && isTv(context)) { - // On Android TVs it is common for the UI to be configured for a lower resolution than - // SurfaceViews can output. Before API 26 the Display object does not provide a way to - // identify this case, and up to and including API 28 many devices still do not correctly set - // their hardware compositor output size. - - // Sony Android TVs advertise support for 4k output via a system feature. - if ("Sony".equals(Util.MANUFACTURER) - && Util.MODEL.startsWith("BRAVIA") - && context.getPackageManager().hasSystemFeature("com.sony.dtv.hardware.panel.qfhd")) { - return new Point(3840, 2160); - } - - // Otherwise check the system property for display size. From API 28 treble may prevent the - // system from writing sys.display-size so we check vendor.display-size instead. + if (display.getDisplayId() == Display.DEFAULT_DISPLAY && isTv(context)) { + // On Android TVs it's common for the UI to be driven at a lower resolution than the physical + // resolution of the display (e.g., driving the UI at 1080p when the display is 4K). + // SurfaceView outputs are still able to use the full physical resolution on such devices. + // + // Prior to API level 26, the Display object did not provide a way to obtain the true physical + // resolution of the display. From API level 26, Display.getMode().getPhysical[Width|Height] + // is expected to return the display's true physical resolution, but we still see devices + // setting their hardware compositor output size incorrectly, which makes this unreliable. + // Hence for TV devices, we try and read the display's true physical resolution from system + // properties. + // + // From API level 28, Treble may prevent the system from writing sys.display-size, so we check + // vendor.display-size instead. + @Nullable String displaySize = - Util.SDK_INT < 28 + SDK_INT < 28 ? getSystemProperty("sys.display-size") : getSystemProperty("vendor.display-size"); // If we managed to read the display size, attempt to parse it. @@ -2001,12 +2554,19 @@ public static Point getCurrentDisplayModeSize(Context context, Display display) } Log.e(TAG, "Invalid display size: " + displaySize); } + + // Sony Android TVs advertise support for 4k output via a system feature. + if ("Sony".equals(MANUFACTURER) + && MODEL.startsWith("BRAVIA") + && context.getPackageManager().hasSystemFeature("com.sony.dtv.hardware.panel.qfhd")) { + return new Point(3840, 2160); + } } Point displaySize = new Point(); - if (Util.SDK_INT >= 23) { + if (SDK_INT >= 23) { getDisplaySizeV23(display, displaySize); - } else if (Util.SDK_INT >= 17) { + } else if (SDK_INT >= 17) { getDisplaySizeV17(display, displaySize); } else { getDisplaySizeV16(display, displaySize); @@ -2015,55 +2575,203 @@ public static Point getCurrentDisplayModeSize(Context context, Display display) } /** - * Extract renderer capabilities for the renderers created by the provided renderers factory. - * - * @param renderersFactory A {@link RenderersFactory}. - * @return The {@link RendererCapabilities} for each renderer created by the {@code - * renderersFactory}. 
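
The display-size probing above is typically consulted when deciding whether high-resolution tracks are worth selecting. A minimal sketch with a hypothetical helper; any Context works:

    import android.content.Context;
    import android.graphics.Point;
    import com.google.android.exoplayer2.util.Util;

    final class DisplaySizeSketch {
      /** Returns whether the default display can show the given video size pixel-for-pixel. */
      static boolean displayFitsVideo(Context context, int videoWidth, int videoHeight) {
        Point displaySize = Util.getCurrentDisplayModeSize(context);
        return displaySize.x >= videoWidth && displaySize.y >= videoHeight;
      }
    }
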
- */ - public static RendererCapabilities[] getRendererCapabilities(RenderersFactory renderersFactory) { - Renderer[] renderers = - renderersFactory.createRenderers( - new Handler(), - new VideoRendererEventListener() {}, - new AudioRendererEventListener() {}, - (cues) -> {}, - (metadata) -> {}, - /* drmSessionManager= */ null); - RendererCapabilities[] capabilities = new RendererCapabilities[renderers.length]; - for (int i = 0; i < renderers.length; i++) { - capabilities[i] = renderers[i].getCapabilities(); - } - return capabilities; - } - - /** - * Returns a string representation of a {@code TRACK_TYPE_*} constant defined in {@link C}. + * Returns a string representation of a {@link C.TrackType}. * - * @param trackType A {@code TRACK_TYPE_*} constant, + * @param trackType A {@link C.TrackType} constant, * @return A string representation of this constant. */ - public static String getTrackTypeString(int trackType) { + public static String getTrackTypeString(@C.TrackType int trackType) { switch (trackType) { - case C.TRACK_TYPE_AUDIO: - return "audio"; case C.TRACK_TYPE_DEFAULT: return "default"; + case C.TRACK_TYPE_AUDIO: + return "audio"; + case C.TRACK_TYPE_VIDEO: + return "video"; + case C.TRACK_TYPE_TEXT: + return "text"; + case C.TRACK_TYPE_IMAGE: + return "image"; case C.TRACK_TYPE_METADATA: return "metadata"; case C.TRACK_TYPE_CAMERA_MOTION: return "camera motion"; case C.TRACK_TYPE_NONE: return "none"; - case C.TRACK_TYPE_TEXT: - return "text"; - case C.TRACK_TYPE_VIDEO: - return "video"; + case C.TRACK_TYPE_UNKNOWN: + return "unknown"; default: return trackType >= C.TRACK_TYPE_CUSTOM_BASE ? "custom (" + trackType + ")" : "?"; } } + /** + * Returns the current time in milliseconds since the epoch. + * + * @param elapsedRealtimeEpochOffsetMs The offset between {@link SystemClock#elapsedRealtime()} + * and the time since the Unix epoch, or {@link C#TIME_UNSET} if unknown. + * @return The Unix time in milliseconds since the epoch. + */ + public static long getNowUnixTimeMs(long elapsedRealtimeEpochOffsetMs) { + return elapsedRealtimeEpochOffsetMs == C.TIME_UNSET + ? System.currentTimeMillis() + : SystemClock.elapsedRealtime() + elapsedRealtimeEpochOffsetMs; + } + + /** + * Moves the elements starting at {@code fromIndex} to {@code newFromIndex}. + * + * @param items The list of which to move elements. + * @param fromIndex The index at which the items to move start. + * @param toIndex The index up to which elements should be moved (exclusive). + * @param newFromIndex The new from index. + */ + @SuppressWarnings("ExtendsObject") // See go/lsc-extends-object + public static void moveItems( + List items, int fromIndex, int toIndex, int newFromIndex) { + ArrayDeque removedItems = new ArrayDeque<>(); + int removedItemsLength = toIndex - fromIndex; + for (int i = removedItemsLength - 1; i >= 0; i--) { + removedItems.addFirst(items.remove(fromIndex + i)); + } + items.addAll(min(newFromIndex, items.size()), removedItems); + } + + /** Returns whether the table exists in the database. */ + public static boolean tableExists(SQLiteDatabase database, String tableName) { + long count = + DatabaseUtils.queryNumEntries( + database, "sqlite_master", "tbl_name = ?", new String[] {tableName}); + return count > 0; + } + + /** + * Attempts to parse an error code from a diagnostic string found in framework media exceptions. + * + *

      For example: android.media.MediaCodec.error_1 or android.media.MediaDrm.error_neg_2. + * + * @param diagnosticsInfo A string from which to parse the error code. + * @return The parser error code, or 0 if an error code could not be parsed. + */ + public static int getErrorCodeFromPlatformDiagnosticsInfo(@Nullable String diagnosticsInfo) { + // TODO (internal b/192337376): Change 0 for ERROR_UNKNOWN once available. + if (diagnosticsInfo == null) { + return 0; + } + String[] strings = split(diagnosticsInfo, "_"); + int length = strings.length; + if (length < 2) { + return 0; + } + String digitsSection = strings[length - 1]; + boolean isNegative = length >= 3 && "neg".equals(strings[length - 2]); + try { + int errorCode = Integer.parseInt(Assertions.checkNotNull(digitsSection)); + return isNegative ? -errorCode : errorCode; + } catch (NumberFormatException e) { + return 0; + } + } + + /** + * Returns string representation of a {@link C.FormatSupport} flag. + * + * @param formatSupport A {@link C.FormatSupport} flag. + * @return A string representation of the flag. + */ + public static String getFormatSupportString(@C.FormatSupport int formatSupport) { + switch (formatSupport) { + case C.FORMAT_HANDLED: + return "YES"; + case C.FORMAT_EXCEEDS_CAPABILITIES: + return "NO_EXCEEDS_CAPABILITIES"; + case C.FORMAT_UNSUPPORTED_DRM: + return "NO_UNSUPPORTED_DRM"; + case C.FORMAT_UNSUPPORTED_SUBTYPE: + return "NO_UNSUPPORTED_TYPE"; + case C.FORMAT_UNSUPPORTED_TYPE: + return "NO"; + default: + throw new IllegalStateException(); + } + } + + /** + * Returns the {@link Commands} available in the {@link Player}. + * + * @param player The {@link Player}. + * @param permanentAvailableCommands The commands permanently available in the player. + * @return The available {@link Commands}. + */ + public static Commands getAvailableCommands(Player player, Commands permanentAvailableCommands) { + boolean isPlayingAd = player.isPlayingAd(); + boolean isCurrentMediaItemSeekable = player.isCurrentMediaItemSeekable(); + boolean hasPreviousMediaItem = player.hasPreviousMediaItem(); + boolean hasNextMediaItem = player.hasNextMediaItem(); + boolean isCurrentMediaItemLive = player.isCurrentMediaItemLive(); + boolean isCurrentMediaItemDynamic = player.isCurrentMediaItemDynamic(); + boolean isTimelineEmpty = player.getCurrentTimeline().isEmpty(); + return new Commands.Builder() + .addAll(permanentAvailableCommands) + .addIf(COMMAND_SEEK_TO_DEFAULT_POSITION, !isPlayingAd) + .addIf(COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM, isCurrentMediaItemSeekable && !isPlayingAd) + .addIf(COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM, hasPreviousMediaItem && !isPlayingAd) + .addIf( + COMMAND_SEEK_TO_PREVIOUS, + !isTimelineEmpty + && (hasPreviousMediaItem || !isCurrentMediaItemLive || isCurrentMediaItemSeekable) + && !isPlayingAd) + .addIf(COMMAND_SEEK_TO_NEXT_MEDIA_ITEM, hasNextMediaItem && !isPlayingAd) + .addIf( + COMMAND_SEEK_TO_NEXT, + !isTimelineEmpty + && (hasNextMediaItem || (isCurrentMediaItemLive && isCurrentMediaItemDynamic)) + && !isPlayingAd) + .addIf(COMMAND_SEEK_TO_MEDIA_ITEM, !isPlayingAd) + .addIf(COMMAND_SEEK_BACK, isCurrentMediaItemSeekable && !isPlayingAd) + .addIf(COMMAND_SEEK_FORWARD, isCurrentMediaItemSeekable && !isPlayingAd) + .build(); + } + + /** + * Returns the sum of all summands of the given array. + * + * @param summands The summands to calculate the sum from. + * @return The sum of all summands. + */ + public static long sum(long... 
summands) { + long sum = 0; + for (long summand : summands) { + sum += summand; + } + return sum; + } + + /** + * Returns a {@link Drawable} for the given resource or throws a {@link + * Resources.NotFoundException} if not found. + * + * @param context The context to get the theme from starting with API 21. + * @param resources The resources to load the drawable from. + * @param drawableRes The drawable resource int. + * @return The loaded {@link Drawable}. + */ + public static Drawable getDrawable( + Context context, Resources resources, @DrawableRes int drawableRes) { + return SDK_INT >= 21 + ? Api21.getDrawable(context, resources, drawableRes) + : resources.getDrawable(drawableRes); + } + + /** + * Returns a string representation of the integer using radix value {@link Character#MAX_RADIX}. + * + * @param i An integer to be converted to String. + */ + public static String intToStringMaxRadix(int i) { + return Integer.toString(i, Character.MAX_RADIX); + } + @Nullable private static String getSystemProperty(String name) { try { @@ -2077,14 +2785,14 @@ private static String getSystemProperty(String name) { } } - @TargetApi(23) + @RequiresApi(23) private static void getDisplaySizeV23(Display display, Point outSize) { Display.Mode mode = display.getMode(); outSize.x = mode.getPhysicalWidth(); outSize.y = mode.getPhysicalHeight(); } - @TargetApi(17) + @RequiresApi(17) private static void getDisplaySizeV17(Display display, Point outSize) { display.getRealSize(outSize); } @@ -2100,48 +2808,16 @@ private static String[] getSystemLocales() { : new String[] {getLocaleLanguageTag(config.locale)}; } - @TargetApi(24) + @RequiresApi(24) private static String[] getSystemLocalesV24(Configuration config) { - return Util.split(config.getLocales().toLanguageTags(), ","); + return split(config.getLocales().toLanguageTags(), ","); } - @TargetApi(21) + @RequiresApi(21) private static String getLocaleLanguageTagV21(Locale locale) { return locale.toLanguageTag(); } - private static @C.NetworkType int getMobileNetworkType(NetworkInfo networkInfo) { - switch (networkInfo.getSubtype()) { - case TelephonyManager.NETWORK_TYPE_EDGE: - case TelephonyManager.NETWORK_TYPE_GPRS: - return C.NETWORK_TYPE_2G; - case TelephonyManager.NETWORK_TYPE_1xRTT: - case TelephonyManager.NETWORK_TYPE_CDMA: - case TelephonyManager.NETWORK_TYPE_EVDO_0: - case TelephonyManager.NETWORK_TYPE_EVDO_A: - case TelephonyManager.NETWORK_TYPE_EVDO_B: - case TelephonyManager.NETWORK_TYPE_HSDPA: - case TelephonyManager.NETWORK_TYPE_HSPA: - case TelephonyManager.NETWORK_TYPE_HSUPA: - case TelephonyManager.NETWORK_TYPE_IDEN: - case TelephonyManager.NETWORK_TYPE_UMTS: - case TelephonyManager.NETWORK_TYPE_EHRPD: - case TelephonyManager.NETWORK_TYPE_HSPAP: - case TelephonyManager.NETWORK_TYPE_TD_SCDMA: - return C.NETWORK_TYPE_3G; - case TelephonyManager.NETWORK_TYPE_LTE: - return C.NETWORK_TYPE_4G; - case TelephonyManager.NETWORK_TYPE_NR: - return C.NETWORK_TYPE_5G; - case TelephonyManager.NETWORK_TYPE_IWLAN: - return C.NETWORK_TYPE_WIFI; - case TelephonyManager.NETWORK_TYPE_GSM: - case TelephonyManager.NETWORK_TYPE_UNKNOWN: - default: // Future mobile network types. 
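
Among the helpers added in this block, moveItems is a list-reordering primitive: it removes the window [fromIndex, toIndex) and reinserts it, in order, at newFromIndex (clamped to the shortened list). A small sketch with arbitrary contents:

    import com.google.android.exoplayer2.util.Util;
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    final class MoveItemsSketch {
      public static void main(String[] args) {
        List<String> items = new ArrayList<>(Arrays.asList("a", "b", "c", "d", "e"));
        // Move the window ["b", "c"] so that it starts at index 3 of the shrunken list.
        Util.moveItems(items, /* fromIndex= */ 1, /* toIndex= */ 3, /* newFromIndex= */ 3);
        System.out.println(items); // [a, d, e, b, c]
      }
    }
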
- return C.NETWORK_TYPE_CELLULAR_UNKNOWN; - } - } - private static HashMap createIsoLanguageReplacementMap() { String[] iso2Languages = Locale.getISOLanguages(); HashMap replacedLanguages = @@ -2166,11 +2842,29 @@ private static HashMap createIsoLanguageReplacementMap() { return replacedLanguages; } - private static String maybeReplaceGrandfatheredLanguageTags(String languageTag) { - for (int i = 0; i < isoGrandfatheredTagReplacements.length; i += 2) { - if (languageTag.startsWith(isoGrandfatheredTagReplacements[i])) { - return isoGrandfatheredTagReplacements[i + 1] - + languageTag.substring(/* beginIndex= */ isoGrandfatheredTagReplacements[i].length()); + @RequiresApi(api = Build.VERSION_CODES.M) + private static boolean requestExternalStoragePermission(Activity activity) { + if (activity.checkSelfPermission(permission.READ_EXTERNAL_STORAGE) + != PackageManager.PERMISSION_GRANTED) { + activity.requestPermissions( + new String[] {permission.READ_EXTERNAL_STORAGE}, /* requestCode= */ 0); + return true; + } + return false; + } + + @RequiresApi(api = Build.VERSION_CODES.N) + private static boolean isTrafficRestricted(Uri uri) { + return "http".equals(uri.getScheme()) + && !NetworkSecurityPolicy.getInstance() + .isCleartextTrafficPermitted(checkNotNull(uri.getHost())); + } + + private static String maybeReplaceLegacyLanguageTags(String languageTag) { + for (int i = 0; i < isoLegacyTagReplacements.length; i += 2) { + if (languageTag.startsWith(isoLegacyTagReplacements[i])) { + return isoLegacyTagReplacements[i + 1] + + languageTag.substring(/* beginIndex= */ isoLegacyTagReplacements[i].length()); } } return languageTag; @@ -2210,6 +2904,7 @@ private static String maybeReplaceGrandfatheredLanguageTags(String languageTag) "ji", "yi", // Individual macrolanguage codes mapped back to full macrolanguage code. // See https://en.wikipedia.org/wiki/ISO_639_macrolanguage + "arb", "ar-arb", "in", "ms-ind", "ind", "ms-ind", "nb", "no-nob", @@ -2230,9 +2925,9 @@ private static String maybeReplaceGrandfatheredLanguageTags(String languageTag) "hsn", "zh-hsn" }; - // "Grandfathered tags", replaced by modern equivalents (including macrolanguage) + // Legacy tags that have been replaced by modern equivalents (including macrolanguage) // See https://www.iana.org/assignments/language-subtag-registry/language-subtag-registry. - private static final String[] isoGrandfatheredTagReplacements = + private static final String[] isoLegacyTagReplacements = new String[] { "i-lux", "lb", "i-hak", "zh-hak", @@ -2313,4 +3008,12 @@ private static String maybeReplaceGrandfatheredLanguageTags(String languageTag) 0xDE, 0xD9, 0xD0, 0xD7, 0xC2, 0xC5, 0xCC, 0xCB, 0xE6, 0xE1, 0xE8, 0xEF, 0xFA, 0xFD, 0xF4, 0xF3 }; + + @RequiresApi(21) + private static final class Api21 { + @DoNotInline + public static Drawable getDrawable(Context context, Resources resources, @DrawableRes int res) { + return resources.getDrawable(res, context.getTheme()); + } + } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/XmlPullParserUtil.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/XmlPullParserUtil.java index a9b252b775..5aa002bdd6 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/XmlPullParserUtil.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/util/XmlPullParserUtil.java @@ -19,9 +19,7 @@ import org.xmlpull.v1.XmlPullParser; import org.xmlpull.v1.XmlPullParserException; -/** - * {@link XmlPullParser} utility methods. 
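
For the XmlPullParserUtil changes below, a usage sketch of the attribute helpers; the class name and the inline XML are made up:

    import com.google.android.exoplayer2.util.XmlPullParserUtil;
    import java.io.IOException;
    import java.io.StringReader;
    import org.xmlpull.v1.XmlPullParser;
    import org.xmlpull.v1.XmlPullParserException;
    import org.xmlpull.v1.XmlPullParserFactory;

    final class XmlAttributeSketch {
      public static void main(String[] args) throws XmlPullParserException, IOException {
        XmlPullParser xpp = XmlPullParserFactory.newInstance().newPullParser();
        xpp.setInput(new StringReader("<MPD minBufferTime=\"PT1.5S\"/>"));
        while (xpp.next() != XmlPullParser.END_DOCUMENT) {
          if (XmlPullParserUtil.isStartTag(xpp, "MPD")) {
            // Returns null if the attribute is absent or the parser is not on a start tag.
            System.out.println(XmlPullParserUtil.getAttributeValue(xpp, "minBufferTime")); // PT1.5S
          }
        }
      }
    }
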
- */ +/** {@link XmlPullParser} utility methods. */ public final class XmlPullParserUtil { private XmlPullParserUtil() {} @@ -94,7 +92,8 @@ public static boolean isStartTagIgnorePrefix(XmlPullParser xpp, String name) * @return The value of the attribute, or null if the current event is not a start tag or if no * such attribute was found. */ - public static @Nullable String getAttributeValue(XmlPullParser xpp, String attributeName) { + @Nullable + public static String getAttributeValue(XmlPullParser xpp, String attributeName) { int attributeCount = xpp.getAttributeCount(); for (int i = 0; i < attributeCount; i++) { if (xpp.getAttributeName(i).equals(attributeName)) { @@ -113,8 +112,8 @@ public static boolean isStartTagIgnorePrefix(XmlPullParser xpp, String name) * @return The value of the attribute, or null if the current event is not a start tag or if no * such attribute was found. */ - public static @Nullable String getAttributeValueIgnorePrefix( - XmlPullParser xpp, String attributeName) { + @Nullable + public static String getAttributeValueIgnorePrefix(XmlPullParser xpp, String attributeName) { int attributeCount = xpp.getAttributeCount(); for (int i = 0; i < attributeCount; i++) { if (stripPrefix(xpp.getAttributeName(i)).equals(attributeName)) { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/AvcConfig.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/AvcConfig.java index 3886fdfb23..31dce7119b 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/AvcConfig.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/AvcConfig.java @@ -15,6 +15,7 @@ */ package com.google.android.exoplayer2.video; +import androidx.annotation.Nullable; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.util.CodecSpecificDataUtil; @@ -24,23 +25,15 @@ import java.util.ArrayList; import java.util.List; -/** - * AVC configuration data. - */ +/** AVC configuration data. */ public final class AvcConfig { - public final List initializationData; - public final int nalUnitLengthFieldLength; - public final int width; - public final int height; - public final float pixelWidthAspectRatio; - /** * Parses AVC configuration data. * * @param data A {@link ParsableByteArray}, whose position is set to the start of the AVC * configuration data to parse. - * @return A parsed representation of the HEVC configuration data. + * @return A parsed representation of the AVC configuration data. * @throws ParserException If an error occurred parsing the data. 
*/ public static AvcConfig parse(ParsableByteArray data) throws ParserException { @@ -62,36 +55,78 @@ public static AvcConfig parse(ParsableByteArray data) throws ParserException { int width = Format.NO_VALUE; int height = Format.NO_VALUE; - float pixelWidthAspectRatio = 1; + float pixelWidthHeightRatio = 1; + @Nullable String codecs = null; if (numSequenceParameterSets > 0) { byte[] sps = initializationData.get(0); - SpsData spsData = NalUnitUtil.parseSpsNalUnit(initializationData.get(0), - nalUnitLengthFieldLength, sps.length); + SpsData spsData = + NalUnitUtil.parseSpsNalUnit( + initializationData.get(0), nalUnitLengthFieldLength, sps.length); width = spsData.width; height = spsData.height; - pixelWidthAspectRatio = spsData.pixelWidthAspectRatio; + pixelWidthHeightRatio = spsData.pixelWidthHeightRatio; + codecs = + CodecSpecificDataUtil.buildAvcCodecString( + spsData.profileIdc, spsData.constraintsFlagsAndReservedZero2Bits, spsData.levelIdc); } - return new AvcConfig(initializationData, nalUnitLengthFieldLength, width, height, - pixelWidthAspectRatio); + + return new AvcConfig( + initializationData, + nalUnitLengthFieldLength, + width, + height, + pixelWidthHeightRatio, + codecs); } catch (ArrayIndexOutOfBoundsException e) { - throw new ParserException("Error parsing AVC config", e); + throw ParserException.createForMalformedContainer("Error parsing AVC config", e); } } - private AvcConfig(List initializationData, int nalUnitLengthFieldLength, - int width, int height, float pixelWidthAspectRatio) { + /** + * List of buffers containing the codec-specific data to be provided to the decoder. + * + *

      See {@link Format#initializationData}. + */ + public final List initializationData; + + /** The length of the NAL unit length field in the bitstream's container, in bytes. */ + public final int nalUnitLengthFieldLength; + + /** The width of each decoded frame, or {@link Format#NO_VALUE} if unknown. */ + public final int width; + + /** The height of each decoded frame, or {@link Format#NO_VALUE} if unknown. */ + public final int height; + + /** The pixel width to height ratio. */ + public final float pixelWidthHeightRatio; + + /** + * An RFC 6381 codecs string representing the video format, or {@code null} if not known. + * + *

      See {@link Format#codecs}. + */ + @Nullable public final String codecs; + + private AvcConfig( + List initializationData, + int nalUnitLengthFieldLength, + int width, + int height, + float pixelWidthHeightRatio, + @Nullable String codecs) { this.initializationData = initializationData; this.nalUnitLengthFieldLength = nalUnitLengthFieldLength; this.width = width; this.height = height; - this.pixelWidthAspectRatio = pixelWidthAspectRatio; + this.pixelWidthHeightRatio = pixelWidthHeightRatio; + this.codecs = codecs; } private static byte[] buildNalUnitForChild(ParsableByteArray data) { int length = data.readUnsignedShort(); int offset = data.getPosition(); data.skipBytes(length); - return CodecSpecificDataUtil.buildNalUnit(data.data, offset, length); + return CodecSpecificDataUtil.buildNalUnit(data.getData(), offset, length); } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/ColorInfo.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/ColorInfo.java index ed2ca9c034..53a0a4147a 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/ColorInfo.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/ColorInfo.java @@ -15,40 +15,100 @@ */ package com.google.android.exoplayer2.video; -import android.os.Parcel; -import android.os.Parcelable; +import android.os.Bundle; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Bundleable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.util.Util; import java.util.Arrays; +import org.checkerframework.dataflow.qual.Pure; /** * Stores color info. + * + *

      When a {@code null} {@code ColorInfo} instance is used, this often represents a generic {@link + * #SDR_BT709_LIMITED} instance. */ -public final class ColorInfo implements Parcelable { +public final class ColorInfo implements Bundleable { + + /** Color info representing SDR BT.709 limited range, which is a common SDR video color format. */ + public static final ColorInfo SDR_BT709_LIMITED = + new ColorInfo( + C.COLOR_SPACE_BT709, + C.COLOR_RANGE_LIMITED, + C.COLOR_TRANSFER_SDR, + /* hdrStaticInfo= */ null); + + /** + * Returns the {@link C.ColorSpace} corresponding to the given ISO color primary code, as per + * table A.7.21.1 in Rec. ITU-T T.832 (03/2009), or {@link Format#NO_VALUE} if no mapping can be + * made. + */ + @Pure + public static @C.ColorSpace int isoColorPrimariesToColorSpace(int isoColorPrimaries) { + switch (isoColorPrimaries) { + case 1: + return C.COLOR_SPACE_BT709; + case 4: // BT.470M. + case 5: // BT.470BG. + case 6: // SMPTE 170M. + case 7: // SMPTE 240M. + return C.COLOR_SPACE_BT601; + case 9: + return C.COLOR_SPACE_BT2020; + default: + return Format.NO_VALUE; + } + } + + /** + * Returns the {@link C.ColorTransfer} corresponding to the given ISO transfer characteristics + * code, as per table A.7.21.2 in Rec. ITU-T T.832 (03/2009), or {@link Format#NO_VALUE} if no + * mapping can be made. + */ + @Pure + public static @C.ColorTransfer int isoTransferCharacteristicsToColorTransfer( + int isoTransferCharacteristics) { + switch (isoTransferCharacteristics) { + case 1: // BT.709. + case 6: // SMPTE 170M. + case 7: // SMPTE 240M. + return C.COLOR_TRANSFER_SDR; + case 16: + return C.COLOR_TRANSFER_ST2084; + case 18: + return C.COLOR_TRANSFER_HLG; + default: + return Format.NO_VALUE; + } + } + + /** Returns whether the {@code ColorInfo} uses an HDR {@link C.ColorTransfer}. */ + public static boolean isTransferHdr(@Nullable ColorInfo colorInfo) { + return colorInfo != null + && colorInfo.colorTransfer != Format.NO_VALUE + && colorInfo.colorTransfer != C.COLOR_TRANSFER_SDR; + } /** * The color space of the video. Valid values are {@link C#COLOR_SPACE_BT601}, {@link * C#COLOR_SPACE_BT709}, {@link C#COLOR_SPACE_BT2020} or {@link Format#NO_VALUE} if unknown. */ - @C.ColorSpace - public final int colorSpace; + public final @C.ColorSpace int colorSpace; /** * The color range of the video. Valid values are {@link C#COLOR_RANGE_LIMITED}, {@link * C#COLOR_RANGE_FULL} or {@link Format#NO_VALUE} if unknown. */ - @C.ColorRange - public final int colorRange; + public final @C.ColorRange int colorRange; /** - * The color transfer characteristicks of the video. Valid values are {@link - * C#COLOR_TRANSFER_HLG}, {@link C#COLOR_TRANSFER_ST2084}, {@link C#COLOR_TRANSFER_SDR} or {@link - * Format#NO_VALUE} if unknown. + * The color transfer characteristics of the video. Valid values are {@link C#COLOR_TRANSFER_HLG}, + * {@link C#COLOR_TRANSFER_ST2084}, {@link C#COLOR_TRANSFER_SDR} or {@link Format#NO_VALUE} if + * unknown. */ - @C.ColorTransfer - public final int colorTransfer; + public final @C.ColorTransfer int colorTransfer; /** HdrStaticInfo as defined in CTA-861.3, or null if none specified. */ @Nullable public final byte[] hdrStaticInfo; @@ -75,16 +135,6 @@ public ColorInfo( this.hdrStaticInfo = hdrStaticInfo; } - @SuppressWarnings("ResourceType") - /* package */ ColorInfo(Parcel in) { - colorSpace = in.readInt(); - colorRange = in.readInt(); - colorTransfer = in.readInt(); - boolean hasHdrStaticInfo = Util.readBoolean(in); - hdrStaticInfo = hasHdrStaticInfo ? 
in.createByteArray() : null; - } - - // Parcelable implementation. @Override public boolean equals(@Nullable Object obj) { if (this == obj) { @@ -102,8 +152,15 @@ public boolean equals(@Nullable Object obj) { @Override public String toString() { - return "ColorInfo(" + colorSpace + ", " + colorRange + ", " + colorTransfer - + ", " + (hdrStaticInfo != null) + ")"; + return "ColorInfo(" + + colorSpace + + ", " + + colorRange + + ", " + + colorTransfer + + ", " + + (hdrStaticInfo != null) + + ")"; } @Override @@ -119,32 +176,28 @@ public int hashCode() { return hashCode; } - @Override - public int describeContents() { - return 0; - } + // Bundleable implementation + + private static final String FIELD_COLOR_SPACE = Util.intToStringMaxRadix(0); + private static final String FIELD_COLOR_RANGE = Util.intToStringMaxRadix(1); + private static final String FIELD_COLOR_TRANSFER = Util.intToStringMaxRadix(2); + private static final String FIELD_HDR_STATIC_INFO = Util.intToStringMaxRadix(3); @Override - public void writeToParcel(Parcel dest, int flags) { - dest.writeInt(colorSpace); - dest.writeInt(colorRange); - dest.writeInt(colorTransfer); - Util.writeBoolean(dest, hdrStaticInfo != null); - if (hdrStaticInfo != null) { - dest.writeByteArray(hdrStaticInfo); - } + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putInt(FIELD_COLOR_SPACE, colorSpace); + bundle.putInt(FIELD_COLOR_RANGE, colorRange); + bundle.putInt(FIELD_COLOR_TRANSFER, colorTransfer); + bundle.putByteArray(FIELD_HDR_STATIC_INFO, hdrStaticInfo); + return bundle; } - public static final Parcelable.Creator CREATOR = - new Parcelable.Creator() { - @Override - public ColorInfo createFromParcel(Parcel in) { - return new ColorInfo(in); - } - - @Override - public ColorInfo[] newArray(int size) { - return new ColorInfo[size]; - } - }; + public static final Creator CREATOR = + bundle -> + new ColorInfo( + bundle.getInt(FIELD_COLOR_SPACE, Format.NO_VALUE), + bundle.getInt(FIELD_COLOR_RANGE, Format.NO_VALUE), + bundle.getInt(FIELD_COLOR_TRANSFER, Format.NO_VALUE), + bundle.getByteArray(FIELD_HDR_STATIC_INFO)); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/DecoderVideoRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/DecoderVideoRenderer.java new file mode 100644 index 0000000000..87ef28706d --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/DecoderVideoRenderer.java @@ -0,0 +1,969 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
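
With ColorInfo switching from Parcelable to Bundleable above, its state now round-trips through a Bundle. A sketch using an HDR10-style instance; the values are illustrative:

    import android.os.Bundle;
    import com.google.android.exoplayer2.C;
    import com.google.android.exoplayer2.video.ColorInfo;

    final class ColorInfoBundleSketch {
      public static void main(String[] args) {
        ColorInfo hdr10 =
            new ColorInfo(
                C.COLOR_SPACE_BT2020,
                C.COLOR_RANGE_LIMITED,
                C.COLOR_TRANSFER_ST2084,
                /* hdrStaticInfo= */ null);

        Bundle bundle = hdr10.toBundle();
        ColorInfo restored = ColorInfo.CREATOR.fromBundle(bundle);

        System.out.println(restored.equals(hdr10));            // true
        System.out.println(ColorInfo.isTransferHdr(restored)); // true: ST 2084 (PQ) is an HDR transfer.
      }
    }
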
+ */ +package com.google.android.exoplayer2.video; + +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_DRM_SESSION_CHANGED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_REUSE_NOT_IMPLEMENTED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_NO; +import static com.google.android.exoplayer2.source.SampleStream.FLAG_REQUIRE_FORMAT; +import static java.lang.Math.max; +import static java.lang.annotation.ElementType.TYPE_USE; + +import android.os.Handler; +import android.os.SystemClock; +import android.view.Surface; +import androidx.annotation.CallSuper; +import androidx.annotation.IntDef; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.BaseRenderer; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.C.VideoOutputMode; +import com.google.android.exoplayer2.ExoPlaybackException; +import com.google.android.exoplayer2.ExoPlayer; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.FormatHolder; +import com.google.android.exoplayer2.PlaybackException; +import com.google.android.exoplayer2.PlayerMessage; +import com.google.android.exoplayer2.decoder.CryptoConfig; +import com.google.android.exoplayer2.decoder.Decoder; +import com.google.android.exoplayer2.decoder.DecoderCounters; +import com.google.android.exoplayer2.decoder.DecoderException; +import com.google.android.exoplayer2.decoder.DecoderInputBuffer; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation; +import com.google.android.exoplayer2.decoder.VideoDecoderOutputBuffer; +import com.google.android.exoplayer2.drm.DrmSession; +import com.google.android.exoplayer2.drm.DrmSession.DrmSessionException; +import com.google.android.exoplayer2.source.SampleStream.ReadDataResult; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.TimedValueQueue; +import com.google.android.exoplayer2.util.TraceUtil; +import com.google.android.exoplayer2.util.Util; +import com.google.android.exoplayer2.video.VideoRendererEventListener.EventDispatcher; +import java.lang.annotation.Documented; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Decodes and renders video using a {@link Decoder}. + * + *

      This renderer accepts the following messages sent via {@link + * ExoPlayer#createMessage(PlayerMessage.Target)} on the playback thread: + * + *

        + *
      • Message with type {@link #MSG_SET_VIDEO_OUTPUT} to set the output surface. The message + * payload should be the target {@link Surface} or {@link VideoDecoderOutputBufferRenderer}, + * or null. Other non-null payloads have the effect of clearing the output. + *
      • Message with type {@link #MSG_SET_VIDEO_FRAME_METADATA_LISTENER} to set a listener for + * metadata associated with frames being rendered. The message payload should be the {@link + * VideoFrameMetadataListener}, or null. + *
      + */ +public abstract class DecoderVideoRenderer extends BaseRenderer { + + private static final String TAG = "DecoderVideoRenderer"; + + /** Decoder reinitialization states. */ + @Documented + @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) + @IntDef({ + REINITIALIZATION_STATE_NONE, + REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM, + REINITIALIZATION_STATE_WAIT_END_OF_STREAM + }) + private @interface ReinitializationState {} + /** The decoder does not need to be re-initialized. */ + private static final int REINITIALIZATION_STATE_NONE = 0; + /** + * The input format has changed in a way that requires the decoder to be re-initialized, but we + * haven't yet signaled an end of stream to the existing decoder. We need to do so in order to + * ensure that it outputs any remaining buffers before we release it. + */ + private static final int REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM = 1; + /** + * The input format has changed in a way that requires the decoder to be re-initialized, and we've + * signaled an end of stream to the existing decoder. We're waiting for the decoder to output an + * end of stream signal to indicate that it has output any remaining buffers before we release it. + */ + private static final int REINITIALIZATION_STATE_WAIT_END_OF_STREAM = 2; + + private final long allowedJoiningTimeMs; + private final int maxDroppedFramesToNotify; + private final EventDispatcher eventDispatcher; + private final TimedValueQueue formatQueue; + private final DecoderInputBuffer flagsOnlyBuffer; + + private Format inputFormat; + private Format outputFormat; + + @Nullable + private Decoder< + DecoderInputBuffer, ? extends VideoDecoderOutputBuffer, ? extends DecoderException> + decoder; + + private DecoderInputBuffer inputBuffer; + private VideoDecoderOutputBuffer outputBuffer; + private @VideoOutputMode int outputMode; + @Nullable private Object output; + @Nullable private Surface outputSurface; + @Nullable private VideoDecoderOutputBufferRenderer outputBufferRenderer; + @Nullable private VideoFrameMetadataListener frameMetadataListener; + + @Nullable private DrmSession decoderDrmSession; + @Nullable private DrmSession sourceDrmSession; + + private @ReinitializationState int decoderReinitializationState; + private boolean decoderReceivedBuffers; + + private boolean renderedFirstFrameAfterReset; + private boolean mayRenderFirstFrameAfterEnableIfNotStarted; + private boolean renderedFirstFrameAfterEnable; + private long initialPositionUs; + private long joiningDeadlineMs; + private boolean waitingForFirstSampleInFormat; + + private boolean inputStreamEnded; + private boolean outputStreamEnded; + @Nullable private VideoSize reportedVideoSize; + + private long droppedFrameAccumulationStartTimeMs; + private int droppedFrames; + private int consecutiveDroppedFrameCount; + private int buffersInCodecCount; + private long lastRenderTimeUs; + private long outputStreamOffsetUs; + + /** Decoder event counters used for debugging purposes. */ + protected DecoderCounters decoderCounters; + + /** + * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer + * can attempt to seamlessly join an ongoing playback. + * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be + * null if delivery of events is not required. + * @param eventListener A listener of events. May be null if delivery of events is not required. 
+ * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between + * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}. + */ + protected DecoderVideoRenderer( + long allowedJoiningTimeMs, + @Nullable Handler eventHandler, + @Nullable VideoRendererEventListener eventListener, + int maxDroppedFramesToNotify) { + super(C.TRACK_TYPE_VIDEO); + this.allowedJoiningTimeMs = allowedJoiningTimeMs; + this.maxDroppedFramesToNotify = maxDroppedFramesToNotify; + joiningDeadlineMs = C.TIME_UNSET; + clearReportedVideoSize(); + formatQueue = new TimedValueQueue<>(); + flagsOnlyBuffer = DecoderInputBuffer.newNoDataInstance(); + eventDispatcher = new EventDispatcher(eventHandler, eventListener); + decoderReinitializationState = REINITIALIZATION_STATE_NONE; + outputMode = C.VIDEO_OUTPUT_MODE_NONE; + } + + // BaseRenderer implementation. + + @Override + public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException { + if (outputStreamEnded) { + return; + } + + if (inputFormat == null) { + // We don't have a format yet, so try and read one. + FormatHolder formatHolder = getFormatHolder(); + flagsOnlyBuffer.clear(); + @ReadDataResult int result = readSource(formatHolder, flagsOnlyBuffer, FLAG_REQUIRE_FORMAT); + if (result == C.RESULT_FORMAT_READ) { + onInputFormatChanged(formatHolder); + } else if (result == C.RESULT_BUFFER_READ) { + // End of stream read having not read a format. + Assertions.checkState(flagsOnlyBuffer.isEndOfStream()); + inputStreamEnded = true; + outputStreamEnded = true; + return; + } else { + // We still don't have a format and can't make progress without one. + return; + } + } + + // If we don't have a decoder yet, we need to instantiate one. + maybeInitDecoder(); + + if (decoder != null) { + try { + // Rendering loop. + TraceUtil.beginSection("drainAndFeed"); + while (drainOutputBuffer(positionUs, elapsedRealtimeUs)) {} + while (feedInputBuffer()) {} + TraceUtil.endSection(); + } catch (DecoderException e) { + Log.e(TAG, "Video codec error", e); + eventDispatcher.videoCodecError(e); + throw createRendererException(e, inputFormat, PlaybackException.ERROR_CODE_DECODING_FAILED); + } + decoderCounters.ensureUpdated(); + } + } + + @Override + public boolean isEnded() { + return outputStreamEnded; + } + + @Override + public boolean isReady() { + if (inputFormat != null + && (isSourceReady() || outputBuffer != null) + && (renderedFirstFrameAfterReset || !hasOutput())) { + // Ready. If we were joining then we've now joined, so clear the joining deadline. + joiningDeadlineMs = C.TIME_UNSET; + return true; + } else if (joiningDeadlineMs == C.TIME_UNSET) { + // Not joining. + return false; + } else if (SystemClock.elapsedRealtime() < joiningDeadlineMs) { + // Joining and still within the joining deadline. + return true; + } else { + // The joining deadline has been exceeded. Give up and clear the deadline. + joiningDeadlineMs = C.TIME_UNSET; + return false; + } + } + + // PlayerMessage.Target implementation. + + @Override + public void handleMessage(@MessageType int messageType, @Nullable Object message) + throws ExoPlaybackException { + if (messageType == MSG_SET_VIDEO_OUTPUT) { + setOutput(message); + } else if (messageType == MSG_SET_VIDEO_FRAME_METADATA_LISTENER) { + frameMetadataListener = (VideoFrameMetadataListener) message; + } else { + super.handleMessage(messageType, message); + } + } + + // Protected methods. 
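
As the class Javadoc above spells out, output surfaces reach a DecoderVideoRenderer through player messages rather than direct setters. A sketch of the calling side; the helper is hypothetical, and Renderer.MSG_SET_VIDEO_OUTPUT is the constant handled in handleMessage above:

    import android.view.Surface;
    import com.google.android.exoplayer2.ExoPlayer;
    import com.google.android.exoplayer2.Renderer;

    final class VideoOutputMessageSketch {
      /** Routes a Surface to a specific video renderer on the playback thread. */
      static void setVideoOutput(ExoPlayer player, Renderer videoRenderer, Surface surface) {
        player
            .createMessage(videoRenderer)
            .setType(Renderer.MSG_SET_VIDEO_OUTPUT)
            .setPayload(surface)
            .send();
      }
    }
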
+ + @Override + protected void onEnabled(boolean joining, boolean mayRenderStartOfStream) + throws ExoPlaybackException { + decoderCounters = new DecoderCounters(); + eventDispatcher.enabled(decoderCounters); + mayRenderFirstFrameAfterEnableIfNotStarted = mayRenderStartOfStream; + renderedFirstFrameAfterEnable = false; + } + + @Override + protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException { + inputStreamEnded = false; + outputStreamEnded = false; + clearRenderedFirstFrame(); + initialPositionUs = C.TIME_UNSET; + consecutiveDroppedFrameCount = 0; + if (decoder != null) { + flushDecoder(); + } + if (joining) { + setJoiningDeadlineMs(); + } else { + joiningDeadlineMs = C.TIME_UNSET; + } + formatQueue.clear(); + } + + @Override + protected void onStarted() { + droppedFrames = 0; + droppedFrameAccumulationStartTimeMs = SystemClock.elapsedRealtime(); + lastRenderTimeUs = SystemClock.elapsedRealtime() * 1000; + } + + @Override + protected void onStopped() { + joiningDeadlineMs = C.TIME_UNSET; + maybeNotifyDroppedFrames(); + } + + @Override + protected void onDisabled() { + inputFormat = null; + clearReportedVideoSize(); + clearRenderedFirstFrame(); + try { + setSourceDrmSession(null); + releaseDecoder(); + } finally { + eventDispatcher.disabled(decoderCounters); + } + } + + @Override + protected void onStreamChanged(Format[] formats, long startPositionUs, long offsetUs) + throws ExoPlaybackException { + // TODO: This shouldn't just update the output stream offset as long as there are still buffers + // of the previous stream in the decoder. It should also make sure to render the first frame of + // the next stream if the playback position reached the new stream. + outputStreamOffsetUs = offsetUs; + super.onStreamChanged(formats, startPositionUs, offsetUs); + } + + /** + * Flushes the decoder. + * + * @throws ExoPlaybackException If an error occurs reinitializing a decoder. + */ + @CallSuper + protected void flushDecoder() throws ExoPlaybackException { + buffersInCodecCount = 0; + if (decoderReinitializationState != REINITIALIZATION_STATE_NONE) { + releaseDecoder(); + maybeInitDecoder(); + } else { + inputBuffer = null; + if (outputBuffer != null) { + outputBuffer.release(); + outputBuffer = null; + } + decoder.flush(); + decoderReceivedBuffers = false; + } + } + + /** Releases the decoder. */ + @CallSuper + protected void releaseDecoder() { + inputBuffer = null; + outputBuffer = null; + decoderReinitializationState = REINITIALIZATION_STATE_NONE; + decoderReceivedBuffers = false; + buffersInCodecCount = 0; + if (decoder != null) { + decoderCounters.decoderReleaseCount++; + decoder.release(); + eventDispatcher.decoderReleased(decoder.getName()); + decoder = null; + } + setDecoderDrmSession(null); + } + + /** + * Called when a new format is read from the upstream source. + * + * @param formatHolder A {@link FormatHolder} that holds the new {@link Format}. + * @throws ExoPlaybackException If an error occurs (re-)initializing the decoder. 
+ */ + @CallSuper + protected void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException { + waitingForFirstSampleInFormat = true; + Format newFormat = Assertions.checkNotNull(formatHolder.format); + setSourceDrmSession(formatHolder.drmSession); + Format oldFormat = inputFormat; + inputFormat = newFormat; + + if (decoder == null) { + maybeInitDecoder(); + eventDispatcher.inputFormatChanged(inputFormat, /* decoderReuseEvaluation= */ null); + return; + } + + DecoderReuseEvaluation evaluation; + if (sourceDrmSession != decoderDrmSession) { + evaluation = + new DecoderReuseEvaluation( + decoder.getName(), + oldFormat, + newFormat, + REUSE_RESULT_NO, + DISCARD_REASON_DRM_SESSION_CHANGED); + } else { + evaluation = canReuseDecoder(decoder.getName(), oldFormat, newFormat); + } + + if (evaluation.result == REUSE_RESULT_NO) { + if (decoderReceivedBuffers) { + // Signal end of stream and wait for any final output buffers before re-initialization. + decoderReinitializationState = REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM; + } else { + // There aren't any final output buffers, so release the decoder immediately. + releaseDecoder(); + maybeInitDecoder(); + } + } + eventDispatcher.inputFormatChanged(inputFormat, evaluation); + } + + /** + * Called immediately before an input buffer is queued into the decoder. + * + *

<p>
      The default implementation is a no-op. + * + * @param buffer The buffer that will be queued. + */ + protected void onQueueInputBuffer(DecoderInputBuffer buffer) { + // Do nothing. + } + + /** + * Called when an output buffer is successfully processed. + * + * @param presentationTimeUs The timestamp associated with the output buffer. + */ + @CallSuper + protected void onProcessedOutputBuffer(long presentationTimeUs) { + buffersInCodecCount--; + } + + /** + * Returns whether the buffer being processed should be dropped. + * + * @param earlyUs The time until the buffer should be presented in microseconds. A negative value + * indicates that the buffer is late. + * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds, + * measured at the start of the current iteration of the rendering loop. + */ + protected boolean shouldDropOutputBuffer(long earlyUs, long elapsedRealtimeUs) { + return isBufferLate(earlyUs); + } + + /** + * Returns whether to drop all buffers from the buffer being processed to the keyframe at or after + * the current playback position, if possible. + * + * @param earlyUs The time until the current buffer should be presented in microseconds. A + * negative value indicates that the buffer is late. + * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds, + * measured at the start of the current iteration of the rendering loop. + */ + protected boolean shouldDropBuffersToKeyframe(long earlyUs, long elapsedRealtimeUs) { + return isBufferVeryLate(earlyUs); + } + + /** + * Returns whether to force rendering an output buffer. + * + * @param earlyUs The time until the current buffer should be presented in microseconds. A + * negative value indicates that the buffer is late. + * @param elapsedSinceLastRenderUs The elapsed time since the last output buffer was rendered, in + * microseconds. + * @return Returns whether to force rendering an output buffer. + */ + protected boolean shouldForceRenderOutputBuffer(long earlyUs, long elapsedSinceLastRenderUs) { + return isBufferLate(earlyUs) && elapsedSinceLastRenderUs > 100000; + } + + /** + * Skips the specified output buffer and releases it. + * + * @param outputBuffer The output buffer to skip. + */ + protected void skipOutputBuffer(VideoDecoderOutputBuffer outputBuffer) { + decoderCounters.skippedOutputBufferCount++; + outputBuffer.release(); + } + + /** + * Drops the specified output buffer and releases it. + * + * @param outputBuffer The output buffer to drop. + */ + protected void dropOutputBuffer(VideoDecoderOutputBuffer outputBuffer) { + updateDroppedBufferCounters( + /* droppedInputBufferCount= */ 0, /* droppedDecoderBufferCount= */ 1); + outputBuffer.release(); + } + + /** + * Drops frames from the current output buffer to the next keyframe at or before the playback + * position. If no such keyframe exists, as the playback position is inside the same group of + * pictures as the buffer being processed, returns {@code false}. Returns {@code true} otherwise. + * + * @param positionUs The current playback position, in microseconds. + * @return Whether any buffers were dropped. + * @throws ExoPlaybackException If an error occurs flushing the decoder. 
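The protected should* methods above are the intended extension points for tuning the drop policy. As an illustrative sketch only — inside a hypothetical concrete subclass of this renderer, not code from the diff — a build could tolerate more lateness before dropping:

    @Override
    protected boolean shouldDropOutputBuffer(long earlyUs, long elapsedRealtimeUs) {
      // The default drops anything more than ~30 ms late (see isBufferLate below);
      // this hypothetical override waits until a frame is 50 ms late.
      return earlyUs < -50_000;
    }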
+ */ + protected boolean maybeDropBuffersToKeyframe(long positionUs) throws ExoPlaybackException { + int droppedSourceBufferCount = skipSource(positionUs); + if (droppedSourceBufferCount == 0) { + return false; + } + decoderCounters.droppedToKeyframeCount++; + // We dropped some buffers to catch up, so update the decoder counters and flush the decoder, + // which releases all pending buffers buffers including the current output buffer. + updateDroppedBufferCounters( + droppedSourceBufferCount, /* droppedDecoderBufferCount= */ buffersInCodecCount); + flushDecoder(); + return true; + } + + /** + * Updates local counters and {@link #decoderCounters} to reflect that buffers were dropped. + * + * @param droppedInputBufferCount The number of buffers dropped from the source before being + * passed to the decoder. + * @param droppedDecoderBufferCount The number of buffers dropped after being passed to the + * decoder. + */ + protected void updateDroppedBufferCounters( + int droppedInputBufferCount, int droppedDecoderBufferCount) { + decoderCounters.droppedInputBufferCount += droppedInputBufferCount; + int totalDroppedBufferCount = droppedInputBufferCount + droppedDecoderBufferCount; + decoderCounters.droppedBufferCount += totalDroppedBufferCount; + droppedFrames += totalDroppedBufferCount; + consecutiveDroppedFrameCount += totalDroppedBufferCount; + decoderCounters.maxConsecutiveDroppedBufferCount = + max(consecutiveDroppedFrameCount, decoderCounters.maxConsecutiveDroppedBufferCount); + if (maxDroppedFramesToNotify > 0 && droppedFrames >= maxDroppedFramesToNotify) { + maybeNotifyDroppedFrames(); + } + } + + /** + * Creates a decoder for the given format. + * + * @param format The format for which a decoder is required. + * @param cryptoConfig The {@link CryptoConfig} object required for decoding encrypted content. + * May be null and can be ignored if decoder does not handle encrypted content. + * @return The decoder. + * @throws DecoderException If an error occurred creating a suitable decoder. + */ + protected abstract Decoder< + DecoderInputBuffer, ? extends VideoDecoderOutputBuffer, ? extends DecoderException> + createDecoder(Format format, @Nullable CryptoConfig cryptoConfig) throws DecoderException; + + /** + * Renders the specified output buffer. + * + *

<p>
      The implementation of this method takes ownership of the output buffer and is responsible + * for calling {@link VideoDecoderOutputBuffer#release()} either immediately or in the future. + * + * @param outputBuffer {@link VideoDecoderOutputBuffer} to render. + * @param presentationTimeUs Presentation time in microseconds. + * @param outputFormat Output {@link Format}. + * @throws DecoderException If an error occurs when rendering the output buffer. + */ + protected void renderOutputBuffer( + VideoDecoderOutputBuffer outputBuffer, long presentationTimeUs, Format outputFormat) + throws DecoderException { + if (frameMetadataListener != null) { + frameMetadataListener.onVideoFrameAboutToBeRendered( + presentationTimeUs, System.nanoTime(), outputFormat, /* mediaFormat= */ null); + } + lastRenderTimeUs = Util.msToUs(SystemClock.elapsedRealtime() * 1000); + int bufferMode = outputBuffer.mode; + boolean renderSurface = bufferMode == C.VIDEO_OUTPUT_MODE_SURFACE_YUV && outputSurface != null; + boolean renderYuv = bufferMode == C.VIDEO_OUTPUT_MODE_YUV && outputBufferRenderer != null; + if (!renderYuv && !renderSurface) { + dropOutputBuffer(outputBuffer); + } else { + maybeNotifyVideoSizeChanged(outputBuffer.width, outputBuffer.height); + if (renderYuv) { + outputBufferRenderer.setOutputBuffer(outputBuffer); + } else { + renderOutputBufferToSurface(outputBuffer, outputSurface); + } + consecutiveDroppedFrameCount = 0; + decoderCounters.renderedOutputBufferCount++; + maybeNotifyRenderedFirstFrame(); + } + } + + /** + * Renders the specified output buffer to the passed surface. + * + *

<p>
      The implementation of this method takes ownership of the output buffer and is responsible + * for calling {@link VideoDecoderOutputBuffer#release()} either immediately or in the future. + * + * @param outputBuffer {@link VideoDecoderOutputBuffer} to render. + * @param surface Output {@link Surface}. + * @throws DecoderException If an error occurs when rendering the output buffer. + */ + protected abstract void renderOutputBufferToSurface( + VideoDecoderOutputBuffer outputBuffer, Surface surface) throws DecoderException; + + /** Sets the video output. */ + protected final void setOutput(@Nullable Object output) { + if (output instanceof Surface) { + outputSurface = (Surface) output; + outputBufferRenderer = null; + outputMode = C.VIDEO_OUTPUT_MODE_SURFACE_YUV; + } else if (output instanceof VideoDecoderOutputBufferRenderer) { + outputSurface = null; + outputBufferRenderer = (VideoDecoderOutputBufferRenderer) output; + outputMode = C.VIDEO_OUTPUT_MODE_YUV; + } else { + // Handle unsupported outputs by clearing the output. + output = null; + outputSurface = null; + outputBufferRenderer = null; + outputMode = C.VIDEO_OUTPUT_MODE_NONE; + } + if (this.output != output) { + this.output = output; + if (output != null) { + if (decoder != null) { + setDecoderOutputMode(outputMode); + } + onOutputChanged(); + } else { + // The output has been removed. We leave the outputMode of the underlying decoder unchanged + // in anticipation that a subsequent output will likely be of the same type. + onOutputRemoved(); + } + } else if (output != null) { + // The output is unchanged and non-null. + onOutputReset(); + } + } + + /** + * Sets output mode of the decoder. + * + * @param outputMode Output mode. + */ + protected abstract void setDecoderOutputMode(@VideoOutputMode int outputMode); + + /** + * Evaluates whether the existing decoder can be reused for a new {@link Format}. + * + *

<p>
      The default implementation does not allow decoder reuse. + * + * @param oldFormat The previous format. + * @param newFormat The new format. + * @return The result of the evaluation. + */ + protected DecoderReuseEvaluation canReuseDecoder( + String decoderName, Format oldFormat, Format newFormat) { + return new DecoderReuseEvaluation( + decoderName, oldFormat, newFormat, REUSE_RESULT_NO, DISCARD_REASON_REUSE_NOT_IMPLEMENTED); + } + + // Internal methods. + + private void setSourceDrmSession(@Nullable DrmSession session) { + DrmSession.replaceSession(sourceDrmSession, session); + sourceDrmSession = session; + } + + private void setDecoderDrmSession(@Nullable DrmSession session) { + DrmSession.replaceSession(decoderDrmSession, session); + decoderDrmSession = session; + } + + private void maybeInitDecoder() throws ExoPlaybackException { + if (decoder != null) { + return; + } + + setDecoderDrmSession(sourceDrmSession); + + CryptoConfig cryptoConfig = null; + if (decoderDrmSession != null) { + cryptoConfig = decoderDrmSession.getCryptoConfig(); + if (cryptoConfig == null) { + DrmSessionException drmError = decoderDrmSession.getError(); + if (drmError != null) { + // Continue for now. We may be able to avoid failure if a new input format causes the + // session to be replaced without it having been used. + } else { + // The drm session isn't open yet. + return; + } + } + } + + try { + long decoderInitializingTimestamp = SystemClock.elapsedRealtime(); + decoder = createDecoder(inputFormat, cryptoConfig); + setDecoderOutputMode(outputMode); + long decoderInitializedTimestamp = SystemClock.elapsedRealtime(); + eventDispatcher.decoderInitialized( + decoder.getName(), + decoderInitializedTimestamp, + decoderInitializedTimestamp - decoderInitializingTimestamp); + decoderCounters.decoderInitCount++; + } catch (DecoderException e) { + Log.e(TAG, "Video codec error", e); + eventDispatcher.videoCodecError(e); + throw createRendererException( + e, inputFormat, PlaybackException.ERROR_CODE_DECODER_INIT_FAILED); + } catch (OutOfMemoryError e) { + throw createRendererException( + e, inputFormat, PlaybackException.ERROR_CODE_DECODER_INIT_FAILED); + } + } + + private boolean feedInputBuffer() throws DecoderException, ExoPlaybackException { + if (decoder == null + || decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM + || inputStreamEnded) { + // We need to reinitialize the decoder or the input stream has ended. 
+ return false; + } + + if (inputBuffer == null) { + inputBuffer = decoder.dequeueInputBuffer(); + if (inputBuffer == null) { + return false; + } + } + + if (decoderReinitializationState == REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM) { + inputBuffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM); + decoder.queueInputBuffer(inputBuffer); + inputBuffer = null; + decoderReinitializationState = REINITIALIZATION_STATE_WAIT_END_OF_STREAM; + return false; + } + + FormatHolder formatHolder = getFormatHolder(); + switch (readSource(formatHolder, inputBuffer, /* readFlags= */ 0)) { + case C.RESULT_NOTHING_READ: + return false; + case C.RESULT_FORMAT_READ: + onInputFormatChanged(formatHolder); + return true; + case C.RESULT_BUFFER_READ: + if (inputBuffer.isEndOfStream()) { + inputStreamEnded = true; + decoder.queueInputBuffer(inputBuffer); + inputBuffer = null; + return false; + } + if (waitingForFirstSampleInFormat) { + formatQueue.add(inputBuffer.timeUs, inputFormat); + waitingForFirstSampleInFormat = false; + } + inputBuffer.flip(); + inputBuffer.format = inputFormat; + onQueueInputBuffer(inputBuffer); + decoder.queueInputBuffer(inputBuffer); + buffersInCodecCount++; + decoderReceivedBuffers = true; + decoderCounters.queuedInputBufferCount++; + inputBuffer = null; + return true; + default: + throw new IllegalStateException(); + } + } + + /** + * Attempts to dequeue an output buffer from the decoder and, if successful, passes it to {@link + * #processOutputBuffer(long, long)}. + * + * @param positionUs The player's current position. + * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds, + * measured at the start of the current iteration of the rendering loop. + * @return Whether it may be possible to drain more output data. + * @throws ExoPlaybackException If an error occurs draining the output buffer. + */ + private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs) + throws ExoPlaybackException, DecoderException { + if (outputBuffer == null) { + outputBuffer = decoder.dequeueOutputBuffer(); + if (outputBuffer == null) { + return false; + } + decoderCounters.skippedOutputBufferCount += outputBuffer.skippedOutputBufferCount; + buffersInCodecCount -= outputBuffer.skippedOutputBufferCount; + } + + if (outputBuffer.isEndOfStream()) { + if (decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM) { + // We're waiting to re-initialize the decoder, and have now processed all final buffers. + releaseDecoder(); + maybeInitDecoder(); + } else { + outputBuffer.release(); + outputBuffer = null; + outputStreamEnded = true; + } + return false; + } + + boolean processedOutputBuffer = processOutputBuffer(positionUs, elapsedRealtimeUs); + if (processedOutputBuffer) { + onProcessedOutputBuffer(outputBuffer.timeUs); + outputBuffer = null; + } + return processedOutputBuffer; + } + + /** + * Processes {@link #outputBuffer} by rendering it, skipping it or doing nothing, and returns + * whether it may be possible to process another output buffer. + * + * @param positionUs The player's current position. + * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds, + * measured at the start of the current iteration of the rendering loop. + * @return Whether it may be possible to drain another output buffer. + * @throws ExoPlaybackException If an error occurs processing the output buffer. 
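processOutputBuffer, which follows, chooses between rendering, dropping and waiting based on how early or late the buffer is. As a summary sketch (not part of the diff) of the thresholds used below and in isBufferLate / isBufferVeryLate at the end of the class:

    long earlyUs = outputBuffer.timeUs - positionUs; // > 0: buffer is early, < 0: late
    // earlyUs <   30_000 -> render: the buffer is due within ~30 ms (or late but not dropped)
    // earlyUs <  -30_000 -> isBufferLate: the default shouldDropOutputBuffer drops it
    // earlyUs < -500_000 -> isBufferVeryLate: the default shouldDropBuffersToKeyframe skips ahead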
+ */ + private boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs) + throws ExoPlaybackException, DecoderException { + if (initialPositionUs == C.TIME_UNSET) { + initialPositionUs = positionUs; + } + + long earlyUs = outputBuffer.timeUs - positionUs; + if (!hasOutput()) { + // Skip frames in sync with playback, so we'll be at the right frame if the mode changes. + if (isBufferLate(earlyUs)) { + skipOutputBuffer(outputBuffer); + return true; + } + return false; + } + + long presentationTimeUs = outputBuffer.timeUs - outputStreamOffsetUs; + Format format = formatQueue.pollFloor(presentationTimeUs); + if (format != null) { + outputFormat = format; + } + + long elapsedRealtimeNowUs = SystemClock.elapsedRealtime() * 1000; + long elapsedSinceLastRenderUs = elapsedRealtimeNowUs - lastRenderTimeUs; + boolean isStarted = getState() == STATE_STARTED; + boolean shouldRenderFirstFrame = + !renderedFirstFrameAfterEnable + ? (isStarted || mayRenderFirstFrameAfterEnableIfNotStarted) + : !renderedFirstFrameAfterReset; + // TODO: We shouldn't force render while we are joining an ongoing playback. + if (shouldRenderFirstFrame + || (isStarted && shouldForceRenderOutputBuffer(earlyUs, elapsedSinceLastRenderUs))) { + renderOutputBuffer(outputBuffer, presentationTimeUs, outputFormat); + return true; + } + + if (!isStarted || positionUs == initialPositionUs) { + return false; + } + + // TODO: Treat dropped buffers as skipped while we are joining an ongoing playback. + if (shouldDropBuffersToKeyframe(earlyUs, elapsedRealtimeUs) + && maybeDropBuffersToKeyframe(positionUs)) { + return false; + } else if (shouldDropOutputBuffer(earlyUs, elapsedRealtimeUs)) { + dropOutputBuffer(outputBuffer); + return true; + } + + if (earlyUs < 30000) { + renderOutputBuffer(outputBuffer, presentationTimeUs, outputFormat); + return true; + } + + return false; + } + + private boolean hasOutput() { + return outputMode != C.VIDEO_OUTPUT_MODE_NONE; + } + + private void onOutputChanged() { + // If we know the video size, report it again immediately. + maybeRenotifyVideoSizeChanged(); + // We haven't rendered to the new output yet. + clearRenderedFirstFrame(); + if (getState() == STATE_STARTED) { + setJoiningDeadlineMs(); + } + } + + private void onOutputRemoved() { + clearReportedVideoSize(); + clearRenderedFirstFrame(); + } + + private void onOutputReset() { + // The output is unchanged and non-null. If we know the video size and/or have already + // rendered to the output, report these again immediately. + maybeRenotifyVideoSizeChanged(); + maybeRenotifyRenderedFirstFrame(); + } + + private void setJoiningDeadlineMs() { + joiningDeadlineMs = + allowedJoiningTimeMs > 0 + ? 
(SystemClock.elapsedRealtime() + allowedJoiningTimeMs) + : C.TIME_UNSET; + } + + private void clearRenderedFirstFrame() { + renderedFirstFrameAfterReset = false; + } + + private void maybeNotifyRenderedFirstFrame() { + renderedFirstFrameAfterEnable = true; + if (!renderedFirstFrameAfterReset) { + renderedFirstFrameAfterReset = true; + eventDispatcher.renderedFirstFrame(output); + } + } + + private void maybeRenotifyRenderedFirstFrame() { + if (renderedFirstFrameAfterReset) { + eventDispatcher.renderedFirstFrame(output); + } + } + + private void clearReportedVideoSize() { + reportedVideoSize = null; + } + + private void maybeNotifyVideoSizeChanged(int width, int height) { + if (reportedVideoSize == null + || reportedVideoSize.width != width + || reportedVideoSize.height != height) { + reportedVideoSize = new VideoSize(width, height); + eventDispatcher.videoSizeChanged(reportedVideoSize); + } + } + + private void maybeRenotifyVideoSizeChanged() { + if (reportedVideoSize != null) { + eventDispatcher.videoSizeChanged(reportedVideoSize); + } + } + + private void maybeNotifyDroppedFrames() { + if (droppedFrames > 0) { + long now = SystemClock.elapsedRealtime(); + long elapsedMs = now - droppedFrameAccumulationStartTimeMs; + eventDispatcher.droppedFrames(droppedFrames, elapsedMs); + droppedFrames = 0; + droppedFrameAccumulationStartTimeMs = now; + } + } + + private static boolean isBufferLate(long earlyUs) { + // Class a buffer as late if it should have been presented more than 30 ms ago. + return earlyUs < -30000; + } + + private static boolean isBufferVeryLate(long earlyUs) { + // Class a buffer as very late if it should have been presented more than 500 ms ago. + return earlyUs < -500000; + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/DolbyVisionConfig.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/DolbyVisionConfig.java index 3a13540e12..9321330061 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/DolbyVisionConfig.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/DolbyVisionConfig.java @@ -45,7 +45,7 @@ public static DolbyVisionConfig parse(ParsableByteArray data) { } else { return null; } - String codecs = codecsPrefix + ".0" + dvProfile + ".0" + dvLevel; + String codecs = codecsPrefix + ".0" + dvProfile + (dvLevel < 10 ? ".0" : ".") + dvLevel; return new DolbyVisionConfig(dvProfile, dvLevel, codecs); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/DummySurface.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/DummySurface.java deleted file mode 100644 index 0a900999b1..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/DummySurface.java +++ /dev/null @@ -1,228 +0,0 @@ -/* - * Copyright (C) 2017 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.google.android.exoplayer2.video; - -import static com.google.android.exoplayer2.util.EGLSurfaceTexture.SECURE_MODE_NONE; -import static com.google.android.exoplayer2.util.EGLSurfaceTexture.SECURE_MODE_PROTECTED_PBUFFER; -import static com.google.android.exoplayer2.util.EGLSurfaceTexture.SECURE_MODE_SURFACELESS_CONTEXT; - -import android.annotation.TargetApi; -import android.content.Context; -import android.graphics.SurfaceTexture; -import android.os.Handler; -import android.os.Handler.Callback; -import android.os.HandlerThread; -import android.os.Message; -import android.view.Surface; -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.EGLSurfaceTexture; -import com.google.android.exoplayer2.util.EGLSurfaceTexture.SecureMode; -import com.google.android.exoplayer2.util.GlUtil; -import com.google.android.exoplayer2.util.Log; -import com.google.android.exoplayer2.util.Util; -import org.checkerframework.checker.nullness.qual.MonotonicNonNull; - -/** A dummy {@link Surface}. */ -@TargetApi(17) -public final class DummySurface extends Surface { - - private static final String TAG = "DummySurface"; - - /** - * Whether the surface is secure. - */ - public final boolean secure; - - private static @SecureMode int secureMode; - private static boolean secureModeInitialized; - - private final DummySurfaceThread thread; - private boolean threadReleased; - - /** - * Returns whether the device supports secure dummy surfaces. - * - * @param context Any {@link Context}. - * @return Whether the device supports secure dummy surfaces. - */ - public static synchronized boolean isSecureSupported(Context context) { - if (!secureModeInitialized) { - secureMode = getSecureMode(context); - secureModeInitialized = true; - } - return secureMode != SECURE_MODE_NONE; - } - - /** - * Returns a newly created dummy surface. The surface must be released by calling {@link #release} - * when it's no longer required. - *

<p>
      - * Must only be called if {@link Util#SDK_INT} is 17 or higher. - * - * @param context Any {@link Context}. - * @param secure Whether a secure surface is required. Must only be requested if - * {@link #isSecureSupported(Context)} returns {@code true}. - * @throws IllegalStateException If a secure surface is requested on a device for which - * {@link #isSecureSupported(Context)} returns {@code false}. - */ - public static DummySurface newInstanceV17(Context context, boolean secure) { - assertApiLevel17OrHigher(); - Assertions.checkState(!secure || isSecureSupported(context)); - DummySurfaceThread thread = new DummySurfaceThread(); - return thread.init(secure ? secureMode : SECURE_MODE_NONE); - } - - private DummySurface(DummySurfaceThread thread, SurfaceTexture surfaceTexture, boolean secure) { - super(surfaceTexture); - this.thread = thread; - this.secure = secure; - } - - @Override - public void release() { - super.release(); - // The Surface may be released multiple times (explicitly and by Surface.finalize()). The - // implementation of super.release() has its own deduplication logic. Below we need to - // deduplicate ourselves. Synchronization is required as we don't control the thread on which - // Surface.finalize() is called. - synchronized (thread) { - if (!threadReleased) { - thread.release(); - threadReleased = true; - } - } - } - - private static void assertApiLevel17OrHigher() { - if (Util.SDK_INT < 17) { - throw new UnsupportedOperationException("Unsupported prior to API level 17"); - } - } - - @SecureMode - private static int getSecureMode(Context context) { - if (GlUtil.isProtectedContentExtensionSupported(context)) { - if (GlUtil.isSurfacelessContextExtensionSupported()) { - return SECURE_MODE_SURFACELESS_CONTEXT; - } else { - // If we can't use surfaceless contexts, we use a protected 1 * 1 pixel buffer surface. - // This may require support for EXT_protected_surface, but in practice it works on some - // devices that don't have that extension. See also - // https://github.com/google/ExoPlayer/issues/3558. - return SECURE_MODE_PROTECTED_PBUFFER; - } - } else { - return SECURE_MODE_NONE; - } - } - - private static class DummySurfaceThread extends HandlerThread implements Callback { - - private static final int MSG_INIT = 1; - private static final int MSG_RELEASE = 2; - - private @MonotonicNonNull EGLSurfaceTexture eglSurfaceTexture; - private @MonotonicNonNull Handler handler; - @Nullable private Error initError; - @Nullable private RuntimeException initException; - @Nullable private DummySurface surface; - - public DummySurfaceThread() { - super("dummySurface"); - } - - public DummySurface init(@SecureMode int secureMode) { - start(); - handler = new Handler(getLooper(), /* callback= */ this); - eglSurfaceTexture = new EGLSurfaceTexture(handler); - boolean wasInterrupted = false; - synchronized (this) { - handler.obtainMessage(MSG_INIT, secureMode, 0).sendToTarget(); - while (surface == null && initException == null && initError == null) { - try { - wait(); - } catch (InterruptedException e) { - wasInterrupted = true; - } - } - } - if (wasInterrupted) { - // Restore the interrupted status. 
- Thread.currentThread().interrupt(); - } - if (initException != null) { - throw initException; - } else if (initError != null) { - throw initError; - } else { - return Assertions.checkNotNull(surface); - } - } - - public void release() { - Assertions.checkNotNull(handler); - handler.sendEmptyMessage(MSG_RELEASE); - } - - @Override - public boolean handleMessage(Message msg) { - switch (msg.what) { - case MSG_INIT: - try { - initInternal(/* secureMode= */ msg.arg1); - } catch (RuntimeException e) { - Log.e(TAG, "Failed to initialize dummy surface", e); - initException = e; - } catch (Error e) { - Log.e(TAG, "Failed to initialize dummy surface", e); - initError = e; - } finally { - synchronized (this) { - notify(); - } - } - return true; - case MSG_RELEASE: - try { - releaseInternal(); - } catch (Throwable e) { - Log.e(TAG, "Failed to release dummy surface", e); - } finally { - quit(); - } - return true; - default: - return true; - } - } - - private void initInternal(@SecureMode int secureMode) { - Assertions.checkNotNull(eglSurfaceTexture); - eglSurfaceTexture.init(secureMode); - this.surface = - new DummySurface( - this, eglSurfaceTexture.getSurfaceTexture(), secureMode != SECURE_MODE_NONE); - } - - private void releaseInternal() { - Assertions.checkNotNull(eglSurfaceTexture); - eglSurfaceTexture.release(); - } - - } - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/FixedFrameRateEstimator.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/FixedFrameRateEstimator.java new file mode 100644 index 0000000000..a4315327f3 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/FixedFrameRateEstimator.java @@ -0,0 +1,220 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.video; + +import androidx.annotation.VisibleForTesting; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import java.util.Arrays; + +/** + * Attempts to detect and refine a fixed frame rate estimate based on frame presentation timestamps. + */ +/* package */ final class FixedFrameRateEstimator { + + /** The number of consecutive matching frame durations required to detect a fixed frame rate. */ + public static final int CONSECUTIVE_MATCHING_FRAME_DURATIONS_FOR_SYNC = 15; + /** + * The maximum amount frame durations can differ for them to be considered matching, in + * nanoseconds. + * + *

<p>
      This constant is set to 1ms to account for container formats that only represent frame + * presentation timestamps to the nearest millisecond. In such cases, frame durations need to + * switch between values that are 1ms apart to achieve common fixed frame rates (e.g., 30fps + * content will need frames that are 33ms and 34ms). + */ + @VisibleForTesting static final long MAX_MATCHING_FRAME_DIFFERENCE_NS = 1_000_000; + + private Matcher currentMatcher; + private Matcher candidateMatcher; + private boolean candidateMatcherActive; + private boolean switchToCandidateMatcherWhenSynced; + private long lastFramePresentationTimeNs; + private int framesWithoutSyncCount; + + public FixedFrameRateEstimator() { + currentMatcher = new Matcher(); + candidateMatcher = new Matcher(); + lastFramePresentationTimeNs = C.TIME_UNSET; + } + + /** Resets the estimator. */ + public void reset() { + currentMatcher.reset(); + candidateMatcher.reset(); + candidateMatcherActive = false; + lastFramePresentationTimeNs = C.TIME_UNSET; + framesWithoutSyncCount = 0; + } + + /** + * Called with each frame presentation timestamp. + * + * @param framePresentationTimeNs The frame presentation timestamp, in nanoseconds. + */ + public void onNextFrame(long framePresentationTimeNs) { + currentMatcher.onNextFrame(framePresentationTimeNs); + if (currentMatcher.isSynced() && !switchToCandidateMatcherWhenSynced) { + candidateMatcherActive = false; + } else if (lastFramePresentationTimeNs != C.TIME_UNSET) { + if (!candidateMatcherActive || candidateMatcher.isLastFrameOutlier()) { + // Reset the candidate with the last and current frame presentation timestamps, so that it + // will try and match against the duration of the previous frame. + candidateMatcher.reset(); + candidateMatcher.onNextFrame(lastFramePresentationTimeNs); + } + candidateMatcherActive = true; + candidateMatcher.onNextFrame(framePresentationTimeNs); + } + if (candidateMatcherActive && candidateMatcher.isSynced()) { + // The candidate matcher should be promoted to be the current matcher. The current matcher + // can be re-used as the next candidate matcher. + Matcher previousMatcher = currentMatcher; + currentMatcher = candidateMatcher; + candidateMatcher = previousMatcher; + candidateMatcherActive = false; + switchToCandidateMatcherWhenSynced = false; + } + lastFramePresentationTimeNs = framePresentationTimeNs; + framesWithoutSyncCount = currentMatcher.isSynced() ? 0 : framesWithoutSyncCount + 1; + } + + /** Returns whether the estimator has detected a fixed frame rate. */ + public boolean isSynced() { + return currentMatcher.isSynced(); + } + + /** Returns the number of frames since the estimator last detected a fixed frame rate. */ + public int getFramesWithoutSyncCount() { + return framesWithoutSyncCount; + } + + /** + * Returns the sum of all frame durations used to calculate the current fixed frame rate estimate, + * or {@link C#TIME_UNSET} if {@link #isSynced()} is {@code false}. + */ + public long getMatchingFrameDurationSumNs() { + return isSynced() ? currentMatcher.getMatchingFrameDurationSumNs() : C.TIME_UNSET; + } + + /** + * The currently detected fixed frame duration estimate in nanoseconds, or {@link C#TIME_UNSET} if + * {@link #isSynced()} is {@code false}. Whilst synced, the estimate is refined each time {@link + * #onNextFrame} is called with a new frame presentation timestamp. + */ + public long getFrameDurationNs() { + return isSynced() ? 
currentMatcher.getFrameDurationNs() : C.TIME_UNSET; + } + + /** + * The currently detected fixed frame rate estimate, or {@link Format#NO_VALUE} if {@link + * #isSynced()} is {@code false}. Whilst synced, the estimate is refined each time {@link + * #onNextFrame} is called with a new frame presentation timestamp. + */ + public float getFrameRate() { + return isSynced() + ? (float) ((double) C.NANOS_PER_SECOND / currentMatcher.getFrameDurationNs()) + : Format.NO_VALUE; + } + + /** Tries to match frame durations against the duration of the first frame it receives. */ + private static final class Matcher { + + private long firstFramePresentationTimeNs; + private long firstFrameDurationNs; + private long lastFramePresentationTimeNs; + private long frameCount; + + /** The total number of frames that have matched the frame duration being tracked. */ + private long matchingFrameCount; + /** The sum of the frame durations of all matching frames. */ + private long matchingFrameDurationSumNs; + /** Cyclic buffer of flags indicating whether the most recent frame durations were outliers. */ + private final boolean[] recentFrameOutlierFlags; + /** + * The number of recent frame durations that were outliers. Equal to the number of {@code true} + * values in {@link #recentFrameOutlierFlags}. + */ + private int recentFrameOutlierCount; + + public Matcher() { + recentFrameOutlierFlags = new boolean[CONSECUTIVE_MATCHING_FRAME_DURATIONS_FOR_SYNC]; + } + + public void reset() { + frameCount = 0; + matchingFrameCount = 0; + matchingFrameDurationSumNs = 0; + recentFrameOutlierCount = 0; + Arrays.fill(recentFrameOutlierFlags, false); + } + + public boolean isSynced() { + return frameCount > CONSECUTIVE_MATCHING_FRAME_DURATIONS_FOR_SYNC + && recentFrameOutlierCount == 0; + } + + public boolean isLastFrameOutlier() { + if (frameCount == 0) { + return false; + } + return recentFrameOutlierFlags[getRecentFrameOutlierIndex(frameCount - 1)]; + } + + public long getMatchingFrameDurationSumNs() { + return matchingFrameDurationSumNs; + } + + public long getFrameDurationNs() { + return matchingFrameCount == 0 ? 0 : (matchingFrameDurationSumNs / matchingFrameCount); + } + + public void onNextFrame(long framePresentationTimeNs) { + if (frameCount == 0) { + firstFramePresentationTimeNs = framePresentationTimeNs; + } else if (frameCount == 1) { + // This is the frame duration that the tracker will match against. 
+ firstFrameDurationNs = framePresentationTimeNs - firstFramePresentationTimeNs; + matchingFrameDurationSumNs = firstFrameDurationNs; + matchingFrameCount = 1; + } else { + long lastFrameDurationNs = framePresentationTimeNs - lastFramePresentationTimeNs; + int recentFrameOutlierIndex = getRecentFrameOutlierIndex(frameCount); + if (Math.abs(lastFrameDurationNs - firstFrameDurationNs) + <= MAX_MATCHING_FRAME_DIFFERENCE_NS) { + matchingFrameCount++; + matchingFrameDurationSumNs += lastFrameDurationNs; + if (recentFrameOutlierFlags[recentFrameOutlierIndex]) { + recentFrameOutlierFlags[recentFrameOutlierIndex] = false; + recentFrameOutlierCount--; + } + } else { + if (!recentFrameOutlierFlags[recentFrameOutlierIndex]) { + recentFrameOutlierFlags[recentFrameOutlierIndex] = true; + recentFrameOutlierCount++; + } + } + } + + frameCount++; + lastFramePresentationTimeNs = framePresentationTimeNs; + } + + private static int getRecentFrameOutlierIndex(long frameCount) { + return (int) (frameCount % CONSECUTIVE_MATCHING_FRAME_DURATIONS_FOR_SYNC); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/HevcConfig.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/HevcConfig.java index bb11ef0005..6b8e633555 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/HevcConfig.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/HevcConfig.java @@ -16,20 +16,17 @@ package com.google.android.exoplayer2.video; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.ParserException; +import com.google.android.exoplayer2.util.CodecSpecificDataUtil; import com.google.android.exoplayer2.util.NalUnitUtil; import com.google.android.exoplayer2.util.ParsableByteArray; import java.util.Collections; import java.util.List; -/** - * HEVC configuration data. - */ +/** HEVC configuration data. */ public final class HevcConfig { - @Nullable public final List initializationData; - public final int nalUnitLengthFieldLength; - /** * Parses HEVC configuration data. 
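Returning briefly to FixedFrameRateEstimator above: a hypothetical exercise of the class (it is package-private, so real callers such as VideoFrameReleaseHelper live in the same package) showing that a steady 30 fps input syncs after the required run of matching durations:

    FixedFrameRateEstimator estimator = new FixedFrameRateEstimator();
    long frameDurationNs = 1_000_000_000L / 30; // ~33.3 ms per frame
    for (int i = 0; i <= 20; i++) {
      estimator.onNextFrame(i * frameDurationNs);
    }
    // With more than CONSECUTIVE_MATCHING_FRAME_DURATIONS_FOR_SYNC (15) matching durations and
    // no outliers, isSynced() is true and getFrameRate() evaluates to approximately 30.
    boolean synced = estimator.isSynced();
    float estimatedFps = estimator.getFrameRate();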
* @@ -48,7 +45,7 @@ public static HevcConfig parse(ParsableByteArray data) throws ParserException { int csdLength = 0; int csdStartPosition = data.getPosition(); for (int i = 0; i < numberOfArrays; i++) { - data.skipBytes(1); // completeness (1), nal_unit_type (7) + data.skipBytes(1); // completeness (1), reserved (1), nal_unit_type (6) int numberOfNalUnits = data.readUnsignedShort(); for (int j = 0; j < numberOfNalUnits; j++) { int nalUnitLength = data.readUnsignedShort(); @@ -61,31 +58,95 @@ public static HevcConfig parse(ParsableByteArray data) throws ParserException { data.setPosition(csdStartPosition); byte[] buffer = new byte[csdLength]; int bufferPosition = 0; + int width = Format.NO_VALUE; + int height = Format.NO_VALUE; + float pixelWidthHeightRatio = 1; + @Nullable String codecs = null; for (int i = 0; i < numberOfArrays; i++) { - data.skipBytes(1); // completeness (1), nal_unit_type (7) + int nalUnitType = + data.readUnsignedByte() & 0x3F; // completeness (1), reserved (1), nal_unit_type (6) int numberOfNalUnits = data.readUnsignedShort(); for (int j = 0; j < numberOfNalUnits; j++) { int nalUnitLength = data.readUnsignedShort(); - System.arraycopy(NalUnitUtil.NAL_START_CODE, 0, buffer, bufferPosition, + System.arraycopy( + NalUnitUtil.NAL_START_CODE, + 0, + buffer, + bufferPosition, NalUnitUtil.NAL_START_CODE.length); bufferPosition += NalUnitUtil.NAL_START_CODE.length; - System - .arraycopy(data.data, data.getPosition(), buffer, bufferPosition, nalUnitLength); + System.arraycopy( + data.getData(), data.getPosition(), buffer, bufferPosition, nalUnitLength); + if (nalUnitType == SPS_NAL_UNIT_TYPE && j == 0) { + NalUnitUtil.H265SpsData spsData = + NalUnitUtil.parseH265SpsNalUnit( + buffer, bufferPosition, bufferPosition + nalUnitLength); + width = spsData.width; + height = spsData.height; + pixelWidthHeightRatio = spsData.pixelWidthHeightRatio; + codecs = + CodecSpecificDataUtil.buildHevcCodecString( + spsData.generalProfileSpace, + spsData.generalTierFlag, + spsData.generalProfileIdc, + spsData.generalProfileCompatibilityFlags, + spsData.constraintBytes, + spsData.generalLevelIdc); + } bufferPosition += nalUnitLength; data.skipBytes(nalUnitLength); } } - List initializationData = csdLength == 0 ? null : Collections.singletonList(buffer); - return new HevcConfig(initializationData, lengthSizeMinusOne + 1); + List initializationData = + csdLength == 0 ? Collections.emptyList() : Collections.singletonList(buffer); + return new HevcConfig( + initializationData, lengthSizeMinusOne + 1, width, height, pixelWidthHeightRatio, codecs); } catch (ArrayIndexOutOfBoundsException e) { - throw new ParserException("Error parsing HEVC config", e); + throw ParserException.createForMalformedContainer("Error parsing HEVC config", e); } } - private HevcConfig(@Nullable List initializationData, int nalUnitLengthFieldLength) { + private static final int SPS_NAL_UNIT_TYPE = 33; + + /** + * List of buffers containing the codec-specific data to be provided to the decoder. + * + *

<p>See {@link Format#initializationData}.
+   */
+  public final List<byte[]> initializationData;
+
+  /** The length of the NAL unit length field in the bitstream's container, in bytes. */
+  public final int nalUnitLengthFieldLength;
+
+  /** The width of each decoded frame, or {@link Format#NO_VALUE} if unknown. */
+  public final int width;
+
+  /** The height of each decoded frame, or {@link Format#NO_VALUE} if unknown. */
+  public final int height;
+
+  /** The pixel width to height ratio. */
+  public final float pixelWidthHeightRatio;
+
+  /**
+   * An RFC 6381 codecs string representing the video format, or {@code null} if not known.
+   *
+   * <p>

      See {@link Format#codecs}. + */ + @Nullable public final String codecs; + + private HevcConfig( + List initializationData, + int nalUnitLengthFieldLength, + int width, + int height, + float pixelWidthHeightRatio, + @Nullable String codecs) { this.initializationData = initializationData; this.nalUnitLengthFieldLength = nalUnitLengthFieldLength; + this.width = width; + this.height = height; + this.pixelWidthHeightRatio = pixelWidthHeightRatio; + this.codecs = codecs; } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/MediaCodecVideoDecoderException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/MediaCodecVideoDecoderException.java new file mode 100644 index 0000000000..9846ecdca6 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/MediaCodecVideoDecoderException.java @@ -0,0 +1,39 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.video; + +import android.media.MediaCodec; +import android.view.Surface; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.mediacodec.MediaCodecDecoderException; +import com.google.android.exoplayer2.mediacodec.MediaCodecInfo; + +/** Thrown when a failure occurs in a {@link MediaCodec} video decoder. */ +public class MediaCodecVideoDecoderException extends MediaCodecDecoderException { + + /** The {@link System#identityHashCode(Object)} of the surface when the exception occurred. */ + public final int surfaceIdentityHashCode; + + /** Whether the surface was valid when the exception occurred. 
*/ + public final boolean isSurfaceValid; + + public MediaCodecVideoDecoderException( + Throwable cause, @Nullable MediaCodecInfo codecInfo, @Nullable Surface surface) { + super(cause, codecInfo); + surfaceIdentityHashCode = System.identityHashCode(surface); + isSurfaceValid = surface == null || surface.isValid(); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/MediaCodecVideoRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/MediaCodecVideoRenderer.java index 0d36c02807..e608a02657 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/MediaCodecVideoRenderer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/MediaCodecVideoRenderer.java @@ -15,10 +15,20 @@ */ package com.google.android.exoplayer2.video; +import static android.view.Display.DEFAULT_DISPLAY; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_MAX_INPUT_SIZE_EXCEEDED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DISCARD_REASON_VIDEO_MAX_RESOLUTION_EXCEEDED; +import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE_RESULT_NO; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; +import static java.lang.Math.max; +import static java.lang.Math.min; + import android.annotation.SuppressLint; import android.annotation.TargetApi; import android.content.Context; import android.graphics.Point; +import android.hardware.display.DisplayManager; import android.media.MediaCodec; import android.media.MediaCodecInfo.CodecCapabilities; import android.media.MediaCodecInfo.CodecProfileLevel; @@ -29,9 +39,12 @@ import android.os.Message; import android.os.SystemClock; import android.util.Pair; +import android.view.Display; import android.view.Surface; import androidx.annotation.CallSuper; +import androidx.annotation.DoNotInline; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.ExoPlayer; @@ -39,25 +52,26 @@ import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.PlayerMessage.Target; import com.google.android.exoplayer2.RendererCapabilities; +import com.google.android.exoplayer2.decoder.DecoderCounters; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.DecoderDiscardReasons; import com.google.android.exoplayer2.drm.DrmInitData; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.drm.FrameworkMediaCrypto; +import com.google.android.exoplayer2.mediacodec.MediaCodecAdapter; +import com.google.android.exoplayer2.mediacodec.MediaCodecDecoderException; import com.google.android.exoplayer2.mediacodec.MediaCodecInfo; import com.google.android.exoplayer2.mediacodec.MediaCodecRenderer; import com.google.android.exoplayer2.mediacodec.MediaCodecSelector; import com.google.android.exoplayer2.mediacodec.MediaCodecUtil; import com.google.android.exoplayer2.mediacodec.MediaCodecUtil.DecoderQueryException; -import com.google.android.exoplayer2.mediacodec.MediaFormatUtil; -import com.google.android.exoplayer2.source.MediaSource; -import 
com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.MediaFormatUtil; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.TraceUtil; import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.video.VideoRendererEventListener.EventDispatcher; +import com.google.common.collect.ImmutableList; import java.nio.ByteBuffer; -import java.util.Collections; import java.util.List; /** @@ -67,12 +81,18 @@ * on the playback thread: * *

<ul>
- *   <li>Message with type {@link C#MSG_SET_SURFACE} to set the output surface. The message payload
- *       should be the target {@link Surface}, or null.
- *   <li>Message with type {@link C#MSG_SET_SCALING_MODE} to set the video scaling mode. The message
+ *   <li>Message with type {@link #MSG_SET_VIDEO_OUTPUT} to set the output. The message payload
+ *       should be the target {@link Surface}, or null to clear the output. Other non-null payloads
+ *       have the effect of clearing the output.
+ *   <li>Message with type {@link #MSG_SET_SCALING_MODE} to set the video scaling mode. The message
 *       payload should be one of the integer scaling modes in {@link C.VideoScalingMode}. Note that
 *       the scaling mode only applies if the {@link Surface} targeted by this renderer is owned by
 *       a {@link android.view.SurfaceView}.
+ *   <li>Message with type {@link #MSG_SET_CHANGE_FRAME_RATE_STRATEGY} to set the strategy used to
+ *       call {@link Surface#setFrameRate}.
+ *   <li>Message with type {@link #MSG_SET_VIDEO_FRAME_METADATA_LISTENER} to set a listener for
+ *       metadata associated with frames being rendered. The message payload should be the {@link
+ *       VideoFrameMetadataListener}, or null.
 * </ul>
      */ public class MediaCodecVideoRenderer extends MediaCodecRenderer { @@ -84,12 +104,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { private static final String KEY_CROP_TOP = "crop-top"; // Long edge length in pixels for standard video formats, in decreasing in order. - private static final int[] STANDARD_LONG_EDGE_VIDEO_PX = new int[] { - 1920, 1600, 1440, 1280, 960, 854, 640, 540, 480}; + private static final int[] STANDARD_LONG_EDGE_VIDEO_PX = + new int[] {1920, 1600, 1440, 1280, 960, 854, 640, 540, 480}; - // Generally there is zero or one pending output stream offset. We track more offsets to allow for - // pending output streams that have fewer frames than the codec latency. - private static final int MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT = 10; /** * Scale factor for the initial maximum input size used to configure the codec in non-adaptive * playbacks. See {@link #getCodecMaxValues(MediaCodecInfo, Format, Format[])}. @@ -99,71 +116,50 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { /** Magic frame render timestamp that indicates the EOS in tunneling mode. */ private static final long TUNNELING_EOS_PRESENTATION_TIME_US = Long.MAX_VALUE; - /** A {@link DecoderException} with additional surface information. */ - public static final class VideoDecoderException extends DecoderException { - - /** The {@link System#identityHashCode(Object)} of the surface when the exception occurred. */ - public final int surfaceIdentityHashCode; - - /** Whether the surface was valid when the exception occurred. */ - public final boolean isSurfaceValid; - - public VideoDecoderException( - Throwable cause, @Nullable MediaCodecInfo codecInfo, @Nullable Surface surface) { - super(cause, codecInfo); - surfaceIdentityHashCode = System.identityHashCode(surface); - isSurfaceValid = surface == null || surface.isValid(); - } - } + /** The minimum input buffer size for HEVC. 
*/ + private static final int HEVC_MAX_INPUT_SIZE_THRESHOLD = 2 * 1024 * 1024; private static boolean evaluatedDeviceNeedsSetOutputSurfaceWorkaround; private static boolean deviceNeedsSetOutputSurfaceWorkaround; private final Context context; - private final VideoFrameReleaseTimeHelper frameReleaseTimeHelper; + private final VideoFrameReleaseHelper frameReleaseHelper; private final EventDispatcher eventDispatcher; private final long allowedJoiningTimeMs; private final int maxDroppedFramesToNotify; private final boolean deviceNeedsNoPostProcessWorkaround; - private final long[] pendingOutputStreamOffsetsUs; - private final long[] pendingOutputStreamSwitchTimesUs; private CodecMaxValues codecMaxValues; private boolean codecNeedsSetOutputSurfaceWorkaround; private boolean codecHandlesHdr10PlusOutOfBandMetadata; - private Surface surface; - private Surface dummySurface; - @C.VideoScalingMode - private int scalingMode; - private boolean renderedFirstFrame; + @Nullable private Surface surface; + @Nullable private PlaceholderSurface placeholderSurface; + private boolean haveReportedFirstFrameRenderedForCurrentSurface; + private @C.VideoScalingMode int scalingMode; + private boolean renderedFirstFrameAfterReset; + private boolean mayRenderFirstFrameAfterEnableIfNotStarted; + private boolean renderedFirstFrameAfterEnable; private long initialPositionUs; private long joiningDeadlineMs; private long droppedFrameAccumulationStartTimeMs; private int droppedFrames; private int consecutiveDroppedFrameCount; private int buffersInCodecCount; - private long lastRenderTimeUs; + private long lastBufferPresentationTimeUs; + private long lastRenderRealtimeUs; + private long totalVideoFrameProcessingOffsetUs; + private int videoFrameProcessingOffsetCount; - private int pendingRotationDegrees; - private float pendingPixelWidthHeightRatio; - @Nullable private MediaFormat currentMediaFormat; private int currentWidth; private int currentHeight; private int currentUnappliedRotationDegrees; private float currentPixelWidthHeightRatio; - private int reportedWidth; - private int reportedHeight; - private int reportedUnappliedRotationDegrees; - private float reportedPixelWidthHeightRatio; + @Nullable private VideoSize reportedVideoSize; private boolean tunneling; private int tunnelingAudioSessionId; /* package */ @Nullable OnFrameRenderedListenerV23 tunnelingOnFrameRenderedListener; - - private long lastInputTimeUs; - private long outputStreamOffsetUs; - private int pendingOutputStreamOffsetCount; @Nullable private VideoFrameMetadataListener frameMetadataListener; /** @@ -180,15 +176,15 @@ public MediaCodecVideoRenderer(Context context, MediaCodecSelector mediaCodecSel * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer * can attempt to seamlessly join an ongoing playback. */ - public MediaCodecVideoRenderer(Context context, MediaCodecSelector mediaCodecSelector, - long allowedJoiningTimeMs) { + public MediaCodecVideoRenderer( + Context context, MediaCodecSelector mediaCodecSelector, long allowedJoiningTimeMs) { this( context, mediaCodecSelector, allowedJoiningTimeMs, /* eventHandler= */ null, /* eventListener= */ null, - /* maxDroppedFramesToNotify= */ -1); + /* maxDroppedFramesToNotify= */ 0); } /** @@ -202,7 +198,6 @@ public MediaCodecVideoRenderer(Context context, MediaCodecSelector mediaCodecSel * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}. 
*/ - @SuppressWarnings("deprecation") public MediaCodecVideoRenderer( Context context, MediaCodecSelector mediaCodecSelector, @@ -212,13 +207,14 @@ public MediaCodecVideoRenderer( int maxDroppedFramesToNotify) { this( context, + MediaCodecAdapter.Factory.DEFAULT, mediaCodecSelector, allowedJoiningTimeMs, - /* drmSessionManager= */ null, - /* playClearSamplesWithoutKeys= */ false, + /* enableDecoderFallback= */ false, eventHandler, eventListener, - maxDroppedFramesToNotify); + maxDroppedFramesToNotify, + /* assumedMinimumCodecOperatingRate= */ 30); } /** @@ -226,47 +222,39 @@ public MediaCodecVideoRenderer( * @param mediaCodecSelector A decoder selector. * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer * can attempt to seamlessly join an ongoing playback. - * @param drmSessionManager For use with encrypted content. May be null if support for encrypted - * content is not required. - * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions. - * For example a media file may start with a short clear region so as to allow playback to - * begin in parallel with key acquisition. This parameter specifies whether the renderer is - * permitted to play clear regions of encrypted media files before {@code drmSessionManager} - * has obtained the keys necessary to decrypt encrypted regions of the media. + * @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder + * initialization fails. This may result in using a decoder that is slower/less efficient than + * the primary decoder. * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}. - * @deprecated Use {@link #MediaCodecVideoRenderer(Context, MediaCodecSelector, long, boolean, - * Handler, VideoRendererEventListener, int)} instead, and pass DRM-related parameters to the - * {@link MediaSource} factories. */ - @Deprecated - @SuppressWarnings("deprecation") public MediaCodecVideoRenderer( Context context, MediaCodecSelector mediaCodecSelector, long allowedJoiningTimeMs, - @Nullable DrmSessionManager drmSessionManager, - boolean playClearSamplesWithoutKeys, + boolean enableDecoderFallback, @Nullable Handler eventHandler, @Nullable VideoRendererEventListener eventListener, int maxDroppedFramesToNotify) { this( context, + MediaCodecAdapter.Factory.DEFAULT, mediaCodecSelector, allowedJoiningTimeMs, - drmSessionManager, - playClearSamplesWithoutKeys, - /* enableDecoderFallback= */ false, + enableDecoderFallback, eventHandler, eventListener, - maxDroppedFramesToNotify); + maxDroppedFramesToNotify, + /* assumedMinimumCodecOperatingRate= */ 30); } /** * @param context A context. + * @param codecAdapterFactory The {@link MediaCodecAdapter.Factory} used to create {@link + * MediaCodecAdapter} instances. * @param mediaCodecSelector A decoder selector. * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer * can attempt to seamlessly join an ongoing playback. 
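// --- Editorial sketch, not part of the diff ---------------------------------------------
// Shows how a caller might use the simplified constructor from the hunk above, which now
// delegates to the full constructor with MediaCodecAdapter.Factory.DEFAULT,
// enableDecoderFallback = false and assumedMinimumCodecOperatingRate = 30. The joining
// time and dropped-frame threshold below are arbitrary example values; the surrounding
// file's imports (Context, Handler, MediaCodecSelector, VideoRendererEventListener) are
// assumed.
final class VideoRendererConstructionSketch {
  static MediaCodecVideoRenderer create(
      Context context, Handler eventHandler, VideoRendererEventListener eventListener) {
    return new MediaCodecVideoRenderer(
        context,
        MediaCodecSelector.DEFAULT,
        /* allowedJoiningTimeMs= */ 5000,
        eventHandler,
        eventListener,
        /* maxDroppedFramesToNotify= */ 50);
  }
}
// -----------------------------------------------------------------------------------------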
@@ -279,39 +267,37 @@ public MediaCodecVideoRenderer( * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}. */ - @SuppressWarnings("deprecation") public MediaCodecVideoRenderer( Context context, + MediaCodecAdapter.Factory codecAdapterFactory, MediaCodecSelector mediaCodecSelector, long allowedJoiningTimeMs, boolean enableDecoderFallback, @Nullable Handler eventHandler, @Nullable VideoRendererEventListener eventListener, int maxDroppedFramesToNotify) { + this( context, + codecAdapterFactory, mediaCodecSelector, allowedJoiningTimeMs, - /* drmSessionManager= */ null, - /* playClearSamplesWithoutKeys= */ false, enableDecoderFallback, eventHandler, eventListener, - maxDroppedFramesToNotify); + maxDroppedFramesToNotify, + /* assumedMinimumCodecOperatingRate= */ 30); } /** + * Creates a new instance. + * * @param context A context. + * @param codecAdapterFactory The {@link MediaCodecAdapter.Factory} used to create {@link + * MediaCodecAdapter} instances. * @param mediaCodecSelector A decoder selector. * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer * can attempt to seamlessly join an ongoing playback. - * @param drmSessionManager For use with encrypted content. May be null if support for encrypted - * content is not required. - * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions. - * For example a media file may start with a short clear region so as to allow playback to - * begin in parallel with key acquisition. This parameter specifies whether the renderer is - * permitted to play clear regions of encrypted media files before {@code drmSessionManager} - * has obtained the keys necessary to decrypt encrypted regions of the media. * @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder * initialization fails. This may result in using a decoder that is slower/less efficient than * the primary decoder. @@ -320,63 +306,59 @@ public MediaCodecVideoRenderer( * @param eventListener A listener of events. May be null if delivery of events is not required. * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}. - * @deprecated Use {@link #MediaCodecVideoRenderer(Context, MediaCodecSelector, long, boolean, - * Handler, VideoRendererEventListener, int)} instead, and pass DRM-related parameters to the - * {@link MediaSource} factories. + * @param assumedMinimumCodecOperatingRate A codec operating rate that all codecs instantiated by + * this renderer are assumed to meet implicitly (i.e. without the operating rate being set + * explicitly using {@link MediaFormat#KEY_OPERATING_RATE}). 
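// --- Editorial sketch, not part of the diff ---------------------------------------------
// Illustrates what the new assumedMinimumCodecOperatingRate parameter (documented above)
// is for: codecs are assumed to reach this rate without MediaFormat.KEY_OPERATING_RATE
// being set, so an explicit operating rate is only useful above it. The helper below is
// hypothetical; the actual decision lives in MediaCodecRenderer, and the target rate is
// computed in getCodecOperatingRateV23 further down in this file.
final class OperatingRateSketch {
  /** Returns the rate to request via KEY_OPERATING_RATE, or -1 to leave the key unset. */
  static float operatingRateToRequest(
      float maxStreamFrameRate, float playbackSpeed, float assumedMinimumCodecOperatingRate) {
    if (maxStreamFrameRate <= 0) {
      return -1; // Frame rate unknown: keep the codec's default behaviour.
    }
    float targetRate = maxStreamFrameRate * playbackSpeed;
    return targetRate > assumedMinimumCodecOperatingRate ? targetRate : -1;
  }
}
// -----------------------------------------------------------------------------------------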
*/ - @Deprecated public MediaCodecVideoRenderer( Context context, + MediaCodecAdapter.Factory codecAdapterFactory, MediaCodecSelector mediaCodecSelector, long allowedJoiningTimeMs, - @Nullable DrmSessionManager drmSessionManager, - boolean playClearSamplesWithoutKeys, boolean enableDecoderFallback, @Nullable Handler eventHandler, @Nullable VideoRendererEventListener eventListener, - int maxDroppedFramesToNotify) { + int maxDroppedFramesToNotify, + float assumedMinimumCodecOperatingRate) { super( C.TRACK_TYPE_VIDEO, + codecAdapterFactory, mediaCodecSelector, - drmSessionManager, - playClearSamplesWithoutKeys, enableDecoderFallback, - /* assumedMinimumCodecOperatingRate= */ 30); + assumedMinimumCodecOperatingRate); this.allowedJoiningTimeMs = allowedJoiningTimeMs; this.maxDroppedFramesToNotify = maxDroppedFramesToNotify; this.context = context.getApplicationContext(); - frameReleaseTimeHelper = new VideoFrameReleaseTimeHelper(this.context); + frameReleaseHelper = new VideoFrameReleaseHelper(this.context); eventDispatcher = new EventDispatcher(eventHandler, eventListener); deviceNeedsNoPostProcessWorkaround = deviceNeedsNoPostProcessWorkaround(); - pendingOutputStreamOffsetsUs = new long[MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT]; - pendingOutputStreamSwitchTimesUs = new long[MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT]; - outputStreamOffsetUs = C.TIME_UNSET; - lastInputTimeUs = C.TIME_UNSET; joiningDeadlineMs = C.TIME_UNSET; currentWidth = Format.NO_VALUE; currentHeight = Format.NO_VALUE; currentPixelWidthHeightRatio = Format.NO_VALUE; - pendingPixelWidthHeightRatio = Format.NO_VALUE; scalingMode = C.VIDEO_SCALING_MODE_DEFAULT; + tunnelingAudioSessionId = C.AUDIO_SESSION_ID_UNSET; clearReportedVideoSize(); } @Override - @Capabilities - protected int supportsFormat( - MediaCodecSelector mediaCodecSelector, - @Nullable DrmSessionManager drmSessionManager, - Format format) + public String getName() { + return TAG; + } + + @Override + protected @Capabilities int supportsFormat(MediaCodecSelector mediaCodecSelector, Format format) throws DecoderQueryException { String mimeType = format.sampleMimeType; if (!MimeTypes.isVideo(mimeType)) { - return RendererCapabilities.create(FORMAT_UNSUPPORTED_TYPE); + return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE); } @Nullable DrmInitData drmInitData = format.drmInitData; // Assume encrypted content requires secure decoders. boolean requiresSecureDecryption = drmInitData != null; List decoderInfos = getDecoderInfos( + context, mediaCodecSelector, format, requiresSecureDecryption, @@ -385,59 +367,111 @@ protected int supportsFormat( // No secure decoders are available. Fall back to non-secure decoders. decoderInfos = getDecoderInfos( + context, mediaCodecSelector, format, /* requiresSecureDecoder= */ false, /* requiresTunnelingDecoder= */ false); } if (decoderInfos.isEmpty()) { - return RendererCapabilities.create(FORMAT_UNSUPPORTED_SUBTYPE); + return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_SUBTYPE); } - boolean supportsFormatDrm = - drmInitData == null - || FrameworkMediaCrypto.class.equals(format.exoMediaCryptoType) - || (format.exoMediaCryptoType == null - && supportsFormatDrm(drmSessionManager, drmInitData)); - if (!supportsFormatDrm) { - return RendererCapabilities.create(FORMAT_UNSUPPORTED_DRM); + if (!supportsFormatDrm(format)) { + return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_DRM); } - // Check capabilities for the first decoder in the list, which takes priority. + // Check whether the first decoder supports the format. 
This is the preferred decoder for the + // format's MIME type, according to the MediaCodecSelector. MediaCodecInfo decoderInfo = decoderInfos.get(0); boolean isFormatSupported = decoderInfo.isFormatSupported(format); + boolean isPreferredDecoder = true; + if (!isFormatSupported) { + // Check whether any of the other decoders support the format. + for (int i = 1; i < decoderInfos.size(); i++) { + MediaCodecInfo otherDecoderInfo = decoderInfos.get(i); + if (otherDecoderInfo.isFormatSupported(format)) { + decoderInfo = otherDecoderInfo; + isFormatSupported = true; + isPreferredDecoder = false; + break; + } + } + } + @C.FormatSupport + int formatSupport = isFormatSupported ? C.FORMAT_HANDLED : C.FORMAT_EXCEEDS_CAPABILITIES; @AdaptiveSupport int adaptiveSupport = decoderInfo.isSeamlessAdaptationSupported(format) ? ADAPTIVE_SEAMLESS : ADAPTIVE_NOT_SEAMLESS; + @HardwareAccelerationSupport + int hardwareAccelerationSupport = + decoderInfo.hardwareAccelerated + ? HARDWARE_ACCELERATION_SUPPORTED + : HARDWARE_ACCELERATION_NOT_SUPPORTED; + @DecoderSupport + int decoderSupport = isPreferredDecoder ? DECODER_SUPPORT_PRIMARY : DECODER_SUPPORT_FALLBACK; + + if (Util.SDK_INT >= 26 + && MimeTypes.VIDEO_DOLBY_VISION.equals(format.sampleMimeType) + && !Api26.doesDisplaySupportDolbyVision(context)) { + decoderSupport = DECODER_SUPPORT_FALLBACK_MIMETYPE; + } + @TunnelingSupport int tunnelingSupport = TUNNELING_NOT_SUPPORTED; if (isFormatSupported) { List tunnelingDecoderInfos = getDecoderInfos( + context, mediaCodecSelector, format, requiresSecureDecryption, /* requiresTunnelingDecoder= */ true); if (!tunnelingDecoderInfos.isEmpty()) { - MediaCodecInfo tunnelingDecoderInfo = tunnelingDecoderInfos.get(0); + MediaCodecInfo tunnelingDecoderInfo = + MediaCodecUtil.getDecoderInfosSortedByFormatSupport(tunnelingDecoderInfos, format) + .get(0); if (tunnelingDecoderInfo.isFormatSupported(format) && tunnelingDecoderInfo.isSeamlessAdaptationSupported(format)) { tunnelingSupport = TUNNELING_SUPPORTED; } } } - @FormatSupport - int formatSupport = isFormatSupported ? FORMAT_HANDLED : FORMAT_EXCEEDS_CAPABILITIES; - return RendererCapabilities.create(formatSupport, adaptiveSupport, tunnelingSupport); + + return RendererCapabilities.create( + formatSupport, + adaptiveSupport, + tunnelingSupport, + hardwareAccelerationSupport, + decoderSupport); } @Override protected List getDecoderInfos( MediaCodecSelector mediaCodecSelector, Format format, boolean requiresSecureDecoder) throws DecoderQueryException { - return getDecoderInfos(mediaCodecSelector, format, requiresSecureDecoder, tunneling); + return MediaCodecUtil.getDecoderInfosSortedByFormatSupport( + getDecoderInfos(context, mediaCodecSelector, format, requiresSecureDecoder, tunneling), + format); } + /** + * Returns a list of decoders that can decode media in the specified format, in the priority order + * specified by the {@link MediaCodecSelector}. Note that since the {@link MediaCodecSelector} + * only has access to {@link Format#sampleMimeType}, the list is not ordered to account for + * whether each decoder supports the details of the format (e.g., taking into account the format's + * profile, level, resolution and so on). {@link + * MediaCodecUtil#getDecoderInfosSortedByFormatSupport} can be used to further sort the list into + * an order where decoders that fully support the format come first. + * + * @param mediaCodecSelector The decoder selector. + * @param format The {@link Format} for which a decoder is required. 
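// --- Editorial sketch, not part of the diff ---------------------------------------------
// Restates the decoder-fallback scan added to supportsFormat() above: index 0 holds the
// decoder preferred for the MIME type; if it cannot handle the format, the remaining
// decoders are checked and a match is reported as DECODER_SUPPORT_FALLBACK rather than
// DECODER_SUPPORT_PRIMARY. Types are the ExoPlayer mediacodec classes already used by
// this file.
final class DecoderFallbackSketch {
  static final class Choice {
    final MediaCodecInfo decoderInfo;
    final boolean isPreferredDecoder;
    Choice(MediaCodecInfo decoderInfo, boolean isPreferredDecoder) {
      this.decoderInfo = decoderInfo;
      this.isPreferredDecoder = isPreferredDecoder;
    }
  }

  /** Returns the first decoder in priority order that supports {@code format}, else null. */
  @Nullable
  static Choice chooseDecoder(java.util.List<MediaCodecInfo> decoderInfos, Format format)
      throws MediaCodecUtil.DecoderQueryException {
    for (int i = 0; i < decoderInfos.size(); i++) {
      MediaCodecInfo info = decoderInfos.get(i);
      if (info.isFormatSupported(format)) {
        return new Choice(info, /* isPreferredDecoder= */ i == 0);
      }
    }
    return null;
  }
}
// -----------------------------------------------------------------------------------------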
+ * @param requiresSecureDecoder Whether a secure decoder is required. + * @param requiresTunnelingDecoder Whether a tunneling decoder is required. + * @return A list of {@link MediaCodecInfo}s corresponding to decoders. May be empty. + * @throws DecoderQueryException Thrown if there was an error querying decoders. + */ private static List getDecoderInfos( + Context context, MediaCodecSelector mediaCodecSelector, Format format, boolean requiresSecureDecoder, @@ -445,74 +479,75 @@ private static List getDecoderInfos( throws DecoderQueryException { @Nullable String mimeType = format.sampleMimeType; if (mimeType == null) { - return Collections.emptyList(); + return ImmutableList.of(); } List decoderInfos = mediaCodecSelector.getDecoderInfos( mimeType, requiresSecureDecoder, requiresTunnelingDecoder); - decoderInfos = MediaCodecUtil.getDecoderInfosSortedByFormatSupport(decoderInfos, format); - if (MimeTypes.VIDEO_DOLBY_VISION.equals(mimeType)) { - // Fall back to H.264/AVC or H.265/HEVC for the relevant DV profiles. - @Nullable - Pair codecProfileAndLevel = MediaCodecUtil.getCodecProfileAndLevel(format); - if (codecProfileAndLevel != null) { - int profile = codecProfileAndLevel.first; - if (profile == CodecProfileLevel.DolbyVisionProfileDvheDtr - || profile == CodecProfileLevel.DolbyVisionProfileDvheSt) { - decoderInfos.addAll( - mediaCodecSelector.getDecoderInfos( - MimeTypes.VIDEO_H265, requiresSecureDecoder, requiresTunnelingDecoder)); - } else if (profile == CodecProfileLevel.DolbyVisionProfileDvavSe) { - decoderInfos.addAll( - mediaCodecSelector.getDecoderInfos( - MimeTypes.VIDEO_H264, requiresSecureDecoder, requiresTunnelingDecoder)); + @Nullable String alternativeMimeType = MediaCodecUtil.getAlternativeCodecMimeType(format); + if (alternativeMimeType == null) { + return ImmutableList.copyOf(decoderInfos); + } + List alternativeDecoderInfos = + mediaCodecSelector.getDecoderInfos( + alternativeMimeType, requiresSecureDecoder, requiresTunnelingDecoder); + if (Util.SDK_INT >= 26 + && MimeTypes.VIDEO_DOLBY_VISION.equals(format.sampleMimeType) + && !alternativeDecoderInfos.isEmpty() + && !Api26.doesDisplaySupportDolbyVision(context)) { + return ImmutableList.copyOf(alternativeDecoderInfos); + } + return ImmutableList.builder() + .addAll(decoderInfos) + .addAll(alternativeDecoderInfos) + .build(); + } + + @RequiresApi(26) + private static final class Api26 { + @DoNotInline + public static boolean doesDisplaySupportDolbyVision(Context context) { + boolean supportsDolbyVision = false; + DisplayManager displayManager = + (DisplayManager) context.getSystemService(Context.DISPLAY_SERVICE); + Display display = + (displayManager != null) ? 
displayManager.getDisplay(DEFAULT_DISPLAY) : null; + if (display != null && display.isHdr()) { + int[] supportedHdrTypes = display.getHdrCapabilities().getSupportedHdrTypes(); + for (int hdrType : supportedHdrTypes) { + if (hdrType == Display.HdrCapabilities.HDR_TYPE_DOLBY_VISION) { + supportsDolbyVision = true; + break; + } } } + return supportsDolbyVision; } - return Collections.unmodifiableList(decoderInfos); } @Override - protected void onEnabled(boolean joining) throws ExoPlaybackException { - super.onEnabled(joining); - int oldTunnelingAudioSessionId = tunnelingAudioSessionId; - tunnelingAudioSessionId = getConfiguration().tunnelingAudioSessionId; - tunneling = tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET; - if (tunnelingAudioSessionId != oldTunnelingAudioSessionId) { + protected void onEnabled(boolean joining, boolean mayRenderStartOfStream) + throws ExoPlaybackException { + super.onEnabled(joining, mayRenderStartOfStream); + boolean tunneling = getConfiguration().tunneling; + checkState(!tunneling || tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET); + if (this.tunneling != tunneling) { + this.tunneling = tunneling; releaseCodec(); } eventDispatcher.enabled(decoderCounters); - frameReleaseTimeHelper.enable(); - } - - @Override - protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException { - if (outputStreamOffsetUs == C.TIME_UNSET) { - outputStreamOffsetUs = offsetUs; - } else { - if (pendingOutputStreamOffsetCount == pendingOutputStreamOffsetsUs.length) { - Log.w(TAG, "Too many stream changes, so dropping offset: " - + pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1]); - } else { - pendingOutputStreamOffsetCount++; - } - pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1] = offsetUs; - pendingOutputStreamSwitchTimesUs[pendingOutputStreamOffsetCount - 1] = lastInputTimeUs; - } - super.onStreamChanged(formats, offsetUs); + mayRenderFirstFrameAfterEnableIfNotStarted = mayRenderStartOfStream; + renderedFirstFrameAfterEnable = false; } @Override protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException { super.onPositionReset(positionUs, joining); clearRenderedFirstFrame(); + frameReleaseHelper.onPositionReset(); + lastBufferPresentationTimeUs = C.TIME_UNSET; initialPositionUs = C.TIME_UNSET; consecutiveDroppedFrameCount = 0; - lastInputTimeUs = C.TIME_UNSET; - if (pendingOutputStreamOffsetCount != 0) { - outputStreamOffsetUs = pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1]; - pendingOutputStreamOffsetCount = 0; - } if (joining) { setJoiningDeadlineMs(); } else { @@ -522,8 +557,11 @@ protected void onPositionReset(long positionUs, boolean joining) throws ExoPlayb @Override public boolean isReady() { - if (super.isReady() && (renderedFirstFrame || (dummySurface != null && surface == dummySurface) - || getCodec() == null || tunneling)) { + if (super.isReady() + && (renderedFirstFrameAfterReset + || (placeholderSurface != null && surface == placeholderSurface) + || getCodec() == null + || tunneling)) { // Ready. If we were joining then we've now joined, so clear the joining deadline. 
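// --- Editorial sketch, not part of the diff ---------------------------------------------
// The readiness condition from isReady() above, written as a standalone predicate: with a
// placeholder surface, no codec, or tunneling there is no frame to wait for; otherwise the
// renderer is ready once it has rendered a frame since the last position reset.
final class ReadinessSketch {
  static boolean isVideoRendererReady(
      boolean superIsReady,
      boolean renderedFirstFrameAfterReset,
      boolean outputIsPlaceholderSurface,
      boolean hasCodec,
      boolean tunneling) {
    return superIsReady
        && (renderedFirstFrameAfterReset || outputIsPlaceholderSurface || !hasCodec || tunneling);
  }
}
// -----------------------------------------------------------------------------------------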
joiningDeadlineMs = C.TIME_UNSET; return true; @@ -545,25 +583,26 @@ protected void onStarted() { super.onStarted(); droppedFrames = 0; droppedFrameAccumulationStartTimeMs = SystemClock.elapsedRealtime(); - lastRenderTimeUs = SystemClock.elapsedRealtime() * 1000; + lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000; + totalVideoFrameProcessingOffsetUs = 0; + videoFrameProcessingOffsetCount = 0; + frameReleaseHelper.onStarted(); } @Override protected void onStopped() { joiningDeadlineMs = C.TIME_UNSET; maybeNotifyDroppedFrames(); + maybeNotifyVideoFrameProcessingOffset(); + frameReleaseHelper.onStopped(); super.onStopped(); } @Override protected void onDisabled() { - lastInputTimeUs = C.TIME_UNSET; - outputStreamOffsetUs = C.TIME_UNSET; - pendingOutputStreamOffsetCount = 0; - currentMediaFormat = null; clearReportedVideoSize(); clearRenderedFirstFrame(); - frameReleaseTimeHelper.disable(); + haveReportedFirstFrameRenderedForCurrentSurface = false; tunnelingOnFrameRenderedListener = null; try { super.onDisabled(); @@ -572,56 +611,83 @@ protected void onDisabled() { } } + @TargetApi(17) // Needed for placeholderSurface usage, as it is always null on API level 16. @Override protected void onReset() { try { super.onReset(); } finally { - if (dummySurface != null) { - if (surface == dummySurface) { - surface = null; - } - dummySurface.release(); - dummySurface = null; + if (placeholderSurface != null) { + releasePlaceholderSurface(); } } } @Override - public void handleMessage(int messageType, @Nullable Object message) throws ExoPlaybackException { - if (messageType == C.MSG_SET_SURFACE) { - setSurface((Surface) message); - } else if (messageType == C.MSG_SET_SCALING_MODE) { - scalingMode = (Integer) message; - MediaCodec codec = getCodec(); - if (codec != null) { - codec.setVideoScalingMode(scalingMode); - } - } else if (messageType == C.MSG_SET_VIDEO_FRAME_METADATA_LISTENER) { - frameMetadataListener = (VideoFrameMetadataListener) message; - } else { - super.handleMessage(messageType, message); + public void handleMessage(@MessageType int messageType, @Nullable Object message) + throws ExoPlaybackException { + switch (messageType) { + case MSG_SET_VIDEO_OUTPUT: + setOutput(message); + break; + case MSG_SET_SCALING_MODE: + scalingMode = (Integer) message; + @Nullable MediaCodecAdapter codec = getCodec(); + if (codec != null) { + codec.setVideoScalingMode(scalingMode); + } + break; + case MSG_SET_CHANGE_FRAME_RATE_STRATEGY: + frameReleaseHelper.setChangeFrameRateStrategy((int) message); + break; + case MSG_SET_VIDEO_FRAME_METADATA_LISTENER: + frameMetadataListener = (VideoFrameMetadataListener) message; + break; + case MSG_SET_AUDIO_SESSION_ID: + int tunnelingAudioSessionId = (int) message; + if (this.tunnelingAudioSessionId != tunnelingAudioSessionId) { + this.tunnelingAudioSessionId = tunnelingAudioSessionId; + if (tunneling) { + releaseCodec(); + } + } + break; + case MSG_SET_AUDIO_ATTRIBUTES: + case MSG_SET_AUX_EFFECT_INFO: + case MSG_SET_CAMERA_MOTION_LISTENER: + case MSG_SET_SKIP_SILENCE_ENABLED: + case MSG_SET_VOLUME: + case MSG_SET_WAKEUP_LISTENER: + default: + super.handleMessage(messageType, message); } } - private void setSurface(Surface surface) throws ExoPlaybackException { + private void setOutput(@Nullable Object output) throws ExoPlaybackException { + // Handle unsupported (i.e., non-Surface) outputs by clearing the surface. + @Nullable Surface surface = output instanceof Surface ? (Surface) output : null; + if (surface == null) { - // Use a dummy surface if possible. 
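// --- Editorial sketch, not part of the diff ---------------------------------------------
// The handleMessage() switch above receives its payloads through ExoPlayer's renderer
// message mechanism. A typical way to deliver MSG_SET_VIDEO_OUTPUT, assuming the
// PlayerMessage API (ExoPlayer 2.16+) and caller-supplied player and renderer instances:
final class RendererMessageSketch {
  static void sendSurfaceToRenderer(
      com.google.android.exoplayer2.ExoPlayer player,
      com.google.android.exoplayer2.Renderer videoRenderer,
      @Nullable Surface surface) {
    player
        .createMessage(videoRenderer)
        .setType(com.google.android.exoplayer2.Renderer.MSG_SET_VIDEO_OUTPUT)
        .setPayload(surface)
        .send();
  }
}
// -----------------------------------------------------------------------------------------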
- if (dummySurface != null) { - surface = dummySurface; + // Use a placeholder surface if possible. + if (placeholderSurface != null) { + surface = placeholderSurface; } else { MediaCodecInfo codecInfo = getCodecInfo(); - if (codecInfo != null && shouldUseDummySurface(codecInfo)) { - dummySurface = DummySurface.newInstanceV17(context, codecInfo.secure); - surface = dummySurface; + if (codecInfo != null && shouldUsePlaceholderSurface(codecInfo)) { + placeholderSurface = PlaceholderSurface.newInstanceV17(context, codecInfo.secure); + surface = placeholderSurface; } } } + // We only need to update the codec if the surface has changed. if (this.surface != surface) { this.surface = surface; + frameReleaseHelper.onSurfaceChanged(surface); + haveReportedFirstFrameRenderedForCurrentSurface = false; + @State int state = getState(); - MediaCodec codec = getCodec(); + @Nullable MediaCodecAdapter codec = getCodec(); if (codec != null) { if (Util.SDK_INT >= 23 && surface != null && !codecNeedsSetOutputSurfaceWorkaround) { try { @@ -631,10 +697,10 @@ private void setSurface(Surface surface) throws ExoPlaybackException { } } else { releaseCodec(); - maybeInitCodec(); + maybeInitCodecOrBypass(); } } - if (surface != null && surface != dummySurface) { + if (surface != null && surface != placeholderSurface) { // If we know the video size, report it again immediately. maybeRenotifyVideoSizeChanged(); // We haven't rendered to the new surface yet. @@ -647,7 +713,7 @@ private void setSurface(Surface surface) throws ExoPlaybackException { clearReportedVideoSize(); clearRenderedFirstFrame(); } - } else if (surface != null && surface != dummySurface) { + } else if (surface != null && surface != placeholderSurface) { // The surface is set and unchanged. If we know the video size and/or have already rendered to // the surface, report these again immediately. maybeRenotifyVideoSizeChanged(); @@ -657,7 +723,7 @@ private void setSurface(Surface surface) throws ExoPlaybackException { @Override protected boolean shouldInitCodec(MediaCodecInfo codecInfo) { - return surface != null || shouldUseDummySurface(codecInfo); + return surface != null || shouldUsePlaceholderSurface(codecInfo); } @Override @@ -666,13 +732,17 @@ protected boolean getCodecNeedsEosPropagation() { return tunneling && Util.SDK_INT < 23; } + @TargetApi(17) // Needed for placeHolderSurface usage, as it is always null on API level 16. @Override - protected void configureCodec( + protected MediaCodecAdapter.Configuration getMediaCodecConfiguration( MediaCodecInfo codecInfo, - MediaCodec codec, Format format, @Nullable MediaCrypto crypto, float codecOperatingRate) { + if (placeholderSurface != null && placeholderSurface.secure != codecInfo.secure) { + // We can't re-use the current DummySurface instance with the new decoder. + releasePlaceholderSurface(); + } String codecMimeType = codecInfo.codecMimeType; codecMaxValues = getCodecMaxValues(codecInfo, format, getStreamFormats()); MediaFormat mediaFormat = @@ -682,102 +752,192 @@ protected void configureCodec( codecMaxValues, codecOperatingRate, deviceNeedsNoPostProcessWorkaround, - tunnelingAudioSessionId); + tunneling ? 
tunnelingAudioSessionId : C.AUDIO_SESSION_ID_UNSET); if (surface == null) { - Assertions.checkState(shouldUseDummySurface(codecInfo)); - if (dummySurface == null) { - dummySurface = DummySurface.newInstanceV17(context, codecInfo.secure); + if (!shouldUsePlaceholderSurface(codecInfo)) { + throw new IllegalStateException(); } - surface = dummySurface; - } - codec.configure(mediaFormat, surface, crypto, 0); - if (Util.SDK_INT >= 23 && tunneling) { - tunnelingOnFrameRenderedListener = new OnFrameRenderedListenerV23(codec); + if (placeholderSurface == null) { + placeholderSurface = PlaceholderSurface.newInstanceV17(context, codecInfo.secure); + } + surface = placeholderSurface; } + return MediaCodecAdapter.Configuration.createForVideoDecoding( + codecInfo, mediaFormat, format, surface, crypto); } @Override - protected @KeepCodecResult int canKeepCodec( - MediaCodec codec, MediaCodecInfo codecInfo, Format oldFormat, Format newFormat) { - if (codecInfo.isSeamlessAdaptationSupported( - oldFormat, newFormat, /* isNewFormatComplete= */ true) - && newFormat.width <= codecMaxValues.width - && newFormat.height <= codecMaxValues.height - && getMaxInputSize(codecInfo, newFormat) <= codecMaxValues.inputSize) { - return oldFormat.initializationDataEquals(newFormat) - ? KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION - : KEEP_CODEC_RESULT_YES_WITH_RECONFIGURATION; + protected DecoderReuseEvaluation canReuseCodec( + MediaCodecInfo codecInfo, Format oldFormat, Format newFormat) { + DecoderReuseEvaluation evaluation = codecInfo.canReuseCodec(oldFormat, newFormat); + + @DecoderDiscardReasons int discardReasons = evaluation.discardReasons; + if (newFormat.width > codecMaxValues.width || newFormat.height > codecMaxValues.height) { + discardReasons |= DISCARD_REASON_VIDEO_MAX_RESOLUTION_EXCEEDED; + } + if (getMaxInputSize(codecInfo, newFormat) > codecMaxValues.inputSize) { + discardReasons |= DISCARD_REASON_MAX_INPUT_SIZE_EXCEEDED; } - return KEEP_CODEC_RESULT_NO; + + return new DecoderReuseEvaluation( + codecInfo.name, + oldFormat, + newFormat, + discardReasons != 0 ? REUSE_RESULT_NO : evaluation.result, + discardReasons); } @CallSuper @Override - protected void releaseCodec() { - try { - super.releaseCodec(); - } finally { - buffersInCodecCount = 0; - } + protected void resetCodecStateForFlush() { + super.resetCodecStateForFlush(); + buffersInCodecCount = 0; } - @CallSuper @Override - protected boolean flushOrReleaseCodec() { - try { - return super.flushOrReleaseCodec(); - } finally { - buffersInCodecCount = 0; + public void setPlaybackSpeed(float currentPlaybackSpeed, float targetPlaybackSpeed) + throws ExoPlaybackException { + super.setPlaybackSpeed(currentPlaybackSpeed, targetPlaybackSpeed); + frameReleaseHelper.onPlaybackSpeed(currentPlaybackSpeed); + } + + /** + * Returns a maximum input size for a given codec and format. + * + * @param codecInfo Information about the {@link MediaCodec} being configured. + * @param format The format. + * @return A maximum input size in bytes, or {@link Format#NO_VALUE} if a maximum could not be + * determined. + */ + public static int getCodecMaxInputSize(MediaCodecInfo codecInfo, Format format) { + int width = format.width; + int height = format.height; + if (width == Format.NO_VALUE || height == Format.NO_VALUE) { + // We can't infer a maximum input size without video dimensions. + return Format.NO_VALUE; + } + + String sampleMimeType = format.sampleMimeType; + if (MimeTypes.VIDEO_DOLBY_VISION.equals(sampleMimeType)) { + // Dolby vision can be a wrapper around H264 or H265. 
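// --- Editorial sketch, not part of the diff ---------------------------------------------
// getCodecMaxInputSize() above (its per-MIME-type switch follows) derives a maximum
// compressed-sample size from the pixel count and a per-codec minimum compression ratio.
// The formula below is an assumption about MediaCodecRenderer.getMaxSampleSize: roughly
// 1.5 bytes per pixel for 4:2:0 content, divided by the minimum compression ratio.
final class MaxInputSizeSketch {
  static int approxMaxSampleSize(int width, int height, int minCompressionRatio) {
    int pixelCount = width * height;
    return (pixelCount * 3) / (2 * minCompressionRatio);
  }

  // Example: 1080p HEVC with a minimum compression ratio of 2, clamped to the 2 MiB
  // HEVC_MAX_INPUT_SIZE_THRESHOLD floor introduced at the top of this file.
  static int exampleHevc1080pMaxInputSize() {
    return Math.max(2 * 1024 * 1024, approxMaxSampleSize(1920, 1080, 2));
  }
}
// -----------------------------------------------------------------------------------------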
We assume it's wrapping H265 by default + // because it's the common case, and because some devices may fail to allocate the codec when + // the larger buffer size required for H264 is requested. We size buffers for H264 only if the + // format contains sufficient information for us to determine unambiguously that it's a H264 + // profile. + sampleMimeType = MimeTypes.VIDEO_H265; + @Nullable + Pair codecProfileAndLevel = MediaCodecUtil.getCodecProfileAndLevel(format); + if (codecProfileAndLevel != null) { + int profile = codecProfileAndLevel.first; + if (profile == CodecProfileLevel.DolbyVisionProfileDvavSe + || profile == CodecProfileLevel.DolbyVisionProfileDvavPer + || profile == CodecProfileLevel.DolbyVisionProfileDvavPen) { + sampleMimeType = MimeTypes.VIDEO_H264; + } + } + } + + // Attempt to infer a maximum input size from the format. + switch (sampleMimeType) { + case MimeTypes.VIDEO_H263: + case MimeTypes.VIDEO_MP4V: + case MimeTypes.VIDEO_AV1: + // Assume a min compression of 2 similar to the platform's C2SoftAomDec.cpp. + case MimeTypes.VIDEO_VP8: + // Assume a min compression of 2 similar to the platform's SoftVPX.cpp. + return getMaxSampleSize(/* pixelCount= */ width * height, /* minCompressionRatio= */ 2); + case MimeTypes.VIDEO_H265: + // Assume a min compression of 2 similar to the platform's C2SoftHevcDec.cpp, but restrict + // the minimum size. + return max( + HEVC_MAX_INPUT_SIZE_THRESHOLD, + getMaxSampleSize(/* pixelCount= */ width * height, /* minCompressionRatio= */ 2)); + case MimeTypes.VIDEO_H264: + if ("BRAVIA 4K 2015".equals(Util.MODEL) // Sony Bravia 4K + || ("Amazon".equals(Util.MANUFACTURER) + && ("KFSOWI".equals(Util.MODEL) // Kindle Soho + || ("AFTS".equals(Util.MODEL) && codecInfo.secure)))) { // Fire TV Gen 2 + // Use the default value for cases where platform limitations may prevent buffers of the + // calculated maximum input size from being allocated. + return Format.NO_VALUE; + } + // Round up width/height to an integer number of macroblocks. + int maxPixels = Util.ceilDivide(width, 16) * Util.ceilDivide(height, 16) * 16 * 16; + return getMaxSampleSize(maxPixels, /* minCompressionRatio= */ 2); + case MimeTypes.VIDEO_VP9: + return getMaxSampleSize(/* pixelCount= */ width * height, /* minCompressionRatio= */ 4); + default: + // Leave the default max input size. + return Format.NO_VALUE; } } @Override protected float getCodecOperatingRateV23( - float operatingRate, Format format, Format[] streamFormats) { + float targetPlaybackSpeed, Format format, Format[] streamFormats) { // Use the highest known stream frame-rate up front, to avoid having to reconfigure the codec // should an adaptive switch to that stream occur. float maxFrameRate = -1; for (Format streamFormat : streamFormats) { float streamFrameRate = streamFormat.frameRate; if (streamFrameRate != Format.NO_VALUE) { - maxFrameRate = Math.max(maxFrameRate, streamFrameRate); + maxFrameRate = max(maxFrameRate, streamFrameRate); } } - return maxFrameRate == -1 ? CODEC_OPERATING_RATE_UNSET : (maxFrameRate * operatingRate); + return maxFrameRate == -1 ? 
CODEC_OPERATING_RATE_UNSET : (maxFrameRate * targetPlaybackSpeed); } @Override - protected void onCodecInitialized(String name, long initializedTimestampMs, + protected void onCodecInitialized( + String name, + MediaCodecAdapter.Configuration configuration, + long initializedTimestampMs, long initializationDurationMs) { eventDispatcher.decoderInitialized(name, initializedTimestampMs, initializationDurationMs); codecNeedsSetOutputSurfaceWorkaround = codecNeedsSetOutputSurfaceWorkaround(name); codecHandlesHdr10PlusOutOfBandMetadata = - Assertions.checkNotNull(getCodecInfo()).isHdr10PlusOutOfBandMetadataSupported(); + checkNotNull(getCodecInfo()).isHdr10PlusOutOfBandMetadataSupported(); + if (Util.SDK_INT >= 23 && tunneling) { + tunnelingOnFrameRenderedListener = new OnFrameRenderedListenerV23(checkNotNull(getCodec())); + } + } + + @Override + protected void onCodecReleased(String name) { + eventDispatcher.decoderReleased(name); } @Override - protected void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException { - super.onInputFormatChanged(formatHolder); - Format newFormat = formatHolder.format; - eventDispatcher.inputFormatChanged(newFormat); - pendingPixelWidthHeightRatio = newFormat.pixelWidthHeightRatio; - pendingRotationDegrees = newFormat.rotationDegrees; + protected void onCodecError(Exception codecError) { + Log.e(TAG, "Video codec error", codecError); + eventDispatcher.videoCodecError(codecError); + } + + @Override + @Nullable + protected DecoderReuseEvaluation onInputFormatChanged(FormatHolder formatHolder) + throws ExoPlaybackException { + @Nullable DecoderReuseEvaluation evaluation = super.onInputFormatChanged(formatHolder); + eventDispatcher.inputFormatChanged(formatHolder.format, evaluation); + return evaluation; } /** * Called immediately before an input buffer is queued into the codec. * + *
<p>
      In tunneling mode for pre Marshmallow, the buffer is treated as if immediately output. + * * @param buffer The buffer to be queued. + * @throws ExoPlaybackException Thrown if an error occurs handling the input buffer. */ @CallSuper @Override - protected void onQueueInputBuffer(DecoderInputBuffer buffer) { + protected void onQueueInputBuffer(DecoderInputBuffer buffer) throws ExoPlaybackException { // In tunneling mode the device may do frame rate conversion, so in general we can't keep track // of the number of buffers in the codec. if (!tunneling) { buffersInCodecCount++; } - lastInputTimeUs = Math.max(buffer.timeUs, lastInputTimeUs); if (Util.SDK_INT < 23 && tunneling) { // In tunneled mode before API 23 we don't have a way to know when the buffer is output, so // treat it as if it were output immediately. @@ -786,35 +946,57 @@ protected void onQueueInputBuffer(DecoderInputBuffer buffer) { } @Override - protected void onOutputFormatChanged(MediaCodec codec, MediaFormat outputMediaFormat) { - currentMediaFormat = outputMediaFormat; - boolean hasCrop = - outputMediaFormat.containsKey(KEY_CROP_RIGHT) - && outputMediaFormat.containsKey(KEY_CROP_LEFT) - && outputMediaFormat.containsKey(KEY_CROP_BOTTOM) - && outputMediaFormat.containsKey(KEY_CROP_TOP); - int width = - hasCrop - ? outputMediaFormat.getInteger(KEY_CROP_RIGHT) - - outputMediaFormat.getInteger(KEY_CROP_LEFT) - + 1 - : outputMediaFormat.getInteger(MediaFormat.KEY_WIDTH); - int height = - hasCrop - ? outputMediaFormat.getInteger(KEY_CROP_BOTTOM) - - outputMediaFormat.getInteger(KEY_CROP_TOP) - + 1 - : outputMediaFormat.getInteger(MediaFormat.KEY_HEIGHT); - processOutputFormat(codec, width, height); + protected void onOutputFormatChanged(Format format, @Nullable MediaFormat mediaFormat) { + @Nullable MediaCodecAdapter codec = getCodec(); + if (codec != null) { + // Must be applied each time the output format changes. + codec.setVideoScalingMode(scalingMode); + } + if (tunneling) { + currentWidth = format.width; + currentHeight = format.height; + } else { + checkNotNull(mediaFormat); + boolean hasCrop = + mediaFormat.containsKey(KEY_CROP_RIGHT) + && mediaFormat.containsKey(KEY_CROP_LEFT) + && mediaFormat.containsKey(KEY_CROP_BOTTOM) + && mediaFormat.containsKey(KEY_CROP_TOP); + currentWidth = + hasCrop + ? mediaFormat.getInteger(KEY_CROP_RIGHT) - mediaFormat.getInteger(KEY_CROP_LEFT) + 1 + : mediaFormat.getInteger(MediaFormat.KEY_WIDTH); + currentHeight = + hasCrop + ? mediaFormat.getInteger(KEY_CROP_BOTTOM) - mediaFormat.getInteger(KEY_CROP_TOP) + 1 + : mediaFormat.getInteger(MediaFormat.KEY_HEIGHT); + } + currentPixelWidthHeightRatio = format.pixelWidthHeightRatio; + if (Util.SDK_INT >= 21) { + // On API level 21 and above the decoder applies the rotation when rendering to the surface. + // Hence currentUnappliedRotation should always be 0. For 90 and 270 degree rotations, we need + // to flip the width, height and pixel aspect ratio to reflect the rotation that was applied. + if (format.rotationDegrees == 90 || format.rotationDegrees == 270) { + int rotatedHeight = currentWidth; + currentWidth = currentHeight; + currentHeight = rotatedHeight; + currentPixelWidthHeightRatio = 1 / currentPixelWidthHeightRatio; + } + } else { + // On API level 20 and below the decoder does not apply the rotation. 
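// --- Editorial sketch, not part of the diff ---------------------------------------------
// The rotation handling moved into onOutputFormatChanged() above: on API 21+ the decoder
// applies the rotation itself, so for 90/270-degree content the renderer only swaps the
// reported width/height and inverts the pixel aspect ratio. Written out as a pure function:
final class RotationSketch {
  static final class VideoDimensions {
    final int width;
    final int height;
    final float pixelWidthHeightRatio;
    VideoDimensions(int width, int height, float pixelWidthHeightRatio) {
      this.width = width;
      this.height = height;
      this.pixelWidthHeightRatio = pixelWidthHeightRatio;
    }
  }

  static VideoDimensions applyRotationApi21(
      int width, int height, float pixelWidthHeightRatio, int rotationDegrees) {
    if (rotationDegrees == 90 || rotationDegrees == 270) {
      return new VideoDimensions(height, width, 1 / pixelWidthHeightRatio);
    }
    return new VideoDimensions(width, height, pixelWidthHeightRatio);
  }
}
// -----------------------------------------------------------------------------------------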
+ currentUnappliedRotationDegrees = format.rotationDegrees; + } + frameReleaseHelper.onFormatChanged(format.frameRate); } @Override + @TargetApi(29) // codecHandlesHdr10PlusOutOfBandMetadata is false if Util.SDK_INT < 29 protected void handleInputBufferSupplementalData(DecoderInputBuffer buffer) throws ExoPlaybackException { if (!codecHandlesHdr10PlusOutOfBandMetadata) { return; } - ByteBuffer data = Assertions.checkNotNull(buffer.supplementalData); + ByteBuffer data = checkNotNull(buffer.supplementalData); if (data.remaining() >= 7) { // Check for HDR10+ out-of-band metadata. See User_data_registered_itu_t_t35 in ST 2094-40. byte ituTT35CountryCode = data.get(); @@ -827,13 +1009,12 @@ protected void handleInputBufferSupplementalData(DecoderInputBuffer buffer) && ituTT35TerminalProviderCode == 0x003C && ituTT35TerminalProviderOrientedCode == 0x0001 && applicationIdentifier == 4 - && applicationVersion == 0) { + && (applicationVersion == 0 || applicationVersion == 1)) { // The metadata size may vary so allocate a new array every time. This is not too // inefficient because the metadata is only a few tens of bytes. byte[] hdr10PlusInfo = new byte[data.remaining()]; data.get(hdr10PlusInfo); data.position(0); - // If codecHandlesHdr10PlusOutOfBandMetadata is true, this is an API 29 or later build. setHdr10PlusInfoV29(getCodec(), hdr10PlusInfo); } } @@ -843,19 +1024,28 @@ protected void handleInputBufferSupplementalData(DecoderInputBuffer buffer) protected boolean processOutputBuffer( long positionUs, long elapsedRealtimeUs, - MediaCodec codec, - ByteBuffer buffer, + @Nullable MediaCodecAdapter codec, + @Nullable ByteBuffer buffer, int bufferIndex, int bufferFlags, + int sampleCount, long bufferPresentationTimeUs, boolean isDecodeOnlyBuffer, boolean isLastBuffer, Format format) throws ExoPlaybackException { + checkNotNull(codec); // Can not render video without codec + if (initialPositionUs == C.TIME_UNSET) { initialPositionUs = positionUs; } + if (bufferPresentationTimeUs != lastBufferPresentationTimeUs) { + frameReleaseHelper.onNextFrame(bufferPresentationTimeUs); + this.lastBufferPresentationTimeUs = bufferPresentationTimeUs; + } + + long outputStreamOffsetUs = getOutputStreamOffsetUs(); long presentationTimeUs = bufferPresentationTimeUs - outputStreamOffsetUs; if (isDecodeOnlyBuffer && !isLastBuffer) { @@ -863,33 +1053,50 @@ protected boolean processOutputBuffer( return true; } - long earlyUs = bufferPresentationTimeUs - positionUs; - if (surface == dummySurface) { + // Note: Use of double rather than float is intentional for accuracy in the calculations below. + double playbackSpeed = getPlaybackSpeed(); + boolean isStarted = getState() == STATE_STARTED; + long elapsedRealtimeNowUs = SystemClock.elapsedRealtime() * 1000; + + // Calculate how early we are. In other words, the realtime duration that needs to elapse whilst + // the renderer is started before the frame should be rendered. A negative value means that + // we're already late. + long earlyUs = (long) ((bufferPresentationTimeUs - positionUs) / playbackSpeed); + if (isStarted) { + // Account for the elapsed time since the start of this iteration of the rendering loop. + earlyUs -= elapsedRealtimeNowUs - elapsedRealtimeUs; + } + + if (surface == placeholderSurface) { // Skip frames in sync with playback, so we'll be at the right frame if the mode changes. 
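// --- Editorial sketch, not part of the diff ---------------------------------------------
// The new earlyUs calculation from processOutputBuffer() above: how much realtime must
// still elapse before the frame should be rendered, scaled by the playback speed and, when
// the renderer is started, reduced by the time already spent in this render-loop iteration.
final class EarlyUsSketch {
  static long computeEarlyUs(
      long bufferPresentationTimeUs,
      long positionUs,
      double playbackSpeed,
      boolean isStarted,
      long elapsedRealtimeNowUs,
      long elapsedRealtimeUs) {
    long earlyUs = (long) ((bufferPresentationTimeUs - positionUs) / playbackSpeed);
    if (isStarted) {
      // Account for the time that has passed since the start of this iteration of the loop.
      earlyUs -= elapsedRealtimeNowUs - elapsedRealtimeUs;
    }
    return earlyUs; // Negative means the frame is already late.
  }
}
// -----------------------------------------------------------------------------------------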
if (isBufferLate(earlyUs)) { skipOutputBuffer(codec, bufferIndex, presentationTimeUs); + updateVideoFrameProcessingOffsetCounters(earlyUs); return true; } return false; } - long elapsedRealtimeNowUs = SystemClock.elapsedRealtime() * 1000; - long elapsedSinceLastRenderUs = elapsedRealtimeNowUs - lastRenderTimeUs; - boolean isStarted = getState() == STATE_STARTED; + long elapsedSinceLastRenderUs = elapsedRealtimeNowUs - lastRenderRealtimeUs; + boolean shouldRenderFirstFrame = + !renderedFirstFrameAfterEnable + ? (isStarted || mayRenderFirstFrameAfterEnableIfNotStarted) + : !renderedFirstFrameAfterReset; // Don't force output until we joined and the position reached the current stream. boolean forceRenderOutputBuffer = joiningDeadlineMs == C.TIME_UNSET && positionUs >= outputStreamOffsetUs - && (!renderedFirstFrame + && (shouldRenderFirstFrame || (isStarted && shouldForceRenderOutputBuffer(earlyUs, elapsedSinceLastRenderUs))); if (forceRenderOutputBuffer) { long releaseTimeNs = System.nanoTime(); - notifyFrameMetadataListener(presentationTimeUs, releaseTimeNs, format, currentMediaFormat); + notifyFrameMetadataListener(presentationTimeUs, releaseTimeNs, format); if (Util.SDK_INT >= 21) { renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, releaseTimeNs); } else { renderOutputBuffer(codec, bufferIndex, presentationTimeUs); } + updateVideoFrameProcessingOffsetCounters(earlyUs); return true; } @@ -897,24 +1104,17 @@ protected boolean processOutputBuffer( return false; } - // Fine-grained adjustment of earlyUs based on the elapsed time since the start of the current - // iteration of the rendering loop. - long elapsedSinceStartOfLoopUs = elapsedRealtimeNowUs - elapsedRealtimeUs; - earlyUs -= elapsedSinceStartOfLoopUs; - // Compute the buffer's desired release time in nanoseconds. long systemTimeNs = System.nanoTime(); long unadjustedFrameReleaseTimeNs = systemTimeNs + (earlyUs * 1000); // Apply a timestamp adjustment, if there is one. - long adjustedReleaseTimeNs = frameReleaseTimeHelper.adjustReleaseTime( - bufferPresentationTimeUs, unadjustedFrameReleaseTimeNs); + long adjustedReleaseTimeNs = frameReleaseHelper.adjustReleaseTime(unadjustedFrameReleaseTimeNs); earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000; boolean treatDroppedBuffersAsSkipped = joiningDeadlineMs != C.TIME_UNSET; if (shouldDropBuffersToKeyframe(earlyUs, elapsedRealtimeUs, isLastBuffer) - && maybeDropBuffersToKeyframe( - codec, bufferIndex, presentationTimeUs, positionUs, treatDroppedBuffersAsSkipped)) { + && maybeDropBuffersToKeyframe(positionUs, treatDroppedBuffersAsSkipped)) { return false; } else if (shouldDropOutputBuffer(earlyUs, elapsedRealtimeUs, isLastBuffer)) { if (treatDroppedBuffersAsSkipped) { @@ -922,15 +1122,16 @@ && maybeDropBuffersToKeyframe( } else { dropOutputBuffer(codec, bufferIndex, presentationTimeUs); } + updateVideoFrameProcessingOffsetCounters(earlyUs); return true; } if (Util.SDK_INT >= 21) { // Let the underlying framework time the release. 
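// --- Editorial sketch, not part of the diff ---------------------------------------------
// A simplified restatement of the order of decisions in processOutputBuffer() above. The
// real method also handles tunneling, the placeholder surface and metadata listeners; this
// only shows how one late buffer is classified. The enum is illustrative.
final class LateBufferPolicySketch {
  enum Action { FORCE_RENDER, DROP_TO_KEYFRAME, DROP, SKIP, RENDER_OR_WAIT }

  static Action classifyOutputBuffer(
      boolean forceRenderOutputBuffer,
      boolean shouldDropToKeyframe,
      boolean shouldDropBuffer,
      boolean treatDroppedBuffersAsSkipped) {
    if (forceRenderOutputBuffer) {
      return Action.FORCE_RENDER;     // First frame after reset/enable, or joining finished.
    }
    if (shouldDropToKeyframe) {
      return Action.DROP_TO_KEYFRAME; // Very late: skip source samples up to a keyframe.
    }
    if (shouldDropBuffer) {
      // While joining, drops are reported as skips so they don't count against quality stats.
      return treatDroppedBuffersAsSkipped ? Action.SKIP : Action.DROP;
    }
    return Action.RENDER_OR_WAIT;     // Render now or let the platform time the release.
  }
}
// -----------------------------------------------------------------------------------------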
if (earlyUs < 50000) { - notifyFrameMetadataListener( - presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat); + notifyFrameMetadataListener(presentationTimeUs, adjustedReleaseTimeNs, format); renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, adjustedReleaseTimeNs); + updateVideoFrameProcessingOffsetCounters(earlyUs); return true; } } else { @@ -947,9 +1148,9 @@ && maybeDropBuffersToKeyframe( return false; } } - notifyFrameMetadataListener( - presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat); + notifyFrameMetadataListener(presentationTimeUs, adjustedReleaseTimeNs, format); renderOutputBuffer(codec, bufferIndex, presentationTimeUs); + updateVideoFrameProcessingOffsetCounters(earlyUs); return true; } } @@ -958,51 +1159,17 @@ && maybeDropBuffersToKeyframe( return false; } - private void processOutputFormat(MediaCodec codec, int width, int height) { - currentWidth = width; - currentHeight = height; - currentPixelWidthHeightRatio = pendingPixelWidthHeightRatio; - if (Util.SDK_INT >= 21) { - // On API level 21 and above the decoder applies the rotation when rendering to the surface. - // Hence currentUnappliedRotation should always be 0. For 90 and 270 degree rotations, we need - // to flip the width, height and pixel aspect ratio to reflect the rotation that was applied. - if (pendingRotationDegrees == 90 || pendingRotationDegrees == 270) { - int rotatedHeight = currentWidth; - currentWidth = currentHeight; - currentHeight = rotatedHeight; - currentPixelWidthHeightRatio = 1 / currentPixelWidthHeightRatio; - } - } else { - // On API level 20 and below the decoder does not apply the rotation. - currentUnappliedRotationDegrees = pendingRotationDegrees; - } - // Must be applied each time the output MediaFormat changes. - codec.setVideoScalingMode(scalingMode); - } - private void notifyFrameMetadataListener( - long presentationTimeUs, long releaseTimeNs, Format format, MediaFormat mediaFormat) { + long presentationTimeUs, long releaseTimeNs, Format format) { if (frameMetadataListener != null) { frameMetadataListener.onVideoFrameAboutToBeRendered( - presentationTimeUs, releaseTimeNs, format, mediaFormat); + presentationTimeUs, releaseTimeNs, format, getCodecOutputMediaFormat()); } } - /** - * Returns the offset that should be subtracted from {@code bufferPresentationTimeUs} in {@link - * #processOutputBuffer(long, long, MediaCodec, ByteBuffer, int, int, long, boolean, boolean, - * Format)} to get the playback position with respect to the media. - */ - protected long getOutputStreamOffsetUs() { - return outputStreamOffsetUs; - } - /** Called when a buffer was processed in tunneling mode. */ - protected void onProcessedTunneledBuffer(long presentationTimeUs) { - @Nullable Format format = updateOutputFormatForTime(presentationTimeUs); - if (format != null) { - processOutputFormat(getCodec(), format.width, format.height); - } + protected void onProcessedTunneledBuffer(long presentationTimeUs) throws ExoPlaybackException { + updateOutputFormatForTime(presentationTimeUs); maybeNotifyVideoSizeChanged(); decoderCounters.renderedOutputBufferCount++; maybeNotifyRenderedFirstFrame(); @@ -1014,35 +1181,19 @@ private void onProcessedTunneledEndOfStream() { setPendingOutputEndOfStream(); } - /** - * Called when an output buffer is successfully processed. - * - * @param presentationTimeUs The timestamp associated with the output buffer. 
- */ @CallSuper @Override protected void onProcessedOutputBuffer(long presentationTimeUs) { + super.onProcessedOutputBuffer(presentationTimeUs); if (!tunneling) { buffersInCodecCount--; } - while (pendingOutputStreamOffsetCount != 0 - && presentationTimeUs >= pendingOutputStreamSwitchTimesUs[0]) { - outputStreamOffsetUs = pendingOutputStreamOffsetsUs[0]; - pendingOutputStreamOffsetCount--; - System.arraycopy( - pendingOutputStreamOffsetsUs, - /* srcPos= */ 1, - pendingOutputStreamOffsetsUs, - /* destPos= */ 0, - pendingOutputStreamOffsetCount); - System.arraycopy( - pendingOutputStreamSwitchTimesUs, - /* srcPos= */ 1, - pendingOutputStreamSwitchTimesUs, - /* destPos= */ 0, - pendingOutputStreamOffsetCount); - clearRenderedFirstFrame(); - } + } + + @Override + protected void onProcessedStreamChange() { + super.onProcessedStreamChange(); + clearRenderedFirstFrame(); } /** @@ -1095,7 +1246,7 @@ protected boolean shouldForceRenderOutputBuffer(long earlyUs, long elapsedSinceL * @param index The index of the output buffer to skip. * @param presentationTimeUs The presentation time of the output buffer, in microseconds. */ - protected void skipOutputBuffer(MediaCodec codec, int index, long presentationTimeUs) { + protected void skipOutputBuffer(MediaCodecAdapter codec, int index, long presentationTimeUs) { TraceUtil.beginSection("skipVideoBuffer"); codec.releaseOutputBuffer(index, false); TraceUtil.endSection(); @@ -1109,11 +1260,12 @@ protected void skipOutputBuffer(MediaCodec codec, int index, long presentationTi * @param index The index of the output buffer to drop. * @param presentationTimeUs The presentation time of the output buffer, in microseconds. */ - protected void dropOutputBuffer(MediaCodec codec, int index, long presentationTimeUs) { + protected void dropOutputBuffer(MediaCodecAdapter codec, int index, long presentationTimeUs) { TraceUtil.beginSection("dropVideoBuffer"); codec.releaseOutputBuffer(index, false); TraceUtil.endSection(); - updateDroppedBufferCounters(1); + updateDroppedBufferCounters( + /* droppedInputBufferCount= */ 0, /* droppedDecoderBufferCount= */ 1); } /** @@ -1121,9 +1273,6 @@ protected void dropOutputBuffer(MediaCodec codec, int index, long presentationTi * position. If no such keyframe exists, as the playback position is inside the same group of * pictures as the buffer being processed, returns {@code false}. Returns {@code true} otherwise. * - * @param codec The codec that owns the output buffer. - * @param index The index of the output buffer to drop. - * @param presentationTimeUs The presentation time of the output buffer, in microseconds. * @param positionUs The current playback position, in microseconds. * @param treatDroppedBuffersAsSkipped Whether dropped buffers should be treated as intentionally * skipped. @@ -1131,46 +1280,58 @@ protected void dropOutputBuffer(MediaCodec codec, int index, long presentationTi * @throws ExoPlaybackException If an error occurs flushing the codec. 
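// --- Editorial sketch, not part of the diff ---------------------------------------------
// skipOutputBuffer() and dropOutputBuffer() above both hand the buffer back to the codec
// without rendering it; they differ only in which DecoderCounters fields they update.
// Rendering passes render = true, or a release timestamp on API 21+ so the platform can
// time the release itself.
final class BufferReleaseSketch {
  static void releaseWithoutRendering(MediaCodecAdapter codec, int index) {
    codec.releaseOutputBuffer(index, /* render= */ false); // Skip or drop.
  }

  static void releaseAndRenderNow(MediaCodecAdapter codec, int index) {
    codec.releaseOutputBuffer(index, /* render= */ true); // Pre-API-21 render path.
  }

  @RequiresApi(21)
  static void releaseAtTime(MediaCodecAdapter codec, int index, long releaseTimeNs) {
    codec.releaseOutputBuffer(index, releaseTimeNs); // Framework-timed render path.
  }
}
// -----------------------------------------------------------------------------------------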
*/ protected boolean maybeDropBuffersToKeyframe( - MediaCodec codec, - int index, - long presentationTimeUs, - long positionUs, - boolean treatDroppedBuffersAsSkipped) - throws ExoPlaybackException { + long positionUs, boolean treatDroppedBuffersAsSkipped) throws ExoPlaybackException { int droppedSourceBufferCount = skipSource(positionUs); if (droppedSourceBufferCount == 0) { return false; } - decoderCounters.droppedToKeyframeCount++; // We dropped some buffers to catch up, so update the decoder counters and flush the codec, // which releases all pending buffers buffers including the current output buffer. - int totalDroppedBufferCount = buffersInCodecCount + droppedSourceBufferCount; if (treatDroppedBuffersAsSkipped) { - decoderCounters.skippedOutputBufferCount += totalDroppedBufferCount; + decoderCounters.skippedInputBufferCount += droppedSourceBufferCount; + decoderCounters.skippedOutputBufferCount += buffersInCodecCount; } else { - updateDroppedBufferCounters(totalDroppedBufferCount); + decoderCounters.droppedToKeyframeCount++; + updateDroppedBufferCounters( + droppedSourceBufferCount, /* droppedDecoderBufferCount= */ buffersInCodecCount); } flushOrReinitializeCodec(); return true; } /** - * Updates decoder counters to reflect that {@code droppedBufferCount} additional buffers were - * dropped. + * Updates local counters and {@link #decoderCounters} to reflect that buffers were dropped. * - * @param droppedBufferCount The number of additional dropped buffers. + * @param droppedInputBufferCount The number of buffers dropped from the source before being + * passed to the decoder. + * @param droppedDecoderBufferCount The number of buffers dropped after being passed to the + * decoder. */ - protected void updateDroppedBufferCounters(int droppedBufferCount) { - decoderCounters.droppedBufferCount += droppedBufferCount; - droppedFrames += droppedBufferCount; - consecutiveDroppedFrameCount += droppedBufferCount; - decoderCounters.maxConsecutiveDroppedBufferCount = Math.max(consecutiveDroppedFrameCount, - decoderCounters.maxConsecutiveDroppedBufferCount); + protected void updateDroppedBufferCounters( + int droppedInputBufferCount, int droppedDecoderBufferCount) { + decoderCounters.droppedInputBufferCount += droppedInputBufferCount; + int totalDroppedBufferCount = droppedInputBufferCount + droppedDecoderBufferCount; + decoderCounters.droppedBufferCount += totalDroppedBufferCount; + droppedFrames += totalDroppedBufferCount; + consecutiveDroppedFrameCount += totalDroppedBufferCount; + decoderCounters.maxConsecutiveDroppedBufferCount = + max(consecutiveDroppedFrameCount, decoderCounters.maxConsecutiveDroppedBufferCount); if (maxDroppedFramesToNotify > 0 && droppedFrames >= maxDroppedFramesToNotify) { maybeNotifyDroppedFrames(); } } + /** + * Updates local counters and {@link DecoderCounters} with a new video frame processing offset. + * + * @param processingOffsetUs The video frame processing offset. + */ + protected void updateVideoFrameProcessingOffsetCounters(long processingOffsetUs) { + decoderCounters.addVideoFrameProcessingOffset(processingOffsetUs); + totalVideoFrameProcessingOffsetUs += processingOffsetUs; + videoFrameProcessingOffsetCount++; + } + /** * Renders the output buffer with the specified index. This method is only called if the platform * API version of the device is less than 21. @@ -1179,12 +1340,12 @@ protected void updateDroppedBufferCounters(int droppedBufferCount) { * @param index The index of the output buffer to drop. 
* @param presentationTimeUs The presentation time of the output buffer, in microseconds. */ - protected void renderOutputBuffer(MediaCodec codec, int index, long presentationTimeUs) { + protected void renderOutputBuffer(MediaCodecAdapter codec, int index, long presentationTimeUs) { maybeNotifyVideoSizeChanged(); TraceUtil.beginSection("releaseOutputBuffer"); codec.releaseOutputBuffer(index, true); TraceUtil.endSection(); - lastRenderTimeUs = SystemClock.elapsedRealtime() * 1000; + lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000; decoderCounters.renderedOutputBufferCount++; consecutiveDroppedFrameCount = 0; maybeNotifyRenderedFirstFrame(); @@ -1199,39 +1360,50 @@ protected void renderOutputBuffer(MediaCodec codec, int index, long presentation * @param presentationTimeUs The presentation time of the output buffer, in microseconds. * @param releaseTimeNs The wallclock time at which the frame should be displayed, in nanoseconds. */ - @TargetApi(21) + @RequiresApi(21) protected void renderOutputBufferV21( - MediaCodec codec, int index, long presentationTimeUs, long releaseTimeNs) { + MediaCodecAdapter codec, int index, long presentationTimeUs, long releaseTimeNs) { maybeNotifyVideoSizeChanged(); TraceUtil.beginSection("releaseOutputBuffer"); codec.releaseOutputBuffer(index, releaseTimeNs); TraceUtil.endSection(); - lastRenderTimeUs = SystemClock.elapsedRealtime() * 1000; + lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000; decoderCounters.renderedOutputBufferCount++; consecutiveDroppedFrameCount = 0; maybeNotifyRenderedFirstFrame(); } - private boolean shouldUseDummySurface(MediaCodecInfo codecInfo) { + private boolean shouldUsePlaceholderSurface(MediaCodecInfo codecInfo) { return Util.SDK_INT >= 23 && !tunneling && !codecNeedsSetOutputSurfaceWorkaround(codecInfo.name) - && (!codecInfo.secure || DummySurface.isSecureSupported(context)); + && (!codecInfo.secure || PlaceholderSurface.isSecureSupported(context)); + } + + @RequiresApi(17) + private void releasePlaceholderSurface() { + if (surface == placeholderSurface) { + surface = null; + } + placeholderSurface.release(); + placeholderSurface = null; } private void setJoiningDeadlineMs() { - joiningDeadlineMs = allowedJoiningTimeMs > 0 - ? (SystemClock.elapsedRealtime() + allowedJoiningTimeMs) : C.TIME_UNSET; + joiningDeadlineMs = + allowedJoiningTimeMs > 0 + ? (SystemClock.elapsedRealtime() + allowedJoiningTimeMs) + : C.TIME_UNSET; } private void clearRenderedFirstFrame() { - renderedFirstFrame = false; + renderedFirstFrameAfterReset = false; // The first frame notification is triggered by renderOutputBuffer or renderOutputBufferV21 for // non-tunneled playback, onQueueInputBuffer for tunneled playback prior to API level 23, and // OnFrameRenderedListenerV23.onFrameRenderedListener for tunneled playback on API level 23 and // above. if (Util.SDK_INT >= 23 && tunneling) { - MediaCodec codec = getCodec(); + @Nullable MediaCodecAdapter codec = getCodec(); // If codec is null then the listener will be instantiated in configureCodec. 
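// --- Editorial sketch, not part of the diff ---------------------------------------------
// The conditions from shouldUsePlaceholderSurface() above, spelled out: a placeholder
// (off-screen) surface is only an option on API 23+, never in tunneling mode, never for
// codecs that need the setOutputSurface workaround, and for secure codecs only when a
// secure placeholder surface can be created on this device.
final class PlaceholderSurfaceSketch {
  static boolean canUsePlaceholderSurface(
      int sdkInt,
      boolean tunneling,
      boolean codecNeedsSetOutputSurfaceWorkaround,
      boolean codecIsSecure,
      boolean securePlaceholderSupported) {
    return sdkInt >= 23
        && !tunneling
        && !codecNeedsSetOutputSurfaceWorkaround
        && (!codecIsSecure || securePlaceholderSupported);
  }
}
// -----------------------------------------------------------------------------------------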
if (codec != null) { tunnelingOnFrameRenderedListener = new OnFrameRenderedListenerV23(codec); @@ -1240,43 +1412,44 @@ private void clearRenderedFirstFrame() { } /* package */ void maybeNotifyRenderedFirstFrame() { - if (!renderedFirstFrame) { - renderedFirstFrame = true; + renderedFirstFrameAfterEnable = true; + if (!renderedFirstFrameAfterReset) { + renderedFirstFrameAfterReset = true; eventDispatcher.renderedFirstFrame(surface); + haveReportedFirstFrameRenderedForCurrentSurface = true; } } private void maybeRenotifyRenderedFirstFrame() { - if (renderedFirstFrame) { + if (haveReportedFirstFrameRenderedForCurrentSurface) { eventDispatcher.renderedFirstFrame(surface); } } private void clearReportedVideoSize() { - reportedWidth = Format.NO_VALUE; - reportedHeight = Format.NO_VALUE; - reportedPixelWidthHeightRatio = Format.NO_VALUE; - reportedUnappliedRotationDegrees = Format.NO_VALUE; + reportedVideoSize = null; } private void maybeNotifyVideoSizeChanged() { if ((currentWidth != Format.NO_VALUE || currentHeight != Format.NO_VALUE) - && (reportedWidth != currentWidth || reportedHeight != currentHeight - || reportedUnappliedRotationDegrees != currentUnappliedRotationDegrees - || reportedPixelWidthHeightRatio != currentPixelWidthHeightRatio)) { - eventDispatcher.videoSizeChanged(currentWidth, currentHeight, currentUnappliedRotationDegrees, - currentPixelWidthHeightRatio); - reportedWidth = currentWidth; - reportedHeight = currentHeight; - reportedUnappliedRotationDegrees = currentUnappliedRotationDegrees; - reportedPixelWidthHeightRatio = currentPixelWidthHeightRatio; + && (reportedVideoSize == null + || reportedVideoSize.width != currentWidth + || reportedVideoSize.height != currentHeight + || reportedVideoSize.unappliedRotationDegrees != currentUnappliedRotationDegrees + || reportedVideoSize.pixelWidthHeightRatio != currentPixelWidthHeightRatio)) { + reportedVideoSize = + new VideoSize( + currentWidth, + currentHeight, + currentUnappliedRotationDegrees, + currentPixelWidthHeightRatio); + eventDispatcher.videoSizeChanged(reportedVideoSize); } } private void maybeRenotifyVideoSizeChanged() { - if (reportedWidth != Format.NO_VALUE || reportedHeight != Format.NO_VALUE) { - eventDispatcher.videoSizeChanged(reportedWidth, reportedHeight, - reportedUnappliedRotationDegrees, reportedPixelWidthHeightRatio); + if (reportedVideoSize != null) { + eventDispatcher.videoSizeChanged(reportedVideoSize); } } @@ -1290,6 +1463,15 @@ private void maybeNotifyDroppedFrames() { } } + private void maybeNotifyVideoFrameProcessingOffset() { + if (videoFrameProcessingOffsetCount != 0) { + eventDispatcher.reportVideoFrameProcessingOffset( + totalVideoFrameProcessingOffsetUs, videoFrameProcessingOffsetCount); + totalVideoFrameProcessingOffsetUs = 0; + videoFrameProcessingOffsetCount = 0; + } + } + private static boolean isBufferLate(long earlyUs) { // Class a buffer as late if it should have been presented more than 30 ms ago. 
return earlyUs < -30000; @@ -1300,19 +1482,19 @@ private static boolean isBufferVeryLate(long earlyUs) { return earlyUs < -500000; } - @TargetApi(29) - private static void setHdr10PlusInfoV29(MediaCodec codec, byte[] hdr10PlusInfo) { + @RequiresApi(29) + private static void setHdr10PlusInfoV29(MediaCodecAdapter codec, byte[] hdr10PlusInfo) { Bundle codecParameters = new Bundle(); codecParameters.putByteArray(MediaCodec.PARAMETER_KEY_HDR10_PLUS_INFO, hdr10PlusInfo); codec.setParameters(codecParameters); } - @TargetApi(23) - private static void setOutputSurfaceV23(MediaCodec codec, Surface surface) { + @RequiresApi(23) + protected void setOutputSurfaceV23(MediaCodecAdapter codec, Surface surface) { codec.setOutputSurface(surface); } - @TargetApi(21) + @RequiresApi(21) private static void configureTunnelingV21(MediaFormat mediaFormat, int tunnelingAudioSessionId) { mediaFormat.setFeatureEnabled(CodecCapabilities.FEATURE_TunneledPlayback, true); mediaFormat.setInteger(MediaFormat.KEY_AUDIO_SESSION_ID, tunnelingAudioSessionId); @@ -1333,6 +1515,7 @@ private static void configureTunnelingV21(MediaFormat mediaFormat, int tunneling * @return The framework {@link MediaFormat} that should be used to configure the decoder. */ @SuppressLint("InlinedApi") + @TargetApi(21) // tunnelingAudioSessionId is unset if Util.SDK_INT < 21 protected MediaFormat getMediaFormat( Format format, String codecMimeType, @@ -1399,8 +1582,7 @@ protected CodecMaxValues getCodecMaxValues( // The single entry in streamFormats must correspond to the format for which the codec is // being configured. if (maxInputSize != Format.NO_VALUE) { - int codecMaxInputSize = - getCodecMaxInputSize(codecInfo, format.sampleMimeType, format.width, format.height); + int codecMaxInputSize = getCodecMaxInputSize(codecInfo, format); if (codecMaxInputSize != Format.NO_VALUE) { // Scale up the initial video decoder maximum input size so playlist item transitions with // small increases in maximum sample size don't require reinitialization. This only makes @@ -1408,32 +1590,37 @@ protected CodecMaxValues getCodecMaxValues( int scaledMaxInputSize = (int) (maxInputSize * INITIAL_FORMAT_MAX_INPUT_SIZE_SCALE_FACTOR); // Avoid exceeding the maximum expected for the codec. - maxInputSize = Math.min(scaledMaxInputSize, codecMaxInputSize); + maxInputSize = min(scaledMaxInputSize, codecMaxInputSize); } } return new CodecMaxValues(maxWidth, maxHeight, maxInputSize); } boolean haveUnknownDimensions = false; for (Format streamFormat : streamFormats) { - if (codecInfo.isSeamlessAdaptationSupported( - format, streamFormat, /* isNewFormatComplete= */ false)) { + if (format.colorInfo != null && streamFormat.colorInfo == null) { + // streamFormat likely has incomplete color information. Copy the complete color information + // from format to avoid codec re-use being ruled out for only this reason. 
+ streamFormat = streamFormat.buildUpon().setColorInfo(format.colorInfo).build(); + } + if (codecInfo.canReuseCodec(format, streamFormat).result != REUSE_RESULT_NO) { haveUnknownDimensions |= (streamFormat.width == Format.NO_VALUE || streamFormat.height == Format.NO_VALUE); - maxWidth = Math.max(maxWidth, streamFormat.width); - maxHeight = Math.max(maxHeight, streamFormat.height); - maxInputSize = Math.max(maxInputSize, getMaxInputSize(codecInfo, streamFormat)); + maxWidth = max(maxWidth, streamFormat.width); + maxHeight = max(maxHeight, streamFormat.height); + maxInputSize = max(maxInputSize, getMaxInputSize(codecInfo, streamFormat)); } } if (haveUnknownDimensions) { Log.w(TAG, "Resolutions unknown. Codec max resolution: " + maxWidth + "x" + maxHeight); - Point codecMaxSize = getCodecMaxSize(codecInfo, format); + @Nullable Point codecMaxSize = getCodecMaxSize(codecInfo, format); if (codecMaxSize != null) { - maxWidth = Math.max(maxWidth, codecMaxSize.x); - maxHeight = Math.max(maxHeight, codecMaxSize.y); + maxWidth = max(maxWidth, codecMaxSize.x); + maxHeight = max(maxHeight, codecMaxSize.y); maxInputSize = - Math.max( + max( maxInputSize, - getCodecMaxInputSize(codecInfo, format.sampleMimeType, maxWidth, maxHeight)); + getCodecMaxInputSize( + codecInfo, format.buildUpon().setWidth(maxWidth).setHeight(maxHeight).build())); Log.w(TAG, "Codec max resolution adjusted to: " + maxWidth + "x" + maxHeight); } } @@ -1441,9 +1628,9 @@ protected CodecMaxValues getCodecMaxValues( } @Override - protected DecoderException createDecoderException( + protected MediaCodecDecoderException createDecoderException( Throwable cause, @Nullable MediaCodecInfo codecInfo) { - return new VideoDecoderException(cause, codecInfo, surface); + return new MediaCodecVideoDecoderException(cause, codecInfo, surface); } /** @@ -1453,8 +1640,10 @@ protected DecoderException createDecoderException( * * @param codecInfo Information about the {@link MediaCodec} being configured. * @param format The {@link Format} for which the codec is being configured. - * @return The maximum video size to use, or null if the size of {@code format} should be used. + * @return The maximum video size to use, or {@code null} if the size of {@code format} should be + * used. */ + @Nullable private static Point getCodecMaxSize(MediaCodecInfo codecInfo, Format format) { boolean isVerticalVideo = format.height > format.width; int formatLongEdgePx = isVerticalVideo ? format.height : format.width; @@ -1466,8 +1655,10 @@ private static Point getCodecMaxSize(MediaCodecInfo codecInfo, Format format) { // Don't return a size not larger than the format for which the codec is being configured. return null; } else if (Util.SDK_INT >= 21) { - Point alignedSize = codecInfo.alignVideoSizeV21(isVerticalVideo ? shortEdgePx : longEdgePx, - isVerticalVideo ? longEdgePx : shortEdgePx); + Point alignedSize = + codecInfo.alignVideoSizeV21( + isVerticalVideo ? shortEdgePx : longEdgePx, + isVerticalVideo ? longEdgePx : shortEdgePx); float frameRate = format.frameRate; if (codecInfo.isVideoSizeAndRateSupportedV21(alignedSize.x, alignedSize.y, frameRate)) { return alignedSize; @@ -1499,7 +1690,7 @@ private static Point getCodecMaxSize(MediaCodecInfo codecInfo, Format format) { * @return A maximum input buffer size in bytes, or {@link Format#NO_VALUE} if a maximum could not * be determined. 
*/ - private static int getMaxInputSize(MediaCodecInfo codecInfo, Format format) { + protected static int getMaxInputSize(MediaCodecInfo codecInfo, Format format) { if (format.maxInputSize != Format.NO_VALUE) { // The format defines an explicit maximum input size. Add the total size of initialization // data buffers, as they may need to be queued in the same input buffer as the largest sample. @@ -1510,69 +1701,10 @@ private static int getMaxInputSize(MediaCodecInfo codecInfo, Format format) { } return format.maxInputSize + totalInitializationDataSize; } else { - // Calculated maximum input sizes are overestimates, so it's not necessary to add the size of - // initialization data. - return getCodecMaxInputSize(codecInfo, format.sampleMimeType, format.width, format.height); + return getCodecMaxInputSize(codecInfo, format); } } - /** - * Returns a maximum input size for a given codec, MIME type, width and height. - * - * @param codecInfo Information about the {@link MediaCodec} being configured. - * @param sampleMimeType The format mime type. - * @param width The width in pixels. - * @param height The height in pixels. - * @return A maximum input size in bytes, or {@link Format#NO_VALUE} if a maximum could not be - * determined. - */ - private static int getCodecMaxInputSize( - MediaCodecInfo codecInfo, String sampleMimeType, int width, int height) { - if (width == Format.NO_VALUE || height == Format.NO_VALUE) { - // We can't infer a maximum input size without video dimensions. - return Format.NO_VALUE; - } - - // Attempt to infer a maximum input size from the format. - int maxPixels; - int minCompressionRatio; - switch (sampleMimeType) { - case MimeTypes.VIDEO_H263: - case MimeTypes.VIDEO_MP4V: - maxPixels = width * height; - minCompressionRatio = 2; - break; - case MimeTypes.VIDEO_H264: - if ("BRAVIA 4K 2015".equals(Util.MODEL) // Sony Bravia 4K - || ("Amazon".equals(Util.MANUFACTURER) - && ("KFSOWI".equals(Util.MODEL) // Kindle Soho - || ("AFTS".equals(Util.MODEL) && codecInfo.secure)))) { // Fire TV Gen 2 - // Use the default value for cases where platform limitations may prevent buffers of the - // calculated maximum input size from being allocated. - return Format.NO_VALUE; - } - // Round up width/height to an integer number of macroblocks. - maxPixels = Util.ceilDivide(width, 16) * Util.ceilDivide(height, 16) * 16 * 16; - minCompressionRatio = 2; - break; - case MimeTypes.VIDEO_VP8: - // VPX does not specify a ratio so use the values from the platform's SoftVPX.cpp. - maxPixels = width * height; - minCompressionRatio = 2; - break; - case MimeTypes.VIDEO_H265: - case MimeTypes.VIDEO_VP9: - maxPixels = width * height; - minCompressionRatio = 4; - break; - default: - // Leave the default max input size. - return Format.NO_VALUE; - } - // Estimate the maximum input size assuming three channel 4:2:0 subsampled input frames. - return (maxPixels * 3) / (2 * minCompressionRatio); - } - /** * Returns whether the device is known to do post processing by default that isn't compatible with * ExoPlayer. @@ -1620,178 +1752,7 @@ protected boolean codecNeedsSetOutputSurfaceWorkaround(String name) { } synchronized (MediaCodecVideoRenderer.class) { if (!evaluatedDeviceNeedsSetOutputSurfaceWorkaround) { - if ("dangal".equals(Util.DEVICE)) { - // Workaround for MiTV devices: - // https://github.com/google/ExoPlayer/issues/5169, - // https://github.com/google/ExoPlayer/issues/6899. 
- deviceNeedsSetOutputSurfaceWorkaround = true; - } else if (Util.SDK_INT <= 27 && "HWEML".equals(Util.DEVICE)) { - // Workaround for Huawei P20: - // https://github.com/google/ExoPlayer/issues/4468#issuecomment-459291645. - deviceNeedsSetOutputSurfaceWorkaround = true; - } else if (Util.SDK_INT >= 27) { - // In general, devices running API level 27 or later should be unaffected. Do nothing. - } else { - // Enable the workaround on a per-device basis. Works around: - // https://github.com/google/ExoPlayer/issues/3236, - // https://github.com/google/ExoPlayer/issues/3355, - // https://github.com/google/ExoPlayer/issues/3439, - // https://github.com/google/ExoPlayer/issues/3724, - // https://github.com/google/ExoPlayer/issues/3835, - // https://github.com/google/ExoPlayer/issues/4006, - // https://github.com/google/ExoPlayer/issues/4084, - // https://github.com/google/ExoPlayer/issues/4104, - // https://github.com/google/ExoPlayer/issues/4134, - // https://github.com/google/ExoPlayer/issues/4315, - // https://github.com/google/ExoPlayer/issues/4419, - // https://github.com/google/ExoPlayer/issues/4460, - // https://github.com/google/ExoPlayer/issues/4468, - // https://github.com/google/ExoPlayer/issues/5312, - // https://github.com/google/ExoPlayer/issues/6503. - switch (Util.DEVICE) { - case "1601": - case "1713": - case "1714": - case "A10-70F": - case "A10-70L": - case "A1601": - case "A2016a40": - case "A7000-a": - case "A7000plus": - case "A7010a48": - case "A7020a48": - case "AquaPowerM": - case "ASUS_X00AD_2": - case "Aura_Note_2": - case "BLACK-1X": - case "BRAVIA_ATV2": - case "BRAVIA_ATV3_4K": - case "C1": - case "ComioS1": - case "CP8676_I02": - case "CPH1609": - case "CPY83_I00": - case "cv1": - case "cv3": - case "deb": - case "E5643": - case "ELUGA_A3_Pro": - case "ELUGA_Note": - case "ELUGA_Prim": - case "ELUGA_Ray_X": - case "EverStar_S": - case "F3111": - case "F3113": - case "F3116": - case "F3211": - case "F3213": - case "F3215": - case "F3311": - case "flo": - case "fugu": - case "GiONEE_CBL7513": - case "GiONEE_GBL7319": - case "GIONEE_GBL7360": - case "GIONEE_SWW1609": - case "GIONEE_SWW1627": - case "GIONEE_SWW1631": - case "GIONEE_WBL5708": - case "GIONEE_WBL7365": - case "GIONEE_WBL7519": - case "griffin": - case "htc_e56ml_dtul": - case "hwALE-H": - case "HWBLN-H": - case "HWCAM-H": - case "HWVNS-H": - case "HWWAS-H": - case "i9031": - case "iball8735_9806": - case "Infinix-X572": - case "iris60": - case "itel_S41": - case "j2xlteins": - case "JGZ": - case "K50a40": - case "kate": - case "l5460": - case "le_x6": - case "LS-5017": - case "M5c": - case "manning": - case "marino_f": - case "MEIZU_M5": - case "mh": - case "mido": - case "MX6": - case "namath": - case "nicklaus_f": - case "NX541J": - case "NX573J": - case "OnePlus5T": - case "p212": - case "P681": - case "P85": - case "panell_d": - case "panell_dl": - case "panell_ds": - case "panell_dt": - case "PB2-670M": - case "PGN528": - case "PGN610": - case "PGN611": - case "Phantom6": - case "Pixi4-7_3G": - case "Pixi5-10_4G": - case "PLE": - case "PRO7S": - case "Q350": - case "Q4260": - case "Q427": - case "Q4310": - case "Q5": - case "QM16XE_U": - case "QX1": - case "santoni": - case "Slate_Pro": - case "SVP-DTV15": - case "s905x018": - case "taido_row": - case "TB3-730F": - case "TB3-730X": - case "TB3-850F": - case "TB3-850M": - case "tcl_eu": - case "V1": - case "V23GB": - case "V5": - case "vernee_M5": - case "watson": - case "whyred": - case "woods_f": - case "woods_fn": - case "X3_HK": - case "XE2X": - 
case "XT1663": - case "Z12_PRO": - case "Z80": - deviceNeedsSetOutputSurfaceWorkaround = true; - break; - default: - // Do nothing. - break; - } - switch (Util.MODEL) { - case "AFTA": - case "AFTN": - case "JSN-L21": - deviceNeedsSetOutputSurfaceWorkaround = true; - break; - default: - // Do nothing. - break; - } - } + deviceNeedsSetOutputSurfaceWorkaround = evaluateDeviceNeedsSetOutputSurfaceWorkaround(); evaluatedDeviceNeedsSetOutputSurfaceWorkaround = true; } } @@ -1813,24 +1774,251 @@ public CodecMaxValues(int width, int height, int inputSize) { this.height = height; this.inputSize = inputSize; } + } + /** + * Returns the maximum sample size assuming three channel 4:2:0 subsampled input frames with the + * specified {@code minCompressionRatio} + * + * @param pixelCount The number of pixels + * @param minCompressionRatio The minimum compression ratio + */ + private static int getMaxSampleSize(int pixelCount, int minCompressionRatio) { + return (pixelCount * 3) / (2 * minCompressionRatio); + } + + private static boolean evaluateDeviceNeedsSetOutputSurfaceWorkaround() { + if (Util.SDK_INT <= 28) { + // Workaround for MiTV and MiBox devices which have been observed broken up to API 28. + // https://github.com/google/ExoPlayer/issues/5169, + // https://github.com/google/ExoPlayer/issues/6899. + // https://github.com/google/ExoPlayer/issues/8014. + // https://github.com/google/ExoPlayer/issues/8329. + // https://github.com/google/ExoPlayer/issues/9710. + switch (Util.DEVICE) { + case "aquaman": + case "dangal": + case "dangalUHD": + case "dangalFHD": + case "magnolia": + case "machuca": + case "once": + case "oneday": + return true; + default: + break; // Do nothing. + } + } + if (Util.SDK_INT <= 27 && "HWEML".equals(Util.DEVICE)) { + // Workaround for Huawei P20: + // https://github.com/google/ExoPlayer/issues/4468#issuecomment-459291645. + return true; + } + switch (Util.MODEL) { + // Workaround for some Fire OS devices. + case "AFTA": + case "AFTN": + case "AFTR": + case "AFTEU011": + case "AFTEU014": + case "AFTEUFF014": + case "AFTJMST12": + case "AFTKMST12": + case "AFTSO001": + return true; + default: + break; // Do nothing. + } + if (Util.SDK_INT <= 26) { + // In general, devices running API level 27 or later should be unaffected unless observed + // otherwise. Enable the workaround on a per-device basis. Works around: + // https://github.com/google/ExoPlayer/issues/3236, + // https://github.com/google/ExoPlayer/issues/3355, + // https://github.com/google/ExoPlayer/issues/3439, + // https://github.com/google/ExoPlayer/issues/3724, + // https://github.com/google/ExoPlayer/issues/3835, + // https://github.com/google/ExoPlayer/issues/4006, + // https://github.com/google/ExoPlayer/issues/4084, + // https://github.com/google/ExoPlayer/issues/4104, + // https://github.com/google/ExoPlayer/issues/4134, + // https://github.com/google/ExoPlayer/issues/4315, + // https://github.com/google/ExoPlayer/issues/4419, + // https://github.com/google/ExoPlayer/issues/4460, + // https://github.com/google/ExoPlayer/issues/4468, + // https://github.com/google/ExoPlayer/issues/5312, + // https://github.com/google/ExoPlayer/issues/6503. + // https://github.com/google/ExoPlayer/issues/8014, + // https://github.com/google/ExoPlayer/pull/8030. 
+ switch (Util.DEVICE) { + case "1601": + case "1713": + case "1714": + case "601LV": + case "602LV": + case "A10-70F": + case "A10-70L": + case "A1601": + case "A2016a40": + case "A7000-a": + case "A7000plus": + case "A7010a48": + case "A7020a48": + case "AquaPowerM": + case "ASUS_X00AD_2": + case "Aura_Note_2": + case "b5": + case "BLACK-1X": + case "BRAVIA_ATV2": + case "BRAVIA_ATV3_4K": + case "C1": + case "ComioS1": + case "CP8676_I02": + case "CPH1609": + case "CPH1715": + case "CPY83_I00": + case "cv1": + case "cv3": + case "deb": + case "DM-01K": + case "E5643": + case "ELUGA_A3_Pro": + case "ELUGA_Note": + case "ELUGA_Prim": + case "ELUGA_Ray_X": + case "EverStar_S": + case "F01H": + case "F01J": + case "F02H": + case "F03H": + case "F04H": + case "F04J": + case "F3111": + case "F3113": + case "F3116": + case "F3211": + case "F3213": + case "F3215": + case "F3311": + case "flo": + case "fugu": + case "GiONEE_CBL7513": + case "GiONEE_GBL7319": + case "GIONEE_GBL7360": + case "GIONEE_SWW1609": + case "GIONEE_SWW1627": + case "GIONEE_SWW1631": + case "GIONEE_WBL5708": + case "GIONEE_WBL7365": + case "GIONEE_WBL7519": + case "griffin": + case "htc_e56ml_dtul": + case "hwALE-H": + case "HWBLN-H": + case "HWCAM-H": + case "HWVNS-H": + case "HWWAS-H": + case "i9031": + case "iball8735_9806": + case "Infinix-X572": + case "iris60": + case "itel_S41": + case "j2xlteins": + case "JGZ": + case "K50a40": + case "kate": + case "l5460": + case "le_x6": + case "LS-5017": + case "M04": + case "M5c": + case "manning": + case "marino_f": + case "MEIZU_M5": + case "mh": + case "mido": + case "MX6": + case "namath": + case "nicklaus_f": + case "NX541J": + case "NX573J": + case "OnePlus5T": + case "p212": + case "P681": + case "P85": + case "pacificrim": + case "panell_d": + case "panell_dl": + case "panell_ds": + case "panell_dt": + case "PB2-670M": + case "PGN528": + case "PGN610": + case "PGN611": + case "Phantom6": + case "Pixi4-7_3G": + case "Pixi5-10_4G": + case "PLE": + case "PRO7S": + case "Q350": + case "Q4260": + case "Q427": + case "Q4310": + case "Q5": + case "QM16XE_U": + case "QX1": + case "RAIJIN": + case "santoni": + case "Slate_Pro": + case "SVP-DTV15": + case "s905x018": + case "taido_row": + case "TB3-730F": + case "TB3-730X": + case "TB3-850F": + case "TB3-850M": + case "tcl_eu": + case "V1": + case "V23GB": + case "V5": + case "vernee_M5": + case "watson": + case "whyred": + case "woods_f": + case "woods_fn": + case "X3_HK": + case "XE2X": + case "XT1663": + case "Z12_PRO": + case "Z80": + return true; + default: + break; // Do nothing. + } + switch (Util.MODEL) { + case "JSN-L21": + return true; + default: + break; // Do nothing. 
+ } + } + return false; } - @TargetApi(23) + @RequiresApi(23) private final class OnFrameRenderedListenerV23 - implements MediaCodec.OnFrameRenderedListener, Handler.Callback { + implements MediaCodecAdapter.OnFrameRenderedListener, Handler.Callback { private static final int HANDLE_FRAME_RENDERED = 0; private final Handler handler; - public OnFrameRenderedListenerV23(MediaCodec codec) { - handler = new Handler(this); + public OnFrameRenderedListenerV23(MediaCodecAdapter codec) { + handler = Util.createHandlerForCurrentLooper(/* callback= */ this); codec.setOnFrameRenderedListener(/* listener= */ this, handler); } @Override - public void onFrameRendered(MediaCodec codec, long presentationTimeUs, long nanoTime) { + public void onFrameRendered(MediaCodecAdapter codec, long presentationTimeUs, long nanoTime) { // Workaround bug in MediaCodec that causes deadlock if you call directly back into the // MediaCodec from this listener method. // Deadlock occurs because MediaCodec calls this listener method holding a lock, @@ -1863,14 +2051,18 @@ public boolean handleMessage(Message message) { } private void handleFrameRendered(long presentationTimeUs) { - if (this != tunnelingOnFrameRenderedListener) { + if (this != tunnelingOnFrameRenderedListener || getCodec() == null) { // Stale event. return; } if (presentationTimeUs == TUNNELING_EOS_PRESENTATION_TIME_US) { onProcessedTunneledEndOfStream(); } else { - onProcessedTunneledBuffer(presentationTimeUs); + try { + onProcessedTunneledBuffer(presentationTimeUs); + } catch (ExoPlaybackException e) { + setPendingPlaybackException(e); + } } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/PlaceholderSurface.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/PlaceholderSurface.java new file mode 100644 index 0000000000..885e34b4d7 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/PlaceholderSurface.java @@ -0,0 +1,219 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.video; + +import static com.google.android.exoplayer2.util.EGLSurfaceTexture.SECURE_MODE_NONE; +import static com.google.android.exoplayer2.util.EGLSurfaceTexture.SECURE_MODE_PROTECTED_PBUFFER; +import static com.google.android.exoplayer2.util.EGLSurfaceTexture.SECURE_MODE_SURFACELESS_CONTEXT; + +import android.content.Context; +import android.graphics.SurfaceTexture; +import android.os.Handler; +import android.os.HandlerThread; +import android.os.Message; +import android.view.Surface; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.EGLSurfaceTexture; +import com.google.android.exoplayer2.util.EGLSurfaceTexture.SecureMode; +import com.google.android.exoplayer2.util.GlUtil; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.Util; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** A placeholder {@link Surface}. */ +@RequiresApi(17) +public final class PlaceholderSurface extends Surface { + + private static final String TAG = "PlaceholderSurface"; + + /** Whether the surface is secure. */ + public final boolean secure; + + private static @SecureMode int secureMode; + private static boolean secureModeInitialized; + + private final PlaceholderSurfaceThread thread; + private boolean threadReleased; + + /** + * Returns whether the device supports secure placeholder surfaces. + * + * @param context Any {@link Context}. + * @return Whether the device supports secure placeholder surfaces. + */ + public static synchronized boolean isSecureSupported(Context context) { + if (!secureModeInitialized) { + secureMode = getSecureMode(context); + secureModeInitialized = true; + } + return secureMode != SECURE_MODE_NONE; + } + + /** + * Returns a newly created placeholder surface. The surface must be released by calling {@link + * #release} when it's no longer required. + * + *
<p>
      Must only be called if {@link Util#SDK_INT} is 17 or higher. + * + * @param context Any {@link Context}. + * @param secure Whether a secure surface is required. Must only be requested if {@link + * #isSecureSupported(Context)} returns {@code true}. + * @throws IllegalStateException If a secure surface is requested on a device for which {@link + * #isSecureSupported(Context)} returns {@code false}. + */ + public static PlaceholderSurface newInstanceV17(Context context, boolean secure) { + Assertions.checkState(!secure || isSecureSupported(context)); + PlaceholderSurfaceThread thread = new PlaceholderSurfaceThread(); + return thread.init(secure ? secureMode : SECURE_MODE_NONE); + } + + private PlaceholderSurface( + PlaceholderSurfaceThread thread, SurfaceTexture surfaceTexture, boolean secure) { + super(surfaceTexture); + this.thread = thread; + this.secure = secure; + } + + @Override + public void release() { + super.release(); + // The Surface may be released multiple times (explicitly and by Surface.finalize()). The + // implementation of super.release() has its own deduplication logic. Below we need to + // deduplicate ourselves. Synchronization is required as we don't control the thread on which + // Surface.finalize() is called. + synchronized (thread) { + if (!threadReleased) { + thread.release(); + threadReleased = true; + } + } + } + + private static @SecureMode int getSecureMode(Context context) { + if (GlUtil.isProtectedContentExtensionSupported(context)) { + if (GlUtil.isSurfacelessContextExtensionSupported()) { + return SECURE_MODE_SURFACELESS_CONTEXT; + } else { + // If we can't use surfaceless contexts, we use a protected 1 * 1 pixel buffer surface. + // This may require support for EXT_protected_surface, but in practice it works on some + // devices that don't have that extension. See also + // https://github.com/google/ExoPlayer/issues/3558. + return SECURE_MODE_PROTECTED_PBUFFER; + } + } else { + return SECURE_MODE_NONE; + } + } + + private static class PlaceholderSurfaceThread extends HandlerThread implements Handler.Callback { + + private static final int MSG_INIT = 1; + private static final int MSG_RELEASE = 2; + + private @MonotonicNonNull EGLSurfaceTexture eglSurfaceTexture; + private @MonotonicNonNull Handler handler; + @Nullable private Error initError; + @Nullable private RuntimeException initException; + @Nullable private PlaceholderSurface surface; + + public PlaceholderSurfaceThread() { + super("ExoPlayer:PlaceholderSurface"); + } + + public PlaceholderSurface init(@SecureMode int secureMode) { + start(); + handler = new Handler(getLooper(), /* callback= */ this); + eglSurfaceTexture = new EGLSurfaceTexture(handler); + boolean wasInterrupted = false; + synchronized (this) { + handler.obtainMessage(MSG_INIT, secureMode, 0).sendToTarget(); + while (surface == null && initException == null && initError == null) { + try { + wait(); + } catch (InterruptedException e) { + wasInterrupted = true; + } + } + } + if (wasInterrupted) { + // Restore the interrupted status. 
+ Thread.currentThread().interrupt(); + } + if (initException != null) { + throw initException; + } else if (initError != null) { + throw initError; + } else { + return Assertions.checkNotNull(surface); + } + } + + public void release() { + Assertions.checkNotNull(handler); + handler.sendEmptyMessage(MSG_RELEASE); + } + + @Override + public boolean handleMessage(Message msg) { + switch (msg.what) { + case MSG_INIT: + try { + initInternal(/* secureMode= */ msg.arg1); + } catch (RuntimeException e) { + Log.e(TAG, "Failed to initialize placeholder surface", e); + initException = e; + } catch (GlUtil.GlException e) { + Log.e(TAG, "Failed to initialize placeholder surface", e); + initException = new IllegalStateException(e); + } catch (Error e) { + Log.e(TAG, "Failed to initialize placeholder surface", e); + initError = e; + } finally { + synchronized (this) { + notify(); + } + } + return true; + case MSG_RELEASE: + try { + releaseInternal(); + } catch (Throwable e) { + Log.e(TAG, "Failed to release placeholder surface", e); + } finally { + quit(); + } + return true; + default: + return true; + } + } + + private void initInternal(@SecureMode int secureMode) throws GlUtil.GlException { + Assertions.checkNotNull(eglSurfaceTexture); + eglSurfaceTexture.init(secureMode); + this.surface = + new PlaceholderSurface( + this, eglSurfaceTexture.getSurfaceTexture(), secureMode != SECURE_MODE_NONE); + } + + private void releaseInternal() { + Assertions.checkNotNull(eglSurfaceTexture); + eglSurfaceTexture.release(); + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/SimpleDecoderVideoRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/SimpleDecoderVideoRenderer.java deleted file mode 100644 index 6d8147b012..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/SimpleDecoderVideoRenderer.java +++ /dev/null @@ -1,975 +0,0 @@ -/* - * Copyright (C) 2019 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.google.android.exoplayer2.video; - -import android.os.Handler; -import android.os.SystemClock; -import android.view.Surface; -import androidx.annotation.CallSuper; -import androidx.annotation.IntDef; -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.BaseRenderer; -import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.ExoPlaybackException; -import com.google.android.exoplayer2.Format; -import com.google.android.exoplayer2.FormatHolder; -import com.google.android.exoplayer2.RendererCapabilities; -import com.google.android.exoplayer2.decoder.DecoderCounters; -import com.google.android.exoplayer2.decoder.DecoderInputBuffer; -import com.google.android.exoplayer2.decoder.SimpleDecoder; -import com.google.android.exoplayer2.drm.DrmSession; -import com.google.android.exoplayer2.drm.DrmSession.DrmSessionException; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.drm.ExoMediaCrypto; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.TimedValueQueue; -import com.google.android.exoplayer2.util.TraceUtil; -import com.google.android.exoplayer2.video.VideoRendererEventListener.EventDispatcher; -import java.lang.annotation.Documented; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; - -/** Decodes and renders video using a {@link SimpleDecoder}. */ -public abstract class SimpleDecoderVideoRenderer extends BaseRenderer { - - /** Decoder reinitialization states. */ - @Documented - @Retention(RetentionPolicy.SOURCE) - @IntDef({ - REINITIALIZATION_STATE_NONE, - REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM, - REINITIALIZATION_STATE_WAIT_END_OF_STREAM - }) - private @interface ReinitializationState {} - /** The decoder does not need to be re-initialized. */ - private static final int REINITIALIZATION_STATE_NONE = 0; - /** - * The input format has changed in a way that requires the decoder to be re-initialized, but we - * haven't yet signaled an end of stream to the existing decoder. We need to do so in order to - * ensure that it outputs any remaining buffers before we release it. - */ - private static final int REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM = 1; - /** - * The input format has changed in a way that requires the decoder to be re-initialized, and we've - * signaled an end of stream to the existing decoder. We're waiting for the decoder to output an - * end of stream signal to indicate that it has output any remaining buffers before we release it. - */ - private static final int REINITIALIZATION_STATE_WAIT_END_OF_STREAM = 2; - - private final long allowedJoiningTimeMs; - private final int maxDroppedFramesToNotify; - private final boolean playClearSamplesWithoutKeys; - private final EventDispatcher eventDispatcher; - private final TimedValueQueue formatQueue; - private final DecoderInputBuffer flagsOnlyBuffer; - private final DrmSessionManager drmSessionManager; - - private boolean drmResourcesAcquired; - private Format inputFormat; - private Format outputFormat; - private SimpleDecoder< - VideoDecoderInputBuffer, - ? extends VideoDecoderOutputBuffer, - ? 
extends VideoDecoderException> - decoder; - private VideoDecoderInputBuffer inputBuffer; - private VideoDecoderOutputBuffer outputBuffer; - @Nullable private Surface surface; - @Nullable private VideoDecoderOutputBufferRenderer outputBufferRenderer; - @C.VideoOutputMode private int outputMode; - - @Nullable private DrmSession decoderDrmSession; - @Nullable private DrmSession sourceDrmSession; - - @ReinitializationState private int decoderReinitializationState; - private boolean decoderReceivedBuffers; - - private boolean renderedFirstFrame; - private long initialPositionUs; - private long joiningDeadlineMs; - private boolean waitingForKeys; - private boolean waitingForFirstSampleInFormat; - - private boolean inputStreamEnded; - private boolean outputStreamEnded; - private int reportedWidth; - private int reportedHeight; - - private long droppedFrameAccumulationStartTimeMs; - private int droppedFrames; - private int consecutiveDroppedFrameCount; - private int buffersInCodecCount; - private long lastRenderTimeUs; - private long outputStreamOffsetUs; - - /** Decoder event counters used for debugging purposes. */ - protected DecoderCounters decoderCounters; - - /** - * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer - * can attempt to seamlessly join an ongoing playback. - * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be - * null if delivery of events is not required. - * @param eventListener A listener of events. May be null if delivery of events is not required. - * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between - * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}. - * @param drmSessionManager For use with encrypted media. May be null if support for encrypted - * media is not required. - * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions. - * For example a media file may start with a short clear region so as to allow playback to - * begin in parallel with key acquisition. This parameter specifies whether the renderer is - * permitted to play clear regions of encrypted media files before {@code drmSessionManager} - * has obtained the keys necessary to decrypt encrypted regions of the media. - */ - protected SimpleDecoderVideoRenderer( - long allowedJoiningTimeMs, - @Nullable Handler eventHandler, - @Nullable VideoRendererEventListener eventListener, - int maxDroppedFramesToNotify, - @Nullable DrmSessionManager drmSessionManager, - boolean playClearSamplesWithoutKeys) { - super(C.TRACK_TYPE_VIDEO); - this.allowedJoiningTimeMs = allowedJoiningTimeMs; - this.maxDroppedFramesToNotify = maxDroppedFramesToNotify; - this.drmSessionManager = drmSessionManager; - this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys; - joiningDeadlineMs = C.TIME_UNSET; - clearReportedVideoSize(); - formatQueue = new TimedValueQueue<>(); - flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance(); - eventDispatcher = new EventDispatcher(eventHandler, eventListener); - decoderReinitializationState = REINITIALIZATION_STATE_NONE; - outputMode = C.VIDEO_OUTPUT_MODE_NONE; - } - - // BaseRenderer implementation. 
- - @Override - @Capabilities - public final int supportsFormat(Format format) { - return supportsFormatInternal(drmSessionManager, format); - } - - @Override - public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException { - if (outputStreamEnded) { - return; - } - - if (inputFormat == null) { - // We don't have a format yet, so try and read one. - FormatHolder formatHolder = getFormatHolder(); - flagsOnlyBuffer.clear(); - int result = readSource(formatHolder, flagsOnlyBuffer, true); - if (result == C.RESULT_FORMAT_READ) { - onInputFormatChanged(formatHolder); - } else if (result == C.RESULT_BUFFER_READ) { - // End of stream read having not read a format. - Assertions.checkState(flagsOnlyBuffer.isEndOfStream()); - inputStreamEnded = true; - outputStreamEnded = true; - return; - } else { - // We still don't have a format and can't make progress without one. - return; - } - } - - // If we don't have a decoder yet, we need to instantiate one. - maybeInitDecoder(); - - if (decoder != null) { - try { - // Rendering loop. - TraceUtil.beginSection("drainAndFeed"); - while (drainOutputBuffer(positionUs, elapsedRealtimeUs)) {} - while (feedInputBuffer()) {} - TraceUtil.endSection(); - } catch (VideoDecoderException e) { - throw createRendererException(e, inputFormat); - } - decoderCounters.ensureUpdated(); - } - } - - @Override - public boolean isEnded() { - return outputStreamEnded; - } - - @Override - public boolean isReady() { - if (waitingForKeys) { - return false; - } - if (inputFormat != null - && (isSourceReady() || outputBuffer != null) - && (renderedFirstFrame || !hasOutput())) { - // Ready. If we were joining then we've now joined, so clear the joining deadline. - joiningDeadlineMs = C.TIME_UNSET; - return true; - } else if (joiningDeadlineMs == C.TIME_UNSET) { - // Not joining. - return false; - } else if (SystemClock.elapsedRealtime() < joiningDeadlineMs) { - // Joining and still within the joining deadline. - return true; - } else { - // The joining deadline has been exceeded. Give up and clear the deadline. - joiningDeadlineMs = C.TIME_UNSET; - return false; - } - } - - // Protected methods. 
- - @Override - protected void onEnabled(boolean joining) throws ExoPlaybackException { - if (drmSessionManager != null && !drmResourcesAcquired) { - drmResourcesAcquired = true; - drmSessionManager.prepare(); - } - decoderCounters = new DecoderCounters(); - eventDispatcher.enabled(decoderCounters); - } - - @Override - protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException { - inputStreamEnded = false; - outputStreamEnded = false; - clearRenderedFirstFrame(); - initialPositionUs = C.TIME_UNSET; - consecutiveDroppedFrameCount = 0; - if (decoder != null) { - flushDecoder(); - } - if (joining) { - setJoiningDeadlineMs(); - } else { - joiningDeadlineMs = C.TIME_UNSET; - } - formatQueue.clear(); - } - - @Override - protected void onStarted() { - droppedFrames = 0; - droppedFrameAccumulationStartTimeMs = SystemClock.elapsedRealtime(); - lastRenderTimeUs = SystemClock.elapsedRealtime() * 1000; - } - - @Override - protected void onStopped() { - joiningDeadlineMs = C.TIME_UNSET; - maybeNotifyDroppedFrames(); - } - - @Override - protected void onDisabled() { - inputFormat = null; - waitingForKeys = false; - clearReportedVideoSize(); - clearRenderedFirstFrame(); - try { - setSourceDrmSession(null); - releaseDecoder(); - } finally { - eventDispatcher.disabled(decoderCounters); - } - } - - @Override - protected void onReset() { - if (drmSessionManager != null && drmResourcesAcquired) { - drmResourcesAcquired = false; - drmSessionManager.release(); - } - } - - @Override - protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException { - outputStreamOffsetUs = offsetUs; - super.onStreamChanged(formats, offsetUs); - } - - /** - * Called when a decoder has been created and configured. - * - *
<p>
      The default implementation is a no-op. - * - * @param name The name of the decoder that was initialized. - * @param initializedTimestampMs {@link SystemClock#elapsedRealtime()} when initialization - * finished. - * @param initializationDurationMs The time taken to initialize the decoder, in milliseconds. - */ - @CallSuper - protected void onDecoderInitialized( - String name, long initializedTimestampMs, long initializationDurationMs) { - eventDispatcher.decoderInitialized(name, initializedTimestampMs, initializationDurationMs); - } - - /** - * Flushes the decoder. - * - * @throws ExoPlaybackException If an error occurs reinitializing a decoder. - */ - @CallSuper - protected void flushDecoder() throws ExoPlaybackException { - waitingForKeys = false; - buffersInCodecCount = 0; - if (decoderReinitializationState != REINITIALIZATION_STATE_NONE) { - releaseDecoder(); - maybeInitDecoder(); - } else { - inputBuffer = null; - if (outputBuffer != null) { - outputBuffer.release(); - outputBuffer = null; - } - decoder.flush(); - decoderReceivedBuffers = false; - } - } - - /** Releases the decoder. */ - @CallSuper - protected void releaseDecoder() { - inputBuffer = null; - outputBuffer = null; - decoderReinitializationState = REINITIALIZATION_STATE_NONE; - decoderReceivedBuffers = false; - buffersInCodecCount = 0; - if (decoder != null) { - decoder.release(); - decoder = null; - decoderCounters.decoderReleaseCount++; - } - setDecoderDrmSession(null); - } - - /** - * Called when a new format is read from the upstream source. - * - * @param formatHolder A {@link FormatHolder} that holds the new {@link Format}. - * @throws ExoPlaybackException If an error occurs (re-)initializing the decoder. - */ - @CallSuper - @SuppressWarnings("unchecked") - protected void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException { - waitingForFirstSampleInFormat = true; - Format newFormat = Assertions.checkNotNull(formatHolder.format); - if (formatHolder.includesDrmSession) { - setSourceDrmSession((DrmSession) formatHolder.drmSession); - } else { - sourceDrmSession = - getUpdatedSourceDrmSession(inputFormat, newFormat, drmSessionManager, sourceDrmSession); - } - inputFormat = newFormat; - - if (sourceDrmSession != decoderDrmSession) { - if (decoderReceivedBuffers) { - // Signal end of stream and wait for any final output buffers before re-initialization. - decoderReinitializationState = REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM; - } else { - // There aren't any final output buffers, so release the decoder immediately. - releaseDecoder(); - maybeInitDecoder(); - } - } - - eventDispatcher.inputFormatChanged(inputFormat); - } - - /** - * Called immediately before an input buffer is queued into the decoder. - * - *
<p>
      The default implementation is a no-op. - * - * @param buffer The buffer that will be queued. - */ - protected void onQueueInputBuffer(VideoDecoderInputBuffer buffer) { - // Do nothing. - } - - /** - * Called when an output buffer is successfully processed. - * - * @param presentationTimeUs The timestamp associated with the output buffer. - */ - @CallSuper - protected void onProcessedOutputBuffer(long presentationTimeUs) { - buffersInCodecCount--; - } - - /** - * Returns whether the buffer being processed should be dropped. - * - * @param earlyUs The time until the buffer should be presented in microseconds. A negative value - * indicates that the buffer is late. - * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds, - * measured at the start of the current iteration of the rendering loop. - */ - protected boolean shouldDropOutputBuffer(long earlyUs, long elapsedRealtimeUs) { - return isBufferLate(earlyUs); - } - - /** - * Returns whether to drop all buffers from the buffer being processed to the keyframe at or after - * the current playback position, if possible. - * - * @param earlyUs The time until the current buffer should be presented in microseconds. A - * negative value indicates that the buffer is late. - * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds, - * measured at the start of the current iteration of the rendering loop. - */ - protected boolean shouldDropBuffersToKeyframe(long earlyUs, long elapsedRealtimeUs) { - return isBufferVeryLate(earlyUs); - } - - /** - * Returns whether to force rendering an output buffer. - * - * @param earlyUs The time until the current buffer should be presented in microseconds. A - * negative value indicates that the buffer is late. - * @param elapsedSinceLastRenderUs The elapsed time since the last output buffer was rendered, in - * microseconds. - * @return Returns whether to force rendering an output buffer. - */ - protected boolean shouldForceRenderOutputBuffer(long earlyUs, long elapsedSinceLastRenderUs) { - return isBufferLate(earlyUs) && elapsedSinceLastRenderUs > 100000; - } - - /** - * Skips the specified output buffer and releases it. - * - * @param outputBuffer The output buffer to skip. - */ - protected void skipOutputBuffer(VideoDecoderOutputBuffer outputBuffer) { - decoderCounters.skippedOutputBufferCount++; - outputBuffer.release(); - } - - /** - * Drops the specified output buffer and releases it. - * - * @param outputBuffer The output buffer to drop. - */ - protected void dropOutputBuffer(VideoDecoderOutputBuffer outputBuffer) { - updateDroppedBufferCounters(1); - outputBuffer.release(); - } - - /** - * Drops frames from the current output buffer to the next keyframe at or before the playback - * position. If no such keyframe exists, as the playback position is inside the same group of - * pictures as the buffer being processed, returns {@code false}. Returns {@code true} otherwise. - * - * @param positionUs The current playback position, in microseconds. - * @return Whether any buffers were dropped. - * @throws ExoPlaybackException If an error occurs flushing the decoder. 
- */ - protected boolean maybeDropBuffersToKeyframe(long positionUs) throws ExoPlaybackException { - int droppedSourceBufferCount = skipSource(positionUs); - if (droppedSourceBufferCount == 0) { - return false; - } - decoderCounters.droppedToKeyframeCount++; - // We dropped some buffers to catch up, so update the decoder counters and flush the decoder, - // which releases all pending buffers buffers including the current output buffer. - updateDroppedBufferCounters(buffersInCodecCount + droppedSourceBufferCount); - flushDecoder(); - return true; - } - - /** - * Updates decoder counters to reflect that {@code droppedBufferCount} additional buffers were - * dropped. - * - * @param droppedBufferCount The number of additional dropped buffers. - */ - protected void updateDroppedBufferCounters(int droppedBufferCount) { - decoderCounters.droppedBufferCount += droppedBufferCount; - droppedFrames += droppedBufferCount; - consecutiveDroppedFrameCount += droppedBufferCount; - decoderCounters.maxConsecutiveDroppedBufferCount = - Math.max(consecutiveDroppedFrameCount, decoderCounters.maxConsecutiveDroppedBufferCount); - if (maxDroppedFramesToNotify > 0 && droppedFrames >= maxDroppedFramesToNotify) { - maybeNotifyDroppedFrames(); - } - } - - /** - * Returns the {@link Capabilities} for the given {@link Format}. - * - * @param drmSessionManager The renderer's {@link DrmSessionManager}. - * @param format The format, which has a video {@link Format#sampleMimeType}. - * @return The {@link Capabilities} for this {@link Format}. - * @see RendererCapabilities#supportsFormat(Format) - */ - @Capabilities - protected abstract int supportsFormatInternal( - @Nullable DrmSessionManager drmSessionManager, Format format); - - /** - * Creates a decoder for the given format. - * - * @param format The format for which a decoder is required. - * @param mediaCrypto The {@link ExoMediaCrypto} object required for decoding encrypted content. - * May be null and can be ignored if decoder does not handle encrypted content. - * @return The decoder. - * @throws VideoDecoderException If an error occurred creating a suitable decoder. - */ - protected abstract SimpleDecoder< - VideoDecoderInputBuffer, - ? extends VideoDecoderOutputBuffer, - ? extends VideoDecoderException> - createDecoder(Format format, @Nullable ExoMediaCrypto mediaCrypto) - throws VideoDecoderException; - - /** - * Renders the specified output buffer. - * - *
<p>
      The implementation of this method takes ownership of the output buffer and is responsible - * for calling {@link VideoDecoderOutputBuffer#release()} either immediately or in the future. - * - * @param outputBuffer {@link VideoDecoderOutputBuffer} to render. - * @param presentationTimeUs Presentation time in microseconds. - * @param outputFormat Output {@link Format}. - * @throws VideoDecoderException If an error occurs when rendering the output buffer. - */ - protected void renderOutputBuffer( - VideoDecoderOutputBuffer outputBuffer, long presentationTimeUs, Format outputFormat) - throws VideoDecoderException { - lastRenderTimeUs = C.msToUs(SystemClock.elapsedRealtime() * 1000); - int bufferMode = outputBuffer.mode; - boolean renderSurface = bufferMode == C.VIDEO_OUTPUT_MODE_SURFACE_YUV && surface != null; - boolean renderYuv = bufferMode == C.VIDEO_OUTPUT_MODE_YUV && outputBufferRenderer != null; - if (!renderYuv && !renderSurface) { - dropOutputBuffer(outputBuffer); - } else { - maybeNotifyVideoSizeChanged(outputBuffer.width, outputBuffer.height); - if (renderYuv) { - outputBufferRenderer.setOutputBuffer(outputBuffer); - } else { - renderOutputBufferToSurface(outputBuffer, surface); - } - consecutiveDroppedFrameCount = 0; - decoderCounters.renderedOutputBufferCount++; - maybeNotifyRenderedFirstFrame(); - } - } - - /** - * Renders the specified output buffer to the passed surface. - * - *
<p>
      The implementation of this method takes ownership of the output buffer and is responsible - * for calling {@link VideoDecoderOutputBuffer#release()} either immediately or in the future. - * - * @param outputBuffer {@link VideoDecoderOutputBuffer} to render. - * @param surface Output {@link Surface}. - * @throws VideoDecoderException If an error occurs when rendering the output buffer. - */ - protected abstract void renderOutputBufferToSurface( - VideoDecoderOutputBuffer outputBuffer, Surface surface) throws VideoDecoderException; - - /** - * Sets output surface. - * - * @param surface Surface. - */ - protected final void setOutputSurface(@Nullable Surface surface) { - if (this.surface != surface) { - // The output has changed. - this.surface = surface; - if (surface != null) { - outputBufferRenderer = null; - outputMode = C.VIDEO_OUTPUT_MODE_SURFACE_YUV; - if (decoder != null) { - setDecoderOutputMode(outputMode); - } - onOutputChanged(); - } else { - // The output has been removed. We leave the outputMode of the underlying decoder unchanged - // in anticipation that a subsequent output will likely be of the same type. - outputMode = C.VIDEO_OUTPUT_MODE_NONE; - onOutputRemoved(); - } - } else if (surface != null) { - // The output is unchanged and non-null. - onOutputReset(); - } - } - - /** - * Sets output buffer renderer. - * - * @param outputBufferRenderer Output buffer renderer. - */ - protected final void setOutputBufferRenderer( - @Nullable VideoDecoderOutputBufferRenderer outputBufferRenderer) { - if (this.outputBufferRenderer != outputBufferRenderer) { - // The output has changed. - this.outputBufferRenderer = outputBufferRenderer; - if (outputBufferRenderer != null) { - surface = null; - outputMode = C.VIDEO_OUTPUT_MODE_YUV; - if (decoder != null) { - setDecoderOutputMode(outputMode); - } - onOutputChanged(); - } else { - // The output has been removed. We leave the outputMode of the underlying decoder unchanged - // in anticipation that a subsequent output will likely be of the same type. - outputMode = C.VIDEO_OUTPUT_MODE_NONE; - onOutputRemoved(); - } - } else if (outputBufferRenderer != null) { - // The output is unchanged and non-null. - onOutputReset(); - } - } - - /** - * Sets output mode of the decoder. - * - * @param outputMode Output mode. - */ - protected abstract void setDecoderOutputMode(@C.VideoOutputMode int outputMode); - - // Internal methods. - - private void setSourceDrmSession(@Nullable DrmSession session) { - DrmSession.replaceSession(sourceDrmSession, session); - sourceDrmSession = session; - } - - private void setDecoderDrmSession(@Nullable DrmSession session) { - DrmSession.replaceSession(decoderDrmSession, session); - decoderDrmSession = session; - } - - private void maybeInitDecoder() throws ExoPlaybackException { - if (decoder != null) { - return; - } - - setDecoderDrmSession(sourceDrmSession); - - ExoMediaCrypto mediaCrypto = null; - if (decoderDrmSession != null) { - mediaCrypto = decoderDrmSession.getMediaCrypto(); - if (mediaCrypto == null) { - DrmSessionException drmError = decoderDrmSession.getError(); - if (drmError != null) { - // Continue for now. We may be able to avoid failure if the session recovers, or if a new - // input format causes the session to be replaced before it's used. - } else { - // The drm session isn't open yet. 
- return; - } - } - } - - try { - long decoderInitializingTimestamp = SystemClock.elapsedRealtime(); - decoder = createDecoder(inputFormat, mediaCrypto); - setDecoderOutputMode(outputMode); - long decoderInitializedTimestamp = SystemClock.elapsedRealtime(); - onDecoderInitialized( - decoder.getName(), - decoderInitializedTimestamp, - decoderInitializedTimestamp - decoderInitializingTimestamp); - decoderCounters.decoderInitCount++; - } catch (VideoDecoderException e) { - throw createRendererException(e, inputFormat); - } - } - - private boolean feedInputBuffer() throws VideoDecoderException, ExoPlaybackException { - if (decoder == null - || decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM - || inputStreamEnded) { - // We need to reinitialize the decoder or the input stream has ended. - return false; - } - - if (inputBuffer == null) { - inputBuffer = decoder.dequeueInputBuffer(); - if (inputBuffer == null) { - return false; - } - } - - if (decoderReinitializationState == REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM) { - inputBuffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM); - decoder.queueInputBuffer(inputBuffer); - inputBuffer = null; - decoderReinitializationState = REINITIALIZATION_STATE_WAIT_END_OF_STREAM; - return false; - } - - int result; - FormatHolder formatHolder = getFormatHolder(); - if (waitingForKeys) { - // We've already read an encrypted sample into buffer, and are waiting for keys. - result = C.RESULT_BUFFER_READ; - } else { - result = readSource(formatHolder, inputBuffer, false); - } - - if (result == C.RESULT_NOTHING_READ) { - return false; - } - if (result == C.RESULT_FORMAT_READ) { - onInputFormatChanged(formatHolder); - return true; - } - if (inputBuffer.isEndOfStream()) { - inputStreamEnded = true; - decoder.queueInputBuffer(inputBuffer); - inputBuffer = null; - return false; - } - boolean bufferEncrypted = inputBuffer.isEncrypted(); - waitingForKeys = shouldWaitForKeys(bufferEncrypted); - if (waitingForKeys) { - return false; - } - if (waitingForFirstSampleInFormat) { - formatQueue.add(inputBuffer.timeUs, inputFormat); - waitingForFirstSampleInFormat = false; - } - inputBuffer.flip(); - inputBuffer.colorInfo = inputFormat.colorInfo; - onQueueInputBuffer(inputBuffer); - decoder.queueInputBuffer(inputBuffer); - buffersInCodecCount++; - decoderReceivedBuffers = true; - decoderCounters.inputBufferCount++; - inputBuffer = null; - return true; - } - - /** - * Attempts to dequeue an output buffer from the decoder and, if successful, passes it to {@link - * #processOutputBuffer(long, long)}. - * - * @param positionUs The player's current position. - * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds, - * measured at the start of the current iteration of the rendering loop. - * @return Whether it may be possible to drain more output data. - * @throws ExoPlaybackException If an error occurs draining the output buffer. 
- */ - private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs) - throws ExoPlaybackException, VideoDecoderException { - if (outputBuffer == null) { - outputBuffer = decoder.dequeueOutputBuffer(); - if (outputBuffer == null) { - return false; - } - decoderCounters.skippedOutputBufferCount += outputBuffer.skippedOutputBufferCount; - buffersInCodecCount -= outputBuffer.skippedOutputBufferCount; - } - - if (outputBuffer.isEndOfStream()) { - if (decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM) { - // We're waiting to re-initialize the decoder, and have now processed all final buffers. - releaseDecoder(); - maybeInitDecoder(); - } else { - outputBuffer.release(); - outputBuffer = null; - outputStreamEnded = true; - } - return false; - } - - boolean processedOutputBuffer = processOutputBuffer(positionUs, elapsedRealtimeUs); - if (processedOutputBuffer) { - onProcessedOutputBuffer(outputBuffer.timeUs); - outputBuffer = null; - } - return processedOutputBuffer; - } - - /** - * Processes {@link #outputBuffer} by rendering it, skipping it or doing nothing, and returns - * whether it may be possible to process another output buffer. - * - * @param positionUs The player's current position. - * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds, - * measured at the start of the current iteration of the rendering loop. - * @return Whether it may be possible to drain another output buffer. - * @throws ExoPlaybackException If an error occurs processing the output buffer. - */ - private boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs) - throws ExoPlaybackException, VideoDecoderException { - if (initialPositionUs == C.TIME_UNSET) { - initialPositionUs = positionUs; - } - - long earlyUs = outputBuffer.timeUs - positionUs; - if (!hasOutput()) { - // Skip frames in sync with playback, so we'll be at the right frame if the mode changes. - if (isBufferLate(earlyUs)) { - skipOutputBuffer(outputBuffer); - return true; - } - return false; - } - - long presentationTimeUs = outputBuffer.timeUs - outputStreamOffsetUs; - Format format = formatQueue.pollFloor(presentationTimeUs); - if (format != null) { - outputFormat = format; - } - - long elapsedRealtimeNowUs = SystemClock.elapsedRealtime() * 1000; - boolean isStarted = getState() == STATE_STARTED; - if (!renderedFirstFrame - || (isStarted - && shouldForceRenderOutputBuffer(earlyUs, elapsedRealtimeNowUs - lastRenderTimeUs))) { - renderOutputBuffer(outputBuffer, presentationTimeUs, outputFormat); - return true; - } - - if (!isStarted || positionUs == initialPositionUs) { - return false; - } - - if (shouldDropBuffersToKeyframe(earlyUs, elapsedRealtimeUs) - && maybeDropBuffersToKeyframe(positionUs)) { - return false; - } else if (shouldDropOutputBuffer(earlyUs, elapsedRealtimeUs)) { - dropOutputBuffer(outputBuffer); - return true; - } - - if (earlyUs < 30000) { - renderOutputBuffer(outputBuffer, presentationTimeUs, outputFormat); - return true; - } - - return false; - } - - private boolean hasOutput() { - return outputMode != C.VIDEO_OUTPUT_MODE_NONE; - } - - private void onOutputChanged() { - // If we know the video size, report it again immediately. - maybeRenotifyVideoSizeChanged(); - // We haven't rendered to the new output yet. 
- clearRenderedFirstFrame(); - if (getState() == STATE_STARTED) { - setJoiningDeadlineMs(); - } - } - - private void onOutputRemoved() { - clearReportedVideoSize(); - clearRenderedFirstFrame(); - } - - private void onOutputReset() { - // The output is unchanged and non-null. If we know the video size and/or have already - // rendered to the output, report these again immediately. - maybeRenotifyVideoSizeChanged(); - maybeRenotifyRenderedFirstFrame(); - } - - private boolean shouldWaitForKeys(boolean bufferEncrypted) throws ExoPlaybackException { - if (decoderDrmSession == null - || (!bufferEncrypted - && (playClearSamplesWithoutKeys || decoderDrmSession.playClearSamplesWithoutKeys()))) { - return false; - } - @DrmSession.State int drmSessionState = decoderDrmSession.getState(); - if (drmSessionState == DrmSession.STATE_ERROR) { - throw createRendererException(decoderDrmSession.getError(), inputFormat); - } - return drmSessionState != DrmSession.STATE_OPENED_WITH_KEYS; - } - - private void setJoiningDeadlineMs() { - joiningDeadlineMs = - allowedJoiningTimeMs > 0 - ? (SystemClock.elapsedRealtime() + allowedJoiningTimeMs) - : C.TIME_UNSET; - } - - private void clearRenderedFirstFrame() { - renderedFirstFrame = false; - } - - private void maybeNotifyRenderedFirstFrame() { - if (!renderedFirstFrame) { - renderedFirstFrame = true; - eventDispatcher.renderedFirstFrame(surface); - } - } - - private void maybeRenotifyRenderedFirstFrame() { - if (renderedFirstFrame) { - eventDispatcher.renderedFirstFrame(surface); - } - } - - private void clearReportedVideoSize() { - reportedWidth = Format.NO_VALUE; - reportedHeight = Format.NO_VALUE; - } - - private void maybeNotifyVideoSizeChanged(int width, int height) { - if (reportedWidth != width || reportedHeight != height) { - reportedWidth = width; - reportedHeight = height; - eventDispatcher.videoSizeChanged( - width, height, /* unappliedRotationDegrees= */ 0, /* pixelWidthHeightRatio= */ 1); - } - } - - private void maybeRenotifyVideoSizeChanged() { - if (reportedWidth != Format.NO_VALUE || reportedHeight != Format.NO_VALUE) { - eventDispatcher.videoSizeChanged( - reportedWidth, - reportedHeight, - /* unappliedRotationDegrees= */ 0, - /* pixelWidthHeightRatio= */ 1); - } - } - - private void maybeNotifyDroppedFrames() { - if (droppedFrames > 0) { - long now = SystemClock.elapsedRealtime(); - long elapsedMs = now - droppedFrameAccumulationStartTimeMs; - eventDispatcher.droppedFrames(droppedFrames, elapsedMs); - droppedFrames = 0; - droppedFrameAccumulationStartTimeMs = now; - } - } - - private static boolean isBufferLate(long earlyUs) { - // Class a buffer as late if it should have been presented more than 30 ms ago. - return earlyUs < -30000; - } - - private static boolean isBufferVeryLate(long earlyUs) { - // Class a buffer as very late if it should have been presented more than 500 ms ago. - return earlyUs < -500000; - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderException.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderException.java deleted file mode 100644 index 68108af636..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderException.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (C) 2019 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.video; - -/** Thrown when a video decoder error occurs. */ -public class VideoDecoderException extends Exception { - - /** - * Creates an instance with the given message. - * - * @param message The detail message for this exception. - */ - public VideoDecoderException(String message) { - super(message); - } - - /** - * Creates an instance with the given message and cause. - * - * @param message The detail message for this exception. - * @param cause the cause (which is saved for later retrieval by the {@link #getCause()} method). - * A null value is permitted, and indicates that the cause is nonexistent or unknown. - */ - public VideoDecoderException(String message, Throwable cause) { - super(message, cause); - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderGLSurfaceView.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderGLSurfaceView.java index 99f3d07b65..2dc3592e3f 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderGLSurfaceView.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderGLSurfaceView.java @@ -15,24 +15,45 @@ */ package com.google.android.exoplayer2.video; +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + import android.content.Context; +import android.opengl.GLES20; import android.opengl.GLSurfaceView; import android.util.AttributeSet; +import android.util.Log; import androidx.annotation.Nullable; +import com.google.android.exoplayer2.decoder.VideoDecoderOutputBuffer; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.GlProgram; +import com.google.android.exoplayer2.util.GlUtil; +import java.nio.ByteBuffer; +import java.nio.FloatBuffer; +import java.util.concurrent.atomic.AtomicReference; +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.opengles.GL10; +import org.checkerframework.checker.nullness.compatqual.NullableType; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; +import org.checkerframework.checker.nullness.qual.RequiresNonNull; /** - * GLSurfaceView for rendering video output. To render video in this view, call {@link - * #getVideoDecoderOutputBufferRenderer()} to get a {@link VideoDecoderOutputBufferRenderer} that - * will render video decoder output buffers in this view. + * GLSurfaceView implementing {@link VideoDecoderOutputBufferRenderer} for rendering {@link + * VideoDecoderOutputBuffer VideoDecoderOutputBuffers}. * - *
      This view is intended for use only with extension renderers. For other use cases a {@link - * android.view.SurfaceView} or {@link android.view.TextureView} should be used instead. + *
      This view is intended for use only with decoders that produce {@link VideoDecoderOutputBuffer + * VideoDecoderOutputBuffers}. For other use cases a {@link android.view.SurfaceView} or {@link + * android.view.TextureView} should be used instead. */ -public class VideoDecoderGLSurfaceView extends GLSurfaceView { +public final class VideoDecoderGLSurfaceView extends GLSurfaceView + implements VideoDecoderOutputBufferRenderer { + + private static final String TAG = "VideoDecoderGLSV"; - private final VideoDecoderRenderer renderer; + private final Renderer renderer; - /** @param context A {@link Context}. */ + /** + * @param context A {@link Context}. + */ public VideoDecoderGLSurfaceView(Context context) { this(context, /* attrs= */ null); } @@ -41,17 +62,264 @@ public VideoDecoderGLSurfaceView(Context context) { * @param context A {@link Context}. * @param attrs Custom attributes. */ + @SuppressWarnings({"nullness:assignment", "nullness:argument", "nullness:method.invocation"}) public VideoDecoderGLSurfaceView(Context context, @Nullable AttributeSet attrs) { super(context, attrs); - renderer = new VideoDecoderRenderer(this); + renderer = new Renderer(/* surfaceView= */ this); setPreserveEGLContextOnPause(true); setEGLContextClientVersion(2); setRenderer(renderer); setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); } - /** Returns the {@link VideoDecoderOutputBufferRenderer} that will render frames in this view. */ + @Override + public void setOutputBuffer(VideoDecoderOutputBuffer outputBuffer) { + renderer.setOutputBuffer(outputBuffer); + } + + /** + * @deprecated This class implements {@link VideoDecoderOutputBufferRenderer} directly. + */ + @Deprecated public VideoDecoderOutputBufferRenderer getVideoDecoderOutputBufferRenderer() { - return renderer; + return this; + } + + private static final class Renderer implements GLSurfaceView.Renderer { + + private static final float[] kColorConversion601 = { + 1.164f, 1.164f, 1.164f, + 0.0f, -0.392f, 2.017f, + 1.596f, -0.813f, 0.0f, + }; + + private static final float[] kColorConversion709 = { + 1.164f, 1.164f, 1.164f, + 0.0f, -0.213f, 2.112f, + 1.793f, -0.533f, 0.0f, + }; + + private static final float[] kColorConversion2020 = { + 1.168f, 1.168f, 1.168f, + 0.0f, -0.188f, 2.148f, + 1.683f, -0.652f, 0.0f, + }; + + private static final String VERTEX_SHADER = + "varying vec2 interp_tc_y;\n" + + "varying vec2 interp_tc_u;\n" + + "varying vec2 interp_tc_v;\n" + + "attribute vec4 in_pos;\n" + + "attribute vec2 in_tc_y;\n" + + "attribute vec2 in_tc_u;\n" + + "attribute vec2 in_tc_v;\n" + + "void main() {\n" + + " gl_Position = in_pos;\n" + + " interp_tc_y = in_tc_y;\n" + + " interp_tc_u = in_tc_u;\n" + + " interp_tc_v = in_tc_v;\n" + + "}\n"; + private static final String[] TEXTURE_UNIFORMS = {"y_tex", "u_tex", "v_tex"}; + private static final String FRAGMENT_SHADER = + "precision mediump float;\n" + + "varying vec2 interp_tc_y;\n" + + "varying vec2 interp_tc_u;\n" + + "varying vec2 interp_tc_v;\n" + + "uniform sampler2D y_tex;\n" + + "uniform sampler2D u_tex;\n" + + "uniform sampler2D v_tex;\n" + + "uniform mat3 mColorConversion;\n" + + "void main() {\n" + + " vec3 yuv;\n" + + " yuv.x = texture2D(y_tex, interp_tc_y).r - 0.0625;\n" + + " yuv.y = texture2D(u_tex, interp_tc_u).r - 0.5;\n" + + " yuv.z = texture2D(v_tex, interp_tc_v).r - 0.5;\n" + + " gl_FragColor = vec4(mColorConversion * yuv, 1.0);\n" + + "}\n"; + + private static final FloatBuffer TEXTURE_VERTICES = + GlUtil.createBuffer(new float[] {-1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f, 
-1.0f}); + + private final GLSurfaceView surfaceView; + private final int[] yuvTextures; + private final int[] texLocations; + private final int[] previousWidths; + private final int[] previousStrides; + private final AtomicReference<@NullableType VideoDecoderOutputBuffer> + pendingOutputBufferReference; + + // Kept in field rather than a local variable in order not to get garbage collected before + // glDrawArrays uses it. + private final FloatBuffer[] textureCoords; + + private @MonotonicNonNull GlProgram program; + private int colorMatrixLocation; + + // Accessed only from the GL thread. + private @MonotonicNonNull VideoDecoderOutputBuffer renderedOutputBuffer; + + public Renderer(GLSurfaceView surfaceView) { + this.surfaceView = surfaceView; + yuvTextures = new int[3]; + texLocations = new int[3]; + previousWidths = new int[3]; + previousStrides = new int[3]; + pendingOutputBufferReference = new AtomicReference<>(); + textureCoords = new FloatBuffer[3]; + for (int i = 0; i < 3; i++) { + previousWidths[i] = previousStrides[i] = -1; + } + } + + @Override + public void onSurfaceCreated(GL10 unused, EGLConfig config) { + try { + program = new GlProgram(VERTEX_SHADER, FRAGMENT_SHADER); + int posLocation = program.getAttributeArrayLocationAndEnable("in_pos"); + GLES20.glVertexAttribPointer( + posLocation, + 2, + GLES20.GL_FLOAT, + /* normalized= */ false, + /* stride= */ 0, + TEXTURE_VERTICES); + texLocations[0] = program.getAttributeArrayLocationAndEnable("in_tc_y"); + texLocations[1] = program.getAttributeArrayLocationAndEnable("in_tc_u"); + texLocations[2] = program.getAttributeArrayLocationAndEnable("in_tc_v"); + colorMatrixLocation = program.getUniformLocation("mColorConversion"); + GlUtil.checkGlError(); + setupTextures(); + GlUtil.checkGlError(); + } catch (GlUtil.GlException e) { + Log.e(TAG, "Failed to set up the textures and program", e); + } + } + + @Override + public void onSurfaceChanged(GL10 unused, int width, int height) { + GLES20.glViewport(0, 0, width, height); + } + + @Override + public void onDrawFrame(GL10 unused) { + @Nullable + VideoDecoderOutputBuffer pendingOutputBuffer = + pendingOutputBufferReference.getAndSet(/* newValue= */ null); + if (pendingOutputBuffer == null && renderedOutputBuffer == null) { + // There is no output buffer to render at the moment. + return; + } + if (pendingOutputBuffer != null) { + if (renderedOutputBuffer != null) { + renderedOutputBuffer.release(); + } + renderedOutputBuffer = pendingOutputBuffer; + } + + VideoDecoderOutputBuffer outputBuffer = checkNotNull(renderedOutputBuffer); + + // Set color matrix. Assume BT709 if the color space is unknown. + float[] colorConversion = kColorConversion709; + switch (outputBuffer.colorspace) { + case VideoDecoderOutputBuffer.COLORSPACE_BT601: + colorConversion = kColorConversion601; + break; + case VideoDecoderOutputBuffer.COLORSPACE_BT2020: + colorConversion = kColorConversion2020; + break; + case VideoDecoderOutputBuffer.COLORSPACE_BT709: + default: + // Do nothing. + break; + } + GLES20.glUniformMatrix3fv( + colorMatrixLocation, + /* color= */ 1, + /* transpose= */ false, + colorConversion, + /* offset= */ 0); + + int[] yuvStrides = checkNotNull(outputBuffer.yuvStrides); + ByteBuffer[] yuvPlanes = checkNotNull(outputBuffer.yuvPlanes); + + for (int i = 0; i < 3; i++) { + int h = (i == 0) ? 
outputBuffer.height : (outputBuffer.height + 1) / 2; + GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]); + GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1); + GLES20.glTexImage2D( + GLES20.GL_TEXTURE_2D, + /* level= */ 0, + GLES20.GL_LUMINANCE, + yuvStrides[i], + h, + /* border= */ 0, + GLES20.GL_LUMINANCE, + GLES20.GL_UNSIGNED_BYTE, + yuvPlanes[i]); + } + + int[] widths = new int[3]; + widths[0] = outputBuffer.width; + // TODO(b/142097774): Handle streams where chroma channels are not stored at half width and + // height compared to the luma channel. U and V planes are being stored at half width compared + // to Y. + widths[1] = widths[2] = (widths[0] + 1) / 2; + for (int i = 0; i < 3; i++) { + // Set cropping of stride if either width or stride has changed. + if (previousWidths[i] != widths[i] || previousStrides[i] != yuvStrides[i]) { + Assertions.checkState(yuvStrides[i] != 0); + float widthRatio = (float) widths[i] / yuvStrides[i]; + // These buffers are consumed during each call to glDrawArrays. They need to be member + // variables rather than local variables in order not to get garbage collected. + textureCoords[i] = + GlUtil.createBuffer( + new float[] {0.0f, 0.0f, 0.0f, 1.0f, widthRatio, 0.0f, widthRatio, 1.0f}); + GLES20.glVertexAttribPointer( + texLocations[i], + /* size= */ 2, + GLES20.GL_FLOAT, + /* normalized= */ false, + /* stride= */ 0, + textureCoords[i]); + previousWidths[i] = widths[i]; + previousStrides[i] = yuvStrides[i]; + } + } + + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4); + try { + GlUtil.checkGlError(); + } catch (GlUtil.GlException e) { + Log.e(TAG, "Failed to draw a frame", e); + } + } + + public void setOutputBuffer(VideoDecoderOutputBuffer outputBuffer) { + @Nullable + VideoDecoderOutputBuffer oldPendingOutputBuffer = + pendingOutputBufferReference.getAndSet(outputBuffer); + if (oldPendingOutputBuffer != null) { + // The old pending output buffer will never be used for rendering, so release it now. + oldPendingOutputBuffer.release(); + } + surfaceView.requestRender(); + } + + @RequiresNonNull("program") + private void setupTextures() { + try { + GLES20.glGenTextures(/* n= */ 3, yuvTextures, /* offset= */ 0); + for (int i = 0; i < 3; i++) { + GLES20.glUniform1i(program.getUniformLocation(TEXTURE_UNIFORMS[i]), i); + GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); + GlUtil.bindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]); + } + GlUtil.checkGlError(); + } catch (GlUtil.GlException e) { + Log.e(TAG, "Failed to set up the textures", e); + } + } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderInputBuffer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderInputBuffer.java deleted file mode 100644 index 360279c11c..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderInputBuffer.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (C) 2019 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.video; - -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.decoder.DecoderInputBuffer; - -/** Input buffer to a video decoder. */ -public class VideoDecoderInputBuffer extends DecoderInputBuffer { - - @Nullable public ColorInfo colorInfo; - - public VideoDecoderInputBuffer() { - super(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DIRECT); - } - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderOutputBufferRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderOutputBufferRenderer.java index c57794f454..921922f961 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderOutputBufferRenderer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderOutputBufferRenderer.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.video; +import com.google.android.exoplayer2.decoder.VideoDecoderOutputBuffer; + /** Renders the {@link VideoDecoderOutputBuffer}. */ public interface VideoDecoderOutputBufferRenderer { diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderRenderer.java deleted file mode 100644 index cb9c4eb59b..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoDecoderRenderer.java +++ /dev/null @@ -1,241 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.video; - -import android.opengl.GLES20; -import android.opengl.GLSurfaceView; -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.util.Assertions; -import com.google.android.exoplayer2.util.GlUtil; -import java.nio.FloatBuffer; -import java.util.concurrent.atomic.AtomicReference; -import javax.microedition.khronos.egl.EGLConfig; -import javax.microedition.khronos.opengles.GL10; - -/** - * GLSurfaceView.Renderer implementation that can render YUV Frames returned by a video decoder - * after decoding. It does the YUV to RGB color conversion in the Fragment Shader. 
- */ -/* package */ class VideoDecoderRenderer - implements GLSurfaceView.Renderer, VideoDecoderOutputBufferRenderer { - - private static final float[] kColorConversion601 = { - 1.164f, 1.164f, 1.164f, - 0.0f, -0.392f, 2.017f, - 1.596f, -0.813f, 0.0f, - }; - - private static final float[] kColorConversion709 = { - 1.164f, 1.164f, 1.164f, - 0.0f, -0.213f, 2.112f, - 1.793f, -0.533f, 0.0f, - }; - - private static final float[] kColorConversion2020 = { - 1.168f, 1.168f, 1.168f, - 0.0f, -0.188f, 2.148f, - 1.683f, -0.652f, 0.0f, - }; - - private static final String VERTEX_SHADER = - "varying vec2 interp_tc_y;\n" - + "varying vec2 interp_tc_u;\n" - + "varying vec2 interp_tc_v;\n" - + "attribute vec4 in_pos;\n" - + "attribute vec2 in_tc_y;\n" - + "attribute vec2 in_tc_u;\n" - + "attribute vec2 in_tc_v;\n" - + "void main() {\n" - + " gl_Position = in_pos;\n" - + " interp_tc_y = in_tc_y;\n" - + " interp_tc_u = in_tc_u;\n" - + " interp_tc_v = in_tc_v;\n" - + "}\n"; - private static final String[] TEXTURE_UNIFORMS = {"y_tex", "u_tex", "v_tex"}; - private static final String FRAGMENT_SHADER = - "precision mediump float;\n" - + "varying vec2 interp_tc_y;\n" - + "varying vec2 interp_tc_u;\n" - + "varying vec2 interp_tc_v;\n" - + "uniform sampler2D y_tex;\n" - + "uniform sampler2D u_tex;\n" - + "uniform sampler2D v_tex;\n" - + "uniform mat3 mColorConversion;\n" - + "void main() {\n" - + " vec3 yuv;\n" - + " yuv.x = texture2D(y_tex, interp_tc_y).r - 0.0625;\n" - + " yuv.y = texture2D(u_tex, interp_tc_u).r - 0.5;\n" - + " yuv.z = texture2D(v_tex, interp_tc_v).r - 0.5;\n" - + " gl_FragColor = vec4(mColorConversion * yuv, 1.0);\n" - + "}\n"; - - private static final FloatBuffer TEXTURE_VERTICES = - GlUtil.createBuffer(new float[] {-1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f, -1.0f}); - private final GLSurfaceView surfaceView; - private final int[] yuvTextures = new int[3]; - private final AtomicReference pendingOutputBufferReference; - - // Kept in field rather than a local variable in order not to get garbage collected before - // glDrawArrays uses it. - private FloatBuffer[] textureCoords; - - private int program; - private int[] texLocations; - private int colorMatrixLocation; - private int[] previousWidths; - private int[] previousStrides; - - @Nullable - private VideoDecoderOutputBuffer renderedOutputBuffer; // Accessed only from the GL thread. 
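  // Added commentary (not part of the original source): the decoder thread and the GL thread
  // exchange frames through pendingOutputBufferReference. setOutputBuffer() atomically swaps in
  // the newest buffer and releases any buffer that was still pending, so at most one undrawn
  // frame is ever held; onDrawFrame() then takes the pending buffer (if any), releases the
  // previously rendered one, and keeps the new buffer in renderedOutputBuffer so the last frame
  // can be redrawn when the surface is invalidated. The replacement Renderer inside the new
  // VideoDecoderGLSurfaceView keeps this same handoff.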
- - public VideoDecoderRenderer(GLSurfaceView surfaceView) { - this.surfaceView = surfaceView; - pendingOutputBufferReference = new AtomicReference<>(); - textureCoords = new FloatBuffer[3]; - texLocations = new int[3]; - previousWidths = new int[3]; - previousStrides = new int[3]; - for (int i = 0; i < 3; i++) { - previousWidths[i] = previousStrides[i] = -1; - } - } - - @Override - public void onSurfaceCreated(GL10 unused, EGLConfig config) { - program = GlUtil.compileProgram(VERTEX_SHADER, FRAGMENT_SHADER); - GLES20.glUseProgram(program); - int posLocation = GLES20.glGetAttribLocation(program, "in_pos"); - GLES20.glEnableVertexAttribArray(posLocation); - GLES20.glVertexAttribPointer(posLocation, 2, GLES20.GL_FLOAT, false, 0, TEXTURE_VERTICES); - texLocations[0] = GLES20.glGetAttribLocation(program, "in_tc_y"); - GLES20.glEnableVertexAttribArray(texLocations[0]); - texLocations[1] = GLES20.glGetAttribLocation(program, "in_tc_u"); - GLES20.glEnableVertexAttribArray(texLocations[1]); - texLocations[2] = GLES20.glGetAttribLocation(program, "in_tc_v"); - GLES20.glEnableVertexAttribArray(texLocations[2]); - GlUtil.checkGlError(); - colorMatrixLocation = GLES20.glGetUniformLocation(program, "mColorConversion"); - GlUtil.checkGlError(); - setupTextures(); - GlUtil.checkGlError(); - } - - @Override - public void onSurfaceChanged(GL10 unused, int width, int height) { - GLES20.glViewport(0, 0, width, height); - } - - @Override - public void onDrawFrame(GL10 unused) { - VideoDecoderOutputBuffer pendingOutputBuffer = pendingOutputBufferReference.getAndSet(null); - if (pendingOutputBuffer == null && renderedOutputBuffer == null) { - // There is no output buffer to render at the moment. - return; - } - if (pendingOutputBuffer != null) { - if (renderedOutputBuffer != null) { - renderedOutputBuffer.release(); - } - renderedOutputBuffer = pendingOutputBuffer; - } - VideoDecoderOutputBuffer outputBuffer = renderedOutputBuffer; - // Set color matrix. Assume BT709 if the color space is unknown. - float[] colorConversion = kColorConversion709; - switch (outputBuffer.colorspace) { - case VideoDecoderOutputBuffer.COLORSPACE_BT601: - colorConversion = kColorConversion601; - break; - case VideoDecoderOutputBuffer.COLORSPACE_BT2020: - colorConversion = kColorConversion2020; - break; - case VideoDecoderOutputBuffer.COLORSPACE_BT709: - default: - break; // Do nothing - } - GLES20.glUniformMatrix3fv(colorMatrixLocation, 1, false, colorConversion, 0); - - for (int i = 0; i < 3; i++) { - int h = (i == 0) ? outputBuffer.height : (outputBuffer.height + 1) / 2; - GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]); - GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1); - GLES20.glTexImage2D( - GLES20.GL_TEXTURE_2D, - 0, - GLES20.GL_LUMINANCE, - outputBuffer.yuvStrides[i], - h, - 0, - GLES20.GL_LUMINANCE, - GLES20.GL_UNSIGNED_BYTE, - outputBuffer.yuvPlanes[i]); - } - - int[] widths = new int[3]; - widths[0] = outputBuffer.width; - // TODO: Handle streams where chroma channels are not stored at half width and height - // compared to luma channel. See [Internal: b/142097774]. - // U and V planes are being stored at half width compared to Y. - widths[1] = widths[2] = (widths[0] + 1) / 2; - for (int i = 0; i < 3; i++) { - // Set cropping of stride if either width or stride has changed. 
- if (previousWidths[i] != widths[i] || previousStrides[i] != outputBuffer.yuvStrides[i]) { - Assertions.checkState(outputBuffer.yuvStrides[i] != 0); - float widthRatio = (float) widths[i] / outputBuffer.yuvStrides[i]; - // These buffers are consumed during each call to glDrawArrays. They need to be member - // variables rather than local variables in order not to get garbage collected. - textureCoords[i] = - GlUtil.createBuffer( - new float[] {0.0f, 0.0f, 0.0f, 1.0f, widthRatio, 0.0f, widthRatio, 1.0f}); - GLES20.glVertexAttribPointer( - texLocations[i], 2, GLES20.GL_FLOAT, false, 0, textureCoords[i]); - previousWidths[i] = widths[i]; - previousStrides[i] = outputBuffer.yuvStrides[i]; - } - } - - GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); - GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); - GlUtil.checkGlError(); - } - - @Override - public void setOutputBuffer(VideoDecoderOutputBuffer outputBuffer) { - VideoDecoderOutputBuffer oldPendingOutputBuffer = - pendingOutputBufferReference.getAndSet(outputBuffer); - if (oldPendingOutputBuffer != null) { - // The old pending output buffer will never be used for rendering, so release it now. - oldPendingOutputBuffer.release(); - } - surfaceView.requestRender(); - } - - private void setupTextures() { - GLES20.glGenTextures(3, yuvTextures, 0); - for (int i = 0; i < 3; i++) { - GLES20.glUniform1i(GLES20.glGetUniformLocation(program, TEXTURE_UNIFORMS[i]), i); - GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]); - GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); - GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); - GLES20.glTexParameterf( - GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameterf( - GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); - } - GlUtil.checkGlError(); - } -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoFrameMetadataListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoFrameMetadataListener.java index 746903a101..bc275f1fb0 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoFrameMetadataListener.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoFrameMetadataListener.java @@ -19,14 +19,14 @@ import androidx.annotation.Nullable; import com.google.android.exoplayer2.Format; -/** A listener for metadata corresponding to video frame being rendered. */ +/** A listener for metadata corresponding to video frames being rendered. */ public interface VideoFrameMetadataListener { /** - * Called when the video frame about to be rendered. This method is called on the playback thread. + * Called on the playback thread when a video frame is about to be rendered. * - * @param presentationTimeUs The presentation time of the output buffer, in microseconds. + * @param presentationTimeUs The presentation time of the frame, in microseconds. * @param releaseTimeNs The wallclock time at which the frame should be displayed, in nanoseconds. - * If the platform API version of the device is less than 21, then this is the best effort. + * If the platform API version of the device is less than 21, then this is a best effort. * @param format The format associated with the frame. 
* @param mediaFormat The framework media format associated with the frame, or {@code null} if not * known or not applicable (e.g., because the frame was not output by a {@link diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoFrameReleaseHelper.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoFrameReleaseHelper.java new file mode 100644 index 0000000000..5d67695448 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoFrameReleaseHelper.java @@ -0,0 +1,650 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.video; + +import static com.google.android.exoplayer2.util.Assertions.checkNotNull; + +import android.content.Context; +import android.hardware.display.DisplayManager; +import android.os.Handler; +import android.os.HandlerThread; +import android.os.Message; +import android.view.Choreographer; +import android.view.Choreographer.FrameCallback; +import android.view.Display; +import android.view.Surface; +import android.view.WindowManager; +import androidx.annotation.DoNotInline; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.Renderer; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.Util; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Helps a video {@link Renderer} release frames to a {@link Surface}. The helper: + * + *
      • Adjusts frame release timestamps to achieve a smoother visual result. The release + * timestamps are smoothed, and aligned with the default display's vsync signal. + *
      • Adjusts the {@link Surface} frame rate to inform the underlying platform of a fixed frame + * rate, when there is one. + *
      + */ +public final class VideoFrameReleaseHelper { + + private static final String TAG = "VideoFrameReleaseHelper"; + + /** + * The minimum sum of frame durations used to calculate the current fixed frame rate estimate, for + * the estimate to be treated as a high confidence estimate. + */ + private static final long MINIMUM_MATCHING_FRAME_DURATION_FOR_HIGH_CONFIDENCE_NS = 5_000_000_000L; + + /** + * The minimum change in media frame rate that will trigger a change in surface frame rate, given + * a high confidence estimate. + */ + private static final float MINIMUM_MEDIA_FRAME_RATE_CHANGE_FOR_UPDATE_HIGH_CONFIDENCE = 0.02f; + + /** + * The minimum change in media frame rate that will trigger a change in surface frame rate, given + * a low confidence estimate. + */ + private static final float MINIMUM_MEDIA_FRAME_RATE_CHANGE_FOR_UPDATE_LOW_CONFIDENCE = 1f; + + /** + * The minimum number of frames without a frame rate estimate, for the surface frame rate to be + * cleared. + */ + private static final int MINIMUM_FRAMES_WITHOUT_SYNC_TO_CLEAR_SURFACE_FRAME_RATE = + 2 * FixedFrameRateEstimator.CONSECUTIVE_MATCHING_FRAME_DURATIONS_FOR_SYNC; + + /** The period between sampling display VSYNC timestamps, in milliseconds. */ + private static final long VSYNC_SAMPLE_UPDATE_PERIOD_MS = 500; + /** + * The maximum adjustment that can be made to a frame release timestamp, in nanoseconds, excluding + * the part of the adjustment that aligns frame release timestamps with the display VSYNC. + */ + private static final long MAX_ALLOWED_ADJUSTMENT_NS = 20_000_000; + /** + * If a frame is targeted to a display VSYNC with timestamp {@code vsyncTime}, the adjusted frame + * release timestamp will be calculated as {@code releaseTime = vsyncTime - ((vsyncDuration * + * VSYNC_OFFSET_PERCENTAGE) / 100)}. + */ + private static final long VSYNC_OFFSET_PERCENTAGE = 80; + + private final FixedFrameRateEstimator frameRateEstimator; + @Nullable private final DisplayHelper displayHelper; + @Nullable private final VSyncSampler vsyncSampler; + + private boolean started; + @Nullable private Surface surface; + + /** The media frame rate specified in the {@link Format}. */ + private float formatFrameRate; + /** + * The media frame rate used to calculate the playback frame rate of the {@link Surface}. This may + * be different to {@link #formatFrameRate} if {@link #formatFrameRate} is unspecified or + * inaccurate. + */ + private float surfaceMediaFrameRate; + /** The playback frame rate set on the {@link Surface}. */ + private float surfacePlaybackFrameRate; + + private float playbackSpeed; + private @C.VideoChangeFrameRateStrategy int changeFrameRateStrategy; + + private long vsyncDurationNs; + private long vsyncOffsetNs; + + private long frameIndex; + private long pendingLastAdjustedFrameIndex; + private long pendingLastAdjustedReleaseTimeNs; + private long lastAdjustedFrameIndex; + private long lastAdjustedReleaseTimeNs; + + /** + * Constructs an instance. + * + * @param context A context from which information about the default display can be retrieved. + */ + public VideoFrameReleaseHelper(@Nullable Context context) { + frameRateEstimator = new FixedFrameRateEstimator(); + displayHelper = maybeBuildDisplayHelper(context); + vsyncSampler = displayHelper != null ? 
VSyncSampler.getInstance() : null; + vsyncDurationNs = C.TIME_UNSET; + vsyncOffsetNs = C.TIME_UNSET; + formatFrameRate = Format.NO_VALUE; + playbackSpeed = 1f; + changeFrameRateStrategy = C.VIDEO_CHANGE_FRAME_RATE_STRATEGY_ONLY_IF_SEAMLESS; + } + + /** + * Change the {@link C.VideoChangeFrameRateStrategy} used when calling {@link + * Surface#setFrameRate}. + */ + public void setChangeFrameRateStrategy( + @C.VideoChangeFrameRateStrategy int changeFrameRateStrategy) { + if (this.changeFrameRateStrategy == changeFrameRateStrategy) { + return; + } + this.changeFrameRateStrategy = changeFrameRateStrategy; + updateSurfacePlaybackFrameRate(/* forceUpdate= */ true); + } + + /** Called when the renderer is started. */ + public void onStarted() { + started = true; + resetAdjustment(); + if (displayHelper != null) { + checkNotNull(vsyncSampler).addObserver(); + displayHelper.register(this::updateDefaultDisplayRefreshRateParams); + } + updateSurfacePlaybackFrameRate(/* forceUpdate= */ false); + } + + /** + * Called when the renderer changes which {@link Surface} it's rendering to renders to. + * + * @param surface The new {@link Surface}, or {@code null} if the renderer does not have one. + */ + public void onSurfaceChanged(@Nullable Surface surface) { + if (surface instanceof PlaceholderSurface) { + // We don't care about dummy surfaces for release timing, since they're not visible. + surface = null; + } + if (this.surface == surface) { + return; + } + clearSurfaceFrameRate(); + this.surface = surface; + updateSurfacePlaybackFrameRate(/* forceUpdate= */ true); + } + + /** Called when the renderer's position is reset. */ + public void onPositionReset() { + resetAdjustment(); + } + + /** + * Called when the renderer's playback speed changes. + * + * @param playbackSpeed The factor by which playback is sped up. + */ + public void onPlaybackSpeed(float playbackSpeed) { + this.playbackSpeed = playbackSpeed; + resetAdjustment(); + updateSurfacePlaybackFrameRate(/* forceUpdate= */ false); + } + + /** + * Called when the renderer's output format changes. + * + * @param formatFrameRate The format's frame rate, or {@link Format#NO_VALUE} if unknown. + */ + public void onFormatChanged(float formatFrameRate) { + this.formatFrameRate = formatFrameRate; + frameRateEstimator.reset(); + updateSurfaceMediaFrameRate(); + } + + /** + * Called by the renderer for each frame, prior to it being skipped, dropped or rendered. + * + * @param framePresentationTimeUs The frame presentation timestamp, in microseconds. + */ + public void onNextFrame(long framePresentationTimeUs) { + if (pendingLastAdjustedFrameIndex != C.INDEX_UNSET) { + lastAdjustedFrameIndex = pendingLastAdjustedFrameIndex; + lastAdjustedReleaseTimeNs = pendingLastAdjustedReleaseTimeNs; + } + frameIndex++; + frameRateEstimator.onNextFrame(framePresentationTimeUs * 1000); + updateSurfaceMediaFrameRate(); + } + + /** Called when the renderer is stopped. */ + public void onStopped() { + started = false; + if (displayHelper != null) { + displayHelper.unregister(); + checkNotNull(vsyncSampler).removeObserver(); + } + clearSurfaceFrameRate(); + } + + // Frame release time adjustment. + + /** + * Adjusts the release timestamp for the next frame. This is the frame whose presentation + * timestamp was most recently passed to {@link #onNextFrame}. + * + *
      This method may be called any number of times for each frame, including zero times (for + * skipped frames, or when rendering the first frame prior to playback starting), or more than + * once (if the caller wishes to give the helper the opportunity to refine a release time closer + * to when the frame needs to be released). + * + * @param releaseTimeNs The frame's unadjusted release time, in nanoseconds and in the same time + * base as {@link System#nanoTime()}. + * @return The adjusted frame release timestamp, in nanoseconds and in the same time base as + * {@link System#nanoTime()}. + */ + public long adjustReleaseTime(long releaseTimeNs) { + // Until we know better, the adjustment will be a no-op. + long adjustedReleaseTimeNs = releaseTimeNs; + + if (lastAdjustedFrameIndex != C.INDEX_UNSET && frameRateEstimator.isSynced()) { + long frameDurationNs = frameRateEstimator.getFrameDurationNs(); + long candidateAdjustedReleaseTimeNs = + lastAdjustedReleaseTimeNs + + (long) ((frameDurationNs * (frameIndex - lastAdjustedFrameIndex)) / playbackSpeed); + if (adjustmentAllowed(releaseTimeNs, candidateAdjustedReleaseTimeNs)) { + adjustedReleaseTimeNs = candidateAdjustedReleaseTimeNs; + } else { + resetAdjustment(); + } + } + pendingLastAdjustedFrameIndex = frameIndex; + pendingLastAdjustedReleaseTimeNs = adjustedReleaseTimeNs; + + if (vsyncSampler == null || vsyncDurationNs == C.TIME_UNSET) { + return adjustedReleaseTimeNs; + } + long sampledVsyncTimeNs = vsyncSampler.sampledVsyncTimeNs; + if (sampledVsyncTimeNs == C.TIME_UNSET) { + return adjustedReleaseTimeNs; + } + // Find the timestamp of the closest vsync. This is the vsync that we're targeting. + long snappedTimeNs = closestVsync(adjustedReleaseTimeNs, sampledVsyncTimeNs, vsyncDurationNs); + // Apply an offset so that we release before the target vsync, but after the previous one. + return snappedTimeNs - vsyncOffsetNs; + } + + private void resetAdjustment() { + frameIndex = 0; + lastAdjustedFrameIndex = C.INDEX_UNSET; + pendingLastAdjustedFrameIndex = C.INDEX_UNSET; + } + + private static boolean adjustmentAllowed( + long unadjustedReleaseTimeNs, long adjustedReleaseTimeNs) { + return Math.abs(unadjustedReleaseTimeNs - adjustedReleaseTimeNs) <= MAX_ALLOWED_ADJUSTMENT_NS; + } + + // Surface frame rate adjustment. + + /** + * Updates the media frame rate that's used to calculate the playback frame rate of the current + * {@link #surface}. If the frame rate is updated then {@link #updateSurfacePlaybackFrameRate} is + * called to update the surface. + */ + private void updateSurfaceMediaFrameRate() { + if (Util.SDK_INT < 30 || surface == null) { + return; + } + + float candidateFrameRate = + frameRateEstimator.isSynced() ? frameRateEstimator.getFrameRate() : formatFrameRate; + if (candidateFrameRate == surfaceMediaFrameRate) { + return; + } + + // The candidate is different to the current surface media frame rate. Decide whether to update + // the surface media frame rate. + boolean shouldUpdate; + if (candidateFrameRate != Format.NO_VALUE && surfaceMediaFrameRate != Format.NO_VALUE) { + boolean candidateIsHighConfidence = + frameRateEstimator.isSynced() + && frameRateEstimator.getMatchingFrameDurationSumNs() + >= MINIMUM_MATCHING_FRAME_DURATION_FOR_HIGH_CONFIDENCE_NS; + float minimumChangeForUpdate = + candidateIsHighConfidence + ? 
MINIMUM_MEDIA_FRAME_RATE_CHANGE_FOR_UPDATE_HIGH_CONFIDENCE + : MINIMUM_MEDIA_FRAME_RATE_CHANGE_FOR_UPDATE_LOW_CONFIDENCE; + shouldUpdate = Math.abs(candidateFrameRate - surfaceMediaFrameRate) >= minimumChangeForUpdate; + } else if (candidateFrameRate != Format.NO_VALUE) { + shouldUpdate = true; + } else { + shouldUpdate = + frameRateEstimator.getFramesWithoutSyncCount() + >= MINIMUM_FRAMES_WITHOUT_SYNC_TO_CLEAR_SURFACE_FRAME_RATE; + } + + if (shouldUpdate) { + surfaceMediaFrameRate = candidateFrameRate; + updateSurfacePlaybackFrameRate(/* forceUpdate= */ false); + } + } + + /** + * Updates the playback frame rate of the current {@link #surface} based on the playback speed, + * frame rate of the content, and whether the renderer is started. + * + *
      Does nothing if {@link #changeFrameRateStrategy} is {@link + * C#VIDEO_CHANGE_FRAME_RATE_STRATEGY_OFF}. + * + * @param forceUpdate Whether to call {@link Surface#setFrameRate} even if the frame rate is + * unchanged. + */ + private void updateSurfacePlaybackFrameRate(boolean forceUpdate) { + if (Util.SDK_INT < 30 + || surface == null + || changeFrameRateStrategy == C.VIDEO_CHANGE_FRAME_RATE_STRATEGY_OFF) { + return; + } + + float surfacePlaybackFrameRate = 0; + if (started && surfaceMediaFrameRate != Format.NO_VALUE) { + surfacePlaybackFrameRate = surfaceMediaFrameRate * playbackSpeed; + } + // We always set the frame-rate if we have a new surface, since we have no way of knowing what + // it might have been set to previously. + if (!forceUpdate && this.surfacePlaybackFrameRate == surfacePlaybackFrameRate) { + return; + } + this.surfacePlaybackFrameRate = surfacePlaybackFrameRate; + Api30.setSurfaceFrameRate(surface, surfacePlaybackFrameRate); + } + + /** + * Clears the frame-rate of the current {@link #surface}. + * + *
      Does nothing if {@link #changeFrameRateStrategy} is {@link + * C#VIDEO_CHANGE_FRAME_RATE_STRATEGY_OFF}. + */ + private void clearSurfaceFrameRate() { + if (Util.SDK_INT < 30 + || surface == null + || changeFrameRateStrategy == C.VIDEO_CHANGE_FRAME_RATE_STRATEGY_OFF + || surfacePlaybackFrameRate == 0) { + return; + } + surfacePlaybackFrameRate = 0; + Api30.setSurfaceFrameRate(surface, /* frameRate= */ 0); + } + + // Display refresh rate and vsync logic. + + private void updateDefaultDisplayRefreshRateParams(@Nullable Display defaultDisplay) { + if (defaultDisplay != null) { + double defaultDisplayRefreshRate = defaultDisplay.getRefreshRate(); + vsyncDurationNs = (long) (C.NANOS_PER_SECOND / defaultDisplayRefreshRate); + vsyncOffsetNs = (vsyncDurationNs * VSYNC_OFFSET_PERCENTAGE) / 100; + } else { + Log.w(TAG, "Unable to query display refresh rate"); + vsyncDurationNs = C.TIME_UNSET; + vsyncOffsetNs = C.TIME_UNSET; + } + } + + private static long closestVsync(long releaseTime, long sampledVsyncTime, long vsyncDuration) { + long vsyncCount = (releaseTime - sampledVsyncTime) / vsyncDuration; + long snappedTimeNs = sampledVsyncTime + (vsyncDuration * vsyncCount); + long snappedBeforeNs; + long snappedAfterNs; + if (releaseTime <= snappedTimeNs) { + snappedBeforeNs = snappedTimeNs - vsyncDuration; + snappedAfterNs = snappedTimeNs; + } else { + snappedBeforeNs = snappedTimeNs; + snappedAfterNs = snappedTimeNs + vsyncDuration; + } + long snappedAfterDiff = snappedAfterNs - releaseTime; + long snappedBeforeDiff = releaseTime - snappedBeforeNs; + return snappedAfterDiff < snappedBeforeDiff ? snappedAfterNs : snappedBeforeNs; + } + + @Nullable + private static DisplayHelper maybeBuildDisplayHelper(@Nullable Context context) { + @Nullable DisplayHelper displayHelper = null; + if (context != null) { + context = context.getApplicationContext(); + if (Util.SDK_INT >= 17) { + displayHelper = DisplayHelperV17.maybeBuildNewInstance(context); + } + if (displayHelper == null) { + displayHelper = DisplayHelperV16.maybeBuildNewInstance(context); + } + } + return displayHelper; + } + + // Nested classes. + + @RequiresApi(30) + private static final class Api30 { + @DoNotInline + public static void setSurfaceFrameRate(Surface surface, float frameRate) { + int compatibility = + frameRate == 0 + ? Surface.FRAME_RATE_COMPATIBILITY_DEFAULT + : Surface.FRAME_RATE_COMPATIBILITY_FIXED_SOURCE; + try { + surface.setFrameRate(frameRate, compatibility); + } catch (IllegalStateException e) { + Log.e(TAG, "Failed to call Surface.setFrameRate", e); + } + } + } + + /** Helper for listening to changes to the default display. */ + private interface DisplayHelper { + + /** Listener for changes to the default display. */ + interface Listener { + + /** + * Called when the default display changes. + * + * @param defaultDisplay The default display, or {@code null} if a corresponding {@link + * Display} object could not be obtained. + */ + void onDefaultDisplayChanged(@Nullable Display defaultDisplay); + } + + /** + * Enables the helper, invoking {@link Listener#onDefaultDisplayChanged(Display)} to pass the + * initial default display. + */ + void register(Listener listener); + + /** Disables the helper. */ + void unregister(); + } + + private static final class DisplayHelperV16 implements DisplayHelper { + + @Nullable + public static DisplayHelper maybeBuildNewInstance(Context context) { + WindowManager windowManager = + (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); + return windowManager != null ? 
new DisplayHelperV16(windowManager) : null; + } + + private final WindowManager windowManager; + + private DisplayHelperV16(WindowManager windowManager) { + this.windowManager = windowManager; + } + + @Override + public void register(Listener listener) { + listener.onDefaultDisplayChanged(windowManager.getDefaultDisplay()); + } + + @Override + public void unregister() { + // Do nothing. + } + } + + @RequiresApi(17) + private static final class DisplayHelperV17 + implements DisplayHelper, DisplayManager.DisplayListener { + + @Nullable + public static DisplayHelper maybeBuildNewInstance(Context context) { + DisplayManager displayManager = + (DisplayManager) context.getSystemService(Context.DISPLAY_SERVICE); + return displayManager != null ? new DisplayHelperV17(displayManager) : null; + } + + private final DisplayManager displayManager; + @Nullable private Listener listener; + + private DisplayHelperV17(DisplayManager displayManager) { + this.displayManager = displayManager; + } + + @Override + public void register(Listener listener) { + this.listener = listener; + displayManager.registerDisplayListener(this, Util.createHandlerForCurrentLooper()); + listener.onDefaultDisplayChanged(getDefaultDisplay()); + } + + @Override + public void unregister() { + displayManager.unregisterDisplayListener(this); + listener = null; + } + + @Override + public void onDisplayChanged(int displayId) { + if (listener != null && displayId == Display.DEFAULT_DISPLAY) { + listener.onDefaultDisplayChanged(getDefaultDisplay()); + } + } + + @Override + public void onDisplayAdded(int displayId) { + // Do nothing. + } + + @Override + public void onDisplayRemoved(int displayId) { + // Do nothing. + } + + private Display getDefaultDisplay() { + return displayManager.getDisplay(Display.DEFAULT_DISPLAY); + } + } + + /** + * Samples display vsync timestamps. A single instance using a single {@link Choreographer} is + * shared by all {@link VideoFrameReleaseHelper} instances. This is done to avoid a resource leak + * in the platform on API levels prior to 23. See [Internal: b/12455729]. + */ + private static final class VSyncSampler implements FrameCallback, Handler.Callback { + + public volatile long sampledVsyncTimeNs; + + private static final int CREATE_CHOREOGRAPHER = 0; + private static final int MSG_ADD_OBSERVER = 1; + private static final int MSG_REMOVE_OBSERVER = 2; + + private static final VSyncSampler INSTANCE = new VSyncSampler(); + + private final Handler handler; + private final HandlerThread choreographerOwnerThread; + private @MonotonicNonNull Choreographer choreographer; + private int observerCount; + + public static VSyncSampler getInstance() { + return INSTANCE; + } + + private VSyncSampler() { + sampledVsyncTimeNs = C.TIME_UNSET; + choreographerOwnerThread = new HandlerThread("ExoPlayer:FrameReleaseChoreographer"); + choreographerOwnerThread.start(); + handler = Util.createHandler(choreographerOwnerThread.getLooper(), /* callback= */ this); + handler.sendEmptyMessage(CREATE_CHOREOGRAPHER); + } + + /** + * Notifies the sampler that a {@link VideoFrameReleaseHelper} is observing {@link + * #sampledVsyncTimeNs}, and hence that the value should be periodically updated. + */ + public void addObserver() { + handler.sendEmptyMessage(MSG_ADD_OBSERVER); + } + + /** + * Notifies the sampler that a {@link VideoFrameReleaseHelper} is no longer observing {@link + * #sampledVsyncTimeNs}. 
+ */ + public void removeObserver() { + handler.sendEmptyMessage(MSG_REMOVE_OBSERVER); + } + + @Override + public void doFrame(long vsyncTimeNs) { + sampledVsyncTimeNs = vsyncTimeNs; + checkNotNull(choreographer).postFrameCallbackDelayed(this, VSYNC_SAMPLE_UPDATE_PERIOD_MS); + } + + @Override + public boolean handleMessage(Message message) { + switch (message.what) { + case CREATE_CHOREOGRAPHER: + createChoreographerInstanceInternal(); + return true; + case MSG_ADD_OBSERVER: + addObserverInternal(); + return true; + case MSG_REMOVE_OBSERVER: + removeObserverInternal(); + return true; + default: + return false; + } + } + + private void createChoreographerInstanceInternal() { + try { + choreographer = Choreographer.getInstance(); + } catch (RuntimeException e) { + // See [Internal: b/213926330]. + Log.w(TAG, "Vsync sampling disabled due to platform error", e); + } + } + + private void addObserverInternal() { + if (choreographer != null) { + observerCount++; + if (observerCount == 1) { + choreographer.postFrameCallback(this); + } + } + } + + private void removeObserverInternal() { + if (choreographer != null) { + observerCount--; + if (observerCount == 0) { + choreographer.removeFrameCallback(this); + sampledVsyncTimeNs = C.TIME_UNSET; + } + } + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoFrameReleaseTimeHelper.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoFrameReleaseTimeHelper.java deleted file mode 100644 index bf31ce2abb..0000000000 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoFrameReleaseTimeHelper.java +++ /dev/null @@ -1,361 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.android.exoplayer2.video; - -import android.annotation.TargetApi; -import android.content.Context; -import android.hardware.display.DisplayManager; -import android.os.Handler; -import android.os.HandlerThread; -import android.os.Message; -import android.view.Choreographer; -import android.view.Choreographer.FrameCallback; -import android.view.Display; -import android.view.WindowManager; -import androidx.annotation.Nullable; -import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.util.Util; - -/** - * Makes a best effort to adjust frame release timestamps for a smoother visual result. 
- */ -public final class VideoFrameReleaseTimeHelper { - - private static final long CHOREOGRAPHER_SAMPLE_DELAY_MILLIS = 500; - private static final long MAX_ALLOWED_DRIFT_NS = 20000000; - - private static final long VSYNC_OFFSET_PERCENTAGE = 80; - private static final int MIN_FRAMES_FOR_ADJUSTMENT = 6; - - private final WindowManager windowManager; - private final VSyncSampler vsyncSampler; - private final DefaultDisplayListener displayListener; - - private long vsyncDurationNs; - private long vsyncOffsetNs; - - private long lastFramePresentationTimeUs; - private long adjustedLastFrameTimeNs; - private long pendingAdjustedFrameTimeNs; - - private boolean haveSync; - private long syncUnadjustedReleaseTimeNs; - private long syncFramePresentationTimeNs; - private long frameCount; - - /** - * Constructs an instance that smooths frame release timestamps but does not align them with - * the default display's vsync signal. - */ - public VideoFrameReleaseTimeHelper() { - this(null); - } - - /** - * Constructs an instance that smooths frame release timestamps and aligns them with the default - * display's vsync signal. - * - * @param context A context from which information about the default display can be retrieved. - */ - public VideoFrameReleaseTimeHelper(@Nullable Context context) { - if (context != null) { - context = context.getApplicationContext(); - windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); - } else { - windowManager = null; - } - if (windowManager != null) { - displayListener = Util.SDK_INT >= 17 ? maybeBuildDefaultDisplayListenerV17(context) : null; - vsyncSampler = VSyncSampler.getInstance(); - } else { - displayListener = null; - vsyncSampler = null; - } - vsyncDurationNs = C.TIME_UNSET; - vsyncOffsetNs = C.TIME_UNSET; - } - - /** - * Enables the helper. Must be called from the playback thread. - */ - public void enable() { - haveSync = false; - if (windowManager != null) { - vsyncSampler.addObserver(); - if (displayListener != null) { - displayListener.register(); - } - updateDefaultDisplayRefreshRateParams(); - } - } - - /** - * Disables the helper. Must be called from the playback thread. - */ - public void disable() { - if (windowManager != null) { - if (displayListener != null) { - displayListener.unregister(); - } - vsyncSampler.removeObserver(); - } - } - - /** - * Adjusts a frame release timestamp. Must be called from the playback thread. - * - * @param framePresentationTimeUs The frame's presentation time, in microseconds. - * @param unadjustedReleaseTimeNs The frame's unadjusted release time, in nanoseconds and in - * the same time base as {@link System#nanoTime()}. - * @return The adjusted frame release timestamp, in nanoseconds and in the same time base as - * {@link System#nanoTime()}. - */ - public long adjustReleaseTime(long framePresentationTimeUs, long unadjustedReleaseTimeNs) { - long framePresentationTimeNs = framePresentationTimeUs * 1000; - - // Until we know better, the adjustment will be a no-op. - long adjustedFrameTimeNs = framePresentationTimeNs; - long adjustedReleaseTimeNs = unadjustedReleaseTimeNs; - - if (haveSync) { - // See if we've advanced to the next frame. - if (framePresentationTimeUs != lastFramePresentationTimeUs) { - frameCount++; - adjustedLastFrameTimeNs = pendingAdjustedFrameTimeNs; - } - if (frameCount >= MIN_FRAMES_FOR_ADJUSTMENT) { - // We're synced and have waited the required number of frames to apply an adjustment. 
- // Calculate the average frame time across all the frames we've seen since the last sync. - // This will typically give us a frame rate at a finer granularity than the frame times - // themselves (which often only have millisecond granularity). - long averageFrameDurationNs = (framePresentationTimeNs - syncFramePresentationTimeNs) - / frameCount; - // Project the adjusted frame time forward using the average. - long candidateAdjustedFrameTimeNs = adjustedLastFrameTimeNs + averageFrameDurationNs; - - if (isDriftTooLarge(candidateAdjustedFrameTimeNs, unadjustedReleaseTimeNs)) { - haveSync = false; - } else { - adjustedFrameTimeNs = candidateAdjustedFrameTimeNs; - adjustedReleaseTimeNs = syncUnadjustedReleaseTimeNs + adjustedFrameTimeNs - - syncFramePresentationTimeNs; - } - } else { - // We're synced but haven't waited the required number of frames to apply an adjustment. - // Check drift anyway. - if (isDriftTooLarge(framePresentationTimeNs, unadjustedReleaseTimeNs)) { - haveSync = false; - } - } - } - - // If we need to sync, do so now. - if (!haveSync) { - syncFramePresentationTimeNs = framePresentationTimeNs; - syncUnadjustedReleaseTimeNs = unadjustedReleaseTimeNs; - frameCount = 0; - haveSync = true; - } - - lastFramePresentationTimeUs = framePresentationTimeUs; - pendingAdjustedFrameTimeNs = adjustedFrameTimeNs; - - if (vsyncSampler == null || vsyncDurationNs == C.TIME_UNSET) { - return adjustedReleaseTimeNs; - } - long sampledVsyncTimeNs = vsyncSampler.sampledVsyncTimeNs; - if (sampledVsyncTimeNs == C.TIME_UNSET) { - return adjustedReleaseTimeNs; - } - - // Find the timestamp of the closest vsync. This is the vsync that we're targeting. - long snappedTimeNs = closestVsync(adjustedReleaseTimeNs, sampledVsyncTimeNs, vsyncDurationNs); - // Apply an offset so that we release before the target vsync, but after the previous one. - return snappedTimeNs - vsyncOffsetNs; - } - - @TargetApi(17) - private DefaultDisplayListener maybeBuildDefaultDisplayListenerV17(Context context) { - DisplayManager manager = (DisplayManager) context.getSystemService(Context.DISPLAY_SERVICE); - return manager == null ? null : new DefaultDisplayListener(manager); - } - - private void updateDefaultDisplayRefreshRateParams() { - // Note: If we fail to update the parameters, we leave them set to their previous values. 
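For orientation, a rough standalone sketch of the vsync arithmetic this helper performs, using made-up numbers and assuming a 60 Hz default display (illustrative only):

long nanosPerSecond = 1_000_000_000L;
long vsyncDurationNs = (long) (nanosPerSecond / 60.0);    // ~16_666_666 ns between vsyncs
long vsyncOffsetNs = (vsyncDurationNs * 80) / 100;        // ~13_333_332 ns, i.e. release ~80% of a vsync early

// Snapping a hypothetical release time onto the sampled vsync grid, as closestVsync() does below:
long sampledVsyncTimeNs = 5_000_000_000L;                 // last Choreographer vsync sample
long releaseTimeNs = 5_040_000_000L;                      // unadjusted release time
long vsyncCount = (releaseTimeNs - sampledVsyncTimeNs) / vsyncDurationNs;  // 2
long snappedTimeNs = sampledVsyncTimeNs + vsyncCount * vsyncDurationNs;    // 5_033_333_332 ns
// closestVsync() also checks the following vsync; here the earlier one is nearer, so it wins.
long targetReleaseNs = snappedTimeNs - vsyncOffsetNs;     // released shortly after the previous vsync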
- Display defaultDisplay = windowManager.getDefaultDisplay(); - if (defaultDisplay != null) { - double defaultDisplayRefreshRate = defaultDisplay.getRefreshRate(); - vsyncDurationNs = (long) (C.NANOS_PER_SECOND / defaultDisplayRefreshRate); - vsyncOffsetNs = (vsyncDurationNs * VSYNC_OFFSET_PERCENTAGE) / 100; - } - } - - private boolean isDriftTooLarge(long frameTimeNs, long releaseTimeNs) { - long elapsedFrameTimeNs = frameTimeNs - syncFramePresentationTimeNs; - long elapsedReleaseTimeNs = releaseTimeNs - syncUnadjustedReleaseTimeNs; - return Math.abs(elapsedReleaseTimeNs - elapsedFrameTimeNs) > MAX_ALLOWED_DRIFT_NS; - } - - private static long closestVsync(long releaseTime, long sampledVsyncTime, long vsyncDuration) { - long vsyncCount = (releaseTime - sampledVsyncTime) / vsyncDuration; - long snappedTimeNs = sampledVsyncTime + (vsyncDuration * vsyncCount); - long snappedBeforeNs; - long snappedAfterNs; - if (releaseTime <= snappedTimeNs) { - snappedBeforeNs = snappedTimeNs - vsyncDuration; - snappedAfterNs = snappedTimeNs; - } else { - snappedBeforeNs = snappedTimeNs; - snappedAfterNs = snappedTimeNs + vsyncDuration; - } - long snappedAfterDiff = snappedAfterNs - releaseTime; - long snappedBeforeDiff = releaseTime - snappedBeforeNs; - return snappedAfterDiff < snappedBeforeDiff ? snappedAfterNs : snappedBeforeNs; - } - - @TargetApi(17) - private final class DefaultDisplayListener implements DisplayManager.DisplayListener { - - private final DisplayManager displayManager; - - public DefaultDisplayListener(DisplayManager displayManager) { - this.displayManager = displayManager; - } - - public void register() { - displayManager.registerDisplayListener(this, null); - } - - public void unregister() { - displayManager.unregisterDisplayListener(this); - } - - @Override - public void onDisplayAdded(int displayId) { - // Do nothing. - } - - @Override - public void onDisplayRemoved(int displayId) { - // Do nothing. - } - - @Override - public void onDisplayChanged(int displayId) { - if (displayId == Display.DEFAULT_DISPLAY) { - updateDefaultDisplayRefreshRateParams(); - } - } - - } - - /** - * Samples display vsync timestamps. A single instance using a single {@link Choreographer} is - * shared by all {@link VideoFrameReleaseTimeHelper} instances. This is done to avoid a resource - * leak in the platform on API levels prior to 23. See [Internal: b/12455729]. - */ - private static final class VSyncSampler implements FrameCallback, Handler.Callback { - - public volatile long sampledVsyncTimeNs; - - private static final int CREATE_CHOREOGRAPHER = 0; - private static final int MSG_ADD_OBSERVER = 1; - private static final int MSG_REMOVE_OBSERVER = 2; - - private static final VSyncSampler INSTANCE = new VSyncSampler(); - - private final Handler handler; - private final HandlerThread choreographerOwnerThread; - private Choreographer choreographer; - private int observerCount; - - public static VSyncSampler getInstance() { - return INSTANCE; - } - - private VSyncSampler() { - sampledVsyncTimeNs = C.TIME_UNSET; - choreographerOwnerThread = new HandlerThread("ChoreographerOwner:Handler"); - choreographerOwnerThread.start(); - handler = Util.createHandler(choreographerOwnerThread.getLooper(), /* callback= */ this); - handler.sendEmptyMessage(CREATE_CHOREOGRAPHER); - } - - /** - * Notifies the sampler that a {@link VideoFrameReleaseTimeHelper} is observing - * {@link #sampledVsyncTimeNs}, and hence that the value should be periodically updated. 
- */ - public void addObserver() { - handler.sendEmptyMessage(MSG_ADD_OBSERVER); - } - - /** - * Notifies the sampler that a {@link VideoFrameReleaseTimeHelper} is no longer observing - * {@link #sampledVsyncTimeNs}. - */ - public void removeObserver() { - handler.sendEmptyMessage(MSG_REMOVE_OBSERVER); - } - - @Override - public void doFrame(long vsyncTimeNs) { - sampledVsyncTimeNs = vsyncTimeNs; - choreographer.postFrameCallbackDelayed(this, CHOREOGRAPHER_SAMPLE_DELAY_MILLIS); - } - - @Override - public boolean handleMessage(Message message) { - switch (message.what) { - case CREATE_CHOREOGRAPHER: { - createChoreographerInstanceInternal(); - return true; - } - case MSG_ADD_OBSERVER: { - addObserverInternal(); - return true; - } - case MSG_REMOVE_OBSERVER: { - removeObserverInternal(); - return true; - } - default: { - return false; - } - } - } - - private void createChoreographerInstanceInternal() { - choreographer = Choreographer.getInstance(); - } - - private void addObserverInternal() { - observerCount++; - if (observerCount == 1) { - choreographer.postFrameCallback(this); - } - } - - private void removeObserverInternal() { - observerCount--; - if (observerCount == 0) { - choreographer.removeFrameCallback(this); - sampledVsyncTimeNs = C.TIME_UNSET; - } - } - - } - -} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoListener.java old mode 100755 new mode 100644 index 07a6f98b21..f41f7e3e9a --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoListener.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoListener.java @@ -20,44 +20,8 @@ /** A listener for metadata corresponding to video being rendered. */ public interface VideoListener { - /** - * Called each time there's a change in the size of the video being rendered. - * - * @param width The video width in pixels. - * @param height The video height in pixels. - * @param unappliedRotationDegrees For videos that require a rotation, this is the clockwise - * rotation in degrees that the application should apply for the video for it to be rendered - * in the correct orientation. This value will always be zero on API levels 21 and above, - * since the renderer will apply all necessary rotations internally. On earlier API levels - * this is not possible. Applications that use {@link android.view.TextureView} can apply the - * rotation by calling {@link android.view.TextureView#setTransform}. Applications that do not - * expect to encounter rotated videos can safely ignore this parameter. - * @param pixelWidthHeightRatio The width to height ratio of each pixel. For the normal case of - * square pixels this will be equal to 1.0. Different values are indicative of anamorphic - * content. - */ - default void onVideoSizeChanged( - int width, int height, int unappliedRotationDegrees, float pixelWidthHeightRatio) {} - - /** - * Called each time there's a change in the size of the surface onto which the video is being - * rendered. - * - * @param width The surface width in pixels. May be {@link - * com.google.android.exoplayer2.C#LENGTH_UNSET} if unknown, or 0 if the video is not rendered - * onto a surface. - * @param height The surface height in pixels. May be {@link - * com.google.android.exoplayer2.C#LENGTH_UNSET} if unknown, or 0 if the video is not rendered - * onto a surface. 
- */ default void onSurfaceSizeChanged(int width, int height) {} - /** - * Called when a frame is rendered for the first time since setting the surface, and when a frame - * is rendered for the first time since a video track was selected. - */ - default void onRenderedFirstFrame() {} - boolean onSurfaceDestroyed(SurfaceTexture surfaceTexture); void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoRendererEventListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoRendererEventListener.java index e7dfd123b1..81b4796fc7 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoRendererEventListener.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoRendererEventListener.java @@ -17,14 +17,18 @@ import static com.google.android.exoplayer2.util.Util.castNonNull; +import android.media.MediaCodec; +import android.media.MediaCodec.CodecException; import android.os.Handler; import android.os.SystemClock; import android.view.Surface; -import android.view.TextureView; import androidx.annotation.Nullable; import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.Renderer; import com.google.android.exoplayer2.decoder.DecoderCounters; +import com.google.android.exoplayer2.decoder.DecoderException; +import com.google.android.exoplayer2.decoder.DecoderReuseEvaluation; import com.google.android.exoplayer2.util.Assertions; /** @@ -52,12 +56,22 @@ default void onVideoEnabled(DecoderCounters counters) {} default void onVideoDecoderInitialized( String decoderName, long initializedTimestampMs, long initializationDurationMs) {} + /** + * @deprecated Use {@link #onVideoInputFormatChanged(Format, DecoderReuseEvaluation)}. + */ + @Deprecated + default void onVideoInputFormatChanged(Format format) {} + /** * Called when the format of the media being consumed by the renderer changes. * * @param format The new format. + * @param decoderReuseEvaluation The result of the evaluation to determine whether an existing + * decoder instance can be reused for the new format, or {@code null} if the renderer did not + * have a decoder. */ - default void onVideoInputFormatChanged(Format format) {} + default void onVideoInputFormatChanged( + Format format, @Nullable DecoderReuseEvaluation decoderReuseEvaluation) {} /** * Called to report the number of frames dropped by the renderer. Dropped frames are reported @@ -71,34 +85,50 @@ default void onVideoInputFormatChanged(Format format) {} */ default void onDroppedFrames(int count, long elapsedMs) {} + /** + * Called to report the video processing offset of video frames processed by the video renderer. + * + *
      Video processing offset represents how early a video frame is processed compared to the + * player's current position. For each video frame, the offset is calculated as Pvf + * - Ppl where Pvf is the presentation timestamp of the video + * frame and Ppl is the current position of the player. Positive values + * indicate the frame was processed early enough whereas negative values indicate that the + * player's position had progressed beyond the frame's timestamp when the frame was processed (and + * the frame was probably dropped). + * + *
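A quick numeric illustration of the sign convention described above (made-up values, in microseconds):

long pvfUs = 1_000_000;                   // presentation timestamp of the video frame (Pvf)
long pplEarlyUs = 998_000;                // player position while the frame is processed (Ppl)
long pplLateUs = 1_005_000;               // player position in a late-processing case
long earlyOffsetUs = pvfUs - pplEarlyUs;  // +2_000: the frame was processed early enough
long lateOffsetUs = pvfUs - pplLateUs;    // -5_000: the position had already passed the frame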
      The renderer reports the sum of video processing offset samples (one sample per processed + * video frame: dropped, skipped or rendered) and the total number of samples. + * + * @param totalProcessingOffsetUs The sum of all video frame processing offset samples for the + * video frames processed by the renderer in microseconds. + * @param frameCount The number of samples included in the {@code totalProcessingOffsetUs}. + */ + default void onVideoFrameProcessingOffset(long totalProcessingOffsetUs, int frameCount) {} + /** * Called before a frame is rendered for the first time since setting the surface, and each time * there's a change in the size, rotation or pixel aspect ratio of the video being rendered. * - * @param width The video width in pixels. - * @param height The video height in pixels. - * @param unappliedRotationDegrees For videos that require a rotation, this is the clockwise - * rotation in degrees that the application should apply for the video for it to be rendered - * in the correct orientation. This value will always be zero on API levels 21 and above, - * since the renderer will apply all necessary rotations internally. On earlier API levels - * this is not possible. Applications that use {@link TextureView} can apply the rotation by - * calling {@link TextureView#setTransform}. Applications that do not expect to encounter - * rotated videos can safely ignore this parameter. - * @param pixelWidthHeightRatio The width to height ratio of each pixel. For the normal case of - * square pixels this will be equal to 1.0. Different values are indicative of anamorphic - * content. + * @param videoSize The new size of the video. */ - default void onVideoSizeChanged( - int width, int height, int unappliedRotationDegrees, float pixelWidthHeightRatio) {} + default void onVideoSizeChanged(VideoSize videoSize) {} /** - * Called when a frame is rendered for the first time since setting the surface, and when a frame - * is rendered for the first time since the renderer was reset. + * Called when a frame is rendered for the first time since setting the output, or since the + * renderer was reset, or since the stream being rendered was changed. * - * @param surface The {@link Surface} to which a first frame has been rendered, or {@code null} if - * the renderer renders to something that isn't a {@link Surface}. + * @param output The output of the video renderer. Normally a {@link Surface}, however some video + * renderers may have other output types (e.g., a {@link VideoDecoderOutputBufferRenderer}). + * @param renderTimeMs The {@link SystemClock#elapsedRealtime()} when the frame was rendered. */ - default void onRenderedFirstFrame(@Nullable Surface surface) {} + default void onRenderedFirstFrame(Object output, long renderTimeMs) {} + + /** + * Called when a decoder is released. + * + * @param decoderName The decoder that was released. + */ + default void onVideoDecoderReleased(String decoderName) {} /** * Called when the renderer is disabled. @@ -108,20 +138,33 @@ default void onRenderedFirstFrame(@Nullable Surface surface) {} default void onVideoDisabled(DecoderCounters counters) {} /** - * Dispatches events to a {@link VideoRendererEventListener}. + * Called when a video decoder encounters an error. + * + *
      This method being called does not indicate that playback has failed, or that it will fail. + * The player may be able to recover from the error. Hence applications should not + * implement this method to display a user visible error or initiate an application level retry. + * {@link Player.Listener#onPlayerError} is the appropriate place to implement such behavior. This + * method is called to provide the application with an opportunity to log the error if it wishes + * to do so. + * + * @param videoCodecError The error. Typically a {@link CodecException} if the renderer uses + * {@link MediaCodec}, or a {@link DecoderException} if the renderer uses a software decoder. */ + default void onVideoCodecError(Exception videoCodecError) {} + + /** Dispatches events to a {@link VideoRendererEventListener}. */ final class EventDispatcher { @Nullable private final Handler handler; @Nullable private final VideoRendererEventListener listener; /** - * @param handler A handler for dispatching events, or null if creating a dummy instance. - * @param listener The listener to which events should be dispatched, or null if creating a - * dummy instance. + * @param handler A handler for dispatching events, or null if events should not be dispatched. + * @param listener The listener to which events should be dispatched, or null if events should + * not be dispatched. */ - public EventDispatcher(@Nullable Handler handler, - @Nullable VideoRendererEventListener listener) { + public EventDispatcher( + @Nullable Handler handler, @Nullable VideoRendererEventListener listener) { this.handler = listener != null ? Assertions.checkNotNull(handler) : null; this.listener = listener; } @@ -145,10 +188,19 @@ public void decoderInitialized( } } - /** Invokes {@link VideoRendererEventListener#onVideoInputFormatChanged(Format)}. */ - public void inputFormatChanged(Format format) { + /** + * Invokes {@link VideoRendererEventListener#onVideoInputFormatChanged(Format, + * DecoderReuseEvaluation)}. + */ + @SuppressWarnings("deprecation") // Calling deprecated listener method. + public void inputFormatChanged( + Format format, @Nullable DecoderReuseEvaluation decoderReuseEvaluation) { if (handler != null) { - handler.post(() -> castNonNull(listener).onVideoInputFormatChanged(format)); + handler.post( + () -> { + castNonNull(listener).onVideoInputFormatChanged(format); + castNonNull(listener).onVideoInputFormatChanged(format, decoderReuseEvaluation); + }); } } @@ -159,25 +211,36 @@ public void droppedFrames(int droppedFrameCount, long elapsedMs) { } } - /** Invokes {@link VideoRendererEventListener#onVideoSizeChanged(int, int, int, float)}. */ - public void videoSizeChanged( - int width, - int height, - final int unappliedRotationDegrees, - final float pixelWidthHeightRatio) { + /** Invokes {@link VideoRendererEventListener#onVideoFrameProcessingOffset}. */ + public void reportVideoFrameProcessingOffset(long totalProcessingOffsetUs, int frameCount) { if (handler != null) { handler.post( () -> castNonNull(listener) - .onVideoSizeChanged( - width, height, unappliedRotationDegrees, pixelWidthHeightRatio)); + .onVideoFrameProcessingOffset(totalProcessingOffsetUs, frameCount)); + } + } + + /** Invokes {@link VideoRendererEventListener#onVideoSizeChanged(VideoSize)}. */ + public void videoSizeChanged(VideoSize videoSize) { + if (handler != null) { + handler.post(() -> castNonNull(listener).onVideoSizeChanged(videoSize)); } } - /** Invokes {@link VideoRendererEventListener#onRenderedFirstFrame(Surface)}. 
*/ - public void renderedFirstFrame(@Nullable Surface surface) { + /** Invokes {@link VideoRendererEventListener#onRenderedFirstFrame(Object, long)}. */ + public void renderedFirstFrame(Object output) { if (handler != null) { - handler.post(() -> castNonNull(listener).onRenderedFirstFrame(surface)); + // TODO: Replace this timestamp with the actual frame release time. + long renderTimeMs = SystemClock.elapsedRealtime(); + handler.post(() -> castNonNull(listener).onRenderedFirstFrame(output, renderTimeMs)); + } + } + + /** Invokes {@link VideoRendererEventListener#onVideoDecoderReleased(String)}. */ + public void decoderReleased(String decoderName) { + if (handler != null) { + handler.post(() -> castNonNull(listener).onVideoDecoderReleased(decoderName)); } } @@ -193,6 +256,11 @@ public void disabled(DecoderCounters counters) { } } + /** Invokes {@link VideoRendererEventListener#onVideoCodecError(Exception)}. */ + public void videoCodecError(Exception videoCodecError) { + if (handler != null) { + handler.post(() -> castNonNull(listener).onVideoCodecError(videoCodecError)); + } + } } - } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoSize.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoSize.java new file mode 100644 index 0000000000..2a9a1861d0 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/VideoSize.java @@ -0,0 +1,153 @@ +/* + * Copyright 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.video; + +import android.os.Bundle; +import androidx.annotation.FloatRange; +import androidx.annotation.IntRange; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.Bundleable; +import com.google.android.exoplayer2.util.Util; + +/** Represents the video size. */ +public final class VideoSize implements Bundleable { + + private static final int DEFAULT_WIDTH = 0; + private static final int DEFAULT_HEIGHT = 0; + private static final int DEFAULT_UNAPPLIED_ROTATION_DEGREES = 0; + private static final float DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO = 1F; + + public static final VideoSize UNKNOWN = new VideoSize(DEFAULT_WIDTH, DEFAULT_HEIGHT); + + /** The video width in pixels, 0 when unknown. */ + @IntRange(from = 0) + public final int width; + + /** The video height in pixels, 0 when unknown. */ + @IntRange(from = 0) + public final int height; + + /** + * Clockwise rotation in degrees that the application should apply for the video for it to be + * rendered in the correct orientation. + * + *
      Is 0 if unknown or if no rotation is needed. + * + *
      Player should apply video rotation internally, in which case unappliedRotationDegrees is 0. + * But when a player can't apply the rotation, for example before API level 21, the unapplied + * rotation is reported by this field for application to handle. + * + *
      Applications that use {@link android.view.TextureView} can apply the rotation by calling + * {@link android.view.TextureView#setTransform}. + */ + @IntRange(from = 0, to = 359) + public final int unappliedRotationDegrees; + + /** + * The width to height ratio of each pixel, 1 if unknown. + * + *
      For the normal case of square pixels this will be equal to 1.0. Different values are + * indicative of anamorphic content. + */ + @FloatRange(from = 0, fromInclusive = false) + public final float pixelWidthHeightRatio; + + /** + * Creates a VideoSize without unapplied rotation or anamorphic content. + * + * @param width The video width in pixels. + * @param height The video height in pixels. + */ + public VideoSize(@IntRange(from = 0) int width, @IntRange(from = 0) int height) { + this(width, height, DEFAULT_UNAPPLIED_ROTATION_DEGREES, DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO); + } + + /** + * Creates a VideoSize. + * + * @param width The video width in pixels. + * @param height The video height in pixels. + * @param unappliedRotationDegrees Clockwise rotation in degrees that the application should apply + * for the video for it to be rendered in the correct orientation. See {@link + * #unappliedRotationDegrees}. + * @param pixelWidthHeightRatio The width to height ratio of each pixel. For the normal case of + * square pixels this will be equal to 1.0. Different values are indicative of anamorphic + * content. + */ + public VideoSize( + @IntRange(from = 0) int width, + @IntRange(from = 0) int height, + @IntRange(from = 0, to = 359) int unappliedRotationDegrees, + @FloatRange(from = 0, fromInclusive = false) float pixelWidthHeightRatio) { + this.width = width; + this.height = height; + this.unappliedRotationDegrees = unappliedRotationDegrees; + this.pixelWidthHeightRatio = pixelWidthHeightRatio; + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (obj instanceof VideoSize) { + VideoSize other = (VideoSize) obj; + return width == other.width + && height == other.height + && unappliedRotationDegrees == other.unappliedRotationDegrees + && pixelWidthHeightRatio == other.pixelWidthHeightRatio; + } + return false; + } + + @Override + public int hashCode() { + int result = 7; + result = 31 * result + width; + result = 31 * result + height; + result = 31 * result + unappliedRotationDegrees; + result = 31 * result + Float.floatToRawIntBits(pixelWidthHeightRatio); + return result; + } + + // Bundleable implementation. 
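A short usage sketch of the bundling code that follows, assuming ExoPlayer's Bundleable.Creator#fromBundle (illustrative only):

// Round-trips a VideoSize through a Bundle; the restored instance equals the original.
VideoSize original = new VideoSize(/* width= */ 1920, /* height= */ 1080);
Bundle bundle = original.toBundle();
VideoSize restored = VideoSize.CREATOR.fromBundle(bundle);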
+ + private static final String FIELD_WIDTH = Util.intToStringMaxRadix(0); + private static final String FIELD_HEIGHT = Util.intToStringMaxRadix(1); + private static final String FIELD_UNAPPLIED_ROTATION_DEGREES = Util.intToStringMaxRadix(2); + private static final String FIELD_PIXEL_WIDTH_HEIGHT_RATIO = Util.intToStringMaxRadix(3); + + @Override + public Bundle toBundle() { + Bundle bundle = new Bundle(); + bundle.putInt(FIELD_WIDTH, width); + bundle.putInt(FIELD_HEIGHT, height); + bundle.putInt(FIELD_UNAPPLIED_ROTATION_DEGREES, unappliedRotationDegrees); + bundle.putFloat(FIELD_PIXEL_WIDTH_HEIGHT_RATIO, pixelWidthHeightRatio); + return bundle; + } + + public static final Creator CREATOR = + bundle -> { + int width = bundle.getInt(FIELD_WIDTH, DEFAULT_WIDTH); + int height = bundle.getInt(FIELD_HEIGHT, DEFAULT_HEIGHT); + int unappliedRotationDegrees = + bundle.getInt(FIELD_UNAPPLIED_ROTATION_DEGREES, DEFAULT_UNAPPLIED_ROTATION_DEGREES); + float pixelWidthHeightRatio = + bundle.getFloat(FIELD_PIXEL_WIDTH_HEIGHT_RATIO, DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO); + return new VideoSize(width, height, unappliedRotationDegrees, pixelWidthHeightRatio); + }; +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/CameraMotionRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/CameraMotionRenderer.java index 35804adbe3..b435ccb125 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/CameraMotionRenderer.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/CameraMotionRenderer.java @@ -24,16 +24,18 @@ import com.google.android.exoplayer2.Renderer; import com.google.android.exoplayer2.RendererCapabilities; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; +import com.google.android.exoplayer2.source.SampleStream.ReadDataResult; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.ParsableByteArray; import com.google.android.exoplayer2.util.Util; import java.nio.ByteBuffer; /** A {@link Renderer} that parses the camera motion track. */ -public class CameraMotionRenderer extends BaseRenderer { +public final class CameraMotionRenderer extends BaseRenderer { + private static final String TAG = "CameraMotionRenderer"; // The amount of time to read samples ahead of the current time. - private static final int SAMPLE_WINDOW_DURATION_US = 100000; + private static final int SAMPLE_WINDOW_DURATION_US = 100_000; private final DecoderInputBuffer buffer; private final ParsableByteArray scratch; @@ -49,16 +51,21 @@ public CameraMotionRenderer() { } @Override - @Capabilities - public int supportsFormat(Format format) { + public String getName() { + return TAG; + } + + @Override + public @Capabilities int supportsFormat(Format format) { return MimeTypes.APPLICATION_CAMERA_MOTION.equals(format.sampleMimeType) - ? RendererCapabilities.create(FORMAT_HANDLED) - : RendererCapabilities.create(FORMAT_UNSUPPORTED_TYPE); + ? 
RendererCapabilities.create(C.FORMAT_HANDLED) + : RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE); } @Override - public void handleMessage(int messageType, @Nullable Object message) throws ExoPlaybackException { - if (messageType == C.MSG_SET_CAMERA_MOTION_LISTENER) { + public void handleMessage(@MessageType int messageType, @Nullable Object message) + throws ExoPlaybackException { + if (messageType == MSG_SET_CAMERA_MOTION_LISTENER) { listener = (CameraMotionListener) message; } else { super.handleMessage(messageType, message); @@ -66,12 +73,13 @@ public void handleMessage(int messageType, @Nullable Object message) throws ExoP } @Override - protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException { + protected void onStreamChanged(Format[] formats, long startPositionUs, long offsetUs) { this.offsetUs = offsetUs; } @Override - protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException { + protected void onPositionReset(long positionUs, boolean joining) { + lastTimestampUs = Long.MIN_VALUE; resetListener(); } @@ -81,24 +89,28 @@ protected void onDisabled() { } @Override - public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException { + public void render(long positionUs, long elapsedRealtimeUs) { // Keep reading available samples as long as the sample time is not too far into the future. while (!hasReadStreamToEnd() && lastTimestampUs < positionUs + SAMPLE_WINDOW_DURATION_US) { buffer.clear(); FormatHolder formatHolder = getFormatHolder(); - int result = readSource(formatHolder, buffer, /* formatRequired= */ false); + @ReadDataResult int result = readSource(formatHolder, buffer, /* readFlags= */ 0); if (result != C.RESULT_BUFFER_READ || buffer.isEndOfStream()) { return; } - buffer.flip(); lastTimestampUs = buffer.timeUs; - if (listener != null) { - float[] rotation = parseMetadata(Util.castNonNull(buffer.data)); - if (rotation != null) { - Util.castNonNull(listener).onCameraMotion(lastTimestampUs - offsetUs, rotation); - } + if (listener == null || buffer.isDecodeOnly()) { + continue; } + + buffer.flip(); + @Nullable float[] rotation = parseMetadata(Util.castNonNull(buffer.data)); + if (rotation == null) { + continue; + } + + Util.castNonNull(listener).onCameraMotion(lastTimestampUs - offsetUs, rotation); } } @@ -112,7 +124,8 @@ public boolean isReady() { return true; } - private @Nullable float[] parseMetadata(ByteBuffer data) { + @Nullable + private float[] parseMetadata(ByteBuffer data) { if (data.remaining() != 16) { return null; } @@ -126,7 +139,6 @@ public boolean isReady() { } private void resetListener() { - lastTimestampUs = 0; if (listener != null) { listener.onCameraMotionReset(); } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/FrameRotationQueue.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/FrameRotationQueue.java index d464bf04fa..e2cecbbe4d 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/FrameRotationQueue.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/FrameRotationQueue.java @@ -16,6 +16,7 @@ package com.google.android.exoplayer2.video.spherical; import android.opengl.Matrix; +import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.TimedValueQueue; /** @@ -27,7 +28,7 @@ *
    • Recenters the rotations to componsate the yaw of the initial rotation. * */ -public final class FrameRotationQueue { +/* package */ final class FrameRotationQueue { private final float[] recenterMatrix; private final float[] rotationMatrix; private final TimedValueQueue rotations; @@ -96,7 +97,7 @@ public static void computeRecenterMatrix(float[] recenterMatrix, float[] rotatio // | 0 1 0 0| // recenter = | temp[8] 0 temp[10] 0| // | 0 0 0 1| - Matrix.setIdentityM(recenterMatrix, 0); + GlUtil.setToIdentity(recenterMatrix); float normRowSqr = rotationMatrix[10] * rotationMatrix[10] + rotationMatrix[8] * rotationMatrix[8]; float normRow = (float) Math.sqrt(normRowSqr); @@ -118,7 +119,7 @@ private static void getRotationMatrixFromAngleAxis(float[] matrix, float[] angle float angleDeg = (float) Math.toDegrees(angleRad); Matrix.setRotateM(matrix, 0, angleDeg, x / angleRad, y / angleRad, z / angleRad); } else { - Matrix.setIdentityM(matrix, 0); + GlUtil.setToIdentity(matrix); } } } diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/OrientationListener.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/OrientationListener.java new file mode 100644 index 0000000000..d35c7531b1 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/OrientationListener.java @@ -0,0 +1,125 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.video.spherical; + +import android.hardware.Sensor; +import android.hardware.SensorEvent; +import android.hardware.SensorEventListener; +import android.hardware.SensorManager; +import android.opengl.Matrix; +import android.view.Display; +import android.view.Surface; +import androidx.annotation.BinderThread; + +/** + * Listens for orientation sensor events, converts event data to rotation matrix and roll value, and + * notifies its own listeners. + */ +/* package */ final class OrientationListener implements SensorEventListener { + /** A listener for orientation changes. */ + public interface Listener { + /** + * Called on device orientation change. + * + * @param deviceOrientationMatrix A 4x4 matrix defining device orientation. + * @param deviceRoll Device roll value, in radians. The range of values is -π/2 to π/2. + */ + void onOrientationChange(float[] deviceOrientationMatrix, float deviceRoll); + } + + private final float[] deviceOrientationMatrix4x4 = new float[16]; + private final float[] tempMatrix4x4 = new float[16]; + private final float[] recenterMatrix4x4 = new float[16]; + private final float[] angles = new float[3]; + private final Display display; + private final Listener[] listeners; + private boolean recenterMatrixComputed; + + public OrientationListener(Display display, Listener... 
listeners) { + this.display = display; + this.listeners = listeners; + } + + @Override + @BinderThread + public void onSensorChanged(SensorEvent event) { + SensorManager.getRotationMatrixFromVector(deviceOrientationMatrix4x4, event.values); + rotateAroundZ(deviceOrientationMatrix4x4, display.getRotation()); + float roll = extractRoll(deviceOrientationMatrix4x4); + // Rotation vector sensor assumes Y is parallel to the ground. + rotateYtoSky(deviceOrientationMatrix4x4); + recenter(deviceOrientationMatrix4x4); + notifyListeners(deviceOrientationMatrix4x4, roll); + } + + @Override + public void onAccuracyChanged(Sensor sensor, int accuracy) { + // Do nothing. + } + + private void notifyListeners(float[] deviceOrientationMatrix, float roll) { + for (Listener listener : listeners) { + listener.onOrientationChange(deviceOrientationMatrix, roll); + } + } + + private void recenter(float[] matrix) { + if (!recenterMatrixComputed) { + FrameRotationQueue.computeRecenterMatrix(recenterMatrix4x4, matrix); + recenterMatrixComputed = true; + } + System.arraycopy(matrix, 0, tempMatrix4x4, 0, tempMatrix4x4.length); + Matrix.multiplyMM(matrix, 0, tempMatrix4x4, 0, recenterMatrix4x4, 0); + } + + private float extractRoll(float[] matrix) { + // Remapping is required since we need the calculated roll of the phone to be independent of the + // phone's pitch & yaw. + SensorManager.remapCoordinateSystem( + matrix, SensorManager.AXIS_X, SensorManager.AXIS_MINUS_Z, tempMatrix4x4); + SensorManager.getOrientation(tempMatrix4x4, angles); + return angles[2]; + } + + private void rotateAroundZ(float[] matrix, int rotation) { + int xAxis; + int yAxis; + switch (rotation) { + case Surface.ROTATION_270: + xAxis = SensorManager.AXIS_MINUS_Y; + yAxis = SensorManager.AXIS_X; + break; + case Surface.ROTATION_180: + xAxis = SensorManager.AXIS_MINUS_X; + yAxis = SensorManager.AXIS_MINUS_Y; + break; + case Surface.ROTATION_90: + xAxis = SensorManager.AXIS_Y; + yAxis = SensorManager.AXIS_MINUS_X; + break; + case Surface.ROTATION_0: + return; + default: + throw new IllegalStateException(); + } + System.arraycopy(matrix, 0, tempMatrix4x4, 0, tempMatrix4x4.length); + SensorManager.remapCoordinateSystem(tempMatrix4x4, xAxis, yAxis, matrix); + } + + private static void rotateYtoSky(float[] matrix) { + Matrix.rotateM(matrix, 0, 90, 1, 0, 0); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/Projection.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/Projection.java index 8ba24bb06e..285288fc90 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/Projection.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/Projection.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.video.spherical; +import static java.lang.annotation.ElementType.TYPE_USE; + import androidx.annotation.IntDef; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C.StereoMode; @@ -22,13 +24,15 @@ import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** The projection mesh used with 360/VR videos. */ -public final class Projection { +/* package */ final class Projection { /** Enforces allowed (sub) mesh draw modes. 
*/ @Documented @Retention(RetentionPolicy.SOURCE) + @Target(TYPE_USE) @IntDef({DRAW_MODE_TRIANGLES, DRAW_MODE_TRIANGLES_STRIP, DRAW_MODE_TRIANGLES_FAN}) public @interface DrawMode {} /** Triangle draw mode. */ @@ -109,7 +113,7 @@ public static Projection createEquirectangular( for (int i = 0; i < longitudes + 1; ++i) { // For each vertical edge in the band. for (int k = 0; k < 2; ++k) { // For low and high points on an edge. - // For each point, determine it's position in polar coordinates. + // For each point, determine its position in polar coordinates. float phi = k == 0 ? phiLow : phiHigh; float theta = quadWidthRads * i + (float) Math.PI - horizontalFovRads / 2; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/ProjectionDecoder.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/ProjectionDecoder.java index eadc617ea7..3f21b36cd7 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/ProjectionDecoder.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/ProjectionDecoder.java @@ -34,7 +34,7 @@ * *
      The decoder does not perform CRC checks at the moment. */ -public final class ProjectionDecoder { +/* package */ final class ProjectionDecoder { private static final int TYPE_YTMP = 0x79746d70; private static final int TYPE_MSHP = 0x6d736870; @@ -43,9 +43,9 @@ public final class ProjectionDecoder { private static final int TYPE_MESH = 0x6d657368; private static final int TYPE_PROJ = 0x70726f6a; - // Sanity limits to prevent a bad file from creating an OOM situation. We don't expect a mesh to + // Limits to prevent a bad file from creating an OOM situation. We don't expect a mesh to // exceed these limits. - private static final int MAX_COORDINATE_COUNT = 10000; + private static final int MAX_COORDINATE_COUNT = 10_000; private static final int MAX_VERTEX_COUNT = 32 * 1000; private static final int MAX_TRIANGLE_INDICES = 128 * 1000; @@ -58,7 +58,8 @@ private ProjectionDecoder() {} * @param stereoMode A {@link C.StereoMode} value. * @return The projection or null if the data can't be decoded. */ - public static @Nullable Projection decode(byte[] projectionData, @C.StereoMode int stereoMode) { + @Nullable + public static Projection decode(byte[] projectionData, @C.StereoMode int stereoMode) { ParsableByteArray input = new ParsableByteArray(projectionData); // MP4 containers include the proj box but webm containers do not. // Both containers use mshp. @@ -91,7 +92,8 @@ private static boolean isProj(ParsableByteArray input) { return type == TYPE_PROJ; } - private static @Nullable ArrayList parseProj(ParsableByteArray input) { + @Nullable + private static ArrayList parseProj(ParsableByteArray input) { input.skipBytes(8); // size and type. int position = input.getPosition(); int limit = input.limit(); @@ -112,7 +114,8 @@ private static boolean isProj(ParsableByteArray input) { return null; } - private static @Nullable ArrayList parseMshp(ParsableByteArray input) { + @Nullable + private static ArrayList parseMshp(ParsableByteArray input) { int version = input.readUnsignedByte(); if (version != 0) { return null; @@ -137,7 +140,8 @@ private static boolean isProj(ParsableByteArray input) { } /** Parses MSHP data after the encoding_four_cc field. */ - private static @Nullable ArrayList parseRawMshpData(ParsableByteArray input) { + @Nullable + private static ArrayList parseRawMshpData(ParsableByteArray input) { ArrayList meshes = new ArrayList<>(); int position = input.getPosition(); int limit = input.limit(); @@ -160,7 +164,8 @@ private static boolean isProj(ParsableByteArray input) { return meshes; } - private static @Nullable Mesh parseMesh(ParsableByteArray input) { + @Nullable + private static Mesh parseMesh(ParsableByteArray input) { // Read the coordinates. 
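As a sanity check on the bit-width computation used a few lines below (illustrative only):

// Per-vertex index fields are read using ceil(log2(2 * coordinateCount)) bits each.
int exampleCoordinateCount = 1_000;
int exampleSizeBits = (int) Math.ceil(Math.log(2.0 * exampleCoordinateCount) / Math.log(2.0));
// exampleSizeBits == 11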
int coordinateCount = input.readInt(); if (coordinateCount > MAX_COORDINATE_COUNT) { @@ -179,7 +184,7 @@ private static boolean isProj(ParsableByteArray input) { final double log2 = Math.log(2.0); int coordinateCountSizeBits = (int) Math.ceil(Math.log(2.0 * coordinateCount) / log2); - ParsableBitArray bitInput = new ParsableBitArray(input.data); + ParsableBitArray bitInput = new ParsableBitArray(input.getData()); bitInput.setPosition(input.getPosition() * 8); float[] vertices = new float[vertexCount * 5]; int[] coordinateIndices = new int[5]; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/ProjectionRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/ProjectionRenderer.java new file mode 100644 index 0000000000..c7ba8c4b4c --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/ProjectionRenderer.java @@ -0,0 +1,242 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.video.spherical; + +import static com.google.android.exoplayer2.util.GlUtil.checkGlError; + +import android.opengl.GLES11Ext; +import android.opengl.GLES20; +import android.util.Log; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.util.GlProgram; +import com.google.android.exoplayer2.util.GlUtil; +import java.nio.FloatBuffer; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** + * Utility class to render spherical meshes for video or images. Call {@link #init()} on the GL + * thread when ready. + */ +/* package */ final class ProjectionRenderer { + + /** + * Returns whether {@code projection} is supported. At least it should have left mesh and there + * should be only one sub mesh per mesh. + */ + public static boolean isSupported(Projection projection) { + Projection.Mesh leftMesh = projection.leftMesh; + Projection.Mesh rightMesh = projection.rightMesh; + return leftMesh.getSubMeshCount() == 1 + && leftMesh.getSubMesh(0).textureId == Projection.SubMesh.VIDEO_TEXTURE_ID + && rightMesh.getSubMeshCount() == 1 + && rightMesh.getSubMesh(0).textureId == Projection.SubMesh.VIDEO_TEXTURE_ID; + } + + private static final String TAG = "ProjectionRenderer"; + + // Basic vertex & fragment shaders to render a mesh with 3D position & 2D texture data. 
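To make the uTexMatrix handling below concrete, a small hand-worked helper (illustrative only; the texture matrices below are column-major, as passed to glUniformMatrix3fv):

// Applies a column-major 3x3 matrix to (u, v, 1), mirroring the vertex shader's vTexCoords math.
static float[] applyTexMatrix(float[] m, float u, float v) {
  float uOut = m[0] * u + m[3] * v + m[6];
  float vOut = m[1] * u + m[4] * v + m[7];
  return new float[] {uOut, vOut};
}
// Example: the whole-frame matrix maps (u, v) to (u, 1 - v), i.e. a vertical flip.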
+ private static final String VERTEX_SHADER = + "uniform mat4 uMvpMatrix;\n" + + "uniform mat3 uTexMatrix;\n" + + "attribute vec4 aPosition;\n" + + "attribute vec2 aTexCoords;\n" + + "varying vec2 vTexCoords;\n" + + "// Standard transformation.\n" + + "void main() {\n" + + " gl_Position = uMvpMatrix * aPosition;\n" + + " vTexCoords = (uTexMatrix * vec3(aTexCoords, 1)).xy;\n" + + "}\n"; + private static final String FRAGMENT_SHADER = + "// This is required since the texture data is GL_TEXTURE_EXTERNAL_OES.\n" + + "#extension GL_OES_EGL_image_external : require\n" + + "precision mediump float;\n" + + "// Standard texture rendering shader.\n" + + "uniform samplerExternalOES uTexture;\n" + + "varying vec2 vTexCoords;\n" + + "void main() {\n" + + " gl_FragColor = texture2D(uTexture, vTexCoords);\n" + + "}\n"; + + // Texture transform matrices. + private static final float[] TEX_MATRIX_WHOLE = { + 1.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, 1.0f, 1.0f + }; + private static final float[] TEX_MATRIX_TOP = { + 1.0f, 0.0f, 0.0f, 0.0f, -0.5f, 0.0f, 0.0f, 0.5f, 1.0f + }; + private static final float[] TEX_MATRIX_BOTTOM = { + 1.0f, 0.0f, 0.0f, 0.0f, -0.5f, 0.0f, 0.0f, 1.0f, 1.0f + }; + private static final float[] TEX_MATRIX_LEFT = { + 0.5f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, 1.0f, 1.0f + }; + private static final float[] TEX_MATRIX_RIGHT = { + 0.5f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.5f, 1.0f, 1.0f + }; + + private int stereoMode; + @Nullable private MeshData leftMeshData; + @Nullable private MeshData rightMeshData; + private @MonotonicNonNull GlProgram program; + + // Program related GL items. These are only valid if Program is valid. + private int mvpMatrixHandle; + private int uTexMatrixHandle; + private int positionHandle; + private int texCoordsHandle; + private int textureHandle; + + /** + * Sets a {@link Projection} to be used. + * + * @param projection Contains the projection data to be rendered. + * @see #isSupported(Projection) + */ + public void setProjection(Projection projection) { + if (!isSupported(projection)) { + return; + } + stereoMode = projection.stereoMode; + leftMeshData = new MeshData(projection.leftMesh.getSubMesh(0)); + rightMeshData = + projection.singleMesh ? leftMeshData : new MeshData(projection.rightMesh.getSubMesh(0)); + } + + /** Initializes of the GL components. */ + public void init() { + try { + program = new GlProgram(VERTEX_SHADER, FRAGMENT_SHADER); + mvpMatrixHandle = program.getUniformLocation("uMvpMatrix"); + uTexMatrixHandle = program.getUniformLocation("uTexMatrix"); + positionHandle = program.getAttributeArrayLocationAndEnable("aPosition"); + texCoordsHandle = program.getAttributeArrayLocationAndEnable("aTexCoords"); + textureHandle = program.getUniformLocation("uTexture"); + } catch (GlUtil.GlException e) { + Log.e(TAG, "Failed to initialize the program", e); + } + } + + /** + * Renders the mesh. If the projection hasn't been set, does nothing. This must be called on the + * GL thread. + * + * @param textureId GL_TEXTURE_EXTERNAL_OES used for this mesh. + * @param mvpMatrix The Model View Projection matrix. + * @param rightEye Whether the right eye view should be drawn. If {@code false}, the left eye view + * is drawn. + */ + public void draw(int textureId, float[] mvpMatrix, boolean rightEye) { + MeshData meshData = rightEye ? rightMeshData : leftMeshData; + if (meshData == null) { + return; + } + + // Configure shader. + float[] texMatrix; + if (stereoMode == C.STEREO_MODE_TOP_BOTTOM) { + texMatrix = rightEye ? 
TEX_MATRIX_BOTTOM : TEX_MATRIX_TOP; + } else if (stereoMode == C.STEREO_MODE_LEFT_RIGHT) { + texMatrix = rightEye ? TEX_MATRIX_RIGHT : TEX_MATRIX_LEFT; + } else { + texMatrix = TEX_MATRIX_WHOLE; + } + GLES20.glUniformMatrix3fv(uTexMatrixHandle, 1, false, texMatrix, 0); + + // TODO(b/205002913): Update to use GlProgram.Uniform.bind(). + GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mvpMatrix, 0); + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId); + GLES20.glUniform1i(textureHandle, 0); + try { + checkGlError(); + } catch (GlUtil.GlException e) { + Log.e(TAG, "Failed to bind uniforms", e); + } + + // Load position data. + GLES20.glVertexAttribPointer( + positionHandle, + Projection.POSITION_COORDS_PER_VERTEX, + GLES20.GL_FLOAT, + false, + Projection.POSITION_COORDS_PER_VERTEX * C.BYTES_PER_FLOAT, + meshData.vertexBuffer); + try { + checkGlError(); + } catch (GlUtil.GlException e) { + Log.e(TAG, "Failed to load position data", e); + } + + // Load texture data. + GLES20.glVertexAttribPointer( + texCoordsHandle, + Projection.TEXTURE_COORDS_PER_VERTEX, + GLES20.GL_FLOAT, + false, + Projection.TEXTURE_COORDS_PER_VERTEX * C.BYTES_PER_FLOAT, + meshData.textureBuffer); + try { + checkGlError(); + } catch (GlUtil.GlException e) { + Log.e(TAG, "Failed to load texture data", e); + } + + // Render. + GLES20.glDrawArrays(meshData.drawMode, /* first= */ 0, meshData.vertexCount); + try { + checkGlError(); + } catch (GlUtil.GlException e) { + Log.e(TAG, "Failed to render", e); + } + } + + /** Cleans up GL resources. */ + public void shutdown() { + if (program != null) { + try { + program.delete(); + } catch (GlUtil.GlException e) { + Log.e(TAG, "Failed to delete the shader program", e); + } + } + } + + private static class MeshData { + private final int vertexCount; + private final FloatBuffer vertexBuffer; + private final FloatBuffer textureBuffer; + private final int drawMode; + + public MeshData(Projection.SubMesh subMesh) { + vertexCount = subMesh.getVertexCount(); + vertexBuffer = GlUtil.createBuffer(subMesh.vertices); + textureBuffer = GlUtil.createBuffer(subMesh.textureCoords); + switch (subMesh.mode) { + case Projection.DRAW_MODE_TRIANGLES_STRIP: + drawMode = GLES20.GL_TRIANGLE_STRIP; + break; + case Projection.DRAW_MODE_TRIANGLES_FAN: + drawMode = GLES20.GL_TRIANGLE_FAN; + break; + case Projection.DRAW_MODE_TRIANGLES: + default: + drawMode = GLES20.GL_TRIANGLES; + break; + } + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/SceneRenderer.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/SceneRenderer.java new file mode 100644 index 0000000000..0719af26f9 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/SceneRenderer.java @@ -0,0 +1,208 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.video.spherical; + +import static com.google.android.exoplayer2.util.GlUtil.checkGlError; + +import android.graphics.SurfaceTexture; +import android.media.MediaFormat; +import android.opengl.GLES20; +import android.opengl.Matrix; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.GlUtil; +import com.google.android.exoplayer2.util.Log; +import com.google.android.exoplayer2.util.TimedValueQueue; +import com.google.android.exoplayer2.video.VideoFrameMetadataListener; +import java.util.Arrays; +import java.util.concurrent.atomic.AtomicBoolean; +import org.checkerframework.checker.nullness.qual.MonotonicNonNull; + +/** Renders a GL Scene. */ +/* package */ final class SceneRenderer + implements VideoFrameMetadataListener, CameraMotionListener { + + private static final String TAG = "SceneRenderer"; + + private final AtomicBoolean frameAvailable; + private final AtomicBoolean resetRotationAtNextFrame; + private final ProjectionRenderer projectionRenderer; + private final FrameRotationQueue frameRotationQueue; + private final TimedValueQueue sampleTimestampQueue; + private final TimedValueQueue projectionQueue; + private final float[] rotationMatrix; + private final float[] tempMatrix; + + // Used by GL thread only + private int textureId; + private @MonotonicNonNull SurfaceTexture surfaceTexture; + + // Used by other threads only + private volatile @C.StereoMode int defaultStereoMode; + private @C.StereoMode int lastStereoMode; + @Nullable private byte[] lastProjectionData; + + // Methods called on any thread. + + public SceneRenderer() { + frameAvailable = new AtomicBoolean(); + resetRotationAtNextFrame = new AtomicBoolean(true); + projectionRenderer = new ProjectionRenderer(); + frameRotationQueue = new FrameRotationQueue(); + sampleTimestampQueue = new TimedValueQueue<>(); + projectionQueue = new TimedValueQueue<>(); + rotationMatrix = new float[16]; + tempMatrix = new float[16]; + defaultStereoMode = C.STEREO_MODE_MONO; + lastStereoMode = Format.NO_VALUE; + } + + /** + * Sets the default stereo mode. If the played video doesn't contain a stereo mode the default one + * is used. + * + * @param stereoMode A {@link C.StereoMode} value. + */ + public void setDefaultStereoMode(@C.StereoMode int stereoMode) { + defaultStereoMode = stereoMode; + } + + // Methods called on GL thread. + + /** Initializes the renderer. */ + public SurfaceTexture init() { + try { + // Set the background frame color. This is only visible if the display mesh isn't a full + // sphere. + GLES20.glClearColor(0.5f, 0.5f, 0.5f, 1.0f); + checkGlError(); + + projectionRenderer.init(); + checkGlError(); + + textureId = GlUtil.createExternalTexture(); + } catch (GlUtil.GlException e) { + Log.e(TAG, "Failed to initialize the renderer", e); + } + surfaceTexture = new SurfaceTexture(textureId); + surfaceTexture.setOnFrameAvailableListener(surfaceTexture -> frameAvailable.set(true)); + return surfaceTexture; + } + + /** + * Draws the scene with a given eye pose and type. + * + * @param viewProjectionMatrix 16 element GL matrix. + * @param rightEye Whether the right eye view should be drawn. If {@code false}, the left eye view + * is drawn. 
+ */ + public void drawFrame(float[] viewProjectionMatrix, boolean rightEye) { + // glClear isn't strictly necessary when rendering fully spherical panoramas, but it can improve + // performance on tiled renderers by causing the GPU to discard previous data. + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + try { + checkGlError(); + } catch (GlUtil.GlException e) { + Log.e(TAG, "Failed to draw a frame", e); + } + + if (frameAvailable.compareAndSet(true, false)) { + Assertions.checkNotNull(surfaceTexture).updateTexImage(); + try { + checkGlError(); + } catch (GlUtil.GlException e) { + Log.e(TAG, "Failed to draw a frame", e); + } + if (resetRotationAtNextFrame.compareAndSet(true, false)) { + GlUtil.setToIdentity(rotationMatrix); + } + long lastFrameTimestampNs = surfaceTexture.getTimestamp(); + Long sampleTimestampUs = sampleTimestampQueue.poll(lastFrameTimestampNs); + if (sampleTimestampUs != null) { + frameRotationQueue.pollRotationMatrix(rotationMatrix, sampleTimestampUs); + } + Projection projection = projectionQueue.pollFloor(lastFrameTimestampNs); + if (projection != null) { + projectionRenderer.setProjection(projection); + } + } + Matrix.multiplyMM(tempMatrix, 0, viewProjectionMatrix, 0, rotationMatrix, 0); + projectionRenderer.draw(textureId, tempMatrix, rightEye); + } + + /** Cleans up GL resources. */ + public void shutdown() { + projectionRenderer.shutdown(); + } + + // Methods called on playback thread. + + // VideoFrameMetadataListener implementation. + + @Override + public void onVideoFrameAboutToBeRendered( + long presentationTimeUs, + long releaseTimeNs, + Format format, + @Nullable MediaFormat mediaFormat) { + sampleTimestampQueue.add(releaseTimeNs, presentationTimeUs); + setProjection(format.projectionData, format.stereoMode, releaseTimeNs); + } + + // CameraMotionListener implementation. + + @Override + public void onCameraMotion(long timeUs, float[] rotation) { + frameRotationQueue.setRotation(timeUs, rotation); + } + + @Override + public void onCameraMotionReset() { + sampleTimestampQueue.clear(); + frameRotationQueue.reset(); + resetRotationAtNextFrame.set(true); + } + + /** + * Sets projection data and stereo mode of the media to be played. + * + * @param projectionData Contains the projection data to be rendered. + * @param stereoMode A {@link C.StereoMode} value. + * @param timeNs When then new projection should be used. + */ + private void setProjection( + @Nullable byte[] projectionData, @C.StereoMode int stereoMode, long timeNs) { + byte[] oldProjectionData = lastProjectionData; + int oldStereoMode = lastStereoMode; + lastProjectionData = projectionData; + lastStereoMode = stereoMode == Format.NO_VALUE ? defaultStereoMode : stereoMode; + if (oldStereoMode == lastStereoMode && Arrays.equals(oldProjectionData, lastProjectionData)) { + return; + } + + Projection projectionFromData = null; + if (lastProjectionData != null) { + projectionFromData = ProjectionDecoder.decode(lastProjectionData, lastStereoMode); + } + Projection projection = + projectionFromData != null && ProjectionRenderer.isSupported(projectionFromData) + ? 
projectionFromData + : Projection.createEquirectangular(lastStereoMode); + projectionQueue.add(timeNs, projection); + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/SphericalGLSurfaceView.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/SphericalGLSurfaceView.java new file mode 100644 index 0000000000..a578c8ec73 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/SphericalGLSurfaceView.java @@ -0,0 +1,374 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.video.spherical; + +import android.content.Context; +import android.graphics.PointF; +import android.graphics.SurfaceTexture; +import android.hardware.Sensor; +import android.hardware.SensorManager; +import android.opengl.GLES20; +import android.opengl.GLSurfaceView; +import android.opengl.Matrix; +import android.os.Handler; +import android.os.Looper; +import android.util.AttributeSet; +import android.view.Display; +import android.view.MotionEvent; +import android.view.Surface; +import android.view.WindowManager; +import androidx.annotation.AnyThread; +import androidx.annotation.BinderThread; +import androidx.annotation.Nullable; +import androidx.annotation.UiThread; +import androidx.annotation.VisibleForTesting; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.GlUtil; +import com.google.android.exoplayer2.util.Util; +import com.google.android.exoplayer2.video.VideoFrameMetadataListener; +import java.util.concurrent.CopyOnWriteArrayList; +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.opengles.GL10; + +/** + * Renders a GL scene in a non-VR Activity that is affected by phone orientation and touch input. + * + *

<p>The two input components are the TYPE_GAME_ROTATION_VECTOR Sensor and a TouchListener. The GL
+ * renderer combines these two inputs to render a scene with the appropriate camera orientation.
+ *
+ * <p>
      The primary complexity in this class is related to the various rotations. It is important to + * apply the touch and sensor rotations in the correct order or the user's touch manipulations won't + * match what they expect. + */ +public final class SphericalGLSurfaceView extends GLSurfaceView { + + /** Listener for the {@link Surface} to which video frames should be rendered. */ + public interface VideoSurfaceListener { + + /** Called when the {@link Surface} to which video frames should be rendered is created. */ + void onVideoSurfaceCreated(Surface surface); + + /** Called when the {@link Surface} to which video frames should be rendered is destroyed. */ + void onVideoSurfaceDestroyed(Surface surface); + } + + // Arbitrary vertical field of view. + private static final int FIELD_OF_VIEW_DEGREES = 90; + private static final float Z_NEAR = 0.1f; + private static final float Z_FAR = 100; + + // TODO Calculate this depending on surface size and field of view. + private static final float PX_PER_DEGREES = 25; + + /* package */ static final float UPRIGHT_ROLL = (float) Math.PI; + + private final CopyOnWriteArrayList videoSurfaceListeners; + private final SensorManager sensorManager; + @Nullable private final Sensor orientationSensor; + private final OrientationListener orientationListener; + private final Handler mainHandler; + private final TouchTracker touchTracker; + private final SceneRenderer scene; + @Nullable private SurfaceTexture surfaceTexture; + @Nullable private Surface surface; + private boolean useSensorRotation; + private boolean isStarted; + private boolean isOrientationListenerRegistered; + + public SphericalGLSurfaceView(Context context) { + this(context, null); + } + + public SphericalGLSurfaceView(Context context, @Nullable AttributeSet attributeSet) { + super(context, attributeSet); + videoSurfaceListeners = new CopyOnWriteArrayList<>(); + mainHandler = new Handler(Looper.getMainLooper()); + + // Configure sensors and touch. + sensorManager = + (SensorManager) Assertions.checkNotNull(context.getSystemService(Context.SENSOR_SERVICE)); + @Nullable Sensor orientationSensor = null; + if (Util.SDK_INT >= 18) { + // TYPE_GAME_ROTATION_VECTOR is the easiest sensor since it handles all the complex math for + // fusion. It's used instead of TYPE_ROTATION_VECTOR since the latter uses the magnetometer on + // devices. When used indoors, the magnetometer can take some time to settle depending on the + // device and amount of metal in the environment. + orientationSensor = sensorManager.getDefaultSensor(Sensor.TYPE_GAME_ROTATION_VECTOR); + } + if (orientationSensor == null) { + orientationSensor = sensorManager.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR); + } + this.orientationSensor = orientationSensor; + + scene = new SceneRenderer(); + Renderer renderer = new Renderer(scene); + + touchTracker = new TouchTracker(context, renderer, PX_PER_DEGREES); + WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); + Display display = Assertions.checkNotNull(windowManager).getDefaultDisplay(); + orientationListener = new OrientationListener(display, touchTracker, renderer); + useSensorRotation = true; + + setEGLContextClientVersion(2); + setRenderer(renderer); + setOnTouchListener(touchTracker); + } + + /** + * Adds a {@link VideoSurfaceListener}. + * + * @param listener The listener to add. 
+ */ + public void addVideoSurfaceListener(VideoSurfaceListener listener) { + videoSurfaceListeners.add(listener); + } + + /** + * Removes a {@link VideoSurfaceListener}. + * + * @param listener The listener to remove. + */ + public void removeVideoSurfaceListener(VideoSurfaceListener listener) { + videoSurfaceListeners.remove(listener); + } + + /** + * Returns the {@link Surface} to which video frames should be rendered, or {@code null} if it has + * not been created. + */ + @Nullable + public Surface getVideoSurface() { + return surface; + } + + /** Returns the {@link VideoFrameMetadataListener} that should be registered during playback. */ + public VideoFrameMetadataListener getVideoFrameMetadataListener() { + return scene; + } + + /** Returns the {@link CameraMotionListener} that should be registered during playback. */ + public CameraMotionListener getCameraMotionListener() { + return scene; + } + + /** + * Sets the default stereo mode. If the played video doesn't contain a stereo mode the default one + * is used. + * + * @param stereoMode A {@link C.StereoMode} value. + */ + public void setDefaultStereoMode(@C.StereoMode int stereoMode) { + scene.setDefaultStereoMode(stereoMode); + } + + /** Sets whether to use the orientation sensor for rotation (if available). */ + public void setUseSensorRotation(boolean useSensorRotation) { + this.useSensorRotation = useSensorRotation; + updateOrientationListenerRegistration(); + } + + @Override + public void onResume() { + super.onResume(); + isStarted = true; + updateOrientationListenerRegistration(); + } + + @Override + public void onPause() { + isStarted = false; + updateOrientationListenerRegistration(); + super.onPause(); + } + + @Override + protected void onDetachedFromWindow() { + // This call stops GL thread. + super.onDetachedFromWindow(); + + // Post to make sure we occur in order with any onSurfaceTextureAvailable calls. + mainHandler.post( + () -> { + @Nullable Surface oldSurface = surface; + if (oldSurface != null) { + for (VideoSurfaceListener videoSurfaceListener : videoSurfaceListeners) { + videoSurfaceListener.onVideoSurfaceDestroyed(oldSurface); + } + } + releaseSurface(surfaceTexture, oldSurface); + surfaceTexture = null; + surface = null; + }); + } + + private void updateOrientationListenerRegistration() { + boolean enabled = useSensorRotation && isStarted; + if (orientationSensor == null || enabled == isOrientationListenerRegistered) { + return; + } + if (enabled) { + sensorManager.registerListener( + orientationListener, orientationSensor, SensorManager.SENSOR_DELAY_FASTEST); + } else { + sensorManager.unregisterListener(orientationListener); + } + isOrientationListenerRegistered = enabled; + } + + // Called on GL thread. 
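+ // onSurfaceTextureAvailable receives the SurfaceTexture created by SceneRenderer.init() from
+ // Renderer.onSurfaceCreated; it hops to the main thread via mainHandler so that the new Surface
+ // is created and the VideoSurfaceListener callbacks run there, after which any previous
+ // SurfaceTexture/Surface pair is released.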
+ private void onSurfaceTextureAvailable(SurfaceTexture newSurfaceTexture) { + mainHandler.post( + () -> { + @Nullable SurfaceTexture oldSurfaceTexture = surfaceTexture; + @Nullable Surface oldSurface = surface; + Surface newSurface = new Surface(newSurfaceTexture); + surfaceTexture = newSurfaceTexture; + surface = newSurface; + for (VideoSurfaceListener videoSurfaceListener : videoSurfaceListeners) { + videoSurfaceListener.onVideoSurfaceCreated(newSurface); + } + releaseSurface(oldSurfaceTexture, oldSurface); + }); + } + + private static void releaseSurface( + @Nullable SurfaceTexture oldSurfaceTexture, @Nullable Surface oldSurface) { + if (oldSurfaceTexture != null) { + oldSurfaceTexture.release(); + } + if (oldSurface != null) { + oldSurface.release(); + } + } + + /** + * Standard GL Renderer implementation. The notable code is the matrix multiplication in + * onDrawFrame and updatePitchMatrix. + */ + @VisibleForTesting + /* package */ final class Renderer + implements GLSurfaceView.Renderer, TouchTracker.Listener, OrientationListener.Listener { + private final SceneRenderer scene; + private final float[] projectionMatrix = new float[16]; + + // There is no model matrix for this scene so viewProjectionMatrix is used for the mvpMatrix. + private final float[] viewProjectionMatrix = new float[16]; + + // Device orientation is derived from sensor data. This is accessed in the sensor's thread and + // the GL thread. + private final float[] deviceOrientationMatrix = new float[16]; + + // Optional pitch and yaw rotations are applied to the sensor orientation. These are accessed on + // the UI, sensor and GL Threads. + private final float[] touchPitchMatrix = new float[16]; + private final float[] touchYawMatrix = new float[16]; + private float touchPitch; + private float deviceRoll; + + // viewMatrix = touchPitch * deviceOrientation * touchYaw. + private final float[] viewMatrix = new float[16]; + private final float[] tempMatrix = new float[16]; + + public Renderer(SceneRenderer scene) { + this.scene = scene; + GlUtil.setToIdentity(deviceOrientationMatrix); + GlUtil.setToIdentity(touchPitchMatrix); + GlUtil.setToIdentity(touchYawMatrix); + deviceRoll = UPRIGHT_ROLL; + } + + @Override + public synchronized void onSurfaceCreated(GL10 gl, EGLConfig config) { + onSurfaceTextureAvailable(scene.init()); + } + + @Override + public void onSurfaceChanged(GL10 gl, int width, int height) { + GLES20.glViewport(0, 0, width, height); + float aspect = (float) width / height; + float fovY = calculateFieldOfViewInYDirection(aspect); + Matrix.perspectiveM(projectionMatrix, 0, fovY, aspect, Z_NEAR, Z_FAR); + } + + @Override + public void onDrawFrame(GL10 gl) { + // Combine touch & sensor data. + // Orientation = pitch * sensor * yaw since that is closest to what most users expect the + // behavior to be. + synchronized (this) { + Matrix.multiplyMM(tempMatrix, 0, deviceOrientationMatrix, 0, touchYawMatrix, 0); + Matrix.multiplyMM(viewMatrix, 0, touchPitchMatrix, 0, tempMatrix, 0); + } + + Matrix.multiplyMM(viewProjectionMatrix, 0, projectionMatrix, 0, viewMatrix, 0); + scene.drawFrame(viewProjectionMatrix, /* rightEye= */ false); + } + + /** Adjusts the GL camera's rotation based on device rotation. Runs on the sensor thread. 
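+ * The incoming rotation matrix is copied while holding the object lock and the device roll is
+ * negated so that {@link #updatePitchMatrix()} can compensate for it when it rebuilds the touch
+ * pitch rotation.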
*/ + @Override + @BinderThread + public synchronized void onOrientationChange(float[] matrix, float deviceRoll) { + System.arraycopy(matrix, 0, deviceOrientationMatrix, 0, deviceOrientationMatrix.length); + this.deviceRoll = -deviceRoll; + updatePitchMatrix(); + } + + /** + * Updates the pitch matrix after a physical rotation or touch input. The pitch matrix rotation + * is applied on an axis that is dependent on device rotation so this must be called after + * either touch or sensor update. + */ + @AnyThread + private void updatePitchMatrix() { + // The camera's pitch needs to be rotated along an axis that is parallel to the real world's + // horizon. This is the <1, 0, 0> axis after compensating for the device's roll. + Matrix.setRotateM( + touchPitchMatrix, + 0, + -touchPitch, + (float) Math.cos(deviceRoll), + (float) Math.sin(deviceRoll), + 0); + } + + @Override + @UiThread + public synchronized void onScrollChange(PointF scrollOffsetDegrees) { + touchPitch = scrollOffsetDegrees.y; + updatePitchMatrix(); + Matrix.setRotateM(touchYawMatrix, 0, -scrollOffsetDegrees.x, 0, 1, 0); + } + + @Override + @UiThread + public boolean onSingleTapUp(MotionEvent event) { + return performClick(); + } + + private float calculateFieldOfViewInYDirection(float aspect) { + boolean landscapeMode = aspect > 1; + if (landscapeMode) { + double halfFovX = FIELD_OF_VIEW_DEGREES / 2f; + double tanY = Math.tan(Math.toRadians(halfFovX)) / aspect; + double halfFovY = Math.toDegrees(Math.atan(tanY)); + return (float) (halfFovY * 2); + } else { + return FIELD_OF_VIEW_DEGREES; + } + } + } +} diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/TouchTracker.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/TouchTracker.java new file mode 100644 index 0000000000..bbe1636fe3 --- /dev/null +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/video/spherical/TouchTracker.java @@ -0,0 +1,134 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.android.exoplayer2.video.spherical; + +import android.content.Context; +import android.graphics.PointF; +import android.view.GestureDetector; +import android.view.MotionEvent; +import android.view.View; +import androidx.annotation.BinderThread; + +/** + * Basic touch input system. + * + *

<p>Mixing touch input and gyro input results in a complicated UI so this should be used
+ * carefully. This touch system implements a basic (X, Y) -> (yaw, pitch) transform. This works for
+ * basic UI but fails in edge cases where the user tries to drag scene up or down. There is no good
+ * UX solution for this. The least bad solution is to disable pitch manipulation and only let the
+ * user adjust yaw. This example tries to limit the awkwardness by restricting pitch manipulation to
+ * +/- 45 degrees.
+ *
+ * <p>It is also important to get the order of operations correct. To match what users expect, touch
+ * interaction manipulates the scene by rotating the world by the yaw offset and tilting the camera
+ * by the pitch offset. If the order of operations is incorrect, the sensors & touch rotations will
+ * have strange interactions. The roll of the phone is also tracked so that the x & y are correctly
+ * mapped to yaw & pitch no matter how the user holds their phone.
+ *
+ * <p>
      This class doesn't handle any scrolling inertia but Android's + * com.google.vr.sdk.widgets.common.TouchTracker.FlingGestureListener can be used with this code for + * a nicer UI. An even more advanced UI would reproject the user's touch point into 3D and drag the + * Mesh as the user moves their finger. However, that requires quaternion interpolation. + */ +/* package */ final class TouchTracker extends GestureDetector.SimpleOnGestureListener + implements View.OnTouchListener, OrientationListener.Listener { + + public interface Listener { + void onScrollChange(PointF scrollOffsetDegrees); + + default boolean onSingleTapUp(MotionEvent event) { + return false; + } + } + + // Touch input won't change the pitch beyond +/- 45 degrees. This reduces awkward situations + // where the touch-based pitch and gyro-based pitch interact badly near the poles. + /* package */ static final float MAX_PITCH_DEGREES = 45; + + // With every touch event, update the accumulated degrees offset by the new pixel amount. + private final PointF previousTouchPointPx = new PointF(); + private final PointF accumulatedTouchOffsetDegrees = new PointF(); + + private final Listener listener; + private final float pxPerDegrees; + private final GestureDetector gestureDetector; + // The conversion from touch to yaw & pitch requires compensating for device roll. This is set + // on the sensor thread and read on the UI thread. + private volatile float roll; + + @SuppressWarnings({"nullness:assignment", "nullness:argument"}) + public TouchTracker(Context context, Listener listener, float pxPerDegrees) { + this.listener = listener; + this.pxPerDegrees = pxPerDegrees; + gestureDetector = new GestureDetector(context, this); + roll = SphericalGLSurfaceView.UPRIGHT_ROLL; + } + + /** + * Converts ACTION_MOVE events to pitch & yaw events while compensating for device roll. + * + * @return true if we handled the event + */ + @Override + public boolean onTouch(View v, MotionEvent event) { + return gestureDetector.onTouchEvent(event); + } + + @Override + public boolean onDown(MotionEvent e) { + // Initialize drag gesture. + previousTouchPointPx.set(e.getX(), e.getY()); + return true; + } + + // Incompatible parameter type for e1. + @SuppressWarnings("nullness:override.param.invalid") + @Override + public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) { + // Calculate the touch delta in screen space. + float touchX = (e2.getX() - previousTouchPointPx.x) / pxPerDegrees; + float touchY = (e2.getY() - previousTouchPointPx.y) / pxPerDegrees; + previousTouchPointPx.set(e2.getX(), e2.getY()); + + float r = roll; // Copy volatile state. + float cr = (float) Math.cos(r); + float sr = (float) Math.sin(r); + // To convert from screen space to the 3D space, we need to adjust the drag vector based + // on the roll of the phone. This is standard rotationMatrix(roll) * vector math but has + // an inverted y-axis due to the screen-space coordinates vs GL coordinates. + // Handle yaw. + accumulatedTouchOffsetDegrees.x -= cr * touchX - sr * touchY; + // Handle pitch and limit it to 45 degrees. 
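+ // For example, with the phone held upright (roll == UPRIGHT_ROLL == PI) cr is -1 and sr is ~0,
+ // so a rightward drag increases the yaw offset above while a downward drag decreases the pitch
+ // offset below, matching the inverted screen y-axis noted above.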
+ accumulatedTouchOffsetDegrees.y += sr * touchX + cr * touchY; + accumulatedTouchOffsetDegrees.y = + Math.max(-MAX_PITCH_DEGREES, Math.min(MAX_PITCH_DEGREES, accumulatedTouchOffsetDegrees.y)); + + listener.onScrollChange(accumulatedTouchOffsetDegrees); + return true; + } + + @Override + public boolean onSingleTapUp(MotionEvent e) { + return listener.onSingleTapUp(e); + } + + @Override + @BinderThread + public void onOrientationChange(float[] deviceOrientationMatrix, float roll) { + // We compensate for roll by rotating in the opposite direction. + this.roll = -roll; + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/SQLite/SQLiteDatabase.java b/TMessagesProj/src/main/java/org/telegram/SQLite/SQLiteDatabase.java index d46da3a4b0..c8ea8b36eb 100755 --- a/TMessagesProj/src/main/java/org/telegram/SQLite/SQLiteDatabase.java +++ b/TMessagesProj/src/main/java/org/telegram/SQLite/SQLiteDatabase.java @@ -97,7 +97,11 @@ public void finalize() throws Throwable { public void beginTransaction() throws SQLiteException { if (inTransaction) { - throw new SQLiteException("database already in transaction"); + if (BuildVars.DEBUG_PRIVATE_VERSION) { + throw new SQLiteException("database already in transaction"); + } else { + commitTransaction(); + } } inTransaction = true; beginTransaction(sqliteHandle); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java b/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java index 2ee0e2a0c2..eaf4fa02b8 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java @@ -59,6 +59,7 @@ import android.text.Layout; import android.text.Selection; import android.text.Spannable; +import android.text.SpannableString; import android.text.SpannableStringBuilder; import android.text.Spanned; import android.text.SpannedString; @@ -135,6 +136,7 @@ import org.telegram.ui.Components.BackgroundGradientDrawable; import org.telegram.ui.Components.Bulletin; import org.telegram.ui.Components.CubicBezierInterpolator; +import org.telegram.ui.Components.EllipsizeSpanAnimator; import org.telegram.ui.Components.ForegroundColorSpanThemable; import org.telegram.ui.Components.ForegroundDetector; import org.telegram.ui.Components.HideViewAfterAnimation; @@ -153,14 +155,17 @@ import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; +import java.io.EOFException; import java.io.File; import java.io.FileInputStream; +import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; +import java.io.RandomAccessFile; import java.lang.ref.WeakReference; import java.lang.reflect.Field; import java.lang.reflect.Method; @@ -181,6 +186,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -505,6 +511,16 @@ public void updateDrawState(TextPaint textPaint) { } public static void recycleBitmaps(List bitmapToRecycle) { + if (Build.VERSION.SDK_INT <= 23) { + // cause to crash: + // /system/lib/libskia.so (SkPixelRef::unlockPixels()+3) + // /system/lib/libskia.so (SkBitmap::freePixels()+14) + // /system/lib/libskia.so (SkBitmap::setPixelRef(SkPixelRef*, int, int)+50) + // 
/system/lib/libhwui.so (android::uirenderer::ResourceCache::recycleLocked(SkBitmap*)+30) + // /system/lib/libhwui.so (android::uirenderer::ResourceCache::recycle(SkBitmap*)+20) + // gc recycle it automatically + return; + } if (bitmapToRecycle != null && !bitmapToRecycle.isEmpty()) { AndroidUtilities.runOnUIThread(() -> Utilities.globalQueue.postRunnable(() -> { for (int i = 0; i < bitmapToRecycle.size(); i++) { @@ -517,7 +533,7 @@ public static void recycleBitmaps(List bitmapToRecycle) { } } } - }), Build.VERSION.SDK_INT <= 23 ? 100 : 36); + }), 36); } } @@ -529,6 +545,60 @@ public static void recycleBitmap(Bitmap image) { recycleBitmaps(Collections.singletonList(image)); } + public static boolean findClickableView(ViewGroup container, float x, float y) { + for (int i = 0; i < container.getChildCount(); i++) { + View child = container.getChildAt(i); + if (child.getVisibility() != View.VISIBLE) { + continue; + } + if (child.isClickable()) { + return true; + } else if (child instanceof ViewGroup && findClickableView((ViewGroup) child, x - child.getX(), y - child.getY())) { + return true; + } + } + return false; + } + + public static void removeFromParent(View child) { + if (child.getParent() != null) { + ((ViewGroup) child.getParent()).removeView(child); + } + } + + public static boolean isFilNotFoundException(Throwable e) { + return e instanceof FileNotFoundException || e instanceof EOFException; + } + + public static File getLogsDir() { +// try { +// if (Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())) { +// File path = ApplicationLoader.applicationContext.getExternalFilesDir(null); +// File dir = new File(path.getAbsolutePath() + "/logs"); +// dir.mkdirs(); +// return dir; +// } +// } catch (Exception e) { +// +// } + try { + File dir = new File(ApplicationLoader.applicationContext.getCacheDir() + "/logs"); + dir.mkdirs(); + return dir; + } catch (Exception e) { + + } + try { + File dir = new File(ApplicationLoader.applicationContext.getFilesDir() + "/logs"); + dir.mkdirs(); + return dir; + } catch (Exception e) { + + } +// ApplicationLoader.appCenterLog(new RuntimeException("can't create logs directory")); + return null; + } + private static class LinkSpec { String url; int start; @@ -572,6 +642,11 @@ private static void gatherLinks(ArrayList links, Spannable s, Pattern if (TextUtils.indexOf(s, '─') >= 0) { s = new SpannableStringBuilder(s.toString().replace('─', ' ')); } + if (!TextUtils.isEmpty(s) && TextUtils.lastIndexOf(s, '_') == s.length() - 1) { + //fix infinity loop regex + SpannableStringBuilder spannableStringBuilder = new SpannableStringBuilder(s.toString()); + s = spannableStringBuilder.replace(s.length() - 1, s.length(), "a"); + } Matcher m = pattern.matcher(s); while (m.find()) { int start = m.start(); @@ -1837,6 +1912,7 @@ public static ArrayList getDataDirs() { } public static ArrayList getRootDirs() { + HashSet pathes = new HashSet<>(); ArrayList result = null; if (Build.VERSION.SDK_INT >= 19) { File[] dirs = ApplicationLoader.applicationContext.getExternalFilesDirs(null); @@ -1851,7 +1927,16 @@ public static ArrayList getRootDirs() { if (result == null) { result = new ArrayList<>(); } - result.add(new File(path.substring(0, idx))); + File file = new File(path.substring(0, idx)); + for (int i = 0; i < result.size(); i++) { + if (result.get(i).getPath().equals(file.getPath())) { + continue; + } + } + if (file != null && !pathes.contains(file.getAbsolutePath())) { + pathes.add(file.getAbsolutePath()); + result.add(file); + } } } } @@ -1859,6 
+1944,12 @@ public static ArrayList getRootDirs() { if (result == null) { result = new ArrayList<>(); } + if (result.isEmpty()) { + File dir = Environment.getExternalStorageDirectory(); + if (dir != null && !pathes.contains(dir.getAbsolutePath())) { + result.add(dir); + } + } return result; } @@ -2190,12 +2281,20 @@ public static String obtainLoginPhoneCall(String pattern) { if (!hasCallPermissions) { return null; } + String order; + Bundle selectionArgs; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { + order = "date DESC"; + } else { + order = "date DESC LIMIT 5"; + } try (Cursor cursor = ApplicationLoader.applicationContext.getContentResolver().query( CallLog.Calls.CONTENT_URI, new String[]{CallLog.Calls.NUMBER, CallLog.Calls.DATE}, CallLog.Calls.TYPE + " IN (" + CallLog.Calls.MISSED_TYPE + "," + CallLog.Calls.INCOMING_TYPE + "," + CallLog.Calls.REJECTED_TYPE + ")", null, - "date DESC LIMIT 5")) { + order + )) { while (cursor.moveToNext()) { String number = cursor.getString(0); long date = cursor.getLong(1); @@ -2571,7 +2670,7 @@ public static void shakeViewSpring(View view, Runnable endCallback) { public static void shakeViewSpring(View view, float shiftDp, Runnable endCallback) { int shift = dp(shiftDp); if (view.getTag(R.id.spring_tag) != null) { - ((SpringAnimation)view.getTag(R.id.spring_tag)).cancel(); + ((SpringAnimation) view.getTag(R.id.spring_tag)).cancel(); } Float wasX = (Float) view.getTag(R.id.spring_was_translation_x_tag); if (wasX != null) { @@ -2923,7 +3022,9 @@ public static String formatFileSize(long size) { } public static String formatFileSize(long size, boolean removeZero) { - if (size < 1024) { + if (size == 0) { + return String.format("%d KB", 0); + } else if (size < 1024) { return String.format("%d B", size); } else if (size < 1024 * 1024) { float value = size / 1024.0f; @@ -2932,7 +3033,7 @@ public static String formatFileSize(long size, boolean removeZero) { } else { return String.format("%.1f KB", value); } - } else if (size < 1024 * 1024 * 1024) { + } else if (size < 1000 * 1024 * 1024) { float value = size / 1024.0f / 1024.0f; if (removeZero && (value - (int) value) * 10 == 0) { return String.format("%d MB", (int) value); @@ -3333,24 +3434,25 @@ public static boolean openForView(TLRPC.Document document, boolean forceCache, A return openForView(f, fileName, document.mime_type, activity, null); } - public static SpannableStringBuilder formatSpannableSimple(String format, CharSequence... cs) { + public static SpannableStringBuilder formatSpannableSimple(CharSequence format, CharSequence... cs) { return formatSpannable(format, i -> "%s", cs); } - public static SpannableStringBuilder formatSpannable(String format, CharSequence... cs) { - if (format.contains("%s")) + public static SpannableStringBuilder formatSpannable(CharSequence format, CharSequence... cs) { + if (format.toString().contains("%s")) return formatSpannableSimple(format, cs); return formatSpannable(format, i -> "%" + (i + 1) + "$s", cs); } - public static SpannableStringBuilder formatSpannable(String format, GenericProvider keysProvider, CharSequence... cs) { - SpannableStringBuilder stringBuilder = new SpannableStringBuilder(format); + public static SpannableStringBuilder formatSpannable(CharSequence format, GenericProvider keysProvider, CharSequence... 
cs) { + String str = format.toString(); + SpannableStringBuilder stringBuilder = SpannableStringBuilder.valueOf(format); for (int i = 0; i < cs.length; i++) { String key = keysProvider.provide(i); - int j = format.indexOf(key); + int j = str.indexOf(key); if (j != -1) { stringBuilder.replace(j, j + key.length(), cs[i]); - format = format.substring(0, j) + cs[i].toString() + format.substring(j + key.length()); + str = str.substring(0, j) + cs[i].toString() + str.substring(j + key.length()); } } return stringBuilder; @@ -3613,30 +3715,63 @@ public static void showProxyAlert(Context activity, final String address, final } else if (a == 4) { text = password; detail = LocaleController.getString("UseProxyPassword", R.string.UseProxyPassword); - } else { - text = LocaleController.getString("Checking", R.string.Checking); - detail = LocaleController.getString("Checking", R.string.Checking); + } else if (a == 5) { + text = LocaleController.getString(R.string.ProxyBottomSheetChecking); + detail = LocaleController.getString(R.string.ProxyStatus); } if (TextUtils.isEmpty(text)) { continue; } - TextDetailSettingsCell cell = new TextDetailSettingsCell(activity); - cell.setTextAndValue(text, detail, true); + AtomicReference ellRef = new AtomicReference<>(); + TextDetailSettingsCell cell = new TextDetailSettingsCell(activity) { + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + if (ellRef.get() != null) { + ellRef.get().onAttachedToWindow(); + } + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + if (ellRef.get() != null) { + ellRef.get().onDetachedFromWindow(); + } + } + }; + if (a == 5) { + SpannableStringBuilder spannableStringBuilder = SpannableStringBuilder.valueOf(text); + EllipsizeSpanAnimator ellipsizeAnimator = new EllipsizeSpanAnimator(cell); + ellipsizeAnimator.addView(cell); + SpannableString ell = new SpannableString("..."); + ellipsizeAnimator.wrap(ell, 0); + spannableStringBuilder.append(ell); + ellRef.set(ellipsizeAnimator); + + cell.setTextAndValue(spannableStringBuilder, detail, true); + } else { + cell.setTextAndValue(text, detail, true); + } cell.getTextView().setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); cell.getValueTextView().setTextColor(Theme.getColor(Theme.key_dialogTextGray3)); linearLayout.addView(cell, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + if (a == 5) { - ConnectionsManager.getInstance(UserConfig.selectedAccount).checkProxy(address, Utilities.parseInt(port), user, password, secret, time -> AndroidUtilities.runOnUIThread(() -> { - String colorKey; - if (time != -1) { - cell.setTextAndValue(LocaleController.getString("Available", R.string.Available), LocaleController.formatString("Ping", R.string.Ping, time), true); - colorKey = Theme.key_windowBackgroundWhiteGreenText; - } else { - cell.setTextAndValue(LocaleController.getString("Unavailable", R.string.Unavailable), LocaleController.getString("Unavailable", R.string.Unavailable), true); - colorKey = Theme.key_windowBackgroundWhiteRedText4; - } - cell.getValueTextView().setTextColor(Theme.getColor(colorKey)); - })); + try { + ConnectionsManager.getInstance(UserConfig.selectedAccount).checkProxy(address, Integer.parseInt(port), user, password, secret, time -> AndroidUtilities.runOnUIThread(() -> { + if (time == -1) { + cell.getTextView().setText(LocaleController.getString(R.string.Unavailable)); + cell.getTextView().setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteRedText4)); + } else { + 
cell.getTextView().setText(LocaleController.getString(R.string.Available) + ", " + LocaleController.formatString(R.string.Ping, time)); + cell.getTextView().setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGreenText)); + } + })); + } catch (NumberFormatException ignored) { + cell.getTextView().setText(LocaleController.getString(R.string.Unavailable)); + cell.getTextView().setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteRedText4)); + } } } @@ -4285,9 +4420,15 @@ public void run(long time) { if (activity instanceof LaunchActivity) { INavigationLayout layout = ((LaunchActivity) activity).getActionBarLayout(); BaseFragment fragment = layout.getLastFragment(); + boolean bulletinSent = false; if (fragment instanceof ChatActivity) { - ((ChatActivity) fragment).getUndoView().showWithAction(0, UndoView.ACTION_PROXY_ADDED, null); - } else { + UndoView undoView = ((ChatActivity) fragment).getUndoView(); + if (undoView != null) { + undoView.showWithAction(0, UndoView.ACTION_PROXY_ADDED, null); + bulletinSent = true; + } + } + if (!bulletinSent) { NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.showBulletin, Bulletin.TYPE_SUCCESS, LocaleController.getString(R.string.ProxyAddedSuccess)); } } else { @@ -4667,45 +4808,6 @@ public static void setFlagSecure(BaseFragment parentFragment, boolean set) { } } - private static final HashMap> flagSecureReasons = new HashMap<>(); - - // Sets FLAG_SECURE to true, until it gets unregistered (when returned callback is run) - // Useful for having multiple reasons to have this flag on. - public static Runnable registerFlagSecure(Window window) { - if (NekoXConfig.disableFlagSecure) return () -> {}; - final long reasonId = (long) (Math.random() * 999999999); - final ArrayList reasonIds; - if (flagSecureReasons.containsKey(window)) { - reasonIds = flagSecureReasons.get(window); - } else { - reasonIds = new ArrayList<>(); - flagSecureReasons.put(window, reasonIds); - } - reasonIds.add(reasonId); - updateFlagSecure(window); - return () -> { - reasonIds.remove(reasonId); - updateFlagSecure(window); - }; - } - - private static void updateFlagSecure(Window window) { - if (Build.VERSION.SDK_INT >= 23) { - if (window == null) { - return; - } - final boolean value = flagSecureReasons.containsKey(window) && flagSecureReasons.get(window).size() > 0; - try { - if (value && !NekoXConfig.disableFlagSecure) { - window.addFlags(WindowManager.LayoutParams.FLAG_SECURE); - } else { - window.clearFlags(WindowManager.LayoutParams.FLAG_SECURE); - } - } catch (Exception ignore) { - } - } - } - public static void openSharing(BaseFragment fragment, String url) { if (fragment == null || fragment.getParentActivity() == null) { return; @@ -4792,20 +4894,28 @@ public static void setLightStatusBar(Window window, boolean enable, boolean forc flags |= View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR; decorView.setSystemUiVisibility(flags); } + int statusBarColor; if (!SharedConfig.noStatusBar && !forceTransparentStatusbar) { - window.setStatusBarColor(LIGHT_STATUS_BAR_OVERLAY); + statusBarColor = LIGHT_STATUS_BAR_OVERLAY; } else { - window.setStatusBarColor(Color.TRANSPARENT); + statusBarColor = Color.TRANSPARENT; + } + if (window.getStatusBarColor() != statusBarColor) { + window.setStatusBarColor(statusBarColor); } } else { if ((flags & View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR) != 0) { flags &= ~View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR; decorView.setSystemUiVisibility(flags); } + int statusBarColor; if (!SharedConfig.noStatusBar && !forceTransparentStatusbar) { - 
window.setStatusBarColor(DARK_STATUS_BAR_OVERLAY); + statusBarColor = DARK_STATUS_BAR_OVERLAY; } else { - window.setStatusBarColor(Color.TRANSPARENT); + statusBarColor = Color.TRANSPARENT; + } + if (window.getStatusBarColor() != statusBarColor) { + window.setStatusBarColor(statusBarColor); } } } @@ -5238,6 +5348,13 @@ public static boolean isENOSPC(Exception e) { ); } + public static boolean isEROFS(Exception e) { + return (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && e instanceof IOException && + (e.getCause() instanceof ErrnoException && ((ErrnoException) e.getCause()).errno == OsConstants.EROFS) || + (e.getMessage() != null && e.getMessage().toLowerCase().contains("read-only file system")) + ); + } + public static CharSequence replaceCharSequence(String what, CharSequence from, CharSequence obj) { SpannableStringBuilder spannableStringBuilder; if (from instanceof SpannableStringBuilder) { @@ -5252,18 +5369,37 @@ public static CharSequence replaceCharSequence(String what, CharSequence from, C return spannableStringBuilder; } + public static CharSequence replaceMultipleCharSequence(String what, CharSequence from, CharSequence obj) { + SpannableStringBuilder spannableStringBuilder; + if (from instanceof SpannableStringBuilder) { + spannableStringBuilder = (SpannableStringBuilder) from; + } else { + spannableStringBuilder = new SpannableStringBuilder(from); + } + int index = TextUtils.indexOf(from, what); + while (index >= 0) { + spannableStringBuilder.replace(index, index + what.length(), obj); + index = TextUtils.indexOf(spannableStringBuilder, what); + } + return spannableStringBuilder; + } + public static Bitmap makeBlurBitmap(View view) { + return makeBlurBitmap(view, 6f, 7); + } + + public static Bitmap makeBlurBitmap(View view, float downscale, int maxRadius) { if (view == null) { return null; } - int w = (int) (view.getWidth() / 6.0f); - int h = (int) (view.getHeight() / 6.0f); + int w = (int) (view.getWidth() / downscale); + int h = (int) (view.getHeight() / downscale); Bitmap bitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888); Canvas canvas = new Canvas(bitmap); - canvas.scale(1.0f / 6.0f, 1.0f / 6.0f); + canvas.scale(1.0f / downscale, 1.0f / downscale); canvas.drawColor(Theme.getColor(Theme.key_windowBackgroundWhite)); view.draw(canvas); - Utilities.stackBlurBitmap(bitmap, Math.max(7, Math.max(w, h) / 180)); + Utilities.stackBlurBitmap(bitmap, Math.max(maxRadius, Math.max(w, h) / 180)); return bitmap; } @@ -5282,7 +5418,7 @@ public static void makeGlobalBlurBitmap(Utilities.Callback onBitmapDone, Method getViewRootNames = wmgClass.getMethod("getViewRootNames"); Method getRootView = wmgClass.getMethod("getRootView", String.class); - String[] rootViewNames = (String[])getViewRootNames.invoke(wmgInstance, (Object[])null); + String[] rootViewNames = (String[]) getViewRootNames.invoke(wmgInstance, (Object[]) null); views = new ArrayList<>(); for (String viewName : rootViewNames) { @@ -5352,7 +5488,7 @@ public static void makeGlobalBlurBitmap(Utilities.Callback onBitmapDone, onBitmapDone.run(null); }); } - // }); + // }); } // rounds percents to be exact 100% in sum @@ -5396,4 +5532,86 @@ public static int[] roundPercents(float[] percents, int[] output) { return output; } + + public static boolean isRTL(CharSequence text) { + if (text == null || text.length() <= 0) { + return false; + } + char c; + for (int i = 0; i < text.length(); ++i) { + c = text.charAt(i); + if (c >= 0x590 && c <= 0x6ff) { + return true; + } + } + return false; + } + + private static Pattern 
uriParse; + + private static Pattern getURIParsePattern() { + if (uriParse == null) { + uriParse = Pattern.compile("^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\\?([^#]*))?(#(.*))?"); // RFC 3986 B + } + return uriParse; + } + + public static String getHostAuthority(String uri) { + if (uri == null) { + return null; + } + // CVE-2017-13274 + Matcher matcher = getURIParsePattern().matcher(uri); + if (matcher.matches()) { + String authority = matcher.group(4); + if (authority != null) { + authority = authority.toLowerCase(); + } + return authority; + } + return null; + } + + public static String getHostAuthority(Uri uri) { + if (uri == null) { + return null; + } + return getHostAuthority(uri.toString()); + } + + public static boolean intersect1d(int x1, int x2, int y1, int y2) { + return Math.max(x1, x2) >= Math.min(y1, y2) && Math.max(y1, y2) >= Math.min(x1, x2); + } + + public static String getSysInfoString(String path) { + RandomAccessFile reader = null; + try { + reader = new RandomAccessFile(path, "r"); + String line = reader.readLine(); + if (line != null) { + return line; + } + } catch (Exception ignore) { + + } finally { + if (reader != null) { + try { + reader.close(); + } catch (Exception ignore) { + } + } + } + return null; + } + + public static Long getSysInfoLong(String path) { + String line = getSysInfoString(path); + if (line != null) { + try { + return Utilities.parseLong(line); + } catch (Exception e) { + } + } + return null; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/AnimatedFileDrawableStream.java b/TMessagesProj/src/main/java/org/telegram/messenger/AnimatedFileDrawableStream.java index 1e7173924e..55c2fa5641 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/AnimatedFileDrawableStream.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/AnimatedFileDrawableStream.java @@ -19,14 +19,19 @@ public class AnimatedFileDrawableStream implements FileLoadOperationStream { private boolean preview; private boolean finishedLoadingFile; private String finishedFilePath; + private int loadingPriority; - public AnimatedFileDrawableStream(TLRPC.Document d, ImageLocation l, Object p, int a, boolean prev) { + private int debugCanceledCount; + private boolean debugReportSend; + + public AnimatedFileDrawableStream(TLRPC.Document d, ImageLocation l, Object p, int a, boolean prev, int loadingPriority) { document = d; location = l; parentObject = p; currentAccount = a; preview = prev; - loadOperation = FileLoader.getInstance(currentAccount).loadStreamFile(this, document, location, parentObject, 0, preview); + this.loadingPriority = loadingPriority; + loadOperation = FileLoader.getInstance(currentAccount).loadStreamFile(this, document, location, parentObject, 0, preview, loadingPriority); } public boolean isFinishedLoadingFile() { @@ -40,6 +45,15 @@ public String getFinishedFilePath() { public int read(int offset, int readLength) { synchronized (sync) { if (canceled) { + debugCanceledCount++; + if (!debugReportSend && debugCanceledCount > 100) { + debugReportSend = true; + if (BuildVars.DEBUG_PRIVATE_VERSION) { + throw new RuntimeException("infinity stream reading!!!"); + } else { + FileLog.e(new RuntimeException("infinity stream reading!!!")); + } + } return 0; } } @@ -56,12 +70,18 @@ public int read(int offset, int readLength) { finishedFilePath = loadOperation.getCacheFileFinal().getAbsolutePath(); } if (availableLength == 0) { + synchronized (sync) { + if (canceled) { + cancelLoadingInternal(); + return 0; + } + } if (loadOperation.isPaused() || 
lastOffset != offset || preview) { - FileLoader.getInstance(currentAccount).loadStreamFile(this, document, location, parentObject, offset, preview); + FileLoader.getInstance(currentAccount).loadStreamFile(this, document, location, parentObject, offset, preview, loadingPriority); } synchronized (sync) { if (canceled) { - FileLoader.getInstance(currentAccount).cancelLoadFile(document); + cancelLoadingInternal(); return 0; } countDownLatch = new CountDownLatch(1); @@ -87,6 +107,9 @@ public void cancel() { } public void cancel(boolean removeLoading) { + if (canceled) { + return; + } synchronized (sync) { if (countDownLatch != null) { countDownLatch.countDown(); @@ -94,10 +117,20 @@ public void cancel(boolean removeLoading) { FileLoader.getInstance(currentAccount).removeLoadingVideo(document, false, true); } } + if (removeLoading) { + cancelLoadingInternal(); + } canceled = true; } } + private void cancelLoadingInternal() { + FileLoader.getInstance(currentAccount).cancelLoadFile(document); + if (location != null) { + FileLoader.getInstance(currentAccount).cancelLoadFile(location.location, "mp4"); + } + } + public void reset() { synchronized (sync) { canceled = false; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ApplicationLoader.java b/TMessagesProj/src/main/java/org/telegram/messenger/ApplicationLoader.java index fbef02939b..49e9861ab1 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ApplicationLoader.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ApplicationLoader.java @@ -324,6 +324,7 @@ public void onActivityStarted(Activity activity) { org.osmdroid.config.Configuration.getInstance().setOsmdroidBasePath(new File(ApplicationLoader.applicationContext.getCacheDir(), "osmdroid")); LauncherIconController.tryFixLauncherIconIfNeeded(); + ProxyRotationController.init(); } // Local Push Service, TFoss implementation diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/AuthTokensHelper.java b/TMessagesProj/src/main/java/org/telegram/messenger/AuthTokensHelper.java new file mode 100644 index 0000000000..045670cba4 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/messenger/AuthTokensHelper.java @@ -0,0 +1,119 @@ +package org.telegram.messenger; + +import android.content.Context; +import android.content.SharedPreferences; + +import com.google.android.exoplayer2.util.Log; + +import org.telegram.tgnet.SerializedData; +import org.telegram.tgnet.TLRPC; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; + +public class AuthTokensHelper { + + public static ArrayList getSavedLogOutTokens() { + SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("saved_tokens", Context.MODE_PRIVATE); + int count = preferences.getInt("count", 0); + + if (count == 0) { + return null; + } + + ArrayList tokens = new ArrayList<>(); + for (int i = 0; i < count; i++) { + String value = preferences.getString("log_out_token_" + i, ""); + SerializedData serializedData = new SerializedData(Utilities.hexToBytes(value)); + TLRPC.TL_auth_loggedOut token = TLRPC.TL_auth_loggedOut.TLdeserialize(serializedData, serializedData.readInt32(true), true); + if (token != null) { + tokens.add(token); + } + } + + return tokens; + } + + public static void saveLogOutTokens(ArrayList tokens) { + SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("saved_tokens", Context.MODE_PRIVATE); + ArrayList activeTokens = new ArrayList<>(); + preferences.edit().clear().apply(); + int date = (int) 
(System.currentTimeMillis() / 1000L); + for (int i = 0; i < Math.min(20, tokens.size()); i++) { + activeTokens.add(tokens.get(i)); + } + if (activeTokens.size() > 0) { + SharedPreferences.Editor editor = preferences.edit(); + editor.putInt("count", activeTokens.size()); + for (int i = 0; i < activeTokens.size(); i++) { + SerializedData data = new SerializedData(activeTokens.get(i).getObjectSize()); + activeTokens.get(i).serializeToStream(data); + editor.putString("log_out_token_" + i, Utilities.bytesToHex(data.toByteArray())); + } + editor.apply(); + // BackupAgent.requestBackup(ApplicationLoader.applicationContext); + } + } + + public static ArrayList getSavedLogInTokens() { + SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("saved_tokens_login", Context.MODE_PRIVATE); + int count = preferences.getInt("count", 0); + + if (count == 0) { + return null; + } + + ArrayList tokens = new ArrayList<>(); + for (int i = 0; i < count; i++) { + String value = preferences.getString("log_in_token_" + i, ""); + SerializedData serializedData = new SerializedData(Utilities.hexToBytes(value)); + TLRPC.auth_Authorization token = TLRPC.auth_Authorization.TLdeserialize(serializedData, serializedData.readInt32(true), true); + if (token instanceof TLRPC.TL_auth_authorization) { + tokens.add((TLRPC.TL_auth_authorization) token); + } + } + + return tokens; + } + + public static void saveLogInToken(TLRPC.TL_auth_authorization token) { + if (BuildVars.DEBUG_VERSION) { + FileLog.d("saveLogInToken " + new String(token.future_auth_token, StandardCharsets.UTF_8)); + } + ArrayList tokens = getSavedLogInTokens(); + if (tokens == null) { + tokens = new ArrayList<>(); + } + tokens.add(0, token); + saveLogInTokens(tokens); + } + + private static void saveLogInTokens(ArrayList tokens) { + SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("saved_tokens_login", Context.MODE_PRIVATE); + ArrayList activeTokens = new ArrayList<>(); + preferences.edit().clear().apply(); + for (int i = 0; i < Math.min(20, tokens.size()); i++) { + activeTokens.add(tokens.get(i)); + } + if (activeTokens.size() > 0) { + SharedPreferences.Editor editor = preferences.edit(); + editor.putInt("count", activeTokens.size()); + for (int i = 0; i < activeTokens.size(); i++) { + SerializedData data = new SerializedData(activeTokens.get(i).getObjectSize()); + activeTokens.get(i).serializeToStream(data); + editor.putString("log_in_token_" + i, Utilities.bytesToHex(data.toByteArray())); + } + editor.apply(); + BackupAgent.requestBackup(ApplicationLoader.applicationContext); + } + } + + public static void addLogOutToken(TLRPC.TL_auth_loggedOut response) { + SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("saved_tokens", Context.MODE_PRIVATE); + int count = preferences.getInt("count", 0); + SerializedData data = new SerializedData(response.getObjectSize()); + response.serializeToStream(data); + preferences.edit().putString("log_out_token_" + count, Utilities.bytesToHex(data.toByteArray())).putInt("count", count + 1).apply(); + BackupAgent.requestBackup(ApplicationLoader.applicationContext); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/AutoDeleteMediaTask.java b/TMessagesProj/src/main/java/org/telegram/messenger/AutoDeleteMediaTask.java new file mode 100644 index 0000000000..3470caf374 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/messenger/AutoDeleteMediaTask.java @@ -0,0 +1,264 @@ +package 
org.telegram.messenger; + +import android.util.SparseArray; + +import java.io.File; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; + +public class AutoDeleteMediaTask { + + public static Set usingFilePaths = Collections.newSetFromMap(new ConcurrentHashMap<>()); + + public static void run() { + int time = (int) (System.currentTimeMillis() / 1000); + if (!BuildVars.DEBUG_PRIVATE_VERSION && Math.abs(time - SharedConfig.lastKeepMediaCheckTime) < 24 * 60 * 60) { + return; + } + SharedConfig.lastKeepMediaCheckTime = time; + File cacheDir = FileLoader.checkDirectory(FileLoader.MEDIA_DIR_CACHE); + + Utilities.cacheClearQueue.postRunnable(() -> { + long startTime = System.currentTimeMillis(); + if (BuildVars.LOGS_ENABLED) { + FileLog.d("checkKeepMedia start task"); + } + boolean hasExceptions = false; + ArrayList cacheByChatsControllers = new ArrayList<>(); + for (int account : SharedConfig.activeAccounts) { + if (UserConfig.getInstance(account).isClientActivated()) { + CacheByChatsController cacheByChatsController = UserConfig.getInstance(account).getMessagesController().getCacheByChatsController(); + cacheByChatsControllers.add(cacheByChatsController); + if (cacheByChatsController.getKeepMediaExceptionsByDialogs().size() > 0) { + hasExceptions = true; + } + } + } + + int[] keepMediaByTypes = new int[3]; + boolean allKeepMediaTypesForever = true; + long keepMediaMinSeconds = Long.MAX_VALUE; + for (int i = 0; i < 3; i++) { + keepMediaByTypes[i] = SharedConfig.getPreferences().getInt("keep_media_type_" + i, CacheByChatsController.getDefault(i)); + if (keepMediaByTypes[i] != CacheByChatsController.KEEP_MEDIA_FOREVER) { + allKeepMediaTypesForever = false; + } + long days = CacheByChatsController.getDaysInSeconds(keepMediaByTypes[i]); + if (days < keepMediaMinSeconds) { + keepMediaMinSeconds = days; + } + } + if (hasExceptions) { + allKeepMediaTypesForever = false; + } + int autoDeletedFiles = 0; + long autoDeletedFilesSize = 0; + + int deletedFilesBySize = 0; + long deletedFilesBySizeSize = 0; + int skippedFiles = 0; + + if (!allKeepMediaTypesForever) { + //long currentTime = time - 60 * 60 * 24 * days; + final SparseArray paths = ImageLoader.getInstance().createMediaPaths(); + for (int a = 0; a < paths.size(); a++) { + boolean isCacheDir = false; + if (paths.keyAt(a) == FileLoader.MEDIA_DIR_CACHE) { + isCacheDir = true; + } + File dir = paths.valueAt(a); + try { + File[] files = dir.listFiles(); + ArrayList keepMediaFiles = new ArrayList<>(); + if (files != null) { + for (int i = 0; i < files.length; i++) { + if (files[i].isDirectory() || usingFilePaths.contains(files[i].getAbsolutePath())) { + continue; + } + keepMediaFiles.add(new CacheByChatsController.KeepMediaFile(files[i])); + } + } + for (int i = 0; i < cacheByChatsControllers.size(); i++) { + cacheByChatsControllers.get(i).lookupFiles(keepMediaFiles); + } + for (int i = 0; i < keepMediaFiles.size(); i++) { + CacheByChatsController.KeepMediaFile file = keepMediaFiles.get(i); + if (file.keepMedia == CacheByChatsController.KEEP_MEDIA_FOREVER) { + continue; + } + long seconds; + if (file.keepMedia >= 0) { + seconds = CacheByChatsController.getDaysInSeconds(file.keepMedia); + } else if (file.dialogType >= 0) { + seconds = CacheByChatsController.getDaysInSeconds(keepMediaByTypes[file.dialogType]); + } else if (isCacheDir) { + continue; + } else { + seconds = keepMediaMinSeconds; + } + if (seconds == Long.MAX_VALUE) { + continue; + } + long lastUsageTime = 
Utilities.getLastUsageFileTime(file.file.getAbsolutePath()); + long timeLocal = time - seconds; + boolean needDelete = lastUsageTime > 316000000 && lastUsageTime < timeLocal && !usingFilePaths.contains(file.file.getPath()); + if (needDelete) { + try { + if (BuildVars.LOGS_ENABLED) { + autoDeletedFiles++; + autoDeletedFilesSize += file.file.length(); + } + if (BuildVars.DEBUG_PRIVATE_VERSION) { + FileLog.d("delete file " + file.file.getPath() + " last_usage_time=" + lastUsageTime + " time_local=" + timeLocal); + } + file.file.delete(); + } catch (Exception exception) { + FileLog.e(exception); + } + } + } + } catch (Throwable e) { + FileLog.e(e); + } + } + } + + int maxCacheGb = SharedConfig.getPreferences().getInt("cache_limit", Integer.MAX_VALUE); + if (maxCacheGb != Integer.MAX_VALUE) { + long maxCacheSize; + if (maxCacheGb == 1) { + maxCacheSize = 1024L * 1024L * 300L; + } else { + maxCacheSize = maxCacheGb * 1024L * 1024L * 1000L; + } + final SparseArray paths = ImageLoader.getInstance().createMediaPaths(); + long totalSize = 0; + for (int a = 0; a < paths.size(); a++) { + totalSize += Utilities.getDirSize(paths.valueAt(a).getAbsolutePath(), 0, true); + } + if (totalSize > maxCacheSize) { + ArrayList allFiles = new ArrayList<>(); + for (int a = 0; a < paths.size(); a++) { + File dir = paths.valueAt(a); + fillFilesRecursive(dir, allFiles); + } + for (int i = 0; i < cacheByChatsControllers.size(); i++) { + cacheByChatsControllers.get(i).lookupFiles(allFiles); + } + Collections.sort(allFiles, (o1, o2) -> { + if (o2.lastUsageDate > o1.lastUsageDate) { + return -1; + } else if (o2.lastUsageDate < o1.lastUsageDate) { + return 1; + } + return 0; + }); + + for (int i = 0; i < allFiles.size(); i++) { + if (allFiles.get(i).keepMedia == CacheByChatsController.KEEP_MEDIA_FOREVER) { + continue; + } + if (allFiles.get(i).lastUsageDate <= 0) { + skippedFiles++; + continue; + } + long size = allFiles.get(i).file.length(); + totalSize -= size; + + try { + deletedFilesBySize++; + deletedFilesBySizeSize += size; + allFiles.get(i).file.delete(); + } catch (Exception e) { + + } + + if (totalSize < maxCacheSize) { + break; + } + } + } + } + + File stickersPath = new File(cacheDir, "acache"); + if (stickersPath.exists()) { + long currentTime = time - 60 * 60 * 24; + try { + Utilities.clearDir(stickersPath.getAbsolutePath(), 0, currentTime, false); + } catch (Throwable e) { + FileLog.e(e); + } + } + MessagesController.getGlobalMainSettings().edit() + .putInt("lastKeepMediaCheckTime", SharedConfig.lastKeepMediaCheckTime) + .apply(); + + if (BuildVars.LOGS_ENABLED) { + FileLog.d("checkKeepMedia task end time " + (System.currentTimeMillis() - startTime) + " auto deleted info: files " + autoDeletedFiles + " size " + AndroidUtilities.formatFileSize(autoDeletedFilesSize) + " deleted by size limit info: files " + deletedFilesBySize + " size " + AndroidUtilities.formatFileSize(deletedFilesBySizeSize) + " unknownTimeFiles " + skippedFiles); + } + }); + } + + private static void fillFilesRecursive(final File fromFolder, ArrayList fileInfoList) { + if (fromFolder == null) { + return; + } + File[] files = fromFolder.listFiles(); + if (files == null) { + return; + } + for (final File fileEntry : files) { + if (fileEntry.isDirectory()) { + fillFilesRecursive(fileEntry, fileInfoList); + } else { + if (fileEntry.getName().equals(".nomedia")) { + continue; + } + if (usingFilePaths.contains(fileEntry.getAbsolutePath())) { + continue; + } + fileInfoList.add(new FileInfoInternal(fileEntry)); + } + } + } + + private static class 
+        final long lastUsageDate;
+
+        private FileInfoInternal(File file) {
+            super(file);
+            this.lastUsageDate = Utilities.getLastUsageFileTime(file.getAbsolutePath());
+        }
+    }
+
+    public static void lockFile(File file) {
+        if (file == null) {
+            return;
+        }
+        lockFile(file.getAbsolutePath());
+    }
+
+    public static void unlockFile(File file) {
+        if (file == null) {
+            return;
+        }
+        unlockFile(file.getAbsolutePath());
+    }
+
+    public static void lockFile(String file) {
+        if (file == null) {
+            return;
+        }
+        usingFilePaths.add(file);
+    }
+
+    public static void unlockFile(String file) {
+        if (file == null) {
+            return;
+        }
+        usingFilePaths.remove(file);
+    }
+
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/BackupAgent.java b/TMessagesProj/src/main/java/org/telegram/messenger/BackupAgent.java
new file mode 100644
index 0000000000..78ff6ee838
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/BackupAgent.java
@@ -0,0 +1,35 @@
+package org.telegram.messenger;
+
+import android.app.backup.BackupAgentHelper;
+import android.app.backup.BackupDataInput;
+import android.app.backup.BackupDataOutput;
+import android.app.backup.BackupManager;
+import android.app.backup.FullBackupDataOutput;
+import android.app.backup.RestoreObserver;
+import android.app.backup.SharedPreferencesBackupHelper;
+import android.content.Context;
+import android.os.ParcelFileDescriptor;
+import android.util.Log;
+
+import org.telegram.tgnet.TLRPC;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+public class BackupAgent extends BackupAgentHelper {
+
+    private static BackupManager backupManager;
+
+    @Override
+    public void onCreate() {
+        SharedPreferencesBackupHelper helper = new SharedPreferencesBackupHelper(this, "saved_tokens", "saved_tokens_login");
+        addHelper("prefs", helper);
+    }
+
+    public static void requestBackup(Context context) {
+        if (backupManager == null) {
+            backupManager = new BackupManager(context);
+        }
+        backupManager.dataChanged();
+    }
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/BillingController.java b/TMessagesProj/src/main/java/org/telegram/messenger/BillingController.java
index eb13d69992..36d3b31a89 100644
--- a/TMessagesProj/src/main/java/org/telegram/messenger/BillingController.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/BillingController.java
@@ -32,6 +32,9 @@ public class BillingController {
 
     private static BillingController instance;
 
+    private String lastPremiumTransaction = "";
+    private String lastPremiumToken = "";
+
     public static BillingController getInstance() {
         if (instance == null) {
             instance = new BillingController(ApplicationLoader.applicationContext);
@@ -43,6 +46,14 @@ private BillingController(Context ctx) {
     }
 
+    public String getLastPremiumTransaction() {
+        return lastPremiumTransaction;
+    }
+
+    public String getLastPremiumToken() {
+        return lastPremiumToken;
+    }
+
     public String formatCurrency(long amount, String currency) {
         return formatCurrency(amount, currency, getCurrencyExp(currency));
     }
 
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java b/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java
index 6185d8e0a4..f555cb699b 100644
--- a/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java
@@ -24,6 +24,8 @@ public class BuildVars {
     public static boolean USE_CLOUD_STRINGS = true;
     public static boolean NO_SCOPED_STORAGE
= Build.VERSION.SDK_INT <= 29; + // SafetyNet key for Google Identity SDK, set it to empty to disable + public static String SAFETYNET_KEY = "AIzaSyDqt8P-7F7CPCseMkOiVRgb1LY8RN1bvH8"; public static int BUILD_VERSION; // generated public static String BUILD_VERSION_STRING; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/CacheByChatsController.java b/TMessagesProj/src/main/java/org/telegram/messenger/CacheByChatsController.java index 537e1456a5..93a8c7a39e 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/CacheByChatsController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/CacheByChatsController.java @@ -41,7 +41,7 @@ public static int getDefault(int type) { } else if (type == KEEP_MEDIA_TYPE_GROUP) { return KEEP_MEDIA_ONE_MONTH; } else if (type == KEEP_MEDIA_TYPE_CHANNEL) { - return KEEP_MEDIA_ONE_MONTH; + return KEEP_MEDIA_ONE_WEEK; } return SharedConfig.keepMedia; } @@ -61,16 +61,16 @@ public static String getKeepMediaString(int keepMedia) { public static long getDaysInSeconds(int keepMedia) { long seconds; - if (keepMedia == CacheByChatsController.KEEP_MEDIA_FOREVER) { - seconds = Long.MAX_VALUE; - } else if (keepMedia == CacheByChatsController.KEEP_MEDIA_ONE_WEEK) { + if (keepMedia == CacheByChatsController.KEEP_MEDIA_ONE_WEEK) { seconds = 60L * 60L * 24L * 7L; } else if (keepMedia == CacheByChatsController.KEEP_MEDIA_ONE_MONTH) { seconds = 60L * 60L * 24L * 30L; } else if (keepMedia == CacheByChatsController.KEEP_MEDIA_ONE_DAY) { seconds = 60L * 60L * 24L; - } else { //one min + } else if (keepMedia == CacheByChatsController.KEEP_MEDIA_ONE_MINUTE && BuildVars.DEBUG_PRIVATE_VERSION) { //one min seconds = 60L; + } else { + seconds = Long.MAX_VALUE; } return seconds; } @@ -125,12 +125,12 @@ public void setKeepMedia(int type, int keepMedia) { SharedConfig.getPreferences().edit().putInt("keep_media_type_" + type, keepMedia).apply(); } - public void lookupFiles(ArrayList keepMediaFiles) { + public void lookupFiles(ArrayList keepMediaFiles) { LongSparseArray> filesByDialogId = FileLoader.getInstance(currentAccount).getFileDatabase().lookupFiles(keepMediaFiles); LongSparseArray exceptionsByType = getKeepMediaExceptionsByDialogs(); for (int i = 0; i < filesByDialogId.size(); i++) { long dialogId = filesByDialogId.keyAt(i); - ArrayList files = filesByDialogId.valueAt(i); + ArrayList files = filesByDialogId.valueAt(i); int type; if (dialogId >= 0) { type = KEEP_MEDIA_TYPE_USER; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/CallReceiver.java b/TMessagesProj/src/main/java/org/telegram/messenger/CallReceiver.java index eed5adaf93..1662b96327 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/CallReceiver.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/CallReceiver.java @@ -23,8 +23,39 @@ public void onReceive(final Context context, Intent intent) { String phoneState = intent.getStringExtra(TelephonyManager.EXTRA_STATE); if (TelephonyManager.EXTRA_STATE_RINGING.equals(phoneState)) { String phoneNumber = intent.getStringExtra(TelephonyManager.EXTRA_INCOMING_NUMBER); - NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didReceiveCall, PhoneFormat.stripExceptNumbers(phoneNumber)); + String phone = PhoneFormat.stripExceptNumbers(phoneNumber); + SharedConfig.getPreferences().edit() + .putString("last_call_phone_number", phone) + .putLong("last_call_time", System.currentTimeMillis()) + .apply(); + 
NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didReceiveCall, phone); } } } + + public static String getLastReceivedCall() { + String phone = SharedConfig.getPreferences().getString("last_call_phone_number", null); + if (phone == null) { + return null; + } + long lastTime = SharedConfig.getPreferences().getLong("last_call_time", 0); + if (System.currentTimeMillis() - lastTime < 1000 * 60 * 60 * 15) { + return phone; + } + return null; + } + + public static void checkLastReceivedCall() { + String lastCall = getLastReceivedCall(); + if (lastCall != null) { + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didReceiveCall, lastCall); + } + } + + public static void clearLastCall() { + SharedConfig.getPreferences().edit() + .remove("last_call_phone_number") + .remove("last_call_time") + .apply(); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ChatMessagesMetadataController.java b/TMessagesProj/src/main/java/org/telegram/messenger/ChatMessagesMetadataController.java new file mode 100644 index 0000000000..ecc0615f14 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ChatMessagesMetadataController.java @@ -0,0 +1,109 @@ +package org.telegram.messenger; + +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ChatActivity; + +import java.util.ArrayList; + +public class ChatMessagesMetadataController { + + final ChatActivity chatActivity; + private ArrayList reactionsToCheck = new ArrayList<>(10); + private ArrayList extendedMediaToCheck = new ArrayList<>(10); + + ArrayList reactionsRequests = new ArrayList<>(); + ArrayList extendedMediaRequests = new ArrayList<>(); + + + public ChatMessagesMetadataController(ChatActivity chatActivity) { + this.chatActivity = chatActivity; + } + + public void checkMessages(ChatActivity.ChatActivityAdapter chatAdapter, int maxAdapterPosition, int minAdapterPosition, long currentTime) { + ArrayList messages = chatActivity.messages; + if (!chatActivity.isInScheduleMode() && maxAdapterPosition >= 0 && minAdapterPosition >= 0) { + int from = minAdapterPosition - chatAdapter.messagesStartRow - 10; + int to = maxAdapterPosition - chatAdapter.messagesStartRow + 10; + if (from < 0) { + from = 0; + } + if (to > messages.size()) { + to = messages.size(); + } + reactionsToCheck.clear(); + extendedMediaToCheck.clear(); + for (int i = from; i < to; i++) { + MessageObject messageObject = messages.get(i); + if (chatActivity.getThreadMessage() != messageObject && messageObject.getId() > 0 && messageObject.messageOwner.action == null && (currentTime - messageObject.reactionsLastCheckTime) > 15000L) { + messageObject.reactionsLastCheckTime = currentTime; + reactionsToCheck.add(messageObject); + } + if (chatActivity.getThreadMessage() != messageObject && messageObject.getId() > 0 && messageObject.hasExtendedMediaPreview() && (currentTime - messageObject.extendedMediaLastCheckTime) > 30000L) { + messageObject.extendedMediaLastCheckTime = currentTime; + extendedMediaToCheck.add(messageObject); + } + } + loadReactionsForMessages(chatActivity.getDialogId(), reactionsToCheck); + loadExtendedMediaForMessages(chatActivity.getDialogId(), extendedMediaToCheck); + } + } + + public void loadReactionsForMessages(long dialogId, ArrayList visibleObjects) { + if (visibleObjects.isEmpty()) { + return; + } + TLRPC.TL_messages_getMessagesReactions req = new TLRPC.TL_messages_getMessagesReactions(); + req.peer = chatActivity.getMessagesController().getInputPeer(dialogId); + for (int i = 0; i < 
visibleObjects.size(); i++) { + MessageObject messageObject = visibleObjects.get(i); + req.id.add(messageObject.getId()); + } + int reqId = chatActivity.getConnectionsManager().sendRequest(req, (response, error) -> { + if (error == null) { + TLRPC.Updates updates = (TLRPC.Updates) response; + for (int i = 0; i < updates.updates.size(); i++) { + if (updates.updates.get(i) instanceof TLRPC.TL_updateMessageReactions) { + ((TLRPC.TL_updateMessageReactions) updates.updates.get(i)).updateUnreadState = false; + } + } + chatActivity.getMessagesController().processUpdates(updates, false); + } + }); + reactionsRequests.add(reqId); + if (reactionsRequests.size() > 5) { + chatActivity.getConnectionsManager().cancelRequest(reactionsRequests.remove(0), false); + } + } + + public void loadExtendedMediaForMessages(long dialogId, ArrayList visibleObjects) { + if (visibleObjects.isEmpty()) { + return; + } + TLRPC.TL_messages_getExtendedMedia req = new TLRPC.TL_messages_getExtendedMedia(); + req.peer = chatActivity.getMessagesController().getInputPeer(dialogId); + for (int i = 0; i < visibleObjects.size(); i++) { + MessageObject messageObject = visibleObjects.get(i); + req.id.add(messageObject.getId()); + } + int reqId = chatActivity.getConnectionsManager().sendRequest(req, (response, error) -> { + if (error == null) { + chatActivity.getMessagesController().processUpdates((TLRPC.Updates) response, false); + } + }); + extendedMediaRequests.add(reqId); + if (extendedMediaRequests.size() > 5) { + chatActivity.getConnectionsManager().cancelRequest(extendedMediaRequests.remove(0), false); + } + } + + public void onFragmentDestroy() { + for (int i = 0; i < reactionsRequests.size(); i++) { + chatActivity.getConnectionsManager().cancelRequest(reactionsRequests.remove(i), false); + } + reactionsRequests.clear(); + for (int i = 0; i < extendedMediaRequests.size(); i++) { + chatActivity.getConnectionsManager().cancelRequest(extendedMediaRequests.remove(i), false); + } + extendedMediaRequests.clear(); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ChatObject.java b/TMessagesProj/src/main/java/org/telegram/messenger/ChatObject.java index bd471c81a0..dbb810169e 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ChatObject.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ChatObject.java @@ -38,6 +38,7 @@ public class ChatObject { public static final int CHAT_TYPE_CHANNEL = 2; public static final int CHAT_TYPE_USER = 3; public static final int CHAT_TYPE_MEGAGROUP = 4; + public static final int CHAT_TYPE_FORUM = 5; public static final int ACTION_PIN = 0; public static final int ACTION_CHANGE_INFO = 1; @@ -46,6 +47,7 @@ public class ChatObject { public static final int ACTION_ADD_ADMINS = 4; public static final int ACTION_POST = 5; public static final int ACTION_SEND = 6; + public static final int ACTION_SEND_TEXT = 22; public static final int ACTION_SEND_MEDIA = 7; public static final int ACTION_SEND_STICKERS = 8; public static final int ACTION_EMBED_LINKS = 9; @@ -56,6 +58,15 @@ public class ChatObject { public static final int ACTION_MANAGE_CALLS = 14; public static final int ACTION_MANAGE_TOPICS = 15; + public static final int ACTION_SEND_PHOTO = 16; + public static final int ACTION_SEND_VIDEO = 17; + public static final int ACTION_SEND_MUSIC = 18; + public static final int ACTION_SEND_DOCUMENTS = 19; + public static final int ACTION_SEND_VOICE = 20; + public static final int ACTION_SEND_ROUND = 21; + public static final int ACTION_SEND_PLAIN = 22; + public static final 
int ACTION_SEND_GIFS = 23; + public final static int VIDEO_FRAME_NO_FRAME = 0; public final static int VIDEO_FRAME_REQUESTING = 1; public final static int VIDEO_FRAME_HAS_FRAME = 2; @@ -85,6 +96,61 @@ public static boolean isForum(int currentAccount, long dialogId) { return false; } + public static boolean canSendAnyMedia(TLRPC.Chat currentChat) { + return canSendPhoto(currentChat) || canSendVideo(currentChat) || canSendRoundVideo(currentChat)|| canSendVoice(currentChat) || canSendDocument(currentChat) || canSendMusic(currentChat) || canSendStickers(currentChat); + } + + public static String getAllowedSendString(TLRPC.Chat chat) { + StringBuilder stringBuilder = new StringBuilder(); + if (ChatObject.canSendPhoto(chat)) { + stringBuilder.append(LocaleController.getString("SendMediaPermissionPhotos", R.string.SendMediaPermissionPhotos)); + } + if (ChatObject.canSendVideo(chat)) { + if (stringBuilder.length() > 0) { + stringBuilder.append(", "); + } + stringBuilder.append(LocaleController.getString("SendMediaPermissionVideos", R.string.SendMediaPermissionVideos)); + } + if (ChatObject.canSendStickers(chat)) { + if (stringBuilder.length() > 0) { + stringBuilder.append(", "); + } + stringBuilder.append(LocaleController.getString("SendMediaPermissionStickersGifs", R.string.SendMediaPermissionStickersGifs)); + } + if (ChatObject.canSendMusic(chat)) { + if (stringBuilder.length() > 0) { + stringBuilder.append(", "); + } + stringBuilder.append(LocaleController.getString("SendMediaPermissionMusic", R.string.SendMediaPermissionMusic)); + } + if (ChatObject.canSendDocument(chat)) { + if (stringBuilder.length() > 0) { + stringBuilder.append(", "); + } + stringBuilder.append(LocaleController.getString("SendMediaPermissionFiles", R.string.SendMediaPermissionFiles)); + } + if (ChatObject.canSendVoice(chat)) { + if (stringBuilder.length() > 0) { + stringBuilder.append(", "); + } + stringBuilder.append(LocaleController.getString("SendMediaPermissionVoice", R.string.SendMediaPermissionVoice)); + } + if (ChatObject.canSendRoundVideo(chat)) { + if (stringBuilder.length() > 0) { + stringBuilder.append(", "); + } + stringBuilder.append(LocaleController.getString("SendMediaPermissionRound", R.string.SendMediaPermissionRound)); + } + if (ChatObject.canSendEmbed(chat)) { + if (stringBuilder.length() > 0) { + stringBuilder.append(", "); + } + stringBuilder.append(LocaleController.getString("SendMediaEmbededLinks", R.string.SendMediaEmbededLinks)); + } + + return stringBuilder.toString(); + } + public static class Call { public final static int RECORD_TYPE_AUDIO = 0, RECORD_TYPE_VIDEO_PORTAIT = 1, @@ -1230,7 +1296,11 @@ public void sortParticipants() { return Integer.compare(o2.date, o1.date); } }; - Collections.sort(sortedParticipants, comparator); + try { + Collections.sort(sortedParticipants, comparator); + } catch (Exception e) { + + } TLRPC.TL_groupCallParticipant lastParticipant = sortedParticipants.isEmpty() ? 
null : sortedParticipants.get(sortedParticipants.size() - 1); if (videoIsActive(lastParticipant, false, this) || videoIsActive(lastParticipant, true, this)) { if (call.unmuted_video_count > activeVideos) { @@ -1412,6 +1482,13 @@ private static boolean isBannableAction(int action) { case ACTION_SEND_POLLS: case ACTION_VIEW: case ACTION_MANAGE_TOPICS: + case ACTION_SEND_PHOTO: + case ACTION_SEND_VIDEO: + case ACTION_SEND_MUSIC: + case ACTION_SEND_DOCUMENTS: + case ACTION_SEND_VOICE: + case ACTION_SEND_ROUND: + case ACTION_SEND_PLAIN: return true; } return false; @@ -1459,14 +1536,31 @@ private static boolean getBannedRight(TLRPC.TL_chatBannedRights rights, int acti return rights.view_messages; case ACTION_MANAGE_TOPICS: return rights.manage_topics; + case ACTION_SEND_PHOTO: + return rights.send_photos; + case ACTION_SEND_VIDEO: + return rights.send_videos; + case ACTION_SEND_MUSIC: + return rights.send_audios; + case ACTION_SEND_DOCUMENTS: + return rights.send_docs; + case ACTION_SEND_VOICE: + return rights.send_voices; + case ACTION_SEND_ROUND: + return rights.send_roundvideos; + case ACTION_SEND_PLAIN: + return rights.send_plain; } return false; } public static boolean isActionBannedByDefault(TLRPC.Chat chat, int action) { - if (getBannedRight(chat.banned_rights, action)) { + if (chat == null) { return false; } + if (getBannedRight(chat.banned_rights, action) && getBannedRight(chat.default_banned_rights, action)) { + return true; + } return getBannedRight(chat.default_banned_rights, action); } @@ -1628,8 +1722,31 @@ public static boolean canSendEmbed(TLRPC.Chat chat) { return canUserDoAction(chat, ACTION_EMBED_LINKS); } - public static boolean canSendMedia(TLRPC.Chat chat) { - return canUserDoAction(chat, ACTION_SEND_MEDIA); + // public static boolean canSendMedia(TLRPC.Chat chat) { +// return canUserDoAction(chat, ACTION_SEND_MEDIA); +// } + public static boolean canSendPhoto(TLRPC.Chat chat) { + return canUserDoAction(chat, ACTION_SEND_PHOTO); + } + + public static boolean canSendVideo(TLRPC.Chat chat) { + return canUserDoAction(chat, ACTION_SEND_VIDEO); + } + + public static boolean canSendMusic(TLRPC.Chat chat) { + return canUserDoAction(chat, ACTION_SEND_MUSIC); + } + + public static boolean canSendDocument(TLRPC.Chat chat) { + return canUserDoAction(chat, ACTION_SEND_DOCUMENTS); + } + + public static boolean canSendVoice(TLRPC.Chat chat) { + return canUserDoAction(chat, ACTION_SEND_VOICE); + } + + public static boolean canSendRoundVideo(TLRPC.Chat chat) { + return canUserDoAction(chat, ACTION_SEND_ROUND); } public static boolean canSendPolls(TLRPC.Chat chat) { @@ -1640,6 +1757,10 @@ public static boolean canSendMessages(TLRPC.Chat chat) { return canUserDoAction(chat, ACTION_SEND); } + public static boolean canSendPlain(TLRPC.Chat chat) { + return canUserDoAction(chat, ACTION_SEND_PLAIN); + } + public static boolean canPost(TLRPC.Chat chat) { return canUserDoAction(chat, ACTION_POST); } @@ -1760,6 +1881,13 @@ public static String getBannedRightsString(TLRPC.TL_chatBannedRights bannedRight currentBannedRights += bannedRights.change_info ? 1 : 0; currentBannedRights += bannedRights.pin_messages ? 1 : 0; currentBannedRights += bannedRights.manage_topics ? 1 : 0; + currentBannedRights += bannedRights.send_photos ? 1 : 0; + currentBannedRights += bannedRights.send_videos ? 1 : 0; + currentBannedRights += bannedRights.send_roundvideos ? 1 : 0; + currentBannedRights += bannedRights.send_voices ? 1 : 0; + currentBannedRights += bannedRights.send_audios ? 
1 : 0; + currentBannedRights += bannedRights.send_docs ? 1 : 0; + currentBannedRights += bannedRights.send_plain ? 1 : 0; currentBannedRights += bannedRights.until_date; return currentBannedRights; } @@ -1819,6 +1947,93 @@ public static boolean isPublic(TLRPC.Chat chat) { return !TextUtils.isEmpty(getPublicUsername(chat)); } + public static String getRestrictedErrorText(TLRPC.Chat chat, int action) { + if (action == ACTION_SEND_GIFS) { + if (chat == null || ChatObject.isActionBannedByDefault(chat, action)) { + return LocaleController.getString("GlobalAttachGifRestricted", R.string.GlobalAttachGifRestricted); + } else if (AndroidUtilities.isBannedForever(chat.banned_rights)) { + return LocaleController.formatString("AttachGifRestrictedForever", R.string.AttachGifRestrictedForever); + } else { + return LocaleController.formatString("AttachGifRestricted", R.string.AttachGifRestricted, LocaleController.formatDateForBan(chat.banned_rights.until_date)); + } + } else if (action == ACTION_SEND_STICKERS) { + if (chat == null || ChatObject.isActionBannedByDefault(chat, action)) { + return LocaleController.getString("GlobalAttachStickersRestricted", R.string.GlobalAttachStickersRestricted); + } else if (AndroidUtilities.isBannedForever(chat.banned_rights)) { + return LocaleController.formatString("AttachStickersRestrictedForever", R.string.AttachStickersRestrictedForever); + } else { + return LocaleController.formatString("AttachStickersRestricted", R.string.AttachStickersRestricted, LocaleController.formatDateForBan(chat.banned_rights.until_date)); + } + } else if (action == ACTION_SEND_PHOTO) { + if (chat == null || ChatObject.isActionBannedByDefault(chat, action)) { + return LocaleController.getString("GlobalAttachPhotoRestricted", R.string.GlobalAttachPhotoRestricted); + } else if (AndroidUtilities.isBannedForever(chat.banned_rights)) { + return LocaleController.formatString("AttachPhotoRestrictedForever", R.string.AttachPhotoRestrictedForever); + } else { + return LocaleController.formatString("AttachPhotoRestricted", R.string.AttachPhotoRestricted, LocaleController.formatDateForBan(chat.banned_rights.until_date)); + } + } else if (action == ACTION_SEND_VIDEO) { + if (chat == null || ChatObject.isActionBannedByDefault(chat, action)) { + return LocaleController.getString("GlobalAttachVideoRestricted", R.string.GlobalAttachVideoRestricted); + } else if (AndroidUtilities.isBannedForever(chat.banned_rights)) { + return LocaleController.formatString("AttachVideoRestrictedForever", R.string.AttachVideoRestrictedForever); + } else { + return LocaleController.formatString("AttachVideoRestricted", R.string.AttachVideoRestricted, LocaleController.formatDateForBan(chat.banned_rights.until_date)); + } + } else if (action == ACTION_SEND_DOCUMENTS) { + if (chat == null || ChatObject.isActionBannedByDefault(chat, action)) { + return LocaleController.getString("GlobalAttachDocumentsRestricted", R.string.GlobalAttachDocumentsRestricted); + } else if (AndroidUtilities.isBannedForever(chat.banned_rights)) { + return LocaleController.formatString("AttachDocumentsRestrictedForever", R.string.AttachDocumentsRestrictedForever); + } else { + return LocaleController.formatString("AttachDocumentsRestricted", R.string.AttachDocumentsRestricted, LocaleController.formatDateForBan(chat.banned_rights.until_date)); + } + } else if (action == ACTION_SEND_MEDIA) { + if (chat == null || ChatObject.isActionBannedByDefault(chat, action)) { + return LocaleController.getString("GlobalAttachMediaRestricted", 
R.string.GlobalAttachMediaRestricted); + } else if (AndroidUtilities.isBannedForever(chat.banned_rights)) { + return LocaleController.formatString("AttachMediaRestrictedForever", R.string.AttachMediaRestrictedForever); + } else { + return LocaleController.formatString("AttachMediaRestricted", R.string.AttachMediaRestricted, LocaleController.formatDateForBan(chat.banned_rights.until_date)); + } + } else if (action == ACTION_SEND_MUSIC) { + if (chat == null || ChatObject.isActionBannedByDefault(chat, action)) { + return LocaleController.getString("GlobalAttachAudioRestricted", R.string.GlobalAttachAudioRestricted); + } else if (AndroidUtilities.isBannedForever(chat.banned_rights)) { + return LocaleController.formatString("AttachAudioRestrictedForever", R.string.AttachAudioRestrictedForever); + } else { + return LocaleController.formatString("AttachAudioRestricted", R.string.AttachAudioRestricted, LocaleController.formatDateForBan(chat.banned_rights.until_date)); + } + } else if (action == ACTION_SEND_PLAIN) { + if (chat == null || ChatObject.isActionBannedByDefault(chat, action)) { + return LocaleController.getString("GlobalAttachPlainRestricted", R.string.GlobalAttachPlainRestricted); + } else if (AndroidUtilities.isBannedForever(chat.banned_rights)) { + return LocaleController.formatString("AttachPlainRestrictedForever", R.string.AttachPlainRestrictedForever); + } else { + return LocaleController.formatString("AttachPlainRestricted", R.string.AttachPlainRestricted, LocaleController.formatDateForBan(chat.banned_rights.until_date)); + } + } else if (action == ACTION_SEND_ROUND) { + if (chat == null || ChatObject.isActionBannedByDefault(chat, action)) { + return LocaleController.getString("GlobalAttachRoundRestricted", R.string.GlobalAttachRoundRestricted); + } else if (AndroidUtilities.isBannedForever(chat.banned_rights)) { + return LocaleController.formatString("AttachRoundRestrictedForever", R.string.AttachRoundRestrictedForever); + } else { + return LocaleController.formatString("AttachRoundRestricted", R.string.AttachRoundRestricted, LocaleController.formatDateForBan(chat.banned_rights.until_date)); + } + } else if (action == ACTION_SEND_VOICE) { + if (chat == null || ChatObject.isActionBannedByDefault(chat, action)) { + return LocaleController.getString("GlobalAttachVoiceRestricted", R.string.GlobalAttachVoiceRestricted); + } else if (AndroidUtilities.isBannedForever(chat.banned_rights)) { + return LocaleController.formatString("AttachVoiceRestrictedForever", R.string.AttachVoiceRestrictedForever); + } else { + return LocaleController.formatString("AttachVoiceRestricted", R.string.AttachVoiceRestricted, LocaleController.formatDateForBan(chat.banned_rights.until_date)); + } + } + + return ""; + } + + public static class VideoParticipant { public TLRPC.TL_groupCallParticipant participant; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ContactsController.java b/TMessagesProj/src/main/java/org/telegram/messenger/ContactsController.java index b6c7df3955..79b09aaa5a 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ContactsController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ContactsController.java @@ -35,10 +35,13 @@ import org.telegram.tgnet.TLRPC; import org.telegram.ui.Components.Bulletin; +import java.text.CollationKey; +import java.text.Collator; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.Locale; import java.util.concurrent.ConcurrentHashMap; import 
java.util.regex.Matcher; import java.util.regex.Pattern; @@ -130,6 +133,47 @@ public boolean deliverSelfNotifications() { } } + private static Locale cachedCollatorLocale; + private static Collator cachedCollator; + public static Collator getLocaleCollator() { + if (cachedCollator == null || cachedCollatorLocale != Locale.getDefault()) { + try { + cachedCollator = Collator.getInstance(cachedCollatorLocale = Locale.getDefault()); + cachedCollator.setStrength(Collator.SECONDARY); + } catch (Exception e) { + FileLog.e(e, true); + } + } + if (cachedCollator == null) { + try { + cachedCollator = Collator.getInstance(); + cachedCollator.setStrength(Collator.SECONDARY); + } catch (Exception e) { + FileLog.e(e, true); + } + } + if (cachedCollator == null) { + cachedCollator = new Collator() { + @Override + public int compare(String source, String target) { + if (source == null || target == null) { + return 0; + } + return source.compareTo(target); + } + @Override + public CollationKey getCollationKey(String source) { + return null; + } + @Override + public int hashCode() { + return 0; + } + }; + } + return cachedCollator; + } + public static class Contact { public int contact_id; public String key; @@ -751,7 +795,7 @@ public HashMap readContactsFromPhoneBook() { String lookup_key = cur.getString(cur.getColumnIndex(ContactsContract.Contacts.LOOKUP_KEY)); String name = cur.getString(cur.getColumnIndex(ContactsContract.Contacts.DISPLAY_NAME)); String phone = null; - if (contactsMap.get(lookup_key) != null || TextUtils.isEmpty(name)) { + if ((contactsMap != null && contactsMap.get(lookup_key) != null) || TextUtils.isEmpty(name)) { continue; } pCur = cr.query( @@ -1512,12 +1556,13 @@ public void processLoadedContacts(final ArrayList contactsArr, getMessagesStorage().putContacts(contactsArr, from != 2); } + final Collator collator = getLocaleCollator(); Collections.sort(contactsArr, (tl_contact, tl_contact2) -> { TLRPC.User user1 = usersDict.get(tl_contact.user_id); TLRPC.User user2 = usersDict.get(tl_contact2.user_id); String name1 = UserObject.getFirstName(user1); String name2 = UserObject.getFirstName(user2); - return name1.compareTo(name2); + return collator.compare(name1, name2); }); final ConcurrentHashMap contactsDictionary = new ConcurrentHashMap<>(20, 1.0f, 2); @@ -1587,7 +1632,7 @@ public void processLoadedContacts(final ArrayList contactsArr, } else if (cv2 == '#') { return -1; } - return s.compareTo(s2); + return collator.compare(s, s2); }); Collections.sort(sortedSectionsArrayMutual, (s, s2) -> { @@ -1598,7 +1643,7 @@ public void processLoadedContacts(final ArrayList contactsArr, } else if (cv2 == '#') { return -1; } - return s.compareTo(s2); + return collator.compare(s, s2); }); AndroidUtilities.runOnUIThread(() -> { @@ -1701,6 +1746,7 @@ private void mergePhonebookAndTelegramContacts(final HashMap arrayList : phoneBookSectionsDictFinal.values()) { Collections.sort(arrayList, (o1, o2) -> { String name1; @@ -1733,7 +1779,7 @@ private void mergePhonebookAndTelegramContacts(final HashMap { @@ -1744,7 +1790,7 @@ private void mergePhonebookAndTelegramContacts(final HashMap { phoneBookSectionsArray = phoneBookSectionsArrayFinal; @@ -1783,29 +1829,31 @@ private void updateUnregisteredContacts() { sortedPhoneBookContacts.add(value); } + final Collator collator = getLocaleCollator(); Collections.sort(sortedPhoneBookContacts, (contact, contact2) -> { - String toComapre1 = contact.first_name; - if (toComapre1.length() == 0) { - toComapre1 = contact.last_name; + String toCompare1 = contact.first_name; 
+ if (toCompare1.length() == 0) { + toCompare1 = contact.last_name; } - String toComapre2 = contact2.first_name; - if (toComapre2.length() == 0) { - toComapre2 = contact2.last_name; + String toCompare2 = contact2.first_name; + if (toCompare2.length() == 0) { + toCompare2 = contact2.last_name; } - return toComapre1.compareTo(toComapre2); + return collator.compare(toCompare1, toCompare2); }); phoneBookContacts = sortedPhoneBookContacts; } private void buildContactsSectionsArrays(boolean sort) { + final Collator collator = getLocaleCollator(); if (sort) { Collections.sort(contacts, (tl_contact, tl_contact2) -> { TLRPC.User user1 = getMessagesController().getUser(tl_contact.user_id); TLRPC.User user2 = getMessagesController().getUser(tl_contact2.user_id); String name1 = UserObject.getFirstName(user1); String name2 = UserObject.getFirstName(user2); - return name1.compareTo(name2); + return collator.compare(name1, name2); }); } @@ -1849,7 +1897,7 @@ private void buildContactsSectionsArrays(boolean sort) { } else if (cv2 == '#') { return -1; } - return s.compareTo(s2); + return collator.compare(s, s2); }); usersSectionsDict = sectionsDict; @@ -1884,7 +1932,8 @@ private boolean hasContactsPermission() { private void performWriteContactsToPhoneBookInternal(ArrayList contactsArray) { Cursor cursor = null; try { - if (!hasContactsPermission()) { + Account account = systemAccount; + if (!hasContactsPermission() || account == null) { return; } final SharedPreferences settings = MessagesController.getMainSettings(currentAccount); @@ -2184,7 +2233,7 @@ private void deleteContactFromPhoneBook(long uid) { Uri rawContactUri = ContactsContract.RawContacts.CONTENT_URI.buildUpon().appendQueryParameter(ContactsContract.CALLER_IS_SYNCADAPTER, "true").appendQueryParameter(ContactsContract.RawContacts.ACCOUNT_NAME, systemAccount.name).appendQueryParameter(ContactsContract.RawContacts.ACCOUNT_TYPE, systemAccount.type).build(); int value = contentResolver.delete(rawContactUri, ContactsContract.RawContacts.SYNC2 + " = " + uid, null); } catch (Exception e) { - FileLog.e(e); + FileLog.e(e, false); } synchronized (observerLock) { ignoreChanges = false; @@ -2711,6 +2760,10 @@ public void deleteConnectionServiceContact() { } } + public static String formatName(TLRPC.User user) { + return formatName(user.first_name, user.last_name, 0); + } + public static String formatName(String firstName, String lastName) { return formatName(firstName, lastName, 0); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/DatabaseMigrationHelper.java b/TMessagesProj/src/main/java/org/telegram/messenger/DatabaseMigrationHelper.java index 1ecc488f48..0a577a01a5 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/DatabaseMigrationHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/DatabaseMigrationHelper.java @@ -6,8 +6,12 @@ import org.telegram.tgnet.NativeByteBuffer; import org.telegram.tgnet.TLRPC; +import java.io.File; +import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; +import java.util.Locale; public class DatabaseMigrationHelper { public static int migrate(MessagesStorage messagesStorage, int version) throws Exception { @@ -1218,6 +1222,173 @@ public static int migrate(MessagesStorage messagesStorage, int version) throws E version = 111; } + if (version == 111) { + database.executeFast("CREATE TABLE emoji_groups(type INTEGER PRIMARY KEY, data BLOB)").stepThis().dispose(); + database.executeFast("PRAGMA user_version = 
112").stepThis().dispose(); + version = 112; + } + + if (version == 112) { + database.executeFast("CREATE TABLE app_config(data BLOB)").stepThis().dispose(); + database.executeFast("PRAGMA user_version = 113").stepThis().dispose(); + version = 113; + } + + if (version == 113) { + //fix issue when database file was deleted + //just reload dialogs + messagesStorage.reset(); + database.executeFast("PRAGMA user_version = 114").stepThis().dispose(); + version = 114; + } + if (version == 114) { + database.executeFast("CREATE TABLE bot_keyboard_topics(uid INTEGER, tid INTEGER, mid INTEGER, info BLOB, PRIMARY KEY(uid, tid))").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS bot_keyboard_topics_idx_mid_v2 ON bot_keyboard_topics(mid, uid, tid);").stepThis().dispose(); + database.executeFast("PRAGMA user_version = 115").stepThis().dispose(); + version = 115; + } return version; } + + + public static boolean recoverDatabase(File oldDatabaseFile, File oldDatabaseWall, File oldDatabaseShm, int currentAccount) { + File filesDir = ApplicationLoader.getFilesDirFixed(); + filesDir = new File(filesDir, "recover_database_" + currentAccount + "/"); + filesDir.mkdirs(); + + File cacheFile = new File(filesDir, "cache4.db"); + File walCacheFile = new File(filesDir, "cache4.db-wal"); + File shmCacheFile = new File(filesDir, "cache4.db-shm"); + try { + cacheFile.delete(); + walCacheFile.delete(); + shmCacheFile.delete(); + } catch (Exception e) { + e.printStackTrace(); + } + + SQLiteDatabase newDatabase = null; + long time = 0; + ArrayList encryptedDialogs = new ArrayList<>(); + ArrayList dialogs = new ArrayList<>(); + boolean recovered = true; + FileLog.d("start recover database"); + + try { + time = System.currentTimeMillis(); + + newDatabase = new SQLiteDatabase(cacheFile.getPath()); + newDatabase.executeFast("PRAGMA secure_delete = ON").stepThis().dispose(); + newDatabase.executeFast("PRAGMA temp_store = MEMORY").stepThis().dispose(); + newDatabase.executeFast("PRAGMA journal_mode = WAL").stepThis().dispose(); + newDatabase.executeFast("PRAGMA journal_size_limit = 10485760").stepThis().dispose(); + + MessagesStorage.createTables(newDatabase); + newDatabase.executeFast("ATTACH DATABASE \"" + oldDatabaseFile.getAbsolutePath() + "\" AS old;").stepThis().dispose(); + + int version = newDatabase.executeInt("PRAGMA old.user_version"); + if (version != MessagesStorage.LAST_DB_VERSION) { + FileLog.e("can't restore database from version " + version); + return false; + } + HashSet excludeTables = new HashSet<>(); + excludeTables.add("messages_v2"); + excludeTables.add("messages_holes"); + excludeTables.add("scheduled_messages_v2"); + excludeTables.add("media_holes_v2"); + excludeTables.add("media_v4"); + excludeTables.add("messages_holes_topics"); + excludeTables.add("messages_topics"); + excludeTables.add("media_topics"); + excludeTables.add("media_holes_topics"); + excludeTables.add("topics"); + excludeTables.add("media_counts_v2"); + excludeTables.add("media_counts_topics"); + excludeTables.add("dialogs"); + excludeTables.add("dialog_filter"); + excludeTables.add("dialog_filter_ep"); + excludeTables.add("dialog_filter_pin_v2"); + + //restore whole tables + for (int i = 0; i < MessagesStorage.DATABASE_TABLES.length; i++) { + String tableName = MessagesStorage.DATABASE_TABLES[i]; + if (excludeTables.contains(tableName)) { + continue; + } + newDatabase.executeFast(String.format(Locale.US, "INSERT OR IGNORE INTO %s SELECT * FROM old.%s;", tableName, tableName)).stepThis().dispose(); + } + + 
SQLiteCursor cursor = newDatabase.queryFinalized("SELECT did FROM old.dialogs"); + + while (cursor.next()) { + long did = cursor.longValue(0); + if (DialogObject.isEncryptedDialog(did)) { + encryptedDialogs.add(did); + } else { + dialogs.add(did); + } + } + cursor.dispose(); + + //restore only secret chats + for (int i = 0; i < encryptedDialogs.size(); i++) { + long dialogId = encryptedDialogs.get(i); + newDatabase.executeFast(String.format(Locale.US, "INSERT OR IGNORE INTO messages_v2 SELECT * FROM old.messages_v2 WHERE uid = %d;", dialogId)).stepThis().dispose(); + newDatabase.executeFast(String.format(Locale.US, "INSERT OR IGNORE INTO messages_holes SELECT * FROM old.messages_holes WHERE uid = %d;", dialogId)).stepThis().dispose(); + newDatabase.executeFast(String.format(Locale.US, "INSERT OR IGNORE INTO media_holes_v2 SELECT * FROM old.media_holes_v2 WHERE uid = %d;", dialogId)).stepThis().dispose(); + newDatabase.executeFast(String.format(Locale.US, "INSERT OR IGNORE INTO media_v4 SELECT * FROM old.media_v4 WHERE uid = %d;", dialogId)).stepThis().dispose(); + } + + SQLitePreparedStatement state5 = newDatabase.executeFast("REPLACE INTO messages_holes VALUES(?, ?, ?)"); + SQLitePreparedStatement state6 = newDatabase.executeFast("REPLACE INTO media_holes_v2 VALUES(?, ?, ?, ?)"); + + for (int a = 0; a < dialogs.size(); a++) { + Long did = dialogs.get(a); + + cursor = newDatabase.queryFinalized("SELECT last_mid_i, last_mid FROM old.dialogs WHERE did = " + did); + + if (cursor.next()) { + long last_mid_i = cursor.longValue(0); + long last_mid = cursor.longValue(1); + newDatabase.executeFast("INSERT OR IGNORE INTO messages_v2 SELECT * FROM old.messages_v2 WHERE uid = " + did + " AND mid IN (" + last_mid_i + "," + last_mid + ")").stepThis().dispose(); + + MessagesStorage.createFirstHoles(did, state5, state6, (int) last_mid, 0); + + } + cursor.dispose(); + cursor = null; + } + + state5.dispose(); + state6.dispose(); + + newDatabase.executeFast("DETACH DATABASE old;").stepThis().dispose(); + newDatabase.close(); + } catch (Exception e) { + FileLog.e(e); + recovered = false; + } + if (!recovered) { + return false; + } + try { + oldDatabaseFile.delete(); + oldDatabaseWall.delete(); + oldDatabaseShm.delete(); + + AndroidUtilities.copyFile(cacheFile, oldDatabaseFile); + AndroidUtilities.copyFile(walCacheFile, oldDatabaseWall); + AndroidUtilities.copyFile(shmCacheFile, oldDatabaseShm); + + cacheFile.delete(); + walCacheFile.delete(); + shmCacheFile.delete(); + } catch (IOException e) { + e.printStackTrace(); + return false; + } + + FileLog.d("database recovered time " + (System.currentTimeMillis() - time)); + return true; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/DispatchQueue.java b/TMessagesProj/src/main/java/org/telegram/messenger/DispatchQueue.java index d57bdaa58b..07bc903660 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/DispatchQueue.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/DispatchQueue.java @@ -69,7 +69,7 @@ public void cancelRunnables(Runnable[] runnables) { public boolean postRunnable(Runnable runnable) { lastTaskTime = SystemClock.elapsedRealtime(); - return postRunnable(runnable, 0); + return postRunnable(runnable, 0); } public boolean postRunnable(Runnable runnable, long delay) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/DispatchQueuePoolBackground.java b/TMessagesProj/src/main/java/org/telegram/messenger/DispatchQueuePoolBackground.java index 355a358e87..185143c822 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/messenger/DispatchQueuePoolBackground.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/DispatchQueuePoolBackground.java @@ -110,7 +110,7 @@ public static void execute(Runnable runnable) { @UiThread public static void execute(Runnable runnable, boolean now) { if (Thread.currentThread() != ApplicationLoader.applicationHandler.getLooper().getThread()) { - if (BuildVars.DEBUG_PRIVATE_VERSION) { + if (BuildVars.DEBUG_VERSION) { FileLog.e(new RuntimeException("wrong thread")); } return; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/DocumentObject.java b/TMessagesProj/src/main/java/org/telegram/messenger/DocumentObject.java index cc2f161b15..e93c240cde 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/DocumentObject.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/DocumentObject.java @@ -58,7 +58,7 @@ public static SvgHelper.SvgDrawable getSvgThumb(ArrayList sizes h = photoSize.h; } if (photoPathSize != null && w != 0 && h != 0) { - SvgHelper.SvgDrawable pathThumb = SvgHelper.getDrawableByPath(SvgHelper.decompress(photoPathSize.bytes), w, h); + SvgHelper.SvgDrawable pathThumb = SvgHelper.getDrawableByPath(photoPathSize.svgPath, w, h); if (pathThumb != null) { pathThumb.setupGradient(colorKey, alpha, false); } @@ -116,14 +116,17 @@ public static SvgHelper.SvgDrawable getSvgThumb(TLRPC.Document document, String int w = 512, h = 512; for (int a = 0, N = document.attributes.size(); a < N; a++) { TLRPC.DocumentAttribute attribute = document.attributes.get(a); - if (attribute instanceof TLRPC.TL_documentAttributeImageSize) { + if ( + attribute instanceof TLRPC.TL_documentAttributeImageSize || + attribute instanceof TLRPC.TL_documentAttributeVideo + ) { w = attribute.w; h = attribute.h; break; } } if (w != 0 && h != 0) { - pathThumb = SvgHelper.getDrawableByPath(SvgHelper.decompress(size.bytes), (int) (w * zoom), (int) (h * zoom)); + pathThumb = SvgHelper.getDrawableByPath(((TLRPC.TL_photoPathSize) size).svgPath, (int) (w * zoom), (int) (h * zoom)); if (pathThumb != null) { pathThumb.setupGradient(colorKey, alpha, false); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/Emoji.java b/TMessagesProj/src/main/java/org/telegram/messenger/Emoji.java index 75229314db..7911255a8d 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/Emoji.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/Emoji.java @@ -78,6 +78,15 @@ public class Emoji { public static float emojiDrawingYOffset; public static boolean emojiDrawingUseAlpha = true; + private static String[] DEFAULT_RECENT = new String[]{ + "\uD83D\uDE02", "\uD83D\uDE18", "\u2764", "\uD83D\uDE0D", "\uD83D\uDE0A", "\uD83D\uDE01", + "\uD83D\uDC4D", "\u263A", "\uD83D\uDE14", "\uD83D\uDE04", "\uD83D\uDE2D", "\uD83D\uDC8B", + "\uD83D\uDE12", "\uD83D\uDE33", "\uD83D\uDE1C", "\uD83D\uDE48", "\uD83D\uDE09", "\uD83D\uDE03", + "\uD83D\uDE22", "\uD83D\uDE1D", "\uD83D\uDE31", "\uD83D\uDE21", "\uD83D\uDE0F", "\uD83D\uDE1E", + "\uD83D\uDE05", "\uD83D\uDE1A", "\uD83D\uDE4A", "\uD83D\uDE0C", "\uD83D\uDE00", "\uD83D\uDE0B", + "\uD83D\uDE06", "\uD83D\uDC4C", "\uD83D\uDE10", "\uD83D\uDE15" + }; + private final static int MAX_RECENT_EMOJI_COUNT = 48; private static boolean isSelectedCustomEmojiPack; @@ -729,6 +738,14 @@ public static void addRecentEmoji(String code) { emojiUseHistory.put(code, ++count); } + public static void removeRecentEmoji(String code) { + emojiUseHistory.remove(code); + recentEmoji.remove(code); + if (emojiUseHistory.isEmpty() 
|| recentEmoji.isEmpty()) { + addRecentEmoji(DEFAULT_RECENT[0]); + } + } + public static void sortEmoji() { recentEmoji.clear(); for (HashMap.Entry entry : emojiUseHistory.entrySet()) { @@ -822,15 +839,8 @@ public static void loadRecentEmoji() { } if (emojiUseHistory.isEmpty()) { if (!preferences.getBoolean("filled_default", false)) { - String[] newRecent = new String[]{ - "\uD83D\uDE02", "\uD83D\uDE18", "\u2764", "\uD83D\uDE0D", "\uD83D\uDE0A", "\uD83D\uDE01", - "\uD83D\uDC4D", "\u263A", "\uD83D\uDE14", "\uD83D\uDE04", "\uD83D\uDE2D", "\uD83D\uDC8B", - "\uD83D\uDE12", "\uD83D\uDE33", "\uD83D\uDE1C", "\uD83D\uDE48", "\uD83D\uDE09", "\uD83D\uDE03", - "\uD83D\uDE22", "\uD83D\uDE1D", "\uD83D\uDE31", "\uD83D\uDE21", "\uD83D\uDE0F", "\uD83D\uDE1E", - "\uD83D\uDE05", "\uD83D\uDE1A", "\uD83D\uDE4A", "\uD83D\uDE0C", "\uD83D\uDE00", "\uD83D\uDE0B", - "\uD83D\uDE06", "\uD83D\uDC4C", "\uD83D\uDE10", "\uD83D\uDE15"}; - for (int i = 0; i < newRecent.length; i++) { - emojiUseHistory.put(newRecent[i], newRecent.length - i); + for (int i = 0; i < DEFAULT_RECENT.length; i++) { + emojiUseHistory.put(DEFAULT_RECENT[i], DEFAULT_RECENT.length - i); } preferences.edit().putBoolean("filled_default", true).commit(); saveRecentEmoji(); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/EmojiData.java b/TMessagesProj/src/main/java/org/telegram/messenger/EmojiData.java index 100c1b648b..ec36ede986 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/EmojiData.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/EmojiData.java @@ -63,7 +63,7 @@ public class EmojiData { }; public static final String[] emojiColored = { - "🫶", "🤲", "👐", "🙌", "👏", "👍", "👎", "👊", "✊", "🤛", "🤜", "🤞", "✌", "🫰", "🤟", "🤘", "👌", "🤌", "🤏", "🫳", "🫴", "👈", "👉", "👆", "👇", "☝", "✋", "🤚", "🖐", "🖖", "👋", "🤙", "🫲", "🫱", "💪", "🖕", "✍", "🙏", "🫵", "🦶", "🦵", "👂", "🦻", "👃", "👶", "👧", "🧒", "👦", "👩", "🧑", "👨", "👩‍🦱", "🧑‍🦱", "👨‍🦱", "👩‍🦰", "🧑‍🦰", "👨‍🦰", "👱‍♀", "👱", "👱‍♂", "👩‍🦳", "🧑‍🦳", "👨‍🦳", "👩‍🦲", "🧑‍🦲", "👨‍🦲", "🧔‍♀", "🧔", "🧔‍♂", "👵", "🧓", "👴", "👲", "👳‍♀", "👳", "👳‍♂", "🧕", "👮‍♀", "👮", "👮‍♂", "👷‍♀", "👷", "👷‍♂", "💂‍♀", "💂", "💂‍♂", "🕵‍♀", "🕵", "🕵‍♂", "👩‍⚕", "🧑‍⚕", "👨‍⚕", "👩‍🌾", "🧑‍🌾", "👨‍🌾", "👩‍🍳", "🧑‍🍳", "👨‍🍳", "👩‍🎓", "🧑‍🎓", "👨‍🎓", "👩‍🎤", "🧑‍🎤", "👨‍🎤", "👩‍🏫", "🧑‍🏫", "👨‍🏫", "👩‍🏭", "🧑‍🏭", "👨‍🏭", "👩‍💻", "🧑‍💻", "👨‍💻", "👩‍💼", "🧑‍💼", "👨‍💼", "👩‍🔧", "🧑‍🔧", "👨‍🔧", "👩‍🔬", "🧑‍🔬", "👨‍🔬", "👩‍🎨", "🧑‍🎨", "👨‍🎨", "👩‍🚒", "🧑‍🚒", "👨‍🚒", "👩‍✈", "🧑‍✈", "👨‍✈", "👩‍🚀", "🧑‍🚀", "👨‍🚀", "👩‍⚖", "🧑‍⚖", "👨‍⚖", "👰‍♀", "👰", "👰‍♂", "🤵‍♀", "🤵", "🤵‍♂", "👸", "🤴", "🥷", "🦸‍♀", "🦸", "🦸‍♂", "🦹‍♀", "🦹", "🦹‍♂", "🤶", "🧑‍🎄", "🎅", "🧙‍♀", "🧙", "🧙‍♂", "🧝‍♀", "🧝", "🧝‍♂", "🧛‍♀", "🧛", "🧛‍♂", "🧜‍♀", "🧜", "🧜‍♂", "🧚‍♀", "🧚", "🧚‍♂", "👼", "🤰", "🫄", "🫃", "🤱", "👩‍🍼", "🧑‍🍼", "👨‍🍼", "🙇‍♀", "🙇", "🙇‍♂", "💁‍♀", "💁", "💁‍♂", "🙅‍♀", "🙅", "🙅‍♂", "🙆‍♀", "🙆", "🙆‍♂", "🙋‍♀", "🙋", "🙋‍♂", "🧏‍♀", "🧏", "🧏‍♂", "🤦‍♀", "🤦", "🤦‍♂", "🤷‍♀", "🤷", "🤷‍♂", "🙎‍♀", "🙎", "🙎‍♂", "🙍‍♀", "🙍", "🙍‍♂", "💇‍♀", "💇", "💇‍♂", "💆‍♀", "💆", "💆‍♂", "🧖‍♀", "🧖", "🧖‍♂", "💅", "🤳", "💃", "🕺", "🕴", "👩‍🦽", "🧑‍🦽", "👨‍🦽", "👩‍🦼", "🧑‍🦼", "👨‍🦼", "🚶‍♀", "🚶", "🚶‍♂", "👩‍🦯", "🧑‍🦯", "👨‍🦯", "🧎‍♀", "🧎", "🧎‍♂", "🏃‍♀", "🏃", "🏃‍♂", "🧍‍♀", "🧍", "🧍‍♂", "🏋‍♀", "🏋", "🏋‍♂", "🤸‍♀", "🤸", "🤸‍♂", "⛹‍♀", "⛹", "⛹‍♂", "🤾‍♀", "🤾", "🤾‍♂", "🏌‍♀", "🏌", "🏌‍♂", "🏇", "🧘‍♀", "🧘", "🧘‍♂", "🏄‍♀", "🏄", "🏄‍♂", "🏊‍♀", "🏊", "🏊‍♂", "🤽‍♀", "🤽", "🤽‍♂", "🚣‍♀", "🚣", "🚣‍♂", "🧗‍♀", "🧗", "🧗‍♂", "🚵‍♀", "🚵", "🚵‍♂", "🚴‍♀", "🚴", "🚴‍♂", "🤹‍♀", "🤹", "🤹‍♂", "🛀" + "🫶", "🤲", "👐", "🙌", "👏", "👍", "👎", "👊", "✊", "🤛", "🤜", "🫷", "🫸", "🤞", "✌", "🫰", "🤟", "🤘", "👌", "🤌", "🤏", "🫳", "🫴", "👈", "👉", "👆", 
"👇", "☝", "✋", "🤚", "🖐", "🖖", "👋", "🤙", "🫲", "🫱", "💪", "🖕", "✍", "🙏", "🫵", "🦶", "🦵", "👂", "🦻", "👃", "👶", "👧", "🧒", "👦", "👩", "🧑", "👨", "👩‍🦱", "🧑‍🦱", "👨‍🦱", "👩‍🦰", "🧑‍🦰", "👨‍🦰", "👱‍♀", "👱", "👱‍♂", "👩‍🦳", "🧑‍🦳", "👨‍🦳", "👩‍🦲", "🧑‍🦲", "👨‍🦲", "🧔‍♀", "🧔", "🧔‍♂", "👵", "🧓", "👴", "👲", "👳‍♀", "👳", "👳‍♂", "🧕", "👮‍♀", "👮", "👮‍♂", "👷‍♀", "👷", "👷‍♂", "💂‍♀", "💂", "💂‍♂", "🕵‍♀", "🕵", "🕵‍♂", "👩‍⚕", "🧑‍⚕", "👨‍⚕", "👩‍🌾", "🧑‍🌾", "👨‍🌾", "👩‍🍳", "🧑‍🍳", "👨‍🍳", "👩‍🎓", "🧑‍🎓", "👨‍🎓", "👩‍🎤", "🧑‍🎤", "👨‍🎤", "👩‍🏫", "🧑‍🏫", "👨‍🏫", "👩‍🏭", "🧑‍🏭", "👨‍🏭", "👩‍💻", "🧑‍💻", "👨‍💻", "👩‍💼", "🧑‍💼", "👨‍💼", "👩‍🔧", "🧑‍🔧", "👨‍🔧", "👩‍🔬", "🧑‍🔬", "👨‍🔬", "👩‍🎨", "🧑‍🎨", "👨‍🎨", "👩‍🚒", "🧑‍🚒", "👨‍🚒", "👩‍✈", "🧑‍✈", "👨‍✈", "👩‍🚀", "🧑‍🚀", "👨‍🚀", "👩‍⚖", "🧑‍⚖", "👨‍⚖", "👰‍♀", "👰", "👰‍♂", "🤵‍♀", "🤵", "🤵‍♂", "👸", "🤴", "🥷", "🦸‍♀", "🦸", "🦸‍♂", "🦹‍♀", "🦹", "🦹‍♂", "🤶", "🧑‍🎄", "🎅", "🧙‍♀", "🧙", "🧙‍♂", "🧝‍♀", "🧝", "🧝‍♂", "🧛‍♀", "🧛", "🧛‍♂", "🧜‍♀", "🧜", "🧜‍♂", "🧚‍♀", "🧚", "🧚‍♂", "👼", "🤰", "🫄", "🫃", "🤱", "👩‍🍼", "🧑‍🍼", "👨‍🍼", "🙇‍♀", "🙇", "🙇‍♂", "💁‍♀", "💁", "💁‍♂", "🙅‍♀", "🙅", "🙅‍♂", "🙆‍♀", "🙆", "🙆‍♂", "🙋‍♀", "🙋", "🙋‍♂", "🧏‍♀", "🧏", "🧏‍♂", "🤦‍♀", "🤦", "🤦‍♂", "🤷‍♀", "🤷", "🤷‍♂", "🙎‍♀", "🙎", "🙎‍♂", "🙍‍♀", "🙍", "🙍‍♂", "💇‍♀", "💇", "💇‍♂", "💆‍♀", "💆", "💆‍♂", "🧖‍♀", "🧖", "🧖‍♂", "💅", "🤳", "💃", "🕺", "🕴", "👩‍🦽", "🧑‍🦽", "👨‍🦽", "👩‍🦼", "🧑‍🦼", "👨‍🦼", "🚶‍♀", "🚶", "🚶‍♂", "👩‍🦯", "🧑‍🦯", "👨‍🦯", "🧎‍♀", "🧎", "🧎‍♂", "🏃‍♀", "🏃", "🏃‍♂", "🧍‍♀", "🧍", "🧍‍♂", "🏋‍♀", "🏋", "🏋‍♂", "🤸‍♀", "🤸", "🤸‍♂", "⛹‍♀", "⛹", "⛹‍♂", "🤾‍♀", "🤾", "🤾‍♂", "🏌‍♀", "🏌", "🏌‍♂", "🏇", "🧘‍♀", "🧘", "🧘‍♂", "🏄‍♀", "🏄", "🏄‍♂", "🏊‍♀", "🏊", "🏊‍♂", "🤽‍♀", "🤽", "🤽‍♂", "🚣‍♀", "🚣", "🚣‍♂", "🧗‍♀", "🧗", "🧗‍♂", "🚵‍♀", "🚵", "🚵‍♂", "🚴‍♀", "🚴", "🚴‍♂", "🤹‍♀", "🤹", "🤹‍♂", "🛀" }; public static final String[] emojiBigColored = { @@ -72,7 +72,7 @@ public class EmojiData { public static final String[][] dataColored = { new String[]{ - "😀", "😃", "😄", "😁", "😆", "🥹", "😅", "😂", "🤣", "🥲", "☺", "😊", "😇", "🙂", "🙃", "😉", "😌", "😍", "🥰", "😘", "😗", "😙", "😚", "😋", "😛", "😝", "😜", "🤪", "🤨", "🧐", "🤓", "😎", "🥸", "🤩", "🥳", "😏", "😒", "😞", "😔", "😟", "😕", "🙁", "☹", "😣", "😖", "😫", "😩", "🥺", "😢", "😭", "😤", "😠", "😡", "🤬", "🤯", "😳", "🥵", "🥶", "😶‍🌫", "😱", "😨", "😰", "😥", "😓", "🤗", "🤔", "🫣", "🤭", "🫢", "🫡", "🤫", "🫠", "🤥", "😶", "🫥", "😐", "🫤", "😑", "😬", "🙄", "😯", "😦", "😧", "😮", "😲", "🥱", "😴", "🤤", "😪", "😮‍💨", "😵", "😵‍💫", "🤐", "🥴", "🤢", "🤮", "🤧", "😷", "🤒", "🤕", "🤑", "🤠", "😈", "👿", "👹", "👺", "🤡", "💩", "👻", "💀", "☠", "👽", "👾", "🤖", "🎃", "😺", "😸", "😹", "😻", "😼", "😽", "🙀", "😿", "😾", + "😀", "😃", "😄", "😁", "😆", "🥹", "😅", "😂", "🤣", "🥲", "☺", "😊", "😇", "🙂", "🙃", "😉", "😌", "😍", "🥰", "😘", "😗", "😙", "😚", "😋", "😛", "😝", "😜", "🤪", "🤨", "🧐", "🤓", "😎", "🥸", "🤩", "🥳", "😏", "😒", "😞", "😔", "😟", "😕", "🙁", "☹", "😣", "😖", "😫", "😩", "🥺", "😢", "😭", "😤", "😠", "😡", "🤬", "🤯", "😳", "🥵", "🥶", "😶‍🌫", "😱", "😨", "😰", "😥", "😓", "🤗", "🤔", "🫣", "🤭", "🫢", "🫡", "🤫", "🫠", "🤥", "😶", "🫥", "😐", "🫤", "😑", "🫨", "😬", "🙄", "😯", "😦", "😧", "😮", "😲", "🥱", "😴", "🤤", "😪", "😮‍💨", "😵", "😵‍💫", "🤐", "🥴", "🤢", "🤮", "🤧", "😷", "🤒", "🤕", "🤑", "🤠", "😈", "👿", "👹", "👺", "🤡", "💩", "👻", "💀", "☠", "👽", "👾", "🤖", "🎃", "😺", "😸", "😹", "😻", "😼", "😽", "🙀", "😿", "😾", "🫶", "🤲", "👐", @@ -85,6 +85,8 @@ public class EmojiData { "✊", "🤛", "🤜", + "🫷", + "🫸", "🤞", "✌", "🫰", @@ -387,13 +389,13 @@ public class EmojiData { "🤹‍♀", "🤹", "🤹‍♂", - "🎭", "🩰", "🎨", "🎬", "🎤", "🎧", "🎼", "🎹", "🥁", "🪘", "🎷", "🎺", "🪗", "🎸", "🪕", "🎻", "🎲", "♟", "🎯", "🎳", "🎮", "🎰", "🧩" + "🎭", "🩰", "🎨", "🎬", "🎤", "🎧", "🎼", "🎹", "🪇", "🥁", "🪘", "🎷", "🎺", "🪗", "🎸", "🪕", "🎻", "🪈", "🎲", "♟", "🎯", "🎳", "🎮", "🎰", "🧩" }, null, new String[]{ "⌚", "📱", "📲", "💻", "⌨", "🖥", "🖨", "🖱", "🖲", "🕹", "🗜", 
"💽", "💾", "💿", "📀", "📼", "📷", "📸", "📹", "🎥", "📽", "🎞", "📞", "☎", "📟", "📠", "📺", "📻", "🎙", "🎚", "🎛", "🧭", "⏱", "⏲", "⏰", "🕰", "⌛", "⏳", "📡", "🔋", "🪫", "🔌", "💡", "🔦", "🕯", "🪔", "🧯", "🛢", "💸", "💵", "💴", "💶", "💷", "🪙", "💰", "💳", "🪪", "💎", "⚖", "🪜", "🧰", "🪛", "🔧", "🔨", "⚒", "🛠", "⛏", "🪚", "🔩", "⚙", "🪤", "🧱", "⛓", "🧲", "🔫", "💣", "🧨", "🪓", "🔪", "🗡", "⚔", "🛡", "🚬", "⚰", "🪦", "⚱", "🏺", "🔮", "📿", "🧿", "🪬", "💈", "⚗", "🔭", "🔬", "🕳", "🩻", "🩹", "🩺", "💊", "💉", "🩸", "🧬", "🦠", "🧫", "🧪", "🌡", "🧹", "🪠", "🧺", "🧻", "🚽", "🚰", "🚿", "🛁", "🛀", - "🧼", "🪥", "🪒", "🧽", "🪣", "🧴", "🛎", "🔑", "🗝", "🚪", "🪑", "🛋", "🛏", "🛌", "🧸", "🪆", "🖼", "🪞", "🪟", "🛍", "🛒", "🎁", "🎈", "🎏", "🎀", "🪄", "🪅", "🎊", "🎉", "🎎", "🏮", "🎐", "🪩", "🧧", "✉", "📩", "📨", "📧", "💌", "📥", "📤", "📦", "🏷", "🪧", "📪", "📫", "📬", "📭", "📮", "📯", "📜", "📃", "📄", "📑", "🧾", "📊", "📈", "📉", "🗒", "🗓", "📆", "📅", "🗑", "📇", "🗃", "🗳", "🗄", "📋", "📁", "📂", "🗂", "🗞", "📰", "📓", "📔", "📒", "📕", "📗", "📘", "📙", "📚", "📖", "🔖", "🧷", "🔗", "📎", "🖇", "📐", "📏", "🧮", "📌", "📍", "✂", "🖊", "🖋", "✒", "🖌", "🖍", "📝", "✏", "🔍", "🔎", "🔏", "🔐", "🔒", "🔓" + "🧼", "🪥", "🪒", "🪮", "🧽", "🪣", "🧴", "🛎", "🔑", "🗝", "🚪", "🪑", "🛋", "🛏", "🛌", "🧸", "🪆", "🖼", "🪞", "🪟", "🛍", "🛒", "🎁", "🎈", "🎏", "🎀", "🪄", "🪅", "🎊", "🎉", "🎎", "🪭", "🏮", "🎐", "🪩", "🧧", "✉", "📩", "📨", "📧", "💌", "📥", "📤", "📦", "🏷", "🪧", "📪", "📫", "📬", "📭", "📮", "📯", "📜", "📃", "📄", "📑", "🧾", "📊", "📈", "📉", "🗒", "🗓", "📆", "📅", "🗑", "📇", "🗃", "🗳", "🗄", "📋", "📁", "📂", "🗂", "🗞", "📰", "📓", "📔", "📒", "📕", "📗", "📘", "📙", "📚", "📖", "🔖", "🧷", "🔗", "📎", "🖇", "📐", "📏", "🧮", "📌", "📍", "✂", "🖊", "🖋", "✒", "🖌", "🖍", "📝", "✏", "🔍", "🔎", "🔏", "🔐", "🔒", "🔓" }, null, null @@ -503,25 +505,25 @@ public class EmojiData { public static final String[][] data = { new String[]{ - "😀", "😃", "😄", "😁", "😆", "🥹", "😅", "😂", "🤣", "🥲", "☺", "😊", "😇", "🙂", "🙃", "😉", "😌", "😍", "🥰", "😘", "😗", "😙", "😚", "😋", "😛", "😝", "😜", "🤪", "🤨", "🧐", "🤓", "😎", "🥸", "🤩", "🥳", "😏", "😒", "😞", "😔", "😟", "😕", "🙁", "☹", "😣", "😖", "😫", "😩", "🥺", "😢", "😭", "😤", "😠", "😡", "🤬", "🤯", "😳", "🥵", "🥶", "😶‍🌫", "😱", "😨", "😰", "😥", "😓", "🤗", "🤔", "🫣", "🤭", "🫢", "🫡", "🤫", "🫠", "🤥", "😶", "🫥", "😐", "🫤", "😑", "😬", "🙄", "😯", "😦", "😧", "😮", "😲", "🥱", "😴", "🤤", "😪", "😮‍💨", "😵", "😵‍💫", "🤐", "🥴", "🤢", "🤮", "🤧", "😷", "🤒", "🤕", "🤑", "🤠", "😈", "👿", "👹", "👺", "🤡", "💩", "👻", "💀", "☠", "👽", "👾", "🤖", "🎃", "😺", "😸", "😹", "😻", "😼", "😽", "🙀", "😿", "😾", "🫶", "🫶🏻", "🫶🏼", "🫶🏽", "🫶🏾", "🫶🏿", "🤲", "🤲🏻", "🤲🏼", "🤲🏽", "🤲🏾", "🤲🏿", "👐", "👐🏻", "👐🏼", "👐🏽", "👐🏾", "👐🏿", "🙌", "🙌🏻", "🙌🏼", "🙌🏽", "🙌🏾", "🙌🏿", "👏", "👏🏻", "👏🏼", "👏🏽", "👏🏾", "👏🏿", "🤝", "👍", "👍🏻", "👍🏼", "👍🏽", "👍🏾", "👍🏿", "👎", "👎🏻", "👎🏼", "👎🏽", "👎🏾", "👎🏿", "👊", "👊🏻", "👊🏼", "👊🏽", "👊🏾", "👊🏿", "✊", "✊🏻", "✊🏼", "✊🏽", "✊🏾", "✊🏿", "🤛", "🤛🏻", "🤛🏼", "🤛🏽", "🤛🏾", "🤛🏿", "🤜", "🤜🏻", "🤜🏼", "🤜🏽", "🤜🏾", "🤜🏿", "🤞", "🤞🏻", "🤞🏼", "🤞🏽", "🤞🏾", "🤞🏿", "✌", "✌🏻", "✌🏼", "✌🏽", "✌🏾", "✌🏿", "🫰", "🫰🏻", "🫰🏼", "🫰🏽", "🫰🏾", "🫰🏿", "🤟", "🤟🏻", "🤟🏼", "🤟🏽", "🤟🏾", "🤟🏿", "🤘", "🤘🏻", "🤘🏼", "🤘🏽", "🤘🏾", "🤘🏿", "👌", "👌🏻", "👌🏼", "👌🏽", "👌🏾", "👌🏿", "🤌", "🤌🏻", "🤌🏼", "🤌🏽", "🤌🏾", "🤌🏿", "🤏", "🤏🏻", "🤏🏼", "🤏🏽", "🤏🏾", "🤏🏿", "🫳", "🫳🏻", "🫳🏼", "🫳🏽", "🫳🏾", "🫳🏿", "🫴", "🫴🏻", "🫴🏼", "🫴🏽", "🫴🏾", "🫴🏿", "👈", "👈🏻", "👈🏼", "👈🏽", "👈🏾", "👈🏿", "👉", "👉🏻", "👉🏼", "👉🏽", "👉🏾", "👉🏿", "👆", "👆🏻", "👆🏼", "👆🏽", "👆🏾", "👆🏿", "👇", "👇🏻", "👇🏼", "👇🏽", "👇🏾", "👇🏿", "☝", "☝🏻", "☝🏼", "☝🏽", "☝🏾", "☝🏿", "✋", "✋🏻", "✋🏼", "✋🏽", "✋🏾", "✋🏿", "🤚", "🤚🏻", "🤚🏼", "🤚🏽", "🤚🏾", "🤚🏿", "🖐", "🖐🏻", "🖐🏼", "🖐🏽", "🖐🏾", "🖐🏿", "🖖", "🖖🏻", "🖖🏼", "🖖🏽", "🖖🏾", "🖖🏿", "👋", "👋🏻", "👋🏼", "👋🏽", "👋🏾", "👋🏿", "🤙", "🤙🏻", "🤙🏼", "🤙🏽", "🤙🏾", "🤙🏿", "🫲", "🫲🏻", "🫲🏼", "🫲🏽", "🫲🏾", "🫲🏿", "🫱", "🫱🏻", "🫱🏼", "🫱🏽", "🫱🏾", "🫱🏿", "💪", "💪🏻", "💪🏼", 
"💪🏽", "💪🏾", "💪🏿", "🦾", "🖕", "🖕🏻", "🖕🏼", "🖕🏽", "🖕🏾", "🖕🏿", "✍", "✍🏻", "✍🏼", "✍🏽", "✍🏾", "✍🏿", "🙏", "🙏🏻", "🙏🏼", "🙏🏽", "🙏🏾", "🙏🏿", "🫵", "🫵🏻", "🫵🏼", "🫵🏽", "🫵🏾", "🫵🏿", "🦶", "🦶🏻", "🦶🏼", "🦶🏽", "🦶🏾", "🦶🏿", "🦵", "🦵🏻", "🦵🏼", "🦵🏽", "🦵🏾", "🦵🏿", "🦿", "💄", "💋", "👄", "🫦", "🦷", "👅", "👂", "👂🏻", "👂🏼", "👂🏽", "👂🏾", "👂🏿", "🦻", "🦻🏻", "🦻🏼", "🦻🏽", "🦻🏾", "🦻🏿", "👃", "👃🏻", "👃🏼", "👃🏽", "👃🏾", "👃🏿", "👣", "👁", "👀", "🫀", "🫁", "🧠", "🗣", "👤", "👥", "🫂", "👶", "👶🏻", "👶🏼", "👶🏽", "👶🏾", "👶🏿", "👧", "👧🏻", "👧🏼", "👧🏽", "👧🏾", "👧🏿", "🧒", "🧒🏻", "🧒🏼", "🧒🏽", "🧒🏾", "🧒🏿", "👦", "👦🏻", "👦🏼", "👦🏽", "👦🏾", "👦🏿", "👩", "👩🏻", "👩🏼", "👩🏽", "👩🏾", "👩🏿", "🧑", "🧑🏻", "🧑🏼", "🧑🏽", "🧑🏾", "🧑🏿", "👨", "👨🏻", "👨🏼", "👨🏽", "👨🏾", "👨🏿", "👩‍🦱", "👩🏻‍🦱", "👩🏼‍🦱", "👩🏽‍🦱", "👩🏾‍🦱", "👩🏿‍🦱", "🧑‍🦱", "🧑🏻‍🦱", "🧑🏼‍🦱", "🧑🏽‍🦱", "🧑🏾‍🦱", "🧑🏿‍🦱", "👨‍🦱", "👨🏻‍🦱", "👨🏼‍🦱", "👨🏽‍🦱", "👨🏾‍🦱", "👨🏿‍🦱", "👩‍🦰", "👩🏻‍🦰", "👩🏼‍🦰", "👩🏽‍🦰", "👩🏾‍🦰", "👩🏿‍🦰", "🧑‍🦰", "🧑🏻‍🦰", "🧑🏼‍🦰", "🧑🏽‍🦰", "🧑🏾‍🦰", "🧑🏿‍🦰", "👨‍🦰", "👨🏻‍🦰", "👨🏼‍🦰", "👨🏽‍🦰", "👨🏾‍🦰", "👨🏿‍🦰", "👱‍♀", "👱🏻‍♀", "👱🏼‍♀", "👱🏽‍♀", "👱🏾‍♀", "👱🏿‍♀", "👱", "👱🏻", "👱🏼", "👱🏽", "👱🏾", "👱🏿", "👱‍♂", "👱🏻‍♂", "👱🏼‍♂", "👱🏽‍♂", "👱🏾‍♂", "👱🏿‍♂", "👩‍🦳", "👩🏻‍🦳", "👩🏼‍🦳", "👩🏽‍🦳", "👩🏾‍🦳", "👩🏿‍🦳", "🧑‍🦳", "🧑🏻‍🦳", "🧑🏼‍🦳", "🧑🏽‍🦳", "🧑🏾‍🦳", "🧑🏿‍🦳", "👨‍🦳", "👨🏻‍🦳", "👨🏼‍🦳", "👨🏽‍🦳", "👨🏾‍🦳", "👨🏿‍🦳", "👩‍🦲", "👩🏻‍🦲", "👩🏼‍🦲", "👩🏽‍🦲", "👩🏾‍🦲", "👩🏿‍🦲", "🧑‍🦲", "🧑🏻‍🦲", "🧑🏼‍🦲", "🧑🏽‍🦲", "🧑🏾‍🦲", "🧑🏿‍🦲", "👨‍🦲", "👨🏻‍🦲", "👨🏼‍🦲", "👨🏽‍🦲", "👨🏾‍🦲", "👨🏿‍🦲", "🧔‍♀", "🧔🏻‍♀", "🧔🏼‍♀", "🧔🏽‍♀", "🧔🏾‍♀", "🧔🏿‍♀", "🧔", "🧔🏻", "🧔🏼", "🧔🏽", "🧔🏾", "🧔🏿", "🧔‍♂", "🧔🏻‍♂", "🧔🏼‍♂", "🧔🏽‍♂", "🧔🏾‍♂", "🧔🏿‍♂", "👵", "👵🏻", "👵🏼", "👵🏽", "👵🏾", "👵🏿", "🧓", "🧓🏻", "🧓🏼", "🧓🏽", "🧓🏾", "🧓🏿", "👴", "👴🏻", "👴🏼", "👴🏽", "👴🏾", "👴🏿", "👲", "👲🏻", "👲🏼", "👲🏽", "👲🏾", "👲🏿", "👳‍♀", "👳🏻‍♀", "👳🏼‍♀", "👳🏽‍♀", "👳🏾‍♀", "👳🏿‍♀", "👳", "👳🏻", "👳🏼", "👳🏽", "👳🏾", "👳🏿", "👳‍♂", "👳🏻‍♂", "👳🏼‍♂", "👳🏽‍♂", "👳🏾‍♂", "👳🏿‍♂", "🧕", "🧕🏻", "🧕🏼", "🧕🏽", "🧕🏾", "🧕🏿", "👮‍♀", "👮🏻‍♀", "👮🏼‍♀", "👮🏽‍♀", "👮🏾‍♀", "👮🏿‍♀", "👮", "👮🏻", "👮🏼", "👮🏽", "👮🏾", "👮🏿", "👮‍♂", "👮🏻‍♂", "👮🏼‍♂", "👮🏽‍♂", "👮🏾‍♂", "👮🏿‍♂", "👷‍♀", "👷🏻‍♀", "👷🏼‍♀", "👷🏽‍♀", "👷🏾‍♀", "👷🏿‍♀", "👷", "👷🏻", "👷🏼", "👷🏽", "👷🏾", "👷🏿", "👷‍♂", "👷🏻‍♂", "👷🏼‍♂", "👷🏽‍♂", "👷🏾‍♂", "👷🏿‍♂", "💂‍♀", "💂🏻‍♀", "💂🏼‍♀", "💂🏽‍♀", "💂🏾‍♀", "💂🏿‍♀", "💂", "💂🏻", "💂🏼", "💂🏽", "💂🏾", "💂🏿", "💂‍♂", "💂🏻‍♂", "💂🏼‍♂", "💂🏽‍♂", "💂🏾‍♂", "💂🏿‍♂", "🕵‍♀", "🕵🏻‍♀", "🕵🏼‍♀", "🕵🏽‍♀", "🕵🏾‍♀", "🕵🏿‍♀", "🕵", "🕵🏻", "🕵🏼", "🕵🏽", "🕵🏾", "🕵🏿", "🕵‍♂", "🕵🏻‍♂", "🕵🏼‍♂", "🕵🏽‍♂", "🕵🏾‍♂", "🕵🏿‍♂", "👩‍⚕", "👩🏻‍⚕", "👩🏼‍⚕", "👩🏽‍⚕", "👩🏾‍⚕", "👩🏿‍⚕", "🧑‍⚕", "🧑🏻‍⚕", "🧑🏼‍⚕", "🧑🏽‍⚕", "🧑🏾‍⚕", "🧑🏿‍⚕", "👨‍⚕", "👨🏻‍⚕", "👨🏼‍⚕", "👨🏽‍⚕", "👨🏾‍⚕", "👨🏿‍⚕", "👩‍🌾", "👩🏻‍🌾", "👩🏼‍🌾", "👩🏽‍🌾", "👩🏾‍🌾", "👩🏿‍🌾", "🧑‍🌾", "🧑🏻‍🌾", "🧑🏼‍🌾", "🧑🏽‍🌾", "🧑🏾‍🌾", "🧑🏿‍🌾", "👨‍🌾", "👨🏻‍🌾", "👨🏼‍🌾", "👨🏽‍🌾", "👨🏾‍🌾", "👨🏿‍🌾", "👩‍🍳", "👩🏻‍🍳", "👩🏼‍🍳", "👩🏽‍🍳", "👩🏾‍🍳", "👩🏿‍🍳", "🧑‍🍳", "🧑🏻‍🍳", "🧑🏼‍🍳", "🧑🏽‍🍳", "🧑🏾‍🍳", "🧑🏿‍🍳", "👨‍🍳", "👨🏻‍🍳", "👨🏼‍🍳", "👨🏽‍🍳", "👨🏾‍🍳", "👨🏿‍🍳", "👩‍🎓", "👩🏻‍🎓", "👩🏼‍🎓", "👩🏽‍🎓", "👩🏾‍🎓", "👩🏿‍🎓", "🧑‍🎓", "🧑🏻‍🎓", "🧑🏼‍🎓", "🧑🏽‍🎓", "🧑🏾‍🎓", "🧑🏿‍🎓", "👨‍🎓", "👨🏻‍🎓", "👨🏼‍🎓", "👨🏽‍🎓", "👨🏾‍🎓", "👨🏿‍🎓", "👩‍🎤", "👩🏻‍🎤", "👩🏼‍🎤", "👩🏽‍🎤", "👩🏾‍🎤", "👩🏿‍🎤", "🧑‍🎤", "🧑🏻‍🎤", "🧑🏼‍🎤", "🧑🏽‍🎤", "🧑🏾‍🎤", "🧑🏿‍🎤", "👨‍🎤", "👨🏻‍🎤", "👨🏼‍🎤", "👨🏽‍🎤", "👨🏾‍🎤", "👨🏿‍🎤", "👩‍🏫", "👩🏻‍🏫", "👩🏼‍🏫", "👩🏽‍🏫", "👩🏾‍🏫", "👩🏿‍🏫", "🧑‍🏫", "🧑🏻‍🏫", "🧑🏼‍🏫", "🧑🏽‍🏫", "🧑🏾‍🏫", "🧑🏿‍🏫", "👨‍🏫", "👨🏻‍🏫", "👨🏼‍🏫", "👨🏽‍🏫", "👨🏾‍🏫", "👨🏿‍🏫", "👩‍🏭", "👩🏻‍🏭", "👩🏼‍🏭", "👩🏽‍🏭", "👩🏾‍🏭", "👩🏿‍🏭", "🧑‍🏭", "🧑🏻‍🏭", "🧑🏼‍🏭", "🧑🏽‍🏭", "🧑🏾‍🏭", "🧑🏿‍🏭", "👨‍🏭", "👨🏻‍🏭", "👨🏼‍🏭", "👨🏽‍🏭", "👨🏾‍🏭", "👨🏿‍🏭", "👩‍💻", "👩🏻‍💻", "👩🏼‍💻", "👩🏽‍💻", "👩🏾‍💻", "👩🏿‍💻", "🧑‍💻", "🧑🏻‍💻", "🧑🏼‍💻", "🧑🏽‍💻", "🧑🏾‍💻", "🧑🏿‍💻", "👨‍💻", "👨🏻‍💻", "👨🏼‍💻", "👨🏽‍💻", "👨🏾‍💻", "👨🏿‍💻", "👩‍💼", "👩🏻‍💼", "👩🏼‍💼", "👩🏽‍💼", "👩🏾‍💼", "👩🏿‍💼", "🧑‍💼", "🧑🏻‍💼", "🧑🏼‍💼", "🧑🏽‍💼", "🧑🏾‍💼", "🧑🏿‍💼", "👨‍💼", "👨🏻‍💼", "👨🏼‍💼", 
"👨🏽‍💼", "👨🏾‍💼", "👨🏿‍💼", "👩‍🔧", "👩🏻‍🔧", "👩🏼‍🔧", "👩🏽‍🔧", "👩🏾‍🔧", "👩🏿‍🔧", "🧑‍🔧", "🧑🏻‍🔧", "🧑🏼‍🔧", "🧑🏽‍🔧", "🧑🏾‍🔧", "🧑🏿‍🔧", "👨‍🔧", "👨🏻‍🔧", "👨🏼‍🔧", "👨🏽‍🔧", "👨🏾‍🔧", "👨🏿‍🔧", "👩‍🔬", "👩🏻‍🔬", "👩🏼‍🔬", "👩🏽‍🔬", "👩🏾‍🔬", "👩🏿‍🔬", "🧑‍🔬", "🧑🏻‍🔬", "🧑🏼‍🔬", "🧑🏽‍🔬", "🧑🏾‍🔬", "🧑🏿‍🔬", "👨‍🔬", "👨🏻‍🔬", "👨🏼‍🔬", "👨🏽‍🔬", "👨🏾‍🔬", "👨🏿‍🔬", "👩‍🎨", "👩🏻‍🎨", "👩🏼‍🎨", "👩🏽‍🎨", "👩🏾‍🎨", "👩🏿‍🎨", "🧑‍🎨", "🧑🏻‍🎨", "🧑🏼‍🎨", "🧑🏽‍🎨", "🧑🏾‍🎨", "🧑🏿‍🎨", "👨‍🎨", "👨🏻‍🎨", "👨🏼‍🎨", "👨🏽‍🎨", "👨🏾‍🎨", "👨🏿‍🎨", "👩‍🚒", "👩🏻‍🚒", "👩🏼‍🚒", "👩🏽‍🚒", "👩🏾‍🚒", "👩🏿‍🚒", "🧑‍🚒", "🧑🏻‍🚒", "🧑🏼‍🚒", "🧑🏽‍🚒", "🧑🏾‍🚒", "🧑🏿‍🚒", "👨‍🚒", "👨🏻‍🚒", "👨🏼‍🚒", "👨🏽‍🚒", "👨🏾‍🚒", "👨🏿‍🚒", "👩‍✈", "👩🏻‍✈", "👩🏼‍✈", "👩🏽‍✈", "👩🏾‍✈", "👩🏿‍✈", "🧑‍✈", "🧑🏻‍✈", "🧑🏼‍✈", "🧑🏽‍✈", "🧑🏾‍✈", "🧑🏿‍✈", "👨‍✈", "👨🏻‍✈", "👨🏼‍✈", "👨🏽‍✈", "👨🏾‍✈", "👨🏿‍✈", "👩‍🚀", "👩🏻‍🚀", "👩🏼‍🚀", "👩🏽‍🚀", "👩🏾‍🚀", "👩🏿‍🚀", "🧑‍🚀", "🧑🏻‍🚀", "🧑🏼‍🚀", "🧑🏽‍🚀", "🧑🏾‍🚀", "🧑🏿‍🚀", "👨‍🚀", "👨🏻‍🚀", "👨🏼‍🚀", "👨🏽‍🚀", "👨🏾‍🚀", "👨🏿‍🚀", "👩‍⚖", "👩🏻‍⚖", "👩🏼‍⚖", "👩🏽‍⚖", "👩🏾‍⚖", "👩🏿‍⚖", "🧑‍⚖", "🧑🏻‍⚖", "🧑🏼‍⚖", "🧑🏽‍⚖", "🧑🏾‍⚖", "🧑🏿‍⚖", "👨‍⚖", "👨🏻‍⚖", "👨🏼‍⚖", "👨🏽‍⚖", "👨🏾‍⚖", "👨🏿‍⚖", "👰‍♀", "👰🏻‍♀", "👰🏼‍♀", "👰🏽‍♀", "👰🏾‍♀", "👰🏿‍♀", "👰", "👰🏻", "👰🏼", "👰🏽", "👰🏾", "👰🏿", "👰‍♂", "👰🏻‍♂", "👰🏼‍♂", "👰🏽‍♂", "👰🏾‍♂", "👰🏿‍♂", "🤵‍♀", "🤵🏻‍♀", "🤵🏼‍♀", "🤵🏽‍♀", "🤵🏾‍♀", "🤵🏿‍♀", "🤵", "🤵🏻", "🤵🏼", "🤵🏽", "🤵🏾", "🤵🏿", "🤵‍♂", "🤵🏻‍♂", "🤵🏼‍♂", "🤵🏽‍♂", "🤵🏾‍♂", "🤵🏿‍♂", "👸", "👸🏻", "👸🏼", "👸🏽", "👸🏾", "👸🏿", "🫅", "🫅🏻", "🫅🏼", "🫅🏽", "🫅🏾", "🫅🏿", "🤴", "🤴🏻", "🤴🏼", "🤴🏽", "🤴🏾", "🤴🏿", "🥷", "🥷🏻", "🥷🏼", "🥷🏽", "🥷🏾", "🥷🏿", "🦸‍♀", "🦸🏻‍♀", "🦸🏼‍♀", "🦸🏽‍♀", "🦸🏾‍♀", "🦸🏿‍♀", "🦸", "🦸🏻", "🦸🏼", "🦸🏽", "🦸🏾", "🦸🏿", "🦸‍♂", "🦸🏻‍♂", "🦸🏼‍♂", "🦸🏽‍♂", "🦸🏾‍♂", "🦸🏿‍♂", "🦹‍♀", "🦹🏻‍♀", "🦹🏼‍♀", "🦹🏽‍♀", "🦹🏾‍♀", "🦹🏿‍♀", "🦹", "🦹🏻", "🦹🏼", "🦹🏽", "🦹🏾", "🦹🏿", "🦹‍♂", "🦹🏻‍♂", "🦹🏼‍♂", "🦹🏽‍♂", "🦹🏾‍♂", "🦹🏿‍♂", "🤶", "🤶🏻", "🤶🏼", "🤶🏽", "🤶🏾", "🤶🏿", "🧑‍🎄", "🧑🏻‍🎄", "🧑🏼‍🎄", "🧑🏽‍🎄", "🧑🏾‍🎄", "🧑🏿‍🎄", "🎅", "🎅🏻", "🎅🏼", "🎅🏽", "🎅🏾", "🎅🏿", "🧙‍♀", "🧙🏻‍♀", "🧙🏼‍♀", "🧙🏽‍♀", "🧙🏾‍♀", "🧙🏿‍♀", "🧙", "🧙🏻", "🧙🏼", "🧙🏽", "🧙🏾", "🧙🏿", "🧙‍♂", "🧙🏻‍♂", "🧙🏼‍♂", "🧙🏽‍♂", "🧙🏾‍♂", "🧙🏿‍♂", "🧝‍♀", "🧝🏻‍♀", "🧝🏼‍♀", "🧝🏽‍♀", "🧝🏾‍♀", "🧝🏿‍♀", "🧝", "🧝🏻", "🧝🏼", "🧝🏽", "🧝🏾", "🧝🏿", "🧝‍♂", "🧝🏻‍♂", "🧝🏼‍♂", "🧝🏽‍♂", "🧝🏾‍♂", "🧝🏿‍♂", "🧌", "🧛‍♀", "🧛🏻‍♀", "🧛🏼‍♀", "🧛🏽‍♀", "🧛🏾‍♀", "🧛🏿‍♀", "🧛", "🧛🏻", "🧛🏼", "🧛🏽", "🧛🏾", "🧛🏿", "🧛‍♂", "🧛🏻‍♂", "🧛🏼‍♂", "🧛🏽‍♂", "🧛🏾‍♂", "🧛🏿‍♂", "🧟‍♀", "🧟", "🧟‍♂", "🧞‍♀", "🧞", "🧞‍♂", "🧜‍♀", "🧜🏻‍♀", "🧜🏼‍♀", "🧜🏽‍♀", "🧜🏾‍♀", "🧜🏿‍♀", "🧜", "🧜🏻", "🧜🏼", "🧜🏽", "🧜🏾", "🧜🏿", "🧜‍♂", "🧜🏻‍♂", "🧜🏼‍♂", "🧜🏽‍♂", "🧜🏾‍♂", "🧜🏿‍♂", "🧚‍♀", "🧚🏻‍♀", "🧚🏼‍♀", "🧚🏽‍♀", "🧚🏾‍♀", "🧚🏿‍♀", "🧚", "🧚🏻", "🧚🏼", "🧚🏽", "🧚🏾", "🧚🏿", "🧚‍♂", "🧚🏻‍♂", "🧚🏼‍♂", "🧚🏽‍♂", "🧚🏾‍♂", "🧚🏿‍♂", "👼", "👼🏻", "👼🏼", "👼🏽", "👼🏾", "👼🏿", "🤰", "🤰🏻", "🤰🏼", "🤰🏽", "🤰🏾", "🤰🏿", "🫄", "🫄🏻", "🫄🏼", "🫄🏽", "🫄🏾", "🫄🏿", "🫃", "🫃🏻", "🫃🏼", "🫃🏽", "🫃🏾", "🫃🏿", "🤱", "🤱🏻", "🤱🏼", "🤱🏽", "🤱🏾", "🤱🏿", "👩‍🍼", "👩🏻‍🍼", "👩🏼‍🍼", "👩🏽‍🍼", "👩🏾‍🍼", "👩🏿‍🍼", "🧑‍🍼", "🧑🏻‍🍼", "🧑🏼‍🍼", "🧑🏽‍🍼", "🧑🏾‍🍼", "🧑🏿‍🍼", "👨‍🍼", "👨🏻‍🍼", "👨🏼‍🍼", "👨🏽‍🍼", "👨🏾‍🍼", "👨🏿‍🍼", "🙇‍♀", "🙇🏻‍♀", "🙇🏼‍♀", "🙇🏽‍♀", "🙇🏾‍♀", "🙇🏿‍♀", "🙇", "🙇🏻", "🙇🏼", "🙇🏽", "🙇🏾", "🙇🏿", "🙇‍♂", "🙇🏻‍♂", "🙇🏼‍♂", "🙇🏽‍♂", "🙇🏾‍♂", "🙇🏿‍♂", "💁‍♀", "💁🏻‍♀", "💁🏼‍♀", "💁🏽‍♀", "💁🏾‍♀", "💁🏿‍♀", "💁", "💁🏻", "💁🏼", "💁🏽", "💁🏾", "💁🏿", "💁‍♂", "💁🏻‍♂", "💁🏼‍♂", "💁🏽‍♂", "💁🏾‍♂", "💁🏿‍♂", "🙅‍♀", "🙅🏻‍♀", "🙅🏼‍♀", "🙅🏽‍♀", "🙅🏾‍♀", "🙅🏿‍♀", "🙅", "🙅🏻", "🙅🏼", "🙅🏽", "🙅🏾", "🙅🏿", "🙅‍♂", "🙅🏻‍♂", "🙅🏼‍♂", "🙅🏽‍♂", "🙅🏾‍♂", "🙅🏿‍♂", "🙆‍♀", "🙆🏻‍♀", "🙆🏼‍♀", "🙆🏽‍♀", "🙆🏾‍♀", "🙆🏿‍♀", "🙆", "🙆🏻", "🙆🏼", "🙆🏽", "🙆🏾", "🙆🏿", "🙆‍♂", "🙆🏻‍♂", "🙆🏼‍♂", "🙆🏽‍♂", "🙆🏾‍♂", "🙆🏿‍♂", "🙋‍♀", "🙋🏻‍♀", "🙋🏼‍♀", "🙋🏽‍♀", "🙋🏾‍♀", "🙋🏿‍♀", "🙋", "🙋🏻", "🙋🏼", "🙋🏽", "🙋🏾", "🙋🏿", "🙋‍♂", "🙋🏻‍♂", "🙋🏼‍♂", "🙋🏽‍♂", "🙋🏾‍♂", "🙋🏿‍♂", "🧏‍♀", "🧏🏻‍♀", "🧏🏼‍♀", "🧏🏽‍♀", "🧏🏾‍♀", "🧏🏿‍♀", "🧏", "🧏🏻", "🧏🏼", "🧏🏽", "🧏🏾", "🧏🏿", "🧏‍♂", "🧏🏻‍♂", "🧏🏼‍♂", "🧏🏽‍♂", "🧏🏾‍♂", 
"🧏🏿‍♂", "🤦‍♀", "🤦🏻‍♀", "🤦🏼‍♀", "🤦🏽‍♀", "🤦🏾‍♀", "🤦🏿‍♀", "🤦", "🤦🏻", "🤦🏼", "🤦🏽", "🤦🏾", "🤦🏿", "🤦‍♂", "🤦🏻‍♂", "🤦🏼‍♂", "🤦🏽‍♂", "🤦🏾‍♂", "🤦🏿‍♂", "🤷‍♀", "🤷🏻‍♀", "🤷🏼‍♀", "🤷🏽‍♀", "🤷🏾‍♀", "🤷🏿‍♀", "🤷", "🤷🏻", "🤷🏼", "🤷🏽", "🤷🏾", "🤷🏿", "🤷‍♂", "🤷🏻‍♂", "🤷🏼‍♂", "🤷🏽‍♂", "🤷🏾‍♂", "🤷🏿‍♂", "🙎‍♀", "🙎🏻‍♀", "🙎🏼‍♀", "🙎🏽‍♀", "🙎🏾‍♀", "🙎🏿‍♀", "🙎", "🙎🏻", "🙎🏼", "🙎🏽", "🙎🏾", "🙎🏿", "🙎‍♂", "🙎🏻‍♂", "🙎🏼‍♂", "🙎🏽‍♂", "🙎🏾‍♂", "🙎🏿‍♂", "🙍‍♀", "🙍🏻‍♀", "🙍🏼‍♀", "🙍🏽‍♀", "🙍🏾‍♀", "🙍🏿‍♀", "🙍", "🙍🏻", "🙍🏼", "🙍🏽", "🙍🏾", "🙍🏿", "🙍‍♂", "🙍🏻‍♂", "🙍🏼‍♂", "🙍🏽‍♂", "🙍🏾‍♂", "🙍🏿‍♂", "💇‍♀", "💇🏻‍♀", "💇🏼‍♀", "💇🏽‍♀", "💇🏾‍♀", "💇🏿‍♀", "💇", "💇🏻", "💇🏼", "💇🏽", "💇🏾", "💇🏿", "💇‍♂", "💇🏻‍♂", "💇🏼‍♂", "💇🏽‍♂", "💇🏾‍♂", "💇🏿‍♂", "💆‍♀", "💆🏻‍♀", "💆🏼‍♀", "💆🏽‍♀", "💆🏾‍♀", "💆🏿‍♀", "💆", "💆🏻", "💆🏼", "💆🏽", "💆🏾", "💆🏿", "💆‍♂", "💆🏻‍♂", "💆🏼‍♂", "💆🏽‍♂", "💆🏾‍♂", "💆🏿‍♂", "🧖‍♀", "🧖🏻‍♀", "🧖🏼‍♀", "🧖🏽‍♀", "🧖🏾‍♀", "🧖🏿‍♀", "🧖", "🧖🏻", "🧖🏼", "🧖🏽", "🧖🏾", "🧖🏿", "🧖‍♂", "🧖🏻‍♂", "🧖🏼‍♂", "🧖🏽‍♂", "🧖🏾‍♂", "🧖🏿‍♂", "💅", "💅🏻", "💅🏼", "💅🏽", "💅🏾", "💅🏿", "🤳", "🤳🏻", "🤳🏼", "🤳🏽", "🤳🏾", "🤳🏿", "💃", "💃🏻", "💃🏼", "💃🏽", "💃🏾", "💃🏿", "🕺", "🕺🏻", "🕺🏼", "🕺🏽", "🕺🏾", "🕺🏿", "👯‍♀", "👯", "👯‍♂", "🕴", "🕴🏻", "🕴🏼", "🕴🏽", "🕴🏾", "🕴🏿", "👩‍🦽", "👩🏻‍🦽", "👩🏼‍🦽", "👩🏽‍🦽", "👩🏾‍🦽", "👩🏿‍🦽", "🧑‍🦽", "🧑🏻‍🦽", "🧑🏼‍🦽", "🧑🏽‍🦽", "🧑🏾‍🦽", "🧑🏿‍🦽", "👨‍🦽", "👨🏻‍🦽", "👨🏼‍🦽", "👨🏽‍🦽", "👨🏾‍🦽", "👨🏿‍🦽", "👩‍🦼", "👩🏻‍🦼", "👩🏼‍🦼", "👩🏽‍🦼", "👩🏾‍🦼", "👩🏿‍🦼", "🧑‍🦼", "🧑🏻‍🦼", "🧑🏼‍🦼", "🧑🏽‍🦼", "🧑🏾‍🦼", "🧑🏿‍🦼", "👨‍🦼", "👨🏻‍🦼", "👨🏼‍🦼", "👨🏽‍🦼", "👨🏾‍🦼", "👨🏿‍🦼", "🚶‍♀", "🚶🏻‍♀", "🚶🏼‍♀", "🚶🏽‍♀", "🚶🏾‍♀", "🚶🏿‍♀", "🚶", "🚶🏻", "🚶🏼", "🚶🏽", "🚶🏾", "🚶🏿", "🚶‍♂", "🚶🏻‍♂", "🚶🏼‍♂", "🚶🏽‍♂", "🚶🏾‍♂", "🚶🏿‍♂", "👩‍🦯", "👩🏻‍🦯", "👩🏼‍🦯", "👩🏽‍🦯", "👩🏾‍🦯", "👩🏿‍🦯", "🧑‍🦯", "🧑🏻‍🦯", "🧑🏼‍🦯", "🧑🏽‍🦯", "🧑🏾‍🦯", "🧑🏿‍🦯", "👨‍🦯", "👨🏻‍🦯", "👨🏼‍🦯", "👨🏽‍🦯", "👨🏾‍🦯", "👨🏿‍🦯", "🧎‍♀", "🧎🏻‍♀", "🧎🏼‍♀", "🧎🏽‍♀", "🧎🏾‍♀", "🧎🏿‍♀", "🧎", "🧎🏻", "🧎🏼", "🧎🏽", "🧎🏾", "🧎🏿", "🧎‍♂", "🧎🏻‍♂", "🧎🏼‍♂", "🧎🏽‍♂", "🧎🏾‍♂", "🧎🏿‍♂", "🏃‍♀", "🏃🏻‍♀", "🏃🏼‍♀", "🏃🏽‍♀", "🏃🏾‍♀", "🏃🏿‍♀", "🏃", "🏃🏻", "🏃🏼", "🏃🏽", "🏃🏾", "🏃🏿", "🏃‍♂", "🏃🏻‍♂", "🏃🏼‍♂", "🏃🏽‍♂", "🏃🏾‍♂", "🏃🏿‍♂", "🧍‍♀", "🧍🏻‍♀", "🧍🏼‍♀", "🧍🏽‍♀", "🧍🏾‍♀", "🧍🏿‍♀", "🧍", "🧍🏻", "🧍🏼", "🧍🏽", "🧍🏾", "🧍🏿", "🧍‍♂", "🧍🏻‍♂", "🧍🏼‍♂", "🧍🏽‍♂", "🧍🏾‍♂", "🧍🏿‍♂", "👫", "👫🏻", "👩🏻‍🤝‍👨🏼", "👩🏻‍🤝‍👨🏽", "👩🏻‍🤝‍👨🏾", "👩🏻‍🤝‍👨🏿", "👩🏼‍🤝‍👨🏻", "👫🏼", "👩🏼‍🤝‍👨🏽", "👩🏼‍🤝‍👨🏾", "👩🏼‍🤝‍👨🏿", "👩🏽‍🤝‍👨🏻", "👩🏽‍🤝‍👨🏼", "👫🏽", "👩🏽‍🤝‍👨🏾", "👩🏽‍🤝‍👨🏿", "👩🏾‍🤝‍👨🏻", "👩🏾‍🤝‍👨🏼", "👩🏾‍🤝‍👨🏽", "👫🏾", "👩🏾‍🤝‍👨🏿", "👩🏿‍🤝‍👨🏻", "👩🏿‍🤝‍👨🏼", "👩🏿‍🤝‍👨🏽", "👩🏿‍🤝‍👨🏾", "👫🏿", "👭", "👭🏻", "👩🏻‍🤝‍👩🏼", "👩🏻‍🤝‍👩🏽", "👩🏻‍🤝‍👩🏾", "👩🏻‍🤝‍👩🏿", "👩🏼‍🤝‍👩🏻", "👭🏼", "👩🏼‍🤝‍👩🏽", "👩🏼‍🤝‍👩🏾", "👩🏼‍🤝‍👩🏿", "👩🏽‍🤝‍👩🏻", "👩🏽‍🤝‍👩🏼", "👭🏽", "👩🏽‍🤝‍👩🏾", "👩🏽‍🤝‍👩🏿", "👩🏾‍🤝‍👩🏻", "👩🏾‍🤝‍👩🏼", "👩🏾‍🤝‍👩🏽", "👭🏾", "👩🏾‍🤝‍👩🏿", "👩🏿‍🤝‍👩🏻", "👩🏿‍🤝‍👩🏼", "👩🏿‍🤝‍👩🏽", "👩🏿‍🤝‍👩🏾", "👭🏿", "👬", "👬🏻", "👨🏻‍🤝‍👨🏼", "👨🏻‍🤝‍👨🏽", "👨🏻‍🤝‍👨🏾", "👨🏻‍🤝‍👨🏿", "👨🏼‍🤝‍👨🏻", "👬🏼", "👨🏼‍🤝‍👨🏽", "👨🏼‍🤝‍👨🏾", "👨🏼‍🤝‍👨🏿", "👨🏽‍🤝‍👨🏻", "👨🏽‍🤝‍👨🏼", "👬🏽", "👨🏽‍🤝‍👨🏾", "👨🏽‍🤝‍👨🏿", "👨🏾‍🤝‍👨🏻", "👨🏾‍🤝‍👨🏼", "👨🏾‍🤝‍👨🏽", "👬🏾", "👨🏾‍🤝‍👨🏿", "👨🏿‍🤝‍👨🏻", "👨🏿‍🤝‍👨🏼", "👨🏿‍🤝‍👨🏽", "👨🏿‍🤝‍👨🏾", "👬🏿", "👩‍❤‍👨", "👩🏻‍❤‍👨🏻", "👩🏻‍❤‍👨🏼", "👩🏻‍❤‍👨🏽", "👩🏻‍❤‍👨🏾", "👩🏻‍❤‍👨🏿", "👩🏼‍❤‍👨🏻", "👩🏼‍❤‍👨🏼", "👩🏼‍❤‍👨🏽", "👩🏼‍❤‍👨🏾", "👩🏼‍❤‍👨🏿", "👩🏽‍❤‍👨🏻", "👩🏽‍❤‍👨🏼", "👩🏽‍❤‍👨🏽", "👩🏽‍❤‍👨🏾", "👩🏽‍❤‍👨🏿", "👩🏾‍❤‍👨🏻", "👩🏾‍❤‍👨🏼", "👩🏾‍❤‍👨🏽", "👩🏾‍❤‍👨🏾", "👩🏾‍❤‍👨🏿", "👩🏿‍❤‍👨🏻", "👩🏿‍❤‍👨🏼", "👩🏿‍❤‍👨🏽", "👩🏿‍❤‍👨🏾", "👩🏿‍❤‍👨🏿", "👩‍❤‍👩", "👩🏻‍❤‍👩🏻", "👩🏻‍❤‍👩🏼", "👩🏻‍❤‍👩🏽", "👩🏻‍❤‍👩🏾", "👩🏻‍❤‍👩🏿", "👩🏼‍❤‍👩🏻", "👩🏼‍❤‍👩🏼", "👩🏼‍❤‍👩🏽", "👩🏼‍❤‍👩🏾", "👩🏼‍❤‍👩🏿", "👩🏽‍❤‍👩🏻", "👩🏽‍❤‍👩🏼", "👩🏽‍❤‍👩🏽", "👩🏽‍❤‍👩🏾", "👩🏽‍❤‍👩🏿", "👩🏾‍❤‍👩🏻", "👩🏾‍❤‍👩🏼", "👩🏾‍❤‍👩🏽", "👩🏾‍❤‍👩🏾", "👩🏾‍❤‍👩🏿", "👩🏿‍❤‍👩🏻", "👩🏿‍❤‍👩🏼", "👩🏿‍❤‍👩🏽", "👩🏿‍❤‍👩🏾", "👩🏿‍❤‍👩🏿", "💑", "🧑🏻‍❤‍🧑🏻", "🧑🏻‍❤‍🧑🏼", "🧑🏻‍❤‍🧑🏽", "🧑🏻‍❤‍🧑🏾", "🧑🏻‍❤‍🧑🏿", "🧑🏼‍❤‍🧑🏻", "🧑🏼‍❤‍🧑🏼", "🧑🏼‍❤‍🧑🏽", "🧑🏼‍❤‍🧑🏾", "🧑🏼‍❤‍🧑🏿", "🧑🏽‍❤‍🧑🏻", "🧑🏽‍❤‍🧑🏼", "🧑🏽‍❤‍🧑🏽", "🧑🏽‍❤‍🧑🏾", "🧑🏽‍❤‍🧑🏿", 
"🧑🏾‍❤‍🧑🏻", "🧑🏾‍❤‍🧑🏼", "🧑🏾‍❤‍🧑🏽", "🧑🏾‍❤‍🧑🏾", "🧑🏾‍❤‍🧑🏿", "🧑🏿‍❤‍🧑🏻", "🧑🏿‍❤‍🧑🏼", "🧑🏿‍❤‍🧑🏽", "🧑🏿‍❤‍🧑🏾", "🧑🏿‍❤‍🧑🏿", "👨‍❤‍👨", "👨🏻‍❤‍👨🏻", "👨🏻‍❤‍👨🏼", "👨🏻‍❤‍👨🏽", "👨🏻‍❤‍👨🏾", "👨🏻‍❤‍👨🏿", "👨🏼‍❤‍👨🏻", "👨🏼‍❤‍👨🏼", "👨🏼‍❤‍👨🏽", "👨🏼‍❤‍👨🏾", "👨🏼‍❤‍👨🏿", "👨🏽‍❤‍👨🏻", "👨🏽‍❤‍👨🏼", "👨🏽‍❤‍👨🏽", "👨🏽‍❤‍👨🏾", "👨🏽‍❤‍👨🏿", "👨🏾‍❤‍👨🏻", "👨🏾‍❤‍👨🏼", "👨🏾‍❤‍👨🏽", "👨🏾‍❤‍👨🏾", "👨🏾‍❤‍👨🏿", "👨🏿‍❤‍👨🏻", "👨🏿‍❤‍👨🏼", "👨🏿‍❤‍👨🏽", "👨🏿‍❤‍👨🏾", "👨🏿‍❤‍👨🏿", "👩‍❤‍💋‍👨", "👩🏻‍❤‍💋‍👨🏻", "👩🏻‍❤‍💋‍👨🏼", "👩🏻‍❤‍💋‍👨🏽", "👩🏻‍❤‍💋‍👨🏾", "👩🏻‍❤‍💋‍👨🏿", "👩🏼‍❤‍💋‍👨🏻", "👩🏼‍❤‍💋‍👨🏼", "👩🏼‍❤‍💋‍👨🏽", "👩🏼‍❤‍💋‍👨🏾", "👩🏼‍❤‍💋‍👨🏿", "👩🏽‍❤‍💋‍👨🏻", "👩🏽‍❤‍💋‍👨🏼", "👩🏽‍❤‍💋‍👨🏽", "👩🏽‍❤‍💋‍👨🏾", "👩🏽‍❤‍💋‍👨🏿", "👩🏾‍❤‍💋‍👨🏻", "👩🏾‍❤‍💋‍👨🏼", "👩🏾‍❤‍💋‍👨🏽", "👩🏾‍❤‍💋‍👨🏾", "👩🏾‍❤‍💋‍👨🏿", "👩🏿‍❤‍💋‍👨🏻", "👩🏿‍❤‍💋‍👨🏼", "👩🏿‍❤‍💋‍👨🏽", "👩🏿‍❤‍💋‍👨🏾", "👩🏿‍❤‍💋‍👨🏿", "👩‍❤‍💋‍👩", "👩🏻‍❤‍💋‍👩🏻", "👩🏻‍❤‍💋‍👩🏼", "👩🏻‍❤‍💋‍👩🏽", "👩🏻‍❤‍💋‍👩🏾", "👩🏻‍❤‍💋‍👩🏿", "👩🏼‍❤‍💋‍👩🏻", "👩🏼‍❤‍💋‍👩🏼", "👩🏼‍❤‍💋‍👩🏽", "👩🏼‍❤‍💋‍👩🏾", "👩🏼‍❤‍💋‍👩🏿", "👩🏽‍❤‍💋‍👩🏻", "👩🏽‍❤‍💋‍👩🏼", "👩🏽‍❤‍💋‍👩🏽", "👩🏽‍❤‍💋‍👩🏾", "👩🏽‍❤‍💋‍👩🏿", "👩🏾‍❤‍💋‍👩🏻", "👩🏾‍❤‍💋‍👩🏼", "👩🏾‍❤‍💋‍👩🏽", "👩🏾‍❤‍💋‍👩🏾", "👩🏾‍❤‍💋‍👩🏿", "👩🏿‍❤‍💋‍👩🏻", "👩🏿‍❤‍💋‍👩🏼", "👩🏿‍❤‍💋‍👩🏽", "👩🏿‍❤‍💋‍👩🏾", "👩🏿‍❤‍💋‍👩🏿", "💏", "🧑🏻‍❤‍💋‍🧑🏻", "🧑🏻‍❤‍💋‍🧑🏼", "🧑🏻‍❤‍💋‍🧑🏽", "🧑🏻‍❤‍💋‍🧑🏾", "🧑🏻‍❤‍💋‍🧑🏿", "🧑🏼‍❤‍💋‍🧑🏻", "🧑🏼‍❤‍💋‍🧑🏼", "🧑🏼‍❤‍💋‍🧑🏽", "🧑🏼‍❤‍💋‍🧑🏾", "🧑🏼‍❤‍💋‍🧑🏿", "🧑🏽‍❤‍💋‍🧑🏻", "🧑🏽‍❤‍💋‍🧑🏼", "🧑🏽‍❤‍💋‍🧑🏽", "🧑🏽‍❤‍💋‍🧑🏾", "🧑🏽‍❤‍💋‍🧑🏿", "🧑🏾‍❤‍💋‍🧑🏻", "🧑🏾‍❤‍💋‍🧑🏼", "🧑🏾‍❤‍💋‍🧑🏽", "🧑🏾‍❤‍💋‍🧑🏾", "🧑🏾‍❤‍💋‍🧑🏿", "🧑🏿‍❤‍💋‍🧑🏻", "🧑🏿‍❤‍💋‍🧑🏼", "🧑🏿‍❤‍💋‍🧑🏽", "🧑🏿‍❤‍💋‍🧑🏾", "🧑🏿‍❤‍💋‍🧑🏿", "👨‍❤‍💋‍👨", "👨🏻‍❤‍💋‍👨🏻", "👨🏻‍❤‍💋‍👨🏼", "👨🏻‍❤‍💋‍👨🏽", "👨🏻‍❤‍💋‍👨🏾", "👨🏻‍❤‍💋‍👨🏿", "👨🏼‍❤‍💋‍👨🏻", "👨🏼‍❤‍💋‍👨🏼", "👨🏼‍❤‍💋‍👨🏽", "👨🏼‍❤‍💋‍👨🏾", "👨🏼‍❤‍💋‍👨🏿", "👨🏽‍❤‍💋‍👨🏻", "👨🏽‍❤‍💋‍👨🏼", "👨🏽‍❤‍💋‍👨🏽", "👨🏽‍❤‍💋‍👨🏾", "👨🏽‍❤‍💋‍👨🏿", "👨🏾‍❤‍💋‍👨🏻", "👨🏾‍❤‍💋‍👨🏼", "👨🏾‍❤‍💋‍👨🏽", "👨🏾‍❤‍💋‍👨🏾", "👨🏾‍❤‍💋‍👨🏿", "👨🏿‍❤‍💋‍👨🏻", "👨🏿‍❤‍💋‍👨🏼", "👨🏿‍❤‍💋‍👨🏽", "👨🏿‍❤‍💋‍👨🏾", "👨🏿‍❤‍💋‍👨🏿", "👨‍👩‍👦", "👨‍👩‍👧", "👨‍👩‍👧‍👦", "👨‍👩‍👦‍👦", "👨‍👩‍👧‍👧", "👩‍👩‍👦", "👩‍👩‍👧", "👩‍👩‍👧‍👦", "👩‍👩‍👦‍👦", "👩‍👩‍👧‍👧", "👨‍👨‍👦", "👨‍👨‍👧", "👨‍👨‍👧‍👦", "👨‍👨‍👦‍👦", "👨‍👨‍👧‍👧", "👩‍👦", "👩‍👧", "👩‍👧‍👦", "👩‍👦‍👦", "👩‍👧‍👧", "👨‍👦", "👨‍👧", "👨‍👧‍👦", "👨‍👦‍👦", "👨‍👧‍👧", "🪢", "🧶", "🧵", "🪡", "🧥", "🥼", "🦺", "👚", "👕", "👖", "🩲", "🩳", "👔", "👗", "👙", "🩱", "👘", "🥻", "🩴", "🥿", "👠", "👡", "👢", "👞", "👟", "🥾", "🧦", "🧤", "🧣", "🎩", "🧢", "👒", "🎓", "⛑", "🪖", "👑", "💍", "👝", "👛", "👜", "💼", "🎒", "🧳", "👓", "🕶", "🥽", "🌂" + "😀", "😃", "😄", "😁", "😆", "🥹", "😅", "😂", "🤣", "🥲", "☺", "😊", "😇", "🙂", "🙃", "😉", "😌", "😍", "🥰", "😘", "😗", "😙", "😚", "😋", "😛", "😝", "😜", "🤪", "🤨", "🧐", "🤓", "😎", "🥸", "🤩", "🥳", "😏", "😒", "😞", "😔", "😟", "😕", "🙁", "☹", "😣", "😖", "😫", "😩", "🥺", "😢", "😭", "😤", "😠", "😡", "🤬", "🤯", "😳", "🥵", "🥶", "😶‍🌫", "😱", "😨", "😰", "😥", "😓", "🤗", "🤔", "🫣", "🤭", "🫢", "🫡", "🤫", "🫠", "🤥", "😶", "🫥", "😐", "🫤", "😑", "🫨", "😬", "🙄", "😯", "😦", "😧", "😮", "😲", "🥱", "😴", "🤤", "😪", "😮‍💨", "😵", "😵‍💫", "🤐", "🥴", "🤢", "🤮", "🤧", "😷", "🤒", "🤕", "🤑", "🤠", "😈", "👿", "👹", "👺", "🤡", "💩", "👻", "💀", "☠", "👽", "👾", "🤖", "🎃", "😺", "😸", "😹", "😻", "😼", "😽", "🙀", "😿", "😾", "🫶", "🫶🏻", "🫶🏼", "🫶🏽", "🫶🏾", "🫶🏿", "🤲", "🤲🏻", "🤲🏼", "🤲🏽", "🤲🏾", "🤲🏿", "👐", "👐🏻", "👐🏼", "👐🏽", "👐🏾", "👐🏿", "🙌", "🙌🏻", "🙌🏼", "🙌🏽", "🙌🏾", "🙌🏿", "👏", "👏🏻", "👏🏼", "👏🏽", "👏🏾", "👏🏿", "🤝", "👍", "👍🏻", "👍🏼", "👍🏽", "👍🏾", "👍🏿", "👎", "👎🏻", "👎🏼", "👎🏽", "👎🏾", "👎🏿", "👊", "👊🏻", "👊🏼", "👊🏽", "👊🏾", "👊🏿", "✊", "✊🏻", "✊🏼", "✊🏽", "✊🏾", "✊🏿", "🤛", "🤛🏻", "🤛🏼", "🤛🏽", "🤛🏾", "🤛🏿", "🤜", "🤜🏻", "🤜🏼", "🤜🏽", "🤜🏾", "🤜🏿", "🫷", "🫷🏻", "🫷🏼", "🫷🏽", "🫷🏾", "🫷🏿", "🫸", "🫸🏻", "🫸🏼", "🫸🏽", "🫸🏾", "🫸🏿", "🤞", "🤞🏻", "🤞🏼", "🤞🏽", "🤞🏾", "🤞🏿", "✌", "✌🏻", "✌🏼", "✌🏽", "✌🏾", "✌🏿", "🫰", "🫰🏻", "🫰🏼", "🫰🏽", "🫰🏾", "🫰🏿", "🤟", "🤟🏻", "🤟🏼", "🤟🏽", "🤟🏾", "🤟🏿", "🤘", "🤘🏻", "🤘🏼", "🤘🏽", "🤘🏾", "🤘🏿", "👌", "👌🏻", "👌🏼", "👌🏽", "👌🏾", "👌🏿", "🤌", "🤌🏻", "🤌🏼", "🤌🏽", "🤌🏾", "🤌🏿", "🤏", "🤏🏻", 
"🤏🏼", "🤏🏽", "🤏🏾", "🤏🏿", "🫳", "🫳🏻", "🫳🏼", "🫳🏽", "🫳🏾", "🫳🏿", "🫴", "🫴🏻", "🫴🏼", "🫴🏽", "🫴🏾", "🫴🏿", "👈", "👈🏻", "👈🏼", "👈🏽", "👈🏾", "👈🏿", "👉", "👉🏻", "👉🏼", "👉🏽", "👉🏾", "👉🏿", "👆", "👆🏻", "👆🏼", "👆🏽", "👆🏾", "👆🏿", "👇", "👇🏻", "👇🏼", "👇🏽", "👇🏾", "👇🏿", "☝", "☝🏻", "☝🏼", "☝🏽", "☝🏾", "☝🏿", "✋", "✋🏻", "✋🏼", "✋🏽", "✋🏾", "✋🏿", "🤚", "🤚🏻", "🤚🏼", "🤚🏽", "🤚🏾", "🤚🏿", "🖐", "🖐🏻", "🖐🏼", "🖐🏽", "🖐🏾", "🖐🏿", "🖖", "🖖🏻", "🖖🏼", "🖖🏽", "🖖🏾", "🖖🏿", "👋", "👋🏻", "👋🏼", "👋🏽", "👋🏾", "👋🏿", "🤙", "🤙🏻", "🤙🏼", "🤙🏽", "🤙🏾", "🤙🏿", "🫲", "🫲🏻", "🫲🏼", "🫲🏽", "🫲🏾", "🫲🏿", "🫱", "🫱🏻", "🫱🏼", "🫱🏽", "🫱🏾", "🫱🏿", "💪", "💪🏻", "💪🏼", "💪🏽", "💪🏾", "💪🏿", "🦾", "🖕", "🖕🏻", "🖕🏼", "🖕🏽", "🖕🏾", "🖕🏿", "✍", "✍🏻", "✍🏼", "✍🏽", "✍🏾", "✍🏿", "🙏", "🙏🏻", "🙏🏼", "🙏🏽", "🙏🏾", "🙏🏿", "🫵", "🫵🏻", "🫵🏼", "🫵🏽", "🫵🏾", "🫵🏿", "🦶", "🦶🏻", "🦶🏼", "🦶🏽", "🦶🏾", "🦶🏿", "🦵", "🦵🏻", "🦵🏼", "🦵🏽", "🦵🏾", "🦵🏿", "🦿", "💄", "💋", "👄", "🫦", "🦷", "👅", "👂", "👂🏻", "👂🏼", "👂🏽", "👂🏾", "👂🏿", "🦻", "🦻🏻", "🦻🏼", "🦻🏽", "🦻🏾", "🦻🏿", "👃", "👃🏻", "👃🏼", "👃🏽", "👃🏾", "👃🏿", "👣", "👁", "👀", "🫀", "🫁", "🧠", "🗣", "👤", "👥", "🫂", "👶", "👶🏻", "👶🏼", "👶🏽", "👶🏾", "👶🏿", "👧", "👧🏻", "👧🏼", "👧🏽", "👧🏾", "👧🏿", "🧒", "🧒🏻", "🧒🏼", "🧒🏽", "🧒🏾", "🧒🏿", "👦", "👦🏻", "👦🏼", "👦🏽", "👦🏾", "👦🏿", "👩", "👩🏻", "👩🏼", "👩🏽", "👩🏾", "👩🏿", "🧑", "🧑🏻", "🧑🏼", "🧑🏽", "🧑🏾", "🧑🏿", "👨", "👨🏻", "👨🏼", "👨🏽", "👨🏾", "👨🏿", "👩‍🦱", "👩🏻‍🦱", "👩🏼‍🦱", "👩🏽‍🦱", "👩🏾‍🦱", "👩🏿‍🦱", "🧑‍🦱", "🧑🏻‍🦱", "🧑🏼‍🦱", "🧑🏽‍🦱", "🧑🏾‍🦱", "🧑🏿‍🦱", "👨‍🦱", "👨🏻‍🦱", "👨🏼‍🦱", "👨🏽‍🦱", "👨🏾‍🦱", "👨🏿‍🦱", "👩‍🦰", "👩🏻‍🦰", "👩🏼‍🦰", "👩🏽‍🦰", "👩🏾‍🦰", "👩🏿‍🦰", "🧑‍🦰", "🧑🏻‍🦰", "🧑🏼‍🦰", "🧑🏽‍🦰", "🧑🏾‍🦰", "🧑🏿‍🦰", "👨‍🦰", "👨🏻‍🦰", "👨🏼‍🦰", "👨🏽‍🦰", "👨🏾‍🦰", "👨🏿‍🦰", "👱‍♀", "👱🏻‍♀", "👱🏼‍♀", "👱🏽‍♀", "👱🏾‍♀", "👱🏿‍♀", "👱", "👱🏻", "👱🏼", "👱🏽", "👱🏾", "👱🏿", "👱‍♂", "👱🏻‍♂", "👱🏼‍♂", "👱🏽‍♂", "👱🏾‍♂", "👱🏿‍♂", "👩‍🦳", "👩🏻‍🦳", "👩🏼‍🦳", "👩🏽‍🦳", "👩🏾‍🦳", "👩🏿‍🦳", "🧑‍🦳", "🧑🏻‍🦳", "🧑🏼‍🦳", "🧑🏽‍🦳", "🧑🏾‍🦳", "🧑🏿‍🦳", "👨‍🦳", "👨🏻‍🦳", "👨🏼‍🦳", "👨🏽‍🦳", "👨🏾‍🦳", "👨🏿‍🦳", "👩‍🦲", "👩🏻‍🦲", "👩🏼‍🦲", "👩🏽‍🦲", "👩🏾‍🦲", "👩🏿‍🦲", "🧑‍🦲", "🧑🏻‍🦲", "🧑🏼‍🦲", "🧑🏽‍🦲", "🧑🏾‍🦲", "🧑🏿‍🦲", "👨‍🦲", "👨🏻‍🦲", "👨🏼‍🦲", "👨🏽‍🦲", "👨🏾‍🦲", "👨🏿‍🦲", "🧔‍♀", "🧔🏻‍♀", "🧔🏼‍♀", "🧔🏽‍♀", "🧔🏾‍♀", "🧔🏿‍♀", "🧔", "🧔🏻", "🧔🏼", "🧔🏽", "🧔🏾", "🧔🏿", "🧔‍♂", "🧔🏻‍♂", "🧔🏼‍♂", "🧔🏽‍♂", "🧔🏾‍♂", "🧔🏿‍♂", "👵", "👵🏻", "👵🏼", "👵🏽", "👵🏾", "👵🏿", "🧓", "🧓🏻", "🧓🏼", "🧓🏽", "🧓🏾", "🧓🏿", "👴", "👴🏻", "👴🏼", "👴🏽", "👴🏾", "👴🏿", "👲", "👲🏻", "👲🏼", "👲🏽", "👲🏾", "👲🏿", "👳‍♀", "👳🏻‍♀", "👳🏼‍♀", "👳🏽‍♀", "👳🏾‍♀", "👳🏿‍♀", "👳", "👳🏻", "👳🏼", "👳🏽", "👳🏾", "👳🏿", "👳‍♂", "👳🏻‍♂", "👳🏼‍♂", "👳🏽‍♂", "👳🏾‍♂", "👳🏿‍♂", "🧕", "🧕🏻", "🧕🏼", "🧕🏽", "🧕🏾", "🧕🏿", "👮‍♀", "👮🏻‍♀", "👮🏼‍♀", "👮🏽‍♀", "👮🏾‍♀", "👮🏿‍♀", "👮", "👮🏻", "👮🏼", "👮🏽", "👮🏾", "👮🏿", "👮‍♂", "👮🏻‍♂", "👮🏼‍♂", "👮🏽‍♂", "👮🏾‍♂", "👮🏿‍♂", "👷‍♀", "👷🏻‍♀", "👷🏼‍♀", "👷🏽‍♀", "👷🏾‍♀", "👷🏿‍♀", "👷", "👷🏻", "👷🏼", "👷🏽", "👷🏾", "👷🏿", "👷‍♂", "👷🏻‍♂", "👷🏼‍♂", "👷🏽‍♂", "👷🏾‍♂", "👷🏿‍♂", "💂‍♀", "💂🏻‍♀", "💂🏼‍♀", "💂🏽‍♀", "💂🏾‍♀", "💂🏿‍♀", "💂", "💂🏻", "💂🏼", "💂🏽", "💂🏾", "💂🏿", "💂‍♂", "💂🏻‍♂", "💂🏼‍♂", "💂🏽‍♂", "💂🏾‍♂", "💂🏿‍♂", "🕵‍♀", "🕵🏻‍♀", "🕵🏼‍♀", "🕵🏽‍♀", "🕵🏾‍♀", "🕵🏿‍♀", "🕵", "🕵🏻", "🕵🏼", "🕵🏽", "🕵🏾", "🕵🏿", "🕵‍♂", "🕵🏻‍♂", "🕵🏼‍♂", "🕵🏽‍♂", "🕵🏾‍♂", "🕵🏿‍♂", "👩‍⚕", "👩🏻‍⚕", "👩🏼‍⚕", "👩🏽‍⚕", "👩🏾‍⚕", "👩🏿‍⚕", "🧑‍⚕", "🧑🏻‍⚕", "🧑🏼‍⚕", "🧑🏽‍⚕", "🧑🏾‍⚕", "🧑🏿‍⚕", "👨‍⚕", "👨🏻‍⚕", "👨🏼‍⚕", "👨🏽‍⚕", "👨🏾‍⚕", "👨🏿‍⚕", "👩‍🌾", "👩🏻‍🌾", "👩🏼‍🌾", "👩🏽‍🌾", "👩🏾‍🌾", "👩🏿‍🌾", "🧑‍🌾", "🧑🏻‍🌾", "🧑🏼‍🌾", "🧑🏽‍🌾", "🧑🏾‍🌾", "🧑🏿‍🌾", "👨‍🌾", "👨🏻‍🌾", "👨🏼‍🌾", "👨🏽‍🌾", "👨🏾‍🌾", "👨🏿‍🌾", "👩‍🍳", "👩🏻‍🍳", "👩🏼‍🍳", "👩🏽‍🍳", "👩🏾‍🍳", "👩🏿‍🍳", "🧑‍🍳", "🧑🏻‍🍳", "🧑🏼‍🍳", "🧑🏽‍🍳", "🧑🏾‍🍳", "🧑🏿‍🍳", "👨‍🍳", "👨🏻‍🍳", "👨🏼‍🍳", "👨🏽‍🍳", "👨🏾‍🍳", "👨🏿‍🍳", "👩‍🎓", "👩🏻‍🎓", "👩🏼‍🎓", "👩🏽‍🎓", "👩🏾‍🎓", "👩🏿‍🎓", "🧑‍🎓", "🧑🏻‍🎓", "🧑🏼‍🎓", "🧑🏽‍🎓", "🧑🏾‍🎓", "🧑🏿‍🎓", "👨‍🎓", "👨🏻‍🎓", "👨🏼‍🎓", "👨🏽‍🎓", "👨🏾‍🎓", "👨🏿‍🎓", "👩‍🎤", "👩🏻‍🎤", "👩🏼‍🎤", "👩🏽‍🎤", "👩🏾‍🎤", "👩🏿‍🎤", "🧑‍🎤", "🧑🏻‍🎤", "🧑🏼‍🎤", "🧑🏽‍🎤", "🧑🏾‍🎤", "🧑🏿‍🎤", "👨‍🎤", "👨🏻‍🎤", 
"👨🏼‍🎤", "👨🏽‍🎤", "👨🏾‍🎤", "👨🏿‍🎤", "👩‍🏫", "👩🏻‍🏫", "👩🏼‍🏫", "👩🏽‍🏫", "👩🏾‍🏫", "👩🏿‍🏫", "🧑‍🏫", "🧑🏻‍🏫", "🧑🏼‍🏫", "🧑🏽‍🏫", "🧑🏾‍🏫", "🧑🏿‍🏫", "👨‍🏫", "👨🏻‍🏫", "👨🏼‍🏫", "👨🏽‍🏫", "👨🏾‍🏫", "👨🏿‍🏫", "👩‍🏭", "👩🏻‍🏭", "👩🏼‍🏭", "👩🏽‍🏭", "👩🏾‍🏭", "👩🏿‍🏭", "🧑‍🏭", "🧑🏻‍🏭", "🧑🏼‍🏭", "🧑🏽‍🏭", "🧑🏾‍🏭", "🧑🏿‍🏭", "👨‍🏭", "👨🏻‍🏭", "👨🏼‍🏭", "👨🏽‍🏭", "👨🏾‍🏭", "👨🏿‍🏭", "👩‍💻", "👩🏻‍💻", "👩🏼‍💻", "👩🏽‍💻", "👩🏾‍💻", "👩🏿‍💻", "🧑‍💻", "🧑🏻‍💻", "🧑🏼‍💻", "🧑🏽‍💻", "🧑🏾‍💻", "🧑🏿‍💻", "👨‍💻", "👨🏻‍💻", "👨🏼‍💻", "👨🏽‍💻", "👨🏾‍💻", "👨🏿‍💻", "👩‍💼", "👩🏻‍💼", "👩🏼‍💼", "👩🏽‍💼", "👩🏾‍💼", "👩🏿‍💼", "🧑‍💼", "🧑🏻‍💼", "🧑🏼‍💼", "🧑🏽‍💼", "🧑🏾‍💼", "🧑🏿‍💼", "👨‍💼", "👨🏻‍💼", "👨🏼‍💼", "👨🏽‍💼", "👨🏾‍💼", "👨🏿‍💼", "👩‍🔧", "👩🏻‍🔧", "👩🏼‍🔧", "👩🏽‍🔧", "👩🏾‍🔧", "👩🏿‍🔧", "🧑‍🔧", "🧑🏻‍🔧", "🧑🏼‍🔧", "🧑🏽‍🔧", "🧑🏾‍🔧", "🧑🏿‍🔧", "👨‍🔧", "👨🏻‍🔧", "👨🏼‍🔧", "👨🏽‍🔧", "👨🏾‍🔧", "👨🏿‍🔧", "👩‍🔬", "👩🏻‍🔬", "👩🏼‍🔬", "👩🏽‍🔬", "👩🏾‍🔬", "👩🏿‍🔬", "🧑‍🔬", "🧑🏻‍🔬", "🧑🏼‍🔬", "🧑🏽‍🔬", "🧑🏾‍🔬", "🧑🏿‍🔬", "👨‍🔬", "👨🏻‍🔬", "👨🏼‍🔬", "👨🏽‍🔬", "👨🏾‍🔬", "👨🏿‍🔬", "👩‍🎨", "👩🏻‍🎨", "👩🏼‍🎨", "👩🏽‍🎨", "👩🏾‍🎨", "👩🏿‍🎨", "🧑‍🎨", "🧑🏻‍🎨", "🧑🏼‍🎨", "🧑🏽‍🎨", "🧑🏾‍🎨", "🧑🏿‍🎨", "👨‍🎨", "👨🏻‍🎨", "👨🏼‍🎨", "👨🏽‍🎨", "👨🏾‍🎨", "👨🏿‍🎨", "👩‍🚒", "👩🏻‍🚒", "👩🏼‍🚒", "👩🏽‍🚒", "👩🏾‍🚒", "👩🏿‍🚒", "🧑‍🚒", "🧑🏻‍🚒", "🧑🏼‍🚒", "🧑🏽‍🚒", "🧑🏾‍🚒", "🧑🏿‍🚒", "👨‍🚒", "👨🏻‍🚒", "👨🏼‍🚒", "👨🏽‍🚒", "👨🏾‍🚒", "👨🏿‍🚒", "👩‍✈", "👩🏻‍✈", "👩🏼‍✈", "👩🏽‍✈", "👩🏾‍✈", "👩🏿‍✈", "🧑‍✈", "🧑🏻‍✈", "🧑🏼‍✈", "🧑🏽‍✈", "🧑🏾‍✈", "🧑🏿‍✈", "👨‍✈", "👨🏻‍✈", "👨🏼‍✈", "👨🏽‍✈", "👨🏾‍✈", "👨🏿‍✈", "👩‍🚀", "👩🏻‍🚀", "👩🏼‍🚀", "👩🏽‍🚀", "👩🏾‍🚀", "👩🏿‍🚀", "🧑‍🚀", "🧑🏻‍🚀", "🧑🏼‍🚀", "🧑🏽‍🚀", "🧑🏾‍🚀", "🧑🏿‍🚀", "👨‍🚀", "👨🏻‍🚀", "👨🏼‍🚀", "👨🏽‍🚀", "👨🏾‍🚀", "👨🏿‍🚀", "👩‍⚖", "👩🏻‍⚖", "👩🏼‍⚖", "👩🏽‍⚖", "👩🏾‍⚖", "👩🏿‍⚖", "🧑‍⚖", "🧑🏻‍⚖", "🧑🏼‍⚖", "🧑🏽‍⚖", "🧑🏾‍⚖", "🧑🏿‍⚖", "👨‍⚖", "👨🏻‍⚖", "👨🏼‍⚖", "👨🏽‍⚖", "👨🏾‍⚖", "👨🏿‍⚖", "👰‍♀", "👰🏻‍♀", "👰🏼‍♀", "👰🏽‍♀", "👰🏾‍♀", "👰🏿‍♀", "👰", "👰🏻", "👰🏼", "👰🏽", "👰🏾", "👰🏿", "👰‍♂", "👰🏻‍♂", "👰🏼‍♂", "👰🏽‍♂", "👰🏾‍♂", "👰🏿‍♂", "🤵‍♀", "🤵🏻‍♀", "🤵🏼‍♀", "🤵🏽‍♀", "🤵🏾‍♀", "🤵🏿‍♀", "🤵", "🤵🏻", "🤵🏼", "🤵🏽", "🤵🏾", "🤵🏿", "🤵‍♂", "🤵🏻‍♂", "🤵🏼‍♂", "🤵🏽‍♂", "🤵🏾‍♂", "🤵🏿‍♂", "👸", "👸🏻", "👸🏼", "👸🏽", "👸🏾", "👸🏿", "🫅", "🫅🏻", "🫅🏼", "🫅🏽", "🫅🏾", "🫅🏿", "🤴", "🤴🏻", "🤴🏼", "🤴🏽", "🤴🏾", "🤴🏿", "🥷", "🥷🏻", "🥷🏼", "🥷🏽", "🥷🏾", "🥷🏿", "🦸‍♀", "🦸🏻‍♀", "🦸🏼‍♀", "🦸🏽‍♀", "🦸🏾‍♀", "🦸🏿‍♀", "🦸", "🦸🏻", "🦸🏼", "🦸🏽", "🦸🏾", "🦸🏿", "🦸‍♂", "🦸🏻‍♂", "🦸🏼‍♂", "🦸🏽‍♂", "🦸🏾‍♂", "🦸🏿‍♂", "🦹‍♀", "🦹🏻‍♀", "🦹🏼‍♀", "🦹🏽‍♀", "🦹🏾‍♀", "🦹🏿‍♀", "🦹", "🦹🏻", "🦹🏼", "🦹🏽", "🦹🏾", "🦹🏿", "🦹‍♂", "🦹🏻‍♂", "🦹🏼‍♂", "🦹🏽‍♂", "🦹🏾‍♂", "🦹🏿‍♂", "🤶", "🤶🏻", "🤶🏼", "🤶🏽", "🤶🏾", "🤶🏿", "🧑‍🎄", "🧑🏻‍🎄", "🧑🏼‍🎄", "🧑🏽‍🎄", "🧑🏾‍🎄", "🧑🏿‍🎄", "🎅", "🎅🏻", "🎅🏼", "🎅🏽", "🎅🏾", "🎅🏿", "🧙‍♀", "🧙🏻‍♀", "🧙🏼‍♀", "🧙🏽‍♀", "🧙🏾‍♀", "🧙🏿‍♀", "🧙", "🧙🏻", "🧙🏼", "🧙🏽", "🧙🏾", "🧙🏿", "🧙‍♂", "🧙🏻‍♂", "🧙🏼‍♂", "🧙🏽‍♂", "🧙🏾‍♂", "🧙🏿‍♂", "🧝‍♀", "🧝🏻‍♀", "🧝🏼‍♀", "🧝🏽‍♀", "🧝🏾‍♀", "🧝🏿‍♀", "🧝", "🧝🏻", "🧝🏼", "🧝🏽", "🧝🏾", "🧝🏿", "🧝‍♂", "🧝🏻‍♂", "🧝🏼‍♂", "🧝🏽‍♂", "🧝🏾‍♂", "🧝🏿‍♂", "🧌", "🧛‍♀", "🧛🏻‍♀", "🧛🏼‍♀", "🧛🏽‍♀", "🧛🏾‍♀", "🧛🏿‍♀", "🧛", "🧛🏻", "🧛🏼", "🧛🏽", "🧛🏾", "🧛🏿", "🧛‍♂", "🧛🏻‍♂", "🧛🏼‍♂", "🧛🏽‍♂", "🧛🏾‍♂", "🧛🏿‍♂", "🧟‍♀", "🧟", "🧟‍♂", "🧞‍♀", "🧞", "🧞‍♂", "🧜‍♀", "🧜🏻‍♀", "🧜🏼‍♀", "🧜🏽‍♀", "🧜🏾‍♀", "🧜🏿‍♀", "🧜", "🧜🏻", "🧜🏼", "🧜🏽", "🧜🏾", "🧜🏿", "🧜‍♂", "🧜🏻‍♂", "🧜🏼‍♂", "🧜🏽‍♂", "🧜🏾‍♂", "🧜🏿‍♂", "🧚‍♀", "🧚🏻‍♀", "🧚🏼‍♀", "🧚🏽‍♀", "🧚🏾‍♀", "🧚🏿‍♀", "🧚", "🧚🏻", "🧚🏼", "🧚🏽", "🧚🏾", "🧚🏿", "🧚‍♂", "🧚🏻‍♂", "🧚🏼‍♂", "🧚🏽‍♂", "🧚🏾‍♂", "🧚🏿‍♂", "👼", "👼🏻", "👼🏼", "👼🏽", "👼🏾", "👼🏿", "🤰", "🤰🏻", "🤰🏼", "🤰🏽", "🤰🏾", "🤰🏿", "🫄", "🫄🏻", "🫄🏼", "🫄🏽", "🫄🏾", "🫄🏿", "🫃", "🫃🏻", "🫃🏼", "🫃🏽", "🫃🏾", "🫃🏿", "🤱", "🤱🏻", "🤱🏼", "🤱🏽", "🤱🏾", "🤱🏿", "👩‍🍼", "👩🏻‍🍼", "👩🏼‍🍼", "👩🏽‍🍼", "👩🏾‍🍼", "👩🏿‍🍼", "🧑‍🍼", "🧑🏻‍🍼", "🧑🏼‍🍼", "🧑🏽‍🍼", "🧑🏾‍🍼", "🧑🏿‍🍼", "👨‍🍼", "👨🏻‍🍼", "👨🏼‍🍼", "👨🏽‍🍼", "👨🏾‍🍼", "👨🏿‍🍼", "🙇‍♀", "🙇🏻‍♀", "🙇🏼‍♀", "🙇🏽‍♀", "🙇🏾‍♀", "🙇🏿‍♀", "🙇", "🙇🏻", "🙇🏼", "🙇🏽", "🙇🏾", "🙇🏿", "🙇‍♂", "🙇🏻‍♂", "🙇🏼‍♂", "🙇🏽‍♂", "🙇🏾‍♂", "🙇🏿‍♂", "💁‍♀", "💁🏻‍♀", "💁🏼‍♀", "💁🏽‍♀", "💁🏾‍♀", "💁🏿‍♀", "💁", "💁🏻", "💁🏼", 
"💁🏽", "💁🏾", "💁🏿", "💁‍♂", "💁🏻‍♂", "💁🏼‍♂", "💁🏽‍♂", "💁🏾‍♂", "💁🏿‍♂", "🙅‍♀", "🙅🏻‍♀", "🙅🏼‍♀", "🙅🏽‍♀", "🙅🏾‍♀", "🙅🏿‍♀", "🙅", "🙅🏻", "🙅🏼", "🙅🏽", "🙅🏾", "🙅🏿", "🙅‍♂", "🙅🏻‍♂", "🙅🏼‍♂", "🙅🏽‍♂", "🙅🏾‍♂", "🙅🏿‍♂", "🙆‍♀", "🙆🏻‍♀", "🙆🏼‍♀", "🙆🏽‍♀", "🙆🏾‍♀", "🙆🏿‍♀", "🙆", "🙆🏻", "🙆🏼", "🙆🏽", "🙆🏾", "🙆🏿", "🙆‍♂", "🙆🏻‍♂", "🙆🏼‍♂", "🙆🏽‍♂", "🙆🏾‍♂", "🙆🏿‍♂", "🙋‍♀", "🙋🏻‍♀", "🙋🏼‍♀", "🙋🏽‍♀", "🙋🏾‍♀", "🙋🏿‍♀", "🙋", "🙋🏻", "🙋🏼", "🙋🏽", "🙋🏾", "🙋🏿", "🙋‍♂", "🙋🏻‍♂", "🙋🏼‍♂", "🙋🏽‍♂", "🙋🏾‍♂", "🙋🏿‍♂", "🧏‍♀", "🧏🏻‍♀", "🧏🏼‍♀", "🧏🏽‍♀", "🧏🏾‍♀", "🧏🏿‍♀", "🧏", "🧏🏻", "🧏🏼", "🧏🏽", "🧏🏾", "🧏🏿", "🧏‍♂", "🧏🏻‍♂", "🧏🏼‍♂", "🧏🏽‍♂", "🧏🏾‍♂", "🧏🏿‍♂", "🤦‍♀", "🤦🏻‍♀", "🤦🏼‍♀", "🤦🏽‍♀", "🤦🏾‍♀", "🤦🏿‍♀", "🤦", "🤦🏻", "🤦🏼", "🤦🏽", "🤦🏾", "🤦🏿", "🤦‍♂", "🤦🏻‍♂", "🤦🏼‍♂", "🤦🏽‍♂", "🤦🏾‍♂", "🤦🏿‍♂", "🤷‍♀", "🤷🏻‍♀", "🤷🏼‍♀", "🤷🏽‍♀", "🤷🏾‍♀", "🤷🏿‍♀", "🤷", "🤷🏻", "🤷🏼", "🤷🏽", "🤷🏾", "🤷🏿", "🤷‍♂", "🤷🏻‍♂", "🤷🏼‍♂", "🤷🏽‍♂", "🤷🏾‍♂", "🤷🏿‍♂", "🙎‍♀", "🙎🏻‍♀", "🙎🏼‍♀", "🙎🏽‍♀", "🙎🏾‍♀", "🙎🏿‍♀", "🙎", "🙎🏻", "🙎🏼", "🙎🏽", "🙎🏾", "🙎🏿", "🙎‍♂", "🙎🏻‍♂", "🙎🏼‍♂", "🙎🏽‍♂", "🙎🏾‍♂", "🙎🏿‍♂", "🙍‍♀", "🙍🏻‍♀", "🙍🏼‍♀", "🙍🏽‍♀", "🙍🏾‍♀", "🙍🏿‍♀", "🙍", "🙍🏻", "🙍🏼", "🙍🏽", "🙍🏾", "🙍🏿", "🙍‍♂", "🙍🏻‍♂", "🙍🏼‍♂", "🙍🏽‍♂", "🙍🏾‍♂", "🙍🏿‍♂", "💇‍♀", "💇🏻‍♀", "💇🏼‍♀", "💇🏽‍♀", "💇🏾‍♀", "💇🏿‍♀", "💇", "💇🏻", "💇🏼", "💇🏽", "💇🏾", "💇🏿", "💇‍♂", "💇🏻‍♂", "💇🏼‍♂", "💇🏽‍♂", "💇🏾‍♂", "💇🏿‍♂", "💆‍♀", "💆🏻‍♀", "💆🏼‍♀", "💆🏽‍♀", "💆🏾‍♀", "💆🏿‍♀", "💆", "💆🏻", "💆🏼", "💆🏽", "💆🏾", "💆🏿", "💆‍♂", "💆🏻‍♂", "💆🏼‍♂", "💆🏽‍♂", "💆🏾‍♂", "💆🏿‍♂", "🧖‍♀", "🧖🏻‍♀", "🧖🏼‍♀", "🧖🏽‍♀", "🧖🏾‍♀", "🧖🏿‍♀", "🧖", "🧖🏻", "🧖🏼", "🧖🏽", "🧖🏾", "🧖🏿", "🧖‍♂", "🧖🏻‍♂", "🧖🏼‍♂", "🧖🏽‍♂", "🧖🏾‍♂", "🧖🏿‍♂", "💅", "💅🏻", "💅🏼", "💅🏽", "💅🏾", "💅🏿", "🤳", "🤳🏻", "🤳🏼", "🤳🏽", "🤳🏾", "🤳🏿", "💃", "💃🏻", "💃🏼", "💃🏽", "💃🏾", "💃🏿", "🕺", "🕺🏻", "🕺🏼", "🕺🏽", "🕺🏾", "🕺🏿", "👯‍♀", "👯", "👯‍♂", "🕴", "🕴🏻", "🕴🏼", "🕴🏽", "🕴🏾", "🕴🏿", "👩‍🦽", "👩🏻‍🦽", "👩🏼‍🦽", "👩🏽‍🦽", "👩🏾‍🦽", "👩🏿‍🦽", "🧑‍🦽", "🧑🏻‍🦽", "🧑🏼‍🦽", "🧑🏽‍🦽", "🧑🏾‍🦽", "🧑🏿‍🦽", "👨‍🦽", "👨🏻‍🦽", "👨🏼‍🦽", "👨🏽‍🦽", "👨🏾‍🦽", "👨🏿‍🦽", "👩‍🦼", "👩🏻‍🦼", "👩🏼‍🦼", "👩🏽‍🦼", "👩🏾‍🦼", "👩🏿‍🦼", "🧑‍🦼", "🧑🏻‍🦼", "🧑🏼‍🦼", "🧑🏽‍🦼", "🧑🏾‍🦼", "🧑🏿‍🦼", "👨‍🦼", "👨🏻‍🦼", "👨🏼‍🦼", "👨🏽‍🦼", "👨🏾‍🦼", "👨🏿‍🦼", "🚶‍♀", "🚶🏻‍♀", "🚶🏼‍♀", "🚶🏽‍♀", "🚶🏾‍♀", "🚶🏿‍♀", "🚶", "🚶🏻", "🚶🏼", "🚶🏽", "🚶🏾", "🚶🏿", "🚶‍♂", "🚶🏻‍♂", "🚶🏼‍♂", "🚶🏽‍♂", "🚶🏾‍♂", "🚶🏿‍♂", "👩‍🦯", "👩🏻‍🦯", "👩🏼‍🦯", "👩🏽‍🦯", "👩🏾‍🦯", "👩🏿‍🦯", "🧑‍🦯", "🧑🏻‍🦯", "🧑🏼‍🦯", "🧑🏽‍🦯", "🧑🏾‍🦯", "🧑🏿‍🦯", "👨‍🦯", "👨🏻‍🦯", "👨🏼‍🦯", "👨🏽‍🦯", "👨🏾‍🦯", "👨🏿‍🦯", "🧎‍♀", "🧎🏻‍♀", "🧎🏼‍♀", "🧎🏽‍♀", "🧎🏾‍♀", "🧎🏿‍♀", "🧎", "🧎🏻", "🧎🏼", "🧎🏽", "🧎🏾", "🧎🏿", "🧎‍♂", "🧎🏻‍♂", "🧎🏼‍♂", "🧎🏽‍♂", "🧎🏾‍♂", "🧎🏿‍♂", "🏃‍♀", "🏃🏻‍♀", "🏃🏼‍♀", "🏃🏽‍♀", "🏃🏾‍♀", "🏃🏿‍♀", "🏃", "🏃🏻", "🏃🏼", "🏃🏽", "🏃🏾", "🏃🏿", "🏃‍♂", "🏃🏻‍♂", "🏃🏼‍♂", "🏃🏽‍♂", "🏃🏾‍♂", "🏃🏿‍♂", "🧍‍♀", "🧍🏻‍♀", "🧍🏼‍♀", "🧍🏽‍♀", "🧍🏾‍♀", "🧍🏿‍♀", "🧍", "🧍🏻", "🧍🏼", "🧍🏽", "🧍🏾", "🧍🏿", "🧍‍♂", "🧍🏻‍♂", "🧍🏼‍♂", "🧍🏽‍♂", "🧍🏾‍♂", "🧍🏿‍♂", "👫", "👫🏻", "👩🏻‍🤝‍👨🏼", "👩🏻‍🤝‍👨🏽", "👩🏻‍🤝‍👨🏾", "👩🏻‍🤝‍👨🏿", "👩🏼‍🤝‍👨🏻", "👫🏼", "👩🏼‍🤝‍👨🏽", "👩🏼‍🤝‍👨🏾", "👩🏼‍🤝‍👨🏿", "👩🏽‍🤝‍👨🏻", "👩🏽‍🤝‍👨🏼", "👫🏽", "👩🏽‍🤝‍👨🏾", "👩🏽‍🤝‍👨🏿", "👩🏾‍🤝‍👨🏻", "👩🏾‍🤝‍👨🏼", "👩🏾‍🤝‍👨🏽", "👫🏾", "👩🏾‍🤝‍👨🏿", "👩🏿‍🤝‍👨🏻", "👩🏿‍🤝‍👨🏼", "👩🏿‍🤝‍👨🏽", "👩🏿‍🤝‍👨🏾", "👫🏿", "👭", "👭🏻", "👩🏻‍🤝‍👩🏼", "👩🏻‍🤝‍👩🏽", "👩🏻‍🤝‍👩🏾", "👩🏻‍🤝‍👩🏿", "👩🏼‍🤝‍👩🏻", "👭🏼", "👩🏼‍🤝‍👩🏽", "👩🏼‍🤝‍👩🏾", "👩🏼‍🤝‍👩🏿", "👩🏽‍🤝‍👩🏻", "👩🏽‍🤝‍👩🏼", "👭🏽", "👩🏽‍🤝‍👩🏾", "👩🏽‍🤝‍👩🏿", "👩🏾‍🤝‍👩🏻", "👩🏾‍🤝‍👩🏼", "👩🏾‍🤝‍👩🏽", "👭🏾", "👩🏾‍🤝‍👩🏿", "👩🏿‍🤝‍👩🏻", "👩🏿‍🤝‍👩🏼", "👩🏿‍🤝‍👩🏽", "👩🏿‍🤝‍👩🏾", "👭🏿", "👬", "👬🏻", "👨🏻‍🤝‍👨🏼", "👨🏻‍🤝‍👨🏽", "👨🏻‍🤝‍👨🏾", "👨🏻‍🤝‍👨🏿", "👨🏼‍🤝‍👨🏻", "👬🏼", "👨🏼‍🤝‍👨🏽", "👨🏼‍🤝‍👨🏾", "👨🏼‍🤝‍👨🏿", "👨🏽‍🤝‍👨🏻", "👨🏽‍🤝‍👨🏼", "👬🏽", "👨🏽‍🤝‍👨🏾", "👨🏽‍🤝‍👨🏿", "👨🏾‍🤝‍👨🏻", "👨🏾‍🤝‍👨🏼", "👨🏾‍🤝‍👨🏽", "👬🏾", "👨🏾‍🤝‍👨🏿", "👨🏿‍🤝‍👨🏻", "👨🏿‍🤝‍👨🏼", "👨🏿‍🤝‍👨🏽", "👨🏿‍🤝‍👨🏾", "👬🏿", "👩‍❤‍👨", "👩🏻‍❤‍👨🏻", "👩🏻‍❤‍👨🏼", "👩🏻‍❤‍👨🏽", "👩🏻‍❤‍👨🏾", "👩🏻‍❤‍👨🏿", "👩🏼‍❤‍👨🏻", "👩🏼‍❤‍👨🏼", "👩🏼‍❤‍👨🏽", "👩🏼‍❤‍👨🏾", "👩🏼‍❤‍👨🏿", "👩🏽‍❤‍👨🏻", "👩🏽‍❤‍👨🏼", "👩🏽‍❤‍👨🏽", "👩🏽‍❤‍👨🏾", 
"👩🏽‍❤‍👨🏿", "👩🏾‍❤‍👨🏻", "👩🏾‍❤‍👨🏼", "👩🏾‍❤‍👨🏽", "👩🏾‍❤‍👨🏾", "👩🏾‍❤‍👨🏿", "👩🏿‍❤‍👨🏻", "👩🏿‍❤‍👨🏼", "👩🏿‍❤‍👨🏽", "👩🏿‍❤‍👨🏾", "👩🏿‍❤‍👨🏿", "👩‍❤‍👩", "👩🏻‍❤‍👩🏻", "👩🏻‍❤‍👩🏼", "👩🏻‍❤‍👩🏽", "👩🏻‍❤‍👩🏾", "👩🏻‍❤‍👩🏿", "👩🏼‍❤‍👩🏻", "👩🏼‍❤‍👩🏼", "👩🏼‍❤‍👩🏽", "👩🏼‍❤‍👩🏾", "👩🏼‍❤‍👩🏿", "👩🏽‍❤‍👩🏻", "👩🏽‍❤‍👩🏼", "👩🏽‍❤‍👩🏽", "👩🏽‍❤‍👩🏾", "👩🏽‍❤‍👩🏿", "👩🏾‍❤‍👩🏻", "👩🏾‍❤‍👩🏼", "👩🏾‍❤‍👩🏽", "👩🏾‍❤‍👩🏾", "👩🏾‍❤‍👩🏿", "👩🏿‍❤‍👩🏻", "👩🏿‍❤‍👩🏼", "👩🏿‍❤‍👩🏽", "👩🏿‍❤‍👩🏾", "👩🏿‍❤‍👩🏿", "💑", "🧑🏻‍❤‍🧑🏻", "🧑🏻‍❤‍🧑🏼", "🧑🏻‍❤‍🧑🏽", "🧑🏻‍❤‍🧑🏾", "🧑🏻‍❤‍🧑🏿", "🧑🏼‍❤‍🧑🏻", "🧑🏼‍❤‍🧑🏼", "🧑🏼‍❤‍🧑🏽", "🧑🏼‍❤‍🧑🏾", "🧑🏼‍❤‍🧑🏿", "🧑🏽‍❤‍🧑🏻", "🧑🏽‍❤‍🧑🏼", "🧑🏽‍❤‍🧑🏽", "🧑🏽‍❤‍🧑🏾", "🧑🏽‍❤‍🧑🏿", "🧑🏾‍❤‍🧑🏻", "🧑🏾‍❤‍🧑🏼", "🧑🏾‍❤‍🧑🏽", "🧑🏾‍❤‍🧑🏾", "🧑🏾‍❤‍🧑🏿", "🧑🏿‍❤‍🧑🏻", "🧑🏿‍❤‍🧑🏼", "🧑🏿‍❤‍🧑🏽", "🧑🏿‍❤‍🧑🏾", "🧑🏿‍❤‍🧑🏿", "👨‍❤‍👨", "👨🏻‍❤‍👨🏻", "👨🏻‍❤‍👨🏼", "👨🏻‍❤‍👨🏽", "👨🏻‍❤‍👨🏾", "👨🏻‍❤‍👨🏿", "👨🏼‍❤‍👨🏻", "👨🏼‍❤‍👨🏼", "👨🏼‍❤‍👨🏽", "👨🏼‍❤‍👨🏾", "👨🏼‍❤‍👨🏿", "👨🏽‍❤‍👨🏻", "👨🏽‍❤‍👨🏼", "👨🏽‍❤‍👨🏽", "👨🏽‍❤‍👨🏾", "👨🏽‍❤‍👨🏿", "👨🏾‍❤‍👨🏻", "👨🏾‍❤‍👨🏼", "👨🏾‍❤‍👨🏽", "👨🏾‍❤‍👨🏾", "👨🏾‍❤‍👨🏿", "👨🏿‍❤‍👨🏻", "👨🏿‍❤‍👨🏼", "👨🏿‍❤‍👨🏽", "👨🏿‍❤‍👨🏾", "👨🏿‍❤‍👨🏿", "👩‍❤‍💋‍👨", "👩🏻‍❤‍💋‍👨🏻", "👩🏻‍❤‍💋‍👨🏼", "👩🏻‍❤‍💋‍👨🏽", "👩🏻‍❤‍💋‍👨🏾", "👩🏻‍❤‍💋‍👨🏿", "👩🏼‍❤‍💋‍👨🏻", "👩🏼‍❤‍💋‍👨🏼", "👩🏼‍❤‍💋‍👨🏽", "👩🏼‍❤‍💋‍👨🏾", "👩🏼‍❤‍💋‍👨🏿", "👩🏽‍❤‍💋‍👨🏻", "👩🏽‍❤‍💋‍👨🏼", "👩🏽‍❤‍💋‍👨🏽", "👩🏽‍❤‍💋‍👨🏾", "👩🏽‍❤‍💋‍👨🏿", "👩🏾‍❤‍💋‍👨🏻", "👩🏾‍❤‍💋‍👨🏼", "👩🏾‍❤‍💋‍👨🏽", "👩🏾‍❤‍💋‍👨🏾", "👩🏾‍❤‍💋‍👨🏿", "👩🏿‍❤‍💋‍👨🏻", "👩🏿‍❤‍💋‍👨🏼", "👩🏿‍❤‍💋‍👨🏽", "👩🏿‍❤‍💋‍👨🏾", "👩🏿‍❤‍💋‍👨🏿", "👩‍❤‍💋‍👩", "👩🏻‍❤‍💋‍👩🏻", "👩🏻‍❤‍💋‍👩🏼", "👩🏻‍❤‍💋‍👩🏽", "👩🏻‍❤‍💋‍👩🏾", "👩🏻‍❤‍💋‍👩🏿", "👩🏼‍❤‍💋‍👩🏻", "👩🏼‍❤‍💋‍👩🏼", "👩🏼‍❤‍💋‍👩🏽", "👩🏼‍❤‍💋‍👩🏾", "👩🏼‍❤‍💋‍👩🏿", "👩🏽‍❤‍💋‍👩🏻", "👩🏽‍❤‍💋‍👩🏼", "👩🏽‍❤‍💋‍👩🏽", "👩🏽‍❤‍💋‍👩🏾", "👩🏽‍❤‍💋‍👩🏿", "👩🏾‍❤‍💋‍👩🏻", "👩🏾‍❤‍💋‍👩🏼", "👩🏾‍❤‍💋‍👩🏽", "👩🏾‍❤‍💋‍👩🏾", "👩🏾‍❤‍💋‍👩🏿", "👩🏿‍❤‍💋‍👩🏻", "👩🏿‍❤‍💋‍👩🏼", "👩🏿‍❤‍💋‍👩🏽", "👩🏿‍❤‍💋‍👩🏾", "👩🏿‍❤‍💋‍👩🏿", "💏", "🧑🏻‍❤‍💋‍🧑🏻", "🧑🏻‍❤‍💋‍🧑🏼", "🧑🏻‍❤‍💋‍🧑🏽", "🧑🏻‍❤‍💋‍🧑🏾", "🧑🏻‍❤‍💋‍🧑🏿", "🧑🏼‍❤‍💋‍🧑🏻", "🧑🏼‍❤‍💋‍🧑🏼", "🧑🏼‍❤‍💋‍🧑🏽", "🧑🏼‍❤‍💋‍🧑🏾", "🧑🏼‍❤‍💋‍🧑🏿", "🧑🏽‍❤‍💋‍🧑🏻", "🧑🏽‍❤‍💋‍🧑🏼", "🧑🏽‍❤‍💋‍🧑🏽", "🧑🏽‍❤‍💋‍🧑🏾", "🧑🏽‍❤‍💋‍🧑🏿", "🧑🏾‍❤‍💋‍🧑🏻", "🧑🏾‍❤‍💋‍🧑🏼", "🧑🏾‍❤‍💋‍🧑🏽", "🧑🏾‍❤‍💋‍🧑🏾", "🧑🏾‍❤‍💋‍🧑🏿", "🧑🏿‍❤‍💋‍🧑🏻", "🧑🏿‍❤‍💋‍🧑🏼", "🧑🏿‍❤‍💋‍🧑🏽", "🧑🏿‍❤‍💋‍🧑🏾", "🧑🏿‍❤‍💋‍🧑🏿", "👨‍❤‍💋‍👨", "👨🏻‍❤‍💋‍👨🏻", "👨🏻‍❤‍💋‍👨🏼", "👨🏻‍❤‍💋‍👨🏽", "👨🏻‍❤‍💋‍👨🏾", "👨🏻‍❤‍💋‍👨🏿", "👨🏼‍❤‍💋‍👨🏻", "👨🏼‍❤‍💋‍👨🏼", "👨🏼‍❤‍💋‍👨🏽", "👨🏼‍❤‍💋‍👨🏾", "👨🏼‍❤‍💋‍👨🏿", "👨🏽‍❤‍💋‍👨🏻", "👨🏽‍❤‍💋‍👨🏼", "👨🏽‍❤‍💋‍👨🏽", "👨🏽‍❤‍💋‍👨🏾", "👨🏽‍❤‍💋‍👨🏿", "👨🏾‍❤‍💋‍👨🏻", "👨🏾‍❤‍💋‍👨🏼", "👨🏾‍❤‍💋‍👨🏽", "👨🏾‍❤‍💋‍👨🏾", "👨🏾‍❤‍💋‍👨🏿", "👨🏿‍❤‍💋‍👨🏻", "👨🏿‍❤‍💋‍👨🏼", "👨🏿‍❤‍💋‍👨🏽", "👨🏿‍❤‍💋‍👨🏾", "👨🏿‍❤‍💋‍👨🏿", "👨‍👩‍👦", "👨‍👩‍👧", "👨‍👩‍👧‍👦", "👨‍👩‍👦‍👦", "👨‍👩‍👧‍👧", "👩‍👩‍👦", "👩‍👩‍👧", "👩‍👩‍👧‍👦", "👩‍👩‍👦‍👦", "👩‍👩‍👧‍👧", "👨‍👨‍👦", "👨‍👨‍👧", "👨‍👨‍👧‍👦", "👨‍👨‍👦‍👦", "👨‍👨‍👧‍👧", "👩‍👦", "👩‍👧", "👩‍👧‍👦", "👩‍👦‍👦", "👩‍👧‍👧", "👨‍👦", "👨‍👧", "👨‍👧‍👦", "👨‍👦‍👦", "👨‍👧‍👧", "🪢", "🧶", "🧵", "🪡", "🧥", "🥼", "🦺", "👚", "👕", "👖", "🩲", "🩳", "👔", "👗", "👙", "🩱", "👘", "🥻", "🩴", "🥿", "👠", "👡", "👢", "👞", "👟", "🥾", "🧦", "🧤", "🧣", "🎩", "🧢", "👒", "🎓", "⛑", "🪖", "👑", "💍", "👝", "👛", "👜", "💼", "🎒", "🧳", "👓", "🕶", "🥽", "🌂" }, new String[]{ - "🐶", "🐱", "🐭", "🐹", "🐰", "🦊", "🐻", "🐼", "🐻‍❄", "🐨", "🐯", "🦁", "🐮", "🐷", "🐽", "🐸", "🐵", "🙈", "🙉", "🙊", "🐒", "🐔", "🐧", "🐦", "🐤", "🐣", "🐥", "🦆", "🦅", "🦉", "🦇", "🐺", "🐗", "🐴", "🦄", "🐝", "🪱", "🐛", "🦋", "🐌", "🐞", "🐜", "🪰", "🪲", "🪳", "🦟", "🦗", "🕷", "🕸", "🦂", "🐢", "🐍", "🦎", "🦖", "🦕", "🐙", "🦑", "🦐", "🦞", "🦀", "🐡", "🐠", "🐟", "🐬", "🐳", "🐋", "🦈", "🦭", "🐊", "🐅", "🐆", "🦓", "🦍", "🦧", "🦣", "🐘", "🦛", "🦏", "🐪", "🐫", "🦒", "🦘", "🦬", "🐃", "🐂", "🐄", "🐎", "🐖", "🐏", "🐑", "🦙", "🐐", "🦌", "🐕", "🐩", "🦮", "🐕‍🦺", "🐈", "🐈‍⬛", "🪶", "🐓", "🦃", "🦤", "🦚", "🦜", "🦢", "🦩", "🕊", "🐇", "🦝", "🦨", "🦡", "🦫", "🦦", "🦥", "🐁", "🐀", "🐿", "🦔", "🐾", "🐉", "🐲", "🌵", "🎄", "🌲", "🌳", "🌴", "🪵", "🌱", "🌿", "☘", "🍀", "🎍", "🪴", "🎋", "🍃", "🍂", "🍁", "🪺", "🪹", "🍄", "🐚", "🪸", "🪨", "🌾", "💐", "🌷", "🌹", "🥀", "🪷", 
"🌺", "🌸", "🌼", "🌻", "🌞", "🌝", "🌛", "🌜", "🌚", "🌕", "🌖", "🌗", "🌘", "🌑", "🌒", "🌓", "🌔", "🌙", "🌎", "🌍", "🌏", "🪐", "💫", "⭐", "🌟", "✨", "⚡", "☄", "💥", "🔥", "🌪", "🌈", "☀", "🌤", "⛅", "🌥", "☁", "🌦", "🌧", "⛈", "🌩", "🌨", "❄", "☃", "⛄", "🌬", "💨", "💧", "💦", "🫧", "☔", "☂", "🌊", "🌫" + "🐶", "🐱", "🐭", "🐹", "🐰", "🦊", "🐻", "🐼", "🐻‍❄", "🐨", "🐯", "🦁", "🐮", "🐷", "🐽", "🐸", "🐵", "🙈", "🙉", "🙊", "🐒", "🐔", "🐧", "🐦", "🐤", "🐣", "🐥", "🪿", "🦆", "🐦‍⬛", "🦅", "🦉", "🦇", "🐺", "🐗", "🐴", "🦄", "🫎", "🐝", "🪱", "🐛", "🦋", "🐌", "🐞", "🐜", "🪰", "🪲", "🪳", "🦟", "🦗", "🕷", "🕸", "🦂", "🐢", "🐍", "🦎", "🦖", "🦕", "🐙", "🦑", "🪼", "🦐", "🦞", "🦀", "🐡", "🐠", "🐟", "🐬", "🐳", "🐋", "🦈", "🦭", "🐊", "🐅", "🐆", "🦓", "🦍", "🦧", "🦣", "🐘", "🦛", "🦏", "🐪", "🐫", "🦒", "🦘", "🦬", "🐃", "🐂", "🐄", "🫏", "🐎", "🐖", "🐏", "🐑", "🦙", "🐐", "🦌", "🐕", "🐩", "🦮", "🐕‍🦺", "🐈", "🐈‍⬛", "🪶", "🪽", "🐓", "🦃", "🦤", "🦚", "🦜", "🦢", "🦩", "🕊", "🐇", "🦝", "🦨", "🦡", "🦫", "🦦", "🦥", "🐁", "🐀", "🐿", "🦔", "🐾", "🐉", "🐲", "🌵", "🎄", "🌲", "🌳", "🌴", "🪵", "🌱", "🌿", "☘", "🍀", "🎍", "🪴", "🎋", "🍃", "🍂", "🍁", "🪺", "🪹", "🍄", "🐚", "🪸", "🪨", "🌾", "💐", "🌷", "🌹", "🥀", "🪻", "🪷", "🌺", "🌸", "🌼", "🌻", "🌞", "🌝", "🌛", "🌜", "🌚", "🌕", "🌖", "🌗", "🌘", "🌑", "🌒", "🌓", "🌔", "🌙", "🌎", "🌍", "🌏", "🪐", "💫", "⭐", "🌟", "✨", "⚡", "☄", "💥", "🔥", "🌪", "🌈", "☀", "🌤", "⛅", "🌥", "☁", "🌦", "🌧", "⛈", "🌩", "🌨", "❄", "☃", "⛄", "🌬", "💨", "💧", "💦", "🫧", "☔", "☂", "🌊", "🌫" }, new String[]{ - "🍏", "🍎", "🍐", "🍊", "🍋", "🍌", "🍉", "🍇", "🍓", "🫐", "🍈", "🍒", "🍑", "🥭", "🍍", "🥥", "🥝", "🍅", "🍆", "🥑", "🥦", "🥬", "🥒", "🌶", "🫑", "🌽", "🥕", "🫒", "🧄", "🧅", "🥔", "🍠", "🥐", "🥯", "🍞", "🥖", "🥨", "🧀", "🥚", "🍳", "🧈", "🥞", "🧇", "🥓", "🥩", "🍗", "🍖", "🦴", "🌭", "🍔", "🍟", "🍕", "🫓", "🥪", "🥙", "🧆", "🌮", "🌯", "🫔", "🥗", "🥘", "🫕", "🥫", "🫙", "🍝", "🍜", "🍲", "🍛", "🍣", "🍱", "🥟", "🦪", "🍤", "🍙", "🍚", "🍘", "🍥", "🥠", "🥮", "🍢", "🍡", "🍧", "🍨", "🍦", "🥧", "🧁", "🍰", "🎂", "🍮", "🍭", "🍬", "🍫", "🍿", "🍩", "🍪", "🌰", "🥜", "🫘", "🍯", "🥛", "🫗", "🍼", "🫖", "☕", "🍵", "🧃", "🥤", "🧋", "🍶", "🍺", "🍻", "🥂", "🍷", "🥃", "🍸", "🍹", "🧉", "🍾", "🧊", "🥄", "🍴", "🍽", "🥣", "🥡", "🥢", "🧂" + "🍏", "🍎", "🍐", "🍊", "🍋", "🍌", "🍉", "🍇", "🍓", "🫐", "🍈", "🍒", "🍑", "🥭", "🍍", "🥥", "🥝", "🍅", "🍆", "🥑", "🫛", "🥦", "🥬", "🥒", "🌶", "🫑", "🌽", "🥕", "🫒", "🧄", "🧅", "🥔", "🍠", "🫚", "🥐", "🥯", "🍞", "🥖", "🥨", "🧀", "🥚", "🍳", "🧈", "🥞", "🧇", "🥓", "🥩", "🍗", "🍖", "🦴", "🌭", "🍔", "🍟", "🍕", "🫓", "🥪", "🥙", "🧆", "🌮", "🌯", "🫔", "🥗", "🥘", "🫕", "🥫", "🫙", "🍝", "🍜", "🍲", "🍛", "🍣", "🍱", "🥟", "🦪", "🍤", "🍙", "🍚", "🍘", "🍥", "🥠", "🥮", "🍢", "🍡", "🍧", "🍨", "🍦", "🥧", "🧁", "🍰", "🎂", "🍮", "🍭", "🍬", "🍫", "🍿", "🍩", "🍪", "🌰", "🥜", "🫘", "🍯", "🥛", "🫗", "🍼", "🫖", "☕", "🍵", "🧃", "🥤", "🧋", "🍶", "🍺", "🍻", "🥂", "🍷", "🥃", "🍸", "🍹", "🧉", "🍾", "🧊", "🥄", "🍴", "🍽", "🥣", "🥡", "🥢", "🧂" }, new String[]{ - "⚽", "🏀", "🏈", "⚾", "🥎", "🎾", "🏐", "🏉", "🥏", "🎱", "🪀", "🏓", "🏸", "🏒", "🏑", "🥍", "🏏", "🪃", "🥅", "⛳", "🪁", "🛝", "🏹", "🎣", "🤿", "🥊", "🥋", "🎽", "🛹", "🛼", "🛷", "⛸", "🥌", "🎿", "⛷", "🏂", "🪂", "🏋‍♀", "🏋🏻‍♀", "🏋🏼‍♀", "🏋🏽‍♀", "🏋🏾‍♀", "🏋🏿‍♀", "🏋", "🏋🏻", "🏋🏼", "🏋🏽", "🏋🏾", "🏋🏿", "🏋‍♂", "🏋🏻‍♂", "🏋🏼‍♂", "🏋🏽‍♂", "🏋🏾‍♂", "🏋🏿‍♂", "🤼‍♀", "🤼", "🤼‍♂", "🤸‍♀", "🤸🏻‍♀", "🤸🏼‍♀", "🤸🏽‍♀", "🤸🏾‍♀", "🤸🏿‍♀", "🤸", "🤸🏻", "🤸🏼", "🤸🏽", "🤸🏾", "🤸🏿", "🤸‍♂", "🤸🏻‍♂", "🤸🏼‍♂", "🤸🏽‍♂", "🤸🏾‍♂", "🤸🏿‍♂", "⛹‍♀", "⛹🏻‍♀", "⛹🏼‍♀", "⛹🏽‍♀", "⛹🏾‍♀", "⛹🏿‍♀", "⛹", "⛹🏻", "⛹🏼", "⛹🏽", "⛹🏾", "⛹🏿", "⛹‍♂", "⛹🏻‍♂", "⛹🏼‍♂", "⛹🏽‍♂", "⛹🏾‍♂", "⛹🏿‍♂", "🤺", "🤾‍♀", "🤾🏻‍♀", "🤾🏼‍♀", "🤾🏽‍♀", "🤾🏾‍♀", "🤾🏿‍♀", "🤾", "🤾🏻", "🤾🏼", "🤾🏽", "🤾🏾", "🤾🏿", "🤾‍♂", "🤾🏻‍♂", "🤾🏼‍♂", "🤾🏽‍♂", "🤾🏾‍♂", "🤾🏿‍♂", "🏌‍♀", "🏌🏻‍♀", "🏌🏼‍♀", "🏌🏽‍♀", "🏌🏾‍♀", "🏌🏿‍♀", "🏌", "🏌🏻", "🏌🏼", "🏌🏽", "🏌🏾", "🏌🏿", "🏌‍♂", "🏌🏻‍♂", "🏌🏼‍♂", "🏌🏽‍♂", "🏌🏾‍♂", "🏌🏿‍♂", "🏇", "🏇🏻", "🏇🏼", "🏇🏽", "🏇🏾", "🏇🏿", "🧘‍♀", "🧘🏻‍♀", "🧘🏼‍♀", 
"🧘🏽‍♀", "🧘🏾‍♀", "🧘🏿‍♀", "🧘", "🧘🏻", "🧘🏼", "🧘🏽", "🧘🏾", "🧘🏿", "🧘‍♂", "🧘🏻‍♂", "🧘🏼‍♂", "🧘🏽‍♂", "🧘🏾‍♂", "🧘🏿‍♂", "🏄‍♀", "🏄🏻‍♀", "🏄🏼‍♀", "🏄🏽‍♀", "🏄🏾‍♀", "🏄🏿‍♀", "🏄", "🏄🏻", "🏄🏼", "🏄🏽", "🏄🏾", "🏄🏿", "🏄‍♂", "🏄🏻‍♂", "🏄🏼‍♂", "🏄🏽‍♂", "🏄🏾‍♂", "🏄🏿‍♂", "🏊‍♀", "🏊🏻‍♀", "🏊🏼‍♀", "🏊🏽‍♀", "🏊🏾‍♀", "🏊🏿‍♀", "🏊", "🏊🏻", "🏊🏼", "🏊🏽", "🏊🏾", "🏊🏿", "🏊‍♂", "🏊🏻‍♂", "🏊🏼‍♂", "🏊🏽‍♂", "🏊🏾‍♂", "🏊🏿‍♂", "🤽‍♀", "🤽🏻‍♀", "🤽🏼‍♀", "🤽🏽‍♀", "🤽🏾‍♀", "🤽🏿‍♀", "🤽", "🤽🏻", "🤽🏼", "🤽🏽", "🤽🏾", "🤽🏿", "🤽‍♂", "🤽🏻‍♂", "🤽🏼‍♂", "🤽🏽‍♂", "🤽🏾‍♂", "🤽🏿‍♂", "🚣‍♀", "🚣🏻‍♀", "🚣🏼‍♀", "🚣🏽‍♀", "🚣🏾‍♀", "🚣🏿‍♀", "🚣", "🚣🏻", "🚣🏼", "🚣🏽", "🚣🏾", "🚣🏿", "🚣‍♂", "🚣🏻‍♂", "🚣🏼‍♂", "🚣🏽‍♂", "🚣🏾‍♂", "🚣🏿‍♂", "🧗‍♀", "🧗🏻‍♀", "🧗🏼‍♀", "🧗🏽‍♀", "🧗🏾‍♀", "🧗🏿‍♀", "🧗", "🧗🏻", "🧗🏼", "🧗🏽", "🧗🏾", "🧗🏿", "🧗‍♂", "🧗🏻‍♂", "🧗🏼‍♂", "🧗🏽‍♂", "🧗🏾‍♂", "🧗🏿‍♂", "🚵‍♀", "🚵🏻‍♀", "🚵🏼‍♀", "🚵🏽‍♀", "🚵🏾‍♀", "🚵🏿‍♀", "🚵", "🚵🏻", "🚵🏼", "🚵🏽", "🚵🏾", "🚵🏿", "🚵‍♂", "🚵🏻‍♂", "🚵🏼‍♂", "🚵🏽‍♂", "🚵🏾‍♂", "🚵🏿‍♂", "🚴‍♀", "🚴🏻‍♀", "🚴🏼‍♀", "🚴🏽‍♀", "🚴🏾‍♀", "🚴🏿‍♀", "🚴", "🚴🏻", "🚴🏼", "🚴🏽", "🚴🏾", "🚴🏿", "🚴‍♂", "🚴🏻‍♂", "🚴🏼‍♂", "🚴🏽‍♂", "🚴🏾‍♂", "🚴🏿‍♂", "🏆", "🥇", "🥈", "🥉", "🏅", "🎖", "🏵", "🎗", "🎫", "🎟", "🎪", "🤹‍♀", "🤹🏻‍♀", "🤹🏼‍♀", "🤹🏽‍♀", "🤹🏾‍♀", "🤹🏿‍♀", "🤹", "🤹🏻", "🤹🏼", "🤹🏽", "🤹🏾", "🤹🏿", "🤹‍♂", "🤹🏻‍♂", "🤹🏼‍♂", "🤹🏽‍♂", "🤹🏾‍♂", "🤹🏿‍♂", "🎭", "🩰", "🎨", "🎬", "🎤", "🎧", "🎼", "🎹", "🥁", "🪘", "🎷", "🎺", "🪗", "🎸", "🪕", "🎻", "🎲", "♟", "🎯", "🎳", "🎮", "🎰", "🧩" + "⚽", "🏀", "🏈", "⚾", "🥎", "🎾", "🏐", "🏉", "🥏", "🎱", "🪀", "🏓", "🏸", "🏒", "🏑", "🥍", "🏏", "🪃", "🥅", "⛳", "🪁", "🛝", "🏹", "🎣", "🤿", "🥊", "🥋", "🎽", "🛹", "🛼", "🛷", "⛸", "🥌", "🎿", "⛷", "🏂", "🪂", "🏋‍♀", "🏋🏻‍♀", "🏋🏼‍♀", "🏋🏽‍♀", "🏋🏾‍♀", "🏋🏿‍♀", "🏋", "🏋🏻", "🏋🏼", "🏋🏽", "🏋🏾", "🏋🏿", "🏋‍♂", "🏋🏻‍♂", "🏋🏼‍♂", "🏋🏽‍♂", "🏋🏾‍♂", "🏋🏿‍♂", "🤼‍♀", "🤼", "🤼‍♂", "🤸‍♀", "🤸🏻‍♀", "🤸🏼‍♀", "🤸🏽‍♀", "🤸🏾‍♀", "🤸🏿‍♀", "🤸", "🤸🏻", "🤸🏼", "🤸🏽", "🤸🏾", "🤸🏿", "🤸‍♂", "🤸🏻‍♂", "🤸🏼‍♂", "🤸🏽‍♂", "🤸🏾‍♂", "🤸🏿‍♂", "⛹‍♀", "⛹🏻‍♀", "⛹🏼‍♀", "⛹🏽‍♀", "⛹🏾‍♀", "⛹🏿‍♀", "⛹", "⛹🏻", "⛹🏼", "⛹🏽", "⛹🏾", "⛹🏿", "⛹‍♂", "⛹🏻‍♂", "⛹🏼‍♂", "⛹🏽‍♂", "⛹🏾‍♂", "⛹🏿‍♂", "🤺", "🤾‍♀", "🤾🏻‍♀", "🤾🏼‍♀", "🤾🏽‍♀", "🤾🏾‍♀", "🤾🏿‍♀", "🤾", "🤾🏻", "🤾🏼", "🤾🏽", "🤾🏾", "🤾🏿", "🤾‍♂", "🤾🏻‍♂", "🤾🏼‍♂", "🤾🏽‍♂", "🤾🏾‍♂", "🤾🏿‍♂", "🏌‍♀", "🏌🏻‍♀", "🏌🏼‍♀", "🏌🏽‍♀", "🏌🏾‍♀", "🏌🏿‍♀", "🏌", "🏌🏻", "🏌🏼", "🏌🏽", "🏌🏾", "🏌🏿", "🏌‍♂", "🏌🏻‍♂", "🏌🏼‍♂", "🏌🏽‍♂", "🏌🏾‍♂", "🏌🏿‍♂", "🏇", "🏇🏻", "🏇🏼", "🏇🏽", "🏇🏾", "🏇🏿", "🧘‍♀", "🧘🏻‍♀", "🧘🏼‍♀", "🧘🏽‍♀", "🧘🏾‍♀", "🧘🏿‍♀", "🧘", "🧘🏻", "🧘🏼", "🧘🏽", "🧘🏾", "🧘🏿", "🧘‍♂", "🧘🏻‍♂", "🧘🏼‍♂", "🧘🏽‍♂", "🧘🏾‍♂", "🧘🏿‍♂", "🏄‍♀", "🏄🏻‍♀", "🏄🏼‍♀", "🏄🏽‍♀", "🏄🏾‍♀", "🏄🏿‍♀", "🏄", "🏄🏻", "🏄🏼", "🏄🏽", "🏄🏾", "🏄🏿", "🏄‍♂", "🏄🏻‍♂", "🏄🏼‍♂", "🏄🏽‍♂", "🏄🏾‍♂", "🏄🏿‍♂", "🏊‍♀", "🏊🏻‍♀", "🏊🏼‍♀", "🏊🏽‍♀", "🏊🏾‍♀", "🏊🏿‍♀", "🏊", "🏊🏻", "🏊🏼", "🏊🏽", "🏊🏾", "🏊🏿", "🏊‍♂", "🏊🏻‍♂", "🏊🏼‍♂", "🏊🏽‍♂", "🏊🏾‍♂", "🏊🏿‍♂", "🤽‍♀", "🤽🏻‍♀", "🤽🏼‍♀", "🤽🏽‍♀", "🤽🏾‍♀", "🤽🏿‍♀", "🤽", "🤽🏻", "🤽🏼", "🤽🏽", "🤽🏾", "🤽🏿", "🤽‍♂", "🤽🏻‍♂", "🤽🏼‍♂", "🤽🏽‍♂", "🤽🏾‍♂", "🤽🏿‍♂", "🚣‍♀", "🚣🏻‍♀", "🚣🏼‍♀", "🚣🏽‍♀", "🚣🏾‍♀", "🚣🏿‍♀", "🚣", "🚣🏻", "🚣🏼", "🚣🏽", "🚣🏾", "🚣🏿", "🚣‍♂", "🚣🏻‍♂", "🚣🏼‍♂", "🚣🏽‍♂", "🚣🏾‍♂", "🚣🏿‍♂", "🧗‍♀", "🧗🏻‍♀", "🧗🏼‍♀", "🧗🏽‍♀", "🧗🏾‍♀", "🧗🏿‍♀", "🧗", "🧗🏻", "🧗🏼", "🧗🏽", "🧗🏾", "🧗🏿", "🧗‍♂", "🧗🏻‍♂", "🧗🏼‍♂", "🧗🏽‍♂", "🧗🏾‍♂", "🧗🏿‍♂", "🚵‍♀", "🚵🏻‍♀", "🚵🏼‍♀", "🚵🏽‍♀", "🚵🏾‍♀", "🚵🏿‍♀", "🚵", "🚵🏻", "🚵🏼", "🚵🏽", "🚵🏾", "🚵🏿", "🚵‍♂", "🚵🏻‍♂", "🚵🏼‍♂", "🚵🏽‍♂", "🚵🏾‍♂", "🚵🏿‍♂", "🚴‍♀", "🚴🏻‍♀", "🚴🏼‍♀", "🚴🏽‍♀", "🚴🏾‍♀", "🚴🏿‍♀", "🚴", "🚴🏻", "🚴🏼", "🚴🏽", "🚴🏾", "🚴🏿", "🚴‍♂", "🚴🏻‍♂", "🚴🏼‍♂", "🚴🏽‍♂", "🚴🏾‍♂", "🚴🏿‍♂", "🏆", "🥇", "🥈", "🥉", "🏅", "🎖", "🏵", "🎗", "🎫", "🎟", "🎪", "🤹‍♀", "🤹🏻‍♀", "🤹🏼‍♀", "🤹🏽‍♀", "🤹🏾‍♀", "🤹🏿‍♀", "🤹", "🤹🏻", "🤹🏼", "🤹🏽", "🤹🏾", "🤹🏿", "🤹‍♂", "🤹🏻‍♂", "🤹🏼‍♂", "🤹🏽‍♂", "🤹🏾‍♂", "🤹🏿‍♂", "🎭", "🩰", "🎨", "🎬", "🎤", "🎧", "🎼", "🎹", "🪇", "🥁", "🪘", "🎷", "🎺", "🪗", "🎸", "🪕", "🎻", "🪈", "🎲", "♟", "🎯", "🎳", "🎮", "🎰", "🧩" }, new String[]{ 
"🚗", "🚕", "🚙", "🚌", "🚎", "🏎", "🚓", "🚑", "🚒", "🚐", "🛻", "🚚", "🚛", "🚜", "🦯", "🦽", "🦼", "🩼", "🛴", "🚲", "🛵", "🏍", "🛺", "🛞", "🚨", "🚔", "🚍", "🚘", "🚖", "🚡", "🚠", "🚟", "🚃", "🚋", "🚞", "🚝", "🚄", "🚅", "🚈", "🚂", "🚆", "🚇", "🚊", "🚉", "✈", "🛫", "🛬", "🛩", "💺", "🛰", "🚀", "🛸", "🚁", "🛶", "⛵", "🚤", "🛥", "🛳", "⛴", "🚢", "🛟", "⚓", "🪝", "⛽", "🚧", "🚦", "🚥", "🚏", "🗺", "🗿", "🗽", "🗼", "🏰", "🏯", "🏟", "🎡", "🎢", "🎠", "⛲", "⛱", "🏖", "🏝", "🏜", "🌋", "⛰", "🏔", "🗻", "🏕", "⛺", "🛖", "🏠", "🏡", "🏘", "🏚", "🏗", "🏭", "🏢", "🏬", "🏣", "🏤", "🏥", "🏦", "🏨", "🏪", "🏫", "🏩", "💒", "🏛", "⛪", "🕌", "🕍", "🛕", "🕋", "⛩", "🛤", "🛣", "🗾", "🎑", "🏞", "🌅", "🌄", "🌠", "🎇", "🎆", "🌇", "🌆", "🏙", "🌃", "🌌", "🌉", "🌁" }, new String[]{ - "⌚", "📱", "📲", "💻", "⌨", "🖥", "🖨", "🖱", "🖲", "🕹", "🗜", "💽", "💾", "💿", "📀", "📼", "📷", "📸", "📹", "🎥", "📽", "🎞", "📞", "☎", "📟", "📠", "📺", "📻", "🎙", "🎚", "🎛", "🧭", "⏱", "⏲", "⏰", "🕰", "⌛", "⏳", "📡", "🔋", "🪫", "🔌", "💡", "🔦", "🕯", "🪔", "🧯", "🛢", "💸", "💵", "💴", "💶", "💷", "🪙", "💰", "💳", "🪪", "💎", "⚖", "🪜", "🧰", "🪛", "🔧", "🔨", "⚒", "🛠", "⛏", "🪚", "🔩", "⚙", "🪤", "🧱", "⛓", "🧲", "🔫", "💣", "🧨", "🪓", "🔪", "🗡", "⚔", "🛡", "🚬", "⚰", "🪦", "⚱", "🏺", "🔮", "📿", "🧿", "🪬", "💈", "⚗", "🔭", "🔬", "🕳️", "🩻", "🩹", "🩺", "💊", "💉", "🩸", "🧬", "🦠", "🧫", "🧪", "🌡", "🧹", "🪠", "🧺", "🧻", "🚽", "🚰", "🚿", "🛁", "🛀", "🛀🏻", "🛀🏼", "🛀🏽", "🛀🏾", "🛀🏿", "🧼", "🪥", "🪒", "🧽", "🪣", "🧴", "🛎", "🔑", "🗝", "🚪", "🪑", "🛋", "🛏", "🛌", "🧸", "🪆", "🖼", "🪞", "🪟", "🛍", "🛒", "🎁", "🎈", "🎏", "🎀", "🪄", "🪅", "🎊", "🎉", "🎎", "🏮", "🎐", "🪩", "🧧", "✉", "📩", "📨", "📧", "💌", "📥", "📤", "📦", "🏷", "🪧", "📪", "📫", "📬", "📭", "📮", "📯", "📜", "📃", "📄", "📑", "🧾", "📊", "📈", "📉", "🗒", "🗓", "📆", "📅", "🗑", "📇", "🗃", "🗳", "🗄", "📋", "📁", "📂", "🗂", "🗞", "📰", "📓", "📔", "📒", "📕", "📗", "📘", "📙", "📚", "📖", "🔖", "🧷", "🔗", "📎", "🖇", "📐", "📏", "🧮", "📌", "📍", "✂", "🖊", "🖋", "✒", "🖌", "🖍", "📝", "✏", "🔍", "🔎", "🔏", "🔐", "🔒", "🔓" + "⌚", "📱", "📲", "💻", "⌨", "🖥", "🖨", "🖱", "🖲", "🕹", "🗜", "💽", "💾", "💿", "📀", "📼", "📷", "📸", "📹", "🎥", "📽", "🎞", "📞", "☎", "📟", "📠", "📺", "📻", "🎙", "🎚", "🎛", "🧭", "⏱", "⏲", "⏰", "🕰", "⌛", "⏳", "📡", "🔋", "🪫", "🔌", "💡", "🔦", "🕯", "🪔", "🧯", "🛢", "💸", "💵", "💴", "💶", "💷", "🪙", "💰", "💳", "🪪", "💎", "⚖", "🪜", "🧰", "🪛", "🔧", "🔨", "⚒", "🛠", "⛏", "🪚", "🔩", "⚙", "🪤", "🧱", "⛓", "🧲", "🔫", "💣", "🧨", "🪓", "🔪", "🗡", "⚔", "🛡", "🚬", "⚰", "🪦", "⚱", "🏺", "🔮", "📿", "🧿", "🪬", "💈", "⚗", "🔭", "🔬", "🕳️", "🩻", "🩹", "🩺", "💊", "💉", "🩸", "🧬", "🦠", "🧫", "🧪", "🌡", "🧹", "🪠", "🧺", "🧻", "🚽", "🚰", "🚿", "🛁", "🛀", "🛀🏻", "🛀🏼", "🛀🏽", "🛀🏾", "🛀🏿", "🧼", "🪥", "🪒", "🪮", "🧽", "🪣", "🧴", "🛎", "🔑", "🗝", "🚪", "🪑", "🛋", "🛏", "🛌", "🧸", "🪆", "🖼", "🪞", "🪟", "🛍", "🛒", "🎁", "🎈", "🎏", "🎀", "🪄", "🪅", "🎊", "🎉", "🎎", "🪭", "🏮", "🎐", "🪩", "🧧", "✉", "📩", "📨", "📧", "💌", "📥", "📤", "📦", "🏷", "🪧", "📪", "📫", "📬", "📭", "📮", "📯", "📜", "📃", "📄", "📑", "🧾", "📊", "📈", "📉", "🗒", "🗓", "📆", "📅", "🗑", "📇", "🗃", "🗳", "🗄", "📋", "📁", "📂", "🗂", "🗞", "📰", "📓", "📔", "📒", "📕", "📗", "📘", "📙", "📚", "📖", "🔖", "🧷", "🔗", "📎", "🖇", "📐", "📏", "🧮", "📌", "📍", "✂", "🖊", "🖋", "✒", "🖌", "🖍", "📝", "✏", "🔍", "🔎", "🔏", "🔐", "🔒", "🔓" }, new String[]{ - "❤", "🧡", "💛", "💚", "💙", "💜", "🖤", "🤍", "🤎", "💔", "❤‍🔥", "❤‍🩹", "❣", "💕", "💞", "💓", "💗", "💖", "💘", "💝", "💟", "☮", "✝", "☪", "🕉", "☸", "✡", "🔯", "🕎", "☯", "☦", "🛐", "⛎", "♈", "♉", "♊", "♋", "♌", "♍", "♎", "♏", "♐", "♑", "♒", "♓", "🆔", "⚛", "🉑", "☢", "☣", "📴", "📳", "🈶", "🈚", "🈸", "🈺", "🈷", "✴", "🆚", "💮", "🉐", "㊙", "㊗", "🈴", "🈵", "🈹", "🈲", "🅰", "🅱", "🆎", "🆑", "🅾", "🆘", "❌", "⭕", "🛑", "⛔", "📛", "🚫", "💯", "💢", "♨", "🚷", "🚯", "🚳", "🚱", "🔞", "📵", "🚭", "❗", "❕", "❓", "❔", "‼", "⁉", "🔅", "🔆", "〽", "⚠", "🚸", "🔱", "⚜", "🔰", "♻", "✅", "🈯", "💹", "❇", "✳", "❎", "🌐", "💠", "Ⓜ", 
"🌀", "💤", "🏧", "🚾", "♿", "🅿", "🛗", "🈳", "🈂", "🛂", "🛃", "🛄", "🛅", "🚹", "🚺", "🚼", "⚧", "🚻", "🚮", "🎦", "📶", "🈁", "🔣", "ℹ", "🔤", "🔡", "🔠", "🆖", "🆗", "🆙", "🆒", "🆕", "🆓", "0⃣", "1⃣", "2⃣", "3⃣", "4⃣", "5⃣", "6⃣", "7⃣", "8⃣", "9⃣", "🔟", "🔢", "#⃣", "*⃣", "⏏", "▶", "⏸", "⏯", "⏹", "⏺", "⏭", "⏮", "⏩", "⏪", "⏫", "⏬", "◀", "🔼", "🔽", "➡", "⬅", "⬆", "⬇", "↗", "↘", "↙", "↖", "↕", "↔", "↪", "↩", "⤴", "⤵", "🔀", "🔁", "🔂", "🔄", "🔃", "🎵", "🎶", "➕", "➖", "➗", "✖", "🟰", "♾", "💲", "💱", "™", "©", "®", "👁‍🗨", "🔚", "🔙", "🔛", "🔝", "🔜", "〰", "➰", "➿", "✔", "☑", "🔘", "🔴", "🟠", "🟡", "🟢", "🔵", "🟣", "⚫", "⚪", "🟤", "🔺", "🔻", "🔸", "🔹", "🔶", "🔷", "🔳", "🔲", "▪", "▫", "◾", "◽", "◼", "◻", "🟥", "🟧", "🟨", "🟩", "🟦", "🟪", "⬛", "⬜", "🟫", "🔈", "🔇", "🔉", "🔊", "🔔", "🔕", "📣", "📢", "💬", "💭", "🗯", "♠", "♣", "♥", "♦", "🃏", "🎴", "🀄", "🕐", "🕑", "🕒", "🕓", "🕔", "🕕", "🕖", "🕗", "🕘", "🕙", "🕚", "🕛", "🕜", "🕝", "🕞", "🕟", "🕠", "🕡", "🕢", "🕣", "🕤", "🕥", "🕦", "🕧" + "🩷", "❤", "🧡", "💛", "💚", "🩵", "💙", "💜", "🖤", "🩶", "🤍", "🤎", "💔", "❤‍🔥", "❤‍🩹", "❣", "💕", "💞", "💓", "💗", "💖", "💘", "💝", "💟", "☮", "✝", "☪", "🕉", "☸", "🪯", "✡", "🔯", "🕎", "☯", "☦", "🛐", "⛎", "♈", "♉", "♊", "♋", "♌", "♍", "♎", "♏", "♐", "♑", "♒", "♓", "🆔", "⚛", "🉑", "☢", "☣", "📴", "📳", "🈶", "🈚", "🈸", "🈺", "🈷", "✴", "🆚", "💮", "🉐", "㊙", "㊗", "🈴", "🈵", "🈹", "🈲", "🅰", "🅱", "🆎", "🆑", "🅾", "🆘", "❌", "⭕", "🛑", "⛔", "📛", "🚫", "💯", "💢", "♨", "🚷", "🚯", "🚳", "🚱", "🔞", "📵", "🚭", "❗", "❕", "❓", "❔", "‼", "⁉", "🔅", "🔆", "〽", "⚠", "🚸", "🔱", "⚜", "🔰", "♻", "✅", "🈯", "💹", "❇", "✳", "❎", "🌐", "💠", "Ⓜ", "🌀", "💤", "🏧", "🚾", "♿", "🅿", "🛗", "🈳", "🈂", "🛂", "🛃", "🛄", "🛅", "🛜", "🚹", "🚺", "🚼", "⚧", "🚻", "🚮", "🎦", "📶", "🈁", "🔣", "ℹ", "🔤", "🔡", "🔠", "🆖", "🆗", "🆙", "🆒", "🆕", "🆓", "0⃣", "1⃣", "2⃣", "3⃣", "4⃣", "5⃣", "6⃣", "7⃣", "8⃣", "9⃣", "🔟", "🔢", "#⃣", "*⃣", "⏏", "▶", "⏸", "⏯", "⏹", "⏺", "⏭", "⏮", "⏩", "⏪", "⏫", "⏬", "◀", "🔼", "🔽", "➡", "⬅", "⬆", "⬇", "↗", "↘", "↙", "↖", "↕", "↔", "↪", "↩", "⤴", "⤵", "🔀", "🔁", "🔂", "🔄", "🔃", "🎵", "🎶", "➕", "➖", "➗", "✖", "🟰", "♾", "💲", "💱", "™️", "©", "®", "👁‍🗨", "🔚", "🔙", "🔛", "🔝", "🔜", "〰", "➰", "➿", "✔", "☑", "🔘", "🔴", "🟠", "🟡", "🟢", "🔵", "🟣", "⚫", "⚪", "🟤", "🔺", "🔻", "🔸", "🔹", "🔶", "🔷", "🔳", "🔲", "▪", "▫", "◾", "◽", "◼", "◻", "🟥", "🟧", "🟨", "🟩", "🟦", "🟪", "⬛", "⬜", "🟫", "🔈", "🔇", "🔉", "🔊", "🔔", "🔕", "📣", "📢", "💬", "💭", "🗯", "♠", "♣", "♥", "♦", "🃏", "🎴", "🀄", "🕐", "🕑", "🕒", "🕓", "🕔", "🕕", "🕖", "🕗", "🕘", "🕙", "🕚", "🕛", "🕜", "🕝", "🕞", "🕟", "🕠", "🕡", "🕢", "🕣", "🕤", "🕥", "🕦", "🕧" }, new String[]{ "🏳", "🏴", "🏴‍☠", "🏁", "🚩", "🏳‍🌈", "🏳‍⚧", "🇺🇳", "🇦🇫", "🇦🇽", "🇦🇱", "🇩🇿", "🇦🇸", "🇦🇩", "🇦🇴", "🇦🇮", "🇦🇶", "🇦🇬", "🇦🇷", "🇦🇲", "🇦🇼", "🇦🇺", "🇦🇹", "🇦🇿", "🇧🇸", "🇧🇭", "🇧🇩", "🇧🇧", "🇧🇾", "🇧🇪", "🇧🇿", "🇧🇯", "🇧🇲", "🇧🇹", "🇧🇴", "🇧🇦", "🇧🇼", "🇧🇷", "🇻🇬", "🇧🇳", "🇧🇬", "🇧🇫", "🇧🇮", "🇰🇭", "🇨🇲", "🇨🇦", "🇮🇨", "🇨🇻", "🇧🇶", "🇰🇾", "🇨🇫", "🇹🇩", "🇮🇴", "🇨🇱", "🇨🇳", "🇨🇽", "🇨🇨", "🇨🇴", "🇰🇲", "🇨🇬", "🇨🇩", "🇨🇰", "🇨🇷", "🇨🇮", "🇭🇷", "🇨🇺", "🇨🇼", "🇨🇾", "🇨🇿", "🇩🇰", "🇩🇯", "🇩🇲", "🇩🇴", "🇪🇨", "🇪🇬", "🇸🇻", "🇬🇶", "🇪🇷", "🇪🇪", "🇸🇿", "🇪🇹", "🇪🇺", "🇫🇰", "🇫🇴", "🇫🇯", "🇫🇮", "🇫🇷", "🇬🇫", "🇵🇫", "🇹🇫", "🇬🇦", "🇬🇲", "🇬🇪", "🇩🇪", "🇬🇭", "🇬🇮", "🇬🇷", "🇬🇱", "🇬🇩", "🇬🇵", "🇬🇺", "🇬🇹", "🇬🇬", "🇬🇳", "🇬🇼", "🇬🇾", "🇭🇹", "🇭🇳", "🇭🇰", "🇭🇺", "🇮🇸", "🇮🇳", "🇮🇩", "🇮🇷", "🇮🇶", "🇮🇪", "🇮🇲", "🇮🇱", "🇮🇹", "🇯🇲", "🇯🇵", "🎌", "🇯🇪", "🇯🇴", "🇰🇿", "🇰🇪", "🇰🇮", "🇽🇰", "🇰🇼", "🇰🇬", "🇱🇦", "🇱🇻", "🇱🇧", "🇱🇸", "🇱🇷", "🇱🇾", "🇱🇮", "🇱🇹", "🇱🇺", "🇲🇴", "🇲🇬", "🇲🇼", "🇲🇾", "🇲🇻", "🇲🇱", "🇲🇹", "🇲🇭", "🇲🇶", "🇲🇷", "🇲🇺", "🇾🇹", "🇲🇽", "🇫🇲", "🇲🇩", "🇲🇨", "🇲🇳", "🇲🇪", "🇲🇸", "🇲🇦", "🇲🇿", "🇲🇲", "🇳🇦", "🇳🇷", "🇳🇵", "🇳🇱", "🇳🇨", "🇳🇿", "🇳🇮", "🇳🇪", "🇳🇬", "🇳🇺", "🇳🇫", "🇰🇵", "🇲🇰", "🇲🇵", "🇳🇴", "🇴🇲", "🇵🇰", "🇵🇼", "🇵🇸", "🇵🇦", "🇵🇬", "🇵🇾", "🇵🇪", "🇵🇭", "🇵🇳", 
"🇵🇱", "🇵🇹", "🇵🇷", "🇶🇦", "🇷🇪", "🇷🇴", "🇷🇺", "🇷🇼", "🇼🇸", "🇸🇲", "🇸🇹", "🇸🇦", "🇸🇳", "🇷🇸", "🇸🇨", "🇸🇱", "🇸🇬", "🇸🇽", "🇸🇰", "🇸🇮", "🇬🇸", "🇸🇧", "🇸🇴", "🇿🇦", "🇰🇷", "🇸🇸", "🇪🇸", "🇱🇰", "🇧🇱", "🇸🇭", "🇰🇳", "🇱🇨", "🇵🇲", "🇻🇨", "🇸🇩", "🇸🇷", "🇸🇪", "🇨🇭", "🇸🇾", "🇹🇼", "🇹🇯", "🇹🇿", "🇹🇭", "🇹🇱", "🇹🇬", "🇹🇰", "🇹🇴", "🇹🇹", "🇹🇳", "🇹🇷", "🇹🇲", "🇹🇨", "🇹🇻", "🇺🇬", "🇺🇦", "🇦🇪", "🇬🇧", "🏴󠁧󠁢󠁥󠁮󠁧󠁿", "🏴󠁧󠁢󠁳󠁣󠁴󠁿", "🏴󠁧󠁢󠁷󠁬󠁳󠁿", "🇺🇸", "🇺🇾", "🇻🇮", "🇺🇿", "🇻🇺", "🇻🇦", "🇻🇪", "🇻🇳", "🇼🇫", "🇪🇭", "🇾🇪", "🇿🇲", "🇿🇼" @@ -535,7 +537,7 @@ public class EmojiData { public static final HashMap emojiAliasMap = new HashMap<>(aliasNew.length); public static boolean isHeartEmoji(String emoji) { - return "❤".equals(emoji) || "🧡".equals(emoji) || "💛".equals(emoji) || "💚".equals(emoji) || "💙".equals(emoji) || "💜".equals(emoji) || "🖤".equals(emoji) || "🤍".equals(emoji) || "🤎".equals(emoji); + return "🩷".equals(emoji) || "❤".equals(emoji) || "🧡".equals(emoji) || "💛".equals(emoji) || "💚".equals(emoji) || "🩵".equals(emoji) || "💙".equals(emoji) || "💜".equals(emoji) || "🖤".equals(emoji) || "🩶".equals(emoji) || "🤍".equals(emoji) || "🤎".equals(emoji); } public static boolean isPeachEmoji(String emoji) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/Fetcher.java b/TMessagesProj/src/main/java/org/telegram/messenger/Fetcher.java new file mode 100644 index 0000000000..9db4bd018f --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/messenger/Fetcher.java @@ -0,0 +1,139 @@ +package org.telegram.messenger; + +import android.util.Pair; + +import java.util.ArrayList; +import java.util.HashMap; + +public abstract class Fetcher { + + protected void getRemote(int currentAccount, Args arguments, long hash, Utilities.Callback3 onResult) { + // Implement this function + } + + // Not specifying getLocal and setLocal would mean that data is cached only in RAM + protected void getLocal(int currentAccount, Args arguments, Utilities.Callback2 onResult) { + // Implement this function + onResult.run(0L, null); + } + + protected void setLocal(int currentAccount, Args arguments, R data, long hash) { + // Implement this function + } + + protected boolean useCache(Args arguments) { + return true; + } + + private final long requestRemotelyTimeout = 4 * 60 * 1000; + + private HashMap, R> cachedResults; + private HashMap, ArrayList>> loadingCallbacks; + private HashMap, Long> lastRequestedRemotely; + + public void fetch(int currentAccount, Args arguments, Utilities.Callback onResult) { + final Pair key = new Pair<>(currentAccount, arguments); + + if (isLoading(key)) { + saveCallback(key, onResult); + return; + } + + R cached = getCachedResult(key); + if (cached != null && !shouldRequest(key)) { + if (onResult != null) { + onResult.run(cached); + } + return; + } + + saveCallback(key, onResult); + getLocal(currentAccount, arguments, (hash, data) -> { + if (shouldRequest(key)) { + saveLastRequested(key); + getRemote(currentAccount, arguments, hash, (notModified, remoteData, newHash) -> { + if (notModified) { + cacheResult(key, data); + callCallbacks(key, data); + } else { + if (remoteData != null) { + setLocal(currentAccount, arguments, remoteData, newHash); + cacheResult(key, remoteData); + } + callCallbacks(key, remoteData); + } + }); + } else { + cacheResult(key, data); + callCallbacks(key, data); + } + }); + } + + private R getCachedResult(Pair key) { + if (cachedResults == null) { + return null; + } + return cachedResults.get(key); + } + + private void cacheResult(Pair key, R result) { + if (!useCache(key.second)) { + return; + } + if (cachedResults == null) { + cachedResults = 
new HashMap<>(); + } + cachedResults.put(key, result); + } + + private void saveLastRequested(Pair key) { + if (lastRequestedRemotely == null) { + lastRequestedRemotely = new HashMap<>(); + } + lastRequestedRemotely.put(key, System.currentTimeMillis()); + } + + private boolean shouldRequest(Pair key) { + Long lastRequested = lastRequestedRemotely != null ? lastRequestedRemotely.get(key) : null; + return lastRequested == null || System.currentTimeMillis() - lastRequested >= requestRemotelyTimeout; + } + + private boolean isLoading(Pair key) { + return loadingCallbacks != null && loadingCallbacks.get(key) != null; + } + + private void saveCallback(Pair key, Utilities.Callback callback) { + if (callback == null) { + return; + } + if (loadingCallbacks == null) { + loadingCallbacks = new HashMap<>(); + } + ArrayList> callbacks = loadingCallbacks.get(key); + if (callbacks == null) { + loadingCallbacks.put(key, callbacks = new ArrayList<>()); + } + callbacks.add(callback); + } + + private void callCallbacks(Pair key, R result) { + if (loadingCallbacks == null) { + return; + } + + final ArrayList> callbacks = loadingCallbacks.get(key); + if (callbacks == null) { + return; + } + + AndroidUtilities.runOnUIThread(() -> { + for (Utilities.Callback callback: callbacks) { + callback.run(result); + } + callbacks.clear(); + }); + + loadingCallbacks.remove(key); + } +} \ No newline at end of file diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/FileLoadOperation.java b/TMessagesProj/src/main/java/org/telegram/messenger/FileLoadOperation.java index f3ec19683d..6482bfb69b 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/FileLoadOperation.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/FileLoadOperation.java @@ -8,6 +8,7 @@ package org.telegram.messenger; +import org.telegram.messenger.utils.ImmutableByteArrayOutputStream; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.NativeByteBuffer; import org.telegram.tgnet.TLObject; @@ -17,7 +18,6 @@ import java.io.File; import java.io.FileInputStream; -import java.io.FileNotFoundException; import java.io.RandomAccessFile; import java.nio.channels.FileChannel; import java.util.ArrayList; @@ -33,12 +33,18 @@ public class FileLoadOperation { + private final static int FINISH_CODE_DEFAULT = 0; + private final static int FINISH_CODE_FILE_ALREADY_EXIST = 1; + public boolean preFinished; + FileLoadOperationStream stream; boolean streamPriority; long streamOffset; public static volatile DispatchQueue filesQueue = new DispatchQueue("writeFileQueue"); + public static ImmutableByteArrayOutputStream filesQueueByteBuffer; private boolean forceSmallChunk; + private Runnable fileWriteRunnable; public void setStream(FileLoadOperationStream stream, boolean streamPriority, long streamOffset) { this.stream = stream; @@ -46,7 +52,12 @@ public void setStream(FileLoadOperationStream stream, boolean streamPriority, lo this.streamPriority = streamPriority; } + public int getPositionInQueue() { + return getQueue().getPosition(this); + } + protected static class RequestInfo { + public long requestStartTime; private int requestToken; private long offset; private TLRPC.TL_upload_file response; @@ -129,7 +140,7 @@ private PreloadRange(long o, long l) { private ArrayList notCheckedCdnRanges; private long requestedBytesCount; - private int currentAccount; + public int currentAccount; private boolean started; private int datacenterId; private int initialDatacenterId; @@ -139,7 +150,7 @@ private PreloadRange(long o, long l) { private 
volatile int state = stateIdle; private volatile boolean paused; private long downloadedBytes; - private long totalBytesCount; + public long totalBytesCount; private long bytesCountPadding; private long streamStartOffset; private long streamPriorityStartOffset; @@ -202,6 +213,7 @@ private PreloadRange(long o, long l) { private FileLoaderPriorityQueue priorityQueue; public interface FileLoadOperationDelegate { + void didPreFinishLoading(FileLoadOperation operation, File finalFile); void didFinishLoadingFile(FileLoadOperation operation, File finalFile); void didFailedLoadingFile(FileLoadOperation operation, int state); void didChangedLoadProgress(FileLoadOperation operation, long uploadedSize, long totalSize); @@ -518,27 +530,41 @@ private void addPart(ArrayList ranges, long start, long end, boolean save if (save) { if (modified) { ArrayList rangesFinal = new ArrayList<>(ranges); - filesQueue.postRunnable(() -> { + if (fileWriteRunnable != null) { + filesQueue.cancelRunnable(fileWriteRunnable); + } + filesQueue.postRunnable(fileWriteRunnable = () -> { long time = System.currentTimeMillis(); try { + if (filePartsStream == null) { + return; + } + int countFinal = rangesFinal.size(); + int bufferSize = 4 + 8 * 2 * countFinal; + if (filesQueueByteBuffer == null) { + filesQueueByteBuffer = new ImmutableByteArrayOutputStream(bufferSize); + } else { + filesQueueByteBuffer.reset(); + } + filesQueueByteBuffer.writeInt(countFinal); + for (int a = 0; a < countFinal; a++) { + Range rangeFinal = rangesFinal.get(a); + filesQueueByteBuffer.writeLong(rangeFinal.start); + filesQueueByteBuffer.writeLong(rangeFinal.end); + } synchronized (FileLoadOperation.this) { if (filePartsStream == null) { return; } filePartsStream.seek(0); - int countFinal = rangesFinal.size(); - filePartsStream.writeInt(countFinal); - for (int a = 0; a < countFinal; a++) { - Range rangeFinal = rangesFinal.get(a); - filePartsStream.writeLong(rangeFinal.start); - filePartsStream.writeLong(rangeFinal.end); - } + filePartsStream.write(filesQueueByteBuffer.buf, 0, bufferSize); } } catch (Exception e) { + FileLog.e(e, false); if (AndroidUtilities.isENOSPC(e)) { LaunchActivity.checkFreeDiscSpaceStatic(1); - } else { - FileLog.e(e); + } else if (AndroidUtilities.isEROFS(e)) { + SharedConfig.checkSdCard(cacheFileFinal); } } totalTime += System.currentTimeMillis() - time; @@ -676,16 +702,17 @@ public boolean start() { return start(stream, streamOffset, streamPriority); } - public boolean start(final FileLoadOperationStream stream, final long streamOffset, final boolean steamPriority) { + public boolean start(final FileLoadOperationStream stream, final long streamOffset, final boolean streamPriority) { startTime = System.currentTimeMillis(); updateParams(); if (currentDownloadChunkSize == 0) { if (isStream) { currentDownloadChunkSize = downloadChunkSizeAnimation; currentMaxDownloadRequests = maxDownloadRequestsAnimation; + } else { + currentDownloadChunkSize = totalBytesCount >= bigFileSizeFrom ? downloadChunkSizeBig : downloadChunkSize; + currentMaxDownloadRequests = totalBytesCount >= bigFileSizeFrom ? maxDownloadRequestsBig : maxDownloadRequests; } - currentDownloadChunkSize = totalBytesCount >= bigFileSizeFrom || isStream ? downloadChunkSizeBig : downloadChunkSize; - currentMaxDownloadRequests = totalBytesCount >= bigFileSizeFrom || isStream ? 
maxDownloadRequestsBig : maxDownloadRequests; } final boolean alreadyStarted = state != stateIdle; final boolean wasPaused = paused; @@ -695,14 +722,14 @@ public boolean start(final FileLoadOperationStream stream, final long streamOffs if (streamListeners == null) { streamListeners = new ArrayList<>(); } - if (steamPriority) { + if (streamPriority) { long offset = (streamOffset / (long) currentDownloadChunkSize) * (long) currentDownloadChunkSize; if (priorityRequestInfo != null && priorityRequestInfo.offset != offset) { requestInfos.remove(priorityRequestInfo); requestedBytesCount -= currentDownloadChunkSize; removePart(notRequestedBytesRanges, priorityRequestInfo.offset, priorityRequestInfo.offset + currentDownloadChunkSize); if (priorityRequestInfo.requestToken != 0) { - ConnectionsManager.getInstance(currentAccount).cancelRequest(priorityRequestInfo.requestToken, true); + ConnectionsManager.getInstance(currentAccount).cancelRequest(priorityRequestInfo.requestToken, false); requestsCount--; } if (BuildVars.DEBUG_VERSION) { @@ -872,6 +899,10 @@ public boolean start(final FileLoadOperationStream stream, final long streamOffs } catch (Exception e) { if (AndroidUtilities.isENOSPC(e)) { LaunchActivity.checkFreeDiscSpaceStatic(1); + FileLog.e(e, false); + } else if (AndroidUtilities.isEROFS(e)) { + SharedConfig.checkSdCard(cacheFileFinal); + FileLog.e(e, false); } else { FileLog.e(e); } @@ -972,7 +1003,7 @@ public boolean start(final FileLoadOperationStream stream, final long streamOffs } } } catch (Exception e) { - FileLog.e(e); + FileLog.e(e, !AndroidUtilities.isFilNotFoundException(e)); } } @@ -987,7 +1018,7 @@ public boolean start(final FileLoadOperationStream stream, final long streamOffs } else { long totalDownloadedLen = cacheFileTemp.length(); if (fileNameIv != null && (totalDownloadedLen % currentDownloadChunkSize) != 0) { - requestedBytesCount = 0; + requestedBytesCount = 0; } else { requestedBytesCount = downloadedBytes = (cacheFileTemp.length()) / ((long) currentDownloadChunkSize) * currentDownloadChunkSize; } @@ -1015,7 +1046,7 @@ public boolean start(final FileLoadOperationStream stream, final long streamOffs if (isPreloadVideoOperation) { FileLog.d("start preloading file to temp = " + cacheFileTemp); } else { - FileLog.d("start loading file to temp = " + cacheFileTemp + " final = " + cacheFileFinal); + FileLog.d("start loading file to temp = " + cacheFileTemp + " final = " + cacheFileFinal + " priority" + priority); } } @@ -1035,6 +1066,10 @@ public boolean start(final FileLoadOperationStream stream, final long streamOffs requestedBytesCount = downloadedBytes = 0; if (AndroidUtilities.isENOSPC(e)) { LaunchActivity.checkFreeDiscSpaceStatic(1); + FileLog.e(e, false); + } else if (AndroidUtilities.isEROFS(e)) { + SharedConfig.checkSdCard(cacheFileFinal); + FileLog.e(e, false); } else { FileLog.e(e); } @@ -1050,12 +1085,16 @@ public boolean start(final FileLoadOperationStream stream, final long streamOffs fileOutputStream.seek(downloadedBytes); } } catch (Exception e) { + FileLog.e(e, false); if (AndroidUtilities.isENOSPC(e)) { LaunchActivity.checkFreeDiscSpaceStatic(1); onFail(true, -1); return false; - } else { + } else if (AndroidUtilities.isEROFS(e)) { + SharedConfig.checkSdCard(cacheFileFinal); FileLog.e(e, false); + onFail(true, -1); + return false; } } if (fileOutputStream == null) { @@ -1066,7 +1105,7 @@ public boolean start(final FileLoadOperationStream stream, final long streamOffs Utilities.stageQueue.postRunnable(() -> { if (totalBytesCount != 0 && 
(isPreloadVideoOperation && preloaded[0] || downloadedBytes == totalBytesCount)) { try { - onFinishLoadingFile(false); + onFinishLoadingFile(false, FINISH_CODE_FILE_ALREADY_EXIST); } catch (Exception e) { onFail(true, 0); } @@ -1077,16 +1116,20 @@ public boolean start(final FileLoadOperationStream stream, final long streamOffs } else { started = true; try { - onFinishLoadingFile(false); + onFinishLoadingFile(false, FINISH_CODE_FILE_ALREADY_EXIST); if (pathSaveData != null) { - delegate.saveFilePath(pathSaveData, null); + delegate.saveFilePath(pathSaveData, cacheFileFinal); } } catch (Exception e) { + FileLog.e(e, false); if (AndroidUtilities.isENOSPC(e)) { LaunchActivity.checkFreeDiscSpaceStatic(1); onFail(true, -1); + } if (AndroidUtilities.isEROFS(e)) { + SharedConfig.checkSdCard(cacheFileFinal); + onFail(true, -1); + return false; } else { - FileLog.e(e, false); onFail(true, 0); } } @@ -1141,17 +1184,10 @@ public void cancel() { cancel(false); } - public void cancel(boolean deleteFiles) { + private void cancel(boolean deleteFiles) { Utilities.stageQueue.postRunnable(() -> { if (state != stateFinished && state != stateFailed) { - if (requestInfos != null) { - for (int a = 0; a < requestInfos.size(); a++) { - RequestInfo requestInfo = requestInfos.get(a); - if (requestInfo.requestToken != 0) { - ConnectionsManager.getInstance(currentAccount).cancelRequest(requestInfo.requestToken, true); - } - } - } + cancelRequests(); onFail(false, 1); } if (deleteFiles) { @@ -1204,6 +1240,17 @@ public void cancel(boolean deleteFiles) { }); } + private void cancelRequests() { + if (requestInfos != null) { + for (int a = 0; a < requestInfos.size(); a++) { + RequestInfo requestInfo = requestInfos.get(a); + if (requestInfo.requestToken != 0) { + ConnectionsManager.getInstance(currentAccount).cancelRequest(requestInfo.requestToken, false); + } + } + } + } + private void cleanup() { try { if (fileOutputStream != null) { @@ -1286,7 +1333,7 @@ private void cleanup() { } } - private void onFinishLoadingFile(final boolean increment) { + private void onFinishLoadingFile(final boolean increment, int finishCode) { if (state != stateDownloading) { return; } @@ -1296,7 +1343,11 @@ private void onFinishLoadingFile(final boolean increment) { if (isPreloadVideoOperation) { preloadFinished = true; if (BuildVars.DEBUG_VERSION) { - FileLog.d("finished preloading file to " + cacheFileTemp + " loaded " + totalPreloadedBytes + " of " + totalBytesCount); + if (finishCode == FINISH_CODE_FILE_ALREADY_EXIST) { + FileLog.d("file already exist " + cacheFileTemp); + } else { + FileLog.d("finished preloading file to " + cacheFileTemp + " loaded " + totalPreloadedBytes + " of " + totalBytesCount); + } } if (fileMetadata != null) { if (cacheFileTemp != null) { @@ -1312,7 +1363,7 @@ private void onFinishLoadingFile(final boolean increment) { final File cacheFilePartsFinal = cacheFileParts; final File cacheFilePreloadFinal = cacheFilePreload; final File cacheFileTempFinal = cacheFileTemp; - Utilities.globalQueue.postRunnable(() -> { + filesQueue.postRunnable(() -> { if (cacheIvTempFinal != null) { cacheIvTempFinal.delete(); } @@ -1335,7 +1386,7 @@ private void onFinishLoadingFile(final boolean increment) { } catch (ZipException zipException) { ungzip = false; } catch (Throwable e) { - FileLog.e(e, !(e instanceof FileNotFoundException)); + FileLog.e(e, !AndroidUtilities.isFilNotFoundException(e)); if (BuildVars.LOGS_ENABLED) { FileLog.e("unable to ungzip temp = " + cacheFileTempFinal + " to final = " + cacheFileFinal); } @@ -1384,7 +1435,7 
@@ private void onFinishLoadingFile(final boolean increment) { state = stateDownloading; Utilities.stageQueue.postRunnable(() -> { try { - onFinishLoadingFile(increment); + onFinishLoadingFile(increment, FINISH_CODE_DEFAULT); } catch (Exception e) { onFail(false, 0); } @@ -1406,7 +1457,7 @@ private void onFinishLoadingFile(final boolean increment) { } Utilities.stageQueue.postRunnable(() -> { if (BuildVars.LOGS_ENABLED) { - FileLog.d("finished downloading file to " + cacheFileFinal + " time = " + (System.currentTimeMillis() - startTime)); + FileLog.d("finished downloading file to " + cacheFileFinal + " time = " + (System.currentTimeMillis() - startTime) + " dc = " + datacenterId + " size = " + AndroidUtilities.formatFileSize(totalBytesCount)); } if (increment) { if (currentType == ConnectionsManager.FileTypeAudio) { @@ -1416,7 +1467,11 @@ private void onFinishLoadingFile(final boolean increment) { } else if (currentType == ConnectionsManager.FileTypePhoto) { StatsController.getInstance(currentAccount).incrementReceivedItemsCount(ApplicationLoader.getCurrentNetworkType(), StatsController.TYPE_PHOTOS, 1); } else if (currentType == ConnectionsManager.FileTypeFile) { - StatsController.getInstance(currentAccount).incrementReceivedItemsCount(ApplicationLoader.getCurrentNetworkType(), StatsController.TYPE_FILES, 1); + if (ext != null && (ext.toLowerCase().endsWith("mp3") || ext.toLowerCase().endsWith("m4a"))) { + StatsController.getInstance(currentAccount).incrementReceivedItemsCount(ApplicationLoader.getCurrentNetworkType(), StatsController.TYPE_MUSIC, 1); + } else { + StatsController.getInstance(currentAccount).incrementReceivedItemsCount(ApplicationLoader.getCurrentNetworkType(), StatsController.TYPE_FILES, 1); + } } } delegate.didFinishLoadingFile(FileLoadOperation.this, cacheFileFinal); @@ -1425,6 +1480,7 @@ private void onFinishLoadingFile(final boolean increment) { cacheIvTemp = null; cacheFileParts = null; cacheFilePreload = null; + delegate.didPreFinishLoading(FileLoadOperation.this, cacheFileFinal); } } @@ -1558,7 +1614,7 @@ protected boolean processRequestResult(RequestInfo requestInfo, TLRPC.TL_error e bytes = null; } if (bytes == null || bytes.limit() == 0) { - onFinishLoadingFile(true); + onFinishLoadingFile(true, FINISH_CODE_DEFAULT); return false; } int currentBytesSize = bytes.limit(); @@ -1750,15 +1806,18 @@ protected boolean processRequestResult(RequestInfo requestInfo, TLRPC.TL_error e } if (finishedDownloading) { - onFinishLoadingFile(true); - } else if (state != stateCanceled){ + onFinishLoadingFile(true, FINISH_CODE_DEFAULT); + } else if (state != stateCanceled) { startDownloadRequest(); } } catch (Exception e) { + FileLog.e(e, !AndroidUtilities.isFilNotFoundException(e) && !AndroidUtilities.isENOSPC(e)); if (AndroidUtilities.isENOSPC(e)) { onFail(false, -1); + } else if (AndroidUtilities.isEROFS(e)) { + SharedConfig.checkSdCard(cacheFileFinal); + onFail(true, -1); } else { - FileLog.e(e); onFail(false, 0); } } @@ -1790,7 +1849,7 @@ protected boolean processRequestResult(RequestInfo requestInfo, TLRPC.TL_error e } else if (error.text.contains("OFFSET_INVALID")) { if (downloadedBytes % currentDownloadChunkSize == 0) { try { - onFinishLoadingFile(true); + onFinishLoadingFile(true, FINISH_CODE_DEFAULT); } catch (Exception e) { FileLog.e(e); onFail(false, 0); @@ -1818,6 +1877,14 @@ protected void onFail(boolean thread, final int reason) { cleanup(); state = reason == 1 ? 
stateCanceled : stateFailed; if (delegate != null) { + if (BuildVars.LOGS_ENABLED) { + long time = startTime == 0 ? 0 : (System.currentTimeMillis() - startTime); + if (reason == 1) { + FileLog.d("cancel downloading file to " + cacheFileFinal + " time = " + time + " dc = " + datacenterId + " size = " + AndroidUtilities.formatFileSize(totalBytesCount)); + } else { + FileLog.d("failed downloading file to " + cacheFileFinal + " reason = " + reason + " time = " + time + " dc = " + datacenterId + " size = " + AndroidUtilities.formatFileSize(totalBytesCount)); + } + } if (thread) { Utilities.stageQueue.postRunnable(() -> delegate.didFailedLoadingFile(FileLoadOperation.this, reason)); } else { @@ -1840,7 +1907,7 @@ private void clearOperaion(RequestInfo currentInfo, boolean preloadChanged) { continue; } if (info.requestToken != 0) { - ConnectionsManager.getInstance(currentAccount).cancelRequest(info.requestToken, true); + ConnectionsManager.getInstance(currentAccount).cancelRequest(info.requestToken, false); } } requestInfos.clear(); @@ -1876,7 +1943,7 @@ private void requestReference(RequestInfo requestInfo) { if (requestingReference) { return; } - clearOperaion(requestInfo, false); + clearOperaion(null, false); requestingReference = true; if (parentObject instanceof MessageObject) { MessageObject messageObject = (MessageObject) parentObject; @@ -1884,15 +1951,22 @@ private void requestReference(RequestInfo requestInfo) { parentObject = messageObject.messageOwner.media.webpage; } } + if (BuildVars.LOGS_ENABLED) { + FileLog.d("debug_loading: " + cacheFileFinal.getName() + " file reference expired "); + } FileRefController.getInstance(currentAccount).requestReference(parentObject, location, this, requestInfo); } protected void startDownloadRequest() { - if (paused || reuploadingCdn || - state != stateDownloading || + if (BuildVars.DEBUG_PRIVATE_VERSION) { + if (Utilities.stageQueue != null && Utilities.stageQueue.getHandler() != null && Thread.currentThread() != Utilities.stageQueue.getHandler().getLooper().getThread()) { + throw new RuntimeException("Wrong thread!!!"); + } + } + if (paused || reuploadingCdn || state != stateDownloading || requestingReference || streamPriorityStartOffset == 0 && ( !nextPartWasPreloaded && (requestInfos.size() + delayedRequestInfos.size() >= currentMaxDownloadRequests) || - isPreloadVideoOperation && (requestedBytesCount > preloadMaxBytes || moovFound != 0 && requestInfos.size() > 0))) { + isPreloadVideoOperation && (requestedBytesCount > preloadMaxBytes || moovFound != 0 && requestInfos.size() > 0))) { return; } int count = 1; @@ -1925,7 +1999,7 @@ protected void startDownloadRequest() { tries--; } if (!found && requestInfos.isEmpty()) { - onFinishLoadingFile(false); + onFinishLoadingFile(false, FINISH_CODE_DEFAULT); } } else { downloadOffset = nextPreloadDownloadOffset; @@ -2048,10 +2122,17 @@ protected void startDownloadRequest() { } } requestInfo.forceSmallChunk = forceSmallChunk; - requestInfo.requestToken = ConnectionsManager.getInstance(currentAccount).sendRequest(request, (response, error) -> { + if (BuildVars.LOGS_ENABLED) { + requestInfo.requestStartTime = System.currentTimeMillis(); + } + int datacenterId = isCdn ? 
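// Minimal sketch of the debug-only "wrong thread" guard added to startDownloadRequest() above:
// fail fast when a method that must run on a dedicated worker Looper is entered from any other
// thread. Handler/Looper are standard android.os APIs; the debug flag is passed in here because
// the project's BuildVars flag is not assumed.
import android.os.Handler;

final class ThreadGuard {
    static void assertOnHandlerThread(Handler handler, boolean debugBuild) {
        if (!debugBuild || handler == null) {
            return;
        }
        Thread expected = handler.getLooper().getThread();
        if (Thread.currentThread() != expected) {
            throw new IllegalStateException(
                    "Expected to run on " + expected.getName()
                            + " but was " + Thread.currentThread().getName());
        }
    }
}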
cdnDatacenterId : this.datacenterId; + requestInfo.requestToken = ConnectionsManager.getInstance(currentAccount).sendRequestSync(request, (response, error) -> { if (!requestInfos.contains(requestInfo)) { return; } + if (BuildVars.LOGS_ENABLED) { + FileLog.d("debug_loading: " + cacheFileFinal.getName() + " time=" + (System.currentTimeMillis() - requestInfo.requestStartTime) + " dcId=" + datacenterId + " cdn=" + isCdn + " conType=" + connectionType + " reqId" + requestInfo.requestToken); + } if (requestInfo == priorityRequestInfo) { if (BuildVars.DEBUG_VERSION) { FileLog.d("frame get request completed " + priorityRequestInfo.offset); @@ -2131,7 +2212,7 @@ protected void startDownloadRequest() { onFail(false, 0); } } - }, null, null, 0, datacenterId, ConnectionsManager.ConnectionTypeGeneric, true); + }, null, null, 0, this.datacenterId, ConnectionsManager.ConnectionTypeGeneric, true); } } else { if (response instanceof TLRPC.TL_upload_file) { @@ -2152,12 +2233,19 @@ protected void startDownloadRequest() { } else if (currentType == ConnectionsManager.FileTypePhoto) { StatsController.getInstance(currentAccount).incrementReceivedBytesCount(response.networkType, StatsController.TYPE_PHOTOS, response.getObjectSize() + 4); } else if (currentType == ConnectionsManager.FileTypeFile) { - StatsController.getInstance(currentAccount).incrementReceivedBytesCount(response.networkType, StatsController.TYPE_FILES, response.getObjectSize() + 4); + if (ext != null && (ext.toLowerCase().endsWith("mp3") || ext.toLowerCase().endsWith("m4a"))) { + StatsController.getInstance(currentAccount).incrementReceivedBytesCount(response.networkType, StatsController.TYPE_MUSIC, response.getObjectSize() + 4); + } else { + StatsController.getInstance(currentAccount).incrementReceivedBytesCount(response.networkType, StatsController.TYPE_FILES, response.getObjectSize() + 4); + } } } processRequestResult(requestInfo, error); } - }, null, null, flags, isCdn ? 
cdnDatacenterId : datacenterId, connectionType, isLast); + }, null, null, flags, datacenterId, connectionType, isLast); + if (BuildVars.LOGS_ENABLED) { + FileLog.d("debug_loading: " + cacheFileFinal.getName() + " send reqId " + requestInfo.requestToken); + } requestsCount++; } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/FileLoader.java b/TMessagesProj/src/main/java/org/telegram/messenger/FileLoader.java index 0de722a85d..a3f1e543a9 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/FileLoader.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/FileLoader.java @@ -46,7 +46,7 @@ public static FilePathDatabase.FileMeta getFileMetadataFromParent(int currentAcc String str = (String) parentObject; if (str.startsWith("sent_")) { if (sentPattern == null) { - sentPattern = Pattern.compile("sent_.*_([0-9]+)_([0-9]+)_([0-9]+)"); + sentPattern = Pattern.compile("sent_.*_([0-9]+)_([0-9]+)_([0-9]+)_([0-9]+)"); } try { Matcher matcher = sentPattern.matcher(str); @@ -55,6 +55,7 @@ public static FilePathDatabase.FileMeta getFileMetadataFromParent(int currentAcc fileMeta.messageId = Integer.parseInt(matcher.group(1)); fileMeta.dialogId = Long.parseLong(matcher.group(2)); fileMeta.messageType = Integer.parseInt(matcher.group(3)); + fileMeta.messageSize = Long.parseLong(matcher.group(4)); return fileMeta; } } catch (Exception e) { @@ -67,11 +68,34 @@ public static FilePathDatabase.FileMeta getFileMetadataFromParent(int currentAcc fileMeta.messageId = messageObject.getId(); fileMeta.dialogId = messageObject.getDialogId(); fileMeta.messageType = messageObject.type; + fileMeta.messageSize = messageObject.getSize(); return fileMeta; } return null; } + public static TLRPC.VideoSize getVectorMarkupVideoSize(TLRPC.Photo photo) { + if (photo == null || photo.video_sizes == null) { + return null; + } + for (int i = 0; i < photo.video_sizes.size(); i++) { + TLRPC.VideoSize videoSize = photo.video_sizes.get(i); + if (videoSize instanceof TLRPC.TL_videoSizeEmojiMarkup || videoSize instanceof TLRPC.TL_videoSizeStickerMarkup) { + return videoSize; + } + } + return null; + } + + public static TLRPC.VideoSize getEmojiMarkup(ArrayList video_sizes) { + for (int i = 0; i < video_sizes.size(); i++) { + if (video_sizes.get(i) instanceof TLRPC.TL_videoSizeEmojiMarkup || video_sizes.get(i) instanceof TLRPC.TL_videoSizeStickerMarkup) { + return video_sizes.get(i); + } + } + return null; + } + private int getPriorityValue(int priorityType) { if (priorityType == PRIORITY_STREAM) { return Integer.MAX_VALUE; @@ -123,10 +147,13 @@ public interface FileLoaderDelegate { public static final int IMAGE_TYPE_SVG_WHITE = 4; public static final int IMAGE_TYPE_THEME_PREVIEW = 5; - private final FileLoaderPriorityQueue largeFilesQueue = new FileLoaderPriorityQueue("large files queue", 2); - private final FileLoaderPriorityQueue filesQueue = new FileLoaderPriorityQueue("files queue", 3); - private final FileLoaderPriorityQueue imagesQueue = new FileLoaderPriorityQueue("imagesQueue queue", 6); - private final FileLoaderPriorityQueue audioQueue = new FileLoaderPriorityQueue("audioQueue queue", 3); +// private final FileLoaderPriorityQueue largeFilesQueue = new FileLoaderPriorityQueue("large files queue", 2); +// private final FileLoaderPriorityQueue filesQueue = new FileLoaderPriorityQueue("files queue", 3); +// private final FileLoaderPriorityQueue imagesQueue = new FileLoaderPriorityQueue("imagesQueue queue", 6); +// private final FileLoaderPriorityQueue audioQueue = new 
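// Standalone sketch of parsing the "sent_..." parent key with the extra size capture group added
// above. FileMetaLite stands in for FilePathDatabase.FileMeta; the regex matches the one in the hunk.
import java.util.regex.Matcher;
import java.util.regex.Pattern;

final class SentKeyParser {
    // groups: message id, dialog id, message type, message size
    private static final Pattern SENT =
            Pattern.compile("sent_.*_([0-9]+)_([0-9]+)_([0-9]+)_([0-9]+)");

    static final class FileMetaLite {
        int messageId;
        long dialogId;
        int messageType;
        long messageSize;
    }

    static FileMetaLite parse(String parentKey) {
        Matcher m = SENT.matcher(parentKey);
        if (!m.matches()) {
            return null;
        }
        FileMetaLite meta = new FileMetaLite();
        meta.messageId = Integer.parseInt(m.group(1));
        meta.dialogId = Long.parseLong(m.group(2));
        meta.messageType = Integer.parseInt(m.group(3));
        meta.messageSize = Long.parseLong(m.group(4));
        return meta;
    }
}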
FileLoaderPriorityQueue("audioQueue queue", 3); + + private final FileLoaderPriorityQueue[] smallFilesQueue = new FileLoaderPriorityQueue[5]; + private final FileLoaderPriorityQueue[] largeFilesQueue = new FileLoaderPriorityQueue[5]; public final static long DEFAULT_MAX_FILE_SIZE = 1024L * 1024L * 2000L; public final static long DEFAULT_MAX_FILE_SIZE_PREMIUM = DEFAULT_MAX_FILE_SIZE * 2L; @@ -136,16 +163,16 @@ public interface FileLoaderDelegate { private volatile static DispatchQueue fileLoaderQueue = new DispatchQueue("fileUploadQueue"); private final FilePathDatabase filePathDatabase; - private LinkedList uploadOperationQueue = new LinkedList<>(); - private LinkedList uploadSmallOperationQueue = new LinkedList<>(); - private ConcurrentHashMap uploadOperationPaths = new ConcurrentHashMap<>(); - private ConcurrentHashMap uploadOperationPathsEnc = new ConcurrentHashMap<>(); + private final LinkedList uploadOperationQueue = new LinkedList<>(); + private final LinkedList uploadSmallOperationQueue = new LinkedList<>(); + private final ConcurrentHashMap uploadOperationPaths = new ConcurrentHashMap<>(); + private final ConcurrentHashMap uploadOperationPathsEnc = new ConcurrentHashMap<>(); private int currentUploadOperationsCount = 0; private int currentUploadSmallOperationsCount = 0; - private ConcurrentHashMap loadOperationPaths = new ConcurrentHashMap<>(); - private ConcurrentHashMap loadOperationPathsUI = new ConcurrentHashMap<>(10, 1, 2); + private final ConcurrentHashMap loadOperationPaths = new ConcurrentHashMap<>(); + private final ConcurrentHashMap loadOperationPathsUI = new ConcurrentHashMap<>(10, 1, 2); private HashMap uploadSizes = new HashMap<>(); private HashMap loadingVideos = new HashMap<>(); @@ -186,6 +213,11 @@ public static FileLoader getInstance(int num) { public FileLoader(int instance) { super(instance); filePathDatabase = new FilePathDatabase(instance); + for (int i = 0; i < smallFilesQueue.length; i++) { + smallFilesQueue[i] = new FileLoaderPriorityQueue("smallFilesQueue dc" + (i + 1), 5); + largeFilesQueue[i] = new FileLoaderPriorityQueue("largeFilesQueue dc" + (i + 1), 1); + } + dumpFilesQueue(); } public static void setMediaDirs(SparseArray dirs) { @@ -201,12 +233,14 @@ public static File getDirectory(int type) { if (dir == null && type != FileLoader.MEDIA_DIR_CACHE) { dir = mediaDirs.get(FileLoader.MEDIA_DIR_CACHE); } - try { - if (dir != null && !dir.isDirectory()) { - dir.mkdirs(); + if (BuildVars.NO_SCOPED_STORAGE) { + try { + if (dir != null && !dir.isDirectory()) { + dir.mkdirs(); + } + } catch (Exception e) { + //don't promt } - } catch (Exception e) { - //don't promt } return dir; } @@ -644,13 +678,9 @@ private FileLoadOperation loadFileInternal(final TLRPC.Document document, final getDownloadController().startDownloadFile(document, (MessageObject) parentObject); } - FileLoadOperation operation = loadOperationPaths.get(fileName); - if (BuildVars.LOGS_ENABLED) { - FileLog.d("checkFile operation fileName=" + fileName + " documentName=" + getDocumentFileName(document) + " operation=" + operation); - } - if (stream != null) { - priority = PRIORITY_STREAM; - } + final String finalFileName = fileName; + FileLoadOperation operation = loadOperationPaths.get(finalFileName); + priority = getPriorityValue(priority); if (operation != null) { @@ -663,6 +693,7 @@ private FileLoadOperation loadFileInternal(final TLRPC.Document document, final operation.getQueue().add(operation); operation.updateProgress(); operation.getQueue().checkLoadingOperations(); + FileLog.d("load 
operation update position fileName=" + finalFileName + " position in queue " + operation.getPositionInQueue() + " account=" + currentAccount); return operation; } @@ -716,16 +747,11 @@ private FileLoadOperation loadFileInternal(final TLRPC.Document document, final } } FileLoaderPriorityQueue loaderQueue; - if (type == MEDIA_DIR_AUDIO) { - loaderQueue = audioQueue; - } else if (secureDocument != null || location != null && (imageLocation == null || imageLocation.imageType != IMAGE_TYPE_ANIMATION) || MessageObject.isImageWebDocument(webDocument) || MessageObject.isStickerDocument(document) || MessageObject.isAnimatedStickerDocument(document, true) || MessageObject.isVideoStickerDocument(document)) { - loaderQueue = imagesQueue; + int index = Utilities.clamp(operation.getDatacenterId() - 1, 4, 0); + if (operation.totalBytesCount > 20 * 1024 * 1024) {//20mb + loaderQueue = largeFilesQueue[index]; } else { - if (document == null || document.size > 20 * 1024 * 1024) { - loaderQueue = largeFilesQueue; - } else { - loaderQueue = filesQueue; - } + loaderQueue = smallFilesQueue[index]; } String storeFileName = fileName; @@ -747,7 +773,6 @@ private FileLoadOperation loadFileInternal(final TLRPC.Document document, final storeDir = getDirectory(type); boolean saveCustomPath = false; - if ((type == MEDIA_DIR_IMAGE || type == MEDIA_DIR_VIDEO) && canSaveToPublicStorage(parentObject)) { File newDir; if (type == MEDIA_DIR_IMAGE) { @@ -786,6 +811,20 @@ private FileLoadOperation loadFileInternal(final TLRPC.Document document, final final int finalType = type; FileLoadOperation.FileLoadOperationDelegate fileLoadOperationDelegate = new FileLoadOperation.FileLoadOperationDelegate() { + + @Override + public void didPreFinishLoading(FileLoadOperation operation, File finalFile) { + FileLoaderPriorityQueue queue = operation.getQueue(); + fileLoaderQueue.postRunnable(() -> { + FileLoadOperation currentOperation = loadOperationPaths.remove(fileName); + if (currentOperation != null) { + currentOperation.preFinished = true; + queue.checkLoadingOperations(); + } + }); + checkDownloadQueue(operation.getQueue(), fileName); + } + @Override public void didFinishLoadingFile(FileLoadOperation operation, File finalFile) { if (!operation.isPreloadVideoOperation() && operation.isPreloadFinished()) { @@ -846,16 +885,16 @@ public boolean hasAnotherRefOnFile(String path) { }; operation.setDelegate(fileLoadOperationDelegate); - loadOperationPaths.put(fileName, operation); + loadOperationPaths.put(finalFileName, operation); operation.setPriority(priority); operation.setStream(stream, streamPriority, streamOffset); - if (BuildVars.LOGS_ENABLED) { - FileLog.d("loadFileInternal fileName=" + fileName + " documentName=" + getDocumentFileName(document)); - } - loaderQueue.add(operation); loaderQueue.checkLoadingOperations(); + + if (BuildVars.LOGS_ENABLED) { + FileLog.d("create load operation fileName=" + finalFileName + " documentName=" + getDocumentFileName(document) + "size=" + AndroidUtilities.formatFileSize(operation.totalBytesCount) + " position in queue " + operation.getPositionInQueue() + " account=" + currentAccount); + } return operation; } @@ -871,16 +910,27 @@ private boolean canSaveAsFile(Object parentObject) { } private boolean canSaveToPublicStorage(Object parentObject) { - if (SharedConfig.saveToGalleryFlags == 0 || BuildVars.NO_SCOPED_STORAGE) { + if (BuildVars.NO_SCOPED_STORAGE) { return false; } - if (parentObject instanceof MessageObject) { - MessageObject messageObject = (MessageObject) parentObject; + 
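// Sketch of the new queue routing above: one small-files and one large-files queue per
// datacenter, picked by clamping the dc id into the array range and by a 20 MB size threshold.
// The generic Q parameter stands in for FileLoaderPriorityQueue; clamp mirrors the
// (value, max, min) argument order used in the hunk.
final class QueueRouter {
    static final long LARGE_FILE_THRESHOLD = 20L * 1024L * 1024L; // 20 MB

    static int clamp(int value, int max, int min) {
        return Math.max(min, Math.min(max, value));
    }

    static <Q> Q pick(Q[] smallQueues, Q[] largeQueues, int datacenterId, long totalBytes) {
        int index = clamp(datacenterId - 1, smallQueues.length - 1, 0);
        return totalBytes > LARGE_FILE_THRESHOLD ? largeQueues[index] : smallQueues[index];
    }
}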
FilePathDatabase.FileMeta metadata = getFileMetadataFromParent(currentAccount, parentObject); + MessageObject messageObject = null; + if (metadata != null) { int flag; - long dialogId = messageObject.getDialogId(); - if (messageObject.isRoundVideo() || messageObject.isVoice() || messageObject.isAnyKindOfSticker() || getMessagesController().isChatNoForwards(getMessagesController().getChat(-dialogId)) || messageObject.messageOwner.noforwards || DialogObject.isEncryptedDialog(dialogId)) { + long dialogId = metadata.dialogId; + if (getMessagesController().isChatNoForwards(getMessagesController().getChat(-dialogId)) || DialogObject.isEncryptedDialog(dialogId)) { return false; } + if (parentObject instanceof MessageObject) { + messageObject = (MessageObject) parentObject; + if (messageObject.isRoundVideo() || messageObject.isVoice() || messageObject.isAnyKindOfSticker() || messageObject.messageOwner.noforwards) { + return false; + } + } else { + if (metadata.messageType == MessageObject.TYPE_ROUND_VIDEO || metadata.messageType == MessageObject.TYPE_STICKER || metadata.messageType == MessageObject.TYPE_VOICE) { + return false; + } + } if (dialogId >= 0) { flag = SharedConfig.SAVE_TO_GALLERY_FLAG_PEER; } else { @@ -891,7 +941,7 @@ private boolean canSaveToPublicStorage(Object parentObject) { } } - if ((SharedConfig.saveToGalleryFlags & flag) != 0) { + if (SaveToGallerySettingsHelper.needSave(flag, metadata, messageObject, currentAccount)) { return true; } } @@ -935,11 +985,11 @@ private void loadFile(final TLRPC.Document document, final SecureDocument secure fileLoaderQueue.postRunnable(runnable); } - protected FileLoadOperation loadStreamFile(final FileLoadOperationStream stream, final TLRPC.Document document, final ImageLocation location, final Object parentObject, final long offset, final boolean priority) { + protected FileLoadOperation loadStreamFile(final FileLoadOperationStream stream, final TLRPC.Document document, final ImageLocation location, final Object parentObject, final long offset, final boolean priority, int loadingPriority) { final CountDownLatch semaphore = new CountDownLatch(1); final FileLoadOperation[] result = new FileLoadOperation[1]; fileLoaderQueue.postRunnable(() -> { - result[0] = loadFileInternal(document, null, null, document == null && location != null ? location.location : null, location, parentObject, document == null && location != null ? "mp4" : null, document == null && location != null ? location.currentSize : 0, 1, stream, offset, priority, document == null ? 1 : 0); + result[0] = loadFileInternal(document, null, null, document == null && location != null ? location.location : null, location, parentObject, document == null && location != null ? "mp4" : null, document == null && location != null ? location.currentSize : 0, loadingPriority, stream, offset, priority, document == null ? 
1 : 0); semaphore.countDown(); }); try { @@ -1195,13 +1245,64 @@ public static TLRPC.PhotoSize getClosestPhotoSizeWithSize(ArrayList 100 && closestObject.location != null && closestObject.location.dc_id == Integer.MIN_VALUE || obj instanceof TLRPC.TL_photoCachedSize || side > lastSide && lastSide < currentSide) { + if ( + closestObject == null || + side > 100 && closestObject.location != null && closestObject.location.dc_id == Integer.MIN_VALUE || + obj instanceof TLRPC.TL_photoCachedSize || side > lastSide && lastSide < currentSide + ) { + closestObject = obj; + lastSide = currentSide; + } + } else { + int currentSide = Math.max(obj.w, obj.h); + if ( + closestObject == null || + side > 100 && closestObject.location != null && closestObject.location.dc_id == Integer.MIN_VALUE || + obj instanceof TLRPC.TL_photoCachedSize || + currentSide <= side && lastSide < currentSide + ) { + closestObject = obj; + lastSide = currentSide; + } + } + } + return closestObject; + } + + public static TLRPC.VideoSize getClosestVideoSizeWithSize(ArrayList sizes, int side) { + return getClosestVideoSizeWithSize(sizes, side, false); + } + + public static TLRPC.VideoSize getClosestVideoSizeWithSize(ArrayList sizes, int side, boolean byMinSide) { + return getClosestVideoSizeWithSize(sizes, side, byMinSide, false); + } + + public static TLRPC.VideoSize getClosestVideoSizeWithSize(ArrayList sizes, int side, boolean byMinSide, boolean ignoreStripped) { + if (sizes == null || sizes.isEmpty()) { + return null; + } + int lastSide = 0; + TLRPC.VideoSize closestObject = null; + for (int a = 0; a < sizes.size(); a++) { + TLRPC.VideoSize obj = sizes.get(a); + if (obj == null || obj instanceof TLRPC.TL_videoSizeEmojiMarkup || obj instanceof TLRPC.TL_videoSizeStickerMarkup) { + continue; + } + if (byMinSide) { + int currentSide = Math.min(obj.h, obj.w); + if (closestObject == null || + side > 100 && closestObject.location != null && closestObject.location.dc_id == Integer.MIN_VALUE || + side > lastSide && lastSide < currentSide) { closestObject = obj; lastSide = currentSide; } } else { int currentSide = Math.max(obj.w, obj.h); - if (closestObject == null || side > 100 && closestObject.location != null && closestObject.location.dc_id == Integer.MIN_VALUE || obj instanceof TLRPC.TL_photoCachedSize || currentSide <= side && lastSide < currentSide) { + if ( + closestObject == null || + side > 100 && closestObject.location != null && closestObject.location.dc_id == Integer.MIN_VALUE || + currentSide <= side && lastSide < currentSide + ) { closestObject = obj; lastSide = currentSide; } @@ -1603,4 +1704,21 @@ public static long bytesToLong(byte[] bytes) { } return l; } + + Runnable dumpFilesQueueRunnable = () -> { + for (int i = 0; i < smallFilesQueue.length; i++) { + if (smallFilesQueue[i].getCount() > 0 || largeFilesQueue[i].getCount() > 0) { + FileLog.d("download queue: dc" + (i + 1) + " account=" + currentAccount + " small_operations=" + smallFilesQueue[i].getCount() + " large_operations=" + largeFilesQueue[i].getCount()); + } + } + dumpFilesQueue(); + }; + + public void dumpFilesQueue() { + if (!BuildVars.LOGS_ENABLED) { + return; + } + fileLoaderQueue.cancelRunnable(dumpFilesQueueRunnable); + fileLoaderQueue.postRunnable(dumpFilesQueueRunnable, 10_000); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/FileLoaderPriorityQueue.java b/TMessagesProj/src/main/java/org/telegram/messenger/FileLoaderPriorityQueue.java index 8ff17fc664..426e9e0352 100644 --- 
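// Sketch of the self-rescheduling queue dump added above: a runnable that logs queue state and
// re-posts itself every 10 s, with the pending post cancelled first so repeated calls never stack
// duplicates. A plain android.os.Handler is used here instead of the project's DispatchQueue.
import android.os.Handler;
import android.os.Looper;
import android.util.Log;

final class QueueDumper {
    private final Handler handler = new Handler(Looper.getMainLooper());
    private final Runnable dump = new Runnable() {
        @Override
        public void run() {
            Log.d("QueueDumper", "periodic queue state dump");
            schedule(); // re-arm for the next interval
        }
    };

    void schedule() {
        handler.removeCallbacks(dump);      // avoid duplicate pending posts
        handler.postDelayed(dump, 10_000L); // 10 seconds, as in the hunk above
    }
}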
a/TMessagesProj/src/main/java/org/telegram/messenger/FileLoaderPriorityQueue.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/FileLoaderPriorityQueue.java @@ -1,5 +1,7 @@ package org.telegram.messenger; +import org.telegram.tgnet.ConnectionsManager; + import java.util.ArrayList; public class FileLoaderPriorityQueue { @@ -7,8 +9,7 @@ public class FileLoaderPriorityQueue { private final int maxActiveOperationsCount; String name; - ArrayList allOperations = new ArrayList<>(); - ArrayList activeOperations = new ArrayList<>(); + private ArrayList allOperations = new ArrayList<>(); private int PRIORITY_VALUE_MAX = (1 << 20); private int PRIORITY_VALUE_NORMAL = (1 << 16); @@ -24,7 +25,12 @@ public void add(FileLoadOperation operation) { return; } int index = -1; - allOperations.remove(operation); + for (int i = 0; i < allOperations.size(); i++) { + if (allOperations.get(i) == operation) { + allOperations.remove(i); + i--; + } + } for (int i = 0; i < allOperations.size(); i++) { if (operation.getPriority() > allOperations.get(i).getPriority()) { index = i; @@ -42,14 +48,16 @@ public void cancel(FileLoadOperation operation) { if (operation == null) { return; } - allOperations.remove(operation); - operation.cancel(); + if (allOperations.remove(operation)) { + operation.cancel(); + } } public void checkLoadingOperations() { int activeCount = 0; int lastPriority = 0; boolean pauseAllNextOperations = false; + int max = maxActiveOperationsCount; for (int i = 0; i < allOperations.size(); i++) { FileLoadOperation operation = allOperations.get(i); if (i > 0 && !pauseAllNextOperations) { @@ -57,7 +65,11 @@ public void checkLoadingOperations() { pauseAllNextOperations = true; } } - if (!pauseAllNextOperations && i < maxActiveOperationsCount) { + if (operation.preFinished) { + //operation will not use connections + //just skip + max++; + } else if (!pauseAllNextOperations && i < max) { operation.start(); activeCount++; } else { @@ -73,14 +85,18 @@ public void remove(FileLoadOperation operation) { if (operation == null) { return; } + ConnectionsManager connectionsManager = ConnectionsManager.getInstance(operation.currentAccount); + if (connectionsManager != null && connectionsManager.getConnectionState() == ConnectionsManager.ConnectionStateWaitingForNetwork) { + operation.cancel(); + } allOperations.remove(operation); } - private FileLoadOperation remove() { - if (allOperations.isEmpty()) { - return null; - } - return allOperations.remove(0); + public int getCount() { + return allOperations.size(); } + public int getPosition(FileLoadOperation fileLoadOperation) { + return allOperations.indexOf(fileLoadOperation); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/FileLog.java b/TMessagesProj/src/main/java/org/telegram/messenger/FileLog.java index 74d2dae09e..ac4c366cbf 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/FileLog.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/FileLog.java @@ -10,8 +10,6 @@ import android.util.Log; -import cn.hutool.core.util.StrUtil; - import com.google.gson.ExclusionStrategy; import com.google.gson.FieldAttributes; import com.google.gson.Gson; @@ -25,7 +23,9 @@ import java.io.File; import java.io.FileOutputStream; +import java.io.IOException; import java.io.OutputStreamWriter; +import java.util.ArrayList; import java.util.HashSet; import java.util.Locale; @@ -111,7 +111,7 @@ public static void dumpResponseAndRequest(TLObject request, TLObject response, T } }); } catch (Throwable e) { - FileLog.e(e); + FileLog.e(e, 
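// Sketch of the slot accounting in checkLoadingOperations() above: operations that are
// "pre-finished" (fully downloaded, only post-processing left) no longer hold a network slot,
// so the cap is effectively raised past them. Op is an illustrative stand-in for
// FileLoadOperation; the priority/pause-all branch is omitted for brevity.
import java.util.List;

final class SlotScheduler {
    interface Op {
        boolean isPreFinished();
        void start();
        void pause();
    }

    static void check(List<? extends Op> ordered, int maxActive) {
        int max = maxActive;
        for (int i = 0; i < ordered.size(); i++) {
            Op op = ordered.get(i);
            if (op.isPreFinished()) {
                max++;          // does not use a connection, skip it
            } else if (i < max) {
                op.start();
            } else {
                op.pause();
            }
        }
    }
}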
BuildVars.DEBUG_PRIVATE_VERSION); } } @@ -143,7 +143,6 @@ public static void dumpUnparsedMessage(TLObject message, long messageId) { } }); } catch (Throwable e) { - FileLog.e(e); } } @@ -157,6 +156,7 @@ private static void checkGson() { privateFields.add("bytes"); privateFields.add("secret"); privateFields.add("stripped_thumb"); + privateFields.add("strippedBitmap"); privateFields.add("networkType"); privateFields.add("disableFree"); @@ -193,13 +193,10 @@ public void init() { dateFormat = FastDateFormat.getInstance("dd_MM_yyyy_HH_mm_ss", Locale.US); String date = dateFormat.format(System.currentTimeMillis()); try { - File sdCard = ApplicationLoader.applicationContext.getExternalFilesDir(null); - if (sdCard == null) { + File dir = AndroidUtilities.getLogsDir(); + if (dir == null) { return; } - File dir = new File(sdCard.getAbsolutePath() + "/logs"); - dir.mkdirs(); - currentFile = new File(dir, date + ".txt"); tlRequestsFile = new File(dir, date + "_mtproto.txt"); } catch (Exception e) { @@ -228,37 +225,121 @@ public static void ensureInitied() { } public static String getNetworkLogPath() { - if (BuildVars.DEBUG_PRIVATE_VERSION) return "/dev/null"; + if (!BuildVars.LOGS_ENABLED) { + return ""; + } + try { + File dir = AndroidUtilities.getLogsDir(); + if (dir == null) { + return ""; + } + getInstance().networkFile = new File(dir, getInstance().dateFormat.format(System.currentTimeMillis()) + "_net.txt"); + return getInstance().networkFile.getAbsolutePath(); + } catch (Throwable e) { + e.printStackTrace(); + } return ""; } - private static String mkTag() { - - final StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace(); - return StrUtil.subAfter(stackTrace[4].getClassName(), ".", true); - - } - - private static String mkMessage(Throwable e) { - String message = e.getMessage(); - if (message != null) return e.getClass().getSimpleName() + ": " + message; - return e.getClass().getSimpleName(); + public static String getTonlibLogPath() { + if (!BuildVars.LOGS_ENABLED) { + return ""; + } + try { + File dir = AndroidUtilities.getLogsDir(); + if (dir == null) { + return ""; + } + getInstance().tonlibFile = new File(dir, getInstance().dateFormat.format(System.currentTimeMillis()) + "_tonlib.txt"); + return getInstance().tonlibFile.getAbsolutePath(); + } catch (Throwable e) { + e.printStackTrace(); + } + return ""; } public static void e(final String message, final Throwable exception) { - Log.e(mkTag(), message, exception); + if (!BuildVars.LOGS_ENABLED) { + return; + } + ensureInitied(); + Log.e(tag, message, exception); + if (getInstance().streamWriter != null) { + getInstance().logQueue.postRunnable(() -> { + try { + getInstance().streamWriter.write(getInstance().dateFormat.format(System.currentTimeMillis()) + " E/tmessages: " + message + "\n"); + getInstance().streamWriter.write(exception.toString()); + getInstance().streamWriter.flush(); + } catch (Exception e) { + e.printStackTrace(); + } + }); + } } public static void e(final String message) { - Log.e(mkTag(), message); + if (!BuildVars.LOGS_ENABLED) { + return; + } + ensureInitied(); + Log.e(tag, message); + if (getInstance().streamWriter != null) { + getInstance().logQueue.postRunnable(() -> { + try { + getInstance().streamWriter.write(getInstance().dateFormat.format(System.currentTimeMillis()) + " E/tmessages: " + message + "\n"); + getInstance().streamWriter.flush(); + } catch (Exception e) { + e.printStackTrace(); + } + }); + } } public static void e(final Throwable e) { - Log.e(mkTag(), mkMessage(e), e); + e(e, true); } 
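// Minimal sketch of the logging pattern restored above: callers never touch the file directly,
// every line is handed to a single background worker that appends a timestamped record and
// flushes. Path handling and the "E/tmessages" tag format follow the hunk; class names are
// illustrative.
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

final class AsyncFileLog {
    private final ExecutorService queue = Executors.newSingleThreadExecutor();
    private final BufferedWriter writer;
    private final SimpleDateFormat fmt = new SimpleDateFormat("dd_MM_yyyy_HH_mm_ss", Locale.US);

    AsyncFileLog(String path) throws IOException {
        writer = new BufferedWriter(new FileWriter(path, true)); // append mode
    }

    void e(String message) {
        queue.execute(() -> {
            try {
                writer.write(fmt.format(new Date()) + " E/tmessages: " + message + "\n");
                writer.flush();
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        });
    }
}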
- public static void e(final Throwable e, boolean dummyException) { - e(e); + public static void e(final Throwable e, boolean logToAppCenter) { + if (!BuildVars.LOGS_ENABLED) { + return; + } +// if (BuildVars.DEBUG_VERSION && needSent(e) && logToAppCenter) { +// AndroidUtilities.appCenterLog(e); +// } + if (BuildVars.DEBUG_VERSION && e.getMessage() != null && e.getMessage().contains("disk image is malformed") && !databaseIsMalformed) { + FileLog.d("copy malformed files"); + databaseIsMalformed = true; + File filesDir = ApplicationLoader.getFilesDirFixed(); + filesDir = new File(filesDir, "malformed_database/"); + filesDir.mkdirs(); + ArrayList malformedFiles = MessagesStorage.getInstance(UserConfig.selectedAccount).getDatabaseFiles(); + for (int i = 0; i < malformedFiles.size(); i++) { + try { + AndroidUtilities.copyFile(malformedFiles.get(i), new File(filesDir, malformedFiles.get(i).getName())); + } catch (IOException ex) { + FileLog.e(ex); + } + } + } + ensureInitied(); + e.printStackTrace(); + if (getInstance().streamWriter != null) { + getInstance().logQueue.postRunnable(() -> { + + try { + getInstance().streamWriter.write(getInstance().dateFormat.format(System.currentTimeMillis()) + " E/tmessages: " + e + "\n"); + StackTraceElement[] stack = e.getStackTrace(); + for (int a = 0; a < stack.length; a++) { + getInstance().streamWriter.write(getInstance().dateFormat.format(System.currentTimeMillis()) + " E/tmessages: " + stack[a] + "\n"); + } + getInstance().streamWriter.flush(); + } catch (Exception e1) { + e1.printStackTrace(); + } + }); + } else { + e.printStackTrace(); + } } public static void fatal(final Throwable e) { @@ -272,25 +353,108 @@ public static void fatal(final Throwable e, boolean logToAppCenter) { // if (BuildVars.DEBUG_VERSION && needSent(e) && logToAppCenter) { // AndroidUtilities.appCenterLog(e); // } - Log.wtf(mkTag(), mkMessage(e), e); + ensureInitied(); + e.printStackTrace(); + if (getInstance().streamWriter != null) { + getInstance().logQueue.postRunnable(() -> { + try { + getInstance().streamWriter.write(getInstance().dateFormat.format(System.currentTimeMillis()) + " E/tmessages: " + e + "\n"); + StackTraceElement[] stack = e.getStackTrace(); + for (int a = 0; a < stack.length; a++) { + getInstance().streamWriter.write(getInstance().dateFormat.format(System.currentTimeMillis()) + " E/tmessages: " + stack[a] + "\n"); + } + getInstance().streamWriter.flush(); + } catch (Exception e1) { + e1.printStackTrace(); + } + if (BuildVars.DEBUG_PRIVATE_VERSION) { + System.exit(2); + } + }); + } else { + e.printStackTrace(); + if (BuildVars.DEBUG_PRIVATE_VERSION) { + System.exit(2); + } + } } private static boolean needSent(Throwable e) { - if (e instanceof InterruptedException || e instanceof MediaCodecVideoConvertor.ConversionCanceledException) { + if (e instanceof InterruptedException || e instanceof MediaCodecVideoConvertor.ConversionCanceledException || e instanceof IgnoreSentException) { return false; } return true; } public static void d(final String message) { - if (!BuildVars.LOGS_ENABLED) return; - Log.d(mkTag(), message); + if (!BuildVars.LOGS_ENABLED) { + return; + } + ensureInitied(); + Log.d(tag, message); + if (getInstance().streamWriter != null) { + getInstance().logQueue.postRunnable(() -> { + try { + getInstance().streamWriter.write(getInstance().dateFormat.format(System.currentTimeMillis()) + " D/tmessages: " + message + "\n"); + getInstance().streamWriter.flush(); + } catch (Exception e) { + e.printStackTrace(); + if (AndroidUtilities.isENOSPC(e)) { + 
LaunchActivity.checkFreeDiscSpaceStatic(1); + } + } + }); + } } public static void w(final String message) { - if (!BuildVars.LOGS_ENABLED) return; - Log.w(mkTag(), message); + if (!BuildVars.LOGS_ENABLED) { + return; + } + ensureInitied(); + Log.w(tag, message); + if (getInstance().streamWriter != null) { + getInstance().logQueue.postRunnable(() -> { + try { + getInstance().streamWriter.write(getInstance().dateFormat.format(System.currentTimeMillis()) + " W/tmessages: " + message + "\n"); + getInstance().streamWriter.flush(); + } catch (Exception e) { + e.printStackTrace(); + } + }); + } + } + + public static void cleanupLogs() { + ensureInitied(); + File dir = AndroidUtilities.getLogsDir(); + if (dir == null) { + return; + } + File[] files = dir.listFiles(); + if (files != null) { + for (int a = 0; a < files.length; a++) { + File file = files[a]; + if (getInstance().currentFile != null && file.getAbsolutePath().equals(getInstance().currentFile.getAbsolutePath())) { + continue; + } + if (getInstance().networkFile != null && file.getAbsolutePath().equals(getInstance().networkFile.getAbsolutePath())) { + continue; + } + if (getInstance().tonlibFile != null && file.getAbsolutePath().equals(getInstance().tonlibFile.getAbsolutePath())) { + continue; + } + file.delete(); + } + } } + public static class IgnoreSentException extends Exception{ + + public IgnoreSentException(String e) { + super(e); + } + + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/FilePathDatabase.java b/TMessagesProj/src/main/java/org/telegram/messenger/FilePathDatabase.java index ad1e909f5d..56fbd728dc 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/FilePathDatabase.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/FilePathDatabase.java @@ -17,7 +17,7 @@ public class FilePathDatabase { - private final DispatchQueue dispatchQueue; + private DispatchQueue dispatchQueue; private final int currentAccount; private SQLiteDatabase database; @@ -32,13 +32,10 @@ public class FilePathDatabase { public final static int MESSAGE_TYPE_VIDEO_MESSAGE = 0; private final FileMeta metaTmp = new FileMeta(); + boolean databaseCreated; public FilePathDatabase(int currentAccount) { this.currentAccount = currentAccount; - dispatchQueue = new DispatchQueue("files_database_queue_" + currentAccount); - dispatchQueue.postRunnable(() -> { - createDatabase(0, false); - }); } public void createDatabase(int tryCount, boolean fromBackup) { @@ -152,8 +149,8 @@ private boolean restoreBackup() { public String getPath(long documentId, int dc, int type, boolean useQueue) { if (useQueue) { - if (BuildVars.DEBUG_VERSION) { - if (dispatchQueue.getHandler() != null && Thread.currentThread() == dispatchQueue.getHandler().getLooper().getThread()) { + if (BuildVars.DEBUG_PRIVATE_VERSION) { + if (dispatchQueue != null && dispatchQueue.getHandler() != null && Thread.currentThread() == dispatchQueue.getHandler().getLooper().getThread()) { throw new RuntimeException("Error, lead to infinity loop"); } } @@ -161,7 +158,8 @@ public String getPath(long documentId, int dc, int type, boolean useQueue) { CountDownLatch syncLatch = new CountDownLatch(1); String[] res = new String[1]; - dispatchQueue.postRunnable(() -> { + postRunnable(() -> { + ensureDatabaseCreated(); if (database != null) { SQLiteCursor cursor = null; try { @@ -172,7 +170,7 @@ public String getPath(long documentId, int dc, int type, boolean useQueue) { FileLog.d("get file path id=" + documentId + " dc=" + dc + " type=" + type + " path=" + res[0]); } } - } catch 
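// Sketch of the cleanupLogs() logic above: delete old log files but never the ones still being
// written (current, network, tonlib). The directory and the "active" set are supplied by the
// caller here; in the hunk they come from AndroidUtilities.getLogsDir() and FileLog's fields.
import java.io.File;
import java.util.Set;

final class LogCleaner {
    static void cleanup(File logsDir, Set<String> activeAbsolutePaths) {
        if (logsDir == null) {
            return;
        }
        File[] files = logsDir.listFiles();
        if (files == null) {
            return;
        }
        for (File file : files) {
            if (activeAbsolutePaths.contains(file.getAbsolutePath())) {
                continue; // log file still in use, keep it
            }
            //noinspection ResultOfMethodCallIgnored
            file.delete();
        }
    }
}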
(SQLiteException e) { + } catch (Throwable e) { FileLog.e(e); } finally { if (cursor != null) { @@ -212,11 +210,33 @@ public String getPath(long documentId, int dc, int type, boolean useQueue) { } } + public void ensureDatabaseCreated() { + if (!databaseCreated) { + if (!NativeLoader.loaded()) { + int tryCount = 0; + while (!NativeLoader.loaded()) { + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + e.printStackTrace(); + } + tryCount++; + if (tryCount > 5) { + break; + } + } + } + createDatabase(0, false); + databaseCreated = true; + } + } + public void putPath(long id, int dc, int type, String path) { - dispatchQueue.postRunnable(() -> { + postRunnable(() -> { if (BuildVars.DEBUG_VERSION) { FileLog.d("put file path id=" + id + " dc=" + dc + " type=" + type + " path=" + path); } + ensureDatabaseCreated(); if (database == null) { return; } @@ -260,16 +280,18 @@ public void checkMediaExistance(ArrayList messageObjects) { CountDownLatch syncLatch = new CountDownLatch(1); long time = System.currentTimeMillis(); - dispatchQueue.postRunnable(() -> { + postRunnable(() -> { + ensureDatabaseCreated(); try { for (int i = 0; i < arrayListFinal.size(); i++) { MessageObject messageObject = arrayListFinal.get(i); messageObject.checkMediaExistance(false); } - } catch (Exception e) { - e.printStackTrace(); + } catch (Throwable e) { + FileLog.e(e); + } finally { + syncLatch.countDown(); } - syncLatch.countDown(); }); try { @@ -288,7 +310,8 @@ public void checkMediaExistance(ArrayList messageObjects) { } public void clear() { - dispatchQueue.postRunnable(() -> { + postRunnable(() -> { + ensureDatabaseCreated(); try { database.executeFast("DELETE FROM paths WHERE 1").stepThis().dispose(); database.executeFast("DELETE FROM paths_by_dialog_id WHERE 1").stepThis().dispose(); @@ -301,7 +324,8 @@ public void clear() { public boolean hasAnotherRefOnFile(String path) { CountDownLatch syncLatch = new CountDownLatch(1); boolean[] res = new boolean[]{false}; - dispatchQueue.postRunnable(() -> { + postRunnable(() -> { + ensureDatabaseCreated(); try { SQLiteCursor cursor = database.queryFinalized("SELECT document_id FROM paths WHERE path = '" + path + "'"); if (cursor.next()) { @@ -309,8 +333,9 @@ public boolean hasAnotherRefOnFile(String path) { } } catch (Exception e) { FileLog.e(e); + } finally { + syncLatch.countDown(); } - syncLatch.countDown(); }); try { @@ -321,11 +346,12 @@ public boolean hasAnotherRefOnFile(String path) { return res[0]; } - public void saveFileDialogId(File file,FileMeta fileMeta) { + public void saveFileDialogId(File file, FileMeta fileMeta) { if (file == null || fileMeta == null) { return; } - dispatchQueue.postRunnable(() -> { + postRunnable(() -> { + ensureDatabaseCreated(); SQLitePreparedStatement state = null; try { state = database.executeFast("REPLACE INTO paths_by_dialog_id VALUES(?, ?, ?, ?)"); @@ -381,17 +407,19 @@ private String shield(String path) { } public DispatchQueue getQueue() { + ensureQueueExist(); return dispatchQueue; } public void removeFiles(List filesToRemove) { - dispatchQueue.postRunnable(() -> { + postRunnable(() -> { try { + ensureDatabaseCreated(); database.beginTransaction(); for (int i = 0; i < filesToRemove.size(); i++) { database.executeFast("DELETE FROM paths_by_dialog_id WHERE path = '" + shield(filesToRemove.get(i).file.getPath()) + "'").stepThis().dispose(); } - } catch (Exception e) { + } catch (Throwable e) { FileLog.e(e); } finally { database.commitTransaction(); @@ -399,11 +427,12 @@ public void removeFiles(List filesToRemove) { }); 
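// Sketch of the latch fix above: countDown() moved into a finally block so that an exception
// inside the posted runnable can no longer leave the calling thread blocked in await(). A plain
// Executor stands in for the project's DispatchQueue.
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;

final class SyncQuery {
    static String runBlocking(Executor queue, Callable<String> work) {
        CountDownLatch latch = new CountDownLatch(1);
        String[] result = new String[1];
        queue.execute(() -> {
            try {
                result[0] = work.call();
            } catch (Throwable t) {
                t.printStackTrace();
            } finally {
                latch.countDown(); // always release the waiter
            }
        });
        try {
            latch.await();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        return result[0];
    }
}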
} - public LongSparseArray> lookupFiles(ArrayList keepMediaFiles) { + public LongSparseArray> lookupFiles(ArrayList keepMediaFiles) { CountDownLatch syncLatch = new CountDownLatch(1); LongSparseArray> filesByDialogId = new LongSparseArray<>(); - dispatchQueue.postRunnable(() -> { + postRunnable(() -> { try { + ensureDatabaseCreated(); FileMeta fileMetaTmp = new FileMeta(); for (int i = 0; i < keepMediaFiles.size(); i++) { FileMeta fileMeta = getFileDialogId(keepMediaFiles.get(i).file, fileMetaTmp); @@ -416,10 +445,11 @@ public LongSparseArray> lookupFi list.add(keepMediaFiles.get(i)); } } - } catch (Exception e) { + } catch (Throwable e) { FileLog.e(e); + } finally { + syncLatch.countDown(); } - syncLatch.countDown(); }); try { syncLatch.await(); @@ -429,6 +459,21 @@ public LongSparseArray> lookupFi return filesByDialogId; } + private void postRunnable(Runnable runnable) { + ensureQueueExist(); + dispatchQueue.postRunnable(runnable); + } + + private void ensureQueueExist() { + if (dispatchQueue == null) { + synchronized (this) { + if (dispatchQueue == null) { + dispatchQueue = new DispatchQueue("files_database_queue_" + currentAccount); + } + } + } + } + public static class PathData { public final long id; public final int dc; @@ -445,5 +490,6 @@ public static class FileMeta { public long dialogId; public int messageId; public int messageType; + public long messageSize; } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/FileRefController.java b/TMessagesProj/src/main/java/org/telegram/messenger/FileRefController.java index 3ffb3f3a20..85bea974b2 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/FileRefController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/FileRefController.java @@ -23,7 +23,6 @@ private static class Requester { private static class CachedResult { private TLObject response; - private long lastQueryTime; private long firstQueryTime; } @@ -125,7 +124,7 @@ public void requestReference(Object parentObject, Object... args) { String locationKey; TLRPC.InputFileLocation location; if (BuildVars.LOGS_ENABLED) { - FileLog.d("start loading request reference for parent = " + parentObject + " args = " + args[0]); + FileLog.d("start loading request reference parent " + getObjectString(parentObject) + " args = " + args[0]); } if (args[0] instanceof TLRPC.TL_inputSingleMedia) { TLRPC.TL_inputSingleMedia req = (TLRPC.TL_inputSingleMedia) args[0]; @@ -292,7 +291,7 @@ public void requestReference(Object parentObject, Object... args) { cleanupCache(); CachedResult cachedResult = getCachedResponse(cacheKey); if (cachedResult != null) { - if (!onRequestComplete(locationKey, parentKey, cachedResult.response, false, true)) { + if (!onRequestComplete(locationKey, parentKey, cachedResult.response, null,false, true)) { responseCache.remove(locationKey); } else { return; @@ -300,21 +299,34 @@ public void requestReference(Object parentObject, Object... 
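// Sketch of ensureQueueExist() above: the worker queue is now created lazily with a
// double-checked lock instead of in the constructor, so accounts that never touch the path
// database never spin up the thread. Note the field should be volatile for this pattern to be
// safe under the Java memory model; an ExecutorService stands in for DispatchQueue.
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

final class LazyQueueHolder {
    private volatile ExecutorService queue;

    ExecutorService getQueue(String name) {
        ExecutorService local = queue;
        if (local == null) {
            synchronized (this) {
                local = queue;
                if (local == null) {
                    queue = local = Executors.newSingleThreadExecutor(
                            r -> new Thread(r, name));
                }
            }
        }
        return local;
    }
}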
args) { } else { cachedResult = getCachedResponse(parentKey); if (cachedResult != null) { - if (!onRequestComplete(locationKey, parentKey, cachedResult.response, false, true)) { + if (!onRequestComplete(locationKey, parentKey, cachedResult.response, null, false, true)) { responseCache.remove(parentKey); } else { return; } } } - requestReferenceFromServer(parentObject, locationKey, parentKey, args); } - private void broadcastWaitersData(ArrayList waiters, TLObject response) { + private String getObjectString(Object parentObject) { + if (parentObject instanceof String) { + return (String) parentObject; + } + if (parentObject instanceof MessageObject) { + MessageObject messageObject = (MessageObject) parentObject; + return "message(dialogId=" + messageObject.getDialogId() + "messageId" + messageObject.getId() + ")"; + } + if (parentObject == null) { + return null; + } + return parentObject.getClass().getSimpleName(); + } + + private void broadcastWaitersData(ArrayList waiters, TLObject response, TLRPC.TL_error error) { for (int a = 0, N = waiters.size(); a < N; a++) { Waiter waiter = waiters.get(a); - onRequestComplete(waiter.locationKey, waiter.parentKey, response, a == N - 1, false); + onRequestComplete(waiter.locationKey, waiter.parentKey, response, error, a == N - 1, false); } waiters.clear(); } @@ -329,22 +341,22 @@ private void requestReferenceFromServer(Object parentObject, String locationKey, getMediaDataController().processLoadedPremiumPromo(r, date, false); } - onRequestComplete(locationKey, parentKey, response, true, false); + onRequestComplete(locationKey, parentKey, response, error, true, false); }); } else if (parentObject instanceof TLRPC.TL_availableReaction) { TLRPC.TL_messages_getAvailableReactions req = new TLRPC.TL_messages_getAvailableReactions(); req.hash = 0; - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } else if (parentObject instanceof TLRPC.BotInfo) { TLRPC.BotInfo botInfo = (TLRPC.BotInfo) parentObject; TLRPC.TL_users_getFullUser req = new TLRPC.TL_users_getFullUser(); req.id = getMessagesController().getInputUser(botInfo.user_id); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } else if (parentObject instanceof TLRPC.TL_attachMenuBot) { TLRPC.TL_attachMenuBot bot = (TLRPC.TL_attachMenuBot) parentObject; TLRPC.TL_messages_getAttachMenuBot req = new TLRPC.TL_messages_getAttachMenuBot(); req.bot = getMessagesController().getInputUser(bot.bot_id); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } else if (parentObject instanceof MessageObject) { MessageObject messageObject = (MessageObject) parentObject; long channelId = messageObject.getChannelId(); @@ -352,16 +364,16 @@ private void requestReferenceFromServer(Object parentObject, String locationKey, TLRPC.TL_messages_getScheduledMessages req = new TLRPC.TL_messages_getScheduledMessages(); req.peer = 
getMessagesController().getInputPeer(messageObject.getDialogId()); req.id.add(messageObject.getRealId()); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } else if (channelId != 0) { TLRPC.TL_channels_getMessages req = new TLRPC.TL_channels_getMessages(); req.channel = getMessagesController().getInputChannel(channelId); req.id.add(messageObject.getRealId()); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } else { TLRPC.TL_messages_getMessages req = new TLRPC.TL_messages_getMessages(); req.id.add(messageObject.getRealId()); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } } else if (parentObject instanceof TLRPC.TL_wallPaper) { TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) parentObject; @@ -370,7 +382,7 @@ private void requestReferenceFromServer(Object parentObject, String locationKey, inputWallPaper.id = wallPaper.id; inputWallPaper.access_hash = wallPaper.access_hash; req.wallpaper = inputWallPaper; - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } else if (parentObject instanceof TLRPC.TL_theme) { TLRPC.TL_theme theme = (TLRPC.TL_theme) parentObject; TLRPC.TL_account_getTheme req = new TLRPC.TL_account_getTheme(); @@ -379,53 +391,53 @@ private void requestReferenceFromServer(Object parentObject, String locationKey, inputTheme.access_hash = theme.access_hash; req.theme = inputTheme; req.format = "android"; - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } else if (parentObject instanceof TLRPC.WebPage) { TLRPC.WebPage webPage = (TLRPC.WebPage) parentObject; TLRPC.TL_messages_getWebPage req = new TLRPC.TL_messages_getWebPage(); req.url = webPage.url; req.hash = 0; - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } else if (parentObject instanceof TLRPC.User) { TLRPC.User user = (TLRPC.User) parentObject; TLRPC.TL_users_getUsers req = new TLRPC.TL_users_getUsers(); req.id.add(getMessagesController().getInputUser(user)); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } else if (parentObject instanceof TLRPC.Chat) { TLRPC.Chat chat = (TLRPC.Chat) parentObject; if 
(chat instanceof TLRPC.TL_chat) { TLRPC.TL_messages_getChats req = new TLRPC.TL_messages_getChats(); req.id.add(chat.id); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } else if (chat instanceof TLRPC.TL_channel) { TLRPC.TL_channels_getChannels req = new TLRPC.TL_channels_getChannels(); req.id.add(MessagesController.getInputChannel(chat)); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } } else if (parentObject instanceof String) { String string = (String) parentObject; if ("wallpaper".equals(string)) { if (wallpaperWaiters.isEmpty()) { TLRPC.TL_account_getWallPapers req = new TLRPC.TL_account_getWallPapers(); - getConnectionsManager().sendRequest(req, (response, error) -> broadcastWaitersData(wallpaperWaiters, response)); + getConnectionsManager().sendRequest(req, (response, error) -> broadcastWaitersData(wallpaperWaiters, response, error)); } wallpaperWaiters.add(new Waiter(locationKey, parentKey)); } else if (string.startsWith("gif")) { if (savedGifsWaiters.isEmpty()) { TLRPC.TL_messages_getSavedGifs req = new TLRPC.TL_messages_getSavedGifs(); - getConnectionsManager().sendRequest(req, (response, error) -> broadcastWaitersData(savedGifsWaiters, response)); + getConnectionsManager().sendRequest(req, (response, error) -> broadcastWaitersData(savedGifsWaiters, response, error)); } savedGifsWaiters.add(new Waiter(locationKey, parentKey)); } else if ("recent".equals(string)) { if (recentStickersWaiter.isEmpty()) { TLRPC.TL_messages_getRecentStickers req = new TLRPC.TL_messages_getRecentStickers(); - getConnectionsManager().sendRequest(req, (response, error) -> broadcastWaitersData(recentStickersWaiter, response)); + getConnectionsManager().sendRequest(req, (response, error) -> broadcastWaitersData(recentStickersWaiter, response, error)); } recentStickersWaiter.add(new Waiter(locationKey, parentKey)); } else if ("fav".equals(string)) { if (favStickersWaiter.isEmpty()) { TLRPC.TL_messages_getFavedStickers req = new TLRPC.TL_messages_getFavedStickers(); - getConnectionsManager().sendRequest(req, (response, error) -> broadcastWaitersData(favStickersWaiter, response)); + getConnectionsManager().sendRequest(req, (response, error) -> broadcastWaitersData(favStickersWaiter, response, error)); } favStickersWaiter.add(new Waiter(locationKey, parentKey)); } else if ("update".equals(string)) { @@ -438,7 +450,7 @@ private void requestReferenceFromServer(Object parentObject, String locationKey, if (req.source == null) { req.source = ""; } - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } else if (string.startsWith("avatar_")) { long id = Utilities.parseLong(string); if (id > 0) { @@ -447,7 +459,7 @@ private void requestReferenceFromServer(Object parentObject, String locationKey, req.offset = 0; req.max_id = 0; req.user_id = getMessagesController().getInputUser(id); - getConnectionsManager().sendRequest(req, (response, error) -> 
onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } else { TLRPC.TL_messages_search req = new TLRPC.TL_messages_search(); req.filter = new TLRPC.TL_inputMessagesFilterChatPhotos(); @@ -455,7 +467,7 @@ private void requestReferenceFromServer(Object parentObject, String locationKey, req.offset_id = 0; req.q = ""; req.peer = getMessagesController().getInputPeer(id); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } } else if (string.startsWith("sent_")) { String[] params = string.split("_"); @@ -465,11 +477,11 @@ private void requestReferenceFromServer(Object parentObject, String locationKey, TLRPC.TL_channels_getMessages req = new TLRPC.TL_channels_getMessages(); req.channel = getMessagesController().getInputChannel(channelId); req.id.add(Utilities.parseInt(params[2])); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, false, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, false, false)); } else { TLRPC.TL_messages_getMessages req = new TLRPC.TL_messages_getMessages(); req.id.add(Utilities.parseInt(params[2])); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, false, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, false, false)); } } else { sendErrorToObject(args, 0); @@ -483,18 +495,18 @@ private void requestReferenceFromServer(Object parentObject, String locationKey, req.stickerset = new TLRPC.TL_inputStickerSetID(); req.stickerset.id = stickerSet.set.id; req.stickerset.access_hash = stickerSet.set.access_hash; - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } else if (parentObject instanceof TLRPC.StickerSetCovered) { TLRPC.StickerSetCovered stickerSet = (TLRPC.StickerSetCovered) parentObject; TLRPC.TL_messages_getStickerSet req = new TLRPC.TL_messages_getStickerSet(); req.stickerset = new TLRPC.TL_inputStickerSetID(); req.stickerset.id = stickerSet.set.id; req.stickerset.access_hash = stickerSet.set.access_hash; - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } else if (parentObject instanceof TLRPC.InputStickerSet) { TLRPC.TL_messages_getStickerSet req = new TLRPC.TL_messages_getStickerSet(); req.stickerset = (TLRPC.InputStickerSet) parentObject; - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, error, true, false)); } else { sendErrorToObject(args, 0); } @@ -625,18 +637,35 @@ private boolean 
onUpdateObjectReference(Requester requester, byte[] file_referen getConnectionsManager().sendRequest(req, (RequestDelegate) requester.args[1]); } else if (requester.args[1] instanceof FileLoadOperation) { FileLoadOperation fileLoadOperation = (FileLoadOperation) requester.args[1]; + String oldRef = null; + String newRef = null; if (locationReplacement != null) { if (fromCache && isSameReference(fileLoadOperation.location.file_reference, locationReplacement.file_reference)) { return false; } + if (BuildVars.LOGS_ENABLED) { + oldRef = Utilities.bytesToHex(fileLoadOperation.location.file_reference); + } fileLoadOperation.location = locationReplacement; + if (BuildVars.LOGS_ENABLED) { + newRef = Utilities.bytesToHex(fileLoadOperation.location.file_reference); + } } else { if (fromCache && isSameReference(requester.location.file_reference, file_reference)) { return false; } - requester.location.file_reference = file_reference; + if (BuildVars.LOGS_ENABLED) { + oldRef = Utilities.bytesToHex(fileLoadOperation.location.file_reference); + } + fileLoadOperation.location.file_reference = requester.location.file_reference = file_reference; + if (BuildVars.LOGS_ENABLED) { + newRef = Utilities.bytesToHex(fileLoadOperation.location.file_reference); + } } fileLoadOperation.requestingReference = false; + if (BuildVars.LOGS_ENABLED) { + FileLog.d("debug_loading: " + fileLoadOperation.getCacheFileFinal().getName() + " " + oldRef + " " + newRef + " reference updated resume download"); + } fileLoadOperation.startDownloadRequest(); } return true; @@ -666,26 +695,16 @@ private void sendErrorToObject(Object[] args, int reason) { TLRPC.TL_messages_getAttachedStickers req = (TLRPC.TL_messages_getAttachedStickers) args[0]; getConnectionsManager().sendRequest(req, (RequestDelegate) args[1]); } else { - if (reason == 0) { - TLRPC.TL_error error = new TLRPC.TL_error(); - error.text = "not found parent object to request reference"; - error.code = 400; - if (args[1] instanceof FileLoadOperation) { - FileLoadOperation fileLoadOperation = (FileLoadOperation) args[1]; - fileLoadOperation.requestingReference = false; - fileLoadOperation.processRequestResult((FileLoadOperation.RequestInfo) args[2], error); - } - } else if (reason == 1) { - if (args[1] instanceof FileLoadOperation) { - FileLoadOperation fileLoadOperation = (FileLoadOperation) args[1]; - fileLoadOperation.requestingReference = false; - fileLoadOperation.onFail(false, 0); - } + if (args[1] instanceof FileLoadOperation) { + FileLoadOperation fileLoadOperation = (FileLoadOperation) args[1]; + fileLoadOperation.requestingReference = false; + FileLog.e("debug_loading: " + fileLoadOperation.getCacheFileFinal().getName() + "reference can't update: fail operation "); + fileLoadOperation.onFail(false, 0); } } } - private boolean onRequestComplete(String locationKey, String parentKey, TLObject response, boolean cache, boolean fromCache) { + private boolean onRequestComplete(String locationKey, String parentKey, TLObject response, TLRPC.TL_error error, boolean cache, boolean fromCache) { boolean found = false; String cacheKey = parentKey; if (response instanceof TLRPC.TL_help_premiumPromo) { @@ -707,7 +726,7 @@ private boolean onRequestComplete(String locationKey, String parentKey, TLObject if (requester.completed) { continue; } - if (onRequestComplete(requester.locationKey, null, response, cache && !found, fromCache)) { + if (onRequestComplete(requester.locationKey, null, response, error, cache && !found, fromCache)) { found = true; } } @@ -730,6 +749,13 @@ private 
boolean onRequestComplete(String locationKey, String parentKey, TLObject if (requester.completed) { continue; } + if (error != null && BuildVars.LOGS_ENABLED) { + if (requester.args[1] instanceof FileLoadOperation) { + FileLoadOperation operation = (FileLoadOperation) requester.args[1]; + FileLog.e("debug_loading: " + operation.getCacheFileFinal().getName() + " can't update file reference: " + error.code + " " + error.text); + } + + } if (requester.location instanceof TLRPC.TL_inputFileLocation || requester.location instanceof TLRPC.TL_inputPeerPhotoFileLocation) { locationReplacement = new TLRPC.InputFileLocation[1]; needReplacement = new boolean[1]; @@ -769,6 +795,10 @@ private boolean onRequestComplete(String locationKey, String parentKey, TLObject FileLog.d("file ref not found in messages, replacing message"); } } + } else { + if (BuildVars.DEBUG_VERSION) { + FileLog.d("empty messages, file ref not found"); + } } } else if (response instanceof TLRPC.TL_help_premiumPromo) { TLRPC.TL_help_premiumPromo premiumPromo = (TLRPC.TL_help_premiumPromo) response; @@ -1008,7 +1038,7 @@ private void cleanupCache() { ArrayList keysToDelete = null; for (HashMap.Entry entry : responseCache.entrySet()) { CachedResult cachedResult = entry.getValue(); - if (Math.abs(SystemClock.elapsedRealtime() - cachedResult.firstQueryTime) >= 60 * 10 * 1000) { + if (Math.abs(System.currentTimeMillis() - cachedResult.firstQueryTime) >= 60 * 1000) { if (keysToDelete == null) { keysToDelete = new ArrayList<>(); } @@ -1024,7 +1054,7 @@ private void cleanupCache() { private CachedResult getCachedResponse(String key) { CachedResult cachedResult = responseCache.get(key); - if (cachedResult != null && Math.abs(SystemClock.elapsedRealtime() - cachedResult.firstQueryTime) >= 60 * 10 * 1000) { + if (cachedResult != null && Math.abs(System.currentTimeMillis() - cachedResult.firstQueryTime) >= 60 * 1000) { responseCache.remove(key); cachedResult = null; } @@ -1036,10 +1066,9 @@ private void putReponseToCache(String key, TLObject response) { if (cachedResult == null) { cachedResult = new CachedResult(); cachedResult.response = response; - cachedResult.firstQueryTime = SystemClock.uptimeMillis(); + cachedResult.firstQueryTime = System.currentTimeMillis(); responseCache.put(key, cachedResult); } - cachedResult.lastQueryTime = SystemClock.uptimeMillis(); } private byte[] getFileReference(TLRPC.Document document, TLRPC.InputFileLocation location, boolean[] needReplacement, TLRPC.InputFileLocation[] replacement) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/FileStreamLoadOperation.java b/TMessagesProj/src/main/java/org/telegram/messenger/FileStreamLoadOperation.java index cfb26754b3..1b339c5e6c 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/FileStreamLoadOperation.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/FileStreamLoadOperation.java @@ -69,7 +69,7 @@ public long open(DataSpec dataSpec) throws IOException { } else if (document.mime_type.startsWith("audio")) { document.attributes.add(new TLRPC.TL_documentAttributeAudio()); } - loadOperation = FileLoader.getInstance(currentAccount).loadStreamFile(this, document, null, parentObject, currentOffset = dataSpec.position, false); + loadOperation = FileLoader.getInstance(currentAccount).loadStreamFile(this, document, null, parentObject, currentOffset = dataSpec.position, false, FileLoader.PRIORITY_HIGH); bytesRemaining = dataSpec.length == C.LENGTH_UNSET ? 
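The FileRefController hunks above also tighten the response cache: the expiry check moves from SystemClock.elapsedRealtime with a ten-minute window to System.currentTimeMillis with a one-minute window, and putReponseToCache now stamps firstQueryTime with the same clock it is later compared against. A minimal standalone sketch of that expiry pattern (class and field names are illustrative, not from the patch):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

// Illustrative TTL cache mirroring the patched checks:
// Math.abs(System.currentTimeMillis() - firstQueryTime) >= 60 * 1000.
final class TtlResponseCache<V> {
    private static final long TTL_MS = 60 * 1000;

    private static final class Entry<V> {
        final V value;
        final long firstQueryTime = System.currentTimeMillis();
        Entry(V value) { this.value = value; }
    }

    private final Map<String, Entry<V>> cache = new HashMap<>();

    void put(String key, V value) {
        // keep the first timestamp, as putReponseToCache does
        cache.putIfAbsent(key, new Entry<>(value));
    }

    V get(String key) {
        Entry<V> entry = cache.get(key);
        if (entry != null && Math.abs(System.currentTimeMillis() - entry.firstQueryTime) >= TTL_MS) {
            cache.remove(key); // expired, same as getCachedResponse
            return null;
        }
        return entry != null ? entry.value : null;
    }

    void cleanup() {
        ArrayList<String> stale = null;
        for (Map.Entry<String, Entry<V>> en : cache.entrySet()) {
            if (Math.abs(System.currentTimeMillis() - en.getValue().firstQueryTime) >= TTL_MS) {
                if (stale == null) {
                    stale = new ArrayList<>();
                }
                stale.add(en.getKey());
            }
        }
        if (stale != null) {
            for (String key : stale) {
                cache.remove(key);
            }
        }
    }
}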
document.size - dataSpec.position : dataSpec.length; if (bytesRemaining < 0) { throw new EOFException(); @@ -98,7 +98,7 @@ public int read(byte[] buffer, int offset, int readLength) throws IOException { while (availableLength == 0 && opened) { availableLength = (int) loadOperation.getDownloadedLengthFromOffset(currentOffset, readLength)[0]; if (availableLength == 0) { - FileLoader.getInstance(currentAccount).loadStreamFile(this, document, null, parentObject, currentOffset, false); + FileLoader.getInstance(currentAccount).loadStreamFile(this, document, null, parentObject, currentOffset, false, FileLoader.PRIORITY_HIGH); countDownLatch = new CountDownLatch(1); countDownLatch.await(); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/FileUploadOperation.java b/TMessagesProj/src/main/java/org/telegram/messenger/FileUploadOperation.java index 87cc4f0076..e88cd350bd 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/FileUploadOperation.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/FileUploadOperation.java @@ -113,6 +113,7 @@ public void start() { return; } state = 1; + AutoDeleteMediaTask.lockFile(uploadingFilePath); Utilities.stageQueue.postRunnable(() -> { preferences = ApplicationLoader.applicationContext.getSharedPreferences("uploadinfo", Activity.MODE_PRIVATE); slowNetwork = ApplicationLoader.isConnectionSlow(); @@ -173,6 +174,7 @@ public void cancel() { ConnectionsManager.getInstance(currentAccount).cancelRequest(requestTokens.valueAt(a), true); } }); + AutoDeleteMediaTask.unlockFile(uploadingFilePath); delegate.didFailedUploadingFile(this); cleanup(); } @@ -196,6 +198,7 @@ private void cleanup() { } catch (Exception e) { FileLog.e(e); } + AutoDeleteMediaTask.unlockFile(uploadingFilePath); } protected void checkNewDataAvailable(final long newAvailableSize, final long finalSize) { @@ -259,10 +262,9 @@ private void startUploadRequest() { started = true; if (stream == null) { File cacheFile = new File(uploadingFilePath); - // NekoX: keep this checking? 
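The FileUploadOperation hunks above bracket an upload with AutoDeleteMediaTask.lockFile in start() and AutoDeleteMediaTask.unlockFile in both cancel() and cleanup(), so a file scheduled for auto-deletion is never removed mid-upload. AutoDeleteMediaTask itself is not part of this excerpt; the sketch below only illustrates the lock/unlock pairing with a hypothetical set-based registry:

import java.util.HashSet;
import java.util.Set;

// Sketch only: a set-based "do not auto-delete" registry showing how lockFile in start()
// must always be paired with unlockFile in cancel() and cleanup().
// The real AutoDeleteMediaTask is not shown in this diff and may work differently.
final class UploadFileLocks {
    private static final Set<String> lockedPaths = new HashSet<>();

    static synchronized void lockFile(String path) {
        if (path != null) {
            lockedPaths.add(path);
        }
    }

    static synchronized void unlockFile(String path) {
        if (path != null) {
            lockedPaths.remove(path);
        }
    }

    // An auto-delete sweep would skip any path that is still locked.
    static synchronized boolean isLocked(String path) {
        return path != null && lockedPaths.contains(path);
    }
}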
- if (AndroidUtilities.isInternalUri(Uri.fromFile(cacheFile))) { - throw new Exception("trying to upload internal file"); - } +// if (AndroidUtilities.isInternalUri(Uri.fromFile(cacheFile))) { +// throw new FileLog.IgnoreSentException("trying to upload internal file"); +// } stream = new RandomAccessFile(cacheFile, "r"); boolean isInternalFile = false; try { @@ -539,7 +541,11 @@ private void startUploadRequest() { } else if (currentType == ConnectionsManager.FileTypePhoto) { StatsController.getInstance(currentAccount).incrementSentBytesCount(networkType, StatsController.TYPE_PHOTOS, requestSize); } else if (currentType == ConnectionsManager.FileTypeFile) { - StatsController.getInstance(currentAccount).incrementSentBytesCount(networkType, StatsController.TYPE_FILES, requestSize); + if (uploadingFilePath != null && (uploadingFilePath.toLowerCase().endsWith("mp3") || uploadingFilePath.toLowerCase().endsWith("m4a"))) { + StatsController.getInstance(currentAccount).incrementSentBytesCount(networkType, StatsController.TYPE_MUSIC, requestSize); + } else { + StatsController.getInstance(currentAccount).incrementSentBytesCount(networkType, StatsController.TYPE_FILES, requestSize); + } } if (currentRequestIv != null) { freeRequestIvs.add(currentRequestIv); @@ -594,7 +600,11 @@ private void startUploadRequest() { } else if (currentType == ConnectionsManager.FileTypePhoto) { StatsController.getInstance(currentAccount).incrementSentItemsCount(ApplicationLoader.getCurrentNetworkType(), StatsController.TYPE_PHOTOS, 1); } else if (currentType == ConnectionsManager.FileTypeFile) { - StatsController.getInstance(currentAccount).incrementSentItemsCount(ApplicationLoader.getCurrentNetworkType(), StatsController.TYPE_FILES, 1); + if (uploadingFilePath != null && (uploadingFilePath.toLowerCase().endsWith("mp3") || uploadingFilePath.toLowerCase().endsWith("m4a"))) { + StatsController.getInstance(currentAccount).incrementSentItemsCount(ApplicationLoader.getCurrentNetworkType(), StatsController.TYPE_MUSIC, 1); + } else { + StatsController.getInstance(currentAccount).incrementSentItemsCount(ApplicationLoader.getCurrentNetworkType(), StatsController.TYPE_FILES, 1); + } } } else if (currentUploadRequetsCount < maxRequestsCount) { if (estimatedSize == 0 && !uploadFirstPartLater && !nextPartFirst) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/FlagSecureReason.java b/TMessagesProj/src/main/java/org/telegram/messenger/FlagSecureReason.java new file mode 100644 index 0000000000..e13b1c1e68 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/messenger/FlagSecureReason.java @@ -0,0 +1,83 @@ +package org.telegram.messenger; + +import android.view.Window; +import android.view.WindowManager; + +import java.util.HashMap; + +public class FlagSecureReason { + + private static HashMap currentSecureReasons; + + private Window window; + private FlagSecureCondition condition; + + public FlagSecureReason(Window window, FlagSecureCondition condition) { + this.window = window; + this.condition = condition; + } + + private boolean attached = false; + private boolean value = false; + + public void invalidate() { + boolean newValue = attached && condition != null && condition.run(); + if (newValue != value) { + update((value = newValue) ? 
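The traffic-stats hunks in FileUploadOperation above start counting .mp3 and .m4a uploads under TYPE_MUSIC instead of TYPE_FILES, with the same extension test repeated at two call sites. A hypothetical helper (not in the patch) expressing that condition once:

// Hypothetical helper, not part of the patch: mirrors the inline
// uploadingFilePath.toLowerCase().endsWith("mp3") / endsWith("m4a") test above.
private static boolean isMusicUpload(String path) {
    if (path == null) {
        return false;
    }
    String lower = path.toLowerCase();
    return lower.endsWith("mp3") || lower.endsWith("m4a");
}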
+1 : -1); + } + } + + public void attach() { + if (attached) { + return; + } + attached = true; + invalidate(); + } + + public void detach() { + if (!attached) { + return; + } + attached = false; + invalidate(); + } + + private void update(int add) { + if (currentSecureReasons == null) { + currentSecureReasons = new HashMap<>(); + } + + Integer count = currentSecureReasons.get(window); + int newCount = Math.max(0, (count == null ? 0 : count) + add); + if (newCount <= 0) { + currentSecureReasons.remove(window); + } else { + currentSecureReasons.put(window, newCount); + } + + updateWindowSecure(window); + } + + private static void updateWindowSecure(Window window) { + if (window == null) { + return; + } + + if (isSecuredNow(window)) { + window.addFlags(WindowManager.LayoutParams.FLAG_SECURE); + } else { + window.clearFlags(WindowManager.LayoutParams.FLAG_SECURE); + } + } + + public static boolean isSecuredNow(Window window) { + return currentSecureReasons != null && currentSecureReasons.get(window) != null; + } + + + public interface FlagSecureCondition { + boolean run(); + } + +} diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java b/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java index c17d988a5f..50062c7268 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java @@ -29,7 +29,7 @@ import android.text.TextUtils; import android.util.SparseArray; -import androidx.exifinterface.media.ExifInterface; +import androidx.annotation.RequiresApi; import androidx.exifinterface.media.ExifInterface; import org.json.JSONArray; @@ -74,6 +74,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Stream; import java.util.zip.GZIPInputStream; @@ -89,6 +90,7 @@ * b - need blur image * g - autoplay * lastframe - return lastframe for Lottie animation + * lastreactframe - return lastframe for Lottie animation + some scale ReactionLastFrame magic * firstframe - return firstframe for Lottie animation */ public class ImageLoader { @@ -873,6 +875,7 @@ public void run() { boolean precache = false; boolean limitFps = false; boolean lastFrameBitmap = false; + boolean lastFrameReactionScaleBitmap = false; boolean firstFrameBitmap = false; int autoRepeat = 1; int[] colors = null; @@ -899,6 +902,10 @@ public void run() { if (cacheImage.filter.contains("lastframe")) { lastFrameBitmap = true; } + if (cacheImage.filter.contains("lastreactframe")) { + lastFrameBitmap = true; + lastFrameReactionScaleBitmap = true; + } if (cacheImage.filter.contains("firstframe")) { firstFrameBitmap = true; } @@ -967,13 +974,17 @@ public void run() { precache = false; } BitmapsCache.CacheOptions cacheOptions = null; - if (precache) { + if (precache || lastFrameBitmap || firstFrameBitmap) { cacheOptions = new BitmapsCache.CacheOptions(); - if (cacheImage.filter != null && cacheImage.filter.contains("compress")) { - cacheOptions.compressQuality = BitmapsCache.COMPRESS_QUALITY_DEFAULT; - } - if (cacheImage.filter != null && cacheImage.filter.contains("flbk")) { - cacheOptions.fallback = true; + if (!lastFrameBitmap && !firstFrameBitmap) { + if (cacheImage.filter != null && cacheImage.filter.contains("compress")) { + cacheOptions.compressQuality = BitmapsCache.COMPRESS_QUALITY_DEFAULT; + } + if (cacheImage.filter != null && cacheImage.filter.contains("flbk")) { + cacheOptions.fallback = 
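A usage sketch for the new FlagSecureReason class above, assuming an Activity that wants FLAG_SECURE while some sensitive state is visible (the passcodeVisible field and the lifecycle placement are illustrative):

// Illustrative usage inside an Activity; passcodeVisible is a hypothetical field.
FlagSecureReason passcodeSecureReason =
        new FlagSecureReason(getWindow(), () -> passcodeVisible);

passcodeSecureReason.attach();     // start counting this reason for the window
// ... whenever the condition may have changed:
passcodeSecureReason.invalidate(); // re-evaluates and adds or removes FLAG_SECURE
// ... when leaving the screen:
passcodeSecureReason.detach();     // drops the reason; the flag clears once no reasons remain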
true; + } + } else { + cacheOptions.firstFrame = true; } } if (compressed) { @@ -983,7 +994,7 @@ public void run() { } } if (lastFrameBitmap || firstFrameBitmap) { - loadLastFrame(lottieDrawable, h, w, lastFrameBitmap); + loadLastFrame(lottieDrawable, h, w, lastFrameBitmap, lastFrameReactionScaleBitmap); } else { lottieDrawable.setAutoRepeat(autoRepeat); onPostExecute(lottieDrawable); @@ -998,6 +1009,8 @@ public void run() { } boolean limitFps = false; boolean precache = false; + boolean fistFrame = false; + boolean notCreateStream = false; if (cacheImage.filter != null) { String[] args = cacheImage.filter.split("_"); if (args.length >= 2) { @@ -1011,20 +1024,28 @@ public void run() { if ("pcache".equals(args[i])) { precache = true; } + if ("firstframe".equals(args[i])) { + fistFrame = true; + } + if ("nostream".equals(args[i])) { + notCreateStream = true; + } + } + if (fistFrame) { + notCreateStream = true; + } + } + BitmapsCache.CacheOptions cacheOptions = null; + if (precache && !fistFrame) { + cacheOptions = new BitmapsCache.CacheOptions(); + if (cacheImage.filter != null && cacheImage.filter.contains("compress")) { + cacheOptions.compressQuality = BitmapsCache.COMPRESS_QUALITY_DEFAULT; } } if ((isAnimatedAvatar(cacheImage.filter) || AUTOPLAY_FILTER.equals(cacheImage.filter)) && !(cacheImage.imageLocation.document instanceof TLRPC.TL_documentEncrypted) && !precache) { TLRPC.Document document = cacheImage.imageLocation.document instanceof TLRPC.Document ? cacheImage.imageLocation.document : null; long size = document != null ? cacheImage.size : cacheImage.imageLocation.currentSize; - BitmapsCache.CacheOptions cacheOptions = null; - if (precache) { - cacheOptions = new BitmapsCache.CacheOptions(); - if (cacheImage.filter != null && cacheImage.filter.contains("compress")) { - cacheOptions.compressQuality = BitmapsCache.COMPRESS_QUALITY_DEFAULT; - } - } - boolean notCreateStream = cacheImage.filter != null && cacheImage.filter.contains("nostream"); - fileDrawable = new AnimatedFileDrawable(cacheImage.finalFilePath, false, notCreateStream ? 0 : size, notCreateStream ? null : document, document == null && !notCreateStream ? cacheImage.imageLocation : null, cacheImage.parentObject, seekTo, cacheImage.currentAccount, false, cacheOptions); + fileDrawable = new AnimatedFileDrawable(cacheImage.finalFilePath, fistFrame, notCreateStream ? 0 : size, cacheImage.priority, notCreateStream ? null : document, document == null && !notCreateStream ? cacheImage.imageLocation : null, cacheImage.parentObject, seekTo, cacheImage.currentAccount, false, cacheOptions); fileDrawable.setIsWebmSticker(MessageObject.isWebM(document) || MessageObject.isVideoSticker(document) || isAnimatedAvatar(cacheImage.filter)); } else { @@ -1039,21 +1060,25 @@ public void run() { h = (int) (h_filter * AndroidUtilities.density); } } - BitmapsCache.CacheOptions cacheOptions = null; - if (precache) { - cacheOptions = new BitmapsCache.CacheOptions(); - if (cacheImage.filter != null && cacheImage.filter.contains("compress")) { - cacheOptions.compressQuality = BitmapsCache.COMPRESS_QUALITY_DEFAULT; - } - } - boolean createDecoder = cacheImage.filter != null && ("d".equals(cacheImage.filter) || cacheImage.filter.contains("_d")); - boolean notCreateStream = cacheImage.filter != null && cacheImage.filter.contains("nostream"); - fileDrawable = new AnimatedFileDrawable(cacheImage.finalFilePath, createDecoder, 0, notCreateStream ? 
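The ImageLoader hunks above add "firstframe" and "nostream" tokens to the underscore-separated filter string, with "firstframe" implying "nostream". A small illustrative parser for that token convention (helper name is hypothetical):

// Hypothetical illustration of the filter-token convention used above,
// e.g. "100_100_pcache_firstframe": tokens are separated by '_'.
static boolean hasFilterFlag(String filter, String flag) {
    if (filter == null) {
        return false;
    }
    for (String token : filter.split("_")) {
        if (flag.equals(token)) {
            return true;
        }
    }
    return false;
}

// As in the patched branch, "firstframe" implies "nostream":
// boolean firstFrame = hasFilterFlag(filter, "firstframe");
// boolean noStream = firstFrame || hasFilterFlag(filter, "nostream");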
null : cacheImage.imageLocation.document, null, null, seekTo, cacheImage.currentAccount, false, w, h, cacheOptions); + boolean createDecoder = fistFrame || (cacheImage.filter != null && ("d".equals(cacheImage.filter) || cacheImage.filter.contains("_d"))); + fileDrawable = new AnimatedFileDrawable(cacheImage.finalFilePath, createDecoder, 0, cacheImage.priority, notCreateStream ? null : cacheImage.imageLocation.document, null, null, seekTo, cacheImage.currentAccount, false, w, h, cacheOptions); fileDrawable.setIsWebmSticker(MessageObject.isWebM(cacheImage.imageLocation.document) || MessageObject.isVideoSticker(cacheImage.imageLocation.document) || isAnimatedAvatar(cacheImage.filter)); } - fileDrawable.setLimitFps(limitFps); - Thread.interrupted(); - onPostExecute(fileDrawable); + if (fistFrame) { + Bitmap bitmap = fileDrawable.getFrameAtTime(0, false); + + fileDrawable.recycle(); + Thread.interrupted(); + if (bitmap == null) { + onPostExecute(null); + } else { + onPostExecute(new BitmapDrawable(bitmap)); + } + } else { + fileDrawable.setLimitFps(limitFps); + Thread.interrupted(); + onPostExecute(fileDrawable); + } } else { Long mediaId = null; boolean mediaIsVideo = false; @@ -1387,7 +1412,7 @@ public void run() { if (mediaId != null && mediaThumbPath == null) { if (mediaIsVideo) { if (mediaId == 0) { - AnimatedFileDrawable fileDrawable = new AnimatedFileDrawable(cacheFileFinal, true, 0, null, null, null, 0, 0, true, null); + AnimatedFileDrawable fileDrawable = new AnimatedFileDrawable(cacheFileFinal, true, 0, 0, null, null, null, 0, 0, true, null); image = fileDrawable.getFrameAtTime(0, true); fileDrawable.recycle(); } else { @@ -1398,7 +1423,7 @@ public void run() { } } if (image == null) { - if (useNativeWebpLoader) { + if (useNativeWebpLoader && secureDocumentKey == null) { RandomAccessFile file = new RandomAccessFile(cacheFileFinal, "r"); ByteBuffer buffer = file.getChannel().map(FileChannel.MapMode.READ_ONLY, 0, cacheFileFinal.length()); @@ -1410,41 +1435,11 @@ public void run() { Utilities.loadWebpImage(image, buffer, buffer.limit(), null, !opts.inPurgeable); file.close(); } else { - try { - - RandomAccessFile f = new RandomAccessFile(cacheFileFinal, "r"); - int len = (int) f.length(); - int offset = 0; - byte[] bytes = bytesLocal.get(); - byte[] data = bytes != null && bytes.length >= len ? 
bytes : null; - if (data == null) { - bytes = data = new byte[len]; - bytesLocal.set(bytes); - } - f.readFully(data, 0, len); - f.close(); - boolean error = false; - if (secureDocumentKey != null) { - EncryptedFileInputStream.decryptBytesWithKeyFile(data, 0, len, secureDocumentKey); - byte[] hash = Utilities.computeSHA256(data, 0, len); - if (secureDocumentHash == null || !Arrays.equals(hash, secureDocumentHash)) { - error = true; - } - offset = (data[0] & 0xff); - len -= offset; - } else if (inEncryptedFile) { - EncryptedFileInputStream.decryptBytesWithKeyFile(data, 0, len, cacheImage.encryptionKeyPath); - } - if (!error) { - image = BitmapFactory.decodeByteArray(data, offset, len, opts); - } - } catch (Throwable e) { - - } - if (image == null) { FileInputStream is; - if (inEncryptedFile) { + if (secureDocumentKey != null) { + is = new EncryptedFileInputStream(cacheFileFinal, secureDocumentKey); + } else if (inEncryptedFile) { is = new EncryptedFileInputStream(cacheFileFinal, cacheImage.encryptionKeyPath); } else { is = new FileInputStream(cacheFileFinal); @@ -1467,11 +1462,53 @@ public void run() { } catch (Throwable ignore) { } - is.getChannel().position(0); + if (secureDocumentKey != null || cacheImage.encryptionKeyPath != null) { + is.close(); + if (secureDocumentKey != null) { + is = new EncryptedFileInputStream(cacheFileFinal, secureDocumentKey); + } else if (inEncryptedFile) { + is = new EncryptedFileInputStream(cacheFileFinal, cacheImage.encryptionKeyPath); + } + } else { + is.getChannel().position(0); + } } image = BitmapFactory.decodeStream(is, null, opts); is.close(); } + + if (image == null) { + try { + RandomAccessFile f = new RandomAccessFile(cacheFileFinal, "r"); + int len = (int) f.length(); + int offset = 0; + byte[] bytes = bytesLocal.get(); + byte[] data = bytes != null && bytes.length >= len ? 
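The decode path above stops rewinding encrypted sources with getChannel().position(0) and instead closes and reopens the stream, since a decrypting stream cannot simply be repositioned; plain files keep the cheap rewind. A condensed sketch of that choice, assuming the EncryptedFileInputStream constructors used in the hunk and the surrounding field types:

// Condensed sketch of the reopen-instead-of-rewind choice made above.
// Assumes EncryptedFileInputStream extends FileInputStream and has the two
// constructors used in the hunk; the parameter types here are assumptions.
private static FileInputStream reopenForDecode(File file,
                                               SecureDocumentKey secureDocumentKey,
                                               File encryptionKeyPath,
                                               FileInputStream current) throws IOException {
    if (secureDocumentKey != null || encryptionKeyPath != null) {
        current.close(); // a decrypting stream cannot safely be rewound
        if (secureDocumentKey != null) {
            return new EncryptedFileInputStream(file, secureDocumentKey);
        }
        return new EncryptedFileInputStream(file, encryptionKeyPath);
    }
    current.getChannel().position(0); // plain files can simply be rewound
    return current;
}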
bytes : null; + if (data == null) { + bytes = data = new byte[len]; + bytesLocal.set(bytes); + } + f.readFully(data, 0, len); + f.close(); + boolean error = false; + if (secureDocumentKey != null) { + EncryptedFileInputStream.decryptBytesWithKeyFile(data, 0, len, secureDocumentKey); + byte[] hash = Utilities.computeSHA256(data, 0, len); + if (secureDocumentHash == null || !Arrays.equals(hash, secureDocumentHash)) { + error = true; + } + offset = (data[0] & 0xff); + len -= offset; + } else if (inEncryptedFile) { + EncryptedFileInputStream.decryptBytesWithKeyFile(data, 0, len, cacheImage.encryptionKeyPath); + } + if (!error) { + image = BitmapFactory.decodeByteArray(data, offset, len, opts); + } + } catch (Throwable e) { + FileLog.e(e); + } + } } } if (image == null) { @@ -1551,10 +1588,10 @@ public void run() { } } - private void loadLastFrame(RLottieDrawable lottieDrawable, int w, int h, boolean lastFrame) { + private void loadLastFrame(RLottieDrawable lottieDrawable, int w, int h, boolean lastFrame, boolean reaction) { Bitmap bitmap; Canvas canvas; - if (lastFrame) { + if (lastFrame && reaction) { bitmap = Bitmap.createBitmap((int) (w * ImageReceiver.ReactionLastFrame.LAST_FRAME_SCALE), (int) (h * ImageReceiver.ReactionLastFrame.LAST_FRAME_SCALE), Bitmap.Config.ARGB_8888); canvas = new Canvas(bitmap); canvas.scale(2f, 2f, w * ImageReceiver.ReactionLastFrame.LAST_FRAME_SCALE / 2f, h * ImageReceiver.ReactionLastFrame.LAST_FRAME_SCALE / 2f); @@ -1563,32 +1600,29 @@ private void loadLastFrame(RLottieDrawable lottieDrawable, int w, int h, boolean canvas = new Canvas(bitmap); } + lottieDrawable.prepareForGenerateCache(); + Bitmap currentBitmap = Bitmap.createBitmap(lottieDrawable.getIntrinsicWidth(), lottieDrawable.getIntrinsicHeight(), Bitmap.Config.ARGB_8888); + lottieDrawable.setGeneratingFrame(lastFrame ? lottieDrawable.getFramesCount() - 1 : 0); + lottieDrawable.getNextFrame(currentBitmap); + lottieDrawable.releaseForGenerateCache(); + canvas.save(); + if (!(lastFrame && reaction)) { + canvas.scale(currentBitmap.getWidth() / w, currentBitmap.getHeight() / h, w / 2f, h / 2f); + } + Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + paint.setFilterBitmap(true); + BitmapDrawable bitmapDrawable = null; + if (lastFrame && reaction) { + canvas.drawBitmap(currentBitmap, (bitmap.getWidth() - currentBitmap.getWidth()) / 2f, (bitmap.getHeight() - currentBitmap.getHeight()) / 2f, paint); + bitmapDrawable = new ImageReceiver.ReactionLastFrame(bitmap); + } else { + canvas.drawBitmap(currentBitmap, 0, 0, paint); + bitmapDrawable = new BitmapDrawable(bitmap); + } - AndroidUtilities.runOnUIThread(() -> { - lottieDrawable.setOnFrameReadyRunnable(() -> { - lottieDrawable.setOnFrameReadyRunnable(null); - BitmapDrawable bitmapDrawable = null; - if (lottieDrawable.getBackgroundBitmap() != null || lottieDrawable.getRenderingBitmap() != null) { - Bitmap currentBitmap = lottieDrawable.getBackgroundBitmap() != null ? 
lottieDrawable.getBackgroundBitmap() : lottieDrawable.getRenderingBitmap(); - canvas.save(); - if (!lastFrame) { - canvas.scale(currentBitmap.getWidth() / w, currentBitmap.getHeight() / h, w / 2f, h / 2f); - } - Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); - paint.setFilterBitmap(true); - if (lastFrame) { - canvas.drawBitmap(currentBitmap, (bitmap.getWidth() - currentBitmap.getWidth()) / 2f, (bitmap.getHeight() - currentBitmap.getHeight()) / 2f, paint); - bitmapDrawable = new ImageReceiver.ReactionLastFrame(bitmap); - } else { - canvas.drawBitmap(currentBitmap, 0, 0, paint); - bitmapDrawable = new BitmapDrawable(bitmap); - } - } - onPostExecute(bitmapDrawable); - lottieDrawable.recycle(); - }); - lottieDrawable.setCurrentFrame(lastFrame ? lottieDrawable.getFramesCount() - 1 : 0, true, true); - }); + lottieDrawable.recycle(false); + currentBitmap.recycle(); + onPostExecute(bitmapDrawable); } private void onPostExecute(final Drawable drawable) { @@ -1602,7 +1636,7 @@ private void onPostExecute(final Drawable drawable) { lottieMemCache.put(cacheImage.key, lottieDrawable); toSet = lottieDrawable; } else { - lottieDrawable.recycle(); + lottieDrawable.recycle(false); } if (toSet != null) { incrementUseCount(cacheImage.key); @@ -1739,7 +1773,7 @@ private class CacheImage { public void addImageReceiver(ImageReceiver imageReceiver, String key, String filter, int type, int guid) { int index = imageReceiverArray.indexOf(imageReceiver); - if (index >= 0) { + if (index >= 0 && Objects.equals(imageReceiverArray.get(index).getImageKey(), key)) { imageReceiverGuidsArray.set(index, guid); return; } @@ -1991,7 +2025,7 @@ protected void entryRemoved(boolean evicted, String key, final BitmapDrawable ol ((AnimatedFileDrawable) oldValue).recycle(); } if (oldValue instanceof RLottieDrawable) { - ((RLottieDrawable) oldValue).recycle(); + ((RLottieDrawable) oldValue).recycle(false); } } } @@ -2043,11 +2077,14 @@ public void fileDidFailedUpload(final String location, final boolean isEncrypted public void fileDidLoaded(final String location, final File finalFile, Object parentObject, final int type) { fileProgresses.remove(location); AndroidUtilities.runOnUIThread(() -> { - if (SharedConfig.saveToGalleryFlags != 0 && finalFile != null && (location.endsWith(".mp4") || location.endsWith(".jpg"))) { - if (parentObject instanceof MessageObject) { - MessageObject messageObject = (MessageObject) parentObject; - - long dialogId = messageObject.getDialogId(); + if (finalFile != null && (location.endsWith(".mp4") || location.endsWith(".jpg"))) { + FilePathDatabase.FileMeta meta = FileLoader.getFileMetadataFromParent(currentAccount, parentObject); + if (meta != null) { + MessageObject messageObject = null; + if (parentObject instanceof MessageObject) { + messageObject = (MessageObject) parentObject; + } + long dialogId = meta.dialogId; int flag; if (dialogId >= 0) { flag = SharedConfig.SAVE_TO_GALLERY_FLAG_PEER; @@ -2058,7 +2095,7 @@ public void fileDidLoaded(final String location, final File finalFile, Object pa flag = SharedConfig.SAVE_TO_GALLERY_FLAG_GROUP; } } - if ((SharedConfig.saveToGalleryFlags & flag) != 0) { + if (SaveToGallerySettingsHelper.needSave(flag, meta, messageObject, currentAccount)) { AndroidUtilities.addMediaToGallery(finalFile.toString()); } } @@ -2127,9 +2164,18 @@ public void onReceive(Context arg0, Intent intent) { } public void checkMediaPaths() { + checkMediaPaths(null); + } + + public void checkMediaPaths(Runnable after) { cacheOutQueue.postRunnable(() -> { final SparseArray paths = 
createMediaPaths(); - AndroidUtilities.runOnUIThread(() -> FileLoader.setMediaDirs(paths)); + AndroidUtilities.runOnUIThread(() -> { + FileLoader.setMediaDirs(paths); + if (after != null) { + after.run(); + } + }); }); } @@ -2229,6 +2275,20 @@ public SparseArray createMediaPaths() { return mediaDirs; } + @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) + private File getPublicStorageDir() { + File publicMediaDir = ApplicationLoader.applicationContext.getExternalMediaDirs()[0]; + if (!TextUtils.isEmpty(SharedConfig.storageCacheDir)) { + for (int i = 0; i < ApplicationLoader.applicationContext.getExternalMediaDirs().length; i++) { + File f = ApplicationLoader.applicationContext.getExternalMediaDirs()[i]; + if (f != null && f.getPath().startsWith(SharedConfig.storageCacheDir)) { + publicMediaDir = ApplicationLoader.applicationContext.getExternalMediaDirs()[i]; + } + } + } + return publicMediaDir; + } + private boolean canMoveFiles(File from, File to, int type) { RandomAccessFile file = null; try { @@ -2432,7 +2492,7 @@ public void cancelLoadingForImageReceiver(final ImageReceiver imageReceiver, fin } } } - }, imageReceiver.getFileLoadingPriority() == FileLoader.PRIORITY_LOW ? 0 : 1); + }); } public BitmapDrawable getImageFromMemory(TLObject fileLocation, String httpUrl, String filter) { @@ -2545,6 +2605,7 @@ private void createLoadOperationForImageReceiver(final ImageReceiver imageReceiv final boolean shouldGenerateQualityThumb = imageReceiver.isShouldGenerateQualityThumb(); final int currentAccount = imageReceiver.getCurrentAccount(); final boolean currentKeyQuality = type == ImageReceiver.TYPE_IMAGE && imageReceiver.isCurrentKeyQuality(); + final Runnable loadOperationRunnable = () -> { boolean added = false; if (thumb != 2) { @@ -3211,6 +3272,9 @@ private BitmapDrawable getFromLottieCache(String imageKey) { } private boolean useLottieMemCache(ImageLocation imageLocation, String key) { + if (key.endsWith("_firstframe") || key.endsWith("_lastframe")) { + return false; + } return imageLocation != null && (MessageObject.isAnimatedStickerDocument(imageLocation.document, true) || imageLocation.imageType == FileLoader.IMAGE_TYPE_LOTTIE || MessageObject.isVideoSticker(imageLocation.document)) || isAnimatedAvatar(key); } @@ -3770,50 +3834,60 @@ public static void saveMessageThumbs(TLRPC.Message message) { TLRPC.PhotoSize photoSize = findPhotoCachedSize(message); if (photoSize != null && photoSize.bytes != null && photoSize.bytes.length != 0) { + TLRPC.PhotoSize newPhotoSize; if (photoSize.location == null || photoSize.location instanceof TLRPC.TL_fileLocationUnavailable) { photoSize.location = new TLRPC.TL_fileLocationToBeDeprecated(); photoSize.location.volume_id = Integer.MIN_VALUE; photoSize.location.local_id = SharedConfig.getLastLocalId(); } - File file = FileLoader.getInstance(UserConfig.selectedAccount).getPathToAttach(photoSize, true); - boolean isEncrypted = false; - if (MessageObject.shouldEncryptPhotoOrVideo(message)) { - file = new File(file.getAbsolutePath() + ".enc"); - isEncrypted = true; - } - if (!file.exists()) { - try { - if (isEncrypted) { - File keyPath = new File(FileLoader.getInternalCacheDir(), file.getName() + ".key"); - RandomAccessFile keyFile = new RandomAccessFile(keyPath, "rws"); - long len = keyFile.length(); - byte[] encryptKey = new byte[32]; - byte[] encryptIv = new byte[16]; - if (len > 0 && len % 48 == 0) { - keyFile.read(encryptKey, 0, 32); - keyFile.read(encryptIv, 0, 16); - } else { - Utilities.random.nextBytes(encryptKey); - 
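The checkMediaPaths(Runnable after) overload added above posts the directory scan to cacheOutQueue and invokes the callback on the UI thread only after FileLoader.setMediaDirs(paths) has been applied. A usage sketch (the callback body is illustrative):

// Illustrative usage of the new overload: the Runnable fires on the UI thread
// only after FileLoader.setMediaDirs(paths) has taken effect.
ImageLoader.getInstance().checkMediaPaths(() -> {
    // media directories are now resolved; e.g. refresh a storage settings screen
});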
Utilities.random.nextBytes(encryptIv); - keyFile.write(encryptKey); - keyFile.write(encryptIv); + if (photoSize.h <= 50 && photoSize.w <= 50) { + newPhotoSize = new TLRPC.TL_photoStrippedSize(); + newPhotoSize.location = photoSize.location; + newPhotoSize.bytes = photoSize.bytes; + newPhotoSize.h = photoSize.h; + newPhotoSize.w = photoSize.w; + } else { + File file = FileLoader.getInstance(UserConfig.selectedAccount).getPathToAttach(photoSize, true); + boolean isEncrypted = false; + if (MessageObject.shouldEncryptPhotoOrVideo(message)) { + file = new File(file.getAbsolutePath() + ".enc"); + isEncrypted = true; + } + if (!file.exists()) { + try { + if (isEncrypted) { + File keyPath = new File(FileLoader.getInternalCacheDir(), file.getName() + ".key"); + RandomAccessFile keyFile = new RandomAccessFile(keyPath, "rws"); + long len = keyFile.length(); + byte[] encryptKey = new byte[32]; + byte[] encryptIv = new byte[16]; + if (len > 0 && len % 48 == 0) { + keyFile.read(encryptKey, 0, 32); + keyFile.read(encryptIv, 0, 16); + } else { + Utilities.random.nextBytes(encryptKey); + Utilities.random.nextBytes(encryptIv); + keyFile.write(encryptKey); + keyFile.write(encryptIv); + } + keyFile.close(); + Utilities.aesCtrDecryptionByteArray(photoSize.bytes, encryptKey, encryptIv, 0, photoSize.bytes.length, 0); } - keyFile.close(); - Utilities.aesCtrDecryptionByteArray(photoSize.bytes, encryptKey, encryptIv, 0, photoSize.bytes.length, 0); + RandomAccessFile writeFile = new RandomAccessFile(file, "rws"); + writeFile.write(photoSize.bytes); + writeFile.close(); + } catch (Exception e) { + FileLog.e(e); } - RandomAccessFile writeFile = new RandomAccessFile(file, "rws"); - writeFile.write(photoSize.bytes); - writeFile.close(); - } catch (Exception e) { - FileLog.e(e); } + + newPhotoSize = new TLRPC.TL_photoSize_layer127(); + newPhotoSize.w = photoSize.w; + newPhotoSize.h = photoSize.h; + newPhotoSize.location = photoSize.location; + newPhotoSize.size = photoSize.size; + newPhotoSize.type = photoSize.type; } - TLRPC.TL_photoSize newPhotoSize = new TLRPC.TL_photoSize_layer127(); - newPhotoSize.w = photoSize.w; - newPhotoSize.h = photoSize.h; - newPhotoSize.location = photoSize.location; - newPhotoSize.size = photoSize.size; - newPhotoSize.type = photoSize.type; if (message.media instanceof TLRPC.TL_messageMediaPhoto) { for (int a = 0, count = message.media.photo.sizes.size(); a < count; a++) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ImageLocation.java b/TMessagesProj/src/main/java/org/telegram/messenger/ImageLocation.java index 5f0f6944e1..019889416f 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ImageLocation.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ImageLocation.java @@ -118,7 +118,8 @@ public static ImageLocation getForPhoto(TLRPC.PhotoSize photoSize, TLRPC.Photo p public static final int TYPE_BIG = 0; public static final int TYPE_SMALL = 1; public static final int TYPE_STRIPPED = 2; - public static final int TYPE_VIDEO_THUMB = 3; + public static final int TYPE_VIDEO_SMALL = 3; + public static final int TYPE_VIDEO_BIG = 4; public static ImageLocation getForUserOrChat(TLObject object, int type) { if (object instanceof TLRPC.User) { @@ -141,19 +142,25 @@ public static ImageLocation getForUser(TLRPC.User user, int type) { if (user == null || user.access_hash == 0 || user.photo == null) { return null; } - if (type == TYPE_VIDEO_THUMB) { + if (type == TYPE_VIDEO_BIG || type == TYPE_VIDEO_SMALL) { int currentAccount = UserConfig.selectedAccount; if 
(MessagesController.getInstance(currentAccount).isPremiumUser(user) && user.photo.has_video) { final TLRPC.UserFull userFull = MessagesController.getInstance(currentAccount).getUserFull(user.id); if (userFull != null && userFull.profile_photo != null && userFull.profile_photo.video_sizes != null && !userFull.profile_photo.video_sizes.isEmpty()) { - TLRPC.VideoSize videoSize = userFull.profile_photo.video_sizes.get(0); - for (int i = 0; i < userFull.profile_photo.video_sizes.size(); i++) { - if ("p".equals(userFull.profile_photo.video_sizes.get(i).type)) { - videoSize = userFull.profile_photo.video_sizes.get(i); - break; + if (type == TYPE_VIDEO_BIG) { + TLRPC.VideoSize videoSize = FileLoader.getClosestVideoSizeWithSize(userFull.profile_photo.video_sizes, 1000); + return ImageLocation.getForPhoto(videoSize, userFull.profile_photo); + } else { + TLRPC.VideoSize videoSize = FileLoader.getClosestVideoSizeWithSize(userFull.profile_photo.video_sizes, 100); + for (int i = 0; i < userFull.profile_photo.video_sizes.size(); i++) { + if ("p".equals(userFull.profile_photo.video_sizes.get(i).type)) { + videoSize = userFull.profile_photo.video_sizes.get(i); + break; + } } + return ImageLocation.getForPhoto(videoSize, userFull.profile_photo); } - return ImageLocation.getForPhoto(videoSize, userFull.profile_photo); + } } return null; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java b/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java index f79782d8cc..74dc845c6e 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java @@ -33,12 +33,14 @@ import org.telegram.tgnet.TLObject; import org.telegram.tgnet.TLRPC; import org.telegram.ui.Components.AnimatedFileDrawable; +import org.telegram.ui.Components.AttachableDrawable; import org.telegram.ui.Components.AvatarDrawable; import org.telegram.ui.Components.ClipRoundedDrawable; import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.LoadingStickerDrawable; import org.telegram.ui.Components.RLottieDrawable; import org.telegram.ui.Components.RecyclableDrawable; +import org.telegram.ui.Components.VectorAvatarThumbDrawable; import java.util.ArrayList; @@ -125,7 +127,7 @@ public void release() { } else if (drawable != null) { if (drawable instanceof RLottieDrawable) { RLottieDrawable fileDrawable = (RLottieDrawable) drawable; - fileDrawable.recycle(); + fileDrawable.recycle(false); } else if (drawable instanceof AnimatedFileDrawable) { AnimatedFileDrawable fileDrawable = (AnimatedFileDrawable) drawable; fileDrawable.recycle(); @@ -305,8 +307,7 @@ private void clear() { private ArrayList loadingOperations = new ArrayList<>(); private boolean attachedToWindow; private boolean videoThumbIsSame; - private boolean allowLoadingOnAttachedOnly; - private boolean shouldLoadOnAttach; + private boolean allowLoadingOnAttachedOnly = false; private boolean skipUpdateFrame; public boolean clip = true; @@ -372,10 +373,10 @@ public void setForUserOrChat(TLObject object, Drawable avatarDrawable) { setForUserOrChat(object, avatarDrawable, null); } public void setForUserOrChat(TLObject object, Drawable avatarDrawable, Object parentObject) { - setForUserOrChat(object, avatarDrawable, null, false); + setForUserOrChat(object, avatarDrawable, parentObject, false, 0); } - public void setForUserOrChat(TLObject object, Drawable avatarDrawable, Object parentObject, boolean animationEnabled) { + public void 
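The ImageLocation changes above split TYPE_VIDEO_THUMB into TYPE_VIDEO_SMALL and TYPE_VIDEO_BIG and pick the candidate via FileLoader.getClosestVideoSizeWithSize(sizes, 100) or (sizes, 1000). That helper is not defined in this excerpt; the sketch below is only an assumed reading of it (choose the entry whose width is closest to the target):

// Assumed reading of FileLoader.getClosestVideoSizeWithSize (the real helper is not
// in this excerpt): return the entry whose width is closest to the requested size.
public static TLRPC.VideoSize getClosestVideoSizeWithSize(ArrayList<TLRPC.VideoSize> sizes, int target) {
    if (sizes == null || sizes.isEmpty()) {
        return null;
    }
    TLRPC.VideoSize closest = sizes.get(0);
    for (int i = 1; i < sizes.size(); i++) {
        TLRPC.VideoSize candidate = sizes.get(i);
        if (Math.abs(candidate.w - target) < Math.abs(closest.w - target)) {
            closest = candidate;
        }
    }
    return closest;
}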
setForUserOrChat(TLObject object, Drawable avatarDrawable, Object parentObject, boolean animationEnabled, int vectorType) { if (parentObject == null) { parentObject = object; } @@ -383,29 +384,46 @@ public void setForUserOrChat(TLObject object, Drawable avatarDrawable, Object pa BitmapDrawable strippedBitmap = null; boolean hasStripped = false; ImageLocation videoLocation = null; + TLRPC.VideoSize vectorImageMarkup = null; + boolean isPremium = false; if (object instanceof TLRPC.User) { TLRPC.User user = (TLRPC.User) object; + isPremium = user.premium; if (user.photo != null) { strippedBitmap = user.photo.strippedBitmap; hasStripped = user.photo.stripped_thumb != null; - if (animationEnabled && MessagesController.getInstance(currentAccount).isPremiumUser(user) && user.photo.has_video && !SharedConfig.getLiteMode().enabled() && NaConfig.INSTANCE.getShowPremiumAvatarAnimation().Bool()) { + if (vectorType == VectorAvatarThumbDrawable.TYPE_STATIC) { + final TLRPC.UserFull userFull = MessagesController.getInstance(currentAccount).getUserFull(user.id); + if (userFull != null) { + TLRPC.Photo photo = user.photo.personal ? userFull.personal_photo : userFull.profile_photo; + if (photo != null) { + vectorImageMarkup = FileLoader.getVectorMarkupVideoSize(photo); + } + } + } + if (vectorImageMarkup == null && animationEnabled && MessagesController.getInstance(currentAccount).isPremiumUser(user) && user.photo.has_video && LiteMode.isEnabled(LiteMode.FLAG_AUTOPLAY_VIDEOS) && NaConfig.INSTANCE.getShowPremiumAvatarAnimation().Bool()) { final TLRPC.UserFull userFull = MessagesController.getInstance(currentAccount).getUserFull(user.id); if (userFull == null) { MessagesController.getInstance(currentAccount).loadFullUser(user, currentGuid, false); } else { - TLRPC.Photo photo = userFull.profile_photo; + TLRPC.Photo photo = user.photo.personal ? 
userFull.personal_photo : userFull.profile_photo; if (photo != null) { - ArrayList videoSizes = photo.video_sizes; - if (videoSizes != null && !videoSizes.isEmpty()) { - TLRPC.VideoSize videoSize = videoSizes.get(0); - for (int i = 0; i < videoSizes.size(); i++) { - TLRPC.VideoSize videoSize1 = videoSizes.get(i); - if ("p".equals(videoSize1.type)) { - videoSize = videoSize1; - break; + vectorImageMarkup = FileLoader.getVectorMarkupVideoSize(photo); + if (vectorImageMarkup == null) { + ArrayList videoSizes = photo.video_sizes; + if (videoSizes != null && !videoSizes.isEmpty()) { + TLRPC.VideoSize videoSize = FileLoader.getClosestVideoSizeWithSize(videoSizes, 100); + for (int i = 0; i < videoSizes.size(); i++) { + TLRPC.VideoSize videoSize1 = videoSizes.get(i); + if ("p".equals(videoSize1.type)) { + videoSize = videoSize1; + } + if (videoSize1 instanceof TLRPC.TL_videoSizeEmojiMarkup || videoSize1 instanceof TLRPC.TL_videoSizeStickerMarkup) { + vectorImageMarkup = videoSize1; + } } + videoLocation = ImageLocation.getForPhoto(videoSize, photo); } - videoLocation = ImageLocation.getForPhoto(videoSize, photo); } } } @@ -418,18 +436,23 @@ public void setForUserOrChat(TLObject object, Drawable avatarDrawable, Object pa hasStripped = chat.photo.stripped_thumb != null; } } - ImageLocation location = ImageLocation.getForUserOrChat(object, ImageLocation.TYPE_SMALL); - String filter = "50_50"; - if (videoLocation != null) { - setImage(videoLocation, "avatar", location, filter, null, null, strippedBitmap, 0, null, parentObject, 0); - animatedFileDrawableRepeatMaxCount = 3; + if (vectorImageMarkup != null && vectorType != 0) { + VectorAvatarThumbDrawable drawable = new VectorAvatarThumbDrawable(vectorImageMarkup, isPremium, vectorType); + setImageBitmap(drawable); } else { - if (strippedBitmap != null) { - setImage(location, filter, strippedBitmap, null, parentObject, 0); - } else if (hasStripped) { - setImage(location, filter, ImageLocation.getForUserOrChat(object, ImageLocation.TYPE_STRIPPED), "50_50_b", avatarDrawable, parentObject, 0); + ImageLocation location = ImageLocation.getForUserOrChat(object, ImageLocation.TYPE_SMALL); + String filter = "50_50"; + if (videoLocation != null) { + setImage(videoLocation, "avatar", location, filter, null, null, strippedBitmap, 0, null, parentObject, 0); + animatedFileDrawableRepeatMaxCount = 3; } else { - setImage(location, filter, avatarDrawable, null, parentObject, 0); + if (strippedBitmap != null) { + setImage(location, filter, strippedBitmap, null, parentObject, 0); + } else if (hasStripped) { + setImage(location, filter, ImageLocation.getForUserOrChat(object, ImageLocation.TYPE_STRIPPED), "50_50_b", avatarDrawable, parentObject, 0); + } else { + setImage(location, filter, avatarDrawable, null, parentObject, 0); + } } } @@ -503,7 +526,7 @@ public void setImage(ImageLocation mediaLocation, String mediaFilter, ImageLocat currentParentObject = null; currentCacheType = 0; roundPaint.setShader(null); - staticThumbDrawable = thumb; + setStaticDrawable(thumb); currentAlpha = 1.0f; previousAlpha = 1f; currentSize = 0; @@ -655,7 +678,7 @@ public void setImage(ImageLocation mediaLocation, String mediaFilter, ImageLocat currentExt = ext; currentSize = size; currentCacheType = cacheType; - staticThumbDrawable = thumb; + setStaticDrawable(thumb); imageShader = null; composeShader = null; thumbShader = null; @@ -826,7 +849,8 @@ public void setImageBitmap(Drawable bitmap, boolean notify) { } thumbShader = null; roundPaint.setShader(null); - staticThumbDrawable = bitmap; + 
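setForUserOrChat above gains an int vectorType parameter: when the profile photo carries an emoji or sticker markup video size and vectorType is non-zero, the receiver is handed a VectorAvatarThumbDrawable instead of loading a bitmap. A usage sketch built from the signatures visible in this diff (user and avatarDrawable are placeholders):

// Illustrative call with the extended signature; TYPE_STATIC is the constant
// checked above, other values come from VectorAvatarThumbDrawable.
imageReceiver.setForUserOrChat(user, avatarDrawable, null,
        /* animationEnabled */ true, VectorAvatarThumbDrawable.TYPE_STATIC);

// The three-argument overload now forwards as (parentObject, false, 0), i.e. no vector thumb.
imageReceiver.setForUserOrChat(user, avatarDrawable, parentObject);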
setStaticDrawable(bitmap); + updateDrawableRadius(bitmap); currentMediaLocation = null; currentMediaFilter = null; @@ -876,6 +900,26 @@ public void setImageBitmap(Drawable bitmap, boolean notify) { } } + private void setStaticDrawable(Drawable bitmap) { + if (bitmap == staticThumbDrawable) { + return; + } + AttachableDrawable oldDrawable = null; + if (staticThumbDrawable instanceof AttachableDrawable) { + if (staticThumbDrawable.equals(bitmap)) { + return; + } + oldDrawable = (AttachableDrawable) staticThumbDrawable; + } + staticThumbDrawable = bitmap; + if (attachedToWindow && staticThumbDrawable instanceof AttachableDrawable) { + ((AttachableDrawable) staticThumbDrawable).onAttachedToWindow(this); + } + if (attachedToWindow && oldDrawable != null) { + oldDrawable.onDetachedFromWindow(this); + } + } + private void setDrawableShader(Drawable drawable, BitmapShader shader) { if (drawable == currentThumbDrawable || drawable == staticThumbDrawable) { thumbShader = shader; @@ -949,6 +993,9 @@ public void clearImage() { } public void onDetachedFromWindow() { + if (!attachedToWindow) { + return; + } attachedToWindow = false; if (currentImageLocation != null || currentMediaLocation != null || currentThumbLocation != null || staticThumbDrawable != null) { if (setImageBackup == null) { @@ -971,9 +1018,12 @@ public void onDetachedFromWindow() { NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.stopAllHeavyOperations); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.startAllHeavyOperations); } + if (staticThumbDrawable instanceof AttachableDrawable) { + ((AttachableDrawable) staticThumbDrawable).onDetachedFromWindow(this); + } if (staticThumbDrawable != null) { - staticThumbDrawable = null; + setStaticDrawable(null); thumbShader = null; roundPaint.setShader(null); } @@ -1031,6 +1081,9 @@ public void incrementFrames(int inc) { } public boolean onAttachedToWindow() { + if (attachedToWindow) { + return false; + } attachedToWindow = true; currentOpenedLayerFlags = NotificationCenter.getGlobalInstance().getCurrentHeavyOperationFlags(); currentOpenedLayerFlags &= ~currentLayerNum; @@ -1061,6 +1114,9 @@ public boolean onAttachedToWindow() { if (NotificationCenter.getGlobalInstance().isAnimationInProgress()) { didReceivedNotification(NotificationCenter.stopAllHeavyOperations, currentAccount, 512); } + if (staticThumbDrawable instanceof AttachableDrawable) { + ((AttachableDrawable) staticThumbDrawable).onAttachedToWindow(this); + } return false; } @@ -1894,6 +1950,9 @@ public boolean draw(Canvas canvas, BackgroundThreadDrawHolder backgroundThreadDr checkAlphaAnimation(animationNotReady && crossfadeWithThumb, backgroundThreadDrawHolder); result = true; } else if (staticThumbDrawable != null) { + if (staticThumbDrawable instanceof VectorAvatarThumbDrawable) { + ((VectorAvatarThumbDrawable) staticThumbDrawable).setParent(this); + } drawDrawable(canvas, staticThumbDrawable, (int) (overrideAlpha * 255), null, thumbOrientation, backgroundThreadDrawHolder); checkAlphaAnimation(animationNotReady, backgroundThreadDrawHolder); result = true; @@ -2140,7 +2199,7 @@ public boolean hasImageLoaded() { } public boolean hasNotThumb() { - return currentImageDrawable != null || currentMediaDrawable != null; + return currentImageDrawable != null || currentMediaDrawable != null || staticThumbDrawable instanceof VectorAvatarThumbDrawable; } public boolean hasStaticThumb() { @@ -2747,7 +2806,7 @@ private void recycleBitmap(String newKey, int type) { boolean canDelete = 
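setStaticDrawable above forwards window attach and detach to drawables implementing AttachableDrawable, and the new onAttachedToWindow/onDetachedFromWindow guards keep those callbacks balanced. A minimal sketch of a drawable cooperating with that contract, assuming the interface declares exactly the two callbacks invoked in the hunk:

import android.graphics.Canvas;
import android.graphics.ColorFilter;
import android.graphics.PixelFormat;
import android.graphics.drawable.Drawable;

import org.telegram.messenger.ImageReceiver;
import org.telegram.ui.Components.AttachableDrawable;

// Sketch only: assumes AttachableDrawable declares exactly the two callbacks that
// setStaticDrawable, onAttachedToWindow and onDetachedFromWindow invoke above.
public class SelfUpdatingDrawable extends Drawable implements AttachableDrawable {
    private int attachCount;

    @Override
    public void onAttachedToWindow(ImageReceiver parent) {
        attachCount++; // start animations or listeners while at least one receiver shows us
    }

    @Override
    public void onDetachedFromWindow(ImageReceiver parent) {
        attachCount--; // stop work once no attached ImageReceiver holds the drawable
    }

    @Override
    public void draw(Canvas canvas) {
        // draw current state
    }

    @Override
    public void setAlpha(int alpha) {
    }

    @Override
    public void setColorFilter(ColorFilter colorFilter) {
    }

    @Override
    public int getOpacity() {
        return PixelFormat.TRANSLUCENT;
    }
}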
ImageLoader.getInstance().decrementUseCount(key); if (!ImageLoader.getInstance().isInMemCache(key, true)) { if (canDelete) { - fileDrawable.recycle(); + fileDrawable.recycle(false); } } } else if (image instanceof AnimatedFileDrawable) { @@ -2878,7 +2937,7 @@ public void startCrossfadeFromStaticThumb(Drawable thumb) { currentThumbDrawable = null; thumbShader = null; roundPaint.setShader(null); - staticThumbDrawable = thumb; + setStaticDrawable(thumb); crossfadeWithThumb = true; currentAlpha = 0f; updateDrawableRadius(staticThumbDrawable); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/LiteMode.java b/TMessagesProj/src/main/java/org/telegram/messenger/LiteMode.java new file mode 100644 index 0000000000..43290e42c7 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/messenger/LiteMode.java @@ -0,0 +1,326 @@ +package org.telegram.messenger; + + +import android.content.Context; +import android.content.SharedPreferences; +import android.os.BatteryManager; +import android.os.Build; +import android.os.PowerManager; +import android.util.SparseArray; +import android.util.SparseIntArray; + +import androidx.annotation.RequiresApi; +import androidx.core.math.MathUtils; + +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.AnimatedEmojiDrawable; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.Iterator; + +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.AnimatedEmojiDrawable; + +public class LiteMode { + + public static final int FLAG_ANIMATED_STICKERS_KEYBOARD = 1; + public static final int FLAG_ANIMATED_STICKERS_CHAT = 2; + public static final int FLAGS_ANIMATED_STICKERS = FLAG_ANIMATED_STICKERS_KEYBOARD | FLAG_ANIMATED_STICKERS_CHAT; + + public static final int FLAG_ANIMATED_EMOJI_KEYBOARD_PREMIUM = 4; + public static final int FLAG_ANIMATED_EMOJI_KEYBOARD_NOT_PREMIUM = 16384; + public static final int FLAG_ANIMATED_EMOJI_KEYBOARD = FLAG_ANIMATED_EMOJI_KEYBOARD_PREMIUM | FLAG_ANIMATED_EMOJI_KEYBOARD_NOT_PREMIUM; + public static final int FLAG_ANIMATED_EMOJI_REACTIONS_PREMIUM = 8; + public static final int FLAG_ANIMATED_EMOJI_REACTIONS_NOT_PREMIUM = 8192; + public static final int FLAG_ANIMATED_EMOJI_REACTIONS = FLAG_ANIMATED_EMOJI_REACTIONS_PREMIUM | FLAG_ANIMATED_EMOJI_REACTIONS_NOT_PREMIUM; + public static final int FLAG_ANIMATED_EMOJI_CHAT_PREMIUM = 16; + public static final int FLAG_ANIMATED_EMOJI_CHAT_NOT_PREMIUM = 4096; + public static final int FLAG_ANIMATED_EMOJI_CHAT = FLAG_ANIMATED_EMOJI_CHAT_PREMIUM | FLAG_ANIMATED_EMOJI_CHAT_NOT_PREMIUM; + public static final int FLAGS_ANIMATED_EMOJI = FLAG_ANIMATED_EMOJI_KEYBOARD | FLAG_ANIMATED_EMOJI_REACTIONS | FLAG_ANIMATED_EMOJI_CHAT; + + public static final int FLAG_CHAT_BACKGROUND = 32; + public static final int FLAG_CHAT_FORUM_TWOCOLUMN = 64; + public static final int FLAG_CHAT_SPOILER = 128; + public static final int FLAG_CHAT_BLUR = 256; + public static final int FLAG_CHAT_SCALE = 32768; + public static final int FLAGS_CHAT = FLAG_CHAT_BACKGROUND | FLAG_CHAT_FORUM_TWOCOLUMN | FLAG_CHAT_SPOILER | FLAG_CHAT_BLUR | FLAG_CHAT_SCALE; + + public static final int FLAG_CALLS_ANIMATIONS = 512; + public static final int FLAG_AUTOPLAY_VIDEOS = 1024; + public static final int FLAG_AUTOPLAY_GIFS = 2048; + + public static int PRESET_LOW = ( + FLAG_ANIMATED_EMOJI_CHAT_PREMIUM | + FLAG_ANIMATED_EMOJI_KEYBOARD_PREMIUM | + FLAG_ANIMATED_EMOJI_REACTIONS_PREMIUM | + FLAG_AUTOPLAY_GIFS + ); // 2076 + public static int 
PRESET_MEDIUM = ( + FLAGS_ANIMATED_STICKERS | + FLAG_ANIMATED_EMOJI_KEYBOARD_PREMIUM | + FLAG_ANIMATED_EMOJI_REACTIONS_PREMIUM | + FLAG_ANIMATED_EMOJI_CHAT | + FLAG_CHAT_FORUM_TWOCOLUMN | + FLAG_CALLS_ANIMATIONS | + FLAG_AUTOPLAY_VIDEOS | + FLAG_AUTOPLAY_GIFS + ); // 7775 + public static int PRESET_HIGH = ( + FLAGS_ANIMATED_STICKERS | + FLAGS_ANIMATED_EMOJI | + FLAGS_CHAT | + FLAG_CALLS_ANIMATIONS | + FLAG_AUTOPLAY_VIDEOS | + FLAG_AUTOPLAY_GIFS + ); // 65535 + public static int PRESET_POWER_SAVER = 0; + + private static int BATTERY_LOW = 10; + private static int BATTERY_MEDIUM = 10; + private static int BATTERY_HIGH = 10; + + private static int powerSaverLevel; + private static boolean lastPowerSaverApplied; + + private static int value; + private static boolean loaded; + + public static int getValue() { + return getValue(false); + } + + public static int getValue(boolean ignorePowerSaving) { + if (!loaded) { + loadPreference(); + } + if (!ignorePowerSaving && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + if (getBatteryLevel() <= powerSaverLevel && powerSaverLevel > 0) { + if (!lastPowerSaverApplied) { + onPowerSaverApplied(lastPowerSaverApplied = true); + } + return PRESET_POWER_SAVER; + } + if (lastPowerSaverApplied) { + onPowerSaverApplied(lastPowerSaverApplied = false); + } + } + return value; + } + + private static int lastBatteryLevelCached = -1; + private static long lastBatteryLevelChecked; + + @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) + public static int getBatteryLevel() { + if (lastBatteryLevelCached < 0 || System.currentTimeMillis() - lastBatteryLevelChecked > 1000 * 12) { + BatteryManager batteryManager = (BatteryManager) ApplicationLoader.applicationContext.getSystemService(Context.BATTERY_SERVICE); + if (batteryManager != null) { + lastBatteryLevelCached = batteryManager.getIntProperty(BatteryManager.BATTERY_PROPERTY_CAPACITY); + lastBatteryLevelChecked = System.currentTimeMillis(); + } + } + return lastBatteryLevelCached; + } + + private static int preprocessFlag(int flag) { + if ((flag & FLAG_ANIMATED_EMOJI_KEYBOARD) > 0) { + flag = flag & ~FLAG_ANIMATED_EMOJI_KEYBOARD | (UserConfig.hasPremiumOnAccounts() ? FLAG_ANIMATED_EMOJI_KEYBOARD_PREMIUM : FLAG_ANIMATED_EMOJI_KEYBOARD_NOT_PREMIUM); + } + if ((flag & FLAG_ANIMATED_EMOJI_REACTIONS) > 0) { + flag = flag & ~FLAG_ANIMATED_EMOJI_REACTIONS | (UserConfig.hasPremiumOnAccounts() ? FLAG_ANIMATED_EMOJI_REACTIONS_PREMIUM : FLAG_ANIMATED_EMOJI_REACTIONS_NOT_PREMIUM); + } + if ((flag & FLAG_ANIMATED_EMOJI_CHAT) > 0) { + flag = flag & ~FLAG_ANIMATED_EMOJI_CHAT | (UserConfig.hasPremiumOnAccounts() ? FLAG_ANIMATED_EMOJI_CHAT_PREMIUM : FLAG_ANIMATED_EMOJI_CHAT_NOT_PREMIUM); + } + return flag; + } + + public static boolean isEnabled(int flag) { + return (getValue() & preprocessFlag(flag)) > 0; + } + + public static boolean isEnabledSetting(int flag) { + return (getValue(true) & flag) > 0; + } + + public static void toggleFlag(int flag) { + toggleFlag(flag, !isEnabled(flag)); + } + + public static void toggleFlag(int flag, boolean enabled) { + setAllFlags(enabled ? getValue(true) | flag : getValue(true) & ~flag); + } + + public static void setAllFlags(int flags) { + // in settings it is already handled. would you handle it? 
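Call sites elsewhere in this diff (for example the ImageReceiver avatar path) switch from SharedConfig-based checks to flag queries on the new LiteMode class above. A short usage sketch using only members defined above this point:

// Illustrative checks against the flags defined above.
if (LiteMode.isEnabled(LiteMode.FLAG_AUTOPLAY_VIDEOS)) {
    // autoplay is allowed by the current preset and power-saver state
}

// isEnabledSetting ignores the power saver and reflects only the stored preference:
boolean blurChosen = LiteMode.isEnabledSetting(LiteMode.FLAG_CHAT_BLUR);

// toggleFlag reads the stored value (power saver ignored) and persists the change:
LiteMode.toggleFlag(LiteMode.FLAG_ANIMATED_STICKERS_CHAT, false);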
🫵 + // onFlagsUpdate(value, flags); + value = flags; + savePreference(); + } + + public static void updatePresets(TLRPC.TL_jsonObject json) { + for (int i = 0; i < json.value.size(); ++i) { + TLRPC.TL_jsonObjectValue kv = json.value.get(i); + if ("settings_mask".equals(kv.key) && kv.value instanceof TLRPC.TL_jsonArray) { + ArrayList array = ((TLRPC.TL_jsonArray) kv.value).value; + try { + PRESET_LOW = (int) ((TLRPC.TL_jsonNumber) array.get(0)).value; + PRESET_MEDIUM = (int) ((TLRPC.TL_jsonNumber) array.get(1)).value; + PRESET_HIGH = (int) ((TLRPC.TL_jsonNumber) array.get(2)).value; + } catch (Exception e) { + FileLog.e(e); + } + } else if ("battery_low".equals(kv.key) && kv.value instanceof TLRPC.TL_jsonArray) { + ArrayList array = ((TLRPC.TL_jsonArray) kv.value).value; + try { + BATTERY_LOW = (int) ((TLRPC.TL_jsonNumber) array.get(0)).value; + BATTERY_MEDIUM = (int) ((TLRPC.TL_jsonNumber) array.get(1)).value; + BATTERY_HIGH = (int) ((TLRPC.TL_jsonNumber) array.get(2)).value; + } catch (Exception e) { + FileLog.e(e); + } + } + } + loadPreference(); + } + + public static void loadPreference() { + int defaultValue = PRESET_HIGH, batteryDefaultValue = BATTERY_HIGH; + if (SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW) { + defaultValue = PRESET_LOW; + batteryDefaultValue = BATTERY_LOW; + } else if (SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_AVERAGE) { + defaultValue = PRESET_MEDIUM; + batteryDefaultValue = BATTERY_MEDIUM; + } + + final SharedPreferences preferences = MessagesController.getGlobalMainSettings(); + if (!preferences.contains("lite_mode2")) { + if (preferences.contains("lite_mode")) { + defaultValue = preferences.getInt("lite_mode", defaultValue); + if (defaultValue == 4095) { + defaultValue = PRESET_HIGH; + } + } else { + if (preferences.contains("light_mode")) { + boolean prevLiteModeEnabled = (preferences.getInt("light_mode", SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW ? 
1 : 0) & 1) > 0; + if (prevLiteModeEnabled) { + defaultValue = PRESET_LOW; + } else { + defaultValue = PRESET_HIGH; + } + } + // migrate settings + if (preferences.contains("loopStickers")) { + boolean loopStickers = preferences.getBoolean("loopStickers", true); + if (loopStickers) { + defaultValue |= FLAG_ANIMATED_STICKERS_CHAT; + } else { + defaultValue &= ~FLAG_ANIMATED_STICKERS_CHAT; + } + } + if (preferences.contains("autoplay_video")) { + boolean autoplayVideo = preferences.getBoolean("autoplay_video", true) || preferences.getBoolean("autoplay_video_liteforce", false); + if (autoplayVideo) { + defaultValue |= FLAG_AUTOPLAY_VIDEOS; + } else { + defaultValue &= ~FLAG_AUTOPLAY_VIDEOS; + } + } + if (preferences.contains("autoplay_gif")) { + boolean autoplayGif = preferences.getBoolean("autoplay_gif", true); + if (autoplayGif) { + defaultValue |= FLAG_AUTOPLAY_GIFS; + } else { + defaultValue &= ~FLAG_AUTOPLAY_GIFS; + } + } + if (preferences.contains("chatBlur")) { + boolean chatBlur = preferences.getBoolean("chatBlur", true); + if (chatBlur) { + defaultValue |= FLAG_CHAT_BLUR; + } else { + defaultValue &= ~FLAG_CHAT_BLUR; + } + } + } + } + + int prevValue = value; + value = preferences.getInt("lite_mode2", defaultValue); + if (loaded) { + onFlagsUpdate(prevValue, value); + } + powerSaverLevel = preferences.getInt("lite_mode_battery_level", batteryDefaultValue); + loaded = true; + } + + public static void savePreference() { + MessagesController.getGlobalMainSettings().edit().putInt("lite_mode2", value).putInt("lite_mode_battery_level", powerSaverLevel).apply(); + } + + public static int getPowerSaverLevel() { + if (!loaded) { + loadPreference(); + } + return powerSaverLevel; + } + + public static void setPowerSaverLevel(int value) { + powerSaverLevel = MathUtils.clamp(value, 0, 100); + savePreference(); + + // check power saver applied + getValue(false); + } + + public static boolean isPowerSaverApplied() { + getValue(false); + return lastPowerSaverApplied; + } + + private static void onPowerSaverApplied(boolean powerSaverApplied) { + if (powerSaverApplied) { + onFlagsUpdate(getValue(true), PRESET_POWER_SAVER); + } else { + onFlagsUpdate(PRESET_POWER_SAVER, getValue(true)); + } + if (onPowerSaverAppliedListeners != null) { + AndroidUtilities.runOnUIThread(() -> { + Iterator> i = onPowerSaverAppliedListeners.iterator(); + while (i.hasNext()) { + Utilities.Callback callback = i.next(); + if (callback != null) { + callback.run(powerSaverApplied); + } + } + }); + } + } + + private static void onFlagsUpdate(int oldValue, int newValue) { + int changedFlags = ~oldValue & newValue; + if ((changedFlags & FLAGS_ANIMATED_EMOJI) > 0) { + AnimatedEmojiDrawable.updateAll(); + } + if ((changedFlags & FLAG_CHAT_BACKGROUND) > 0) { + Theme.reloadWallpaper(); + } + } + + private static HashSet> onPowerSaverAppliedListeners; + public static void addOnPowerSaverAppliedListener(Utilities.Callback listener) { + if (onPowerSaverAppliedListeners == null) { + onPowerSaverAppliedListeners = new HashSet<>(); + } + onPowerSaverAppliedListeners.add(listener); + } + + public static void removeOnPowerSaverAppliedListener(Utilities.Callback listener) { + if (onPowerSaverAppliedListeners != null) { + onPowerSaverAppliedListeners.remove(listener); + } + } +} \ No newline at end of file diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java b/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java index d1fd02583d..fd1260be49 100644 --- 
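The onPowerSaverAppliedListeners set added at the end of this file gives UI code a hook for the moment the battery threshold switches the app to PRESET_POWER_SAVER and back. A possible caller-side sketch; the stripped generic parameter on Utilities.Callback is assumed to be Boolean (the callback receives the powerSaverApplied flag), and the registration points are illustrative:

// assumed listener type: Utilities.Callback<Boolean>
private final Utilities.Callback<Boolean> powerSaverListener = applied -> {
    if (applied) {
        // e.g. pause heavy Lottie animations while PRESET_POWER_SAVER is active
    } else {
        // restore whatever the saved flags allow again
    }
};

// register once, e.g. in onFragmentCreate():
LiteMode.addOnPowerSaverAppliedListener(powerSaverListener);
// and always unregister, e.g. in onFragmentDestroy():
LiteMode.removeOnPowerSaverAppliedListener(powerSaverListener);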
a/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java @@ -21,6 +21,7 @@ import android.telephony.TelephonyManager; import android.text.TextUtils; import android.text.format.DateFormat; +import android.util.Log; import android.util.Xml; import android.view.Gravity; @@ -45,7 +46,9 @@ import java.util.Currency; import java.util.Date; import java.util.HashMap; +import java.util.HashSet; import java.util.Locale; +import java.util.Map; import java.util.TimeZone; import tw.nekomimi.nekogram.NekoConfig; @@ -531,7 +534,7 @@ public LocaleInfo getLanguageFromDict(String key) { public LocaleInfo getBuiltinLanguageByPlural(String plural) { Collection values = languagesDict.values(); for (LocaleInfo l : values) - if (l.pathToFile != null && l.pathToFile.equals("remote") && l.pluralLangCode != null && l.pluralLangCode.equals(plural)) + if (l.pathToFile != null && l.pathToFile.equals("remote") && (l.shortName == null || !l.shortName.endsWith("_raw")) && l.pluralLangCode != null && l.pluralLangCode.equals(plural)) return l; return null; } @@ -576,17 +579,82 @@ public void reloadCurrentRemoteLocale(int currentAccount, String langCode, boole } } + private boolean checkingUpdateForCurrentRemoteLocale; + public void checkUpdateForCurrentRemoteLocale(int currentAccount, int version, int baseVersion) { if (currentLocaleInfo == null || !currentLocaleInfo.isRemote() && !currentLocaleInfo.isUnofficial()) { return; } if (currentLocaleInfo.hasBaseLang()) { if (currentLocaleInfo.baseVersion < baseVersion) { - applyRemoteLanguage(currentLocaleInfo, currentLocaleInfo.baseLangCode, false, currentAccount, null); + checkingUpdateForCurrentRemoteLocale = true; + applyRemoteLanguage(currentLocaleInfo, currentLocaleInfo.baseLangCode, false, currentAccount, () -> { + checkingUpdateForCurrentRemoteLocale = false; + checkPatchLangpack(currentAccount); + }); } } if (currentLocaleInfo.version < version) { - applyRemoteLanguage(currentLocaleInfo, currentLocaleInfo.shortName, false, currentAccount, null); + checkingUpdateForCurrentRemoteLocale = true; + applyRemoteLanguage(currentLocaleInfo, currentLocaleInfo.shortName, false, currentAccount, () -> { + checkingUpdateForCurrentRemoteLocale = false; + checkPatchLangpack(currentAccount); + }); + } + } + + public int calculateTranslatedCount(HashMap map) { + int count = 0; + HashSet added = new HashSet<>(); + for (String k : map.keySet()) { + if (k == null) { + continue; + } + String real = null; + if (k.endsWith("_other")) { + real = k.substring(0, k.length() - 6); + } else if (k.endsWith("_zero") || k.endsWith("_many")) { + real = k.substring(0, k.length() - 5); + } else if (k.endsWith("_one") || k.endsWith("_two") || k.endsWith("_few")) { + real = k.substring(0, k.length() - 4); + } + if (real == null) { + count++; + } else if (!added.contains(real)) { + added.add(real); + count++; + } + } + added.clear(); + return count; + } + + public void checkPatchLangpack(int currentAccount) { + if (currentLocaleInfo == null || checkingUpdateForCurrentRemoteLocale) { + return; + } + if (shouldReinstallLangpack(currentLocaleInfo.shortName)) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("reload locale because locale file is not enough"); + } + AndroidUtilities.runOnUIThread(() -> reloadCurrentRemoteLocale(currentAccount, null, true, null)); + } + } + + private boolean patching = false; + + public void checkForcePatchLangpack(int currentAccount, Runnable ifDone) { + String lng = 
LocaleController.getCurrentLanguageName(); + boolean shouldPatch = MessagesController.getInstance(currentAccount).checkResetLangpack > 0 && !MessagesController.getGlobalMainSettings().getBoolean("langpack_patched" + lng, false) && !patching; + if (shouldPatch) { + patching = true; + reloadCurrentRemoteLocale(currentAccount, null, true, () -> AndroidUtilities.runOnUIThread(() -> { + MessagesController.getGlobalMainSettings().edit().putBoolean("langpack_patched" + lng, true).apply(); + if (ifDone != null) { + ifDone.run(); + } + patching = false; + })); } } @@ -975,7 +1043,7 @@ public int applyLanguage(final LocaleInfo localeInfo, boolean override, boolean boolean isLoadingRemote = false; if ((localeInfo.isRemote() || localeInfo.isUnofficial()) && (force || !pathToFile.exists() || hasBase && !pathToBaseFile.exists())) { if (BuildVars.LOGS_ENABLED) { - FileLog.d("reload locale because one of file doesn't exist" + pathToFile + " " + pathToBaseFile); + FileLog.d("reload locale because one of file doesn't exist " + pathToFile + " " + pathToBaseFile); } isLoadingRemote = true; if (init) { @@ -1017,18 +1085,19 @@ public int applyLanguage(final LocaleInfo localeInfo, boolean override, boolean } currentLocale = newLocale; currentLocaleInfo = localeInfo; + FileLog.d("applyLanguage: currentLocaleInfo is set"); if (!TextUtils.isEmpty(currentLocaleInfo.pluralLangCode)) { currentPluralRules = allRules.get(currentLocaleInfo.pluralLangCode); } if (currentPluralRules == null) { currentPluralRules = allRules.get(args[0]); - if (currentPluralRules == null) { - currentPluralRules = allRules.get(currentLocale.getLanguage()); - if (currentPluralRules == null) { - currentPluralRules = new PluralRules_None(); - } - } + } + if (currentPluralRules == null) { + currentPluralRules = allRules.get(currentLocale.getLanguage()); + } + if (currentPluralRules == null) { + currentPluralRules = new PluralRules_None(); } changingConfiguration = true; Locale.setDefault(currentLocale); @@ -1037,11 +1106,14 @@ public int applyLanguage(final LocaleInfo localeInfo, boolean override, boolean FileLog.e("update locale to " + config.locale); ApplicationLoader.applicationContext.getResources().updateConfiguration(config, ApplicationLoader.applicationContext.getResources().getDisplayMetrics()); changingConfiguration = false; - if (reloadLastFile) { + if (reloadLastFile || !isLoadingRemote && !force && shouldReinstallLangpack(localeInfo.shortName)) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("reload locale because one of file is corrupted " + pathToFile + " " + pathToBaseFile); + } if (init) { - AndroidUtilities.runOnUIThread(() -> reloadCurrentRemoteLocale(currentAccount, null, force, null)); + AndroidUtilities.runOnUIThread(() -> reloadCurrentRemoteLocale(currentAccount, null, true, null)); } else { - reloadCurrentRemoteLocale(currentAccount, null, force, null); + reloadCurrentRemoteLocale(currentAccount, null, true, null); } reloadLastFile = false; } @@ -1711,6 +1783,31 @@ public static String formatDateAudio(long date, boolean shortFormat) { return "LOC_ERR"; } + public static String formatSeenDate(long date) { + try { + date *= 1000; + Calendar rightNow = Calendar.getInstance(); + int day = rightNow.get(Calendar.DAY_OF_YEAR); + int year = rightNow.get(Calendar.YEAR); + rightNow.setTimeInMillis(date); + int dateDay = rightNow.get(Calendar.DAY_OF_YEAR); + int dateYear = rightNow.get(Calendar.YEAR); + + if (dateDay == day && year == dateYear) { + return LocaleController.formatString("TodayAtFormattedWithToday", 
R.string.TodayAtFormattedWithToday, getInstance().formatterDay.format(new Date(date))); + } else if (dateDay + 1 == day && year == dateYear) { + return LocaleController.formatString("YesterdayAtFormatted", R.string.YesterdayAtFormatted, getInstance().formatterDay.format(new Date(date))); + } else if (Math.abs(System.currentTimeMillis() - date) < 31536000000L) { + return LocaleController.formatString("formatDateAtTime", R.string.formatDateAtTime, getInstance().formatterDayMonth.format(new Date(date)), getInstance().formatterDay.format(new Date(date))); + } else { + return LocaleController.formatString("formatDateAtTime", R.string.formatDateAtTime, getInstance().formatterYear.format(new Date(date)), getInstance().formatterDay.format(new Date(date))); + } + } catch (Exception e) { + FileLog.e(e); + } + return "LOC_ERR"; + } + public static String formatDateCallLog(long date) { try { date *= 1000; @@ -2222,6 +2319,12 @@ public void saveRemoteLocaleStringsForCurrentLocale(final TLRPC.TL_langPackDiffe public void saveRemoteLocaleStrings(LocaleInfo localeInfo, final TLRPC.TL_langPackDifference difference, int currentAccount, Runnable onDone) { if (difference == null || difference.strings.isEmpty() || localeInfo == null || localeInfo.isLocal()) { + FileLog.d("saveRemoteLocaleStrings: empty difference=" + (difference == null || difference.strings.isEmpty()) + "; locale is local or null=" + (localeInfo == null || localeInfo.isLocal())); + recreateFormatters(); + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.reloadInterface); + if (onDone != null) { + onDone.run(); + } return; } final String langCode = difference.lang_code.replace('-', '_').toLowerCase(); @@ -2234,6 +2337,7 @@ public void saveRemoteLocaleStrings(LocaleInfo localeInfo, final TLRPC.TL_langPa type = -1; } if (type == -1) { + FileLog.d("saveRemoteLocaleStrings: unknown language " + langCode + " (locale short=" + localeInfo.shortName + ", base=" + localeInfo.baseLangCode + ")"); return; } File finalFile; @@ -2245,8 +2349,10 @@ public void saveRemoteLocaleStrings(LocaleInfo localeInfo, final TLRPC.TL_langPa try { final HashMap values; if (difference.from_version == 0) { + FileLog.d("saveRemoteLocaleStrings: difference is straight from the beginning"); values = new HashMap<>(); } else { + FileLog.d("saveRemoteLocaleStrings: difference is from version " + difference.from_version + " ours " + localeInfo.version + " (base version " + localeInfo.baseLangCode + ")"); values = getLocaleFileStrings(finalFile, true); } for (int a = 0; a < difference.strings.size(); a++) { @@ -2264,9 +2370,7 @@ public void saveRemoteLocaleStrings(LocaleInfo localeInfo, final TLRPC.TL_langPa values.remove(string.key); } } - if (BuildVars.LOGS_ENABLED) { - FileLog.d("save locale file to " + finalFile); - } + FileLog.d("save locale file to " + finalFile); BufferedWriter writer = new BufferedWriter(new FileWriter(finalFile)); writer.write("\n"); writer.write("\n"); @@ -2280,6 +2384,7 @@ public void saveRemoteLocaleStrings(LocaleInfo localeInfo, final TLRPC.TL_langPa if (hasBase) { valuesToSet.putAll(getLocaleFileStrings(localeInfo.getPathToFile())); } + FileLog.d("saved locale file to " + finalFile); AndroidUtilities.runOnUIThread(() -> { if (type == 0) { localeInfo.version = difference.version; @@ -2330,6 +2435,8 @@ public void saveRemoteLocaleStrings(LocaleInfo localeInfo, final TLRPC.TL_langPa FileLog.e("update locale to " + config.locale); ApplicationLoader.applicationContext.getResources().updateConfiguration(config, 
ApplicationLoader.applicationContext.getResources().getDisplayMetrics()); changingConfiguration = false; + } else { + FileLog.d("saveRemoteLocaleStrings: currentLocaleInfo != localeInfo, do nothing"); } } catch (Exception e) { FileLog.e(e); @@ -2341,8 +2448,8 @@ public void saveRemoteLocaleStrings(LocaleInfo localeInfo, final TLRPC.TL_langPa onDone.run(); } }); - } catch (Exception ignore) { - + } catch (Exception e) { + FileLog.e(e); } } @@ -2430,47 +2537,75 @@ private int applyRemoteLanguage(LocaleInfo localeInfo, String langCode, boolean if (localeInfo == null || !localeInfo.isRemote() && !localeInfo.isUnofficial()) { return 0; } + FileLog.d("applyRemoteLanguage " + langCode + " force=" + force + " currentAccount=" + currentAccount); + int[] requested = new int[1], received = new int[1]; + requested[0] = received[0] = 0; + Runnable onPartlyDone = () -> { + received[0]++; + if (received[0] >= requested[0] && onDone != null) { + onDone.run(); + } + }; + if (force) { + patched(localeInfo.shortName); + } if (localeInfo.hasBaseLang() && (langCode == null || langCode.equals(localeInfo.baseLangCode))) { if (localeInfo.baseVersion != 0 && !force) { if (localeInfo.hasBaseLang()) { + FileLog.d("applyRemoteLanguage getDifference of base"); TLRPC.TL_langpack_getDifference req = new TLRPC.TL_langpack_getDifference(); req.from_version = localeInfo.baseVersion; req.lang_code = localeInfo.getBaseLangCode(); req.lang_pack = ""; - return ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { + requested[0]++; + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { if (response != null) { - AndroidUtilities.runOnUIThread(() -> saveRemoteLocaleStrings(localeInfo, (TLRPC.TL_langPackDifference) response, currentAccount, onDone)); + AndroidUtilities.runOnUIThread(() -> saveRemoteLocaleStrings(localeInfo, (TLRPC.TL_langPackDifference) response, currentAccount, onPartlyDone)); } }, ConnectionsManager.RequestFlagWithoutLogin); } } else { + FileLog.d("applyRemoteLanguage getLangPack of base"); TLRPC.TL_langpack_getLangPack req = new TLRPC.TL_langpack_getLangPack(); req.lang_code = localeInfo.getBaseLangCode(); - return ConnectionsManager.getInstance(currentAccount).sendRequest(req, (TLObject response, TLRPC.TL_error error) -> { + requested[0]++; + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (TLObject response, TLRPC.TL_error error) -> { if (response != null) { - AndroidUtilities.runOnUIThread(() -> saveRemoteLocaleStrings(localeInfo, (TLRPC.TL_langPackDifference) response, currentAccount, onDone)); + AndroidUtilities.runOnUIThread(() -> { + saveRemoteLocaleStrings(localeInfo, (TLRPC.TL_langPackDifference) response, currentAccount, onPartlyDone); + }); } }, ConnectionsManager.RequestFlagWithoutLogin); } } if (langCode == null || langCode.equals(localeInfo.shortName)) { if (localeInfo.version != 0 && !force) { + FileLog.d("applyRemoteLanguage getDifference"); TLRPC.TL_langpack_getDifference req = new TLRPC.TL_langpack_getDifference(); req.from_version = localeInfo.version; req.lang_code = localeInfo.getLangCode(); req.lang_pack = ""; + requested[0]++; return ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { if (response != null) { - AndroidUtilities.runOnUIThread(() -> saveRemoteLocaleStrings(localeInfo, (TLRPC.TL_langPackDifference) response, currentAccount, onDone)); + AndroidUtilities.runOnUIThread(() -> { + saveRemoteLocaleStrings(localeInfo, (TLRPC.TL_langPackDifference) response, 
currentAccount, onPartlyDone); + }); } }, ConnectionsManager.RequestFlagWithoutLogin); } else { - ConnectionsManager.setLangCode(localeInfo.getLangCode()); + for (int a : SharedConfig.activeAccounts) { + ConnectionsManager.setLangCode(localeInfo.getLangCode()); + } + FileLog.d("applyRemoteLanguage getLangPack"); TLRPC.TL_langpack_getLangPack req = new TLRPC.TL_langpack_getLangPack(); req.lang_code = localeInfo.getLangCode(); + requested[0]++; return ConnectionsManager.getInstance(currentAccount).sendRequest(req, (TLObject response, TLRPC.TL_error error) -> { if (response != null) { - AndroidUtilities.runOnUIThread(() -> saveRemoteLocaleStrings(localeInfo, (TLRPC.TL_langPackDifference) response, currentAccount, onDone)); + AndroidUtilities.runOnUIThread(() -> { + saveRemoteLocaleStrings(localeInfo, (TLRPC.TL_langPackDifference) response, currentAccount, onPartlyDone); + }); } }, ConnectionsManager.RequestFlagWithoutLogin); } @@ -3400,6 +3535,25 @@ public static String formatDistance(float distance, int type) { return formatDistance(distance, type, null); } + // patch to force reinstalling of langpack in case some strings are missing after 9.0 + private boolean shouldReinstallLangpack(String lng) { + int mustBeCount = MessagesController.getInstance(UserConfig.selectedAccount).checkResetLangpack; + if (mustBeCount <= 0) { + return false; + } + boolean alreadyPatched = MessagesController.getGlobalMainSettings().getBoolean("lngpack_patched_" + lng, false); + if (alreadyPatched) { + return false; + } + int count = calculateTranslatedCount(localeValues); + if (count >= mustBeCount) { + return false; + } + FileLog.e("reinstalling " + lng + " langpack because of patch (" + count + " keys, must be at least " + mustBeCount + ")"); + patched(lng); + return true; + } + public static String formatDistance(float distance, int type, Boolean useImperial) { ensureImperialSystemInit(); boolean imperial = useImperial != null && useImperial || useImperial == null && useImperialSystemType; @@ -3463,4 +3617,11 @@ public static String formatDistance(float distance, int type, Boolean useImperia } } } + + private void patched(String lng) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("set as patched " + lng + " langpack"); + } + MessagesController.getGlobalMainSettings().edit().putBoolean("lngpack_patched_" + lng, true).apply(); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MediaController.java b/TMessagesProj/src/main/java/org/telegram/messenger/MediaController.java index 71cb4ee3e3..3e25aca517 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MediaController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MediaController.java @@ -33,6 +33,7 @@ import android.hardware.SensorEvent; import android.hardware.SensorEventListener; import android.hardware.SensorManager; +import android.media.AudioDeviceInfo; import android.media.AudioFormat; import android.media.AudioManager; import android.media.AudioRecord; @@ -52,6 +53,7 @@ import android.telephony.PhoneStateListener; import android.telephony.TelephonyManager; import android.text.TextUtils; +import android.util.Log; import android.util.SparseArray; import android.view.HapticFeedbackConstants; import android.view.TextureView; @@ -81,6 +83,7 @@ import org.telegram.ui.Components.PhotoFilterView; import org.telegram.ui.Components.PipRoundVideoView; import org.telegram.ui.Components.VideoPlayer; +import org.telegram.ui.LaunchActivity; import org.telegram.ui.PhotoViewer; import java.io.File; @@ -97,6 +100,7 @@ import 
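Back in the LocaleController hunk above, shouldReinstallLangpack() compares calculateTranslatedCount(localeValues) against the server-provided checkResetLangpack threshold, and the counting rule collapses plural variants onto their base key. A standalone, runnable version of that rule with a tiny usage example (the class name is illustrative):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;

final class TranslatedCount {
    static int count(Map<String, String> strings) {
        HashSet<String> seen = new HashSet<>();
        int count = 0;
        for (String key : strings.keySet()) {
            if (key == null) continue;
            String base = null;
            if (key.endsWith("_other")) {
                base = key.substring(0, key.length() - 6);
            } else if (key.endsWith("_zero") || key.endsWith("_many")) {
                base = key.substring(0, key.length() - 5);
            } else if (key.endsWith("_one") || key.endsWith("_two") || key.endsWith("_few")) {
                base = key.substring(0, key.length() - 4);
            }
            if (base == null) {
                count++;                // a plain key counts once
            } else if (seen.add(base)) {
                count++;                // only the first plural form of a base key counts
            }
        }
        return count;
    }

    public static void main(String[] args) {
        Map<String, String> m = new HashMap<>();
        m.put("AppName", "Telegram");
        m.put("Files_one", "%d file");
        m.put("Files_other", "%d files");
        System.out.println(count(m));   // prints 2
    }
}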
java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Locale; import java.util.Timer; import java.util.TimerTask; @@ -359,6 +363,7 @@ public static class PhotoEntry extends MediaEditState { public boolean isChatPreviewSpoilerRevealed; public boolean isAttachSpoilerRevealed; + public TLRPC.VideoSize emojiMarkup; public PhotoEntry(int bucketId, int imageId, long dateTaken, String path, int orientation, boolean isVideo, int width, int height, long size) { this.bucketId = bucketId; @@ -479,6 +484,7 @@ public String getPathToAttach() { private boolean raiseToEarRecord; private ChatActivity raiseChat; private boolean accelerometerVertical; + private long lastAccelerometerDetected; private int raisedToTop; private int raisedToTopSign; private int raisedToBack; @@ -555,6 +561,7 @@ public VideoConvertMessage(MessageObject object, VideoEditedInfo info) { private long lastProgress = 0; private MessageObject playingMessageObject; private MessageObject goingToShowMessageObject; + private boolean manualRecording; private Timer progressTimer = null; private final Object progressTimerSync = new Object(); private boolean downloadingCurrentMessage; @@ -604,6 +611,8 @@ public void run() { private File recordingAudioFile; private long recordStartTime; private long recordTimeCount; + private int writedFrame; + private long writedFileLenght; private long recordDialogId; private MessageObject recordReplyingMsg; private MessageObject recordReplyingTopMsg; @@ -691,6 +700,9 @@ public void run() { if (writeFrame(fileBuffer, !flush ? fileBuffer.limit() : finalBuffer.position()) != 0) { fileBuffer.rewind(); recordTimeCount += fileBuffer.limit() / 2 / (sampleRate / 1000); + writedFrame++; + } else { + FileLog.e("writing frame failed"); } } if (oldLimit != -1) { @@ -1489,15 +1501,14 @@ public void onSensorChanged(SensorEvent event) { if (!sensorsStarted || VoIPService.getSharedInstance() != null) { return; } - if (event.sensor == proximitySensor) { + if (event.sensor.getType() == Sensor.TYPE_PROXIMITY) { if (BuildVars.LOGS_ENABLED) { - FileLog.d("proximity changed to " + event.values[0] + " max value = " + proximitySensor.getMaximumRange()); + FileLog.d("proximity changed to " + event.values[0] + " max value = " + event.sensor.getMaximumRange()); } - if (lastProximityValue == -100) { - lastProximityValue = event.values[0]; - } else if (lastProximityValue != event.values[0]) { + if (lastProximityValue != event.values[0]) { proximityHasDifferentValues = true; } + lastProximityValue = event.values[0]; if (proximityHasDifferentValues) { proximityTouched = isNearToSensor(event.values[0]); } @@ -1639,11 +1650,11 @@ public void onSensorChanged(SensorEvent event) { FileLog.d(accelerometerVertical + " val = " + val + " acc (" + linearAcceleration[0] + ", " + linearAcceleration[1] + ", " + linearAcceleration[2] + ") grav (" + gravityFast[0] + ", " + gravityFast[1] + ", " + gravityFast[2] + ")"); }*/ } - if (raisedToBack == minCount && accelerometerVertical && proximityTouched && !NotificationsController.audioManager.isWiredHeadsetOn()) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("sensor values reached"); - } - if (playingMessageObject == null && recordStartRunnable == null && recordingAudio == null && !PhotoViewer.getInstance().isVisible() && ApplicationLoader.isScreenOn && !inputFieldHasText && allowStartRecord && raiseChat != null && !callInProgress) { + if (raisedToBack == minCount || accelerometerVertical) { + lastAccelerometerDetected = 
System.currentTimeMillis(); + } + if (proximityTouched && (raisedToBack == minCount || accelerometerVertical || System.currentTimeMillis() - lastAccelerometerDetected < 60) && !NotificationsController.audioManager.isWiredHeadsetOn() && !NotificationsController.audioManager.isBluetoothA2dpOn() && !VoIPService.isAnyKindOfCallActive() && !manualRecording) { + if (SharedConfig.enabledRaiseTo(true) && playingMessageObject == null && recordStartRunnable == null && recordingAudio == null && !PhotoViewer.getInstance().isVisible() && ApplicationLoader.isScreenOn && !inputFieldHasText && allowStartRecord && raiseChat != null && !callInProgress) { if (!raiseToEarRecord) { if (BuildVars.LOGS_ENABLED) { FileLog.d("start record"); @@ -1652,7 +1663,7 @@ public void onSensorChanged(SensorEvent event) { if (!raiseChat.playFirstUnreadVoiceMessage()) { raiseToEarRecord = true; useFrontSpeaker = false; - startRecording(raiseChat.getCurrentAccount(), raiseChat.getDialogId(), null, raiseChat.getThreadMessage(), raiseChat.getClassGuid()); + startRecording(raiseChat.getCurrentAccount(), raiseChat.getDialogId(), null, raiseChat.getThreadMessage(), raiseChat.getClassGuid(), false); } if (useFrontSpeaker) { setUseFrontSpeaker(true); @@ -1662,7 +1673,7 @@ public void onSensorChanged(SensorEvent event) { proximityWakeLock.acquire(); } } - } else if (playingMessageObject != null && (playingMessageObject.isVoice() || playingMessageObject.isRoundVideo())) { + } else if (SharedConfig.enabledRaiseTo(false) && playingMessageObject != null && (playingMessageObject.isVoice() || playingMessageObject.isRoundVideo())) { if (!useFrontSpeaker) { if (BuildVars.LOGS_ENABLED) { FileLog.d("start listen"); @@ -1679,9 +1690,9 @@ public void onSensorChanged(SensorEvent event) { raisedToTop = 0; raisedToTopSign = 0; countLess = 0; - } else if (proximityTouched) { - if (playingMessageObject != null && !ApplicationLoader.mainInterfacePaused && (playingMessageObject.isVoice() || playingMessageObject.isRoundVideo())) { - if (!useFrontSpeaker && !NotificationsController.audioManager.isWiredHeadsetOn()) { + } else if (proximityTouched && ((accelerometerSensor == null || linearSensor == null) && gravitySensor == null || ignoreAccelerometerGestures()) && !VoIPService.isAnyKindOfCallActive()) { + if (playingMessageObject != null && !ApplicationLoader.mainInterfacePaused && (playingMessageObject.isVoice() || playingMessageObject.isRoundVideo()) && SharedConfig.enabledRaiseTo(false)) { + if (!useFrontSpeaker && !NotificationsController.audioManager.isWiredHeadsetOn() && !NotificationsController.audioManager.isBluetoothA2dpOn() && !manualRecording) { if (BuildVars.LOGS_ENABLED) { FileLog.d("start listen by proximity only"); } @@ -1693,7 +1704,7 @@ public void onSensorChanged(SensorEvent event) { ignoreOnPause = true; } } - } else if (!proximityTouched) { + } else if (!proximityTouched && !manualRecording) { if (raiseToEarRecord) { if (BuildVars.LOGS_ENABLED) { FileLog.d("stop record"); @@ -1737,11 +1748,11 @@ private void setUseFrontSpeaker(boolean value) { } public void startRecordingIfFromSpeaker() { - if (!useFrontSpeaker || raiseChat == null || !allowStartRecord || !SharedConfig.raiseToSpeak) { + if (!useFrontSpeaker || raiseChat == null || !allowStartRecord || !SharedConfig.enabledRaiseTo(true)) { return; } raiseToEarRecord = true; - startRecording(raiseChat.getCurrentAccount(), raiseChat.getDialogId(), null, raiseChat.getThreadMessage(), raiseChat.getClassGuid()); + startRecording(raiseChat.getCurrentAccount(), raiseChat.getDialogId(), null, 
raiseChat.getThreadMessage(), raiseChat.getClassGuid(), false); ignoreOnPause = true; } @@ -1801,7 +1812,7 @@ public void startRaiseToEarSensors(ChatActivity chatActivity) { return; } raiseChat = chatActivity; - if (!SharedConfig.raiseToSpeak && (playingMessageObject == null || !playingMessageObject.isVoice() && !playingMessageObject.isRoundVideo())) { + if (!SharedConfig.enabledRaiseTo(true) && (playingMessageObject == null || !playingMessageObject.isVoice() && !playingMessageObject.isRoundVideo())) { return; } if (!sensorsStarted) { @@ -1939,7 +1950,7 @@ public void onAnimationEnd(Animator animation) { stopProgressTimer(); lastProgress = 0; isPaused = false; - if (!useFrontSpeaker && !SharedConfig.raiseToSpeak) { + if (!useFrontSpeaker && !SharedConfig.enabledRaiseTo(true)) { ChatActivity chat = raiseChat; stopRaiseToEarSensors(raiseChat, false); raiseChat = chat; @@ -2015,6 +2026,7 @@ private boolean isSamePlayingMessage(MessageObject messageObject) { } public boolean seekToProgress(MessageObject messageObject, float progress) { + final MessageObject playingMessageObject = this.playingMessageObject; if (audioPlayer == null && videoPlayer == null || messageObject == null || playingMessageObject == null || !isSamePlayingMessage(messageObject)) { return false; } @@ -2242,12 +2254,12 @@ private void sortPlaylist() { long group2 = o2.messageOwner.grouped_id; if (mid1 < 0 && mid2 < 0) { if (group1 != 0 && group1 == group2) { - return Integer.compare(mid1, mid2); + return -Integer.compare(mid1, mid2); } return Integer.compare(mid2, mid1); } else { if (group1 != 0 && group1 == group2) { - return Integer.compare(mid2, mid1); + return -Integer.compare(mid2, mid1); } return Integer.compare(mid1, mid2); } @@ -2293,20 +2305,7 @@ private void playNextMessageWithoutOrder(boolean byStop) { return; } - boolean last = false; - if (SharedConfig.playOrderReversed) { - currentPlaylistNum++; - if (currentPlaylistNum >= currentPlayList.size()) { - currentPlaylistNum = 0; - last = true; - } - } else { - currentPlaylistNum--; - if (currentPlaylistNum < 0) { - currentPlaylistNum = currentPlayList.size() - 1; - last = true; - } - } + boolean last = traversePlaylist(currentPlayList, SharedConfig.playOrderReversed ? +1 : -1); if (last && byStop && SharedConfig.repeatMode == 0 && !forceLoopCurrentPlaylist) { if (audioPlayer != null || videoPlayer != null) { if (audioPlayer != null) { @@ -2363,23 +2362,47 @@ public void playPreviousMessage() { return; } - if (SharedConfig.playOrderReversed) { - currentPlaylistNum--; - if (currentPlaylistNum < 0) { - currentPlaylistNum = currentPlayList.size() - 1; - } - } else { - currentPlaylistNum++; - if (currentPlaylistNum >= currentPlayList.size()) { - currentPlaylistNum = 0; - } - } + traversePlaylist(currentPlayList, SharedConfig.playOrderReversed ? 
-1 : 1); if (currentPlaylistNum >= currentPlayList.size()) { return; } playMusicAgain = true; playMessage(currentPlayList.get(currentPlaylistNum)); } + + private boolean traversePlaylist(ArrayList playlist, int direction) { + boolean last = false; + final int wasCurrentPlaylistNum = currentPlaylistNum; + int connectionState = ConnectionsManager.getInstance(UserConfig.selectedAccount).getConnectionState(); + boolean offline = connectionState == ConnectionsManager.ConnectionStateWaitingForNetwork; + currentPlaylistNum += direction; + if (offline) { + while (currentPlaylistNum < playlist.size() && currentPlaylistNum >= 0) { + MessageObject audio = playlist.get(currentPlaylistNum); + if (audio != null && audio.mediaExists) { + break; + } + currentPlaylistNum += direction; + } + } + if (currentPlaylistNum >= playlist.size() || currentPlaylistNum < 0) { + currentPlaylistNum = currentPlaylistNum >= playlist.size() ? 0 : playlist.size() - 1; + if (offline) { + while (currentPlaylistNum >= 0 && currentPlaylistNum < playlist.size() && (direction > 0 ? currentPlaylistNum <= wasCurrentPlaylistNum : currentPlaylistNum >= wasCurrentPlaylistNum)) { + MessageObject audio = playlist.get(currentPlaylistNum); + if (audio != null && audio.mediaExists) { + break; + } + currentPlaylistNum += direction; + } + if (currentPlaylistNum >= playlist.size() || currentPlaylistNum < 0) { + currentPlaylistNum = currentPlaylistNum >= playlist.size() ? 0 : playlist.size() - 1; + } + } + last = true; + } + return last; + } protected void checkIsNextMediaFileDownloaded() { if (playingMessageObject == null || !playingMessageObject.isMusic()) { @@ -2447,7 +2470,7 @@ private void checkIsNextMusicFileDownloaded(int currentAccount) { } public void setVoiceMessagesPlaylist(ArrayList playlist, boolean unread) { - voiceMessagesPlaylist = playlist; + voiceMessagesPlaylist = playlist != null ? new ArrayList<>(playlist) : null; if (voiceMessagesPlaylist != null) { voiceMessagesPlaylistUnread = unread; voiceMessagesPlaylistMap = new SparseArray<>(); @@ -2475,7 +2498,7 @@ private void checkAudioFocus(MessageObject messageObject) { if (neededAudioFocus == 3) { result = NotificationsController.audioManager.requestAudioFocus(this, AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN); } else { - result = NotificationsController.audioManager.requestAudioFocus(this, AudioManager.STREAM_MUSIC, neededAudioFocus == 2 ? AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK : AudioManager.AUDIOFOCUS_GAIN); + result = NotificationsController.audioManager.requestAudioFocus(this, AudioManager.STREAM_MUSIC, neededAudioFocus == 2 && !SharedConfig.pauseMusicOnMedia ? AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK : AudioManager.AUDIOFOCUS_GAIN); } if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) { audioFocus = AUDIO_FOCUSED; @@ -2607,13 +2630,14 @@ public void setPlaybackSpeed(boolean music, float speed) { } } if (audioPlayer != null) { - audioPlayer.setPlaybackSpeed(speed); + audioPlayer.setPlaybackSpeed(Math.round(speed * 10f) / 10f); } else if (videoPlayer != null) { - videoPlayer.setPlaybackSpeed(speed); + videoPlayer.setPlaybackSpeed(Math.round(speed * 10f) / 10f); } MessagesController.getGlobalMainSettings().edit() .putFloat(music ? "musicPlaybackSpeed" : "playbackSpeed", speed) - .putFloat(music ? "fastMusicPlaybackSpeed" : "fastPlaybackSpeed", music ? fastMusicPlaybackSpeed : fastPlaybackSpeed).commit(); + .putFloat(music ? "fastMusicPlaybackSpeed" : "fastPlaybackSpeed", music ? 
fastMusicPlaybackSpeed : fastPlaybackSpeed) + .commit(); NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.messagePlayingSpeedChanged); } @@ -2780,12 +2804,13 @@ public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) { isPaused = false; lastProgress = 0; + MessageObject oldMessageObject = playingMessageObject; playingMessageObject = messageObject; - if (!SharedConfig.raiseToSpeak) { + if (!SharedConfig.enabledRaiseTo(true)) { startRaiseToEarSensors(raiseChat); } startProgressTimer(playingMessageObject); - NotificationCenter.getInstance(messageObject.currentAccount).postNotificationName(NotificationCenter.messagePlayingDidStart, messageObject); + NotificationCenter.getInstance(messageObject.currentAccount).postNotificationName(NotificationCenter.messagePlayingDidStart, messageObject, oldMessageObject); /*try { if (playingMessageObject.audioProgress != 0) { @@ -2946,7 +2971,7 @@ public boolean playMessage(final MessageObject messageObject, boolean silent) { if (isPaused) { resumeAudio(messageObject); } - if (!SharedConfig.raiseToSpeak) { + if (!SharedConfig.enabledRaiseTo(true)) { startRaiseToEarSensors(raiseChat); } return true; @@ -2955,6 +2980,7 @@ public boolean playMessage(final MessageObject messageObject, boolean silent) { MessagesController.getInstance(messageObject.currentAccount).markMessageContentAsRead(messageObject); } boolean notify = !playMusicAgain; + MessageObject oldMessageObject = playingMessageObject; if (playingMessageObject != null) { notify = false; if (!playMusicAgain) { @@ -3160,7 +3186,7 @@ public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) { if (messageObject.isRoundVideo()) { videoPlayer.setStreamType(useFrontSpeaker ? AudioManager.STREAM_VOICE_CALL : AudioManager.STREAM_MUSIC); if (Math.abs(currentPlaybackSpeed - 1.0f) > 0.001f) { - videoPlayer.setPlaybackSpeed(currentPlaybackSpeed); + videoPlayer.setPlaybackSpeed(Math.round(currentPlaybackSpeed * 10f) / 10f); } if (messageObject.forceSeekTo >= 0) { @@ -3269,7 +3295,7 @@ public boolean needUpdate() { shouldSavePositionForCurrentAudio = name; } if (Math.abs(currentPlaybackSpeed - 1.0f) > 0.001f) { - audioPlayer.setPlaybackSpeed(currentPlaybackSpeed); + audioPlayer.setPlaybackSpeed(Math.round(currentPlaybackSpeed * 10f) / 10f); } audioInfo = null; clearPlaylist(); @@ -3288,7 +3314,7 @@ public boolean needUpdate() { } shouldSavePositionForCurrentAudio = name; if (Math.abs(currentMusicPlaybackSpeed - 1.0f) > 0.001f) { - audioPlayer.setPlaybackSpeed(currentMusicPlaybackSpeed); + audioPlayer.setPlaybackSpeed(Math.round(currentMusicPlaybackSpeed * 10f) / 10f); } } } @@ -3331,14 +3357,16 @@ public boolean needUpdate() { isPaused = false; lastProgress = 0; playingMessageObject = messageObject; - if (!SharedConfig.raiseToSpeak) { + if (!SharedConfig.enabledRaiseTo(true)) { startRaiseToEarSensors(raiseChat); } - if (!ApplicationLoader.mainInterfacePaused && proximityWakeLock != null && !proximityWakeLock.isHeld() && (playingMessageObject.isVoice() || playingMessageObject.isRoundVideo())) { - proximityWakeLock.acquire(); + if (!ApplicationLoader.mainInterfacePaused && proximityWakeLock != null && !proximityWakeLock.isHeld() && (playingMessageObject.isVoice() || playingMessageObject.isRoundVideo()) && SharedConfig.enabledRaiseTo(false)) { + if (ignoreAccelerometerGestures()) { + proximityWakeLock.acquire(); + } } startProgressTimer(playingMessageObject); - 
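The traversePlaylist() helper introduced above replaces the duplicated next/previous index arithmetic and, while the client is waiting for network, skips tracks whose media is not downloaded yet. A simplified, self-contained sketch of that skipping rule; it is not a drop-in replacement (the original also reports whether the playlist wrapped around), and all names are illustrative:

import java.util.List;
import java.util.function.Predicate;

final class PlaylistCursor {
    /** Returns the next playable index, or -1 if nothing qualifies. */
    static <T> int advance(List<T> playlist, int current, int direction,
                           boolean offline, Predicate<T> isDownloaded) {
        int n = playlist.size();
        if (n == 0) {
            return -1;
        }
        int idx = current + direction;
        for (int steps = 0; steps < n; steps++) {
            int wrapped = ((idx % n) + n) % n;   // wrap past either end of the list
            if (!offline || isDownloaded.test(playlist.get(wrapped))) {
                return wrapped;
            }
            idx += direction;                    // offline: skip media that is not cached
        }
        return -1;                               // offline and nothing is downloaded
    }
}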
NotificationCenter.getInstance(messageObject.currentAccount).postNotificationName(NotificationCenter.messagePlayingDidStart, messageObject); + NotificationCenter.getInstance(messageObject.currentAccount).postNotificationName(NotificationCenter.messagePlayingDidStart, messageObject, oldMessageObject); if (videoPlayer != null) { try { @@ -3396,6 +3424,10 @@ public boolean needUpdate() { return true; } + + private boolean ignoreAccelerometerGestures() { + return Build.MANUFACTURER.equalsIgnoreCase("samsung"); + } public void updateSilent(boolean value) { isSilent = value; @@ -3446,7 +3478,7 @@ public boolean pauseMessage(MessageObject messageObject) { stopProgressTimer(); try { if (audioPlayer != null) { - if (!playingMessageObject.isVoice() && (playingMessageObject.getDuration() * (1f - playingMessageObject.audioProgress) > 1)) { + if (!playingMessageObject.isVoice() && (playingMessageObject.getDuration() * (1f - playingMessageObject.audioProgress) > 1) && LaunchActivity.isResumed) { if (audioVolumeAnimator != null) { audioVolumeAnimator.removeAllUpdateListeners(); audioVolumeAnimator.cancel(); @@ -3569,12 +3601,12 @@ public void requestAudioFocus(boolean request) { } } - public void startRecording(int currentAccount, long dialogId, MessageObject replyToMsg, MessageObject replyToTopMsg, int guid) { + public void startRecording(int currentAccount, long dialogId, MessageObject replyToMsg, MessageObject replyToTopMsg, int guid, boolean manual) { boolean paused = false; if (playingMessageObject != null && isPlayingMessage(playingMessageObject) && !isMessagePaused()) { paused = true; } - + manualRecording = manual; requestAudioFocus(true); if (!NekoConfig.disableVibration.Bool()) { @@ -3595,6 +3627,7 @@ public void startRecording(int currentAccount, long dialogId, MessageObject repl setBluetoothScoOn(true); + sendAfterDone = 0; recordingAudio = new TLRPC.TL_document(); recordingGuid = guid; @@ -3606,20 +3639,36 @@ public void startRecording(int currentAccount, long dialogId, MessageObject repl recordingAudio.file_reference = new byte[0]; SharedConfig.saveConfig(); - recordingAudioFile = new File(FileLoader.getDirectory(FileLoader.MEDIA_DIR_CACHE), FileLoader.getAttachFileName(recordingAudio)); - + recordingAudioFile = new File(FileLoader.getDirectory(FileLoader.MEDIA_DIR_AUDIO), System.currentTimeMillis() + "_" + FileLoader.getAttachFileName(recordingAudio)) { + @Override + public boolean delete() { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("delete voice file"); + } + return super.delete(); + } + }; + FileLoader.getDirectory(FileLoader.MEDIA_DIR_CACHE).mkdirs(); + if (BuildVars.LOGS_ENABLED) { + FileLog.d("start recording internal " + recordingAudioFile.getPath() + " " + recordingAudioFile.exists()); + } + AutoDeleteMediaTask.lockFile(recordingAudioFile); try { - if (startRecord(recordingAudioFile.getAbsolutePath(), sampleRate) == 0) { + if (startRecord(recordingAudioFile.getPath(), sampleRate) == 0) { AndroidUtilities.runOnUIThread(() -> { recordStartRunnable = null; NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.recordStartError, guid); }); + if (BuildVars.LOGS_ENABLED) { + FileLog.d("cant init encoder"); + } return; } audioRecorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, recordBufferSize); recordStartTime = System.currentTimeMillis(); recordTimeCount = 0; + writedFrame = 0; samplesCount = 0; recordDialogId = dialogId; recordingCurrentAccount = currentAccount; @@ 
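One detail worth calling out above: the recording file is now created through an anonymous File subclass whose only job is to log every delete() call, which makes "voice file disappeared" reports traceable. The same trick in isolation (names are illustrative):

import java.io.File;

class TracedFileDemo {
    static File traced(String path) {
        return new File(path) {
            @Override
            public boolean delete() {
                // log-and-delegate, the same idea used for recordingAudioFile
                System.out.println("delete() called on " + getPath());
                return super.delete();
            }
        };
    }
}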
-3633,6 +3682,7 @@ public void startRecording(int currentAccount, long dialogId, MessageObject repl FileLog.e(e); recordingAudio = null; stopRecord(); + AutoDeleteMediaTask.unlockFile(recordingAudioFile); recordingAudioFile.delete(); recordingAudioFile = null; try { @@ -3667,7 +3717,13 @@ public void generateWaveform(MessageObject messageObject) { } generatingWaveform.put(id, messageObject); Utilities.globalQueue.postRunnable(() -> { - final byte[] waveform = getWaveform(path); + final byte[] waveform; + try { + waveform = getWaveform(path); + } catch (Exception e) { + FileLog.e(e); + return; + } AndroidUtilities.runOnUIThread(() -> { MessageObject messageObject1 = generatingWaveform.remove(id); if (messageObject1 == null) { @@ -3697,9 +3753,22 @@ private void stopRecordingInternal(final int send, boolean notify, int scheduleD if (send != 0) { final TLRPC.TL_document audioToSend = recordingAudio; final File recordingAudioFileToSend = recordingAudioFile; + if (BuildVars.LOGS_ENABLED) { + FileLog.d("stop recording internal filename " + recordingAudioFile.getPath()); + } fileEncodingQueue.postRunnable(() -> { stopRecord(); + if (BuildVars.LOGS_ENABLED) { + FileLog.d("stop recording internal in queue " + recordingAudioFileToSend.exists() + " " + recordingAudioFileToSend.length()); + } AndroidUtilities.runOnUIThread(() -> { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("stop recording internal " + recordingAudioFileToSend.exists() + " " + recordingAudioFileToSend.length() + " " + " recordTimeCount " + recordTimeCount + " writedFrames" + writedFrame); + } + boolean fileExist = recordingAudioFileToSend.exists(); + if (!fileExist && BuildVars.DEBUG_VERSION) { + FileLog.e(new RuntimeException("file not found :( recordTimeCount " + recordTimeCount + " writedFrames" + writedFrame)); + } audioToSend.date = ConnectionsManager.getInstance(recordingCurrentAccount).getCurrentTime(); audioToSend.size = (int) recordingAudioFileToSend.length(); TLRPC.TL_documentAttributeAudio attributeAudio = new TLRPC.TL_documentAttributeAudio(); @@ -3719,12 +3788,14 @@ private void stopRecordingInternal(final int send, boolean notify, int scheduleD NotificationCenter.getInstance(recordingCurrentAccount).postNotificationName(NotificationCenter.audioDidSent, recordingGuid, send == 2 ? audioToSend : null, send == 2 ? 
recordingAudioFileToSend.getAbsolutePath() : null); } else { NotificationCenter.getInstance(recordingCurrentAccount).postNotificationName(NotificationCenter.audioRecordTooShort, recordingGuid, false, (int) duration); + AutoDeleteMediaTask.unlockFile(recordingAudioFileToSend); recordingAudioFileToSend.delete(); } requestAudioFocus(false); }); }); } else { + AutoDeleteMediaTask.unlockFile(recordingAudioFile); if (recordingAudioFile != null) { recordingAudioFile.delete(); } @@ -3741,6 +3812,7 @@ private void stopRecordingInternal(final int send, boolean notify, int scheduleD } recordingAudio = null; recordingAudioFile = null; + manualRecording = false; } public void stopRecording(final int send, boolean notify, int scheduleDate) { @@ -3766,6 +3838,9 @@ public void stopRecording(final int send, boolean notify, int scheduleDate) { } catch (Exception e) { FileLog.e(e); if (recordingAudioFile != null) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("delete voice file"); + } recordingAudioFile.delete(); } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MediaDataController.java b/TMessagesProj/src/main/java/org/telegram/messenger/MediaDataController.java index b92084cc38..8d35f3f43f 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MediaDataController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MediaDataController.java @@ -45,6 +45,8 @@ import androidx.core.content.pm.ShortcutManagerCompat; import androidx.core.graphics.drawable.IconCompat; +//import com.android.billingclient.api.ProductDetails; + import androidx.collection.LongSparseArray; import org.telegram.SQLite.SQLiteCursor; @@ -53,7 +55,6 @@ import org.telegram.SQLite.SQLitePreparedStatement; import org.telegram.messenger.ringtone.RingtoneDataStore; import org.telegram.messenger.ringtone.RingtoneUploader; -import org.telegram.messenger.support.SparseLongArray; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.NativeByteBuffer; import org.telegram.tgnet.RequestDelegate; @@ -67,13 +68,13 @@ import org.telegram.ui.Components.AvatarDrawable; import org.telegram.ui.Components.Bulletin; import org.telegram.ui.Components.ChatThemeBottomSheet; -import org.telegram.ui.Components.RLottieDrawable; import org.telegram.ui.Components.StickerSetBulletinLayout; import org.telegram.ui.Components.StickersArchiveAlert; import org.telegram.ui.Components.TextStyleSpan; import org.telegram.ui.Components.URLSpanReplacement; import org.telegram.ui.Components.URLSpanUserMention; import org.telegram.ui.LaunchActivity; +import org.telegram.ui.PremiumPreviewFragment; import java.io.File; import java.util.ArrayList; @@ -177,6 +178,8 @@ public MediaDataController(int num) { loadStickersByEmojiOrName(AndroidUtilities.STICKERS_PLACEHOLDER_PACK_NAME, false, true); loadEmojiThemes(); loadRecentAndTopReactions(false); + loadAvatarConstructor(false); + loadAvatarConstructor(true); ringtoneDataStore = new RingtoneDataStore(currentAccount); } @@ -386,6 +389,14 @@ public TLRPC.TL_help_premiumPromo getPremiumPromo() { return premiumPromo; } + public Integer getPremiumHintAnnualDiscount(boolean checkTransaction) { + if (checkTransaction && (!BillingController.getInstance().isReady() || BillingController.getInstance().getLastPremiumTransaction() == null) || premiumPromo == null) { + return null; + } + // NekoX: Remove BillingClient + return null; + } + public TLRPC.TL_attachMenuBots getAttachMenuBots() { return attachMenuBots; } @@ -651,53 +662,33 @@ public void processLoadedReactions(List reactions, i } public 
void preloadDefaultReactions() { - if (reactionsList == null || reactionsCacheGenerated || SharedConfig.getLiteMode().enabled()) { + if (reactionsList == null || reactionsCacheGenerated || !LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_REACTIONS)) { + return; + } + if (currentAccount != UserConfig.selectedAccount) { return; } reactionsCacheGenerated = true; ArrayList arrayList = new ArrayList<>(reactionsList); - for (int i = 0; i < arrayList.size(); i++) { + int N = Math.min(arrayList.size(), 10); + for (int i = 0; i < N; i++) { TLRPC.TL_availableReaction reaction = arrayList.get(i); - preloadImage(ImageLocation.getForDocument(reaction.activate_animation), null); - preloadImage(ImageLocation.getForDocument(reaction.appear_animation), null); + preloadImage(ImageLocation.getForDocument(reaction.activate_animation)); + preloadImage(ImageLocation.getForDocument(reaction.appear_animation)); } - for (int i = 0; i < arrayList.size(); i++) { + for (int i = 0; i < N; i++) { TLRPC.TL_availableReaction reaction = arrayList.get(i); - preloadImage(ImageLocation.getForDocument(reaction.effect_animation), null); + preloadImage(ImageLocation.getForDocument(reaction.effect_animation)); } } - private void preloadImage(ImageLocation location, String filter) { - preloadImage(location, filter, false); - } - - private void preloadImage(ImageLocation location, String filter, boolean log) { - ImageReceiver imageReceiver = new ImageReceiver(); - imageReceiver.setAllowStartAnimation(false); - imageReceiver.setAllowStartLottieAnimation(false); - imageReceiver.setAllowDecodeSingleFrame(false); - imageReceiver.setDelegate((imageReceiver1, set, thumb, memCache) -> { - if (set) { - RLottieDrawable rLottieDrawable = imageReceiver.getLottieAnimation(); - if (rLottieDrawable != null) { - rLottieDrawable.checkCache(() -> { - imageReceiver.clearImage(); - imageReceiver.setDelegate(null); - }); - } else { - imageReceiver.clearImage(); - imageReceiver.setDelegate(null); - } - } - }); - imageReceiver.setFileLoadingPriority(FileLoader.PRIORITY_LOW); - imageReceiver.setUniqKeyPrefix("preload"); - imageReceiver.setImage(location, filter, null, null, 0, FileLoader.PRELOAD_CACHE_TYPE); + private void preloadImage(ImageLocation location) { + getFileLoader().loadFile(location, null, null, FileLoader.PRIORITY_LOW, FileLoader.PRELOAD_CACHE_TYPE); } public void preloadImage(ImageReceiver imageReceiver, ImageLocation location, String filter) { - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_REACTIONS)) { return; } imageReceiver.setUniqKeyPrefix("preload"); @@ -1113,7 +1104,7 @@ public TLRPC.TL_messages_stickerSet getStickerSet(TLRPC.InputStickerSet inputSti } return cacheSet; } - if (inputStickerSet instanceof TLRPC.TL_inputStickerSetID && hash != null) { + if (inputStickerSet instanceof TLRPC.TL_inputStickerSetID) { getMessagesStorage().getStorageQueue().postRunnable(() -> { TLRPC.TL_messages_stickerSet cachedSet = getCachedStickerSetInternal(inputStickerSet.id, hash); AndroidUtilities.runOnUIThread(() -> { @@ -1195,7 +1186,7 @@ private TLRPC.TL_messages_stickerSet getCachedStickerSetInternal(long id, Intege if (data != null) { set = TLRPC.TL_messages_stickerSet.TLdeserialize(data, data.readInt32(false), false); int cachedHash = cursor.intValue(1); - if (hash != null && hash != cachedHash) { + if (hash != null && hash != 0 && hash != cachedHash) { return null; } } @@ -1575,7 +1566,7 @@ public void loadRecents(int type, boolean gif, boolean cache, boolean force) { loadRecents(type, 
gif, false, false); }); } catch (Throwable e) { - FileLog.e(e); + getMessagesStorage().checkSQLException(e); } }); } else { @@ -2328,7 +2319,9 @@ public void checkGenericAnimations() { processLoadedDiceStickers(getUserConfig().genericAnimationsStickerPack, false, stickerSet, false, (int) (System.currentTimeMillis() / 1000)); for (int i = 0; i < stickerSet.documents.size(); i++) { - preloadImage(ImageLocation.getForDocument(stickerSet.documents.get(i)), null); + if (currentAccount == UserConfig.selectedAccount) { + preloadImage(ImageLocation.getForDocument(stickerSet.documents.get(i))); + } } } })); @@ -2360,9 +2353,6 @@ public void checkDefaultTopicIcons() { getUserConfig().saveConfig(false); processLoadedDiceStickers(getUserConfig().defaultTopicIcons, false, stickerSet, false, (int) (System.currentTimeMillis() / 1000)); - for (int i = 0; i < stickerSet.documents.size(); i++) { - preloadImage(ImageLocation.getForDocument(stickerSet.documents.get(i)), null); - } } })); } @@ -3750,12 +3740,14 @@ public static int getMediaType(TLRPC.Message message) { boolean isAnimated = false; boolean isVideo = false; boolean isVoice = false; + boolean isRound = false; boolean isMusic = false; boolean isSticker = false; for (int a = 0; a < document.attributes.size(); a++) { TLRPC.DocumentAttribute attribute = document.attributes.get(a); if (attribute instanceof TLRPC.TL_documentAttributeVideo) { + isRound = attribute.round_message; isVoice = attribute.round_message; isVideo = !attribute.round_message; } else if (attribute instanceof TLRPC.TL_documentAttributeAnimated) { @@ -3767,7 +3759,7 @@ public static int getMediaType(TLRPC.Message message) { isSticker = true; } } - if (isVoice) { + if (isVoice || isRound) { return MEDIA_AUDIO; } else if (isVideo && !isAnimated && !isSticker) { return MEDIA_PHOTOVIDEO; @@ -3809,7 +3801,7 @@ private void processLoadedMedia(TLRPC.messages_Messages res, long dialogId, int } FileLog.d("process load media messagesCount " + messagesCount + " did " + dialogId + " topicId " + topicId + " count = " + count + " max_id=" + max_id + " min_id=" + min_id + " type = " + type + " cache = " + fromCache + " classGuid = " + classGuid); } - if (fromCache != 0 && ((res.messages.isEmpty() && min_id == 0) || (res.messages.size() <= 1 && min_id != 0)) && !DialogObject.isEncryptedDialog(dialogId)) { + if (fromCache != 0 && res != null && res.messages != null && ((res.messages.isEmpty() && min_id == 0) || (res.messages.size() <= 1 && min_id != 0)) && !DialogObject.isEncryptedDialog(dialogId)) { if (fromCache == 2) { return; } @@ -3841,12 +3833,29 @@ private void processLoadedMedia(TLRPC.messages_Messages res, long dialogId, int } getFileLoader().checkMediaExistance(objects); - AndroidUtilities.runOnUIThread(() -> { - int totalCount = res.count; - getMessagesController().putUsers(res.users, fromCache != 0); - getMessagesController().putChats(res.chats, fromCache != 0); - getNotificationCenter().postNotificationName(NotificationCenter.mediaDidLoad, dialogId, totalCount, objects, classGuid, type, topReached, min_id != 0, requestIndex); - }); + Runnable notify = () -> { + AndroidUtilities.runOnUIThread(() -> { + int totalCount = res.count; + getMessagesController().putUsers(res.users, fromCache != 0); + getMessagesController().putChats(res.chats, fromCache != 0); + getNotificationCenter().postNotificationName(NotificationCenter.mediaDidLoad, dialogId, totalCount, objects, classGuid, type, topReached, min_id != 0, requestIndex); + }); + }; + + if 
(getMessagesController().getTranslateController().isFeatureAvailable()) { + getMessagesStorage().getStorageQueue().postRunnable(() -> { + for (int i = 0; i < objects.size(); ++i) { + MessageObject messageObject = objects.get(i); + TLRPC.Message message = getMessagesStorage().getMessageWithCustomParamsOnlyInternal(messageObject.getId(), messageObject.getDialogId()); + messageObject.messageOwner.translatedToLanguage = message.translatedToLanguage; + messageObject.messageOwner.translatedText = message.translatedText; + messageObject.updateTranslation(); + } + notify.run(); + }); + } else { + notify.run(); + } }); } } @@ -4278,48 +4287,20 @@ public void buildShortcuts() { ApplicationLoader.applicationContext.getSharedPreferences("mainconfig", Activity.MODE_PRIVATE).edit().putString("directShareHash2", SharedConfig.directShareHash).commit(); } - List currentShortcuts = ShortcutManagerCompat.getDynamicShortcuts(ApplicationLoader.applicationContext); - ArrayList shortcutsToUpdate = new ArrayList<>(); - ArrayList newShortcutsIds = new ArrayList<>(); - ArrayList shortcutsToDelete = new ArrayList<>(); + ShortcutManagerCompat.removeAllDynamicShortcuts(ApplicationLoader.applicationContext); - if (currentShortcuts != null && !currentShortcuts.isEmpty()) { - newShortcutsIds.add("compose"); - for (int a = 0; a < hintsFinal.size(); a++) { - TLRPC.TL_topPeer hint = hintsFinal.get(a); - newShortcutsIds.add("did3_" + MessageObject.getPeerId(hint.peer)); - } - for (int a = 0; a < currentShortcuts.size(); a++) { - String id = currentShortcuts.get(a).getId(); - if (!newShortcutsIds.remove(id)) { - shortcutsToDelete.add(id); - } - shortcutsToUpdate.add(id); - } - if (newShortcutsIds.isEmpty() && shortcutsToDelete.isEmpty()) { - return; - } - } Intent intent = new Intent(ApplicationLoader.applicationContext, LaunchActivity.class); intent.setAction("new_dialog"); ArrayList arrayList = new ArrayList<>(); - arrayList.add(new ShortcutInfoCompat.Builder(ApplicationLoader.applicationContext, "compose") + ShortcutManagerCompat.pushDynamicShortcut(ApplicationLoader.applicationContext, new ShortcutInfoCompat.Builder(ApplicationLoader.applicationContext, "compose") .setShortLabel(LocaleController.getString("NewConversationShortcut", R.string.NewConversationShortcut)) .setLongLabel(LocaleController.getString("NewConversationShortcut", R.string.NewConversationShortcut)) .setIcon(IconCompat.createWithResource(ApplicationLoader.applicationContext, R.drawable.shortcut_compose)) + .setRank(0) .setIntent(intent) .build()); - if (shortcutsToUpdate.contains("compose")) { - ShortcutManagerCompat.updateShortcuts(ApplicationLoader.applicationContext, arrayList); - } else { - ShortcutManagerCompat.addDynamicShortcuts(ApplicationLoader.applicationContext, arrayList); - } - arrayList.clear(); - if (!shortcutsToDelete.isEmpty()) { - ShortcutManagerCompat.removeDynamicShortcuts(ApplicationLoader.applicationContext, shortcutsToDelete); - } HashSet category = new HashSet<>(1); category.add(SHORTCUT_CATEGORY); @@ -4403,6 +4384,7 @@ public void buildShortcuts() { ShortcutInfoCompat.Builder builder = new ShortcutInfoCompat.Builder(ApplicationLoader.applicationContext, id) .setShortLabel(name) .setLongLabel(name) + .setRank(1 + a) .setIntent(shortcutIntent); if (SharedConfig.directShare) { builder.setCategories(category); @@ -4412,13 +4394,7 @@ public void buildShortcuts() { } else { builder.setIcon(IconCompat.createWithResource(ApplicationLoader.applicationContext, R.drawable.shortcut_user)); } - arrayList.add(builder.build()); - if 
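The buildShortcuts() rewrite here and just below drops the manual add/update/remove bookkeeping in favour of ShortcutManagerCompat.pushDynamicShortcut(), which updates an existing shortcut with the same id and, per the androidx documentation, removes the lowest-ranked entry when the launcher's limit is exceeded; hence the explicit setRank() calls. An illustrative fragment of the call shape (context and intent are assumed to be in scope):

ShortcutInfoCompat compose = new ShortcutInfoCompat.Builder(context, "compose")
        .setShortLabel(LocaleController.getString("NewConversationShortcut", R.string.NewConversationShortcut))
        .setIcon(IconCompat.createWithResource(context, R.drawable.shortcut_compose))
        .setRank(0)                     // compose always ranks first
        .setIntent(intent)              // the intent must carry an explicit action
        .build();
ShortcutManagerCompat.pushDynamicShortcut(context, compose);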
(shortcutsToUpdate.contains(id)) { - ShortcutManagerCompat.updateShortcuts(ApplicationLoader.applicationContext, arrayList); - } else { - ShortcutManagerCompat.addDynamicShortcuts(ApplicationLoader.applicationContext, arrayList); - } - arrayList.clear(); + ShortcutManagerCompat.pushDynamicShortcut(ApplicationLoader.applicationContext, builder.build()); } } catch (Throwable ignore) { @@ -5439,35 +5415,41 @@ public void loadReplyMessagesForMessages(ArrayList messages, long if (ids == null) { continue; } - SQLiteCursor cursor; - if (scheduled) { - cursor = getMessagesStorage().getDatabase().queryFinalized(String.format(Locale.US, "SELECT data, mid, date, uid FROM scheduled_messages_v2 WHERE mid IN(%s) AND uid = %d", TextUtils.join(",", ids), dialogId)); - } else { - cursor = getMessagesStorage().getDatabase().queryFinalized(String.format(Locale.US, "SELECT data, mid, date, uid FROM messages_v2 WHERE mid IN(%s) AND uid = %d", TextUtils.join(",", ids), dialogId)); - } - while (cursor.next()) { - NativeByteBuffer data = cursor.byteBufferValue(0); - if (data != null) { - TLRPC.Message message = TLRPC.Message.TLdeserialize(data, data.readInt32(false), false); - message.readAttachPath(data, getUserConfig().clientUserId); - data.reuse(); - message.id = cursor.intValue(1); - message.date = cursor.intValue(2); - message.dialog_id = dialogId; - MessagesStorage.addUsersAndChatsFromMessage(message, usersToLoad, chatsToLoad, null); - result.add(message); - - long channelId = message.peer_id != null ? message.peer_id.channel_id : 0; - ArrayList mids = dialogReplyMessagesIds.get(channelId); - if (mids != null) { - mids.remove((Integer) message.id); - if (mids.isEmpty()) { - dialogReplyMessagesIds.remove(channelId); + for (int i = 0; i < 2; i++) { + if (i == 1 && !scheduled) { + continue; + } + boolean findInScheduled = i == 1; + SQLiteCursor cursor; + if (findInScheduled) { + cursor = getMessagesStorage().getDatabase().queryFinalized(String.format(Locale.US, "SELECT data, mid, date, uid FROM scheduled_messages_v2 WHERE mid IN(%s) AND uid = %d", TextUtils.join(",", ids), dialogId)); + } else { + cursor = getMessagesStorage().getDatabase().queryFinalized(String.format(Locale.US, "SELECT data, mid, date, uid FROM messages_v2 WHERE mid IN(%s) AND uid = %d", TextUtils.join(",", ids), dialogId)); + } + while (cursor.next()) { + NativeByteBuffer data = cursor.byteBufferValue(0); + if (data != null) { + TLRPC.Message message = TLRPC.Message.TLdeserialize(data, data.readInt32(false), false); + message.readAttachPath(data, getUserConfig().clientUserId); + data.reuse(); + message.id = cursor.intValue(1); + message.date = cursor.intValue(2); + message.dialog_id = dialogId; + MessagesStorage.addUsersAndChatsFromMessage(message, usersToLoad, chatsToLoad, null); + result.add(message); + + long channelId = message.peer_id != null ? 
message.peer_id.channel_id : 0; + ArrayList mids = dialogReplyMessagesIds.get(channelId); + if (mids != null) { + mids.remove((Integer) message.id); + if (mids.isEmpty()) { + dialogReplyMessagesIds.remove(channelId); + } } } } + cursor.dispose(); } - cursor.dispose(); } if (!usersToLoad.isEmpty()) { @@ -5490,15 +5472,55 @@ public void loadReplyMessagesForMessages(ArrayList messages, long TLRPC.messages_Messages messagesRes = (TLRPC.messages_Messages) response; for (int i = 0; i < messagesRes.messages.size(); i++) { TLRPC.Message message = messagesRes.messages.get(i); - if (message.dialog_id == 0) { - message.dialog_id = dialogId; + if (message instanceof TLRPC.TL_messageEmpty) { + messagesRes.messages.remove(i); + i--; } } - MessageObject.fixMessagePeer(messagesRes.messages, channelId); - ImageLoader.saveMessagesThumbs(messagesRes.messages); - broadcastReplyMessages(messagesRes.messages, replyMessageOwners, messagesRes.users, messagesRes.chats, dialogId, false); - getMessagesStorage().putUsersAndChats(messagesRes.users, messagesRes.chats, true, true); - saveReplyMessages(replyMessageOwners, messagesRes.messages, scheduled); + if (messagesRes.messages.size() < req.id.size()) { + TLObject req2; + if (channelId != 0) { + TLRPC.TL_channels_getMessages reqInner = new TLRPC.TL_channels_getMessages(); + reqInner.channel = getMessagesController().getInputChannel(channelId); + reqInner.id = req.id; + req2 = reqInner; + } else { + TLRPC.TL_messages_getMessages reqInner = new TLRPC.TL_messages_getMessages(); + reqInner.id = req.id; + req2 = reqInner; + } + getConnectionsManager().sendRequest(req2, (response2, error2) -> { + if (error == null) { + TLRPC.messages_Messages messagesRes2 = (TLRPC.messages_Messages) response2; + messagesRes.messages.addAll(messagesRes2.messages); + messagesRes.users.addAll(messagesRes2.users); + messagesRes.chats.addAll(messagesRes2.chats); + for (int i = 0; i < messagesRes.messages.size(); i++) { + TLRPC.Message message = messagesRes.messages.get(i); + if (message.dialog_id == 0) { + message.dialog_id = dialogId; + } + } + MessageObject.fixMessagePeer(messagesRes.messages, channelId); + ImageLoader.saveMessagesThumbs(messagesRes.messages); + broadcastReplyMessages(messagesRes.messages, replyMessageOwners, messagesRes.users, messagesRes.chats, dialogId, false); + getMessagesStorage().putUsersAndChats(messagesRes.users, messagesRes.chats, true, true); + saveReplyMessages(replyMessageOwners, messagesRes.messages, scheduled); + } + }); + } else { + for (int i = 0; i < messagesRes.messages.size(); i++) { + TLRPC.Message message = messagesRes.messages.get(i); + if (message.dialog_id == 0) { + message.dialog_id = dialogId; + } + } + MessageObject.fixMessagePeer(messagesRes.messages, channelId); + ImageLoader.saveMessagesThumbs(messagesRes.messages); + broadcastReplyMessages(messagesRes.messages, replyMessageOwners, messagesRes.users, messagesRes.chats, dialogId, false); + getMessagesStorage().putUsersAndChats(messagesRes.users, messagesRes.chats, true, true); + saveReplyMessages(replyMessageOwners, messagesRes.messages, scheduled); + } } if (callback != null) { AndroidUtilities.runOnUIThread(callback); @@ -6571,37 +6593,74 @@ public void endTransaction() { //---------------- DRAFT END ---------------- private HashMap botInfos = new HashMap<>(); - private LongSparseArray botKeyboards = new LongSparseArray<>(); - private SparseLongArray botKeyboardsByMids = new SparseLongArray(); + private LongSparseArray> botDialogKeyboards = new LongSparseArray<>(); + private HashMap 
botKeyboards = new HashMap<>(); + private LongSparseArray botKeyboardsByMids = new LongSparseArray(); - public void clearBotKeyboard(long dialogId, ArrayList messages) { + public void clearBotKeyboard(MessagesStorage.TopicKey topicKey, ArrayList messages) { AndroidUtilities.runOnUIThread(() -> { if (messages != null) { for (int a = 0; a < messages.size(); a++) { - long did1 = botKeyboardsByMids.get(messages.get(a)); - if (did1 != 0) { - botKeyboards.remove(did1); - botKeyboardsByMids.delete(messages.get(a)); - getNotificationCenter().postNotificationName(NotificationCenter.botKeyboardDidLoad, null, did1); + final int id = messages.get(a); + MessagesStorage.TopicKey foundTopicKey = botKeyboardsByMids.get(id); + if (foundTopicKey != null) { + botKeyboards.remove(foundTopicKey); + ArrayList dialogMessages = botDialogKeyboards.get(foundTopicKey.dialogId); + if (dialogMessages != null) { + for (int i = 0; i < dialogMessages.size(); ++i) { + TLRPC.Message msg = dialogMessages.get(i); + if (msg == null || msg.id == id) { + dialogMessages.remove(i); + i--; + } + } + if (dialogMessages.isEmpty()) { + botDialogKeyboards.remove(foundTopicKey.dialogId); + } + } + botKeyboardsByMids.remove(id); + getNotificationCenter().postNotificationName(NotificationCenter.botKeyboardDidLoad, null, foundTopicKey); } } - } else { - botKeyboards.remove(dialogId); - getNotificationCenter().postNotificationName(NotificationCenter.botKeyboardDidLoad, null, dialogId); + } else if (topicKey != null) { + botKeyboards.remove(topicKey); + botDialogKeyboards.remove(topicKey.dialogId); + getNotificationCenter().postNotificationName(NotificationCenter.botKeyboardDidLoad, null, topicKey); } }); } - public void loadBotKeyboard(long dialogId) { - TLRPC.Message keyboard = botKeyboards.get(dialogId); + public void clearBotKeyboard(long dialogId) { + AndroidUtilities.runOnUIThread(() -> { + ArrayList dialogMessages = botDialogKeyboards.get(dialogId); + if (dialogMessages != null) { + for (int i = 0; i < dialogMessages.size(); ++i) { + TLRPC.Message msg = dialogMessages.get(i); + int topicId = MessageObject.getTopicId(msg, ChatObject.isForum(currentAccount, dialogId)); + MessagesStorage.TopicKey topicKey = MessagesStorage.TopicKey.of(dialogId, topicId); + botKeyboards.remove(topicKey); + getNotificationCenter().postNotificationName(NotificationCenter.botKeyboardDidLoad, null, topicKey); + } + } + botDialogKeyboards.remove(dialogId); + }); + } + + public void loadBotKeyboard(MessagesStorage.TopicKey topicKey) { + TLRPC.Message keyboard = botKeyboards.get(topicKey); if (keyboard != null) { - getNotificationCenter().postNotificationName(NotificationCenter.botKeyboardDidLoad, keyboard, dialogId); + getNotificationCenter().postNotificationName(NotificationCenter.botKeyboardDidLoad, keyboard, topicKey); return; } getMessagesStorage().getStorageQueue().postRunnable(() -> { try { TLRPC.Message botKeyboard = null; - SQLiteCursor cursor = getMessagesStorage().getDatabase().queryFinalized(String.format(Locale.US, "SELECT info FROM bot_keyboard WHERE uid = %d", dialogId)); + SQLiteCursor cursor; + if (topicKey.topicId != 0) { + cursor = getMessagesStorage().getDatabase().queryFinalized(String.format(Locale.US, "SELECT info FROM bot_keyboard_topics WHERE uid = %d AND tid = %d", topicKey.dialogId, topicKey.topicId)); + } else { + cursor = getMessagesStorage().getDatabase().queryFinalized(String.format(Locale.US, "SELECT info FROM bot_keyboard WHERE uid = %d", topicKey.dialogId)); + } if (cursor.next()) { NativeByteBuffer data; @@ -6617,7 +6676,7 @@ 
public void loadBotKeyboard(long dialogId) { if (botKeyboard != null) { TLRPC.Message botKeyboardFinal = botKeyboard; - AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.botKeyboardDidLoad, botKeyboardFinal, dialogId)); + AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.botKeyboardDidLoad, botKeyboardFinal, topicKey)); } } catch (Exception e) { FileLog.e(e); @@ -6663,13 +6722,18 @@ public void loadBotInfo(long uid, long dialogId, boolean cache, int classGuid) { }); } - public void putBotKeyboard(long dialogId, TLRPC.Message message) { - if (message == null) { + public void putBotKeyboard(MessagesStorage.TopicKey topicKey, TLRPC.Message message) { + if (topicKey == null) { return; } try { int mid = 0; - SQLiteCursor cursor = getMessagesStorage().getDatabase().queryFinalized(String.format(Locale.US, "SELECT mid FROM bot_keyboard WHERE uid = %d", dialogId)); + SQLiteCursor cursor; + if (topicKey.topicId != 0) { + cursor = getMessagesStorage().getDatabase().queryFinalized(String.format(Locale.US, "SELECT mid FROM bot_keyboard_topics WHERE uid = %d AND tid = %d", topicKey.dialogId, topicKey.topicId)); + } else { + cursor = getMessagesStorage().getDatabase().queryFinalized(String.format(Locale.US, "SELECT mid FROM bot_keyboard WHERE uid = %d", topicKey.dialogId)); + } if (cursor.next()) { mid = cursor.intValue(0); } @@ -6678,28 +6742,46 @@ public void putBotKeyboard(long dialogId, TLRPC.Message message) { return; } - SQLitePreparedStatement state = getMessagesStorage().getDatabase().executeFast("REPLACE INTO bot_keyboard VALUES(?, ?, ?)"); + SQLitePreparedStatement state; + if (topicKey.topicId != 0) { + state = getMessagesStorage().getDatabase().executeFast("REPLACE INTO bot_keyboard_topics VALUES(?, ?, ?, ?)"); + } else { + state = getMessagesStorage().getDatabase().executeFast("REPLACE INTO bot_keyboard VALUES(?, ?, ?)"); + } state.requery(); NativeByteBuffer data = new NativeByteBuffer(message.getObjectSize()); message.serializeToStream(data); - state.bindLong(1, dialogId); - state.bindInteger(2, message.id); - state.bindByteBuffer(3, data); + if (topicKey.topicId != 0) { + state.bindLong(1, topicKey.dialogId); + state.bindInteger(2, topicKey.topicId); + state.bindInteger(3, message.id); + state.bindByteBuffer(4, data); + } else { + state.bindLong(1, topicKey.dialogId); + state.bindInteger(2, message.id); + state.bindByteBuffer(3, data); + } state.step(); data.reuse(); state.dispose(); AndroidUtilities.runOnUIThread(() -> { - TLRPC.Message old = botKeyboards.get(dialogId); - botKeyboards.put(dialogId, message); + TLRPC.Message old = botKeyboards.get(topicKey); + botKeyboards.put(topicKey, message); + ArrayList messages = botDialogKeyboards.get(topicKey.dialogId); + if (messages == null) { + messages = new ArrayList<>(); + } + messages.add(message); + botDialogKeyboards.put(topicKey.dialogId, messages); long channelId = MessageObject.getChannelId(message); if (channelId == 0) { if (old != null) { botKeyboardsByMids.delete(old.id); } - botKeyboardsByMids.put(message.id, dialogId); + botKeyboardsByMids.put(message.id, topicKey); } - getNotificationCenter().postNotificationName(NotificationCenter.botKeyboardDidLoad, message, dialogId); + getNotificationCenter().postNotificationName(NotificationCenter.botKeyboardDidLoad, message, topicKey); }); } catch (Exception e) { FileLog.e(e); @@ -6845,10 +6927,12 @@ public void preloadPremiumPreviewStickers() { TLRPC.Document document = 
premiumPreviewStickers.get(i == 2 ? premiumPreviewStickers.size() - 1 : i); if (MessageObject.isPremiumSticker(document)) { ImageReceiver imageReceiver = new ImageReceiver(); + imageReceiver.setAllowLoadingOnAttachedOnly(false); imageReceiver.setImage(ImageLocation.getForDocument(document), null, null, "webp", null, 1); ImageLoader.getInstance().loadImageForImageReceiver(imageReceiver); imageReceiver = new ImageReceiver(); + imageReceiver.setAllowLoadingOnAttachedOnly(false); imageReceiver.setImage(ImageLocation.getForDocument(MessageObject.getPremiumStickerAnimation(document), document), null, null, null, "tgs", null, 1); ImageLoader.getInstance().loadImageForImageReceiver(imageReceiver); } @@ -7122,14 +7206,14 @@ public void getAnimatedEmojiByKeywords(String query, Utilities.Callback ranges = Emoji.parseEmojis(keyword, emojiOnly); + if (emojiOnly[0] > 0) { + for (int i = 0; i < ranges.size(); ++i) { + String code = ranges.get(i).code.toString(); + boolean foundDuplicate = false; + for (int j = 0; j < result.size(); ++j) { + if (TextUtils.equals(result.get(j).emoji, code)) { + foundDuplicate = true; + break; + } + } + if (!foundDuplicate) { + KeywordResult keywordResult = new KeywordResult(); + keywordResult.emoji = code; + keywordResult.keyword = ""; + result.add(keywordResult); + } + } + } + } + String key = keyword.toLowerCase(); for (int a = 0; a < 2; a++) { if (a == 1) { @@ -7785,4 +7892,60 @@ public static ArrayList loadReactionsFromPref(SharedPreferences } return objects; } + + public TLRPC.TL_emojiList profileAvatarConstructorDefault; + public TLRPC.TL_emojiList groupAvatarConstructorDefault; + + private void loadAvatarConstructor(boolean profile) { + SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("avatar_constructor" + currentAccount, Context.MODE_PRIVATE); + String value; + long lastCheckTime; + if (profile) { + value = preferences.getString("profile", null); + lastCheckTime = preferences.getLong("profile_last_check", 0); + } else { + value = preferences.getString("group", null); + lastCheckTime = preferences.getLong("group_last_check", 0); + } + + + TLRPC.TL_emojiList emojiList = null; + if (value != null) { + SerializedData serializedData = new SerializedData(Utilities.hexToBytes(value)); + try { + emojiList = (TLRPC.TL_emojiList) TLRPC.TL_emojiList.TLdeserialize(serializedData, serializedData.readInt32(true), true); + if (profile) { + profileAvatarConstructorDefault = emojiList; + } else { + groupAvatarConstructorDefault = emojiList; + } + } catch (Throwable e) { + FileLog.e(e); + } + } + + if (emojiList == null || (System.currentTimeMillis() - lastCheckTime) > 24 * 60 * 60 * 1000 || BuildVars.DEBUG_PRIVATE_VERSION) { + TLRPC.TL_account_getDefaultProfilePhotoEmojis req = new TLRPC.TL_account_getDefaultProfilePhotoEmojis(); + if (emojiList != null) { + req.hash = emojiList.hash; + } + getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + if (response instanceof TLRPC.TL_emojiList) { + SerializedData data = new SerializedData(response.getObjectSize()); + response.serializeToStream(data); + SharedPreferences.Editor editor = preferences.edit(); + if (profile) { + profileAvatarConstructorDefault = (TLRPC.TL_emojiList) response; + editor.putString("profile", Utilities.bytesToHex(data.toByteArray())); + editor.putLong("profile_last_check", System.currentTimeMillis()); + } else { + groupAvatarConstructorDefault = (TLRPC.TL_emojiList) response; + editor.putString("group", 
Utilities.bytesToHex(data.toByteArray())); + editor.putLong("group_last_check", System.currentTimeMillis()); + } + editor.apply(); + } + })); + } + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MessageCustomParamsHelper.java b/TMessagesProj/src/main/java/org/telegram/messenger/MessageCustomParamsHelper.java index 44f57a0fbf..8077a925f1 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MessageCustomParamsHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MessageCustomParamsHelper.java @@ -14,7 +14,10 @@ public static boolean isEmpty(TLRPC.Message message) { !message.voiceTranscriptionRated && !message.voiceTranscriptionForce && message.voiceTranscriptionId == 0 && - !message.premiumEffectWasPlayed; + !message.premiumEffectWasPlayed && + message.originalLanguage == null && + message.translatedToLanguage == null && + message.translatedText == null; } public static void copyParams(TLRPC.Message fromMessage, TLRPC.Message toMessage) { @@ -25,6 +28,9 @@ public static void copyParams(TLRPC.Message fromMessage, TLRPC.Message toMessage toMessage.voiceTranscriptionRated = fromMessage.voiceTranscriptionRated; toMessage.voiceTranscriptionId = fromMessage.voiceTranscriptionId; toMessage.premiumEffectWasPlayed = fromMessage.premiumEffectWasPlayed; + toMessage.originalLanguage = fromMessage.originalLanguage; + toMessage.translatedToLanguage = fromMessage.translatedToLanguage; + toMessage.translatedText = fromMessage.translatedText; } @@ -69,6 +75,10 @@ private Params_v1(TLRPC.Message message) { this.message = message; flags += message.voiceTranscription != null ? 1 : 0; flags += message.voiceTranscriptionForce ? 2 : 0; + + flags += message.originalLanguage != null ? 4 : 0; + flags += message.translatedToLanguage != null ? 8 : 0; + flags += message.translatedText != null ? 
16 : 0; } @Override @@ -85,6 +95,16 @@ public void serializeToStream(AbstractSerializedData stream) { stream.writeInt64(message.voiceTranscriptionId); stream.writeBool(message.premiumEffectWasPlayed); + + if ((flags & 4) != 0) { + stream.writeString(message.originalLanguage); + } + if ((flags & 8) != 0) { + stream.writeString(message.translatedToLanguage); + } + if ((flags & 16) != 0) { + message.translatedText.serializeToStream(stream); + } } @Override @@ -100,6 +120,16 @@ public void readParams(AbstractSerializedData stream, boolean exception) { message.voiceTranscriptionId = stream.readInt64(exception); message.premiumEffectWasPlayed = stream.readBool(exception); + + if ((flags & 4) != 0) { + message.originalLanguage = stream.readString(exception); + } + if ((flags & 8) != 0) { + message.translatedToLanguage = stream.readString(exception); + } + if ((flags & 16) != 0) { + message.translatedText = TLRPC.TL_textWithEntities.TLdeserialize(stream, stream.readInt32(exception), exception); + } } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MessageObject.java b/TMessagesProj/src/main/java/org/telegram/messenger/MessageObject.java index f472fc06ab..4fdf2d84fe 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MessageObject.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MessageObject.java @@ -95,8 +95,10 @@ public class MessageObject { public static final int TYPE_VIDEO = 3; public static final int TYPE_GEO = 4; // TL_messageMediaGeo, TL_messageMediaVenue, TL_messageMediaGeoLive public static final int TYPE_ROUND_VIDEO = 5; + public static final int TYPE_LOADING = 6; public static final int TYPE_GIF = 8; public static final int TYPE_FILE = 9; + public static final int TYPE_DATE = 10; public static final int TYPE_ACTION_PHOTO = 11; public static final int TYPE_CONTACT = 12; public static final int TYPE_STICKER = 13; @@ -123,6 +125,7 @@ public class MessageObject { public Long emojiAnimatedStickerId; public boolean isTopicMainMessage; public boolean settingAvatar; + public TLRPC.VideoSize emojiMarkup; private boolean emojiAnimatedStickerLoading; public String emojiAnimatedStickerColor; public CharSequence messageText; @@ -153,7 +156,7 @@ public class MessageObject { public float bufferedProgress; public float gifState; public int audioProgressSec; - public int audioPlayerDuration; + public int audioPlayerDuration, attributeDuration; public boolean isDateObject; public TLObject photoThumbsObject; public TLObject photoThumbsObject2; @@ -184,6 +187,7 @@ public class MessageObject { public String sponsoredChatInviteHash; public boolean sponsoredShowPeerPhoto; public boolean sponsoredRecommended; + public String sponsoredInfo, sponsoredAdditionalInfo; public TLRPC.TL_forumTopic replyToForumTopic; // used only for reply message in view all messages @@ -234,12 +238,11 @@ public class MessageObject { public int textHeight; public boolean hasRtl; public float textXOffset; - public int linesCount; public SendAnimationData sendAnimationData; private boolean hasUnwrappedEmoji; - private int emojiOnlyCount; + public int emojiOnlyCount, animatedEmojiCount; private int totalAnimatedEmojiCount; private boolean layoutCreated; private int generatedWithMinSize; @@ -353,6 +356,10 @@ public static boolean isTopicActionMessage(MessageObject message) { message.messageOwner.action instanceof TLRPC.TL_messageActionTopicEdit; } + public static boolean canCreateStripedThubms() { + return SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_HIGH; + } + 
public int getEmojiOnlyCount() { return emojiOnlyCount; } @@ -1187,6 +1194,7 @@ public MessageObject(int accountNum, TLRPC.Message message, MessageObject replyT updateMessageText(users, chats, sUsers, sChats); setType(); + updateTranslation(false); measureInlineBotButtons(); Calendar rightNow = new GregorianCalendar(); @@ -1208,7 +1216,7 @@ public MessageObject(int accountNum, TLRPC.Message message, MessageObject replyT } int[] emojiOnly = allowsBigEmoji() ? new int[1] : null; messageText = Emoji.replaceEmoji(messageText, paint.getFontMetricsInt(), AndroidUtilities.dp(20), false, emojiOnly); - messageText = replaceAnimatedEmoji(messageText, messageOwner.entities, paint.getFontMetricsInt()); + messageText = replaceAnimatedEmoji(messageText, paint.getFontMetricsInt()); if (emojiOnly != null && emojiOnly[0] > 1) { replaceEmojiToLottieFrame(messageText, emojiOnly); } @@ -1289,7 +1297,7 @@ private void createPathThumb() { } public void createStrippedThumb() { - if (photoThumbs == null || SharedConfig.getDevicePerformanceClass() != SharedConfig.PERFORMANCE_CLASS_HIGH && !hasExtendedMediaPreview()) { + if (photoThumbs == null || !canCreateStripedThubms() && !hasExtendedMediaPreview()) { return; } try { @@ -1315,7 +1323,7 @@ private void createDateArray(int accountNum, TLRPC.TL_channelAdminLogEvent event dateMsg.id = 0; dateMsg.date = event.date; MessageObject dateObj = new MessageObject(accountNum, dateMsg, false, false); - dateObj.type = 10; + dateObj.type = TYPE_DATE; dateObj.contentType = 1; dateObj.isDateObject = true; if (addToEnd) { @@ -1340,7 +1348,7 @@ private void checkEmojiOnly(Integer emojiOnly) { AnimatedEmojiSpan[] aspans = ((Spannable) messageText).getSpans(0, messageText.length(), AnimatedEmojiSpan.class); emojiOnlyCount = Math.max(emojiOnly, (spans == null ? 0 : spans.length) + (aspans == null ? 0 : aspans.length)); totalAnimatedEmojiCount = aspans == null ? 0 : aspans.length; - int animatedEmojiCount = 0; + animatedEmojiCount = 0; if (aspans != null) { for (int i = 0; i < aspans.length; ++i) { if (!aspans[i].standard) { @@ -2373,6 +2381,7 @@ public MessageObject(int accountNum, TLRPC.TL_channelAdminLogEvent event, ArrayL messageOwner.peer_id.channel_id = chat.id; messageOwner.unread = false; MediaController mediaController = MediaController.getInstance(); + isOutOwnerCached = null; if (message instanceof TLRPC.TL_messageEmpty) { message = null; @@ -2432,7 +2441,7 @@ public MessageObject(int accountNum, TLRPC.TL_channelAdminLogEvent event, ArrayL int[] emojiOnly = allowsBigEmoji() ? 
new int[1] : null; messageText = Emoji.replaceEmoji(messageText, paint.getFontMetricsInt(), AndroidUtilities.dp(20), false, emojiOnly); - messageText = replaceAnimatedEmoji(messageText, messageOwner.entities, paint.getFontMetricsInt()); + messageText = replaceAnimatedEmoji(messageText, paint.getFontMetricsInt()); if (emojiOnly != null && emojiOnly[0] > 1) { replaceEmojiToLottieFrame(messageText, emojiOnly); } @@ -2528,7 +2537,38 @@ private String getUserName(TLObject object, ArrayList entit return name; } + public boolean updateTranslation() { + return updateTranslation(false); + } + + public boolean translated = false; + public boolean updateTranslation(boolean force) { + boolean replyUpdated = replyMessageObject != null && replyMessageObject.updateTranslation(force); + if ( + TranslateController.isTranslatable(this) && + MessagesController.getInstance(currentAccount).getTranslateController().isTranslatingDialog(getDialogId()) && + messageOwner != null && + messageOwner.translatedText != null && + TextUtils.equals(MessagesController.getInstance(currentAccount).getTranslateController().getDialogTranslateTo(getDialogId()), messageOwner.translatedToLanguage) + ) { + if (translated) { + return replyUpdated || false; + } + translated = true; + applyNewText(messageOwner.translatedText.text); + generateCaption(); + return replyUpdated || true; + } else if (messageOwner != null && (force || translated)) { + translated = false; + applyNewText(messageOwner.message); + generateCaption(); + return replyUpdated || true; + } + return replyUpdated || false; + } + public void applyNewText() { + translated = false; applyNewText(messageOwner.message); } @@ -2543,6 +2583,7 @@ public void applyNewText(CharSequence text) { } messageText = text; + ArrayList entities = translated && messageOwner.translatedText != null ? messageOwner.translatedText.entities : messageOwner.entities; TextPaint paint; if (getMedia(messageOwner) instanceof TLRPC.TL_messageMediaGame) { paint = Theme.chat_msgGameTextPaint; @@ -2551,7 +2592,7 @@ public void applyNewText(CharSequence text) { } int[] emojiOnly = allowsBigEmoji() ? 
new int[1] : null; messageText = Emoji.replaceEmoji(messageText, paint.getFontMetricsInt(), AndroidUtilities.dp(20), false, emojiOnly); - messageText = replaceAnimatedEmoji(messageText, messageOwner.entities, paint.getFontMetricsInt()); + messageText = replaceAnimatedEmoji(messageText, entities, paint.getFontMetricsInt()); if (emojiOnly != null && emojiOnly[0] > 1) { replaceEmojiToLottieFrame(messageText, emojiOnly); } @@ -2687,7 +2728,7 @@ public void generatePinMessageText(TLRPC.User fromUser, TLRPC.Chat chat) { } mess = Emoji.replaceEmoji(mess, Theme.chat_msgTextPaint.getFontMetricsInt(), AndroidUtilities.dp(20), false); if (replyMessageObject != null && replyMessageObject.messageOwner != null) { - mess = replaceAnimatedEmoji(mess, replyMessageObject.messageOwner.entities, Theme.chat_msgTextPaint.getFontMetricsInt()); + mess = replyMessageObject.replaceAnimatedEmoji(mess, Theme.chat_msgTextPaint.getFontMetricsInt()); } MediaDataController.addTextStyleRuns(replyMessageObject, (Spannable) mess); if (ellipsize) { @@ -3064,24 +3105,6 @@ public void measureInlineBotButtons() { } wantedBotKeyboardWidth = Math.max(wantedBotKeyboardWidth, (maxButtonSize + AndroidUtilities.dp(12)) * size + AndroidUtilities.dp(5) * (size - 1)); } - } else if (messageOwner.reactions != null) { - int size = messageOwner.reactions.results.size(); - for (int a = 0; a < size; a++) { - TLRPC.ReactionCount reactionCount = messageOwner.reactions.results.get(a); - int maxButtonSize = 0; - botButtonsLayout.append(0).append(a); - CharSequence text = Emoji.replaceEmoji(String.format("%d %s", reactionCount.count, reactionCount.reaction), Theme.chat_msgBotButtonPaint.getFontMetricsInt(), AndroidUtilities.dp(15), false); - StaticLayout staticLayout = new StaticLayout(text, Theme.chat_msgBotButtonPaint, AndroidUtilities.dp(2000), Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, false); - if (staticLayout.getLineCount() > 0) { - float width = staticLayout.getLineWidth(0); - float left = staticLayout.getLineLeft(0); - if (left < width) { - width -= left; - } - maxButtonSize = Math.max(maxButtonSize, (int) Math.ceil(width) + AndroidUtilities.dp(4)); - } - wantedBotKeyboardWidth = Math.max(wantedBotKeyboardWidth, (maxButtonSize + AndroidUtilities.dp(12)) * size + AndroidUtilities.dp(5) * (size - 1)); - } } } @@ -3392,8 +3415,43 @@ private void updateMessageText(AbstractMap users, AbstractMap< messageText = LocaleController.formatString("MessageLifetimeRemoved", R.string.MessageLifetimeRemoved, UserObject.getFirstName(fromUser)); } } - } else if (messageOwner.action instanceof TLRPC.TL_messageActionAttachMenuBotAllowed) { + } else if (messageOwner.action instanceof TLRPC.TL_messageActionAttachMenuBotAllowed || messageOwner.action instanceof TLRPC.TL_messageActionBotAllowed && ((TLRPC.TL_messageActionBotAllowed) messageOwner.action).attach_menu) { messageText = LocaleController.getString(R.string.ActionAttachMenuBotAllowed); + } else if (messageOwner.action instanceof TLRPC.TL_messageActionRequestedPeer) { + TLRPC.Peer peer = ((TLRPC.TL_messageActionRequestedPeer) messageOwner.action).peer; + TLObject peerObject = null; + if (peer instanceof TLRPC.TL_peerUser) { + peerObject = MessagesController.getInstance(currentAccount).getUser(peer.user_id); + if (peerObject == null) { + peerObject = getUser(users, sUsers, peer.user_id); + } + } else if (peer instanceof TLRPC.TL_peerChat) { + peerObject = MessagesController.getInstance(currentAccount).getChat(peer.chat_id); + if (peerObject == null) { + peerObject = getChat(chats, sChats, peer.chat_id); 
+ } + } else if (peer instanceof TLRPC.TL_peerChannel) { + peerObject = MessagesController.getInstance(currentAccount).getChat(peer.channel_id); + if (peerObject == null) { + peerObject = getChat(chats, sChats, peer.channel_id); + } + } + TLRPC.User bot = MessagesController.getInstance(currentAccount).getUser(getDialogId()); + if (bot == null) { + bot = getUser(users, sUsers, getDialogId()); + } + if (peerObject == null) { + if (peer instanceof TLRPC.TL_peerUser) { + messageText = LocaleController.getString(R.string.ActionRequestedPeerUser); + } else if (peer instanceof TLRPC.TL_peerChat) { + messageText = LocaleController.getString(R.string.ActionRequestedPeerChat); + } else { + messageText = LocaleController.getString(R.string.ActionRequestedPeerChannel); + } + } else { + messageText = replaceWithLink(LocaleController.getString(R.string.ActionRequestedPeer), "un1", peerObject); + } + messageText = replaceWithLink(messageText, "un2", bot); } else if (messageOwner.action instanceof TLRPC.TL_messageActionSetMessagesTTL) { TLRPC.TL_messageActionSetMessagesTTL action = (TLRPC.TL_messageActionSetMessagesTTL) messageOwner.action; TLRPC.Chat chat = messageOwner.peer_id != null && messageOwner.peer_id.channel_id != 0 ? getChat(chats, sChats, messageOwner.peer_id.channel_id) : null; @@ -3846,7 +3904,7 @@ public void setType() { } else { type = TYPE_ANIMATED_STICKER; } - } else if (!isDice() && emojiOnlyCount >= 1 && !hasUnwrappedEmoji) { + } else if (isMediaEmpty() && !isDice() && emojiOnlyCount >= 1 && !hasUnwrappedEmoji) { type = TYPE_EMOJIS; } else if (isMediaEmpty()) { type = TYPE_TEXT; @@ -3857,7 +3915,7 @@ public void setType() { type = TYPE_EXTENDED_MEDIA_PREVIEW; } else if (getMedia(messageOwner).ttl_seconds != 0 && (getMedia(messageOwner).photo instanceof TLRPC.TL_photoEmpty || getDocument() instanceof TLRPC.TL_documentEmpty)) { contentType = 1; - type = 10; + type = TYPE_DATE; } else if (getMedia(messageOwner) instanceof TLRPC.TL_messageMediaDice) { type = TYPE_ANIMATED_STICKER; if (getMedia(messageOwner).document == null) { @@ -3928,7 +3986,7 @@ public void setType() { } else if (messageOwner.action instanceof TLRPC.TL_messageEncryptedAction) { if (messageOwner.action.encryptedAction instanceof TLRPC.TL_decryptedMessageActionScreenshotMessages || messageOwner.action.encryptedAction instanceof TLRPC.TL_decryptedMessageActionSetMessageTTL) { contentType = 1; - type = 10; + type = TYPE_DATE; } else { contentType = -1; type = -1; @@ -3940,7 +3998,7 @@ public void setType() { type = TYPE_PHONE_CALL; } else { contentType = 1; - type = 10; + type = TYPE_DATE; } } if (oldType != 1000 && oldType != type && type != TYPE_EMOJIS) { @@ -3973,7 +4031,7 @@ public boolean checkLayout() { } int[] emojiOnly = allowsBigEmoji() ? 
new int[1] : null; messageText = Emoji.replaceEmoji(messageText, paint.getFontMetricsInt(), AndroidUtilities.dp(20), false, emojiOnly); - messageText = replaceAnimatedEmoji(messageText, messageOwner.entities, paint.getFontMetricsInt()); + messageText = replaceAnimatedEmoji(messageText, paint.getFontMetricsInt()); if (emojiOnly != null && emojiOnly[0] > 1) { replaceEmojiToLottieFrame(messageText, emojiOnly); } @@ -4034,7 +4092,7 @@ public static boolean isDocumentHasThumb(TLRPC.Document document) { } for (int a = 0, N = document.thumbs.size(); a < N; a++) { TLRPC.PhotoSize photoSize = document.thumbs.get(a); - if (photoSize != null && !(photoSize instanceof TLRPC.TL_photoSizeEmpty) && !(photoSize.location instanceof TLRPC.TL_fileLocationUnavailable)) { + if (photoSize != null && !(photoSize instanceof TLRPC.TL_photoSizeEmpty) && (!(photoSize.location instanceof TLRPC.TL_fileLocationUnavailable) || photoSize.bytes != null)) { return true; } } @@ -4394,7 +4452,7 @@ public static String getFileName(TLRPC.Message messageOwner) { return FileLoader.getAttachFileName(sizeFull); } } - } else if (getMedia(messageOwner) instanceof TLRPC.TL_messageMediaWebPage) { + } else if (getMedia(messageOwner) instanceof TLRPC.TL_messageMediaWebPage && getMedia(messageOwner).webpage != null) { return FileLoader.getAttachFileName(getMedia(messageOwner).webpage.document); } return ""; @@ -4568,30 +4626,39 @@ public boolean isVoiceTranscriptionOpen() { ); } + private boolean captionTranslated; + public void generateCaption() { - if (caption != null || isRoundVideo()) { + if (caption != null && translated == captionTranslated || isRoundVideo()) { return; } + String text = messageOwner.message; + ArrayList entities = messageOwner.entities; if (hasExtendedMedia()) { - messageOwner.message = messageOwner.media.description; + text = messageOwner.message = messageOwner.media.description; } - if (!isMediaEmpty() && !(getMedia(messageOwner) instanceof TLRPC.TL_messageMediaGame) && StrUtil.isNotBlank(messageOwner.message) && (!messageOwner.translated || StrUtil.isNotBlank(messageOwner.translatedMessage))) { - - String msg; - if (messageOwner.translated) { - msg = messageOwner.translatedMessage; - } else { - msg = messageOwner.message; - } - - caption = Emoji.replaceEmoji(msg, Theme.chat_msgTextPaint.getFontMetricsInt(), AndroidUtilities.dp(20), false); - caption = replaceAnimatedEmoji(caption, messageOwner.entities, Theme.chat_msgTextPaint.getFontMetricsInt()); + if (captionTranslated = translated) { + text = messageOwner.translatedText.text; + entities = messageOwner.translatedText.entities; + } + if (captionTranslated = translated) { + // Official Translate + text = messageOwner.translatedText.text; + entities = messageOwner.translatedText.entities; + } else if (messageOwner.translated) { + // NekoX Translate + text = messageOwner.translatedMessage; + // keep the entities as is + } + if (!isMediaEmpty() && !(getMedia(messageOwner) instanceof TLRPC.TL_messageMediaGame) && !TextUtils.isEmpty(text)) { + caption = Emoji.replaceEmoji(text, Theme.chat_msgTextPaint.getFontMetricsInt(), AndroidUtilities.dp(20), false); + caption = replaceAnimatedEmoji(caption, Theme.chat_msgTextPaint.getFontMetricsInt()); boolean hasEntities; if (messageOwner.send_state != MESSAGE_SEND_STATE_SENT) { hasEntities = false; } else { - hasEntities = !messageOwner.entities.isEmpty(); + hasEntities = !entities.isEmpty(); } boolean useManualParse = !hasEntities && ( @@ -4855,7 +4922,17 @@ public boolean addEntitiesToText(CharSequence text, boolean 
photoViewer, boolean entities.add(entityItalic); return addEntitiesToText(text, entities, isOutOwner(), true, photoViewer, useManualParse); } else { - return addEntitiesToText(text, messageOwner.entities, isOutOwner(), true, photoViewer, useManualParse); + ArrayList entities; + if (translated) { + if (messageOwner.translatedText == null) { + entities = null; + } else { + entities = messageOwner.translatedText.entities; + } + } else { + entities = messageOwner.entities; + } + return addEntitiesToText(text, entities, isOutOwner(), true, photoViewer, useManualParse); } } @@ -4883,6 +4960,11 @@ public void replaceEmojiToLottieFrame(CharSequence text, int[] emojiOnly) { } } + public Spannable replaceAnimatedEmoji(CharSequence text, Paint.FontMetricsInt fontMetricsInt) { + ArrayList entities = translated && messageOwner.translatedText != null ? messageOwner.translatedText.entities : messageOwner.entities; + return replaceAnimatedEmoji(text, entities, fontMetricsInt, false); + } + public static Spannable replaceAnimatedEmoji(CharSequence text, ArrayList entities, Paint.FontMetricsInt fontMetricsInt) { return replaceAnimatedEmoji(text, entities, fontMetricsInt, false); } @@ -4902,8 +4984,7 @@ public static Spannable replaceAnimatedEmoji(CharSequence text, ArrayList 0; - if (entities.isEmpty()) { + if (entities == null || entities.isEmpty()) { return hasUrls; } @@ -5267,11 +5348,13 @@ public void generateLayout(TLRPC.User fromUser) { textLayoutBlocks = new ArrayList<>(); textWidth = 0; + ArrayList entities = translated && messageOwner.translatedText != null ? messageOwner.translatedText.entities : messageOwner.entities; + boolean hasEntities; if (messageOwner.send_state != MESSAGE_SEND_STATE_SENT) { hasEntities = false; } else { - hasEntities = !messageOwner.entities.isEmpty(); + hasEntities = !entities.isEmpty(); } boolean useManualParse = !hasEntities && ( @@ -5353,7 +5436,7 @@ public void generateLayout(TLRPC.User fromUser) { } textHeight = textLayout.getHeight(); - linesCount = textLayout.getLineCount(); + int linesCount = textLayout.getLineCount(); int linesPreBlock = totalAnimatedEmojiCount >= 50 ? 
LINES_PER_BLOCK_WITH_EMOJI : LINES_PER_BLOCK; int blocksCount; @@ -5493,15 +5576,13 @@ public void generateLayout(TLRPC.User fromUser) { for (int n = 0; n < currentBlockLinesCount; n++) { try { lineWidth = block.textLayout.getLineWidth(n); - } catch (Exception e) { - FileLog.e(e); + } catch (Exception ignore) { lineWidth = 0; } try { lineLeft = block.textLayout.getLineLeft(n); - } catch (Exception e) { - FileLog.e(e); + } catch (Exception ignore) { lineLeft = 0; } @@ -5563,7 +5644,7 @@ public boolean isOut() { return messageOwner.out; } - Boolean isOutOwnerCached; + public Boolean isOutOwnerCached; public boolean isOutOwner() { if (preview) { return true; @@ -6057,7 +6138,7 @@ public static boolean isAnimatedStickerDocument(TLRPC.Document document, boolean } public static boolean canAutoplayAnimatedSticker(TLRPC.Document document) { - return (isAnimatedStickerDocument(document, true) || isVideoStickerDocument(document)) && SharedConfig.getDevicePerformanceClass() != SharedConfig.PERFORMANCE_CLASS_LOW && !SharedConfig.getLiteMode().enabled(); + return (isAnimatedStickerDocument(document, true) || isVideoStickerDocument(document)) && LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_STICKERS_KEYBOARD); } public static boolean isMaskDocument(TLRPC.Document document) { @@ -6624,7 +6705,7 @@ public boolean isMask() { } public boolean isMusic() { - return isMusicMessage(messageOwner) && !isVideo(); + return isMusicMessage(messageOwner) && !isVideo() && !isRoundVideo(); } public boolean isDocument() { @@ -6765,6 +6846,9 @@ public String getMusicTitle(boolean unknown) { } public int getDuration() { + if (attributeDuration > 0) { + return attributeDuration; + } TLRPC.Document document = getDocument(); if (document == null) { return 0; @@ -6775,9 +6859,9 @@ public int getDuration() { for (int a = 0; a < document.attributes.size(); a++) { TLRPC.DocumentAttribute attribute = document.attributes.get(a); if (attribute instanceof TLRPC.TL_documentAttributeAudio) { - return attribute.duration; + return attributeDuration = attribute.duration; } else if (attribute instanceof TLRPC.TL_documentAttributeVideo) { - return attribute.duration; + return attributeDuration = attribute.duration; } } return audioPlayerDuration; @@ -6893,17 +6977,9 @@ public boolean isForwarded() { } public boolean needDrawForwarded() { - return (messageOwner.flags & TLRPC.MESSAGE_FLAG_FWD) != 0 - && messageOwner.fwd_from != null - && !messageOwner.fwd_from.imported - && ( - messageOwner.fwd_from.saved_from_peer == null - || !(messageOwner.fwd_from.from_id instanceof TLRPC.TL_peerChannel) || messageOwner.fwd_from.saved_from_peer.channel_id != messageOwner.fwd_from.from_id.channel_id - || (((messageOwner.flags & TLRPC.MESSAGE_FLAG_REPLY) == 0) && - (messageOwner.fwd_from.from_id instanceof TLRPC.TL_peerUser - || messageOwner.fwd_from.from_id == null && messageOwner.fwd_from.from_name != null)) - ) - && UserConfig.getInstance(currentAccount).getClientUserId() != getDialogId(); + return (messageOwner.flags & TLRPC.MESSAGE_FLAG_FWD) != 0 && messageOwner.fwd_from != null && !messageOwner.fwd_from.imported && ( + messageOwner.fwd_from.saved_from_peer == null || !(messageOwner.fwd_from.from_id instanceof TLRPC.TL_peerChannel) || messageOwner.fwd_from.saved_from_peer.channel_id != messageOwner.fwd_from.from_id.channel_id + ) && UserConfig.getInstance(currentAccount).getClientUserId() != getDialogId() && !UserObject.isReplyUser(messageOwner.dialog_id); } public static boolean isForwardedMessage(TLRPC.Message message) { diff --git 
a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java index 2fd54e97d3..c27f66cfdc 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java @@ -18,6 +18,7 @@ import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageManager; +import android.graphics.Paint; import android.location.Location; import android.os.Build; import android.os.Bundle; @@ -36,6 +37,7 @@ import androidx.core.util.Consumer; import org.telegram.SQLite.SQLiteCursor; +import org.telegram.SQLite.SQLiteDatabase; import org.telegram.SQLite.SQLiteException; import org.telegram.SQLite.SQLitePreparedStatement; import org.telegram.messenger.browser.Browser; @@ -60,6 +62,7 @@ import org.telegram.ui.Components.ImageUpdater; import org.telegram.ui.Components.JoinCallAlert; import org.telegram.ui.Components.MotionBackgroundDrawable; +import org.telegram.ui.Components.Premium.LimitReachedBottomSheet; import org.telegram.ui.Components.SwipeGestureSettingsView; import org.telegram.ui.Components.TranscribeButton; import org.telegram.ui.DialogsActivity; @@ -299,6 +302,88 @@ public boolean isOnline() { private Theme.OverrideWallpaperInfo uploadingWallpaperInfo; private boolean loadingAppConfig; + private Fetcher appConfigFetcher = new Fetcher() { + @Override + protected void getRemote(int currentAccount, Integer arguments, long hash, Utilities.Callback3 onResult) { + TLRPC.TL_help_getAppConfig req = new TLRPC.TL_help_getAppConfig(); + req.hash = (int) hash; + getConnectionsManager().sendRequest(req, (res, err) -> { + if (res instanceof TLRPC.TL_help_appConfigNotModified) { + onResult.run(true, null, 0L); + } else if (res instanceof TLRPC.TL_help_appConfig) { + onResult.run(false, (TLRPC.TL_help_appConfig) res, (long) ((TLRPC.TL_help_appConfig) res).hash); + } else { + FileLog.e("getting appconfig error " + (err != null ? 
err.code + " " + err.text : "")); + onResult.run(false, null, 0L); + } + }); + } + + @Override + protected void getLocal(int currentAccount, Integer arguments, Utilities.Callback2 onResult) { + getMessagesStorage().getStorageQueue().postRunnable(() -> { + SQLiteCursor cursor = null; + try { + SQLiteDatabase database = MessagesStorage.getInstance(currentAccount).getDatabase(); + if (database != null) { + TLRPC.help_AppConfig maybeResult = null; + cursor = database.queryFinalized("SELECT data FROM app_config"); + if (cursor.next()) { + NativeByteBuffer data = cursor.byteBufferValue(0); + if (data != null) { + maybeResult = TLRPC.help_AppConfig.TLdeserialize(data, data.readInt32(false), true); + data.reuse(); + } + } + + if (maybeResult instanceof TLRPC.TL_help_appConfig) { + TLRPC.TL_help_appConfig result = (TLRPC.TL_help_appConfig) maybeResult; + onResult.run((long) result.hash, result); + } else { + onResult.run(0L, null); + } + } + } catch (Exception e) { + FileLog.e(e); + onResult.run(0L, null); + } finally { + if (cursor != null) { + cursor.dispose(); + } + } + }); + } + + @Override + protected void setLocal(int currentAccount, Integer arguments, TLRPC.TL_help_appConfig data, long hash) { + MessagesStorage.getInstance(currentAccount).getStorageQueue().postRunnable(() -> { + try { + SQLiteDatabase database = MessagesStorage.getInstance(currentAccount).getDatabase(); + if (database != null) { + if (data == null) { + database.executeFast("DELETE FROM app_config").stepThis().dispose(); + } else { + SQLitePreparedStatement state = database.executeFast("REPLACE INTO app_config VALUES(?)"); + state.requery(); + NativeByteBuffer buffer = new NativeByteBuffer(data.getObjectSize()); + data.serializeToStream(buffer); + state.bindByteBuffer(1, buffer); + state.step(); + buffer.reuse(); + state.dispose(); + } + } + } catch (Exception e) { + FileLog.e(e); + } + }); + } + + @Override + protected boolean useCache(Integer arguments) { + return false; + } + }; public int thisDc; @@ -320,8 +405,8 @@ public boolean isOnline() { public int callRingTimeout; public int callConnectTimeout; public int callPacketTimeout; - public int maxPinnedDialogsCount; - public int maxFolderPinnedDialogsCount; + public int maxFolderPinnedDialogsCountDefault; + public int maxFolderPinnedDialogsCountPremium; public int mapProvider; public int availableMapProviders; public int updateCheckDelay; @@ -341,6 +426,7 @@ public boolean isOnline() { public float animatedEmojisZoom; public boolean filtersEnabled; public boolean getfileExperimentalParams; + public boolean collectDeviceStats; public boolean showFiltersTooltip; public String venueSearchBot; public String gifSearchBot; @@ -376,6 +462,8 @@ public boolean isOnline() { public int savedGifsLimitPremium; public int stickersFavedLimitDefault; public int stickersFavedLimitPremium; + public int maxPinnedDialogsCountDefault; + public int maxPinnedDialogsCountPremium; public int dialogFiltersLimitDefault; public int dialogFiltersLimitPremium; public int dialogFiltersChatsLimitDefault; @@ -420,6 +508,12 @@ public boolean isOnline() { private LongSparseArray emojiStatusUntilValues = new LongSparseArray<>(); private TopicsController topicsController; private CacheByChatsController cacheByChatsController; + private TranslateController translateController; + public boolean uploadMarkupVideo; + public boolean giftAttachMenuIcon; + public boolean giftTextFieldIcon; + + public int checkResetLangpack; public void getNextReactionMention(long dialogId, int topicId, int count, Consumer callback) 
{ final MessagesStorage messagesStorage = getMessagesStorage(); @@ -609,6 +703,10 @@ public TopicsController getTopicsController() { return topicsController; } + public TranslateController getTranslateController() { + return translateController; + } + public boolean isForum(long dialogId) { TLRPC.Chat chatLocal = getChat(-dialogId); return chatLocal != null && chatLocal.forum; @@ -1100,6 +1198,10 @@ public static MessagesController getInstance(int num) { return localInstance; } + public SharedPreferences getMainSettings() { + return mainPreferences; + } + public static SharedPreferences getNotificationsSettings(int account) { return getInstance(account).notificationsPreferences; } @@ -1148,8 +1250,8 @@ public MessagesController(int num) { mainPreferences = ApplicationLoader.applicationContext.getSharedPreferences("mainconfig" + currentAccount, Activity.MODE_PRIVATE); emojiPreferences = ApplicationLoader.applicationContext.getSharedPreferences("emoji" + currentAccount, Activity.MODE_PRIVATE); } + long time = System.currentTimeMillis(); - enableJoined = notificationsPreferences.getBoolean("EnableContactJoined", true); remoteConfigLoaded = mainPreferences.getBoolean("remoteConfigLoaded", false); secretWebpagePreview = mainPreferences.getInt("secretWebpage2", 2); maxGroupCount = mainPreferences.getInt("maxGroupCount", 200); @@ -1165,8 +1267,8 @@ public MessagesController(int num) { callConnectTimeout = mainPreferences.getInt("callConnectTimeout", 30000); callPacketTimeout = mainPreferences.getInt("callPacketTimeout", 10000); updateCheckDelay = mainPreferences.getInt("updateCheckDelay", 24 * 60 * 60); - maxPinnedDialogsCount = mainPreferences.getInt("maxPinnedDialogsCount", 5); - maxFolderPinnedDialogsCount = mainPreferences.getInt("maxFolderPinnedDialogsCount", 100); + maxFolderPinnedDialogsCountDefault = mainPreferences.getInt("maxFolderPinnedDialogsCountDefault", 100); + maxFolderPinnedDialogsCountPremium = mainPreferences.getInt("maxFolderPinnedDialogsCountPremium", 100); maxMessageLength = mainPreferences.getInt("maxMessageLength", 4096); maxCaptionLength = mainPreferences.getInt("maxCaptionLength", 1024); mapProvider = mainPreferences.getInt("mapProvider", 0); @@ -1186,16 +1288,13 @@ public MessagesController(int num) { promoPsaMessage = mainPreferences.getString("promo_psa_message", null); promoPsaType = mainPreferences.getString("promo_psa_type", null); proxyDialogAddress = mainPreferences.getString("proxyDialogAddress", null); - nextTosCheckTime = notificationsPreferences.getInt("nextTosCheckTime", 0); venueSearchBot = mainPreferences.getString("venueSearchBot", "foursquare"); gifSearchBot = mainPreferences.getString("gifSearchBot", "gif"); imageSearchBot = mainPreferences.getString("imageSearchBot", "pic"); blockedCountry = mainPreferences.getBoolean("blockedCountry", false); - dcDomainName = mainPreferences.getString("dcDomainName2", ConnectionsManager.native_isTestBackend(currentAccount) != 0 ? "tapv3.stel.com" : "apv3.stel.com"); - webFileDatacenterId = mainPreferences.getInt("webFileDatacenterId", ConnectionsManager.native_isTestBackend(currentAccount) != 0 ? 
2 : 4); suggestedLangCode = mainPreferences.getString("suggestedLangCode", "en"); animatedEmojisZoom = mainPreferences.getFloat("animatedEmojisZoom", 0.625f); - qrLoginCamera = mainPreferences.getBoolean("qrLoginCamera", false); + qrLoginCamera = mainPreferences.getBoolean("qrLoginCamera", true); saveGifsWithStickers = mainPreferences.getBoolean("saveGifsWithStickers", false); filtersEnabled = mainPreferences.getBoolean("filtersEnabled", false); getfileExperimentalParams = mainPreferences.getBoolean("getfileExperimentalParams", false); @@ -1218,6 +1317,10 @@ public MessagesController(int num) { savedGifsLimitPremium = mainPreferences.getInt("savedGifsLimitPremium", 400); stickersFavedLimitDefault = mainPreferences.getInt("stickersFavedLimitDefault", 5); stickersFavedLimitPremium = mainPreferences.getInt("stickersFavedLimitPremium", 200); + maxPinnedDialogsCountDefault = mainPreferences.getInt("maxPinnedDialogsCountDefault", 5); + maxPinnedDialogsCountPremium = mainPreferences.getInt("maxPinnedDialogsCountPremium", 5); + maxPinnedDialogsCountDefault = mainPreferences.getInt("maxPinnedDialogsCountDefault", 5); + maxPinnedDialogsCountPremium = mainPreferences.getInt("maxPinnedDialogsCountPremium", 5); dialogFiltersLimitDefault = mainPreferences.getInt("dialogFiltersLimitDefault", 10); dialogFiltersLimitPremium = mainPreferences.getInt("dialogFiltersLimitPremium", 20); dialogFiltersChatsLimitDefault = mainPreferences.getInt("dialogFiltersChatsLimitDefault", 100); @@ -1246,7 +1349,21 @@ public MessagesController(int num) { telegramAntispamUserId = mainPreferences.getLong("telegramAntispamUserId", -1); telegramAntispamGroupSizeMin = mainPreferences.getInt("telegramAntispamGroupSizeMin", 100); hiddenMembersGroupSizeMin = mainPreferences.getInt("hiddenMembersGroupSizeMin", 100); - // BuildVars.GOOGLE_AUTH_CLIENT_ID = mainPreferences.getString("googleAuthClientId", BuildVars.GOOGLE_AUTH_CLIENT_ID); + uploadMarkupVideo = mainPreferences.getBoolean("uploadMarkupVideo", true); + giftAttachMenuIcon = mainPreferences.getBoolean("giftAttachMenuIcon", false); + giftTextFieldIcon = mainPreferences.getBoolean("giftTextFieldIcon", false); + checkResetLangpack = mainPreferences.getInt("checkResetLangpack", 0); +// BuildVars.GOOGLE_AUTH_CLIENT_ID = mainPreferences.getString("googleAuthClientId", BuildVars.GOOGLE_AUTH_CLIENT_ID); + if (mainPreferences.contains("dcDomainName2")) { + dcDomainName = mainPreferences.getString("dcDomainName2", "apv3.stel.com"); + } else { + dcDomainName = ConnectionsManager.native_isTestBackend(currentAccount) != 0 ? "tapv3.stel.com" : "apv3.stel.com"; + } + if (mainPreferences.contains("webFileDatacenterId")) { + webFileDatacenterId = mainPreferences.getInt("webFileDatacenterId", 4); + } else { + webFileDatacenterId = ConnectionsManager.native_isTestBackend(currentAccount) != 0 ? 
2 : 4; + } Set currencySet = mainPreferences.getStringSet("directPaymentsCurrency", null); if (currencySet != null) { @@ -1375,19 +1492,22 @@ public MessagesController(int num) { FileLog.e(e); } } - if (BuildVars.DEBUG_VERSION) { - AndroidUtilities.runOnUIThread(() -> { - loadAppConfig(); - }, 2000); + AndroidUtilities.runOnUIThread(this::loadAppConfig, 2000); } topicsController = new TopicsController(num); cacheByChatsController = new CacheByChatsController(num); + translateController = new TranslateController(this); + + Utilities.globalQueue.postRunnable(() -> { + enableJoined = notificationsPreferences.getBoolean("EnableContactJoined", true); + nextTosCheckTime = notificationsPreferences.getInt("nextTosCheckTime", 0); + }); } - private void sendLoadPeersRequest(TLObject req, ArrayList requests, TLRPC.messages_Dialogs pinnedDialogs, TLRPC.messages_Dialogs pinnedRemoteDialogs, ArrayList users, ArrayList chats, ArrayList filtersToSave, SparseArray filtersToDelete, ArrayList filtersOrder, HashMap> filterDialogRemovals, HashMap> filterUserRemovals, HashSet filtersUnreadCounterReset) { + private void sendLoadPeersRequest(TLObject req, ArrayList requests, TLRPC.messages_Dialogs pinnedDialogs, TLRPC.messages_Dialogs pinnedRemoteDialogs, ArrayList users, ArrayList chats, ArrayList filtersToSave, SparseArray filtersToDelete, ArrayList filtersOrder, HashMap> filterDialogRemovals, HashSet filtersUnreadCounterReset) { getConnectionsManager().sendRequest(req, (response, error) -> { if (response instanceof TLRPC.TL_messages_chats) { TLRPC.TL_messages_chats res = (TLRPC.TL_messages_chats) response; @@ -1409,12 +1529,12 @@ private void sendLoadPeersRequest(TLObject req, ArrayList requests, TL } requests.remove(req); if (requests.isEmpty()) { - getMessagesStorage().processLoadedFilterPeers(pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filterUserRemovals, filtersUnreadCounterReset); + getMessagesStorage().processLoadedFilterPeers(pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filtersUnreadCounterReset); } }); } - protected void loadFilterPeers(HashMap dialogsToLoadMap, HashMap usersToLoadMap, HashMap chatsToLoadMap, TLRPC.messages_Dialogs pinnedDialogs, TLRPC.messages_Dialogs pinnedRemoteDialogs, ArrayList users, ArrayList chats, ArrayList filtersToSave, SparseArray filtersToDelete, ArrayList filtersOrder, HashMap> filterDialogRemovals, HashMap> filterUserRemovals, HashSet filtersUnreadCounterReset) { + protected void loadFilterPeers(HashMap dialogsToLoadMap, HashMap usersToLoadMap, HashMap chatsToLoadMap, TLRPC.messages_Dialogs pinnedDialogs, TLRPC.messages_Dialogs pinnedRemoteDialogs, ArrayList users, ArrayList chats, ArrayList filtersToSave, SparseArray filtersToDelete, ArrayList filtersOrder, HashMap> filterDialogRemovals, HashSet filtersUnreadCounterReset) { Utilities.stageQueue.postRunnable(() -> { ArrayList requests = new ArrayList<>(); TLRPC.TL_users_getUsers req = null; @@ -1425,12 +1545,12 @@ protected void loadFilterPeers(HashMap dialogsToLoadMap, } req.id.add(getInputUser(entry.getValue())); if (req.id.size() == 100) { - sendLoadPeersRequest(req, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filterUserRemovals, filtersUnreadCounterReset); + sendLoadPeersRequest(req, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, 
filterDialogRemovals, filtersUnreadCounterReset); req = null; } } if (req != null) { - sendLoadPeersRequest(req, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filterUserRemovals, filtersUnreadCounterReset); + sendLoadPeersRequest(req, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filtersUnreadCounterReset); } TLRPC.TL_messages_getChats req2 = null; TLRPC.TL_channels_getChannels req3 = null; @@ -1443,7 +1563,7 @@ protected void loadFilterPeers(HashMap dialogsToLoadMap, } req2.id.add(entry.getKey()); if (req2.id.size() == 100) { - sendLoadPeersRequest(req2, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filterUserRemovals, filtersUnreadCounterReset); + sendLoadPeersRequest(req2, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filtersUnreadCounterReset); req2 = null; } } else if (inputPeer.channel_id != 0) { @@ -1453,16 +1573,16 @@ protected void loadFilterPeers(HashMap dialogsToLoadMap, } req3.id.add(getInputChannel(inputPeer)); if (req3.id.size() == 100) { - sendLoadPeersRequest(req3, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filterUserRemovals, filtersUnreadCounterReset); + sendLoadPeersRequest(req3, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filtersUnreadCounterReset); req3 = null; } } } if (req2 != null) { - sendLoadPeersRequest(req2, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filterUserRemovals, filtersUnreadCounterReset); + sendLoadPeersRequest(req2, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filtersUnreadCounterReset); } if (req3 != null) { - sendLoadPeersRequest(req3, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filterUserRemovals, filtersUnreadCounterReset); + sendLoadPeersRequest(req3, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filtersUnreadCounterReset); } TLRPC.TL_messages_getPeerDialogs req4 = null; @@ -1475,12 +1595,12 @@ protected void loadFilterPeers(HashMap dialogsToLoadMap, inputDialogPeer.peer = entry.getValue(); req4.peers.add(inputDialogPeer); if (req4.peers.size() == 100) { - sendLoadPeersRequest(req4, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filterUserRemovals, filtersUnreadCounterReset); + sendLoadPeersRequest(req4, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filtersUnreadCounterReset); req4 = null; } } if (req4 != null) { - sendLoadPeersRequest(req4, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filterUserRemovals, filtersUnreadCounterReset); + sendLoadPeersRequest(req4, requests, pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, 
filtersUnreadCounterReset); } }); } @@ -1692,6 +1812,7 @@ protected void processLoadedDialogFilters(ArrayList filters, TLRPC } } } + getTranslateController().checkDialogMessages(key); } else { currentDialog.pinned = newDialog.pinned; currentDialog.pinnedNum = newDialog.pinnedNum; @@ -1739,6 +1860,7 @@ protected void processLoadedDialogFilters(ArrayList filters, TLRPC } } } + getTranslateController().checkDialogMessages(key); } } else { // if (newMsg == null || newMsg.messageOwner.date > oldMsg.messageOwner.date) { @@ -1776,6 +1898,7 @@ protected void processLoadedDialogFilters(ArrayList filters, TLRPC } } } + getTranslateController().checkDialogMessages(key); } } } @@ -1848,6 +1971,41 @@ public void loadRemoteFilters(boolean force) { }); } + private boolean loggedStorageDir; + public void logStorageDir() { + if (collectDeviceStats && !loggedStorageDir) { + ArrayList storageDirs = AndroidUtilities.getRootDirs(); + if (storageDirs.isEmpty()) { + return; + } + + String dir = storageDirs.get(0).getAbsolutePath(); + if (!TextUtils.isEmpty(SharedConfig.storageCacheDir)) { + for (int a = 0, N = storageDirs.size(); a < N; a++) { + String path = storageDirs.get(a).getAbsolutePath(); + if (path.startsWith(SharedConfig.storageCacheDir)) { + dir = path; + break; + } + } + } + final boolean value = dir.contains("/storage/emulated/"); + + TLRPC.TL_help_saveAppLog req = new TLRPC.TL_help_saveAppLog(); + TLRPC.TL_inputAppEvent event = new TLRPC.TL_inputAppEvent(); + event.time = getConnectionsManager().getCurrentTime(); + event.type = "android_sdcard_exists"; + TLRPC.TL_jsonBool bool = new TLRPC.TL_jsonBool(); + bool.value = value; + event.data = bool; + event.peer = value ? 1 : 0; + req.events.add(event); + + getConnectionsManager().sendRequest(req, (response, error) -> {}); + loggedStorageDir = true; + } + } + public void selectDialogFilter(DialogFilter filter, int index) { if (selectedDialogFilter[index] == filter) { return; @@ -1903,28 +2061,97 @@ public void addFilter(DialogFilter filter, boolean atBegin) { lockFiltersInternal(); } + public void updateEmojiStatus(TLRPC.EmojiStatus newStatus) { + TLRPC.TL_account_updateEmojiStatus req = new TLRPC.TL_account_updateEmojiStatus(); + req.emoji_status = newStatus; + TLRPC.User user = getUserConfig().getCurrentUser(); + if (user != null) { + user.emoji_status = req.emoji_status; + getNotificationCenter().postNotificationName(NotificationCenter.userEmojiStatusUpdated, user); + getMessagesController().updateEmojiStatusUntilUpdate(user.id, user.emoji_status); + } + getConnectionsManager().sendRequest(req, (res, err) -> { + if (!(res instanceof TLRPC.TL_boolTrue)) { + // TODO: reject + } + }); + } + public void removeFilter(DialogFilter filter) { dialogFilters.remove(filter); dialogFiltersById.remove(filter.id); getNotificationCenter().postNotificationName(NotificationCenter.dialogFiltersUpdated); } - private void loadAppConfig() { + public void loadAppConfig() { if (loadingAppConfig) { return; } loadingAppConfig = true; - TLRPC.TL_help_getAppConfig req = new TLRPC.TL_help_getAppConfig(); - getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { - if (response instanceof TLRPC.TL_jsonObject) { - SharedPreferences.Editor editor = mainPreferences.edit(); - boolean changed = false; - boolean keelAliveChanged = false; - resetAppConfig(); - TLRPC.TL_jsonObject object = (TLRPC.TL_jsonObject) response; - for (int a = 0, N = object.value.size(); a < N; a++) { - TLRPC.TL_jsonObjectValue value = object.value.get(a); - switch 
(value.key) { + appConfigFetcher.fetch(currentAccount, 0, config -> AndroidUtilities.runOnUIThread(() -> { + if (config != null && config.config instanceof TLRPC.TL_jsonObject) { + applyAppConfig((TLRPC.TL_jsonObject) config.config); + } + loadingAppConfig = false; + })); + } + + private void applyAppConfig(TLRPC.TL_jsonObject object) { + SharedPreferences.Editor editor = mainPreferences.edit(); + boolean changed = false; + boolean keelAliveChanged = false; + resetAppConfig(); + TLRPC.TL_jsonObject liteAppOptions = null; + for (int a = 0, N = object.value.size(); a < N; a++) { + TLRPC.TL_jsonObjectValue value = object.value.get(a); + switch (value.key) { + case "premium_gift_text_field_icon": { + if (value.value instanceof TLRPC.TL_jsonBool) { + if (giftTextFieldIcon != ((TLRPC.TL_jsonBool) value.value).value) { + giftTextFieldIcon = ((TLRPC.TL_jsonBool) value.value).value; + editor.putBoolean("giftTextFieldIcon", giftTextFieldIcon); + changed = true; + + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.didUpdatePremiumGiftFieldIcon); + } + } + break; + } + case "premium_gift_attach_menu_icon": { + if (value.value instanceof TLRPC.TL_jsonBool) { + if (giftAttachMenuIcon != ((TLRPC.TL_jsonBool) value.value).value) { + giftAttachMenuIcon = ((TLRPC.TL_jsonBool) value.value).value; + editor.putBoolean("giftAttachMenuIcon", giftAttachMenuIcon); + changed = true; + } + } + break; + } + case "lite_app_options": { + if (value.value instanceof TLRPC.TL_jsonObject) { + liteAppOptions = (TLRPC.TL_jsonObject) value.value; + } + break; + } + case "lite_device_class": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + int performanceClass = (int) ((TLRPC.TL_jsonNumber) value.value).value; + if (performanceClass > 0) { + SharedConfig.overrideDevicePerformanceClass(performanceClass - 1); + } + } + break; + } + case "upload_markup_video": { + if (value.value instanceof TLRPC.TL_jsonBool) { + if (uploadMarkupVideo != ((TLRPC.TL_jsonBool) value.value).value) { + uploadMarkupVideo = ((TLRPC.TL_jsonBool) value.value).value; + editor.putBoolean("uploadMarkupVideo", uploadMarkupVideo); + changed = true; + } + } + break; + } case "login_google_oauth_client_id": { if (value.value instanceof TLRPC.TL_jsonString) { String str = ((TLRPC.TL_jsonString) value.value).value; @@ -1948,897 +2175,940 @@ private void loadAppConfig() { } } - if (!(directPaymentsCurrency.containsAll(currencySet) && currencySet.containsAll(directPaymentsCurrency))) { - directPaymentsCurrency.clear(); - directPaymentsCurrency.addAll(currencySet); - editor.putStringSet("directPaymentsCurrency", currencySet); - changed = true; - - NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.billingProductDetailsUpdated); - } - } - break; + if (!(directPaymentsCurrency.containsAll(currencySet) && currencySet.containsAll(directPaymentsCurrency))) { + directPaymentsCurrency.clear(); + directPaymentsCurrency.addAll(currencySet); + editor.putStringSet("directPaymentsCurrency", currencySet); + changed = true; + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.billingProductDetailsUpdated); } - case "premium_purchase_blocked": { - if (value.value instanceof TLRPC.TL_jsonBool) { - if (premiumLocked != ((TLRPC.TL_jsonBool) value.value).value) { - premiumLocked = ((TLRPC.TL_jsonBool) value.value).value; - if (NekoConfig.localPremium.Bool()) - premiumLocked = false; - editor.putBoolean("premiumLocked", premiumLocked); - changed = true; - } - } - break; + } + break; + } 
+ case "premium_purchase_blocked": { + if (value.value instanceof TLRPC.TL_jsonBool) { + if (premiumLocked != ((TLRPC.TL_jsonBool) value.value).value) { + premiumLocked = ((TLRPC.TL_jsonBool) value.value).value; + editor.putBoolean("premiumLocked", premiumLocked); + changed = true; } - case "premium_bot_username": { - if (value.value instanceof TLRPC.TL_jsonString) { - String string = ((TLRPC.TL_jsonString) value.value).value; - if (!string.equals(premiumBotUsername)) { - premiumBotUsername = string; - editor.putString("premiumBotUsername", premiumBotUsername); - changed = true; - } - } - break; + } + break; + } + case "premium_bot_username": { + if (value.value instanceof TLRPC.TL_jsonString) { + String string = ((TLRPC.TL_jsonString) value.value).value; + if (!string.equals(premiumBotUsername)) { + premiumBotUsername = string; + editor.putString("premiumBotUsername", premiumBotUsername); + changed = true; } - case "premium_invoice_slug": { - if (value.value instanceof TLRPC.TL_jsonString) { - String string = ((TLRPC.TL_jsonString) value.value).value; - if (!string.equals(premiumInvoiceSlug)) { - premiumInvoiceSlug = string; - editor.putString("premiumInvoiceSlug", premiumInvoiceSlug); - changed = true; - } - } - break; + } + break; + } + case "premium_invoice_slug": { + if (value.value instanceof TLRPC.TL_jsonString) { + String string = ((TLRPC.TL_jsonString) value.value).value; + if (!string.equals(premiumInvoiceSlug)) { + premiumInvoiceSlug = string; + editor.putString("premiumInvoiceSlug", premiumInvoiceSlug); + changed = true; } - case "premium_promo_order": { - if (value.value instanceof TLRPC.TL_jsonArray) { - TLRPC.TL_jsonArray order = (TLRPC.TL_jsonArray) value.value; - changed = savePremiumFeaturesPreviewOrder(editor, order.value); - } - break; + } + break; + } + case "premium_promo_order": { + if (value.value instanceof TLRPC.TL_jsonArray) { + TLRPC.TL_jsonArray order = (TLRPC.TL_jsonArray) value.value; + changed = savePremiumFeaturesPreviewOrder(editor, order.value); + } + break; + } + case "emojies_animated_zoom": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (animatedEmojisZoom != number.value) { + animatedEmojisZoom = (float) number.value; + editor.putFloat("animatedEmojisZoom", animatedEmojisZoom); + changed = true; } - case "emojies_animated_zoom": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (animatedEmojisZoom != number.value) { - animatedEmojisZoom = (float) number.value; - editor.putFloat("animatedEmojisZoom", animatedEmojisZoom); - changed = true; - } - } - break; + } + break; + } + case "getfile_experimental_params": { + if (value.value instanceof TLRPC.TL_jsonBool) { + TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; + if (bool.value != getfileExperimentalParams) { + getfileExperimentalParams = bool.value; + editor.putBoolean("getfileExperimentalParams", getfileExperimentalParams); + changed = true; } - case "getfile_experimental_params": { - if (value.value instanceof TLRPC.TL_jsonBool) { - TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; - if (bool.value != getfileExperimentalParams) { - getfileExperimentalParams = bool.value; - editor.putBoolean("getfileExperimentalParams", getfileExperimentalParams); - changed = true; - } - } - break; + } + break; + } + case "dialog_filters_enabled": { + if (value.value instanceof TLRPC.TL_jsonBool) { + TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) 
value.value; + if (bool.value != filtersEnabled) { + filtersEnabled = bool.value; + editor.putBoolean("filtersEnabled", filtersEnabled); + changed = true; } - case "dialog_filters_enabled": { - if (value.value instanceof TLRPC.TL_jsonBool) { - TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; - if (bool.value != filtersEnabled) { - filtersEnabled = bool.value; - editor.putBoolean("filtersEnabled", filtersEnabled); - changed = true; - } - } - break; + } + break; + } + case "dialog_filters_tooltip": { + if (value.value instanceof TLRPC.TL_jsonBool) { + TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; + if (bool.value != showFiltersTooltip) { + showFiltersTooltip = bool.value; + editor.putBoolean("showFiltersTooltip", showFiltersTooltip); + changed = true; + getNotificationCenter().postNotificationName(NotificationCenter.filterSettingsUpdated); } - case "dialog_filters_tooltip": { - if (value.value instanceof TLRPC.TL_jsonBool) { - TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; - if (bool.value != showFiltersTooltip) { - showFiltersTooltip = bool.value; - editor.putBoolean("showFiltersTooltip", showFiltersTooltip); - changed = true; - getNotificationCenter().postNotificationName(NotificationCenter.filterSettingsUpdated); - } - } - break; + } + break; + } + case "youtube_pip": { + if (value.value instanceof TLRPC.TL_jsonString) { + TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) value.value; + if (!string.value.equals(youtubePipType)) { + youtubePipType = string.value; + editor.putString("youtubePipType", youtubePipType); + changed = true; } - case "youtube_pip": { - if (value.value instanceof TLRPC.TL_jsonString) { - TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) value.value; - if (!string.value.equals(youtubePipType)) { - youtubePipType = string.value; - editor.putString("youtubePipType", youtubePipType); - changed = true; - } - } - break; + } + break; + } + case "background_connection": { + if (value.value instanceof TLRPC.TL_jsonBool) { + TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; + if (bool.value != backgroundConnection) { + backgroundConnection = bool.value; + editor.putBoolean("backgroundConnection", backgroundConnection); + changed = true; + keelAliveChanged = true; } - case "background_connection": { - if (value.value instanceof TLRPC.TL_jsonBool) { - TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; - if (bool.value != backgroundConnection) { - backgroundConnection = bool.value; - editor.putBoolean("backgroundConnection", backgroundConnection); - changed = true; - keelAliveChanged = true; - } - } - break; + } + break; + } + case "keep_alive_service": { + if (value.value instanceof TLRPC.TL_jsonBool) { + TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; + if (bool.value != keepAliveService) { + keepAliveService = bool.value; + editor.putBoolean("keepAliveService", keepAliveService); + changed = true; + keelAliveChanged = true; } - case "keep_alive_service": { - if (value.value instanceof TLRPC.TL_jsonBool) { - TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; - if (bool.value != keepAliveService) { - keepAliveService = bool.value; - editor.putBoolean("keepAliveService", keepAliveService); - changed = true; - keelAliveChanged = true; - } - } - break; + } + break; + } + case "qr_login_camera": { + if (value.value instanceof TLRPC.TL_jsonBool) { + TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; + if (bool.value != qrLoginCamera) { + qrLoginCamera = bool.value; + editor.putBoolean("qrLoginCamera", 
qrLoginCamera); + changed = true; } - case "qr_login_camera": { - if (value.value instanceof TLRPC.TL_jsonBool) { - TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; - if (bool.value != qrLoginCamera) { - qrLoginCamera = bool.value; - editor.putBoolean("qrLoginCamera", qrLoginCamera); - changed = true; - } - } - break; + } + break; + } + case "save_gifs_with_stickers": { + if (value.value instanceof TLRPC.TL_jsonBool) { + TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; + if (bool.value != saveGifsWithStickers) { + saveGifsWithStickers = bool.value; + editor.putBoolean("saveGifsWithStickers", saveGifsWithStickers); + changed = true; } - case "save_gifs_with_stickers": { - if (value.value instanceof TLRPC.TL_jsonBool) { - TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; - if (bool.value != saveGifsWithStickers) { - saveGifsWithStickers = bool.value; - editor.putBoolean("saveGifsWithStickers", saveGifsWithStickers); - changed = true; - } + } + break; + } + case "url_auth_domains": { + HashSet newDomains = new HashSet<>(); + if (value.value instanceof TLRPC.TL_jsonArray) { + TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; + for (int b = 0, N2 = array.value.size(); b < N2; b++) { + TLRPC.JSONValue val = array.value.get(b); + if (val instanceof TLRPC.TL_jsonString) { + TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; + newDomains.add(string.value); } - break; } - case "url_auth_domains": { - HashSet newDomains = new HashSet<>(); - if (value.value instanceof TLRPC.TL_jsonArray) { - TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; - for (int b = 0, N2 = array.value.size(); b < N2; b++) { - TLRPC.JSONValue val = array.value.get(b); - if (val instanceof TLRPC.TL_jsonString) { - TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; - newDomains.add(string.value); - } - } - } - if (!authDomains.equals(newDomains)) { - authDomains = newDomains; - editor.putStringSet("authDomains", authDomains); - changed = true; + } + if (!authDomains.equals(newDomains)) { + authDomains = newDomains; + editor.putStringSet("authDomains", authDomains); + changed = true; + } + break; + } + case "autologin_domains": { + HashSet newDomains = new HashSet<>(); + if (value.value instanceof TLRPC.TL_jsonArray) { + TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; + for (int b = 0, N2 = array.value.size(); b < N2; b++) { + TLRPC.JSONValue val = array.value.get(b); + if (val instanceof TLRPC.TL_jsonString) { + TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; + newDomains.add(string.value); } - break; } - case "autologin_domains": { - HashSet newDomains = new HashSet<>(); - if (value.value instanceof TLRPC.TL_jsonArray) { - TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; - for (int b = 0, N2 = array.value.size(); b < N2; b++) { - TLRPC.JSONValue val = array.value.get(b); - if (val instanceof TLRPC.TL_jsonString) { - TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; - newDomains.add(string.value); - } - } - } - if (!autologinDomains.equals(newDomains)) { - autologinDomains = newDomains; - editor.putStringSet("autologinDomains", autologinDomains); - changed = true; + } + if (!autologinDomains.equals(newDomains)) { + autologinDomains = newDomains; + editor.putStringSet("autologinDomains", autologinDomains); + changed = true; + } + break; + } + case "autologin_token": { + if (value.value instanceof TLRPC.TL_jsonString) { + TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) value.value; + if (!string.value.equals(autologinToken)) { + 
autologinToken = string.value; + editor.putString("autologinToken", autologinToken); + changed = true; + } + } + break; + } + case "emojies_send_dice": { + HashSet newEmojies = new HashSet<>(); + if (value.value instanceof TLRPC.TL_jsonArray) { + TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; + for (int b = 0, N2 = array.value.size(); b < N2; b++) { + TLRPC.JSONValue val = array.value.get(b); + if (val instanceof TLRPC.TL_jsonString) { + TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; + newEmojies.add(string.value.replace("\uFE0F", "")); } - break; } - case "autologin_token": { - if (value.value instanceof TLRPC.TL_jsonString) { - TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) value.value; - if (!string.value.equals(autologinToken)) { - autologinToken = string.value; - editor.putString("autologinToken", autologinToken); - changed = true; - } + } + if (!diceEmojies.equals(newEmojies)) { + diceEmojies = newEmojies; + editor.putStringSet("diceEmojies", diceEmojies); + changed = true; + } + break; + } + case "gif_search_emojies": { + ArrayList newEmojies = new ArrayList<>(); + if (value.value instanceof TLRPC.TL_jsonArray) { + TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; + for (int b = 0, N2 = array.value.size(); b < N2; b++) { + TLRPC.JSONValue val = array.value.get(b); + if (val instanceof TLRPC.TL_jsonString) { + TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; + newEmojies.add(string.value.replace("\uFE0F", "")); } - break; } - case "emojies_send_dice": { - HashSet newEmojies = new HashSet<>(); - if (value.value instanceof TLRPC.TL_jsonArray) { - TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; - for (int b = 0, N2 = array.value.size(); b < N2; b++) { - TLRPC.JSONValue val = array.value.get(b); - if (val instanceof TLRPC.TL_jsonString) { - TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; - newEmojies.add(string.value.replace("\uFE0F", "")); + } + if (!gifSearchEmojies.equals(newEmojies)) { + gifSearchEmojies = newEmojies; + SerializedData serializedData = new SerializedData(); + serializedData.writeInt32(gifSearchEmojies.size()); + for (int b = 0, N2 = gifSearchEmojies.size(); b < N2; b++) { + serializedData.writeString(gifSearchEmojies.get(b)); + } + editor.putString("gifSearchEmojies", Base64.encodeToString(serializedData.toByteArray(), Base64.DEFAULT)); + serializedData.cleanup(); + changed = true; + } + break; + } + case "emojies_send_dice_success": { + try { + HashMap newEmojies = new HashMap<>(); + if (value.value instanceof TLRPC.TL_jsonObject) { + TLRPC.TL_jsonObject jsonObject = (TLRPC.TL_jsonObject) value.value; + for (int b = 0, N2 = jsonObject.value.size(); b < N2; b++) { + TLRPC.TL_jsonObjectValue val = jsonObject.value.get(b); + if (val.value instanceof TLRPC.TL_jsonObject) { + TLRPC.TL_jsonObject jsonObject2 = (TLRPC.TL_jsonObject) val.value; + int n = Integer.MAX_VALUE; + int f = Integer.MAX_VALUE; + for (int c = 0, N3 = jsonObject2.value.size(); c < N3; c++) { + TLRPC.TL_jsonObjectValue val2 = jsonObject2.value.get(c); + if (val2.value instanceof TLRPC.TL_jsonNumber) { + if ("value".equals(val2.key)) { + n = (int) ((TLRPC.TL_jsonNumber) val2.value).value; + } else if ("frame_start".equals(val2.key)) { + f = (int) ((TLRPC.TL_jsonNumber) val2.value).value; + } + } + } + if (f != Integer.MAX_VALUE && n != Integer.MAX_VALUE) { + newEmojies.put(val.key.replace("\uFE0F", ""), new DiceFrameSuccess(f, n)); } } } - if (!diceEmojies.equals(newEmojies)) { - diceEmojies = newEmojies; - 
editor.putStringSet("diceEmojies", diceEmojies); - changed = true; - } - break; } - case "gif_search_emojies": { - ArrayList newEmojies = new ArrayList<>(); - if (value.value instanceof TLRPC.TL_jsonArray) { - TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; - for (int b = 0, N2 = array.value.size(); b < N2; b++) { - TLRPC.JSONValue val = array.value.get(b); - if (val instanceof TLRPC.TL_jsonString) { - TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; - newEmojies.add(string.value.replace("\uFE0F", "")); - } - } + if (!diceSuccess.equals(newEmojies)) { + diceSuccess = newEmojies; + SerializedData serializedData = new SerializedData(); + serializedData.writeInt32(diceSuccess.size()); + for (HashMap.Entry entry : diceSuccess.entrySet()) { + serializedData.writeString(entry.getKey()); + DiceFrameSuccess frameSuccess = entry.getValue(); + serializedData.writeInt32(frameSuccess.frame); + serializedData.writeInt32(frameSuccess.num); } - if (!gifSearchEmojies.equals(newEmojies)) { - gifSearchEmojies = newEmojies; - SerializedData serializedData = new SerializedData(); - serializedData.writeInt32(gifSearchEmojies.size()); - for (int b = 0, N2 = gifSearchEmojies.size(); b < N2; b++) { - serializedData.writeString(gifSearchEmojies.get(b)); - } - editor.putString("gifSearchEmojies", Base64.encodeToString(serializedData.toByteArray(), Base64.DEFAULT)); - serializedData.cleanup(); - changed = true; - } - break; + editor.putString("diceSuccess", Base64.encodeToString(serializedData.toByteArray(), Base64.DEFAULT)); + serializedData.cleanup(); + changed = true; } - case "emojies_send_dice_success": { - try { - HashMap newEmojies = new HashMap<>(); - if (value.value instanceof TLRPC.TL_jsonObject) { - TLRPC.TL_jsonObject jsonObject = (TLRPC.TL_jsonObject) value.value; - for (int b = 0, N2 = jsonObject.value.size(); b < N2; b++) { - TLRPC.TL_jsonObjectValue val = jsonObject.value.get(b); - if (val.value instanceof TLRPC.TL_jsonObject) { - TLRPC.TL_jsonObject jsonObject2 = (TLRPC.TL_jsonObject) val.value; - int n = Integer.MAX_VALUE; - int f = Integer.MAX_VALUE; - for (int c = 0, N3 = jsonObject2.value.size(); c < N3; c++) { - TLRPC.TL_jsonObjectValue val2 = jsonObject2.value.get(c); - if (val2.value instanceof TLRPC.TL_jsonNumber) { - if ("value".equals(val2.key)) { - n = (int) ((TLRPC.TL_jsonNumber) val2.value).value; - } else if ("frame_start".equals(val2.key)) { - f = (int) ((TLRPC.TL_jsonNumber) val2.value).value; - } - } - } - if (f != Integer.MAX_VALUE && n != Integer.MAX_VALUE) { - newEmojies.put(val.key.replace("\uFE0F", ""), new DiceFrameSuccess(f, n)); - } - } - } - } - if (!diceSuccess.equals(newEmojies)) { - diceSuccess = newEmojies; - SerializedData serializedData = new SerializedData(); - serializedData.writeInt32(diceSuccess.size()); - for (HashMap.Entry entry : diceSuccess.entrySet()) { - serializedData.writeString(entry.getKey()); - DiceFrameSuccess frameSuccess = entry.getValue(); - serializedData.writeInt32(frameSuccess.frame); - serializedData.writeInt32(frameSuccess.num); - } - editor.putString("diceSuccess", Base64.encodeToString(serializedData.toByteArray(), Base64.DEFAULT)); - serializedData.cleanup(); - changed = true; - } - } catch (Exception e) { - FileLog.e(e); - } - break; + } catch (Exception e) { + FileLog.e(e); + } + break; + } + case "autoarchive_setting_available": { + if (value.value instanceof TLRPC.TL_jsonBool) { + TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; + if (bool.value != autoarchiveAvailable) { + autoarchiveAvailable = bool.value; 
+ editor.putBoolean("autoarchiveAvailable", autoarchiveAvailable); + changed = true; } - case "autoarchive_setting_available": { - if (value.value instanceof TLRPC.TL_jsonBool) { - TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; - if (bool.value != autoarchiveAvailable) { - autoarchiveAvailable = bool.value; - editor.putBoolean("autoarchiveAvailable", autoarchiveAvailable); - changed = true; - } - } - break; + } + break; + } + case "groupcall_video_participants_max": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != groupCallVideoMaxParticipants) { + groupCallVideoMaxParticipants = (int) number.value; + editor.putInt("groipCallVideoMaxParticipants", groupCallVideoMaxParticipants); + changed = true; } - case "groupcall_video_participants_max": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != groupCallVideoMaxParticipants) { - groupCallVideoMaxParticipants = (int) number.value; - editor.putInt("groipCallVideoMaxParticipants", groupCallVideoMaxParticipants); - changed = true; - } - } - break; + } + break; + } + case "chat_read_mark_size_threshold": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != chatReadMarkSizeThreshold) { + chatReadMarkSizeThreshold = (int) number.value; + editor.putInt("chatReadMarkSizeThreshold", chatReadMarkSizeThreshold); + changed = true; } - case "chat_read_mark_size_threshold": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != chatReadMarkSizeThreshold) { - chatReadMarkSizeThreshold = (int) number.value; - editor.putInt("chatReadMarkSizeThreshold", chatReadMarkSizeThreshold); - changed = true; - } - } - break; + } + break; + } + case "chat_read_mark_expire_period": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != chatReadMarkExpirePeriod) { + chatReadMarkExpirePeriod = (int) number.value; + editor.putInt("chatReadMarkExpirePeriod", chatReadMarkExpirePeriod); + changed = true; } - case "chat_read_mark_expire_period": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != chatReadMarkExpirePeriod) { - chatReadMarkExpirePeriod = (int) number.value; - editor.putInt("chatReadMarkExpirePeriod", chatReadMarkExpirePeriod); - changed = true; - } - } - break; + } + break; + } + case "inapp_update_check_delay": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != updateCheckDelay) { + updateCheckDelay = (int) number.value; + editor.putInt("updateCheckDelay", updateCheckDelay); + changed = true; } - case "inapp_update_check_delay": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != updateCheckDelay) { - updateCheckDelay = (int) number.value; - editor.putInt("updateCheckDelay", updateCheckDelay); - changed = true; - } - } else if (value.value instanceof TLRPC.TL_jsonString) { - TLRPC.TL_jsonString number = (TLRPC.TL_jsonString) value.value; - int delay = Utilities.parseInt(number.value); - if (delay != updateCheckDelay) { - updateCheckDelay = delay; - 
editor.putInt("updateCheckDelay", updateCheckDelay); - changed = true; - } - } - break; + } else if (value.value instanceof TLRPC.TL_jsonString) { + TLRPC.TL_jsonString number = (TLRPC.TL_jsonString) value.value; + int delay = Utilities.parseInt(number.value); + if (delay != updateCheckDelay) { + updateCheckDelay = delay; + editor.putInt("updateCheckDelay", updateCheckDelay); + changed = true; } - case "round_video_encoding": { - if (value.value instanceof TLRPC.TL_jsonObject) { - TLRPC.TL_jsonObject jsonObject = (TLRPC.TL_jsonObject) value.value; - for (int b = 0, N2 = jsonObject.value.size(); b < N2; b++) { - TLRPC.TL_jsonObjectValue value2 = jsonObject.value.get(b); - switch (value2.key) { - case "diameter": { - if (value2.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value2.value; - if (number.value != roundVideoSize) { - roundVideoSize = (int) number.value; - editor.putInt("roundVideoSize", roundVideoSize); - changed = true; - } - } - break; + } + break; + } + case "round_video_encoding": { + if (value.value instanceof TLRPC.TL_jsonObject) { + TLRPC.TL_jsonObject jsonObject = (TLRPC.TL_jsonObject) value.value; + for (int b = 0, N2 = jsonObject.value.size(); b < N2; b++) { + TLRPC.TL_jsonObjectValue value2 = jsonObject.value.get(b); + switch (value2.key) { + case "diameter": { + if (value2.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value2.value; + if (number.value != roundVideoSize) { + roundVideoSize = (int) number.value; + editor.putInt("roundVideoSize", roundVideoSize); + changed = true; } - case "video_bitrate": { - if (value2.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value2.value; - if (number.value != roundVideoBitrate) { - roundVideoBitrate = (int) number.value; - editor.putInt("roundVideoBitrate", roundVideoBitrate); - changed = true; - } - } - break; + } + break; + } + case "video_bitrate": { + if (value2.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value2.value; + if (number.value != roundVideoBitrate) { + roundVideoBitrate = (int) number.value; + editor.putInt("roundVideoBitrate", roundVideoBitrate); + changed = true; } - case "audio_bitrate": { - if (value2.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value2.value; - if (number.value != roundAudioBitrate) { - roundAudioBitrate = (int) number.value; - editor.putInt("roundAudioBitrate", roundAudioBitrate); - changed = true; - } - } - break; + } + break; + } + case "audio_bitrate": { + if (value2.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value2.value; + if (number.value != roundAudioBitrate) { + roundAudioBitrate = (int) number.value; + editor.putInt("roundAudioBitrate", roundAudioBitrate); + changed = true; } } + break; } } - break; } - case "stickers_emoji_suggest_only_api": { - if (value.value instanceof TLRPC.TL_jsonBool) { - TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; - if (bool.value != suggestStickersApiOnly) { - suggestStickersApiOnly = bool.value; - editor.putBoolean("suggestStickersApiOnly", suggestStickersApiOnly); - changed = true; - } - } - break; + } + break; + } + case "stickers_emoji_suggest_only_api": { + if (value.value instanceof TLRPC.TL_jsonBool) { + TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; + if (bool.value != suggestStickersApiOnly) { + suggestStickersApiOnly = bool.value; + 
editor.putBoolean("suggestStickersApiOnly", suggestStickersApiOnly); + changed = true; } - case "export_regex": { - HashSet newExport = new HashSet<>(); - if (value.value instanceof TLRPC.TL_jsonArray) { - TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; - for (int b = 0, N2 = array.value.size(); b < N2; b++) { - TLRPC.JSONValue val = array.value.get(b); - if (val instanceof TLRPC.TL_jsonString) { - TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; - newExport.add(string.value); - } - } - } - if (!exportUri.equals(newExport)) { - exportUri = newExport; - editor.putStringSet("exportUri2", exportUri); - changed = true; + } + break; + } + case "export_regex": { + HashSet newExport = new HashSet<>(); + if (value.value instanceof TLRPC.TL_jsonArray) { + TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; + for (int b = 0, N2 = array.value.size(); b < N2; b++) { + TLRPC.JSONValue val = array.value.get(b); + if (val instanceof TLRPC.TL_jsonString) { + TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; + newExport.add(string.value); } - break; } - case "export_group_urls": { - HashSet newExport = new HashSet<>(); - if (value.value instanceof TLRPC.TL_jsonArray) { - TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; - for (int b = 0, N2 = array.value.size(); b < N2; b++) { - TLRPC.JSONValue val = array.value.get(b); - if (val instanceof TLRPC.TL_jsonString) { - TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; - newExport.add(string.value); - } - } - } - if (!exportGroupUri.equals(newExport)) { - exportGroupUri = newExport; - editor.putStringSet("exportGroupUri", exportGroupUri); - changed = true; + } + if (!exportUri.equals(newExport)) { + exportUri = newExport; + editor.putStringSet("exportUri2", exportUri); + changed = true; + } + break; + } + case "export_group_urls": { + HashSet newExport = new HashSet<>(); + if (value.value instanceof TLRPC.TL_jsonArray) { + TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; + for (int b = 0, N2 = array.value.size(); b < N2; b++) { + TLRPC.JSONValue val = array.value.get(b); + if (val instanceof TLRPC.TL_jsonString) { + TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; + newExport.add(string.value); } - break; } - case "export_private_urls": { - HashSet newExport = new HashSet<>(); - if (value.value instanceof TLRPC.TL_jsonArray) { - TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; - for (int b = 0, N2 = array.value.size(); b < N2; b++) { - TLRPC.JSONValue val = array.value.get(b); - if (val instanceof TLRPC.TL_jsonString) { - TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; - newExport.add(string.value); - } - } - } - if (!exportPrivateUri.equals(newExport)) { - exportPrivateUri = newExport; - editor.putStringSet("exportPrivateUri", exportPrivateUri); - changed = true; + } + if (!exportGroupUri.equals(newExport)) { + exportGroupUri = newExport; + editor.putStringSet("exportGroupUri", exportGroupUri); + changed = true; + } + break; + } + case "export_private_urls": { + HashSet newExport = new HashSet<>(); + if (value.value instanceof TLRPC.TL_jsonArray) { + TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; + for (int b = 0, N2 = array.value.size(); b < N2; b++) { + TLRPC.JSONValue val = array.value.get(b); + if (val instanceof TLRPC.TL_jsonString) { + TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; + newExport.add(string.value); } - break; } - case "pending_suggestions": { - HashSet newSuggestions = new HashSet<>(); - if (value.value instanceof 
TLRPC.TL_jsonArray) { - TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; - for (int b = 0, N2 = array.value.size(); b < N2; b++) { - TLRPC.JSONValue val = array.value.get(b); - if (val instanceof TLRPC.TL_jsonString) { - TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; - newSuggestions.add(string.value); - } - } - } - if (!pendingSuggestions.equals(newSuggestions)) { - pendingSuggestions = newSuggestions; - editor.putStringSet("pendingSuggestions", pendingSuggestions); - getNotificationCenter().postNotificationName(NotificationCenter.newSuggestionsAvailable); - changed = true; + } + if (!exportPrivateUri.equals(newExport)) { + exportPrivateUri = newExport; + editor.putStringSet("exportPrivateUri", exportPrivateUri); + changed = true; + } + break; + } + case "pending_suggestions": { + HashSet newSuggestions = new HashSet<>(); + if (value.value instanceof TLRPC.TL_jsonArray) { + TLRPC.TL_jsonArray array = (TLRPC.TL_jsonArray) value.value; + for (int b = 0, N2 = array.value.size(); b < N2; b++) { + TLRPC.JSONValue val = array.value.get(b); + if (val instanceof TLRPC.TL_jsonString) { + TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) val; + newSuggestions.add(string.value); } - break; } - case "emojies_sounds": { - try { - HashMap newEmojies = new HashMap<>(); - if (value.value instanceof TLRPC.TL_jsonObject) { - TLRPC.TL_jsonObject jsonObject = (TLRPC.TL_jsonObject) value.value; - for (int b = 0, N2 = jsonObject.value.size(); b < N2; b++) { - TLRPC.TL_jsonObjectValue val = jsonObject.value.get(b); - if (val.value instanceof TLRPC.TL_jsonObject) { - TLRPC.TL_jsonObject jsonObject2 = (TLRPC.TL_jsonObject) val.value; - long i = 0; - long ah = 0; - String fr = null; - for (int c = 0, N3 = jsonObject2.value.size(); c < N3; c++) { - TLRPC.TL_jsonObjectValue val2 = jsonObject2.value.get(c); - if (val2.value instanceof TLRPC.TL_jsonString) { - if ("id".equals(val2.key)) { - i = Utilities.parseLong(((TLRPC.TL_jsonString) val2.value).value); - } else if ("access_hash".equals(val2.key)) { - ah = Utilities.parseLong(((TLRPC.TL_jsonString) val2.value).value); - } else if ("file_reference_base64".equals(val2.key)) { - fr = ((TLRPC.TL_jsonString) val2.value).value; - } - } - } - if (i != 0 && ah != 0 && fr != null) { - newEmojies.put(val.key.replace("\uFE0F", ""), new EmojiSound(i, ah, fr)); + } + if (!pendingSuggestions.equals(newSuggestions)) { + pendingSuggestions = newSuggestions; + editor.putStringSet("pendingSuggestions", pendingSuggestions); + getNotificationCenter().postNotificationName(NotificationCenter.newSuggestionsAvailable); + changed = true; + } + break; + } + case "emojies_sounds": { + try { + HashMap newEmojies = new HashMap<>(); + if (value.value instanceof TLRPC.TL_jsonObject) { + TLRPC.TL_jsonObject jsonObject = (TLRPC.TL_jsonObject) value.value; + for (int b = 0, N2 = jsonObject.value.size(); b < N2; b++) { + TLRPC.TL_jsonObjectValue val = jsonObject.value.get(b); + if (val.value instanceof TLRPC.TL_jsonObject) { + TLRPC.TL_jsonObject jsonObject2 = (TLRPC.TL_jsonObject) val.value; + long i = 0; + long ah = 0; + String fr = null; + for (int c = 0, N3 = jsonObject2.value.size(); c < N3; c++) { + TLRPC.TL_jsonObjectValue val2 = jsonObject2.value.get(c); + if (val2.value instanceof TLRPC.TL_jsonString) { + if ("id".equals(val2.key)) { + i = Utilities.parseLong(((TLRPC.TL_jsonString) val2.value).value); + } else if ("access_hash".equals(val2.key)) { + ah = Utilities.parseLong(((TLRPC.TL_jsonString) val2.value).value); + } else if 
("file_reference_base64".equals(val2.key)) { + fr = ((TLRPC.TL_jsonString) val2.value).value; } } } - } - if (!emojiSounds.equals(newEmojies)) { - emojiSounds = newEmojies; - SerializedData serializedData = new SerializedData(); - serializedData.writeInt32(emojiSounds.size()); - for (HashMap.Entry entry : emojiSounds.entrySet()) { - serializedData.writeString(entry.getKey()); - EmojiSound emojiSound = entry.getValue(); - serializedData.writeInt64(emojiSound.id); - serializedData.writeInt64(emojiSound.accessHash); - serializedData.writeByteArray(emojiSound.fileReference); + if (i != 0 && ah != 0 && fr != null) { + newEmojies.put(val.key.replace("\uFE0F", ""), new EmojiSound(i, ah, fr)); } - editor.putString("emojiSounds", Base64.encodeToString(serializedData.toByteArray(), Base64.DEFAULT)); - serializedData.cleanup(); - changed = true; } - } catch (Exception e) { - FileLog.e(e); } - break; } - case "ringtone_size_max": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != ringtoneSizeMax) { - ringtoneSizeMax = (int) number.value; - editor.putInt("ringtoneSizeMax", ringtoneSizeMax); - changed = true; - } + if (!emojiSounds.equals(newEmojies)) { + emojiSounds = newEmojies; + SerializedData serializedData = new SerializedData(); + serializedData.writeInt32(emojiSounds.size()); + for (HashMap.Entry entry : emojiSounds.entrySet()) { + serializedData.writeString(entry.getKey()); + EmojiSound emojiSound = entry.getValue(); + serializedData.writeInt64(emojiSound.id); + serializedData.writeInt64(emojiSound.accessHash); + serializedData.writeByteArray(emojiSound.fileReference); } - break; + editor.putString("emojiSounds", Base64.encodeToString(serializedData.toByteArray(), Base64.DEFAULT)); + serializedData.cleanup(); + changed = true; } - case "ringtone_duration_max": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != ringtoneDurationMax) { - ringtoneDurationMax = (int) number.value; - editor.putInt("ringtoneDurationMax", ringtoneDurationMax); - changed = true; - } - } - break; + } catch (Exception e) { + FileLog.e(e); + } + break; + } + case "ringtone_size_max": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != ringtoneSizeMax) { + ringtoneSizeMax = (int) number.value; + editor.putInt("ringtoneSizeMax", ringtoneSizeMax); + changed = true; } - case "channels_limit_default": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != channelsLimitDefault) { - channelsLimitDefault = (int) number.value; - editor.putInt("channelsLimitDefault", channelsLimitDefault); - changed = true; - } - } - break; + } + break; + } + case "ringtone_duration_max": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != ringtoneDurationMax) { + ringtoneDurationMax = (int) number.value; + editor.putInt("ringtoneDurationMax", ringtoneDurationMax); + changed = true; } - case "channels_limit_premium": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != channelsLimitPremium) { - channelsLimitPremium = (int) number.value; - editor.putInt("channelsLimitPremium", channelsLimitPremium); - changed = true; - } - 
} - break; + } + break; + } + case "channels_limit_default": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != channelsLimitDefault) { + channelsLimitDefault = (int) number.value; + editor.putInt("channelsLimitDefault", channelsLimitDefault); + changed = true; } - case "saved_gifs_limit_default": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != savedGifsLimitDefault) { - savedGifsLimitDefault = (int) number.value; - editor.putInt("savedGifsLimitDefault", savedGifsLimitDefault); - changed = true; - } - } - break; + } + break; + } + case "channels_limit_premium": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != channelsLimitPremium) { + channelsLimitPremium = (int) number.value; + editor.putInt("channelsLimitPremium", channelsLimitPremium); + changed = true; } - case "saved_gifs_limit_premium": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != savedGifsLimitPremium) { - savedGifsLimitPremium = (int) number.value; - editor.putInt("savedGifsLimitPremium", savedGifsLimitPremium); - changed = true; - } - } - break; + } + break; + } + case "saved_gifs_limit_default": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != savedGifsLimitDefault) { + savedGifsLimitDefault = (int) number.value; + editor.putInt("savedGifsLimitDefault", savedGifsLimitDefault); + changed = true; } - case "stickers_faved_limit_default": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != stickersFavedLimitDefault) { - stickersFavedLimitDefault = (int) number.value; - editor.putInt("stickersFavedLimitDefault", stickersFavedLimitDefault); - changed = true; - } - } - break; + } + break; + } + case "saved_gifs_limit_premium": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != savedGifsLimitPremium) { + savedGifsLimitPremium = (int) number.value; + editor.putInt("savedGifsLimitPremium", savedGifsLimitPremium); + changed = true; } - case "stickers_faved_limit_premium": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != stickersFavedLimitPremium) { - stickersFavedLimitPremium = (int) number.value; - editor.putInt("stickersFavedLimitPremium", stickersFavedLimitPremium); - changed = true; - } - } - break; + } + break; + } + case "stickers_faved_limit_default": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != stickersFavedLimitDefault) { + stickersFavedLimitDefault = (int) number.value; + editor.putInt("stickersFavedLimitDefault", stickersFavedLimitDefault); + changed = true; } - case "dialog_filters_limit_default": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != dialogFiltersLimitDefault) { - dialogFiltersLimitDefault = (int) number.value; - editor.putInt("dialogFiltersLimitDefault", dialogFiltersLimitDefault); - changed = true; - 
} - } - break; + } + break; + } + case "stickers_faved_limit_premium": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != stickersFavedLimitPremium) { + stickersFavedLimitPremium = (int) number.value; + editor.putInt("stickersFavedLimitPremium", stickersFavedLimitPremium); + changed = true; } - case "dialog_filters_limit_premium": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != dialogFiltersLimitPremium) { - dialogFiltersLimitPremium = (int) number.value; - editor.putInt("dialogFiltersLimitPremium", dialogFiltersLimitPremium); - changed = true; - } - } - break; + } + break; + } + case "pinned_dialogs_count_max_default": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != maxPinnedDialogsCountDefault) { + maxPinnedDialogsCountDefault = (int) number.value; + editor.putInt("maxPinnedDialogsCountDefault", maxPinnedDialogsCountDefault); + changed = true; } - case "dialog_filters_chats_limit_default": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != dialogFiltersChatsLimitDefault) { - dialogFiltersChatsLimitDefault = (int) number.value; - editor.putInt("dialogFiltersChatsLimitDefault", dialogFiltersChatsLimitDefault); - changed = true; - } - } - break; + } + break; + } + case "pinned_dialogs_count_max_premium": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != maxPinnedDialogsCountPremium) { + maxPinnedDialogsCountPremium = (int) number.value; + editor.putInt("maxPinnedDialogsCountPremium", maxPinnedDialogsCountPremium); + changed = true; } - case "dialog_filters_chats_limit_premium": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != dialogFiltersChatsLimitPremium) { - dialogFiltersChatsLimitPremium = (int) number.value; - editor.putInt("dialogFiltersChatsLimitPremium", dialogFiltersChatsLimitPremium); - changed = true; - } - } - break; + } + break; + } + case "dialog_filters_limit_default": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != dialogFiltersLimitDefault) { + dialogFiltersLimitDefault = (int) number.value; + editor.putInt("dialogFiltersLimitDefault", dialogFiltersLimitDefault); + changed = true; } - case "dialog_filters_pinned_limit_default": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != dialogFiltersPinnedLimitDefault) { - dialogFiltersPinnedLimitDefault = (int) number.value; - editor.putInt("dialogFiltersPinnedLimitDefault", dialogFiltersPinnedLimitDefault); - changed = true; - } - } - break; + } + break; + } + case "dialog_filters_limit_premium": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != dialogFiltersLimitPremium) { + dialogFiltersLimitPremium = (int) number.value; + editor.putInt("dialogFiltersLimitPremium", dialogFiltersLimitPremium); + changed = true; } - case "dialog_filters_pinned_limit_premium": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - 
TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != dialogFiltersPinnedLimitPremium) { - dialogFiltersPinnedLimitPremium = (int) number.value; - editor.putInt("dialogFiltersPinnedLimitPremium", dialogFiltersPinnedLimitPremium); - changed = true; - } - } - break; + } + break; + } + case "dialog_filters_chats_limit_default": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != dialogFiltersChatsLimitDefault) { + dialogFiltersChatsLimitDefault = (int) number.value; + editor.putInt("dialogFiltersChatsLimitDefault", dialogFiltersChatsLimitDefault); + changed = true; } - case "upload_max_fileparts_default": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != uploadMaxFileParts) { - uploadMaxFileParts = (int) number.value; - editor.putInt("uploadMaxFileParts", uploadMaxFileParts); - changed = true; - } - } - break; + } + break; + } + case "dialog_filters_chats_limit_premium": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != dialogFiltersChatsLimitPremium) { + dialogFiltersChatsLimitPremium = (int) number.value; + editor.putInt("dialogFiltersChatsLimitPremium", dialogFiltersChatsLimitPremium); + changed = true; } - case "upload_max_fileparts_premium": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != uploadMaxFilePartsPremium) { - uploadMaxFilePartsPremium = (int) number.value; - editor.putInt("uploadMaxFilePartsPremium", uploadMaxFilePartsPremium); - changed = true; - } - } - break; + } + break; + } + case "dialog_filters_pinned_limit_default": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != dialogFiltersPinnedLimitDefault) { + dialogFiltersPinnedLimitDefault = (int) number.value; + editor.putInt("dialogFiltersPinnedLimitDefault", dialogFiltersPinnedLimitDefault); + changed = true; } - case "channels_public_limit_default": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != publicLinksLimitDefault) { - publicLinksLimitDefault = (int) number.value; - editor.putInt("publicLinksLimit", publicLinksLimitDefault); - changed = true; - } - } - break; + } + break; + } + case "dialog_filters_pinned_limit_premium": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != dialogFiltersPinnedLimitPremium) { + dialogFiltersPinnedLimitPremium = (int) number.value; + editor.putInt("dialogFiltersPinnedLimitPremium", dialogFiltersPinnedLimitPremium); + changed = true; } - case "channels_public_limit_premium": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != publicLinksLimitPremium) { - publicLinksLimitPremium = (int) number.value; - editor.putInt("publicLinksLimitPremium", publicLinksLimitPremium); - changed = true; - } - } - break; + } + break; + } + case "upload_max_fileparts_default": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != uploadMaxFileParts) { + uploadMaxFileParts 
= (int) number.value; + editor.putInt("uploadMaxFileParts", uploadMaxFileParts); + changed = true; } - case "caption_length_limit_default": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != captionLengthLimitDefault) { - captionLengthLimitDefault = (int) number.value; - editor.putInt("captionLengthLimitDefault", captionLengthLimitDefault); - changed = true; - } - } - break; + } + break; + } + case "upload_max_fileparts_premium": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != uploadMaxFilePartsPremium) { + uploadMaxFilePartsPremium = (int) number.value; + editor.putInt("uploadMaxFilePartsPremium", uploadMaxFilePartsPremium); + changed = true; } - case "caption_length_limit_premium": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != captionLengthLimitPremium) { - captionLengthLimitPremium = (int) number.value; - editor.putInt("captionLengthLimitPremium", captionLengthLimitPremium); - changed = true; - } - } - break; + } + break; + } + case "channels_public_limit_default": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != publicLinksLimitDefault) { + publicLinksLimitDefault = (int) number.value; + editor.putInt("publicLinksLimit", publicLinksLimitDefault); + changed = true; } - case "about_length_limit_default": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != aboutLengthLimitDefault) { - aboutLengthLimitDefault = (int) number.value; - editor.putInt("aboutLengthLimitDefault", aboutLengthLimitDefault); - changed = true; - } - } - break; + } + break; + } + case "channels_public_limit_premium": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != publicLinksLimitPremium) { + publicLinksLimitPremium = (int) number.value; + editor.putInt("publicLinksLimitPremium", publicLinksLimitPremium); + changed = true; } - case "about_length_limit_premium": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != aboutLengthLimitPremium) { - aboutLengthLimitPremium = (int) number.value; - editor.putInt("aboutLengthLimitPremium", aboutLengthLimitPremium); - changed = true; - } - } - break; + } + break; + } + case "caption_length_limit_default": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != captionLengthLimitDefault) { + captionLengthLimitDefault = (int) number.value; + editor.putInt("captionLengthLimitDefault", captionLengthLimitDefault); + changed = true; } - case "reactions_user_max_default": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != reactionsUserMaxDefault) { - reactionsUserMaxDefault = (int) number.value; - editor.putInt("reactionsUserMaxDefault", reactionsUserMaxDefault); - changed = true; - } - } - break; + } + break; + } + case "caption_length_limit_premium": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if 
(number.value != captionLengthLimitPremium) { + captionLengthLimitPremium = (int) number.value; + editor.putInt("captionLengthLimitPremium", captionLengthLimitPremium); + changed = true; + } + } + break; + } + case "about_length_limit_default": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != aboutLengthLimitDefault) { + aboutLengthLimitDefault = (int) number.value; + editor.putInt("aboutLengthLimitDefault", aboutLengthLimitDefault); + changed = true; + } + } + break; + } + case "about_length_limit_premium": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != aboutLengthLimitPremium) { + aboutLengthLimitPremium = (int) number.value; + editor.putInt("aboutLengthLimitPremium", aboutLengthLimitPremium); + changed = true; + } + } + break; + } + case "reactions_user_max_default": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != reactionsUserMaxDefault) { + reactionsUserMaxDefault = (int) number.value; + editor.putInt("reactionsUserMaxDefault", reactionsUserMaxDefault); + changed = true; } - case "reactions_user_max_premium": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != reactionsUserMaxPremium) { - reactionsUserMaxPremium = (int) number.value; - editor.putInt("reactionsUserMaxPremium", reactionsUserMaxPremium); - changed = true; - } - } - break; + } + break; + } + case "reactions_user_max_premium": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != reactionsUserMaxPremium) { + reactionsUserMaxPremium = (int) number.value; + editor.putInt("reactionsUserMaxPremium", reactionsUserMaxPremium); + changed = true; } - case "reactions_in_chat_max": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != reactionsInChatMax) { - reactionsInChatMax = (int) number.value; - editor.putInt("reactionsInChatMax", reactionsInChatMax); - changed = true; - } - } - break; + } + break; + } + case "reactions_in_chat_max": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != reactionsInChatMax) { + reactionsInChatMax = (int) number.value; + editor.putInt("reactionsInChatMax", reactionsInChatMax); + changed = true; } - case "forum_upgrade_participants_min": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != forumUpgradeParticipantsMin) { - forumUpgradeParticipantsMin = (int) number.value; - editor.putInt("forumUpgradeParticipantsMin", forumUpgradeParticipantsMin); - changed = true; - } - } - break; + } + break; + } + case "forum_upgrade_participants_min": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != forumUpgradeParticipantsMin) { + forumUpgradeParticipantsMin = (int) number.value; + editor.putInt("forumUpgradeParticipantsMin", forumUpgradeParticipantsMin); + changed = true; } - case "topics_pinned_limit": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = 
(TLRPC.TL_jsonNumber) value.value; - if (number.value != topicsPinnedLimit) { - topicsPinnedLimit = (int) number.value; - editor.putInt("topicsPinnedLimit", topicsPinnedLimit); - changed = true; - } - } - break; + } + break; + } + case "topics_pinned_limit": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != topicsPinnedLimit) { + topicsPinnedLimit = (int) number.value; + editor.putInt("topicsPinnedLimit", topicsPinnedLimit); + changed = true; } - case "telegram_antispam_user_id": { - if (value.value instanceof TLRPC.TL_jsonString) { - TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) value.value; - try { - long number = Long.parseLong(string.value); - if (number != telegramAntispamUserId) { - telegramAntispamUserId = number; - editor.putLong("telegramAntispamUserId", telegramAntispamUserId); - changed = true; - } - } catch (Exception e) { - FileLog.e(e); - } + } + break; + } + case "telegram_antispam_user_id": { + if (value.value instanceof TLRPC.TL_jsonString) { + TLRPC.TL_jsonString string = (TLRPC.TL_jsonString) value.value; + try { + long number = Long.parseLong(string.value); + if (number != telegramAntispamUserId) { + telegramAntispamUserId = number; + editor.putLong("telegramAntispamUserId", telegramAntispamUserId); + changed = true; } - break; + } catch (Exception e) { + FileLog.e(e); } - case "telegram_antispam_group_size_min": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != telegramAntispamGroupSizeMin) { - telegramAntispamGroupSizeMin = (int) number.value; - editor.putInt("telegramAntispamGroupSizeMin", telegramAntispamGroupSizeMin); - changed = true; - } - } - break; + } + break; + } + case "telegram_antispam_group_size_min": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != telegramAntispamGroupSizeMin) { + telegramAntispamGroupSizeMin = (int) number.value; + editor.putInt("telegramAntispamGroupSizeMin", telegramAntispamGroupSizeMin); + changed = true; } - case "hidden_members_group_size_min": { - if (value.value instanceof TLRPC.TL_jsonNumber) { - TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; - if (number.value != hiddenMembersGroupSizeMin) { - hiddenMembersGroupSizeMin = (int) number.value; - editor.putInt("hiddenMembersGroupSizeMin", hiddenMembersGroupSizeMin); - changed = true; - } - } - break; + } + break; + } + case "hidden_members_group_size_min": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != hiddenMembersGroupSizeMin) { + hiddenMembersGroupSizeMin = (int) number.value; + editor.putInt("hiddenMembersGroupSizeMin", hiddenMembersGroupSizeMin); + changed = true; } } + break; } - if (changed) { - editor.apply(); + case "android_collect_device_stats": { + if (value.value instanceof TLRPC.TL_jsonBool) { + TLRPC.TL_jsonBool bool = (TLRPC.TL_jsonBool) value.value; + if (bool.value != collectDeviceStats) { + collectDeviceStats = bool.value; + changed = true; + } + } + break; } - if (keelAliveChanged) { - ApplicationLoader.startPushService(); - ConnectionsManager connectionsManager = getConnectionsManager(); - connectionsManager.setPushConnectionEnabled(connectionsManager.isPushConnectionEnabled()); + case "android_check_reset_langpack": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + 
TLRPC.TL_jsonNumber num = (TLRPC.TL_jsonNumber) value.value; + if (num.value != checkResetLangpack) { + checkResetLangpack = (int) num.value; + editor.putInt("checkResetLangpack", checkResetLangpack); + LocaleController.getInstance().checkPatchLangpack(currentAccount); + changed = true; + } + } + break; } } - loadingAppConfig = false; - })); + } + if (changed) { + editor.apply(); + } + if (liteAppOptions != null) { + LiteMode.updatePresets(liteAppOptions); + } + if (keelAliveChanged) { + ApplicationLoader.startPushService(); + ConnectionsManager connectionsManager = getConnectionsManager(); + connectionsManager.setPushConnectionEnabled(connectionsManager.isPushConnectionEnabled()); + } + logStorageDir(); } private void resetAppConfig() { getfileExperimentalParams = false; - mainPreferences.edit().remove("getfileExperimentalParams"); + collectDeviceStats = false; + mainPreferences.edit().remove("getfileExperimentalParams").apply(); } private boolean savePremiumFeaturesPreviewOrder(SharedPreferences.Editor editor, ArrayList value) { @@ -2912,6 +3182,7 @@ public void removeSuggestion(long did, String suggestion) { public void updateConfig(final TLRPC.TL_config config) { AndroidUtilities.runOnUIThread(() -> { + // TODO: receive those removed parameters from appconfig getDownloadController().loadAutoDownloadConfig(false); loadAppConfig(); thisDc = config.this_dc; @@ -2919,9 +3190,9 @@ public void updateConfig(final TLRPC.TL_config config) { maxGroupCount = config.chat_size_max; maxEditTime = config.edit_time_limit; ratingDecay = config.rating_e_decay; - maxRecentGifsCount = config.saved_gifs_limit; +// maxRecentGifsCount = config.saved_gifs_limit; maxRecentStickersCount = config.stickers_recent_limit; - maxFaveStickersCount = config.stickers_faved_limit; +// maxFaveStickersCount = config.stickers_faved_limit; revokeTimeLimit = config.revoke_time_limit; revokeTimePmLimit = config.revoke_pm_time_limit; canRevokePmInbox = config.revoke_pm_inbox; @@ -2939,8 +3210,8 @@ public void updateConfig(final TLRPC.TL_config config) { callRingTimeout = config.call_ring_timeout_ms; callConnectTimeout = config.call_connect_timeout_ms; callPacketTimeout = config.call_packet_timeout_ms; - maxPinnedDialogsCount = config.pinned_dialogs_count_max; - maxFolderPinnedDialogsCount = config.pinned_infolder_count_max; +// maxPinnedDialogsCount = config.pinned_dialogs_count_max; +// maxFolderPinnedDialogsCount = config.pinned_infolder_count_max; maxMessageLength = config.message_length_max; maxCaptionLength = config.caption_length_max; preloadFeaturedStickers = config.preload_featured_stickers; @@ -3023,8 +3294,9 @@ public void updateConfig(final TLRPC.TL_config config) { editor.putInt("callConnectTimeout", callConnectTimeout); editor.putInt("callPacketTimeout", callPacketTimeout); editor.putString("linkPrefix", linkPrefix); - editor.putInt("maxPinnedDialogsCount", maxPinnedDialogsCount); - editor.putInt("maxFolderPinnedDialogsCount", maxFolderPinnedDialogsCount); +// editor.putInt("maxPinnedDialogsCount", maxPinnedDialogsCount); + editor.putInt("maxFolderPinnedDialogsCountDefault", maxFolderPinnedDialogsCountDefault); + editor.putInt("maxFolderPinnedDialogsCountPremium", maxFolderPinnedDialogsCountPremium); editor.putInt("maxMessageLength", maxMessageLength); editor.putInt("maxCaptionLength", maxCaptionLength); editor.putBoolean("preloadFeaturedStickers", preloadFeaturedStickers); @@ -3553,6 +3825,7 @@ public void cleanup() { getLocationController().cleanup(); getMediaDataController().cleanup(); 
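// Annotation (not part of the patch): every numeric app-config key handled above follows the
// same shape — type-check the JSON node, compare against the cached field, persist the new
// value under the matching preference key, and mark the config as changed. A minimal sketch of
// that shape, assuming the surrounding MessagesController context; the helper name
// readIntValue and the boolean[] changed carrier are illustrative, not part of the source.
private static int readIntValue(TLRPC.JSONValue json, int current, String prefKey,
                                SharedPreferences.Editor editor, boolean[] changed) {
    if (json instanceof TLRPC.TL_jsonNumber) {
        int parsed = (int) ((TLRPC.TL_jsonNumber) json).value;
        if (parsed != current) {
            editor.putInt(prefKey, parsed); // same key the field is cached under
            changed[0] = true;              // mirrors `changed = true;` in each case
            return parsed;
        }
    }
    return current;                         // wrong JSON type or unchanged value
}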
getColorPalette().cleanup(); + getTranslateController().cleanup(); showFiltersTooltip = false; @@ -4544,6 +4817,7 @@ public void loadFullChat(long chatId, int classGuid, boolean force) { res.full_chat.inviterId = old.inviterId; } fullChats.put(chatId, res.full_chat); + getTranslateController().updateDialogFull(-chatId); applyDialogNotificationsSettings(-chatId, 0, res.full_chat.notify_settings); for (int a = 0; a < res.full_chat.bot_info.size(); a++) { @@ -4641,6 +4915,7 @@ public void loadFullUser(final TLRPC.User user, int classGuid, boolean force) { } } fullUsers.put(user.id, userFull); + getTranslateController().updateDialogFull(user.id); loadingFullUsers.remove(user.id); loadedFullUsers.put(user.id, System.currentTimeMillis()); String names = user.first_name + user.last_name + UserObject.getPublicUsername(user); @@ -5280,13 +5555,27 @@ public void setUserAdminRole(long chatId, TLRPC.User user, TLRPC.TL_chatAdminRig } }, 1000); } else { + if (error != null && "USER_PRIVACY_RESTRICTED".equals(error.text) && ChatObject.canUserDoAdminAction(chat, ChatObject.ACTION_INVITE)) { + AndroidUtilities.runOnUIThread(() -> { + BaseFragment lastFragment = LaunchActivity.getLastFragment(); + if (lastFragment != null && lastFragment.getParentActivity() != null) { + LimitReachedBottomSheet restricterdUsersBottomSheet = new LimitReachedBottomSheet(lastFragment, lastFragment.getParentActivity(), LimitReachedBottomSheet.TYPE_ADD_MEMBERS_RESTRICTED, currentAccount); + ArrayList users = new ArrayList(); + users.add(user); + restricterdUsersBottomSheet.setRestrictedUsers(chat, users); + restricterdUsersBottomSheet.show(); + } + onError.run(error); + }); + return; + } AndroidUtilities.runOnUIThread(() -> AlertsCreator.processError(currentAccount, error, parentFragment, req, isChannel)); if (onError != null) { AndroidUtilities.runOnUIThread(() -> onError.run(error)); } } }; - if (chat.megagroup && addingNew || !TextUtils.isEmpty(botHash)) { + if ((!user.bot || !ChatObject.isChannelAndNotMegaGroup(chat)) && addingNew) { addUserToChat(chatId, user, 0, botHash, parentFragment, true, () -> getConnectionsManager().sendRequest(req, requestDelegate), onError); } else { getConnectionsManager().sendRequest(req, requestDelegate); @@ -5415,7 +5704,7 @@ public void deleteUserPhoto(TLRPC.InputPhoto photo) { if (photo == null) { TLRPC.TL_photos_updateProfilePhoto req = new TLRPC.TL_photos_updateProfilePhoto(); req.id = new TLRPC.TL_inputPhotoEmpty(); - getUserConfig().getCurrentUser().photo = new TLRPC.TL_userProfilePhotoEmpty(); + // getUserConfig().getCurrentUser().photo = new TLRPC.TL_userProfilePhotoEmpty(); TLRPC.User user = getUser(getUserConfig().getClientUserId()); if (user == null) { user = getUserConfig().getCurrentUser(); @@ -5423,39 +5712,51 @@ public void deleteUserPhoto(TLRPC.InputPhoto photo) { if (user == null) { return; } - user.photo = getUserConfig().getCurrentUser().photo; + if (user.photo != null) { + getMessagesStorage().clearUserPhoto(user.id, user.photo.photo_id); + } + // user.photo = getUserConfig().getCurrentUser().photo; getNotificationCenter().postNotificationName(NotificationCenter.mainUserInfoChanged); getNotificationCenter().postNotificationName(NotificationCenter.updateInterfaces, UPDATE_MASK_ALL); + getConnectionsManager().sendRequest(req, (response, error) -> { if (error == null) { - TLRPC.TL_photos_photo photos_photo = (TLRPC.TL_photos_photo) response; - TLRPC.User user1 = getUser(getUserConfig().getClientUserId()); - if (user1 == null) { - user1 = getUserConfig().getCurrentUser(); - 
putUser(user1, false); - } else { - getUserConfig().setCurrentUser(user1); - } - if (user1 == null) { - return; - } - getMessagesStorage().clearUserPhotos(user1.id); - ArrayList users = new ArrayList<>(); - users.add(user1); - getMessagesStorage().putUsersAndChats(users, null, false, true); - if (photos_photo.photo instanceof TLRPC.TL_photo) { - user1.photo = new TLRPC.TL_userProfilePhoto(); - user1.photo.has_video = !photos_photo.photo.video_sizes.isEmpty(); - user1.photo.photo_id = photos_photo.photo.id; - user1.photo.photo_small = FileLoader.getClosestPhotoSizeWithSize(photos_photo.photo.sizes, 150).location; - user1.photo.photo_big = FileLoader.getClosestPhotoSizeWithSize(photos_photo.photo.sizes, 800).location; - user1.photo.dc_id = photos_photo.photo.dc_id; - } else { - user1.photo = new TLRPC.TL_userProfilePhotoEmpty(); - } AndroidUtilities.runOnUIThread(() -> { + TLRPC.TL_photos_photo photos_photo = (TLRPC.TL_photos_photo) response; + TLRPC.User user1 = getUser(getUserConfig().getClientUserId()); + if (user1 == null) { + user1 = getUserConfig().getCurrentUser(); + putUser(user1, false); + } else { + getUserConfig().setCurrentUser(user1); + } + if (user1 == null) { + return; + } + ArrayList users = new ArrayList<>(); + users.add(user1); + getMessagesStorage().putUsersAndChats(users, null, false, true); + if (photos_photo.photo instanceof TLRPC.TL_photo) { + user1.photo = new TLRPC.TL_userProfilePhoto(); + user1.photo.has_video = !photos_photo.photo.video_sizes.isEmpty(); + user1.photo.photo_id = photos_photo.photo.id; + user1.photo.photo_small = FileLoader.getClosestPhotoSizeWithSize(photos_photo.photo.sizes, 150).location; + user1.photo.photo_big = FileLoader.getClosestPhotoSizeWithSize(photos_photo.photo.sizes, 800).location; + user1.photo.dc_id = photos_photo.photo.dc_id; + } else { + user1.photo = new TLRPC.TL_userProfilePhotoEmpty(); + } + + TLRPC.UserFull userFull = getUserFull(getUserConfig().getClientUserId()); + userFull.profile_photo = photos_photo.photo; + getMessagesStorage().updateUserInfo(userFull, false); + + getUserConfig().getCurrentUser().photo = user1.photo; + putUser(user1, false); + getNotificationCenter().postNotificationName(NotificationCenter.mainUserInfoChanged); getNotificationCenter().postNotificationName(NotificationCenter.updateInterfaces, UPDATE_MASK_ALL); + getNotificationCenter().postNotificationName(NotificationCenter.updateInterfaces, UPDATE_MASK_AVATAR); getUserConfig().saveConfig(true); }); } @@ -6527,6 +6828,7 @@ public void loadChannelParticipants(Long chatId) { public void putChatFull(TLRPC.ChatFull chatFull) { fullChats.put(chatFull.id, chatFull); + getTranslateController().updateDialogFull(-chatFull.id); } public void processChatInfo(long chatId, TLRPC.ChatFull info, ArrayList usersArr, boolean fromCache, boolean force, boolean byChannelUsers, ArrayList pinnedMessages, HashMap pinnedMessagesMap, int totalPinnedCount, boolean pinnedEndReached) { @@ -6540,6 +6842,7 @@ public void processChatInfo(long chatId, TLRPC.ChatFull info, ArrayList getMessagesController().blockePeers.indexOfKey(it.userId) == -1).collect(Collectors.toCollection(ArrayList::new)); } - if (arr.isEmpty()) continue; + int type = 0; + CharSequence text = null; + if (key > 0 || isEncryptedChat || arr.size() == 1) { PrintingUser pu = arr.get(0); TLRPC.User user = getUser(pu.userId); if (user == null) { continue; } - + final boolean isGroup = key < 0 && !isEncryptedChat; if (pu.action instanceof TLRPC.TL_sendMessageRecordAudioAction) { - if (key < 0 && !isEncryptedChat) { - 
newPrintingStrings.put(threadId, LocaleController.formatString("IsRecordingAudio", R.string.IsRecordingAudio, getUserNameForTyping(user))); + if (isGroup) { + text = LocaleController.formatString("IsRecordingAudio", R.string.IsRecordingAudio, getUserNameForTyping(user)); } else { - newPrintingStrings.put(threadId, LocaleController.getString("RecordingAudio", R.string.RecordingAudio)); + text = LocaleController.getString("RecordingAudio", R.string.RecordingAudio); } - newPrintingStringsTypes.put(threadId, 1); + type = 1; } else if (pu.action instanceof TLRPC.TL_sendMessageRecordRoundAction) { - if (key < 0 && !isEncryptedChat) { - newPrintingStrings.put(threadId, LocaleController.formatString("IsRecordingRound", R.string.IsRecordingRound, getUserNameForTyping(user))); + if (isGroup) { + text = LocaleController.formatString("IsRecordingRound", R.string.IsRecordingRound, getUserNameForTyping(user)); } else { - newPrintingStrings.put(threadId, LocaleController.getString("RecordingRound", R.string.RecordingRound)); + text = LocaleController.getString("RecordingRound", R.string.RecordingRound); } - newPrintingStringsTypes.put(threadId, 4); + type = 4; } else if (pu.action instanceof TLRPC.TL_sendMessageUploadRoundAction) { - if (key < 0 && !isEncryptedChat) { - newPrintingStrings.put(threadId, LocaleController.formatString("IsSendingVideo", R.string.IsSendingVideo, getUserNameForTyping(user))); + if (isGroup) { + text = LocaleController.formatString("IsSendingVideo", R.string.IsSendingVideo, getUserNameForTyping(user)); } else { - newPrintingStrings.put(threadId, LocaleController.getString("SendingVideoStatus", R.string.SendingVideoStatus)); + text = LocaleController.getString("SendingVideoStatus", R.string.SendingVideoStatus); } - newPrintingStringsTypes.put(threadId, 4); + type = 4; } else if (pu.action instanceof TLRPC.TL_sendMessageUploadAudioAction) { - if (key < 0 && !isEncryptedChat) { - newPrintingStrings.put(threadId, LocaleController.formatString("IsSendingAudio", R.string.IsSendingAudio, getUserNameForTyping(user))); + if (isGroup) { + text = LocaleController.formatString("IsSendingAudio", R.string.IsSendingAudio, getUserNameForTyping(user)); } else { - newPrintingStrings.put(threadId, LocaleController.getString("SendingAudio", R.string.SendingAudio)); + text = LocaleController.getString("SendingAudio", R.string.SendingAudio); } - newPrintingStringsTypes.put(threadId, 2); + type = 2; } else if (pu.action instanceof TLRPC.TL_sendMessageUploadVideoAction) { - if (key < 0 && !isEncryptedChat) { - newPrintingStrings.put(threadId, LocaleController.formatString("IsSendingVideo", R.string.IsSendingVideo, getUserNameForTyping(user))); + if (isGroup) { + text = LocaleController.formatString("IsSendingVideo", R.string.IsSendingVideo, getUserNameForTyping(user)); } else { - newPrintingStrings.put(threadId, LocaleController.getString("SendingVideoStatus", R.string.SendingVideoStatus)); + text = LocaleController.getString("SendingVideoStatus", R.string.SendingVideoStatus); } - newPrintingStringsTypes.put(threadId, 2); + type = 2; } else if (pu.action instanceof TLRPC.TL_sendMessageRecordVideoAction) { - if (key < 0 && !isEncryptedChat) { - newPrintingStrings.put(threadId, LocaleController.formatString("IsRecordingVideo", R.string.IsRecordingVideo, getUserNameForTyping(user))); + if (isGroup) { + text = LocaleController.formatString("IsRecordingVideo", R.string.IsRecordingVideo, getUserNameForTyping(user)); } else { - newPrintingStrings.put(threadId, 
LocaleController.getString("RecordingVideoStatus", R.string.RecordingVideoStatus)); + text = LocaleController.getString("RecordingVideoStatus", R.string.RecordingVideoStatus); } - newPrintingStringsTypes.put(threadId, 2); + type = 2; } else if (pu.action instanceof TLRPC.TL_sendMessageUploadDocumentAction) { - if (key < 0 && !isEncryptedChat) { - newPrintingStrings.put(threadId, LocaleController.formatString("IsSendingFile", R.string.IsSendingFile, getUserNameForTyping(user))); + if (isGroup) { + text = LocaleController.formatString("IsSendingFile", R.string.IsSendingFile, getUserNameForTyping(user)); } else { - newPrintingStrings.put(threadId, LocaleController.getString("SendingFile", R.string.SendingFile)); + text = LocaleController.getString("SendingFile", R.string.SendingFile); } - newPrintingStringsTypes.put(threadId, 2); + type = 2; } else if (pu.action instanceof TLRPC.TL_sendMessageUploadPhotoAction) { - if (key < 0 && !isEncryptedChat) { - newPrintingStrings.put(threadId, LocaleController.formatString("IsSendingPhoto", R.string.IsSendingPhoto, getUserNameForTyping(user))); + if (isGroup) { + text = LocaleController.formatString("IsSendingPhoto", R.string.IsSendingPhoto, getUserNameForTyping(user)); } else { - newPrintingStrings.put(threadId, LocaleController.getString("SendingPhoto", R.string.SendingPhoto)); + text = LocaleController.getString("SendingPhoto", R.string.SendingPhoto); } - newPrintingStringsTypes.put(threadId, 2); + type = 2; } else if (pu.action instanceof TLRPC.TL_sendMessageGamePlayAction) { - if (key < 0 && !isEncryptedChat) { - newPrintingStrings.put(threadId, LocaleController.formatString("IsSendingGame", R.string.IsSendingGame, getUserNameForTyping(user))); + if (isGroup) { + text = LocaleController.formatString("IsSendingGame", R.string.IsSendingGame, getUserNameForTyping(user)); } else { - newPrintingStrings.put(threadId, LocaleController.getString("SendingGame", R.string.SendingGame)); + text = LocaleController.getString("SendingGame", R.string.SendingGame); } - newPrintingStringsTypes.put(threadId, 3); + type = 3; } else if (pu.action instanceof TLRPC.TL_sendMessageGeoLocationAction) { - if (key < 0 && !isEncryptedChat) { - newPrintingStrings.put(threadId, LocaleController.formatString("IsSelectingLocation", R.string.IsSelectingLocation, getUserNameForTyping(user))); + if (isGroup) { + text = LocaleController.formatString("IsSelectingLocation", R.string.IsSelectingLocation, getUserNameForTyping(user)); } else { - newPrintingStrings.put(threadId, LocaleController.getString("SelectingLocation", R.string.SelectingLocation)); + text = LocaleController.getString("SelectingLocation", R.string.SelectingLocation); } - newPrintingStringsTypes.put(threadId, 0); + type = 0; } else if (pu.action instanceof TLRPC.TL_sendMessageChooseContactAction) { - if (key < 0 && !isEncryptedChat) { - newPrintingStrings.put(threadId, LocaleController.formatString("IsSelectingContact", R.string.IsSelectingContact, getUserNameForTyping(user))); + if (isGroup) { + text = LocaleController.formatString("IsSelectingContact", R.string.IsSelectingContact, getUserNameForTyping(user)); } else { - newPrintingStrings.put(threadId, LocaleController.getString("SelectingContact", R.string.SelectingContact)); + text = LocaleController.getString("SelectingContact", R.string.SelectingContact); } - newPrintingStringsTypes.put(threadId, 0); + type = 0; } else if (pu.action instanceof TLRPC.TL_sendMessageEmojiInteractionSeen) { - String emoji = ((TLRPC.TL_sendMessageEmojiInteractionSeen) 
pu.action).emoticon; - String printingString; - if (key < 0 && !isEncryptedChat) { - printingString = LocaleController.formatString("IsEnjoyngAnimations", R.string.IsEnjoyngAnimations, getUserNameForTyping(user), emoji); + final String emoji = ((TLRPC.TL_sendMessageEmojiInteractionSeen) pu.action).emoticon; + if (isGroup) { + text = LocaleController.formatString("IsEnjoyngAnimations", R.string.IsEnjoyngAnimations, getUserNameForTyping(user), emoji); } else { - printingString = LocaleController.formatString("EnjoyngAnimations", R.string.EnjoyngAnimations, emoji); + text = LocaleController.formatString("EnjoyngAnimations", R.string.EnjoyngAnimations, emoji); } - newPrintingStrings.put(threadId, printingString); - newPrintingStringsTypes.put(threadId, 5); + type = 5; } else if (pu.action instanceof TLRPC.TL_sendMessageChooseStickerAction) { - if (key < 0 && !isEncryptedChat) { - newPrintingStrings.put(threadId, LocaleController.formatString("IsChoosingSticker", R.string.IsChoosingSticker, getUserNameForTyping(user))); + if (isGroup) { + text = LocaleController.formatString("IsChoosingSticker", R.string.IsChoosingSticker, getUserNameForTyping(user)); } else { - newPrintingStrings.put(threadId, LocaleController.getString("ChoosingSticker", R.string.ChoosingSticker)); + text = LocaleController.getString("ChoosingSticker", R.string.ChoosingSticker); } - newPrintingStringsTypes.put(threadId, 5); + type = 5; } else { - if (key < 0 && !isEncryptedChat) { - newPrintingStrings.put(threadId, LocaleController.formatString("IsTypingGroup", R.string.IsTypingGroup, getUserNameForTyping(user))); + if (isGroup) { + text = LocaleController.formatString("IsTypingGroup", R.string.IsTypingGroup, getUserNameForTyping(user)); } else { - newPrintingStrings.put(threadId, LocaleController.getString("Typing", R.string.Typing)); + text = LocaleController.getString("Typing", R.string.Typing); } - newPrintingStringsTypes.put(threadId, 0); + type = 0; } } else { int count = 0; @@ -7427,21 +7731,32 @@ private void updatePrintingStrings() { } if (label.length() != 0) { if (count == 1) { - newPrintingStrings.put(threadId, LocaleController.formatString("IsTypingGroup", R.string.IsTypingGroup, label.toString())); + text = LocaleController.formatString("IsTypingGroup", R.string.IsTypingGroup, label.toString()); } else { if (arr.size() > 2) { String plural = LocaleController.getPluralString("AndMoreTypingGroup", arr.size() - 2); try { - newPrintingStrings.put(threadId, String.format(plural, label.toString(), arr.size() - 2)); + text = String.format(plural, label.toString(), arr.size() - 2); } catch (Exception e) { - newPrintingStrings.put(threadId, "LOC_ERR: AndMoreTypingGroup"); + text = "LOC_ERR: AndMoreTypingGroup"; } } else { - newPrintingStrings.put(threadId, LocaleController.formatString("AreTypingGroup", R.string.AreTypingGroup, label.toString())); + text = LocaleController.formatString("AreTypingGroup", R.string.AreTypingGroup, label.toString()); } } - newPrintingStringsTypes.put(threadId, 0); + type = 0; + } + } + if (text != null) { + Paint paint = Theme.dialogs_messageNamePaint; + if (paint == null) { + paint = new Paint(); + paint.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + paint.setTextSize(AndroidUtilities.dp(14)); } + text = Emoji.replaceEmoji(text, paint.getFontMetricsInt(), false); + newPrintingStrings.put(threadId, text); + newPrintingStringsTypes.put(threadId, type); } } } @@ -7678,7 +7993,6 @@ private void loadMessagesInternal(long dialogId, long mergeDialogId, boolean loa }); 
getConnectionsManager().bindRequestToGuid(reqId, classGuid); } else if (mode == 2) { - } else if (mode == 1) { TLRPC.TL_messages_getScheduledHistory req = new TLRPC.TL_messages_getScheduledHistory(); req.peer = getInputPeer(dialogId); @@ -7864,13 +8178,29 @@ public void processLoadedMessages(TLRPC.messages_Messages messagesRes, int resCo } boolean isInitialLoading = offset_date == 0 && max_id == 0; boolean reload; + LongSparseArray usersDict = new LongSparseArray<>(); + LongSparseArray chatsDict = new LongSparseArray<>(); + for (int a = 0; a < messagesRes.users.size(); a++) { + TLRPC.User u = messagesRes.users.get(a); + usersDict.put(u.id, u); + } + for (int a = 0; a < messagesRes.chats.size(); a++) { + TLRPC.Chat c = messagesRes.chats.get(a); + chatsDict.put(c.id, c); + } if (mode == 1) { reload = ((SystemClock.elapsedRealtime() - lastScheduledServerQueryTime.get(dialogId, 0L)) > 60 * 1000); } else { reload = resCount == 0 && (!isInitialLoading || (SystemClock.elapsedRealtime() - lastServerQueryTime.get(dialogId, 0L)) > 60 * 1000 || (isCache && isTopic)); - if (mode == 0 && isCache && dialogId < 0 && !dialogs_dict.containsKey(dialogId) && (SystemClock.elapsedRealtime() - lastServerQueryTime.get(dialogId, 0L)) > 24 * 60 * 60 * 1000) { - messagesRes.messages.clear(); - reload = true; + if (isCache && dialogId < 0) { + TLRPC.Chat chat = getChat(-dialogId); + if (chat == null) { + chat = chatsDict.get(-dialogId); + } + if (chat != null && mode == 0 && ChatObject.isNotInChat(chat) && (SystemClock.elapsedRealtime() - lastServerQueryTime.get(dialogId, 0L)) > 24 * 60 * 60 * 1000) { + messagesRes.messages.clear(); + reload = true; + } } } if (!DialogObject.isEncryptedDialog(dialogId) && isCache && reload) { @@ -7899,16 +8229,6 @@ public void processLoadedMessages(TLRPC.messages_Messages messagesRes, int resCo return; } } - LongSparseArray usersDict = new LongSparseArray<>(); - LongSparseArray chatsDict = new LongSparseArray<>(); - for (int a = 0; a < messagesRes.users.size(); a++) { - TLRPC.User u = messagesRes.users.get(a); - usersDict.put(u.id, u); - } - for (int a = 0; a < messagesRes.chats.size(); a++) { - TLRPC.Chat c = messagesRes.chats.get(a); - chatsDict.put(c.id, c); - } int size = messagesRes.messages.size(); if (!isCache) { Integer inboxValue = dialogs_read_inbox_max.get(dialogId); @@ -7959,15 +8279,11 @@ public void processLoadedMessages(TLRPC.messages_Messages messagesRes, int resCo ArrayList objects = new ArrayList<>(); ArrayList messagesToReload = new ArrayList<>(); HashMap> webpagesToReload = new HashMap<>(); - TLRPC.InputChannel inputChannel = null; - long fileProcessTime = 0; for (int a = 0; a < size; a++) { TLRPC.Message message = messagesRes.messages.get(a); message.dialog_id = dialogId; long checkFileTime = SystemClock.elapsedRealtime(); MessageObject messageObject = new MessageObject(currentAccount, message, usersDict, chatsDict, true, false); - messageObject.createStrippedThumb(); - fileProcessTime += (SystemClock.elapsedRealtime() - checkFileTime); messageObject.scheduled = mode == 1; objects.add(messageObject); if (isCache) { @@ -7992,10 +8308,14 @@ public void processLoadedMessages(TLRPC.messages_Messages messagesRes, int resCo } } } - getFileLoader().checkMediaExistance(objects); + if (MessageObject.canCreateStripedThubms()) { + for (int i = 0; i < objects.size(); i++) { + objects.get(i).createStrippedThumb(); + } + } if (BuildVars.LOGS_ENABLED) { - FileLog.d("process time = " + (SystemClock.elapsedRealtime() - startProcessTime) + " file time = " + fileProcessTime + " 
for dialog = " + dialogId); + FileLog.d("process time=" + (SystemClock.elapsedRealtime() - startProcessTime) + " count=" + objects.size() + " for dialog " + dialogId); } AndroidUtilities.runOnUIThread(() -> { putUsers(messagesRes.users, isCache); @@ -8328,7 +8648,7 @@ public void loadDialogs(final int folderId, int offset, int count, boolean fromC getConnectionsManager().sendRequest(req, (response, error) -> { if (error == null) { TLRPC.messages_Dialogs dialogsRes = (TLRPC.messages_Dialogs) response; - processLoadedDialogs(dialogsRes, null, folderId, 0, count, 0, false, false, false); + processLoadedDialogs(dialogsRes, null, null, folderId, 0, count, 0, false, false, false); if (onEmptyCallback != null && dialogsRes.dialogs.isEmpty()) { AndroidUtilities.runOnUIThread(onEmptyCallback); } @@ -8501,7 +8821,7 @@ protected void loadUnknownDialog(final TLRPC.InputPeer peer, long taskId) { dialogs.messages.addAll(res.messages); dialogs.users.addAll(res.users); dialogs.chats.addAll(res.chats); - processLoadedDialogs(dialogs, null, dialog.folder_id, 0, 1, DIALOGS_LOAD_TYPE_UNKNOWN, false, false, false); + processLoadedDialogs(dialogs, null, null, dialog.folder_id, 0, 1, DIALOGS_LOAD_TYPE_UNKNOWN, false, false, false); } } if (newTaskId != 0) { @@ -8787,6 +9107,7 @@ protected void completeDialogsReset(final TLRPC.messages_Dialogs dialogsRes, int } } } + getTranslateController().checkDialogMessages(key); } allDialogs.clear(); @@ -9007,7 +9328,7 @@ private void migrateDialogs(int offset, int offsetDate, long offsetUser, long of } } - processLoadedDialogs(dialogsRes, null, 0, offsetId, 0, 0, false, true, false); + processLoadedDialogs(dialogsRes, null, null, 0, offsetId, 0, 0, false, true, false); } catch (Exception e) { FileLog.e(e); AndroidUtilities.runOnUIThread(() -> migratingDialogs = false); @@ -9023,7 +9344,7 @@ private void migrateDialogs(int offset, int offsetDate, long offsetUser, long of private int DIALOGS_LOAD_TYPE_CHANNEL = 2; private int DIALOGS_LOAD_TYPE_UNKNOWN = 3; - public void processLoadedDialogs(final TLRPC.messages_Dialogs dialogsRes, ArrayList encChats, int folderId, int offset, int count, int loadType, boolean resetEnd, boolean migrate, boolean fromCache) { + public void processLoadedDialogs(final TLRPC.messages_Dialogs dialogsRes, ArrayList encChats, ArrayList fullUsers, int folderId, int offset, int count, int loadType, boolean resetEnd, boolean migrate, boolean fromCache) { Utilities.stageQueue.postRunnable(() -> { if (!firstGettingTask) { getNewDeleteTask(null, null); @@ -9037,6 +9358,13 @@ public void processLoadedDialogs(final TLRPC.messages_Dialogs dialogsRes, ArrayL if (loadType == DIALOGS_LOAD_TYPE_CACHE && dialogsRes.dialogs.size() == 0) { AndroidUtilities.runOnUIThread(() -> { putUsers(dialogsRes.users, true); + if (fullUsers != null) { + for (int i = 0; i < fullUsers.size(); i++) { + long did = fullUsers.get(i).id; + this.fullUsers.put(did, fullUsers.get(i)); + getTranslateController().updateDialogFull(did); + } + } loadingDialogs.put(folderId, false); if (resetEnd) { dialogsEndReached.put(folderId, false); @@ -9286,6 +9614,13 @@ public void processLoadedDialogs(final TLRPC.messages_Dialogs dialogsRes, ArrayL } putUsers(dialogsRes.users, loadType == DIALOGS_LOAD_TYPE_CACHE); putChats(dialogsRes.chats, loadType == DIALOGS_LOAD_TYPE_CACHE); + if (fullUsers != null) { + for (int i = 0; i < fullUsers.size(); i++) { + long did = fullUsers.get(i).id; + this.fullUsers.put(did, fullUsers.get(i)); + getTranslateController().updateDialogFull(did); + } + } if (encChats != 
null) { for (int a = 0; a < encChats.size(); a++) { @@ -9336,6 +9671,7 @@ public void processLoadedDialogs(final TLRPC.messages_Dialogs dialogsRes, ArrayL } } } + getTranslateController().checkDialogMessages(key); } else { if (loadType != DIALOGS_LOAD_TYPE_CACHE) { currentDialog.notify_settings = value.notify_settings; @@ -9387,6 +9723,7 @@ public void processLoadedDialogs(final TLRPC.messages_Dialogs dialogsRes, ArrayL } } } + getTranslateController().checkDialogMessages(key); } } else { // if (newMsg == null && oldMs.getId() > 0 || newMsg != null && newMsg.messageOwner.date > oldMsg.messageOwner.date) @@ -9424,6 +9761,7 @@ public void processLoadedDialogs(final TLRPC.messages_Dialogs dialogsRes, ArrayL } } } + getTranslateController().checkDialogMessages(key); } } } @@ -9881,6 +10219,7 @@ public void processDialogsUpdate(final TLRPC.messages_Dialogs dialogsRes, ArrayL FileLog.d("processDialogsUpdate new message not null"); } } + getTranslateController().checkDialogMessages(key); } else { if (BuildVars.LOGS_ENABLED) { FileLog.d("processDialogsUpdate dialog not null"); @@ -9940,6 +10279,7 @@ public void processDialogsUpdate(final TLRPC.messages_Dialogs dialogsRes, ArrayL } } } + getTranslateController().checkDialogMessages(key); } if (fromCache && newMsgs == null) { checkLastDialogMessage(value, null, 0); @@ -9986,6 +10326,7 @@ public void processDialogsUpdate(final TLRPC.messages_Dialogs dialogsRes, ArrayL } } } + getTranslateController().checkDialogMessages(key); } } } @@ -10032,46 +10373,6 @@ public void addToViewsQueue(MessageObject messageObject) { }); } - public void loadExtendedMediaForMessages(long dialogId, ArrayList visibleObjects) { - if (visibleObjects.isEmpty()) { - return; - } - TLRPC.TL_messages_getExtendedMedia req = new TLRPC.TL_messages_getExtendedMedia(); - req.peer = getInputPeer(dialogId); - for (int i = 0; i < visibleObjects.size(); i++) { - MessageObject messageObject = visibleObjects.get(i); - req.id.add(messageObject.getId()); - } - getConnectionsManager().sendRequest(req, (response, error) -> { - if (error == null) { - processUpdates((TLRPC.Updates) response, false); - } - }); - } - - public void loadReactionsForMessages(long dialogId, ArrayList visibleObjects) { - if (visibleObjects.isEmpty()) { - return; - } - TLRPC.TL_messages_getMessagesReactions req = new TLRPC.TL_messages_getMessagesReactions(); - req.peer = getInputPeer(dialogId); - for (int i = 0; i < visibleObjects.size(); i++) { - MessageObject messageObject = visibleObjects.get(i); - req.id.add(messageObject.getId()); - } - getConnectionsManager().sendRequest(req, (response, error) -> { - if (error == null) { - TLRPC.Updates updates = (TLRPC.Updates) response; - for (int i = 0; i < updates.updates.size(); i++) { - if (updates.updates.get(i) instanceof TLRPC.TL_updateMessageReactions) { - ((TLRPC.TL_updateMessageReactions) updates.updates.get(i)).updateUnreadState = false; - } - } - processUpdates(updates, false); - } - }); - } - public void addToPollsQueue(long dialogId, ArrayList visibleObjects) { SparseArray array = pollsToCheck.get(dialogId); if (array == null) { @@ -10514,40 +10815,17 @@ public int createChat(String title, ArrayList selectedContacts, String abo if (type == ChatObject.CHAT_TYPE_CHAT && !forImport) { TLRPC.TL_messages_createChat req = new TLRPC.TL_messages_createChat(); req.title = title; - TLObject nekoxBot = null; - if (selectedContacts.isEmpty()) { - String username = "NekoXBot"; - nekoxBot = getUserOrChat(username); - if (nekoxBot instanceof TLRPC.User) { - 
req.users.add(getInputUser((TLRPC.User) nekoxBot)); - } else { - TLRPC.TL_contacts_resolveUsername req1 = new TLRPC.TL_contacts_resolveUsername(); - req1.username = username; - return getConnectionsManager().sendRequest(req1, (response, error) -> AndroidUtilities.runOnUIThread(() -> { - if (error == null) { - TLRPC.TL_contacts_resolvedPeer res = (TLRPC.TL_contacts_resolvedPeer) response; - putUsers(res.users, false); - putChats(res.chats, false); - getMessagesStorage().putUsersAndChats(res.users, res.chats, false, true); - createChat(title, selectedContacts, about, type, forImport, location, locationAddress, 0, fragment); - } else { - AndroidUtilities.runOnUIThread(() -> { - AlertsCreator.processError(currentAccount, error, fragment, req); - getNotificationCenter().postNotificationName(NotificationCenter.chatDidFailCreate); - }); - } - })); - } - } else { - for (int a = 0; a < selectedContacts.size(); a++) { - TLRPC.User user = getUser(selectedContacts.get(a)); - if (user == null) { - continue; - } - req.users.add(getInputUser(user)); + if (ttlPeriod >= 0) { + req.ttl_period = ttlPeriod; + req.flags |= 1; + } + for (int a = 0; a < selectedContacts.size(); a++) { + TLRPC.User user = getUser(selectedContacts.get(a)); + if (user == null) { + continue; } + req.users.add(getInputUser(user)); } - TLObject finalNekoxBot = nekoxBot; return getConnectionsManager().sendRequest(req, (response, error) -> { if (error != null) { AndroidUtilities.runOnUIThread(() -> { @@ -10559,29 +10837,27 @@ public int createChat(String title, ArrayList selectedContacts, String abo TLRPC.Updates updates = (TLRPC.Updates) response; processUpdates(updates, false); AndroidUtilities.runOnUIThread(() -> { - if (finalNekoxBot instanceof TLRPC.User) { - getMessagesController().deleteParticipantFromChat(updates.chats.get(0).id, (TLRPC.User) finalNekoxBot); - } - putUsers(updates.users, false); putChats(updates.chats, false); if (updates.chats != null && !updates.chats.isEmpty()) { getNotificationCenter().postNotificationName(NotificationCenter.chatDidCreated, updates.chats.get(0).id); + AlertsCreator.checkRestrictedInviteUsers(currentAccount, updates.chats.get(0), updates); } else { getNotificationCenter().postNotificationName(NotificationCenter.chatDidFailCreate); } }); }, ConnectionsManager.RequestFlagFailOnServerErrors); - } else if (forImport || type == ChatObject.CHAT_TYPE_CHANNEL || type == ChatObject.CHAT_TYPE_MEGAGROUP) { + } else if (forImport || type == ChatObject.CHAT_TYPE_CHANNEL || type == ChatObject.CHAT_TYPE_MEGAGROUP || type == ChatObject.CHAT_TYPE_FORUM) { TLRPC.TL_channels_createChannel req = new TLRPC.TL_channels_createChannel(); req.title = title; req.about = about != null ? 
about : ""; req.for_import = forImport; - if (forImport || type == ChatObject.CHAT_TYPE_MEGAGROUP) { + if (forImport || type == ChatObject.CHAT_TYPE_MEGAGROUP || type == ChatObject.CHAT_TYPE_FORUM) { req.megagroup = true; } else { req.broadcast = true; } + req.forum = type == ChatObject.CHAT_TYPE_FORUM; if (location != null) { req.geo_point = new TLRPC.TL_inputGeoPoint(); req.geo_point.lat = location.getLatitude(); @@ -10743,6 +11019,10 @@ public void addUsersToChannel(long chatId, ArrayList users, Bas return; } processUpdates((TLRPC.Updates) response, false); + AndroidUtilities.runOnUIThread(() -> { + AlertsCreator.checkRestrictedInviteUsers(currentAccount, getChat(chatId), (TLRPC.Updates) response); + }); + }); } @@ -10956,6 +11236,81 @@ public interface ErrorDelegate { public boolean run(TLRPC.TL_error error); } + public void addUsersToChat(TLRPC.Chat currentChat, BaseFragment baseFragment, ArrayList users, int fwdCount, Consumer onAddUser, Consumer onRestricted, Runnable onComplete) { + final int count = users.size(); + final int[] processed = new int[1]; + final ArrayList userRestrictedPrivacy = new ArrayList<>(); + processed[0] = 0; + final Runnable showUserRestrictedPrivacyAlert = () -> { + AndroidUtilities.runOnUIThread(() ->{ + BaseFragment lastFragment = LaunchActivity.getLastFragment(); + if (lastFragment != null && lastFragment.getParentActivity() != null) { +// if (ChatObject.canUserDoAdminAction(currentChat, ChatObject.ACTION_INVITE)) { + LimitReachedBottomSheet restricterdUsersBottomSheet = new LimitReachedBottomSheet(lastFragment, lastFragment.getParentActivity(), LimitReachedBottomSheet.TYPE_ADD_MEMBERS_RESTRICTED, currentAccount); + restricterdUsersBottomSheet.setRestrictedUsers(currentChat, userRestrictedPrivacy); + restricterdUsersBottomSheet.show(); +// } else { +// CharSequence title, description; +// if (userRestrictedPrivacy.size() == 1) { +// if (count > 1) { +// title = LocaleController.getString("InviteToGroupErrorTitleAUser", R.string.InviteToGroupErrorTitleAUser); +// } else { +// title = LocaleController.getString("InviteToGroupErrorTitleThisUser", R.string.InviteToGroupErrorTitleThisUser); +// } +// description = AndroidUtilities.replaceTags(LocaleController.formatString("InviteToGroupErrorMessageSingle", R.string.InviteToGroupErrorMessageSingle, UserObject.getFirstName(userRestrictedPrivacy.get(0)))); +// } else if (userRestrictedPrivacy.size() == 2) { +// title = LocaleController.getString("InviteToGroupErrorTitleSomeUsers", R.string.InviteToGroupErrorTitleSomeUsers); +// description = AndroidUtilities.replaceTags(LocaleController.formatString("InviteToGroupErrorMessageDouble", R.string.InviteToGroupErrorMessageDouble, UserObject.getFirstName(userRestrictedPrivacy.get(0)), UserObject.getFirstName(userRestrictedPrivacy.get(1)))); +// } else if (userRestrictedPrivacy.size() == count) { +// title = LocaleController.getString("InviteToGroupErrorTitleTheseUsers", R.string.InviteToGroupErrorTitleTheseUsers); +// description = LocaleController.getString("InviteToGroupErrorMessageMultipleAll", R.string.InviteToGroupErrorMessageMultipleAll); +// } else { +// title = LocaleController.getString("InviteToGroupErrorTitleSomeUsers", R.string.InviteToGroupErrorTitleSomeUsers); +// description = LocaleController.getString("InviteToGroupErrorMessageMultipleSome", R.string.InviteToGroupErrorMessageMultipleSome); +// } +// new AlertDialog.Builder(lastFragment.getParentActivity()) +// .setTitle(title) +// .setMessage(description) +// 
.setPositiveButton(LocaleController.getString("OK", R.string.OK), null) +// .show(); +// } + } + }, 200); + }; + long chatId = currentChat.id; + for (int a = 0; a < count; a++) { + final TLRPC.User user = users.get(a); + addUserToChat(chatId, user, fwdCount, null, baseFragment, false, () -> { + if (onAddUser != null) { + onAddUser.accept(user); + } + processed[0]++; + if (processed[0] >= count) { + if (userRestrictedPrivacy.size() > 0) { + showUserRestrictedPrivacyAlert.run(); + } + if (onComplete != null) { + onComplete.run(); + } + } + }, err -> { + processed[0]++; + boolean privacyRestricted; + if (privacyRestricted = err != null && "USER_PRIVACY_RESTRICTED".equals(err.text)) { + userRestrictedPrivacy.add(user); + } + if (processed[0] >= count && userRestrictedPrivacy.size() > 0) { + showUserRestrictedPrivacyAlert.run(); + } + if (onRestricted != null) { + onRestricted.accept(user); + } + return !privacyRestricted; + }); + putUser(user, false); + } + } + public void addUserToChat(long chatId, TLRPC.User user, int forwardCount, String botHash, BaseFragment fragment, boolean ignoreIfAlreadyExists, Runnable onFinishRunnable, ErrorDelegate onError) { if (user == null) { if (onError != null) { @@ -11054,6 +11409,9 @@ public void addUserToChat(long chatId, TLRPC.User user, int forwardCount, String if (!hasJoinMessage && inputUser instanceof TLRPC.TL_inputUserSelf) { generateJoinMessage(chatId, true); } + AndroidUtilities.runOnUIThread(() -> { + AlertsCreator.checkRestrictedInviteUsers(currentAccount, getChat(chatId), (TLRPC.Updates) response); + }); AndroidUtilities.runOnUIThread(() -> loadFullChat(chatId, 0, true), 1000); } if (isChannel && inputUser instanceof TLRPC.TL_inputUserSelf) { @@ -11235,12 +11593,12 @@ public void changeChatTitle(long chatId, String title) { }, ConnectionsManager.RequestFlagInvokeAfter); } - public void changeChatAvatar(long chatId, TLRPC.TL_inputChatPhoto oldPhoto, TLRPC.InputFile inputPhoto, TLRPC.InputFile inputVideo, double videoStartTimestamp, String videoPath, TLRPC.FileLocation smallSize, TLRPC.FileLocation bigSize, Runnable callback) { + public void changeChatAvatar(long chatId, TLRPC.TL_inputChatPhoto oldPhoto, TLRPC.InputFile inputPhoto, TLRPC.InputFile inputVideo, TLRPC.VideoSize emojiMarkup, double videoStartTimestamp, String videoPath, TLRPC.FileLocation smallSize, TLRPC.FileLocation bigSize, Runnable callback) { TLObject request; TLRPC.InputChatPhoto inputChatPhoto; if (oldPhoto != null) { inputChatPhoto = oldPhoto; - } else if (inputPhoto != null || inputVideo != null) { + } else if (inputPhoto != null || inputVideo != null || emojiMarkup != null) { TLRPC.TL_inputChatUploadedPhoto uploadedPhoto = new TLRPC.TL_inputChatUploadedPhoto(); if (inputPhoto != null) { uploadedPhoto.file = inputPhoto; @@ -11252,6 +11610,10 @@ public void changeChatAvatar(long chatId, TLRPC.TL_inputChatPhoto oldPhoto, TLRP uploadedPhoto.video_start_ts = videoStartTimestamp; uploadedPhoto.flags |= 4; } + if (emojiMarkup != null) { + uploadedPhoto.video_emoji_markup = emojiMarkup; + uploadedPhoto.flags |= 8; + } inputChatPhoto = uploadedPhoto; } else { inputChatPhoto = new TLRPC.TL_inputChatPhotoEmpty(); @@ -11312,6 +11674,7 @@ public void changeChatAvatar(long chatId, TLRPC.TL_inputChatPhoto oldPhoto, TLRP File src = new File(videoPath); src.renameTo(destFile); } + getMessagesStorage().addDialogPhoto(-chatId, photo); } } processUpdates(updates, false); @@ -11320,6 +11683,7 @@ public void changeChatAvatar(long chatId, TLRPC.TL_inputChatPhoto oldPhoto, TLRP callback.run(); } 
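// Annotation (not part of the patch): changeChatAvatar gains a TLRPC.VideoSize emojiMarkup
// parameter; when present it is attached to the uploaded chat photo under its own TL presence
// bit (8), next to the existing video_start_ts bit (4) visible in the hunk above. Sketch of
// that optional-field wiring only — the guards are illustrative, and presence bits other than
// 4 and 8 are not shown in this hunk, so they are omitted here.
TLRPC.TL_inputChatUploadedPhoto uploadedPhoto = new TLRPC.TL_inputChatUploadedPhoto();
if (videoStartTimestamp >= 0) {                 // illustrative guard
    uploadedPhoto.video_start_ts = videoStartTimestamp;
    uploadedPhoto.flags |= 4;                   // presence bit for video_start_ts
}
if (emojiMarkup != null) {
    uploadedPhoto.video_emoji_markup = emojiMarkup;
    uploadedPhoto.flags |= 8;                   // presence bit for video_emoji_markup
}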
getNotificationCenter().postNotificationName(NotificationCenter.updateInterfaces, MessagesController.UPDATE_MASK_AVATAR); + getNotificationCenter().postNotificationName(NotificationCenter.reloadDialogPhotos); }); }, ConnectionsManager.RequestFlagInvokeAfter); } @@ -11350,13 +11714,8 @@ public void performLogout(int type) { getConnectionsManager().cleanup(false); AndroidUtilities.runOnUIThread(() -> { if (response instanceof TLRPC.TL_auth_loggedOut) { - TLRPC.TL_auth_loggedOut res = (TLRPC.TL_auth_loggedOut) response; if (((TLRPC.TL_auth_loggedOut) response).future_auth_token != null) { - SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("saved_tokens", Context.MODE_PRIVATE); - int count = preferences.getInt("count", 0); - SerializedData data = new SerializedData(response.getObjectSize()); - res.serializeToStream(data); - preferences.edit().putString("log_out_token_" + count, Utilities.bytesToHex(data.toByteArray())).putInt("count", count + 1).apply(); + AuthTokensHelper.addLogOutToken((TLRPC.TL_auth_loggedOut) response); } } }); @@ -11404,44 +11763,6 @@ public void performLogout(int type) { SharedConfig.saveAccounts(); } - public static ArrayList getSavedLogOutTokens() { - SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("saved_tokens", Context.MODE_PRIVATE); - int count = preferences.getInt("count", 0); - if (count == 0) { - return null; - } - - ArrayList tokens = new ArrayList<>(); - for (int i = 0; i < count; i++) { - String value = preferences.getString("log_out_token_" + i, ""); - SerializedData serializedData = new SerializedData(Utilities.hexToBytes(value)); - TLRPC.TL_auth_loggedOut token = TLRPC.TL_auth_loggedOut.TLdeserialize(serializedData, serializedData.readInt32(true), true); - if (token != null) { - tokens.add(token); - } - } - return tokens; - } - - public static void saveLogOutTokens(ArrayList tokens) { - SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("saved_tokens", Context.MODE_PRIVATE); - ArrayList activeTokens = new ArrayList<>(); - preferences.edit().clear().apply(); - int date = (int) (System.currentTimeMillis() / 1000L); - for (int i = 0; i < Math.min(20, tokens.size()); i++) { - activeTokens.add(tokens.get(i)); - } - if (activeTokens.size() > 0) { - SharedPreferences.Editor editor = preferences.edit(); - editor.putInt("count", activeTokens.size()); - for (int i = 0; i < activeTokens.size(); i++) { - SerializedData data = new SerializedData(activeTokens.get(i).getObjectSize()); - activeTokens.get(i).serializeToStream(data); - editor.putString("log_out_token_" + i, Utilities.bytesToHex(data.toByteArray())); - } - editor.apply(); - } - } private boolean gettingAppChangelog; @@ -11757,7 +12078,7 @@ protected void loadUnknownChannel(final TLRPC.Chat channel, long taskId) { dialogs.messages.addAll(res.messages); dialogs.users.addAll(res.users); dialogs.chats.addAll(res.chats); - processLoadedDialogs(dialogs, null, dialog.folder_id, 0, 1, DIALOGS_LOAD_TYPE_CHANNEL, false, false, false); + processLoadedDialogs(dialogs, null, null, dialog.folder_id, 0, 1, DIALOGS_LOAD_TYPE_CHANNEL, false, false, false); } } if (newTaskId != 0) { @@ -12740,6 +13061,7 @@ public void loadPinnedDialogs(final int folderId, long newDialogId, ArrayList updates, ArrayList> array; if (editingMessages == null) { @@ -15341,7 +15664,7 @@ public boolean processUpdateArray(ArrayList updates, ArrayList 0) { + if ((update.flags & 1) != 0) { 
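// Annotation (not part of the patch): the check right below replaces the removed
// `update.order.size() > 0` test with a TL presence check — bit 0 of update.flags says the
// server actually sent the `order` field, so an explicitly sent empty order is still applied
// instead of forcing a topic reload. Generic form of the test; the constant name is assumed,
// not from the source:
final int FLAG_HAS_ORDER = 1;                               // bit 0 of update.flags
boolean orderWasSent = (update.flags & FLAG_HAS_ORDER) != 0;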
getTopicsController().applyPinnedOrder(update.channel_id, update.order); } else { getTopicsController().reloadTopics(update.channel_id, false); @@ -16275,6 +16598,8 @@ public SponsoredMessagesInfo getSponsoredMessages(long dialogId) { messageObject.sponsoredChatInviteHash = sponsoredMessage.chat_invite_hash; messageObject.sponsoredRecommended = sponsoredMessage.recommended; messageObject.sponsoredShowPeerPhoto = sponsoredMessage.show_peer_photo; + messageObject.sponsoredInfo = sponsoredMessage.sponsor_info; + messageObject.sponsoredAdditionalInfo = sponsoredMessage.additional_info; result.add(messageObject); } } @@ -16584,6 +16909,7 @@ public boolean updateInterfaceWithMessages(long dialogId, ArrayList chatsDict) { + if (chatsDict == null && ApplicationLoader.mainInterfacePaused) { + return; + } dialogsServerOnly.clear(); dialogsCanAddUsers.clear(); dialogsMyGroups.clear(); @@ -16741,7 +17075,11 @@ public void sortDialogs(LongSparseArray chatsDict) { if (sortingDialogFilter == null) { continue; } - Collections.sort(allDialogs, dialogDateComparator); + try { + Collections.sort(allDialogs, dialogDateComparator); + } catch (Exception e) { + FileLog.e(e); + } ArrayList dialogsByFilter = sortingDialogFilter.dialogs; for (int a = 0, N = allDialogs.size(); a < N; a++) { @@ -17300,6 +17638,73 @@ public void setChatReactions(long chatId, int type, List reactions) { }); } + public boolean matchesAdminRights(TLRPC.Chat chat, TLRPC.User user, TLRPC.TL_chatAdminRights rights) { + if (rights == null) { + return true; + } + TLRPC.TL_chatAdminRights userRights = getChatAdminRightsCached(chat, user); + return ( + (!rights.change_info || userRights != null && userRights.change_info) && + (!rights.post_messages || userRights != null && userRights.post_messages) && + (!rights.edit_messages || userRights != null && userRights.edit_messages) && + (!rights.delete_messages || userRights != null && userRights.delete_messages) && + (!rights.ban_users || userRights != null && userRights.ban_users) && + (!rights.invite_users || userRights != null && userRights.invite_users) && + (!rights.pin_messages || userRights != null && userRights.pin_messages) && + (!rights.add_admins || userRights != null && userRights.add_admins) && + (!rights.anonymous || userRights != null && userRights.anonymous) && + (!rights.manage_call || userRights != null && userRights.manage_call) && + (!rights.other || userRights != null && userRights.other) && + (!rights.manage_topics || userRights != null && userRights.manage_topics) + ); + } + + public TLRPC.TL_chatAdminRights getChatAdminRightsCached(TLRPC.Chat chat, TLRPC.User user) { + if (chat == null || user == null) { + return null; + } + if (UserObject.isUserSelf(user)) { + return chat.admin_rights; + } + final TLRPC.ChatFull chatFull = getChatFull(chat.id); + if (chatFull == null || chatFull.participants == null || chatFull.participants.participants == null) { + return null; + } + final ArrayList participants = chatFull.participants.participants; + + for (int i = 0; i < participants.size(); ++i) { + TLRPC.ChatParticipant participant = participants.get(i); + if (participant != null && participant.user_id == user.id) { + if (participant instanceof TLRPC.TL_chatChannelParticipant && ((TLRPC.TL_chatChannelParticipant) participant).channelParticipant != null) { + return ((TLRPC.TL_chatChannelParticipant) participant).channelParticipant.admin_rights; + } + return null; + } + } + return null; + } + + public boolean isInChatCached(TLRPC.Chat chat, TLRPC.User user) { + if (chat == null || user == 
null) { + return false; + } + if (UserObject.isUserSelf(user)) { + return !ChatObject.isNotInChat(chat); + } + final TLRPC.ChatFull chatFull = getChatFull(chat.id); + if (chatFull == null || chatFull.participants == null || chatFull.participants.participants == null) { + return false; + } + final ArrayList participants = chatFull.participants.participants; + for (int i = 0; i < participants.size(); ++i) { + TLRPC.ChatParticipant participant = participants.get(i); + if (participant != null && participant.user_id == user.id) { + return true; + } + } + return false; + } + public void checkIsInChat(boolean tryCacheFirst, TLRPC.Chat chat, TLRPC.User user, IsInChatCheckedCallback callback) { if (chat == null || user == null) { if (callback != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesStorage.java b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesStorage.java index bec498a0e4..e6994d2799 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesStorage.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesStorage.java @@ -15,6 +15,7 @@ import android.text.Spanned; import android.text.TextUtils; import android.text.style.ForegroundColorSpan; +import android.util.Log; import android.util.Pair; import android.util.SparseArray; import android.util.SparseIntArray; @@ -35,6 +36,7 @@ import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Adapters.DialogsSearchAdapter; +import org.telegram.ui.DialogsActivity; import org.telegram.ui.EditWidgetActivity; import java.io.File; @@ -94,7 +96,7 @@ public class MessagesStorage extends BaseController { private static SparseArray Instance = new SparseArray(); private static final Object lockObject = new Object(); - private final static int LAST_DB_VERSION = 111; + public final static int LAST_DB_VERSION = 115; private boolean databaseMigrationInProgress; public boolean showClearDatabaseAlert; private LongSparseIntArray dialogIsForum = new LongSparseIntArray(); @@ -261,6 +263,20 @@ public long getDatabaseSize() { } public void openDatabase(int openTries) { + if (!NativeLoader.loaded()) { + int tryCount = 0; + while (!NativeLoader.loaded()) { + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + e.printStackTrace(); + } + tryCount++; + if (tryCount > 5) { + break; + } + } + } File filesDir = ApplicationLoader.getFilesDirFixed(); if (currentAccount != 0) { filesDir = new File(filesDir, "account" + currentAccount + "/"); @@ -286,170 +302,7 @@ public void openDatabase(int openTries) { if (BuildVars.LOGS_ENABLED) { FileLog.d("create new database"); } - database.executeFast("CREATE TABLE messages_holes(uid INTEGER, start INTEGER, end INTEGER, PRIMARY KEY(uid, start));").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_end_messages_holes ON messages_holes(uid, end);").stepThis().dispose(); - - database.executeFast("CREATE TABLE media_holes_v2(uid INTEGER, type INTEGER, start INTEGER, end INTEGER, PRIMARY KEY(uid, type, start));").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_end_media_holes_v2 ON media_holes_v2(uid, type, end);").stepThis().dispose(); - - database.executeFast("CREATE TABLE scheduled_messages_v2(mid INTEGER, uid INTEGER, send_state INTEGER, date INTEGER, data BLOB, ttl INTEGER, replydata BLOB, reply_to_message_id INTEGER, PRIMARY KEY(mid, uid))").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS send_state_idx_scheduled_messages_v2 ON 
scheduled_messages_v2(mid, send_state, date);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_date_idx_scheduled_messages_v2 ON scheduled_messages_v2(uid, date);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS reply_to_idx_scheduled_messages_v2 ON scheduled_messages_v2(mid, reply_to_message_id);").stepThis().dispose(); - - database.executeFast("CREATE TABLE messages_v2(mid INTEGER, uid INTEGER, read_state INTEGER, send_state INTEGER, date INTEGER, data BLOB, out INTEGER, ttl INTEGER, media INTEGER, replydata BLOB, imp INTEGER, mention INTEGER, forwards INTEGER, replies_data BLOB, thread_reply_id INTEGER, is_channel INTEGER, reply_to_message_id INTEGER, custom_params BLOB, group_id INTEGER, PRIMARY KEY(mid, uid))").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_mid_read_out_idx_messages_v2 ON messages_v2(uid, mid, read_state, out);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_date_mid_idx_messages_v2 ON messages_v2(uid, date, mid);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS mid_out_idx_messages_v2 ON messages_v2(mid, out);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS task_idx_messages_v2 ON messages_v2(uid, out, read_state, ttl, date, send_state);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS send_state_idx_messages_v2 ON messages_v2(mid, send_state, date);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_mention_idx_messages_v2 ON messages_v2(uid, mention, read_state);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS is_channel_idx_messages_v2 ON messages_v2(mid, is_channel);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS reply_to_idx_messages_v2 ON messages_v2(mid, reply_to_message_id);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_mid_groupid_messages_v2 ON messages_v2(uid, mid, group_id);").stepThis().dispose(); - - database.executeFast("CREATE TABLE download_queue(uid INTEGER, type INTEGER, date INTEGER, data BLOB, parent TEXT, PRIMARY KEY (uid, type));").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS type_date_idx_download_queue ON download_queue(type, date);").stepThis().dispose(); - - database.executeFast("CREATE TABLE user_contacts_v7(key TEXT PRIMARY KEY, uid INTEGER, fname TEXT, sname TEXT, imported INTEGER)").stepThis().dispose(); - database.executeFast("CREATE TABLE user_phones_v7(key TEXT, phone TEXT, sphone TEXT, deleted INTEGER, PRIMARY KEY (key, phone))").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS sphone_deleted_idx_user_phones ON user_phones_v7(sphone, deleted);").stepThis().dispose(); - - database.executeFast("CREATE TABLE dialogs(did INTEGER PRIMARY KEY, date INTEGER, unread_count INTEGER, last_mid INTEGER, inbox_max INTEGER, outbox_max INTEGER, last_mid_i INTEGER, unread_count_i INTEGER, pts INTEGER, date_i INTEGER, pinned INTEGER, flags INTEGER, folder_id INTEGER, data BLOB, unread_reactions INTEGER, last_mid_group INTEGER, ttl_period INTEGER)").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS date_idx_dialogs ON dialogs(date);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS last_mid_idx_dialogs ON dialogs(last_mid);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS unread_count_idx_dialogs ON 
dialogs(unread_count);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS last_mid_i_idx_dialogs ON dialogs(last_mid_i);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS unread_count_i_idx_dialogs ON dialogs(unread_count_i);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS folder_id_idx_dialogs ON dialogs(folder_id);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS flags_idx_dialogs ON dialogs(flags);").stepThis().dispose(); - - database.executeFast("CREATE TABLE dialog_filter_neko(id INTEGER PRIMARY KEY, ord INTEGER, unread_count INTEGER, flags INTEGER, title TEXT, emoticon TEXT)").stepThis().dispose(); - database.executeFast("CREATE TABLE dialog_filter_ep(id INTEGER, peer INTEGER, PRIMARY KEY (id, peer))").stepThis().dispose(); - database.executeFast("CREATE TABLE dialog_filter_pin_v2(id INTEGER, peer INTEGER, pin INTEGER, PRIMARY KEY (id, peer))").stepThis().dispose(); - - database.executeFast("CREATE TABLE randoms_v2(random_id INTEGER, mid INTEGER, uid INTEGER, PRIMARY KEY (random_id, mid, uid))").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS mid_idx_randoms_v2 ON randoms_v2(mid, uid);").stepThis().dispose(); - - database.executeFast("CREATE TABLE enc_tasks_v4(mid INTEGER, uid INTEGER, date INTEGER, media INTEGER, PRIMARY KEY(mid, uid, media))").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS date_idx_enc_tasks_v4 ON enc_tasks_v4(date);").stepThis().dispose(); - - database.executeFast("CREATE TABLE messages_seq(mid INTEGER PRIMARY KEY, seq_in INTEGER, seq_out INTEGER);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS seq_idx_messages_seq ON messages_seq(seq_in, seq_out);").stepThis().dispose(); - - database.executeFast("CREATE TABLE params(id INTEGER PRIMARY KEY, seq INTEGER, pts INTEGER, date INTEGER, qts INTEGER, lsv INTEGER, sg INTEGER, pbytes BLOB)").stepThis().dispose(); - database.executeFast("INSERT INTO params VALUES(1, 0, 0, 0, 0, 0, 0, NULL)").stepThis().dispose(); - - database.executeFast("CREATE TABLE media_v4(mid INTEGER, uid INTEGER, date INTEGER, type INTEGER, data BLOB, PRIMARY KEY(mid, uid, type))").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_mid_type_date_idx_media_v4 ON media_v4(uid, mid, type, date);").stepThis().dispose(); - - database.executeFast("CREATE TABLE bot_keyboard(uid INTEGER PRIMARY KEY, mid INTEGER, info BLOB)").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS bot_keyboard_idx_mid_v2 ON bot_keyboard(mid, uid);").stepThis().dispose(); - - database.executeFast("CREATE TABLE chat_settings_v2(uid INTEGER PRIMARY KEY, info BLOB, pinned INTEGER, online INTEGER, inviter INTEGER, links INTEGER)").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS chat_settings_pinned_idx ON chat_settings_v2(uid, pinned) WHERE pinned != 0;").stepThis().dispose(); - - database.executeFast("CREATE TABLE user_settings(uid INTEGER PRIMARY KEY, info BLOB, pinned INTEGER)").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS user_settings_pinned_idx ON user_settings(uid, pinned) WHERE pinned != 0;").stepThis().dispose(); - - database.executeFast("CREATE TABLE chat_pinned_v2(uid INTEGER, mid INTEGER, data BLOB, PRIMARY KEY (uid, mid));").stepThis().dispose(); - database.executeFast("CREATE TABLE chat_pinned_count(uid INTEGER PRIMARY KEY, count INTEGER, end INTEGER);").stepThis().dispose(); - - 
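For orientation: the inline schema statements being removed in this hunk reappear below, essentially unchanged, as the body of the new public static createTables(SQLiteDatabase) helper (which also gains a few new tables such as bot_keyboard_topics, emoji_groups and app_config), so openDatabase() reduces to roughly the decision sketched here. This is a condensed sketch, not the literal method body; the createNewDatabase flag is named only for illustration, while executeInt, createTables, updateDbToLastVersion and LAST_DB_VERSION (now 115) are taken from the surrounding hunks.

    // Condensed sketch of the create-vs-migrate split in openDatabase() after this refactor.
    if (createNewDatabase) {
        // Fresh file: build every table and index in one place;
        // createTables() ends with PRAGMA user_version = LAST_DB_VERSION.
        createTables(database);
    } else {
        int version = database.executeInt("PRAGMA user_version");
        if (version < LAST_DB_VERSION) {
            // Existing file: run the incremental migrations up to the new version.
            updateDbToLastVersion(version);
        }
    }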
database.executeFast("CREATE TABLE chat_hints(did INTEGER, type INTEGER, rating REAL, date INTEGER, PRIMARY KEY(did, type))").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS chat_hints_rating_idx ON chat_hints(rating);").stepThis().dispose(); - - database.executeFast("CREATE TABLE botcache(id TEXT PRIMARY KEY, date INTEGER, data BLOB)").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS botcache_date_idx ON botcache(date);").stepThis().dispose(); - - database.executeFast("CREATE TABLE users_data(uid INTEGER PRIMARY KEY, about TEXT)").stepThis().dispose(); - database.executeFast("CREATE TABLE users(uid INTEGER PRIMARY KEY, name TEXT, status INTEGER, data BLOB)").stepThis().dispose(); - database.executeFast("CREATE TABLE chats(uid INTEGER PRIMARY KEY, name TEXT, data BLOB)").stepThis().dispose(); - database.executeFast("CREATE TABLE enc_chats(uid INTEGER PRIMARY KEY, user INTEGER, name TEXT, data BLOB, g BLOB, authkey BLOB, ttl INTEGER, layer INTEGER, seq_in INTEGER, seq_out INTEGER, use_count INTEGER, exchange_id INTEGER, key_date INTEGER, fprint INTEGER, fauthkey BLOB, khash BLOB, in_seq_no INTEGER, admin_id INTEGER, mtproto_seq INTEGER)").stepThis().dispose(); - database.executeFast("CREATE TABLE channel_users_v2(did INTEGER, uid INTEGER, date INTEGER, data BLOB, PRIMARY KEY(did, uid))").stepThis().dispose(); - database.executeFast("CREATE TABLE channel_admins_v3(did INTEGER, uid INTEGER, data BLOB, PRIMARY KEY(did, uid))").stepThis().dispose(); - database.executeFast("CREATE TABLE contacts(uid INTEGER PRIMARY KEY, mutual INTEGER)").stepThis().dispose(); - database.executeFast("CREATE TABLE user_photos(uid INTEGER, id INTEGER, data BLOB, PRIMARY KEY (uid, id))").stepThis().dispose(); - database.executeFast("CREATE TABLE dialog_settings(did INTEGER PRIMARY KEY, flags INTEGER);").stepThis().dispose(); - database.executeFast("CREATE TABLE web_recent_v3(id TEXT, type INTEGER, image_url TEXT, thumb_url TEXT, local_url TEXT, width INTEGER, height INTEGER, size INTEGER, date INTEGER, document BLOB, PRIMARY KEY (id, type));").stepThis().dispose(); - database.executeFast("CREATE TABLE stickers_v2(id INTEGER PRIMARY KEY, data BLOB, date INTEGER, hash INTEGER);").stepThis().dispose(); - database.executeFast("CREATE TABLE stickers_featured(id INTEGER PRIMARY KEY, data BLOB, unread BLOB, date INTEGER, hash INTEGER, premium INTEGER, emoji INTEGER);").stepThis().dispose(); - database.executeFast("CREATE TABLE stickers_dice(emoji TEXT PRIMARY KEY, data BLOB, date INTEGER);").stepThis().dispose(); - database.executeFast("CREATE TABLE stickersets(id INTEGER PRIMATE KEY, data BLOB, hash INTEGER);").stepThis().dispose(); - database.executeFast("CREATE TABLE hashtag_recent_v2(id TEXT PRIMARY KEY, date INTEGER);").stepThis().dispose(); - database.executeFast("CREATE TABLE webpage_pending_v2(id INTEGER, mid INTEGER, uid INTEGER, PRIMARY KEY (id, mid, uid));").stepThis().dispose(); - database.executeFast("CREATE TABLE sent_files_v2(uid TEXT, type INTEGER, data BLOB, parent TEXT, PRIMARY KEY (uid, type))").stepThis().dispose(); - database.executeFast("CREATE TABLE search_recent(did INTEGER PRIMARY KEY, date INTEGER);").stepThis().dispose(); - database.executeFast("CREATE TABLE media_counts_v2(uid INTEGER, type INTEGER, count INTEGER, old INTEGER, PRIMARY KEY(uid, type))").stepThis().dispose(); - database.executeFast("CREATE TABLE keyvalue(id TEXT PRIMARY KEY, value TEXT)").stepThis().dispose(); - database.executeFast("CREATE TABLE bot_info_v2(uid INTEGER, dialogId 
INTEGER, info BLOB, PRIMARY KEY(uid, dialogId))").stepThis().dispose(); - database.executeFast("CREATE TABLE pending_tasks(id INTEGER PRIMARY KEY, data BLOB);").stepThis().dispose(); - database.executeFast("CREATE TABLE requested_holes(uid INTEGER, seq_out_start INTEGER, seq_out_end INTEGER, PRIMARY KEY (uid, seq_out_start, seq_out_end));").stepThis().dispose(); - database.executeFast("CREATE TABLE sharing_locations(uid INTEGER PRIMARY KEY, mid INTEGER, date INTEGER, period INTEGER, message BLOB, proximity INTEGER);").stepThis().dispose(); - - database.executeFast("CREATE TABLE shortcut_widget(id INTEGER, did INTEGER, ord INTEGER, PRIMARY KEY (id, did));").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS shortcut_widget_did ON shortcut_widget(did);").stepThis().dispose(); - - database.executeFast("CREATE TABLE emoji_keywords_v2(lang TEXT, keyword TEXT, emoji TEXT, PRIMARY KEY(lang, keyword, emoji));").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS emoji_keywords_v2_keyword ON emoji_keywords_v2(keyword);").stepThis().dispose(); - database.executeFast("CREATE TABLE emoji_keywords_info_v2(lang TEXT PRIMARY KEY, alias TEXT, version INTEGER, date INTEGER);").stepThis().dispose(); - - database.executeFast("CREATE TABLE wallpapers2(uid INTEGER PRIMARY KEY, data BLOB, num INTEGER)").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS wallpapers_num ON wallpapers2(num);").stepThis().dispose(); - - database.executeFast("CREATE TABLE unread_push_messages(uid INTEGER, mid INTEGER, random INTEGER, date INTEGER, data BLOB, fm TEXT, name TEXT, uname TEXT, flags INTEGER, PRIMARY KEY(uid, mid))").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS unread_push_messages_idx_date ON unread_push_messages(date);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS unread_push_messages_idx_random ON unread_push_messages(random);").stepThis().dispose(); - - database.executeFast("CREATE TABLE polls_v2(mid INTEGER, uid INTEGER, id INTEGER, PRIMARY KEY (mid, uid));").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS polls_id_v2 ON polls_v2(id);").stepThis().dispose(); - - database.executeFast("CREATE TABLE reactions(data BLOB, hash INTEGER, date INTEGER);").stepThis().dispose(); - database.executeFast("CREATE TABLE reaction_mentions(message_id INTEGER, state INTEGER, dialog_id INTEGER, PRIMARY KEY(message_id, dialog_id))").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS reaction_mentions_did ON reaction_mentions(dialog_id);").stepThis().dispose(); - - database.executeFast("CREATE TABLE downloading_documents(data BLOB, hash INTEGER, id INTEGER, state INTEGER, date INTEGER, PRIMARY KEY(hash, id));").stepThis().dispose(); - database.executeFast("CREATE TABLE animated_emoji(document_id INTEGER PRIMARY KEY, data BLOB);").stepThis().dispose(); - - database.executeFast("CREATE TABLE attach_menu_bots(data BLOB, hash INTEGER, date INTEGER);").stepThis().dispose(); - - database.executeFast("CREATE TABLE premium_promo(data BLOB, date INTEGER);").stepThis().dispose(); - database.executeFast("CREATE TABLE emoji_statuses(data BLOB, type INTEGER);").stepThis().dispose(); - - database.executeFast("CREATE TABLE messages_holes_topics(uid INTEGER, topic_id INTEGER, start INTEGER, end INTEGER, PRIMARY KEY(uid, topic_id, start));").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_end_messages_holes ON messages_holes_topics(uid, topic_id, 
end);").stepThis().dispose(); - - database.executeFast("CREATE TABLE messages_topics(mid INTEGER, uid INTEGER, topic_id INTEGER, read_state INTEGER, send_state INTEGER, date INTEGER, data BLOB, out INTEGER, ttl INTEGER, media INTEGER, replydata BLOB, imp INTEGER, mention INTEGER, forwards INTEGER, replies_data BLOB, thread_reply_id INTEGER, is_channel INTEGER, reply_to_message_id INTEGER, custom_params BLOB, PRIMARY KEY(mid, topic_id, uid))").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_date_mid_idx_messages_topics ON messages_topics(uid, date, mid);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS mid_out_idx_messages_topics ON messages_topics(mid, out);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS task_idx_messages_topics ON messages_topics(uid, out, read_state, ttl, date, send_state);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS send_state_idx_messages_topics ON messages_topics(mid, send_state, date);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS is_channel_idx_messages_topics ON messages_topics(mid, is_channel);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS reply_to_idx_messages_topics ON messages_topics(mid, reply_to_message_id);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS mid_uid_messages_topics ON messages_topics(mid, uid);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_mid_read_out_idx_messages_topics ON messages_topics(uid, topic_id, mid, read_state, out);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_mention_idx_messages_topics ON messages_topics(uid, topic_id, mention, read_state);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_topic_id_messages_topics ON messages_topics(uid, topic_id);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_topic_id_date_mid_messages_topics ON messages_topics(uid, topic_id, date, mid);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_topic_id_mid_messages_topics ON messages_topics(uid, topic_id, mid);").stepThis().dispose(); - - - database.executeFast("CREATE TABLE media_topics(mid INTEGER, uid INTEGER, topic_id INTEGER, date INTEGER, type INTEGER, data BLOB, PRIMARY KEY(mid, uid, topic_id, type))").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_mid_type_date_idx_media_topics ON media_topics(uid, topic_id, mid, type, date);").stepThis().dispose(); - - database.executeFast("CREATE TABLE media_holes_topics(uid INTEGER, topic_id INTEGER, type INTEGER, start INTEGER, end INTEGER, PRIMARY KEY(uid, topic_id, type, start));").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS uid_end_media_holes_topics ON media_holes_topics(uid, topic_id, type, end);").stepThis().dispose(); - - database.executeFast("CREATE TABLE topics(did INTEGER, topic_id INTEGER, data BLOB, top_message INTEGER, topic_message BLOB, unread_count INTEGER, max_read_id INTEGER, unread_mentions INTEGER, unread_reactions INTEGER, read_outbox INTEGER, pinned INTEGER, total_messages_count INTEGER, hidden INTEGER, PRIMARY KEY(did, topic_id));").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS did_top_message_topics ON topics(did, top_message);").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS did_topics ON topics(did);").stepThis().dispose(); 
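Making createTables(SQLiteDatabase) public static means the full schema can be rebuilt against any database handle in a single call — presumably what DatabaseMigrationHelper.recoverDatabase(), introduced in this same patch, relies on when it reconstructs a corrupted database into a fresh file. A minimal, hypothetical caller is sketched here; only the SQLiteDatabase constructor, the PRAGMA setup and createTables() are taken from this patch, the rest is illustrative:

    // Hypothetical reuse of the new static helper on a freshly created database file.
    // createTables() may throw SQLiteException; the real callers handle that themselves.
    SQLiteDatabase db = new SQLiteDatabase(newCacheFile.getPath());
    db.executeFast("PRAGMA journal_mode = WAL").stepThis().dispose();
    MessagesStorage.createTables(db);  // all tables and indexes, then PRAGMA user_version = LAST_DB_VERSION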
- - database.executeFast("CREATE TABLE media_counts_topics(uid INTEGER, topic_id INTEGER, type INTEGER, count INTEGER, old INTEGER, PRIMARY KEY(uid, topic_id, type))").stepThis().dispose(); - - database.executeFast("CREATE TABLE reaction_mentions_topics(message_id INTEGER, state INTEGER, dialog_id INTEGER, topic_id INTEGER, PRIMARY KEY(message_id, dialog_id, topic_id))").stepThis().dispose(); - database.executeFast("CREATE INDEX IF NOT EXISTS reaction_mentions_topics_did ON reaction_mentions_topics(dialog_id, topic_id);").stepThis().dispose(); - - //version - database.executeFast("PRAGMA user_version = " + LAST_DB_VERSION).stepThis().dispose(); + createTables(database); } else { int version = database.executeInt("PRAGMA user_version"); if (BuildVars.LOGS_ENABLED) { @@ -489,11 +342,6 @@ public void openDatabase(int openTries) { FileLog.e(e2); } } - try { - database.executeFast("CREATE TABLE IF NOT EXISTS dialog_filter_neko(id INTEGER PRIMARY KEY, ord INTEGER, unread_count INTEGER, flags INTEGER, title TEXT, emoticon TEXT)").stepThis().dispose(); - } catch (Exception e) { - FileLog.e(e); - } if (version < LAST_DB_VERSION) { try { updateDbToLastVersion(version); @@ -508,17 +356,10 @@ public void openDatabase(int openTries) { } } catch (Exception e) { FileLog.e(e); - if (BuildVars.DEBUG_PRIVATE_VERSION) { - throw new RuntimeException(e); - } if (openTries < 3 && e.getMessage() != null && e.getMessage().contains("malformed")) { if (openTries == 2) { cleanupInternal(true); - for (int a = 0; a < 2; a++) { - getUserConfig().setDialogsLoadOffset(a, 0, 0, 0, 0, 0, 0); - getUserConfig().setTotalDialogsCount(a, 0); - } - getUserConfig().saveConfig(false); + clearLoadingDialogsOffsets(); } else { cleanupInternal(false); } @@ -548,6 +389,281 @@ public void openDatabase(int openTries) { }); } + private void clearLoadingDialogsOffsets() { + for (int a = 0; a < 2; a++) { + getUserConfig().setDialogsLoadOffset(a, 0, 0, 0, 0, 0, 0); + getUserConfig().setTotalDialogsCount(a, 0); + } + getUserConfig().saveConfig(false); + } + + private boolean recoverDatabase() { + database.close(); + boolean restored = DatabaseMigrationHelper.recoverDatabase(cacheFile, walCacheFile, shmCacheFile, currentAccount); + if (restored) { + try { + database = new SQLiteDatabase(cacheFile.getPath()); + database.executeFast("PRAGMA secure_delete = ON").stepThis().dispose(); + database.executeFast("PRAGMA temp_store = MEMORY").stepThis().dispose(); + database.executeFast("PRAGMA journal_mode = WAL").stepThis().dispose(); + database.executeFast("PRAGMA journal_size_limit = 10485760").stepThis().dispose(); + } catch (SQLiteException e) { + FileLog.e(new Exception(e)); + restored = false; + } + } + if (!restored) { + openDatabase(1); + } + reset(); + return restored; + } + + public final static String[] DATABASE_TABLES = new String[] { + "messages_holes", + "media_holes_v2", + "scheduled_messages_v2", + "messages_v2", + "download_queue", + "user_contacts_v7", + "user_phones_v7", + "dialogs", + "dialog_filter", + "dialog_filter_ep", + "dialog_filter_pin_v2", + "randoms_v2", + "enc_tasks_v4", + "messages_seq", + "params", + "media_v4", + "bot_keyboard", + "bot_keyboard_topics", + "chat_settings_v2", + "user_settings", + "chat_pinned_v2", + "chat_pinned_count", + "chat_hints", + "botcache", + "users_data", + "users", + "chats", + "enc_chats", + "channel_users_v2", + "channel_admins_v3", + "contacts", + "user_photos", + "dialog_settings", + "web_recent_v3", + "stickers_v2", + "stickers_featured", + "stickers_dice", + "stickersets", + 
"hashtag_recent_v2", + "webpage_pending_v2", + "sent_files_v2", + "search_recent", + "media_counts_v2", + "keyvalue", + "bot_info_v2", + "pending_tasks", + "requested_holes", + "sharing_locations", + "shortcut_widget", + "emoji_keywords_v2", + "emoji_keywords_info_v2", + "wallpapers2", + "unread_push_messages", + "polls_v2", + "reactions", + "reaction_mentions", + "downloading_documents", + "animated_emoji", + "attach_menu_bots", + "premium_promo", + "emoji_statuses", + "messages_holes_topics", + "messages_topics", + "media_topics", + "media_holes_topics", + "topics", + "media_counts_topics", + "reaction_mentions_topics", + "emoji_groups" + }; + + public static void createTables(SQLiteDatabase database) throws SQLiteException { + database.executeFast("CREATE TABLE messages_holes(uid INTEGER, start INTEGER, end INTEGER, PRIMARY KEY(uid, start));").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_end_messages_holes ON messages_holes(uid, end);").stepThis().dispose(); + + database.executeFast("CREATE TABLE media_holes_v2(uid INTEGER, type INTEGER, start INTEGER, end INTEGER, PRIMARY KEY(uid, type, start));").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_end_media_holes_v2 ON media_holes_v2(uid, type, end);").stepThis().dispose(); + + database.executeFast("CREATE TABLE scheduled_messages_v2(mid INTEGER, uid INTEGER, send_state INTEGER, date INTEGER, data BLOB, ttl INTEGER, replydata BLOB, reply_to_message_id INTEGER, PRIMARY KEY(mid, uid))").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS send_state_idx_scheduled_messages_v2 ON scheduled_messages_v2(mid, send_state, date);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_date_idx_scheduled_messages_v2 ON scheduled_messages_v2(uid, date);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS reply_to_idx_scheduled_messages_v2 ON scheduled_messages_v2(mid, reply_to_message_id);").stepThis().dispose(); + + database.executeFast("CREATE TABLE messages_v2(mid INTEGER, uid INTEGER, read_state INTEGER, send_state INTEGER, date INTEGER, data BLOB, out INTEGER, ttl INTEGER, media INTEGER, replydata BLOB, imp INTEGER, mention INTEGER, forwards INTEGER, replies_data BLOB, thread_reply_id INTEGER, is_channel INTEGER, reply_to_message_id INTEGER, custom_params BLOB, group_id INTEGER, PRIMARY KEY(mid, uid))").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_mid_read_out_idx_messages_v2 ON messages_v2(uid, mid, read_state, out);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_date_mid_idx_messages_v2 ON messages_v2(uid, date, mid);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS mid_out_idx_messages_v2 ON messages_v2(mid, out);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS task_idx_messages_v2 ON messages_v2(uid, out, read_state, ttl, date, send_state);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS send_state_idx_messages_v2 ON messages_v2(mid, send_state, date);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_mention_idx_messages_v2 ON messages_v2(uid, mention, read_state);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS is_channel_idx_messages_v2 ON messages_v2(mid, is_channel);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS reply_to_idx_messages_v2 ON messages_v2(mid, 
reply_to_message_id);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_mid_groupid_messages_v2 ON messages_v2(uid, mid, group_id);").stepThis().dispose(); + + database.executeFast("CREATE TABLE download_queue(uid INTEGER, type INTEGER, date INTEGER, data BLOB, parent TEXT, PRIMARY KEY (uid, type));").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS type_date_idx_download_queue ON download_queue(type, date);").stepThis().dispose(); + + database.executeFast("CREATE TABLE user_contacts_v7(key TEXT PRIMARY KEY, uid INTEGER, fname TEXT, sname TEXT, imported INTEGER)").stepThis().dispose(); + database.executeFast("CREATE TABLE user_phones_v7(key TEXT, phone TEXT, sphone TEXT, deleted INTEGER, PRIMARY KEY (key, phone))").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS sphone_deleted_idx_user_phones ON user_phones_v7(sphone, deleted);").stepThis().dispose(); + + database.executeFast("CREATE TABLE dialogs(did INTEGER PRIMARY KEY, date INTEGER, unread_count INTEGER, last_mid INTEGER, inbox_max INTEGER, outbox_max INTEGER, last_mid_i INTEGER, unread_count_i INTEGER, pts INTEGER, date_i INTEGER, pinned INTEGER, flags INTEGER, folder_id INTEGER, data BLOB, unread_reactions INTEGER, last_mid_group INTEGER, ttl_period INTEGER)").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS date_idx_dialogs ON dialogs(date);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS last_mid_idx_dialogs ON dialogs(last_mid);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS unread_count_idx_dialogs ON dialogs(unread_count);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS last_mid_i_idx_dialogs ON dialogs(last_mid_i);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS unread_count_i_idx_dialogs ON dialogs(unread_count_i);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS folder_id_idx_dialogs ON dialogs(folder_id);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS flags_idx_dialogs ON dialogs(flags);").stepThis().dispose(); + + database.executeFast("CREATE TABLE dialog_filter_neko(id INTEGER PRIMARY KEY, ord INTEGER, unread_count INTEGER, flags INTEGER, title TEXT, emoticon TEXT)").stepThis().dispose(); + database.executeFast("CREATE TABLE dialog_filter_ep(id INTEGER, peer INTEGER, PRIMARY KEY (id, peer))").stepThis().dispose(); + database.executeFast("CREATE TABLE dialog_filter_pin_v2(id INTEGER, peer INTEGER, pin INTEGER, PRIMARY KEY (id, peer))").stepThis().dispose(); + + database.executeFast("CREATE TABLE randoms_v2(random_id INTEGER, mid INTEGER, uid INTEGER, PRIMARY KEY (random_id, mid, uid))").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS mid_idx_randoms_v2 ON randoms_v2(mid, uid);").stepThis().dispose(); + + database.executeFast("CREATE TABLE enc_tasks_v4(mid INTEGER, uid INTEGER, date INTEGER, media INTEGER, PRIMARY KEY(mid, uid, media))").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS date_idx_enc_tasks_v4 ON enc_tasks_v4(date);").stepThis().dispose(); + + database.executeFast("CREATE TABLE messages_seq(mid INTEGER PRIMARY KEY, seq_in INTEGER, seq_out INTEGER);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS seq_idx_messages_seq ON messages_seq(seq_in, seq_out);").stepThis().dispose(); + + database.executeFast("CREATE TABLE params(id INTEGER PRIMARY KEY, seq INTEGER, pts INTEGER, date 
INTEGER, qts INTEGER, lsv INTEGER, sg INTEGER, pbytes BLOB)").stepThis().dispose(); + database.executeFast("INSERT INTO params VALUES(1, 0, 0, 0, 0, 0, 0, NULL)").stepThis().dispose(); + + database.executeFast("CREATE TABLE media_v4(mid INTEGER, uid INTEGER, date INTEGER, type INTEGER, data BLOB, PRIMARY KEY(mid, uid, type))").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_mid_type_date_idx_media_v4 ON media_v4(uid, mid, type, date);").stepThis().dispose(); + + database.executeFast("CREATE TABLE bot_keyboard(uid INTEGER PRIMARY KEY, mid INTEGER, info BLOB)").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS bot_keyboard_idx_mid_v2 ON bot_keyboard(mid, uid);").stepThis().dispose(); + + database.executeFast("CREATE TABLE bot_keyboard_topics(uid INTEGER, tid INTEGER, mid INTEGER, info BLOB, PRIMARY KEY(uid, tid))").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS bot_keyboard_topics_idx_mid_v2 ON bot_keyboard_topics(mid, uid, tid);").stepThis().dispose(); + + database.executeFast("CREATE TABLE chat_settings_v2(uid INTEGER PRIMARY KEY, info BLOB, pinned INTEGER, online INTEGER, inviter INTEGER, links INTEGER)").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS chat_settings_pinned_idx ON chat_settings_v2(uid, pinned) WHERE pinned != 0;").stepThis().dispose(); + + database.executeFast("CREATE TABLE user_settings(uid INTEGER PRIMARY KEY, info BLOB, pinned INTEGER)").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS user_settings_pinned_idx ON user_settings(uid, pinned) WHERE pinned != 0;").stepThis().dispose(); + + database.executeFast("CREATE TABLE chat_pinned_v2(uid INTEGER, mid INTEGER, data BLOB, PRIMARY KEY (uid, mid));").stepThis().dispose(); + database.executeFast("CREATE TABLE chat_pinned_count(uid INTEGER PRIMARY KEY, count INTEGER, end INTEGER);").stepThis().dispose(); + + database.executeFast("CREATE TABLE chat_hints(did INTEGER, type INTEGER, rating REAL, date INTEGER, PRIMARY KEY(did, type))").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS chat_hints_rating_idx ON chat_hints(rating);").stepThis().dispose(); + + database.executeFast("CREATE TABLE botcache(id TEXT PRIMARY KEY, date INTEGER, data BLOB)").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS botcache_date_idx ON botcache(date);").stepThis().dispose(); + + database.executeFast("CREATE TABLE users_data(uid INTEGER PRIMARY KEY, about TEXT)").stepThis().dispose(); + database.executeFast("CREATE TABLE users(uid INTEGER PRIMARY KEY, name TEXT, status INTEGER, data BLOB)").stepThis().dispose(); + database.executeFast("CREATE TABLE chats(uid INTEGER PRIMARY KEY, name TEXT, data BLOB)").stepThis().dispose(); + database.executeFast("CREATE TABLE enc_chats(uid INTEGER PRIMARY KEY, user INTEGER, name TEXT, data BLOB, g BLOB, authkey BLOB, ttl INTEGER, layer INTEGER, seq_in INTEGER, seq_out INTEGER, use_count INTEGER, exchange_id INTEGER, key_date INTEGER, fprint INTEGER, fauthkey BLOB, khash BLOB, in_seq_no INTEGER, admin_id INTEGER, mtproto_seq INTEGER)").stepThis().dispose(); + database.executeFast("CREATE TABLE channel_users_v2(did INTEGER, uid INTEGER, date INTEGER, data BLOB, PRIMARY KEY(did, uid))").stepThis().dispose(); + database.executeFast("CREATE TABLE channel_admins_v3(did INTEGER, uid INTEGER, data BLOB, PRIMARY KEY(did, uid))").stepThis().dispose(); + database.executeFast("CREATE TABLE contacts(uid INTEGER PRIMARY KEY, mutual 
INTEGER)").stepThis().dispose(); + database.executeFast("CREATE TABLE user_photos(uid INTEGER, id INTEGER, data BLOB, PRIMARY KEY (uid, id))").stepThis().dispose(); + database.executeFast("CREATE TABLE dialog_settings(did INTEGER PRIMARY KEY, flags INTEGER);").stepThis().dispose(); + database.executeFast("CREATE TABLE web_recent_v3(id TEXT, type INTEGER, image_url TEXT, thumb_url TEXT, local_url TEXT, width INTEGER, height INTEGER, size INTEGER, date INTEGER, document BLOB, PRIMARY KEY (id, type));").stepThis().dispose(); + database.executeFast("CREATE TABLE stickers_v2(id INTEGER PRIMARY KEY, data BLOB, date INTEGER, hash INTEGER);").stepThis().dispose(); + database.executeFast("CREATE TABLE stickers_featured(id INTEGER PRIMARY KEY, data BLOB, unread BLOB, date INTEGER, hash INTEGER, premium INTEGER, emoji INTEGER);").stepThis().dispose(); + database.executeFast("CREATE TABLE stickers_dice(emoji TEXT PRIMARY KEY, data BLOB, date INTEGER);").stepThis().dispose(); + database.executeFast("CREATE TABLE stickersets(id INTEGER PRIMATE KEY, data BLOB, hash INTEGER);").stepThis().dispose(); + database.executeFast("CREATE TABLE hashtag_recent_v2(id TEXT PRIMARY KEY, date INTEGER);").stepThis().dispose(); + database.executeFast("CREATE TABLE webpage_pending_v2(id INTEGER, mid INTEGER, uid INTEGER, PRIMARY KEY (id, mid, uid));").stepThis().dispose(); + database.executeFast("CREATE TABLE sent_files_v2(uid TEXT, type INTEGER, data BLOB, parent TEXT, PRIMARY KEY (uid, type))").stepThis().dispose(); + database.executeFast("CREATE TABLE search_recent(did INTEGER PRIMARY KEY, date INTEGER);").stepThis().dispose(); + database.executeFast("CREATE TABLE media_counts_v2(uid INTEGER, type INTEGER, count INTEGER, old INTEGER, PRIMARY KEY(uid, type))").stepThis().dispose(); + database.executeFast("CREATE TABLE keyvalue(id TEXT PRIMARY KEY, value TEXT)").stepThis().dispose(); + database.executeFast("CREATE TABLE bot_info_v2(uid INTEGER, dialogId INTEGER, info BLOB, PRIMARY KEY(uid, dialogId))").stepThis().dispose(); + database.executeFast("CREATE TABLE pending_tasks(id INTEGER PRIMARY KEY, data BLOB);").stepThis().dispose(); + database.executeFast("CREATE TABLE requested_holes(uid INTEGER, seq_out_start INTEGER, seq_out_end INTEGER, PRIMARY KEY (uid, seq_out_start, seq_out_end));").stepThis().dispose(); + database.executeFast("CREATE TABLE sharing_locations(uid INTEGER PRIMARY KEY, mid INTEGER, date INTEGER, period INTEGER, message BLOB, proximity INTEGER);").stepThis().dispose(); + + database.executeFast("CREATE TABLE shortcut_widget(id INTEGER, did INTEGER, ord INTEGER, PRIMARY KEY (id, did));").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS shortcut_widget_did ON shortcut_widget(did);").stepThis().dispose(); + + database.executeFast("CREATE TABLE emoji_keywords_v2(lang TEXT, keyword TEXT, emoji TEXT, PRIMARY KEY(lang, keyword, emoji));").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS emoji_keywords_v2_keyword ON emoji_keywords_v2(keyword);").stepThis().dispose(); + database.executeFast("CREATE TABLE emoji_keywords_info_v2(lang TEXT PRIMARY KEY, alias TEXT, version INTEGER, date INTEGER);").stepThis().dispose(); + + database.executeFast("CREATE TABLE wallpapers2(uid INTEGER PRIMARY KEY, data BLOB, num INTEGER)").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS wallpapers_num ON wallpapers2(num);").stepThis().dispose(); + + database.executeFast("CREATE TABLE unread_push_messages(uid INTEGER, mid INTEGER, random INTEGER, date INTEGER, 
data BLOB, fm TEXT, name TEXT, uname TEXT, flags INTEGER, PRIMARY KEY(uid, mid))").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS unread_push_messages_idx_date ON unread_push_messages(date);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS unread_push_messages_idx_random ON unread_push_messages(random);").stepThis().dispose(); + + database.executeFast("CREATE TABLE polls_v2(mid INTEGER, uid INTEGER, id INTEGER, PRIMARY KEY (mid, uid));").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS polls_id_v2 ON polls_v2(id);").stepThis().dispose(); + + database.executeFast("CREATE TABLE reactions(data BLOB, hash INTEGER, date INTEGER);").stepThis().dispose(); + database.executeFast("CREATE TABLE reaction_mentions(message_id INTEGER, state INTEGER, dialog_id INTEGER, PRIMARY KEY(message_id, dialog_id))").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS reaction_mentions_did ON reaction_mentions(dialog_id);").stepThis().dispose(); + + database.executeFast("CREATE TABLE downloading_documents(data BLOB, hash INTEGER, id INTEGER, state INTEGER, date INTEGER, PRIMARY KEY(hash, id));").stepThis().dispose(); + database.executeFast("CREATE TABLE animated_emoji(document_id INTEGER PRIMARY KEY, data BLOB);").stepThis().dispose(); + + database.executeFast("CREATE TABLE attach_menu_bots(data BLOB, hash INTEGER, date INTEGER);").stepThis().dispose(); + + database.executeFast("CREATE TABLE premium_promo(data BLOB, date INTEGER);").stepThis().dispose(); + database.executeFast("CREATE TABLE emoji_statuses(data BLOB, type INTEGER);").stepThis().dispose(); + + database.executeFast("CREATE TABLE messages_holes_topics(uid INTEGER, topic_id INTEGER, start INTEGER, end INTEGER, PRIMARY KEY(uid, topic_id, start));").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_end_messages_holes ON messages_holes_topics(uid, topic_id, end);").stepThis().dispose(); + + database.executeFast("CREATE TABLE messages_topics(mid INTEGER, uid INTEGER, topic_id INTEGER, read_state INTEGER, send_state INTEGER, date INTEGER, data BLOB, out INTEGER, ttl INTEGER, media INTEGER, replydata BLOB, imp INTEGER, mention INTEGER, forwards INTEGER, replies_data BLOB, thread_reply_id INTEGER, is_channel INTEGER, reply_to_message_id INTEGER, custom_params BLOB, PRIMARY KEY(mid, topic_id, uid))").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_date_mid_idx_messages_topics ON messages_topics(uid, date, mid);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS mid_out_idx_messages_topics ON messages_topics(mid, out);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS task_idx_messages_topics ON messages_topics(uid, out, read_state, ttl, date, send_state);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS send_state_idx_messages_topics ON messages_topics(mid, send_state, date);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS is_channel_idx_messages_topics ON messages_topics(mid, is_channel);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS reply_to_idx_messages_topics ON messages_topics(mid, reply_to_message_id);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS mid_uid_messages_topics ON messages_topics(mid, uid);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_mid_read_out_idx_messages_topics ON messages_topics(uid, topic_id, 
mid, read_state, out);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_mention_idx_messages_topics ON messages_topics(uid, topic_id, mention, read_state);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_topic_id_messages_topics ON messages_topics(uid, topic_id);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_topic_id_date_mid_messages_topics ON messages_topics(uid, topic_id, date, mid);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_topic_id_mid_messages_topics ON messages_topics(uid, topic_id, mid);").stepThis().dispose(); + + + database.executeFast("CREATE TABLE media_topics(mid INTEGER, uid INTEGER, topic_id INTEGER, date INTEGER, type INTEGER, data BLOB, PRIMARY KEY(mid, uid, topic_id, type))").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_mid_type_date_idx_media_topics ON media_topics(uid, topic_id, mid, type, date);").stepThis().dispose(); + + database.executeFast("CREATE TABLE media_holes_topics(uid INTEGER, topic_id INTEGER, type INTEGER, start INTEGER, end INTEGER, PRIMARY KEY(uid, topic_id, type, start));").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS uid_end_media_holes_topics ON media_holes_topics(uid, topic_id, type, end);").stepThis().dispose(); + + database.executeFast("CREATE TABLE topics(did INTEGER, topic_id INTEGER, data BLOB, top_message INTEGER, topic_message BLOB, unread_count INTEGER, max_read_id INTEGER, unread_mentions INTEGER, unread_reactions INTEGER, read_outbox INTEGER, pinned INTEGER, total_messages_count INTEGER, hidden INTEGER, PRIMARY KEY(did, topic_id));").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS did_top_message_topics ON topics(did, top_message);").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS did_topics ON topics(did);").stepThis().dispose(); + + database.executeFast("CREATE TABLE media_counts_topics(uid INTEGER, topic_id INTEGER, type INTEGER, count INTEGER, old INTEGER, PRIMARY KEY(uid, topic_id, type))").stepThis().dispose(); + + database.executeFast("CREATE TABLE reaction_mentions_topics(message_id INTEGER, state INTEGER, dialog_id INTEGER, topic_id INTEGER, PRIMARY KEY(message_id, dialog_id, topic_id))").stepThis().dispose(); + database.executeFast("CREATE INDEX IF NOT EXISTS reaction_mentions_topics_did ON reaction_mentions_topics(dialog_id, topic_id);").stepThis().dispose(); + + database.executeFast("CREATE TABLE emoji_groups(type INTEGER PRIMARY KEY, data BLOB)").stepThis().dispose(); + database.executeFast("CREATE TABLE app_config(data BLOB)").stepThis().dispose(); + + database.executeFast("PRAGMA user_version = " + MessagesStorage.LAST_DB_VERSION).stepThis().dispose(); + + } + public boolean isDatabaseMigrationInProgress() { return databaseMigrationInProgress; } @@ -578,6 +694,33 @@ void executeNoException(String query) { } private void cleanupInternal(boolean deleteFiles) { + if (deleteFiles) { + reset(); + } else { + clearDatabaseValues(); + } + if (database != null) { + database.close(); + database = null; + } + if (deleteFiles) { + if (cacheFile != null) { + cacheFile.delete(); + cacheFile = null; + } + if (walCacheFile != null) { + walCacheFile.delete(); + walCacheFile = null; + } + if (shmCacheFile != null) { + shmCacheFile.delete(); + shmCacheFile = null; + } + + } + } + + public void clearDatabaseValues() { lastDateValue = 0; lastSeqValue = 0; lastPtsValue = 0; @@ -598,24 +741,6 @@ private void 
cleanupInternal(boolean deleteFiles) { secretPBytes = null; secretG = 0; - if (database != null) { - database.close(); - database = null; - } - if (deleteFiles) { - if (cacheFile != null) { - cacheFile.delete(); - cacheFile = null; - } - if (walCacheFile != null) { - walCacheFile.delete(); - walCacheFile = null; - } - if (shmCacheFile != null) { - shmCacheFile.delete(); - shmCacheFile = null; - } - } } public void cleanup(boolean isLogin) { @@ -643,11 +768,35 @@ public void saveSecretParams(int lsv, int sg, byte[] pbytes) { state.dispose(); data.reuse(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } + boolean tryRecover; + + public void checkSQLException(Throwable e) { + checkSQLException(e, true); + } + + private void checkSQLException(Throwable e, boolean logToAppCenter) { + if (e instanceof SQLiteException && e.getMessage() != null && e.getMessage().contains("is malformed") && !tryRecover) { + tryRecover = true; + if (recoverDatabase()) { + tryRecover = false; + clearLoadingDialogsOffsets(); + AndroidUtilities.runOnUIThread(() -> { + getNotificationCenter().postNotificationName(NotificationCenter.onDatabaseReset); + }); + FileLog.e(new Exception("database restored!!")); + } else { + FileLog.e(new Exception(e), logToAppCenter); + } + } else { + FileLog.e(e, logToAppCenter); + } + } + public void fixNotificationSettings() { storageQueue.postRunnable(() -> { try { @@ -691,10 +840,10 @@ public void fixNotificationSettings() { state.dispose(); database.commitTransaction(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } catch (Throwable e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -712,7 +861,7 @@ public long createPendingTask(NativeByteBuffer data) { state.step(); state.dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { data.reuse(); } @@ -725,7 +874,7 @@ public void removePendingTask(long id) { try { database.executeFast("DELETE FROM pending_tasks WHERE id = " + id).stepThis().dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -960,7 +1109,7 @@ private void loadPendingTasks() { } cursor.dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -974,7 +1123,7 @@ public void saveChannelPts(long channelId, int pts) { state.step(); state.dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -996,7 +1145,7 @@ private void saveDiffParamsInternal(int seq, int pts, int date, int qts) { lastSavedDate = date; lastSavedQts = qts; } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } @@ -1023,7 +1172,7 @@ public void setDialogFlags(long did, long flags) { database.executeFast(String.format(Locale.US, "REPLACE INTO dialog_settings VALUES(%d, %d)", did, flags)).stepThis().dispose(); resetAllUnreadCounters(true); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -1070,7 +1219,7 @@ public void putPushMessage(MessageObject message) { data.reuse(); state.dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -1147,17 +1296,18 @@ public void clearLocalDatabase() { } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } cursor2.dispose(); database.executeFast("DELETE FROM messages_v2 WHERE uid = " + did + " AND mid != " + last_mid_i + " AND mid != " + last_mid).stepThis().dispose(); database.executeFast("DELETE FROM messages_holes WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM bot_keyboard WHERE uid = " + 
did).stepThis().dispose(); + database.executeFast("DELETE FROM bot_keyboard_topics WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM media_counts_v2 WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM media_v4 WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM media_holes_v2 WHERE uid = " + did).stepThis().dispose(); - MediaDataController.getInstance(currentAccount).clearBotKeyboard(did, null); + MediaDataController.getInstance(currentAccount).clearBotKeyboard(did); if (messageId != -1) { MessagesStorage.createFirstHoles(did, state5, state6, messageId, 0); } @@ -1177,7 +1327,7 @@ public void clearLocalDatabase() { getMessagesController().getTopicsController().databaseCleared(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (database != null) { database.commitTransaction(); @@ -1191,10 +1341,7 @@ public void clearLocalDatabase() { if (cursor != null) { cursor.dispose(); } - AndroidUtilities.runOnUIThread(() -> { - NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.didClearDatabase); - getMediaDataController().loadAttachMenuBots(false, true); - }); + reset(); } }); } @@ -1287,7 +1434,7 @@ private void saveTopicsInternal(long dialogId, List topics, resetAllUnreadCounters(false); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { @@ -1361,7 +1508,7 @@ public void updateTopicData(long dialogId, TLRPC.TL_forumTopic fromTopic, int fl data.reuse(); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -1474,7 +1621,7 @@ public void loadTopics(long dialogId, Consumer> c } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } } @@ -1505,7 +1652,7 @@ public void loadTopics(long dialogId, Consumer> c } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } @@ -1533,7 +1680,7 @@ public void loadTopics(long dialogId, Consumer> c } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -1588,7 +1735,7 @@ public void loadGroupedMessagesForTopicUpdates(ArrayList topicsReadOu try { database.executeFast(String.format(Locale.US, "UPDATE topics SET read_outbox = max((SELECT read_outbox FROM topics WHERE did = %d AND topic_id = %d), %d) WHERE did = %d AND topic_id = %d", topicKey.dialogId, topicKey.topicId, value, topicKey.dialogId, topicKey.topicId)).stepThis().dispose(); } catch (SQLiteException e) { - FileLog.e(e); + checkSQLException(e); } } }); @@ -1672,7 +1819,7 @@ public void setDialogTtl(long did, int ttl) { try { database.executeFast(String.format(Locale.US, "UPDATE dialogs SET ttl_period = %d WHERE did = %d", ttl, did)).stepThis().dispose(); } catch (SQLiteException e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -1685,6 +1832,23 @@ public ArrayList getDatabaseFiles() { return files; } + public void reset() { + clearDatabaseValues(); + + AndroidUtilities.runOnUIThread(() -> { + for (int a = 0; a < 2; a++) { + getUserConfig().setDialogsLoadOffset(a, 0, 0, 0, 0, 0, 0); + getUserConfig().setTotalDialogsCount(a, 0); + } + getUserConfig().clearFilters(); + getUserConfig().clearPinnedDialogsLoaded(); + + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.didClearDatabase); + getMediaDataController().loadAttachMenuBots(false, true); + getNotificationCenter().postNotificationName(NotificationCenter.onDatabaseReset); + 
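            // Posted in this order, the calls above let the rest of the app catch up with a wiped or
            // rebuilt database: dialog paging offsets and totals are zeroed, cached filters and the
            // pinned-dialogs-loaded flag are dropped from UserConfig, didClearDatabase makes the dialog
            // list reload, attach-menu bots are re-fetched, and onDatabaseReset is the same signal the
            // new malformed-database recovery flow relies on (checkSQLException -> recoverDatabase -> reset()).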
}); + } + private static class ReadDialog { public int lastMid; public int date; @@ -1762,7 +1926,7 @@ public void readAllDialogs(int folderId) { } }); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -1851,7 +2015,7 @@ private TLRPC.messages_Dialogs loadDialogsByIds(String ids, ArrayList user } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } else { data.reuse(); @@ -1943,7 +2107,7 @@ private TLRPC.messages_Dialogs loadDialogsByIds(String ids, ArrayList user } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } else { data.reuse(); @@ -2127,7 +2291,7 @@ private void loadDialogFilters() { getMessagesController().processLoadedDialogFilters(new ArrayList<>(dialogFilters), dialogs, null, users, chats, encryptedChats, 0); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (filtersCursor != null) { filtersCursor.dispose(); @@ -2537,7 +2701,7 @@ private void calcUnreadCounters(boolean apply) { } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -2610,7 +2774,7 @@ private void saveDialogFilterInternal(MessagesController.DialogFilter filter, bo database.commitTransaction(); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (database != null) { database.commitTransaction(); @@ -2621,6 +2785,28 @@ private void saveDialogFilterInternal(MessagesController.DialogFilter filter, bo } } + private ArrayList toPeerIds(ArrayList inputPeers) { + ArrayList array = new ArrayList(); + if (inputPeers == null) { + return array; + } + final int count = inputPeers.size(); + for (int i = 0; i < count; ++i) { + TLRPC.InputPeer peer = inputPeers.get(i); + if (peer == null) { + continue; + } + long id; + if (peer.user_id != 0) { + id = peer.user_id; + } else { + id = -(peer.chat_id != 0 ? peer.chat_id : peer.channel_id); + } + array.add(id); + } + return array; + } + public void checkLoadedRemoteFilters(TLRPC.Vector vector) { storageQueue.postRunnable(() -> { try { @@ -2630,14 +2816,15 @@ public void checkLoadedRemoteFilters(TLRPC.Vector vector) { filtersToDelete.put(filter.id, filter); } ArrayList filtersOrder = new ArrayList<>(); + ArrayList usersToLoad = new ArrayList<>(); HashMap usersToLoadMap = new HashMap<>(); ArrayList chatsToLoad = new ArrayList<>(); HashMap chatsToLoadMap = new HashMap<>(); ArrayList dialogsToLoad = new ArrayList<>(); HashMap dialogsToLoadMap = new HashMap<>(); + ArrayList filtersToSave = new ArrayList<>(); - HashMap> filterUserRemovals = new HashMap<>(); HashMap> filterDialogRemovals = new HashMap<>(); HashSet filtersUnreadCounterReset = new HashSet<>(); for (int a = 0, N = vector.objects.size(); a < N; a++) { @@ -2725,7 +2912,6 @@ public void checkLoadedRemoteFilters(TLRPC.Vector vector) { } } } - for (int c = 0, N2 = filter.pinnedDialogs.size(); c < N2; c++) { long did = filter.pinnedDialogs.keyAt(c); if (DialogObject.isEncryptedDialog(did)) { @@ -2734,84 +2920,74 @@ public void checkLoadedRemoteFilters(TLRPC.Vector vector) { existingDialogsIds.add(did); existingIds.remove(did); } + + filter.pinnedDialogs.clear(); + for (int b = 0, N2 = newFilter.pinned_peers.size(); b < N2; b++) { + TLRPC.InputPeer peer = newFilter.pinned_peers.get(b); + Long id; + if (peer.user_id != 0) { + id = peer.user_id; + } else { + id = -(peer.chat_id != 0 ? 
peer.chat_id : peer.channel_id); + } + int index = filter.pinnedDialogs.size(); + if (secretChatsMap != null) { + Long did; + while ((did = secretChatsMap.remove(index)) != null) { + filter.pinnedDialogs.put(did, index); + index++; + } + } + filter.pinnedDialogs.put(id, index); + existingIds.remove(id); + if (!existingDialogsIds.remove(id)) { + changed = true; + if (!dialogsToLoadMap.containsKey(id)) { + dialogsToLoad.add(id); + dialogsToLoadMap.put(id, peer); + } + } + } + if (secretChatsMap != null) { + for (LinkedHashMap.Entry entry : secretChatsMap.entrySet()) { + filter.pinnedDialogs.put(entry.getValue(), filter.pinnedDialogs.size()); + } + } + for (int c = 0; c < 2; c++) { - ArrayList fromArray = c == 0 ? newFilter.include_peers : newFilter.exclude_peers; + ArrayList fromArray = toPeerIds(c == 0 ? newFilter.include_peers : newFilter.exclude_peers); ArrayList toArray = c == 0 ? filter.alwaysShow : filter.neverShow; if (c == 0) { - filter.pinnedDialogs.clear(); - for (int b = 0, N2 = newFilter.pinned_peers.size(); b < N2; b++) { - TLRPC.InputPeer peer = newFilter.pinned_peers.get(b); - Long id; - if (peer.user_id != 0) { - id = peer.user_id; - } else { - id = -(peer.chat_id != 0 ? peer.chat_id : peer.channel_id); - } - if (!filter.alwaysShow.contains(id)) { - filter.alwaysShow.add(id); - } - int index = filter.pinnedDialogs.size(); - if (secretChatsMap != null) { - Long did; - while ((did = secretChatsMap.remove(index)) != null) { - filter.pinnedDialogs.put(did, index); - index++; - } - } - filter.pinnedDialogs.put(id, index); - existingIds.remove(id); - if (!existingDialogsIds.remove(id)) { - changed = true; - if (!dialogsToLoadMap.containsKey(id)) { - dialogsToLoad.add(id); - dialogsToLoadMap.put(id, peer); - } - } + // put pinned_peers into include_peers (alwaysShow) + ArrayList pinnedArray = toPeerIds(newFilter.pinned_peers); + for (int i = 0; i < pinnedArray.size(); ++i) { + fromArray.remove(pinnedArray.get(i)); } - if (secretChatsMap != null) { - for (LinkedHashMap.Entry entry : secretChatsMap.entrySet()) { - filter.pinnedDialogs.put(entry.getValue(), filter.pinnedDialogs.size()); + fromArray.addAll(0, pinnedArray); + } + + final int fromArrayCount = fromArray.size(); + boolean isDifferent = fromArray.size() != toArray.size(); + if (!isDifferent) { + for (int i = 0; i < fromArrayCount; ++i) { + if (!toArray.contains(fromArray.get(i))) { + isDifferent = true; + break; } } } - for (int b = 0, N2 = fromArray.size(); b < N2; b++) { - TLRPC.InputPeer peer = fromArray.get(b); - if (peer.user_id != 0) { - Long uid = peer.user_id; - if (!existingIds.remove(uid)) { - changed = true; - if (!toArray.contains(uid)) { - toArray.add(uid); - } - if (!usersToLoadMap.containsKey(uid)) { - usersToLoad.add(uid); - usersToLoadMap.put(uid, peer); - unreadChanged = true; - } - } + + if (isDifferent) { + unreadChanged = true; + changed = true; + if (c == 0) { + filter.alwaysShow = fromArray; } else { - Long chatId = peer.chat_id != 0 ? 
peer.chat_id : peer.channel_id; - Long dialogId = -chatId; - if (!existingIds.remove(dialogId)) { - changed = true; - if (!toArray.contains(dialogId)) { - toArray.add(dialogId); - } - if (!chatsToLoadMap.containsKey(chatId)) { - chatsToLoad.add(chatId); - chatsToLoadMap.put(chatId, peer); - unreadChanged = true; - } - } + filter.neverShow = fromArray; } } } - if (!existingIds.isEmpty()) { - filterUserRemovals.put(filter.id, existingIds); - unreadChanged = true; - changed = true; - } if (!existingDialogsIds.isEmpty()) { filterDialogRemovals.put(filter.id, existingDialogsIds); changed = true; @@ -2907,17 +3083,17 @@ public void checkLoadedRemoteFilters(TLRPC.Vector vector) { } if (usersToLoadMap.isEmpty() && chatsToLoadMap.isEmpty() && dialogsToLoadMap.isEmpty()) { - processLoadedFilterPeersInternal(dialogs, null, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filterUserRemovals, filtersUnreadCounterReset); + processLoadedFilterPeersInternal(dialogs, null, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filtersUnreadCounterReset); } else { - getMessagesController().loadFilterPeers(dialogsToLoadMap, usersToLoadMap, chatsToLoadMap, dialogs, new TLRPC.TL_messages_dialogs(), users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filterUserRemovals, filtersUnreadCounterReset); + getMessagesController().loadFilterPeers(dialogsToLoadMap, usersToLoadMap, chatsToLoadMap, dialogs, new TLRPC.TL_messages_dialogs(), users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filtersUnreadCounterReset); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } - private void processLoadedFilterPeersInternal(TLRPC.messages_Dialogs pinnedDialogs, TLRPC.messages_Dialogs pinnedRemoteDialogs, ArrayList users, ArrayList chats, ArrayList filtersToSave, SparseArray filtersToDelete, ArrayList filtersOrder, HashMap> filterDialogRemovals, HashMap> filterUserRemovals, HashSet filtersUnreadCounterReset) { + private void processLoadedFilterPeersInternal(TLRPC.messages_Dialogs pinnedDialogs, TLRPC.messages_Dialogs pinnedRemoteDialogs, ArrayList users, ArrayList chats, ArrayList filtersToSave, SparseArray filtersToDelete, ArrayList filtersOrder, HashMap> filterDialogRemovals, HashSet filtersUnreadCounterReset) { boolean anythingChanged = false; putUsersAndChats(users, chats, true, false); for (int a = 0, N = filtersToDelete.size(); a < N; a++) { @@ -2931,16 +3107,6 @@ private void processLoadedFilterPeersInternal(TLRPC.messages_Dialogs pinnedDialo } filter.pendingUnreadCount = -1; } - for (HashMap.Entry> entry : filterUserRemovals.entrySet()) { - MessagesController.DialogFilter filter = dialogFiltersMap.get(entry.getKey()); - if (filter == null) { - continue; - } - HashSet set = entry.getValue(); - filter.alwaysShow.removeAll(set); - filter.neverShow.removeAll(set); - anythingChanged = true; - } for (HashMap.Entry> entry : filterDialogRemovals.entrySet()) { MessagesController.DialogFilter filter = dialogFiltersMap.get(entry.getKey()); if (filter == null) { @@ -2982,8 +3148,8 @@ private void processLoadedFilterPeersInternal(TLRPC.messages_Dialogs pinnedDialo getMessagesController().processLoadedDialogFilters(new ArrayList<>(dialogFilters), pinnedDialogs, pinnedRemoteDialogs, users, chats, null, remote); } - protected void processLoadedFilterPeers(TLRPC.messages_Dialogs pinnedDialogs, TLRPC.messages_Dialogs pinnedRemoteDialogs, ArrayList users, ArrayList chats, ArrayList filtersToSave, 
SparseArray filtersToDelete, ArrayList filtersOrder, HashMap> filterDialogRemovals, HashMap> filterUserRemovals, HashSet filtersUnreadCounterReset) { - storageQueue.postRunnable(() -> processLoadedFilterPeersInternal(pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filterUserRemovals, filtersUnreadCounterReset)); + protected void processLoadedFilterPeers(TLRPC.messages_Dialogs pinnedDialogs, TLRPC.messages_Dialogs pinnedRemoteDialogs, ArrayList users, ArrayList chats, ArrayList filtersToSave, SparseArray filtersToDelete, ArrayList filtersOrder, HashMap> filterDialogRemovals, HashSet filtersUnreadCounterReset) { + storageQueue.postRunnable(() -> processLoadedFilterPeersInternal(pinnedDialogs, pinnedRemoteDialogs, users, chats, filtersToSave, filtersToDelete, filtersOrder, filterDialogRemovals, filtersUnreadCounterReset)); } private void deleteDialogFilterInternal(MessagesController.DialogFilter filter) { @@ -2994,7 +3160,7 @@ private void deleteDialogFilterInternal(MessagesController.DialogFilter filter) database.executeFast("DELETE FROM dialog_filter_ep WHERE id = " + filter.id).stepThis().dispose(); database.executeFast("DELETE FROM dialog_filter_pin_v2 WHERE id = " + filter.id).stepThis().dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } @@ -3033,7 +3199,7 @@ public void saveDialogFiltersOrderInternal() { state.dispose(); state = null; } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -3093,34 +3259,40 @@ protected void loadReplyMessages(LongSparseArray arrayList = owners.get(message.id); - if (arrayList != null) { - for (int a = 0, N = arrayList.size(); a < N; a++) { - TLRPC.Message m = arrayList.get(a); - m.replyMessage = message; - MessageObject.getDialogId(message); + ArrayList arrayList = owners.get(message.id); + if (arrayList != null) { + for (int a = 0, N = arrayList.size(); a < N; a++) { + TLRPC.Message m = arrayList.get(a); + m.replyMessage = message; + MessageObject.getDialogId(message); + } } } } + cursor.dispose(); } - cursor.dispose(); } catch (Exception e) { throw e; } finally { @@ -3230,7 +3402,7 @@ public void loadUnreadMessages() { } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } } @@ -3310,7 +3482,7 @@ public void loadUnreadMessages() { Collections.reverse(messages); AndroidUtilities.runOnUIThread(() -> getNotificationsController().processLoadedUnreadMessages(pushDialogs, messages, pushMessages, users, chats, encryptedChats)); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -3357,7 +3529,7 @@ public void putWallpapers(ArrayList wallPapers, int action) { state = null; database.commitTransaction(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (database != null) { database.commitTransaction(); @@ -3374,7 +3546,7 @@ public void deleteWallpaper(long id) { try { database.executeFast("DELETE FROM wallpapers2 WHERE uid = " + id).stepThis().dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -3397,7 +3569,7 @@ public void getWallpapers() { } AndroidUtilities.runOnUIThread(() -> NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.wallpapersDidLoad, wallPapers)); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -3432,7 +3604,7 @@ public void 
addRecentLocalFile(String imageUrl, String localUrl, TLRPC.Document state.dispose(); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -3466,7 +3638,7 @@ public void deleteUserChatHistory(long dialogId, long fromId) { } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } cursor.dispose(); cursor = null; @@ -3482,7 +3654,7 @@ public void deleteUserChatHistory(long dialogId, long fromId) { AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.messagesDeleted, mids, DialogObject.isChatDialog(dialogId) ? -dialogId : 0, false)); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -3605,7 +3777,7 @@ public void deleteDialog(long did, int messagesOnly) { } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } cursor.dispose(); cursor = null; @@ -3649,7 +3821,7 @@ public void deleteDialog(long did, int messagesOnly) { } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } cursor2.dispose(); cursor2 = null; @@ -3657,10 +3829,11 @@ public void deleteDialog(long did, int messagesOnly) { database.executeFast("DELETE FROM messages_v2 WHERE uid = " + did + " AND mid != " + last_mid_i + " AND mid != " + last_mid).stepThis().dispose(); database.executeFast("DELETE FROM messages_holes WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM bot_keyboard WHERE uid = " + did).stepThis().dispose(); + database.executeFast("DELETE FROM bot_keyboard_topics WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM media_counts_v2 WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM media_v4 WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM media_holes_v2 WHERE uid = " + did).stepThis().dispose(); - getMediaDataController().clearBotKeyboard(did, null); + getMediaDataController().clearBotKeyboard(did); state5 = database.executeFast("REPLACE INTO messages_holes VALUES(?, ?, ?)"); state6 = database.executeFast("REPLACE INTO media_holes_v2 VALUES(?, ?, ?, ?)"); @@ -3681,16 +3854,17 @@ public void deleteDialog(long did, int messagesOnly) { database.executeFast("UPDATE dialogs SET unread_count = 0, unread_count_i = 0 WHERE did = " + did).stepThis().dispose(); database.executeFast("DELETE FROM messages_v2 WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM bot_keyboard WHERE uid = " + did).stepThis().dispose(); + database.executeFast("DELETE FROM bot_keyboard_topics WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM media_counts_v2 WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM media_v4 WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM messages_holes WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM media_holes_v2 WHERE uid = " + did).stepThis().dispose(); - getMediaDataController().clearBotKeyboard(did, null); + getMediaDataController().clearBotKeyboard(did); AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.needReloadRecentDialogsSearch)); resetAllUnreadCounters(false); updateWidgets(did); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -3713,7 +3887,7 @@ public void onDeleteQueryComplete(long did) { try { database.executeFast("DELETE FROM 
media_counts_v2 WHERE uid = " + did).stepThis().dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -3750,7 +3924,7 @@ public void getDialogPhotos(long did, int count, int maxId, int classGuid) { Utilities.stageQueue.postRunnable(() -> getMessagesController().processLoadedUserPhotos(res, messages, did, count, maxId, true, classGuid)); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -3764,7 +3938,7 @@ public void clearUserPhotos(long dialogId) { try { database.executeFast("DELETE FROM user_photos WHERE uid = " + dialogId).stepThis().dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -3774,7 +3948,7 @@ public void clearUserPhoto(long dialogId, long pid) { try { database.executeFast("DELETE FROM user_photos WHERE uid = " + dialogId + " AND id = " + pid).stepThis().dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -3834,6 +4008,7 @@ public void resetDialogs(TLRPC.messages_Dialogs dialogsRes, int messagesCount, i database.executeFast("DELETE FROM messages_v2 WHERE uid IN " + ids).stepThis().dispose(); database.executeFast("DELETE FROM polls_v2 WHERE 1").stepThis().dispose(); database.executeFast("DELETE FROM bot_keyboard WHERE uid IN " + ids).stepThis().dispose(); + database.executeFast("DELETE FROM bot_keyboard_topics WHERE uid IN " + ids).stepThis().dispose(); database.executeFast("DELETE FROM media_v4 WHERE uid IN " + ids).stepThis().dispose(); database.executeFast("DELETE FROM messages_holes WHERE uid IN " + ids).stepThis().dispose(); database.executeFast("DELETE FROM media_holes_v2 WHERE uid IN " + ids).stepThis().dispose(); @@ -3927,7 +4102,7 @@ public void resetDialogs(TLRPC.messages_Dialogs dialogsRes, int messagesCount, i getUserConfig().saveConfig(false); getMessagesController().completeDialogsReset(dialogsRes, messagesCount, seq, newPts, date, qts, new_dialogs_dict, new_dialogMessage, lastMessage); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (database != null) { database.commitTransaction(); @@ -3944,68 +4119,82 @@ public void putDialogPhotos(long did, TLRPC.photos_Photos photos, ArrayList { - SQLitePreparedStatement state = null; - try { - database.executeFast("DELETE FROM user_photos WHERE uid = " + did).stepThis().dispose(); - state = database.executeFast("REPLACE INTO user_photos VALUES(?, ?, ?)"); - for (int a = 0, N = photos.photos.size(); a < N; a++) { - TLRPC.Photo photo = photos.photos.get(a); - if (photo instanceof TLRPC.TL_photoEmpty) { - continue; - } - if (photo.file_reference == null) { - photo.file_reference = new byte[0]; - } - state.requery(); - int size = photo.getObjectSize(); - if (messages != null) { - size += messages.get(a).getObjectSize(); - } - NativeByteBuffer data = new NativeByteBuffer(size); - photo.serializeToStream(data); - if (messages != null) { - messages.get(a).serializeToStream(data); - } - state.bindLong(1, did); - state.bindLong(2, photo.id); - state.bindByteBuffer(3, data); - state.step(); - data.reuse(); - } - state.dispose(); - state = null; - } catch (Exception e) { - FileLog.e(e); - } finally { - if (state != null) { - state.dispose(); - } - } + putDialogPhotosInternal(did, photos, messages); }); } - public void addDialogPhoto(long did, TLRPC.Photo photo) { - storageQueue.postRunnable(() -> { - SQLitePreparedStatement state = null; - try { - state = database.executeFast("REPLACE INTO user_photos VALUES(?, ?, ?)"); - + private void 
putDialogPhotosInternal(long did, TLRPC.photos_Photos photos, ArrayList<TLRPC.Message> messages) {
+        SQLitePreparedStatement state = null;
+        try {
+            database.executeFast("DELETE FROM user_photos WHERE uid = " + did).stepThis().dispose();
+            state = database.executeFast("REPLACE INTO user_photos VALUES(?, ?, ?)");
+            for (int a = 0, N = photos.photos.size(); a < N; a++) {
+                TLRPC.Photo photo = photos.photos.get(a);
+                if (photo instanceof TLRPC.TL_photoEmpty || photo == null) {
+                    continue;
+                }
+                if (photo.file_reference == null) {
+                    photo.file_reference = new byte[0];
+                }
                 state.requery();
                 int size = photo.getObjectSize();
+                if (messages != null && messages.get(a) != null) {
+                    size += messages.get(a).getObjectSize();
+                }
                 NativeByteBuffer data = new NativeByteBuffer(size);
                 photo.serializeToStream(data);
+                if (messages != null && messages.get(a) != null) {
+                    messages.get(a).serializeToStream(data);
+                }
                 state.bindLong(1, did);
                 state.bindLong(2, photo.id);
                 state.bindByteBuffer(3, data);
                 state.step();
                 data.reuse();
+            }
+            state.dispose();
+            state = null;
+        } catch (Exception e) {
+            checkSQLException(e);
+        } finally {
+            if (state != null) {
                 state.dispose();
-                state = null;
+            }
+        }
+    }
+
+    public void addDialogPhoto(long did, TLRPC.Photo photoToAdd) {
+        storageQueue.postRunnable(() -> {
+            SQLiteCursor cursor = null;
+            try {
+                cursor = database.queryFinalized(String.format(Locale.US, "SELECT data FROM user_photos WHERE uid = %d ORDER BY rowid ASC", did));
+
+                TLRPC.photos_Photos res = new TLRPC.TL_photos_photos();
+                ArrayList<TLRPC.Message> messages = new ArrayList<>();
+
+                while (cursor.next()) {
+                    NativeByteBuffer data = cursor.byteBufferValue(0);
+                    if (data != null) {
+                        TLRPC.Photo photo = TLRPC.Photo.TLdeserialize(data, data.readInt32(false), false);
+                        if (data.remaining() > 0) {
+                            messages.add(TLRPC.Message.TLdeserialize(data, data.readInt32(false), false));
+                        } else {
+                            messages.add(null);
+                        }
+                        data.reuse();
+                        res.photos.add(photo);
+                    }
+                }
+                cursor.dispose();
+                cursor = null;
+                res.photos.add(0, photoToAdd);
+                messages.add(0, null);
+                putDialogPhotosInternal(did, res, messages);
             } catch (Exception e) {
-                FileLog.e(e);
+                checkSQLException(e);
             } finally {
-                if (state != null) {
-                    state.dispose();
+                if (cursor != null) {
+                    cursor.dispose();
                 }
             }
         });
@@ -4129,7 +4318,7 @@ public void emptyMessagesMedia(long dialogId, ArrayList<Integer> mids) {
             AndroidUtilities.runOnUIThread(() -> getFileLoader().cancelLoadFiles(namesToDelete));
             getFileLoader().deleteFiles(filesToDelete, 0);
         } catch (Exception e) {
-            FileLog.e(e);
+            checkSQLException(e);
         } finally {
             if (cursor != null) {
                 cursor.dispose();
@@ -4217,7 +4406,7 @@ public void updateMessagePollResults(long pollId, TLRPC.Poll poll, TLRPC.PollRes
                 database.commitTransaction();
             }
         } catch (Exception e) {
-            FileLog.e(e);
+            checkSQLException(e);
         } finally {
             if (cursor != null) {
                 cursor.dispose();
@@ -4270,7 +4459,7 @@ public void updateMessageReactions(long dialogId, int msgId, TLRPC.TL_messageRea
             }
             database.commitTransaction();
         } catch (Exception e) {
-            FileLog.e(e);
+            checkSQLException(e);
         } finally {
             if (database != null) {
                 database.commitTransaction();
@@ -4287,7 +4476,7 @@ public void updateMessageVoiceTranscriptionOpen(long dialogId, int msgId, TLRPC.
SQLitePreparedStatement state = null; try { database.beginTransaction(); - TLRPC.Message message = getMessageWithCustomParamsOnly(msgId, dialogId); + TLRPC.Message message = getMessageWithCustomParamsOnlyInternal(msgId, dialogId); message.voiceTranscriptionOpen = saveFromMessage.voiceTranscriptionOpen; message.voiceTranscriptionRated = saveFromMessage.voiceTranscriptionRated; message.voiceTranscriptionFinal = saveFromMessage.voiceTranscriptionFinal; @@ -4318,7 +4507,7 @@ public void updateMessageVoiceTranscriptionOpen(long dialogId, int msgId, TLRPC. } database.commitTransaction(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (database != null) { database.commitTransaction(); @@ -4335,7 +4524,7 @@ public void updateMessageVoiceTranscription(long dialogId, int messageId, String SQLitePreparedStatement state = null; try { database.beginTransaction(); - TLRPC.Message message = getMessageWithCustomParamsOnly(messageId, dialogId); + TLRPC.Message message = getMessageWithCustomParamsOnlyInternal(messageId, dialogId); message.voiceTranscriptionFinal = isFinal; message.voiceTranscriptionId = transcriptionId; message.voiceTranscription = text; @@ -4358,7 +4547,7 @@ public void updateMessageVoiceTranscription(long dialogId, int messageId, String nativeByteBuffer.reuse(); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (database != null) { database.commitTransaction(); @@ -4375,7 +4564,7 @@ public void updateMessageVoiceTranscription(long dialogId, int messageId, String SQLitePreparedStatement state = null; try { database.beginTransaction(); - TLRPC.Message message = getMessageWithCustomParamsOnly(messageId, dialogId); + TLRPC.Message message = getMessageWithCustomParamsOnlyInternal(messageId, dialogId); message.voiceTranscriptionOpen = saveFromMessage.voiceTranscriptionOpen; message.voiceTranscriptionRated = saveFromMessage.voiceTranscriptionRated; message.voiceTranscriptionFinal = saveFromMessage.voiceTranscriptionFinal; @@ -4407,7 +4596,7 @@ public void updateMessageVoiceTranscription(long dialogId, int messageId, String } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (database != null) { database.commitTransaction(); @@ -4424,7 +4613,7 @@ public void updateMessageCustomParams(long dialogId, TLRPC.Message saveFromMessa SQLitePreparedStatement state = null; try { database.beginTransaction(); - TLRPC.Message message = getMessageWithCustomParamsOnly(saveFromMessage.id, dialogId); + TLRPC.Message message = getMessageWithCustomParamsOnlyInternal(saveFromMessage.id, dialogId); MessageCustomParamsHelper.copyParams(saveFromMessage, message); for (int i = 0; i < 2; i++) { @@ -4451,7 +4640,7 @@ public void updateMessageCustomParams(long dialogId, TLRPC.Message saveFromMessa } database.commitTransaction(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (database != null) { database.commitTransaction(); @@ -4463,7 +4652,7 @@ public void updateMessageCustomParams(long dialogId, TLRPC.Message saveFromMessa }); } - private TLRPC.Message getMessageWithCustomParamsOnly(int messageId, long dialogId) { + public TLRPC.Message getMessageWithCustomParamsOnlyInternal(int messageId, long dialogId) { TLRPC.Message message = new TLRPC.TL_message(); SQLiteCursor cursor = null; try { @@ -4485,7 +4674,7 @@ private TLRPC.Message getMessageWithCustomParamsOnly(int messageId, long dialogI cursor = null; } } catch (SQLiteException e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor 
!= null) { cursor.dispose(); @@ -4546,7 +4735,7 @@ public void getNewTask(LongSparseArray> oldTask, LongSparseAr cursor = null; getMessagesController().processLoadedDeleteTask(date, newTask, newTaskMedia); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -4604,7 +4793,7 @@ public void markMentionMessageAsRead(long dialogId, int messageId, long did) { } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -4618,7 +4807,7 @@ public void markMessageAsMention(long dialogId, int mid) { try { database.executeFast(String.format(Locale.US, "UPDATE messages_v2 SET mention = 1, read_state = read_state & ~2 WHERE mid = %d AND uid = %d", mid, dialogId)).stepThis().dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -4658,7 +4847,7 @@ public void resetMentionsCount(long did, int topicId, int count) { getMessagesController().getTopicsController().processUpdate(Collections.singletonList(topicUpdate)); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -4703,7 +4892,7 @@ public void createTaskForMid(long dialogId, int messageId, int time, int readTim database.executeFast(String.format(Locale.US, "UPDATE messages_v2 SET ttl = 0 WHERE mid = %d AND uid = %d", messageId, dialogId)).stepThis().dispose(); getMessagesController().didAddedNewTask(minDate, dialogId, messages); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -4782,7 +4971,7 @@ public void createTaskForSecretChat(int chatId, int time, int readTime, int isOu getMessagesController().didAddedNewTask(minDate, dialogId, messages); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (database != null) { database.commitTransaction(); @@ -4894,7 +5083,8 @@ private void updateFiltersReadCounter(LongSparseIntArray dialogsToUpdate, LongSp for (int a = 0, N = users.size(); a < N; a++) { TLRPC.User user = users.get(a); boolean muted = getMessagesController().isDialogMuted(user.id, 0); - int idx1 = dialogsByFolders.get(user.id); + Integer folderId = dialogsByFolders.get(user.id); + int idx1 = folderId == null || folderId < 0 || folderId > 1 ? 0 : folderId; int idx2 = muted ? 1 : 0; if (muted) { mutedDialogs.put(user.id, true); @@ -4930,7 +5120,8 @@ private void updateFiltersReadCounter(LongSparseIntArray dialogsToUpdate, LongSp } long did = DialogObject.makeEncryptedDialogId(encryptedChat.id); boolean muted = getMessagesController().isDialogMuted(did, 0); - int idx1 = dialogsByFolders.get(did); + Integer folderId = dialogsByFolders.get(did); + int idx1 = folderId == null || folderId < 0 || folderId > 1 ? 0 : folderId; int idx2 = muted ? 1 : 0; if (muted) { mutedDialogs.put(user.id, true); @@ -4958,7 +5149,8 @@ private void updateFiltersReadCounter(LongSparseIntArray dialogsToUpdate, LongSp boolean muted = getMessagesController().isDialogMuted(-chat.id, 0, chat); boolean hasUnread = dialogsWithUnread.indexOfKey(-chat.id) >= 0; boolean hasMention = dialogsWithMentions.indexOfKey(-chat.id) >= 0; - int idx1 = dialogsByFolders.get(-chat.id); + Integer folderId = dialogsByFolders.get(-chat.id); + int idx1 = folderId == null || folderId < 0 || folderId > 1 ? 0 : folderId; int idx2 = muted ? 
1 : 0; if (muted) { mutedDialogs.put(-chat.id, true); @@ -5606,7 +5798,7 @@ private void updateDialogsWithReadMessagesInternal(ArrayList messages, getMessagesController().reloadMentionsCountForChannels(channelMentionsToReload); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } @@ -5682,7 +5874,7 @@ public void updateChatParticipants(TLRPC.ChatParticipants participants) { data.reuse(); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -5711,7 +5903,7 @@ public void loadChannelAdmins(long chatId) { cursor = null; getMessagesController().processLoadedChannelAdmins(ids, chatId, true); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -5743,7 +5935,7 @@ public void putChannelAdmins(long chatId, LongSparseArray loadUserInfos(HashSet uids) { + ArrayList arrayList = new ArrayList<>(); + try { + String ids = TextUtils.join(",", uids); + SQLiteCursor cursor = database.queryFinalized("SELECT info, pinned FROM user_settings WHERE uid IN(" + ids + ")"); + while (cursor.next()) { + NativeByteBuffer data = cursor.byteBufferValue(0); + if (data != null) { + TLRPC.UserFull info = TLRPC.UserFull.TLdeserialize(data, data.readInt32(false), false); + info.pinned_msg_id = cursor.intValue(1); + arrayList.add(info); + data.reuse(); + + } + } + cursor.dispose(); + cursor = null; + } catch (Exception e) { + checkSQLException(e); + } + return arrayList; + } + public void loadUserInfo(TLRPC.User user, boolean force, int classGuid, int fromMessageId) { if (user == null) { return; @@ -5927,7 +6142,7 @@ public void loadUserInfo(TLRPC.User user, boolean force, int classGuid, int from } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { getMessagesController().processUserInfo(user, info, true, force, classGuid, pinnedMessages, pinnedMessagesMap, totalPinnedCount, pinnedEndReached); if (cursor != null) { @@ -5979,7 +6194,7 @@ public void updateUserInfo(TLRPC.UserFull info, boolean ifExist) { state = null; } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -6002,7 +6217,7 @@ public void saveChatInviter(long chatId, long inviterId) { state.step(); state.dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -6023,7 +6238,7 @@ public void saveChatLinksCount(long chatId, int linksCount) { state.dispose(); state = null; } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -6110,7 +6325,7 @@ public void updateChatInfo(TLRPC.ChatFull info, boolean ifExist) { state.dispose(); state = null; } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -6134,7 +6349,7 @@ public void updateChatOnlineCount(long channelId, int onlineCount) { state.dispose(); state = null; } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -6282,7 +6497,7 @@ public void updatePinnedMessages(long dialogId, ArrayList ids, boolean AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.didLoadPinnedMessages, dialogId, ids, false, null, messages, maxId, newCount, endReached)); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (database != null) { database.commitTransaction(); 
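For reference, the loadUserInfos helper added above batches full-user lookups into a single "SELECT info, pinned FROM user_settings WHERE uid IN(...)" query instead of querying once per user. The sketch below is a condensed restatement of that pattern, not the committed code verbatim: it restores the generic types lost in this excerpt, keeps errors routed through the checkSQLException handler this patch standardizes on, and adds the finally/dispose discipline used elsewhere in the file; the method name is illustrative.

    // Sketch: batch-load serialized TLRPC.UserFull rows for a set of user ids.
    // Assumes it runs on storageQueue and that `database` is the account's cache database.
    public ArrayList<TLRPC.UserFull> loadUserInfosSketch(HashSet<Long> uids) {
        ArrayList<TLRPC.UserFull> result = new ArrayList<>();
        SQLiteCursor cursor = null;
        try {
            String ids = TextUtils.join(",", uids);   // one IN(...) query instead of N single-row queries
            cursor = database.queryFinalized("SELECT info, pinned FROM user_settings WHERE uid IN(" + ids + ")");
            while (cursor.next()) {
                NativeByteBuffer data = cursor.byteBufferValue(0);
                if (data == null) {
                    continue;
                }
                TLRPC.UserFull info = TLRPC.UserFull.TLdeserialize(data, data.readInt32(false), false);
                info.pinned_msg_id = cursor.intValue(1);  // pinned message id lives in its own column
                result.add(info);
                data.reuse();                             // return the native buffer after deserializing
            }
        } catch (Exception e) {
            checkSQLException(e);
        } finally {
            if (cursor != null) {
                cursor.dispose();                         // always dispose cursors, even on error
            }
        }
        return result;
    }

Later in the patch, getDialogs() collects the premium users among the loaded dialogs and feeds them through loadUserInfos before handing the result to processLoadedDialogs.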
@@ -6373,7 +6588,7 @@ public void updateChatInfo(long chatId, long userId, int what, long invited_id, data.reuse(); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -6403,7 +6618,7 @@ public boolean isMigratedChat(long chatId) { result[0] = info instanceof TLRPC.TL_channelFull && info.migrated_from_chat_id != 0; countDownLatch.countDown(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -6414,7 +6629,7 @@ public boolean isMigratedChat(long chatId) { try { countDownLatch.await(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } return result[0]; } @@ -6437,7 +6652,7 @@ public TLRPC.Message getMessage(long dialogId, long msgId) { cursor.dispose(); cursor = null; } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -6448,7 +6663,7 @@ public TLRPC.Message getMessage(long dialogId, long msgId) { try { countDownLatch.await(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } return ref.get(); } @@ -6475,7 +6690,7 @@ public boolean hasInviteMeMessage(long chatId) { cursor.dispose(); cursor = null; } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -6486,7 +6701,7 @@ public boolean hasInviteMeMessage(long chatId) { try { countDownLatch.await(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } return result[0]; } @@ -6563,7 +6778,7 @@ private TLRPC.ChatFull loadChatInfoInternal(long chatId, boolean isChannel, bool info.participants.participants.add(chatChannelParticipant); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } cursor.dispose(); @@ -6617,7 +6832,7 @@ private TLRPC.ChatFull loadChatInfoInternal(long chatId, boolean isChannel, bool } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -6749,7 +6964,7 @@ public void processPendingRead(long dialogId, int maxPositiveId, int maxNegative } updateWidgets(dialogId); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -6788,7 +7003,7 @@ public void putContacts(ArrayList contacts, boolean deleteAll) state = null; database.commitTransaction(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -6809,7 +7024,7 @@ public void deleteContacts(ArrayList uids) { String ids = TextUtils.join(",", uids); database.executeFast("DELETE FROM contacts WHERE uid IN(" + ids + ")").stepThis().dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -6827,7 +7042,7 @@ public void applyPhoneBookUpdates(String adds, String deletes) { database.executeFast(String.format(Locale.US, "UPDATE user_phones_v7 SET deleted = 1 WHERE sphone IN(%s)", deletes)).stepThis().dispose(); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -6881,7 +7096,7 @@ public void putCachedPhoneBook(HashMap conta getCachedPhoneBook(false); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -6956,7 +7171,7 @@ public void getCachedPhoneBook(boolean byError) { return; } } catch (Throwable e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -6979,7 +7194,7 @@ public void getCachedPhoneBook(boolean 
byError) { } } } catch (Throwable e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -7033,7 +7248,7 @@ public void getCachedPhoneBook(boolean byError) { cursor = null; } catch (Exception e) { contactHashMap.clear(); - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -7071,7 +7286,7 @@ public void getContacts() { } catch (Exception e) { contacts.clear(); users.clear(); - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -7211,7 +7426,7 @@ public void getUnsentMessages(int count) { getSendMessagesHelper().processUnsentMessages(messages, scheduledMessages, users, chats, encryptedChats); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -7231,7 +7446,7 @@ public boolean checkMessageByRandomId(long random_id) { result[0] = true; } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -7242,7 +7457,7 @@ public boolean checkMessageByRandomId(long random_id) { try { countDownLatch.await(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } return result[0]; } @@ -7258,7 +7473,7 @@ public boolean checkMessageId(long dialogId, int mid) { result[0] = true; } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -7269,7 +7484,7 @@ public boolean checkMessageId(long dialogId, int mid) { try { countDownLatch.await(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } return result[0]; } @@ -7292,7 +7507,7 @@ public void getUnreadMention(long dialog_id, int topicId, IntCallback callback) cursor.dispose(); AndroidUtilities.runOnUIThread(() -> callback.run(result)); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -7315,7 +7530,7 @@ public void getMessagesCount(long dialog_id, IntCallback callback) { cursor.dispose(); AndroidUtilities.runOnUIThread(() -> callback.run(result)); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { cursor.dispose(); } @@ -8048,7 +8263,7 @@ public Runnable getMessagesInternal(long dialogId, long mergeDialogId, int count } cursor2.dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } } @@ -8176,7 +8391,7 @@ public Runnable getMessagesInternal(long dialogId, long mergeDialogId, int count res.chats.clear(); res.users.clear(); res.animatedEmoji = null; - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -8221,7 +8436,7 @@ private void getAnimatedEmoji(String join, ArrayList documents) documents.add(document); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } if (byteBuffer != null) { byteBuffer.reuse(); @@ -8237,16 +8452,7 @@ private void getAnimatedEmoji(String join, ArrayList documents) } public void getMessages(long dialogId, long mergeDialogId, boolean loadInfo, int count, int max_id, int offset_date, int minDate, int classGuid, int load_type, boolean scheduled, int replyMessageId, int loadIndex, boolean processMessages, boolean isTopic) { - long startTime = System.currentTimeMillis(); storageQueue.postRunnable(() -> { - /*if (loadInfo) { - if (lowerId < 0) { - TLRPC.ChatFull info = loadChatInfoInternal(-lowerId, true, false, 0); - if (info != null) { - mergeDialogIdFinal = -info.migrated_from_chat_id; - } - } - }*/ Runnable 
processMessagesRunnable = getMessagesInternal(dialogId, mergeDialogId, count, max_id, offset_date, minDate, classGuid, load_type, scheduled, replyMessageId, loadIndex, processMessages, isTopic); Utilities.stageQueue.postRunnable(() -> { processMessagesRunnable.run(); @@ -8259,7 +8465,7 @@ public void clearSentMedia() { try { database.executeFast("DELETE FROM sent_files_v2 WHERE 1").stepThis().dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -8293,7 +8499,7 @@ public Object[] getSentFile(String path, int type) { cursor.dispose(); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { countDownLatch.countDown(); } @@ -8301,7 +8507,7 @@ public Object[] getSentFile(String path, int type) { try { countDownLatch.await(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } return result[0] != null ? result : null; } @@ -8328,7 +8534,7 @@ private void updateWidgets(ArrayList dids) { } cursor.dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } @@ -8357,7 +8563,7 @@ public void putWidgetDialogs(int widgetId, ArrayList dids) { state.dispose(); database.commitTransaction(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -8367,7 +8573,7 @@ public void clearWidgetDialogs(int widgetId) { try { database.executeFast("DELETE FROM shortcut_widget WHERE id = " + widgetId).stepThis().dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -8441,7 +8647,7 @@ public void getWidgetDialogIds(int widgetId, int type, ArrayList dids, Arr } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -8452,7 +8658,7 @@ public void getWidgetDialogIds(int widgetId, int type, ArrayList dids, Arr try { countDownLatch.await(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } @@ -8562,7 +8768,7 @@ public void getWidgetDialogs(int widgetId, int type, ArrayList dids, LongS getUsersInternal(TextUtils.join(",", usersToLoad), users); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -8573,7 +8779,7 @@ public void getWidgetDialogs(int widgetId, int type, ArrayList dids, LongS try { countDownLatch.await(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } @@ -8611,7 +8817,7 @@ public void putSentFile(String path, TLObject file, int type, String parent) { data.reuse(); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -8640,7 +8846,7 @@ public void updateEncryptedChatSeq(TLRPC.EncryptedChat chat, boolean cleanup) { database.executeFast(String.format(Locale.US, "DELETE FROM messages_v2 WHERE mid IN (SELECT m.mid FROM messages_v2 as m LEFT JOIN messages_seq as s ON m.mid = s.mid WHERE m.uid = %d AND m.date = 0 AND m.mid < 0 AND s.seq_out <= %d) AND uid = %d", did, chat.in_seq_no, did)).stepThis().dispose(); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -8661,7 +8867,7 @@ public void updateEncryptedChatTTL(TLRPC.EncryptedChat chat) { state.bindInteger(2, chat.id); state.step(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -8682,7 +8888,7 @@ public void updateEncryptedChatLayer(TLRPC.EncryptedChat chat) { state.bindInteger(2, chat.id); state.step(); } catch (Exception e) { - FileLog.e(e); + 
checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -8746,7 +8952,7 @@ public void updateEncryptedChat(TLRPC.EncryptedChat chat) { data4.reuse(); data5.reuse(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -8766,7 +8972,7 @@ public void isDialogHasTopMessage(long did, Runnable onDontExist) { } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -8787,7 +8993,7 @@ public boolean hasAuthMessage(int date) { cursor = database.queryFinalized(String.format(Locale.US, "SELECT mid FROM messages_v2 WHERE uid = 777000 AND date = %d AND mid < 0 LIMIT 1", date)); result[0] = cursor.next(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -8798,7 +9004,7 @@ public boolean hasAuthMessage(int date) { try { countDownLatch.await(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } return result[0]; } @@ -8821,7 +9027,7 @@ public void getEncryptedChat(long chatId, CountDownLatch countDownLatch, ArrayLi } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { countDownLatch.countDown(); } @@ -8910,7 +9116,7 @@ public void putEncryptedChat(TLRPC.EncryptedChat chat, TLRPC.User user, TLRPC.Di state = null; } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -8980,7 +9186,7 @@ private void putUsersInternal(ArrayList users) throws Exception { } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } cursor.dispose(); @@ -9045,7 +9251,7 @@ public void updateChatDefaultBannedRights(long chatId, TLRPC.TL_chatBannedRights state.dispose(); state = null; } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -9127,15 +9333,6 @@ private void putChatsInternal(ArrayList chats) throws Exception { state.dispose(); } - private int malformedCleanupCount = 0; - public void checkMalformed(Exception e) { - if (e != null && e.getMessage() != null && e.getMessage().contains("malformed") && malformedCleanupCount < 3) { - FileLog.e("detected database malformed error, cleaning up..."); - malformedCleanupCount++; - cleanup(false); - } - } - public void getUsersInternal(String usersToLoad, ArrayList result) throws Exception { if (usersToLoad == null || usersToLoad.length() == 0 || result == null) { return; @@ -9155,14 +9352,17 @@ public void getUsersInternal(String usersToLoad, ArrayList result) t } } } catch (Exception e) { - FileLog.e(e); - checkMalformed(e); + checkSQLException(e); } } cursor.dispose(); } public void getChatsInternal(String chatsToLoad, ArrayList result) throws Exception { + getChatsInternal(chatsToLoad, result, true); + } + + public void getChatsInternal(String chatsToLoad, ArrayList result, boolean parseFullData) throws Exception { if (chatsToLoad == null || chatsToLoad.length() == 0 || result == null) { return; } @@ -9171,15 +9371,14 @@ public void getChatsInternal(String chatsToLoad, ArrayList result) t try { NativeByteBuffer data = cursor.byteBufferValue(0); if (data != null) { - TLRPC.Chat chat = TLRPC.Chat.TLdeserialize(data, data.readInt32(false), false); + TLRPC.Chat chat = TLRPC.Chat.TLdeserialize(data, data.readInt32(false), false, parseFullData); data.reuse(); if (chat != null) { result.add(chat); } } } catch (Exception e) { - FileLog.e(e); - checkMalformed(e); + checkSQLException(e); } } 
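The most repetitive change in this file is swapping FileLog.e(e) for checkSQLException(e) in storage error paths, and the standalone checkMalformed helper removed just above is folded into that path. checkSQLException's body is not part of this excerpt, so the following is only a sketch of what such a centralized handler plausibly looks like, assuming it logs and reuses the removed malformed-database recovery; apart from FileLog.e, cleanup(false), and the "malformed" check taken from the removed code, the names are illustrative.

    // Sketch only: the real checkSQLException body is defined outside this hunk.
    private int malformedCleanupCount = 0;

    private void checkSQLExceptionSketch(Throwable e, boolean report) {
        FileLog.e(e, report);   // second flag mirrors the FileLog.e(e, false) call sites in this diff
        String message = e != null ? e.getMessage() : null;
        // SQLite reports corruption as "database disk image is malformed";
        // rebuild the cache a limited number of times instead of failing every query.
        if (message != null && message.contains("malformed") && malformedCleanupCount < 3) {
            malformedCleanupCount++;
            cleanup(false);
        }
    }

    private void checkSQLExceptionSketch(Throwable e) {
        checkSQLExceptionSketch(e, true);
    }

Centralizing the handler is what makes the mechanical FileLog.e to checkSQLException replacements in the rest of the diff worthwhile: every storage exception now passes through one place that can detect corruption and trigger recovery.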
cursor.dispose(); @@ -9225,7 +9424,7 @@ public void getEncryptedChatsInternal(String chatsToLoad, ArrayList users, ArrayList> ids, boolean } AndroidUtilities.runOnUIThread(() -> getDownloadController().cancelDownloading(ids)); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -9332,7 +9531,7 @@ public void clearDownloadQueue(int type) { database.executeFast(String.format(Locale.US, "DELETE FROM download_queue WHERE type = %d", type)).stepThis().dispose(); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -9368,7 +9567,7 @@ public void getDownloadQueue(int type) { AndroidUtilities.runOnUIThread(() -> getDownloadController().processDownloadObjects(type, objects)); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -9484,7 +9683,7 @@ public void putWebPages(LongSparseArray webPages) { AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.didReceivedWebpages, messages)); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -9525,6 +9724,7 @@ public void overwriteChannel(long channelId, TLRPC.TL_updates_channelDifferenceT database.executeFast("DELETE FROM chat_pinned_v2 WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM messages_v2 WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM bot_keyboard WHERE uid = " + did).stepThis().dispose(); + database.executeFast("DELETE FROM bot_keyboard_topics WHERE uid = " + did).stepThis().dispose(); database.executeFast("UPDATE media_counts_v2 SET old = 1 WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM media_v4 WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM messages_holes WHERE uid = " + did).stepThis().dispose(); @@ -9537,7 +9737,7 @@ public void overwriteChannel(long channelId, TLRPC.TL_updates_channelDifferenceT database.executeFast("DELETE FROM messages_topics WHERE uid = " + did).stepThis().dispose(); database.executeFast("DELETE FROM messages_holes_topics WHERE uid = " + did).stepThis().dispose(); - getMediaDataController().clearBotKeyboard(did, null); + getMediaDataController().clearBotKeyboard(did); TLRPC.TL_messages_dialogs dialogs = new TLRPC.TL_messages_dialogs(); dialogs.chats.addAll(difference.chats); @@ -9565,7 +9765,7 @@ public void overwriteChannel(long channelId, TLRPC.TL_updates_channelDifferenceT getMessagesController().getTopicsController().reloadTopics(channelId); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -9693,7 +9893,7 @@ public void putChannelViews(LongSparseArray channelViews, LongSp database.commitTransaction(); inTransaction = false; } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (inTransaction) { if (database != null) { @@ -9811,7 +10011,7 @@ private void updateRepliesMaxReadIdInternal(long chatId, int mid, int readMaxId, resetForumBadgeIfNeed(-chatId); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -9842,7 +10042,7 @@ private void resetForumBadgeIfNeed(long dialogId) { updateFiltersReadCounter(dialogsToUpdate, null, true); getMessagesController().processDialogsUpdateRead(dialogsToUpdate, null); } catch (Throwable e) { - FileLog.e(e); + checkSQLException(e); } finally { if 
(cursor != null) { cursor.dispose(); @@ -9899,7 +10099,7 @@ public void updateRepliesCount(long chatId, int mid, ArrayList repli state.dispose(); state = null; } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -9942,7 +10142,7 @@ public void updateMessageVerifyFlags(ArrayList messages) { database.commitTransaction(); databaseInTransaction = false; } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (databaseInTransaction) { if (database != null) { @@ -10061,7 +10261,7 @@ private void putMessagesInternal(ArrayList messages, boolean with LongSparseIntArray newMentionsCounts = new LongSparseIntArray(); LongSparseIntArray mentionCounts = new LongSparseIntArray(); SparseArray mediaCounts = null; - LongSparseArray botKeyboards = new LongSparseArray<>(); + HashMap botKeyboards = new HashMap<>(); LongSparseArray> dialogMessagesMediaIdsMap = null; LongSparseArray dialogsMediaTypesChange = null; @@ -10235,15 +10435,20 @@ private void putMessagesInternal(ArrayList messages, boolean with } } if (isValidKeyboardToSave(message)) { - TLRPC.Message oldMessage = botKeyboards.get(message.dialog_id); + TopicKey topicKey = TopicKey.of(message.dialog_id, topicId); + TLRPC.Message oldMessage = botKeyboards.get(topicKey); if (oldMessage == null || oldMessage.id < message.id) { - botKeyboards.put(message.dialog_id, message); + botKeyboards.put(topicKey, message); } } } - for (int a = 0; a < botKeyboards.size(); a++) { - getMediaDataController().putBotKeyboard(botKeyboards.keyAt(a), botKeyboards.valueAt(a)); + if (botKeyboards != null && !botKeyboards.isEmpty()) { + Iterator iterator = botKeyboards.keySet().iterator(); + while (iterator.hasNext()) { + TopicKey topicKey = iterator.next(); + getMediaDataController().putBotKeyboard(topicKey, botKeyboards.get(topicKey)); + } } if (mediaIdsMap != null) { @@ -10735,7 +10940,7 @@ private void putMessagesInternal(ArrayList messages, boolean with state_download.bindInteger(2, type); state_download.bindInteger(3, message.date); state_download.bindByteBuffer(4, data); - state_download.bindString(5, "sent_" + (message.peer_id != null ? message.peer_id.channel_id : 0) + "_" + message.id + "_" + DialogObject.getPeerDialogId(message.peer_id) + "_" + messageObject.type); + state_download.bindString(5, "sent_" + (message.peer_id != null ? 
message.peer_id.channel_id : 0) + "_" + message.id + "_" + DialogObject.getPeerDialogId(message.peer_id) + "_" + messageObject.type + "_" + messageObject.getSize()); state_download.step(); data.reuse(); } @@ -11066,7 +11271,7 @@ private void putMessagesInternal(ArrayList messages, boolean with updateWidgets(dids); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (databaseInTransaction) { if (database != null) { @@ -11188,7 +11393,7 @@ public void markMessageAsSendError(TLRPC.Message message, boolean scheduled) { database.executeFast(String.format(Locale.US, "UPDATE messages_topics SET send_state = 2 WHERE mid = %d AND uid = %d", messageId, MessageObject.getDialogId(message))).stepThis().dispose(); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } }); } @@ -11204,7 +11409,7 @@ public void setMessageSeq(int mid, int seq_in, int seq_out) { state.bindInteger(3, seq_out); state.step(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -11225,7 +11430,7 @@ private long[] updateMessageStateAndIdInternal(long randomId, long dialogId, Int dialogId = cursor.longValue(1); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -11245,7 +11450,7 @@ private long[] updateMessageStateAndIdInternal(long randomId, long dialogId, Int state.bindInteger(3, oldMessageId); state.step(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -11267,7 +11472,7 @@ private long[] updateMessageStateAndIdInternal(long randomId, long dialogId, Int try { database.executeFast(String.format(Locale.US, "DELETE FROM randoms_v2 WHERE random_id = %d AND mid = %d AND uid = %d", randomId, _oldId, dialogId)).stepThis().dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } return null; } @@ -11281,7 +11486,7 @@ private long[] updateMessageStateAndIdInternal(long randomId, long dialogId, Int scheduled = 0; } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -11297,7 +11502,7 @@ private long[] updateMessageStateAndIdInternal(long randomId, long dialogId, Int scheduled = 1; } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -11330,7 +11535,7 @@ private long[] updateMessageStateAndIdInternal(long randomId, long dialogId, Int state2.step(); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -11360,7 +11565,7 @@ private long[] updateMessageStateAndIdInternal(long randomId, long dialogId, Int database.executeFast(String.format(Locale.US, "DELETE FROM messages_seq WHERE mid = %d", oldMessageId)).stepThis().dispose(); database.executeFast(String.format(Locale.US, "DELETE FROM messages_topics WHERE mid = %d AND uid = %d", oldMessageId, did)).stepThis().dispose(); } catch (Exception e2) { - FileLog.e(e2); + checkSQLException(e2); } } finally { if (state != null) { @@ -11383,7 +11588,7 @@ private long[] updateMessageStateAndIdInternal(long randomId, long dialogId, Int try { database.executeFast(String.format(Locale.US, "DELETE FROM media_v4 WHERE mid = %d AND uid = %d", oldMessageId, did)).stepThis().dispose(); } catch (Exception e2) { - FileLog.e(e2); + checkSQLException(e2); } } finally { if (state != null) { @@ -11402,7 +11607,7 @@ private long[] 
updateMessageStateAndIdInternal(long randomId, long dialogId, Int try { database.executeFast(String.format(Locale.US, "DELETE FROM media_topics WHERE mid = %d AND uid = %d", oldMessageId, did)).stepThis().dispose(); } catch (Exception e2) { - FileLog.e(e2); + checkSQLException(e2); } } finally { if (state != null) { @@ -11417,7 +11622,7 @@ private long[] updateMessageStateAndIdInternal(long randomId, long dialogId, Int state.bindInteger(2, oldMessageId); state.step(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -11434,7 +11639,7 @@ private long[] updateMessageStateAndIdInternal(long randomId, long dialogId, Int try { database.executeFast(String.format(Locale.US, "DELETE FROM scheduled_messages_v2 WHERE mid = %d AND uid = %d", oldMessageId, did)).stepThis().dispose(); } catch (Exception e2) { - FileLog.e(e2); + checkSQLException(e2); } } finally { if (state != null) { @@ -11522,8 +11727,7 @@ private void updateUsersInternal(ArrayList users, boolean onlyStatus } } } catch (Exception e) { - FileLog.e(e); - checkMalformed(e); + checkSQLException(e); } finally { if (database != null) { database.commitTransaction(); @@ -11584,7 +11788,7 @@ private void markMessagesAsReadInternal(LongSparseIntArray inbox, LongSparseIntA } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -11613,7 +11817,7 @@ private void markMessagesContentAsReadInternal(long dialogId, ArrayList cursor = null; } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -11647,7 +11851,7 @@ public void markMessagesContentAsRead(long dialogId, ArrayList mids, in markMessagesContentAsReadInternal(sparseArray.keyAt(a), sparseArray.valueAt(a), date); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -11699,7 +11903,7 @@ public void markMessagesAsDeletedByRandoms(ArrayList messages) { } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -11712,7 +11916,7 @@ protected void deletePushMessages(long dialogId, ArrayList messages) { try { database.executeFast(String.format(Locale.US, "DELETE FROM unread_push_messages WHERE uid = %d AND mid IN(%s)", dialogId, TextUtils.join(",", messages))).stepThis().dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } @@ -11730,7 +11934,7 @@ private void broadcastScheduledMessagesChange(Long did) { cursor = null; AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.scheduledMessagesUpdated, did, count)); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -11757,7 +11961,7 @@ private ArrayList markMessagesAsDeletedInternal(long dialogId, ArrayList markMessagesAsDeletedInternal(long dialogId, ArrayList markMessagesAsDeletedInternal(long dialogId, ArrayList markMessagesAsDeletedInternal(long dialogId, ArrayList> mediaCounts = null; @@ -12177,7 +12382,7 @@ private ArrayList markMessagesAsDeletedInternal(long dialogId, ArrayList markMessagesAsDeletedInternal(long dialogId, ArrayList markMessagesAsDeletedInternal(long channelId, int mid, b } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } cursor.dispose(); cursor = null; @@ -12558,7 +12763,7 @@ private ArrayList markMessagesAsDeletedInternal(long channelId, int mid, 
b updateWidgets(dialogsIds); return dialogsIds; } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -12787,7 +12992,7 @@ public void closeHolesInMedia(long did, int minId, int maxId, int type, int topi database.executeFast(String.format(Locale.US, "UPDATE media_holes_v2 SET end = %d WHERE uid = %d AND type = %d AND start = %d AND end = %d", minId, did, hole.type, hole.start, hole.end)).stepThis().dispose(); } } catch (Exception e) { - FileLog.e(e, false); + checkSQLException(e, false); } } } else if (minId <= hole.start + 1) { @@ -12799,7 +13004,7 @@ public void closeHolesInMedia(long did, int minId, int maxId, int type, int topi database.executeFast(String.format(Locale.US, "UPDATE media_holes_v2 SET start = %d WHERE uid = %d AND type = %d AND start = %d AND end = %d", maxId, did, hole.type, hole.start, hole.end)).stepThis().dispose(); } } catch (Exception e) { - FileLog.e(e, false); + checkSQLException(e, false); } } } else { @@ -12832,7 +13037,7 @@ public void closeHolesInMedia(long did, int minId, int maxId, int type, int topi } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -12885,7 +13090,7 @@ private void closeHolesInTable(String table, long did, int minId, int maxId, int database.executeFast(String.format(Locale.US, "UPDATE " + table + " SET end = %d WHERE uid = %d AND start = %d AND end = %d", minId, did, hole.start, hole.end)).stepThis().dispose(); } } catch (Exception e) { - FileLog.e(e, false); + checkSQLException(e, false); } } } else if (minId <= hole.start + 1) { @@ -12897,7 +13102,7 @@ private void closeHolesInTable(String table, long did, int minId, int maxId, int database.executeFast(String.format(Locale.US, "UPDATE " + table + " SET start = %d WHERE uid = %d AND start = %d AND end = %d", maxId, did, hole.start, hole.end)).stepThis().dispose(); } } catch (Exception e) { - FileLog.e(e, false); + checkSQLException(e, false); } } } else { @@ -12932,7 +13137,7 @@ private void closeHolesInTable(String table, long did, int minId, int maxId, int } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -12957,12 +13162,13 @@ public void replaceMessageIfExists(TLRPC.Message message, ArrayList try { cursor = database.queryFinalized(String.format(Locale.US, "SELECT uid, read_state, custom_params FROM messages_v2 WHERE mid = %d AND uid = %d LIMIT 1", message.id, MessageObject.getDialogId(message))); if (!cursor.next()) { + cursor.dispose(); return; } readState = cursor.intValue(1); customParams = cursor.byteBufferValue(2); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -13103,7 +13309,7 @@ public void replaceMessageIfExists(TLRPC.Message message, ArrayList AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.replaceMessagesObjects, messageObject.getDialogId(), arrayList)); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (database != null) { database.commitTransaction(); @@ -13211,7 +13417,7 @@ public void putMessages(TLRPC.messages_Messages messages, long dialogId, int loa state_webpage = null; state_tasks = null; int minDeleteTime = Integer.MAX_VALUE; - TLRPC.Message botKeyboard = null; + HashMap botKeyboards = null; long channelId = 0; for (int a = 0; a < count; a++) { TLRPC.Message message = messages.messages.get(a); @@ -13435,7 
+13641,7 @@ public void putMessages(TLRPC.messages_Messages messages, long dialogId, int loa database.executeFast(String.format(Locale.US, "DELETE FROM media_v4 WHERE mid = %d AND uid = %d", message.id, dialogId)).stepThis().dispose(); database.executeFast("DELETE FROM media_counts_v2 WHERE uid = " + dialogId).stepThis().dispose(); } catch (Exception e2) { - FileLog.e(e2); + checkSQLException(e2); } } } @@ -13489,8 +13695,13 @@ public void putMessages(TLRPC.messages_Messages messages, long dialogId, int loa } if (load_type == 0 && isValidKeyboardToSave(message)) { - if (botKeyboard == null || botKeyboard.id < message.id) { - botKeyboard = message; + TopicKey topicKey = TopicKey.of(dialogId, MessageObject.getTopicId(message, isForum(dialogId))); + TLRPC.Message currentBotKeyboard = botKeyboards == null ? null : botKeyboards.get(topicKey); + if (currentBotKeyboard == null || currentBotKeyboard.id < message.id) { + if (botKeyboards == null) { + botKeyboards = new HashMap<>(); + } + botKeyboards.put(topicKey, message); } } } @@ -13513,8 +13724,12 @@ public void putMessages(TLRPC.messages_Messages messages, long dialogId, int loa state_polls.dispose(); state_polls = null; } - if (botKeyboard != null) { - getMediaDataController().putBotKeyboard(dialogId, botKeyboard); + if (botKeyboards != null) { + Iterator iterator = botKeyboards.keySet().iterator(); + while (iterator.hasNext()) { + TopicKey topicKey = iterator.next(); + getMediaDataController().putBotKeyboard(topicKey, botKeyboards.get(topicKey)); + } } deleteFromDownloadQueue(idsToDelete, false); AndroidUtilities.runOnUIThread(() -> getFileLoader().cancelLoadFiles(namesToDelete)); @@ -13542,7 +13757,7 @@ public void putMessages(TLRPC.messages_Messages messages, long dialogId, int loa } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (database != null) { database.commitTransaction(); @@ -13745,6 +13960,7 @@ public void getDialogs(int folderId, int offset, int count, boolean loadDraftsPe SQLiteCursor cursor = null; try { ArrayList usersToLoad = new ArrayList<>(); + HashSet dialogUsers = new HashSet<>(); usersToLoad.add(getUserConfig().getClientUserId()); ArrayList chatsToLoad = new ArrayList<>(); ArrayList encryptedToLoad = new ArrayList<>(); @@ -13864,7 +14080,7 @@ public void getDialogs(int folderId, int offset, int count, boolean loadDraftsPe } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } else { data.reuse(); @@ -13886,6 +14102,7 @@ public void getDialogs(int folderId, int offset, int count, boolean loadDraftsPe if (!usersToLoad.contains(dialogId)) { usersToLoad.add(dialogId); } + dialogUsers.add(dialogId); } else if (DialogObject.isChatDialog(dialogId)) { if (!chatsToLoad.contains(-dialogId)) { chatsToLoad.add(-dialogId); @@ -13959,7 +14176,7 @@ public void getDialogs(int folderId, int offset, int count, boolean loadDraftsPe } } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } } else { data.reuse(); @@ -14028,15 +14245,28 @@ public void getDialogs(int folderId, int offset, int count, boolean loadDraftsPe if (!usersToLoad.isEmpty()) { getUsersInternal(TextUtils.join(",", usersToLoad), dialogs.users); } - getMessagesController().processLoadedDialogs(dialogs, encryptedChats, folderId, offset, count, 1, false, false, true); + ArrayList fullUsers = null; + if (!dialogUsers.isEmpty()) { + HashSet fullUsersToLoad = new HashSet<>(); + for (Long did : dialogUsers) { + for (int i = 0; i < dialogs.users.size(); i++) { + if (dialogs.users.get(i).id == did && 
dialogs.users.get(i).premium) { + fullUsersToLoad.add(did); + } + } + } + if (!fullUsersToLoad.isEmpty()) { + fullUsers = loadUserInfos(fullUsersToLoad); + } + } + getMessagesController().processLoadedDialogs(dialogs, encryptedChats, fullUsers, folderId, offset, count, 1, false, false, true); } catch (Exception e) { dialogs.dialogs.clear(); dialogs.users.clear(); dialogs.chats.clear(); encryptedChats.clear(); - FileLog.e(e); - getMessagesController().processLoadedDialogs(dialogs, encryptedChats, folderId, 0, 100, 1, true, false, true); - checkMalformed(e); + checkSQLException(e); + getMessagesController().processLoadedDialogs(dialogs, encryptedChats, null, folderId, 0, 100, 1, true, false, true); } finally { if (cursor != null) { cursor.dispose(); @@ -14070,6 +14300,41 @@ public static void createFirstHoles(long did, SQLitePreparedStatement state5, SQ } } + public void updateDialogData(TLRPC.Dialog dialog) { + if (dialog == null) { + return; + } + storageQueue.postRunnable(() -> { + SQLiteCursor cursor = null; + SQLitePreparedStatement state = null; + try { + cursor = database.queryFinalized("SELECT data FROM dialogs WHERE did = " + dialog.id); + if (!cursor.next()) { + return; + } + + state = database.executeFast("UPDATE dialogs SET data = ? WHERE did = ?"); + NativeByteBuffer data = new NativeByteBuffer(dialog.getObjectSize()); + dialog.serializeToStream(data); + state.bindByteBuffer(1, data); + state.bindLong(2, dialog.id); + state.step(); + state.dispose(); + state = null; + data.reuse(); + } catch (Exception e) { + checkSQLException(e); + } finally { + if (cursor != null) { + cursor.dispose(); + } + if (state != null) { + state.dispose(); + } + } + }); + } + private void putDialogsInternal(TLRPC.messages_Dialogs dialogs, int check) { SQLitePreparedStatement state_messages = null; SQLitePreparedStatement state_dialogs = null; @@ -14145,7 +14410,8 @@ private void putDialogsInternal(TLRPC.messages_Dialogs dialogs, int check) { messageDate = Math.max(message.date, messageDate); if (isValidKeyboardToSave(message)) { - getMediaDataController().putBotKeyboard(dialog.id, message); + TopicKey topicKey = TopicKey.of(dialog.id, MessageObject.getTopicId(message, isForum(dialog.id))); + getMediaDataController().putBotKeyboard(topicKey, message); } fixUnsupportedMedia(message); @@ -14313,7 +14579,7 @@ private void putDialogsInternal(TLRPC.messages_Dialogs dialogs, int check) { database.commitTransaction(); resetAllUnreadCounters(false); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (database != null) { database.commitTransaction(); @@ -14369,7 +14635,7 @@ public void getDialogFolderId(long dialogId, IntCallback callback) { } AndroidUtilities.runOnUIThread(() -> callback.run(folderId)); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -14422,7 +14688,7 @@ public void setDialogsFolderId(ArrayList peers, ArrayList resultArray, ArrayList resultArrayNames, ArrayList encUsers, int folderId) { + public void localSearch(int dialogsType, String query, ArrayList resultArray, ArrayList resultArrayNames, ArrayList encUsers, ArrayList onlyDialogIds, int folderId) { long selfUserId = UserConfig.getInstance(currentAccount).getClientUserId(); SQLiteCursor cursor = null; try { @@ -14846,23 +15112,30 @@ public void localSearch(int dialogsType, String query, ArrayList resultA dialogSearchResult.date = cursor.intValue(1); dialogsResult.put(id, dialogSearchResult); + if (dialogsType == 
DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER && (onlyDialogIds == null || !onlyDialogIds.contains(id))) { + continue; + } + if (!DialogObject.isEncryptedDialog(id)) { if (DialogObject.isUserDialog(id)) { - if (dialogsType == 4 && id == selfUserId) { + if (dialogsType == DialogsActivity.DIALOGS_TYPE_USERS_ONLY && id == selfUserId) { continue; } - if (dialogsType != 2 && !usersToLoad.contains(id)) { + if (dialogsType == DialogsActivity.DIALOGS_TYPE_GROUPS_ONLY || dialogsType == DialogsActivity.DIALOGS_TYPE_CHANNELS_ONLY) { + continue; + } + if (dialogsType != DialogsActivity.DIALOGS_TYPE_ADD_USERS_TO && !usersToLoad.contains(id)) { usersToLoad.add(id); } } else { - if (dialogsType == 4) { + if (dialogsType == DialogsActivity.DIALOGS_TYPE_USERS_ONLY) { continue; } if (!chatsToLoad.contains(-id)) { chatsToLoad.add(-id); } } - } else if (dialogsType == 0 || dialogsType == 3) { + } else if (dialogsType == DialogsActivity.DIALOGS_TYPE_DEFAULT || dialogsType == DialogsActivity.DIALOGS_TYPE_FORWARD) { int encryptedChatId = DialogObject.getEncryptedChatId(id); if (!encryptedToLoad.contains(encryptedChatId)) { encryptedToLoad.add(encryptedChatId); @@ -14925,6 +15198,9 @@ public void localSearch(int dialogsType, String query, ArrayList resultA if (data != null) { TLRPC.User user = TLRPC.User.TLdeserialize(data, data.readInt32(false), false); data.reuse(); + if (dialogsType == DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER && (onlyDialogIds == null || !onlyDialogIds.contains(user.id))) { + continue; + } DialogsSearchAdapter.DialogSearchResult dialogSearchResult = dialogsResult.get(user.id); if (user.status != null) { user.status.expires = cursor.intValue(1); @@ -14968,6 +15244,15 @@ public void localSearch(int dialogsType, String query, ArrayList resultA if (data != null) { TLRPC.Chat chat = TLRPC.Chat.TLdeserialize(data, data.readInt32(false), false); data.reuse(); + if (dialogsType == DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER && (onlyDialogIds == null || !onlyDialogIds.contains(-chat.id))) { + continue; + } + if (dialogsType == DialogsActivity.DIALOGS_TYPE_GROUPS_ONLY && ChatObject.isChannelAndNotMegaGroup(chat)) { + continue; + } + if (dialogsType == DialogsActivity.DIALOGS_TYPE_CHANNELS_ONLY && !ChatObject.isChannelAndNotMegaGroup(chat)) { + continue; + } if (!(chat == null || chat.deactivated || ChatObject.isChannel(chat) && ChatObject.isNotInChat(chat))) { long dialog_id = -chat.id; DialogsSearchAdapter.DialogSearchResult dialogSearchResult = dialogsResult.get(dialog_id); @@ -14988,7 +15273,7 @@ public void localSearch(int dialogsType, String query, ArrayList resultA cursor = null; } - if (!encryptedToLoad.isEmpty()) { + if (!encryptedToLoad.isEmpty() && dialogsType != DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER) { cursor = getDatabase().queryFinalized(String.format(Locale.US, "SELECT q.data, u.name, q.user, q.g, q.authkey, q.ttl, u.data, u.status, q.layer, q.seq_in, q.seq_out, q.use_count, q.exchange_id, q.key_date, q.fprint, q.fauthkey, q.khash, q.in_seq_no, q.admin_id, q.mtproto_seq FROM enc_chats as q INNER JOIN users as u ON q.user = u.uid WHERE q.uid IN(%s)", TextUtils.join(",", encryptedToLoad))); while (cursor.next()) { String name = cursor.stringValue(1); @@ -15100,7 +15385,7 @@ public void localSearch(int dialogsType, String query, ArrayList resultA resultArrayNames.add(dialogSearchResult.name); } - if (dialogsType != 2) { + if (dialogsType != DialogsActivity.DIALOGS_TYPE_ADD_USERS_TO && dialogsType != DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER && dialogsType != 
DialogsActivity.DIALOGS_TYPE_GROUPS_ONLY && dialogsType != DialogsActivity.DIALOGS_TYPE_CHANNELS_ONLY) { cursor = getDatabase().queryFinalized("SELECT u.data, u.status, u.name, u.uid FROM users as u INNER JOIN contacts as c ON u.uid = c.uid"); while (cursor.next()) { long uid = cursor.longValue(3); @@ -15151,7 +15436,7 @@ public void localSearch(int dialogsType, String query, ArrayList resultA cursor = null; } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -15170,11 +15455,11 @@ public ArrayList getCachedMessagesInRange(long dialogId, int minDate, i messageIds.add(mid); } } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } cursor.dispose(); } catch (Exception e) { - FileLog.e(e); + checkSQLException(e); } finally { if (cursor != null) { cursor.dispose(); @@ -15323,7 +15608,7 @@ public void markMessageReactionsAsReadInternal(long dialogId, int topicId, int m } } } catch (SQLiteException e) { - FileLog.e(e); + checkSQLException(e); } finally { if (state != null) { state.dispose(); @@ -15438,5 +15723,13 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(dialogId, topicId); } + + @Override + public String toString() { + return "TopicKey{" + + "dialogId=" + dialogId + + ", topicId=" + topicId + + '}'; + } } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MusicBrowserService.java b/TMessagesProj/src/main/java/org/telegram/messenger/MusicBrowserService.java index 3ed7fcd432..1e20594946 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MusicBrowserService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MusicBrowserService.java @@ -34,6 +34,7 @@ import android.os.SystemClock; import android.service.media.MediaBrowserService; import android.text.TextUtils; +import android.widget.Toast; import androidx.collection.LongSparseArray; @@ -139,7 +140,7 @@ public void onDestroy() { @Override public BrowserRoot onGetRoot(String clientPackageName, int clientUid, Bundle rootHints) { - if (clientPackageName == null || Process.SYSTEM_UID != clientUid && Process.myUid() != clientUid && !clientPackageName.equals("com.google.android.mediasimulator") && !clientPackageName.equals("com.google.android.projection.gearhead")) { + if (clientPackageName == null || Process.SYSTEM_UID != clientUid && Process.myUid() != clientUid && !clientPackageName.equals("com.google.android.mediasimulator") && !clientPackageName.equals("com.google.android.projection.gearhead") || passcode()) { return null; } return new BrowserRoot(MEDIA_ID_ROOT, null); @@ -147,6 +148,12 @@ public BrowserRoot onGetRoot(String clientPackageName, int clientUid, Bundle roo @Override public void onLoadChildren(String parentMediaId, Result> result) { + if (passcode()) { + Toast.makeText(getApplicationContext(), LocaleController.getString(R.string.EnterYourTelegramPasscode), Toast.LENGTH_LONG).show(); + stopSelf(); + result.detach(); + return; + } if (!chatsLoaded) { result.detach(); if (loadingChats) { @@ -266,6 +273,17 @@ public void onLoadChildren(String parentMediaId, Result 0 && ( + SharedConfig.appLocked || + SharedConfig.autoLockIn != 0 && SharedConfig.lastPauseTime != 0 && (SharedConfig.lastPauseTime + SharedConfig.autoLockIn) <= uptime || + uptime + 5 < SharedConfig.lastPauseTime + ) + ); + } + private void loadChildrenImpl(String parentMediaId, Result> result) { List mediaItems = new ArrayList<>(); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MusicPlayerService.java 
b/TMessagesProj/src/main/java/org/telegram/messenger/MusicPlayerService.java index d01c363d73..36058ea2d7 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MusicPlayerService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MusicPlayerService.java @@ -173,7 +173,7 @@ public int onStartCommand(Intent intent, int flags, int startId) { audioManager.registerMediaButtonEventReceiver(remoteComponentName); Intent mediaButtonIntent = new Intent(Intent.ACTION_MEDIA_BUTTON); mediaButtonIntent.setComponent(remoteComponentName); - PendingIntent mediaPendingIntent = PendingIntent.getBroadcast(this, 0, mediaButtonIntent, PendingIntent.FLAG_MUTABLE); + PendingIntent mediaPendingIntent = PendingIntent.getBroadcast(this, 0, mediaButtonIntent, fixIntentFlags(PendingIntent.FLAG_MUTABLE)); remoteControlClient = new RemoteControlClient(mediaPendingIntent); audioManager.registerRemoteControlClient(remoteControlClient); } @@ -220,7 +220,7 @@ private void createNotification(MessageObject messageObject, boolean forBitmap) Intent intent = new Intent(ApplicationLoader.applicationContext, LaunchActivity.class); intent.setAction("com.tmessages.openplayer"); intent.addCategory(Intent.CATEGORY_LAUNCHER); - PendingIntent contentIntent = PendingIntent.getActivity(ApplicationLoader.applicationContext, 0, intent, PendingIntent.FLAG_MUTABLE); + PendingIntent contentIntent = PendingIntent.getActivity(ApplicationLoader.applicationContext, 0, intent, fixIntentFlags(PendingIntent.FLAG_MUTABLE)); Notification notification; @@ -247,12 +247,12 @@ private void createNotification(MessageObject messageObject, boolean forBitmap) if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { boolean isPlaying = !MediaController.getInstance().isMessagePaused(); - PendingIntent pendingPrev = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_PREVIOUS).setComponent(new ComponentName(this, MusicPlayerReceiver.class)), PendingIntent.FLAG_CANCEL_CURRENT | PendingIntent.FLAG_IMMUTABLE); + PendingIntent pendingPrev = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_PREVIOUS).setComponent(new ComponentName(this, MusicPlayerReceiver.class)), fixIntentFlags(PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_CANCEL_CURRENT)); //PendingIntent pendingStop = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_CLOSE).setComponent(new ComponentName(this, MusicPlayerReceiver.class)), PendingIntent.FLAG_CANCEL_CURRENT); - PendingIntent pendingStop = PendingIntent.getService(getApplicationContext(), 0, new Intent(this, getClass()).setAction(getPackageName() + ".STOP_PLAYER"), PendingIntent.FLAG_CANCEL_CURRENT | PendingIntent.FLAG_IMMUTABLE); - PendingIntent pendingPlaypause = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(isPlaying ? 
NOTIFY_PAUSE : NOTIFY_PLAY).setComponent(new ComponentName(this, MusicPlayerReceiver.class)), PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_CANCEL_CURRENT); - PendingIntent pendingNext = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_NEXT).setComponent(new ComponentName(this, MusicPlayerReceiver.class)), PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_CANCEL_CURRENT); - PendingIntent pendingSeek = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_SEEK).setComponent(new ComponentName(this, MusicPlayerReceiver.class)), PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_CANCEL_CURRENT); + PendingIntent pendingStop = PendingIntent.getService(getApplicationContext(), 0, new Intent(this, getClass()).setAction(getPackageName() + ".STOP_PLAYER"), fixIntentFlags(PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_CANCEL_CURRENT)); + PendingIntent pendingPlaypause = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(isPlaying ? NOTIFY_PAUSE : NOTIFY_PLAY).setComponent(new ComponentName(this, MusicPlayerReceiver.class)), fixIntentFlags(PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_CANCEL_CURRENT)); + PendingIntent pendingNext = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_NEXT).setComponent(new ComponentName(this, MusicPlayerReceiver.class)), fixIntentFlags(PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_CANCEL_CURRENT)); + PendingIntent pendingSeek = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_SEEK).setComponent(new ComponentName(this, MusicPlayerReceiver.class)), fixIntentFlags(PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_CANCEL_CURRENT)); Notification.Builder bldr = new Notification.Builder(this); bldr.setSmallIcon(R.drawable.player) @@ -481,18 +481,25 @@ private void updatePlaybackState(long seekTo) { } public void setListeners(RemoteViews view) { - PendingIntent pendingIntent = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_PREVIOUS), PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_UPDATE_CURRENT); + PendingIntent pendingIntent = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_PREVIOUS), fixIntentFlags(PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_UPDATE_CURRENT)); view.setOnClickPendingIntent(R.id.player_previous, pendingIntent); - pendingIntent = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_CLOSE), PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_UPDATE_CURRENT); + pendingIntent = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_CLOSE), fixIntentFlags(PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_UPDATE_CURRENT)); view.setOnClickPendingIntent(R.id.player_close, pendingIntent); - pendingIntent = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_PAUSE), PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_UPDATE_CURRENT); + pendingIntent = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_PAUSE), fixIntentFlags(PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_UPDATE_CURRENT)); view.setOnClickPendingIntent(R.id.player_pause, pendingIntent); - pendingIntent = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_NEXT), PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_UPDATE_CURRENT); + pendingIntent = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_NEXT), fixIntentFlags(PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_UPDATE_CURRENT)); view.setOnClickPendingIntent(R.id.player_next, 
pendingIntent); - pendingIntent = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_PLAY), PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_UPDATE_CURRENT); + pendingIntent = PendingIntent.getBroadcast(getApplicationContext(), 0, new Intent(NOTIFY_PLAY), fixIntentFlags(PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_UPDATE_CURRENT)); view.setOnClickPendingIntent(R.id.player_play, pendingIntent); } + private int fixIntentFlags(int flags) { + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S && XiaomiUtilities.isMIUI()) { + return flags & ~(PendingIntent.FLAG_IMMUTABLE | PendingIntent.FLAG_MUTABLE); + } + return flags; + } + @SuppressLint("NewApi") @Override public void onDestroy() { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java b/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java index 3cf6388da1..694fee13c6 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java @@ -18,7 +18,7 @@ public class NativeLoader { - private final static int LIB_VERSION = 42; + private final static int LIB_VERSION = 43; private final static String LIB_NAME = "tmessages." + LIB_VERSION; private final static String LIB_SO_NAME = "lib" + LIB_NAME + ".so"; private final static String LOCALE_LIB_SO_NAME = "lib" + LIB_NAME + "loc.so"; @@ -71,5 +71,9 @@ public static synchronized void initNativeLibs(Context context) { } private static native void init(String path, boolean enable); + + public static boolean loaded() { + return nativeLoaded; + } //public static native void crash(); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationCenter.java b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationCenter.java index db09d205a2..27d6c0875b 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationCenter.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationCenter.java @@ -72,6 +72,7 @@ public class NotificationCenter { public static final int updateMessageMedia = totalEvents++; public static final int replaceMessagesObjects = totalEvents++; public static final int didSetPasscode = totalEvents++; + public static final int passcodeDismissed = totalEvents++; public static final int twoStepPasswordChanged = totalEvents++; public static final int didSetOrRemoveTwoStepPassword = totalEvents++; public static final int didRemoveTwoStepPassword = totalEvents++; @@ -136,6 +137,11 @@ public class NotificationCenter { public static final int recentEmojiStatusesUpdate = totalEvents++; public static final int updateSearchSettings = totalEvents++; + public static final int messageTranslated = totalEvents++; + public static final int messageTranslating = totalEvents++; + public static final int dialogIsTranslatable = totalEvents++; + public static final int dialogTranslate = totalEvents++; + public static final int didGenerateFingerprintKeyPair = totalEvents++; public static final int walletPendingTransactionsChanged = totalEvents++; @@ -205,6 +211,7 @@ public class NotificationCenter { public static final int updateBotMenuButton = totalEvents++; public static final int didUpdatePremiumGiftStickers = totalEvents++; + public static final int didUpdatePremiumGiftFieldIcon = totalEvents++; //global public static final int pushMessagesUpdated = totalEvents++; @@ -233,6 +240,7 @@ public class NotificationCenter { public static final int didSetNewWallpapper = totalEvents++; public static final int 
proxySettingsChanged = totalEvents++; public static final int proxyCheckDone = totalEvents++; + public static final int proxyChangedByRotation = totalEvents++; public static final int liveLocationsChanged = totalEvents++; public static final int newLocationAvailable = totalEvents++; public static final int liveLocationsCacheChanged = totalEvents++; @@ -271,6 +279,9 @@ public class NotificationCenter { public static int topicsDidLoaded = totalEvents++; public static int chatSwithcedToForum = totalEvents++; public static int didUpdateGlobalAutoDeleteTimer = totalEvents++; + public static int onDatabaseReset = totalEvents++; + + public static boolean alreadyLogged; // custom @@ -591,6 +602,12 @@ public void addObserver(NotificationCenterDelegate observer, int id) { return; } objects.add(observer); + if (BuildVars.DEBUG_VERSION && !alreadyLogged) { + if (objects.size() > 1000) { + alreadyLogged = true; + FileLog.e(new RuntimeException("Total observers more than 1000, need check for memory leak. " + id), true); + } + } } private ArrayList createArrayForId(int id) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java index 5aaf7eb461..5a1a6419a8 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java @@ -800,9 +800,6 @@ public void processEditedMessages(LongSparseArray> edit boolean updated = false; for (int a = 0, N = editedMessages.size(); a < N; a++) { long dialogId = editedMessages.keyAt(a); - if (pushDialogs.indexOfKey(dialogId) < 0) { - continue; - } ArrayList messages = editedMessages.valueAt(a); for (int b = 0, N2 = messages.size(); b < N2; b++) { MessageObject messageObject = messages.get(b); @@ -847,6 +844,7 @@ public void processNewMessages(ArrayList messageObjects, boolean } return; } + ArrayList popupArrayAdd = new ArrayList<>(0); notificationsQueue.postRunnable(() -> { boolean added = false; @@ -967,7 +965,7 @@ public void processNewMessages(ArrayList messageObjects, boolean hasScheduled = messageObject.messageOwner.from_scheduled; } delayedPushMessages.add(messageObject); - pushMessages.add(0, messageObject); + appendMessage(messageObject); if (mid != 0) { if (sparseArray == null) { sparseArray = new SparseArray<>(); @@ -1078,6 +1076,15 @@ public void processNewMessages(ArrayList messageObjects, boolean }); } + private void appendMessage(MessageObject messageObject) { + for (int i = 0; i < pushMessages.size(); i++) { + if (pushMessages.get(i).getId() == messageObject.getId() && pushMessages.get(i).getDialogId() == messageObject.getDialogId()) { + return; + } + } + pushMessages.add(0, messageObject); + } + public int getTotalUnreadCount() { return total_unread_count; } @@ -1273,7 +1280,7 @@ public void processLoadedUnreadMessages(LongSparseArray dialogs, ArrayL pushMessagesDict.put(did, sparseArray); } sparseArray.put(message.id, messageObject); - pushMessages.add(0, messageObject); + appendMessage(messageObject); if (original_dialog_id != dialog_id) { Integer current = pushDialogsOverrideMention.get(original_dialog_id); pushDialogsOverrideMention.put(original_dialog_id, current == null ? 
1 : current + 1); @@ -1357,7 +1364,7 @@ public void processLoadedUnreadMessages(LongSparseArray dialogs, ArrayL } else if (randomId != 0) { fcmRandomMessagesDict.put(randomId, messageObject); } - pushMessages.add(0, messageObject); + appendMessage(messageObject); if (originalDialogId != dialogId) { Integer current = pushDialogsOverrideMention.get(originalDialogId); pushDialogsOverrideMention.put(originalDialogId, current == null ? 1 : current + 1); @@ -1442,7 +1449,8 @@ private int getTotalAllUnreadCount() { } } } catch (Exception e) { - FileLog.e(e); + //ignore, no thread synchronizations for fast + FileLog.e(e, false); } } else { count += controller.pushDialogs.size(); @@ -3469,7 +3477,7 @@ private String validateChannelId(long dialogId, int topicId, String name, long[] if (sound != null) { notificationChannel.setSound(sound, builder.build()); } else { - notificationChannel.setSound(null, null); + // notificationChannel.setSound(null, null); } systemNotificationManager.createNotificationChannel(notificationChannel); if (BuildVars.LOGS_ENABLED) { @@ -3845,7 +3853,7 @@ private void showOrUpdateNotification(boolean notifyAboutLast) { } } intent.putExtra("currentAccount", currentAccount); - PendingIntent contentIntent = PendingIntent.getActivity(ApplicationLoader.applicationContext, 0, intent, PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_ONE_SHOT | PendingIntent.FLAG_MUTABLE); + PendingIntent contentIntent = PendingIntent.getActivity(ApplicationLoader.applicationContext, 0, intent, PendingIntent.FLAG_IMMUTABLE | PendingIntent.FLAG_ONE_SHOT | PendingIntent.FLAG_MUTABLE); mBuilder.setContentTitle(name) .setSmallIcon(getNotificationIconResId()) @@ -3866,10 +3874,14 @@ private void showOrUpdateNotification(boolean notifyAboutLast) { mBuilder.addPerson("tel:+" + user.phone); } - Intent dismissIntent = new Intent(ApplicationLoader.applicationContext, NotificationDismissReceiver.class); - dismissIntent.putExtra("messageDate", lastMessageObject.messageOwner.date); - dismissIntent.putExtra("currentAccount", currentAccount); - mBuilder.setDeleteIntent(PendingIntent.getBroadcast(ApplicationLoader.applicationContext, 1, dismissIntent, PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_UPDATE_CURRENT)); + try { + Intent dismissIntent = new Intent(ApplicationLoader.applicationContext, NotificationDismissReceiver.class); + dismissIntent.putExtra("messageDate", lastMessageObject.messageOwner.date); + dismissIntent.putExtra("currentAccount", currentAccount); + mBuilder.setDeleteIntent(PendingIntent.getBroadcast(ApplicationLoader.applicationContext, 1, dismissIntent, PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_UPDATE_CURRENT)); + } catch (Throwable e) { + FileLog.e(e); + } if (photoPath != null) { BitmapDrawable img = ImageLoader.getInstance().getImageFromMemory(photoPath, null, "50_50"); @@ -4149,6 +4161,7 @@ void call() { long selfUserId = getUserConfig().getClientUserId(); boolean waitingForPasscode = AndroidUtilities.needShowPasscode() || SharedConfig.isWaitingForPasscodeEnter; + boolean passcode = SharedConfig.passcodeHash.length() > 0; int maxCount = 7; LongSparseArray personCache = new LongSparseArray<>(); @@ -4213,6 +4226,7 @@ void call() { } else { chat = getMessagesController().getChat(-dialogId); if (chat == null) { + canReply = false; if (lastMessageObject.isFcmMessage()) { isSupergroup = lastMessageObject.isSupergroup(); name = lastMessageObject.localName; @@ -4237,7 +4251,9 @@ void call() { name = topic.title + " in " + name; } } - + if (canReply) { + canReply = 
ChatObject.canSendPlain(chat); + } } } } else { @@ -4589,11 +4605,12 @@ void call() { } else { intent.putExtra("chatId", -dialogId); } + FileLog.d("show extra notifications chatId " + dialogId + " topicId " + topicId); if (topicId != 0) { intent.putExtra("topicId", topicId); } intent.putExtra("currentAccount", currentAccount); - PendingIntent contentIntent = PendingIntent.getActivity(ApplicationLoader.applicationContext, 0, intent, PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_ONE_SHOT); + PendingIntent contentIntent = PendingIntent.getActivity(ApplicationLoader.applicationContext, 0, intent, PendingIntent.FLAG_IMMUTABLE | PendingIntent.FLAG_ONE_SHOT); NotificationCompat.WearableExtender wearableExtender = new NotificationCompat.WearableExtender(); if (wearReplyAction != null) { @@ -4650,11 +4667,15 @@ void call() { .setSortKey(String.valueOf(Long.MAX_VALUE - date)) .setCategory(NotificationCompat.CATEGORY_MESSAGE); - Intent dismissIntent = new Intent(ApplicationLoader.applicationContext, NotificationDismissReceiver.class); - dismissIntent.putExtra("messageDate", maxDate); - dismissIntent.putExtra("dialogId", dialogId); - dismissIntent.putExtra("currentAccount", currentAccount); - builder.setDeleteIntent(PendingIntent.getBroadcast(ApplicationLoader.applicationContext, internalId, dismissIntent, PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_UPDATE_CURRENT)); + try { + Intent dismissIntent = new Intent(ApplicationLoader.applicationContext, NotificationDismissReceiver.class); + dismissIntent.putExtra("messageDate", maxDate); + dismissIntent.putExtra("dialogId", dialogId); + dismissIntent.putExtra("currentAccount", currentAccount); + builder.setDeleteIntent(PendingIntent.getBroadcast(ApplicationLoader.applicationContext, internalId, dismissIntent, PendingIntent.FLAG_MUTABLE | PendingIntent.FLAG_UPDATE_CURRENT)); + } catch (Exception e) { + FileLog.e(e); + } if (useSummaryNotification) { builder.setGroup(notificationGroup); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ProxyRotationController.java b/TMessagesProj/src/main/java/org/telegram/messenger/ProxyRotationController.java new file mode 100644 index 0000000000..e6d767e6e0 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ProxyRotationController.java @@ -0,0 +1,128 @@ +package org.telegram.messenger; + +import android.content.SharedPreferences; +import android.os.SystemClock; + +import org.telegram.tgnet.ConnectionsManager; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +public class ProxyRotationController implements NotificationCenter.NotificationCenterDelegate { + private final static ProxyRotationController INSTANCE = new ProxyRotationController(); + + public final static int DEFAULT_TIMEOUT_INDEX = 1; + public final static List ROTATION_TIMEOUTS = Arrays.asList( + 5, 10, 15, 30, 60 + ); + + private boolean isCurrentlyChecking; + private Runnable checkProxyAndSwitchRunnable = () -> { + isCurrentlyChecking = true; + + int currentAccount = UserConfig.selectedAccount; + boolean startedCheck = false; + for (int i = 0; i < SharedConfig.proxyList.size(); i++) { + SharedConfig.ProxyInfo proxyInfo = SharedConfig.proxyList.get(i); + if (proxyInfo.checking || SystemClock.elapsedRealtime() - proxyInfo.availableCheckTime < 2 * 60 * 1000) { + continue; + } + startedCheck = true; + proxyInfo.checking = true; + proxyInfo.proxyCheckPingId = ConnectionsManager.getInstance(currentAccount).checkProxy(proxyInfo.address, proxyInfo.port, 
proxyInfo.username, proxyInfo.password, proxyInfo.secret, time -> AndroidUtilities.runOnUIThread(() -> {
+                proxyInfo.availableCheckTime = SystemClock.elapsedRealtime();
+                proxyInfo.checking = false;
+                if (time == -1) {
+                    proxyInfo.available = false;
+                    proxyInfo.ping = 0;
+                } else {
+                    proxyInfo.ping = time;
+                    proxyInfo.available = true;
+                }
+                NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.proxyCheckDone, proxyInfo);
+            }));
+        }
+
+        if (!startedCheck) {
+            isCurrentlyChecking = false;
+            switchToAvailable();
+        }
+    };
+
+    public static void init() {
+        INSTANCE.initInternal();
+    }
+
+    @SuppressWarnings("ComparatorCombinators")
+    private void switchToAvailable() {
+        isCurrentlyChecking = false;
+
+        if (!SharedConfig.proxyRotationEnabled) {
+            return;
+        }
+
+        List<SharedConfig.ProxyInfo> sortedList = new ArrayList<>(SharedConfig.proxyList);
+        Collections.sort(sortedList, (o1, o2) -> Long.compare(o1.ping, o2.ping));
+        for (SharedConfig.ProxyInfo info : sortedList) {
+            if (info == SharedConfig.currentProxy || info.checking || !info.available) {
+                continue;
+            }
+
+            SharedPreferences.Editor editor = MessagesController.getGlobalMainSettings().edit();
+            editor.putString("proxy_ip", info.address);
+            editor.putString("proxy_pass", info.password);
+            editor.putString("proxy_user", info.username);
+            editor.putInt("proxy_port", info.port);
+            editor.putString("proxy_secret", info.secret);
+            editor.putBoolean("proxy_enabled", true);
+
+            if (!info.secret.isEmpty()) {
+                editor.putBoolean("proxy_enabled_calls", false);
+            }
+            editor.apply();
+
+            SharedConfig.currentProxy = info;
+            NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.proxySettingsChanged);
+            NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.proxyChangedByRotation);
+            ConnectionsManager.setProxySettings(true, SharedConfig.currentProxy.address, SharedConfig.currentProxy.port, SharedConfig.currentProxy.username, SharedConfig.currentProxy.password, SharedConfig.currentProxy.secret);
+            break;
+        }
+    }
+
+    private void initInternal() {
+        for (int i : SharedConfig.activeAccounts) {
+            NotificationCenter.getInstance(i).addObserver(this, NotificationCenter.didUpdateConnectionState);
+        }
+        NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.proxyCheckDone);
+        NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.proxySettingsChanged);
+    }
+
+    @Override
+    public void didReceivedNotification(int id, int account, Object... args) {
+        if (id == NotificationCenter.proxyCheckDone) {
+            if (!SharedConfig.isProxyEnabled() || !SharedConfig.proxyRotationEnabled || SharedConfig.proxyList.size() <= 1 || !isCurrentlyChecking) {
+                return;
+            }
+
+            switchToAvailable();
+        } else if (id == NotificationCenter.proxySettingsChanged) {
+            AndroidUtilities.cancelRunOnUIThread(checkProxyAndSwitchRunnable);
+        } else if (id == NotificationCenter.didUpdateConnectionState && account == UserConfig.selectedAccount) {
+            if (!SharedConfig.isProxyEnabled() && !SharedConfig.proxyRotationEnabled || SharedConfig.proxyList.size() <= 1) {
+                return;
+            }
+
+            int state = ConnectionsManager.getInstance(account).getConnectionState();
+
+            if (state == ConnectionsManager.ConnectionStateConnectingToProxy) {
+                if (!isCurrentlyChecking) {
+                    AndroidUtilities.runOnUIThread(checkProxyAndSwitchRunnable, ROTATION_TIMEOUTS.get(SharedConfig.proxyRotationTimeout) * 1000L);
+                }
+            } else {
+                AndroidUtilities.cancelRunOnUIThread(checkProxyAndSwitchRunnable);
+            }
+        }
+    }
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/PushListenerController.java b/TMessagesProj/src/main/java/org/telegram/messenger/PushListenerController.java
index 4e07d21a93..6ec918c31b 100644
--- a/TMessagesProj/src/main/java/org/telegram/messenger/PushListenerController.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/PushListenerController.java
@@ -3,6 +3,7 @@
 import android.os.SystemClock;
 import android.text.TextUtils;
 import android.util.Base64;
+import android.util.Log;
 import android.util.SparseBooleanArray;
 
 import androidx.annotation.IntDef;
@@ -282,8 +283,9 @@ public static void processRemoteMessage(@PushType int pushType, String data, lon
                         chat_id = 0;
                     }
                     if (custom.has("topic_id")) {
-                        topicId =custom.getInt("topic_id");
+                        topicId = custom.getInt("topic_id");
                     }
+                    FileLog.d("received push notification chatId " + chat_id + " custom topicId " + topicId);
                     if (custom.has("encryption_id")) {
                         dialogId = DialogObject.makeEncryptedDialogId(custom.getInt("encryption_id"));
                     }
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/SaveToGallerySettingsHelper.java b/TMessagesProj/src/main/java/org/telegram/messenger/SaveToGallerySettingsHelper.java
new file mode 100644
index 0000000000..80c7cfe973
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/SaveToGallerySettingsHelper.java
@@ -0,0 +1,258 @@
+package org.telegram.messenger;
+
+import static org.telegram.messenger.SharedConfig.SAVE_TO_GALLERY_FLAG_CHANNELS;
+import static org.telegram.messenger.SharedConfig.SAVE_TO_GALLERY_FLAG_GROUP;
+import static org.telegram.messenger.SharedConfig.SAVE_TO_GALLERY_FLAG_PEER;
+
+import android.app.Activity;
+import android.content.SharedPreferences;
+import android.util.LongSparseArray;
+
+public class SaveToGallerySettingsHelper {
+
+    //shared settings
+    public static SharedSettings user;
+    public static SharedSettings groups;
+    public static SharedSettings channels;
+
+    public static String USERS_PREF_NAME = "users_save_gallery_exceptions";
+    public static String CHANNELS_PREF_NAME = "channels_save_gallery_exceptions";
+    public static String GROUPS_PREF_NAME = "groups_save_gallery_exceptions";
+
+    public static final long DEFAULT_VIDEO_LIMIT = 100 * 1024 * 1024; //100 MB
+    public static final long MAX_VIDEO_LIMIT = 4L * 1000 * 1024 * 1024; //~4 GB
+
+    public static void load(SharedPreferences preferences) {
+        boolean saveToGalleryLegacy = preferences.getBoolean("save_gallery", false);
+        int saveToGalleryFlags;
+        if (saveToGalleryLegacy && BuildVars.NO_SCOPED_STORAGE) {
+            saveToGalleryFlags = SAVE_TO_GALLERY_FLAG_PEER + SAVE_TO_GALLERY_FLAG_CHANNELS + SAVE_TO_GALLERY_FLAG_GROUP;
+        } else {
+            saveToGalleryFlags = preferences.getInt("save_gallery_flags", -1);
+        }
+        //migration
+        if (saveToGalleryFlags != -1) {
+            preferences.edit().remove("save_gallery").remove("save_gallery_flags").apply();
+            user = new SharedSettings();
+            user.savePhoto = user.saveVideo = (saveToGalleryFlags & SAVE_TO_GALLERY_FLAG_PEER) != 0;
+            user.limitVideo = DEFAULT_VIDEO_LIMIT;
+            user.save("user", preferences);
+
+            groups = new SharedSettings();
+            groups.savePhoto = user.saveVideo = (saveToGalleryFlags & SAVE_TO_GALLERY_FLAG_GROUP) != 0;
+            groups.limitVideo = DEFAULT_VIDEO_LIMIT;
+            groups.save("groups", preferences);
+
+            channels = new SharedSettings();
+            channels.savePhoto = channels.saveVideo = (saveToGalleryFlags & SAVE_TO_GALLERY_FLAG_CHANNELS) != 0;
+            channels.limitVideo = DEFAULT_VIDEO_LIMIT;
+            channels.save("channels", preferences);
+
+        } else {
+            user = SharedSettings.read("user", preferences);
+            groups = SharedSettings.read("groups", preferences);
+            channels = SharedSettings.read("channels", preferences);
+        }
+        user.type = SAVE_TO_GALLERY_FLAG_PEER;
+        groups.type = SAVE_TO_GALLERY_FLAG_GROUP;
+        channels.type = SAVE_TO_GALLERY_FLAG_CHANNELS;
+    }
+
+    public static boolean needSave(int flag, FilePathDatabase.FileMeta metaData, MessageObject messageObject, int currentAccount) {
+        SharedSettings settings;
+        if (flag == SharedConfig.SAVE_TO_GALLERY_FLAG_PEER) {
+            settings = user;
+        } else if (flag == SharedConfig.SAVE_TO_GALLERY_FLAG_CHANNELS) {
+            settings = channels;
+        } else if (flag == SharedConfig.SAVE_TO_GALLERY_FLAG_GROUP) {
+            settings = groups;
+        } else {
+            return false;
+        }
+        return settings.needSave(metaData, messageObject, currentAccount);
+    }
+
+    public static LongSparseArray<DialogException> loadExceptions(SharedPreferences sharedPreferences) {
+        LongSparseArray<DialogException> exceptions = new LongSparseArray<>();
+        int count = sharedPreferences.getInt("count", 0);
+        for (int i = 0; i < count; i++) {
+            DialogException dialogException = new DialogException();
+            dialogException.dialogId = sharedPreferences.getLong(i + "_dialog_id", 0);
+            dialogException.savePhoto = sharedPreferences.getBoolean(i + "_photo", false);
+            dialogException.saveVideo = sharedPreferences.getBoolean(i + "_video", false);
+            dialogException.limitVideo = sharedPreferences.getLong(i + "_limitVideo", DEFAULT_VIDEO_LIMIT);
+            if (dialogException.dialogId != 0) {
+                exceptions.put(dialogException.dialogId, dialogException);
+            }
+        }
+        return exceptions;
+    }
+
+    public static void saveExceptions(SharedPreferences sharedPreferences, LongSparseArray<DialogException> exceptions) {
+        sharedPreferences.edit().clear().apply();
+        SharedPreferences.Editor editor = sharedPreferences.edit();
+        editor.putInt("count", exceptions.size());
+        for (int i = 0; i < exceptions.size(); i++) {
+            DialogException dialogException = exceptions.valueAt(i);
+            editor.putLong(i + "_dialog_id", dialogException.dialogId);
+            editor.putBoolean(i + "_photo", dialogException.savePhoto);
+            editor.putBoolean(i + "_video", dialogException.saveVideo);
+            editor.putLong(i + "_limitVideo", dialogException.limitVideo);
+        }
+        editor.apply();
+    }
+
+    public static Settings getSettings(int type) {
+        if (type == SAVE_TO_GALLERY_FLAG_PEER) {
+            return user;
+        } else if (type == SAVE_TO_GALLERY_FLAG_GROUP) {
+            return groups;
+        } else if (type == SAVE_TO_GALLERY_FLAG_CHANNELS) {
+            return channels;
+        }
+        return null;
+    }
+
+    public static void saveSettings(int type) {
+
SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("mainconfig", Activity.MODE_PRIVATE); + if (type == SAVE_TO_GALLERY_FLAG_PEER) { + user.save("user", preferences); + } else if (type == SAVE_TO_GALLERY_FLAG_GROUP) { + groups.save("groups", preferences); + } else if (type == SAVE_TO_GALLERY_FLAG_CHANNELS) { + channels.save("channels", preferences); + } + } + + public static abstract class Settings { + public boolean savePhoto; + public boolean saveVideo; + public long limitVideo = 100 * 1024 * 1024; ///100 MB + + public boolean enabled() { + return savePhoto || saveVideo; + } + + public abstract CharSequence createDescription(int currentAccount); + + public void toggle() { + if (enabled()) { + saveVideo = false; + savePhoto = false; + } else { + savePhoto = true; + saveVideo = true; + } + } + } + + public static class SharedSettings extends Settings { + private int type; + + private void save(String prefix, SharedPreferences sharedPreferences) { + sharedPreferences.edit() + .putBoolean(prefix + "_save_gallery_photo", savePhoto) + .putBoolean(prefix + "_save_gallery_video", saveVideo) + .putLong(prefix + "_save_gallery_limitVideo", limitVideo) + .apply(); + + } + + private static SharedSettings read(String prefix, SharedPreferences preferences) { + SharedSettings settings = new SharedSettings(); + settings.savePhoto = preferences.getBoolean(prefix + "_save_gallery_photo", false); + settings.saveVideo = preferences.getBoolean(prefix + "_save_gallery_video", false); + settings.limitVideo = preferences.getLong(prefix + "_save_gallery_limitVideo", DEFAULT_VIDEO_LIMIT); + return settings; + } + + private boolean needSave(FilePathDatabase.FileMeta meta, MessageObject messageObject, int currentAccount) { + LongSparseArray exceptions = UserConfig.getInstance(currentAccount).getSaveGalleryExceptions(type); + DialogException exception = exceptions.get(meta.dialogId); + if (messageObject != null && (messageObject.isOutOwner() || messageObject.isSecretMedia())) { + return false; + } + boolean isVideo = (messageObject != null && messageObject.isVideo()) || meta.messageType == MessageObject.TYPE_VIDEO; + long size = messageObject != null ? 
messageObject.getSize() : meta.messageSize; + boolean needSaveVideo = saveVideo; + boolean needSavePhoto = savePhoto; + long saveVideoLimit = limitVideo; + if (exception != null) { + needSaveVideo = exception.saveVideo; + needSavePhoto = exception.savePhoto; + saveVideoLimit = exception.limitVideo; + } + if (isVideo) { + if (needSaveVideo && (saveVideoLimit == -1 || size < saveVideoLimit)) { + return true; + } + } else { + if (needSavePhoto) { + return true; + } + } + return false; + } + + public CharSequence createDescription(int currentAccount) { + StringBuilder builder = new StringBuilder(); + if (enabled()) { + if (savePhoto) { + builder.append(LocaleController.getString("SaveToGalleryPhotos", R.string.SaveToGalleryPhotos)); + } + if (saveVideo) { + if (builder.length() != 0) { + builder.append(", "); + } + builder.append(LocaleController.getString("SaveToGalleryVideos", R.string.SaveToGalleryVideos)); + if (limitVideo > 0 && limitVideo < 4L * 1000 * 1024 * 1024) { + builder.append(" (").append(AndroidUtilities.formatFileSize(limitVideo, true)).append(")"); + } + } + } else { + builder.append(LocaleController.getString("SaveToGalleryOff", R.string.SaveToGalleryOff)); + } + LongSparseArray exceptions = UserConfig.getInstance(currentAccount).getSaveGalleryExceptions(type); + if (exceptions.size() != 0) { + if (builder.length() != 0) { + builder.append(", "); + } + builder.append(LocaleController.formatPluralString("Exception", exceptions.size(), exceptions.size())); + } + return builder; + } + + @Override + public void toggle() { + super.toggle(); + saveSettings(type); + } + } + + public static class DialogException extends Settings { + public long dialogId; + + public CharSequence createDescription(int currentAccount) { + StringBuilder builder = new StringBuilder(); + if (enabled()) { + if (savePhoto) { + builder.append(LocaleController.getString("SaveToGalleryPhotos", R.string.SaveToGalleryPhotos)); + } + if (saveVideo) { + if (builder.length() != 0) { + builder.append(", "); + } + + if (limitVideo > 0 && limitVideo < 4L * 1000 * 1024 * 1024) { + builder.append(LocaleController.formatString("SaveToGalleryVideosUpTo", R.string.SaveToGalleryVideosUpTo, AndroidUtilities.formatFileSize(limitVideo, true))); + } else { + builder.append(LocaleController.formatString("SaveToGalleryVideos", R.string.SaveToGalleryVideos)); + } + } + } else { + builder.append(LocaleController.getString("SaveToGalleryOff", R.string.SaveToGalleryOff)); + } + return builder; + } + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/SecretChatHelper.java b/TMessagesProj/src/main/java/org/telegram/messenger/SecretChatHelper.java index 6879a8fa9b..2b6ba369e7 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/SecretChatHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/SecretChatHelper.java @@ -1022,7 +1022,7 @@ public TLRPC.Message processDecryptedObject(TLRPC.EncryptedChat chat, TLRPC.Encr } byte[] thumb = ((TLRPC.TL_decryptedMessageMediaDocument) decryptedMessage.media).thumb; TLRPC.PhotoSize photoSize; - if (thumb != null && thumb.length != 0 && thumb.length <= 6000 && decryptedMessage.media.thumb_w <= 100 && decryptedMessage.media.thumb_h <= 100) { + if (thumb != null && thumb.length != 0 && thumb.length <= 20000) { photoSize = new TLRPC.TL_photoCachedSize(); photoSize.bytes = thumb; photoSize.w = decryptedMessage.media.thumb_w; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java 
b/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java index f6dbb1d2d2..d5e713c31f 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java @@ -518,6 +518,7 @@ public static class SendingMediaInfo { public boolean forceImage; public boolean updateStickersOrder; public boolean hasMediaSpoilers; + public TLRPC.VideoSize emojiMarkup; } @SuppressLint("MissingPermission") @@ -1452,7 +1453,7 @@ public void processForwardFromMyName(MessageObject messageObject, long did) { HashMap params = null; if (DialogObject.isEncryptedDialog(did) && messageObject.messageOwner.peer_id != null && (messageObject.messageOwner.media.photo instanceof TLRPC.TL_photo || messageObject.messageOwner.media.document instanceof TLRPC.TL_document)) { params = new HashMap<>(); - params.put("parentObject", "sent_" + messageObject.messageOwner.peer_id.channel_id + "_" + messageObject.getId() + "_" + messageObject.getDialogId() + "_" + messageObject.type); + params.put("parentObject", "sent_" + messageObject.messageOwner.peer_id.channel_id + "_" + messageObject.getId() + "_" + messageObject.getDialogId() + "_" + messageObject.type + "_" + messageObject.getSize()); } if (messageObject.messageOwner.media.photo instanceof TLRPC.TL_photo) { sendMessage((TLRPC.TL_photo) messageObject.messageOwner.media.photo, null, did, messageObject.replyMessageObject, null, messageObject.messageOwner.message, messageObject.messageOwner.entities, null, params, true, 0, messageObject.messageOwner.media.ttl_seconds, messageObject, false); @@ -1581,17 +1582,25 @@ public void sendSticker(TLRPC.Document document, String query, long peer, Messag if (newDocument.mime_type == null) { newDocument.mime_type = ""; } - TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(document.thumbs, 90); - if (thumb instanceof TLRPC.TL_photoSize || thumb instanceof TLRPC.TL_photoSizeProgressive) { + + TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(document.thumbs, 10); + if (thumb instanceof TLRPC.TL_photoSize || thumb instanceof TLRPC.TL_photoSizeProgressive || thumb instanceof TLRPC.TL_photoStrippedSize) { File file = FileLoader.getInstance(currentAccount).getPathToAttach(thumb, true); - if (file.exists()) { + if (thumb instanceof TLRPC.TL_photoStrippedSize || file.exists()) { try { - int len = (int) file.length(); - byte[] arr = new byte[(int) file.length()]; - RandomAccessFile reader = new RandomAccessFile(file, "r"); - reader.readFully(arr); + byte[] arr; + TLRPC.PhotoSize newThumb; + if (thumb instanceof TLRPC.TL_photoStrippedSize) { + newThumb = new TLRPC.TL_photoStrippedSize(); + arr = thumb.bytes; + } else { + newThumb = new TLRPC.TL_photoCachedSize(); + int len = (int) file.length(); + arr = new byte[(int) file.length()]; + RandomAccessFile reader = new RandomAccessFile(file, "r"); + reader.readFully(arr); + } - TLRPC.PhotoSize newThumb = new TLRPC.TL_photoCachedSize(); TLRPC.TL_fileLocation_layer82 fileLocation = new TLRPC.TL_fileLocation_layer82(); fileLocation.dc_id = thumb.location.dc_id; fileLocation.volume_id = thumb.location.volume_id; @@ -1676,10 +1685,14 @@ public int sendMessage(ArrayList messages, final long peer, boole final TLRPC.Peer peer_id = getMessagesController().getPeer(peer); boolean isSignature = false; boolean canSendStickers = true; - boolean canSendMedia = true; + boolean canSendPhoto = true; + boolean canSendVideo = true; + boolean canSendDocument = true; + boolean canSendMusic = 
true; boolean canSendPolls = true; boolean canSendPreview = true; boolean canSendVoiceMessages = true; + boolean canSendVoiceRound = true; String rank = null; long linkedToGroup = 0; TLRPC.Chat chat; @@ -1692,7 +1705,7 @@ public int sendMessage(ArrayList messages, final long peer, boole TLRPC.UserFull userFull = getMessagesController().getUserFull(peer); if (userFull != null) { - canSendVoiceMessages = !userFull.voice_messages_forbidden; + canSendVoiceRound = canSendVoiceMessages = !userFull.voice_messages_forbidden; } } else { chat = getMessagesController().getChat(-peer); @@ -1711,9 +1724,14 @@ public int sendMessage(ArrayList messages, final long peer, boole rank = getMessagesController().getAdminRank(chat.id, myId); } canSendStickers = ChatObject.canSendStickers(chat); - canSendMedia = ChatObject.canSendMedia(chat); + canSendPhoto = ChatObject.canSendPhoto(chat); + canSendVideo = ChatObject.canSendVideo(chat); + canSendDocument = ChatObject.canSendDocument(chat); canSendPreview = ChatObject.canSendEmbed(chat); canSendPolls = ChatObject.canSendPolls(chat); + canSendVoiceRound = ChatObject.canSendRoundVideo(chat); + canSendVoiceMessages = ChatObject.canSendVoice(chat); + canSendMusic = ChatObject.canSendMusic(chat); } LongSparseArray groupsMap = new LongSparseArray<>(); @@ -1741,9 +1759,19 @@ public int sendMessage(ArrayList messages, final long peer, boole sendResult = ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_STICKERS) ? 4 : 1; } continue; - } else if (!canSendMedia && (msgObj.messageOwner.media instanceof TLRPC.TL_messageMediaPhoto || msgObj.messageOwner.media instanceof TLRPC.TL_messageMediaDocument) && !mediaIsSticker) { + } else if (!canSendPhoto && msgObj.messageOwner.media instanceof TLRPC.TL_messageMediaPhoto && !msgObj.isVideo() && !mediaIsSticker) { + if (sendResult == 0) { + sendResult = ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_PHOTO) ? 10 : 12; + } + continue; + } else if (!canSendMusic && msgObj.isMusic()) { if (sendResult == 0) { - sendResult = ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_MEDIA) ? 5 : 2; + sendResult = ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_MUSIC) ? 19 : 20; + } + continue; + } else if (!canSendVideo && msgObj.messageOwner.media instanceof TLRPC.TL_messageMediaPhoto && msgObj.isVideo() && !mediaIsSticker) { + if (sendResult == 0) { + sendResult = ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_VIDEO) ? 9 : 11; } continue; } else if (!canSendPolls && msgObj.messageOwner.media instanceof TLRPC.TL_messageMediaPoll) { @@ -1752,13 +1780,30 @@ public int sendMessage(ArrayList messages, final long peer, boole } continue; } else if (!canSendVoiceMessages && MessageObject.isVoiceMessage(msgObj.messageOwner)) { - if (sendResult == 0) { - sendResult = 7; + if (chat != null) { + if (sendResult == 0) { + sendResult = ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_VOICE) ? 13 : 14; + } + } else { + if (sendResult == 0) { + sendResult = 7; + } } continue; - } else if (!canSendVoiceMessages && MessageObject.isRoundVideoMessage(msgObj.messageOwner)) { + } else if (!canSendVoiceRound && MessageObject.isRoundVideoMessage(msgObj.messageOwner)) { + if (chat != null) { + if (sendResult == 0) { + sendResult = ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_ROUND) ? 
15 : 16; + } + } else { + if (sendResult == 0) { + sendResult = 8; + } + } + continue; + } else if (!canSendDocument && msgObj.messageOwner.media instanceof TLRPC.TL_messageMediaDocument && !mediaIsSticker) { if (sendResult == 0) { - sendResult = 8; + sendResult = ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_DOCUMENTS) ? 17 : 18; } continue; } @@ -2222,6 +2267,38 @@ public int sendMessage(ArrayList messages, final long peer, boole return sendResult; } + public static int canSendMessageToChat(TLRPC.Chat chat, MessageObject msgObj) { + boolean canSendStickers = ChatObject.canSendStickers(chat); + boolean canSendPhoto = ChatObject.canSendPhoto(chat); + boolean canSendVideo = ChatObject.canSendVideo(chat); + boolean canSendDocument = ChatObject.canSendDocument(chat); + boolean canSendPreview = ChatObject.canSendEmbed(chat); + boolean canSendPolls = ChatObject.canSendPolls(chat); + boolean canSendVoiceRound = ChatObject.canSendRoundVideo(chat); + boolean canSendVoiceMessages = ChatObject.canSendVoice(chat); + boolean canSendMusic = ChatObject.canSendMusic(chat); + + boolean mediaIsSticker = (msgObj.isSticker() || msgObj.isAnimatedSticker() || msgObj.isGif() || msgObj.isGame()); + if (!canSendStickers && mediaIsSticker) { + return ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_STICKERS) ? 4 : 1; + } else if (!canSendPhoto && msgObj.messageOwner.media instanceof TLRPC.TL_messageMediaPhoto && !msgObj.isVideo() && !mediaIsSticker) { + return ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_PHOTO) ? 10 : 12; + } else if (!canSendMusic && msgObj.isMusic()) { + return ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_MUSIC) ? 19 : 20; + } else if (!canSendVideo && msgObj.isVideo() && !mediaIsSticker) { + return ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_VIDEO) ? 9 : 11; + } else if (!canSendPolls && msgObj.messageOwner.media instanceof TLRPC.TL_messageMediaPoll) { + return ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_POLLS) ? 6 : 3; + } else if (!canSendVoiceMessages && MessageObject.isVoiceMessage(msgObj.messageOwner)) { + return ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_VOICE) ? 13 : 14; + } else if (!canSendVoiceRound && MessageObject.isRoundVideoMessage(msgObj.messageOwner)) { + return ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_ROUND) ? 15 : 16; + } else if (!canSendDocument && msgObj.messageOwner.media instanceof TLRPC.TL_messageMediaDocument && !mediaIsSticker) { + return ChatObject.isActionBannedByDefault(chat, ChatObject.ACTION_SEND_DOCUMENTS) ? 
17 : 18; + } + return 0; + } + private void writePreviousMessageData(TLRPC.Message message, SerializedData data) { if (message.media == null) { TLRPC.TL_messageMediaEmpty media = new TLRPC.TL_messageMediaEmpty(); @@ -3252,15 +3329,15 @@ public void sendMessage(TLRPC.TL_photo photo, String path, long peer, MessageObj sendMessage(null, caption, null, photo, null, null, null, null, null, null, peer, path, replyToMsg, replyToTopMsg, null, true, null, entities, replyMarkup, params, notify, scheduleDate, ttl, parentObject, null, updateStickersOrder); } - private void sendMessage(String message, String caption, TLRPC.MessageMedia location, TLRPC.TL_photo photo, VideoEditedInfo videoEditedInfo, TLRPC.User user, TLRPC.TL_document document, TLRPC.TL_game game, TLRPC.TL_messageMediaPoll poll, TLRPC.TL_messageMediaInvoice invoice, long peer, String path, MessageObject replyToMsg, MessageObject replyToTopMsg, TLRPC.WebPage webPage, boolean searchLinks, MessageObject retryMessageObject, ArrayList entities, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate, int ttl, Object parentObject, MessageObject.SendAnimationData sendAnimationData, boolean updateStickersOreder) { - sendMessage(message, caption, location, photo, videoEditedInfo, user, document, game, poll, invoice, peer, path, replyToMsg, replyToTopMsg, webPage, searchLinks, retryMessageObject, entities, replyMarkup, params, notify, scheduleDate, ttl, parentObject, sendAnimationData, updateStickersOreder, false); + private void sendMessage(String message, String caption, TLRPC.MessageMedia location, TLRPC.TL_photo photo, VideoEditedInfo videoEditedInfo, TLRPC.User user, TLRPC.TL_document document, TLRPC.TL_game game, TLRPC.TL_messageMediaPoll poll, TLRPC.TL_messageMediaInvoice invoice, long peer, String path, MessageObject replyToMsg, MessageObject replyToTopMsg, TLRPC.WebPage webPage, boolean searchLinks, MessageObject retryMessageObject, ArrayList entities, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate, int ttl, Object parentObject, MessageObject.SendAnimationData sendAnimationData, boolean updateStickersOrder) { + sendMessage(message, caption, location, photo, videoEditedInfo, user, document, game, poll, invoice, peer, path, replyToMsg, replyToTopMsg, webPage, searchLinks, retryMessageObject, entities, replyMarkup, params, notify, scheduleDate, ttl, parentObject, sendAnimationData, updateStickersOrder, false); } - private void sendMessage(String message, String caption, TLRPC.MessageMedia location, TLRPC.TL_photo photo, VideoEditedInfo videoEditedInfo, TLRPC.User user, TLRPC.TL_document document, TLRPC.TL_game game, TLRPC.TL_messageMediaPoll poll, TLRPC.TL_messageMediaInvoice invoice, long peer, String path, MessageObject replyToMsg, MessageObject replyToTopMsg, TLRPC.WebPage webPage, boolean searchLinks, MessageObject retryMessageObject, ArrayList entities, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate, int ttl, Object parentObject, MessageObject.SendAnimationData sendAnimationData, boolean updateStickersOreder, boolean hasMediaSpoilers) { - sendMessage(message, caption, location, photo, videoEditedInfo, user, document, game, poll, invoice, peer, path, replyToMsg, replyToTopMsg, webPage, searchLinks, retryMessageObject, entities, replyMarkup, params, notify, scheduleDate, ttl, parentObject, sendAnimationData, updateStickersOreder, hasMediaSpoilers, true); + private void sendMessage(String message, String caption, TLRPC.MessageMedia location, 
TLRPC.TL_photo photo, VideoEditedInfo videoEditedInfo, TLRPC.User user, TLRPC.TL_document document, TLRPC.TL_game game, TLRPC.TL_messageMediaPoll poll, TLRPC.TL_messageMediaInvoice invoice, long peer, String path, MessageObject replyToMsg, MessageObject replyToTopMsg, TLRPC.WebPage webPage, boolean searchLinks, MessageObject retryMessageObject, ArrayList entities, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate, int ttl, Object parentObject, MessageObject.SendAnimationData sendAnimationData, boolean updateStickersOrder, boolean hasMediaSpoilers) { + sendMessage(message, caption, location, photo, videoEditedInfo, user, document, game, poll, invoice, peer, path, replyToMsg, replyToTopMsg, webPage, searchLinks, retryMessageObject, entities, replyMarkup, params, notify, scheduleDate, ttl, parentObject, sendAnimationData, updateStickersOrder, hasMediaSpoilers, true); } - private void sendMessage(String message, String caption, TLRPC.MessageMedia location, TLRPC.TL_photo photo, VideoEditedInfo videoEditedInfo, TLRPC.User user, TLRPC.TL_document document, TLRPC.TL_game game, TLRPC.TL_messageMediaPoll poll, TLRPC.TL_messageMediaInvoice invoice, long peer, String path, MessageObject replyToMsg, MessageObject replyToTopMsg, TLRPC.WebPage webPage, boolean searchLinks, MessageObject retryMessageObject, ArrayList entities, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate, int ttl, Object parentObject, MessageObject.SendAnimationData sendAnimationData, boolean updateStickersOreder, boolean hasMediaSpoilers, boolean canSendGames) { + private void sendMessage(String message, String caption, TLRPC.MessageMedia location, TLRPC.TL_photo photo, VideoEditedInfo videoEditedInfo, TLRPC.User user, TLRPC.TL_document document, TLRPC.TL_game game, TLRPC.TL_messageMediaPoll poll, TLRPC.TL_messageMediaInvoice invoice, long peer, String path, MessageObject replyToMsg, MessageObject replyToTopMsg, TLRPC.WebPage webPage, boolean searchLinks, MessageObject retryMessageObject, ArrayList entities, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate, int ttl, Object parentObject, MessageObject.SendAnimationData sendAnimationData, boolean updateStickersOrder, boolean hasMediaSpoilers, boolean canSendGames) { if (user != null && user.phone == null) { return; } @@ -3560,7 +3637,7 @@ private void sendMessage(String message, String caption, TLRPC.MessageMedia loca } params.put("ve", ve); } - if (encryptedChat != null && document.dc_id > 0 && !MessageObject.isStickerDocument(document) && !MessageObject.isAnimatedStickerDocument(document, true)) { + if (encryptedChat != null && document.dc_id > 0 && !MessageObject.isStickerDocument(document) && !MessageObject.isAnimatedStickerDocument(document, true) && !MessageObject.isGifDocument(document)) { newMsg.attachPath = FileLoader.getInstance(currentAccount).getPathToAttach(document).toString(); } else { newMsg.attachPath = path; @@ -3850,7 +3927,7 @@ private void sendMessage(String message, String caption, TLRPC.MessageMedia loca reqSend.top_msg_id = replyToTopMsg.getId(); reqSend.flags |= 512; } - if (updateStickersOreder) { + if (updateStickersOrder && SharedConfig.updateStickersOrderOnSend) { reqSend.update_stickersets_order = true; } if (newMsg.from_id != null) { @@ -4244,7 +4321,7 @@ private void sendMessage(String message, String caption, TLRPC.MessageMedia loca request.schedule_date = scheduleDate; request.flags |= 1024; } - if (updateStickersOreder) { + if (updateStickersOrder && 
SharedConfig.updateStickersOrderOnSend) { request.update_stickersets_order = true; } @@ -4252,19 +4329,6 @@ private void sendMessage(String message, String caption, TLRPC.MessageMedia loca delayedMessage.sendRequest = request; } reqSend = request; - - if (updateStickersOreder) { -// if (MessageObject.getStickerSetId(document) != -1) { -// TLRPC.TL_updateMoveStickerSetToTop update = new TLRPC.TL_updateMoveStickerSetToTop(); -// update.masks = false; -// update.emojis = false; -// update.stickerset = MessageObject.getStickerSetId(document); -// -// ArrayList updates = new ArrayList<>(); -// updates.add(update); -// getMessagesController().processUpdateArray(updates, null, null, false, 0); -// } - } } if (groupId != 0) { performSendDelayedMessage(delayedMessage); @@ -4487,10 +4551,16 @@ private void sendMessage(String message, String caption, TLRPC.MessageMedia loca reqSend.media.caption = caption; TLRPC.PhotoSize thumb = getThumbForSecretChat(document.thumbs); if (thumb != null) { - ImageLoader.fillPhotoSizeWithBytes(thumb); - ((TLRPC.TL_decryptedMessageMediaDocument) reqSend.media).thumb = thumb.bytes; - reqSend.media.thumb_h = thumb.h; - reqSend.media.thumb_w = thumb.w; + if (thumb instanceof TLRPC.TL_photoStrippedSize) { + ((TLRPC.TL_decryptedMessageMediaDocument) reqSend.media).thumb = thumb.bytes; + reqSend.media.thumb_h = thumb.h; + reqSend.media.thumb_w = thumb.w; + } else { + ImageLoader.fillPhotoSizeWithBytes(thumb); + ((TLRPC.TL_decryptedMessageMediaDocument) reqSend.media).thumb = thumb.bytes; + reqSend.media.thumb_h = thumb.h; + reqSend.media.thumb_w = thumb.w; + } } else { ((TLRPC.TL_decryptedMessageMediaDocument) reqSend.media).thumb = new byte[0]; reqSend.media.thumb_h = 0; @@ -4675,9 +4745,12 @@ private TLRPC.PhotoSize getThumbForSecretChat(ArrayList arrayLi } for (int a = 0, N = arrayList.size(); a < N; a++) { TLRPC.PhotoSize size = arrayList.get(a); - if (size == null || size instanceof TLRPC.TL_photoStrippedSize || size instanceof TLRPC.TL_photoPathSize || size instanceof TLRPC.TL_photoSizeEmpty || size.location == null) { + if (size == null || size instanceof TLRPC.TL_photoPathSize || size instanceof TLRPC.TL_photoSizeEmpty || size.location == null) { continue; } + if (size instanceof TLRPC.TL_photoStrippedSize) { + return size; + } TLRPC.TL_photoSize photoSize = new TLRPC.TL_photoSize_layer127(); photoSize.type = size.type; photoSize.w = size.w; @@ -4793,6 +4866,20 @@ private void performSendDelayedMessage(final DelayedMessage message, int index) if (message.sendEncryptedRequest != null && document.dc_id != 0) { File file = new File(location); if (!file.exists()) { + file = getFileLoader().getPathToMessage(message.obj.messageOwner); + if (file != null && file.exists()) { + message.obj.messageOwner.attachPath = location = file.getAbsolutePath(); + message.obj.attachPathExists = true; + } + } + if (file == null || !file.exists() && message.obj.getDocument() != null) { + file = getFileLoader().getPathToAttach(message.obj.getDocument(), false); + if (file != null && file.exists()) { + message.obj.messageOwner.attachPath = location = file.getAbsolutePath(); + message.obj.attachPathExists = true; + } + } + if (file == null || !file.exists()) { putToDelayedMessages(FileLoader.getAttachFileName(document), message); getFileLoader().loadFile(document, message.parentObject, FileLoader.PRIORITY_HIGH, 0); return; @@ -4833,9 +4920,25 @@ private void performSendDelayedMessage(final DelayedMessage message, int index) } else { String location = message.obj.messageOwner.attachPath; 
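(Editor's note: the previous performSendDelayedMessage hunk and this one add the same fallback chain before giving up and downloading the document for an encrypted send: try the message's attachPath, then FileLoader's cached message path, then the document's attach path. Below is a minimal, self-contained sketch of that resolution order only; LocalFileResolver and FileLookup are hypothetical stand-ins for the FileLoader calls used in the patch, not code taken from it.)

import java.io.File;

// Hedged sketch: mirrors the lookup order added by the patch
// (attachPath, then cached message path, then attach path, else null = download needed).
// It deliberately omits the patch's side effects of updating attachPath/attachPathExists.
final class LocalFileResolver {
    interface FileLookup {
        File pathToMessage(); // stand-in for FileLoader#getPathToMessage(messageOwner)
        File pathToAttach();  // stand-in for FileLoader#getPathToAttach(document, false)
    }

    // Returns an existing local file, or null if the caller still has to download it.
    static File resolve(String attachPath, FileLookup lookup) {
        File file = attachPath != null ? new File(attachPath) : null;
        if (file != null && file.exists()) {
            return file;
        }
        file = lookup.pathToMessage();
        if (file != null && file.exists()) {
            return file;
        }
        file = lookup.pathToAttach();
        if (file != null && file.exists()) {
            return file;
        }
        return null;
    }
}

(A null result corresponds to the branch in the patch that calls putToDelayedMessages(...) and getFileLoader().loadFile(...) and then returns.)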
TLRPC.Document document = message.obj.getDocument(); + if (message.sendEncryptedRequest != null && document.dc_id != 0) { File file = new File(location); if (!file.exists()) { + file = getFileLoader().getPathToMessage(message.obj.messageOwner); + if (file != null && file.exists()) { + message.obj.messageOwner.attachPath = location = file.getAbsolutePath(); + message.obj.attachPathExists = true; + } + } + if (file == null || !file.exists() && message.obj.getDocument() != null) { + file = getFileLoader().getPathToAttach(message.obj.getDocument(), false); + if (file != null && file.exists()) { + message.obj.messageOwner.attachPath = location = file.getAbsolutePath(); + message.obj.attachPathExists = true; + } + } + + if (file == null || !file.exists()) { putToDelayedMessages(FileLoader.getAttachFileName(document), message); getFileLoader().loadFile(document, message.parentObject, FileLoader.PRIORITY_HIGH, 0); return; @@ -5687,6 +5790,13 @@ protected void performSendMessageRequest(final TLObject req, final MessageObject } else { isSentError = true; existFlags = 0; + if (BuildVars.LOGS_ENABLED) { + StringBuilder builder = new StringBuilder(); + for (int i = 0; i < updatesArr.size(); i++) { + builder.append(updatesArr.get(i).getClass().getSimpleName()).append(", "); + } + FileLog.d("can't find message in updates " + builder); + } } Utilities.stageQueue.postRunnable(() -> getMessagesController().processUpdates(updates, false)); } else { @@ -5827,7 +5937,7 @@ private void updateMediaPaths(MessageObject newMsgObj, TLRPC.Message sentMessage } if (sentMessage.media instanceof TLRPC.TL_messageMediaPhoto && sentMessage.media.photo != null && newMsg.media instanceof TLRPC.TL_messageMediaPhoto && newMsg.media.photo != null) { if (sentMessage.media.ttl_seconds == 0 && !newMsgObj.scheduled) { - getMessagesStorage().putSentFile(originalPath, sentMessage.media.photo, 0, "sent_" + sentMessage.peer_id.channel_id + "_" + sentMessage.id + "_" + DialogObject.getPeerDialogId(sentMessage.peer_id) + "_" + MessageObject.TYPE_PHOTO); + getMessagesStorage().putSentFile(originalPath, sentMessage.media.photo, 0, "sent_" + sentMessage.peer_id.channel_id + "_" + sentMessage.id + "_" + DialogObject.getPeerDialogId(sentMessage.peer_id) + "_" + MessageObject.TYPE_PHOTO + "_" + newMsgObj.getSize()); } if (newMsg.media.photo.sizes.size() == 1 && newMsg.media.photo.sizes.get(0).location instanceof TLRPC.TL_fileLocationUnavailable) { @@ -5888,14 +5998,14 @@ private void updateMediaPaths(MessageObject newMsgObj, TLRPC.Message sentMessage if ((isVideo || MessageObject.isGifMessage(sentMessage)) && MessageObject.isGifDocument(sentMessage.media.document) == MessageObject.isGifDocument(newMsg.media.document)) { if (!newMsgObj.scheduled) { MessageObject messageObject = new MessageObject(currentAccount, sentMessage, false, false); - getMessagesStorage().putSentFile(originalPath, sentMessage.media.document, 2, "sent_" + sentMessage.peer_id.channel_id + "_" + sentMessage.id + "_" + DialogObject.getPeerDialogId(sentMessage.peer_id) + "_" + messageObject.type); + getMessagesStorage().putSentFile(originalPath, sentMessage.media.document, 2, "sent_" + sentMessage.peer_id.channel_id + "_" + sentMessage.id + "_" + DialogObject.getPeerDialogId(sentMessage.peer_id) + "_" + messageObject.type + "_" + messageObject.getSize()); } if (isVideo) { sentMessage.attachPath = newMsg.attachPath; } } else if (!MessageObject.isVoiceMessage(sentMessage) && !MessageObject.isRoundVideoMessage(sentMessage) && !newMsgObj.scheduled) { MessageObject messageObject = 
new MessageObject(currentAccount, sentMessage, false, false); - getMessagesStorage().putSentFile(originalPath, sentMessage.media.document, 1, "sent_" + sentMessage.peer_id.channel_id + "_" + sentMessage.id + "_" + DialogObject.getPeerDialogId(sentMessage.peer_id) + "_" + messageObject.type); + getMessagesStorage().putSentFile(originalPath, sentMessage.media.document, 1, "sent_" + sentMessage.peer_id.channel_id + "_" + sentMessage.id + "_" + DialogObject.getPeerDialogId(sentMessage.peer_id) + "_" + messageObject.type + "_" + messageObject.getSize()); } } @@ -5958,7 +6068,7 @@ private void updateMediaPaths(MessageObject newMsgObj, TLRPC.Message sentMessage } } - if (newMsg.attachPath != null && newMsg.attachPath.startsWith(FileLoader.getDirectory(FileLoader.MEDIA_DIR_CACHE).getAbsolutePath())) { + if (newMsg.attachPath != null && newMsg.attachPath.startsWith(FileLoader.getDirectory(FileLoader.MEDIA_DIR_CACHE).getAbsolutePath()) && !MessageObject.isGifDocument(sentMessage.media.document)) { File cacheFile = new File(newMsg.attachPath); File cacheFile2 = FileLoader.getInstance(currentAccount).getPathToAttach(sentMessage.media.document, sentMessage.media.ttl_seconds != 0); if (!cacheFile.renameTo(cacheFile2)) { @@ -7172,6 +7282,7 @@ public static void prepareSendingText(AccountInstance accountInstance, String te TLRPC.TL_forumTopic topic = accountInstance.getMessagesController().getTopicsController().findTopic(-dialogId, topicId); if (topic != null && topic.topicStartMessage != null) { replyToMsg = new MessageObject(accountInstance.getCurrentAccount(), topic.topicStartMessage, false, false); + replyToMsg.isTopicMainMessage = true; } } for (int a = 0; a < count; a++) { @@ -8096,7 +8207,7 @@ public static Bitmap createVideoThumbnailAtTime(String filePath, long time) { public static Bitmap createVideoThumbnailAtTime(String filePath, long time, int[] orientation, boolean precise) { Bitmap bitmap = null; if (precise) { - AnimatedFileDrawable fileDrawable = new AnimatedFileDrawable(new File(filePath), true, 0, null, null, null, 0, 0, true, null); + AnimatedFileDrawable fileDrawable = new AnimatedFileDrawable(new File(filePath), true, 0, 0, null, null, null, 0, 0, true, null); bitmap = fileDrawable.getFrameAtTime(time, precise); if (orientation != null) { orientation[0] = fileDrawable.getOrientation(); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/SharedConfig.java b/TMessagesProj/src/main/java/org/telegram/messenger/SharedConfig.java index d0c5f533aa..09e58b6fd5 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/SharedConfig.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/SharedConfig.java @@ -10,7 +10,9 @@ import android.app.Activity; import android.app.ActivityManager; +import android.app.Dialog; import android.content.Context; +import android.content.DialogInterface; import android.content.SharedPreferences; import android.net.Uri; import android.content.pm.PackageInfo; @@ -18,7 +20,6 @@ import android.os.SystemClock; import android.text.TextUtils; import android.util.Base64; -import android.util.SparseArray; import android.webkit.WebView; import androidx.annotation.Nullable; @@ -38,8 +39,12 @@ import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.SerializedData; import org.telegram.tgnet.TLRPC; -import org.telegram.ui.Components.AnimatedEmojiDrawable; +import org.telegram.ui.ActionBar.AlertDialog; +import org.telegram.ui.ActionBar.BaseFragment; +import org.telegram.ui.CacheControlActivity; import 
org.telegram.ui.Components.SwipeGestureSettingsView; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.LaunchActivity; import java.io.File; import java.io.RandomAccessFile; @@ -78,21 +83,56 @@ import static com.v2ray.ang.V2RayConfig.SS_PROTOCOL; import static com.v2ray.ang.V2RayConfig.WSS_PROTOCOL; import static com.v2ray.ang.V2RayConfig.WS_PROTOCOL; +import java.util.List; import java.util.Locale; public class SharedConfig { + /** + * V2: Ping and check time serialized + */ + private final static int PROXY_SCHEMA_V2 = 2; + private final static int PROXY_CURRENT_SCHEMA_VERSION = PROXY_SCHEMA_V2; + public final static int PASSCODE_TYPE_PIN = 0, PASSCODE_TYPE_PASSWORD = 1; + private static int legacyDevicePerformanceClass = -1; - public static LiteMode getLiteMode() { - if (liteMode == null) { - liteMode = new LiteMode(); - } - return liteMode; + public static boolean loopStickers() { + return LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_STICKERS_CHAT); } - public static boolean loopStickers() { - return loopStickers && !getLiteMode().enabled; + public static boolean readOnlyStorageDirAlertShowed; + + public static void checkSdCard(File file) { + if (file == null || SharedConfig.storageCacheDir == null || readOnlyStorageDirAlertShowed) { + return; + } + if (file.getPath().startsWith(SharedConfig.storageCacheDir)) { + AndroidUtilities.runOnUIThread(() -> { + if (readOnlyStorageDirAlertShowed) { + return; + } + BaseFragment fragment = LaunchActivity.getLastFragment(); + if (fragment != null && fragment.getParentActivity() != null) { + SharedConfig.storageCacheDir = null; + SharedConfig.saveConfig(); + ImageLoader.getInstance().checkMediaPaths(() -> { + + }); + + readOnlyStorageDirAlertShowed = true; + AlertDialog.Builder dialog = new AlertDialog.Builder(fragment.getParentActivity()); + dialog.setTitle(LocaleController.getString("SdCardError", R.string.SdCardError)); + dialog.setSubtitle(LocaleController.getString("SdCardErrorDescription", R.string.SdCardErrorDescription)); + dialog.setPositiveButton(LocaleController.getString("DoNotUseSDCard", R.string.DoNotUseSDCard), (dialog1, which) -> { + + }); + Dialog dialogFinal = dialog.create(); + dialogFinal.setCanceledOnTouchOutside(false); + dialogFinal.show(); + } + }); + } } @Retention(RetentionPolicy.SOURCE) @@ -136,7 +176,6 @@ public static boolean loopStickers() { public static String lastUpdateVersion; public static int suggestStickers; public static boolean suggestAnimatedEmoji; - public static boolean loopStickers; public static int keepMedia = CacheByChatsController.KEEP_MEDIA_ONE_MONTH; //deprecated public static int lastKeepMediaCheckTime; public static int lastLogsCheckTime; @@ -149,7 +188,8 @@ public static boolean loopStickers() { public static boolean stickersReorderingHintUsed; public static boolean disableVoiceAudioEffects; public static boolean forceDisableTabletMode; - public static boolean useLNavigation; + public static boolean updateStickersOrderOnSend = true; + public static boolean bigCameraForRound; private static int lastLocalId = -210000; public static String storageCacheDir; @@ -162,12 +202,11 @@ public static boolean loopStickers() { private static final Object sync = new Object(); private static final Object localIdSync = new Object(); - public static int saveToGalleryFlags; +// public static int saveToGalleryFlags; public static int mapPreviewType = 2; public static boolean chatBubbles = Build.VERSION.SDK_INT >= 30; - public static boolean autoplayGifs = true; - public static boolean autoplayVideo = true; public 
static boolean raiseToSpeak = false; + public static boolean raiseToListen = true; public static boolean recordViaSco = false; public static boolean customTabs = true; public static boolean directShare = true; @@ -178,12 +217,10 @@ public static boolean loopStickers() { public static boolean streamAllVideo = false; public static boolean streamMkv = false; public static boolean saveStreamMedia = true; - public static boolean smoothKeyboard = true; - public static boolean pauseMusicOnRecord = true; - public static boolean chatBlur = true; + public static boolean pauseMusicOnRecord = false; + public static boolean pauseMusicOnMedia = true; public static boolean noiseSupression; - public static boolean noStatusBar = true; - public static boolean forceRtmpStream; + public static final boolean noStatusBar = true; public static boolean debugWebView; public static boolean sortContactsByName; public static boolean sortFilesByName; @@ -197,6 +234,8 @@ public static boolean loopStickers() { public static boolean fontSizeIsDefault; public static int bubbleRadius = 3; public static int ivFontSize = 12; + public static boolean proxyRotationEnabled; + public static int proxyRotationTimeout; public static int messageSeenHintCount; public static int emojiInteractionsHintCount; public static int dayNightThemeSwitchHintCount; @@ -207,7 +246,10 @@ public static boolean loopStickers() { public static boolean hasEmailLogin; + @PerformanceClass private static int devicePerformanceClass; + @PerformanceClass + private static int overrideDevicePerformanceClass; public static boolean drawDialogIcons; public static boolean useThreeLinesLayout; @@ -220,12 +262,57 @@ public static boolean loopStickers() { public static int fastScrollHintCount = 3; public static boolean dontAskManageStorage; + public static boolean translateChats = true; + public static CopyOnWriteArraySet activeAccounts; public static int loginingAccount = -1; public static boolean isFloatingDebugActive; public static LiteMode liteMode; + private static final int[] LOW_SOC = { + -1775228513, // EXYNOS 850 + 802464304, // EXYNOS 7872 + 802464333, // EXYNOS 7880 + 802464302, // EXYNOS 7870 + 2067362118, // MSM8953 + 2067362060, // MSM8937 + 2067362084, // MSM8940 + 2067362241, // MSM8992 + 2067362117, // MSM8952 + 2067361998, // MSM8917 + -1853602818 // SDM439 + }; + + private static final int[] LOW_DEVICES = { + 1903542002, // XIAOMI NIKEL (Redmi Note 4) + 1904553494, // XIAOMI OLIVE (Redmi 8) + 1616144535, // OPPO CPH2273 (Oppo A54s) + -713271737, // OPPO OP4F2F (Oppo A54) + -1394191140, // SAMSUNG A12 (Galaxy A12) + -270252297, // SAMSUNG A12S (Galaxy A12) + -270251367, // SAMSUNG A21S (Galaxy A21s) + -270252359 // SAMSUNG A10S (Galaxy A10s) + }; + + private static final int[] AVERAGE_DEVICES = { + 812981419, // XIAOMI ANGELICA (Redmi 9C) + -993913431 // XIAOMI DANDELION (Redmi 9A) + }; + + private static final int[] HIGH_DEVICES = { + 1908570923, // XIAOMI SWEET (Redmi Note 10 Pro) + -980514379, // XIAOMI SECRET (Redmi Note 10S) + 577463889, // XIAOMI JOYEUSE (Redmi Note 9 Pro) + 1764745014, // XIAOMI BEGONIA (Redmi Note 8 Pro) + 1908524435, // XIAOMI SURYA (Poco X3 NFC) + -215787089, // XIAOMI KAMA (Poco X3) + -215458996, // XIAOMI VAYU (Poco X3 Pro) + -1394179578, // SAMSUNG M21 + 220599115, // SAMSUNG J6LTE + 1737652784 // SAMSUNG J6PRIMELTE + }; + static { loadConfig(); } @@ -270,23 +357,23 @@ public ProxyInfo() { secret = ""; } - public ProxyInfo(String a, int p, String u, String pw, String s) { - address = a; - port = p; - username = u; - 
password = pw; - secret = s; - if (address == null) { - address = ""; + public ProxyInfo(String address, int port, String username, String password, String secret) { + this.address = address; + this.port = port; + this.username = username; + this.password = password; + this.secret = secret; + if (this.address == null) { + this.address = ""; } - if (password == null) { - password = ""; + if (this.password == null) { + this.password = ""; } - if (username == null) { - username = ""; + if (this.username == null) { + this.username = ""; } - if (secret == null) { - secret = ""; + if (this.secret == null) { + this.secret = ""; } } @@ -1140,6 +1227,8 @@ public static void saveConfig() { editor.putBoolean("forwardingOptionsHintShown", forwardingOptionsHintShown); editor.putInt("lockRecordAudioVideoHint", lockRecordAudioVideoHint); editor.putString("storageCacheDir", !TextUtils.isEmpty(storageCacheDir) ? storageCacheDir : ""); + editor.putBoolean("proxyRotationEnabled", proxyRotationEnabled); + editor.putInt("proxyRotationTimeout", proxyRotationTimeout); if (pendingAppUpdate != null) { try { @@ -1161,8 +1250,8 @@ public static void saveConfig() { editor = ApplicationLoader.applicationContext.getSharedPreferences("mainconfig", Context.MODE_PRIVATE).edit(); editor.putBoolean("hasEmailLogin", hasEmailLogin); - editor.putBoolean("useLNavigation", useLNavigation); editor.putBoolean("floatingDebugActive", isFloatingDebugActive); + editor.putBoolean("record_via_sco", recordViaSco); editor.apply(); } catch (Exception e) { FileLog.e(e); @@ -1213,6 +1302,8 @@ public static void loadConfig() { passportConfigJson = preferences.getString("passportConfigJson", ""); passportConfigHash = preferences.getInt("passportConfigHash", 0); storageCacheDir = preferences.getString("storageCacheDir", null); + proxyRotationEnabled = preferences.getBoolean("proxyRotationEnabled", false); + proxyRotationTimeout = preferences.getInt("proxyRotationTimeout", ProxyRotationController.DEFAULT_TIMEOUT_INDEX); String authKeyString = preferences.getString("pushAuthKey", null); if (!TextUtils.isEmpty(authKeyString)) { pushAuthKey = Base64.decode(authKeyString, Base64.DEFAULT); @@ -1267,16 +1358,9 @@ public static void loadConfig() { } preferences = ApplicationLoader.applicationContext.getSharedPreferences("mainconfig", Activity.MODE_PRIVATE); - boolean saveToGalleryLegacy = preferences.getBoolean("save_gallery", false); - if (saveToGalleryLegacy && BuildVars.NO_SCOPED_STORAGE) { - saveToGalleryFlags = SAVE_TO_GALLERY_FLAG_PEER + SAVE_TO_GALLERY_FLAG_CHANNELS + SAVE_TO_GALLERY_FLAG_GROUP; - preferences.edit().remove("save_gallery").putInt("save_gallery_flags", saveToGalleryFlags).apply(); - } else { - saveToGalleryFlags = preferences.getInt("save_gallery_flags", 0); - } - autoplayGifs = preferences.getBoolean("autoplay_gif", true); - autoplayVideo = preferences.getBoolean("autoplay_video", true); + SaveToGallerySettingsHelper.load(preferences); mapPreviewType = preferences.getInt("mapPreviewType", 2); + raiseToListen = preferences.getBoolean("raise_to_listen", true); raiseToSpeak = preferences.getBoolean("raise_to_speak", false); recordViaSco = preferences.getBoolean("record_via_sco", false); customTabs = preferences.getBoolean("custom_tabs", true); @@ -1285,7 +1369,7 @@ public static void loadConfig() { playOrderReversed = !shuffleMusic && preferences.getBoolean("playOrderReversed", false); inappCamera = preferences.getBoolean("inappCamera", true); hasCameraCache = preferences.contains("cameraCache"); - roundCamera16to9 = 
true;//preferences.getBoolean("roundCamera16to9", false); + roundCamera16to9 = true; repeatMode = preferences.getInt("repeatMode", 0); fontSize = preferences.getInt("fons_size", AndroidUtilities.isTablet() ? 14 : 12); fontSizeIsDefault = !preferences.contains("fons_size"); @@ -1294,14 +1378,15 @@ public static void loadConfig() { allowBigEmoji = preferences.getBoolean("allowBigEmoji", true); streamMedia = preferences.getBoolean("streamMedia", true); saveStreamMedia = preferences.getBoolean("saveStreamMedia", true); - smoothKeyboard = preferences.getBoolean("smoothKeyboard2", true); pauseMusicOnRecord = preferences.getBoolean("pauseMusicOnRecord", false); - chatBlur = preferences.getBoolean("chatBlur", true) || NekoConfig.forceBlurInChat.Bool(); + pauseMusicOnMedia = preferences.getBoolean("pauseMusicOnMedia", true); forceDisableTabletMode = preferences.getBoolean("forceDisableTabletMode", false); streamAllVideo = preferences.getBoolean("streamAllVideo", BuildVars.DEBUG_VERSION); streamMkv = preferences.getBoolean("streamMkv", false); suggestStickers = preferences.getInt("suggestStickers", 0); suggestAnimatedEmoji = preferences.getBoolean("suggestAnimatedEmoji", true); + overrideDevicePerformanceClass = preferences.getInt("overrideDevicePerformanceClass", -1); + devicePerformanceClass = preferences.getInt("devicePerformanceClass", -1); sortContactsByName = preferences.getBoolean("sortContactsByName", false); sortFilesByName = preferences.getBoolean("sortFilesByName", false); noSoundHintShowed = preferences.getBoolean("noSoundHintShowed", false); @@ -1309,11 +1394,7 @@ public static void loadConfig() { useThreeLinesLayout = preferences.getBoolean("useThreeLinesLayout", false); archiveHidden = preferences.getBoolean("archiveHidden", false); distanceSystemType = preferences.getInt("distanceSystemType", 0); - devicePerformanceClass = preferences.getInt("devicePerformanceClass", -1); - loopStickers = preferences.getBoolean("loopStickers", true); keepMedia = preferences.getInt("keep_media", CacheByChatsController.KEEP_MEDIA_ONE_MONTH); - noStatusBar = NekoConfig.transparentStatusBar.Bool(); - forceRtmpStream = preferences.getBoolean("forceRtmpStream", false); debugWebView = preferences.getBoolean("debugWebView", false); lastKeepMediaCheckTime = preferences.getInt("lastKeepMediaCheckTime", 0); lastLogsCheckTime = preferences.getInt("lastLogsCheckTime", 0); @@ -1366,8 +1447,9 @@ public static void loadConfig() { fastScrollHintCount = preferences.getInt("fastScrollHintCount", 3); dontAskManageStorage = preferences.getBoolean("dontAskManageStorage", false); hasEmailLogin = preferences.getBoolean("hasEmailLogin", false); - useLNavigation = preferences.getBoolean("useLNavigation", false); isFloatingDebugActive = preferences.getBoolean("floatingDebugActive", false); + updateStickersOrderOnSend = preferences.getBoolean("updateStickersOrderOnSend", true); + bigCameraForRound = preferences.getBoolean("bigCameraForRound", false); preferences = ApplicationLoader.applicationContext.getSharedPreferences("Notifications", Activity.MODE_PRIVATE); showNotificationsForAllAccounts = preferences.getBoolean("AllAccounts", true); @@ -1421,6 +1503,14 @@ public static void increaseBadPasscodeTries() { saveConfig(); } + public static boolean isAutoplayVideo() { + return LiteMode.isEnabled(LiteMode.FLAG_AUTOPLAY_VIDEOS); + } + + public static boolean isAutoplayGifs() { + return LiteMode.isEnabled(LiteMode.FLAG_AUTOPLAY_GIFS); + } + public static boolean isPassportConfigLoaded() { return passportConfigMap != null; } @@ 
-1619,6 +1709,13 @@ public static void setKeepMedia(int value) { editor.commit(); } + public static void toggleUpdateStickersOrderOnSend() { + SharedPreferences preferences = MessagesController.getGlobalMainSettings(); + SharedPreferences.Editor editor = preferences.edit(); + editor.putBoolean("updateStickersOrderOnSend", updateStickersOrderOnSend = !updateStickersOrderOnSend); + editor.commit(); + } + public static void checkLogsToDelete() { if (!BuildVars.LOGS_ENABLED) { return; @@ -1631,8 +1728,10 @@ public static void checkLogsToDelete() { Utilities.cacheClearQueue.postRunnable(() -> { long currentTime = time - 60 * 60 * 24 * 10; try { - File sdCard = ApplicationLoader.applicationContext.getExternalFilesDir(null); - File dir = new File(sdCard.getAbsolutePath() + "/logs"); + File dir = AndroidUtilities.getLogsDir(); + if (dir == null) { + return; + } Utilities.clearDir(dir.getAbsolutePath(), 0, currentTime, false); } catch (Throwable e) { FileLog.e(e); @@ -1644,180 +1743,6 @@ public static void checkLogsToDelete() { }); } - public static void checkKeepMedia() { - int time = (int) (System.currentTimeMillis() / 1000); - if (!BuildVars.DEBUG_PRIVATE_VERSION && Math.abs(time - lastKeepMediaCheckTime) < 60 * 60) { - return; - } - lastKeepMediaCheckTime = time; - File cacheDir = FileLoader.checkDirectory(FileLoader.MEDIA_DIR_CACHE); - - Utilities.cacheClearQueue.postRunnable(() -> { - boolean hasExceptions = false; - ArrayList cacheByChatsControllers = new ArrayList<>(); - for (int account : SharedConfig.activeAccounts) { - if (UserConfig.getInstance(account).isClientActivated()) { - CacheByChatsController cacheByChatsController = UserConfig.getInstance(account).getMessagesController().getCacheByChatsController(); - cacheByChatsControllers.add(cacheByChatsController); - if (cacheByChatsController.getKeepMediaExceptionsByDialogs().size() > 0) { - hasExceptions = true; - } - } - } - - int[] keepMediaByTypes = new int[3]; - boolean allKeepMediaTypesForever = true; - long keepMediaMinSeconds = Long.MAX_VALUE; - for (int i = 0; i < 3; i++) { - keepMediaByTypes[i] = SharedConfig.getPreferences().getInt("keep_media_type_" + i, CacheByChatsController.getDefault(i)); - if (keepMediaByTypes[i] != CacheByChatsController.KEEP_MEDIA_FOREVER) { - allKeepMediaTypesForever = false; - } - long days = CacheByChatsController.getDaysInSeconds(keepMediaByTypes[i]); - if (days < keepMediaMinSeconds) { - keepMediaMinSeconds = days; - } - } - if (hasExceptions) { - allKeepMediaTypesForever = false; - } - if (!allKeepMediaTypesForever) { - //long currentTime = time - 60 * 60 * 24 * days; - final SparseArray paths = ImageLoader.getInstance().createMediaPaths(); - for (int a = 0; a < paths.size(); a++) { - boolean isCacheDir = false; - if (paths.keyAt(a) == FileLoader.MEDIA_DIR_CACHE) { - isCacheDir = true; - } - File dir = paths.valueAt(a); - try { - File[] files = dir.listFiles(); - ArrayList keepMediaFiles = new ArrayList<>(); - for (int i = 0; i < files.length; i++) { - keepMediaFiles.add(new CacheByChatsController.KeepMediaFile(files[i])); - } - for (int i = 0; i < cacheByChatsControllers.size(); i++) { - cacheByChatsControllers.get(i).lookupFiles(keepMediaFiles); - } - for (int i = 0; i < keepMediaFiles.size(); i++) { - CacheByChatsController.KeepMediaFile file = keepMediaFiles.get(i); - if (file.keepMedia == CacheByChatsController.KEEP_MEDIA_FOREVER) { - continue; - } - long seconds; - boolean isException = false; - if (file.keepMedia >= 0) { - isException = true; - seconds = 
CacheByChatsController.getDaysInSeconds(file.keepMedia); - } else if (file.dialogType >= 0) { - seconds = CacheByChatsController.getDaysInSeconds(keepMediaByTypes[file.dialogType]); - } else if (isCacheDir) { - continue; - } else { - seconds = keepMediaMinSeconds; - } - if (seconds == Long.MAX_VALUE) { - continue; - } - long lastUsageTime = Utilities.getLastUsageFileTime(file.file.getAbsolutePath()); - long timeLocal = time - seconds; - boolean needDelete = lastUsageTime < timeLocal; - if (needDelete) { - try { - file.file.delete(); - } catch (Exception exception) { - FileLog.e(exception); - } - } - } - } catch (Throwable e) { - FileLog.e(e); - } - } - } - - int maxCacheGb = SharedConfig.getPreferences().getInt("cache_limit", Integer.MAX_VALUE); - if (maxCacheGb != Integer.MAX_VALUE) { - long maxCacheSize; - if (maxCacheGb == 1) { - maxCacheSize = 1024L * 1024L * 300L; - } else { - maxCacheSize = maxCacheGb * 1024L * 1024L * 1000L; - } - final SparseArray paths = ImageLoader.getInstance().createMediaPaths(); - long totalSize = 0; - for (int a = 0; a < paths.size(); a++) { - totalSize += Utilities.getDirSize(paths.valueAt(a).getAbsolutePath(), 0, true); - } - if (totalSize > maxCacheSize) { - ArrayList allFiles = new ArrayList<>(); - for (int a = 0; a < paths.size(); a++) { - File dir = paths.valueAt(a); - fillFilesRecursive(dir, allFiles); - } - Collections.sort(allFiles, (o1, o2) -> { - if (o2.lastUsageDate > o1.lastUsageDate) { - return -1; - } else if (o2.lastUsageDate < o1.lastUsageDate) { - return 1; - } - return 0; - }); - for (int i = 0; i < allFiles.size(); i++) { - long size = allFiles.get(i).file.length(); - totalSize -= size; - try { - allFiles.get(i).file.delete(); - } catch (Exception e) { - - } - - if (totalSize < maxCacheSize) { - break; - } - } - } - - } - - - //TODO now every day generating cache for reactions and cleared it after one day -\_(-_-)_/- - //need fix - File stickersPath = new File(cacheDir, "acache"); - if (stickersPath.exists()) { - long currentTime = time - 60 * 60 * 24; - try { - Utilities.clearDir(stickersPath.getAbsolutePath(), 0, currentTime, false); - } catch (Throwable e) { - FileLog.e(e); - } - } - MessagesController.getGlobalMainSettings().edit() - .putInt("lastKeepMediaCheckTime", lastKeepMediaCheckTime) - .apply(); - }); - } - - private static void fillFilesRecursive(final File fromFolder, ArrayList fileInfoList) { - if (fromFolder == null) { - return; - } - File[] files = fromFolder.listFiles(); - if (files == null) { - return; - } - for (final File fileEntry : files) { - if (fileEntry.isDirectory()) { - fillFilesRecursive(fileEntry, fileInfoList); - } else { - if (fileEntry.getName().equals(".nomedia")) { - continue; - } - fileInfoList.add(new FileInfoInternal(fileEntry)); - } - } - } - public static void toggleDisableVoiceAudioEffects() { disableVoiceAudioEffects = !disableVoiceAudioEffects; SharedPreferences preferences = MessagesController.getGlobalMainSettings(); @@ -1834,14 +1759,6 @@ public static void toggleNoiseSupression() { editor.commit(); } - public static void toggleForceRTMPStream() { - forceRtmpStream = !forceRtmpStream; - SharedPreferences preferences = MessagesController.getGlobalMainSettings(); - SharedPreferences.Editor editor = preferences.edit(); - editor.putBoolean("forceRtmpStream", forceRtmpStream); - editor.apply(); - } - public static void toggleDebugWebView() { debugWebView = !debugWebView; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { @@ -1853,21 +1770,8 @@ public static void toggleDebugWebView() { 
editor.apply(); } -// public static void toggleNoStatusBar() { -// noStatusBar = !noStatusBar; -// noStatusBar |= NekoConfig.transparentStatusBar.Bool(); -// SharedPreferences preferences = MessagesController.getGlobalMainSettings(); -// SharedPreferences.Editor editor = preferences.edit(); -// editor.putBoolean("noStatusBar", noStatusBar); -// editor.apply(); -// } - public static void toggleLoopStickers() { - loopStickers = !loopStickers; - SharedPreferences preferences = MessagesController.getGlobalMainSettings(); - SharedPreferences.Editor editor = preferences.edit(); - editor.putBoolean("loopStickers", loopStickers); - editor.commit(); + LiteMode.toggleFlag(LiteMode.FLAG_ANIMATED_STICKERS_CHAT); } public static void toggleBigEmoji() { @@ -1916,26 +1820,15 @@ public static void setRepeatMode(int mode) { editor.commit(); } - public static void toggleSaveToGalleryFlag(int flag) { - if ((saveToGalleryFlags & flag) != 0) { - saveToGalleryFlags &= ~flag; - } else { - saveToGalleryFlags |= flag; + public static void overrideDevicePerformanceClass(int performanceClass) { + MessagesController.getGlobalMainSettings().edit().putInt("overrideDevicePerformanceClass", overrideDevicePerformanceClass = performanceClass).remove("lite_mode").commit(); + if (liteMode != null) { + liteMode.loadPreference(); } - SharedPreferences preferences = MessagesController.getGlobalMainSettings(); - preferences.edit().putInt("save_gallery_flags", saveToGalleryFlags).apply(); - ImageLoader.getInstance().checkMediaPaths(); - ImageLoader.getInstance().getCacheOutQueue().postRunnable(() -> { - checkSaveToGalleryFiles(); - }); } public static void toggleAutoplayGifs() { - autoplayGifs = !autoplayGifs; - SharedPreferences preferences = MessagesController.getGlobalMainSettings(); - SharedPreferences.Editor editor = preferences.edit(); - editor.putBoolean("autoplay_gif", autoplayGifs); - editor.commit(); + LiteMode.toggleFlag(LiteMode.FLAG_AUTOPLAY_GIFS); } public static void setUseThreeLinesLayout(boolean value) { @@ -1956,11 +1849,7 @@ public static void toggleArchiveHidden() { } public static void toggleAutoplayVideo() { - autoplayVideo = !autoplayVideo; - SharedPreferences preferences = MessagesController.getGlobalMainSettings(); - SharedPreferences.Editor editor = preferences.edit(); - editor.putBoolean("autoplay_video", autoplayVideo); - editor.commit(); + LiteMode.toggleFlag(LiteMode.FLAG_AUTOPLAY_VIDEOS); } public static boolean isSecretMapPreviewSet() { @@ -1987,7 +1876,7 @@ public static void setNoSoundHintShowed(boolean value) { editor.commit(); } - public static void toogleRaiseToSpeak() { + public static void toggleRaiseToSpeak() { raiseToSpeak = !raiseToSpeak; SharedPreferences preferences = MessagesController.getGlobalMainSettings(); SharedPreferences.Editor editor = preferences.edit(); @@ -1995,6 +1884,18 @@ public static void toogleRaiseToSpeak() { editor.commit(); } + public static void toggleRaiseToListen() { + raiseToListen = !raiseToListen; + SharedPreferences preferences = MessagesController.getGlobalMainSettings(); + SharedPreferences.Editor editor = preferences.edit(); + editor.putBoolean("raise_to_listen", raiseToListen); + editor.commit(); + } + + public static boolean enabledRaiseTo(boolean speak) { + return raiseToListen && (!speak || raiseToSpeak); + } + public static void toggleCustomTabs() { customTabs = !customTabs; SharedPreferences preferences = MessagesController.getGlobalMainSettings(); @@ -2061,14 +1962,6 @@ public static void toggleSaveStreamMedia() { editor.commit(); } - public static 
void toggleSmoothKeyboard() { - smoothKeyboard = !smoothKeyboard; - SharedPreferences preferences = MessagesController.getGlobalMainSettings(); - SharedPreferences.Editor editor = preferences.edit(); - editor.putBoolean("smoothKeyboard2", smoothKeyboard); - editor.commit(); - } - public static void setSmoothKeyboard(boolean smoothKeyboard) { SharedPreferences preferences = MessagesController.getGlobalMainSettings(); SharedPreferences.Editor editor = preferences.edit(); @@ -2084,14 +1977,18 @@ public static void togglePauseMusicOnRecord() { editor.commit(); } - public static void toggleChatBlur() { - chatBlur = !chatBlur; + public static void togglePauseMusicOnMedia() { + pauseMusicOnMedia = !pauseMusicOnMedia; SharedPreferences preferences = MessagesController.getGlobalMainSettings(); SharedPreferences.Editor editor = preferences.edit(); - editor.putBoolean("chatBlur", chatBlur); + editor.putBoolean("pauseMusicOnMedia", pauseMusicOnMedia); editor.commit(); } + public static void toggleChatBlur() { + LiteMode.toggleFlag(LiteMode.FLAG_CHAT_BLUR); + } + public static void toggleForceDisableTabletMode() { forceDisableTabletMode = !forceDisableTabletMode; SharedPreferences preferences = MessagesController.getGlobalMainSettings(); @@ -2407,6 +2304,10 @@ public static ProxyInfo addProxy(ProxyInfo proxyInfo) { return proxyInfo; } + public static boolean isProxyEnabled() { + return MessagesController.getGlobalMainSettings().getBoolean("proxy_enabled", false) && currentProxy != null; + } + public static void deleteProxy(ProxyInfo proxyInfo) { if (currentProxy == proxyInfo) { @@ -2452,7 +2353,7 @@ public static void checkSaveToGalleryFiles() { File videoPath = new File(telegramPath, "videos"); videoPath.mkdirs(); - if (saveToGalleryFlags != 0 || !BuildVars.NO_SCOPED_STORAGE) { + if (!BuildVars.NO_SCOPED_STORAGE) { if (imagePath.isDirectory()) { new File(imagePath, ".nomedia").delete(); } @@ -2525,41 +2426,108 @@ public static void updateDayNightThemeSwitchHintCount(int count) { @PerformanceClass public static int getDevicePerformanceClass() { + if (overrideDevicePerformanceClass != -1) { + return overrideDevicePerformanceClass; + } if (devicePerformanceClass == -1) { - int androidVersion = Build.VERSION.SDK_INT; - int cpuCount = ConnectionsManager.CPU_COUNT; - int memoryClass = ((ActivityManager) ApplicationLoader.applicationContext.getSystemService(Context.ACTIVITY_SERVICE)).getMemoryClass(); - int totalCpuFreq = 0; - int freqResolved = 0; - for (int i = 0; i < cpuCount; i++) { - try { - RandomAccessFile reader = new RandomAccessFile(String.format(Locale.ENGLISH, "/sys/devices/system/cpu/cpu%d/cpufreq/cpuinfo_max_freq", i), "r"); - String line = reader.readLine(); - if (line != null) { - totalCpuFreq += Utilities.parseInt(line) / 1000; - freqResolved++; - } - reader.close(); - } catch (Throwable ignore) {} - } - int maxCpuFreq = freqResolved == 0 ? 
-1 : (int) Math.ceil(totalCpuFreq / (float) freqResolved); + devicePerformanceClass = measureDevicePerformanceClass(); + } + return devicePerformanceClass; + } - if (androidVersion < 21 || cpuCount <= 2 || memoryClass <= 100 || cpuCount <= 4 && maxCpuFreq != -1 && maxCpuFreq <= 1250 || cpuCount <= 4 && maxCpuFreq <= 1600 && memoryClass <= 128 && androidVersion <= 21 || cpuCount <= 4 && maxCpuFreq <= 1300 && memoryClass <= 128 && androidVersion <= 24) { - devicePerformanceClass = PERFORMANCE_CLASS_LOW; - } else if (cpuCount < 8 || memoryClass <= 160 || maxCpuFreq != -1 && maxCpuFreq <= 2050 || maxCpuFreq == -1 && cpuCount == 8 && androidVersion <= 23) { - devicePerformanceClass = PERFORMANCE_CLASS_AVERAGE; - } else { - devicePerformanceClass = PERFORMANCE_CLASS_HIGH; + public static int measureDevicePerformanceClass() { + int androidVersion = Build.VERSION.SDK_INT; + int cpuCount = ConnectionsManager.CPU_COUNT; + int memoryClass = ((ActivityManager) ApplicationLoader.applicationContext.getSystemService(Context.ACTIVITY_SERVICE)).getMemoryClass(); + + if (Build.DEVICE != null && Build.MANUFACTURER != null) { + int hash = (Build.MANUFACTURER + " " + Build.DEVICE).toUpperCase().hashCode(); + for (int i = 0; i < LOW_DEVICES.length; ++i) { + if (LOW_DEVICES[i] == hash) { + return PERFORMANCE_CLASS_LOW; + } } - if (BuildVars.LOGS_ENABLED) { - FileLog.d("device performance info selected_class = " + devicePerformanceClass + " (cpu_count = " + cpuCount + ", freq = " + maxCpuFreq + ", memoryClass = " + memoryClass + ", android version " + androidVersion + ", manufacture " + Build.MANUFACTURER + ")"); + for (int i = 0; i < AVERAGE_DEVICES.length; ++i) { + if (AVERAGE_DEVICES[i] == hash) { + return PERFORMANCE_CLASS_AVERAGE; + } + } + for (int i = 0; i < HIGH_DEVICES.length; ++i) { + if (HIGH_DEVICES[i] == hash) { + return PERFORMANCE_CLASS_HIGH; + } } } if (NaConfig.INSTANCE.getFakeHighPerformanceDevice().Bool()) { devicePerformanceClass = PERFORMANCE_CLASS_HIGH; } - return devicePerformanceClass; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && Build.SOC_MODEL != null) { + int hash = Build.SOC_MODEL.toUpperCase().hashCode(); + for (int i = 0; i < LOW_SOC.length; ++i) { + if (LOW_SOC[i] == hash) { + return PERFORMANCE_CLASS_LOW; + } + } + } + + int totalCpuFreq = 0; + int freqResolved = 0; + for (int i = 0; i < cpuCount; i++) { + try { + RandomAccessFile reader = new RandomAccessFile(String.format(Locale.ENGLISH, "/sys/devices/system/cpu/cpu%d/cpufreq/cpuinfo_max_freq", i), "r"); + String line = reader.readLine(); + if (line != null) { + totalCpuFreq += Utilities.parseInt(line) / 1000; + freqResolved++; + } + reader.close(); + } catch (Throwable ignore) {} + } + int maxCpuFreq = freqResolved == 0 ? 
-1 : (int) Math.ceil(totalCpuFreq / (float) freqResolved); + + long ram = -1; + try { + ActivityManager.MemoryInfo memoryInfo = new ActivityManager.MemoryInfo(); + ((ActivityManager) ApplicationLoader.applicationContext.getSystemService(Context.ACTIVITY_SERVICE)).getMemoryInfo(memoryInfo); + ram = memoryInfo.totalMem; + } catch (Exception ignore) {} + + int performanceClass; + if ( + androidVersion < 21 || + cpuCount <= 2 || + memoryClass <= 100 || + cpuCount <= 4 && maxCpuFreq != -1 && maxCpuFreq <= 1250 || + cpuCount <= 4 && maxCpuFreq <= 1600 && memoryClass <= 128 && androidVersion <= 21 || + cpuCount <= 4 && maxCpuFreq <= 1300 && memoryClass <= 128 && androidVersion <= 24 || + ram != -1 && ram < 2L * 1024L * 1024L * 1024L + ) { + performanceClass = PERFORMANCE_CLASS_LOW; + } else if ( + cpuCount < 8 || + memoryClass <= 160 || + maxCpuFreq != -1 && maxCpuFreq <= 2055 || + maxCpuFreq == -1 && cpuCount == 8 && androidVersion <= 23 + ) { + performanceClass = PERFORMANCE_CLASS_AVERAGE; + } else { + performanceClass = PERFORMANCE_CLASS_HIGH; + } + if (BuildVars.LOGS_ENABLED) { + FileLog.d("device performance info selected_class = " + performanceClass + " (cpu_count = " + cpuCount + ", freq = " + maxCpuFreq + ", memoryClass = " + memoryClass + ", android version " + androidVersion + ", manufacture " + Build.MANUFACTURER + ", screenRefreshRate=" + AndroidUtilities.screenRefreshRate + ")"); + } + + return performanceClass; + } + + public static String performanceClassName(int perfClass) { + switch (perfClass) { + case PERFORMANCE_CLASS_HIGH: return "HIGH"; + case PERFORMANCE_CLASS_AVERAGE: return "AVERAGE"; + case PERFORMANCE_CLASS_LOW: return "LOW"; + default: return "UNKNOWN"; + } } public static void setMediaColumnsCount(int count) { @@ -2586,7 +2554,7 @@ public static boolean canBlurChat() { } public static boolean chatBlurEnabled() { - return (canBlurChat() && chatBlur) || NekoConfig.forceBlurInChat.Bool(); + return (canBlurChat() && LiteMode.isEnabled(LiteMode.FLAG_CHAT_BLUR)) || NekoConfig.forceBlurInChat.Bool(); } public static class BackgroundActivityPrefs { @@ -2599,6 +2567,14 @@ public static long getLastCheckedBackgroundActivity() { public static void setLastCheckedBackgroundActivity(long l) { prefs.edit().putLong("last_checked", l).apply(); } + + public static int getDismissedCount() { + return prefs.getInt("dismissed_count", 0); + } + + public static void increaseDismissedCount() { + prefs.edit().putInt("dismissed_count", getDismissedCount() + 1).apply(); + } } private static Boolean animationsEnabled; @@ -2618,50 +2594,60 @@ public static SharedPreferences getPreferences() { return ApplicationLoader.applicationContext.getSharedPreferences("userconfing", Context.MODE_PRIVATE); } - private static class FileInfoInternal { - final File file; - final long lastUsageDate; - - private FileInfoInternal(File file) { - this.file = file; - this.lastUsageDate = Utilities.getLastUsageFileTime(file.getAbsolutePath()); - } + public static boolean deviceIsLow() { + return getDevicePerformanceClass() == PERFORMANCE_CLASS_LOW; } + public static boolean deviceIsAboveAverage() { + return getDevicePerformanceClass() >= PERFORMANCE_CLASS_AVERAGE; + } - public static class LiteMode { - - private boolean enabled; - - LiteMode() { - loadPreference(); - } + public static boolean deviceIsHigh() { + return getDevicePerformanceClass() >= PERFORMANCE_CLASS_HIGH; + } - public boolean enabled() { - return enabled; - } + public static boolean deviceIsAverage() { + return getDevicePerformanceClass() <= 
PERFORMANCE_CLASS_AVERAGE; + } - public void toggleMode() { - enabled = !enabled; - savePreference(); - AnimatedEmojiDrawable.lightModeChanged(); - } + public static void toggleRoundCamera() { + bigCameraForRound = !bigCameraForRound; + ApplicationLoader.applicationContext.getSharedPreferences("mainconfig", Activity.MODE_PRIVATE) + .edit() + .putBoolean("bigCameraForRound", bigCameraForRound) + .apply(); + } - private void loadPreference() { - int flags = MessagesController.getGlobalMainSettings().getInt("light_mode", getDevicePerformanceClass() == PERFORMANCE_CLASS_LOW ? 1 : 0) ; - enabled = (flags & 1) != 0; - } - public void savePreference() { - int flags = 0; - if (enabled) { - flags |= 1; + @Deprecated + public static int getLegacyDevicePerformanceClass() { + if (legacyDevicePerformanceClass == -1) { + int androidVersion = Build.VERSION.SDK_INT; + int cpuCount = ConnectionsManager.CPU_COUNT; + int memoryClass = ((ActivityManager) ApplicationLoader.applicationContext.getSystemService(Context.ACTIVITY_SERVICE)).getMemoryClass(); + int totalCpuFreq = 0; + int freqResolved = 0; + for (int i = 0; i < cpuCount; i++) { + try { + RandomAccessFile reader = new RandomAccessFile(String.format(Locale.ENGLISH, "/sys/devices/system/cpu/cpu%d/cpufreq/cpuinfo_max_freq", i), "r"); + String line = reader.readLine(); + if (line != null) { + totalCpuFreq += Utilities.parseInt(line) / 1000; + freqResolved++; + } + reader.close(); + } catch (Throwable ignore) {} } - MessagesController.getGlobalMainSettings().edit().putInt("light_mode", flags).apply(); - } + int maxCpuFreq = freqResolved == 0 ? -1 : (int) Math.ceil(totalCpuFreq / (float) freqResolved); - public boolean animatedEmojiEnabled() { - return !enabled; + if (androidVersion < 21 || cpuCount <= 2 || memoryClass <= 100 || cpuCount <= 4 && maxCpuFreq != -1 && maxCpuFreq <= 1250 || cpuCount <= 4 && maxCpuFreq <= 1600 && memoryClass <= 128 && androidVersion <= 21 || cpuCount <= 4 && maxCpuFreq <= 1300 && memoryClass <= 128 && androidVersion <= 24) { + legacyDevicePerformanceClass = PERFORMANCE_CLASS_LOW; + } else if (cpuCount < 8 || memoryClass <= 160 || maxCpuFreq != -1 && maxCpuFreq <= 2050 || maxCpuFreq == -1 && cpuCount == 8 && androidVersion <= 23) { + legacyDevicePerformanceClass = PERFORMANCE_CLASS_AVERAGE; + } else { + legacyDevicePerformanceClass = PERFORMANCE_CLASS_HIGH; + } } + return legacyDevicePerformanceClass; } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/StatsController.java b/TMessagesProj/src/main/java/org/telegram/messenger/StatsController.java index f4c446558b..0f700932e0 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/StatsController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/StatsController.java @@ -28,7 +28,9 @@ public class StatsController extends BaseController { public static final int TYPE_PHOTOS = 4; public static final int TYPE_FILES = 5; public static final int TYPE_TOTAL = 6; - private static final int TYPES_COUNT = 7; + public static final int TYPE_MUSIC = 7; + private static final int OLD_TYPES_COUNT = 7; + private static final int TYPES_COUNT = 8; private byte[] buffer = new byte[8]; @@ -88,7 +90,7 @@ public void run() { try { statsFile.seek(0); for (int a = 0; a < 3; a++) { - for (int b = 0; b < TYPES_COUNT; b++) { + for (int b = 0; b < OLD_TYPES_COUNT; b++) { statsFile.write(longToBytes(sentBytes[a][b]), 0, 8); statsFile.write(longToBytes(receivedBytes[a][b]), 0, 8); statsFile.write(intToBytes(sentItems[a][b]), 0, 4); @@ -97,6 +99,14 @@ public void run() { 
statsFile.write(intToBytes(callsTotalTime[a]), 0, 4); statsFile.write(longToBytes(resetStatsDate[a]), 0, 8); } + for (int b = OLD_TYPES_COUNT; b < TYPES_COUNT; ++b) { + for (int a = 0; a < 3; ++a) { + statsFile.write(longToBytes(sentBytes[a][b]), 0, 8); + statsFile.write(longToBytes(receivedBytes[a][b]), 0, 8); + statsFile.write(intToBytes(sentItems[a][b]), 0, 4); + statsFile.write(intToBytes(receivedItems[a][b]), 0, 4); + } + } statsFile.getFD().sync(); } catch (Exception ignore) { @@ -133,7 +143,7 @@ private StatsController(int account) { if (statsFile.length() > 0) { boolean save = false; for (int a = 0; a < 3; a++) { - for (int b = 0; b < TYPES_COUNT; b++) { + for (int b = 0; b < OLD_TYPES_COUNT; b++) { statsFile.readFully(buffer, 0, 8); sentBytes[a][b] = bytesToLong(buffer); statsFile.readFully(buffer, 0, 8); @@ -152,6 +162,18 @@ private StatsController(int account) { resetStatsDate[a] = System.currentTimeMillis(); } } + for (int b = OLD_TYPES_COUNT; b < TYPES_COUNT; ++b) { + for (int a = 0; a < 3; ++a) { + statsFile.readFully(buffer, 0, 8); + sentBytes[a][b] = bytesToLong(buffer); + statsFile.readFully(buffer, 0, 8); + receivedBytes[a][b] = bytesToLong(buffer); + statsFile.readFully(buffer, 0, 4); + sentItems[a][b] = bytesToInt(buffer); + statsFile.readFully(buffer, 0, 4); + receivedItems[a][b] = bytesToInt(buffer); + } + } if (save) { saveStats(); } @@ -223,14 +245,14 @@ public int getSentItemsCount(int networkType, int dataType) { public long getSentBytesCount(int networkType, int dataType) { if (dataType == TYPE_MESSAGES) { - return sentBytes[networkType][TYPE_TOTAL] - sentBytes[networkType][TYPE_FILES] - sentBytes[networkType][TYPE_AUDIOS] - sentBytes[networkType][TYPE_VIDEOS] - sentBytes[networkType][TYPE_PHOTOS]; + return sentBytes[networkType][TYPE_TOTAL] - sentBytes[networkType][TYPE_FILES] - sentBytes[networkType][TYPE_AUDIOS] - sentBytes[networkType][TYPE_VIDEOS] - sentBytes[networkType][TYPE_PHOTOS] - sentBytes[networkType][TYPE_MUSIC]; } return sentBytes[networkType][dataType]; } public long getReceivedBytesCount(int networkType, int dataType) { if (dataType == TYPE_MESSAGES) { - return receivedBytes[networkType][TYPE_TOTAL] - receivedBytes[networkType][TYPE_FILES] - receivedBytes[networkType][TYPE_AUDIOS] - receivedBytes[networkType][TYPE_VIDEOS] - receivedBytes[networkType][TYPE_PHOTOS]; + return receivedBytes[networkType][TYPE_TOTAL] - receivedBytes[networkType][TYPE_FILES] - receivedBytes[networkType][TYPE_AUDIOS] - receivedBytes[networkType][TYPE_VIDEOS] - receivedBytes[networkType][TYPE_PHOTOS] - receivedBytes[networkType][TYPE_MUSIC]; } return receivedBytes[networkType][dataType]; } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/SvgHelper.java b/TMessagesProj/src/main/java/org/telegram/messenger/SvgHelper.java index ebae293265..c766b7421c 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/SvgHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/SvgHelper.java @@ -166,7 +166,7 @@ public void drawInternal(Canvas canvas, boolean drawInBackground, int threadInde } float scale = getScale((int) w, (int) h); - if (placeholderGradient[threadIndex] != null && gradientWidth > 0 && !SharedConfig.getLiteMode().enabled()) { + if (placeholderGradient[threadIndex] != null && gradientWidth > 0 && LiteMode.isEnabled(LiteMode.FLAG_CHAT_BACKGROUND)) { if (drawInBackground) { long dt = time - lastUpdateTime; if (dt > 64) { @@ -326,7 +326,7 @@ public void setupGradient(String colorKey, Theme.ResourcesProvider resourcesProv currentColorKey = 
colorKey; currentColor[index] = color; gradientWidth = AndroidUtilities.displaySize.x * 2; - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(LiteMode.FLAG_CHAT_BACKGROUND)) { int color2 = ColorUtils.setAlphaComponent(currentColor[index], 70); if (drawInBackground) { if (backgroundPaint == null) { @@ -500,6 +500,20 @@ public static SvgDrawable getDrawableByPath(String pathString, int w, int h) { } } + public static SvgDrawable getDrawableByPath(Path path, int w, int h) { + try { + SvgDrawable drawable = new SvgDrawable(); + drawable.commands.add(path); + drawable.paints.put(path, new Paint(Paint.ANTI_ALIAS_FLAG)); + drawable.width = w; + drawable.height = h; + return drawable; + } catch (Exception e) { + FileLog.e(e); + return null; + } + } + public static Bitmap getBitmapByPathOnly(String pathString, int svgWidth, int svgHeight, int width, int height) { try { Path path = doPath(pathString); @@ -666,7 +680,7 @@ private static Matrix parseTransform(String s) { return null; } - private static Path doPath(String s) { + public static Path doPath(String s) { int n = s.length(); ParserHelper ph = new ParserHelper(s, 0); ph.skipWhitespace(); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/TopicsController.java b/TMessagesProj/src/main/java/org/telegram/messenger/TopicsController.java index 92841bb4a2..54a9573568 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/TopicsController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/TopicsController.java @@ -204,10 +204,8 @@ public void processTopics(long chatId, ArrayList newTopics, } } - - if (topicsToReload != null && loadType != LOAD_TYPE_LOAD_UNKNOWN) { - reloadTopics(chatId, topicsToReload); + reloadTopics(chatId, topicsToReload, null); } else if (((loadType == LOAD_TYPE_PRELOAD && !fromCache) || loadType == LOAD_TYPE_LOAD_NEXT) && topics.size() >= totalCount && totalCount >= 0) { endIsReached.put(chatId, 1); getUserConfig().getPreferences().edit().putBoolean("topics_end_reached_" + chatId, true).apply(); @@ -244,7 +242,6 @@ public void sortTopics(long chatId, boolean notify) { ArrayList topics = topicsByChatId.get(chatId); if (topics != null) { if (openedTopicsBuChatId.get(chatId, 0) > 0) { -// Comparator.comparingInt(o -> o.topMessage == null ? Integer.MAX_VALUE : -(o.pinned ? Integer.MAX_VALUE - o.pinnedOrder : o.topMessage.date)) Collections.sort(topics, (a, b) -> { if (a.hidden != b.hidden) { return a.hidden ? 
-1 : 1; @@ -351,7 +348,7 @@ public void updateTopicsWithDeletedMessages(long dialogId, ArrayList me sortTopics(chatId); } if (topicsToReload != null) { - reloadTopics(chatId, topicsToReload); + reloadTopics(chatId, topicsToReload, null); } }); } @@ -359,7 +356,7 @@ public void updateTopicsWithDeletedMessages(long dialogId, ArrayList me }); } - private void reloadTopics(long chatId, ArrayList topicsToReload) { + public void reloadTopics(long chatId, ArrayList topicsToReload, Runnable callback) { TLRPC.TL_channels_getForumTopicsByID req = new TLRPC.TL_channels_getForumTopicsByID(); for (int i = 0; i < topicsToReload.size(); i++) { req.topics.add(topicsToReload.get(i).id); @@ -379,6 +376,9 @@ private void reloadTopics(long chatId, ArrayList topicsToRe processTopics(chatId, topics.topics, messagesMap, false, LOAD_TYPE_LOAD_UNKNOWN, -1); getMessagesStorage().putMessages(topics.messages, false, true, false, 0, false, 0); getMessagesStorage().saveTopics(-chatId, topicsByChatId.get(chatId), true, true); + if (callback != null) { + callback.run(); + } }); } })); @@ -820,7 +820,7 @@ public void processUpdate(List topicUpdates) { for (int i = 0; i < topicsToReload.size(); i++) { long dialogId = topicsToReload.keyAt(i); ArrayList topics = topicsToReload.valueAt(i); - reloadTopics(-dialogId, topics); + reloadTopics(-dialogId, topics, null); } } @@ -872,14 +872,9 @@ public void reloadTopics(long chatId, boolean fromCache) { endIsReached.delete(chatId); clearLoadingOffset(chatId); - TLRPC.Chat chat = getMessagesController().getChat(chatId); if (chat != null && chat.forum) { - if (fromCache) { - preloadTopics(chatId); - } else { - loadTopics(chatId, false, LOAD_TYPE_PRELOAD); - } + loadTopics(chatId, fromCache, LOAD_TYPE_PRELOAD); } sortTopics(chatId); }); @@ -979,6 +974,27 @@ public void processEditedMessage(TLRPC.Message newMsg) { } } + public void loadTopic(long chatId, int topicId, Runnable runnable) { + getMessagesStorage().loadTopics(-chatId, topics -> { + AndroidUtilities.runOnUIThread(() -> { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("loaded from cache " + chatId + " topics_count=" + (topics == null ? 
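The reloadTopics change above makes the method public and adds an optional completion Runnable that fires on the UI thread only after the reloaded topics have been processed and persisted; the new loadTopic builds on it by trying the cache first and reloading by id only when the topic is missing. A hedged sketch of that callback contract, with stand-in types instead of the real TLRPC/ConnectionsManager classes:

import java.util.List;
import java.util.function.Consumer;

// Stand-in types only; not the app's real networking or storage classes.
class TopicReloadSketch {
    interface Network { void getForumTopicsByID(long chatId, List<Integer> ids, Consumer<List<Object>> onResponse); }
    interface UiThread { void post(Runnable r); }

    final Network network;
    final UiThread ui;

    TopicReloadSketch(Network network, UiThread ui) { this.network = network; this.ui = ui; }

    // Mirrors the new reloadTopics(chatId, topicsToReload, callback) contract:
    // the callback runs on the UI thread only after the response is applied.
    void reloadTopics(long chatId, List<Integer> topicIds, Runnable callback) {
        network.getForumTopicsByID(chatId, topicIds, topics -> ui.post(() -> {
            applyTopics(chatId, topics);
            if (callback != null) {
                callback.run();
            }
        }));
    }

    void applyTopics(long chatId, List<Object> topics) { /* merge into the in-memory cache */ }
}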
0 : topics.size())); + } + + processTopics(chatId, topics, null, true, LOAD_TYPE_PRELOAD, -1); + sortTopics(chatId); + if (findTopic(chatId, topicId) != null) { + runnable.run(); + } else { + ArrayList topicToReload = new ArrayList<>(); + TLRPC.TL_forumTopic topic = new TLRPC.TL_forumTopic(); + topic.id = topicId; + reloadTopics(chatId, topicToReload, runnable); + } + }); + }); + } + private class TopicsLoadOffset { int lastMessageId; int lastMessageDate; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/TranslateController.java b/TMessagesProj/src/main/java/org/telegram/messenger/TranslateController.java new file mode 100644 index 0000000000..2b3897a704 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/messenger/TranslateController.java @@ -0,0 +1,959 @@ +package org.telegram.messenger; + +import android.content.Context; +import android.content.res.Resources; +import android.icu.text.Collator; +import android.text.TextUtils; +import android.view.inputmethod.InputMethodInfo; +import android.view.inputmethod.InputMethodManager; +import android.view.inputmethod.InputMethodSubtype; + +import androidx.annotation.Nullable; + +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.Components.Bulletin; +import org.telegram.ui.Components.TranslateAlert2; +import org.telegram.ui.RestrictedLanguagesSelectActivity; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Locale; +import java.util.Objects; +import java.util.Set; + +public class TranslateController extends BaseController { + + public static final String UNKNOWN_LANGUAGE = "und"; + + private static final int REQUIRED_TOTAL_MESSAGES_CHECKED = 8; + private static final float REQUIRED_PERCENTAGE_MESSAGES_TRANSLATABLE = .60F; + private static final float REQUIRED_MIN_PERCENTAGE_MESSAGES_UNKNOWN = .65F; + + private static final int MAX_SYMBOLS_PER_REQUEST = 25000; + private static final int MAX_MESSAGES_PER_REQUEST = 20; + private static final int GROUPING_TRANSLATIONS_TIMEOUT = 200; + + private final Set translatingDialogs = new HashSet<>(); + private final Set translatableDialogs = new HashSet<>(); + private final HashMap translatableDialogMessages = new HashMap<>(); + private final HashMap translateDialogLanguage = new HashMap<>(); + private final HashMap detectedDialogLanguage = new HashMap<>(); + private final HashMap> keptReplyMessageObjects = new HashMap<>(); + private final Set hideTranslateDialogs = new HashSet<>(); + + class TranslatableDecision { + Set certainlyTranslatable = new HashSet<>(); + Set unknown = new HashSet<>(); + Set certainlyNotTranslatable = new HashSet<>(); + } + + private MessagesController messagesController; + + public TranslateController(MessagesController messagesController) { + super(messagesController.currentAccount); + this.messagesController = messagesController; + + AndroidUtilities.runOnUIThread(this::loadTranslatingDialogsCached, 150); + } + + public boolean isFeatureAvailable() { + return UserConfig.getInstance(currentAccount).isPremium() && isChatTranslateEnabled(); + } + + public boolean isChatTranslateEnabled() { + return MessagesController.getMainSettings(currentAccount).getBoolean("translate_chat_button", true); + } + + public boolean isContextTranslateEnabled() { + return MessagesController.getMainSettings(currentAccount).getBoolean("translate_button", 
MessagesController.getGlobalMainSettings().getBoolean("translate_button", false)); + } + + public void setContextTranslateEnabled(boolean enable) { + MessagesController.getMainSettings(currentAccount).edit().putBoolean("translate_button", enable).apply(); + } + + public static boolean isTranslatable(MessageObject messageObject) { + return ( + messageObject != null && messageObject.messageOwner != null && + !messageObject.isOutOwner() && + !messageObject.isRestrictedMessage && + ( + messageObject.type == MessageObject.TYPE_TEXT || + messageObject.type == MessageObject.TYPE_VIDEO || + messageObject.type == MessageObject.TYPE_PHOTO || + messageObject.type == MessageObject.TYPE_VOICE || + messageObject.type == MessageObject.TYPE_FILE || + messageObject.type == MessageObject.TYPE_MUSIC + ) && !TextUtils.isEmpty(messageObject.messageOwner.message) + ); + } + + public boolean isDialogTranslatable(long dialogId) { + return ( + isFeatureAvailable() && + !DialogObject.isEncryptedDialog(dialogId) && + getUserConfig().getClientUserId() != dialogId && + /* DialogObject.isChatDialog(dialogId) &&*/ + translatableDialogs.contains(dialogId) + ); + } + + public boolean isTranslateDialogHidden(long dialogId) { + if (hideTranslateDialogs.contains(dialogId)) { + return true; + } + TLRPC.ChatFull chatFull = getMessagesController().getChatFull(-dialogId); + if (chatFull != null) { + return chatFull.translations_disabled; + } + TLRPC.UserFull userFull = getMessagesController().getUserFull(dialogId); + if (userFull != null) { + return userFull.translations_disabled; + } + return false; + } + + public boolean isTranslatingDialog(long dialogId) { + return isFeatureAvailable() && translatingDialogs.contains(dialogId); + } + + public void toggleTranslatingDialog(long dialogId) { + toggleTranslatingDialog(dialogId, !isTranslatingDialog(dialogId)); + } + + public boolean toggleTranslatingDialog(long dialogId, boolean value) { + boolean currentValue = isTranslatingDialog(dialogId), notified = false; + if (value && !currentValue) { + translatingDialogs.add(dialogId); + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.dialogTranslate, dialogId, true); + notified = true; + } else if (!value && currentValue) { + translatingDialogs.remove((Long) dialogId); + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.dialogTranslate, dialogId, false); + cancelTranslations(dialogId); + notified = true; + } + saveTranslatingDialogsCache(); + return notified; + } + + private int hash(MessageObject messageObject) { + if (messageObject == null) { + return 0; + } + return Objects.hash(messageObject.getDialogId(), messageObject.getId()); + } + + private String currentLanguage() { + String lang = LocaleController.getInstance().getCurrentLocaleInfo().pluralLangCode; + if (lang != null) { + lang = lang.split("_")[0]; + } + return lang; + } + + public String getDialogTranslateTo(long dialogId) { + String lang = translateDialogLanguage.get(dialogId); + if (lang == null) { + lang = TranslateAlert2.getToLanguage(); + if (lang == null || lang.equals(getDialogDetectedLanguage(dialogId))) { + lang = currentLanguage(); + } + } + if ("nb".equals(lang)) { + lang = "no"; + } + return lang; + } + + public void setDialogTranslateTo(long dialogId, String language) { + if (TextUtils.equals(getDialogTranslateTo(dialogId), language)) { + return; + } + + boolean wasTranslating = isTranslatingDialog(dialogId); + + if (wasTranslating) { + AndroidUtilities.runOnUIThread(() -> { + synchronized 
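getDialogTranslateTo above resolves the target language through a fallback chain: the per-dialog override, then the last language picked in TranslateAlert2, then the app's own UI language, with "nb" normalised to "no" for the translation backend. A standalone sketch of that resolution order (the fields are placeholders for the controller's real state):

import java.util.HashMap;
import java.util.Map;

class TranslateTargetSketch {
    final Map<Long, String> perDialogOverride = new HashMap<>();
    String lastPickedLanguage;            // stand-in for TranslateAlert2.getToLanguage()
    String appLanguage = "en";            // stand-in for the current locale's language code

    String resolveTargetLanguage(long dialogId, String detectedSourceLanguage) {
        String lang = perDialogOverride.get(dialogId);
        if (lang == null) {
            lang = lastPickedLanguage;
            // Never "translate" into the language the dialog is already in.
            if (lang == null || lang.equals(detectedSourceLanguage)) {
                lang = appLanguage;
            }
        }
        if ("nb".equals(lang)) {
            lang = "no";                  // the backend expects "no" for Norwegian Bokmål
        }
        return lang;
    }
}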
(TranslateController.this) { + translateDialogLanguage.put(dialogId, language); + translatingDialogs.add(dialogId); + saveTranslatingDialogsCache(); + } + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.dialogTranslate, dialogId, true); + }, 150); + } else { + synchronized (TranslateController.this) { + translateDialogLanguage.put(dialogId, language); + } + } + + cancelTranslations(dialogId); + synchronized (this) { + translatingDialogs.remove(dialogId); + } + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.dialogTranslate, dialogId, false); + + TranslateAlert2.setToLanguage(language); + } + + public void updateDialogFull(long dialogId) { + if (!isFeatureAvailable() || !isDialogTranslatable(dialogId)) { + return; + } + + final boolean wasHidden = hideTranslateDialogs.contains(dialogId); + + boolean hidden = false; + TLRPC.ChatFull chatFull = getMessagesController().getChatFull(-dialogId); + if (chatFull != null) { + hidden = chatFull.translations_disabled; + } else { + TLRPC.UserFull userFull = getMessagesController().getUserFull(dialogId); + if (userFull != null) { + hidden = userFull.translations_disabled; + } + } + + synchronized (this) { + if (hidden) { + hideTranslateDialogs.add(dialogId); + } else { + hideTranslateDialogs.remove(dialogId); + } + } + + if (wasHidden != hidden) { + saveTranslatingDialogsCache(); + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.dialogTranslate, dialogId, isTranslatingDialog(dialogId)); + } + } + + public void setHideTranslateDialog(long dialogId, boolean hide) { + setHideTranslateDialog(dialogId, hide, false); + } + + public void setHideTranslateDialog(long dialogId, boolean hide, boolean doNotNotify) { + TLRPC.TL_messages_togglePeerTranslations req = new TLRPC.TL_messages_togglePeerTranslations(); + req.peer = getMessagesController().getInputPeer(dialogId); + req.disabled = hide; + getConnectionsManager().sendRequest(req, null); + + TLRPC.ChatFull chatFull = getMessagesController().getChatFull(-dialogId); + if (chatFull != null) { + chatFull.translations_disabled = hide; + getMessagesStorage().updateChatInfo(chatFull, true); + } + TLRPC.UserFull userFull = getMessagesController().getUserFull(dialogId); + if (userFull != null) { + userFull.translations_disabled = hide; + getMessagesStorage().updateUserInfo(userFull, true); + } + + synchronized (this) { + if (hide) { + hideTranslateDialogs.add(dialogId); + } else { + hideTranslateDialogs.remove(dialogId); + } + } + saveTranslatingDialogsCache(); + + if (!doNotNotify) { + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.dialogTranslate, dialogId, isTranslatingDialog(dialogId)); + } + } + + private static List languagesOrder = Arrays.asList( + "en", "ar", "zh", "fr", "de", "it", "ja", "ko", "pt", "ru", "es", "uk" + ); + + private static List allLanguages = Arrays.asList( + "af", "sq", "am", "ar", "hy", "az", "eu", "be", "bn", "bs", "bg", "ca", "ceb", "zh-cn", "zh", "zh-tw", "co", "hr", "cs", "da", "nl", "en", "eo", "et", "fi", "fr", "fy", "gl", "ka", "de", "el", "gu", "ht", "ha", "haw", "he", "iw", "hi", "hmn", "hu", "is", "ig", "id", "ga", "it", "ja", "jv", "kn", "kk", "km", "rw", "ko", "ku", "ky", "lo", "la", "lv", "lt", "lb", "mk", "mg", "ms", "ml", "mt", "mi", "mr", "mn", "my", "ne", "no", "ny", "or", "ps", "fa", "pl", "pt", "pa", "ro", "ru", "sm", "gd", "sr", "st", "sn", "sd", "si", "sk", "sl", "so", "es", "su", "sw", "sv", "tl", "tg", "ta", "tt", 
"te", "th", "tr", "tk", "uk", "ur", "ug", "uz", "vi", "cy", "xh", "yi", "yo", "zu" + ); + + public static class Language { + public String code; + public String displayName; + public String ownDisplayName; + + public String q; + } + + public static ArrayList getLanguages() { + ArrayList result = new ArrayList<>(); + for (int i = 0; i < allLanguages.size(); ++i) { + Language language = new Language(); + language.code = allLanguages.get(i); + if ("no".equals(language.code)) { + language.code = "nb"; + } + language.displayName = TranslateAlert2.capitalFirst(TranslateAlert2.languageName(language.code)); + language.ownDisplayName = TranslateAlert2.capitalFirst(TranslateAlert2.systemLanguageName(language.code, true)); + if (language.displayName == null) { + continue; + } + language.q = (language.displayName + " " + (language.ownDisplayName == null ? "" : language.ownDisplayName)).toLowerCase(); + result.add(language); + } + if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.N) { + Collator collator = Collator.getInstance(Locale.getDefault()); + Collections.sort(result, (lng1, lng2) -> collator.compare(lng1.displayName, lng2.displayName)); + } else { + Collections.sort(result, Comparator.comparing(lng -> lng.displayName)); + } + return result; + } + + private static LinkedHashSet suggestedLanguageCodes = null; + public static void invalidateSuggestedLanguageCodes() { + suggestedLanguageCodes = null; + } + public static void analyzeSuggestedLanguageCodes() { + LinkedHashSet langs = new LinkedHashSet<>(); + try { + langs.add(LocaleController.getInstance().getCurrentLocaleInfo().pluralLangCode); + } catch (Exception e1) { + FileLog.e(e1); + } + try { + langs.add(Resources.getSystem().getConfiguration().locale.getLanguage()); + } catch (Exception e2) { + FileLog.e(e2); + } + try { + langs.addAll(RestrictedLanguagesSelectActivity.getRestrictedLanguages()); + } catch (Exception e3) { + FileLog.e(e3); + } + try { + InputMethodManager imm = (InputMethodManager) ApplicationLoader.applicationContext.getSystemService(Context.INPUT_METHOD_SERVICE); + List ims = imm.getEnabledInputMethodList(); + for (InputMethodInfo method : ims) { + List submethods = imm.getEnabledInputMethodSubtypeList(method, true); + for (InputMethodSubtype submethod : submethods) { + if ("keyboard".equals(submethod.getMode())) { + String currentLocale = submethod.getLocale(); + if (currentLocale != null && currentLocale.contains("_")) { + currentLocale = currentLocale.split("_")[0]; + } + if (TranslateAlert2.languageName(currentLocale) != null) { + langs.add(currentLocale); + } + } + } + } + } catch (Exception e4) { + FileLog.e(e4); + } + suggestedLanguageCodes = langs; + } + + public static ArrayList getSuggestedLanguages(String except) { + ArrayList result = new ArrayList<>(); + if (suggestedLanguageCodes == null) { + analyzeSuggestedLanguageCodes(); + if (suggestedLanguageCodes == null) { + return result; + } + } + Iterator i = suggestedLanguageCodes.iterator(); + while (i.hasNext()) { + final String code = i.next(); + if (TextUtils.equals(code, except) || "no".equals(except) && "nb".equals(code) || "nb".equals(except) && "no".equals(code)) { + continue; + } + Language language = new Language(); + language.code = code; + if ("no".equals(language.code)) { + language.code = "nb"; + } + language.displayName = TranslateAlert2.capitalFirst(TranslateAlert2.languageName(language.code)); + language.ownDisplayName = TranslateAlert2.capitalFirst(TranslateAlert2.systemLanguageName(language.code, true)); + if 
(language.displayName == null) { + continue; + } + language.q = (language.displayName + " " + language.ownDisplayName).toLowerCase(); + result.add(language); + } + return result; + } + + public static ArrayList getLocales() { + HashMap languages = LocaleController.getInstance().languagesDict; + ArrayList locales = new ArrayList<>(languages.values()); + for (int i = 0; i < locales.size(); ++i) { + LocaleController.LocaleInfo locale = locales.get(i); + if (locale == null || locale.shortName != null && locale.shortName.endsWith("_raw") || !"remote".equals(locale.pathToFile)) { + locales.remove(i); + i--; + } + } + + final LocaleController.LocaleInfo currentLocale = LocaleController.getInstance().getCurrentLocaleInfo(); + Comparator comparator = (o, o2) -> { + if (o == currentLocale) { + return -1; + } else if (o2 == currentLocale) { + return 1; + } + final int index1 = languagesOrder.indexOf(o.pluralLangCode); + final int index2 = languagesOrder.indexOf(o2.pluralLangCode); + if (index1 >= 0 && index2 >= 0) { + return index1 - index2; + } else if (index1 >= 0) { + return -1; + } else if (index2 >= 0) { + return 1; + } + if (o.serverIndex == o2.serverIndex) { + return o.name.compareTo(o2.name); + } + if (o.serverIndex > o2.serverIndex) { + return 1; + } else if (o.serverIndex < o2.serverIndex) { + return -1; + } + return 0; + }; + Collections.sort(locales, comparator); + + return locales; + } + + public void checkRestrictedLanguagesUpdate() { + synchronized (this) { + translatableDialogMessages.clear(); + + ArrayList toNotify = new ArrayList<>(); + HashSet languages = RestrictedLanguagesSelectActivity.getRestrictedLanguages(); + for (long dialogId : translatableDialogs) { + String language = detectedDialogLanguage.get(dialogId); + if (language != null && languages.contains(language)) { + cancelTranslations(dialogId); + translatingDialogs.remove(dialogId); + toNotify.add(dialogId); + } + } + translatableDialogs.clear(); + saveTranslatingDialogsCache(); + + for (long dialogId : toNotify) { + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.dialogTranslate, dialogId, false); + } + } + } + + @Nullable + public String getDialogDetectedLanguage(long dialogId) { + return detectedDialogLanguage.get(dialogId); + } + + public void checkTranslation(MessageObject messageObject, boolean onScreen) { + checkTranslation(messageObject, onScreen, false); + } + + private void checkTranslation(MessageObject messageObject, boolean onScreen, boolean keepReply) { + if (!isFeatureAvailable()) { + return; + } + if (messageObject == null || messageObject.messageOwner == null) { + return; + } + + long dialogId = messageObject.getDialogId(); + + if (!keepReply && messageObject.replyMessageObject != null) { + checkTranslation(messageObject.replyMessageObject, onScreen, true); + } + + if (!isTranslatable(messageObject)) { + return; + } + + if (!isTranslatingDialog(dialogId)) { + checkLanguage(messageObject); + return; + } + + final String language = getDialogTranslateTo(dialogId); + MessageObject potentialReplyMessageObject; + if (!keepReply && (messageObject.messageOwner.translatedText == null || !language.equals(messageObject.messageOwner.translatedToLanguage)) && (potentialReplyMessageObject = findReplyMessageObject(dialogId, messageObject.getId())) != null) { + messageObject.messageOwner.translatedToLanguage = potentialReplyMessageObject.messageOwner.translatedToLanguage; + messageObject.messageOwner.translatedText = potentialReplyMessageObject.messageOwner.translatedText; + 
messageObject = potentialReplyMessageObject; + } + + if (onScreen && isTranslatingDialog(dialogId)) { + final MessageObject finalMessageObject = messageObject; + if (finalMessageObject.messageOwner.translatedText == null || !language.equals(finalMessageObject.messageOwner.translatedToLanguage)) { + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.messageTranslating, finalMessageObject); + pushToTranslate(finalMessageObject, language, (text, lang) -> { + finalMessageObject.messageOwner.translatedToLanguage = lang; + finalMessageObject.messageOwner.translatedText = text; + if (keepReply) { + keepReplyMessage(finalMessageObject); + } + + getMessagesStorage().updateMessageCustomParams(dialogId, finalMessageObject.messageOwner); + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.messageTranslated, finalMessageObject); + + ArrayList dialogMessages = messagesController.dialogMessage.get(dialogId); + if (dialogMessages != null) { + for (int i = 0; i < dialogMessages.size(); ++i) { + MessageObject dialogMessage = dialogMessages.get(i); + if (dialogMessage != null && dialogMessage.getId() == finalMessageObject.getId()) { + dialogMessage.messageOwner.translatedToLanguage = lang; + dialogMessage.messageOwner.translatedText = text; + if (dialogMessage.updateTranslation()) { + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.updateInterfaces, 0); + } + break; + } + } + } + }); + } else if (keepReply) { + keepReplyMessage(messageObject); + } + } + } + + public void invalidateTranslation(MessageObject messageObject) { + if (!isFeatureAvailable()) { + return; + } + if (messageObject == null || messageObject.messageOwner == null) { + return; + } + final long dialogId = messageObject.getDialogId(); + messageObject.messageOwner.translatedToLanguage = null; + messageObject.messageOwner.translatedText = null; + getMessagesStorage().updateMessageCustomParams(dialogId, messageObject.messageOwner); + AndroidUtilities.runOnUIThread(() -> { + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.messageTranslated, messageObject, isTranslatingDialog(dialogId)); + }); + } + + public void checkDialogMessages(long dialogId) { + if (!isFeatureAvailable()) { + return; + } + getMessagesStorage().getStorageQueue().postRunnable(() -> { + final ArrayList dialogMessages = messagesController.dialogMessage.get(dialogId); + if (dialogMessages == null) { + return; + } + ArrayList customProps = new ArrayList<>(); + for (int i = 0; i < dialogMessages.size(); ++i) { + MessageObject dialogMessage = dialogMessages.get(i); + if (dialogMessage == null || dialogMessage.messageOwner == null) { + customProps.add(null); + continue; + } + customProps.add(getMessagesStorage().getMessageWithCustomParamsOnlyInternal(dialogMessage.getId(), dialogMessage.getDialogId())); + } + AndroidUtilities.runOnUIThread(() -> { + boolean updated = false; + for (int i = 0; i < Math.min(customProps.size(), dialogMessages.size()); ++i) { + MessageObject dialogMessage = dialogMessages.get(i); + TLRPC.Message props = customProps.get(i); + if (dialogMessage == null || dialogMessage.messageOwner == null || props == null) { + continue; + } + dialogMessage.messageOwner.translatedText = props.translatedText; + dialogMessage.messageOwner.translatedToLanguage = props.translatedToLanguage; + if (dialogMessage.updateTranslation(false)) { + updated = true; + } + } + if (updated) { + 
NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.updateInterfaces, 0); + } + }); + }); + } + + + public void cleanup() { + cancelAllTranslations(); + resetTranslatingDialogsCache(); + + translatingDialogs.clear(); + translatableDialogs.clear(); + translatableDialogMessages.clear(); + translateDialogLanguage.clear(); + detectedDialogLanguage.clear(); + keptReplyMessageObjects.clear(); + hideTranslateDialogs.clear(); + loadingTranslations.clear(); + } + + private ArrayList pendingLanguageChecks = new ArrayList<>(); + private void checkLanguage(MessageObject messageObject) { + // NekoX: remove Language Detector + } + + private void checkDialogTranslatable(MessageObject messageObject) { + if (messageObject == null || messageObject.messageOwner == null) { + return; + } + + final long dialogId = messageObject.getDialogId(); + TranslatableDecision translatableMessages = translatableDialogMessages.get(dialogId); + if (translatableMessages == null) { + translatableDialogMessages.put(dialogId, translatableMessages = new TranslatableDecision()); + } + + final boolean isUnknown = isTranslatable(messageObject) && ( + messageObject.messageOwner.originalLanguage == null || + UNKNOWN_LANGUAGE.equals(messageObject.messageOwner.originalLanguage) + ); + final boolean translatable = ( + isTranslatable(messageObject) && + messageObject.messageOwner.originalLanguage != null && + !UNKNOWN_LANGUAGE.equals(messageObject.messageOwner.originalLanguage) && + !RestrictedLanguagesSelectActivity.getRestrictedLanguages().contains(messageObject.messageOwner.originalLanguage) +// !TextUtils.equals(getDialogTranslateTo(dialogId), messageObject.messageOwner.originalLanguage) + ); + + if (isUnknown) { + translatableMessages.unknown.add(messageObject.getId()); + } else { + (translatable ? 
translatableMessages.certainlyTranslatable : translatableMessages.certainlyNotTranslatable).add(messageObject.getId()); + } + + if (!isUnknown) { + detectedDialogLanguage.put(dialogId, messageObject.messageOwner.originalLanguage); + } + + final int translatableCount = translatableMessages.certainlyTranslatable.size(); + final int unknownCount = translatableMessages.unknown.size(); + final int notTranslatableCount = translatableMessages.certainlyNotTranslatable.size(); + final int totalCount = translatableCount + unknownCount + notTranslatableCount; + if ( + totalCount >= REQUIRED_TOTAL_MESSAGES_CHECKED && + (translatableCount / (float) (translatableCount + notTranslatableCount)) >= REQUIRED_PERCENTAGE_MESSAGES_TRANSLATABLE && + (unknownCount / (float) totalCount) < REQUIRED_MIN_PERCENTAGE_MESSAGES_UNKNOWN + ) { + translatableDialogs.add(dialogId); + translatableDialogMessages.remove((Long) dialogId); + AndroidUtilities.runOnUIThread(() -> { + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.dialogIsTranslatable, dialogId); + }, 450); + } + } + + private final Set loadingTranslations = new HashSet<>(); + private final HashMap> pendingTranslations = new HashMap<>(); + + private static class PendingTranslation { + Runnable runnable; + ArrayList messageIds = new ArrayList<>(); + ArrayList messageTexts = new ArrayList<>(); + ArrayList> callbacks = new ArrayList<>(); + String language; + + int symbolsCount; + + int reqId = -1; + } + + private void pushToTranslate( + MessageObject message, + String language, + Utilities.Callback2 callback + ) { + if (message == null || callback == null) { + return; + } + + long dialogId = message.getDialogId(); + + PendingTranslation pendingTranslation; + synchronized (this) { + ArrayList dialogPendingTranslations = pendingTranslations.get(dialogId); + if (dialogPendingTranslations == null) { + pendingTranslations.put(dialogId, dialogPendingTranslations = new ArrayList<>()); + } + + if (dialogPendingTranslations.isEmpty()) { + dialogPendingTranslations.add(pendingTranslation = new PendingTranslation()); + } else { + pendingTranslation = dialogPendingTranslations.get(dialogPendingTranslations.size() - 1); + } + + if (pendingTranslation.messageIds.contains(message.getId())) { + return; + } + + int messageSymbolsCount = 0; + if (message.messageOwner != null && message.messageOwner.message != null) { + messageSymbolsCount = message.messageOwner.message.length(); + } else if (message.caption != null) { + messageSymbolsCount = message.caption.length(); + } else if (message.messageText != null) { + messageSymbolsCount = message.messageText.length(); + } + + if (pendingTranslation.symbolsCount + messageSymbolsCount >= MAX_SYMBOLS_PER_REQUEST || + pendingTranslation.messageIds.size() + 1 >= MAX_MESSAGES_PER_REQUEST) { + dialogPendingTranslations.add(pendingTranslation = new PendingTranslation()); + } + + if (pendingTranslation.runnable != null) { + AndroidUtilities.cancelRunOnUIThread(pendingTranslation.runnable); + } + loadingTranslations.add(message.getId()); + pendingTranslation.messageIds.add(message.getId()); + TLRPC.TL_textWithEntities source = null; + if (message.messageOwner != null) { + source = new TLRPC.TL_textWithEntities(); + source.text = message.messageOwner.message; + source.entities = message.messageOwner.entities; + } + pendingTranslation.messageTexts.add(source); + pendingTranslation.callbacks.add(callback); + pendingTranslation.language = language; + pendingTranslation.symbolsCount += messageSymbolsCount; + final 
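checkDialogTranslatable above only flags a dialog as translatable once enough evidence has accumulated: at least REQUIRED_TOTAL_MESSAGES_CHECKED (8) classified messages, at least 60% of the language-detected ones in a foreign language, and fewer than 65% with an unknown language. The decision reduces to this predicate (constants copied from the class above; the wrapper class is illustrative):

class TranslatableDecisionSketch {
    static final int REQUIRED_TOTAL_MESSAGES_CHECKED = 8;
    static final float REQUIRED_PERCENTAGE_MESSAGES_TRANSLATABLE = .60F;
    static final float REQUIRED_MIN_PERCENTAGE_MESSAGES_UNKNOWN = .65F;

    static boolean shouldOfferTranslation(int translatable, int unknown, int notTranslatable) {
        int total = translatable + unknown + notTranslatable;
        if (total < REQUIRED_TOTAL_MESSAGES_CHECKED) {
            return false;                                  // not enough messages seen yet
        }
        // If every message is "unknown" the share below is NaN and the check fails,
        // which is the safe outcome.
        float translatableShare = translatable / (float) (translatable + notTranslatable);
        float unknownShare = unknown / (float) total;
        return translatableShare >= REQUIRED_PERCENTAGE_MESSAGES_TRANSLATABLE
                && unknownShare < REQUIRED_MIN_PERCENTAGE_MESSAGES_UNKNOWN;
    }
}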
PendingTranslation pendingTranslation1 = pendingTranslation; + pendingTranslation.runnable = () -> { + synchronized (TranslateController.this) { + ArrayList dialogPendingTranslations1 = pendingTranslations.get(dialogId); + if (dialogPendingTranslations1 != null) { + dialogPendingTranslations1.remove(pendingTranslation1); + if (dialogPendingTranslations1.isEmpty()) { + pendingTranslations.remove(dialogId); + } + } + } + + TLRPC.TL_messages_translateText req = new TLRPC.TL_messages_translateText(); + req.flags |= 1; + req.peer = getMessagesController().getInputPeer(dialogId); + req.id = pendingTranslation1.messageIds; + req.to_lang = pendingTranslation1.language; + + final int reqId = getConnectionsManager().sendRequest(req, (res, err) -> AndroidUtilities.runOnUIThread(() -> { + final ArrayList ids; + final ArrayList> callbacks; + final ArrayList texts; + synchronized (TranslateController.this) { + ids = pendingTranslation1.messageIds; + callbacks = pendingTranslation1.callbacks; + texts = pendingTranslation1.messageTexts; + } + if (res instanceof TLRPC.TL_messages_translateResult) { + ArrayList translated = ((TLRPC.TL_messages_translateResult) res).result; + final int count = Math.min(callbacks.size(), translated.size()); + for (int i = 0; i < count; ++i) { + callbacks.get(i).run(TranslateAlert2.preprocess(texts.get(i), translated.get(i)), pendingTranslation1.language); + } + } else if (err != null && "TO_LANG_INVALID".equals(err.text)) { + toggleTranslatingDialog(dialogId, false); + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.showBulletin, Bulletin.TYPE_ERROR, LocaleController.getString("TranslationFailedAlert2", R.string.TranslationFailedAlert2)); + } else { + for (int i = 0; i < callbacks.size(); ++i) { + callbacks.get(i).run(null, pendingTranslation1.language); + } + } + synchronized (TranslateController.this) { + for (int i = 0; i < ids.size(); ++i) { + loadingTranslations.remove(ids.get(i)); + } + } + })); + synchronized (TranslateController.this) { + pendingTranslation1.reqId = reqId; + } + }; + AndroidUtilities.runOnUIThread(pendingTranslation.runnable, GROUPING_TRANSLATIONS_TIMEOUT); + } + } + + public boolean isTranslating(MessageObject messageObject) { + synchronized (this) { + return messageObject != null && loadingTranslations.contains(messageObject.getId()) && isTranslatingDialog(messageObject.getDialogId()); + } + } + + public boolean isTranslating(MessageObject messageObject, MessageObject.GroupedMessages group) { + if (messageObject == null) { + return false; + } + if (!isTranslatingDialog(messageObject.getDialogId())) { + return false; + } + synchronized (this) { + if (loadingTranslations.contains(messageObject.getId())) { + return true; + } + if (group != null) { + for (MessageObject message : group.messages) { + if (loadingTranslations.contains(message.getId())) { + return true; + } + } + } + } + return false; + } + + public void cancelAllTranslations() { + synchronized (this) { + for (ArrayList translations : pendingTranslations.values()) { + if (translations != null) { + for (PendingTranslation pendingTranslation : translations) { + AndroidUtilities.cancelRunOnUIThread(pendingTranslation.runnable); + if (pendingTranslation.reqId != -1) { + getConnectionsManager().cancelRequest(pendingTranslation.reqId, true); + for (Integer messageId : pendingTranslation.messageIds) { + loadingTranslations.remove(messageId); + } + } + } + } + } + } + } + + public void cancelTranslations(long dialogId) { + synchronized (this) { + ArrayList translations 
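pushToTranslate above groups messages before calling messages.translateText: requests for the same dialog are buffered for GROUPING_TRANSLATIONS_TIMEOUT (200 ms), and a new batch is opened once the pending one reaches MAX_SYMBOLS_PER_REQUEST (25 000 characters) or MAX_MESSAGES_PER_REQUEST (20 messages). A self-contained sketch of that debounce-and-cap batching, with a plain ScheduledExecutorService standing in for AndroidUtilities.runOnUIThread:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

class TranslationBatcherSketch {
    static final int MAX_SYMBOLS_PER_REQUEST = 25000;
    static final int MAX_MESSAGES_PER_REQUEST = 20;
    static final long GROUPING_TIMEOUT_MS = 200;

    private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
    private final Consumer<List<String>> sendBatch;       // stand-in for the actual TL request
    private List<String> pending = new ArrayList<>();
    private int pendingSymbols;
    private ScheduledFuture<?> flushTask;

    TranslationBatcherSketch(Consumer<List<String>> sendBatch) { this.sendBatch = sendBatch; }

    synchronized void push(String text) {
        if (pendingSymbols + text.length() >= MAX_SYMBOLS_PER_REQUEST
                || pending.size() + 1 >= MAX_MESSAGES_PER_REQUEST) {
            flush();              // batch is full; the real code instead opens a second pending batch
        }
        pending.add(text);
        pendingSymbols += text.length();
        if (flushTask != null) {
            flushTask.cancel(false);                       // restart the grouping window
        }
        flushTask = scheduler.schedule(this::flush, GROUPING_TIMEOUT_MS, TimeUnit.MILLISECONDS);
    }

    private synchronized void flush() {
        if (pending.isEmpty()) {
            return;
        }
        List<String> batch = pending;
        pending = new ArrayList<>();
        pendingSymbols = 0;
        sendBatch.accept(batch);                           // one request for the whole group
    }
}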
= pendingTranslations.get(dialogId); + if (translations != null) { + for (PendingTranslation pendingTranslation : translations) { + AndroidUtilities.cancelRunOnUIThread(pendingTranslation.runnable); + if (pendingTranslation.reqId != -1) { + getConnectionsManager().cancelRequest(pendingTranslation.reqId, true); + for (Integer messageId : pendingTranslation.messageIds) { + loadingTranslations.remove(messageId); + } + } + } + pendingTranslations.remove((Long) dialogId); + } + } + } + + private void keepReplyMessage(MessageObject messageObject) { + if (messageObject == null) { + return; + } + HashMap map = keptReplyMessageObjects.get(messageObject.getDialogId()); + if (map == null) { + keptReplyMessageObjects.put(messageObject.getDialogId(), map = new HashMap<>()); + } + map.put(messageObject.getId(), messageObject); + } + + public MessageObject findReplyMessageObject(long dialogId, int messageId) { + HashMap map = keptReplyMessageObjects.get(dialogId); + if (map == null) { + return null; + } + return map.get(messageId); + } + + private void clearAllKeptReplyMessages(long dialogId) { + keptReplyMessageObjects.remove(dialogId); + } + + + private void loadTranslatingDialogsCached() { + if (!isFeatureAvailable()) { + return; + } + + String translatingDialogsCache = messagesController.getMainSettings().getString("translating_dialog_languages2", null); + if (translatingDialogsCache == null) { + return; + } + String[] dialogs = translatingDialogsCache.split(";"); + + HashSet restricted = RestrictedLanguagesSelectActivity.getRestrictedLanguages(); + for (int i = 0; i < dialogs.length; ++i) { + String[] keyval = dialogs[i].split("="); + if (keyval.length < 2) { + continue; + } + long did = Long.parseLong(keyval[0]); + String[] langs = keyval[1].split(">"); + if (langs.length != 2) { + continue; + } + String from = langs[0], to = langs[1]; + if ("null".equals(from)) from = null; + if ("null".equals(to)) to = null; + if (from != null) { + detectedDialogLanguage.put(did, from); + if (!restricted.contains(from)) { + translatingDialogs.add(did); + translatableDialogs.add(did); + } + if (to != null) { + translateDialogLanguage.put(did, to); + } + } + } + + Set hidden = messagesController.getMainSettings().getStringSet("hidden_translation_at", null); + if (hidden != null) { + Iterator i = hidden.iterator(); + while (i.hasNext()) { + try { + hideTranslateDialogs.add(Long.parseLong(i.next())); + } catch (Exception e) { + FileLog.e(e); + } + } + } + } + + private void saveTranslatingDialogsCache() { + StringBuilder langset = new StringBuilder(); + Iterator i = translatingDialogs.iterator(); + boolean first = true; + while (i.hasNext()) { + try { + long did = i.next(); + if (!first) { + langset.append(";"); + } + if (first) { + first = false; + } + String lang = detectedDialogLanguage.get(did); + if (lang == null) { + lang = "null"; + } + String tolang = getDialogTranslateTo(did); + if (tolang == null) { + tolang = "null"; + } + langset.append(did).append("=").append(lang).append(">").append(tolang); + } catch (Exception e) {} + } + + Set hidden = new HashSet<>(); + i = hideTranslateDialogs.iterator(); + while (i.hasNext()) { + try { + hidden.add("" + i.next()); + } catch (Exception e) { + FileLog.e(e); + } + } + MessagesController.getMainSettings(currentAccount).edit().putString("translating_dialog_languages2", langset.toString()).putStringSet("hidden_translation_at", hidden).apply(); + } + + private void resetTranslatingDialogsCache() { + 
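loadTranslatingDialogsCached and saveTranslatingDialogsCache above persist per-dialog state as a compact string under "translating_dialog_languages2": entries separated by ";", each formatted as dialogId=detectedLang>targetLang, with the literal text "null" for a missing language. Round-tripping that format looks roughly like this (a plain Map stands in for the controller's fields):

import java.util.LinkedHashMap;
import java.util.Map;

class TranslatingDialogsCacheSketch {
    // dialogId -> { detected language, target language }, null meaning absent.
    static String serialize(Map<Long, String[]> dialogs) {
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<Long, String[]> e : dialogs.entrySet()) {
            if (sb.length() > 0) {
                sb.append(';');
            }
            String from = e.getValue()[0] == null ? "null" : e.getValue()[0];
            String to = e.getValue()[1] == null ? "null" : e.getValue()[1];
            sb.append(e.getKey()).append('=').append(from).append('>').append(to);
        }
        return sb.toString();
    }

    static Map<Long, String[]> parse(String cache) {
        Map<Long, String[]> result = new LinkedHashMap<>();
        if (cache == null || cache.isEmpty()) {
            return result;
        }
        for (String entry : cache.split(";")) {
            String[] keyval = entry.split("=");
            if (keyval.length < 2) {
                continue;                                  // tolerate malformed entries
            }
            String[] langs = keyval[1].split(">");
            if (langs.length != 2) {
                continue;
            }
            String from = "null".equals(langs[0]) ? null : langs[0];
            String to = "null".equals(langs[1]) ? null : langs[1];
            result.put(Long.parseLong(keyval[0]), new String[]{from, to});
        }
        return result;
    }
}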
MessagesController.getMainSettings(currentAccount).edit().remove("translating_dialog_languages2").remove("hidden_translation_at").apply(); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/UserConfig.java b/TMessagesProj/src/main/java/org/telegram/messenger/UserConfig.java index 14d03f27e0..ca827373b3 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/UserConfig.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/UserConfig.java @@ -13,6 +13,7 @@ import android.os.SystemClock; import android.util.Base64; import android.util.SparseArray; +import android.util.LongSparseArray; import org.telegram.tgnet.SerializedData; import org.telegram.tgnet.TLRPC; @@ -32,7 +33,7 @@ public class UserConfig extends BaseController { //public final static int MAX_ACCOUNT_COUNT = 4; private final Object sync = new Object(); - private boolean configLoaded; + private volatile boolean configLoaded; private TLRPC.User currentUser; public boolean registeredForPush; public int lastSendMessageId = -210000; @@ -83,6 +84,10 @@ public class UserConfig extends BaseController { public volatile byte[] savedPasswordHash; public volatile byte[] savedSaltedPassword; public volatile long savedPasswordTime; + LongSparseArray userSaveGalleryExceptions; + LongSparseArray chanelSaveGalleryExceptions; + LongSparseArray groupsSaveGalleryExceptions; + private static SparseArray Instance = new SparseArray<>(); @@ -137,6 +142,9 @@ public int getNewMessageId() { public void saveConfig(boolean withFile) { NotificationCenter.getInstance(currentAccount).doOnIdle(() -> { + if (!configLoaded) { + return; + } synchronized (sync) { try { SharedPreferences.Editor editor = getPreferences().edit(); @@ -287,7 +295,8 @@ private void checkPremiumSelf(TLRPC.User oldUser, TLRPC.User newUser) { } } - public void loadConfig() { + public void + loadConfig() { synchronized (sync) { if (configLoaded) { return; @@ -427,6 +436,48 @@ public SharedPreferences getPreferences() { } } + public LongSparseArray getSaveGalleryExceptions(int type) { + if (type == SharedConfig.SAVE_TO_GALLERY_FLAG_PEER) { + if (userSaveGalleryExceptions == null) { + userSaveGalleryExceptions = SaveToGallerySettingsHelper.loadExceptions(ApplicationLoader.applicationContext.getSharedPreferences(SaveToGallerySettingsHelper.USERS_PREF_NAME + "_" + currentAccount, Context.MODE_PRIVATE)); + } + return userSaveGalleryExceptions; + } else if (type == SharedConfig.SAVE_TO_GALLERY_FLAG_GROUP) { + if (groupsSaveGalleryExceptions == null) { + groupsSaveGalleryExceptions = SaveToGallerySettingsHelper.loadExceptions(ApplicationLoader.applicationContext.getSharedPreferences(SaveToGallerySettingsHelper.GROUPS_PREF_NAME + "_" + currentAccount, Context.MODE_PRIVATE)); + } + return groupsSaveGalleryExceptions; + } else if (type == SharedConfig.SAVE_TO_GALLERY_FLAG_CHANNELS) { + if (chanelSaveGalleryExceptions == null) { + chanelSaveGalleryExceptions = SaveToGallerySettingsHelper.loadExceptions(ApplicationLoader.applicationContext.getSharedPreferences(SaveToGallerySettingsHelper.CHANNELS_PREF_NAME + "_" + currentAccount, Context.MODE_PRIVATE)); + } + return chanelSaveGalleryExceptions; + } + return null; + } + + public void updateSaveGalleryExceptions(int type, LongSparseArray exceptions) { + if (type == SharedConfig.SAVE_TO_GALLERY_FLAG_PEER) { + userSaveGalleryExceptions = exceptions; + SaveToGallerySettingsHelper.saveExceptions( + ApplicationLoader.applicationContext.getSharedPreferences(SaveToGallerySettingsHelper.USERS_PREF_NAME + "_" + currentAccount, 
Context.MODE_PRIVATE), + userSaveGalleryExceptions + ); + } else if (type == SharedConfig.SAVE_TO_GALLERY_FLAG_GROUP) { + groupsSaveGalleryExceptions = exceptions; + SaveToGallerySettingsHelper.saveExceptions( + ApplicationLoader.applicationContext.getSharedPreferences(SaveToGallerySettingsHelper.GROUPS_PREF_NAME + "_" + currentAccount, Context.MODE_PRIVATE), + groupsSaveGalleryExceptions + ); + } else if (type == SharedConfig.SAVE_TO_GALLERY_FLAG_CHANNELS) { + chanelSaveGalleryExceptions = exceptions; + SaveToGallerySettingsHelper.saveExceptions( + ApplicationLoader.applicationContext.getSharedPreferences(SaveToGallerySettingsHelper.CHANNELS_PREF_NAME + "_" + currentAccount, Context.MODE_PRIVATE), + chanelSaveGalleryExceptions + ); + } + } + public void clearConfig() { getPreferences().edit().clear().apply(); @@ -482,6 +533,16 @@ public void setPinnedDialogsLoaded(int folderId, boolean loaded) { getPreferences().edit().putBoolean("2pinnedDialogsLoaded" + folderId, loaded).commit(); } + public void clearPinnedDialogsLoaded() { + SharedPreferences.Editor editor = getPreferences().edit(); + for (String key : getPreferences().getAll().keySet()) { + if (key.startsWith("2pinnedDialogsLoaded")) { + editor.remove(key); + } + } + editor.apply(); + } + public static final int i_dialogsLoadOffsetId = 0; public static final int i_dialogsLoadOffsetDate = 1; public static final int i_dialogsLoadOffsetUserId = 2; @@ -528,16 +589,7 @@ public boolean isPremium() { } public Long getEmojiStatus() { - if (currentUser == null) { - return null; - } - if (currentUser.emoji_status instanceof TLRPC.TL_emojiStatusUntil && ((TLRPC.TL_emojiStatusUntil) currentUser.emoji_status).until > (int) (System.currentTimeMillis() / 1000)) { - return ((TLRPC.TL_emojiStatusUntil) currentUser.emoji_status).document_id; - } - if (currentUser.emoji_status instanceof TLRPC.TL_emojiStatus) { - return ((TLRPC.TL_emojiStatus) currentUser.emoji_status).document_id; - } - return null; + return UserObject.getEmojiStatusDocumentId(currentUser); } @@ -569,4 +621,9 @@ public void loadGlobalTTl() { public void setGlobalTtl(int ttl) { globalTtl = ttl; } + + public void clearFilters() { + getPreferences().edit().remove("filtersLoaded").apply(); + filtersLoaded = false; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/UserObject.java b/TMessagesProj/src/main/java/org/telegram/messenger/UserObject.java index fb0851f64a..8dc6f95c45 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/UserObject.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/UserObject.java @@ -113,4 +113,26 @@ public static TLRPC.UserProfilePhoto getPhoto(TLRPC.User user) { public static boolean hasFallbackPhoto(TLRPC.UserFull userInfo) { return userInfo != null && userInfo.fallback_photo != null && !(userInfo.fallback_photo instanceof TLRPC.TL_photoEmpty); } + + public static Long getEmojiStatusDocumentId(TLRPC.User user) { + if (user == null) { + return null; + } + return getEmojiStatusDocumentId(user.emoji_status); + } + + public static Long getEmojiStatusDocumentId(TLRPC.EmojiStatus emojiStatus) { + if (emojiStatus == null) { + return null; + } + if (emojiStatus instanceof TLRPC.TL_emojiStatus) + return ((TLRPC.TL_emojiStatus) emojiStatus).document_id; + if (emojiStatus instanceof TLRPC.TL_emojiStatusUntil) { + TLRPC.TL_emojiStatusUntil untilStatus = (TLRPC.TL_emojiStatusUntil) emojiStatus; + if (untilStatus.until > (int) (System.currentTimeMillis() / 1000)) { + return untilStatus.document_id; + } + } + return null; + } } diff 
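UserConfig.getEmojiStatus now just delegates to the new UserObject.getEmojiStatusDocumentId helper, which returns the custom-emoji document id for a permanent status and, for a timed status, only while it has not expired. A hypothetical call site (the `user` variable is whatever TLRPC.User is being rendered; it is not defined in this diff):

// Illustrative usage of the helper added above.
Long statusDocumentId = UserObject.getEmojiStatusDocumentId(user);
if (statusDocumentId != null) {
    // A custom emoji status is active: statusDocumentId identifies the
    // animated-emoji document to draw next to the name.
}
// TL_emojiStatusUntil only counts while `until` is still ahead of
// System.currentTimeMillis() / 1000, exactly as in the helper.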
--git a/TMessagesProj/src/main/java/org/telegram/messenger/Utilities.java b/TMessagesProj/src/main/java/org/telegram/messenger/Utilities.java index b8475e6472..df3c8f3cb7 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/Utilities.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/Utilities.java @@ -80,6 +80,7 @@ public class Utilities { public static native void drawDitheredGradient(Bitmap bitmap, int[] colors, int startX, int startY, int endX, int endY); public static native int saveProgressiveJpeg(Bitmap bitmap, int width, int height, int stride, int quality, String path); public static native void generateGradient(Bitmap bitmap, boolean unpin, int phase, float progress, int width, int height, int stride, int[] colors); + public static native void setupNativeCrashesListener(String path); public static Bitmap stackBlurBitmapMax(Bitmap bitmap) { int w = AndroidUtilities.dp(20); @@ -511,10 +512,18 @@ public static interface Callback { public void run(T arg); } + public static interface CallbackReturn { + public ReturnType run(Arg arg); + } + public static interface Callback2 { public void run(T arg, T2 arg2); } + public static interface Callback3 { + public void run(T arg, T2 arg2, T3 arg3); + } + public static Value getOrDefault(HashMap map, Key key, Value defaultValue) { Value v = map.get(key); if (v == null) { @@ -522,4 +531,34 @@ public static Value getOrDefault(HashMap map, Key key, } return v; } + + public static void doCallbacks(Utilities.Callback ...actions) { + doCallbacks(0, actions); + } + private static void doCallbacks(int i, Utilities.Callback ...actions) { + if (actions != null && actions.length > i) { + actions[i].run(() -> doCallbacks(i + 1, actions)); + } + } + + public static void raceCallbacks(Runnable onFinish, Utilities.Callback ...actions) { + if (actions == null || actions.length == 0) { + if (onFinish != null) { + onFinish.run(); + } + return; + } + final int[] finished = new int[] { 0 }; + Runnable checkFinish = () -> { + finished[0]++; + if (finished[0] == actions.length) { + if (onFinish != null) { + onFinish.run(); + } + } + }; + for (int i = 0; i < actions.length; ++i) { + actions[i].run(checkFinish); + } + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/VideoEditedInfo.java b/TMessagesProj/src/main/java/org/telegram/messenger/VideoEditedInfo.java index aef8d17c4a..d219b56c05 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/VideoEditedInfo.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/VideoEditedInfo.java @@ -103,6 +103,7 @@ public static class MediaEntity { public int textAlign; public int viewWidth; public int viewHeight; + public float roundRadius; public float scale; public float textViewWidth; @@ -122,6 +123,7 @@ public static class MediaEntity { public View view; public Canvas canvas; public AnimatedFileDrawable animatedFileDrawable; + public Canvas roundRadiusCanvas; public MediaEntity() { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/browser/Browser.java b/TMessagesProj/src/main/java/org/telegram/messenger/browser/Browser.java index 2e350ca94c..4f79013d7d 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/browser/Browser.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/browser/Browser.java @@ -33,6 +33,7 @@ import org.telegram.messenger.CustomTabsCopyReceiver; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; +import org.telegram.messenger.MessagesController; import 
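The new Utilities.doCallbacks and Utilities.raceCallbacks helpers above chain asynchronous steps: doCallbacks runs each callback in order, handing every step a "next" Runnable, while raceCallbacks starts all of them and fires onFinish once each step has reported back through its Runnable. A hypothetical call site (loadFromCache, fetchFromNetwork, updateUi, loadDialogs, loadTopics and onEverythingLoaded are placeholders, not methods from this diff):

// Sequential steps: each one decides when to hand control to the next.
Utilities.doCallbacks(
    next -> { loadFromCache(); next.run(); },
    next -> fetchFromNetwork(next),          // next.run() is called inside the response
    next -> { updateUi(); next.run(); }
);

// Parallel steps: onEverythingLoaded runs once both loads have called `done`.
Utilities.raceCallbacks(
    () -> onEverythingLoaded(),
    done -> loadDialogs(done),
    done -> loadTopics(done)
);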
org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; import org.telegram.messenger.ShareBroadcastReceiver; @@ -250,7 +251,7 @@ public static void openUrl(final Context context, Uri uri, final boolean allowCu boolean internalUri = isInternalUri(uri, forceBrowser); if (tryTelegraph) { try { - String host = uri.getHost().toLowerCase(); + String host = AndroidUtilities.getHostAuthority(uri); if (isTelegraphUrl(host, true) || uri.toString().toLowerCase().contains("telegram.org/faq") || uri.toString().toLowerCase().contains("telegram.org/privacy")) { final AlertDialog[] progressDialog = new AlertDialog[] { new AlertDialog(context, AlertDialog.ALERT_TYPE_SPINNER) @@ -309,7 +310,7 @@ public static void openUrl(final Context context, Uri uri, final boolean allowCu FileLog.e(e); } } - String host = uri.getHost() != null ? uri.getHost().toLowerCase() : ""; + String host = AndroidUtilities.getHostAuthority(uri.toString().toLowerCase()); if (AccountInstance.getInstance(currentAccount).getMessagesController().autologinDomains.contains(host)) { String token = "autologin_token=" + URLEncoder.encode(AccountInstance.getInstance(UserConfig.selectedAccount).getMessagesController().autologinToken, "UTF-8"); String url = uri.toString(); @@ -377,6 +378,13 @@ public static void openUrl(final Context context, Uri uri, final boolean allowCu } if (forceBrowser[0] || allActivities == null || allActivities.isEmpty()) { + if (MessagesController.getInstance(currentAccount).authDomains.contains(host)) { + Intent intent = new Intent(Intent.ACTION_VIEW, uri); + intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); + ApplicationLoader.applicationContext.startActivity(intent); + return; + } + Intent share = new Intent(ApplicationLoader.applicationContext, ShareBroadcastReceiver.class); share.setAction(Intent.ACTION_SEND); @@ -454,9 +462,16 @@ public static boolean isInternalUri(Uri uri, boolean[] forceBrowser) { } public static boolean isInternalUri(Uri uri, boolean all, boolean[] forceBrowser) { - String host = uri.getHost(); + String host = AndroidUtilities.getHostAuthority(uri); host = host != null ? host.toLowerCase() : ""; + if (MessagesController.getInstance(UserConfig.selectedAccount).authDomains.contains(host)) { + if (forceBrowser != null) { + forceBrowser[0] = true; + } + return false; + } + Matcher prefixMatcher = LaunchActivity.PREFIX_T_ME_PATTERN.matcher(host); if (prefixMatcher.find()) { uri = Uri.parse("https://t.me/" + prefixMatcher.group(1) + (TextUtils.isEmpty(uri.getPath()) ? "" : "/" + uri.getPath()) + (TextUtils.isEmpty(uri.getQuery()) ? "" : "?" 
+ uri.getQuery())); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraView.java b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraView.java index c8f35c2085..fce3b449e5 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraView.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraView.java @@ -41,6 +41,7 @@ import android.os.Message; import android.os.VibrationEffect; import android.os.Vibrator; +import android.util.Log; import android.view.Gravity; import android.view.HapticFeedbackConstants; import android.view.Surface; @@ -50,6 +51,7 @@ import android.widget.FrameLayout; import android.widget.ImageView; +import androidx.annotation.NonNull; import androidx.core.graphics.ColorUtils; import org.telegram.messenger.AndroidUtilities; @@ -60,6 +62,7 @@ import org.telegram.messenger.Utilities; import org.telegram.messenger.video.MP4Builder; import org.telegram.messenger.video.Mp4Movie; +import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.InstantCameraView; import org.telegram.ui.Components.LayoutHelper; @@ -91,6 +94,7 @@ public class CameraView extends FrameLayout implements TextureView.SurfaceTextur private Size pictureSize; CameraInfo info; private boolean mirror; + private boolean lazy; private TextureView textureView; private ImageView blurredStubView; private CameraSession cameraSession; @@ -102,6 +106,7 @@ public class CameraView extends FrameLayout implements TextureView.SurfaceTextur private Matrix txform = new Matrix(); private Matrix matrix = new Matrix(); private int focusAreaSize; + private Drawable thumbDrawable; private boolean useMaxPreview; @@ -253,11 +258,18 @@ public interface CameraViewDelegate { } public CameraView(Context context, boolean frontface) { + this(context, frontface, false); + } + + public CameraView(Context context, boolean frontface, boolean lazy) { super(context, null); initialFrontface = isFrontface = frontface; textureView = new TextureView(context); - textureView.setSurfaceTextureListener(this); - addView(textureView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER)); + if (!(this.lazy = lazy)) { + initTexture(); + } + + setWillNotDraw(!lazy); blurredStubView = new ImageView(context); addView(blurredStubView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER)); @@ -269,6 +281,17 @@ public CameraView(Context context, boolean frontface) { innerPaint.setColor(0x7fffffff); } + private boolean textureInited = false; + public void initTexture() { + if (textureInited) { + return; + } + + textureView.setSurfaceTextureListener(this); + addView(textureView, 0, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER)); + textureInited = true; + } + public void setOptimizeForBarcode(boolean value) { optimizeForBarcode = value; if (cameraSession != null) { @@ -276,6 +299,40 @@ public void setOptimizeForBarcode(boolean value) { } } + Rect bounds = new Rect(); + + @Override + protected void onDraw(Canvas canvas) { + if (thumbDrawable != null) { + bounds.set(0, 0, getMeasuredWidth(), getMeasuredHeight()); + int W = thumbDrawable.getIntrinsicWidth(), H = thumbDrawable.getIntrinsicHeight(); + float scale = 1f / Math.min(W / (float) Math.max(1, bounds.width()), H / (float) Math.max(1, bounds.height())); + thumbDrawable.setBounds( + (int) (bounds.centerX() - W * scale / 2f), + (int) 
(bounds.centerY() - H * scale / 2f), + (int) (bounds.centerX() + W * scale / 2f), + (int) (bounds.centerY() + H * scale / 2f) + ); + thumbDrawable.draw(canvas); + } + super.onDraw(canvas); + } + + @Override + protected boolean verifyDrawable(@NonNull Drawable who) { + return who == thumbDrawable || super.verifyDrawable(who); + } + + public void setThumbDrawable(Drawable drawable) { + if (thumbDrawable != null) { + thumbDrawable.setCallback(null); + } + thumbDrawable = drawable; + if (thumbDrawable != null) { + thumbDrawable.setCallback(this); + } + } + private int measurementsCount = 0; @Override protected void onAttachedToWindow() { @@ -480,6 +537,39 @@ public void onSurfaceTextureUpdated(SurfaceTexture surface) { delegate.onCameraInit(); } inited = true; + if (lazy) { + textureView.setAlpha(0); + showTexture(true, true); + } + } + } + + private ValueAnimator textureViewAnimator; + public void showTexture(boolean show, boolean animated) { + if (textureView == null) { + return; + } + + if (textureViewAnimator != null) { + textureViewAnimator.cancel(); + textureViewAnimator = null; + } + if (animated) { + textureViewAnimator = ValueAnimator.ofFloat(textureView.getAlpha(), show ? 1 : 0); + textureViewAnimator.addUpdateListener(anm -> { + final float t = (float) anm.getAnimatedValue(); + textureView.setAlpha(t); + }); + textureViewAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + textureView.setAlpha(show ? 1 : 0); + textureViewAnimator = null; + } + }); + textureViewAnimator.start(); + } else { + textureView.setAlpha(show ? 1 : 0); } } @@ -503,7 +593,7 @@ public void setClipBottom(int value) { }; private void checkPreviewMatrix() { - if (previewSize == null) { + if (previewSize == null || textureView == null) { return; } @@ -1159,6 +1249,7 @@ private int loadShader(int type, String shaderCode) { private void createCamera(final SurfaceTexture surfaceTexture) { AndroidUtilities.runOnUIThread(() -> { + CameraGLThread cameraThread = this.cameraThread; if (cameraThread == null) { return; } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/secretmedia/EncryptedFileDataSource.java b/TMessagesProj/src/main/java/org/telegram/messenger/secretmedia/EncryptedFileDataSource.java index 752650f4c4..f8031bf076 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/secretmedia/EncryptedFileDataSource.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/secretmedia/EncryptedFileDataSource.java @@ -8,39 +8,46 @@ package org.telegram.messenger.secretmedia; +import static java.lang.Math.min; + import android.net.Uri; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.PlaybackException; import com.google.android.exoplayer2.upstream.BaseDataSource; +import com.google.android.exoplayer2.upstream.DataSourceException; import com.google.android.exoplayer2.upstream.DataSpec; import com.google.android.exoplayer2.upstream.TransferListener; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.Log; import org.telegram.messenger.FileLoader; +import org.telegram.messenger.FileLog; import org.telegram.messenger.Utilities; import java.io.EOFException; import java.io.File; +import java.io.FileInputStream; import java.io.IOException; import java.io.RandomAccessFile; +import java.nio.charset.StandardCharsets; +import java.util.Base64; public final class EncryptedFileDataSource extends BaseDataSource { public static class 
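The thumbDrawable support added to CameraView above draws a placeholder under the initially transparent TextureView while the lazy camera starts up; its bounds math is a centre-crop, scaling the drawable so it fully covers the view and centring it. The same math as a standalone helper (class name made up):

import android.graphics.Rect;

class CenterCropBoundsSketch {
    // Returns bounds that scale a drawable of (drawableW x drawableH) so it fully
    // covers a view of (viewW x viewH), centred, matching CameraView.onDraw above.
    static Rect centerCrop(int viewW, int viewH, int drawableW, int drawableH) {
        float scale = 1f / Math.min(
                drawableW / (float) Math.max(1, viewW),
                drawableH / (float) Math.max(1, viewH));
        int halfW = (int) (drawableW * scale / 2f);
        int halfH = (int) (drawableH * scale / 2f);
        int cx = viewW / 2, cy = viewH / 2;
        return new Rect(cx - halfW, cy - halfH, cx + halfW, cy + halfH);
    }
}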
EncryptedFileDataSourceException extends IOException { - public EncryptedFileDataSourceException(IOException cause) { + public EncryptedFileDataSourceException(Throwable cause) { super(cause); } } - private RandomAccessFile file; private Uri uri; - private long bytesRemaining; private boolean opened; - private byte[] key = new byte[32]; - private byte[] iv = new byte[16]; - private int fileOffset; + private int bytesRemaining; + EncryptedFileInputStream fileInputStream; public EncryptedFileDataSource() { super(/* isNetwork= */ false); @@ -54,81 +61,71 @@ public EncryptedFileDataSource(@Nullable TransferListener listener) { } } + @Override - public long open(DataSpec dataSpec) throws EncryptedFileDataSourceException { + public long open(DataSpec dataSpec) throws IOException { + uri = dataSpec.uri; + File path = new File(dataSpec.uri.getPath()); + String name = path.getName(); + File keyPath = new File(FileLoader.getInternalCacheDir(), name + ".key"); + try { - uri = dataSpec.uri; - File path = new File(dataSpec.uri.getPath()); - String name = path.getName(); - File keyPath = new File(FileLoader.getInternalCacheDir(), name + ".key"); - RandomAccessFile keyFile = new RandomAccessFile(keyPath, "r"); - keyFile.read(key); - keyFile.read(iv); - keyFile.close(); - - file = new RandomAccessFile(path, "r"); - file.seek(dataSpec.position); - fileOffset = (int) dataSpec.position; - bytesRemaining = dataSpec.length == C.LENGTH_UNSET ? file.length() - dataSpec.position : dataSpec.length; - if (bytesRemaining < 0) { - throw new EOFException(); + fileInputStream = new EncryptedFileInputStream(path, keyPath); + fileInputStream.skip(dataSpec.position); + int len = (int) path.length(); + + transferInitializing(dataSpec); + if (dataSpec.position > len) { + throw new DataSourceException(PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE); } - } catch (IOException e) { - throw new EncryptedFileDataSourceException(e); + bytesRemaining = (int) (len - dataSpec.position); + if (dataSpec.length != C.LENGTH_UNSET) { + bytesRemaining = (int) min(bytesRemaining, dataSpec.length); + } + opened = true; + transferStarted(dataSpec); + return dataSpec.length != C.LENGTH_UNSET ? 
dataSpec.length : bytesRemaining; + } catch (Throwable throwable) { + throw new DataSourceException(PlaybackException.ERROR_CODE_IO_READ_POSITION_OUT_OF_RANGE); } - - opened = true; - transferStarted(dataSpec); - - return bytesRemaining; } @Override - public int read(byte[] buffer, int offset, int readLength) throws EncryptedFileDataSourceException { - if (readLength == 0) { + public int read(byte[] buffer, int offset, int length) { + if (length == 0) { return 0; } else if (bytesRemaining == 0) { return C.RESULT_END_OF_INPUT; - } else { - int bytesRead; - try { - bytesRead = file.read(buffer, offset, (int) Math.min(bytesRemaining, readLength)); - Utilities.aesCtrDecryptionByteArray(buffer, key, iv, offset, bytesRead, fileOffset); - fileOffset += bytesRead; - } catch (IOException e) { - throw new EncryptedFileDataSourceException(e); - } - - if (bytesRead > 0) { - bytesRemaining -= bytesRead; - bytesTransferred(bytesRead); - } - - return bytesRead; } + length = min(length, bytesRemaining); + try { + fileInputStream.read(buffer, offset, length); + } catch (IOException e) { + e.printStackTrace(); + } + bytesRemaining -= length; + bytesTransferred(length); + return length; } @Override + @Nullable public Uri getUri() { return uri; } @Override - public void close() throws EncryptedFileDataSourceException { - uri = null; - fileOffset = 0; + public void close() { try { - if (file != null) { - file.close(); - } + fileInputStream.close(); } catch (IOException e) { - throw new EncryptedFileDataSourceException(e); - } finally { - file = null; - if (opened) { - opened = false; - transferEnded(); - } + e.printStackTrace(); } + if (opened) { + opened = false; + transferEnded(); + } + fileInputStream = null; + uri = null; } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/secretmedia/ExtendedDefaultDataSource.java b/TMessagesProj/src/main/java/org/telegram/messenger/secretmedia/ExtendedDefaultDataSource.java index ab17982673..59198173ba 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/secretmedia/ExtendedDefaultDataSource.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/secretmedia/ExtendedDefaultDataSource.java @@ -97,7 +97,6 @@ public ExtendedDefaultDataSource( context, new DefaultHttpDataSource( userAgent, - /* contentTypePredicate= */ null, connectTimeoutMillis, readTimeoutMillis, allowCrossProtocolRedirects, diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/utils/BitmapsCache.java b/TMessagesProj/src/main/java/org/telegram/messenger/utils/BitmapsCache.java index cb6a39eb58..1614c5c083 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/utils/BitmapsCache.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/utils/BitmapsCache.java @@ -16,11 +16,9 @@ import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; -import java.io.OutputStream; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.concurrent.ConcurrentHashMap; @@ -36,13 +34,13 @@ public class BitmapsCache { public final static int FRAME_RESULT_NO_FRAME = -1; public static final int COMPRESS_QUALITY_DEFAULT = 60; private final Cacheable source; + private static boolean mkdir; String fileName; int w; int h; ArrayList frameOffsets = new ArrayList<>(); - final boolean useSharedBuffers; static ConcurrentHashMap sharedBuffers = new ConcurrentHashMap(); static volatile boolean cleanupScheduled; @@ -88,6 +86,10 
@@ public BitmapsCache(File sourceFile, Cacheable source, CacheOptions options, int } File fileTmo = new File(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_CACHE), "acache"); + if (!mkdir) { + fileTmo.mkdir(); + mkdir = true; + } file = new File(fileTmo, fileName + "_" + w + "_" + h + (noLimit ? "_nolimit" : " ") + ".pcache2"); useSharedBuffers = w < AndroidUtilities.dp(60) && h < AndroidUtilities.dp(60); @@ -111,6 +113,9 @@ public BitmapsCache(File sourceFile, Cacheable source, CacheOptions options, int fileExist = false; file.delete(); } else { + if (cachedFile != randomAccessFile) { + closeCachedFile(); + } cachedFile = randomAccessFile; } } @@ -179,6 +184,9 @@ public void createCache() { if (count > 0) { fillFrames(randomAccessFile, count); randomAccessFile.seek(0); + if (cachedFile != randomAccessFile) { + closeCachedFile(); + } cachedFile = randomAccessFile; fileExist = true; return; @@ -214,7 +222,7 @@ public void createCache() { } sharedTools.allocate(h, w); Bitmap[] bitmap = sharedTools.bitmap; - ByteArrayOutputStream[] byteArrayOutputStream = sharedTools.byteArrayOutputStream; + ImmutableByteArrayOutputStream[] byteArrayOutputStream = sharedTools.byteArrayOutputStream; CountDownLatch[] countDownLatch = new CountDownLatch[N]; ArrayList frameOffsets = new ArrayList<>(); @@ -279,7 +287,7 @@ public void createCache() { } Bitmap.CompressFormat format = Bitmap.CompressFormat.WEBP; - if (Build.VERSION.SDK_INT <= 26) { + if (Build.VERSION.SDK_INT <= 28) { format = Bitmap.CompressFormat.PNG; } bitmap[finalIndex].compress(format, compressQuality, byteArrayOutputStream[finalIndex]); @@ -345,6 +353,7 @@ public void createCache() { this.frameOffsets.clear(); this.frameOffsets.addAll(frameOffsets); + closeCachedFile(); cachedFile = new RandomAccessFile(file, "r"); cacheCreated = true; fileExist = true; @@ -460,6 +469,9 @@ public int getFrame(int index, Bitmap bitmap) { bufferTmp = getBuffer(selectedFrame); randomAccessFile.readFully(bufferTmp, 0, selectedFrame.frameSize); if (!recycled) { + if (cachedFile != randomAccessFile) { + closeCachedFile(); + } cachedFile = randomAccessFile; } else { cachedFile = null; @@ -495,6 +507,16 @@ public int getFrame(int index, Bitmap bitmap) { return FRAME_RESULT_NO_FRAME; } + private void closeCachedFile() { + if (cachedFile != null) { + try { + cachedFile.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + private byte[] getBuffer(FrameOffset selectedFrame) { boolean useSharedBuffers = this.useSharedBuffers && Thread.currentThread().getName().startsWith(DispatchQueuePoolBackground.THREAD_PREFIX); byte[] bufferTmp; @@ -539,6 +561,10 @@ public int getFrameCount() { return frameOffsets.size(); } + public boolean isCreated() { + return cacheCreated && fileExist; + } + private class FrameOffset { final int index; int frameSize; @@ -559,82 +585,6 @@ public interface Cacheable { Bitmap getFirstFrame(Bitmap bitmap); } - public static class ByteArrayOutputStream extends OutputStream { - - protected byte buf[]; - - protected int count; - - public ByteArrayOutputStream() { - this(32); - } - - public ByteArrayOutputStream(int size) { - buf = new byte[size]; - } - - private void ensureCapacity(int minCapacity) { - if (minCapacity - buf.length > 0) { - grow(minCapacity); - } - } - - private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8; - - private void grow(int minCapacity) { - int oldCapacity = buf.length; - int newCapacity = oldCapacity << 1; - if (newCapacity - minCapacity < 0) - newCapacity = minCapacity; - if (newCapacity - 
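/*
 * Several hunks above repeat one pattern: before `cachedFile = randomAccessFile;` the
 * previously cached RandomAccessFile is closed so its file descriptor is not leaked when a
 * new reader takes over. A condensed sketch of that guard (illustrative only):
 *
 *   if (cachedFile != null && cachedFile != randomAccessFile) {
 *       try {
 *           cachedFile.close();
 *       } catch (IOException e) {
 *           e.printStackTrace();
 *       }
 *   }
 *   cachedFile = randomAccessFile;
 */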
MAX_ARRAY_SIZE > 0) - newCapacity = hugeCapacity(minCapacity); - buf = Arrays.copyOf(buf, newCapacity); - } - - private static int hugeCapacity(int minCapacity) { - if (minCapacity < 0) // overflow - throw new OutOfMemoryError(); - return (minCapacity > MAX_ARRAY_SIZE) ? - Integer.MAX_VALUE : - MAX_ARRAY_SIZE; - } - - public synchronized void write(int b) { - ensureCapacity(count + 1); - buf[count] = (byte) b; - count += 1; - } - - public void writeInt(int value) { - ensureCapacity(count + 4); - buf[count] = (byte) (value >>> 24); - buf[count + 1] = (byte) (value >>> 16); - buf[count + 2] = (byte) (value >>> 8); - buf[count + 3] = (byte) (value); - count += 4; - } - - public synchronized void write(byte b[], int off, int len) { - if ((off < 0) || (off > b.length) || (len < 0) || - ((off + len) - b.length > 0)) { - throw new IndexOutOfBoundsException(); - } - ensureCapacity(count + len); - System.arraycopy(b, off, buf, count, len); - count += len; - } - - public synchronized void writeTo(OutputStream out) throws IOException { - out.write(buf, 0, count); - } - - public synchronized void reset() { - count = 0; - } - - - } - public static class Metadata { public int frame; } @@ -642,10 +592,11 @@ public static class Metadata { public static class CacheOptions { public int compressQuality = 100; public boolean fallback = false; + public boolean firstFrame; } private static class CacheGeneratorSharedTools { - ByteArrayOutputStream[] byteArrayOutputStream = new ByteArrayOutputStream[N]; + ImmutableByteArrayOutputStream[] byteArrayOutputStream = new ImmutableByteArrayOutputStream[N]; private Bitmap[] bitmap = new Bitmap[N]; private int lastSize; @@ -672,7 +623,7 @@ void allocate(int h, int w) { bitmap[i] = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888); } if (byteArrayOutputStream[i] == null) { - byteArrayOutputStream[i] = new ByteArrayOutputStream(w * h * 2); + byteArrayOutputStream[i] = new ImmutableByteArrayOutputStream(w * h * 2); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/utils/ImmutableByteArrayOutputStream.java b/TMessagesProj/src/main/java/org/telegram/messenger/utils/ImmutableByteArrayOutputStream.java new file mode 100644 index 0000000000..38b6b98a30 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/messenger/utils/ImmutableByteArrayOutputStream.java @@ -0,0 +1,93 @@ +package org.telegram.messenger.utils; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.Arrays; + +public class ImmutableByteArrayOutputStream extends OutputStream { + + public byte buf[]; + + protected int count; + + public ImmutableByteArrayOutputStream() { + this(32); + } + + public ImmutableByteArrayOutputStream(int size) { + buf = new byte[size]; + } + + private void ensureCapacity(int minCapacity) { + if (minCapacity - buf.length > 0) { + grow(minCapacity); + } + } + + private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8; + + private void grow(int minCapacity) { + int oldCapacity = buf.length; + int newCapacity = oldCapacity << 1; + if (newCapacity - minCapacity < 0) + newCapacity = minCapacity; + if (newCapacity - MAX_ARRAY_SIZE > 0) + newCapacity = hugeCapacity(minCapacity); + buf = Arrays.copyOf(buf, newCapacity); + } + + private static int hugeCapacity(int minCapacity) { + if (minCapacity < 0) // overflow + throw new OutOfMemoryError(); + return (minCapacity > MAX_ARRAY_SIZE) ? 
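/*
 * Usage sketch for the new ImmutableByteArrayOutputStream: unlike java.io.ByteArrayOutputStream
 * it exposes `buf` directly and adds big-endian writeInt/writeLong, so a compressed frame can
 * be handed to a RandomAccessFile without the defensive copy toByteArray() would make. The
 * caller below (w, h, frameIndex, bitmap, raf) is hypothetical and assumes same-package access
 * to `count`.
 *
 *   ImmutableByteArrayOutputStream out = new ImmutableByteArrayOutputStream(w * h * 2);
 *   out.writeInt(frameIndex);                              // stored as 4 big-endian bytes
 *   bitmap.compress(Bitmap.CompressFormat.WEBP, 60, out);  // regular OutputStream API still works
 *   raf.write(out.buf, 0, out.count);                      // no intermediate byte[] copy
 *   out.reset();                                           // reuse the backing array
 */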
+ Integer.MAX_VALUE : + MAX_ARRAY_SIZE; + } + + public synchronized void write(int b) { + ensureCapacity(count + 1); + buf[count] = (byte) b; + count += 1; + } + + public void writeInt(int value) { + ensureCapacity(count + 4); + buf[count] = (byte) (value >>> 24); + buf[count + 1] = (byte) (value >>> 16); + buf[count + 2] = (byte) (value >>> 8); + buf[count + 3] = (byte) (value); + count += 4; + } + + public void writeLong(long value) { + ensureCapacity(count + 8); + buf[count] = (byte) (value >>> 56); + buf[count + 1] = (byte) (value >>> 48); + buf[count + 2] = (byte) (value >>> 40); + buf[count + 3] = (byte) (value >>> 32); + buf[count + 4] = (byte) (value >>> 24); + buf[count + 5] = (byte) (value >>> 16); + buf[count + 6] = (byte) (value >>> 8); + buf[count + 7] = (byte) (value); + count += 8; + } + + public synchronized void write(byte b[], int off, int len) { + if ((off < 0) || (off > b.length) || (len < 0) || + ((off + len) - b.length > 0)) { + throw new IndexOutOfBoundsException(); + } + ensureCapacity(count + len); + System.arraycopy(b, off, buf, count, len); + count += len; + } + + public synchronized void writeTo(OutputStream out) throws IOException { + out.write(buf, 0, count); + } + + public synchronized void reset() { + count = 0; + } + +} diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/utils/PhotoUtilities.java b/TMessagesProj/src/main/java/org/telegram/messenger/utils/PhotoUtilities.java index 52fbac616f..82e3c54730 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/utils/PhotoUtilities.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/utils/PhotoUtilities.java @@ -15,6 +15,7 @@ import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.INavigationLayout; +import org.telegram.ui.ChatActivity; import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.ImageUpdater; import org.telegram.ui.ProfileActivity; @@ -47,10 +48,10 @@ public static void setImageAsAvatar(MediaController.PhotoEntry entry, BaseFragme INavigationLayout layout = baseFragment.getParentLayout(); int currentAccount = baseFragment.getCurrentAccount(); - ImageUpdater imageUpdater = new ImageUpdater(true); + ImageUpdater imageUpdater = new ImageUpdater(true, ImageUpdater.FOR_TYPE_USER, true); imageUpdater.parentFragment = baseFragment; imageUpdater.processEntry(entry); - imageUpdater.setDelegate((photo, video, videoStartTimestamp, videoPath, bigSize, smallSize, isVideo) -> AndroidUtilities.runOnUIThread(() -> { + imageUpdater.setDelegate((photo, video, videoStartTimestamp, videoPath, bigSize, smallSize, isVideo, emojiMarkup) -> AndroidUtilities.runOnUIThread(() -> { TLRPC.TL_photos_uploadProfilePhoto req = new TLRPC.TL_photos_uploadProfilePhoto(); if (photo != null) { req.file = photo; @@ -62,6 +63,10 @@ public static void setImageAsAvatar(MediaController.PhotoEntry entry, BaseFragme req.video_start_ts = videoStartTimestamp; req.flags |= 4; } + if (emojiMarkup != null) { + req.video_emoji_markup = emojiMarkup; + req.flags |= 16; + } ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { if (response instanceof TLRPC.TL_photos_photo) { TLRPC.TL_photos_photo photos_photo = (TLRPC.TL_photos_photo) response; @@ -92,7 +97,7 @@ public static void setImageAsAvatar(MediaController.PhotoEntry entry, BaseFragme if (onDone != null) { onDone.run(); } - CharSequence title = 
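/*
 * Flag bits used by TLRPC.TL_photos_uploadProfilePhoto in this file (values taken from the
 * added lines above):
 *
 *   1  (1 << 0)  file                (still photo)
 *   2  (1 << 1)  video
 *   4  (1 << 2)  video_start_ts
 *   16 (1 << 4)  video_emoji_markup  (animated-emoji avatar markup, new in this revision)
 *
 * A delegate assembles the request by OR-ing only the bits whose fields are present:
 *
 *   TLRPC.TL_photos_uploadProfilePhoto req = new TLRPC.TL_photos_uploadProfilePhoto();
 *   if (photo != null)       { req.file = photo;   req.flags |= 1; }
 *   if (video != null)       { req.video = video;  req.video_start_ts = videoStartTimestamp;
 *                              req.flags |= 2 | 4; }
 *   if (emojiMarkup != null) { req.video_emoji_markup = emojiMarkup; req.flags |= 16; }
 */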
AndroidUtilities.replaceTags(LocaleController.getString("ApplyAvatarHint", R.string.ApplyAvatarHintTitle)); + CharSequence title = AndroidUtilities.replaceTags(LocaleController.getString("ApplyAvatarHintTitle", R.string.ApplyAvatarHintTitle)); CharSequence subtitle = AndroidUtilities.replaceSingleTag(LocaleController.getString("ApplyAvatarHint", R.string.ApplyAvatarHint), () -> { Bundle args = new Bundle(); args.putLong("user_id", UserConfig.getInstance(currentAccount).clientUserId); @@ -145,4 +150,87 @@ public static void applyPhotoToUser(TLRPC.PhotoSize smallSize, TLRPC.PhotoSize b user.photo.photo_big = bigSize.location; } } + + public static void showAvatartConstructorForUpdateUserPhoto(ChatActivity chatActivity, TLRPC.VideoSize emojiMarkup) { + ImageUpdater imageUpdater = new ImageUpdater(true, ImageUpdater.FOR_TYPE_USER, true); + imageUpdater.parentFragment = chatActivity; + imageUpdater.showAvatarConstructor(emojiMarkup); + final TLRPC.FileLocation[] avatar = new TLRPC.FileLocation[1]; + final TLRPC.FileLocation[] avatarBig = new TLRPC.FileLocation[1]; + long userId = chatActivity.getUserConfig().getClientUserId(); + imageUpdater.setDelegate((photo, video, videoStartTimestamp, videoPath, bigSize, smallSize, isVideo, emojiMarkup1) -> { + if (photo != null || video != null || emojiMarkup1 != null) { + TLRPC.TL_photos_uploadProfilePhoto req = new TLRPC.TL_photos_uploadProfilePhoto(); + if (photo != null) { + req.file = photo; + req.flags |= 1; + } + if (video != null) { + req.video = video; + req.flags |= 2; + req.video_start_ts = videoStartTimestamp; + req.flags |= 4; + } + if (emojiMarkup1 != null) { + req.video_emoji_markup = emojiMarkup1; + req.flags |= 16; + } + chatActivity.getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + if (error == null) { + TLRPC.User user = chatActivity.getMessagesController().getUser(chatActivity.getUserConfig().getClientUserId()); + + TLRPC.TL_photos_photo photos_photo = (TLRPC.TL_photos_photo) response; + ArrayList sizes = photos_photo.photo.sizes; + TLRPC.PhotoSize small = FileLoader.getClosestPhotoSizeWithSize(sizes, 150); + TLRPC.PhotoSize big = FileLoader.getClosestPhotoSizeWithSize(sizes, 800); + TLRPC.VideoSize videoSize = photos_photo.photo.video_sizes.isEmpty() ? 
null : FileLoader.getClosestVideoSizeWithSize(photos_photo.photo.video_sizes, 1000); + user.photo = new TLRPC.TL_userProfilePhoto(); + user.photo.photo_id = photos_photo.photo.id; + if (small != null) { + user.photo.photo_small = small.location; + } + if (big != null) { + user.photo.photo_big = big.location; + } + + if (small != null && avatar[0] != null) { + File destFile = FileLoader.getInstance(chatActivity.getCurrentAccount()).getPathToAttach(small, true); + File src = FileLoader.getInstance(chatActivity.getCurrentAccount()).getPathToAttach(avatar[0], true); + src.renameTo(destFile); + String oldKey = avatar[0].volume_id + "_" + avatar[0].local_id + "@50_50"; + String newKey = small.location.volume_id + "_" + small.location.local_id + "@50_50"; + ImageLoader.getInstance().replaceImageInCache(oldKey, newKey, ImageLocation.getForUserOrChat(user, ImageLocation.TYPE_SMALL), false); + } + + if (videoSize != null && videoPath != null) { + File destFile = FileLoader.getInstance(chatActivity.getCurrentAccount()).getPathToAttach(videoSize, "mp4", true); + File src = new File(videoPath); + src.renameTo(destFile); + } else if (big != null && avatarBig[0] != null) { + File destFile = FileLoader.getInstance(chatActivity.getCurrentAccount()).getPathToAttach(big, true); + File src = FileLoader.getInstance(chatActivity.getCurrentAccount()).getPathToAttach(avatarBig[0], true); + src.renameTo(destFile); + } + chatActivity.getMessagesStorage().addDialogPhoto(user.id, ((TLRPC.TL_photos_photo) response).photo); + ArrayList users = new ArrayList<>(); + users.add(user); + chatActivity.getMessagesStorage().putUsersAndChats(users, null, false, true); + TLRPC.UserFull userFull = chatActivity.getMessagesController().getUserFull(userId); + userFull.profile_photo = photos_photo.photo; + chatActivity.getMessagesStorage().updateUserInfo(userFull, false); + CharSequence title = AndroidUtilities.replaceTags(LocaleController.getString("ApplyAvatarHintTitle", R.string.ApplyAvatarHintTitle)); + CharSequence subtitle = AndroidUtilities.replaceSingleTag(LocaleController.getString("ApplyAvatarHint", R.string.ApplyAvatarHint), () -> { + Bundle args = new Bundle(); + args.putLong("user_id", userId); + chatActivity.presentFragment(new ProfileActivity(args)); + }); + BulletinFactory.of(chatActivity).createUsersBulletin(Collections.singletonList(user), title, subtitle).show(); + } + })); + } else { + avatar[0] = smallSize.location; + avatarBig[0] = bigSize.location; + } + }); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/video/MediaCodecVideoConvertor.java b/TMessagesProj/src/main/java/org/telegram/messenger/video/MediaCodecVideoConvertor.java index fea4f5c051..ce3fd3478d 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/video/MediaCodecVideoConvertor.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/video/MediaCodecVideoConvertor.java @@ -14,6 +14,7 @@ import org.telegram.messenger.BuildVars; import org.telegram.messenger.FileLog; import org.telegram.messenger.MediaController; +import org.telegram.messenger.SharedConfig; import org.telegram.messenger.Utilities; import org.telegram.messenger.VideoEditedInfo; @@ -354,7 +355,9 @@ private boolean convertVideoInternal(String videoPath, File cacheFile, } avatarStartTime = 0; //this encoder work with bitrate better, prevent case when result video max 2MB - encoderName = "OMX.google.h264.encoder"; + if (originalBitrate >= 15_000_000) { + encoderName = "OMX.google.h264.encoder"; + } } else if (bitrate <= 0) { bitrate = 921600; } @@ 
-822,7 +825,7 @@ private boolean convertVideoInternal(String videoPath, File cacheFile, long timeLeft = System.currentTimeMillis() - time; if (BuildVars.LOGS_ENABLED) { - FileLog.d("compression completed time=" + timeLeft + " needCompress=" + needCompress + " w=" + resultWidth + " h=" + resultHeight + " bitrate=" + bitrate); + FileLog.d("compression completed time=" + timeLeft + " needCompress=" + needCompress + " w=" + resultWidth + " h=" + resultHeight + " bitrate=" + bitrate + " file size=" + cacheFile.length()); } return error; @@ -992,8 +995,12 @@ private static String createFragmentShader( final int dstHeight, boolean external) { final float kernelSize = Utilities.clamp((float) (Math.max(srcWidth, srcHeight) / (float) Math.max(dstHeight, dstWidth)) * 0.8f, 2f, 1f); - final int kernelRadius = (int) kernelSize; + int kernelRadius = (int) kernelSize; + if (kernelRadius > 1 && SharedConfig.deviceIsAverage()) { + kernelRadius = 1; + } FileLog.d("source size " + srcWidth + "x" + srcHeight + " dest size " + dstWidth + dstHeight + " kernelRadius " + kernelRadius); + if (external) { return "#extension GL_OES_EGL_image_external : require\n" + "precision mediump float;\n" + diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/video/TextureRenderer.java b/TMessagesProj/src/main/java/org/telegram/messenger/video/TextureRenderer.java index 57edfb8c94..4ad7c343c4 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/video/TextureRenderer.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/video/TextureRenderer.java @@ -14,6 +14,10 @@ import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; +import android.graphics.Path; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffXfermode; +import android.graphics.RectF; import android.graphics.SurfaceTexture; import android.graphics.Typeface; import android.opengl.GLES11Ext; @@ -52,6 +56,7 @@ import org.telegram.ui.Components.Paint.Views.EditTextOutline; import org.telegram.ui.Components.Paint.Views.PaintTextOptionsView; import org.telegram.ui.Components.RLottieDrawable; +import org.telegram.ui.Components.Rect; import java.io.File; import java.io.RandomAccessFile; @@ -143,6 +148,8 @@ public class TextureRenderer { private boolean isPhoto; private boolean firstFrame = true; + Path path; + Paint xRefPaint; public TextureRenderer(MediaController.SavedFilterState savedFilterState, String image, String paint, ArrayList entities, MediaController.CropState cropState, int w, int h, int originalWidth, int originalHeight, int rotation, float fps, boolean photo) { isPhoto = photo; @@ -404,6 +411,7 @@ public void drawFrame(SurfaceTexture st) { VideoEditedInfo.MediaEntity entity = mediaEntities.get(a); if (entity.ptr != 0) { RLottieDrawable.getFrame(entity.ptr, (int) entity.currentFrame, stickerBitmap, 512, 512, stickerBitmap.getRowBytes(), true); + applyRoundRadius(entity, stickerBitmap); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, stickerTexture[0]); GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, stickerBitmap, 0); entity.currentFrame += entity.framesPerDraw; @@ -420,15 +428,21 @@ public void drawFrame(SurfaceTexture st) { currentFrame--; } Bitmap frameBitmap = entity.animatedFileDrawable.getBackgroundBitmap(); - if (stickerCanvas == null && stickerBitmap != null) { - stickerCanvas = new Canvas(stickerBitmap); - } - if (stickerBitmap != null && frameBitmap != null) { - stickerBitmap.eraseColor(Color.TRANSPARENT); - stickerCanvas.drawBitmap(frameBitmap, 0, 0, null); - 
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, stickerTexture[0]); - GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, stickerBitmap, 0); - drawTexture(false, stickerTexture[0], entity.x, entity.y, entity.width, entity.height, entity.rotation, (entity.subType & 2) != 0); + if (frameBitmap != null) { + if (stickerCanvas == null && stickerBitmap != null) { + stickerCanvas = new Canvas(stickerBitmap); + if (stickerBitmap.getHeight() != frameBitmap.getHeight() || stickerBitmap.getWidth() != frameBitmap.getWidth()) { + stickerCanvas.scale(stickerBitmap.getWidth() / (float) frameBitmap.getWidth(), stickerBitmap.getHeight() / (float) frameBitmap.getHeight()); + } + } + if (stickerBitmap != null) { + stickerBitmap.eraseColor(Color.TRANSPARENT); + stickerCanvas.drawBitmap(frameBitmap, 0, 0, null); + applyRoundRadius(entity, stickerBitmap); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, stickerTexture[0]); + GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, stickerBitmap, 0); + drawTexture(false, stickerTexture[0], entity.x, entity.y, entity.width, entity.height, entity.rotation, (entity.subType & 2) != 0); + } } } else if (entity.view != null && entity.canvas != null && entity.bitmap != null) { entity.bitmap.eraseColor(Color.TRANSPARENT); @@ -438,6 +452,7 @@ public void drawFrame(SurfaceTexture st) { EditTextEffects editTextEffects = (EditTextEffects) entity.view; editTextEffects.incrementFrames(currentFrame - lastFrame); entity.view.draw(entity.canvas); + applyRoundRadius(entity, entity.bitmap); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, stickerTexture[0]); GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, entity.bitmap, 0); drawTexture(false, stickerTexture[0], entity.x, entity.y, entity.width, entity.height, entity.rotation, (entity.subType & 2) != 0); @@ -453,6 +468,29 @@ public void drawFrame(SurfaceTexture st) { GLES20.glFinish(); } + private void applyRoundRadius(VideoEditedInfo.MediaEntity entity, Bitmap stickerBitmap) { + if (stickerBitmap == null || entity == null || entity.roundRadius == 0) { + return; + } + if (entity.roundRadiusCanvas == null) { + entity.roundRadiusCanvas = new Canvas(stickerBitmap); + } + if (path == null) { + path = new Path(); + } + if (xRefPaint == null) { + xRefPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + xRefPaint.setColor(0xff000000); + xRefPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR)); + } + float rad = Math.min(stickerBitmap.getWidth(), stickerBitmap.getHeight()) * entity.roundRadius; + path.rewind(); + RectF rect = new RectF(0, 0, stickerBitmap.getWidth(), stickerBitmap.getHeight()); + path.addRoundRect(rect, rad, rad, Path.Direction.CCW); + path.toggleInverseFillType(); + entity.roundRadiusCanvas.drawPath(path, xRefPaint); + } + private void drawTexture(boolean bind, int texture) { drawTexture(bind, texture, -10000, -10000, -10000, -10000, 0, false); } @@ -654,7 +692,7 @@ public void surfaceCreated() { entity.ptr = RLottieDrawable.create(entity.text, null, 512, 512, entity.metadata, false, null, false, 0); entity.framesPerDraw = entity.metadata[1] / videoFps; } else if ((entity.subType & 4) != 0) { - entity.animatedFileDrawable = new AnimatedFileDrawable(new File(entity.text), true, 0, null, null, null, 0, UserConfig.selectedAccount, true, 512, 512, null); + entity.animatedFileDrawable = new AnimatedFileDrawable(new File(entity.text), true, 0, 0, null, null, null, 0, UserConfig.selectedAccount, true, 512, 512, null); entity.framesPerDraw = videoFps / 30f; entity.currentFrame = 0; } else { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/Instance.java 
b/TMessagesProj/src/main/java/org/telegram/messenger/voip/Instance.java index 6d3451ebcb..1015db0152 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/Instance.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/Instance.java @@ -1,6 +1,6 @@ package org.telegram.messenger.voip; -import android.os.Build; +import com.google.android.exoplayer2.util.Util; import org.json.JSONException; import org.json.JSONObject; @@ -15,7 +15,7 @@ public final class Instance { - public static final List AVAILABLE_VERSIONS = Build.VERSION.SDK_INT >= 18 ? Arrays.asList("4.1.2", "4.0.2", "4.0.1", "4.0.0", "3.0.0", "2.7.7", "2.4.4") : Arrays.asList("2.4.4"); + public static final List AVAILABLE_VERSIONS = Arrays.asList("2.4.4", "2.7.7", "5.0.0", "6.0.0", "7.0.0", "8.0.0", "9.0.0", "10.0.0", "11.0.0"); public static final int AUDIO_STATE_MUTED = 0; public static final int AUDIO_STATE_ACTIVE = 1; @@ -205,6 +205,7 @@ public static final class Endpoint { public final String username; public final String password; public final boolean tcp; + public int reflectorId; public Endpoint(boolean isRtc, long id, String ipv4, String ipv6, int port, int type, byte[] peerTag, boolean turn, boolean stun, String username, String password, boolean tcp) { this.isRtc = isRtc; @@ -216,8 +217,16 @@ public Endpoint(boolean isRtc, long id, String ipv4, String ipv6, int port, int this.peerTag = peerTag; this.turn = turn; this.stun = stun; - this.username = username; - this.password = password; + if (isRtc) { + this.username = username; + this.password = password; + } else if (peerTag != null) { + this.username = "reflector"; + this.password = Util.toHexString(peerTag); + } else { + this.username = null; + this.password = null; + } this.tcp = tcp; } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPController.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPController.java index 61914cfeea..ffdd629b97 100755 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPController.java @@ -193,11 +193,11 @@ public static String getVersion() { } private String getLogFilePath(String name) { - return new File(EnvUtil.getTelegramPath(),"logs/" + name + ".log").getPath(); + return new File(ApplicationLoader.applicationContext.getCacheDir(),"logs/" + name + ".log").getPath(); } private String getLogFilePath(long callID){ - return new File(EnvUtil.getTelegramPath(),"logs/" + callID + ".log").getPath(); + return new File(ApplicationLoader.applicationContext.getCacheDir(),"logs/" + callID + ".log").getPath(); } public String getDebugLog() { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java index 2e62156517..dfead96df0 100755 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java @@ -43,6 +43,8 @@ import android.hardware.SensorEventListener; import android.hardware.SensorManager; import android.media.AudioAttributes; +import android.media.AudioDeviceCallback; +import android.media.AudioDeviceInfo; import android.media.AudioFormat; import android.media.AudioManager; import android.media.AudioTrack; @@ -57,7 +59,9 @@ import android.net.Uri; import android.os.Build; import android.os.Bundle; +import android.os.Handler; import android.os.IBinder; +import android.os.Looper; import 
android.os.PowerManager; import android.os.SystemClock; import android.os.Vibrator; @@ -71,6 +75,7 @@ import android.text.SpannableString; import android.text.TextUtils; import android.text.style.ForegroundColorSpan; +import android.util.Log; import android.util.LruCache; import android.view.KeyEvent; import android.view.View; @@ -132,6 +137,8 @@ import java.math.BigInteger; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -325,6 +332,9 @@ public void run() { AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); am.abandonAudioFocus(VoIPService.this); am.unregisterMediaButtonEventReceiver(new ComponentName(VoIPService.this, VoIPMediaButtonReceiver.class)); + if (audioDeviceCallback != null) { + am.unregisterAudioDeviceCallback(audioDeviceCallback); + } if (!USE_CONNECTION_SERVICE && sharedInstance == null) { if (isBtHeadsetConnected) { am.stopBluetoothSco(); @@ -381,6 +391,8 @@ public void onServiceConnected(int profile, BluetoothProfile proxy) { } }; + private AudioDeviceCallback audioDeviceCallback; + private BroadcastReceiver receiver = new BroadcastReceiver() { @Override @@ -1229,8 +1241,14 @@ public void setSinks(VideoSink local, VideoSink remote) { } public void setSinks(VideoSink local, boolean screencast, VideoSink remote) { - localSink[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA].setTarget(local); - remoteSink[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA].setTarget(remote); + ProxyVideoSink localSink = this.localSink[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA]; + ProxyVideoSink remoteSink = this.remoteSink[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA]; + if (localSink != null) { + localSink.setTarget(local); + } + if (remoteSink != null) { + remoteSink.setTarget(remote); + } } public void setLocalSink(VideoSink local, boolean screencast) { @@ -2355,9 +2373,23 @@ private void initiateActualEncryptedCall() { final boolean forceTcp = preferences.getBoolean("dbg_force_tcp_in_calls", false); final int endpointType = forceTcp ? 
Instance.ENDPOINT_TYPE_TCP_RELAY : Instance.ENDPOINT_TYPE_UDP_RELAY; final Instance.Endpoint[] endpoints = new Instance.Endpoint[privateCall.connections.size()]; + ArrayList reflectorIds = new ArrayList<>(); for (int i = 0; i < endpoints.length; i++) { final TLRPC.PhoneConnection connection = privateCall.connections.get(i); endpoints[i] = new Instance.Endpoint(connection instanceof TLRPC.TL_phoneConnectionWebrtc, connection.id, connection.ip, connection.ipv6, connection.port, endpointType, connection.peer_tag, connection.turn, connection.stun, connection.username, connection.password, connection.tcp); + if (connection instanceof TLRPC.TL_phoneConnection) { + reflectorIds.add(((TLRPC.TL_phoneConnection) connection).id); + } + } + if (!reflectorIds.isEmpty()) { + Collections.sort(reflectorIds); + HashMap reflectorIdMapping = new HashMap<>(); + for (int i = 0; i < reflectorIds.size(); i++) { + reflectorIdMapping.put(reflectorIds.get(i), i + 1); + } + for (int i = 0; i < endpoints.length; i++) { + endpoints[i].reflectorId = reflectorIdMapping.getOrDefault(endpoints[i].id, 0); + } } if (forceTcp) { AndroidUtilities.runOnUIThread(() -> Toast.makeText(VoIPService.this, "This call uses TCP which will degrade its quality.", Toast.LENGTH_SHORT).show()); @@ -2396,6 +2428,7 @@ private void initiateActualEncryptedCall() { return; } NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.webRtcMicAmplitudeEvent, levels[0]); + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.webRtcSpeakerAmplitudeEvent, levels[1]); }); tgVoip[CAPTURE_DEVICE_CAMERA].setOnStateUpdatedListener(this::onConnectionStateChanged); tgVoip[CAPTURE_DEVICE_CAMERA].setOnSignalBarsUpdatedListener(this::onSignalBarCountChanged); @@ -3062,7 +3095,14 @@ public void onDestroy() { } am.abandonAudioFocus(this); } - am.unregisterMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class)); + try { + am.unregisterMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class)); + } catch (Exception e) { + FileLog.e(e); + } + if (audioDeviceCallback != null) { + am.unregisterAudioDeviceCallback(audioDeviceCallback); + } if (hasAudioFocus) { am.abandonAudioFocus(this); } @@ -3194,6 +3234,9 @@ public void acceptIncomingCall() { } public void declineIncomingCall(int reason, final Runnable onDone) { + if (groupCall != null) { + stopScreenCapture(); + } stopRinging(); callDiscardReason = reason; if (currentState == STATE_REQUESTING) { @@ -3464,33 +3507,31 @@ public void onCreate() { registerReceiver(receiver, filter); fetchBluetoothDeviceName(); - am.registerMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class)); - - if (!USE_CONNECTION_SERVICE && btAdapter != null && btAdapter.isEnabled()) { + if (audioDeviceCallback == null) { try { - MediaRouter mr = (MediaRouter) getSystemService(Context.MEDIA_ROUTER_SERVICE); - if (Build.VERSION.SDK_INT < 24) { - int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET); - updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED); - for (StateListener l : stateListeners) { - l.onAudioSettingsChanged(); + audioDeviceCallback = new AudioDeviceCallback() { + @Override + public void onAudioDevicesAdded(AudioDeviceInfo[] addedDevices) { + checkUpdateBluetoothHeadset(); } - } else { - MediaRouter.RouteInfo ri = mr.getSelectedRoute(MediaRouter.ROUTE_TYPE_LIVE_AUDIO); - if (ri.getDeviceType() == MediaRouter.RouteInfo.DEVICE_TYPE_BLUETOOTH) { - int headsetState = 
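/*
 * Sketch of the reflector-id assignment added above: both call parties receive the same set of
 * relay connections, so sorting the TL_phoneConnection ids and numbering them from 1 presumably
 * yields identical, stable reflector ids on both sides without extra signalling.
 * `relayConnectionIds` below is a hypothetical stand-in for the ids collected from
 * privateCall.connections.
 *
 *   List<Long> ids = new ArrayList<>(relayConnectionIds);
 *   Collections.sort(ids);
 *   Map<Long, Integer> reflectorIdByConnection = new HashMap<>();
 *   for (int i = 0; i < ids.size(); i++) {
 *       reflectorIdByConnection.put(ids.get(i), i + 1);   // 1-based; 0 means "not a reflector"
 *   }
 *   // endpoints[k].reflectorId = reflectorIdByConnection.getOrDefault(endpoints[k].id, 0);
 */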
btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET); - updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED); - for (StateListener l : stateListeners) { - l.onAudioSettingsChanged(); - } - } else { - updateBluetoothHeadsetState(false); + + @Override + public void onAudioDevicesRemoved(AudioDeviceInfo[] removedDevices) { + checkUpdateBluetoothHeadset(); } - } + }; } catch (Throwable e) { + //java.lang.NoClassDefFoundError on some devices FileLog.e(e); + audioDeviceCallback = null; } } + if (audioDeviceCallback != null) { + am.registerAudioDeviceCallback(audioDeviceCallback, new Handler(Looper.getMainLooper())); + } + am.registerMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class)); + + checkUpdateBluetoothHeadset(); } catch (Exception x) { if (BuildVars.LOGS_ENABLED) { FileLog.e("error initializing voip controller", x); @@ -3511,6 +3552,34 @@ public void onCreate() { } } + private void checkUpdateBluetoothHeadset() { + if (!USE_CONNECTION_SERVICE && btAdapter != null && btAdapter.isEnabled()) { + try { + MediaRouter mr = (MediaRouter) getSystemService(Context.MEDIA_ROUTER_SERVICE); + if (Build.VERSION.SDK_INT < 24) { + int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET); + updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED); + for (StateListener l : stateListeners) { + l.onAudioSettingsChanged(); + } + } else { + MediaRouter.RouteInfo ri = mr.getSelectedRoute(MediaRouter.ROUTE_TYPE_LIVE_AUDIO); + if (ri.getDeviceType() == MediaRouter.RouteInfo.DEVICE_TYPE_BLUETOOTH) { + int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET); + updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED); + for (StateListener l : stateListeners) { + l.onAudioSettingsChanged(); + } + } else { + updateBluetoothHeadsetState(false); + } + } + } catch (Throwable e) { + FileLog.e(e); + } + } + } + private void loadResources() { if (NekoConfig.useMediaStreamInVoip.Bool()) { currentStreamType = AudioManager.STREAM_MUSIC; @@ -3770,6 +3839,8 @@ private void updateBluetoothHeadsetState(boolean connected) { } else { bluetoothScoActive = false; bluetoothScoConnecting = false; + + am.setBluetoothScoOn(false); } for (StateListener l : stateListeners) { l.onAudioSettingsChanged(); diff --git a/TMessagesProj/src/main/java/org/telegram/tgnet/ConnectionsManager.java b/TMessagesProj/src/main/java/org/telegram/tgnet/ConnectionsManager.java index 09377112c6..1d824044d8 100644 --- a/TMessagesProj/src/main/java/org/telegram/tgnet/ConnectionsManager.java +++ b/TMessagesProj/src/main/java/org/telegram/tgnet/ConnectionsManager.java @@ -35,6 +35,9 @@ import java.net.InetAddress; import java.net.InterfaceAddress; import java.net.NetworkInterface; +import java.net.SocketTimeoutException; +import java.net.URL; +import java.net.URLConnection; import java.util.ArrayList; import java.util.Collections; import java.util.Enumeration; @@ -50,6 +53,8 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import javax.net.ssl.SSLException; + import cn.hutool.core.util.StrUtil; import tw.nekomimi.nekogram.NekoConfig; import tw.nekomimi.nekogram.parts.ProxySwitcher; @@ -305,90 +310,99 @@ public int sendRequest(final TLObject object, final RequestDelegate onComplete, return sendRequest(object, onComplete, null, onQuickAck, onWriteToSocket, flags, datacenterId, connetionType, immediate); } + public int sendRequestSync(final TLObject object, final 
RequestDelegate onComplete, final QuickAckDelegate onQuickAck, final WriteToSocketDelegate onWriteToSocket, final int flags, final int datacenterId, final int connetionType, final boolean immediate) { + final int requestToken = lastRequestToken.getAndIncrement(); + sendRequestInternal(object, onComplete, null, onQuickAck, onWriteToSocket, flags, datacenterId, connetionType, immediate, requestToken); + return requestToken; + } + public int sendRequest(final TLObject object, final RequestDelegate onComplete, final RequestDelegateTimestamp onCompleteTimestamp, final QuickAckDelegate onQuickAck, final WriteToSocketDelegate onWriteToSocket, final int flags, final int datacenterId, final int connetionType, final boolean immediate) { final int requestToken = lastRequestToken.getAndIncrement(); Utilities.stageQueue.postRunnable(() -> { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("send request " + object + " with token = " + requestToken); - } - try { - NativeByteBuffer buffer = new NativeByteBuffer(object.getObjectSize()); - object.serializeToStream(buffer); - object.freeResources(); + sendRequestInternal(object, onComplete, onCompleteTimestamp, onQuickAck, onWriteToSocket, flags, datacenterId, connetionType, immediate, requestToken); + }); + return requestToken; + } - long startRequestTime = 0; - if (BuildVars.DEBUG_PRIVATE_VERSION && BuildVars.LOGS_ENABLED) { - startRequestTime = System.currentTimeMillis(); - } - long finalStartRequestTime = startRequestTime; - native_sendRequest(currentAccount, buffer.address, (response, errorCode, errorText, networkType, timestamp, requestMsgId) -> { - try { - TLObject resp = null; - TLRPC.TL_error error = null; - - if (response != 0) { - NativeByteBuffer buff = NativeByteBuffer.wrap(response); - buff.reused = true; - try { - resp = object.deserializeResponse(buff, buff.readInt32(true), true); - } catch (Exception e2) { - if (BuildVars.DEBUG_PRIVATE_VERSION) { - throw e2; - } - FileLog.fatal(e2); - return; - } - } else if (errorText != null) { - error = new TLRPC.TL_error(); - error.code = errorCode; - error.text = errorText; - if (BuildVars.LOGS_ENABLED) { - FileLog.e(object + " got error " + error.code + " " + error.text); - } - if (NaConfig.INSTANCE.getShowRPCError().Bool()) { - ErrorDatabase.showErrorToast(object, errorText); - } - } - if (BuildVars.DEBUG_PRIVATE_VERSION && !getUserConfig().isClientActivated() && error != null && error.code == 400 && Objects.equals(error.text, "CONNECTION_NOT_INITED")) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("Cleanup keys for " + currentAccount + " because of CONNECTION_NOT_INITED"); + private void sendRequestInternal(TLObject object, RequestDelegate onComplete, RequestDelegateTimestamp onCompleteTimestamp, QuickAckDelegate onQuickAck, WriteToSocketDelegate onWriteToSocket, int flags, int datacenterId, int connetionType, boolean immediate, int requestToken) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("send request " + object + " with token = " + requestToken); + } + try { + NativeByteBuffer buffer = new NativeByteBuffer(object.getObjectSize()); + object.serializeToStream(buffer); + object.freeResources(); + + long startRequestTime = 0; + if (BuildVars.DEBUG_PRIVATE_VERSION && BuildVars.LOGS_ENABLED) { + startRequestTime = System.currentTimeMillis(); + } + long finalStartRequestTime = startRequestTime; + native_sendRequest(currentAccount, buffer.address, (response, errorCode, errorText, networkType, timestamp, requestMsgId) -> { + try { + TLObject resp = null; + TLRPC.TL_error error = null; + + if (response != 0) { 
+ NativeByteBuffer buff = NativeByteBuffer.wrap(response); + buff.reused = true; + try { + resp = object.deserializeResponse(buff, buff.readInt32(true), true); + } catch (Exception e2) { + if (BuildVars.DEBUG_PRIVATE_VERSION) { + throw e2; } - cleanup(true); - sendRequest(object, onComplete, onCompleteTimestamp, onQuickAck, onWriteToSocket, flags, datacenterId, connetionType, immediate); + FileLog.fatal(e2); return; } - if (resp != null) { - resp.networkType = networkType; + } else if (errorText != null) { + error = new TLRPC.TL_error(); + error.code = errorCode; + error.text = errorText; + if (BuildVars.LOGS_ENABLED) { + FileLog.e(object + " got error " + error.code + " " + error.text); } + } + if (BuildVars.DEBUG_PRIVATE_VERSION && !getUserConfig().isClientActivated() && error != null && error.code == 400 && Objects.equals(error.text, "CONNECTION_NOT_INITED")) { if (BuildVars.LOGS_ENABLED) { - FileLog.d("java received " + resp + " error = " + error); + FileLog.d("Cleanup keys for " + currentAccount + " because of CONNECTION_NOT_INITED"); } - FileLog.dumpResponseAndRequest(object, resp, error, requestMsgId, finalStartRequestTime, requestToken); - final TLObject finalResponse = resp; - final TLRPC.TL_error finalError = error; - Utilities.stageQueue.postRunnable(() -> { - if (onComplete != null) { - onComplete.run(finalResponse, finalError); - } else if (onCompleteTimestamp != null) { - onCompleteTimestamp.run(finalResponse, finalError, timestamp); - } - if (finalResponse != null) { - finalResponse.freeResources(); - } - }); - } catch (Exception e) { - FileLog.e(e); + cleanup(true); + sendRequest(object, onComplete, onCompleteTimestamp, onQuickAck, onWriteToSocket, flags, datacenterId, connetionType, immediate); + return; } - }, onQuickAck, onWriteToSocket, flags, datacenterId, connetionType, immediate, requestToken); - } catch (Exception e) { - FileLog.e(e); - } - }); - return requestToken; + if (resp != null) { + resp.networkType = networkType; + } + if (BuildVars.LOGS_ENABLED) { + FileLog.d("java received " + resp + " error = " + error); + } + FileLog.dumpResponseAndRequest(object, resp, error, requestMsgId, finalStartRequestTime, requestToken); + final TLObject finalResponse = resp; + final TLRPC.TL_error finalError = error; + Utilities.stageQueue.postRunnable(() -> { + if (onComplete != null) { + onComplete.run(finalResponse, finalError); + } else if (onCompleteTimestamp != null) { + onCompleteTimestamp.run(finalResponse, finalError, timestamp); + } + if (finalResponse != null) { + finalResponse.freeResources(); + } + }); + } catch (Exception e) { + FileLog.e(e); + } + }, onQuickAck, onWriteToSocket, flags, datacenterId, connetionType, immediate, requestToken); + } catch (Exception e) { + FileLog.e(e); + } } public void cancelRequest(int token, boolean notifyServer) { - native_cancelRequest(currentAccount, token, notifyServer); + Utilities.stageQueue.postRunnable(() -> { + native_cancelRequest(currentAccount, token, notifyServer); + }); } public void cleanup(boolean resetKeys) { @@ -396,7 +410,9 @@ public void cleanup(boolean resetKeys) { } public void cancelRequestsForGuid(int guid) { - native_cancelRequestsForGuid(currentAccount, guid); + Utilities.stageQueue.postRunnable(() -> { + native_cancelRequestsForGuid(currentAccount, guid); + }); } public void bindRequestToGuid(int requestToken, int guid) { @@ -419,7 +435,11 @@ public void setUserId(long id) { } public void checkConnection() { - native_setIpStrategy(currentAccount, getIpStrategy()); + byte selectedStrategy = getIpStrategy(); 
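/*
 * A note on the cancelRequest/cancelRequestsForGuid hunks above: the native cancel is now
 * posted to Utilities.stageQueue, the same single-threaded queue sendRequestInternal runs on,
 * so a cancel issued right after sendRequest presumably can no longer reach the native layer
 * before the request itself does. Minimal illustration (sendNative/cancelNative and `token`
 * are hypothetical; the only assumption is that stageQueue is a FIFO with a single consumer):
 *
 *   ExecutorService stage = Executors.newSingleThreadExecutor();
 *   stage.execute(() -> sendNative(token));     // runs first
 *   stage.execute(() -> cancelNative(token));   // always observes the request already sent
 */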
+ if (BuildVars.LOGS_ENABLED) { + FileLog.d("selected ip strategy " + selectedStrategy); + } + native_setIpStrategy(currentAccount, selectedStrategy); native_setNetworkAvailable(currentAccount, ApplicationLoader.isNetworkOnline(), ApplicationLoader.getCurrentNetworkType(), ApplicationLoader.isConnectionSlow()); } @@ -448,7 +468,7 @@ public void init(int version, int layer, int apiId, String deviceModel, String s packageId = ""; } - native_init(currentAccount, version, layer, apiId, deviceModel, systemVersion, appVersion, langCode, systemLangCode, configPath, logPath, regId, cFingerprint, installer, packageId, timezoneOffset, userId, enablePushConnection, ApplicationLoader.isNetworkOnline(), ApplicationLoader.getCurrentNetworkType()); + native_init(currentAccount, version, layer, apiId, deviceModel, systemVersion, appVersion, langCode, systemLangCode, configPath, logPath, regId, cFingerprint, installer, packageId, timezoneOffset, userId, enablePushConnection, ApplicationLoader.isNetworkOnline(), ApplicationLoader.getCurrentNetworkType(), SharedConfig.measureDevicePerformanceClass()); Utilities.stageQueue.postRunnable(() -> { @@ -463,7 +483,6 @@ public void init(int version, int layer, int apiId, String deviceModel, String s checkConnection(); }); - } public static void setLangCode(String langCode) { @@ -827,7 +846,7 @@ public static void setProxySettings(boolean enabled, String address, int port, S public static native int native_getConnectionState(int currentAccount); public static native void native_setUserId(int currentAccount, long id); - public static native void native_init(int currentAccount, int version, int layer, int apiId, String deviceModel, String systemVersion, String appVersion, String langCode, String systemLangCode, String configPath, String logPath, String regId, String cFingerprint, String installer, String packageId, int timezoneOffset, long userId, boolean enablePushConnection, boolean hasNetwork, int networkType); + public static native void native_init(int currentAccount, int version, int layer, int apiId, String deviceModel, String systemVersion, String appVersion, String langCode, String systemLangCode, String configPath, String logPath, String regId, String cFingerprint, String installer, String packageId, int timezoneOffset, long userId, boolean enablePushConnection, boolean hasNetwork, int networkType, int performanceClass); public static native void native_setProxySettings(int currentAccount, String address, int port, String username, String password, String secret); public static native void native_setLangCode(int currentAccount, String langCode); diff --git a/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java b/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java index d563e97bec..1ded1c9af0 100644 --- a/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java +++ b/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java @@ -8,6 +8,7 @@ package org.telegram.tgnet; +import android.graphics.Path; import android.graphics.drawable.BitmapDrawable; import android.os.Build; import android.text.TextUtils; @@ -20,7 +21,7 @@ import org.telegram.messenger.FileLog; import org.telegram.messenger.ImageLoader; import org.telegram.messenger.MessageObject; -import org.telegram.messenger.SharedConfig; +import org.telegram.messenger.SvgHelper; import org.telegram.messenger.Utilities; import java.util.ArrayList; @@ -75,7 +76,7 @@ public class TLRPC { public static final int MESSAGE_FLAG_HAS_BOT_ID = 0x00000800; public static final int MESSAGE_FLAG_EDITED = 
0x00008000; - public static final int LAYER = 151; + public static final int LAYER = 156; public static class TL_stats_megagroupStats extends TLObject { public static int constructor = 0xef7ff916; @@ -401,11 +402,11 @@ public static class TL_account_emailVerifiedLogin extends account_EmailVerified public static int constructor = 0xe1bb0d61; public String email; - public TL_auth_sentCode sent_code; + public auth_SentCode sent_code; public void readParams(AbstractSerializedData stream, boolean exception) { email = stream.readString(exception); - sent_code = TL_auth_sentCode.TLdeserialize(stream, stream.readInt32(exception), exception); + sent_code = auth_SentCode.TLdeserialize(stream, stream.readInt32(exception), exception); } public void serializeToStream(AbstractSerializedData stream) { @@ -432,6 +433,13 @@ public static class TL_chatBannedRights extends TLObject { public boolean invite_users; public boolean pin_messages; public boolean manage_topics; + public boolean send_photos; + public boolean send_videos; + public boolean send_roundvideos; + public boolean send_audios; + public boolean send_voices; + public boolean send_docs; + public boolean send_plain; public int until_date; public static TL_chatBannedRights TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { @@ -462,11 +470,36 @@ public void readParams(AbstractSerializedData stream, boolean exception) { invite_users = (flags & 32768) != 0; pin_messages = (flags & 131072) != 0; manage_topics = (flags & 262144) != 0; + send_photos = (flags & 524288) != 0; + send_videos = (flags & 1048576) != 0; + send_roundvideos = (flags & 2097152) != 0; + send_audios = (flags & 4194304) != 0; + send_voices = (flags & 8388608) != 0; + send_docs = (flags & 16777216) != 0; + send_plain = (flags & 33554432) != 0; + if (send_media) { + send_photos = true; + send_videos = true; + send_roundvideos = true; + send_audios = true; + send_voices = true; + send_docs = true; + } until_date = stream.readInt32(exception); } public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); + if (send_photos && send_videos && send_roundvideos && send_audios && send_voices && send_docs) { + send_media = true; + } else { + send_media = false; + } + if (send_plain && send_media) { + send_messages = true; + } else { + send_messages = false; + } flags = view_messages ? (flags | 1) : (flags &~ 1); flags = send_messages ? (flags | 2) : (flags &~ 2); flags = send_media ? (flags | 4) : (flags &~ 4); @@ -480,6 +513,13 @@ public void serializeToStream(AbstractSerializedData stream) { flags = invite_users ? (flags | 32768) : (flags &~ 32768); flags = pin_messages ? (flags | 131072) : (flags &~ 131072); flags = manage_topics ? (flags | 262144) : (flags &~ 262144); + flags = send_photos ? (flags | 524288) : (flags &~ 524288); + flags = send_videos ? (flags | 1048576) : (flags &~ 1048576); + flags = send_roundvideos ? (flags | 2097152) : (flags &~ 2097152); + flags = send_audios ? (flags | 4194304) : (flags &~ 4194304); + flags = send_voices ? (flags | 8388608) : (flags &~ 8388608); + flags = send_docs ? (flags | 16777216) : (flags &~ 16777216); + flags = send_plain ? 
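/*
 * Granular banned-rights bits added alongside the LAYER = 156 bump (values from the hunk above):
 *
 *   send_photos      = 1 << 19 (524288)     send_audios = 1 << 22 (4194304)
 *   send_videos      = 1 << 20 (1048576)    send_voices = 1 << 23 (8388608)
 *   send_roundvideos = 1 << 21 (2097152)    send_docs   = 1 << 24 (16777216)
 *   send_plain       = 1 << 25 (33554432)
 *
 * Backward compatibility works in both directions: readParams expands a legacy send_media bit
 * (1 << 2) into all six media sub-bits, and serializeToStream collapses them back:
 *
 *   boolean allMedia = send_photos && send_videos && send_roundvideos
 *           && send_audios && send_voices && send_docs;
 *   send_media = allMedia;                    // legacy aggregate media bit
 *   send_messages = send_plain && allMedia;   // legacy "can send messages" bit
 */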
(flags | 33554432) : (flags &~ 33554432); stream.writeInt32(flags); stream.writeInt32(until_date); } @@ -636,6 +676,10 @@ public static abstract class ChatPhoto extends TLObject { public BitmapDrawable strippedBitmap; public static ChatPhoto TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + return TLdeserialize(stream, constructor, exception, true); + } + + public static ChatPhoto TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception, boolean allowStripedThumb) { ChatPhoto result = null; switch (constructor) { case 0x1c6e1c11: @@ -670,8 +714,11 @@ public static ChatPhoto TLdeserialize(AbstractSerializedData stream, int constru public static class TL_chatPhoto extends ChatPhoto { public static int constructor = 0x1c6e1c11; - public void readParams(AbstractSerializedData stream, boolean exception) { + readParams(stream, exception, true); + } + + public void readParams(AbstractSerializedData stream, boolean exception, boolean allowStripedThumbs) { flags = stream.readInt32(exception); has_video = (flags & 1) != 0; photo_id = stream.readInt64(exception); @@ -686,7 +733,7 @@ public void readParams(AbstractSerializedData stream, boolean exception) { photo_big.volume_id = -photo_id; photo_big.local_id = 'c'; - if (stripped_thumb != null && Build.VERSION.SDK_INT >= 21) { + if (allowStripedThumbs && stripped_thumb != null && Build.VERSION.SDK_INT >= 21) { try { strippedBitmap = new BitmapDrawable(ImageLoader.getStrippedPhotoBitmap(stripped_thumb, "b")); } catch (Throwable e) { @@ -1271,9 +1318,12 @@ public void serializeToStream(AbstractSerializedData stream) { } public static class TL_premiumSubscriptionOption extends TLObject { - public static int constructor = 0xb6f11ebe; + public static int constructor = 0x5f2d1df2; public int flags; + public boolean current; + public String transaction; + public boolean can_purchase_upgrade; public int months; public String currency; public long amount; @@ -1281,18 +1331,62 @@ public static class TL_premiumSubscriptionOption extends TLObject { public String store_product; public static TL_premiumSubscriptionOption TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { - if (TL_premiumSubscriptionOption.constructor != constructor) { - if (exception) { - throw new RuntimeException(String.format("can't parse magic %x in TL_premiumSubscriptionOption", constructor)); - } else { - return null; - } + TL_premiumSubscriptionOption result = null; + switch (constructor) { + case 0x5f2d1df2: + result = new TL_premiumSubscriptionOption(); + break; + case 0xb6f11ebe: + result = new TL_premiumSubscriptionOption_layer151(); + break; + } + if (result == null && exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_premiumSubscriptionOption", constructor)); + } + if (result != null) { + result.readParams(stream, exception); } - TL_premiumSubscriptionOption result = new TL_premiumSubscriptionOption(); - result.readParams(stream, exception); return result; } + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + current = (flags & 2) != 0; + if ((flags & 8) != 0) { + transaction = stream.readString(exception); + } + can_purchase_upgrade = (flags & 4) != 0; + months = stream.readInt32(exception); + currency = stream.readString(exception); + amount = stream.readInt64(exception); + bot_url = stream.readString(exception); + if ((flags & 1) != 0) { + store_product = stream.readString(exception); + } + } + + 
public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = current ? (flags | 2) : (flags &~ 2); + flags = can_purchase_upgrade ? (flags | 4) : (flags &~ 4); + stream.writeInt32(flags); + if ((flags & 8) != 0) { + stream.writeString(transaction); + } + stream.writeInt32(months); + stream.writeString(currency); + stream.writeInt64(amount); + stream.writeString(bot_url); + if ((flags & 1) != 0) { + stream.writeString(store_product); + } + } + } + + public static class TL_premiumSubscriptionOption_layer151 extends TL_premiumSubscriptionOption { + public static int constructor = 0xb6f11ebe; + + public void readParams(AbstractSerializedData stream, boolean exception) { flags = stream.readInt32(exception); months = stream.readInt32(exception); @@ -2421,9 +2515,6 @@ public void readParams(AbstractSerializedData stream, boolean exception) { record_video_active = (flags & 2048) != 0; rtmp_stream = (flags & 4096) != 0; listeners_hidden = (flags & 8192) != 0; - if (SharedConfig.forceRtmpStream) { - rtmp_stream = true; - } id = stream.readInt64(exception); access_hash = stream.readInt64(exception); participants_count = stream.readInt32(exception); @@ -2594,13 +2685,18 @@ public static abstract class MessagePeerReaction extends TLObject { public boolean unread; public Peer peer_id; public Reaction reaction; + public int date; + public boolean dateIsSeen; //custom public static MessagePeerReaction TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { MessagePeerReaction result = null; switch (constructor) { - case 0xb156fe9c: + case 0x8c79b63c: result = new TL_messagePeerReaction(); break; + case 0xb156fe9c: + result = new TL_messagePeerReaction_layer154(); + break; case 0x51b67eff: result = new TL_messagePeerReaction_layer144(); break; @@ -2619,6 +2715,29 @@ public static MessagePeerReaction TLdeserialize(AbstractSerializedData stream, i } public static class TL_messagePeerReaction extends MessagePeerReaction { + public static int constructor = 0x8c79b63c; + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + big = (flags & 1) != 0; + unread = (flags & 2) != 0; + peer_id = Peer.TLdeserialize(stream, stream.readInt32(exception), exception); + date = stream.readInt32(exception); + reaction = Reaction.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = big ? (flags | 1) : (flags &~ 1); + flags = unread ? 
(flags | 2) : (flags &~ 2); + stream.writeInt32(flags); + peer_id.serializeToStream(stream); + stream.writeInt32(date); + reaction.serializeToStream(stream); + } + } + + public static class TL_messagePeerReaction_layer154 extends MessagePeerReaction { public static int constructor = 0xb156fe9c; public void readParams(AbstractSerializedData stream, boolean exception) { @@ -2695,7 +2814,10 @@ public static auth_Authorization TLdeserialize(AbstractSerializedData stream, in case 0x44747e9a: result = new TL_auth_authorizationSignUpRequired(); break; - case 0x33fb7bb8: + case 0x33fb7bb8://TODO old constructor need remove + result = new TL_auth_authorization(); + break; + case 0x2ea2c0d4: result = new TL_auth_authorization(); break; } @@ -2732,12 +2854,13 @@ public void serializeToStream(AbstractSerializedData stream) { } public static class TL_auth_authorization extends auth_Authorization { - public static int constructor = 0x33fb7bb8; + public static int constructor = 0x2ea2c0d4; public int flags; public boolean setup_password_required; public int otherwise_relogin_days; public int tmp_sessions; + public byte[] future_auth_token; public User user; public void readParams(AbstractSerializedData stream, boolean exception) { @@ -2749,6 +2872,9 @@ public void readParams(AbstractSerializedData stream, boolean exception) { if ((flags & 1) != 0) { tmp_sessions = stream.readInt32(exception); } + if ((flags & 4) != 0) { + future_auth_token = stream.readByteArray(exception); + } user = User.TLdeserialize(stream, stream.readInt32(exception), exception); } @@ -2762,6 +2888,9 @@ public void serializeToStream(AbstractSerializedData stream) { if ((flags & 1) != 0) { stream.writeInt32(tmp_sessions); } + if ((flags & 4) != 0) { + stream.writeByteArray(future_auth_token); + } user.serializeToStream(stream); } } @@ -6725,6 +6854,12 @@ public static abstract class auth_SentCodeType extends TLObject { public boolean google_signin_allowed; public String email_pattern; public int next_phone_login_date; + public byte[] nonce; + public String receipt; + public int push_timeout; + public int reset_available_period; + public int reset_pending_date; + public boolean verifiedFirebase; //custom public static auth_SentCodeType TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { auth_SentCodeType result = null; @@ -6735,7 +6870,7 @@ public static auth_SentCodeType TLdeserialize(AbstractSerializedData stream, int case 0x5353e5a7: result = new TL_auth_sentCodeTypeCall(); break; - case 0x5a159841: + case 0xf450f59b: result = new TL_auth_sentCodeTypeEmailCode(); break; case 0xa5491dea: @@ -6753,6 +6888,9 @@ public static auth_SentCodeType TLdeserialize(AbstractSerializedData stream, int case 0xd9565c39: result = new TL_auth_sentCodeTypeFragmentSms(); break; + case 0xe57b1432: + result = new TL_auth_sentCodeTypeFirebaseSms(); + break; } if (result == null && exception) { throw new RuntimeException(String.format("can't parse magic %x in auth_SentCodeType", constructor)); @@ -6793,7 +6931,7 @@ public void serializeToStream(AbstractSerializedData stream) { } public static class TL_auth_sentCodeTypeEmailCode extends auth_SentCodeType { - public static int constructor = 0x5a159841; + public static int constructor = 0xf450f59b; public void readParams(AbstractSerializedData stream, boolean exception) { @@ -6802,8 +6940,11 @@ public void readParams(AbstractSerializedData stream, boolean exception) { google_signin_allowed = (flags & 2) != 0; email_pattern = stream.readString(exception); length = 
stream.readInt32(exception); - if ((flags & 4) != 0) { - next_phone_login_date = stream.readInt32(exception); + if ((flags & 8) != 0) { + reset_available_period = stream.readInt32(exception); + } + if ((flags & 16) != 0) { + reset_pending_date = stream.readInt32(exception); } } @@ -6814,8 +6955,11 @@ public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(flags); stream.writeString(email_pattern); stream.writeInt32(length); - if ((flags & 4) != 0) { - stream.writeInt32(next_phone_login_date); + if ((flags & 8) != 0) { + stream.writeInt32(reset_available_period); + } + if ((flags & 16) != 0) { + stream.writeInt32(reset_pending_date); } } } @@ -6896,6 +7040,40 @@ public void serializeToStream(AbstractSerializedData stream) { } } + public static class TL_auth_sentCodeTypeFirebaseSms extends auth_SentCodeType { + public static int constructor = 0xe57b1432; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + if ((flags & 1) != 0) { + nonce = stream.readByteArray(exception); + } + if ((flags & 2) != 0) { + receipt = stream.readString(exception); + } + if ((flags & 2) != 0) { + push_timeout = stream.readInt32(exception); + } + length = stream.readInt32(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(flags); + if ((flags & 1) != 0) { + stream.writeByteArray(nonce); + } + if ((flags & 2) != 0) { + stream.writeString(receipt); + } + if ((flags & 2) != 0) { + stream.writeInt32(push_timeout); + } + stream.writeInt32(length); + } + } + public static abstract class messages_StickerSetInstallResult extends TLObject { public ArrayList sets = new ArrayList<>(); @@ -7037,6 +7215,37 @@ public void serializeToStream(AbstractSerializedData stream) { } } + public static class TL_readParticipantDate extends TLObject { + public static int constructor = 0x4a4ff172; + + public long user_id; + public int date; + + public static TL_readParticipantDate TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_readParticipantDate.constructor != constructor) { + if (exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_readParticipantDate", constructor)); + } else { + return null; + } + } + TL_readParticipantDate result = new TL_readParticipantDate(); + result.readParams(stream, exception); + return result; + } + + public void readParams(AbstractSerializedData stream, boolean exception) { + user_id = stream.readInt64(exception); + date = stream.readInt32(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt64(user_id); + stream.writeInt32(date); + } + } + public static class TL_statsGroupTopInviter extends TLObject { public static int constructor = 0x535f779d; @@ -8624,27 +8833,50 @@ public void serializeToStream(AbstractSerializedData stream) { } } - public static class TL_auth_sentCode extends TLObject { - public static int constructor = 0x5e002502; + public static abstract class auth_SentCode extends TLObject { public int flags; public auth_SentCodeType type; public String phone_code_hash; public auth_CodeType next_type; public int timeout; + public auth_Authorization authorization; - public static TL_auth_sentCode TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { - if (TL_auth_sentCode.constructor != constructor) { - if (exception) { - throw new 
RuntimeException(String.format("can't parse magic %x in TL_auth_sentCode", constructor)); - } else { - return null; - } + public static auth_SentCode TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + auth_SentCode result = null; + switch (constructor) { + case 0x2390fe44: + result = new TL_auth_sentCodeSuccess(); + break; + case 0x5e002502: + result = new TL_auth_sentCode(); + break; + } + if (result == null && exception) { + throw new RuntimeException(String.format("can't parse magic %x in auth_SentCode", constructor)); + } + if (result != null) { + result.readParams(stream, exception); } - TL_auth_sentCode result = new TL_auth_sentCode(); - result.readParams(stream, exception); return result; } + } + + public static class TL_auth_sentCodeSuccess extends auth_SentCode { + public static int constructor = 0x2390fe44; + + public void readParams(AbstractSerializedData stream, boolean exception) { + authorization = auth_Authorization.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + authorization.serializeToStream(stream); + } + } + + public static class TL_auth_sentCode extends auth_SentCode { + public static int constructor = 0x5e002502; public void readParams(AbstractSerializedData stream, boolean exception) { flags = stream.readInt32(exception); @@ -9350,6 +9582,54 @@ public void serializeToStream(AbstractSerializedData stream) { } } + public static abstract class help_AppConfig extends TLObject { + + public static help_AppConfig TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + help_AppConfig result = null; + switch (constructor) { + case 0xdd18782e: + result = new TL_help_appConfig(); + break; + case 0x7cde641d: + result = new TL_help_appConfigNotModified(); + break; + } + if (result == null && exception) { + throw new RuntimeException(String.format("can't parse magic %x in help_AppConfig", constructor)); + } + if (result != null) { + result.readParams(stream, exception); + } + return result; + } + } + + public static class TL_help_appConfig extends help_AppConfig { + public static int constructor = 0xdd18782e; + + public int hash; + public JSONValue config; + + public void readParams(AbstractSerializedData stream, boolean exception) { + hash = stream.readInt32(exception); + config = JSONValue.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(hash); + config.serializeToStream(stream); + } + } + + public static class TL_help_appConfigNotModified extends help_AppConfig { + public static int constructor = 0x7cde641d; + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + } + } + public static abstract class messages_DhConfig extends TLObject { public byte[] random; public int g; @@ -11195,6 +11475,7 @@ public static abstract class ReplyMarkup extends TLObject { public int flags; public boolean resize; public boolean single_use; + public boolean is_persistent; public boolean selective; public String placeholder; public ArrayList rows = new ArrayList<>(); @@ -11241,6 +11522,7 @@ public void readParams(AbstractSerializedData stream, boolean exception) { resize = (flags & 1) != 0; single_use = (flags & 2) != 0; selective = (flags & 4) != 0; + is_persistent = (flags & 16) != 0; int magic = stream.readInt32(exception); if 
(magic != 0x1cb5c415) { if (exception) { @@ -11266,6 +11548,7 @@ public void serializeToStream(AbstractSerializedData stream) { flags = resize ? (flags | 1) : (flags &~ 1); flags = single_use ? (flags | 2) : (flags &~ 2); flags = selective ? (flags | 4) : (flags &~ 4); + flags = is_persistent ? (flags | 16) : (flags &~ 16); stream.writeInt32(flags); stream.writeInt32(0x1cb5c415); int count = rows.size(); @@ -12109,6 +12392,7 @@ public static abstract class ChatFull extends TLObject { public boolean can_delete_channel; public boolean antispam; public boolean participants_hidden; + public boolean translations_disabled; public ChatReactions available_reactions; public long inviterId; //custom @@ -14584,6 +14868,7 @@ public void readParams(AbstractSerializedData stream, boolean exception) { can_delete_channel = (flags2 & 1) != 0; antispam = (flags2 & 2) != 0; participants_hidden = (flags2 & 4) != 0; + translations_disabled = (flags2 & 8) != 0; id = stream.readInt64(exception); about = stream.readString(exception); if ((flags & 1) != 0) { @@ -14721,6 +15006,7 @@ public void serializeToStream(AbstractSerializedData stream) { flags2 = can_delete_channel ? (flags2 | 1) : (flags2 &~ 1); flags2 = antispam ? (flags2 | 2) : (flags2 &~ 2); flags2 = participants_hidden ? (flags2 | 4) : (flags2 &~ 4); + flags2 = translations_disabled ? (flags2 | 8) : (flags2 &~ 8); stream.writeInt32(flags2); stream.writeInt64(id); stream.writeString(about); @@ -17693,14 +17979,17 @@ public void serializeToStream(AbstractSerializedData stream) { } public static class TL_codeSettings extends TLObject { - public static int constructor = 0x8a6469c2; + public static int constructor = 0xad253d78; public int flags; public boolean allow_flashcall; public boolean current_number; public boolean allow_app_hash; public boolean allow_missed_call; + public boolean allow_firebase; public ArrayList logout_tokens = new ArrayList<>(); + public String token; + public boolean app_sandbox; public static TL_codeSettings TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { if (TL_codeSettings.constructor != constructor) { @@ -17721,6 +18010,7 @@ public void readParams(AbstractSerializedData stream, boolean exception) { current_number = (flags & 2) != 0; allow_app_hash = (flags & 16) != 0; allow_missed_call = (flags & 32) != 0; + allow_firebase = (flags & 128) != 0; if ((flags & 64) != 0) { int magic = stream.readInt32(exception); if (magic != 0x1cb5c415) { @@ -17731,9 +18021,17 @@ public void readParams(AbstractSerializedData stream, boolean exception) { } int count = stream.readInt32(exception); for (int a = 0; a < count; a++) { - logout_tokens.add(stream.readByteArray(exception)); + byte[] object = stream.readByteArray(exception); + if (object == null) { + return; + } + logout_tokens.add(object); } } + if ((flags & 256) != 0) { + token = stream.readString(exception); + } + app_sandbox = (flags & 256) != 0; } public void serializeToStream(AbstractSerializedData stream) { @@ -17742,6 +18040,8 @@ public void serializeToStream(AbstractSerializedData stream) { flags = current_number ? (flags | 2) : (flags &~ 2); flags = allow_app_hash ? (flags | 16) : (flags &~ 16); flags = allow_missed_call ? (flags | 32) : (flags &~ 32); + flags = allow_firebase ? (flags | 128) : (flags &~ 128); + flags = app_sandbox ? 
(flags | 256) : (flags &~ 256); stream.writeInt32(flags); if ((flags & 64) != 0) { stream.writeInt32(0x1cb5c415); @@ -17751,6 +18051,9 @@ public void serializeToStream(AbstractSerializedData stream) { stream.writeByteArray(logout_tokens.get(a)); } } + if ((flags & 256) != 0) { + stream.writeString(token); + } } } @@ -18671,6 +18974,9 @@ public static KeyboardButton TLdeserialize(AbstractSerializedData stream, int co case 0x13767230: result = new TL_keyboardButtonWebView(); break; + case 0xd0b468c: + result = new TL_keyboardButtonRequestPeer(); + break; } if (result == null && exception) { throw new RuntimeException(String.format("can't parse magic %x in KeyboardButton", constructor)); @@ -18930,6 +19236,73 @@ public void serializeToStream(AbstractSerializedData stream) { } } + public static class TL_videoSizeEmojiMarkup extends VideoSize { + public static int constructor = 0xf85c413c; + + public long emoji_id; + + public void readParams(AbstractSerializedData stream, boolean exception) { + emoji_id = stream.readInt64(exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + background_colors.add(stream.readInt32(exception)); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt64(emoji_id); + stream.writeInt32(0x1cb5c415); + int count = background_colors.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + stream.writeInt32(background_colors.get(a)); + } + } + } + + public static class TL_videoSizeStickerMarkup extends VideoSize { + public static int constructor = 0xda082fe; + + public InputStickerSet stickerset; + public long sticker_id; + + public void readParams(AbstractSerializedData stream, boolean exception) { + stickerset = InputStickerSet.TLdeserialize(stream, stream.readInt32(exception), exception); + sticker_id = stream.readInt64(exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + background_colors.add(stream.readInt32(exception)); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stickerset.serializeToStream(stream); + stream.writeInt64(sticker_id); + stream.writeInt32(0x1cb5c415); + int count = background_colors.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + stream.writeInt32(background_colors.get(a)); + } + } + } + public static abstract class VideoSize extends TLObject { public int flags; @@ -18939,6 +19312,7 @@ public static abstract class VideoSize extends TLObject { public int h; public int size; public double video_start_ts; + public ArrayList background_colors = new ArrayList<>(); public static VideoSize TLdeserialize(long photo_id, long document_id, AbstractSerializedData stream, int constructor, boolean exception) { VideoSize result = null; @@ -18952,6 +19326,12 @@ public static VideoSize TLdeserialize(long photo_id, long document_id, AbstractS case 0xde33b094: result = new TL_videoSize(); break; + case 0xda082fe: + result = new TL_videoSizeStickerMarkup(); + break; + case 0xf85c413c: + result = new TL_videoSizeEmojiMarkup(); + break; } if 
(result == null && exception) { throw new RuntimeException(String.format("can't parse magic %x in VideoSize", constructor)); @@ -24148,6 +24528,9 @@ public static MessageAction TLdeserialize(AbstractSerializedData stream, int con result = new TL_messageActionChatEditTitle(); break; case 0xabe9affe: + result = new TL_messageActionBotAllowed_layer153(); + break; + case 0xc516d679: result = new TL_messageActionBotAllowed(); break; case 0x96163f56: @@ -24171,6 +24554,9 @@ public static MessageAction TLdeserialize(AbstractSerializedData stream, int con case 0xaba0f5c6: result = new TL_messageActionGiftPremium(); break; + case 0xfe77345d: + result = new TL_messageActionRequestedPeer(); + break; } if (result == null && exception) { throw new RuntimeException(String.format("can't parse magic %x in MessageAction", constructor)); @@ -24395,7 +24781,7 @@ public void serializeToStream(AbstractSerializedData stream) { } public static class TL_messages_getMessageReadParticipants extends TLObject { - public static int constructor = 0x2c6f97b7; + public static int constructor = 0x31c1c44f; public InputPeer peer; public int msg_id; @@ -24404,7 +24790,11 @@ public TLObject deserializeResponse(AbstractSerializedData stream, int construct Vector vector = new Vector(); int size = stream.readInt32(exception); for (int a = 0; a < size; a++) { - vector.objects.add(stream.readInt64(exception)); + TL_readParticipantDate object = TL_readParticipantDate.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return vector; + } + vector.objects.add(object); } return vector; } @@ -25066,6 +25456,37 @@ public void serializeToStream(AbstractSerializedData stream) { } public static class TL_messageActionBotAllowed extends MessageAction { + public static int constructor = 0xc516d679; + + public boolean attach_menu; + public String domain; + public BotApp app; + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + attach_menu = (flags & 2) != 0; + if ((flags & 1) != 0) { + domain = stream.readString(exception); + } + if ((flags & 4) != 0) { + app = BotApp.TLdeserialize(stream, stream.readInt32(exception), exception); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = attach_menu ? 
(flags | 2) : (flags &~ 2); + stream.writeInt32(flags); + if ((flags & 1) != 0) { + stream.writeString(domain); + } + if ((flags & 4) != 0) { + app.serializeToStream(stream); + } + } + } + + public static class TL_messageActionBotAllowed_layer153 extends MessageAction { public static int constructor = 0xabe9affe; public String domain; @@ -31097,6 +31518,9 @@ public static Update TLdeserialize(AbstractSerializedData stream, int constructo case 0x20529438: result = new TL_updateUser(); break; + case 0xccf08ad6: + result = new TL_updateGroupInvitePrivacyForbidden(); + break; case 0x17b7a20b: result = new TL_updateAttachMenuBots(); break; @@ -35960,17 +36384,15 @@ public void serializeToStream(AbstractSerializedData stream) { } } + public static class TL_config extends TLObject { - public static int constructor = 0x232566ac; + public static int constructor = 0xcc1a241e; public int flags; - public boolean phonecalls_enabled; public boolean default_p2p_contacts; public boolean preload_featured_stickers; - public boolean ignore_phone_entities; public boolean revoke_pm_inbox; public boolean blocked_mode; - public boolean pfs_enabled; public boolean force_try_ipv6; public int date; public int expires; @@ -35989,17 +36411,13 @@ public static class TL_config extends TLObject { public int notify_default_delay_ms; public int push_chat_period_ms; public int push_chat_limit; - public int saved_gifs_limit; public int edit_time_limit; public int revoke_time_limit; public int revoke_pm_time_limit; public int rating_e_decay; public int stickers_recent_limit; - public int stickers_faved_limit; public int channels_read_media_period; public int tmp_sessions; - public int pinned_dialogs_count_max; - public int pinned_infolder_count_max; public int call_receive_timeout_ms; public int call_ring_timeout_ms; public int call_connect_timeout_ms; @@ -36017,6 +36435,7 @@ public static class TL_config extends TLObject { public int lang_pack_version; public int base_lang_pack_version; public Reaction reactions_default; + public String autologin_token; public static TL_config TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { if (TL_config.constructor != constructor) { @@ -36033,13 +36452,10 @@ public static TL_config TLdeserialize(AbstractSerializedData stream, int constru public void readParams(AbstractSerializedData stream, boolean exception) { flags = stream.readInt32(exception); - phonecalls_enabled = (flags & 2) != 0; default_p2p_contacts = (flags & 8) != 0; preload_featured_stickers = (flags & 16) != 0; - ignore_phone_entities = (flags & 32) != 0; revoke_pm_inbox = (flags & 64) != 0; blocked_mode = (flags & 256) != 0; - pfs_enabled = (flags & 8192) != 0; force_try_ipv6 = (flags & 16384) != 0; date = stream.readInt32(exception); expires = stream.readInt32(exception); @@ -36072,19 +36488,15 @@ public void readParams(AbstractSerializedData stream, boolean exception) { notify_default_delay_ms = stream.readInt32(exception); push_chat_period_ms = stream.readInt32(exception); push_chat_limit = stream.readInt32(exception); - saved_gifs_limit = stream.readInt32(exception); edit_time_limit = stream.readInt32(exception); revoke_time_limit = stream.readInt32(exception); revoke_pm_time_limit = stream.readInt32(exception); rating_e_decay = stream.readInt32(exception); stickers_recent_limit = stream.readInt32(exception); - stickers_faved_limit = stream.readInt32(exception); channels_read_media_period = stream.readInt32(exception); if ((flags & 1) != 0) { tmp_sessions = stream.readInt32(exception); } 
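// Note on the TL_config hunk around this point: as elsewhere in TLRPC.java, optional fields are
// gated by bits of the int32 `flags` word (e.g. tmp_sessions behind flags & 1, and the new
// autologin_token behind flags & 65536), while pure booleans such as force_try_ipv6 are stored as
// a bit with no payload. A minimal illustrative sketch of that pattern, using hypothetical field
// names and assuming only the AbstractSerializedData API already used in this file:
//
//     public int flags;
//     public boolean some_flag;      // bit only, no payload on the wire
//     public String optional_value;  // payload written only when its bit is set
//
//     public void readParams(AbstractSerializedData stream, boolean exception) {
//         flags = stream.readInt32(exception);
//         some_flag = (flags & 1) != 0;
//         if ((flags & 2) != 0) {
//             optional_value = stream.readString(exception);
//         }
//     }
//
//     public void serializeToStream(AbstractSerializedData stream) {
//         stream.writeInt32(constructor);
//         flags = some_flag ? (flags | 1) : (flags &~ 1);
//         flags = optional_value != null ? (flags | 2) : (flags &~ 2);
//         stream.writeInt32(flags);
//         if ((flags & 2) != 0) {
//             stream.writeString(optional_value);
//         }
//     }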
- pinned_dialogs_count_max = stream.readInt32(exception); - pinned_infolder_count_max = stream.readInt32(exception); call_receive_timeout_ms = stream.readInt32(exception); call_ring_timeout_ms = stream.readInt32(exception); call_connect_timeout_ms = stream.readInt32(exception); @@ -36120,17 +36532,17 @@ public void readParams(AbstractSerializedData stream, boolean exception) { if ((flags & 32768) != 0) { reactions_default = Reaction.TLdeserialize(stream, stream.readInt32(exception), exception); } + if ((flags & 65536) != 0) { + autologin_token = stream.readString(exception); + } } public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); - flags = phonecalls_enabled ? (flags | 2) : (flags &~ 2); flags = default_p2p_contacts ? (flags | 8) : (flags &~ 8); flags = preload_featured_stickers ? (flags | 16) : (flags &~ 16); - flags = ignore_phone_entities ? (flags | 32) : (flags &~ 32); flags = revoke_pm_inbox ? (flags | 64) : (flags &~ 64); flags = blocked_mode ? (flags | 256) : (flags &~ 256); - flags = pfs_enabled ? (flags | 8192) : (flags &~ 8192); flags = force_try_ipv6 ? (flags | 16384) : (flags &~ 16384); stream.writeInt32(flags); stream.writeInt32(date); @@ -36155,19 +36567,15 @@ public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(notify_default_delay_ms); stream.writeInt32(push_chat_period_ms); stream.writeInt32(push_chat_limit); - stream.writeInt32(saved_gifs_limit); stream.writeInt32(edit_time_limit); stream.writeInt32(revoke_time_limit); stream.writeInt32(revoke_pm_time_limit); stream.writeInt32(rating_e_decay); stream.writeInt32(stickers_recent_limit); - stream.writeInt32(stickers_faved_limit); stream.writeInt32(channels_read_media_period); if ((flags & 1) != 0) { stream.writeInt32(tmp_sessions); } - stream.writeInt32(pinned_dialogs_count_max); - stream.writeInt32(pinned_infolder_count_max); stream.writeInt32(call_receive_timeout_ms); stream.writeInt32(call_ring_timeout_ms); stream.writeInt32(call_connect_timeout_ms); @@ -36203,6 +36611,9 @@ public void serializeToStream(AbstractSerializedData stream) { if ((flags & 32768) != 0) { reactions_default.serializeToStream(stream); } + if ((flags & 65536) != 0) { + stream.writeString(autologin_token); + } } } @@ -36832,6 +37243,7 @@ public static abstract class messages_BotResults extends TLObject { public ArrayList results = new ArrayList<>(); public int cache_time; public ArrayList users = new ArrayList<>(); + public TL_inlineBotWebView switch_webview; public static messages_BotResults TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { messages_BotResults result = null; @@ -36840,6 +37252,9 @@ public static messages_BotResults TLdeserialize(AbstractSerializedData stream, i result = new TL_messages_botResults_layer71(); break; case 0x947ca848: + result = new TL_messages_botResults_layer153(); + break; + case 0xe021f2f6: result = new TL_messages_botResults(); break; } @@ -36853,7 +37268,7 @@ public static messages_BotResults TLdeserialize(AbstractSerializedData stream, i } } - public static class TL_messages_botResults_layer71 extends TL_messages_botResults { + public static class TL_messages_botResults_layer71 extends TL_messages_botResults_layer153 { public static int constructor = 0xccd3563d; @@ -36907,6 +37322,99 @@ public void serializeToStream(AbstractSerializedData stream) { } public static class TL_messages_botResults extends messages_BotResults { + public static int constructor = 0xe021f2f6; + + + public static TL_messages_botResults 
TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_messages_botResults.constructor != constructor) { + if (exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_messages_botResults", constructor)); + } else { + return null; + } + } + TL_messages_botResults result = new TL_messages_botResults(); + result.readParams(stream, exception); + return result; + } + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + gallery = (flags & 1) != 0; + query_id = stream.readInt64(exception); + if ((flags & 2) != 0) { + next_offset = stream.readString(exception); + } + if ((flags & 4) != 0) { + switch_pm = TL_inlineBotSwitchPM.TLdeserialize(stream, stream.readInt32(exception), exception); + } + if ((flags & 8) != 0) { + switch_webview = TL_inlineBotWebView.TLdeserialize(stream, stream.readInt32(exception), exception); + } + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + BotInlineResult object = BotInlineResult.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + results.add(object); + } + cache_time = stream.readInt32(exception); + magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + User object = User.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + users.add(object); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = gallery ? 
(flags | 1) : (flags &~ 1); + stream.writeInt32(flags); + stream.writeInt64(query_id); + if ((flags & 2) != 0) { + stream.writeString(next_offset); + } + if ((flags & 4) != 0) { + switch_pm.serializeToStream(stream); + } + if ((flags & 8) != 0) { + switch_webview.serializeToStream(stream); + } + stream.writeInt32(0x1cb5c415); + int count = results.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + results.get(a).serializeToStream(stream); + } + stream.writeInt32(cache_time); + stream.writeInt32(0x1cb5c415); + count = users.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + users.get(a).serializeToStream(stream); + } + } + } + + public static class TL_messages_botResults_layer153 extends messages_BotResults { public static int constructor = 0x947ca848; @@ -37772,9 +38280,11 @@ public static ChannelMessagesFilter TLdeserialize(AbstractSerializedData stream, } public static class TL_sponsoredMessage extends TLObject { - public static int constructor = 0x3a836df8; + public static int constructor = 0xfc25b828; public int flags; + public boolean recommended; + public boolean show_peer_photo; public byte[] random_id; public Peer from_id; public ChatInvite chat_invite; @@ -37783,8 +38293,8 @@ public static class TL_sponsoredMessage extends TLObject { public String start_param; public String message; public ArrayList entities = new ArrayList<>(); - public boolean recommended; - public boolean show_peer_photo; + public String sponsor_info; + public String additional_info; public static TL_sponsoredMessage TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { if (TL_sponsoredMessage.constructor != constructor) { @@ -37837,6 +38347,12 @@ public void readParams(AbstractSerializedData stream, boolean exception) { entities.add(object); } } + if ((flags & 128) != 0) { + sponsor_info = stream.readString(exception); + } + if ((flags & 256) != 0) { + additional_info = stream.readString(exception); + } } public void serializeToStream(AbstractSerializedData stream) { @@ -37869,6 +38385,12 @@ public void serializeToStream(AbstractSerializedData stream) { entities.get(a).serializeToStream(stream); } } + if ((flags & 128) != 0) { + stream.writeString(sponsor_info); + } + if ((flags & 256) != 0) { + stream.writeString(additional_info); + } } } @@ -40607,6 +41129,123 @@ public void serializeToStream(AbstractSerializedData stream) { } } + public static abstract class messages_EmojiGroups extends TLObject { + + public static messages_EmojiGroups TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + messages_EmojiGroups result = null; + switch (constructor) { + case 0x6fb4ad87: + result = new TL_messages_emojiGroupsNotModified(); + break; + case 0x881fb94b: + result = new TL_messages_emojiGroups(); + break; + } + if (result == null && exception) { + throw new RuntimeException(String.format("can't parse magic %x in messages_EmojiGroups", constructor)); + } + if (result != null) { + result.readParams(stream, exception); + } + return result; + } + } + + public static class TL_messages_emojiGroupsNotModified extends messages_EmojiGroups { + public static int constructor = 0x6fb4ad87; + + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + } + } + + public static class TL_messages_emojiGroups extends messages_EmojiGroups { + public static int constructor = 0x881fb94b; + + public int hash; + public ArrayList groups = new ArrayList<>(); + + public void 
readParams(AbstractSerializedData stream, boolean exception) { + hash = stream.readInt32(exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + TL_emojiGroup object = TL_emojiGroup.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + groups.add(object); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(hash); + stream.writeInt32(0x1cb5c415); + int count = groups.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + groups.get(a).serializeToStream(stream); + } + } + } + + public static class TL_emojiGroup extends TLObject { + public static int constructor = 0x7a9abda9; + + public String title; + public long icon_emoji_id; + public ArrayList emoticons = new ArrayList<>(); + + public static TL_emojiGroup TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_emojiGroup.constructor != constructor) { + if (exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_emojiGroup", constructor)); + } else { + return null; + } + } + TL_emojiGroup result = new TL_emojiGroup(); + result.readParams(stream, exception); + return result; + } + + public void readParams(AbstractSerializedData stream, boolean exception) { + title = stream.readString(exception); + icon_emoji_id = stream.readInt64(exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + emoticons.add(stream.readString(exception)); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(title); + stream.writeInt64(icon_emoji_id); + stream.writeInt32(0x1cb5c415); + int count = emoticons.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + stream.writeString(emoticons.get(a)); + } + } + } + public static class TL_emojiKeyword extends EmojiKeyword { public static int constructor = 0xd5b3b9f9; @@ -42372,6 +43011,10 @@ public boolean verifiedExtended() { } public static Chat TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + return TLdeserialize(stream, constructor, exception, true); + } + + public static Chat TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception, boolean allowStrippedThumb) { Chat result = null; switch (constructor) { case 0x2d85832c: @@ -42451,11 +43094,15 @@ public static Chat TLdeserialize(AbstractSerializedData stream, int constructor, throw new RuntimeException(String.format("can't parse magic %x in Chat", constructor)); } if (result != null) { - result.readParams(stream, exception); + result.readParams(stream, exception, allowStrippedThumb); } return result; } + private void readParams(AbstractSerializedData stream, boolean exception, boolean allowStrippedThumb) { + readParams(stream, exception); + } + protected static TL_chatBannedRights mergeBannedRights(TL_channelBannedRights_layer92 rights) { if (rights == null) { return null; @@ -42548,8 +43195,11 @@ public void serializeToStream(AbstractSerializedData stream) { 
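// Note on the TL_chat / TL_channel hunks below: the new allowStrippedThumb argument is introduced
// by keeping the old two-argument readParams as a thin overload that forwards a default of true
// (Java has no default parameters), and the flag is then threaded down into ChatPhoto.TLdeserialize
// so callers can skip decoding the stripped thumbnail bitmap. A condensed sketch of the pattern as
// it appears in these hunks:
//
//     public void readParams(AbstractSerializedData stream, boolean exception) {
//         // old entry point kept for existing callers; stripped-thumb decoding stays enabled
//         readParams(stream, exception, true);
//     }
//
//     public void readParams(AbstractSerializedData stream, boolean exception, boolean allowStrippedThumb) {
//         flags = stream.readInt32(exception);
//         // ...
//         photo = ChatPhoto.TLdeserialize(stream, stream.readInt32(exception), exception, allowStrippedThumb);
//         // ...
//     }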
public static class TL_chat extends Chat { public static int constructor = 0x41cbf256; - public void readParams(AbstractSerializedData stream, boolean exception) { + readParams(stream, exception, true); + } + + public void readParams(AbstractSerializedData stream, boolean exception, boolean allowStrippedThumb) { flags = stream.readInt32(exception); creator = (flags & 1) != 0; kicked = (flags & 2) != 0; @@ -42560,7 +43210,7 @@ public void readParams(AbstractSerializedData stream, boolean exception) { noforwards = (flags & 33554432) != 0; id = stream.readInt64(exception); title = stream.readString(exception); - photo = ChatPhoto.TLdeserialize(stream, stream.readInt32(exception), exception); + photo = ChatPhoto.TLdeserialize(stream, stream.readInt32(exception), exception, allowStrippedThumb); participants_count = stream.readInt32(exception); date = stream.readInt32(exception); version = stream.readInt32(exception); @@ -42820,6 +43470,10 @@ public static class TL_channel extends Chat { public static int constructor = 0x83259464; public void readParams(AbstractSerializedData stream, boolean exception) { + readParams(stream, exception, true); + } + + public void readParams(AbstractSerializedData stream, boolean exception, boolean allowStrippedThumb) { flags = stream.readInt32(exception); creator = (flags & 1) != 0; left = (flags & 4) != 0; @@ -42850,7 +43504,7 @@ public void readParams(AbstractSerializedData stream, boolean exception) { if ((flags & 64) != 0) { username = stream.readString(exception); } - photo = ChatPhoto.TLdeserialize(stream, stream.readInt32(exception), exception); + photo = ChatPhoto.TLdeserialize(stream, stream.readInt32(exception), exception, allowStrippedThumb); date = stream.readInt32(exception); if ((flags & 512) != 0) { int magic = stream.readInt32(exception); @@ -44495,49 +45149,50 @@ public void serializeToStream(AbstractSerializedData stream) { } } - public static abstract class messages_TranslatedText extends TLObject { + public static class TL_messages_translateResult extends TLObject { + public static int constructor = 0x33db32f8; - public static messages_TranslatedText TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { - messages_TranslatedText result = null; - switch (constructor) { - case 0x67ca4737: - result = new TL_messages_translateNoResult(); - break; - case 0xa214f7d0: - result = new TL_messages_translateResultText(); - break; - } - if (result == null && exception) { - throw new RuntimeException(String.format("can't parse magic %x in messages_TranslatedText", constructor)); - } - if (result != null) { - result.readParams(stream, exception); + public ArrayList result = new ArrayList<>(); + + public static TL_messages_translateResult TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_messages_translateResult.constructor != constructor) { + if (exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_messages_translateResult", constructor)); + } else { + return null; + } } + TL_messages_translateResult result = new TL_messages_translateResult(); + result.readParams(stream, exception); return result; } - } - - public static class TL_messages_translateNoResult extends messages_TranslatedText { - public static int constructor = 0x67ca4737; - - - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - } - } - - public static class TL_messages_translateResultText extends messages_TranslatedText { - public static int 
constructor = 0xa214f7d0; - - public String text; public void readParams(AbstractSerializedData stream, boolean exception) { - text = stream.readString(exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + TL_textWithEntities object = TL_textWithEntities.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + result.add(object); + } } public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); - stream.writeString(text); + stream.writeInt32(0x1cb5c415); + int count = result.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + result.get(a).serializeToStream(stream); + } } } @@ -46139,6 +46794,56 @@ public void serializeToStream(AbstractSerializedData stream) { } } + public static class TL_textWithEntities extends TLObject { + public static int constructor = 0x751f3146; + + public String text; + public ArrayList entities = new ArrayList<>(); + + public static TL_textWithEntities TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_textWithEntities.constructor != constructor) { + if (exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_textWithEntities", constructor)); + } else { + return null; + } + } + TL_textWithEntities result = new TL_textWithEntities(); + result.readParams(stream, exception); + return result; + } + + public void readParams(AbstractSerializedData stream, boolean exception) { + text = stream.readString(exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + MessageEntity object = MessageEntity.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + entities.add(object); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(text); + stream.writeInt32(0x1cb5c415); + int count = entities.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + entities.get(a).serializeToStream(stream); + } + } + } + public static class TL_account_webAuthorizations extends TLObject { public static int constructor = 0xed56c9fc; @@ -46375,6 +47080,7 @@ public static abstract class UserFull extends TLObject { public boolean has_scheduled; public boolean video_calls_available; public boolean voice_messages_forbidden; + public boolean translations_disabled; public User user; public String about; public TL_contacts_link_layer101 link; @@ -46451,6 +47157,7 @@ public void readParams(AbstractSerializedData stream, boolean exception) { has_scheduled = (flags & 4096) != 0; video_calls_available = (flags & 8192) != 0; voice_messages_forbidden = (flags & 1048576) != 0; + translations_disabled = (flags & 8388608) != 0; id = stream.readInt64(exception); if ((flags & 2) != 0) { about = stream.readString(exception); @@ -46519,6 +47226,7 @@ public void serializeToStream(AbstractSerializedData stream) { flags = has_scheduled ? (flags | 4096) : (flags &~ 4096); flags = video_calls_available ? 
(flags | 8192) : (flags &~ 8192); flags = voice_messages_forbidden ? (flags | 1048576) : (flags &~ 1048576); + flags = translations_disabled ? (flags | 8388608) : (flags &~ 8388608); stream.writeInt32(flags); stream.writeInt64(id); if ((flags & 2) != 0) { @@ -48043,7 +48751,7 @@ public static InputChatPhoto TLdeserialize(AbstractSerializedData stream, int co case 0x1ca48f57: result = new TL_inputChatPhotoEmpty(); break; - case 0xc642724e: + case 0xbdcdaec0: result = new TL_inputChatUploadedPhoto(); break; } @@ -48082,12 +48790,13 @@ public void serializeToStream(AbstractSerializedData stream) { } public static class TL_inputChatUploadedPhoto extends InputChatPhoto { - public static int constructor = 0xc642724e; + public static int constructor = 0xbdcdaec0; public int flags; public InputFile file; public InputFile video; public double video_start_ts; + public VideoSize video_emoji_markup; public void readParams(AbstractSerializedData stream, boolean exception) { flags = stream.readInt32(exception); @@ -48100,6 +48809,9 @@ public void readParams(AbstractSerializedData stream, boolean exception) { if ((flags & 4) != 0) { video_start_ts = stream.readDouble(exception); } + if ((flags & 8) != 0) { + video_emoji_markup = VideoSize.TLdeserialize(0, 0, stream, stream.readInt32(exception), exception); + } } public void serializeToStream(AbstractSerializedData stream) { @@ -48114,6 +48826,9 @@ public void serializeToStream(AbstractSerializedData stream) { if ((flags & 4) != 0) { stream.writeDouble(video_start_ts); } + if ((flags & 8) != 0) { + video_emoji_markup.serializeToStream(stream); + } } } @@ -50020,6 +50735,87 @@ public void serializeToStream(AbstractSerializedData stream) { } } + public static class TL_messages_getEmojiGroups extends TLObject { + public static int constructor = 0x7488ce5b; + + public int hash; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return messages_EmojiGroups.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(hash); + } + } + + public static class TL_messages_getEmojiProfilePhotoGroups extends TLObject { + public static int constructor = 0x21a548f3; + + public int hash; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return messages_EmojiGroups.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(hash); + } + } + + public static class TL_messages_getEmojiStatusGroups extends TLObject { + public static int constructor = 0x2ecd56cd; + + public int hash; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return messages_EmojiGroups.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(hash); + } + } + + public static class TL_messages_searchCustomEmoji extends TLObject { + public static int constructor = 0x2c11c0d7; + + public String emoticon; + public long hash; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return EmojiList.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + 
stream.writeInt32(constructor); + stream.writeString(emoticon); + stream.writeInt64(hash); + } + } + + public static class TL_messages_togglePeerTranslations extends TLObject { + public static int constructor = 0xe47cb579; + + public int flags; + public boolean disabled; + public InputPeer peer; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return Bool.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = disabled ? (flags | 1) : (flags &~ 1); + stream.writeInt32(flags); + peer.serializeToStream(stream); + } + } + public static class TL_auth_sendCode extends TLObject { public static int constructor = 0xa677244f; @@ -50121,6 +50917,23 @@ public void serializeToStream(AbstractSerializedData stream) { } } + public static class TL_auth_resetLoginEmail extends TLObject { + public static int constructor = 0x7e960193; + + public String phone_number; + public String phone_code_hash; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return auth_SentCode.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(phone_number); + stream.writeString(phone_code_hash); + } + } + public static class TL_auth_resetAuthorizations extends TLObject { public static int constructor = 0x9fab0d1a; @@ -50256,6 +51069,33 @@ public void serializeToStream(AbstractSerializedData stream) { } } + public static class TL_auth_requestFirebaseSms extends TLObject { + public static int constructor = 0x89464b50; + + public int flags; + public String phone_number; + public String phone_code_hash; + public String safety_net_token; + public String ios_push_secret; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return Bool.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(flags); + stream.writeString(phone_number); + stream.writeString(phone_code_hash); + if ((flags & 1) != 0) { + stream.writeString(safety_net_token); + } + if ((flags & 2) != 0) { + stream.writeString(ios_push_secret); + } + } + } + public static class TL_account_registerDevice extends TLObject { public static int constructor = 0xec86017a; @@ -51942,13 +52782,14 @@ public void serializeToStream(AbstractSerializedData stream) { } public static class TL_photos_uploadProfilePhoto extends TLObject { - public static int constructor = 0x89f30f69; + public static int constructor = 0x93c9a51; public int flags; public boolean fallback; public InputFile file; public InputFile video; public double video_start_ts; + public VideoSize video_emoji_markup; public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { return TL_photos_photo.TLdeserialize(stream, constructor, exception); @@ -51967,6 +52808,9 @@ public void serializeToStream(AbstractSerializedData stream) { if ((flags & 4) != 0) { stream.writeDouble(video_start_ts); } + if ((flags & 16) != 0) { + video_emoji_markup.serializeToStream(stream); + } } } @@ -52062,18 +52906,21 @@ public void serializeToStream(AbstractSerializedData stream) { } public static class TL_help_getAppConfig extends TLObject { - public static int constructor = 0x98914110; + 
public static int constructor = 0x61e3f854; + public int hash; public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { - return JSONValue.TLdeserialize(stream, constructor, exception); + return help_AppConfig.TLdeserialize(stream, constructor, exception); } public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); + stream.writeInt32(hash); } } + public static class TL_help_saveAppLog extends TLObject { public static int constructor = 0x6f02f748; @@ -55059,17 +55906,16 @@ public void serializeToStream(AbstractSerializedData stream) { } public static class TL_messages_translateText extends TLObject { - public static int constructor = 0x24ce6dee; + public static int constructor = 0x63183030; public int flags; public InputPeer peer; - public int msg_id; - public String text; - public String from_lang; + public ArrayList id = new ArrayList<>(); + public ArrayList text = new ArrayList<>(); public String to_lang; public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { - return messages_TranslatedText.TLdeserialize(stream, constructor, exception); + return TL_messages_translateResult.TLdeserialize(stream, constructor, exception); } public void serializeToStream(AbstractSerializedData stream) { @@ -55079,13 +55925,20 @@ public void serializeToStream(AbstractSerializedData stream) { peer.serializeToStream(stream); } if ((flags & 1) != 0) { - stream.writeInt32(msg_id); + stream.writeInt32(0x1cb5c415); + int count = id.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + stream.writeInt32(id.get(a)); + } } if ((flags & 2) != 0) { - stream.writeString(text); - } - if ((flags & 4) != 0) { - stream.writeString(from_lang); + stream.writeInt32(0x1cb5c415); + final int count = text.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + text.get(a).serializeToStream(stream); + } } stream.writeString(to_lang); } @@ -56446,16 +57299,18 @@ public void serializeToStream(AbstractSerializedData stream) { } public static class TL_channels_createChannel extends TLObject { - public static int constructor = 0x3d5fb10f; + public static int constructor = 0x91006707; public int flags; public boolean broadcast; public boolean megagroup; public boolean for_import; + public boolean forum; public String title; public String about; public InputGeoPoint geo_point; public String address; + public int ttl_period; public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { return Updates.TLdeserialize(stream, constructor, exception); @@ -56466,6 +57321,7 @@ public void serializeToStream(AbstractSerializedData stream) { flags = broadcast ? (flags | 1) : (flags &~ 1); flags = megagroup ? (flags | 2) : (flags &~ 2); flags = for_import ? (flags | 8) : (flags &~ 8); + flags = forum ? 
(flags | 32) : (flags &~ 32); stream.writeInt32(flags); stream.writeString(title); stream.writeString(about); @@ -56475,6 +57331,9 @@ public void serializeToStream(AbstractSerializedData stream) { if ((flags & 4) != 0) { stream.writeString(address); } + if ((flags & 16) != 0) { + stream.writeInt32(ttl_period); + } } } @@ -57926,7 +58785,7 @@ public static InputStorePaymentPurpose TLdeserialize(AbstractSerializedData stre case 0xa6751e66: result = new TL_inputStorePaymentPremiumSubscription(); break; - case 0x44618a7d: + case 0x616f7fe8: result = new TL_inputStorePaymentGiftPremium(); break; } @@ -57945,15 +58804,18 @@ public static class TL_inputStorePaymentPremiumSubscription extends InputStorePa public int flags; public boolean restore; + public boolean upgrade; public void readParams(AbstractSerializedData stream, boolean exception) { flags = stream.readInt32(exception); restore = (flags & 1) != 0; + upgrade = (flags & 2) != 0; } public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); flags = restore ? (flags | 1) : (flags &~ 1); + flags = upgrade ? (flags | 2) : (flags &~ 2); stream.writeInt32(flags); } } @@ -58274,12 +59136,13 @@ public void serializeToStream(AbstractSerializedData stream) { public static class TL_photoPathSize extends PhotoSize { public static int constructor = 0xd8214d41; - + public Path svgPath; public void readParams(AbstractSerializedData stream, boolean exception) { type = stream.readString(exception); bytes = stream.readByteArray(exception); w = h = 50; + svgPath = SvgHelper.doPath(SvgHelper.decompress(bytes)); } public void serializeToStream(AbstractSerializedData stream) { @@ -59118,6 +59981,9 @@ public static class Message extends TLObject { public boolean voiceTranscriptionForce; //custom public long voiceTranscriptionId; //custom public boolean premiumEffectWasPlayed; //custom + public String originalLanguage; //custom + public String translatedToLanguage; //custom + public TL_textWithEntities translatedText; // custom // NekoX Customs public String translatedMessage; //custom @@ -59230,7 +60096,11 @@ public static Message TLdeserialize(AbstractSerializedData stream, int construct if (result != null) { result.readParams(stream, exception); if (result.from_id == null) { - result.from_id = result.peer_id; + if (result.id < 0 && result.random_id == 0) { + result.from_id = new TL_peerUser(); + } else { + result.from_id = result.peer_id; + } } } return result; @@ -63391,6 +64261,170 @@ public void serializeToStream(AbstractSerializedData stream) { } } + public static class RequestPeerType extends TLObject { + + public int flags; + + public Boolean creator; + public TL_chatAdminRights user_admin_rights; + public TL_chatAdminRights bot_admin_rights; + public Boolean has_username; + public Boolean forum; + public Boolean bot_participant; + + public static RequestPeerType TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + RequestPeerType result = null; + switch (constructor) { + case TL_requestPeerTypeUser.constructor: + result = new TL_requestPeerTypeUser(); + break; + case TL_requestPeerTypeChat.constructor: + result = new TL_requestPeerTypeChat(); + break; + case TL_requestPeerTypeBroadcast.constructor: + result = new TL_requestPeerTypeBroadcast(); + break; + } + if (result == null && exception) { + throw new RuntimeException(String.format("can't parse magic %x in RequestPeerType", constructor)); + } + if (result != null) { + result.readParams(stream, exception); + } + return result; + } + 
} + + public static class TL_requestPeerTypeUser extends RequestPeerType { + public static final int constructor = 0x5f3b8a00; + + public Boolean bot; + public Boolean premium; + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + if ((flags & 1) != 0) { + bot = stream.readBool(exception); + } + if ((flags & 2) != 0) { + premium = stream.readBool(exception); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = bot != null ? (flags | 1) : (flags &~ 1); + flags = premium != null ? (flags | 2) : (flags &~ 2); + stream.writeInt32(flags); + if (bot != null) { + stream.writeBool(bot); + } + if (premium != null) { + stream.writeBool(premium); + } + } + } + + public static class TL_requestPeerTypeChat extends RequestPeerType { + public static final int constructor = 0xc9f06e1b; + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + bot_participant = (flags & 32) != 0; + creator = (flags & 1) != 0; + if ((flags & 8) != 0) { + has_username = stream.readBool(exception); + } + if ((flags & 16) != 0) { + forum = stream.readBool(exception); + } + if ((flags & 2) != 0) { + user_admin_rights = TL_chatAdminRights.TLdeserialize(stream, stream.readInt32(exception), exception); + } + if ((flags & 4) != 0) { + bot_admin_rights = TL_chatAdminRights.TLdeserialize(stream, stream.readInt32(exception), exception); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = creator != null && creator ? (flags | 1) : (flags &~ 1); + flags = user_admin_rights != null ? (flags | 2) : (flags &~ 2); + flags = bot_admin_rights != null ? (flags | 4) : (flags &~ 4); + flags = has_username != null ? (flags | 8) : (flags &~ 8); + flags = forum != null ? (flags | 16) : (flags &~ 16); + flags = bot_participant != null && bot_participant ? (flags | 32) : (flags &~ 32); + stream.writeInt32(flags); + if (has_username != null) { + stream.writeBool(has_username); + } + if (forum != null) { + stream.writeBool(forum); + } + if (user_admin_rights != null) { + user_admin_rights.serializeToStream(stream); + } + if (bot_admin_rights != null) { + bot_admin_rights.serializeToStream(stream); + } + } + } + + public static class TL_requestPeerTypeBroadcast extends RequestPeerType { + public static final int constructor = 0x339bef6c; + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + creator = (flags & 1) != 0; + if ((flags & 8) != 0) { + has_username = stream.readBool(exception); + } + if ((flags & 2) != 0) { + user_admin_rights = TL_chatAdminRights.TLdeserialize(stream, stream.readInt32(exception), exception); + } + if ((flags & 4) != 0) { + bot_admin_rights = TL_chatAdminRights.TLdeserialize(stream, stream.readInt32(exception), exception); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = creator != null && creator ? (flags | 1) : (flags &~ 1); + flags = user_admin_rights != null ? (flags | 2) : (flags &~ 2); + flags = bot_admin_rights != null ? (flags | 4) : (flags &~ 4); + flags = has_username != null ? 
(flags | 8) : (flags &~ 8); + stream.writeInt32(flags); + if (has_username != null) { + stream.writeBool(has_username); + } + if (user_admin_rights != null) { + user_admin_rights.serializeToStream(stream); + } + if (bot_admin_rights != null) { + bot_admin_rights.serializeToStream(stream); + } + } + } + + public static class TL_keyboardButtonRequestPeer extends KeyboardButton { + public static int constructor = 0xd0b468c; + + public RequestPeerType peer_type; + + public void readParams(AbstractSerializedData stream, boolean exception) { + text = stream.readString(exception); + button_id = stream.readInt32(exception); + peer_type = RequestPeerType.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(text); + stream.writeInt32(button_id); + peer_type.serializeToStream(stream); + } + } + public static class TL_messages_getAttachMenuBots extends TLObject { public static int constructor = 0x16fcc2cb; @@ -63534,10 +64568,59 @@ public void serializeToStream(AbstractSerializedData stream) { } } + public static class TL_messages_getBotApp extends TLObject { + public static int constructor = 0x34fdc5c3; + + public InputBotApp app; + public long hash; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return TL_messages_botApp.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + app.serializeToStream(stream); + stream.writeInt64(hash); + } + } + + public static class TL_messages_requestAppWebView extends TLObject { + public static int constructor = 0x8c5a3b3c; + + public int flags; + public boolean write_allowed; + public InputPeer peer; + public InputBotApp app; + public String start_param; + public TL_dataJSON theme_params; + public String platform; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return TL_appWebViewResultUrl.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = write_allowed ? (flags | 1) : (flags &~ 1); + stream.writeInt32(flags); + peer.serializeToStream(stream); + app.serializeToStream(stream); + if ((flags & 2) != 0) { + stream.writeString(start_param); + } + if ((flags & 4) != 0) { + theme_params.serializeToStream(stream); + } + stream.writeString(platform); + } + } + public static class TL_messages_requestSimpleWebView extends TLObject { public static int constructor = 0x299bec8e; public int flags; + public boolean from_switch_webview; public InputUser bot; public String url; public TL_dataJSON theme_params; @@ -63549,6 +64632,7 @@ public TLObject deserializeResponse(AbstractSerializedData stream, int construct public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); + flags = from_switch_webview ? 
(flags | 2) : (flags &~ 2); stream.writeInt32(flags); bot.serializeToStream(stream); stream.writeString(url); @@ -64087,6 +65171,237 @@ public void serializeToStream(AbstractSerializedData stream) { } } + public static abstract class InputBotApp extends TLObject { + + public static InputBotApp TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + InputBotApp result = null; + switch (constructor) { + case 0xa920bd7a: + result = new TL_inputBotAppID(); + break; + case 0x908c0407: + result = new TL_inputBotAppShortName(); + break; + } + if (result == null && exception) { + throw new RuntimeException(String.format("can't parse magic %x in InputBotApp", constructor)); + } + if (result != null) { + result.readParams(stream, exception); + } + return result; + } + } + + public static class TL_inputBotAppID extends InputBotApp { + public static int constructor = 0xa920bd7a; + + public long id; + public long access_hash; + + public void readParams(AbstractSerializedData stream, boolean exception) { + id = stream.readInt64(exception); + access_hash = stream.readInt64(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt64(id); + stream.writeInt64(access_hash); + } + } + + public static class TL_inputBotAppShortName extends InputBotApp { + public static int constructor = 0x908c0407; + + public InputUser bot_id; + public String short_name; + + public void readParams(AbstractSerializedData stream, boolean exception) { + bot_id = InputUser.TLdeserialize(stream, stream.readInt32(exception), exception); + short_name = stream.readString(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + bot_id.serializeToStream(stream); + stream.writeString(short_name); + } + } + + public static abstract class BotApp extends TLObject { + public int flags; + public long id; + public long access_hash; + public String short_name; + public String title; + public String description; + public Photo photo; + public Document document; + public long hash; + + public static BotApp TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + BotApp result = null; + switch (constructor) { + case 0x5da674b7: + result = new TL_botAppNotModified(); + break; + case 0x95fcd1d6: + result = new TL_botApp(); + break; + } + if (result == null && exception) { + throw new RuntimeException(String.format("can't parse magic %x in BotApp", constructor)); + } + if (result != null) { + result.readParams(stream, exception); + } + return result; + } + } + + public static class TL_botAppNotModified extends BotApp { + public static int constructor = 0x5da674b7; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + } + } + + public static class TL_botApp extends BotApp { + public static int constructor = 0x95fcd1d6; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + id = stream.readInt64(exception); + access_hash = stream.readInt64(exception); + short_name = stream.readString(exception); + title = stream.readString(exception); + description = stream.readString(exception); + photo = Photo.TLdeserialize(stream, stream.readInt32(exception), exception); + if ((flags & 1) != 0) { + document = Document.TLdeserialize(stream, stream.readInt32(exception), 
exception); + } + hash = stream.readInt64(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(flags); + stream.writeInt64(id); + stream.writeInt64(access_hash); + stream.writeString(short_name); + stream.writeString(title); + stream.writeString(description); + photo.serializeToStream(stream); + if ((flags & 1) != 0) { + document.serializeToStream(stream); + } + stream.writeInt64(hash); + } + } + + public static class TL_messages_botApp extends TLObject { + public static int constructor = 0xeb50adf5; + + public int flags; + public boolean inactive; + public boolean request_write_access; + public BotApp app; + + public static TL_messages_botApp TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_messages_botApp.constructor != constructor) { + if (exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_messages_botApp", constructor)); + } else { + return null; + } + } + TL_messages_botApp result = new TL_messages_botApp(); + result.readParams(stream, exception); + return result; + } + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + inactive = (flags & 1) != 0; + request_write_access = (flags & 2) != 0; + app = BotApp.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = inactive ? (flags | 1) : (flags &~ 1); + flags = request_write_access ? (flags | 2) : (flags &~ 2); + stream.writeInt32(flags); + app.serializeToStream(stream); + } + } + + public static class TL_appWebViewResultUrl extends TLObject { + public static int constructor = 0x3c1b4f0d; + + public String url; + + public static TL_appWebViewResultUrl TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_appWebViewResultUrl.constructor != constructor) { + if (exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_appWebViewResultUrl", constructor)); + } else { + return null; + } + } + TL_appWebViewResultUrl result = new TL_appWebViewResultUrl(); + result.readParams(stream, exception); + return result; + } + + public void readParams(AbstractSerializedData stream, boolean exception) { + url = stream.readString(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(url); + } + } + + public static class TL_inlineBotWebView extends TLObject { + public static int constructor = 0xb57295d5; + + public String text; + public String url; + + public static TL_inlineBotWebView TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_inlineBotWebView.constructor != constructor) { + if (exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_inlineBotWebView", constructor)); + } else { + return null; + } + } + TL_inlineBotWebView result = new TL_inlineBotWebView(); + result.readParams(stream, exception); + return result; + } + + public void readParams(AbstractSerializedData stream, boolean exception) { + text = stream.readString(exception); + url = stream.readString(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(text); + stream.writeString(url); + } + } + public static abstract class account_SavedRingtone 
extends TLObject { public static account_SavedRingtone TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { @@ -65502,6 +66817,28 @@ public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(period); } } + + public static class TL_messages_sendBotRequestedPeer extends TLObject { + public static int constructor = 0xfe38d01b; + + public InputPeer peer; + public int msg_id; + public int button_id; + public InputPeer requested_peer; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return Updates.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + peer.serializeToStream(stream); + stream.writeInt32(msg_id); + stream.writeInt32(button_id); + requested_peer.serializeToStream(stream); + } + } + public static class TL_contacts_exportContactToken extends TLObject { public static int constructor = 0xf8654027; @@ -65555,8 +66892,26 @@ public void serializeToStream(AbstractSerializedData stream) { } } + public static class TL_messageActionRequestedPeer extends MessageAction { + public static int constructor = 0xfe77345d; + + public int button_id; + public TLRPC.Peer peer; + + public void readParams(AbstractSerializedData stream, boolean exception) { + button_id = stream.readInt32(exception); + peer = TLRPC.Peer.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(button_id); + peer.serializeToStream(stream); + } + } + public static class TL_photos_uploadContactProfilePhoto extends TLObject { - public static int constructor = 0xb91a83bf; + public static int constructor = 0xe14c4a71; public int flags; public boolean suggest; @@ -65565,6 +66920,7 @@ public static class TL_photos_uploadContactProfilePhoto extends TLObject { public InputFile file; public InputFile video; public double video_start_ts; + public VideoSize video_emoji_markup; public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { return TL_photos_photo.TLdeserialize(stream, constructor, exception); @@ -65585,6 +66941,118 @@ public void serializeToStream(AbstractSerializedData stream) { if ((flags & 4) != 0) { stream.writeDouble(video_start_ts); } + if ((flags & 32) != 0) { + video_emoji_markup.serializeToStream(stream); + } + } + } + + public static abstract class EmojiList extends TLObject { + + public static EmojiList TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + EmojiList result = null; + switch (constructor) { + case 0x7a1e11d1: + result = new TL_emojiList(); + break; + case 0x481eadfa: + result = new TL_emojiListNotModified(); + break; + } + if (result == null && exception) { + throw new RuntimeException(String.format("can't parse magic %x in EmojiList", constructor)); + } + if (result != null) { + result.readParams(stream, exception); + } + return result; + } + } + + public static class TL_emojiList extends EmojiList { + public static int constructor = 0x7a1e11d1; + + public long hash; + public ArrayList document_id = new ArrayList<>(); + + public void readParams(AbstractSerializedData stream, boolean exception) { + hash = stream.readInt64(exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, 
got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + document_id.add(stream.readInt64(exception)); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt64(hash); + stream.writeInt32(0x1cb5c415); + int count = document_id.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + stream.writeInt64(document_id.get(a)); + } + } + } + + public static class TL_emojiListNotModified extends EmojiList { + public static int constructor = 0x481eadfa; + + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + } + } + + public static class TL_account_getDefaultProfilePhotoEmojis extends TLObject { + public static int constructor = 0xe2750328; + + public long hash; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return EmojiList.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt64(hash); + } + } + + public static class TL_account_getDefaultGroupPhotoEmojis extends TLObject { + public static int constructor = 0x915860ae; + + public long hash; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return EmojiList.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt64(hash); + } + } + + public static class TL_updateGroupInvitePrivacyForbidden extends Update { + public static int constructor = 0xccf08ad6; + + public long user_id; + + public void readParams(AbstractSerializedData stream, boolean exception) { + user_id = stream.readInt64(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt64(user_id); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBar.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBar.java index 7ae186a326..422d3baf7e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBar.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBar.java @@ -141,6 +141,7 @@ public boolean canOpenMenu() { private boolean centerScale; private CharSequence subtitle; private boolean drawBackButton; + private boolean attached; private View.OnTouchListener interceptTouchEventListener; private final Theme.ResourcesProvider resourcesProvider; @@ -469,11 +470,14 @@ public void setTitle(CharSequence value, Drawable rightDrawable) { if (titleTextView[0] != null) { titleTextView[0].setVisibility(value != null && !isSearchFieldVisible ? 
VISIBLE : INVISIBLE); titleTextView[0].setText(lastTitle = value); + if (attached && lastRightDrawable instanceof AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable) { + ((AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable) lastRightDrawable).setParentView(null); + } titleTextView[0].setRightDrawable(lastRightDrawable = rightDrawable); - titleTextView[0].setRightDrawableOnClick(rightDrawableOnClickListener); - if (rightDrawable instanceof AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable) { - ((AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable) rightDrawable).setParentView(titleTextView[0]); + if (attached && lastRightDrawable instanceof AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable) { + ((AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable) lastRightDrawable).setParentView(titleTextView[0]); } + titleTextView[0].setRightDrawableOnClick(rightDrawableOnClickListener); } fromBottom = false; } @@ -1719,6 +1723,7 @@ public boolean hasOverlappingRendering() { @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); + attached = true; ellipsizeSpanAnimator.onAttachedToWindow(); if (SharedConfig.noStatusBar && actionModeVisible) { if (ColorUtils.calculateLuminance(actionModeColor) < 0.7f) { @@ -1727,11 +1732,15 @@ protected void onAttachedToWindow() { AndroidUtilities.setLightStatusBar(((Activity) getContext()).getWindow(), true); } } + if (lastRightDrawable instanceof AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable) { + ((AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable) lastRightDrawable).setParentView(titleTextView[0]); + } } @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); + attached = false; ellipsizeSpanAnimator.onDetachedFromWindow(); if (SharedConfig.noStatusBar && actionModeVisible) { if (actionBarColor == 0) { @@ -1744,6 +1753,9 @@ protected void onDetachedFromWindow() { } } } + if (lastRightDrawable instanceof AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable) { + ((AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable) lastRightDrawable).setParentView(null); + } } public ActionBarMenu getActionMode() { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarLayout.java index 5192bd1c79..6769b63e97 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarLayout.java @@ -25,6 +25,7 @@ import android.graphics.drawable.ColorDrawable; import android.graphics.drawable.Drawable; import android.os.Build; +import android.text.TextUtils; import android.view.Gravity; import android.view.HapticFeedbackConstants; import android.view.KeyEvent; @@ -45,6 +46,7 @@ import androidx.core.math.MathUtils; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.BuildVars; import org.telegram.messenger.FileLog; import org.telegram.messenger.ImageLoader; import org.telegram.messenger.MessagesController; @@ -69,6 +71,7 @@ public class ActionBarLayout extends FrameLayout implements INavigationLayout, FloatingDebugProvider { public boolean highlightActionButtons = false; + private boolean attached; @Override public void setHighlightActionButtons(boolean highlightActionButtons) { @@ -256,6 +259,7 @@ public void processMenuButtonsTouch(MotionEvent event) { ripple.setState(shouldBeEnabled ? 
new int[]{android.R.attr.state_pressed, android.R.attr.state_enabled} : new int[]{}); if (shouldBeEnabled) { try { + if (!NekoConfig.disableVibration.Bool()) button.performHapticFeedback(HapticFeedbackConstants.TEXT_HANDLE_MOVE, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); } catch (Exception ignore) {} } @@ -698,7 +702,7 @@ private void onSlideAnimationEnd(final boolean backAnimation) { lastFragment.setParentLayout(null); fragmentsStack.remove(fragmentsStack.size() - 1); - onFragmentStackChanged(); + onFragmentStackChanged("onSlideAnimationEnd"); LayoutContainer temp = containerView; containerView = containerViewBack; @@ -1029,7 +1033,7 @@ private void presentFragmentInternalRemoveOld(boolean removeLast, final BaseFrag fragment.onFragmentDestroy(); fragment.setParentLayout(null); fragmentsStack.remove(fragment); - onFragmentStackChanged(); + onFragmentStackChanged("presentFragmentInternalRemoveOld"); } else { if (fragment.fragmentView != null) { ViewGroup parent = (ViewGroup) fragment.fragmentView.getParent(); @@ -1074,7 +1078,9 @@ public void run() { } long newTime = System.nanoTime() / 1000000; long dt = newTime - lastFrameTime; - if (dt > 18) { + if (dt > 40 && first) { + dt = 0; + } else if (dt > 18) { dt = 18; } lastFrameTime = newTime; @@ -1222,6 +1228,7 @@ public boolean presentFragment(NavigationParams params) { parent.removeView(fragmentView); } } + View wrappedView = fragmentView; containerViewBack.addView(wrappedView); int menuHeight = 0; @@ -1269,8 +1276,10 @@ public boolean presentFragment(NavigationParams params) { fragment.actionBar.setTitleOverlayText(titleOverlayText, titleOverlayTextId, overlayAction); } fragmentsStack.add(fragment); - onFragmentStackChanged(); + + onFragmentStackChanged("presentFragment"); fragment.onResume(); + currentActionBar = fragment.actionBar; if (!fragment.hasOwnBackground && fragmentView.getBackground() == null) { fragmentView.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); @@ -1440,7 +1449,7 @@ public void run() { } }; } - AndroidUtilities.runOnUIThread(waitingForKeyboardCloseRunnable, SharedConfig.smoothKeyboard ? 
250 : 200); + AndroidUtilities.runOnUIThread(waitingForKeyboardCloseRunnable, 250); } else if (fragment.needDelayOpenAnimation()) { delayedOpenAnimationRunnable = new Runnable() { @Override @@ -1490,11 +1499,12 @@ public void setFragmentStackChangedListener(Runnable onFragmentStackChanged) { this.onFragmentStackChangedListener = onFragmentStackChanged; } - private void onFragmentStackChanged() { + private void onFragmentStackChanged(String action) { if (onFragmentStackChangedListener != null) { onFragmentStackChangedListener.run(); } ImageLoader.getInstance().onFragmentStackChanged(); + checkBlackScreen(action); } @Override @@ -1502,8 +1512,11 @@ public boolean addFragmentToStack(BaseFragment fragment, int position) { if (delegate != null && !delegate.needAddFragmentToStack(fragment, this) || !fragment.onFragmentCreate()) { return false; } + if (fragmentsStack.contains(fragment)) { + return false; + } fragment.setParentLayout(this); - if (position == -1) { + if (position == -1 || position == INavigationLayout.FORCE_NOT_ATTACH_VIEW) { if (!fragmentsStack.isEmpty()) { BaseFragment previousFragment = fragmentsStack.get(fragmentsStack.size() - 1); previousFragment.onPause(); @@ -1522,14 +1535,49 @@ public boolean addFragmentToStack(BaseFragment fragment, int position) { } } fragmentsStack.add(fragment); - onFragmentStackChanged(); + if (position != INavigationLayout.FORCE_NOT_ATTACH_VIEW) { + attachView(fragment); + fragment.onResume(); + fragment.onTransitionAnimationEnd(false, true); + fragment.onTransitionAnimationEnd(true, true); + fragment.onBecomeFullyVisible(); + } + onFragmentStackChanged("addFragmentToStack " + position); } else { fragmentsStack.add(position, fragment); - onFragmentStackChanged(); + onFragmentStackChanged("addFragmentToStack"); } return true; } + private void attachView(BaseFragment fragment) { + View fragmentView = fragment.fragmentView; + if (fragmentView == null) { + fragmentView = fragment.createView(parentActivity); + } else { + ViewGroup parent = (ViewGroup) fragmentView.getParent(); + if (parent != null) { + fragment.onRemoveFromParent(); + parent.removeView(fragmentView); + } + } + if (!fragment.hasOwnBackground && fragmentView.getBackground() == null) { + fragmentView.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + } + containerView.addView(fragmentView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + if (fragment.actionBar != null && fragment.actionBar.shouldAddToContainer()) { + if (removeActionBarExtraHeight) { + fragment.actionBar.setOccupyStatusBar(false); + } + ViewGroup parent = (ViewGroup) fragment.actionBar.getParent(); + if (parent != null) { + parent.removeView(fragment.actionBar); + } + containerView.addView(fragment.actionBar); + fragment.actionBar.setTitleOverlayText(titleOverlayText, titleOverlayTextId, overlayAction); + } + } + private void closeLastFragmentInternalRemoveOld(BaseFragment fragment) { fragment.finishing = true; fragment.onPause(); @@ -1539,7 +1587,7 @@ private void closeLastFragmentInternalRemoveOld(BaseFragment fragment) { containerViewBack.setVisibility(View.INVISIBLE); containerViewBack.setTranslationY(0); bringChildToFront(containerView); - onFragmentStackChanged(); + onFragmentStackChanged("closeLastFragmentInternalRemoveOld"); } @Override @@ -1753,7 +1801,7 @@ public void run() { Bulletin.getVisibleBulletin().hide(); } } - onFragmentStackChanged(); + onFragmentStackChanged("closeLastFragment"); } else { closeLastFragmentInternalRemoveOld(currentFragment); 
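// non-animated close path below: the closing fragment is removed immediately and its transition-end callbacks run synchronously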
currentFragment.onTransitionAnimationEnd(false, true); @@ -1767,7 +1815,7 @@ public void run() { layoutToIgnore = containerView; onCloseAnimationEndRunnable = () -> { - removeFragmentFromStackInternal(currentFragment); + removeFragmentFromStackInternal(currentFragment, false); setVisibility(GONE); if (backgroundView != null) { backgroundView.setVisibility(GONE); @@ -1800,7 +1848,7 @@ public void onAnimationEnd(Animator animation) { }); currentAnimation.start(); } else { - removeFragmentFromStackInternal(currentFragment); + removeFragmentFromStackInternal(currentFragment, false); setVisibility(GONE); if (backgroundView != null) { backgroundView.setVisibility(GONE); @@ -1811,7 +1859,7 @@ public void onAnimationEnd(Animator animation) { @Override public void bringToFront(int i) { - if (fragmentsStack.isEmpty()) { + if (fragmentsStack.isEmpty() || !fragmentsStack.isEmpty() && fragmentsStack.size() - 1 == i && fragmentsStack.get(i).fragmentView != null) { return; } for (int a = 0; a < i; a++) { @@ -1872,29 +1920,45 @@ public void showLastFragment() { bringToFront(fragmentsStack.size() - 1); } - private void removeFragmentFromStackInternal(BaseFragment fragment) { - fragment.onPause(); - fragment.onFragmentDestroy(); - fragment.setParentLayout(null); - fragmentsStack.remove(fragment); - onFragmentStackChanged(); + private void removeFragmentFromStackInternal(BaseFragment fragment, boolean allowFinishFragment) { + if (!fragmentsStack.contains(fragment)) { + return; + } + if (allowFinishFragment && fragmentsStack.get(fragmentsStack.size() - 1) == fragment) { + fragment.finishFragment(); + } else { + if (fragmentsStack.get(fragmentsStack.size() - 1) == fragment && fragmentsStack.size() > 1) { + fragment.finishFragment(false); + } else { + fragment.onPause(); + fragment.onFragmentDestroy(); + fragment.setParentLayout(null); + fragmentsStack.remove(fragment); + onFragmentStackChanged("removeFragmentFromStackInternal " + allowFinishFragment); + } + } } @Override - public void removeFragmentFromStack(BaseFragment fragment) { + public void removeFragmentFromStack(BaseFragment fragment, boolean immediate) { + if (((fragmentsStack.size() > 0 && fragmentsStack.get(fragmentsStack.size() - 1) == fragment) || (fragmentsStack.size() > 1 && fragmentsStack.get(fragmentsStack.size() - 2) == fragment))) { + onOpenAnimationEnd(); + onCloseAnimationEnd(); + } + checkBlackScreen("removeFragmentFromStack " + immediate); if (useAlphaAnimations && fragmentsStack.size() == 1 && AndroidUtilities.isTablet()) { closeLastFragment(true); } else { if (delegate != null && fragmentsStack.size() == 1 && AndroidUtilities.isTablet()) { delegate.needCloseLastFragment(this); } - removeFragmentFromStackInternal(fragment); + removeFragmentFromStackInternal(fragment, fragment.allowFinishFragmentInsteadOfRemoveFromStack() && !immediate); } } public void removeAllFragments() { for (int a = 0; a < fragmentsStack.size(); a++) { - removeFragmentFromStackInternal(fragmentsStack.get(a)); + removeFragmentFromStackInternal(fragmentsStack.get(a), false); a--; } } @@ -2341,7 +2405,7 @@ public void setFragmentPanTranslationOffset(int offset) { } @Override - public ViewGroup getOverlayContainerView() { + public FrameLayout getOverlayContainerView() { return this; } @@ -2393,4 +2457,43 @@ public static View findScrollingChild(ViewGroup parent, float x, float y) { return null; } + + ArrayList<String> lastActions = new ArrayList<>(); + Runnable debugBlackScreenRunnable = () -> { + if (attached && getLastFragment() != null && containerView.getChildCount() == 0)
{ + if (BuildVars.DEBUG_VERSION) { + FileLog.e(new RuntimeException(TextUtils.join(", ", lastActions))); + } + rebuildAllFragmentViews(true, true); + } + }; + + public void checkBlackScreen(String action) { +// if (!BuildVars.DEBUG_VERSION) { +// return; +// } + if (BuildVars.DEBUG_VERSION) { + lastActions.add(0, action + " " + fragmentsStack.size()); + if (lastActions.size() > 20) { + ArrayList<String> actions = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + actions.add(lastActions.get(i)); + } + lastActions = actions; + } + } + AndroidUtilities.cancelRunOnUIThread(debugBlackScreenRunnable); + AndroidUtilities.runOnUIThread(debugBlackScreenRunnable, 500); + } + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + attached = true; + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + attached = false; + } } \ No newline at end of file diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenu.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenu.java index e132fee1a4..013c763290 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenu.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenu.java @@ -14,16 +14,22 @@ import android.view.ViewGroup; import android.widget.LinearLayout; +import androidx.annotation.Nullable; + import org.telegram.messenger.AndroidUtilities; import org.telegram.ui.Adapters.FiltersView; import org.telegram.ui.Components.RLottieDrawable; +import java.util.ArrayList; + public class ActionBarMenu extends LinearLayout { public boolean drawBlur = true; protected ActionBar parentActionBar; protected boolean isActionMode; + private ArrayList<Integer> ids; + public ActionBarMenu(Context context, ActionBar layer) { super(context); setOrientation(LinearLayout.HORIZONTAL); @@ -95,13 +101,21 @@ public ActionBarMenuItem addItem(int id, int icon, CharSequence text, int backgr } public ActionBarMenuItem addItem(int id, int icon, CharSequence text, int backgroundColor, Drawable drawable, int width, CharSequence title, Theme.ResourcesProvider resourcesProvider) { + if (ids == null) { + ids = new ArrayList<>(); + } + ids.add(id); + return addItemAt(-1, id, icon, text, backgroundColor, drawable, width, title, resourcesProvider); + } + + protected ActionBarMenuItem addItemAt(int index, int id, int icon, CharSequence text, int backgroundColor, Drawable drawable, int width, CharSequence title, Theme.ResourcesProvider resourcesProvider) { ActionBarMenuItem menuItem = new ActionBarMenuItem(getContext(), this, backgroundColor, isActionMode ? parentActionBar.itemsActionModeColor : parentActionBar.itemsColor, text != null, resourcesProvider); menuItem.setTag(id); if (text != null) { menuItem.textView.setText(text); LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(width != 0 ?
width : ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.MATCH_PARENT); layoutParams.leftMargin = layoutParams.rightMargin = AndroidUtilities.dp(14); - addView(menuItem, layoutParams); + addView(menuItem, index, layoutParams); } else { if (drawable != null) { if (drawable instanceof RLottieDrawable) { @@ -112,7 +126,7 @@ public ActionBarMenuItem addItem(int id, int icon, CharSequence text, int backgr } else if (icon != 0) { menuItem.iconView.setImageResource(icon); } - addView(menuItem, new LinearLayout.LayoutParams(width, ViewGroup.LayoutParams.MATCH_PARENT)); + addView(menuItem, index, new LinearLayout.LayoutParams(width, ViewGroup.LayoutParams.MATCH_PARENT)); } menuItem.setOnClickListener(view -> { ActionBarMenuItem item = (ActionBarMenuItem) view; @@ -132,6 +146,181 @@ public ActionBarMenuItem addItem(int id, int icon, CharSequence text, int backgr return menuItem; } + public LazyItem lazilyAddItem(int id, int icon, Theme.ResourcesProvider resourcesProvider) { + return lazilyAddItem(id, icon, null, isActionMode ? parentActionBar.itemsActionModeBackgroundColor : parentActionBar.itemsBackgroundColor, null, AndroidUtilities.dp(48), null, resourcesProvider); + } + + public LazyItem lazilyAddItem(int id, int icon, CharSequence text, int backgroundColor, Drawable drawable, int width, CharSequence title, Theme.ResourcesProvider resourcesProvider) { + if (ids == null) { + ids = new ArrayList<>(); + } + ids.add(id); + return new LazyItem(this, id, icon, text, backgroundColor, drawable, width, title, resourcesProvider); + } + + public static class LazyItem { + ActionBarMenu parent; + + int id; + int icon; + CharSequence text; + CharSequence contentDescription; + int backgroundColor; + Drawable drawable; + int width; + CharSequence title; + Theme.ResourcesProvider resourcesProvider; + + float alpha = 1; + Boolean overrideMenuClick; + Boolean allowCloseAnimation; + Boolean isSearchField; + ActionBarMenuItem.ActionBarMenuItemSearchListener searchListener; + CharSequence searchFieldHint; + + public LazyItem(ActionBarMenu parent, int id, int icon, CharSequence text, int backgroundColor, Drawable drawable, int width, CharSequence title, Theme.ResourcesProvider resourcesProvider) { + this.parent = parent; + this.id = id; + this.icon = icon; + this.text = text; + this.backgroundColor = backgroundColor; + this.drawable = drawable; + this.width = width; + this.title = title; + this.resourcesProvider = resourcesProvider; + } + + int visibility = GONE; + ActionBarMenuItem cell; + + public void setVisibility(int visibility) { + if (this.visibility != visibility) { + this.visibility = visibility; + if (visibility == VISIBLE) { + add(); + } + if (cell != null) { + cell.setVisibility(visibility); + } + } + } + + public int getVisibility() { + return visibility; + } + + Object tag; + public Object getTag() { + return tag; + } + public void setTag(Object tag) { + this.tag = tag; + } + + @Nullable + public ActionBarMenuItem getView() { + return cell; + } + + public ActionBarMenuItem createView() { + add(); + return cell; + } + + public void setContentDescription(CharSequence contentDescription) { + this.contentDescription = contentDescription; + if (cell != null) { + cell.setContentDescription(contentDescription); + } + } + + public void setOverrideMenuClick(boolean value) { + overrideMenuClick = value; + if (cell != null) { + cell.setOverrideMenuClick(value); + } + } + + public void setAllowCloseAnimation(boolean value) { + allowCloseAnimation = value; + if (cell != null) { + 
cell.setAllowCloseAnimation(allowCloseAnimation); + } + } + + public void setIsSearchField(boolean value) { + isSearchField = value; + if (cell != null) { + cell.setIsSearchField(isSearchField); + } + } + + public void setActionBarMenuItemSearchListener(ActionBarMenuItem.ActionBarMenuItemSearchListener listener) { + this.searchListener = listener; + if (cell != null) { + cell.setActionBarMenuItemSearchListener(listener); + } + } + + public void setSearchFieldHint(CharSequence searchFieldHint) { + this.searchFieldHint = searchFieldHint; + if (cell != null) { + cell.setSearchFieldHint(searchFieldHint); + } + } + + public void setAlpha(float alpha) { + this.alpha = alpha; + if (cell != null) { + cell.setAlpha(alpha); + } + } + + public void add() { + if (cell != null) { + return; + } + + int index = parent.getChildCount(); + if (parent.ids != null) { + int myIndex = parent.ids.indexOf(this.id); + for (int i = 0; i < parent.getChildCount(); ++i) { + View child = parent.getChildAt(i); + Object tag = child.getTag(); + if (tag instanceof Integer) { + int thisId = (Integer) tag; + int thisIndex = parent.ids.indexOf(thisId); + if (thisIndex > myIndex) { + index = i; + break; + } + } + } + } + cell = parent.addItemAt(index, id, icon, text, backgroundColor, drawable, width, title, resourcesProvider); + cell.setVisibility(visibility); + if (contentDescription != null) { + cell.setContentDescription(contentDescription); + } + if (allowCloseAnimation != null) { + cell.setAllowCloseAnimation(allowCloseAnimation); + } + if (overrideMenuClick != null) { + cell.setOverrideMenuClick(overrideMenuClick); + } + if (isSearchField != null) { + cell.setIsSearchField(isSearchField); + } + if (searchListener != null) { + cell.setActionBarMenuItemSearchListener(searchListener); + } + if (searchFieldHint != null) { + cell.setSearchFieldHint(searchFieldHint); + } + cell.setAlpha(alpha); + } + } + public void hideAllPopupMenus() { int count = getChildCount(); for (int a = 0; a < count; a++) { @@ -176,6 +365,9 @@ public void onItemClick(int id) { } public void clearItems() { + if (ids != null) { + ids.clear(); + } removeAllViews(); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuItem.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuItem.java index f9b2beeb63..8ed4ea933c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuItem.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuItem.java @@ -49,6 +49,7 @@ import android.widget.HorizontalScrollView; import android.widget.ImageView; import android.widget.LinearLayout; +import android.widget.PopupWindow; import android.widget.TextView; import androidx.annotation.Nullable; @@ -60,6 +61,7 @@ import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; import org.telegram.messenger.UserConfig; +import org.telegram.messenger.Utilities; import org.telegram.tgnet.TLRPC; import org.telegram.ui.Adapters.FiltersView; import org.telegram.ui.Components.BackupImageView; @@ -72,6 +74,7 @@ import org.telegram.ui.Components.RLottieImageView; import java.util.ArrayList; +import java.util.HashMap; public class ActionBarMenuItem extends FrameLayout { @@ -153,6 +156,7 @@ public interface ActionBarMenuItemDelegate { private AnimatorSet clearButtonAnimator; private View searchAdditionalButton; protected RLottieImageView iconView; + private int iconViewResId; protected TextView textView; private FrameLayout searchContainer; private boolean isSearchField; @@ -183,6 
+187,7 @@ public interface ActionBarMenuItemDelegate { private ArrayList currentSearchFilters = new ArrayList<>(); private int selectedFilterIndex = -1; private int notificationIndex = -1; + private float dimMenu; private float transitionOffset; private View showSubMenuFrom; @@ -392,14 +397,6 @@ private void createPopupLayout() { popupWindow.dismiss(); } }); - - if (popupLayout.getSwipeBack() != null) { - popupLayout.getSwipeBack().setOnClickListener(view -> { - if (popupWindow != null) { - popupWindow.dismiss(); - } - }); - } } public void removeAllSubItems() { @@ -654,7 +651,7 @@ public void setupPopupRadialSelectors(int color) { } public boolean hasSubMenu() { - return popupLayout != null; + return popupLayout != null || lazyList != null && !lazyList.isEmpty(); } public ActionBarPopupWindow.ActionBarPopupWindowLayout getPopupLayout() { @@ -689,6 +686,9 @@ public void setShowOnTop(boolean showOnTop) { private boolean showOnTop; public void toggleSubMenu(View topView, View fromView) { + if (popupWindow == null || !popupWindow.isShowing()) { + layoutLazyItems(); + } if (popupLayout == null || parentMenu != null && parentMenu.isActionMode && parentMenu.parentActionBar != null && !parentMenu.parentActionBar.isActionModeShowed()) { return; } @@ -710,6 +710,7 @@ public void toggleSubMenu(View topView, View fromView) { ((ViewGroup) popupLayout.getParent()).removeView(popupLayout); } ViewGroup container = popupLayout; + View setMinWidth = null; if (topView != null) { LinearLayout linearLayout = new LinearLayout(getContext()) { @Override @@ -725,17 +726,24 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { }; linearLayout.setOrientation(LinearLayout.VERTICAL); FrameLayout frameLayout = new FrameLayout(getContext()); + setMinWidth = frameLayout; frameLayout.setAlpha(0f); - frameLayout.animate().alpha(1f).setDuration(100).start(); - Drawable drawable = ContextCompat.getDrawable(getContext(), R.drawable.popup_fixed_alert2).mutate(); - drawable.setColorFilter(new PorterDuffColorFilter(popupLayout.getBackgroundColor(), PorterDuff.Mode.MULTIPLY)); - - frameLayout.setBackground(drawable); - frameLayout.addView(topView); - linearLayout.addView(frameLayout, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT)); - linearLayout.addView(popupLayout, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, 0, 0, -AndroidUtilities.dp(4), 0, 0)); + frameLayout.animate().alpha(1f).setDuration(100).setStartDelay(popupLayout.shownFromBottom ? 
165 : 0).start(); + if (topView.getParent() instanceof ViewGroup) { + ((ViewGroup) topView.getParent()).removeView(topView); + } + if (topView instanceof ActionBarMenuSubItem || topView instanceof LinearLayout) { + Drawable drawable = ContextCompat.getDrawable(getContext(), R.drawable.popup_fixed_alert2).mutate(); + drawable.setColorFilter(new PorterDuffColorFilter(popupLayout.getBackgroundColor(), PorterDuff.Mode.MULTIPLY)); + frameLayout.setBackground(drawable); + } + frameLayout.addView(topView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + linearLayout.addView(frameLayout, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + linearLayout.addView(popupLayout, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, 0, 0, -10, 0, 0)); container = linearLayout; popupLayout.setTopView(frameLayout); + } else { + popupLayout.setTopView(null); } popupWindow = new ActionBarPopupWindow(container, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT); if (animationEnabled && Build.VERSION.SDK_INT >= 19) { @@ -770,6 +778,12 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { // if (measurePopup) { container.measure(MeasureSpec.makeMeasureSpec(AndroidUtilities.displaySize.x - AndroidUtilities.dp(40), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.displaySize.y, MeasureSpec.AT_MOST)); + if (setMinWidth != null && setMinWidth.getLayoutParams() != null && popupLayout.getSwipeBack() != null) { + View mainScrollView = popupLayout.getSwipeBack().getChildAt(0); + if (mainScrollView != null && mainScrollView.getMeasuredWidth() > 0) { + setMinWidth.getLayoutParams().width = mainScrollView.getMeasuredWidth() + AndroidUtilities.dp(16); + } + } measurePopup = false; //} processedPopupClick = false; @@ -780,11 +794,29 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { popupLayout.getSwipeBack().closeForeground(false); } popupWindow.startAnimation(); + if (dimMenu > 0) { + popupWindow.dimBehind(dimMenu); + } + } + + public void setDimMenu(float dimAmount) { + dimMenu = dimAmount; } + public void toggleSubMenu() { toggleSubMenu(null, null); } + public void setOnMenuDismiss(Utilities.Callback onMenuDismiss) { + if (popupWindow != null) { + popupWindow.setOnDismissListener(() -> { + if (onMenuDismiss != null) { + onMenuDismiss.run(processedPopupClick); + } + }); + } + } + public void openSearch(boolean openKeyboard) { checkCreateSearchField(); if (searchContainer == null || searchContainer.getVisibility() == VISIBLE || parentMenu == null) { @@ -1044,6 +1076,7 @@ public void setIcon(Drawable drawable) { } else { iconView.setImageDrawable(drawable); } + iconViewResId = 0; } public RLottieImageView getIconView() { @@ -1058,7 +1091,18 @@ public void setIcon(int resId) { if (iconView == null) { return; } - iconView.setImageResource(resId); + iconView.setImageResource(iconViewResId = resId); + } + + public void setIcon(int resId, boolean animated) { + if (iconView == null || iconViewResId == resId) { + return; + } + if (animated) { + AndroidUtilities.updateImageViewImageAnimated(iconView, iconViewResId = resId); + } else { + iconView.setImageResource(iconViewResId = resId); + } } public void setText(CharSequence text) { @@ -1389,7 +1433,7 @@ public void afterTextChanged(Editable s) { } }); - searchField.setImeOptions(EditorInfo.IME_FLAG_NO_FULLSCREEN | EditorInfo.IME_ACTION_SEARCH); + searchField.setImeOptions(EditorInfo.IME_FLAG_NO_FULLSCREEN | 
EditorInfo.IME_ACTION_SEARCH | EditorInfo.IME_FLAG_NAVIGATE_PREVIOUS | EditorInfo.IME_FLAG_NAVIGATE_NEXT); searchField.setTextIsSelectable(false); searchField.setHighlightColor(getThemedColor(Theme.key_chat_inTextSelectionHighlight)); searchField.setHandlesColor(getThemedColor(Theme.key_chat_TextSelectionCursor)); @@ -1792,6 +1836,10 @@ private void updateOrShowPopup(boolean show, boolean update) { } public void hideSubItem(int id) { + Item lazyItem = findLazyItem(id); + if (lazyItem != null) { + lazyItem.setVisibility(GONE); + } if (popupLayout == null) { return; } @@ -1855,6 +1903,10 @@ public void showSubItem(int id) { } public void showSubItem(int id, boolean animated) { + Item lazyItem = findLazyItem(id); + if (lazyItem != null) { + lazyItem.setVisibility(VISIBLE); + } if (popupLayout == null) { return; } @@ -2100,13 +2152,259 @@ private int getThemedColor(String key) { } public ActionBarPopupWindow.GapView addColoredGap() { + return addColoredGap(-1); + } + + public ActionBarPopupWindow.GapView addColoredGap(int id) { createPopupLayout(); ActionBarPopupWindow.GapView gap = new ActionBarPopupWindow.GapView(getContext(), resourcesProvider, Theme.key_actionBarDefaultSubmenuSeparator); + if (id != -1) { + gap.setTag(id); + } gap.setTag(R.id.fit_width_tag, 1); popupLayout.addView(gap, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 8)); return gap; } + // lazy layout that creates menu items only when they are actually needed + // planned to eventually replace the eager logic above + public static final int VIEW_TYPE_SUBITEM = 0; + public static final int VIEW_TYPE_COLORED_GAP = 1; + public static final int VIEW_TYPE_SWIPEBACKITEM = 2; + + private ArrayList<Item> lazyList; + private HashMap<Integer, Item> lazyMap; + + public static class Item { + public int viewType; + + public int id; + public int icon; + public Drawable iconDrawable; + public CharSequence text; + public boolean dismiss, needCheck; + public View viewToSwipeBack; + + private View view; + private View.OnClickListener overrideClickListener; + private int visibility = VISIBLE, rightIconVisibility = VISIBLE; + + private Integer textColor, iconColor; + + private Item(int viewType) { + this.viewType = viewType; + } + + private static Item asSubItem(int id, int icon, Drawable iconDrawable, CharSequence text, boolean dismiss, boolean needCheck) { + Item item = new Item(VIEW_TYPE_SUBITEM); + item.id = id; + item.icon = icon; + item.iconDrawable = iconDrawable; + item.text = text; + item.dismiss = dismiss; + item.needCheck = needCheck; + return item; + } + private static Item asColoredGap() { + return new Item(VIEW_TYPE_COLORED_GAP); + } + private static Item asSwipeBackItem(int icon, Drawable iconDrawable, String text, View viewToSwipeBack) { + Item item = new Item(VIEW_TYPE_SWIPEBACKITEM); + item.icon = icon; + item.iconDrawable = iconDrawable; + item.text = text; + item.viewToSwipeBack = viewToSwipeBack; + return item; + } + + private View add(ActionBarMenuItem parent) { + parent.createPopupLayout(); + if (view != null) { + parent.popupLayout.addView(view); + } else if (viewType == VIEW_TYPE_SUBITEM) { + ActionBarMenuSubItem cell = new ActionBarMenuSubItem(parent.getContext(), needCheck, false, false, parent.resourcesProvider); + cell.setTextAndIcon(text, icon, iconDrawable); + cell.setMinimumWidth(AndroidUtilities.dp(196)); + cell.setTag(id); + parent.popupLayout.addView(cell); + LinearLayout.LayoutParams layoutParams = (LinearLayout.LayoutParams) cell.getLayoutParams(); + if (LocaleController.isRTL) { + layoutParams.gravity = Gravity.RIGHT; + } +
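// the lazily created cell below uses the standard sub-item row size: full width, 48dp tall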
layoutParams.width = LayoutHelper.MATCH_PARENT; + layoutParams.height = AndroidUtilities.dp(48); + cell.setLayoutParams(layoutParams); + cell.setOnClickListener(view -> { + if (parent.popupWindow != null && parent.popupWindow.isShowing()) { + if (dismiss) { + if (parent.processedPopupClick) { + return; + } + parent.processedPopupClick = true; + parent.popupWindow.dismiss(parent.allowCloseAnimation); + } + } + if (parent.parentMenu != null) { + parent.parentMenu.onItemClick((Integer) view.getTag()); + } else if (parent.delegate != null) { + parent.delegate.onItemClick((Integer) view.getTag()); + } + }); + if (textColor != null && iconColor != null) { + cell.setColors(textColor, iconColor); + } + view = cell; + } else if (viewType == VIEW_TYPE_COLORED_GAP) { + ActionBarPopupWindow.GapView gap = new ActionBarPopupWindow.GapView(parent.getContext(), parent.resourcesProvider, Theme.key_actionBarDefaultSubmenuSeparator); + gap.setTag(R.id.fit_width_tag, 1); + parent.popupLayout.addView(gap, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 8)); + view = gap; + } else if (viewType == VIEW_TYPE_SWIPEBACKITEM) { + ActionBarMenuSubItem cell = new ActionBarMenuSubItem(parent.getContext(), false, false, false, parent.resourcesProvider); + cell.setTextAndIcon(text, icon, iconDrawable); + cell.setMinimumWidth(AndroidUtilities.dp(196)); + cell.setRightIcon(R.drawable.msg_arrowright); + cell.getRightIcon().setVisibility(rightIconVisibility); + parent.popupLayout.addView(cell); + LinearLayout.LayoutParams layoutParams = (LinearLayout.LayoutParams) cell.getLayoutParams(); + if (LocaleController.isRTL) { + layoutParams.gravity = Gravity.RIGHT; + } + layoutParams.width = LayoutHelper.MATCH_PARENT; + layoutParams.height = AndroidUtilities.dp(48); + cell.setLayoutParams(layoutParams); + int swipeBackIndex = parent.popupLayout.addViewToSwipeBack(viewToSwipeBack); + cell.openSwipeBackLayout = () -> { + if (parent.popupLayout.getSwipeBack() != null) { + parent.popupLayout.getSwipeBack().openForeground(swipeBackIndex); + } + }; + cell.setOnClickListener(view -> { + cell.openSwipeBack(); + }); + parent.popupLayout.swipeBackGravityRight = true; + if (textColor != null && iconColor != null) { + cell.setColors(textColor, iconColor); + } + view = cell; + } + if (view != null) { + view.setVisibility(visibility); + if (overrideClickListener != null) { + view.setOnClickListener(overrideClickListener); + } + } + return view; + } + + public void setVisibility(int visibility) { + this.visibility = visibility; + if (view != null) { + view.setVisibility(visibility); + } + } + + public void setVisibility(boolean bvisibility) { + int visibility = bvisibility ? 
View.VISIBLE : View.GONE; + this.visibility = visibility; + if (view != null) { + view.setVisibility(visibility); + } + } + + public void setOnClickListener(View.OnClickListener onClickListener) { + overrideClickListener = onClickListener; + if (view != null) { + view.setOnClickListener(overrideClickListener); + } + } + + public void openSwipeBack() { + if (view instanceof ActionBarMenuSubItem) { + ((ActionBarMenuSubItem) view).openSwipeBack(); + } + } + + public void setText(CharSequence text) { + this.text = text; + if (view instanceof ActionBarMenuSubItem) { + ((ActionBarMenuSubItem) view).setText(text); + } + } + + public void setIcon(int icon) { + if (icon != this.icon) { + this.icon = icon; + if (view instanceof ActionBarMenuSubItem) { + ((ActionBarMenuSubItem) view).setIcon(icon); + } + } + } + + public void setRightIconVisibility(int visibility) { + if (rightIconVisibility != visibility) { + rightIconVisibility = visibility; + if (view instanceof ActionBarMenuSubItem) { + ((ActionBarMenuSubItem) view).getRightIcon().setVisibility(rightIconVisibility); + } + } + } + + public void setColors(int textColor, int iconColor) { + if (this.textColor == null || this.iconColor == null || this.textColor != textColor || this.iconColor != iconColor) { + this.textColor = textColor; + this.iconColor = iconColor; + if (view instanceof ActionBarMenuSubItem) { + ((ActionBarMenuSubItem) view).setColors(textColor, iconColor); + } + } + } + } + public Item lazilyAddSwipeBackItem(int icon, Drawable iconDrawable, String text, View viewToSwipeBack) { + return putLazyItem(Item.asSwipeBackItem(icon, iconDrawable, text, viewToSwipeBack)); + } + public Item lazilyAddSubItem(int id, int icon, CharSequence text) { + return lazilyAddSubItem(id, icon, null, text, true, false); + } + public Item lazilyAddSubItem(int id, int icon, Drawable iconDrawable, CharSequence text, boolean dismiss, boolean needCheck) { + return putLazyItem(Item.asSubItem(id, icon, iconDrawable, text, dismiss, needCheck)); + } + public Item lazilyAddColoredGap() { + return putLazyItem(Item.asColoredGap()); + } + + private Item putLazyItem(Item item) { + if (item == null) { + return item; + } + if (lazyList == null) { + lazyList = new ArrayList<>(); + } + lazyList.add(item); + if (lazyMap == null) { + lazyMap = new HashMap<>(); + } + lazyMap.put(item.id, item); + return item; + } + + private Item findLazyItem(int id) { + if (lazyMap == null) { + return null; + } + return lazyMap.get(id); + } + + private void layoutLazyItems() { + if (lazyList == null) { + return; + } + for (int i = 0; i < lazyList.size(); ++i) { + lazyList.get(i).add(this); + } + lazyList.clear(); + } + public static ActionBarMenuSubItem addItem(ActionBarPopupWindow.ActionBarPopupWindowLayout windowLayout, int icon, CharSequence text, boolean needCheck, Theme.ResourcesProvider resourcesProvider) { return addItem(false, false, windowLayout, icon, text, needCheck, resourcesProvider); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuSlider.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuSlider.java new file mode 100644 index 0000000000..703685c531 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuSlider.java @@ -0,0 +1,557 @@ +package org.telegram.ui.ActionBar; + +import static org.telegram.messenger.AndroidUtilities.dp; +import static org.telegram.messenger.AndroidUtilities.dpf2; + +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import 
android.animation.ValueAnimator; +import android.content.Context; +import android.graphics.Bitmap; +import android.graphics.BitmapShader; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.ColorFilter; +import android.graphics.ColorMatrix; +import android.graphics.ColorMatrixColorFilter; +import android.graphics.LinearGradient; +import android.graphics.Matrix; +import android.graphics.Paint; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.Shader; +import android.graphics.drawable.BitmapDrawable; +import android.graphics.drawable.ColorDrawable; +import android.graphics.drawable.Drawable; +import android.os.Bundle; +import android.text.TextUtils; +import android.util.Pair; +import android.view.Gravity; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewConfiguration; +import android.view.accessibility.AccessibilityNodeInfo; +import android.widget.FrameLayout; + +import androidx.annotation.NonNull; +import androidx.core.graphics.ColorUtils; +import androidx.core.math.MathUtils; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LiteMode; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.R; +import org.telegram.messenger.SharedConfig; +import org.telegram.messenger.Utilities; +import org.telegram.ui.Components.AnimatedFloat; +import org.telegram.ui.Components.AnimatedTextView; +import org.telegram.ui.Components.CubicBezierInterpolator; +import org.telegram.ui.Components.FloatSeekBarAccessibilityDelegate; +import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.MotionBackgroundDrawable; +import org.telegram.ui.Components.SeekBarAccessibilityDelegate; +import org.telegram.ui.Components.SeekBarView; +import org.telegram.ui.Components.SpeedIconDrawable; + +public class ActionBarMenuSlider extends FrameLayout { + + private static final float BLUR_RADIUS = 8f; + + private float value = .5f; + private Utilities.Callback2<Float, Boolean> onValueChange; + + private AnimatedTextView.AnimatedTextDrawable textDrawable; + + private AnimatedFloat blurBitmapAlpha = new AnimatedFloat(1, this, 0, 320, CubicBezierInterpolator.EASE_OUT_QUINT); + private Bitmap blurBitmap; + private BitmapShader blurBitmapShader; + private Matrix blurBitmapMatrix; + + private int[] location = new int[2]; + + private float roundRadiusDp = 0; + private boolean drawShadow, drawBlur; + + protected Theme.ResourcesProvider resourcesProvider; + + private Paint shadowPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private Paint backgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private Paint blurPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private Paint brightenBlurPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private Paint darkenBlurPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private Paint pseudoBlurPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private Paint fillPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private boolean backgroundDark; + + private boolean blurIsInChat = true; + + private LinearGradient pseudoBlurGradient; + private int pseudoBlurColor1, pseudoBlurColor2; + private Matrix pseudoBlurMatrix; + private int pseudoBlurWidth; + + public ActionBarMenuSlider(Context context) { + this(context, null); + } + + @Override + public boolean onInterceptTouchEvent(MotionEvent ev) { + return false; + } + + public ActionBarMenuSlider(Context context, Theme.ResourcesProvider resourcesProvider) { + super(context); + this.resourcesProvider = 
resourcesProvider; + setWillNotDraw(false); + + textDrawable = new AnimatedTextView.AnimatedTextDrawable(false, true, true) { + @Override + public void invalidateSelf() { + ActionBarMenuSlider.this.invalidate(); + } + }; + textDrawable.setCallback(this); + textDrawable.setTypeface(AndroidUtilities.getTypeface(AndroidUtilities.TYPEFACE_ROBOTO_MEDIUM)); + textDrawable.setAnimationProperties(.3f, 0, 165, CubicBezierInterpolator.EASE_OUT_QUINT); + textDrawable.setTextSize(AndroidUtilities.dpf2(14)); + textDrawable.getPaint().setStyle(Paint.Style.FILL_AND_STROKE); + textDrawable.getPaint().setStrokeWidth(AndroidUtilities.dpf2(.3f)); + textDrawable.setGravity(LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT); + + shadowPaint.setColor(Color.TRANSPARENT); + shadowPaint.setShadowLayer(dpf2(1.33f), 0, dpf2(.33f), 0x3f000000); + + ColorMatrix colorMatrix = new ColorMatrix(); + AndroidUtilities.adjustSaturationColorMatrix(colorMatrix, -0.4f); + AndroidUtilities.adjustBrightnessColorMatrix(colorMatrix, .1f); + pseudoBlurPaint.setColorFilter(new ColorMatrixColorFilter(colorMatrix)); + + backgroundPaint.setColor(Theme.getColor(Theme.key_actionBarDefaultSubmenuBackground, resourcesProvider)); + backgroundDark = AndroidUtilities.computePerceivedBrightness(backgroundPaint.getColor()) <= 0.721f; + textDrawable.setTextColor(backgroundDark ? 0xffffffff : 0xff000000); + darkenBlurPaint.setColor(Theme.multAlpha(0xff000000, .025f)); + brightenBlurPaint.setColor(Theme.multAlpha(0xffffffff, .35f)); + } + + public float getValue() { + return value; + } + + private ValueAnimator valueAnimator; + + public void setValue(float value, boolean animated) { + if (valueAnimator != null) { + valueAnimator.cancel(); + valueAnimator = null; + } + + value = MathUtils.clamp(value, 0, 1); + + if (!animated) { + this.value = value; + invalidate(); + } else { + final float newValue = value; + valueAnimator = ValueAnimator.ofFloat(this.value, newValue); + valueAnimator.addUpdateListener(anm -> { + ActionBarMenuSlider.this.value = (float) anm.getAnimatedValue(); + invalidate(); + }); + valueAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + valueAnimator = null; + ActionBarMenuSlider.this.value = newValue; + invalidate(); + } + }); + valueAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + valueAnimator.setDuration(220); + valueAnimator.start(); + } + + String stringValue = getStringValue(value); + if (stringValue != null && !TextUtils.equals(textDrawable.getText(), stringValue)) { + textDrawable.cancelAnimation(); + textDrawable.setText(stringValue, true); + } + fillPaint.setColor(getColorValue(value)); + } + + public void setBackgroundColor(int color) { + backgroundPaint.setColor(color); + backgroundDark = AndroidUtilities.computePerceivedBrightness(backgroundPaint.getColor()) <= 0.721f; + textDrawable.setTextColor(backgroundDark ? 
0xffffffff : 0xff000000); + } + + public void setTextColor(int color) { + textDrawable.setTextColor(color); + } + + protected String getStringValue(float value) { + return null; + } + + protected int getColorValue(float value) { + return Color.WHITE; + } + + private void updateValue(float value, boolean isFinal) { + setValue(value, false); + if (onValueChange != null) { + onValueChange.run(this.value, isFinal); + } + } + + public void setOnValueChange(Utilities.Callback2<Float, Boolean> onValueChange) { + this.onValueChange = onValueChange; + } + + public void setDrawShadow(boolean draw) { + drawShadow = draw; + final int pad = drawShadow ? dp(8) : 0; + setPadding(pad, pad, pad, pad); + invalidate(); + } + + public void setDrawBlur(boolean draw) { + drawBlur = draw; + invalidate(); + } + + public void setRoundRadiusDp(float roundRadiusDp) { + this.roundRadiusDp = roundRadiusDp; + invalidate(); + } + + private boolean preparingBlur = false; + private Runnable prepareBlur = () -> { + preparingBlur = true; + AndroidUtilities.makeGlobalBlurBitmap(bitmap -> { + preparingBlur = false; + blurBitmapShader = new BitmapShader(blurBitmap = bitmap, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP); + if (blurBitmapMatrix == null) { + blurBitmapMatrix = new Matrix(); + } else { + blurBitmapMatrix.reset(); + } + blurBitmapMatrix.postScale(BLUR_RADIUS, BLUR_RADIUS); + blurBitmapMatrix.postTranslate(-location[0], -location[1]); + blurBitmapShader.setLocalMatrix(blurBitmapMatrix); + blurPaint.setShader(blurBitmapShader); + ColorMatrix colorMatrix = new ColorMatrix(); + AndroidUtilities.adjustSaturationColorMatrix(colorMatrix, -.2f); + blurPaint.setColorFilter(new ColorMatrixColorFilter(colorMatrix)); + invalidate(); + }, BLUR_RADIUS); + }; + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(drawShadow ? 
MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec) + getPaddingRight() + getPaddingLeft(), MeasureSpec.EXACTLY) : widthMeasureSpec, MeasureSpec.makeMeasureSpec(dp(44) + getPaddingTop() + getPaddingBottom(), MeasureSpec.EXACTLY)); + + final boolean canDoBlur = SharedConfig.getDevicePerformanceClass() >= SharedConfig.PERFORMANCE_CLASS_HIGH && LiteMode.isEnabled(LiteMode.FLAG_CHAT_BLUR); + if (drawBlur && blurBitmap == null && !preparingBlur && canDoBlur) { + this.prepareBlur.run(); +// removeCallbacks(this.prepareBlur); +// post(this.prepareBlur); + } + } + + public void invalidateBlur() { + invalidateBlur(true); + } + + public void invalidateBlur(boolean isInChat) { + blurIsInChat = isInChat; + + blurPaint.setShader(null); + blurBitmapShader = null; + if (blurBitmap != null) { + blurBitmap.recycle(); + blurBitmap = null; + } + } + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + + getLocationOnScreen(location); + if (blurBitmapMatrix != null) { + blurBitmapMatrix.reset(); + blurBitmapMatrix.postScale(BLUR_RADIUS, BLUR_RADIUS); + blurBitmapMatrix.postTranslate(-location[0], -location[1]); + if (blurBitmapShader != null) { + blurBitmapShader.setLocalMatrix(blurBitmapMatrix); + invalidate(); + } + } + updatePseudoBlurColors(); + } + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + + AndroidUtilities.rectTmp.set(getPaddingLeft(), getPaddingTop(), getWidth() - getPaddingRight(), getHeight() - getPaddingBottom()); + if (drawShadow) { + canvas.drawRoundRect(AndroidUtilities.rectTmp, dp(roundRadiusDp), dp(roundRadiusDp), shadowPaint); + } + + if (drawBlur) { + final float blurAlpha = blurBitmapAlpha.set(blurBitmap != null ? 
1 : 0); + if (blurAlpha < 1f) { + if (pseudoBlurMatrix == null || pseudoBlurWidth != (int) AndroidUtilities.rectTmp.width()) { + if (pseudoBlurMatrix == null) { + pseudoBlurMatrix = new Matrix(); + } else { + pseudoBlurMatrix.reset(); + } + pseudoBlurMatrix.postScale(pseudoBlurWidth = (int) AndroidUtilities.rectTmp.width(), 1); + pseudoBlurGradient.setLocalMatrix(pseudoBlurMatrix); + } + + pseudoBlurPaint.setAlpha((int) (0xFF * (1f - blurAlpha))); + canvas.drawRoundRect(AndroidUtilities.rectTmp, dp(roundRadiusDp), dp(roundRadiusDp), pseudoBlurPaint); + } + + if (blurBitmap != null && value < 1 && blurAlpha > 0) { + blurPaint.setAlpha((int) (0xFF * blurAlpha)); + canvas.drawRoundRect(AndroidUtilities.rectTmp, dp(roundRadiusDp), dp(roundRadiusDp), blurPaint); + } + + canvas.drawRoundRect(AndroidUtilities.rectTmp, dp(roundRadiusDp), dp(roundRadiusDp), brightenBlurPaint); + canvas.drawRoundRect(AndroidUtilities.rectTmp, dp(roundRadiusDp), dp(roundRadiusDp), darkenBlurPaint); + + fillPaint.setColor(Color.WHITE); + } else { + canvas.drawRoundRect(AndroidUtilities.rectTmp, dp(roundRadiusDp), dp(roundRadiusDp), backgroundPaint); + } + + if (!backgroundDark) { + drawText(canvas, false); + } + + if (value < 1) { + canvas.save(); + canvas.clipRect(getPaddingLeft(), getPaddingTop(), getPaddingLeft() + (getWidth() - getPaddingLeft() - getPaddingRight()) * value, getHeight() - getPaddingBottom()); + } + canvas.drawRoundRect(AndroidUtilities.rectTmp, dp(roundRadiusDp), dp(roundRadiusDp), fillPaint); + + if (!backgroundDark) { + drawText(canvas, true); + } + + if (value < 1) { + canvas.restore(); + } + + if (backgroundDark) { + drawText(canvas, false); + } + } + + private ColorFilter whiteColorFilter; + + private void drawText(Canvas canvas, boolean white) { + textDrawable.setColorFilter(white ? (whiteColorFilter == null ? 
whiteColorFilter = new PorterDuffColorFilter(Color.WHITE, PorterDuff.Mode.SRC_IN) : whiteColorFilter) : null); + textDrawable.setBounds(getPaddingLeft() + dp(20), getMeasuredHeight() / 2, getMeasuredWidth() - getPaddingRight() - dp(20), getMeasuredHeight() / 2); + textDrawable.draw(canvas); + } + + private Pair<Integer, Integer> getBitmapGradientColors(Bitmap bitmap) { + if (bitmap == null) { + return null; + } + + final float sx1 = location[0] / (float) AndroidUtilities.displaySize.x; + final float sx2 = (location[0] + getMeasuredWidth()) / (float) AndroidUtilities.displaySize.x; + final float sy = (location[1] - AndroidUtilities.statusBarHeight - ActionBar.getCurrentActionBarHeight()) / (float) AndroidUtilities.displaySize.y; + + final int x1 = (int) (sx1 * bitmap.getWidth()); + final int x2 = (int) (sx2 * bitmap.getWidth()); + final int y = (int) (sy * bitmap.getHeight()); + + if (x1 < 0 || x1 >= bitmap.getWidth() || x2 < 0 || x2 >= bitmap.getWidth() || y < 0 || y >= bitmap.getHeight()) { + return null; + } + + return new Pair<>( + bitmap.getPixel(x1, y), + bitmap.getPixel(x2, y) + ); + } + + private void updatePseudoBlurColors() { + int fromColor, toColor; + + if (blurIsInChat) { + Drawable drawable = Theme.getCachedWallpaper(); + if (drawable instanceof ColorDrawable) { + fromColor = toColor = ((ColorDrawable) drawable).getColor(); + } else { + Bitmap bitmap = null; + if (drawable instanceof MotionBackgroundDrawable) { + bitmap = ((MotionBackgroundDrawable) drawable).getBitmap(); + } else if (drawable instanceof BitmapDrawable) { + bitmap = ((BitmapDrawable) drawable).getBitmap(); + } + + Pair<Integer, Integer> colors = getBitmapGradientColors(bitmap); + if (colors != null) { + fromColor = colors.first; + toColor = colors.second; + } else { + fromColor = toColor = Theme.multAlpha(Theme.getColor(Theme.key_windowBackgroundWhite, resourcesProvider), .25f); + } + } + } else { + int color = Theme.getColor(Theme.key_windowBackgroundWhite, resourcesProvider); + if (!Theme.isCurrentThemeDark()) { + color = Theme.blendOver(color, Theme.multAlpha(0xff000000, .18f)); + } + fromColor = toColor = color; + } + + if (pseudoBlurGradient == null || pseudoBlurColor1 != fromColor || pseudoBlurColor2 != toColor) { + pseudoBlurGradient = new LinearGradient(0, 0, 1, 0, new int[] { pseudoBlurColor1 = fromColor, pseudoBlurColor2 = toColor }, new float[] { 0, 1 }, Shader.TileMode.CLAMP); + pseudoBlurPaint.setShader(pseudoBlurGradient); + } + } + + private float fromX; + private float fromValue; + private long tapStart; + private boolean dragging; + + @Override + public boolean onTouchEvent(MotionEvent event) { + final float x = event.getX() - getPaddingLeft(); + + final int action = event.getAction(); + if (action == MotionEvent.ACTION_DOWN) { + dragging = true; + fromX = x; + fromValue = value; + tapStart = System.currentTimeMillis(); + } else if (action == MotionEvent.ACTION_MOVE || action == MotionEvent.ACTION_UP) { + if (action == MotionEvent.ACTION_UP) { + dragging = false; + if (System.currentTimeMillis() - tapStart < ViewConfiguration.getTapTimeout()) { + final float value = (x - getPaddingLeft()) / (getWidth() - getPaddingLeft() - getPaddingRight()); + if (onValueChange != null) { + onValueChange.run(value, true); + } + return true; + } + } + final float value = fromValue + (x - fromX) / Math.max(1, getWidth() - getPaddingLeft() - getPaddingRight()); + updateValue(value, !dragging); + } + + return true; + } + + public static class SpeedSlider extends ActionBarMenuSlider { + + private final SeekBarAccessibilityDelegate 
seekBarAccessibilityDelegate; + + public static final float MIN_SPEED = 0.2f; + public static final float MAX_SPEED = 2.5f; + + public SpeedSlider(Context context, Theme.ResourcesProvider resourcesProvider) { + super(context, resourcesProvider); + + setFocusable(true); + setFocusableInTouchMode(true); + + setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_YES); + setAccessibilityDelegate(seekBarAccessibilityDelegate = new FloatSeekBarAccessibilityDelegate(false) { + @Override + public float getProgress() { + return getSpeed(); + } + + @Override + public void setProgress(float progress) { + setSpeed(progress, true); + } + + @Override + protected float getMinValue() { + return MIN_SPEED; + } + + @Override + protected float getMaxValue() { + return MAX_SPEED; + } + + @Override + protected float getDelta() { + return 0.2f; + } + + @Override + public CharSequence getContentDescription(View host) { + return SpeedIconDrawable.formatNumber(getSpeed()) + "x " + LocaleController.getString("AccDescrSpeedSlider", R.string.AccDescrSpeedSlider); + } + }); + } + + @Override + public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { + super.onInitializeAccessibilityNodeInfo(info); + seekBarAccessibilityDelegate.onInitializeAccessibilityNodeInfoInternal(this, info); + } + + @Override + public boolean performAccessibilityAction(int action, Bundle arguments) { + return super.performAccessibilityAction(action, arguments) || seekBarAccessibilityDelegate.performAccessibilityActionInternal(this, action, arguments); + } + + public float getSpeed(float value) { + return MIN_SPEED + (MAX_SPEED - MIN_SPEED) * value; + } + + public float getSpeed() { + return getSpeed(getValue()); + } + + public void setSpeed(float speed, boolean animated) { + setValue((speed - MIN_SPEED) / (MAX_SPEED - MIN_SPEED), animated); + } + + @Override + protected String getStringValue(float value) { + return SpeedIconDrawable.formatNumber(MIN_SPEED + value * (MAX_SPEED - MIN_SPEED)) + "x"; + } + + @Override + protected int getColorValue(float value) { + final float speed = MIN_SPEED + value * (MAX_SPEED - MIN_SPEED); +// if (speed <= 0.3f) { +// return Theme.getColor(Theme.key_color_red, resourcesProvider); +// } else if (speed <= 0.5f) { +// return ColorUtils.blendARGB( +// Theme.getColor(Theme.key_color_red, resourcesProvider), +// Theme.getColor(Theme.key_color_yellow, resourcesProvider), +// (speed - 0.3f) / (0.5f - 0.3f) +// ); +// } else if (speed <= 1f) { +// return ColorUtils.blendARGB( +// Theme.getColor(Theme.key_color_yellow, resourcesProvider), +// Theme.getColor(Theme.key_color_lightblue, resourcesProvider), +// MathUtils.clamp((speed - 0.5f) / (1f - 0.5f), 0, 1) +// ); +// } else { + return ColorUtils.blendARGB( + Theme.getColor(Theme.key_color_lightblue, resourcesProvider), + Theme.getColor(Theme.key_color_blue, resourcesProvider), + MathUtils.clamp((speed - 1f) / (2f - 1f), 0, 1) + ); +// } + } + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuSubItem.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuSubItem.java index dc79f2c2de..8b43d04181 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuSubItem.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuSubItem.java @@ -43,14 +43,18 @@ public ActionBarMenuSubItem(Context context, boolean top, boolean bottom) { } public ActionBarMenuSubItem(Context context, boolean needCheck, boolean top, boolean bottom) { - this(context, needCheck, top, 
bottom, null); + this(context, needCheck ? 1 : 0, top, bottom, null); } public ActionBarMenuSubItem(Context context, boolean top, boolean bottom, Theme.ResourcesProvider resourcesProvider) { - this(context, false, top, bottom, resourcesProvider); + this(context, 0, top, bottom, resourcesProvider); } public ActionBarMenuSubItem(Context context, boolean needCheck, boolean top, boolean bottom, Theme.ResourcesProvider resourcesProvider) { + this(context, needCheck ? 1 : 0, top, bottom, resourcesProvider); + } + + public ActionBarMenuSubItem(Context context, int needCheck, boolean top, boolean bottom, Theme.ResourcesProvider resourcesProvider) { super(context); this.resourcesProvider = resourcesProvider; @@ -78,12 +82,17 @@ public ActionBarMenuSubItem(Context context, boolean needCheck, boolean top, boo textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); addView(textView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.CENTER_VERTICAL)); - if (needCheck) { + if (needCheck > 0) { checkView = new CheckBox2(context, 26, resourcesProvider); checkView.setDrawUnchecked(false); checkView.setColor(null, null, Theme.key_radioBackgroundChecked); checkView.setDrawBackgroundAsArc(-1); - addView(checkView, LayoutHelper.createFrame(26, LayoutHelper.MATCH_PARENT, Gravity.CENTER_VERTICAL | (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT))); + if (needCheck == 1) { + addView(checkView, LayoutHelper.createFrame(26, LayoutHelper.MATCH_PARENT, Gravity.CENTER_VERTICAL | (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT))); + } else { + addView(checkView, LayoutHelper.createFrame(26, LayoutHelper.MATCH_PARENT, Gravity.CENTER_VERTICAL | (LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT))); + textView.setPadding(LocaleController.isRTL ? AndroidUtilities.dp(34) : 0, 0, LocaleController.isRTL ? 0 : AndroidUtilities.dp(34), 0); + } } } @@ -191,6 +200,10 @@ public void onItemShown() { } } + public void setVisibility(boolean visibility) { + setVisibility(visibility ? View.VISIBLE : View.GONE); + } + public void setText(CharSequence text) { textView.setText(text); } @@ -206,7 +219,7 @@ public void setSubtext(String text) { subtextView.setSingleLine(true); subtextView.setGravity(Gravity.LEFT); subtextView.setEllipsize(TextUtils.TruncateAt.END); - subtextView.setTextColor(0xff7C8286); + subtextView.setTextColor(getThemedColor(Theme.key_groupcreate_sectionText)); subtextView.setVisibility(GONE); subtextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); subtextView.setPadding(LocaleController.isRTL ? 0 : AndroidUtilities.dp(43), 0, LocaleController.isRTL ? AndroidUtilities.dp(43) : 0, 0); @@ -248,9 +261,7 @@ public void updateSelectorBackground(boolean top, boolean bottom) { } void updateBackground() { - int topBackgroundRadius = top ? 6 : 0; - int bottomBackgroundRadius = bottom ? 6 : 0; - setBackground(Theme.createRadSelectorDrawable(selectorColor, topBackgroundRadius, bottomBackgroundRadius)); + setBackground(Theme.createRadSelectorDrawable(selectorColor, top ? 6 : 0, bottom ? 
6 : 0)); } private int getThemedColor(String key) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarPopupWindow.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarPopupWindow.java index 3a972c438a..8a6408fc76 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarPopupWindow.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarPopupWindow.java @@ -21,7 +21,6 @@ import android.graphics.Rect; import android.graphics.drawable.Drawable; import android.os.Build; -import android.util.Log; import android.view.Gravity; import androidx.annotation.Keep; import androidx.core.view.ViewCompat; @@ -108,7 +107,7 @@ public static class ActionBarPopupWindowLayout extends FrameLayout { private boolean startAnimationPending = false; private int backAlpha = 255; private int lastStartedChild = 0; - private boolean shownFromBottom; + public boolean shownFromBottom; private boolean animationEnabled = allowAnimation; private ArrayList itemAnimators; private HashMap positions = new HashMap<>(); @@ -116,6 +115,7 @@ public static class ActionBarPopupWindowLayout extends FrameLayout { private int gapEndY = -1000000; private Rect bgPaddings = new Rect(); private onSizeChangedListener onSizeChangedListener; + private float reactionsEnterProgress = 1f; private PopupSwipeBackLayout swipeBackLayout; private ScrollView scrollView; @@ -130,6 +130,7 @@ public static class ActionBarPopupWindowLayout extends FrameLayout { protected ActionBarPopupWindow window; public int subtractBackgroundHeight; + Rect rect; public ActionBarPopupWindowLayout(Context context) { this(context, null); @@ -295,16 +296,6 @@ public void setBackScaleX(float value) { } } - public void translateChildrenAfter(int index, float ty) { - subtractBackgroundHeight = (int) -ty; - for (int i = index + 1; i < linearLayout.getChildCount(); ++i) { - View child = linearLayout.getChildAt(i); - if (child != null) { - child.setTranslationY(ty); - } - } - } - @Keep public void setBackScaleY(float value) { if (backScaleY != value) { @@ -442,11 +433,6 @@ protected void dispatchDraw(Canvas canvas) { setTranslationY(yOffset); } } - super.dispatchDraw(canvas); - } - - @Override - protected void onDraw(Canvas canvas) { if (backgroundDrawable != null) { int start = gapStartY - scrollView.getScrollY(); int end = gapEndY - scrollView.getScrollY(); @@ -475,12 +461,12 @@ protected void onDraw(Canvas canvas) { backgroundDrawable.setAlpha(applyAlpha ? backAlpha : 255); if (shownFromBottom) { final int height = getMeasuredHeight(); - backgroundDrawable.setBounds(0, (int) (height * (1.0f - backScaleY)), (int) (getMeasuredWidth() * backScaleX), height); + AndroidUtilities.rectTmp2.set(0, (int) (height * (1.0f - backScaleY)), (int) (getMeasuredWidth() * backScaleX), height); } else { if (start > -AndroidUtilities.dp(16)) { int h = (int) (getMeasuredHeight() * backScaleY); if (a == 0) { - backgroundDrawable.setBounds(0, -scrollView.getScrollY() + (gapStartY != -1000000 ? AndroidUtilities.dp(1) : 0), (int) (getMeasuredWidth() * backScaleX), (gapStartY != -1000000 ? Math.min(h, start + AndroidUtilities.dp(16)) : h) - subtractBackgroundHeight); + AndroidUtilities.rectTmp2.set(0, -scrollView.getScrollY() + (gapStartY != -1000000 ? AndroidUtilities.dp(1) : 0), (int) (getMeasuredWidth() * backScaleX), (gapStartY != -1000000 ? 
Math.min(h, start + AndroidUtilities.dp(16)) : h) - subtractBackgroundHeight); } else { if (h < end) { if (gapStartY != -1000000) { @@ -488,13 +474,20 @@ protected void onDraw(Canvas canvas) { } continue; } - backgroundDrawable.setBounds(0, end, (int) (getMeasuredWidth() * backScaleX), h - subtractBackgroundHeight); + AndroidUtilities.rectTmp2.set(0, end, (int) (getMeasuredWidth() * backScaleX), h - subtractBackgroundHeight); } } else { - backgroundDrawable.setBounds(0, (gapStartY < 0 ? 0 : -AndroidUtilities.dp(16)), (int) (getMeasuredWidth() * backScaleX), (int) (getMeasuredHeight() * backScaleY) - subtractBackgroundHeight); + AndroidUtilities.rectTmp2.set(0, (gapStartY < 0 ? 0 : -AndroidUtilities.dp(16)), (int) (getMeasuredWidth() * backScaleX), (int) (getMeasuredHeight() * backScaleY) - subtractBackgroundHeight); } } - + if (reactionsEnterProgress != 1f) { + if (rect == null) { + rect = new Rect(); + } + rect.set(AndroidUtilities.rectTmp2.right, AndroidUtilities.rectTmp2.top, AndroidUtilities.rectTmp2.right, AndroidUtilities.rectTmp2.top); + AndroidUtilities.lerp(rect, AndroidUtilities.rectTmp2, reactionsEnterProgress, AndroidUtilities.rectTmp2); + } + backgroundDrawable.setBounds(AndroidUtilities.rectTmp2); backgroundDrawable.draw(canvas); if (hasGap) { canvas.save(); @@ -527,6 +520,15 @@ protected void onDraw(Canvas canvas) { } } } + if (reactionsEnterProgress != 1f) { + canvas.saveLayerAlpha((float) AndroidUtilities.rectTmp2.left, (float) AndroidUtilities.rectTmp2.top, AndroidUtilities.rectTmp2.right, AndroidUtilities.rectTmp2.bottom, (int) (255 * reactionsEnterProgress), Canvas.ALL_SAVE_FLAG); + float scale = 0.5f + reactionsEnterProgress * 0.5f; + canvas.scale(scale, scale, AndroidUtilities.rectTmp2.right, AndroidUtilities.rectTmp2.top); + super.dispatchDraw(canvas); + canvas.restore(); + } else { + super.dispatchDraw(canvas); + } } public Drawable getBackgroundDrawable() { @@ -620,6 +622,11 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { public void setParentWindow(ActionBarPopupWindow popupWindow) { window = popupWindow; } + + public void setReactionsTransitionProgress(float transitionEnterProgress) { + this.reactionsEnterProgress = transitionEnterProgress; + invalidate(); + } } public ActionBarPopupWindow() { @@ -670,6 +677,10 @@ public void setLayoutInScreen(boolean value) { } private void init() { + View contentView = getContentView(); + if (contentView instanceof ActionBarPopupWindowLayout && ((ActionBarPopupWindowLayout) contentView).getSwipeBack() != null) { + ((ActionBarPopupWindowLayout) contentView).getSwipeBack().setOnClickListener(e -> dismiss()); + } if (superListenerField != null) { try { mSuperScrollListener = (ViewTreeObserver.OnScrollChangedListener) superListenerField.get(this); @@ -708,12 +719,16 @@ private void registerListener(View anchor) { } public void dimBehind() { + dimBehind(0.2f); + } + + public void dimBehind(float amount) { View container = getContentView().getRootView(); Context context = getContentView().getContext(); WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); WindowManager.LayoutParams p = (WindowManager.LayoutParams) container.getLayoutParams(); p.flags |= WindowManager.LayoutParams.FLAG_DIM_BEHIND; - p.dimAmount = 0.2f; + p.dimAmount = amount; wm.updateViewLayout(container, p); } @@ -1037,21 +1052,20 @@ public interface onSizeChangedListener { public static class GapView extends FrameLayout { - Theme.ResourcesProvider resourcesProvider; - String colorKey; - Drawable 
shadowDrawable; public GapView(Context context, Theme.ResourcesProvider resourcesProvider) { this(context, resourcesProvider, Theme.key_actionBarDefaultSubmenuSeparator); } - public GapView(Context context, Theme.ResourcesProvider resourcesProvider, String colorKey) { + public GapView(Context context, int color, int shadowColor) { super(context); - this.resourcesProvider = resourcesProvider; - this.colorKey = colorKey; - this.shadowDrawable = Theme.getThemedDrawable(getContext(), R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow, resourcesProvider); - setBackgroundColor(Theme.getColor(colorKey, resourcesProvider)); + this.shadowDrawable = Theme.getThemedDrawable(getContext(), R.drawable.greydivider, shadowColor); + setBackgroundColor(color); + } + + public GapView(Context context, Theme.ResourcesProvider resourcesProvider, String colorKey) { + this(context, Theme.getColor(colorKey, resourcesProvider), Theme.getColor(Theme.key_windowBackgroundGrayShadow, resourcesProvider)); } public void setColor(int color) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AdjustPanLayoutHelper.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AdjustPanLayoutHelper.java index bdc223030e..0152ce2a5a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AdjustPanLayoutHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AdjustPanLayoutHelper.java @@ -86,10 +86,6 @@ public View getAdjustingContentView() { ViewTreeObserver.OnPreDrawListener onPreDrawListener = new ViewTreeObserver.OnPreDrawListener() { @Override public boolean onPreDraw() { - if (!SharedConfig.smoothKeyboard) { - onDetach(); - return true; - } int contentHeight = parent.getHeight(); if (contentHeight - startOffset() == previousHeight - previousStartOffset || contentHeight == previousHeight || animator != null) { if (animator == null) { @@ -276,13 +272,10 @@ public AdjustPanLayoutHelper(View parent) { public AdjustPanLayoutHelper(View parent, boolean useInsetsAnimator) { this.useInsetsAnimator = useInsetsAnimator; this.parent = parent; - onAttach(); + AndroidUtilities.runOnUIThread(this::onAttach); } public void onAttach() { - if (!SharedConfig.smoothKeyboard) { - return; - } onDetach(); Context context = parent.getContext(); Activity activity = getActivity(context); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AlertDialog.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AlertDialog.java index d300ddff63..0bc7d31e57 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AlertDialog.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/AlertDialog.java @@ -75,7 +75,6 @@ public class AlertDialog extends Dialog implements Drawable.Callback, NotificationCenter.NotificationCenterDelegate { public static final int ALERT_TYPE_MESSAGE = 0; - public static final int ALERT_TYPE_SPINNER_DETAIL = 1; // not used? 
public static final int ALERT_TYPE_LOADING = 2; public static final int ALERT_TYPE_SPINNER = 3; @@ -94,7 +93,7 @@ public class AlertDialog extends Dialog implements Drawable.Callback, Notificati private BitmapDrawable[] shadow = new BitmapDrawable[2]; private boolean[] shadowVisibility = new boolean[2]; private AnimatorSet[] shadowAnimation = new AnimatorSet[2]; - private int customViewOffset = 20; + private int customViewOffset = 12; private String dialogButtonColorKey = Theme.key_dialogButton; @@ -200,7 +199,7 @@ private boolean supportsNativeBlur() { public void redPositive() { TextView button = (TextView) getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(getThemedColor(Theme.key_dialogTextRed2)); + button.setTextColor(getThemedColor(Theme.key_dialogTextRed)); } } @@ -214,7 +213,7 @@ public AlertDialogCell(Context context, Theme.ResourcesProvider resourcesProvide super(context); this.resourcesProvider = resourcesProvider; - setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_dialogButtonSelector), 2)); + setBackground(Theme.createSelectorDrawable(getThemedColor(Theme.key_dialogButtonSelector), 2)); setPadding(AndroidUtilities.dp(23), 0, AndroidUtilities.dp(23), 0); imageView = new ImageView(context); @@ -269,15 +268,17 @@ public AlertDialog(Context context, int progressStyle) { public AlertDialog(Context context, int progressStyle, Theme.ResourcesProvider resourcesProvider) { super(context, R.style.TransparentDialog); - blurredNativeBackground = supportsNativeBlur() && progressViewStyle == ALERT_TYPE_MESSAGE; - blurredBackground = blurredNativeBackground || !supportsNativeBlur() && SharedConfig.getDevicePerformanceClass() >= SharedConfig.PERFORMANCE_CLASS_HIGH; this.resourcesProvider = resourcesProvider; + blurredNativeBackground = supportsNativeBlur() && progressViewStyle == ALERT_TYPE_MESSAGE; + backgroundColor = getThemedColor(Theme.key_dialogBackground); + final boolean isDark = AndroidUtilities.computePerceivedBrightness(backgroundColor) < 0.721f; + blurredBackground = blurredNativeBackground || !supportsNativeBlur() && SharedConfig.getDevicePerformanceClass() >= SharedConfig.PERFORMANCE_CLASS_HIGH && isDark; + backgroundPaddings = new Rect(); if (progressStyle != ALERT_TYPE_SPINNER || blurredBackground) { shadowDrawable = context.getResources().getDrawable(R.drawable.popup_fixed_alert3).mutate(); - backgroundColor = getThemedColor(Theme.key_dialogBackground); - blurOpacity = progressStyle == ALERT_TYPE_SPINNER ? 0.55f : (AndroidUtilities.computePerceivedBrightness(backgroundColor) < 0.721f ? 0.80f : 0.92f); + blurOpacity = progressStyle == ALERT_TYPE_SPINNER ? 0.55f : (isDark ? 
0.80f : 0.985f); shadowDrawable.setColorFilter(new PorterDuffColorFilter(backgroundColor, PorterDuff.Mode.MULTIPLY)); shadowDrawable.getPadding(backgroundPaddings); } @@ -380,7 +381,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { availableHeight -= topImageView.getMeasuredHeight(); } if (topView != null) { - int w = width - AndroidUtilities.dp(16); + int w = width; int h; if (aspectRatio == 0) { float scale = w / 936.0f; @@ -567,7 +568,7 @@ protected void dispatchDraw(Canvas canvas) { } }; containerView.setOrientation(LinearLayout.VERTICAL); - if (blurredBackground || progressViewStyle == ALERT_TYPE_SPINNER) { + if ((blurredBackground || progressViewStyle == ALERT_TYPE_SPINNER) && progressViewStyle != ALERT_TYPE_LOADING) { containerView.setBackgroundDrawable(null); containerView.setPadding(0, 0, 0, 0); if (blurredBackground && !blurredNativeBackground) { @@ -734,22 +735,7 @@ public void setText(CharSequence text, BufferType type) { messageTextView.setEnabled(false); } messageTextView.setGravity((topAnimationIsNew ? Gravity.CENTER_HORIZONTAL : LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP); - if (progressViewStyle == ALERT_TYPE_SPINNER_DETAIL) { - setCanceledOnTouchOutside(false); - setCancelable(false); - progressViewContainer = new FrameLayout(getContext()); - containerView.addView(progressViewContainer, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 44, Gravity.LEFT | Gravity.TOP, 23, title == null ? 24 : 0, 23, 24)); - - RadialProgressView progressView = new RadialProgressView(getContext(), resourcesProvider); - progressView.setProgressColor(getThemedColor(Theme.key_dialogProgressCircle)); - progressViewContainer.addView(progressView, LayoutHelper.createFrame(44, 44, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP)); - - messageTextView.setLines(1); - messageTextView.setEllipsize(TextUtils.TruncateAt.END); - progressViewContainer.addView(messageTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.CENTER_VERTICAL, (LocaleController.isRTL ? 0 : 62), 0, (LocaleController.isRTL ? 62 : 0), 0)); - backgroundColor = getThemedColor(Theme.key_dialog_inlineProgressBackground); - containerView.setBackgroundDrawable(Theme.createRoundRectDrawable(AndroidUtilities.dp(18), backgroundColor)); - } else if (progressViewStyle == ALERT_TYPE_LOADING) { + if (progressViewStyle == ALERT_TYPE_LOADING) { setCanceledOnTouchOutside(false); setCancelable(false); containerView.addView(messageTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, 24, title == null ? 
19 : 0, 24, 20)); @@ -787,10 +773,6 @@ public void setText(CharSequence text, BufferType type) { } if (!TextUtils.isEmpty(message)) { messageTextView.setText(message); - if (customView != null) { - ViewGroup.MarginLayoutParams params = (ViewGroup.MarginLayoutParams) messageTextView.getLayoutParams(); - params.topMargin = AndroidUtilities.dp(16); - } messageTextView.setVisibility(View.VISIBLE); } else { messageTextView.setVisibility(View.GONE); @@ -1233,10 +1215,6 @@ public void onAnimationCancel(Animator animation) { } } - public void setProgressStyle(int style) { - progressViewStyle = style; - } - public void setDismissDialogByButtons(boolean value) { dismissDialogByButtons = value; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java index b65d86a178..41367615f2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java @@ -309,15 +309,20 @@ public void setFinishing(boolean finishing) { this.finishing = finishing; } - public void finishFragment(boolean animated) { + public boolean finishFragment(boolean animated) { if (isFinished || parentLayout == null) { - return; + return false; } finishing = true; parentLayout.closeLastFragment(animated); + return true; } public void removeSelfFromStack() { + removeSelfFromStack(false); + } + + public void removeSelfFromStack(boolean immediate) { if (isFinished || parentLayout == null) { return; } @@ -325,7 +330,11 @@ public void removeSelfFromStack() { parentDialog.dismiss(); return; } - parentLayout.removeFragmentFromStack(this); + parentLayout.removeFragmentFromStack(this, immediate); + } + + public boolean allowFinishFragmentInsteadOfRemoveFromStack() { + return true; } protected boolean isFinishing() { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BottomSheet.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BottomSheet.java index 583c005666..482fce1b1a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BottomSheet.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BottomSheet.java @@ -82,6 +82,7 @@ public class BottomSheet extends Dialog { public boolean drawNavigationBar; public boolean drawDoubleNavigationBar; public boolean scrollNavBar; + protected boolean waitingKeyboard; protected boolean useSmoothKeyboard; @@ -173,13 +174,14 @@ public void setAlpha(int alpha) { private OnDismissListener onHideListener; protected Theme.ResourcesProvider resourcesProvider; protected boolean isPortrait; + public boolean pauseAllHeavyOperations = true; public void setDisableScroll(boolean b) { disableScroll = b; } private ValueAnimator keyboardContentAnimator; - protected boolean smoothKeyboardAnimationEnabled; + public boolean smoothKeyboardAnimationEnabled; private boolean openNoDelay; private float hideSystemVerticalInsetsProgress; @@ -470,11 +472,6 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { } } - @Override - public void requestLayout() { - super.requestLayout(); - } - @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { layoutCount--; @@ -570,11 +567,18 @@ public void onAnimationEnd(Animator animation) { child.layout(childLeft, childTop, childLeft + width, childTop + height); } } - if (layoutCount == 0 && startAnimationRunnable != null) { + if (layoutCount == 0 && startAnimationRunnable != null && 
!waitingKeyboard) { AndroidUtilities.cancelRunOnUIThread(startAnimationRunnable); startAnimationRunnable.run(); startAnimationRunnable = null; } + if (waitingKeyboard && keyboardVisible) { + if (startAnimationRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(startAnimationRunnable); + startAnimationRunnable.run(); + } + waitingKeyboard = false; + } keyboardChanged = false; } @@ -743,7 +747,7 @@ public void setHideSystemVerticalInsets(boolean hideSystemVerticalInsets) { @RequiresApi(api = Build.VERSION_CODES.Q) private int getAdditionalMandatoryOffsets() { - if (!calcMandatoryInsets) { + if (!calcMandatoryInsets || lastInsets == null) { return 0; } Insets insets = lastInsets.getSystemGestureInsets(); @@ -928,7 +932,7 @@ public BottomSheet(Context context, boolean needFocus, Theme.ResourcesProvider r touchSlop = vc.getScaledTouchSlop(); Rect padding = new Rect(); - shadowDrawable = context.getResources().getDrawable(R.drawable.sheet_shadow).mutate(); + shadowDrawable = context.getResources().getDrawable(R.drawable.sheet_shadow_round).mutate(); shadowDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_dialogBackground), PorterDuff.Mode.SRC_IN)); shadowDrawable.getPadding(padding); backgroundPaddingLeft = padding.left; @@ -1104,7 +1108,8 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { containerView.setClipChildren(false); container.setClipToPadding(false); container.setClipChildren(false); - containerView.addView(customView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, customViewGravity, 0, -backgroundPaddingTop + topOffset, 0, 0)); + containerView.addView(customView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, customViewGravity, 0, topOffset, 0, 0)); + ((ViewGroup.MarginLayoutParams) customView.getLayoutParams()).topMargin = -backgroundPaddingTop + AndroidUtilities.dp(topOffset); } else { containerView.addView(customView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, customViewGravity, 0, topOffset, 0, 0)); } @@ -1219,6 +1224,10 @@ public void show() { if (Build.VERSION.SDK_INT >= 18) { layoutCount = 2; containerView.setTranslationY((Build.VERSION.SDK_INT >= 21 ? AndroidUtilities.statusBarHeight * (1f - hideSystemVerticalInsetsProgress) : 0) + containerView.getMeasuredHeight() + (scrollNavBar ? getBottomInset() : 0)); + long delay = openNoDelay ? 0 : 150; + if (waitingKeyboard) { + delay = 500; + } AndroidUtilities.runOnUIThread(startAnimationRunnable = new Runnable() { @Override public void run() { @@ -1228,7 +1237,7 @@ public void run() { startAnimationRunnable = null; startOpenAnimation(); } - }, openNoDelay ? 0 : 150); + }, delay); } else { startOpenAnimation(); } @@ -1239,7 +1248,7 @@ public ColorDrawable getBackDrawable() { } public int getBackgroundPaddingTop() { - return backgroundPaddingTop; + return backgroundPaddingTop ; } public void setAllowDrawContent(boolean value) { @@ -1359,6 +1368,7 @@ private void startOpenAnimation() { navigationBarAnimation ); currentSheetAnimation.setDuration(400); + currentSheetAnimation.setStartDelay(waitingKeyboard ? 
0 : 20); + currentSheetAnimation.setInterpolator(openInterpolator); + currentSheetAnimation.addListener(new AnimatorListenerAdapter() { + @Override @@ -1379,7 +1389,9 @@ public void onAnimationEnd(Animator animation) { getWindow().setAttributes(params); } } - NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.startAllHeavyOperations, 512); + if (pauseAllHeavyOperations) { + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.startAllHeavyOperations, 512); + } } @Override @@ -1390,7 +1402,9 @@ public void onAnimationCancel(Animator animation) { } } }); - NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.stopAllHeavyOperations, 512); + if (pauseAllHeavyOperations) { + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.stopAllHeavyOperations, 512); + } currentSheetAnimation.start(); } } @@ -1540,7 +1554,6 @@ public void dismiss() { onDismissAnimationStart(); if (!allowCustomAnimation || !onCustomCloseAnimation()) { currentSheetAnimationType = 2; - currentSheetAnimation = new AnimatorSet(); if (navigationBarAnimation != null) { navigationBarAnimation.cancel(); } @@ -1551,11 +1564,14 @@ public void dismiss() { container.invalidate(); } }); - currentSheetAnimation.playTogether( - containerView == null ? null : ObjectAnimator.ofFloat(containerView, View.TRANSLATION_Y, getContainerViewHeight() + container.keyboardHeight + AndroidUtilities.dp(10) + (scrollNavBar ? getBottomInset() : 0)), - ObjectAnimator.ofInt(backDrawable, AnimationProperties.COLOR_DRAWABLE_ALPHA, 0), - navigationBarAnimation - ); + currentSheetAnimation = new AnimatorSet(); + ArrayList<Animator> animators = new ArrayList<>(); + if (containerView != null) { + animators.add(ObjectAnimator.ofFloat(containerView, View.TRANSLATION_Y, getContainerViewHeight() + container.keyboardHeight + AndroidUtilities.dp(10) + (scrollNavBar ? 
getBottomInset() : 0))); + } + animators.add(ObjectAnimator.ofInt(backDrawable, AnimationProperties.COLOR_DRAWABLE_ALPHA, 0)); + animators.add(navigationBarAnimation); + currentSheetAnimation.playTogether(animators); // if (useFastDismiss) { // int height = containerView.getMeasuredHeight(); // duration = Math.max(60, (int) (250 * (height - containerView.getTranslationY()) / (float) height)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/DrawerLayoutContainer.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/DrawerLayoutContainer.java index e011b5da9f..4657eacad2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/DrawerLayoutContainer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/DrawerLayoutContainer.java @@ -40,6 +40,8 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; +//import com.google.android.gms.vision.Frame; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.BuildVars; import org.telegram.messenger.FileLog; @@ -50,7 +52,7 @@ public class DrawerLayoutContainer extends FrameLayout { private static final int MIN_DRAWER_MARGIN = 64; - private ViewGroup drawerLayout; + private FrameLayout drawerLayout; private INavigationLayout parentActionBarLayout; private boolean maybeStartTracking; @@ -174,7 +176,7 @@ private int getTopInset(Object insets) { return 0; } - public void setDrawerLayout(ViewGroup layout) { + public void setDrawerLayout(FrameLayout layout) { drawerLayout = layout; addView(drawerLayout); drawerLayout.setVisibility(INVISIBLE); @@ -303,7 +305,7 @@ private float getScrimOpacity() { return scrimOpacity; } - public View getDrawerLayout() { + public FrameLayout getDrawerLayout() { return drawerLayout; } @@ -318,6 +320,10 @@ public void presentFragment(BaseFragment fragment) { closeDrawer(false); } + public INavigationLayout getParentActionBarLayout() { + return parentActionBarLayout; + } + public void openStatusSelect() { } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/EmojiThemes.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/EmojiThemes.java index 9707e24d61..fe692f2566 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/EmojiThemes.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/EmojiThemes.java @@ -368,6 +368,7 @@ public void loadWallpaper(int index, ResultCallback> callback } ImageLocation imageLocation = ImageLocation.getForDocument(wallPaper.document); ImageReceiver imageReceiver = new ImageReceiver(); + imageReceiver.setAllowLoadingOnAttachedOnly(false); String imageFilter; int w = Math.min(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y); @@ -428,6 +429,7 @@ public void loadWallpaperThumb(int index, ResultCallback> cal final TLRPC.PhotoSize thumbSize = FileLoader.getClosestPhotoSizeWithSize(wallpaper.document.thumbs, 140); ImageLocation imageLocation = ImageLocation.getForDocument(thumbSize, wallpaper.document); ImageReceiver imageReceiver = new ImageReceiver(); + imageReceiver.setAllowLoadingOnAttachedOnly(false); imageReceiver.setImage(imageLocation, "120_140", null, null, null, 1); imageReceiver.setDelegate((receiver, set, thumb, memCache) -> { ImageReceiver.BitmapHolder holder = receiver.getBitmapSafe(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/INavigationLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/INavigationLayout.java index b38ba16169..797f227df6 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/INavigationLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/INavigationLayout.java @@ -8,10 +8,9 @@ import android.view.Menu; import android.view.View; import android.view.ViewGroup; +import android.widget.FrameLayout; -import org.telegram.messenger.SharedConfig; import org.telegram.ui.Components.BackButtonMenu; -import org.telegram.ui.LNavigation.LNavigation; import java.util.ArrayList; import java.util.HashMap; @@ -20,10 +19,12 @@ public interface INavigationLayout { int REBUILD_FLAG_REBUILD_LAST = 1, REBUILD_FLAG_REBUILD_ONLY_LAST = 2; + int FORCE_NOT_ATTACH_VIEW = -2; + boolean presentFragment(NavigationParams params); boolean checkTransitionAnimation(); boolean addFragmentToStack(BaseFragment fragment, int position); - void removeFragmentFromStack(BaseFragment fragment); + void removeFragmentFromStack(BaseFragment fragment, boolean immediate); List<BaseFragment> getFragmentStack(); void setDelegate(INavigationLayoutDelegate INavigationLayoutDelegate); void closeLastFragment(boolean animated, boolean forceNoAnimation); @@ -47,7 +48,7 @@ public interface INavigationLayout { void expandPreviewFragment(); void finishPreviewFragment(); void setFragmentPanTranslationOffset(int offset); - ViewGroup getOverlayContainerView(); + FrameLayout getOverlayContainerView(); void setHighlightActionButtons(boolean highlight); float getCurrentPreviewFragmentAlpha(); void drawCurrentPreviewFragment(Canvas canvas, Drawable foregroundDrawable); @@ -75,9 +76,12 @@ public interface INavigationLayout { void setPulledDialogs(List<BackButtonMenu.PulledDialog> pulledDialogs); static INavigationLayout newLayout(Context context) { - return SharedConfig.useLNavigation ? new LNavigation(context) : new ActionBarLayout(context); + return new ActionBarLayout(context); } + default void removeFragmentFromStack(BaseFragment fragment) { + removeFragmentFromStack(fragment, false); + } default boolean isActionBarInCrossfade() { return false; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/SimpleTextView.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/SimpleTextView.java index 02524ba67e..5e4ac1cc34 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/SimpleTextView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/SimpleTextView.java @@ -36,6 +36,8 @@ import android.view.accessibility.AccessibilityNodeInfo; import android.widget.TextView; +import androidx.annotation.NonNull; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.Emoji; import org.telegram.messenger.UserObject; @@ -100,6 +102,7 @@ public class SimpleTextView extends View implements Drawable.Callback { private boolean rightDrawableOutside; private boolean ellipsizeByGradient, ellipsizeByGradientLeft; + private Boolean forceEllipsizeByGradientLeft; private int ellipsizeByGradientWidthDp = 16; private int paddingRight; @@ -197,15 +200,24 @@ public void setScrollNonFitText(boolean value) { } public void setEllipsizeByGradient(boolean value) { + setEllipsizeByGradient(value, null); + } + + public void setEllipsizeByGradient(int value) { + setEllipsizeByGradient(value, null); + } + + public void setEllipsizeByGradient(boolean value, Boolean forceLeft) { if (scrollNonFitText == value) { return; } ellipsizeByGradient = value; + this.forceEllipsizeByGradientLeft = forceLeft; updateFadePaints(); } - public void setEllipsizeByGradient(int value) { - setEllipsizeByGradient(true); + public void setEllipsizeByGradient(int value, Boolean forceLeft) { + 
setEllipsizeByGradient(true, forceLeft); ellipsizeByGradientWidthDp = value; updateFadePaints(); } @@ -224,7 +236,12 @@ private void updateFadePaints() { fadePaintBack.setShader(new LinearGradient(0, 0, AndroidUtilities.dp(6), 0, new int[]{0, 0xffffffff}, new float[]{0f, 1f}, Shader.TileMode.CLAMP)); fadePaintBack.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.DST_OUT)); } - boolean ellipsizeLeft = getAlignment() == Layout.Alignment.ALIGN_NORMAL && LocaleController.isRTL || getAlignment() == Layout.Alignment.ALIGN_OPPOSITE && !LocaleController.isRTL; + boolean ellipsizeLeft; + if (forceEllipsizeByGradientLeft != null) { + ellipsizeLeft = forceEllipsizeByGradientLeft; + } else { + ellipsizeLeft = getAlignment() == Layout.Alignment.ALIGN_NORMAL && LocaleController.isRTL || getAlignment() == Layout.Alignment.ALIGN_OPPOSITE && !LocaleController.isRTL; + } if ((fadeEllpsizePaint == null || fadeEllpsizePaintWidth != AndroidUtilities.dp(ellipsizeByGradientWidthDp) || ellipsizeByGradientLeft != ellipsizeLeft) && ellipsizeByGradient) { if (fadeEllpsizePaint == null) { fadeEllpsizePaint = new Paint(); @@ -529,6 +546,11 @@ public void setLeftDrawable(Drawable drawable) { } } + @Override + protected boolean verifyDrawable(@NonNull Drawable who) { + return who == rightDrawable || who == leftDrawable || super.verifyDrawable(who); + } + public void replaceTextWithDrawable(Drawable drawable, String replacedText) { if (replacedDrawable == drawable) { return; @@ -897,7 +919,7 @@ protected void onDraw(Canvas canvas) { } if (rightDrawable != null && rightDrawableOutside) { - int x = Math.min(textOffsetX + textWidth + drawablePadding + (scrollingOffset == 0 ? -nextScrollX : (int) -scrollingOffset) + nextScrollX, getMaxTextWidth() - paddingRight + drawablePadding - AndroidUtilities.dp(4)); + int x = Math.min(textOffsetX + textWidth + drawablePadding + (scrollingOffset == 0 ? 
-nextScrollX : (int) -scrollingOffset) + nextScrollX, getMaxTextWidth() - paddingRight + drawablePadding); int dw = (int) (rightDrawable.getIntrinsicWidth() * rightDrawableScale); int dh = (int) (rightDrawable.getIntrinsicHeight() * rightDrawableScale); int y; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/Theme.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/Theme.java index b1bd87e1c4..32c0481804 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/Theme.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/Theme.java @@ -9,6 +9,7 @@ package org.telegram.ui.ActionBar; import static org.telegram.messenger.AndroidUtilities.dp; +import static org.telegram.messenger.AndroidUtilities.dpf2; import android.annotation.SuppressLint; import android.annotation.TargetApi; @@ -66,6 +67,8 @@ import android.util.SparseIntArray; import android.util.StateSet; import android.view.View; +import android.widget.FrameLayout; +import android.widget.Toast; import androidx.annotation.NonNull; import androidx.annotation.Nullable; @@ -82,6 +85,7 @@ import org.telegram.messenger.FileLoader; import org.telegram.messenger.FileLog; import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaController; import org.telegram.messenger.MediaDataController; @@ -100,6 +104,8 @@ import org.telegram.tgnet.TLRPC; import org.telegram.ui.Components.AudioVisualizerDrawable; import org.telegram.ui.Components.BackgroundGradientDrawable; +import org.telegram.ui.Components.Bulletin; +import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.ChatThemeBottomSheet; import org.telegram.ui.Components.ChoosingStickerStatusDrawable; import org.telegram.ui.Components.CombinedDrawable; @@ -119,6 +125,8 @@ import org.telegram.ui.Components.ThemeEditorView; import org.telegram.ui.Components.TypingDotsDrawable; import org.telegram.ui.RoundVideoProgressShadow; +import org.telegram.ui.ThemeActivity; +import org.telegram.messenger.support.SparseLongArray; import java.io.File; import java.io.FileInputStream; @@ -150,6 +158,11 @@ public class Theme { public static final int MSG_OUT_COLOR_BLACK = 0xff212121; public static final int MSG_OUT_COLOR_WHITE = 0xffffffff; + public static final int default_shadow_color = ColorUtils.setAlphaComponent(Color.BLACK, 27); + + public static void applyDefaultShadow(Paint paint) { + paint.setShadowLayer(dpf2(1), 0, dpf2(0.33f), default_shadow_color); + } public static class BackgroundDrawableSettings { @@ -757,6 +770,10 @@ public void draw(Canvas canvas, Paint paintToUse) { } if (invalidatePath || overrideRoundRadius != 0) { path.rewind(); + int heightHalf = (bounds.height() - padding) >> 1; + if (rad > heightHalf) { + rad = heightHalf; + } if (isOut) { if (drawFullBubble || currentType == TYPE_PREVIEW || paintToUse != null || drawFullBottom) { if (currentType == TYPE_MEDIA) { @@ -1389,7 +1406,6 @@ public boolean fillAccentColors(HashMap currentColorsNoAccent, } myMessagesAccent = getAccentColor(hsvTemp1, color, firstColor); } - boolean changeMyMessagesColors = (myMessagesAccent != 0 && (parentTheme.accentBaseColor != 0 && myMessagesAccent != parentTheme.accentBaseColor || accentColor != 0 && accentColor != myMessagesAccent)); if (changeMyMessagesColors || accentColor2 != 0) { if (accentColor2 != 0) { @@ -1490,7 +1506,6 @@ public boolean fillAccentColors(HashMap currentColorsNoAccent, currentColors.put(key_chat_outInstant, 
textColor); currentColors.put(key_chat_outInstantSelected, textColor); currentColors.put(key_chat_outPreviewInstantText, textColor); - currentColors.put(key_chat_outPreviewInstantSelectedText, textColor); currentColors.put(key_chat_outViews, textColor); currentColors.put(key_chat_outViewsSelected, textColor); @@ -3037,7 +3052,6 @@ public void run() { public static Paint chat_docBackPaint; public static Paint chat_deleteProgressPaint; - public static Paint chat_botProgressPaint; public static Paint chat_urlPaint; public static Paint chat_outUrlPaint; public static Paint chat_textSearchSelectionPaint; @@ -3190,8 +3204,7 @@ public void run() { public static RLottieDrawable[] chat_attachButtonDrawables = new RLottieDrawable[6]; public static Drawable[] chat_locationDrawable = new Drawable[2]; public static Drawable[] chat_contactDrawable = new Drawable[2]; - public static Drawable[][] chat_fileStatesDrawable = new Drawable[10][2]; - public static Drawable[][] chat_photoStatesDrawables = new Drawable[13][2]; + public static Drawable[][] chat_fileStatesDrawable = new Drawable[5][2]; public static Drawable calllog_msgCallUpRedDrawable; public static Drawable calllog_msgCallUpGreenDrawable; @@ -3212,10 +3225,8 @@ public void run() { public static final String key_dialogTextLink = "dialogTextLink"; public static final String key_dialogLinkSelection = "dialogLinkSelection"; public static final String key_dialogTextRed = "dialogTextRed"; - public static final String key_dialogTextRed2 = "dialogTextRed2"; public static final String key_dialogTextBlue = "dialogTextBlue"; public static final String key_dialogTextBlue2 = "dialogTextBlue2"; - public static final String key_dialogTextBlue3 = "dialogTextBlue3"; public static final String key_dialogTextBlue4 = "dialogTextBlue4"; public static final String key_dialogTextGray = "dialogTextGray"; public static final String key_dialogTextGray2 = "dialogTextGray2"; @@ -3231,11 +3242,8 @@ public void run() { public static final String key_dialogScrollGlow = "dialogScrollGlow"; public static final String key_dialogRoundCheckBox = "dialogRoundCheckBox"; public static final String key_dialogRoundCheckBoxCheck = "dialogRoundCheckBoxCheck"; - public static final String key_dialogBadgeBackground = "dialogBadgeBackground"; - public static final String key_dialogBadgeText = "dialogBadgeText"; public static final String key_dialogRadioBackground = "dialogRadioBackground"; public static final String key_dialogRadioBackgroundChecked = "dialogRadioBackgroundChecked"; - public static final String key_dialogProgressCircle = "dialogProgressCircle"; public static final String key_dialogLineProgress = "dialogLineProgress"; public static final String key_dialogLineProgressBackground = "dialogLineProgressBackground"; public static final String key_dialogButton = "dialogButton"; @@ -3285,8 +3293,6 @@ public void run() { public static final String key_windowBackgroundWhiteRedText3 = "windowBackgroundWhiteRedText3"; public static final String key_windowBackgroundWhiteRedText4 = "windowBackgroundWhiteRedText4"; public static final String key_windowBackgroundWhiteRedText5 = "windowBackgroundWhiteRedText5"; - public static final String key_windowBackgroundWhiteRedText6 = "windowBackgroundWhiteRedText6"; - public static final String key_windowBackgroundWhiteYellowText = "windowBackgroundWhiteYellowText"; public static final String key_windowBackgroundWhiteGrayText = "windowBackgroundWhiteGrayText"; public static final String key_windowBackgroundWhiteGrayText2 = 
"windowBackgroundWhiteGrayText2"; public static final String key_windowBackgroundWhiteGrayText3 = "windowBackgroundWhiteGrayText3"; @@ -3295,7 +3301,6 @@ public void run() { public static final String key_windowBackgroundWhiteGrayText6 = "windowBackgroundWhiteGrayText6"; public static final String key_windowBackgroundWhiteGrayText7 = "windowBackgroundWhiteGrayText7"; public static final String key_windowBackgroundWhiteGrayText8 = "windowBackgroundWhiteGrayText8"; - public static final String key_windowBackgroundWhiteGrayLine = "windowBackgroundWhiteGrayLine"; public static final String key_windowBackgroundWhiteBlackText = "windowBackgroundWhiteBlackText"; public static final String key_windowBackgroundWhiteHintText = "windowBackgroundWhiteHintText"; public static final String key_windowBackgroundWhiteValueText = "windowBackgroundWhiteValueText"; @@ -3392,7 +3397,6 @@ public void run() { public static final String key_actionBarDefaultSelector = "actionBarDefaultSelector"; public static final String key_actionBarWhiteSelector = "actionBarWhiteSelector"; public static final String key_actionBarDefaultIcon = "actionBarDefaultIcon"; - public static final String key_actionBarTipBackground = "actionBarTipBackground"; public static final String key_actionBarActionModeDefault = "actionBarActionModeDefault"; public static final String key_actionBarActionModeDefaultTop = "actionBarActionModeDefaultTop"; public static final String key_actionBarActionModeDefaultIcon = "actionBarActionModeDefaultIcon"; @@ -3426,7 +3430,6 @@ public void run() { public static final String key_chats_nameArchived = "chats_nameArchived"; public static final String key_chats_secretName = "chats_secretName"; public static final String key_chats_secretIcon = "chats_secretIcon"; - public static final String key_chats_nameIcon = "chats_nameIcon"; public static final String key_chats_pinnedIcon = "chats_pinnedIcon"; public static final String key_chats_archiveBackground = "chats_archiveBackground"; public static final String key_chats_archivePinBackground = "chats_archivePinBackground"; @@ -3465,21 +3468,14 @@ public void run() { public static final String key_chats_menuPhoneCats = "chats_menuPhoneCats"; public static final String key_chats_menuTopBackgroundCats = "chats_menuTopBackgroundCats"; public static final String key_chats_menuTopBackground = "chats_menuTopBackground"; - public static final String key_chats_menuCloud = "chats_menuCloud"; - public static final String key_chats_menuCloudBackgroundCats = "chats_menuCloudBackgroundCats"; public static final String key_chats_actionIcon = "chats_actionIcon"; public static final String key_chats_actionBackground = "chats_actionBackground"; public static final String key_chats_actionPressedBackground = "chats_actionPressedBackground"; - public static final String key_chats_actionUnreadIcon = "chats_actionUnreadIcon"; - public static final String key_chats_actionUnreadBackground = "chats_actionUnreadBackground"; - public static final String key_chats_actionUnreadPressedBackground = "chats_actionUnreadPressedBackground"; public static final String key_chats_archivePullDownBackground = "chats_archivePullDownBackground"; public static final String key_chats_archivePullDownBackgroundActive = "chats_archivePullDownBackgroundActive"; public static final String key_chats_tabUnreadActiveBackground = "chats_tabUnreadActiveBackground"; public static final String key_chats_tabUnreadUnactiveBackground = "chats_tabUnreadUnactiveBackground"; - public static final String 
key_chat_attachMediaBanBackground = "chat_attachMediaBanBackground"; - public static final String key_chat_attachMediaBanText = "chat_attachMediaBanText"; public static final String key_chat_attachCheckBoxCheck = "chat_attachCheckBoxCheck"; public static final String key_chat_attachCheckBoxBackground = "chat_attachCheckBoxBackground"; public static final String key_chat_attachPhotoBackground = "chat_attachPhotoBackground"; @@ -3495,23 +3491,18 @@ public void run() { public static final String key_chat_inPollWrongAnswer = "chat_inPollWrongAnswer"; public static final String key_chat_outPollWrongAnswer = "chat_outPollWrongAnswer"; + public static final String key_chat_attachIcon = "chat_attachIcon"; public static final String key_chat_attachGalleryBackground = "chat_attachGalleryBackground"; - public static final String key_chat_attachGalleryIcon = "chat_attachGalleryIcon"; public static final String key_chat_attachGalleryText = "chat_attachGalleryText"; public static final String key_chat_attachAudioBackground = "chat_attachAudioBackground"; - public static final String key_chat_attachAudioIcon = "chat_attachAudioIcon"; public static final String key_chat_attachAudioText = "chat_attachAudioText"; public static final String key_chat_attachFileBackground = "chat_attachFileBackground"; - public static final String key_chat_attachFileIcon = "chat_attachFileIcon"; public static final String key_chat_attachFileText = "chat_attachFileText"; public static final String key_chat_attachContactBackground = "chat_attachContactBackground"; - public static final String key_chat_attachContactIcon = "chat_attachContactIcon"; public static final String key_chat_attachContactText = "chat_attachContactText"; public static final String key_chat_attachLocationBackground = "chat_attachLocationBackground"; - public static final String key_chat_attachLocationIcon = "chat_attachLocationIcon"; public static final String key_chat_attachLocationText = "chat_attachLocationText"; public static final String key_chat_attachPollBackground = "chat_attachPollBackground"; - public static final String key_chat_attachPollIcon = "chat_attachPollIcon"; public static final String key_chat_attachPollText = "chat_attachPollText"; public static final String key_chat_status = "chat_status"; @@ -3579,12 +3570,9 @@ public void run() { public static final String key_chat_previewGameText = "chat_previewGameText"; public static final String key_chat_inPreviewInstantText = "chat_inPreviewInstantText"; public static final String key_chat_outPreviewInstantText = "chat_outPreviewInstantText"; - public static final String key_chat_inPreviewInstantSelectedText = "chat_inPreviewInstantSelectedText"; - public static final String key_chat_outPreviewInstantSelectedText = "chat_outPreviewInstantSelectedText"; public static final String key_chat_secretTimeText = "chat_secretTimeText"; public static final String key_chat_stickerNameText = "chat_stickerNameText"; public static final String key_chat_botButtonText = "chat_botButtonText"; - public static final String key_chat_botProgress = "chat_botProgress"; public static final String key_chat_inForwardedNameText = "chat_inForwardedNameText"; public static final String key_chat_outForwardedNameText = "chat_outForwardedNameText"; public static final String key_chat_inPsaNameText = "chat_inPsaNameText"; @@ -3690,7 +3678,6 @@ public void run() { public static final String key_chat_messagePanelVoiceLock = "key_chat_messagePanelVoiceLock"; public static final String key_chat_messagePanelVoiceLockBackground = 
"key_chat_messagePanelVoiceLockBackground"; public static final String key_chat_messagePanelVoiceLockShadow = "key_chat_messagePanelVoiceLockShadow"; - public static final String key_chat_messagePanelVideoFrame = "chat_messagePanelVideoFrame"; public static final String key_chat_topPanelBackground = "chat_topPanelBackground"; public static final String key_chat_topPanelClose = "chat_topPanelClose"; public static final String key_chat_topPanelLine = "chat_topPanelLine"; @@ -3703,33 +3690,20 @@ public void run() { public static final String key_chat_outLoader = "chat_outLoader"; public static final String key_chat_outLoaderSelected = "chat_outLoaderSelected"; public static final String key_chat_inLoaderPhoto = "chat_inLoaderPhoto"; - public static final String key_chat_inLoaderPhotoSelected = "chat_inLoaderPhotoSelected"; - public static final String key_chat_inLoaderPhotoIcon = "chat_inLoaderPhotoIcon"; - public static final String key_chat_inLoaderPhotoIconSelected = "chat_inLoaderPhotoIconSelected"; - public static final String key_chat_outLoaderPhoto = "chat_outLoaderPhoto"; - public static final String key_chat_outLoaderPhotoSelected = "chat_outLoaderPhotoSelected"; - public static final String key_chat_outLoaderPhotoIcon = "chat_outLoaderPhotoIcon"; - public static final String key_chat_outLoaderPhotoIconSelected = "chat_outLoaderPhotoIconSelected"; public static final String key_chat_mediaLoaderPhoto = "chat_mediaLoaderPhoto"; public static final String key_chat_mediaLoaderPhotoSelected = "chat_mediaLoaderPhotoSelected"; public static final String key_chat_mediaLoaderPhotoIcon = "chat_mediaLoaderPhotoIcon"; public static final String key_chat_mediaLoaderPhotoIconSelected = "chat_mediaLoaderPhotoIconSelected"; public static final String key_chat_inLocationBackground = "chat_inLocationBackground"; public static final String key_chat_inLocationIcon = "chat_inLocationIcon"; - public static final String key_chat_outLocationBackground = "chat_outLocationBackground"; public static final String key_chat_outLocationIcon = "chat_outLocationIcon"; public static final String key_chat_inContactBackground = "chat_inContactBackground"; public static final String key_chat_inContactIcon = "chat_inContactIcon"; public static final String key_chat_outContactBackground = "chat_outContactBackground"; public static final String key_chat_outContactIcon = "chat_outContactIcon"; - public static final String key_chat_inFileIcon = "chat_inFileIcon"; - public static final String key_chat_inFileSelectedIcon = "chat_inFileSelectedIcon"; - public static final String key_chat_outFileIcon = "chat_outFileIcon"; - public static final String key_chat_outFileSelectedIcon = "chat_outFileSelectedIcon"; public static final String key_chat_replyPanelIcons = "chat_replyPanelIcons"; public static final String key_chat_replyPanelClose = "chat_replyPanelClose"; public static final String key_chat_replyPanelName = "chat_replyPanelName"; - public static final String key_chat_replyPanelMessage = "chat_replyPanelMessage"; public static final String key_chat_replyPanelLine = "chat_replyPanelLine"; public static final String key_chat_searchPanelIcons = "chat_searchPanelIcons"; public static final String key_chat_searchPanelText = "chat_searchPanelText"; @@ -3742,21 +3716,16 @@ public void run() { public static final String key_chat_unreadMessagesStartBackground = "chat_unreadMessagesStartBackground"; public static final String key_chat_inlineResultIcon = "chat_inlineResultIcon"; public static final String key_chat_emojiPanelBackground 
= "chat_emojiPanelBackground"; - public static final String key_chat_emojiPanelBadgeBackground = "chat_emojiPanelBadgeBackground"; - public static final String key_chat_emojiPanelBadgeText = "chat_emojiPanelBadgeText"; public static final String key_chat_emojiSearchBackground = "chat_emojiSearchBackground"; public static final String key_chat_emojiSearchIcon = "chat_emojiSearchIcon"; public static final String key_chat_emojiPanelShadowLine = "chat_emojiPanelShadowLine"; public static final String key_chat_emojiPanelEmptyText = "chat_emojiPanelEmptyText"; public static final String key_chat_emojiPanelIcon = "chat_emojiPanelIcon"; - public static final String key_chat_emojiPanelIconSelector = "chat_emojiPanelIconSelector"; public static final String key_chat_emojiBottomPanelIcon = "chat_emojiBottomPanelIcon"; public static final String key_chat_emojiPanelIconSelected = "chat_emojiPanelIconSelected"; public static final String key_chat_emojiPanelStickerPackSelector = "chat_emojiPanelStickerPackSelector"; public static final String key_chat_emojiPanelStickerPackSelectorLine = "chat_emojiPanelStickerPackSelectorLine"; public static final String key_chat_emojiPanelBackspace = "chat_emojiPanelBackspace"; - public static final String key_chat_emojiPanelMasksIcon = "chat_emojiPanelMasksIcon"; - public static final String key_chat_emojiPanelMasksIconSelected = "chat_emojiPanelMasksIconSelected"; public static final String key_chat_emojiPanelTrendingTitle = "chat_emojiPanelTrendingTitle"; public static final String key_chat_emojiPanelStickerSetName = "chat_emojiPanelStickerSetName"; public static final String key_chat_emojiPanelStickerSetNameHighlight = "chat_emojiPanelStickerSetNameHighlight"; @@ -3781,15 +3750,11 @@ public void run() { public static final String key_chat_gifSaveHintText = "chat_gifSaveHintText"; public static final String key_chat_gifSaveHintBackground = "chat_gifSaveHintBackground"; public static final String key_chat_goDownButton = "chat_goDownButton"; - public static final String key_chat_goDownButtonShadow = "chat_goDownButtonShadow"; public static final String key_chat_goDownButtonIcon = "chat_goDownButtonIcon"; public static final String key_chat_goDownButtonCounter = "chat_goDownButtonCounter"; public static final String key_chat_goDownButtonCounterBackground = "chat_goDownButtonCounterBackground"; - public static final String key_chat_secretTimerBackground = "chat_secretTimerBackground"; - public static final String key_chat_secretTimerText = "chat_secretTimerText"; public static final String key_chat_outTextSelectionHighlight = "chat_outTextSelectionHighlight"; public static final String key_chat_inTextSelectionHighlight = "chat_inTextSelectionHighlight"; - public static final String key_chat_recordedVoiceHighlight = "key_chat_recordedVoiceHighlight"; public static final String key_chat_TextSelectionCursor = "chat_TextSelectionCursor"; public static final String key_chat_outTextSelectionCursor = "chat_outTextSelectionCursor"; public static final String key_chat_inBubbleLocationPlaceholder = "chat_inBubbleLocationPlaceholder"; @@ -3799,9 +3764,7 @@ public void run() { public static final String key_voipgroup_listSelector = "voipgroup_listSelector"; public static final String key_voipgroup_inviteMembersBackground = "voipgroup_inviteMembersBackground"; public static final String key_voipgroup_actionBar = "voipgroup_actionBar"; - public static final String key_voipgroup_emptyView = "voipgroup_emptyView"; public static final String key_voipgroup_actionBarItems = 
"voipgroup_actionBarItems"; - public static final String key_voipgroup_actionBarSubtitle = "voipgroup_actionBarSubtitle"; public static final String key_voipgroup_actionBarItemsSelector = "voipgroup_actionBarItemsSelector"; public static final String key_voipgroup_actionBarUnscrolled = "voipgroup_actionBarUnscrolled"; public static final String key_voipgroup_listViewBackgroundUnscrolled = "voipgroup_listViewBackgroundUnscrolled"; @@ -3834,7 +3797,6 @@ public void run() { public static final String key_voipgroup_disabledButtonActive = "voipgroup_disabledButtonActive"; public static final String key_voipgroup_disabledButtonActiveScrolled = "voipgroup_disabledButtonActiveScrolled"; public static final String key_voipgroup_connectingProgress = "voipgroup_connectingProgress"; - public static final String key_voipgroup_blueText = "voipgroup_blueText"; public static final String key_voipgroup_scrollUp = "voipgroup_scrollUp"; public static final String key_voipgroup_searchPlaceholder = "voipgroup_searchPlaceholder"; public static final String key_voipgroup_searchBackground = "voipgroup_searchBackground"; @@ -3884,7 +3846,6 @@ public void run() { public static final String key_sharedMedia_linkPlaceholder = "sharedMedia_linkPlaceholder"; public static final String key_sharedMedia_linkPlaceholderText = "sharedMedia_linkPlaceholderText"; public static final String key_sharedMedia_photoPlaceholder = "sharedMedia_photoPlaceholder"; - public static final String key_sharedMedia_actionMode = "sharedMedia_actionMode"; public static final String key_featuredStickers_addedIcon = "featuredStickers_addedIcon"; public static final String key_featuredStickers_buttonProgress = "featuredStickers_buttonProgress"; @@ -3897,7 +3858,6 @@ public void run() { public static final String key_stickers_menu = "stickers_menu"; public static final String key_stickers_menuSelector = "stickers_menuSelector"; - public static final String key_changephoneinfo_image = "changephoneinfo_image"; public static final String key_changephoneinfo_image2 = "changephoneinfo_image2"; public static final String key_groupcreate_hintText = "groupcreate_hintText"; @@ -3914,11 +3874,6 @@ public void run() { public static final String key_login_progressInner = "login_progressInner"; public static final String key_login_progressOuter = "login_progressOuter"; - public static final String key_musicPicker_checkbox = "musicPicker_checkbox"; - public static final String key_musicPicker_checkboxCheck = "musicPicker_checkboxCheck"; - public static final String key_musicPicker_buttonBackground = "musicPicker_buttonBackground"; - public static final String key_musicPicker_buttonIcon = "musicPicker_buttonIcon"; - public static final String key_picker_enabledButton = "picker_enabledButton"; public static final String key_picker_disabledButton = "picker_disabledButton"; public static final String key_picker_badge = "picker_badge"; @@ -3955,41 +3910,14 @@ public void run() { public static final String key_sheet_scrollUp = "key_sheet_scrollUp"; public static final String key_sheet_other = "key_sheet_other"; - public static final String key_wallet_blackBackground = "wallet_blackBackground"; - public static final String key_wallet_graySettingsBackground = "wallet_graySettingsBackground"; - public static final String key_wallet_grayBackground = "wallet_grayBackground"; - public static final String key_wallet_whiteBackground = "wallet_whiteBackground"; - public static final String key_wallet_blackBackgroundSelector = "wallet_blackBackgroundSelector"; - public static 
final String key_wallet_whiteText = "wallet_whiteText"; - public static final String key_wallet_blackText = "wallet_blackText"; - public static final String key_wallet_statusText = "wallet_statusText"; - public static final String key_wallet_grayText = "wallet_grayText"; - public static final String key_wallet_grayText2 = "wallet_grayText2"; - public static final String key_wallet_greenText = "wallet_greenText"; - public static final String key_wallet_redText = "wallet_redText"; - public static final String key_wallet_dateText = "wallet_dateText"; - public static final String key_wallet_commentText = "wallet_commentText"; - public static final String key_wallet_releaseBackground = "wallet_releaseBackground"; - public static final String key_wallet_pullBackground = "wallet_pullBackground"; - public static final String key_wallet_buttonBackground = "wallet_buttonBackground"; - public static final String key_wallet_buttonPressedBackground = "wallet_buttonPressedBackground"; - public static final String key_wallet_buttonText = "wallet_buttonText"; - public static final String key_wallet_addressConfirmBackground = "wallet_addressConfirmBackground"; - //ununsed - public static final String key_chat_outBroadcast = "chat_outBroadcast"; - public static final String key_chat_mediaBroadcast = "chat_mediaBroadcast"; - - public static final String key_player_actionBar = "player_actionBar"; public static final String key_player_actionBarSelector = "player_actionBarSelector"; public static final String key_player_actionBarTitle = "player_actionBarTitle"; - public static final String key_player_actionBarTop = "player_actionBarTop"; public static final String key_player_actionBarSubtitle = "player_actionBarSubtitle"; public static final String key_player_actionBarItems = "player_actionBarItems"; public static final String key_player_background = "player_background"; public static final String key_player_time = "player_time"; public static final String key_player_progressBackground = "player_progressBackground"; - public static final String key_player_progressBackground2 = "player_progressBackground2"; public static final String key_player_progressCachedBackground = "key_player_progressCachedBackground"; public static final String key_player_progress = "player_progress"; public static final String key_player_button = "player_button"; @@ -4001,24 +3929,31 @@ public void run() { public static final String key_statisticChartActiveLine = "statisticChartActiveLine"; public static final String key_statisticChartInactivePickerChart = "statisticChartInactivePickerChart"; public static final String key_statisticChartActivePickerChart = "statisticChartActivePickerChart"; - public static final String key_statisticChartPopupBackground = "statisticChartPopupBackground"; public static final String key_statisticChartRipple = "statisticChartRipple"; public static final String key_statisticChartBackZoomColor = "statisticChartBackZoomColor"; - public static final String key_statisticChartCheckboxInactive = "statisticChartCheckboxInactive"; - public static final String key_statisticChartNightIconColor = "statisticChartNightIconColor"; public static final String key_statisticChartChevronColor = "statisticChartChevronColor"; - public static final String key_statisticChartHighlightColor = "statisticChartHighlightColor"; - public final static String key_statisticChartLine_blue = "statisticChartLine_blue"; - public final static String key_statisticChartLine_green = "statisticChartLine_green"; - public final static String 
key_statisticChartLine_red = "statisticChartLine_red"; - public final static String key_statisticChartLine_golden = "statisticChartLine_golden"; - public final static String key_statisticChartLine_lightblue = "statisticChartLine_lightblue"; - public final static String key_statisticChartLine_lightgreen = "statisticChartLine_lightgreen"; - public final static String key_statisticChartLine_orange = "statisticChartLine_orange"; - public final static String key_statisticChartLine_indigo = "statisticChartLine_indigo"; - public final static String key_statisticChartLine_purple = "statisticChartLine_purple"; - public final static String key_statisticChartLine_cyan = "statisticChartLine_cyan"; - public final static String key_statisticChartLineEmpty = "statisticChartLineEmpty"; + public static final String key_statisticChartLine_blue = "statisticChartLine_blue"; + public static final String key_statisticChartLine_green = "statisticChartLine_green"; + public static final String key_statisticChartLine_red = "statisticChartLine_red"; + public static final String key_statisticChartLine_golden = "statisticChartLine_golden"; + public static final String key_statisticChartLine_lightblue = "statisticChartLine_lightblue"; + public static final String key_statisticChartLine_lightgreen = "statisticChartLine_lightgreen"; + public static final String key_statisticChartLine_orange = "statisticChartLine_orange"; + public static final String key_statisticChartLine_indigo = "statisticChartLine_indigo"; + public static final String key_statisticChartLine_purple = "statisticChartLine_purple"; + public static final String key_statisticChartLine_cyan = "statisticChartLine_cyan"; + public static final String key_statisticChartLineEmpty = "statisticChartLineEmpty"; + + public static final String key_color_lightblue = "color_lightblue"; + public static final String key_color_blue = "color_blue"; + public static final String key_color_green = "color_green"; + public static final String key_color_lightgreen = "color_lightgreen"; + public static final String key_color_red = "color_red"; + public static final String key_color_orange = "color_orange"; + public static final String key_color_yellow = "color_yellow"; + public static final String key_color_purple = "color_purple"; + public static final String key_color_cyan = "color_cyan"; + public static final String[] keys_colors = { key_color_lightblue, key_color_blue, key_color_green, key_color_lightgreen, key_color_red, key_color_orange, key_color_yellow, key_color_purple, key_color_cyan }; public static final String key_chat_outReactionButtonBackground = "chat_outReactionButtonBackground"; public static final String key_chat_inReactionButtonBackground = "chat_inReactionButtonBackground"; @@ -4138,10 +4073,8 @@ public void run() { defaultColors.put(key_dialogTextLink, 0xff2678b6); defaultColors.put(key_dialogLinkSelection, 0x3362a9e3); defaultColors.put(key_dialogTextRed, 0xffcd5a5a); - defaultColors.put(key_dialogTextRed2, 0xffde3a3a); defaultColors.put(key_dialogTextBlue, 0xff2f8cc9); defaultColors.put(key_dialogTextBlue2, 0xff3a95d5); - defaultColors.put(key_dialogTextBlue3, 0xff3ec1f9); defaultColors.put(key_dialogTextBlue4, 0xff19a7e8); defaultColors.put(key_dialogTextGray, 0xff348bc1); defaultColors.put(key_dialogTextGray2, 0xff757575); @@ -4160,7 +4093,6 @@ public void run() { defaultColors.put(key_dialogCheckboxSquareDisabled, 0xffb0b0b0); defaultColors.put(key_dialogRadioBackground, 0xffb3b3b3); defaultColors.put(key_dialogRadioBackgroundChecked, 0xff37a9f0); - 
defaultColors.put(key_dialogProgressCircle, 0xff0A0D0F); defaultColors.put(key_dialogLineProgress, 0xff527da3); defaultColors.put(key_dialogLineProgressBackground, 0xffdbdbdb); defaultColors.put(key_dialogButton, 0xff4991cc); @@ -4168,8 +4100,6 @@ public void run() { defaultColors.put(key_dialogScrollGlow, 0xfff5f6f7); defaultColors.put(key_dialogRoundCheckBox, 0xff4cb4f5); defaultColors.put(key_dialogRoundCheckBoxCheck, 0xffffffff); - defaultColors.put(key_dialogBadgeBackground, 0xff3ec1f9); - defaultColors.put(key_dialogBadgeText, 0xffffffff); defaultColors.put(key_dialogCameraIcon, 0xffffffff); defaultColors.put(key_dialog_inlineProgressBackground, 0xf6f0f2f5); defaultColors.put(key_dialog_inlineProgress, 0xff6b7378); @@ -4184,7 +4114,6 @@ public void run() { defaultColors.put(key_dialogEmptyImage, 0xff9fa4a8); defaultColors.put(key_dialogEmptyText, 0xff8c9094); defaultColors.put(key_dialogSwipeRemove, 0xffe56555); - defaultColors.put(key_dialogSwipeRemove, 0xffe56555); defaultColors.put(key_dialogReactionMentionBackground, 0xffF05459); defaultColors.put(key_windowBackgroundWhite, 0xffffffff); @@ -4209,8 +4138,6 @@ public void run() { defaultColors.put(key_windowBackgroundWhiteRedText3, 0xffd24949); defaultColors.put(key_windowBackgroundWhiteRedText4, 0xffcf3030); defaultColors.put(key_windowBackgroundWhiteRedText5, 0xffed3939); - defaultColors.put(key_windowBackgroundWhiteRedText6, 0xffff6666); - defaultColors.put(key_windowBackgroundWhiteYellowText, 0xffD87B29); defaultColors.put(key_windowBackgroundWhiteGrayText, 0xff838c96); defaultColors.put(key_windowBackgroundWhiteGrayText2, 0xff82868a); defaultColors.put(key_windowBackgroundWhiteGrayText3, 0xff999999); @@ -4219,7 +4146,6 @@ public void run() { defaultColors.put(key_windowBackgroundWhiteGrayText6, 0xff757575); defaultColors.put(key_windowBackgroundWhiteGrayText7, 0xffc6c6c6); defaultColors.put(key_windowBackgroundWhiteGrayText8, 0xff6d6d72); - defaultColors.put(key_windowBackgroundWhiteGrayLine, 0xffdbdbdb); defaultColors.put(key_windowBackgroundWhiteBlackText, 0xff222222); defaultColors.put(key_windowBackgroundWhiteHintText, 0xffa8a8a8); defaultColors.put(key_windowBackgroundWhiteValueText, 0xff3a95d5); @@ -4341,7 +4267,6 @@ public void run() { defaultColors.put(key_chats_nameArchived, 0xff525252); defaultColors.put(key_chats_secretName, 0xff00a60e); defaultColors.put(key_chats_secretIcon, 0xff19b126); - defaultColors.put(key_chats_nameIcon, 0xff242424); defaultColors.put(key_chats_pinnedIcon, 0xffa8a8a8); defaultColors.put(key_chats_message, 0xff8b8d8f); defaultColors.put(key_chats_messageArchived, 0xff919191); @@ -4372,20 +4297,13 @@ public void run() { defaultColors.put(key_chats_menuName, 0xffffffff); defaultColors.put(key_chats_menuPhone, 0xffffffff); defaultColors.put(key_chats_menuPhoneCats, 0xffc2e5ff); - defaultColors.put(key_chats_menuCloud, 0xffffffff); - defaultColors.put(key_chats_menuCloudBackgroundCats, 0xff427ba9); defaultColors.put(key_chats_actionIcon, 0xffffffff); defaultColors.put(key_chats_actionBackground, 0xff65a9e0); defaultColors.put(key_chats_actionPressedBackground, 0xff569dd6); - defaultColors.put(key_chats_actionUnreadIcon, 0xff737373); - defaultColors.put(key_chats_actionUnreadBackground, 0xffffffff); - defaultColors.put(key_chats_actionUnreadPressedBackground, 0xfff2f2f2); defaultColors.put(key_chats_menuTopBackgroundCats, 0xff598fba); defaultColors.put(key_chats_archivePullDownBackground, 0xffc6c9cc); defaultColors.put(key_chats_archivePullDownBackgroundActive, 0xff66a9e0); - 
defaultColors.put(key_chat_attachMediaBanBackground, 0xff464646); - defaultColors.put(key_chat_attachMediaBanText, 0xffffffff); defaultColors.put(key_chat_attachCheckBoxCheck, 0xffffffff); defaultColors.put(key_chat_attachCheckBoxBackground, 0xff39b2f7); defaultColors.put(key_chat_attachPhotoBackground, 0x0c000000); @@ -4396,24 +4314,19 @@ public void run() { defaultColors.put(key_chat_attachPermissionText, 0xff6f777a); defaultColors.put(key_chat_attachEmptyImage, 0xffcccccc); + defaultColors.put(key_chat_attachIcon, 0xffffffff); defaultColors.put(key_chat_attachGalleryBackground, 0xff459df5); defaultColors.put(key_chat_attachGalleryText, 0xff2e8de9); - defaultColors.put(key_chat_attachGalleryIcon, 0xffffffff); defaultColors.put(key_chat_attachAudioBackground, 0xffeb6060); defaultColors.put(key_chat_attachAudioText, 0xffde4747); - defaultColors.put(key_chat_attachAudioIcon, 0xffffffff); defaultColors.put(key_chat_attachFileBackground, 0xff34b9f1); defaultColors.put(key_chat_attachFileText, 0xff14a8e4); - defaultColors.put(key_chat_attachFileIcon, 0xffffffff); defaultColors.put(key_chat_attachContactBackground, 0xfff2c04b); defaultColors.put(key_chat_attachContactText, 0xffdfa000); - defaultColors.put(key_chat_attachContactIcon, 0xffffffff); defaultColors.put(key_chat_attachLocationBackground, 0xff60c255); defaultColors.put(key_chat_attachLocationText, 0xff3cab2f); - defaultColors.put(key_chat_attachLocationIcon, 0xffffffff); defaultColors.put(key_chat_attachPollBackground, 0xfff2c04b); defaultColors.put(key_chat_attachPollText, 0xffdfa000); - defaultColors.put(key_chat_attachPollIcon, 0xffffffff); defaultColors.put(key_chat_inPollCorrectAnswer, 0xff60c255); defaultColors.put(key_chat_outPollCorrectAnswer, 0xff60c255); @@ -4476,12 +4389,9 @@ public void run() { defaultColors.put(key_chat_previewGameText, 0xffffffff); defaultColors.put(key_chat_inPreviewInstantText, 0xff3a8ccf); defaultColors.put(key_chat_outPreviewInstantText, 0xff55ab4f); - defaultColors.put(key_chat_inPreviewInstantSelectedText, 0xff3079b5); - defaultColors.put(key_chat_outPreviewInstantSelectedText, 0xff489943); defaultColors.put(key_chat_secretTimeText, 0xffe4e2e0); defaultColors.put(key_chat_stickerNameText, 0xffffffff); defaultColors.put(key_chat_botButtonText, 0xffffffff); - defaultColors.put(key_chat_botProgress, 0xffffffff); defaultColors.put(key_chat_inForwardedNameText, 0xff3886c7); defaultColors.put(key_chat_outForwardedNameText, 0xff55ab4f); defaultColors.put(key_chat_inPsaNameText, 0xff5a9c39); @@ -4573,21 +4483,16 @@ public void run() { defaultColors.put(key_chat_outLinkSelectBackground, 0x3362a9e3); defaultColors.put(key_chat_textSelectBackground, 0x6662a9e3); defaultColors.put(key_chat_emojiPanelBackground, 0xfff0f2f5); - defaultColors.put(key_chat_emojiPanelBadgeBackground, 0xff4da6ea); - defaultColors.put(key_chat_emojiPanelBadgeText, 0xffffffff); defaultColors.put(key_chat_emojiSearchBackground, 0xffe5e9ee); defaultColors.put(key_chat_emojiSearchIcon, 0xff94a1af); defaultColors.put(key_chat_emojiPanelShadowLine, 0x12000000); defaultColors.put(key_chat_emojiPanelEmptyText, 0xff949ba1); defaultColors.put(key_chat_emojiPanelIcon, 0xff9da4ab); - defaultColors.put(key_chat_emojiPanelIconSelector, 0x0b000000); defaultColors.put(key_chat_emojiBottomPanelIcon, 0xff8c9197); defaultColors.put(key_chat_emojiPanelIconSelected, 0xff5E6976); defaultColors.put(key_chat_emojiPanelStickerPackSelector, 0xffe2e5e7); defaultColors.put(key_chat_emojiPanelStickerPackSelectorLine, 0xff56abf0); 
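(Aside for readers of this hunk: the defaultColors table edited above and below maps string theme keys to packed 0xAARRGGBB ints. The following is a minimal standalone sketch of that idea in plain Java, not the actual Theme class; getDefaultColor and the magenta "missing key" value are invented for illustration only.)

import java.util.HashMap;
import java.util.Map;

// Simplified model of a theme key -> packed 0xAARRGGBB color table.
public class DefaultColorsSketch {
    private static final Map<String, Integer> defaultColors = new HashMap<>();

    static {
        // Values copied from the hunk above; the surrounding Theme machinery is omitted.
        defaultColors.put("chat_emojiPanelBackspace", 0xff8c9197);
        defaultColors.put("chat_emojiPanelTrendingTitle", 0xff222222);
        defaultColors.put("chat_emojiPanelStickerSetName", 0xff828b94);
    }

    // Returns the default color for a key, or opaque magenta so missing keys are visible.
    static int getDefaultColor(String key) {
        Integer color = defaultColors.get(key);
        return color != null ? color : 0xffff00ff;
    }

    public static void main(String[] args) {
        int c = getDefaultColor("chat_emojiPanelBackspace");
        // Unpack the 0xAARRGGBB layout used by the table above.
        int a = (c >>> 24) & 0xff, r = (c >> 16) & 0xff, g = (c >> 8) & 0xff, b = c & 0xff;
        System.out.printf("argb(%d, %d, %d, %d)%n", a, r, g, b);
    }
}
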
defaultColors.put(key_chat_emojiPanelBackspace, 0xff8c9197); - defaultColors.put(key_chat_emojiPanelMasksIcon, 0xffffffff); - defaultColors.put(key_chat_emojiPanelMasksIconSelected, 0xff62bfe8); defaultColors.put(key_chat_emojiPanelTrendingTitle, 0xff222222); defaultColors.put(key_chat_emojiPanelStickerSetName, 0xff828b94); defaultColors.put(key_chat_emojiPanelStickerSetNameHighlight, 0xff278ddb); @@ -4599,20 +4504,13 @@ public void run() { defaultColors.put(key_chat_unreadMessagesStartArrowIcon, 0xffa2b5c7); defaultColors.put(key_chat_unreadMessagesStartText, 0xff5695cc); defaultColors.put(key_chat_unreadMessagesStartBackground, 0xffffffff); - defaultColors.put(key_chat_inFileIcon, 0xffa2b5c7); - defaultColors.put(key_chat_inFileSelectedIcon, 0xff87b6c5); - defaultColors.put(key_chat_outFileIcon, 0xff85bf78); - defaultColors.put(key_chat_outFileSelectedIcon, 0xff85bf78); defaultColors.put(key_chat_inLocationBackground, 0xffebf0f5); defaultColors.put(key_chat_inLocationIcon, 0xffa2b5c7); - defaultColors.put(key_chat_outLocationBackground, 0xffdaf5c3); defaultColors.put(key_chat_outLocationIcon, 0xff87bf78); defaultColors.put(key_chat_inContactBackground, 0xff72b5e8); defaultColors.put(key_chat_inContactIcon, 0xffffffff); defaultColors.put(key_chat_outContactBackground, 0xff78c272); defaultColors.put(key_chat_outContactIcon, 0xffefffde); - defaultColors.put(key_chat_outBroadcast, 0xff46aa36); - defaultColors.put(key_chat_mediaBroadcast, 0xffffffff); defaultColors.put(key_chat_searchPanelIcons, 0xff676a6f); defaultColors.put(key_chat_searchPanelText, 0xff676a6f); defaultColors.put(key_chat_secretChatStatusText, 0xff7f7f7f); @@ -4621,7 +4519,6 @@ public void run() { defaultColors.put(key_chat_replyPanelIcons, 0xff57a8e6); defaultColors.put(key_chat_replyPanelClose, 0xff8e959b); defaultColors.put(key_chat_replyPanelName, 0xff3a8ccf); - defaultColors.put(key_chat_replyPanelMessage, 0xff222222); defaultColors.put(key_chat_replyPanelLine, 0xffe8e8e8); defaultColors.put(key_chat_messagePanelBackground, 0xffffffff); defaultColors.put(key_chat_messagePanelText, 0xff000000); @@ -4635,7 +4532,6 @@ public void run() { defaultColors.put(key_chat_recordedVoiceProgress, 0xffB1DEFF); defaultColors.put(key_chat_recordedVoiceProgressInner, 0xffffffff); defaultColors.put(key_chat_recordVoiceCancel, 0xff3A95D4); - defaultColors.put(key_chat_recordedVoiceHighlight, 0x64ffffff); defaultColors.put(key_chat_messagePanelSend, 0xff62b0eb); defaultColors.put(key_chat_messagePanelVoiceLock, 0xffa4a4a4); defaultColors.put(key_chat_messagePanelVoiceLockBackground, 0xffffffff); @@ -4645,7 +4541,6 @@ public void run() { defaultColors.put(key_chat_gifSaveHintText, 0xffffffff); defaultColors.put(key_chat_gifSaveHintBackground, 0xcc111111); defaultColors.put(key_chat_goDownButton, 0xffffffff); - defaultColors.put(key_chat_goDownButtonShadow, 0xff000000); defaultColors.put(key_chat_goDownButtonIcon, 0xff8e959b); defaultColors.put(key_chat_goDownButtonCounter, 0xffffffff); defaultColors.put(key_chat_goDownButtonCounterBackground, 0xff4da2e8); @@ -4667,19 +4562,10 @@ public void run() { defaultColors.put(key_chat_outLoader, 0xff78c272); defaultColors.put(key_chat_outLoaderSelected, 0xff6ab564); defaultColors.put(key_chat_inLoaderPhoto, 0xffa2b8c8); - defaultColors.put(key_chat_inLoaderPhotoSelected, 0xffa2b5c7); - defaultColors.put(key_chat_inLoaderPhotoIcon, 0xfffcfcfc); - defaultColors.put(key_chat_inLoaderPhotoIconSelected, 0xffebf0f5); - defaultColors.put(key_chat_outLoaderPhoto, 0xff85bf78); - 
defaultColors.put(key_chat_outLoaderPhotoSelected, 0xff7db870); - defaultColors.put(key_chat_outLoaderPhotoIcon, 0xffdaf5c3); - defaultColors.put(key_chat_outLoaderPhotoIconSelected, 0xffc0e8a4); defaultColors.put(key_chat_mediaLoaderPhoto, 0x66000000); defaultColors.put(key_chat_mediaLoaderPhotoSelected, 0x7f000000); defaultColors.put(key_chat_mediaLoaderPhotoIcon, 0xffffffff); defaultColors.put(key_chat_mediaLoaderPhotoIconSelected, 0xffd9d9d9); - defaultColors.put(key_chat_secretTimerBackground, 0xcc3e648e); - defaultColors.put(key_chat_secretTimerText, 0xffffffff); defaultColors.put(key_chat_serviceBackgroundSelector, 0x20ffffff); defaultColors.put(key_profile_creatorIcon, 0xff3a95d5); @@ -4696,16 +4582,13 @@ public void run() { defaultColors.put(key_profile_tabSelectedLine, 0xff4fa6e9); defaultColors.put(key_profile_tabSelector, 0x0f000000); - defaultColors.put(key_player_actionBar, 0xffffffff); defaultColors.put(key_player_actionBarSelector, 0x0f000000); defaultColors.put(key_player_actionBarTitle, 0xff2f3438); - defaultColors.put(key_player_actionBarTop, 0x99000000); defaultColors.put(key_player_actionBarSubtitle, 0xff8a8a8a); defaultColors.put(key_player_actionBarItems, 0xff8a8a8a); defaultColors.put(key_player_background, 0xffffffff); defaultColors.put(key_player_time, 0xff8c9296); defaultColors.put(key_player_progressBackground, 0xffEBEDF0); - defaultColors.put(key_player_progressBackground2, 0xffCCD3DB); defaultColors.put(key_player_progressCachedBackground, 0xffC5DCF0); defaultColors.put(key_player_progress, 0xff54AAEB); defaultColors.put(key_player_button, 0xff333333); @@ -4764,7 +4647,6 @@ public void run() { defaultColors.put(key_sharedMedia_linkPlaceholder, 0xfff0f3f5); defaultColors.put(key_sharedMedia_linkPlaceholderText, 0xffb7bec3); defaultColors.put(key_sharedMedia_photoPlaceholder, 0xffedf3f7); - defaultColors.put(key_sharedMedia_actionMode, 0xff4687b3); defaultColors.put(key_checkbox, 0xff5ec245); defaultColors.put(key_checkboxCheck, 0xffffffff); @@ -4773,7 +4655,6 @@ public void run() { defaultColors.put(key_stickers_menu, 0xffb6bdc5); defaultColors.put(key_stickers_menuSelector, 0x0f000000); - defaultColors.put(key_changephoneinfo_image, 0xffb8bfc5); defaultColors.put(key_changephoneinfo_image2, 0xff50a7ea); defaultColors.put(key_groupcreate_hintText, 0xffa1aab3); @@ -4790,10 +4671,6 @@ public void run() { defaultColors.put(key_login_progressInner, 0xffe1eaf2); defaultColors.put(key_login_progressOuter, 0xff62a0d0); - defaultColors.put(key_musicPicker_checkbox, 0xff29b6f7); - defaultColors.put(key_musicPicker_checkboxCheck, 0xffffffff); - defaultColors.put(key_musicPicker_buttonBackground, 0xff5cafea); - defaultColors.put(key_musicPicker_buttonIcon, 0xffffffff); defaultColors.put(key_picker_enabledButton, 0xff19a7e8); defaultColors.put(key_picker_disabledButton, 0xff999999); defaultColors.put(key_picker_badge, 0xff29b6f7); @@ -4805,26 +4682,6 @@ public void run() { defaultColors.put(key_undo_cancelColor, 0xff85caff); defaultColors.put(key_undo_infoColor, 0xffffffff); - defaultColors.put(key_wallet_blackBackground, 0xff000000); - defaultColors.put(key_wallet_graySettingsBackground, 0xfff0f0f0); - defaultColors.put(key_wallet_grayBackground, 0xff292929); - defaultColors.put(key_wallet_whiteBackground, 0xffffffff); - defaultColors.put(key_wallet_blackBackgroundSelector, 0x40ffffff); - defaultColors.put(key_wallet_whiteText, 0xffffffff); - defaultColors.put(key_wallet_blackText, 0xff222222); - defaultColors.put(key_wallet_statusText, 0xff808080); - 
defaultColors.put(key_wallet_grayText, 0xff777777); - defaultColors.put(key_wallet_grayText2, 0xff666666); - defaultColors.put(key_wallet_greenText, 0xff37a818); - defaultColors.put(key_wallet_redText, 0xffdb4040); - defaultColors.put(key_wallet_dateText, 0xff999999); - defaultColors.put(key_wallet_commentText, 0xff999999); - defaultColors.put(key_wallet_releaseBackground, 0xff307cbb); - defaultColors.put(key_wallet_pullBackground, 0xff212121); - defaultColors.put(key_wallet_buttonBackground, 0xff47a1e6); - defaultColors.put(key_wallet_buttonPressedBackground, 0xff2b8cd6); - defaultColors.put(key_wallet_buttonText, 0xffffffff); - defaultColors.put(key_wallet_addressConfirmBackground, 0x0d000000); defaultColors.put(key_chat_outTextSelectionHighlight, 0x2E3F9923); defaultColors.put(key_chat_inTextSelectionHighlight, 0x5062A9E3); defaultColors.put(key_chat_TextSelectionCursor, 0xFF419FE8); @@ -4842,11 +4699,7 @@ public void run() { defaultColors.put(key_statisticChartRipple, 0x2c7e9db7); defaultColors.put(key_statisticChartBackZoomColor, 0xff108BE3); - defaultColors.put(key_statisticChartCheckboxInactive, 0xffBDBDBD); - defaultColors.put(key_statisticChartNightIconColor, 0xff8E8E93); defaultColors.put(key_statisticChartChevronColor, 0xffD2D5D7); - defaultColors.put(key_statisticChartHighlightColor, 0x20ececec); - defaultColors.put(key_statisticChartPopupBackground,0xffffffff); defaultColors.put(key_statisticChartLine_blue, 0xff327FE5); defaultColors.put(key_statisticChartLine_green, 0xff61C752); @@ -4859,7 +4712,16 @@ public void run() { defaultColors.put(key_statisticChartLine_purple, 0xff9F79E8); defaultColors.put(key_statisticChartLine_cyan, 0xff40D0CA); defaultColors.put(key_statisticChartLineEmpty, 0xFFEEEEEE); - defaultColors.put(key_actionBarTipBackground, 0xFF446F94); + + defaultColors.put(key_color_blue, 0xff327FE5); + defaultColors.put(key_color_green, 0xff61C752); + defaultColors.put(key_color_red, 0xffE05356); + defaultColors.put(key_color_yellow, 0xffEBA52D); + defaultColors.put(key_color_lightblue, 0xff58A8ED); + defaultColors.put(key_color_lightgreen, 0xff8FCF39); + defaultColors.put(key_color_orange, 0xffF28C39); + defaultColors.put(key_color_purple, 0xff9F79E8); + defaultColors.put(key_color_cyan, 0xff40D0CA); defaultColors.put(key_voipgroup_checkMenu, 0xff6BB6F9); defaultColors.put(key_voipgroup_muteButton, 0xff77E55C); @@ -4889,9 +4751,7 @@ public void run() { defaultColors.put(key_voipgroup_lastSeenTextUnscrolled, 0xff858D94); defaultColors.put(key_voipgroup_mutedIconUnscrolled, 0xff7E868C); defaultColors.put(key_voipgroup_actionBar, 0xff0F1317); - defaultColors.put(key_voipgroup_emptyView, 0xff1A1D21); defaultColors.put(key_voipgroup_actionBarItems, 0xffffffff); - defaultColors.put(key_voipgroup_actionBarSubtitle, 0xff8A8A8A); defaultColors.put(key_voipgroup_actionBarItemsSelector, 0x1eBADBFF); defaultColors.put(key_voipgroup_mutedByAdminIcon, 0xffFF7070); defaultColors.put(key_voipgroup_mutedIcon, 0xff6F7980); @@ -4968,15 +4828,11 @@ public void run() { fallbackKeys.put(key_chat_outMediaIcon, key_chat_outBubble); fallbackKeys.put(key_chat_inMediaIconSelected, key_chat_inBubbleSelected); fallbackKeys.put(key_chat_outMediaIconSelected, key_chat_outBubbleSelected); - fallbackKeys.put(key_chats_actionUnreadIcon, key_profile_actionIcon); - fallbackKeys.put(key_chats_actionUnreadBackground, key_profile_actionBackground); - fallbackKeys.put(key_chats_actionUnreadPressedBackground, key_profile_actionPressedBackground); fallbackKeys.put(key_dialog_inlineProgressBackground, 
key_windowBackgroundGray); fallbackKeys.put(key_dialog_inlineProgress, key_chats_menuItemIcon); fallbackKeys.put(key_groupcreate_spanDelete, key_chats_actionIcon); fallbackKeys.put(key_sharedMedia_photoPlaceholder, key_windowBackgroundGray); fallbackKeys.put(key_chat_attachPollBackground, key_chat_attachAudioBackground); - fallbackKeys.put(key_chat_attachPollIcon, key_chat_attachAudioIcon); fallbackKeys.put(key_chats_onlineCircle, key_windowBackgroundWhiteBlueText); fallbackKeys.put(key_windowBackgroundWhiteBlueButton, key_windowBackgroundWhiteValueText); fallbackKeys.put(key_windowBackgroundWhiteBlueIcon, key_windowBackgroundWhiteValueText); @@ -4998,7 +4854,6 @@ public void run() { fallbackKeys.put(key_chat_emojiSearchIcon, key_chat_emojiPanelIcon); fallbackKeys.put(key_chat_emojiPanelStickerSetNameHighlight, key_windowBackgroundWhiteBlueText4); fallbackKeys.put(key_chat_emojiPanelStickerPackSelectorLine, key_chat_emojiPanelIconSelected); - fallbackKeys.put(key_sharedMedia_actionMode, key_actionBarDefault); fallbackKeys.put(key_sheet_scrollUp, key_chat_emojiPanelStickerPackSelector); fallbackKeys.put(key_sheet_other, key_player_actionBarItems); fallbackKeys.put(key_dialogSearchBackground, key_chat_emojiPanelStickerPackSelector); @@ -5009,7 +4864,6 @@ public void run() { fallbackKeys.put(key_dialogFloatingButtonPressed, key_dialogRoundCheckBox); fallbackKeys.put(key_dialogFloatingIcon, key_dialogRoundCheckBoxCheck); fallbackKeys.put(key_dialogShadowLine, key_chat_emojiPanelShadowLine); - fallbackKeys.put(key_chat_emojiPanelIconSelector, key_listSelector); fallbackKeys.put(key_actionBarDefaultArchived, key_actionBarDefault); fallbackKeys.put(key_actionBarDefaultArchivedSelector, key_actionBarDefaultSelector); fallbackKeys.put(key_actionBarDefaultArchivedIcon, key_actionBarDefaultIcon); @@ -5071,13 +4925,11 @@ public void run() { fallbackKeys.put(key_chat_outPollCorrectAnswer, key_chat_attachLocationBackground); fallbackKeys.put(key_chat_inPollWrongAnswer, key_chat_attachAudioBackground); fallbackKeys.put(key_chat_outPollWrongAnswer, key_chat_attachAudioBackground); - fallbackKeys.put(key_windowBackgroundWhiteYellowText, key_avatar_nameInMessageOrange); fallbackKeys.put(key_profile_tabText, key_windowBackgroundWhiteGrayText); fallbackKeys.put(key_profile_tabSelectedText, key_windowBackgroundWhiteBlueHeader); fallbackKeys.put(key_profile_tabSelectedLine, key_windowBackgroundWhiteBlueHeader); fallbackKeys.put(key_profile_tabSelector, key_listSelector); - fallbackKeys.put(key_statisticChartPopupBackground, key_dialogBackground); fallbackKeys.put(key_chat_attachGalleryText, key_chat_attachGalleryBackground); fallbackKeys.put(key_chat_attachAudioText, key_chat_attachAudioBackground); @@ -5114,9 +4966,19 @@ public void run() { fallbackKeys.put(key_avatar_background2Blue, key_avatar_backgroundBlue); fallbackKeys.put(key_avatar_background2Pink, key_avatar_backgroundPink); + fallbackKeys.put(key_statisticChartLine_orange, key_color_orange); + fallbackKeys.put(key_statisticChartLine_blue, key_color_blue); + fallbackKeys.put(key_statisticChartLine_red, key_color_red); + fallbackKeys.put(key_statisticChartLine_lightblue, key_color_lightblue); + fallbackKeys.put(key_statisticChartLine_golden, key_color_yellow); + fallbackKeys.put(key_statisticChartLine_purple, key_color_purple); + fallbackKeys.put(key_statisticChartLine_indigo, key_color_purple); + fallbackKeys.put(key_statisticChartLine_cyan, key_color_cyan); + themeAccentExclusionKeys.addAll(Arrays.asList(keys_avatar_background)); 
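(The fallbackKeys entries added just above route the old statisticChartLine_* keys to the new generic key_color_* keys. A rough standalone sketch of how such a fallback chain can resolve, in plain Java; the lookup order shown is an assumption for illustration, not Telegram's actual Theme.getColor implementation.)

import java.util.HashMap;
import java.util.Map;

// Sketch: when the active theme has no value for a key, remap the key through
// fallbackKeys (e.g. statisticChartLine_orange -> color_orange), then fall back
// to the defaultColors table.
public class FallbackKeysSketch {
    static final Map<String, Integer> currentColors = new HashMap<>();
    static final Map<String, Integer> defaultColors = new HashMap<>();
    static final Map<String, String> fallbackKeys = new HashMap<>();

    static {
        defaultColors.put("color_orange", 0xffF28C39);                 // value from the hunk above
        fallbackKeys.put("statisticChartLine_orange", "color_orange"); // mapping from the hunk above
    }

    // Assumed resolution order: theme value, then fallback key, then defaults.
    static int getColor(String key) {
        Integer color = currentColors.get(key);
        if (color == null) {
            String fallback = fallbackKeys.get(key);
            if (fallback != null) {
                color = currentColors.get(fallback);
                if (color == null) {
                    color = defaultColors.get(fallback);
                }
            }
        }
        if (color == null) {
            color = defaultColors.getOrDefault(key, 0xffff00ff); // visible "missing key" color
        }
        return color;
    }

    public static void main(String[] args) {
        System.out.printf("statisticChartLine_orange -> 0x%08X%n", getColor("statisticChartLine_orange"));
    }
}
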
themeAccentExclusionKeys.addAll(Arrays.asList(keys_avatar_background2)); themeAccentExclusionKeys.addAll(Arrays.asList(keys_avatar_nameInMessage)); + themeAccentExclusionKeys.addAll(Arrays.asList(keys_colors)); themeAccentExclusionKeys.add(key_chat_attachFileBackground); themeAccentExclusionKeys.add(key_chat_attachGalleryBackground); themeAccentExclusionKeys.add(key_chat_attachFileText); @@ -5139,7 +5001,6 @@ public void run() { themeAccentExclusionKeys.add(key_voipgroup_searchBackground); themeAccentExclusionKeys.add(key_voipgroup_leaveCallMenu); themeAccentExclusionKeys.add(key_voipgroup_scrollUp); - themeAccentExclusionKeys.add(key_voipgroup_blueText); themeAccentExclusionKeys.add(key_voipgroup_soundButton); themeAccentExclusionKeys.add(key_voipgroup_soundButtonActive); themeAccentExclusionKeys.add(key_voipgroup_soundButtonActiveScrolled); @@ -5159,9 +5020,7 @@ public void run() { themeAccentExclusionKeys.add(key_voipgroup_lastSeenTextUnscrolled); themeAccentExclusionKeys.add(key_voipgroup_mutedIconUnscrolled); themeAccentExclusionKeys.add(key_voipgroup_actionBar); - themeAccentExclusionKeys.add(key_voipgroup_emptyView); themeAccentExclusionKeys.add(key_voipgroup_actionBarItems); - themeAccentExclusionKeys.add(key_voipgroup_actionBarSubtitle); themeAccentExclusionKeys.add(key_voipgroup_actionBarItemsSelector); themeAccentExclusionKeys.add(key_voipgroup_mutedByAdminIcon); themeAccentExclusionKeys.add(key_voipgroup_mutedIcon); @@ -5221,7 +5080,6 @@ public void run() { myMessagesColorKeys.add(key_chat_outInstant); myMessagesColorKeys.add(key_chat_outInstantSelected); myMessagesColorKeys.add(key_chat_outPreviewInstantText); - myMessagesColorKeys.add(key_chat_outPreviewInstantSelectedText); myMessagesColorKeys.add(key_chat_outForwardedNameText); myMessagesColorKeys.add(key_chat_outViaBotNameText); myMessagesColorKeys.add(key_chat_outReplyLine); @@ -5261,17 +5119,9 @@ public void run() { myMessagesColorKeys.add(key_chat_outVenueInfoSelectedText); myMessagesColorKeys.add(key_chat_outLoader); myMessagesColorKeys.add(key_chat_outLoaderSelected); - myMessagesColorKeys.add(key_chat_outLoaderPhoto); - myMessagesColorKeys.add(key_chat_outLoaderPhotoSelected); - myMessagesColorKeys.add(key_chat_outLoaderPhotoIcon); - myMessagesColorKeys.add(key_chat_outLoaderPhotoIconSelected); - myMessagesColorKeys.add(key_chat_outLocationBackground); myMessagesColorKeys.add(key_chat_outLocationIcon); myMessagesColorKeys.add(key_chat_outContactBackground); myMessagesColorKeys.add(key_chat_outContactIcon); - myMessagesColorKeys.add(key_chat_outFileIcon); - myMessagesColorKeys.add(key_chat_outFileSelectedIcon); - myMessagesColorKeys.add(key_chat_outBroadcast); myMessagesColorKeys.add(key_chat_messageTextOut); myMessagesColorKeys.add(key_chat_messageLinkOut); @@ -7125,7 +6975,7 @@ private static void applyTheme(ThemeInfo themeInfo, boolean save, boolean remove SharedPreferences preferences = MessagesController.getGlobalMainSettings(); SharedPreferences.Editor editor = preferences.edit(); editor.putString("theme", themeInfo.getKey()); - editor.apply(); + editor.commit(); } String[] wallpaperLink = new String[1]; if (themeInfo.assetName != null) { @@ -7210,7 +7060,7 @@ private static void applyTheme(ThemeInfo themeInfo, boolean save, boolean remove SharedPreferences preferences = MessagesController.getGlobalMainSettings(); SharedPreferences.Editor editor = preferences.edit(); editor.remove("theme"); - editor.apply(); + editor.commit(); } currentColorsNoAccent.clear(); themedWallpaperFileOffset = 0; @@ -8979,38 +8829,38 
@@ public static void createCommonResources(Context context) { if (dialogs_archiveAvatarDrawable != null) { dialogs_archiveAvatarDrawable.setCallback(null); - dialogs_archiveAvatarDrawable.recycle(); + dialogs_archiveAvatarDrawable.recycle(false); } if (dialogs_archiveDrawable != null) { - dialogs_archiveDrawable.recycle(); + dialogs_archiveDrawable.recycle(false); } if (dialogs_unarchiveDrawable != null) { - dialogs_unarchiveDrawable.recycle(); + dialogs_unarchiveDrawable.recycle(false); } if (dialogs_pinArchiveDrawable != null) { - dialogs_pinArchiveDrawable.recycle(); + dialogs_pinArchiveDrawable.recycle(false); } if (dialogs_unpinArchiveDrawable != null) { - dialogs_unpinArchiveDrawable.recycle(); + dialogs_unpinArchiveDrawable.recycle(false); } if (dialogs_hidePsaDrawable != null) { - dialogs_hidePsaDrawable.recycle(); + dialogs_hidePsaDrawable.recycle(false); } dialogs_archiveAvatarDrawable = new RLottieDrawable(R.raw.chats_archiveavatar, "chats_archiveavatar", AndroidUtilities.dp(36), AndroidUtilities.dp(36), false, null); - dialogs_archiveDrawable = new RLottieDrawable(R.raw.chats_archive, "chats_archive", AndroidUtilities.dp(36), AndroidUtilities.dp(36)); - dialogs_unarchiveDrawable = new RLottieDrawable(R.raw.chats_unarchive, "chats_unarchive", AndroidUtilities.dp(AndroidUtilities.dp(36)), AndroidUtilities.dp(36)); - dialogs_pinArchiveDrawable = new RLottieDrawable(R.raw.chats_hide, "chats_hide", AndroidUtilities.dp(36), AndroidUtilities.dp(36)); - dialogs_unpinArchiveDrawable = new RLottieDrawable(R.raw.chats_unhide, "chats_unhide", AndroidUtilities.dp(36), AndroidUtilities.dp(36)); - dialogs_hidePsaDrawable = new RLottieDrawable(R.raw.chat_audio_record_delete, "chats_psahide", AndroidUtilities.dp(30), AndroidUtilities.dp(30)); + dialogs_archiveDrawable = new RLottieDrawable(R.raw.chats_archive, "chats_archive", AndroidUtilities.dp(36), AndroidUtilities.dp(36), false, null); + dialogs_unarchiveDrawable = new RLottieDrawable(R.raw.chats_unarchive, "chats_unarchive", AndroidUtilities.dp(AndroidUtilities.dp(36)), AndroidUtilities.dp(36), false, null); + dialogs_pinArchiveDrawable = new RLottieDrawable(R.raw.chats_hide, "chats_hide", AndroidUtilities.dp(36), AndroidUtilities.dp(36), false, null); + dialogs_unpinArchiveDrawable = new RLottieDrawable(R.raw.chats_unhide, "chats_unhide", AndroidUtilities.dp(36), AndroidUtilities.dp(36), false, null); + dialogs_hidePsaDrawable = new RLottieDrawable(R.raw.chat_audio_record_delete, "chats_psahide", AndroidUtilities.dp(30), AndroidUtilities.dp(30), false, null); - dialogs_swipeMuteDrawable = new RLottieDrawable(R.raw.swipe_mute, "swipe_mute", AndroidUtilities.dp(36), AndroidUtilities.dp(36)); - dialogs_swipeUnmuteDrawable = new RLottieDrawable(R.raw.swipe_unmute, "swipe_unmute", AndroidUtilities.dp(36), AndroidUtilities.dp(36)); + dialogs_swipeMuteDrawable = new RLottieDrawable(R.raw.swipe_mute, "swipe_mute", AndroidUtilities.dp(36), AndroidUtilities.dp(36), false, null); + dialogs_swipeUnmuteDrawable = new RLottieDrawable(R.raw.swipe_unmute, "swipe_unmute", AndroidUtilities.dp(36), AndroidUtilities.dp(36), false, null); - dialogs_swipeReadDrawable = new RLottieDrawable(R.raw.swipe_read, "swipe_read", AndroidUtilities.dp(36), AndroidUtilities.dp(36)); - dialogs_swipeUnreadDrawable = new RLottieDrawable(R.raw.swipe_unread, "swipe_unread", AndroidUtilities.dp(36), AndroidUtilities.dp(36)); - dialogs_swipeDeleteDrawable = new RLottieDrawable(R.raw.swipe_delete, "swipe_delete", AndroidUtilities.dp(36), AndroidUtilities.dp(36)); - 
dialogs_swipeUnpinDrawable = new RLottieDrawable(R.raw.swipe_unpin, "swipe_unpin", AndroidUtilities.dp(36), AndroidUtilities.dp(36)); - dialogs_swipePinDrawable = new RLottieDrawable(R.raw.swipe_pin, "swipe_pin", AndroidUtilities.dp(36), AndroidUtilities.dp(36)); + dialogs_swipeReadDrawable = new RLottieDrawable(R.raw.swipe_read, "swipe_read", AndroidUtilities.dp(36), AndroidUtilities.dp(36), false, null); + dialogs_swipeUnreadDrawable = new RLottieDrawable(R.raw.swipe_unread, "swipe_unread", AndroidUtilities.dp(36), AndroidUtilities.dp(36), false, null); + dialogs_swipeDeleteDrawable = new RLottieDrawable(R.raw.swipe_delete, "swipe_delete", AndroidUtilities.dp(36), AndroidUtilities.dp(36), false, null); + dialogs_swipeUnpinDrawable = new RLottieDrawable(R.raw.swipe_unpin, "swipe_unpin", AndroidUtilities.dp(36), AndroidUtilities.dp(36), false, null); + dialogs_swipePinDrawable = new RLottieDrawable(R.raw.swipe_pin, "swipe_pin", AndroidUtilities.dp(36), AndroidUtilities.dp(36), false, null); applyCommonTheme(); } @@ -9303,9 +9153,6 @@ public static void createCommonChatResources() { chat_docNamePaint.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); chat_docBackPaint = new Paint(Paint.ANTI_ALIAS_FLAG); chat_deleteProgressPaint = new Paint(Paint.ANTI_ALIAS_FLAG); - chat_botProgressPaint = new Paint(Paint.ANTI_ALIAS_FLAG); - chat_botProgressPaint.setStrokeCap(Paint.Cap.ROUND); - chat_botProgressPaint.setStyle(Paint.Style.STROKE); chat_locationTitlePaint = new TextPaint(Paint.ANTI_ALIAS_FLAG); chat_locationTitlePaint.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); chat_locationAddressPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG); @@ -9552,39 +9399,6 @@ public static void createChatResources(Context context, boolean fontsOnly) { chat_fileStatesDrawable[3][1] = createCircleDrawableWithIcon(AndroidUtilities.dp(44), R.drawable.msg_round_file_s); chat_fileStatesDrawable[4][0] = createCircleDrawableWithIcon(AndroidUtilities.dp(44), R.drawable.msg_round_cancel_m); chat_fileStatesDrawable[4][1] = createCircleDrawableWithIcon(AndroidUtilities.dp(44), R.drawable.msg_round_cancel_m); - chat_fileStatesDrawable[5][0] = createCircleDrawableWithIcon(AndroidUtilities.dp(44), R.drawable.msg_round_play_m); - chat_fileStatesDrawable[5][1] = createCircleDrawableWithIcon(AndroidUtilities.dp(44), R.drawable.msg_round_play_m); - chat_fileStatesDrawable[6][0] = createCircleDrawableWithIcon(AndroidUtilities.dp(44), R.drawable.msg_round_pause_m); - chat_fileStatesDrawable[6][1] = createCircleDrawableWithIcon(AndroidUtilities.dp(44), R.drawable.msg_round_pause_m); - chat_fileStatesDrawable[7][0] = createCircleDrawableWithIcon(AndroidUtilities.dp(44), R.drawable.msg_round_load_m); - chat_fileStatesDrawable[7][1] = createCircleDrawableWithIcon(AndroidUtilities.dp(44), R.drawable.msg_round_load_m); - chat_fileStatesDrawable[8][0] = createCircleDrawableWithIcon(AndroidUtilities.dp(44), R.drawable.msg_round_file_s); - chat_fileStatesDrawable[8][1] = createCircleDrawableWithIcon(AndroidUtilities.dp(44), R.drawable.msg_round_file_s); - chat_fileStatesDrawable[9][0] = createCircleDrawableWithIcon(AndroidUtilities.dp(44), R.drawable.msg_round_cancel_m); - chat_fileStatesDrawable[9][1] = createCircleDrawableWithIcon(AndroidUtilities.dp(44), R.drawable.msg_round_cancel_m); - - chat_photoStatesDrawables[0][0] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), R.drawable.msg_round_load_m); - chat_photoStatesDrawables[0][1] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), 
R.drawable.msg_round_load_m); - chat_photoStatesDrawables[1][0] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), R.drawable.msg_round_cancel_m); - chat_photoStatesDrawables[1][1] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), R.drawable.msg_round_cancel_m); - chat_photoStatesDrawables[2][0] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), R.drawable.msg_round_gif_m); - chat_photoStatesDrawables[2][1] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), R.drawable.msg_round_gif_m); - chat_photoStatesDrawables[3][0] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), R.drawable.msg_round_play_m); - chat_photoStatesDrawables[3][1] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), R.drawable.msg_round_play_m); - - chat_photoStatesDrawables[4][0] = chat_photoStatesDrawables[4][1] = resources.getDrawable(R.drawable.burn); - chat_photoStatesDrawables[5][0] = chat_photoStatesDrawables[5][1] = resources.getDrawable(R.drawable.circle); - chat_photoStatesDrawables[6][0] = chat_photoStatesDrawables[6][1] = resources.getDrawable(R.drawable.photocheck); - - chat_photoStatesDrawables[7][0] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), R.drawable.msg_round_load_m); - chat_photoStatesDrawables[7][1] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), R.drawable.msg_round_load_m); - chat_photoStatesDrawables[8][0] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), R.drawable.msg_round_cancel_m); - chat_photoStatesDrawables[8][1] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), R.drawable.msg_round_cancel_m); - - chat_photoStatesDrawables[10][0] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), R.drawable.msg_round_load_m); - chat_photoStatesDrawables[10][1] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), R.drawable.msg_round_load_m); - chat_photoStatesDrawables[11][0] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), R.drawable.msg_round_cancel_m); - chat_photoStatesDrawables[11][1] = createCircleDrawableWithIcon(AndroidUtilities.dp(48), R.drawable.msg_round_cancel_m); chat_contactDrawable[0] = createCircleDrawableWithIcon(AndroidUtilities.dp(44), R.drawable.msg_contact); chat_contactDrawable[1] = createCircleDrawableWithIcon(AndroidUtilities.dp(44), R.drawable.msg_contact); @@ -9672,8 +9486,7 @@ public static void createChatResources(Context context, boolean fontsOnly) { applyChatTheme(fontsOnly, false); } - if (!fontsOnly && chat_botProgressPaint != null) { - chat_botProgressPaint.setStrokeWidth(AndroidUtilities.dp(2)); + if (!fontsOnly && chat_infoPaint != null) { chat_infoPaint.setTextSize(AndroidUtilities.dp(12)); chat_stickerCommentCountPaint.setTextSize(AndroidUtilities.dp(11)); chat_docNamePaint.setTextSize(AndroidUtilities.dp(15)); @@ -9722,24 +9535,24 @@ public static void refreshAttachButtonsColors() { if (a == 0) { chat_attachButtonDrawables[a].setLayerColor("Color_Mount.**", getNonAnimatedColor(key_chat_attachGalleryBackground)); chat_attachButtonDrawables[a].setLayerColor("Color_PhotoShadow.**", getNonAnimatedColor(key_chat_attachGalleryBackground)); - chat_attachButtonDrawables[a].setLayerColor("White_Photo.**", getNonAnimatedColor(key_chat_attachGalleryIcon)); - chat_attachButtonDrawables[a].setLayerColor("White_BackPhoto.**", getNonAnimatedColor(key_chat_attachGalleryIcon)); + chat_attachButtonDrawables[a].setLayerColor("White_Photo.**", getNonAnimatedColor(key_chat_attachIcon)); + chat_attachButtonDrawables[a].setLayerColor("White_BackPhoto.**", getNonAnimatedColor(key_chat_attachIcon)); } else if (a == 1) { 
- chat_attachButtonDrawables[a].setLayerColor("White_Play1.**", getNonAnimatedColor(key_chat_attachAudioIcon)); - chat_attachButtonDrawables[a].setLayerColor("White_Play2.**", getNonAnimatedColor(key_chat_attachAudioIcon)); + chat_attachButtonDrawables[a].setLayerColor("White_Play1.**", getNonAnimatedColor(key_chat_attachIcon)); + chat_attachButtonDrawables[a].setLayerColor("White_Play2.**", getNonAnimatedColor(key_chat_attachIcon)); } else if (a == 2) { chat_attachButtonDrawables[a].setLayerColor("Color_Corner.**", getNonAnimatedColor(key_chat_attachFileBackground)); - chat_attachButtonDrawables[a].setLayerColor("White_List.**", getNonAnimatedColor(key_chat_attachFileIcon)); + chat_attachButtonDrawables[a].setLayerColor("White_List.**", getNonAnimatedColor(key_chat_attachIcon)); } else if (a == 3) { - chat_attachButtonDrawables[a].setLayerColor("White_User1.**", getNonAnimatedColor(key_chat_attachContactIcon)); - chat_attachButtonDrawables[a].setLayerColor("White_User2.**", getNonAnimatedColor(key_chat_attachContactIcon)); + chat_attachButtonDrawables[a].setLayerColor("White_User1.**", getNonAnimatedColor(key_chat_attachIcon)); + chat_attachButtonDrawables[a].setLayerColor("White_User2.**", getNonAnimatedColor(key_chat_attachIcon)); } else if (a == 4) { chat_attachButtonDrawables[a].setLayerColor("Color_Oval.**", getNonAnimatedColor(key_chat_attachLocationBackground)); - chat_attachButtonDrawables[a].setLayerColor("White_Pin.**", getNonAnimatedColor(key_chat_attachLocationIcon)); + chat_attachButtonDrawables[a].setLayerColor("White_Pin.**", getNonAnimatedColor(key_chat_attachIcon)); } else if (a == 5) { - chat_attachButtonDrawables[a].setLayerColor("White_Column 1.**", getNonAnimatedColor(key_chat_attachPollIcon)); - chat_attachButtonDrawables[a].setLayerColor("White_Column 2.**", getNonAnimatedColor(key_chat_attachPollIcon)); - chat_attachButtonDrawables[a].setLayerColor("White_Column 3.**", getNonAnimatedColor(key_chat_attachPollIcon)); + chat_attachButtonDrawables[a].setLayerColor("White_Column 1.**", getNonAnimatedColor(key_chat_attachIcon)); + chat_attachButtonDrawables[a].setLayerColor("White_Column 2.**", getNonAnimatedColor(key_chat_attachIcon)); + chat_attachButtonDrawables[a].setLayerColor("White_Column 3.**", getNonAnimatedColor(key_chat_attachIcon)); } chat_attachButtonDrawables[a].commitApplyLayerColors(); } @@ -9757,7 +9570,6 @@ public static void applyChatTheme(boolean fontsOnly, boolean bg) { chat_botButtonPaint.setColor(getColor(key_chat_botButtonText)); chat_urlPaint.setColor(getColor(key_chat_linkSelectBackground)); chat_outUrlPaint.setColor(getColor(key_chat_outLinkSelectBackground)); - chat_botProgressPaint.setColor(getColor(key_chat_botProgress)); chat_deleteProgressPaint.setColor(getColor(key_chat_secretTimeText)); chat_textSearchSelectionPaint.setColor(getColor(key_chat_textSelectBackground)); chat_msgErrorPaint.setColor(getColor(key_chat_sentError)); @@ -9852,30 +9664,10 @@ public static void applyChatTheme(boolean fontsOnly, boolean bg) { } for (int a = 0; a < 5; a++) { - setCombinedDrawableColor(chat_fileStatesDrawable[a][0], getColor(key_chat_outLoader), false); - setCombinedDrawableColor(chat_fileStatesDrawable[a][0], getColor(key_chat_outMediaIcon), true); - setCombinedDrawableColor(chat_fileStatesDrawable[a][1], getColor(key_chat_outLoaderSelected), false); - setCombinedDrawableColor(chat_fileStatesDrawable[a][1], getColor(key_chat_outMediaIconSelected), true); - setCombinedDrawableColor(chat_fileStatesDrawable[5 + a][0], getColor(key_chat_inLoader), false); 
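The refreshAttachButtonsColors() hunk above replaces the per-button icon colour keys (key_chat_attachGalleryIcon, key_chat_attachAudioIcon, key_chat_attachFileIcon, …) with the single key_chat_attachIcon. A minimal sketch of that pattern, assuming it lives inside Theme.java next to the code above; the helper name applyAttachIconColor is illustrative and not part of the patch:

    // Sketch only: applyAttachIconColor is a hypothetical helper, not part of this patch.
    // It captures the pattern used above: tint every named After Effects layer of an
    // attach-menu RLottieDrawable with the one shared key_chat_attachIcon colour, then
    // commit so the pending layer colours are applied.
    private static void applyAttachIconColor(RLottieDrawable drawable, String... layerPatterns) {
        int color = getNonAnimatedColor(key_chat_attachIcon);
        for (String pattern : layerPatterns) {
            drawable.setLayerColor(pattern, color); // colours every layer matching the pattern
        }
        drawable.commitApplyLayerColors();
    }

    // e.g. for the poll button (a == 5 above):
    // applyAttachIconColor(chat_attachButtonDrawables[5], "White_Column 1.**", "White_Column 2.**", "White_Column 3.**");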
- setCombinedDrawableColor(chat_fileStatesDrawable[5 + a][0], getColor(key_chat_inMediaIcon), true); - setCombinedDrawableColor(chat_fileStatesDrawable[5 + a][1], getColor(key_chat_inLoaderSelected), false); - setCombinedDrawableColor(chat_fileStatesDrawable[5 + a][1], getColor(key_chat_inMediaIconSelected), true); - } - for (int a = 0; a < 4; a++) { - setCombinedDrawableColor(chat_photoStatesDrawables[a][0], getColor(key_chat_mediaLoaderPhoto), false); - setCombinedDrawableColor(chat_photoStatesDrawables[a][0], getColor(key_chat_mediaLoaderPhotoIcon), true); - setCombinedDrawableColor(chat_photoStatesDrawables[a][1], getColor(key_chat_mediaLoaderPhotoSelected), false); - setCombinedDrawableColor(chat_photoStatesDrawables[a][1], getColor(key_chat_mediaLoaderPhotoIconSelected), true); - } - for (int a = 0; a < 2; a++) { - setCombinedDrawableColor(chat_photoStatesDrawables[7 + a][0], getColor(key_chat_outLoaderPhoto), false); - setCombinedDrawableColor(chat_photoStatesDrawables[7 + a][0], getColor(key_chat_outLoaderPhotoIcon), true); - setCombinedDrawableColor(chat_photoStatesDrawables[7 + a][1], getColor(key_chat_outLoaderPhotoSelected), false); - setCombinedDrawableColor(chat_photoStatesDrawables[7 + a][1], getColor(key_chat_outLoaderPhotoIconSelected), true); - setCombinedDrawableColor(chat_photoStatesDrawables[10 + a][0], getColor(key_chat_inLoaderPhoto), false); - setCombinedDrawableColor(chat_photoStatesDrawables[10 + a][0], getColor(key_chat_inLoaderPhotoIcon), true); - setCombinedDrawableColor(chat_photoStatesDrawables[10 + a][1], getColor(key_chat_inLoaderPhotoSelected), false); - setCombinedDrawableColor(chat_photoStatesDrawables[10 + a][1], getColor(key_chat_inLoaderPhotoIconSelected), true); + setCombinedDrawableColor(chat_fileStatesDrawable[a][0], getColor(key_chat_inLoader), false); + setCombinedDrawableColor(chat_fileStatesDrawable[a][0], getColor(key_chat_inMediaIcon), true); + setCombinedDrawableColor(chat_fileStatesDrawable[a][1], getColor(key_chat_inLoaderSelected), false); + setCombinedDrawableColor(chat_fileStatesDrawable[a][1], getColor(key_chat_inMediaIconSelected), true); } setCombinedDrawableColor(chat_contactDrawable[0], getColor(key_chat_inContactBackground), false); @@ -9993,7 +9785,7 @@ public static void applyChatServiceMessageColor(int[] custom, Drawable wallpaper } Drawable drawable = wallpaperOverride != null ? 
wallpaperOverride : currentWallpaper;
- boolean drawServiceGradient = drawable instanceof MotionBackgroundDrawable && SharedConfig.getDevicePerformanceClass() != SharedConfig.PERFORMANCE_CLASS_LOW && !SharedConfig.getLiteMode().enabled();
+ boolean drawServiceGradient = drawable instanceof MotionBackgroundDrawable && SharedConfig.getDevicePerformanceClass() != SharedConfig.PERFORMANCE_CLASS_LOW && LiteMode.isEnabled(LiteMode.FLAG_CHAT_BACKGROUND);
 if (drawServiceGradient) {
 Bitmap newBitmap = ((MotionBackgroundDrawable) drawable).getBitmap();
 if (serviceBitmap != newBitmap) {
@@ -10804,6 +10596,23 @@ public void onSizeReady(int width, int height) {
 settings.wallpaper = new ColorDrawable(selectedColor);
 }
 }
+
+ if (!LiteMode.isEnabled(LiteMode.FLAG_CHAT_BACKGROUND) && settings.wallpaper instanceof MotionBackgroundDrawable) {
+ MotionBackgroundDrawable motionBackgroundDrawable = (MotionBackgroundDrawable) settings.wallpaper;
+ int w, h;
+ if (motionBackgroundDrawable.getPatternBitmap() == null) {
+ w = Math.min(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y);
+ h = Math.max(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y);
+ } else {
+ w = motionBackgroundDrawable.getPatternBitmap().getWidth();
+ h = motionBackgroundDrawable.getPatternBitmap().getHeight();
+ }
+ Bitmap bitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
+ Canvas canvas = new Canvas(bitmap);
+ settings.wallpaper.setBounds(0, 0, bitmap.getWidth(), bitmap.getHeight());
+ settings.wallpaper.draw(canvas);
+ settings.wallpaper = new BitmapDrawable(bitmap);
+ }
 return settings;
 }
@@ -11198,6 +11007,38 @@ public static boolean isHome(ThemeAccent accent) {
 return false;
 }
+ public static void turnOffAutoNight(BaseFragment fragment) {
+ turnOffAutoNight(fragment != null ? fragment.getLayoutContainer() : null, () -> {
+ INavigationLayout nav = fragment != null ? fragment.getParentLayout() : null;
+ if (nav != null) {
+ nav.presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_NIGHT));
+ }
+ });
+ }
+
+ public static void turnOffAutoNight(FrameLayout container, Runnable openSettings) {
+ if (selectedAutoNightType != AUTO_NIGHT_TYPE_NONE) {
+ if (container != null && openSettings != null) {
+ try {
+ BulletinFactory.of(container, null).createSimpleBulletin(
+ R.raw.auto_night_off,
+ selectedAutoNightType == AUTO_NIGHT_TYPE_SYSTEM ?
+ LocaleController.getString("AutoNightSystemModeOff", R.string.AutoNightSystemModeOff) : + LocaleController.getString("AutoNightModeOff", R.string.AutoNightModeOff), + LocaleController.getString("Settings", R.string.Settings), + Bulletin.DURATION_PROLONG, + openSettings + ).show(); + } catch (Exception e) { + FileLog.e(e); + } + } + selectedAutoNightType = AUTO_NIGHT_TYPE_NONE; + saveAutoNightThemeConfig(); + cancelAutoNightThemeCallbacks(); + } + } + public static Paint DEBUG_RED = new Paint(); static { DEBUG_RED.setColor(0xffff0000); } public static Paint DEBUG_BLUE = new Paint(); static { DEBUG_BLUE.setColor(0xff0000ff); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ThemeDescription.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ThemeDescription.java index 88c3609520..23cbdec0e1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ThemeDescription.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ThemeDescription.java @@ -341,9 +341,6 @@ public void setColor(int color, boolean useDefault, boolean save) { ((ActionBar) viewToInvalidate).setPopupBackgroundColor(color, false); } } - if (viewToInvalidate instanceof VideoTimelineView) { - ((VideoTimelineView) viewToInvalidate).setColor(color); - } if (viewToInvalidate instanceof EmptyTextProgressView) { if ((changeFlags & FLAG_TEXTCOLOR) != 0) { ((EmptyTextProgressView) viewToInvalidate).setTextColor(color); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionIntroActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionIntroActivity.java index b93bdb3361..fa21e8e457 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionIntroActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionIntroActivity.java @@ -926,7 +926,6 @@ public ArrayList getThemeDescriptions() { themeDescriptions.add(new ThemeDescription(desctiptionLines[4], ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_windowBackgroundWhiteBlackText)); themeDescriptions.add(new ThemeDescription(desctiptionLines[5], ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_windowBackgroundWhiteBlackText)); - themeDescriptions.add(new ThemeDescription(null, ThemeDescription.FLAG_TEXTCOLOR, null, null, new Drawable[]{drawable1}, null, Theme.key_changephoneinfo_image)); themeDescriptions.add(new ThemeDescription(null, ThemeDescription.FLAG_TEXTCOLOR, null, null, new Drawable[]{drawable2}, null, Theme.key_changephoneinfo_image2)); return themeDescriptions; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/ContactsAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/ContactsAdapter.java index 341f1f94d4..e2c4563893 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/ContactsAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/ContactsAdapter.java @@ -79,9 +79,13 @@ public void setDisableSections(boolean value) { disableSections = value; } + public static final int SORT_TYPE_NONE = 0; + public static final int SORT_TYPE_BY_NAME = 1; + public static final int SORT_TYPE_BY_TIME = 2; + public void setSortType(int value, boolean force) { sortType = value; - if (sortType == 2) { + if (sortType == SORT_TYPE_BY_TIME) { if (onlineContacts == null || force) { onlineContacts = new ArrayList<>(ContactsController.getInstance(currentAccount).contacts); long selfId = UserConfig.getInstance(currentAccount).clientUserId; @@ -176,7 +180,7 @@ public Object getItem(int section, int position) { if (section == 0) { 
return null; } else { - if (sortType == 2) { + if (sortType == SORT_TYPE_BY_TIME) { if (section == 1) { if (position < onlineContacts.size()) { return MessagesController.getInstance(currentAccount).getUser(onlineContacts.get(position).user_id); @@ -224,7 +228,7 @@ public boolean isEnabled(RecyclerView.ViewHolder holder, int section, int row) { if (isEmpty) { return false; } - if (sortType == 2) { + if (sortType == SORT_TYPE_BY_TIME) { if (section == 1) { return row < onlineContacts.size(); } @@ -243,7 +247,7 @@ public boolean isEnabled(RecyclerView.ViewHolder holder, int section, int row) { public int getSectionCount() { int count; isEmpty = false; - if (sortType == 2) { + if (sortType == SORT_TYPE_BY_TIME) { count = 1; isEmpty = onlineContacts.isEmpty(); } else { @@ -296,7 +300,7 @@ public int getCountForSection(int section) { if (isEmpty) { return 1; } - if (sortType == 2) { + if (sortType == SORT_TYPE_BY_TIME) { if (section == 1) { return onlineContacts.isEmpty() ? 0 : onlineContacts.size() + 1; } @@ -327,7 +331,7 @@ public View getSectionHeaderView(int section, View view) { view = new LetterSectionCell(mContext); } LetterSectionCell cell = (LetterSectionCell) view; - if (sortType == 2 || disableSections || isEmpty) { + if (sortType == SORT_TYPE_BY_TIME || disableSections || isEmpty) { cell.setLetter(""); } else { if (onlyUsers != 0 && !isAdmin) { @@ -415,9 +419,9 @@ public void onBindViewHolder(int section, int position, RecyclerView.ViewHolder switch (holder.getItemViewType()) { case 0: UserCell userCell = (UserCell) holder.itemView; - userCell.setAvatarPadding(sortType == 2 || disableSections ? 6 : 58); + userCell.setAvatarPadding(sortType == SORT_TYPE_BY_TIME || disableSections ? 6 : 58); ArrayList arr; - if (sortType == 2) { + if (sortType == SORT_TYPE_BY_TIME) { arr = onlineContacts; } else { HashMap> usersSectionsDict = onlyUsers == 2 ? ContactsController.getInstance(currentAccount).usersMutualSectionsDict : ContactsController.getInstance(currentAccount).usersSectionsDict; @@ -474,9 +478,9 @@ public void onBindViewHolder(int section, int position, RecyclerView.ViewHolder break; case 2: GraySectionCell sectionCell = (GraySectionCell) holder.itemView; - if (sortType == 0) { + if (sortType == SORT_TYPE_NONE) { sectionCell.setText(LocaleController.getString("Contacts", R.string.Contacts)); - } else if (sortType == 1) { + } else if (sortType == SORT_TYPE_BY_NAME) { sectionCell.setText(LocaleController.getString("SortedByName", R.string.SortedByName)); } else { sectionCell.setText(LocaleController.getString("SortedByLastSeen", R.string.SortedByLastSeen)); @@ -512,7 +516,7 @@ public int getItemViewType(int section, int position) { if (isEmpty) { return 4; } - if (sortType == 2) { + if (sortType == SORT_TYPE_BY_TIME) { if (section == 1) { return position < onlineContacts.size() ? 0 : 3; } @@ -529,7 +533,7 @@ public int getItemViewType(int section, int position) { @Override public String getLetter(int position) { - if (sortType == 2 || isEmpty) { + if (sortType == SORT_TYPE_BY_TIME || isEmpty) { return null; } ArrayList sortedUsersSectionsArray = onlyUsers == 2 ? 
ContactsController.getInstance(currentAccount).sortedUsersMutualSectionsArray : ContactsController.getInstance(currentAccount).sortedUsersSectionsArray; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java index 620e73e6ae..7bf08053bc 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java @@ -39,6 +39,7 @@ import org.telegram.messenger.R; import org.telegram.messenger.SharedConfig; import org.telegram.messenger.UserConfig; +import org.telegram.messenger.UserObject; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLObject; import org.telegram.tgnet.TLRPC; @@ -48,8 +49,10 @@ import org.telegram.ui.Cells.DialogCell; import org.telegram.ui.Cells.DialogMeUrlCell; import org.telegram.ui.Cells.DialogsEmptyCell; +import org.telegram.ui.Cells.DialogsRequestedEmptyCell; import org.telegram.ui.Cells.HeaderCell; import org.telegram.ui.Cells.ProfileSearchCell; +import org.telegram.ui.Cells.RequestPeerRequirementsCell; import org.telegram.ui.Cells.ShadowSectionCell; import org.telegram.ui.Cells.TextCell; import org.telegram.ui.Cells.TextInfoPrivacyCell; @@ -83,7 +86,9 @@ public class DialogsAdapter extends RecyclerListView.SelectionAdapter implements VIEW_TYPE_NEW_CHAT_HINT = 11, VIEW_TYPE_TEXT = 12, VIEW_TYPE_CONTACTS_FLICKER = 13, - VIEW_TYPE_HEADER_2 = 14; + VIEW_TYPE_HEADER_2 = 14, + VIEW_TYPE_REQUIREMENTS = 15, + VIEW_TYPE_REQUIRED_EMPTY = 16; private Context mContext; private ArchiveHintCell archiveHintCell; @@ -119,7 +124,10 @@ public class DialogsAdapter extends RecyclerListView.SelectionAdapter implements private boolean isTransitionSupport; private boolean fromDiffUtils; - public DialogsAdapter(DialogsActivity fragment, Context context, int type, int folder, boolean onlySelect, ArrayList selected, int account) { + private TLRPC.RequestPeerType requestPeerType; + public boolean isEmpty; + + public DialogsAdapter(DialogsActivity fragment, Context context, int type, int folder, boolean onlySelect, ArrayList selected, int account, TLRPC.RequestPeerType requestPeerType) { mContext = context; parentFragment = fragment; dialogsType = type; @@ -136,6 +144,7 @@ public DialogsAdapter(DialogsActivity fragment, Context context, int type, int f if (folder == 0) { this.preloader = new DialogsPreloader(); } + this.requestPeerType = requestPeerType; } public void setRecyclerListView(RecyclerListView recyclerListView) { @@ -156,9 +165,9 @@ public int fixPosition(int position) { } if (showArchiveHint) { position -= 2; - } else if (dialogsType == 11 || dialogsType == 13) { + } else if (dialogsType == DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY_GROUPS || dialogsType == DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY) { position -= 2; - } else if (dialogsType == 12) { + } else if (dialogsType == DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY_USERS) { position -= 1; } return position; @@ -368,7 +377,7 @@ public ViewPager getArchiveHintCellPager() { } public void updateHasHints() { - hasHints = folderId == 0 && dialogsType == 0 && !isOnlySelect && !MessagesController.getInstance(currentAccount).hintDialogs.isEmpty(); + hasHints = folderId == 0 && dialogsType == DialogsActivity.DIALOGS_TYPE_DEFAULT && !isOnlySelect && !MessagesController.getInstance(currentAccount).hintDialogs.isEmpty(); } public void updateList(RecyclerListView recyclerListView, boolean hasHiddenArchive, float 
tabsTranslation) { @@ -451,7 +460,8 @@ public boolean isEnabled(RecyclerView.ViewHolder holder) { int viewType = holder.getItemViewType(); return viewType != VIEW_TYPE_FLICKER && viewType != VIEW_TYPE_EMPTY && viewType != VIEW_TYPE_DIVIDER && viewType != VIEW_TYPE_SHADOW && viewType != VIEW_TYPE_HEADER && viewType != VIEW_TYPE_ARCHIVE && - viewType != VIEW_TYPE_LAST_EMPTY && viewType != VIEW_TYPE_NEW_CHAT_HINT && viewType != VIEW_TYPE_CONTACTS_FLICKER; + viewType != VIEW_TYPE_LAST_EMPTY && viewType != VIEW_TYPE_NEW_CHAT_HINT && viewType != VIEW_TYPE_CONTACTS_FLICKER && + viewType != VIEW_TYPE_REQUIREMENTS && viewType != VIEW_TYPE_REQUIRED_EMPTY; } @Override @@ -459,7 +469,8 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup viewGroup, int viewT View view; switch (viewType) { case VIEW_TYPE_DIALOG: - if (dialogsType == 2) { + if (dialogsType == DialogsActivity.DIALOGS_TYPE_ADD_USERS_TO || + dialogsType == DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER) { view = new ProfileSearchCell(mContext); } else { DialogCell dialogCell = new DialogCell(parentFragment, mContext, true, false, currentAccount, null); @@ -469,6 +480,12 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup viewGroup, int viewT dialogCell.setIsTransitionSupport(isTransitionSupport); view = dialogCell; } + if (dialogsType == DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER) { + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + } + break; + case VIEW_TYPE_REQUIREMENTS: + view = new RequestPeerRequirementsCell(mContext); break; case VIEW_TYPE_FLICKER: case VIEW_TYPE_CONTACTS_FLICKER: @@ -524,6 +541,14 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { case VIEW_TYPE_EMPTY: view = new DialogsEmptyCell(mContext); break; + case VIEW_TYPE_REQUIRED_EMPTY: + view = new DialogsRequestedEmptyCell(mContext) { + @Override + protected void onButtonClick() { + onCreateGroupForThisClick(); + } + }; + break; case VIEW_TYPE_USER: view = new UserCell(mContext, 8, 0, false); break; @@ -611,12 +636,19 @@ protected void onTextDraw() { case VIEW_TYPE_TEXT: default: { view = new TextCell(mContext); + if (dialogsType == DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER) { + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + } } } view.setLayoutParams(new RecyclerView.LayoutParams(RecyclerView.LayoutParams.MATCH_PARENT, viewType == 5 ? 
RecyclerView.LayoutParams.MATCH_PARENT : RecyclerView.LayoutParams.WRAP_CONTENT)); return new RecyclerListView.Holder(view); } + public void onCreateGroupForThisClick() { + + } + public int lastDialogsEmptyType = -1; public int dialogsEmptyType() { @@ -637,10 +669,11 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int i) { case VIEW_TYPE_DIALOG: { TLRPC.Dialog dialog = (TLRPC.Dialog) getItem(i); TLRPC.Dialog nextDialog = (TLRPC.Dialog) getItem(i + 1); - if (dialogsType == 2) { + if (dialogsType == DialogsActivity.DIALOGS_TYPE_ADD_USERS_TO || dialogsType == DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER) { ProfileSearchCell cell = (ProfileSearchCell) holder.itemView; long oldDialogId = cell.getDialogId(); + TLObject object = null; TLRPC.Chat chat = null; CharSequence title = null; CharSequence subtitle; @@ -657,6 +690,7 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int i) { } if (chat != null) { + object = chat; title = chat.title; if (ChatObject.isChannel(chat) && !chat.megagroup) { if (chat.participants_count != 0) { @@ -683,15 +717,27 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int i) { } } else { subtitle = ""; + TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(dialog.id); + if (user != null) { + object = user; + title = UserObject.getUserName(user); + if (!UserObject.isReplyUser(user)) { + if (user.bot) { + subtitle = LocaleController.getString("Bot", R.string.Bot); + } else { + subtitle = LocaleController.formatUserStatus(currentAccount, user); + } + } + } } cell.useSeparator = nextDialog != null; - cell.setData(chat, null, title, subtitle, isRecent, false); + cell.setData(object, null, title, subtitle, isRecent, false); cell.setChecked(selectedDialogs.contains(cell.getDialogId()), oldDialogId == cell.getDialogId()); } else { DialogCell cell = (DialogCell) holder.itemView; cell.useSeparator = nextDialog != null; cell.fullSeparator = dialog.pinned && nextDialog != null && !nextDialog.pinned; - if (dialogsType == 0) { + if (dialogsType == DialogsActivity.DIALOGS_TYPE_DEFAULT) { if (AndroidUtilities.isTablet()) { cell.setDialogSelected(dialog.id == openedDialogId); } @@ -734,6 +780,10 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int i) { } break; } + case VIEW_TYPE_REQUIRED_EMPTY: { + ((DialogsRequestedEmptyCell) holder.itemView).set(requestPeerType); + break; + } case VIEW_TYPE_ME_URL: { DialogMeUrlCell cell = (DialogMeUrlCell) holder.itemView; cell.setRecentMeUrl((TLRPC.RecentMeUrl) getItem(i)); @@ -747,7 +797,11 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int i) { } case VIEW_TYPE_HEADER: { HeaderCell cell = (HeaderCell) holder.itemView; - if (dialogsType == 11 || dialogsType == 12 || dialogsType == 13) { + if ( + dialogsType == DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY_GROUPS || + dialogsType == DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY_USERS || + dialogsType == DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY + ) { if (i == 0) { cell.setText(LocaleController.getString("ImportHeader", R.string.ImportHeader)); } else { @@ -792,11 +846,24 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int i) { case VIEW_TYPE_TEXT: { TextCell cell = (TextCell) holder.itemView; cell.setColors(Theme.key_windowBackgroundWhiteBlueText4, Theme.key_windowBackgroundWhiteBlueText4); - cell.setTextAndIcon(LocaleController.getString("CreateGroupForImport", R.string.CreateGroupForImport), R.drawable.msg_groups_create, dialogsCount != 0); + if (requestPeerType != null) { + if 
(requestPeerType instanceof TLRPC.TL_requestPeerTypeBroadcast) { + cell.setTextAndIcon(LocaleController.getString("CreateChannelForThis", R.string.CreateChannelForThis), R.drawable.msg_channel_create, true); + } else { + cell.setTextAndIcon(LocaleController.getString("CreateGroupForThis", R.string.CreateGroupForThis), R.drawable.msg_groups_create, true); + } + } else { + cell.setTextAndIcon(LocaleController.getString("CreateGroupForImport", R.string.CreateGroupForImport), R.drawable.msg_groups_create, dialogsCount != 0); + } cell.setIsInDialogs(); cell.setOffsetFromImage(75); break; } + case VIEW_TYPE_REQUIREMENTS: { + RequestPeerRequirementsCell cell = (RequestPeerRequirementsCell) holder.itemView; + cell.set(requestPeerType); + break; + } } if (i >= dialogsCount + 1) { holder.itemView.setAlpha(1f); @@ -1098,6 +1165,7 @@ private void updateItemList() { MessagesController messagesController = MessagesController.getInstance(currentAccount); ArrayList array = parentFragment.getDialogsArray(currentAccount, dialogsType, folderId, dialogsListFrozen); dialogsCount = array.size(); + isEmpty = false; if (!hasHints && dialogsType == 0 && folderId == 0 && messagesController.isDialogsEndReached(folderId) && !forceUpdatingContacts) { if (messagesController.getAllFoldersDialogsCount() <= 10 && ContactsController.getInstance(currentAccount).doneLoadingContacts && !ContactsController.getInstance(currentAccount).contacts.isEmpty()) { @@ -1121,6 +1189,10 @@ private void updateItemList() { } } + if (requestPeerType != null) { + itemInternals.add(new ItemInternal(VIEW_TYPE_REQUIREMENTS)); + } + boolean stopUpdate = false; if (collapsedView || isTransitionSupport) { for (int k = 0; k < array.size(); k++) { @@ -1134,13 +1206,15 @@ private void updateItemList() { } if (dialogsCount == 0 && forceUpdatingContacts) { - itemInternals.add(new ItemInternal(VIEW_TYPE_EMPTY)); + isEmpty = true; + itemInternals.add(new ItemInternal(requestPeerType == null ? VIEW_TYPE_EMPTY : VIEW_TYPE_REQUIRED_EMPTY)); itemInternals.add(new ItemInternal(VIEW_TYPE_SHADOW)); itemInternals.add(new ItemInternal(VIEW_TYPE_HEADER)); itemInternals.add(new ItemInternal(VIEW_TYPE_CONTACTS_FLICKER)); - } else if (onlineContacts != null) { + } else if (onlineContacts != null && !onlineContacts.isEmpty()) { if (dialogsCount == 0) { - itemInternals.add(new ItemInternal(VIEW_TYPE_EMPTY)); + isEmpty = true; + itemInternals.add(new ItemInternal(requestPeerType == null ? 
VIEW_TYPE_EMPTY : VIEW_TYPE_REQUIRED_EMPTY)); itemInternals.add(new ItemInternal(VIEW_TYPE_SHADOW)); itemInternals.add(new ItemInternal(VIEW_TYPE_HEADER)); } else { @@ -1149,11 +1223,11 @@ private void updateItemList() { } itemInternals.add(new ItemInternal(VIEW_TYPE_SHADOW)); itemInternals.add(new ItemInternal(VIEW_TYPE_HEADER)); - for (int k = 0; k < onlineContacts.size(); k++) { - itemInternals.add(new ItemInternal(VIEW_TYPE_USER, onlineContacts.get(k))); - } - itemInternals.add(new ItemInternal(VIEW_TYPE_LAST_EMPTY)); } + for (int k = 0; k < onlineContacts.size(); k++) { + itemInternals.add(new ItemInternal(VIEW_TYPE_USER, onlineContacts.get(k))); + } + itemInternals.add(new ItemInternal(VIEW_TYPE_LAST_EMPTY)); stopUpdate = true; } else if (hasHints) { int count = MessagesController.getInstance(currentAccount).hintDialogs.size(); @@ -1165,32 +1239,39 @@ private void updateItemList() { } else if (showArchiveHint) { itemInternals.add(new ItemInternal(VIEW_TYPE_ARCHIVE)); itemInternals.add(new ItemInternal(VIEW_TYPE_SHADOW)); - } else if (dialogsType == 11 || dialogsType == 13) { + } else if (dialogsType == DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY_GROUPS || dialogsType == DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY) { itemInternals.add(new ItemInternal(VIEW_TYPE_HEADER)); itemInternals.add(new ItemInternal(VIEW_TYPE_TEXT)); - } else if (dialogsType == 12) { + } else if (dialogsType == DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY_USERS) { itemInternals.add(new ItemInternal(VIEW_TYPE_HEADER)); } + if ((requestPeerType instanceof TLRPC.TL_requestPeerTypeBroadcast || requestPeerType instanceof TLRPC.TL_requestPeerTypeChat) && dialogsCount > 0) { + itemInternals.add(new ItemInternal(VIEW_TYPE_TEXT)); + } + if (!stopUpdate) { for (int k = 0; k < array.size(); k++) { - if (dialogsType == 2 && array.get(k) instanceof DialogsActivity.DialogsHeader) { + if (dialogsType == DialogsActivity.DIALOGS_TYPE_ADD_USERS_TO && array.get(k) instanceof DialogsActivity.DialogsHeader) { itemInternals.add(new ItemInternal(VIEW_TYPE_HEADER_2, array.get(k))); } else { itemInternals.add(new ItemInternal(VIEW_TYPE_DIALOG, array.get(k))); } } - } - if (!forceShowEmptyCell && dialogsType != 7 && dialogsType != 8 && !MessagesController.getInstance(currentAccount).isDialogsEndReached(folderId)) { - itemInternals.add(new ItemInternal(VIEW_TYPE_FLICKER)); - } else if (dialogsCount == 0) { - itemInternals.add(new ItemInternal(VIEW_TYPE_EMPTY)); - } else { - if (folderId == 0 && dialogsCount > 10 && dialogsType == 0) { - itemInternals.add(new ItemInternal(VIEW_TYPE_NEW_CHAT_HINT)); + if (!forceShowEmptyCell && dialogsType != 7 && dialogsType != 8 && !MessagesController.getInstance(currentAccount).isDialogsEndReached(folderId)) { + if (dialogsCount != 0) { + itemInternals.add(new ItemInternal(VIEW_TYPE_FLICKER)); + } + } else if (dialogsCount == 0) { + isEmpty = true; + itemInternals.add(new ItemInternal(requestPeerType == null ? 
VIEW_TYPE_EMPTY : VIEW_TYPE_REQUIRED_EMPTY)); + } else { + if (folderId == 0 && dialogsCount > 10 && dialogsType == DialogsActivity.DIALOGS_TYPE_DEFAULT) { + itemInternals.add(new ItemInternal(VIEW_TYPE_NEW_CHAT_HINT)); + } + itemInternals.add(new ItemInternal(VIEW_TYPE_LAST_EMPTY)); } - itemInternals.add(new ItemInternal(VIEW_TYPE_LAST_EMPTY)); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsSearchAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsSearchAdapter.java index cfb9b8984f..c0ca0a82d1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsSearchAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsSearchAdapter.java @@ -57,6 +57,7 @@ import org.telegram.ui.Components.FlickerLoadingView; import org.telegram.ui.Components.ForegroundColorSpanThemable; import org.telegram.ui.Components.RecyclerListView; +import org.telegram.ui.DialogsActivity; import org.telegram.ui.FilteredSearchView; import java.util.ArrayList; @@ -113,6 +114,7 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { private long lastShowMoreUpdate; public View showMoreHeader; private Runnable cancelShowMoreAnimation; + private ArrayList filterDialogIds; private int currentAccount = UserConfig.selectedAccount; @@ -128,6 +130,10 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { private int folderId; private ArrayList allContacts; + public void setFilterDialogIds(ArrayList filterDialogIds) { + this.filterDialogIds = filterDialogIds; + } + public boolean isSearching() { return waitingResponseCount > 0; } @@ -316,6 +322,10 @@ private void searchForumMessagesInternal(final String query, int searchId) { return; } + if (dialogsType == DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER) { + return; + } + final long dialogId = delegate.getSearchForumDialogId(); final TLRPC.TL_messages_search req = new TLRPC.TL_messages_search(); @@ -436,6 +446,15 @@ private void searchMessagesInternal(final String query, int searchId) { searchAdapterHelper.mergeResults(searchResult, filtered2RecentSearchObjects); } + if (dialogsType == DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER) { + waitingResponseCount--; + if (delegate != null) { + delegate.searchStateChanged(waitingResponseCount > 0, true); + delegate.runResultsEnterAnimation(); + } + return; + } + final TLRPC.TL_messages_searchGlobal req = new TLRPC.TL_messages_searchGlobal(); req.limit = 20; req.q = query; @@ -551,7 +570,14 @@ public boolean hasRecentSearch() { } private boolean resentSearchAvailable() { - return dialogsType != 2 && dialogsType != 4 && dialogsType != 5 && dialogsType != 6 && dialogsType != 11; + return ( + dialogsType != DialogsActivity.DIALOGS_TYPE_ADD_USERS_TO && + dialogsType != DialogsActivity.DIALOGS_TYPE_USERS_ONLY && + dialogsType != DialogsActivity.DIALOGS_TYPE_CHANNELS_ONLY && + dialogsType != DialogsActivity.DIALOGS_TYPE_GROUPS_ONLY && + dialogsType != DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY_GROUPS && + dialogsType != DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER + ); } public boolean isSearchWas() { @@ -563,6 +589,9 @@ public boolean isRecentSearchDisplayed() { } public void loadRecentSearch() { + if (dialogsType == DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER) { + return; + } loadRecentSearch(currentAccount, dialogsType, (arrayList, hashMap) -> { DialogsSearchAdapter.this.setRecentSearch(arrayList, hashMap); }); @@ -585,7 +614,7 @@ public static void loadRecentSearch(int currentAccount, int dialogsType, 
OnRecen boolean add = false; if (DialogObject.isEncryptedDialog(did)) { - if (dialogsType == 0 || dialogsType == 3) { + if (dialogsType == DialogsActivity.DIALOGS_TYPE_DEFAULT || dialogsType == DialogsActivity.DIALOGS_TYPE_FORWARD) { int encryptedChatId = DialogObject.getEncryptedChatId(did); if (!encryptedToLoad.contains(encryptedChatId)) { encryptedToLoad.add(encryptedChatId); @@ -796,7 +825,8 @@ private void searchDialogsInternal(final String query, final int searchId) { ArrayList resultArrayNames = new ArrayList<>(); ArrayList encUsers = new ArrayList<>(); ArrayList contacts = new ArrayList<>(); - MessagesStorage.getInstance(currentAccount).localSearch(dialogsType, q, resultArray, resultArrayNames, encUsers, -1); + + MessagesStorage.getInstance(currentAccount).localSearch(dialogsType, q, resultArray, resultArrayNames, encUsers, filterDialogIds, -1); // if (allContacts == null) { // allContacts = new ArrayList<>(); // for (ContactsController.Contact contact : ContactsController.getInstance(currentAccount).phoneBookContacts) { @@ -880,7 +910,7 @@ private void updateSearchResults(final ArrayList result, final ArrayList } } - if (resentSearchAvailable()) { + if (resentSearchAvailable() && !(obj instanceof TLRPC.EncryptedChat)) { boolean foundInRecent = false; if (delegate != null && delegate.getSearchForumDialogId() == dialogId) { foundInRecent = true; @@ -951,7 +981,21 @@ public void searchDialogs(String text, int folderId) { searchResultNames.clear(); searchResultHashtags.clear(); searchAdapterHelper.mergeResults(null, null); - searchAdapterHelper.queryServerSearch(null, true, true, dialogsType != 11, dialogsType != 11, dialogsType == 2 || dialogsType == 11, 0, dialogsType == 0, 0, 0, delegate != null ? delegate.getSearchForumDialogId() : 0); + if (dialogsType != DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER) { + searchAdapterHelper.queryServerSearch( + null, + true, + true, + dialogsType != DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY_GROUPS, + dialogsType != DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY_GROUPS, + dialogsType == DialogsActivity.DIALOGS_TYPE_ADD_USERS_TO || dialogsType == DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY_GROUPS, + 0, + dialogsType == DialogsActivity.DIALOGS_TYPE_DEFAULT, + 0, + 0, + delegate != null ? delegate.getSearchForumDialogId() : 0 + ); + } searchWas = false; lastSearchId = 0; waitingResponseCount = 0; @@ -960,9 +1004,11 @@ public void searchDialogs(String text, int folderId) { if (delegate != null) { delegate.searchStateChanged(false, true); } - searchTopics(null); - searchMessagesInternal(null, 0); - searchForumMessagesInternal(null, 0); + if (dialogsType != DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER) { + searchTopics(null); + searchMessagesInternal(null, 0); + searchForumMessagesInternal(null, 0); + } notifyDataSetChanged(); localTipDates.clear(); localTipArchive = false; @@ -1004,17 +1050,33 @@ public void searchDialogs(String text, int folderId) { Utilities.searchQueue.postRunnable(searchRunnable = () -> { searchRunnable = null; searchDialogsInternal(query, searchId); + if (dialogsType == DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER) { + waitingResponseCount -= 2; + return; + } AndroidUtilities.runOnUIThread(searchRunnable2 = () -> { searchRunnable2 = null; if (searchId != lastSearchId) { return; } - if (needMessagesSearch != 2) { - searchAdapterHelper.queryServerSearch(query, true, dialogsType != 4, true, dialogsType != 4 && dialogsType != 11, dialogsType == 2 || dialogsType == 1, 0, dialogsType == 0, 0, searchId, delegate != null ? 
delegate.getSearchForumDialogId() : 0); + if (needMessagesSearch != 2 && dialogsType != DialogsActivity.DIALOGS_TYPE_GROUPS_ONLY && dialogsType != DialogsActivity.DIALOGS_TYPE_CHANNELS_ONLY) { + searchAdapterHelper.queryServerSearch( + query, + true, + dialogsType != DialogsActivity.DIALOGS_TYPE_USERS_ONLY, + true, + dialogsType != DialogsActivity.DIALOGS_TYPE_USERS_ONLY && dialogsType != DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY_GROUPS, + dialogsType == DialogsActivity.DIALOGS_TYPE_ADD_USERS_TO || dialogsType == DialogsActivity.DIALOGS_TYPE_BOT_SHARE, + 0, + dialogsType == DialogsActivity.DIALOGS_TYPE_DEFAULT, + 0, + searchId, + delegate != null ? delegate.getSearchForumDialogId() : 0 + ); } else { waitingResponseCount -= 2; } - if (needMessagesSearch == 0) { + if (needMessagesSearch == 0 || dialogsType == DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER) { waitingResponseCount--; } else { searchTopics(text); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/MentionsAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/MentionsAdapter.java index 65e3412fe2..7f7fc80dbe 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/MentionsAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/MentionsAdapter.java @@ -96,6 +96,7 @@ public interface MentionsAdapterDelegate { private ArrayList searchResultCommandsUsers; private ArrayList searchResultBotContext; private TLRPC.TL_inlineBotSwitchPM searchResultBotContextSwitch; + private TLRPC.TL_inlineBotWebView searchResultBotWebViewSwitch; private MentionsAdapterDelegate delegate; private LongSparseArray botInfo; private int resultStartPosition; @@ -201,6 +202,10 @@ public void onSetHashtags(ArrayList arrayList } } + public TLRPC.User getFoundContextBot() { + return foundContextBot; + } + @Override public void didReceivedNotification(int id, int account, final Object... args) { if (id == NotificationCenter.fileLoaded || id == NotificationCenter.fileLoadFailed) { @@ -481,6 +486,10 @@ public TLRPC.TL_inlineBotSwitchPM getBotContextSwitch() { return searchResultBotContextSwitch; } + public TLRPC.TL_inlineBotWebView getBotWebViewSwitch() { + return searchResultBotWebViewSwitch; + } + public long getContextBotId() { return foundContextBot != null ? foundContextBot.id : 0; } @@ -735,6 +744,9 @@ private void searchForContextBotResults(final boolean cache, final TLRPC.User us if (searchResultBotContextSwitch == null) { searchResultBotContextSwitch = res.switch_pm; } + if (searchResultBotWebViewSwitch == null) { + searchResultBotWebViewSwitch = res.switch_webview; + } for (int a = 0; a < res.results.size(); a++) { TLRPC.BotInlineResult result = res.results.get(a); if (!(result.document instanceof TLRPC.TL_document) && !(result.photo instanceof TLRPC.TL_photo) && !"game".equals(result.type) && result.content == null && result.send_message instanceof TLRPC.TL_botInlineMessageMediaAuto) { @@ -766,14 +778,14 @@ private void searchForContextBotResults(final boolean cache, final TLRPC.User us searchResultSuggestions = null; searchResultCommandsHelp = null; searchResultCommandsUsers = null; + delegate.needChangePanelVisibility(!searchResultBotContext.isEmpty() || searchResultBotContextSwitch != null || searchResultBotWebViewSwitch != null); if (added) { - boolean hasTop = searchResultBotContextSwitch != null; + boolean hasTop = searchResultBotContextSwitch != null || searchResultBotWebViewSwitch != null; notifyItemChanged(searchResultBotContext.size() - res.results.size() + (hasTop ? 
1 : 0) - 1); notifyItemRangeInserted(searchResultBotContext.size() - res.results.size() + (hasTop ? 1 : 0), res.results.size()); } else { notifyDataSetChanged(); } - delegate.needChangePanelVisibility(!searchResultBotContext.isEmpty() || searchResultBotContextSwitch != null); } }); @@ -800,7 +812,7 @@ private void searchForContextBotResults(final boolean cache, final TLRPC.User us } public void searchUsernameOrHashtag(CharSequence charSequence, int position, ArrayList messageObjects, boolean usernameOnly, boolean forSearch) { - final String text = charSequence == null ? null : charSequence.toString(); + final String text = charSequence == null ? "" : charSequence.toString(); if (cancelDelayRunnable != null) { AndroidUtilities.cancelRunOnUIThread(cancelDelayRunnable); cancelDelayRunnable = null; @@ -830,7 +842,7 @@ public void searchUsernameOrHashtag(CharSequence charSequence, int position, Arr StringBuilder result = new StringBuilder(); int foundType = -1; - boolean searchEmoji = !usernameOnly && text != null && text.length() > 0 && text.length() <= 14; + boolean searchEmoji = !usernameOnly && text.length() > 0 && text.length() <= 14; String originalEmoji = ""; if (searchEmoji) { CharSequence emoji = originalEmoji = text; @@ -1229,7 +1241,7 @@ public void run() { for (int a = 0; a < res.participants.size(); a++) { TLRPC.ChannelParticipant participant = res.participants.get(a); long peerId = MessageObject.getPeerId(participant.peer); - if (searchResultUsernamesMap.indexOfKey(peerId) >= 0 || !isSearchingMentions && peerId == currentUserId) { + if (searchResultUsernamesMap.indexOfKey(peerId) >= 0 || peerId == 0 && searchResultUsernamesMap.indexOfKey(currentUserId) >= 0 || !isSearchingMentions && (peerId == currentUserId || peerId == 0)) { continue; } if (peerId >= 0) { @@ -1392,7 +1404,7 @@ public int getItemCountInternal() { if (stickers != null) { return stickers.size(); } else if (searchResultBotContext != null) { - return searchResultBotContext.size() + (searchResultBotContextSwitch != null ? 1 : 0); + return searchResultBotContext.size() + (searchResultBotContextSwitch != null || searchResultBotWebViewSwitch != null ? 1 : 0); } else if (searchResultUsernames != null) { return searchResultUsernames.size(); } else if (searchResultHashtags != null) { @@ -1417,6 +1429,7 @@ public void clear(boolean safe) { searchResultBotContext.clear(); } searchResultBotContextSwitch = null; + searchResultBotWebViewSwitch = null; if (searchResultUsernames != null) { searchResultUsernames.clear(); } @@ -1439,7 +1452,7 @@ public int getItemViewType(int position) { } else if (foundContextBot != null && !inlineMediaEnabled) { return 3; } else if (searchResultBotContext != null) { - if (position == 0 && searchResultBotContextSwitch != null) { + if (position == 0 && (searchResultBotContextSwitch != null || searchResultBotWebViewSwitch != null)) { return 2; } return 1; @@ -1453,7 +1466,7 @@ public void addHashtagsFromMessage(CharSequence message) { } public int getItemPosition(int i) { - if (searchResultBotContext != null && searchResultBotContextSwitch != null) { + if (searchResultBotContext != null && (searchResultBotContextSwitch != null || searchResultBotWebViewSwitch != null)) { i--; } return i; @@ -1467,7 +1480,13 @@ public Object getItem(int i) { if (stickers != null) { return i >= 0 && i < stickers.size() ? 
stickers.get(i).sticker : null; } else if (searchResultBotContext != null) { - if (searchResultBotContextSwitch != null) { + if (searchResultBotWebViewSwitch != null) { + if (i == 0) { + return searchResultBotWebViewSwitch; + } else { + i--; + } + } else if (searchResultBotContextSwitch != null) { if (i == 0) { return searchResultBotContextSwitch; } else { @@ -1589,10 +1608,10 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } } } else if (searchResultBotContext != null) { - boolean hasTop = searchResultBotContextSwitch != null; + boolean hasTop = searchResultBotContextSwitch != null || searchResultBotWebViewSwitch != null; if (holder.getItemViewType() == 2) { if (hasTop) { - ((BotSwitchCell) holder.itemView).setText(searchResultBotContextSwitch.text); + ((BotSwitchCell) holder.itemView).setText(searchResultBotContextSwitch != null ? searchResultBotContextSwitch.text : searchResultBotWebViewSwitch.text); } } else { if (hasTop) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ArticleViewer.java b/TMessagesProj/src/main/java/org/telegram/ui/ArticleViewer.java index f531d9b4bb..be9dff9cc5 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ArticleViewer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ArticleViewer.java @@ -168,6 +168,7 @@ import org.telegram.ui.Components.TextPaintSpan; import org.telegram.ui.Components.TextPaintUrlSpan; import org.telegram.ui.Components.TextPaintWebpageUrlSpan; +import org.telegram.ui.Components.TranslateAlert2; import org.telegram.ui.Components.TypefaceSpan; import org.telegram.ui.Components.WebPlayerView; @@ -1226,7 +1227,14 @@ private void showCopyPopup(String urlFinal) { } BottomBuilder builder = new BottomBuilder(parentActivity); - builder.addTitle(urlFinal); + String formattedUrl = urlFinal; + try { + formattedUrl = URLDecoder.decode(urlFinal.replaceAll("\\+", "%2b"), "UTF-8"); + } catch (Exception e) { + FileLog.e(e); + } + builder.addTitle(formattedUrl); + builder.setTitleMultipleLines(true); builder.addItems( new String[]{LocaleController.getString("Open", R.string.Open), LocaleController.getString("Copy", R.string.Copy), LocaleController.getString("ShareQRCode", R.string.ShareQRCode)}, new int[]{R.drawable.msg_openin, R.drawable.msg_copy, R.drawable.msg_qrcode}, (which, text, cell) -> { @@ -2688,22 +2696,24 @@ private boolean checkLayoutForLinks(WebpageAdapter adapter, MotionEvent event, V pressedEnd = end; } } - if (pressedLink != null) { - links.removeLink(pressedLink); - } - pressedLink = new LinkSpanDrawable(selectedLink, null, x, y); - pressedLink.setColor(Theme.getColor(Theme.key_windowBackgroundWhiteLinkSelection) & 0x33ffffff); - links.addLink(pressedLink, pressedLinkOwnerLayout); - try { - LinkPath path = pressedLink.obtainNewPath(); - path.setCurrentLayout(layout, pressedStart, 0); - TextPaint textPaint = selectedLink.getTextPaint(); - int shift = textPaint != null ? textPaint.baselineShift : 0; - path.setBaselineShift(shift != 0 ? shift + AndroidUtilities.dp(shift > 0 ? 
5 : -2) : 0); - layout.getSelectionPath(pressedStart, pressedEnd, path); - parentView.invalidate(); - } catch (Exception e) { - FileLog.e(e); + if (pressedLink == null || pressedLink.getSpan() != selectedLink) { + if (pressedLink != null) { + links.removeLink(pressedLink); + } + pressedLink = new LinkSpanDrawable(selectedLink, null, x, y); + pressedLink.setColor(Theme.getColor(Theme.key_windowBackgroundWhiteLinkSelection) & 0x33ffffff); + links.addLink(pressedLink, pressedLinkOwnerLayout); + try { + LinkPath path = pressedLink.obtainNewPath(); + path.setCurrentLayout(layout, pressedStart, 0); + TextPaint textPaint = selectedLink.getTextPaint(); + int shift = textPaint != null ? textPaint.baselineShift : 0; + path.setBaselineShift(shift != 0 ? shift + AndroidUtilities.dp(shift > 0 ? 5 : -2) : 0); + layout.getSelectionPath(pressedStart, pressedEnd, path); + parentView.invalidate(); + } catch (Exception e) { + FileLog.e(e); + } } } } @@ -3098,6 +3108,11 @@ protected boolean drawChild(Canvas canvas, View child, long drawingTime) { return super.drawChild(canvas, child, drawingTime); } } + + @Override + public void invalidate() { + super.invalidate(); + } }; windowView.addView(containerView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.TOP | Gravity.LEFT)); //containerView.setFitsSystemWindows(true); @@ -3724,10 +3739,8 @@ public void onDraw(Canvas canvas) { boolean isLightNavigation = navigationBrightness >= 0.721f; if (isLightNavigation && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { uiFlags |= View.SYSTEM_UI_FLAG_LIGHT_NAVIGATION_BAR; - navigationBarPaint.setColor(navigationColor); - } else if (!isLightNavigation) { - navigationBarPaint.setColor(navigationColor); } + navigationBarPaint.setColor(navigationColor); windowLayoutParams.systemUiVisibility = uiFlags; if (Build.VERSION.SDK_INT >= 21) { @@ -3741,9 +3754,9 @@ public void onDraw(Canvas canvas) { textSelectionHelper = new TextSelectionHelper.ArticleTextSelectionHelper(); textSelectionHelper.setParentView(listView[0]); - if (MessagesController.getGlobalMainSettings().getBoolean("translate_button", false)) { + if (MessagesController.getInstance(currentAccount).getTranslateController().isContextTranslateEnabled()) { textSelectionHelper.setOnTranslate((text, fromLang, toLang, onAlertDismiss) -> { -// TranslateAlert.showAlert(parentActivity, parentFragment, currentAccount, fromLang, toLang, text, false, null, onAlertDismiss); + TranslateAlert2.showAlert(parentActivity, parentFragment, currentAccount, fromLang, toLang, text, null, false, null, onAlertDismiss); }); } textSelectionHelper.layoutManager = layoutManager[0]; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/BubbleActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/BubbleActivity.java index 0e2b4515fe..6c9b81afe8 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/BubbleActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/BubbleActivity.java @@ -122,7 +122,7 @@ private void showPasscodeActivity() { passcodeView.onShow(true, false); SharedConfig.isWaitingForPasscodeEnter = true; drawerLayoutContainer.setAllowOpenDrawer(false, false); - passcodeView.setDelegate(() -> { + passcodeView.setDelegate(view -> { SharedConfig.isWaitingForPasscodeEnter = false; if (passcodeSaveIntent != null) { handleIntent(passcodeSaveIntent, passcodeSaveIntentIsNew, passcodeSaveIntentIsRestore, true, passcodeSaveIntentAccount, passcodeSaveIntentState); @@ -130,6 +130,8 @@ private void showPasscodeActivity() { } 
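The ArticleViewer.showCopyPopup() hunk a few lines above decodes the URL before using it as the sheet title, re-encoding literal '+' characters first so URLDecoder does not turn them into spaces, and falls back to the raw string when decoding fails. A self-contained sketch of that idea, assuming nothing beyond the JDK; the class and method names are mine, not part of the patch:

    import java.net.URLDecoder;

    final class UrlDisplay {
        private UrlDisplay() {}

        // Percent-decode a URL for human display. A literal '+' is re-encoded as %2b first,
        // because URLDecoder treats '+' as an encoded space; malformed %-sequences fall back
        // to the original string instead of throwing.
        static String formatForDisplay(String url) {
            try {
                return URLDecoder.decode(url.replaceAll("\\+", "%2b"), "UTF-8");
            } catch (Exception e) {
                return url;
            }
        }
    }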
drawerLayoutContainer.setAllowOpenDrawer(true, false); actionBarLayout.showLastFragment(); + + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.passcodeDismissed, view); }); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/CacheChatsExceptionsFragment.java b/TMessagesProj/src/main/java/org/telegram/ui/CacheChatsExceptionsFragment.java index 2290b737fe..ad6d3a6e1e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/CacheChatsExceptionsFragment.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/CacheChatsExceptionsFragment.java @@ -81,15 +81,15 @@ public void onItemClick(int id) { args.putBoolean("onlySelect", true); args.putBoolean("checkCanWrite", false); if (currentType == CacheControlActivity.KEEP_MEDIA_TYPE_GROUP) { - args.putInt("dialogsType", 6); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_GROUPS_ONLY); } else if (currentType == CacheControlActivity.KEEP_MEDIA_TYPE_CHANNEL) { - args.putInt("dialogsType", 5); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_CHANNELS_ONLY); } else { - args.putInt("dialogsType", 4); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_USERS_ONLY); } args.putBoolean("allowGlobalSearch", false); DialogsActivity activity = new DialogsActivity(args); - activity.setDelegate((fragment, dids, message, param) -> { + activity.setDelegate((fragment, dids, message, param, topicsFragment) -> { activity.finishFragment(); CacheByChatsController.KeepMediaException newException = null; for (int i = 0; i < dids.size(); i++) { @@ -123,6 +123,7 @@ public void onItemClick(int id) { int finalP = p; showPopupFor(newException); } + return true; }); presentFragment(activity); } else if (items.get(position).viewType == VIEW_TYPE_CHAT) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/CacheControlActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/CacheControlActivity.java index 262df85882..dbf2ad64fb 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/CacheControlActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/CacheControlActivity.java @@ -23,6 +23,7 @@ import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.RectF; +import android.graphics.drawable.ColorDrawable; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Bundle; @@ -62,6 +63,7 @@ import androidx.annotation.NonNull; import androidx.annotation.RequiresApi; import androidx.core.graphics.ColorUtils; +import androidx.core.math.MathUtils; import androidx.recyclerview.widget.DefaultItemAnimator; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.RecyclerView; @@ -133,6 +135,7 @@ import org.telegram.ui.Components.ListView.AdapterWithDiffUtils; import org.telegram.ui.Components.LoadingDrawable; import org.telegram.ui.Components.NestedSizeNotifierLayout; +import org.telegram.ui.Components.RLottieImageView; import org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.Components.SlideChooseView; import org.telegram.ui.Components.StorageDiagramView; @@ -142,6 +145,9 @@ import org.telegram.ui.Storage.CacheModel; import java.io.File; +import java.nio.file.Files; +import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.attribute.FileTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -215,7 +221,7 @@ public class CacheControlActivity extends BaseFragment implements NotificationCe private CacheChartHeader cacheChartHeader; 
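The BubbleActivity hunk above now hands the PasscodeView to the delegate and broadcasts NotificationCenter.passcodeDismissed once the lock screen goes away. A minimal sketch of how another component could listen for that event, relying only on the NotificationCenterDelegate plumbing visible elsewhere in this diff; the observer class itself is illustrative, not part of the patch:

    import android.view.View;
    import org.telegram.messenger.NotificationCenter;

    // Illustrative observer, not part of the patch: subscribes to the global
    // passcodeDismissed event and reacts when the passcode screen is dismissed.
    public class PasscodeDismissObserver implements NotificationCenter.NotificationCenterDelegate {

        public void attach() {
            NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.passcodeDismissed);
        }

        public void detach() {
            NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.passcodeDismissed);
        }

        @Override
        public void didReceivedNotification(int id, int account, Object... args) {
            if (id == NotificationCenter.passcodeDismissed) {
                View passcodeView = (View) args[0]; // the view posted by BubbleActivity above
                // refresh any UI that was hidden while the passcode lock was showing
            }
        }
    }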
private ClearCacheButtonInternal clearCacheButton; - private volatile boolean canceled = false; + public static volatile boolean canceled = false; private View bottomSheetView; private BottomSheet bottomSheet; @@ -253,9 +259,109 @@ private void updateDatabaseItemSize() { } } + private static long lastTotalSizeCalculatedTime; + private static Long lastTotalSizeCalculated; + private static Long lastDeviceTotalSize, lastDeviceTotalFreeSize; + + public static void calculateTotalSize(Utilities.Callback onDone) { + if (onDone == null) { + return; + } + if (lastTotalSizeCalculated != null) { + onDone.run(lastTotalSizeCalculated); + if (System.currentTimeMillis() - lastTotalSizeCalculatedTime < 5000) { + return; + } + } + Utilities.globalQueue.postRunnable(() -> { + canceled = false; + long cacheSize = getDirectorySize(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_CACHE), 5); + long cacheTempSize = getDirectorySize(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_CACHE), 4); + long photoSize = getDirectorySize(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_IMAGE), 0); + photoSize += getDirectorySize(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_IMAGE_PUBLIC), 0); + long videoSize = getDirectorySize(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_VIDEO), 0); + videoSize += getDirectorySize(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_VIDEO_PUBLIC), 0); + long documentsSize = getDirectorySize(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_DOCUMENT), 1); + documentsSize += getDirectorySize(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_FILES), 1); + long musicSize = getDirectorySize(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_DOCUMENT), 2); + musicSize += getDirectorySize(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_FILES), 2); + long stickersCacheSize = getDirectorySize(new File(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_CACHE), "acache"), 0); + stickersCacheSize += getDirectorySize(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_CACHE), 3); + long audioSize = getDirectorySize(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_AUDIO), 0); + final long totalSize = lastTotalSizeCalculated = cacheSize + cacheTempSize + videoSize + audioSize + photoSize + documentsSize + musicSize + stickersCacheSize; + lastTotalSizeCalculatedTime = System.currentTimeMillis(); + if (!canceled) { + AndroidUtilities.runOnUIThread(() -> { + onDone.run(totalSize); + }); + } + }); + } + + public static void resetCalculatedTotalSIze() { + lastTotalSizeCalculated = null; + } + + public static void getDeviceTotalSize(Utilities.Callback2 onDone) { + if (lastDeviceTotalSize != null && lastDeviceTotalFreeSize != null) { + if (onDone != null) { + onDone.run(lastDeviceTotalSize, lastDeviceTotalFreeSize); + } + return; + } + File path; + if (Build.VERSION.SDK_INT >= 19) { + ArrayList storageDirs = AndroidUtilities.getRootDirs(); + String dir = (path = storageDirs.get(0)).getAbsolutePath(); + if (!TextUtils.isEmpty(SharedConfig.storageCacheDir)) { + for (int a = 0, N = storageDirs.size(); a < N; a++) { + File file = storageDirs.get(a); + if (file.getAbsolutePath().startsWith(SharedConfig.storageCacheDir) && file.canWrite()) { + path = file; + break; + } + } + } + } else { + path = new File(SharedConfig.storageCacheDir); + } + try { + StatFs stat = new StatFs(path.getPath()); + long blockSize; + long blockSizeExternal; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) { + blockSize = stat.getBlockSizeLong(); + } else { + blockSize = stat.getBlockSize(); + } + long availableBlocks; + if (Build.VERSION.SDK_INT >= 
Build.VERSION_CODES.JELLY_BEAN_MR2) { + availableBlocks = stat.getAvailableBlocksLong(); + } else { + availableBlocks = stat.getAvailableBlocks(); + } + long blocksTotal; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) { + blocksTotal = stat.getBlockCountLong(); + } else { + blocksTotal = stat.getBlockCount(); + } + + lastDeviceTotalSize = blocksTotal * blockSize; + lastDeviceTotalFreeSize = availableBlocks * blockSize; + if (onDone != null) { + onDone.run(lastDeviceTotalSize, lastDeviceTotalFreeSize); + } + return; + } catch (Exception e) { + FileLog.e(e); + } + } + @Override public boolean onFragmentCreate() { super.onFragmentCreate(); + canceled = false; getNotificationCenter().addObserver(this, NotificationCenter.didClearDatabase); databaseSize = MessagesStorage.getInstance(currentAccount).getDatabaseSize(); loadingDialogs = true; @@ -309,7 +415,11 @@ public boolean onFragmentCreate() { } stickersCacheSize += cacheCustomEmojiSize; audioSize = getDirectorySize(FileLoader.checkDirectory(FileLoader.MEDIA_DIR_AUDIO), 0); - totalSize = cacheSize + cacheTempSize + videoSize + audioSize + photoSize + documentsSize + musicSize + stickersCacheSize; + if (canceled) { + return; + } + totalSize = lastTotalSizeCalculated = cacheSize + cacheTempSize + videoSize + audioSize + photoSize + documentsSize + musicSize + stickersCacheSize; + lastTotalSizeCalculatedTime = System.currentTimeMillis(); File path = EnvUtil.getTelegramPath(); try { @@ -340,14 +450,13 @@ public boolean onFragmentCreate() { FileLog.e(e); } - long minDuration = System.currentTimeMillis() - fragmentCreateTime > 45 ? 600 : 0; AndroidUtilities.runOnUIThread(() -> { resumeDelayedFragmentAnimation(); calculating = false; updateRows(true); updateChart(); - }, Math.max(1, minDuration - (System.currentTimeMillis() - fragmentCreateTime))); + }); loadDialogEntities(); }); @@ -377,11 +486,11 @@ private void updateChart() { if (System.currentTimeMillis() - fragmentCreateTime < 80) { cacheChart.loadingFloat.set(0, true); } - cacheChart.setSegments(totalSize, segments); + cacheChart.setSegments(totalSize, true, segments); } else if (calculating) { - cacheChart.setSegments(-1); + cacheChart.setSegments(-1, true); } else { - cacheChart.setSegments(0); + cacheChart.setSegments(0, true); } } if (clearCacheButton != null && !calculating) { @@ -391,6 +500,7 @@ private void updateChart() { private void loadDialogEntities() { getFileLoader().getFileDatabase().getQueue().postRunnable(() -> { + getFileLoader().getFileDatabase().ensureDatabaseCreated(); CacheModel cacheModel = new CacheModel(false); LongSparseArray dilogsFilesEntities = new LongSparseArray<>(); @@ -568,14 +678,14 @@ private String formatPercent(float k) { } private String formatPercent(float k, boolean minimize) { - float p = Math.round(k * 100f); - if (minimize && p < 0.1f) { + if (minimize && k < 0.001f) { return String.format("<%.1f%%", 0.1f); } - if (p % 1 == 0) { - return ((int) p) + "%"; + final float p = Math.round(k * 100f); + if (minimize && p <= 0) { + return String.format("<%d%%", 1); } - return String.format("%.1f%%", p); + return String.format("%d%%", (int) p); } private CharSequence getCheckBoxTitle(CharSequence header, int percent) { @@ -583,7 +693,7 @@ private CharSequence getCheckBoxTitle(CharSequence header, int percent) { } private CharSequence getCheckBoxTitle(CharSequence header, int percent, boolean addArrow) { - String percentString = percent <= 0 ? 
String.format("<%.1f%%", 0.1f) : String.format("%d%%", percent); + String percentString = percent <= 0 ? String.format("<%.1f%%", 1f) : String.format("%d%%", percent); SpannableString percentStr = new SpannableString(percentString); percentStr.setSpan(new RelativeSizeSpan(.834f), 0, percentStr.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); percentStr.setSpan(new TypefaceSpan(AndroidUtilities.getTypeface("fonts/rmedium.ttf")), 0, percentStr.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); @@ -738,7 +848,7 @@ public void onFragmentDestroy() { canceled = true; } - private long getDirectorySize(File dir, int documentsMusicType) { + private static long getDirectorySize(File dir, int documentsMusicType) { if (dir == null || canceled) { return 0; } @@ -751,7 +861,7 @@ private long getDirectorySize(File dir, int documentsMusicType) { return size; } - private void cleanupFolders() { + private void cleanupFolders(Utilities.Callback2 onProgress, Runnable onDone) { if (cacheModel != null) { cacheModel.clearSelection(); } @@ -760,22 +870,125 @@ private void cleanupFolders() { cachedMediaLayout.showActionMode(false); } - progressDialog = new AlertDialog(getParentActivity(), AlertDialog.ALERT_TYPE_SPINNER); - progressDialog.setCanCancel(false); - progressDialog.showDelayed(500); +// progressDialog = new AlertDialog(getParentActivity(), AlertDialog.ALERT_TYPE_SPINNER); +// progressDialog.setCanCancel(false); +// progressDialog.showDelayed(500); getFileLoader().cancelLoadAllFiles(); getFileLoader().getFileLoaderQueue().postRunnable(() -> Utilities.globalQueue.postRunnable(() -> { - cleanupFoldersInternal(); + cleanupFoldersInternal(onProgress, onDone); })); setCacheModel(null); loadingDialogs = true; // updateRows(); } - private void cleanupFoldersInternal() { + private static int LISTDIR_DOCTYPE_ALL = 0; + private static int LISTDIR_DOCTYPE_OTHER_THAN_MUSIC = 1; + private static int LISTDIR_DOCTYPE_MUSIC = 2; + + private static int LISTDIR_DOCTYPE2_EMOJI = 3; + private static int LISTDIR_DOCTYPE2_TEMP = 4; + private static int LISTDIR_DOCTYPE2_OTHER = 5; + + public static int countDirJava(String fileName, int docType) { + int count = 0; + File dir = new File(fileName); + if (dir.exists()) { + File[] entries = dir.listFiles(); + for (int i = 0; i < entries.length; ++i) { + File entry = entries[i]; + String name = entry.getName(); + if (".".equals(name)) { + continue; + } + + if (docType > 0 && name.length() >= 4) { + String namelc = name.toLowerCase(); + boolean isMusic = namelc.endsWith(".mp3") || namelc.endsWith(".m4a"); + boolean isEmoji = namelc.endsWith(".tgs") || namelc.endsWith(".webm"); + boolean isTemp = namelc.endsWith(".tmp") || namelc.endsWith(".temp") || namelc.endsWith(".preload"); + + if ( + isMusic && docType == LISTDIR_DOCTYPE_OTHER_THAN_MUSIC || + !isMusic && docType == LISTDIR_DOCTYPE_MUSIC || + isEmoji && docType == LISTDIR_DOCTYPE2_OTHER || + !isEmoji && docType == LISTDIR_DOCTYPE2_EMOJI || + isTemp && docType == LISTDIR_DOCTYPE2_OTHER || + !isTemp && docType == LISTDIR_DOCTYPE2_TEMP + ) { + continue; + } + } + + if (entry.isDirectory()) { + count += countDirJava(fileName + "/" + name, docType); + } else { + count++; + } + } + } + return count; + } + + public static void cleanDirJava(String fileName, int docType, int[] p, Utilities.Callback onProgress) { + int count = countDirJava(fileName, docType); + if (p == null) { + p = new int[] { 0 }; + } + File dir = new File(fileName); + if (dir.exists()) { + File[] entries = dir.listFiles(); + for (int i = 0; i < entries.length; ++i) { + File entry 
= entries[i]; + String name = entry.getName(); + if (".".equals(name)) { + continue; + } + + if (docType > 0 && name.length() >= 4) { + String namelc = name.toLowerCase(); + boolean isMusic = namelc.endsWith(".mp3") || namelc.endsWith(".m4a"); + boolean isEmoji = namelc.endsWith(".tgs") || namelc.endsWith(".webm"); + boolean isTemp = namelc.endsWith(".tmp") || namelc.endsWith(".temp") || namelc.endsWith(".preload"); + + if ( + isMusic && docType == LISTDIR_DOCTYPE_OTHER_THAN_MUSIC || + !isMusic && docType == LISTDIR_DOCTYPE_MUSIC || + isEmoji && docType == LISTDIR_DOCTYPE2_OTHER || + !isEmoji && docType == LISTDIR_DOCTYPE2_EMOJI || + isTemp && docType == LISTDIR_DOCTYPE2_OTHER || + !isTemp && docType == LISTDIR_DOCTYPE2_TEMP + ) { + continue; + } + } + + if (entry.isDirectory()) { + cleanDirJava(fileName + "/" + name, docType, p, onProgress); + } else { + entry.delete(); + + p[0]++; + onProgress.run(p[0] / (float) count); + } + } + } + } + + private void cleanupFoldersInternal(Utilities.Callback2 onProgress, Runnable onDone) { boolean imagesCleared = false; long clearedSize = 0; boolean allItemsClear = true; + final int[] clearDirI = new int[] { 0 }; + int clearDirCount = (selected[0] ? 2 : 0) + (selected[1] ? 2 : 0) + (selected[2] ? 2 : 0) + (selected[3] ? 2 : 0) + (selected[4] ? 1 : 0) + (selected[5] ? 2 : 0) + (selected[6] ? 1 : 0) + (selected[7] ? 1 : 0); + long time = System.currentTimeMillis(); + Utilities.Callback updateProgress = t -> { + onProgress.run(clearDirI[0] / (float) clearDirCount + (1f / clearDirCount) * MathUtils.clamp(t, 0, 1), false); + }; + Runnable next = () -> { + final long now = System.currentTimeMillis(); + onProgress.run(clearDirI[0] / (float) clearDirCount, now - time > 250); + }; for (int a = 0; a < 8; a++) { if (!selected[a]) { allItemsClear = false; @@ -822,14 +1035,18 @@ private void cleanupFoldersInternal() { file = FileLoader.checkDirectory(type); } if (file != null) { - Utilities.clearDir(file.getAbsolutePath(), documentsMusicType, Long.MAX_VALUE, true); + cleanDirJava(file.getAbsolutePath(), documentsMusicType, null, updateProgress); } + clearDirI[0]++; + next.run(); if (type == 100) { file = FileLoader.checkDirectory(FileLoader.MEDIA_DIR_CACHE); if (file != null) { - Utilities.clearDir(file.getAbsolutePath(), 3, Long.MAX_VALUE, false); + cleanDirJava(file.getAbsolutePath(), 3, null, updateProgress); } EmojiHelper.getInstance().deleteAll(); + clearDirI[0]++; + next.run(); } if (type == FileLoader.MEDIA_DIR_IMAGE || type == FileLoader.MEDIA_DIR_VIDEO) { int publicDirectoryType; @@ -841,14 +1058,18 @@ private void cleanupFoldersInternal() { file = FileLoader.checkDirectory(publicDirectoryType); if (file != null) { - Utilities.clearDir(file.getAbsolutePath(), documentsMusicType, Long.MAX_VALUE, false); + cleanDirJava(file.getAbsolutePath(), documentsMusicType, null, updateProgress); } + clearDirI[0]++; + next.run(); } if (type == FileLoader.MEDIA_DIR_DOCUMENT) { file = FileLoader.checkDirectory(FileLoader.MEDIA_DIR_FILES); if (file != null) { - Utilities.clearDir(file.getAbsolutePath(), documentsMusicType, Long.MAX_VALUE, false); + cleanDirJava(file.getAbsolutePath(), documentsMusicType, null, updateProgress); } + clearDirI[0]++; + next.run(); } if (type == FileLoader.MEDIA_DIR_CACHE) { @@ -892,7 +1113,8 @@ private void cleanupFoldersInternal() { } } final boolean imagesClearedFinal = imagesCleared; - totalSize = cacheSize + cacheTempSize + videoSize + audioSize + photoSize + documentsSize + musicSize + stickersCacheSize; + totalSize = 
lastTotalSizeCalculated = cacheSize + cacheTempSize + videoSize + audioSize + photoSize + documentsSize + musicSize + stickersCacheSize; + lastTotalSizeCalculatedTime = System.currentTimeMillis(); Arrays.fill(selected, true); File path = Environment.getDataDirectory(); @@ -939,11 +1161,17 @@ private void cleanupFoldersInternal() { } getMediaDataController().ringtoneDataStore.checkRingtoneSoundsLoaded(); - cacheRemovedTooltip.setInfoText(LocaleController.formatString("CacheWasCleared", R.string.CacheWasCleared, AndroidUtilities.formatFileSize(finalClearedSize))); - cacheRemovedTooltip.showWithAction(0, UndoView.ACTION_CACHE_WAS_CLEARED, null, null); + AndroidUtilities.runOnUIThread(() -> { + cacheRemovedTooltip.setInfoText(LocaleController.formatString("CacheWasCleared", R.string.CacheWasCleared, AndroidUtilities.formatFileSize(finalClearedSize))); + cacheRemovedTooltip.showWithAction(0, UndoView.ACTION_CACHE_WAS_CLEARED, null, null); + }, 150); MediaDataController.getInstance(currentAccount).chekAllMedia(true); loadDialogEntities(); + + if (onDone != null) { + onDone.run(); + } }); } @@ -1061,7 +1289,7 @@ public void onItemClick(int id) { ActionBarMenuItem otherItem = actionBar.createMenu().addItem(other_id, R.drawable.ic_ab_other); clearDatabaseItem = otherItem.addSubItem(clear_database_id, R.drawable.msg_delete, LocaleController.getString("ClearLocalDatabase", R.string.ClearLocalDatabase)); clearDatabaseItem.setIconColor(Theme.getColor(Theme.key_dialogRedIcon)); - clearDatabaseItem.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + clearDatabaseItem.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); updateDatabaseItemSize(); listAdapter = new ListAdapter(context); @@ -1202,7 +1430,7 @@ private void clearSelectedFiles() { showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } @@ -1367,7 +1595,7 @@ private void clearDatabase() { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } @@ -1483,7 +1711,7 @@ public void setData(boolean hasCache, float percent, float usedPercent) { LocaleController.getString("StorageCleared", R.string.StorageCleared) ); if (hasCache) { - if (percent < 0.1f) { + if (percent < 0.01f) { subtitle[1].setText(LocaleController.formatString("StorageUsageTelegramLess", R.string.StorageUsageTelegramLess, formatPercent(percent))); } else { subtitle[1].setText(LocaleController.formatString("StorageUsageTelegram", R.string.StorageUsageTelegram, formatPercent(percent))); @@ -1628,13 +1856,144 @@ private void drawRoundRect(Canvas canvas, RectF rect, float left, float right, P } } + private class ClearingCacheView extends FrameLayout { + + RLottieImageView imageView; + AnimatedTextView percentsTextView; + ProgressView progressView; + TextView title, subtitle; + + public ClearingCacheView(Context context) { + super(context); + + imageView = new RLottieImageView(context); + imageView.setAutoRepeat(true); + imageView.setAnimation(R.raw.utyan_cache, 150, 150); + addView(imageView, LayoutHelper.createFrame(150, 150, Gravity.CENTER_HORIZONTAL | Gravity.TOP, 0, 16, 0, 0)); + imageView.playAnimation(); + + percentsTextView = new AnimatedTextView(context, false, true, 
true); + percentsTextView.setAnimationProperties(.35f, 0, 120, CubicBezierInterpolator.EASE_OUT); + percentsTextView.setGravity(Gravity.CENTER_HORIZONTAL); + percentsTextView.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); + percentsTextView.setTextSize(AndroidUtilities.dp(24)); + percentsTextView.setTypeface(AndroidUtilities.getTypeface(AndroidUtilities.TYPEFACE_ROBOTO_MEDIUM)); + addView(percentsTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 32, Gravity.CENTER_HORIZONTAL | Gravity.TOP, 0, 16 + 150 + 16 - 6, 0, 0)); + + progressView = new ProgressView(context); + addView(progressView, LayoutHelper.createFrame(240, 5, Gravity.CENTER_HORIZONTAL | Gravity.TOP, 0, 16 + 150 + 16 + 28 + 16, 0, 0)); + + title = new TextView(context); + title.setGravity(Gravity.CENTER_HORIZONTAL); + title.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); + title.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); + title.setTypeface(AndroidUtilities.getTypeface(AndroidUtilities.TYPEFACE_ROBOTO_MEDIUM)); + title.setText(LocaleController.getString("ClearingCache", R.string.ClearingCache)); + addView(title, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL | Gravity.TOP, 0, 16 + 150 + 16 + 28 + 16 + 5 + 30, 0, 0)); + + subtitle = new TextView(context); + subtitle.setGravity(Gravity.CENTER_HORIZONTAL); + subtitle.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); + subtitle.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + subtitle.setText(LocaleController.getString("ClearingCacheDescription", R.string.ClearingCacheDescription)); + addView(subtitle, LayoutHelper.createFrame(240, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL | Gravity.TOP, 0, 16 + 150 + 16 + 28 + 16 + 5 + 30 + 18 + 10, 0, 0)); + + setProgress(0); + } + + public void setProgress(float t) { + percentsTextView.cancelAnimation(); + percentsTextView.setText(String.format("%d%%", (int) Math.ceil(MathUtils.clamp(t, 0, 1) * 100)), !LocaleController.isRTL); + progressView.setProgress(t); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure( + MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), + MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(350), MeasureSpec.EXACTLY) + ); + } + + class ProgressView extends View { + + Paint in = new Paint(Paint.ANTI_ALIAS_FLAG), out = new Paint(Paint.ANTI_ALIAS_FLAG); + + public ProgressView(Context context) { + super(context); + + in.setColor(Theme.getColor(Theme.key_switchTrackChecked)); + out.setColor(Theme.multAlpha(Theme.getColor(Theme.key_switchTrackChecked), .2f)); + } + + float progress; + AnimatedFloat progressT = new AnimatedFloat(this, 350, CubicBezierInterpolator.EASE_OUT); + + public void setProgress(float t) { + this.progress = t; + invalidate(); + } + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + + AndroidUtilities.rectTmp.set(0, 0, getMeasuredWidth(), getMeasuredHeight()); + canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(3), AndroidUtilities.dp(3), out); + + AndroidUtilities.rectTmp.set(0, 0, getMeasuredWidth() * progressT.set(this.progress), getMeasuredHeight()); + canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(3), AndroidUtilities.dp(3), in); + } + } + } + private class ClearCacheButtonInternal extends ClearCacheButton { public ClearCacheButtonInternal(Context context) { super(context); ((MarginLayoutParams) button.getLayoutParams()).topMargin = 
AndroidUtilities.dp(5); button.setOnClickListener(e -> { - cleanupFolders(); + BottomSheet bottomSheet = new BottomSheet(getContext(), false) { + @Override + protected boolean canDismissWithTouchOutside() { + return false; + } + }; + bottomSheet.fixNavigationBar(); + bottomSheet.setCanDismissWithSwipe(false); + bottomSheet.setCancelable(false); + ClearingCacheView cacheView = new ClearingCacheView(getContext()); + bottomSheet.setCustomView(cacheView); + + final boolean[] done = new boolean[] { false }; + final float[] progress = new float[] { 0 }; + final boolean[] nextSection = new boolean[] { false }; + Runnable updateProgress = () -> { + cacheView.setProgress(progress[0]); + if (nextSection[0]) { + updateRows(); + } + }; + + AndroidUtilities.runOnUIThread(() -> { + if (!done[0]) { + showDialog(bottomSheet); + } + }, 150); + + cleanupFolders( + (progressValue, next) -> { + progress[0] = progressValue; + nextSection[0] = next; + AndroidUtilities.cancelRunOnUIThread(updateProgress); + AndroidUtilities.runOnUIThread(updateProgress); + }, + () -> AndroidUtilities.runOnUIThread(() -> { + done[0] = true; + cacheView.setProgress(1F); + bottomSheet.dismiss(); + }) + ); }); } @@ -1687,7 +2046,7 @@ public ClearCacheButton(Context context) { @Override protected void dispatchDraw(Canvas canvas) { final int margin = AndroidUtilities.dp(8); - int x = (getMeasuredWidth() - margin - valueTextView.getCurrentWidth() + textView.getCurrentWidth()) / 2; + int x = (getMeasuredWidth() - margin - (int) valueTextView.getCurrentWidth() + (int) textView.getCurrentWidth()) / 2; if (LocaleController.isRTL) { super.dispatchDraw(canvas); @@ -1704,8 +2063,15 @@ protected void dispatchDraw(Canvas canvas) { protected boolean verifyDrawable(@NonNull Drawable who) { return who == valueTextView || who == textView || super.verifyDrawable(who); } + + @Override + public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { + super.onInitializeAccessibilityNodeInfo(info); + info.setClassName("android.widget.Button"); + } }; button.setBackground(Theme.AdaptiveRipple.filledRect(Theme.key_featuredStickers_addButton, 8)); + button.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_YES); if (LocaleController.isRTL) { rtlTextView = new TextView(context); @@ -1734,6 +2100,8 @@ protected boolean verifyDrawable(@NonNull Drawable who) { valueTextView.setTextColor(Theme.adaptHSV(Theme.getColor(Theme.key_featuredStickers_addButton), -.46f, +.08f)); valueTextView.setText(""); + button.setContentDescription(TextUtils.concat(textView.getText(), "\t", valueTextView.getText())); + setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); addView(button, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.FILL, 16, 16, 16, 16)); } @@ -1755,6 +2123,8 @@ public void setSize(boolean allSelected, long size) { valueTextView.setText(size <= 0 ? 
"" : AndroidUtilities.formatFileSize(size)); setDisabled(size <= 0); button.invalidate(); + + button.setContentDescription(TextUtils.concat(textView.getText(), "\t", valueTextView.getText())); } public void setDisabled(boolean disabled) { @@ -2187,14 +2557,11 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } String value = CacheByChatsController.getKeepMediaString(cacheByChatsController.getKeepMedia(keepMediaType)); if (itemInners.get(position).keepMediaType == KEEP_MEDIA_TYPE_USER) { - textCell2.setTextAndValue(LocaleController.getString("PrivateChats", R.string.PrivateChats), value, true, true); - textCell2.setColorfulIcon(getThemedColor(Theme.key_statisticChartLine_lightblue), R.drawable.msg_filled_menu_users); + textCell2.setTextAndValueAndColorfulIcon(LocaleController.getString("PrivateChats", R.string.PrivateChats), value, true, R.drawable.msg_filled_menu_users, getThemedColor(Theme.key_statisticChartLine_lightblue), true); } else if (itemInners.get(position).keepMediaType == KEEP_MEDIA_TYPE_GROUP) { - textCell2.setTextAndValue(LocaleController.getString("GroupChats", R.string.GroupChats), value, true, true); - textCell2.setColorfulIcon(getThemedColor(Theme.key_statisticChartLine_green), R.drawable.msg_filled_menu_groups); + textCell2.setTextAndValueAndColorfulIcon(LocaleController.getString("GroupChats", R.string.GroupChats), value, true, R.drawable.msg_filled_menu_groups, getThemedColor(Theme.key_statisticChartLine_green), true); } else if (itemInners.get(position).keepMediaType == KEEP_MEDIA_TYPE_CHANNEL) { - textCell2.setTextAndValue(LocaleController.getString("CacheChannels", R.string.CacheChannels), value, true, false); - textCell2.setColorfulIcon(getThemedColor(Theme.key_statisticChartLine_golden), R.drawable.msg_filled_menu_channels); + textCell2.setTextAndValueAndColorfulIcon(LocaleController.getString("CacheChannels", R.string.CacheChannels), value, true, R.drawable.msg_filled_menu_channels, getThemedColor(Theme.key_statisticChartLine_golden), true); } textCell2.setSubtitle(subtitle); break; @@ -2310,7 +2677,6 @@ public ArrayList getThemeDescriptions() { arrayList.add(new ThemeDescription(listView, 0, new Class[]{StorageUsageView.class}, new String[]{"telegramCacheTextView"}, null, null, null, Theme.key_windowBackgroundWhiteGrayText)); arrayList.add(new ThemeDescription(listView, 0, new Class[]{StorageUsageView.class}, new String[]{"freeSizeTextView"}, null, null, null, Theme.key_windowBackgroundWhiteGrayText)); arrayList.add(new ThemeDescription(listView, 0, new Class[]{StorageUsageView.class}, new String[]{"calculationgTextView"}, null, null, null, Theme.key_windowBackgroundWhiteGrayText)); - arrayList.add(new ThemeDescription(listView, 0, new Class[]{StorageUsageView.class}, new String[]{"paintProgress2"}, null, null, null, Theme.key_player_progressBackground2)); arrayList.add(new ThemeDescription(listView, 0, new Class[]{SlideChooseView.class}, null, null, null, Theme.key_switchTrack)); arrayList.add(new ThemeDescription(listView, 0, new Class[]{SlideChooseView.class}, null, null, null, Theme.key_switchTrackChecked)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/CachedMediaLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/CachedMediaLayout.java index 0b07a76af9..415e7ef767 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/CachedMediaLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/CachedMediaLayout.java @@ -99,9 +99,9 @@ public CachedMediaLayout(@NonNull Context context, BaseFragment parentFragment) 
this.parentFragment = parentFragment; int CacheTabChats; - allPages[PAGE_TYPE_CHATS] = new Page(LocaleController.getString("Chats", R.string.Chats), PAGE_TYPE_CHATS, new DialogsAdapter()); - allPages[PAGE_TYPE_MEDIA] = new Page(LocaleController.getString("Media", R.string.Media), PAGE_TYPE_MEDIA, new MediaAdapter()); - allPages[PAGE_TYPE_DOCUMENTS] = new Page(LocaleController.getString("Files", R.string.Files), PAGE_TYPE_DOCUMENTS, new DocumentsAdapter()); + allPages[PAGE_TYPE_CHATS] = new Page(LocaleController.getString("FilterChats", R.string.FilterChats), PAGE_TYPE_CHATS, new DialogsAdapter()); + allPages[PAGE_TYPE_MEDIA] = new Page(LocaleController.getString("MediaTab", R.string.MediaTab), PAGE_TYPE_MEDIA, new MediaAdapter()); + allPages[PAGE_TYPE_DOCUMENTS] = new Page(LocaleController.getString("SharedFilesTab2", R.string.SharedFilesTab2), PAGE_TYPE_DOCUMENTS, new DocumentsAdapter()); allPages[PAGE_TYPE_MUSIC] = new Page(LocaleController.getString("Music", R.string.Music), PAGE_TYPE_MUSIC, new MusicAdapter()); // allPages[PAGE_TYPE_VOICE] = new Page(LocaleController.getString("Voice", R.string.Voice), PAGE_TYPE_VOICE, new VoiceAdapter()); @@ -750,7 +750,7 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi holder.itemView.setTag(file); long date = file.file.lastModified() / 1000; - cell.setTextAndValueAndTypeAndThumb(file.messageType == MessageObject.TYPE_ROUND_VIDEO ? LocaleController.getString("AttachRound", R.string.AttachRound) : file.file.getName(), LocaleController.formatDateAudio(date, true), Utilities.getExtension(file.file.getName()), null, 0, divider); + cell.setTextAndValueAndTypeAndThumb(file.messageType == MessageObject.TYPE_ROUND_VIDEO ? LocaleController.getString("AttachRound", R.string.AttachRound) : file.file.getName(), LocaleController.formatDateAudio(date / 1000, true), Utilities.getExtension(file.file.getName()), null, 0, divider); if (!animated) { cell.setPhoto(file.file.getPath()); } @@ -867,8 +867,12 @@ private void checkMessageObjectForAudio(CacheModel.FileInfo fileInfo, int positi } catch (Exception e) { FileLog.e(e); } finally { - if (mediaMetadataRetriever != null) { - mediaMetadataRetriever.release(); + try { + if (mediaMetadataRetriever != null) { + mediaMetadataRetriever.release(); + } + } catch (Throwable e) { + } } String finalTitle = title; @@ -992,7 +996,7 @@ public CacheCell(@NonNull Context context) { super(context); checkBox = new CheckBox2(context, 21); checkBox.setDrawBackgroundAsArc(14); - checkBox.setColor(Theme.key_radioBackground, Theme.key_radioBackground, Theme.key_checkboxCheck); + checkBox.setColor(Theme.key_checkbox, Theme.key_radioBackground, Theme.key_checkboxCheck); addView(checkBox, LayoutHelper.createFrame(24, 24, Gravity.LEFT | Gravity.CENTER_VERTICAL, 18, 0, 0, 0)); View checkBoxClickableView = new View(getContext()); checkBoxClickableView.setOnClickListener(v -> { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/CalendarActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/CalendarActivity.java index 58987667e3..9353b98060 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/CalendarActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/CalendarActivity.java @@ -9,6 +9,7 @@ import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; +import android.graphics.Path; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.os.Bundle; @@ -59,6 +60,7 @@ import 
org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.Components.SharedMediaLayout; +import org.telegram.ui.Components.spoilers.SpoilerEffect; import java.time.YearMonth; import java.util.ArrayList; @@ -124,6 +126,9 @@ public class CalendarActivity extends BaseFragment { private int calendarType; + private Path path = new Path(); + private SpoilerEffect mediaSpoilerEffect = new SpoilerEffect(); + public CalendarActivity(Bundle args, int photosVideosTypeFilter, int selectedDate) { super(args); this.photosVideosTypeFilter = photosVideosTypeFilter; @@ -316,8 +321,7 @@ public void onDraw(Canvas canvas) { @Override public void run(boolean forAll) { finishFragment(); - - if (parentLayout.getFragmentStack().size() >= 2) { + if (parentLayout != null && parentLayout.getFragmentStack().size() >= 2) { BaseFragment fragment = parentLayout.getFragmentStack().get(parentLayout.getFragmentStack().size() - 2); if (fragment instanceof ChatActivity) { ((ChatActivity) fragment).deleteHistory(dateSelectedStart, dateSelectedEnd + 86400, forAll); @@ -698,9 +702,8 @@ public void onLongPress(MotionEvent e) { PeriodDay periodDay = getDayAtCoord(e.getX(), e.getY()); if (periodDay != null) { - if (!NekoConfig.disableVibration.Bool()) { + if (!NekoConfig.disableVibration.Bool()) performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); - } Bundle bundle = new Bundle(); if (dialogId > 0) { @@ -719,7 +722,7 @@ public void onLongPress(MotionEvent e) { cellJump.setTextAndIcon(LocaleController.getString("JumpToDate", R.string.JumpToDate), R.drawable.msg_message); cellJump.setMinimumWidth(160); cellJump.setOnClickListener(view -> { - if (parentLayout.getFragmentStack().size() >= 3) { + if (parentLayout != null && parentLayout.getFragmentStack().size() >= 3) { BaseFragment fragment = parentLayout.getFragmentStack().get(parentLayout.getFragmentStack().size() - 3); if (fragment instanceof ChatActivity) { AndroidUtilities.runOnUIThread(() -> { @@ -927,6 +930,7 @@ public void setDate(int year, int monthInYear, SparseArray messagesBy PeriodDay periodDay = messagesByDays.get(key); MessageObject messageObject = periodDay.messageObject; if (messageObject != null) { + boolean hasMediaSpoilers = messageObject.hasMediaSpoilers(); if (messageObject.isVideo()) { TLRPC.Document document = messageObject.getDocument(); TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(document.thumbs, 50); @@ -936,9 +940,9 @@ public void setDate(int year, int monthInYear, SparseArray messagesBy } if (thumb != null) { if (messageObject.strippedThumb != null) { - receiver.setImage(ImageLocation.getForDocument(qualityThumb, document), "44_44", messageObject.strippedThumb, null, messageObject, 0); + receiver.setImage(ImageLocation.getForDocument(qualityThumb, document), hasMediaSpoilers ? "5_5_b" : "44_44", messageObject.strippedThumb, null, messageObject, 0); } else { - receiver.setImage(ImageLocation.getForDocument(qualityThumb, document), "44_44", ImageLocation.getForDocument(thumb, document), "b", (String) null, messageObject, 0); + receiver.setImage(ImageLocation.getForDocument(qualityThumb, document), hasMediaSpoilers ? 
"5_5_b" : "44_44", ImageLocation.getForDocument(thumb, document), "b", (String) null, messageObject, 0); } } } else if (messageObject.messageOwner.media instanceof TLRPC.TL_messageMediaPhoto && messageObject.messageOwner.media.photo != null && !messageObject.photoThumbs.isEmpty()) { @@ -949,9 +953,9 @@ public void setDate(int year, int monthInYear, SparseArray messagesBy currentPhotoObjectThumb = null; } if (messageObject.strippedThumb != null) { - receiver.setImage(ImageLocation.getForObject(currentPhotoObject, messageObject.photoThumbsObject), "44_44", null, null, messageObject.strippedThumb, currentPhotoObject != null ? currentPhotoObject.size : 0, null, messageObject, messageObject.shouldEncryptPhotoOrVideo() ? 2 : 1); + receiver.setImage(ImageLocation.getForObject(currentPhotoObject, messageObject.photoThumbsObject), hasMediaSpoilers ? "5_5_b" : "44_44", null, null, messageObject.strippedThumb, currentPhotoObject != null ? currentPhotoObject.size : 0, null, messageObject, messageObject.shouldEncryptPhotoOrVideo() ? 2 : 1); } else { - receiver.setImage(ImageLocation.getForObject(currentPhotoObject, messageObject.photoThumbsObject), "44_44", ImageLocation.getForObject(currentPhotoObjectThumb, messageObject.photoThumbsObject), "b", currentPhotoObject != null ? currentPhotoObject.size : 0, null, messageObject, messageObject.shouldEncryptPhotoOrVideo() ? 2 : 1); + receiver.setImage(ImageLocation.getForObject(currentPhotoObject, messageObject.photoThumbsObject), hasMediaSpoilers ? "5_5_b" : "44_44", ImageLocation.getForObject(currentPhotoObjectThumb, messageObject.photoThumbsObject), "b", currentPhotoObject != null ? currentPhotoObject.size : 0, null, messageObject, messageObject.shouldEncryptPhotoOrVideo() ? 2 : 1); } } else { if (messageObject.strippedThumb != null) { @@ -1064,6 +1068,24 @@ protected void onDraw(Canvas canvas) { imagesByDays.get(i).setImageCoords(cx - (AndroidUtilities.dp(44) - pad) / 2f, cy - (AndroidUtilities.dp(44) - pad) / 2f, AndroidUtilities.dp(44) - pad, AndroidUtilities.dp(44) - pad); imagesByDays.get(i).draw(canvas); + if (messagesByDays.get(i) != null && messagesByDays.get(i).messageObject != null && messagesByDays.get(i).messageObject.hasMediaSpoilers()) { + float rad = (AndroidUtilities.dp(44) - pad) / 2f; + path.rewind(); + path.addCircle(cx, cy, rad, Path.Direction.CW); + + canvas.save(); + canvas.clipPath(path); + + int sColor = Color.WHITE; + mediaSpoilerEffect.setColor(ColorUtils.setAlphaComponent(sColor, (int) (Color.alpha(sColor) * 0.325f * day.enterAlpha))); + mediaSpoilerEffect.setBounds((int) (cx - rad), (int) (cy - rad), (int) (cx + rad), (int) (cy + rad)); + mediaSpoilerEffect.draw(canvas); + + invalidate(); + + canvas.restore(); + } + blackoutPaint.setColor(ColorUtils.setAlphaComponent(Color.BLACK, (int) (day.enterAlpha * 80))); canvas.drawCircle(cx, cy, (AndroidUtilities.dp(44) - pad) / 2f, blackoutPaint); day.wasDrawn = true; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/CallLogActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/CallLogActivity.java index b8143811c4..e59c911416 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/CallLogActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/CallLogActivity.java @@ -628,7 +628,7 @@ private void showDeleteAlert(boolean all) { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + 
button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/CameraScanActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/CameraScanActivity.java index 58ba064968..61a215106c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/CameraScanActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/CameraScanActivity.java @@ -296,6 +296,10 @@ public void dismiss() { return bottomSheet; } + public static INavigationLayout[] showAsSheet(BaseFragment parentFragment, CameraScanActivityDelegate cameraDelegate) { + return showAsSheet(parentFragment, true, TYPE_QR, cameraDelegate, true); + } + public CameraScanActivity(int type) { super(); currentType = type; @@ -536,7 +540,7 @@ private RectF aroundPoint(int x, int y, int r) { actionBar.setTitleColor(0xffffffff); actionBar.setItemsColor(0xffffffff, false); actionBar.setItemsBackgroundColor(0x22ffffff, false); - viewGroup.setBackgroundColor(Theme.getColor(Theme.key_wallet_blackBackground)); + viewGroup.setBackgroundColor(0xFF000000); viewGroup.addView(actionBar); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/AboutLinkCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/AboutLinkCell.java index 6d85b268e2..aab0f34f78 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/AboutLinkCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/AboutLinkCell.java @@ -519,9 +519,11 @@ public void init() { @Override public void end(boolean replacing) { - if (thisLoading != null) { - links.removeLoading(thisLoading, true); - } + AndroidUtilities.runOnUIThread(() -> { + if (thisLoading != null) { + links.removeLoading(thisLoading, true); + } + }, replacing ? 0 : 350); } } : null; if (pressedLink instanceof URLSpanNoUnderline) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/BotHelpCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/BotHelpCell.java index 7e540d98cf..361e96baf4 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/BotHelpCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/BotHelpCell.java @@ -12,6 +12,7 @@ import android.graphics.Canvas; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; +import android.os.Build; import android.text.Layout; import android.text.Spannable; import android.text.SpannableStringBuilder; @@ -24,6 +25,8 @@ import android.view.View; import android.view.accessibility.AccessibilityNodeInfo; +import androidx.annotation.NonNull; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.Emoji; import org.telegram.messenger.FileLoader; @@ -40,6 +43,7 @@ import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.LinkPath; +import org.telegram.ui.Components.LinkSpanDrawable; import org.telegram.ui.Components.TypefaceSpan; import org.telegram.ui.Components.URLSpanNoUnderline; @@ -58,8 +62,8 @@ public class BotHelpCell extends View { private int textY; public boolean wasDraw; - private ClickableSpan pressedLink; - private LinkPath urlPath = new LinkPath(); + private LinkSpanDrawable pressedLink; + private LinkSpanDrawable.LinkCollector links = new LinkSpanDrawable.LinkCollector(this); private BotHelpCellDelegate delegate; private Theme.ResourcesProvider resourcesProvider; @@ -84,6 +88,9 @@ public BotHelpCell(Context context, Theme.ResourcesProvider resourcesProvider) { imageReceiver.setInvalidateAll(true); imageReceiver.setCrossfadeWithOldImage(true); 
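An aside on the BotHelpCell constructor change just below (a generic sketch, not the patch itself): because the cell draws its new selectorDrawable by hand in onDraw(), the drawable needs the view as its callback and verifyDrawable() has to accept it, otherwise pressed-state and ripple animations never schedule a redraw. The pattern in isolation, under an assumed class name:

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.drawable.Drawable;
import android.view.View;

// Illustrative view, not part of the diff.
final class SelectorBackedView extends View {
    private final Drawable selector;

    SelectorBackedView(Context context, Drawable selector) {
        super(context);
        this.selector = selector;
        selector.setCallback(this);          // let the drawable request invalidation while animating
    }

    @Override
    protected boolean verifyDrawable(Drawable who) {
        return who == selector || super.verifyDrawable(who);   // accept our manually drawn drawable
    }

    @Override
    protected void onDraw(Canvas canvas) {
        selector.setBounds(0, 0, getWidth(), getHeight());
        selector.draw(canvas);               // drawn by hand, not via setBackground()
    }
}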
imageReceiver.setCrossfadeDuration(300); + + selectorDrawable = Theme.createRadSelectorDrawable(Theme.getColor(Theme.key_listSelector, resourcesProvider), selectorDrawableRadius = SharedConfig.bubbleRadius, SharedConfig.bubbleRadius); + selectorDrawable.setCallback(this); } public void setDelegate(BotHelpCellDelegate botHelpCellDelegate) { @@ -94,6 +101,7 @@ private void resetPressedLink() { if (pressedLink != null) { pressedLink = null; } + links.clear(); invalidate(); } @@ -195,6 +203,13 @@ public void setText(boolean bot, String text, TLObject imageOrAnimation, TLRPC.B } } + public CharSequence getText() { + if (textLayout == null) { + return null; + } + return textLayout.getText(); + } + @Override public boolean onTouchEvent(MotionEvent event) { float x = event.getX(); @@ -217,15 +232,18 @@ public boolean onTouchEvent(MotionEvent event) { ClickableSpan[] link = buffer.getSpans(off, off, ClickableSpan.class); if (link.length != 0) { resetPressedLink(); - pressedLink = link[0]; + pressedLink = new LinkSpanDrawable(link[0], resourcesProvider, x2, y2); result = true; try { - int start = buffer.getSpanStart(pressedLink); - urlPath.setCurrentLayout(textLayout, start, 0); - textLayout.getSelectionPath(start, buffer.getSpanEnd(pressedLink), urlPath); + int start = buffer.getSpanStart(link[0]); + LinkPath path = pressedLink.obtainNewPath(); + path.setCurrentLayout(textLayout, start, 0); + textLayout.getSelectionPath(start, buffer.getSpanEnd(link[0]), path); } catch (Exception e) { FileLog.e(e); } + links.addLink(pressedLink); + invalidate(); } else { resetPressedLink(); } @@ -238,21 +256,20 @@ public boolean onTouchEvent(MotionEvent event) { } } else if (pressedLink != null) { try { - if (pressedLink instanceof URLSpanNoUnderline) { - String url = ((URLSpanNoUnderline) pressedLink).getURL(); + ClickableSpan span = pressedLink.getSpan(); + if (span instanceof URLSpanNoUnderline) { + String url = ((URLSpanNoUnderline) span).getURL(); if (url.startsWith("@") || url.startsWith("#") || url.startsWith("/")) { if (delegate != null) { delegate.didPressUrl(url); } } - } else { - if (pressedLink instanceof URLSpan) { - if (delegate != null) { - delegate.didPressUrl(((URLSpan) pressedLink).getURL()); - } - } else { - pressedLink.onClick(this); + } else if (span instanceof URLSpan) { + if (delegate != null) { + delegate.didPressUrl(((URLSpan) span).getURL()); } + } else if (span != null) { + span.onClick(this); } } catch (Exception e) { FileLog.e(e); @@ -264,6 +281,23 @@ public boolean onTouchEvent(MotionEvent event) { resetPressedLink(); } } + if (selectorDrawable != null) { + if (!result && y > 0 && event.getAction() == MotionEvent.ACTION_DOWN && isClickable()) { + selectorDrawable.setState(new int[]{android.R.attr.state_pressed, android.R.attr.state_enabled}); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + selectorDrawable.setHotspot(event.getX(), event.getY()); + } + invalidate(); + result = true; + } else if (event.getAction() == MotionEvent.ACTION_UP || event.getAction() == MotionEvent.ACTION_CANCEL) { + selectorDrawable.setState(new int[]{}); + invalidate(); + if (!result && event.getAction() == MotionEvent.ACTION_UP) { + performClick(); + } + result = true; + } + } return result || super.onTouchEvent(event); } @@ -272,6 +306,9 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { setMeasuredDimension(MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), height + AndroidUtilities.dp(8)); } + private Drawable selectorDrawable; 
+ private int selectorDrawableRadius; + @Override protected void onDraw(Canvas canvas) { int x = (getWidth() - width) / 2; @@ -294,6 +331,15 @@ protected void onDraw(Canvas canvas) { drawable.setBounds(x, 0, width + x, height); drawable.draw(canvas); + if (selectorDrawable != null) { + if (selectorDrawableRadius != SharedConfig.bubbleRadius) { + selectorDrawableRadius = SharedConfig.bubbleRadius; + Theme.setMaskDrawableRad(selectorDrawable, selectorDrawableRadius, selectorDrawableRadius); + } + selectorDrawable.setBounds(x + AndroidUtilities.dp(2), AndroidUtilities.dp(2), width + x - AndroidUtilities.dp(2), height - AndroidUtilities.dp(2)); + selectorDrawable.draw(canvas); + } + imageReceiver.setImageCoords(x + imagePadding, imagePadding, width - imagePadding * 2, photoHeight - imagePadding); imageReceiver.draw(canvas); @@ -301,8 +347,8 @@ protected void onDraw(Canvas canvas) { Theme.chat_msgTextPaint.linkColor = getThemedColor(Theme.key_chat_messageLinkIn); canvas.save(); canvas.translate(textX = AndroidUtilities.dp(isPhotoVisible ? 14 : 11) + x, textY = AndroidUtilities.dp(11) + y); - if (pressedLink != null) { - canvas.drawPath(urlPath, Theme.chat_urlPaint); + if (links.draw(canvas)) { + invalidate(); } if (textLayout != null) { textLayout.draw(canvas); @@ -349,4 +395,9 @@ private Drawable getThemedDrawable(String drawableKey) { Drawable drawable = resourcesProvider != null ? resourcesProvider.getDrawable(drawableKey) : null; return drawable != null ? drawable : Theme.getThemeDrawable(drawableKey); } + + @Override + protected boolean verifyDrawable(@NonNull Drawable who) { + return who == selectorDrawable || super.verifyDrawable(who); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatActionCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatActionCell.java index 8afac1d142..26165e08f1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatActionCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatActionCell.java @@ -14,6 +14,7 @@ import android.graphics.Canvas; import android.graphics.Paint; import android.graphics.Path; +import android.graphics.Rect; import android.graphics.RectF; import android.os.Build; import android.text.Layout; @@ -86,6 +87,8 @@ import java.util.Objects; import java.util.Stack; +import tw.nekomimi.nekogram.NekoConfig; + public class ChatActionCell extends BaseCell implements DownloadController.FileDownloadProgressListener, NotificationCenter.NotificationCenterDelegate { private final static boolean USE_PREMIUM_GIFT_LOCAL_STICKER = false; private final static boolean USE_PREMIUM_GIFT_MONTHS_AS_EMOJI_NUMBERS = false; @@ -128,6 +131,11 @@ public void setSpoilersSuppressed(boolean s) { } private boolean canDrawInParent; + private View invalidateWithParent; + + public void setInvalidateWithParent(View viewToInvalidate) { + invalidateWithParent = viewToInvalidate; + } public interface ChatActionCellDelegate { default void didClickImage(ChatActionCell cell) { @@ -264,11 +272,10 @@ public interface ThemeDelegate extends Theme.ResourcesProvider { if (messageObject != null && messageObject.wasUnread || forceWasUnread) { forceWasUnread = messageObject.wasUnread = false; - if (!NekoConfig.disableVibration.Bool()) { - try { - performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignored) { - } + try { + if (!NekoConfig.disableVibration.Bool()) + performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, 
HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } catch (Exception ignored) { } if (getContext() instanceof LaunchActivity) { @@ -402,37 +409,37 @@ public void setMessageObject(MessageObject messageObject, boolean force) { hasReplyMessage = messageObject.replyMessageObject != null; DownloadController.getInstance(currentAccount).removeLoadingFileObserver(this); previousWidth = 0; + imageReceiver.setAutoRepeatCount(0); if (messageObject.type == MessageObject.TYPE_SUGGEST_PHOTO) { imageReceiver.setRoundRadius((int) (stickerSize / 2f)); imageReceiver.setAllowStartLottieAnimation(true); imageReceiver.setDelegate(null); TLRPC.TL_messageActionSuggestProfilePhoto action = (TLRPC.TL_messageActionSuggestProfilePhoto) messageObject.messageOwner.action; + TLRPC.VideoSize videoSize = FileLoader.getClosestVideoSizeWithSize(action.photo.video_sizes, 1000); ImageLocation videoLocation; if (action.photo.video_sizes != null && !action.photo.video_sizes.isEmpty()) { - videoLocation = ImageLocation.getForPhoto(action.photo.video_sizes.get(0), action.photo); + videoLocation = ImageLocation.getForPhoto(videoSize, action.photo); } else { videoLocation = null; } TLRPC.Photo photo = messageObject.messageOwner.action.photo; - TLRPC.VideoSize videoSize = null; TLRPC.PhotoSize strippedPhotoSize = null; - for (int a = 0, N = messageObject.photoThumbs.size(); a < N; a++) { - TLRPC.PhotoSize photoSize = messageObject.photoThumbs.get(a); - if (photoSize instanceof TLRPC.TL_photoStrippedSize) { - strippedPhotoSize = photoSize; - break; + if (messageObject.strippedThumb == null) { + for (int a = 0, N = messageObject.photoThumbs.size(); a < N; a++) { + TLRPC.PhotoSize photoSize = messageObject.photoThumbs.get(a); + if (photoSize instanceof TLRPC.TL_photoStrippedSize) { + strippedPhotoSize = photoSize; + break; + } } } TLRPC.PhotoSize photoSize = FileLoader.getClosestPhotoSizeWithSize(messageObject.photoThumbs, 1000); if (photoSize != null) { - if (!photo.video_sizes.isEmpty()) { - videoSize = photo.video_sizes.get(0); - } if (videoSize != null) { - imageReceiver.setImage(videoLocation, ImageLoader.AUTOPLAY_FILTER, ImageLocation.getForPhoto(photoSize, photo), "150_150", ImageLocation.getForObject(strippedPhotoSize, messageObject.photoThumbsObject), "50_50_b", null, 0, null, messageObject, 0); + imageReceiver.setImage(videoLocation, ImageLoader.AUTOPLAY_FILTER, ImageLocation.getForPhoto(photoSize, photo), "150_150", ImageLocation.getForObject(strippedPhotoSize, messageObject.photoThumbsObject), "50_50_b", messageObject.strippedThumb, 0, null, messageObject, 0); } else { - imageReceiver.setImage(ImageLocation.getForPhoto(photoSize, photo), "150_150", ImageLocation.getForObject(strippedPhotoSize, messageObject.photoThumbsObject), "50_50_b", null, 0, null, messageObject, 0); + imageReceiver.setImage(ImageLocation.getForPhoto(photoSize, photo), "150_150", ImageLocation.getForObject(strippedPhotoSize, messageObject.photoThumbsObject), "50_50_b", messageObject.strippedThumb, 0, null, messageObject, 0); } } @@ -532,25 +539,28 @@ public void setMessageObject(MessageObject messageObject, boolean force) { imageReceiver.setAllowStartLottieAnimation(true); imageReceiver.setDelegate(null); imageReceiver.setRoundRadius(AndroidUtilities.roundMessageSize / 2); + imageReceiver.setAutoRepeatCount(1); long id = messageObject.getDialogId(); avatarDrawable.setInfo(id, null, null); if (messageObject.messageOwner.action instanceof TLRPC.TL_messageActionUserUpdatedPhoto) { imageReceiver.setImage(null, null, avatarDrawable, null, 
messageObject, 0); } else { TLRPC.PhotoSize strippedPhotoSize = null; - for (int a = 0, N = messageObject.photoThumbs.size(); a < N; a++) { - TLRPC.PhotoSize photoSize = messageObject.photoThumbs.get(a); - if (photoSize instanceof TLRPC.TL_photoStrippedSize) { - strippedPhotoSize = photoSize; - break; + if (messageObject.strippedThumb == null) { + for (int a = 0, N = messageObject.photoThumbs.size(); a < N; a++) { + TLRPC.PhotoSize photoSize = messageObject.photoThumbs.get(a); + if (photoSize instanceof TLRPC.TL_photoStrippedSize) { + strippedPhotoSize = photoSize; + break; + } } } TLRPC.PhotoSize photoSize = FileLoader.getClosestPhotoSizeWithSize(messageObject.photoThumbs, 640); if (photoSize != null) { TLRPC.Photo photo = messageObject.messageOwner.action.photo; TLRPC.VideoSize videoSize = null; - if (!photo.video_sizes.isEmpty() && SharedConfig.autoplayGifs) { - videoSize = photo.video_sizes.get(0); + if (!photo.video_sizes.isEmpty() && SharedConfig.isAutoplayGifs()) { + videoSize = FileLoader.getClosestVideoSizeWithSize(photo.video_sizes, 1000); if (!messageObject.mediaExists && !DownloadController.getInstance(currentAccount).canDownloadMedia(DownloadController.AUTODOWNLOAD_TYPE_VIDEO, videoSize.size)) { currentVideoLocation = ImageLocation.getForPhoto(videoSize, photo); String fileName = FileLoader.getAttachFileName(videoSize); @@ -559,9 +569,9 @@ public void setMessageObject(MessageObject messageObject, boolean force) { } } if (videoSize != null) { - imageReceiver.setImage(ImageLocation.getForPhoto(videoSize, photo), ImageLoader.AUTOPLAY_FILTER, ImageLocation.getForObject(strippedPhotoSize, messageObject.photoThumbsObject), "50_50_b", avatarDrawable, 0, null, messageObject, 1); + imageReceiver.setImage(ImageLocation.getForPhoto(videoSize, photo), ImageLoader.AUTOPLAY_FILTER, ImageLocation.getForObject(strippedPhotoSize, messageObject.photoThumbsObject), "50_50_b", messageObject.strippedThumb, 0, null, messageObject, 1); } else { - imageReceiver.setImage(ImageLocation.getForObject(photoSize, messageObject.photoThumbsObject), "150_150", ImageLocation.getForObject(strippedPhotoSize, messageObject.photoThumbsObject), "50_50_b", avatarDrawable, 0, null, messageObject, 1); + imageReceiver.setImage(ImageLocation.getForObject(photoSize, messageObject.photoThumbsObject), "150_150", ImageLocation.getForObject(strippedPhotoSize, messageObject.photoThumbsObject), "50_50_b", messageObject.strippedThumb, 0, null, messageObject, 1); } } else { imageReceiver.setImageBitmap(avatarDrawable); @@ -842,6 +852,9 @@ private void openLink(CharacterStyle link) { private void createLayout(CharSequence text, int width) { int maxWidth = width - AndroidUtilities.dp(30); + if (maxWidth < 0) { + return; + } invalidatePath = true; TextPaint paint; if (currentMessageObject != null && currentMessageObject.drawServiceWithDefaultTypeface) { @@ -849,6 +862,7 @@ private void createLayout(CharSequence text, int width) { } else { paint = (TextPaint) getThemedPaint(Theme.key_paint_chatActionText); } + paint.linkColor = paint.getColor(); textLayout = new StaticLayout(text, paint, maxWidth, Layout.Alignment.ALIGN_CENTER, 1.0f, 0.0f, false); spoilersPool.addAll(spoilers); @@ -1144,7 +1158,7 @@ protected void onDraw(Canvas canvas) { if (isButtonLayout(messageObject)) { canvas.save(); - float x = (previousWidth - giftRectSize) / 2f + AndroidUtilities.dp(8), y = textY + textHeight + giftRectSize * 0.075f + imageSize + AndroidUtilities.dp(4); + float x = (previousWidth - giftRectSize) / 2f + AndroidUtilities.dp(8), y = textY + 
textHeight + giftRectSize * 0.075f + (messageObject.type == MessageObject.TYPE_SUGGEST_PHOTO ? imageSize : stickerSize) + AndroidUtilities.dp(4); if (messageObject.type == MessageObject.TYPE_SUGGEST_PHOTO) { y += +AndroidUtilities.dp(16); } @@ -1529,4 +1543,28 @@ public void drawOutboundsContent(Canvas canvas) { private boolean isButtonLayout(MessageObject messageObject) { return messageObject != null && (messageObject.type == MessageObject.TYPE_GIFT_PREMIUM || messageObject.type == MessageObject.TYPE_SUGGEST_PHOTO); } + + @Override + public void invalidate() { + super.invalidate(); + if (invalidateWithParent != null) { + invalidateWithParent.invalidate(); + } + } + + @Override + public void invalidate(Rect dirty) { + super.invalidate(dirty); + if (invalidateWithParent != null) { + invalidateWithParent.invalidate(); + } + } + + @Override + public void invalidate(int l, int t, int r, int b) { + super.invalidate(l, t, r, b); + if (invalidateWithParent != null) { + invalidateWithParent.invalidate(); + } + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatMessageCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatMessageCell.java index b835a3300a..e81f8c2714 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatMessageCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatMessageCell.java @@ -64,6 +64,7 @@ import android.view.ViewConfiguration; import android.view.ViewGroup; import android.view.ViewStructure; +import android.view.Window; import android.view.accessibility.AccessibilityEvent; import android.view.accessibility.AccessibilityManager; import android.view.accessibility.AccessibilityNodeInfo; @@ -89,9 +90,11 @@ import org.telegram.messenger.Emoji; import org.telegram.messenger.FileLoader; import org.telegram.messenger.FileLog; +import org.telegram.messenger.FlagSecureReason; import org.telegram.messenger.ImageLoader; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaController; import org.telegram.messenger.MediaDataController; @@ -155,6 +158,7 @@ import org.telegram.ui.Components.URLSpanBrowser; import org.telegram.ui.Components.URLSpanMono; import org.telegram.ui.Components.URLSpanNoUnderline; +import org.telegram.ui.Components.VectorAvatarThumbDrawable; import org.telegram.ui.Components.VideoForwardDrawable; import org.telegram.ui.Components.spoilers.SpoilerEffect; import org.telegram.ui.PhotoViewer; @@ -184,7 +188,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate public boolean clipToGroupBounds; public boolean drawForBlur; private boolean flipImage; - private boolean visibleOnScreen; + private boolean visibleOnScreen = true; public boolean shouldCheckVisibleOnScreen; float parentBoundsTop; int parentBoundsBottom; @@ -248,25 +252,33 @@ private void setAvatar(MessageObject messageObject) { } else { currentPhoto = null; } - avatarDrawable.setInfo(currentUser); - avatarImage.setForUserOrChat(currentUser, avatarDrawable, null, true); + post(() -> { + avatarDrawable.setInfo(currentUser); + avatarImage.setForUserOrChat(currentUser, avatarDrawable, null, LiteMode.isEnabled(LiteMode.FLAGS_CHAT), VectorAvatarThumbDrawable.TYPE_SMALL); + }); } else if (currentChat != null) { if (currentChat.photo != null) { currentPhoto = currentChat.photo.photo_small; } else { currentPhoto = null; } - avatarDrawable.setInfo(currentChat); - 
avatarImage.setForUserOrChat(currentChat, avatarDrawable); + post(() -> { + avatarDrawable.setInfo(currentChat); + avatarImage.setForUserOrChat(currentChat, avatarDrawable); + }); } else if (messageObject != null && messageObject.isSponsored()) { if (messageObject.sponsoredChatInvite != null && messageObject.sponsoredChatInvite.chat != null) { - avatarDrawable.setInfo(messageObject.sponsoredChatInvite.chat); - avatarImage.setForUserOrChat(messageObject.sponsoredChatInvite.chat, avatarDrawable); + post(() -> { + avatarDrawable.setInfo(messageObject.sponsoredChatInvite.chat); + avatarImage.setForUserOrChat(messageObject.sponsoredChatInvite.chat, avatarDrawable); + }); } else { avatarDrawable.setInfo(messageObject.sponsoredChatInvite); - TLRPC.Photo photo = messageObject.sponsoredChatInvite.photo; - if (photo != null) { - avatarImage.setImage(ImageLocation.getForPhoto(photo.sizes.get(0), photo), "50_50", avatarDrawable, null, null, 0); + if (messageObject.sponsoredChatInvite != null) { + TLRPC.Photo photo = messageObject.sponsoredChatInvite.photo; + if (photo != null) { + avatarImage.setImage(ImageLocation.getForPhoto(photo.sizes.get(0), photo), "50_50", avatarDrawable, null, null, 0); + } } } } else { @@ -369,6 +381,9 @@ public void setVisibleOnScreen(boolean visibleOnScreen) { if (this.visibleOnScreen != visibleOnScreen) { this.visibleOnScreen = visibleOnScreen; checkImageReceiversAttachState(); + if (visibleOnScreen) { + invalidate(); + } } } @@ -385,6 +400,10 @@ public void setParentBounds(float chatListViewPaddingTop, int blurredViewBottomO } public interface ChatMessageCellDelegate { + default boolean isReplyOrSelf() { + return false; + } + default void didPressExtendedMediaPreview(ChatMessageCell cell, TLRPC.KeyboardButton button) { } @@ -778,6 +797,8 @@ public static class PollButton { private int[] pressedState = new int[]{android.R.attr.state_enabled, android.R.attr.state_pressed}; private float animatingLoadingProgressProgress; CharSequence accessibilityText; + private boolean accessibilityTextUnread, accessibilityTextContentUnread; + private long accessibilityTextFileSize; private boolean wasTranscriptionOpen; private RoundVideoPlayingDrawable roundVideoPlayingDrawable; @@ -1060,6 +1081,12 @@ class LoadingDrawableLocation { private float replyTouchX, replyTouchY; private TLRPC.PhotoSize currentReplyPhoto; + private AnimatedFloat translationLoadingFloat; + private LinkPath translationLoadingPath; + private LoadingDrawable translationLoadingDrawable; + private ArrayList translationLoadingDrawableText; + private StaticLayout translationLoadingDrawableLayout; + private boolean drawTopic; private MessageTopicButton topicButton; @@ -1164,6 +1191,7 @@ class LoadingDrawableLocation { private int lastViewsCount; private int lastRepliesCount; private float selectedBackgroundProgress; + private boolean lastTranslated; private float viewTop; private int backgroundHeight; @@ -1184,7 +1212,7 @@ class LoadingDrawableLocation { private AnimatedFloat roundVideoPlayPipFloat = new AnimatedFloat(this, 200, CubicBezierInterpolator.EASE_OUT); private Paint roundVideoPipPaint; - private Runnable unregisterFlagSecure; + private FlagSecureReason flagSecure; private Runnable diceFinishCallback = new Runnable() { @Override @@ -1244,7 +1272,7 @@ public void run() { VideoForwardDrawable videoForwardDrawable; VideoPlayerRewinder videoPlayerRewinder; - private final Theme.ResourcesProvider resourcesProvider; + private Theme.ResourcesProvider resourcesProvider; private final boolean canDrawBackgroundInParent; // 
Public for enter transition @@ -1381,6 +1409,19 @@ public void drawStatusWithImage(Canvas canvas, ImageReceiver imageReceiver, int canvas.drawCircle(xCenterRegion - circleRadius, yCenterRegion - circleRadius, circleRadius, paint); } + public void setResourcesProvider(Theme.ResourcesProvider resourcesProvider) { + this.resourcesProvider = resourcesProvider; + if (radialProgress != null) { + radialProgress.setResourcesProvider(resourcesProvider); + } + if (videoRadialProgress != null) { + videoRadialProgress.setResourcesProvider(resourcesProvider); + } + if (roundVideoPlayingDrawable != null) { + roundVideoPlayingDrawable.setResourcesProvider(resourcesProvider); + } + } + private void createPollUI() { if (pollAvatarImages != null) { return; @@ -1872,7 +1913,7 @@ private boolean checkLinkPreviewMotionEvent(MotionEvent event) { } else if (documentAttachType != DOCUMENT_ATTACH_TYPE_DOCUMENT && drawPhotoImage && photoImage.isInsideImage(x, y)) { linkPreviewPressed = true; TLRPC.WebPage webPage = MessageObject.getMedia(currentMessageObject.messageOwner).webpage; - if (documentAttachType == DOCUMENT_ATTACH_TYPE_GIF && buttonState == -1 && SharedConfig.autoplayGifs && (photoImage.getAnimation() == null || !TextUtils.isEmpty(webPage.embed_url))) { + if (documentAttachType == DOCUMENT_ATTACH_TYPE_GIF && buttonState == -1 && SharedConfig.isAutoplayGifs() && (photoImage.getAnimation() == null || !TextUtils.isEmpty(webPage.embed_url))) { linkPreviewPressed = false; return false; } @@ -1929,7 +1970,7 @@ private boolean checkLinkPreviewMotionEvent(MotionEvent event) { } } else if (documentAttachType == DOCUMENT_ATTACH_TYPE_GIF && drawImageButton) { if (buttonState == -1) { - if (SharedConfig.autoplayGifs) { + if (SharedConfig.isAutoplayGifs()) { delegate.didPressImage(this, lastTouchX, lastTouchY); } else { buttonState = 2; @@ -2458,7 +2499,7 @@ private boolean checkPhotoImageMotionEvent(MotionEvent event) { if (currentMessageObject.isSendError()) { imagePressed = false; result = false; - } else if (currentMessageObject.type == MessageObject.TYPE_GIF && buttonState == -1 && SharedConfig.autoplayGifs && photoImage.getAnimation() == null) { + } else if (currentMessageObject.type == MessageObject.TYPE_GIF && buttonState == -1 && SharedConfig.isAutoplayGifs() && photoImage.getAnimation() == null) { imagePressed = false; result = false; } @@ -3082,7 +3123,7 @@ public boolean onTouchEvent(MotionEvent event) { forwardBotPressed = false; playSoundEffect(SoundEffectConstants.CLICK); if (delegate != null) { - if (currentViaBotUser.bot_inline_placeholder == null) { + if (currentViaBotUser != null && currentViaBotUser.bot_inline_placeholder == null) { delegate.didPressViaBotNotInline(this, currentViaBotUser != null ? currentViaBotUser.id : 0); } else { delegate.didPressViaBot(this, currentViaBotUser != null ? 
currentViaBotUser.username : currentMessageObject.messageOwner.via_bot_name); @@ -3471,6 +3512,9 @@ public void setVisiblePart(int position, int height, int parent, float parentOff this.blurredViewTopOffset = blurredViewTopOffset; this.blurredViewBottomOffset = blurredViewBottomOffset; + if (!botButtons.isEmpty() && viewTop != visibleTop) { + invalidate(); + } viewTop = visibleTop; if (parent != parentHeight || parentOffset != this.parentViewTopOffset) { @@ -3604,7 +3648,7 @@ private void didClickedImage() { } } else if (currentMessageObject.type == MessageObject.TYPE_GIF) { if (buttonState == -1 || buttonState == 1 && canStreamVideo && autoPlayingMedia) { - //if (SharedConfig.autoplayGifs) { + //if (SharedConfig.isAutoplayGifs()) { delegate.didPressImage(this, lastTouchX, lastTouchY); /*} else { buttonState = 2; @@ -3910,14 +3954,15 @@ protected void onDetachedFromWindow() { reactionsLayoutInBubble.onDetachFromWindow(); statusDrawableAnimationInProgress = false; - if (unregisterFlagSecure != null) { - unregisterFlagSecure.run(); - unregisterFlagSecure = null; + if (flagSecure != null) { + flagSecure.detach(); } - if (topicButton != null) { topicButton.onDetached(this); } + if (currentNameStatusDrawable != null) { + currentNameStatusDrawable.detach(); + } } @Override @@ -3985,15 +4030,20 @@ public void onAttachedToWindow() { toSeekBarProgress = showSeekbar ? 1f : 0f; } reactionsLayoutInBubble.onAttachToWindow(); + if (flagSecure != null) { + flagSecure.attach(); + } updateFlagSecure(); if (currentMessageObject != null && currentMessageObject.type == MessageObject.TYPE_EXTENDED_MEDIA_PREVIEW && unlockLayout != null) { invalidate(); } - if (topicButton != null) { topicButton.onAttached(this); } + if (currentNameStatusDrawable != null) { + currentNameStatusDrawable.attach(); + } } boolean imageReceiversAttachState; @@ -4037,8 +4087,8 @@ private void checkImageReceiversAttachState() { if (!loadDocumentFromImageReceiver) { TLRPC.PhotoSize photo = document == null ? 
FileLoader.getClosestPhotoSizeWithSize(currentMessageObject.photoThumbs, AndroidUtilities.getPhotoSize()) : null; if (canDownload == 2 || canDownload == 1 && currentMessageObject.isVideo()) { - if (document != null && !currentMessageObject.shouldEncryptPhotoOrVideo() && currentMessageObject.canStreamVideo()) { - FileLoader.getInstance(currentAccount).loadFile(document, currentMessageObject, FileLoader.PRIORITY_NORMAL, 10); + if (canDownload != 2 && document != null && !currentMessageObject.shouldEncryptPhotoOrVideo() && currentMessageObject.canStreamVideo()) { + FileLoader.getInstance(currentAccount).loadFile(document, currentMessageObject, FileLoader.PRIORITY_NORMAL, 0); } } else if (canDownload != 0) { if (document != null) { @@ -4071,32 +4121,35 @@ private void checkImageReceiversAttachState() { photoImage.onDetachedFromWindow(); blurredPhotoImage.onDetachedFromWindow(); - if (currentMessageObject != null && !currentMessageObject.mediaExists && !currentMessageObject.putInDownloadsStore && !DownloadController.getInstance(currentAccount).isDownloading(currentMessageObject.messageOwner.id)) { - TLRPC.Document document = currentMessageObject.getDocument(); - boolean loadDocumentFromImageReceiver = MessageObject.isStickerDocument(document) || MessageObject.isAnimatedStickerDocument(document, true) || MessageObject.isGifDocument(document) || MessageObject.isRoundVideoDocument(document); - if (!loadDocumentFromImageReceiver) { - if (document != null) { - FileLoader.getInstance(currentAccount).cancelLoadFile(document); - } else { - TLRPC.PhotoSize photo = FileLoader.getClosestPhotoSizeWithSize(currentMessageObject.photoThumbs, AndroidUtilities.getPhotoSize()); - if (photo != null) { - FileLoader.getInstance(currentAccount).cancelLoadFile(photo); - } - } - } - } + cancelLoading(currentMessageObject); AnimatedEmojiSpan.release(this, animatedEmojiDescriptionStack); AnimatedEmojiSpan.release(this, animatedEmojiReplyStack); AnimatedEmojiSpan.release(this, animatedEmojiStack); } } - private boolean lastTranslated; + private void cancelLoading(MessageObject messageObject) { + if (messageObject != null && !messageObject.mediaExists && !messageObject.putInDownloadsStore && !DownloadController.getInstance(currentAccount).isDownloading(messageObject.messageOwner.id)) { + TLRPC.Document document = messageObject.getDocument(); + boolean loadDocumentFromImageReceiver = MessageObject.isStickerDocument(document) || MessageObject.isAnimatedStickerDocument(document, true) || MessageObject.isGifDocument(document) || MessageObject.isRoundVideoDocument(document); + if (!loadDocumentFromImageReceiver) { + if (document != null) { + FileLoader.getInstance(currentAccount).cancelLoadFile(document); + } else { + TLRPC.PhotoSize photo = FileLoader.getClosestPhotoSizeWithSize(messageObject.photoThumbs, AndroidUtilities.getPhotoSize()); + if (photo != null) { + FileLoader.getInstance(currentAccount).cancelLoadFile(photo); + } + } + } + } + } private void setMessageContent(MessageObject messageObject, MessageObject.GroupedMessages groupedMessages, boolean bottomNear, boolean topNear) { if (messageObject.checkLayout() || currentPosition != null && lastHeight != AndroidUtilities.displaySize.y) { currentMessageObject = null; } + messageObject.isOutOwnerCached = null; boolean widthChanged = lastWidth != getParentWidth(); lastHeight = AndroidUtilities.displaySize.y; lastWidth = getParentWidth(); @@ -4109,7 +4162,8 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe currentMessageObject == 
messageObject && (isUserDataChanged() || photoNotSet) || lastPostAuthor != messageObject.messageOwner.post_author || wasPinned != isPinned || - newReply != lastReplyMessage; + newReply != lastReplyMessage || + messageObject.translated != lastTranslated; boolean groupChanged = groupedMessages != currentMessagesGroup; boolean pollChanged = false; @@ -4223,6 +4277,7 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe photoImage.setCrossfadeDuration(ImageReceiver.DEFAULT_CROSSFADE_DURATION); photoImage.setCrossfadeByScale(0); photoImage.setGradientBitmap(null); + lastTranslated = messageObject.translated; lastSendState = messageObject.messageOwner.send_state; lastDeleteDate = messageObject.messageOwner.destroyTime; lastViewsCount = messageObject.messageOwner.views; @@ -4467,22 +4522,30 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe if (a < size) { commentAvatarImages[a].setImageCoords(0, 0, AndroidUtilities.dp(24), AndroidUtilities.dp(24)); long id = MessageObject.getPeerId(recentRepliers.get(a)); - TLRPC.User user = null; - TLRPC.Chat chat = null; + TLRPC.User user; + TLRPC.Chat chat; if (DialogObject.isUserDialog(id)) { user = MessagesController.getInstance(currentAccount).getUser(id); + chat = null; } else if (DialogObject.isChatDialog(id)) { + user = null; chat = MessagesController.getInstance(currentAccount).getChat(-id); - } - if (user != null) { - commentAvatarDrawables[a].setInfo(user); - commentAvatarImages[a].setForUserOrChat(user, commentAvatarDrawables[a]); - } else if (chat != null) { - commentAvatarDrawables[a].setInfo(chat); - commentAvatarImages[a].setForUserOrChat(chat, commentAvatarDrawables[a]); } else { - commentAvatarDrawables[a].setInfo(id, "", ""); + user = null; + chat = null; } + final int A = a; + post(() -> { + if (user != null) { + commentAvatarDrawables[A].setInfo(user); + commentAvatarImages[A].setForUserOrChat(user, commentAvatarDrawables[A]); + } else if (chat != null) { + commentAvatarDrawables[A].setInfo(chat); + commentAvatarImages[A].setForUserOrChat(chat, commentAvatarDrawables[A]); + } else { + commentAvatarDrawables[A].setInfo(id, "", ""); + } + }); commentAvatarImagesVisible[a] = true; avatarsOffset += a == 0 ? 2 : 17; } else if (size != 0) { @@ -4567,11 +4630,12 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe hasGamePreview = MessageObject.getMedia(messageObject.messageOwner) instanceof TLRPC.TL_messageMediaGame && MessageObject.getMedia(messageObject.messageOwner).game instanceof TLRPC.TL_game; hasInvoicePreview = MessageObject.getMedia(messageObject.messageOwner) instanceof TLRPC.TL_messageMediaInvoice; hasLinkPreview = !messageObject.isRestrictedMessage && MessageObject.getMedia(messageObject.messageOwner) instanceof TLRPC.TL_messageMediaWebPage && MessageObject.getMedia(messageObject.messageOwner).webpage instanceof TLRPC.TL_webPage; + TLRPC.WebPage webpage = hasLinkPreview ? MessageObject.getMedia(messageObject.messageOwner).webpage : null; drawInstantView = hasLinkPreview && MessageObject.getMedia(messageObject.messageOwner).webpage.cached_page != null; String siteName = hasLinkPreview ? MessageObject.getMedia(messageObject.messageOwner).webpage.site_name : null; hasEmbed = hasLinkPreview && !TextUtils.isEmpty(MessageObject.getMedia(messageObject.messageOwner).webpage.embed_url) && !messageObject.isGif() && !"instangram".equalsIgnoreCase(siteName); boolean slideshow = false; - String webpageType = hasLinkPreview ? 
MessageObject.getMedia(messageObject.messageOwner).webpage.type : null; + String webpageType = webpage != null ? webpage.type : null; TLRPC.Document androidThemeDocument = null; TLRPC.ThemeSettings androidThemeSettings = null; if (!drawInstantView) { @@ -4590,7 +4654,7 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe } else if ("telegram_megagroup".equals(webpageType)) { drawInstantView = true; drawInstantViewType = 2; - } else if ("telegram_message".equals(webpageType)) { + } else if ("telegram_message".equals(webpageType) || "photo".equals(webpageType) && webpage != null && webpage.url != null && Browser.isInternalUri(Uri.parse(webpage.url), null)) { drawInstantView = true; drawInstantViewType = 3; } else if ("telegram_theme".equals(webpageType)) { @@ -4901,6 +4965,12 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe } boolean titleIsRTL = false; + if (!titleIsRTL && author != null) { + titleIsRTL = AndroidUtilities.isRTL(author); + } + if (!titleIsRTL && messageObject.linkDescription != null) { + titleIsRTL = AndroidUtilities.isRTL(messageObject.linkDescription); + } if (title != null) { try { titleX = Integer.MAX_VALUE; @@ -4949,9 +5019,9 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe } catch (Exception e) { FileLog.e(e); } - if (titleIsRTL && isSmallImage) { - linkPreviewMaxWidth -= AndroidUtilities.dp(48); - } + } + if (titleIsRTL && isSmallImage) { + linkPreviewMaxWidth -= AndroidUtilities.dp(48); } boolean authorIsRTL = false; @@ -5073,7 +5143,7 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe documentAttach = document; documentAttachType = DOCUMENT_ATTACH_TYPE_ROUND; } else if (MessageObject.isGifDocument(document, messageObject.hasValidGroupId())) { - if (!messageObject.isGame() && !SharedConfig.autoplayGifs) { + if (!messageObject.isGame() && !SharedConfig.isAutoplayGifs()) { messageObject.gifState = 1; } photoImage.setAllowStartAnimation(messageObject.gifState != 1); @@ -5427,7 +5497,7 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe } else if (documentAttachType == DOCUMENT_ATTACH_TYPE_VIDEO) { photoImage.setNeedsQualityThumb(true); photoImage.setShouldGenerateQualityThumb(true); - if (SharedConfig.autoplayVideo && (!currentMessageObject.hasMediaSpoilers() || currentMessageObject.isMediaSpoilersRevealed || currentMessageObject.revealingMediaSpoilers) && ( + if (SharedConfig.isAutoplayVideo() && (!currentMessageObject.hasMediaSpoilers() || currentMessageObject.isMediaSpoilersRevealed || currentMessageObject.revealingMediaSpoilers) && ( currentMessageObject.mediaExists || messageObject.canStreamVideo() && DownloadController.getInstance(currentAccount).canDownloadMedia(currentMessageObject) )) { @@ -5683,7 +5753,8 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe } } } - if (user != null) { + + if (user != null || !TextUtils.isEmpty(messageObject.vCardData) || MessageObject.getMedia(messageObject.messageOwner) instanceof TLRPC.TL_messageMediaContact) { drawInstantView = true; drawInstantViewType = 5; } @@ -6138,7 +6209,7 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe int photoHeight = 0; int additionHeight = 0; - if (messageObject.gifState != 2 && !SharedConfig.autoplayGifs && (messageObject.type == MessageObject.TYPE_GIF || messageObject.type == MessageObject.TYPE_ROUND_VIDEO)) { + if (messageObject.gifState != 2 && 
!SharedConfig.isAutoplayGifs() && (messageObject.type == MessageObject.TYPE_GIF || messageObject.type == MessageObject.TYPE_ROUND_VIDEO)) { messageObject.gifState = 1; } @@ -6563,8 +6634,13 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe } } } else if (messageObject.isAnimatedEmoji()) { - if (messageObject.emojiAnimatedSticker == null && messageObject.emojiAnimatedStickerId != null) { - filter = String.format(Locale.US, "%d_%d_nr_messageId=%d", w, h, messageObject.stableId); + if (!LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_CHAT)) { + filter = String.format(Locale.US, "%d_%d_nr_messageId=%d" + messageObject.emojiAnimatedStickerColor, w, h, messageObject.stableId); + thumb = DocumentObject.getCircleThumb(.4f, Theme.key_chat_serviceBackground, resourcesProvider, 0.65f); + photoImage.setAutoRepeat(3); + messageObject.loadAnimatedEmojiDocument(); + } else if (messageObject.emojiAnimatedSticker == null && messageObject.emojiAnimatedStickerId != null) { + filter = String.format(Locale.US, "%d_%d_nr_messageId=%d" + messageObject.emojiAnimatedStickerColor, w, h, messageObject.stableId); thumb = DocumentObject.getCircleThumb(.4f, Theme.key_chat_serviceBackground, resourcesProvider, 0.65f); photoImage.setAutoRepeat(1); messageObject.loadAnimatedEmojiDocument(); @@ -7137,7 +7213,7 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe currentPhotoObjectThumb.size = -1; } - if (SharedConfig.autoplayVideo && (!currentMessageObject.hasMediaSpoilers() || currentMessageObject.isMediaSpoilersRevealed || currentMessageObject.revealingMediaSpoilers) && messageObject.type == MessageObject.TYPE_VIDEO && !messageObject.needDrawBluredPreview() && + if (SharedConfig.isAutoplayVideo() && (!currentMessageObject.hasMediaSpoilers() || currentMessageObject.isMediaSpoilersRevealed || currentMessageObject.revealingMediaSpoilers) && messageObject.type == MessageObject.TYPE_VIDEO && !messageObject.needDrawBluredPreview() && (currentMessageObject.mediaExists || messageObject.canStreamVideo() && DownloadController.getInstance(currentAccount).canDownloadMedia(currentMessageObject)) ) { if (currentPosition != null) { @@ -7550,7 +7626,20 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe int buttonWidth = (widthForButtons - AndroidUtilities.dp(5) * (buttonsCount - 1) - AndroidUtilities.dp(2)) / buttonsCount; for (int b = 0; b < row.buttons.size(); b++) { BotButton botButton = new BotButton(); - botButton.button = row.buttons.get(b); + TLRPC.TL_keyboardButtonRequestPeer button = new TLRPC.TL_keyboardButtonRequestPeer(); + button.button_id = 0; + button.text = "Request peer"; + button.peer_type = new TLRPC.TL_requestPeerTypeChat(); + ((TLRPC.TL_requestPeerTypeChat) button.peer_type).forum = true; +// ((TLRPC.TL_requestPeerTypeChat) button.peer_type).user_admin_rights = new TLRPC.TL_chatAdminRights(); +// ((TLRPC.TL_requestPeerTypeChat) button.peer_type).user_admin_rights.change_info = true; +// ((TLRPC.TL_requestPeerTypeChat) button.peer_type).user_admin_rights.delete_messages = true; +// ((TLRPC.TL_requestPeerTypeChat) button.peer_type).user_admin_rights.pin_messages = true; +// ((TLRPC.TL_requestPeerTypeChat) button.peer_type).user_admin_rights.anonymous = true; +// ((TLRPC.TL_requestPeerTypeChat) button.peer_type).user_admin_rights.post_messages = true; +// ((TLRPC.TL_requestPeerTypeChat) button.peer_type).premium = false; +// botButton.button = button; + botButton.button = row.buttons.get(b); String key = 
Utilities.bytesToHex(botButton.button.data); String position = a + "" + b; BotButton oldButton; @@ -7569,7 +7658,7 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe botButtonsByData.put(key, botButton); botButtonsByPosition.put(position, botButton); botButton.x = b * (buttonWidth + AndroidUtilities.dp(5)); - botButton.y = a * AndroidUtilities.dp(44 + 4) + AndroidUtilities.dp(5); + botButton.y = a * AndroidUtilities.dp(44 + 4) + AndroidUtilities.dp(2.5f); botButton.width = buttonWidth; botButton.height = AndroidUtilities.dp(44); CharSequence buttonText; @@ -7738,6 +7827,17 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe statusDrawableAnimator.removeAllListeners(); statusDrawableAnimator.cancel(); } + if (translationLoadingFloat != null) { + translationLoadingFloat.set(0, true); + } + if (translationLoadingPath != null) { + translationLoadingPath.reset(); + translationLoadingPath = null; + } + if (translationLoadingDrawable != null) { + translationLoadingDrawable.reset(); + translationLoadingDrawable = null; + } transitionParams.lastStatusDrawableParams = -1; statusDrawableAnimationInProgress = false; @@ -7800,7 +7900,7 @@ private void setMessageContent(MessageObject messageObject, MessageObject.Groupe } private boolean loopStickers() { - return SharedConfig.loopStickers && !SharedConfig.getLiteMode().enabled(); + return LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_STICKERS_CHAT); } private void highlightCaptionLink(URLSpan link) { @@ -7836,13 +7936,18 @@ private void calculateUnlockXY() { } private void updateFlagSecure() { - boolean flagSecure = currentMessageObject != null && currentMessageObject.messageOwner != null && currentMessageObject.messageOwner.noforwards && !NekoXConfig.disableFlagSecure || currentMessageObject.hasRevealedExtendedMedia(); - Activity activity = AndroidUtilities.findActivity(getContext()); - if (flagSecure && unregisterFlagSecure == null && activity != null) { - unregisterFlagSecure = AndroidUtilities.registerFlagSecure(activity.getWindow()); - } else if (!flagSecure && unregisterFlagSecure != null) { - unregisterFlagSecure.run(); - unregisterFlagSecure = null; + if (flagSecure == null) { + Activity activity = AndroidUtilities.findActivity(getContext()); + Window window = activity == null ? 
null : activity.getWindow(); + if (window != null) { + flagSecure = new FlagSecureReason(window, () -> currentMessageObject != null && !NekoXConfig.disableFlagSecure && currentMessageObject.messageOwner != null && (currentMessageObject.messageOwner.noforwards || currentMessageObject.hasRevealedExtendedMedia())); + if (attachedToWindow) { + flagSecure.attach(); + } + } + } + if (flagSecure != null) { + flagSecure.invalidate(); } } @@ -8239,7 +8344,20 @@ private void updateWaveform() { if (seekBarWaveform != null) { seekBarWaveform.setWaveform(waveform); } - useTranscribeButton = currentMessageObject != null && (!currentMessageObject.isOutOwner() || currentMessageObject.isSent()) && (UserConfig.getInstance(currentAccount).isPremium() || !MessagesController.getInstance(currentAccount).didPressTranscribeButtonEnough() && (currentMessageObject.messageOwner != null && currentMessageObject.messageOwner.voiceTranscriptionForce || currentMessageObject.getDuration() >= 60) && !MessagesController.getInstance(currentAccount).premiumLocked) && (currentMessageObject.isVoice() && useSeekBarWaveform || currentMessageObject.isRoundVideo()) && currentMessageObject.messageOwner != null && !(MessageObject.getMedia(currentMessageObject.messageOwner) instanceof TLRPC.TL_messageMediaWebPage); + useTranscribeButton = ( + currentMessageObject != null && + (!currentMessageObject.isOutOwner() || currentMessageObject.isSent()) && + ( + UserConfig.getInstance(currentAccount).isPremium() || + !MessagesController.getInstance(currentAccount).didPressTranscribeButtonEnough() && !currentMessageObject.isOutOwner() && ( + currentMessageObject.messageOwner != null && currentMessageObject.messageOwner.voiceTranscriptionForce || + currentMessageObject.getDuration() >= 60 + ) && !MessagesController.getInstance(currentAccount).premiumLocked + ) && ( + currentMessageObject.isVoice() && useSeekBarWaveform || + currentMessageObject.isRoundVideo() + ) && currentMessageObject.messageOwner != null && !(MessageObject.getMedia(currentMessageObject.messageOwner) instanceof TLRPC.TL_messageMediaWebPage) + ); updateSeekBarWaveformWidth(null); } @@ -9437,11 +9555,11 @@ private void drawContent(Canvas canvas) { ); } } - drawMessageText(canvas, transitionParams.animateOutTextBlocks, false, (1.0f - transitionParams.animateChangeProgress), false); - drawMessageText(canvas, currentMessageObject.textLayoutBlocks, true, transitionParams.animateChangeProgress, false); + drawMessageText(canvas, transitionParams.animateOutTextBlocks, transitionParams.animateOutTextXOffset, false, (1.0f - transitionParams.animateChangeProgress), false); + drawMessageText(canvas, currentMessageObject.textLayoutBlocks, currentMessageObject.textXOffset, true, transitionParams.animateChangeProgress, false); canvas.restore(); } else { - drawMessageText(canvas, currentMessageObject.textLayoutBlocks, true, 1.0f, false); + drawMessageText(canvas, currentMessageObject.textLayoutBlocks, currentMessageObject.textXOffset, true, 1.0f, false); } } @@ -9460,7 +9578,10 @@ private void drawContent(Canvas canvas) { canvas.drawCircle(photoImage.getCenterX(), photoImage.getCenterY(), photoImage.getImageWidth() / 2f, drillHolePaint); } if (isRoundVideo && ( - MediaController.getInstance().isPlayingMessage(currentMessageObject) && MediaController.getInstance().isVideoDrawingReady() && canvas.isHardwareAccelerated() && (currentMessageObject == null || !currentMessageObject.isVoiceTranscriptionOpen() || pipFloat >= 1) + MediaController.getInstance().isPlayingMessage(currentMessageObject) && + 
MediaController.getInstance().isVideoDrawingReady() && + canvas.isHardwareAccelerated() && + (currentMessageObject == null || !currentMessageObject.isVoiceTranscriptionOpen() || pipFloat >= 1) )) { imageDrawn = true; drawTime = true; @@ -9898,8 +10019,8 @@ protected void onOpen() { } else { linkX = backgroundDrawableLeft + AndroidUtilities.dp(currentMessageObject.isOutOwner() ? 11 : 17); } - int startY = totalHeight - AndroidUtilities.dp(drawPinnedTop ? 9 : 10) - linkPreviewHeight - AndroidUtilities.dp(8); - int linkPreviewY = startY; + float startY = totalHeight - AndroidUtilities.dp(drawPinnedTop ? 9 : 10) - linkPreviewHeight - AndroidUtilities.dp(8); + float linkPreviewY = startY; Theme.chat_replyLinePaint.setColor(getThemedColor(currentMessageObject.isOutOwner() ? Theme.key_chat_outPreviewLine : Theme.key_chat_inPreviewLine)); AndroidUtilities.rectTmp.set(linkX, linkPreviewY - AndroidUtilities.dp(3), linkX + AndroidUtilities.dp(3), linkPreviewY + linkPreviewHeight); @@ -9941,7 +10062,7 @@ protected void onOpen() { if (linkPreviewY != startY) { linkPreviewY += AndroidUtilities.dp(2); } - descriptionY = linkPreviewY - AndroidUtilities.dp(3); + descriptionY = (int) linkPreviewY - AndroidUtilities.dp(3); canvas.save(); canvas.translate(linkX + AndroidUtilities.dp(10) + descriptionX, descriptionY); descriptionLayout.draw(canvas); @@ -10245,6 +10366,7 @@ public void drawLinkPreview(Canvas canvas, float alpha) { startY = textY + currentMessageObject.textHeight + AndroidUtilities.dp(8); linkX = unmovedTextX + AndroidUtilities.dp(1); } + startY += (int) transitionParams.deltaBottom; int linkPreviewY = startY; int smallImageStartY = 0; @@ -10593,10 +10715,6 @@ public void drawLinkPreview(Canvas canvas, float alpha) { } instantButtonRect.set(linkX, instantY, linkX + instantWidth, instantY + AndroidUtilities.dp(36)); - if (instantButtonLoading != null) { - instantButtonLoading.setBounds(instantButtonRect); - instantButtonLoading.setRadiiDp(6); - } if (instantButtonPressed && instantButtonPressProgress != 1f) { instantButtonPressProgress += (float) Math.min(40, 1000f / AndroidUtilities.screenRefreshRate) / 100f; instantButtonPressProgress = Utilities.clamp(instantButtonPressProgress, 1f, 0); @@ -10613,6 +10731,8 @@ public void drawLinkPreview(Canvas canvas, float alpha) { selectorDrawable[0].draw(canvas); } if (instantButtonLoading != null && !instantButtonLoading.isDisappeared()) { + instantButtonLoading.setBounds(instantButtonRect); + instantButtonLoading.setRadiiDp(6); instantButtonLoading.draw(canvas); invalidate(); } @@ -10637,7 +10757,7 @@ private boolean shouldDrawMenuDrawable() { return currentMessagesGroup == null || (currentPosition.flags & MessageObject.POSITION_FLAG_TOP) != 0; } - private void drawBotButtons(Canvas canvas, ArrayList botButtons, float alpha) { + private void drawBotButtons(Canvas canvas, ArrayList botButtons, int alpha) { int addX; if (currentMessageObject.isOutOwner()) { addX = getMeasuredWidth() - widthForButtons - AndroidUtilities.dp(10); @@ -10654,8 +10774,8 @@ private void drawBotButtons(Canvas canvas, ArrayList botButtons, floa } } rect.set(0, top, getMeasuredWidth(), top + height); - if (alpha != 1f) { - canvas.saveLayerAlpha(rect, (int) (255 * alpha), Canvas.ALL_SAVE_FLAG); + if (alpha != 0xFF) { + canvas.saveLayerAlpha(rect, alpha, Canvas.ALL_SAVE_FLAG); } else { canvas.save(); } @@ -10703,14 +10823,14 @@ private void drawBotButtons(Canvas canvas, ArrayList botButtons, floa Theme.multAlpha(Theme.getColor(Theme.key_chat_serviceBackgroundSelector, 
resourcesProvider), 3f), Theme.multAlpha(Theme.getColor(Theme.key_chat_serviceBackgroundSelector, resourcesProvider), 10f) ); - button.loadingDrawable.setAlpha((int) (0xFF * alpha)); + button.loadingDrawable.setAlpha(0xFF); button.loadingDrawable.draw(canvas); invalidateOutbounds(); } if (button.selectorDrawable != null) { button.selectorDrawable.setBounds(button.x + addX, (int) y, button.x + addX + button.width, (int) y + button.height); - button.selectorDrawable.setAlpha((int) (0xFF * alpha)); + button.selectorDrawable.setAlpha(0xFF); button.selectorDrawable.draw(canvas); } @@ -10733,7 +10853,7 @@ private void drawBotButtons(Canvas canvas, ArrayList botButtons, floa int x = button.x + button.width - AndroidUtilities.dp(3) - drawable.getIntrinsicWidth() + addX; setDrawableBounds(drawable, x, y + AndroidUtilities.dp(3)); drawable.draw(canvas); - } else if (button.button instanceof TLRPC.TL_keyboardButtonSwitchInline) { + } else if (button.button instanceof TLRPC.TL_keyboardButtonSwitchInline || button.button instanceof TLRPC.TL_keyboardButtonRequestPeer) { Drawable drawable = getThemedDrawable(Theme.key_drawable_botInline); int x = button.x + button.width - AndroidUtilities.dp(3) - drawable.getIntrinsicWidth() + addX; setDrawableBounds(drawable, x, y + AndroidUtilities.dp(3)); @@ -10753,8 +10873,12 @@ private boolean allowDrawPhotoImage() { return !currentMessageObject.hasMediaSpoilers() || currentMessageObject.isMediaSpoilersRevealed || mediaSpoilerRevealProgress != 0f || blurredPhotoImage.getBitmap() == null; } - @SuppressLint("Range") public void drawMessageText(Canvas canvas, ArrayList textLayoutBlocks, boolean origin, float alpha, boolean drawOnlyText) { + drawMessageText(canvas, textLayoutBlocks, currentMessageObject == null ? 0 : currentMessageObject.textXOffset, origin, alpha, drawOnlyText); + } + + @SuppressLint("Range") + public void drawMessageText(Canvas canvas, ArrayList textLayoutBlocks, float rtlOffset, boolean origin, float alpha, boolean drawOnlyText) { if (textLayoutBlocks == null || textLayoutBlocks.isEmpty() || alpha == 0) { return; } @@ -10776,6 +10900,62 @@ public void drawMessageText(Canvas canvas, ArrayList 0) { + if (translationLoadingDrawable == null) { + translationLoadingDrawable = new LoadingDrawable(); + translationLoadingDrawable.setAppearByGradient(true); + if (translationLoadingPath == null) { + translationLoadingPath = new LinkPath(true); + } + translationLoadingDrawable.usePath(translationLoadingPath); + translationLoadingDrawable.setRadiiDp(5); + + translationLoadingDrawable.reset(); + } + + if (translationLoadingDrawableText != textLayoutBlocks) { + translationLoadingDrawableText = textLayoutBlocks; + translationLoadingPath.reset(); + for (int i = 0; i < textLayoutBlocks.size(); ++i) { + MessageObject.TextLayoutBlock block = textLayoutBlocks.get(i); + if (block != null && block.textLayout != null) { + translationLoadingPath.setCurrentLayout(block.textLayout, 0, block.isRtl() ? 
rtlOffset : 0, block.textYOffset); + block.textLayout.getSelectionPath(0, block.textLayout.getText().length(), translationLoadingPath); + } + } + translationLoadingDrawable.updateBounds(); + } + + if (translating && (translationLoadingDrawable.isDisappearing() || translationLoadingDrawable.isDisappeared())) { + translationLoadingDrawable.reset(); + translationLoadingDrawable.resetDisappear(); + } else if (!translating && !translationLoadingDrawable.isDisappearing() && !translationLoadingDrawable.isDisappeared()) { + translationLoadingDrawable.disappear(); + } + + int color = getThemedColor(currentMessageObject != null && currentMessageObject.isOutOwner() ? Theme.key_chat_messageLinkOut : Theme.key_chat_messageLinkIn); + translationLoadingDrawable.setColors( + Theme.multAlpha(color, .05f), + Theme.multAlpha(color, .15f), + Theme.multAlpha(color, .1f), + Theme.multAlpha(color, .3f) + ); + canvas.save(); + canvas.translate(textX, textY + transitionYOffsetForDrawables); + translationLoadingDrawable.setAlpha((int) (0xFF * alpha * translationLoading)); + translationLoadingDrawable.draw(canvas); + canvas.restore(); + invalidate(); + } + } + if (firstVisibleBlockNum >= 0) { int restore = Integer.MIN_VALUE; int oldAlpha = 0; @@ -10816,7 +10996,7 @@ public void drawMessageText(Canvas canvas, ArrayList 0) { replyTextWidth += (int) Math.ceil(replyTextLayout.getLineWidth(0)) + AndroidUtilities.dp(8); @@ -12852,9 +13057,7 @@ private String getAuthorName() { } return ""; } - { - return "DELETED"; - } + return "DELETED"; } private Object getAuthorStatus() { @@ -12862,10 +13065,9 @@ private Object getAuthorStatus() { return null; } if (currentUser != null) { - if (currentUser.emoji_status instanceof TLRPC.TL_emojiStatusUntil && ((TLRPC.TL_emojiStatusUntil) currentUser.emoji_status).until > (int) (System.currentTimeMillis() / 1000)) { - return ((TLRPC.TL_emojiStatusUntil) currentUser.emoji_status).document_id; - } else if (currentUser.emoji_status instanceof TLRPC.TL_emojiStatus) { - return ((TLRPC.TL_emojiStatus) currentUser.emoji_status).document_id; + Long emojiStatusId = UserObject.getEmojiStatusDocumentId(currentUser); + if (emojiStatusId != null) { + return emojiStatusId; } else if (currentUser.premium) { return ContextCompat.getDrawable(ApplicationLoader.applicationContext, R.drawable.msg_premium_liststar).mutate(); } @@ -13212,7 +13414,7 @@ protected void onDraw(Canvas canvas) { } } - if ((drawBackground || transitionParams.animateDrawBackground) && shouldDrawSelectionOverlay() && currentMessagesGroup == null) { + if ((drawBackground || transitionParams.animateDrawBackground) && shouldDrawSelectionOverlay() && currentMessagesGroup == null && hasSelectionOverlay()) { if (selectionOverlayPaint == null) { selectionOverlayPaint = new Paint(Paint.ANTI_ALIAS_FLAG); } @@ -13735,6 +13937,7 @@ public void drawOutboundsContent(Canvas canvas) { if (currentNameStatusDrawable != null && drawNameLayout && nameLayout != null) { int color; + float nameX, nameY; if (currentMessageObject.shouldDrawWithoutBackground()) { color = getThemedColor(Theme.key_chat_stickerNameText); if (currentMessageObject.isOutOwner()) { @@ -13746,9 +13949,9 @@ public void drawOutboundsContent(Canvas canvas) { nameX -= nameOffsetX; } else { if (mediaBackground || currentMessageObject.isOutOwner()) { - nameX = backgroundDrawableLeft + transitionParams.deltaLeft + AndroidUtilities.dp(11) - nameOffsetX + getExtraTextX(); + nameX = backgroundDrawableLeft + transitionParams.deltaLeft + AndroidUtilities.dp(11) + getExtraTextX(); } else { - nameX 
= backgroundDrawableLeft + transitionParams.deltaLeft + AndroidUtilities.dp(!mediaBackground && drawPinnedBottom ? 11 : 17) - nameOffsetX + getExtraTextX(); + nameX = backgroundDrawableLeft + transitionParams.deltaLeft + AndroidUtilities.dp(!mediaBackground && drawPinnedBottom ? 11 : 17) + getExtraTextX(); } if (currentUser != null) { if (currentBackgroundDrawable != null && currentBackgroundDrawable.hasGradient()) { @@ -13789,7 +13992,7 @@ public void drawOutboundsContent(Canvas canvas) { } currentNameStatusDrawable.setBounds( (int) (Math.abs(nx) + nameLayoutWidth + AndroidUtilities.dp(2)), - (int) nameY + nameLayout.getHeight() / 2 - AndroidUtilities.dp(10), + (int) (nameY + nameLayout.getHeight() / 2 - AndroidUtilities.dp(10)), (int) (Math.abs(nx) + nameLayoutWidth + AndroidUtilities.dp(22)), (int) (nameY + nameLayout.getHeight() / 2 + AndroidUtilities.dp(10)) ); @@ -13823,10 +14026,12 @@ public void drawOutboundsContent(Canvas canvas) { } if (!transitionParams.transitionBotButtons.isEmpty() && transitionParams.animateBotButtonsChanged) { - drawBotButtons(canvas, transitionParams.transitionBotButtons, 1f - transitionParams.animateChangeProgress); + float t = transitionParams.animateChangeProgress; + t = MathUtils.clamp(1f - (float) Math.pow(t, 2f), 0f, 1f); + drawBotButtons(canvas, transitionParams.transitionBotButtons, (int) (0xFF * t)); } if (!botButtons.isEmpty()) { - drawBotButtons(canvas, botButtons, transitionParams.animateBotButtonsChanged ? transitionParams.animateChangeProgress : 1f); + drawBotButtons(canvas, botButtons, transitionParams.animateBotButtonsChanged ? (int) (0xFF * transitionParams.animateChangeProgress) : 0xFF); } drawSideButton(canvas); } @@ -14364,13 +14569,13 @@ public void drawNamesLayout(Canvas canvas, float alpha) { } else { ax = backgroundDrawableLeft + transitionParams.deltaLeft + backgroundDrawableRight + AndroidUtilities.dp(22) + nameWidth - adminLayout.getLineWidth(0); } - } else if (!mediaBackground && currentMessageObject.isOutOwner()) { ax = backgroundDrawableLeft + backgroundDrawableRight - AndroidUtilities.dp(17) - adminLayout.getLineWidth(0); } else { ax = backgroundDrawableLeft + backgroundDrawableRight - AndroidUtilities.dp(11) - adminLayout.getLineWidth(0); } } + ax += transitionParams.deltaRight; canvas.translate(ax, nameY + AndroidUtilities.dp(0.5f)); if (transitionParams.animateSign) { Theme.chat_adminPaint.setAlpha((int) (Color.alpha(color) * transitionParams.animateChangeProgress)); @@ -14821,12 +15026,36 @@ public void drawNamesLayout(Canvas canvas, float alpha) { replyNameLayout.draw(canvas); canvas.restore(); } + int spoilersColor; + if (currentMessageObject != null && currentMessageObject.isOut() && !ChatObject.isChannelAndNotMegaGroup(currentMessageObject.getChatId(), currentAccount)) { + spoilersColor = getThemedColor(Theme.key_chat_outTimeText); + } else { + spoilersColor = Theme.chat_replyTextPaint.getColor(); + } + if (transitionParams.animateReplyTextLayout != null && transitionParams.animateChangeProgress < 1) { + canvas.save(); + canvas.clipRect(replySelectorRect); + + canvas.save(); + canvas.translate(forwardNameX + replyTextOffset - transitionParams.animateReplyTextOffset, replyStartY + Theme.chat_replyNamePaint.getTextSize() + AndroidUtilities.dp(5)); + int wasAlpha2 = Theme.chat_replyTextPaint.getAlpha(); + Theme.chat_replyTextPaint.setAlpha((int) (wasAlpha2 * (1f - transitionParams.animateChangeProgress))); + SpoilerEffect.renderWithRipple(this, invalidateSpoilersParent, spoilersColor, -AndroidUtilities.dp(2), 
spoilersPatchedReplyTextLayout, transitionParams.animateReplyTextLayout, replySpoilers, canvas, false); + AnimatedEmojiSpan.drawAnimatedEmojis(canvas, transitionParams.animateReplyTextLayout, transitionParams.animateOutAnimateEmojiReply, 0, replySpoilers, 0, 0, 0, alpha, Theme.chat_animatedEmojiTextColorFilter); + Theme.chat_replyTextPaint.setAlpha(wasAlpha2); + canvas.restore(); + } if (replyTextLayout != null) { canvas.save(); canvas.translate(forwardNameX, replyStartY + Theme.chat_replyNamePaint.getTextSize() + AndroidUtilities.dp(5)); - int spoilersColor = currentMessageObject.isOut() && !ChatObject.isChannelAndNotMegaGroup(currentMessageObject.getChatId(), currentAccount) ? getThemedColor(Theme.key_chat_outTimeText) : replyTextLayout.getPaint().getColor(); + int wasAlpha2 = Theme.chat_replyTextPaint.getAlpha(); + Theme.chat_replyTextPaint.setAlpha((int) (wasAlpha2 * (transitionParams.animateReplyTextLayout != null ? transitionParams.animateChangeProgress : 1))); SpoilerEffect.renderWithRipple(this, invalidateSpoilersParent, spoilersColor, -AndroidUtilities.dp(2), spoilersPatchedReplyTextLayout, replyTextLayout, replySpoilers, canvas, false); AnimatedEmojiSpan.drawAnimatedEmojis(canvas, replyTextLayout, animatedEmojiReplyStack, 0, replySpoilers, 0, 0, 0, alpha, Theme.chat_animatedEmojiTextColorFilter); + Theme.chat_replyTextPaint.setAlpha(wasAlpha2); + canvas.restore(); + } + if (transitionParams.animateReplyTextLayout != null && transitionParams.animateChangeProgress < 1) { canvas.restore(); } @@ -14872,17 +15101,25 @@ public float getHighlightAlpha() { } public void setCheckBoxVisible(boolean visible, boolean animated) { - if (visible && checkBox == null) { - checkBox = new CheckBoxBase(this, 21, resourcesProvider); - if (attachedToWindow) { - checkBox.onAttachedToWindow(); + if (visible) { + if (checkBox == null) { + checkBox = new CheckBoxBase(this, 21, resourcesProvider); + if (attachedToWindow) { + checkBox.onAttachedToWindow(); + } + } else { + checkBox.setResourcesProvider(resourcesProvider); } } - if (visible && mediaCheckBox == null && ((currentMessagesGroup != null && currentMessagesGroup.messages.size() > 1) || (groupedMessagesToSet != null && groupedMessagesToSet.messages.size() > 1))) { - mediaCheckBox = new CheckBoxBase(this, 21, resourcesProvider); - mediaCheckBox.setUseDefaultCheck(true); - if (attachedToWindow) { - mediaCheckBox.onAttachedToWindow(); + if (visible && ((currentMessagesGroup != null && currentMessagesGroup.messages.size() > 1) || (groupedMessagesToSet != null && groupedMessagesToSet.messages.size() > 1))) { + if (mediaCheckBox == null) { + mediaCheckBox = new CheckBoxBase(this, 21, resourcesProvider); + mediaCheckBox.setUseDefaultCheck(true); + if (attachedToWindow) { + mediaCheckBox.onAttachedToWindow(); + } + } else { + mediaCheckBox.setResourcesProvider(resourcesProvider); } } if (checkBoxVisible == visible) { @@ -15314,6 +15551,49 @@ private void drawCaptionLayout(Canvas canvas, StaticLayout captionLayout, boolea } } canvas.translate(captionX, captionY); + boolean translating = MessagesController.getInstance(currentAccount).getTranslateController().isTranslating(getMessageObject(), getCurrentMessagesGroup()); + if (true) { + if (translationLoadingFloat == null) { + translationLoadingFloat = new AnimatedFloat(((View) getParent()), 350, CubicBezierInterpolator.EASE_OUT_QUINT); + } + float translationLoading = translationLoadingFloat.set(translating ? 
1 : 0); + if (translationLoading > 0) { + if (translationLoadingDrawable == null) { + translationLoadingDrawable = new LoadingDrawable(); + translationLoadingDrawable.setAppearByGradient(true); + if (translationLoadingPath == null) { + translationLoadingPath = new LinkPath(true); + } + translationLoadingDrawable.usePath(translationLoadingPath); + translationLoadingDrawable.setRadiiDp(5); + } + + if (translationLoadingDrawableLayout != captionLayout) { + translationLoadingDrawableLayout = captionLayout; + translationLoadingPath.setCurrentLayout(captionLayout, 0, 0); + captionLayout.getSelectionPath(0, captionLayout.getText().length(), translationLoadingPath); + translationLoadingDrawable.updateBounds(); + } + + if (translating && translationLoadingDrawable.isDisappearing() || translationLoadingDrawable.isDisappeared()) { + translationLoadingDrawable.reset(); + translationLoadingDrawable.resetDisappear(); + } else if (!translating && !translationLoadingDrawable.isDisappearing() && !translationLoadingDrawable.isDisappeared()) { + translationLoadingDrawable.disappear(); + } + + int color = getThemedColor(currentMessageObject != null && currentMessageObject.isOutOwner() ? Theme.key_chat_messageLinkOut : Theme.key_chat_messageLinkIn); + translationLoadingDrawable.setColors( + Theme.multAlpha(color, .05f), + Theme.multAlpha(color, .15f), + Theme.multAlpha(color, .1f), + Theme.multAlpha(color, .3f) + ); + translationLoadingDrawable.setAlpha((int) (0xFF * alpha * translationLoading)); + translationLoadingDrawable.draw(canvas); + invalidate(); + } + } if (links.draw(canvas)) { invalidate(); @@ -17220,15 +17500,15 @@ public void drawOverlays(Canvas canvas) { canvas.scale(scale, scale, radialProgress.getProgressRect().centerX(), radialProgress.getProgressRect().centerY()); restore = true; } - if ((!isRoundVideo || !hasLinkPreview) && (!currentMessageObject.needDrawBluredPreview() || !MediaController.getInstance().isPlayingMessage(currentMessageObject)) && !(currentMessageObject.hasMediaSpoilers() && (!currentMessageObject.isMediaSpoilersRevealed || !currentMessageObject.revealingMediaSpoilers) && SharedConfig.autoplayVideo && currentMessagesGroup == null && (radialProgress.getIcon() == MediaActionDrawable.ICON_PLAY || radialProgress.getIcon() == MediaActionDrawable.ICON_NONE))) { + if ((!isRoundVideo || !hasLinkPreview) && (!currentMessageObject.needDrawBluredPreview() || !MediaController.getInstance().isPlayingMessage(currentMessageObject)) && !(currentMessageObject.hasMediaSpoilers() && (!currentMessageObject.isMediaSpoilersRevealed || !currentMessageObject.revealingMediaSpoilers) && SharedConfig.isAutoplayVideo() && currentMessagesGroup == null && (radialProgress.getIcon() == MediaActionDrawable.ICON_PLAY || radialProgress.getIcon() == MediaActionDrawable.ICON_NONE))) { if (isRoundVideo && !on) { radialProgress.overrideCircleAlpha = .25f + .75f * (1f - getVideoTranscriptionProgress()); } - if ((!SharedConfig.autoplayVideo || currentMessagesGroup != null) && currentMessageObject.hasMediaSpoilers() && !currentMessageObject.isMediaSpoilersRevealed && radialProgress.getIcon() == MediaActionDrawable.ICON_PLAY) { + if ((!SharedConfig.isAutoplayVideo() || currentMessagesGroup != null) && currentMessageObject.hasMediaSpoilers() && !currentMessageObject.isMediaSpoilersRevealed && radialProgress.getIcon() == MediaActionDrawable.ICON_PLAY) { canvas.saveLayerAlpha(radialProgress.getProgressRect(), (int) (mediaSpoilerRevealProgress * 0xFF), Canvas.ALL_SAVE_FLAG); } radialProgress.draw(canvas); - if 
((!SharedConfig.autoplayVideo || currentMessagesGroup != null) && currentMessageObject.hasMediaSpoilers() && !currentMessageObject.isMediaSpoilersRevealed && radialProgress.getIcon() == MediaActionDrawable.ICON_PLAY) { + if ((!SharedConfig.isAutoplayVideo() || currentMessagesGroup != null) && currentMessageObject.hasMediaSpoilers() && !currentMessageObject.isMediaSpoilersRevealed && radialProgress.getIcon() == MediaActionDrawable.ICON_PLAY) { canvas.restore(); } if (isRoundVideo && !on) { @@ -17505,9 +17785,6 @@ public void setAnimationRunning(boolean animationRunning, boolean willRemoved) { } else { this.willRemoved = false; } - if (getParent() == null && attachedToWindow) { - onDetachedFromWindow(); - } } @Override @@ -17797,7 +18074,10 @@ public AccessibilityNodeInfo createAccessibilityNodeInfo(int virtualViewId) { if (virtualViewId == HOST_VIEW_ID) { AccessibilityNodeInfo info = AccessibilityNodeInfo.obtain(ChatMessageCell.this); onInitializeAccessibilityNodeInfo(info); - if (accessibilityText == null) { + final boolean unread = currentMessageObject != null && currentMessageObject.isOut() && !currentMessageObject.scheduled && currentMessageObject.isUnread(); + final boolean contentUnread = currentMessageObject != null && currentMessageObject.isContentUnread(); + final long fileSize = currentMessageObject != null ? currentMessageObject.loadedFileSize : 0; + if (accessibilityText == null || accessibilityTextUnread != unread || accessibilityTextContentUnread != contentUnread || accessibilityTextFileSize != fileSize) { SpannableStringBuilder sb = new SpannableStringBuilder(); if (isChat && currentUser != null && !currentMessageObject.isOut()) { sb.append(UserObject.getUserName(currentUser)); @@ -17812,6 +18092,12 @@ public AccessibilityNodeInfo createAccessibilityNodeInfo(int virtualViewId) { } } } + if (documentAttach != null && documentAttachType == DOCUMENT_ATTACH_TYPE_DOCUMENT) { + String fileName = FileLoader.getAttachFileName(documentAttach); + if (fileName.indexOf('.') != -1) { + sb.append(LocaleController.formatString(R.string.AccDescrDocumentType, fileName.substring(fileName.lastIndexOf('.') + 1).toUpperCase(Locale.ROOT))); + } + } if (!TextUtils.isEmpty(currentMessageObject.messageText)) { sb.append(currentMessageObject.messageText); } @@ -17861,15 +18147,6 @@ public AccessibilityNodeInfo createAccessibilityNodeInfo(int virtualViewId) { } sb.append(title); } - if (currentMessageObject.isVoiceTranscriptionOpen()) { - sb.append("\n"); - sb.append(currentMessageObject.getVoiceTranscription()); - } else { - if (MessageObject.getMedia(currentMessageObject.messageOwner) != null && !TextUtils.isEmpty(currentMessageObject.caption)) { - sb.append("\n"); - sb.append(currentMessageObject.caption); - } - } if (documentAttach != null) { if (documentAttachType == DOCUMENT_ATTACH_TYPE_VIDEO) { sb.append(", "); @@ -17880,6 +18157,15 @@ public AccessibilityNodeInfo createAccessibilityNodeInfo(int virtualViewId) { sb.append(AndroidUtilities.formatFileSize(documentAttach.size)); } } + if (currentMessageObject.isVoiceTranscriptionOpen()) { + sb.append("\n"); + sb.append(currentMessageObject.getVoiceTranscription()); + } else { + if (MessageObject.getMedia(currentMessageObject.messageOwner) != null && !TextUtils.isEmpty(currentMessageObject.caption)) { + sb.append("\n"); + sb.append(currentMessageObject.caption); + } + } if (currentMessageObject.isOut()) { if (currentMessageObject.isSent()) { sb.append("\n"); @@ -17895,7 +18181,7 @@ public AccessibilityNodeInfo createAccessibilityNodeInfo(int 
virtualViewId) { sb.append(LocaleController.getString("AccDescrMsgSending", R.string.AccDescrMsgSending)); final float sendingProgress = radialProgress.getProgress(); if (sendingProgress > 0f) { - sb.append(", ").append(Integer.toString(Math.round(sendingProgress * 100))).append("%"); + sb.append(Integer.toString(Math.round(sendingProgress * 100))).append("%"); } } else if (currentMessageObject.isSendError()) { sb.append("\n"); @@ -17978,6 +18264,9 @@ public void onClick(View view) { sb.setSpan(underlineSpan, start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } accessibilityText = sb; + accessibilityTextUnread = unread; + accessibilityTextContentUnread = contentUnread; + accessibilityTextFileSize = fileSize; } if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) { @@ -18084,7 +18373,7 @@ public void onClick(View view) { if (commentLayout != null) { info.addChild(ChatMessageCell.this, COMMENT); } - if (drawSideButton == 1) { + if (drawSideButton == 1 || drawSideButton == 2) { info.addChild(ChatMessageCell.this, SHARE); } if (replyNameLayout != null) { @@ -18691,8 +18980,12 @@ public class TransitionParams { private ArrayList animateOutTextBlocks; private ArrayList lastDrawingTextBlocks; + private float animateOutTextXOffset; private AnimatedEmojiSpan.EmojiGroupedSpans animateOutAnimateEmoji; + private StaticLayout animateReplyTextLayout; + private AnimatedEmojiSpan.EmojiGroupedSpans animateOutAnimateEmojiReply; + private boolean animateEditedEnter; private StaticLayout animateEditedLayout; private StaticLayout animateTimeLayout; @@ -18745,6 +19038,7 @@ public class TransitionParams { public float lastDrawingTextX; public float animateFromTextY; + public float lastTextXOffset; public int lastTopOffset; public boolean animateForwardedLayout; @@ -18758,6 +19052,10 @@ public class TransitionParams { int animateForwardNameWidth; int lastForwardNameWidth; boolean animateBotButtonsChanged; + public StaticLayout lastDrawnReplyTextLayout; + + public int lastReplyTextXOffset; + public float animateReplyTextOffset; public void recordDrawingState() { wasDraw = true; @@ -18832,6 +19130,10 @@ public void recordDrawingState() { lastBackgroundLeft = getCurrentBackgroundLeft(); if (currentBackgroundDrawable != null) lastBackgroundRight = getCurrentBackgroundRight(); + lastTextXOffset = currentMessageObject.textXOffset; + + lastDrawnReplyTextLayout = replyTextLayout; + lastReplyTextXOffset = replyTextOffset; reactionsLayoutInBubble.recordDrawingState(); if (replyNameLayout != null) { @@ -18878,6 +19180,7 @@ public boolean animateChange() { if (!sameText) { animateMessageText = true; animateOutTextBlocks = lastDrawingTextBlocks; + animateOutTextXOffset = lastTextXOffset; animateOutAnimateEmoji = AnimatedEmojiSpan.update(AnimatedEmojiDrawable.CACHE_TYPE_MESSAGES, ChatMessageCell.this, animateOutAnimateEmoji, lastDrawingTextBlocks, true); animatedEmojiStack = AnimatedEmojiSpan.update(AnimatedEmojiDrawable.CACHE_TYPE_MESSAGES, ChatMessageCell.this, animatedEmojiStack, currentMessageObject.textLayoutBlocks); changed = true; @@ -18885,6 +19188,12 @@ public boolean animateChange() { animatedEmojiStack = AnimatedEmojiSpan.update(AnimatedEmojiDrawable.CACHE_TYPE_MESSAGES, ChatMessageCell.this, animatedEmojiStack, currentMessageObject.textLayoutBlocks); } } + if (replyTextLayout != lastDrawnReplyTextLayout) { + animateReplyTextLayout = lastDrawnReplyTextLayout; + animateReplyTextOffset = lastReplyTextXOffset; + animateOutAnimateEmojiReply = AnimatedEmojiSpan.update(AnimatedEmojiDrawable.CACHE_TYPE_MESSAGES, 
ChatMessageCell.this, false, animateOutAnimateEmojiReply, true, lastDrawnReplyTextLayout); + changed = true; + } if (edited && !lastDrawingEdited && timeLayout != null) { String customStr = NaConfig.INSTANCE.getCustomEditedMessage().String(); String editedStr = customStr.equals("") ? LocaleController.getString("EditedMessage", R.string.EditedMessage) : customStr; @@ -18910,6 +19219,7 @@ public boolean animateChange() { shouldAnimateTimeX = true; changed = true; } + accessibilityText = null; } else if (!edited && lastDrawingEdited && timeLayout != null) { animateTimeLayout = lastTimeLayout; animateEditedWidthDiff = timeWidth - lastTimeWidth; @@ -18967,7 +19277,7 @@ public boolean animateChange() { for (int i = 0; i < botButtons.size(); i++) { BotButton button1 = botButtons.get(i); BotButton button2 = lastDrawBotButtons.get(i); - if (button1.x != button2.x || button1.width != button2.width) { + if (button1.x != button2.x || button1.width != button2.width || button1.title != button2.title) { animateBotButtonsChanged = true; break; } @@ -18993,6 +19303,7 @@ public boolean animateChange() { animatePinned = true; changed = true; timeDrawablesIsChanged = true; + accessibilityText = null; } if ((lastRepliesLayout != null || repliesLayout != null) && lastRepliesCount != getRepliesCount()) { @@ -19000,12 +19311,14 @@ public boolean animateChange() { animateReplies = true; changed = true; timeDrawablesIsChanged = true; + accessibilityText = null; } if (lastViewsLayout != null && this.lastViewsCount != getMessageObject().messageOwner.views) { animateViewsLayout = lastViewsLayout; changed = true; timeDrawablesIsChanged = true; + accessibilityText = null; } if (commentLayout != null && lastCommentsCount != getRepliesCount()) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/CheckBoxCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/CheckBoxCell.java index 757e14bd0e..c771bc6884 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/CheckBoxCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/CheckBoxCell.java @@ -257,11 +257,14 @@ public void setTextColor(int color) { } public void setText(CharSequence text, String value, boolean checked, boolean divider) { + setText(text, value, checked, divider, false); + } + public void setText(CharSequence text, String value, boolean checked, boolean divider, boolean animated) { textView.setText(text); if (checkBoxRound != null) { - checkBoxRound.setChecked(checked, false); + checkBoxRound.setChecked(checked, animated); } else { - checkBoxSquare.setChecked(checked, false); + checkBoxSquare.setChecked(checked, animated); } valueTextView.setText(value); needDivider = divider; @@ -359,6 +362,10 @@ public void setCheckBoxColor(String background, String background1, String check } } + public CheckBox2 getCheckBoxRound() { + return checkBoxRound; + } + public void setSquareCheckBoxColor(String uncheckedColor, String checkedColor, String checkColor) { if (checkBoxSquare != null) { checkBoxSquare.setColors(uncheckedColor, checkedColor, checkColor); @@ -385,4 +392,12 @@ private int getThemedColor(String key) { Integer color = resourcesProvider != null ? resourcesProvider.getColor(key) : null; return color != null ? 
color : Theme.getColor(key); } + + public void setIcon(int icon) { + checkBoxRound.setIcon(icon); + } + + public boolean hasIcon() { + return checkBoxRound.hasIcon(); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ContextLinkCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ContextLinkCell.java index 17c7232ab5..1f0b744a7d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ContextLinkCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ContextLinkCell.java @@ -36,10 +36,12 @@ import org.telegram.messenger.ImageLoader; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.R; +import org.telegram.messenger.SharedConfig; import org.telegram.messenger.SvgHelper; import org.telegram.messenger.UserConfig; import org.telegram.messenger.Utilities; @@ -88,6 +90,7 @@ public interface ContextLinkCellDelegate { private boolean needShadow; private boolean canPreviewGif; + private boolean isKeyboard; private boolean isForceGif; @@ -322,6 +325,10 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { width = (int) (w / (h / (float) AndroidUtilities.dp(80))); if (documentAttachType == DOCUMENT_ATTACH_TYPE_GIF) { currentPhotoFilterThumb = currentPhotoFilter = String.format(Locale.US, "%d_%d_b", (int) (width / AndroidUtilities.density), 80); + if (!SharedConfig.isAutoplayGifs() && !isKeyboard) { + currentPhotoFilterThumb += "_firstframe"; + currentPhotoFilter += "_firstframe"; + } } else { currentPhotoFilter = String.format(Locale.US, "%d_%d", (int) (width / AndroidUtilities.density), 80); currentPhotoFilterThumb = currentPhotoFilter + "_b"; @@ -335,13 +342,13 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { if (documentAttach != null) { TLRPC.VideoSize thumb = MessageObject.getDocumentVideoThumb(documentAttach); if (thumb != null) { - linkImageView.setImage(ImageLocation.getForDocument(thumb, documentAttach), "100_100", ImageLocation.getForDocument(currentPhotoObject, documentAttach), currentPhotoFilter, -1, ext, parentObject, 1); + linkImageView.setImage(ImageLocation.getForDocument(thumb, documentAttach), "100_100" + (!SharedConfig.isAutoplayGifs() && !isKeyboard ? "_firstframe" : ""), ImageLocation.getForDocument(currentPhotoObject, documentAttach), currentPhotoFilter, -1, ext, parentObject, 1); } else { ImageLocation location = ImageLocation.getForDocument(documentAttach); if (isForceGif) { location.imageType = FileLoader.IMAGE_TYPE_ANIMATION; } - linkImageView.setImage(location, "100_100", ImageLocation.getForDocument(currentPhotoObject, documentAttach), currentPhotoFilter, documentAttach.size, ext, parentObject, 0); + linkImageView.setImage(location, "100_100" + (!SharedConfig.isAutoplayGifs() && !isKeyboard ? 
"_firstframe" : ""), ImageLocation.getForDocument(currentPhotoObject, documentAttach), currentPhotoFilter, documentAttach.size, ext, parentObject, 0); } } else if (webFile != null) { linkImageView.setImage(ImageLocation.getForWebFile(webFile), "100_100", ImageLocation.getForPhoto(currentPhotoObject, photoAttach), currentPhotoFilter, -1, ext, parentObject, 1); @@ -373,6 +380,13 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { linkImageView.setImage(ImageLocation.getForPath(urlLocation), currentPhotoFilter, ImageLocation.getForPhoto(currentPhotoObjectThumb, photoAttach), currentPhotoFilterThumb, -1, ext, parentObject, 1); } } + if (SharedConfig.isAutoplayGifs() || isKeyboard) { + linkImageView.setAllowStartAnimation(true); + linkImageView.startAnimation(); + } else { + linkImageView.setAllowStartAnimation(false); + linkImageView.stopAnimation(); + } drawLinkImageView = true; } @@ -586,6 +600,10 @@ public void setCanPreviewGif(boolean value) { canPreviewGif = value; } + public void setIsKeyboard(boolean value) { + isKeyboard = value; + } + public boolean isCanPreviewGif() { return canPreviewGif; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogCell.java index f3b964d37e..5141c5710d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogCell.java @@ -60,6 +60,7 @@ import org.telegram.messenger.FileLog; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.MessageObject; @@ -99,6 +100,7 @@ import org.telegram.ui.Components.TypefaceSpan; import org.telegram.ui.Components.URLSpanNoUnderline; import org.telegram.ui.Components.URLSpanNoUnderlineBold; +import org.telegram.ui.Components.VectorAvatarThumbDrawable; import org.telegram.ui.Components.spoilers.SpoilerEffect; import org.telegram.ui.DialogsActivity; import org.telegram.ui.RightSlidingDialogContainer; @@ -330,6 +332,7 @@ public static class CustomDialog { private int thumbsCount; private boolean hasVideoThumb; private Paint thumbBackgroundPaint; + private boolean[] thumbImageSeen = new boolean[3]; private ImageReceiver[] thumbImage = new ImageReceiver[3]; private boolean[] drawPlay = new boolean[3]; private boolean[] drawSpoiler = new boolean[3]; @@ -393,6 +396,7 @@ public static class CustomDialog { private int messageTop; private int messageLeft; + private int buttonLeft; private int typingLeft; private StaticLayout messageLayout; private StaticLayout typingLayout; @@ -511,13 +515,14 @@ public DialogCell(DialogsActivity fragment, Context context, boolean needCheck, for (int i = 0; i < thumbImage.length; ++i) { thumbImage[i] = new ImageReceiver(this); thumbImage[i].setRoundRadius(AndroidUtilities.dp(2)); + thumbImage[i].setAllowLoadingOnAttachedOnly(true); } useForceThreeLines = forceThreeLines; currentAccount = account; - emojiStatus = new AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable(this, AndroidUtilities.dp(22)); emojiStatus.center = false; + avatarImage.setAllowLoadingOnAttachedOnly(true); } public void setDialog(TLRPC.Dialog dialog, int type, int folder) { @@ -691,6 +696,9 @@ protected void onAttachedToWindow() { animatedEmojiStack2 = AnimatedEmojiSpan.update(AnimatedEmojiDrawable.CACHE_TYPE_MESSAGES, this, animatedEmojiStack2, 
messageNameLayout); animatedEmojiStack3 = AnimatedEmojiSpan.update(AnimatedEmojiDrawable.CACHE_TYPE_MESSAGES, this, animatedEmojiStack3, buttonLayout); animatedEmojiStackName = AnimatedEmojiSpan.update(AnimatedEmojiDrawable.CACHE_TYPE_MESSAGES, this, animatedEmojiStackName, nameLayout); + if (emojiStatus != null) { + emojiStatus.attach(); + } } public void resetPinnedArchiveState() { @@ -704,7 +712,7 @@ public void resetPinnedArchiveState() { cornerProgress = 0.0f; setTranslationX(0); setTranslationY(0); - if (emojiStatus != null) { + if (emojiStatus != null && attachedToWindow) { emojiStatus.attach(); } } @@ -936,6 +944,9 @@ public void buildLayout() { } } + if (message != null) { + message.updateTranslation(); + } CharSequence msgText = message != null ? message.messageText : null; if (msgText instanceof Spannable) { Spannable sp = new SpannableStringBuilder(msgText); @@ -1104,12 +1115,10 @@ public void buildLayout() { } drawPremium = MessagesController.getInstance(currentAccount).isPremiumUser(user) && UserConfig.getInstance(currentAccount).clientUserId != user.id && user.id != 0; if (drawPremium) { - if (user.emoji_status instanceof TLRPC.TL_emojiStatusUntil && ((TLRPC.TL_emojiStatusUntil) user.emoji_status).until > (int) (System.currentTimeMillis() / 1000)) { + Long emojiStatusId = UserObject.getEmojiStatusDocumentId(user); + if (emojiStatusId != null) { nameLayoutEllipsizeByGradient = true; - emojiStatus.set(((TLRPC.TL_emojiStatusUntil) user.emoji_status).document_id, false); - } else if (user.emoji_status instanceof TLRPC.TL_emojiStatus) { - nameLayoutEllipsizeByGradient = true; - emojiStatus.set(((TLRPC.TL_emojiStatus) user.emoji_status).document_id, false); + emojiStatus.set(emojiStatusId, false); } else { nameLayoutEllipsizeByGradient = true; emojiStatus.set(PremiumGradient.getInstance().premiumStarDrawableMini, false); @@ -1231,7 +1240,7 @@ public void buildLayout() { } } } else { - if (dialogsType == 3 && UserObject.isUserSelf(user)) { + if (dialogsType == DialogsActivity.DIALOGS_TYPE_FORWARD && UserObject.isUserSelf(user)) { messageString = LocaleController.getString("SavedMessagesInfo", R.string.SavedMessagesInfo); showChecks = false; drawTime = false; @@ -1273,7 +1282,7 @@ public void buildLayout() { } if (lastMessageIsReaction) { - } else if (dialogsType == 2) { + } else if (dialogsType == DialogsActivity.DIALOGS_TYPE_ADD_USERS_TO) { if (chat != null) { if (ChatObject.isChannel(chat) && !chat.megagroup) { if (chat.participants_count != 0) { @@ -1304,7 +1313,7 @@ public void buildLayout() { drawCount2 = false; showChecks = false; drawTime = false; - } else if (dialogsType == 3 && UserObject.isUserSelf(user)) { + } else if (dialogsType == DialogsActivity.DIALOGS_TYPE_FORWARD && UserObject.isUserSelf(user)) { messageString = LocaleController.getString("SavedMessagesInfo", R.string.SavedMessagesInfo); showChecks = false; drawTime = false; @@ -1605,7 +1614,7 @@ public void buildLayout() { promoDialog = false; MessagesController messagesController = MessagesController.getInstance(currentAccount); - if (dialogsType == 0 && messagesController.isPromoDialog(currentDialogId, true)) { + if (dialogsType == DialogsActivity.DIALOGS_TYPE_DEFAULT && messagesController.isPromoDialog(currentDialogId, true)) { drawPinBackground = true; promoDialog = true; if (messagesController.promoDialogType == MessagesController.PROMO_TYPE_PROXY) { @@ -1651,7 +1660,7 @@ public void buildLayout() { if (useMeForMyMessages) { nameString = LocaleController.getString("FromYou", R.string.FromYou); } else { - if 
(dialogsType == 3) { + if (dialogsType == DialogsActivity.DIALOGS_TYPE_FORWARD) { drawPinBackground = true; } nameString = LocaleController.getString("SavedMessages", R.string.SavedMessages); @@ -1821,11 +1830,11 @@ public void buildLayout() { messageWidth = getMeasuredWidth() - AndroidUtilities.dp(messagePaddingStart + 21); if (LocaleController.isRTL) { - typingLeft = messageLeft = messageNameLeft = AndroidUtilities.dp(16); + buttonLeft = typingLeft = messageLeft = messageNameLeft = AndroidUtilities.dp(16); avatarLeft = getMeasuredWidth() - AndroidUtilities.dp(66); thumbLeft = avatarLeft - AndroidUtilities.dp(13 + 18); } else { - typingLeft = messageLeft = messageNameLeft = AndroidUtilities.dp(messagePaddingStart + 6); + buttonLeft = typingLeft = messageLeft = messageNameLeft = AndroidUtilities.dp(messagePaddingStart + 6); avatarLeft = AndroidUtilities.dp(10); thumbLeft = avatarLeft + AndroidUtilities.dp(56 + 13); } @@ -1844,11 +1853,11 @@ public void buildLayout() { messageWidth = getMeasuredWidth() - AndroidUtilities.dp(messagePaddingStart + 23 - (LocaleController.isRTL ? 0 : 12)); if (LocaleController.isRTL) { - typingLeft = messageLeft = messageNameLeft = AndroidUtilities.dp(22); + buttonLeft = typingLeft = messageLeft = messageNameLeft = AndroidUtilities.dp(22); avatarLeft = getMeasuredWidth() - AndroidUtilities.dp(64); thumbLeft = avatarLeft - AndroidUtilities.dp(11 + (thumbsCount * (thumbSize + 2) - 2)); } else { - typingLeft = messageLeft = messageNameLeft = AndroidUtilities.dp(messagePaddingStart + 4); + buttonLeft = typingLeft = messageLeft = messageNameLeft = AndroidUtilities.dp(messagePaddingStart + 4); avatarLeft = AndroidUtilities.dp(10); thumbLeft = avatarLeft + AndroidUtilities.dp(56 + 11); } @@ -2075,7 +2084,7 @@ public void buildLayout() { } else { if (thumbsCount > 0) { messageWidth += AndroidUtilities.dp((thumbsCount * (thumbSize + 2) - 2) + 5); - if (LocaleController.isRTL) { + if (LocaleController.isRTL && !isForumCell()) { messageLeft -= AndroidUtilities.dp((thumbsCount * (thumbSize + 2) - 2) + 5); } } @@ -2123,6 +2132,8 @@ public void buildLayout() { nameMuteLeft = (int) (nameLeft + (nameWidth - widthpx - left) - AndroidUtilities.dp(24)); } else if (drawScam != 0) { nameMuteLeft = (int) (nameLeft + (nameWidth - widthpx) - AndroidUtilities.dp(6) - (drawScam == 1 ? Theme.dialogs_scamDrawable : Theme.dialogs_fakeDrawable).getIntrinsicWidth()); + } else { + nameMuteLeft = (int) (nameLeft + (nameWidth - widthpx) - AndroidUtilities.dp(6) - Theme.dialogs_muteDrawable.getIntrinsicWidth()); } if (left == 0) { if (widthpx < nameWidth) { @@ -2194,7 +2205,7 @@ public void buildLayout() { nameLeft -= (nameWidth - widthpx); } } - if (dialogMuted || drawUnmute || drawVerified || drawPremium || drawScam != 0) { + if ((dialogMuted || true) || drawUnmute || drawVerified || drawPremium || drawScam != 0) { nameMuteLeft = (int) (nameLeft + left + AndroidUtilities.dp(6)); } } @@ -2208,6 +2219,16 @@ public void buildLayout() { messageLeft -= left; } } + if (buttonLayout != null) { + int lineCount = buttonLayout.getLineCount(); + if (lineCount > 0) { + left = Integer.MAX_VALUE; + for (int a = 0; a < lineCount; a++) { + left = Math.min(left, buttonLayout.getLineLeft(a)); + } + buttonLeft -= left; + } + } if (typingLayout != null) { int lineCount = typingLayout.getLineCount(); if (lineCount > 0) { @@ -2243,6 +2264,7 @@ public void buildLayout() { private void updateThumbsPosition() { if (thumbsCount > 0) { StaticLayout layout = isForumCell() ? 
buttonLayout : messageLayout; + int left = isForumCell() ? buttonLeft : messageLeft; if (layout == null) { return; } @@ -2263,7 +2285,12 @@ private void updateThumbsPosition() { offset += AndroidUtilities.dp(3); } for (int i = 0; i < thumbsCount; ++i) { - thumbImage[i].setImageX(messageLeft + offset + AndroidUtilities.dp((thumbSize + 2) * i)); + thumbImage[i].setImageX(left + offset + AndroidUtilities.dp((thumbSize + 2) * i)); + thumbImageSeen[i] = true; + } + } else { + for (int i = 0; i < 3; ++i) { + thumbImageSeen[i] = false; } } } @@ -2521,6 +2548,7 @@ public boolean update(int mask) { public boolean update(int mask, boolean animated) { boolean requestLayout = false; + boolean rebuildLayout = false; boolean oldIsForumCell = isForumCell(); drawAvatarSelector = false; ttlPeriod = 0; @@ -2610,7 +2638,7 @@ public boolean update(int mask, boolean animated) { mentionCount = forumTopic.unread_mentions_count; reactionMentionCount = forumTopic.unread_reactions_count; } - if (dialogsType == 2) { + if (dialogsType == DialogsActivity.DIALOGS_TYPE_ADD_USERS_TO) { drawPin = false; } @@ -2622,12 +2650,10 @@ public boolean update(int mask, boolean animated) { } if (user != null && (mask & MessagesController.UPDATE_MASK_EMOJI_STATUS) != 0) { user = MessagesController.getInstance(currentAccount).getUser(user.id); - if (user.emoji_status instanceof TLRPC.TL_emojiStatusUntil && ((TLRPC.TL_emojiStatusUntil) user.emoji_status).until > (int) (System.currentTimeMillis() / 1000)) { + Long emojiStatusId = UserObject.getEmojiStatusDocumentId(user); + if (emojiStatusId != null) { nameLayoutEllipsizeByGradient = true; - emojiStatus.set(((TLRPC.TL_emojiStatusUntil) user.emoji_status).document_id, animated); - } else if (user.emoji_status instanceof TLRPC.TL_emojiStatus) { - nameLayoutEllipsizeByGradient = true; - emojiStatus.set(((TLRPC.TL_emojiStatus) user.emoji_status).document_id, animated); + emojiStatus.set(emojiStatusId, animated); } else { nameLayoutEllipsizeByGradient = true; emojiStatus.set(PremiumGradient.getInstance().premiumStarDrawableMini, animated); @@ -2754,7 +2780,6 @@ public boolean update(int mask, boolean animated) { dialogMuted = isDialogCell && MessagesController.getInstance(currentAccount).isDialogMuted(currentDialogId, getTopicId()); } - dialogId = currentDialogId; } @@ -2797,7 +2822,7 @@ public boolean update(int mask, boolean animated) { avatarDrawable.setAvatarType(AvatarDrawable.AVATAR_TYPE_SAVED); avatarImage.setImage(null, null, avatarDrawable, null, user, 0); } else { - avatarImage.setForUserOrChat(user, avatarDrawable, null, true); + avatarImage.setForUserOrChat(user, avatarDrawable, null, true, VectorAvatarThumbDrawable.TYPE_SMALL); } } else if (chat != null) { avatarDrawable.setInfo(chat); @@ -2896,7 +2921,7 @@ public void onAnimationEnd(Animator animation) { avatarImage.setRoundRadius(chat != null && chat.forum && currentDialogFolderId == 0 && !useFromUserAsAvatar ? 
AndroidUtilities.dp(16) : AndroidUtilities.dp(28)); } if (!isTopic && (getMeasuredWidth() != 0 || getMeasuredHeight() != 0)) { - buildLayout(); + rebuildLayout = true; } else { //requestLayout(); } @@ -2912,6 +2937,9 @@ public void onAnimationEnd(Animator animation) { if (isForumCell() != oldIsForumCell) { requestLayout = true; } + if (rebuildLayout) { + buildLayout(); + } return requestLayout; } @@ -3429,14 +3457,14 @@ protected void onDraw(Canvas canvas) { if (!buttonCreated) { canvasButton.rewind(); if (topMessageTopicEndIndex != topMessageTopicStartIndex && topMessageTopicEndIndex > 0) { - AndroidUtilities.rectTmp.set(messageLeft + AndroidUtilities.dp(2), messageTop, messageLeft + messageLayout.getPrimaryHorizontal(Math.min(messageLayout.getText().length(), topMessageTopicEndIndex)) - AndroidUtilities.dp(3), buttonTop - AndroidUtilities.dp(4)); + AndroidUtilities.rectTmp.set(messageLeft + AndroidUtilities.dp(2) + messageLayout.getPrimaryHorizontal(0), messageTop, messageLeft + messageLayout.getPrimaryHorizontal(Math.min(messageLayout.getText().length(), topMessageTopicEndIndex)) - AndroidUtilities.dp(3), buttonTop - AndroidUtilities.dp(4)); AndroidUtilities.rectTmp.inset(-AndroidUtilities.dp(8), -AndroidUtilities.dp(4)); if (AndroidUtilities.rectTmp.right > AndroidUtilities.rectTmp.left) { canvasButton.addRect(AndroidUtilities.rectTmp); } } - AndroidUtilities.rectTmp.set(messageLeft + AndroidUtilities.dp(2), buttonTop + AndroidUtilities.dp(2), messageLeft + buttonLayout.getLineWidth(0) + AndroidUtilities.dp(12), buttonTop + buttonLayout.getHeight()); + AndroidUtilities.rectTmp.set(buttonLeft + AndroidUtilities.dp(2), buttonTop + AndroidUtilities.dp(2), buttonLeft + buttonLayout.getLineWidth(0) + AndroidUtilities.dp(12), buttonTop + buttonLayout.getHeight()); AndroidUtilities.rectTmp.inset(-AndroidUtilities.dp(8), -AndroidUtilities.dp(3)); canvasButton.addRect(AndroidUtilities.rectTmp); } @@ -3448,7 +3476,7 @@ protected void onDraw(Canvas canvas) { } - canvas.translate(messageLeft - buttonLayout.getLineLeft(0), buttonTop); + canvas.translate(buttonLeft, buttonTop); if (!spoilers2.isEmpty()) { try { canvas.save(); @@ -3648,6 +3676,9 @@ protected void onDraw(Canvas canvas) { canvas.translate(0, top); } for (int i = 0; i < thumbsCount; ++i) { + if (!thumbImageSeen[i]) { + continue; + } if (thumbBackgroundPaint == null) { thumbBackgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG); thumbBackgroundPaint.setShadowLayer(AndroidUtilities.dp(1.34f), 0, AndroidUtilities.dp(0.34f), 0x18000000); @@ -3744,6 +3775,9 @@ protected void onDraw(Canvas canvas) { timerPaint.setShader(null); if (avatarImage.getBitmap() != null && !avatarImage.getBitmap().isRecycled()) { timerPaint.setColor(PremiumLockIconView.getDominantColor(avatarImage.getBitmap())); + } else if (avatarImage.getDrawable() instanceof VectorAvatarThumbDrawable){ + VectorAvatarThumbDrawable vectorAvatarThumbDrawable = (VectorAvatarThumbDrawable) avatarImage.getDrawable(); + timerPaint.setColor(vectorAvatarThumbDrawable.gradientTools.getAverageColor()); } else { timerPaint.setColor(avatarDrawable.getColor2()); } @@ -3820,7 +3854,7 @@ protected void onDraw(Canvas canvas) { float size1; float size2; - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(LiteMode.FLAGS_CHAT)) { innerProgress = 0.65f; } if (progressStage == 0) { @@ -3865,7 +3899,7 @@ protected void onDraw(Canvas canvas) { canvas.restore(); } - if (!SharedConfig.getLiteMode().enabled()) { + if (LiteMode.isEnabled(LiteMode.FLAGS_CHAT)) { innerProgress += 16f / 400.0f; 
if (innerProgress >= 1.0f) { innerProgress = 0.0f; @@ -4388,6 +4422,14 @@ public void onPopulateAccessibilityEvent(AccessibilityEvent event) { sb.append(LocaleController.getString("AccDescrVerified", R.string.AccDescrVerified)); sb.append(". "); } + if (dialogMuted) { + sb.append(LocaleController.getString("AccDescrNotificationsMuted", R.string.AccDescrNotificationsMuted)); + sb.append(". "); + } + if (isOnline()) { + sb.append(LocaleController.getString("AccDescrUserOnline", R.string.AccDescrUserOnline)); + sb.append(". "); + } if (unreadCount > 0) { sb.append(LocaleController.formatPluralString("NewMessages", unreadCount)); sb.append(". "); @@ -4401,7 +4443,8 @@ public void onPopulateAccessibilityEvent(AccessibilityEvent event) { sb.append(". "); } if (message == null || currentDialogFolderId != 0) { - event.setContentDescription(sb.toString()); + event.setContentDescription(sb); + setContentDescription(sb); return; } int lastDate = lastMessageDate; @@ -4448,6 +4491,8 @@ public void onPopulateAccessibilityEvent(AccessibilityEvent event) { sb.append(messageString); } } + event.setContentDescription(sb); + setContentDescription(sb); } private MessageObject getCaptionMessage() { @@ -4594,6 +4639,9 @@ public SpannableStringBuilder getMessageStringFormatted(String messageFormat, St applyName = false; stringBuilder = SpannableStringBuilder.valueOf(mess); } + if (applyThumbs) { + applyThumbs(stringBuilder); + } } else if (captionMessage != null && captionMessage.caption != null) { MessageObject message = captionMessage; CharSequence mess = xyz.nextalone.nagram.helper.MessageHelper.INSTANCE.zalgoFilter(message.caption.toString()); @@ -4808,6 +4856,10 @@ private class DialogUpdateHelper { public boolean update() { TLRPC.Dialog dialog = MessagesController.getInstance(currentAccount).dialogs_dict.get(currentDialogId); if (dialog == null) { + if (dialogsType == DialogsActivity.DIALOGS_TYPE_FORWARD && lastDrawnDialogId != currentDialogId) { + lastDrawnDialogId = currentDialogId; + return true; + } return false; } int messageHash = message == null ? 
0 : message.getId(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogsHintCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogsHintCell.java new file mode 100644 index 0000000000..97b6678e88 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogsHintCell.java @@ -0,0 +1,104 @@ +package org.telegram.ui.Cells; + +import static org.telegram.messenger.AndroidUtilities.dp; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.PorterDuff; +import android.text.TextUtils; +import android.util.TypedValue; +import android.view.Gravity; +import android.view.View; +import android.widget.FrameLayout; +import android.widget.ImageView; +import android.widget.LinearLayout; +import android.widget.TextView; + +import androidx.annotation.NonNull; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.R; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.LayoutHelper; + +public class DialogsHintCell extends FrameLayout { + private LinearLayout contentView; + private TextView titleView; + private TextView messageView; + private ImageView chevronView; + + public DialogsHintCell(@NonNull Context context) { + super(context); + + setWillNotDraw(false); + setPadding(dp(16), dp(8), dp(16), dp(8)); + + contentView = new LinearLayout(context); + contentView.setOrientation(LinearLayout.VERTICAL); + contentView.setPadding(LocaleController.isRTL ? dp(24) : 0, 0, LocaleController.isRTL ? 0 : dp(24), 0); + addView(contentView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + + titleView = new TextView(context); + titleView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + titleView.setTypeface(AndroidUtilities.getTypeface(AndroidUtilities.TYPEFACE_ROBOTO_MEDIUM)); + titleView.setSingleLine(); + contentView.addView(titleView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, Gravity.TOP)); + + messageView = new TextView(context); + messageView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + messageView.setMaxLines(2); + messageView.setEllipsize(TextUtils.TruncateAt.END); + contentView.addView(messageView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, Gravity.TOP)); + + chevronView = new ImageView(context); + chevronView.setImageResource(R.drawable.arrow_newchat); + chevronView.setColorFilter(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText), PorterDuff.Mode.SRC_IN); + addView(chevronView, LayoutHelper.createFrame(16, 16, (LocaleController.isRTL ? 
Gravity.LEFT : Gravity.RIGHT) | Gravity.CENTER_VERTICAL)); + + updateColors(); + } + + public void updateColors() { + titleView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); + messageView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); + setBackground(Theme.AdaptiveRipple.filledRect()); + } + + public void setText(CharSequence title, CharSequence subtitle) { + titleView.setText(title); + messageView.setText(subtitle); + } + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + canvas.drawLine(0, getHeight() - 1, getWidth(), getHeight() - 1, Theme.dividerPaint); + } + + private int height; + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + int width = MeasureSpec.getSize(widthMeasureSpec); + if (width <= 0) { + width = AndroidUtilities.displaySize.x; + } + contentView.measure( + MeasureSpec.makeMeasureSpec(width - getPaddingLeft() - getPaddingRight(), MeasureSpec.EXACTLY), + MeasureSpec.makeMeasureSpec(AndroidUtilities.displaySize.y, MeasureSpec.AT_MOST) + ); + this.height = contentView.getMeasuredHeight() + getPaddingTop() + getPaddingBottom() + 1; + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY)); + } + + public int height() { + if (getVisibility() != View.VISIBLE) { + return 0; + } + if (height <= 0) { + height = dp(72) + 1; + } + return height; + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogsRequestedEmptyCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogsRequestedEmptyCell.java new file mode 100644 index 0000000000..9db08bff42 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogsRequestedEmptyCell.java @@ -0,0 +1,177 @@ +package org.telegram.ui.Cells; + +import static org.telegram.messenger.AndroidUtilities.dp; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Paint; +import android.graphics.Path; +import android.util.TypedValue; +import android.view.Gravity; +import android.view.View; +import android.widget.LinearLayout; +import android.widget.TextView; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.DocumentObject; +import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.MediaDataController; +import org.telegram.messenger.NotificationCenter; +import org.telegram.messenger.R; +import org.telegram.messenger.SvgHelper; +import org.telegram.messenger.UserConfig; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.BackupImageView; +import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.StickerEmptyView; + +public class DialogsRequestedEmptyCell extends LinearLayout implements NotificationCenter.NotificationCenterDelegate { + + int currentAccount = UserConfig.selectedAccount; + BackupImageView stickerView; + TextView titleView; + TextView subtitleView; + TextView buttonView; + + public DialogsRequestedEmptyCell(Context context) { + super(context); + + setOrientation(VERTICAL); + setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundGray)); + + LinearLayout linearLayout = new LinearLayout(context) { + Path path = new Path(); + Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + { + paint.setColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + paint.setShadowLayer(dp(1.33f), 0, dp(.33f), 0x1e000000); + } + @Override + protected 
void onDraw(Canvas canvas) { + canvas.drawPath(path, paint); + super.onDraw(canvas); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + path.rewind(); + AndroidUtilities.rectTmp.set(AndroidUtilities.dp(12), AndroidUtilities.dp(6), getMeasuredWidth() - AndroidUtilities.dp(12), getMeasuredHeight() - AndroidUtilities.dp(12)); + path.addRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(10), AndroidUtilities.dp(10), Path.Direction.CW); + } + }; + linearLayout.setWillNotDraw(false); + linearLayout.setOrientation(VERTICAL); + linearLayout.setPadding(AndroidUtilities.dp(12 + 20), AndroidUtilities.dp(6 + 10), AndroidUtilities.dp(12 + 20), AndroidUtilities.dp(12 +20)); + + stickerView = new BackupImageView(context); + stickerView.setOnClickListener(e -> { + stickerView.getImageReceiver().startAnimation(); + }); + updateSticker(); + linearLayout.addView(stickerView, LayoutHelper.createLinear(130, 130, Gravity.CENTER_HORIZONTAL | Gravity.TOP)); + + titleView = new TextView(context); + titleView.setGravity(Gravity.CENTER); + titleView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 18); + titleView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); + titleView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + linearLayout.addView(titleView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL | Gravity.TOP, 0, 6, 0, 0)); + + subtitleView = new TextView(context); + subtitleView.setGravity(Gravity.CENTER); + subtitleView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + subtitleView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); + linearLayout.addView(subtitleView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL | Gravity.TOP, 0, 7, 0, 0)); + + buttonView = new TextView(context); + buttonView.setGravity(Gravity.CENTER); + buttonView.setBackground(Theme.AdaptiveRipple.filledRect(Theme.key_featuredStickers_addButton, 8)); + buttonView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + buttonView.setTextColor(Theme.getColor(Theme.key_featuredStickers_buttonText)); + buttonView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + buttonView.setPadding(AndroidUtilities.dp(14), AndroidUtilities.dp(14), AndroidUtilities.dp(14), AndroidUtilities.dp(14)); + buttonView.setOnClickListener(e -> { + onButtonClick(); + }); + linearLayout.addView(buttonView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL | Gravity.TOP, 0, 18, 0, 0)); + + addView(linearLayout, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + + set(null); + } + + protected void onButtonClick() { + + } + + public void set(TLRPC.RequestPeerType requestPeerType) { + if (requestPeerType instanceof TLRPC.TL_requestPeerTypeBroadcast) { + titleView.setText(LocaleController.getString("NoSuchChannels", R.string.NoSuchChannels)); + subtitleView.setText(LocaleController.getString("NoSuchChannelsInfo", R.string.NoSuchChannelsInfo)); + buttonView.setVisibility(View.VISIBLE); + buttonView.setText(LocaleController.getString("CreateChannelForThis", R.string.CreateChannelForThis)); + } else if (requestPeerType instanceof TLRPC.TL_requestPeerTypeChat) { + titleView.setText(LocaleController.getString("NoSuchGroups", R.string.NoSuchGroups)); + 
subtitleView.setText(LocaleController.getString("NoSuchGroupsInfo", R.string.NoSuchGroupsInfo)); + buttonView.setVisibility(View.VISIBLE); + buttonView.setText(LocaleController.getString("CreateGroupForThis", R.string.CreateGroupForThis)); + } else { + titleView.setText(LocaleController.getString("NoSuchUsers", R.string.NoSuchUsers)); + subtitleView.setText(LocaleController.getString("NoSuchUsersInfo", R.string.NoSuchUsersInfo)); + buttonView.setVisibility(View.GONE); + } + } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.diceStickersDidLoad); + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.diceStickersDidLoad); + } + + @Override + public void didReceivedNotification(int id, int account, Object... args) { + if (id == NotificationCenter.diceStickersDidLoad) { + String name = (String) args[0]; + if (AndroidUtilities.STICKERS_PLACEHOLDER_PACK_NAME.equals(name) && getVisibility() == VISIBLE) { + updateSticker(); + } + } + } + + private void updateSticker() { + final int stickerType = StickerEmptyView.STICKER_TYPE_SEARCH; + + TLRPC.Document document = null; + TLRPC.TL_messages_stickerSet set = MediaDataController.getInstance(currentAccount).getStickerSetByName(AndroidUtilities.STICKERS_PLACEHOLDER_PACK_NAME); + if (set == null) { + set = MediaDataController.getInstance(currentAccount).getStickerSetByEmojiOrName(AndroidUtilities.STICKERS_PLACEHOLDER_PACK_NAME); + } + if (set != null && stickerType >= 0 && stickerType < set.documents.size() ) { + document = set.documents.get(stickerType); + } + + if (document != null) { + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(document.thumbs, Theme.key_windowBackgroundGray, 0.2f); + if (svgThumb != null) { + svgThumb.overrideWidthAndHeight(512, 512); + } + + ImageLocation imageLocation = ImageLocation.getForDocument(document); + stickerView.setImage(imageLocation, "130_130", "tgs", svgThumb, set); + stickerView.getImageReceiver().setAutoRepeat(2); + } else { + MediaDataController.getInstance(currentAccount).loadStickersByEmojiOrName(AndroidUtilities.STICKERS_PLACEHOLDER_PACK_NAME, false, set == null); + stickerView.getImageReceiver().clearImage(); + } + } + +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerActionCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerActionCell.java index 6c4d0795ac..e76f511a97 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerActionCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerActionCell.java @@ -13,12 +13,8 @@ import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.RectF; -import android.graphics.drawable.Drawable; -import android.util.TypedValue; import android.view.Gravity; -import android.view.accessibility.AccessibilityEvent; import android.view.accessibility.AccessibilityNodeInfo; -import android.widget.Button; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.TextView; @@ -48,19 +44,24 @@ public DrawerActionCell(Context context) { super(context); imageView = new ImageView(context); + imageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chats_menuItemIcon), PorterDuff.Mode.SRC_IN)); addView(imageView, LayoutHelper.createFrame(24, 24, Gravity.LEFT | Gravity.TOP, 
19, 12, 0, 0)); +// addView(imageView, LayoutHelper.createFrame(24, 24, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 0 : 19, 12, LocaleController.isRTL ? 19 : 0, 0)); lottieImageView = new RLottieImageView(context); lottieImageView.setAutoRepeat(false); lottieImageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chats_menuItemIcon), PorterDuff.Mode.SRC_IN)); addView(lottieImageView, LayoutHelper.createFrame(28, 28, Gravity.LEFT | Gravity.TOP, 17, 10, 0, 0)); +// addView(lottieImageView, LayoutHelper.createFrame(28, 28, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 0 : 17, 10, LocaleController.isRTL ? 17 : 0, 0)); textView = new AnimatedTextView(context, true, true, true); textView.setAnimationProperties(.6f, 0, 350, CubicBezierInterpolator.EASE_OUT_QUINT); textView.setTextColor(Theme.getColor(Theme.key_chats_menuItemText)); textView.setTextSize(AndroidUtilities.dp(15)); textView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + textView.setIgnoreRTL(true); addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP, 19 + 24 + 29, 0, 16, 0)); +// addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 16 : 62, 0, LocaleController.isRTL ? 62 : 16, 0)); setWillNotDraw(false); } @@ -108,9 +109,7 @@ public void setTextAndIcon(int id, String text, int resId, int lottieId) { imageView.setImageDrawable(null); lottieImageView.setAnimation(currentLottieId = lottieId, 28, 28); } else { - Drawable drawable = getResources().getDrawable(resId).mutate(); - drawable.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chats_menuItemIcon), PorterDuff.Mode.SRC_IN)); - imageView.setImageDrawable(drawable); + imageView.setImageResource(resId); lottieImageView.clearAnimationDrawable(); currentLottieId = 0; } @@ -140,6 +139,7 @@ public void updateIcon(int lottieId) { @Override public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { super.onInitializeAccessibilityNodeInfo(info); + info.setClassName("android.widget.Button"); info.addAction(AccessibilityNodeInfo.ACTION_CLICK); info.addAction(AccessibilityNodeInfo.ACTION_LONG_CLICK); info.setText(textView.getText()); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerProfileCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerProfileCell.java index df08337e03..f2a5bf10f1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerProfileCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerProfileCell.java @@ -118,7 +118,7 @@ public class DrawerProfileCell extends FrameLayout implements NotificationCenter private float stateX, stateY; StarParticlesView.Drawable starParticlesDrawable; - PremiumGradient.GradientTools gradientTools; + PremiumGradient.PremiumGradientTools gradientTools; private Bitmap lastBitmap; private TLRPC.User user; @@ -301,13 +301,15 @@ public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { sunDrawable.setCustomEndFrame(0); } darkThemeView.playAnimation(); - if (Theme.selectedAutoNightType != Theme.AUTO_NIGHT_TYPE_NONE) { - Toast.makeText(getContext(), LocaleController.getString("AutoNightModeOff", R.string.AutoNightModeOff), Toast.LENGTH_SHORT).show(); - Theme.selectedAutoNightType = 
Theme.AUTO_NIGHT_TYPE_NONE; - Theme.saveAutoNightThemeConfig(); - Theme.cancelAutoNightThemeCallbacks(); - } switchTheme(themeInfo, toDark); + + if (drawerLayoutContainer != null ) { + FrameLayout layout = drawerLayoutContainer.getParent() instanceof FrameLayout ? (FrameLayout) drawerLayoutContainer.getParent() : null; + Theme.turnOffAutoNight(layout, () -> { + drawerLayoutContainer.closeDrawer(false); + drawerLayoutContainer.presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_NIGHT)); + }); + } }); darkThemeView.setOnLongClickListener(e -> { if (drawerLayoutContainer != null) { @@ -672,7 +674,7 @@ protected void onDraw(Canvas canvas) { drawPremiumProgress = Utilities.clamp(drawPremiumProgress, 1f, 0); if (drawPremiumProgress != 0) { if (gradientTools == null) { - gradientTools = new PremiumGradient.GradientTools(Theme.key_premiumGradientBottomSheet1, Theme.key_premiumGradientBottomSheet2, Theme.key_premiumGradientBottomSheet3, null); + gradientTools = new PremiumGradient.PremiumGradientTools(Theme.key_premiumGradientBottomSheet1, Theme.key_premiumGradientBottomSheet2, Theme.key_premiumGradientBottomSheet3, null); gradientTools.x1 = 0; gradientTools.y1 = 1.1f; gradientTools.x2 = 1.5f; @@ -734,7 +736,6 @@ public void setUser(TLRPC.User user, boolean accounts) { NotificationCenter.getInstance(lastAccount = account).addObserver(this, NotificationCenter.userEmojiStatusUpdated); NotificationCenter.getInstance(lastAccount = account).addObserver(this, NotificationCenter.updateInterfaces); } - lastUser = user; if (user == null) { return; @@ -749,14 +750,11 @@ public void setUser(TLRPC.User user, boolean accounts) { drawPremium = false;//user.premium; nameTextView.setText(text); - if (user.emoji_status instanceof TLRPC.TL_emojiStatusUntil && ((TLRPC.TL_emojiStatusUntil) user.emoji_status).until > (int) (System.currentTimeMillis() / 1000)) { - animatedStatus.animate().alpha(1).setDuration(200).start(); - nameTextView.setDrawablePadding(AndroidUtilities.dp(4)); - status.set(((TLRPC.TL_emojiStatusUntil) user.emoji_status).document_id, true); - } else if (user.emoji_status instanceof TLRPC.TL_emojiStatus) { + Long emojiStatusId = UserObject.getEmojiStatusDocumentId(user); + if (emojiStatusId != null) { animatedStatus.animate().alpha(1).setDuration(200).start(); nameTextView.setDrawablePadding(AndroidUtilities.dp(4)); - status.set(((TLRPC.TL_emojiStatus) user.emoji_status).document_id, true); + status.set(emojiStatusId, true); } else if (user.premium) { animatedStatus.animate().alpha(1).setDuration(200).start(); nameTextView.setDrawablePadding(AndroidUtilities.dp(4)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerUserCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerUserCell.java index ab6ba09f4a..3d0933be03 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerUserCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerUserCell.java @@ -10,7 +10,6 @@ import android.content.Context; import android.graphics.Canvas; -import android.graphics.Path; import android.graphics.RectF; import android.graphics.drawable.Drawable; import android.view.Gravity; @@ -144,13 +143,10 @@ public void setAccount(int account) { text = Emoji.replaceEmoji(text, textView.getPaint().getFontMetricsInt(), AndroidUtilities.dp(20), false); } catch (Exception ignore) {} textView.setText(text); - if (user.emoji_status instanceof TLRPC.TL_emojiStatusUntil && ((TLRPC.TL_emojiStatusUntil) user.emoji_status).until > (int) (System.currentTimeMillis() / 1000)) { + 
Long emojiStatusId = UserObject.getEmojiStatusDocumentId(user); + if (emojiStatusId != null) { textView.setDrawablePadding(AndroidUtilities.dp(4)); - status.set(((TLRPC.TL_emojiStatusUntil) user.emoji_status).document_id, true); - textView.setRightDrawableOutside(true); - } else if (user.emoji_status instanceof TLRPC.TL_emojiStatus) { - textView.setDrawablePadding(AndroidUtilities.dp(4)); - status.set(((TLRPC.TL_emojiStatus) user.emoji_status).document_id, true); + status.set(emojiStatusId, true); textView.setRightDrawableOutside(true); } else if (MessagesController.getInstance(account).isPremiumUser(user)) { textView.setDrawablePadding(AndroidUtilities.dp(6)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallUserCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallUserCell.java index 1d830a4a8f..86b6cd45db 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallUserCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallUserCell.java @@ -30,6 +30,7 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ChatObject; import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.R; @@ -432,6 +433,9 @@ protected void onDetachedFromWindow() { if (animatorSet != null) { animatorSet.cancel(); } + if (rightDrawable != null) { + rightDrawable.detach(); + } } public boolean isSelfUser() { @@ -537,6 +541,9 @@ public void setDrawDivider(boolean draw) { protected void onAttachedToWindow() { super.onAttachedToWindow(); applyParticipantChanges(false); + if (rightDrawable != null) { + rightDrawable.attach(); + } } public TLRPC.TL_groupCallParticipant getParticipant() { @@ -984,7 +991,7 @@ public void update() { } public void draw(Canvas canvas, float cx, float cy, View parentView) { - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(LiteMode.FLAG_CALLS_ANIMATIONS)) { return; } float scaleBlob = 0.8f + 0.4f * amplitude; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCreateUserCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCreateUserCell.java index a539862f38..7fb4ff6a73 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCreateUserCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCreateUserCell.java @@ -264,11 +264,11 @@ public void update(int mask) { } avatarImageView.getLayoutParams().width = avatarImageView.getLayoutParams().height = AndroidUtilities.dp(46); if (checkBox != null) { - ((LayoutParams) checkBox.getLayoutParams()).topMargin = AndroidUtilities.dp(33) + padding; + ((LayoutParams) checkBox.getLayoutParams()).topMargin = AndroidUtilities.dp(29) + padding; if (LocaleController.isRTL) { ((LayoutParams) checkBox.getLayoutParams()).rightMargin = AndroidUtilities.dp(39) + padding; } else { - ((LayoutParams) checkBox.getLayoutParams()).leftMargin = AndroidUtilities.dp(40) + padding; + ((LayoutParams) checkBox.getLayoutParams()).leftMargin = AndroidUtilities.dp(45) + padding; } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/PhotoAttachCameraCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/PhotoAttachCameraCell.java index 08cb8e7ce8..5ce4752349 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/PhotoAttachCameraCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/PhotoAttachCameraCell.java @@ -14,6 +14,7 
@@ import android.graphics.BitmapFactory; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; +import android.graphics.drawable.Drawable; import android.widget.FrameLayout; import android.widget.ImageView; @@ -95,6 +96,10 @@ public void updateBitmap() { } } + public Drawable getDrawable() { + return backgroundView.getDrawable(); + } + protected int getThemedColor(String key) { Integer color = resourcesProvider != null ? resourcesProvider.getColor(key) : null; return color != null ? color : Theme.getColor(key); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/PhotoAttachPhotoCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/PhotoAttachPhotoCell.java index 83b2d96fdc..2cfdb3c5ad 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/PhotoAttachPhotoCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/PhotoAttachPhotoCell.java @@ -539,11 +539,17 @@ protected void onDraw(Canvas canvas) { public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { super.onInitializeAccessibilityNodeInfo(info); info.setEnabled(true); + StringBuilder sb = new StringBuilder(); if (photoEntry != null && photoEntry.isVideo) { - info.setText(LocaleController.getString("AttachVideo", R.string.AttachVideo) + ", " + LocaleController.formatDuration(photoEntry.duration)); + sb.append(LocaleController.getString("AttachVideo", R.string.AttachVideo) + ", " + LocaleController.formatDuration(photoEntry.duration)); } else { - info.setText(LocaleController.getString("AttachPhoto", R.string.AttachPhoto)); + sb.append(LocaleController.getString("AttachPhoto", R.string.AttachPhoto)); } + if (photoEntry != null) { + sb.append(". "); + sb.append(LocaleController.getInstance().formatterStats.format(photoEntry.dateTaken * 1000L)); + } + info.setText(sb); if (checkBox.isChecked()) { info.setSelected(true); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ProfileSearchCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ProfileSearchCell.java index 0f40f49f4f..87518ea00a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ProfileSearchCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ProfileSearchCell.java @@ -291,44 +291,42 @@ public void buildLayout() { } nameLockTop = AndroidUtilities.dp(22.0f); updateStatus(false, null, false); - } else { - if (chat != null) { - dialog_id = -chat.id; - drawCheck = chat.verifiedExtended(); - if (!LocaleController.isRTL) { - nameLeft = AndroidUtilities.dp(AndroidUtilities.leftBaseline); - } else { - nameLeft = AndroidUtilities.dp(11); - } - updateStatus(drawCheck, null, false); - } else if (user != null) { - dialog_id = user.id; - if (!LocaleController.isRTL) { - nameLeft = AndroidUtilities.dp(AndroidUtilities.leftBaseline); - } else { - nameLeft = AndroidUtilities.dp(11); - } - nameLockTop = AndroidUtilities.dp(21); - drawCheck = user.verifiedExtended(); - drawPremium = !user.self && MessagesController.getInstance(currentAccount).isPremiumUser(user); - updateStatus(drawCheck, user, false); - } else if (contact != null) { - if (!LocaleController.isRTL) { - nameLeft = AndroidUtilities.dp(AndroidUtilities.leftBaseline); - } else { - nameLeft = AndroidUtilities.dp(11); - } - if (actionButton == null) { - actionButton = new CanvasButton(this); - actionButton.setDelegate(() -> { - if (getParent() instanceof RecyclerListView) { - RecyclerListView parent = (RecyclerListView) getParent(); - parent.getOnItemClickListener().onItemClick(this, 
parent.getChildAdapterPosition(this)); - } else { - callOnClick(); - } - }); - } + } else if (chat != null) { + dialog_id = -chat.id; + drawCheck = chat.verifiedExtended(); + if (!LocaleController.isRTL) { + nameLeft = AndroidUtilities.dp(AndroidUtilities.leftBaseline); + } else { + nameLeft = AndroidUtilities.dp(11); + } + updateStatus(drawCheck, null, false); + } else if (user != null) { + dialog_id = user.id; + if (!LocaleController.isRTL) { + nameLeft = AndroidUtilities.dp(AndroidUtilities.leftBaseline); + } else { + nameLeft = AndroidUtilities.dp(11); + } + nameLockTop = AndroidUtilities.dp(21); + drawCheck = user.verifiedExtended(); + drawPremium = !savedMessages && MessagesController.getInstance(currentAccount).isPremiumUser(user); + updateStatus(drawCheck, user, false); + } else if (contact != null) { + if (!LocaleController.isRTL) { + nameLeft = AndroidUtilities.dp(AndroidUtilities.leftBaseline); + } else { + nameLeft = AndroidUtilities.dp(11); + } + if (actionButton == null) { + actionButton = new CanvasButton(this); + actionButton.setDelegate(() -> { + if (getParent() instanceof RecyclerListView) { + RecyclerListView parent = (RecyclerListView) getParent(); + parent.getOnItemClickListener().onItemClick(this, parent.getChildAdapterPosition(this)); + } else { + callOnClick(); + } + }); } } @@ -550,13 +548,13 @@ public void updateStatus(boolean verified, TLRPC.User user, boolean animated) { if (verified) { statusDrawable.set(new CombinedDrawable(Theme.dialogs_verifiedDrawable, Theme.dialogs_verifiedCheckDrawable, 0, 0), animated); statusDrawable.setColor(null); - } else if (user != null && !user.self && user.emoji_status instanceof TLRPC.TL_emojiStatusUntil && ((TLRPC.TL_emojiStatusUntil) user.emoji_status).until > (int) (System.currentTimeMillis() / 1000)) { + } else if (user != null && !savedMessages && user.emoji_status instanceof TLRPC.TL_emojiStatusUntil && ((TLRPC.TL_emojiStatusUntil) user.emoji_status).until > (int) (System.currentTimeMillis() / 1000)) { statusDrawable.set(((TLRPC.TL_emojiStatusUntil) user.emoji_status).document_id, animated); statusDrawable.setColor(Theme.getColor(Theme.key_chats_verifiedBackground, resourcesProvider)); - } else if (user != null && !user.self && user.emoji_status instanceof TLRPC.TL_emojiStatus) { + } else if (user != null && !savedMessages && user.emoji_status instanceof TLRPC.TL_emojiStatus) { statusDrawable.set(((TLRPC.TL_emojiStatus) user.emoji_status).document_id, animated); statusDrawable.setColor(Theme.getColor(Theme.key_chats_verifiedBackground, resourcesProvider)); - } else if (user != null && !user.self && MessagesController.getInstance(currentAccount).isPremiumUser(user)) { + } else if (user != null && !savedMessages && MessagesController.getInstance(currentAccount).isPremiumUser(user)) { statusDrawable.set(PremiumGradient.getInstance().premiumStarDrawableMini, animated); statusDrawable.setColor(Theme.getColor(Theme.key_chats_verifiedBackground, resourcesProvider)); } else { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/RequestPeerRequirementsCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/RequestPeerRequirementsCell.java new file mode 100644 index 0000000000..04cf51d801 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/RequestPeerRequirementsCell.java @@ -0,0 +1,297 @@ +package org.telegram.ui.Cells; + +import android.content.Context; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.drawable.ColorDrawable; +import 
android.graphics.drawable.Drawable; +import android.text.SpannableStringBuilder; +import android.text.TextUtils; +import android.util.TypedValue; +import android.view.Gravity; +import android.view.View; +import android.widget.ImageView; +import android.widget.LinearLayout; +import android.widget.TextView; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.R; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.LayoutHelper; + +import java.util.ArrayList; + +public class RequestPeerRequirementsCell extends LinearLayout { + + public RequestPeerRequirementsCell(Context context) { + super(context); + + setOrientation(VERTICAL); + setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundGray)); + } + + private TLRPC.RequestPeerType requestPeerType; + private ArrayList requirements = new ArrayList<>(); + + public void set(TLRPC.RequestPeerType requestPeerType) { + if (this.requestPeerType != requestPeerType) { + this.requestPeerType = requestPeerType; + removeAllViews(); + + requirements.clear(); + + if (requestPeerType instanceof TLRPC.TL_requestPeerTypeUser) { + TLRPC.TL_requestPeerTypeUser type = (TLRPC.TL_requestPeerTypeUser) requestPeerType; + checkRequirement( + type.premium, + R.string.PeerRequirementPremiumTrue, + R.string.PeerRequirementPremiumFalse + ); + } else { + boolean isChannel = requestPeerType instanceof TLRPC.TL_requestPeerTypeBroadcast; + + if (isChannel) { + checkRequirement(requestPeerType.has_username, R.string.PeerRequirementChannelPublicTrue, R.string.PeerRequirementChannelPublicFalse); + if (requestPeerType.bot_participant != null && requestPeerType.bot_participant) { + requirements.add(Requirement.make(AndroidUtilities.replaceTags(LocaleController.getString(R.string.PeerRequirementChannelBotParticipant)))); + } + if (requestPeerType.creator != null && requestPeerType.creator) { + requirements.add(Requirement.make(AndroidUtilities.replaceTags(LocaleController.getString(R.string.PeerRequirementChannelCreatorTrue)))); + } + } else { + checkRequirement(requestPeerType.has_username, R.string.PeerRequirementGroupPublicTrue, R.string.PeerRequirementGroupPublicFalse); + checkRequirement(requestPeerType.forum, R.string.PeerRequirementForumTrue, R.string.PeerRequirementForumFalse); + if (requestPeerType.bot_participant != null && requestPeerType.bot_participant) { + requirements.add(Requirement.make(AndroidUtilities.replaceTags(LocaleController.getString(R.string.PeerRequirementGroupBotParticipant)))); + } + if (requestPeerType.creator != null && requestPeerType.creator) { + requirements.add(Requirement.make(AndroidUtilities.replaceTags(LocaleController.getString(R.string.PeerRequirementGroupCreatorTrue)))); + } + } + + if (!(requestPeerType.creator != null && requestPeerType.creator)) { + checkAdminRights(requestPeerType.user_admin_rights, isChannel, R.string.PeerRequirementUserRights, R.string.PeerRequirementUserRight); + } +// checkAdminRights(requestPeerType.bot_admin_rights, isChannel, R.string.PeerRequirementBotRights, R.string.PeerRequirementBotRight); + } + + if (!requirements.isEmpty()) { + HeaderCell headerCell = new HeaderCell(getContext(), 20); + headerCell.setText(LocaleController.getString("PeerRequirements", R.string.PeerRequirements)); + headerCell.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + addView(headerCell, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + + 
addView(emptyView(9, Theme.getColor(Theme.key_windowBackgroundWhite)), LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + for (Requirement requirement : requirements) { + addView(new RequirementCell(getContext(), requirement), LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + } + addView(emptyView(12, Theme.getColor(Theme.key_windowBackgroundWhite)), LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + + addView(emptyView(12, Theme.getThemedDrawable(getContext(), R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)), LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + } + } + } + + private View emptyView(int heightDp, int color) { + return emptyView(heightDp, new ColorDrawable(color)); + } + + private View emptyView(int heightDp, Drawable background) { + View view = new View(getContext()) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(heightDp), MeasureSpec.EXACTLY)); + } + }; + view.setBackground(background); + return view; + } + + private void checkRequirement(Boolean value, String positive, String negative) { + if (value != null) { + if (value) { + requirements.add(Requirement.make(AndroidUtilities.replaceTags(positive))); + } else { + requirements.add(Requirement.make(AndroidUtilities.replaceTags(negative))); + } + } + } + + private void checkRequirement(Boolean value, int positiveResId, int negativeResId) { + if (value != null) { + if (value) { + requirements.add(Requirement.make(AndroidUtilities.replaceTags(LocaleController.getString(positiveResId)))); + } else { + requirements.add(Requirement.make(AndroidUtilities.replaceTags(LocaleController.getString(negativeResId)))); + } + } + } + + public static CharSequence rightsToString(TLRPC.TL_chatAdminRights rights, boolean isChannel) { + ArrayList array = new ArrayList<>(); + if (rights.change_info) { + array.add(Requirement.make( + 1, + isChannel ? + LocaleController.getString("EditAdminChangeChannelInfo", R.string.EditAdminChangeChannelInfo) : + LocaleController.getString("EditAdminChangeGroupInfo", R.string.EditAdminChangeGroupInfo) + )); + } + if (rights.post_messages && isChannel) { + array.add(Requirement.make(1, LocaleController.getString("EditAdminPostMessages", R.string.EditAdminPostMessages))); + } + if (rights.edit_messages && isChannel) { + array.add(Requirement.make(1, LocaleController.getString("EditAdminEditMessages", R.string.EditAdminEditMessages))); + } + if (rights.delete_messages) { + array.add(Requirement.make(1, isChannel ? 
LocaleController.getString("EditAdminDeleteMessages", R.string.EditAdminDeleteMessages) : LocaleController.getString("EditAdminGroupDeleteMessages", R.string.EditAdminGroupDeleteMessages))); + } + if (rights.ban_users && !isChannel) { + array.add(Requirement.make(1, LocaleController.getString("EditAdminBanUsers", R.string.EditAdminBanUsers))); + } + if (rights.invite_users) { + array.add(Requirement.make(1, LocaleController.getString("EditAdminAddUsers", R.string.EditAdminAddUsers))); + } + if (rights.pin_messages && !isChannel) { + array.add(Requirement.make(1, LocaleController.getString("EditAdminPinMessages", R.string.EditAdminPinMessages))); + } + if (rights.add_admins) { + array.add(Requirement.make(1, LocaleController.getString("EditAdminAddAdmins", R.string.EditAdminAddAdmins))); + } + if (rights.anonymous && !isChannel) { + array.add(Requirement.make(1, LocaleController.getString("EditAdminSendAnonymously", R.string.EditAdminSendAnonymously))); + } + if (rights.manage_call) { + array.add(Requirement.make(1, LocaleController.getString("StartVoipChatPermission", R.string.StartVoipChatPermission))); + } + if (rights.manage_topics && !isChannel) { + array.add(Requirement.make(1, LocaleController.getString("ManageTopicsPermission", R.string.ManageTopicsPermission))); + } + + if (array.size() == 1) { + return array.get(0).text.toString().toLowerCase(); + } else if (!array.isEmpty()) { + SpannableStringBuilder string = new SpannableStringBuilder(); + for (int i = 0; i < array.size(); ++i) { + if (i > 0) { + string.append(", "); + } + string.append(array.get(i).text.toString().toLowerCase()); + } + return string; + } + return ""; + } + + private void checkAdminRights(TLRPC.TL_chatAdminRights value, boolean isChannel, CharSequence headerText, CharSequence headerSingleText) { + if (value == null) { + return; + } + + ArrayList<Requirement> rights = new ArrayList<>(); + if (value.change_info) { + rights.add(Requirement.make( + 1, + isChannel ? + LocaleController.getString("EditAdminChangeChannelInfo", R.string.EditAdminChangeChannelInfo) : + LocaleController.getString("EditAdminChangeGroupInfo", R.string.EditAdminChangeGroupInfo) + )); + } + if (value.post_messages && isChannel) { + rights.add(Requirement.make(1, LocaleController.getString("EditAdminPostMessages", R.string.EditAdminPostMessages))); + } + if (value.edit_messages && isChannel) { + rights.add(Requirement.make(1, LocaleController.getString("EditAdminEditMessages", R.string.EditAdminEditMessages))); + } + if (value.delete_messages) { + rights.add(Requirement.make(1, isChannel ? 
LocaleController.getString("EditAdminDeleteMessages", R.string.EditAdminDeleteMessages) : LocaleController.getString("EditAdminGroupDeleteMessages", R.string.EditAdminGroupDeleteMessages))); + } + if (value.ban_users && !isChannel) { + rights.add(Requirement.make(1, LocaleController.getString("EditAdminBanUsers", R.string.EditAdminBanUsers))); + } + if (value.invite_users) { + rights.add(Requirement.make(1, LocaleController.getString("EditAdminAddUsers", R.string.EditAdminAddUsers))); + } + if (value.pin_messages && !isChannel) { + rights.add(Requirement.make(1, LocaleController.getString("EditAdminPinMessages", R.string.EditAdminPinMessages))); + } + if (value.add_admins) { + rights.add(Requirement.make(1, LocaleController.getString("EditAdminAddAdmins", R.string.EditAdminAddAdmins))); + } + if (value.anonymous && !isChannel) { + rights.add(Requirement.make(1, LocaleController.getString("EditAdminSendAnonymously", R.string.EditAdminSendAnonymously))); + } + if (value.manage_call) { + rights.add(Requirement.make(1, LocaleController.getString("StartVoipChatPermission", R.string.StartVoipChatPermission))); + } + if (value.manage_topics && !isChannel) { + rights.add(Requirement.make(1, LocaleController.getString("ManageTopicsPermission", R.string.ManageTopicsPermission))); + } + + if (rights.size() == 1) { + requirements.add(Requirement.make(TextUtils.concat(headerSingleText, " ", rights.get(0).text))); + } else if (!rights.isEmpty()) { + SpannableStringBuilder string = SpannableStringBuilder.valueOf(headerText); + string.append(" "); + for (int i = 0; i < rights.size(); ++i) { + if (i > 0) { + string.append(", "); + } + string.append(rights.get(i).text.toString().toLowerCase()); + } + string.append("."); + requirements.add(Requirement.make(string)); + } + } + + private void checkAdminRights(TLRPC.TL_chatAdminRights value, boolean isChannel, int headerTextResId, int headerSingleTextResId) { + checkAdminRights(value, isChannel, AndroidUtilities.replaceTags(LocaleController.getString(headerTextResId)), AndroidUtilities.replaceTags(LocaleController.getString(headerSingleTextResId))); + } + + class RequirementCell extends LinearLayout { + + private ImageView imageView; + private TextView textView; + + public RequirementCell(Context context, Requirement requirement) { + super(context); + + setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + + setOrientation(HORIZONTAL); + + imageView = new ImageView(context); + imageView.setScaleType(ImageView.ScaleType.CENTER); + imageView.setImageResource(requirement.padding <= 0 ? 
R.drawable.list_check : R.drawable.list_circle); + imageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_windowBackgroundWhiteBlueHeader), PorterDuff.Mode.MULTIPLY)); + addView(imageView, LayoutHelper.createLinear(20, 20, 0, Gravity.TOP | Gravity.LEFT, 17 + requirement.padding * 16, -1, 0, 0)); + + textView = new TextView(context); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + textView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText2)); + textView.setSingleLine(false); + textView.setText(requirement.text); + addView(textView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 1, 6, 4, 24, 4)); + } + } +} + +class Requirement { + public int padding; + public CharSequence text; + + private Requirement(CharSequence text, int padding) { + this.text = text; + this.padding = padding; + } + + public static Requirement make(CharSequence text) { + return new Requirement(text, 0); + } + + public static Requirement make(int pad, CharSequence text) { + return new Requirement(text, pad); + } +} \ No newline at end of file diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ShadowSectionCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ShadowSectionCell.java index dbdc917ff3..33defbc859 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ShadowSectionCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ShadowSectionCell.java @@ -22,6 +22,12 @@ public class ShadowSectionCell extends View { private int size; + private int backgroundColor; + private Theme.ResourcesProvider resourcesProvider; + + private boolean top = true; + private boolean bottom = true; + public ShadowSectionCell(Context context) { this(context, 12, null); } @@ -36,8 +42,9 @@ public ShadowSectionCell(Context context, int s) { public ShadowSectionCell(Context context, int s, Theme.ResourcesProvider resourcesProvider) { super(context); - setBackgroundDrawable(Theme.getThemedDrawable(context, R.drawable.greydivider, Theme.getColor(Theme.key_windowBackgroundGrayShadow, resourcesProvider))); - size = s; + this.resourcesProvider = resourcesProvider; + this.size = s; + updateBackground(); } public ShadowSectionCell(Context context, int s, int backgroundColor) { @@ -46,12 +53,50 @@ public ShadowSectionCell(Context context, int s, int backgroundColor) { public ShadowSectionCell(Context context, int s, int backgroundColor, Theme.ResourcesProvider resourcesProvider) { super(context); - Drawable shadowDrawable = Theme.getThemedDrawable(context, R.drawable.greydivider, Theme.getColor(Theme.key_windowBackgroundGrayShadow, resourcesProvider)); - Drawable background = new ColorDrawable(backgroundColor); - CombinedDrawable combinedDrawable = new CombinedDrawable(background, shadowDrawable, 0, 0); - combinedDrawable.setFullsize(true); - setBackgroundDrawable(combinedDrawable); - size = s; + this.resourcesProvider = resourcesProvider; + this.backgroundColor = backgroundColor; + this.size = s; + updateBackground(); + } + + public void setTopBottom(boolean top, boolean bottom) { + if (this.top != top || this.bottom != bottom) { + this.top = top; + this.bottom = bottom; + updateBackground(); + } + } + + private void updateBackground() { + if (backgroundColor == 0) { + if (!top && !bottom) { + setBackground(null); + } else { + setBackground(Theme.getThemedDrawable(getContext(), getBackgroundResId(), Theme.getColor(Theme.key_windowBackgroundGrayShadow, resourcesProvider))); + } + } else { + if (!top && !bottom) { + 
setBackgroundColor(backgroundColor); + } else { + Drawable shadowDrawable = Theme.getThemedDrawable(getContext(), getBackgroundResId(), Theme.getColor(Theme.key_windowBackgroundGrayShadow, resourcesProvider)); + Drawable background = new ColorDrawable(backgroundColor); + CombinedDrawable combinedDrawable = new CombinedDrawable(background, shadowDrawable, 0, 0); + combinedDrawable.setFullsize(true); + setBackground(combinedDrawable); + } + } + } + + private int getBackgroundResId() { + if (top && bottom) { + return R.drawable.greydivider; + } else if (top) { + return R.drawable.greydivider_bottom; + } else if (bottom) { + return R.drawable.greydivider_top; + } else { + return R.drawable.transparent; + } } @Override diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ShareTopicCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ShareTopicCell.java index 209720964e..a9f3a574c7 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ShareTopicCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ShareTopicCell.java @@ -79,7 +79,7 @@ public void setTopic(TLRPC.Dialog dialog, TLRPC.TL_forumTopic topic, boolean che } if (topic.icon_emoji_id != 0) { imageView.setImageDrawable(null); - imageView.setAnimatedEmojiDrawable(new AnimatedEmojiDrawable(AnimatedEmojiDrawable.CACHE_TYPE_FORUM_TOPIC, UserConfig.selectedAccount, topic.icon_emoji_id)); + imageView.setAnimatedEmojiDrawable(new AnimatedEmojiDrawable(AnimatedEmojiDrawable.CACHE_TYPE_ALERT_PREVIEW_STATIC, UserConfig.selectedAccount, topic.icon_emoji_id)); } else { imageView.setAnimatedEmojiDrawable(null); ForumBubbleDrawable forumBubbleDrawable = new ForumBubbleDrawable(topic.icon_color); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedDocumentCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedDocumentCell.java index 8afc1ec061..42d7b3028f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedDocumentCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedDocumentCell.java @@ -280,16 +280,16 @@ public void setTextAndValueAndTypeAndThumb(String text, String value, String typ String backKey; if (resId == R.drawable.files_storage) { backKey = Theme.key_chat_attachLocationBackground; - iconKey = Theme.key_chat_attachLocationIcon; + iconKey = Theme.key_chat_attachIcon; } else if (resId == R.drawable.files_gallery) { backKey = Theme.key_chat_attachContactBackground; - iconKey = Theme.key_chat_attachContactIcon; + iconKey = Theme.key_chat_attachIcon; } else if (resId == R.drawable.files_music) { backKey = Theme.key_chat_attachAudioBackground; - iconKey = Theme.key_chat_attachAudioIcon; + iconKey = Theme.key_chat_attachIcon; } else if (resId == R.drawable.files_internal) { backKey = Theme.key_chat_attachGalleryBackground; - iconKey = Theme.key_chat_attachGalleryIcon; + iconKey = Theme.key_chat_attachIcon; } else { backKey = Theme.key_files_folderIconBackground; iconKey = Theme.key_files_folderIcon; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedLinkCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedLinkCell.java index 0f9854a511..b268fddead 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedLinkCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedLinkCell.java @@ -93,9 +93,8 @@ class CheckForLongPress implements Runnable { public void run() { if (checkingForLongPress && getParent() != null && currentPressCount == pressCount) { checkingForLongPress = false; - if 
(!NekoConfig.disableVibration.Bool()) { + if (!NekoConfig.disableVibration.Bool()) performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); - } if (pressedLinkIndex >= 0) { delegate.onLinkPress(links.get(pressedLinkIndex).toString(), true); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedPhotoVideoCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedPhotoVideoCell.java index 38df2cea6b..444ee8a7cd 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedPhotoVideoCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedPhotoVideoCell.java @@ -32,6 +32,7 @@ import org.telegram.messenger.DownloadController; import org.telegram.messenger.FileLoader; import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.ApplicationLoader; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedPhotoVideoCell2.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedPhotoVideoCell2.java index 989cc79996..fff78f7d70 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedPhotoVideoCell2.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedPhotoVideoCell2.java @@ -255,6 +255,9 @@ private boolean canAutoDownload(MessageObject messageObject) { public void setVideoText(String videoText, boolean drawVideoIcon) { this.videoText = videoText; showVideoLayout = videoText != null; + if (showVideoLayout && videoInfoLayot != null && !videoInfoLayot.getText().toString().equals(videoText)) { + videoInfoLayot = null; + } this.drawVideoIcon = drawVideoIcon; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerEmojiCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerEmojiCell.java index bbfe296710..a8d86708cd 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerEmojiCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerEmojiCell.java @@ -26,13 +26,13 @@ import org.telegram.messenger.FileLoader; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; import org.telegram.messenger.SendMessagesHelper; -import org.telegram.messenger.SharedConfig; import org.telegram.messenger.SvgHelper; import org.telegram.messenger.UserConfig; import org.telegram.tgnet.TLRPC; @@ -228,6 +228,7 @@ public void setSticker(TLRPC.Document document, SendMessagesHelper.ImportingStic updatePremiumStatus(false); imageView.setAlpha(alpha * premiumAlpha); if (drawInParentView) { + imageView.setInvalidateAll(true); imageView.setParentView((View) getParent()); } else { imageView.setParentView(this); @@ -331,6 +332,7 @@ public void showRequirePremiumAnimation() { protected void onAttachedToWindow() { super.onAttachedToWindow(); if (drawInParentView) { + imageView.setInvalidateAll(true); imageView.setParentView((View) getParent()); } else { imageView.setParentView(this); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetCell.java index 346a4893b5..651463df8d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetCell.java 
+++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetCell.java @@ -99,12 +99,7 @@ public StickerSetCell(Context context, Theme.ResourcesProvider resourcesProvider pinnedImageView = new ImageView(context); pinnedImageView.setVisibility(GONE); - if (option == 2) { - progressView = new RadialProgressView(getContext()); - progressView.setProgressColor(Theme.getColor(Theme.key_dialogProgressCircle)); - progressView.setSize(AndroidUtilities.dp(30)); - addView(progressView, LayoutHelper.createFrame(48, 48, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 0 : 12, 5, LocaleController.isRTL ? 12 : 0, 0)); - } else if (option != 0) { + if (option != 0) { optionsButton = new ImageView(context); optionsButton.setFocusable(false); optionsButton.setScaleType(ImageView.ScaleType.CENTER); @@ -585,7 +580,7 @@ protected void onDraw(Canvas canvas) { public void updateRightMargin() { sideButtons.measure(MeasureSpec.makeMeasureSpec(999999, MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(58), MeasureSpec.EXACTLY)); - final int margin = sideButtons.getMeasuredWidth(); + final int margin = AndroidUtilities.dp(26) + sideButtons.getMeasuredWidth(); if (LocaleController.isRTL) { ((MarginLayoutParams) textView.getLayoutParams()).leftMargin = margin; ((MarginLayoutParams) valueTextView.getLayoutParams()).leftMargin = margin; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetNameCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetNameCell.java index 01f9d8372d..fdec208472 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetNameCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetNameCell.java @@ -76,9 +76,9 @@ public StickerSetNameCell(Context context, boolean emoji, boolean supportRtl, Th } } if (supportRtl) { - lp = LayoutHelper.createFrameRelatively(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.START | Gravity.TOP, emoji ? 5 : 17, emoji ? 10 : 2, emoji ? 15 : 57, 0); + lp = LayoutHelper.createFrameRelatively(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.START | Gravity.TOP, emoji ? 5 : 17, emoji ? 5 : 2, emoji ? 15 : 57, 0); } else { - lp = LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, emoji ? 5 : 17, emoji ? 10 : 2, emoji ? 15 : 57, 0); + lp = LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, emoji ? 5 : 17, emoji ? 5 : 2, emoji ? 15 : 57, 0); } addView(textView, lp); @@ -98,10 +98,14 @@ public StickerSetNameCell(Context context, boolean emoji, boolean supportRtl, Th buttonView = new ImageView(context); buttonView.setScaleType(ImageView.ScaleType.CENTER); buttonView.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_emojiPanelStickerSetNameIcon), PorterDuff.Mode.SRC_IN)); + buttonView.setBackground(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector), Theme.RIPPLE_MASK_CIRCLE_TO_BOUND_EDGE)); if (supportRtl) { - lp = LayoutHelper.createFrameRelatively(24, 24, Gravity.TOP | Gravity.END, 0, 0, 11, 0); + lp = LayoutHelper.createFrameRelatively(24, 24, Gravity.TOP | Gravity.END, 0, 0, isEmoji ? 0 : 11, 0); } else { - lp = LayoutHelper.createFrame(24, 24, Gravity.TOP | Gravity.RIGHT, 0, 0, 11, 0); + lp = LayoutHelper.createFrame(24, 24, Gravity.TOP | Gravity.RIGHT, 0, 0, isEmoji ? 
0 : 11, 0); + } + if (isEmoji) { + buttonView.setTranslationY(AndroidUtilities.dp(4)); } addView(buttonView, lp); } @@ -192,7 +196,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { if (empty) { super.onMeasure(MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(1, MeasureSpec.EXACTLY)); } else { - super.onMeasure(MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(isEmoji ? 32 : 24), MeasureSpec.EXACTLY)); + super.onMeasure(MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(isEmoji ? 27 : 24), MeasureSpec.EXACTLY)); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCell.java index 669f48d0cc..076e018055 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCell.java @@ -37,6 +37,7 @@ public class TextCell extends FrameLayout { public final SimpleTextView textView; private final SimpleTextView subtitleView; public final AnimatedTextView valueTextView; + public final SimpleTextView valueSpoilersTextView; public final RLottieImageView imageView; private Switch checkBox; private ImageView valueImageView; @@ -55,6 +56,7 @@ public class TextCell extends FrameLayout { private float loadingProgress; private float drawLoadingProgress; + private int lastWidth; public TextCell(Context context) { this(context, 23, false, false, null); @@ -88,14 +90,24 @@ public TextCell(Context context, int left, boolean dialog, boolean needCheck, Th subtitleView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); addView(subtitleView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT)); - valueTextView = new AnimatedTextView(context); + valueTextView = new AnimatedTextView(context, false, false, true); valueTextView.setTextColor(Theme.getColor(dialog ? Theme.key_dialogTextBlue2 : Theme.key_windowBackgroundWhiteValueText, resourcesProvider)); valueTextView.setPadding(0, AndroidUtilities.dp(18), 0, AndroidUtilities.dp(18)); valueTextView.setTextSize(AndroidUtilities.dp(16)); valueTextView.setGravity(LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT); valueTextView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); + valueTextView.setTranslationY(AndroidUtilities.dp(-2)); addView(valueTextView); + valueSpoilersTextView = new SimpleTextView(context); + valueSpoilersTextView.setEllipsizeByGradient(18, false); + valueSpoilersTextView.setTextColor(Theme.getColor(dialog ? Theme.key_dialogTextBlue2 : Theme.key_windowBackgroundWhiteValueText, resourcesProvider)); + valueSpoilersTextView.setGravity(LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT); + valueSpoilersTextView.setTextSize(16); + valueSpoilersTextView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); + valueSpoilersTextView.setVisibility(GONE); + addView(valueSpoilersTextView); + imageView = new RLottieImageView(context); imageView.setScaleType(ImageView.ScaleType.CENTER); imageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(dialog ? 
Theme.key_dialogIcon : Theme.key_windowBackgroundWhiteGrayIcon, resourcesProvider), PorterDuff.Mode.SRC_IN)); @@ -143,8 +155,10 @@ public ImageView getValueImageView() { } public void setPrioritizeTitleOverValue(boolean prioritizeTitleOverValue) { - this.prioritizeTitleOverValue = prioritizeTitleOverValue; - requestLayout(); + if (this.prioritizeTitleOverValue != prioritizeTitleOverValue) { + this.prioritizeTitleOverValue = prioritizeTitleOverValue; + requestLayout(); + } } @Override @@ -152,14 +166,23 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { int width = MeasureSpec.getSize(widthMeasureSpec); int height = AndroidUtilities.dp(heightDp); + if (lastWidth != 0 && lastWidth != width && valueText != null) { + valueTextView.setText(TextUtils.ellipsize(valueText, valueTextView.getPaint(), AndroidUtilities.displaySize.x / 2.5f, TextUtils.TruncateAt.END), false); + } + lastWidth = width; + + int valueWidth; if (prioritizeTitleOverValue) { textView.measure(MeasureSpec.makeMeasureSpec(width - AndroidUtilities.dp(71 + leftPadding), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(20), MeasureSpec.EXACTLY)); subtitleView.measure(MeasureSpec.makeMeasureSpec(width - AndroidUtilities.dp(71 + leftPadding), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(20), MeasureSpec.EXACTLY)); valueTextView.measure(MeasureSpec.makeMeasureSpec(width - AndroidUtilities.dp(103 + leftPadding) - textView.getTextWidth(), LocaleController.isRTL ? MeasureSpec.AT_MOST : MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(20), MeasureSpec.EXACTLY)); + valueSpoilersTextView.measure(MeasureSpec.makeMeasureSpec(width - AndroidUtilities.dp(103 + leftPadding) - textView.getTextWidth(), LocaleController.isRTL ? MeasureSpec.AT_MOST : MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(20), MeasureSpec.EXACTLY)); } else { valueTextView.measure(MeasureSpec.makeMeasureSpec(width - AndroidUtilities.dp(leftPadding), LocaleController.isRTL ? MeasureSpec.AT_MOST : MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(20), MeasureSpec.EXACTLY)); - textView.measure(MeasureSpec.makeMeasureSpec(width - AndroidUtilities.dp(71 + leftPadding) - valueTextView.width(), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(20), MeasureSpec.EXACTLY)); - subtitleView.measure(MeasureSpec.makeMeasureSpec(width - AndroidUtilities.dp(71 + leftPadding) - valueTextView.width(), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(20), MeasureSpec.EXACTLY)); + valueSpoilersTextView.measure(MeasureSpec.makeMeasureSpec(width - AndroidUtilities.dp(leftPadding), LocaleController.isRTL ? 
MeasureSpec.AT_MOST : MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(20), MeasureSpec.EXACTLY)); + valueWidth = Math.max(valueTextView.width(), valueSpoilersTextView.getTextWidth()); + textView.measure(MeasureSpec.makeMeasureSpec(Math.max(0, width - AndroidUtilities.dp(71 + leftPadding) - valueWidth), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(20), MeasureSpec.EXACTLY)); + subtitleView.measure(MeasureSpec.makeMeasureSpec(width - AndroidUtilities.dp(71 + leftPadding) - valueWidth, MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(20), MeasureSpec.EXACTLY)); } if (imageView.getVisibility() == VISIBLE) { imageView.measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(height, MeasureSpec.AT_MOST)); @@ -186,13 +209,14 @@ protected void onLayout(boolean changed, int left, int top, int right, int botto int height = bottom - top; int width = right - left; - int viewTop = (height - valueTextView.getTextHeight()) / 2; - int viewLeft = LocaleController.isRTL ? AndroidUtilities.dp(leftPadding) : 0; + int viewTop = (height - Math.max(valueSpoilersTextView.getTextHeight(), valueTextView.getTextHeight())) / 2; + int viewLeft = LocaleController.isRTL ? AndroidUtilities.dp(leftPadding) : width - valueTextView.getMeasuredWidth() - AndroidUtilities.dp(leftPadding); if (prioritizeTitleOverValue && !LocaleController.isRTL) { - viewLeft = width - valueTextView.getMeasuredWidth() - AndroidUtilities.dp(leftPadding); + viewLeft = width - valueTextView.getMeasuredWidth() - AndroidUtilities.dp(leftPadding); } valueTextView.layout(viewLeft, viewTop, viewLeft + valueTextView.getMeasuredWidth(), viewTop + valueTextView.getMeasuredHeight()); - + viewLeft = LocaleController.isRTL ? AndroidUtilities.dp(leftPadding) : width - valueSpoilersTextView.getMeasuredWidth() - AndroidUtilities.dp(leftPadding); + valueSpoilersTextView.layout(viewLeft, viewTop, viewLeft + valueSpoilersTextView.getMeasuredWidth(), viewTop + valueSpoilersTextView.getMeasuredHeight()); if (LocaleController.isRTL) { viewLeft = getMeasuredWidth() - textView.getMeasuredWidth() - AndroidUtilities.dp(imageView.getVisibility() == VISIBLE ? 
offsetFromImage : leftPadding); @@ -239,12 +263,15 @@ public void setColors(String icon, String text) { } } + private CharSequence valueText; + public void setText(CharSequence text, boolean divider) { imageLeft = 21; textView.setText(text); - valueTextView.setText(null, false); + valueTextView.setText(valueText = null, false); imageView.setVisibility(GONE); valueTextView.setVisibility(GONE); + valueSpoilersTextView.setVisibility(GONE); valueImageView.setVisibility(GONE); needDivider = divider; setWillNotDraw(!needDivider); @@ -254,7 +281,7 @@ public void setTextAndIcon(CharSequence text, int resId, boolean divider) { imageLeft = 21; offsetFromImage = 71; textView.setText(text); - valueTextView.setText(null, false); + valueTextView.setText(valueText = null, false); if (resId != 0) { imageView.setImageResource(resId); imageView.setVisibility(VISIBLE); @@ -263,6 +290,19 @@ public void setTextAndIcon(CharSequence text, int resId, boolean divider) { imageView.setVisibility(GONE); } valueTextView.setVisibility(GONE); + valueSpoilersTextView.setVisibility(GONE); + valueImageView.setVisibility(GONE); + needDivider = divider; + setWillNotDraw(!needDivider); + } + + public void setTextAndColorfulIcon(String text, int resId, int color, boolean divider) { + imageLeft = 21; + offsetFromImage = 71; + textView.setText(text); + valueTextView.setText(valueText = null, false); + setColorfulIcon(color, resId); + valueTextView.setVisibility(GONE); valueImageView.setVisibility(GONE); needDivider = divider; setWillNotDraw(!needDivider); @@ -272,7 +312,7 @@ public void setTextAndIcon(CharSequence text, Drawable drawable, boolean divider offsetFromImage = 68; imageLeft = 18; textView.setText(text); - valueTextView.setText(null, false); + valueTextView.setText(valueText = null, false); imageView.setColorFilter(null); if (drawable instanceof RLottieDrawable) { imageView.setAnimation((RLottieDrawable) drawable); @@ -303,8 +343,9 @@ public void setTextAndValue(CharSequence text, String value, boolean animated, b imageLeft = 21; offsetFromImage = 71; textView.setText(text); - valueTextView.setText(value, animated); + valueTextView.setText(TextUtils.ellipsize(valueText = value, valueTextView.getPaint(), AndroidUtilities.displaySize.x / 2.5f, TextUtils.TruncateAt.END), animated); valueTextView.setVisibility(VISIBLE); + valueSpoilersTextView.setVisibility(GONE); imageView.setVisibility(GONE); valueImageView.setVisibility(GONE); needDivider = divider; @@ -314,6 +355,58 @@ public void setTextAndValue(CharSequence text, String value, boolean animated, b } } + public void setTextAndValueAndColorfulIcon(String text, CharSequence value, boolean animated, int resId, int color, boolean divider) { + imageLeft = 21; + offsetFromImage = 71; + textView.setText(text); + valueTextView.setText(TextUtils.ellipsize(valueText = value, valueTextView.getPaint(), AndroidUtilities.displaySize.x / 2.5f, TextUtils.TruncateAt.END), animated); + valueTextView.setVisibility(VISIBLE); + valueSpoilersTextView.setVisibility(GONE); + setColorfulIcon(color, resId); + valueImageView.setVisibility(GONE); + needDivider = divider; + setWillNotDraw(!needDivider); + if (checkBox != null) { + checkBox.setVisibility(GONE); + } + } + + public void setTextAndSpoilersValueAndIcon(String text, CharSequence value, int resId, boolean divider) { + imageLeft = 21; + offsetFromImage = 71; + textView.setText(text); + valueSpoilersTextView.setVisibility(VISIBLE); + valueSpoilersTextView.setText(value); + valueTextView.setVisibility(GONE); + 
valueImageView.setVisibility(GONE); + imageView.setVisibility(VISIBLE); + imageView.setTranslationX(0); + imageView.setTranslationY(0); + imageView.setPadding(0, AndroidUtilities.dp(7), 0, 0); + imageView.setImageResource(resId); + needDivider = divider; + setWillNotDraw(!needDivider); + if (checkBox != null) { + checkBox.setVisibility(GONE); + } + } + + public void setTextAndSpoilersValueAndColorfulIcon(String text, CharSequence value, int resId, int color, boolean divider) { + imageLeft = 21; + offsetFromImage = 71; + textView.setText(text); + valueSpoilersTextView.setVisibility(VISIBLE); + valueSpoilersTextView.setText(value); + valueTextView.setVisibility(GONE); + setColorfulIcon(color, resId); + valueImageView.setVisibility(GONE); + needDivider = divider; + setWillNotDraw(!needDivider); + if (checkBox != null) { + checkBox.setVisibility(GONE); + } + } + public void setTextAndValueAndIcon(CharSequence text, String value, int resId, boolean divider) { setTextAndValueAndIcon(text, value, false, resId, divider); } @@ -322,8 +415,9 @@ public void setTextAndValueAndIcon(CharSequence text, String value, boolean anim imageLeft = 21; offsetFromImage = 71; textView.setText(text); - valueTextView.setText(value, animated); + valueTextView.setText(TextUtils.ellipsize(valueText = value, valueTextView.getPaint(), AndroidUtilities.displaySize.x / 2.5f, TextUtils.TruncateAt.END), animated); valueTextView.setVisibility(VISIBLE); + valueSpoilersTextView.setVisibility(GONE); valueImageView.setVisibility(GONE); imageView.setVisibility(VISIBLE); imageView.setTranslationX(0); @@ -341,11 +435,11 @@ public void setColorfulIcon(int color, int resId) { offsetFromImage = 65; imageView.setVisibility(VISIBLE); imageView.setPadding(AndroidUtilities.dp(2), AndroidUtilities.dp(2), AndroidUtilities.dp(2), AndroidUtilities.dp(2)); - imageView.setTranslationX(AndroidUtilities.dp(-3)); + imageView.setTranslationX(AndroidUtilities.dp(LocaleController.isRTL ? 
0 : -3)); imageView.setTranslationY(AndroidUtilities.dp(6)); imageView.setImageResource(resId); imageView.setColorFilter(new PorterDuffColorFilter(Color.WHITE, PorterDuff.Mode.SRC_IN)); - imageView.setBackground(Theme.createRoundRectDrawable(AndroidUtilities.dp(8), color)); + imageView.setBackground(Theme.createRoundRectDrawable(AndroidUtilities.dp(9), color)); } public void setTextAndCheck(CharSequence text, boolean checked, boolean divider) { @@ -368,6 +462,7 @@ public void setTextAndCheckAndIcon(CharSequence text, boolean checked, int resId offsetFromImage = 71; textView.setText(text); valueTextView.setVisibility(GONE); + valueSpoilersTextView.setVisibility(GONE); valueImageView.setVisibility(GONE); if (checkBox != null) { checkBox.setVisibility(VISIBLE); @@ -385,6 +480,7 @@ public void setTextAndCheckAndIcon(CharSequence text, boolean checked, Drawable offsetFromImage = 71; textView.setText(text); valueTextView.setVisibility(GONE); + valueSpoilersTextView.setVisibility(GONE); valueImageView.setVisibility(GONE); if (checkBox != null) { checkBox.setVisibility(VISIBLE); @@ -401,10 +497,11 @@ public void setTextAndValueDrawable(CharSequence text, Drawable drawable, boolea imageLeft = 21; offsetFromImage = 71; textView.setText(text); - valueTextView.setText(null, false); + valueTextView.setText(valueText = null, false); valueImageView.setVisibility(VISIBLE); valueImageView.setImageDrawable(drawable); valueTextView.setVisibility(GONE); + valueSpoilersTextView.setVisibility(GONE); imageView.setVisibility(GONE); imageView.setPadding(0, AndroidUtilities.dp(7), 0, 0); needDivider = divider; @@ -424,12 +521,26 @@ protected void onDraw(Canvas canvas) { @Override public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { super.onInitializeAccessibilityNodeInfo(info); - final CharSequence text = textView.getText(); + CharSequence text = textView.getText(); if (!TextUtils.isEmpty(text)) { final CharSequence valueText = valueTextView.getText(); if (!TextUtils.isEmpty(valueText)) { - info.setText(text + ": " + valueText); - } else { + text = TextUtils.concat(text, ": ", valueText); + } + } + if (checkBox != null) { + info.setClassName("android.widget.Switch"); + info.setCheckable(true); + info.setChecked(checkBox.isChecked()); + StringBuilder sb = new StringBuilder(); + sb.append(textView.getText()); + if (!TextUtils.isEmpty(valueTextView.getText())) { + sb.append('\n'); + sb.append(valueTextView.getText()); + } + info.setContentDescription(sb); + } else { + if (!TextUtils.isEmpty(text)) { info.setText(text); } } @@ -460,6 +571,9 @@ public void showEnabledAlpha(boolean show) { if (valueTextView != null) { valueTextView.animate().alpha(alpha).start(); } + if (valueSpoilersTextView != null) { + valueSpoilersTextView.animate().alpha(alpha).start(); + } if (valueImageView != null) { valueImageView.animate().alpha(alpha).start(); } @@ -473,6 +587,9 @@ public void showEnabledAlpha(boolean show) { if (valueTextView != null) { valueTextView.setAlpha(alpha); } + if (valueSpoilersTextView != null) { + valueSpoilersTextView.setAlpha(alpha); + } if (valueImageView != null) { valueImageView.setAlpha(alpha); } @@ -546,7 +663,12 @@ protected void dispatchDraw(Canvas canvas) { float alpha = (0.6f + 0.4f * loadingProgress) * drawLoadingProgress; paint.setAlpha((int) (255 * alpha)); int cy = getMeasuredHeight() >> 1; - AndroidUtilities.rectTmp.set(getMeasuredWidth() - AndroidUtilities.dp(11) - AndroidUtilities.dp(loadingSize), cy - AndroidUtilities.dp(3), getMeasuredWidth() - AndroidUtilities.dp(11), cy 
+ AndroidUtilities.dp(3)); + AndroidUtilities.rectTmp.set( + getMeasuredWidth() - AndroidUtilities.dp(21) - AndroidUtilities.dp(loadingSize), + cy - AndroidUtilities.dp(3), + getMeasuredWidth() - AndroidUtilities.dp(21), + cy + AndroidUtilities.dp(3) + ); if (LocaleController.isRTL) { AndroidUtilities.rectTmp.left = getMeasuredWidth() - AndroidUtilities.rectTmp.left; AndroidUtilities.rectTmp.right = getMeasuredWidth() - AndroidUtilities.rectTmp.right; @@ -555,6 +677,7 @@ protected void dispatchDraw(Canvas canvas) { invalidate(); } valueTextView.setAlpha(1f - drawLoadingProgress); + valueSpoilersTextView.setAlpha(1f - drawLoadingProgress); super.dispatchDraw(canvas); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCheckCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCheckCell.java index eb87584d0a..c7af4715c9 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCheckCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCheckCell.java @@ -13,6 +13,7 @@ import android.animation.ObjectAnimator; import android.content.Context; import android.graphics.Canvas; +import android.graphics.Color; import android.graphics.Paint; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; @@ -27,6 +28,7 @@ import android.view.View; import android.view.accessibility.AccessibilityNodeInfo; import android.widget.FrameLayout; +import android.widget.ImageView; import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; @@ -37,10 +39,12 @@ import org.telegram.ui.Components.CheckBoxSquare; import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.RLottieImageView; import org.telegram.ui.Components.Switch; import org.telegram.ui.Components.ViewHelper; import java.util.ArrayList; +import java.util.Locale; public class TextCheckCell extends FrameLayout { private boolean isAnimatingToThumbInsteadOfTouch; @@ -60,6 +64,8 @@ public class TextCheckCell extends FrameLayout { private boolean drawCheckRipple; private int padding; private Theme.ResourcesProvider resourcesProvider; + ImageView imageView; + private boolean isRTL; public static final Property ANIMATION_PROGRESS = new AnimationProperties.FloatProperty("animationProgress") { @Override @@ -127,6 +133,8 @@ public TextCheckCell(Context context, int padding, boolean dialog, Theme.Resourc } setClipChildren(false); + + isRTL = LocaleController.isRTL; } @Override @@ -135,6 +143,10 @@ public void setEnabled(boolean enabled) { checkBox.setEnabled(enabled); } + public void setCheckBoxIcon(int icon) { + checkBox.setIcon(icon); + } + @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { if (isMultiline) { @@ -173,6 +185,11 @@ public void setTextAndCheck(String text, boolean checked, boolean divider) { } public void updateRTL() { + if (isRTL == LocaleController.isRTL) { + return; + } + isRTL = LocaleController.isRTL; + textView.setGravity((LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.CENTER_VERTICAL); removeView(textView); addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 70 : padding, 0, LocaleController.isRTL ? 
padding : 70, 0)); @@ -363,7 +380,11 @@ protected void onDraw(Canvas canvas) { canvas.drawCircle(cx, cy, animatedRad, animationPaint); } if (needDivider) { - canvas.drawLine(0, getMeasuredHeight() - 3, getMeasuredWidth(), getMeasuredHeight() - 3, Theme.dividerPaint); + if (imageView != null) { + canvas.drawLine(LocaleController.isRTL ? 0 : padding, getMeasuredHeight() - 1, getMeasuredWidth() - (LocaleController.isRTL ? padding : 0), getMeasuredHeight() - 1, Theme.dividerPaint); + } else { + canvas.drawLine(0, getMeasuredHeight() - 3, getMeasuredWidth(), getMeasuredHeight() - 3, Theme.dividerPaint); + } } } @@ -399,4 +420,20 @@ protected void onDetachedFromWindow() { super.onDetachedFromWindow(); attached = false; } + + public void setColorfullIcon(int color, int resId) { + if (imageView == null) { + imageView = new RLottieImageView(getContext()); + imageView.setScaleType(ImageView.ScaleType.CENTER); + addView(imageView, LayoutHelper.createFrame(29, 29, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.CENTER_VERTICAL, 19, 0, 19, 0)); + padding = AndroidUtilities.dp(65); + ((MarginLayoutParams)textView.getLayoutParams()).leftMargin = LocaleController.isRTL ? 70 : padding; + ((MarginLayoutParams)textView.getLayoutParams()).rightMargin = LocaleController.isRTL ? padding: 70; + } + imageView.setVisibility(VISIBLE); + imageView.setPadding(AndroidUtilities.dp(2), AndroidUtilities.dp(2), AndroidUtilities.dp(2), AndroidUtilities.dp(2)); + imageView.setImageResource(resId); + imageView.setColorFilter(new PorterDuffColorFilter(Color.WHITE, PorterDuff.Mode.SRC_IN)); + imageView.setBackground(Theme.createRoundRectDrawable(AndroidUtilities.dp(9), color)); + } } \ No newline at end of file diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCheckCell2.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCheckCell2.java index efd357229e..adebfe7db3 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCheckCell2.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCheckCell2.java @@ -10,16 +10,24 @@ import android.content.Context; import android.graphics.Canvas; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.drawable.Drawable; import android.text.TextUtils; import android.util.TypedValue; import android.view.Gravity; +import android.view.View; import android.view.accessibility.AccessibilityNodeInfo; import android.widget.FrameLayout; +import android.widget.LinearLayout; import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.LocaleController; +import org.telegram.messenger.R; import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.AnimatedTextView; +import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.Switch; @@ -31,6 +39,47 @@ public class TextCheckCell2 extends FrameLayout { private boolean needDivider; private boolean isMultiline; + private LinearLayout collapseViewContainer; + private AnimatedTextView animatedTextView; + private View collapsedArrow; + private View checkBoxClickArea; + + public void setCollapseArrow(String text, boolean collapsed, Runnable onCheckClick) { + if (collapseViewContainer == null) { + collapseViewContainer = new LinearLayout(getContext()); + collapseViewContainer.setOrientation(LinearLayout.HORIZONTAL); + animatedTextView = new AnimatedTextView(getContext(), false, true, true); + 
animatedTextView.setTextSize(AndroidUtilities.dp(14)); + animatedTextView.getDrawable().setAllowCancel(true); + animatedTextView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); + animatedTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + collapseViewContainer.addView(animatedTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT,20)); + + collapsedArrow = new View(getContext()); + Drawable drawable = getContext().getResources().getDrawable(R.drawable.arrow_more).mutate(); + drawable.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText), PorterDuff.Mode.MULTIPLY)); + collapsedArrow.setBackground(drawable); + collapseViewContainer.addView(collapsedArrow, LayoutHelper.createLinear(16, 16, Gravity.CENTER_VERTICAL)); + collapseViewContainer.setClipChildren(false); + setClipChildren(false); + addView(collapseViewContainer, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL)); + + checkBoxClickArea = new View(getContext()) { + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + canvas.drawLine(0, AndroidUtilities.dp(14), 2, getMeasuredHeight()- AndroidUtilities.dp(14), Theme.dividerPaint); + } + }; + checkBoxClickArea.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_listSelector), 2)); + addView(checkBoxClickArea, LayoutHelper.createFrame(76, LayoutHelper.MATCH_PARENT, LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT)); + } + animatedTextView.setText(text); + collapsedArrow.animate().cancel(); + collapsedArrow.animate().rotation(collapsed ? 0 : 180).setDuration(340).setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT).start(); + checkBoxClickArea.setOnClickListener(v -> onCheckClick.run()); + } + public TextCheckCell2(Context context) { this(context, null); } @@ -46,7 +95,7 @@ public TextCheckCell2(Context context, Theme.ResourcesProvider resourcesProvider textView.setSingleLine(true); textView.setGravity((LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.CENTER_VERTICAL); textView.setEllipsize(TextUtils.TruncateAt.END); - addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 64 : 21, 0, LocaleController.isRTL ? 21 : 64, 0)); + addView(textView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, LocaleController.isRTL ? 64 : 21, 0, LocaleController.isRTL ? 
21 : 64, 0)); valueTextView = new TextView(context); valueTextView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText2, resourcesProvider)); @@ -73,10 +122,26 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { } } + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + if (collapseViewContainer != null) { + if (LocaleController.isRTL) { + collapseViewContainer.setTranslationX(textView.getLeft() - collapseViewContainer.getMeasuredWidth() - AndroidUtilities.dp(4)); + } else { + collapseViewContainer.setTranslationX(textView.getRight() + AndroidUtilities.dp(4)); + } + } + } + public void setTextAndCheck(String text, boolean checked, boolean divider) { + setTextAndCheck(text, checked, divider, false); + } + + public void setTextAndCheck(String text, boolean checked, boolean divider, boolean animated) { textView.setText(text); isMultiline = false; - checkBox.setChecked(checked, false); + checkBox.setChecked(checked, animated); needDivider = divider; valueTextView.setVisibility(GONE); LayoutParams layoutParams = (LayoutParams) textView.getLayoutParams(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextDetailCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextDetailCell.java index a3dd0c4f67..21fed2c04c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextDetailCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextDetailCell.java @@ -31,6 +31,8 @@ import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.LinkSpanDrawable; +import tw.nekomimi.nekogram.NekoConfig; + public class TextDetailCell extends FrameLayout { private final TextView textView; @@ -70,7 +72,8 @@ public TextDetailCell(Context context, Theme.ResourcesProvider resourcesProvider valueTextView.setOnLinkLongPressListener(span -> { if (span != null) { try { - performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); + if (!NekoConfig.disableVibration.Bool()) + performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); } catch (Exception ignore) {}; span.onClick(valueTextView); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextDetailSettingsCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextDetailSettingsCell.java index 168ad129b1..6b5fab6ba7 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextDetailSettingsCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextDetailSettingsCell.java @@ -101,7 +101,7 @@ public void setMultilineDetail(boolean value) { } } - public void setTextAndValue(String text, CharSequence value, boolean divider) { + public void setTextAndValue(CharSequence text, CharSequence value, boolean divider) { textView.setText(text); if (StrUtil.isBlank(value)) { valueTextView.setVisibility(GONE); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextInfoPrivacyCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextInfoPrivacyCell.java index 91d838193e..0d559f9975 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextInfoPrivacyCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextInfoPrivacyCell.java @@ -39,6 +39,7 @@ public class TextInfoPrivacyCell extends FrameLayout { private int topPadding = 10; private int bottomPadding = 17; private int fixedSize; + private boolean isRTL; private 
CharSequence text; private final Theme.ResourcesProvider resourcesProvider; @@ -76,9 +77,23 @@ protected void onDraw(Canvas canvas) { textView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, padding, 0, padding, 0)); + isRTL = LocaleController.isRTL; + setWillNotDraw(false); } + public void updateRTL() { + if (isRTL == LocaleController.isRTL) { + return; + } + isRTL = LocaleController.isRTL; + + textView.setGravity(LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT); + FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) textView.getLayoutParams(); + layoutParams.gravity = (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP; + textView.setLayoutParams(layoutParams); + } + @Override protected void onDraw(Canvas canvas) { if (links != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextRadioCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextRadioCell.java index 90c53c11e7..e5933a3945 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextRadioCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextRadioCell.java @@ -24,7 +24,6 @@ import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.RadioButton; -import org.telegram.ui.Components.Switch; import java.util.ArrayList; @@ -42,6 +41,8 @@ public class TextRadioCell extends FrameLayout { private float lastTouchX; private ObjectAnimator animator; private boolean drawCheckRipple; + private boolean isRTL; + private int padding; public static final Property ANIMATION_PROGRESS = new AnimationProperties.FloatProperty("animationProgress") { @Override @@ -67,6 +68,8 @@ public TextRadioCell(Context context, int padding) { public TextRadioCell(Context context, int padding, boolean dialog) { super(context); + this.padding = padding; + textView = new TextView(context); textView.setTextColor(Theme.getColor(dialog ? Theme.key_dialogTextBlack : Theme.key_windowBackgroundWhiteBlackText)); textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); @@ -94,9 +97,33 @@ public TextRadioCell(Context context, int padding, boolean dialog) { radioButton.setColor(Theme.getColor(Theme.key_radioBackground), Theme.getColor(Theme.key_radioBackgroundChecked)); addView(radioButton, LayoutHelper.createFrame(20, 20, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.CENTER_VERTICAL, 22, 0, 22, 0)); + isRTL = LocaleController.isRTL; + setClipChildren(false); } + public void updateRTL() { + if (isRTL == LocaleController.isRTL) { + return; + } + isRTL = LocaleController.isRTL; + textView.setGravity((LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.CENTER_VERTICAL); + FrameLayout.LayoutParams textViewLayout = (FrameLayout.LayoutParams) textView.getLayoutParams(); + textViewLayout.gravity = (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP; + textViewLayout.leftMargin = AndroidUtilities.dp(LocaleController.isRTL ? padding : 64); + textViewLayout.rightMargin = AndroidUtilities.dp(LocaleController.isRTL ? 64 : padding); + textView.setLayoutParams(textViewLayout); + valueTextView.setGravity(LocaleController.isRTL ? 
Gravity.RIGHT : Gravity.LEFT); + FrameLayout.LayoutParams valueTextViewLayout = (FrameLayout.LayoutParams) valueTextView.getLayoutParams(); + valueTextViewLayout.gravity = (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP; + valueTextViewLayout.leftMargin = AndroidUtilities.dp(LocaleController.isRTL ? padding : 64); + valueTextViewLayout.rightMargin = AndroidUtilities.dp(LocaleController.isRTL ? 64 : padding); + valueTextView.setLayoutParams(valueTextViewLayout); + FrameLayout.LayoutParams radioButtonLayout = (FrameLayout.LayoutParams) radioButton.getLayoutParams(); + radioButtonLayout.gravity = (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.CENTER_VERTICAL; + radioButton.setLayoutParams(radioButtonLayout); + } + @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { if (isMultiline) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextSelectionHelper.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextSelectionHelper.java index b4b82d95d3..07f5537a2c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextSelectionHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextSelectionHelper.java @@ -1575,7 +1575,7 @@ protected void drawSelection(Canvas canvas, StaticLayout layout, int selectionSt canvas.clipOutRect(AndroidUtilities.rectTmp2); } } else { - if (hasStart) { + if (hasStart && !layout.isRtlCharAt(selectionStart)) { AndroidUtilities.rectTmp2.set((int) startLeft, (int) (startBottom - R), (int) Math.min(startLeft + R, layout.getLineRight(startLine)), (int) startBottom); AndroidUtilities.rectTmp.set(AndroidUtilities.rectTmp2); selectionHandlePath.addRect(AndroidUtilities.rectTmp, Path.Direction.CW); @@ -1585,7 +1585,7 @@ protected void drawSelection(Canvas canvas, StaticLayout layout, int selectionSt } } - if (hasEnd) { + if (hasEnd && !layout.isRtlCharAt(selectionEnd)) { AndroidUtilities.rectTmp2.set((int) Math.max(endRight - R, layout.getLineLeft(endLine)), (int) (endBottom - R), (int) endRight, (int) endBottom); AndroidUtilities.rectTmp.set(AndroidUtilities.rectTmp2); selectionHandlePath.addRect(AndroidUtilities.rectTmp, Path.Direction.CW); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextSettingsCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextSettingsCell.java index 02cfc13cb4..06daba2707 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextSettingsCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextSettingsCell.java @@ -12,6 +12,7 @@ import android.animation.ObjectAnimator; import android.content.Context; import android.graphics.Canvas; +import android.graphics.Color; import android.graphics.Paint; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; @@ -42,6 +43,7 @@ public class TextSettingsCell extends FrameLayout { private TextView textView; private AnimatedTextView valueTextView; private ImageView imageView; + private boolean imageViewIsColorful; private BackupImageView valueBackupImageView; private ImageView valueImageView; private boolean needDivider; @@ -94,14 +96,14 @@ public TextSettingsCell(Context context, int padding, Theme.ResourcesProvider re imageView = new RLottieImageView(context); imageView.setScaleType(ImageView.ScaleType.CENTER); - imageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_windowBackgroundWhiteGrayIcon), PorterDuff.Mode.MULTIPLY)); + imageView.setColorFilter(new 
PorterDuffColorFilter(Theme.getColor(Theme.key_windowBackgroundWhiteGrayIcon, resourcesProvider), PorterDuff.Mode.MULTIPLY)); imageView.setVisibility(GONE); addView(imageView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.CENTER_VERTICAL, 21, 0, 21, 0)); valueImageView = new ImageView(context); valueImageView.setScaleType(ImageView.ScaleType.CENTER); valueImageView.setVisibility(INVISIBLE); - valueImageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_windowBackgroundWhiteGrayIcon), PorterDuff.Mode.SRC_IN)); + valueImageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_windowBackgroundWhiteGrayIcon, resourcesProvider), PorterDuff.Mode.SRC_IN)); addView(valueImageView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, (LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT) | Gravity.CENTER_VERTICAL, padding, 0, padding, 0)); } @@ -120,7 +122,11 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { } if (imageView.getVisibility() == VISIBLE) { - imageView.measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(getMeasuredHeight(), MeasureSpec.AT_MOST)); + if (imageViewIsColorful) { + imageView.measure(MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(28), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(28), MeasureSpec.EXACTLY)); + } else { + imageView.measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(getMeasuredHeight(), MeasureSpec.AT_MOST)); + } } if (valueBackupImageView != null) { @@ -213,6 +219,30 @@ public void setTextAndIcon(CharSequence text, int resId, boolean divider) { public void setIcon(int resId) { MarginLayoutParams params = (MarginLayoutParams) textView.getLayoutParams(); + imageViewIsColorful = false; + if (resId == 0) { + imageView.setVisibility(GONE); + if (LocaleController.isRTL) { + params.rightMargin = AndroidUtilities.dp(this.padding); + } else { + params.leftMargin = AndroidUtilities.dp(this.padding); + } + } else { + imageView.setImageResource(resId); + imageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_windowBackgroundWhiteGrayIcon, resourcesProvider), PorterDuff.Mode.MULTIPLY)); + imageView.setBackground(null); + imageView.setVisibility(VISIBLE); + if (LocaleController.isRTL) { + params.rightMargin = AndroidUtilities.dp(71); + } else { + params.leftMargin = AndroidUtilities.dp(71); + } + } + } + + public void setColorfulIcon(int resId, int color) { + MarginLayoutParams params = (MarginLayoutParams) textView.getLayoutParams(); + imageViewIsColorful = true; if (resId == 0) { imageView.setVisibility(GONE); if (LocaleController.isRTL) { @@ -221,7 +251,9 @@ public void setIcon(int resId) { params.leftMargin = AndroidUtilities.dp(this.padding); } } else { + imageView.setBackground(Theme.createRoundRectDrawable(AndroidUtilities.dp(8), color)); imageView.setImageResource(resId); + imageView.setColorFilter(new PorterDuffColorFilter(Color.WHITE, PorterDuff.Mode.MULTIPLY)); imageView.setVisibility(VISIBLE); if (LocaleController.isRTL) { params.rightMargin = AndroidUtilities.dp(71); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemePreviewMessagesCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemePreviewMessagesCell.java index fad7ab5a91..a226d95402 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemePreviewMessagesCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemePreviewMessagesCell.java @@ -191,6 +191,9 @@ public ThemePreviewMessagesCell(Context context, INavigationLayout layout, int t private GestureDetector gestureDetector = new GestureDetector(context, new GestureDetector.SimpleOnGestureListener() { @Override public boolean onDoubleTap(MotionEvent e) { + if (MediaDataController.getInstance(currentAccount).getDoubleTapReaction() == null) { + return false; + } boolean added = getMessageObject().selectReaction(ReactionsLayoutInBubble.VisibleReaction.fromEmojicon(MediaDataController.getInstance(currentAccount).getDoubleTapReaction()), false, false); setMessageObject(getMessageObject(), null, false, false); requestLayout(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemesHorizontalListCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemesHorizontalListCell.java index 6101b1cbf0..8a3ba2b704 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemesHorizontalListCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemesHorizontalListCell.java @@ -83,6 +83,7 @@ public class ThemesHorizontalListCell extends RecyclerListView implements Notifi private ArrayList defaultThemes; private int currentType; private int prevCount; + private BaseFragment fragment; private class ThemesListAdapter extends RecyclerListView.SelectionAdapter { @@ -691,12 +692,13 @@ public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { } } - public ThemesHorizontalListCell(Context context, int type, ArrayList def, ArrayList custom) { + public ThemesHorizontalListCell(Context context, BaseFragment fragment, int type, ArrayList def, ArrayList custom) { super(context); customThemes = custom; defaultThemes = def; currentType = type; + this.fragment = fragment; if (type == ThemeActivity.THEME_TYPE_OTHER) { setBackgroundColor(Theme.getColor(Theme.key_dialogBackground)); @@ -739,7 +741,9 @@ public void selectTheme(Theme.ThemeInfo themeInfo) { return; } if (themeInfo.info.document == null) { - presentFragment(new ThemeSetUrlActivity(themeInfo, null, true)); + if (fragment != null) { + fragment.presentFragment(new ThemeSetUrlActivity(themeInfo, null, true)); + } return; } } @@ -772,6 +776,10 @@ public void selectTheme(Theme.ThemeInfo themeInfo) { } } EmojiThemes.saveCustomTheme(themeInfo, themeInfo.currentAccentId); + + if (currentType != ThemeActivity.THEME_TYPE_NIGHT) { + Theme.turnOffAutoNight(fragment); + } } public void setDrawDivider(boolean draw) { @@ -897,10 +905,6 @@ protected void showOptionsForTheme(Theme.ThemeInfo themeInfo) { } - protected void presentFragment(BaseFragment fragment) { - - } - protected void updateRows() { } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChannelAdminLogActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChannelAdminLogActivity.java index 88f2d6ac4f..3f38104f25 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChannelAdminLogActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChannelAdminLogActivity.java @@ -3131,12 +3131,9 @@ public ArrayList getThemeDescriptions() { themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, Theme.chat_gamePaint, null, null, Theme.key_chat_previewGameText)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inPreviewInstantText)); 
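The ThemesHorizontalListCell change above replaces the protected presentFragment(...) hook with a BaseFragment constructor parameter: the cell now calls fragment.presentFragment(...) itself (e.g. to open ThemeSetUrlActivity) and, for non-night lists, Theme.turnOffAutoNight(fragment) after a theme is applied. A sketch of the new call site, assuming the theme lists hold Theme.ThemeInfo (the generic types are elided in the hunk above) and that the cell is created on behalf of an owning fragment:

```java
import android.content.Context;
import java.util.ArrayList;
import org.telegram.ui.ActionBar.BaseFragment;
import org.telegram.ui.ActionBar.Theme;
import org.telegram.ui.Cells.ThemesHorizontalListCell;
import org.telegram.ui.ThemeActivity;

public final class ThemesListExample {
    // Hypothetical helper: before this patch the fragment argument did not exist and
    // callers overrode the (now removed) protected presentFragment(BaseFragment) hook.
    public static ThemesHorizontalListCell create(Context context, BaseFragment owner,
                                                  ArrayList<Theme.ThemeInfo> defaultThemes,
                                                  ArrayList<Theme.ThemeInfo> customThemes) {
        // The cell navigates through `owner` and calls Theme.turnOffAutoNight(owner)
        // when a non-night theme is selected.
        return new ThemesHorizontalListCell(context, owner, ThemeActivity.THEME_TYPE_OTHER, defaultThemes, customThemes);
    }
}
```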
themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_outPreviewInstantText)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inPreviewInstantSelectedText)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_outPreviewInstantSelectedText)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, Theme.chat_deleteProgressPaint, null, null, Theme.key_chat_secretTimeText)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_stickerNameText)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, Theme.chat_botButtonPaint, null, null, Theme.key_chat_botButtonText)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, Theme.chat_botProgressPaint, null, null, Theme.key_chat_botProgress)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inForwardedNameText)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_outForwardedNameText)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inViaBotNameText)); @@ -3224,25 +3221,12 @@ public ArrayList getThemeDescriptions() { themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inMediaIcon)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inLoaderSelected)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inMediaIconSelected)); - themeDescriptions.add(new ThemeDescription(chatListView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[0][0], Theme.chat_photoStatesDrawables[1][0], Theme.chat_photoStatesDrawables[2][0], Theme.chat_photoStatesDrawables[3][0]}, null, Theme.key_chat_mediaLoaderPhoto)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[0][0], Theme.chat_photoStatesDrawables[1][0], Theme.chat_photoStatesDrawables[2][0], Theme.chat_photoStatesDrawables[3][0]}, null, Theme.key_chat_mediaLoaderPhotoIcon)); - themeDescriptions.add(new ThemeDescription(chatListView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[0][1], Theme.chat_photoStatesDrawables[1][1], Theme.chat_photoStatesDrawables[2][1], Theme.chat_photoStatesDrawables[3][1]}, null, Theme.key_chat_mediaLoaderPhotoSelected)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[0][1], Theme.chat_photoStatesDrawables[1][1], Theme.chat_photoStatesDrawables[2][1], Theme.chat_photoStatesDrawables[3][1]}, null, Theme.key_chat_mediaLoaderPhotoIconSelected)); - themeDescriptions.add(new ThemeDescription(chatListView, 
ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[7][0], Theme.chat_photoStatesDrawables[8][0]}, null, Theme.key_chat_outLoaderPhoto)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[7][0], Theme.chat_photoStatesDrawables[8][0]}, null, Theme.key_chat_outLoaderPhotoIcon)); - themeDescriptions.add(new ThemeDescription(chatListView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[7][1], Theme.chat_photoStatesDrawables[8][1]}, null, Theme.key_chat_outLoaderPhotoSelected)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[7][1], Theme.chat_photoStatesDrawables[8][1]}, null, Theme.key_chat_outLoaderPhotoIconSelected)); - themeDescriptions.add(new ThemeDescription(chatListView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[10][0], Theme.chat_photoStatesDrawables[11][0]}, null, Theme.key_chat_inLoaderPhoto)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[10][0], Theme.chat_photoStatesDrawables[11][0]}, null, Theme.key_chat_inLoaderPhotoIcon)); - themeDescriptions.add(new ThemeDescription(chatListView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[10][1], Theme.chat_photoStatesDrawables[11][1]}, null, Theme.key_chat_inLoaderPhotoSelected)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[10][1], Theme.chat_photoStatesDrawables[11][1]}, null, Theme.key_chat_inLoaderPhotoIconSelected)); themeDescriptions.add(new ThemeDescription(chatListView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_contactDrawable[0]}, null, Theme.key_chat_inContactBackground)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_contactDrawable[0]}, null, Theme.key_chat_inContactIcon)); themeDescriptions.add(new ThemeDescription(chatListView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_contactDrawable[1]}, null, Theme.key_chat_outContactBackground)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_contactDrawable[1]}, null, Theme.key_chat_outContactIcon)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inLocationBackground)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_locationDrawable[0]}, null, Theme.key_chat_inLocationIcon)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_outLocationBackground)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_locationDrawable[1]}, null, 
Theme.key_chat_outLocationIcon)); themeDescriptions.add(new ThemeDescription(bottomOverlayChat, 0, null, Theme.chat_composeBackgroundPaint, null, null, Theme.key_chat_messagePanelBackground)); @@ -3261,9 +3245,6 @@ public ArrayList getThemeDescriptions() { themeDescriptions.add(new ThemeDescription(progressView2, ThemeDescription.FLAG_SERVICEBACKGROUND, null, null, null, null, Theme.key_chat_serviceBackground)); themeDescriptions.add(new ThemeDescription(emptyView, ThemeDescription.FLAG_SERVICEBACKGROUND, null, null, null, null, Theme.key_chat_serviceBackground)); - themeDescriptions.add(new ThemeDescription(avatarContainer != null ? avatarContainer.getTimeItem() : null, 0, null, null, null, null, Theme.key_chat_secretTimerBackground)); - themeDescriptions.add(new ThemeDescription(avatarContainer != null ? avatarContainer.getTimeItem() : null, 0, null, null, null, null, Theme.key_chat_secretTimerText)); - themeDescriptions.add(new ThemeDescription(undoView, ThemeDescription.FLAG_BACKGROUNDFILTER, null, null, null, null, Theme.key_undo_background)); themeDescriptions.add(new ThemeDescription(undoView, 0, new Class[]{UndoView.class}, new String[]{"undoImageView"}, null, null, null, Theme.key_undo_cancelColor)); themeDescriptions.add(new ThemeDescription(undoView, 0, new Class[]{UndoView.class}, new String[]{"undoTextView"}, null, null, null, Theme.key_undo_cancelColor)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChannelCreateActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChannelCreateActivity.java index d288765863..16838af0f1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChannelCreateActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChannelCreateActivity.java @@ -53,6 +53,7 @@ import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; +import org.telegram.messenger.Utilities; import org.telegram.messenger.browser.Browser; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; @@ -117,6 +118,7 @@ public class ChannelCreateActivity extends BaseFragment implements NotificationC private LinearLayout linearLayout2; private HeaderCell headerCell2; private EditTextBoldCursor editText; + private boolean isGroup; private RLottieDrawable cameraDrawable; @@ -148,23 +150,29 @@ public class ChannelCreateActivity extends BaseFragment implements NotificationC private int currentStep; private long chatId; private boolean canCreatePublic = true; + private Boolean forcePublic; private TLRPC.InputFile inputPhoto; private TLRPC.InputFile inputVideo; + private TLRPC.VideoSize inputEmojiMarkup; private String inputVideoPath; private double videoTimestamp; private boolean createAfterUpload; private boolean donePressed; private Integer doneRequestId; + private Utilities.Callback2 onFinishListener; private final static int done_button = 1; public ChannelCreateActivity(Bundle args) { super(args); currentStep = args.getInt("step", 0); + if (args.containsKey("forcePublic")) { + forcePublic = args.getBoolean("forcePublic", false); + } if (currentStep == 0) { avatarDrawable = new AvatarDrawable(); - imageUpdater = new ImageUpdater(true); + imageUpdater = new ImageUpdater(true, ImageUpdater.FOR_TYPE_CHANNEL, true); TLRPC.TL_channels_checkUsername req = new TLRPC.TL_channels_checkUsername(); req.username = "1"; @@ -182,6 +190,10 @@ public ChannelCreateActivity(Bundle args) { } } + public void setOnFinishListener(Utilities.Callback2 onFinishListener) { + this.onFinishListener 
= onFinishListener; + } + @Override public boolean onFragmentCreate() { NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.chatDidCreated); @@ -347,7 +359,7 @@ public void onItemClick(int id) { createAfterUpload = true; return; } - doneRequestId = MessagesController.getInstance(currentAccount).createChat(nameTextView.getText().toString(), new ArrayList<>(), descriptionTextView.getText().toString(), ChatObject.CHAT_TYPE_CHANNEL, false, null, null, 0, ChannelCreateActivity.this); + doneRequestId = MessagesController.getInstance(currentAccount).createChat(nameTextView.getText().toString(), new ArrayList<>(), descriptionTextView.getText().toString(), ChatObject.CHAT_TYPE_CHANNEL, false, null, null, -1, ChannelCreateActivity.this); } else if (currentStep == 1) { if (!isPrivate) { if (descriptionTextView.length() == 0) { @@ -363,15 +375,30 @@ public void onItemClick(int id) { AndroidUtilities.shakeView(checkTextView); return; } else { - MessagesController.getInstance(currentAccount).updateChannelUserName(ChannelCreateActivity.this, chatId, lastCheckName, null, null); + AndroidUtilities.runOnUIThread(enableDoneLoading, 200); + MessagesController.getInstance(currentAccount).updateChannelUserName(ChannelCreateActivity.this, chatId, lastCheckName, () -> { + updateDoneProgress(false); + if (onFinishListener != null) { + onFinishListener.run(ChannelCreateActivity.this, chatId); + } + }, () -> { + updateDoneProgress(false); + if (onFinishListener != null) { + onFinishListener.run(ChannelCreateActivity.this, chatId); + } + }); } } + } else if (onFinishListener != null) { + onFinishListener.run(ChannelCreateActivity.this, chatId); + } + if (onFinishListener == null) { + Bundle args = new Bundle(); + args.putInt("step", 2); + args.putLong("chatId", chatId); + args.putInt("chatType", ChatObject.CHAT_TYPE_CHANNEL); + presentFragment(new GroupCreateActivity(args), true); } - Bundle args = new Bundle(); - args.putInt("step", 2); - args.putLong("chatId", chatId); - args.putInt("chatType", ChatObject.CHAT_TYPE_CHANNEL); - presentFragment(new GroupCreateActivity(args), true); } } } @@ -546,7 +573,7 @@ public void invalidate(int l, int t, int r, int b) { @Override protected void onDraw(Canvas canvas) { if (avatarImage != null && avatarImage.getImageReceiver().hasNotThumb()) { - paint.setAlpha((int) (0x55 * avatarImage.getImageReceiver().getCurrentAlpha())); + paint.setAlpha((int) (0x55 * avatarImage.getImageReceiver().getCurrentAlpha() * avatarProgressView.getAlpha())); canvas.drawCircle(getMeasuredWidth() / 2.0f, getMeasuredHeight() / 2.0f, getMeasuredWidth() / 2.0f, paint); } } @@ -560,6 +587,7 @@ protected void onDraw(Canvas canvas) { inputPhoto = null; inputVideo = null; inputVideoPath = null; + inputEmojiMarkup = null; videoTimestamp = 0; showAvatarProgress(false, true); avatarImage.setImage(null, null, avatarDrawable, null); @@ -600,8 +628,14 @@ public void invalidate() { avatarEditor.setPadding(AndroidUtilities.dp(0), 0, 0, AndroidUtilities.dp(1)); frameLayout.addView(avatarEditor, LayoutHelper.createFrame(64, 64, Gravity.TOP | (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT), LocaleController.isRTL ? 0 : 15, 12, LocaleController.isRTL ? 
15 : 0, 12)); - avatarProgressView = new RadialProgressView(context); - avatarProgressView.setSize(AndroidUtilities.dp(30)); + avatarProgressView = new RadialProgressView(context) { + @Override + public void setAlpha(float alpha) { + super.setAlpha(alpha); + avatarOverlay.invalidate(); + } + }; + avatarProgressView.setSize(AndroidUtilities.dp(30)); avatarProgressView.setProgressColor(0xffffffff); avatarProgressView.setNoProgress(false); frameLayout.addView(avatarProgressView, LayoutHelper.createFrame(64, 64, Gravity.TOP | (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT), LocaleController.isRTL ? 0 : 16, 12, LocaleController.isRTL ? 16 : 0, 12)); @@ -684,14 +718,16 @@ public void afterTextChanged(Editable editable) { linearLayout.setOrientation(LinearLayout.VERTICAL); scrollView.addView(linearLayout, new ScrollView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT)); - actionBar.setTitle(LocaleController.getString("ChannelSettingsTitle", R.string.ChannelSettingsTitle)); + TLRPC.Chat chat = getMessagesController().getChat(chatId); + isGroup = chat != null && (!ChatObject.isChannel(chat) || ChatObject.isMegagroup(chat)); + actionBar.setTitle(isGroup ? LocaleController.getString("GroupSettingsTitle", R.string.GroupSettingsTitle) : LocaleController.getString("ChannelSettingsTitle", R.string.ChannelSettingsTitle)); fragmentView.setTag(Theme.key_windowBackgroundGray); fragmentView.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundGray)); headerCell2 = new HeaderCell(context, 23); headerCell2.setHeight(46); headerCell2.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); - headerCell2.setText(LocaleController.getString("ChannelTypeHeader", R.string.ChannelTypeHeader)); + headerCell2.setText(isGroup ? 
LocaleController.getString("GroupTypeHeader", R.string.GroupTypeHeader) : LocaleController.getString("ChannelTypeHeader", R.string.ChannelTypeHeader)); linearLayout.addView(headerCell2); linearLayout2 = new LinearLayout(context); @@ -701,8 +737,14 @@ public void afterTextChanged(Editable editable) { radioButtonCell1 = new RadioButtonCell(context); radioButtonCell1.setBackgroundDrawable(Theme.getSelectorDrawable(false)); - radioButtonCell1.setTextAndValueAndCheck(LocaleController.getString("ChannelPublic", R.string.ChannelPublic), LocaleController.getString("ChannelPublicInfo", R.string.ChannelPublicInfo), false, !isPrivate); - linearLayout2.addView(radioButtonCell1, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + if (forcePublic != null && !forcePublic) { + isPrivate = true; + } + if (isGroup) { + radioButtonCell1.setTextAndValueAndCheck(LocaleController.getString("MegaPublic", R.string.MegaPublic), LocaleController.getString("MegaPublicInfo", R.string.MegaPublicInfo), false, !isPrivate); + } else { + radioButtonCell1.setTextAndValueAndCheck(LocaleController.getString("ChannelPublic", R.string.ChannelPublic), LocaleController.getString("ChannelPublicInfo", R.string.ChannelPublicInfo), false, !isPrivate); + } radioButtonCell1.setOnClickListener(v -> { if (!canCreatePublic) { showPremiumIncreaseLimitDialog(); @@ -714,11 +756,20 @@ public void afterTextChanged(Editable editable) { isPrivate = false; updatePrivatePublic(); }); + if (forcePublic == null || forcePublic) { + linearLayout2.addView(radioButtonCell1, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + } radioButtonCell2 = new RadioButtonCell(context); radioButtonCell2.setBackgroundDrawable(Theme.getSelectorDrawable(false)); - radioButtonCell2.setTextAndValueAndCheck(LocaleController.getString("ChannelPrivate", R.string.ChannelPrivate), LocaleController.getString("ChannelPrivateInfo", R.string.ChannelPrivateInfo), false, isPrivate); - linearLayout2.addView(radioButtonCell2, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + if (forcePublic != null && forcePublic) { + isPrivate = false; + } + if (isGroup) { + radioButtonCell2.setTextAndValueAndCheck(LocaleController.getString("MegaPrivate", R.string.MegaPrivate), LocaleController.getString("MegaPrivateInfo", R.string.MegaPrivateInfo), false, isPrivate); + } else { + radioButtonCell2.setTextAndValueAndCheck(LocaleController.getString("ChannelPrivate", R.string.ChannelPrivate), LocaleController.getString("ChannelPrivateInfo", R.string.ChannelPrivateInfo), false, isPrivate); + } radioButtonCell2.setOnClickListener(v -> { if (isPrivate) { return; @@ -726,6 +777,9 @@ public void afterTextChanged(Editable editable) { isPrivate = true; updatePrivatePublic(); }); + if (forcePublic == null || !forcePublic) { + linearLayout2.addView(radioButtonCell2, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + } sectionCell = new ShadowSectionCell(context); linearLayout.addView(sectionCell, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); @@ -924,8 +978,13 @@ private void updatePrivatePublic() { typeInfoCell.setBackgroundDrawable(Theme.getThemedDrawable(typeInfoCell.getContext(), R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); linkContainer.setVisibility(View.VISIBLE); loadingAdminedCell.setVisibility(View.GONE); - typeInfoCell.setText(isPrivate ? 
LocaleController.getString("ChannelPrivateLinkHelp", R.string.ChannelPrivateLinkHelp) : LocaleController.getString("ChannelUsernameHelp", R.string.ChannelUsernameHelp)); - headerCell.setText(isPrivate ? LocaleController.getString("ChannelInviteLinkTitle", R.string.ChannelInviteLinkTitle) : LocaleController.getString("ChannelLinkTitle", R.string.ChannelLinkTitle)); + if (isGroup) { + typeInfoCell.setText(isPrivate ? LocaleController.getString("MegaPrivateLinkHelp", R.string.MegaPrivateLinkHelp) : LocaleController.getString("MegaUsernameHelp", R.string.MegaUsernameHelp)); + headerCell.setText(isPrivate ? LocaleController.getString("ChannelInviteLinkTitle", R.string.ChannelInviteLinkTitle) : LocaleController.getString("ChannelLinkTitle", R.string.ChannelLinkTitle)); + } else { + typeInfoCell.setText(isPrivate ? LocaleController.getString("ChannelPrivateLinkHelp", R.string.ChannelPrivateLinkHelp) : LocaleController.getString("ChannelUsernameHelp", R.string.ChannelUsernameHelp)); + headerCell.setText(isPrivate ? LocaleController.getString("ChannelInviteLinkTitle", R.string.ChannelInviteLinkTitle) : LocaleController.getString("ChannelLinkTitle", R.string.ChannelLinkTitle)); + } publicContainer.setVisibility(isPrivate ? View.GONE : View.VISIBLE); privateContainer.setVisibility(isPrivate ? View.VISIBLE : View.GONE); linkContainer.setPadding(0, 0, 0, isPrivate ? 0 : AndroidUtilities.dp(7)); @@ -955,11 +1014,12 @@ public void didStartUpload(boolean isVideo) { } @Override - public void didUploadPhoto(final TLRPC.InputFile photo, final TLRPC.InputFile video, double videoStartTimestamp, String videoPath, final TLRPC.PhotoSize bigSize, final TLRPC.PhotoSize smallSize, boolean isVideo) { + public void didUploadPhoto(final TLRPC.InputFile photo, final TLRPC.InputFile video, double videoStartTimestamp, String videoPath, final TLRPC.PhotoSize bigSize, final TLRPC.PhotoSize smallSize, boolean isVideo, TLRPC.VideoSize emojiMarkup) { AndroidUtilities.runOnUIThread(() -> { if (photo != null || video != null) { inputPhoto = photo; inputVideo = video; + inputEmojiMarkup = emojiMarkup; inputVideoPath = videoPath; videoTimestamp = videoStartTimestamp; if (createAfterUpload) { @@ -976,6 +1036,7 @@ public void didUploadPhoto(final TLRPC.InputFile photo, final TLRPC.InputFile vi doneButton.performClick(); } showAvatarProgress(false, true); + avatarEditor.setImageDrawable(null); } else { avatar = smallSize.location; avatarBig = bigSize.location; @@ -1125,10 +1186,15 @@ public void didReceivedNotification(int id, int account, Object... 
args) { bundle.putInt("step", 1); bundle.putLong("chat_id", chat_id); bundle.putBoolean("canCreatePublic", canCreatePublic); - if (inputPhoto != null || inputVideo != null) { - MessagesController.getInstance(currentAccount).changeChatAvatar(chat_id, null, inputPhoto, inputVideo, videoTimestamp, inputVideoPath, avatar, avatarBig, null); + if (forcePublic != null) { + bundle.putBoolean("forcePublic", forcePublic); + } + if (inputPhoto != null || inputVideo != null || inputEmojiMarkup != null) { + MessagesController.getInstance(currentAccount).changeChatAvatar(chat_id, null, inputPhoto, inputVideo, inputEmojiMarkup, videoTimestamp, inputVideoPath, avatar, avatarBig, null); } - presentFragment(new ChannelCreateActivity(bundle), true); + ChannelCreateActivity activity = new ChannelCreateActivity(bundle); + activity.setOnFinishListener(onFinishListener); + presentFragment(activity, true); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java index a561a2dfa9..d74450594d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java @@ -69,6 +69,7 @@ import android.text.style.ForegroundColorSpan; import android.text.style.ImageSpan; import android.text.style.URLSpan; +import android.util.DisplayMetrics; import android.util.Pair; import android.util.Property; import android.util.SparseArray; @@ -101,6 +102,7 @@ import android.widget.Toast; import androidx.annotation.NonNull; +import androidx.annotation.Nullable; import androidx.collection.LongSparseArray; import androidx.core.content.ContextCompat; import androidx.core.content.FileProvider; @@ -129,17 +131,22 @@ import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.BotWebViewVibrationEffect; import org.telegram.messenger.BuildVars; +import org.telegram.messenger.ChatMessagesMetadataController; import org.telegram.messenger.ChatObject; import org.telegram.messenger.ChatThemeController; import org.telegram.messenger.ContactsController; import org.telegram.messenger.DialogObject; +import org.telegram.messenger.DownloadController; import org.telegram.messenger.Emoji; import org.telegram.messenger.EmojiData; import org.telegram.messenger.FileLoader; import org.telegram.messenger.FileLog; +import org.telegram.messenger.FlagSecureReason; import org.telegram.messenger.ForwardingMessagesParams; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.ImageReceiver; +//import org.telegram.messenger.LanguageDetector; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaController; import org.telegram.messenger.MediaDataController; @@ -152,6 +159,7 @@ import org.telegram.messenger.SecretChatHelper; import org.telegram.messenger.SendMessagesHelper; import org.telegram.messenger.SharedConfig; +import org.telegram.messenger.TranslateController; import org.telegram.messenger.UserConfig; import org.telegram.messenger.UserObject; import org.telegram.messenger.Utilities; @@ -207,6 +215,7 @@ import org.telegram.ui.Components.BluredView; import org.telegram.ui.Components.BlurredFrameLayout; import org.telegram.ui.Components.BotCommandsMenuView; +import org.telegram.ui.Components.BotWebViewSheet; import org.telegram.ui.Components.Bulletin; import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.ChatActivityEnterTopView; @@ -260,12 +269,15 @@ import 
org.telegram.ui.Components.PipRoundVideoView; import org.telegram.ui.Components.PollVotesAlert; import org.telegram.ui.Components.PopupSwipeBackLayout; +import org.telegram.ui.Components.Premium.GiftPremiumBottomSheet; import org.telegram.ui.Components.Premium.PremiumFeatureBottomSheet; +import org.telegram.ui.Components.Premium.PremiumPreviewBottomSheet; import org.telegram.ui.Components.RLottieDrawable; import org.telegram.ui.Components.RadialProgressView; import org.telegram.ui.Components.ReactedHeaderView; import org.telegram.ui.Components.ReactedUsersListView; import org.telegram.ui.Components.ReactionTabHolderView; +import org.telegram.ui.Components.Reactions.ChatSelectionReactionMenuOverlay; import org.telegram.ui.Components.Reactions.ReactionsEffectOverlay; import org.telegram.ui.Components.Reactions.ReactionsLayoutInBubble; import org.telegram.ui.Components.ReactionsContainerLayout; @@ -282,7 +294,8 @@ import org.telegram.ui.Components.TextStyleSpan; import org.telegram.ui.Components.ThemeEditorView; import org.telegram.ui.Components.TranscribeButton; -import org.telegram.ui.Components.TranslateAlert; +import org.telegram.ui.Components.TranslateAlert2; +import org.telegram.ui.Components.TranslateButton; import org.telegram.ui.Components.TrendingStickersAlert; import org.telegram.ui.Components.TypefaceSpan; import org.telegram.ui.Components.URLSpanBotCommand; @@ -294,9 +307,9 @@ import org.telegram.ui.Components.UnreadCounterTextView; import org.telegram.ui.Components.ViewHelper; import org.telegram.ui.Components.spoilers.SpoilerEffect; +import org.telegram.ui.Components.voip.CellFlickerDrawable; import org.telegram.ui.Components.voip.VoIPHelper; import org.telegram.ui.Delegates.ChatActivityMemberRequestsDelegate; -import org.telegram.ui.LNavigation.LNavigation; import java.io.BufferedWriter; import java.io.ByteArrayOutputStream; @@ -422,44 +435,51 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private FrameLayout bottomOverlay; protected ChatActivityEnterView chatActivityEnterView; private ChatActivityEnterTopView chatActivityEnterTopView; + private boolean shrinkActionBarItems; private int chatActivityEnterViewAnimateFromTop; private boolean chatActivityEnterViewAnimateBeforeSending; - private View timeItem2; - private ActionBarMenuItem attachItem; + private ActionBarMenuItem.Item timeItem2; + private ActionBarMenu.LazyItem attachItem; private ActionBarMenuItem headerItem; - private ActionBarMenuItem editTextItem; + private ActionBarMenu.LazyItem editTextItem; private ActionBarMenuItem searchItem; + private ActionBarMenuItem.Item translateItem; private ActionBarMenuItem searchIconItem; private ActionBarMenuItem viewInChatItem; - private ActionBarMenuItem audioCallIconItem; + private ActionBarMenu.LazyItem audioCallIconItem; private boolean searchItemVisible; private RadialProgressView progressBar; - private ActionBarMenuSubItem addContactItem; - private ActionBarMenuSubItem shareKeyItem; - private ActionBarMenuSubItem clearHistoryItem; - private ActionBarMenuSubItem toTheBeginning; - private ActionBarMenuSubItem toTheMessage; - private ActionBarMenuSubItem hideTitleItem; - private ActionBarMenuSubItem viewAsTopics; - private ActionBarMenuSubItem closeTopicItem; - private ActionBarMenuSubItem openForumItem; + + private ActionBarMenuItem.Item addContactItem; + private ActionBarMenuItem.Item shareKeyItem; + private ActionBarMenuItem.Item clearHistoryItem; + private ActionBarMenuItem.Item viewAsTopics; + private ActionBarMenuItem.Item 
closeTopicItem; + private ActionBarMenuItem.Item openForumItem; + private ActionBarMenuItem.Item toTheBeginning; + private ActionBarMenuItem.Item toTheMessage; + private ActionBarMenuItem.Item hideTitleItem; private ClippingImageView animatingImageView; public RecyclerListView chatListView; private ChatListItemAnimator chatListItemAnimator; private GridLayoutManagerFixed chatLayoutManager; public ChatActivityAdapter chatAdapter; private UnreadCounterTextView bottomOverlayChatText; + private TextView bottomOverlayStartButton; private ImageView bottomOverlayImage; private RadialProgressView bottomOverlayProgress; private AnimatorSet bottomOverlayAnimation; private boolean bottomOverlayChatWaitsReply; private BlurredFrameLayout bottomOverlayChat; private BlurredFrameLayout bottomMessagesActionContainer; + @Nullable private TextView forwardButton; + @Nullable private TextView replyButton; + @Nullable private FrameLayout emptyViewContainer; private ChatGreetingsView greetingsViewContainer; - public SizeNotifierFrameLayout contentView; + public ChatActivityFragmentView contentView; private ChatBigEmptyView bigEmptyView; private ArrayList actionModeViews = new ArrayList<>(); private ChatAvatarContainer avatarContainer; @@ -467,8 +487,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private NumberTextView selectedMessagesCountTextView; private RecyclerListView.OnItemClickListener mentionsOnItemClickListener; private SuggestEmojiView suggestEmojiPanel; - private ActionBarMenuSubItem muteItem; - private View muteItemGap; + private ActionBarMenuItem.Item muteItem; + private ActionBarMenuItem.Item muteItemGap; private ChatNotificationsPopupWrapper chatNotificationsPopupWrapper; private float pagedownButtonEnterProgress; private float mentionsButtonEnterProgress; @@ -501,14 +521,23 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not public MentionsContainerView mentionContainer; private AnimatorSet mentionListAnimation; public ChatAttachAlert chatAttachAlert; + @Nullable private BlurredFrameLayout topChatPanelView; private AnimatorSet reportSpamViewAnimator; + @Nullable private TextView addToContactsButton; private boolean addToContactsButtonArchive; + @Nullable private TextView reportSpamButton; + @Nullable private TextView restartTopicButton; - private View topViewSeparator1, topViewSeparator2; + @Nullable + private TranslateButton translateButton; + @Nullable + private View topViewSeparator1, topViewSeparator2, topViewSeparator3; + @Nullable private LinkSpanDrawable.LinksTextView emojiStatusSpamHint; + @Nullable private ImageView closeReportSpam; private TextView chatWithAdminTextView; private FragmentContextView fragmentContextView; @@ -551,11 +580,13 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private RadialProgressView pinnedProgress; private ImageView pinnedListButton; private AnimatorSet pinnedListAnimator; + @Nullable private FrameLayout alertView; private Runnable hideAlertViewRunnable; private TextView alertNameTextView; private TextView alertTextView; private AnimatorSet alertViewAnimator; + private final int searchContainerHeight = 51; private BlurredFrameLayout searchContainer; private View searchAsListTogglerView; private ImageView searchCalendarButton; @@ -584,8 +615,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private boolean showPinBulletin; private int pinBullerinTag; protected boolean openKeyboardOnAttachMenuClose; - private Runnable 
unregisterFlagSecurePasscode; - private Runnable unregisterFlagSecureNoforwards; + private FlagSecureReason flagSecure; private boolean isFullyVisible; private MessageObject hintMessageObject; @@ -632,8 +662,6 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private int scrollToOffsetOnRecreate = 0; private ArrayList pollsToCheck = new ArrayList<>(10); - private ArrayList reactionsToCheck = new ArrayList<>(10); - private ArrayList extendedMediaToCheck = new ArrayList<>(10); private int editTextStart; private int editTextEnd; @@ -753,7 +781,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private SparseArray repliesMessagesDict = new SparseArray<>(); private SparseArray> replyMessageOwners = new SparseArray<>(); private HashMap> messagesByDays = new HashMap<>(); - protected ArrayList messages = new ArrayList<>(); + public ArrayList messages = new ArrayList<>(); private SparseArray waitingForReplies = new SparseArray<>(); private LongSparseArray> polls = new LongSparseArray<>(); private LongSparseArray groupedMessagesMap = new LongSparseArray<>(); @@ -793,12 +821,14 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private Paint skeletonOutlinePaint = new Paint(Paint.ANTI_ALIAS_FLAG); private Matrix skeletonOutlineMatrix = new Matrix(); private LinearGradient skeletonOutlineGradient; + private boolean chatListViewAttached; { skeletonOutlinePaint.setStyle(Paint.Style.STROKE); skeletonOutlinePaint.setStrokeWidth(AndroidUtilities.dp(1)); } + private String inlineQueryForInput; private boolean premiumInvoiceBot; private boolean showScrollToMessageError; private int startLoadFromMessageId; @@ -904,7 +934,8 @@ public void run() { private int postponedScrollToLastMessageQueryIndex; private int postponedScrollMessageId; private boolean postponedScrollIsCanceled; - private TextSelectionHelper.ChatListTextSelectionHelper textSelectionHelper; + private static ArrayList textSelectionHelpersCache; + private ChatActivityTextSelectionHelper textSelectionHelper; private ChatMessageCell slidingView; private boolean maybeStartTrackingSlidingView; private boolean startedTrackingSlidingView; @@ -974,11 +1005,13 @@ public void run() { } }; + private ChatSelectionReactionMenuOverlay selectionReactionsOverlay; + private boolean isPauseOnThemePreview; private ChatThemeBottomSheet chatThemeBottomSheet; public ThemeDelegate themeDelegate; private ChatActivityMemberRequestsDelegate pendingRequestsDelegate; - + private final ChatMessagesMetadataController chatMessagesMetadataController = new ChatMessagesMetadataController(this); private TLRPC.TL_channels_sendAsPeers sendAsPeersObj; private boolean switchFromTopics; @@ -1101,7 +1134,12 @@ public void deleteHistory(int dateSelectedStart, int dateSelectedEnd, boolean fo } chatAdapter.isFrozen = true; chatAdapter.notifyDataSetChanged(true); - getUndoView().showWithAction(dialog_id, UndoView.ACTION_CLEAR_DATES, () -> { + UndoView undoView = getUndoView(); + if (undoView == null) { + return; + } + + undoView.showWithAction(dialog_id, UndoView.ACTION_CLEAR_DATES, () -> { getMessagesController().deleteMessagesRange(dialog_id, ChatObject.isChannel(currentChat) ? 
dialog_id : 0, dateSelectedStart, dateSelectedEnd, forAll, () -> { chatAdapter.frozenMessages.clear(); chatAdapter.isFrozen = false; @@ -1203,6 +1241,14 @@ public Object get(Object object) { return items; } + public boolean allowSendPhotos() { + if (currentChat != null && !ChatObject.canSendPhoto(currentChat)) { + return false; + } else { + return true; + } + } + private interface ChatActivityDelegate { default void openReplyMessage(int mid) { @@ -1290,6 +1336,9 @@ public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolea private final static int delete = 12; private final static int chat_enc_timer = 13; private final static int chat_menu_attach = 14; + private final static int chat_menu_search = -1; + private final static int chat_menu_options = -2; + private final static int chat_menu_edit_text_options = -3; private final static int clear_history = 15; private final static int delete_chat = 16; @@ -1334,6 +1383,8 @@ public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolea private final static int open_forum = 61; private final static int combine_message = 62; + private final static int translate = 62; + private ActionBarMenuItem actionModeOtherItem; // NekoX private final static int id_chat_compose_panel = 1000; @@ -1348,7 +1399,7 @@ public boolean onItemClick(View view, int position, float x, float y) { return false; wasManualScroll = true; boolean result = true; - if (!actionBar.isActionModeShowed() && (reportType < 0 || (view instanceof ChatActionCell && (((ChatActionCell) view).getMessageObject().messageOwner.action instanceof TLRPC.TL_messageActionSetMessagesTTL) || ((ChatActionCell) view).getMessageObject().type == MessageObject.TYPE_SUGGEST_PHOTO))) { + if (!actionBar.isActionModeShowed() && (reportType < 0 || (view instanceof ChatActionCell && (((ChatActionCell) view).getMessageObject().messageOwner.action instanceof TLRPC.TL_messageActionSetMessagesTTL) || ((view instanceof ChatActionCell) && ((ChatActionCell) view).getMessageObject().type == MessageObject.TYPE_SUGGEST_PHOTO)))) { result = createMenu(view, false, true, x, y); } else { boolean outside = false; @@ -1646,3042 +1697,2063 @@ public void onDoubleTap(View view, int position, float x, float y) { } }; - private final ChatScrollCallback chatScrollHelperCallback = new ChatScrollCallback(); + private class ChatActivityEnterViewDelegate implements ChatActivityEnterView.ChatActivityEnterViewDelegate { - private final Runnable showScheduledOrNoSoundRunnable = () -> { - if (getParentActivity() == null || fragmentView == null || chatActivityEnterView == null) { - return; - } - View anchor = chatActivityEnterView.getSendButton(); - if (anchor == null || chatActivityEnterView.getEditField().getText().length() < 5) { - return; - } - SharedConfig.increaseScheduledOrNoSuoundHintShowed(); - if (scheduledOrNoSoundHint == null) { - scheduledOrNoSoundHint = new HintView(getParentActivity(), 4, themeDelegate); - scheduledOrNoSoundHint.setShowingDuration(5000); - scheduledOrNoSoundHint.setAlpha(0); - scheduledOrNoSoundHint.setVisibility(View.INVISIBLE); - scheduledOrNoSoundHint.setText(LocaleController.getString("ScheduledOrNoSoundHint", R.string.ScheduledOrNoSoundHint)); - contentView.addView(scheduledOrNoSoundHint, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 10, 0, 10, 0)); - } - scheduledOrNoSoundHint.showForView(anchor, true); - }; + int lastSize; + boolean isEditTextItemVisibilitySuppressed; - public ChatActivity(Bundle args) 
{ - super(args); - } + @Override + public int getContentViewHeight() { + return contentView.getHeight(); + } - @Override - public boolean onFragmentCreate() { - final long chatId = arguments.getLong("chat_id", 0); - final long userId = arguments.getLong("user_id", 0); - final int encId = arguments.getInt("enc_id", 0); - dialogFolderId = arguments.getInt("dialog_folder_id", 0); - dialogFilterId = arguments.getInt("dialog_filter_id", 0); - chatMode = arguments.getInt("chatMode", 0); - voiceChatHash = arguments.getString("voicechat", null); - livestream = !TextUtils.isEmpty(arguments.getString("livestream", null)); - attachMenuBotToOpen = arguments.getString("attach_bot", null); - attachMenuBotStartCommand = arguments.getString("attach_bot_start_command", null); - inlineReturn = arguments.getLong("inline_return", 0); - String inlineQuery = arguments.getString("inline_query"); - premiumInvoiceBot = arguments.getBoolean("premium_bot", false); - startLoadFromMessageId = arguments.getInt("message_id", 0); - startLoadFromDate = arguments.getInt("start_from_date", 0); - startFromVideoTimestamp = arguments.getInt("video_timestamp", -1); - threadUnreadMessagesCount = arguments.getInt("unread_count", 0); - if (startFromVideoTimestamp >= 0) { - startFromVideoMessageId = startLoadFromMessageId; + @Override + public int measureKeyboardHeight() { + return contentView.measureKeyboardHeight(); } - reportType = arguments.getInt("report", -1); - pulled = arguments.getBoolean("pulled", false); - boolean historyPreloaded = arguments.getBoolean("historyPreloaded", false); - if (highlightMessageId != 0 && highlightMessageId != Integer.MAX_VALUE) { - startLoadFromMessageId = highlightMessageId; + + @Override + public TLRPC.TL_channels_sendAsPeers getSendAsPeers() { + return sendAsPeersObj; } - int migrated_to = arguments.getInt("migrated_to", 0); - scrollToTopOnResume = arguments.getBoolean("scrollToTopOnResume", false); - needRemovePreviousSameChatActivity = arguments.getBoolean("need_remove_previous_same_chat_activity", true); - noForwardQuote = arguments.getBoolean("forward_noquote", false); - justCreatedChat = arguments.getBoolean("just_created_chat", false); - if (chatId != 0) { - currentChat = getMessagesController().getChat(chatId); - if (currentChat == null) { - final CountDownLatch countDownLatch = new CountDownLatch(1); - final MessagesStorage messagesStorage = getMessagesStorage(); - messagesStorage.getStorageQueue().postRunnable(() -> { - currentChat = messagesStorage.getChat(chatId); - countDownLatch.countDown(); - }); - try { - countDownLatch.await(); - } catch (Exception e) { - FileLog.e(e); - } - if (currentChat != null) { - getMessagesController().putChat(currentChat, true); - } else { - return false; + @Override + public void onMessageSend(CharSequence message, boolean notify, int scheduleDate) { + if (chatListItemAnimator != null) { + chatActivityEnterViewAnimateFromTop = chatActivityEnterView.getBackgroundTop(); + if (chatActivityEnterViewAnimateFromTop != 0) { + chatActivityEnterViewAnimateBeforeSending = true; } } - dialog_id = -chatId; - if (ChatObject.isChannel(currentChat)) { - getMessagesController().startShortPoll(currentChat, classGuid, false); + if (mentionContainer != null && mentionContainer.getAdapter() != null) { + mentionContainer.getAdapter().addHashtagsFromMessage(message); } - } else if (userId != 0) { - currentUser = getMessagesController().getUser(userId); - if (currentUser == null) { - final MessagesStorage messagesStorage = getMessagesStorage(); - final CountDownLatch 
countDownLatch = new CountDownLatch(1); - messagesStorage.getStorageQueue().postRunnable(() -> { - currentUser = messagesStorage.getUser(userId); - countDownLatch.countDown(); - }); - try { - countDownLatch.await(); - } catch (Exception e) { - FileLog.e(e); + if (scheduleDate != 0) { + if (scheduledMessagesCount == -1) { + scheduledMessagesCount = 0; } - if (currentUser != null) { - getMessagesController().putUser(currentUser, true); - } else { - return false; + if (message != null) { + scheduledMessagesCount++; + } + if (forwardingMessages != null && !forwardingMessages.messages.isEmpty()) { + scheduledMessagesCount += forwardingMessages.messages.size(); + } + updateScheduledInterface(false); + } + if (!TextUtils.isEmpty(message) && forwardingMessages != null && !forwardingMessages.messages.isEmpty()) { + ArrayList messagesToForward = new ArrayList<>(); + forwardingMessages.getSelectedMessages(messagesToForward); + boolean showReplyHint = messagesToForward.size() > 0; + TLRPC.Peer toPeer = getMessagesController().getPeer(dialog_id); + for (int i = 0; i < messagesToForward.size(); ++i) { + MessageObject msg = messagesToForward.get(i); + if (msg != null && msg.messageOwner != null && !MessageObject.peersEqual(msg.messageOwner.peer_id, toPeer)) { + showReplyHint = false; + break; + } } - } - dialog_id = userId; - botUser = arguments.getString("botUser"); - if (inlineQuery != null) { - getMessagesController().sendBotStart(currentUser, inlineQuery); - } else if (premiumInvoiceBot && !TextUtils.isEmpty(botUser)) { - getMessagesController().sendBotStart(currentUser, botUser); - botUser = null; - premiumInvoiceBot = false; - } - } else if (encId != 0) { - currentEncryptedChat = getMessagesController().getEncryptedChat(encId); - final MessagesStorage messagesStorage = getMessagesStorage(); - if (currentEncryptedChat == null) { - final CountDownLatch countDownLatch = new CountDownLatch(1); - messagesStorage.getStorageQueue().postRunnable(() -> { - currentEncryptedChat = messagesStorage.getEncryptedChat(encId); - countDownLatch.countDown(); - }); - try { - countDownLatch.await(); - } catch (Exception e) { - FileLog.e(e); - } - if (currentEncryptedChat != null) { - getMessagesController().putEncryptedChat(currentEncryptedChat, true); - } else { - return false; + if (showReplyHint) { + checkTopUndoView(); + if (topUndoView != null) { + topUndoView.showWithAction(0, UndoView.ACTION_HINT_SWIPE_TO_REPLY, null, null); + } } } - currentUser = getMessagesController().getUser(currentEncryptedChat.user_id); - if (currentUser == null) { - final CountDownLatch countDownLatch = new CountDownLatch(1); - messagesStorage.getStorageQueue().postRunnable(() -> { - currentUser = messagesStorage.getUser(currentEncryptedChat.user_id); - countDownLatch.countDown(); - }); - try { - countDownLatch.await(); - } catch (Exception e) { - FileLog.e(e); + if (ChatObject.isForum(currentChat) && !isTopic && replyingMessageObject != null) { + int topicId = replyingMessageObject.replyToForumTopic != null ? 
replyingMessageObject.replyToForumTopic.id : MessageObject.getTopicId(replyingMessageObject.messageOwner, true); + if (topicId != 0) { + getMediaDataController().cleanDraft(dialog_id, topicId, false); } - if (currentUser != null) { - getMessagesController().putUser(currentUser, true); - } else { - return false; + } + + hideFieldPanel(notify, scheduleDate, true); + if (chatActivityEnterView != null && chatActivityEnterView.getEmojiView() != null) { + chatActivityEnterView.getEmojiView().onMessageSend(); + } + + if (!getMessagesController().premiumLocked && !getMessagesController().didPressTranscribeButtonEnough() && !getUserConfig().isPremium() && !TextUtils.isEmpty(message) && messages != null) { + for (int i = 1; i < Math.min(5, messages.size()); ++i) { + MessageObject msg = messages.get(i); + if (msg != null && !msg.isOutOwner() && (msg.isVoice() || msg.isRoundVideo()) && msg.isContentUnread()) { + TranscribeButton.showOffTranscribe(msg); + } } } - dialog_id = DialogObject.makeEncryptedDialogId(encId); - maxMessageId[0] = maxMessageId[1] = Integer.MIN_VALUE; - minMessageId[0] = minMessageId[1] = Integer.MAX_VALUE; - } else { - return false; } - dialog_id_Long = dialog_id; - - transitionAnimationGlobalIndex = NotificationCenter.getGlobalInstance().setAnimationInProgress(transitionAnimationGlobalIndex, new int[0]); + // NekoX + @Override + public void beforeMessageSend(CharSequence message, boolean notify, int scheduleDate) { + ChatActivity.this.beforeMessageSend(notify, scheduleDate, true); + } - themeDelegate = new ThemeDelegate(); - if (themeDelegate.isThemeChangeAvailable()) { - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.needSetDayNightTheme); + @Override + public int getDisableLinkPreviewStatus() { + return disableLinkPreview ? 
2 : 1; } - if (currentUser != null && Build.VERSION.SDK_INT < 23) { - MediaController.getInstance().startMediaObserver(); + @Override + public void toggleDisableLinkPreview() { + disableLinkPreview = !disableLinkPreview; } - getNotificationCenter().addPostponeNotificationsCallback(postponeNotificationsWhileLoadingCallback); + @Override + public void onEditTextScroll() { + if (suggestEmojiPanel != null) { + suggestEmojiPanel.forceClose(); + } + } - if (chatMode != MODE_SCHEDULED) { - if (threadMessageId == 0) { - getNotificationCenter().addObserver(this, NotificationCenter.screenshotTook); - getNotificationCenter().addObserver(this, NotificationCenter.encryptedChatUpdated); - getNotificationCenter().addObserver(this, NotificationCenter.messagesReadEncrypted); - getNotificationCenter().addObserver(this, NotificationCenter.botKeyboardDidLoad); - getNotificationCenter().addObserver(this, NotificationCenter.updateMentionsCount); - getNotificationCenter().addObserver(this, NotificationCenter.newDraftReceived); - getNotificationCenter().addObserver(this, NotificationCenter.chatOnlineCountDidLoad); - getNotificationCenter().addObserver(this, NotificationCenter.peerSettingsDidLoad); - getNotificationCenter().addObserver(this, NotificationCenter.didLoadPinnedMessages); - getNotificationCenter().addObserver(this, NotificationCenter.commentsRead); - getNotificationCenter().addObserver(this, NotificationCenter.changeRepliesCounter); - getNotificationCenter().addObserver(this, NotificationCenter.messagesRead); - getNotificationCenter().addObserver(this, NotificationCenter.didLoadChatInviter); - getNotificationCenter().addObserver(this, NotificationCenter.groupCallUpdated); - } else { - getNotificationCenter().addObserver(this, NotificationCenter.threadMessagesRead); - if (isTopic) { - getNotificationCenter().addObserver(this, NotificationCenter.updateMentionsCount); - getNotificationCenter().addObserver(this, NotificationCenter.didLoadPinnedMessages); - } + @Override + public void onContextMenuOpen() { + if (suggestEmojiPanel != null) { + suggestEmojiPanel.forceClose(); } - getNotificationCenter().addObserver(this, NotificationCenter.removeAllMessagesFromDialog); - getNotificationCenter().addObserver(this, NotificationCenter.messagesReadContent); - getNotificationCenter().addObserver(this, NotificationCenter.chatSearchResultsAvailable); - getNotificationCenter().addObserver(this, NotificationCenter.chatSearchResultsLoading); - getNotificationCenter().addObserver(this, NotificationCenter.didUpdateMessagesViews); - getNotificationCenter().addObserver(this, NotificationCenter.didUpdatePollResults); - if (currentEncryptedChat != null) { - getNotificationCenter().addObserver(this, NotificationCenter.didVerifyMessagesStickers); + } + + @Override + public void onContextMenuClose() { + if (suggestEmojiPanel != null) { + suggestEmojiPanel.fireUpdate(); } } - getNotificationCenter().addObserver(this, NotificationCenter.messagesDidLoad); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.invalidateMotionBackground); - getNotificationCenter().addObserver(this, NotificationCenter.didUpdateConnectionState); - getNotificationCenter().addObserver(this, NotificationCenter.updateInterfaces); - getNotificationCenter().addObserver(this, NotificationCenter.updateDefaultSendAsPeer); - if (chatMode != MODE_PINNED) { - getNotificationCenter().addObserver(this, NotificationCenter.didReceiveNewMessages); + + 
@Override + public void onSwitchRecordMode(boolean video) { + showVoiceHint(false, video); } - if (chatMode == 0) { - getNotificationCenter().addObserver(this, NotificationCenter.didLoadSponsoredMessages); + + @Override + public void onPreAudioVideoRecord() { + showVoiceHint(true, false); } - getNotificationCenter().addObserver(this, NotificationCenter.didLoadSendAsPeers); - getNotificationCenter().addObserver(this, NotificationCenter.closeChats); - getNotificationCenter().addObserver(this, NotificationCenter.messagesDeleted); - getNotificationCenter().addObserver(this, NotificationCenter.historyCleared); - getNotificationCenter().addObserver(this, NotificationCenter.messageReceivedByServer); - getNotificationCenter().addObserver(this, NotificationCenter.messageReceivedByAck); - getNotificationCenter().addObserver(this, NotificationCenter.messageSendError); - getNotificationCenter().addObserver(this, NotificationCenter.chatInfoDidLoad); - getNotificationCenter().addObserver(this, NotificationCenter.contactsDidLoad); - getNotificationCenter().addObserver(this, NotificationCenter.messagePlayingProgressDidChanged); - getNotificationCenter().addObserver(this, NotificationCenter.messagePlayingDidReset); - getNotificationCenter().addObserver(this, NotificationCenter.messagePlayingGoingToStop); - getNotificationCenter().addObserver(this, NotificationCenter.messagePlayingPlayStateChanged); - getNotificationCenter().addObserver(this, NotificationCenter.blockedUsersDidLoad); - getNotificationCenter().addObserver(this, NotificationCenter.fileNewChunkAvailable); - getNotificationCenter().addObserver(this, NotificationCenter.didCreatedNewDeleteTask); - getNotificationCenter().addObserver(this, NotificationCenter.messagePlayingDidStart); - getNotificationCenter().addObserver(this, NotificationCenter.updateMessageMedia); - getNotificationCenter().addObserver(this, NotificationCenter.voiceTranscriptionUpdate); - getNotificationCenter().addObserver(this, NotificationCenter.animatedEmojiDocumentLoaded); - getNotificationCenter().addObserver(this, NotificationCenter.replaceMessagesObjects); - getNotificationCenter().addObserver(this, NotificationCenter.notificationsSettingsUpdated); - getNotificationCenter().addObserver(this, NotificationCenter.replyMessagesDidLoad); - getNotificationCenter().addObserver(this, NotificationCenter.didReceivedWebpages); - getNotificationCenter().addObserver(this, NotificationCenter.didReceivedWebpagesInUpdates); - getNotificationCenter().addObserver(this, NotificationCenter.botInfoDidLoad); - getNotificationCenter().addObserver(this, NotificationCenter.chatInfoCantLoad); - getNotificationCenter().addObserver(this, NotificationCenter.userInfoDidLoad); - getNotificationCenter().addObserver(this, NotificationCenter.pinnedInfoDidLoad); - getNotificationCenter().addObserver(this, NotificationCenter.topicsDidLoaded); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.didSetNewWallpapper); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.didApplyNewTheme); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.goingToPreviewTheme); - getNotificationCenter().addObserver(this, NotificationCenter.channelRightsUpdated); - getNotificationCenter().addObserver(this, NotificationCenter.audioRecordTooShort); - getNotificationCenter().addObserver(this, NotificationCenter.didUpdateReactions); - getNotificationCenter().addObserver(this, NotificationCenter.didUpdateExtendedMedia); - 
getNotificationCenter().addObserver(this, NotificationCenter.videoLoadingStateChanged); - getNotificationCenter().addObserver(this, NotificationCenter.scheduledMessagesUpdated); - getNotificationCenter().addObserver(this, NotificationCenter.diceStickersDidLoad); - getNotificationCenter().addObserver(this, NotificationCenter.dialogDeleted); - getNotificationCenter().addObserver(this, NotificationCenter.chatAvailableReactionsUpdated); - getNotificationCenter().addObserver(this, NotificationCenter.dialogsUnreadReactionsCounterChanged); - getNotificationCenter().addObserver(this, NotificationCenter.groupStickersDidLoad); - super.onFragmentCreate(); - - if (chatMode == MODE_PINNED) { - ArrayList messageObjects = new ArrayList<>(); - for (int a = 0, N = pinnedMessageIds.size(); a < N; a++) { - Integer id = pinnedMessageIds.get(a); - MessageObject object = pinnedMessageObjects.get(id); - if (object != null) { - MessageObject o = new MessageObject(object.currentAccount, object.messageOwner, true, false); - o.replyMessageObject = object.replyMessageObject; - o.mediaExists = object.mediaExists; - o.attachPathExists = object.attachPathExists; - messageObjects.add(o); + @Override + public void onUpdateSlowModeButton(View button, boolean show, CharSequence time) { + showSlowModeHint(button, show, time); + if (headerItem != null && headerItem.getVisibility() != View.VISIBLE) { + headerItem.setVisibility(View.VISIBLE); + if (attachItem != null) { + attachItem.setVisibility(View.GONE); } } - int loadIndex = lastLoadIndex++; - waitingForLoad.add(loadIndex); - getNotificationCenter().postNotificationName(NotificationCenter.messagesDidLoad, dialog_id, messageObjects.size(), messageObjects, false, 0, last_message_id, 0, 0, 2, true, classGuid, loadIndex, pinnedMessageIds.get(0), 0, MODE_PINNED); - } else if (!forceHistoryEmpty) { - loading = true; } - if (isThreadChat() && !isTopic) { - if (highlightMessageId == startLoadFromMessageId) { - needSelectFromMessageId = true; + + @Override + public void onTextSelectionChanged(int start, int end) { + if (editTextItem == null) { + return; } - } else { - getMessagesController().setLastCreatedDialogId(dialog_id, chatMode == MODE_SCHEDULED, true); - if (chatMode == 0) { - if (currentEncryptedChat == null) { - getMediaDataController().loadBotKeyboard(dialog_id); + ActionBarMenu menu = actionBar.createMenu(); + if (suggestEmojiPanel != null) { + suggestEmojiPanel.onTextSelectionChanged(start, end); + } + if (end - start > 0) { + if (editTextItem.getTag() == null) { + editTextItem.setTag(1); + + if (editTextItem.getVisibility() != View.VISIBLE) { + if (chatMode == 0 && (threadMessageId == 0 || isTopic) && !UserObject.isReplyUser(currentUser) && reportType < 0) { + editTextItem.setVisibility(View.VISIBLE); + checkEditTextItemMenu(); + headerItem.setVisibility(View.GONE); + attachItem.setVisibility(View.GONE); + } else { + ValueAnimator valueAnimator = ValueAnimator.ofFloat(AndroidUtilities.dp(48), 0); + valueAnimator.setDuration(220); + valueAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + valueAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationStart(Animator animation) { + actionBar.setMenuOffsetSuppressed(true); + checkEditTextItemMenu(); + editTextItem.setVisibility(View.VISIBLE); + menu.translateXItems(AndroidUtilities.dp(48)); + } + + @Override + public void onAnimationEnd(Animator animation) { + actionBar.setMenuOffsetSuppressed(false); + } + }); + valueAnimator.addUpdateListener(animation -> 
menu.translateXItems((float) animation.getAnimatedValue())); + valueAnimator.start(); + } + } } - getMessagesController().loadPeerSettings(currentUser, currentChat); + editTextStart = start; + editTextEnd = end; + } else { + if (editTextItem.getTag() != null) { + editTextItem.setTag(null); + if (editTextItem.getVisibility() != View.GONE) { + if (chatMode == 0 && (threadMessageId == 0 || isTopic) && !UserObject.isReplyUser(currentUser) && reportType < 0) { + editTextItem.setVisibility(View.GONE); - if (startLoadFromMessageId == 0) { - SharedPreferences sharedPreferences = MessagesController.getNotificationsSettings(currentAccount); - int messageId = sharedPreferences.getInt("diditem" + NotificationsController.getSharedPrefKey(dialog_id, getTopicId()), 0); - if (messageId != 0) { - wasManualScroll = true; - loadingFromOldPosition = true; - startLoadFromMessageOffset = sharedPreferences.getInt("diditemo" + NotificationsController.getSharedPrefKey(dialog_id, getTopicId()), 0); - startLoadFromMessageId = messageId; + if (chatActivityEnterView.hasText() && TextUtils.isEmpty(chatActivityEnterView.getSlowModeTimer())) { + headerItem.setVisibility(View.GONE); + attachItem.setVisibility(View.VISIBLE); + } else { + headerItem.setVisibility(View.VISIBLE); + attachItem.setVisibility(View.GONE); + } + } else { + ValueAnimator valueAnimator = ValueAnimator.ofFloat(0, AndroidUtilities.dp(48)); + valueAnimator.setDuration(220); + valueAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + valueAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationStart(Animator animation) { + actionBar.setMenuOffsetSuppressed(true); + isEditTextItemVisibilitySuppressed = true; + } + + @Override + public void onAnimationEnd(Animator animation) { + editTextItem.setVisibility(View.GONE); + menu.translateXItems(0); + + actionBar.setMenuOffsetSuppressed(false); + isEditTextItemVisibilitySuppressed = false; + } + }); + valueAnimator.addUpdateListener(animation -> menu.translateXItems((float) animation.getAnimatedValue())); + valueAnimator.start(); + } } - } else { - showScrollToMessageError = true; - needSelectFromMessageId = true; } } } - boolean loadInfo = false; - if (currentChat != null) { - chatInfo = getMessagesController().getChatFull(currentChat.id); - groupCall = getMessagesController().getGroupCall(currentChat.id, true); - if (ChatObject.isChannel(currentChat) && !getMessagesController().isChannelAdminsLoaded(currentChat.id)) { - getMessagesController().loadChannelAdmins(currentChat.id, true); - } - fillInviterId(false); - if (chatMode != MODE_PINNED) { - getMessagesStorage().loadChatInfo(currentChat.id, ChatObject.isChannel(currentChat), null, true, false, startLoadFromMessageId); - } - if (chatMode == 0 && chatInfo != null && ChatObject.isChannel(currentChat) && chatInfo.migrated_from_chat_id != 0 && !isThreadChat()) { - mergeDialogId = -chatInfo.migrated_from_chat_id; - maxMessageId[1] = chatInfo.migrated_from_max_id; + @Override + public void onTextChanged(final CharSequence text, boolean bigChange) { + MediaController.getInstance().setInputFieldHasText(!TextUtils.isEmpty(text) || chatActivityEnterView.isEditingMessage()); + if (mentionContainer != null && mentionContainer.getAdapter() != null) { + mentionContainer.getAdapter().searchUsernameOrHashtag(text, chatActivityEnterView.getCursorPosition(), messages, false, false); + } + if (waitingForCharaterEnterRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(waitingForCharaterEnterRunnable); + 
waitingForCharaterEnterRunnable = null; + } + if ((currentChat == null || ChatObject.canSendEmbed(currentChat)) && chatActivityEnterView.isMessageWebPageSearchEnabled() && (!chatActivityEnterView.isEditingMessage() || !chatActivityEnterView.isEditingCaption())) { + if (bigChange) { + searchLinks(text, true); + } else { + waitingForCharaterEnterRunnable = new Runnable() { + @Override + public void run() { + if (this == waitingForCharaterEnterRunnable) { + searchLinks(text, false); + waitingForCharaterEnterRunnable = null; + } + } + }; + AndroidUtilities.runOnUIThread(waitingForCharaterEnterRunnable, AndroidUtilities.WEB_URL == null ? 3000 : 1000); + } } - loadInfo = chatInfo == null; - checkGroupCallJoin(false); - } else if (currentUser != null) { - if (chatMode != MODE_PINNED) { - getMessagesController().loadUserInfo(currentUser, true, classGuid, startLoadFromMessageId); + if (emojiAnimationsOverlay != null) { + emojiAnimationsOverlay.cancelAllAnimations(); } - loadInfo = userInfo == null; + ReactionsEffectOverlay.dismissAll(); } - if (forceHistoryEmpty) { - endReached[0] = endReached[1] = true; - forwardEndReached[0] = forwardEndReached[1] = true; - firstLoading = false; - checkDispatchHideSkeletons(false); + @Override + public void onTextSpansChanged(CharSequence text) { + searchLinks(text, true); } - if (chatMode != MODE_PINNED && !forceHistoryEmpty) { - waitingForLoad.add(lastLoadIndex); - if (startLoadFromDate != 0) { - getMessagesController().loadMessages(dialog_id, mergeDialogId, false, 30, 0, startLoadFromDate, true, 0, classGuid, 4, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); - } else if (startLoadFromMessageId != 0 && (!isThreadChat() || startLoadFromMessageId == highlightMessageId || isTopic)) { - startLoadFromMessageIdSaved = startLoadFromMessageId; - if (migrated_to != 0) { - mergeDialogId = migrated_to; - getMessagesController().loadMessages(mergeDialogId, 0, loadInfo, loadingFromOldPosition ? 50 : (AndroidUtilities.isTablet() || (isThreadChat() && !isTopic) ? 30 : 20), startLoadFromMessageId, 0, true, 0, classGuid, 3, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); - } else { - getMessagesController().loadMessages(dialog_id, mergeDialogId, loadInfo, loadingFromOldPosition ? 50 : (AndroidUtilities.isTablet() || (isThreadChat() && !isTopic) ? 30 : 20), startLoadFromMessageId, 0, true, 0, classGuid, 3, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); + + @Override + public void needSendTyping() { + getMessagesController().sendTyping(dialog_id, threadMessageId, 0, classGuid); + } + + @Override + public void onAttachButtonHidden() { + if (actionBar.isSearchFieldVisible()) { + return; + } + if (editTextItem != null && !isEditTextItemVisibilitySuppressed) { + editTextItem.setVisibility(View.GONE); + } + if (TextUtils.isEmpty(chatActivityEnterView.getSlowModeTimer())) { + if (headerItem != null) { + headerItem.setVisibility(View.GONE); } - } else { - if (historyPreloaded) { - lastLoadIndex++; - } else { - getMessagesController().loadMessages(dialog_id, mergeDialogId, loadInfo, AndroidUtilities.isTablet() || (isThreadChat() && !isTopic) ? 
30 : 20, startLoadFromMessageId, 0, true, 0, classGuid, 2, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); + if (attachItem != null) { + attachItem.setVisibility(View.VISIBLE); } } } - if (chatMode == 0 && !isThreadChat()) { - waitingForLoad.add(lastLoadIndex); - getMessagesController().loadMessages(dialog_id, mergeDialogId, false, 1, 0, 0, true, 0, classGuid, 2, 0, MODE_SCHEDULED, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); + + @Override + public void onAttachButtonShow() { + if (actionBar.isSearchFieldVisible()) { + return; + } + if (headerItem != null) { + headerItem.setVisibility(View.VISIBLE); + } + if (editTextItem != null && !isEditTextItemVisibilitySuppressed) { + editTextItem.setVisibility(View.GONE); + } + if (attachItem != null) { + attachItem.setVisibility(View.GONE); + } } - if (chatMode == 0) { - if (userId != 0 && currentUser.bot) { - getMediaDataController().loadBotInfo(userId, userId, true, classGuid); - } else if (chatInfo instanceof TLRPC.TL_chatFull) { - for (int a = 0; a < chatInfo.participants.participants.size(); a++) { - TLRPC.ChatParticipant participant = chatInfo.participants.participants.get(a); - TLRPC.User user = getMessagesController().getUser(participant.user_id); - if (user != null && user.bot) { - getMediaDataController().loadBotInfo(user.id, -chatInfo.id, true, classGuid); - } + @Override + public void onMessageEditEnd(boolean loading) { + if (chatListItemAnimator != null) { + chatActivityEnterViewAnimateFromTop = chatActivityEnterView.getBackgroundTop(); + if (chatActivityEnterViewAnimateFromTop != 0) { + chatActivityEnterViewAnimateBeforeSending = true; } } - if (AndroidUtilities.isTablet() && !isComments) { - getNotificationCenter().postNotificationName(NotificationCenter.openedChatChanged, dialog_id, getTopicId(), false); + if (!loading) { + if (mentionContainer != null) { + mentionContainer.getAdapter().setNeedBotContext(true); + } + if (editingMessageObject != null) { + AndroidUtilities.runOnUIThread(() -> hideFieldPanel(true), 30); + } + boolean waitingForKeyboard = false; + if (chatActivityEnterView.isPopupShowing()) { + chatActivityEnterView.setFieldFocused(); + waitingForKeyboard = true; + } + chatActivityEnterView.setAllowStickersAndGifs(true, true, true, waitingForKeyboard); + if (editingMessageObjectReqId != 0) { + getConnectionsManager().cancelRequest(editingMessageObjectReqId, true); + editingMessageObjectReqId = 0; + } + updatePinnedMessageView(true); + updateBottomOverlay(); + updateVisibleRows(); } + } - if (currentUser != null && !UserObject.isReplyUser(currentUser)) { - userBlocked = getMessagesController().blockePeers.indexOfKey(currentUser.id) >= 0; + @Override + public void onWindowSizeChanged(int size) { + if (size < AndroidUtilities.dp(72) + ActionBar.getCurrentActionBarHeight()) { + allowStickersPanel = false; + if (suggestEmojiPanel.getVisibility() == View.VISIBLE) { + suggestEmojiPanel.setVisibility(View.INVISIBLE); + } + } else { + allowStickersPanel = true; + if (suggestEmojiPanel.getVisibility() == View.INVISIBLE) { + suggestEmojiPanel.setVisibility(View.VISIBLE); + } } - if (currentEncryptedChat != null && AndroidUtilities.getMyLayerVersion(currentEncryptedChat.layer) != SecretChatHelper.CURRENT_SECRET_CHAT_LAYER) { - getSecretChatHelper().sendNotifyLayerMessage(currentEncryptedChat, null); + allowContextBotPanel = !chatActivityEnterView.isPopupShowing(); +// checkContextBotPanel(); + int size2 = size + (chatActivityEnterView.isPopupShowing() ? 
1 << 16 : 0); + if (lastSize != size2) { + chatActivityEnterViewAnimateFromTop = 0; + chatActivityEnterViewAnimateBeforeSending = false; } + lastSize = size2; } - if (chatInfo != null && chatInfo.linked_chat_id != 0) { - TLRPC.Chat chat = getMessagesController().getChat(chatInfo.linked_chat_id); - if (chat != null && chat.megagroup) { - getMessagesController().startShortPoll(chat, classGuid, false); + + @Override + public void onStickersTab(boolean opened) { + if (emojiButtonRed != null) { + emojiButtonRed.setVisibility(View.GONE); } + allowContextBotPanelSecond = !opened; +// checkContextBotPanel(); } - if (chatInvite != null) { - int timeout = chatInvite.expires - getConnectionsManager().getCurrentTime(); - if (timeout < 0) { - timeout = 10; + @Override + public void didPressAttachButton() { + if (chatAttachAlert != null) { + chatAttachAlert.setEditingMessageObject(null); } - AndroidUtilities.runOnUIThread(chatInviteRunnable = () -> { - chatInviteRunnable = null; - if (getParentActivity() == null) { - return; - } - AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity(), themeDelegate); - if (ChatObject.isChannel(currentChat) && !currentChat.megagroup) { - builder.setMessage(LocaleController.getString("JoinByPeekChannelText", R.string.JoinByPeekChannelText)); - builder.setTitle(LocaleController.getString("JoinByPeekChannelTitle", R.string.JoinByPeekChannelTitle)); - } else { - builder.setMessage(LocaleController.getString("JoinByPeekGroupText", R.string.JoinByPeekGroupText)); - builder.setTitle(LocaleController.getString("JoinByPeekGroupTitle", R.string.JoinByPeekGroupTitle)); - } - builder.setPositiveButton(LocaleController.getString("JoinByPeekJoin", R.string.JoinByPeekJoin), (dialogInterface, i) -> { - if (bottomOverlayChatText != null) { - bottomOverlayChatText.callOnClick(); - } - }); - builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), (dialogInterface, i) -> finishFragment()); - showDialog(builder.create()); - }, timeout * 1000L); + openAttachMenu(); } - if (isTopic) { - getMessagesController().getTopicsController().getTopicRepliesCount(dialog_id, getTopicId()); + @Override + public void needStartRecordVideo(int state, boolean notify, int scheduleDate) { + checkInstantCameraView(); + if (instantCameraView != null) { + if (state == 0) { + instantCameraView.showCamera(); + chatListView.stopScroll(); + chatAdapter.updateRowsSafe(); + } else if (state == 1 || state == 3 || state == 4) { + instantCameraView.send(state, notify, scheduleDate); + } else if (state == 2 || state == 5) { + instantCameraView.cancel(state == 2); + } + } } - return true; - } + @Override + public void needChangeVideoPreviewState(int state, float seekProgress) { + if (instantCameraView != null) { + instantCameraView.changeVideoPreviewState(state, seekProgress); + } + } - private void fillInviterId(boolean load) { - if (currentChat == null || chatInfo == null || ChatObject.isNotInChat(currentChat) || currentChat.creator) { - return; - } - if (chatInfo.inviterId != 0) { - chatInviterId = chatInfo.inviterId; - return; - } - if (chatInfo.participants != null) { - if (chatInfo.participants.self_participant != null) { - chatInviterId = chatInfo.participants.self_participant.inviter_id; - return; - } - long selfId = getUserConfig().getClientUserId(); - for (int a = 0, N = chatInfo.participants.participants.size(); a < N; a++) { - TLRPC.ChatParticipant participant = chatInfo.participants.participants.get(a); - if (participant.user_id == selfId) { - chatInviterId = 
participant.inviter_id; - return; - } + @Override + public void needStartRecordAudio(int state) { + int visibility = state == 0 ? View.GONE : View.VISIBLE; + if (overlayView.getVisibility() != visibility) { + overlayView.setVisibility(visibility); } } - if (load && chatInviterId == 0) { - getMessagesController().checkChatInviter(currentChat.id, false); - } - } - private void hideUndoViews() { - if (undoView != null) { - undoView.hide(true, 0); - } - if (pinBulletin != null) { - pinBulletin.hide(false, 0); - } - if (topUndoView != null) { - topUndoView.hide(true, 0); + @Override + public void needShowMediaBanHint() { + showMediaBannedHint(); } - } - public int getOtherSameChatsDiff() { - if (parentLayout == null || parentLayout.getFragmentStack() == null) { - return 0; - } - int cur = parentLayout.getFragmentStack().indexOf(this); - if (cur == -1) { - cur = parentLayout.getFragmentStack().size(); - } - int i = cur; - for (int a = 0; a < parentLayout.getFragmentStack().size(); a++) { - BaseFragment fragment = parentLayout.getFragmentStack().get(a); - if (fragment != this && fragment instanceof ChatActivity) { - ChatActivity chatActivity = (ChatActivity) fragment; - if (chatActivity.dialog_id == dialog_id) { - i = a; - break; + @Override + public void onStickersExpandedChange() { + checkRaiseSensors(); + if (chatActivityEnterView.isStickersExpanded()) { + AndroidUtilities.setAdjustResizeToNothing(getParentActivity(), classGuid); + if (Bulletin.getVisibleBulletin() != null && Bulletin.getVisibleBulletin().isShowing()) { + Bulletin.getVisibleBulletin().hide(); } + } else { + AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); + } + if (mentionContainer != null) { + mentionContainer.animate().alpha(chatActivityEnterView.isStickersExpanded() ? 0 : 1f).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + } + if (suggestEmojiPanel != null) { + suggestEmojiPanel.setVisibility(View.VISIBLE); + suggestEmojiPanel.animate().alpha(chatActivityEnterView.isStickersExpanded() ? 
0 : 1f).setInterpolator(CubicBezierInterpolator.DEFAULT).withEndAction(() -> { + if (suggestEmojiPanel != null && chatActivityEnterView.isStickersExpanded()) { + suggestEmojiPanel.setVisibility(View.GONE); + } + }).start(); } } - return i - cur; - } - @Override - public void onFragmentDestroy() { - super.onFragmentDestroy(); - if (chatActivityEnterView != null) { - chatActivityEnterView.onDestroy(); - } - if (avatarContainer != null) { - avatarContainer.onDestroy(); - } - if (mentionContainer != null && mentionContainer.getAdapter() != null) { - mentionContainer.getAdapter().onDestroy(); - } - if (chatAttachAlert != null) { - chatAttachAlert.dismissInternal(); - } - getNotificationCenter().onAnimationFinish(transitionAnimationIndex); - NotificationCenter.getGlobalInstance().onAnimationFinish(transitionAnimationGlobalIndex); - getNotificationCenter().onAnimationFinish(scrollAnimationIndex); - getNotificationCenter().onAnimationFinish(scrollCallbackAnimationIndex); - hideUndoViews(); - if (chatInviteRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(chatInviteRunnable); - chatInviteRunnable = null; - } - getNotificationCenter().removePostponeNotificationsCallback(postponeNotificationsWhileLoadingCallback); - getMessagesController().setLastCreatedDialogId(dialog_id, chatMode == MODE_SCHEDULED, false); - getNotificationCenter().removeObserver(this, NotificationCenter.messagesDidLoad); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.invalidateMotionBackground); - getNotificationCenter().removeObserver(this, NotificationCenter.didUpdateConnectionState); - getNotificationCenter().removeObserver(this, NotificationCenter.updateInterfaces); - getNotificationCenter().removeObserver(this, NotificationCenter.updateDefaultSendAsPeer); - getNotificationCenter().removeObserver(this, NotificationCenter.didReceiveNewMessages); - getNotificationCenter().removeObserver(this, NotificationCenter.closeChats); - getNotificationCenter().removeObserver(this, NotificationCenter.messagesRead); - getNotificationCenter().removeObserver(this, NotificationCenter.threadMessagesRead); - getNotificationCenter().removeObserver(this, NotificationCenter.commentsRead); - getNotificationCenter().removeObserver(this, NotificationCenter.changeRepliesCounter); - getNotificationCenter().removeObserver(this, NotificationCenter.messagesDeleted); - getNotificationCenter().removeObserver(this, NotificationCenter.historyCleared); - getNotificationCenter().removeObserver(this, NotificationCenter.messageReceivedByServer); - getNotificationCenter().removeObserver(this, NotificationCenter.messageReceivedByAck); - getNotificationCenter().removeObserver(this, NotificationCenter.messageSendError); - getNotificationCenter().removeObserver(this, NotificationCenter.chatInfoDidLoad); - getNotificationCenter().removeObserver(this, NotificationCenter.didLoadChatInviter); - getNotificationCenter().removeObserver(this, NotificationCenter.groupCallUpdated); - getNotificationCenter().removeObserver(this, NotificationCenter.encryptedChatUpdated); - getNotificationCenter().removeObserver(this, NotificationCenter.messagesReadEncrypted); - getNotificationCenter().removeObserver(this, NotificationCenter.removeAllMessagesFromDialog); - getNotificationCenter().removeObserver(this, NotificationCenter.contactsDidLoad); - getNotificationCenter().removeObserver(this, NotificationCenter.messagePlayingProgressDidChanged); - 
getNotificationCenter().removeObserver(this, NotificationCenter.messagePlayingDidReset); - getNotificationCenter().removeObserver(this, NotificationCenter.screenshotTook); - getNotificationCenter().removeObserver(this, NotificationCenter.blockedUsersDidLoad); - getNotificationCenter().removeObserver(this, NotificationCenter.fileNewChunkAvailable); - getNotificationCenter().removeObserver(this, NotificationCenter.didCreatedNewDeleteTask); - getNotificationCenter().removeObserver(this, NotificationCenter.messagePlayingDidStart); - getNotificationCenter().removeObserver(this, NotificationCenter.messagePlayingGoingToStop); - getNotificationCenter().removeObserver(this, NotificationCenter.updateMessageMedia); - getNotificationCenter().removeObserver(this, NotificationCenter.voiceTranscriptionUpdate); - getNotificationCenter().removeObserver(this, NotificationCenter.animatedEmojiDocumentLoaded); - getNotificationCenter().removeObserver(this, NotificationCenter.replaceMessagesObjects); - getNotificationCenter().removeObserver(this, NotificationCenter.notificationsSettingsUpdated); - getNotificationCenter().removeObserver(this, NotificationCenter.replyMessagesDidLoad); - getNotificationCenter().removeObserver(this, NotificationCenter.didReceivedWebpages); - getNotificationCenter().removeObserver(this, NotificationCenter.didReceivedWebpagesInUpdates); - getNotificationCenter().removeObserver(this, NotificationCenter.messagesReadContent); - getNotificationCenter().removeObserver(this, NotificationCenter.botInfoDidLoad); - getNotificationCenter().removeObserver(this, NotificationCenter.botKeyboardDidLoad); - getNotificationCenter().removeObserver(this, NotificationCenter.chatSearchResultsAvailable); - getNotificationCenter().removeObserver(this, NotificationCenter.chatSearchResultsLoading); - getNotificationCenter().removeObserver(this, NotificationCenter.messagePlayingPlayStateChanged); - getNotificationCenter().removeObserver(this, NotificationCenter.didUpdateMessagesViews); - getNotificationCenter().removeObserver(this, NotificationCenter.chatInfoCantLoad); - getNotificationCenter().removeObserver(this, NotificationCenter.didLoadPinnedMessages); - getNotificationCenter().removeObserver(this, NotificationCenter.peerSettingsDidLoad); - getNotificationCenter().removeObserver(this, NotificationCenter.newDraftReceived); - getNotificationCenter().removeObserver(this, NotificationCenter.userInfoDidLoad); - getNotificationCenter().removeObserver(this, NotificationCenter.pinnedInfoDidLoad); - getNotificationCenter().removeObserver(this, NotificationCenter.topicsDidLoaded); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didSetNewWallpapper); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didApplyNewTheme); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.goingToPreviewTheme); - getNotificationCenter().removeObserver(this, NotificationCenter.channelRightsUpdated); - getNotificationCenter().removeObserver(this, NotificationCenter.updateMentionsCount); - getNotificationCenter().removeObserver(this, NotificationCenter.audioRecordTooShort); - getNotificationCenter().removeObserver(this, NotificationCenter.didUpdatePollResults); - getNotificationCenter().removeObserver(this, NotificationCenter.didUpdateReactions); - getNotificationCenter().removeObserver(this, NotificationCenter.didUpdateExtendedMedia); - getNotificationCenter().removeObserver(this, NotificationCenter.chatOnlineCountDidLoad); - 
getNotificationCenter().removeObserver(this, NotificationCenter.videoLoadingStateChanged); - getNotificationCenter().removeObserver(this, NotificationCenter.scheduledMessagesUpdated); - getNotificationCenter().removeObserver(this, NotificationCenter.diceStickersDidLoad); - getNotificationCenter().removeObserver(this, NotificationCenter.dialogDeleted); - getNotificationCenter().removeObserver(this, NotificationCenter.chatAvailableReactionsUpdated); - getNotificationCenter().removeObserver(this, NotificationCenter.didLoadSponsoredMessages); - getNotificationCenter().removeObserver(this, NotificationCenter.didLoadSendAsPeers); - getNotificationCenter().removeObserver(this, NotificationCenter.dialogsUnreadReactionsCounterChanged); - getNotificationCenter().removeObserver(this, NotificationCenter.groupStickersDidLoad); - if (currentEncryptedChat != null) { - getNotificationCenter().removeObserver(this, NotificationCenter.didVerifyMessagesStickers); + @Override + public void scrollToSendingMessage() { + int id = getSendMessagesHelper().getSendingMessageId(dialog_id); + if (id != 0) { + scrollToMessageId(id, 0, true, 0, true, 0); + } } - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.needSetDayNightTheme); - if (chatMode == 0 && AndroidUtilities.isTablet()) { - getNotificationCenter().postNotificationName(NotificationCenter.openedChatChanged, dialog_id, getTopicId(), true); - } - if (currentUser != null) { - MediaController.getInstance().stopMediaObserver(); + @Override + public boolean hasScheduledMessages() { + return scheduledMessagesCount > 0 && chatMode == 0; } - if (unregisterFlagSecureNoforwards != null) { - unregisterFlagSecureNoforwards.run(); - unregisterFlagSecureNoforwards = null; - } - if (unregisterFlagSecurePasscode != null) { - unregisterFlagSecurePasscode.run(); - unregisterFlagSecurePasscode = null; - } - if (currentUser != null) { - getMessagesController().cancelLoadFullUser(currentUser.id); - } - AndroidUtilities.removeAdjustResize(getParentActivity(), classGuid); - if (chatAttachAlert != null) { - chatAttachAlert.onDestroy(); - } - AndroidUtilities.unlockOrientation(getParentActivity()); - if (ChatObject.isChannel(currentChat)) { - getMessagesController().startShortPoll(currentChat, classGuid, true); - if (chatInfo != null && chatInfo.linked_chat_id != 0) { - TLRPC.Chat chat = getMessagesController().getChat(chatInfo.linked_chat_id); - getMessagesController().startShortPoll(chat, classGuid, true); + @Override + public void onSendLongClick() { + if (scheduledOrNoSoundHint != null) { + scheduledOrNoSoundHint.hide(); } } - if (textSelectionHelper != null) { - textSelectionHelper.clear(); - } - if (chatListItemAnimator != null) { - chatListItemAnimator.onDestroy(); - } - if (pinchToZoomHelper != null) { - pinchToZoomHelper.clear(); - } - chatThemeBottomSheet = null; - INavigationLayout parentLayout = getParentLayout(); - if (parentLayout != null && parentLayout.getFragmentStack() != null) { - BackButtonMenu.clearPulledDialogs(this, parentLayout.getFragmentStack().indexOf(this) - (replacingChatActivity ? 
0 : 1)); + @Override + public void openScheduledMessages() { + ChatActivity.this.openScheduledMessages(); } - replacingChatActivity = false; - if (progressDialogCurrent != null) { - progressDialogCurrent.cancel(); - progressDialogCurrent = null; + @Override + public void onAudioVideoInterfaceUpdated() { + updatePagedownButtonVisibility(true); } - } - private ArrayList getSelectedMessages() { + @Override + public void bottomPanelTranslationYChanged(float translation) { + if (translation != 0) { + wasManualScroll = true; + } + bottomPanelTranslationY = chatActivityEnterView.panelAnimationInProgress() ? chatActivityEnterView.getEmojiPadding() - translation : 0; + bottomPanelTranslationYReverse = chatActivityEnterView.panelAnimationInProgress() ? translation : 0; + chatActivityEnterView.setTranslationY(translation); + mentionContainer.setTranslationY(translation); + contentView.setEmojiOffset(chatActivityEnterView.panelAnimationInProgress(), bottomPanelTranslationY); - ArrayList fmessages = new ArrayList<>(); + translation += chatActivityEnterView.getTopViewTranslation(); + mentionContainer.setTranslationY(translation); + chatListView.setTranslationY(translation); - for (int a = 1; a >= 0; a--) { - ArrayList ids = new ArrayList<>(); - for (int b = 0; b < selectedMessagesIds[a].size(); b++) { - ids.add(selectedMessagesIds[a].keyAt(b)); - } - Collections.sort(ids); - for (int b = 0; b < ids.size(); b++) { - Integer id = ids.get(b); - MessageObject messageObject = selectedMessagesIds[a].get(id); - if (messageObject != null) { - fmessages.add(messageObject); - } - } - selectedMessagesCanCopyIds[a].clear(); - selectedMessagesCanStarIds[a].clear(); - selectedMessagesIds[a].clear(); + invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); + updateTextureViewPosition(false, false); + contentView.invalidate(); + updateBulletinLayout(); } - hideActionMode(); - updatePinnedMessageView(true); - updateVisibleRows(); + @Override + public void prepareMessageSending() { + waitingForSendingMessageLoad = true; + } - return fmessages; + @Override + public void onTrendingStickersShowed(boolean show) { + if (show) { + AndroidUtilities.setAdjustResizeToNothing(getParentActivity(), classGuid); + fragmentView.requestLayout(); + } else { + AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); + } + } + @Override + public boolean hasForwardingMessages() { + return forwardingMessages != null && !forwardingMessages.messages.isEmpty(); + } } - private ArrayList getSelectedMessages1() { - - ArrayList fmessages = new ArrayList<>(); - - for (int a = 1; a >= 0; a--) { - for (int b = 0; b < selectedMessagesIds[a].size(); b++) { - MessageObject messageObject = selectedMessagesIds[a].get(selectedMessagesIds[a].keyAt(b)); - if (messageObject != null) { - fmessages.add(messageObject); - } - } + private final ChatScrollCallback chatScrollHelperCallback = new ChatScrollCallback(); + private final Runnable showScheduledOrNoSoundRunnable = () -> { + if (getParentActivity() == null || fragmentView == null || chatActivityEnterView == null) { + return; } + View anchor = chatActivityEnterView.getSendButton(); + if (anchor == null || chatActivityEnterView.getEditField() == null || chatActivityEnterView.getEditField().getText().length() < 5) { + return; + } + SharedConfig.increaseScheduledOrNoSuoundHintShowed(); + if (scheduledOrNoSoundHint == null) { + scheduledOrNoSoundHint = new HintView(getParentActivity(), 4, themeDelegate); + scheduledOrNoSoundHint.setShowingDuration(5000); + 
scheduledOrNoSoundHint.setAlpha(0); + scheduledOrNoSoundHint.setVisibility(View.INVISIBLE); + scheduledOrNoSoundHint.setText(LocaleController.getString("ScheduledOrNoSoundHint", R.string.ScheduledOrNoSoundHint)); + contentView.addView(scheduledOrNoSoundHint, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 10, 0, 10, 0)); + } + scheduledOrNoSoundHint.showForView(anchor, true); + }; - return fmessages; - + public ChatActivity(Bundle args) { + super(args); } @Override - public View createView(Context context) { - textSelectionHelper = new TextSelectionHelper.ChatListTextSelectionHelper() { - @Override - public int getParentTopPadding() { - return (int) chatListViewPaddingTop; - } + public boolean onFragmentCreate() { + final long chatId = arguments.getLong("chat_id", 0); + final long userId = arguments.getLong("user_id", 0); + final int encId = arguments.getInt("enc_id", 0); + dialogFolderId = arguments.getInt("dialog_folder_id", 0); + dialogFilterId = arguments.getInt("dialog_filter_id", 0); + chatMode = arguments.getInt("chatMode", 0); + voiceChatHash = arguments.getString("voicechat", null); + livestream = !TextUtils.isEmpty(arguments.getString("livestream", null)); + attachMenuBotToOpen = arguments.getString("attach_bot", null); + attachMenuBotStartCommand = arguments.getString("attach_bot_start_command", null); + inlineReturn = arguments.getLong("inline_return", 0); + String inlineQuery = arguments.getString("inline_query"); + inlineQueryForInput = arguments.getString("inline_query_input"); + premiumInvoiceBot = arguments.getBoolean("premium_bot", false); + startLoadFromMessageId = arguments.getInt("message_id", 0); + startLoadFromDate = arguments.getInt("start_from_date", 0); + startFromVideoTimestamp = arguments.getInt("video_timestamp", -1); + threadUnreadMessagesCount = arguments.getInt("unread_count", 0); + if (startFromVideoTimestamp >= 0) { + startFromVideoMessageId = startLoadFromMessageId; + } + reportType = arguments.getInt("report", -1); + pulled = arguments.getBoolean("pulled", false); + boolean historyPreloaded = arguments.getBoolean("historyPreloaded", false); + if (highlightMessageId != 0 && highlightMessageId != Integer.MAX_VALUE) { + startLoadFromMessageId = highlightMessageId; + } + int migrated_to = arguments.getInt("migrated_to", 0); + scrollToTopOnResume = arguments.getBoolean("scrollToTopOnResume", false); + needRemovePreviousSameChatActivity = arguments.getBoolean("need_remove_previous_same_chat_activity", true); + noForwardQuote = arguments.getBoolean("forward_noquote", false); + justCreatedChat = arguments.getBoolean("just_created_chat", false); - @Override - public int getParentBottomPadding() { - return blurredViewBottomOffset; + if (chatId != 0) { + currentChat = getMessagesController().getChat(chatId); + if (currentChat == null) { + final CountDownLatch countDownLatch = new CountDownLatch(1); + final MessagesStorage messagesStorage = getMessagesStorage(); + messagesStorage.getStorageQueue().postRunnable(() -> { + currentChat = messagesStorage.getChat(chatId); + countDownLatch.countDown(); + }); + try { + countDownLatch.await(); + } catch (Exception e) { + FileLog.e(e); + } + if (currentChat != null) { + getMessagesController().putChat(currentChat, true); + } else { + return false; + } } - - @Override - protected int getThemedColor(String key) { - Integer color = themeDelegate.getColor(key); - return color != null ? 
color : super.getThemedColor(key); + dialog_id = -chatId; + if (ChatObject.isChannel(currentChat)) { + getMessagesController().startShortPoll(currentChat, classGuid, false); } - - @Override - protected Theme.ResourcesProvider getResourcesProvider() { - return themeDelegate; + } else if (userId != 0) { + currentUser = getMessagesController().getUser(userId); + if (currentUser == null) { + final MessagesStorage messagesStorage = getMessagesStorage(); + final CountDownLatch countDownLatch = new CountDownLatch(1); + messagesStorage.getStorageQueue().postRunnable(() -> { + currentUser = messagesStorage.getUser(userId); + countDownLatch.countDown(); + }); + try { + countDownLatch.await(); + } catch (Exception e) { + FileLog.e(e); + } + if (currentUser != null) { + getMessagesController().putUser(currentUser, true); + } else { + return false; + } } - }; - - if (reportType >= 0) { - actionBar.setBackgroundColor(getThemedColor(Theme.key_actionBarActionModeDefault)); - actionBar.setItemsColor(getThemedColor(Theme.key_actionBarActionModeDefaultIcon), false); - actionBar.setItemsBackgroundColor(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), false); - actionBar.setTitleColor(getThemedColor(Theme.key_actionBarActionModeDefaultIcon)); - actionBar.setSubtitleColor(getThemedColor(Theme.key_actionBarActionModeDefaultIcon)); - } - actionBarBackgroundPaint.setColor(getThemedColor(Theme.key_actionBarDefault)); + dialog_id = userId; + botUser = arguments.getString("botUser"); + if (inlineQuery != null) { + getMessagesController().sendBotStart(currentUser, inlineQuery); + } else if (premiumInvoiceBot && !TextUtils.isEmpty(botUser)) { + getMessagesController().sendBotStart(currentUser, botUser); - if (chatMessageCellsCache.isEmpty()) { - for (int a = 0; a < 15; a++) { - chatMessageCellsCache.add(new ChatMessageCell(context, true, themeDelegate)); + botUser = null; + premiumInvoiceBot = false; } - } - for (int a = 1; a >= 0; a--) { - selectedMessagesIds[a].clear(); - selectedMessagesCanCopyIds[a].clear(); - selectedMessagesCanStarIds[a].clear(); - } - scheduledOrNoSoundHint = null; - infoTopView = null; - aspectRatioFrameLayout = null; - videoTextureView = null; - searchAsListHint = null; - mediaBanTooltip = null; - noSoundHintView = null; - forwardHintView = null; - checksHintView = null; - textSelectionHint = null; - emojiButtonRed = null; - gifHintTextView = null; - emojiHintTextView = null; - pollHintView = null; - timerHintView = null; - videoPlayerContainer = null; - voiceHintTextView = null; - blurredView = null; - dummyMessageCell = null; - cantDeleteMessagesCount = 0; - canEditMessagesCount = 0; - cantForwardMessagesCount = 0; - canForwardMessagesCount = 0; - cantSaveMessagesCount = 0; - canSaveMusicCount = 0; - canSaveDocumentsCount = 0; - - hasOwnBackground = true; - if (chatAttachAlert != null) { - try { - if (chatAttachAlert.isShowing()) { - chatAttachAlert.dismiss(); + } else if (encId != 0) { + currentEncryptedChat = getMessagesController().getEncryptedChat(encId); + final MessagesStorage messagesStorage = getMessagesStorage(); + if (currentEncryptedChat == null) { + final CountDownLatch countDownLatch = new CountDownLatch(1); + messagesStorage.getStorageQueue().postRunnable(() -> { + currentEncryptedChat = messagesStorage.getEncryptedChat(encId); + countDownLatch.countDown(); + }); + try { + countDownLatch.await(); + } catch (Exception e) { + FileLog.e(e); + } + if (currentEncryptedChat != null) { + getMessagesController().putEncryptedChat(currentEncryptedChat, true); + } else { + 
return false; } - } catch (Exception ignore) { - } - chatAttachAlert.onDestroy(); - chatAttachAlert = null; + currentUser = getMessagesController().getUser(currentEncryptedChat.user_id); + if (currentUser == null) { + final CountDownLatch countDownLatch = new CountDownLatch(1); + messagesStorage.getStorageQueue().postRunnable(() -> { + currentUser = messagesStorage.getUser(currentEncryptedChat.user_id); + countDownLatch.countDown(); + }); + try { + countDownLatch.await(); + } catch (Exception e) { + FileLog.e(e); + } + if (currentUser != null) { + getMessagesController().putUser(currentUser, true); + } else { + return false; + } + } + dialog_id = DialogObject.makeEncryptedDialogId(encId); + maxMessageId[0] = maxMessageId[1] = Integer.MIN_VALUE; + minMessageId[0] = minMessageId[1] = Integer.MAX_VALUE; + } else { + return false; } - Theme.createChatResources(context, false); + dialog_id_Long = dialog_id; - actionBar.setAddToContainer(false); - if (inPreviewMode) { - actionBar.setBackButtonDrawable(null); - } else { - actionBar.setBackButtonDrawable(new BackDrawable(reportType >= 0)); + transitionAnimationGlobalIndex = NotificationCenter.getGlobalInstance().setAnimationInProgress(transitionAnimationGlobalIndex, new int[0]); + + if (currentUser != null && Build.VERSION.SDK_INT < 23) { + MediaController.getInstance().startMediaObserver(); } - actionBar.setActionBarMenuOnItemClick(new ActionBar.ActionBarMenuOnItemClick() { - @Override - public void onItemClick(final int id) { - if (id == -1) { - if (actionBar.isActionModeShowed()) { - clearSelectionMode(); - } else { - if (!checkRecordLocked(true)) { - finishFragment(); - } - } - } else if (id == view_as_topics) { - TopicsFragment.prepareToSwitchAnimation(ChatActivity.this); - } else if (id == copy) { - SpannableStringBuilder str = new SpannableStringBuilder(); - long previousUid = 0; - for (int a = 1; a >= 0; a--) { - ArrayList ids = new ArrayList<>(); - for (int b = 0; b < selectedMessagesCanCopyIds[a].size(); b++) { - ids.add(selectedMessagesCanCopyIds[a].keyAt(b)); - } - if (currentEncryptedChat == null) { - Collections.sort(ids); - } else { - Collections.sort(ids, Collections.reverseOrder()); - } - for (int b = 0; b < ids.size(); b++) { - Integer messageId = ids.get(b); - MessageObject messageObject = selectedMessagesCanCopyIds[a].get(messageId); - if (str.length() != 0) { - str.append("\n\n"); - } - str.append(getMessageContent(messageObject, previousUid, ids.size() != 1 && (currentUser == null || !currentUser.self))); - previousUid = messageObject.getFromChatId(); - } - } - if (str.length() != 0) { - AndroidUtilities.addToClipboard(str); - undoView.showWithAction(0, UndoView.ACTION_TEXT_COPIED, null); - } - clearSelectionMode(); - } else if (id == combine_message) { - StringBuilder str = new StringBuilder(); - ArrayList toDeleteMessagesIds = new ArrayList<>(); - MessageObject replyTo = getThreadMessage(); - ArrayList suffice_en = new ArrayList(Arrays.asList(',', '.', '!', '?', ':', ';', '(', ')')); - ArrayList suffice_zh = new ArrayList(Arrays.asList(',', '。', '!', '?', ':', ';', '(', ')')); - for (int a = 1; a >= 0; a--) { - ArrayList ids = new ArrayList<>(); - for (int b = 0; b < selectedMessagesCanCopyIds[a].size(); b++) { - ids.add(selectedMessagesCanCopyIds[a].keyAt(b)); - } - if (currentEncryptedChat == null) { - Collections.sort(ids); - } else { - Collections.sort(ids, Collections.reverseOrder()); - } - for (int b = 0; b < ids.size(); b++) { - Integer messageId = ids.get(b); - MessageObject messageObject = 
selectedMessagesCanCopyIds[a].get(messageId); - if (b == 0 && NaConfig.INSTANCE.getCombineMessage().Int() == 0) { - replyTo = messageObject.replyMessageObject; - } - if (str.length() != 0) { - if (!suffice_en.contains(str.charAt(str.length() - 1)) && !suffice_zh.contains(str.charAt(str.length() - 1))) { - // add comma refer to language - if (LocaleController.getInstance().getCurrentLocale().getLanguage().equals("zh")) { - str.append(','); - } else { - str.append(','); - } - } - } - str.append(messageObject.messageText); - if (messageObject.getSenderId() == UserConfig.getInstance(currentAccount).getClientUserId()) { - toDeleteMessagesIds.add(messageId); - } - } - } - if (str.length() != 0) { - SendMessagesHelper.getInstance(currentAccount) - .sendMessage(str.toString(), dialog_id, replyTo, getThreadMessage(), null, false, null, null, null, true, 0, null, false); - MessagesController.getInstance(currentAccount).deleteMessages(toDeleteMessagesIds, null, null, dialog_id, true, false); - } - clearSelectionMode(); - } else if (id == delete) { - if (getParentActivity() == null) { - return; - } - createDeleteMessagesAlert(null, null); - } else if (id == forward) { - openForward(true); - } else if (id == save_to) { - ArrayList messageObjects = new ArrayList<>(); - for (int a = 1; a >= 0; a--) { - for (int b = 0; b < selectedMessagesIds[a].size(); b++) { - messageObjects.add(selectedMessagesIds[a].valueAt(b)); - } - selectedMessagesIds[a].clear(); - selectedMessagesCanCopyIds[a].clear(); - selectedMessagesCanStarIds[a].clear(); - } - boolean isMusic = canSaveMusicCount > 0; - hideActionMode(); - updatePinnedMessageView(true); - updateVisibleRows(); - MediaController.saveFilesFromMessages(getParentActivity(), getAccountInstance(), messageObjects, (count) -> { - if (count > 0) { - if (getParentActivity() == null) { - return; - } - BulletinFactory.of(ChatActivity.this).createDownloadBulletin(isMusic ? 
BulletinFactory.FileType.AUDIOS : BulletinFactory.FileType.UNKNOWNS, count, themeDelegate).show(); - } - }); - } else if (id == chat_enc_timer) { - if (getParentActivity() == null) { - return; - } - showDialog(AlertsCreator.createTTLAlert(getParentActivity(), currentEncryptedChat, themeDelegate).create()); - } else if (id == clear_history || id == delete_chat || id == auto_delete_timer) { - if (getParentActivity() == null) { - return; - } - boolean canDeleteHistory = chatInfo != null && chatInfo.can_delete_channel; - if (id == auto_delete_timer || id == clear_history && currentEncryptedChat == null && ((currentUser != null && !UserObject.isUserSelf(currentUser) && !UserObject.isDeleted(currentUser)) || (chatInfo != null && chatInfo.can_delete_channel))) { - AlertsCreator.createClearDaysDialogAlert(ChatActivity.this, -1, currentUser, currentChat, canDeleteHistory, new MessagesStorage.BooleanCallback() { - @Override - public void run(boolean revoke) { - if (revoke && (currentUser != null || canDeleteHistory)) { - getMessagesStorage().getMessagesCount(dialog_id, (count) -> { - if (count >= 50) { - AlertsCreator.createClearOrDeleteDialogAlert(ChatActivity.this, true, false, true, currentChat, currentUser, false, false, canDeleteHistory, (param) -> performHistoryClear(true, canDeleteHistory), themeDelegate); - } else { - performHistoryClear(true, canDeleteHistory); - } - }); - } else { - performHistoryClear(revoke, canDeleteHistory); - } - } - }, getResourceProvider()); - return; - } - AlertsCreator.createClearOrDeleteDialogAlert(ChatActivity.this, id == clear_history, currentChat, currentUser, currentEncryptedChat != null, true, canDeleteHistory, (param) -> { - if (id == clear_history && ChatObject.isChannel(currentChat) && (!currentChat.megagroup || ChatObject.isPublic(currentChat))) { - getMessagesController().deleteDialog(dialog_id, 2, param); - } else { - if (id != clear_history) { - getNotificationCenter().removeObserver(ChatActivity.this, NotificationCenter.closeChats); - getNotificationCenter().postNotificationName(NotificationCenter.closeChats); - finishFragment(); - getNotificationCenter().postNotificationName(NotificationCenter.needDeleteDialog, dialog_id, currentUser, currentChat, param); - } else { - performHistoryClear(param, canDeleteHistory); - } - } - }, themeDelegate); - } else if (id == share_contact) { - if (currentUser == null || getParentActivity() == null) { - return; - } - if (addToContactsButton.getTag() != null) { - shareMyContact((Integer) addToContactsButton.getTag(), null); - } else { - Bundle args = new Bundle(); - args.putLong("user_id", currentUser.id); - args.putBoolean("addContact", true); - presentFragment(new ContactAddActivity(args)); - } - } else if (id == mute) { - toggleMute(false); - } else if (id == add_shortcut) { - try { - getMediaDataController().installShortcut(currentUser.id); - } catch (Exception e) { - FileLog.e(e); - } - } else if (id == to_the_beginning) { - scrollToMessageId(1, 0, false, 0, true, 0); - } else if (id == to_the_message){ - setScrollToMessage(); - } else if (id == report) { - AlertsCreator.createReportAlert(getParentActivity(), dialog_id, 0, ChatActivity.this, themeDelegate, null); - } else if (id == star) { - for (int a = 0; a < 2; a++) { - for (int b = 0; b < selectedMessagesCanStarIds[a].size(); b++) { - MessageObject msg = selectedMessagesCanStarIds[a].valueAt(b); - getMediaDataController().addRecentSticker(MediaDataController.TYPE_FAVE, msg, msg.getDocument(), (int) (System.currentTimeMillis() / 1000), !hasUnfavedSelected); 
- } - } - clearSelectionMode(); - } else if (id == edit) { - MessageObject messageObject = null; - for (int a = 1; a >= 0; a--) { - if (messageObject == null && selectedMessagesIds[a].size() == 1) { - ArrayList ids = new ArrayList<>(); - for (int b = 0; b < selectedMessagesIds[a].size(); b++) { - ids.add(selectedMessagesIds[a].keyAt(b)); - } - messageObject = messagesDict[a].get(ids.get(0)); - } - selectedMessagesIds[a].clear(); - selectedMessagesCanCopyIds[a].clear(); - selectedMessagesCanStarIds[a].clear(); - } - startEditingMessageObject(messageObject); - hideActionMode(); - updatePinnedMessageView(true); - updateVisibleRows(); - } else if (id == chat_menu_attach) { - ActionBarMenuSubItem attach = new ActionBarMenuSubItem(context, false, true, true, getResourceProvider()); - attach.setTextAndIcon(LocaleController.getString("AttachMenu", R.string.AttachMenu), R.drawable.input_attach); - attach.setOnClickListener(view -> { - headerItem.closeSubMenu(); - if (chatAttachAlert != null) { - chatAttachAlert.setEditingMessageObject(null); - } - openAttachMenu(); - }); - headerItem.toggleSubMenu(attach, attachItem); - } else if (id == bot_help) { - getSendMessagesHelper().sendMessage("/help", dialog_id, null, null, null, false, null, null, null, true, 0, null, false); - } else if (id == bot_settings) { - getSendMessagesHelper().sendMessage("/settings", dialog_id, null, null, null, false, null, null, null, true, 0, null, false); - } else if (id == search) { - openSearchWithText(null); - } else if (id == call || id == video_call) { - if (currentUser != null && getParentActivity() != null) { - VoIPHelper.startCall(currentUser, id == video_call, userInfo != null && userInfo.video_calls_available, getParentActivity(), getMessagesController().getUserFull(currentUser.id), getAccountInstance()); - } - } else if (id == text_bold) { - if (chatActivityEnterView != null) { - chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); - chatActivityEnterView.getEditField().makeSelectedBold(); - } - } else if (id == text_italic) { - if (chatActivityEnterView != null) { - chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); - chatActivityEnterView.getEditField().makeSelectedItalic(); - } - } else if (id == text_spoiler) { - if (chatActivityEnterView != null) { - chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); - chatActivityEnterView.getEditField().makeSelectedSpoiler(); - } - } else if (id == text_mono) { - if (chatActivityEnterView != null) { - chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); - chatActivityEnterView.getEditField().makeSelectedMono(); - } - } else if (id == text_strike) { - if (chatActivityEnterView != null) { - chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); - chatActivityEnterView.getEditField().makeSelectedStrike(); - } - } else if (id == text_underline) { - if (chatActivityEnterView != null) { - chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); - chatActivityEnterView.getEditField().makeSelectedUnderline(); - } - } else if (id == text_link) { - if (chatActivityEnterView != null) { - chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); - chatActivityEnterView.getEditField().makeSelectedUrl(); - } - } else if (id == text_regular) { - if (chatActivityEnterView != null) { - chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, 
editTextEnd); - chatActivityEnterView.getEditField().makeSelectedRegular(); - } - } else if (id == text_mention) { - if (chatActivityEnterView != null) { - chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); - chatActivityEnterView.getEditField().makeSelectedMention(); - } - } else if (id == text_transalte) { - if (chatActivityEnterView != null) { - chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); - chatActivityEnterView.getEditField().makeSelectedTranslate(); - } - } else if (id == change_colors) { - showChatThemeBottomSheet(); - } else if (id == topic_close) { - getMessagesController().getTopicsController().toggleCloseTopic(currentChat.id, forumTopic.id, forumTopic.closed = true); - updateTopicButtons(); - updateBottomOverlay(); - updateTopPanel(true); - } else if (id == open_forum) { - TopicsFragment.prepareToSwitchAnimation(ChatActivity.this); -// Bundle bundle = new Bundle(); -// bundle.putLong("chat_id", -dialog_id); -// presentFragment(new TopicsFragment(bundle)); - } else { - nkbtn_onclick_actionbar(id); - } - } - }); - View backButton = actionBar.getBackButton(); - backButton.setOnTouchListener(new LongPressListenerWithMovingGesture() { - @Override - public void onLongPress() { - scrimPopupWindow = BackButtonMenu.show(ChatActivity.this, backButton, dialog_id, getTopicId(), themeDelegate); - if (scrimPopupWindow != null) { - setSubmenu(scrimPopupWindow); - scrimPopupWindow.setOnDismissListener(() -> { - setSubmenu(null); - scrimPopupWindow = null; - menuDeleteItem = null; - scrimPopupWindowItems = null; - chatLayoutManager.setCanScrollVertically(true); - if (scrimPopupWindowHideDimOnDismiss) { - dimBehindView(false); - } else { - scrimPopupWindowHideDimOnDismiss = true; - } - if (chatActivityEnterView != null) { - chatActivityEnterView.getEditField().setAllowDrawCursor(true); - } - }); - chatListView.stopScroll(); - chatLayoutManager.setCanScrollVertically(false); - dimBehindView(backButton, 0.3f); - hideHints(false); - if (topUndoView != null) { - topUndoView.hide(true, 1); - } - if (undoView != null) { - undoView.hide(true, 1); - } - if (chatActivityEnterView != null) { - chatActivityEnterView.getEditField().setAllowDrawCursor(false); - } - } - } - }); - actionBar.setInterceptTouchEventListener((view, motionEvent) -> { - if (chatThemeBottomSheet != null) { - chatThemeBottomSheet.close(); - return true; - } - return false; - }); - - if (avatarContainer != null) { - avatarContainer.onDestroy(); - } - avatarContainer = new ChatAvatarContainer(context, this, currentEncryptedChat != null, themeDelegate); - avatarContainer.allowShorterStatus = true; - avatarContainer.premiumIconHiddable = true; - AndroidUtilities.updateViewVisibilityAnimated(avatarContainer, true, 1f, false); - updateTopicTitleIcon(); - if (inPreviewMode || inBubbleMode) { - avatarContainer.setOccupyStatusBar(false); - } - if (reportType >= 0) { - if (reportType == AlertsCreator.REPORT_TYPE_SPAM) { - actionBar.setTitle(LocaleController.getString("ReportChatSpam", R.string.ReportChatSpam)); - } else if (reportType == AlertsCreator.REPORT_TYPE_VIOLENCE) { - actionBar.setTitle(LocaleController.getString("ReportChatViolence", R.string.ReportChatViolence)); - } else if (reportType == AlertsCreator.REPORT_TYPE_CHILD_ABUSE) { - actionBar.setTitle(LocaleController.getString("ReportChatChild", R.string.ReportChatChild)); - } else if (reportType == AlertsCreator.REPORT_TYPE_PORNOGRAPHY) { - 
actionBar.setTitle(LocaleController.getString("ReportChatPornography", R.string.ReportChatPornography)); - } else if (reportType == AlertsCreator.REPORT_TYPE_ILLEGAL_DRUGS) { - actionBar.setTitle(LocaleController.getString("ReportChatIllegalDrugs", R.string.ReportChatIllegalDrugs)); - } else if (reportType == AlertsCreator.REPORT_TYPE_PERSONAL_DETAILS) { - actionBar.setTitle(LocaleController.getString("ReportChatPersonalDetails", R.string.ReportChatPersonalDetails)); - } - actionBar.setSubtitle(LocaleController.getString("ReportSelectMessages", R.string.ReportSelectMessages)); - } else if (startLoadFromDate != 0) { - final int date = startLoadFromDate; - actionBar.setOnClickListener((v) -> { - jumpToDate(date); - }); - actionBar.setTitle(LocaleController.formatDateChat(startLoadFromDate, false)); - actionBar.setSubtitle(LocaleController.getString("Loading", R.string.Loading)); - - TLRPC.TL_messages_getHistory gh1 = new TLRPC.TL_messages_getHistory(); - gh1.peer = getMessagesController().getInputPeer(dialog_id); - gh1.offset_date = startLoadFromDate; - gh1.limit = 1; - gh1.add_offset = -1; - - int req = getConnectionsManager().sendRequest(gh1, (response, error) -> { - if (response instanceof TLRPC.messages_Messages) { - List l = ((TLRPC.messages_Messages) response).messages; - if (!l.isEmpty()) { - - TLRPC.TL_messages_getHistory gh2 = new TLRPC.TL_messages_getHistory(); - gh2.peer = getMessagesController().getInputPeer(dialog_id); - gh2.offset_date = startLoadFromDate + 60 * 60 * 24; - gh2.limit = 1; - - getConnectionsManager().sendRequest(gh2, (response1, error1) -> { - if (response1 instanceof TLRPC.messages_Messages) { - List l2 = ((TLRPC.messages_Messages) response1).messages; - int count = 0; - if (!l2.isEmpty()) { - count = ((TLRPC.messages_Messages) response).offset_id_offset - ((TLRPC.messages_Messages) response1).offset_id_offset; - } else { - count = ((TLRPC.messages_Messages) response).offset_id_offset; - } - int finalCount = count; - AndroidUtilities.runOnUIThread(() -> { - if (finalCount != 0) { - AndroidUtilities.runOnUIThread(() -> actionBar.setSubtitle(LocaleController.formatPluralString("messages", finalCount))); - } else { - actionBar.setSubtitle(LocaleController.getString("NoMessagesForThisDay", R.string.NoMessagesForThisDay)); - } - }); - } - }); - } else { - actionBar.setSubtitle(LocaleController.getString("NoMessagesForThisDay", R.string.NoMessagesForThisDay)); - } - } - }); - getConnectionsManager().bindRequestToGuid(req, classGuid); - } else { - actionBar.addView(avatarContainer, 0, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT, Gravity.TOP | Gravity.LEFT, !inPreviewMode ? 56 : (chatMode == MODE_PINNED ? 
10 : 0), 0, 40, 0)); - } - - ActionBarMenu menu = actionBar.createMenu(); - - if (isThreadChat() && threadMessageId != 0) { - viewInChatItem = menu.addItem(nkbtn_view_in_chat, R.drawable.msg_viewreplies, themeDelegate); - } - - if (currentEncryptedChat == null && chatMode == 0 && reportType < 0) { - searchIconItem = menu.addItem(search, R.drawable.ic_ab_search); - searchIconItem.setContentDescription(LocaleController.getString("Search", R.string.Search)); - searchItem = menu.addItem(0, R.drawable.ic_ab_search, themeDelegate).setIsSearchField(true).setActionBarMenuItemSearchListener(new ActionBarMenuItem.ActionBarMenuItemSearchListener() { - - boolean searchWas; - - @Override - public boolean canCollapseSearch() { - if (messagesSearchListView.getTag() != null) { - showMessagesSearchListView(false); - return false; - } - return true; - } - - @Override - public void onSearchCollapse() { - searchCalendarButton.setVisibility(View.VISIBLE); - if (searchUserButton != null) { - searchUserButton.setVisibility(View.VISIBLE); - } - if (searchingForUser) { - mentionContainer.getAdapter().searchUsernameOrHashtag(null, 0, null, false, true); - searchingForUser = false; - } - mentionContainer.setReversed(false); - mentionContainer.getAdapter().setSearchingMentions(false); - searchingUserMessages = null; - searchingChatMessages = null; - searchItem.setSearchFieldHint(LocaleController.getString("Search", R.string.Search)); - searchItem.setSearchFieldCaption(null); - AndroidUtilities.updateViewVisibilityAnimated(avatarContainer, true, 0.95f, true); - if (editTextItem != null && editTextItem.getTag() != null) { - if (headerItem != null) { - headerItem.setVisibility(View.GONE); - } - if (editTextItem != null) { - editTextItem.setVisibility(View.VISIBLE); - } - if (attachItem != null) { - attachItem.setVisibility(View.GONE); - } - if (searchIconItem != null && showSearchAsIcon) { - searchIconItem.setVisibility(View.GONE); - } - if (audioCallIconItem != null && showAudioCallAsIcon) { - audioCallIconItem.setVisibility(View.GONE); - } - } else if (chatActivityEnterView.hasText() && TextUtils.isEmpty(chatActivityEnterView.getSlowModeTimer()) && (currentChat == null || ChatObject.canSendMessages(currentChat))) { - if (headerItem != null) { - headerItem.setVisibility(View.GONE); - } - if (editTextItem != null) { - editTextItem.setVisibility(View.GONE); - } - if (attachItem != null) { - attachItem.setVisibility(View.VISIBLE); - } - if (searchIconItem != null && showSearchAsIcon) { - searchIconItem.setVisibility(View.GONE); - } - if (audioCallIconItem != null && showAudioCallAsIcon) { - audioCallIconItem.setVisibility(View.GONE); - } - } else { - if (headerItem != null) { - headerItem.setVisibility(View.VISIBLE); - } - if (audioCallIconItem != null && showAudioCallAsIcon) { - audioCallIconItem.setVisibility(View.VISIBLE); - } - if (searchIconItem != null && showSearchAsIcon) { - searchIconItem.setVisibility(View.VISIBLE); - } - if (editTextItem != null) { - editTextItem.setVisibility(View.GONE); - } - if (attachItem != null) { - attachItem.setVisibility(View.GONE); - } - } - if (threadMessageId == 0 && !UserObject.isReplyUser(currentUser) || threadMessageObject != null && threadMessageObject.getRepliesCount() < 10) { - searchItem.setVisibility(View.GONE); - } - if (viewInChatItem != null) - viewInChatItem.setVisibility(View.VISIBLE); - searchItemVisible = false; - getMediaDataController().clearFoundMessageObjects(); - if (messagesSearchAdapter != null) { - messagesSearchAdapter.notifyDataSetChanged(); - } - 
removeSelectedMessageHighlight(); - updateBottomOverlay(); - updatePinnedMessageView(true); - updateVisibleRows(); - } - - @Override - public void onSearchExpand() { - if (threadMessageId != 0 && !isTopic || UserObject.isReplyUser(currentUser)) { - openSearchWithText(null); - } - if (!openSearchKeyboard) { - return; - } - saveKeyboardPositionBeforeTransition(); - AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); - AndroidUtilities.runOnUIThread(() -> { - searchWas = false; - searchItem.getSearchField().requestFocus(); - AndroidUtilities.showKeyboard(searchItem.getSearchField()); - removeKeyboardPositionBeforeTransition(); - }, 500); - } - - @Override - public void onSearchPressed(EditText editText) { - searchWas = true; - updateSearchButtons(0, 0, -1); - getMediaDataController().searchMessagesInChat(editText.getText().toString(), dialog_id, mergeDialogId, classGuid, 0, threadMessageId, searchingUserMessages, searchingChatMessages); - } - - @Override - public void onTextChanged(EditText editText) { - showMessagesSearchListView(false); - if (searchingForUser) { - mentionContainer.getAdapter().searchUsernameOrHashtag("@" + editText.getText().toString(), 0, messages, true, true); - } else if (searchingUserMessages == null && searchingChatMessages == null && searchUserButton != null && TextUtils.equals(editText.getText(), LocaleController.getString("SearchFrom", R.string.SearchFrom))) { - searchUserButton.callOnClick(); - } - } - @Override - public void onCaptionCleared() { - if (searchingUserMessages != null || searchingChatMessages != null) { - searchUserButton.callOnClick(); - } else { - if (searchingForUser) { - mentionContainer.getAdapter().searchUsernameOrHashtag(null, 0, null, false, true); - searchingForUser = false; - searchItem.setSearchFieldText("", true); - } - searchItem.setSearchFieldHint(LocaleController.getString("Search", R.string.Search)); - searchCalendarButton.setVisibility(View.VISIBLE); - searchUserButton.setVisibility(View.VISIBLE); - searchingUserMessages = null; - searchingChatMessages = null; - } - } + getNotificationCenter().addPostponeNotificationsCallback(postponeNotificationsWhileLoadingCallback); - @Override - public boolean forceShowClear() { - return searchingForUser; + if (chatMode != MODE_SCHEDULED) { + if (threadMessageId == 0) { + getNotificationCenter().addObserver(this, NotificationCenter.screenshotTook); + getNotificationCenter().addObserver(this, NotificationCenter.encryptedChatUpdated); + getNotificationCenter().addObserver(this, NotificationCenter.messagesReadEncrypted); + getNotificationCenter().addObserver(this, NotificationCenter.updateMentionsCount); + getNotificationCenter().addObserver(this, NotificationCenter.newDraftReceived); + getNotificationCenter().addObserver(this, NotificationCenter.chatOnlineCountDidLoad); + getNotificationCenter().addObserver(this, NotificationCenter.peerSettingsDidLoad); + getNotificationCenter().addObserver(this, NotificationCenter.didLoadPinnedMessages); + getNotificationCenter().addObserver(this, NotificationCenter.commentsRead); + getNotificationCenter().addObserver(this, NotificationCenter.changeRepliesCounter); + getNotificationCenter().addObserver(this, NotificationCenter.messagesRead); + getNotificationCenter().addObserver(this, NotificationCenter.didLoadChatInviter); + getNotificationCenter().addObserver(this, NotificationCenter.groupCallUpdated); + } else { + getNotificationCenter().addObserver(this, NotificationCenter.threadMessagesRead); + if (isTopic) { + 
getNotificationCenter().addObserver(this, NotificationCenter.updateMentionsCount); + getNotificationCenter().addObserver(this, NotificationCenter.didLoadPinnedMessages); } - }); - searchItem.setSearchFieldHint(LocaleController.getString("Search", R.string.Search)); - if (threadMessageId == 0 && !UserObject.isReplyUser(currentUser) || threadMessageObject != null && threadMessageObject.getRepliesCount() < 10) { - searchItem.setVisibility(View.GONE); } - searchItemVisible = false; + getNotificationCenter().addObserver(this, NotificationCenter.botKeyboardDidLoad); + getNotificationCenter().addObserver(this, NotificationCenter.removeAllMessagesFromDialog); + getNotificationCenter().addObserver(this, NotificationCenter.messagesReadContent); + getNotificationCenter().addObserver(this, NotificationCenter.chatSearchResultsAvailable); + getNotificationCenter().addObserver(this, NotificationCenter.chatSearchResultsLoading); + getNotificationCenter().addObserver(this, NotificationCenter.didUpdateMessagesViews); + getNotificationCenter().addObserver(this, NotificationCenter.didUpdatePollResults); + if (currentEncryptedChat != null) { + getNotificationCenter().addObserver(this, NotificationCenter.didVerifyMessagesStickers); + } } - - editTextItem = menu.addItem(0, R.drawable.ic_ab_other, themeDelegate); - editTextItem.setContentDescription(LocaleController.getString("AccDescrMoreOptions", R.string.AccDescrMoreOptions)); - editTextItem.setTag(null); - editTextItem.setVisibility(View.GONE); - - // NekoX - editTextItem.addSubItem(text_transalte, LocaleController.getString("Translate", R.string.Translate)); - - editTextItem.addSubItem(text_spoiler, LocaleController.getString("Spoiler", R.string.Spoiler)); - SpannableStringBuilder stringBuilder = new SpannableStringBuilder(LocaleController.getString("Bold", R.string.Bold)); - stringBuilder.setSpan(new TypefaceSpan(AndroidUtilities.getTypeface("fonts/rmedium.ttf")), 0, stringBuilder.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); - editTextItem.addSubItem(text_bold, stringBuilder); - stringBuilder = new SpannableStringBuilder(LocaleController.getString("Italic", R.string.Italic)); - stringBuilder.setSpan(new TypefaceSpan(AndroidUtilities.getTypeface("fonts/ritalic.ttf")), 0, stringBuilder.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); - editTextItem.addSubItem(text_italic, stringBuilder); - stringBuilder = new SpannableStringBuilder(LocaleController.getString("Mono", R.string.Mono)); - stringBuilder.setSpan(new TypefaceSpan(Typeface.MONOSPACE), 0, stringBuilder.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); - editTextItem.addSubItem(text_mono, stringBuilder); - if (currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 101) { - stringBuilder = new SpannableStringBuilder(LocaleController.getString("Strike", R.string.Strike)); - TextStyleSpan.TextStyleRun run = new TextStyleSpan.TextStyleRun(); - run.flags |= TextStyleSpan.FLAG_STYLE_STRIKE; - stringBuilder.setSpan(new TextStyleSpan(run), 0, stringBuilder.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); - editTextItem.addSubItem(text_strike, stringBuilder); - stringBuilder = new SpannableStringBuilder(LocaleController.getString("Underline", R.string.Underline)); - run = new TextStyleSpan.TextStyleRun(); - run.flags |= TextStyleSpan.FLAG_STYLE_UNDERLINE; - stringBuilder.setSpan(new TextStyleSpan(run), 0, stringBuilder.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); - editTextItem.addSubItem(text_underline, stringBuilder); + getNotificationCenter().addObserver(this, 
NotificationCenter.messagesDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.invalidateMotionBackground); + getNotificationCenter().addObserver(this, NotificationCenter.didUpdateConnectionState); + getNotificationCenter().addObserver(this, NotificationCenter.updateInterfaces); + getNotificationCenter().addObserver(this, NotificationCenter.updateDefaultSendAsPeer); + if (chatMode != MODE_PINNED) { + getNotificationCenter().addObserver(this, NotificationCenter.didReceiveNewMessages); } - editTextItem.addSubItem(text_link, LocaleController.getString("CreateLink", R.string.CreateLink)); - - // NekoX - editTextItem.addSubItem(text_mention, LocaleController.getString("CreateMention", R.string.CreateMention)); + if (chatMode == 0) { + getNotificationCenter().addObserver(this, NotificationCenter.didLoadSponsoredMessages); + } + getNotificationCenter().addObserver(this, NotificationCenter.didLoadSendAsPeers); + getNotificationCenter().addObserver(this, NotificationCenter.closeChats); + getNotificationCenter().addObserver(this, NotificationCenter.messagesDeleted); + getNotificationCenter().addObserver(this, NotificationCenter.historyCleared); + getNotificationCenter().addObserver(this, NotificationCenter.messageReceivedByServer); + getNotificationCenter().addObserver(this, NotificationCenter.messageReceivedByAck); + getNotificationCenter().addObserver(this, NotificationCenter.messageSendError); + getNotificationCenter().addObserver(this, NotificationCenter.chatInfoDidLoad); + getNotificationCenter().addObserver(this, NotificationCenter.contactsDidLoad); + getNotificationCenter().addObserver(this, NotificationCenter.messagePlayingProgressDidChanged); + getNotificationCenter().addObserver(this, NotificationCenter.messagePlayingDidReset); + getNotificationCenter().addObserver(this, NotificationCenter.messagePlayingGoingToStop); + getNotificationCenter().addObserver(this, NotificationCenter.messagePlayingPlayStateChanged); + getNotificationCenter().addObserver(this, NotificationCenter.blockedUsersDidLoad); + getNotificationCenter().addObserver(this, NotificationCenter.fileNewChunkAvailable); + getNotificationCenter().addObserver(this, NotificationCenter.didCreatedNewDeleteTask); + getNotificationCenter().addObserver(this, NotificationCenter.messagePlayingDidStart); + getNotificationCenter().addObserver(this, NotificationCenter.updateMessageMedia); + getNotificationCenter().addObserver(this, NotificationCenter.voiceTranscriptionUpdate); + getNotificationCenter().addObserver(this, NotificationCenter.animatedEmojiDocumentLoaded); + getNotificationCenter().addObserver(this, NotificationCenter.replaceMessagesObjects); + getNotificationCenter().addObserver(this, NotificationCenter.notificationsSettingsUpdated); + getNotificationCenter().addObserver(this, NotificationCenter.replyMessagesDidLoad); + getNotificationCenter().addObserver(this, NotificationCenter.didReceivedWebpages); + getNotificationCenter().addObserver(this, NotificationCenter.didReceivedWebpagesInUpdates); + getNotificationCenter().addObserver(this, NotificationCenter.botInfoDidLoad); + getNotificationCenter().addObserver(this, NotificationCenter.chatInfoCantLoad); + getNotificationCenter().addObserver(this, NotificationCenter.userInfoDidLoad); + getNotificationCenter().addObserver(this, NotificationCenter.pinnedInfoDidLoad); + getNotificationCenter().addObserver(this, NotificationCenter.topicsDidLoaded); + 
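Note on the hunk above: it (together with its mirror in onFragmentDestroy further down) registers ChatActivity for a long list of NotificationCenter events, keeping account-scoped observers (getNotificationCenter()) separate from process-wide ones (NotificationCenter.getGlobalInstance()). The following is a minimal stand-in sketch of that register/unregister pairing; the class and event names are hypothetical and only model the pattern, not the real org.telegram.messenger.NotificationCenter API.

    // Minimal stand-in for the observer pairing used above (hypothetical classes,
    // not the real NotificationCenter API).
    import java.util.*;

    final class TinyNotificationCenter {
        interface Delegate { void didReceivedNotification(int id, Object... args); }

        private final Map<Integer, List<Delegate>> observers = new HashMap<>();

        void addObserver(Delegate d, int id) {
            observers.computeIfAbsent(id, k -> new ArrayList<>()).add(d);
        }

        void removeObserver(Delegate d, int id) {
            List<Delegate> list = observers.get(id);
            if (list != null) list.remove(d); // forgetting this keeps the fragment alive
        }

        void post(int id, Object... args) {
            for (Delegate d : observers.getOrDefault(id, Collections.emptyList())) {
                d.didReceivedNotification(id, args);
            }
        }
    }

    final class TinyChatFragment implements TinyNotificationCenter.Delegate {
        static final int MESSAGES_DID_LOAD = 1, EMOJI_LOADED = 2;
        private final TinyNotificationCenter center;

        TinyChatFragment(TinyNotificationCenter center) { this.center = center; }

        void onFragmentCreate() {   // mirrors the addObserver block above
            center.addObserver(this, MESSAGES_DID_LOAD);
            center.addObserver(this, EMOJI_LOADED);
        }

        void onFragmentDestroy() {  // mirrors the removeObserver block later in this diff
            center.removeObserver(this, MESSAGES_DID_LOAD);
            center.removeObserver(this, EMOJI_LOADED);
        }

        @Override public void didReceivedNotification(int id, Object... args) {
            if (id == MESSAGES_DID_LOAD) { /* refresh the message list */ }
            else if (id == EMOJI_LOADED) { /* invalidate visible cells */ }
        }
    }

The point of the pairing is lifecycle symmetry: every event subscribed in onFragmentCreate must be dropped in onFragmentDestroy, otherwise the destroyed fragment keeps receiving callbacks.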
NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.didSetNewWallpapper); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.didApplyNewTheme); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.goingToPreviewTheme); + getNotificationCenter().addObserver(this, NotificationCenter.channelRightsUpdated); + getNotificationCenter().addObserver(this, NotificationCenter.audioRecordTooShort); + getNotificationCenter().addObserver(this, NotificationCenter.didUpdateReactions); + getNotificationCenter().addObserver(this, NotificationCenter.didUpdateExtendedMedia); + getNotificationCenter().addObserver(this, NotificationCenter.videoLoadingStateChanged); + getNotificationCenter().addObserver(this, NotificationCenter.scheduledMessagesUpdated); + getNotificationCenter().addObserver(this, NotificationCenter.diceStickersDidLoad); + getNotificationCenter().addObserver(this, NotificationCenter.dialogDeleted); + getNotificationCenter().addObserver(this, NotificationCenter.chatAvailableReactionsUpdated); + getNotificationCenter().addObserver(this, NotificationCenter.dialogsUnreadReactionsCounterChanged); + getNotificationCenter().addObserver(this, NotificationCenter.groupStickersDidLoad); + getNotificationCenter().addObserver(this, NotificationCenter.dialogTranslate); + getNotificationCenter().addObserver(this, NotificationCenter.dialogIsTranslatable); + getNotificationCenter().addObserver(this, NotificationCenter.messageTranslated); + getNotificationCenter().addObserver(this, NotificationCenter.messageTranslating); - editTextItem.addSubItem(text_regular, LocaleController.getString("Regular", R.string.Regular)); + super.onFragmentCreate(); - if (chatMode == 0 && (threadMessageId == 0 || isTopic) && !UserObject.isReplyUser(currentUser) && reportType < 0) { - TLRPC.UserFull userFull = null; - if (currentUser != null) { -// audioCallIconItem = menu.addItem(call, R.drawable.ic_call, themeDelegate); -// audioCallIconItem.setContentDescription(LocaleController.getString("Call", R.string.Call)); -// userFull = getMessagesController().getUserFull(currentUser.id); -// if (userFull != null && userFull.phone_calls_available) { -// showAudioCallAsIcon = !inPreviewMode; -// audioCallIconItem.setVisibility(View.VISIBLE); -// } else { -// showAudioCallAsIcon = false; -// audioCallIconItem.setVisibility(View.GONE); -// } - showAudioCallAsIcon = false; - if (avatarContainer != null) { - avatarContainer.setTitleExpand(showAudioCallAsIcon); + if (chatMode == MODE_PINNED) { + ArrayList messageObjects = new ArrayList<>(); + for (int a = 0, N = pinnedMessageIds.size(); a < N; a++) { + Integer id = pinnedMessageIds.get(a); + MessageObject object = pinnedMessageObjects.get(id); + if (object != null) { + MessageObject o = new MessageObject(object.currentAccount, object.messageOwner, true, false); + o.replyMessageObject = object.replyMessageObject; + o.mediaExists = object.mediaExists; + o.attachPathExists = object.attachPathExists; + messageObjects.add(o); } } - headerItem = menu.addItem(0, R.drawable.ic_ab_other, themeDelegate); - headerItem.setContentDescription(LocaleController.getString("AccDescrMoreOptions", R.string.AccDescrMoreOptions)); - - if (currentUser == null || !currentUser.self) { - chatNotificationsPopupWrapper = new ChatNotificationsPopupWrapper(context, currentAccount, headerItem.getPopupLayout().getSwipeBack(), false, false, new ChatNotificationsPopupWrapper.Callback() { - @Override - public void dismiss() { - headerItem.toggleSubMenu(); - 
} - - @Override - public void toggleSound() { - SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); - boolean enabled = !preferences.getBoolean("sound_enabled_" + NotificationsController.getSharedPrefKey(dialog_id, getTopicId()), true); - preferences.edit().putBoolean("sound_enabled_" + NotificationsController.getSharedPrefKey(dialog_id, getTopicId()), enabled).apply(); - if (BulletinFactory.canShowBulletin(ChatActivity.this)) { - BulletinFactory.createSoundEnabledBulletin(ChatActivity.this, enabled ? NotificationsController.SETTING_SOUND_ON : NotificationsController.SETTING_SOUND_OFF, getResourceProvider()).show(); - } - updateTitleIcons(); - } - - @Override - public void muteFor(int timeInSeconds) { - if (timeInSeconds == 0) { - if (getMessagesController().isDialogMuted(dialog_id, getTopicId())) { - ChatActivity.this.toggleMute(true); - } - if (BulletinFactory.canShowBulletin(ChatActivity.this)) { - BulletinFactory.createMuteBulletin(ChatActivity.this, NotificationsController.SETTING_MUTE_UNMUTE, timeInSeconds, getResourceProvider()).show(); - } - } else { - getNotificationsController().muteUntil(dialog_id, getTopicId(), timeInSeconds); - if (BulletinFactory.canShowBulletin(ChatActivity.this)) { - BulletinFactory.createMuteBulletin(ChatActivity.this, NotificationsController.SETTING_MUTE_CUSTOM, timeInSeconds, getResourceProvider()).show(); - } - } - } - - @Override - public void showCustomize() { - if (dialog_id != 0) { - if (currentUser != null) { - getMessagesController().putUser(currentUser, true); - } - Bundle args = new Bundle(); - args.putLong("dialog_id", dialog_id); - if (getTopicId() != 0) { - args.putInt("topic_id", getTopicId()); - } - presentFragment(new ProfileNotificationsActivity(args, themeDelegate)); - } - } - - @Override - public void toggleMute() { - ChatActivity.this.toggleMute(true); - BulletinFactory.createMuteBulletin(ChatActivity.this, getMessagesController().isDialogMuted(dialog_id, getTopicId()), themeDelegate).show(); - } - }, getResourceProvider()); - muteItem = headerItem.addSwipeBackItem(R.drawable.msg_mute, null, null, chatNotificationsPopupWrapper.windowLayout); - muteItem.setOnClickListener(view -> { - boolean muted = MessagesController.getInstance(currentAccount).isDialogMuted(dialog_id, getTopicId()); - if (muted) { - updateTitleIcons(true); - AndroidUtilities.runOnUIThread(() -> { - ChatActivity.this.toggleMute(true); - }, 150); - headerItem.toggleSubMenu(); - BulletinFactory.createMuteBulletin(ChatActivity.this, false, themeDelegate).show(); - } else { - muteItem.openSwipeBack(); - } - }); - muteItemGap = headerItem.addColoredGap(); + int loadIndex = lastLoadIndex++; + waitingForLoad.add(loadIndex); + getNotificationCenter().postNotificationName(NotificationCenter.messagesDidLoad, dialog_id, messageObjects.size(), messageObjects, false, 0, last_message_id, 0, 0, 2, true, classGuid, loadIndex, pinnedMessageIds.get(0), 0, MODE_PINNED); + } else if (!forceHistoryEmpty) { + loading = true; + } + if (isThreadChat() && !isTopic) { + if (highlightMessageId == startLoadFromMessageId) { + needSelectFromMessageId = true; } - if (currentUser != null) { - headerItem.addSubItem(call, R.drawable.msg_callback, LocaleController.getString("Call", R.string.Call), themeDelegate); - if (Build.VERSION.SDK_INT >= 18) { - headerItem.addSubItem(video_call, R.drawable.msg_videocall, LocaleController.getString("VideoCall", R.string.VideoCall), themeDelegate); + } else { + getMessagesController().setLastCreatedDialogId(dialog_id, 
chatMode == MODE_SCHEDULED, true); + if (chatMode == 0) { + if (currentEncryptedChat == null) { + getMediaDataController().loadBotKeyboard(MessagesStorage.TopicKey.of(dialog_id, getTopicId())); } - if (userFull != null && userFull.phone_calls_available) { - headerItem.showSubItem(call); - if (userFull.video_calls_available) { - headerItem.showSubItem(video_call); - } else { - headerItem.hideSubItem(video_call); + getMessagesController().loadPeerSettings(currentUser, currentChat); + + if (startLoadFromMessageId == 0) { + SharedPreferences sharedPreferences = MessagesController.getNotificationsSettings(currentAccount); + int messageId = sharedPreferences.getInt("diditem" + NotificationsController.getSharedPrefKey(dialog_id, getTopicId()), 0); + if (messageId != 0) { + wasManualScroll = true; + loadingFromOldPosition = true; + startLoadFromMessageOffset = sharedPreferences.getInt("diditemo" + NotificationsController.getSharedPrefKey(dialog_id, getTopicId()), 0); + startLoadFromMessageId = messageId; } } else { - headerItem.hideSubItem(call); - headerItem.hideSubItem(video_call); + showScrollToMessageError = true; + needSelectFromMessageId = true; } } + } - if (searchItem != null) { - headerItem.addSubItem(search, R.drawable.msg_search, LocaleController.getString("Search", R.string.Search), themeDelegate); + boolean loadInfo = false; + if (currentChat != null) { + chatInfo = getMessagesController().getChatFull(currentChat.id); + groupCall = getMessagesController().getGroupCall(currentChat.id, true); + if (ChatObject.isChannel(currentChat) && !getMessagesController().isChannelAdminsLoaded(currentChat.id)) { + getMessagesController().loadChannelAdmins(currentChat.id, true); } - boolean allowShowPinned; - if (currentChat != null) { - allowShowPinned = ChatObject.canUserDoAction(currentChat, ChatObject.ACTION_PIN) || ChatObject.isChannel(currentChat); - } else if (currentUser != null && currentUser.self) { - allowShowPinned = true; - } else if (userInfo != null) { - allowShowPinned = userInfo.can_pin_message; - } else { - allowShowPinned = false; + fillInviterId(false); + if (chatMode != MODE_PINNED) { + getMessagesStorage().loadChatInfo(currentChat.id, ChatObject.isChannel(currentChat), null, true, false, startLoadFromMessageId); } - if (allowShowPinned) { - headerItem.addSubItem(nkheaderbtn_show_pinned, R.drawable.msg_pin, LocaleController.getString("PinnedMessage", R.string.PinnedMessage)); + if (chatMode == 0 && chatInfo != null && ChatObject.isChannel(currentChat) && chatInfo.migrated_from_chat_id != 0 && !isThreadChat()) { + mergeDialogId = -chatInfo.migrated_from_chat_id; + maxMessageId[1] = chatInfo.migrated_from_max_id; } - if (currentChat != null && !currentChat.creator && !ChatObject.hasAdminRights(currentChat)) { - headerItem.addSubItem(report, R.drawable.msg_report, LocaleController.getString("ReportChat", R.string.ReportChat), themeDelegate); + loadInfo = chatInfo == null; + checkGroupCallJoin(false); + } else if (currentUser != null) { + if (chatMode != MODE_PINNED) { + getMessagesController().loadUserInfo(currentUser, true, classGuid, startLoadFromMessageId); } + loadInfo = userInfo == null; + } - if (currentChat != null && (currentChat.has_link || (chatInfo != null && chatInfo.linked_chat_id != 0))) { - String text; - if (!currentChat.megagroup) { - text = LocaleController.getString("LinkedGroupChat", R.string.LinkedGroupChat); - headerItem.addSubItem(nkheaderbtn_linked_chat, R.drawable.msg_discussion, text); + if (forceHistoryEmpty) { + endReached[0] = endReached[1] = true; + 
forwardEndReached[0] = forwardEndReached[1] = true; + firstLoading = false; + checkDispatchHideSkeletons(false); + } + if (chatMode != MODE_PINNED && !forceHistoryEmpty) { + waitingForLoad.add(lastLoadIndex); + int initialMessagesSize; + if (SharedConfig.deviceIsHigh()) { + initialMessagesSize = (isThreadChat() && !isTopic) ? 30 : 25; + } else { + initialMessagesSize = (isThreadChat() && !isTopic) ? 20 : 15; + } + if (startLoadFromDate != 0) { + getMessagesController().loadMessages(dialog_id, mergeDialogId, false, 30, 0, startLoadFromDate, true, 0, classGuid, 4, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); + } else if (startLoadFromMessageId != 0 && (!isThreadChat() || startLoadFromMessageId == highlightMessageId || isTopic)) { + startLoadFromMessageIdSaved = startLoadFromMessageId; + if (migrated_to != 0) { + mergeDialogId = migrated_to; + getMessagesController().loadMessages(mergeDialogId, 0, loadInfo, initialMessagesSize, startLoadFromMessageId, 0, true, 0, classGuid, 3, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); } else { - text = LocaleController.getString("LinkedChannelChat", R.string.LinkedChannelChat); - headerItem.addSubItem(nkheaderbtn_linked_chat, R.drawable.msg_channel, text); + getMessagesController().loadMessages(dialog_id, mergeDialogId, loadInfo, initialMessagesSize, startLoadFromMessageId, 0, true, 0, classGuid, 3, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); + } + } else { + if (historyPreloaded) { + lastLoadIndex++; + } else { + getMessagesController().loadMessages(dialog_id, mergeDialogId, loadInfo, initialMessagesSize, startLoadFromMessageId, 0, true, 0, classGuid, 2, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); } } + } + if (chatMode == 0 && !isThreadChat()) { + waitingForLoad.add(lastLoadIndex); + getMessagesController().loadMessages(dialog_id, mergeDialogId, false, 1, 0, 0, true, 0, classGuid, 2, 0, MODE_SCHEDULED, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); + } - if (currentUser != null) { - addContactItem = headerItem.addSubItem(share_contact, R.drawable.msg_addcontact, "", themeDelegate); + if (chatMode == 0) { + if (userId != 0 && currentUser.bot) { + getMediaDataController().loadBotInfo(userId, userId, true, classGuid); + } else if (chatInfo instanceof TLRPC.TL_chatFull) { + for (int a = 0; a < chatInfo.participants.participants.size(); a++) { + TLRPC.ChatParticipant participant = chatInfo.participants.participants.get(a); + TLRPC.User user = getMessagesController().getUser(participant.user_id); + if (user != null && user.bot) { + getMediaDataController().loadBotInfo(user.id, -chatInfo.id, true, classGuid); + } + } + } + if (AndroidUtilities.isTablet() && !isComments) { + getNotificationCenter().postNotificationName(NotificationCenter.openedChatChanged, dialog_id, getTopicId(), false); } - shareKeyItem = headerItem.addSubItem(nkheaderbtn_share_key, R.drawable.baseline_vpn_key_24, LocaleController.getString("ShareMyKey", R.string.ShareMyKey), themeDelegate); - - if (currentEncryptedChat != null) { - timeItem2 = headerItem.addSubItem(chat_enc_timer, R.drawable.msg_autodelete, LocaleController.getString("SetTimer", R.string.SetTimer), themeDelegate); - } /*else if (currentChat == null && !currentUser.self || ChatObject.canUserDoAdminAction(currentChat, ChatObject.ACTION_DELETE_MESSAGES)) { - headerItem.addSubItem(auto_delete_timer, R.drawable.msg_timer, LocaleController.getString("AutoDeleteSetTimer", R.string.AutoDeleteSetTimer)); - }*/ 
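Note on the history bootstrap above: it sizes the first page from SharedConfig.deviceIsHigh() and from whether the screen is a plain reply thread, then picks one of three load modes: date-anchored (startLoadFromDate), anchored around a message id (including the migrated_to/mergeDialogId case), or simply the latest messages. A rough standalone sketch of that decision follows, with the thread-specific condition simplified and all names hypothetical.

    // Sketch of the first-page sizing and load-mode choice made in onFragmentCreate
    // (hypothetical helper; the real code passes these values to MessagesController.loadMessages).
    final class InitialLoadPlan {
        enum Mode { FROM_DATE, AROUND_MESSAGE, LATEST }

        final Mode mode;
        final int pageSize;

        private InitialLoadPlan(Mode mode, int pageSize) { this.mode = mode; this.pageSize = pageSize; }

        static InitialLoadPlan choose(boolean deviceIsHigh, boolean isThreadChat, boolean isTopic,
                                      int startLoadFromDate, int startLoadFromMessageId) {
            // High-end devices get a slightly larger first page; plain threads get a bit more
            // context because replies are rendered around a root message.
            int pageSize = deviceIsHigh
                    ? ((isThreadChat && !isTopic) ? 30 : 25)
                    : ((isThreadChat && !isTopic) ? 20 : 15);
            if (startLoadFromDate != 0) {
                return new InitialLoadPlan(Mode.FROM_DATE, 30);      // date jump uses a fixed window
            }
            if (startLoadFromMessageId != 0) {
                return new InitialLoadPlan(Mode.AROUND_MESSAGE, pageSize);
            }
            return new InitialLoadPlan(Mode.LATEST, pageSize);
        }
    }

For example, choose(true, false, false, 0, 0) yields Mode.LATEST with a 25-message first page, which corresponds to the plain loadMessages call in the hunk above.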
+ if (currentUser != null && !UserObject.isReplyUser(currentUser)) { + userBlocked = getMessagesController().blockePeers.indexOfKey(currentUser.id) >= 0; + } - if (currentChat != null && !isTopic) { - viewAsTopics = headerItem.addSubItem(view_as_topics, R.drawable.msg_topics, LocaleController.getString("TopicViewAsTopics", R.string.TopicViewAsTopics), themeDelegate); + if (currentEncryptedChat != null && AndroidUtilities.getMyLayerVersion(currentEncryptedChat.layer) != SecretChatHelper.CURRENT_SECRET_CHAT_LAYER) { + getSecretChatHelper().sendNotifyLayerMessage(currentEncryptedChat, null); } - if (themeDelegate.isThemeChangeAvailable()) { - headerItem.addSubItem(change_colors, R.drawable.msg_colors, LocaleController.getString("ChangeColors", R.string.ChangeColors), themeDelegate); + } + if (chatInfo != null && chatInfo.linked_chat_id != 0) { + TLRPC.Chat chat = getMessagesController().getChat(chatInfo.linked_chat_id); + if (chat != null && chat.megagroup) { + getMessagesController().startShortPoll(chat, classGuid, false); } - if (!isTopic) { - toTheBeginning = headerItem.addSubItem(to_the_beginning, R.drawable.ic_upward, LocaleController.getString("ToTheBeginning", R.string.ToTheBeginning)); - toTheMessage = headerItem.addSubItem(to_the_message, R.drawable.msg_go_up, LocaleController.getString("ToTheMessage", R.string.ToTheMessage)); - clearHistoryItem = headerItem.addSubItem(clear_history, R.drawable.msg_clear, LocaleController.getString("ClearHistory", R.string.ClearHistory), themeDelegate); - hideTitleItem = headerItem.addSubItem(nkheaderbtn_hide_title, R.drawable.hide_title, LocaleController.getString("HideTitle", R.string.HideTitle), themeDelegate); + } - if (ChatObject.isChannel(currentChat) && !currentChat.creator) { - if (!ChatObject.isNotInChat(currentChat)) { - if (currentChat.megagroup) { - headerItem.addSubItem(delete_chat, R.drawable.msg_leave, LocaleController.getString("LeaveMegaMenu", R.string.LeaveMegaMenu), themeDelegate); - } else { - headerItem.addSubItem(delete_chat, R.drawable.msg_leave, LocaleController.getString("LeaveChannelMenu", R.string.LeaveChannelMenu), themeDelegate); - } - } - } else if (!ChatObject.isChannel(currentChat)) { - if (currentChat != null) { - headerItem.addSubItem(delete_chat, R.drawable.msg_leave, LocaleController.getString("DeleteAndExit", R.string.DeleteAndExit), themeDelegate); - } else { - headerItem.addSubItem(delete_chat, R.drawable.msg_delete, LocaleController.getString("DeleteChatUser", R.string.DeleteChatUser), themeDelegate); - } - } - if (ChatObject.isMegagroup(currentChat) || currentChat != null && !ChatObject.isChannel(currentChat)) { - headerItem.addSubItem(nkheaderbtn_zibi, R.drawable.msg_delete, LocaleController.getString("DeleteAllFromSelf", R.string.DeleteAllFromSelf)); - } + themeDelegate = new ThemeDelegate(); + if (themeDelegate.isThemeChangeAvailable()) { + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.needSetDayNightTheme); + } - if (currentChat != null && !ChatObject.isChannel(currentChat) && currentChat.creator) { - headerItem.addSubItem(nkheaderbtn_upgrade, R.drawable.baseline_arrow_upward_24, LocaleController.getString("UpgradeGroup", R.string.UpgradeGroup)); - } - } - if (currentUser != null && currentUser.self) { - headerItem.addSubItem(add_shortcut, R.drawable.msg_home, LocaleController.getString("AddShortcut", R.string.AddShortcut), themeDelegate); - } - if (currentUser != null && currentEncryptedChat == null && currentUser.bot) { - headerItem.addSubItem(bot_settings, 
R.drawable.msg_settings_old, LocaleController.getString("BotSettings", R.string.BotSettings), themeDelegate); - headerItem.addSubItem(bot_help, R.drawable.msg_help, LocaleController.getString("BotHelp", R.string.BotHelp), themeDelegate); - updateBotButtons(); + if (chatInvite != null) { + int timeout = chatInvite.expires - getConnectionsManager().getCurrentTime(); + if (timeout < 0) { + timeout = 10; } - } - if (ChatObject.isForum(currentChat) && isTopic) { - if (getParentLayout() != null && getParentLayout().getFragmentStack() != null) { - boolean hasMyForum = false; - for (int i = 0; i < getParentLayout().getFragmentStack().size(); ++i) { - BaseFragment fragment = getParentLayout().getFragmentStack().get(i); - if (fragment instanceof TopicsFragment && ((TopicsFragment) fragment).getDialogId() == dialog_id) { - hasMyForum = true; - break; - } + AndroidUtilities.runOnUIThread(chatInviteRunnable = () -> { + chatInviteRunnable = null; + if (getParentActivity() == null) { + return; } - - if (!hasMyForum) { - openForumItem = headerItem.addSubItem(open_forum, R.drawable.msg_discussion, LocaleController.getString("OpenAllTopics", R.string.OpenAllTopics), themeDelegate); + AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity(), themeDelegate); + if (ChatObject.isChannel(currentChat) && !currentChat.megagroup) { + builder.setMessage(LocaleController.getString("JoinByPeekChannelText", R.string.JoinByPeekChannelText)); + builder.setTitle(LocaleController.getString("JoinByPeekChannelTitle", R.string.JoinByPeekChannelTitle)); + } else { + builder.setMessage(LocaleController.getString("JoinByPeekGroupText", R.string.JoinByPeekGroupText)); + builder.setTitle(LocaleController.getString("JoinByPeekGroupTitle", R.string.JoinByPeekGroupTitle)); } - } - } - if (currentChat != null && forumTopic != null) { - closeTopicItem = headerItem.addSubItem(topic_close, R.drawable.msg_topic_close, LocaleController.getString("CloseTopic", R.string.CloseTopic), themeDelegate); - closeTopicItem.setVisibility(currentChat != null && ChatObject.canManageTopic(currentAccount, currentChat, forumTopic) && forumTopic != null && !forumTopic.closed ? View.VISIBLE : View.GONE); + builder.setPositiveButton(LocaleController.getString("JoinByPeekJoin", R.string.JoinByPeekJoin), (dialogInterface, i) -> { + if (bottomOverlayChatText != null) { + bottomOverlayChatText.callOnClick(); + } + }); + builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), (dialogInterface, i) -> finishFragment()); + showDialog(builder.create()); + }, timeout * 1000L); } - menu.setVisibility(inMenuMode ? 
View.GONE : View.VISIBLE); - updateTitle(false); - avatarContainer.updateOnlineCount(); - avatarContainer.updateSubtitle(); - updateTitleIcons(); - - if (chatMode == 0 && (!isThreadChat() || isTopic) && reportType < 0) { - attachItem = menu.addItem(chat_menu_attach, R.drawable.ic_ab_other, themeDelegate).setOverrideMenuClick(true).setAllowCloseAnimation(false); - attachItem.setContentDescription(LocaleController.getString("AccDescrMoreOptions", R.string.AccDescrMoreOptions)); - attachItem.setVisibility(View.GONE); + if (isTopic) { + getMessagesController().getTopicsController().getTopicRepliesCount(dialog_id, getTopicId()); } - actionModeViews.clear(); + return true; + } - if (inPreviewMode) { - if (headerItem != null) { - headerItem.setAlpha(0.0f); + private void fillInviterId(boolean load) { + if (currentChat == null || chatInfo == null || ChatObject.isNotInChat(currentChat) || currentChat.creator) { + return; + } + if (chatInfo.inviterId != 0) { + chatInviterId = chatInfo.inviterId; + return; + } + if (chatInfo.participants != null) { + if (chatInfo.participants.self_participant != null) { + chatInviterId = chatInfo.participants.self_participant.inviter_id; + return; } - if (attachItem != null) { - attachItem.setAlpha(0.0f); + long selfId = getUserConfig().getClientUserId(); + for (int a = 0, N = chatInfo.participants.participants.size(); a < N; a++) { + TLRPC.ChatParticipant participant = chatInfo.participants.participants.get(a); + if (participant.user_id == selfId) { + chatInviterId = participant.inviter_id; + return; + } } } + if (load && chatInviterId == 0) { + getMessagesController().checkChatInviter(currentChat.id, false); + } + } - final ActionBarMenu actionMode = actionBar.createActionMode(); - - selectedMessagesCountTextView = new NumberTextView(actionMode.getContext()); - selectedMessagesCountTextView.setTextSize(18); - selectedMessagesCountTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - selectedMessagesCountTextView.setTextColor(getThemedColor(Theme.key_actionBarActionModeDefaultIcon)); - actionMode.addView(selectedMessagesCountTextView, LayoutHelper.createLinear(0, LayoutHelper.MATCH_PARENT, 1.0f, 65, 0, 0, 0)); - selectedMessagesCountTextView.setOnTouchListener((v, event) -> true); - - actionModeViews.add(actionMode.addItemWithWidth(nkactionbarbtn_reply, R.drawable.msg_reply, AndroidUtilities.dp(54), LocaleController.getString("Reply", R.string.Reply))); - actionModeViews.add(actionMode.addItemWithWidth(edit, R.drawable.msg_edit, AndroidUtilities.dp(54), LocaleController.getString("Edit", R.string.Edit))); - actionModeViews.add(actionMode.addItemWithWidth(nkactionbarbtn_selectBetween, R.drawable.ic_select_between, AndroidUtilities.dp(54), LocaleController.getString("SelectBetween", R.string.SelectBetween))); - actionModeViews.add(actionMode.addItemWithWidth(copy, R.drawable.msg_copy, AndroidUtilities.dp(54), LocaleController.getString("Copy", R.string.Copy))); - actionModeViews.add(actionMode.addItemWithWidth(combine_message, R.drawable.msg_replace, AndroidUtilities.dp(54), LocaleController.getString("CombineMessage", R.string.CombineMessage))); + private void hideUndoViews() { + if (undoView != null) { + undoView.hide(true, 0); + } + if (pinBulletin != null) { + pinBulletin.hide(false, 0); + } + if (topUndoView != null) { + topUndoView.hide(true, 0); + } + } - if (currentEncryptedChat == null) { - actionModeViews.add(actionMode.addItemWithWidth(forward, R.drawable.msg_forward, AndroidUtilities.dp(54), LocaleController.getString("Forward", 
R.string.Forward))); + public int getOtherSameChatsDiff() { + if (parentLayout == null || parentLayout.getFragmentStack() == null) { + return 0; + } + int cur = parentLayout.getFragmentStack().indexOf(this); + if (cur == -1) { + cur = parentLayout.getFragmentStack().size(); + } + int i = cur; + for (int a = 0; a < parentLayout.getFragmentStack().size(); a++) { + BaseFragment fragment = parentLayout.getFragmentStack().get(a); + if (fragment != this && fragment instanceof ChatActivity) { + ChatActivity chatActivity = (ChatActivity) fragment; + if (chatActivity.dialog_id == dialog_id) { + i = a; + break; + } + } } + return i - cur; + } - actionModeViews.add(actionMode.addItemWithWidth(delete, R.drawable.msg_delete, AndroidUtilities.dp(54), LocaleController.getString("Delete", R.string.Delete))); - actionModeViews.add(actionModeOtherItem = actionMode.addItemWithWidth(nkactionbarbtn_action_mode_other, R.drawable.ic_ab_other, AndroidUtilities.dp(54), LocaleController.getString("MessageMenu", R.string.MessageMenu))); + @Override + public void onBeginSlide() { + super.onBeginSlide(); - if (currentEncryptedChat == null) { - actionModeOtherItem.addSubItem(forward, R.drawable.msg_forward, LocaleController.getString("Forward", R.string.Forward)); + if (selectionReactionsOverlay != null && selectionReactionsOverlay.isVisible()) { + selectionReactionsOverlay.setHiddenByScroll(true); } + } - boolean noforward = getMessagesController().isChatNoForwardsWithOverride(currentChat); - - if (currentEncryptedChat == null || !noforward) { - if (NaConfig.INSTANCE.getShowNoQuoteForward().Bool()) { - actionModeOtherItem.addSubItem(nkbtn_forward_noquote, R.drawable.msg_forward_noquote, LocaleController.getString("NoQuoteForward", R.string.NoQuoteForward)); - } - actionModeOtherItem.addSubItem(star, R.drawable.msg_fave, LocaleController.getString("AddToFavorites", R.string.AddToFavorites)); - actionModeOtherItem.addSubItem(save_to, R.drawable.msg_download, LocaleController.getString("SaveToMusic", R.string.SaveToMusic)); + @Override + public void onFragmentDestroy() { + super.onFragmentDestroy(); + if (chatActivityEnterView != null) { + chatActivityEnterView.onDestroy(); } - - actionModeOtherItem.addSubItem(nkbtn_translate, R.drawable.msg_translate, LocaleController.getString("Translate", R.string.Translate)); - if (NekoConfig.showShareMessages.Bool()) - actionModeOtherItem.addSubItem(nkbtn_sharemessage, R.drawable.msg_shareout, LocaleController.getString("ShareMessages", R.string.ShareMessages)); - actionModeOtherItem.addSubItem(nkbtn_unpin, R.drawable.msg_unpin, LocaleController.getString("UnpinMessage", R.string.UnpinMessage)); - if (!noforward) - actionModeOtherItem.addSubItem(nkbtn_savemessage, R.drawable.menu_saved, LocaleController.getString("AddToSavedMessages", R.string.AddToSavedMessages)); - if (NekoConfig.showRepeat.Bool() && !noforward) { - actionModeOtherItem.addSubItem(nkbtn_repeat, R.drawable.msg_repeat, LocaleController.getString("Repeat", R.string.Repeat)); + if (avatarContainer != null) { + avatarContainer.onDestroy(); } - if (NaConfig.INSTANCE.getShowRepeatAsCopy().Bool() || (NaConfig.INSTANCE.getAutoReplaceRepeat().Bool() && noforward)) { - actionModeOtherItem.addSubItem(nkbtn_repeatascopy, R.drawable.msg_repeat, LocaleController.getString("RepeatAsCopy", R.string.RepeatAsCopy)); + if (mentionContainer != null && mentionContainer.getAdapter() != null) { + mentionContainer.getAdapter().onDestroy(); } - if (NekoConfig.showMessageHide.Bool()) { - actionModeOtherItem.addSubItem(nkbtn_hide, 
R.drawable.msg_disable, LocaleController.getString("Hide", - R.string.Hide)); + if (chatAttachAlert != null) { + chatAttachAlert.dismissInternal(); } - if (NekoConfig.showMessageDetails.Bool()) { - actionModeOtherItem.addSubItem(nkbtn_detail,R.drawable.msg_info,LocaleController.getString("MessageDetails", R.string.MessageDetails)); + getNotificationCenter().onAnimationFinish(transitionAnimationIndex); + NotificationCenter.getGlobalInstance().onAnimationFinish(transitionAnimationGlobalIndex); + getNotificationCenter().onAnimationFinish(scrollAnimationIndex); + getNotificationCenter().onAnimationFinish(scrollCallbackAnimationIndex); + hideUndoViews(); + if (chatInviteRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(chatInviteRunnable); + chatInviteRunnable = null; } + getNotificationCenter().removePostponeNotificationsCallback(postponeNotificationsWhileLoadingCallback); + getMessagesController().setLastCreatedDialogId(dialog_id, chatMode == MODE_SCHEDULED, false); + getNotificationCenter().removeObserver(this, NotificationCenter.messagesDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.invalidateMotionBackground); + getNotificationCenter().removeObserver(this, NotificationCenter.didUpdateConnectionState); + getNotificationCenter().removeObserver(this, NotificationCenter.updateInterfaces); + getNotificationCenter().removeObserver(this, NotificationCenter.updateDefaultSendAsPeer); + getNotificationCenter().removeObserver(this, NotificationCenter.didReceiveNewMessages); + getNotificationCenter().removeObserver(this, NotificationCenter.closeChats); + getNotificationCenter().removeObserver(this, NotificationCenter.messagesRead); + getNotificationCenter().removeObserver(this, NotificationCenter.threadMessagesRead); + getNotificationCenter().removeObserver(this, NotificationCenter.commentsRead); + getNotificationCenter().removeObserver(this, NotificationCenter.changeRepliesCounter); + getNotificationCenter().removeObserver(this, NotificationCenter.messagesDeleted); + getNotificationCenter().removeObserver(this, NotificationCenter.historyCleared); + getNotificationCenter().removeObserver(this, NotificationCenter.messageReceivedByServer); + getNotificationCenter().removeObserver(this, NotificationCenter.messageReceivedByAck); + getNotificationCenter().removeObserver(this, NotificationCenter.messageSendError); + getNotificationCenter().removeObserver(this, NotificationCenter.chatInfoDidLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.didLoadChatInviter); + getNotificationCenter().removeObserver(this, NotificationCenter.groupCallUpdated); + getNotificationCenter().removeObserver(this, NotificationCenter.encryptedChatUpdated); + getNotificationCenter().removeObserver(this, NotificationCenter.messagesReadEncrypted); + getNotificationCenter().removeObserver(this, NotificationCenter.removeAllMessagesFromDialog); + getNotificationCenter().removeObserver(this, NotificationCenter.contactsDidLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.messagePlayingProgressDidChanged); + getNotificationCenter().removeObserver(this, NotificationCenter.messagePlayingDidReset); + getNotificationCenter().removeObserver(this, NotificationCenter.screenshotTook); + getNotificationCenter().removeObserver(this, NotificationCenter.blockedUsersDidLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.fileNewChunkAvailable); 
+ getNotificationCenter().removeObserver(this, NotificationCenter.didCreatedNewDeleteTask); + getNotificationCenter().removeObserver(this, NotificationCenter.messagePlayingDidStart); + getNotificationCenter().removeObserver(this, NotificationCenter.messagePlayingGoingToStop); + getNotificationCenter().removeObserver(this, NotificationCenter.updateMessageMedia); + getNotificationCenter().removeObserver(this, NotificationCenter.voiceTranscriptionUpdate); + getNotificationCenter().removeObserver(this, NotificationCenter.animatedEmojiDocumentLoaded); + getNotificationCenter().removeObserver(this, NotificationCenter.replaceMessagesObjects); + getNotificationCenter().removeObserver(this, NotificationCenter.notificationsSettingsUpdated); + getNotificationCenter().removeObserver(this, NotificationCenter.replyMessagesDidLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.didReceivedWebpages); + getNotificationCenter().removeObserver(this, NotificationCenter.didReceivedWebpagesInUpdates); + getNotificationCenter().removeObserver(this, NotificationCenter.messagesReadContent); + getNotificationCenter().removeObserver(this, NotificationCenter.botInfoDidLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.botKeyboardDidLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.chatSearchResultsAvailable); + getNotificationCenter().removeObserver(this, NotificationCenter.chatSearchResultsLoading); + getNotificationCenter().removeObserver(this, NotificationCenter.messagePlayingPlayStateChanged); + getNotificationCenter().removeObserver(this, NotificationCenter.didUpdateMessagesViews); + getNotificationCenter().removeObserver(this, NotificationCenter.chatInfoCantLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.didLoadPinnedMessages); + getNotificationCenter().removeObserver(this, NotificationCenter.peerSettingsDidLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.newDraftReceived); + getNotificationCenter().removeObserver(this, NotificationCenter.userInfoDidLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.pinnedInfoDidLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.topicsDidLoaded); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didSetNewWallpapper); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didApplyNewTheme); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.goingToPreviewTheme); + getNotificationCenter().removeObserver(this, NotificationCenter.channelRightsUpdated); + getNotificationCenter().removeObserver(this, NotificationCenter.updateMentionsCount); + getNotificationCenter().removeObserver(this, NotificationCenter.audioRecordTooShort); + getNotificationCenter().removeObserver(this, NotificationCenter.didUpdatePollResults); + getNotificationCenter().removeObserver(this, NotificationCenter.didUpdateReactions); + getNotificationCenter().removeObserver(this, NotificationCenter.didUpdateExtendedMedia); + getNotificationCenter().removeObserver(this, NotificationCenter.chatOnlineCountDidLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.videoLoadingStateChanged); + getNotificationCenter().removeObserver(this, NotificationCenter.scheduledMessagesUpdated); + getNotificationCenter().removeObserver(this, NotificationCenter.diceStickersDidLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.dialogDeleted); + 
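Note on getOtherSameChatsDiff(), added a few hunks above: it scans the fragment stack from the bottom for another ChatActivity showing the same dialog and returns the index difference to it (negative when that copy sits below the current fragment, 0 when there is none). A standalone model under hypothetical Screen/dialogId names:

    // Standalone model of the stack-distance computation; types are illustrative only.
    import java.util.List;

    final class StackDiff {
        interface Screen { long dialogId(); }

        static int otherSameChatDiff(List<Screen> stack, Screen self) {
            if (stack == null) return 0;
            int cur = stack.indexOf(self);
            if (cur == -1) cur = stack.size();   // not in the stack yet: treat as "on top"
            int other = cur;
            for (int a = 0; a < stack.size(); a++) {
                Screen s = stack.get(a);
                if (s != self && s.dialogId() == self.dialogId()) { other = a; break; }
            }
            return other - cur;                  // negative: same chat sits below us; 0: none found
        }
    }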
getNotificationCenter().removeObserver(this, NotificationCenter.chatAvailableReactionsUpdated); + getNotificationCenter().removeObserver(this, NotificationCenter.didLoadSponsoredMessages); + getNotificationCenter().removeObserver(this, NotificationCenter.didLoadSendAsPeers); + getNotificationCenter().removeObserver(this, NotificationCenter.dialogsUnreadReactionsCounterChanged); + getNotificationCenter().removeObserver(this, NotificationCenter.groupStickersDidLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.dialogTranslate); + getNotificationCenter().removeObserver(this, NotificationCenter.dialogIsTranslatable); + getNotificationCenter().removeObserver(this, NotificationCenter.messageTranslated); + getNotificationCenter().removeObserver(this, NotificationCenter.messageTranslating); + if (currentEncryptedChat != null) { + getNotificationCenter().removeObserver(this, NotificationCenter.didVerifyMessagesStickers); + } + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.needSetDayNightTheme); - actionMode.getItem(nkactionbarbtn_reply).setVisibility(ChatObject.canSendMessages(currentChat) && selectedMessagesIds[0].size() + selectedMessagesIds[1].size() == 1 ? View.VISIBLE : View.GONE); - actionMode.getItem(edit).setVisibility(canEditMessagesCount == 1 && selectedMessagesIds[0].size() + selectedMessagesIds[1].size() == 1 ? View.VISIBLE : View.GONE); - actionMode.getItem(copy).setVisibility(!getMessagesController().isChatNoForwardsWithOverride(currentChat) && selectedMessagesCanCopyIds[0].size() + selectedMessagesCanCopyIds[1].size() != 0 ? View.VISIBLE : View.GONE); - actionMode.getItem(combine_message).setVisibility(selectedMessagesCanCopyIds[0].size() + selectedMessagesCanCopyIds[1].size() != 0 ? View.VISIBLE : View.GONE); - actionMode.getItem(delete).setVisibility(cantDeleteMessagesCount == 0 ? 
View.VISIBLE : View.GONE); - - actionModeOtherItem.setSubItemVisibility(star, selectedMessagesCanStarIds[0].size() + selectedMessagesCanStarIds[1].size() != 0); - - checkActionBarMenu(false); - - scrimPaint = new Paint(); - - fragmentView = new SizeNotifierFrameLayout(context, parentLayout) { - - int inputFieldHeight = 0; - int lastHeight; - - int lastWidth; - - ArrayList drawTimeAfter = new ArrayList<>(); - ArrayList drawNamesAfter = new ArrayList<>(); - ArrayList drawCaptionAfter = new ArrayList<>(); - - Paint backgroundPaint; - int backgroundColor; - - @Override - protected void drawList(Canvas blurCanvas, boolean top) { - float cilpTop = chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4); - - for (int i = 0; i < chatListView.getChildCount(); i++) { - View child = chatListView.getChildAt(i); - if (top && child.getY() > cilpTop + AndroidUtilities.dp(40)) { - continue; - } - if (!top && child.getY() + child.getMeasuredHeight() < AndroidUtilities.dp(203)) { - continue; - } - - blurCanvas.save(); - if (top) { - blurCanvas.translate(chatListView.getX() + child.getX(), chatListView.getY() + child.getY() - contentPanTranslation); - } else { - blurCanvas.translate(chatListView.getX() + child.getX(), chatListView.getTop() + child.getY()); - } - if (child instanceof ChatMessageCell) { - ChatMessageCell cell = (ChatMessageCell) child; - cell.drawForBlur = true; - if (cell.drawBackgroundInParent()) { - cell.drawBackgroundInternal(blurCanvas, true); - } - child.draw(blurCanvas); - if (cell.hasOutboundsContent()) { - ((ChatMessageCell) child).drawOutboundsContent(blurCanvas); - } - cell.drawForBlur = false; - } else if (child instanceof ChatActionCell) { - child.draw(blurCanvas); - ((ChatActionCell) child).drawOutboundsContent(blurCanvas); - } else { - child.draw(blurCanvas); - } - blurCanvas.restore(); - } - } - - @Override - protected int getScrollOffset() { - return chatListView.computeVerticalScrollOffset(); - } - - @Override - protected float getBottomOffset() { - return chatListView.getBottom(); - } - - @Override - protected float getListTranslationY() { - return chatListView.getTranslationY(); - } - - { - adjustPanLayoutHelper = new AdjustPanLayoutHelper(this) { - - @Override - protected void onTransitionStart(boolean keyboardVisible, int contentHeight) { - wasManualScroll = true; - if (chatActivityEnterView != null) { - chatActivityEnterView.onAdjustPanTransitionStart(keyboardVisible, contentHeight); - } - if (mentionContainer != null) { - mentionContainer.onPanTransitionStart(); - } - if (mediaBanTooltip != null) { - mediaBanTooltip.hide(false); - } - } - - @Override - protected void onTransitionEnd() { - if (chatActivityEnterView != null) { - chatActivityEnterView.onAdjustPanTransitionEnd(); - } - if (mentionContainer != null) { - mentionContainer.onPanTransitionEnd(); - } - if (voiceHintTextView != null && voiceHintTextView.getVisibility() == View.VISIBLE) { - voiceHintTextView.showForView(chatActivityEnterView.getAudioVideoButtonContainer(), false); - } - } - - @Override - protected void onPanTranslationUpdate(float y, float progress, boolean keyboardVisible) { - if (getParentLayout() != null && getParentLayout().isPreviewOpenAnimationInProgress()) { - return; - } - contentPanTranslation = y; - if (chatAttachAlert != null && chatAttachAlert.isShowing()) { - setNonNoveTranslation(y); - } else { - actionBar.setTranslationY(y); - emptyViewContainer.setTranslationY(y / 2); - progressView.setTranslationY(y / 2); - contentView.setBackgroundTranslation((int) y); - 
instantCameraView.onPanTranslationUpdate(y); - if (blurredView != null) { - blurredView.drawable.onPanTranslationUpdate(y); - } - setFragmentPanTranslationOffset((int) y); - invalidateChatListViewTopPadding(); - invalidateMessagesVisiblePart(); - } - chatListView.invalidate(); - updateBulletinLayout(); - if (chatActivityEnterView != null) { - chatActivityEnterView.onAdjustPanTransitionUpdate(y, progress, keyboardVisible); - } - if (mentionContainer != null) { - mentionContainer.onPanTransitionUpdate(y); - } - if (AndroidUtilities.isTablet() && getParentActivity() instanceof LaunchActivity) { - BaseFragment mainFragment = ((LaunchActivity)getParentActivity()).getActionBarLayout().getLastFragment(); - if (mainFragment instanceof DialogsActivity) { - ((DialogsActivity)mainFragment).setPanTranslationOffset(y); - } - } - if (voiceHintTextView != null && voiceHintTextView.getVisibility() == View.VISIBLE) { - voiceHintTextView.showForView(chatActivityEnterView.getAudioVideoButtonContainer(), false); - } - } - - @Override - protected boolean heightAnimationEnabled() { - INavigationLayout actionBarLayout = getParentLayout(); - if (inPreviewMode || inBubbleMode || AndroidUtilities.isInMultiwindow || actionBarLayout == null || fixedKeyboardHeight > 0) { - return false; - } - if (System.currentTimeMillis() - activityResumeTime < 250) { - return false; - } - if ((ChatActivity.this == actionBarLayout.getLastFragment() && actionBarLayout.isTransitionAnimationInProgress()) || actionBarLayout.isPreviewOpenAnimationInProgress() || isPaused || !openAnimationEnded || (chatAttachAlert != null && chatAttachAlert.isShowing())) { - return false; - } - if (chatActivityEnterView != null && chatActivityEnterView.getTrendingStickersAlert() != null && chatActivityEnterView.getTrendingStickersAlert().isShowing()) { - return false; - } - return true; - } - - @Override - protected int startOffset() { - int keyboardSize = getKeyboardHeight(); - if (keyboardSize <= AndroidUtilities.dp(20) && chatActivityEnterView.isPopupShowing()) { - return chatActivityEnterView.getEmojiPadding(); - } - return 0; - } - }; - } - - @Override - protected void onAttachedToWindow() { - super.onAttachedToWindow(); - adjustPanLayoutHelper.onAttach(); - chatActivityEnterView.setAdjustPanLayoutHelper(adjustPanLayoutHelper); - MessageObject messageObject = MediaController.getInstance().getPlayingMessageObject(); - if (messageObject != null && (messageObject.isRoundVideo() || messageObject.isVideo()) && messageObject.eventId == 0 && messageObject.getDialogId() == dialog_id) { - MediaController.getInstance().setTextureView(createTextureView(false), aspectRatioFrameLayout, videoPlayerContainer, true); - } - if (pullingDownDrawable != null) { - pullingDownDrawable.onAttach(); - } - emojiAnimationsOverlay.onAttachedToWindow(); - } + if (chatMode == 0 && AndroidUtilities.isTablet()) { + getNotificationCenter().postNotificationName(NotificationCenter.openedChatChanged, dialog_id, getTopicId(), true); + } + if (currentUser != null) { + MediaController.getInstance().stopMediaObserver(); + } - @Override - protected void onDetachedFromWindow() { - super.onDetachedFromWindow(); - adjustPanLayoutHelper.onDetach(); - if (pullingDownDrawable != null) { - pullingDownDrawable.onDetach(); - pullingDownDrawable = null; - } - emojiAnimationsOverlay.onDetachedFromWindow(); - AndroidUtilities.runOnUIThread(() -> { - ReactionsEffectOverlay.removeCurrent(true); - }); + if (flagSecure != null) { + flagSecure.detach(); + } + if (currentUser != null) { + 
getMessagesController().cancelLoadFullUser(currentUser.id); + } + AndroidUtilities.removeAdjustResize(getParentActivity(), classGuid); + if (chatAttachAlert != null) { + chatAttachAlert.onDestroy(); + } + AndroidUtilities.unlockOrientation(getParentActivity()); + if (ChatObject.isChannel(currentChat)) { + getMessagesController().startShortPoll(currentChat, classGuid, true); + if (chatInfo != null && chatInfo.linked_chat_id != 0) { + TLRPC.Chat chat = getMessagesController().getChat(chatInfo.linked_chat_id); + getMessagesController().startShortPoll(chat, classGuid, true); } + } + if (textSelectionHelper != null) { + textSelectionHelper.clear(); + } + if (chatListItemAnimator != null) { + chatListItemAnimator.onDestroy(); + } + if (pinchToZoomHelper != null) { + pinchToZoomHelper.clear(); + } + chatThemeBottomSheet = null; - private float x, y; - private long pressTime; + INavigationLayout parentLayout = getParentLayout(); + if (parentLayout != null && parentLayout.getFragmentStack() != null) { + BackButtonMenu.clearPulledDialogs(this, parentLayout.getFragmentStack().indexOf(this) - (replacingChatActivity ? 0 : 1)); + } + replacingChatActivity = false; - @Override - public boolean dispatchTouchEvent(MotionEvent ev) { - float expandY; - if (AndroidUtilities.isInMultiwindow || isInBubbleMode()) { - expandY = chatActivityEnterView.getEmojiView() != null ? chatActivityEnterView.getEmojiView().getY() : chatActivityEnterView.getY(); - } else { - expandY = chatActivityEnterView.getY(); - } - if ((scrimView != null && scrimView != actionBar.getBackButton()) || chatActivityEnterView != null && chatActivityEnterView.isStickersExpanded() && ev.getY() < expandY) { - return false; - } + if (progressDialogCurrent != null) { + progressDialogCurrent.cancel(); + progressDialogCurrent = null; + } + chatMessagesMetadataController.onFragmentDestroy(); + } - lastTouchY = ev.getY(); - TextSelectionHelper.TextSelectionOverlay selectionOverlay = textSelectionHelper.getOverlayView(context); - ev.offsetLocation(-selectionOverlay.getX(), -selectionOverlay.getY()); - if (textSelectionHelper.isSelectionMode() && textSelectionHelper.getOverlayView(context).onTouchEvent(ev)) { - return true; - } else { - ev.offsetLocation(selectionOverlay.getX(), selectionOverlay.getY()); + private ArrayList getSelectedMessages() { + ArrayList fmessages = new ArrayList<>(); + for (int a = 1; a >= 0; a--) { + ArrayList ids = new ArrayList<>(); + for (int b = 0; b < selectedMessagesIds[a].size(); b++) { + ids.add(selectedMessagesIds[a].keyAt(b)); + } + Collections.sort(ids); + for (int b = 0; b < ids.size(); b++) { + Integer id = ids.get(b); + MessageObject messageObject = selectedMessagesIds[a].get(id); + if (messageObject != null) { + fmessages.add(messageObject); } + } + selectedMessagesCanCopyIds[a].clear(); + selectedMessagesCanStarIds[a].clear(); + selectedMessagesIds[a].clear(); + } + hideActionMode(); + updatePinnedMessageView(true); + updateVisibleRows(); + return fmessages; + } - if (selectionOverlay.checkOnTap(ev)) { - ev.setAction(MotionEvent.ACTION_CANCEL); + private ArrayList getSelectedMessages1() { + ArrayList fmessages = new ArrayList<>(); + for (int a = 1; a >= 0; a--) { + for (int b = 0; b < selectedMessagesIds[a].size(); b++) { + MessageObject messageObject = selectedMessagesIds[a].get(selectedMessagesIds[a].keyAt(b)); + if (messageObject != null) { + fmessages.add(messageObject); } + } + } + return fmessages; + } - if (ev.getAction() == MotionEvent.ACTION_DOWN && textSelectionHelper.isSelectionMode() && (ev.getY() < 
chatListView.getTop() || ev.getY() > chatListView.getBottom())) { - ev.offsetLocation(-selectionOverlay.getX(), -selectionOverlay.getY()); - if (textSelectionHelper.getOverlayView(context).onTouchEvent(ev)) { - ev.offsetLocation(selectionOverlay.getX(), selectionOverlay.getY()); - return super.dispatchTouchEvent(ev); - } else { - return true; - } - } + private static class ChatActivityTextSelectionHelper extends TextSelectionHelper.ChatListTextSelectionHelper { + ChatActivity chatActivity; + public void setChatActivity(ChatActivity chatActivity) { + cancelAllAnimators(); + clear(); + textSelectionOverlay = null; + this.chatActivity = chatActivity; + } - if (pinchToZoomHelper.isInOverlayMode()) { - return pinchToZoomHelper.onTouchEvent(ev); - } + @Override + public int getParentTopPadding() { + return chatActivity == null ? 0 : (int) chatActivity.chatListViewPaddingTop; + } - if (AvatarPreviewer.hasVisibleInstance()) { - AvatarPreviewer.getInstance().onTouchEvent(ev); - return true; - } + @Override + public int getParentBottomPadding() { + return chatActivity == null ? 0 : chatActivity.blurredViewBottomOffset; + } - boolean r = false; - if (isInPreviewMode() && allowExpandPreviewByClick) { - if (ev.getAction() == MotionEvent.ACTION_DOWN) { - boolean pressedOnPageDownButtons = false, pressedOnAvatar = false; - int[] off = new int[2]; - getLocationInWindow(off); - int[] pos = new int[2]; - if (pagedownButton != null) { - pagedownButton.getLocationInWindow(pos); - AndroidUtilities.rectTmp2.set(pos[0] - off[0], pos[1] - off[1], pos[0] - off[0] + pagedownButton.getMeasuredWidth(), pos[1] - off[1] + pagedownButton.getMeasuredHeight()); - if (AndroidUtilities.rectTmp2.contains((int) ev.getX(), (int) ev.getY())) { - pressedOnPageDownButtons = true; - } - } - if (avatarContainer != null && avatarContainer.getAvatarImageView() != null) { - BackupImageView avatar = avatarContainer.getAvatarImageView(); - avatar.getLocationInWindow(pos); - AndroidUtilities.rectTmp2.set(pos[0] - off[0], pos[1] - off[1], pos[0] - off[0] + avatar.getMeasuredWidth(), pos[1] - off[1] + avatar.getMeasuredHeight()); - if (AndroidUtilities.rectTmp2.contains((int) ev.getX(), (int) ev.getY())) { - pressedOnAvatar = true; - } - } - if (!pressedOnPageDownButtons && mentiondownButton != null) { - mentiondownButton.getLocationInWindow(pos); - AndroidUtilities.rectTmp2.set(pos[0] - off[0], pos[1] - off[1], pos[0] - off[0] + mentiondownButton.getMeasuredWidth(), pos[1] - off[1] + mentiondownButton.getMeasuredHeight()); - if (AndroidUtilities.rectTmp2.contains((int) ev.getX(), (int) ev.getY())) { - pressedOnPageDownButtons = true; - } - } - if (!pressedOnPageDownButtons && !pressedOnAvatar) { - x = ev.getX(); - y = ev.getY(); - pressTime = SystemClock.elapsedRealtime(); - r = true; - } else { - pressTime = -1; - } - } else if (ev.getAction() == MotionEvent.ACTION_UP) { - if (MathUtils.distance(x, y, ev.getX(), ev.getY()) < AndroidUtilities.dp(6) && SystemClock.elapsedRealtime() - pressTime <= ViewConfiguration.getTapTimeout()) { - parentLayout.expandPreviewFragment(); - ev.setAction(MotionEvent.ACTION_CANCEL); - } - pressTime = -1; - } else if (ev.getAction() == MotionEvent.ACTION_CANCEL) { - pressTime = -1; - } - } + @Override + protected int getThemedColor(String key) { + Integer color = chatActivity == null ? null : chatActivity.themeDelegate.getColor(key); + return color != null ? 
color : super.getThemedColor(key); + } - return super.dispatchTouchEvent(ev) || r; + @Override + protected Theme.ResourcesProvider getResourcesProvider() { + if (chatActivity != null) { + return chatActivity.themeDelegate; } + return null; + } + } - @Override - protected void onDraw(Canvas canvas) { - if (getTag(BlurBehindDrawable.TAG_DRAWING_AS_BACKGROUND) != null) { - return; - } - if (getTag(BlurBehindDrawable.TAG_DRAWING_AS_BACKGROUND) == null && (instantCameraView.blurFullyDrawing() || (blurredView != null && blurredView.fullyDrawing() && blurredView.getTag() != null))) { - return; - } - super.onDraw(canvas); + @Override + public View createView(Context context) { + if (textSelectionHelper == null) { + if (textSelectionHelpersCache != null && !textSelectionHelpersCache.isEmpty()) { + textSelectionHelper = textSelectionHelpersCache.remove(0); + } else { + textSelectionHelper = new ChatActivityTextSelectionHelper(); } + textSelectionHelper.setChatActivity(this); + } - @Override - protected boolean drawChild(Canvas canvas, View child, long drawingTime) { - if ((scrimView != null || messageEnterTransitionContainer.isRunning()) && (child == pagedownButton || child == mentiondownButton || child == floatingDateView || child == fireworksOverlay || child == reactionsMentiondownButton || child == gifHintTextView || child == emojiHintTextView || child == undoView || child == topUndoView)) { - return false; - } - if (child == fragmentContextView && fragmentContextView.isCallStyle()) { - return true; - } - if (child == undoView && PhotoViewer.getInstance().isVisible()) { - return true; - } - if (toPullingDownTransition && child == chatListView) { - return true; - } - if (switchingFromTopics && child == actionBar) { - return true; - } - if (getTag(BlurBehindDrawable.TAG_DRAWING_AS_BACKGROUND) != null) { - boolean needBlur; - if (((int) getTag(BlurBehindDrawable.TAG_DRAWING_AS_BACKGROUND)) == BlurBehindDrawable.STATIC_CONTENT) { - needBlur = child == actionBar || child == fragmentContextView || child == pinnedMessageView; - } else { - needBlur = child == chatListView || child == chatActivityEnterView || chatActivityEnterView.isPopupView(child); - } - if (!needBlur) { - return false; - } - } else if (getTag(BlurBehindDrawable.TAG_DRAWING_AS_BACKGROUND) == null && (instantCameraView.blurFullyDrawing() || (blurredView != null && blurredView.fullyDrawing() && blurredView.getTag() != null))) { - boolean needBlur = child == actionBar || child == chatListView || child == pinnedMessageView || child == fragmentContextView; - if (needBlur) { - return false; - } - } - boolean result; - MessageObject messageObject = MediaController.getInstance().getPlayingMessageObject(); - boolean isRoundVideo = false; - boolean isVideo = messageObject != null && messageObject.eventId == 0 && ((isRoundVideo = messageObject.isRoundVideo()) || messageObject.isVideo()); - if (child == videoPlayerContainer) { - canvas.save(); - float transitionOffset = 0; - if (pullingDownAnimateProgress != 0) { - transitionOffset = (chatListView.getMeasuredHeight() - pullingDownOffset) * pullingDownAnimateProgress; - } - canvas.translate(0, -pullingDownOffset - transitionOffset); - if (messageObject != null && messageObject.type == MessageObject.TYPE_ROUND_VIDEO) { - if (Theme.chat_roundVideoShadow != null && aspectRatioFrameLayout.isDrawingReady()) { - int x = (int) child.getX() - AndroidUtilities.dp(3); - int y = (int) child.getY() - AndroidUtilities.dp(2); - canvas.save(); - canvas.scale(videoPlayerContainer.getScaleX(), 
videoPlayerContainer.getScaleY(), child.getX(), child.getY()); - Theme.chat_roundVideoShadow.setAlpha(255); - Theme.chat_roundVideoShadow.setBounds(x, y, x + AndroidUtilities.roundPlayingMessageSize + AndroidUtilities.dp(6), y + AndroidUtilities.roundPlayingMessageSize + AndroidUtilities.dp(6)); - Theme.chat_roundVideoShadow.draw(canvas); - canvas.restore(); - } - result = super.drawChild(canvas, child, drawingTime); - } else { - if (child.getTag() == null) { - float oldTranslation = child.getTranslationY(); - child.setTranslationY(-AndroidUtilities.dp(1000)); - result = super.drawChild(canvas, child, drawingTime); - child.setTranslationY(oldTranslation); - } else { - result = false; - } - } - canvas.restore(); - } else { - result = super.drawChild(canvas, child, drawingTime); - if (isVideo && child == chatListView && messageObject.type != MessageObject.TYPE_ROUND_VIDEO && videoPlayerContainer != null && videoPlayerContainer.getTag() != null) { - canvas.save(); - float transitionOffset = 0; - if (pullingDownAnimateProgress != 0) { - transitionOffset = (chatListView.getMeasuredHeight() - pullingDownOffset) * pullingDownAnimateProgress; - } - canvas.translate(0, -pullingDownOffset - transitionOffset + pullingBottomOffset); - super.drawChild(canvas, videoPlayerContainer, drawingTime); - if (drawLaterRoundProgressCell != null) { - canvas.save(); - canvas.translate(drawLaterRoundProgressCell.getX(), drawLaterRoundProgressCell.getTop() + chatListView.getY()); - if (isRoundVideo) { - drawLaterRoundProgressCell.drawRoundProgress(canvas); - invalidate(); - drawLaterRoundProgressCell.invalidate(); - // drawLaterRoundProgressCell.drawOverlays(canvas); - } else { - drawLaterRoundProgressCell.drawOverlays(canvas); - if (drawLaterRoundProgressCell.needDrawTime()) { - drawLaterRoundProgressCell.drawTime(canvas, drawLaterRoundProgressCell.getAlpha(), true); - } - } - canvas.restore(); - } - canvas.restore(); - } - } - if (child == actionBar && parentLayout != null) { - parentLayout.drawHeaderShadow(canvas, actionBar.getVisibility() == VISIBLE ? (int) actionBar.getTranslationY() + actionBar.getMeasuredHeight() + (inPreviewMode && Build.VERSION.SDK_INT >= 21 ? 
AndroidUtilities.statusBarHeight : 0) : 0); - } - return result; - } + if (reportType >= 0) { + actionBar.setBackgroundColor(getThemedColor(Theme.key_actionBarActionModeDefault)); + actionBar.setItemsColor(getThemedColor(Theme.key_actionBarActionModeDefaultIcon), false); + actionBar.setItemsBackgroundColor(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), false); + actionBar.setTitleColor(getThemedColor(Theme.key_actionBarActionModeDefaultIcon)); + actionBar.setSubtitleColor(getThemedColor(Theme.key_actionBarActionModeDefaultIcon)); + } + actionBarBackgroundPaint.setColor(getThemedColor(Theme.key_actionBarDefault)); - @Override - protected boolean isActionBarVisible() { - return actionBar.getVisibility() == VISIBLE; + if (chatMessageCellsCache.isEmpty()) { + for (int a = 0; a < 15; a++) { + chatMessageCellsCache.add(new ChatMessageCell(context, true, themeDelegate)); } + } + for (int a = 1; a >= 0; a--) { + selectedMessagesIds[a].clear(); + selectedMessagesCanCopyIds[a].clear(); + selectedMessagesCanStarIds[a].clear(); + } + scheduledOrNoSoundHint = null; + infoTopView = null; + aspectRatioFrameLayout = null; + videoTextureView = null; + searchAsListHint = null; + mediaBanTooltip = null; + noSoundHintView = null; + forwardHintView = null; + checksHintView = null; + textSelectionHint = null; + emojiButtonRed = null; + gifHintTextView = null; + emojiHintTextView = null; + pollHintView = null; + timerHintView = null; + videoPlayerContainer = null; + voiceHintTextView = null; + blurredView = null; + dummyMessageCell = null; + cantDeleteMessagesCount = 0; + canEditMessagesCount = 0; + cantForwardMessagesCount = 0; + canForwardMessagesCount = 0; + cantSaveMessagesCount = 0; + canSaveMusicCount = 0; + canSaveDocumentsCount = 0; - private void drawChildElement(Canvas canvas, float listTop, ChatMessageCell cell, int type) { - canvas.save(); - float canvasOffsetX = chatListView.getLeft() + cell.getLeft(); - float canvasOffsetY = chatListView.getY() + cell.getY(); - float alpha = cell.shouldDrawAlphaLayer() ? 
cell.getAlpha() : 1f; - canvas.clipRect(chatListView.getLeft(), listTop, chatListView.getRight(), chatListView.getY() + chatListView.getMeasuredHeight() - blurredViewBottomOffset); - canvas.translate(canvasOffsetX, canvasOffsetY); - cell.setInvalidatesParent(true); - if (type == 0) { - cell.drawTime(canvas, alpha, true); - } else if (type == 1) { - cell.drawNamesLayout(canvas, alpha); - } else { - cell.drawCaptionLayout(canvas, cell.getCurrentPosition() != null && (cell.getCurrentPosition().flags & MessageObject.POSITION_FLAG_LEFT) == 0, alpha); + hasOwnBackground = true; + if (chatAttachAlert != null) { + try { + if (chatAttachAlert.isShowing()) { + chatAttachAlert.dismiss(); } - cell.setInvalidatesParent(false); - canvas.restore(); + } catch (Exception ignore) { + } + chatAttachAlert.onDestroy(); + chatAttachAlert = null; + } + Theme.createChatResources(context, false); + actionBar.setAddToContainer(false); + if (inPreviewMode) { + actionBar.setBackButtonDrawable(null); + } else { + actionBar.setBackButtonDrawable(new BackDrawable(reportType >= 0)); + } + actionBar.setActionBarMenuOnItemClick(new ActionBar.ActionBarMenuOnItemClick() { @Override - protected void dispatchDraw(Canvas canvas) { - chatActivityEnterView.checkAnimation(); - updateChatListViewTopPadding(); - if (invalidateMessagesVisiblePart || (chatListItemAnimator != null && chatListItemAnimator.isRunning())) { - invalidateMessagesVisiblePart = false; - updateMessagesVisiblePart(false); - } - updateTextureViewPosition(false, false); - updatePagedownButtonsPosition(); - int restoreToCount = -1; - if (switchingFromTopics) { - restoreToCount = canvas.saveLayerAlpha(0, actionBar.getBottom(), getMeasuredWidth(), getMeasuredHeight(), (int) (255 * switchingFromTopicsProgress), Canvas.ALL_SAVE_FLAG); - float s = 0.8f + 0.2f * switchingFromTopicsProgress; - canvas.scale(s, s, getMeasuredWidth() / 2f, getMeasuredHeight() / 2f); - } - super.dispatchDraw(canvas); - if (fragmentContextView != null && fragmentContextView.isCallStyle()) { - float alpha = (blurredView != null && blurredView.getVisibility() == View.VISIBLE) ? 
1f - blurredView.getAlpha() : 1f; - if (alpha > 0) { - if (alpha == 1f) { - canvas.save(); - } else { - canvas.saveLayerAlpha(fragmentContextView.getX(), fragmentContextView.getY() - AndroidUtilities.dp(30), fragmentContextView.getX() + fragmentContextView.getMeasuredWidth(), fragmentContextView.getY() + fragmentContextView.getMeasuredHeight(), (int) (255 * alpha), Canvas.ALL_SAVE_FLAG); + public void onItemClick(final int id) { + if (id == -1) { + if (actionBar.isActionModeShowed()) { + clearSelectionMode(); + } else { + if (!checkRecordLocked(true)) { + finishFragment(); } - canvas.translate(fragmentContextView.getX(), fragmentContextView.getY()); - fragmentContextView.setDrawOverlay(true); - fragmentContextView.draw(canvas); - fragmentContextView.setDrawOverlay(false); - canvas.restore(); } - fragmentView.invalidate(); - } - if (chatActivityEnterView != null) { - if (chatActivityEnterView.panelAnimationInProgress() && chatActivityEnterView.getEmojiPadding() < bottomPanelTranslationY) { - int color = getThemedColor(Theme.key_chat_emojiPanelBackground); - if (backgroundPaint == null) { - backgroundPaint = new Paint(); + } else if (id == view_as_topics) { + TopicsFragment.prepareToSwitchAnimation(ChatActivity.this); + } else if (id == copy) { + SpannableStringBuilder str = new SpannableStringBuilder(); + long previousUid = 0; + for (int a = 1; a >= 0; a--) { + ArrayList ids = new ArrayList<>(); + for (int b = 0; b < selectedMessagesCanCopyIds[a].size(); b++) { + ids.add(selectedMessagesCanCopyIds[a].keyAt(b)); + } + if (currentEncryptedChat == null) { + Collections.sort(ids); + } else { + Collections.sort(ids, Collections.reverseOrder()); } - if (backgroundColor != color) { - backgroundPaint.setColor(backgroundColor = color); + for (int b = 0; b < ids.size(); b++) { + Integer messageId = ids.get(b); + MessageObject messageObject = selectedMessagesCanCopyIds[a].get(messageId); + if (str.length() != 0) { + str.append("\n\n"); + } + str.append(getMessageContent(messageObject, previousUid, ids.size() != 1 && (currentUser == null || !currentUser.self))); + previousUid = messageObject.getFromChatId(); } - int offset = (int) (bottomPanelTranslationY - chatActivityEnterView.getEmojiPadding()) + 3; - canvas.drawRect(0, getMeasuredHeight() - offset, getMeasuredWidth(), getMeasuredHeight(), backgroundPaint); - setFragmentPanTranslationOffset(chatActivityEnterView.getEmojiPadding()); } - } - for (int a = 0, N = animateSendingViews.size(); a < N; a++) { - ChatMessageCell cell = animateSendingViews.get(a); - MessageObject.SendAnimationData data = cell.getMessageObject().sendAnimationData; - if (data != null) { - canvas.save(); - ImageReceiver imageReceiver = cell.getPhotoImage(); - canvas.translate(data.currentX, data.currentY); - canvas.scale(data.currentScale, data.currentScale); - canvas.translate(-imageReceiver.getCenterX(), -imageReceiver.getCenterY()); - cell.setTimeAlpha(data.timeAlpha); - animateSendingViews.get(a).draw(canvas); - canvas.restore(); + if (str.length() != 0) { + AndroidUtilities.addToClipboard(str); + createUndoView(); + undoView.showWithAction(0, UndoView.ACTION_TEXT_COPIED, null); } - } - if (scrimViewReaction == null || scrimView == null) { - scrimPaint.setAlpha((int) (255 * scrimPaintAlpha * (scrimView != null ? 
scrimViewAlpha : 1f))); - canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); - } - if (scrimView != null) { - if (scrimView == reactionsMentiondownButton || scrimView == mentiondownButton) { - if (scrimViewAlpha < 1f) { - scrimPaint.setAlpha((int) (255 * scrimPaintAlpha * (1f - scrimViewAlpha))); - canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); - } - } else if (scrimView instanceof ImageView) { - int c = canvas.save(); - if (scrimViewAlpha < 1f) { - canvas.saveLayerAlpha(scrimView.getLeft(), scrimView.getTop(), scrimView.getRight(), scrimView.getBottom(), (int) (255 * scrimViewAlpha), Canvas.ALL_SAVE_FLAG); - } - canvas.translate(scrimView.getLeft(), scrimView.getTop()); - if (scrimView == actionBar.getBackButton()) { - int r = Math.max(scrimView.getMeasuredWidth(), scrimView.getMeasuredHeight()) / 2; - canvas.drawCircle(r, r, r * 0.7f, actionBarBackgroundPaint); - } - scrimView.draw(canvas); - canvas.restoreToCount(c); - - if (scrimViewAlpha < 1f) { - scrimPaint.setAlpha((int) (255 * scrimPaintAlpha * (1f - scrimViewAlpha))); - canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); + clearSelectionMode(); + } else if (id == combine_message) { + StringBuilder str = new StringBuilder(); + ArrayList toDeleteMessagesIds = new ArrayList<>(); + MessageObject replyTo = getThreadMessage(); + ArrayList suffice_en = new ArrayList(Arrays.asList(',', '.', '!', '?', ':', ';', '(', ')')); + ArrayList suffice_zh = new ArrayList(Arrays.asList(',', '。', '!', '?', ':', ';', '(', ')')); + for (int a = 1; a >= 0; a--) { + ArrayList ids = new ArrayList<>(); + for (int b = 0; b < selectedMessagesCanCopyIds[a].size(); b++) { + ids.add(selectedMessagesCanCopyIds[a].keyAt(b)); } - } else { - float listTop = chatListView.getY() + chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4); - MessageObject.GroupedMessages scrimGroup; - if (scrimView instanceof ChatMessageCell) { - scrimGroup = ((ChatMessageCell) scrimView).getCurrentMessagesGroup(); + if (currentEncryptedChat == null) { + Collections.sort(ids); } else { - scrimGroup = null; - } - boolean groupedBackgroundWasDraw = false; - int count = chatListView.getChildCount(); - for (int num = 0; num < count; num++) { - View child = chatListView.getChildAt(num); - MessageObject.GroupedMessages group; - MessageObject.GroupedMessagePosition position; - ChatMessageCell cell; - ChatActionCell actionCell; - if (child instanceof ChatMessageCell) { - cell = (ChatMessageCell) child; - actionCell = null; - group = cell.getCurrentMessagesGroup(); - position = cell.getCurrentPosition(); - } else { - position = null; - group = null; - cell = null; - actionCell = child instanceof ChatActionCell ? 
((ChatActionCell) child) : null; - } - if (child != scrimView && (scrimGroup == null || scrimGroup != group) || child.getAlpha() == 0f) { - continue; - } - if (!groupedBackgroundWasDraw && cell != null && scrimGroup != null && scrimGroup.transitionParams.cell != null) { - float x = scrimGroup.transitionParams.cell.getNonAnimationTranslationX(true); - - float l = (scrimGroup.transitionParams.left + x + scrimGroup.transitionParams.offsetLeft); - float t = (scrimGroup.transitionParams.top + scrimGroup.transitionParams.offsetTop); - float r = (scrimGroup.transitionParams.right + x + scrimGroup.transitionParams.offsetRight); - float b = (scrimGroup.transitionParams.bottom + scrimGroup.transitionParams.offsetBottom); - - if (!scrimGroup.transitionParams.backgroundChangeBounds) { - t += scrimGroup.transitionParams.cell.getTranslationY(); - b += scrimGroup.transitionParams.cell.getTranslationY(); - } - - if (t < chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(20)) { - t = chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(20); - } - - if (b > chatListView.getMeasuredHeight() + AndroidUtilities.dp(20)) { - b = chatListView.getMeasuredHeight() + AndroidUtilities.dp(20); - } - - boolean selected = true; - for (int a = 0, N = scrimGroup.messages.size(); a < N; a++) { - MessageObject object = scrimGroup.messages.get(a); - int index = object.getDialogId() == dialog_id ? 0 : 1; - if (selectedMessagesIds[index].indexOfKey(object.getId()) < 0) { - selected = false; - break; - } - } - - canvas.save(); - canvas.clipRect(0, listTop + (mentionContainer != null ? mentionContainer.clipTop() : 0), getMeasuredWidth(), chatListView.getY() + chatListView.getMeasuredHeight() - blurredViewBottomOffset - (mentionContainer != null ? 
mentionContainer.clipBottom() : 0)); - canvas.translate(0, chatListView.getY()); - scrimGroup.transitionParams.cell.drawBackground(canvas, (int) l, (int) t, (int) r, (int) b, scrimGroup.transitionParams.pinnedTop, scrimGroup.transitionParams.pinnedBotton, selected, contentView.getKeyboardHeight()); - canvas.restore(); - groupedBackgroundWasDraw = true; - } - - if (cell != null && cell.getPhotoImage().isAnimationRunning()) { - invalidate(); - } - - float viewClipLeft = chatListView.getLeft(); - float viewClipTop = listTop; - float viewClipRight = chatListView.getRight(); - float viewClipBottom = chatListView.getY() + chatListView.getMeasuredHeight() - blurredViewBottomOffset; - - if (mentionContainer != null) { - viewClipTop += mentionContainer.clipTop(); - viewClipBottom -= mentionContainer.clipBottom(); - } - - if (cell == null || !cell.getTransitionParams().animateBackgroundBoundsInner) { - viewClipLeft = Math.max(viewClipLeft, chatListView.getLeft() + child.getX()); - viewClipTop = Math.max(viewClipTop, chatListView.getY() + child.getY()); - viewClipRight = Math.min(viewClipRight, chatListView.getLeft() + child.getX() + child.getMeasuredWidth()); - viewClipBottom = Math.min(viewClipBottom, chatListView.getY() + child.getY() + child.getMeasuredHeight()); - } - - if (viewClipTop < viewClipBottom) { - if (child.getAlpha() != 1f) { - canvas.saveLayerAlpha(viewClipLeft, viewClipTop, viewClipRight, viewClipBottom, (int) (255 * child.getAlpha()), Canvas.ALL_SAVE_FLAG); - } else { - canvas.save(); - } - if (cell != null) { - cell.setInvalidatesParent(true); - cell.setScrimReaction(scrimViewReaction); - } - canvas.clipRect(viewClipLeft, viewClipTop, viewClipRight, viewClipBottom); - canvas.translate(chatListView.getLeft() + child.getX(), chatListView.getY() + child.getY()); - if (cell != null && scrimGroup == null && cell.drawBackgroundInParent()) { - cell.drawBackgroundInternal(canvas, true); - } - child.draw(canvas); - if (cell != null && cell.hasOutboundsContent()) { - cell.drawOutboundsContent(canvas); - } - if (actionCell != null) { - actionCell.drawOutboundsContent(canvas); - } - - canvas.restore(); - - if (cell != null) { - cell.setInvalidatesParent(false); - cell.setScrimReaction(null); - } - } - - if (position != null || (cell != null && cell.getTransitionParams().animateBackgroundBoundsInner)) { - if (position == null || position.last || position.minX == 0 && position.minY == 0) { - if (position == null || position.last) { - drawTimeAfter.add(cell); - } - if (position == null || (position.minX == 0 && position.minY == 0 && cell.hasNameLayout())) { - drawNamesAfter.add(cell); - } - } - if (position == null || (position.flags & MessageObject.POSITION_FLAG_BOTTOM) != 0) { - drawCaptionAfter.add(cell); - } + Collections.sort(ids, Collections.reverseOrder()); + } + for (int b = 0; b < ids.size(); b++) { + Integer messageId = ids.get(b); + MessageObject messageObject = selectedMessagesCanCopyIds[a].get(messageId); + if (b == 0 && NaConfig.INSTANCE.getCombineMessage().Int() == 0) { + replyTo = messageObject.replyMessageObject; } - if (scrimViewReaction != null && cell != null) { - scrimPaint.setAlpha((int) (255 * scrimPaintAlpha * scrimViewAlpha)); - canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); - - if (viewClipTop < viewClipBottom) { - float alpha = child.getAlpha() * scrimViewAlpha; - if (alpha < 1f) { - canvas.saveLayerAlpha(viewClipLeft, viewClipTop, viewClipRight, viewClipBottom, (int) (255 * alpha), Canvas.ALL_SAVE_FLAG); + if (str.length() != 0) { + if 
(!suffice_en.contains(str.charAt(str.length() - 1)) && !suffice_zh.contains(str.charAt(str.length() - 1))) { + // add comma refer to language + if (LocaleController.getInstance().getCurrentLocale().getLanguage().equals("zh")) { + str.append(','); } else { - canvas.save(); + str.append(','); } - canvas.clipRect(viewClipLeft, viewClipTop, viewClipRight, viewClipBottom); - canvas.translate(chatListView.getLeft() + child.getX(), chatListView.getY() + child.getY()); - cell.drawScrimReaction(canvas, scrimViewReaction); - canvas.restore(); } } - } - - int size = drawTimeAfter.size(); - if (size > 0) { - for (int a = 0; a < size; a++) { - drawChildElement(canvas, listTop, drawTimeAfter.get(a), 0); + str.append(messageObject.messageText); + if (messageObject.getSenderId() == UserConfig.getInstance(currentAccount).getClientUserId()) { + toDeleteMessagesIds.add(messageId); } - drawTimeAfter.clear(); } - size = drawNamesAfter.size(); - if (size > 0) { - for (int a = 0; a < size; a++) { - drawChildElement(canvas, listTop, drawNamesAfter.get(a), 1); + } + if (str.length() != 0) { + SendMessagesHelper.getInstance(currentAccount) + .sendMessage(str.toString(), dialog_id, replyTo, getThreadMessage(), null, false, null, null, null, true, 0, null, false); + MessagesController.getInstance(currentAccount).deleteMessages(toDeleteMessagesIds, null, null, dialog_id, true, false); + } + clearSelectionMode(); + } else if (id == delete) { + if (getParentActivity() == null) { + return; + } + createDeleteMessagesAlert(null, null); + } else if (id == forward) { + openForward(true); + } else if (id == save_to) { + ArrayList messageObjects = new ArrayList<>(); + for (int a = 1; a >= 0; a--) { + for (int b = 0; b < selectedMessagesIds[a].size(); b++) { + messageObjects.add(selectedMessagesIds[a].valueAt(b)); + } + selectedMessagesIds[a].clear(); + selectedMessagesCanCopyIds[a].clear(); + selectedMessagesCanStarIds[a].clear(); + } + boolean isMusic = canSaveMusicCount > 0; + hideActionMode(); + updatePinnedMessageView(true); + updateVisibleRows(); + MediaController.saveFilesFromMessages(getParentActivity(), getAccountInstance(), messageObjects, (count) -> { + if (count > 0) { + if (getParentActivity() == null) { + return; } - drawNamesAfter.clear(); + BulletinFactory.of(ChatActivity.this).createDownloadBulletin(isMusic ? 
BulletinFactory.FileType.AUDIOS : BulletinFactory.FileType.UNKNOWNS, count, themeDelegate).show(); } - size = drawCaptionAfter.size(); - if (size > 0) { - for (int a = 0; a < size; a++) { - ChatMessageCell cell = drawCaptionAfter.get(a); - if (cell.getCurrentPosition() == null && !cell.getTransitionParams().animateBackgroundBoundsInner) { - continue; + }); + } else if (id == chat_enc_timer) { + if (getParentActivity() == null) { + return; + } + showDialog(AlertsCreator.createTTLAlert(getParentActivity(), currentEncryptedChat, themeDelegate).create()); + } else if (id == clear_history || id == delete_chat || id == auto_delete_timer) { + if (getParentActivity() == null) { + return; + } + boolean canDeleteHistory = chatInfo != null && chatInfo.can_delete_channel; + if (id == auto_delete_timer || id == clear_history && currentEncryptedChat == null && ((currentUser != null && !UserObject.isUserSelf(currentUser) && !UserObject.isDeleted(currentUser)) || (chatInfo != null && chatInfo.can_delete_channel))) { + AlertsCreator.createClearDaysDialogAlert(ChatActivity.this, -1, currentUser, currentChat, canDeleteHistory, new MessagesStorage.BooleanCallback() { + @Override + public void run(boolean revoke) { + if (revoke && (currentUser != null || canDeleteHistory)) { + getMessagesStorage().getMessagesCount(dialog_id, (count) -> { + if (count >= 50) { + AlertsCreator.createClearOrDeleteDialogAlert(ChatActivity.this, true, false, true, currentChat, currentUser, false, false, canDeleteHistory, (param) -> performHistoryClear(true, canDeleteHistory), themeDelegate); + } else { + performHistoryClear(true, canDeleteHistory); + } + }); + } else { + performHistoryClear(revoke, canDeleteHistory); } - drawChildElement(canvas, listTop, cell, 2); } - drawCaptionAfter.clear(); + }, getResourceProvider()); + return; + } + AlertsCreator.createClearOrDeleteDialogAlert(ChatActivity.this, id == clear_history, currentChat, currentUser, currentEncryptedChat != null, true, canDeleteHistory, (param) -> { + if (id == clear_history && ChatObject.isChannel(currentChat) && (!currentChat.megagroup || ChatObject.isPublic(currentChat))) { + getMessagesController().deleteDialog(dialog_id, 2, param); + } else { + if (id != clear_history) { + getNotificationCenter().removeObserver(ChatActivity.this, NotificationCenter.closeChats); + getNotificationCenter().postNotificationName(NotificationCenter.closeChats); + finishFragment(); + getNotificationCenter().postNotificationName(NotificationCenter.needDeleteDialog, dialog_id, currentUser, currentChat, param); + } else { + performHistoryClear(param, canDeleteHistory); + } } + }, themeDelegate); + } else if (id == share_contact) { + if (currentUser == null || getParentActivity() == null) { + return; } - - if (scrimViewReaction == null && scrimViewAlpha < 1f) { - scrimPaint.setAlpha((int) (255 * scrimPaintAlpha * (1f - scrimViewAlpha))); - canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); + if (addToContactsButton != null && addToContactsButton.getTag() != null) { + shareMyContact((Integer) addToContactsButton.getTag(), null); + } else { + Bundle args = new Bundle(); + args.putLong("user_id", currentUser.id); + args.putBoolean("addContact", true); + presentFragment(new ContactAddActivity(args)); } - } - - if (scrimView != null || messageEnterTransitionContainer.isRunning()) { - if (mentionContainer == null || mentionContainer.getVisibility() != View.VISIBLE) { - if (pagedownButton != null && pagedownButton.getTag() != null) { - super.drawChild(canvas, pagedownButton, 
SystemClock.uptimeMillis()); + } else if (id == mute) { + toggleMute(false); + } else if (id == add_shortcut) { + try { + getMediaDataController().installShortcut(currentUser.id); + } catch (Exception e) { + FileLog.e(e); + } + } else if (id == to_the_beginning) { + scrollToMessageId(1, 0, false, 0, true, 0); + } else if (id == to_the_message){ + setScrollToMessage(); + } else if (id == report) { + AlertsCreator.createReportAlert(getParentActivity(), dialog_id, 0, ChatActivity.this, themeDelegate, null); + } else if (id == star) { + for (int a = 0; a < 2; a++) { + for (int b = 0; b < selectedMessagesCanStarIds[a].size(); b++) { + MessageObject msg = selectedMessagesCanStarIds[a].valueAt(b); + getMediaDataController().addRecentSticker(MediaDataController.TYPE_FAVE, msg, msg.getDocument(), (int) (System.currentTimeMillis() / 1000), !hasUnfavedSelected); } - if (mentiondownButton != null && mentiondownButton.getTag() != null) { - super.drawChild(canvas, mentiondownButton, SystemClock.uptimeMillis()); + } + clearSelectionMode(); + } else if (id == edit) { + MessageObject messageObject = null; + for (int a = 1; a >= 0; a--) { + if (messageObject == null && selectedMessagesIds[a].size() == 1) { + ArrayList ids = new ArrayList<>(); + for (int b = 0; b < selectedMessagesIds[a].size(); b++) { + ids.add(selectedMessagesIds[a].keyAt(b)); + } + messageObject = messagesDict[a].get(ids.get(0)); } - if (reactionsMentiondownButton != null && reactionsMentiondownButton.getTag() != null) { - super.drawChild(canvas, reactionsMentiondownButton, SystemClock.uptimeMillis()); + selectedMessagesIds[a].clear(); + selectedMessagesCanCopyIds[a].clear(); + selectedMessagesCanStarIds[a].clear(); + } + startEditingMessageObject(messageObject); + hideActionMode(); + updatePinnedMessageView(true); + updateVisibleRows(); + } else if (id == chat_menu_attach) { + ActionBarMenuSubItem attach = new ActionBarMenuSubItem(context, false, true, true, getResourceProvider()); + attach.setTextAndIcon(LocaleController.getString("AttachMenu", R.string.AttachMenu), R.drawable.input_attach); + attach.setOnClickListener(view -> { + headerItem.closeSubMenu(); + if (chatAttachAlert != null) { + chatAttachAlert.setEditingMessageObject(null); } + openAttachMenu(); + }); + headerItem.toggleSubMenu(attach, attachItem.createView()); + } else if (id == bot_help) { + getSendMessagesHelper().sendMessage("/help", dialog_id, null, null, null, false, null, null, null, true, 0, null, false); + } else if (id == bot_settings) { + getSendMessagesHelper().sendMessage("/settings", dialog_id, null, null, null, false, null, null, null, true, 0, null, false); + } else if (id == search) { + openSearchWithText(null); + } else if (id == translate) { + getMessagesController().getTranslateController().setHideTranslateDialog(getDialogId(), false, true); + if (!getMessagesController().getTranslateController().toggleTranslatingDialog(getDialogId(), true)) { + updateTopPanel(true); + } + } else if (id == call || id == video_call) { + if (currentUser != null && getParentActivity() != null) { + VoIPHelper.startCall(currentUser, id == video_call, userInfo != null && userInfo.video_calls_available, getParentActivity(), getMessagesController().getUserFull(currentUser.id), getAccountInstance()); + } + } else if (id == text_bold) { + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { + chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); + chatActivityEnterView.getEditField().makeSelectedBold(); } - if 
(floatingDateView != null && floatingDateView.getTag() != null) { - super.drawChild(canvas, floatingDateView, SystemClock.uptimeMillis()); + } else if (id == text_italic) { + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { + chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); + chatActivityEnterView.getEditField().makeSelectedItalic(); } - if (fireworksOverlay != null) { - super.drawChild(canvas, fireworksOverlay, SystemClock.uptimeMillis()); + } else if (id == text_spoiler) { + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { + chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); + chatActivityEnterView.getEditField().makeSelectedSpoiler(); } - if (gifHintTextView != null) { - super.drawChild(canvas, gifHintTextView, SystemClock.uptimeMillis()); + } else if (id == text_mono) { + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { + chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); + chatActivityEnterView.getEditField().makeSelectedMono(); } - if (emojiHintTextView != null) { - super.drawChild(canvas, emojiHintTextView, SystemClock.uptimeMillis()); + } else if (id == text_strike) { + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { + chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); + chatActivityEnterView.getEditField().makeSelectedStrike(); } - if (undoView != null && undoView.getVisibility() == View.VISIBLE) { - super.drawChild(canvas, undoView, SystemClock.uptimeMillis()); + } else if (id == text_underline) { + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { + chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); + chatActivityEnterView.getEditField().makeSelectedUnderline(); } - if (topUndoView != null && undoView.getVisibility() == View.VISIBLE) { - super.drawChild(canvas, topUndoView, SystemClock.uptimeMillis()); + } else if (id == text_link) { + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { + chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); + chatActivityEnterView.getEditField().makeSelectedUrl(); } - } - - if (fixedKeyboardHeight > 0 && keyboardHeight < AndroidUtilities.dp(20)) { - int color = getThemedColor(Theme.key_windowBackgroundWhite); - if (backgroundPaint == null) { - backgroundPaint = new Paint(); + } else if (id == text_regular) { + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { + chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); + chatActivityEnterView.getEditField().makeSelectedRegular(); } - if (backgroundColor != color) { - backgroundPaint.setColor(backgroundColor = color); + } else if (id == text_mention) { + if (chatActivityEnterView != null) { + chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); + chatActivityEnterView.getEditField().makeSelectedMention(); } - canvas.drawRect(0, getMeasuredHeight() - fixedKeyboardHeight, getMeasuredWidth(), getMeasuredHeight(), backgroundPaint); - } - if (pullingDownDrawable != null && pullingDownDrawable.needDrawBottomPanel()) { - int top, bottom; - if (chatActivityEnterView != null && chatActivityEnterView.getVisibility() == View.VISIBLE) { - top = chatActivityEnterView.getTop() + 
AndroidUtilities.dp2(2); - bottom = chatActivityEnterView.getBottom(); - } else { - top = bottomOverlayChat.getTop() + AndroidUtilities.dp2(2); - bottom = bottomOverlayChat.getBottom(); + } else if (id == text_transalte) { + if (chatActivityEnterView != null) { + chatActivityEnterView.getEditField().setSelectionOverride(editTextStart, editTextEnd); + chatActivityEnterView.getEditField().makeSelectedTranslate(); } - top -= (int) ((pullingDownAnimateToActivity == null ? 0 : pullingDownAnimateToActivity.pullingBottomOffset) * pullingDownAnimateProgress); - pullingDownDrawable.drawBottomPanel(canvas, top, bottom, getMeasuredWidth()); - } - if (pullingDownAnimateToActivity != null) { - canvas.saveLayerAlpha(0, 0, getMeasuredWidth(), getMeasuredHeight(), (int) (255 * pullingDownAnimateProgress), Canvas.ALL_SAVE_FLAG); - pullingDownAnimateToActivity.fragmentView.draw(canvas); - canvas.restore(); - } - - emojiAnimationsOverlay.draw(canvas); - - if (restoreToCount >= 0) { - canvas.restore(); - } - if (switchingFromTopics) { - canvas.save(); - canvas.translate(actionBar.getX(), actionBar.getY()); - canvas.saveLayerAlpha(0, 0, actionBar.getWidth(), actionBar.getHeight(), (int) (255 * switchingFromTopicsProgress), Canvas.ALL_SAVE_FLAG); - actionBar.draw(canvas); - canvas.restore(); - canvas.restore(); + } else if (id == change_colors) { + showChatThemeBottomSheet(); + } else if (id == topic_close) { + getMessagesController().getTopicsController().toggleCloseTopic(currentChat.id, forumTopic.id, forumTopic.closed = true); + updateTopicButtons(); + updateBottomOverlay(); + updateTopPanel(true); + } else if (id == open_forum) { + TopicsFragment.prepareToSwitchAnimation(ChatActivity.this); +// Bundle bundle = new Bundle(); +// bundle.putLong("chat_id", -dialog_id); +// presentFragment(new TopicsFragment(bundle)); + } else { + nkbtn_onclick_actionbar(id); } } - + }); + View backButton = actionBar.getBackButton(); + backButton.setOnTouchListener(new LongPressListenerWithMovingGesture() { @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - int allHeight; - int widthSize = MeasureSpec.getSize(widthMeasureSpec); - int heightSize = allHeight = MeasureSpec.getSize(heightMeasureSpec); - - if (lastWidth != widthSize) { - globalIgnoreLayout = true; - lastWidth = widthMeasureSpec; - if (!inPreviewMode && currentUser != null && currentUser.self) { - SimpleTextView textView = avatarContainer.getTitleTextView(); - int textWidth = (int) textView.getPaint().measureText(textView.getText(), 0, textView.getText().length()); - if (widthSize - AndroidUtilities.dp(96 + 56) > textWidth + AndroidUtilities.dp(10)) { - showSearchAsIcon = !showAudioCallAsIcon; + public void onLongPress() { + scrimPopupWindow = BackButtonMenu.show(ChatActivity.this, backButton, dialog_id, getTopicId(), themeDelegate); + if (scrimPopupWindow != null) { + setSubmenu(scrimPopupWindow); + scrimPopupWindow.setOnDismissListener(() -> { + setSubmenu(null); + scrimPopupWindow = null; + menuDeleteItem = null; + scrimPopupWindowItems = null; + chatLayoutManager.setCanScrollVertically(true); + if (scrimPopupWindowHideDimOnDismiss) { + dimBehindView(false); } else { - showSearchAsIcon = false; - } - } else { - showSearchAsIcon = false; - } - if (showSearchAsIcon || showAudioCallAsIcon) { - if (avatarContainer != null && avatarContainer.getLayoutParams() != null) { - ((MarginLayoutParams) avatarContainer.getLayoutParams()).rightMargin = AndroidUtilities.dp(96); - } - } else { - if (avatarContainer != null && 
avatarContainer.getLayoutParams() != null) { - ((MarginLayoutParams) avatarContainer.getLayoutParams()).rightMargin = AndroidUtilities.dp(40); - } - } - if (showSearchAsIcon) { - if (!actionBar.isSearchFieldVisible() && searchIconItem != null) { - searchIconItem.setVisibility(View.VISIBLE); - } - if (headerItem != null) { - headerItem.hideSubItem(search); - } - } else { - if (headerItem != null) { - headerItem.showSubItem(search); + scrimPopupWindowHideDimOnDismiss = true; } - if (searchIconItem != null) { - searchIconItem.setVisibility(View.GONE); + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { + chatActivityEnterView.getEditField().setAllowDrawCursor(true); } + }); + chatListView.stopScroll(); + chatLayoutManager.setCanScrollVertically(false); + dimBehindView(backButton, 0.3f); + hideHints(false); + if (topUndoView != null) { + topUndoView.hide(true, 1); } - if (!actionBar.isSearchFieldVisible() && audioCallIconItem != null) { - audioCallIconItem.setVisibility((showAudioCallAsIcon && !showSearchAsIcon) ? View.VISIBLE : View.GONE); + if (undoView != null) { + undoView.hide(true, 1); } - if (headerItem != null) { - TLRPC.UserFull userInfo = getCurrentUserInfo(); -// if (showAudioCallAsIcon) { -// headerItem.hideSubItem(call); - if (userInfo != null && userInfo.phone_calls_available) { - headerItem.showSubItem(call, true); - } + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { + chatActivityEnterView.getEditField().setAllowDrawCursor(false); } - globalIgnoreLayout = false; } + } + }); + actionBar.setInterceptTouchEventListener((view, motionEvent) -> { + if (chatThemeBottomSheet != null) { + chatThemeBottomSheet.close(); + return true; + } + return false; + }); + + if (avatarContainer != null) { + avatarContainer.onDestroy(); + } + avatarContainer = new ChatAvatarContainer(context, this, currentEncryptedChat != null, themeDelegate); + avatarContainer.allowShorterStatus = true; + avatarContainer.premiumIconHiddable = true; + AndroidUtilities.updateViewVisibilityAnimated(avatarContainer, true, 1f, false); + updateTopicTitleIcon(); + if (inPreviewMode || inBubbleMode) { + avatarContainer.setOccupyStatusBar(false); + } + if (reportType >= 0) { + if (reportType == AlertsCreator.REPORT_TYPE_SPAM) { + actionBar.setTitle(LocaleController.getString("ReportChatSpam", R.string.ReportChatSpam)); + } else if (reportType == AlertsCreator.REPORT_TYPE_VIOLENCE) { + actionBar.setTitle(LocaleController.getString("ReportChatViolence", R.string.ReportChatViolence)); + } else if (reportType == AlertsCreator.REPORT_TYPE_CHILD_ABUSE) { + actionBar.setTitle(LocaleController.getString("ReportChatChild", R.string.ReportChatChild)); + } else if (reportType == AlertsCreator.REPORT_TYPE_PORNOGRAPHY) { + actionBar.setTitle(LocaleController.getString("ReportChatPornography", R.string.ReportChatPornography)); + } else if (reportType == AlertsCreator.REPORT_TYPE_ILLEGAL_DRUGS) { + actionBar.setTitle(LocaleController.getString("ReportChatIllegalDrugs", R.string.ReportChatIllegalDrugs)); + } else if (reportType == AlertsCreator.REPORT_TYPE_PERSONAL_DETAILS) { + actionBar.setTitle(LocaleController.getString("ReportChatPersonalDetails", R.string.ReportChatPersonalDetails)); + } + actionBar.setSubtitle(LocaleController.getString("ReportSelectMessages", R.string.ReportSelectMessages)); + } else if (startLoadFromDate != 0) { + final int date = startLoadFromDate; + actionBar.setOnClickListener((v) -> { + jumpToDate(date); + }); + 
actionBar.setTitle(LocaleController.formatDateChat(startLoadFromDate, false)); + actionBar.setSubtitle(LocaleController.getString("Loading", R.string.Loading)); + + TLRPC.TL_messages_getHistory gh1 = new TLRPC.TL_messages_getHistory(); + gh1.peer = getMessagesController().getInputPeer(dialog_id); + gh1.offset_date = startLoadFromDate; + gh1.limit = 1; + gh1.add_offset = -1; + + int req = getConnectionsManager().sendRequest(gh1, (response, error) -> { + if (response instanceof TLRPC.messages_Messages) { + List l = ((TLRPC.messages_Messages) response).messages; + if (!l.isEmpty()) { - setMeasuredDimension(widthSize, heightSize); - heightSize -= getPaddingTop(); + TLRPC.TL_messages_getHistory gh2 = new TLRPC.TL_messages_getHistory(); + gh2.peer = getMessagesController().getInputPeer(dialog_id); + gh2.offset_date = startLoadFromDate + 60 * 60 * 24; + gh2.limit = 1; - measureChildWithMargins(actionBar, widthMeasureSpec, 0, heightMeasureSpec, 0); - int actionBarHeight = actionBar.getMeasuredHeight(); - if (actionBar.getVisibility() == VISIBLE) { - heightSize -= actionBarHeight; + getConnectionsManager().sendRequest(gh2, (response1, error1) -> { + if (response1 instanceof TLRPC.messages_Messages) { + List l2 = ((TLRPC.messages_Messages) response1).messages; + int count = 0; + if (!l2.isEmpty()) { + count = ((TLRPC.messages_Messages) response).offset_id_offset - ((TLRPC.messages_Messages) response1).offset_id_offset; + } else { + count = ((TLRPC.messages_Messages) response).offset_id_offset; + } + int finalCount = count; + AndroidUtilities.runOnUIThread(() -> { + if (finalCount != 0) { + AndroidUtilities.runOnUIThread(() -> actionBar.setSubtitle(LocaleController.formatPluralString("messages", finalCount))); + } else { + actionBar.setSubtitle(LocaleController.getString("NoMessagesForThisDay", R.string.NoMessagesForThisDay)); + } + }); + } + }); + } else { + actionBar.setSubtitle(LocaleController.getString("NoMessagesForThisDay", R.string.NoMessagesForThisDay)); + } } - int keyboardHeightOld = keyboardHeight + chatEmojiViewPadding; - boolean keyboardVisibleOld = keyboardHeight + chatEmojiViewPadding >= AndroidUtilities.dp(20); - if (lastHeight != allHeight) { - measureKeyboardHeight(); + }); + getConnectionsManager().bindRequestToGuid(req, classGuid); + } else { + actionBar.addView(avatarContainer, 0, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT, Gravity.TOP | Gravity.LEFT, !inPreviewMode ? 56 : (chatMode == MODE_PINNED ? 
10 : 0), 0, 40, 0)); + } + + ActionBarMenu menu = actionBar.createMenu(); + + if (isThreadChat() && threadMessageId != 0 && !isTopic) { + viewInChatItem = menu.addItem(nkbtn_view_in_chat, R.drawable.msg_viewreplies, themeDelegate); + } + + if (currentEncryptedChat == null && chatMode == 0 && reportType < 0) { + searchIconItem = menu.addItem(search, R.drawable.ic_ab_search); + searchIconItem.setContentDescription(LocaleController.getString("Search", R.string.Search)); + searchItem = menu.addItem(chat_menu_search, R.drawable.ic_ab_search, themeDelegate); + searchItem.setIsSearchField(true); + searchItem.setActionBarMenuItemSearchListener(getSearchItemListener()); + searchItem.setSearchFieldHint(LocaleController.getString("Search", R.string.Search)); + if (threadMessageId == 0 && !UserObject.isReplyUser(currentUser) || threadMessageObject != null && threadMessageObject.getRepliesCount() < 10) { + searchItem.setVisibility(View.GONE); + } else { + searchItem.setVisibility(View.VISIBLE); + } + searchItemVisible = false; + } + + if (chatMode == 0 && (threadMessageId == 0 || isTopic) && !UserObject.isReplyUser(currentUser) && reportType < 0) { + TLRPC.UserFull userFull = null; + if (currentUser != null) { +// audioCallIconItem = menu.lazilyAddItem(call, R.drawable.ic_call, themeDelegate); +// audioCallIconItem.setContentDescription(LocaleController.getString("Call", R.string.Call)); +// userFull = getMessagesController().getUserFull(currentUser.id); +// if (userFull != null && userFull.phone_calls_available) { +// showAudioCallAsIcon = !inPreviewMode; +// audioCallIconItem.setVisibility(View.VISIBLE); +// } else { +// showAudioCallAsIcon = false; +// audioCallIconItem.setVisibility(View.GONE); +// } + showAudioCallAsIcon = false; + if (avatarContainer != null) { + avatarContainer.setTitleExpand(showAudioCallAsIcon); } - int keyboardSize = getKeyboardHeight(); - if (fixedKeyboardHeight > 0 && keyboardSize <= AndroidUtilities.dp(20)) { - chatEmojiViewPadding = fixedKeyboardHeight; - } else { - if (keyboardSize <= AndroidUtilities.dp(20)) { - chatEmojiViewPadding = chatActivityEnterView.isPopupShowing() ? 
chatActivityEnterView.getEmojiPadding() : 0; - } else { - chatEmojiViewPadding = 0; + } + } + + editTextItem = menu.lazilyAddItem(chat_menu_edit_text_options, R.drawable.ic_ab_other, themeDelegate); + editTextItem.setContentDescription(LocaleController.getString("AccDescrMoreOptions", R.string.AccDescrMoreOptions)); + editTextItem.setTag(null); + editTextItem.setVisibility(View.GONE); + + if (chatMode == 0 && (threadMessageId == 0 || isTopic) && !UserObject.isReplyUser(currentUser) && reportType < 0) { + TLRPC.UserFull userFull = null; + if (currentUser != null) { + userFull = getMessagesController().getUserFull(currentUser.id); + } + headerItem = menu.addItem(chat_menu_options, R.drawable.ic_ab_other, themeDelegate); + headerItem.setContentDescription(LocaleController.getString("AccDescrMoreOptions", R.string.AccDescrMoreOptions)); + + if (currentUser == null || !currentUser.self) { + chatNotificationsPopupWrapper = new ChatNotificationsPopupWrapper(context, currentAccount, headerItem.getPopupLayout().getSwipeBack(), false, false, new ChatNotificationsPopupWrapper.Callback() { + @Override + public void dismiss() { + headerItem.toggleSubMenu(); + } + + @Override + public void toggleSound() { + SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); + boolean enabled = !preferences.getBoolean("sound_enabled_" + NotificationsController.getSharedPrefKey(dialog_id, getTopicId()), true); + preferences.edit().putBoolean("sound_enabled_" + NotificationsController.getSharedPrefKey(dialog_id, getTopicId()), enabled).apply(); + if (BulletinFactory.canShowBulletin(ChatActivity.this)) { + BulletinFactory.createSoundEnabledBulletin(ChatActivity.this, enabled ? NotificationsController.SETTING_SOUND_ON : NotificationsController.SETTING_SOUND_OFF, getResourceProvider()).show(); + } + updateTitleIcons(); } - } - setEmojiKeyboardHeight(chatEmojiViewPadding); - boolean keyboardVisible = keyboardHeight + chatEmojiViewPadding >= AndroidUtilities.dp(20); - boolean waitingChatListItemAnimator = false; - if (MediaController.getInstance().getPlayingMessageObject() != null && MediaController.getInstance().getPlayingMessageObject().isRoundVideo() && keyboardVisibleOld != keyboardVisible) { - for (int i = 0; i < chatListView.getChildCount(); i++) { - View child = chatListView.getChildAt(i); - if (child instanceof ChatMessageCell) { - ChatMessageCell cell = (ChatMessageCell) child; - MessageObject messageObject = cell.getMessageObject(); - if (messageObject.isRoundVideo() && MediaController.getInstance().isPlayingMessage(messageObject)) { - int p = chatListView.getChildAdapterPosition(child); - if (p >= 0) { - chatLayoutManager.scrollToPositionWithOffset(p, (int) ((chatListView.getMeasuredHeight() - chatListViewPaddingTop - blurredViewBottomOffset + (keyboardHeight + chatEmojiViewPadding - keyboardHeightOld) - (keyboardVisible ? 
AndroidUtilities.roundMessageSize : AndroidUtilities.roundPlayingMessageSize)) / 2), false); - chatAdapter.notifyItemChanged(p); - adjustPanLayoutHelper.delayAnimation(); - waitingChatListItemAnimator = true; - break; - } + @Override + public void muteFor(int timeInSeconds) { + if (timeInSeconds == 0) { + if (getMessagesController().isDialogMuted(dialog_id, getTopicId())) { + ChatActivity.this.toggleMute(true); + } + if (BulletinFactory.canShowBulletin(ChatActivity.this)) { + BulletinFactory.createMuteBulletin(ChatActivity.this, NotificationsController.SETTING_MUTE_UNMUTE, timeInSeconds, getResourceProvider()).show(); + } + } else { + getNotificationsController().muteUntil(dialog_id, getTopicId(), timeInSeconds); + if (BulletinFactory.canShowBulletin(ChatActivity.this)) { + BulletinFactory.createMuteBulletin(ChatActivity.this, NotificationsController.SETTING_MUTE_CUSTOM, timeInSeconds, getResourceProvider()).show(); } } } - } - - if (!waitingChatListItemAnimator) { - chatActivityEnterView.runEmojiPanelAnimation(); - } - - int childCount = getChildCount(); - measureChildWithMargins(chatActivityEnterView, widthMeasureSpec, 0, heightMeasureSpec, 0); - - int listViewTopHeight; - if (inPreviewMode) { - inputFieldHeight = 0; - listViewTopHeight = 0; - } else { - inputFieldHeight = chatActivityEnterView.getMeasuredHeight(); - listViewTopHeight = AndroidUtilities.dp(49); - } - - blurredViewTopOffset = 0; - blurredViewBottomOffset = 0; - if (SharedConfig.chatBlurEnabled()) { - blurredViewTopOffset = actionBarHeight; - blurredViewBottomOffset = AndroidUtilities.dp(203); - } - for (int i = 0; i < childCount; i++) { - View child = getChildAt(i); - if (child == null || child.getVisibility() == GONE || child == chatActivityEnterView || child == actionBar) { - continue; - } - if (child == backgroundView) { - int contentWidthSpec = MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY); - int contentHeightSpec = MeasureSpec.makeMeasureSpec(allHeight, MeasureSpec.EXACTLY); - child.measure(contentWidthSpec, contentHeightSpec); - } else if (child == blurredView) { - int h = allHeight; - if (keyboardSize > AndroidUtilities.dp(20) && getLayoutParams().height < 0) { - h += keyboardSize; - } - int contentWidthSpec = MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY); - int contentHeightSpec = MeasureSpec.makeMeasureSpec(h, MeasureSpec.EXACTLY); - child.measure(contentWidthSpec, contentHeightSpec); - } else if (child == chatListView) { - int contentWidthSpec = MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY); - int h = heightSize - listViewTopHeight - (inPreviewMode && Build.VERSION.SDK_INT >= 21 ? AndroidUtilities.statusBarHeight : 0) + blurredViewTopOffset + blurredViewBottomOffset; - if (keyboardSize > AndroidUtilities.dp(20) && getLayoutParams().height < 0) { - h += keyboardSize; - } - int contentHeightSpec = MeasureSpec.makeMeasureSpec(Math.max(AndroidUtilities.dp(10), h), MeasureSpec.EXACTLY); - child.measure(contentWidthSpec, contentHeightSpec); - } else if (child == progressView) { - int contentWidthSpec = MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY); - int contentHeightSpec = MeasureSpec.makeMeasureSpec(Math.max(AndroidUtilities.dp(10), heightSize - inputFieldHeight - (inPreviewMode && Build.VERSION.SDK_INT >= 21 ? AndroidUtilities.statusBarHeight : 0) + AndroidUtilities.dp(2 + (chatActivityEnterView.isTopViewVisible() ? 
48 : 0))), MeasureSpec.EXACTLY); - child.measure(contentWidthSpec, contentHeightSpec); - } else if (child == instantCameraView || child == overlayView) { - int contentWidthSpec = MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY); - int contentHeightSpec = MeasureSpec.makeMeasureSpec(allHeight - inputFieldHeight - chatEmojiViewPadding + AndroidUtilities.dp(3), MeasureSpec.EXACTLY); - child.measure(contentWidthSpec, contentHeightSpec); - } else if (child == emptyViewContainer) { - int contentWidthSpec = MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY); - int contentHeightSpec = MeasureSpec.makeMeasureSpec(heightSize, MeasureSpec.EXACTLY); - child.measure(contentWidthSpec, contentHeightSpec); - } else if (child == messagesSearchListView) { - int contentWidthSpec = MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY); - int contentHeightSpec = MeasureSpec.makeMeasureSpec(allHeight - actionBarHeight - AndroidUtilities.dp(48), MeasureSpec.EXACTLY); - child.measure(contentWidthSpec, contentHeightSpec); - } else if (chatActivityEnterView.isPopupView(child)) { - if (inBubbleMode) { - child.measure(MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(heightSize - inputFieldHeight + actionBarHeight + getPaddingTop(), MeasureSpec.EXACTLY)); - } else if (AndroidUtilities.isInMultiwindow) { - if (AndroidUtilities.isTablet()) { - child.measure(MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(Math.min(AndroidUtilities.dp(320), heightSize - inputFieldHeight + actionBarHeight - AndroidUtilities.statusBarHeight + getPaddingTop()), MeasureSpec.EXACTLY)); - } else { - child.measure(MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(heightSize - inputFieldHeight + actionBarHeight - AndroidUtilities.statusBarHeight + getPaddingTop(), MeasureSpec.EXACTLY)); + @Override + public void showCustomize() { + if (dialog_id != 0) { + if (currentUser != null) { + getMessagesController().putUser(currentUser, true); } - } else { - child.measure(MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(child.getLayoutParams().height, MeasureSpec.EXACTLY)); - } - } else if (child == mentionContainer) { - FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) mentionContainer.getLayoutParams(); - if (mentionContainer.getAdapter().isBannedInline()) { - child.measure(MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(heightSize, MeasureSpec.AT_MOST)); - } else { - int height; - mentionContainer.setIgnoreLayout(true); - LinearLayoutManager layoutManager = mentionContainer.getCurrentLayoutManager(); - if (layoutManager instanceof ExtendedGridLayoutManager) { - int size = ((ExtendedGridLayoutManager) layoutManager).getRowsCount(widthSize); - int maxHeight = size * 102; - if (mentionContainer.getAdapter().isBotContext()) { - if (mentionContainer.getAdapter().getBotContextSwitch() != null) { - maxHeight += 34; - } - } - height = heightSize - chatActivityEnterView.getMeasuredHeight() + (maxHeight != 0 ? 
AndroidUtilities.dp(2) : 0); - int padding = Math.max(0, height - AndroidUtilities.dp(Math.min(maxHeight, 68 * 1.8f))); - } else { - int size = mentionContainer.getAdapter().getLastItemCount(); - int maxHeight = 0; - if (mentionContainer.getAdapter().isBotContext()) { - if (mentionContainer.getAdapter().getBotContextSwitch() != null) { - maxHeight += 36; - size -= 1; - } - maxHeight += size * 68; - } else { - maxHeight += size * 36; - } - height = heightSize - chatActivityEnterView.getMeasuredHeight() + (maxHeight != 0 ? AndroidUtilities.dp(2) : 0); + Bundle args = new Bundle(); + args.putLong("dialog_id", dialog_id); + if (getTopicId() != 0) { + args.putInt("topic_id", getTopicId()); } - - layoutParams.height = height; - layoutParams.topMargin = 0; - - mentionContainer.setIgnoreLayout(false); - child.measure(MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(layoutParams.height, MeasureSpec.EXACTLY)); - } - mentionContainer.setTranslationY(chatActivityEnterView.getAnimatedTop()); - } else if (child == textSelectionHelper.getOverlayView(context)) { - int contentWidthSpec = MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY); - int h = heightSize + blurredViewTopOffset; - if (keyboardSize > AndroidUtilities.dp(20) && getLayoutParams().height < 0) { - h += keyboardSize; - textSelectionHelper.setKeyboardSize(keyboardSize); - } else { - textSelectionHelper.setKeyboardSize(0); + presentFragment(new ProfileNotificationsActivity(args, themeDelegate)); } - child.measure(contentWidthSpec, MeasureSpec.makeMeasureSpec(h, MeasureSpec.EXACTLY)); - } else if (child == forwardingPreviewView) { - int contentWidthSpec = MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY); - int h = allHeight - AndroidUtilities.statusBarHeight; - if (keyboardSize > AndroidUtilities.dp(20) && getLayoutParams().height < 0) { - h += keyboardSize; + } + + @Override + public void toggleMute() { + ChatActivity.this.toggleMute(true); + BulletinFactory.createMuteBulletin(ChatActivity.this, getMessagesController().isDialogMuted(dialog_id, getTopicId()), themeDelegate).show(); + } + }, getResourceProvider()); + muteItem = headerItem.lazilyAddSwipeBackItem(R.drawable.msg_mute, null, null, chatNotificationsPopupWrapper.windowLayout); + muteItem.setOnClickListener(view -> { + boolean muted = MessagesController.getInstance(currentAccount).isDialogMuted(dialog_id, getTopicId()); + if (muted) { + updateTitleIcons(true); + AndroidUtilities.runOnUIThread(() -> { + ChatActivity.this.toggleMute(true); + }, 150); + headerItem.toggleSubMenu(); + if (ChatActivity.this.getParentActivity() != null) { + BulletinFactory.createMuteBulletin(ChatActivity.this, false, themeDelegate).show(); } - int contentHeightSpec = MeasureSpec.makeMeasureSpec(h, MeasureSpec.EXACTLY); - child.measure(contentWidthSpec, contentHeightSpec); } else { - measureChildWithMargins(child, widthMeasureSpec, 0, heightMeasureSpec, 0); + muteItem.openSwipeBack(); } + }); + muteItemGap = headerItem.lazilyAddColoredGap(); + } + if (currentUser != null) { + headerItem.lazilyAddSubItem(call, R.drawable.msg_callback, LocaleController.getString("Call", R.string.Call)); + if (Build.VERSION.SDK_INT >= 18) { + headerItem.lazilyAddSubItem(video_call, R.drawable.msg_videocall, LocaleController.getString("VideoCall", R.string.VideoCall)); } - if (fixPaddingsInLayout) { - globalIgnoreLayout = true; - invalidateChatListViewTopPadding(); - invalidateMessagesVisiblePart(); - fixPaddingsInLayout = false; - 
chatListView.measure(MeasureSpec.makeMeasureSpec(chatListView.getMeasuredWidth(), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(chatListView.getMeasuredHeight(), MeasureSpec.EXACTLY)); - globalIgnoreLayout = false; - } - if (scrollToPositionOnRecreate != -1) { - final int scrollTo = scrollToPositionOnRecreate; - AndroidUtilities.runOnUIThread(() -> chatLayoutManager.scrollToPositionWithOffset(scrollTo, scrollToOffsetOnRecreate)); - scrollToPositionOnRecreate = -1; + if (userFull != null && userFull.phone_calls_available) { + headerItem.showSubItem(call); + if (userFull.video_calls_available) { + headerItem.showSubItem(video_call); + } else { + headerItem.hideSubItem(video_call); + } + } else { + headerItem.hideSubItem(call); + headerItem.hideSubItem(video_call); } - - updateBulletinLayout(); - - lastHeight = allHeight; } - @Override - public void requestLayout() { - if (globalIgnoreLayout) { - return; - } - super.requestLayout(); + if (searchItem != null) { + headerItem.lazilyAddSubItem(search, R.drawable.msg_search, LocaleController.getString("Search", R.string.Search)); + } + boolean allowShowPinned; + if (currentChat != null) { + allowShowPinned = ChatObject.canUserDoAction(currentChat, ChatObject.ACTION_PIN) || ChatObject.isChannel(currentChat); + } else if (currentUser != null && currentUser.self) { + allowShowPinned = true; + } else if (userInfo != null) { + allowShowPinned = userInfo.can_pin_message; + } else { + allowShowPinned = false; + } + if (allowShowPinned) { + headerItem.lazilyAddSubItem(nkheaderbtn_show_pinned, R.drawable.msg_pin, LocaleController.getString("PinnedMessage", R.string.PinnedMessage)); + } + translateItem = headerItem.lazilyAddSubItem(translate, R.drawable.msg_translate, LocaleController.getString("TranslateMessage", R.string.TranslateMessage)); + updateTranslateItemVisibility(); + if (currentChat != null && !currentChat.creator && !ChatObject.hasAdminRights(currentChat)) { + headerItem.lazilyAddSubItem(report, R.drawable.msg_report, LocaleController.getString("ReportChat", R.string.ReportChat)); } - @Override - protected void onLayout(boolean changed, int l, int t, int r, int b) { - final int count = getChildCount(); - int keyboardSize = getKeyboardHeight(); - int paddingBottom; - - if (fixedKeyboardHeight > 0 && keyboardSize <= AndroidUtilities.dp(20)) { - paddingBottom = fixedKeyboardHeight; + if (currentChat != null && (currentChat.has_link || (chatInfo != null && chatInfo.linked_chat_id != 0))) { + String text; + if (!currentChat.megagroup) { + text = LocaleController.getString("LinkedGroupChat", R.string.LinkedGroupChat); } else { - paddingBottom = keyboardSize <= AndroidUtilities.dp(20) && !AndroidUtilities.isInMultiwindow && !inBubbleMode ? 
chatActivityEnterView.getEmojiPadding() : 0; - } - if (!SharedConfig.smoothKeyboard) { - setBottomClip(paddingBottom); + text = LocaleController.getString("LinkedChannelChat", R.string.LinkedChannelChat); } + headerItem.lazilyAddSubItem(nkheaderbtn_linked_chat, R.drawable.baseline_layers_24, text); + } - for (int i = 0; i < count; i++) { - final View child = getChildAt(i); - if (child == null || child.getVisibility() == GONE) { - continue; - } - final LayoutParams lp = (LayoutParams) child.getLayoutParams(); + if (currentUser != null) { + addContactItem = headerItem.lazilyAddSubItem(share_contact, R.drawable.msg_addcontact, ""); + } - final int width = child.getMeasuredWidth(); - final int height = child.getMeasuredHeight(); + shareKeyItem = headerItem.lazilyAddSubItem(nkheaderbtn_share_key, R.drawable.baseline_vpn_key_24, LocaleController.getString("ShareMyKey", R.string.ShareMyKey)); - int childLeft; - int childTop; + if (currentEncryptedChat != null) { + timeItem2 = headerItem.lazilyAddSubItem(chat_enc_timer, R.drawable.msg_autodelete, LocaleController.getString("SetTimer", R.string.SetTimer)); + } /*else if (currentChat == null && !currentUser.self || ChatObject.canUserDoAdminAction(currentChat, ChatObject.ACTION_DELETE_MESSAGES)) { + headerItem.addSubItem(auto_delete_timer, R.drawable.msg_timer, LocaleController.getString("AutoDeleteSetTimer", R.string.AutoDeleteSetTimer)); + }*/ - int gravity = lp.gravity; - if (gravity == -1) { - gravity = Gravity.TOP | Gravity.LEFT; - } + if (currentChat != null && !isTopic) { + viewAsTopics = headerItem.lazilyAddSubItem(view_as_topics, R.drawable.msg_topics, LocaleController.getString("TopicViewAsTopics", R.string.TopicViewAsTopics)); + } - final int absoluteGravity = gravity & Gravity.HORIZONTAL_GRAVITY_MASK; - final int verticalGravity = gravity & Gravity.VERTICAL_GRAVITY_MASK; + if (!isTopic) { + clearHistoryItem = headerItem.lazilyAddSubItem(clear_history, R.drawable.msg_clear, LocaleController.getString("ClearHistory", R.string.ClearHistory)); + } + if (themeDelegate.isThemeChangeAvailable()) { + headerItem.lazilyAddSubItem(change_colors, R.drawable.msg_colors, LocaleController.getString("ChangeColors", R.string.ChangeColors)); + } + if (!isTopic) { + toTheBeginning = headerItem.lazilyAddSubItem(to_the_beginning, R.drawable.ic_upward, LocaleController.getString("ToTheBeginning", R.string.ToTheBeginning)); + toTheMessage = headerItem.lazilyAddSubItem(to_the_message, R.drawable.msg_go_up, LocaleController.getString("ToTheMessage", R.string.ToTheMessage)); + clearHistoryItem = headerItem.lazilyAddSubItem(clear_history, R.drawable.msg_clear, LocaleController.getString("ClearHistory", R.string.ClearHistory)); + hideTitleItem = headerItem.lazilyAddSubItem(nkheaderbtn_hide_title, R.drawable.hide_title, LocaleController.getString("HideTitle", R.string.HideTitle)); - switch (absoluteGravity & Gravity.HORIZONTAL_GRAVITY_MASK) { - case Gravity.CENTER_HORIZONTAL: - childLeft = (r - l - width) / 2 + lp.leftMargin - lp.rightMargin; - break; - case Gravity.RIGHT: - childLeft = r - width - lp.rightMargin; - break; - case Gravity.LEFT: - default: - childLeft = lp.leftMargin; - } - - switch (verticalGravity) { - case Gravity.TOP: - childTop = lp.topMargin + getPaddingTop(); - if (child != actionBar && actionBar.getVisibility() == VISIBLE) { - childTop += actionBar.getMeasuredHeight(); - if (inPreviewMode && Build.VERSION.SDK_INT >= 21) { - childTop += AndroidUtilities.statusBarHeight; - } - } - break; - case Gravity.CENTER_VERTICAL: - childTop = ((b - 
paddingBottom) - t - height) / 2 + lp.topMargin - lp.bottomMargin; - break; - case Gravity.BOTTOM: - childTop = ((b - paddingBottom) - t) - height - lp.bottomMargin; - break; - default: - childTop = lp.topMargin; - } - - if (child == blurredView || child == backgroundView) { - childTop = 0; - } else if (child instanceof HintView || child instanceof ChecksHintView) { - childTop = 0; - } else if (child == mentionContainer) { - childTop -= chatActivityEnterView.getMeasuredHeight() - AndroidUtilities.dp(2); - mentionContainer.setTranslationY(chatActivityEnterView.getAnimatedTop()); - } else if (child == pagedownButton || child == mentiondownButton || child == reactionsMentiondownButton) { - if (!inPreviewMode) { - childTop -= chatActivityEnterView.getMeasuredHeight(); - } - } else if (child == emptyViewContainer) { - childTop -= inputFieldHeight / 2 - (actionBar.getVisibility() == VISIBLE ? actionBar.getMeasuredHeight() / 2 : 0); - } else if (chatActivityEnterView.isPopupView(child)) { - if (AndroidUtilities.isInMultiwindow || inBubbleMode) { - childTop = chatActivityEnterView.getTop() - child.getMeasuredHeight() + AndroidUtilities.dp(1); + if (ChatObject.isChannel(currentChat) && !currentChat.creator) { + if (!ChatObject.isNotInChat(currentChat)) { + if (currentChat.megagroup) { + headerItem.lazilyAddSubItem(delete_chat, R.drawable.msg_leave, LocaleController.getString("LeaveMegaMenu", R.string.LeaveMegaMenu)); } else { - childTop = chatActivityEnterView.getBottom(); - } - } else if (child == gifHintTextView || child == voiceHintTextView || child == mediaBanTooltip || child == emojiHintTextView) { - childTop -= inputFieldHeight; - } else if (child == chatListView || child == floatingDateView || child == infoTopView) { - childTop -= blurredViewTopOffset; - if (!inPreviewMode) { - childTop -= (inputFieldHeight - AndroidUtilities.dp(51)); - } - childTop -= paddingBottom; - if (keyboardSize > AndroidUtilities.dp(20) && getLayoutParams().height < 0) { - childTop -= keyboardSize; - } - } else if (child == progressView) { - if (chatActivityEnterView.isTopViewVisible()) { - childTop -= AndroidUtilities.dp(48); - } - } else if (child == actionBar) { - if (inPreviewMode && Build.VERSION.SDK_INT >= 21) { - childTop += AndroidUtilities.statusBarHeight; - } - childTop -= getPaddingTop(); - } else if (child == videoPlayerContainer) { - childTop = actionBar.getMeasuredHeight(); - childTop -= paddingBottom; - if (keyboardSize > AndroidUtilities.dp(20) && getLayoutParams().height < 0) { - childTop -= keyboardSize; - } - } else if (child == instantCameraView || child == overlayView || child == animatingImageView) { - childTop = 0; - } else if (child == textSelectionHelper.getOverlayView(context)) { - childTop -= paddingBottom; - if (keyboardSize > AndroidUtilities.dp(20) && getLayoutParams().height < 0) { - childTop -= keyboardSize; - } - childTop -= blurredViewTopOffset; - } else if (chatActivityEnterView != null && child == chatActivityEnterView.botCommandsMenuContainer) { - childTop -= inputFieldHeight; - } else if (child == forwardingPreviewView) { - childTop = AndroidUtilities.statusBarHeight; - } - child.layout(childLeft, childTop, childLeft + width, childTop + height); + headerItem.lazilyAddSubItem(delete_chat, R.drawable.msg_leave, LocaleController.getString("LeaveChannelMenu", R.string.LeaveChannelMenu)); + } + } + } else if (!ChatObject.isChannel(currentChat)) { + if (currentChat != null) { + headerItem.lazilyAddSubItem(delete_chat, R.drawable.msg_leave, LocaleController.getString("DeleteAndExit", 
R.string.DeleteAndExit)); + } else { + headerItem.lazilyAddSubItem(delete_chat, R.drawable.msg_delete, LocaleController.getString("DeleteChatUser", R.string.DeleteChatUser)); + } } - - invalidateChatListViewTopPadding(); - invalidateMessagesVisiblePart(); - updateTextureViewPosition(false, false); - - if (!scrollingChatListView) { - checkAutoDownloadMessages(false); + if (ChatObject.isMegagroup(currentChat) || currentChat != null && !ChatObject.isChannel(currentChat)) { + headerItem.lazilyAddSubItem(nkheaderbtn_zibi, R.drawable.msg_delete, LocaleController.getString("DeleteAllFromSelf", R.string.DeleteAllFromSelf)); } - notifyHeightChanged(); - } - private void setNonNoveTranslation(float y) { - contentView.setTranslationY(y); - actionBar.setTranslationY(0); - emptyViewContainer.setTranslationY(0); - progressView.setTranslationY(0); - contentPanTranslation = 0; - contentView.setBackgroundTranslation(0); - instantCameraView.onPanTranslationUpdate(0); - if (blurredView != null) { - blurredView.drawable.onPanTranslationUpdate(0); + if (currentChat != null && !ChatObject.isChannel(currentChat) && currentChat.creator) { + headerItem.lazilyAddSubItem(nkheaderbtn_upgrade, R.drawable.baseline_arrow_upward_24, LocaleController.getString("UpgradeGroup", R.string.UpgradeGroup)); } - setFragmentPanTranslationOffset(0); - invalidateChatListViewTopPadding(); } - - @Override - public void setPadding(int left, int top, int right, int bottom) { - contentPaddingTop = top; - invalidateChatListViewTopPadding(); - invalidateMessagesVisiblePart(); + if (currentUser != null && currentUser.self) { + headerItem.lazilyAddSubItem(add_shortcut, R.drawable.msg_home, LocaleController.getString("AddShortcut", R.string.AddShortcut)); } - - @Override - public boolean dispatchKeyEvent(KeyEvent event) { - if (event.getKeyCode() == KeyEvent.KEYCODE_BACK && event.getAction() == 1 && forwardingPreviewView != null && forwardingPreviewView.isShowing()) { - forwardingPreviewView.dismiss(true); - return true; + if (currentUser != null && currentEncryptedChat == null && currentUser.bot) { + headerItem.lazilyAddSubItem(bot_settings, R.drawable.msg_settings_old, LocaleController.getString("BotSettings", R.string.BotSettings)); + headerItem.lazilyAddSubItem(bot_help, R.drawable.msg_help, LocaleController.getString("BotHelp", R.string.BotHelp)); + updateBotButtons(); + } + } + if (ChatObject.isForum(currentChat) && isTopic && getParentLayout() != null && getParentLayout().getFragmentStack() != null) { + boolean hasMyForum = false; + for (int i = 0; i < getParentLayout().getFragmentStack().size(); ++i) { + BaseFragment fragment = getParentLayout().getFragmentStack().get(i); + if (fragment instanceof TopicsFragment && ((TopicsFragment) fragment).getDialogId() == dialog_id) { + hasMyForum = true; + break; } - return super.dispatchKeyEvent(event); } - protected Drawable getNewDrawable() { - Drawable drawable = themeDelegate.getWallpaperDrawable(); - return drawable != null ? 
drawable : super.getNewDrawable(); + if (!hasMyForum) { + openForumItem = headerItem.lazilyAddSubItem(open_forum, R.drawable.msg_discussion, LocaleController.getString("OpenAllTopics", R.string.OpenAllTopics)); } - }; - - contentView = (SizeNotifierFrameLayout) fragmentView; - contentView.needBlur = true; - contentView.needBlurBottom = true; - if (inBubbleMode) { - contentView.setOccupyStatusBar(false); } + if (currentChat != null && forumTopic != null) { + closeTopicItem = headerItem.lazilyAddSubItem(topic_close, R.drawable.msg_topic_close, LocaleController.getString("CloseTopic", R.string.CloseTopic)); + closeTopicItem.setVisibility(currentChat != null && ChatObject.canManageTopic(currentAccount, currentChat, forumTopic) && forumTopic != null && !forumTopic.closed ? View.VISIBLE : View.GONE); + } + menu.setVisibility(inMenuMode ? View.GONE : View.VISIBLE); - contentView.setBackgroundImage(Theme.getCachedWallpaper(), Theme.isWallpaperMotion()); + updateTitle(false); + avatarContainer.updateOnlineCount(); + avatarContainer.updateSubtitle(); + updateTitleIcons(); - emptyViewContainer = new FrameLayout(context); - emptyViewContainer.setVisibility(View.INVISIBLE); - contentView.addView(emptyViewContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); - emptyViewContainer.setOnTouchListener((v, event) -> true); + if (chatMode == 0 && (!isThreadChat() || isTopic) && reportType < 0) { + attachItem = menu.lazilyAddItem(chat_menu_attach, R.drawable.ic_ab_other, themeDelegate); + attachItem.setOverrideMenuClick(true); + attachItem.setAllowCloseAnimation(false); + attachItem.setContentDescription(LocaleController.getString("AccDescrMoreOptions", R.string.AccDescrMoreOptions)); + attachItem.setVisibility(View.GONE); + } - int distance = getArguments().getInt("nearby_distance", -1); - if ((distance >= 0 || preloadedGreetingsSticker != null) && currentUser != null && !userBlocked) { - greetingsViewContainer = new ChatGreetingsView(context, currentUser, distance, currentAccount, preloadedGreetingsSticker, themeDelegate); - greetingsViewContainer.setListener((sticker) -> { - animatingDocuments.put(sticker, 0); - SendMessagesHelper.getInstance(currentAccount).sendSticker(sticker, null, dialog_id, null, null, null, null, true, 0, false); - }); - greetingsViewContainer.setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(10), greetingsViewContainer, contentView, getThemedPaint(Theme.key_paint_chatActionBackground))); - emptyViewContainer.addView(greetingsViewContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 68, 0, 68, 0)); - } else if (currentEncryptedChat == null) { - if (isTopic) { - CreateTopicEmptyView createTopicEmptyView = new CreateTopicEmptyView(context, contentView, themeDelegate); - emptyViewContainer.addView(createTopicEmptyView, new FrameLayout.LayoutParams(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); - } else if (!isThreadChat() && chatMode == 0 && ((currentUser != null && currentUser.self) || (currentChat != null && currentChat.creator && !ChatObject.isChannelAndNotMegaGroup(currentChat)))) { - bigEmptyView = new ChatBigEmptyView(context, contentView, currentChat != null ? 
ChatBigEmptyView.EMPTY_VIEW_TYPE_GROUP : ChatBigEmptyView.EMPTY_VIEW_TYPE_SAVED, themeDelegate); - emptyViewContainer.addView(bigEmptyView, new FrameLayout.LayoutParams(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); - if (currentChat != null) { - bigEmptyView.setStatusText(AndroidUtilities.replaceTags(LocaleController.getString("GroupEmptyTitle1", R.string.GroupEmptyTitle1))); - } - } else { - String emptyMessage = null; - if (isThreadChat()) { - if (isComments) { - emptyMessage = LocaleController.getString("NoComments", R.string.NoComments); - } else { - emptyMessage = LocaleController.getString("NoReplies", R.string.NoReplies); - } - } else if (chatMode == MODE_SCHEDULED) { - emptyMessage = LocaleController.getString("NoScheduledMessages", R.string.NoScheduledMessages); - } else if (currentUser != null && currentUser.id != 777000 && currentUser.id != 429000 && currentUser.id != 4244000 && MessagesController.isSupportUser(currentUser)) { - emptyMessage = LocaleController.getString("GotAQuestion", R.string.GotAQuestion); - } else if (currentUser == null || currentUser.self || currentUser.deleted || userBlocked) { - emptyMessage = LocaleController.getString("NoMessages", R.string.NoMessages); - } - if (emptyMessage == null) { - greetingsViewContainer = new ChatGreetingsView(context, currentUser, distance, currentAccount, preloadedGreetingsSticker, themeDelegate); - greetingsViewContainer.setListener((sticker) -> { - animatingDocuments.put(sticker, 0); - SendMessagesHelper.getInstance(currentAccount).sendSticker(sticker, null, dialog_id, null, null, null, null, true, 0, false); - }); - greetingsViewContainer.setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(10), greetingsViewContainer, contentView, getThemedPaint(Theme.key_paint_chatActionBackground))); - emptyViewContainer.addView(greetingsViewContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 68, 0, 68, 0)); - } else { - emptyView = new TextView(context); - emptyView.setText(emptyMessage); - emptyView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); - emptyView.setGravity(Gravity.CENTER); - emptyView.setTextColor(getThemedColor(Theme.key_chat_serviceText)); - emptyView.setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(6), emptyView, contentView, getThemedPaint(Theme.key_paint_chatActionBackground))); - emptyView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - emptyView.setPadding(AndroidUtilities.dp(10), AndroidUtilities.dp(2), AndroidUtilities.dp(10), AndroidUtilities.dp(3)); - emptyViewContainer.addView(emptyView, new FrameLayout.LayoutParams(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); - } - } - } else { - bigEmptyView = new ChatBigEmptyView(context, contentView, ChatBigEmptyView.EMPTY_VIEW_TYPE_SECRET, themeDelegate); - if (currentEncryptedChat.admin_id == getUserConfig().getClientUserId()) { - bigEmptyView.setStatusText(LocaleController.formatString("EncryptedPlaceholderTitleOutgoing", R.string.EncryptedPlaceholderTitleOutgoing, UserObject.getFirstName(currentUser))); - } else { - bigEmptyView.setStatusText(LocaleController.formatString("EncryptedPlaceholderTitleIncoming", R.string.EncryptedPlaceholderTitleIncoming, UserObject.getFirstName(currentUser))); + if (inPreviewMode) { + if (headerItem != null) { + headerItem.setAlpha(0.0f); } - emptyViewContainer.addView(bigEmptyView, new FrameLayout.LayoutParams(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + if 
(attachItem != null) { + attachItem.setAlpha(0.0f); + } + } + + actionModeViews.clear(); + selectedMessagesCountTextView = null; + checkActionBarMenu(false); + + scrimPaint = new Paint(); + + fragmentView = contentView = new ChatActivityFragmentView(context, parentLayout); + contentView.needBlur = true; + contentView.needBlurBottom = true; + if (inBubbleMode) { + contentView.setOccupyStatusBar(false); } + contentView.setBackgroundImage(Theme.getCachedWallpaper(), Theme.isWallpaperMotion()); + + emptyViewContainer = null; + CharSequence oldMessage; if (chatActivityEnterView != null) { chatActivityEnterView.onDestroy(); @@ -4788,2654 +3860,2121 @@ public void setTranslationY(float translationY) { @Override protected void onLayout(boolean changed, int l, int t, int r, int b) { super.onLayout(changed, l, t, r, b); - if (lastWidth != r - l) { - lastWidth = r - l; - hideHints(false); - } - - int height = getMeasuredHeight(); - if (lastH != height) { - ignoreLayout = true; - if (chatListItemAnimator != null) { - chatListItemAnimator.endAnimations(); - } - chatScrollHelper.cancel(); - ignoreLayout = false; - lastH = height; - } - - forceScrollToTop = false; - if (textSelectionHelper != null && textSelectionHelper.isSelectionMode()) { - textSelectionHelper.invalidate(); - } - isSkeletonVisible(); - } - - private void setGroupTranslationX(ChatMessageCell view, float dx) { - MessageObject.GroupedMessages group = view.getCurrentMessagesGroup(); - if (group == null) { - return; - } - int count = getChildCount(); - for (int a = 0; a < count; a++) { - View child = getChildAt(a); - if (child == view || !(child instanceof ChatMessageCell)) { - continue; - } - ChatMessageCell cell = (ChatMessageCell) child; - if (cell.getCurrentMessagesGroup() == group) { - cell.setSlidingOffset(dx); - cell.invalidate(); - } - } - invalidate(); - } - - @Override - public boolean requestChildRectangleOnScreen(View child, Rect rect, boolean immediate) { - if (scrimPopupWindow != null) { - return false; - } - return super.requestChildRectangleOnScreen(child, rect, immediate); - } - - @Override - public boolean onInterceptTouchEvent(MotionEvent e) { - textSelectionHelper.checkSelectionCancel(e); - if (isFastScrollAnimationRunning()) { - return false; - } - boolean result = super.onInterceptTouchEvent(e); - if (actionBar.isActionModeShowed() || reportType >= 0) { - return result; - } - processTouchEvent(e); - return result; - } - - @Override - public void setItemAnimator(ItemAnimator animator) { - if (isFastScrollAnimationRunning()) { - return; - } - super.setItemAnimator(animator); - } - - private void drawReplyButton(Canvas canvas) { - if (slidingView == null || Thread.currentThread() != Looper.getMainLooper().getThread()) { - return; - } - Paint chatActionBackgroundPaint = getThemedPaint(Theme.key_paint_chatActionBackground); - Paint chatActionBackgroundDarkenPaint = Theme.chat_actionBackgroundGradientDarkenPaint; - if (outlineActionBackgroundPaint.getColor() != chatActionBackgroundPaint.getColor()) { - outlineActionBackgroundPaint.setColor(chatActionBackgroundPaint.getColor()); - } - if (outlineActionBackgroundDarkenPaint.getColor() != chatActionBackgroundDarkenPaint.getColor()) { - outlineActionBackgroundDarkenPaint.setColor(chatActionBackgroundDarkenPaint.getColor()); - } - if (outlineActionBackgroundPaint.getShader() != chatActionBackgroundPaint.getShader()) { - outlineActionBackgroundPaint.setShader(chatActionBackgroundPaint.getShader()); - } - if (outlineActionBackgroundDarkenPaint.getShader() != 
chatActionBackgroundDarkenPaint.getShader()) { - outlineActionBackgroundDarkenPaint.setShader(chatActionBackgroundDarkenPaint.getShader()); - } - - float fillProgress = slidingFillProgress.getValue() / springMultiplier; - int wasDarkenColor = outlineActionBackgroundDarkenPaint.getColor(); - - if (fillProgress > 1) { - slidingBeyondMax = true; - } - - float translationX = slidingView.getNonAnimationTranslationX(false); - if (slidingDrawableVisibilityProgress.getValue() == 0) { - slidingFillProgressSpring.cancel(); - slidingFillProgressSpring.getSpring().setFinalPosition(0); - slidingFillProgress.setValue(0f); - slidingOuterRingSpring.cancel(); - slidingOuterRingSpring.getSpring().setFinalPosition(0); - slidingOuterRingProgress.setValue(0f); - slidingBeyondMax = false; - } - float progress; - if (slidingFillProgressSpring.getSpring().getFinalPosition() != springMultiplier) { - progress = androidx.core.math.MathUtils.clamp((-translationX - AndroidUtilities.dp(20)) / AndroidUtilities.dp(30), 0, 1); - } else { - progress = 1f; - } - - if (progress == 1f && slidingFillProgressSpring.getSpring().getFinalPosition() != springMultiplier) { - slidingFillProgressSpring.getSpring().setFinalPosition(springMultiplier); - slidingFillProgressSpring.start(); - - slidingOuterRingSpring.getSpring().setFinalPosition(springMultiplier); - slidingOuterRingSpring.start(); - } - - boolean visible = translationX <= -AndroidUtilities.dp(20); - float endVisibleValue = visible ? springMultiplier : 0; - if (endVisibleValue != slidingDrawableVisibilitySpring.getSpring().getFinalPosition()) { - slidingDrawableVisibilitySpring.getSpring().setFinalPosition(endVisibleValue); - if (!slidingDrawableVisibilitySpring.isRunning()) { - slidingDrawableVisibilitySpring.start(); - } - } - - float iconProgress = slidingDrawableVisibilityProgress.getValue() / springMultiplier; - float x = getMeasuredWidth() + translationX * (slidingView != null && slidingView.getMessageObject() != null && slidingView.getMessageObject().isOut() ? 0.5f : 1f); - float y = slidingView.getTop() + slidingView.getMeasuredHeight() / 2f; - float scale = slidingBeyondMax ? fillProgress : iconProgress; - - float clearScale = slidingBeyondMax ? 
0f : 1f - fillProgress; - - boolean isDark = ColorUtils.calculateLuminance(getThemedColor(Theme.key_windowBackgroundWhite)) <= 0.5f; - if (iconProgress != 0) { - AndroidUtilities.rectTmp.set((int) (x - AndroidUtilities.dp(16) * scale + outlineActionBackgroundPaint.getStrokeWidth() / 2f), (int) (y - AndroidUtilities.dp(16) * scale + outlineActionBackgroundPaint.getStrokeWidth() / 2f), (int) (x + AndroidUtilities.dp(16) * scale - outlineActionBackgroundPaint.getStrokeWidth() / 2f), (int) (y + AndroidUtilities.dp(16) * scale - outlineActionBackgroundPaint.getStrokeWidth() / 2f)); - Theme.applyServiceShaderMatrix(getMeasuredWidth(), AndroidUtilities.displaySize.y, 0, getY() + AndroidUtilities.rectTmp.top); - if (fillProgress == 0) { - int outlineAlpha = outlineActionBackgroundPaint.getAlpha(); - outlineActionBackgroundPaint.setAlpha((int) (outlineAlpha * iconProgress)); - canvas.drawArc(AndroidUtilities.rectTmp, -90, 360 * progress, false, outlineActionBackgroundPaint); - outlineActionBackgroundPaint.setAlpha(outlineAlpha); - - if (themeDelegate.hasGradientService()) { - outlineAlpha = outlineActionBackgroundDarkenPaint.getAlpha(); - if (isDark) { - outlineActionBackgroundDarkenPaint.setColor(Color.WHITE); - } - outlineActionBackgroundDarkenPaint.setAlpha((int) (outlineAlpha * iconProgress)); - canvas.drawArc(AndroidUtilities.rectTmp, -90, 360 * progress, false, outlineActionBackgroundDarkenPaint); - } - } - } - AndroidUtilities.rectTmp.set((int) (x - AndroidUtilities.dp(16) * scale), (int) (y - AndroidUtilities.dp(16) * scale), (int) (x + AndroidUtilities.dp(16) * scale), (int) (y + AndroidUtilities.dp(16) * scale)); - Theme.applyServiceShaderMatrix(getMeasuredWidth(), AndroidUtilities.displaySize.y, 0, getY() + AndroidUtilities.rectTmp.top); - path.rewind(); - path.addRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(16) * scale, AndroidUtilities.dp(16) * scale, Path.Direction.CW); - - int wasAlpha = chatActionBackgroundPaint.getAlpha(); - chatActionBackgroundPaint.setAlpha((int) (iconProgress * 0.6f * progress * wasAlpha)); - canvas.drawPath(path, chatActionBackgroundPaint); - chatActionBackgroundPaint.setAlpha(wasAlpha); - - if (themeDelegate.hasGradientService()) { - wasAlpha = Theme.chat_actionBackgroundGradientDarkenPaint.getAlpha(); - if (isDark) { - Theme.chat_actionBackgroundGradientDarkenPaint.setColor(Color.WHITE); - } - Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha((int) (iconProgress * 0.6f * progress * wasAlpha)); - canvas.drawPath(path, Theme.chat_actionBackgroundGradientDarkenPaint); - Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha(wasAlpha); - } - - if (clearScale != 0f) { - AndroidUtilities.rectTmp.set((int) (x - AndroidUtilities.dp(16) * clearScale), (int) (y - AndroidUtilities.dp(16) * clearScale), (int) (x + AndroidUtilities.dp(16) * clearScale), (int) (y + AndroidUtilities.dp(16) * clearScale)); - path.rewind(); - path.addRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(16), AndroidUtilities.dp(16), Path.Direction.CW); - - canvas.save(); - canvas.clipPath(path, Region.Op.DIFFERENCE); - } - - AndroidUtilities.rectTmp.set((int) (x - AndroidUtilities.dp(16) * scale), (int) (y - AndroidUtilities.dp(16) * scale), (int) (x + AndroidUtilities.dp(16) * scale), (int) (y + AndroidUtilities.dp(16) * scale)); - Theme.applyServiceShaderMatrix(getMeasuredWidth(), AndroidUtilities.displaySize.y, 0, getY() + AndroidUtilities.rectTmp.top); - path.rewind(); - path.addRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(16) * scale, 
AndroidUtilities.dp(16) * scale, Path.Direction.CW); - - wasAlpha = chatActionBackgroundPaint.getAlpha(); - chatActionBackgroundPaint.setAlpha((int) (iconProgress * 0.4f * wasAlpha)); - canvas.drawPath(path, chatActionBackgroundPaint); - chatActionBackgroundPaint.setAlpha(wasAlpha); - - if (themeDelegate.hasGradientService()) { - wasAlpha = Theme.chat_actionBackgroundGradientDarkenPaint.getAlpha(); - if (isDark) { - Theme.chat_actionBackgroundGradientDarkenPaint.setColor(Color.WHITE); - } - Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha((int) (iconProgress * 0.4f * wasAlpha)); - canvas.drawPath(path, Theme.chat_actionBackgroundGradientDarkenPaint); - Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha(wasAlpha); - } - if (clearScale != 0f) { - canvas.restore(); - } - - float outerRingProgress = slidingOuterRingProgress.getValue() / springMultiplier; - if (outerRingProgress != 0 && outerRingProgress != 1) { - float outScale = 1f + outerRingProgress; - - float wasWidth = outlineActionBackgroundPaint.getStrokeWidth(); - float width = (1f - outerRingProgress) * wasWidth; - if (width != 0f) { - AndroidUtilities.rectTmp.set((int) (x - AndroidUtilities.dp(16) * outScale + width), (int) (y - AndroidUtilities.dp(16) * outScale + width), (int) (x + AndroidUtilities.dp(16) * outScale - width), (int) (y + AndroidUtilities.dp(16) * outScale - width)); - Theme.applyServiceShaderMatrix(getMeasuredWidth(), AndroidUtilities.displaySize.y, 0, getY() + AndroidUtilities.rectTmp.top); - - wasAlpha = outlineActionBackgroundPaint.getAlpha(); - outlineActionBackgroundPaint.setAlpha((int) (wasAlpha * iconProgress)); - - outlineActionBackgroundPaint.setStrokeWidth(width); - canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(16) * outScale, AndroidUtilities.dp(16) * outScale, outlineActionBackgroundPaint); - outlineActionBackgroundPaint.setStrokeWidth(wasWidth); - - outlineActionBackgroundPaint.setAlpha(wasAlpha); - - if (themeDelegate.hasGradientService()) { - wasAlpha = outlineActionBackgroundDarkenPaint.getAlpha(); - if (isDark) { - outlineActionBackgroundDarkenPaint.setColor(Color.WHITE); - } - outlineActionBackgroundDarkenPaint.setAlpha((int) (wasAlpha * iconProgress)); + if (lastWidth != r - l) { + lastWidth = r - l; + hideHints(false); + } - outlineActionBackgroundDarkenPaint.setStrokeWidth(width); - canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(16) * outScale, AndroidUtilities.dp(16) * outScale, outlineActionBackgroundDarkenPaint); - outlineActionBackgroundDarkenPaint.setStrokeWidth(wasWidth); - } + int height = getMeasuredHeight(); + if (lastH != height) { + ignoreLayout = true; + if (chatListItemAnimator != null) { + chatListItemAnimator.endAnimations(); } + chatScrollHelper.cancel(); + ignoreLayout = false; + lastH = height; } - int alpha = (int) (iconProgress * 0xFF); - Drawable replyIconDrawable = getThemedDrawable(Theme.key_drawable_replyIcon); - replyIconDrawable.setAlpha(alpha); - replyIconDrawable.setBounds((int) (x - AndroidUtilities.dp(7) * scale), (int) (y - AndroidUtilities.dp(6) * scale), (int) (x + AndroidUtilities.dp(7) * scale), (int) (y + AndroidUtilities.dp(5) * scale)); - replyIconDrawable.draw(canvas); - replyIconDrawable.setAlpha(255); - - outlineActionBackgroundDarkenPaint.setColor(wasDarkenColor); - chatActionBackgroundDarkenPaint.setColor(wasDarkenColor); + forceScrollToTop = false; + if (textSelectionHelper != null && textSelectionHelper.isSelectionMode()) { + textSelectionHelper.invalidate(); + } + isSkeletonVisible(); } - private 
void processTouchEvent(MotionEvent e) { - if (e != null) { - wasManualScroll = true; + private void setGroupTranslationX(ChatMessageCell view, float dx) { + MessageObject.GroupedMessages group = view.getCurrentMessagesGroup(); + if (group == null) { + return; } - if (e != null && e.getAction() == MotionEvent.ACTION_DOWN && !startedTrackingSlidingView && !maybeStartTrackingSlidingView && slidingView == null && !inPreviewMode) { - View view = getPressedChildView(); - if (view instanceof ChatMessageCell) { - if (slidingView != null) { - slidingView.setSlidingOffset(0); - } - slidingView = (ChatMessageCell) view; - MessageObject message = slidingView.getMessageObject(); - boolean allowReplyOnOpenTopic = false; - if (message != null && ChatObject.isForum(currentChat)) { - TLRPC.TL_forumTopic topic = getMessagesController().getTopicsController().findTopic(currentChat.id, MessageObject.getTopicId(message.messageOwner, true)); - if (topic != null) { - allowReplyOnOpenTopic = !topic.closed || ChatObject.canManageTopic(currentAccount, currentChat, topic); - } - } - if (chatMode != 0 || threadMessageObjects != null && threadMessageObjects.contains(message) || - getMessageType(message) == 1 && (message.getDialogId() == mergeDialogId || message.needDrawBluredPreview()) || - currentEncryptedChat == null && message.getId() < 0 || - bottomOverlayChat != null && bottomOverlayChat.getVisibility() == View.VISIBLE && !(bottomOverlayChatWaitsReply && allowReplyOnOpenTopic || message.wasJustSent) || - currentChat != null && (ChatObject.isNotInChat(currentChat) && !isThreadChat() || ChatObject.isChannel(currentChat) && !ChatObject.canPost(currentChat) && !currentChat.megagroup || !ChatObject.canSendMessages(currentChat)) || - textSelectionHelper.isSelectionMode()) { - if (!canSendInCommentGroup()) { - slidingView.setSlidingOffset(0); - slidingView = null; - return; - } - } - startedTrackingPointerId = e.getPointerId(0); - maybeStartTrackingSlidingView = true; - startedTrackingX = (int) e.getX(); - startedTrackingY = (int) e.getY(); - } - } else if (slidingView != null && e != null && e.getAction() == MotionEvent.ACTION_MOVE && e.getPointerId(0) == startedTrackingPointerId) { - int dx = Math.max(AndroidUtilities.dp(-80), Math.min(0, (int) (e.getX() - startedTrackingX))); - int dy = Math.abs((int) e.getY() - startedTrackingY); - if (getScrollState() == SCROLL_STATE_IDLE && maybeStartTrackingSlidingView && !startedTrackingSlidingView && dx <= -AndroidUtilities.getPixelsInCM(0.4f, true) && Math.abs(dx) / 3 > dy) { - MotionEvent event = MotionEvent.obtain(0, 0, MotionEvent.ACTION_CANCEL, 0, 0, 0); - slidingView.onTouchEvent(event); - super.onInterceptTouchEvent(event); - event.recycle(); - chatLayoutManager.setCanScrollVertically(false); - maybeStartTrackingSlidingView = false; - startedTrackingSlidingView = true; - startedTrackingX = (int) e.getX(); - if (getParent() != null) { - getParent().requestDisallowInterceptTouchEvent(true); - } - } else if (startedTrackingSlidingView) { - if (Math.abs(dx) >= AndroidUtilities.dp(50)) { - if (!wasTrackingVibrate) { - if (!NekoConfig.disableVibration.Bool()) { - try { - performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignore) { - } - } - wasTrackingVibrate = true; - } - } else { - wasTrackingVibrate = false; - } - slidingView.setSlidingOffset(dx); - MessageObject messageObject = slidingView.getMessageObject(); - if (messageObject.isRoundVideo() || messageObject.isVideo()) { - 
updateTextureViewPosition(false, false); - } - setGroupTranslationX(slidingView, dx); - invalidate(); - } - } else if (slidingView != null && (e == null || e.getPointerId(0) == startedTrackingPointerId && (e.getAction() == MotionEvent.ACTION_CANCEL || e.getAction() == MotionEvent.ACTION_UP || e.getAction() == MotionEvent.ACTION_POINTER_UP))) { - if (e != null && e.getAction() != MotionEvent.ACTION_CANCEL && Math.abs(slidingView.getNonAnimationTranslationX(false)) >= AndroidUtilities.dp(50)) { - showFieldPanelForReply(slidingView.getMessageObject()); + int count = getChildCount(); + for (int a = 0; a < count; a++) { + View child = getChildAt(a); + if (child == view || !(child instanceof ChatMessageCell)) { + continue; } - endTrackingX = slidingView.getSlidingOffsetX(); - if (endTrackingX == 0) { - slidingView = null; + ChatMessageCell cell = (ChatMessageCell) child; + if (cell.getCurrentMessagesGroup() == group) { + cell.setSlidingOffset(dx); + cell.invalidate(); } - lastTrackingAnimationTime = System.currentTimeMillis(); - trackAnimationProgress = 0.0f; - invalidate(); - maybeStartTrackingSlidingView = false; - startedTrackingSlidingView = false; - chatLayoutManager.setCanScrollVertically(true); } + invalidate(); } @Override - public boolean onTouchEvent(MotionEvent e) { - textSelectionHelper.checkSelectionCancel(e); - if (e.getAction() == MotionEvent.ACTION_DOWN) { - scrollByTouch = true; + public boolean requestChildRectangleOnScreen(View child, Rect rect, boolean immediate) { + if (scrimPopupWindow != null) { + return false; } - if (!NekoConfig.disableSwipeToNext.Bool() && pullingDownOffset != 0 && (e.getAction() == MotionEvent.ACTION_UP || e.getAction() == MotionEvent.ACTION_CANCEL)) { - float progress = Math.min(1f, pullingDownOffset / AndroidUtilities.dp(110)); - if (e.getAction() == MotionEvent.ACTION_UP && progress == 1 && pullingDownDrawable != null && !pullingDownDrawable.emptyStub) { - if (pullingDownDrawable.animationIsRunning()) { - ValueAnimator animator = ValueAnimator.ofFloat(pullingDownOffset, pullingDownOffset + AndroidUtilities.dp(8)); - pullingDownBackAnimator = animator; - animator.addUpdateListener(valueAnimator -> { - pullingDownOffset = (float) valueAnimator.getAnimatedValue(); - chatListView.invalidate(); - }); - animator.setDuration(200); - animator.setInterpolator(CubicBezierInterpolator.DEFAULT); - animator.start(); - pullingDownDrawable.runOnAnimationFinish(() -> { - animateToNextChat(); - }); - } else { - animateToNextChat(); - } - } else { - if (pullingDownDrawable != null && pullingDownDrawable.emptyStub && (System.currentTimeMillis() - pullingDownDrawable.lastShowingReleaseTime) < 500 && pullingDownDrawable.animateSwipeToRelease) { - AnimatorSet animatorSet = new AnimatorSet(); - pullingDownBackAnimator = animatorSet; - if (pullingDownDrawable != null) { - pullingDownDrawable.showBottomPanel(false); - } - ValueAnimator animator = ValueAnimator.ofFloat(pullingDownOffset, AndroidUtilities.dp(111)); - animator.addUpdateListener(valueAnimator -> { - pullingDownOffset = (float) valueAnimator.getAnimatedValue(); - chatListView.invalidate(); - }); - animator.setDuration(400); - animator.setInterpolator(CubicBezierInterpolator.DEFAULT); - - ValueAnimator animator2 = ValueAnimator.ofFloat(AndroidUtilities.dp(111), 0); - animator2.addUpdateListener(valueAnimator -> { - pullingDownOffset = (float) valueAnimator.getAnimatedValue(); - chatListView.invalidate(); - }); - animator2.setStartDelay(600); - animator2.setDuration(ChatListItemAnimator.DEFAULT_DURATION); - 
animator2.setInterpolator(ChatListItemAnimator.DEFAULT_INTERPOLATOR); + return super.requestChildRectangleOnScreen(child, rect, immediate); + } - animatorSet.playSequentially(animator, animator2); - animatorSet.start(); - } else { - ValueAnimator animator = ValueAnimator.ofFloat(pullingDownOffset, 0); - pullingDownBackAnimator = animator; - if (pullingDownDrawable != null) { - pullingDownDrawable.showBottomPanel(false); - } - animator.addUpdateListener(valueAnimator -> { - pullingDownOffset = (float) valueAnimator.getAnimatedValue(); - chatListView.invalidate(); - }); - animator.setDuration(ChatListItemAnimator.DEFAULT_DURATION); - animator.setInterpolator(ChatListItemAnimator.DEFAULT_INTERPOLATOR); - animator.start(); - } - } - } + @Override + public boolean onInterceptTouchEvent(MotionEvent e) { + textSelectionHelper.checkSelectionCancel(e); if (isFastScrollAnimationRunning()) { return false; } - boolean result = super.onTouchEvent(e); + boolean result = super.onInterceptTouchEvent(e); if (actionBar.isActionModeShowed() || reportType >= 0) { return result; } processTouchEvent(e); - return startedTrackingSlidingView || result; + return result; + } + + @Override + public void setItemAnimator(ItemAnimator animator) { + if (isFastScrollAnimationRunning()) { + return; + } + super.setItemAnimator(animator); } - @Override - public void requestDisallowInterceptTouchEvent(boolean disallowIntercept) { - super.requestDisallowInterceptTouchEvent(disallowIntercept); - if (slidingView != null) { - processTouchEvent(null); + private void drawReplyButton(Canvas canvas) { + if (slidingView == null || Thread.currentThread() != Looper.getMainLooper().getThread()) { + return; + } + Paint chatActionBackgroundPaint = getThemedPaint(Theme.key_paint_chatActionBackground); + Paint chatActionBackgroundDarkenPaint = Theme.chat_actionBackgroundGradientDarkenPaint; + if (outlineActionBackgroundPaint.getColor() != chatActionBackgroundPaint.getColor()) { + outlineActionBackgroundPaint.setColor(chatActionBackgroundPaint.getColor()); + } + if (outlineActionBackgroundDarkenPaint.getColor() != chatActionBackgroundDarkenPaint.getColor()) { + outlineActionBackgroundDarkenPaint.setColor(chatActionBackgroundDarkenPaint.getColor()); + } + if (outlineActionBackgroundPaint.getShader() != chatActionBackgroundPaint.getShader()) { + outlineActionBackgroundPaint.setShader(chatActionBackgroundPaint.getShader()); + } + if (outlineActionBackgroundDarkenPaint.getShader() != chatActionBackgroundDarkenPaint.getShader()) { + outlineActionBackgroundDarkenPaint.setShader(chatActionBackgroundDarkenPaint.getShader()); + } + + float fillProgress = slidingFillProgress.getValue() / springMultiplier; + int wasDarkenColor = outlineActionBackgroundDarkenPaint.getColor(); + + if (fillProgress > 1) { + slidingBeyondMax = true; + } + + float translationX = slidingView.getNonAnimationTranslationX(false); + if (slidingDrawableVisibilityProgress.getValue() == 0) { + slidingFillProgressSpring.cancel(); + slidingFillProgressSpring.getSpring().setFinalPosition(0); + slidingFillProgress.setValue(0f); + slidingOuterRingSpring.cancel(); + slidingOuterRingSpring.getSpring().setFinalPosition(0); + slidingOuterRingProgress.setValue(0f); + slidingBeyondMax = false; + } + float progress; + if (slidingFillProgressSpring.getSpring().getFinalPosition() != springMultiplier) { + progress = androidx.core.math.MathUtils.clamp((-translationX - AndroidUtilities.dp(20)) / AndroidUtilities.dp(30), 0, 1); + } else { + progress = 1f; + } + + if (progress == 1f && 
slidingFillProgressSpring.getSpring().getFinalPosition() != springMultiplier) { + slidingFillProgressSpring.getSpring().setFinalPosition(springMultiplier); + slidingFillProgressSpring.start(); + + slidingOuterRingSpring.getSpring().setFinalPosition(springMultiplier); + slidingOuterRingSpring.start(); } - } - @Override - protected void onChildPressed(View child, float x, float y, boolean pressed) { - super.onChildPressed(child, x, y, pressed); - if (child instanceof ChatMessageCell) { - ChatMessageCell chatMessageCell = (ChatMessageCell) child; - MessageObject object = chatMessageCell.getMessageObject(); - if (object.isMusic() || object.isDocument()) { - return; - } - MessageObject.GroupedMessages groupedMessages = chatMessageCell.getCurrentMessagesGroup(); - if (groupedMessages != null) { - int count = getChildCount(); - for (int a = 0; a < count; a++) { - View item = getChildAt(a); - if (item == child || !(item instanceof ChatMessageCell)) { - continue; - } - ChatMessageCell cell = (ChatMessageCell) item; - if (cell.getCurrentMessagesGroup() == groupedMessages) { - cell.setPressed(pressed); - } - } + boolean visible = translationX <= -AndroidUtilities.dp(20); + float endVisibleValue = visible ? springMultiplier : 0; + if (endVisibleValue != slidingDrawableVisibilitySpring.getSpring().getFinalPosition()) { + slidingDrawableVisibilitySpring.getSpring().setFinalPosition(endVisibleValue); + if (!slidingDrawableVisibilitySpring.isRunning()) { + slidingDrawableVisibilitySpring.start(); } } - } - @Override - public void onDraw(Canvas c) { - super.onDraw(c); - if (slidingView != null) { - float translationX = slidingView.getSlidingOffsetX(); - if (!maybeStartTrackingSlidingView && !startedTrackingSlidingView && endTrackingX != 0 && translationX != 0) { - long newTime = System.currentTimeMillis(); - long dt = newTime - lastTrackingAnimationTime; - trackAnimationProgress += dt / 180.0f; - if (trackAnimationProgress > 1.0f) { - trackAnimationProgress = 1.0f; - } - lastTrackingAnimationTime = newTime; - translationX = endTrackingX * (1.0f - AndroidUtilities.decelerateInterpolator.getInterpolation(trackAnimationProgress)); - if (translationX == 0) { - endTrackingX = 0; - } - setGroupTranslationX(slidingView, translationX); - slidingView.setSlidingOffset(translationX); - MessageObject messageObject = slidingView.getMessageObject(); - if (messageObject.isRoundVideo() || messageObject.isVideo()) { - updateTextureViewPosition(false, false); - } + float iconProgress = slidingDrawableVisibilityProgress.getValue() / springMultiplier; + float x = getMeasuredWidth() + translationX * (slidingView.getMessageObject() != null && slidingView.getMessageObject().isOut() ? 0.5f : 1f); + float y = slidingView.getTop() + slidingView.getMeasuredHeight() / 2f; + float scale = slidingBeyondMax ? fillProgress : iconProgress; - if (trackAnimationProgress == 1f || trackAnimationProgress == 0f) { - slidingView.setSlidingOffset(0); - slidingView = null; + float clearScale = slidingBeyondMax ? 
0f : 1f - fillProgress; + + boolean isDark = ColorUtils.calculateLuminance(getThemedColor(Theme.key_windowBackgroundWhite)) <= 0.5f; + if (iconProgress != 0) { + AndroidUtilities.rectTmp.set((int) (x - AndroidUtilities.dp(16) * scale + outlineActionBackgroundPaint.getStrokeWidth() / 2f), (int) (y - AndroidUtilities.dp(16) * scale + outlineActionBackgroundPaint.getStrokeWidth() / 2f), (int) (x + AndroidUtilities.dp(16) * scale - outlineActionBackgroundPaint.getStrokeWidth() / 2f), (int) (y + AndroidUtilities.dp(16) * scale - outlineActionBackgroundPaint.getStrokeWidth() / 2f)); + Theme.applyServiceShaderMatrix(getMeasuredWidth(), AndroidUtilities.displaySize.y, 0, getY() + AndroidUtilities.rectTmp.top); + if (fillProgress == 0) { + int outlineAlpha = outlineActionBackgroundPaint.getAlpha(); + outlineActionBackgroundPaint.setAlpha((int) (outlineAlpha * iconProgress)); + canvas.drawArc(AndroidUtilities.rectTmp, -90, 360 * progress, false, outlineActionBackgroundPaint); + outlineActionBackgroundPaint.setAlpha(outlineAlpha); + + if (themeDelegate.hasGradientService()) { + outlineAlpha = outlineActionBackgroundDarkenPaint.getAlpha(); + if (isDark) { + outlineActionBackgroundDarkenPaint.setColor(Color.WHITE); + } + outlineActionBackgroundDarkenPaint.setAlpha((int) (outlineAlpha * iconProgress)); + canvas.drawArc(AndroidUtilities.rectTmp, -90, 360 * progress, false, outlineActionBackgroundDarkenPaint); } - invalidate(); } - drawReplyButton(c); } + AndroidUtilities.rectTmp.set((int) (x - AndroidUtilities.dp(16) * scale), (int) (y - AndroidUtilities.dp(16) * scale), (int) (x + AndroidUtilities.dp(16) * scale), (int) (y + AndroidUtilities.dp(16) * scale)); + Theme.applyServiceShaderMatrix(getMeasuredWidth(), AndroidUtilities.displaySize.y, 0, getY() + AndroidUtilities.rectTmp.top); + path.rewind(); + path.addRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(16) * scale, AndroidUtilities.dp(16) * scale, Path.Direction.CW); - if (!NekoConfig.disableSwipeToNext.Bool() && pullingDownOffset != 0 && !isInPreviewMode()) { - c.save(); - float transitionOffset = 0; - if (pullingDownAnimateProgress != 0) { - transitionOffset = (chatListView.getMeasuredHeight() - pullingDownOffset + (pullingDownAnimateToActivity == null ? 
0 : pullingDownAnimateToActivity.pullingBottomOffset)) * pullingDownAnimateProgress; - } - c.translate(0, getMeasuredHeight() - blurredViewBottomOffset - transitionOffset); - if (pullingDownDrawable == null) { - pullingDownDrawable = new ChatPullingDownDrawable(currentAccount, fragmentView, dialog_id, dialogFolderId, dialogFilterId, themeDelegate); - pullingDownDrawable.onAttach(); + int wasAlpha = chatActionBackgroundPaint.getAlpha(); + chatActionBackgroundPaint.setAlpha((int) (iconProgress * 0.6f * progress * wasAlpha)); + canvas.drawPath(path, chatActionBackgroundPaint); + chatActionBackgroundPaint.setAlpha(wasAlpha); + + if (themeDelegate.hasGradientService()) { + wasAlpha = Theme.chat_actionBackgroundGradientDarkenPaint.getAlpha(); + if (isDark) { + Theme.chat_actionBackgroundGradientDarkenPaint.setColor(Color.WHITE); } - pullingDownDrawable.setWidth(getMeasuredWidth()); - float progress = Math.min(1f, pullingDownOffset / AndroidUtilities.dp(110)); - pullingDownDrawable.draw(c, chatListView, progress, 1f - pullingDownAnimateProgress); + Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha((int) (iconProgress * 0.6f * progress * wasAlpha)); + canvas.drawPath(path, Theme.chat_actionBackgroundGradientDarkenPaint); + Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha(wasAlpha); + } - c.restore(); + if (clearScale != 0f) { + AndroidUtilities.rectTmp.set((int) (x - AndroidUtilities.dp(16) * clearScale), (int) (y - AndroidUtilities.dp(16) * clearScale), (int) (x + AndroidUtilities.dp(16) * clearScale), (int) (y + AndroidUtilities.dp(16) * clearScale)); + path.rewind(); + path.addRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(16), AndroidUtilities.dp(16), Path.Direction.CW); - if (pullingDownAnimateToActivity != null) { - c.saveLayerAlpha(0, 0, pullingDownAnimateToActivity.chatListView.getMeasuredWidth(), pullingDownAnimateToActivity.chatListView.getMeasuredHeight(), (int) (255 * pullingDownAnimateProgress), Canvas.ALL_SAVE_FLAG); - c.translate(0, getMeasuredHeight() - pullingDownOffset - transitionOffset); - pullingDownAnimateToActivity.chatListView.draw(c); - c.restore(); - } - } else if (pullingDownDrawable != null) { - pullingDownDrawable.reset(); + canvas.save(); + canvas.clipPath(path, Region.Op.DIFFERENCE); } - } - @Override - public void draw(Canvas canvas) { - if ((startMessageAppearTransitionMs == 0 || System.currentTimeMillis() - startMessageAppearTransitionMs <= SKELETON_DISAPPEAR_MS) && !AndroidUtilities.isTablet() && !isComments && currentUser == null) { - boolean noAvatar = currentChat == null || ChatObject.isChannelAndNotMegaGroup(currentChat); - if (pullingDownOffset != 0) { - canvas.save(); - canvas.translate(0, -pullingDownOffset); - } - updateSkeletonColors(); - updateSkeletonGradient(); + AndroidUtilities.rectTmp.set((int) (x - AndroidUtilities.dp(16) * scale), (int) (y - AndroidUtilities.dp(16) * scale), (int) (x + AndroidUtilities.dp(16) * scale), (int) (y + AndroidUtilities.dp(16) * scale)); + Theme.applyServiceShaderMatrix(getMeasuredWidth(), AndroidUtilities.displaySize.y, 0, getY() + AndroidUtilities.rectTmp.top); + path.rewind(); + path.addRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(16) * scale, AndroidUtilities.dp(16) * scale, Path.Direction.CW); - int lastTop = getHeight() - blurredViewBottomOffset; - int j = 0; + wasAlpha = chatActionBackgroundPaint.getAlpha(); + chatActionBackgroundPaint.setAlpha((int) (iconProgress * 0.4f * wasAlpha)); + canvas.drawPath(path, chatActionBackgroundPaint); + 
chatActionBackgroundPaint.setAlpha(wasAlpha); - int childMaxTop = Integer.MAX_VALUE; - for (int i = 0; i < getChildCount(); i++) { - int top = getChildAt(i).getTop(); - if (top < childMaxTop) { - childMaxTop = top; - } - } - if (startMessageAppearTransitionMs == 0 && childMaxTop <= 0) { - checkDispatchHideSkeletons(fragmentBeginToShow); + if (themeDelegate.hasGradientService()) { + wasAlpha = Theme.chat_actionBackgroundGradientDarkenPaint.getAlpha(); + if (isDark) { + Theme.chat_actionBackgroundGradientDarkenPaint.setColor(Color.WHITE); } + Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha((int) (iconProgress * 0.4f * wasAlpha)); + canvas.drawPath(path, Theme.chat_actionBackgroundGradientDarkenPaint); + Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha(wasAlpha); + } + if (clearScale != 0f) { + canvas.restore(); + } - Paint servicePaint = getThemedPaint(Theme.key_paint_chatActionBackground); - if (skeletonServicePaint.getColor() != servicePaint.getColor()) { - skeletonServicePaint.setColor(servicePaint.getColor()); - } - if (skeletonServicePaint.getShader() != servicePaint.getShader()) { - skeletonServicePaint.setShader(servicePaint.getShader()); - skeletonColorMatrix.setSaturation(SKELETON_SATURATION); - skeletonServicePaint.setColorFilter(new ColorMatrixColorFilter(skeletonColorMatrix)); - } + float outerRingProgress = slidingOuterRingProgress.getValue() / springMultiplier; + if (outerRingProgress != 0 && outerRingProgress != 1) { + float outScale = 1f + outerRingProgress; - for (int i = 0; i < getChildCount(); i++) { - View v = getChildAt(i); -// if (v instanceof ChatMessageCell) { -// ChatMessageCell cell = (ChatMessageCell) v; -// if ((cell.getCurrentMessagesGroup() == null || cell.getCurrentMessagesGroup().findPrimaryMessageObject() == cell.getMessageObject())) { -// if (cell.shouldDrawAlphaLayer() || System.currentTimeMillis() - startMessageAppearTransitionMs >= SKELETON_DISAPPEAR_MS) { -// float progress = cell.getAlpha(); -// -// MessageSkeleton skeleton; -// if (j >= messageSkeletons.size()) { -// skeleton = getNewSkeleton(noAvatar); -// messageSkeletons.add(skeleton); -// } else { -// skeleton = messageSkeletons.get(j); -// } -// -// Rect bounds = cell.getCurrentBackgroundDrawable(true).getBounds(); -// MessageObject.GroupedMessages group = cell.getCurrentMessagesGroup(); -// -// int alpha = skeletonPaint.getAlpha(); -// int wasServiceAlpha = servicePaint.getAlpha(); -// servicePaint.setAlpha((int) (wasServiceAlpha * 0.4f * (1f - progress))); -// skeletonPaint.setAlpha((int) (alpha * (1f - progress))); -// int bottom = (int) AndroidUtilities.lerp(Math.min(skeleton.lastBottom, lastTop - AndroidUtilities.dp(3f)), v.getBottom() + (group != null ? group.transitionParams.top + group.transitionParams.offsetTop : 0), progress); -// int left = noAvatar ? AndroidUtilities.dp(3f) : AndroidUtilities.dp(51); -// int top = (int) AndroidUtilities.lerp(bottom - skeleton.height, bounds.top + v.getTop() + (group != null ? group.transitionParams.top + group.transitionParams.offsetTop : 0), progress); -// int right = skeleton.width; -// -// boolean lerp = cell.getMessageObject() == null || !cell.getMessageObject().isOut(); -// skeletonBackgroundDrawable.setBounds(lerp ? AndroidUtilities.lerp(left, cell.getBackgroundDrawableLeft(), progress) : left, top, -// lerp ? 
AndroidUtilities.lerp(right, cell.getBackgroundDrawableRight(), progress) : right, bottom); -// Theme.applyServiceShaderMatrix(getMeasuredWidth(), AndroidUtilities.displaySize.y, 0, getY() + skeletonBackgroundDrawable.getBounds().top); -// skeletonBackgroundDrawable.drawCached(canvas, skeletonBackgroundCacheParams, servicePaint); -// skeletonBackgroundDrawable.drawCached(canvas, skeletonBackgroundCacheParams, skeletonPaint); -// if (!noAvatar) { -// Theme.applyServiceShaderMatrix(getMeasuredWidth(), AndroidUtilities.displaySize.y, 0, getY() + bottom - AndroidUtilities.dp(42)); -// canvas.drawCircle(AndroidUtilities.dp(48 - 21), bottom - AndroidUtilities.dp(21), AndroidUtilities.dp(21), servicePaint); -// canvas.drawCircle(AndroidUtilities.dp(48 - 21), bottom - AndroidUtilities.dp(21), AndroidUtilities.dp(21), skeletonPaint); -// } -// servicePaint.setAlpha(wasServiceAlpha); -// skeletonPaint.setAlpha(alpha); -// j++; -// -// if (top < lastTop) { -// lastTop = top; -// } -// -// continue; -// } -// j++; -// } -// } - if (v instanceof ChatMessageCell) { - MessageObject.GroupedMessages group = ((ChatMessageCell) v).getCurrentMessagesGroup(); - Rect bounds = ((ChatMessageCell) v).getCurrentBackgroundDrawable(true).getBounds(); - int newTop = (int) (v.getTop() + bounds.top + (group != null ? group.transitionParams.top + group.transitionParams.offsetTop : 0)); - int top = startMessageAppearTransitionMs == 0 && isSkeletonVisible() ? AndroidUtilities.lerp(lastTop, newTop, v.getAlpha()) : v.getAlpha() == 1f ? newTop : lastTop; - if (top < lastTop) { - lastTop = top; - } - } else if (v instanceof ChatActionCell) { - int top = startMessageAppearTransitionMs == 0 && isSkeletonVisible() ? AndroidUtilities.lerp(lastTop, v.getTop(), v.getAlpha()) : v.getAlpha() == 1f ? v.getTop() : lastTop; - if (top < lastTop) { - lastTop = top; - } - } - } + float wasWidth = outlineActionBackgroundPaint.getStrokeWidth(); + float width = (1f - outerRingProgress) * wasWidth; + if (width != 0f) { + AndroidUtilities.rectTmp.set((int) (x - AndroidUtilities.dp(16) * outScale + width), (int) (y - AndroidUtilities.dp(16) * outScale + width), (int) (x + AndroidUtilities.dp(16) * outScale - width), (int) (y + AndroidUtilities.dp(16) * outScale - width)); + Theme.applyServiceShaderMatrix(getMeasuredWidth(), AndroidUtilities.displaySize.y, 0, getY() + AndroidUtilities.rectTmp.top); - if (isSkeletonVisible()) { - boolean drawService = SharedConfig.getDevicePerformanceClass() != SharedConfig.PERFORMANCE_CLASS_LOW && Theme.hasGradientService(); - boolean darkOverlay = ColorUtils.calculateLuminance(getThemedColor(Theme.key_windowBackgroundWhite)) <= 0.7f && Theme.hasGradientService(); - boolean blackOverlay = ColorUtils.calculateLuminance(getThemedColor(Theme.key_windowBackgroundWhite)) <= 0.01f && Theme.hasGradientService(); - if (drawService) { - Theme.applyServiceShaderMatrix(getMeasuredWidth(), AndroidUtilities.displaySize.y, 0, getY() - contentPanTranslation); - } - int wasDarkenAlpha = Theme.chat_actionBackgroundGradientDarkenPaint.getAlpha(); - if (blackOverlay) { - Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha((int) (wasDarkenAlpha * 4f)); - } + wasAlpha = outlineActionBackgroundPaint.getAlpha(); + outlineActionBackgroundPaint.setAlpha((int) (wasAlpha * iconProgress)); - float topSkeletonAlpha = startMessageAppearTransitionMs != 0 ? 
1f - (System.currentTimeMillis() - startMessageAppearTransitionMs) / (float) SKELETON_DISAPPEAR_MS : 1f; - int alpha = skeletonPaint.getAlpha(); - int wasServiceAlpha = skeletonServicePaint.getAlpha(); - int wasOutlineAlpha = skeletonOutlinePaint.getAlpha(); - skeletonServicePaint.setAlpha((int) (0xFF * topSkeletonAlpha)); - skeletonPaint.setAlpha((int) (topSkeletonAlpha * alpha)); - skeletonOutlinePaint.setAlpha((int) (wasOutlineAlpha * alpha)); - while (lastTop > blurredViewTopOffset) { - lastTop -= AndroidUtilities.dp(3f); + outlineActionBackgroundPaint.setStrokeWidth(width); + canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(16) * outScale, AndroidUtilities.dp(16) * outScale, outlineActionBackgroundPaint); + outlineActionBackgroundPaint.setStrokeWidth(wasWidth); - MessageSkeleton skeleton; - if (j >= messageSkeletons.size()) { - skeleton = getNewSkeleton(noAvatar); - messageSkeletons.add(skeleton); - } else { - skeleton = messageSkeletons.get(j); - } - skeleton.lastBottom = startMessageAppearTransitionMs != 0 ? messages.size() <= 2 ? Math.min(skeleton.lastBottom, lastTop) : skeleton.lastBottom : lastTop; + outlineActionBackgroundPaint.setAlpha(wasAlpha); - lastTop -= skeleton.height; + if (themeDelegate.hasGradientService()) { + wasAlpha = outlineActionBackgroundDarkenPaint.getAlpha(); + if (isDark) { + outlineActionBackgroundDarkenPaint.setColor(Color.WHITE); + } + outlineActionBackgroundDarkenPaint.setAlpha((int) (wasAlpha * iconProgress)); - j++; + outlineActionBackgroundDarkenPaint.setStrokeWidth(width); + canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(16) * outScale, AndroidUtilities.dp(16) * outScale, outlineActionBackgroundDarkenPaint); + outlineActionBackgroundDarkenPaint.setStrokeWidth(wasWidth); } + } + } - lastTop = messageSkeletons.isEmpty() ? getHeight() - blurredViewBottomOffset : messageSkeletons.get(0).lastBottom + AndroidUtilities.dp(3f); - for (int i = 0; i < messageSkeletons.size() && lastTop > blurredViewTopOffset; i++) { - lastTop -= AndroidUtilities.dp(3f); + int alpha = (int) (iconProgress * 0xFF); + Drawable replyIconDrawable = getThemedDrawable(Theme.key_drawable_replyIcon); + replyIconDrawable.setAlpha(alpha); + replyIconDrawable.setBounds((int) (x - AndroidUtilities.dp(7) * scale), (int) (y - AndroidUtilities.dp(6) * scale), (int) (x + AndroidUtilities.dp(7) * scale), (int) (y + AndroidUtilities.dp(5) * scale)); + replyIconDrawable.draw(canvas); + replyIconDrawable.setAlpha(255); - MessageSkeleton skeleton = messageSkeletons.get(i); + outlineActionBackgroundDarkenPaint.setColor(wasDarkenColor); + chatActionBackgroundDarkenPaint.setColor(wasDarkenColor); + } - int bottom = skeleton.lastBottom; - skeletonBackgroundDrawable.setBounds(noAvatar ? 
AndroidUtilities.dp(3f) : AndroidUtilities.dp(51), bottom - skeleton.height, skeleton.width, bottom); - if (drawService) { - skeletonBackgroundDrawable.drawCached(canvas, skeletonBackgroundCacheParams, skeletonServicePaint); + private void processTouchEvent(MotionEvent e) { + if (e != null) { + wasManualScroll = true; + } + if (e != null && e.getAction() == MotionEvent.ACTION_DOWN && !startedTrackingSlidingView && !maybeStartTrackingSlidingView && slidingView == null && !inPreviewMode) { + View view = getPressedChildView(); + if (view instanceof ChatMessageCell) { + if (slidingView != null) { + slidingView.setSlidingOffset(0); + } + slidingView = (ChatMessageCell) view; + MessageObject message = slidingView.getMessageObject(); + boolean allowReplyOnOpenTopic = false; + if (message != null && ChatObject.isForum(currentChat)) { + TLRPC.TL_forumTopic topic = getMessagesController().getTopicsController().findTopic(currentChat.id, MessageObject.getTopicId(message.messageOwner, true)); + if (topic != null) { + allowReplyOnOpenTopic = !topic.closed || ChatObject.canManageTopic(currentAccount, currentChat, topic); } - skeletonBackgroundDrawable.drawCached(canvas, skeletonBackgroundCacheParams, skeletonPaint); - if (darkOverlay) { - skeletonBackgroundDrawable.drawCached(canvas, skeletonBackgroundCacheParams, Theme.chat_actionBackgroundGradientDarkenPaint); + } + if (chatMode != 0 || threadMessageObjects != null && threadMessageObjects.contains(message) || + getMessageType(message) == 1 && (message.getDialogId() == mergeDialogId || message.needDrawBluredPreview()) || + currentEncryptedChat == null && message.getId() < 0 || + bottomOverlayChat != null && bottomOverlayChat.getVisibility() == View.VISIBLE && !(bottomOverlayChatWaitsReply && allowReplyOnOpenTopic || message.wasJustSent) || + currentChat != null && (ChatObject.isNotInChat(currentChat) && !isThreadChat() || ChatObject.isChannel(currentChat) && !ChatObject.canPost(currentChat) && !currentChat.megagroup || !ChatObject.canSendMessages(currentChat)) || + textSelectionHelper.isSelectionMode()) { + if (!canSendInCommentGroup()) { + slidingView.setSlidingOffset(0); + slidingView = null; + return; } - skeletonBackgroundDrawable.drawCached(canvas, skeletonBackgroundCacheParams, skeletonOutlinePaint); - - if (!noAvatar) { - if (drawService) { - canvas.drawCircle(AndroidUtilities.dp(48 - 21), bottom - AndroidUtilities.dp(21), AndroidUtilities.dp(21), skeletonServicePaint); - } - canvas.drawCircle(AndroidUtilities.dp(48 - 21), bottom - AndroidUtilities.dp(21), AndroidUtilities.dp(21), skeletonPaint); - if (darkOverlay) { - canvas.drawCircle(AndroidUtilities.dp(48 - 21), bottom - AndroidUtilities.dp(21), AndroidUtilities.dp(21), Theme.chat_actionBackgroundGradientDarkenPaint); + } + startedTrackingPointerId = e.getPointerId(0); + maybeStartTrackingSlidingView = true; + startedTrackingX = (int) e.getX(); + startedTrackingY = (int) e.getY(); + } + } else if (slidingView != null && e != null && e.getAction() == MotionEvent.ACTION_MOVE && e.getPointerId(0) == startedTrackingPointerId) { + int dx = Math.max(AndroidUtilities.dp(-80), Math.min(0, (int) (e.getX() - startedTrackingX))); + int dy = Math.abs((int) e.getY() - startedTrackingY); + if (getScrollState() == SCROLL_STATE_IDLE && maybeStartTrackingSlidingView && !startedTrackingSlidingView && dx <= -AndroidUtilities.getPixelsInCM(0.4f, true) && Math.abs(dx) / 3 > dy) { + MotionEvent event = MotionEvent.obtain(0, 0, MotionEvent.ACTION_CANCEL, 0, 0, 0); + slidingView.onTouchEvent(event); + 
super.onInterceptTouchEvent(event); + event.recycle(); + chatLayoutManager.setCanScrollVertically(false); + maybeStartTrackingSlidingView = false; + startedTrackingSlidingView = true; + startedTrackingX = (int) e.getX(); + if (getParent() != null) { + getParent().requestDisallowInterceptTouchEvent(true); + } + } else if (startedTrackingSlidingView) { + if (Math.abs(dx) >= AndroidUtilities.dp(50)) { + if (!wasTrackingVibrate) { + if (!NekoConfig.disableVibration.Bool()) { + try { + performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } catch (Exception ignore) { + } } - canvas.drawCircle(AndroidUtilities.dp(48 - 21), bottom - AndroidUtilities.dp(21), AndroidUtilities.dp(21), skeletonOutlinePaint); + wasTrackingVibrate = true; } - - lastTop -= skeleton.height; + } else { + wasTrackingVibrate = false; } - - skeletonServicePaint.setAlpha(wasServiceAlpha); - skeletonPaint.setAlpha(alpha); - skeletonOutlinePaint.setAlpha(wasOutlineAlpha); - Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha(wasDarkenAlpha); - invalidated = false; + slidingView.setSlidingOffset(dx); + MessageObject messageObject = slidingView.getMessageObject(); + if (messageObject.isRoundVideo() || messageObject.isVideo()) { + updateTextureViewPosition(false, false); + } + setGroupTranslationX(slidingView, dx); invalidate(); - } else if (System.currentTimeMillis() - startMessageAppearTransitionMs > SKELETON_DISAPPEAR_MS) { - messageSkeletons.clear(); } - lastSkeletonCount = messageSkeletons.size(); - lastSkeletonMessageCount = messages.size(); - if (pullingDownOffset != 0) { - canvas.restore(); + } else if (slidingView != null && (e == null || e.getPointerId(0) == startedTrackingPointerId && (e.getAction() == MotionEvent.ACTION_CANCEL || e.getAction() == MotionEvent.ACTION_UP || e.getAction() == MotionEvent.ACTION_POINTER_UP))) { + if (e != null && e.getAction() != MotionEvent.ACTION_CANCEL && Math.abs(slidingView.getNonAnimationTranslationX(false)) >= AndroidUtilities.dp(50)) { + showFieldPanelForReply(slidingView.getMessageObject()); + } + endTrackingX = slidingView.getSlidingOffsetX(); + if (endTrackingX == 0) { + slidingView = null; } + lastTrackingAnimationTime = System.currentTimeMillis(); + trackAnimationProgress = 0.0f; + invalidate(); + maybeStartTrackingSlidingView = false; + startedTrackingSlidingView = false; + chatLayoutManager.setCanScrollVertically(true); } - super.draw(canvas); } - private void updateSkeletonColors() { - boolean dark = ColorUtils.calculateLuminance(getThemedColor(Theme.key_windowBackgroundWhite)) <= 0.7f; - int color0 = ColorUtils.blendARGB(getThemedColor(Theme.key_listSelector), Color.argb(dark ? 0x21 : 0x03, 0xFF, 0xFF, 0xFF), dark ? 0.9f : 0.5f); - int color1 = ColorUtils.setAlphaComponent(getThemedColor(Theme.key_listSelector), dark ? 24 : SKELETON_LIGHT_OVERLAY_ALPHA); - if (skeletonColor1 != color1 || skeletonColor0 != color0) { - skeletonColor0 = color0; - skeletonColor1 = color1; - skeletonGradient = new LinearGradient(0, 0, skeletonGradientWidth = AndroidUtilities.dp(200), 0, new int[]{color1, color0, color0, color1}, new float[]{0.0f, 0.4f, 0.6f, 1f}, Shader.TileMode.CLAMP); - skeletonTotalTranslation = -skeletonGradientWidth * 2; - skeletonPaint.setShader(skeletonGradient); - - int outlineColor = Color.argb(dark ? 
0x2B : 0x60, 0xFF, 0xFF, 0xFF);
- skeletonOutlineGradient = new LinearGradient(0, 0, skeletonGradientWidth, 0, new int[]{Color.TRANSPARENT, outlineColor, outlineColor, Color.TRANSPARENT}, new float[]{0.0f, 0.4f, 0.6f, 1f}, Shader.TileMode.CLAMP);
- skeletonOutlinePaint.setShader(skeletonOutlineGradient);
+ @Override
+ public boolean onTouchEvent(MotionEvent e) {
+ textSelectionHelper.checkSelectionCancel(e);
+ if (e.getAction() == MotionEvent.ACTION_DOWN) {
+ scrollByTouch = true;
}
- }
+ if (!NekoConfig.disableSwipeToNext.Bool() && pullingDownOffset != 0 && (e.getAction() == MotionEvent.ACTION_UP || e.getAction() == MotionEvent.ACTION_CANCEL)) {
+ float progress = Math.min(1f, pullingDownOffset / AndroidUtilities.dp(110));
+ if (e.getAction() == MotionEvent.ACTION_UP && progress == 1 && pullingDownDrawable != null && !pullingDownDrawable.emptyStub) {
+ if (pullingDownDrawable.animationIsRunning()) {
+ ValueAnimator animator = ValueAnimator.ofFloat(pullingDownOffset, pullingDownOffset + AndroidUtilities.dp(8));
+ pullingDownBackAnimator = animator;
+ animator.addUpdateListener(valueAnimator -> {
+ pullingDownOffset = (float) valueAnimator.getAnimatedValue();
+ chatListView.invalidate();
+ });
+ animator.setDuration(200);
+ animator.setInterpolator(CubicBezierInterpolator.DEFAULT);
+ animator.start();
+ pullingDownDrawable.runOnAnimationFinish(() -> {
+ animateToNextChat();
+ });
+ } else {
+ animateToNextChat();
+ }
+ } else {
+ if (pullingDownDrawable != null && pullingDownDrawable.emptyStub && (System.currentTimeMillis() - pullingDownDrawable.lastShowingReleaseTime) < 500 && pullingDownDrawable.animateSwipeToRelease) {
+ AnimatorSet animatorSet = new AnimatorSet();
+ pullingDownBackAnimator = animatorSet;
+ if (pullingDownDrawable != null) {
+ pullingDownDrawable.showBottomPanel(false);
+ }
+ ValueAnimator animator = ValueAnimator.ofFloat(pullingDownOffset, AndroidUtilities.dp(111));
+ animator.addUpdateListener(valueAnimator -> {
+ pullingDownOffset = (float) valueAnimator.getAnimatedValue();
+ chatListView.invalidate();
+ });
+ animator.setDuration(400);
+ animator.setInterpolator(CubicBezierInterpolator.DEFAULT);
- private void updateSkeletonGradient() {
- long newUpdateTime = SystemClock.elapsedRealtime();
- long dt = Math.abs(skeletonLastUpdateTime - newUpdateTime);
- if (dt > 17) {
- dt = 16;
- }
- if (dt < 4) {
- dt = 0;
- }
- int width = getWidth();
- skeletonLastUpdateTime = newUpdateTime;
- skeletonTotalTranslation += dt * width / 400.0f;
- if (skeletonTotalTranslation >= width * 2) {
- skeletonTotalTranslation = -skeletonGradientWidth * 2;
+ ValueAnimator animator2 = ValueAnimator.ofFloat(AndroidUtilities.dp(111), 0);
+ animator2.addUpdateListener(valueAnimator -> {
+ pullingDownOffset = (float) valueAnimator.getAnimatedValue();
+ chatListView.invalidate();
+ });
+ animator2.setStartDelay(600);
+ animator2.setDuration(ChatListItemAnimator.DEFAULT_DURATION);
+ animator2.setInterpolator(ChatListItemAnimator.DEFAULT_INTERPOLATOR);
+
+ animatorSet.playSequentially(animator, animator2);
+ animatorSet.start();
+ } else {
+ ValueAnimator animator = ValueAnimator.ofFloat(pullingDownOffset, 0);
+ pullingDownBackAnimator = animator;
+ if (pullingDownDrawable != null) {
+ pullingDownDrawable.showBottomPanel(false);
+ }
+ animator.addUpdateListener(valueAnimator -> {
+ pullingDownOffset = (float) valueAnimator.getAnimatedValue();
+ chatListView.invalidate();
+ });
+ animator.setDuration(ChatListItemAnimator.DEFAULT_DURATION);
+
animator.setInterpolator(ChatListItemAnimator.DEFAULT_INTERPOLATOR); + animator.start(); + } + } } - skeletonMatrix.setTranslate(skeletonTotalTranslation, 0); - if (skeletonGradient != null) { - skeletonGradient.setLocalMatrix(skeletonMatrix); + if (isFastScrollAnimationRunning()) { + return false; } - skeletonOutlineMatrix.setTranslate(skeletonTotalTranslation, 0); - if (skeletonOutlineGradient != null) { - skeletonOutlineGradient.setLocalMatrix(skeletonOutlineMatrix); + boolean result = super.onTouchEvent(e); + if (actionBar.isActionModeShowed() || reportType >= 0) { + return result; } + processTouchEvent(e); + return startedTrackingSlidingView || result; } @Override - protected void dispatchDraw(Canvas canvas) { - drawLaterRoundProgressCell = null; - invalidated = false; - - canvas.save(); - if (fragmentTransition == null || (fromPullingDownTransition && !toPullingDownTransition)) { - canvas.clipRect(0, chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4), getMeasuredWidth(), getMeasuredHeight() - blurredViewBottomOffset); - } - selectorRect.setEmpty(); - if (pullingDownOffset != 0) { - int restoreToCount = canvas.save(); - float transitionOffset = 0; - if (pullingDownAnimateProgress != 0) { - transitionOffset = (chatListView.getMeasuredHeight() - pullingDownOffset) * pullingDownAnimateProgress; - } - canvas.translate(0, drawingChatLisViewYoffset = -pullingDownOffset - transitionOffset); - drawChatBackgroundElements(canvas); - super.dispatchDraw(canvas); - drawChatForegroundElements(canvas); - canvas.restoreToCount(restoreToCount); - } else { - drawChatBackgroundElements(canvas); - super.dispatchDraw(canvas); - drawChatForegroundElements(canvas); + public void requestDisallowInterceptTouchEvent(boolean disallowIntercept) { + super.requestDisallowInterceptTouchEvent(disallowIntercept); + if (slidingView != null) { + processTouchEvent(null); } - canvas.restore(); } - private void drawChatForegroundElements(Canvas canvas) { - int size = drawTimeAfter.size(); - if (size > 0) { - for (int a = 0; a < size; a++) { - ChatMessageCell cell = drawTimeAfter.get(a); - canvas.save(); - canvas.translate(cell.getLeft() + cell.getNonAnimationTranslationX(false), cell.getY()); - cell.drawTime(canvas, cell.shouldDrawAlphaLayer() ? cell.getAlpha() : 1f, true); - canvas.restore(); - } - drawTimeAfter.clear(); - } - size = drawNamesAfter.size(); - if (size > 0) { - for (int a = 0; a < size; a++) { - ChatMessageCell cell = drawNamesAfter.get(a); - float canvasOffsetX = cell.getLeft() + cell.getNonAnimationTranslationX(false); - float canvasOffsetY = cell.getY(); - float alpha = cell.shouldDrawAlphaLayer() ? 
cell.getAlpha() : 1f; - - canvas.save(); - canvas.translate(canvasOffsetX, canvasOffsetY); - cell.setInvalidatesParent(true); - cell.drawNamesLayout(canvas, alpha); - cell.setInvalidatesParent(false); - canvas.restore(); + @Override + protected void onChildPressed(View child, float x, float y, boolean pressed) { + super.onChildPressed(child, x, y, pressed); + if (child instanceof ChatMessageCell) { + ChatMessageCell chatMessageCell = (ChatMessageCell) child; + MessageObject object = chatMessageCell.getMessageObject(); + if (object.isMusic() || object.isDocument()) { + return; } - drawNamesAfter.clear(); - } - size = drawCaptionAfter.size(); - if (size > 0) { - for (int a = 0; a < size; a++) { - ChatMessageCell cell = drawCaptionAfter.get(a); - boolean selectionOnly = false; - if (cell.getCurrentPosition() != null) { - selectionOnly = (cell.getCurrentPosition().flags & MessageObject.POSITION_FLAG_LEFT) == 0; - } - float alpha = cell.shouldDrawAlphaLayer() ? cell.getAlpha() : 1f; - float canvasOffsetX = cell.getLeft() + cell.getNonAnimationTranslationX(false); - float canvasOffsetY = cell.getY(); - canvas.save(); - MessageObject.GroupedMessages groupedMessages = cell.getCurrentMessagesGroup(); - if (groupedMessages != null && groupedMessages.transitionParams.backgroundChangeBounds) { - float x = cell.getNonAnimationTranslationX(true); - float l = (groupedMessages.transitionParams.left + x + groupedMessages.transitionParams.offsetLeft); - float t = (groupedMessages.transitionParams.top + groupedMessages.transitionParams.offsetTop); - float r = (groupedMessages.transitionParams.right + x + groupedMessages.transitionParams.offsetRight); - float b = (groupedMessages.transitionParams.bottom + groupedMessages.transitionParams.offsetBottom); - - if (!groupedMessages.transitionParams.backgroundChangeBounds) { - t += cell.getTranslationY(); - b += cell.getTranslationY(); + MessageObject.GroupedMessages groupedMessages = chatMessageCell.getCurrentMessagesGroup(); + if (groupedMessages != null) { + int count = getChildCount(); + for (int a = 0; a < count; a++) { + View item = getChildAt(a); + if (item == child || !(item instanceof ChatMessageCell)) { + continue; + } + ChatMessageCell cell = (ChatMessageCell) item; + if (cell.getCurrentMessagesGroup() == groupedMessages) { + cell.setPressed(pressed); } - canvas.clipRect( - l + AndroidUtilities.dp(8), t + AndroidUtilities.dp(8), - r - AndroidUtilities.dp(8), b - AndroidUtilities.dp(8) - ); - } - if (cell.getTransitionParams().wasDraw) { - canvas.translate(canvasOffsetX, canvasOffsetY); - cell.setInvalidatesParent(true); - cell.drawCaptionLayout(canvas, selectionOnly, alpha); - cell.setInvalidatesParent(false); - canvas.restore(); } } - drawCaptionAfter.clear(); } } - private void drawChatBackgroundElements(Canvas canvas) { - int count = getChildCount(); - MessageObject.GroupedMessages lastDrawnGroup = null; - - for (int a = 0; a < count; a++) { - View child = getChildAt(a); - if (chatAdapter.isBot && child instanceof BotHelpCell) { - BotHelpCell botCell = (BotHelpCell) child; - float top = (getMeasuredHeight() - chatListViewPaddingTop - blurredViewBottomOffset) / 2 - child.getMeasuredHeight() / 2 + chatListViewPaddingTop; - if (!botCell.animating() && !chatListView.fastScrollAnimationRunning) { - if (child.getTop() > top) { - child.setTranslationY(top - child.getTop()); - } else { - child.setTranslationY(0); - } - } - break; - } else if (child instanceof ChatMessageCell) { - ChatMessageCell cell = (ChatMessageCell) child; - MessageObject.GroupedMessages 
group = cell.getCurrentMessagesGroup(); - if (group == null || group != lastDrawnGroup) { - lastDrawnGroup = group; - MessageObject.GroupedMessagePosition position = cell.getCurrentPosition(); - MessageBackgroundDrawable backgroundDrawable = cell.getBackgroundDrawable(); - if ((backgroundDrawable.isAnimationInProgress() || cell.isDrawingSelectionBackground()) && (position == null || (position.flags & MessageObject.POSITION_FLAG_RIGHT) != 0)) { - if (cell.isHighlighted() || cell.isHighlightedAnimated()) { - if (position == null) { - Paint backgroundPaint = getThemedPaint(Theme.key_paint_chatMessageBackgroundSelected); - if (themeDelegate != null && themeDelegate.isDark || backgroundPaint == null) { - backgroundPaint = Theme.chat_replyLinePaint; - backgroundPaint.setColor(getThemedColor(Theme.key_chat_selectedBackground)); - } else { - float viewTop = (isKeyboardVisible() ? chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(); - int backgroundHeight = contentView.getBackgroundSizeY(); - if (themeDelegate != null) { - themeDelegate.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, cell.getX(), viewTop); - } else { - Theme.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, cell.getX(), viewTop); - } - } - canvas.save(); - canvas.translate(0, cell.getTranslationY()); - int wasAlpha = backgroundPaint.getAlpha(); - backgroundPaint.setAlpha((int) (wasAlpha * cell.getHighlightAlpha() * cell.getAlpha())); - canvas.drawRect(0, cell.getTop(), getMeasuredWidth(), cell.getBottom(), backgroundPaint); - backgroundPaint.setAlpha(wasAlpha); - canvas.restore(); - } - } else { - int y = (int) cell.getY(); - int height; - canvas.save(); - if (position == null) { - height = cell.getMeasuredHeight(); - } else { - height = y + cell.getMeasuredHeight(); - long time = 0; - float touchX = 0; - float touchY = 0; - for (int i = 0; i < count; i++) { - View inner = getChildAt(i); - if (inner instanceof ChatMessageCell) { - ChatMessageCell innerCell = (ChatMessageCell) inner; - MessageObject.GroupedMessages innerGroup = innerCell.getCurrentMessagesGroup(); - if (innerGroup == group) { - MessageBackgroundDrawable drawable = innerCell.getBackgroundDrawable(); - y = Math.min(y, (int) innerCell.getY()); - height = Math.max(height, (int) innerCell.getY() + innerCell.getMeasuredHeight()); - long touchTime = drawable.getLastTouchTime(); - if (touchTime > time) { - touchX = drawable.getTouchX() + innerCell.getX(); - touchY = drawable.getTouchY() + innerCell.getY(); - time = touchTime; - } - } - } - } - backgroundDrawable.setTouchCoordsOverride(touchX, touchY - y); - height -= y; - } - canvas.clipRect(0, y, getMeasuredWidth(), y + height); - Paint selectedBackgroundPaint = getThemedPaint(Theme.key_paint_chatMessageBackgroundSelected); - if (themeDelegate != null && !themeDelegate.isDark && selectedBackgroundPaint != null) { - backgroundDrawable.setCustomPaint(selectedBackgroundPaint); - float viewTop = (isKeyboardVisible() ? 
chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(); - int backgroundHeight = contentView.getBackgroundSizeY(); - if (themeDelegate != null) { - themeDelegate.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, cell.getX(), viewTop); - } else { - Theme.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, cell.getX(), viewTop); - } - } else { - backgroundDrawable.setCustomPaint(null); - backgroundDrawable.setColor(getThemedColor(Theme.key_chat_selectedBackground)); - } - backgroundDrawable.setBounds(0, y, getMeasuredWidth(), y + height); - backgroundDrawable.draw(canvas); - canvas.restore(); - } - } - } - if (scrimView != cell && group == null && cell.drawBackgroundInParent()) { - canvas.save(); - canvas.translate(cell.getX(), cell.getY()); - if (cell.getScaleX() != 1f) { - canvas.scale( - cell.getScaleX(), cell.getScaleY(), - cell.getPivotX(), (cell.getHeight() >> 1) - ); - } - cell.drawBackgroundInternal(canvas, true); - canvas.restore(); + @Override + public void onDraw(Canvas c) { + super.onDraw(c); + if (slidingView != null) { + float translationX = slidingView.getSlidingOffsetX(); + if (!maybeStartTrackingSlidingView && !startedTrackingSlidingView && endTrackingX != 0 && translationX != 0) { + long newTime = System.currentTimeMillis(); + long dt = newTime - lastTrackingAnimationTime; + trackAnimationProgress += dt / 180.0f; + if (trackAnimationProgress > 1.0f) { + trackAnimationProgress = 1.0f; } - } else if (child instanceof ChatActionCell) { - ChatActionCell cell = (ChatActionCell) child; - if (cell.hasGradientService()) { - canvas.save(); - canvas.translate(cell.getX(), cell.getY()); - canvas.scale(cell.getScaleX(), cell.getScaleY(), cell.getMeasuredWidth() / 2f, cell.getMeasuredHeight() / 2f); - cell.drawBackground(canvas, true); - canvas.restore(); + lastTrackingAnimationTime = newTime; + translationX = endTrackingX * (1.0f - AndroidUtilities.decelerateInterpolator.getInterpolation(trackAnimationProgress)); + if (translationX == 0) { + endTrackingX = 0; + } + setGroupTranslationX(slidingView, translationX); + slidingView.setSlidingOffset(translationX); + MessageObject messageObject = slidingView.getMessageObject(); + if (messageObject.isRoundVideo() || messageObject.isVideo()) { + updateTextureViewPosition(false, false); + } + + if (trackAnimationProgress == 1f || trackAnimationProgress == 0f) { + slidingView.setSlidingOffset(0); + slidingView = null; } + invalidate(); } + drawReplyButton(c); } - MessageObject.GroupedMessages scrimGroup = null; - if (scrimView instanceof ChatMessageCell) { - scrimGroup = ((ChatMessageCell) scrimView).getCurrentMessagesGroup(); - } - for (int k = 0; k < 3; k++) { - drawingGroups.clear(); - if (k == 2 && !chatListView.isFastScrollAnimationRunning()) { - continue; - } - for (int i = 0; i < count; i++) { - View child = chatListView.getChildAt(i); - if (child instanceof ChatMessageCell) { - ChatMessageCell cell = (ChatMessageCell) child; - if (child.getY() > chatListView.getHeight() || child.getY() + child.getHeight() < 0) { - continue; - } - MessageObject.GroupedMessages group = cell.getCurrentMessagesGroup(); - if (group == null || (k == 0 && group.messages.size() == 1) || (k == 1 && !group.transitionParams.drawBackgroundForDeletedItems)) { - continue; - } - if ((k == 0 && cell.getMessageObject().deleted) || (k == 1 && !cell.getMessageObject().deleted)) { - continue; - } - if ((k == 2 && !cell.willRemovedAfterAnimation()) || (k != 2 && cell.willRemovedAfterAnimation())) { - continue; 
- } - if (!drawingGroups.contains(group)) { - group.transitionParams.left = 0; - group.transitionParams.top = 0; - group.transitionParams.right = 0; - group.transitionParams.bottom = 0; + if (!NekoConfig.disableSwipeToNext.Bool() && pullingDownOffset != 0 && !isInPreviewMode()) { + c.save(); + float transitionOffset = 0; + if (pullingDownAnimateProgress != 0) { + transitionOffset = (chatListView.getMeasuredHeight() - pullingDownOffset + (pullingDownAnimateToActivity == null ? 0 : pullingDownAnimateToActivity.pullingBottomOffset)) * pullingDownAnimateProgress; + } + c.translate(0, getMeasuredHeight() - blurredViewBottomOffset - transitionOffset); + if (pullingDownDrawable == null) { + pullingDownDrawable = new ChatPullingDownDrawable(currentAccount, fragmentView, dialog_id, dialogFolderId, dialogFilterId, themeDelegate); + pullingDownDrawable.onAttach(); + } + pullingDownDrawable.setWidth(getMeasuredWidth()); + float progress = Math.min(1f, pullingDownOffset / AndroidUtilities.dp(110)); + pullingDownDrawable.draw(c, chatListView, progress, 1f - pullingDownAnimateProgress); - group.transitionParams.pinnedBotton = false; - group.transitionParams.pinnedTop = false; - group.transitionParams.cell = cell; - drawingGroups.add(group); - } + c.restore(); - group.transitionParams.pinnedTop = cell.isPinnedTop(); - group.transitionParams.pinnedBotton = cell.isPinnedBottom(); + if (pullingDownAnimateToActivity != null) { + c.saveLayerAlpha(0, 0, pullingDownAnimateToActivity.chatListView.getMeasuredWidth(), pullingDownAnimateToActivity.chatListView.getMeasuredHeight(), (int) (255 * pullingDownAnimateProgress), Canvas.ALL_SAVE_FLAG); + c.translate(0, getMeasuredHeight() - pullingDownOffset - transitionOffset); + pullingDownAnimateToActivity.chatListView.draw(c); + c.restore(); + } + } else if (pullingDownDrawable != null) { + pullingDownDrawable.reset(); + } + } - int left = (cell.getLeft() + cell.getBackgroundDrawableLeft()); - int right = (cell.getLeft() + cell.getBackgroundDrawableRight()); - int top = (cell.getTop() + cell.getBackgroundDrawableTop()); - int bottom = (cell.getTop() + cell.getBackgroundDrawableBottom()); + @Override + public void draw(Canvas canvas) { + if ((startMessageAppearTransitionMs == 0 || System.currentTimeMillis() - startMessageAppearTransitionMs <= SKELETON_DISAPPEAR_MS) && !AndroidUtilities.isTablet() && !isComments && currentUser == null) { + boolean noAvatar = currentChat == null || ChatObject.isChannelAndNotMegaGroup(currentChat); + if (pullingDownOffset != 0) { + canvas.save(); + canvas.translate(0, -pullingDownOffset); + } + updateSkeletonColors(); + updateSkeletonGradient(); - if ((cell.getCurrentPosition().flags & MessageObject.POSITION_FLAG_TOP) == 0) { - top -= AndroidUtilities.dp(10); - } + int lastTop = getHeight() - blurredViewBottomOffset; + int j = 0; - if ((cell.getCurrentPosition().flags & MessageObject.POSITION_FLAG_BOTTOM) == 0) { - bottom += AndroidUtilities.dp(10); - } + int childMaxTop = Integer.MAX_VALUE; + for (int i = 0; i < getChildCount(); i++) { + int top = getChildAt(i).getTop(); + if (top < childMaxTop) { + childMaxTop = top; + } + } + if (startMessageAppearTransitionMs == 0 && childMaxTop <= 0) { + checkDispatchHideSkeletons(fragmentBeginToShow); + } - if (cell.willRemovedAfterAnimation()) { - group.transitionParams.cell = cell; - } + Paint servicePaint = getThemedPaint(Theme.key_paint_chatActionBackground); + if (skeletonServicePaint.getColor() != servicePaint.getColor()) { + skeletonServicePaint.setColor(servicePaint.getColor()); + } + if 
(skeletonServicePaint.getShader() != servicePaint.getShader()) { + skeletonServicePaint.setShader(servicePaint.getShader()); + skeletonColorMatrix.setSaturation(SKELETON_SATURATION); + skeletonServicePaint.setColorFilter(new ColorMatrixColorFilter(skeletonColorMatrix)); + } - if (group.transitionParams.top == 0 || top < group.transitionParams.top) { - group.transitionParams.top = top; - } - if (group.transitionParams.bottom == 0 || bottom > group.transitionParams.bottom) { - group.transitionParams.bottom = bottom; - } - if (group.transitionParams.left == 0 || left < group.transitionParams.left) { - group.transitionParams.left = left; + for (int i = 0; i < getChildCount(); i++) { + View v = getChildAt(i); +// if (v instanceof ChatMessageCell) { +// ChatMessageCell cell = (ChatMessageCell) v; +// if ((cell.getCurrentMessagesGroup() == null || cell.getCurrentMessagesGroup().findPrimaryMessageObject() == cell.getMessageObject())) { +// if (cell.shouldDrawAlphaLayer() || System.currentTimeMillis() - startMessageAppearTransitionMs >= SKELETON_DISAPPEAR_MS) { +// float progress = cell.getAlpha(); +// +// MessageSkeleton skeleton; +// if (j >= messageSkeletons.size()) { +// skeleton = getNewSkeleton(noAvatar); +// messageSkeletons.add(skeleton); +// } else { +// skeleton = messageSkeletons.get(j); +// } +// +// Rect bounds = cell.getCurrentBackgroundDrawable(true).getBounds(); +// MessageObject.GroupedMessages group = cell.getCurrentMessagesGroup(); +// +// int alpha = skeletonPaint.getAlpha(); +// int wasServiceAlpha = servicePaint.getAlpha(); +// servicePaint.setAlpha((int) (wasServiceAlpha * 0.4f * (1f - progress))); +// skeletonPaint.setAlpha((int) (alpha * (1f - progress))); +// int bottom = (int) AndroidUtilities.lerp(Math.min(skeleton.lastBottom, lastTop - AndroidUtilities.dp(3f)), v.getBottom() + (group != null ? group.transitionParams.top + group.transitionParams.offsetTop : 0), progress); +// int left = noAvatar ? AndroidUtilities.dp(3f) : AndroidUtilities.dp(51); +// int top = (int) AndroidUtilities.lerp(bottom - skeleton.height, bounds.top + v.getTop() + (group != null ? group.transitionParams.top + group.transitionParams.offsetTop : 0), progress); +// int right = skeleton.width; +// +// boolean lerp = cell.getMessageObject() == null || !cell.getMessageObject().isOut(); +// skeletonBackgroundDrawable.setBounds(lerp ? AndroidUtilities.lerp(left, cell.getBackgroundDrawableLeft(), progress) : left, top, +// lerp ? 
AndroidUtilities.lerp(right, cell.getBackgroundDrawableRight(), progress) : right, bottom); +// Theme.applyServiceShaderMatrix(getMeasuredWidth(), AndroidUtilities.displaySize.y, 0, getY() + skeletonBackgroundDrawable.getBounds().top); +// skeletonBackgroundDrawable.drawCached(canvas, skeletonBackgroundCacheParams, servicePaint); +// skeletonBackgroundDrawable.drawCached(canvas, skeletonBackgroundCacheParams, skeletonPaint); +// if (!noAvatar) { +// Theme.applyServiceShaderMatrix(getMeasuredWidth(), AndroidUtilities.displaySize.y, 0, getY() + bottom - AndroidUtilities.dp(42)); +// canvas.drawCircle(AndroidUtilities.dp(48 - 21), bottom - AndroidUtilities.dp(21), AndroidUtilities.dp(21), servicePaint); +// canvas.drawCircle(AndroidUtilities.dp(48 - 21), bottom - AndroidUtilities.dp(21), AndroidUtilities.dp(21), skeletonPaint); +// } +// servicePaint.setAlpha(wasServiceAlpha); +// skeletonPaint.setAlpha(alpha); +// j++; +// +// if (top < lastTop) { +// lastTop = top; +// } +// +// continue; +// } +// j++; +// } +// } + if (v instanceof ChatMessageCell) { + MessageObject.GroupedMessages group = ((ChatMessageCell) v).getCurrentMessagesGroup(); + Rect bounds = ((ChatMessageCell) v).getCurrentBackgroundDrawable(true).getBounds(); + int newTop = (int) (v.getTop() + bounds.top + (group != null ? group.transitionParams.top + group.transitionParams.offsetTop : 0)); + int top = startMessageAppearTransitionMs == 0 && isSkeletonVisible() ? AndroidUtilities.lerp(lastTop, newTop, v.getAlpha()) : v.getAlpha() == 1f ? newTop : lastTop; + if (top < lastTop) { + lastTop = top; } - if (group.transitionParams.right == 0 || right > group.transitionParams.right) { - group.transitionParams.right = right; + } else if (v instanceof ChatActionCell) { + int top = startMessageAppearTransitionMs == 0 && isSkeletonVisible() ? AndroidUtilities.lerp(lastTop, v.getTop(), v.getAlpha()) : v.getAlpha() == 1f ? 
v.getTop() : lastTop; + if (top < lastTop) { + lastTop = top; } } } - for (int i = 0; i < drawingGroups.size(); i++) { - MessageObject.GroupedMessages group = drawingGroups.get(i); - if (group == scrimGroup) { - continue; + if (isSkeletonVisible()) { + boolean drawService = SharedConfig.getDevicePerformanceClass() != SharedConfig.PERFORMANCE_CLASS_LOW && Theme.hasGradientService(); + boolean darkOverlay = ColorUtils.calculateLuminance(getThemedColor(Theme.key_windowBackgroundWhite)) <= 0.7f && Theme.hasGradientService(); + boolean blackOverlay = ColorUtils.calculateLuminance(getThemedColor(Theme.key_windowBackgroundWhite)) <= 0.01f && Theme.hasGradientService(); + if (drawService) { + Theme.applyServiceShaderMatrix(getMeasuredWidth(), AndroidUtilities.displaySize.y, 0, getY() - contentPanTranslation); } - float x = group.transitionParams.cell.getNonAnimationTranslationX(true); - float l = (group.transitionParams.left + x + group.transitionParams.offsetLeft); - float t = (group.transitionParams.top + group.transitionParams.offsetTop); - float r = (group.transitionParams.right + x + group.transitionParams.offsetRight); - float b = (group.transitionParams.bottom + group.transitionParams.offsetBottom); - - if (!group.transitionParams.backgroundChangeBounds) { - t += group.transitionParams.cell.getTranslationY(); - b += group.transitionParams.cell.getTranslationY(); + int wasDarkenAlpha = Theme.chat_actionBackgroundGradientDarkenPaint.getAlpha(); + if (blackOverlay) { + Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha((int) (wasDarkenAlpha * 4f)); } - if (t < chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(20)) { - t = chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(20); - } + float topSkeletonAlpha = startMessageAppearTransitionMs != 0 ? 1f - (System.currentTimeMillis() - startMessageAppearTransitionMs) / (float) SKELETON_DISAPPEAR_MS : 1f; + int alpha = skeletonPaint.getAlpha(); + int wasServiceAlpha = skeletonServicePaint.getAlpha(); + int wasOutlineAlpha = skeletonOutlinePaint.getAlpha(); + skeletonServicePaint.setAlpha((int) (0xFF * topSkeletonAlpha)); + skeletonPaint.setAlpha((int) (topSkeletonAlpha * alpha)); + skeletonOutlinePaint.setAlpha((int) (wasOutlineAlpha * alpha)); + while (lastTop > blurredViewTopOffset) { + lastTop -= AndroidUtilities.dp(3f); - if (b > chatListView.getMeasuredHeight() + AndroidUtilities.dp(20)) { - b = chatListView.getMeasuredHeight() + AndroidUtilities.dp(20); + MessageSkeleton skeleton; + if (j >= messageSkeletons.size()) { + skeleton = getNewSkeleton(noAvatar); + messageSkeletons.add(skeleton); + } else { + skeleton = messageSkeletons.get(j); + } + skeleton.lastBottom = startMessageAppearTransitionMs != 0 ? messages.size() <= 2 ? Math.min(skeleton.lastBottom, lastTop) : skeleton.lastBottom : lastTop; + + lastTop -= skeleton.height; + + j++; } - boolean useScale = group.transitionParams.cell.getScaleX() != 1f || group.transitionParams.cell.getScaleY() != 1f; - if (useScale) { - canvas.save(); - canvas.scale(group.transitionParams.cell.getScaleX(), group.transitionParams.cell.getScaleY(), l + (r - l) / 2, t + (b - t) / 2); - } - boolean selected = true; - for (int a = 0, N = group.messages.size(); a < N; a++) { - MessageObject object = group.messages.get(a); - int index = object.getDialogId() == dialog_id ? 0 : 1; - if (selectedMessagesIds[index].indexOfKey(object.getId()) < 0) { - selected = false; - break; + lastTop = messageSkeletons.isEmpty() ? 
getHeight() - blurredViewBottomOffset : messageSkeletons.get(0).lastBottom + AndroidUtilities.dp(3f); + for (int i = 0; i < messageSkeletons.size() && lastTop > blurredViewTopOffset; i++) { + lastTop -= AndroidUtilities.dp(3f); + + MessageSkeleton skeleton = messageSkeletons.get(i); + + int bottom = skeleton.lastBottom; + skeletonBackgroundDrawable.setBounds(noAvatar ? AndroidUtilities.dp(3f) : AndroidUtilities.dp(51), bottom - skeleton.height, skeleton.width, bottom); + if (drawService) { + skeletonBackgroundDrawable.drawCached(canvas, skeletonBackgroundCacheParams, skeletonServicePaint); } - } - group.transitionParams.cell.drawBackground(canvas, (int) l, (int) t, (int) r, (int) b, group.transitionParams.pinnedTop, group.transitionParams.pinnedBotton, selected, contentView.getKeyboardHeight()); - group.transitionParams.cell = null; - group.transitionParams.drawCaptionLayout = group.hasCaption; - if (useScale) { - canvas.restore(); - for (int ii = 0; ii < count; ii++) { - View child = chatListView.getChildAt(ii); - if (child instanceof ChatMessageCell && ((ChatMessageCell) child).getCurrentMessagesGroup() == group) { - ChatMessageCell cell = ((ChatMessageCell) child); - int left = cell.getLeft(); - int top = cell.getTop(); - child.setPivotX(l - left + (r - l) / 2); - child.setPivotY(t - top + (b - t) / 2); + skeletonBackgroundDrawable.drawCached(canvas, skeletonBackgroundCacheParams, skeletonPaint); + if (darkOverlay) { + skeletonBackgroundDrawable.drawCached(canvas, skeletonBackgroundCacheParams, Theme.chat_actionBackgroundGradientDarkenPaint); + } + skeletonBackgroundDrawable.drawCached(canvas, skeletonBackgroundCacheParams, skeletonOutlinePaint); + + if (!noAvatar) { + if (drawService) { + canvas.drawCircle(AndroidUtilities.dp(48 - 21), bottom - AndroidUtilities.dp(21), AndroidUtilities.dp(21), skeletonServicePaint); + } + canvas.drawCircle(AndroidUtilities.dp(48 - 21), bottom - AndroidUtilities.dp(21), AndroidUtilities.dp(21), skeletonPaint); + if (darkOverlay) { + canvas.drawCircle(AndroidUtilities.dp(48 - 21), bottom - AndroidUtilities.dp(21), AndroidUtilities.dp(21), Theme.chat_actionBackgroundGradientDarkenPaint); } + canvas.drawCircle(AndroidUtilities.dp(48 - 21), bottom - AndroidUtilities.dp(21), AndroidUtilities.dp(21), skeletonOutlinePaint); } + + lastTop -= skeleton.height; } + + skeletonServicePaint.setAlpha(wasServiceAlpha); + skeletonPaint.setAlpha(alpha); + skeletonOutlinePaint.setAlpha(wasOutlineAlpha); + Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha(wasDarkenAlpha); + invalidated = false; + invalidate(); + } else if (System.currentTimeMillis() - startMessageAppearTransitionMs > SKELETON_DISAPPEAR_MS) { + messageSkeletons.clear(); + } + lastSkeletonCount = messageSkeletons.size(); + lastSkeletonMessageCount = messages.size(); + if (pullingDownOffset != 0) { + canvas.restore(); } } + super.draw(canvas); } - @Override - public boolean drawChild(Canvas canvas, View child, long drawingTime) { - if (isSkeletonVisible()) { - invalidated = false; - invalidate(); - } + private void updateSkeletonColors() { + boolean dark = ColorUtils.calculateLuminance(getThemedColor(Theme.key_windowBackgroundWhite)) <= 0.7f; + int color0 = ColorUtils.blendARGB(getThemedColor(Theme.key_listSelector), Color.argb(dark ? 0x21 : 0x03, 0xFF, 0xFF, 0xFF), dark ? 0.9f : 0.5f); + int color1 = ColorUtils.setAlphaComponent(getThemedColor(Theme.key_listSelector), dark ? 
24 : SKELETON_LIGHT_OVERLAY_ALPHA);
+ if (skeletonColor1 != color1 || skeletonColor0 != color0) {
+ skeletonColor0 = color0;
+ skeletonColor1 = color1;
+ skeletonGradient = new LinearGradient(0, 0, skeletonGradientWidth = AndroidUtilities.dp(200), 0, new int[]{color1, color0, color0, color1}, new float[]{0.0f, 0.4f, 0.6f, 1f}, Shader.TileMode.CLAMP);
+ skeletonTotalTranslation = -skeletonGradientWidth * 2;
+ skeletonPaint.setShader(skeletonGradient);
- int clipLeft = 0;
- int clipBottom = 0;
- boolean skipDraw = child == scrimView;
- ChatMessageCell cell;
- ChatActionCell actionCell = null;
- float cilpTop = chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4);
+ int outlineColor = Color.argb(dark ? 0x2B : 0x60, 0xFF, 0xFF, 0xFF);
+ skeletonOutlineGradient = new LinearGradient(0, 0, skeletonGradientWidth, 0, new int[]{Color.TRANSPARENT, outlineColor, outlineColor, Color.TRANSPARENT}, new float[]{0.0f, 0.4f, 0.6f, 1f}, Shader.TileMode.CLAMP);
+ skeletonOutlinePaint.setShader(skeletonOutlineGradient);
+ }
+ }
- if (child.getY() > getMeasuredHeight() || child.getY() + child.getMeasuredHeight() < cilpTop) {
- skipDraw = true;
+ private void updateSkeletonGradient() {
+ long newUpdateTime = SystemClock.elapsedRealtime();
+ long dt = Math.abs(skeletonLastUpdateTime - newUpdateTime);
+ if (dt > 17) {
+ dt = 16;
+ }
+ if (dt < 4) {
+ dt = 0;
+ }
+ int width = getWidth();
+ skeletonLastUpdateTime = newUpdateTime;
+ skeletonTotalTranslation += dt * width / 400.0f;
+ if (skeletonTotalTranslation >= width * 2) {
+ skeletonTotalTranslation = -skeletonGradientWidth * 2;
+ }
+ skeletonMatrix.setTranslate(skeletonTotalTranslation, 0);
+ if (skeletonGradient != null) {
+ skeletonGradient.setLocalMatrix(skeletonMatrix);
+ }
+ skeletonOutlineMatrix.setTranslate(skeletonTotalTranslation, 0);
+ if (skeletonOutlineGradient != null) {
+ skeletonOutlineGradient.setLocalMatrix(skeletonOutlineMatrix);
}
+ }
- MessageObject.GroupedMessages group = null;
+ @Override
+ protected void dispatchDraw(Canvas canvas) {
+ drawLaterRoundProgressCell = null;
+ invalidated = false;
- if (child instanceof ChatMessageCell) {
- cell = (ChatMessageCell) child;
- if (animateSendingViews.contains(cell)) {
- skipDraw = true;
- }
- MessageObject.GroupedMessagePosition position = cell.getCurrentPosition();
- group = cell.getCurrentMessagesGroup();
- if (position != null) {
- if (position.pw != position.spanSize && position.spanSize == 1000 && position.siblingHeights == null && group.hasSibling) {
- clipLeft = cell.getBackgroundDrawableLeft();
- } else if (position.siblingHeights != null) {
- clipBottom = child.getBottom() - AndroidUtilities.dp(1 + (cell.isPinnedBottom() ? 
1 : 0)); - } - } - if (cell.needDelayRoundProgressDraw()) { - drawLaterRoundProgressCell = cell; - } - if (!skipDraw && scrimView instanceof ChatMessageCell) { - ChatMessageCell cell2 = (ChatMessageCell) scrimView; - if (cell2.getCurrentMessagesGroup() != null && cell2.getCurrentMessagesGroup() == group) { - skipDraw = true; - } - } - if (skipDraw) { - cell.getPhotoImage().skipDraw(); + canvas.save(); + if (fragmentTransition == null || (fromPullingDownTransition && !toPullingDownTransition)) { + canvas.clipRect(0, chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4), getMeasuredWidth(), getMeasuredHeight() - blurredViewBottomOffset); + } + selectorRect.setEmpty(); + if (pullingDownOffset != 0) { + int restoreToCount = canvas.save(); + float transitionOffset = 0; + if (pullingDownAnimateProgress != 0) { + transitionOffset = (chatListView.getMeasuredHeight() - pullingDownOffset) * pullingDownAnimateProgress; } - } else if (child instanceof ChatActionCell) { - actionCell = (ChatActionCell) child; - cell = null; + canvas.translate(0, drawingChatLisViewYoffset = -pullingDownOffset - transitionOffset); + drawChatBackgroundElements(canvas); + super.dispatchDraw(canvas); + drawChatForegroundElements(canvas); + canvas.restoreToCount(restoreToCount); } else { - cell = null; - } - if (clipLeft != 0) { - canvas.save(); - } else if (clipBottom != 0) { - canvas.save(); + drawChatBackgroundElements(canvas); + super.dispatchDraw(canvas); + drawChatForegroundElements(canvas); } - boolean result; - if (!skipDraw) { - boolean clipToGroupBounds = group != null && group.transitionParams.backgroundChangeBounds; - if (clipToGroupBounds) { - canvas.save(); - float x = cell.getNonAnimationTranslationX(true); - float l = (group.transitionParams.left + x + group.transitionParams.offsetLeft); - float t = (group.transitionParams.top + group.transitionParams.offsetTop); - float r = (group.transitionParams.right + x + group.transitionParams.offsetRight); - float b = (group.transitionParams.bottom + group.transitionParams.offsetBottom); + canvas.restore(); + } - canvas.clipRect( - l + AndroidUtilities.dp(4), - t + AndroidUtilities.dp(4), - r - AndroidUtilities.dp(4), - b - AndroidUtilities.dp(4) - ); - } - if (cell != null && clipToGroupBounds) { - cell.clipToGroupBounds = true; - result = super.drawChild(canvas, child, drawingTime); - cell.clipToGroupBounds = false; - } else { - result = super.drawChild(canvas, child, drawingTime); - } - if (clipToGroupBounds) { - canvas.restore(); - } - if (cell != null && cell.hasOutboundsContent()) { - canvas.save(); - canvas.translate(cell.getX(), cell.getY()); - cell.drawOutboundsContent(canvas); - canvas.restore(); - } else if (actionCell != null) { + private void drawChatForegroundElements(Canvas canvas) { + int size = drawTimeAfter.size(); + if (size > 0) { + for (int a = 0; a < size; a++) { + ChatMessageCell cell = drawTimeAfter.get(a); canvas.save(); - canvas.translate(actionCell.getX(), actionCell.getY()); - actionCell.drawOutboundsContent(canvas); + canvas.translate(cell.getLeft() + cell.getNonAnimationTranslationX(false), cell.getY()); + cell.drawTime(canvas, cell.shouldDrawAlphaLayer() ? 
cell.getAlpha() : 1f, true); canvas.restore(); } - } else { - result = false; - } - if (clipLeft != 0 || clipBottom != 0) { - canvas.restore(); - } - - if (child.getTranslationY() != 0) { - canvas.save(); - canvas.translate(0, child.getTranslationY()); - } - - if (cell != null) { - cell.drawCheckBox(canvas); - } - - if (child.getTranslationY() != 0) { - canvas.restore(); + drawTimeAfter.clear(); } + size = drawNamesAfter.size(); + if (size > 0) { + for (int a = 0; a < size; a++) { + ChatMessageCell cell = drawNamesAfter.get(a); + float canvasOffsetX = cell.getLeft() + cell.getNonAnimationTranslationX(false); + float canvasOffsetY = cell.getY(); + float alpha = cell.shouldDrawAlphaLayer() ? cell.getAlpha() : 1f; - if (child.getTranslationY() != 0) { - canvas.save(); - canvas.translate(0, child.getTranslationY()); + canvas.save(); + canvas.translate(canvasOffsetX, canvasOffsetY); + cell.setInvalidatesParent(true); + cell.drawNamesLayout(canvas, alpha); + cell.setInvalidatesParent(false); + canvas.restore(); + } + drawNamesAfter.clear(); } - - if (cell != null) { - MessageObject message = cell.getMessageObject(); - MessageObject.GroupedMessagePosition position = cell.getCurrentPosition(); - if (!skipDraw) { - if (position != null || cell.getTransitionParams().animateBackgroundBoundsInner) { - if (position == null || (position.last || position.minX == 0 && position.minY == 0)) { - if (position == null || position.last) { - drawTimeAfter.add(cell); - } - if ((position == null || (position.minX == 0 && position.minY == 0)) && cell.hasNameLayout()) { - drawNamesAfter.add(cell); - } - } - if (position != null || cell.getTransitionParams().transformGroupToSingleMessage || cell.getTransitionParams().animateBackgroundBoundsInner) { - if (position == null || (position.flags & MessageObject.POSITION_FLAG_BOTTOM) != 0) { - drawCaptionAfter.add(cell); - } - } + size = drawCaptionAfter.size(); + if (size > 0) { + for (int a = 0; a < size; a++) { + ChatMessageCell cell = drawCaptionAfter.get(a); + boolean selectionOnly = false; + if (cell.getCurrentPosition() != null) { + selectionOnly = (cell.getCurrentPosition().flags & MessageObject.POSITION_FLAG_LEFT) == 0; } + float alpha = cell.shouldDrawAlphaLayer() ? 
cell.getAlpha() : 1f; + float canvasOffsetX = cell.getLeft() + cell.getNonAnimationTranslationX(false); + float canvasOffsetY = cell.getY(); + canvas.save(); + MessageObject.GroupedMessages groupedMessages = cell.getCurrentMessagesGroup(); + if (groupedMessages != null && groupedMessages.transitionParams.backgroundChangeBounds) { + float x = cell.getNonAnimationTranslationX(true); + float l = (groupedMessages.transitionParams.left + x + groupedMessages.transitionParams.offsetLeft); + float t = (groupedMessages.transitionParams.top + groupedMessages.transitionParams.offsetTop); + float r = (groupedMessages.transitionParams.right + x + groupedMessages.transitionParams.offsetRight); + float b = (groupedMessages.transitionParams.bottom + groupedMessages.transitionParams.offsetBottom); - if (videoPlayerContainer != null && (message.isRoundVideo() || message.isVideo()) && !message.isVoiceTranscriptionOpen() && MediaController.getInstance().isPlayingMessage(message)) { - ImageReceiver imageReceiver = cell.getPhotoImage(); - float newX = imageReceiver.getImageX() + cell.getX(); - float newY = cell.getY() + imageReceiver.getImageY() + chatListView.getY() - videoPlayerContainer.getTop(); - if (videoPlayerContainer.getTranslationX() != newX || videoPlayerContainer.getTranslationY() != newY) { - videoPlayerContainer.setTranslationX(newX); - videoPlayerContainer.setTranslationY(newY); - fragmentView.invalidate(); - videoPlayerContainer.invalidate(); + if (!groupedMessages.transitionParams.backgroundChangeBounds) { + t += cell.getTranslationY(); + b += cell.getTranslationY(); } + canvas.clipRect( + l + AndroidUtilities.dp(8), t + AndroidUtilities.dp(8), + r - AndroidUtilities.dp(8), b - AndroidUtilities.dp(8) + ); } - } - ImageReceiver imageReceiver = cell.getAvatarImage(); - if (imageReceiver != null) { - MessageObject.GroupedMessages groupedMessages = getValidGroupedMessage(message); - if (cell.getMessageObject().deleted) { - if (child.getTranslationY() != 0) { - canvas.restore(); - } - imageReceiver.setVisible(false, false); - return result; + if (cell.getTransitionParams().wasDraw) { + canvas.translate(canvasOffsetX, canvasOffsetY); + cell.setInvalidatesParent(true); + cell.drawCaptionLayout(canvas, selectionOnly, alpha); + cell.setInvalidatesParent(false); + canvas.restore(); } + } + drawCaptionAfter.clear(); + } + } - boolean replaceAnimation = chatListView.isFastScrollAnimationRunning() || (groupedMessages != null && groupedMessages.transitionParams.backgroundChangeBounds); - int top = replaceAnimation ? 
child.getTop() : (int) child.getY(); - if (cell.drawPinnedBottom()) { - int p; - if (cell.willRemovedAfterAnimation()) { - p = chatScrollHelper.positionToOldView.indexOfValue(child); - if (p >= 0) { - p = chatScrollHelper.positionToOldView.keyAt(p); - } + private void drawChatBackgroundElements(Canvas canvas) { + int count = getChildCount(); + MessageObject.GroupedMessages lastDrawnGroup = null; + + for (int a = 0; a < count; a++) { + View child = getChildAt(a); + if (chatAdapter.isBot && child instanceof BotHelpCell) { + BotHelpCell botCell = (BotHelpCell) child; + float top = (getMeasuredHeight() - chatListViewPaddingTop - blurredViewBottomOffset) / 2 - child.getMeasuredHeight() / 2 + chatListViewPaddingTop; + if (!botCell.animating() && !chatListView.fastScrollAnimationRunning) { + if (child.getTop() > top) { + child.setTranslationY(top - child.getTop()); } else { - ViewHolder holder = chatListView.getChildViewHolder(child); - p = holder.getAdapterPosition(); + child.setTranslationY(0); } - - if (p >= 0) { - int nextPosition; - if (groupedMessages != null && position != null) { - int idx = groupedMessages.posArray.indexOf(position); - int size = groupedMessages.posArray.size(); - if ((position.flags & MessageObject.POSITION_FLAG_BOTTOM) != 0) { - nextPosition = p - size + idx; - } else { - nextPosition = p - 1; - for (int a = idx + 1; a < size; a++) { - if (groupedMessages.posArray.get(a).minY > position.maxY) { - break; + } + break; + } else if (child instanceof ChatMessageCell) { + ChatMessageCell cell = (ChatMessageCell) child; + MessageObject.GroupedMessages group = cell.getCurrentMessagesGroup(); + if (group == null || group != lastDrawnGroup) { + lastDrawnGroup = group; + MessageObject.GroupedMessagePosition position = cell.getCurrentPosition(); + MessageBackgroundDrawable backgroundDrawable = cell.getBackgroundDrawable(); + if ((backgroundDrawable.isAnimationInProgress() || cell.isDrawingSelectionBackground()) && (position == null || (position.flags & MessageObject.POSITION_FLAG_RIGHT) != 0)) { + if (cell.isHighlighted() || cell.isHighlightedAnimated()) { + if (position == null) { + Paint backgroundPaint = getThemedPaint(Theme.key_paint_chatMessageBackgroundSelected); + if (themeDelegate != null && themeDelegate.isDark || backgroundPaint == null) { + backgroundPaint = Theme.chat_replyLinePaint; + backgroundPaint.setColor(getThemedColor(Theme.key_chat_selectedBackground)); + } else { + float viewTop = (isKeyboardVisible() ? 
chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(); + int backgroundHeight = contentView.getBackgroundSizeY(); + if (themeDelegate != null) { + themeDelegate.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, cell.getX(), viewTop); } else { - nextPosition--; + Theme.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, cell.getX(), viewTop); } } + canvas.save(); + canvas.translate(0, cell.getTranslationY()); + int wasAlpha = backgroundPaint.getAlpha(); + backgroundPaint.setAlpha((int) (wasAlpha * cell.getHighlightAlpha() * cell.getAlpha())); + canvas.drawRect(0, cell.getTop(), getMeasuredWidth(), cell.getBottom(), backgroundPaint); + backgroundPaint.setAlpha(wasAlpha); + canvas.restore(); } } else { - nextPosition = p - 1; - } - if (cell.willRemovedAfterAnimation()) { - View view = chatScrollHelper.positionToOldView.get(nextPosition); - if (view != null) { - if (child.getTranslationY() != 0) { - canvas.restore(); - } - imageReceiver.setVisible(false, false); - return result; - } - } else { - ViewHolder holder = chatListView.findViewHolderForAdapterPosition(nextPosition); - if (holder != null) { - if (child.getTranslationY() != 0) { - canvas.restore(); - } - imageReceiver.setVisible(false, false); - return result; - } - } - } - } - float tx = cell.getSlidingOffsetX() + cell.getCheckBoxTranslation(); - - int y = (int) ((replaceAnimation ? child.getTop() : child.getY()) + cell.getLayoutHeight() + cell.getTransitionParams().deltaBottom); - int maxY = chatListView.getMeasuredHeight() - chatListView.getPaddingBottom(); - if (cell.isPlayingRound() || cell.getTransitionParams().animatePlayingRound) { - if (cell.getTransitionParams().animatePlayingRound) { - float progressLocal = cell.getTransitionParams().animateChangeProgress; - if (!cell.isPlayingRound()) { - progressLocal = 1f - progressLocal; - } - int fromY = y; - int toY = Math.min(y, maxY); - y = (int) (fromY * progressLocal + toY * (1f - progressLocal)); - } - } else { - if (y > maxY) { - y = maxY; - } - } - - if (!replaceAnimation && child.getTranslationY() != 0) { - canvas.restore(); - } - if (cell.drawPinnedTop()) { - int p; - if (cell.willRemovedAfterAnimation()) { - p = chatScrollHelper.positionToOldView.indexOfValue(child); - if (p >= 0) { - p = chatScrollHelper.positionToOldView.keyAt(p); - } - } else { - ViewHolder holder = chatListView.getChildViewHolder(child); - p = holder.getAdapterPosition(); - } - if (p >= 0) { - int tries = 0; - while (true) { - if (tries >= 20) { - break; - } - tries++; - - int prevPosition; - if (groupedMessages != null && position != null) { - int idx = groupedMessages.posArray.indexOf(position); - if (idx < 0) { - break; - } - int size = groupedMessages.posArray.size(); - if ((position.flags & MessageObject.POSITION_FLAG_TOP) != 0) { - prevPosition = p + idx + 1; - } else { - prevPosition = p + 1; - for (int a = idx - 1; a >= 0; a--) { - if (groupedMessages.posArray.get(a).maxY < position.minY) { - break; - } else { - prevPosition++; - } - } - } + int y = (int) cell.getY(); + int height; + canvas.save(); + if (position == null) { + height = cell.getMeasuredHeight(); } else { - prevPosition = p + 1; - } - if (cell.willRemovedAfterAnimation()) { - View view = chatScrollHelper.positionToOldView.get(prevPosition); - if (view != null) { - top = view.getTop(); - if (view instanceof ChatMessageCell) { - cell = (ChatMessageCell) view; - if (!cell.drawPinnedTop()) { - break; - } else { - p = prevPosition; + height = y + cell.getMeasuredHeight(); + 
long time = 0; + float touchX = 0; + float touchY = 0; + for (int i = 0; i < count; i++) { + View inner = getChildAt(i); + if (inner instanceof ChatMessageCell) { + ChatMessageCell innerCell = (ChatMessageCell) inner; + MessageObject.GroupedMessages innerGroup = innerCell.getCurrentMessagesGroup(); + if (innerGroup == group) { + MessageBackgroundDrawable drawable = innerCell.getBackgroundDrawable(); + y = Math.min(y, (int) innerCell.getY()); + height = Math.max(height, (int) innerCell.getY() + innerCell.getMeasuredHeight()); + long touchTime = drawable.getLastTouchTime(); + if (touchTime > time) { + touchX = drawable.getTouchX() + innerCell.getX(); + touchY = drawable.getTouchY() + innerCell.getY(); + time = touchTime; + } } - } else { - break; } - } else { - break; } - } else { - ViewHolder holder = chatListView.findViewHolderForAdapterPosition(prevPosition); - if (holder != null) { - top = holder.itemView.getTop(); - if (holder.itemView instanceof ChatMessageCell) { - cell = (ChatMessageCell) holder.itemView; - if (!cell.drawPinnedTop()) { - break; - } else { - p = prevPosition; - } - } else { - break; - } + backgroundDrawable.setTouchCoordsOverride(touchX, touchY - y); + height -= y; + } + canvas.clipRect(0, y, getMeasuredWidth(), y + height); + Paint selectedBackgroundPaint = getThemedPaint(Theme.key_paint_chatMessageBackgroundSelected); + if (themeDelegate != null && !themeDelegate.isDark && selectedBackgroundPaint != null) { + backgroundDrawable.setCustomPaint(selectedBackgroundPaint); + float viewTop = (isKeyboardVisible() ? chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(); + int backgroundHeight = contentView.getBackgroundSizeY(); + if (themeDelegate != null) { + themeDelegate.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, cell.getX(), viewTop); } else { - break; + Theme.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, cell.getX(), viewTop); } + } else { + backgroundDrawable.setCustomPaint(null); + backgroundDrawable.setColor(getThemedColor(Theme.key_chat_selectedBackground)); } + backgroundDrawable.setBounds(0, y, getMeasuredWidth(), y + height); + backgroundDrawable.draw(canvas); + canvas.restore(); } } } - if (y - AndroidUtilities.dp(48) < top) { - y = top + AndroidUtilities.dp(48); - } - if (!cell.drawPinnedBottom()) { - int cellBottom = replaceAnimation ? 
cell.getBottom() : (int) (cell.getY() + cell.getMeasuredHeight() + cell.getTransitionParams().deltaBottom); - if (y > cellBottom) { - y = cellBottom; - } - } - canvas.save(); - if (tx != 0) { - canvas.translate(tx, 0); - } - if (cell.getCurrentMessagesGroup() != null) { - if (cell.getCurrentMessagesGroup().transitionParams.backgroundChangeBounds) { - y -= cell.getTranslationY(); - } - } - imageReceiver.setImageY(y - AndroidUtilities.dp(44)); - if (cell.shouldDrawAlphaLayer()) { - imageReceiver.setAlpha(cell.getAlpha()); - canvas.scale( + if (scrimView != cell && group == null && cell.drawBackgroundInParent()) { + canvas.save(); + canvas.translate(cell.getX(), cell.getY()); + if (cell.getScaleX() != 1f) { + canvas.scale( cell.getScaleX(), cell.getScaleY(), - cell.getX() + cell.getPivotX(), cell.getY() + (cell.getHeight() >> 1) - ); - } else { - imageReceiver.setAlpha(1f); + cell.getPivotX(), (cell.getHeight() >> 1) + ); + } + cell.drawBackgroundInternal(canvas, true); + canvas.restore(); } - imageReceiver.setVisible(true, false); - cell.drawStatusWithImage(canvas, imageReceiver, AndroidUtilities.dp(7)); - canvas.restore(); - - if (!replaceAnimation && child.getTranslationY() != 0) { + } else if (child instanceof ChatActionCell) { + ChatActionCell cell = (ChatActionCell) child; + if (cell.hasGradientService()) { canvas.save(); + canvas.translate(cell.getX(), cell.getY()); + canvas.scale(cell.getScaleX(), cell.getScaleY(), cell.getMeasuredWidth() / 2f, cell.getMeasuredHeight() / 2f); + cell.drawBackground(canvas, true); + canvas.restore(); } } } - - if (child.getTranslationY() != 0) { - canvas.restore(); - } - return result; - } - - @Override - public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { - if (currentEncryptedChat != null) { - return; + MessageObject.GroupedMessages scrimGroup = null; + if (scrimView instanceof ChatMessageCell) { + scrimGroup = ((ChatMessageCell) scrimView).getCurrentMessagesGroup(); } - super.onInitializeAccessibilityNodeInfo(info); - if (Build.VERSION.SDK_INT >= 19) { - AccessibilityNodeInfo.CollectionInfo collection = info.getCollectionInfo(); - if (collection != null) { - info.setCollectionInfo(AccessibilityNodeInfo.CollectionInfo.obtain(collection.getRowCount(), 1, false)); + for (int k = 0; k < 3; k++) { + drawingGroups.clear(); + if (k == 2 && !chatListView.isFastScrollAnimationRunning()) { + continue; } - } - } - - @Override - public AccessibilityNodeInfo createAccessibilityNodeInfo() { - if (currentEncryptedChat != null) { - return null; - } - return super.createAccessibilityNodeInfo(); - } - - @Override - public void invalidate() { - if (invalidated && slidingView == null) { - return; - } - invalidated = true; - super.invalidate(); - contentView.invalidateBlur(); - } - - @Override - public void onScrolled(int dx, int dy) { - super.onScrolled(dx, dy); - } - - @Override - public void onScrollStateChanged(int state) { - super.onScrollStateChanged(state); - } + for (int i = 0; i < count; i++) { + View child = chatListView.getChildAt(i); + if (child instanceof ChatMessageCell) { + ChatMessageCell cell = (ChatMessageCell) child; + if (child.getY() > chatListView.getHeight() || child.getY() + child.getHeight() < 0) { + continue; + } + MessageObject.GroupedMessages group = cell.getCurrentMessagesGroup(); + if (group == null || (k == 0 && group.messages.size() == 1) || (k == 1 && !group.transitionParams.drawBackgroundForDeletedItems)) { + continue; + } + if ((k == 0 && cell.getMessageObject().deleted) || (k == 1 && 
!cell.getMessageObject().deleted)) { + continue; + } + if ((k == 2 && !cell.willRemovedAfterAnimation()) || (k != 2 && cell.willRemovedAfterAnimation())) { + continue; + } - @Override - protected void onScrollChanged(int l, int t, int oldl, int oldt) { - super.onScrollChanged(l, t, oldl, oldt); - } + if (!drawingGroups.contains(group)) { + group.transitionParams.left = 0; + group.transitionParams.top = 0; + group.transitionParams.right = 0; + group.transitionParams.bottom = 0; - @Override - public void onNestedScroll(View target, int dxConsumed, int dyConsumed, int dxUnconsumed, int dyUnconsumed) { - super.onNestedScroll(target, dxConsumed, dyConsumed, dxUnconsumed, dyUnconsumed); - } - }; - if (currentEncryptedChat != null && Build.VERSION.SDK_INT >= 19) { - chatListView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO_HIDE_DESCENDANTS); - } - chatListView.setAccessibilityEnabled(false); - chatListView.setNestedScrollingEnabled(false); - chatListView.setInstantClick(true); - chatListView.setDisableHighlightState(true); - chatListView.setTag(1); - chatListView.setVerticalScrollBarEnabled(!SharedConfig.chatBlurEnabled()); - chatListView.setAdapter(chatAdapter = new ChatActivityAdapter(context)); - chatListView.setClipToPadding(false); - chatListView.setAnimateEmptyView(true, RecyclerListView.EMPTY_VIEW_ANIMATION_TYPE_ALPHA_SCALE); - chatListView.setScrollBarStyle(View.SCROLLBARS_OUTSIDE_OVERLAY); - chatListViewPaddingTop = 0; - invalidateChatListViewTopPadding(); - if (MessagesController.getGlobalMainSettings().getBoolean("view_animations", true)) { - chatListItemAnimator = new ChatListItemAnimator(this, chatListView, themeDelegate) { + group.transitionParams.pinnedBotton = false; + group.transitionParams.pinnedTop = false; + group.transitionParams.cell = cell; + drawingGroups.add(group); + } - Runnable finishRunnable; + group.transitionParams.pinnedTop = cell.isPinnedTop(); + group.transitionParams.pinnedBotton = cell.isPinnedBottom(); - @Override - public void checkIsRunning() { - if (scrollAnimationIndex == -1) { - scrollAnimationIndex = getNotificationCenter().setAnimationInProgress(scrollAnimationIndex, allowedNotificationsDuringChatListAnimations, false); - } - } + int left = (cell.getLeft() + cell.getBackgroundDrawableLeft()); + int right = (cell.getLeft() + cell.getBackgroundDrawableRight()); + int top = (cell.getTop() + cell.getBackgroundDrawableTop()); + int bottom = (cell.getTop() + cell.getBackgroundDrawableBottom()); - @Override - public void onAnimationStart() { - scrollAnimationIndex = getNotificationCenter().setAnimationInProgress(scrollAnimationIndex, allowedNotificationsDuringChatListAnimations, false); - if (finishRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(finishRunnable); - finishRunnable = null; - } - if (BuildVars.LOGS_ENABLED) { - FileLog.d("chatItemAnimator disable notifications"); - } - chatActivityEnterView.getAdjustPanLayoutHelper().runDelayedAnimation(); - chatActivityEnterView.runEmojiPanelAnimation(); - } + if ((cell.getCurrentPosition().flags & MessageObject.POSITION_FLAG_TOP) == 0) { + top -= AndroidUtilities.dp(10); + } - @Override - protected void onAllAnimationsDone() { - super.onAllAnimationsDone(); - if (finishRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(finishRunnable); - finishRunnable = null; - } - AndroidUtilities.runOnUIThread(finishRunnable = () -> { - finishRunnable = null; - if (scrollAnimationIndex != -1) { - getNotificationCenter().onAnimationFinish(scrollAnimationIndex); - scrollAnimationIndex = 
-1; - } - if (BuildVars.LOGS_ENABLED) { - FileLog.d("chatItemAnimator enable notifications"); - } - }); - } + if ((cell.getCurrentPosition().flags & MessageObject.POSITION_FLAG_BOTTOM) == 0) { + bottom += AndroidUtilities.dp(10); + } + if (cell.willRemovedAfterAnimation()) { + group.transitionParams.cell = cell; + } - @Override - public void endAnimations() { - super.endAnimations(); - if (finishRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(finishRunnable); + if (group.transitionParams.top == 0 || top < group.transitionParams.top) { + group.transitionParams.top = top; + } + if (group.transitionParams.bottom == 0 || bottom > group.transitionParams.bottom) { + group.transitionParams.bottom = bottom; + } + if (group.transitionParams.left == 0 || left < group.transitionParams.left) { + group.transitionParams.left = left; + } + if (group.transitionParams.right == 0 || right > group.transitionParams.right) { + group.transitionParams.right = right; + } + } } - AndroidUtilities.runOnUIThread(finishRunnable = () -> { - finishRunnable = null; - if (scrollAnimationIndex != -1) { - getNotificationCenter().onAnimationFinish(scrollAnimationIndex); - scrollAnimationIndex = -1; + + for (int i = 0; i < drawingGroups.size(); i++) { + MessageObject.GroupedMessages group = drawingGroups.get(i); + if (group == scrimGroup) { + continue; } - if (BuildVars.LOGS_ENABLED) { - FileLog.d("chatItemAnimator enable notifications"); + float x = group.transitionParams.cell.getNonAnimationTranslationX(true); + float l = (group.transitionParams.left + x + group.transitionParams.offsetLeft); + float t = (group.transitionParams.top + group.transitionParams.offsetTop); + float r = (group.transitionParams.right + x + group.transitionParams.offsetRight); + float b = (group.transitionParams.bottom + group.transitionParams.offsetBottom); + + if (!group.transitionParams.backgroundChangeBounds) { + t += group.transitionParams.cell.getTranslationY(); + b += group.transitionParams.cell.getTranslationY(); } - }); - } - }; - } - chatLayoutManager = new GridLayoutManagerFixed(context, 1000, LinearLayoutManager.VERTICAL, true) { + if (t < chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(20)) { + t = chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(20); + } - boolean computingScroll; + if (b > chatListView.getMeasuredHeight() + AndroidUtilities.dp(20)) { + b = chatListView.getMeasuredHeight() + AndroidUtilities.dp(20); + } - @Override - public int getStartForFixGap() { - int padding = (int) chatListViewPaddingTop; - if (isThreadChat() && (!isTopic || topicStarterMessageObject != null) && pinnedMessageView != null && pinnedMessageView.getVisibility() == View.VISIBLE) { - padding -= Math.max(0, AndroidUtilities.dp(48) + pinnedMessageEnterOffset); + boolean useScale = group.transitionParams.cell.getScaleX() != 1f || group.transitionParams.cell.getScaleY() != 1f; + if (useScale) { + canvas.save(); + canvas.scale(group.transitionParams.cell.getScaleX(), group.transitionParams.cell.getScaleY(), l + (r - l) / 2, t + (b - t) / 2); + } + boolean selected = true; + for (int a = 0, N = group.messages.size(); a < N; a++) { + MessageObject object = group.messages.get(a); + int index = object.getDialogId() == dialog_id ? 
0 : 1; + if (selectedMessagesIds[index].indexOfKey(object.getId()) < 0) { + selected = false; + break; + } + } + group.transitionParams.cell.drawBackground(canvas, (int) l, (int) t, (int) r, (int) b, group.transitionParams.pinnedTop, group.transitionParams.pinnedBotton, selected, contentView.getKeyboardHeight()); + group.transitionParams.cell = null; + group.transitionParams.drawCaptionLayout = group.hasCaption; + if (useScale) { + canvas.restore(); + for (int ii = 0; ii < count; ii++) { + View child = chatListView.getChildAt(ii); + if (child instanceof ChatMessageCell && ((ChatMessageCell) child).getCurrentMessagesGroup() == group) { + ChatMessageCell cell = ((ChatMessageCell) child); + int left = cell.getLeft(); + int top = cell.getTop(); + child.setPivotX(l - left + (r - l) / 2); + child.setPivotY(t - top + (b - t) / 2); + } + } + } + } } - return padding; } @Override - protected int getParentStart() { - if (computingScroll) { - return (int) chatListViewPaddingTop; + public boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (isSkeletonVisible()) { + invalidated = false; + invalidate(); } - return 0; - } - @Override - public int getStartAfterPadding() { - if (computingScroll) { - return (int) chatListViewPaddingTop; - } - return super.getStartAfterPadding(); - } + int clipLeft = 0; + int clipBottom = 0; + boolean skipDraw = child == scrimView; + ChatMessageCell cell; + ChatActionCell actionCell = null; + float cilpTop = chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4); - @Override - public int getTotalSpace() { - if (computingScroll) { - return (int) (getHeight() - chatListViewPaddingTop - getPaddingBottom()); + if (child.getY() > getMeasuredHeight() || child.getY() + child.getMeasuredHeight() < cilpTop) { + skipDraw = true; } - return super.getTotalSpace(); - } - @Override - public int computeVerticalScrollExtent(RecyclerView.State state) { - computingScroll = true; - int r = super.computeVerticalScrollExtent(state); - computingScroll = false; - return r; - } + MessageObject.GroupedMessages group = null; - @Override - public int computeVerticalScrollOffset(RecyclerView.State state) { - computingScroll = true; - int r = super.computeVerticalScrollOffset(state); - computingScroll = false; - return r; - } + if (child instanceof ChatMessageCell) { + cell = (ChatMessageCell) child; + if (animateSendingViews.contains(cell)) { + skipDraw = true; + } + MessageObject.GroupedMessagePosition position = cell.getCurrentPosition(); + group = cell.getCurrentMessagesGroup(); + if (position != null) { + if (position.pw != position.spanSize && position.spanSize == 1000 && position.siblingHeights == null && group.hasSibling) { + clipLeft = cell.getBackgroundDrawableLeft(); + } else if (position.siblingHeights != null) { + clipBottom = child.getBottom() - AndroidUtilities.dp(1 + (cell.isPinnedBottom() ? 
1 : 0)); + } + } + if (cell.needDelayRoundProgressDraw()) { + drawLaterRoundProgressCell = cell; + } + if (!skipDraw && scrimView instanceof ChatMessageCell) { + ChatMessageCell cell2 = (ChatMessageCell) scrimView; + if (cell2.getCurrentMessagesGroup() != null && cell2.getCurrentMessagesGroup() == group) { + skipDraw = true; + } + } + if (skipDraw) { + cell.getPhotoImage().skipDraw(); + } + } else if (child instanceof ChatActionCell) { + actionCell = (ChatActionCell) child; + cell = null; + } else { + cell = null; + } + if (clipLeft != 0) { + canvas.save(); + } else if (clipBottom != 0) { + canvas.save(); + } + boolean result; + if (!skipDraw) { + boolean clipToGroupBounds = group != null && group.transitionParams.backgroundChangeBounds; + if (clipToGroupBounds) { + canvas.save(); + float x = cell.getNonAnimationTranslationX(true); + float l = (group.transitionParams.left + x + group.transitionParams.offsetLeft); + float t = (group.transitionParams.top + group.transitionParams.offsetTop); + float r = (group.transitionParams.right + x + group.transitionParams.offsetRight); + float b = (group.transitionParams.bottom + group.transitionParams.offsetBottom); - @Override - public int computeVerticalScrollRange(RecyclerView.State state) { - computingScroll = true; - int r = super.computeVerticalScrollRange(state); - computingScroll = false; - return r; - } + canvas.clipRect( + l + AndroidUtilities.dp(4), + t + AndroidUtilities.dp(4), + r - AndroidUtilities.dp(4), + b - AndroidUtilities.dp(4) + ); + } + if (cell != null && clipToGroupBounds) { + cell.clipToGroupBounds = true; + result = super.drawChild(canvas, child, drawingTime); + cell.clipToGroupBounds = false; + } else { + result = super.drawChild(canvas, child, drawingTime); + } + if (clipToGroupBounds) { + canvas.restore(); + } + if (cell != null && cell.hasOutboundsContent()) { + canvas.save(); + canvas.translate(cell.getX(), cell.getY()); + cell.drawOutboundsContent(canvas); + canvas.restore(); + } else if (actionCell != null) { + canvas.save(); + canvas.translate(actionCell.getX(), actionCell.getY()); + actionCell.drawOutboundsContent(canvas); + canvas.restore(); + } + } else { + result = false; + } + if (clipLeft != 0 || clipBottom != 0) { + canvas.restore(); + } - @Override - public void scrollToPositionWithOffset(int position, int offset, boolean bottom) { - if (!bottom) { - offset = (int) (offset - getPaddingTop() + chatListViewPaddingTop); + if (child.getTranslationY() != 0) { + canvas.save(); + canvas.translate(0, child.getTranslationY()); } - super.scrollToPositionWithOffset(position, offset, bottom); - } - @Override - public boolean supportsPredictiveItemAnimations() { - return true; - } + if (cell != null) { + cell.drawCheckBox(canvas); + } - @Override - public void smoothScrollToPosition(RecyclerView recyclerView, RecyclerView.State state, int position) { - scrollByTouch = false; - LinearSmoothScrollerCustom linearSmoothScroller = new LinearSmoothScrollerCustom(recyclerView.getContext(), LinearSmoothScrollerCustom.POSITION_MIDDLE); - linearSmoothScroller.setTargetPosition(position); - startSmoothScroll(linearSmoothScroller); - } + if (child.getTranslationY() != 0) { + canvas.restore(); + } - @Override - public boolean shouldLayoutChildFromOpositeSide(View child) { - if (child instanceof ChatMessageCell) { - return !((ChatMessageCell) child).getMessageObject().isOutOwner(); + if (child.getTranslationY() != 0) { + canvas.save(); + canvas.translate(0, child.getTranslationY()); } - return false; - } + if (cell != null) { + 
MessageObject message = cell.getMessageObject(); + MessageObject.GroupedMessagePosition position = cell.getCurrentPosition(); + if (!skipDraw) { + if (position != null || cell.getTransitionParams().animateBackgroundBoundsInner) { + if (position == null || (position.last || position.minX == 0 && position.minY == 0)) { + if (position == null || position.last) { + drawTimeAfter.add(cell); + } + if ((position == null || (position.minX == 0 && position.minY == 0)) && cell.hasNameLayout()) { + drawNamesAfter.add(cell); + } + } + if (position != null || cell.getTransitionParams().transformGroupToSingleMessage || cell.getTransitionParams().animateBackgroundBoundsInner) { + if (position == null || (position.flags & MessageObject.POSITION_FLAG_BOTTOM) != 0) { + drawCaptionAfter.add(cell); + } + } + } + + if (videoPlayerContainer != null && (message.isRoundVideo() || message.isVideo()) && !message.isVoiceTranscriptionOpen() && MediaController.getInstance().isPlayingMessage(message)) { + ImageReceiver imageReceiver = cell.getPhotoImage(); + float newX = imageReceiver.getImageX() + cell.getX(); + float newY = cell.getY() + imageReceiver.getImageY() + chatListView.getY() - videoPlayerContainer.getTop(); + if (videoPlayerContainer.getTranslationX() != newX || videoPlayerContainer.getTranslationY() != newY) { + videoPlayerContainer.setTranslationX(newX); + videoPlayerContainer.setTranslationY(newY); + fragmentView.invalidate(); + videoPlayerContainer.invalidate(); + } + } + } + ImageReceiver imageReceiver = cell.getAvatarImage(); + if (imageReceiver != null) { + MessageObject.GroupedMessages groupedMessages = getValidGroupedMessage(message); + if (cell.getMessageObject().deleted) { + if (child.getTranslationY() != 0) { + canvas.restore(); + } + imageReceiver.setVisible(false, false); + return result; + } - @Override - protected boolean hasSiblingChild(int position) { - if (position >= chatAdapter.messagesStartRow && position < chatAdapter.messagesEndRow) { - int index = position - chatAdapter.messagesStartRow; - if (index >= 0 && index < messages.size()) { - MessageObject message = messages.get(index); - MessageObject.GroupedMessages group = getValidGroupedMessage(message); - if (group != null) { - MessageObject.GroupedMessagePosition pos = group.positions.get(message); - if (pos.minX == pos.maxX || pos.minY != pos.maxY || pos.minY == 0) { - return false; + boolean replaceAnimation = chatListView.isFastScrollAnimationRunning() || (groupedMessages != null && groupedMessages.transitionParams.backgroundChangeBounds); + int top = replaceAnimation ? 
child.getTop() : (int) child.getY(); + if (cell.drawPinnedBottom()) { + int p; + if (cell.willRemovedAfterAnimation()) { + p = chatScrollHelper.positionToOldView.indexOfValue(child); + if (p >= 0) { + p = chatScrollHelper.positionToOldView.keyAt(p); + } + } else { + ViewHolder holder = chatListView.getChildViewHolder(child); + p = holder.getAdapterPosition(); } - int count = group.posArray.size(); - for (int a = 0; a < count; a++) { - MessageObject.GroupedMessagePosition p = group.posArray.get(a); - if (p == pos) { - continue; + + if (p >= 0) { + int nextPosition; + if (groupedMessages != null && position != null) { + int idx = groupedMessages.posArray.indexOf(position); + int size = groupedMessages.posArray.size(); + if ((position.flags & MessageObject.POSITION_FLAG_BOTTOM) != 0) { + nextPosition = p - size + idx; + } else { + nextPosition = p - 1; + for (int a = idx + 1; a < size; a++) { + if (groupedMessages.posArray.get(a).minY > position.maxY) { + break; + } else { + nextPosition--; + } + } + } + } else { + nextPosition = p - 1; } - if (p.minY <= pos.minY && p.maxY >= pos.minY) { - return true; + if (cell.willRemovedAfterAnimation()) { + View view = chatScrollHelper.positionToOldView.get(nextPosition); + if (view != null) { + if (child.getTranslationY() != 0) { + canvas.restore(); + } + imageReceiver.setVisible(false, false); + return result; + } + } else { + ViewHolder holder = chatListView.findViewHolderForAdapterPosition(nextPosition); + if (holder != null) { + if (child.getTranslationY() != 0) { + canvas.restore(); + } + imageReceiver.setVisible(false, false); + return result; + } } } } - } - } - return false; - } + float tx = cell.getSlidingOffsetX() + cell.getCheckBoxTranslation(); - @Override - public void onLayoutChildren(RecyclerView.Recycler recycler, RecyclerView.State state) { - if (BuildVars.DEBUG_PRIVATE_VERSION) { - super.onLayoutChildren(recycler, state); - } else { - try { - super.onLayoutChildren(recycler, state); - } catch (Exception e) { - FileLog.e(e); - AndroidUtilities.runOnUIThread(() -> chatAdapter.notifyDataSetChanged(false)); - } - } - } + int y = (int) ((replaceAnimation ? 
child.getTop() : child.getY()) + cell.getLayoutHeight() + cell.getTransitionParams().deltaBottom); + int maxY = chatListView.getMeasuredHeight() - chatListView.getPaddingBottom(); + if (cell.isPlayingRound() || cell.getTransitionParams().animatePlayingRound) { + if (cell.getTransitionParams().animatePlayingRound) { + float progressLocal = cell.getTransitionParams().animateChangeProgress; + if (!cell.isPlayingRound()) { + progressLocal = 1f - progressLocal; + } + int fromY = y; + int toY = Math.min(y, maxY); + y = (int) (fromY * progressLocal + toY * (1f - progressLocal)); + } + } else { + if (y > maxY) { + y = maxY; + } + } - @Override - public int scrollVerticallyBy(int dy, RecyclerView.Recycler recycler, RecyclerView.State state) { - if (!NekoConfig.disableSwipeToNext.Bool() && dy < 0 && pullingDownOffset != 0) { - pullingDownOffset += dy; - if (pullingDownOffset < 0) { - dy = (int) pullingDownOffset; - pullingDownOffset = 0; - chatListView.invalidate(); - } else { - dy = 0; - } - } + if (!replaceAnimation && child.getTranslationY() != 0) { + canvas.restore(); + } + if (cell.drawPinnedTop()) { + int p; + if (cell.willRemovedAfterAnimation()) { + p = chatScrollHelper.positionToOldView.indexOfValue(child); + if (p >= 0) { + p = chatScrollHelper.positionToOldView.keyAt(p); + } + } else { + ViewHolder holder = chatListView.getChildViewHolder(child); + p = holder.getAdapterPosition(); + } + if (p >= 0) { + int tries = 0; + while (true) { + if (tries >= 20) { + break; + } + tries++; - int n = chatListView.getChildCount(); - int scrolled = 0; - boolean foundTopView = false; - for (int i = 0; i < n; i++) { - View child = chatListView.getChildAt(i); - float padding = chatListViewPaddingTop; - if (isThreadChat() && (!isTopic || topicStarterMessageObject != null) && pinnedMessageView != null && pinnedMessageView.getVisibility() == View.VISIBLE) { - padding -= Math.max(0, AndroidUtilities.dp(48) + pinnedMessageEnterOffset); - } - if (chatListView.getChildAdapterPosition(child) == chatAdapter.getItemCount() - 1) { - int dyLocal = dy; - if (child.getTop() - dy > padding) { - dyLocal = (int) (child.getTop() - padding); + int prevPosition; + if (groupedMessages != null && position != null) { + int idx = groupedMessages.posArray.indexOf(position); + if (idx < 0) { + break; + } + int size = groupedMessages.posArray.size(); + if ((position.flags & MessageObject.POSITION_FLAG_TOP) != 0) { + prevPosition = p + idx + 1; + } else { + prevPosition = p + 1; + for (int a = idx - 1; a >= 0; a--) { + if (groupedMessages.posArray.get(a).maxY < position.minY) { + break; + } else { + prevPosition++; + } + } + } + } else { + prevPosition = p + 1; + } + if (cell.willRemovedAfterAnimation()) { + View view = chatScrollHelper.positionToOldView.get(prevPosition); + if (view != null) { + top = view.getTop(); + if (view instanceof ChatMessageCell) { + cell = (ChatMessageCell) view; + if (!cell.drawPinnedTop()) { + break; + } else { + p = prevPosition; + } + } else { + break; + } + } else { + break; + } + } else { + ViewHolder holder = chatListView.findViewHolderForAdapterPosition(prevPosition); + if (holder != null) { + top = holder.itemView.getTop(); + if (holder.itemView instanceof ChatMessageCell) { + cell = (ChatMessageCell) holder.itemView; + if (!cell.drawPinnedTop()) { + break; + } else { + p = prevPosition; + } + } else { + break; + } + } else { + break; + } + } + } + } + } + if (y - AndroidUtilities.dp(48) < top) { + y = top + AndroidUtilities.dp(48); + } + if (!cell.drawPinnedBottom()) { + int cellBottom = 
replaceAnimation ? cell.getBottom() : (int) (cell.getY() + cell.getMeasuredHeight() + cell.getTransitionParams().deltaBottom); + if (y > cellBottom) { + y = cellBottom; + } + } + canvas.save(); + if (tx != 0) { + canvas.translate(tx, 0); + } + if (cell.getCurrentMessagesGroup() != null) { + if (cell.getCurrentMessagesGroup().transitionParams.backgroundChangeBounds) { + y -= cell.getTranslationY(); + } + } + imageReceiver.setImageY(y - AndroidUtilities.dp(44)); + if (cell.shouldDrawAlphaLayer()) { + imageReceiver.setAlpha(cell.getAlpha()); + canvas.scale( + cell.getScaleX(), cell.getScaleY(), + cell.getX() + cell.getPivotX(), cell.getY() + (cell.getHeight() >> 1) + ); + } else { + imageReceiver.setAlpha(1f); } - scrolled = super.scrollVerticallyBy(dyLocal, recycler, state); - foundTopView = true; - break; - } - } - if (!foundTopView) { - scrolled = super.scrollVerticallyBy(dy, recycler, state); - } - if (!NekoConfig.disableSwipeToNext.Bool() && dy > 0 && scrolled == 0 && ChatObject.isChannel(currentChat) && !currentChat.megagroup && chatListView.getScrollState() == RecyclerView.SCROLL_STATE_DRAGGING && !chatListView.isFastScrollAnimationRunning() && !chatListView.isMultiselect() && reportType < 0) { - if (pullingDownOffset == 0 && pullingDownDrawable != null) { - pullingDownDrawable.updateDialog(); - } - if (pullingDownBackAnimator != null) { - pullingDownBackAnimator.removeAllListeners(); - pullingDownBackAnimator.cancel(); - } + imageReceiver.setVisible(true, false); + cell.drawStatusWithImage(canvas, imageReceiver, AndroidUtilities.dp(7)); + canvas.restore(); - float k; - if (pullingDownOffset < AndroidUtilities.dp(110)) { - float progress = pullingDownOffset / AndroidUtilities.dp(110); - k = 0.65f * (1f - progress) + 0.45f * progress; - } else if (pullingDownOffset < AndroidUtilities.dp(160)) { - float progress = (pullingDownOffset - AndroidUtilities.dp(110)) / AndroidUtilities.dp(50); - k = 0.45f * (1f - progress) + 0.05f * progress; - } else { - k = 0.05f; + if (!replaceAnimation && child.getTranslationY() != 0) { + canvas.save(); + } } - - pullingDownOffset += dy * k; - ReactionsEffectOverlay.onScrolled((int) (dy * k)); - chatListView.invalidate(); - } - if (pullingDownOffset == 0) { - chatListView.setOverScrollMode(View.OVER_SCROLL_ALWAYS); - } else { - chatListView.setOverScrollMode(View.OVER_SCROLL_NEVER); } - if (pullingDownDrawable != null) { - pullingDownDrawable.showBottomPanel(pullingDownOffset > 0 && chatListView.getScrollState() == RecyclerView.SCROLL_STATE_DRAGGING); + + if (child.getTranslationY() != 0) { + canvas.restore(); } - return scrolled; + return result; } - }; - chatLayoutManager.setSpanSizeLookup(new GridLayoutManagerFixed.SpanSizeLookup() { + @Override - public int getSpanSize(int position) { - if (position >= chatAdapter.messagesStartRow && position < chatAdapter.messagesEndRow) { - int idx = position - chatAdapter.messagesStartRow; - if (idx >= 0 && idx < messages.size()) { - MessageObject message = messages.get(idx); - MessageObject.GroupedMessages groupedMessages = getValidGroupedMessage(message); - if (groupedMessages != null) { - return groupedMessages.positions.get(message).spanSize; - } + public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { + if (currentEncryptedChat != null) { + return; + } + super.onInitializeAccessibilityNodeInfo(info); + if (Build.VERSION.SDK_INT >= 19) { + AccessibilityNodeInfo.CollectionInfo collection = info.getCollectionInfo(); + if (collection != null) { + 
info.setCollectionInfo(AccessibilityNodeInfo.CollectionInfo.obtain(collection.getRowCount(), 1, false)); } } - return 1000; } - }); - chatListView.setLayoutManager(chatLayoutManager); - chatListView.addItemDecoration(new RecyclerView.ItemDecoration() { + @Override - public void getItemOffsets(Rect outRect, View view, RecyclerView parent, RecyclerView.State state) { - outRect.bottom = 0; - if (view instanceof ChatMessageCell) { - ChatMessageCell cell = (ChatMessageCell) view; - MessageObject.GroupedMessages group = cell.getCurrentMessagesGroup(); - if (group != null) { - MessageObject.GroupedMessagePosition position = cell.getCurrentPosition(); - if (position != null && position.siblingHeights != null) { - float maxHeight = Math.max(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y) * 0.5f; - int h = cell.getExtraInsetHeight(); - for (int a = 0; a < position.siblingHeights.length; a++) { - h += (int) Math.ceil(maxHeight * position.siblingHeights[a]); - } - h += (position.maxY - position.minY) * Math.round(7 * AndroidUtilities.density); - int count = group.posArray.size(); - for (int a = 0; a < count; a++) { - MessageObject.GroupedMessagePosition pos = group.posArray.get(a); - if (pos.minY != position.minY || pos.minX == position.minX && pos.maxX == position.maxX && pos.minY == position.minY && pos.maxY == position.maxY) { - continue; - } - if (pos.minY == position.minY) { - h -= (int) Math.ceil(maxHeight * pos.ph) - AndroidUtilities.dp(4); - break; - } - } - outRect.bottom = -h; - } - } + public AccessibilityNodeInfo createAccessibilityNodeInfo() { + if (currentEncryptedChat != null) { + return null; } + return super.createAccessibilityNodeInfo(); } - }); - contentView.addView(chatListView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - chatListView.setOnItemLongClickListener(onItemLongClickListener); - chatListView.setOnItemClickListener(onItemClickListener); - chatListView.setOnScrollListener(new RecyclerView.OnScrollListener() { - - private float totalDy = 0; - private boolean scrollUp; - private final int scrollValue = AndroidUtilities.dp(100); @Override - public void onScrollStateChanged(RecyclerView recyclerView, int newState) { - if (newState == RecyclerView.SCROLL_STATE_IDLE) { - if (pollHintCell != null) { - pollHintView.showForMessageCell(pollHintCell, -1, pollHintX, pollHintY, true); - pollHintCell = null; - } - scrollingFloatingDate = false; - scrollingChatListView = false; - checkTextureViewPosition = false; - hideFloatingDateView(true); - checkAutoDownloadMessages(scrollUp); - if (SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW) { - NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.startAllHeavyOperations, 512); - } - NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.startSpoilers); - chatListView.setOverScrollMode(RecyclerView.OVER_SCROLL_ALWAYS); - textSelectionHelper.stopScrolling(); - updateVisibleRows(); - scrollByTouch = false; - } else { - if (newState == RecyclerView.SCROLL_STATE_SETTLING) { - wasManualScroll = true; - scrollingChatListView = true; - } else if (newState == RecyclerView.SCROLL_STATE_DRAGGING) { - if (NekoConfig.hideKeyboardOnChatScroll.Bool()) { - AndroidUtilities.hideKeyboard(getParentActivity().getCurrentFocus()); - } - pollHintCell = null; - wasManualScroll = true; - scrollingFloatingDate = true; - checkTextureViewPosition = true; - scrollingChatListView = true; - } - if (SharedConfig.getDevicePerformanceClass() == 
SharedConfig.PERFORMANCE_CLASS_LOW) { - NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.stopAllHeavyOperations, 512); - } - NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.stopSpoilers); + public void invalidate() { + if (invalidated && slidingView == null) { + return; + } + invalidated = true; + super.invalidate(); + contentView.invalidateBlur(); + if (selectionReactionsOverlay != null && selectionReactionsOverlay.isVisible()) { + selectionReactionsOverlay.invalidatePosition(); } } @Override - public void onScrolled(RecyclerView recyclerView, int dx, int dy) { - chatListView.invalidate(); - scrollUp = dy < 0; - int firstVisibleItem = chatLayoutManager.findFirstVisibleItemPosition(); - if (dy != 0 && (scrollByTouch && recyclerView.getScrollState() == RecyclerView.SCROLL_STATE_SETTLING) || recyclerView.getScrollState() == RecyclerView.SCROLL_STATE_DRAGGING) { - if (forceNextPinnedMessageId != 0) { - if ((!scrollUp || forceScrollToFirst)) { - forceNextPinnedMessageId = 0; - } else if (!chatListView.isFastScrollAnimationRunning() && firstVisibleItem != RecyclerView.NO_POSITION) { - int lastVisibleItem = chatLayoutManager.findLastVisibleItemPosition(); - MessageObject messageObject = null; - boolean foundForceNextPinnedView = false; - for (int i = lastVisibleItem; i >= firstVisibleItem; i--) { - View view = chatLayoutManager.findViewByPosition(i); - if (view instanceof ChatMessageCell) { - messageObject = ((ChatMessageCell) view).getMessageObject(); - } else if (view instanceof ChatActionCell) { - messageObject = ((ChatActionCell) view).getMessageObject(); - } - if (messageObject != null) { - if (forceNextPinnedMessageId == messageObject.getId()) { - foundForceNextPinnedView = true; - break; - } - } - } - if (!foundForceNextPinnedView && messageObject != null && messageObject.getId() < forceNextPinnedMessageId) { - forceNextPinnedMessageId = 0; - } - } + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + chatListViewAttached = true; + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + chatListViewAttached = false; + } + }; + if (currentEncryptedChat != null && Build.VERSION.SDK_INT >= 19) { + chatListView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO_HIDE_DESCENDANTS); + } + chatListView.setAccessibilityEnabled(false); + chatListView.setNestedScrollingEnabled(false); + chatListView.setInstantClick(true); + chatListView.setDisableHighlightState(true); + chatListView.setTag(1); + chatListView.setVerticalScrollBarEnabled(!SharedConfig.chatBlurEnabled()); + chatListView.setAdapter(chatAdapter = new ChatActivityAdapter(context)); + chatListView.setClipToPadding(false); + chatListView.setAnimateEmptyView(true, RecyclerListView.EMPTY_VIEW_ANIMATION_TYPE_ALPHA_SCALE); + chatListView.setScrollBarStyle(View.SCROLLBARS_OUTSIDE_OVERLAY); + chatListViewPaddingTop = 0; + invalidateChatListViewTopPadding(); + if (MessagesController.getGlobalMainSettings().getBoolean("view_animations", true)) { + chatListItemAnimator = new ChatListItemAnimator(this, chatListView, themeDelegate) { + Runnable finishRunnable; + + @Override + public void checkIsRunning() { + if (scrollAnimationIndex == -1) { + scrollAnimationIndex = getNotificationCenter().setAnimationInProgress(scrollAnimationIndex, allowedNotificationsDuringChatListAnimations, false); } } - if (recyclerView.getScrollState() == RecyclerView.SCROLL_STATE_DRAGGING) { - forceScrollToFirst = false; - if (!wasManualScroll && dy 
!= 0) { - wasManualScroll = true; + + @Override + public void onAnimationStart() { + scrollAnimationIndex = getNotificationCenter().setAnimationInProgress(scrollAnimationIndex, allowedNotificationsDuringChatListAnimations, false); + if (finishRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(finishRunnable); + finishRunnable = null; } + if (BuildVars.LOGS_ENABLED) { + FileLog.d("chatItemAnimator disable notifications"); + } + chatActivityEnterView.getAdjustPanLayoutHelper().runDelayedAnimation(); + chatActivityEnterView.runEmojiPanelAnimation(); } - if (dy != 0) { - hideHints(true); - } - if (dy != 0 && scrollingFloatingDate && !currentFloatingTopIsNotMessage) { - if (highlightMessageId != Integer.MAX_VALUE) { - removeSelectedMessageHighlight(); - updateVisibleRows(); + + @Override + protected void onAllAnimationsDone() { + super.onAllAnimationsDone(); + if (finishRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(finishRunnable); + finishRunnable = null; } - showFloatingDateView(true); - } - checkScrollForLoad(true); - if (firstVisibleItem != RecyclerView.NO_POSITION) { - int totalItemCount = chatAdapter.getItemCount(); - if (firstVisibleItem == 0 && forwardEndReached[0]) { - if (dy >= 0) { - canShowPagedownButton = false; - updatePagedownButtonVisibility(true); + AndroidUtilities.runOnUIThread(finishRunnable = () -> { + finishRunnable = null; + if (scrollAnimationIndex != -1) { + getNotificationCenter().onAnimationFinish(scrollAnimationIndex); + scrollAnimationIndex = -1; } - } else { - if (dy > 0) { - if (pagedownButton.getTag() == null) { - totalDy += dy; - if (totalDy > scrollValue) { - totalDy = 0; - canShowPagedownButton = true; - updatePagedownButtonVisibility(true); - pagedownButtonShowedByScroll = true; - } - } - } else { - if (pagedownButtonShowedByScroll && pagedownButton.getTag() != null) { - totalDy += dy; - if (totalDy < -scrollValue) { - canShowPagedownButton = false; - updatePagedownButtonVisibility(true); - totalDy = 0; - } - } + if (BuildVars.LOGS_ENABLED) { + FileLog.d("chatItemAnimator enable notifications"); } - } + }); } - invalidateMessagesVisiblePart(); - textSelectionHelper.onParentScrolled(); - emojiAnimationsOverlay.onScrolled(dy); - ReactionsEffectOverlay.onScrolled(dy); - } - }); - - animatingImageView = new ClippingImageView(context); - animatingImageView.setVisibility(View.GONE); - contentView.addView(animatingImageView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - progressView = new FrameLayout(context); - progressView.setVisibility(View.INVISIBLE); - contentView.addView(progressView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.TOP | Gravity.LEFT)); - progressView2 = new View(context); - progressView2.setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(18), progressView2, contentView, getThemedPaint(Theme.key_paint_chatActionBackground))); - progressView.addView(progressView2, LayoutHelper.createFrame(36, 36, Gravity.CENTER)); + @Override + public void endAnimations() { + super.endAnimations(); + if (finishRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(finishRunnable); + } + AndroidUtilities.runOnUIThread(finishRunnable = () -> { + finishRunnable = null; + if (scrollAnimationIndex != -1) { + getNotificationCenter().onAnimationFinish(scrollAnimationIndex); + scrollAnimationIndex = -1; + } + if (BuildVars.LOGS_ENABLED) { + FileLog.d("chatItemAnimator enable notifications"); + } + }); + } + }; + } - progressBar = new 
RadialProgressView(context, themeDelegate); - progressBar.setSize(AndroidUtilities.dp(28)); - progressBar.setProgressColor(getThemedColor(Theme.key_chat_serviceText)); - progressView.addView(progressBar, LayoutHelper.createFrame(32, 32, Gravity.CENTER)); + chatLayoutManager = new GridLayoutManagerFixed(context, 1000, LinearLayoutManager.VERTICAL, true) { - floatingDateView = new ChatActionCell(context, false, themeDelegate) { + boolean computingScroll; @Override - public void setTranslationY(float translationY) { - if (getTranslationY() != translationY) { - invalidate(); + public int getStartForFixGap() { + int padding = (int) chatListViewPaddingTop; + if (isThreadChat() && (!isTopic || topicStarterMessageObject != null) && pinnedMessageView != null && pinnedMessageView.getVisibility() == View.VISIBLE) { + padding -= Math.max(0, AndroidUtilities.dp(48) + pinnedMessageEnterOffset); } - super.setTranslationY(translationY); + return padding; } @Override - public boolean onInterceptTouchEvent(MotionEvent ev) { - if (getAlpha() == 0 || actionBar.isActionModeShowed() || reportType >= 0) { - return false; + protected int getParentStart() { + if (computingScroll) { + return (int) chatListViewPaddingTop; } - return super.onInterceptTouchEvent(ev); + return 0; } @Override - public boolean onTouchEvent(MotionEvent event) { - if (getAlpha() == 0 || actionBar.isActionModeShowed() || reportType >= 0) { - return false; + public int getStartAfterPadding() { + if (computingScroll) { + return (int) chatListViewPaddingTop; } - return super.onTouchEvent(event); + return super.getStartAfterPadding(); } @Override - protected void onDraw(Canvas canvas) { - float clipTop = chatListView.getY() + chatListViewPaddingTop - getY(); - clipTop -= AndroidUtilities.dp(4); - if (clipTop > 0) { - if (clipTop < getMeasuredHeight()) { - canvas.save(); - canvas.clipRect(0, clipTop, getMeasuredWidth(), getMeasuredHeight()); - super.onDraw(canvas); - canvas.restore(); - } - } else { - super.onDraw(canvas); + public int getTotalSpace() { + if (computingScroll) { + return (int) (getHeight() - chatListViewPaddingTop - getPaddingBottom()); } + return super.getTotalSpace(); } - }; - floatingDateView.setCustomDate((int) (System.currentTimeMillis() / 1000), false, false); - floatingDateView.setAlpha(0.0f); - floatingDateView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO); - floatingDateView.setInvalidateColors(true); - contentView.addView(floatingDateView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.CENTER_HORIZONTAL, 0, 4, 0, 0)); - floatingDateView.setOnClickListener(view -> { - if (floatingDateView.getAlpha() == 0 || actionBar.isActionModeShowed() || reportType >= 0) { - return; + + @Override + public int computeVerticalScrollExtent(RecyclerView.State state) { + computingScroll = true; + int r = super.computeVerticalScrollExtent(state); + computingScroll = false; + return r; } - Calendar calendar = Calendar.getInstance(); - calendar.setTimeInMillis((long) floatingDateView.getCustomDate() * 1000); - int year = calendar.get(Calendar.YEAR); - int monthOfYear = calendar.get(Calendar.MONTH); - int dayOfMonth = calendar.get(Calendar.DAY_OF_MONTH); - calendar.clear(); - calendar.set(year, monthOfYear, dayOfMonth); - jumpToDate((int) (calendar.getTime().getTime() / 1000)); - }); + @Override + public int computeVerticalScrollOffset(RecyclerView.State state) { + computingScroll = true; + int r = super.computeVerticalScrollOffset(state); + computingScroll = false; + 
return r; + } - floatingDateView.setOnLongClickListener(view -> { - if (getParentActivity() == null) { - return false; + @Override + public int computeVerticalScrollRange(RecyclerView.State state) { + computingScroll = true; + int r = super.computeVerticalScrollRange(state); + computingScroll = false; + return r; } - AndroidUtilities.hideKeyboard(searchItem.getSearchField()); - Calendar calendar = Calendar.getInstance(); - calendar.setTimeInMillis((long) floatingDateView.getCustomDate() * 1000); - int year = calendar.get(Calendar.YEAR); - int monthOfYear = calendar.get(Calendar.MONTH); - int dayOfMonth = calendar.get(Calendar.DAY_OF_MONTH); - calendar.clear(); - calendar.set(year, monthOfYear, dayOfMonth); - Bundle bundle = new Bundle(); - bundle.putLong("dialog_id", dialog_id); - bundle.putInt("topic_id", getTopicId()); - bundle.putInt("type", CalendarActivity.TYPE_CHAT_ACTIVITY); - CalendarActivity calendarActivity = new CalendarActivity(bundle, SharedMediaLayout.FILTER_PHOTOS_AND_VIDEOS, (int) (calendar.getTime().getTime() / 1000)); - presentFragment(calendarActivity); - return true; - }); - if (currentChat != null) { - pendingRequestsDelegate = new ChatActivityMemberRequestsDelegate(this, currentChat, this::invalidateChatListViewTopPadding); - pendingRequestsDelegate.setChatInfo(chatInfo, false); - contentView.addView(pendingRequestsDelegate.getView(), ViewGroup.LayoutParams.MATCH_PARENT, pendingRequestsDelegate.getViewHeight()); - } + @Override + public void scrollToPositionWithOffset(int position, int offset, boolean bottom) { + if (!bottom) { + offset = (int) (offset - getPaddingTop() + chatListViewPaddingTop); + } + super.scrollToPositionWithOffset(position, offset, bottom); + } - if (currentEncryptedChat == null) { - pinnedMessageView = new BlurredFrameLayout(context, contentView) { + @Override + public boolean supportsPredictiveItemAnimations() { + return true; + } - float lastY; - float startY; + @Override + public void smoothScrollToPosition(RecyclerView recyclerView, RecyclerView.State state, int position) { + scrollByTouch = false; + LinearSmoothScrollerCustom linearSmoothScroller = new LinearSmoothScrollerCustom(recyclerView.getContext(), LinearSmoothScrollerCustom.POSITION_MIDDLE); + linearSmoothScroller.setTargetPosition(position); + startSmoothScroll(linearSmoothScroller); + } - { - setOnLongClickListener(v -> { - if (AndroidUtilities.isTablet() || isThreadChat()) { - return false; - } - startY = lastY; - openPinnedMessagesList(true); - return true; - }); + @Override + public boolean shouldLayoutChildFromOpositeSide(View child) { + if (child instanceof ChatMessageCell) { + return !((ChatMessageCell) child).getMessageObject().isOutOwner(); } + return false; + } - @Override - public boolean onTouchEvent(MotionEvent event) { - lastY = event.getY(); - if (event.getAction() == MotionEvent.ACTION_UP) { - finishPreviewFragment(); - } else if (event.getAction() == MotionEvent.ACTION_MOVE) { - float dy = startY - lastY; - movePreviewFragment(dy); - if (dy < 0) { - startY = lastY; + + @Override + protected boolean hasSiblingChild(int position) { + if (position >= chatAdapter.messagesStartRow && position < chatAdapter.messagesEndRow) { + int index = position - chatAdapter.messagesStartRow; + if (index >= 0 && index < messages.size()) { + MessageObject message = messages.get(index); + MessageObject.GroupedMessages group = getValidGroupedMessage(message); + if (group != null) { + MessageObject.GroupedMessagePosition pos = group.positions.get(message); + if (pos.minX == pos.maxX || 
pos.minY != pos.maxY || pos.minY == 0) { + return false; + } + int count = group.posArray.size(); + for (int a = 0; a < count; a++) { + MessageObject.GroupedMessagePosition p = group.posArray.get(a); + if (p == pos) { + continue; + } + if (p.minY <= pos.minY && p.maxY >= pos.minY) { + return true; + } + } } } - return super.onTouchEvent(event); + } + return false; + } + + @Override + public void onLayoutChildren(RecyclerView.Recycler recycler, RecyclerView.State state) { + if (BuildVars.DEBUG_PRIVATE_VERSION) { + super.onLayoutChildren(recycler, state); + } else { + try { + super.onLayoutChildren(recycler, state); + } catch (Exception e) { + FileLog.e(e); + AndroidUtilities.runOnUIThread(() -> chatAdapter.notifyDataSetChanged(false)); + } + } + } + + @Override + public int scrollVerticallyBy(int dy, RecyclerView.Recycler recycler, RecyclerView.State state) { + if (!NekoConfig.disableSwipeToNext.Bool() && dy < 0 && pullingDownOffset != 0) { + pullingDownOffset += dy; + if (pullingDownOffset < 0) { + dy = (int) pullingDownOffset; + pullingDownOffset = 0; + chatListView.invalidate(); + } else { + dy = 0; + } } - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - super.onMeasure(widthMeasureSpec, heightMeasureSpec); - if (setPinnedTextTranslationX) { - for (int a = 0; a < pinnedNextAnimation.length; a++) { - if (pinnedNextAnimation[a] != null) { - pinnedNextAnimation[a].start(); - } + int n = chatListView.getChildCount(); + int scrolled = 0; + boolean foundTopView = false; + for (int i = 0; i < n; i++) { + View child = chatListView.getChildAt(i); + float padding = chatListViewPaddingTop; + if (isThreadChat() && (!isTopic || topicStarterMessageObject != null) && pinnedMessageView != null && pinnedMessageView.getVisibility() == View.VISIBLE) { + padding -= Math.max(0, AndroidUtilities.dp(48) + pinnedMessageEnterOffset); + } + if (chatListView.getChildAdapterPosition(child) == chatAdapter.getItemCount() - 1) { + int dyLocal = dy; + if (child.getTop() - dy > padding) { + dyLocal = (int) (child.getTop() - padding); } - setPinnedTextTranslationX = false; + scrolled = super.scrollVerticallyBy(dyLocal, recycler, state); + foundTopView = true; + break; } } - - @Override - protected boolean drawChild(Canvas canvas, View child, long drawingTime) { - if (child == pinnedLineView) { - canvas.save(); - canvas.clipRect(0, 0, getMeasuredWidth(), AndroidUtilities.dp(48)); + if (!foundTopView) { + scrolled = super.scrollVerticallyBy(dy, recycler, state); + } + if (!NekoConfig.disableSwipeToNext.Bool() && dy > 0 && scrolled == 0 && ChatObject.isChannel(currentChat) && !currentChat.megagroup && chatListView.getScrollState() == RecyclerView.SCROLL_STATE_DRAGGING && !chatListView.isFastScrollAnimationRunning() && !chatListView.isMultiselect() && reportType < 0) { + if (pullingDownOffset == 0 && pullingDownDrawable != null) { + pullingDownDrawable.updateDialog(); } - boolean result; - if (child == pinnedMessageTextView[0] || child == pinnedMessageTextView[1]) { - canvas.save(); - canvas.clipRect(0, 0, getMeasuredWidth() - AndroidUtilities.dp(38), getMeasuredHeight()); - result = super.drawChild(canvas, child, drawingTime); - canvas.restore(); + if (pullingDownBackAnimator != null) { + pullingDownBackAnimator.removeAllListeners(); + pullingDownBackAnimator.cancel(); + } + + float k; + if (pullingDownOffset < AndroidUtilities.dp(110)) { + float progress = pullingDownOffset / AndroidUtilities.dp(110); + k = 0.65f * (1f - progress) + 0.45f * progress; + } else if (pullingDownOffset < 
AndroidUtilities.dp(160)) { + float progress = (pullingDownOffset - AndroidUtilities.dp(110)) / AndroidUtilities.dp(50); + k = 0.45f * (1f - progress) + 0.05f * progress; } else { - result = super.drawChild(canvas, child, drawingTime); - if (child == pinnedLineView) { - canvas.restore(); - } + k = 0.05f; } - return result; + pullingDownOffset += dy * k; + ReactionsEffectOverlay.onScrolled((int) (dy * k)); + chatListView.invalidate(); } - }; - pinnedMessageView.setTag(1); - pinnedMessageEnterOffset = -AndroidUtilities.dp(50); - pinnedMessageView.setVisibility(View.GONE); - pinnedMessageView.setBackgroundResource(R.drawable.blockpanel); - pinnedMessageView.backgroundColor = getThemedColor(Theme.key_chat_topPanelBackground); - pinnedMessageView.backgroundPaddingBottom = AndroidUtilities.dp(2); - pinnedMessageView.getBackground().mutate().setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_topPanelBackground), PorterDuff.Mode.MULTIPLY)); - contentView.addView(pinnedMessageView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 50, Gravity.TOP | Gravity.LEFT)); - pinnedMessageView.setOnClickListener(v -> { - wasManualScroll = true; - if (isThreadChat() && !isTopic) { - scrollToMessageId(threadMessageId, 0, true, 0, true, 0); - } else if (currentPinnedMessageId != 0) { - int currentPinned = currentPinnedMessageId; - - int forceNextPinnedMessageId = 0; - if (!pinnedMessageIds.isEmpty()) { - if (currentPinned == pinnedMessageIds.get(pinnedMessageIds.size() - 1)) { - forceNextPinnedMessageId = pinnedMessageIds.get(0) + 1; - forceScrollToFirst = true; - } else { - forceNextPinnedMessageId = currentPinned - 1; - forceScrollToFirst = false; + if (pullingDownOffset == 0) { + chatListView.setOverScrollMode(View.OVER_SCROLL_ALWAYS); + } else { + chatListView.setOverScrollMode(View.OVER_SCROLL_NEVER); + } + if (pullingDownDrawable != null) { + pullingDownDrawable.showBottomPanel(pullingDownOffset > 0 && chatListView.getScrollState() == RecyclerView.SCROLL_STATE_DRAGGING); + } + return scrolled; + } + }; + chatLayoutManager.setSpanSizeLookup(new GridLayoutManagerFixed.SpanSizeLookup() { + @Override + public int getSpanSize(int position) { + if (position >= chatAdapter.messagesStartRow && position < chatAdapter.messagesEndRow) { + int idx = position - chatAdapter.messagesStartRow; + if (idx >= 0 && idx < messages.size()) { + MessageObject message = messages.get(idx); + MessageObject.GroupedMessages groupedMessages = getValidGroupedMessage(message); + if (groupedMessages != null) { + return groupedMessages.positions.get(message).spanSize; } } - this.forceNextPinnedMessageId = forceNextPinnedMessageId; - if (!forceScrollToFirst) { - forceNextPinnedMessageId = -forceNextPinnedMessageId; - } - scrollToMessageId(currentPinned, 0, true, 0, true, forceNextPinnedMessageId); - updateMessagesVisiblePart(false); } - }); - pinnedMessageView.setEnabled(!isInPreviewMode()); - - View selector = new View(context); - selector.setBackground(Theme.getSelectorDrawable(false)); - pinnedMessageView.addView(selector, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 2)); - - pinnedLineView = new PinnedLineView(context, themeDelegate); - pinnedMessageView.addView(pinnedLineView, LayoutHelper.createFrame(2, 48, Gravity.LEFT | Gravity.TOP, 8, 0, 0, 0)); - pinnedMessageView.setClipChildren(false); - - pinnedCounterTextView = new NumberTextView(context); - pinnedCounterTextView.setAddNumber(); - pinnedCounterTextView.setTextSize(14); - 
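// Hedged sketch, not part of the patch: the pull-to-next-channel hunk above damps the incoming
// scroll delta with a piecewise-linear factor k (0.65 -> 0.45 over the first 110dp of pull,
// 0.45 -> 0.05 over the next 50dp, then a constant 0.05) before adding dy * k to pullingDownOffset.
// The class, method and parameter names below are illustrative only; dp breakpoints are passed in
// as pixel values instead of calling AndroidUtilities.dp() so the sketch runs on plain Java.
final class PullDamping {
    // Returns the damping factor applied to a scroll delta for a given pulled offset (in px).
    static float factor(float offsetPx, float px110dp, float px160dp) {
        if (offsetPx < px110dp) {
            float progress = offsetPx / px110dp;               // 0..1 over the first 110dp
            return 0.65f * (1f - progress) + 0.45f * progress; // 0.65 -> 0.45
        } else if (offsetPx < px160dp) {
            float progress = (offsetPx - px110dp) / (px160dp - px110dp); // 0..1 over the next 50dp
            return 0.45f * (1f - progress) + 0.05f * progress; // 0.45 -> 0.05
        }
        return 0.05f;                                          // nearly rigid once fully pulled
    }

    public static void main(String[] args) {
        // e.g. with density = 3: 110dp = 330px, 160dp = 480px
        for (float offset = 0f; offset <= 500f; offset += 100f) {
            System.out.printf("offset=%4.0fpx  k=%.2f%n", offset, factor(offset, 330f, 480f));
        }
    }
}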
pinnedCounterTextView.setTextColor(getThemedColor(Theme.key_chat_topPanelTitle)); - pinnedCounterTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - pinnedMessageView.addView(pinnedCounterTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 18, 7, 44, 0)); - - for (int a = 0; a < 2; a++) { - pinnedNameTextView[a] = new TrackingWidthSimpleTextView(context); - pinnedNameTextView[a].setTextSize(14); - pinnedNameTextView[a].setTextColor(getThemedColor(Theme.key_chat_topPanelTitle)); - pinnedNameTextView[a].setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - pinnedMessageView.addView(pinnedNameTextView[a], LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 18, 7.3f, 44, 0)); - - pinnedMessageTextView[a] = new SimpleTextView(context) { - @Override - public void setTranslationY(float translationY) { - super.setTranslationY(translationY); - if (this == pinnedMessageTextView[0] && pinnedNextAnimation[1] != null) { - if (forceScrollToFirst && translationY < 0) { - pinnedLineView.setTranslationY(translationY / 2); - } else { - pinnedLineView.setTranslationY(0); + return 1000; + } + }); + chatListView.setLayoutManager(chatLayoutManager); + chatListView.addItemDecoration(new RecyclerView.ItemDecoration() { + @Override + public void getItemOffsets(Rect outRect, View view, RecyclerView parent, RecyclerView.State state) { + outRect.bottom = 0; + if (view instanceof ChatMessageCell) { + ChatMessageCell cell = (ChatMessageCell) view; + MessageObject.GroupedMessages group = cell.getCurrentMessagesGroup(); + if (group != null) { + MessageObject.GroupedMessagePosition position = cell.getCurrentPosition(); + if (position != null && position.siblingHeights != null) { + float maxHeight = Math.max(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y) * 0.5f; + int h = cell.getExtraInsetHeight(); + for (int a = 0; a < position.siblingHeights.length; a++) { + h += (int) Math.ceil(maxHeight * position.siblingHeights[a]); + } + h += (position.maxY - position.minY) * Math.round(7 * AndroidUtilities.density); + int count = group.posArray.size(); + for (int a = 0; a < count; a++) { + MessageObject.GroupedMessagePosition pos = group.posArray.get(a); + if (pos.minY != position.minY || pos.minX == position.minX && pos.maxX == position.maxX && pos.minY == position.minY && pos.maxY == position.maxY) { + continue; + } + if (pos.minY == position.minY) { + h -= (int) Math.ceil(maxHeight * pos.ph) - AndroidUtilities.dp(4); + break; + } } + outRect.bottom = -h; } } - }; - pinnedMessageTextView[a].setTextSize(14); - pinnedMessageTextView[a].setTextColor(getThemedColor(Theme.key_chat_topPanelMessage)); - pinnedMessageView.addView(pinnedMessageTextView[a], LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 18, 25.3f, 44, 0)); - - pinnedMessageButton[a] = new PinnedMessageButton(context); - pinnedMessageView.addView(pinnedMessageButton[a], LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 28, Gravity.TOP | Gravity.RIGHT, 0, 10, 14, 0)); - - pinnedMessageImageView[a] = new BackupImageView(context) { - private SpoilerEffect spoilerEffect = new SpoilerEffect(); - private Path path = new Path(); - private float[] radii = new float[8]; - - @Override - protected void onDraw(Canvas canvas) { - super.onDraw(canvas); - - if (hasBlur) { - canvas.save(); - AndroidUtilities.rectTmp.set(0, 0, getWidth(), getHeight()); - - int[] rad = imageReceiver.getRoundRadius(); - radii[0] = radii[1] = 
rad[0]; - radii[2] = radii[3] = rad[1]; - radii[4] = radii[5] = rad[2]; - radii[6] = radii[7] = rad[3]; - - path.rewind(); - path.addRoundRect(AndroidUtilities.rectTmp, radii, Path.Direction.CW); - canvas.clipPath(path); + } + } + }); + chatListView.setOnItemLongClickListener(onItemLongClickListener); + chatListView.setOnItemClickListener(onItemClickListener); + chatListView.setOnScrollListener(new RecyclerView.OnScrollListener() { - int sColor = Color.WHITE; - spoilerEffect.setColor(ColorUtils.setAlphaComponent(sColor, (int) (Color.alpha(sColor) * 0.325f))); - spoilerEffect.setBounds(0, 0, getWidth(), getHeight()); - spoilerEffect.draw(canvas); + private float totalDy = 0; + private boolean scrollUp; + private final int scrollValue = AndroidUtilities.dp(100); - canvas.restore(); - invalidate(); - } + @Override + public void onScrollStateChanged(RecyclerView recyclerView, int newState) { + if (newState == RecyclerView.SCROLL_STATE_IDLE) { + if (pollHintCell != null) { + pollHintView.showForMessageCell(pollHintCell, -1, pollHintX, pollHintY, true); + pollHintCell = null; } - }; - pinnedMessageImageView[a].setBlurAllowed(true); - pinnedMessageImageView[a].setRoundRadius(AndroidUtilities.dp(2)); - pinnedMessageView.addView(pinnedMessageImageView[a], LayoutHelper.createFrame(32, 32, Gravity.TOP | Gravity.LEFT, 17, 8, 0, 0)); - if (a == 1) { - pinnedNameTextView[a].setVisibility(View.INVISIBLE); - pinnedMessageButton[a].setVisibility(View.INVISIBLE); - pinnedMessageTextView[a].setVisibility(View.INVISIBLE); - pinnedMessageImageView[a].setVisibility(View.INVISIBLE); - } - } - - pinnedListButton = new ImageView(context); - pinnedListButton.setImageResource(R.drawable.msg_pinnedlist); - pinnedListButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_topPanelClose), PorterDuff.Mode.SRC_IN)); - pinnedListButton.setScaleType(ImageView.ScaleType.CENTER); - pinnedListButton.setContentDescription(LocaleController.getString("AccPinnedMessagesList", R.string.AccPinnedMessagesList)); - pinnedListButton.setVisibility(View.INVISIBLE); - pinnedListButton.setAlpha(0.0f); - pinnedListButton.setScaleX(0.4f); - pinnedListButton.setScaleY(0.4f); - if (Build.VERSION.SDK_INT >= 21) { - pinnedListButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); - } - pinnedMessageView.addView(pinnedListButton, LayoutHelper.createFrame(36, 48, Gravity.RIGHT | Gravity.TOP, 0, 0, 7, 0)); - pinnedListButton.setOnClickListener(v -> openPinnedMessagesList(false)); - pinnedListButton.setOnLongClickListener(v -> { - if (getParentActivity() == null) { - return false; - } - boolean allowPin; - if (currentChat != null) { - allowPin = ChatObject.canPinMessages(currentChat); - } else if (currentEncryptedChat == null) { - if (userInfo != null) { - allowPin = userInfo.can_pin_message; - } else { - allowPin = false; + scrollingFloatingDate = false; + scrollingChatListView = false; + checkTextureViewPosition = false; + hideFloatingDateView(true); + if (SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW) { + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.startAllHeavyOperations, 512); } + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.startSpoilers); + chatListView.setOverScrollMode(RecyclerView.OVER_SCROLL_ALWAYS); + textSelectionHelper.stopScrolling(); + updateVisibleRows(); + scrollByTouch = false; } else { - allowPin = false; - } - BottomBuilder builder = new 
BottomBuilder(getParentActivity()); - if (allowPin) { - builder.addItem(LocaleController.getString("UnpinMessagesAll", R.string.UnpinMessagesAll), R.drawable.msg_unpin, true, c -> { - getMessagesController().unpinAllMessages(currentChat, currentUser); - return Unit.INSTANCE; - }); - } - builder.addItem(LocaleController.getString("DismissForYourself", R.string.DismissForYourself), R.drawable.msg_cancel, c -> { - SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); - if (chatInfo != null) { - preferences.edit().putInt("pin_" + dialog_id, chatInfo.pinned_msg_id).apply(); - } else if (userInfo != null) { - preferences.edit().putInt("pin_" + dialog_id, userInfo.pinned_msg_id).apply(); + if (newState == RecyclerView.SCROLL_STATE_SETTLING) { + wasManualScroll = true; + scrollingChatListView = true; + } else if (newState == RecyclerView.SCROLL_STATE_DRAGGING) { + if (NekoConfig.hideKeyboardOnChatScroll.Bool()) { + AndroidUtilities.hideKeyboard(getParentActivity().getCurrentFocus()); + } + pollHintCell = null; + wasManualScroll = true; + scrollingFloatingDate = true; + checkTextureViewPosition = true; + scrollingChatListView = true; } - updatePinnedMessageView(true); - return Unit.INSTANCE; - }); - builder.addCancelItem(); - builder.show(); - return true; - }); - - closePinned = new ImageView(context); - closePinned.setImageResource(R.drawable.miniplayer_close); - closePinned.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_topPanelClose), PorterDuff.Mode.SRC_IN)); - closePinned.setScaleType(ImageView.ScaleType.CENTER); - closePinned.setContentDescription(LocaleController.getString("Close", R.string.Close)); - - pinnedProgress = new RadialProgressView(context, themeDelegate); - pinnedProgress.setVisibility(View.GONE); - pinnedProgress.setSize(AndroidUtilities.dp(16)); - pinnedProgress.setStrokeWidth(2f); - pinnedProgress.setProgressColor(getThemedColor(Theme.key_chat_topPanelLine)); - pinnedMessageView.addView(pinnedProgress, LayoutHelper.createFrame(36, 48, Gravity.RIGHT | Gravity.TOP, 0, 0, 2, 0)); + if (SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW) { + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.stopAllHeavyOperations, 512); + } + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.stopSpoilers); - if (threadMessageId != 0) { - closePinned.setVisibility(View.GONE); - } - if (Build.VERSION.SDK_INT >= 21) { - closePinned.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector), 1, AndroidUtilities.dp(14))); - } - pinnedMessageView.addView(closePinned, LayoutHelper.createFrame(36, 48, Gravity.RIGHT | Gravity.TOP, 0, 0, 2, 0)); - closePinned.setOnClickListener(v -> { - if (getParentActivity() == null) { - return; - } - boolean allowPin; - if (currentChat != null) { - allowPin = ChatObject.canPinMessages(currentChat); - } else if (currentEncryptedChat == null) { - if (userInfo != null) { - allowPin = userInfo.can_pin_message; - } else { - allowPin = false; + if (selectionReactionsOverlay != null && selectionReactionsOverlay.isVisible()) { + selectionReactionsOverlay.setHiddenByScroll(true); } - } else { - allowPin = false; } - BottomBuilder builder = new BottomBuilder(getParentActivity()); - if (allowPin) { - builder.addItem(LocaleController.getString("UnpinMessageX", R.string.UnpinMessageX), R.drawable.msg_unpin, true, c -> { - MessageObject messageObject = pinnedMessageObjects.get(currentPinnedMessageId); - if 
(messageObject == null) { - messageObject = messagesDict[0].get(currentPinnedMessageId); + } + + @Override + public void onScrolled(RecyclerView recyclerView, int dx, int dy) { + chatListView.invalidate(); + scrollUp = dy < 0; + int firstVisibleItem = chatLayoutManager.findFirstVisibleItemPosition(); + if (dy != 0 && (scrollByTouch && recyclerView.getScrollState() == RecyclerView.SCROLL_STATE_SETTLING) || recyclerView.getScrollState() == RecyclerView.SCROLL_STATE_DRAGGING) { + if (forceNextPinnedMessageId != 0) { + if ((!scrollUp || forceScrollToFirst)) { + forceNextPinnedMessageId = 0; + } else if (!chatListView.isFastScrollAnimationRunning() && firstVisibleItem != RecyclerView.NO_POSITION) { + int lastVisibleItem = chatLayoutManager.findLastVisibleItemPosition(); + MessageObject messageObject = null; + boolean foundForceNextPinnedView = false; + for (int i = lastVisibleItem; i >= firstVisibleItem; i--) { + View view = chatLayoutManager.findViewByPosition(i); + if (view instanceof ChatMessageCell) { + messageObject = ((ChatMessageCell) view).getMessageObject(); + } else if (view instanceof ChatActionCell) { + messageObject = ((ChatActionCell) view).getMessageObject(); + } + if (messageObject != null) { + if (forceNextPinnedMessageId == messageObject.getId()) { + foundForceNextPinnedView = true; + break; + } + } + } + if (!foundForceNextPinnedView && messageObject != null && messageObject.getId() < forceNextPinnedMessageId) { + forceNextPinnedMessageId = 0; + } } - unpinMessage(messageObject); - return Unit.INSTANCE; - }); + } } - builder.addItem(LocaleController.getString("DismissForYourself", R.string.DismissForYourself), R.drawable.msg_cancel, c -> { - SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); - if (chatInfo != null) { - preferences.edit().putInt("pin_" + dialog_id, chatInfo.pinned_msg_id).apply(); - } else if (userInfo != null) { - preferences.edit().putInt("pin_" + dialog_id, userInfo.pinned_msg_id).apply(); + if (recyclerView.getScrollState() == RecyclerView.SCROLL_STATE_DRAGGING) { + forceScrollToFirst = false; + if (!wasManualScroll && dy != 0) { + wasManualScroll = true; } - updatePinnedMessageView(true); - return Unit.INSTANCE; - }); - builder.addCancelItem(); - builder.show(); - }); - } - - topChatPanelView = new BlurredFrameLayout(context, contentView) { - - private boolean ignoreLayout; - - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - int width = MeasureSpec.getSize(widthMeasureSpec); - if (addToContactsButton != null && addToContactsButton.getVisibility() == VISIBLE && reportSpamButton != null && reportSpamButton.getVisibility() == VISIBLE) { - width = (width - AndroidUtilities.dp(31)) / 2; } - ignoreLayout = true; - if (reportSpamButton != null && reportSpamButton.getVisibility() == VISIBLE) { - FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) reportSpamButton.getLayoutParams(); - layoutParams.width = width; - if (addToContactsButton != null && addToContactsButton.getVisibility() == VISIBLE) { - reportSpamButton.setPadding(AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4), 0); - layoutParams.leftMargin = width; - layoutParams.width -= AndroidUtilities.dp(15); - } else { - reportSpamButton.setPadding(AndroidUtilities.dp(48), 0, AndroidUtilities.dp(48), 0); - layoutParams.leftMargin = 0; + if (dy != 0) { + hideHints(true); + } + if (dy != 0 && scrollingFloatingDate && !currentFloatingTopIsNotMessage) { + if (highlightMessageId != Integer.MAX_VALUE) { + 
removeSelectedMessageHighlight(); + updateVisibleRows(); } + showFloatingDateView(true); } - if (addToContactsButton != null && addToContactsButton.getVisibility() == VISIBLE) { - FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) addToContactsButton.getLayoutParams(); - layoutParams.width = width; - if (reportSpamButton != null && reportSpamButton.getVisibility() == VISIBLE) { - addToContactsButton.setPadding(AndroidUtilities.dp(11), 0, AndroidUtilities.dp(4), 0); + checkScrollForLoad(true); + if (firstVisibleItem != RecyclerView.NO_POSITION) { + int totalItemCount = chatAdapter.getItemCount(); + if (firstVisibleItem == 0 && forwardEndReached[0]) { + if (dy >= 0) { + canShowPagedownButton = false; + updatePagedownButtonVisibility(true); + } } else { - addToContactsButton.setPadding(AndroidUtilities.dp(48), 0, AndroidUtilities.dp(48), 0); - layoutParams.leftMargin = 0; + if (dy > 0) { + if (pagedownButton.getTag() == null) { + totalDy += dy; + if (totalDy > scrollValue) { + totalDy = 0; + canShowPagedownButton = true; + updatePagedownButtonVisibility(true); + pagedownButtonShowedByScroll = true; + } + } + } else { + if (pagedownButtonShowedByScroll && pagedownButton.getTag() != null) { + totalDy += dy; + if (totalDy < -scrollValue) { + canShowPagedownButton = false; + updatePagedownButtonVisibility(true); + totalDy = 0; + } + } + } } } - ignoreLayout = false; - super.onMeasure(widthMeasureSpec, heightMeasureSpec); + invalidateMessagesVisiblePart(); + textSelectionHelper.onParentScrolled(); + emojiAnimationsOverlay.onScrolled(dy); + ReactionsEffectOverlay.onScrolled(dy); + + checkTranslation(false); } + }); + contentView.addView(chatListView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - @Override - public void requestLayout() { - if (ignoreLayout) { - return; - } - super.requestLayout(); - } - }; - topChatPanelView.backgroundColor = getThemedColor(Theme.key_chat_topPanelBackground); - topChatPanelView.backgroundPaddingBottom = AndroidUtilities.dp(2); - topChatPanelView.setTag(1); - topChatPanelViewOffset = -AndroidUtilities.dp(50); - invalidateChatListViewTopPadding(); - topChatPanelView.setClickable(true); - topChatPanelView.setVisibility(View.GONE); - topChatPanelView.setBackgroundResource(R.drawable.blockpanel); - topChatPanelView.getBackground().setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_topPanelBackground), PorterDuff.Mode.SRC_IN)); - contentView.addView(topChatPanelView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 50, Gravity.TOP | Gravity.LEFT)); + selectionReactionsOverlay = new ChatSelectionReactionMenuOverlay(this, context); + contentView.addView(selectionReactionsOverlay, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - reportSpamButton = new TextView(context); - reportSpamButton.setTextColor(getThemedColor(Theme.key_chat_reportSpam)); - if (Build.VERSION.SDK_INT >= 21) { - reportSpamButton.setBackground(Theme.createSelectorDrawable(getThemedColor(Theme.key_chat_reportSpam) & 0x19ffffff, 3)); - } - reportSpamButton.setTag(Theme.key_chat_reportSpam); - reportSpamButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); - reportSpamButton.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - reportSpamButton.setSingleLine(true); - reportSpamButton.setMaxLines(1); - reportSpamButton.setGravity(Gravity.CENTER); - topChatPanelView.addView(reportSpamButton, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 1)); - 
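// Hedged sketch, not part of the patch: the onScrolled() hunk above shows or hides the page-down
// button only after the accumulated scroll distance (totalDy) passes a fixed threshold
// (scrollValue = dp(100)), resetting the accumulator on every toggle. A simplified standalone
// version of that accumulator pattern; the class and field names here are illustrative.
final class ScrollThresholdToggle {
    private final int threshold;   // px; the patch uses AndroidUtilities.dp(100)
    private int totalDy;
    private boolean shown;

    ScrollThresholdToggle(int thresholdPx) { this.threshold = thresholdPx; }

    // Feed raw scroll deltas; returns true when the visibility state just changed.
    boolean onScrolled(int dy) {
        if (!shown && dy > 0) {
            totalDy += dy;
            if (totalDy > threshold) { totalDy = 0; shown = true; return true; }   // scrolled far enough back: show
        } else if (shown && dy < 0) {
            totalDy += dy;
            if (totalDy < -threshold) { totalDy = 0; shown = false; return true; } // scrolled forward again: hide
        }
        return false;
    }

    boolean isShown() { return shown; }

    public static void main(String[] args) {
        ScrollThresholdToggle toggle = new ScrollThresholdToggle(300);
        int[] deltas = {120, 120, 120, -150, -200};
        for (int dy : deltas) {
            System.out.println("dy=" + dy + " changed=" + toggle.onScrolled(dy) + " shown=" + toggle.isShown());
        }
    }
}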
reportSpamButton.setOnClickListener(v2 -> AlertsCreator.showBlockReportSpamAlert(ChatActivity.this, dialog_id, currentUser, currentChat, currentEncryptedChat, reportSpamButton.getTag(R.id.object_tag) != null, chatInfo, param -> { - if (param == 0) { - updateTopPanel(true); - } else { - finishFragment(); - } - }, themeDelegate)); + animatingImageView = new ClippingImageView(context); + animatingImageView.setVisibility(View.GONE); + contentView.addView(animatingImageView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - emojiStatusSpamHint = new LinkSpanDrawable.LinksTextView(context, themeDelegate) { - Layout lastLayout; - AnimatedEmojiSpan.EmojiGroupedSpans stack; - PorterDuffColorFilter colorFilter; - int lastColor; + progressView = new FrameLayout(context); + progressView.setVisibility(View.INVISIBLE); + contentView.addView(progressView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.TOP | Gravity.LEFT)); + + progressView2 = new View(context); + progressView2.setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(18), progressView2, contentView, getThemedPaint(Theme.key_paint_chatActionBackground))); + progressView.addView(progressView2, LayoutHelper.createFrame(36, 36, Gravity.CENTER)); + + progressBar = new RadialProgressView(context, themeDelegate); + progressBar.setSize(AndroidUtilities.dp(28)); + progressBar.setProgressColor(getThemedColor(Theme.key_chat_serviceText)); + progressView.addView(progressBar, LayoutHelper.createFrame(32, 32, Gravity.CENTER)); + + floatingDateView = new ChatActionCell(context, false, themeDelegate) { @Override - protected void onDetachedFromWindow() { - super.onDetachedFromWindow(); - AnimatedEmojiSpan.release(this, stack); - lastLayout = null; + public void setTranslationY(float translationY) { + if (getTranslationY() != translationY) { + invalidate(); + } + super.setTranslationY(translationY); } @Override - protected void dispatchDraw(Canvas canvas) { - super.dispatchDraw(canvas); - if (lastLayout != getLayout()) { - stack = AnimatedEmojiSpan.update(AnimatedEmojiDrawable.CACHE_TYPE_EMOJI_STATUS, this, stack, lastLayout = getLayout()); - } - int color = getThemedColor(Theme.key_windowBackgroundWhiteBlueIcon); - if (lastColor != color || colorFilter == null) { - colorFilter = new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY); - lastColor = color; + public boolean onInterceptTouchEvent(MotionEvent ev) { + if (getAlpha() == 0 || actionBar.isActionModeShowed() || reportType >= 0) { + return false; } - AnimatedEmojiSpan.drawAnimatedEmojis(canvas, getLayout(), stack, 0, null, 0, 0, 0, 1f, colorFilter); + return super.onInterceptTouchEvent(ev); } - }; - emojiStatusSpamHint.setTextColor(getThemedColor(Theme.key_chat_topPanelMessage)); - emojiStatusSpamHint.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13.3f); - emojiStatusSpamHint.setGravity(Gravity.CENTER); - emojiStatusSpamHint.setVisibility(View.GONE); - emojiStatusSpamHint.setLinkTextColor(getThemedColor(Theme.key_windowBackgroundWhiteLinkText)); - topChatPanelView.addView(emojiStatusSpamHint, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 25, 56, 25, 1)); - topViewSeparator1 = new View(context); - topViewSeparator1.setVisibility(View.GONE); - topViewSeparator1.setBackgroundColor(getThemedColor(Theme.key_divider)); - topViewSeparator1.setAlpha(.5f); - topChatPanelView.addView(topViewSeparator1, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 1f / 
AndroidUtilities.density, Gravity.LEFT | Gravity.BOTTOM, 0, 0, 0, 2)); - topViewSeparator2 = new View(context); - topViewSeparator2.setVisibility(View.GONE); - topViewSeparator2.setBackgroundColor(getThemedColor(Theme.key_divider)); - topChatPanelView.addView(topViewSeparator2, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 1f / AndroidUtilities.density, Gravity.LEFT | Gravity.TOP, 10, 50, 10, 1)); + @Override + public boolean onTouchEvent(MotionEvent event) { + if (getAlpha() == 0 || actionBar.isActionModeShowed() || reportType >= 0) { + return false; + } + return super.onTouchEvent(event); + } - addToContactsButton = new TextView(context); - addToContactsButton.setTextColor(getThemedColor(Theme.key_chat_addContact)); - addToContactsButton.setVisibility(View.GONE); - addToContactsButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); - addToContactsButton.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - addToContactsButton.setSingleLine(true); - addToContactsButton.setMaxLines(1); - addToContactsButton.setPadding(AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4), 0); - addToContactsButton.setGravity(Gravity.CENTER); - if (Build.VERSION.SDK_INT >= 21) { - addToContactsButton.setBackground(Theme.createSelectorDrawable(getThemedColor(Theme.key_chat_addContact) & 0x19ffffff, 3)); - } - topChatPanelView.addView(addToContactsButton, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 1)); - addToContactsButton.setOnClickListener(v -> { - if (addToContactsButtonArchive) { - getMessagesController().addDialogToFolder(dialog_id, 0, 0, 0); - undoView.showWithAction(dialog_id, UndoView.ACTION_CHAT_UNARCHIVED, null); - SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); - SharedPreferences.Editor editor = preferences.edit(); - editor.putBoolean("dialog_bar_archived" + dialog_id, false); - editor.putBoolean("dialog_bar_block" + dialog_id, false); - editor.putBoolean("dialog_bar_report" + dialog_id, false); - editor.commit(); - updateTopPanel(false); - getNotificationsController().clearDialogNotificationsSettings(dialog_id, getTopicId()); - } else if (addToContactsButton.getTag() != null && (Integer) addToContactsButton.getTag() == 4) { - if (chatInfo != null && chatInfo.participants != null) { - LongSparseArray users = new LongSparseArray<>(); - for (int a = 0; a < chatInfo.participants.participants.size(); a++) { - users.put(chatInfo.participants.participants.get(a).user_id, null); + @Override + protected void onDraw(Canvas canvas) { + float clipTop = chatListView.getY() + chatListViewPaddingTop - getY(); + clipTop -= AndroidUtilities.dp(4); + if (clipTop > 0) { + if (clipTop < getMeasuredHeight()) { + canvas.save(); + canvas.clipRect(0, clipTop, getMeasuredWidth(), getMeasuredHeight()); + super.onDraw(canvas); + canvas.restore(); } - long chatId = chatInfo.id; - InviteMembersBottomSheet bottomSheet = new InviteMembersBottomSheet(context, currentAccount, users, chatInfo.id, ChatActivity.this, themeDelegate); - bottomSheet.setDelegate((users1, fwdCount) -> { - int N = users1.size(); - int[] finished = new int[1]; - for (int a = 0; a < N; a++) { - TLRPC.User user = users1.get(a); - getMessagesController().addUserToChat(chatId, user, fwdCount, null, ChatActivity.this, () -> { - if (++finished[0] == N) { - BulletinFactory.of(ChatActivity.this).createUsersAddedBulletin(users1, currentChat).show(); - } - }); - } - getMessagesController().hidePeerSettingsBar(dialog_id, currentUser, currentChat); - 
updateTopPanel(true); - updateInfoTopView(true); - }); - bottomSheet.show(); + } else { + super.onDraw(canvas); } - } else if (addToContactsButton.getTag() != null) { - shareMyContact(1, null); - } else { - Bundle args = new Bundle(); - args.putLong("user_id", currentUser.id); - args.putBoolean("addContact", true); - ContactAddActivity activity = new ContactAddActivity(args); - activity.setDelegate(() -> undoView.showWithAction(dialog_id, UndoView.ACTION_CONTACT_ADDED, currentUser)); - presentFragment(activity); } - }); + }; + floatingDateView.setCustomDate((int) (System.currentTimeMillis() / 1000), false, false); + floatingDateView.setAlpha(0.0f); + floatingDateView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO); + floatingDateView.setInvalidateColors(true); + contentView.addView(floatingDateView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.CENTER_HORIZONTAL, 0, 4, 0, 0)); + floatingDateView.setOnClickListener(view -> { + if (floatingDateView.getAlpha() == 0 || actionBar.isActionModeShowed() || reportType >= 0) { + return; + } + Calendar calendar = Calendar.getInstance(); + calendar.setTimeInMillis((long) floatingDateView.getCustomDate() * 1000); + int year = calendar.get(Calendar.YEAR); + int monthOfYear = calendar.get(Calendar.MONTH); + int dayOfMonth = calendar.get(Calendar.DAY_OF_MONTH); - restartTopicButton = new TextView(context); - restartTopicButton.setTextColor(getThemedColor(Theme.key_chat_addContact)); - restartTopicButton.setVisibility(View.GONE); - restartTopicButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); - restartTopicButton.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - restartTopicButton.setSingleLine(true); - restartTopicButton.setMaxLines(1); - restartTopicButton.setPadding(AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4), 0); - restartTopicButton.setGravity(Gravity.CENTER); - restartTopicButton.setText(LocaleController.getString("RestartTopic", R.string.RestartTopic).toUpperCase()); - if (Build.VERSION.SDK_INT >= 21) { - restartTopicButton.setBackground(Theme.createSelectorDrawable(getThemedColor(Theme.key_chat_addContact) & 0x19ffffff, 3)); - } - topChatPanelView.addView(restartTopicButton, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 1)); - restartTopicButton.setOnClickListener(v -> { - getMessagesController().getTopicsController().toggleCloseTopic(currentChat.id, forumTopic.id, forumTopic.closed = false); - updateTopicButtons(); - updateBottomOverlay(); - updateTopPanel(true); + calendar.clear(); + calendar.set(year, monthOfYear, dayOfMonth); + jumpToDate((int) (calendar.getTime().getTime() / 1000)); }); - closeReportSpam = new ImageView(context); - closeReportSpam.setImageResource(R.drawable.miniplayer_close); - closeReportSpam.setContentDescription(LocaleController.getString("Close", R.string.Close)); - if (Build.VERSION.SDK_INT >= 21) { - closeReportSpam.setBackground(Theme.AdaptiveRipple.circle(getThemedColor(Theme.key_chat_topPanelClose))); - } - closeReportSpam.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_topPanelClose), PorterDuff.Mode.SRC_IN)); - closeReportSpam.setScaleType(ImageView.ScaleType.CENTER); - topChatPanelView.addView(closeReportSpam, LayoutHelper.createFrame(36, 36, Gravity.RIGHT | Gravity.TOP, 0, 6, 2, 0)); - closeReportSpam.setOnClickListener(v -> { - long did = dialog_id; - if (currentEncryptedChat != null) { - did = currentUser.id; + 
floatingDateView.setOnLongClickListener(view -> { + if (getParentActivity() == null) { + return false; } - getMessagesController().hidePeerSettingsBar(did, currentUser, currentChat); - updateTopPanel(true); - updateInfoTopView(true); - }); - - alertView = new FrameLayout(context); - alertView.setTag(1); - alertView.setVisibility(View.GONE); - alertView.setBackgroundResource(R.drawable.blockpanel); - alertView.getBackground().setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_topPanelBackground), PorterDuff.Mode.SRC_IN)); - contentView.addView(alertView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 50, Gravity.TOP | Gravity.LEFT)); - - alertNameTextView = new TextView(context); - alertNameTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); - alertNameTextView.setTextColor(getThemedColor(Theme.key_chat_topPanelTitle)); - alertNameTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - alertNameTextView.setSingleLine(true); - alertNameTextView.setEllipsize(TextUtils.TruncateAt.END); - alertNameTextView.setMaxLines(1); - alertView.addView(alertNameTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 8, 5, 8, 0)); + AndroidUtilities.hideKeyboard(searchItem.getSearchField()); + Calendar calendar = Calendar.getInstance(); + calendar.setTimeInMillis((long) floatingDateView.getCustomDate() * 1000); + int year = calendar.get(Calendar.YEAR); + int monthOfYear = calendar.get(Calendar.MONTH); + int dayOfMonth = calendar.get(Calendar.DAY_OF_MONTH); - alertTextView = new EmojiTextView(context); - alertTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); - alertTextView.setTextColor(getThemedColor(Theme.key_chat_topPanelMessage)); + calendar.clear(); + calendar.set(year, monthOfYear, dayOfMonth); + Bundle bundle = new Bundle(); + bundle.putLong("dialog_id", dialog_id); + bundle.putInt("topic_id", getTopicId()); + bundle.putInt("type", CalendarActivity.TYPE_CHAT_ACTIVITY); + CalendarActivity calendarActivity = new CalendarActivity(bundle, SharedMediaLayout.FILTER_PHOTOS_AND_VIDEOS, (int) (calendar.getTime().getTime() / 1000)); + presentFragment(calendarActivity); + return true; + }); + if (currentChat != null) { + pendingRequestsDelegate = new ChatActivityMemberRequestsDelegate(this, currentChat, this::invalidateChatListViewTopPadding); + pendingRequestsDelegate.setChatInfo(chatInfo, false); + contentView.addView(pendingRequestsDelegate.getView(), ViewGroup.LayoutParams.MATCH_PARENT, pendingRequestsDelegate.getViewHeight()); + } - alertTextView.setSingleLine(true); - alertTextView.setEllipsize(TextUtils.TruncateAt.END); - alertTextView.setMaxLines(1); - alertView.addView(alertTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 8, 23, 8, 0)); + pinnedMessageView = null; + undoView = null; + topUndoView = null; + topChatPanelView = null; + reportSpamButton = null; + emojiStatusSpamHint = null; + topViewSeparator1 = null; + topViewSeparator2 = null; + topViewSeparator3 = null; + addToContactsButton = null; + restartTopicButton = null; + closeReportSpam = null; + translateButton = null; pagedownButton = new FrameLayout(context); pagedownButton.setVisibility(View.INVISIBLE); @@ -7445,7 +5984,7 @@ protected void dispatchDraw(Canvas canvas) { pagedownButton.setOnLongClickListener(view -> { returnToMessageId = 0; returnToMessageIdsStack.clear(); - scrollToLastMessage(true, true); + onPageDownClicked(); return true; }); } @@ -7511,7 
+6050,6 @@ public void onClick(View view) { loadLastUnreadMention(); } }); - mentiondownButton.setOnLongClickListener(view -> { scrimPopupWindow = ReadAllMentionsMenu.show(ReadAllMentionsMenu.TYPE_MENTIONS, getParentActivity(), getParentLayout(), contentView, view, getResourceProvider(), () -> { for (int a = 0; a < messages.size(); a++) { @@ -7535,13 +6073,12 @@ public void onClick(View view) { scrimPopupWindowItems = null; chatLayoutManager.setCanScrollVertically(true); dimBehindView(false); - if (chatActivityEnterView != null) { + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { chatActivityEnterView.getEditField().setAllowDrawCursor(true); } }); - if (!NekoConfig.disableVibration.Bool()) { + if (!NekoConfig.disableVibration.Bool()) view.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } return true; }); @@ -7619,7 +6156,7 @@ public void sendSticker(TLRPC.Document sticker, String query, Object parent, boo } @Override - public boolean needSend() { + public boolean needSend(int contentType) { return true; } @@ -7652,9 +6189,7 @@ public long getDialogId() { return dialog_id; } }; - mentionContainer.getListView().setOnTouchListener((v, event) -> { - return ContentPreviewViewer.getInstance().onTouch(event, mentionContainer.getListView(), 0, mentionsOnItemClickListener, mentionContainer.getAdapter().isStickers() ? contentPreviewViewerDelegate : null, themeDelegate); - }); + mentionContainer.getListView().setOnTouchListener((v, event) -> ContentPreviewViewer.getInstance().onTouch(event, mentionContainer.getListView(), 0, mentionsOnItemClickListener, mentionContainer.getAdapter().isStickers() ? contentPreviewViewerDelegate : null, themeDelegate)); if (!ChatObject.isChannel(currentChat) || currentChat.megagroup) { mentionContainer.getAdapter().setBotInfo(botInfo); } @@ -7692,7 +6227,7 @@ public long getDialogId() { chatActivityEnterView.setFieldText(""); } else if (object instanceof TLRPC.Chat) { TLRPC.Chat chat = (TLRPC.Chat) object; - if (searchingForUser && searchContainer.getVisibility() == View.VISIBLE) { + if (searchingForUser && searchContainer != null && searchContainer.getVisibility() == View.VISIBLE) { searchUserMessages(null, chat); } else { String username = ChatObject.getPublicUsername(chat); @@ -7702,7 +6237,7 @@ public long getDialogId() { } } else if (object instanceof TLRPC.User) { TLRPC.User user = (TLRPC.User) object; - if (searchingForUser && searchContainer.getVisibility() == View.VISIBLE) { + if (searchingForUser && searchContainer != null && searchContainer.getVisibility() == View.VISIBLE) { searchUserMessages(user, null); } else { if (UserObject.getPublicUsername(user) != null) { @@ -7771,6 +6306,8 @@ public long getDialogId() { sendBotInlineResult(result, true, 0); } } + } else if (object instanceof TLRPC.TL_inlineBotWebView) { + processInlineBotWebView((TLRPC.TL_inlineBotWebView) object); } else if (object instanceof TLRPC.TL_inlineBotSwitchPM) { processInlineBotContextPM((TLRPC.TL_inlineBotSwitchPM) object); } else if (object instanceof MediaDataController.KeywordResult) { @@ -7778,14 +6315,20 @@ public long getDialogId() { chatActivityEnterView.addEmojiToRecent(code); if (code != null && code.startsWith("animated_")) { try { + Paint.FontMetricsInt fontMetrics = null; + try { + fontMetrics = chatActivityEnterView.getEditField().getPaint().getFontMetricsInt(); + } catch (Exception e) { + FileLog.e(e, false); + } long documentId = Long.parseLong(code.substring(9)); 
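// Hedged sketch, not part of the patch: the guard added just above fetches the edit field's
// Paint.FontMetricsInt inside a try/catch (logging via FileLog.e(e, false)) and falls back to
// null, which is then handed to the AnimatedEmojiSpan constructors. The helper below only
// illustrates that defensive pattern; SafeMetrics and getMetrics are illustrative names, not
// identifiers from the patch, and the plain catch stands in for the FileLog call.
import android.graphics.Paint;
import android.widget.EditText;

final class SafeMetrics {
    // Returns the edit field's font metrics, or null if the field or its paint is unavailable.
    static Paint.FontMetricsInt getMetrics(EditText editField) {
        try {
            return editField.getPaint().getFontMetricsInt();
        } catch (Exception e) {
            // the patch logs the exception and continues with null metrics
            return null;
        }
    }
}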
TLRPC.Document document = AnimatedEmojiDrawable.findDocument(currentAccount, documentId); SpannableString emoji = new SpannableString(MessageObject.findAnimatedEmojiEmoticon(document)); AnimatedEmojiSpan span; if (document != null) { - span = new AnimatedEmojiSpan(document, chatActivityEnterView.getEditField().getPaint().getFontMetricsInt()); + span = new AnimatedEmojiSpan(document, fontMetrics); } else { - span = new AnimatedEmojiSpan(documentId, chatActivityEnterView.getEditField().getPaint().getFontMetricsInt()); + span = new AnimatedEmojiSpan(documentId, fontMetrics); } emoji.setSpan(span, 0, emoji.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); chatActivityEnterView.replaceWithText(start, len, emoji, false); @@ -7802,6 +6345,9 @@ public long getDialogId() { if (getParentActivity() == null || !mentionContainer.getAdapter().isLongClickEnabled()) { return false; } + if (position == 0 || mentionContainer.getAdapter().isBannedInline()) { + return false; + } position--; Object object = mentionContainer.getAdapter().getItem(position); int start = mentionContainer.getAdapter().getResultStartPosition(); @@ -7851,7 +6397,7 @@ public void getOutline(View view, Outline outline) { drawable = Theme.createCircleDrawable(AndroidUtilities.dp(42), getThemedColor(Theme.key_chat_goDownButton)); } Drawable shadowDrawable = context.getResources().getDrawable(R.drawable.pagedown_shadow).mutate(); - shadowDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_goDownButtonShadow), PorterDuff.Mode.SRC_IN)); + shadowDrawable.setColorFilter(new PorterDuffColorFilter(0xff000000, PorterDuff.Mode.SRC_IN)); CombinedDrawable combinedDrawable = new CombinedDrawable(shadowDrawable, drawable, 0, 0); combinedDrawable.setIconSize(AndroidUtilities.dp(42), AndroidUtilities.dp(42)); drawable = combinedDrawable; @@ -7906,7 +6452,7 @@ public void getOutline(View view, Outline outline) { drawable = Theme.createCircleDrawable(AndroidUtilities.dp(42), getThemedColor(Theme.key_chat_goDownButton)); } shadowDrawable = context.getResources().getDrawable(R.drawable.pagedown_shadow).mutate(); - shadowDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_goDownButtonShadow), PorterDuff.Mode.SRC_IN)); + shadowDrawable.setColorFilter(new PorterDuffColorFilter(0xff000000, PorterDuff.Mode.SRC_IN)); combinedDrawable = new CombinedDrawable(shadowDrawable, drawable, 0, 0); combinedDrawable.setIconSize(AndroidUtilities.dp(42), AndroidUtilities.dp(42)); drawable = combinedDrawable; @@ -7958,13 +6504,12 @@ public void getOutline(View view, Outline outline) { scrimPopupWindowItems = null; chatLayoutManager.setCanScrollVertically(true); dimBehindView(false); - if (chatActivityEnterView != null) { + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { chatActivityEnterView.getEditField().setAllowDrawCursor(true); } }); - if (!NekoConfig.disableVibration.Bool()) { + if (!NekoConfig.disableVibration.Bool()) view.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } return false; }); reactionsMentiondownButton.setVisibility(View.INVISIBLE); @@ -7984,7 +6529,7 @@ public void getOutline(View view, Outline outline) { drawable = Theme.createCircleDrawable(AndroidUtilities.dp(42), getThemedColor(Theme.key_chat_goDownButton)); } shadowDrawable = context.getResources().getDrawable(R.drawable.pagedown_shadow).mutate(); - shadowDrawable.setColorFilter(new 
PorterDuffColorFilter(getThemedColor(Theme.key_chat_goDownButtonShadow), PorterDuff.Mode.MULTIPLY)); + shadowDrawable.setColorFilter(new PorterDuffColorFilter(0xff000000, PorterDuff.Mode.MULTIPLY)); combinedDrawable = new CombinedDrawable(shadowDrawable, drawable, 0, 0); combinedDrawable.setIconSize(AndroidUtilities.dp(42), AndroidUtilities.dp(42)); drawable = combinedDrawable; @@ -7995,14 +6540,7 @@ public void getOutline(View view, Outline outline) { reactionsMentiondownButton.setContentDescription(LocaleController.getString("AccDescrReactionMentionDown", R.string.AccDescrReactionMentionDown)); fragmentLocationContextView = new FragmentContextView(context, this, true, themeDelegate); - fragmentContextView = new FragmentContextView(context, this, false, themeDelegate) { - @Override - protected void playbackSpeedChanged(float value) { - if (Math.abs(value - 1.0f) < 0.001f || Math.abs(value - 1.8f) < 0.001f) { - undoView.showWithAction(0, Math.abs(value - 1.0f) > 0.001f ? UndoView.ACTION_PLAYBACK_SPEED_ENABLED : UndoView.ACTION_PLAYBACK_SPEED_DISABLED, value, null, null); - } - } - }; + fragmentContextView = new FragmentContextView(context, this, false, themeDelegate); contentView.addView(fragmentLocationContextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 38, Gravity.TOP | Gravity.LEFT, 0, -36, 0, 0)); contentView.addView(fragmentContextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 38, Gravity.TOP | Gravity.LEFT, 0, -36, 0, 0)); @@ -8040,20 +6578,6 @@ public void onScrolled(RecyclerView recyclerView, int dx, int dy) { } }); - topUndoView = new UndoView(context, this, true, themeDelegate) { - @Override - public void didPressUrl(CharacterStyle span) { - didPressMessageUrl(span, false, null, null); - } - - @Override - public void showWithAction(long did, int action, Object infoObject, Object infoObject2, Runnable actionRunnable, Runnable cancelRunnable) { - setAdditionalTranslationY(fragmentContextView != null && fragmentContextView.isCallTypeVisible() ? 
AndroidUtilities.dp(fragmentContextView.getStyleHeight()) : 0); - super.showWithAction(did, action, infoObject, infoObject2, actionRunnable, cancelRunnable); - } - }; - contentView.addView(topUndoView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 8, 8, 8, 0)); - contentView.addView(actionBar); overlayView = new View(context); @@ -8068,8 +6592,7 @@ public void showWithAction(long did, int action, Object infoObject, Object infoO overlayView.setVisibility(View.GONE); contentView.setClipChildren(false); - instantCameraView = new InstantCameraView(context, this, themeDelegate); - contentView.addView(instantCameraView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP)); + instantCameraView = null; bottomMessagesActionContainer = new BlurredFrameLayout(context, contentView) { @Override @@ -8088,6 +6611,8 @@ public void onDraw(Canvas canvas) { bottomMessagesActionContainer.setPadding(0, AndroidUtilities.dp(2), 0, 0); contentView.addView(bottomMessagesActionContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 51, Gravity.BOTTOM)); bottomMessagesActionContainer.setOnTouchListener((v, event) -> true); + replyButton = null; + forwardButton = null; chatActivityEnterView = new ChatActivityEnterView(getParentActivity(), contentView, this, true, themeDelegate) { @@ -8181,1429 +6706,1329 @@ public void checkAnimation() { }); changeBoundAnimator.addListener(new AnimatorListenerAdapter() { @Override - public void onAnimationEnd(Animator animation) { - animatedTop = 0; - if (topView != null && topView.getVisibility() == View.VISIBLE) { - topView.setTranslationY(animatedTop + (1f - topViewEnterProgress) * topView.getLayoutParams().height); - if (topLineView != null) { - topLineView.setTranslationY(animatedTop); - } - } else { - chatListView.setTranslationY(0); - if (mentionContainer != null) { - mentionContainer.setTranslationY(0); - } - } - changeBoundAnimator = null; - } - }); - changeBoundAnimator.setDuration(ChatListItemAnimator.DEFAULT_DURATION); - changeBoundAnimator.setInterpolator(ChatListItemAnimator.DEFAULT_INTERPOLATOR); - if (!waitingForSendingMessageLoad) { - changeBoundAnimator.start(); - } - invalidateChatListViewTopPadding(); - invalidateMessagesVisiblePart(); - chatActivityEnterViewAnimateFromTop = 0; - } else if (lastContentViewHeight != contentView.getMeasuredHeight()) { - chatActivityEnterViewAnimateFromTop = 0; - } - if (shouldAnimateEditTextWithBounds) { - float dy = (messageEditTextPredrawHeigth - messageEditText.getMeasuredHeight()) + (messageEditTextPredrawScrollY - messageEditText.getScrollY()); - messageEditText.setOffsetY(messageEditText.getOffsetY() - dy); - ValueAnimator a = ValueAnimator.ofFloat(messageEditText.getOffsetY(), 0); - a.addUpdateListener(animation -> messageEditText.setOffsetY((float) animation.getAnimatedValue())); - if (messageEditTextAnimator != null) { - messageEditTextAnimator.cancel(); - } - messageEditTextAnimator = a; - a.setDuration(ChatListItemAnimator.DEFAULT_DURATION); - // a.setStartDelay(chatActivityEnterViewAnimateBeforeSending ? 
20 : 0); - a.setInterpolator(ChatListItemAnimator.DEFAULT_INTERPOLATOR); - a.start(); - shouldAnimateEditTextWithBounds = false; - } - lastContentViewHeight = contentView.getMeasuredHeight(); - - chatActivityEnterViewAnimateBeforeSending = false; - } - } - - @Override - protected void onLineCountChanged(int oldLineCount, int newLineCount) { - if (chatActivityEnterView != null) { - shouldAnimateEditTextWithBounds = true; - messageEditTextPredrawHeigth = messageEditText.getMeasuredHeight(); - messageEditTextPredrawScrollY = messageEditText.getScrollY(); - contentView.invalidate(); - chatActivityEnterViewAnimateFromTop = chatActivityEnterView.getBackgroundTop(); - } - } - }; - chatActivityEnterView.setDelegate(new ChatActivityEnterView.ChatActivityEnterViewDelegate() { - - int lastSize; - boolean isEditTextItemVisibilitySuppressed; - - @Override - public int getContentViewHeight() { - return contentView.getHeight(); - } - - @Override - public int measureKeyboardHeight() { - return contentView.measureKeyboardHeight(); - } - - @Override - public TLRPC.TL_channels_sendAsPeers getSendAsPeers() { - return sendAsPeersObj; - } - - @Override - public void onMessageSend(CharSequence message, boolean notify, int scheduleDate) { - if (chatListItemAnimator != null) { - chatActivityEnterViewAnimateFromTop = chatActivityEnterView.getBackgroundTop(); - if (chatActivityEnterViewAnimateFromTop != 0) { - chatActivityEnterViewAnimateBeforeSending = true; - } - } - if (mentionContainer != null && mentionContainer.getAdapter() != null) { - mentionContainer.getAdapter().addHashtagsFromMessage(message); - } - if (scheduleDate != 0) { - if (scheduledMessagesCount == -1) { - scheduledMessagesCount = 0; - } - if (message != null) { - scheduledMessagesCount++; - } - if (forwardingMessages != null && !forwardingMessages.messages.isEmpty()) { - scheduledMessagesCount += forwardingMessages.messages.size(); - } - updateScheduledInterface(false); - } - if (!TextUtils.isEmpty(message) && forwardingMessages != null && !forwardingMessages.messages.isEmpty()) { - ArrayList messagesToForward = new ArrayList<>(); - forwardingMessages.getSelectedMessages(messagesToForward); - boolean showReplyHint = messagesToForward.size() > 0; - TLRPC.Peer toPeer = getMessagesController().getPeer(dialog_id); - for (int i = 0; i < messagesToForward.size(); ++i) { - MessageObject msg = messagesToForward.get(i); - if (msg != null && msg.messageOwner != null && !MessageObject.peersEqual(msg.messageOwner.peer_id, toPeer)) { - showReplyHint = false; - break; - } - } - - if (showReplyHint && topUndoView != null) { - topUndoView.showWithAction(0, UndoView.ACTION_HINT_SWIPE_TO_REPLY, null, null); - } - } - if (ChatObject.isForum(currentChat) && !isTopic && replyingMessageObject != null) { - int topicId = replyingMessageObject.replyToForumTopic != null ? 
replyingMessageObject.replyToForumTopic.id : MessageObject.getTopicId(replyingMessageObject.messageOwner, true); - if (topicId != 0) { - getMediaDataController().cleanDraft(dialog_id, topicId, false); - } - } - - hideFieldPanel(notify, scheduleDate, true); - if (chatActivityEnterView != null && chatActivityEnterView.getEmojiView() != null) { - chatActivityEnterView.getEmojiView().onMessageSend(); - } - - if (!getMessagesController().premiumLocked && !getMessagesController().didPressTranscribeButtonEnough() && !getUserConfig().isPremium() && !TextUtils.isEmpty(message) && messages != null) { - for (int i = 1; i < Math.min(5, messages.size()); ++i) { - MessageObject msg = messages.get(i); - if (msg != null && !msg.isOutOwner() && (msg.isVoice() || msg.isRoundVideo()) && msg.isContentUnread()) { - TranscribeButton.showOffTranscribe(msg); - } - } - } - } - - @Override - public void - beforeMessageSend(CharSequence message, boolean notify, int scheduleDate) { - ChatActivity.this.beforeMessageSend(notify, scheduleDate, true); - } - - @Override - public void onEditTextScroll() { - if (suggestEmojiPanel != null) { - suggestEmojiPanel.forceClose(); - } - } - - @Override - public void onContextMenuOpen() { - if (suggestEmojiPanel != null) { - suggestEmojiPanel.forceClose(); - } - } - - @Override - public void onContextMenuClose() { - if (suggestEmojiPanel != null) { - suggestEmojiPanel.fireUpdate(); - } - } - - @Override - public void onSwitchRecordMode(boolean video) { - showVoiceHint(false, video); - } - - @Override - public void onPreAudioVideoRecord() { - showVoiceHint(true, false); - } - - @Override - public void onUpdateSlowModeButton(View button, boolean show, CharSequence time) { - showSlowModeHint(button, show, time); - if (headerItem != null && headerItem.getVisibility() != View.VISIBLE) { - headerItem.setVisibility(View.VISIBLE); - if (attachItem != null) { - attachItem.setVisibility(View.GONE); - } - } - } - - @Override - public void onTextSelectionChanged(int start, int end) { - if (editTextItem == null) { - return; - } - if (suggestEmojiPanel != null) { - suggestEmojiPanel.onTextSelectionChanged(start, end); - } - if (end - start > 0) { - if (editTextItem.getTag() == null) { - editTextItem.setTag(1); - - if (editTextItem.getVisibility() != View.VISIBLE) { - if (chatMode == 0 && (threadMessageId == 0 || isTopic) && !UserObject.isReplyUser(currentUser) && reportType < 0) { - editTextItem.setVisibility(View.VISIBLE); - headerItem.setVisibility(View.GONE); - attachItem.setVisibility(View.GONE); - } else { - ValueAnimator valueAnimator = ValueAnimator.ofFloat(AndroidUtilities.dp(48), 0); - valueAnimator.setDuration(220); - valueAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); - valueAnimator.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationStart(Animator animation) { - actionBar.setMenuOffsetSuppressed(true); - editTextItem.setVisibility(View.VISIBLE); - menu.translateXItems(AndroidUtilities.dp(48)); + public void onAnimationEnd(Animator animation) { + animatedTop = 0; + if (topView != null && topView.getVisibility() == View.VISIBLE) { + topView.setTranslationY(animatedTop + (1f - topViewEnterProgress) * topView.getLayoutParams().height); + if (topLineView != null) { + topLineView.setTranslationY(animatedTop); } - - @Override - public void onAnimationEnd(Animator animation) { - actionBar.setMenuOffsetSuppressed(false); + } else { + chatListView.setTranslationY(0); + if (mentionContainer != null) { + mentionContainer.setTranslationY(0); } - }); - 
valueAnimator.addUpdateListener(animation -> menu.translateXItems((float) animation.getAnimatedValue())); - valueAnimator.start(); + } + changeBoundAnimator = null; } + }); + changeBoundAnimator.setDuration(ChatListItemAnimator.DEFAULT_DURATION); + changeBoundAnimator.setInterpolator(ChatListItemAnimator.DEFAULT_INTERPOLATOR); + if (!waitingForSendingMessageLoad) { + changeBoundAnimator.start(); } + invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); + chatActivityEnterViewAnimateFromTop = 0; + } else if (lastContentViewHeight != contentView.getMeasuredHeight()) { + chatActivityEnterViewAnimateFromTop = 0; } - editTextStart = start; - editTextEnd = end; - } else { - if (editTextItem.getTag() != null) { - editTextItem.setTag(null); - if (editTextItem.getVisibility() != View.GONE) { - if (chatMode == 0 && (threadMessageId == 0 || isTopic) && !UserObject.isReplyUser(currentUser) && reportType < 0) { - editTextItem.setVisibility(View.GONE); - - if (chatActivityEnterView.hasText() && TextUtils.isEmpty(chatActivityEnterView.getSlowModeTimer())) { - headerItem.setVisibility(View.GONE); - attachItem.setVisibility(View.VISIBLE); - } else { - headerItem.setVisibility(View.VISIBLE); - attachItem.setVisibility(View.GONE); - } - } else { - ValueAnimator valueAnimator = ValueAnimator.ofFloat(0, AndroidUtilities.dp(48)); - valueAnimator.setDuration(220); - valueAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); - valueAnimator.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationStart(Animator animation) { - actionBar.setMenuOffsetSuppressed(true); - isEditTextItemVisibilitySuppressed = true; - } - - @Override - public void onAnimationEnd(Animator animation) { - editTextItem.setVisibility(View.GONE); - menu.translateXItems(0); - - actionBar.setMenuOffsetSuppressed(false); - isEditTextItemVisibilitySuppressed = false; - } - }); - valueAnimator.addUpdateListener(animation -> menu.translateXItems((float) animation.getAnimatedValue())); - valueAnimator.start(); - } + if (shouldAnimateEditTextWithBounds) { + float dy = (messageEditTextPredrawHeigth - messageEditText.getMeasuredHeight()) + (messageEditTextPredrawScrollY - messageEditText.getScrollY()); + messageEditText.setOffsetY(messageEditText.getOffsetY() - dy); + ValueAnimator a = ValueAnimator.ofFloat(messageEditText.getOffsetY(), 0); + a.addUpdateListener(animation -> messageEditText.setOffsetY((float) animation.getAnimatedValue())); + if (messageEditTextAnimator != null) { + messageEditTextAnimator.cancel(); } + messageEditTextAnimator = a; + a.setDuration(ChatListItemAnimator.DEFAULT_DURATION); + // a.setStartDelay(chatActivityEnterViewAnimateBeforeSending ? 
20 : 0); + a.setInterpolator(ChatListItemAnimator.DEFAULT_INTERPOLATOR); + a.start(); + shouldAnimateEditTextWithBounds = false; } - } - } + lastContentViewHeight = contentView.getMeasuredHeight(); - @Override - public void onTextChanged(final CharSequence text, boolean bigChange) { - MediaController.getInstance().setInputFieldHasText(!TextUtils.isEmpty(text) || chatActivityEnterView.isEditingMessage()); - if (mentionContainer != null && mentionContainer.getAdapter() != null) { - mentionContainer.getAdapter().searchUsernameOrHashtag(text, chatActivityEnterView.getCursorPosition(), messages, false, false); - } - if (waitingForCharaterEnterRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(waitingForCharaterEnterRunnable); - waitingForCharaterEnterRunnable = null; - } - if ((currentChat == null || ChatObject.canSendEmbed(currentChat)) && chatActivityEnterView.isMessageWebPageSearchEnabled() && (!chatActivityEnterView.isEditingMessage() || !chatActivityEnterView.isEditingCaption())) { - if (bigChange) { - searchLinks(text, true); - } else { - waitingForCharaterEnterRunnable = new Runnable() { - @Override - public void run() { - if (this == waitingForCharaterEnterRunnable) { - searchLinks(text, false); - waitingForCharaterEnterRunnable = null; - } - } - }; - AndroidUtilities.runOnUIThread(waitingForCharaterEnterRunnable, AndroidUtilities.WEB_URL == null ? 3000 : 1000); - } - } - if (emojiAnimationsOverlay != null) { - emojiAnimationsOverlay.cancelAllAnimations(); + chatActivityEnterViewAnimateBeforeSending = false; } - ReactionsEffectOverlay.dismissAll(); - } - - @Override - public void onTextSpansChanged(CharSequence text) { - searchLinks(text, true); } @Override - public void needSendTyping() { - getMessagesController().sendTyping(dialog_id, threadMessageId, 0, classGuid); + protected void onLineCountChanged(int oldLineCount, int newLineCount) { + if (chatActivityEnterView != null) { + shouldAnimateEditTextWithBounds = true; + messageEditTextPredrawHeigth = messageEditText.getMeasuredHeight(); + messageEditTextPredrawScrollY = messageEditText.getScrollY(); + contentView.invalidate(); + chatActivityEnterViewAnimateFromTop = chatActivityEnterView.getBackgroundTop(); + } } + }; + chatActivityEnterView.setDelegate(new ChatActivityEnterViewDelegate()); + chatActivityEnterView.setDialogId(dialog_id, currentAccount); + if (chatInfo != null) { + chatActivityEnterView.setChatInfo(chatInfo); + } + chatActivityEnterView.setId(id_chat_compose_panel); + chatActivityEnterView.setBotsCount(botsCount, hasBotsCommands, false); + chatActivityEnterView.updateBotWebView(false); + chatActivityEnterView.setMinimumHeight(AndroidUtilities.dp(51)); + chatActivityEnterView.setAllowStickersAndGifs(true, true, currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46); + if (inlineQueryForInput != null) { + chatActivityEnterView.setFieldText(inlineQueryForInput); + inlineQueryForInput = null; + } + if (inPreviewMode) { + chatActivityEnterView.setVisibility(View.INVISIBLE); + } + if (!ChatObject.isChannel(currentChat) || currentChat.megagroup) { + chatActivityEnterView.setBotInfo(botInfo, false); + } + contentView.addView(chatActivityEnterView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.BOTTOM)); + chatActivityEnterView.checkChannelRights(); + chatActivityEnterTopView = new ChatActivityEnterTopView(context) { @Override - public void onAttachButtonHidden() { - if (actionBar.isSearchFieldVisible()) { - return; - 
} - if (editTextItem != null && !isEditTextItemVisibilitySuppressed) { - editTextItem.setVisibility(View.GONE); + public void setTranslationY(float translationY) { + super.setTranslationY(translationY); + if (chatActivityEnterView != null) { + chatActivityEnterView.invalidate(); } - if (TextUtils.isEmpty(chatActivityEnterView.getSlowModeTimer())) { - if (headerItem != null) { - headerItem.setVisibility(View.GONE); + if (getVisibility() != GONE) { + hideHints(true); + if (chatListView != null) { + chatListView.setTranslationY(translationY); + } + if (progressView != null) { + progressView.setTranslationY(translationY); } - if (attachItem != null) { - attachItem.setVisibility(View.VISIBLE); + invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); + if (fragmentView != null) { + fragmentView.invalidate(); } } } @Override - public void onAttachButtonShow() { - if (actionBar.isSearchFieldVisible()) { - return; - } - if (headerItem != null) { - headerItem.setVisibility(View.VISIBLE); - } - if (editTextItem != null && !isEditTextItemVisibilitySuppressed) { - editTextItem.setVisibility(View.GONE); - } - if (attachItem != null) { - attachItem.setVisibility(View.GONE); - } + public boolean hasOverlappingRendering() { + return false; } @Override - public void onMessageEditEnd(boolean loading) { - if (chatListItemAnimator != null) { - chatActivityEnterViewAnimateFromTop = chatActivityEnterView.getBackgroundTop(); - if (chatActivityEnterViewAnimateFromTop != 0) { - chatActivityEnterViewAnimateBeforeSending = true; - } - } - if (!loading) { - if (mentionContainer != null) { - mentionContainer.getAdapter().setNeedBotContext(true); - } - if (editingMessageObject != null) { - AndroidUtilities.runOnUIThread(() -> hideFieldPanel(true), 30); - } - boolean waitingForKeyboard = false; - if (chatActivityEnterView.isPopupShowing()) { - chatActivityEnterView.setFieldFocused(); - waitingForKeyboard = true; + public void setVisibility(int visibility) { + super.setVisibility(visibility); + if (visibility == GONE) { + if (chatListView != null) { + chatListView.setTranslationY(0); } - chatActivityEnterView.setAllowStickersAndGifs(true, true, true, waitingForKeyboard); - if (editingMessageObjectReqId != 0) { - getConnectionsManager().cancelRequest(editingMessageObjectReqId, true); - editingMessageObjectReqId = 0; + if (progressView != null) { + progressView.setTranslationY(0); } - updatePinnedMessageView(true); - updateBottomOverlay(); - updateVisibleRows(); } } + }; + replyLineView = new View(context); + replyLineView.setBackgroundColor(getThemedColor(Theme.key_chat_replyPanelLine)); + chatActivityEnterView.addTopView(chatActivityEnterTopView, replyLineView, 48); - @Override - public void onWindowSizeChanged(int size) { - if (size < AndroidUtilities.dp(72) + ActionBar.getCurrentActionBarHeight()) { - allowStickersPanel = false; - if (suggestEmojiPanel.getVisibility() == View.VISIBLE) { - suggestEmojiPanel.setVisibility(View.INVISIBLE); + final FrameLayout replyLayout = new FrameLayout(context); + chatActivityEnterTopView.addReplyView(replyLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.NO_GRAVITY, 0, 0, 52, 0)); + + replyLayout.setOnClickListener(v -> { + if (forwardingMessages != null && !forwardingMessages.messages.isEmpty()) { + SharedConfig.forwardingOptionsHintHintShowed(); + openForwardingPreview(); + } else if (replyingMessageObject != null && (!isThreadChat() || replyingMessageObject.getId() != threadMessageId)) { + 
scrollToMessageId(replyingMessageObject.getId(), 0, true, 0, true, 0); + } else if (editingMessageObject != null) { + if (editingMessageObject.canEditMedia() && editingMessageObjectReqId == 0) { + if (chatAttachAlert == null) { + createChatAttachView(); } + chatAttachAlert.setEditingMessageObject(editingMessageObject); + openAttachMenu(); } else { - allowStickersPanel = true; - if (suggestEmojiPanel.getVisibility() == View.INVISIBLE) { - suggestEmojiPanel.setVisibility(View.VISIBLE); - } + scrollToMessageId(editingMessageObject.getId(), 0, true, 0, true, 0); } + } + }); - allowContextBotPanel = !chatActivityEnterView.isPopupShowing(); -// checkContextBotPanel(); - int size2 = size + (chatActivityEnterView.isPopupShowing() ? 1 << 16 : 0); - if (lastSize != size2) { - chatActivityEnterViewAnimateFromTop = 0; - chatActivityEnterViewAnimateBeforeSending = false; + replyIconImageView = new ImageView(context); + replyIconImageView.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_replyPanelIcons), PorterDuff.Mode.SRC_IN)); + replyIconImageView.setScaleType(ImageView.ScaleType.CENTER); + replyLayout.addView(replyIconImageView, LayoutHelper.createFrame(52, 46, Gravity.TOP | Gravity.LEFT)); + + replyCloseImageView = new ImageView(context); + replyCloseImageView.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_replyPanelClose), PorterDuff.Mode.SRC_IN)); + replyCloseImageView.setImageResource(R.drawable.input_clear); + replyCloseImageView.setScaleType(ImageView.ScaleType.CENTER); + if (Build.VERSION.SDK_INT >= 21) { + replyCloseImageView.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_inappPlayerClose) & 0x19ffffff, 1, AndroidUtilities.dp(18))); + } + chatActivityEnterTopView.addView(replyCloseImageView, LayoutHelper.createFrame(52, 46, Gravity.RIGHT | Gravity.TOP, 0, 0.5f, 0, 0)); + replyCloseImageView.setOnClickListener(v -> { + if (forwardingMessages == null || forwardingMessages.messages.isEmpty()) { + if (ChatObject.isForum(currentChat) && !isTopic && replyingMessageObject != null) { + int topicId = MessageObject.getTopicId(replyingMessageObject.messageOwner, true); + if (topicId != 0) { + getMediaDataController().cleanDraft(dialog_id, topicId, false); + } } - lastSize = size2; + showFieldPanel(false, null, null, null, foundWebPage, true, 0, true, true); + } else { + openAnotherForward(); } + }); + + replyNameTextView = new SimpleTextView(context); + replyNameTextView.setTextSize(14); + replyNameTextView.setTextColor(getThemedColor(Theme.key_chat_replyPanelName)); + replyNameTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + replyLayout.addView(replyNameTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 52, 6, 0, 0)); + + replyObjectTextView = new SimpleTextView(context); + replyObjectTextView.setTextSize(14); + replyObjectTextView.setTextColor(getThemedColor(Theme.key_windowBackgroundWhiteGrayText)); + replyLayout.addView(replyObjectTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 52, 24, 0, 0)); + + replyObjectHintTextView = new SimpleTextView(context); + replyObjectHintTextView.setTextSize(14); + replyObjectHintTextView.setTextColor(getThemedColor(Theme.key_windowBackgroundWhiteGrayText)); + replyObjectHintTextView.setText(LocaleController.getString("TapForForwardingOptions", R.string.TapForForwardingOptions)); + replyObjectHintTextView.setAlpha(0f); + replyLayout.addView(replyObjectHintTextView, 
LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 52, 24, 0, 0)); + + SpoilerEffect replySpoilerEffect = new SpoilerEffect(); + replyImageView = new BackupImageView(context) { + Path path = new Path(); @Override - public void onStickersTab(boolean opened) { - if (emojiButtonRed != null) { - emojiButtonRed.setVisibility(View.GONE); + public void draw(Canvas canvas) { + super.draw(canvas); + + if (replyImageHasMediaSpoiler) { + path.rewind(); + AndroidUtilities.rectTmp.set(imageReceiver.getImageX(), imageReceiver.getImageY(), imageReceiver.getImageX2(), imageReceiver.getImageY2()); + path.addRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(2), AndroidUtilities.dp(2), Path.Direction.CW); + + canvas.save(); + canvas.clipPath(path); + + int sColor = Color.WHITE; + replySpoilerEffect.setColor(ColorUtils.setAlphaComponent(sColor, (int) (Color.alpha(sColor) * 0.325f))); + replySpoilerEffect.setBounds((int) imageReceiver.getImageX(), (int) imageReceiver.getImageY(), (int) imageReceiver.getImageX2(), (int) imageReceiver.getImageY2()); + replySpoilerEffect.draw(canvas); + invalidate(); + + canvas.restore(); } - allowContextBotPanelSecond = !opened; -// checkContextBotPanel(); } + }; + replyImageView.setRoundRadius(AndroidUtilities.dp(2)); + replyLayout.addView(replyImageView, LayoutHelper.createFrame(34, 34, Gravity.TOP | Gravity.LEFT, 52, 6, 0, 0)); - @Override - public void didPressAttachButton() { - if (chatAttachAlert != null) { - chatAttachAlert.setEditingMessageObject(null); - } - openAttachMenu(); + contentView.addView( + suggestEmojiPanel = new SuggestEmojiView(context, currentAccount, chatActivityEnterView, themeDelegate), + LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 160, Gravity.LEFT | Gravity.BOTTOM) + ); + + final ChatActivityEnterTopView.EditView editView = new ChatActivityEnterTopView.EditView(context); + editView.setMotionEventSplittingEnabled(false); + editView.setOrientation(LinearLayout.HORIZONTAL); + editView.setOnClickListener(v -> { + if (editingMessageObject != null) { + scrollToMessageId(editingMessageObject.getId(), 0, true, 0, true, 0); } + }); + chatActivityEnterTopView.addEditView(editView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.NO_GRAVITY, 0, 0, 48, 0)); - @Override - public void needStartRecordVideo(int state, boolean notify, int scheduleDate) { - if (instantCameraView != null) { - if (state == 0) { - instantCameraView.showCamera(); - chatListView.stopScroll(); - chatAdapter.updateRowsSafe(); - } else if (state == 1 || state == 3 || state == 4) { - instantCameraView.send(state, notify, scheduleDate); - } else if (state == 2 || state == 5) { - instantCameraView.cancel(state == 2); + for (int i = 0; i < 2; i++) { + final boolean firstButton = i == 0; + + final ChatActivityEnterTopView.EditViewButton button = new ChatActivityEnterTopView.EditViewButton(context) { + @Override + public void setEditButton(boolean editButton) { + super.setEditButton(editButton); + if (firstButton) { + getTextView().setMaxWidth(editButton ? AndroidUtilities.dp(116) : Integer.MAX_VALUE); } } - } - @Override - public void needChangeVideoPreviewState(int state, float seekProgress) { - if (instantCameraView != null) { - instantCameraView.changeVideoPreviewState(state, seekProgress); + @Override + public void updateColors() { + final int leftInset = firstButton ? 
AndroidUtilities.dp(14) : 0; + setBackground(Theme.createCircleSelectorDrawable(getThemedColor(Theme.key_chat_replyPanelName) & 0x19ffffff, leftInset, 0)); + getImageView().setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_replyPanelName), PorterDuff.Mode.MULTIPLY)); + getTextView().setTextColor(getThemedColor(Theme.key_chat_replyPanelName)); } - } + }; + button.setOrientation(LinearLayout.HORIZONTAL); + ViewHelper.setPadding(button, 10, 0, 10, 0); + editView.addButton(button, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT)); - @Override - public void needStartRecordAudio(int state) { - int visibility = state == 0 ? View.GONE : View.VISIBLE; - if (overlayView.getVisibility() != visibility) { - overlayView.setVisibility(visibility); - } - } + final ImageView imageView = new ImageView(context); + imageView.setScaleType(ImageView.ScaleType.CENTER); + imageView.setImageResource(firstButton ? R.drawable.msg_photoeditor : R.drawable.msg_replace); + button.addImageView(imageView, LayoutHelper.createLinear(24, LayoutHelper.MATCH_PARENT)); - @Override - public void needShowMediaBanHint() { - showMediaBannedHint(); - } + button.addView(new Space(context), LayoutHelper.createLinear(10, LayoutHelper.MATCH_PARENT)); - @Override - public void onStickersExpandedChange() { - checkRaiseSensors(); - if (chatActivityEnterView.isStickersExpanded()) { - AndroidUtilities.setAdjustResizeToNothing(getParentActivity(), classGuid); - if (Bulletin.getVisibleBulletin() != null && Bulletin.getVisibleBulletin().isShowing()) { - Bulletin.getVisibleBulletin().hide(); - } - } else { - AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); - } - if (mentionContainer != null) { - mentionContainer.animate().alpha(chatActivityEnterView.isStickersExpanded() ? 0 : 1f).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + final TextView textView = new TextView(context); + textView.setMaxLines(1); + textView.setSingleLine(true); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + textView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + textView.setGravity(Gravity.LEFT | Gravity.CENTER_VERTICAL); + textView.setEllipsize(TextUtils.TruncateAt.END); + button.addTextView(textView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT)); + + button.updateColors(); + button.setOnClickListener(v -> { + if (editingMessageObject == null || !editingMessageObject.canEditMedia() || editingMessageObjectReqId != 0) { + return; } - if (suggestEmojiPanel != null) { - suggestEmojiPanel.setVisibility(View.VISIBLE); - suggestEmojiPanel.animate().alpha(chatActivityEnterView.isStickersExpanded() ? 
0 : 1f).setInterpolator(CubicBezierInterpolator.DEFAULT).withEndAction(() -> { - if (suggestEmojiPanel != null && chatActivityEnterView.isStickersExpanded()) { - suggestEmojiPanel.setVisibility(View.GONE); - } - }).start(); + if (button.isEditButton()) { + openEditingMessageInPhotoEditor(); + } else { + replyLayout.callOnClick(); } - } + }); + } + searchContainer = null; + bottomOverlay = new FrameLayout(context) { @Override - public void scrollToSendingMessage() { - int id = getSendMessagesHelper().getSendingMessageId(dialog_id); - if (id != 0) { - scrollToMessageId(id, 0, true, 0, true, 0); - } + public void onDraw(Canvas canvas) { + int bottom = Theme.chat_composeShadowDrawable.getIntrinsicHeight(); + Theme.chat_composeShadowDrawable.setBounds(0, 0, getMeasuredWidth(), bottom); + Theme.chat_composeShadowDrawable.draw(canvas); + canvas.drawRect(0, bottom, getMeasuredWidth(), getMeasuredHeight(), getThemedPaint(Theme.key_paint_chatComposeBackground)); } + }; + bottomOverlay.setWillNotDraw(false); + bottomOverlay.setVisibility(View.INVISIBLE); + bottomOverlay.setFocusable(true); + bottomOverlay.setFocusableInTouchMode(true); + bottomOverlay.setClickable(true); + bottomOverlay.setPadding(0, AndroidUtilities.dp(2), 0, 0); + contentView.addView(bottomOverlay, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 51, Gravity.BOTTOM)); + + bottomOverlayText = new TextView(context); + bottomOverlayText.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + bottomOverlayText.setGravity(Gravity.CENTER); + bottomOverlayText.setMaxLines(2); + bottomOverlayText.setEllipsize(TextUtils.TruncateAt.END); + bottomOverlayText.setLineSpacing(AndroidUtilities.dp(2), 1); + bottomOverlayText.setTextColor(getThemedColor(Theme.key_chat_secretChatStatusText)); + bottomOverlay.addView(bottomOverlayText, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 14, 0, 14, 0)); + bottomOverlayChat = new BlurredFrameLayout(context, contentView) { @Override - public boolean hasScheduledMessages() { - return scheduledMessagesCount > 0 && chatMode == 0; + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + int allWidth = MeasureSpec.getSize(widthMeasureSpec); + FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) bottomOverlayChatText.getLayoutParams(); + layoutParams.width = allWidth; + super.onMeasure(widthMeasureSpec, heightMeasureSpec); } @Override - public void onSendLongClick() { - if (scheduledOrNoSoundHint != null) { - scheduledOrNoSoundHint.hide(); + protected void dispatchDraw(Canvas canvas) { + int bottom = Theme.chat_composeShadowDrawable.getIntrinsicHeight(); + Theme.chat_composeShadowDrawable.setBounds(0, 0, getMeasuredWidth(), bottom); + Theme.chat_composeShadowDrawable.draw(canvas); + if (SharedConfig.chatBlurEnabled()) { + if (backgroundPaint == null) { + backgroundPaint = new Paint(); + } + backgroundPaint.setColor(getThemedColor(Theme.key_chat_messagePanelBackground)); + AndroidUtilities.rectTmp2.set(0, bottom, getMeasuredWidth(), getMeasuredHeight()); + contentView.drawBlurRect(canvas, getY(), AndroidUtilities.rectTmp2, backgroundPaint, false); + } else { + canvas.drawRect(0, bottom, getMeasuredWidth(), getMeasuredHeight(), getThemedPaint(Theme.key_paint_chatComposeBackground)); } + super.dispatchDraw(canvas); } + }; + bottomOverlayChat.isTopView = false; + bottomOverlayChat.drawBlur = false; + bottomOverlayChat.setWillNotDraw(false); + bottomOverlayChat.setPadding(0, AndroidUtilities.dp(1.5f), 0, 0); + 
bottomOverlayChat.setVisibility(View.INVISIBLE); + bottomOverlayChat.setClipChildren(false); + contentView.addView(bottomOverlayChat, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 51, Gravity.BOTTOM)); - @Override - public void openScheduledMessages() { - ChatActivity.this.openScheduledMessages(); - } + bottomOverlayStartButton = new TextView(context) { + CellFlickerDrawable cellFlickerDrawable; @Override - public void onAudioVideoInterfaceUpdated() { - updatePagedownButtonVisibility(true); + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + if (cellFlickerDrawable == null) { + cellFlickerDrawable = new CellFlickerDrawable(); + cellFlickerDrawable.drawFrame = false; + cellFlickerDrawable.repeatProgress = 2f; + } + cellFlickerDrawable.setParentWidth(getMeasuredWidth()); + AndroidUtilities.rectTmp.set(0, 0, getMeasuredWidth(), getMeasuredHeight()); + cellFlickerDrawable.draw(canvas, AndroidUtilities.rectTmp, AndroidUtilities.dp(4), null); + invalidate(); } @Override - public void bottomPanelTranslationYChanged(float translation) { - if (translation != 0) { - wasManualScroll = true; - } - bottomPanelTranslationY = chatActivityEnterView.panelAnimationInProgress() ? chatActivityEnterView.getEmojiPadding() - translation : 0; - bottomPanelTranslationYReverse = chatActivityEnterView.panelAnimationInProgress() ? translation : 0; - chatActivityEnterView.setTranslationY(translation); - mentionContainer.setTranslationY(translation); - contentView.setEmojiOffset(chatActivityEnterView.panelAnimationInProgress(), bottomPanelTranslationY); - - translation += chatActivityEnterView.getTopViewTranslation(); - mentionContainer.setTranslationY(translation); - chatListView.setTranslationY(translation); - - invalidateChatListViewTopPadding(); - invalidateMessagesVisiblePart(); - updateTextureViewPosition(false, false); - contentView.invalidate(); - updateBulletinLayout(); - } + public void setVisibility(int visibility) { + super.setVisibility(visibility); - @Override - public void prepareMessageSending() { - waitingForSendingMessageLoad = true; + ViewGroup.LayoutParams params = bottomOverlayChat.getLayoutParams(); + params.height = AndroidUtilities.dp(visibility == VISIBLE ? 
51 + 8 * 2 : 51); } + }; + bottomOverlayStartButton.setBackground(Theme.AdaptiveRipple.filledRect(getThemedColor(Theme.key_featuredStickers_addButton), 8)); + bottomOverlayStartButton.setTextColor(getThemedColor(Theme.key_featuredStickers_buttonText)); + bottomOverlayStartButton.setText(LocaleController.getString(R.string.BotStartButton)); + bottomOverlayStartButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); + bottomOverlayStartButton.setGravity(Gravity.CENTER); + bottomOverlayStartButton.setTypeface(AndroidUtilities.getTypeface(AndroidUtilities.TYPEFACE_ROBOTO_MEDIUM)); + bottomOverlayStartButton.setVisibility(View.GONE); + bottomOverlayStartButton.setOnClickListener(v -> bottomOverlayChatText.callOnClick()); + bottomOverlayChat.addView(bottomOverlayStartButton, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER, 8, 8, 8, 8)); + bottomOverlayChatText = new UnreadCounterTextView(context) { @Override - public void onTrendingStickersShowed(boolean show) { - if (show) { - AndroidUtilities.setAdjustResizeToNothing(getParentActivity(), classGuid); - fragmentView.requestLayout(); - } else { - AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); + protected void updateCounter() { + if (ChatObject.isChannel(currentChat) && !currentChat.megagroup && chatInfo != null && chatInfo.linked_chat_id != 0) { + TLRPC.Dialog dialog = getMessagesController().dialogs_dict.get(-chatInfo.linked_chat_id); + if (dialog != null) { + setCounter(dialog.unread_count); + return; + } } + setCounter(0); } @Override - public boolean hasForwardingMessages() { - return forwardingMessages != null && !forwardingMessages.messages.isEmpty(); - } - - @Override - public int getDisableLinkPreviewStatus() { - return disableLinkPreview ? 
2 : 1; + protected boolean isTouchFullWidth() { + return botInfo != null; } @Override - public void toggleDisableLinkPreview() { - disableLinkPreview = !disableLinkPreview; + protected Theme.ResourcesProvider getResourceProvider() { + return themeDelegate; } - }); - chatActivityEnterView.setDialogId(dialog_id, currentAccount); - if (chatInfo != null) { - chatActivityEnterView.setChatInfo(chatInfo); - } - chatActivityEnterView.setId(id_chat_compose_panel); - chatActivityEnterView.setBotsCount(botsCount, hasBotsCommands, false); - chatActivityEnterView.updateBotWebView(false); - chatActivityEnterView.setMinimumHeight(AndroidUtilities.dp(51)); - chatActivityEnterView.setAllowStickersAndGifs(true, true, currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46); - if (inPreviewMode) { - chatActivityEnterView.setVisibility(View.INVISIBLE); - } - if (!ChatObject.isChannel(currentChat) || currentChat.megagroup) { - chatActivityEnterView.setBotInfo(botInfo, false); - } - contentView.addView(chatActivityEnterView, contentView.getChildCount() - 1, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.BOTTOM)); - chatActivityEnterView.checkChannelRights(); - chatActivityEnterTopView = new ChatActivityEnterTopView(context) { @Override - public void setTranslationY(float translationY) { - super.setTranslationY(translationY); - if (chatActivityEnterView != null) { - chatActivityEnterView.invalidate(); - } - if (getVisibility() != GONE) { - hideHints(true); - if (chatListView != null) { - chatListView.setTranslationY(translationY); - } - if (progressView != null) { - progressView.setTranslationY(translationY); - } - invalidateChatListViewTopPadding(); - invalidateMessagesVisiblePart(); - if (fragmentView != null) { - fragmentView.invalidate(); - } - } + protected float getTopOffset() { + return -AndroidUtilities.dp(2); } - - @Override - public boolean hasOverlappingRendering() { - return false; + }; + bottomOverlayChat.addView(bottomOverlayChatText, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, 0, 0, 1.5f, 0, 0)); + bottomOverlayChatText.setOnClickListener(view -> { + if (getParentActivity() == null || pullingDownOffset != 0) { + return; } - - @Override - public void setVisibility(int visibility) { - super.setVisibility(visibility); - if (visibility == GONE) { - if (chatListView != null) { - chatListView.setTranslationY(0); + if (reportType >= 0) { + showDialog(new ReportAlert(getParentActivity(), reportType) { + @Override + protected void onSend(int type, String message) { + ArrayList<Integer> ids = new ArrayList<>(); + for (int b = 0; b < selectedMessagesIds[0].size(); b++) { + ids.add(selectedMessagesIds[0].keyAt(b)); + } + TLRPC.InputPeer peer = currentUser != null ? 
MessagesController.getInputPeer(currentUser) : MessagesController.getInputPeer(currentChat); + AlertsCreator.sendReport(peer, reportType, message, ids); + finishFragment(); + chatActivityDelegate.onReport(); } - if (progressView != null) { - progressView.setTranslationY(0); + }); + } else if (chatMode == MODE_PINNED) { + finishFragment(); + chatActivityDelegate.onUnpin(true, bottomOverlayChatText.getTag() == null); + } else if (currentUser != null && userBlocked) { + if (currentUser.bot) { + String botUserLast = botUser; + botUser = null; + getMessagesController().unblockPeer(currentUser.id); + if (botUserLast != null && botUserLast.length() != 0) { + getMessagesController().sendBotStart(currentUser, botUserLast); + } else { + getSendMessagesHelper().sendMessage("/start", dialog_id, null, null, null, false, null, null, null, true, 0, null, false); } + } else { + AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity(), themeDelegate); + builder.setMessage(LocaleController.getString("AreYouSureUnblockContact", R.string.AreYouSureUnblockContact)); + builder.setPositiveButton(LocaleController.getString("OK", R.string.OK), (dialogInterface, i) -> getMessagesController().unblockPeer(currentUser.id)); + builder.setTitle(LocaleController.getString("NekoX", R.string.NekoX)); + builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); + showDialog(builder.create()); } - } - }; - - replyLineView = new View(context); - replyLineView.setBackgroundColor(getThemedColor(Theme.key_chat_replyPanelLine)); - chatActivityEnterView.addTopView(chatActivityEnterTopView, replyLineView, 48); - - final FrameLayout replyLayout = new FrameLayout(context); - chatActivityEnterTopView.addReplyView(replyLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.NO_GRAVITY, 0, 0, 52, 0)); + } else if (UserObject.isReplyUser(currentUser)) { + toggleMute(true); + } else if (currentUser != null && currentUser.bot && botUser != null) { + if (botUser.length() != 0) { + getMessagesController().sendBotStart(currentUser, botUser); + } else { + getSendMessagesHelper().sendMessage("/start", dialog_id, null, null, null, false, null, null, null, true, 0, null, false); + } + botUser = null; + updateBottomOverlay(); + } else { + if (ChatObject.isChannel(currentChat) && !(currentChat instanceof TLRPC.TL_channelForbidden)) { + if (ChatObject.isNotInChat(currentChat)) { + if (currentChat.join_request) { +// showDialog(new JoinGroupAlert(context, currentChat, null, this)); + showBottomOverlayProgress(true, true); + MessagesController.getInstance(currentAccount).addUserToChat( + currentChat.id, + UserConfig.getInstance(currentAccount).getCurrentUser(), + 0, + null, + null, + true, + () -> { + showBottomOverlayProgress(false, true); + }, + err -> { + SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); + preferences.edit().putLong("dialog_join_requested_time_" + dialog_id, System.currentTimeMillis()).commit(); + if (err != null && "INVITE_REQUEST_SENT".equals(err.text)) { + JoinGroupAlert.showBulletin(context, this, ChatObject.isChannel(currentChat) && !currentChat.megagroup); + } + showBottomOverlayProgress(false, true); + return false; + } + ); + } else { + if (chatInviteRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(chatInviteRunnable); + chatInviteRunnable = null; + } + showBottomOverlayProgress(true, true); + getMessagesController().addUserToChat(currentChat.id, getUserConfig().getCurrentUser(), 0, null, 
ChatActivity.this, null); + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.closeSearchByActiveAction); - replyLayout.setOnClickListener(v -> { - if (forwardingMessages != null && !forwardingMessages.messages.isEmpty()) { - SharedConfig.forwardingOptionsHintHintShowed(); - openForwardingPreview(); - } else if (replyingMessageObject != null && (!isThreadChat() || replyingMessageObject.getId() != threadMessageId)) { - scrollToMessageId(replyingMessageObject.getId(), 0, true, 0, true, 0); - } else if (editingMessageObject != null) { - if (editingMessageObject.canEditMedia() && editingMessageObjectReqId == 0) { - if (chatAttachAlert == null) { - createChatAttachView(); + if (hasReportSpam() && reportSpamButton.getTag(R.id.object_tag) != null) { + SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); + preferences.edit().putInt("dialog_bar_vis3" + dialog_id, 3).apply(); + getNotificationCenter().postNotificationName(NotificationCenter.peerSettingsDidLoad, dialog_id); + } + } + } else { + toggleMute(true); } - chatAttachAlert.setEditingMessageObject(editingMessageObject); - openAttachMenu(); } else { - scrollToMessageId(editingMessageObject.getId(), 0, true, 0, true, 0); + boolean canDeleteHistory = chatInfo != null && chatInfo.can_delete_channel; + AlertsCreator.createClearOrDeleteDialogAlert(ChatActivity.this, false, currentChat, currentUser, currentEncryptedChat != null, true, canDeleteHistory, (param) -> { + getNotificationCenter().removeObserver(ChatActivity.this, NotificationCenter.closeChats); + getNotificationCenter().postNotificationName(NotificationCenter.closeChats); + finishFragment(); + getNotificationCenter().postNotificationName(NotificationCenter.needDeleteDialog, dialog_id, currentUser, currentChat, param); + }, themeDelegate); } } }); - replyIconImageView = new ImageView(context); - replyIconImageView.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_replyPanelIcons), PorterDuff.Mode.SRC_IN)); - replyIconImageView.setScaleType(ImageView.ScaleType.CENTER); - replyLayout.addView(replyIconImageView, LayoutHelper.createFrame(52, 46, Gravity.TOP | Gravity.LEFT)); + bottomOverlayProgress = new RadialProgressView(context, themeDelegate); + bottomOverlayProgress.setSize(AndroidUtilities.dp(22)); + bottomOverlayProgress.setProgressColor(getThemedColor(Theme.key_chat_fieldOverlayText)); + bottomOverlayProgress.setVisibility(View.INVISIBLE); + bottomOverlayProgress.setScaleX(0.1f); + bottomOverlayProgress.setScaleY(0.1f); + bottomOverlayProgress.setAlpha(1.0f); + bottomOverlayChat.addView(bottomOverlayProgress, LayoutHelper.createFrame(30, 30, Gravity.CENTER)); - replyCloseImageView = new ImageView(context); - replyCloseImageView.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_replyPanelClose), PorterDuff.Mode.SRC_IN)); - replyCloseImageView.setImageResource(R.drawable.input_clear); - replyCloseImageView.setScaleType(ImageView.ScaleType.CENTER); + bottomOverlayImage = new ImageView(context); + int color = getThemedColor(Theme.key_chat_fieldOverlayText); + bottomOverlayImage.setImageResource(R.drawable.msg_help); + bottomOverlayImage.setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY)); + bottomOverlayImage.setScaleType(ImageView.ScaleType.CENTER); if (Build.VERSION.SDK_INT >= 21) { - replyCloseImageView.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_inappPlayerClose) & 0x19ffffff, 1, AndroidUtilities.dp(18))); + 
bottomOverlayImage.setBackgroundDrawable(Theme.createSelectorDrawable(Color.argb(24, Color.red(color), Color.green(color), Color.blue(color)), 1)); } - chatActivityEnterTopView.addView(replyCloseImageView, LayoutHelper.createFrame(52, 46, Gravity.RIGHT | Gravity.TOP, 0, 0.5f, 0, 0)); - replyCloseImageView.setOnClickListener(v -> { - if (forwardingMessages == null || forwardingMessages.messages.isEmpty()) { - if (ChatObject.isForum(currentChat) && !isTopic && replyingMessageObject != null) { - int topicId = MessageObject.getTopicId(replyingMessageObject.messageOwner, true); - if (topicId != 0) { - getMediaDataController().cleanDraft(dialog_id, topicId, false); - } - } - showFieldPanel(false, null, null, null, foundWebPage, true, 0, true, true); - } else { - openAnotherForward(); - } + bottomOverlayChat.addView(bottomOverlayImage, LayoutHelper.createFrame(48, 48, Gravity.RIGHT | Gravity.TOP, 3, 1.5f, 0, 0)); + bottomOverlayImage.setContentDescription(LocaleController.getString("SettingsHelp", R.string.SettingsHelp)); + bottomOverlayImage.setOnClickListener(v -> { + createUndoView(); + undoView.showWithAction(dialog_id, UndoView.ACTION_TEXT_INFO, LocaleController.getString("BroadcastGroupInfo", R.string.BroadcastGroupInfo)); }); - replyNameTextView = new SimpleTextView(context); - replyNameTextView.setTextSize(14); - replyNameTextView.setTextColor(getThemedColor(Theme.key_chat_replyPanelName)); - replyNameTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - replyLayout.addView(replyNameTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 52, 6, 0, 0)); + contentView.addView(messageEnterTransitionContainer = new MessageEnterTransitionContainer(contentView, currentAccount)); - replyObjectTextView = new SimpleTextView(context); - replyObjectTextView.setTextSize(14); - replyObjectTextView.setTextColor(getThemedColor(Theme.key_windowBackgroundWhiteGrayText)); - replyLayout.addView(replyObjectTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 52, 24, 0, 0)); + if (currentChat != null) { + slowModeHint = new HintView(getParentActivity(), 2, themeDelegate); + slowModeHint.setAlpha(0.0f); + slowModeHint.setVisibility(View.INVISIBLE); + contentView.addView(slowModeHint, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 19, 0, 19, 0)); + } - replyObjectHintTextView = new SimpleTextView(context); - replyObjectHintTextView.setTextSize(14); - replyObjectHintTextView.setTextColor(getThemedColor(Theme.key_windowBackgroundWhiteGrayText)); - replyObjectHintTextView.setText(LocaleController.getString("TapForForwardingOptions", R.string.TapForForwardingOptions)); - replyObjectHintTextView.setAlpha(0f); - replyLayout.addView(replyObjectHintTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 52, 24, 0, 0)); + chatAdapter.updateRowsSafe(); + if (loading && messages.isEmpty()) { + showProgressView(chatAdapter.botInfoRow < 0); + chatListView.setEmptyView(null); + } else { + showProgressView(false); + createEmptyView(); + chatListView.setEmptyView(emptyViewContainer); + } - SpoilerEffect replySpoilerEffect = new SpoilerEffect(); - replyImageView = new BackupImageView(context) { - Path path = new Path(); + checkBotKeyboard(); + updateBottomOverlay(); + updateSecretStatus(); + updateTopPanel(false); + updatePinnedMessageView(false); + updateInfoTopView(false); - @Override - public void draw(Canvas canvas) { - 
super.draw(canvas); + chatScrollHelper = new RecyclerAnimationScrollHelper(chatListView, chatLayoutManager); + chatScrollHelper.setScrollListener(this::invalidateMessagesVisiblePart); + chatScrollHelper.setAnimationCallback(chatScrollHelperCallback); - if (replyImageHasMediaSpoiler) { - path.rewind(); - AndroidUtilities.rectTmp.set(imageReceiver.getImageX(), imageReceiver.getImageY(), imageReceiver.getImageX2(), imageReceiver.getImageY2()); - path.addRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(2), AndroidUtilities.dp(2), Path.Direction.CW); + flagSecure = new FlagSecureReason(getParentActivity().getWindow(), () -> currentEncryptedChat != null || SharedConfig.passcodeHash.length() > 0 && !SharedConfig.allowScreenCapture || getMessagesController().isChatNoForwards(currentChat)); - canvas.save(); - canvas.clipPath(path); + if (oldMessage != null) { + chatActivityEnterView.setFieldText(oldMessage); + } - int sColor = Color.WHITE; - replySpoilerEffect.setColor(ColorUtils.setAlphaComponent(sColor, (int) (Color.alpha(sColor) * 0.325f))); - replySpoilerEffect.setBounds((int) imageReceiver.getImageX(), (int) imageReceiver.getImageY(), (int) imageReceiver.getImageX2(), (int) imageReceiver.getImageY2()); - replySpoilerEffect.draw(canvas); - invalidate(); + fixLayoutInternal(); - canvas.restore(); + textSelectionHelper.setCallback(new TextSelectionHelper.Callback() { + @Override + public void onStateChanged(boolean isSelected) { + swipeBackEnabled = !isSelected; + if (isSelected) { + if (slidingView != null) { + slidingView.setSlidingOffset(0); + slidingView = null; + } + maybeStartTrackingSlidingView = false; + startedTrackingSlidingView = false; + if (textSelectionHint != null) { + textSelectionHint.hide(); + } } + updatePagedownButtonVisibility(true); } - }; - replyImageView.setRoundRadius(AndroidUtilities.dp(2)); - replyLayout.addView(replyImageView, LayoutHelper.createFrame(34, 34, Gravity.TOP | Gravity.LEFT, 52, 6, 0, 0)); - suggestEmojiPanel = new SuggestEmojiView(context, currentAccount, chatActivityEnterView, themeDelegate); - contentView.addView(suggestEmojiPanel, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 160, Gravity.LEFT | Gravity.BOTTOM, 0, 0, 0, 0)); - - final ChatActivityEnterTopView.EditView editView = new ChatActivityEnterTopView.EditView(context); - editView.setMotionEventSplittingEnabled(false); - editView.setOrientation(LinearLayout.HORIZONTAL); - editView.setOnClickListener(v -> { - if (editingMessageObject != null) { - scrollToMessageId(editingMessageObject.getId(), 0, true, 0, true, 0); + @Override + public void onTextCopied() { + if (actionBar != null && actionBar.isActionModeShowed()) { + clearSelectionMode(); + } + createUndoView(); + undoView.showWithAction(0, UndoView.ACTION_TEXT_COPIED, null); } }); - chatActivityEnterTopView.addEditView(editView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.NO_GRAVITY, 0, 0, 48, 0)); - for (int i = 0; i < 2; i++) { - final boolean firstButton = i == 0; - - final ChatActivityEnterTopView.EditViewButton button = new ChatActivityEnterTopView.EditViewButton(context) { - @Override - public void setEditButton(boolean editButton) { - super.setEditButton(editButton); - if (firstButton) { - getTextView().setMaxWidth(editButton ? AndroidUtilities.dp(116) : Integer.MAX_VALUE); - } - } - - @Override - public void updateColors() { - final int leftInset = firstButton ? 
AndroidUtilities.dp(14) : 0; - setBackground(Theme.createCircleSelectorDrawable(getThemedColor(Theme.key_chat_replyPanelName) & 0x19ffffff, leftInset, 0)); - getImageView().setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_replyPanelName), PorterDuff.Mode.MULTIPLY)); - getTextView().setTextColor(getThemedColor(Theme.key_chat_replyPanelName)); - } - }; - button.setOrientation(LinearLayout.HORIZONTAL); - ViewHelper.setPadding(button, 10, 0, 10, 0); - editView.addButton(button, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT)); - - final ImageView imageView = new ImageView(context); - imageView.setScaleType(ImageView.ScaleType.CENTER); - imageView.setImageResource(firstButton ? R.drawable.msg_photoeditor : R.drawable.msg_replace); - button.addImageView(imageView, LayoutHelper.createLinear(24, LayoutHelper.MATCH_PARENT)); + View overlay = textSelectionHelper.getOverlayView(context); + if (overlay != null) { + if (overlay.getParent() instanceof ViewGroup) { + ((ViewGroup) overlay.getParent()).removeView(overlay); + } + contentView.addView(overlay); + } + textSelectionHelper.setParentView(chatListView); - button.addView(new Space(context), LayoutHelper.createLinear(10, LayoutHelper.MATCH_PARENT)); + contentView.addView(fireworksOverlay = new FireworksOverlay(context), LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - final TextView textView = new TextView(context); - textView.setMaxLines(1); - textView.setSingleLine(true); - textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); - textView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - textView.setGravity(Gravity.LEFT | Gravity.CENTER_VERTICAL); - textView.setEllipsize(TextUtils.TruncateAt.END); - button.addTextView(textView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT)); + checkInstantSearch(); + if (replyingMessageObject != null) { + chatActivityEnterView.setReplyingMessageObject(replyingMessageObject); + } - button.updateColors(); - button.setOnClickListener(v -> { - if (editingMessageObject == null || !editingMessageObject.canEditMedia() || editingMessageObjectReqId != 0) { - return; - } - if (button.isEditButton()) { - openEditingMessageInPhotoEditor(); - } else { - replyLayout.callOnClick(); + ViewGroup decorView; + if (Build.VERSION.SDK_INT >= 21) { + decorView = (ViewGroup) getParentActivity().getWindow().getDecorView(); + } else { + decorView = contentView; + } + pinchToZoomHelper = new PinchToZoomHelper(decorView, contentView) { + @Override + protected void drawOverlays(Canvas canvas, float alpha, float parentOffsetX, float parentOffsetY, float clipTop, float clipBottom) { + if (alpha > 0) { + View view = getChild(); + if (view instanceof ChatMessageCell) { + ChatMessageCell cell = (ChatMessageCell) view; + + int top = (int) Math.max(clipTop, parentOffsetY); + int bottom = (int) Math.min(clipBottom, parentOffsetY + cell.getMeasuredHeight()); + AndroidUtilities.rectTmp.set(parentOffsetX, top, parentOffsetX + cell.getMeasuredWidth(), bottom); + canvas.saveLayerAlpha(AndroidUtilities.rectTmp, (int) (255 * alpha), Canvas.ALL_SAVE_FLAG); + canvas.translate(parentOffsetX, parentOffsetY); + cell.drawFromPinchToZoom = true; + cell.drawOverlays(canvas); + if (cell.shouldDrawTimeOnMedia() && cell.getCurrentMessagesGroup() == null) { + cell.drawTime(canvas, 1f, false); + } + cell.drawFromPinchToZoom = false; + canvas.restore(); + } } - }); - } + } + }; + pinchToZoomHelper.setCallback(new 
PinchToZoomHelper.Callback() { - searchContainer = new BlurredFrameLayout(context, contentView) { @Override - public void onDraw(Canvas canvas) { - int bottom = Theme.chat_composeShadowDrawable.getIntrinsicHeight(); - if (chatActivityEnterView.getVisibility() != View.VISIBLE) { - Theme.chat_composeShadowDrawable.setBounds(0, 0, getMeasuredWidth(), bottom); - Theme.chat_composeShadowDrawable.draw(canvas); - } - AndroidUtilities.rectTmp2.set(0, bottom, getMeasuredWidth(), getMeasuredHeight()); - contentView.drawBlurRect(canvas, getY(), AndroidUtilities.rectTmp2, getThemedPaint(Theme.key_paint_chatComposeBackground), false); + public TextureView getCurrentTextureView() { + return videoTextureView; } @Override - protected void measureChildWithMargins(View child, int parentWidthMeasureSpec, int widthUsed, int parentHeightMeasureSpec, int heightUsed) { - if (child == searchCountText) { - int leftMargin = 14; - if (searchCalendarButton != null && searchCalendarButton.getVisibility() != GONE) { - leftMargin += 48; - } - if (searchUserButton != null && searchUserButton.getVisibility() != GONE) { - leftMargin += 48; + public void onZoomStarted(MessageObject messageObject) { + chatListView.cancelClickRunnables(true); + chatListView.stopScroll(); + if (MediaController.getInstance().isPlayingMessage(messageObject)) { + contentView.removeView(videoPlayerContainer); + videoPlayerContainer = null; + videoTextureView = null; + aspectRatioFrameLayout = null; + } + + for (int i = 0; i < chatListView.getChildCount(); i++) { + if (chatListView.getChildAt(i) instanceof ChatMessageCell) { + ChatMessageCell cell = (ChatMessageCell) chatListView.getChildAt(i); + if (cell.getMessageObject().getId() == messageObject.getId()) { + cell.getPhotoImage().setVisible(false, true); + } } - ((MarginLayoutParams) child.getLayoutParams()).leftMargin = AndroidUtilities.dp(leftMargin); } - super.measureChildWithMargins(child, parentWidthMeasureSpec, widthUsed, parentHeightMeasureSpec, heightUsed); } - }; - searchContainer.drawBlur = false; - searchContainer.isTopView = false; - searchContainer.setWillNotDraw(false); - searchContainer.setVisibility(View.INVISIBLE); - searchContainer.setPadding(0, AndroidUtilities.dp(3), 0, 0); - searchContainer.setClipToPadding(false); - searchAsListTogglerView = new View(context); - searchAsListTogglerView.setOnTouchListener((v, event) -> getMediaDataController().getFoundMessageObjects().size() <= 1); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - searchAsListTogglerView.setBackground(Theme.getSelectorDrawable(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), false)); - } - searchAsListTogglerView.setOnClickListener(v -> { - if (getMediaDataController().getFoundMessageObjects().size() > 1) { - if (searchAsListHint != null) { - searchAsListHint.hide(); + @Override + public void onZoomFinished(MessageObject messageObject) { + if (messageObject == null) { + return; } - toggleMesagesSearchListView(); - if (!SharedConfig.searchMessagesAsListUsed) { - SharedConfig.setSearchMessagesAsListUsed(true); + if (MediaController.getInstance().isPlayingMessage(messageObject)) { + for (int i = 0; i < chatListView.getChildCount(); i++) { + if (chatListView.getChildAt(i) instanceof ChatMessageCell) { + ChatMessageCell cell = (ChatMessageCell) chatListView.getChildAt(i); + if (cell.getMessageObject().getId() == messageObject.getId()) { + AnimatedFileDrawable animation = cell.getPhotoImage().getAnimation(); + if (animation != null && animation.isRunning()) { + animation.stop(); + } + if (animation != null) 
{ + Bitmap bitmap = animation.getAnimatedBitmap(); + if (bitmap != null) { + try { + Bitmap src = pinchToZoomHelper.getVideoBitmap(bitmap.getWidth(), bitmap.getHeight()); + Canvas canvas = new Canvas(bitmap); + canvas.drawBitmap(src, 0, 0, null); + src.recycle(); + } catch (Throwable e) { + FileLog.e(e); + } + } + } + } + } + } + createTextureView(true); + MediaController.getInstance().setTextureView(videoTextureView, aspectRatioFrameLayout, videoPlayerContainer, true); } + chatListView.invalidate(); } + }); - final float paddingTop = Theme.chat_composeShadowDrawable.getIntrinsicHeight() / AndroidUtilities.density - 3f; - searchContainer.addView(searchAsListTogglerView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.NO_GRAVITY, 0, paddingTop, 0, 0)); + pinchToZoomHelper.setClipBoundsListener(topBottom -> { + topBottom[1] = chatListView.getBottom() - blurredViewBottomOffset; + topBottom[0] = chatListView.getTop() + chatListViewPaddingTop - AndroidUtilities.dp(4); + }); + emojiAnimationsOverlay = new EmojiAnimationsOverlay(ChatActivity.this, contentView, chatListView, currentAccount, dialog_id, threadMessageId) { + @Override + public void onAllEffectsEnd() { + updateMessagesVisiblePart(false); + } + }; + actionBar.setDrawBlurBackground(contentView); - searchUpButton = new ImageView(context); - searchUpButton.setScaleType(ImageView.ScaleType.CENTER); - searchUpButton.setImageResource(R.drawable.msg_go_up); - searchUpButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_searchPanelIcons), PorterDuff.Mode.SRC_IN)); - searchUpButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), 1)); - searchContainer.addView(searchUpButton, LayoutHelper.createFrame(48, 48, Gravity.RIGHT | Gravity.TOP, 0, 0, 48, 0)); - searchUpButton.setOnClickListener(view -> { - getMediaDataController().searchMessagesInChat(null, dialog_id, mergeDialogId, classGuid, 1, threadMessageId, searchingUserMessages, searchingChatMessages); - showMessagesSearchListView(false); - if (!SharedConfig.searchMessagesAsListUsed && SharedConfig.searchMessagesAsListHintShows < 3 && !searchAsListHintShown && Math.random() <= 0.25) { - showSearchAsListHint(); - searchAsListHintShown = true; - SharedConfig.increaseSearchAsListHintShows(); + if (isTopic) { + reactionsMentionCount = forumTopic.unread_reactions_count; + updateReactionsMentionButton(false); + } else { + TLRPC.Dialog dialog = getMessagesController().dialogs_dict.get(dialog_id); + if (dialog != null) { + reactionsMentionCount = dialog.unread_reactions_count; + updateReactionsMentionButton(false); } - }); - searchUpButton.setContentDescription(LocaleController.getString("AccDescrSearchNext", R.string.AccDescrSearchNext)); + } - searchDownButton = new ImageView(context); - searchDownButton.setScaleType(ImageView.ScaleType.CENTER); - searchDownButton.setImageResource(R.drawable.msg_go_down); - searchDownButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_searchPanelIcons), PorterDuff.Mode.SRC_IN)); - searchDownButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), 1)); - searchContainer.addView(searchDownButton, LayoutHelper.createFrame(48, 48, Gravity.RIGHT | Gravity.TOP, 0, 0, 0, 0)); - searchDownButton.setOnClickListener(view -> { - getMediaDataController().searchMessagesInChat(null, dialog_id, mergeDialogId, classGuid, 2, threadMessageId, searchingUserMessages, 
searchingChatMessages); - showMessagesSearchListView(false); - }); - searchDownButton.setContentDescription(LocaleController.getString("AccDescrSearchPrev", R.string.AccDescrSearchPrev)); + BackButtonMenu.addToAccessedDialogs(currentAccount, currentChat, currentUser, dialog_id, dialogFolderId, dialogFilterId); - if (currentChat != null && (!ChatObject.isChannel(currentChat) || currentChat.megagroup)) { - searchUserButton = new ImageView(context); - searchUserButton.setScaleType(ImageView.ScaleType.CENTER); - searchUserButton.setImageResource(R.drawable.msg_usersearch); - searchUserButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_searchPanelIcons), PorterDuff.Mode.SRC_IN)); - searchUserButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), 1)); - searchContainer.addView(searchUserButton, LayoutHelper.createFrame(48, 48, Gravity.LEFT | Gravity.TOP, 48, 0, 0, 0)); - searchUserButton.setOnClickListener(view -> { - if (mentionContainer != null) { - mentionContainer.setReversed(true); - mentionContainer.getAdapter().setSearchingMentions(true); - } - searchCalendarButton.setVisibility(View.GONE); - searchUserButton.setVisibility(View.GONE); - searchingForUser = true; - searchingUserMessages = null; - searchingChatMessages = null; - searchItem.setSearchFieldHint(LocaleController.getString("SearchMembers", R.string.SearchMembers)); - searchItem.setSearchFieldCaption(LocaleController.getString("SearchFrom", R.string.SearchFrom)); - AndroidUtilities.showKeyboard(searchItem.getSearchField()); - searchItem.clearSearchText(); - }); - searchUserButton.setContentDescription(LocaleController.getString("AccDescrSearchByUser", R.string.AccDescrSearchByUser)); + return fragmentView; + } + + private void createBottomMessagesActionButtons() { + if (replyButton != null || getContext() == null) { + return; } - searchCalendarButton = new ImageView(context); - searchCalendarButton.setScaleType(ImageView.ScaleType.CENTER); - searchCalendarButton.setImageResource(R.drawable.msg_calendar); - searchCalendarButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_searchPanelIcons), PorterDuff.Mode.SRC_IN)); - searchCalendarButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), 1)); - searchContainer.addView(searchCalendarButton, LayoutHelper.createFrame(48, 48, Gravity.LEFT | Gravity.TOP)); - searchCalendarButton.setOnClickListener(view -> { - if (getParentActivity() == null) { - return; - } - AndroidUtilities.hideKeyboard(searchItem.getSearchField()); - showDialog(AlertsCreator.createCalendarPickerDialog(getParentActivity(), 1375315200000L, new MessagesStorage.IntCallback() { - @Override - public void run(int param) { - jumpToDate(param); + replyButton = new TextView(getContext()); + replyButton.setText(LocaleController.getString("Reply", R.string.Reply)); + replyButton.setGravity(Gravity.CENTER_VERTICAL); + replyButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + replyButton.setPadding(AndroidUtilities.dp(14), 0, AndroidUtilities.dp(21), 0); + replyButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), 3)); + replyButton.setTextColor(getThemedColor(Theme.key_actionBarActionModeDefaultIcon)); + replyButton.setCompoundDrawablePadding(AndroidUtilities.dp(7)); + replyButton.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + Drawable image = 
getContext().getResources().getDrawable(R.drawable.baseline_reply_24).mutate(); + image.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_actionBarActionModeDefaultIcon), PorterDuff.Mode.SRC_IN)); + replyButton.setCompoundDrawablesWithIntrinsicBounds(image, null, null, null); + replyButton.setOnClickListener(v -> { + MessageObject messageObject = null; + for (int a = 1; a >= 0; a--) { + if (messageObject == null && selectedMessagesIds[a].size() != 0) { + messageObject = messagesDict[a].get(selectedMessagesIds[a].keyAt(0)); } - }, themeDelegate).create()); + selectedMessagesIds[a].clear(); + selectedMessagesCanCopyIds[a].clear(); + selectedMessagesCanStarIds[a].clear(); + } + hideActionMode(); + if (messageObject != null && (messageObject.messageOwner.id > 0 || messageObject.messageOwner.id < 0 && currentEncryptedChat != null)) { + showFieldPanelForReply(messageObject); + } + updatePinnedMessageView(true); + updateVisibleRows(); + updateSelectedMessageReactions(); }); - searchCalendarButton.setContentDescription(LocaleController.getString("JumpToDate", R.string.JumpToDate)); + bottomMessagesActionContainer.addView(replyButton, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP)); - searchGoToBeginningButton = new ImageView(context); - searchGoToBeginningButton.setScaleType(ImageView.ScaleType.CENTER); - searchGoToBeginningButton.setImageResource(R.drawable.ic_upward); - searchGoToBeginningButton.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_searchPanelIcons), PorterDuff.Mode.SRC_IN)); - searchGoToBeginningButton.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(Theme.key_actionBarActionModeDefaultSelector), 1)); - searchContainer.addView(searchGoToBeginningButton, LayoutHelper.createFrame(48, 48, Gravity.RIGHT | Gravity.TOP, 0, 0, 48 * 2, 0)); - searchGoToBeginningButton.setOnClickListener(view -> { - scrollToMessageId(1, 0, false, 0, true, 0); + forwardButton = new TextView(getContext()); + forwardButton.setText(LocaleController.getString("Forward", R.string.Forward)); + forwardButton.setGravity(Gravity.CENTER_VERTICAL); + forwardButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + forwardButton.setPadding(AndroidUtilities.dp(21), 0, AndroidUtilities.dp(21), 0); + forwardButton.setCompoundDrawablePadding(AndroidUtilities.dp(6)); + forwardButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), 3)); + forwardButton.setTextColor(getThemedColor(Theme.key_actionBarActionModeDefaultIcon)); + forwardButton.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + image = getContext().getResources().getDrawable(R.drawable.baseline_forward_24).mutate(); + image.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_actionBarActionModeDefaultIcon), PorterDuff.Mode.SRC_IN)); + forwardButton.setCompoundDrawablesWithIntrinsicBounds(image, null, null, null); + forwardButton.setOnClickListener(v -> { + noForwardQuote = false; + openForward(false); }); - searchGoToBeginningButton.setContentDescription(LocaleController.getString("GoToBeginning", R.string.GoToBeginning)); + bottomMessagesActionContainer.addView(forwardButton, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT, Gravity.RIGHT | Gravity.TOP)); + } + + private void checkInstantSearch() { + final long searchFromUserId = getArguments().getInt("search_from_user_id", 0); + if (searchFromUserId != 0) { + TLRPC.User user = 
getMessagesController().getUser(searchFromUserId); + if (user != null) { + openSearchWithText(""); + if (searchUserButton != null) { + searchUserButton.callOnClick(); + } + searchUserMessages(user, null); + } + } else { + final long searchFromChatId = getArguments().getInt("search_from_chat_id", 0); + if (searchFromChatId != 0) { + TLRPC.Chat chat = getMessagesController().getChat(searchFromChatId); + if (chat != null) { + openSearchWithText(""); + if (searchUserButton != null) { + searchUserButton.callOnClick(); + } + searchUserMessages(null, chat); + } + } + } + } - searchCountText = new SearchCounterView(context, themeDelegate); - searchCountText.setGravity(Gravity.LEFT); - searchContainer.addView(searchCountText, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 0, 0, 108, 0)); + private void createTopPanel() { + if (topChatPanelView != null || getContext() == null) { + return; + } - bottomOverlay = new FrameLayout(context) { - @Override - public void onDraw(Canvas canvas) { - int bottom = Theme.chat_composeShadowDrawable.getIntrinsicHeight(); - Theme.chat_composeShadowDrawable.setBounds(0, 0, getMeasuredWidth(), bottom); - Theme.chat_composeShadowDrawable.draw(canvas); - canvas.drawRect(0, bottom, getMeasuredWidth(), getMeasuredHeight(), getThemedPaint(Theme.key_paint_chatComposeBackground)); - } - }; - bottomOverlay.setWillNotDraw(false); - bottomOverlay.setVisibility(View.INVISIBLE); - bottomOverlay.setFocusable(true); - bottomOverlay.setFocusableInTouchMode(true); - bottomOverlay.setClickable(true); - bottomOverlay.setPadding(0, AndroidUtilities.dp(2), 0, 0); - contentView.addView(bottomOverlay, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 51, Gravity.BOTTOM)); + topChatPanelView = new BlurredFrameLayout(getContext(), contentView) { - bottomOverlayText = new TextView(context); - bottomOverlayText.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); - bottomOverlayText.setGravity(Gravity.CENTER); - bottomOverlayText.setMaxLines(2); - bottomOverlayText.setEllipsize(TextUtils.TruncateAt.END); - bottomOverlayText.setLineSpacing(AndroidUtilities.dp(2), 1); - bottomOverlayText.setTextColor(getThemedColor(Theme.key_chat_secretChatStatusText)); - bottomOverlay.addView(bottomOverlayText, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 14, 0, 14, 0)); + private boolean ignoreLayout; - bottomOverlayChat = new BlurredFrameLayout(context, contentView) { @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - int allWidth = MeasureSpec.getSize(widthMeasureSpec); - FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) bottomOverlayChatText.getLayoutParams(); - layoutParams.width = allWidth; + int width = MeasureSpec.getSize(widthMeasureSpec); + if (addToContactsButton != null && addToContactsButton.getVisibility() == VISIBLE && reportSpamButton != null && reportSpamButton.getVisibility() == VISIBLE) { + width = (width - AndroidUtilities.dp(31)) / 2; + } + ignoreLayout = true; + if (reportSpamButton != null && reportSpamButton.getVisibility() == VISIBLE) { + FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) reportSpamButton.getLayoutParams(); + layoutParams.width = width; + if (addToContactsButton != null && addToContactsButton.getVisibility() == VISIBLE) { + reportSpamButton.setPadding(AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4), 0); + layoutParams.leftMargin = width; + layoutParams.width -= AndroidUtilities.dp(15); + } else { + 
reportSpamButton.setPadding(AndroidUtilities.dp(48), 0, AndroidUtilities.dp(48), 0); + layoutParams.leftMargin = 0; + } + } + if (addToContactsButton != null && addToContactsButton.getVisibility() == VISIBLE) { + FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) addToContactsButton.getLayoutParams(); + layoutParams.width = width; + if (reportSpamButton != null && reportSpamButton.getVisibility() == VISIBLE) { + addToContactsButton.setPadding(AndroidUtilities.dp(11), 0, AndroidUtilities.dp(4), 0); + } else { + addToContactsButton.setPadding(AndroidUtilities.dp(48), 0, AndroidUtilities.dp(48), 0); + layoutParams.leftMargin = 0; + } + } + ignoreLayout = false; super.onMeasure(widthMeasureSpec, heightMeasureSpec); } + @Override - protected void dispatchDraw(Canvas canvas) { - int bottom = Theme.chat_composeShadowDrawable.getIntrinsicHeight(); - Theme.chat_composeShadowDrawable.setBounds(0, 0, getMeasuredWidth(), bottom); - Theme.chat_composeShadowDrawable.draw(canvas); - if (SharedConfig.chatBlurEnabled()) { - if (backgroundPaint == null) { - backgroundPaint = new Paint(); - } - backgroundPaint.setColor(getThemedColor(Theme.key_chat_messagePanelBackground)); - AndroidUtilities.rectTmp2.set(0, bottom, getMeasuredWidth(), getMeasuredHeight()); - contentView.drawBlurRect(canvas, getY(), AndroidUtilities.rectTmp2, backgroundPaint, false); - } else { - canvas.drawRect(0, bottom, getMeasuredWidth(), getMeasuredHeight(), getThemedPaint(Theme.key_paint_chatComposeBackground)); + public void requestLayout() { + if (ignoreLayout) { + return; } - super.dispatchDraw(canvas); + super.requestLayout(); } }; - bottomOverlayChat.isTopView = false; - bottomOverlayChat.drawBlur = false; - bottomOverlayChat.setWillNotDraw(false); - bottomOverlayChat.setPadding(0, AndroidUtilities.dp(1.5f), 0, 0); - bottomOverlayChat.setVisibility(View.INVISIBLE); - bottomOverlayChat.setClipChildren(false); - contentView.addView(bottomOverlayChat, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 51, Gravity.BOTTOM)); + topChatPanelView.backgroundColor = getThemedColor(Theme.key_chat_topPanelBackground); + topChatPanelView.backgroundPaddingBottom = AndroidUtilities.dp(2); + topChatPanelView.setTag(1); + topChatPanelViewOffset = -AndroidUtilities.dp(50); + invalidateChatListViewTopPadding(); + topChatPanelView.setClickable(true); + topChatPanelView.setVisibility(View.GONE); + topChatPanelView.setBackgroundResource(R.drawable.blockpanel); + topChatPanelView.getBackground().setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_topPanelBackground), PorterDuff.Mode.MULTIPLY)); + int index = 8; + if (pinnedMessageView != null && pinnedMessageView.getParent() == contentView) { + index = contentView.indexOfChild(pinnedMessageView) + 1; + } + contentView.addView(topChatPanelView, index, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 50, Gravity.TOP | Gravity.LEFT)); - bottomOverlayChatText = new UnreadCounterTextView(context) { - @Override - protected void updateCounter() { - if (ChatObject.isChannel(currentChat) && !currentChat.megagroup && chatInfo != null && chatInfo.linked_chat_id != 0) { - TLRPC.Dialog dialog = getMessagesController().dialogs_dict.get(-chatInfo.linked_chat_id); - if (dialog != null) { - setCounter(dialog.unread_count); - return; - } - } - setCounter(0); + reportSpamButton = new TextView(getContext()); + reportSpamButton.setTextColor(getThemedColor(Theme.key_chat_reportSpam)); + if (Build.VERSION.SDK_INT >= 21) { + 
reportSpamButton.setBackground(Theme.createSelectorDrawable(getThemedColor(Theme.key_chat_reportSpam) & 0x19ffffff, 3)); + } + reportSpamButton.setTag(Theme.key_chat_reportSpam); + reportSpamButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + reportSpamButton.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + reportSpamButton.setSingleLine(true); + reportSpamButton.setMaxLines(1); + reportSpamButton.setGravity(Gravity.CENTER); + topChatPanelView.addView(reportSpamButton, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 1)); + reportSpamButton.setOnClickListener(v2 -> AlertsCreator.showBlockReportSpamAlert(ChatActivity.this, dialog_id, currentUser, currentChat, currentEncryptedChat, reportSpamButton.getTag(R.id.object_tag) != null, chatInfo, param -> { + if (param == 0) { + updateTopPanel(true); + } else { + finishFragment(); } + }, themeDelegate)); - @Override - protected boolean isTouchFullWidth() { - return botInfo != null; - } + emojiStatusSpamHint = new LinkSpanDrawable.LinksTextView(getContext(), themeDelegate) { + Layout lastLayout; + AnimatedEmojiSpan.EmojiGroupedSpans stack; + PorterDuffColorFilter colorFilter; + int lastColor; @Override - protected Theme.ResourcesProvider getResourceProvider() { - return themeDelegate; + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + AnimatedEmojiSpan.release(this, stack); + lastLayout = null; } @Override - protected float getTopOffset() { - return -AndroidUtilities.dp(2); - } - }; - bottomOverlayChat.addView(bottomOverlayChatText, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, 0, 0, 1.5f, 0, 0)); - bottomOverlayChatText.setOnClickListener(view -> { - if (getParentActivity() == null || pullingDownOffset != 0) { - return; - } - if (reportType >= 0) { - showDialog(new ReportAlert(getParentActivity(), reportType) { - @Override - protected void onSend(int type, String message) { - ArrayList ids = new ArrayList<>(); - for (int b = 0; b < selectedMessagesIds[0].size(); b++) { - ids.add(selectedMessagesIds[0].keyAt(b)); - } - TLRPC.InputPeer peer = currentUser != null ? 
MessagesController.getInputPeer(currentUser) : MessagesController.getInputPeer(currentChat); - AlertsCreator.sendReport(peer, reportType, message, ids); - finishFragment(); - chatActivityDelegate.onReport(); - } - }); - } else if (chatMode == MODE_PINNED) { - finishFragment(); - chatActivityDelegate.onUnpin(true, bottomOverlayChatText.getTag() == null); - } else if (currentUser != null && userBlocked) { - if (currentUser.bot) { - String botUserLast = botUser; - botUser = null; - getMessagesController().unblockPeer(currentUser.id); - if (botUserLast != null && botUserLast.length() != 0) { - getMessagesController().sendBotStart(currentUser, botUserLast); - } else { - getSendMessagesHelper().sendMessage("/start", dialog_id, null, null, null, false, null, null, null, true, 0, null, false); - } - } else { - AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity(), themeDelegate); - builder.setMessage(LocaleController.getString("AreYouSureUnblockContact", R.string.AreYouSureUnblockContact)); - builder.setPositiveButton(LocaleController.getString("OK", R.string.OK), (dialogInterface, i) -> getMessagesController().unblockPeer(currentUser.id)); - builder.setTitle(LocaleController.getString("NekoX", R.string.NekoX)); - builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); - showDialog(builder.create()); + protected void dispatchDraw(Canvas canvas) { + super.dispatchDraw(canvas); + if (lastLayout != getLayout()) { + stack = AnimatedEmojiSpan.update(AnimatedEmojiDrawable.CACHE_TYPE_EMOJI_STATUS, this, stack, lastLayout = getLayout()); } - } else if (UserObject.isReplyUser(currentUser)) { - toggleMute(true); - } else if (currentUser != null && currentUser.bot && botUser != null) { - if (botUser.length() != 0) { - getMessagesController().sendBotStart(currentUser, botUser); - } else { - getSendMessagesHelper().sendMessage("/start", dialog_id, null, null, null, false, null, null, null, true, 0, null, false); + int color = getThemedColor(Theme.key_windowBackgroundWhiteBlueIcon); + if (lastColor != color || colorFilter == null) { + colorFilter = new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY); + lastColor = color; } - botUser = null; - updateBottomOverlay(); - } else { - if (ChatObject.isChannel(currentChat) && !(currentChat instanceof TLRPC.TL_channelForbidden)) { - if (ChatObject.isNotInChat(currentChat)) { - if (currentChat.join_request) { -// showDialog(new JoinGroupAlert(context, currentChat, null, this)); - showBottomOverlayProgress(true, true); - MessagesController.getInstance(currentAccount).addUserToChat( - currentChat.id, - UserConfig.getInstance(currentAccount).getCurrentUser(), - 0, - null, - null, - true, - () -> { - showBottomOverlayProgress(false, true); - }, - err -> { - SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); - preferences.edit().putLong("dialog_join_requested_time_" + dialog_id, System.currentTimeMillis()).commit(); - if (err != null && "INVITE_REQUEST_SENT".equals(err.text)) { - JoinGroupAlert.showBulletin(context, this, ChatObject.isChannel(currentChat) && !currentChat.megagroup); - } - showBottomOverlayProgress(false, true); - return false; - } - ); - } else { - if (chatInviteRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(chatInviteRunnable); - chatInviteRunnable = null; - } - showBottomOverlayProgress(true, true); - getMessagesController().addUserToChat(currentChat.id, getUserConfig().getCurrentUser(), 0, null, ChatActivity.this, null); - 
NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.closeSearchByActiveAction); + AnimatedEmojiSpan.drawAnimatedEmojis(canvas, getLayout(), stack, 0, null, 0, 0, 0, 1f, colorFilter); + } + }; + emojiStatusSpamHint.setTextColor(getThemedColor(Theme.key_chat_topPanelMessage)); + emojiStatusSpamHint.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13.3f); + emojiStatusSpamHint.setGravity(Gravity.CENTER); + emojiStatusSpamHint.setVisibility(View.GONE); + emojiStatusSpamHint.setLinkTextColor(getThemedColor(Theme.key_windowBackgroundWhiteLinkText)); + topChatPanelView.addView(emojiStatusSpamHint, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 25, 0, 25, 1)); - if (hasReportSpam() && reportSpamButton.getTag(R.id.object_tag) != null) { - SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); - preferences.edit().putInt("dialog_bar_vis3" + dialog_id, 3).apply(); - getNotificationCenter().postNotificationName(NotificationCenter.peerSettingsDidLoad, dialog_id); - } - } - } else { - toggleMute(true); + topViewSeparator1 = new View(getContext()); + topViewSeparator1.setVisibility(View.GONE); + topViewSeparator1.setBackgroundColor(getThemedColor(Theme.key_divider)); + topViewSeparator1.setAlpha(.5f); + topChatPanelView.addView(topViewSeparator1, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 1f / AndroidUtilities.density, Gravity.LEFT | Gravity.BOTTOM, 0, 0, 0, 2)); + topViewSeparator2 = new View(getContext()); + topViewSeparator2.setVisibility(View.GONE); + topViewSeparator2.setBackgroundColor(getThemedColor(Theme.key_divider)); + topChatPanelView.addView(topViewSeparator2, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 1f / AndroidUtilities.density, Gravity.LEFT | Gravity.TOP, 10, 48, 10, 1)); + topViewSeparator3 = new View(getContext()); + topViewSeparator3.setVisibility(View.GONE); + topViewSeparator3.setBackgroundColor(getThemedColor(Theme.key_divider)); + topChatPanelView.addView(topViewSeparator3, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 1f / AndroidUtilities.density, Gravity.LEFT | Gravity.BOTTOM, 0, 0, 0, 38)); + + addToContactsButton = new TextView(getContext()); + addToContactsButton.setTextColor(getThemedColor(Theme.key_chat_addContact)); + addToContactsButton.setVisibility(View.GONE); + addToContactsButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + addToContactsButton.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + addToContactsButton.setSingleLine(true); + addToContactsButton.setMaxLines(1); + addToContactsButton.setPadding(AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4), 0); + addToContactsButton.setGravity(Gravity.CENTER); + if (Build.VERSION.SDK_INT >= 21) { + addToContactsButton.setBackground(Theme.createSelectorDrawable(getThemedColor(Theme.key_chat_addContact) & 0x19ffffff, 3)); + } + topChatPanelView.addView(addToContactsButton, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 1)); + addToContactsButton.setOnClickListener(v -> { + if (addToContactsButtonArchive) { + getMessagesController().addDialogToFolder(dialog_id, 0, 0, 0); + createUndoView(); + undoView.showWithAction(dialog_id, UndoView.ACTION_CHAT_UNARCHIVED, null); + SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); + SharedPreferences.Editor editor = preferences.edit(); + editor.putBoolean("dialog_bar_archived" + dialog_id, false); + editor.putBoolean("dialog_bar_block" + 
dialog_id, false); + editor.putBoolean("dialog_bar_report" + dialog_id, false); + editor.commit(); + updateTopPanel(false); + getNotificationsController().clearDialogNotificationsSettings(dialog_id, getTopicId()); + } else if (addToContactsButton.getTag() != null && (Integer) addToContactsButton.getTag() == 4) { + if (chatInfo != null && chatInfo.participants != null) { + LongSparseArray users = new LongSparseArray<>(); + for (int a = 0; a < chatInfo.participants.participants.size(); a++) { + users.put(chatInfo.participants.participants.get(a).user_id, null); } - } else { - boolean canDeleteHistory = chatInfo != null && chatInfo.can_delete_channel; - AlertsCreator.createClearOrDeleteDialogAlert(ChatActivity.this, false, currentChat, currentUser, currentEncryptedChat != null, true, canDeleteHistory, (param) -> { - getNotificationCenter().removeObserver(ChatActivity.this, NotificationCenter.closeChats); - getNotificationCenter().postNotificationName(NotificationCenter.closeChats); - finishFragment(); - getNotificationCenter().postNotificationName(NotificationCenter.needDeleteDialog, dialog_id, currentUser, currentChat, param); - }, themeDelegate); + long chatId = chatInfo.id; + InviteMembersBottomSheet bottomSheet = new InviteMembersBottomSheet(getContext(), currentAccount, users, chatInfo.id, ChatActivity.this, themeDelegate); + bottomSheet.setDelegate((users1, fwdCount) -> { + getMessagesController().addUsersToChat(currentChat, ChatActivity.this, users1, fwdCount, null, null, null); + getMessagesController().hidePeerSettingsBar(dialog_id, currentUser, currentChat); + updateTopPanel(true); + updateInfoTopView(true); + }); + bottomSheet.show(); } + } else if (addToContactsButton.getTag() != null) { + shareMyContact(1, null); + } else { + Bundle args = new Bundle(); + args.putLong("user_id", currentUser.id); + args.putBoolean("addContact", true); + ContactAddActivity activity = new ContactAddActivity(args); + activity.setDelegate(() -> { + createUndoView(); + undoView.showWithAction(dialog_id, UndoView.ACTION_CONTACT_ADDED, currentUser); + }); + presentFragment(activity); } }); - bottomOverlayProgress = new RadialProgressView(context, themeDelegate); - bottomOverlayProgress.setSize(AndroidUtilities.dp(22)); - bottomOverlayProgress.setProgressColor(getThemedColor(Theme.key_chat_fieldOverlayText)); - bottomOverlayProgress.setVisibility(View.INVISIBLE); - bottomOverlayProgress.setScaleX(0.1f); - bottomOverlayProgress.setScaleY(0.1f); - bottomOverlayProgress.setAlpha(1.0f); - bottomOverlayChat.addView(bottomOverlayProgress, LayoutHelper.createFrame(30, 30, Gravity.CENTER)); + restartTopicButton = new TextView(getContext()); + restartTopicButton.setTextColor(getThemedColor(Theme.key_chat_addContact)); + restartTopicButton.setVisibility(View.GONE); + restartTopicButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + restartTopicButton.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + restartTopicButton.setSingleLine(true); + restartTopicButton.setMaxLines(1); + restartTopicButton.setPadding(AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4), 0); + restartTopicButton.setGravity(Gravity.CENTER); + restartTopicButton.setText(LocaleController.getString("RestartTopic", R.string.RestartTopic).toUpperCase()); + if (Build.VERSION.SDK_INT >= 21) { + restartTopicButton.setBackground(Theme.createSelectorDrawable(getThemedColor(Theme.key_chat_addContact) & 0x19ffffff, 3)); + } + topChatPanelView.addView(restartTopicButton, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.LEFT | 
Gravity.TOP, 0, 0, 0, 1)); + restartTopicButton.setOnClickListener(v -> { + getMessagesController().getTopicsController().toggleCloseTopic(currentChat.id, forumTopic.id, forumTopic.closed = false); + updateTopicButtons(); + updateBottomOverlay(); + updateTopPanel(true); + }); - bottomOverlayImage = new ImageView(context); - int color = getThemedColor(Theme.key_chat_fieldOverlayText); - bottomOverlayImage.setImageResource(R.drawable.msg_help); - bottomOverlayImage.setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY)); - bottomOverlayImage.setScaleType(ImageView.ScaleType.CENTER); + closeReportSpam = new ImageView(getContext()); + closeReportSpam.setImageResource(R.drawable.miniplayer_close); + closeReportSpam.setContentDescription(LocaleController.getString("Close", R.string.Close)); if (Build.VERSION.SDK_INT >= 21) { - bottomOverlayImage.setBackgroundDrawable(Theme.createSelectorDrawable(Color.argb(24, Color.red(color), Color.green(color), Color.blue(color)), 1)); + closeReportSpam.setBackground(Theme.AdaptiveRipple.circle(getThemedColor(Theme.key_chat_topPanelClose))); } - bottomOverlayChat.addView(bottomOverlayImage, LayoutHelper.createFrame(48, 48, Gravity.RIGHT | Gravity.TOP, 3, 1.5f, 0, 0)); - bottomOverlayImage.setContentDescription(LocaleController.getString("SettingsHelp", R.string.SettingsHelp)); - bottomOverlayImage.setOnClickListener(v -> undoView.showWithAction(dialog_id, UndoView.ACTION_TEXT_INFO, LocaleController.getString("BroadcastGroupInfo", R.string.BroadcastGroupInfo))); + closeReportSpam.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_topPanelClose), PorterDuff.Mode.MULTIPLY)); + closeReportSpam.setScaleType(ImageView.ScaleType.CENTER); + topChatPanelView.addView(closeReportSpam, LayoutHelper.createFrame(36, 36, Gravity.RIGHT | Gravity.TOP, 0, 6, 2, 0)); + closeReportSpam.setOnClickListener(v -> { + long did = dialog_id; + if (currentEncryptedChat != null) { + did = currentUser.id; + } + getMessagesController().hidePeerSettingsBar(did, currentUser, currentChat); + updateTopPanel(true); + updateInfoTopView(true); + }); + } - replyButton = new TextView(context); - replyButton.setText(LocaleController.getString("Reply", R.string.Reply)); - replyButton.setGravity(Gravity.CENTER_VERTICAL); - replyButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); - replyButton.setPadding(AndroidUtilities.dp(14), 0, AndroidUtilities.dp(21), 0); - replyButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), 3)); - replyButton.setTextColor(getThemedColor(Theme.key_actionBarActionModeDefaultIcon)); - replyButton.setCompoundDrawablePadding(AndroidUtilities.dp(7)); - replyButton.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - Drawable image = context.getResources().getDrawable(R.drawable.input_reply).mutate(); - image.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_actionBarActionModeDefaultIcon), PorterDuff.Mode.SRC_IN)); - replyButton.setCompoundDrawablesWithIntrinsicBounds(image, null, null, null); - replyButton.setOnClickListener(v -> { - MessageObject messageObject = null; - for (int a = 1; a >= 0; a--) { - if (messageObject == null && selectedMessagesIds[a].size() != 0) { - messageObject = messagesDict[a].get(selectedMessagesIds[a].keyAt(0)); + private void createTranslateButton() { + if (translateButton != null || getContext() == null) { + return; + } + + createTopPanel(); + translateButton = new TranslateButton(getContext(), this, themeDelegate) 
{ + @Override + protected void onButtonClick() { + if (getUserConfig().isPremium()) { + getMessagesController().getTranslateController().toggleTranslatingDialog(getDialogId()); + } else { + MessagesController.getNotificationsSettings(currentAccount).edit().putInt("dialog_show_translate_count" + getDialogId(), 14).commit(); + showDialog(new PremiumFeatureBottomSheet(ChatActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_TRANSLATIONS, false)); } - selectedMessagesIds[a].clear(); - selectedMessagesCanCopyIds[a].clear(); - selectedMessagesCanStarIds[a].clear(); - } - hideActionMode(); - if (messageObject != null && (messageObject.messageOwner.id > 0 || messageObject.messageOwner.id < 0 && currentEncryptedChat != null)) { - showFieldPanelForReply(messageObject); + updateTopPanel(true); } - updatePinnedMessageView(true); - updateVisibleRows(); - }); - bottomMessagesActionContainer.addView(replyButton, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP)); - forwardButton = new TextView(context); - forwardButton.setText(LocaleController.getString("Forward", R.string.Forward)); - forwardButton.setGravity(Gravity.CENTER_VERTICAL); - forwardButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); - forwardButton.setPadding(AndroidUtilities.dp(21), 0, AndroidUtilities.dp(21), 0); - forwardButton.setCompoundDrawablePadding(AndroidUtilities.dp(6)); - forwardButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), 3)); - forwardButton.setTextColor(getThemedColor(Theme.key_actionBarActionModeDefaultIcon)); - forwardButton.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - image = context.getResources().getDrawable(R.drawable.msg_forward).mutate(); - image.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_actionBarActionModeDefaultIcon), PorterDuff.Mode.SRC_IN)); - forwardButton.setCompoundDrawablesWithIntrinsicBounds(image, null, null, null); - forwardButton.setOnClickListener(v -> { - noForwardQuote = false; - openForward(false); - }); - bottomMessagesActionContainer.addView(forwardButton, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT, Gravity.RIGHT | Gravity.TOP)); + @Override + protected void onCloseClick() { + MessagesController.getNotificationsSettings(currentAccount).edit().putInt("dialog_show_translate_count" + getDialogId(), 140).commit(); + updateTopPanel(true); + } + }; + topChatPanelView.addView(translateButton, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.LEFT | Gravity.BOTTOM, 0, 0, 0, 2)); + } - contentView.addView(searchContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 51, Gravity.BOTTOM)); - contentView.addView(messageEnterTransitionContainer = new MessageEnterTransitionContainer(contentView, currentAccount)); - undoView = new UndoView(context, this, false, themeDelegate); + private void createUndoView() { + if (undoView != null || getContext() == null) { + return; + } + undoView = new UndoView(getContext(), this, false, themeDelegate); undoView.setAdditionalTranslationY(AndroidUtilities.dp(51)); contentView.addView(undoView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM | Gravity.LEFT, 8, 0, 8, 8)); + } - if (currentChat != null) { - slowModeHint = new HintView(getParentActivity(), 2, themeDelegate); - slowModeHint.setAlpha(0.0f); - slowModeHint.setVisibility(View.INVISIBLE); - contentView.addView(slowModeHint, 
LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 19, 0, 19, 0)); - } + @Override + public INavigationLayout.BackButtonState getBackButtonState() { + return INavigationLayout.BackButtonState.BACK; + } - chatAdapter.updateRowsSafe(); - if (loading && messages.isEmpty()) { - showProgressView(chatAdapter.botInfoRow < 0); - chatListView.setEmptyView(null); - } else { - showProgressView(false); - chatListView.setEmptyView(emptyViewContainer); + private void createActionMode() { + if (selectedMessagesCountTextView != null || getContext() == null) { + return; } + final ActionBarMenu actionMode = actionBar.createActionMode(); - checkBotKeyboard(); - updateBottomOverlay(); - updateSecretStatus(); - updateTopPanel(false); - updatePinnedMessageView(false); - updateInfoTopView(false); + selectedMessagesCountTextView = new NumberTextView(actionMode.getContext()); + selectedMessagesCountTextView.setTextSize(18); + selectedMessagesCountTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + selectedMessagesCountTextView.setTextColor(getThemedColor(Theme.key_actionBarActionModeDefaultIcon)); + selectedMessagesCountTextView.setOnTouchListener((v, event) -> true); + actionMode.addView(selectedMessagesCountTextView, LayoutHelper.createLinear(0, LayoutHelper.MATCH_PARENT, 1.0f, 65, 0, 0, 0)); - chatScrollHelper = new RecyclerAnimationScrollHelper(chatListView, chatLayoutManager); - chatScrollHelper.setScrollListener(this::invalidateMessagesVisiblePart); - chatScrollHelper.setAnimationCallback(chatScrollHelperCallback); + DisplayMetrics displayMetrics = new DisplayMetrics(); + getParentActivity().getWindowManager().getDefaultDisplay().getMetrics(displayMetrics); - try { - if (!NekoXConfig.disableFlagSecure) { - if (currentEncryptedChat != null && (SharedConfig.passcodeHash.length() == 0 || SharedConfig.allowScreenCapture)) { - unregisterFlagSecurePasscode = AndroidUtilities.registerFlagSecure(getParentActivity().getWindow()); - } - if (getMessagesController().isChatNoForwards(currentChat)) { - unregisterFlagSecureNoforwards = AndroidUtilities.registerFlagSecure(getParentActivity().getWindow()); - } - } - } catch (Throwable e) { - FileLog.e(e); - } - if (oldMessage != null) { - chatActivityEnterView.setFieldText(oldMessage); + int maxActionBarItems = (int) (Math.ceil(displayMetrics.widthPixels / (double) AndroidUtilities.dp(54))) - 2; + shrinkActionBarItems = maxActionBarItems < 6; + + actionModeViews.add(actionMode.addItemWithWidth(nkactionbarbtn_reply, R.drawable.baseline_reply_24, AndroidUtilities.dp(54), LocaleController.getString("Reply", R.string.Reply))); + actionModeViews.add(actionMode.addItemWithWidth(edit, R.drawable.baseline_edit_24, AndroidUtilities.dp(54), LocaleController.getString("Edit", R.string.Edit))); + actionModeViews.add(actionMode.addItemWithWidth(nkactionbarbtn_selectBetween, R.drawable.ic_select_between, AndroidUtilities.dp(54), LocaleController.getString("SelectBetween", R.string.SelectBetween))); + actionModeViews.add(actionMode.addItemWithWidth(copy, R.drawable.baseline_content_copy_24, AndroidUtilities.dp(54), LocaleController.getString("Copy", R.string.Copy))); + + if (currentEncryptedChat == null) { + actionModeViews.add(actionMode.addItemWithWidth(forward, R.drawable.baseline_forward_24, AndroidUtilities.dp(54), LocaleController.getString("Forward", R.string.Forward))); } - fixLayoutInternal(); - textSelectionHelper.setCallback(new TextSelectionHelper.Callback() { - @Override - public void 
onStateChanged(boolean isSelected) { - swipeBackEnabled = !isSelected; - if (isSelected) { - if (slidingView != null) { - slidingView.setSlidingOffset(0); - slidingView = null; - } - maybeStartTrackingSlidingView = false; - startedTrackingSlidingView = false; - if (textSelectionHint != null) { - textSelectionHint.hide(); - } - } - updatePagedownButtonVisibility(true); - } + actionModeViews.add(actionMode.addItemWithWidth(delete, R.drawable.baseline_delete_24, AndroidUtilities.dp(54), LocaleController.getString("Delete", R.string.Delete))); + actionModeViews.add(actionModeOtherItem = actionMode.addItemWithWidth(nkactionbarbtn_action_mode_other, R.drawable.ic_ab_other, AndroidUtilities.dp(54), LocaleController.getString("MessageMenu", R.string.MessageMenu))); - @Override - public void onTextCopied() { - if (actionBar != null && actionBar.isActionModeShowed()) { - clearSelectionMode(); - } - undoView.showWithAction(0, UndoView.ACTION_TEXT_COPIED, null); - } - }); + if (currentEncryptedChat == null) { + actionModeOtherItem.addSubItem(forward, R.drawable.baseline_forward_24, LocaleController.getString("Forward", R.string.Forward)); + } - contentView.addView(textSelectionHelper.getOverlayView(context)); - fireworksOverlay = new FireworksOverlay(context); - contentView.addView(fireworksOverlay, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - textSelectionHelper.setParentView(chatListView); + boolean noforward = getMessagesController().isChatNoForwards(currentChat); - long searchFromUserId = getArguments().getLong("search_from_user_id", 0); - long searchFromChatId = getArguments().getLong("search_from_chat_id", 0); - if (searchFromUserId != 0) { - TLRPC.User user = getMessagesController().getUser(searchFromUserId); - if (user != null) { - openSearchWithText(""); - searchUserButton.callOnClick(); - searchUserMessages(user, null); - } - } else if (searchFromChatId != 0) { - TLRPC.Chat chat = getMessagesController().getChat(searchFromChatId); - if (chat != null) { - openSearchWithText(""); - searchUserButton.callOnClick(); - searchUserMessages(null, chat); - } + if (currentEncryptedChat == null || !noforward) { + actionModeOtherItem.addSubItem(nkbtn_forward_noquote, R.drawable.baseline_fast_forward_24, LocaleController.getString("NoQuoteForward", R.string.NoQuoteForward)); + actionModeOtherItem.addSubItem(star, R.drawable.baseline_favorite_20, LocaleController.getString("AddToFavorites", R.string.AddToFavorites)); + actionModeOtherItem.addSubItem(save_to, R.drawable.msg_download, LocaleController.getString("SaveToMusic", R.string.SaveToMusic)); } - if (replyingMessageObject != null) { - chatActivityEnterView.setReplyingMessageObject(replyingMessageObject); + + actionModeOtherItem.addSubItem(nkbtn_translate, R.drawable.ic_translate, LocaleController.getString("Translate", R.string.Translate)); + if (NekoConfig.showShareMessages.Bool()) + actionModeOtherItem.addSubItem(nkbtn_sharemessage, R.drawable.baseline_share_24, LocaleController.getString("ShareMessages", R.string.ShareMessages)); + actionModeOtherItem.addSubItem(nkbtn_unpin, R.drawable.deproko_baseline_pin_undo_24, LocaleController.getString("UnpinMessage", R.string.UnpinMessage)); + if (!noforward) + actionModeOtherItem.addSubItem(nkbtn_savemessage, R.drawable.baseline_bookmark_24, LocaleController.getString("AddToSavedMessages", R.string.AddToSavedMessages)); + if (NekoConfig.showRepeat.Bool() && !noforward) + actionModeOtherItem.addSubItem(nkbtn_repeat, R.drawable.msg_repeat, 
LocaleController.getString("Repeat", R.string.Repeat)); + + if (NekoConfig.showMessageHide.Bool()) { + actionModeOtherItem.addSubItem(nkbtn_hide, R.drawable.baseline_remove_circle_24, LocaleController.getString("Hide", R.string.Hide)); } - ViewGroup decorView; - if (Build.VERSION.SDK_INT >= 21) { - decorView = (ViewGroup) getParentActivity().getWindow().getDecorView(); - } else { - decorView = contentView; + actionMode.getItem(nkactionbarbtn_reply).setVisibility(ChatObject.canSendMessages(currentChat) && selectedMessagesIds[0].size() + selectedMessagesIds[1].size() == 1 ? View.VISIBLE : View.GONE); + actionMode.getItem(edit).setVisibility(canEditMessagesCount == 1 && selectedMessagesIds[0].size() + selectedMessagesIds[1].size() == 1 ? View.VISIBLE : View.GONE); + actionMode.getItem(copy).setVisibility(!getMessagesController().isChatNoForwardsWithOverride(currentChat) && selectedMessagesCanCopyIds[0].size() + selectedMessagesCanCopyIds[1].size() != 0 ? View.VISIBLE : View.GONE); + actionMode.getItem(delete).setVisibility(cantDeleteMessagesCount == 0 ? View.VISIBLE : View.GONE); + + actionModeOtherItem.setSubItemVisibility(star, selectedMessagesCanStarIds[0].size() + selectedMessagesCanStarIds[1].size() != 0); + + if (shrinkActionBarItems) { + actionMode.getItem(nkactionbarbtn_reply).setVisibility(View.GONE); } - pinchToZoomHelper = new PinchToZoomHelper(decorView, contentView) { - @Override - protected void drawOverlays(Canvas canvas, float alpha, float parentOffsetX, float parentOffsetY, float clipTop, float clipBottom) { - if (alpha > 0) { - View view = getChild(); - if (view instanceof ChatMessageCell) { - ChatMessageCell cell = (ChatMessageCell) view; +} - int top = (int) Math.max(clipTop, parentOffsetY); - int bottom = (int) Math.min(clipBottom, parentOffsetY + cell.getMeasuredHeight()); - AndroidUtilities.rectTmp.set(parentOffsetX, top, parentOffsetX + cell.getMeasuredWidth(), bottom); - canvas.saveLayerAlpha(AndroidUtilities.rectTmp, (int) (255 * alpha), Canvas.ALL_SAVE_FLAG); - canvas.translate(parentOffsetX, parentOffsetY); - cell.drawFromPinchToZoom = true; - cell.drawOverlays(canvas); - if (cell.shouldDrawTimeOnMedia() && cell.getCurrentMessagesGroup() == null) { - cell.drawTime(canvas, 1f, false); - } - cell.drawFromPinchToZoom = false; - canvas.restore(); - } - } - } - }; - pinchToZoomHelper.setCallback(new PinchToZoomHelper.Callback() { + private void createSearchContainer() { + if (searchContainer != null || getContext() == null) { + return; + } + searchContainer = new BlurredFrameLayout(getContext(), contentView) { @Override - public TextureView getCurrentTextureView() { - return videoTextureView; + public void onDraw(Canvas canvas) { + int bottom = Theme.chat_composeShadowDrawable.getIntrinsicHeight(); + if (chatActivityEnterView.getVisibility() != View.VISIBLE) { + Theme.chat_composeShadowDrawable.setBounds(0, 0, getMeasuredWidth(), bottom); + Theme.chat_composeShadowDrawable.draw(canvas); + } + AndroidUtilities.rectTmp2.set(0, bottom, getMeasuredWidth(), getMeasuredHeight()); + contentView.drawBlurRect(canvas, getY(), AndroidUtilities.rectTmp2, getThemedPaint(Theme.key_paint_chatComposeBackground), false); } @Override - public void onZoomStarted(MessageObject messageObject) { - chatListView.cancelClickRunnables(true); - chatListView.stopScroll(); - if (MediaController.getInstance().isPlayingMessage(messageObject)) { - contentView.removeView(videoPlayerContainer); - videoPlayerContainer = null; - videoTextureView = null; - aspectRatioFrameLayout = null; - } - - for (int i 
= 0; i < chatListView.getChildCount(); i++) { - if (chatListView.getChildAt(i) instanceof ChatMessageCell) { - ChatMessageCell cell = (ChatMessageCell) chatListView.getChildAt(i); - if (cell.getMessageObject().getId() == messageObject.getId()) { - cell.getPhotoImage().setVisible(false, true); - } + protected void measureChildWithMargins(View child, int parentWidthMeasureSpec, int widthUsed, int parentHeightMeasureSpec, int heightUsed) { + if (child == searchCountText) { + int leftMargin = 14; + if (searchCalendarButton != null && searchCalendarButton.getVisibility() != GONE) { + leftMargin += 48; + } + if (searchUserButton != null && searchUserButton.getVisibility() != GONE) { + leftMargin += 48; } + ((MarginLayoutParams) child.getLayoutParams()).leftMargin = AndroidUtilities.dp(leftMargin); } + super.measureChildWithMargins(child, parentWidthMeasureSpec, widthUsed, parentHeightMeasureSpec, heightUsed); } + }; + searchContainer.drawBlur = false; + searchContainer.isTopView = false; + searchContainer.setWillNotDraw(false); + searchContainer.setVisibility(View.INVISIBLE); + searchContainer.setPadding(0, AndroidUtilities.dp(3), 0, 0); + searchContainer.setClipToPadding(false); - @Override - public void onZoomFinished(MessageObject messageObject) { - if (messageObject == null) { - return; + searchAsListTogglerView = new View(getContext()); + searchAsListTogglerView.setOnTouchListener((v, event) -> getMediaDataController().getFoundMessageObjects().size() <= 1); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + searchAsListTogglerView.setBackground(Theme.getSelectorDrawable(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), false)); + } + searchAsListTogglerView.setOnClickListener(v -> { + if (getMediaDataController().getFoundMessageObjects().size() > 1) { + if (searchAsListHint != null) { + searchAsListHint.hide(); } - if (MediaController.getInstance().isPlayingMessage(messageObject)) { - for (int i = 0; i < chatListView.getChildCount(); i++) { - if (chatListView.getChildAt(i) instanceof ChatMessageCell) { - ChatMessageCell cell = (ChatMessageCell) chatListView.getChildAt(i); - if (cell.getMessageObject().getId() == messageObject.getId()) { - AnimatedFileDrawable animation = cell.getPhotoImage().getAnimation(); - if (animation.isRunning()) { - animation.stop(); - } - if (animation != null) { - Bitmap bitmap = animation.getAnimatedBitmap(); - if (bitmap != null) { - try { - Bitmap src = pinchToZoomHelper.getVideoBitmap(bitmap.getWidth(), bitmap.getHeight()); - Canvas canvas = new Canvas(bitmap); - canvas.drawBitmap(src, 0, 0, null); - src.recycle(); - } catch (Throwable e) { - FileLog.e(e); - } - } - } - } - } - } - createTextureView(true); - MediaController.getInstance().setTextureView(videoTextureView, aspectRatioFrameLayout, videoPlayerContainer, true); + toggleMesagesSearchListView(); + if (!SharedConfig.searchMessagesAsListUsed) { + SharedConfig.setSearchMessagesAsListUsed(true); } - chatListView.invalidate(); } + }); + final float paddingTop = Theme.chat_composeShadowDrawable.getIntrinsicHeight() / AndroidUtilities.density - 3f; + searchContainer.addView(searchAsListTogglerView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.NO_GRAVITY, 0, paddingTop, 0, 0)); + searchUpButton = new ImageView(getContext()); + searchUpButton.setScaleType(ImageView.ScaleType.CENTER); + searchUpButton.setImageResource(R.drawable.msg_go_up); + searchUpButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_searchPanelIcons), 
PorterDuff.Mode.MULTIPLY)); + searchUpButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), 1)); + searchContainer.addView(searchUpButton, LayoutHelper.createFrame(48, 48, Gravity.RIGHT | Gravity.TOP, 0, 0, 48, 0)); + searchUpButton.setOnClickListener(view -> { + getMediaDataController().searchMessagesInChat(null, dialog_id, mergeDialogId, classGuid, 1, threadMessageId, searchingUserMessages, searchingChatMessages); + showMessagesSearchListView(false); + if (!SharedConfig.searchMessagesAsListUsed && SharedConfig.searchMessagesAsListHintShows < 3 && !searchAsListHintShown && Math.random() <= 0.25) { + showSearchAsListHint(); + searchAsListHintShown = true; + SharedConfig.increaseSearchAsListHintShows(); + } }); - pinchToZoomHelper.setClipBoundsListener(topBottom -> { - topBottom[1] = chatListView.getBottom() - blurredViewBottomOffset; - topBottom[0] = chatListView.getTop() + chatListViewPaddingTop - AndroidUtilities.dp(4); + searchUpButton.setContentDescription(LocaleController.getString("AccDescrSearchNext", R.string.AccDescrSearchNext)); + + searchDownButton = new ImageView(getContext()); + searchDownButton.setScaleType(ImageView.ScaleType.CENTER); + searchDownButton.setImageResource(R.drawable.msg_go_down); + searchDownButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_searchPanelIcons), PorterDuff.Mode.MULTIPLY)); + searchDownButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), 1)); + searchContainer.addView(searchDownButton, LayoutHelper.createFrame(48, 48, Gravity.RIGHT | Gravity.TOP, 0, 0, 0, 0)); + searchDownButton.setOnClickListener(view -> { + getMediaDataController().searchMessagesInChat(null, dialog_id, mergeDialogId, classGuid, 2, threadMessageId, searchingUserMessages, searchingChatMessages); + showMessagesSearchListView(false); }); - emojiAnimationsOverlay = new EmojiAnimationsOverlay(ChatActivity.this, contentView, chatListView, currentAccount, dialog_id, threadMessageId) { - @Override - public void onAllEffectsEnd() { - updateMessagesVisiblePart(false); - } - }; - actionBar.setDrawBlurBackground(contentView); + searchDownButton.setContentDescription(LocaleController.getString("AccDescrSearchPrev", R.string.AccDescrSearchPrev)); - if (isTopic) { - reactionsMentionCount = forumTopic.unread_reactions_count; - updateReactionsMentionButton(false); - } else { - TLRPC.Dialog dialog = getMessagesController().dialogs_dict.get(dialog_id); - if (dialog != null) { - reactionsMentionCount = dialog.unread_reactions_count; - updateReactionsMentionButton(false); - } + if (currentChat != null && (!ChatObject.isChannel(currentChat) || currentChat.megagroup)) { + searchUserButton = new ImageView(getContext()); + searchUserButton.setScaleType(ImageView.ScaleType.CENTER); + searchUserButton.setImageResource(R.drawable.msg_usersearch); + searchUserButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_searchPanelIcons), PorterDuff.Mode.MULTIPLY)); + searchUserButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), 1)); + searchContainer.addView(searchUserButton, LayoutHelper.createFrame(48, 48, Gravity.LEFT | Gravity.TOP, 48, 0, 0, 0)); + searchUserButton.setOnClickListener(view -> { + if (mentionContainer != null) { + mentionContainer.setReversed(true); + mentionContainer.getAdapter().setSearchingMentions(true); + } + 
searchCalendarButton.setVisibility(View.GONE); + searchUserButton.setVisibility(View.GONE); + searchingForUser = true; + searchingUserMessages = null; + searchingChatMessages = null; + searchItem.setSearchFieldHint(LocaleController.getString("SearchMembers", R.string.SearchMembers)); + searchItem.setSearchFieldCaption(LocaleController.getString("SearchFrom", R.string.SearchFrom)); + AndroidUtilities.showKeyboard(searchItem.getSearchField()); + searchItem.clearSearchText(); + }); + searchUserButton.setContentDescription(LocaleController.getString("AccDescrSearchByUser", R.string.AccDescrSearchByUser)); } - BackButtonMenu.addToAccessedDialogs(currentAccount, currentChat, currentUser, dialog_id, dialogFolderId, dialogFilterId); - return fragmentView; - } + searchCalendarButton = new ImageView(getContext()); + searchCalendarButton.setScaleType(ImageView.ScaleType.CENTER); + searchCalendarButton.setImageResource(R.drawable.baseline_date_range_24); + searchCalendarButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_searchPanelIcons), PorterDuff.Mode.SRC_IN)); + searchCalendarButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), 1)); + searchContainer.addView(searchCalendarButton, LayoutHelper.createFrame(48, 48, Gravity.LEFT | Gravity.TOP)); + searchCalendarButton.setOnClickListener(view -> { + if (getParentActivity() == null) { + return; + } + AndroidUtilities.hideKeyboard(searchItem.getSearchField()); + showDialog(AlertsCreator.createCalendarPickerDialog(getParentActivity(), 1375315200000L, new MessagesStorage.IntCallback() { + @Override + public void run(int param) { + jumpToDate(param); + } + }, themeDelegate).create()); + }); + searchCalendarButton.setContentDescription(LocaleController.getString("JumpToDate", R.string.JumpToDate)); - @Override - public INavigationLayout.BackButtonState getBackButtonState() { - return INavigationLayout.BackButtonState.BACK; + // NekoX: go to the first message + searchGoToBeginningButton = new ImageView(getContext()); + searchGoToBeginningButton.setScaleType(ImageView.ScaleType.CENTER); + searchGoToBeginningButton.setImageResource(R.drawable.baseline_arrow_upward_24); + searchGoToBeginningButton.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_searchPanelIcons), PorterDuff.Mode.SRC_IN)); + searchGoToBeginningButton.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(Theme.key_actionBarActionModeDefaultSelector), 1)); + searchContainer.addView(searchGoToBeginningButton, LayoutHelper.createFrame(48, 48, Gravity.RIGHT | Gravity.TOP, 0, 0, 48 * 2, 0)); + searchGoToBeginningButton.setOnClickListener(view -> { + scrollToMessageId(1, 0, false, 0, true, 0); + }); + searchGoToBeginningButton.setContentDescription(LocaleController.getString("GoToBeginning", R.string.GoToBeginning)); + + searchCountText = new SearchCounterView(getContext(), themeDelegate); + searchCountText.setGravity(Gravity.LEFT); + searchContainer.addView(searchCountText, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 0, 0, 108, 0)); + contentView.addView(searchContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, searchContainerHeight, Gravity.BOTTOM)); } public void onPageDownClicked() { @@ -9614,7 +8039,7 @@ public void onPageDownClicked() { }; if (createUnreadMessageAfterId != 0) { scrollToMessageId(createUnreadMessageAfterId, 0, false, returnToLoadIndex, true, 0, inCaseLoading); - } else if 
(returnToMessageId > 0) {
+        } else if (returnToMessageId > 0 || (NekoConfig.rememberAllBackMessages.Bool() && !returnToMessageIdsStack.empty())) {
             if (NekoConfig.rememberAllBackMessages.Bool() && !returnToMessageIdsStack.empty())
                 returnToMessageId = returnToMessageIdsStack.pop();
             scrollToMessageId(returnToMessageId, 0, true, returnToLoadIndex, true, 0, inCaseLoading);
@@ -9647,19 +8072,41 @@ private void playReactionAnimation(Integer messageId) {
     }

     private void dimBehindView(View view, boolean enable) {
-        scrimView = view;
+        setScrimView(view);
         dimBehindView(enable ? 0.2f : 0, view != reactionsMentiondownButton && view != mentiondownButton);
     }

     private void dimBehindView(View view, float value) {
-        scrimView = view;
+        setScrimView(view);
         dimBehindView(value, view != reactionsMentiondownButton && view != mentiondownButton);
     }

+    private void setScrimView(View scrimView) {
+        if (this.scrimView == scrimView) {
+            return;
+        }
+        if (this.scrimView != null) {
+            if (this.scrimView instanceof ChatActionCell) {
+                ((ChatActionCell) this.scrimView).setInvalidateWithParent(null);
+            }
+        }
+        this.scrimView = scrimView;
+        if (this.scrimView instanceof ChatActionCell) {
+            ((ChatActionCell) this.scrimView).setInvalidateWithParent(fragmentView);
+        }
+    }

     public void dimBehindView(boolean enable) {
         dimBehindView(enable ? 0.2f : 0, true);
     }

+    private void checkInstantCameraView() {
+        if (instantCameraView != null || getContext() == null) {
+            return;
+        }
+        instantCameraView = new InstantCameraView(getContext(), this, themeDelegate);
+        contentView.addView(instantCameraView, 21, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP));
+    }
+
     private void dimBehindView(float value, boolean hidePagedownButtons) {
         boolean enable = value > 0;
         if (scrimView instanceof ChatMessageCell) {
@@ -9710,7 +8157,7 @@ private void dimBehindView(float value, boolean hidePagedownButtons) {
             @Override
             public void onAnimationEnd(Animator animation) {
                 if (!enable) {
-                    scrimView = null;
+                    setScrimView(null);
                     scrimViewReaction = null;
                     contentView.invalidate();
                     chatListView.invalidate();
@@ -9718,7 +8165,7 @@ public void onAnimationEnd(Animator animation) {
             }
         });
         if (scrimView != null && scrimViewAlpha <= 0f) {
-            scrimView = null;
+            setScrimView(null);
         }
         scrimAnimatorSet.start();
     }
@@ -9746,7 +8193,45 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
                     heightMeasureSpec
             );
         }
-    };
+    }
+
+    private boolean filledEditTextItemMenu = false;
+    private void checkEditTextItemMenu() {
+        if (filledEditTextItemMenu) {
+            return;
+        }
+
+        ActionBarMenuItem item = editTextItem.createView();
+        item.addSubItem(text_spoiler, LocaleController.getString("Spoiler", R.string.Spoiler));
+        SpannableStringBuilder stringBuilder = new SpannableStringBuilder(LocaleController.getString("Bold", R.string.Bold));
+        stringBuilder.setSpan(new TypefaceSpan(AndroidUtilities.getTypeface("fonts/rmedium.ttf")), 0, stringBuilder.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
+        item.addSubItem(text_bold, stringBuilder);
+        stringBuilder = new SpannableStringBuilder(LocaleController.getString("Italic", R.string.Italic));
+        stringBuilder.setSpan(new TypefaceSpan(AndroidUtilities.getTypeface("fonts/ritalic.ttf")), 0, stringBuilder.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
+        item.addSubItem(text_italic, stringBuilder);
+        stringBuilder = new SpannableStringBuilder(LocaleController.getString("Mono", R.string.Mono));
+        stringBuilder.setSpan(new TypefaceSpan(Typeface.MONOSPACE), 0, stringBuilder.length(),
Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + item.addSubItem(text_mono, stringBuilder); + if (currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 101) { + stringBuilder = new SpannableStringBuilder(LocaleController.getString("Strike", R.string.Strike)); + TextStyleSpan.TextStyleRun run = new TextStyleSpan.TextStyleRun(); + run.flags |= TextStyleSpan.FLAG_STYLE_STRIKE; + stringBuilder.setSpan(new TextStyleSpan(run), 0, stringBuilder.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + item.addSubItem(text_strike, stringBuilder); + + stringBuilder = new SpannableStringBuilder(LocaleController.getString("Underline", R.string.Underline)); + run = new TextStyleSpan.TextStyleRun(); + run.flags |= TextStyleSpan.FLAG_STYLE_UNDERLINE; + stringBuilder.setSpan(new TextStyleSpan(run), 0, stringBuilder.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + item.addSubItem(text_underline, stringBuilder); + } + item.addSubItem(text_link, LocaleController.getString("CreateLink", R.string.CreateLink)); + // NekoX + item.addSubItem(text_mention, LocaleController.getString("CreateMention", R.string.CreateMention)); + item.addSubItem(text_regular, LocaleController.getString("Regular", R.string.Regular)); + + filledEditTextItemMenu = true; + } private void updatePagedownButtonsPosition() { float baseTranslationY = chatActivityEnterView.getAnimatedTop() + chatActivityEnterView.getTranslationY() + (chatActivityEnterTopView.getVisibility() == View.VISIBLE ? chatActivityEnterTopView.getTranslationY() : 0); @@ -9875,7 +8360,7 @@ protected void selectAnotherChat() { } Bundle args = new Bundle(); args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 3); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_FORWARD); args.putInt("hasPoll", hasPoll); args.putBoolean("hasInvoice", hasInvoice); args.putInt("messagesCount", forwardingMessages.messages.size()); @@ -9995,6 +8480,13 @@ private void searchUserMessages(TLRPC.User user, TLRPC.Chat chat) { getMediaDataController().searchMessagesInChat("", dialog_id, mergeDialogId, classGuid, 0, threadMessageId, searchingUserMessages, searchingChatMessages); } + private void updateTranslateItemVisibility() { + if (translateItem == null) { + return; + } + translateItem.setVisibility(getMessagesController().getTranslateController().isTranslateDialogHidden(getDialogId()) && getMessagesController().getTranslateController().isDialogTranslatable(getDialogId()) ? View.VISIBLE : View.GONE); + } + private Animator infoTopViewAnimator; private void updateInfoTopView(boolean animated) { @@ -10070,6 +8562,336 @@ public void onAnimationEnd(Animator animation) { } } + private void checkTopUndoView() { + if (topUndoView != null) { + return; + } + topUndoView = new UndoView(getContext(), this, true, themeDelegate) { + @Override + public void didPressUrl(CharacterStyle span) { + didPressMessageUrl(span, false, null, null); + } + + @Override + public void showWithAction(long did, int action, Object infoObject, Object infoObject2, Runnable actionRunnable, Runnable cancelRunnable) { + setAdditionalTranslationY(fragmentContextView != null && fragmentContextView.isCallTypeVisible() ? 
AndroidUtilities.dp(fragmentContextView.getStyleHeight()) : 0); + super.showWithAction(did, action, infoObject, infoObject2, actionRunnable, cancelRunnable); + } + }; + contentView.addView(topUndoView, 17, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 8, 8, 8, 0)); + } + + private void createPinnedMessageView() { + if (currentEncryptedChat != null || pinnedMessageView != null || getContext() == null) { + return; + } + pinnedMessageView = new BlurredFrameLayout(getContext(), contentView) { + + float lastY; + float startY; + + { + setOnLongClickListener(v -> { + if (AndroidUtilities.isTablet() || isThreadChat()) { + return false; + } + startY = lastY; + openPinnedMessagesList(true); + return true; + }); + } + + @Override + public boolean onTouchEvent(MotionEvent event) { + lastY = event.getY(); + if (event.getAction() == MotionEvent.ACTION_UP) { + finishPreviewFragment(); + } else if (event.getAction() == MotionEvent.ACTION_MOVE) { + float dy = startY - lastY; + movePreviewFragment(dy); + if (dy < 0) { + startY = lastY; + } + } + return super.onTouchEvent(event); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + if (setPinnedTextTranslationX) { + for (int a = 0; a < pinnedNextAnimation.length; a++) { + if (pinnedNextAnimation[a] != null) { + pinnedNextAnimation[a].start(); + } + } + setPinnedTextTranslationX = false; + } + } + + @Override + protected boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (child == pinnedLineView) { + canvas.save(); + canvas.clipRect(0, 0, getMeasuredWidth(), AndroidUtilities.dp(48)); + } + boolean result; + if (child == pinnedMessageTextView[0] || child == pinnedMessageTextView[1]) { + canvas.save(); + canvas.clipRect(0,0,getMeasuredWidth() - AndroidUtilities.dp(38),getMeasuredHeight()); + result = super.drawChild(canvas, child, drawingTime); + canvas.restore(); + } else { + result = super.drawChild(canvas, child, drawingTime); + if (child == pinnedLineView) { + canvas.restore(); + } + } + + return result; + } + }; + pinnedMessageView.setTag(1); + pinnedMessageEnterOffset = -AndroidUtilities.dp(50); + pinnedMessageView.setVisibility(View.GONE); + pinnedMessageView.setBackgroundResource(R.drawable.blockpanel); + pinnedMessageView.backgroundColor = getThemedColor(Theme.key_chat_topPanelBackground); + pinnedMessageView.backgroundPaddingBottom = AndroidUtilities.dp(2); + pinnedMessageView.getBackground().mutate().setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_topPanelBackground), PorterDuff.Mode.MULTIPLY)); + int index = 8; + if (topChatPanelView != null && topChatPanelView.getParent() == contentView) { + index = contentView.indexOfChild(topChatPanelView); + } + contentView.addView(pinnedMessageView, index, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 50, Gravity.TOP | Gravity.LEFT)); + pinnedMessageView.setOnClickListener(v -> { + wasManualScroll = true; + if (isThreadChat() && !isTopic) { + scrollToMessageId(threadMessageId, 0, true, 0, true, 0); + } else if (currentPinnedMessageId != 0) { + int currentPinned = currentPinnedMessageId; + + int forceNextPinnedMessageId = 0; + if (!pinnedMessageIds.isEmpty()) { + if (currentPinned == pinnedMessageIds.get(pinnedMessageIds.size() - 1)) { + forceNextPinnedMessageId = pinnedMessageIds.get(0) + 1; + forceScrollToFirst = true; + } else { + forceNextPinnedMessageId = currentPinned - 1; + forceScrollToFirst = false; + 
} + } + this.forceNextPinnedMessageId = forceNextPinnedMessageId; + if (!forceScrollToFirst) { + forceNextPinnedMessageId = -forceNextPinnedMessageId; + } + scrollToMessageId(currentPinned, 0, true, 0, true, forceNextPinnedMessageId); + updateMessagesVisiblePart(false); + } + }); + pinnedMessageView.setEnabled(!isInPreviewMode()); + + View selector = new View(getContext()); + selector.setBackground(Theme.getSelectorDrawable(false)); + pinnedMessageView.addView(selector, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 2)); + + pinnedLineView = new PinnedLineView(getContext(), themeDelegate); + pinnedMessageView.addView(pinnedLineView, LayoutHelper.createFrame(2, 48, Gravity.LEFT | Gravity.TOP, 8, 0, 0, 0)); + pinnedMessageView.setClipChildren(false); + + pinnedCounterTextView = new NumberTextView(getContext()); + pinnedCounterTextView.setAddNumber(); + pinnedCounterTextView.setTextSize(14); + pinnedCounterTextView.setTextColor(getThemedColor(Theme.key_chat_topPanelTitle)); + pinnedCounterTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + pinnedMessageView.addView(pinnedCounterTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 18, 7, 44, 0)); + + for (int a = 0; a < 2; a++) { + pinnedNameTextView[a] = new TrackingWidthSimpleTextView(getContext()); + pinnedNameTextView[a].setTextSize(14); + pinnedNameTextView[a].setTextColor(getThemedColor(Theme.key_chat_topPanelTitle)); + pinnedNameTextView[a].setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + pinnedMessageView.addView(pinnedNameTextView[a], LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 18, 7.3f, 44, 0)); + + pinnedMessageTextView[a] = new SimpleTextView(getContext()) { + @Override + public void setTranslationY(float translationY) { + super.setTranslationY(translationY); + if (this == pinnedMessageTextView[0] && pinnedNextAnimation[1] != null) { + if (forceScrollToFirst && translationY < 0) { + pinnedLineView.setTranslationY(translationY / 2); + } else { + pinnedLineView.setTranslationY(0); + } + } + } + }; + pinnedMessageTextView[a].setTextSize(14); + pinnedMessageTextView[a].setTextColor(getThemedColor(Theme.key_chat_topPanelMessage)); + pinnedMessageView.addView(pinnedMessageTextView[a], LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 18, Gravity.TOP | Gravity.LEFT, 18, 25.3f, 44, 0)); + + pinnedMessageButton[a] = new PinnedMessageButton(getContext()); + pinnedMessageView.addView(pinnedMessageButton[a], LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 28, Gravity.TOP | Gravity.RIGHT, 0, 10, 14, 0)); + + pinnedMessageImageView[a] = new BackupImageView(getContext()) { + private SpoilerEffect spoilerEffect = new SpoilerEffect(); + private Path path = new Path(); + private float[] radii = new float[8]; + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + + if (hasBlur) { + canvas.save(); + AndroidUtilities.rectTmp.set(0, 0, getWidth(), getHeight()); + + int[] rad = imageReceiver.getRoundRadius(); + radii[0] = radii[1] = rad[0]; + radii[2] = radii[3] = rad[1]; + radii[4] = radii[5] = rad[2]; + radii[6] = radii[7] = rad[3]; + + path.rewind(); + path.addRoundRect(AndroidUtilities.rectTmp, radii, Path.Direction.CW); + canvas.clipPath(path); + + int sColor = Color.WHITE; + spoilerEffect.setColor(ColorUtils.setAlphaComponent(sColor, (int) (Color.alpha(sColor) * 0.325f))); + spoilerEffect.setBounds(0, 0, getWidth(), getHeight()); 
+ spoilerEffect.draw(canvas); + + canvas.restore(); + invalidate(); + } + } + }; + pinnedMessageImageView[a].setBlurAllowed(true); + pinnedMessageImageView[a].setRoundRadius(AndroidUtilities.dp(2)); + pinnedMessageView.addView(pinnedMessageImageView[a], LayoutHelper.createFrame(32, 32, Gravity.TOP | Gravity.LEFT, 17, 8, 0, 0)); + if (a == 1) { + pinnedNameTextView[a].setVisibility(View.INVISIBLE); + pinnedMessageButton[a].setVisibility(View.INVISIBLE); + pinnedMessageTextView[a].setVisibility(View.INVISIBLE); + pinnedMessageImageView[a].setVisibility(View.INVISIBLE); + } + } + + pinnedListButton = new ImageView(getContext()); + pinnedListButton.setImageResource(R.drawable.baseline_menu_24); + pinnedListButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_topPanelClose), PorterDuff.Mode.SRC_IN)); + pinnedListButton.setScaleType(ImageView.ScaleType.CENTER); + pinnedListButton.setContentDescription(LocaleController.getString("AccPinnedMessagesList", R.string.AccPinnedMessagesList)); + pinnedListButton.setVisibility(View.INVISIBLE); + pinnedListButton.setAlpha(0.0f); + pinnedListButton.setScaleX(0.4f); + pinnedListButton.setScaleY(0.4f); + if (Build.VERSION.SDK_INT >= 21) { + pinnedListButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); + } + pinnedMessageView.addView(pinnedListButton, LayoutHelper.createFrame(36, 48, Gravity.RIGHT | Gravity.TOP, 0, 0, 7, 0)); + pinnedListButton.setOnClickListener(v -> openPinnedMessagesList(false)); + + closePinned = new ImageView(getContext()); + closePinned.setImageResource(R.drawable.miniplayer_close); + closePinned.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_topPanelClose), PorterDuff.Mode.MULTIPLY)); + closePinned.setScaleType(ImageView.ScaleType.CENTER); + closePinned.setVisibility(View.GONE); + closePinned.setContentDescription(LocaleController.getString("Close", R.string.Close)); + + pinnedProgress = new RadialProgressView(getContext(), themeDelegate); + pinnedProgress.setVisibility(View.GONE); + pinnedProgress.setSize(AndroidUtilities.dp(16)); + pinnedProgress.setStrokeWidth(2f); + pinnedProgress.setProgressColor(getThemedColor(Theme.key_chat_topPanelLine)); + pinnedMessageView.addView(pinnedProgress, LayoutHelper.createFrame(36, 48, Gravity.RIGHT | Gravity.TOP, 0, 0, 2, 0)); + + if (Build.VERSION.SDK_INT >= 21) { + closePinned.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_inappPlayerClose) & 0x19ffffff, 1, AndroidUtilities.dp(14))); + } + pinnedMessageView.addView(closePinned, LayoutHelper.createFrame(36, 48, Gravity.RIGHT | Gravity.TOP, 0, 0, 2, 0)); + closePinned.setOnClickListener(v -> { + if (getParentActivity() == null) { + return; + } + boolean allowPin; + if (currentChat != null) { + allowPin = ChatObject.canPinMessages(currentChat); + } else if (currentEncryptedChat == null) { + if (userInfo != null) { + allowPin = userInfo.can_pin_message; + } else { + allowPin = false; + } + } else { + allowPin = false; + } + BottomBuilder builder = new BottomBuilder(getParentActivity()); + if (allowPin) { + builder.addItem(LocaleController.getString("UnpinMessageX", R.string.UnpinMessageX), R.drawable.deproko_baseline_pin_undo_24, true, c -> { + MessageObject messageObject = pinnedMessageObjects.get(currentPinnedMessageId); + if (messageObject == null) { + messageObject = messagesDict[0].get(currentPinnedMessageId); + } + unpinMessage(messageObject); + return Unit.INSTANCE; + }); + } + 
builder.addItem(LocaleController.getString("DismissForYourself", R.string.DismissForYourself), R.drawable.baseline_close_24, c -> { + SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); + if (chatInfo != null) { + preferences.edit().putInt("pin_" + dialog_id, chatInfo.pinned_msg_id).apply(); + } else if (userInfo != null) { + preferences.edit().putInt("pin_" + dialog_id, userInfo.pinned_msg_id).apply(); + } + updatePinnedMessageView(true); + return Unit.INSTANCE; + }); + builder.addCancelItem(); + builder.show(); + }); + + // NekoX: long press pinned list button + pinnedListButton.setOnLongClickListener(v -> { + if (getParentActivity() == null) { + return false; + } + boolean allowPin; + if (currentChat != null) { + allowPin = ChatObject.canPinMessages(currentChat); + } else if (currentEncryptedChat == null) { + if (userInfo != null) { + allowPin = userInfo.can_pin_message; + } else { + allowPin = false; + } + } else { + allowPin = false; + } + BottomBuilder builder = new BottomBuilder(getParentActivity()); + if (allowPin) { + builder.addItem(LocaleController.getString("UnpinMessagesAll", R.string.UnpinMessagesAll), R.drawable.deproko_baseline_pin_undo_24, true, c -> { + getMessagesController().unpinAllMessages(currentChat, currentUser); + return Unit.INSTANCE; + }); + } + builder.addItem(LocaleController.getString("DismissForYourself", R.string.DismissForYourself), R.drawable.baseline_close_24, c -> { + SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); + if (chatInfo != null) { + preferences.edit().putInt("pin_" + dialog_id, chatInfo.pinned_msg_id).apply(); + } else if (userInfo != null) { + preferences.edit().putInt("pin_" + dialog_id, userInfo.pinned_msg_id).apply(); + } + updatePinnedMessageView(true); + return Unit.INSTANCE; + }); + builder.addCancelItem(); + builder.show(); + return true; + }); + + updatePinnedListButton(false); + } + private void openAnotherForward() { if (forwardingMessages == null || forwardingMessages.messages == null) { return; @@ -10119,7 +8941,7 @@ private void openAnotherForward() { showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(getThemedColor(Theme.key_dialogTextRed2)); + button.setTextColor(getThemedColor(Theme.key_dialogTextRed)); } } @@ -10265,7 +9087,9 @@ public void setVisibility(int visibility) { blurredView.animate().alpha(1f).setListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { - super.onAnimationEnd(animation); + if (fragmentView == null || chatListView == null) { + return; + } chatListView.invalidate(); fragmentView.invalidate(); } @@ -10295,7 +9119,7 @@ private void showProgressView(boolean show) { if (progressView == null) { return; } - if (DISABLE_PROGRESS_VIEW && !AndroidUtilities.isTablet() && !isComments && currentUser == null && !SharedConfig.getLiteMode().enabled()) { + if (DISABLE_PROGRESS_VIEW && !AndroidUtilities.isTablet() && !isComments && currentUser == null && LiteMode.isEnabled(LiteMode.FLAGS_CHAT)) { animateProgressViewTo = show; return; } @@ -10373,7 +9197,8 @@ private void updateChatListViewTopPadding() { if (!invalidateChatListViewTopPadding || chatListView == null || (fixedKeyboardHeight > 0 && searchExpandProgress == 0)) { return; } - float topPanelViewH = Math.max(0, AndroidUtilities.dp(48) + topChatPanelViewOffset); + float topPanelViewH = Math.max(0, topChatPanelView != null && 
topChatPanelView.getVisibility() == View.VISIBLE ? (topChatPanelView.getLayoutParams().height - AndroidUtilities.dp(2)) : 0); + topPanelViewH += Math.max(-topPanelViewH, topChatPanelViewOffset); float pinnedViewH = 0; if (pinnedMessageView != null && pinnedMessageView.getVisibility() == View.VISIBLE) { pinnedViewH = Math.max(0, AndroidUtilities.dp(48) + pinnedMessageEnterOffset); @@ -10391,7 +9216,7 @@ private void updateChatListViewTopPadding() { chatListViewPaddingTop += contentPanTranslation + bottomPanelTranslationY; float searchExpandOffset = 0; if (searchExpandProgress != 0 && chatActivityEnterView.getVisibility() == View.VISIBLE) { - chatListViewPaddingTop -= (searchExpandOffset = searchExpandProgress * (chatActivityEnterView.getMeasuredHeight() - searchContainer.getMeasuredHeight())); + chatListViewPaddingTop -= (searchExpandOffset = searchExpandProgress * (chatActivityEnterView.getMeasuredHeight() - AndroidUtilities.dp(searchContainerHeight))); } if (bottomPanelTranslationY == 0 && !chatActivityEnterView.panelAnimationInProgress() && (contentView.getLayoutParams().height < 0 || (contentView.getKeyboardHeight() <= AndroidUtilities.dp(20) && chatActivityEnterView.isPopupShowing()))) { chatListViewPaddingTop += contentView.getKeyboardHeight() <= AndroidUtilities.dp(20) && !AndroidUtilities.isInMultiwindow && !inBubbleMode ? chatActivityEnterView.getEmojiPadding() : contentView.getKeyboardHeight(); @@ -10492,7 +9317,9 @@ private void invalidateChatListViewTopPadding() { } } - float translation = contentPanTranslation + contentPaddingTop + Math.max(0, AndroidUtilities.dp(48) + topChatPanelViewOffset); + int topPanelHeight = topChatPanelView != null && topChatPanelView.getVisibility() == View.VISIBLE ? ((topChatPanelView.getLayoutParams() == null ? AndroidUtilities.dp(50) : topChatPanelView.getLayoutParams().height) - AndroidUtilities.dp(2)) : 0; + topPanelHeight = topPanelHeight + (int) Math.max(-topPanelHeight, topChatPanelViewOffset); + float translation = contentPanTranslation + contentPaddingTop + Math.max(0, topPanelHeight); if (pinnedMessageView != null) { translation += pinnedMessageEnterOffset; pinnedMessageView.setTranslationY(translation); @@ -10699,6 +9526,10 @@ private void setPagedownLoading(boolean loading, boolean animated) { public void onAnimationEnd(Animator animation) { if (loading) { pagedownButtonArrow.setVisibility(View.GONE); + if (!startedLoading[0]) { + pagedownButtonLoadingDrawable.reset(); + pagedownButtonLoading.setVisibility(View.VISIBLE); + } } else { pagedownButtonLoading.setVisibility(View.GONE); } @@ -10786,9 +9617,12 @@ private void openForward(boolean fromActionBar) { break; } } + if (selectionReactionsOverlay != null && selectionReactionsOverlay.isVisible()) { + selectionReactionsOverlay.setHiddenByScroll(true); + } Bundle args = new Bundle(); args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 3); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_FORWARD); args.putInt("messagesCount", canForwardMessagesCount); args.putInt("hasPoll", hasPoll); args.putBoolean("hasInvoice", hasInvoice); @@ -10957,6 +9791,13 @@ public void jumpToDate(int date) { } } + public void processInlineBotWebView(TLRPC.TL_inlineBotWebView object) { + BotWebViewSheet webViewSheet = new BotWebViewSheet(getContext(), getResourceProvider()); + webViewSheet.setParentActivity(getParentActivity()); + webViewSheet.requestWebView(currentAccount, currentUser != null ? 
currentUser.id : currentChat.id, mentionContainer.getAdapter().getFoundContextBot().id, object.text, object.url, BotWebViewSheet.TYPE_SIMPLE_WEB_VIEW_BUTTON, 0, false, BotWebViewSheet.FLAG_FROM_INLINE_SWITCH); + webViewSheet.show(); + } + public void processInlineBotContextPM(TLRPC.TL_inlineBotSwitchPM object) { if (object == null || mentionContainer == null) { return; @@ -10982,7 +9823,7 @@ public void processInlineBotContextPM(TLRPC.TL_inlineBotSwitchPM object) { } private void createChatAttachView() { - if (getParentActivity() == null) { + if (getParentActivity() == null || getContext() == null) { return; } if (chatAttachAlert == null) { @@ -11004,7 +9845,9 @@ public void onDismissAnimationStart() { if (chatAttachAlert != null) { chatAttachAlert.setFocusable(false); } - chatActivityEnterView.getEditField().requestFocus(); + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { + chatActivityEnterView.getEditField().requestFocus(); + } if (chatAttachAlert != null && chatAttachAlert.isShowing()) { AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); } @@ -11139,6 +9982,10 @@ public void doOnIdle(Runnable runnable) { public void performHistoryClear(boolean revoke, boolean canDeleteHistory) { clearingHistory = true; + createUndoView(); + if (undoView == null) { + return; + } undoView.showWithAction(dialog_id, UndoView.ACTION_CLEAR, () -> { if (!pinnedMessageIds.isEmpty()) { SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); @@ -11175,7 +10022,9 @@ public boolean openedWithLivestream() { return livestream; } + @Nullable public UndoView getUndoView() { + createUndoView(); return undoView; } @@ -11591,6 +10440,16 @@ private void showMediaBannedHint() { if (userInfo != null && userInfo.voice_messages_forbidden) { mediaBanTooltip.setText(AndroidUtilities.replaceTags(LocaleController.formatString(chatActivityEnterView.isInVideoMode() ? 
R.string.VideoMessagesRestrictedByPrivacy : R.string.VoiceMessagesRestrictedByPrivacy, currentUser.first_name))); + } else if (!ChatObject.canSendVoice(currentChat) && !ChatObject.canSendRoundVideo(currentChat)) { + if (chatActivityEnterView.isInVideoMode()) { + mediaBanTooltip.setText(ChatObject.getRestrictedErrorText(currentChat, ChatObject.ACTION_SEND_ROUND)); + } else { + mediaBanTooltip.setText(ChatObject.getRestrictedErrorText(currentChat, ChatObject.ACTION_SEND_VOICE)); + } + } else if (ChatObject.isActionBannedByDefault(currentChat, ChatObject.ACTION_SEND_VOICE)) { + mediaBanTooltip.setText(LocaleController.getString("GlobalAttachVoiceRestricted", R.string.GlobalAttachVoiceRestricted)); + } else if (ChatObject.isActionBannedByDefault(currentChat, ChatObject.ACTION_SEND_ROUND)) { + mediaBanTooltip.setText(LocaleController.getString("GlobalAttachRoundRestricted", R.string.GlobalAttachRoundRestricted)); } else if (ChatObject.isActionBannedByDefault(currentChat, ChatObject.ACTION_SEND_MEDIA)) { mediaBanTooltip.setText(LocaleController.getString("GlobalAttachMediaRestricted", R.string.GlobalAttachMediaRestricted)); } else { @@ -11910,142 +10769,38 @@ public void onAnimationEnd(Animator animation) { } }); AnimatorSet.setDuration(300); - AnimatorSet.start(); - }, 2000); - } - }); - AnimatorSet.setDuration(300); - AnimatorSet.start(); - - View emojiButton = chatActivityEnterView.getEmojiButton(); - if (emojiButton != null) { - gifHintTextView.showForView(emojiButton, true); - } - return true; - } - - private void openAttachMenu() { - if (getParentActivity() == null || chatActivityEnterView != null && !TextUtils.isEmpty(chatActivityEnterView.getSlowModeTimer())) { - return; - } - createChatAttachView(); - chatAttachAlert.getPhotoLayout().loadGalleryPhotos(); - if (Build.VERSION.SDK_INT == 21 || Build.VERSION.SDK_INT == 22) { - chatActivityEnterView.closeKeyboard(); - } - if (currentChat != null && !ChatObject.hasAdminRights(currentChat) && currentChat.slowmode_enabled) { - chatAttachAlert.setMaxSelectedPhotos(10, true); - } else { - chatAttachAlert.setMaxSelectedPhotos(-1, true); - } - chatAttachAlert.init(); - chatAttachAlert.getCommentTextView().setText(chatActivityEnterView.getFieldText()); - chatAttachAlert.parentThemeDelegate = themeDelegate; - showDialog(chatAttachAlert); - } - - private void checkAutoDownloadMessages(boolean scrollUp) { - if (chatListView == null) { - return; - } - int count = chatListView.getChildCount(); - int firstMessagePosition = -1; - int lastMessagePosition = -1; - for (int a = 0; a < count; a++) { - View child = chatListView.getChildAt(a); - if (!(child instanceof ChatMessageCell)) { - continue; - } - RecyclerListView.ViewHolder holder = chatListView.findContainingViewHolder(child); - if (holder != null) { - int p = holder.getAdapterPosition(); - if (firstMessagePosition == -1) { - firstMessagePosition = p; - } - lastMessagePosition = p; - } - - ChatMessageCell cell = (ChatMessageCell) child; - MessageObject object = cell.getMessageObject(); - if (object == null || object.mediaExists || !object.isSent() || object.loadingCancelled) { - continue; - } - TLRPC.Document document = object.getDocument(); - if (document == null) { - continue; - } - int canDownload; - if (!MessageObject.isStickerDocument(document) && !MessageObject.isAnimatedStickerDocument(document, true) && !MessageObject.isGifDocument(document) && !MessageObject.isRoundVideoDocument(document) - && (canDownload = getDownloadController().canDownloadMedia(object.messageOwner)) != 0) { - if 
(canDownload == 2) { - if (currentEncryptedChat == null && !object.shouldEncryptPhotoOrVideo() && object.canStreamVideo()) { - getFileLoader().loadFile(document, object, 0, 10); - } - } else { - int cacheType; - if (object.isWallpaper() || object.isTheme()) { - cacheType = 1; - } else if (MessageObject.isVideoDocument(document) && object.shouldEncryptPhotoOrVideo()) { - cacheType = 2; - } else { - cacheType = 0; - } - getFileLoader().loadFile(document, object, 0, cacheType); - cell.updateButtonState(false, true, false); - } - } - } - if (firstMessagePosition != -1) { - int lastPosition; - if (scrollUp) { - firstMessagePosition = lastPosition = lastMessagePosition; - firstMessagePosition = Math.min(firstMessagePosition + 10, chatAdapter.messagesEndRow); - for (int a = lastPosition, N = messages.size(); a < firstMessagePosition; a++) { - int n = a - chatAdapter.messagesStartRow; - if (n < 0 || n >= N) { - continue; - } - checkAutoDownloadMessage(messages.get(n)); - } - } else { - lastPosition = Math.max(firstMessagePosition - 20, chatAdapter.messagesStartRow); - for (int a = firstMessagePosition - 1, N = messages.size(); a >= lastPosition; a--) { - int n = a - chatAdapter.messagesStartRow; - if (n < 0 || n >= N) { - continue; - } - checkAutoDownloadMessage(messages.get(n)); - } + AnimatorSet.start(); + }, 2000); } + }); + AnimatorSet.setDuration(300); + AnimatorSet.start(); + + View emojiButton = chatActivityEnterView.getEmojiButton(); + if (emojiButton != null) { + gifHintTextView.showForView(emojiButton, true); } - showNoSoundHint(); + return true; } - private void checkAutoDownloadMessage(MessageObject object) { - if (object.mediaExists) { - return; - } - TLRPC.Message message = object.messageOwner; - int canDownload = getDownloadController().canDownloadMedia(message); - if (canDownload == 0) { + private void openAttachMenu() { + if (getParentActivity() == null || chatActivityEnterView != null && !TextUtils.isEmpty(chatActivityEnterView.getSlowModeTimer())) { return; } - TLRPC.Document document = object.getDocument(); - TLRPC.PhotoSize photo = document == null ? FileLoader.getClosestPhotoSizeWithSize(object.photoThumbs, AndroidUtilities.getPhotoSize()) : null; - if (document == null && photo == null) { - return; + createChatAttachView(); + chatAttachAlert.getPhotoLayout().loadGalleryPhotos(); + if (Build.VERSION.SDK_INT == 21 || Build.VERSION.SDK_INT == 22) { + chatActivityEnterView.closeKeyboard(); } - if (canDownload == 2 || canDownload == 1 && object.isVideo()) { - if (document != null && currentEncryptedChat == null && !object.shouldEncryptPhotoOrVideo() && object.canStreamVideo()) { - getFileLoader().loadFile(document, object, FileLoader.PRIORITY_LOW, 10); - } + if (currentChat != null && !ChatObject.hasAdminRights(currentChat) && currentChat.slowmode_enabled) { + chatAttachAlert.setMaxSelectedPhotos(10, true); } else { - if (document != null) { - getFileLoader().loadFile(document, object, FileLoader.PRIORITY_LOW, MessageObject.isVideoDocument(document) && object.shouldEncryptPhotoOrVideo() ? 2 : 0); - } else { - getFileLoader().loadFile(ImageLocation.getForObject(photo, object.photoThumbsObject), object, null, FileLoader.PRIORITY_LOW, object.shouldEncryptPhotoOrVideo() ? 
2 : 0); - } + chatAttachAlert.setMaxSelectedPhotos(-1, true); } + chatAttachAlert.init(); + chatAttachAlert.getCommentTextView().setText(chatActivityEnterView.getFieldText()); + chatAttachAlert.parentThemeDelegate = themeDelegate; + showDialog(chatAttachAlert); } private void showFloatingDateView(boolean scroll) { @@ -12145,6 +10900,8 @@ private void checkScrollForLoad(boolean scroll) { visibleItemCount++; } } + final int firstVisibleItemFinal = firstVisibleItem; + final int visibleItemCountFinal = visibleItemCount; int totalItemCount = chatAdapter.getItemCount(); int checkLoadCount; if (scroll) { @@ -12152,32 +10909,34 @@ private void checkScrollForLoad(boolean scroll) { } else { checkLoadCount = 5; } - if (totalItemCount - firstVisibleItem - visibleItemCount <= checkLoadCount && !loading) { - if (!endReached[0]) { - loading = true; - waitingForLoad.add(lastLoadIndex); - if (messagesByDays.size() != 0) { - getMessagesController().loadMessages(dialog_id, mergeDialogId, false, 50, maxMessageId[0], 0, !cacheEndReached[0], minDate[0], classGuid, 0, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); - } else { - getMessagesController().loadMessages(dialog_id, mergeDialogId, false, 50, 0, 0, !cacheEndReached[0], minDate[0], classGuid, 0, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); + AndroidUtilities.runOnUIThread(() -> { + if (totalItemCount - firstVisibleItemFinal - visibleItemCountFinal <= checkLoadCount && !loading) { + if (!endReached[0]) { + loading = true; + waitingForLoad.add(lastLoadIndex); + if (messagesByDays.size() != 0) { + getMessagesController().loadMessages(dialog_id, mergeDialogId, false, 50, maxMessageId[0], 0, !cacheEndReached[0], minDate[0], classGuid, 0, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); + } else { + getMessagesController().loadMessages(dialog_id, mergeDialogId, false, 50, 0, 0, !cacheEndReached[0], minDate[0], classGuid, 0, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); + } + } else if (mergeDialogId != 0 && !endReached[1]) { + loading = true; + waitingForLoad.add(lastLoadIndex); + getMessagesController().loadMessages(mergeDialogId, 0, false, 50, maxMessageId[1], 0, !cacheEndReached[1], minDate[1], classGuid, 0, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); } - } else if (mergeDialogId != 0 && !endReached[1]) { - loading = true; - waitingForLoad.add(lastLoadIndex); - getMessagesController().loadMessages(mergeDialogId, 0, false, 50, maxMessageId[1], 0, !cacheEndReached[1], minDate[1], classGuid, 0, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); } - } - if (visibleItemCount > 0 && !loadingForward && firstVisibleItem <= 10) { - if (mergeDialogId != 0 && !forwardEndReached[1]) { - waitingForLoad.add(lastLoadIndex); - getMessagesController().loadMessages(mergeDialogId, 0, false, 50, minMessageId[1], 0, true, maxDate[1], classGuid, 1, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); - loadingForward = true; - } else if (!forwardEndReached[0]) { - waitingForLoad.add(lastLoadIndex); - getMessagesController().loadMessages(dialog_id, mergeDialogId, false, 50, minMessageId[0], 0, true, maxDate[0], classGuid, 1, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); - loadingForward = true; + if (visibleItemCountFinal > 0 && !loadingForward && firstVisibleItemFinal <= 10) { + if (mergeDialogId != 0 && !forwardEndReached[1]) { + waitingForLoad.add(lastLoadIndex); + 
getMessagesController().loadMessages(mergeDialogId, 0, false, 50, minMessageId[1], 0, true, maxDate[1], classGuid, 1, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); + loadingForward = true; + } else if (!forwardEndReached[0]) { + waitingForLoad.add(lastLoadIndex); + getMessagesController().loadMessages(dialog_id, mergeDialogId, false, 50, minMessageId[0], 0, true, maxDate[0], classGuid, 1, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); + loadingForward = true; + } } - } + }); } private void processSelectedAttach(int which) { @@ -12356,2011 +11115,3222 @@ public void didSelectSearchPhotos(ArrayList if (photos.isEmpty()) { return; } - fillEditingMediaWithCaption(photos.get(0).caption, photos.get(0).entities); - SendMessagesHelper.prepareSendingMedia(getAccountInstance(), photos, dialog_id, replyingMessageObject, getThreadMessage(), null, false, true, editingMessageObject, notify, scheduleDate, photos.get(0).updateStickersOrder); - afterMessageSend(); - if (scheduleDate != 0) { - if (scheduledMessagesCount == -1) { - scheduledMessagesCount = 0; - } - scheduledMessagesCount += photos.size(); - updateScheduledInterface(true); + fillEditingMediaWithCaption(photos.get(0).caption, photos.get(0).entities); + SendMessagesHelper.prepareSendingMedia(getAccountInstance(), photos, dialog_id, replyingMessageObject, getThreadMessage(), null, false, true, editingMessageObject, notify, scheduleDate, photos.get(0).updateStickersOrder); + afterMessageSend(); + if (scheduleDate != 0) { + if (scheduledMessagesCount == -1) { + scheduledMessagesCount = 0; + } + scheduledMessagesCount += photos.size(); + updateScheduledInterface(true); + } + } + + @Override + public void startDocumentSelectActivity() { + try { + Intent photoPickerIntent = new Intent(Intent.ACTION_GET_CONTENT); + if (Build.VERSION.SDK_INT >= 18) { + photoPickerIntent.putExtra(Intent.EXTRA_ALLOW_MULTIPLE, true); + } + photoPickerIntent.setType("*/*"); + startActivityForResult(photoPickerIntent, 21); + } catch (Exception e) { + FileLog.e(e); + } + } + + @Override + public boolean dismissDialogOnPause(Dialog dialog) { + return dialog != chatAttachAlert && super.dismissDialogOnPause(dialog); + } + + private boolean disableLinkPreview = NekoConfig.disableLinkPreviewByDefault.Bool(); + + private void searchLinks(final CharSequence charSequence, final boolean force) { + if (currentEncryptedChat != null && getMessagesController().secretWebpagePreview == 0 || editingMessageObject != null && !editingMessageObject.isWebpage()) { + return; + } + if (disableLinkPreview) return; + if (force && foundWebPage != null) { + if (foundWebPage.url != null) { + int index = TextUtils.indexOf(charSequence, foundWebPage.url); + char lastChar = 0; + boolean lenEqual = false; + if (index == -1) { + if (foundWebPage.display_url != null) { + index = TextUtils.indexOf(charSequence, foundWebPage.display_url); + lenEqual = index != -1 && index + foundWebPage.display_url.length() == charSequence.length(); + lastChar = index != -1 && !lenEqual ? charSequence.charAt(index + foundWebPage.display_url.length()) : 0; + } + } else { + lenEqual = index + foundWebPage.url.length() == charSequence.length(); + lastChar = !lenEqual ? charSequence.charAt(index + foundWebPage.url.length()) : 0; + } + if (index != -1 && (lenEqual || lastChar == ' ' || lastChar == ',' || lastChar == '.' || lastChar == '!' 
|| lastChar == '/')) { + return; + } + } + pendingLinkSearchString = null; + foundUrls = null; + showFieldPanelForWebPage(false, foundWebPage, false); + } + final MessagesController messagesController = getMessagesController(); + Utilities.searchQueue.postRunnable(() -> { + if (linkSearchRequestId != 0) { + getConnectionsManager().cancelRequest(linkSearchRequestId, true); + linkSearchRequestId = 0; + } + ArrayList urls = null; + CharSequence textToCheck; + try { + Matcher m = AndroidUtilities.WEB_URL.matcher(charSequence); + while (m.find()) { + if (m.start() > 0) { + if (charSequence.charAt(m.start() - 1) == '@') { + continue; + } + } + if (urls == null) { + urls = new ArrayList<>(); + } + urls.add(charSequence.subSequence(m.start(), m.end())); + } + if (charSequence instanceof Spannable) { + URLSpanReplacement[] spans = ((Spannable) charSequence).getSpans(0, charSequence.length(), URLSpanReplacement.class); + if (spans != null && spans.length > 0) { + if (urls == null) { + urls = new ArrayList<>(); + } + for (int a = 0; a < spans.length; a++) { + urls.add(spans[a].getURL()); + } + } + } + if (urls != null && foundUrls != null && urls.size() == foundUrls.size()) { + boolean clear = true; + for (int a = 0; a < urls.size(); a++) { + if (!TextUtils.equals(urls.get(a), foundUrls.get(a))) { + clear = false; + } + } + if (clear) { + return; + } + } + foundUrls = urls; + if (urls == null) { + AndroidUtilities.runOnUIThread(() -> { + if (foundWebPage != null) { + showFieldPanelForWebPage(false, foundWebPage, false); + foundWebPage = null; + } + }); + return; + } + textToCheck = TextUtils.join(" ", urls); + } catch (Exception e) { + FileLog.e(e); + String text = charSequence.toString().toLowerCase(); + if (charSequence.length() < 13 || !text.contains("http://") && !text.contains("https://")) { + AndroidUtilities.runOnUIThread(() -> { + if (foundWebPage != null) { + showFieldPanelForWebPage(false, foundWebPage, false); + foundWebPage = null; + } + }); + return; + } + textToCheck = charSequence; + } + + if (currentEncryptedChat != null && messagesController.secretWebpagePreview == 2) { + AndroidUtilities.runOnUIThread(() -> { + AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity(), themeDelegate); + builder.setTitle(LocaleController.getString("NekoX", R.string.NekoX)); + builder.setPositiveButton(LocaleController.getString("OK", R.string.OK), (dialog, which) -> { + messagesController.secretWebpagePreview = 1; + MessagesController.getGlobalMainSettings().edit().putInt("secretWebpage2", getMessagesController().secretWebpagePreview).apply(); + foundUrls = null; + searchLinks(charSequence, force); + }); + builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); + builder.setMessage(LocaleController.getString("SecretLinkPreviewAlert", R.string.SecretLinkPreviewAlert)); + showDialog(builder.create()); + + messagesController.secretWebpagePreview = 0; + MessagesController.getGlobalMainSettings().edit().putInt("secretWebpage2", messagesController.secretWebpagePreview).apply(); + }); + return; + } + + final TLRPC.TL_messages_getWebPagePreview req = new TLRPC.TL_messages_getWebPagePreview(); + if (textToCheck instanceof String) { + req.message = (String) textToCheck; + } else { + req.message = textToCheck.toString(); + } + linkSearchRequestId = getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + linkSearchRequestId = 0; + if (error == null) { + if (response instanceof TLRPC.TL_messageMediaWebPage) { + 
foundWebPage = ((TLRPC.TL_messageMediaWebPage) response).webpage;
+                        if (foundWebPage instanceof TLRPC.TL_webPage || foundWebPage instanceof TLRPC.TL_webPagePending) {
+                            if (foundWebPage instanceof TLRPC.TL_webPagePending) {
+                                pendingLinkSearchString = req.message;
+                            }
+                            if (currentEncryptedChat != null && foundWebPage instanceof TLRPC.TL_webPagePending) {
+                                foundWebPage.url = req.message;
+                            }
+                            showFieldPanelForWebPage(true, foundWebPage, false);
+                        } else {
+                            if (foundWebPage != null) {
+                                showFieldPanelForWebPage(false, foundWebPage, false);
+                                foundWebPage = null;
+                            }
+                        }
+                    } else {
+                        if (foundWebPage != null) {
+                            showFieldPanelForWebPage(false, foundWebPage, false);
+                            foundWebPage = null;
+                        }
+                    }
+                }
+            }));
+
+            getConnectionsManager().bindRequestToGuid(linkSearchRequestId, classGuid);
+        });
+    }
+
+    private void forwardMessages(ArrayList<MessageObject> arrayList, boolean fromMyName, boolean hideCaption, boolean notify, int scheduleDate) {
+        if (arrayList == null || arrayList.isEmpty()) {
+            return;
+        }
+        if (!checkSlowModeAlert()) {
+            return;
+        }
+        if ((scheduleDate != 0) == (chatMode == MODE_SCHEDULED)) {
+            waitingForSendingMessageLoad = true;
+        }
+        int result = getSendMessagesHelper().sendMessage(arrayList, dialog_id, fromMyName, hideCaption, notify, scheduleDate, getThreadMessage());
+        AlertsCreator.showSendMediaAlert(result, this, themeDelegate);
+        if (result != 0) {
+            AndroidUtilities.runOnUIThread(() -> {
+                waitingForSendingMessageLoad = false;
+                hideFieldPanel(true);
+            });
+        }
+    }
+
+    // This method is used to forward messages to Saved Messages or to multiple dialogs
+    private void forwardMessages(ArrayList<MessageObject> arrayList, boolean fromMyName, boolean notify, int scheduleDate, long did) {
+        if (arrayList == null || arrayList.isEmpty()) {
+            return;
+        }
+        if ((scheduleDate != 0) == (chatMode == MODE_SCHEDULED)) {
+            waitingForSendingMessageLoad = true;
        }
+        AlertsCreator.showSendMediaAlert(getSendMessagesHelper().sendMessage(arrayList, did == 0 ? dialog_id : did, fromMyName, false, notify, scheduleDate), this);
    }
-    @Override
-    public void startDocumentSelectActivity() {
-        try {
-            Intent photoPickerIntent = new Intent(Intent.ACTION_GET_CONTENT);
-            if (Build.VERSION.SDK_INT >= 18) {
-                photoPickerIntent.putExtra(Intent.EXTRA_ALLOW_MULTIPLE, true);
+    public boolean shouldShowImport() {
+        return openImport;
+    }
+
+    public void setOpenImport() {
+        openImport = true;
+    }
+
+    private void checkBotKeyboard() {
+        if (chatActivityEnterView == null || botButtons == null || userBlocked) {
+            return;
+        }
+        if (botButtons.messageOwner.reply_markup instanceof TLRPC.TL_replyKeyboardForceReply) {
+            SharedPreferences preferences = MessagesController.getMainSettings(currentAccount);
+            String tk = isTopic ?
dialog_id + "_" + getTopicId() : "" + dialog_id; + if (preferences.getInt("answered_" + tk, 0) != botButtons.getId() && (replyingMessageObject == null || chatActivityEnterView.getFieldText() == null)) { + botReplyButtons = botButtons; + chatActivityEnterView.setButtons(botButtons); + showFieldPanelForReply(botButtons); } - photoPickerIntent.setType("*/*"); - startActivityForResult(photoPickerIntent, 21); - } catch (Exception e) { - FileLog.e(e); + } else { + if (replyingMessageObject != null && botReplyButtons == replyingMessageObject) { + botReplyButtons = null; + hideFieldPanel(true); + } + chatActivityEnterView.setButtons(botButtons); } } - @Override - public boolean dismissDialogOnPause(Dialog dialog) { - return dialog != chatAttachAlert && super.dismissDialogOnPause(dialog); + public void hideFieldPanel(boolean animated) { + showFieldPanel(false, null, null, null, null, true, 0, false, animated); } - private boolean disableLinkPreview = NekoConfig.disableLinkPreviewByDefault.Bool(); + public void hideFieldPanel(boolean notify, int scheduleDate, boolean animated) { + showFieldPanel(false, null, null, null, null, notify, scheduleDate, false, animated); + } - private void searchLinks(final CharSequence charSequence, final boolean force) { - if (currentEncryptedChat != null && getMessagesController().secretWebpagePreview == 0 || editingMessageObject != null && !editingMessageObject.isWebpage()) { + public void showFieldPanelForWebPage(boolean show, TLRPC.WebPage webPage, boolean cancel) { + showFieldPanel(show, null, null, null, webPage, true, 0, cancel, true); + } + + public void showFieldPanelForForward(boolean show, ArrayList messageObjectsToForward) { + showFieldPanel(show, null, null, messageObjectsToForward, null, true, 0, false, true); + } + + public void showFieldPanelForReply(MessageObject messageObjectToReply) { + showFieldPanel(true, messageObjectToReply, null, null, null, true, 0, false, true); + } + + public void showFieldPanelForEdit(boolean show, MessageObject messageObjectToEdit) { + showFieldPanel(show, null, messageObjectToEdit, null, null, true, 0, false, true); + } + + public void beforeMessageSend(boolean notify, int scheduleDate, boolean beforeSend) { + if (beforeSend != NekoConfig.sendCommentAfterForward.Bool()) return; + if (forwardingMessages != null) { + ArrayList messagesToForward = new ArrayList<>(); + forwardingMessages.getSelectedMessages(messagesToForward); + forwardMessages(messagesToForward, forwardingMessages.hideForwardSendersName, forwardingMessages.hideCaption, notify, scheduleDate != 0 && scheduleDate != 0x7ffffffe ? scheduleDate + 1 : scheduleDate); + forwardingMessages = null; + } + } + + public void showFieldPanel(boolean show, MessageObject messageObjectToReply, MessageObject messageObjectToEdit, ArrayList messageObjectsToForward, TLRPC.WebPage webPage, boolean notify, int scheduleDate, boolean cancel, boolean animated) { + if (chatActivityEnterView == null) { return; } - if (disableLinkPreview) return; - if (force && foundWebPage != null) { - if (foundWebPage.url != null) { - int index = TextUtils.indexOf(charSequence, foundWebPage.url); - char lastChar = 0; - boolean lenEqual = false; - if (index == -1) { - if (foundWebPage.display_url != null) { - index = TextUtils.indexOf(charSequence, foundWebPage.display_url); - lenEqual = index != -1 && index + foundWebPage.display_url.length() == charSequence.length(); - lastChar = index != -1 && !lenEqual ? 
charSequence.charAt(index + foundWebPage.display_url.length()) : 0; + boolean showHint = false, showReplyHint = false; + if (show) { + if (messageObjectToReply == null && messageObjectsToForward == null && messageObjectToEdit == null && webPage == null) { + return; + } + hideHints(false); + if (searchItem != null && actionBar.isSearchFieldVisible()) { + actionBar.closeSearchField(false); + chatActivityEnterView.setFieldFocused(); + AndroidUtilities.runOnUIThread(() -> { + if (chatActivityEnterView != null) { + chatActivityEnterView.openKeyboard(); + } + }, 100); + } + boolean openKeyboard = false; + if (messageObjectToReply != null && messageObjectToReply.getDialogId() != dialog_id) { + messageObjectsToForward = new ArrayList<>(); + messageObjectsToForward.add(messageObjectToReply); + messageObjectToReply = null; + openKeyboard = true; + } + chatActivityEnterTopView.setEditMode(false); + if (messageObjectToEdit != null) { + forwardingMessages = null; + if (threadMessageId == 0 || isTopic) { + if (isTopic) { + replyingMessageObject = threadMessageObject; + } else { + replyingMessageObject = null; + } + chatActivityEnterView.setReplyingMessageObject(null); + updateBottomOverlay(); + } + editingMessageObject = messageObjectToEdit; + final boolean mediaEmpty = messageObjectToEdit.isMediaEmpty(); + chatActivityEnterView.setEditingMessageObject(messageObjectToEdit, !mediaEmpty); + if (foundWebPage != null) { + return; + } + chatActivityEnterView.setForceShowSendButton(false, false); + final boolean canEditMedia = messageObjectToEdit.canEditMedia(); + replyCloseImageView.setContentDescription(LocaleController.getString("AccDescrCancelEdit", R.string.AccDescrCancelEdit)); + if (!mediaEmpty && canEditMedia) { + String editButtonText = null; + String replaceButtonText; + if (messageObjectToEdit.isPhoto()) { + editButtonText = LocaleController.getString("EditMessageEditPhoto", R.string.EditMessageEditPhoto); + replaceButtonText = LocaleController.getString("EditMessageReplacePhoto", R.string.EditMessageReplacePhoto); + } else if (messageObjectToEdit.isVideo()) { + editButtonText = LocaleController.getString("EditMessageEditVideo", R.string.EditMessageEditVideo); + replaceButtonText = LocaleController.getString("EditMessageReplaceVideo", R.string.EditMessageReplaceVideo); + } else if (messageObjectToEdit.isGif()) { + replaceButtonText = LocaleController.getString("EditMessageReplaceGif", R.string.EditMessageReplaceGif); + } else if (messageObjectToEdit.isMusic()) { + replaceButtonText = LocaleController.getString("EditMessageReplaceAudio", R.string.EditMessageReplaceAudio); + } else { + replaceButtonText = LocaleController.getString("EditMessageReplaceFile", R.string.EditMessageReplaceFile); + } + final ChatActivityEnterTopView.EditViewButton[] buttons = chatActivityEnterTopView.getEditView().getButtons(); + buttons[0].setEditButton(editButtonText != null); + buttons[0].getTextView().setText(editButtonText != null ? editButtonText : replaceButtonText); + buttons[0].getImageView().setImageResource(editButtonText != null ? R.drawable.msg_photoeditor : R.drawable.msg_replace); + buttons[1].setVisibility(editButtonText != null ? 
View.VISIBLE : View.GONE); + if (editButtonText != null) { + buttons[1].getTextView().setText(replaceButtonText); + } + chatActivityEnterTopView.setEditMode(true); + } else { + replyIconImageView.setImageResource(R.drawable.group_edit); + replyIconImageView.setContentDescription(LocaleController.getString("AccDescrEditing", R.string.AccDescrEditing)); + if (mediaEmpty) { + replyNameTextView.setText(LocaleController.getString("EditMessage", R.string.EditMessage)); + } else { + replyNameTextView.setText(LocaleController.getString("EditCaption", R.string.EditCaption)); + } + if (canEditMedia) { + replyObjectTextView.setText(LocaleController.getString("EditMessageMedia", R.string.EditMessageMedia)); + } else if (messageObjectToEdit.messageText != null || messageObjectToEdit.caption != null) { + String mess = messageObjectToEdit.caption != null ? messageObjectToEdit.caption.toString() : messageObjectToEdit.messageText.toString(); + if (mess.length() > 150) { + mess = mess.substring(0, 150); + } + mess = mess.replace('\n', ' '); + Spannable cs = new SpannableStringBuilder(mess); + MediaDataController.addTextStyleRuns(messageObjectToEdit, cs); + if (messageObjectToEdit.messageOwner != null) { + cs = MessageObject.replaceAnimatedEmoji(cs, messageObjectToEdit.messageOwner.entities, replyObjectTextView.getPaint().getFontMetricsInt()); + } + replyObjectTextView.setText(AnimatedEmojiSpan.cloneSpans(cs)); + } + } + } else if (messageObjectToReply != null) { + editingMessageObject = null; + replyingMessageObject = messageObjectToReply; + chatActivityEnterView.setReplyingMessageObject(messageObjectToReply); + chatActivityEnterView.setEditingMessageObject(null, false); + if (foundWebPage != null) { + return; + } + String restrictionReason = MessagesController.getRestrictionReason(messageObjectToReply.messageOwner.restriction_reason); + chatActivityEnterView.setForceShowSendButton(false, false); + String name; + if (messageObjectToReply.isFromUser()) { + if (messageObjectToReply.messageOwner.from_id.channel_id != 0) { + TLRPC.Chat chat = getMessagesController().getChat(messageObjectToReply.messageOwner.from_id.channel_id); + if (chat == null) { + return; + } + name = chat.title; + } else { + TLRPC.User user = getMessagesController().getUser(messageObjectToReply.messageOwner.from_id.user_id); + if (user == null) { + return; + } + name = UserObject.getUserName(user); + } + } else { + TLRPC.Chat chat; + if (ChatObject.isChannel(currentChat) && currentChat.megagroup && messageObjectToReply.isForwardedChannelPost()) { + chat = getMessagesController().getChat(messageObjectToReply.messageOwner.fwd_from.from_id.channel_id); + } else { + chat = getMessagesController().getChat(-messageObjectToReply.getSenderId()); + } + if (chat == null) { + return; + } + name = chat.title; + } + replyIconImageView.setImageResource(R.drawable.msg_panel_reply); + replyNameTextView.setText(MessageHelper.INSTANCE.zalgoFilter(name)); + replyIconImageView.setContentDescription(LocaleController.getString("AccDescrReplying", R.string.AccDescrReplying)); + replyCloseImageView.setContentDescription(LocaleController.getString("AccDescrCancelReply", R.string.AccDescrCancelReply)); + + CharSequence replyObjectText = null; + CharSequence sourceText = null; + if (!TextUtils.isEmpty(restrictionReason)) { + replyObjectText = restrictionReason; + sourceText = restrictionReason; + } else if (MessageObject.isTopicActionMessage(messageObjectToReply)) { + ForumUtilities.applyTopicToMessage(messageObjectToReply); + if 
(messageObjectToReply.messageTextForReply != null) { + replyObjectText = messageObjectToReply.messageTextForReply; + } else { + replyObjectText = messageObjectToReply.messageTextShort; + } + AnimatedEmojiSpan.applyFontMetricsForString(replyObjectText, replyObjectTextView.getPaint()); + } else if (messageObjectToReply.replyToForumTopic != null) { + replyObjectText = ForumUtilities.getTopicSpannedName(messageObjectToReply.replyToForumTopic, replyObjectTextView.getPaint()); + } else if (messageObjectToReply.messageOwner.media instanceof TLRPC.TL_messageMediaGame) { + replyObjectText = Emoji.replaceEmoji(messageObjectToReply.messageOwner.media.game.title, replyObjectTextView.getPaint().getFontMetricsInt(), AndroidUtilities.dp(14), false); + sourceText = messageObjectToReply.messageOwner.media.game.title; + } else if (messageObjectToReply.messageText != null || messageObjectToReply.caption != null) { + CharSequence mess = messageObjectToReply.caption != null ? messageObjectToReply.caption.toString() : messageObjectToReply.messageText.toString(); + sourceText = mess; + if (mess.length() > 150) { + mess = mess.subSequence(0, 150); } - } else { - lenEqual = index + foundWebPage.url.length() == charSequence.length(); - lastChar = !lenEqual ? charSequence.charAt(index + foundWebPage.url.length()) : 0; + mess = AndroidUtilities.replaceNewLines(mess); + if (messageObjectToReply.messageOwner != null && messageObjectToReply.messageOwner.entities != null) { + mess = MessageObject.replaceAnimatedEmoji(mess, messageObjectToReply.messageOwner.entities, replyObjectTextView.getPaint().getFontMetricsInt()); + } + replyObjectText = Emoji.replaceEmoji(mess, replyObjectTextView.getPaint().getFontMetricsInt(), AndroidUtilities.dp(14), false); } - if (index != -1 && (lenEqual || lastChar == ' ' || lastChar == ',' || lastChar == '.' || lastChar == '!' 
|| lastChar == '/')) { + if (replyObjectText != null) { + if (replyObjectText instanceof Spannable && sourceText != null) { + MediaDataController.addTextStyleRuns(messageObjectToReply.messageOwner.entities, sourceText, (Spannable) replyObjectText); + } + + replyObjectTextView.setText(AnimatedEmojiSpan.cloneSpans(replyObjectText)); + } + updateBottomOverlay(); + } else if (messageObjectsToForward != null) { + if (messageObjectsToForward.isEmpty()) { return; } - } - pendingLinkSearchString = null; - foundUrls = null; - showFieldPanelForWebPage(false, foundWebPage, false); - } - final MessagesController messagesController = getMessagesController(); - Utilities.searchQueue.postRunnable(() -> { - if (linkSearchRequestId != 0) { - getConnectionsManager().cancelRequest(linkSearchRequestId, true); - linkSearchRequestId = 0; - } - ArrayList urls = null; - CharSequence textToCheck; - try { - Matcher m = AndroidUtilities.WEB_URL.matcher(charSequence); - while (m.find()) { - if (m.start() > 0) { - if (charSequence.charAt(m.start() - 1) == '@') { - continue; + if (threadMessageId == 0 || isTopic) { + if (isTopic) { + replyingMessageObject = threadMessageObject; + } else { + replyingMessageObject = null; + } + chatActivityEnterView.setReplyingMessageObject(replyingMessageObject); + updateBottomOverlay(); + } + editingMessageObject = null; + chatActivityEnterView.setEditingMessageObject(null, false); + if (forwardingMessages == null) { + forwardingMessages = new ForwardingMessagesParams(messageObjectsToForward, dialog_id); + } + if (noForwardQuote) { + noForwardQuote = false; + forwardingMessages.hideForwardSendersName = true; + } + if (foundWebPage != null) { + return; + } + chatActivityEnterView.setForceShowSendButton(true, false); + ArrayList uids = new ArrayList<>(); + replyIconImageView.setImageResource(R.drawable.msg_panel_forward); + replyIconImageView.setContentDescription(LocaleController.getString("AccDescrForwarding", R.string.AccDescrForwarding)); + replyCloseImageView.setContentDescription(LocaleController.getString("AccDescrCancelForward", R.string.AccDescrCancelForward)); + MessageObject object = messageObjectsToForward.get(0); + if (object.isFromUser()) { + uids.add(object.messageOwner.from_id.user_id); + } else { + TLRPC.Chat chat = getMessagesController().getChat(object.messageOwner.peer_id.channel_id); + if (ChatObject.isChannel(chat) && chat.megagroup && object.isForwardedChannelPost()) { + uids.add(-object.messageOwner.fwd_from.from_id.channel_id); + } else { + uids.add(-object.messageOwner.peer_id.channel_id); + } + } + + int type = object.isAnimatedEmoji() || object.isDice() ? 
0 : object.type; + for (int a = 1; a < messageObjectsToForward.size(); a++) { + object = messageObjectsToForward.get(a); + long uid; + if (object.isFromUser()) { + uid = object.messageOwner.from_id.user_id; + } else { + TLRPC.Chat chat = getMessagesController().getChat(object.messageOwner.peer_id.channel_id); + if (ChatObject.isChannel(chat) && chat.megagroup && object.isForwardedChannelPost()) { + uid = -object.messageOwner.fwd_from.from_id.channel_id; + } else { + uid = -object.messageOwner.peer_id.channel_id; } } - if (urls == null) { - urls = new ArrayList<>(); + if (!uids.contains(uid)) { + uids.add(uid); + } + if (messageObjectsToForward.get(a).type != type) { + type = -1; } - urls.add(charSequence.subSequence(m.start(), m.end())); } - if (charSequence instanceof Spannable) { - URLSpanReplacement[] spans = ((Spannable) charSequence).getSpans(0, charSequence.length(), URLSpanReplacement.class); - if (spans != null && spans.length > 0) { - if (urls == null) { - urls = new ArrayList<>(); + StringBuilder userNames = new StringBuilder(); + for (int a = 0; a < uids.size(); a++) { + Long uid = uids.get(a); + TLRPC.Chat chat = null; + TLRPC.User user = null; + if (uid > 0) { + user = getMessagesController().getUser(uid); + } else { + chat = getMessagesController().getChat(-uid); + } + if (user == null && chat == null) { + continue; + } + if (uids.size() == 1) { + if (user != null) { + userNames.append(UserObject.getUserName(user)); + } else { + userNames.append(chat.title); } - for (int a = 0; a < spans.length; a++) { - urls.add(spans[a].getURL()); + } else if (uids.size() == 2 || userNames.length() == 0) { + if (userNames.length() > 0) { + userNames.append(", "); + } + if (user != null) { + if (!TextUtils.isEmpty(user.first_name)) { + userNames.append(user.first_name); + } else if (!TextUtils.isEmpty(user.last_name)) { + userNames.append(user.last_name); + } else { + userNames.append(" "); + } + } else { + userNames.append(chat.title); } + } else { + userNames.append(" "); + userNames.append(LocaleController.formatPluralString("AndOther", uids.size() - 1)); + break; } } - if (urls != null && foundUrls != null && urls.size() == foundUrls.size()) { - boolean clear = true; - for (int a = 0; a < urls.size(); a++) { - if (!TextUtils.equals(urls.get(a), foundUrls.get(a))) { - clear = false; + formwardingNameText = userNames; + if (type == -1 || type == 0 || type == 10 || type == 11 || type == MessageObject.TYPE_EMOJIS) { + replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardMessagesCount", messageObjectsToForward.size())); + } else if (type == 1) { + replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardPhoto", messageObjectsToForward.size())); + if (messageObjectsToForward.size() == 1) { + messageObjectToReply = messageObjectsToForward.get(0); + } + } else if (type == 4) { + replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardLocation", messageObjectsToForward.size())); + } else if (type == 3) { + replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardVideo", messageObjectsToForward.size())); + if (messageObjectsToForward.size() == 1) { + messageObjectToReply = messageObjectsToForward.get(0); + } + } else if (type == 12) { + replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardContact", messageObjectsToForward.size())); + } else if (type == 2) { + replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardAudio", messageObjectsToForward.size())); + } else if 
(type == MessageObject.TYPE_ROUND_VIDEO) {
+                    replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardRound", messageObjectsToForward.size()));
+                } else if (type == 14) {
+                    replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardMusic", messageObjectsToForward.size()));
+                } else if (type == MessageObject.TYPE_STICKER || type == MessageObject.TYPE_ANIMATED_STICKER) {
+                    replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardSticker", messageObjectsToForward.size()));
+                } else if (type == 17) {
+                    replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardPoll", messageObjectsToForward.size()));
+                } else if (type == 8 || type == 9) {
+                    if (messageObjectsToForward.size() == 1 && type == 9) {
+                        messageObjectToReply = messageObjectsToForward.get(0);
+                    }
+                    if (messageObjectsToForward.size() == 1 && type == 8) {
+                        replyNameTextView.setText(LocaleController.getString("AttachGif", R.string.AttachGif));
+                    } else {
+                        replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardFile", messageObjectsToForward.size()));
+                    }
+                }
+
+                if (forwardingMessages.hideForwardSendersName) {
+                    replyObjectTextView.setText(LocaleController.getString("HiddenSendersNameDescription", R.string.HiddenSendersNameDescription));
+                } else {
+                    if ((type == -1 || type == 0 || type == 10 || type == 11 || type == MessageObject.TYPE_EMOJIS) && (messageObjectsToForward.size() == 1 && messageObjectsToForward.get(0).messageText != null)) {
+                        MessageObject messageObject = messageObjectsToForward.get(0);
+                        CharSequence mess = new SpannableStringBuilder(messageObject.messageText.toString());
+                        if (mess.length() > 150) {
+                            mess = mess.subSequence(0, 150);
+                        }
+                        mess = Emoji.replaceEmoji(mess, replyObjectTextView.getPaint().getFontMetricsInt(), AndroidUtilities.dp(14), false);
+                        if (mess instanceof Spannable) {
+                            MediaDataController.addTextStyleRuns(messageObject.messageOwner.entities, messageObject.messageText, (Spannable) mess);
+                            if (messageObject.messageOwner != null) {
+                                mess = MessageObject.replaceAnimatedEmoji(mess, messageObject.messageOwner.entities, replyObjectTextView.getPaint().getFontMetricsInt());
+                            }
+                        }
+                        replyObjectTextView.setText(mess);
+                    } else {
+                        replyObjectTextView.setText(LocaleController.formatString("ForwardingFromNames", R.string.ForwardingFromNames, userNames));
+                    }
+                }
+                if (!SharedConfig.forwardingOptionsHintShown) {
+                    showHint = true;
+                }
+            } else {
+                replyIconImageView.setImageResource(R.drawable.msg_link);
+                if (webPage instanceof TLRPC.TL_webPagePending) {
+                    replyNameTextView.setText(LocaleController.getString("GettingLinkInfo", R.string.GettingLinkInfo));
+                    replyObjectTextView.setText(pendingLinkSearchString);
+                } else {
+                    if (webPage.site_name != null) {
+                        replyNameTextView.setText(webPage.site_name);
+                    } else if (webPage.title != null) {
+                        replyNameTextView.setText(webPage.title);
+                    } else {
+                        replyNameTextView.setText(LocaleController.getString("LinkPreview", R.string.LinkPreview));
+                    }
+                    if (webPage.title != null) {
+                        replyObjectTextView.setText(webPage.title);
+                    } else if (webPage.description != null) {
+                        replyObjectTextView.setText(webPage.description);
+                    } else if (webPage.author != null) {
+                        replyObjectTextView.setText(webPage.author);
+                    } else {
+                        replyObjectTextView.setText(webPage.display_url);
+                    }
+                    chatActivityEnterView.setWebPage(webPage, true);
+                }
+            }
+            MessageObject thumbMediaMessageObject;
+            if (messageObjectToReply != null) {
+                thumbMediaMessageObject = messageObjectToReply;
+            } else if (messageObjectToEdit != null) {
+                if (!chatActivityEnterTopView.isEditMode()) {
+                    thumbMediaMessageObject = messageObjectToEdit;
+                } else {
+                    thumbMediaMessageObject = null;
+                }
+            } else {
+                thumbMediaMessageObject = null;
+            }
+
+            FrameLayout.LayoutParams layoutParams1 = (FrameLayout.LayoutParams) replyNameTextView.getLayoutParams();
+            FrameLayout.LayoutParams layoutParams2 = (FrameLayout.LayoutParams) replyObjectTextView.getLayoutParams();
+            FrameLayout.LayoutParams layoutParams3 = (FrameLayout.LayoutParams) replyObjectHintTextView.getLayoutParams();
+
+            int cacheType = 1;
+            int size = 0;
+            TLRPC.PhotoSize photoSize = null;
+            TLRPC.PhotoSize thumbPhotoSize = null;
+            TLObject photoSizeObject = null;
+            if (thumbMediaMessageObject != null && TextUtils.isEmpty(MessagesController.getRestrictionReason(thumbMediaMessageObject.messageOwner.restriction_reason))) {
+                photoSize = FileLoader.getClosestPhotoSizeWithSize(thumbMediaMessageObject.photoThumbs2, 320);
+                thumbPhotoSize = FileLoader.getClosestPhotoSizeWithSize(thumbMediaMessageObject.photoThumbs2, AndroidUtilities.dp(40));
+                photoSizeObject = thumbMediaMessageObject.photoThumbsObject2;
+                if (photoSize == null) {
+                    if (thumbMediaMessageObject.mediaExists) {
+                        photoSize = FileLoader.getClosestPhotoSizeWithSize(thumbMediaMessageObject.photoThumbs, AndroidUtilities.getPhotoSize());
+                        if (photoSize != null) {
+                            size = photoSize.size;
                         }
+                        cacheType = 0;
+                    } else {
+                        photoSize = FileLoader.getClosestPhotoSizeWithSize(thumbMediaMessageObject.photoThumbs, 320);
                     }
-                    if (clear) {
-                        return;
-                    }
+                    thumbPhotoSize = FileLoader.getClosestPhotoSizeWithSize(thumbMediaMessageObject.photoThumbs, AndroidUtilities.dp(40));
+                    photoSizeObject = thumbMediaMessageObject.photoThumbsObject;
                }
-                foundUrls = urls;
-                if (urls == null) {
-                    AndroidUtilities.runOnUIThread(() -> {
-                        if (foundWebPage != null) {
-                            showFieldPanelForWebPage(false, foundWebPage, false);
-                            foundWebPage = null;
-                        }
-                    });
-                    return;
+            }
+            if (photoSize == thumbPhotoSize) {
+                thumbPhotoSize = null;
+            }
+            if (photoSize == null || photoSize instanceof TLRPC.TL_photoSizeEmpty || photoSize.location instanceof TLRPC.TL_fileLocationUnavailable || thumbMediaMessageObject.isAnyKindOfSticker() || thumbMediaMessageObject.isSecretMedia() || thumbMediaMessageObject.isWebpageDocument()) {
+                replyImageView.setImageBitmap(null);
+                replyImageLocation = null;
+                replyImageLocationObject = null;
+                replyImageView.setVisibility(View.INVISIBLE);
+                layoutParams1.leftMargin = layoutParams2.leftMargin = layoutParams3.leftMargin = AndroidUtilities.dp(52);
+            } else {
+                if (thumbMediaMessageObject.isRoundVideo()) {
+                    replyImageView.setRoundRadius(AndroidUtilities.dp(17));
+                } else {
+                    replyImageView.setRoundRadius(AndroidUtilities.dp(2));
                }
-                textToCheck = TextUtils.join(" ", urls);
-            } catch (Exception e) {
-                FileLog.e(e);
-                String text = charSequence.toString().toLowerCase();
-                if (charSequence.length() < 13 || !text.contains("http://") && !text.contains("https://")) {
-                    AndroidUtilities.runOnUIThread(() -> {
-                        if (foundWebPage != null) {
-                            showFieldPanelForWebPage(false, foundWebPage, false);
-                            foundWebPage = null;
-                        }
-                    });
+                replyImageSize = size;
+                replyImageCacheType = cacheType;
+                replyImageLocation = photoSize;
+                replyImageThumbLocation = thumbPhotoSize;
+                replyImageLocationObject = photoSizeObject;
+                replyImageHasMediaSpoiler = thumbMediaMessageObject.hasMediaSpoilers();
+                replyImageView.setImage(ImageLocation.getForObject(replyImageLocation, photoSizeObject), replyImageHasMediaSpoiler ? "5_5_b" : "50_50", ImageLocation.getForObject(thumbPhotoSize, photoSizeObject), replyImageHasMediaSpoiler ? "5_5_b" : "50_50_b", null, size, cacheType, thumbMediaMessageObject);
+                replyImageView.setVisibility(View.VISIBLE);
+                layoutParams1.leftMargin = layoutParams2.leftMargin = layoutParams3.leftMargin = AndroidUtilities.dp(96);
+            }
+            replyNameTextView.setLayoutParams(layoutParams1);
+            replyObjectTextView.setLayoutParams(layoutParams2);
+            replyObjectHintTextView.setLayoutParams(layoutParams3);
+            chatActivityEnterView.showTopView(true, openKeyboard);
+        } else {
+            if (replyingMessageObject == null && forwardingMessages == null && foundWebPage == null && editingMessageObject == null && !chatActivityEnterView.forceShowSendButton && !chatActivityEnterView.isTopViewVisible()) {
+                return;
+            }
+            if (replyingMessageObject != null && replyingMessageObject.messageOwner.reply_markup instanceof TLRPC.TL_replyKeyboardForceReply) {
+                SharedPreferences preferences = MessagesController.getMainSettings(currentAccount);
+                String tk = isTopic ? dialog_id + "_" + getTopicId() : "" + dialog_id;
+                preferences.edit().putInt("answered_" + tk, replyingMessageObject.getId()).apply();
+            }
+            if (foundWebPage != null) {
+                foundWebPage = null;
+                chatActivityEnterView.setWebPage(null, !cancel);
+                if (webPage != null && (replyingMessageObject != null || forwardingMessages != null || editingMessageObject != null)) {
+                    showFieldPanel(true, replyingMessageObject, editingMessageObject, forwardingMessages != null ? forwardingMessages.messages : null, null, notify, scheduleDate, false, true);
                     return;
                 }
-                textToCheck = charSequence;
            }
-
-            if (currentEncryptedChat != null && messagesController.secretWebpagePreview == 2) {
-                AndroidUtilities.runOnUIThread(() -> {
-                    AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity(), themeDelegate);
-                    builder.setTitle(LocaleController.getString("NekoX", R.string.NekoX));
-                    builder.setPositiveButton(LocaleController.getString("OK", R.string.OK), (dialog, which) -> {
-                        messagesController.secretWebpagePreview = 1;
-                        MessagesController.getGlobalMainSettings().edit().putInt("secretWebpage2", getMessagesController().secretWebpagePreview).apply();
-                        foundUrls = null;
-                        searchLinks(charSequence, force);
-                    });
-                    builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null);
-                    builder.setMessage(LocaleController.getString("SecretLinkPreviewAlert", R.string.SecretLinkPreviewAlert));
-                    showDialog(builder.create());
-
-                    messagesController.secretWebpagePreview = 0;
-                    MessagesController.getGlobalMainSettings().edit().putInt("secretWebpage2", messagesController.secretWebpagePreview).apply();
-                });
-                return;
+            beforeMessageSend(notify, scheduleDate, false);
+            chatActivityEnterView.setForceShowSendButton(false, animated);
+            if (!waitingForSendingMessageLoad) {
+                chatActivityEnterView.hideTopView(animated);
            }
+            chatActivityEnterView.setReplyingMessageObject(threadMessageObject);
+            chatActivityEnterView.setEditingMessageObject(null, false);
+            topViewWasVisible = 0;
+            replyingMessageObject = threadMessageObject;
+            editingMessageObject = null;
+            replyImageLocation = null;
+            replyImageLocationObject = null;
+            updateBottomOverlay();
+        }
-        final TLRPC.TL_messages_getWebPagePreview req = new TLRPC.TL_messages_getWebPagePreview();
-        if (textToCheck instanceof String) {
-            req.message = (String) textToCheck;
-        } else {
-            req.message = textToCheck.toString();
-        }
-        linkSearchRequestId = getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() ->
{ - linkSearchRequestId = 0; - if (error == null) { - if (response instanceof TLRPC.TL_messageMediaWebPage) { - foundWebPage = ((TLRPC.TL_messageMediaWebPage) response).webpage; - if (foundWebPage instanceof TLRPC.TL_webPage || foundWebPage instanceof TLRPC.TL_webPagePending) { - if (foundWebPage instanceof TLRPC.TL_webPagePending) { - pendingLinkSearchString = req.message; - } - if (currentEncryptedChat != null && foundWebPage instanceof TLRPC.TL_webPagePending) { - foundWebPage.url = req.message; - } - showFieldPanelForWebPage(true, foundWebPage, false); - } else { - if (foundWebPage != null) { - showFieldPanelForWebPage(false, foundWebPage, false); - foundWebPage = null; - } - } + if (showHint) { + if (tapForForwardingOptionsHitRunnable == null) { + AndroidUtilities.runOnUIThread(tapForForwardingOptionsHitRunnable = () -> { + showTapForForwardingOptionsHit = !showTapForForwardingOptionsHit; + replyObjectTextView.setPivotX(0); + replyObjectHintTextView.setPivotX(0); + if (showTapForForwardingOptionsHit) { + replyObjectTextView.animate().alpha(0f).scaleX(0.98f).scaleY(0.98f).setDuration(150).start(); + replyObjectHintTextView.animate().alpha(1f).scaleX(1f).scaleY(1f).setDuration(150).start(); } else { - if (foundWebPage != null) { - showFieldPanelForWebPage(false, foundWebPage, false); - foundWebPage = null; - } + replyObjectTextView.animate().alpha(1f).scaleX(1f).scaleY(1f).setDuration(150).start(); + replyObjectHintTextView.animate().alpha(0f).scaleX(0.98f).scaleY(0.98f).setDuration(150).start(); } - } - })); + AndroidUtilities.runOnUIThread(tapForForwardingOptionsHitRunnable, 6000); + }, 6000); + } + } else { + if (tapForForwardingOptionsHitRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(tapForForwardingOptionsHitRunnable); + tapForForwardingOptionsHitRunnable = null; + } + replyObjectTextView.setAlpha(1f); + replyObjectHintTextView.setAlpha(0); + } - getConnectionsManager().bindRequestToGuid(linkSearchRequestId, classGuid); - }); - } + if (showReplyHint) { + + } else { - private void forwardMessages(ArrayList arrayList, boolean fromMyName, boolean hideCaption, boolean notify, int scheduleDate) { - if (arrayList == null || arrayList.isEmpty()) { - return; } - if (!checkSlowModeAlert()) { - return; + } + + private void moveScrollToLastMessage(boolean skipSponsored) { + if (chatListView != null && !messages.isEmpty() && !pinchToZoomHelper.isInOverlayMode()) { + int position = 0; + if (skipSponsored) { + position += getSponsoredMessagesCount(); + } + chatLayoutManager.scrollToPositionWithOffset(position, 0); + chatListView.stopScroll(); } - if ((scheduleDate != 0) == (chatMode == MODE_SCHEDULED)) { - waitingForSendingMessageLoad = true; + } + + private Runnable sendSecretMessageRead(MessageObject messageObject, boolean readNow) { + if (messageObject == null || messageObject.isOut() || !messageObject.isSecretMedia() || messageObject.messageOwner.destroyTime != 0 || messageObject.messageOwner.ttl <= 0) { + return null; } - int result = getSendMessagesHelper().sendMessage(arrayList, dialog_id, fromMyName, hideCaption, notify, scheduleDate, getThreadMessage()); - AlertsCreator.showSendMediaAlert(result, this, themeDelegate); - if (result != 0) { - AndroidUtilities.runOnUIThread(() -> { - waitingForSendingMessageLoad = false; - hideFieldPanel(true); - }); + messageObject.messageOwner.destroyTime = messageObject.messageOwner.ttl + getConnectionsManager().getCurrentTime(); + if (readNow) { + if (currentEncryptedChat != null) { + getMessagesController().markMessageAsRead(dialog_id, 
messageObject.messageOwner.random_id, messageObject.messageOwner.ttl); + } else { + getMessagesController().markMessageAsRead2(dialog_id, messageObject.getId(), null, messageObject.messageOwner.ttl, 0); + } + return null; + } else { + return () -> { + if (currentEncryptedChat != null) { + getMessagesController().markMessageAsRead(dialog_id, messageObject.messageOwner.random_id, messageObject.messageOwner.ttl); + } else { + getMessagesController().markMessageAsRead2(dialog_id, messageObject.getId(), null, messageObject.messageOwner.ttl, 0); + } + }; } } - // This method is used to forward messages to Saved Messages, or to multi Dialogs - private void forwardMessages(ArrayList arrayList, boolean fromMyName, boolean notify, int scheduleDate, long did) { - if (arrayList == null || arrayList.isEmpty()) { - return; + private void clearChatData() { + messages.clear(); + messagesByDays.clear(); + waitingForLoad.clear(); + groupedMessagesMap.clear(); + threadMessageAdded = false; + + if (chatAdapter != null) { + showProgressView(chatAdapter.botInfoRow < 0); } - if ((scheduleDate != 0) == (chatMode == MODE_SCHEDULED)) { - waitingForSendingMessageLoad = true; + if (chatListView != null) { + chatListView.setEmptyView(null); + } + for (int a = 0; a < 2; a++) { + messagesDict[a].clear(); + if (currentEncryptedChat == null) { + maxMessageId[a] = Integer.MAX_VALUE; + minMessageId[a] = Integer.MIN_VALUE; + } else { + maxMessageId[a] = Integer.MIN_VALUE; + minMessageId[a] = Integer.MAX_VALUE; + } + maxDate[a] = Integer.MIN_VALUE; + minDate[a] = 0; + endReached[a] = false; + cacheEndReached[a] = false; + forwardEndReached[a] = true; + } + first = true; + firstLoading = true; + loading = true; + loadingForward = false; + waitingForReplyMessageLoad = false; + startLoadFromMessageId = 0; + showScrollToMessageError = false; + last_message_id = 0; + unreadMessageObject = null; + createUnreadMessageAfterId = 0; + createUnreadMessageAfterIdLoading = false; + needSelectFromMessageId = false; + if (chatAdapter != null) { + chatAdapter.notifyDataSetChanged(false); } - AlertsCreator.showSendMediaAlert(getSendMessagesHelper().sendMessage(arrayList, did == 0 ? 
dialog_id : did, fromMyName, false, notify, scheduleDate), this); - } - - public boolean shouldShowImport() { - return openImport; } - public void setOpenImport() { - openImport = true; + public void scrollToLastMessage(boolean skipSponsored, boolean top) { + scrollToLastMessage(skipSponsored, top, null); } - private void checkBotKeyboard() { - if (chatActivityEnterView == null || botButtons == null || userBlocked) { + public void scrollToLastMessage(boolean skipSponsored, boolean top, Runnable inCaseLoading) { + if (chatListView.isFastScrollAnimationRunning()) { return; } - if (botButtons.messageOwner.reply_markup instanceof TLRPC.TL_replyKeyboardForceReply) { - SharedPreferences preferences = MessagesController.getMainSettings(currentAccount); - if (preferences.getInt("answered_" + dialog_id, 0) != botButtons.getId() && (replyingMessageObject == null || chatActivityEnterView.getFieldText() == null)) { - botReplyButtons = botButtons; - chatActivityEnterView.setButtons(botButtons); - showFieldPanelForReply(botButtons); + forceNextPinnedMessageId = 0; + nextScrollToMessageId = 0; + forceScrollToFirst = false; + chatScrollHelper.setScrollDirection(RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN); + if (forwardEndReached[0] && first_unread_id == 0 && startLoadFromMessageId == 0) { + setPagedownLoading(false, true); + if (chatLayoutManager.findFirstCompletelyVisibleItemPosition() == 0) { + canShowPagedownButton = false; + updatePagedownButtonVisibility(true); + removeSelectedMessageHighlight(); + updateVisibleRows(); + } else { + chatAdapter.updateRowsSafe(); + chatScrollHelperCallback.scrollTo = null; + int position = 0; + if (skipSponsored) { + while (position < messages.size()) { + if (!messages.get(position).isSponsored()) { + break; + } + position++; + } + } + if (top && messages != null && messages.size() > 0 && messages.get(position) != null) { + long groupId = messages.get(position).getGroupId(); + while (groupId != 0 && position + 1 < messages.size()) { + if (groupId != messages.get(position + 1).getGroupId()) { + break; + } + position++; + } + } + if (messages != null && messages.size() > 0) { + position = Math.min(position, messages.size() - 1); + } + final int finalPosition = position; + Runnable scroll = () -> { + chatScrollHelper.scrollToPosition(chatScrollHelperCallback.position = finalPosition, chatScrollHelperCallback.offset = 0, chatScrollHelperCallback.bottom = !top, true); + }; + if (SCROLL_DEBUG_DELAY && inCaseLoading != null) { + inCaseLoading.run(); + AndroidUtilities.runOnUIThread(() -> { + resetProgressDialogLoading(); + scroll.run(); + }, 7500); + } else { + scroll.run(); + } } } else { - if (replyingMessageObject != null && botReplyButtons == replyingMessageObject) { - botReplyButtons = null; - hideFieldPanel(true); + if (progressDialog != null) { + progressDialog.dismiss(); } - chatActivityEnterView.setButtons(botButtons); - } - } - - public void hideFieldPanel(boolean animated) { - showFieldPanel(false, null, null, null, null, true, 0, false, animated); - } - - public void hideFieldPanel(boolean notify, int scheduleDate, boolean animated) { - showFieldPanel(false, null, null, null, null, notify, scheduleDate, false, animated); - } - - public void showFieldPanelForWebPage(boolean show, TLRPC.WebPage webPage, boolean cancel) { - showFieldPanel(show, null, null, null, webPage, true, 0, cancel, true); - } - - public void showFieldPanelForForward(boolean show, ArrayList messageObjectsToForward) { - showFieldPanel(show, null, null, messageObjectsToForward, null, 
true, 0, false, true); - } - public void showFieldPanelForReply(MessageObject messageObjectToReply) { - showFieldPanel(true, messageObjectToReply, null, null, null, true, 0, false, true); - } + updatePinnedListButton(false); + if (inCaseLoading != null) { + inCaseLoading.run(); + } else { + resetProgressDialogLoading(); + progressDialog = new AlertDialog(getParentActivity(), AlertDialog.ALERT_TYPE_SPINNER, themeDelegate); + progressDialog.setOnCancelListener(postponedScrollCancelListener); + progressDialog.showDelayed(1000); + } - public void showFieldPanelForEdit(boolean show, MessageObject messageObjectToEdit) { - showFieldPanel(show, null, messageObjectToEdit, null, null, true, 0, false, true); - } + postponedScrollToLastMessageQueryIndex = lastLoadIndex; + postponedScrollMessageId = 0; + postponedScrollIsCanceled = false; + waitingForLoad.clear(); - public void beforeMessageSend(boolean notify, int scheduleDate, boolean beforeSend) { - if (beforeSend != NekoConfig.sendCommentAfterForward.Bool()) return; - if (forwardingMessages != null) { - ArrayList messagesToForward = new ArrayList<>(); - forwardingMessages.getSelectedMessages(messagesToForward); - forwardMessages(messagesToForward, forwardingMessages.hideForwardSendersName, forwardingMessages.hideCaption, notify, scheduleDate != 0 && scheduleDate != 0x7ffffffe ? scheduleDate + 1 : scheduleDate); - forwardingMessages = null; + waitingForLoad.add(lastLoadIndex); + AndroidUtilities.runOnUIThread(() -> { + getMessagesController().loadMessages(dialog_id, mergeDialogId, false, 30, 0, 0, true, 0, classGuid, 0, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); + }, SCROLL_DEBUG_DELAY ? 7500 : 0); } } - public void showFieldPanel(boolean show, MessageObject messageObjectToReply, MessageObject messageObjectToEdit, ArrayList messageObjectsToForward, TLRPC.WebPage webPage, boolean notify, int scheduleDate, boolean cancel, boolean animated) { - if (chatActivityEnterView == null) { + public void updateTextureViewPosition(boolean needVisibleUpdate, boolean needScroll) { + if (fragmentView == null || paused) { return; } - boolean showHint = false, showReplyHint = false; - if (show) { - if (messageObjectToReply == null && messageObjectsToForward == null && messageObjectToEdit == null && webPage == null) { - return; - } - hideHints(false); - if (searchItem != null && actionBar.isSearchFieldVisible()) { - actionBar.closeSearchField(false); - chatActivityEnterView.setFieldFocused(); - AndroidUtilities.runOnUIThread(() -> { - if (chatActivityEnterView != null) { - chatActivityEnterView.openKeyboard(); - } - }, 100); - } - boolean openKeyboard = false; - if (messageObjectToReply != null && messageObjectToReply.getDialogId() != dialog_id) { - messageObjectsToForward = new ArrayList<>(); - messageObjectsToForward.add(messageObjectToReply); - messageObjectToReply = null; - openKeyboard = true; - } - chatActivityEnterTopView.setEditMode(false); - if (messageObjectToEdit != null) { - forwardingMessages = null; - if (threadMessageId == 0 || isTopic) { - if (isTopic) { - replyingMessageObject = threadMessageObject; - } else { - replyingMessageObject = null; - } - chatActivityEnterView.setReplyingMessageObject(null); - updateBottomOverlay(); - } - editingMessageObject = messageObjectToEdit; - final boolean mediaEmpty = messageObjectToEdit.isMediaEmpty(); - chatActivityEnterView.setEditingMessageObject(messageObjectToEdit, !mediaEmpty); - if (foundWebPage != null) { - return; - } - chatActivityEnterView.setForceShowSendButton(false, false); 
- final boolean canEditMedia = messageObjectToEdit.canEditMedia(); - replyCloseImageView.setContentDescription(LocaleController.getString("AccDescrCancelEdit", R.string.AccDescrCancelEdit)); - if (!mediaEmpty && canEditMedia) { - String editButtonText = null; - String replaceButtonText; - if (messageObjectToEdit.isPhoto()) { - editButtonText = LocaleController.getString("EditMessageEditPhoto", R.string.EditMessageEditPhoto); - replaceButtonText = LocaleController.getString("EditMessageReplacePhoto", R.string.EditMessageReplacePhoto); - } else if (messageObjectToEdit.isVideo()) { - editButtonText = LocaleController.getString("EditMessageEditVideo", R.string.EditMessageEditVideo); - replaceButtonText = LocaleController.getString("EditMessageReplaceVideo", R.string.EditMessageReplaceVideo); - } else if (messageObjectToEdit.isGif()) { - replaceButtonText = LocaleController.getString("EditMessageReplaceGif", R.string.EditMessageReplaceGif); - } else if (messageObjectToEdit.isMusic()) { - replaceButtonText = LocaleController.getString("EditMessageReplaceAudio", R.string.EditMessageReplaceAudio); - } else { - replaceButtonText = LocaleController.getString("EditMessageReplaceFile", R.string.EditMessageReplaceFile); - } - final ChatActivityEnterTopView.EditViewButton[] buttons = chatActivityEnterTopView.getEditView().getButtons(); - buttons[0].setEditButton(editButtonText != null); - buttons[0].getTextView().setText(editButtonText != null ? editButtonText : replaceButtonText); - buttons[0].getImageView().setImageResource(editButtonText != null ? R.drawable.msg_photoeditor : R.drawable.msg_replace); - buttons[1].setVisibility(editButtonText != null ? View.VISIBLE : View.GONE); - if (editButtonText != null) { - buttons[1].getTextView().setText(replaceButtonText); - } - chatActivityEnterTopView.setEditMode(true); - } else { - replyIconImageView.setImageResource(R.drawable.group_edit); - replyIconImageView.setContentDescription(LocaleController.getString("AccDescrEditing", R.string.AccDescrEditing)); - if (mediaEmpty) { - replyNameTextView.setText(LocaleController.getString("EditMessage", R.string.EditMessage)); - } else { - replyNameTextView.setText(LocaleController.getString("EditCaption", R.string.EditCaption)); - } - if (canEditMedia) { - replyObjectTextView.setText(LocaleController.getString("EditMessageMedia", R.string.EditMessageMedia)); - } else if (messageObjectToEdit.messageText != null || messageObjectToEdit.caption != null) { - String mess = messageObjectToEdit.caption != null ? 
messageObjectToEdit.caption.toString() : messageObjectToEdit.messageText.toString(); - if (mess.length() > 150) { - mess = mess.substring(0, 150); + boolean foundTextureViewMessage = false; + int count = chatListView.getChildCount(); + for (int a = 0; a < count; a++) { + View view = chatListView.getChildAt(a); + if (view instanceof ChatMessageCell) { + ChatMessageCell messageCell = (ChatMessageCell) view; + MessageObject messageObject = messageCell.getMessageObject(); + if (videoPlayerContainer != null && (messageObject.isRoundVideo() || messageObject.isVideo()) && !messageObject.isVoiceTranscriptionOpen() && MediaController.getInstance().isPlayingMessage(messageObject)) { + ImageReceiver imageReceiver = messageCell.getPhotoImage(); + videoPlayerContainer.setTranslationX(imageReceiver.getImageX() + messageCell.getX()); + float translationY = messageCell.getY() + imageReceiver.getImageY() + chatListView.getY() - videoPlayerContainer.getTop(); + videoPlayerContainer.setTranslationY(translationY); + FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) videoPlayerContainer.getLayoutParams(); + if (messageObject.isRoundVideo()) { + videoPlayerContainer.setTag(R.id.parent_tag, null); + if (layoutParams.width != AndroidUtilities.roundPlayingMessageSize || layoutParams.height != AndroidUtilities.roundPlayingMessageSize) { + layoutParams.width = layoutParams.height = AndroidUtilities.roundPlayingMessageSize; + aspectRatioFrameLayout.setResizeMode(AspectRatioFrameLayout.RESIZE_MODE_FIT); + videoPlayerContainer.setLayoutParams(layoutParams); } - mess = mess.replace('\n', ' '); - Spannable cs = new SpannableStringBuilder(mess); - MediaDataController.addTextStyleRuns(messageObjectToEdit, cs); - if (messageObjectToEdit.messageOwner != null) { - cs = MessageObject.replaceAnimatedEmoji(cs, messageObjectToEdit.messageOwner.entities, replyObjectTextView.getPaint().getFontMetricsInt()); + float scale = (AndroidUtilities.roundPlayingMessageSize + AndroidUtilities.roundMessageInset * 2) / (float) AndroidUtilities.roundPlayingMessageSize; + float transitionScale = messageCell.getPhotoImage().getImageWidth() / AndroidUtilities.roundPlayingMessageSize; + if (videoPlayerContainer.getScaleX() != transitionScale) { + videoPlayerContainer.invalidate(); + fragmentView.invalidate(); } - replyObjectTextView.setText(AnimatedEmojiSpan.cloneSpans(cs)); + videoPlayerContainer.setPivotX(0); + videoPlayerContainer.setPivotY(0); + videoPlayerContainer.setScaleX(transitionScale); + videoPlayerContainer.setScaleY(transitionScale); + videoTextureView.setScaleX(scale); + videoTextureView.setScaleY(scale); + } else { + videoPlayerContainer.setTag(R.id.parent_tag, imageReceiver); + if (layoutParams.width != imageReceiver.getImageWidth() || layoutParams.height != imageReceiver.getImageHeight()) { + aspectRatioFrameLayout.setResizeMode(AspectRatioFrameLayout.RESIZE_MODE_FILL); + layoutParams.width = (int) imageReceiver.getImageWidth(); + layoutParams.height = (int) imageReceiver.getImageHeight(); + videoPlayerContainer.setLayoutParams(layoutParams); + } + videoTextureView.setScaleX(1f); + videoTextureView.setScaleY(1f); } + fragmentView.invalidate(); + videoPlayerContainer.invalidate(); + foundTextureViewMessage = true; + break; } - } else if (messageObjectToReply != null) { - forwardingMessages = null; - editingMessageObject = null; - replyingMessageObject = messageObjectToReply; - chatActivityEnterView.setReplyingMessageObject(messageObjectToReply); - chatActivityEnterView.setEditingMessageObject(null, false); - if 
(foundWebPage != null) { - return; - } - String restrictionReason = MessagesController.getRestrictionReason(messageObjectToReply.messageOwner.restriction_reason); - chatActivityEnterView.setForceShowSendButton(false, false); - String name; - if (messageObjectToReply.isFromUser()) { - if (messageObjectToReply.messageOwner.from_id.channel_id != 0) { - TLRPC.Chat chat = getMessagesController().getChat(messageObjectToReply.messageOwner.from_id.channel_id); - if (chat == null) { - return; - } - name = chat.title; + } + } + if (needVisibleUpdate && videoPlayerContainer != null) { + MessageObject messageObject = MediaController.getInstance().getPlayingMessageObject(); + if (messageObject != null && messageObject.eventId == 0) { + if (!foundTextureViewMessage) { + if (checkTextureViewPosition && messageObject.isVideo()) { + MediaController.getInstance().cleanupPlayer(true, true); } else { - TLRPC.User user = getMessagesController().getUser(messageObjectToReply.messageOwner.from_id.user_id); - if (user == null) { - return; + videoPlayerContainer.setTranslationY(-AndroidUtilities.roundPlayingMessageSize - 100); + fragmentView.invalidate(); + if (messageObject.isRoundVideo() || messageObject.isVideo()) { + if (checkTextureViewPosition || PipRoundVideoView.getInstance() != null) { + MediaController.getInstance().setCurrentVideoVisible(false); + } else if (needScroll) { + scrollToMessageId(messageObject.getId(), 0, false, 0, true, 0); + } } - name = UserObject.getUserName(user); } } else { - TLRPC.Chat chat; - if (ChatObject.isChannel(currentChat) && currentChat.megagroup && messageObjectToReply.isForwardedChannelPost()) { - chat = getMessagesController().getChat(messageObjectToReply.messageOwner.fwd_from.from_id.channel_id); + MediaController.getInstance().setCurrentVideoVisible(true); + if (messageObject.isRoundVideo() || scrollToVideo) { + // scrollToMessageId(messageObject.getId(), 0, false, 0, true, 0); } else { - chat = getMessagesController().getChat(-messageObjectToReply.getSenderId()); - } - if (chat == null) { - return; + chatListView.invalidate(); } - name = chat.title; } - replyIconImageView.setImageResource(R.drawable.msg_panel_reply); - replyNameTextView.setText(MessageHelper.INSTANCE.zalgoFilter(name)); - replyIconImageView.setContentDescription(LocaleController.getString("AccDescrReplying", R.string.AccDescrReplying)); - replyCloseImageView.setContentDescription(LocaleController.getString("AccDescrCancelReply", R.string.AccDescrCancelReply)); + } + } + } - CharSequence replyObjectText = null; - CharSequence sourceText = null; - if (!TextUtils.isEmpty(restrictionReason)) { - replyObjectText = restrictionReason; - sourceText = restrictionReason; - } else if (MessageObject.isTopicActionMessage(messageObjectToReply)) { - ForumUtilities.applyTopicToMessage(messageObjectToReply); - if (messageObjectToReply.messageTextForReply != null) { - replyObjectText = messageObjectToReply.messageTextForReply; - } else { - replyObjectText = messageObjectToReply.messageTextShort; - } - AnimatedEmojiSpan.applyFontMetricsForString(replyObjectText, replyObjectTextView.getPaint()); - } else if (messageObjectToReply.replyToForumTopic != null) { - replyObjectText = ForumUtilities.getTopicSpannedName(messageObjectToReply.replyToForumTopic, replyObjectTextView.getPaint()); - } else if (messageObjectToReply.messageOwner.media instanceof TLRPC.TL_messageMediaGame) { - replyObjectText = Emoji.replaceEmoji(messageObjectToReply.messageOwner.media.game.title, replyObjectTextView.getPaint().getFontMetricsInt(), 
AndroidUtilities.dp(14), false); - sourceText = messageObjectToReply.messageOwner.media.game.title; - } else if (messageObjectToReply.messageText != null || messageObjectToReply.caption != null) { - CharSequence mess = messageObjectToReply.caption != null ? messageObjectToReply.caption.toString() : messageObjectToReply.messageText.toString(); - sourceText = mess; - if (mess.length() > 150) { - mess = mess.subSequence(0, 150); - } - mess = AndroidUtilities.replaceNewLines(mess); - if (messageObjectToReply.messageOwner != null && messageObjectToReply.messageOwner.entities != null) { - mess = MessageObject.replaceAnimatedEmoji(mess, messageObjectToReply.messageOwner.entities, replyObjectTextView.getPaint().getFontMetricsInt()); + public void invalidateMessagesVisiblePart() { + invalidateMessagesVisiblePart = true; + if (fragmentView != null) { + fragmentView.invalidate(); + } + } + + private Integer findClosest(ArrayList arrayList, int target, int[] index) { + if (arrayList.isEmpty()) { + return 0; + } + Integer val = arrayList.get(0); + if (target >= val) { + index[0] = 0; + return val; + } + int n = arrayList.size(); + val = arrayList.get(n - 1); + if (target <= val) { + index[0] = n - 1; + return val; + } + + int i = 0, j = n, mid = 0; + while (i < j) { + mid = (i + j) / 2; + + val = arrayList.get(mid); + if (val == target) { + index[0] = mid; + return val; + } + if (target < val) { + if (mid > 0) { + Integer val2 = arrayList.get(mid - 1); + if (target > val2) { + index[0] = mid - 1; + return val2; } - replyObjectText = Emoji.replaceEmoji(mess, replyObjectTextView.getPaint().getFontMetricsInt(), AndroidUtilities.dp(14), false); } - if (replyObjectText != null) { - if (replyObjectText instanceof Spannable && sourceText != null) { - MediaDataController.addTextStyleRuns(messageObjectToReply.messageOwner.entities, sourceText, (Spannable) replyObjectText); + i = mid + 1; + } else { + if (mid > 0) { + Integer val2 = arrayList.get(mid - 1); + if (target < val2) { + index[0] = mid; + return val; } + } + j = mid; + } + } + index[0] = mid; + return arrayList.get(mid); + } - replyObjectTextView.setText(AnimatedEmojiSpan.cloneSpans(replyObjectText)); + public void updateMessagesVisiblePart(boolean inLayout) { + if (chatListView == null || fragmentView == null) { + return; + } + int count = chatListView.getChildCount(); + int height = chatListView.getMeasuredHeight(); + int minPositionHolder = Integer.MAX_VALUE; + int minPositionDateHolder = Integer.MAX_VALUE; + View minDateChild = null; + View minChild = null; + View minMessageChild = null; + boolean foundTextureViewMessage = false; + boolean previousThreadMessageVisible = threadMessageVisible; + int previousPinnedMessageId = currentPinnedMessageId; + int maxVisibleId = Integer.MIN_VALUE; + MessageObject maxVisibleMessageObject = null; + threadMessageVisible = firstLoading; + + Integer currentReadMaxId = null; + int threadId = threadMessageId; + if (threadId != 0 && currentChat != null) { + currentReadMaxId = replyMaxReadId; + } else { + currentReadMaxId = getMessagesController().dialogs_read_inbox_max.get(dialog_id_Long); + } + if (currentReadMaxId == null) { + currentReadMaxId = 0; + } + int maxPositiveUnreadId = Integer.MIN_VALUE; + int maxNegativeUnreadId = Integer.MAX_VALUE; + int maxUnreadDate = Integer.MIN_VALUE; + int recyclerChatViewHeight = (contentView.getHeightWithKeyboard() - (inPreviewMode ? 
0 : AndroidUtilities.dp(48)) - chatListView.getTop()); + pollsToCheck.clear(); + float clipTop = chatListViewPaddingTop; + long currentTime = System.currentTimeMillis(); + int maxAdapterPosition = -1; + int minAdapterPosition = -1; + + boolean blurEnabled = SharedConfig.chatBlurEnabled() && Color.alpha(Theme.getColor(Theme.key_chat_BlurAlpha)) != 255; + + MessageObject messageStarter = isTopic ? topicStarterMessageObject : threadMessageObject; + + for (int a = 0; a < count; a++) { + View view = chatListView.getChildAt(a); + MessageObject messageObject = null; + int adapterPosition = chatListView.getChildAdapterPosition(view); + if (adapterPosition >= 0) { + if (adapterPosition > maxAdapterPosition || maxAdapterPosition == -1) { + maxAdapterPosition = adapterPosition; } - updateBottomOverlay(); - } else if (messageObjectsToForward != null) { - if (messageObjectsToForward.isEmpty()) { - return; + if (adapterPosition < minAdapterPosition || minAdapterPosition == -1) { + minAdapterPosition = adapterPosition; } - if (threadMessageId == 0 || isTopic) { - if (isTopic) { - replyingMessageObject = threadMessageObject; + } + int top = (int) view.getY(); + int bottom = top + view.getMeasuredHeight(); + ChatMessageCell messageCell = null; + if (view instanceof ChatMessageCell) { + messageCell = (ChatMessageCell) view; + } + if (messageCell != null) { + messageCell.isBlurred = (top < clipTop && bottom > clipTop) || (bottom > chatListView.getMeasuredHeight() - blurredViewBottomOffset && top < chatListView.getMeasuredHeight() - blurredViewBottomOffset); + } + + if (bottom <= clipTop - chatListViewPaddingVisibleOffset || top > chatListView.getMeasuredHeight() - blurredViewBottomOffset) { + if (messageCell != null) { + if (blurEnabled) { + messageCell.setVisibleOnScreen(false); } else { - replyingMessageObject = null; + messageCell.setVisibleOnScreen(true); } - chatActivityEnterView.setReplyingMessageObject(replyingMessageObject); - updateBottomOverlay(); - } - editingMessageObject = null; - chatActivityEnterView.setEditingMessageObject(null, false); - if (forwardingMessages == null) { - forwardingMessages = new ForwardingMessagesParams(messageObjectsToForward, dialog_id); } - if (noForwardQuote) { - noForwardQuote = false; - forwardingMessages.hideForwardSendersName = true; + continue; + } + if (messageCell != null) { + messageCell.setVisibleOnScreen(true); + } + + int viewTop = top >= 0 ? 0 : -top; + int viewBottom = view.getMeasuredHeight(); + if (viewBottom > height) { + viewBottom = viewTop + height; + } + int keyboardOffset = contentView.getKeyboardHeight(); + if (keyboardOffset < AndroidUtilities.dp(20) && chatActivityEnterView.isPopupShowing() || chatActivityEnterView.panelAnimationInProgress()) { + keyboardOffset = chatActivityEnterView.getEmojiPadding(); + } + + if (messageCell != null) { + messageObject = messageCell.getMessageObject(); + if (messageObject.getDialogId() == dialog_id && messageObject.getId() > maxVisibleId) { + maxVisibleId = messageObject.getId(); + maxVisibleMessageObject = messageObject; } - if (foundWebPage != null) { - return; + + messageCell.setParentBounds(chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4), chatListView.getMeasuredHeight() - blurredViewBottomOffset); + messageCell.setVisiblePart(viewTop, viewBottom - viewTop, recyclerChatViewHeight, keyboardOffset, view.getY() + (isKeyboardVisible() ? 
chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(), contentView.getMeasuredWidth(), contentView.getBackgroundSizeY(), blurredViewTopOffset, blurredViewBottomOffset); + markSponsoredAsRead(messageObject); + if (!threadMessageVisible && messageStarter != null && (messageObject == messageStarter || isTopic && messageObject != null && messageObject.getId() == messageStarter.getId()) && messageCell.getBottom() > chatListViewPaddingTop) { + threadMessageVisible = true; } - chatActivityEnterView.setForceShowSendButton(true, false); - ArrayList uids = new ArrayList<>(); - replyIconImageView.setImageResource(R.drawable.msg_panel_forward); - replyIconImageView.setContentDescription(LocaleController.getString("AccDescrForwarding", R.string.AccDescrForwarding)); - replyCloseImageView.setContentDescription(LocaleController.getString("AccDescrCancelForward", R.string.AccDescrCancelForward)); - MessageObject object = messageObjectsToForward.get(0); - if (object.isFromUser()) { - uids.add(object.messageOwner.from_id.user_id); - } else { - TLRPC.Chat chat = getMessagesController().getChat(object.messageOwner.peer_id.channel_id); - if (ChatObject.isChannel(chat) && chat.megagroup && object.isForwardedChannelPost()) { - uids.add(-object.messageOwner.fwd_from.from_id.channel_id); + if (videoPlayerContainer != null && (messageObject.isVideo() || messageObject.isRoundVideo()) && !messageObject.isVoiceTranscriptionOpen() && MediaController.getInstance().isPlayingMessage(messageObject)) { + ImageReceiver imageReceiver = messageCell.getPhotoImage(); + if (top + imageReceiver.getImageY2() < 0) { + foundTextureViewMessage = false; } else { - uids.add(-object.messageOwner.peer_id.channel_id); + videoPlayerContainer.setTranslationX(imageReceiver.getImageX() + messageCell.getX()); + + float translationY = messageCell.getY() + imageReceiver.getImageY() + chatListView.getY() - videoPlayerContainer.getTop(); + videoPlayerContainer.setTranslationY(translationY); + fragmentView.invalidate(); + videoPlayerContainer.invalidate(); + foundTextureViewMessage = true; } } - - int type = object.isAnimatedEmoji() || object.isDice() ? 
0 : object.type; - for (int a = 1; a < messageObjectsToForward.size(); a++) { - object = messageObjectsToForward.get(a); - long uid; - if (object.isFromUser()) { - uid = object.messageOwner.from_id.user_id; - } else { - TLRPC.Chat chat = getMessagesController().getChat(object.messageOwner.peer_id.channel_id); - if (ChatObject.isChannel(chat) && chat.megagroup && object.isForwardedChannelPost()) { - uid = -object.messageOwner.fwd_from.from_id.channel_id; + if (startFromVideoTimestamp >= 0 && fragmentOpened && !chatListView.isFastScrollAnimationRunning() && startFromVideoMessageId == messageObject.getId() && (messageObject.isVideo() || messageObject.isRoundVideo() || messageObject.isVoice() || messageObject.isMusic())) { + messageObject.forceSeekTo = startFromVideoTimestamp / (float) messageObject.getDuration(); + MessageObject finalMessage = messageObject; + AndroidUtilities.runOnUIThread(() -> { + if (finalMessage.isVideo()) { + openPhotoViewerForMessage(null, finalMessage); } else { - uid = -object.messageOwner.peer_id.channel_id; + MediaController.getInstance().playMessage(finalMessage); } - } - if (!uids.contains(uid)) { - uids.add(uid); - } - if (messageObjectsToForward.get(a).type != type) { - type = -1; - } + }, 40); + startFromVideoTimestamp = -1; } - StringBuilder userNames = new StringBuilder(); - for (int a = 0; a < uids.size(); a++) { - Long uid = uids.get(a); - TLRPC.Chat chat = null; - TLRPC.User user = null; - if (uid > 0) { - user = getMessagesController().getUser(uid); - } else { - chat = getMessagesController().getChat(-uid); - } - if (user == null && chat == null) { - continue; + + if (fragmentOpened && openAnimationEnded && (chatListItemAnimator == null || !chatListItemAnimator.isRunning()) && messageCell.checkUnreadReactions(clipTop, chatListView.getMeasuredHeight() - blurredViewBottomOffset)) { + reactionsMentionCount--; + getMessagesStorage().markMessageReactionsAsRead(getDialogId(), getTopicId(), messageCell.getMessageObject().getId(), true); + if (reactionsMentionCount <= 0) { + getMessagesController().markReactionsAsRead(dialog_id, getTopicId()); } - if (uids.size() == 1) { - if (user != null) { - userNames.append(UserObject.getUserName(user)); - } else { - userNames.append(chat.title); - } - } else if (uids.size() == 2 || userNames.length() == 0) { - if (userNames.length() > 0) { - userNames.append(", "); - } - if (user != null) { - if (!TextUtils.isEmpty(user.first_name)) { - userNames.append(user.first_name); - } else if (!TextUtils.isEmpty(user.last_name)) { - userNames.append(user.last_name); - } else { - userNames.append(" "); - } - } else { - userNames.append(chat.title); + if (reactionsMentionCount >= 0) { + TLRPC.MessagePeerReaction reaction = messageCell.getMessageObject().getRandomUnreadReaction(); + if (reaction != null) { + ReactionsLayoutInBubble.VisibleReaction visibleReaction = ReactionsLayoutInBubble.VisibleReaction.fromTLReaction(reaction.reaction); + ReactionsEffectOverlay.show(ChatActivity.this, null, messageCell, null, 0, 0, visibleReaction, currentAccount, reaction.big ? 
ReactionsEffectOverlay.LONG_ANIMATION : ReactionsEffectOverlay.SHORT_ANIMATION); + ReactionsEffectOverlay.startAnimation(); } + messageCell.markReactionsAsRead(); } else { - userNames.append(" "); - userNames.append(LocaleController.formatPluralString("AndOther", uids.size() - 1)); - break; + reactionsMentionCount = 0; } + updateReactionsMentionButton(true); } - formwardingNameText = userNames; - if (type == -1 || type == 0 || type == 10 || type == 11 || type == MessageObject.TYPE_EMOJIS) { - replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardMessagesCount", messageObjectsToForward.size())); - } else if (type == 1) { - replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardPhoto", messageObjectsToForward.size())); - if (messageObjectsToForward.size() == 1) { - messageObjectToReply = messageObjectsToForward.get(0); + getDownloadController().checkUnviewedDownloads(messageCell.getId(), dialog_id); + boolean allowPlayEffect = ((messageObject.messageOwner.media != null && !messageObject.messageOwner.media.nopremium) || (messageObject.isAnimatedEmojiStickerSingle() && dialog_id > 0)); + if ((chatListItemAnimator == null || !chatListItemAnimator.isRunning()) && (!messageObject.isOutOwner() || messageObject.forcePlayEffect) && allowPlayEffect && !messageObject.messageOwner.premiumEffectWasPlayed && (messageObject.isPremiumSticker() || messageObject.isAnimatedEmojiStickerSingle()) && emojiAnimationsOverlay.isIdle() && emojiAnimationsOverlay.checkPosition(messageCell, chatListViewPaddingTop, chatListView.getMeasuredHeight() - blurredViewBottomOffset)) { + emojiAnimationsOverlay.onTapItem(messageCell, ChatActivity.this, false); + } else if (messageObject.isAnimatedAnimatedEmoji()) { + emojiAnimationsOverlay.preloadAnimation(messageCell); + } + } else if (view instanceof ChatActionCell) { + ChatActionCell cell = (ChatActionCell) view; + messageObject = cell.getMessageObject(); + if (messageObject != null && messageObject.getDialogId() == dialog_id && messageObject.getId() > maxVisibleId) { + maxVisibleId = Math.max(maxVisibleId, messageObject.getId()); + } + cell.setVisiblePart(view.getY() + (isKeyboardVisible() ? 
chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(), contentView.getBackgroundSizeY()); + } else if (view instanceof BotHelpCell) { + view.invalidate(); + } + if (chatMode != MODE_SCHEDULED && messageObject != null) { + int id = messageObject.getId(); + if (!isThreadChat() && (!messageObject.isOut() && messageObject.isUnread() || messageObject.messageOwner.from_scheduled && id > currentReadMaxId) || id > 0 && isThreadChat() && id > currentReadMaxId && id > replyMaxReadId) { + if (id > 0) { + maxPositiveUnreadId = Math.max(maxPositiveUnreadId, messageObject.getId()); } - } else if (type == 4) { - replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardLocation", messageObjectsToForward.size())); - } else if (type == 3) { - replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardVideo", messageObjectsToForward.size())); - if (messageObjectsToForward.size() == 1) { - messageObjectToReply = messageObjectsToForward.get(0); + if (id < 0 && !isThreadChat()) { + maxNegativeUnreadId = Math.min(maxNegativeUnreadId, messageObject.getId()); } - } else if (type == 12) { - replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardContact", messageObjectsToForward.size())); - } else if (type == 2) { - replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardAudio", messageObjectsToForward.size())); - } else if (type == MessageObject.TYPE_ROUND_VIDEO) { - replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardRound", messageObjectsToForward.size())); - } else if (type == 14) { - replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardMusic", messageObjectsToForward.size())); - } else if (type == MessageObject.TYPE_STICKER || type == MessageObject.TYPE_ANIMATED_STICKER) { - replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardSticker", messageObjectsToForward.size())); - } else if (type == 17) { - replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardPoll", messageObjectsToForward.size())); - } else if (type == 8 || type == 9) { - if (messageObjectsToForward.size() == 1 & type == 9) { - messageObjectToReply = messageObjectsToForward.get(0); + maxUnreadDate = Math.max(maxUnreadDate, messageObject.messageOwner.date); + } + if (messageObject.type == MessageObject.TYPE_POLL && messageObject.getId() > 0) { + pollsToCheck.add(messageObject); + } + } + if (bottom <= clipTop) { + if (view instanceof ChatActionCell && messageObject.isDateObject) { + view.setAlpha(0); + } + continue; + } + int position = view.getBottom(); + if (position < minPositionHolder) { + minPositionHolder = position; + if (view instanceof ChatMessageCell || view instanceof ChatActionCell) { + minMessageChild = view; + } + minChild = view; + } + if (chatListItemAnimator == null || (!chatListItemAnimator.willRemoved(view) && !chatListItemAnimator.willAddedFromAlpha(view))) { + if (view instanceof ChatActionCell && messageObject.isDateObject) { + if (view.getAlpha() != 1.0f) { + view.setAlpha(1.0f); } - if (messageObjectsToForward.size() == 1 && type == 8) { - replyNameTextView.setText(LocaleController.getString("AttachGif", R.string.AttachGif)); - } else { - replyNameTextView.setText(LocaleController.formatPluralString("PreviewForwardFile", messageObjectsToForward.size())); + if (position < minPositionDateHolder) { + minPositionDateHolder = position; + minDateChild = view; } } - - if (forwardingMessages.hideForwardSendersName) { - 
replyObjectTextView.setText(LocaleController.getString("HiddenSendersNameDescription", R.string.HiddenSendersNameDescription)); - } else { - if ((type == -1 || type == 0 || type == 10 || type == 11 || type == MessageObject.TYPE_EMOJIS) && (messageObjectsToForward.size() == 1 && messageObjectsToForward.get(0).messageText != null)) { - MessageObject messageObject = messageObjectsToForward.get(0); - CharSequence mess = new SpannableStringBuilder(messageObject.messageText.toString()); - if (mess.length() > 150) { - mess = mess.subSequence(0, 150); + } + } + currentPinnedMessageId = 0; + if (!pinnedMessageIds.isEmpty()) { + if (maxVisibleId == Integer.MIN_VALUE) { + if (startLoadFromMessageId != 0) { + maxVisibleId = startLoadFromMessageId; + } else if (!pinnedMessageIds.isEmpty()) { + maxVisibleId = pinnedMessageIds.get(0) + 1; + } + } else if (maxVisibleId < 0) { + int idx = messages.indexOf(maxVisibleMessageObject); + if (idx >= 0) { + for (int a = idx - 1; a >= 0; a--) { + MessageObject object = messages.get(a); + if (object.getId() > 0) { + maxVisibleId = object.getId(); + break; } - mess = Emoji.replaceEmoji(mess, replyObjectTextView.getPaint().getFontMetricsInt(), AndroidUtilities.dp(14), false); - if (mess instanceof Spannable) { - MediaDataController.addTextStyleRuns(messageObject.messageOwner.entities, messageObject.messageText, (Spannable) mess); - if (messageObject.messageOwner != null) { - mess = MessageObject.replaceAnimatedEmoji(mess, messageObject.messageOwner.entities, replyObjectTextView.getPaint().getFontMetricsInt()); + } + if (maxVisibleId < 0) { + for (int a = idx + 1, N = messages.size(); a < N; a++) { + MessageObject object = messages.get(a); + if (object.getId() > 0) { + maxVisibleId = object.getId(); + break; } } - replyObjectTextView.setText(mess); - } else { - replyObjectTextView.setText(LocaleController.formatString("ForwardingFromNames", R.string.ForwardingFromNames, userNames)); } } - if (!SharedConfig.forwardingOptionsHintShown) { - showHint = true; - } - } else { - replyIconImageView.setImageResource(R.drawable.msg_link); - if (webPage instanceof TLRPC.TL_webPagePending) { - replyNameTextView.setText(LocaleController.getString("GettingLinkInfo", R.string.GettingLinkInfo)); - replyObjectTextView.setText(pendingLinkSearchString); - } else { - if (webPage.site_name != null) { - replyNameTextView.setText(webPage.site_name); - } else if (webPage.title != null) { - replyNameTextView.setText(webPage.title); - } else { - replyNameTextView.setText(LocaleController.getString("LinkPreview", R.string.LinkPreview)); - } - if (webPage.title != null) { - replyObjectTextView.setText(webPage.title); - } else if (webPage.description != null) { - replyObjectTextView.setText(webPage.description); - } else if (webPage.author != null) { - replyObjectTextView.setText(webPage.author); + } + currentPinnedMessageId = findClosest(pinnedMessageIds, forceNextPinnedMessageId != 0 ? 
forceNextPinnedMessageId : maxVisibleId, currentPinnedMessageIndex); + if (!inMenuMode && !loadingPinnedMessagesList && !pinnedEndReached && !pinnedMessageIds.isEmpty() && currentPinnedMessageIndex[0] > pinnedMessageIds.size() - 2) { + getMediaDataController().loadPinnedMessages(dialog_id, pinnedMessageIds.get(pinnedMessageIds.size() - 1), 0); + loadingPinnedMessagesList = true; + } + } + getMessagesController().addToPollsQueue(dialog_id, pollsToCheck); + chatMessagesMetadataController.checkMessages(chatAdapter, maxAdapterPosition, minAdapterPosition, currentTime); + if (videoPlayerContainer != null) { + if (!foundTextureViewMessage) { + MessageObject messageObject = MediaController.getInstance().getPlayingMessageObject(); + if (messageObject != null) { + if (checkTextureViewPosition && messageObject.isVideo()) { + MediaController.getInstance().cleanupPlayer(true, true); } else { - replyObjectTextView.setText(webPage.display_url); + videoPlayerContainer.setTranslationY(-AndroidUtilities.roundPlayingMessageSize - 100); + fragmentView.invalidate(); + if ((messageObject.isRoundVideo() || messageObject.isVideo()) && messageObject.eventId == 0 && checkTextureViewPosition && !chatListView.isFastScrollAnimationRunning()) { + MediaController.getInstance().setCurrentVideoVisible(false); + } } - chatActivityEnterView.setWebPage(webPage, true); } + } else { + MediaController.getInstance().setCurrentVideoVisible(true); } - MessageObject thumbMediaMessageObject; - if (messageObjectToReply != null) { - thumbMediaMessageObject = messageObjectToReply; - } else if (messageObjectToEdit != null) { - if (!chatActivityEnterTopView.isEditMode()) { - thumbMediaMessageObject = messageObjectToEdit; + } + if (minMessageChild != null) { + MessageObject messageObject; + if (minMessageChild instanceof ChatMessageCell) { + messageObject = ((ChatMessageCell) minMessageChild).getMessageObject(); + } else { + messageObject = ((ChatActionCell) minMessageChild).getMessageObject(); + } + floatingDateView.setCustomDate(messageObject.messageOwner.date, chatMode == MODE_SCHEDULED, true); + } + currentFloatingDateOnScreen = false; + currentFloatingTopIsNotMessage = !(minChild instanceof ChatMessageCell || minChild instanceof ChatActionCell); + if (minDateChild != null) { + boolean showFloatingView = false; + if (minDateChild.getY() > clipTop || currentFloatingTopIsNotMessage) { + if (minDateChild.getAlpha() != 1.0f) { + minDateChild.setAlpha(1.0f); + } + if (chatListView.getChildAdapterPosition(minDateChild) == chatAdapter.messagesStartRow + messages.size() - 1) { + if (minDateChild.getAlpha() != 1.0f) { + minDateChild.setAlpha(1.0f); + } + if (floatingDateAnimation != null) { + floatingDateAnimation.cancel(); + floatingDateAnimation = null; + } + floatingDateView.setTag(null); + floatingDateView.setAlpha(0); + currentFloatingDateOnScreen = false; } else { - thumbMediaMessageObject = null; + hideFloatingDateView(!currentFloatingTopIsNotMessage); } } else { - thumbMediaMessageObject = null; + if (minDateChild.getAlpha() != 0.0f) { + minDateChild.setAlpha(0.0f); + } + showFloatingView = true; } - - FrameLayout.LayoutParams layoutParams1 = (FrameLayout.LayoutParams) replyNameTextView.getLayoutParams(); - FrameLayout.LayoutParams layoutParams2 = (FrameLayout.LayoutParams) replyObjectTextView.getLayoutParams(); - FrameLayout.LayoutParams layoutParams3 = (FrameLayout.LayoutParams) replyObjectHintTextView.getLayoutParams(); - - int cacheType = 1; - int size = 0; - TLRPC.PhotoSize photoSize = null; - TLRPC.PhotoSize thumbPhotoSize = 
null; - TLObject photoSizeObject = null; - if (thumbMediaMessageObject != null && TextUtils.isEmpty(MessagesController.getRestrictionReason(thumbMediaMessageObject.messageOwner.restriction_reason))) { - photoSize = FileLoader.getClosestPhotoSizeWithSize(thumbMediaMessageObject.photoThumbs2, 320); - thumbPhotoSize = FileLoader.getClosestPhotoSizeWithSize(thumbMediaMessageObject.photoThumbs2, AndroidUtilities.dp(40)); - photoSizeObject = thumbMediaMessageObject.photoThumbsObject2; - if (photoSize == null) { - if (thumbMediaMessageObject.mediaExists) { - photoSize = FileLoader.getClosestPhotoSizeWithSize(thumbMediaMessageObject.photoThumbs, AndroidUtilities.getPhotoSize()); - if (photoSize != null) { - size = photoSize.size; - } - cacheType = 0; - } else { - photoSize = FileLoader.getClosestPhotoSizeWithSize(thumbMediaMessageObject.photoThumbs, 320); + float offset = minDateChild.getY() + minDateChild.getMeasuredHeight() - clipTop; + if (offset > floatingDateView.getMeasuredHeight() && offset < floatingDateView.getMeasuredHeight() * 2) { + if (chatListView.getChildAdapterPosition(minDateChild) == chatAdapter.messagesStartRow + messages.size() - 1) { + showFloatingView = false; + if (minDateChild.getAlpha() != 1.0f) { + minDateChild.setAlpha(1.0f); } - thumbPhotoSize = FileLoader.getClosestPhotoSizeWithSize(thumbMediaMessageObject.photoThumbs, AndroidUtilities.dp(40)); - photoSizeObject = thumbMediaMessageObject.photoThumbsObject; + if (floatingDateAnimation != null) { + floatingDateAnimation.cancel(); + floatingDateAnimation = null; + } + floatingDateView.setTag(null); + floatingDateView.setAlpha(0); + } else { + floatingDateViewOffset = -floatingDateView.getMeasuredHeight() * 2 + offset; } - } - if (photoSize == thumbPhotoSize) { - thumbPhotoSize = null; - } - if (photoSize == null || photoSize instanceof TLRPC.TL_photoSizeEmpty || photoSize.location instanceof TLRPC.TL_fileLocationUnavailable || thumbMediaMessageObject.isAnyKindOfSticker() || thumbMediaMessageObject.isSecretMedia() || thumbMediaMessageObject.isWebpageDocument()) { - replyImageView.setImageBitmap(null); - replyImageLocation = null; - replyImageLocationObject = null; - replyImageView.setVisibility(View.INVISIBLE); - layoutParams1.leftMargin = layoutParams2.leftMargin = layoutParams3.leftMargin = AndroidUtilities.dp(52); } else { - if (thumbMediaMessageObject.isRoundVideo()) { - replyImageView.setRoundRadius(AndroidUtilities.dp(17)); - } else { - replyImageView.setRoundRadius(AndroidUtilities.dp(2)); + floatingDateViewOffset = 0; + } + if (showFloatingView) { + if (floatingDateAnimation != null) { + floatingDateAnimation.cancel(); + floatingDateAnimation = null; } - replyImageSize = size; - replyImageCacheType = cacheType; - replyImageLocation = photoSize; - replyImageThumbLocation = thumbPhotoSize; - replyImageLocationObject = photoSizeObject; - replyImageHasMediaSpoiler = thumbMediaMessageObject.hasMediaSpoilers(); - replyImageView.setImage(ImageLocation.getForObject(replyImageLocation, photoSizeObject), replyImageHasMediaSpoiler ? "5_5_b" : "50_50", ImageLocation.getForObject(thumbPhotoSize, photoSizeObject), replyImageHasMediaSpoiler ? 
"5_5_b" : "50_50_b", null, size, cacheType, thumbMediaMessageObject); - replyImageView.setVisibility(View.VISIBLE); - layoutParams1.leftMargin = layoutParams2.leftMargin = layoutParams3.leftMargin = AndroidUtilities.dp(96); + if (floatingDateView.getTag() == null) { + floatingDateView.setTag(1); + } + if (floatingDateView.getAlpha() != 1.0f) { + floatingDateView.setAlpha(1.0f); + } + currentFloatingDateOnScreen = true; } - replyNameTextView.setLayoutParams(layoutParams1); - replyObjectTextView.setLayoutParams(layoutParams2); - replyObjectTextView.setLayoutParams(layoutParams3); - chatActivityEnterView.showTopView(true, openKeyboard); } else { - if (replyingMessageObject == null && forwardingMessages == null && foundWebPage == null && editingMessageObject == null && !chatActivityEnterView.forceShowSendButton && !chatActivityEnterView.isTopViewVisible()) { - return; - } - if (replyingMessageObject != null && replyingMessageObject.messageOwner.reply_markup instanceof TLRPC.TL_replyKeyboardForceReply) { - SharedPreferences preferences = MessagesController.getMainSettings(currentAccount); - preferences.edit().putInt("answered_" + dialog_id, replyingMessageObject.getId()).apply(); + hideFloatingDateView(true); + floatingDateViewOffset = 0; + } + if (isThreadChat()) { + if (previousThreadMessageVisible != threadMessageVisible) { + AndroidUtilities.runOnUIThread(() -> { + updatePinnedMessageView(openAnimationStartTime != 0 && SystemClock.elapsedRealtime() >= openAnimationStartTime + 150); + }); } - if (foundWebPage != null) { - foundWebPage = null; - chatActivityEnterView.setWebPage(null, !cancel); - if (webPage != null && (replyingMessageObject != null || forwardingMessages != null || editingMessageObject != null)) { - showFieldPanel(true, replyingMessageObject, editingMessageObject, forwardingMessages != null ? forwardingMessages.messages : null, null, notify, scheduleDate, false, true); - return; + } else { + if (currentPinnedMessageId != 0) { + MessageObject object = pinnedMessageObjects.get(currentPinnedMessageId); + if (object == null) { + object = messagesDict[0].get(currentPinnedMessageId); + } + if (object == null) { + if (loadingPinnedMessages.indexOfKey(currentPinnedMessageId) < 0) { + loadingPinnedMessages.put(currentPinnedMessageId, true); + ArrayList ids = new ArrayList<>(); + ids.add(currentPinnedMessageId); + getMediaDataController().loadPinnedMessages(dialog_id, ChatObject.isChannel(currentChat) ? 
currentChat.id : 0, ids, true); + } + currentPinnedMessageId = previousPinnedMessageId; } + } else if (previousPinnedMessageId != 0 && !pinnedMessageIds.isEmpty()) { + currentPinnedMessageId = previousPinnedMessageId; } - beforeMessageSend(notify, scheduleDate, false); - chatActivityEnterView.setForceShowSendButton(false, animated); - if (!waitingForSendingMessageLoad) { - chatActivityEnterView.hideTopView(animated); + boolean animated = (fromPullingDownTransition && fragmentView.getVisibility() == View.VISIBLE) || (openAnimationStartTime != 0 && SystemClock.elapsedRealtime() >= openAnimationStartTime + 150); + if (previousPinnedMessageId != currentPinnedMessageId) { + int animateToNext; + if (previousPinnedMessageId == 0) { + animateToNext = 0; + } else if (previousPinnedMessageId > currentPinnedMessageId) { + animateToNext = 1; + } else { + animateToNext = 2; + } + + AndroidUtilities.runOnUIThread(() -> { + updatePinnedMessageView(animated, animateToNext); + }); + } else { + updatePinnedListButton(animated); } - chatActivityEnterView.setReplyingMessageObject(threadMessageObject); - chatActivityEnterView.setEditingMessageObject(null, false); - topViewWasVisible = 0; - replyingMessageObject = threadMessageObject; - editingMessageObject = null; - replyImageLocation = null; - replyImageLocationObject = null; - updateBottomOverlay(); } - - if (showHint) { - if (tapForForwardingOptionsHitRunnable == null) { - AndroidUtilities.runOnUIThread(tapForForwardingOptionsHitRunnable = () -> { - showTapForForwardingOptionsHit = !showTapForForwardingOptionsHit; - replyObjectTextView.setPivotX(0); - replyObjectHintTextView.setPivotX(0); - if (showTapForForwardingOptionsHit) { - replyObjectTextView.animate().alpha(0f).scaleX(0.98f).scaleY(0.98f).setDuration(150).start(); - replyObjectHintTextView.animate().alpha(1f).scaleX(1f).scaleY(1f).setDuration(150).start(); + if (floatingDateView != null) { + floatingDateView.setTranslationY(chatListView.getTranslationY() + chatListViewPaddingTop + floatingDateViewOffset - AndroidUtilities.dp(4)); + } + invalidateChatListViewTopPadding(); + if (!firstLoading && !paused && !inPreviewMode && chatMode == 0 && !getMessagesController().ignoreSetOnline) { + int scheduledRead = 0; + if ((maxPositiveUnreadId != Integer.MIN_VALUE || maxNegativeUnreadId != Integer.MAX_VALUE)) { + int counterDecrement = 0; + for (int a = 0; a < messages.size(); a++) { + MessageObject messageObject = messages.get(a); + int id = messageObject.getId(); + if (maxPositiveUnreadId != Integer.MIN_VALUE) { + if (id > 0 && id <= maxPositiveUnreadId && (messageObject.messageOwner.from_scheduled && id > currentReadMaxId || messageObject.isUnread() && !messageObject.isOut())) { + if (messageObject.messageOwner.from_scheduled) { + scheduledRead++; + } else { + messageObject.setIsRead(); + } + counterDecrement++; + } + } + if (maxNegativeUnreadId != Integer.MAX_VALUE) { + if (id < 0 && id >= maxNegativeUnreadId && messageObject.isUnread()) { + messageObject.setIsRead(); + counterDecrement++; + } + } + } + if (forwardEndReached[0] && maxPositiveUnreadId == minMessageId[0] || maxNegativeUnreadId == minMessageId[0]) { + newUnreadMessageCount = 0; + } else { + newUnreadMessageCount -= counterDecrement; + if (newUnreadMessageCount < 0) { + newUnreadMessageCount = 0; + } + } + if (inLayout) { + AndroidUtilities.runOnUIThread(this::inlineUpdate1); + } else { + inlineUpdate1(); + } + getMessagesController().markDialogAsRead(dialog_id, maxPositiveUnreadId, maxNegativeUnreadId, maxUnreadDate, false, threadId, 
counterDecrement, maxPositiveUnreadId == minMessageId[0] || maxNegativeUnreadId == minMessageId[0], scheduledRead); + firstUnreadSent = true; + } else if (!firstUnreadSent && currentEncryptedChat == null) { + if (chatLayoutManager.findFirstVisibleItemPosition() == 0) { + newUnreadMessageCount = 0; + if (inLayout) { + AndroidUtilities.runOnUIThread(this::inlineUpdate2); } else { - replyObjectTextView.animate().alpha(1f).scaleX(1f).scaleY(1f).setDuration(150).start(); - replyObjectHintTextView.animate().alpha(0f).scaleX(0.98f).scaleY(0.98f).setDuration(150).start(); + inlineUpdate2(); } - AndroidUtilities.runOnUIThread(tapForForwardingOptionsHitRunnable, 6000); - }, 6000); + getMessagesController().markDialogAsRead(dialog_id, minMessageId[0], minMessageId[0], maxDate[0], false, threadId, 0, true, scheduledRead); + if (isTopic) { + getMessagesStorage().updateRepliesMaxReadId(replyOriginalChat.id, replyOriginalMessageId, Math.max(maxPositiveUnreadId, replyMaxReadId), 0, true); + } + firstUnreadSent = true; + } } - } else { - if (tapForForwardingOptionsHitRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(tapForForwardingOptionsHitRunnable); - tapForForwardingOptionsHitRunnable = null; + if (threadId != 0 && maxPositiveUnreadId > 0 && replyMaxReadId != maxPositiveUnreadId) { + replyMaxReadId = maxPositiveUnreadId; + getMessagesStorage().updateRepliesMaxReadId(replyOriginalChat.id, replyOriginalMessageId, replyMaxReadId, newUnreadMessageCount, true); + if (!isTopic) { + getNotificationCenter().postNotificationName(NotificationCenter.commentsRead, replyOriginalChat.id, replyOriginalMessageId, replyMaxReadId); + } } - replyObjectTextView.setAlpha(1f); - replyObjectHintTextView.setAlpha(0); } + } - if (showReplyHint) { - - } else { - + private void inlineUpdate1() { + if (prevSetUnreadCount != newUnreadMessageCount) { + prevSetUnreadCount = newUnreadMessageCount; + pagedownButtonCounter.setCount(newUnreadMessageCount, openAnimationEnded); } } - private void moveScrollToLastMessage(boolean skipSponsored) { - if (chatListView != null && !messages.isEmpty() && !pinchToZoomHelper.isInOverlayMode()) { - int position = 0; - if (skipSponsored) { - position += getSponsoredMessagesCount(); - } - chatLayoutManager.scrollToPositionWithOffset(position, 0); - chatListView.stopScroll(); + private void inlineUpdate2() { + if (prevSetUnreadCount != newUnreadMessageCount) { + prevSetUnreadCount = newUnreadMessageCount; + pagedownButtonCounter.setCount(newUnreadMessageCount, true); } } - private Runnable sendSecretMessageRead(MessageObject messageObject, boolean readNow) { - if (messageObject == null || messageObject.isOut() || !messageObject.isSecretMedia() || messageObject.messageOwner.destroyTime != 0 || messageObject.messageOwner.ttl <= 0) { - return null; - } - messageObject.messageOwner.destroyTime = messageObject.messageOwner.ttl + getConnectionsManager().getCurrentTime(); - if (readNow) { - if (currentEncryptedChat != null) { - getMessagesController().markMessageAsRead(dialog_id, messageObject.messageOwner.random_id, messageObject.messageOwner.ttl); + private void toggleMute(boolean instant) { + boolean muted = getMessagesController().isDialogMuted(dialog_id, getTopicId()); + if (!muted) { + if (instant) { + getNotificationsController().muteDialog(dialog_id, getTopicId(), true); } else { - getMessagesController().markMessageAsRead2(dialog_id, messageObject.getId(), null, messageObject.messageOwner.ttl, 0); + BottomSheet alert = AlertsCreator.createMuteAlert(this, dialog_id, getTopicId(), 
themeDelegate); + alert.setCalcMandatoryInsets(isKeyboardVisible()); + showDialog(alert); } - return null; } else { - return () -> { - if (currentEncryptedChat != null) { - getMessagesController().markMessageAsRead(dialog_id, messageObject.messageOwner.random_id, messageObject.messageOwner.ttl); - } else { - getMessagesController().markMessageAsRead2(dialog_id, messageObject.getId(), null, messageObject.messageOwner.ttl, 0); - } - }; + getNotificationsController().muteDialog(dialog_id, getTopicId(), false); + if (!instant) { + BulletinFactory.createMuteBulletin(this, false, themeDelegate).show(); + } } } - private void clearChatData() { - messages.clear(); - messagesByDays.clear(); - waitingForLoad.clear(); - groupedMessagesMap.clear(); - threadMessageAdded = false; + private int getScrollOffsetForMessage(MessageObject object) { + return getScrollOffsetForMessage(getHeightForMessage(object)); + } + private int getScrollOffsetForMessage(int messageHeight) { + return (int) Math.max(-AndroidUtilities.dp(2), (chatListView.getMeasuredHeight() - blurredViewBottomOffset - chatListViewPaddingTop - messageHeight) / 2); + } - if (chatAdapter != null) { - showProgressView(chatAdapter.botInfoRow < 0); + private int getHeightForMessage(MessageObject object) { + if (getParentActivity() == null) { + return 0; } - if (chatListView != null) { - chatListView.setEmptyView(null); + if (dummyMessageCell == null) { + dummyMessageCell = new ChatMessageCell(getParentActivity(), true, themeDelegate); } - for (int a = 0; a < 2; a++) { - messagesDict[a].clear(); - if (currentEncryptedChat == null) { - maxMessageId[a] = Integer.MAX_VALUE; - minMessageId[a] = Integer.MIN_VALUE; - } else { - maxMessageId[a] = Integer.MIN_VALUE; - minMessageId[a] = Integer.MAX_VALUE; - } - maxDate[a] = Integer.MIN_VALUE; - minDate[a] = 0; - endReached[a] = false; - cacheEndReached[a] = false; - forwardEndReached[a] = true; + dummyMessageCell.isChat = currentChat != null || UserObject.isUserSelf(currentUser); + dummyMessageCell.isBot = currentUser != null && currentUser.bot; + dummyMessageCell.isMegagroup = ChatObject.isChannel(currentChat) && currentChat.megagroup; + return dummyMessageCell.computeHeight(object, groupedMessagesMap.get(object.getGroupId())); + } + + private void startMessageUnselect() { + if (unselectRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(unselectRunnable); } - first = true; - firstLoading = true; - loading = true; - loadingForward = false; - waitingForReplyMessageLoad = false; - startLoadFromMessageId = 0; - showScrollToMessageError = false; - last_message_id = 0; - unreadMessageObject = null; - createUnreadMessageAfterId = 0; - createUnreadMessageAfterIdLoading = false; - needSelectFromMessageId = false; - if (chatAdapter != null) { - chatAdapter.notifyDataSetChanged(false); + unselectRunnable = () -> { + highlightMessageId = Integer.MAX_VALUE; + updateVisibleRows(); + unselectRunnable = null; + }; + AndroidUtilities.runOnUIThread(unselectRunnable, 1000); + } + + private void removeSelectedMessageHighlight() { + if (unselectRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(unselectRunnable); + unselectRunnable = null; } + highlightMessageId = Integer.MAX_VALUE; + } + + private AlertDialog progressDialog; + private int nextScrollToMessageId; + private int nextScrollFromMessageId; + private boolean nextScrollSelect; + private int nextScrollLoadIndex; + private boolean nextScrollForce; + private int nextScrollForcePinnedMessageId; + + public static final int PROGRESS_REPLY = 0; + public static 
final int PROGRESS_LINK = 1; + public static final int PROGRESS_INSTANT = 2; + public static final int PROGRESS_BOT_BUTTON = 3; + + private int progressDialogAtMessageId; + private int progressDialogAtMessageType; + private CharacterStyle progressDialogLinkSpan; + private String progressDialogBotButtonUrl; + private Browser.Progress progressDialogCurrent; + private void resetProgressDialogLoading() { + progressDialogLinkSpan = null; + progressDialogAtMessageId = 0; + progressDialogAtMessageType = -1; + progressDialogBotButtonUrl = null; + progressDialogCurrent = null; + + setPagedownLoading(false, true); + } + + public static final boolean SCROLL_DEBUG_DELAY = false; + private boolean pinnedProgressIsShowing; + Runnable updatePinnedProgressRunnable; + + public void scrollToMessageId(int id, int fromMessageId, boolean select, int loadIndex, boolean forceScroll, int forcePinnedMessageId) { + scrollToMessageId(id, fromMessageId, select, loadIndex, forceScroll, forcePinnedMessageId, null); } - public void scrollToLastMessage(boolean skipSponsored, boolean top) { - scrollToLastMessage(skipSponsored, top, null); - } + public void scrollToMessageId(int id, int fromMessageId, boolean select, int loadIndex, boolean forceScroll, int forcePinnedMessageId, Runnable inCaseLoading) { + if (id == 0 || NotificationCenter.getInstance(currentAccount).isAnimationInProgress() || getParentActivity() == null) { + if (NotificationCenter.getInstance(currentAccount).isAnimationInProgress()) { + nextScrollToMessageId = id; + nextScrollFromMessageId = fromMessageId; + nextScrollSelect = select; + nextScrollLoadIndex = loadIndex; + nextScrollForce = forceScroll; + nextScrollForcePinnedMessageId = forcePinnedMessageId; + NotificationCenter.getInstance(currentAccount).doOnIdle(() -> { + if (nextScrollToMessageId != 0) { + scrollToMessageId(nextScrollToMessageId, nextScrollFromMessageId, nextScrollSelect, nextScrollLoadIndex, nextScrollForce, nextScrollForcePinnedMessageId); + nextScrollToMessageId = 0; + } + }); + } + return; + } + + forceNextPinnedMessageId = Math.abs(forcePinnedMessageId); + forceScrollToFirst = forcePinnedMessageId > 0; + wasManualScroll = true; + MessageObject object = messagesDict[loadIndex].get(id); + boolean query = false; + int scrollDirection = RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UNSET; + int scrollFromIndex = 0; + if (fromMessageId != 0) { + boolean scrollDown = fromMessageId < id; + if (isSecretChat()) { + scrollDown = !scrollDown; + } + scrollDirection = scrollDown ? RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN : RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP; + } else if (messages.size() > 0) { + if (isThreadChat() && id == threadMessageId) { + scrollDirection = RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP; + } else { + int end = chatLayoutManager.findLastVisibleItemPosition(); + for (int i = chatLayoutManager.findFirstVisibleItemPosition(); i <= end; i++) { + if (i >= chatAdapter.messagesStartRow && i < chatAdapter.messagesEndRow) { + MessageObject messageObject = messages.get(i - chatAdapter.messagesStartRow); + if (messageObject.getId() == 0) { + continue; + } + scrollFromIndex = i - chatAdapter.messagesStartRow; + boolean scrollDown = messageObject.getId() < id; + if (isSecretChat()) { + scrollDown = !scrollDown; + } + scrollDirection = scrollDown ? 
RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN : RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP; + break; + } + } + } + } + + chatScrollHelper.setScrollDirection(scrollDirection); + if (!SCROLL_DEBUG_DELAY && object != null) { + MessageObject.GroupedMessages groupedMessages = groupedMessagesMap.get(object.getGroupId()); + if (object.getGroupId() != 0 && groupedMessages != null) { + MessageObject primary = groupedMessages.findPrimaryMessageObject(); + if (primary != null) { + object = primary; + } + } + + int index = messages.indexOf(object); + if (index != -1) { + if (scrollFromIndex > 0) { + scrollDirection = scrollFromIndex > index ? RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN : RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP; + chatScrollHelper.setScrollDirection(scrollDirection); + } + removeSelectedMessageHighlight(); + if (select) { + highlightMessageId = id; + } + + chatAdapter.updateRowsSafe(); + int position = chatAdapter.messagesStartRow + messages.indexOf(object); - public void scrollToLastMessage(boolean skipSponsored, boolean top, Runnable inCaseLoading) { - if (chatListView.isFastScrollAnimationRunning()) { - return; - } - forceNextPinnedMessageId = 0; - nextScrollToMessageId = 0; - forceScrollToFirst = false; - chatScrollHelper.setScrollDirection(RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN); - if (forwardEndReached[0] && first_unread_id == 0 && startLoadFromMessageId == 0) { - setPagedownLoading(false, true); - if (chatLayoutManager.findFirstCompletelyVisibleItemPosition() == 0) { - canShowPagedownButton = false; - updatePagedownButtonVisibility(true); - removeSelectedMessageHighlight(); updateVisibleRows(); - } else { - chatAdapter.updateRowsSafe(); - chatScrollHelperCallback.scrollTo = null; - int position = 0; - if (skipSponsored) { - while (position < messages.size()) { - if (!messages.get(position).isSponsored()) { - break; + boolean found = false; + int count = chatListView.getChildCount(); + for (int a = 0; a < count; a++) { + View view = chatListView.getChildAt(a); + if (view instanceof ChatMessageCell) { + ChatMessageCell cell = (ChatMessageCell) view; + MessageObject messageObject = cell.getMessageObject(); + if (messageObject != null && messageObject.getId() == object.getId()) { + found = true; + view.sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_FOCUSED); + } + } else if (view instanceof ChatActionCell) { + ChatActionCell cell = (ChatActionCell) view; + MessageObject messageObject = cell.getMessageObject(); + if (messageObject != null && messageObject.getId() == object.getId()) { + found = true; + view.sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_FOCUSED); } - position++; } - } - if (top && messages != null && messages.size() > 0 && messages.get(position) != null) { - long groupId = messages.get(position).getGroupId(); - while (groupId != 0 && position + 1 < messages.size()) { - if (groupId != messages.get(position + 1).getGroupId()) { - break; + + if (found) { + int yOffset = getScrollOffsetForMessage(view.getHeight()); + int scrollY = (int) (view.getTop() - chatListViewPaddingTop - yOffset); + int maxScrollOffset = chatListView.computeVerticalScrollRange() - chatListView.computeVerticalScrollOffset() - chatListView.computeVerticalScrollExtent(); + if (maxScrollOffset < 0) { + maxScrollOffset = 0; } - position++; + if (scrollY > maxScrollOffset) { + scrollY = maxScrollOffset; + } + if (scrollY != 0) { + scrollByTouch = false; + chatListView.smoothScrollBy(0, scrollY); + 
chatListView.setOverScrollMode(RecyclerListView.OVER_SCROLL_NEVER); + } + break; } } - if (messages != null && messages.size() > 0) { - position = Math.min(position, messages.size() - 1); - } - final int finalPosition = position; - Runnable scroll = () -> { - chatScrollHelper.scrollToPosition(chatScrollHelperCallback.position = finalPosition, chatScrollHelperCallback.offset = 0, chatScrollHelperCallback.bottom = !top, true); - }; - if (SCROLL_DEBUG_DELAY && inCaseLoading != null) { - inCaseLoading.run(); - AndroidUtilities.runOnUIThread(() -> { - resetProgressDialogLoading(); - scroll.run(); - }, 7500); - } else { - scroll.run(); + if (!found) { + int yOffset = getScrollOffsetForMessage(object); + chatScrollHelperCallback.scrollTo = object; + chatScrollHelperCallback.lastBottom = false; + chatScrollHelperCallback.lastItemOffset = yOffset; + chatScrollHelperCallback.lastPadding = (int) chatListViewPaddingTop; + chatScrollHelper.setScrollDirection(scrollDirection); + chatScrollHelper.scrollToPosition(chatScrollHelperCallback.position = position, chatScrollHelperCallback.offset = yOffset, chatScrollHelperCallback.bottom = false, true); + canShowPagedownButton = true; + updatePagedownButtonVisibility(true); } + } else { + query = true; } } else { + query = true; + } + + if (query) { + if (isThreadChat() && id == threadMessageId) { + scrollToThreadMessage = true; + id = 1; + } if (progressDialog != null) { progressDialog.dismiss(); } - updatePinnedListButton(false); + showPinnedProgress(forceNextPinnedMessageId != 0); + if (inCaseLoading != null) { inCaseLoading.run(); - } else { - resetProgressDialogLoading(); + } else if (forceNextPinnedMessageId == 0) { progressDialog = new AlertDialog(getParentActivity(), AlertDialog.ALERT_TYPE_SPINNER, themeDelegate); + progressDialog.setOnShowListener(dialogInterface -> showPinnedProgress(false)); progressDialog.setOnCancelListener(postponedScrollCancelListener); - progressDialog.showDelayed(1000); + progressDialog.showDelayed(400); } - postponedScrollToLastMessageQueryIndex = lastLoadIndex; - postponedScrollMessageId = 0; - postponedScrollIsCanceled = false; waitingForLoad.clear(); - + removeSelectedMessageHighlight(); + scrollToMessagePosition = -10000; + startLoadFromMessageId = id; + showScrollToMessageError = !forceScroll; + if (id == createUnreadMessageAfterId) { + createUnreadMessageAfterIdLoading = true; + } + postponedScrollIsCanceled = false; waitingForLoad.add(lastLoadIndex); + postponedScrollToLastMessageQueryIndex = lastLoadIndex; + postponedScrollMinMessageId = minMessageId[0]; + postponedScrollMessageId = id; AndroidUtilities.runOnUIThread(() -> { - getMessagesController().loadMessages(dialog_id, mergeDialogId, false, 30, 0, 0, true, 0, classGuid, 0, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); + getMessagesController().loadMessages(loadIndex == 0 ? dialog_id : mergeDialogId, 0, false, ((isThreadChat() && !isTopic) || AndroidUtilities.isTablet()) ? 30 : 20, startLoadFromMessageId, 0, true, 0, classGuid, 3, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); }, SCROLL_DEBUG_DELAY ? 
7500 : 0); + } else { + View child = chatListView.getChildAt(0); + if (child != null && child.getTop() <= 0) { + showFloatingDateView(false); + } } + returnToMessageId = fromMessageId; + if (NekoConfig.rememberAllBackMessages.Bool() && fromMessageId > 0) + returnToMessageIdsStack.push(returnToMessageId); + returnToLoadIndex = loadIndex; + needSelectFromMessageId = select; } - public void updateTextureViewPosition(boolean needVisibleUpdate, boolean needScroll) { - if (fragmentView == null || paused) { - return; - } - boolean foundTextureViewMessage = false; - int count = chatListView.getChildCount(); - for (int a = 0; a < count; a++) { - View view = chatListView.getChildAt(a); - if (view instanceof ChatMessageCell) { - ChatMessageCell messageCell = (ChatMessageCell) view; - MessageObject messageObject = messageCell.getMessageObject(); - if (videoPlayerContainer != null && (messageObject.isRoundVideo() || messageObject.isVideo()) && !messageObject.isVoiceTranscriptionOpen() && MediaController.getInstance().isPlayingMessage(messageObject)) { - ImageReceiver imageReceiver = messageCell.getPhotoImage(); - videoPlayerContainer.setTranslationX(imageReceiver.getImageX() + messageCell.getX()); - float translationY = messageCell.getY() + imageReceiver.getImageY() + chatListView.getY() - videoPlayerContainer.getTop(); - videoPlayerContainer.setTranslationY(translationY); - FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) videoPlayerContainer.getLayoutParams(); - if (messageObject.isRoundVideo()) { - videoPlayerContainer.setTag(R.id.parent_tag, null); - if (layoutParams.width != AndroidUtilities.roundPlayingMessageSize || layoutParams.height != AndroidUtilities.roundPlayingMessageSize) { - layoutParams.width = layoutParams.height = AndroidUtilities.roundPlayingMessageSize; - aspectRatioFrameLayout.setResizeMode(AspectRatioFrameLayout.RESIZE_MODE_FIT); - videoPlayerContainer.setLayoutParams(layoutParams); - } - float scale = (AndroidUtilities.roundPlayingMessageSize + AndroidUtilities.roundMessageInset * 2) / (float) AndroidUtilities.roundPlayingMessageSize; - float transitionScale = messageCell.getPhotoImage().getImageWidth() / AndroidUtilities.roundPlayingMessageSize; - if (videoPlayerContainer.getScaleX() != transitionScale) { - videoPlayerContainer.invalidate(); - fragmentView.invalidate(); - } - videoPlayerContainer.setPivotX(0); - videoPlayerContainer.setPivotY(0); - videoPlayerContainer.setScaleX(transitionScale); - videoPlayerContainer.setScaleY(transitionScale); - videoTextureView.setScaleX(scale); - videoTextureView.setScaleY(scale); - } else { - videoPlayerContainer.setTag(R.id.parent_tag, imageReceiver); - if (layoutParams.width != imageReceiver.getImageWidth() || layoutParams.height != imageReceiver.getImageHeight()) { - aspectRatioFrameLayout.setResizeMode(AspectRatioFrameLayout.RESIZE_MODE_FILL); - layoutParams.width = (int) imageReceiver.getImageWidth(); - layoutParams.height = (int) imageReceiver.getImageHeight(); - videoPlayerContainer.setLayoutParams(layoutParams); - } - videoTextureView.setScaleX(1f); - videoTextureView.setScaleY(1f); - } - fragmentView.invalidate(); - videoPlayerContainer.invalidate(); - foundTextureViewMessage = true; - break; - } + private void showPinnedProgress(boolean show) { + if (show) { + if (updatePinnedProgressRunnable == null) { + updatePinnedProgressRunnable = () -> { + pinnedProgressIsShowing = true; + updatePinnedListButton(true); + }; + AndroidUtilities.runOnUIThread(updatePinnedProgressRunnable, 100); } - } - if (needVisibleUpdate 
&& videoPlayerContainer != null) { - MessageObject messageObject = MediaController.getInstance().getPlayingMessageObject(); - if (messageObject != null && messageObject.eventId == 0) { - if (!foundTextureViewMessage) { - if (checkTextureViewPosition && messageObject.isVideo()) { - MediaController.getInstance().cleanupPlayer(true, true); - } else { - videoPlayerContainer.setTranslationY(-AndroidUtilities.roundPlayingMessageSize - 100); - fragmentView.invalidate(); - if (messageObject.isRoundVideo() || messageObject.isVideo()) { - if (checkTextureViewPosition || PipRoundVideoView.getInstance() != null) { - MediaController.getInstance().setCurrentVideoVisible(false); - } else if (needScroll) { - scrollToMessageId(messageObject.getId(), 0, false, 0, true, 0); - } - } - } - } else { - MediaController.getInstance().setCurrentVideoVisible(true); - if (messageObject.isRoundVideo() || scrollToVideo) { - // scrollToMessageId(messageObject.getId(), 0, false, 0, true, 0); - } else { - chatListView.invalidate(); - } - } + } else { + if (updatePinnedProgressRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(updatePinnedProgressRunnable); } + updatePinnedProgressRunnable = null; + pinnedProgressIsShowing = false; + updatePinnedListButton(true); } - } - - public void invalidateMessagesVisiblePart() { - invalidateMessagesVisiblePart = true; - if (fragmentView != null) { - fragmentView.invalidate(); - } - } - - private Integer findClosest(ArrayList arrayList, int target, int[] index) { - if (arrayList.isEmpty()) { - return 0; - } - Integer val = arrayList.get(0); - if (target >= val) { - index[0] = 0; - return val; - } - int n = arrayList.size(); - val = arrayList.get(n - 1); - if (target <= val) { - index[0] = n - 1; - return val; - } - - int i = 0, j = n, mid = 0; - while (i < j) { - mid = (i + j) / 2; + } - val = arrayList.get(mid); - if (val == target) { - index[0] = mid; - return val; + private void updatePagedownButtonVisibility(boolean animated) { + if (pagedownButton == null) { + return; + } + boolean show = canShowPagedownButton && !textSelectionHelper.isSelectionMode() && !chatActivityEnterView.isRecordingAudioVideo(); + if (show) { + if (animated && (openAnimationStartTime == 0 || SystemClock.elapsedRealtime() < openAnimationStartTime + 150)) { + animated = false; } - if (target < val) { - if (mid > 0) { - Integer val2 = arrayList.get(mid - 1); - if (target > val2) { - index[0] = mid - 1; - return val2; - } + pagedownButtonShowedByScroll = false; + if (pagedownButton.getTag() == null) { + if (pagedownButtonAnimation != null) { + pagedownButtonAnimation.removeAllListeners(); + pagedownButtonAnimation.cancel(); + pagedownButtonAnimation = null; } - i = mid + 1; - } else { - if (mid > 0) { - Integer val2 = arrayList.get(mid - 1); - if (target < val2) { - index[0] = mid; - return val; - } + pagedownButton.setTag(1); + if (animated) { + pagedownButton.setVisibility(View.VISIBLE); + pagedownButtonAnimation = ValueAnimator.ofFloat(pagedownButtonEnterProgress, 1f); + pagedownButtonAnimation.addUpdateListener(valueAnimator -> { + pagedownButtonEnterProgress = (float) valueAnimator.getAnimatedValue(); + contentView.invalidate(); + }); + pagedownButtonAnimation.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + pagedownButtonEnterProgress = 1f; + contentView.invalidate(); + } + }); + pagedownButtonAnimation.setDuration(200); + pagedownButtonAnimation.start(); + } else { + pagedownButtonEnterProgress = 1f; + contentView.invalidate(); + 
pagedownButton.setVisibility(View.VISIBLE); + } + } + } else { + returnToMessageId = 0; + returnToMessageIdsStack.clear(); + newUnreadMessageCount = 0; + if (pagedownButton.getTag() != null) { + pagedownButton.setTag(null); + if (pagedownButtonAnimation != null) { + pagedownButtonAnimation.removeAllListeners(); + pagedownButtonAnimation.cancel(); + pagedownButtonAnimation = null; + } + if (animated) { + pagedownButton.setVisibility(View.VISIBLE); + pagedownButtonAnimation = ValueAnimator.ofFloat(pagedownButtonEnterProgress, 0); + pagedownButtonAnimation.addUpdateListener(valueAnimator -> { + pagedownButtonEnterProgress = (float) valueAnimator.getAnimatedValue(); + contentView.invalidate(); + }); + pagedownButtonAnimation.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + pagedownButtonEnterProgress = 0; + pagedownButton.setVisibility(View.INVISIBLE); + contentView.invalidate(); + } + }); + pagedownButtonAnimation.setDuration(200); + pagedownButtonAnimation.start(); + } else { + pagedownButtonEnterProgress = 0; + pagedownButton.setVisibility(View.INVISIBLE); } - j = mid; } } - index[0] = mid; - return arrayList.get(mid); } - public void updateMessagesVisiblePart(boolean inLayout) { - if (chatListView == null) { + private void showMentionDownButton(boolean show, boolean animated) { + if (mentiondownButton == null) { return; } - int count = chatListView.getChildCount(); - int height = chatListView.getMeasuredHeight(); - int minPositionHolder = Integer.MAX_VALUE; - int minPositionDateHolder = Integer.MAX_VALUE; - View minDateChild = null; - View minChild = null; - View minMessageChild = null; - boolean foundTextureViewMessage = false; - boolean previousThreadMessageVisible = threadMessageVisible; - int previousPinnedMessageId = currentPinnedMessageId; - int maxVisibleId = Integer.MIN_VALUE; - MessageObject maxVisibleMessageObject = null; - threadMessageVisible = firstLoading; - - Integer currentReadMaxId = null; - int threadId = threadMessageId; - if (threadId != 0 && currentChat != null) { - currentReadMaxId = replyMaxReadId; - } else { - currentReadMaxId = getMessagesController().dialogs_read_inbox_max.get(dialog_id_Long); - } - if (currentReadMaxId == null) { - currentReadMaxId = 0; - } - int maxPositiveUnreadId = Integer.MIN_VALUE; - int maxNegativeUnreadId = Integer.MAX_VALUE; - int maxUnreadDate = Integer.MIN_VALUE; - int recyclerChatViewHeight = (contentView.getHeightWithKeyboard() - (inPreviewMode ? 0 : AndroidUtilities.dp(48)) - chatListView.getTop()); - pollsToCheck.clear(); - float clipTop = chatListViewPaddingTop; - long currentTime = System.currentTimeMillis(); - int maxAdapterPosition = -1; - int minAdapterPosition = -1; - - boolean blurEnabled = SharedConfig.chatBlurEnabled() && Color.alpha(Theme.getColor(Theme.key_chat_BlurAlpha)) != 255; - - MessageObject messageStarter = isTopic ? 
topicStarterMessageObject : threadMessageObject; - - for (int a = 0; a < count; a++) { - View view = chatListView.getChildAt(a); - MessageObject messageObject = null; - int adapterPosition = chatListView.getChildAdapterPosition(view); - if (adapterPosition >= 0) { - if (adapterPosition > maxAdapterPosition || maxAdapterPosition == -1) { - maxAdapterPosition = adapterPosition; + if (show) { + if (mentiondownButton.getTag() == null) { + if (mentiondownButtonAnimation != null) { + mentiondownButtonAnimation.removeAllListeners(); + mentiondownButtonAnimation.cancel(); + mentiondownButtonAnimation = null; } - if (adapterPosition < minAdapterPosition || minAdapterPosition == -1) { - minAdapterPosition = adapterPosition; + if (animated) { + mentiondownButton.setVisibility(View.VISIBLE); + mentiondownButton.setTag(1); + mentiondownButtonAnimation = ValueAnimator.ofFloat(mentionsButtonEnterProgress, 1f); + mentiondownButtonAnimation.addUpdateListener(valueAnimator -> { + mentionsButtonEnterProgress = (float) valueAnimator.getAnimatedValue(); + contentView.invalidate(); + }); + mentiondownButtonAnimation.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + mentionsButtonEnterProgress = 1f; + contentView.invalidate(); + } + }); + mentiondownButtonAnimation.setDuration(200); + mentiondownButtonAnimation.start(); + } else { + mentionsButtonEnterProgress = 1f; + contentView.invalidate(); } } - int top = (int) view.getY(); - int bottom = top + view.getMeasuredHeight(); - ChatMessageCell messageCell = null; - if (view instanceof ChatMessageCell) { - messageCell = (ChatMessageCell) view; - } - if (messageCell != null) { - messageCell.isBlurred = (top < clipTop && bottom > clipTop) || (bottom > chatListView.getMeasuredHeight() - blurredViewBottomOffset && top < chatListView.getMeasuredHeight() - blurredViewBottomOffset); - } - - if (bottom <= clipTop - chatListViewPaddingVisibleOffset || top > chatListView.getMeasuredHeight() - blurredViewBottomOffset) { - if (messageCell != null) { - if (!blurEnabled) { - messageCell.setVisibleOnScreen(false); - } else { - messageCell.setVisibleOnScreen(true); - } + } else { + returnToMessageId = 0; + returnToMessageIdsStack.clear(); + if (mentiondownButton.getTag() != null) { + mentiondownButton.setTag(null); + if (mentiondownButtonAnimation != null) { + mentiondownButtonAnimation.removeAllListeners(); + mentiondownButtonAnimation.cancel(); + mentiondownButtonAnimation = null; + } + if (animated) { + mentiondownButtonAnimation = ValueAnimator.ofFloat(mentionsButtonEnterProgress, 0f); + mentiondownButtonAnimation.addUpdateListener(valueAnimator -> { + mentionsButtonEnterProgress = (float) valueAnimator.getAnimatedValue(); + contentView.invalidate(); + }); + mentiondownButtonAnimation.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + mentionsButtonEnterProgress = 0f; + mentiondownButton.setVisibility(View.INVISIBLE); + contentView.invalidate(); + } + }); + mentiondownButtonAnimation.setDuration(200); + mentiondownButtonAnimation.start(); + } else { + mentionsButtonEnterProgress = 0f; + mentiondownButton.setVisibility(View.INVISIBLE); } - continue; - } - if (messageCell != null) { - messageCell.setVisibleOnScreen(true); - } - - int viewTop = top >= 0 ? 
0 : -top; - int viewBottom = view.getMeasuredHeight(); - if (viewBottom > height) { - viewBottom = viewTop + height; - } - int keyboardOffset = contentView.getKeyboardHeight(); - if (keyboardOffset < AndroidUtilities.dp(20) && chatActivityEnterView.isPopupShowing() || chatActivityEnterView.panelAnimationInProgress()) { - keyboardOffset = chatActivityEnterView.getEmojiPadding(); } + } + } - if (messageCell != null) { - messageObject = messageCell.getMessageObject(); - if (messageObject.getDialogId() == dialog_id && messageObject.getId() > maxVisibleId) { - maxVisibleId = messageObject.getId(); - maxVisibleMessageObject = messageObject; - } + public class ChatActivityFragmentView extends SizeNotifierFrameLayout { - messageCell.setParentBounds(chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4), chatListView.getMeasuredHeight() - blurredViewBottomOffset); - messageCell.setVisiblePart(viewTop, viewBottom - viewTop, recyclerChatViewHeight, keyboardOffset, view.getY() + (isKeyboardVisible() ? chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(), contentView.getMeasuredWidth(), contentView.getBackgroundSizeY(), blurredViewTopOffset, blurredViewBottomOffset); - markSponsoredAsRead(messageObject); - if (!threadMessageVisible && messageStarter != null && (messageObject == messageStarter || isTopic && messageObject != null && messageObject.getId() == messageStarter.getId()) && messageCell.getBottom() > chatListViewPaddingTop) { - threadMessageVisible = true; - } - if (videoPlayerContainer != null && (messageObject.isVideo() || messageObject.isRoundVideo()) && !messageObject.isVoiceTranscriptionOpen() && MediaController.getInstance().isPlayingMessage(messageObject)) { - ImageReceiver imageReceiver = messageCell.getPhotoImage(); - if (top + imageReceiver.getImageY2() < 0) { - foundTextureViewMessage = false; - } else { - videoPlayerContainer.setTranslationX(imageReceiver.getImageX() + messageCell.getX()); + public ChatActivityFragmentView(Context context, INavigationLayout parentLayout) { + super(context, parentLayout); + adjustPanLayoutHelper = new AdjustPanLayoutHelper(this) { - float translationY = messageCell.getY() + imageReceiver.getImageY() + chatListView.getY() - videoPlayerContainer.getTop(); - videoPlayerContainer.setTranslationY(translationY); - fragmentView.invalidate(); - videoPlayerContainer.invalidate(); - foundTextureViewMessage = true; + @Override + protected void onTransitionStart(boolean keyboardVisible, int contentHeight) { + wasManualScroll = true; + if (chatActivityEnterView != null) { + chatActivityEnterView.onAdjustPanTransitionStart(keyboardVisible, contentHeight); + } + if (mentionContainer != null) { + mentionContainer.onPanTransitionStart(); + } + if (mediaBanTooltip != null) { + mediaBanTooltip.hide(false); } } - if (startFromVideoTimestamp >= 0 && fragmentOpened && !chatListView.isFastScrollAnimationRunning() && startFromVideoMessageId == messageObject.getId() && (messageObject.isVideo() || messageObject.isRoundVideo() || messageObject.isVoice() || messageObject.isMusic())) { - messageObject.forceSeekTo = startFromVideoTimestamp / (float) messageObject.getDuration(); - MessageObject finalMessage = messageObject; - AndroidUtilities.runOnUIThread(() -> { - if (finalMessage.isVideo()) { - openPhotoViewerForMessage(null, finalMessage); - } else { - MediaController.getInstance().playMessage(finalMessage); - } - }, 40); - startFromVideoTimestamp = -1; + + @Override + protected void onTransitionEnd() { + 
if (chatActivityEnterView != null) { + chatActivityEnterView.onAdjustPanTransitionEnd(); + } + if (mentionContainer != null) { + mentionContainer.onPanTransitionEnd(); + } + if (voiceHintTextView != null && voiceHintTextView.getVisibility() == View.VISIBLE) { + voiceHintTextView.showForView(chatActivityEnterView.getAudioVideoButtonContainer(), false); + } } - if (fragmentOpened && openAnimationEnded && (chatListItemAnimator == null || !chatListItemAnimator.isRunning()) && messageCell.checkUnreadReactions(clipTop, chatListView.getMeasuredHeight() - blurredViewBottomOffset)) { - reactionsMentionCount--; - getMessagesStorage().markMessageReactionsAsRead(getDialogId(), getTopicId(), messageCell.getMessageObject().getId(), true); - if (reactionsMentionCount <= 0) { - getMessagesController().markReactionsAsRead(dialog_id, getTopicId()); + @Override + protected void onPanTranslationUpdate(float y, float progress, boolean keyboardVisible) { + if (getParentLayout() != null && getParentLayout().isPreviewOpenAnimationInProgress()) { + return; } - if (reactionsMentionCount >= 0) { - TLRPC.MessagePeerReaction reaction = messageCell.getMessageObject().getRandomUnreadReaction(); - if (reaction != null) { - ReactionsLayoutInBubble.VisibleReaction visibleReaction = ReactionsLayoutInBubble.VisibleReaction.fromTLReaction(reaction.reaction); - ReactionsEffectOverlay.show(ChatActivity.this, null, messageCell, null, 0, 0, visibleReaction, currentAccount, reaction.big ? ReactionsEffectOverlay.LONG_ANIMATION : ReactionsEffectOverlay.SHORT_ANIMATION); - ReactionsEffectOverlay.startAnimation(); - } - messageCell.markReactionsAsRead(); + contentPanTranslation = y; + if (chatAttachAlert != null && chatAttachAlert.isShowing()) { + setNonNoveTranslation(y); } else { - reactionsMentionCount = 0; + actionBar.setTranslationY(y); + if (emptyViewContainer != null) { + emptyViewContainer.setTranslationY(y / 2); + } + progressView.setTranslationY(y / 2); + contentView.setBackgroundTranslation((int) y); + if (instantCameraView != null) { + instantCameraView.onPanTranslationUpdate(y); + } + if (blurredView != null) { + blurredView.drawable.onPanTranslationUpdate(y); + } + setFragmentPanTranslationOffset((int) y); + invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); } - updateReactionsMentionButton(true); - } - getDownloadController().checkUnviewedDownloads(messageCell.getId(), dialog_id); - boolean allowPlayEffect = ((messageObject.messageOwner.media != null && !messageObject.messageOwner.media.nopremium) || (messageObject.isAnimatedEmojiStickerSingle() && dialog_id > 0)); - if ((chatListItemAnimator == null || !chatListItemAnimator.isRunning()) && (!messageObject.isOutOwner() || messageObject.forcePlayEffect) && allowPlayEffect && !messageObject.messageOwner.premiumEffectWasPlayed && (messageObject.isPremiumSticker() || messageObject.isAnimatedEmojiStickerSingle()) && emojiAnimationsOverlay.isIdle() && emojiAnimationsOverlay.checkPosition(messageCell, chatListViewPaddingTop, chatListView.getMeasuredHeight() - blurredViewBottomOffset)) { - emojiAnimationsOverlay.onTapItem(messageCell, ChatActivity.this, false); - } else if (messageObject.isAnimatedAnimatedEmoji()) { - emojiAnimationsOverlay.preloadAnimation(messageCell); - } - } else if (view instanceof ChatActionCell) { - ChatActionCell cell = (ChatActionCell) view; - messageObject = cell.getMessageObject(); - if (messageObject != null && messageObject.getDialogId() == dialog_id && messageObject.getId() > maxVisibleId) { - maxVisibleId = 
Math.max(maxVisibleId, messageObject.getId()); - } - cell.setVisiblePart(view.getY() + (isKeyboardVisible() ? chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(), contentView.getBackgroundSizeY()); - } else if (view instanceof BotHelpCell) { - view.invalidate(); - } - if (chatMode != MODE_SCHEDULED && messageObject != null) { - int id = messageObject.getId(); - if (!isThreadChat() && (!messageObject.isOut() && messageObject.isUnread() || messageObject.messageOwner.from_scheduled && id > currentReadMaxId) || id > 0 && isThreadChat() && id > currentReadMaxId && id > replyMaxReadId) { - if (id > 0) { - maxPositiveUnreadId = Math.max(maxPositiveUnreadId, messageObject.getId()); + chatListView.invalidate(); + updateBulletinLayout(); + if (chatActivityEnterView != null) { + chatActivityEnterView.onAdjustPanTransitionUpdate(y, progress, keyboardVisible); } - if (id < 0 && !isThreadChat()) { - maxNegativeUnreadId = Math.min(maxNegativeUnreadId, messageObject.getId()); + if (mentionContainer != null) { + mentionContainer.onPanTransitionUpdate(y); + } + if (AndroidUtilities.isTablet() && getParentActivity() instanceof LaunchActivity) { + BaseFragment mainFragment = ((LaunchActivity)getParentActivity()).getActionBarLayout().getLastFragment(); + if (mainFragment instanceof DialogsActivity) { + ((DialogsActivity)mainFragment).setPanTranslationOffset(y); + } + } + if (voiceHintTextView != null && voiceHintTextView.getVisibility() == View.VISIBLE) { + voiceHintTextView.showForView(chatActivityEnterView.getAudioVideoButtonContainer(), false); + } + if (fragmentContextView != null) { + fragmentContextView.onPanTranslationUpdate(y); } - maxUnreadDate = Math.max(maxUnreadDate, messageObject.messageOwner.date); - } - if (messageObject.type == MessageObject.TYPE_POLL && messageObject.getId() > 0) { - pollsToCheck.add(messageObject); - } - } - if (bottom <= clipTop) { - if (view instanceof ChatActionCell && messageObject.isDateObject) { - view.setAlpha(0); - } - continue; - } - int position = view.getBottom(); - if (position < minPositionHolder) { - minPositionHolder = position; - if (view instanceof ChatMessageCell || view instanceof ChatActionCell) { - minMessageChild = view; } - minChild = view; - } - if (chatListItemAnimator == null || (!chatListItemAnimator.willRemoved(view) && !chatListItemAnimator.willAddedFromAlpha(view))) { - if (view instanceof ChatActionCell && messageObject.isDateObject) { - if (view.getAlpha() != 1.0f) { - view.setAlpha(1.0f); + + @Override + protected boolean heightAnimationEnabled() { + INavigationLayout actionBarLayout = getParentLayout(); + if (inPreviewMode || inBubbleMode || AndroidUtilities.isInMultiwindow || actionBarLayout == null || fixedKeyboardHeight > 0) { + return false; } - if (position < minPositionDateHolder) { - minPositionDateHolder = position; - minDateChild = view; + if (System.currentTimeMillis() - activityResumeTime < 250) { + return false; + } + if ((ChatActivity.this == actionBarLayout.getLastFragment() && actionBarLayout.isTransitionAnimationInProgress()) || actionBarLayout.isPreviewOpenAnimationInProgress() || isPaused || !openAnimationEnded || (chatAttachAlert != null && chatAttachAlert.isShowing())) { + return false; + } + if (chatActivityEnterView != null && chatActivityEnterView.getTrendingStickersAlert() != null && chatActivityEnterView.getTrendingStickersAlert().isShowing()) { + return false; } + return true; } - } + + @Override + protected int startOffset() { + int keyboardSize = getKeyboardHeight(); + 
if (keyboardSize <= AndroidUtilities.dp(20) && chatActivityEnterView.isPopupShowing()) {
+ return chatActivityEnterView.getEmojiPadding();
+ }
+ return 0;
+ }
+ };
+ }
- currentPinnedMessageId = 0;
- if (!pinnedMessageIds.isEmpty()) {
- if (maxVisibleId == Integer.MIN_VALUE) {
- if (startLoadFromMessageId != 0) {
- maxVisibleId = startLoadFromMessageId;
- } else if (!pinnedMessageIds.isEmpty()) {
- maxVisibleId = pinnedMessageIds.get(0) + 1;
+
+ int inputFieldHeight = 0;
+ int lastHeight;
+
+ int lastWidth;
+
+ ArrayList<ChatMessageCell> drawTimeAfter = new ArrayList<>();
+ ArrayList<ChatMessageCell> drawNamesAfter = new ArrayList<>();
+ ArrayList<ChatMessageCell> drawCaptionAfter = new ArrayList<>();
+
+ Paint backgroundPaint;
+ int backgroundColor;
+
+ @Override
+ protected void drawList(Canvas blurCanvas, boolean top) {
+ float cilpTop = chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4);
+
+ for (int i = 0; i < chatListView.getChildCount(); i++) {
+ View child = chatListView.getChildAt(i);
+ if (top && child.getY() > cilpTop + AndroidUtilities.dp(40)) {
+ continue;
}
- } else if (maxVisibleId < 0) {
- int idx = messages.indexOf(maxVisibleMessageObject);
- if (idx >= 0) {
- for (int a = idx - 1; a >= 0; a--) {
- MessageObject object = messages.get(a);
- if (object.getId() > 0) {
- maxVisibleId = object.getId();
- break;
- }
+ if (!top && child.getY() + child.getMeasuredHeight() < AndroidUtilities.dp(203)) {
+ continue;
+ }
+
+ blurCanvas.save();
+ if (top) {
+ blurCanvas.translate(chatListView.getX() + child.getX(), chatListView.getY() + child.getY() - contentPanTranslation);
+ } else {
+ blurCanvas.translate(chatListView.getX() + child.getX(), chatListView.getTop() + child.getY());
+ }
+ if (child instanceof ChatMessageCell) {
+ ChatMessageCell cell = (ChatMessageCell) child;
+ cell.drawForBlur = true;
+ if (cell.drawBackgroundInParent()) {
+ cell.drawBackgroundInternal(blurCanvas, true);
}
- if (maxVisibleId < 0) {
- for (int a = idx + 1, N = messages.size(); a < N; a++) {
- MessageObject object = messages.get(a);
- if (object.getId() > 0) {
- maxVisibleId = object.getId();
- break;
- }
- }
+ child.draw(blurCanvas);
+ if (cell.hasOutboundsContent()) {
+ ((ChatMessageCell) child).drawOutboundsContent(blurCanvas);
}
+ cell.drawForBlur = false;
+ } else if (child instanceof ChatActionCell) {
+ child.draw(blurCanvas);
+ ((ChatActionCell) child).drawOutboundsContent(blurCanvas);
+ } else {
+ child.draw(blurCanvas);
}
- }
- currentPinnedMessageId = findClosest(pinnedMessageIds, forceNextPinnedMessageId != 0 ?
forceNextPinnedMessageId : maxVisibleId, currentPinnedMessageIndex); - if (!inMenuMode && !loadingPinnedMessagesList && !pinnedEndReached && !pinnedMessageIds.isEmpty() && currentPinnedMessageIndex[0] > pinnedMessageIds.size() - 2) { - getMediaDataController().loadPinnedMessages(dialog_id, pinnedMessageIds.get(pinnedMessageIds.size() - 1), 0); - loadingPinnedMessagesList = true; + blurCanvas.restore(); } } - getMessagesController().addToPollsQueue(dialog_id, pollsToCheck); - if (maxAdapterPosition >= 0 && minAdapterPosition >= 0) { - int from = minAdapterPosition - chatAdapter.messagesStartRow - 10; - int to = maxAdapterPosition - chatAdapter.messagesStartRow + 10; - if (from < 0) { - from = 0; - } - if (to > messages.size()) { - to = messages.size(); + + @Override + protected int getScrollOffset() { + return chatListView.computeVerticalScrollOffset(); + } + + @Override + protected float getBottomOffset() { + return chatListView.getBottom(); + } + + @Override + protected float getListTranslationY() { + return chatListView.getTranslationY(); + } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + adjustPanLayoutHelper.onAttach(); + chatActivityEnterView.setAdjustPanLayoutHelper(adjustPanLayoutHelper); + MessageObject messageObject = MediaController.getInstance().getPlayingMessageObject(); + if (messageObject != null && (messageObject.isRoundVideo() || messageObject.isVideo()) && messageObject.eventId == 0 && messageObject.getDialogId() == dialog_id) { + MediaController.getInstance().setTextureView(createTextureView(false), aspectRatioFrameLayout, videoPlayerContainer, true); } - reactionsToCheck.clear(); - extendedMediaToCheck.clear(); - for (int i = from; i < to; i++) { - MessageObject messageObject = messages.get(i); - if (threadMessageObject != messageObject && messageObject.getId() > 0 && messageObject.messageOwner.action == null && (currentTime - messageObject.reactionsLastCheckTime) > 15000L) { - messageObject.reactionsLastCheckTime = currentTime; - reactionsToCheck.add(messageObject); - } - if (threadMessageObject != messageObject && messageObject.getId() > 0 && messageObject.hasExtendedMediaPreview() && (currentTime - messageObject.extendedMediaLastCheckTime) > 30000L) { - messageObject.extendedMediaLastCheckTime = currentTime; - extendedMediaToCheck.add(messageObject); - } + if (pullingDownDrawable != null) { + pullingDownDrawable.onAttach(); } - getMessagesController().loadReactionsForMessages(dialog_id, reactionsToCheck); - getMessagesController().loadExtendedMediaForMessages(dialog_id, extendedMediaToCheck); + emojiAnimationsOverlay.onAttachedToWindow(); } - if (videoPlayerContainer != null) { - if (!foundTextureViewMessage) { - MessageObject messageObject = MediaController.getInstance().getPlayingMessageObject(); - if (messageObject != null) { - if (checkTextureViewPosition && messageObject.isVideo()) { - MediaController.getInstance().cleanupPlayer(true, true); - } else { - videoPlayerContainer.setTranslationY(-AndroidUtilities.roundPlayingMessageSize - 100); - fragmentView.invalidate(); - if ((messageObject.isRoundVideo() || messageObject.isVideo()) && messageObject.eventId == 0 && checkTextureViewPosition && !chatListView.isFastScrollAnimationRunning()) { - MediaController.getInstance().setCurrentVideoVisible(false); - } - } - } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + adjustPanLayoutHelper.onDetach(); + if (pullingDownDrawable != null) { + pullingDownDrawable.onDetach(); + pullingDownDrawable 
= null; + } + emojiAnimationsOverlay.onDetachedFromWindow(); + AndroidUtilities.runOnUIThread(() -> { + ReactionsEffectOverlay.removeCurrent(true); + }); + } + + private float x, y; + private long pressTime; + + @Override + public boolean dispatchTouchEvent(MotionEvent ev) { + float expandY; + if (AndroidUtilities.isInMultiwindow || isInBubbleMode()) { + expandY = chatActivityEnterView.getEmojiView() != null ? chatActivityEnterView.getEmojiView().getY() : chatActivityEnterView.getY(); } else { - MediaController.getInstance().setCurrentVideoVisible(true); + expandY = chatActivityEnterView.getY(); } - } - if (minMessageChild != null) { - MessageObject messageObject; - if (minMessageChild instanceof ChatMessageCell) { - messageObject = ((ChatMessageCell) minMessageChild).getMessageObject(); + if ((scrimView != null && scrimView != actionBar.getBackButton()) || chatActivityEnterView != null && chatActivityEnterView.isStickersExpanded() && ev.getY() < expandY) { + return false; + } + + lastTouchY = ev.getY(); + TextSelectionHelper.TextSelectionOverlay selectionOverlay = textSelectionHelper.getOverlayView(getContext()); + ev.offsetLocation(-selectionOverlay.getX(), -selectionOverlay.getY()); + if (textSelectionHelper.isSelectionMode() && textSelectionHelper.getOverlayView(getContext()).onTouchEvent(ev)) { + return true; } else { - messageObject = ((ChatActionCell) minMessageChild).getMessageObject(); + ev.offsetLocation(selectionOverlay.getX(), selectionOverlay.getY()); } - floatingDateView.setCustomDate(messageObject.messageOwner.date, chatMode == MODE_SCHEDULED, true); - } - currentFloatingDateOnScreen = false; - currentFloatingTopIsNotMessage = !(minChild instanceof ChatMessageCell || minChild instanceof ChatActionCell); - if (minDateChild != null) { - boolean showFloatingView = false; - if (minDateChild.getY() > clipTop || currentFloatingTopIsNotMessage) { - if (minDateChild.getAlpha() != 1.0f) { - minDateChild.setAlpha(1.0f); - } - if (chatListView.getChildAdapterPosition(minDateChild) == chatAdapter.messagesStartRow + messages.size() - 1) { - if (minDateChild.getAlpha() != 1.0f) { - minDateChild.setAlpha(1.0f); - } - if (floatingDateAnimation != null) { - floatingDateAnimation.cancel(); - floatingDateAnimation = null; - } - floatingDateView.setTag(null); - floatingDateView.setAlpha(0); - currentFloatingDateOnScreen = false; + + if (selectionOverlay.checkOnTap(ev)) { + ev.setAction(MotionEvent.ACTION_CANCEL); + } + + if (ev.getAction() == MotionEvent.ACTION_DOWN && textSelectionHelper.isSelectionMode() && (ev.getY() < chatListView.getTop() || ev.getY() > chatListView.getBottom())) { + ev.offsetLocation(-selectionOverlay.getX(), -selectionOverlay.getY()); + if (textSelectionHelper.getOverlayView(getContext()).onTouchEvent(ev)) { + ev.offsetLocation(selectionOverlay.getX(), selectionOverlay.getY()); + return super.dispatchTouchEvent(ev); } else { - hideFloatingDateView(!currentFloatingTopIsNotMessage); - } - } else { - if (minDateChild.getAlpha() != 0.0f) { - minDateChild.setAlpha(0.0f); + return true; } - showFloatingView = true; } - float offset = minDateChild.getY() + minDateChild.getMeasuredHeight() - clipTop; - if (offset > floatingDateView.getMeasuredHeight() && offset < floatingDateView.getMeasuredHeight() * 2) { - if (chatListView.getChildAdapterPosition(minDateChild) == chatAdapter.messagesStartRow + messages.size() - 1) { - showFloatingView = false; - if (minDateChild.getAlpha() != 1.0f) { - minDateChild.setAlpha(1.0f); + + if (pinchToZoomHelper.isInOverlayMode()) { + return 
pinchToZoomHelper.onTouchEvent(ev); + } + + if (AvatarPreviewer.hasVisibleInstance()) { + AvatarPreviewer.getInstance().onTouchEvent(ev); + return true; + } + + boolean r = false; + if (isInPreviewMode() && allowExpandPreviewByClick) { + if (ev.getAction() == MotionEvent.ACTION_DOWN) { + boolean pressedOnPageDownButtons = false, pressedOnAvatar = false; + int[] off = new int[2]; + getLocationInWindow(off); + int[] pos = new int[2]; + if (pagedownButton != null) { + pagedownButton.getLocationInWindow(pos); + AndroidUtilities.rectTmp2.set(pos[0] - off[0], pos[1] - off[1], pos[0] - off[0] + pagedownButton.getMeasuredWidth(), pos[1] - off[1] + pagedownButton.getMeasuredHeight()); + if (AndroidUtilities.rectTmp2.contains((int) ev.getX(), (int) ev.getY())) { + pressedOnPageDownButtons = true; + } + } + if (avatarContainer != null && avatarContainer.getAvatarImageView() != null) { + BackupImageView avatar = avatarContainer.getAvatarImageView(); + avatar.getLocationInWindow(pos); + AndroidUtilities.rectTmp2.set(pos[0] - off[0], pos[1] - off[1], pos[0] - off[0] + avatar.getMeasuredWidth(), pos[1] - off[1] + avatar.getMeasuredHeight()); + if (AndroidUtilities.rectTmp2.contains((int) ev.getX(), (int) ev.getY())) { + pressedOnAvatar = true; + } + } + if (!pressedOnPageDownButtons && mentiondownButton != null) { + mentiondownButton.getLocationInWindow(pos); + AndroidUtilities.rectTmp2.set(pos[0] - off[0], pos[1] - off[1], pos[0] - off[0] + mentiondownButton.getMeasuredWidth(), pos[1] - off[1] + mentiondownButton.getMeasuredHeight()); + if (AndroidUtilities.rectTmp2.contains((int) ev.getX(), (int) ev.getY())) { + pressedOnPageDownButtons = true; + } + } + if (!pressedOnPageDownButtons && !pressedOnAvatar) { + x = ev.getX(); + y = ev.getY(); + pressTime = SystemClock.elapsedRealtime(); + r = true; + } else { + pressTime = -1; } - if (floatingDateAnimation != null) { - floatingDateAnimation.cancel(); - floatingDateAnimation = null; + } else if (ev.getAction() == MotionEvent.ACTION_UP) { + if (MathUtils.distance(x, y, ev.getX(), ev.getY()) < AndroidUtilities.dp(6) && SystemClock.elapsedRealtime() - pressTime <= ViewConfiguration.getTapTimeout()) { + parentLayout.expandPreviewFragment(); + ev.setAction(MotionEvent.ACTION_CANCEL); } - floatingDateView.setTag(null); - floatingDateView.setAlpha(0); - } else { - floatingDateViewOffset = -floatingDateView.getMeasuredHeight() * 2 + offset; + pressTime = -1; + } else if (ev.getAction() == MotionEvent.ACTION_CANCEL) { + pressTime = -1; } - } else { - floatingDateViewOffset = 0; } - if (showFloatingView) { - if (floatingDateAnimation != null) { - floatingDateAnimation.cancel(); - floatingDateAnimation = null; - } - if (floatingDateView.getTag() == null) { - floatingDateView.setTag(1); - } - if (floatingDateView.getAlpha() != 1.0f) { - floatingDateView.setAlpha(1.0f); - } - currentFloatingDateOnScreen = true; + + return super.dispatchTouchEvent(ev) || r; + } + + @Override + protected void onDraw(Canvas canvas) { + if (getTag(BlurBehindDrawable.TAG_DRAWING_AS_BACKGROUND) != null) { + return; } - } else { - hideFloatingDateView(true); - floatingDateViewOffset = 0; + if (getTag(BlurBehindDrawable.TAG_DRAWING_AS_BACKGROUND) == null && (instantCameraView != null && instantCameraView.blurFullyDrawing() || (blurredView != null && blurredView.fullyDrawing() && blurredView.getTag() != null))) { + return; + } + super.onDraw(canvas); } - if (isThreadChat()) { - if (previousThreadMessageVisible != threadMessageVisible) { - updatePinnedMessageView(openAnimationStartTime != 0 && 
SystemClock.elapsedRealtime() >= openAnimationStartTime + 150); + + @Override + protected boolean drawChild(Canvas canvas, View child, long drawingTime) { + if ((scrimView != null || messageEnterTransitionContainer.isRunning()) && (child == pagedownButton || child == mentiondownButton || child == floatingDateView || child == fireworksOverlay || child == reactionsMentiondownButton || child == gifHintTextView || child == emojiHintTextView || child == undoView || child == topUndoView)) { + return false; } - } else { - if (currentPinnedMessageId != 0) { - MessageObject object = pinnedMessageObjects.get(currentPinnedMessageId); - if (object == null) { - object = messagesDict[0].get(currentPinnedMessageId); - } - if (object == null) { - if (loadingPinnedMessages.indexOfKey(currentPinnedMessageId) < 0) { - loadingPinnedMessages.put(currentPinnedMessageId, true); - ArrayList ids = new ArrayList<>(); - ids.add(currentPinnedMessageId); - getMediaDataController().loadPinnedMessages(dialog_id, ChatObject.isChannel(currentChat) ? currentChat.id : 0, ids, true); - } - currentPinnedMessageId = previousPinnedMessageId; - } - } else if (previousPinnedMessageId != 0 && !pinnedMessageIds.isEmpty()) { - currentPinnedMessageId = previousPinnedMessageId; + if (child == fragmentContextView && fragmentContextView.isCallStyle()) { + return true; } - boolean animated = (fromPullingDownTransition && fragmentView.getVisibility() == View.VISIBLE) || (openAnimationStartTime != 0 && SystemClock.elapsedRealtime() >= openAnimationStartTime + 150); - if (previousPinnedMessageId != currentPinnedMessageId) { - int animateToNext; - if (previousPinnedMessageId == 0) { - animateToNext = 0; - } else if (previousPinnedMessageId > currentPinnedMessageId) { - animateToNext = 1; + if (child == undoView && PhotoViewer.getInstance().isVisible()) { + return true; + } + if (toPullingDownTransition && child == chatListView) { + return true; + } + if (switchingFromTopics && child == actionBar) { + return true; + } + if (getTag(BlurBehindDrawable.TAG_DRAWING_AS_BACKGROUND) != null) { + boolean needBlur; + if (((int) getTag(BlurBehindDrawable.TAG_DRAWING_AS_BACKGROUND)) == BlurBehindDrawable.STATIC_CONTENT) { + needBlur = child == actionBar || child == fragmentContextView || child == pinnedMessageView; } else { - animateToNext = 2; + needBlur = child == chatListView || child == chatActivityEnterView || chatActivityEnterView.isPopupView(child); + } + if (!needBlur) { + return false; + } + } else if (getTag(BlurBehindDrawable.TAG_DRAWING_AS_BACKGROUND) == null && (instantCameraView != null && instantCameraView.blurFullyDrawing() || (blurredView != null && blurredView.fullyDrawing() && blurredView.getTag() != null))) { + boolean needBlur = child == actionBar || child == chatListView || child == pinnedMessageView || child == fragmentContextView; + if (needBlur) { + return false; } - - updatePinnedMessageView(animated, animateToNext); - } else { - updatePinnedListButton(animated); } - } - if (floatingDateView != null) { - floatingDateView.setTranslationY(chatListView.getTranslationY() + chatListViewPaddingTop + floatingDateViewOffset - AndroidUtilities.dp(4)); - } - invalidateChatListViewTopPadding(); - if (!firstLoading && !paused && !inPreviewMode && chatMode == 0 && !getMessagesController().ignoreSetOnline) { - int scheduledRead = 0; - if ((maxPositiveUnreadId != Integer.MIN_VALUE || maxNegativeUnreadId != Integer.MAX_VALUE)) { - int counterDecrement = 0; - for (int a = 0; a < messages.size(); a++) { - MessageObject messageObject = 
messages.get(a); - int id = messageObject.getId(); - if (maxPositiveUnreadId != Integer.MIN_VALUE) { - if (id > 0 && id <= maxPositiveUnreadId && (messageObject.messageOwner.from_scheduled && id > currentReadMaxId || messageObject.isUnread() && !messageObject.isOut())) { - if (messageObject.messageOwner.from_scheduled) { - scheduledRead++; - } else { - messageObject.setIsRead(); - } - counterDecrement++; - } - } - if (maxNegativeUnreadId != Integer.MAX_VALUE) { - if (id < 0 && id >= maxNegativeUnreadId && messageObject.isUnread()) { - messageObject.setIsRead(); - counterDecrement++; - } + boolean result; + MessageObject messageObject = MediaController.getInstance().getPlayingMessageObject(); + boolean isRoundVideo = false; + boolean isVideo = messageObject != null && messageObject.eventId == 0 && ((isRoundVideo = messageObject.isRoundVideo()) || messageObject.isVideo()); + if (child == videoPlayerContainer) { + canvas.save(); + float transitionOffset = 0; + if (pullingDownAnimateProgress != 0) { + transitionOffset = (chatListView.getMeasuredHeight() - pullingDownOffset) * pullingDownAnimateProgress; + } + canvas.translate(0, -pullingDownOffset - transitionOffset); + if (messageObject != null && messageObject.type == MessageObject.TYPE_ROUND_VIDEO) { + if (Theme.chat_roundVideoShadow != null && aspectRatioFrameLayout.isDrawingReady()) { + int x = (int) child.getX() - AndroidUtilities.dp(3); + int y = (int) child.getY() - AndroidUtilities.dp(2); + canvas.save(); + canvas.scale(videoPlayerContainer.getScaleX(), videoPlayerContainer.getScaleY(), child.getX(), child.getY()); + Theme.chat_roundVideoShadow.setAlpha(255); + Theme.chat_roundVideoShadow.setBounds(x, y, x + AndroidUtilities.roundPlayingMessageSize + AndroidUtilities.dp(6), y + AndroidUtilities.roundPlayingMessageSize + AndroidUtilities.dp(6)); + Theme.chat_roundVideoShadow.draw(canvas); + canvas.restore(); } - } - if (forwardEndReached[0] && maxPositiveUnreadId == minMessageId[0] || maxNegativeUnreadId == minMessageId[0]) { - newUnreadMessageCount = 0; + result = super.drawChild(canvas, child, drawingTime); } else { - newUnreadMessageCount -= counterDecrement; - if (newUnreadMessageCount < 0) { - newUnreadMessageCount = 0; + if (child.getTag() == null) { + float oldTranslation = child.getTranslationY(); + child.setTranslationY(-AndroidUtilities.dp(1000)); + result = super.drawChild(canvas, child, drawingTime); + child.setTranslationY(oldTranslation); + } else { + result = false; } } - if (inLayout) { - AndroidUtilities.runOnUIThread(this::inlineUpdate1); - } else { - inlineUpdate1(); - } - getMessagesController().markDialogAsRead(dialog_id, maxPositiveUnreadId, maxNegativeUnreadId, maxUnreadDate, false, threadId, counterDecrement, maxPositiveUnreadId == minMessageId[0] || maxNegativeUnreadId == minMessageId[0], scheduledRead); - firstUnreadSent = true; - } else if (!firstUnreadSent && currentEncryptedChat == null) { - if (chatLayoutManager.findFirstVisibleItemPosition() == 0) { - newUnreadMessageCount = 0; - if (inLayout) { - AndroidUtilities.runOnUIThread(this::inlineUpdate2); - } else { - inlineUpdate2(); + canvas.restore(); + } else { + result = super.drawChild(canvas, child, drawingTime); + if (isVideo && child == chatListView && messageObject.type != MessageObject.TYPE_ROUND_VIDEO && videoPlayerContainer != null && videoPlayerContainer.getTag() != null) { + canvas.save(); + float transitionOffset = 0; + if (pullingDownAnimateProgress != 0) { + transitionOffset = (chatListView.getMeasuredHeight() - pullingDownOffset) * 
pullingDownAnimateProgress; } - getMessagesController().markDialogAsRead(dialog_id, minMessageId[0], minMessageId[0], maxDate[0], false, threadId, 0, true, scheduledRead); - if (isTopic) { - getMessagesStorage().updateRepliesMaxReadId(replyOriginalChat.id, replyOriginalMessageId, Math.max(maxPositiveUnreadId, replyMaxReadId), 0, true); + canvas.translate(0, -pullingDownOffset - transitionOffset + pullingBottomOffset); + super.drawChild(canvas, videoPlayerContainer, drawingTime); + if (drawLaterRoundProgressCell != null) { + canvas.save(); + canvas.translate(drawLaterRoundProgressCell.getX(), drawLaterRoundProgressCell.getTop() + chatListView.getY()); + if (isRoundVideo) { + drawLaterRoundProgressCell.drawRoundProgress(canvas); + invalidate(); + drawLaterRoundProgressCell.invalidate(); + } else { + drawLaterRoundProgressCell.drawOverlays(canvas); + if (drawLaterRoundProgressCell.needDrawTime()) { + drawLaterRoundProgressCell.drawTime(canvas, drawLaterRoundProgressCell.getAlpha(), true); + } + } + canvas.restore(); } - firstUnreadSent = true; + canvas.restore(); } } - if (threadId != 0 && maxPositiveUnreadId > 0 && replyMaxReadId != maxPositiveUnreadId) { - replyMaxReadId = maxPositiveUnreadId; - getMessagesStorage().updateRepliesMaxReadId(replyOriginalChat.id, replyOriginalMessageId, replyMaxReadId, newUnreadMessageCount, true); - if (!isTopic) { - getNotificationCenter().postNotificationName(NotificationCenter.commentsRead, replyOriginalChat.id, replyOriginalMessageId, replyMaxReadId); - } + if (child == actionBar && parentLayout != null) { + parentLayout.drawHeaderShadow(canvas, actionBar.getVisibility() == VISIBLE ? (int) actionBar.getTranslationY() + actionBar.getMeasuredHeight() + (inPreviewMode && Build.VERSION.SDK_INT >= 21 ? AndroidUtilities.statusBarHeight : 0) : 0); } + return result; } - } - - private void inlineUpdate1() { - if (prevSetUnreadCount != newUnreadMessageCount) { - prevSetUnreadCount = newUnreadMessageCount; - pagedownButtonCounter.setCount(newUnreadMessageCount, openAnimationEnded); - } - } - private void inlineUpdate2() { - if (prevSetUnreadCount != newUnreadMessageCount) { - prevSetUnreadCount = newUnreadMessageCount; - pagedownButtonCounter.setCount(newUnreadMessageCount, true); + @Override + protected boolean isActionBarVisible() { + return actionBar.getVisibility() == VISIBLE; + } + + private void drawChildElement(Canvas canvas, float listTop, ChatMessageCell cell, int type) { + canvas.save(); + float canvasOffsetX = chatListView.getLeft() + cell.getLeft(); + float canvasOffsetY = chatListView.getY() + cell.getY(); + float alpha = cell.shouldDrawAlphaLayer() ? 
cell.getAlpha() : 1f; + canvas.clipRect(chatListView.getLeft(), listTop, chatListView.getRight(), chatListView.getY() + chatListView.getMeasuredHeight() - blurredViewBottomOffset); + canvas.translate(canvasOffsetX, canvasOffsetY); + cell.setInvalidatesParent(true); + if (type == 0) { + cell.drawTime(canvas, alpha, true); + } else if (type == 1) { + cell.drawNamesLayout(canvas, alpha); + } else { + cell.drawCaptionLayout(canvas, cell.getCurrentPosition() != null && (cell.getCurrentPosition().flags & MessageObject.POSITION_FLAG_LEFT) == 0, alpha); + } + cell.setInvalidatesParent(false); + canvas.restore(); } - } - private void toggleMute(boolean instant) { - boolean muted = getMessagesController().isDialogMuted(dialog_id, getTopicId()); - if (!muted) { - if (instant) { - getNotificationsController().muteDialog(dialog_id, getTopicId(), true); - } else { - BottomSheet alert = AlertsCreator.createMuteAlert(this, dialog_id, getTopicId(), themeDelegate); - alert.setCalcMandatoryInsets(isKeyboardVisible()); - showDialog(alert); + @Override + protected void dispatchDraw(Canvas canvas) { + chatActivityEnterView.checkAnimation(); + updateChatListViewTopPadding(); + if (invalidateMessagesVisiblePart || (chatListItemAnimator != null && chatListItemAnimator.isRunning())) { + invalidateMessagesVisiblePart = false; + updateMessagesVisiblePart(false); } - } else { - getNotificationsController().muteDialog(dialog_id, getTopicId(), false); - if (!instant) { - BulletinFactory.createMuteBulletin(this, false, themeDelegate).show(); + updateTextureViewPosition(false, false); + updatePagedownButtonsPosition(); + int restoreToCount = -1; + if (switchingFromTopics) { + restoreToCount = canvas.saveLayerAlpha(0, actionBar.getBottom(), getMeasuredWidth(), getMeasuredHeight(), (int) (255 * switchingFromTopicsProgress), Canvas.ALL_SAVE_FLAG); + float s = 0.8f + 0.2f * switchingFromTopicsProgress; + canvas.scale(s, s, getMeasuredWidth() / 2f, getMeasuredHeight() / 2f); + } + super.dispatchDraw(canvas); + if (fragmentContextView != null && fragmentContextView.isCallStyle()) { + float alpha = (blurredView != null && blurredView.getVisibility() == View.VISIBLE) ? 
1f - blurredView.getAlpha() : 1f; + if (alpha > 0) { + if (alpha == 1f) { + canvas.save(); + } else { + canvas.saveLayerAlpha(fragmentContextView.getX(), fragmentContextView.getY() - AndroidUtilities.dp(30), fragmentContextView.getX() + fragmentContextView.getMeasuredWidth(), fragmentContextView.getY() + fragmentContextView.getMeasuredHeight(), (int) (255 * alpha), Canvas.ALL_SAVE_FLAG); + } + canvas.translate(fragmentContextView.getX(), fragmentContextView.getY()); + fragmentContextView.setDrawOverlay(true); + fragmentContextView.draw(canvas); + fragmentContextView.setDrawOverlay(false); + canvas.restore(); + } + fragmentView.invalidate(); } - } - } + if (chatActivityEnterView != null) { + if (chatActivityEnterView.panelAnimationInProgress() && chatActivityEnterView.getEmojiPadding() < bottomPanelTranslationY) { + int color = getThemedColor(Theme.key_chat_emojiPanelBackground); + if (backgroundPaint == null) { + backgroundPaint = new Paint(); + } + if (backgroundColor != color) { + backgroundPaint.setColor(backgroundColor = color); + } + int offset = (int) (bottomPanelTranslationY - chatActivityEnterView.getEmojiPadding()) + 3; + canvas.drawRect(0, getMeasuredHeight() - offset, getMeasuredWidth(), getMeasuredHeight(), backgroundPaint); + setFragmentPanTranslationOffset(chatActivityEnterView.getEmojiPadding()); + } + } + for (int a = 0, N = animateSendingViews.size(); a < N; a++) { + ChatMessageCell cell = animateSendingViews.get(a); + MessageObject.SendAnimationData data = cell.getMessageObject().sendAnimationData; + if (data != null) { + canvas.save(); + ImageReceiver imageReceiver = cell.getPhotoImage(); + canvas.translate(data.currentX, data.currentY); + canvas.scale(data.currentScale, data.currentScale); + canvas.translate(-imageReceiver.getCenterX(), -imageReceiver.getCenterY()); + cell.setTimeAlpha(data.timeAlpha); + animateSendingViews.get(a).draw(canvas); + canvas.restore(); + } + } + if (scrimViewReaction == null || scrimView == null) { + scrimPaint.setAlpha((int) (255 * scrimPaintAlpha * (scrimView != null ? 
scrimViewAlpha : 1f))); + canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); + } + if (scrimView != null) { + if (scrimView == reactionsMentiondownButton || scrimView == mentiondownButton) { + if (scrimViewAlpha < 1f) { + scrimPaint.setAlpha((int) (255 * scrimPaintAlpha * (1f - scrimViewAlpha))); + canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); + } + } else if (scrimView instanceof ImageView) { + int c = canvas.save(); + if (scrimViewAlpha < 1f) { + canvas.saveLayerAlpha(scrimView.getLeft(), scrimView.getTop(), scrimView.getRight(), scrimView.getBottom(), (int) (255 * scrimViewAlpha), Canvas.ALL_SAVE_FLAG); + } + canvas.translate(scrimView.getLeft(), scrimView.getTop()); + if (scrimView == actionBar.getBackButton()) { + int r = Math.max(scrimView.getMeasuredWidth(), scrimView.getMeasuredHeight()) / 2; + canvas.drawCircle(r, r, r * 0.7f, actionBarBackgroundPaint); + } + scrimView.draw(canvas); + canvas.restoreToCount(c); - private int getScrollOffsetForMessage(MessageObject object) { - return getScrollOffsetForMessage(getHeightForMessage(object)); - } - private int getScrollOffsetForMessage(int messageHeight) { - return (int) Math.max(-AndroidUtilities.dp(2), (chatListView.getMeasuredHeight() - blurredViewBottomOffset - chatListViewPaddingTop - messageHeight) / 2); - } + if (scrimViewAlpha < 1f) { + scrimPaint.setAlpha((int) (255 * scrimPaintAlpha * (1f - scrimViewAlpha))); + canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); + } + } else { + float listTop = chatListView.getY() + chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4); + MessageObject.GroupedMessages scrimGroup; + if (scrimView instanceof ChatMessageCell) { + scrimGroup = ((ChatMessageCell) scrimView).getCurrentMessagesGroup(); + } else { + scrimGroup = null; + } + boolean groupedBackgroundWasDraw = false; + int count = chatListView.getChildCount(); + for (int num = 0; num < count; num++) { + View child = chatListView.getChildAt(num); + MessageObject.GroupedMessages group; + MessageObject.GroupedMessagePosition position; + ChatMessageCell cell; + ChatActionCell actionCell; + if (child instanceof ChatMessageCell) { + cell = (ChatMessageCell) child; + actionCell = null; + group = cell.getCurrentMessagesGroup(); + position = cell.getCurrentPosition(); + } else { + position = null; + group = null; + cell = null; + actionCell = child instanceof ChatActionCell ? 
((ChatActionCell) child) : null; + } + if (child != scrimView && (scrimGroup == null || scrimGroup != group) || child.getAlpha() == 0f) { + continue; + } + if (!groupedBackgroundWasDraw && cell != null && scrimGroup != null && scrimGroup.transitionParams.cell != null) { + float x = scrimGroup.transitionParams.cell.getNonAnimationTranslationX(true); - private int getHeightForMessage(MessageObject object) { - if (getParentActivity() == null) { - return 0; - } - if (dummyMessageCell == null) { - dummyMessageCell = new ChatMessageCell(getParentActivity(), true, themeDelegate); - } - dummyMessageCell.isChat = currentChat != null || UserObject.isUserSelf(currentUser); - dummyMessageCell.isBot = currentUser != null && currentUser.bot; - dummyMessageCell.isMegagroup = ChatObject.isChannel(currentChat) && currentChat.megagroup; - return dummyMessageCell.computeHeight(object, groupedMessagesMap.get(object.getGroupId())); - } + float l = (scrimGroup.transitionParams.left + x + scrimGroup.transitionParams.offsetLeft); + float t = (scrimGroup.transitionParams.top + scrimGroup.transitionParams.offsetTop); + float r = (scrimGroup.transitionParams.right + x + scrimGroup.transitionParams.offsetRight); + float b = (scrimGroup.transitionParams.bottom + scrimGroup.transitionParams.offsetBottom); + + if (!scrimGroup.transitionParams.backgroundChangeBounds) { + t += scrimGroup.transitionParams.cell.getTranslationY(); + b += scrimGroup.transitionParams.cell.getTranslationY(); + } - private void startMessageUnselect() { - if (unselectRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(unselectRunnable); - } - unselectRunnable = () -> { - highlightMessageId = Integer.MAX_VALUE; - updateVisibleRows(); - unselectRunnable = null; - }; - AndroidUtilities.runOnUIThread(unselectRunnable, 1000); - } + if (t < chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(20)) { + t = chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(20); + } - private void removeSelectedMessageHighlight() { - if (unselectRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(unselectRunnable); - unselectRunnable = null; - } - highlightMessageId = Integer.MAX_VALUE; - } + if (b > chatListView.getMeasuredHeight() + AndroidUtilities.dp(20)) { + b = chatListView.getMeasuredHeight() + AndroidUtilities.dp(20); + } - private AlertDialog progressDialog; - private int nextScrollToMessageId; - private int nextScrollFromMessageId; - private boolean nextScrollSelect; - private int nextScrollLoadIndex; - private boolean nextScrollForce; - private int nextScrollForcePinnedMessageId; + boolean selected = true; + for (int a = 0, N = scrimGroup.messages.size(); a < N; a++) { + MessageObject object = scrimGroup.messages.get(a); + int index = object.getDialogId() == dialog_id ? 0 : 1; + if (selectedMessagesIds[index].indexOfKey(object.getId()) < 0) { + selected = false; + break; + } + } - public static final int PROGRESS_REPLY = 0; - public static final int PROGRESS_LINK = 1; - public static final int PROGRESS_INSTANT = 2; - public static final int PROGRESS_BOT_BUTTON = 3; + canvas.save(); + canvas.clipRect(0, listTop + (mentionContainer != null ? mentionContainer.clipTop() : 0), getMeasuredWidth(), chatListView.getY() + chatListView.getMeasuredHeight() - blurredViewBottomOffset - (mentionContainer != null ? 
mentionContainer.clipBottom() : 0)); + canvas.translate(0, chatListView.getY()); + scrimGroup.transitionParams.cell.drawBackground(canvas, (int) l, (int) t, (int) r, (int) b, scrimGroup.transitionParams.pinnedTop, scrimGroup.transitionParams.pinnedBotton, selected, contentView.getKeyboardHeight()); + canvas.restore(); + groupedBackgroundWasDraw = true; + } - private int progressDialogAtMessageId; - private int progressDialogAtMessageType; - private CharacterStyle progressDialogLinkSpan; - private String progressDialogBotButtonUrl; - private Browser.Progress progressDialogCurrent; - private void resetProgressDialogLoading() { - progressDialogLinkSpan = null; - progressDialogAtMessageId = 0; - progressDialogAtMessageType = -1; - progressDialogBotButtonUrl = null; - progressDialogCurrent = null; + if (cell != null && cell.getPhotoImage().isAnimationRunning()) { + invalidate(); + } - setPagedownLoading(false, true); - } + float viewClipLeft = chatListView.getLeft(); + float viewClipTop = listTop; + float viewClipRight = chatListView.getRight(); + float viewClipBottom = chatListView.getY() + chatListView.getMeasuredHeight() - blurredViewBottomOffset; - public static final boolean SCROLL_DEBUG_DELAY = false; - private boolean pinnedProgressIsShowing; - Runnable updatePinnedProgressRunnable; + if (mentionContainer != null) { + viewClipTop += mentionContainer.clipTop(); + viewClipBottom -= mentionContainer.clipBottom(); + } - public void scrollToMessageId(int id, int fromMessageId, boolean select, int loadIndex, boolean forceScroll, int forcePinnedMessageId) { - scrollToMessageId(id, fromMessageId, select, loadIndex, forceScroll, forcePinnedMessageId, null); - } + if (cell == null || !cell.getTransitionParams().animateBackgroundBoundsInner) { + viewClipLeft = Math.max(viewClipLeft, chatListView.getLeft() + child.getX()); + viewClipTop = Math.max(viewClipTop, chatListView.getY() + child.getY()); + viewClipRight = Math.min(viewClipRight, chatListView.getLeft() + child.getX() + child.getMeasuredWidth()); + viewClipBottom = Math.min(viewClipBottom, chatListView.getY() + child.getY() + child.getMeasuredHeight()); + } - public void scrollToMessageId(int id, int fromMessageId, boolean select, int loadIndex, boolean forceScroll, int forcePinnedMessageId, Runnable inCaseLoading) { - if (id == 0 || NotificationCenter.getInstance(currentAccount).isAnimationInProgress() || getParentActivity() == null) { - if (NotificationCenter.getInstance(currentAccount).isAnimationInProgress()) { - nextScrollToMessageId = id; - nextScrollFromMessageId = fromMessageId; - nextScrollSelect = select; - nextScrollLoadIndex = loadIndex; - nextScrollForce = forceScroll; - nextScrollForcePinnedMessageId = forcePinnedMessageId; - NotificationCenter.getInstance(currentAccount).doOnIdle(() -> { - if (nextScrollToMessageId != 0) { - scrollToMessageId(nextScrollToMessageId, nextScrollFromMessageId, nextScrollSelect, nextScrollLoadIndex, nextScrollForce, nextScrollForcePinnedMessageId); - nextScrollToMessageId = 0; + if (viewClipTop < viewClipBottom) { + if (child.getAlpha() != 1f) { + canvas.saveLayerAlpha(viewClipLeft, viewClipTop, viewClipRight, viewClipBottom, (int) (255 * child.getAlpha()), Canvas.ALL_SAVE_FLAG); + } else { + canvas.save(); + } + if (cell != null) { + cell.setInvalidatesParent(true); + cell.setScrimReaction(scrimViewReaction); + } + canvas.clipRect(viewClipLeft, viewClipTop, viewClipRight, viewClipBottom); + canvas.translate(chatListView.getLeft() + child.getX(), chatListView.getY() + child.getY()); + if (cell != 
null && scrimGroup == null && cell.drawBackgroundInParent()) { + cell.drawBackgroundInternal(canvas, true); + } + child.draw(canvas); + if (cell != null && cell.hasOutboundsContent()) { + cell.drawOutboundsContent(canvas); + } + if (actionCell != null) { + actionCell.drawOutboundsContent(canvas); + } + + canvas.restore(); + + if (cell != null) { + cell.setInvalidatesParent(false); + cell.setScrimReaction(null); + } + } + + if (position != null || (cell != null && cell.getTransitionParams().animateBackgroundBoundsInner)) { + if (position == null || position.last || position.minX == 0 && position.minY == 0) { + if (position == null || position.last) { + drawTimeAfter.add(cell); + } + if (position == null || (position.minX == 0 && position.minY == 0 && cell.hasNameLayout())) { + drawNamesAfter.add(cell); + } + } + if (position == null || (position.flags & MessageObject.POSITION_FLAG_BOTTOM) != 0) { + drawCaptionAfter.add(cell); + } + } + if (scrimViewReaction != null && cell != null) { + scrimPaint.setAlpha((int) (255 * scrimPaintAlpha * scrimViewAlpha)); + canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); + + if (viewClipTop < viewClipBottom) { + float alpha = child.getAlpha() * scrimViewAlpha; + if (alpha < 1f) { + canvas.saveLayerAlpha(viewClipLeft, viewClipTop, viewClipRight, viewClipBottom, (int) (255 * alpha), Canvas.ALL_SAVE_FLAG); + } else { + canvas.save(); + } + canvas.clipRect(viewClipLeft, viewClipTop, viewClipRight, viewClipBottom); + canvas.translate(chatListView.getLeft() + child.getX(), chatListView.getY() + child.getY()); + cell.drawScrimReaction(canvas, scrimViewReaction); + canvas.restore(); + } + } } - }); - } - return; - } - forceNextPinnedMessageId = Math.abs(forcePinnedMessageId); - forceScrollToFirst = forcePinnedMessageId > 0; - wasManualScroll = true; - MessageObject object = messagesDict[loadIndex].get(id); - boolean query = false; - int scrollDirection = RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UNSET; - int scrollFromIndex = 0; - if (fromMessageId != 0) { - boolean scrollDown = fromMessageId < id; - if (isSecretChat()) { - scrollDown = !scrollDown; - } - scrollDirection = scrollDown ? RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN : RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP; - } else if (messages.size() > 0) { - if (isThreadChat() && id == threadMessageId) { - scrollDirection = RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP; - } else { - int end = chatLayoutManager.findLastVisibleItemPosition(); - for (int i = chatLayoutManager.findFirstVisibleItemPosition(); i <= end; i++) { - if (i >= chatAdapter.messagesStartRow && i < chatAdapter.messagesEndRow) { - MessageObject messageObject = messages.get(i - chatAdapter.messagesStartRow); - if (messageObject.getId() == 0) { - continue; + int size = drawTimeAfter.size(); + if (size > 0) { + for (int a = 0; a < size; a++) { + drawChildElement(canvas, listTop, drawTimeAfter.get(a), 0); } - scrollFromIndex = i - chatAdapter.messagesStartRow; - boolean scrollDown = messageObject.getId() < id; - if (isSecretChat()) { - scrollDown = !scrollDown; + drawTimeAfter.clear(); + } + size = drawNamesAfter.size(); + if (size > 0) { + for (int a = 0; a < size; a++) { + drawChildElement(canvas, listTop, drawNamesAfter.get(a), 1); } - scrollDirection = scrollDown ? 
RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN : RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP; - break; + drawNamesAfter.clear(); + } + size = drawCaptionAfter.size(); + if (size > 0) { + for (int a = 0; a < size; a++) { + ChatMessageCell cell = drawCaptionAfter.get(a); + if (cell.getCurrentPosition() == null && !cell.getTransitionParams().animateBackgroundBoundsInner) { + continue; + } + drawChildElement(canvas, listTop, cell, 2); + } + drawCaptionAfter.clear(); } } + + if (scrimViewReaction == null && scrimViewAlpha < 1f) { + scrimPaint.setAlpha((int) (255 * scrimPaintAlpha * (1f - scrimViewAlpha))); + canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); + } } - } - chatScrollHelper.setScrollDirection(scrollDirection); - if (!SCROLL_DEBUG_DELAY && object != null) { - MessageObject.GroupedMessages groupedMessages = groupedMessagesMap.get(object.getGroupId()); - if (object.getGroupId() != 0 && groupedMessages != null) { - MessageObject primary = groupedMessages.findPrimaryMessageObject(); - if (primary != null) { - object = primary; + if (scrimView != null || messageEnterTransitionContainer.isRunning()) { + if (mentionContainer == null || mentionContainer.getVisibility() != View.VISIBLE) { + if (pagedownButton != null && pagedownButton.getTag() != null) { + super.drawChild(canvas, pagedownButton, SystemClock.uptimeMillis()); + } + if (mentiondownButton != null && mentiondownButton.getTag() != null) { + super.drawChild(canvas, mentiondownButton, SystemClock.uptimeMillis()); + } + if (reactionsMentiondownButton != null && reactionsMentiondownButton.getTag() != null) { + super.drawChild(canvas, reactionsMentiondownButton, SystemClock.uptimeMillis()); + } + } + if (floatingDateView != null && floatingDateView.getTag() != null) { + super.drawChild(canvas, floatingDateView, SystemClock.uptimeMillis()); + } + if (fireworksOverlay != null) { + super.drawChild(canvas, fireworksOverlay, SystemClock.uptimeMillis()); + } + if (gifHintTextView != null) { + super.drawChild(canvas, gifHintTextView, SystemClock.uptimeMillis()); + } + if (emojiHintTextView != null) { + super.drawChild(canvas, emojiHintTextView, SystemClock.uptimeMillis()); + } + if (undoView != null && undoView.getVisibility() == View.VISIBLE) { + super.drawChild(canvas, undoView, SystemClock.uptimeMillis()); + } + if (topUndoView != null && topUndoView.getVisibility() == View.VISIBLE) { + super.drawChild(canvas, topUndoView, SystemClock.uptimeMillis()); } } - int index = messages.indexOf(object); - if (index != -1) { - if (scrollFromIndex > 0) { - scrollDirection = scrollFromIndex > index ? 
RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN : RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP; - chatScrollHelper.setScrollDirection(scrollDirection); + if (fixedKeyboardHeight > 0 && keyboardHeight < AndroidUtilities.dp(20)) { + int color = getThemedColor(Theme.key_windowBackgroundWhite); + if (backgroundPaint == null) { + backgroundPaint = new Paint(); } - removeSelectedMessageHighlight(); - if (select) { - highlightMessageId = id; + if (backgroundColor != color) { + backgroundPaint.setColor(backgroundColor = color); + } + canvas.drawRect(0,getMeasuredHeight() - fixedKeyboardHeight, getMeasuredWidth(), getMeasuredHeight(), backgroundPaint); + } + if (pullingDownDrawable != null && pullingDownDrawable.needDrawBottomPanel()) { + int top, bottom; + if (chatActivityEnterView != null && chatActivityEnterView.getVisibility() == View.VISIBLE) { + top = chatActivityEnterView.getTop() + AndroidUtilities.dp2(2); + bottom = chatActivityEnterView.getBottom(); + } else { + top = bottomOverlayChat.getTop() + AndroidUtilities.dp2(2); + bottom = bottomOverlayChat.getBottom(); } + top -= (int) ((pullingDownAnimateToActivity == null ? 0 : pullingDownAnimateToActivity.pullingBottomOffset) * pullingDownAnimateProgress); + pullingDownDrawable.drawBottomPanel(canvas, top, bottom, getMeasuredWidth()); + } + if (pullingDownAnimateToActivity != null) { + canvas.saveLayerAlpha(0, 0, getMeasuredWidth(), getMeasuredHeight(), (int) (255 * pullingDownAnimateProgress), Canvas.ALL_SAVE_FLAG); + pullingDownAnimateToActivity.fragmentView.draw(canvas); + canvas.restore(); + } - chatAdapter.updateRowsSafe(); - int position = chatAdapter.messagesStartRow + messages.indexOf(object); + emojiAnimationsOverlay.draw(canvas); - updateVisibleRows(); - boolean found = false; - int count = chatListView.getChildCount(); - for (int a = 0; a < count; a++) { - View view = chatListView.getChildAt(a); - if (view instanceof ChatMessageCell) { - ChatMessageCell cell = (ChatMessageCell) view; - MessageObject messageObject = cell.getMessageObject(); - if (messageObject != null && messageObject.getId() == object.getId()) { - found = true; - view.sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_FOCUSED); - } - } else if (view instanceof ChatActionCell) { - ChatActionCell cell = (ChatActionCell) view; - MessageObject messageObject = cell.getMessageObject(); - if (messageObject != null && messageObject.getId() == object.getId()) { - found = true; - view.sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_FOCUSED); - } - } + if (restoreToCount >= 0) { + canvas.restore(); + } + if (switchingFromTopics) { + canvas.save(); + canvas.translate(actionBar.getX(), actionBar.getY()); + canvas.saveLayerAlpha(0, 0, actionBar.getWidth(), actionBar.getHeight(), (int) (255 * switchingFromTopicsProgress), Canvas.ALL_SAVE_FLAG); + actionBar.draw(canvas); + canvas.restore(); + canvas.restore(); + } + } - if (found) { - int yOffset = getScrollOffsetForMessage(view.getHeight()); - int scrollY = (int) (view.getTop() - chatListViewPaddingTop - yOffset); - int maxScrollOffset = chatListView.computeVerticalScrollRange() - chatListView.computeVerticalScrollOffset() - chatListView.computeVerticalScrollExtent(); - if (maxScrollOffset < 0) { - maxScrollOffset = 0; - } - if (scrollY > maxScrollOffset) { - scrollY = maxScrollOffset; - } - if (scrollY != 0) { - scrollByTouch = false; - chatListView.smoothScrollBy(0, scrollY); - chatListView.setOverScrollMode(RecyclerListView.OVER_SCROLL_NEVER); - } - break; + @Override + protected void onMeasure(int 
widthMeasureSpec, int heightMeasureSpec) { + int allHeight; + int widthSize = View.MeasureSpec.getSize(widthMeasureSpec); + int heightSize = allHeight = View.MeasureSpec.getSize(heightMeasureSpec); + + if (lastWidth != widthSize) { + globalIgnoreLayout = true; + lastWidth = widthMeasureSpec; + if (!inPreviewMode && currentUser != null && currentUser.self) { + SimpleTextView textView = avatarContainer.getTitleTextView(); + int textWidth = (int) textView.getPaint().measureText(textView.getText(), 0, textView.getText().length()); + if (widthSize - AndroidUtilities.dp(96 + 56) > textWidth + AndroidUtilities.dp(10)) { + showSearchAsIcon = !showAudioCallAsIcon; + } else { + showSearchAsIcon = false; } + } else { + showSearchAsIcon = false; } - if (!found) { - int yOffset = getScrollOffsetForMessage(object); - chatScrollHelperCallback.scrollTo = object; - chatScrollHelperCallback.lastBottom = false; - chatScrollHelperCallback.lastItemOffset = yOffset; - chatScrollHelperCallback.lastPadding = (int) chatListViewPaddingTop; - chatScrollHelper.setScrollDirection(scrollDirection); - chatScrollHelper.scrollToPosition(chatScrollHelperCallback.position = position, chatScrollHelperCallback.offset = yOffset, chatScrollHelperCallback.bottom = false, true); - canShowPagedownButton = true; - updatePagedownButtonVisibility(true); + if (showSearchAsIcon || showAudioCallAsIcon) { + if (avatarContainer != null && avatarContainer.getLayoutParams() != null) { + ((ViewGroup.MarginLayoutParams) avatarContainer.getLayoutParams()).rightMargin = AndroidUtilities.dp(96); + } + } else { + if (avatarContainer != null && avatarContainer.getLayoutParams() != null) { + ((ViewGroup.MarginLayoutParams) avatarContainer.getLayoutParams()).rightMargin = AndroidUtilities.dp(40); + } + } + if (showSearchAsIcon) { + if (!actionBar.isSearchFieldVisible() && searchIconItem != null) { + searchIconItem.setVisibility(View.VISIBLE); + } + if (headerItem != null) { + headerItem.hideSubItem(search); + } + } else { + if (headerItem != null) { + headerItem.showSubItem(search); + } + if (searchIconItem != null) { + searchIconItem.setVisibility(View.GONE); + } + } + if (!actionBar.isSearchFieldVisible() && audioCallIconItem != null) { + audioCallIconItem.setVisibility((showAudioCallAsIcon && !showSearchAsIcon) ? 
View.VISIBLE : View.GONE); + } + if (headerItem != null) { + TLRPC.UserFull userInfo = getCurrentUserInfo(); + if (showAudioCallAsIcon) { + headerItem.hideSubItem(call); + } else if (userInfo != null && userInfo.phone_calls_available) { + headerItem.showSubItem(call, true); + } } - } else { - query = true; + globalIgnoreLayout = false; } - } else { - query = true; - } - if (query) { - if (isThreadChat() && id == threadMessageId) { - scrollToThreadMessage = true; - id = 1; + setMeasuredDimension(widthSize, heightSize); + heightSize -= getPaddingTop(); + + measureChildWithMargins(actionBar, widthMeasureSpec, 0, heightMeasureSpec, 0); + int actionBarHeight = actionBar.getMeasuredHeight(); + if (actionBar.getVisibility() == VISIBLE) { + heightSize -= actionBarHeight; } - if (progressDialog != null) { - progressDialog.dismiss(); + int keyboardHeightOld = keyboardHeight + chatEmojiViewPadding; + boolean keyboardVisibleOld = keyboardHeight + chatEmojiViewPadding >= AndroidUtilities.dp(20); + if (lastHeight != allHeight) { + measureKeyboardHeight(); } - - showPinnedProgress(forceNextPinnedMessageId != 0); - - if (inCaseLoading != null) { - inCaseLoading.run(); + int keyboardSize = getKeyboardHeight(); + if (fixedKeyboardHeight > 0 && keyboardSize <= AndroidUtilities.dp(20)) { + chatEmojiViewPadding = fixedKeyboardHeight; } else { - progressDialog = new AlertDialog(getParentActivity(), AlertDialog.ALERT_TYPE_SPINNER, themeDelegate); - progressDialog.setOnShowListener(dialogInterface -> showPinnedProgress(false)); - progressDialog.setOnCancelListener(postponedScrollCancelListener); - progressDialog.showDelayed(400); + if (keyboardSize <= AndroidUtilities.dp(20)) { + chatEmojiViewPadding = chatActivityEnterView.isPopupShowing() ? chatActivityEnterView.getEmojiPadding() : 0; + } else { + chatEmojiViewPadding = 0; + } } + setEmojiKeyboardHeight(chatEmojiViewPadding); - waitingForLoad.clear(); - removeSelectedMessageHighlight(); - scrollToMessagePosition = -10000; - startLoadFromMessageId = id; - showScrollToMessageError = !forceScroll; - if (id == createUnreadMessageAfterId) { - createUnreadMessageAfterIdLoading = true; - } - postponedScrollIsCanceled = false; - waitingForLoad.add(lastLoadIndex); - postponedScrollToLastMessageQueryIndex = lastLoadIndex; - postponedScrollMinMessageId = minMessageId[0]; - postponedScrollMessageId = id; - AndroidUtilities.runOnUIThread(() -> { - getMessagesController().loadMessages(loadIndex == 0 ? dialog_id : mergeDialogId, 0, false, ((isThreadChat() && !isTopic) || AndroidUtilities.isTablet()) ? 30 : 20, startLoadFromMessageId, 0, true, 0, classGuid, 3, 0, chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++, isTopic); - }, SCROLL_DEBUG_DELAY ? 
7500 : 0); - } else { - View child = chatListView.getChildAt(0); - if (child != null && child.getTop() <= 0) { - showFloatingDateView(false); + boolean keyboardVisible = keyboardHeight + chatEmojiViewPadding >= AndroidUtilities.dp(20); + boolean waitingChatListItemAnimator = false; + if (MediaController.getInstance().getPlayingMessageObject() != null && MediaController.getInstance().getPlayingMessageObject().isRoundVideo() && keyboardVisibleOld != keyboardVisible) { + for (int i = 0; i < chatListView.getChildCount(); i++) { + View child = chatListView.getChildAt(i); + if (child instanceof ChatMessageCell) { + ChatMessageCell cell = (ChatMessageCell) child; + MessageObject messageObject = cell.getMessageObject(); + if (messageObject.isRoundVideo() && MediaController.getInstance().isPlayingMessage(messageObject)) { + int p = chatListView.getChildAdapterPosition(child); + if (p >= 0) { + chatLayoutManager.scrollToPositionWithOffset(p, (int) ((chatListView.getMeasuredHeight() - chatListViewPaddingTop - blurredViewBottomOffset + (keyboardHeight + chatEmojiViewPadding - keyboardHeightOld) - (keyboardVisible ? AndroidUtilities.roundMessageSize : AndroidUtilities.roundPlayingMessageSize)) / 2), false); + chatAdapter.notifyItemChanged(p); + adjustPanLayoutHelper.delayAnimation(); + waitingChatListItemAnimator = true; + break; + } + } + } + } } - } - returnToMessageId = fromMessageId; - if (NekoConfig.rememberAllBackMessages.Bool() && fromMessageId > 0) - returnToMessageIdsStack.push(returnToMessageId); - returnToLoadIndex = loadIndex; - needSelectFromMessageId = select; - } - private void showPinnedProgress(boolean show) { - if (show) { - if (updatePinnedProgressRunnable == null) { - updatePinnedProgressRunnable = () -> { - pinnedProgressIsShowing = true; - updatePinnedListButton(true); - }; - AndroidUtilities.runOnUIThread(updatePinnedProgressRunnable, 100); + if (!waitingChatListItemAnimator) { + chatActivityEnterView.runEmojiPanelAnimation(); } - } else { - if (updatePinnedProgressRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(updatePinnedProgressRunnable); + + int childCount = getChildCount(); + measureChildWithMargins(chatActivityEnterView, widthMeasureSpec, 0, heightMeasureSpec, 0); + + int listViewTopHeight; + if (inPreviewMode) { + inputFieldHeight = 0; + listViewTopHeight = 0; + } else { + inputFieldHeight = chatActivityEnterView.getMeasuredHeight(); + listViewTopHeight = AndroidUtilities.dp(49); } - updatePinnedProgressRunnable = null; - pinnedProgressIsShowing = false; - updatePinnedListButton(true); - } - } - private void updatePagedownButtonVisibility(boolean animated) { - if (pagedownButton == null) { - return; - } - boolean show = canShowPagedownButton && !textSelectionHelper.isSelectionMode() && !chatActivityEnterView.isRecordingAudioVideo(); - if (show) { - if (animated && (openAnimationStartTime == 0 || SystemClock.elapsedRealtime() < openAnimationStartTime + 150)) { - animated = false; + blurredViewTopOffset = 0; + blurredViewBottomOffset = 0; + if (SharedConfig.chatBlurEnabled()) { + blurredViewTopOffset = actionBarHeight; + blurredViewBottomOffset = AndroidUtilities.dp(203); } - pagedownButtonShowedByScroll = false; - if (pagedownButton.getTag() == null) { - if (pagedownButtonAnimation != null) { - pagedownButtonAnimation.removeAllListeners(); - pagedownButtonAnimation.cancel(); - pagedownButtonAnimation = null; + for (int i = 0; i < childCount; i++) { + View child = getChildAt(i); + + if (child == null || child.getVisibility() == GONE || child == 
chatActivityEnterView || child == actionBar) { + continue; } - pagedownButton.setTag(1); - if (animated) { - pagedownButton.setVisibility(View.VISIBLE); - pagedownButtonAnimation = ValueAnimator.ofFloat(pagedownButtonEnterProgress, 1f); - pagedownButtonAnimation.addUpdateListener(valueAnimator -> { - pagedownButtonEnterProgress = (float) valueAnimator.getAnimatedValue(); - contentView.invalidate(); - }); - pagedownButtonAnimation.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - pagedownButtonEnterProgress = 1f; - contentView.invalidate(); + if (child == backgroundView) { + int contentWidthSpec = View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY); + int contentHeightSpec = View.MeasureSpec.makeMeasureSpec(allHeight, View.MeasureSpec.EXACTLY); + child.measure(contentWidthSpec, contentHeightSpec); + } else if (child == blurredView) { + int h = allHeight; + if (keyboardSize > AndroidUtilities.dp(20) && getLayoutParams().height < 0) { + h += keyboardSize; + } + int contentWidthSpec = View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY); + int contentHeightSpec = View.MeasureSpec.makeMeasureSpec(h, View.MeasureSpec.EXACTLY); + child.measure(contentWidthSpec, contentHeightSpec); + } else if (child == chatListView) { + int contentWidthSpec = View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY); + int h = heightSize - listViewTopHeight - (inPreviewMode && Build.VERSION.SDK_INT >= 21 ? AndroidUtilities.statusBarHeight : 0) + blurredViewTopOffset + blurredViewBottomOffset; + if (keyboardSize > AndroidUtilities.dp(20) && getLayoutParams().height < 0) { + h += keyboardSize; + } + int contentHeightSpec = View.MeasureSpec.makeMeasureSpec(Math.max(AndroidUtilities.dp(10), h), View.MeasureSpec.EXACTLY); + child.measure(contentWidthSpec, contentHeightSpec); + } else if (child == progressView) { + int contentWidthSpec = View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY); + int contentHeightSpec = View.MeasureSpec.makeMeasureSpec(Math.max(AndroidUtilities.dp(10), heightSize - inputFieldHeight - (inPreviewMode && Build.VERSION.SDK_INT >= 21 ? AndroidUtilities.statusBarHeight : 0) + AndroidUtilities.dp(2 + (chatActivityEnterView.isTopViewVisible() ? 
48 : 0))), View.MeasureSpec.EXACTLY); + child.measure(contentWidthSpec, contentHeightSpec); + } else if (child == instantCameraView || child == overlayView) { + int contentWidthSpec = View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY); + int contentHeightSpec = View.MeasureSpec.makeMeasureSpec(allHeight - inputFieldHeight - chatEmojiViewPadding + AndroidUtilities.dp(3), View.MeasureSpec.EXACTLY); + child.measure(contentWidthSpec, contentHeightSpec); + } else if (child == emptyViewContainer) { + int contentWidthSpec = View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY); + int contentHeightSpec = View.MeasureSpec.makeMeasureSpec(heightSize, View.MeasureSpec.EXACTLY); + child.measure(contentWidthSpec, contentHeightSpec); + } else if (child == messagesSearchListView) { + int contentWidthSpec = View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY); + int contentHeightSpec = View.MeasureSpec.makeMeasureSpec(allHeight - actionBarHeight - AndroidUtilities.dp(48), View.MeasureSpec.EXACTLY); + child.measure(contentWidthSpec, contentHeightSpec); + } else if (chatActivityEnterView.isPopupView(child)) { + if (inBubbleMode) { + child.measure(View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY), View.MeasureSpec.makeMeasureSpec(heightSize - inputFieldHeight + actionBarHeight + getPaddingTop(), View.MeasureSpec.EXACTLY)); + } else if (AndroidUtilities.isInMultiwindow) { + if (AndroidUtilities.isTablet()) { + child.measure(View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY), View.MeasureSpec.makeMeasureSpec(Math.min(AndroidUtilities.dp(320), heightSize - inputFieldHeight + actionBarHeight - AndroidUtilities.statusBarHeight + getPaddingTop()), View.MeasureSpec.EXACTLY)); + } else { + child.measure(View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY), View.MeasureSpec.makeMeasureSpec(heightSize - inputFieldHeight + actionBarHeight - AndroidUtilities.statusBarHeight + getPaddingTop(), View.MeasureSpec.EXACTLY)); } - }); - pagedownButtonAnimation.setDuration(200); - pagedownButtonAnimation.start(); - } else { - pagedownButtonEnterProgress = 1f; - contentView.invalidate(); - pagedownButton.setVisibility(View.VISIBLE); - } - } - } else { - returnToMessageId = 0; - returnToMessageIdsStack.clear(); - newUnreadMessageCount = 0; - if (pagedownButton.getTag() != null) { - pagedownButton.setTag(null); - if (pagedownButtonAnimation != null) { - pagedownButtonAnimation.removeAllListeners(); - pagedownButtonAnimation.cancel(); - pagedownButtonAnimation = null; - } - if (animated) { - pagedownButton.setVisibility(View.VISIBLE); - pagedownButtonAnimation = ValueAnimator.ofFloat(pagedownButtonEnterProgress, 0); - pagedownButtonAnimation.addUpdateListener(valueAnimator -> { - pagedownButtonEnterProgress = (float) valueAnimator.getAnimatedValue(); - contentView.invalidate(); - }); - pagedownButtonAnimation.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - pagedownButtonEnterProgress = 0; - pagedownButton.setVisibility(View.INVISIBLE); - contentView.invalidate(); + } else { + child.measure(View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY), View.MeasureSpec.makeMeasureSpec(child.getLayoutParams().height, View.MeasureSpec.EXACTLY)); + } + } else if (child == mentionContainer) { + FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) mentionContainer.getLayoutParams(); + if (mentionContainer.getAdapter().isBannedInline()) { + 
child.measure(View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY), View.MeasureSpec.makeMeasureSpec(heightSize, View.MeasureSpec.AT_MOST)); + } else { + int height; + mentionContainer.setIgnoreLayout(true); + LinearLayoutManager layoutManager = mentionContainer.getCurrentLayoutManager(); + if (layoutManager instanceof ExtendedGridLayoutManager) { + int size = ((ExtendedGridLayoutManager) layoutManager).getRowsCount(widthSize); + int maxHeight = size * 102; + if (mentionContainer.getAdapter().isBotContext()) { + if (mentionContainer.getAdapter().getBotContextSwitch() != null || mentionContainer.getAdapter().getBotWebViewSwitch() != null) { + maxHeight += 34; + } + } + height = heightSize - chatActivityEnterView.getMeasuredHeight() + (maxHeight != 0 ? AndroidUtilities.dp(2) : 0); + int padding = Math.max(0, height - AndroidUtilities.dp(Math.min(maxHeight, 68 * 1.8f))); + } else { + int size = mentionContainer.getAdapter().getLastItemCount(); + int maxHeight = 0; + if (mentionContainer.getAdapter().isBotContext()) { + if (mentionContainer.getAdapter().getBotContextSwitch() != null || mentionContainer.getAdapter().getBotWebViewSwitch() != null) { + maxHeight += 36; + size -= 1; + } + maxHeight += size * 68; + } else { + maxHeight += size * 36; + } + height = heightSize - chatActivityEnterView.getMeasuredHeight() + (maxHeight != 0 ? AndroidUtilities.dp(2) : 0); } - }); - pagedownButtonAnimation.setDuration(200); - pagedownButtonAnimation.start(); + + layoutParams.height = height; + layoutParams.topMargin = 0; + + mentionContainer.setIgnoreLayout(false); + child.measure(View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY), View.MeasureSpec.makeMeasureSpec(layoutParams.height, View.MeasureSpec.EXACTLY)); + } + mentionContainer.setTranslationY(chatActivityEnterView.getAnimatedTop()); + } else if (child == textSelectionHelper.getOverlayView(getContext())) { + int contentWidthSpec = View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY); + int h = heightSize + blurredViewTopOffset; + if (keyboardSize > AndroidUtilities.dp(20) && getLayoutParams().height < 0) { + h += keyboardSize; + textSelectionHelper.setKeyboardSize(keyboardSize); + } else { + textSelectionHelper.setKeyboardSize(0); + } + child.measure(contentWidthSpec, View.MeasureSpec.makeMeasureSpec(h, View.MeasureSpec.EXACTLY)); + } else if (child == forwardingPreviewView) { + int contentWidthSpec = View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY); + int h = allHeight - AndroidUtilities.statusBarHeight; + if (keyboardSize > AndroidUtilities.dp(20) && getLayoutParams().height < 0) { + h += keyboardSize; + } + int contentHeightSpec = View.MeasureSpec.makeMeasureSpec(h, View.MeasureSpec.EXACTLY); + child.measure(contentWidthSpec, contentHeightSpec); } else { - pagedownButtonEnterProgress = 0; - pagedownButton.setVisibility(View.INVISIBLE); + measureChildWithMargins(child, widthMeasureSpec, 0, heightMeasureSpec, 0); } } + if (fixPaddingsInLayout) { + globalIgnoreLayout = true; + invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); + fixPaddingsInLayout = false; + chatListView.measure(View.MeasureSpec.makeMeasureSpec(chatListView.getMeasuredWidth(), View.MeasureSpec.EXACTLY), View.MeasureSpec.makeMeasureSpec(chatListView.getMeasuredHeight(), View.MeasureSpec.EXACTLY)); + globalIgnoreLayout = false; + } + if (scrollToPositionOnRecreate != -1) { + final int scrollTo = scrollToPositionOnRecreate; + AndroidUtilities.runOnUIThread(() -> 
chatLayoutManager.scrollToPositionWithOffset(scrollTo, scrollToOffsetOnRecreate)); + scrollToPositionOnRecreate = -1; + } + + updateBulletinLayout(); + + lastHeight = allHeight; } - } - private void showMentionDownButton(boolean show, boolean animated) { - if (mentiondownButton == null) { - return; + @Override + public void requestLayout() { + if (globalIgnoreLayout) { + return; + } + super.requestLayout(); } - if (show) { - if (mentiondownButton.getTag() == null) { - if (mentiondownButtonAnimation != null) { - mentiondownButtonAnimation.removeAllListeners(); - mentiondownButtonAnimation.cancel(); - mentiondownButtonAnimation = null; + + @Override + protected void onLayout(boolean changed, int l, int t, int r, int b) { + final int count = getChildCount(); + int keyboardSize = getKeyboardHeight(); + int paddingBottom; + + if (fixedKeyboardHeight > 0 && keyboardSize <= AndroidUtilities.dp(20)) { + paddingBottom = fixedKeyboardHeight; + } else { + paddingBottom = keyboardSize <= AndroidUtilities.dp(20) && !AndroidUtilities.isInMultiwindow && !inBubbleMode ? chatActivityEnterView.getEmojiPadding() : 0; + } + + for (int i = 0; i < count; i++) { + final View child = getChildAt(i); + if (child == null || child.getVisibility() == GONE) { + continue; } - if (animated) { - mentiondownButton.setVisibility(View.VISIBLE); - mentiondownButton.setTag(1); - mentiondownButtonAnimation = ValueAnimator.ofFloat(mentionsButtonEnterProgress, 1f); - mentiondownButtonAnimation.addUpdateListener(valueAnimator -> { - mentionsButtonEnterProgress = (float) valueAnimator.getAnimatedValue(); - contentView.invalidate(); - }); - mentiondownButtonAnimation.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - mentionsButtonEnterProgress = 1f; - contentView.invalidate(); - } - }); - mentiondownButtonAnimation.setDuration(200); - mentiondownButtonAnimation.start(); - } else { - mentionsButtonEnterProgress = 1f; - contentView.invalidate(); + final FrameLayout.LayoutParams lp = (FrameLayout.LayoutParams) child.getLayoutParams(); + + final int width = child.getMeasuredWidth(); + final int height = child.getMeasuredHeight(); + + int childLeft; + int childTop; + + int gravity = lp.gravity; + if (gravity == -1) { + gravity = Gravity.TOP | Gravity.LEFT; } - } - } else { - returnToMessageId = 0; - returnToMessageIdsStack.clear(); - if (mentiondownButton.getTag() != null) { - mentiondownButton.setTag(null); - if (mentiondownButtonAnimation != null) { - mentiondownButtonAnimation.removeAllListeners(); - mentiondownButtonAnimation.cancel(); - mentiondownButtonAnimation = null; + + final int absoluteGravity = gravity & Gravity.HORIZONTAL_GRAVITY_MASK; + final int verticalGravity = gravity & Gravity.VERTICAL_GRAVITY_MASK; + + switch (absoluteGravity & Gravity.HORIZONTAL_GRAVITY_MASK) { + case Gravity.CENTER_HORIZONTAL: + childLeft = (r - l - width) / 2 + lp.leftMargin - lp.rightMargin; + break; + case Gravity.RIGHT: + childLeft = r - width - lp.rightMargin; + break; + case Gravity.LEFT: + default: + childLeft = lp.leftMargin; } - if (animated) { - mentiondownButtonAnimation = ValueAnimator.ofFloat(mentionsButtonEnterProgress, 0f); - mentiondownButtonAnimation.addUpdateListener(valueAnimator -> { - mentionsButtonEnterProgress = (float) valueAnimator.getAnimatedValue(); - contentView.invalidate(); - }); - mentiondownButtonAnimation.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - mentionsButtonEnterProgress = 0f; - 
mentiondownButton.setVisibility(View.INVISIBLE); - contentView.invalidate(); + + switch (verticalGravity) { + case Gravity.TOP: + childTop = lp.topMargin + getPaddingTop(); + if (child != actionBar && actionBar.getVisibility() == VISIBLE) { + childTop += actionBar.getMeasuredHeight(); + if (inPreviewMode && Build.VERSION.SDK_INT >= 21) { + childTop += AndroidUtilities.statusBarHeight; + } } - }); - mentiondownButtonAnimation.setDuration(200); - mentiondownButtonAnimation.start(); - } else { - mentionsButtonEnterProgress = 0f; - mentiondownButton.setVisibility(View.INVISIBLE); - } + break; + case Gravity.CENTER_VERTICAL: + childTop = ((b - paddingBottom) - t - height) / 2 + lp.topMargin - lp.bottomMargin; + break; + case Gravity.BOTTOM: + childTop = ((b - paddingBottom) - t) - height - lp.bottomMargin; + break; + default: + childTop = lp.topMargin; + } + + if (child == blurredView || child == backgroundView) { + childTop = 0; + } else if (child instanceof HintView || child instanceof ChecksHintView) { + childTop = 0; + } else if (child == mentionContainer) { + childTop -= chatActivityEnterView.getMeasuredHeight() - AndroidUtilities.dp(2); + mentionContainer.setTranslationY(chatActivityEnterView.getAnimatedTop()); + } else if (child == pagedownButton || child == mentiondownButton || child == reactionsMentiondownButton) { + if (!inPreviewMode) { + childTop -= chatActivityEnterView.getMeasuredHeight(); + } + } else if (child == emptyViewContainer) { + childTop -= inputFieldHeight / 2 - (actionBar.getVisibility() == VISIBLE ? actionBar.getMeasuredHeight() / 2 : 0); + } else if (chatActivityEnterView.isPopupView(child)) { + if (AndroidUtilities.isInMultiwindow || inBubbleMode) { + childTop = chatActivityEnterView.getTop() - child.getMeasuredHeight() + AndroidUtilities.dp(1); + } else { + childTop = chatActivityEnterView.getBottom(); + } + } else if (child == gifHintTextView || child == voiceHintTextView || child == mediaBanTooltip || child == emojiHintTextView) { + childTop -= inputFieldHeight; + } else if (child == chatListView || child == floatingDateView || child == infoTopView) { + childTop -= blurredViewTopOffset; + if (!inPreviewMode) { + childTop -= (inputFieldHeight - AndroidUtilities.dp(51)); + } + childTop -= paddingBottom; + if (keyboardSize > AndroidUtilities.dp(20) && getLayoutParams().height < 0) { + childTop -= keyboardSize; + } + } else if (child == progressView) { + if (chatActivityEnterView.isTopViewVisible()) { + childTop -= AndroidUtilities.dp(48); + } + } else if (child == actionBar) { + if (inPreviewMode && Build.VERSION.SDK_INT >= 21) { + childTop += AndroidUtilities.statusBarHeight; + } + childTop -= getPaddingTop(); + } else if (child == videoPlayerContainer) { + childTop = actionBar.getMeasuredHeight(); + childTop -= paddingBottom; + if (keyboardSize > AndroidUtilities.dp(20) && getLayoutParams().height < 0) { + childTop -= keyboardSize; + } + } else if (child == instantCameraView || child == overlayView || child == animatingImageView) { + childTop = 0; + } else if (child == textSelectionHelper.getOverlayView(getContext())) { + childTop -= paddingBottom; + if (keyboardSize > AndroidUtilities.dp(20) && getLayoutParams().height < 0) { + childTop -= keyboardSize; + } + childTop -= blurredViewTopOffset; + } else if (chatActivityEnterView != null && child == chatActivityEnterView.botCommandsMenuContainer) { + childTop -= inputFieldHeight; + } else if (child == forwardingPreviewView) { + childTop = AndroidUtilities.statusBarHeight; + } + child.layout(childLeft, childTop, 
childLeft + width, childTop + height); + } + + invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); + updateTextureViewPosition(false, false); + + notifyHeightChanged(); + } + + private void setNonNoveTranslation(float y) { + contentView.setTranslationY(y); + actionBar.setTranslationY(0); + emptyViewContainer.setTranslationY(0); + progressView.setTranslationY(0); + contentPanTranslation = 0; + contentView.setBackgroundTranslation(0); + if (instantCameraView != null) { + instantCameraView.onPanTranslationUpdate(0); } + if (blurredView != null) { + blurredView.drawable.onPanTranslationUpdate(0); + } + setFragmentPanTranslationOffset(0); + invalidateChatListViewTopPadding(); } - } + + @Override + public void setPadding(int left, int top, int right, int bottom) { + contentPaddingTop = top; + invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); + } + + @Override + public boolean dispatchKeyEvent(KeyEvent event) { + if (event.getKeyCode() == KeyEvent.KEYCODE_BACK && event.getAction() == 1 && forwardingPreviewView != null && forwardingPreviewView.isShowing()) { + forwardingPreviewView.dismiss(true); + return true; + } + return super.dispatchKeyEvent(event); + } + + protected Drawable getNewDrawable() { + Drawable drawable = themeDelegate.getWallpaperDrawable(); + return drawable != null ? drawable : super.getNewDrawable(); + } + }; private void updateSecretStatus() { if (bottomOverlay == null) { return; } boolean hideKeyboard = false; - if (currentChat != null && !ChatObject.canSendMessages(currentChat) && !currentChat.gigagroup && (!ChatObject.isChannel(currentChat) || currentChat.megagroup)) { + if (currentChat != null && !ChatObject.canSendMessages(currentChat) && !ChatObject.canSendAnyMedia(currentChat) && !currentChat.gigagroup && (!ChatObject.isChannel(currentChat) || currentChat.megagroup)) { if (currentChat.default_banned_rights != null && currentChat.default_banned_rights.send_messages) { bottomOverlayText.setText(LocaleController.getString("GlobalSendMessageRestricted", R.string.GlobalSendMessageRestricted)); } else if (AndroidUtilities.isBannedForever(currentChat.banned_rights)) { @@ -14381,6 +14351,7 @@ private void updateSecretStatus() { suggestEmojiPanel.forceClose(); } } else { + createEmptyView(); if (currentEncryptedChat == null || bigEmptyView == null) { bottomOverlay.setVisibility(View.INVISIBLE); if (suggestEmojiPanel != null && chatActivityEnterView != null && chatActivityEnterView.hasText()) { @@ -14798,33 +14769,40 @@ private void addToSelectedMessages(MessageObject messageObject, boolean outside, hideActionMode(); updatePinnedMessageView(true); } else { + createActionMode(); ActionBarMenuItem replyItem = actionBar.createActionMode().getItem(nkactionbarbtn_reply); if (replyItem != null) { - replyItem.setVisibility(chatMode != MODE_PINNED && ChatObject.canSendMessages(currentChat) && - selectedCount == 1 ? View.VISIBLE : View.GONE); + replyItem.setVisibility(!shrinkActionBarItems &&chatMode != MODE_PINNED && + ChatObject.canSendMessages(currentChat) && selectedCount == 1 ? 
View.VISIBLE : View.GONE); } + ActionBarMenuSubItem saveItem = actionModeOtherItem.getSubItem(save_to); ActionBarMenuItem copyItem = actionBar.createActionMode().getItem(copy); + ActionBarMenuItem starItem = actionBar.createActionMode().getItem(star); ActionBarMenuItem editItem = actionBar.createActionMode().getItem(edit); ActionBarMenuItem forwardItem = actionBar.createActionMode().getItem(forward); + ActionBarMenuItem deleteItem = actionBar.createActionMode().getItem(delete); ActionBarMenuItem selectItem = actionBar.createActionMode().getItem(nkactionbarbtn_selectBetween); ActionBarMenuItem combineMessageItem = actionBar.createActionMode().getItem(combine_message); - ActionBarMenuSubItem starItem = actionModeOtherItem.getSubItem(star); - ActionBarMenuSubItem saveItem = actionModeOtherItem.getSubItem(save_to); - - if (forwardItem != null) { - forwardItem.setVisibility(canForwardMessagesCount > 0 && !(canEditMessagesCount == 1 && selectedCount == 1) ? View.VISIBLE : View.GONE); - actionModeOtherItem.setSubItemVisibility(forward, canForwardMessagesCount == 1 && canEditMessagesCount == 1 && selectedCount == 1); - } + ActionBarMenuSubItem nkbtnSaveMessage = actionModeOtherItem.getSubItem(nkbtn_savemessage); if (chatMode == MODE_SCHEDULED) { if (NaConfig.INSTANCE.getShowNoQuoteForward().Bool()) { actionModeOtherItem.setSubItemVisibility(nkbtn_forward_noquote, false); } + actionModeOtherItem.setSubItemVisibility(forward, false); actionModeOtherItem.setSubItemVisibility(nkbtn_savemessage, false); } + if (NekoConfig.showBottomActionsWhenSelecting.Bool()) + createBottomMessagesActionButtons(); boolean noforwards = getMessagesController().isChatNoForwards(currentChat) || hasSelectedNoforwardsMessage(); + + boolean canForward = cantForwardMessagesCount != 0 && !noforwards; + actionModeOtherItem.setSubItemVisibility(nkbtn_savemessage, canForward); + actionModeOtherItem.setSubItemVisibility(forward, canForward); + actionModeOtherItem.setSubItemVisibility(nkbtn_forward_noquote, canForward); + if (prevCantForwardCount == 0 && cantForwardMessagesCount != 0 || prevCantForwardCount != 0 && cantForwardMessagesCount == 0) { forwardButtonAnimation = new AnimatorSet(); ArrayList animators = new ArrayList<>(); @@ -14861,7 +14839,9 @@ public void onAnimationEnd(Animator animation) { forwardItem.setEnabled(cantForwardMessagesCount == 0 || noforwards); forwardItem.setAlpha(cantForwardMessagesCount == 0 ? 1.0f : 0.5f); if (noforwards) { - if (forwardItem.getBackground() != null) forwardButton.setBackground(null); + if (forwardButton != null && forwardItem.getBackground() != null) { + forwardButton.setBackground(null); + } } else if (forwardItem.getBackground() == null) { forwardItem.setBackground(Theme.createSelectorDrawable(getThemedColor(Theme.key_actionBarActionModeDefaultSelector), 3)); } @@ -14876,17 +14856,25 @@ public void onAnimationEnd(Animator animation) { forwardButton.setAlpha(cantForwardMessagesCount == 0 ? 1.0f : 0.5f); } } - if (saveItem != null) { - actionModeOtherItem.setSubItemVisibility(save_to, ((canSaveMusicCount > 0 && canSaveDocumentsCount == 0) || (canSaveMusicCount == 0 && canSaveDocumentsCount > 0)) && cantSaveMessagesCount == 0); + saveItem.setVisibility(((canSaveMusicCount > 0 && canSaveDocumentsCount == 0) || (canSaveMusicCount == 0 && canSaveDocumentsCount > 0)) && cantSaveMessagesCount == 0 ? View.VISIBLE : View.GONE); saveItem.setText(canSaveMusicCount > 0 ? 
LocaleController.getString("SaveToMusic", R.string.SaveToMusic) : LocaleController.getString("SaveToDownloads", R.string.SaveToDownloads)); } - int copyVisible = copyItem.getVisibility(); + int copyVisible = View.GONE, starVisible = View.GONE, newCopyVisible = View.GONE, newStarVisible = View.GONE; boolean noforwardsOverride = noforwards && !NekoXConfig.disableFlagSecure && !NaConfig.INSTANCE.getForceCopy().Bool(); - copyItem.setVisibility(!noforwardsOverride && selectedMessagesCanCopyIds[0].size() + selectedMessagesCanCopyIds[1].size() != 0 ? View.VISIBLE : View.GONE); - combineMessageItem.setVisibility(selectedMessagesCanCopyIds[0].size() + selectedMessagesCanCopyIds[1].size() != 0 ? NaConfig.INSTANCE.getCombineMessage().Int() != 2 ? View.VISIBLE : View.GONE : View.GONE); - actionModeOtherItem.setSubItemVisibility(star, getMediaDataController().canAddStickerToFavorites() && (selectedMessagesCanStarIds[0].size() + selectedMessagesCanStarIds[1].size()) == selectedCount); + if (copyItem != null) { + copyVisible = copyItem.getVisibility(); + copyItem.setVisibility(!noforwardsOverride && selectedMessagesCanCopyIds[0].size() + selectedMessagesCanCopyIds[1].size() != 0 ? View.VISIBLE : View.GONE); + newCopyVisible = copyItem.getVisibility(); + } + + if (starItem != null) { + starVisible = starItem.getVisibility(); + starItem.setVisibility(getMediaDataController().canAddStickerToFavorites() && (selectedMessagesCanStarIds[0].size() + selectedMessagesCanStarIds[1].size()) == selectedCount ? View.VISIBLE : View.GONE); + newStarVisible = starItem.getVisibility(); + } + if (selectItem != null) { ArrayList ids = new ArrayList<>(); for (int a = 1; a >= 0; a--) { @@ -14914,18 +14902,19 @@ public void onAnimationEnd(Animator animation) { } selectItem.setVisibility(selectable ? View.VISIBLE : View.GONE); } - int newCopyVisible = copyItem.getVisibility(); - actionBar.createActionMode().getItem(delete).setVisibility(cantDeleteMessagesCount == 0 ? View.VISIBLE : View.GONE); + + if (deleteItem != null) { + deleteItem.setVisibility(cantDeleteMessagesCount == 0 ? View.VISIBLE : View.GONE); + } hasUnfavedSelected = false; boolean starChanged = false; if (starItem != null) { - int starVisible = starItem.getVisibility(); - actionModeOtherItem.setSubItemVisibility(star, selectedMessagesCanStarIds[0].size() + selectedMessagesCanStarIds[1].size() != 0); + starItem.setVisibility(selectedMessagesCanStarIds[0].size() + selectedMessagesCanStarIds[1].size() != 0 ? View.VISIBLE : View.GONE); starChanged = starVisible != starItem.getVisibility(); for (int a = 0; a < 2; a++) { for (int b = 0; b < selectedMessagesCanStarIds[a].size(); b++) { MessageObject msg = selectedMessagesCanStarIds[a].valueAt(b); - if (!getMediaDataController().isStickerInFavorites(msg.getDocument())) { + if (msg != null && !getMediaDataController().isStickerInFavorites(msg.getDocument())) { hasUnfavedSelected = true; break; } @@ -14935,7 +14924,11 @@ public void onAnimationEnd(Animator animation) { } } } + if (starItem != null) { + starItem.setIcon(hasUnfavedSelected ? R.drawable.msg_fave : R.drawable.msg_unfave); + } final int newEditVisibility = canEditMessagesCount == 1 && selectedCount == 1 ? 
View.VISIBLE : View.GONE; + createBottomMessagesActionButtons(); if (replyButton != null) { boolean allowChatActions = true; if (bottomOverlayChat != null && bottomOverlayChat.getVisibility() == View.VISIBLE && !bottomOverlayChatWaitsReply || @@ -14998,7 +14991,7 @@ public void onAnimationEnd(Animator animation) { replyButtonAnimation.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { - if (replyButtonAnimation != null && replyButtonAnimation.equals(animation)) { + if (replyButtonAnimation != null && replyButtonAnimation.equals(animation) && replyButton != null) { if (newVisibilityFinal == View.GONE) { replyButton.setVisibility(View.GONE); } @@ -15030,44 +15023,48 @@ public void onAnimationCancel(Animator animation) { if (editButtonAnimation != null) { editButtonAnimation.cancel(); } - editButtonAnimation = new AnimatorSet(); - editItem.setPivotX(AndroidUtilities.dp(54)); - editItem.setPivotX(AndroidUtilities.dp(54)); - if (newEditVisibility == View.VISIBLE) { + if (shrinkActionBarItems) { editItem.setVisibility(newEditVisibility); - editButtonAnimation.playTogether( - ObjectAnimator.ofFloat(editItem, View.ALPHA, 1.0f), - ObjectAnimator.ofFloat(editItem, View.SCALE_X, 1.0f) - ); } else { - editButtonAnimation.playTogether( - ObjectAnimator.ofFloat(editItem, View.ALPHA, 0.0f), - ObjectAnimator.ofFloat(editItem, View.SCALE_X, 0.0f) - ); - } - editButtonAnimation.setDuration(100); - editButtonAnimation.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - if (editButtonAnimation != null && editButtonAnimation.equals(animation)) { - if (newEditVisibility == View.GONE) { - editItem.setVisibility(View.GONE); + editButtonAnimation = new AnimatorSet(); + editItem.setPivotX(AndroidUtilities.dp(54)); + editItem.setPivotX(AndroidUtilities.dp(54)); + if (newEditVisibility == View.VISIBLE) { + editItem.setVisibility(newEditVisibility); + editButtonAnimation.playTogether( + ObjectAnimator.ofFloat(editItem, View.ALPHA, 1.0f), + ObjectAnimator.ofFloat(editItem, View.SCALE_X, 1.0f) + ); + } else { + editButtonAnimation.playTogether( + ObjectAnimator.ofFloat(editItem, View.ALPHA, 0.0f), + ObjectAnimator.ofFloat(editItem, View.SCALE_X, 0.0f) + ); + } + editButtonAnimation.setDuration(100); + editButtonAnimation.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (editButtonAnimation != null && editButtonAnimation.equals(animation)) { + if (newEditVisibility == View.GONE) { + editItem.setVisibility(View.GONE); + } } } - } - @Override - public void onAnimationCancel(Animator animation) { - if (editButtonAnimation != null && editButtonAnimation.equals(animation)) { - editButtonAnimation = null; + @Override + public void onAnimationCancel(Animator animation) { + if (editButtonAnimation != null && editButtonAnimation.equals(animation)) { + editButtonAnimation = null; + } } - } - }); - editButtonAnimation.start(); + }); + editButtonAnimation.start(); + } } } - actionModeOtherItem.setSubItemVisibility(nkbtn_translate, selectedMessagesCanCopyIds[0].size() + selectedMessagesCanCopyIds[1].size() > 0); + translateItem.setVisibility(selectedMessagesCanCopyIds[0].size() + selectedMessagesCanCopyIds[1].size() > 0); actionModeOtherItem.setSubItemVisibility(nkbtn_sharemessage, selectedMessagesCanCopyIds[0].size() + selectedMessagesCanCopyIds[1].size() > 0); boolean allowPin = false; @@ -15089,6 +15086,20 @@ public void onAnimationCancel(Animator animation) { 
actionModeOtherItem.setSubItemVisibility(nkbtn_unpin, allowPin); } } + updateSelectedMessageReactions(); + } + + private void updateSelectedMessageReactions() { + List selected = new ArrayList<>(); + SparseArray objs = selectedMessagesIds[0]; + for (int i = 0; i < objs.size(); i++) { + selected.add(objs.valueAt(i)); + } + objs = selectedMessagesIds[1]; + for (int i = 0; i < objs.size(); i++) { + selected.add(objs.valueAt(i)); + } + selectionReactionsOverlay.setSelectedMessages(selected); } private void processRowSelect(View view, boolean outside, float touchX, float touchY) { @@ -15116,7 +15127,7 @@ private void updateActionModeTitle() { if (!actionBar.isActionModeShowed()) { return; } - if (selectedMessagesIds[0].size() != 0 || selectedMessagesIds[1].size() != 0) { + if (selectedMessagesCountTextView != null && (selectedMessagesIds[0].size() != 0 || selectedMessagesIds[1].size() != 0)) { selectedMessagesCountTextView.setNumber(selectedMessagesIds[0].size() + selectedMessagesIds[1].size(), true); } } else { @@ -15247,14 +15258,17 @@ private void updateTitleIcons(boolean forceToggleMuted) { avatarContainer.setTitleIcons(currentEncryptedChat != null ? getThemedDrawable(Theme.key_drawable_lockIconDrawable) : null, rightIcon); if (!forceToggleMuted && muteItem != null) { if (isMuted) { - muteItem.getRightIcon().setVisibility(View.GONE); - muteItem.setTextAndIcon(LocaleController.getString("Unmute", R.string.Unmute), R.drawable.msg_mute); + muteItem.setRightIconVisibility(View.GONE); + muteItem.setText(LocaleController.getString("Unmute", R.string.Unmute)); + muteItem.setIcon(R.drawable.msg_mute); } else { - muteItem.getRightIcon().setVisibility(View.VISIBLE); + muteItem.setRightIconVisibility(View.VISIBLE); if (getMessagesController().isDialogNotificationsSoundEnabled(dialog_id, getTopicId())) { - muteItem.setTextAndIcon(LocaleController.getString("Mute", R.string.Mute), R.drawable.msg_unmute); + muteItem.setText(LocaleController.getString("Mute", R.string.Mute)); + muteItem.setIcon(R.drawable.msg_unmute); } else { - muteItem.setTextAndIcon(LocaleController.getString("Mute", R.string.Mute), R.drawable.msg_silent); + muteItem.setText(LocaleController.getString("Mute", R.string.Mute)); + muteItem.setIcon(R.drawable.msg_silent); } } } @@ -15442,7 +15456,9 @@ private void sendPhotosGroup(ArrayList entries, bool fillEditingMediaWithCaption(photos.get(0).caption, photos.get(0).entities); SendMessagesHelper.prepareSendingMedia(getAccountInstance(), photos, dialog_id, replyingMessageObject, getThreadMessage(), null, forceDocument, true, null, notify, scheduleDate, photos.get(0).updateStickersOrder); afterMessageSend(); - chatActivityEnterView.setFieldText(""); + if (chatActivityEnterView != null) { + chatActivityEnterView.setFieldText(""); + } } if (scheduleDate != 0) { if (scheduledMessagesCount == -1) { @@ -15580,7 +15596,7 @@ private PhotoViewer.PlaceProviderObject getPlaceForPhoto(MessageObject messageOb object.viewX = coords[0]; object.viewY = coords[1] - (Build.VERSION.SDK_INT >= 21 ? 0 : AndroidUtilities.statusBarHeight); object.parentView = chatListView; - object.animatingImageView = !SharedConfig.smoothKeyboard && pagedownButton != null && pagedownButton.getTag() != null && view instanceof ChatMessageCell ? 
animatingImageView : null; + object.animatingImageView = null; object.imageReceiver = imageReceiver; if (needPreview) { object.thumb = imageReceiver.getBitmapSafe(); @@ -15769,7 +15785,7 @@ public void restoreSelfArgs(Bundle args) { } private boolean isSkeletonVisible() { - if (justCreatedTopic || justCreatedChat || currentUser != null || !SharedConfig.animationsEnabled() || SharedConfig.getLiteMode().enabled()) { + if (justCreatedTopic || justCreatedChat || currentUser != null || chatListView == null || !SharedConfig.animationsEnabled() || !LiteMode.isEnabled(LiteMode.FLAGS_CHAT)) { return false; } int childHeight = 0; @@ -15798,6 +15814,9 @@ private boolean isSkeletonVisible() { if (wallpaper instanceof MotionBackgroundDrawable) { MotionBackgroundDrawable motion = (MotionBackgroundDrawable) wallpaper; if (((MotionBackgroundDrawable) wallpaper).isIndeterminateAnimation() != rotate) { + if (!rotate) { + motion.generateNextGradient(); + } motion.setIndeterminateAnimation(rotate); motion.setIndeterminateSpeedScale(rotate ? 1.5f : 1f); motion.updateAnimation(true); @@ -15890,6 +15909,7 @@ public void didReceivedNotification(int id, int account, final Object... args) { showProgressView(false); checkDispatchHideSkeletons(true); if (chatListView != null) { + createEmptyView(); if (!fragmentOpened) { chatListView.setAnimateEmptyView(false, 1); chatListView.setEmptyView(emptyViewContainer); @@ -15962,6 +15982,15 @@ public void didReceivedNotification(int id, int account, final Object... args) { int loaded_max_id = (Integer) args[12]; int loaded_mentions_count = chatWasReset ? 0 : (Integer) args[13]; + if (did == dialog_id && count > 0 && currentUser != null && (bottomOverlayStartButton != null && bottomOverlayStartButton.getVisibility() == View.VISIBLE)) { + if (!TextUtils.isEmpty(botUser)) { + getMessagesController().sendBotStart(currentUser, botUser); + + bottomOverlayChat.setVisibility(View.GONE); + chatActivityEnterView.setVisibility(View.VISIBLE); + } + } + if (loaded_mentions_count < 0) { loaded_mentions_count *= -1; hasAllMentionsLocal = false; @@ -16139,12 +16168,6 @@ public void didReceivedNotification(int id, int account, final Object... args) { } } firstLoading = false; - AndroidUtilities.runOnUIThread(() -> { - getNotificationCenter().runDelayedNotifications(); - resumeDelayedFragmentAnimation(); - AndroidUtilities.cancelRunOnUIThread(fragmentTransitionRunnable); - fragmentTransitionRunnable.run(); - }); } if (isThreadChat() && !isTopic && (load_type == 2 || load_type == 3) && !isCache) { @@ -16403,7 +16426,7 @@ public void didReceivedNotification(int id, int account, final Object... args) { calendar.set(Calendar.MINUTE, 0); dateMsg.date = (int) (calendar.getTimeInMillis() / 1000); MessageObject dateObj = new MessageObject(currentAccount, dateMsg, false, false); - dateObj.type = 10; + dateObj.type = MessageObject.TYPE_DATE; dateObj.contentType = 1; dateObj.isDateObject = true; dateObj.stableId = lastStableId++; @@ -16472,6 +16495,7 @@ public void didReceivedNotification(int id, int account, final Object... args) { } else { obj.stableId = lastStableId++; } + getMessagesController().getTranslateController().checkTranslation(obj, false); if (load_type == 1) { messages.add(0, obj); } else { @@ -16506,7 +16530,7 @@ public void didReceivedNotification(int id, int account, final Object... 
args) { dateMsg.message = ""; dateMsg.id = 0; MessageObject dateObj = new MessageObject(currentAccount, dateMsg, false, false); - dateObj.type = 6; + dateObj.type = MessageObject.TYPE_LOADING; dateObj.contentType = 2; dateObj.stableId = lastStableId++; messages.add(messages.size() - 1, dateObj); @@ -16543,7 +16567,7 @@ public void didReceivedNotification(int id, int account, final Object... args) { dateMsg.message = ""; dateMsg.id = 0; MessageObject dateObj = new MessageObject(currentAccount, dateMsg, false, false); - dateObj.type = 6; + dateObj.type = MessageObject.TYPE_LOADING; dateObj.contentType = 2; dateObj.stableId = lastStableId++; if (load_type == 1) { @@ -16756,13 +16780,16 @@ public void didReceivedNotification(int id, int account, final Object... args) { if (first) { if (chatListView != null) { - if (!fragmentBeginToShow) { - chatListView.setAnimateEmptyView(false, 0); - chatListView.setEmptyView(emptyViewContainer); - chatListView.setAnimateEmptyView(true, RecyclerListView.EMPTY_VIEW_ANIMATION_TYPE_ALPHA_SCALE); - } else { - chatListView.setEmptyView(emptyViewContainer); - } + AndroidUtilities.runOnUIThread(() -> { + createEmptyView(); + if (!fragmentBeginToShow) { + chatListView.setAnimateEmptyView(false, 0); + chatListView.setEmptyView(emptyViewContainer); + chatListView.setAnimateEmptyView(true, RecyclerListView.EMPTY_VIEW_ANIMATION_TYPE_ALPHA_SCALE); + } else { + chatListView.setEmptyView(emptyViewContainer); + } + }); } } } else { @@ -16834,6 +16861,7 @@ public void didReceivedNotification(int id, int account, final Object... args) { } else { showProgressView(false); } + if (newRowsCount == 0 && mergeDialogId != 0 && loadIndex == 0) { getNotificationCenter().updateAllowedNotifications(transitionAnimationIndex, new int[]{NotificationCenter.chatInfoDidLoad, NotificationCenter.groupCallUpdated, NotificationCenter.dialogsNeedReload, NotificationCenter.scheduledMessagesUpdated, NotificationCenter.closeChats, NotificationCenter.messagesDidLoad, NotificationCenter.botKeyboardDidLoad, NotificationCenter.userInfoDidLoad, NotificationCenter.pinnedInfoDidLoad, NotificationCenter.needDeleteDialog/*, NotificationCenter.botInfoDidLoad*/}); @@ -16901,6 +16929,16 @@ public void didReceivedNotification(int id, int account, final Object... args) { } } chatWasReset = false; + + if (isFirstLoading) { + AndroidUtilities.runOnUIThread(() -> { + resumeDelayedFragmentAnimation(); + + AndroidUtilities.cancelRunOnUIThread(fragmentTransitionRunnable); + fragmentTransitionRunnable.run(); + getNotificationCenter().runDelayedNotifications(); + }); + } } else if (id == NotificationCenter.invalidateMotionBackground) { if (chatListView != null) { chatListView.invalidateViews(); @@ -16934,9 +16972,11 @@ public void didReceivedNotification(int id, int account, final Object... args) { } if (chatActivityEnterView != null) { EditTextBoldCursor editText = chatActivityEnterView.getEditField(); - int color = editText.getCurrentTextColor(); - editText.setTextColor(0xffffffff); - editText.setTextColor(color); + if (editText != null) { + int color = editText.getCurrentTextColor(); + editText.setTextColor(0xffffffff); + editText.setTextColor(color); + } } if (pinnedMessageButton[0] != null) { pinnedMessageButton[0].invalidate(); @@ -16946,9 +16986,6 @@ public void didReceivedNotification(int id, int account, final Object... 
args) { } } else if (id == NotificationCenter.didUpdateConnectionState) { int state = ConnectionsManager.getInstance(account).getConnectionState(); - if (state == ConnectionsManager.ConnectionStateConnected) { - checkAutoDownloadMessages(false); - } } else if (id == NotificationCenter.chatOnlineCountDidLoad) { Long chatId = (Long) args[0]; if (chatInfo == null || currentChat == null || currentChat.id != chatId) { @@ -17007,18 +17044,8 @@ public void didReceivedNotification(int id, int account, final Object... args) { if (chatActivityEnterView != null) { chatActivityEnterView.setDialogId(dialog_id, currentAccount); } - - if (currentEncryptedChat != null && SharedConfig.passcodeHash.length() == 0 && !SharedConfig.allowScreenCapture && unregisterFlagSecurePasscode == null) { - unregisterFlagSecurePasscode = AndroidUtilities.registerFlagSecure(getParentActivity().getWindow()); - } - if (fwdChanged) { - boolean value = getMessagesController().isChatNoForwards(currentChat); - if (!value && unregisterFlagSecureNoforwards != null) { - unregisterFlagSecureNoforwards.run(); - unregisterFlagSecureNoforwards = null; - } else if (value && unregisterFlagSecureNoforwards == null) { - unregisterFlagSecureNoforwards = AndroidUtilities.registerFlagSecure(getParentActivity().getWindow()); - } + if (flagSecure != null) { + flagSecure.invalidate(); } } if (avatarContainer != null && updateSubtitle) { @@ -17080,7 +17107,7 @@ public void didReceivedNotification(int id, int account, final Object... args) { if (AndroidUtilities.isTablet() && parentLayout != null && parentLayout.getFragmentStack().size() > 1) { finishFragment(); } else { - removeSelfFromStack(); + removeSelfFromStack(true); } } } else if (id == NotificationCenter.commentsRead) { @@ -17388,6 +17415,7 @@ public void didReceivedNotification(int id, int account, final Object... args) { obj.messageOwner = newMsgObj; if (fwdHeader != null && newMsgObj.fwd_from != null && !TextUtils.isEmpty(newMsgObj.fwd_from.from_name)) { obj.messageOwner.fwd_from = fwdHeader; + obj.isOutOwnerCached = null; } obj.generateThumbs(true); obj.setType(); @@ -17611,6 +17639,7 @@ public void didReceivedNotification(int id, int account, final Object... args) { builder.setTopAnimationIsNew(true); if (reason == 0) { if (currentChat instanceof TLRPC.TL_channelForbidden) { + builder.setTitle(LocaleController.getString("ChannelCantOpenBannedByAdminTitle", R.string.ChannelCantOpenBannedByAdminTitle)); builder.setMessage(LocaleController.getString("ChannelCantOpenBannedByAdmin", R.string.ChannelCantOpenBannedByAdmin)); } else { builder.setTitle(LocaleController.getString(R.string.ChannelPrivate)); @@ -17735,6 +17764,7 @@ public void didReceivedNotification(int id, int account, final Object... args) { } } else if (id == NotificationCenter.messagePlayingDidStart) { MessageObject messageObject = (MessageObject) args[0]; + MessageObject oldPlayingObject = (MessageObject) args[1]; if (messageObject.eventId != 0) { return; } @@ -17743,7 +17773,19 @@ public void didReceivedNotification(int id, int account, final Object... 
args) { if ((messageObject.isRoundVideo() || messageObject.isVideo()) && fragmentView != null && fragmentView.getParent() != null) { if (!messageObject.isVoiceTranscriptionOpen()) { MediaController.getInstance().setTextureView(createTextureView(true), aspectRatioFrameLayout, videoPlayerContainer, true); - updateTextureViewPosition(true, true); + boolean needScroll = false; + if (oldPlayingObject != null) { + int index1 = messages.indexOf(oldPlayingObject); + int index2 = messages.indexOf(messageObject); + if (index1 >= 0 && index2 >= 0 && Math.abs(index1 - index2) < 5) { + needScroll = true; + } + } + updateTextureViewPosition(true, needScroll); + if (!needScroll) { + checkTextureViewPosition = true; + updateMessagesVisiblePart(false); + } } else { MediaController.getInstance().setTextureView(createTextureView(true), aspectRatioFrameLayout, videoPlayerContainer, true); } @@ -17994,6 +18036,9 @@ public void didReceivedNotification(int id, int account, final Object... args) { } }); } else if (id == NotificationCenter.didUpdateReactions) { + if (isInScheduleMode()) { + return; + } long did = (Long) args[0]; doOnIdle(() -> { int msgId = (Integer) args[1]; @@ -18406,7 +18451,8 @@ public void didReceivedNotification(int id, int account, final Object... args) { updateBotButtons(); } } else if (id == NotificationCenter.botKeyboardDidLoad) { - if (dialog_id == (Long) args[1]) { + MessagesStorage.TopicKey topicKey = (MessagesStorage.TopicKey) args[1]; + if (dialog_id == topicKey.dialogId && getTopicId() == topicKey.topicId) { TLRPC.Message message = (TLRPC.Message) args[0]; if (message != null && !userBlocked) { botButtons = new MessageObject(currentAccount, message, false, false); @@ -18650,6 +18696,7 @@ public void didReceivedNotification(int id, int account, final Object... args) { checkThemeEmoticon(); if (chatActivityEnterView != null) { chatActivityEnterView.checkChannelRights(); + chatActivityEnterView.updateGiftButton(true); } if (headerItem != null) { // showAudioCallAsIcon = userInfo.phone_calls_available && !inPreviewMode; @@ -18660,9 +18707,10 @@ public void didReceivedNotification(int id, int account, final Object... args) { if (userInfo.phone_calls_available) { if (showAudioCallAsIcon) { if (audioCallIconItem != null) { - if (openAnimationStartTime != 0 && audioCallIconItem.getVisibility() != View.VISIBLE) { - audioCallIconItem.setAlpha(0f); - audioCallIconItem.animate().alpha(1f).setDuration(160).setInterpolator(CubicBezierInterpolator.EASE_IN).setStartDelay(50).start(); + View item = audioCallIconItem.createView(); + if (openAnimationStartTime != 0 && item.getVisibility() != View.VISIBLE) { + item.setAlpha(0f); + item.animate().alpha(1f).setDuration(160).setInterpolator(CubicBezierInterpolator.EASE_IN).setStartDelay(50).start(); } audioCallIconItem.setVisibility(View.VISIBLE); } @@ -18718,6 +18766,10 @@ public void didReceivedNotification(int id, int account, final Object... args) { } preferences.edit().putBoolean("themehint", true).apply(); boolean deleteTheme = (Boolean) args[2]; + createUndoView(); + if (undoView == null) { + return; + } undoView.showWithAction(0, UndoView.ACTION_THEME_CHANGED, null, () -> { if (themeAccent != null) { Theme.ThemeAccent prevAccent = themeInfo.getAccent(false); @@ -18819,7 +18871,7 @@ public void didReceivedNotification(int id, int account, final Object... 
args) { } else if (id == NotificationCenter.dialogDeleted) { long did = (Long) args[0]; if (did == dialog_id) { - if (parentLayout != null && parentLayout.getFragmentStack().get(parentLayout.getFragmentStack().size() - 1) == this) { + if (parentLayout != null && parentLayout.getLastFragment() == this) { finishFragment(); } else { removeSelfFromStack(); @@ -18885,8 +18937,289 @@ public void didReceivedNotification(int id, int account, final Object... args) { actionBar.unreadBadgeSetCount(getMessagesStorage().getMainUnreadCount()); } } + } else if (id == NotificationCenter.dialogTranslate) { + final long dialogId = (long) args[0]; + if (getDialogId() != dialogId) { + return; + } + + updateTopPanel(true); + if (chatListView != null && chatAdapter != null) { + boolean updatedPinned = false; + ArrayList groupChecked = new ArrayList<>(); + for (int i = 0; i < chatListView.getChildCount(); ++i) { + View child = chatListView.getChildAt(i); + if (child instanceof ChatMessageCell) { + ChatMessageCell cell = (ChatMessageCell) child; + MessageObject messageObject = cell.getMessageObject(); + boolean update = false; + if (messageObject != null && messageObject.updateTranslation(false)) { + update = true; + + MessageObject pinnedMessageObject = pinnedMessageObjects.get(messageObject.getId()); + if (pinnedMessageObject != null) { + pinnedMessageObject.messageOwner.translatedText = messageObject.messageOwner.translatedText; + pinnedMessageObject.messageOwner.translatedToLanguage = messageObject.messageOwner.translatedToLanguage; + if (pinnedMessageObject.updateTranslation(currentPinnedMessageId == messageObject.getId())) { + updatedPinned = true; + } + } + } + MessageObject.GroupedMessages group = groupedMessagesMap.get(messageObject.getGroupId()); + if (group != null && !groupChecked.contains(group.groupId)) { + for (int j = 0; j < group.messages.size(); ++j) { + MessageObject groupMessageObject = group.messages.get(j); + if (groupMessageObject != null && groupMessageObject.updateTranslation(false)) { + update = true; + } + } + groupChecked.add(group.groupId); + } + if (messageObject != null && messageObject.replyMessageObject != null) { + MessageObject translatedReplyMessageObject = getMessagesController().getTranslateController().findReplyMessageObject(dialogId, messageObject.replyMessageObject.getId()); + if (translatedReplyMessageObject != null) { + messageObject.replyMessageObject.messageOwner.translatedText = translatedReplyMessageObject.messageOwner.translatedText; + messageObject.replyMessageObject.messageOwner.translatedToLanguage = translatedReplyMessageObject.messageOwner.translatedToLanguage; + if (messageObject.replyMessageObject.updateTranslation(true)) { + update = true; + } + } + } + + if (update) { + messageObject.forceUpdate = true; + cell.setMessageObject(messageObject, cell.getCurrentMessagesGroup(), cell.isPinnedBottom(), cell.isPinnedTop()); + if (group != null) { + if (chatListItemAnimator != null) { + chatListItemAnimator.groupWillChanged(group); + } + for (int j = 0; j < group.messages.size(); j++) { + group.messages.get(j).forceUpdate = true; + } + chatAdapter.notifyDataSetChanged(true); + } else { + chatAdapter.updateRowAtPosition(chatListView.getChildAdapterPosition(child)); + } + } else { + cell.invalidate(); + } + } + } + if (!updatedPinned) { + for (MessageObject pinnedMessageObject : pinnedMessageObjects.values()) { + if (pinnedMessageObject != null && pinnedMessageObject.updateTranslation(currentPinnedMessageId == pinnedMessageObject.getId())) { + updatedPinned = true; + 
} + } + } + if (updatedPinned) { + updatePinnedMessageView(true, 1); + } + } + checkTranslation(true); + updateTranslateItemVisibility(); + } else if (id == NotificationCenter.messageTranslated) { + MessageObject messageObject = (MessageObject) args[0]; + if (getDialogId() != messageObject.getDialogId()) { + return; + } + updateMessageTranslation(messageObject); + if (args.length > 1 && (boolean) args[1]) { + checkTranslation(true); + } + } else if (id == NotificationCenter.messageTranslating) { + MessageObject messageObject = (MessageObject) args[0]; + if (getDialogId() != messageObject.getDialogId()) { + return; + } + if (chatListView == null || chatAdapter == null) { + return; + } + for (int i = 0; i < chatListView.getChildCount(); ++i) { + View child = chatListView.getChildAt(i); + if (child instanceof ChatMessageCell) { + child.invalidate(); + } + } + } else if (id == NotificationCenter.dialogIsTranslatable) { + final long dialogId = (long) args[0]; + if (getDialogId() != dialogId) { + return; + } + + updateTopPanel(true); + updateTranslateItemVisibility(); + } + } + + private boolean updateMessageTranslation(MessageObject messageObject) { + if (messageObject == null || messageObject.messageOwner == null) { + return false; + } + boolean updated = false; + for (MessageObject pinnedMessageObject : pinnedMessageObjects.values()) { + if (pinnedMessageObject != null && pinnedMessageObject.getId() == messageObject.getId()) { + pinnedMessageObject.messageOwner.translatedText = messageObject.messageOwner.translatedText; + pinnedMessageObject.messageOwner.translatedToLanguage = messageObject.messageOwner.translatedToLanguage; + if (pinnedMessageObject.updateTranslation(true)) { + updatePinnedMessageView(true, 1); + updated = true; + } + } + } + if (chatListView == null) { + return updated; + } + ArrayList groupChecked = new ArrayList<>(); + for (int i = 0; i < chatListView.getChildCount(); ++i) { + View child = chatListView.getChildAt(i); + if (child instanceof ChatMessageCell) { + ChatMessageCell cell = (ChatMessageCell) child; + MessageObject cellMessageObject = cell.getMessageObject(); + if (cellMessageObject == null) { + continue; + } + boolean update = false; + if (cellMessageObject.getId() == messageObject.getId()) { + cellMessageObject.messageOwner.translatedText = messageObject.messageOwner.translatedText; + cellMessageObject.messageOwner.translatedToLanguage = messageObject.messageOwner.translatedToLanguage; + if (cellMessageObject.updateTranslation(false)) { + update = true; + ArrayList dependentMessages = replyMessageOwners.get(cellMessageObject.getId()); + if (dependentMessages != null) { + updateMessagesReplyTranslation(dependentMessages, messageObject); + } + } + } + MessageObject.GroupedMessages group = groupedMessagesMap.get(messageObject.getGroupId()); + if (group != null && !groupChecked.contains(group.groupId)) { + for (int j = 0; j < group.messages.size(); ++j) { + MessageObject groupMessageObject = group.messages.get(j); + if (groupMessageObject != null && groupMessageObject.updateTranslation(false)) { + update = true; + } + } + groupChecked.add(group.groupId); + } + if (cellMessageObject.replyMessageObject != null && cellMessageObject.replyMessageObject.getId() == messageObject.getId() && cellMessageObject.replyMessageObject.getDialogId() == messageObject.getDialogId()) { + cellMessageObject.replyMessageObject.messageOwner.translatedText = messageObject.messageOwner.translatedText; + cellMessageObject.replyMessageObject.messageOwner.translatedToLanguage = 
messageObject.messageOwner.translatedToLanguage; + if (cellMessageObject.replyMessageObject.updateTranslation(false)) { + update = true; + } + } + if (update) { + cellMessageObject.forceUpdate = true; + cell.setMessageObject(cellMessageObject, cell.getCurrentMessagesGroup(), cell.isPinnedBottom(), cell.isPinnedTop()); + if (group != null) { + if (chatListItemAnimator != null) { + chatListItemAnimator.groupWillChanged(group); + } + for (int j = 0; j < group.messages.size(); j++) { + group.messages.get(j).forceUpdate = true; + } + chatAdapter.notifyDataSetChanged(true); + } else { + chatAdapter.updateRowAtPosition(chatListView.getChildAdapterPosition(child)); + } + updated = true; + } + } + } + return updated; + } + + + private boolean updateMessagesReplyTranslation(ArrayList messageIds, MessageObject translatedReplyMessageObject) { + boolean updated = false; + for (int i = 0; i < chatListView.getChildCount(); ++i) { + View child = chatListView.getChildAt(i); + if (child instanceof ChatMessageCell) { + ChatMessageCell cell = (ChatMessageCell) child; + MessageObject cellMessageObject = cell.getMessageObject(); + boolean update = false; + if (cellMessageObject != null && messageIds.contains(cellMessageObject.getId()) && cellMessageObject.replyMessageObject != null && cellMessageObject.replyMessageObject.getId() == translatedReplyMessageObject.getId()) { + cellMessageObject.replyMessageObject.messageOwner.translatedText = translatedReplyMessageObject.messageOwner.translatedText; + cellMessageObject.replyMessageObject.messageOwner.translatedToLanguage = translatedReplyMessageObject.messageOwner.translatedToLanguage; + if (cellMessageObject.replyMessageObject.updateTranslation(false)) { + update = true; + } + } + if (update) { + cellMessageObject.forceUpdate = true; + cell.setMessageObject(cellMessageObject, cell.getCurrentMessagesGroup(), cell.isPinnedBottom(), cell.isPinnedTop()); + chatAdapter.updateRowAtPosition(chatListView.getChildAdapterPosition(child)); + updated = true; + } + } + } + return updated; + } + + private long lastTranslationCheck; + private void checkTranslation(boolean force) { + if (System.currentTimeMillis() - lastTranslationCheck > 1000) { + force = true; } + AndroidUtilities.cancelRunOnUIThread(checkTranslationRunnable); + AndroidUtilities.runOnUIThread(checkTranslationRunnable, force ? 
0 : 150); } + private Runnable checkTranslationRunnable = () -> { + lastTranslationCheck = System.currentTimeMillis(); + if (chatListView != null && chatAdapter != null) { + int minId = Integer.MAX_VALUE, maxId = Integer.MIN_VALUE; + for (int i = 0; i < chatListView.getChildCount(); ++i) { + View child = chatListView.getChildAt(i); + if (child instanceof ChatMessageCell) { + ChatMessageCell cell = (ChatMessageCell) child; + if (cell.getCurrentMessagesGroup() != null) { + for (int j = 0; j < cell.getCurrentMessagesGroup().messages.size(); ++j) { + final int mid = cell.getCurrentMessagesGroup().messages.get(j).getId(); + minId = Math.min(minId, mid); + maxId = Math.max(maxId, mid); + } + } else if (cell.getMessageObject() != null) { + final int mid = cell.getMessageObject().getId(); + minId = Math.min(minId, mid); + maxId = Math.max(maxId, mid); + } + } + } + + if (minId <= maxId) { + ArrayList groupsChecked = new ArrayList<>(); + for (int i = 0; i < messages.size(); ++i) { + MessageObject messageObject = messages.get(i); + MessageObject.GroupedMessages group = groupedMessagesMap.get(messageObject.getGroupId()); + if (group != null) { + if (!groupsChecked.contains(group.groupId)) { + for (int j = 0; j < group.messages.size(); ++j) { + MessageObject messageObject1 = group.messages.get(j); + if (messageObject1 != null) { + final int mid = messageObject1.getId(); + getMessagesController().getTranslateController().checkTranslation( + messageObject1, mid >= minId - 7 && mid <= maxId + 7 + ); + } + } + groupsChecked.add(group.groupId); + } + } else { + final int mid = messageObject.getId(); + getMessagesController().getTranslateController().checkTranslation( + messageObject, mid >= minId - 7 && mid <= maxId + 7 + ); + } + } + } + } + + if (currentPinnedMessageId > 0 && pinnedMessageObjects != null) { + getMessagesController().getTranslateController().checkTranslation(pinnedMessageObjects.get(currentPinnedMessageId), true); + } + + updateTranslateItemVisibility(); + }; private void checkSecretMessageForLocation(MessageObject messageObject) { if (messageObject.type != MessageObject.TYPE_GEO || locationAlertShown || SharedConfig.isSecretMapPreviewSet()) { @@ -19120,6 +19453,7 @@ private void clearHistory(boolean overwrite, TLRPC.TL_updates_channelDifferenceT if (progressView != null) { showProgressView(false); + createEmptyView(); chatListView.setEmptyView(emptyViewContainer); } @@ -19179,6 +19513,10 @@ private void showGigagroupConvertAlert() { protected void onCovert() { getMessagesController().convertToGigaGroup(getParentActivity(), currentChat, ChatActivity.this, (result) -> { if (result) { + createUndoView(); + if (undoView == null) { + return; + } undoView.showWithAction(0, UndoView.ACTION_GIGAGROUP_SUCCESS, null); } }); @@ -19186,10 +19524,20 @@ protected void onCovert() { @Override protected void onCancel() { + createUndoView(); + if (undoView == null) { + return; + } undoView.showWithAction(0, UndoView.ACTION_GIGAGROUP_CANCEL, null); getMessagesController().removeSuggestion(dialog_id, "CONVERT_GIGAGROUP"); } - }), (dialog, which) -> undoView.showWithAction(0, UndoView.ACTION_GIGAGROUP_CANCEL, null)).create()); + }), (dialog, which) -> { + createUndoView(); + if (undoView == null) { + return; + } + undoView.showWithAction(0, UndoView.ACTION_GIGAGROUP_CANCEL, null); + }).create()); } } } @@ -19338,7 +19686,7 @@ private void processNewMessages(ArrayList arr) { } } } - if (messageObject.wasJustSent && (getUserConfig().isPremium() || messageObject.isAnimatedAnimatedEmoji())) { + if 
(messageObject.wasJustSent && (getUserConfig().isPremium() || messageObject.isAnimatedAnimatedEmoji()) && !(SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW || !LiteMode.isEnabled(messageObject.isAnimatedAnimatedEmoji() ? LiteMode.FLAG_ANIMATED_EMOJI_CHAT : LiteMode.FLAG_ANIMATED_STICKERS_CHAT))) { messageObject.forcePlayEffect = true; } } @@ -19782,6 +20130,7 @@ private void processNewMessages(ArrayList arr) { } } obj.stableId = lastStableId++; + getMessagesController().getTranslateController().checkTranslation(obj, false); messages.add(placeToPaste, obj); if (placeToPaste == 0 && !obj.isSponsored()) { needMoveScrollToLastMessage = true; @@ -20403,6 +20752,7 @@ private void addToPolls(MessageObject obj, MessageObject old) { } private void showInfoHint(MessageObject messageObject, CharSequence text, int type) { + checkTopUndoView(); if (topUndoView == null) { return; } @@ -20445,9 +20795,13 @@ private void showPollSolution(MessageObject messageObject, TLRPC.PollResults res private void updateSearchButtons(int mask, int num, int count) { if (searchUpButton != null) { searchUpButton.setEnabled((mask & 1) != 0); - searchDownButton.setEnabled((mask & 2) != 0); searchUpButton.setAlpha(searchUpButton.isEnabled() ? 1.0f : 0.5f); + } + if (searchDownButton != null) { + searchDownButton.setEnabled((mask & 2) != 0); searchDownButton.setAlpha(searchDownButton.isEnabled() ? 1.0f : 0.5f); + } + if (searchCountText != null) { if (count < 0) { searchCountText.setCount("", 0, false); } else if (count == 0) { @@ -20502,6 +20856,8 @@ public void onBecomeFullyHidden() { progressDialogCurrent.cancel(); progressDialogCurrent = null; } + + flagSecure.detach(); } public void saveKeyboardPositionBeforeTransition() { @@ -20572,9 +20928,6 @@ public void onTransitionAnimationStart(boolean isOpen, boolean backward) { @Override public void onTransitionAnimationEnd(boolean isOpen, boolean backward) { super.onTransitionAnimationEnd(isOpen, backward); - if (getParentLayout() instanceof LNavigation && ((LNavigation) getParentLayout()).doShowOpenChat()) { - Toast.makeText(getParentActivity(), "Opened chat fragment in " + (System.currentTimeMillis() - startMs) + "ms", Toast.LENGTH_SHORT).show(); - } if (isOpen) { if (backward) { if (showPinBulletin && pinBulletin != null) { @@ -20592,9 +20945,9 @@ public void onTransitionAnimationEnd(boolean isOpen, boolean backward) { openAnimationEnded = true; getNotificationCenter().onAnimationFinish(transitionAnimationIndex); NotificationCenter.getGlobalInstance().onAnimationFinish(transitionAnimationGlobalIndex); - if (Build.VERSION.SDK_INT >= 21) { - createChatAttachView(); - } +// if (Build.VERSION.SDK_INT >= 21) { +// createChatAttachView(); +// } checkGroupCallJoin(lastCallCheckFromServer); if (chatActivityEnterView.hasRecordVideo() && !chatActivityEnterView.isSendButtonVisible()) { @@ -20658,6 +21011,7 @@ public void onTransitionAnimationEnd(boolean isOpen, boolean backward) { message = null; } if (!TextUtils.isEmpty(message)) { + checkTopUndoView(); if (topUndoView != null) { if (type == MessagesController.PROMO_TYPE_PROXY) { preferences.edit().putLong("proxychannel", dialog_id).apply(); @@ -20730,7 +21084,7 @@ public void onTransitionAnimationEnd(boolean isOpen, boolean backward) { openAttachBotLayout(user.id, attachMenuBotStartCommand); } else { AttachBotIntroTopView introTopView = new AttachBotIntroTopView(getParentActivity()); - introTopView.setColor(Theme.getColor(Theme.key_chat_attachContactIcon)); + 
introTopView.setColor(Theme.getColor(Theme.key_chat_attachIcon)); introTopView.setBackgroundColor(Theme.getColor(Theme.key_dialogTopBackground)); introTopView.setAttachBot(attachMenuBot); @@ -20768,6 +21122,7 @@ public void onTransitionAnimationEnd(boolean isOpen, boolean backward) { allowWrite.set(allow); }); + builder.setCustomViewOffset(6); builder.setView(cell); } builder.show(); @@ -20784,6 +21139,7 @@ public void onTransitionAnimationEnd(boolean isOpen, boolean backward) { public void openAttachBotLayout(long botId, String startCommand) { openAttachMenu(); + createChatAttachView(); chatAttachAlert.showBotLayout(botId, startCommand); } @@ -20997,7 +21353,9 @@ private void updateBottomOverlay() { } showBottomOverlayProgress(false, true); } else if (botUser != null && currentUser.bot) { - bottomOverlayChatText.setText(LocaleController.getString("BotStart", R.string.BotStart)); +// bottomOverlayStartButton.setText(LocaleController.getString("BotStart", R.string.BotStart)); + bottomOverlayStartButton.setVisibility(View.VISIBLE); + bottomOverlayChatText.setVisibility(View.GONE); chatActivityEnterView.hidePopup(false); if (getParentActivity() != null) { AndroidUtilities.hideKeyboard(getParentActivity().getCurrentFocus()); @@ -21013,11 +21371,17 @@ private void updateBottomOverlay() { bottomOverlayImage.setVisibility(View.INVISIBLE); } if (inPreviewMode) { - searchContainer.setVisibility(View.INVISIBLE); + if (searchContainer != null) { + searchContainer.setVisibility(View.INVISIBLE); + } bottomOverlayChat.setVisibility(View.INVISIBLE); chatActivityEnterView.setFieldFocused(false); chatActivityEnterView.setVisibility(View.INVISIBLE); } else if (searchItem != null && searchItemVisible) { + createSearchContainer(); + if (searchContainer == null) { + return; + } searchContainer.animate().setListener(null).cancel(); if (searchContainer.getVisibility() != View.VISIBLE) { searchContainer.setVisibility(View.VISIBLE); @@ -21027,7 +21391,6 @@ private void updateBottomOverlay() { @Override public void onAnimationEnd(Animator animation) { super.onAnimationEnd(animation); - } }).start(); @@ -21040,8 +21403,8 @@ public void onAnimationEnd(Animator animation) { searchExpandAnimator = ValueAnimator.ofFloat(searchExpandProgress, 1f); searchExpandAnimator.addUpdateListener(animation -> { searchExpandProgress = (float) animation.getAnimatedValue(); - chatListView.setTranslationY(searchExpandProgress * (chatActivityEnterView.getMeasuredHeight() - searchContainer.getMeasuredHeight())); - chatActivityEnterView.setChatSearchExpandOffset(searchExpandProgress * (chatActivityEnterView.getMeasuredHeight() - searchContainer.getMeasuredHeight())); + chatListView.setTranslationY(searchExpandProgress * (chatActivityEnterView.getMeasuredHeight() - AndroidUtilities.dp(searchContainerHeight))); + chatActivityEnterView.setChatSearchExpandOffset(searchExpandProgress * (chatActivityEnterView.getMeasuredHeight() - AndroidUtilities.dp(searchContainerHeight))); invalidateChatListViewTopPadding(); }); searchExpandAnimator.addListener(new AnimatorListenerAdapter() { @@ -21071,15 +21434,17 @@ public void onAnimationEnd(Animator animation) { topViewWasVisible = 2; } } else { - searchContainer.animate().setListener(null).cancel(); - if (searchContainer.getVisibility() == View.VISIBLE) { - searchContainer.animate().alpha(0).setDuration(150).setListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - super.onAnimationEnd(animation); - searchContainer.setVisibility(View.INVISIBLE); - } - 
}).start(); + if (searchContainer != null) { + searchContainer.animate().setListener(null).cancel(); + if (searchContainer.getVisibility() == View.VISIBLE) { + searchContainer.animate().alpha(0).setDuration(150).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + super.onAnimationEnd(animation); + searchContainer.setVisibility(View.INVISIBLE); + } + }).start(); + } } chatActivityEnterView.setVisibility(View.VISIBLE); @@ -21094,8 +21459,8 @@ public void onAnimationEnd(Animator animation) { invalidateChatListViewTopPadding(); searchExpandAnimator.addUpdateListener(animation -> { searchExpandProgress = (float) animation.getAnimatedValue(); - chatListView.setTranslationY(searchExpandProgress * (chatActivityEnterView.getMeasuredHeight() - searchContainer.getMeasuredHeight())); - chatActivityEnterView.setChatSearchExpandOffset(searchExpandProgress * (chatActivityEnterView.getMeasuredHeight() - searchContainer.getMeasuredHeight())); + chatListView.setTranslationY(searchExpandProgress * (chatActivityEnterView.getMeasuredHeight() - AndroidUtilities.dp(searchContainerHeight))); + chatActivityEnterView.setChatSearchExpandOffset(searchExpandProgress * (chatActivityEnterView.getMeasuredHeight() - AndroidUtilities.dp(searchContainerHeight))); invalidateChatListViewTopPadding(); }); searchExpandAnimator.addListener(new AnimatorListenerAdapter() { @@ -21221,7 +21586,40 @@ private void updateTopicHeader() { updateTopicTitleIcon(); } + private void createAlertView() { + if (alertView != null || getContext() == null) { + return; + } + + alertView = new FrameLayout(getContext()); + alertView.setTag(1); + alertView.setVisibility(View.GONE); + alertView.setBackgroundResource(R.drawable.blockpanel); + alertView.getBackground().setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_topPanelBackground), PorterDuff.Mode.MULTIPLY)); + contentView.addView(alertView, 9, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 50, Gravity.TOP | Gravity.LEFT)); + + alertNameTextView = new TextView(getContext()); + alertNameTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + alertNameTextView.setTextColor(getThemedColor(Theme.key_chat_topPanelTitle)); + alertNameTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + alertNameTextView.setSingleLine(true); + alertNameTextView.setEllipsize(TextUtils.TruncateAt.END); + alertNameTextView.setMaxLines(1); + alertView.addView(alertNameTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 8, 5, 8, 0)); + + alertTextView = new TextView(getContext()); + alertTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + alertTextView.setTextColor(getThemedColor(Theme.key_chat_topPanelMessage)); + + alertTextView.setSingleLine(true); + alertTextView.setEllipsize(TextUtils.TruncateAt.END); + alertTextView.setMaxLines(1); + alertView.addView(alertTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 8, 23, 8, 0)); + } + public void showAlert(String name, String message) { + createAlertView(); + if (alertView == null || name == null || message == null) { return; } @@ -21343,7 +21741,9 @@ private boolean hidePinnedMessageView(boolean animated) { @Override public void onAnimationEnd(Animator animation) { if (pinnedMessageViewAnimator != null && pinnedMessageViewAnimator.equals(animation)) { - pinnedMessageView.setVisibility(View.GONE); + if (pinnedMessageView != null) { + 
pinnedMessageView.setVisibility(View.GONE); + } pinnedMessageViewAnimator = null; } } @@ -21470,7 +21870,7 @@ private TLRPC.KeyboardButton pinnedButton(MessageObject message) { } private void updatePinnedMessageView(boolean animated, int animateToNext) { - if (pinnedMessageView == null || chatMode != 0) { + if (currentEncryptedChat != null || chatMode != 0) { return; } int pinned_msg_id; @@ -21507,10 +21907,17 @@ private void updatePinnedMessageView(boolean animated, int animateToNext) { changed = hidePinnedMessageView(animated); if (headerItem != null) headerItem.showSubItem(nkheaderbtn_show_pinned); } else { - updatePinnedListButton(animated); + if (pinnedMessageView == null) { + createPinnedMessageView(); + } else { + updatePinnedListButton(animated); + } if (headerItem != null) headerItem.hideSubItem(nkheaderbtn_show_pinned); + if (pinnedMessageView == null) { + return; + } if (pinnedMessageObject != null) { - if (pinnedMessageView.getTag() != null) { + if (pinnedMessageView != null && pinnedMessageView.getTag() != null) { pinnedMessageView.setTag(null); changed = true; if (pinnedMessageViewAnimator != null) { @@ -21600,7 +22007,8 @@ public void onAnimationCancel(Animator animation) { if (botButton instanceof TLRPC.TL_keyboardButtonUrl) { openClickableLink(null, botButton.url, true, null, buttonMessage); try { - buttonTextView.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); + if (!NekoConfig.disableVibration.Bool()) + buttonTextView.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); } catch (Exception ignore) {} return true; } @@ -21757,6 +22165,7 @@ public void onAnimationCancel(Animator animation) { } } CharSequence pinnedText = null; + pinnedMessageObject.updateTranslation(false); if (pinnedMessageObject.type == MessageObject.TYPE_MUSIC) { pinnedText = String.format("%s - %s", pinnedMessageObject.getMusicAuthor(), pinnedMessageObject.getMusicTitle()); } else if (pinnedMessageObject.type == MessageObject.TYPE_POLL) { @@ -21775,22 +22184,22 @@ public void onAnimationCancel(Animator animation) { mess = mess.substring(0, 150); } mess = mess.replace('\n', ' '); - CharSequence message = mess; + pinnedText = mess; + pinnedText = Emoji.replaceEmoji(pinnedText, messageTextView.getPaint().getFontMetricsInt(), AndroidUtilities.dp(14), false); if (pinnedMessageObject != null && pinnedMessageObject.messageOwner != null) { - message = MessageObject.replaceAnimatedEmoji(mess, pinnedMessageObject.messageOwner.entities, messageTextView.getPaint().getFontMetricsInt()); + pinnedText = pinnedMessageObject.replaceAnimatedEmoji(pinnedText, messageTextView.getPaint().getFontMetricsInt()); } - pinnedText = Emoji.replaceEmoji(message, messageTextView.getPaint().getFontMetricsInt(), AndroidUtilities.dp(14), false); } else if (pinnedMessageObject.messageText != null) { String mess = pinnedMessageObject.messageText.toString(); if (mess.length() > 150) { mess = mess.substring(0, 150); } mess = mess.replace('\n', ' '); - CharSequence message = mess; + pinnedText = mess; + pinnedText = Emoji.replaceEmoji(pinnedText, messageTextView.getPaint().getFontMetricsInt(), AndroidUtilities.dp(14), false); if (pinnedMessageObject != null && pinnedMessageObject.messageOwner != null) { - message = MessageObject.replaceAnimatedEmoji(mess, pinnedMessageObject.messageOwner.entities, messageTextView.getPaint().getFontMetricsInt()); + pinnedText = pinnedMessageObject.replaceAnimatedEmoji(pinnedText, 
messageTextView.getPaint().getFontMetricsInt()); } - pinnedText = Emoji.replaceEmoji(message, messageTextView.getPaint().getFontMetricsInt(), AndroidUtilities.dp(14), false); } if (pinnedText != null) { if (pinnedText instanceof Spannable) { @@ -22145,9 +22554,11 @@ public TrackingWidthSimpleTextView(Context context) { } private boolean trackWidth = true; + public void setTrackWidth(boolean value) { this.trackWidth = value; } + public boolean getTrackWidth() { return this.trackWidth; } @@ -22165,9 +22576,9 @@ protected boolean createLayout(int width) { } } - private boolean shownRestartTopic; + private boolean shownRestartTopic, shownTranslateTopic; private void updateTopPanel(boolean animated) { - if (topChatPanelView == null || chatMode != 0) { + if (chatMode != 0) { return; } SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); @@ -22195,30 +22606,66 @@ private void updateTopPanel(boolean animated) { boolean showAddMembersToGroup = preferences.getBoolean("dialog_bar_invite" + did, false); TLRPC.EmojiStatus showEmojiStatusReport = currentUser != null && (showReport || showBlock) && (currentUser.emoji_status instanceof TLRPC.TL_emojiStatus || currentUser.emoji_status instanceof TLRPC.TL_emojiStatusUntil && ((TLRPC.TL_emojiStatusUntil) currentUser.emoji_status).until > (int) (System.currentTimeMillis() / 1000)) ? currentUser.emoji_status : null; boolean showRestartTopic = !isInPreviewMode() && forumTopic != null && forumTopic.closed && !forumTopic.hidden && ChatObject.canManageTopic(currentAccount, currentChat, forumTopic); + boolean showTranslate = ( + getUserConfig().isPremium() ? + getMessagesController().getTranslateController().isDialogTranslatable(getDialogId()) && !getMessagesController().getTranslateController().isTranslateDialogHidden(getDialogId()) : + !getMessagesController().premiumLocked && preferences.getInt("dialog_show_translate_count" + did, 5) <= 0 + ); if (showRestartTopic) { shownRestartTopic = true; } - - reportSpamButton.setVisibility(showReport || showBlock || showGeo ? View.VISIBLE : View.GONE); + if (showTranslate) { + shownTranslateTopic = true; + } boolean showRestartTopic1 = (showRestartTopic || shownRestartTopic) && !(showReport || showBlock || showGeo); - restartTopicButton.setVisibility(showRestartTopic1 ? View.VISIBLE : View.GONE); - closeReportSpam.setVisibility(showRestartTopic1 ? View.GONE : View.VISIBLE); + if (show || showReport || showBlock || showGeo || showTranslate || showRestartTopic1) { + createTopPanel(); + if (topChatPanelView == null) { + return; + } + } + + if (showTranslate) { + createTranslateButton(); + if (translateButton != null) { + translateButton.updateText(); + } + } + if ((shownTranslateTopic || shownRestartTopic) && !show) { + showReport = showGeo = showShare = showBlock = showAdd = showArchive = showAddMembersToGroup = false; + show = true; + } + if (reportSpamButton != null) { + reportSpamButton.setVisibility(showReport || showBlock || showGeo ? View.VISIBLE : View.GONE); + } + if (restartTopicButton != null) { + restartTopicButton.setVisibility(showRestartTopic1 ? View.VISIBLE : View.GONE); + } + if (translateButton != null) { + translateButton.setVisibility(showTranslate ? View.VISIBLE : View.GONE); + } + if (closeReportSpam != null) { + closeReportSpam.setVisibility(showRestartTopic1 || showTranslate && !(showReport || showBlock || showGeo) ? 
View.GONE : View.VISIBLE); + } if (!showRestartTopic) { shownRestartTopic = false; } - - if (showRestartTopic) { - show = true; + if (!showTranslate) { + shownTranslateTopic = false; } addToContactsButtonArchive = false; TLRPC.User user = currentUser != null ? getMessagesController().getUser(currentUser.id) : null; boolean isChatWithAdmin = false; if (user != null && !TextUtils.isEmpty(chatWithAdmin)) { + createTopPanel(); + if (topChatPanelView == null) { + return; + } isChatWithAdmin = true; if (chatWithAdminTextView == null) { - chatWithAdminTextView = new TextView(topChatPanelView.getContext()); + chatWithAdminTextView = new TextView(getContext()); chatWithAdminTextView.setGravity(Gravity.CENTER_VERTICAL); chatWithAdminTextView.setPadding(AndroidUtilities.dp(14), 0, AndroidUtilities.dp(46), 0); chatWithAdminTextView.setBackground(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector), 2)); @@ -22242,12 +22689,12 @@ public void onClick(View view) { addToContactsButton.setVisibility(View.GONE); chatWithAdminTextView.setText(AndroidUtilities.replaceTags(str)); } else if (showAddMembersToGroup) { - String str = LocaleController.getString("GroupAddMembers", R.string.GroupAddMembers); - if (str != null) { - str = str.toUpperCase(); + createTopPanel(); + if (topChatPanelView == null) { + return; } addToContactsButton.setVisibility(View.VISIBLE); - addToContactsButton.setText(str); + addToContactsButton.setText(LocaleController.getString("GroupAddMembers", R.string.GroupAddMembers)); addToContactsButton.setTag(4); addToContactsButton.setTextColor(getThemedColor(Theme.key_chat_addContact)); if (Build.VERSION.SDK_INT >= 21) { @@ -22256,8 +22703,14 @@ public void onClick(View view) { reportSpamButton.setTag(Theme.key_chat_addContact); } else if (user != null) { if (UserObject.isReplyUser(user)) { - addToContactsButton.setVisibility(View.GONE); + if (addToContactsButton != null) { + addToContactsButton.setVisibility(View.GONE); + } } else if (!user.contact && !user.self && showAdd) { + createTopPanel(); + if (topChatPanelView == null) { + return; + } addContactItem.setVisibility(View.VISIBLE); addContactItem.setText(LocaleController.getString("AddToContacts", R.string.AddToContacts)); addToContactsButton.setVisibility(View.VISIBLE); @@ -22269,12 +22722,17 @@ public void onClick(View view) { if (reportSpamButton.getVisibility() == View.VISIBLE) { addToContactsButton.setText(LocaleController.getString("AddContactChat", R.string.AddContactChat)); } else { - addToContactsButton.setText(LocaleController.formatString("AddContactFullChat", R.string.AddContactFullChat, UserObject.getFirstName(user)).toUpperCase()); + float baseWidth = addToContactsButton.getPaint().measureText(LocaleController.formatString(R.string.AddContactFullChat, "")); + addToContactsButton.setText(LocaleController.formatString("AddContactFullChat", R.string.AddContactFullChat, TextUtils.ellipsize(UserObject.getFirstName(user), addToContactsButton.getPaint(), getContext().getResources().getDisplayMetrics().widthPixels - baseWidth - AndroidUtilities.dp(64 * 2), TextUtils.TruncateAt.MIDDLE)).toUpperCase()); } } addToContactsButton.setTag(null); addToContactsButton.setVisibility(View.VISIBLE); } else if (showShare && !user.self) { + createTopPanel(); + if (topChatPanelView == null) { + return; + } addContactItem.setVisibility(View.VISIBLE); addToContactsButton.setVisibility(View.VISIBLE); addContactItem.setText(LocaleController.getString("ShareMyContactInfo", R.string.ShareMyContactInfo)); @@ -22283,17 +22741,29 @@ public 
void onClick(View view) { addToContactsButton.setVisibility(View.VISIBLE); } else { if (!user.contact && !user.self && !show) { + createTopPanel(); + if (topChatPanelView == null) { + return; + } addContactItem.setVisibility(View.VISIBLE); addContactItem.setText(LocaleController.getString("ShareMyContactInfo", R.string.ShareMyContactInfo)); addToContactsButton.setTag(2); - } else { + } else if (addContactItem != null) { addContactItem.setVisibility(View.GONE); } - addToContactsButton.setVisibility(View.GONE); + if (addToContactsButton != null) { + addToContactsButton.setVisibility(View.GONE); + } + } + if (reportSpamButton != null) { + reportSpamButton.setText(LocaleController.getString("ReportSpamUser", R.string.ReportSpamUser)); } - reportSpamButton.setText(LocaleController.getString("ReportSpamUser", R.string.ReportSpamUser)); } else { if (showGeo) { + createTopPanel(); + if (topChatPanelView == null) { + return; + } reportSpamButton.setText(LocaleController.getString("ReportSpamLocation", R.string.ReportSpamLocation)); reportSpamButton.setTag(R.id.object_tag, 1); reportSpamButton.setTextColor(getThemedColor(Theme.key_chat_addContact)); @@ -22303,21 +22773,31 @@ public void onClick(View view) { reportSpamButton.setTag(Theme.key_chat_addContact); } else { if (showArchive) { + createTopPanel(); + if (topChatPanelView == null) { + return; + } addToContactsButtonArchive = true; addToContactsButton.setText(LocaleController.getString("Unarchive", R.string.Unarchive).toUpperCase()); addToContactsButton.setTag(3); addToContactsButton.setVisibility(View.VISIBLE); reportSpamButton.setText(LocaleController.getString("ReportSpam", R.string.ReportSpam)); } else { - addToContactsButton.setVisibility(View.GONE); - reportSpamButton.setText(LocaleController.getString("ReportSpamAndLeave", R.string.ReportSpamAndLeave)); + if (addToContactsButton != null) { + addToContactsButton.setVisibility(View.GONE); + } + if (reportSpamButton != null) { + reportSpamButton.setText(LocaleController.getString("ReportSpamAndLeave", R.string.ReportSpamAndLeave)); + } } - reportSpamButton.setTag(R.id.object_tag, null); - reportSpamButton.setTextColor(getThemedColor(Theme.key_chat_reportSpam)); - if (Build.VERSION.SDK_INT >= 21) { - Theme.setSelectorDrawableColor(reportSpamButton.getBackground(), getThemedColor(Theme.key_chat_reportSpam) & 0x19ffffff, true); + if (reportSpamButton != null) { + reportSpamButton.setTag(R.id.object_tag, null); + reportSpamButton.setTextColor(getThemedColor(Theme.key_chat_reportSpam)); + if (Build.VERSION.SDK_INT >= 21) { + Theme.setSelectorDrawableColor(reportSpamButton.getBackground(), getThemedColor(Theme.key_chat_reportSpam) & 0x19ffffff, true); + } + reportSpamButton.setTag(Theme.key_chat_reportSpam); } - reportSpamButton.setTag(Theme.key_chat_reportSpam); } if (addContactItem != null) { @@ -22336,12 +22816,16 @@ public void onClick(View view) { if (chatWithAdminTextView != null) { chatWithAdminTextView.setVisibility(isChatWithAdmin ? 
View.VISIBLE : View.GONE); } - if (userBlocked || (addToContactsButton.getVisibility() == View.GONE && reportSpamButton.getVisibility() == View.GONE && (chatWithAdminTextView == null || chatWithAdminTextView.getVisibility() == View.GONE) && restartTopicButton.getVisibility() == View.GONE)) { + if (userBlocked || ((addToContactsButton == null || addToContactsButton.getVisibility() == View.GONE) && (reportSpamButton == null || reportSpamButton.getVisibility() == View.GONE) && (chatWithAdminTextView == null || chatWithAdminTextView.getVisibility() == View.GONE) && (restartTopicButton == null || restartTopicButton.getVisibility() == View.GONE) && (translateButton == null || translateButton.getVisibility() == View.GONE))) { show = false; } - int topChatPanelHeight; - if (showEmojiStatusReport != null) { + int topChatPanelHeight = AndroidUtilities.dp(50); + if (showEmojiStatusReport != null && show) { + createTopPanel(); + if (topChatPanelView == null) { + return; + } emojiStatusSpamHint.setVisibility(View.VISIBLE); topViewSeparator1.setVisibility(View.VISIBLE); topViewSeparator2.setVisibility(View.VISIBLE); @@ -22381,16 +22865,51 @@ public void updateDrawState(@NonNull TextPaint ds) { } emojiStatusSpamHint.setText(text); emojiStatusSpamHint.measure(View.MeasureSpec.makeMeasureSpec(AndroidUtilities.displaySize.x - AndroidUtilities.dp(50), View.MeasureSpec.AT_MOST), View.MeasureSpec.makeMeasureSpec(99999, View.MeasureSpec.AT_MOST)); - topChatPanelHeight = AndroidUtilities.dp(50 + 16) + emojiStatusSpamHint.getMeasuredHeight(); + topChatPanelHeight += AndroidUtilities.dp(4); + emojiStatusSpamHint.setTranslationY(topChatPanelHeight); + topChatPanelHeight += AndroidUtilities.dp(10) + emojiStatusSpamHint.getMeasuredHeight(); } else { - emojiStatusSpamHint.setVisibility(View.GONE); - topViewSeparator1.setVisibility(View.GONE); - topViewSeparator2.setVisibility(View.GONE); - topChatPanelHeight = AndroidUtilities.dp(50); + if (emojiStatusSpamHint != null) { + emojiStatusSpamHint.setVisibility(View.GONE); + } + if (topViewSeparator1 != null) { + topViewSeparator1.setVisibility(View.GONE); + } + if (topViewSeparator2 != null) { + topViewSeparator2.setVisibility(View.GONE); + } + } + if (showTranslate) { + createTopPanel(); + if (topChatPanelView == null) { + return; + } + if (restartTopicButton.getVisibility() == View.VISIBLE || + reportSpamButton.getVisibility() == View.VISIBLE || + addToContactsButton.getVisibility() == View.VISIBLE || + user != null && !TextUtils.isEmpty(chatWithAdmin) + ) { + topViewSeparator3.setVisibility(View.VISIBLE); + } else { + topChatPanelHeight -= AndroidUtilities.dp(48); + topViewSeparator3.setVisibility(View.GONE); + } + topChatPanelHeight += AndroidUtilities.dp(36); + } else if (topViewSeparator3 != null) { + topViewSeparator3.setVisibility(View.GONE); + } + if (topViewSeparator1 != null) { + topViewSeparator1.setVisibility((topViewSeparator3 != null && topViewSeparator3.getVisibility() == View.VISIBLE || topViewSeparator2 != null && topViewSeparator2.getVisibility() == View.VISIBLE) ? 
View.VISIBLE : View.GONE); + } + if (topChatPanelView != null) { + topChatPanelView.getLayoutParams().height = topChatPanelHeight; } - topChatPanelView.getLayoutParams().height = topChatPanelHeight; if (show) { + createTopPanel(); + if (topChatPanelView == null) { + return; + } if (topChatPanelView.getTag() != null) { if (BuildVars.LOGS_ENABLED) { FileLog.d("show spam button"); @@ -22433,7 +22952,7 @@ public void onAnimationCancel(Animator animation) { invalidateMessagesVisiblePart(); } } - } else { + } else if (topChatPanelView != null) { if (topChatPanelView.getTag() == null) { if (BuildVars.LOGS_ENABLED) { FileLog.d("hide spam button"); @@ -22461,6 +22980,8 @@ public void onAnimationEnd(Animator animation) { topChatPanelView.setVisibility(View.GONE); reportSpamViewAnimator = null; } + invalidateChatListViewTopPadding(); + invalidateMessagesVisiblePart(); } @Override @@ -22505,7 +23026,7 @@ private void checkListViewPaddings() { private void checkRaiseSensors() { if (chatActivityEnterView != null && chatActivityEnterView.isStickersExpanded()) { MediaController.getInstance().setAllowStartRecord(false); - } else if (currentChat != null && !ChatObject.canSendMedia(currentChat)) { + } else if (currentChat != null && !ChatObject.canSendVoice(currentChat)) { MediaController.getInstance().setAllowStartRecord(false); } else if (!ApplicationLoader.mainInterfacePaused && (bottomOverlayChat == null || bottomOverlayChat.getVisibility() != View.VISIBLE) && (bottomOverlay == null || bottomOverlay.getVisibility() != View.VISIBLE) && (searchContainer == null || searchContainer.getVisibility() != View.VISIBLE)) { MediaController.getInstance().setAllowStartRecord(true); @@ -22653,6 +23174,11 @@ public int getBottomOffset(int tag) { height += contentPanTranslation; return height - AndroidUtilities.dp(1.5f); } + + @Override + public boolean allowLayoutChanges() { + return false; + } }); checkActionBarMenu(false); @@ -22736,6 +23262,12 @@ public int getBottomOffset(int tag) { pullingDownOffset = 0; chatListView.invalidate(); } + + flagSecure.attach(); + } + + public float getPullingDownOffset() { + return pullingDownOffset; } public void checkAdjustResize() { @@ -22902,7 +23434,7 @@ public void onPause() { } } - private void applyDraftMaybe(boolean canClear) { + public void applyDraftMaybe(boolean canClear) { if (chatActivityEnterView == null || chatMode != 0) { return; } @@ -22970,7 +23502,7 @@ private void applyDraftMaybe(boolean canClear) { try { fontMetrics = chatActivityEnterView.getEditField().getPaint().getFontMetricsInt(); } catch (Exception e) { - FileLog.e(e); + FileLog.e(e, false); } TLRPC.TL_messageEntityCustomEmoji e = (TLRPC.TL_messageEntityCustomEmoji) entity; AnimatedEmojiSpan span; @@ -23050,10 +23582,8 @@ private void updateInformationForScreenshotDetector() { } private boolean fixLayoutInternal() { - if (!AndroidUtilities.isTablet() && ApplicationLoader.applicationContext.getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) { - selectedMessagesCountTextView.setTextSize(18); - } else { - selectedMessagesCountTextView.setTextSize(20); + if (selectedMessagesCountTextView != null) { + selectedMessagesCountTextView.setTextSize(!AndroidUtilities.isTablet() && ApplicationLoader.applicationContext.getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE ? 
18 : 20);
         }
 
         HashMap newGroups = null;
@@ -23248,10 +23778,12 @@ private void hideActionMode() {
         cantSaveMessagesCount = 0;
         if (chatActivityEnterView != null) {
             EditTextCaption editTextCaption = chatActivityEnterView.getEditField();
-            if (chatActivityEnterView.getVisibility() == View.VISIBLE) {
-                editTextCaption.requestFocus();
+            if (editTextCaption != null) {
+                if (chatActivityEnterView.getVisibility() == View.VISIBLE) {
+                    editTextCaption.requestFocus();
+                }
+                editTextCaption.setAllowDrawCursor(true);
             }
-            editTextCaption.setAllowDrawCursor(true);
         }
 
         if (textSelectionHelper != null) {
@@ -23486,7 +24018,7 @@ private boolean createMenu(View v, boolean single, boolean listView, float x, fl
         final ArrayList options = new ArrayList<>();
         View optionsView = null;
 
-        if (AndroidUtilities.isAccessibilityScreenReaderEnabled() && message.messageOwner != null && message.messageOwner.from_id.user_id != getUserConfig().clientUserId) {
+        if (AndroidUtilities.isAccessibilityScreenReaderEnabled() && message.messageOwner != null && message.messageOwner.from_id != null && message.messageOwner.from_id.user_id != getUserConfig().clientUserId) {
            items.add(LocaleController.getString(R.string.OpenProfile));
            options.add(OPTION_OPEN_PROFILE);
            icons.add(R.drawable.msg_user_search);
@@ -23529,6 +24061,10 @@ public void dismiss() {
             public void setAutoDeleteHistory(int time, int action) {
                 getMessagesController().setDialogHistoryTTL(dialog_id, time);
                 if (userInfo != null || chatInfo != null) {
+                    createUndoView();
+                    if (undoView == null) {
+                        return;
+                    }
                     undoView.showWithAction(dialog_id, action, currentUser, userInfo != null ? userInfo.ttl_period : chatInfo.ttl_period, null, null);
                 }
             }
@@ -23551,13 +24087,16 @@ public void setAutoDeleteHistory(int time, int action) {
                 } catch (Exception e) {
                 }
             }
-            if (messageTextToTranslate == null) {
+            if (messageTextToTranslate == null && MessageObject.isMediaEmpty(selectedObject.messageOwner)) {
                 messageTextToTranslate = getMessageContent(selectedObject, 0, false);
             }
             if (messageTextToTranslate != null && Emoji.fullyConsistsOfEmojis(messageTextToTranslate)) {
                 messageTextToTranslate = null; // message fully consists of emojis, do not translate
             }
         }
+        if (selectedObject.translated || selectedObject.isRestrictedMessage) {
+            messageTextToTranslate = null;
+        }
 
         if (message.isSponsored() && !getMessagesController().premiumLocked) {
             items.add(LocaleController.getString("HideAd", R.string.HideAd));
@@ -24107,7 +24646,7 @@ public void setAutoDeleteHistory(int time, int action) {
                     }
                     options.add(OPTION_VIEW_REPLIES_OR_THREAD);
                     icons.add(R.drawable.msg_viewreplies);
-                } else if (isThreadChat() && chatMode != MODE_SCHEDULED && currentChat != null) {
+                } else if (isThreadChat() && !isTopic && chatMode != MODE_SCHEDULED && currentChat != null) {
                     options.add(nkbtn_view_in_chat);
                     icons.add(R.drawable.msg_viewreplies);
                     items.add(LocaleController.getString("ViewInChat", R.string.ViewInChat));
@@ -24312,10 +24851,11 @@ public void setAutoDeleteHistory(int time, int action) {
         } else {
             isReactionsAvailable = nekoXShowReactionsView && !message.isSecretMedia() && !isSecretChat() && !isInScheduleMode() && message.isReactionsAvailable() && (chatInfo != null && !(chatInfo.available_reactions instanceof TLRPC.TL_chatReactionsNone) || (chatInfo == null && !ChatObject.isChannel(currentChat)) || currentUser != null) && !availableReacts.isEmpty();
         }
-        boolean showMessageSeen = !isReactionsViewAvailable && !isInScheduleMode() && currentChat != null && message.isOutOwner() && message.isSent() && !message.isEditing() &&
!message.isSending() && !message.isSendError() && !message.isContentUnread() && !message.isUnread() && (ConnectionsManager.getInstance(currentAccount).getCurrentTime() - message.messageOwner.date < getMessagesController().chatReadMarkExpirePeriod) && (ChatObject.isMegagroup(currentChat) || !ChatObject.isChannel(currentChat)) && chatInfo != null && chatInfo.participants_count <= getMessagesController().chatReadMarkSizeThreshold && !(message.messageOwner.action instanceof TLRPC.TL_messageActionChatJoinedByRequest) && (v instanceof ChatMessageCell); + boolean showMessageSeen = !isReactionsViewAvailable && !isInScheduleMode() && currentChat != null && message.isOutOwner() && message.isSent() && !message.isEditing() && !message.isSending() && !message.isSendError() && !message.isContentUnread() && !message.isUnread() && (ConnectionsManager.getInstance(currentAccount).getCurrentTime() - message.messageOwner.date < getMessagesController().chatReadMarkExpirePeriod) && (ChatObject.isMegagroup(currentChat) || !ChatObject.isChannel(currentChat)) && !ChatObject.isForum(currentChat) && chatInfo != null && chatInfo.participants_count <= getMessagesController().chatReadMarkSizeThreshold && !(message.messageOwner.action instanceof TLRPC.TL_messageActionChatJoinedByRequest) && (v instanceof ChatMessageCell); + boolean showSponsorInfo = selectedObject != null && selectedObject.isSponsored() && (selectedObject.sponsoredInfo != null || selectedObject.sponsoredAdditionalInfo != null); int flags = 0; - if (isReactionsViewAvailable || showMessageSeen) { + if (isReactionsViewAvailable || showMessageSeen || showSponsorInfo) { flags |= ActionBarPopupWindow.ActionBarPopupWindowLayout.FLAG_USE_SWIPEBACK; } @@ -24343,7 +24883,7 @@ public void setAutoDeleteHistory(int time, int action) { ReactedUsersListView.ContainerLinerLayout linearLayout = new ReactedUsersListView.ContainerLinerLayout(contentView.getContext()); linearLayout.hasHeader = hasHeader; linearLayout.setOrientation(LinearLayout.VERTICAL); - linearLayout.setLayoutParams(new FrameLayout.LayoutParams(AndroidUtilities.dp(200), AndroidUtilities.dp(6 * 48 + (hasHeader ? 44 * 2 + 8 : 44)) + (!hasHeader ? 1 : 0))); + linearLayout.setLayoutParams(new FrameLayout.LayoutParams(AndroidUtilities.dp(200), AndroidUtilities.dp(6 * ReactedUsersListView.ITEM_HEIGHT_DP + (hasHeader ? 44 * 2 + 8 : 44)) + (!hasHeader ? 
1 : 0))); ActionBarMenuSubItem backCell = new ActionBarMenuSubItem(getParentActivity(), true, false, themeDelegate); backCell.setItemHeight(44); backCell.setTextAndIcon(LocaleController.getString("Back", R.string.Back), R.drawable.msg_arrow_back); @@ -24519,9 +25059,7 @@ public void onPageScrollStateChanged(int state) { }); linearLayout.addView(pager, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 0, 1f)); } else { - View gap = new FrameLayout(contentView.getContext()); - gap.setBackgroundColor(getThemedColor(Theme.key_actionBarDefaultSubmenuSeparator)); - linearLayout.addView(gap, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 8)); + linearLayout.addView(new ActionBarPopupWindow.GapView(contentView.getContext(), themeDelegate), LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 8)); reactedUsersListView = new ReactedUsersListView(contentView.getContext(), themeDelegate, currentAccount, message, null, false) .setSeenUsers(reactedView.getSeenUsers()) @@ -24582,6 +25120,7 @@ public void dismiss() { RecyclerListView listView2 = finalMessageSeenView.createListView(); backContainer.addView(cell); linearLayout.addView(backContainer); + linearLayout.addView(new ActionBarPopupWindow.GapView(contentView.getContext(), themeDelegate), LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 8)); backContainer.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { @@ -24597,7 +25136,7 @@ public void onClick(View view) { if (scrimPopupWindow == null || finalMessageSeenView.users.isEmpty()) { return; } - if (finalMessageSeenView.users.size() == 1) { + if (finalMessageSeenView.users.size() == 1 && (finalMessageSeenView.dates.size() <= 0 || finalMessageSeenView.dates.get(0) <= 0)) { TLRPC.User user = finalMessageSeenView.users.get(0); if (user == null) { return; @@ -24610,8 +25149,6 @@ public void onClick(View view) { return; } - int totalHeight = contentView.getHeightWithKeyboard(); - if (!NaConfig.INSTANCE.getHideMessageSeenTooltip().Bool() && SharedConfig.messageSeenHintCount > 0 && contentView.getKeyboardHeight() < AndroidUtilities.dp(20)) { messageSeenPrivacyBulletin = BulletinFactory.of(Bulletin.BulletinWindow.make(getContext()), themeDelegate).createErrorBulletin(AndroidUtilities.replaceTags(LocaleController.getString("MessageSeenTooltipMessage", R.string.MessageSeenTooltipMessage))); messageSeenPrivacyBulletin.setDuration(4000); @@ -24632,6 +25169,7 @@ public void onClick(View view) { if (user == null) { return; } + closeMenu(true); Bundle args = new Bundle(); args.putLong("user_id", user.id); ProfileActivity fragment = new ProfileActivity(args); @@ -24652,18 +25190,21 @@ public void onClick(View view) { popupLayout.addView(cell); cell.setOnClickListener(v1 -> { if (getMediaDataController().saveToRingtones(message.getDocument())) { - getUndoView().showWithAction(dialog_id, UndoView.ACTION_RINGTONE_ADDED, new Runnable() { - boolean clicked; + UndoView undoView = getUndoView(); + if (undoView != null) { + undoView.showWithAction(dialog_id, UndoView.ACTION_RINGTONE_ADDED, new Runnable() { + boolean clicked; - @Override - public void run() { - if (clicked) { - return; + @Override + public void run() { + if (clicked) { + return; + } + clicked = true; + presentFragment(new NotificationsSettingsActivity()); } - clicked = true; - presentFragment(new NotificationsSettingsActivity()); - } - }); + }); + } } closeMenu(true); }); @@ -24705,6 +25246,7 @@ public void run() { drawable.setColorFilter(new 
PorterDuffColorFilter(getThemedColor(Theme.key_actionBarDefaultSubmenuItemIcon), PorterDuff.Mode.SRC_IN)); drawable = new CrossfadeDrawable(drawable, new CircularProgressDrawable(AndroidUtilities.dp(12f), AndroidUtilities.dp(1.5f), getThemedColor(Theme.key_actionBarDefaultSubmenuItemIcon))); rateUp.setImageDrawable(drawable); + rateUp.setContentDescription(LocaleController.getString(R.string.AccDescrRateTranscriptionUp)); rateTranscription.addView(rateUp, LayoutHelper.createFrame(33, 33, Gravity.CENTER_HORIZONTAL | Gravity.TOP, -42, 39, 0, 0)); ImageView rateDown = new ImageView(contentView.getContext()); @@ -24713,6 +25255,7 @@ public void run() { drawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_actionBarDefaultSubmenuItemIcon), PorterDuff.Mode.SRC_IN)); drawable = new CrossfadeDrawable(drawable, new CircularProgressDrawable(AndroidUtilities.dp(12f), AndroidUtilities.dp(1.5f), getThemedColor(Theme.key_actionBarDefaultSubmenuItemIcon))); rateDown.setImageDrawable(drawable); + rateDown.setContentDescription(LocaleController.getString(R.string.AccDescrRateTranscriptionDown)); rateTranscription.addView(rateDown, LayoutHelper.createFrame(33, 33, Gravity.CENTER_HORIZONTAL | Gravity.TOP, 42, 39, 0, 0)); Runnable rate = () -> { @@ -24766,45 +25309,134 @@ public void run() { popupLayout.addView(rateTranscriptionLayout, rateTranscriptionLayoutParams); } - final boolean translateButtonEnabled = MessagesController.getGlobalMainSettings().getBoolean("translate_button", false); - scrimPopupWindowItems = new ActionBarMenuSubItem[items.size() + (selectedObject.isSponsored() ? 1 : 0)]; - for (int a = 0, N = items.size(); a < N; a++) { - if (a == 0 && selectedObject.isSponsored()) { + final boolean translateButtonEnabled = MessagesController.getInstance(currentAccount).getTranslateController().isContextTranslateEnabled(); + if (selectedObject != null && selectedObject.isSponsored()) { + if (selectedObject.sponsoredInfo != null || selectedObject.sponsoredAdditionalInfo != null) { + LinearLayout linearLayout = new LinearLayout(getParentActivity()); + linearLayout.setOrientation(LinearLayout.VERTICAL); + + ActionBarMenuSubItem backCell = new ActionBarMenuSubItem(getParentActivity(), true, false, themeDelegate); + backCell.setItemHeight(44); + backCell.setTextAndIcon(LocaleController.getString("Back", R.string.Back), R.drawable.msg_arrow_back); + backCell.getTextView().setPadding(LocaleController.isRTL ? 0 : AndroidUtilities.dp(40), 0, LocaleController.isRTL ? AndroidUtilities.dp(40) : 0, 0); + backCell.setOnClickListener(v1 -> popupLayout.getSwipeBack().closeForeground()); + linearLayout.addView(backCell, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + + linearLayout.addView(new ActionBarPopupWindow.GapView(contentView.getContext(), themeDelegate), LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 8)); + + if (selectedObject.sponsoredInfo != null) { + TextView textView = new TextView(getParentActivity()); + textView.setTextColor(getThemedColor(Theme.key_actionBarDefaultSubmenuItem)); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + textView.setPadding(AndroidUtilities.dp(18), AndroidUtilities.dp(10), AndroidUtilities.dp(18), AndroidUtilities.dp(10)); + textView.setMaxWidth(AndroidUtilities.dp(300)); + textView.setText(selectedObject.sponsoredInfo); + textView.setBackground(Theme.createRadSelectorDrawable(getThemedColor(Theme.key_listSelector), 0, selectedObject.sponsoredAdditionalInfo == null ? 
6 : 0)); + textView.setOnClickListener(e -> { + if (AndroidUtilities.addToClipboard(selectedObject.sponsoredInfo)) { + BulletinFactory.of(Bulletin.BulletinWindow.make(getParentActivity()), themeDelegate).createCopyBulletin(LocaleController.getString("TextCopied", R.string.TextCopied)).show(); + } + }); + linearLayout.addView(textView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + } + + if (selectedObject.sponsoredInfo != null && selectedObject.sponsoredAdditionalInfo != null) { + FrameLayout separator = new FrameLayout(getParentActivity()); + separator.setBackgroundColor(getThemedColor(Theme.key_divider)); + LinearLayout.LayoutParams params = LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 1); + params.height = 1; + linearLayout.addView(separator, params); + } + + if (selectedObject.sponsoredAdditionalInfo != null) { + TextView textView = new TextView(getParentActivity()); + textView.setTextColor(getThemedColor(Theme.key_actionBarDefaultSubmenuItem)); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + textView.setPadding(AndroidUtilities.dp(18), AndroidUtilities.dp(10), AndroidUtilities.dp(18), AndroidUtilities.dp(10)); + textView.setMaxWidth(AndroidUtilities.dp(300)); + textView.setText(selectedObject.sponsoredAdditionalInfo); + textView.setBackground(Theme.createRadSelectorDrawable(getThemedColor(Theme.key_listSelector), 0, 6)); + textView.setOnClickListener(e -> { + if (AndroidUtilities.addToClipboard(selectedObject.sponsoredAdditionalInfo)) { + BulletinFactory.of(Bulletin.BulletinWindow.make(getParentActivity()), themeDelegate).createCopyBulletin(LocaleController.getString("TextCopied", R.string.TextCopied)).show(); + } + }); + linearLayout.addView(textView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + } + + final int foregroundIndex = popupLayout.addViewToSwipeBack(linearLayout); + ActionBarMenuSubItem cell = new ActionBarMenuSubItem(getParentActivity(), true, true, themeDelegate); - cell.setTextAndIcon(LocaleController.getString("SponsoredMessageInfo", R.string.SponsoredMessageInfo), R.drawable.msg_info); - cell.setItemHeight(56); - cell.setTag(R.id.width_tag, 240); - cell.setMultiline(); - scrimPopupWindowItems[scrimPopupWindowItems.length - 1] = cell; + cell.setTextAndIcon(LocaleController.getString("SponsoredMessageSponsor", R.string.SponsoredMessageSponsor), R.drawable.msg_channel); popupLayout.addView(cell); cell.setOnClickListener(v1 -> { if (contentView == null || getParentActivity() == null) { return; } - BottomSheet.Builder builder = new BottomSheet.Builder(contentView.getContext()); - builder.setCustomView(new SponsoredMessageInfoView(getParentActivity(), themeDelegate)); - builder.show(); + popupLayout.getSwipeBack().openForeground(foregroundIndex); }); + popupLayout.addView(new ActionBarPopupWindow.GapView(contentView.getContext(), themeDelegate), LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 8)); + } + + FrameLayout sponsoredAbout = new FrameLayout(getParentActivity()) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + }; + sponsoredAbout.setMinimumHeight(AndroidUtilities.dp(56)); + sponsoredAbout.setBackground(Theme.createRadSelectorDrawable(getThemedColor(Theme.key_dialogButtonSelector), popupLayout.getItemsCount() <= 0 ? 
6 : 0, 0)); + sponsoredAbout.setPadding(AndroidUtilities.dp(18), 0, AndroidUtilities.dp(18), 0); - View gap = new View(getParentActivity()); - gap.setMinimumWidth(AndroidUtilities.dp(196)); - gap.setTag(1000); - gap.setTag(R.id.object_tag, 1); - popupLayout.addView(gap); - LinearLayout.LayoutParams layoutParams = (LinearLayout.LayoutParams) cell.getLayoutParams(); - if (LocaleController.isRTL) { - layoutParams.gravity = Gravity.RIGHT; + ImageView infoImage = new ImageView(getParentActivity()); + infoImage.setScaleType(ImageView.ScaleType.CENTER); + infoImage.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_actionBarDefaultSubmenuItemIcon), PorterDuff.Mode.MULTIPLY)); + infoImage.setImageResource(R.drawable.msg_info); + sponsoredAbout.addView(infoImage, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 40, Gravity.CENTER_VERTICAL | (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT))); + + TextView infoText = new TextView(getParentActivity()) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + if (MeasureSpec.getMode(widthMeasureSpec) == MeasureSpec.AT_MOST && getLayout() != null) { + Layout layout = getLayout(); + int width = 0; + for (int i = 0; i < layout.getLineCount(); ++i) { + width = Math.max(width, (int) layout.getLineWidth(i)); + } + widthMeasureSpec = MeasureSpec.makeMeasureSpec(getPaddingLeft() + width + getPaddingRight(), MeasureSpec.EXACTLY); + } + super.onMeasure(widthMeasureSpec, heightMeasureSpec); } - layoutParams.width = LayoutHelper.MATCH_PARENT; - layoutParams.height = AndroidUtilities.dp(6); - gap.setLayoutParams(layoutParams); - } + }; + infoText.setMaxLines(3); + infoText.setGravity(Gravity.LEFT); + infoText.setEllipsize(TextUtils.TruncateAt.END); + infoText.setTextColor(getThemedColor(Theme.key_actionBarDefaultSubmenuItem)); + infoText.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + infoText.setMaxWidth(AndroidUtilities.dp(240)); + infoText.setText(LocaleController.getString("SponsoredMessageInfo", R.string.SponsoredMessageInfo).replace('\n', ' ')); + infoText.setPadding(LocaleController.isRTL ? 0 : AndroidUtilities.dp(43), 0, LocaleController.isRTL ? AndroidUtilities.dp(43) : 0, 0); + sponsoredAbout.addView(infoText, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, (LocaleController.isRTL ? 
Gravity.RIGHT : Gravity.LEFT) | Gravity.CENTER_VERTICAL)); + + popupLayout.addView(sponsoredAbout); + sponsoredAbout.setOnClickListener(v1 -> { + if (contentView == null || getParentActivity() == null) { + return; + } + BottomSheet.Builder builder = new BottomSheet.Builder(contentView.getContext()); + builder.setCustomView(new SponsoredMessageInfoView(getParentActivity(), themeDelegate)); + builder.show(); + }); + popupLayout.addView(new ActionBarPopupWindow.GapView(contentView.getContext(), themeDelegate), LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 8)); + } + scrimPopupWindowItems = new ActionBarMenuSubItem[items.size()]; + for (int a = 0, N = items.size(); a < N; a++) { ActionBarMenuSubItem cell = new ActionBarMenuSubItem(getParentActivity(), a == 0, a == N - 1, themeDelegate); cell.setMinimumWidth(AndroidUtilities.dp(200)); cell.setTextAndIcon(items.get(a), icons.get(a)); Integer option = options.get(a); - if (option == 1 && selectedObject.messageOwner.ttl_period != 0) { + if (option == 1 && selectedObject != null && selectedObject.messageOwner.ttl_period != 0) { menuDeleteItem = cell; updateDeleteItemRunnable.run(); cell.setSubtextColor(getThemedColor(Theme.key_windowBackgroundWhiteGrayText6)); @@ -24918,7 +25550,7 @@ public void hideMenu() { @Override public void onSwipeBackProgress(PopupSwipeBackLayout layout, float toProgress, float progress) { if (toProgress == 0 && !isEnter) { - finalReactionsLayout.startEnterAnimation(); + finalReactionsLayout.startEnterAnimation(false); isEnter = true; } else if (toProgress == 1 && isEnter) { finalReactionsLayout.setAlpha(1f - progress); @@ -25014,6 +25646,7 @@ public void dismiss() { } ReactionsContainerLayout finalReactionsLayout1 = reactionsLayout; + reactionsLayout.setParentLayout(scrimPopupContainerLayout); scrimPopupWindow = new ActionBarPopupWindow(scrimPopupContainerLayout, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT) { @Override public void dismiss() { @@ -25034,7 +25667,7 @@ public void dismiss() { } else { scrimPopupWindowHideDimOnDismiss = true; } - if (chatActivityEnterView != null) { + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { chatActivityEnterView.getEditField().setAllowDrawCursor(true); } } @@ -25051,7 +25684,11 @@ public void dismiss(boolean animated) { scrimPopupWindow.setDismissAnimationDuration(220); scrimPopupWindow.setOutsideTouchable(true); scrimPopupWindow.setClippingEnabled(true); - scrimPopupWindow.setAnimationStyle(R.style.PopupContextAnimation); + if (!isReactionsAvailable || reactionsLayout == null || !ReactionsContainerLayout.allowSmoothEnterTransition()) { + scrimPopupWindow.setAnimationStyle(R.style.PopupContextAnimation); + } else { + scrimPopupWindow.setAnimationStyle(0); + } scrimPopupWindow.setFocusable(true); scrimPopupContainerLayout.measure(View.MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), View.MeasureSpec.AT_MOST), View.MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), View.MeasureSpec.AT_MOST)); scrimPopupWindow.setInputMethodMode(ActionBarPopupWindow.INPUT_METHOD_NOT_NEEDED); @@ -25100,7 +25737,7 @@ public void dismiss(boolean animated) { } scrimPopupWindow.showAtLocation(chatListView, Gravity.LEFT | Gravity.TOP, finalPopupX, finalPopupY); if (isReactionsAvailable && finalReactionsLayout != null) { - finalReactionsLayout.startEnterAnimation(); + finalReactionsLayout.startEnterAnimation(true); } AndroidUtilities.runOnUIThread(() -> { if (scrimPopupWindowItems != null && scrimPopupWindowItems.length > 0 && scrimPopupWindowItems[0] 
!= null) { @@ -25125,6 +25762,9 @@ public void dismiss(boolean animated) { if (undoView != null) { undoView.hide(true, 1); } + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { + chatActivityEnterView.getEditField().setAllowDrawCursor(false); + } return true; } @@ -25132,6 +25772,7 @@ public void dismiss(boolean animated) { return false; } + createActionMode(); final ActionBarMenu actionMode = actionBar.createActionMode(); View item = actionMode.getItem(forward); if (item != null) { @@ -25143,9 +25784,11 @@ public void dismiss(boolean animated) { if (item != null) { item.setVisibility(View.VISIBLE); } + createBottomMessagesActionButtons(); bottomMessagesActionContainer.setVisibility(View.VISIBLE); int translationY = chatActivityEnterView.getMeasuredHeight() - AndroidUtilities.dp(51); + createActionMode(); if (chatActivityEnterView.getVisibility() == View.VISIBLE) { ArrayList views = new ArrayList<>(); views.add(chatActivityEnterView); @@ -25163,6 +25806,9 @@ public void dismiss(boolean animated) { if (getParentActivity() instanceof LaunchActivity) { ((LaunchActivity) getParentActivity()).hideVisibleActionMode(); } + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { + chatActivityEnterView.getEditField().setAllowDrawCursor(false); + } } else if (bottomOverlayChat.getVisibility() == View.VISIBLE) { if (NekoConfig.showBottomActionsWhenSelecting.Bool()) actionBar.showActionMode(true, bottomMessagesActionContainer, null, new View[]{bottomOverlayChat}, new boolean[]{true}, chatListView, translationY); @@ -25196,7 +25842,9 @@ public void dismiss(boolean animated) { chatActivityEnterView.preventInput = true; } - selectedMessagesCountTextView.setNumber(selectedMessagesIds[0].size() + selectedMessagesIds[1].size(), false); + if (selectedMessagesCountTextView != null) { + selectedMessagesCountTextView.setNumber(selectedMessagesIds[0].size() + selectedMessagesIds[1].size(), false); + } updateVisibleRows(); if (chatActivityEnterView != null) { chatActivityEnterView.hideBotCommands(); @@ -25204,6 +25852,81 @@ public void dismiss(boolean animated) { return false; } + private void createEmptyView() { + if (emptyViewContainer != null || getContext() == null) { + return; + } + + emptyViewContainer = new FrameLayout(getContext()); + emptyViewContainer.setOnTouchListener((v, event) -> true); + emptyViewContainer.setVisibility(View.INVISIBLE); + contentView.addView(emptyViewContainer, 1, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + + int distance = getArguments().getInt("nearby_distance", -1); + if ((distance >= 0 || preloadedGreetingsSticker != null) && currentUser != null && !userBlocked) { + greetingsViewContainer = new ChatGreetingsView(getContext(), currentUser, distance, currentAccount, preloadedGreetingsSticker, themeDelegate); + greetingsViewContainer.setListener((sticker) -> { + animatingDocuments.put(sticker, 0); + SendMessagesHelper.getInstance(currentAccount).sendSticker(sticker, null, dialog_id, null, null, null, null, true, 0, false); + }); + greetingsViewContainer.setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(10), greetingsViewContainer, contentView, getThemedPaint(Theme.key_paint_chatActionBackground))); + emptyViewContainer.addView(greetingsViewContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 68, 0, 68, 0)); + } else if (currentEncryptedChat == null) { + if (isTopic) { + CreateTopicEmptyView 
createTopicEmptyView = new CreateTopicEmptyView(getContext(), contentView, themeDelegate); + emptyViewContainer.addView(createTopicEmptyView, new FrameLayout.LayoutParams(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + } else if (!isThreadChat() && chatMode == 0 && ((currentUser != null && currentUser.self) || (currentChat != null && currentChat.creator && !ChatObject.isChannelAndNotMegaGroup(currentChat)))) { + bigEmptyView = new ChatBigEmptyView(getContext(), contentView, currentChat != null ? ChatBigEmptyView.EMPTY_VIEW_TYPE_GROUP : ChatBigEmptyView.EMPTY_VIEW_TYPE_SAVED, themeDelegate); + emptyViewContainer.addView(bigEmptyView, new FrameLayout.LayoutParams(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + if (currentChat != null) { + bigEmptyView.setStatusText(AndroidUtilities.replaceTags(LocaleController.getString("GroupEmptyTitle1", R.string.GroupEmptyTitle1))); + } + } else { + String emptyMessage = null; + if (isThreadChat()) { + if (isComments) { + emptyMessage = LocaleController.getString("NoComments", R.string.NoComments); + } else { + emptyMessage = LocaleController.getString("NoReplies", R.string.NoReplies); + } + } else if (chatMode == MODE_SCHEDULED) { + emptyMessage = LocaleController.getString("NoScheduledMessages", R.string.NoScheduledMessages); + } else if (currentUser != null && currentUser.id != 777000 && currentUser.id != 429000 && currentUser.id != 4244000 && MessagesController.isSupportUser(currentUser)) { + emptyMessage = LocaleController.getString("GotAQuestion", R.string.GotAQuestion); + } else if (currentUser == null || currentUser.self || currentUser.deleted || userBlocked) { + emptyMessage = LocaleController.getString("NoMessages", R.string.NoMessages); + } + if (emptyMessage == null) { + greetingsViewContainer = new ChatGreetingsView(getContext(), currentUser, distance, currentAccount, preloadedGreetingsSticker, themeDelegate); + greetingsViewContainer.setListener((sticker) -> { + animatingDocuments.put(sticker, 0); + SendMessagesHelper.getInstance(currentAccount).sendSticker(sticker, null, dialog_id, null, null, null, null, true, 0, false); + }); + greetingsViewContainer.setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(10), greetingsViewContainer, contentView, getThemedPaint(Theme.key_paint_chatActionBackground))); + emptyViewContainer.addView(greetingsViewContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 68, 0, 68, 0)); + } else { + emptyView = new TextView(getContext()); + emptyView.setText(emptyMessage); + emptyView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + emptyView.setGravity(Gravity.CENTER); + emptyView.setTextColor(getThemedColor(Theme.key_chat_serviceText)); + emptyView.setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(6), emptyView, contentView, getThemedPaint(Theme.key_paint_chatActionBackground))); + emptyView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + emptyView.setPadding(AndroidUtilities.dp(10), AndroidUtilities.dp(2), AndroidUtilities.dp(10), AndroidUtilities.dp(3)); + emptyViewContainer.addView(emptyView, new FrameLayout.LayoutParams(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + } + } + } else { + bigEmptyView = new ChatBigEmptyView(getContext(), contentView, ChatBigEmptyView.EMPTY_VIEW_TYPE_SECRET, themeDelegate); + if (currentEncryptedChat.admin_id == getUserConfig().getClientUserId()) { + 
bigEmptyView.setStatusText(LocaleController.formatString("EncryptedPlaceholderTitleOutgoing", R.string.EncryptedPlaceholderTitleOutgoing, UserObject.getFirstName(currentUser))); + } else { + bigEmptyView.setStatusText(LocaleController.formatString("EncryptedPlaceholderTitleIncoming", R.string.EncryptedPlaceholderTitleIncoming, UserObject.getFirstName(currentUser))); + } + emptyViewContainer.addView(bigEmptyView, new FrameLayout.LayoutParams(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + } + } + public void closeMenu() { closeMenu(true); } @@ -25236,8 +25959,8 @@ private void closeMenu(boolean hideDim) { Runnable updateReactionRunnable; - private void selectReaction(MessageObject primaryMessage, ReactionsContainerLayout reactionsLayout, View fromView, float x, float y, ReactionsLayoutInBubble.VisibleReaction visibleReaction, boolean fromDoubleTap, boolean bigEmoji, boolean addToRecent) { - if (isInScheduleMode()) { + public void selectReaction(MessageObject primaryMessage, ReactionsContainerLayout reactionsLayout, View fromView, float x, float y, ReactionsLayoutInBubble.VisibleReaction visibleReaction, boolean fromDoubleTap, boolean bigEmoji, boolean addToRecent) { + if (isInScheduleMode() || primaryMessage == null) { return; } @@ -25363,6 +26086,9 @@ private void startEditingMessageObject(MessageObject messageObject) { if (messageObject == null || getParentActivity() == null) { return; } + if (selectionReactionsOverlay != null && selectionReactionsOverlay.isVisible()) { + selectionReactionsOverlay.setHiddenByScroll(true); + } if (searchItem != null && actionBar.isSearchFieldVisible()) { actionBar.closeSearchField(); chatActivityEnterView.setFieldFocused(); @@ -25486,7 +26212,7 @@ private CharSequence getMessageContent(MessageObject messageObject, long previou String restrictionReason = MessagesController.getRestrictionReason(messageObject.messageOwner.restriction_reason); if (!TextUtils.isEmpty(restrictionReason)) { str.append(restrictionReason); - } else if (messageObject.caption != null){ + } else if (messageObject.caption != null) { str.append(messageObject.caption); } else { str.append(messageObject.messageText); @@ -25531,6 +26257,10 @@ public void openReportChat(int type) { fragment.chatActivityDelegate = new ChatActivityDelegate() { @Override public void onReport() { + createUndoView(); + if (undoView == null) { + return; + } undoView.showWithAction(0, UndoView.ACTION_REPORT_SENT, null); } }; @@ -25613,7 +26343,7 @@ private void processSelectedOption(int option) { forwardingMessageGroup = selectedObjectGroup; Bundle args = new Bundle(); args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 3); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_FORWARD); args.putInt("messagesCount", forwardingMessageGroup == null ? 1 : forwardingMessageGroup.messages.size()); args.putInt("hasPoll", forwardingMessage.isPoll() ? (forwardingMessage.isPublicPoll() ? 
2 : 1) : 0); args.putBoolean("hasInvoice", forwardingMessage.isInvoice()); @@ -25634,6 +26364,10 @@ private void processSelectedOption(int option) { AndroidUtilities.addToClipboard(getMessageContent(selectedObject, 0, false)); } } + createUndoView(); + if (undoView == null) { + return; + } undoView.showWithAction(0, UndoView.ACTION_MESSAGE_COPIED, null); break; } @@ -25980,6 +26714,7 @@ private void processSelectedOption(int option) { checks[1] = !checks[1]; cell1.setChecked(checks[1], true); }); + builder.setCustomViewOffset(6); builder.setView(frameLayout); } } else if (ChatObject.isChannel(currentChat) && currentChat.megagroup || currentChat != null && !ChatObject.isChannel(currentChat)) { @@ -26004,6 +26739,7 @@ private void processSelectedOption(int option) { checks[0] = !checks[0]; cell1.setChecked(checks[0], true); }); + builder.setCustomViewOffset(9); builder.setView(frameLayout); } } else { @@ -26020,9 +26756,8 @@ private void processSelectedOption(int option) { bulletin.show(); View view = bulletin.getLayout(); view.postDelayed(() -> { - if (!NekoConfig.disableVibration.Bool()) { + if (!NekoConfig.disableVibration.Bool()) view.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } }, 550); }); builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); @@ -26363,9 +27098,9 @@ private int processSelectedOptionLongClick(ActionBarMenuSubItem cell, int option } @Override - public void didSelectDialogs(DialogsActivity fragment, ArrayList dids, CharSequence message, boolean param) { + public boolean didSelectDialogs(DialogsActivity fragment, ArrayList dids, CharSequence message, boolean param, TopicsFragment topicsFragment) { if (forwardingMessage == null && selectedMessagesIds[0].size() == 0 && selectedMessagesIds[1].size() == 0) { - return; + return false; } ArrayList fmessages = new ArrayList<>(); if (forwardingMessage != null) { @@ -26374,8 +27109,6 @@ public void didSelectDialogs(DialogsActivity fragment, ArrayList= 0; a--) { ArrayList ids = new ArrayList<>(); @@ -26390,6 +27123,25 @@ public void didSelectDialogs(DialogsActivity fragment, ArrayList= 0; a--) { selectedMessagesCanCopyIds[a].clear(); selectedMessagesCanStarIds[a].clear(); selectedMessagesIds[a].clear(); @@ -26412,12 +27164,15 @@ public void didSelectDialogs(DialogsActivity fragment, ArrayList= 0; a--) { selectedMessagesIds[a].clear(); selectedMessagesCanCopyIds[a].clear(); @@ -26551,7 +27316,8 @@ private void clearSelectionMode() { } hideActionMode(); updatePinnedMessageView(true); - updateVisibleRows(); + updateVisibleRows(suppressUpdateMessageObject); + updateSelectedMessageReactions(); } public void onListItemAnimatorTick() { @@ -26601,6 +27367,10 @@ public boolean isReplyChatComment() { } private void updateVisibleRows() { + updateVisibleRows(false); + } + + private void updateVisibleRows(boolean suppressUpdateMessageObject) { if (chatListView == null) { return; } @@ -26648,7 +27418,7 @@ private void updateVisibleRows() { cell.setChecked(false, false, true); } - if (!cell.getMessageObject().deleted || cell.linkedChatId != linkedChatId) { + if ((!cell.getMessageObject().deleted || cell.linkedChatId != linkedChatId) && !suppressUpdateMessageObject) { cell.setIsUpdating(true); cell.linkedChatId = chatInfo != null ? 
chatInfo.linked_chat_id : 0; cell.setMessageObject(cell.getMessageObject(), cell.getCurrentMessagesGroup(), cell.isPinnedBottom(), cell.isPinnedTop()); @@ -26669,7 +27439,9 @@ private void updateVisibleRows() { cell.setSpoilersSuppressed(chatListView.getScrollState() != RecyclerView.SCROLL_STATE_IDLE); } else if (view instanceof ChatActionCell) { ChatActionCell cell = (ChatActionCell) view; - cell.setMessageObject(cell.getMessageObject()); + if (!suppressUpdateMessageObject) { + cell.setMessageObject(cell.getMessageObject()); + } cell.setSpoilersSuppressed(chatListView.getScrollState() != RecyclerView.SCROLL_STATE_IDLE); } } @@ -26804,6 +27576,10 @@ public boolean canScheduleMessage() { return currentEncryptedChat == null && (bottomOverlayChat == null || bottomOverlayChat.getVisibility() != View.VISIBLE) && !isThreadChat(); } + public boolean canSendMessage() { + return currentEncryptedChat == null && (bottomOverlayChat == null || bottomOverlayChat.getVisibility() != View.VISIBLE); + } + public boolean isInScheduleMode() { return chatMode == MODE_SCHEDULED; } @@ -26900,6 +27676,25 @@ public void sendMedia(MediaController.PhotoEntry photoEntry, VideoEditedInfo vid afterMessageSend(); } + public void sendAnimatedEmoji(TLRPC.Document emoji, boolean notify, int scheduleDate) { + if (emoji == null) { + return; + } + String message = MessageObject.findAnimatedEmojiEmoticon(emoji, null); + if (message == null) { + return; + } + ArrayList entities = new ArrayList<>(); + TLRPC.TL_messageEntityCustomEmoji entity = new TLRPC.TL_messageEntityCustomEmoji(); + entity.document = emoji; + entity.document_id = emoji.id; + entity.offset = 0; + entity.length = message.length(); + entities.add(entity); + SendMessagesHelper.getInstance(currentAccount).sendMessage(message, dialog_id, replyingMessageObject, getThreadMessage(), null, false, entities, null, null, notify, scheduleDate, null, false); + afterMessageSend(); + } + public void showOpenGameAlert(final TLRPC.TL_game game, final MessageObject messageObject, final String urlStr, boolean ask, final long uid) { TLRPC.User user = getMessagesController().getUser(uid); if (ask) { @@ -26998,7 +27793,9 @@ private void processLoadedDiscussionMessage(boolean noDiscussion, TLRPC.TL_messa }, 200); presentFragment(chatActivity); if (isKeyboardVisible() && !chatActivity.hideKeyboardOnShow()) { - chatActivity.chatActivityEnterView.getEditField().requestFocus(); + if (chatActivity.chatActivityEnterView != null && chatActivity.chatActivityEnterView.getEditField() != null) { + chatActivity.chatActivityEnterView.getEditField().requestFocus(); + } } }; if (history != null) { @@ -27223,7 +28020,6 @@ public void showRequestUrlAlert(final TLRPC.TL_urlAuthResultRequest request, TLR } }); } - builder.setCustomViewOffset(12); builder.setView(linearLayout); builder.setPositiveButton(LocaleController.getString("Open", R.string.Open), (dialogInterface, i) -> { if (!cells[0].isChecked()) { @@ -27298,13 +28094,18 @@ public void openVCard(TLRPC.User user, String phone, String vcard, String first_ } try { - File f = AndroidUtilities.getSharingDirectory(); - f.mkdirs(); - f = new File(f, "vcard.vcf"); - BufferedWriter writer = new BufferedWriter(new FileWriter(f)); - writer.write(vcard); - writer.close(); - showDialog(new PhonebookShareAlert(this, null, user, null, f, first_name, last_name, themeDelegate)); + File f; + if (!TextUtils.isEmpty(vcard)) { + f = AndroidUtilities.getSharingDirectory(); + f.mkdirs(); + f = new File(f, "vcard.vcf"); + BufferedWriter writer = new BufferedWriter(new 
FileWriter(f)); + writer.write(vcard); + writer.close(); + } else { + f = null; + } + showDialog(new PhonebookShareAlert(this, null, user, null, f, PhoneFormat.stripExceptNumbers(phone), first_name, last_name, themeDelegate)); } catch (Exception e) { FileLog.e(e); } @@ -27401,6 +28202,10 @@ private void openClickableLink(CharacterStyle url, String str, boolean longPress } else { AndroidUtilities.addToClipboard(str); } + createUndoView(); + if (undoView == null) { + return Unit.INSTANCE; + } if (str.startsWith("@")) { undoView.showWithAction(0, UndoView.ACTION_USERNAME_COPIED, null); } else if (str.startsWith("#") || str.startsWith("$")) { @@ -27408,7 +28213,6 @@ private void openClickableLink(CharacterStyle url, String str, boolean longPress } else { undoView.showWithAction(0, UndoView.ACTION_LINK_COPIED, null); } - } return Unit.INSTANCE; }); @@ -27525,8 +28329,8 @@ public void end(boolean replaced) { private void processExternalUrl(int type, String url, CharacterStyle span, ChatMessageCell cell, boolean forceAlert) { try { - Uri uri = Uri.parse(url); - String host = uri.getHost() != null ? uri.getHost().toLowerCase() : ""; + + String host = AndroidUtilities.getHostAuthority(url); if ((currentEncryptedChat == null || getMessagesController().secretWebpagePreview == 1) && getMessagesController().authDomains.contains(host)) { getSendMessagesHelper().requestUrlAuth(url, this, type == 0 || type == 2); return; @@ -27560,7 +28364,10 @@ private void didPressMessageUrl(CharacterStyle url, boolean longPress, MessageOb if (url instanceof URLSpanMono) { if (!noforwardsOverride) { ((URLSpanMono) url).copyToClipboard(); - getUndoView().showWithAction(0, UndoView.ACTION_TEXT_COPIED, null); + UndoView undoView = getUndoView(); + if (undoView != null) { + undoView.showWithAction(0, UndoView.ACTION_TEXT_COPIED, null); + } } if (longPress && cell != null) { cell.resetPressedLink(-1); @@ -27631,7 +28438,9 @@ private void didPressMessageUrl(CharacterStyle url, boolean longPress, MessageOb if (longPress && cell != null) { cell.resetPressedLink(-1); } - showDialog(new AudioPlayerAlert(getContext(), themeDelegate)); + if (!messageObject.isVoice()) { + showDialog(new AudioPlayerAlert(getContext(), themeDelegate)); + } } else if (str.startsWith("card:")) { final ChatMessageCell finalCell = cell; String number = str.substring(5); @@ -27731,7 +28540,10 @@ private void didPressMessageUrl(CharacterStyle url, boolean longPress, MessageOb if (which == 1) { // Copy AndroidUtilities.addToClipboard(url1); - if (mail) { + createUndoView(); + if (undoView == null) { + return Unit.INSTANCE; + }if (mail) { undoView.showWithAction(0, UndoView.ACTION_EMAIL_COPIED, null); } else if (tel) { undoView.showWithAction(0, UndoView.ACTION_PHONE_COPIED, null); @@ -27880,7 +28692,7 @@ public class ChatActivityAdapter extends RecyclerAnimationScrollHelper.Animatabl private int botInfoEmptyRow = -5; private int loadingUpRow = -5; private int loadingDownRow = -5; - private int messagesStartRow; + public int messagesStartRow; private int messagesEndRow; public boolean isFrozen; @@ -27990,2114 +28802,2331 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType view = new ChatMessageCell(mContext, true, themeDelegate); } ChatMessageCell chatMessageCell = (ChatMessageCell) view; + chatMessageCell.setResourcesProvider(themeDelegate); chatMessageCell.shouldCheckVisibleOnScreen = true; - chatMessageCell.setDelegate(new ChatMessageCell.ChatMessageCellDelegate() { - + chatMessageCell.setDelegate(new 
ChatMessageCellDelegate()); + if (currentEncryptedChat == null) { + chatMessageCell.setAllowAssistant(true); + } + } else if (viewType == 1) { + view = new ChatActionCell(mContext, true, themeDelegate) { @Override - public void didPressHint(ChatMessageCell cell, int type) { - if (type == 0) { - TLRPC.TL_messageMediaPoll media = (TLRPC.TL_messageMediaPoll) cell.getMessageObject().messageOwner.media; - showPollSolution(cell.getMessageObject(), media.results); - } else if (type == 1) { - MessageObject messageObject = cell.getMessageObject(); - if (messageObject.messageOwner.fwd_from == null || TextUtils.isEmpty(messageObject.messageOwner.fwd_from.psa_type)) { - return; - } - CharSequence text = LocaleController.getString("PsaMessageInfo_" + messageObject.messageOwner.fwd_from.psa_type); - if (TextUtils.isEmpty(text)) { - text = LocaleController.getString("PsaMessageInfoDefault", R.string.PsaMessageInfoDefault); - } - SpannableStringBuilder stringBuilder = new SpannableStringBuilder(text); - MessageObject.addLinks(false, stringBuilder); - MessageObject.GroupedMessages group = cell.getCurrentMessagesGroup(); - if (group != null) { - for (int a = 0, N = group.posArray.size(); a < N; a++) { - MessageObject.GroupedMessagePosition pos = group.posArray.get(a); - if ((pos.flags & MessageObject.POSITION_FLAG_LEFT) != 0) { - MessageObject m = group.messages.get(a); - if (m != messageObject) { - messageObject = m; - int count = chatListView.getChildCount(); - for (int b = 0; b < count; b++) { - View view = chatListView.getChildAt(b); - if (!(view instanceof ChatMessageCell)) { - continue; - } - ChatMessageCell c = (ChatMessageCell) view; - if (messageObject.equals(c.getMessageObject())) { - cell = c; - } - } - } - break; - } - } - } - showInfoHint(messageObject, stringBuilder, 1); - } - cell.showHintButton(false, true, type); + public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { + super.onInitializeAccessibilityNodeInfo(info); + // if alpha == 0, then visibleToUser == false, so we need to override it + // to keep accessibility working correctly + info.setVisibleToUser(true); + } + }; + ((ChatActionCell) view).setInvalidateColors(true); + ((ChatActionCell) view).setDelegate(new ChatActionCell.ChatActionCellDelegate() { + @Override + public void didOpenPremiumGift(ChatActionCell cell, TLRPC.TL_premiumGiftOption giftOption, boolean animateConfetti) { + showDialog(new PremiumPreviewBottomSheet(ChatActivity.this, currentAccount, getCurrentUser(), new GiftPremiumBottomSheet.GiftTier(giftOption), themeDelegate) + .setAnimateConfetti(animateConfetti) + .setOutboundGift(cell.getMessageObject().isOut())); } @Override - public boolean shouldDrawThreadProgress(ChatMessageCell cell) { - MessageObject.GroupedMessages group = cell.getCurrentMessagesGroup(); - MessageObject message; - if (group != null && !group.messages.isEmpty()) { - message = group.messages.get(0); - } else { - message = cell.getMessageObject(); - } - if (message == null) { - return false; - } - return message.getId() == commentLoadingMessageId; + public void needShowEffectOverlay(ChatActionCell cell, TLRPC.Document document, TLRPC.VideoSize videoSize) { + emojiAnimationsOverlay.showAnimationForActionCell(cell, document, videoSize); } @Override - public void didPressSideButton(ChatMessageCell cell) { - if (getParentActivity() == null) { - return; - } - if (chatActivityEnterView != null) { - chatActivityEnterView.closeKeyboard(); + public void didClickImage(ChatActionCell cell) { + MessageObject message = 
cell.getMessageObject(); + PhotoViewer.getInstance().setParentActivity(ChatActivity.this, themeDelegate); + TLRPC.PhotoSize photoSize = FileLoader.getClosestPhotoSizeWithSize(message.photoThumbs, 640); + TLRPC.VideoSize videoSize = null; + TLRPC.VideoSize emojiMarkup = null; + if (message.messageOwner.action.photo.video_sizes != null && !message.messageOwner.action.photo.video_sizes.isEmpty()) { + videoSize = FileLoader.getClosestVideoSizeWithSize(message.messageOwner.action.photo.video_sizes, 1000); + emojiMarkup = FileLoader.getEmojiMarkup(message.messageOwner.action.photo.video_sizes); } - MessageObject messageObject = cell.getMessageObject(); - if (chatMode == MODE_PINNED) { - chatActivityDelegate.openReplyMessage(messageObject.getId()); - finishFragment(); - } else if ((UserObject.isReplyUser(currentUser) || UserObject.isUserSelf(currentUser)) && messageObject.messageOwner.fwd_from.saved_from_peer != null) { - if (UserObject.isReplyUser(currentUser) && messageObject.messageOwner.reply_to != null && messageObject.messageOwner.reply_to.reply_to_top_id != 0) { - openDiscussionMessageChat(messageObject.messageOwner.reply_to.reply_to_peer_id.channel_id, null, messageObject.messageOwner.reply_to.reply_to_top_id, 0, -1, messageObject.messageOwner.fwd_from.saved_from_msg_id, messageObject); - } else { - openOriginalReplyChat(messageObject); + if (cell.getMessageObject().type == MessageObject.TYPE_SUGGEST_PHOTO && !message.isOutOwner()) { + if (message.settingAvatar) { + return; } - } else { - ArrayList arrayList = null; - if (messageObject.getGroupId() != 0) { - MessageObject.GroupedMessages groupedMessages = groupedMessagesMap.get(messageObject.getGroupId()); - if (groupedMessages != null) { - arrayList = groupedMessages.messages; - } + if (emojiMarkup != null) { + PhotoUtilities.showAvatartConstructorForUpdateUserPhoto(ChatActivity.this, emojiMarkup); + return; } - if (arrayList == null) { - arrayList = new ArrayList<>(); - arrayList.add(messageObject); + final ArrayList photos = new ArrayList<>(); + ImageLocation.getForPhoto(videoSize, message.messageOwner.action.photo); + File file = videoSize == null ? 
getFileLoader().getPathToAttach(message.messageOwner.action.photo) : getFileLoader().getPathToAttach(videoSize); + File file2 = new File(FileLoader.getDirectory(FileLoader.MEDIA_DIR_CACHE), file.getName()); + if (!file.exists()) { + if (file2.exists()) { + file = file2; + } else { + //TODO photo not downloaded yet + return; + } } - showDialog(new ShareAlert(mContext, ChatActivity.this, arrayList, null, null, ChatObject.isChannel(currentChat), null, null, false, false, themeDelegate) { + final MediaController.PhotoEntry entry = new MediaController.PhotoEntry(0, 0, 0, file.getAbsolutePath(), 0, false, 0, 0, 0); + entry.caption = chatActivityEnterView.getFieldText(); + entry.isVideo = videoSize != null; + photos.add(entry); + + PhotoViewer.getInstance().openPhotoForSelect(photos, 0, PhotoViewer.SELECT_TYPE_AVATAR, false, new PhotoViewer.EmptyPhotoViewerProvider() { @Override - public void dismissInternal() { - super.dismissInternal(); - AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); - if (chatActivityEnterView.getVisibility() == View.VISIBLE) { - fragmentView.requestLayout(); - } + public PhotoViewer.PlaceProviderObject getPlaceForPhoto(MessageObject messageObject, TLRPC.FileLocation fileLocation, int index, boolean needPreview) { + return photoViewerProvider.getPlaceForPhoto(message, fileLocation, index, needPreview); } @Override - protected void onSend(LongSparseArray dids, int count, TLRPC.TL_forumTopic topic) { - if (dids.size() == 1) { - undoView.showWithAction(dids.valueAt(0).id, UndoView.ACTION_FWD_MESSAGES, count, topic, null, null); + public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { + message.settingAvatar = true; + if (entry.imagePath != null || entry.isVideo) { + PhotoUtilities.setImageAsAvatar(entry, ChatActivity.this, () -> { + message.settingAvatar = false; + }); } else { - undoView.showWithAction(0, UndoView.ACTION_FWD_MESSAGES, count, dids.size(), null, null); - } - } - }); - AndroidUtilities.setAdjustResizeToNothing(getParentActivity(), classGuid); - fragmentView.requestLayout(); - } - } - - @Override - public boolean needPlayMessage(MessageObject messageObject, boolean muted) { - if (messageObject.isVoice() || messageObject.isRoundVideo()) { - boolean result = MediaController.getInstance().playMessage(messageObject, muted); - MediaController.getInstance().setVoiceMessagesPlaylist(result ? 
createVoiceMessagesPlaylist(messageObject, false) : null, false); - return result; - } else if (messageObject.isMusic()) { - return MediaController.getInstance().setPlaylist(messages, messageObject, mergeDialogId); - } - return false; - } - - @Override - public void videoTimerReached() { - showNoSoundHint(); - } - - @Override - public void didPressTime(ChatMessageCell cell) { - undoView.showWithAction(dialog_id, UndoView.ACTION_IMPORT_INFO, null); - } - - @Override - public void didPressChannelAvatar(ChatMessageCell cell, TLRPC.Chat chat, int postId, float touchX, float touchY) { - if (chat == null) { - return; - } - if (actionBar.isActionModeShowed() || reportType >= 0) { - processRowSelect(cell, true, touchX, touchY); - return; - } - openChat(cell, chat, postId); - } - - @Override - public void didPressHiddenForward(ChatMessageCell cell) { - if (cell.getMessageObject().isImportedForward()) { - didPressTime(cell); - return; - } - showForwardHint(cell); - } - - @Override - public void didPressOther(ChatMessageCell cell, float otherX, float otherY) { - MessageObject messageObject = cell.getMessageObject(); - if (messageObject.type == MessageObject.TYPE_PHONE_CALL) { - if (currentUser != null) { - VoIPHelper.startCall(currentUser, messageObject.isVideoCall(), userInfo != null && userInfo.video_calls_available, getParentActivity(), getMessagesController().getUserFull(currentUser.id), getAccountInstance()); - } - } else { - createMenu(cell, true, false, otherX, otherY, messageObject.isMusic()); - } - } - - @Override - public void didPressUserAvatar(ChatMessageCell cell, TLRPC.User user, float touchX, float touchY) { - if (actionBar.isActionModeShowed() || reportType >= 0) { - processRowSelect(cell, true, touchX, touchY); - return; - } - if (cell != null && cell.getMessageObject() != null && cell.getMessageObject().isSponsored()) { - didPressInstantButton(cell, 10); - return; - } - openProfile(user, ChatObject.isForum(currentChat) || isThreadChat()); - } + TLRPC.TL_photos_updateProfilePhoto req = new TLRPC.TL_photos_updateProfilePhoto(); + req.id = new TLRPC.TL_inputPhoto(); + req.id.id = message.messageOwner.action.photo.id; + req.id.access_hash = message.messageOwner.action.photo.access_hash; + req.id.file_reference = message.messageOwner.action.photo.file_reference; - @Override - public boolean didLongPressUserAvatar(ChatMessageCell cell, TLRPC.User user, float touchX, float touchY) { - if (isAvatarPreviewerEnabled()) { - final boolean enableMention = currentChat != null && (bottomOverlayChat == null || bottomOverlayChat.getVisibility() != View.VISIBLE) && (bottomOverlay == null || bottomOverlay.getVisibility() != View.VISIBLE); - final AvatarPreviewer.MenuItem[] menuItems = new AvatarPreviewer.MenuItem[2 + (enableMention ? 
1 : 0)]; - menuItems[0] = AvatarPreviewer.MenuItem.OPEN_PROFILE; - menuItems[1] = AvatarPreviewer.MenuItem.SEND_MESSAGE; - if (enableMention) { - menuItems[2] = AvatarPreviewer.MenuItem.MENTION; - } - final TLRPC.UserFull userFull = getMessagesController().getUserFull(user.id); - final AvatarPreviewer.Data data; - if (userFull != null) { - data = AvatarPreviewer.Data.of(userFull, menuItems); - } else { - data = AvatarPreviewer.Data.of(user, classGuid, menuItems); - } - if (AvatarPreviewer.canPreview(data)) { - AvatarPreviewer.getInstance().show((ViewGroup) fragmentView, data, item -> { - switch (item) { - case SEND_MESSAGE: - openDialog(cell, user); - break; - case OPEN_PROFILE: - openProfile(user); - break; - case MENTION: - appendMention(user); - break; + getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + if (response instanceof TLRPC.TL_photos_photo) { + TLRPC.TL_photos_photo photos_photo = (TLRPC.TL_photos_photo) response; + getMessagesController().putUsers(photos_photo.users, false); + TLRPC.User user = getMessagesController().getUser(getUserConfig().clientUserId); + if (photos_photo.photo instanceof TLRPC.TL_photo) { + if (user != null) { + PhotoUtilities.applyPhotoToUser(message.messageOwner.action.photo, user, false); + getUserConfig().setCurrentUser(user); + getUserConfig().saveConfig(true); + CharSequence title = AndroidUtilities.replaceTags(LocaleController.getString("ApplyAvatarHintTitle", R.string.ApplyAvatarHintTitle)); + CharSequence subtitle = AndroidUtilities.replaceSingleTag(LocaleController.getString("ApplyAvatarHint", R.string.ApplyAvatarHint), () -> { + Bundle args = new Bundle(); + args.putLong("user_id", UserConfig.getInstance(currentAccount).clientUserId); + presentFragment(new ProfileActivity(args)); + }); + BulletinFactory.of(ChatActivity.this).createUsersBulletin(Collections.singletonList(user), title, subtitle).show(); + } + } + } + message.settingAvatar = false; + })); } - }); - return true; - } - } - return false; - } - - private void appendMention(TLRPC.User user) { - if (chatActivityEnterView != null) { - SpannableStringBuilder sb; - final CharSequence text = chatActivityEnterView.getFieldText(); - if (text != null) { - sb = new SpannableStringBuilder(text); - if (text.charAt(text.length() - 1) != ' ') { - sb.append(" "); } - } else { - sb = new SpannableStringBuilder(); - } - if (sb.length() > 0 && sb.charAt(sb.length() - 1) != ' ') { - sb.append(' '); - } - String username = UserObject.getPublicUsername(user); - if (username != null) { - sb.append("@").append(username).append(" "); - } else { - String name = UserObject.getFirstName(user, false); - Spannable spannable = new SpannableString(name + " "); - spannable.setSpan(new URLSpanUserMention("" + user.id, 3), 0, spannable.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); - sb.append(spannable); - } - chatActivityEnterView.setFieldText(sb); - AndroidUtilities.runOnUIThread(() -> chatActivityEnterView.openKeyboard(), 200); - } - } - @Override - public boolean didLongPressChannelAvatar(ChatMessageCell cell, TLRPC.Chat chat, int postId, float touchX, float touchY) { - if (isAvatarPreviewerEnabled()) { - AvatarPreviewer.MenuItem[] menuItems = {AvatarPreviewer.MenuItem.OPEN_PROFILE}; - if (currentChat == null || currentChat.id != chat.id || isThreadChat()) { - menuItems = Arrays.copyOf(menuItems, 2); - menuItems[1] = chat.broadcast ? 
AvatarPreviewer.MenuItem.OPEN_CHANNEL : AvatarPreviewer.MenuItem.OPEN_GROUP; - } - final TLRPC.ChatFull chatFull = getMessagesController().getChatFull(chat.id); - final AvatarPreviewer.Data data; - if (chatFull != null) { - data = AvatarPreviewer.Data.of(chat, chatFull, menuItems); + }, null); + if (entry.isVideo) { + PhotoViewer.getInstance().setTitle(LocaleController.getString(R.string.SuggestedVideo)); } else { - data = AvatarPreviewer.Data.of(chat, classGuid, menuItems); - } - if (AvatarPreviewer.canPreview(data)) { - AvatarPreviewer.getInstance().show((ViewGroup) fragmentView, data, item -> { - switch (item) { - case OPEN_PROFILE: - openProfile(chat); - break; - case OPEN_GROUP: - case OPEN_CHANNEL: - openChat(cell, chat, 0); - break; - } - }); - return true; + PhotoViewer.getInstance().setTitle(LocaleController.getString(R.string.SuggestedPhoto)); } - } - return false; - } - - private void openProfile(TLRPC.User user) { - openProfile(user, false); - } - - private void openProfile(TLRPC.User user, boolean expandPhoto) { - if (user != null && user.id != getUserConfig().getClientUserId()) { - if (user.photo == null || user.photo instanceof TLRPC.TL_userProfilePhotoEmpty) { - expandPhoto = false; + ImageUpdater.AvatarFor avatarFor = new ImageUpdater.AvatarFor(getUserConfig().getCurrentUser(), ImageUpdater.TYPE_SET_PHOTO_FOR_USER); + avatarFor.isVideo = videoSize != null; + avatarFor.fromObject = getMessagesController().getUser(dialog_id); + PhotoViewer.getInstance().setAvatarFor(avatarFor); + } else if (videoSize != null) { + ImageLocation imageLocation = ImageLocation.getForPhoto(videoSize, message.messageOwner.action.photo); + PhotoViewer.getInstance().openPhoto(videoSize.location, imageLocation, photoViewerProvider); + if (cell.getMessageObject().type == MessageObject.TYPE_SUGGEST_PHOTO) { + PhotoViewer.getInstance().setTitle(LocaleController.getString("SuggestedVideo", R.string.SuggestedVideo)); } - Bundle args = new Bundle(); - args.putLong("user_id", user.id); - args.putBoolean("expandPhoto", expandPhoto); - ProfileActivity fragment = new ProfileActivity(args); - fragment.setPlayProfileAnimation(currentUser != null && currentUser.id == user.id ? 
1 : 0); - AndroidUtilities.setAdjustResizeToNothing(getParentActivity(), classGuid); - presentFragment(fragment); - } - } - - private void openProfile(TLRPC.Chat chat) { - openProfile(chat, false); - } - - private void openProfile(TLRPC.Chat chat, boolean expandPhoto) { - if (chat != null) { - Bundle args = new Bundle(); - args.putLong("chat_id", chat.id); - args.putBoolean("expandPhoto", expandPhoto); - presentFragment(new ProfileActivity(args)); - } - } - - private void openDialog(ChatMessageCell cell, TLRPC.User user) { - if (user != null) { - Bundle args = new Bundle(); - args.putLong("user_id", user.id); - if (getMessagesController().checkCanOpenChat(args, ChatActivity.this, cell.getMessageObject())) { - presentFragment(new ChatActivity(args)); + } else if (photoSize != null) { + ImageLocation imageLocation = ImageLocation.getForPhoto(photoSize, message.messageOwner.action.photo); + PhotoViewer.getInstance().openPhoto(photoSize.location, imageLocation, photoViewerProvider); + if (cell.getMessageObject().type == MessageObject.TYPE_SUGGEST_PHOTO) { + PhotoViewer.getInstance().setTitle(LocaleController.getString("SuggestedPhoto", R.string.SuggestedPhoto)); } + } else { + PhotoViewer.getInstance().openPhoto(message, null, 0, 0, 0, photoViewerProvider); } } - private void openChat(ChatMessageCell cell, TLRPC.Chat chat, int postId) { - if (currentChat != null && chat.id == currentChat.id) { - scrollToMessageId(postId, cell.getMessageObject().getId(), true, 0, true, 0); - } else if (currentChat == null || chat.id != currentChat.id || isThreadChat()) { - Bundle args = new Bundle(); - args.putLong("chat_id", chat.id); - if (postId != 0) { - args.putInt("message_id", postId); - } - if (getMessagesController().checkCanOpenChat(args, ChatActivity.this, cell.getMessageObject())) { - presentFragment(new ChatActivity(args)); - } - } + @Override + public BaseFragment getBaseFragment() { + return ChatActivity.this; } - private boolean isAvatarPreviewerEnabled() { - return UserObject.isUserSelf(currentUser) || (currentChat != null && (!ChatObject.isChannel(currentChat) || currentChat.megagroup)); + @Override + public int getTopicId() { + return ChatActivity.this.getTopicId(); } @Override - public void didPressBotButton(ChatMessageCell cell, TLRPC.KeyboardButton button) { - if (getParentActivity() == null || bottomOverlayChat.getVisibility() == View.VISIBLE && - !(button instanceof TLRPC.TL_keyboardButtonSwitchInline) && !(button instanceof TLRPC.TL_keyboardButtonCallback) && - !(button instanceof TLRPC.TL_keyboardButtonGame) && !(button instanceof TLRPC.TL_keyboardButtonUrl) && - !(button instanceof TLRPC.TL_keyboardButtonBuy) && !(button instanceof TLRPC.TL_keyboardButtonUrlAuth) && - !(button instanceof TLRPC.TL_keyboardButtonUserProfile)) { - return; + public boolean didLongPress(ChatActionCell cell, float x, float y) { + if (inPreviewMode) { + return false; } - chatActivityEnterView.didPressedBotButton(button, cell.getMessageObject(), cell.getMessageObject(), makeProgressForBotButton(cell, button instanceof TLRPC.TL_keyboardButtonUrl ? 
button.url : null)); + return createMenu(cell, false, false, x, y); } @Override - public void needShowPremiumFeatures(String source) { - presentFragment(new PremiumPreviewFragment(source)); + public void needOpenUserProfile(long uid) { + openUserProfile(uid); } @Override - public void needShowPremiumBulletin(int type) { - if (type == 0) { - if (topUndoView == null) { - return; - } - topUndoView.showWithAction(0, UndoView.ACTION_PREMIUM_TRANSCRIPTION, null, () -> { - new PremiumFeatureBottomSheet(ChatActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_VOICE_TO_TEXT, true).show(); - getMessagesController().pressTranscribeButton(); - }); - try { - topUndoView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignored) {} - } + public void didPressReplyMessage(ChatActionCell cell, int id) { + MessageObject messageObject = cell.getMessageObject(); + scrollToMessageId(id, messageObject.getId(), true, messageObject.getDialogId() == mergeDialogId ? 1 : 0, true, 0); } @Override - public void didLongPressBotButton(ChatMessageCell cell, TLRPC.KeyboardButton button) { + public void didPressBotButton(MessageObject messageObject, TLRPC.KeyboardButton button) { if (getParentActivity() == null || bottomOverlayChat.getVisibility() == View.VISIBLE && !(button instanceof TLRPC.TL_keyboardButtonSwitchInline) && !(button instanceof TLRPC.TL_keyboardButtonCallback) && !(button instanceof TLRPC.TL_keyboardButtonGame) && !(button instanceof TLRPC.TL_keyboardButtonUrl) && !(button instanceof TLRPC.TL_keyboardButtonBuy) && !(button instanceof TLRPC.TL_keyboardButtonUrlAuth) && - !(button instanceof TLRPC.TL_keyboardButtonUserProfile)) { + !(button instanceof TLRPC.TL_keyboardButtonUserProfile) && !(button instanceof TLRPC.TL_keyboardButtonRequestPeer)) { return; } - if (button instanceof TLRPC.TL_keyboardButtonUrl) { - openClickableLink(null, button.url, true, cell, cell.getMessageObject()); - try { - cell.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); - } catch (Exception ignore) {} - } + chatActivityEnterView.didPressedBotButton(button, messageObject, messageObject); } @Override - public void didPressReaction(ChatMessageCell cell, TLRPC.ReactionCount reaction, boolean longpress) { - if (getParentActivity() == null) { - return; + public boolean canDrawOutboundsContent() { + return false; + } + }); + } else if (viewType == 2) { + view = new ChatUnreadCell(mContext, themeDelegate); + } else if (viewType == 3) { + view = new BotHelpCell(mContext, themeDelegate); + ((BotHelpCell) view).setDelegate(url -> { + if (url.startsWith("@")) { + getMessagesController().openByUserName(url.substring(1), ChatActivity.this, 0); + } else if (url.startsWith("#") || url.startsWith("$")) { + DialogsActivity fragment = new DialogsActivity(null); + fragment.setSearchString(url); + presentFragment(fragment); + } else if (url.startsWith("/")) { + chatActivityEnterView.setCommand(null, url, false, false); + if (chatActivityEnterView.getFieldText() == null) { + hideFieldPanel(false); } - if (longpress) { - if (!ChatObject.isChannelAndNotMegaGroup(currentChat) || dialog_id >= 0) { - cell.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); - FrameLayout scrimPopupContainerLayout = new FrameLayout(getParentActivity()) { - @Override - public boolean dispatchKeyEvent(KeyEvent event) { - if (event.getKeyCode() == KeyEvent.KEYCODE_BACK && event.getRepeatCount() == 0) { - closeMenu(); - } - return 
super.dispatchKeyEvent(event); - } - - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - int h = Math.min(MeasureSpec.getSize(heightMeasureSpec), AndroidUtilities.dp(ReactedUsersListView.VISIBLE_ITEMS * ReactedUsersListView.ITEM_HEIGHT_DP)); - if (h == 0) { - h = AndroidUtilities.dp(ReactedUsersListView.VISIBLE_ITEMS * ReactedUsersListView.ITEM_HEIGHT_DP); - } - super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(h, MeasureSpec.AT_MOST)); - } - - Path path = new Path(); - @Override - protected void dispatchDraw(Canvas canvas) { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - canvas.save(); - path.rewind(); - path.addRoundRect(AndroidUtilities.dp(8), AndroidUtilities.dp(8), getWidth() - AndroidUtilities.dp(8), getHeight() - AndroidUtilities.dp(8), AndroidUtilities.dp(6), AndroidUtilities.dp(6), Path.Direction.CW); - canvas.clipPath(path); - super.dispatchDraw(canvas); - canvas.restore(); - } else { - super.dispatchDraw(canvas); - } - } - }; - scrimPopupContainerLayout.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT)); + } else { + processExternalUrl(0, url, null, null, false); + } + }); + } else if (viewType == 4) { + view = new ChatLoadingCell(mContext, contentView, themeDelegate); + } + view.setLayoutParams(new RecyclerView.LayoutParams(RecyclerView.LayoutParams.MATCH_PARENT, RecyclerView.LayoutParams.WRAP_CONTENT)); + return new RecyclerListView.Holder(view); + } - Rect backgroundPaddings = new Rect(); - Drawable shadowDrawable2 = ContextCompat.getDrawable(getParentActivity(), R.drawable.popup_fixed_alert).mutate(); - shadowDrawable2.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_actionBarDefaultSubmenuBackground), PorterDuff.Mode.MULTIPLY)); - shadowDrawable2.getPadding(backgroundPaddings); - scrimPopupContainerLayout.setBackground(shadowDrawable2); + @Override + public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { + if (position == botInfoRow || position == botInfoEmptyRow) { + BotHelpCell helpView = (BotHelpCell) holder.itemView; + if (UserObject.isReplyUser(currentUser)) { + helpView.setText(false, LocaleController.getString("RepliesChatInfo", R.string.RepliesChatInfo)); + } else { + TLRPC.BotInfo mBotInfo = botInfo.size() != 0 ? botInfo.get(currentUser.id) : null; + helpView.setText(true, mBotInfo != null ? mBotInfo.description : null, mBotInfo != null ? mBotInfo.description_document != null ? mBotInfo.description_document : mBotInfo.description_photo : null, mBotInfo); + } + updateBotHelpCellClick(helpView); + } else if (position == loadingDownRow || position == loadingUpRow) { + ChatLoadingCell loadingCell = (ChatLoadingCell) holder.itemView; + loadingCell.setProgressVisible(loadsCount > 1); + } else if (position >= messagesStartRow && position < messagesEndRow) { + ArrayList messages = isFrozen ? 
frozenMessages : ChatActivity.this.messages; - ReactionsLayoutInBubble.ReactionButton button = cell.getReactionButton(ReactionsLayoutInBubble.VisibleReaction.fromTLReaction(reaction.reaction)); - if (button == null) { - return; - } - float bottom = cell.reactionsLayoutInBubble.y + button.y + AndroidUtilities.dp(28); - float left = cell.reactionsLayoutInBubble.x + button.x; - int[] loc = new int[2]; - cell.getLocationInWindow(loc); - scrimPopupContainerLayout.addView(new ReactedUsersListView(getParentActivity(), themeDelegate, currentAccount, cell.getPrimaryMessageObject(), reaction, false) - .setOnCustomEmojiSelectedListener((reactedUsersListView1, customEmojiStickerSets) -> { - EmojiPacksAlert alert = new EmojiPacksAlert(ChatActivity.this, getParentActivity(), themeDelegate, customEmojiStickerSets) { - @Override - public void dismiss() { - super.dismiss(); - dimBehindView(false); - } - }; - alert.setCalcMandatoryInsets(isKeyboardVisible()); - alert.setDimBehind(false); - closeMenu(false); - showDialog(alert); - }) - .setOnProfileSelectedListener((view1, userId, messagePeerReaction) -> { - Bundle args = new Bundle(); - args.putLong("user_id", userId); - args.putInt("report_reaction_message_id", cell.getMessageObject().getId()); - args.putLong("report_reaction_from_dialog_id", dialog_id); - ProfileActivity fragment = new ProfileActivity(args); - presentFragment(fragment); - closeMenu(); - }), LayoutHelper.createFrame(240, LayoutHelper.WRAP_CONTENT)); + MessageObject message = messages.get(position - messagesStartRow); + View view = holder.itemView; - scrimPopupWindow = new ActionBarPopupWindow(scrimPopupContainerLayout, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT) { - @Override - public void dismiss() { - super.dismiss(); - if (scrimPopupWindow != this) { - return; - } - scrimPopupWindow = null; - menuDeleteItem = null; - scrimPopupWindowItems = null; - chatLayoutManager.setCanScrollVertically(true); - if (scrimPopupWindowHideDimOnDismiss) { - dimBehindView(false); - } else { - scrimPopupWindowHideDimOnDismiss = true; - } - if (chatActivityEnterView != null) { - chatActivityEnterView.getEditField().setAllowDrawCursor(true); - } - } - }; - scrimPopupWindow.setPauseNotifications(true); - scrimPopupWindow.setDismissAnimationDuration(220); - scrimPopupWindow.setOutsideTouchable(true); - scrimPopupWindow.setClippingEnabled(true); - scrimPopupWindow.setAnimationStyle(R.style.PopupContextAnimation); - scrimPopupWindow.setFocusable(true); - scrimPopupContainerLayout.measure(View.MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), View.MeasureSpec.AT_MOST), View.MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), View.MeasureSpec.AT_MOST)); - scrimPopupWindow.setInputMethodMode(ActionBarPopupWindow.INPUT_METHOD_NOT_NEEDED); - scrimPopupWindow.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_UNSPECIFIED); - scrimPopupWindow.getContentView().setFocusableInTouchMode(true); - - int totalHeight = contentView.getHeight(); - int height = scrimPopupContainerLayout.getMeasuredHeight(); - int keyboardHeight = contentView.measureKeyboardHeight(); - if (keyboardHeight > AndroidUtilities.dp(20)) { - totalHeight += keyboardHeight; - } + if (view instanceof ChatMessageCell) { + final ChatMessageCell messageCell = (ChatMessageCell) view; + MessageObject.GroupedMessages groupedMessages = getValidGroupedMessage(message); + messageCell.isChat = currentChat != null || UserObject.isUserSelf(currentUser) || UserObject.isReplyUser(currentUser); + messageCell.isBot = currentUser != null && 
currentUser.bot; + messageCell.isMegagroup = ChatObject.isChannel(currentChat) && currentChat.megagroup; + messageCell.isForum = ChatObject.isForum(currentChat); + messageCell.isForumGeneral = ChatObject.isForum(currentChat) && isTopic && getTopicId() == 1; + messageCell.isThreadChat = threadMessageId != 0 || messageCell.isForum && isTopic; + messageCell.hasDiscussion = chatMode != MODE_SCHEDULED && ChatObject.isChannel(currentChat) && currentChat.has_link && !currentChat.megagroup; + messageCell.isPinned = chatMode == 0 && (pinnedMessageObjects.containsKey(message.getId()) || groupedMessages != null && !groupedMessages.messages.isEmpty() && pinnedMessageObjects.containsKey(groupedMessages.messages.get(0).getId())); + messageCell.linkedChatId = chatMode != MODE_SCHEDULED && chatInfo != null ? chatInfo.linked_chat_id : 0; + messageCell.isRepliesChat = UserObject.isReplyUser(currentUser); + messageCell.isPinnedChat = chatMode == MODE_PINNED; + boolean pinnedBottom = false; + boolean pinnedBottomByGroup = false; + boolean pinnedTop = false; + boolean pinnedTopByGroup = false; - int popupX = (int) (left - AndroidUtilities.dp(28)); - popupX = Math.max(AndroidUtilities.dp(6), Math.min(chatListView.getMeasuredWidth() - AndroidUtilities.dp(6) - scrimPopupContainerLayout.getMeasuredWidth(), popupX)); - if (AndroidUtilities.isTablet()) { - int[] location = new int[2]; - fragmentView.getLocationInWindow(location); - popupX += location[0]; - } - int popupY; - if (height < totalHeight) { - if (height < totalHeight / 2f && chatListView.getY() + cell.getY() + cell.reactionsLayoutInBubble.y + button.y > totalHeight / 2f) { - popupY = (int) (chatListView.getY() + cell.getY() + cell.reactionsLayoutInBubble.y + button.y - height); - } else { - popupY = (int) (chatListView.getY() + cell.getY() + cell.reactionsLayoutInBubble.y + button.y + button.height); - } + int prevPosition; + int nextPosition; + if (groupedMessages != null) { + MessageObject.GroupedMessagePosition pos = groupedMessages.positions.get(message); + if (pos != null) { + if (groupedMessages.isDocuments) { + prevPosition = position + groupedMessages.posArray.indexOf(pos) + 1; + nextPosition = position - groupedMessages.posArray.size() + groupedMessages.posArray.indexOf(pos); + } else { + if ((pos.flags & MessageObject.POSITION_FLAG_TOP) != 0) { + prevPosition = position + groupedMessages.posArray.indexOf(pos) + 1; } else { - popupY = inBubbleMode ? 0 : AndroidUtilities.statusBarHeight; - } - scrimPopupWindow.showAtLocation(chatListView, Gravity.LEFT | Gravity.TOP, scrimPopupX = popupX, scrimPopupY = popupY); - - chatListView.stopScroll(); - chatLayoutManager.setCanScrollVertically(false); - scrimViewReaction = reaction.reaction instanceof TLRPC.TL_reactionEmoji ? 
((TLRPC.TL_reactionEmoji) reaction.reaction).emoticon : null; - dimBehindView(cell, true); - hideHints(false); - if (topUndoView != null) { - topUndoView.hide(true, 1); - } - if (undoView != null) { - undoView.hide(true, 1); + pinnedTop = true; + pinnedTopByGroup = true; + prevPosition = -100; } - if (chatActivityEnterView != null) { - chatActivityEnterView.getEditField().setAllowDrawCursor(false); + if ((pos.flags & MessageObject.POSITION_FLAG_BOTTOM) != 0) { + nextPosition = position - groupedMessages.posArray.size() + groupedMessages.posArray.indexOf(pos); + } else { + pinnedBottom = true; + pinnedBottomByGroup = true; + nextPosition = -100; } } - } else if (reaction != null) { - ReactionsLayoutInBubble.VisibleReaction visibleReaction = ReactionsLayoutInBubble.VisibleReaction.fromTLReaction(reaction.reaction); - selectReaction(cell.getPrimaryMessageObject(), null, null,0, 0, visibleReaction,false, false, false); + } else { + prevPosition = -100; + nextPosition = -100; } + } else { + nextPosition = position - 1; + prevPosition = position + 1; } + int nextType = getItemViewType(nextPosition); + int prevType = getItemViewType(prevPosition); - @Override - public void didPressVoteButtons(ChatMessageCell cell, ArrayList buttons, int showCount, int x, int y) { - if (showCount >= 0 || buttons.isEmpty()) { - if (getParentActivity() == null) { - return; - } - if (pollHintView == null) { - pollHintView = new HintView(getParentActivity(), HintView.TYPE_POLL_VOTE, themeDelegate); - pollHintView.setAlpha(0.0f); - pollHintView.setVisibility(View.INVISIBLE); - int index = contentView.indexOfChild(chatActivityEnterView); - if (index == -1) { - return; - } - contentView.addView(pollHintView, index + 1, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 19, 0, 19, 0)); - } - if (buttons.isEmpty() && showCount < 0) { - ArrayList pollButtons = cell.getPollButtons(); - float lastDiff = 0; - for (int a = 0, N = pollButtons.size(); a < N; a++) { - ChatMessageCell.PollButton button = pollButtons.get(a); - lastDiff = cell.getY() + button.y - AndroidUtilities.dp(4) - chatListViewPaddingTop; - pollHintX = button.x + AndroidUtilities.dp(13.3f); - pollHintY = button.y - AndroidUtilities.dp(6) + y; - if (lastDiff > 0) { - lastDiff = 0; - x = pollHintX; - y = pollHintY; - break; + if (!(message.messageOwner.reply_markup instanceof TLRPC.TL_replyInlineMarkup) && nextType == holder.getItemViewType()) { + MessageObject nextMessage = messages.get(nextPosition - messagesStartRow); + pinnedBottom = nextMessage.isOutOwner() == message.isOutOwner() && Math.abs(nextMessage.messageOwner.date - message.messageOwner.date) <= 5 * 60; + if (pinnedBottom) { + if (message.isImportedForward() || nextMessage.isImportedForward()) { + if (message.isImportedForward() && nextMessage.isImportedForward()) { + if (Math.abs(nextMessage.messageOwner.fwd_from.date - message.messageOwner.fwd_from.date) <= 5 * 60) { + if (nextMessage.messageOwner.fwd_from.from_name != null && message.messageOwner.fwd_from.from_name != null) { + pinnedBottom = nextMessage.messageOwner.fwd_from.from_name.equals(message.messageOwner.fwd_from.from_name); + } else if (nextMessage.messageOwner.fwd_from.from_id != null && message.messageOwner.fwd_from.from_id != null) { + pinnedBottom = MessageObject.getPeerId(nextMessage.messageOwner.fwd_from.from_id) == MessageObject.getPeerId(message.messageOwner.fwd_from.from_id); + } else { + pinnedBottom = false; + } + } else { + pinnedBottom = false; } + } else { + 
pinnedBottom = false; } - if (lastDiff != 0) { - chatListView.smoothScrollBy(0, (int) lastDiff); - pollHintCell = cell; - return; + } else if (currentChat != null) { + long fromId = nextMessage.getFromChatId(); + pinnedBottom = fromId == message.getFromChatId(); + if (!pinnedBottomByGroup && pinnedBottom && fromId < 0 && currentChat.megagroup) { + pinnedBottom = false; + } + } else if (UserObject.isUserSelf(currentUser) || UserObject.isReplyUser(currentUser)) { + if (message.isPrivateForward() || nextMessage.isPrivateForward()) { + pinnedBottom = false; + } else { + pinnedBottom = nextMessage.getSenderId() == message.getSenderId(); } } - pollHintView.showForMessageCell(cell, showCount, x, y, true); - } else { - getSendMessagesHelper().sendVote(cell.getMessageObject(), buttons, null); } } - - @Override - public void didPressCancelSendButton(ChatMessageCell cell) { - MessageObject message = cell.getMessageObject(); - if (message.messageOwner.send_state != 0) { - getSendMessagesHelper().cancelSendingMessage(message); - } - } - - @Override - public void didLongPress(ChatMessageCell cell, float x, float y) { - createMenu(cell, false, false, x, y); - startMultiselect(chatListView.getChildAdapterPosition(cell)); - } - - @Override - public boolean canPerformActions() { - return actionBar != null && !actionBar.isActionModeShowed() && reportType < 0 && !inPreviewMode; - } - - @Override - public void didPressUrl(ChatMessageCell cell, final CharacterStyle url, boolean longPress) { - didPressMessageUrl(url, longPress, cell.getMessageObject(), cell); - } - - @Override - public boolean didPressAnimatedEmoji(ChatMessageCell cell, AnimatedEmojiSpan span) { - if (getMessagesController().premiumLocked || span == null || span.standard) { - return false; - } - long documentId = span.getDocumentId(); - TLRPC.Document document = span.document == null ? 
AnimatedEmojiDrawable.findDocument(currentAccount, documentId) : span.document; - if (document == null) { - return false; - } - Bulletin bulletin = BulletinFactory.of(ChatActivity.this).createContainsEmojiBulletin(document, false, set -> { - ArrayList inputSets = new ArrayList<>(1); - inputSets.add(set); - EmojiPacksAlert alert = new EmojiPacksAlert(ChatActivity.this, getParentActivity(), themeDelegate, inputSets); - alert.setCalcMandatoryInsets(isKeyboardVisible()); - showDialog(alert); - }); - if (bulletin != null) { - bulletin.show(); - return true; - } - return false; - } - - @Override - public void didPressTopicButton(ChatMessageCell cell) { - MessageObject message = cell.getMessageObject(); - if (message != null) { - int topicId = MessageObject.getTopicId(message.messageOwner, true); - if (topicId != 0) { - TLRPC.TL_forumTopic topic = getMessagesController().getTopicsController().findTopic(currentChat.id, topicId); - if (topic != null) { - ForumUtilities.openTopic(ChatActivity.this, currentChat.id, topic, message.getId()); + if (prevType == holder.getItemViewType()) { + MessageObject prevMessage = messages.get(prevPosition - messagesStartRow); + pinnedTop = !(prevMessage.messageOwner.reply_markup instanceof TLRPC.TL_replyInlineMarkup) && prevMessage.isOutOwner() == message.isOutOwner() && Math.abs(prevMessage.messageOwner.date - message.messageOwner.date) <= 5 * 60; + if (pinnedTop) { + if (message.isImportedForward() || prevMessage.isImportedForward()) { + if (message.isImportedForward() && prevMessage.isImportedForward()) { + if (Math.abs(message.messageOwner.fwd_from.date - prevMessage.messageOwner.fwd_from.date) <= 5 * 60) { + if (prevMessage.messageOwner.fwd_from.from_name != null && message.messageOwner.fwd_from.from_name != null) { + pinnedTop = prevMessage.messageOwner.fwd_from.from_name.equals(message.messageOwner.fwd_from.from_name); + } else if (prevMessage.messageOwner.fwd_from.from_id != null && message.messageOwner.fwd_from.from_id != null) { + pinnedTop = MessageObject.getPeerId(prevMessage.messageOwner.fwd_from.from_id) == MessageObject.getPeerId(message.messageOwner.fwd_from.from_id); + } else { + pinnedTop = false; + } + } else { + pinnedTop = false; + } + } else { + pinnedTop = false; + } + } else if (currentChat != null) { + long fromId = prevMessage.getFromChatId(); + pinnedTop = fromId == message.getFromChatId() && !message.isImportedForward() && !prevMessage.isImportedForward(); + if (!pinnedTopByGroup && pinnedTop && fromId < 0 && currentChat.megagroup) { + pinnedTop = false; + } + } else if (UserObject.isUserSelf(currentUser) || UserObject.isReplyUser(currentUser)) { + if (message.isPrivateForward() || prevMessage.isPrivateForward()) { + pinnedTop = false; + } else { + pinnedTop = prevMessage.getSenderId() == message.getSenderId(); } } } } - - @Override - public boolean shouldShowTopicButton() { - return ChatObject.isForum(currentChat) && !isTopic; - } - - @Override - public void didPressExtendedMediaPreview(ChatMessageCell cell, TLRPC.KeyboardButton button) { - getSendMessagesHelper().sendCallback(true, cell.getMessageObject(), button, ChatActivity.this); - } - - @Override - public void needOpenWebView(MessageObject message, String url, String title, String description, String originalUrl, int w, int h) { - try { - EmbedBottomSheet.show(ChatActivity.this, message, photoViewerProvider, title, description, originalUrl, url, w, h, isKeyboardVisible()); - } catch (Throwable e) { - FileLog.e(e); + if (ChatObject.isChannel(currentChat) && currentChat.megagroup && 
message.getFromChatId() <= 0 && message.messageOwner.fwd_from != null && message.messageOwner.fwd_from.saved_from_peer instanceof TLRPC.TL_peerChannel) { + if (!pinnedTopByGroup) { + pinnedTop = false; + } + if (!pinnedBottomByGroup) { + pinnedBottom = false; } } - @Override - public void didPressReplyMessage(ChatMessageCell cell, int id) { - if (UserObject.isReplyUser(currentUser)) { - didPressSideButton(cell); - return; - } - MessageObject messageObject = cell.getMessageObject(); - if (chatMode == MODE_PINNED || chatMode == MODE_SCHEDULED) { - chatActivityDelegate.openReplyMessage(id); - finishFragment(); - } else { - scrollToMessageId(id, messageObject.getId(), true, messageObject.getDialogId() == mergeDialogId ? 1 : 0, true, 0, () -> { - progressDialogAtMessageId = messageObject.getId(); - progressDialogAtMessageType = PROGRESS_REPLY; - }); + message.updateTranslation(false); + if (groupedMessages != null) { + for (int i = 0; i < groupedMessages.messages.size(); ++i) { + groupedMessages.messages.get(i).updateTranslation(false); } } - - @Override - public boolean isProgressLoading(ChatMessageCell cell, int type) { - return progressDialogAtMessageId != 0 && cell.getMessageObject() != null && progressDialogAtMessageId == cell.getMessageObject().getId() && progressDialogAtMessageType == type; + messageCell.setMessageObject(message, groupedMessages, pinnedBottom, pinnedTop); + messageCell.setSpoilersSuppressed(chatListView.getScrollState() != RecyclerView.SCROLL_STATE_IDLE); + messageCell.setHighlighted(highlightMessageId != Integer.MAX_VALUE && message.getId() == highlightMessageId); + if (highlightMessageId != Integer.MAX_VALUE) { + startMessageUnselect(); } + int index; + if ((index = animatingMessageObjects.indexOf(message)) != -1) { + boolean applyAnimation = false; + if (message.type == MessageObject.TYPE_ROUND_VIDEO && instantCameraView != null && instantCameraView.getTextureView() != null) { + applyAnimation = true; + messageCell.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { - @Override - public CharacterStyle getProgressLoadingLink(ChatMessageCell cell) { - if (cell.getMessageObject() != null && progressDialogAtMessageId != 0 && progressDialogAtMessageId == cell.getMessageObject().getId() && progressDialogAtMessageType == PROGRESS_LINK) { - return progressDialogLinkSpan; - } - return null; - } + PipRoundVideoView pipRoundVideoView = PipRoundVideoView.getInstance(); + if (pipRoundVideoView != null) { + pipRoundVideoView.showTemporary(true); + } - @Override - public String getProgressLoadingBotButtonUrl(ChatMessageCell cell) { - if (cell.getMessageObject() != null && progressDialogAtMessageId != 0 && progressDialogAtMessageId == cell.getMessageObject().getId() && progressDialogAtMessageType == PROGRESS_BOT_BUTTON) { - return progressDialogBotButtonUrl; - } - return null; - } + messageCell.getViewTreeObserver().removeOnPreDrawListener(this); + ImageReceiver imageReceiver = messageCell.getPhotoImage(); + float w = imageReceiver.getImageWidth(); + org.telegram.ui.Components.Rect rect = instantCameraView.getCameraRect(); + float scale = w / rect.width; + int[] position = new int[2]; + messageCell.getTransitionParams().ignoreAlpha = true; + messageCell.setAlpha(0.0f); + messageCell.setTimeAlpha(0.0f); + messageCell.getLocationOnScreen(position); + position[0] += imageReceiver.getImageX() - messageCell.getAnimationOffsetX(); + position[1] += imageReceiver.getImageY() - messageCell.getTranslationY(); + final 
InstantCameraView.InstantViewCameraContainer cameraContainer = instantCameraView.getCameraContainer(); + cameraContainer.setPivotX(0.0f); + cameraContainer.setPivotY(0.0f); + AnimatorSet animatorSet = new AnimatorSet(); - @Override - public void didPressViaBotNotInline(ChatMessageCell cell, long botId) { - Bundle args = new Bundle(); - args.putLong("user_id", botId); - if (getMessagesController().checkCanOpenChat(args, ChatActivity.this, cell.getMessageObject())) { - presentFragment(new ChatActivity(args)); - } - } + cameraContainer.setImageReceiver(imageReceiver); - @Override - public void didPressViaBot(ChatMessageCell cell, String username) { - if (bottomOverlayChat != null && bottomOverlayChat.getVisibility() == View.VISIBLE || bottomOverlay != null && bottomOverlay.getVisibility() == View.VISIBLE) { - return; - } - if (chatActivityEnterView != null && username != null && username.length() > 0) { - chatActivityEnterView.setFieldText("@" + username + " "); - chatActivityEnterView.openKeyboard(); - } - } + instantCameraView.cancelBlur(); - @Override - public void didStartVideoStream(MessageObject message) { - if (message.isVideo()) { - sendSecretMessageRead(message, true); - } - } + AnimatorSet allAnimators = new AnimatorSet(); + animatorSet.playTogether( + ObjectAnimator.ofFloat(cameraContainer, View.SCALE_X, scale), + ObjectAnimator.ofFloat(cameraContainer, View.SCALE_Y, scale), + ObjectAnimator.ofFloat(cameraContainer, View.TRANSLATION_Y, position[1] - rect.y), + ObjectAnimator.ofFloat(instantCameraView.getSwitchButtonView(), View.ALPHA, 0.0f), + ObjectAnimator.ofInt(instantCameraView.getPaint(), AnimationProperties.PAINT_ALPHA, 0), + ObjectAnimator.ofFloat(instantCameraView.getMuteImageView(), View.ALPHA, 0.0f) + ); + animatorSet.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + ObjectAnimator o = ObjectAnimator.ofFloat(cameraContainer, View.TRANSLATION_X, position[0] - rect.x); + o.setInterpolator(CubicBezierInterpolator.DEFAULT); - @Override - public void needReloadPolls() { - invalidateMessagesVisiblePart(); - } + allAnimators.playTogether(o, animatorSet); + allAnimators.setStartDelay(120); + allAnimators.setDuration(180); - @Override - public void didPressImage(ChatMessageCell cell, float x, float y) { - MessageObject message = cell.getMessageObject(); - message.putInDownloadsStore = true; - if (message.isSendError()) { - createMenu(cell, false, false, x, y); - return; - } else if (message.isSending()) { - return; - } - if (message.isDice()) { - undoView.showWithAction(0, chatActivityEnterView.getVisibility() == View.VISIBLE && bottomOverlay.getVisibility() != View.VISIBLE ? 
UndoView.ACTION_DICE_INFO : UndoView.ACTION_DICE_NO_SEND_INFO, message.getDiceEmoji(), null, () -> { - if (checkSlowModeAlert()) { - getSendMessagesHelper().sendMessage(message.getDiceEmoji(), dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, true, 0, null, false); - } - }); - } else if (message.isAnimatedEmoji() && (!message.isAnimatedAnimatedEmoji() || emojiAnimationsOverlay.supports(MessageObject.findAnimatedEmojiEmoticon(message.getDocument())) && currentUser != null) || message.isPremiumSticker()) { - restartSticker(cell); - emojiAnimationsOverlay.onTapItem(cell, ChatActivity.this, true); - chatListView.cancelClickRunnables(false); - } else if (message.needDrawBluredPreview()) { - Runnable action = sendSecretMessageRead(message, false); - cell.invalidate(); - SecretMediaViewer.getInstance().setParentActivity(getParentActivity()); - SecretMediaViewer.getInstance().openMedia(message, photoViewerProvider, action); - } else if (MessageObject.isAnimatedEmoji(message.getDocument()) && MessageObject.getInputStickerSet(message.getDocument()) != null) { - ArrayList inputSets = new ArrayList<>(1); - inputSets.add(MessageObject.getInputStickerSet(message.getDocument())); - EmojiPacksAlert alert = new EmojiPacksAlert(ChatActivity.this, getParentActivity(), themeDelegate, inputSets); - alert.setCalcMandatoryInsets(isKeyboardVisible()); - showDialog(alert); - } else if (message.getInputStickerSet() != null) { - // In case we have a .webp file that is displayed as a sticker, but - // that doesn't fit in 512x512, we assume it may be a regular large - // .webp image and we allow to open it in media viewer. - // Inspired by https://github.com/telegramdesktop/tdesktop/commit/baccec623d45dbfd1132d5f808192f0f3ad87647 - if (message.getInputStickerSet() == null) { - int photoHeight = 0; - int photoWidth = 0; - TLRPC.Document document = message.getDocument(); - for (int a = 0, N = document.attributes.size(); a < N; a++) { - TLRPC.DocumentAttribute attribute = document.attributes.get(a); - if (attribute instanceof TLRPC.TL_documentAttributeImageSize) { - photoWidth = attribute.w; - photoHeight = attribute.h; - break; + if (instantCameraView != null) { + instantCameraView.setIsMessageTransition(true); } + allAnimators.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + messageCell.setAlpha(1.0f); + messageCell.getTransitionParams().ignoreAlpha = false; + Property ALPHA = new AnimationProperties.FloatProperty("alpha") { + @Override + public void setValue(ChatMessageCell object, float value) { + object.setTimeAlpha(value); + } + + @Override + public Float get(ChatMessageCell object) { + return object.getTimeAlpha(); + } + }; + + AnimatorSet animatorSet = new AnimatorSet(); + animatorSet.playTogether( + ObjectAnimator.ofFloat(cameraContainer, View.ALPHA, 0.0f), + ObjectAnimator.ofFloat(messageCell, ALPHA, 1.0f) + ); + animatorSet.setDuration(100); + animatorSet.setInterpolator(new DecelerateInterpolator()); + animatorSet.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (instantCameraView != null) { + instantCameraView.setIsMessageTransition(false); + instantCameraView.hideCamera(true); + instantCameraView.setVisibility(View.INVISIBLE); + } + } + }); + animatorSet.start(); + } + }); + allAnimators.start(); + return true; } - if (photoWidth > 512 || photoHeight > 512) { - openPhotoViewerForMessage(cell, message); - } - return; - } - StickersAlert alert = new 
StickersAlert(getParentActivity(), ChatActivity.this, message.getInputStickerSet(), null, bottomOverlayChat.getVisibility() != View.VISIBLE && (currentChat == null || ChatObject.canSendStickers(currentChat)) ? chatActivityEnterView : null, themeDelegate); - alert.setCalcMandatoryInsets(isKeyboardVisible()); - showDialog(alert); - } else if (message.isVideo() || message.type == MessageObject.TYPE_PHOTO || message.type == MessageObject.TYPE_TEXT && !message.isWebpageDocument() || message.isGif()) { - openPhotoViewerForMessage(cell, message); - } else if (message.type == MessageObject.TYPE_VIDEO) { - sendSecretMessageRead(message, true); - try { - File f = null; - if (message.messageOwner.attachPath != null && message.messageOwner.attachPath.length() != 0) { - f = new File(message.messageOwner.attachPath); - } - if (f == null || !f.exists()) { - f = getFileLoader().getPathToMessage(message.messageOwner); - } - Intent intent = new Intent(Intent.ACTION_VIEW); - if (Build.VERSION.SDK_INT >= 24) { - intent.setFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION); - intent.setDataAndType(FileProvider.getUriForFile(getParentActivity(), ApplicationLoader.getApplicationId() + ".provider", f), "video/mp4"); - } else { - intent.setDataAndType(Uri.fromFile(f), "video/mp4"); - } - getParentActivity().startActivityForResult(intent, 500); - } catch (Exception e) { - FileLog.e(e); - alertUserOpenError(message); - } - } else if (message.type == MessageObject.TYPE_GEO) { - if (!AndroidUtilities.isMapsInstalled(ChatActivity.this)) { - return; - } - if (message.isLiveLocation()) { - LocationActivity fragment = new LocationActivity(currentChat == null || ChatObject.canSendMessages(currentChat) || currentChat.megagroup ? 2 : LocationActivity.LOCATION_TYPE_LIVE_VIEW); - fragment.setDelegate(ChatActivity.this); - fragment.setMessageObject(message); - presentFragment(fragment); - } else { - LocationActivity fragment = new LocationActivity(currentEncryptedChat == null ? 3 : 0); - fragment.setDelegate(ChatActivity.this); - fragment.setMessageObject(message); - presentFragment(fragment); - } - } else if (message.type == MessageObject.TYPE_FILE || message.type == MessageObject.TYPE_TEXT) { - File locFile = null; - if (message.messageOwner.attachPath != null && message.messageOwner.attachPath.length() != 0) { - File f = new File(message.messageOwner.attachPath); - if (f.exists()) { - locFile = f; - } - } - if (locFile == null) { - File f = getFileLoader().getPathToMessage(message.messageOwner); - if (f.exists()) { - locFile = f; - } - } - if (message.getDocumentName().toLowerCase().endsWith("attheme")) { - Theme.ThemeInfo themeInfo = Theme.applyThemeFile(locFile, message.getDocumentName(), null, true); - if (themeInfo != null) { - presentFragment(new ThemePreviewActivity(themeInfo)); - return; - } else { - scrollToPositionOnRecreate = -1; - } - boolean handled = false; - if (message.canPreviewDocument()) { - PhotoViewer.getInstance().setParentActivity(getParentActivity()); - PhotoViewer.getInstance().openPhoto(message, message.type != 0 ? dialog_id : 0, message.type != 0 ? 
mergeDialogId : 0, 0, photoViewerProvider, false); - handled = true; - } - if (!handled) { - try { - AndroidUtilities.openForView(message, getParentActivity()); - } catch (Exception e) { - FileLog.e(e); - alertUserOpenError(message); + }); + } else if (message.isAnyKindOfSticker() && !message.isAnimatedEmojiStickers()) { + applyAnimation = true; + messageCell.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + messageCell.getViewTreeObserver().removeOnPreDrawListener(this); + MessageObject.SendAnimationData sendAnimationData = messageCell.getMessageObject().sendAnimationData; + if (sendAnimationData == null) { + return true; + } + animateSendingViews.add(messageCell); + ImageReceiver imageReceiver = messageCell.getPhotoImage(); + float w = imageReceiver.getImageWidth(); + float scale = sendAnimationData.width / w; + int[] position = new int[2]; + messageCell.getTransitionParams().ignoreAlpha = true; + messageCell.getLocationInWindow(position); + position[1] -= messageCell.getTranslationY(); + if (chatActivityEnterView.isTopViewVisible()) { + position[1] += AndroidUtilities.dp(48); } - } - } else if (locFile == null || !locFile.isFile()) { - AlertUtil.showToast("FILE_NOT_FOUND"); + AnimatorSet allAnimators = new AnimatorSet(); - } else if (message.getDocumentName().toLowerCase().endsWith(".nekox.json")) { + Property param1 = new AnimationProperties.FloatProperty("p1") { + @Override + public void setValue(MessageObject.SendAnimationData object, float value) { + object.currentScale = value; + } - File finalLocFile = locFile; - AlertUtil.showConfirm(getParentActivity(), - LocaleController.getString("ImportProxyList", R.string.ImportProxyList), - R.drawable.baseline_security_24, LocaleController.getString("Import", R.string.Import), - false, () -> { - String status = ProxyListActivity.processProxyListFile(getParentActivity(), finalLocFile); - if (!StrUtil.isBlank(status)) { - presentFragment(new ProxyListActivity(status)); + @Override + public Float get(MessageObject.SendAnimationData object) { + return object.currentScale; + } + }; + Property param2 = new AnimationProperties.FloatProperty("p2") { + @Override + public void setValue(MessageObject.SendAnimationData object, float value) { + object.currentX = value; + if (fragmentView != null) { + fragmentView.invalidate(); } - }); + } - } else if (message.getDocumentName().toLowerCase().endsWith(".nekox-stickers.json")) { + @Override + public Float get(MessageObject.SendAnimationData object) { + return object.currentX; + } + }; + Property param3 = new AnimationProperties.FloatProperty("p3") { + @Override + public void setValue(MessageObject.SendAnimationData object, float value) { + object.currentY = value; + if (fragmentView != null) { + fragmentView.invalidate(); + } + } - File finalLocFile = locFile; - AlertUtil.showConfirm(getParentActivity(), - LocaleController.getString("ImportStickersList", R.string.ImportStickersList), - R.drawable.msg_sticker, LocaleController.getString("Import", R.string.Import), false, () -> { - presentFragment(new StickersActivity(finalLocFile)); - }); + @Override + public Float get(MessageObject.SendAnimationData object) { + return object.currentY; + } + }; + AnimatorSet animatorSet = new AnimatorSet(); + animatorSet.playTogether( + ObjectAnimator.ofFloat(sendAnimationData, param1, scale, 1.0f), + ObjectAnimator.ofFloat(sendAnimationData, param3, sendAnimationData.y, position[1] + imageReceiver.getCenterY()) + ); + 
animatorSet.setInterpolator(ChatListItemAnimator.DEFAULT_INTERPOLATOR); + ObjectAnimator o = ObjectAnimator.ofFloat(sendAnimationData, param2, sendAnimationData.x, position[0] + imageReceiver.getCenterX()); + o.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + allAnimators.playTogether(o, animatorSet); + allAnimators.setDuration(ChatListItemAnimator.DEFAULT_DURATION); - } else if (message.getDocumentName().toLowerCase().endsWith(".nekox-settings.json")) { + allAnimators.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + animateSendingViews.remove(messageCell); + if (fragmentView != null) { + fragmentView.invalidate(); + chatListView.invalidate(); + } + messageCell.setAlpha(1.0f); + messageCell.getTransitionParams().ignoreAlpha = false; + } + }); + allAnimators.start(); - File finalLocFile = locFile; - NekoSettingsActivity.importSettings(getParentActivity(), finalLocFile); + Property ALPHA = new AnimationProperties.FloatProperty("alpha") { + @Override + public void setValue(MessageObject.SendAnimationData object, float value) { + object.timeAlpha = value; + if (fragmentView != null) { + fragmentView.invalidate(); + } + } - } else { - boolean handled = false; - if (message.canPreviewDocument()) { - PhotoViewer.getInstance().setParentActivity(ChatActivity.this, themeDelegate); - PhotoViewer.getInstance().openPhoto(message, ChatActivity.this, message.type != 0 ? dialog_id : 0, message.type != 0 ? mergeDialogId : 0, message.type != 0 ? getTopicId() : 0, photoViewerProvider); - handled = true; - } - if (!handled) { - try { - AndroidUtilities.openForView(message, getParentActivity(), themeDelegate); - } catch (Exception e) { - FileLog.e(e); - alertUserOpenError(message); + @Override + public Float get(MessageObject.SendAnimationData object) { + return object.timeAlpha; + } + }; + + AnimatorSet animatorSet2 = new AnimatorSet(); + animatorSet2.playTogether( + ObjectAnimator.ofFloat(sendAnimationData, ALPHA, 0.0f, 1.0f) + ); + animatorSet2.setDuration(100); + animatorSet2.setStartDelay(150); + animatorSet2.setInterpolator(new DecelerateInterpolator()); + animatorSet2.start(); + return true; } - } + }); + } + if (applyAnimation || chatListItemAnimator == null) { + animatingMessageObjects.remove(index); + chatActivityEnterView.startMessageTransition(); + chatActivityEnterView.hideTopView(true); } } - + if (!animatingDocuments.isEmpty() && animatingDocuments.containsKey(message.getDocument())) { + animatingDocuments.remove(message.getDocument()); + if (chatListItemAnimator != null) { + chatListItemAnimator.onGreetingStickerTransition(holder, greetingsViewContainer); + } + } + } else if (view instanceof ChatActionCell) { + ChatActionCell actionCell = (ChatActionCell) view; + actionCell.setMessageObject(message); + actionCell.setAlpha(1.0f); + actionCell.setSpoilersSuppressed(chatListView.getScrollState() != RecyclerView.SCROLL_STATE_IDLE); + } else if (view instanceof ChatUnreadCell) { + ChatUnreadCell unreadCell = (ChatUnreadCell) view; + unreadCell.setText(LocaleController.getString("UnreadMessages", R.string.UnreadMessages)); + if (createUnreadMessageAfterId != 0) { + createUnreadMessageAfterId = 0; } + } + } + } + @Override + public int getItemViewType(int position) { + if (clearingHistory) { + if (position == botInfoEmptyRow) { + return 3; + } + } + if (position >= messagesStartRow && position < messagesEndRow) { + ArrayList messages = isFrozen ? 
frozenMessages : ChatActivity.this.messages; + return messages.get(position - messagesStartRow).contentType; + } else if (position == botInfoRow) { + return 3; + } + return 4; + } - @Override - public void didPressInstantButton(ChatMessageCell cell, int type) { - MessageObject messageObject = cell.getMessageObject(); - if (type == 8) { - PollVotesAlert.showForPoll(ChatActivity.this, messageObject); - } else if (type == 0) { - if (messageObject.messageOwner.media != null && messageObject.messageOwner.media.webpage != null && messageObject.messageOwner.media.webpage.cached_page != null) { - ArticleViewer.getInstance().setParentActivity(getParentActivity(), ChatActivity.this); - ArticleViewer.getInstance().open(messageObject); - } - } else if (type == 5) { - long uid = messageObject.messageOwner.media.user_id; - TLRPC.User user = null; - if (uid != 0) { - user = MessagesController.getInstance(currentAccount).getUser(uid); - } - openVCard(user, messageObject.messageOwner.media.phone_number, messageObject.messageOwner.media.vcard, messageObject.messageOwner.media.first_name, messageObject.messageOwner.media.last_name); - } else { - if (messageObject.isSponsored()) { - Bundle args = new Bundle();if (messageObject.sponsoredChatInvite != null) { - showDialog(new JoinGroupAlert(mContext, messageObject.sponsoredChatInvite, messageObject.sponsoredChatInviteHash, ChatActivity.this, themeDelegate)); - } else { - long peerId = MessageObject.getPeerId(messageObject.messageOwner.from_id); - if (peerId < 0) { - args.putLong("chat_id", -peerId); - } else { - args.putLong("user_id", peerId); - } - if (messageObject.sponsoredChannelPost != 0) { - args.putInt("message_id", messageObject.sponsoredChannelPost); - } - if (messageObject.botStartParam != null) { - args.putString("inline_query", messageObject.botStartParam); - } - if (getMessagesController().checkCanOpenChat(args, ChatActivity.this)) { - presentFragment(new ChatActivity(args)); - } - } - } else if (messageObject.messageOwner.media != null && messageObject.messageOwner.media.webpage != null) { - if (!openLinkInternally(messageObject.messageOwner.media.webpage.url, cell, null, messageObject.getId(), PROGRESS_INSTANT)) { - if (progressDialogCurrent != null) { - progressDialogCurrent.cancel(true); - } - progressDialogCurrent = cell == null || cell.getMessageObject() == null ? null : new Browser.Progress() { - @Override - public void init() { - progressDialogAtMessageId = cell.getMessageObject().getId(); - progressDialogAtMessageType = PROGRESS_INSTANT; - progressDialogLinkSpan = null; - cell.invalidate(); - } + @Override + public void onViewAttachedToWindow(RecyclerView.ViewHolder holder) { + if (holder.itemView instanceof ChatMessageCell || holder.itemView instanceof ChatActionCell) { + View view = holder.itemView; + holder.itemView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + view.getViewTreeObserver().removeOnPreDrawListener(this); - @Override - public void end(boolean replaced) { - if (!replaced) { - AndroidUtilities.runOnUIThread(ChatActivity.this::resetProgressDialogLoading, 250); - } - } - }; - Browser.openUrl(getParentActivity(), Uri.parse(messageObject.messageOwner.media.webpage.url), true, true, progressDialogCurrent); - } + int height = chatListView.getMeasuredHeight(); + int top = view.getTop(); + int bottom = view.getBottom(); + int viewTop = top >= 0 ? 
0 : -top; + int viewBottom = view.getMeasuredHeight(); + if (viewBottom > height) { + viewBottom = viewTop + height; + } + int recyclerChatViewHeight = (contentView.getHeightWithKeyboard() - (inPreviewMode ? 0 : AndroidUtilities.dp(48)) - chatListView.getTop()); + int keyboardOffset = contentView.getKeyboardHeight(); + int parentHeight = viewBottom - viewTop; + if (keyboardOffset < AndroidUtilities.dp(20) && chatActivityEnterView.isPopupShowing() || chatActivityEnterView.panelAnimationInProgress()) { + keyboardOffset = chatActivityEnterView.getEmojiPadding(); + } + if (holder.itemView instanceof ChatMessageCell) { + ChatMessageCell chatMessageCell = (ChatMessageCell) view; + chatMessageCell.setVisiblePart(viewTop, viewBottom - viewTop, recyclerChatViewHeight, keyboardOffset, view.getY() + (isKeyboardVisible() ? chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(), contentView.getMeasuredWidth(), contentView.getBackgroundSizeY(), blurredViewTopOffset, blurredViewBottomOffset); + markSponsoredAsRead(chatMessageCell.getMessageObject()); + } else if (holder.itemView instanceof ChatActionCell) { + if (actionBar != null && contentView != null) { + ((ChatActionCell) view).setVisiblePart(view.getY() + (isKeyboardVisible() ? chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(), contentView.getBackgroundSizeY()); } } + + return true; + } + }); + } + if (holder.itemView instanceof ChatMessageCell) { + final ChatMessageCell messageCell = (ChatMessageCell) holder.itemView; + MessageObject message = messageCell.getMessageObject(); + messageCell.showHintButton(true, false, -1); + if (hintMessageObject != null && hintMessageObject.equals(message)) { + messageCell.showHintButton(false, false, hintMessageType); + } + if (message.isAnimatedEmoji()) { + String emoji = message.getStickerEmoji(); + if (emoji != null) { + MessagesController.EmojiSound sound = getMessagesController().emojiSounds.get(emoji.replace("\uFE0F", "")); + if (sound != null) { + getMediaController().playEmojiSound(getAccountInstance(), emoji, sound, true); + } } - - @Override - public void didPressCommentButton(ChatMessageCell cell) { - MessageObject.GroupedMessages group = cell.getCurrentMessagesGroup(); - MessageObject message; - if (group != null && !group.messages.isEmpty()) { - message = group.messages.get(0); - } else { - message = cell.getMessageObject(); - } - int maxReadId; - long linkedChatId; - if (message.messageOwner.replies != null) { - maxReadId = message.messageOwner.replies.read_max_id; - linkedChatId = message.messageOwner.replies.channel_id; - } else { - maxReadId = -1; - linkedChatId = 0; + } + if (message.updateTranslation(false)) { + messageCell.setMessageObject(message, messageCell.getCurrentMessagesGroup(), messageCell.isPinnedBottom(), messageCell.isPinnedTop()); + } else { + MessageObject.GroupedMessages group = messageCell.getCurrentMessagesGroup(); + if (group != null) { + for (int i = 0; i < group.messages.size(); ++i) { + group.messages.get(i).updateTranslation(); } - openDiscussionMessageChat(currentChat.id, message, message.getId(), linkedChatId, maxReadId, 0, null); } + } - @Override - public String getAdminRank(long uid) { - if (ChatObject.isChannel(currentChat) && currentChat.megagroup) { - String rank = getMessagesController().getAdminRank(currentChat.id, uid); - if (rank != null) { - return rank; - } - } - if (forumTopic != null && forumTopic.from_id != null && (forumTopic.from_id.user_id == uid || 
forumTopic.from_id.channel_id == uid || forumTopic.from_id.chat_id == uid)) { - return LocaleController.getString("TopicCreator", R.string.TopicCreator); - } - return null; + boolean selected = false; + boolean disableSelection = false; + if (actionBar.isActionModeShowed() || reportType >= 0) { + messageCell.setCheckBoxVisible(threadMessageObjects == null || !threadMessageObjects.contains(message), false); + int idx = message.getDialogId() == dialog_id ? 0 : 1; + if (selectedMessagesIds[idx].indexOfKey(message.getId()) >= 0) { + setCellSelectionBackground(message, messageCell, idx, false); + selected = true; + } else { + messageCell.setDrawSelectionBackground(false); + messageCell.setChecked(false, false, false); } + disableSelection = true; + } else { + messageCell.setDrawSelectionBackground(false); + messageCell.setChecked(false, false, false); + messageCell.setCheckBoxVisible(false, false); + } + messageCell.setCheckPressed(!disableSelection, disableSelection && selected); - @Override - public boolean shouldRepeatSticker(MessageObject message) { - return !alreadyPlayedStickers.containsKey(message); - } + if (searchContainer != null && searchContainer.getVisibility() == View.VISIBLE && getMediaDataController().isMessageFound(message.getId(), message.getDialogId() == mergeDialogId) && getMediaDataController().getLastSearchQuery() != null) { + messageCell.setHighlightedText(getMediaDataController().getLastSearchQuery()); + } else { + messageCell.setHighlightedText(null); + } - @Override - public void setShouldNotRepeatSticker(MessageObject message) { - alreadyPlayedStickers.put(message, true); + if (!inPreviewMode || !messageCell.isHighlighted()) { + messageCell.setHighlighted(highlightMessageId != Integer.MAX_VALUE && messageCell.getMessageObject().getId() == highlightMessageId); + if (highlightMessageId != Integer.MAX_VALUE) { + startMessageUnselect(); } - - @Override - public TextSelectionHelper.ChatListTextSelectionHelper getTextSelectionHelper() { - return textSelectionHelper; + } + if (DialogConfig.isAutoTranslateEnable(dialog_id, getTopicId()) && LanguageDetector.hasSupport()) { + final var messageObject = messageCell.getMessageObject(); + if (MessageHelper.isMessageObjectAutoTranslatable(messageObject)) { + LanguageDetector.detectLanguage( + MessageHelper.getMessagePlainText(messageObject), + (String lang) -> { + if (!isLanguageRestricted(lang)) { + ArrayList fmessages = new ArrayList<>(Arrays.asList(messageObject)); + MessageTransKt.translateMessages(ChatActivity.this, fmessages, true); + } + }, + (Exception e) -> { + FileLog.e("mlkit: failed to detect language in message"); + e.printStackTrace(); + messageObject.translating = false; + }); } + } + } - @Override - public boolean hasSelectedMessages() { - return selectedMessagesIds[0].size() + selectedMessagesIds[1].size() > 0; - } + int position = holder.getAdapterPosition(); + if (position >= messagesStartRow && position < messagesEndRow) { + ArrayList messages = isFrozen ? 
frozenMessages : ChatActivity.this.messages; - @Override - public void onDiceFinished() { - if (fireworksOverlay.isStarted()) { - return; + MessageObject message = messages.get(position - messagesStartRow); + View view = holder.itemView; + if (message != null && message.messageOwner != null && message.messageOwner.media_unread && message.messageOwner.mentioned) { + if (!inPreviewMode && chatMode == 0) { + if (!message.isVoice() && !message.isRoundVideo()) { + newMentionsCount--; + if (newMentionsCount <= 0) { + newMentionsCount = 0; + hasAllMentionsLocal = true; + showMentionDownButton(false, true); + } else { + mentiondownButtonCounter.setText(String.format("%d", newMentionsCount)); + } + getMessagesController().markMentionMessageAsRead(message.getId(), ChatObject.isChannel(currentChat) ? currentChat.id : 0, dialog_id); + message.setContentIsRead(); } - fireworksOverlay.start(); - if (!NekoConfig.disableVibration.Bool()) { - fireworksOverlay.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } + if (view instanceof ChatMessageCell) { + ChatMessageCell messageCell = (ChatMessageCell) view; + if (inPreviewMode) { + messageCell.setHighlighted(true); + } else { + messageCell.setHighlightedAnimated(); } } + } + } + } - @Override - public PinchToZoomHelper getPinchToZoomHelper() { - return pinchToZoomHelper; - } + public void updateRowAtPosition(int index) { + if (chatLayoutManager == null || isFrozen) { + return; + } + int lastVisibleItem = RecyclerView.NO_POSITION; + int top = 0; - @Override - public boolean keyboardIsOpened() { - return contentView.getKeyboardHeight() + chatEmojiViewPadding >= AndroidUtilities.dp(20); + if (!wasManualScroll && unreadMessageObject != null) { + int n = chatListView.getChildCount(); + for (int i = 0; i < n; i++) { + View child = chatListView.getChildAt(i); + if (child instanceof ChatMessageCell && ((ChatMessageCell) child).getMessageObject() == unreadMessageObject) { + int unreadMessageIndex = messages.indexOf(unreadMessageObject); + if (unreadMessageIndex >= 0) { + lastVisibleItem = messagesStartRow + messages.indexOf(unreadMessageObject); + top = chatListView.getMeasuredHeight() - child.getBottom() - chatListView.getPaddingBottom(); + } + break; } + } + } + notifyItemChanged(index); + if (lastVisibleItem != RecyclerView.NO_POSITION) { + chatLayoutManager.scrollToPositionWithOffset(lastVisibleItem, top); + } + } - public boolean isLandscape() { - return contentView.getMeasuredWidth() > contentView.getMeasuredHeight(); + public void invalidateRowWithMessageObject(MessageObject messageObject) { + int count = chatListView.getChildCount(); + for (int a = 0; a < count; a++) { + View child = chatListView.getChildAt(a); + if (child instanceof ChatMessageCell) { + ChatMessageCell cell = (ChatMessageCell) child; + if (cell.getMessageObject() == messageObject) { + cell.invalidate(); + return; } + } + } + } - @Override - public void invalidateBlur() { - contentView.invalidateBlur(); + public View updateRowWithMessageObject(MessageObject messageObject, boolean allowInPlace) { + if (allowInPlace) { + int count = chatListView.getChildCount(); + for (int a = 0; a < count; a++) { + View child = chatListView.getChildAt(a); + if (child instanceof ChatMessageCell) { + ChatMessageCell cell = (ChatMessageCell) child; + if (cell.getMessageObject() == messageObject && !cell.isAdminLayoutChanged()) { + cell.setMessageObject(messageObject, cell.getCurrentMessagesGroup(), cell.isPinnedBottom(), cell.isPinnedTop()); + 
return cell; + } } + } + } + ArrayList messages = isFrozen ? frozenMessages : ChatActivity.this.messages; - @Override - public boolean canDrawOutboundsContent() { - return false; - } + int index = messages.indexOf(messageObject); + if (index == -1) { + return null; + } + updateRowAtPosition(index + messagesStartRow); + return null; + } - @Override - public boolean onAccessibilityAction(int action, Bundle arguments) { - if (action == AccessibilityNodeInfo.ACTION_CLICK || action == R.id.acc_action_small_button || action == R.id.acc_action_msg_options) { - if (inPreviewMode && allowExpandPreviewByClick) { - if (parentLayout != null) { - parentLayout.expandPreviewFragment(); - } - return true; - } - return !canPerformActions(); - } - return false; - } - }); - if (currentEncryptedChat == null) { - chatMessageCell.setAllowAssistant(true); + public void notifyDataSetChanged(boolean animated) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("notify data set changed fragmentOpened=" + fragmentOpened); + } + if (animated && fragmentOpened) { + if (chatListView.getItemAnimator() != chatListItemAnimator) { + chatListView.setItemAnimator(chatListItemAnimator); } - } else if (viewType == 1) { - view = new ChatActionCell(mContext, true, themeDelegate) { - @Override - public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { - super.onInitializeAccessibilityNodeInfo(info); - // if alpha == 0, then visibleToUser == false, so we need to override it - // to keep accessibility working correctly - info.setVisibleToUser(true); - } - }; - ((ChatActionCell) view).setInvalidateColors(true); - ((ChatActionCell) view).setDelegate(new ChatActionCell.ChatActionCellDelegate() { - @Override - public void didOpenPremiumGift(ChatActionCell cell, TLRPC.TL_premiumGiftOption giftOption, boolean animateConfetti) { - Toast.makeText(ChatActivity.this.getContext(), LocaleController.getString("nekoxPremiumGiftRemoved", R.string.nekoxPremiumGiftRemoved), Toast.LENGTH_SHORT).show(); -// showDialog(new PremiumPreviewBottomSheet(ChatActivity.this, currentAccount, getCurrentUser(), new GiftPremiumBottomSheet.GiftTier(giftOption), themeDelegate) -// .setAnimateConfetti(animateConfetti) -// .setOutboundGift(cell.getMessageObject().isOut())); - } + } else { + chatListView.setItemAnimator(null); + } + updateRowsInternal(); + try { + super.notifyDataSetChanged(); + } catch (Exception e) { + FileLog.e(e); + } + boolean hideSkeletons = false; + for (int i = messages.size() - 1; i >= 0; i--) { + MessageObject message = messages.get(i); + if (message.isDateObject) { + continue; + } + if (message.messageOwner != null && (message.messageOwner.action instanceof TLRPC.TL_messageActionTopicCreate || message.messageOwner.action instanceof TLRPC.TL_messageActionChannelCreate)) { + hideSkeletons = true; + } + break; + } + if ((endReached[0] && (mergeDialogId == 0 || endReached[1])) || hideSkeletons) { + checkDispatchHideSkeletons(fragmentBeginToShow); + } + } + + + @Override + public void notifyDataSetChanged() { + notifyDataSetChanged(false); + } + + @Override + public void notifyItemChanged(int position) { + if (!fragmentBeginToShow) { + chatListView.setItemAnimator(null); + } else if (chatListView.getItemAnimator() != chatListItemAnimator) { + chatListView.setItemAnimator(chatListItemAnimator); + } + updateRowsInternal(); + try { + super.notifyItemChanged(position); + } catch (Exception e) { + FileLog.e(e); + } + } + + @Override + public void notifyItemRangeChanged(int positionStart, int itemCount) { + if (BuildVars.LOGS_ENABLED) { + 
FileLog.d("notify item range changed " + positionStart + ":" + itemCount); + } + if (!fragmentBeginToShow) { + chatListView.setItemAnimator(null); + } else if (chatListView.getItemAnimator() != chatListItemAnimator) { + chatListView.setItemAnimator(chatListItemAnimator); + } + updateRowsInternal(); + try { + super.notifyItemRangeChanged(positionStart, itemCount); + } catch (Exception e) { + FileLog.e(e); + } + } - @Override - public void needShowEffectOverlay(ChatActionCell cell, TLRPC.Document document, TLRPC.VideoSize videoSize) { - emojiAnimationsOverlay.showAnimationForActionCell(cell, document, videoSize); - } + @Override + public void notifyItemInserted(int position) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("notify item inserted " + position); + } + if (!fragmentBeginToShow) { + chatListView.setItemAnimator(null); + } else if (chatListView.getItemAnimator() != chatListItemAnimator) { + chatListView.setItemAnimator(chatListItemAnimator); + } + updateRowsInternal(); + try { + super.notifyItemInserted(position); + } catch (Exception e) { + FileLog.e(e); + } + } - @Override - public void didClickImage(ChatActionCell cell) { - MessageObject message = cell.getMessageObject(); - PhotoViewer.getInstance().setParentActivity(ChatActivity.this, themeDelegate); - TLRPC.PhotoSize photoSize = FileLoader.getClosestPhotoSizeWithSize(message.photoThumbs, 640); - TLRPC.VideoSize videoSize = null; - if (message.messageOwner.action.photo.video_sizes != null && !message.messageOwner.action.photo.video_sizes.isEmpty()) { - videoSize = message.messageOwner.action.photo.video_sizes.get(0); - } - if (cell.getMessageObject().type == MessageObject.TYPE_SUGGEST_PHOTO && !message.isOutOwner()) { - if (message.settingAvatar) { - return; - } - final ArrayList photos = new ArrayList<>(); - ImageLocation.getForPhoto(videoSize, message.messageOwner.action.photo); - File file = videoSize == null ? 
getFileLoader().getPathToAttach(message.messageOwner.action.photo) : getFileLoader().getPathToAttach(videoSize); - File file2 = new File(FileLoader.getDirectory(FileLoader.MEDIA_DIR_CACHE), file.getName()); - if (!file.exists()) { - if (file2.exists()) { - file = file2; - } else { - //TODO photo not downloaded yet - return; - } - } - final MediaController.PhotoEntry entry = new MediaController.PhotoEntry(0, 0, 0, file.getAbsolutePath(), 0, false, 0, 0, 0); - entry.caption = chatActivityEnterView.getFieldText(); - entry.isVideo = videoSize != null; - photos.add(entry); + @Override + public void notifyItemMoved(int fromPosition, int toPosition) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("notify item moved" + fromPosition + ":" + toPosition); + } + if (!fragmentBeginToShow) { + chatListView.setItemAnimator(null); + } else if (chatListView.getItemAnimator() != chatListItemAnimator) { + chatListView.setItemAnimator(chatListItemAnimator); + } + updateRowsInternal(); + try { + super.notifyItemMoved(fromPosition, toPosition); + } catch (Exception e) { + FileLog.e(e); + } + } - PhotoViewer.getInstance().openPhotoForSelect(photos, 0, PhotoViewer.SELECT_TYPE_AVATAR, false, new PhotoViewer.EmptyPhotoViewerProvider() { - @Override - public PhotoViewer.PlaceProviderObject getPlaceForPhoto(MessageObject messageObject, TLRPC.FileLocation fileLocation, int index, boolean needPreview) { - return photoViewerProvider.getPlaceForPhoto(message, fileLocation, index, needPreview); - } + @Override + public void notifyItemRangeInserted(int positionStart, int itemCount) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("notify item range inserted" + positionStart + ":" + itemCount); + } + if (!fragmentBeginToShow) { + chatListView.setItemAnimator(null); + } else if (chatListView.getItemAnimator() != chatListItemAnimator) { + chatListView.setItemAnimator(chatListItemAnimator); + } + updateRowsInternal(); + if (positionStart == 1 && itemCount > 0) { + int lastPosition = positionStart + itemCount; + if (lastPosition >= messagesStartRow && lastPosition < messagesEndRow) { + MessageObject m1 = messages.get(lastPosition - messagesStartRow); + MessageObject m2 = messages.get(lastPosition - messagesStartRow - 1); + if (currentChat != null && m1.getFromChatId() == m2.getFromChatId() || currentUser != null && m1.isOutOwner() == m2.isOutOwner()) { + notifyItemChanged(positionStart); + } + } + } + try { + super.notifyItemRangeInserted(positionStart, itemCount); + } catch (Exception e) { + FileLog.e(e); + } + } - @Override - public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { - message.settingAvatar = true; - if (entry.imagePath != null || entry.isVideo) { - PhotoUtilities.setImageAsAvatar(entry, ChatActivity.this, () -> { - message.settingAvatar = false; - }); - } else { - TLRPC.TL_photos_updateProfilePhoto req = new TLRPC.TL_photos_updateProfilePhoto(); - req.id = new TLRPC.TL_inputPhoto(); - req.id.id = message.messageOwner.action.photo.id; - req.id.access_hash = message.messageOwner.action.photo.access_hash; - req.id.file_reference = message.messageOwner.action.photo.file_reference; + @Override + public void notifyItemRemoved(int position) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("notify item removed " + position); + } + if (!fragmentBeginToShow) { + chatListView.setItemAnimator(null); + } else if (chatListView.getItemAnimator() != chatListItemAnimator) { + chatListView.setItemAnimator(chatListItemAnimator); + } + updateRowsInternal(); + try 
{ + super.notifyItemRemoved(position); + } catch (Exception e) { + FileLog.e(e); + } + } - getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { - if (response instanceof TLRPC.TL_photos_photo) { - TLRPC.TL_photos_photo photos_photo = (TLRPC.TL_photos_photo) response; - getMessagesController().putUsers(photos_photo.users, false); - TLRPC.User user = getMessagesController().getUser(getUserConfig().clientUserId); - if (photos_photo.photo instanceof TLRPC.TL_photo) { - if (user != null) { - PhotoUtilities.applyPhotoToUser(message.messageOwner.action.photo, user, false); - getUserConfig().setCurrentUser(user); - getUserConfig().saveConfig(true); - CharSequence title = AndroidUtilities.replaceTags(LocaleController.getString("ApplyAvatarHint", R.string.ApplyAvatarHintTitle)); - CharSequence subtitle = AndroidUtilities.replaceSingleTag(LocaleController.getString("ApplyAvatarHint", R.string.ApplyAvatarHint), () -> { - Bundle args = new Bundle(); - args.putLong("user_id", UserConfig.getInstance(currentAccount).clientUserId); - presentFragment(new ProfileActivity(args)); - }); - BulletinFactory.of(ChatActivity.this).createUsersBulletin(Collections.singletonList(user), title, subtitle).show(); - } - } - } - message.settingAvatar = false; - })); - } - } + @Override + public void notifyItemRangeRemoved(int positionStart, int itemCount) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("notify item range removed" + positionStart + ":" + itemCount); + } + if (!fragmentBeginToShow) { + chatListView.setItemAnimator(null); + } else if (chatListView.getItemAnimator() != chatListItemAnimator) { + chatListView.setItemAnimator(chatListItemAnimator); + } + updateRowsInternal(); + try { + super.notifyItemRangeRemoved(positionStart, itemCount); + } catch (Exception e) { + FileLog.e(e); + } + } - }, null); - if (entry.isVideo) { - PhotoViewer.getInstance().setTitle(LocaleController.getString(R.string.SuggestedVideo)); - } else { - PhotoViewer.getInstance().setTitle(LocaleController.getString(R.string.SuggestedPhoto)); - } - ImageUpdater.AvatarFor avatarFor = new ImageUpdater.AvatarFor(getUserConfig().getCurrentUser(), ImageUpdater.TYPE_SET_PHOTO_FOR_USER); - avatarFor.isVideo = videoSize != null; - avatarFor.fromObject = getMessagesController().getUser(dialog_id); - PhotoViewer.getInstance().setAvatarFor(avatarFor); - } else if (videoSize != null) { - ImageLocation imageLocation = ImageLocation.getForPhoto(videoSize, message.messageOwner.action.photo); - PhotoViewer.getInstance().openPhoto(videoSize.location, imageLocation, photoViewerProvider); - if (cell.getMessageObject().type == MessageObject.TYPE_SUGGEST_PHOTO) { - PhotoViewer.getInstance().setTitle(LocaleController.getString("SuggestedVideo", R.string.SuggestedVideo)); - } - } else if (photoSize != null) { - ImageLocation imageLocation = ImageLocation.getForPhoto(photoSize, message.messageOwner.action.photo); - PhotoViewer.getInstance().openPhoto(photoSize.location, imageLocation, photoViewerProvider); - if (cell.getMessageObject().type == MessageObject.TYPE_SUGGEST_PHOTO) { - PhotoViewer.getInstance().setTitle(LocaleController.getString("SuggestedPhoto", R.string.SuggestedPhoto)); - } - } else { - PhotoViewer.getInstance().openPhoto(message, null, 0, 0, 0, photoViewerProvider); - } - } + @Override + public boolean isEnabled(RecyclerView.ViewHolder holder) { + return false; + } + } - @Override - public BaseFragment getBaseFragment() { - return ChatActivity.this; - } + private class SearchItemListener extends 
ActionBarMenuItem.ActionBarMenuItemSearchListener { - @Override - public int getTopicId() { - return ChatActivity.this.getTopicId(); - } + boolean searchWas; - @Override - public boolean didLongPress(ChatActionCell cell, float x, float y) { - if (inPreviewMode) { - return false; - } - return createMenu(cell, false, false, x, y); - } + @Override + public boolean canCollapseSearch() { + if (messagesSearchListView.getTag() != null) { + showMessagesSearchListView(false); + return false; + } + return true; + } - @Override - public void needOpenUserProfile(long uid) { - openUserProfile(uid); - } + @Override + public void onSearchCollapse() { + if (searchCalendarButton != null) { + searchCalendarButton.setVisibility(View.VISIBLE); + } + if (searchUserButton != null) { + searchUserButton.setVisibility(View.VISIBLE); + } + if (searchingForUser) { + mentionContainer.getAdapter().searchUsernameOrHashtag(null, 0, null, false, true); + searchingForUser = false; + } + mentionContainer.setReversed(false); + mentionContainer.getAdapter().setSearchingMentions(false); + searchingUserMessages = null; + searchingChatMessages = null; + searchItem.setSearchFieldHint(LocaleController.getString("Search", R.string.Search)); + searchItem.setSearchFieldCaption(null); + AndroidUtilities.updateViewVisibilityAnimated(avatarContainer, true, 0.95f, true); + if (editTextItem != null && editTextItem.getTag() != null) { + if (headerItem != null) { + headerItem.setVisibility(View.GONE); + } + if (editTextItem != null) { + editTextItem.setVisibility(View.VISIBLE); + checkEditTextItemMenu(); + } + if (attachItem != null) { + attachItem.setVisibility(View.GONE); + } + if (searchIconItem != null && showSearchAsIcon) { + searchIconItem.setVisibility(View.GONE); + } + if (audioCallIconItem != null && showAudioCallAsIcon) { + audioCallIconItem.setVisibility(View.GONE); + } + } else if (chatActivityEnterView.hasText() && TextUtils.isEmpty(chatActivityEnterView.getSlowModeTimer()) && (currentChat == null || ChatObject.canSendPlain(currentChat))) { + if (headerItem != null) { + headerItem.setVisibility(View.GONE); + } + if (editTextItem != null) { + editTextItem.setVisibility(View.GONE); + } + if (attachItem != null) { + attachItem.setVisibility(View.VISIBLE); + } + if (searchIconItem != null && showSearchAsIcon) { + searchIconItem.setVisibility(View.GONE); + } + if (audioCallIconItem != null && showAudioCallAsIcon) { + audioCallIconItem.setVisibility(View.GONE); + } + } else { + if (headerItem != null) { + headerItem.setVisibility(View.VISIBLE); + } + if (audioCallIconItem != null && showAudioCallAsIcon) { + audioCallIconItem.setVisibility(View.VISIBLE); + } + if (searchIconItem != null && showSearchAsIcon) { + searchIconItem.setVisibility(View.VISIBLE); + } + if (editTextItem != null) { + editTextItem.setVisibility(View.GONE); + } + if (attachItem != null) { + attachItem.setVisibility(View.GONE); + } + } + if (threadMessageId == 0 && !UserObject.isReplyUser(currentUser) || threadMessageObject != null && threadMessageObject.getRepliesCount() < 10) { + searchItem.setVisibility(View.GONE); + } + // NekoX: hide viewInChat Item when searching + if (viewInChatItem != null) + viewInChatItem.setVisibility(View.VISIBLE); + searchItemVisible = false; + getMediaDataController().clearFoundMessageObjects(); + if (messagesSearchAdapter != null) { + messagesSearchAdapter.notifyDataSetChanged(); + } + removeSelectedMessageHighlight(); + updateBottomOverlay(); + updatePinnedMessageView(true); + updateVisibleRows(); + } - @Override - public void 
didPressReplyMessage(ChatActionCell cell, int id) { - MessageObject messageObject = cell.getMessageObject(); - scrollToMessageId(id, messageObject.getId(), true, messageObject.getDialogId() == mergeDialogId ? 1 : 0, true, 0); - } + @Override + public void onSearchExpand() { + if (threadMessageId != 0 || UserObject.isReplyUser(currentUser)) { + openSearchWithText(null); + } + if (!openSearchKeyboard) { + return; + } + saveKeyboardPositionBeforeTransition(); + AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); + AndroidUtilities.runOnUIThread(() -> { + searchWas = false; + searchItem.getSearchField().requestFocus(); + AndroidUtilities.showKeyboard(searchItem.getSearchField()); + removeKeyboardPositionBeforeTransition(); + }, 500); + } - @Override - public void didPressBotButton(MessageObject messageObject, TLRPC.KeyboardButton button) { - if (getParentActivity() == null || bottomOverlayChat.getVisibility() == View.VISIBLE && - !(button instanceof TLRPC.TL_keyboardButtonSwitchInline) && !(button instanceof TLRPC.TL_keyboardButtonCallback) && - !(button instanceof TLRPC.TL_keyboardButtonGame) && !(button instanceof TLRPC.TL_keyboardButtonUrl) && - !(button instanceof TLRPC.TL_keyboardButtonBuy) && !(button instanceof TLRPC.TL_keyboardButtonUrlAuth) && - !(button instanceof TLRPC.TL_keyboardButtonUserProfile)) { - return; - } - chatActivityEnterView.didPressedBotButton(button, messageObject, messageObject); - } + @Override + public void onSearchPressed(EditText editText) { + searchWas = true; + updateSearchButtons(0, 0, -1); + getMediaDataController().searchMessagesInChat(editText.getText().toString(), dialog_id, mergeDialogId, classGuid, 0, threadMessageId, searchingUserMessages, searchingChatMessages); + } - @Override - public boolean canDrawOutboundsContent() { - return false; - } - }); - } else if (viewType == 2) { - view = new ChatUnreadCell(mContext, themeDelegate); - } else if (viewType == 3) { - view = new BotHelpCell(mContext, themeDelegate); - ((BotHelpCell) view).setDelegate(url -> { - if (url.startsWith("@")) { - getMessagesController().openByUserName(url.substring(1), ChatActivity.this, 0); - } else if (url.startsWith("#") || url.startsWith("$")) { - DialogsActivity fragment = new DialogsActivity(null); - fragment.setSearchString(url); - presentFragment(fragment); - } else if (url.startsWith("/")) { - chatActivityEnterView.setCommand(null, url, false, false); - if (chatActivityEnterView.getFieldText() == null) { - hideFieldPanel(false); - } - } else { - processExternalUrl(0, url, null, null, false); - } - }); - } else if (viewType == 4) { - view = new ChatLoadingCell(mContext, contentView, themeDelegate); + @Override + public void onTextChanged(EditText editText) { + showMessagesSearchListView(false); + createSearchContainer(); + if (searchingForUser) { + mentionContainer.getAdapter().searchUsernameOrHashtag("@" + editText.getText().toString(), 0, messages, true, true); + } else if (searchingUserMessages == null && searchingChatMessages == null && searchUserButton != null && TextUtils.equals(editText.getText(), LocaleController.getString("SearchFrom", R.string.SearchFrom))) { + searchUserButton.callOnClick(); } - view.setLayoutParams(new RecyclerView.LayoutParams(RecyclerView.LayoutParams.MATCH_PARENT, RecyclerView.LayoutParams.WRAP_CONTENT)); - return new RecyclerListView.Holder(view); } @Override - public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { - if (position == botInfoRow || position == botInfoEmptyRow) { - BotHelpCell helpView = 
(BotHelpCell) holder.itemView; - if (UserObject.isReplyUser(currentUser)) { - helpView.setText(false, LocaleController.getString("RepliesChatInfo", R.string.RepliesChatInfo)); - } else { - TLRPC.BotInfo mBotInfo = botInfo.size() != 0 ? botInfo.get(currentUser.id) : null; - helpView.setText(true, mBotInfo != null ? mBotInfo.description : null, mBotInfo != null ? mBotInfo.description_document != null ? mBotInfo.description_document : mBotInfo.description_photo : null, mBotInfo); - } - } else if (position == loadingDownRow || position == loadingUpRow) { - ChatLoadingCell loadingCell = (ChatLoadingCell) holder.itemView; - loadingCell.setProgressVisible(loadsCount > 1); - } else if (position >= messagesStartRow && position < messagesEndRow) { - ArrayList messages = isFrozen ? frozenMessages : ChatActivity.this.messages; - - MessageObject message = messages.get(position - messagesStartRow); - View view = holder.itemView; - boolean fromUserBlocked = getMessagesController().blockePeers.indexOfKey(message.getFromChatId()) >= 0 && NekoConfig.ignoreBlocked.Bool(); + public void onCaptionCleared() { + createSearchContainer(); + if (searchingUserMessages != null || searchingChatMessages != null) { + searchUserButton.callOnClick(); + } else { + if (searchingForUser) { + mentionContainer.getAdapter().searchUsernameOrHashtag(null, 0, null, false, true); + searchingForUser = false; + searchItem.setSearchFieldText("", true); + } + searchItem.setSearchFieldHint(LocaleController.getString("Search", R.string.Search)); + searchCalendarButton.setVisibility(View.VISIBLE); + searchUserButton.setVisibility(View.VISIBLE); + searchingUserMessages = null; + searchingChatMessages = null; + } + } - if (view instanceof ChatMessageCell) { - final ChatMessageCell messageCell = (ChatMessageCell) view; - MessageObject.GroupedMessages groupedMessages = getValidGroupedMessage(message); - messageCell.isChat = currentChat != null || UserObject.isUserSelf(currentUser) || UserObject.isReplyUser(currentUser); - messageCell.isBot = currentUser != null && currentUser.bot; - messageCell.isMegagroup = ChatObject.isChannel(currentChat) && currentChat.megagroup; - messageCell.isForum = ChatObject.isForum(currentChat); - messageCell.isForumGeneral = ChatObject.isForum(currentChat) && isTopic && getTopicId() == 1; - messageCell.isThreadChat = threadMessageId != 0 || messageCell.isForum && isTopic; - messageCell.hasDiscussion = chatMode != MODE_SCHEDULED && ChatObject.isChannel(currentChat) && currentChat.has_link && !currentChat.megagroup; - messageCell.isPinned = chatMode == 0 && (pinnedMessageObjects.containsKey(message.getId()) || groupedMessages != null && !groupedMessages.messages.isEmpty() && pinnedMessageObjects.containsKey(groupedMessages.messages.get(0).getId())); - messageCell.linkedChatId = chatMode != MODE_SCHEDULED && chatInfo != null ? 
chatInfo.linked_chat_id : 0; - messageCell.isRepliesChat = UserObject.isReplyUser(currentUser); - messageCell.isPinnedChat = chatMode == MODE_PINNED; - boolean pinnedBottom = false; - boolean pinnedBottomByGroup = false; - boolean pinnedTop = false; - boolean pinnedTopByGroup = false; + @Override + public boolean forceShowClear() { + return searchingForUser; + } + } + private SearchItemListener searchItemListener; + private SearchItemListener getSearchItemListener() { + if (searchItemListener == null) { + searchItemListener = new SearchItemListener(); + } + return searchItemListener; + } - int prevPosition; - int nextPosition; - if (groupedMessages != null) { - MessageObject.GroupedMessagePosition pos = groupedMessages.positions.get(message); - if (pos != null) { - if (groupedMessages.isDocuments) { - prevPosition = position + groupedMessages.posArray.indexOf(pos) + 1; - nextPosition = position - groupedMessages.posArray.size() + groupedMessages.posArray.indexOf(pos); - } else { - if ((pos.flags & MessageObject.POSITION_FLAG_TOP) != 0) { - prevPosition = position + groupedMessages.posArray.indexOf(pos) + 1; - } else { - pinnedTop = true; - pinnedTopByGroup = true; - prevPosition = -100; - } - if ((pos.flags & MessageObject.POSITION_FLAG_BOTTOM) != 0) { - nextPosition = position - groupedMessages.posArray.size() + groupedMessages.posArray.indexOf(pos); - } else { - pinnedBottom = true; - pinnedBottomByGroup = true; - nextPosition = -100; - } - } - } else { - prevPosition = -100; - nextPosition = -100; - } - } else { - nextPosition = position - 1; - prevPosition = position + 1; - } - int nextType = getItemViewType(nextPosition); - int prevType = getItemViewType(prevPosition); + private ChatMessageCellDelegate chatMessageCellDelegate; + private ChatMessageCellDelegate getChatMessageCellDelegate() { + if (chatMessageCellDelegate == null) { + chatMessageCellDelegate = new ChatMessageCellDelegate(); + } + return chatMessageCellDelegate; + } + private class ChatMessageCellDelegate implements ChatMessageCell.ChatMessageCellDelegate { + @Override + public boolean isReplyOrSelf() { + return UserObject.isReplyUser(currentUser) || UserObject.isUserSelf(currentUser); + } - if (!(message.messageOwner.reply_markup instanceof TLRPC.TL_replyInlineMarkup) && nextType == holder.getItemViewType()) { - MessageObject nextMessage = messages.get(nextPosition - messagesStartRow); - pinnedBottom = nextMessage.isOutOwner() == message.isOutOwner() && Math.abs(nextMessage.messageOwner.date - message.messageOwner.date) <= 5 * 60; - if (pinnedBottom) { - if (message.isImportedForward() || nextMessage.isImportedForward()) { - if (message.isImportedForward() && nextMessage.isImportedForward()) { - if (Math.abs(nextMessage.messageOwner.fwd_from.date - message.messageOwner.fwd_from.date) <= 5 * 60) { - if (nextMessage.messageOwner.fwd_from.from_name != null && message.messageOwner.fwd_from.from_name != null) { - pinnedBottom = nextMessage.messageOwner.fwd_from.from_name.equals(message.messageOwner.fwd_from.from_name); - } else if (nextMessage.messageOwner.fwd_from.from_id != null && message.messageOwner.fwd_from.from_id != null) { - pinnedBottom = MessageObject.getPeerId(nextMessage.messageOwner.fwd_from.from_id) == MessageObject.getPeerId(message.messageOwner.fwd_from.from_id); - } else { - pinnedBottom = false; - } - } else { - pinnedBottom = false; + @Override + public void didPressHint(ChatMessageCell cell, int type) { + if (type == 0) { + TLRPC.TL_messageMediaPoll media = (TLRPC.TL_messageMediaPoll) 
cell.getMessageObject().messageOwner.media; + showPollSolution(cell.getMessageObject(), media.results); + } else if (type == 1) { + MessageObject messageObject = cell.getMessageObject(); + if (messageObject.messageOwner.fwd_from == null || TextUtils.isEmpty(messageObject.messageOwner.fwd_from.psa_type)) { + return; + } + CharSequence text = LocaleController.getString("PsaMessageInfo_" + messageObject.messageOwner.fwd_from.psa_type); + if (TextUtils.isEmpty(text)) { + text = LocaleController.getString("PsaMessageInfoDefault", R.string.PsaMessageInfoDefault); + } + SpannableStringBuilder stringBuilder = new SpannableStringBuilder(text); + MessageObject.addLinks(false, stringBuilder); + MessageObject.GroupedMessages group = cell.getCurrentMessagesGroup(); + if (group != null) { + for (int a = 0, N = group.posArray.size(); a < N; a++) { + MessageObject.GroupedMessagePosition pos = group.posArray.get(a); + if ((pos.flags & MessageObject.POSITION_FLAG_LEFT) != 0) { + MessageObject m = group.messages.get(a); + if (m != messageObject) { + messageObject = m; + int count = chatListView.getChildCount(); + for (int b = 0; b < count; b++) { + View view = chatListView.getChildAt(b); + if (!(view instanceof ChatMessageCell)) { + continue; + } + ChatMessageCell c = (ChatMessageCell) view; + if (messageObject.equals(c.getMessageObject())) { + cell = c; } - } else { - pinnedBottom = false; - } - } else if (currentChat != null) { - long fromId = nextMessage.getFromChatId(); - pinnedBottom = fromId == message.getFromChatId(); - if (!pinnedBottomByGroup && pinnedBottom && fromId < 0 && currentChat.megagroup) { - pinnedBottom = false; - } - } else if (UserObject.isUserSelf(currentUser) || UserObject.isReplyUser(currentUser)) { - if (message.isPrivateForward() || nextMessage.isPrivateForward()) { - pinnedBottom = false; - } else { - pinnedBottom = nextMessage.getSenderId() == message.getSenderId(); } } + break; } } - if (prevType == holder.getItemViewType()) { - MessageObject prevMessage = messages.get(prevPosition - messagesStartRow); - pinnedTop = !(prevMessage.messageOwner.reply_markup instanceof TLRPC.TL_replyInlineMarkup) && prevMessage.isOutOwner() == message.isOutOwner() && Math.abs(prevMessage.messageOwner.date - message.messageOwner.date) <= 5 * 60; - if (pinnedTop) { - if (message.isImportedForward() || prevMessage.isImportedForward()) { - if (message.isImportedForward() && prevMessage.isImportedForward()) { - if (Math.abs(message.messageOwner.fwd_from.date - prevMessage.messageOwner.fwd_from.date) <= 5 * 60) { - if (prevMessage.messageOwner.fwd_from.from_name != null && message.messageOwner.fwd_from.from_name != null) { - pinnedTop = prevMessage.messageOwner.fwd_from.from_name.equals(message.messageOwner.fwd_from.from_name); - } else if (prevMessage.messageOwner.fwd_from.from_id != null && message.messageOwner.fwd_from.from_id != null) { - pinnedTop = MessageObject.getPeerId(prevMessage.messageOwner.fwd_from.from_id) == MessageObject.getPeerId(message.messageOwner.fwd_from.from_id); - } else { - pinnedTop = false; - } - } else { - pinnedTop = false; - } - } else { - pinnedTop = false; - } - } else if (currentChat != null) { - long fromId = prevMessage.getFromChatId(); - pinnedTop = fromId == message.getFromChatId() && !message.isImportedForward() && !prevMessage.isImportedForward(); - if (!pinnedTopByGroup && pinnedTop && fromId < 0 && currentChat.megagroup) { - pinnedTop = false; - } - } else if (UserObject.isUserSelf(currentUser) || UserObject.isReplyUser(currentUser)) { - if 
(message.isPrivateForward() || prevMessage.isPrivateForward()) { - pinnedTop = false; - } else { - pinnedTop = prevMessage.getSenderId() == message.getSenderId(); - } - } + } + showInfoHint(messageObject, stringBuilder, 1); + } + cell.showHintButton(false, true, type); + } + + @Override + public boolean shouldDrawThreadProgress(ChatMessageCell cell) { + MessageObject.GroupedMessages group = cell.getCurrentMessagesGroup(); + MessageObject message; + if (group != null && !group.messages.isEmpty()) { + message = group.messages.get(0); + } else { + message = cell.getMessageObject(); + } + if (message == null) { + return false; + } + return message.getId() == commentLoadingMessageId; + } + + @Override + public void didPressSideButton(ChatMessageCell cell) { + if (getParentActivity() == null) { + return; + } + if (chatActivityEnterView != null) { + chatActivityEnterView.closeKeyboard(); + } + MessageObject messageObject = cell.getMessageObject(); + if (chatMode == MODE_PINNED) { + chatActivityDelegate.openReplyMessage(messageObject.getId()); + finishFragment(); + } else if ((UserObject.isReplyUser(currentUser) || UserObject.isUserSelf(currentUser)) && messageObject.messageOwner.fwd_from.saved_from_peer != null) { + if (UserObject.isReplyUser(currentUser) && messageObject.messageOwner.reply_to != null && messageObject.messageOwner.reply_to.reply_to_top_id != 0) { + openDiscussionMessageChat(messageObject.messageOwner.reply_to.reply_to_peer_id.channel_id, null, messageObject.messageOwner.reply_to.reply_to_top_id, 0, -1, messageObject.messageOwner.fwd_from.saved_from_msg_id, messageObject); + } else { + openOriginalReplyChat(messageObject); + } + } else { + ArrayList arrayList = null; + if (messageObject.getGroupId() != 0) { + MessageObject.GroupedMessages groupedMessages = groupedMessagesMap.get(messageObject.getGroupId()); + if (groupedMessages != null) { + arrayList = groupedMessages.messages; + } + } + if (arrayList == null) { + arrayList = new ArrayList<>(); + arrayList.add(messageObject); + } + showDialog(new ShareAlert(getContext(), ChatActivity.this, arrayList, null, null, ChatObject.isChannel(currentChat), null, null, false, false, themeDelegate) { + @Override + public void dismissInternal() { + super.dismissInternal(); + AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); + if (chatActivityEnterView.getVisibility() == View.VISIBLE) { + fragmentView.requestLayout(); } } - if (ChatObject.isChannel(currentChat) && currentChat.megagroup && message.getFromChatId() <= 0 && message.messageOwner.fwd_from != null && message.messageOwner.fwd_from.saved_from_peer instanceof TLRPC.TL_peerChannel) { - if (!pinnedTopByGroup) { - pinnedTop = false; + + @Override + protected void onSend(LongSparseArray dids, int count, TLRPC.TL_forumTopic topic) { + createUndoView(); + if (undoView == null) { + return; } - if (!pinnedBottomByGroup) { - pinnedBottom = false; + if (dids.size() == 1) { + undoView.showWithAction(dids.valueAt(0).id, UndoView.ACTION_FWD_MESSAGES, count, topic, null, null); + } else { + undoView.showWithAction(0, UndoView.ACTION_FWD_MESSAGES, count, dids.size(), null, null); } } + }); + AndroidUtilities.setAdjustResizeToNothing(getParentActivity(), classGuid); + fragmentView.requestLayout(); + } + } - messageCell.setMessageObject(message, groupedMessages, pinnedBottom, pinnedTop); - messageCell.setSpoilersSuppressed(chatListView.getScrollState() != RecyclerView.SCROLL_STATE_IDLE); - messageCell.setHighlighted(highlightMessageId != Integer.MAX_VALUE && message.getId() == 
highlightMessageId); - if (highlightMessageId != Integer.MAX_VALUE) { - startMessageUnselect(); - } - int index; - if ((index = animatingMessageObjects.indexOf(message)) != -1) { - boolean applyAnimation = false; - if (message.type == MessageObject.TYPE_ROUND_VIDEO && instantCameraView.getTextureView() != null) { - applyAnimation = true; - messageCell.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { - @Override - public boolean onPreDraw() { + @Override + public boolean needPlayMessage(MessageObject messageObject, boolean muted) { + if (messageObject.isVoice() || messageObject.isRoundVideo()) { + boolean result = MediaController.getInstance().playMessage(messageObject, muted); + MediaController.getInstance().setVoiceMessagesPlaylist(result ? createVoiceMessagesPlaylist(messageObject, false) : null, false); + return result; + } else if (messageObject.isMusic()) { + return MediaController.getInstance().setPlaylist(messages, messageObject, mergeDialogId); + } + return false; + } - PipRoundVideoView pipRoundVideoView = PipRoundVideoView.getInstance(); - if (pipRoundVideoView != null) { - pipRoundVideoView.showTemporary(true); - } + @Override + public void videoTimerReached() { + showNoSoundHint(); + } - messageCell.getViewTreeObserver().removeOnPreDrawListener(this); - ImageReceiver imageReceiver = messageCell.getPhotoImage(); - float w = imageReceiver.getImageWidth(); - org.telegram.ui.Components.Rect rect = instantCameraView.getCameraRect(); - float scale = w / rect.width; - int[] position = new int[2]; - messageCell.getTransitionParams().ignoreAlpha = true; - messageCell.setAlpha(0.0f); - messageCell.setTimeAlpha(0.0f); - messageCell.getLocationOnScreen(position); - position[0] += imageReceiver.getImageX() - messageCell.getAnimationOffsetX(); - position[1] += imageReceiver.getImageY() - messageCell.getTranslationY(); - final InstantCameraView.InstantViewCameraContainer cameraContainer = instantCameraView.getCameraContainer(); - cameraContainer.setPivotX(0.0f); - cameraContainer.setPivotY(0.0f); - AnimatorSet animatorSet = new AnimatorSet(); + @Override + public void didPressTime(ChatMessageCell cell) { + createUndoView(); + if (undoView == null) { + return; + } + undoView.showWithAction(dialog_id, UndoView.ACTION_IMPORT_INFO, null); + } - cameraContainer.setImageReceiver(imageReceiver); + @Override + public void didPressChannelAvatar(ChatMessageCell cell, TLRPC.Chat chat, int postId, float touchX, float touchY) { + if (chat == null) { + return; + } + if (actionBar.isActionModeShowed() || reportType >= 0) { + processRowSelect(cell, true, touchX, touchY); + return; + } + openChat(cell, chat, postId); + } - instantCameraView.cancelBlur(); + @Override + public void didPressHiddenForward(ChatMessageCell cell) { + if (cell.getMessageObject().isImportedForward()) { + didPressTime(cell); + return; + } + showForwardHint(cell); + } - AnimatorSet allAnimators = new AnimatorSet(); - animatorSet.playTogether( - ObjectAnimator.ofFloat(cameraContainer, View.SCALE_X, scale), - ObjectAnimator.ofFloat(cameraContainer, View.SCALE_Y, scale), - ObjectAnimator.ofFloat(cameraContainer, View.TRANSLATION_Y, position[1] - rect.y), - ObjectAnimator.ofFloat(instantCameraView.getSwitchButtonView(), View.ALPHA, 0.0f), - ObjectAnimator.ofInt(instantCameraView.getPaint(), AnimationProperties.PAINT_ALPHA, 0), - ObjectAnimator.ofFloat(instantCameraView.getMuteImageView(), View.ALPHA, 0.0f) - ); - animatorSet.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); - ObjectAnimator o = 
ObjectAnimator.ofFloat(cameraContainer, View.TRANSLATION_X, position[0] - rect.x); - o.setInterpolator(CubicBezierInterpolator.DEFAULT); + @Override + public void didPressOther(ChatMessageCell cell, float otherX, float otherY) { + MessageObject messageObject = cell.getMessageObject(); + if (messageObject.type == MessageObject.TYPE_PHONE_CALL) { + if (currentUser != null) { + VoIPHelper.startCall(currentUser, messageObject.isVideoCall(), userInfo != null && userInfo.video_calls_available, getParentActivity(), getMessagesController().getUserFull(currentUser.id), getAccountInstance()); + } + } else { + createMenu(cell, true, false, otherX, otherY, messageObject.isMusic()); + } + } - allAnimators.playTogether(o, animatorSet); - allAnimators.setStartDelay(120); - allAnimators.setDuration(180); + @Override + public void didPressUserAvatar(ChatMessageCell cell, TLRPC.User user, float touchX, float touchY) { + if (actionBar.isActionModeShowed() || reportType >= 0) { + processRowSelect(cell, true, touchX, touchY); + return; + } + if (cell != null && cell.getMessageObject() != null && cell.getMessageObject().isSponsored()) { + didPressInstantButton(cell, 10); + return; + } + openProfile(user, ChatObject.isForum(currentChat) || isThreadChat()); + } - instantCameraView.setIsMessageTransition(true); - allAnimators.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - messageCell.setAlpha(1.0f); - messageCell.getTransitionParams().ignoreAlpha = false; - Property ALPHA = new AnimationProperties.FloatProperty("alpha") { - @Override - public void setValue(ChatMessageCell object, float value) { - object.setTimeAlpha(value); - } + @Override + public boolean didLongPressUserAvatar(ChatMessageCell cell, TLRPC.User user, float touchX, float touchY) { + if (isAvatarPreviewerEnabled()) { + final boolean enableMention = currentChat != null && (bottomOverlayChat == null || bottomOverlayChat.getVisibility() != View.VISIBLE) && (bottomOverlay == null || bottomOverlay.getVisibility() != View.VISIBLE); + final AvatarPreviewer.MenuItem[] menuItems = new AvatarPreviewer.MenuItem[2 + (enableMention ? 
1 : 0)]; + menuItems[0] = AvatarPreviewer.MenuItem.OPEN_PROFILE; + menuItems[1] = AvatarPreviewer.MenuItem.SEND_MESSAGE; + if (enableMention) { + menuItems[2] = AvatarPreviewer.MenuItem.MENTION; + } + final TLRPC.UserFull userFull = getMessagesController().getUserFull(user.id); + final AvatarPreviewer.Data data; + if (userFull != null) { + data = AvatarPreviewer.Data.of(userFull, menuItems); + } else { + data = AvatarPreviewer.Data.of(user, classGuid, menuItems); + } + if (AvatarPreviewer.canPreview(data)) { + AvatarPreviewer.getInstance().show((ViewGroup) fragmentView, data, item -> { + switch (item) { + case SEND_MESSAGE: + openDialog(cell, user); + break; + case OPEN_PROFILE: + openProfile(user); + break; + case MENTION: + appendMention(user); + break; + } + }); + return true; + } + } + return false; + } - @Override - public Float get(ChatMessageCell object) { - return object.getTimeAlpha(); - } - }; + private void appendMention(TLRPC.User user) { + if (chatActivityEnterView != null) { + SpannableStringBuilder sb; + final CharSequence text = chatActivityEnterView.getFieldText(); + if (text != null) { + sb = new SpannableStringBuilder(text); + if (text.charAt(text.length() - 1) != ' ') { + sb.append(" "); + } + } else { + sb = new SpannableStringBuilder(); + } + if (sb.length() > 0 && sb.charAt(sb.length() - 1) != ' ') { + sb.append(' '); + } + String username = UserObject.getPublicUsername(user); + if (username != null) { + sb.append("@").append(username).append(" "); + } else { + String name = UserObject.getFirstName(user, false); + Spannable spannable = new SpannableString(name + " "); + spannable.setSpan(new URLSpanUserMention("" + user.id, 3), 0, spannable.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + sb.append(spannable); + } + chatActivityEnterView.setFieldText(sb); + AndroidUtilities.runOnUIThread(() -> chatActivityEnterView.openKeyboard(), 200); + } + } - AnimatorSet animatorSet = new AnimatorSet(); - animatorSet.playTogether( - ObjectAnimator.ofFloat(cameraContainer, View.ALPHA, 0.0f), - ObjectAnimator.ofFloat(messageCell, ALPHA, 1.0f) - ); - animatorSet.setDuration(100); - animatorSet.setInterpolator(new DecelerateInterpolator()); - animatorSet.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - instantCameraView.setIsMessageTransition(false); - instantCameraView.hideCamera(true); - instantCameraView.setVisibility(View.INVISIBLE); - } - }); - animatorSet.start(); - } - }); - allAnimators.start(); - return true; - } - }); - } else if (message.isAnyKindOfSticker() && !message.isAnimatedEmojiStickers()) { - applyAnimation = true; - messageCell.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { - @Override - public boolean onPreDraw() { - messageCell.getViewTreeObserver().removeOnPreDrawListener(this); - MessageObject.SendAnimationData sendAnimationData = messageCell.getMessageObject().sendAnimationData; - if (sendAnimationData == null) { - return true; - } - animateSendingViews.add(messageCell); - ImageReceiver imageReceiver = messageCell.getPhotoImage(); - float w = imageReceiver.getImageWidth(); - float scale = sendAnimationData.width / w; - int[] position = new int[2]; - messageCell.getTransitionParams().ignoreAlpha = true; - messageCell.getLocationInWindow(position); - position[1] -= messageCell.getTranslationY(); - if (chatActivityEnterView.isTopViewVisible()) { - position[1] += AndroidUtilities.dp(48); - } + @Override + public boolean didLongPressChannelAvatar(ChatMessageCell cell, 
TLRPC.Chat chat, int postId, float touchX, float touchY) { + if (isAvatarPreviewerEnabled()) { + AvatarPreviewer.MenuItem[] menuItems = {AvatarPreviewer.MenuItem.OPEN_PROFILE}; + if (currentChat == null || currentChat.id != chat.id || isThreadChat()) { + menuItems = Arrays.copyOf(menuItems, 2); + menuItems[1] = chat.broadcast ? AvatarPreviewer.MenuItem.OPEN_CHANNEL : AvatarPreviewer.MenuItem.OPEN_GROUP; + } + final TLRPC.ChatFull chatFull = getMessagesController().getChatFull(chat.id); + final AvatarPreviewer.Data data; + if (chatFull != null) { + data = AvatarPreviewer.Data.of(chat, chatFull, menuItems); + } else { + data = AvatarPreviewer.Data.of(chat, classGuid, menuItems); + } + if (AvatarPreviewer.canPreview(data)) { + AvatarPreviewer.getInstance().show((ViewGroup) fragmentView, data, item -> { + switch (item) { + case OPEN_PROFILE: + openProfile(chat); + break; + case OPEN_GROUP: + case OPEN_CHANNEL: + openChat(cell, chat, 0); + break; + } + }); + return true; + } + } + return false; + } - AnimatorSet allAnimators = new AnimatorSet(); + private void openProfile(TLRPC.User user) { + openProfile(user, false); + } - Property param1 = new AnimationProperties.FloatProperty("p1") { - @Override - public void setValue(MessageObject.SendAnimationData object, float value) { - object.currentScale = value; - } + private void openProfile(TLRPC.User user, boolean expandPhoto) { + if (user != null && user.id != getUserConfig().getClientUserId()) { + if (user.photo == null || user.photo instanceof TLRPC.TL_userProfilePhotoEmpty) { + expandPhoto = false; + } + Bundle args = new Bundle(); + args.putLong("user_id", user.id); + args.putBoolean("expandPhoto", expandPhoto); + ProfileActivity fragment = new ProfileActivity(args); + fragment.setPlayProfileAnimation(currentUser != null && currentUser.id == user.id ? 
1 : 0); + AndroidUtilities.setAdjustResizeToNothing(getParentActivity(), classGuid); + presentFragment(fragment); + } + } - @Override - public Float get(MessageObject.SendAnimationData object) { - return object.currentScale; - } - }; - Property param2 = new AnimationProperties.FloatProperty("p2") { - @Override - public void setValue(MessageObject.SendAnimationData object, float value) { - object.currentX = value; - if (fragmentView != null) { - fragmentView.invalidate(); - } - } + private void openProfile(TLRPC.Chat chat) { + openProfile(chat, false); + } - @Override - public Float get(MessageObject.SendAnimationData object) { - return object.currentX; - } - }; - Property param3 = new AnimationProperties.FloatProperty("p3") { - @Override - public void setValue(MessageObject.SendAnimationData object, float value) { - object.currentY = value; - if (fragmentView != null) { - fragmentView.invalidate(); - } - } + private void openProfile(TLRPC.Chat chat, boolean expandPhoto) { + if (chat != null) { + Bundle args = new Bundle(); + args.putLong("chat_id", chat.id); + args.putBoolean("expandPhoto", expandPhoto); + presentFragment(new ProfileActivity(args)); + } + } - @Override - public Float get(MessageObject.SendAnimationData object) { - return object.currentY; - } - }; - AnimatorSet animatorSet = new AnimatorSet(); - animatorSet.playTogether( - ObjectAnimator.ofFloat(sendAnimationData, param1, scale, 1.0f), - ObjectAnimator.ofFloat(sendAnimationData, param3, sendAnimationData.y, position[1] + imageReceiver.getCenterY()) - ); - animatorSet.setInterpolator(ChatListItemAnimator.DEFAULT_INTERPOLATOR); - ObjectAnimator o = ObjectAnimator.ofFloat(sendAnimationData, param2, sendAnimationData.x, position[0] + imageReceiver.getCenterX()); - o.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + private void openDialog(ChatMessageCell cell, TLRPC.User user) { + if (user != null) { + Bundle args = new Bundle(); + args.putLong("user_id", user.id); + if (getMessagesController().checkCanOpenChat(args, ChatActivity.this, cell.getMessageObject())) { + presentFragment(new ChatActivity(args)); + } + } + } - allAnimators.playTogether(o, animatorSet); - allAnimators.setDuration(ChatListItemAnimator.DEFAULT_DURATION); + private void openChat(ChatMessageCell cell, TLRPC.Chat chat, int postId) { + if (currentChat != null && chat.id == currentChat.id) { + scrollToMessageId(postId, cell.getMessageObject().getId(), true, 0, true, 0); + } else if (currentChat == null || chat.id != currentChat.id || isThreadChat()) { + Bundle args = new Bundle(); + args.putLong("chat_id", chat.id); + if (postId != 0) { + args.putInt("message_id", postId); + } + if (getMessagesController().checkCanOpenChat(args, ChatActivity.this, cell.getMessageObject())) { + presentFragment(new ChatActivity(args)); + } + } + } - allAnimators.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - animateSendingViews.remove(messageCell); - if (fragmentView != null) { - fragmentView.invalidate(); - chatListView.invalidate(); - } - messageCell.setAlpha(1.0f); - messageCell.getTransitionParams().ignoreAlpha = false; - } - }); - allAnimators.start(); + private boolean isAvatarPreviewerEnabled() { + return UserObject.isUserSelf(currentUser) || (currentChat != null && (!ChatObject.isChannel(currentChat) || currentChat.megagroup)); + } - Property ALPHA = new AnimationProperties.FloatProperty("alpha") { - @Override - public void setValue(MessageObject.SendAnimationData object, float value) { - 
object.timeAlpha = value; - if (fragmentView != null) { - fragmentView.invalidate(); - } - } + @Override + public void didPressBotButton(ChatMessageCell cell, TLRPC.KeyboardButton button) { + if (getParentActivity() == null || bottomOverlayChat.getVisibility() == View.VISIBLE && + !(button instanceof TLRPC.TL_keyboardButtonSwitchInline) && !(button instanceof TLRPC.TL_keyboardButtonCallback) && + !(button instanceof TLRPC.TL_keyboardButtonGame) && !(button instanceof TLRPC.TL_keyboardButtonUrl) && + !(button instanceof TLRPC.TL_keyboardButtonBuy) && !(button instanceof TLRPC.TL_keyboardButtonUrlAuth) && + !(button instanceof TLRPC.TL_keyboardButtonUserProfile) && !(button instanceof TLRPC.TL_keyboardButtonRequestPeer)) { + return; + } + chatActivityEnterView.didPressedBotButton(button, cell.getMessageObject(), cell.getMessageObject(), makeProgressForBotButton(cell, button instanceof TLRPC.TL_keyboardButtonUrl ? button.url : null)); + } - @Override - public Float get(MessageObject.SendAnimationData object) { - return object.timeAlpha; - } - }; + @Override + public void needShowPremiumFeatures(String source) { + presentFragment(new PremiumPreviewFragment(source)); + } - AnimatorSet animatorSet2 = new AnimatorSet(); - animatorSet2.playTogether( - ObjectAnimator.ofFloat(sendAnimationData, ALPHA, 0.0f, 1.0f) - ); - animatorSet2.setDuration(100); - animatorSet2.setStartDelay(150); - animatorSet2.setInterpolator(new DecelerateInterpolator()); - animatorSet2.start(); - return true; - } - }); - } - if (applyAnimation || chatListItemAnimator == null) { - animatingMessageObjects.remove(index); - chatActivityEnterView.startMessageTransition(); - chatActivityEnterView.hideTopView(true); - } - } - if (fromUserBlocked) { - messageCell.setVisibility(View.GONE); - } else { - messageCell.setVisibility(View.VISIBLE); - } - if (!animatingDocuments.isEmpty() && animatingDocuments.containsKey(message.getDocument())) { - animatingDocuments.remove(message.getDocument()); - if (chatListItemAnimator != null) { - chatListItemAnimator.onGreetingStickerTransition(holder, greetingsViewContainer); - } - } - } else if (view instanceof ChatActionCell) { - ChatActionCell actionCell = (ChatActionCell) view; - actionCell.setMessageObject(message); - actionCell.setAlpha(1.0f); - actionCell.setSpoilersSuppressed(chatListView.getScrollState() != RecyclerView.SCROLL_STATE_IDLE); - } else if (view instanceof ChatUnreadCell) { - ChatUnreadCell unreadCell = (ChatUnreadCell) view; - unreadCell.setText(LocaleController.getString("UnreadMessages", R.string.UnreadMessages)); - if (createUnreadMessageAfterId != 0) { - createUnreadMessageAfterId = 0; - } + @Override + public void needShowPremiumBulletin(int type) { + if (type == 0) { + checkTopUndoView(); + if (topUndoView == null) { + return; } + topUndoView.showWithAction(0, UndoView.ACTION_PREMIUM_TRANSCRIPTION, null, () -> { + new PremiumFeatureBottomSheet(ChatActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_VOICE_TO_TEXT, true).show(); + getMessagesController().pressTranscribeButton(); + }); + try { + if (!NekoConfig.disableVibration.Bool()) + topUndoView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } catch (Exception ignored) {} } } @Override - public int getItemViewType(int position) { - if (clearingHistory) { - if (position == botInfoEmptyRow) { - return 3; - } + public void didLongPressBotButton(ChatMessageCell cell, TLRPC.KeyboardButton button) { + if (getParentActivity() == null || 
bottomOverlayChat.getVisibility() == View.VISIBLE && + !(button instanceof TLRPC.TL_keyboardButtonSwitchInline) && !(button instanceof TLRPC.TL_keyboardButtonCallback) && + !(button instanceof TLRPC.TL_keyboardButtonGame) && !(button instanceof TLRPC.TL_keyboardButtonUrl) && + !(button instanceof TLRPC.TL_keyboardButtonBuy) && !(button instanceof TLRPC.TL_keyboardButtonUrlAuth) && + !(button instanceof TLRPC.TL_keyboardButtonUserProfile)) { + return; } - if (position >= messagesStartRow && position < messagesEndRow) { - ArrayList messages = isFrozen ? frozenMessages : ChatActivity.this.messages; - return messages.get(position - messagesStartRow).contentType; - } else if (position == botInfoRow) { - return 3; + if (button instanceof TLRPC.TL_keyboardButtonUrl) { + openClickableLink(null, button.url, true, cell, cell.getMessageObject()); + try { + if (!NekoConfig.disableVibration.Bool()) + cell.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); + } catch (Exception ignore) {} } - return 4; } @Override - public void onViewAttachedToWindow(RecyclerView.ViewHolder holder) { - if (holder.itemView instanceof ChatMessageCell || holder.itemView instanceof ChatActionCell) { - View view = holder.itemView; - holder.itemView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { - @Override - public boolean onPreDraw() { - view.getViewTreeObserver().removeOnPreDrawListener(this); + public void didPressReaction(ChatMessageCell cell, TLRPC.ReactionCount reaction, boolean longpress) { + if (getParentActivity() == null) { + return; + } + if (longpress) { + if (!ChatObject.isChannelAndNotMegaGroup(currentChat) || dialog_id >= 0) { + if (!NekoConfig.disableVibration.Bool()) + cell.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); + FrameLayout scrimPopupContainerLayout = new FrameLayout(getParentActivity()) { + @Override + public boolean dispatchKeyEvent(KeyEvent event) { + if (event.getKeyCode() == KeyEvent.KEYCODE_BACK && event.getRepeatCount() == 0) { + closeMenu(); + } + return super.dispatchKeyEvent(event); + } - int height = chatListView.getMeasuredHeight(); - int top = view.getTop(); - int bottom = view.getBottom(); - int viewTop = top >= 0 ? 0 : -top; - int viewBottom = view.getMeasuredHeight(); - if (viewBottom > height) { - viewBottom = viewTop + height; + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + int h = Math.min(MeasureSpec.getSize(heightMeasureSpec), AndroidUtilities.dp(ReactedUsersListView.VISIBLE_ITEMS * ReactedUsersListView.ITEM_HEIGHT_DP)); + if (h == 0) { + h = AndroidUtilities.dp(ReactedUsersListView.VISIBLE_ITEMS * ReactedUsersListView.ITEM_HEIGHT_DP); + } + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(h, MeasureSpec.AT_MOST)); } - int recyclerChatViewHeight = (contentView.getHeightWithKeyboard() - (inPreviewMode ? 
0 : AndroidUtilities.dp(48)) - chatListView.getTop()); - int keyboardOffset = contentView.getKeyboardHeight(); - int parentHeight = viewBottom - viewTop; - if (keyboardOffset < AndroidUtilities.dp(20) && chatActivityEnterView.isPopupShowing() || chatActivityEnterView.panelAnimationInProgress()) { - keyboardOffset = chatActivityEnterView.getEmojiPadding(); + + Path path = new Path(); + @Override + protected void dispatchDraw(Canvas canvas) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + canvas.save(); + path.rewind(); + path.addRoundRect(AndroidUtilities.dp(8), AndroidUtilities.dp(8), getWidth() - AndroidUtilities.dp(8), getHeight() - AndroidUtilities.dp(8), AndroidUtilities.dp(6), AndroidUtilities.dp(6), Path.Direction.CW); + canvas.clipPath(path); + super.dispatchDraw(canvas); + canvas.restore(); + } else { + super.dispatchDraw(canvas); + } } - if (holder.itemView instanceof ChatMessageCell) { - ChatMessageCell chatMessageCell = (ChatMessageCell) view; - chatMessageCell.setVisiblePart(viewTop, viewBottom - viewTop, recyclerChatViewHeight, keyboardOffset, view.getY() + (isKeyboardVisible() ? chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(), contentView.getMeasuredWidth(), contentView.getBackgroundSizeY(), blurredViewTopOffset, blurredViewBottomOffset); - markSponsoredAsRead(chatMessageCell.getMessageObject()); - } else if (holder.itemView instanceof ChatActionCell) { - if (actionBar != null && contentView != null) { - ((ChatActionCell) view).setVisiblePart(view.getY() + (isKeyboardVisible() ? chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(), contentView.getBackgroundSizeY()); + }; + scrimPopupContainerLayout.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT)); + + Rect backgroundPaddings = new Rect(); + Drawable shadowDrawable2 = ContextCompat.getDrawable(getParentActivity(), R.drawable.popup_fixed_alert).mutate(); + shadowDrawable2.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_actionBarDefaultSubmenuBackground), PorterDuff.Mode.MULTIPLY)); + shadowDrawable2.getPadding(backgroundPaddings); + scrimPopupContainerLayout.setBackground(shadowDrawable2); + + ReactionsLayoutInBubble.ReactionButton button = cell.getReactionButton(ReactionsLayoutInBubble.VisibleReaction.fromTLReaction(reaction.reaction)); + if (button == null) { + return; + } + float bottom = cell.reactionsLayoutInBubble.y + button.y + AndroidUtilities.dp(28); + float left = cell.reactionsLayoutInBubble.x + button.x; + int[] loc = new int[2]; + cell.getLocationInWindow(loc); + scrimPopupContainerLayout.addView(new ReactedUsersListView(getParentActivity(), themeDelegate, currentAccount, cell.getPrimaryMessageObject(), reaction, false) + .setOnCustomEmojiSelectedListener((reactedUsersListView1, customEmojiStickerSets) -> { + EmojiPacksAlert alert = new EmojiPacksAlert(ChatActivity.this, getParentActivity(), themeDelegate, customEmojiStickerSets) { + @Override + public void dismiss() { + super.dismiss(); + dimBehindView(false); + } + }; + alert.setCalcMandatoryInsets(isKeyboardVisible()); + alert.setDimBehind(false); + closeMenu(false); + showDialog(alert); + }) + .setOnProfileSelectedListener((view1, userId, messagePeerReaction) -> { + Bundle args = new Bundle(); + args.putLong("user_id", userId); + args.putInt("report_reaction_message_id", cell.getMessageObject().getId()); + args.putLong("report_reaction_from_dialog_id", dialog_id); + ProfileActivity 
fragment = new ProfileActivity(args); + presentFragment(fragment); + closeMenu(); + }), LayoutHelper.createFrame(240, LayoutHelper.WRAP_CONTENT)); + + scrimPopupWindow = new ActionBarPopupWindow(scrimPopupContainerLayout, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT) { + @Override + public void dismiss() { + super.dismiss(); + if (scrimPopupWindow != this) { + return; } + scrimPopupWindow = null; + menuDeleteItem = null; + scrimPopupWindowItems = null; + chatLayoutManager.setCanScrollVertically(true); + if (scrimPopupWindowHideDimOnDismiss) { + dimBehindView(false); + } else { + scrimPopupWindowHideDimOnDismiss = true; + } + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { + chatActivityEnterView.getEditField().setAllowDrawCursor(true); + } + } + }; + scrimPopupWindow.setPauseNotifications(true); + scrimPopupWindow.setDismissAnimationDuration(220); + scrimPopupWindow.setOutsideTouchable(true); + scrimPopupWindow.setClippingEnabled(true); + scrimPopupWindow.setAnimationStyle(R.style.PopupContextAnimation); + scrimPopupWindow.setFocusable(true); + scrimPopupContainerLayout.measure(View.MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), View.MeasureSpec.AT_MOST), View.MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), View.MeasureSpec.AT_MOST)); + scrimPopupWindow.setInputMethodMode(ActionBarPopupWindow.INPUT_METHOD_NOT_NEEDED); + scrimPopupWindow.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_UNSPECIFIED); + scrimPopupWindow.getContentView().setFocusableInTouchMode(true); + + int totalHeight = contentView.getHeight(); + int height = scrimPopupContainerLayout.getMeasuredHeight(); + int keyboardHeight = contentView.measureKeyboardHeight(); + if (keyboardHeight > AndroidUtilities.dp(20)) { + totalHeight += keyboardHeight; + } + + int popupX = (int) (left - AndroidUtilities.dp(28)); + popupX = Math.max(AndroidUtilities.dp(6), Math.min(chatListView.getMeasuredWidth() - AndroidUtilities.dp(6) - scrimPopupContainerLayout.getMeasuredWidth(), popupX)); + if (AndroidUtilities.isTablet()) { + int[] location = new int[2]; + fragmentView.getLocationInWindow(location); + popupX += location[0]; + } + int popupY; + if (height < totalHeight) { + if (height < totalHeight / 2f && chatListView.getY() + cell.getY() + cell.reactionsLayoutInBubble.y + button.y > totalHeight / 2f) { + popupY = (int) (chatListView.getY() + cell.getY() + cell.reactionsLayoutInBubble.y + button.y - height); + } else { + popupY = (int) (chatListView.getY() + cell.getY() + cell.reactionsLayoutInBubble.y + button.y + button.height); } + } else { + popupY = inBubbleMode ? 0 : AndroidUtilities.statusBarHeight; + } + scrimPopupWindow.showAtLocation(chatListView, Gravity.LEFT | Gravity.TOP, scrimPopupX = popupX, scrimPopupY = popupY); - return true; + chatListView.stopScroll(); + chatLayoutManager.setCanScrollVertically(false); + scrimViewReaction = reaction.reaction instanceof TLRPC.TL_reactionEmoji ? 
((TLRPC.TL_reactionEmoji) reaction.reaction).emoticon : null; + dimBehindView(cell, true); + hideHints(false); + if (topUndoView != null) { + topUndoView.hide(true, 1); } - }); + if (undoView != null) { + undoView.hide(true, 1); + } + if (chatActivityEnterView != null && chatActivityEnterView.getEditField() != null) { + chatActivityEnterView.getEditField().setAllowDrawCursor(false); + } + } + } else if (reaction != null) { + ReactionsLayoutInBubble.VisibleReaction visibleReaction = ReactionsLayoutInBubble.VisibleReaction.fromTLReaction(reaction.reaction); + selectReaction(cell.getPrimaryMessageObject(), null, null,0, 0, visibleReaction,false, false, false); } - if (holder.itemView instanceof ChatMessageCell) { - final ChatMessageCell messageCell = (ChatMessageCell) holder.itemView; - MessageObject message = messageCell.getMessageObject(); - messageCell.showHintButton(true, false, -1); - if (hintMessageObject != null && hintMessageObject.equals(message)) { - messageCell.showHintButton(false, false, hintMessageType); + } + + @Override + public void didPressVoteButtons(ChatMessageCell cell, ArrayList buttons, int showCount, int x, int y) { + if (showCount >= 0 || buttons.isEmpty()) { + if (getParentActivity() == null) { + return; } - if (message.isAnimatedEmoji()) { - String emoji = message.getStickerEmoji(); - if (emoji != null) { - MessagesController.EmojiSound sound = getMessagesController().emojiSounds.get(emoji.replace("\uFE0F", "")); - if (sound != null) { - getMediaController().playEmojiSound(getAccountInstance(), emoji, sound, true); + if (pollHintView == null) { + pollHintView = new HintView(getParentActivity(), HintView.TYPE_POLL_VOTE, themeDelegate); + pollHintView.setAlpha(0.0f); + pollHintView.setVisibility(View.INVISIBLE); + int index = contentView.indexOfChild(chatActivityEnterView); + if (index == -1) { + return; + } + contentView.addView(pollHintView, index + 1, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 19, 0, 19, 0)); + } + if (buttons.isEmpty() && showCount < 0) { + ArrayList pollButtons = cell.getPollButtons(); + float lastDiff = 0; + for (int a = 0, N = pollButtons.size(); a < N; a++) { + ChatMessageCell.PollButton button = pollButtons.get(a); + lastDiff = cell.getY() + button.y - AndroidUtilities.dp(4) - chatListViewPaddingTop; + pollHintX = button.x + AndroidUtilities.dp(13.3f); + pollHintY = button.y - AndroidUtilities.dp(6) + y; + if (lastDiff > 0) { + lastDiff = 0; + x = pollHintX; + y = pollHintY; + break; } } - } - - boolean selected = false; - boolean disableSelection = false; - if (actionBar.isActionModeShowed() || reportType >= 0) { - messageCell.setCheckBoxVisible(threadMessageObjects == null || !threadMessageObjects.contains(message), false); - int idx = message.getDialogId() == dialog_id ? 
0 : 1; - if (selectedMessagesIds[idx].indexOfKey(message.getId()) >= 0) { - setCellSelectionBackground(message, messageCell, idx, false); - selected = true; - } else { - messageCell.setDrawSelectionBackground(false); - messageCell.setChecked(false, false, false); + if (lastDiff != 0) { + chatListView.smoothScrollBy(0, (int) lastDiff); + pollHintCell = cell; + return; } - disableSelection = true; - } else { - messageCell.setDrawSelectionBackground(false); - messageCell.setChecked(false, false, false); - messageCell.setCheckBoxVisible(false, false); } - messageCell.setCheckPressed(!disableSelection, disableSelection && selected); + pollHintView.showForMessageCell(cell, showCount, x, y, true); + } else { + getSendMessagesHelper().sendVote(cell.getMessageObject(), buttons, null); + } + } - if (searchContainer != null && searchContainer.getVisibility() == View.VISIBLE && getMediaDataController().isMessageFound(message.getId(), message.getDialogId() == mergeDialogId) && getMediaDataController().getLastSearchQuery() != null) { - messageCell.setHighlightedText(getMediaDataController().getLastSearchQuery()); - } else { - messageCell.setHighlightedText(null); - } + @Override + public void didPressCancelSendButton(ChatMessageCell cell) { + MessageObject message = cell.getMessageObject(); + if (message.messageOwner.send_state != 0) { + getSendMessagesHelper().cancelSendingMessage(message); + } + } + + @Override + public void didLongPress(ChatMessageCell cell, float x, float y) { + createMenu(cell, false, false, x, y); + startMultiselect(chatListView.getChildAdapterPosition(cell)); + } + + @Override + public boolean canPerformActions() { + return actionBar != null && !actionBar.isActionModeShowed() && reportType < 0 && !inPreviewMode; + } + + @Override + public void didPressUrl(ChatMessageCell cell, final CharacterStyle url, boolean longPress) { + didPressMessageUrl(url, longPress, cell.getMessageObject(), cell); + } + + @Override + public boolean didPressAnimatedEmoji(ChatMessageCell cell, AnimatedEmojiSpan span) { + if (getMessagesController().premiumLocked || span == null || span.standard) { + return false; + } + long documentId = span.getDocumentId(); + TLRPC.Document document = span.document == null ? 
AnimatedEmojiDrawable.findDocument(currentAccount, documentId) : span.document; + if (document == null) { + return false; + } + Bulletin bulletin = BulletinFactory.of(ChatActivity.this).createContainsEmojiBulletin(document, false, set -> { + ArrayList inputSets = new ArrayList<>(1); + inputSets.add(set); + EmojiPacksAlert alert = new EmojiPacksAlert(ChatActivity.this, getParentActivity(), themeDelegate, inputSets); + alert.setCalcMandatoryInsets(isKeyboardVisible()); + showDialog(alert); + }); + if (bulletin != null) { + bulletin.show(); + return true; + } + return false; + } - if (!inPreviewMode || !messageCell.isHighlighted()) { - messageCell.setHighlighted(highlightMessageId != Integer.MAX_VALUE && messageCell.getMessageObject().getId() == highlightMessageId); - if (highlightMessageId != Integer.MAX_VALUE) { - startMessageUnselect(); - } - } - if (actionBar != null) {//Nekomura - actionBar.unreadBadgeSetCount(getMessagesStorage().getMainUnreadCount()); - } - if (DialogConfig.isAutoTranslateEnable(dialog_id, getTopicId()) && LanguageDetector.hasSupport()) { - final var messageObject = messageCell.getMessageObject(); - if (MessageHelper.isMessageObjectAutoTranslatable(messageObject)) { - LanguageDetector.detectLanguage( - MessageHelper.getMessagePlainText(messageObject), - (String lang) -> { - if (!isLanguageRestricted(lang)) { - ArrayList fmessages = new ArrayList<>(Arrays.asList(messageObject)); - MessageTransKt.translateMessages(ChatActivity.this, fmessages, true); - } - }, - (Exception e) -> { - FileLog.e("mlkit: failed to detect language in message"); - e.printStackTrace(); - messageObject.translating = false; - }); + @Override + public void didPressTopicButton(ChatMessageCell cell) { + MessageObject message = cell.getMessageObject(); + if (message != null) { + int topicId = MessageObject.getTopicId(message.messageOwner, true); + if (topicId != 0) { + TLRPC.TL_forumTopic topic = getMessagesController().getTopicsController().findTopic(currentChat.id, topicId); + if (topic != null) { + ForumUtilities.openTopic(ChatActivity.this, currentChat.id, topic, message.getId()); } } } + } - int position = holder.getAdapterPosition(); - if (position >= messagesStartRow && position < messagesEndRow) { - ArrayList messages = isFrozen ? frozenMessages : ChatActivity.this.messages; + @Override + public boolean shouldShowTopicButton() { + return ChatObject.isForum(currentChat) && !isTopic; + } - MessageObject message = messages.get(position - messagesStartRow); - View view = holder.itemView; - if (message != null && message.messageOwner != null && message.messageOwner.media_unread && message.messageOwner.mentioned) { - if (!inPreviewMode && chatMode == 0) { - if (!message.isVoice() && !message.isRoundVideo()) { - newMentionsCount--; - if (newMentionsCount <= 0) { - newMentionsCount = 0; - hasAllMentionsLocal = true; - showMentionDownButton(false, true); - } else { - mentiondownButtonCounter.setText(String.format("%d", newMentionsCount)); - } - getMessagesController().markMentionMessageAsRead(message.getId(), ChatObject.isChannel(currentChat) ? 
currentChat.id : 0, dialog_id); - message.setContentIsRead(); - } - } - if (view instanceof ChatMessageCell) { - ChatMessageCell messageCell = (ChatMessageCell) view; - if (inPreviewMode) { - messageCell.setHighlighted(true); - } else { - messageCell.setHighlightedAnimated(); - } - } - } + @Override + public void didPressExtendedMediaPreview(ChatMessageCell cell, TLRPC.KeyboardButton button) { + getSendMessagesHelper().sendCallback(true, cell.getMessageObject(), button, ChatActivity.this); + } + + @Override + public void needOpenWebView(MessageObject message, String url, String title, String description, String originalUrl, int w, int h) { + try { + EmbedBottomSheet.show(ChatActivity.this, message, photoViewerProvider, title, description, originalUrl, url, w, h, isKeyboardVisible()); + } catch (Throwable e) { + FileLog.e(e); } } - public void updateRowAtPosition(int index) { - if (chatLayoutManager == null || isFrozen) { + @Override + public void didPressReplyMessage(ChatMessageCell cell, int id) { + if (UserObject.isReplyUser(currentUser)) { + didPressSideButton(cell); return; } - int lastVisibleItem = RecyclerView.NO_POSITION; - int top = 0; + MessageObject messageObject = cell.getMessageObject(); + if (chatMode == MODE_PINNED || chatMode == MODE_SCHEDULED) { + chatActivityDelegate.openReplyMessage(id); + finishFragment(); + } else { + scrollToMessageId(id, messageObject.getId(), true, messageObject.getDialogId() == mergeDialogId ? 1 : 0, true, 0, () -> { + progressDialogAtMessageId = messageObject.getId(); + progressDialogAtMessageType = PROGRESS_REPLY; + }); + } + } + + @Override + public boolean isProgressLoading(ChatMessageCell cell, int type) { + return progressDialogAtMessageId != 0 && cell.getMessageObject() != null && progressDialogAtMessageId == cell.getMessageObject().getId() && progressDialogAtMessageType == type; + } + + @Override + public CharacterStyle getProgressLoadingLink(ChatMessageCell cell) { + if (cell.getMessageObject() != null && progressDialogAtMessageId != 0 && progressDialogAtMessageId == cell.getMessageObject().getId() && progressDialogAtMessageType == PROGRESS_LINK) { + return progressDialogLinkSpan; + } + return null; + } + + @Override + public String getProgressLoadingBotButtonUrl(ChatMessageCell cell) { + if (cell.getMessageObject() != null && progressDialogAtMessageId != 0 && progressDialogAtMessageId == cell.getMessageObject().getId() && progressDialogAtMessageType == PROGRESS_BOT_BUTTON) { + return progressDialogBotButtonUrl; + } + return null; + } + + @Override + public void didPressViaBotNotInline(ChatMessageCell cell, long botId) { + Bundle args = new Bundle(); + args.putLong("user_id", botId); + if (getMessagesController().checkCanOpenChat(args, ChatActivity.this, cell.getMessageObject())) { + presentFragment(new ChatActivity(args)); + } + } + + @Override + public void didPressViaBot(ChatMessageCell cell, String username) { + if (bottomOverlayChat != null && bottomOverlayChat.getVisibility() == View.VISIBLE || bottomOverlay != null && bottomOverlay.getVisibility() == View.VISIBLE) { + return; + } + if (chatActivityEnterView != null && username != null && username.length() > 0) { + chatActivityEnterView.setFieldText("@" + username + " "); + chatActivityEnterView.openKeyboard(); + } + } + + @Override + public void didStartVideoStream(MessageObject message) { + if (message.isVideo()) { + sendSecretMessageRead(message, true); + } + } + + @Override + public void needReloadPolls() { + invalidateMessagesVisiblePart(); + } + + @Override + public void 
didPressImage(ChatMessageCell cell, float x, float y) { + MessageObject message = cell.getMessageObject(); + if (message.isVideo()) { + if (DownloadController.getInstance(currentAccount).canDownloadMedia(message.messageOwner) == 1) { + message.putInDownloadsStore = true; + } + } else { + message.putInDownloadsStore = true; + } + if (message.isSendError()) { + createMenu(cell, false, false, x, y); + return; + } else if (message.isSending()) { + return; + } + if (message.isDice()) { + createUndoView(); + if (undoView == null) { + return; + } + undoView.showWithAction(0, chatActivityEnterView.getVisibility() == View.VISIBLE && bottomOverlay.getVisibility() != View.VISIBLE ? UndoView.ACTION_DICE_INFO : UndoView.ACTION_DICE_NO_SEND_INFO, message.getDiceEmoji(), null, () -> { + if (checkSlowModeAlert()) { + getSendMessagesHelper().sendMessage(message.getDiceEmoji(), dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, true, 0, null, false); + } + }); + } else if (message.isAnimatedEmoji() && (!message.isAnimatedAnimatedEmoji() || emojiAnimationsOverlay.supports(MessageObject.findAnimatedEmojiEmoticon(message.getDocument())) && currentUser != null) || message.isPremiumSticker()) { + restartSticker(cell); + emojiAnimationsOverlay.onTapItem(cell, ChatActivity.this, true); + chatListView.cancelClickRunnables(false); + } else if (message.needDrawBluredPreview()) { + Runnable action = sendSecretMessageRead(message, false); + cell.invalidate(); + SecretMediaViewer.getInstance().setParentActivity(getParentActivity()); + SecretMediaViewer.getInstance().openMedia(message, photoViewerProvider, action); + } else if (MessageObject.isAnimatedEmoji(message.getDocument()) && MessageObject.getInputStickerSet(message.getDocument()) != null) { + ArrayList inputSets = new ArrayList<>(1); + inputSets.add(MessageObject.getInputStickerSet(message.getDocument())); + EmojiPacksAlert alert = new EmojiPacksAlert(ChatActivity.this, getParentActivity(), themeDelegate, inputSets); + alert.setCalcMandatoryInsets(isKeyboardVisible()); + showDialog(alert); + } else if (message.getInputStickerSet() != null) { + // In case we have a .webp file that is displayed as a sticker, but + // that doesn't fit in 512x512, we assume it may be a regular large + // .webp image and we allow to open it in media viewer. + // Inspired by https://github.com/telegramdesktop/tdesktop/commit/baccec623d45dbfd1132d5f808192f0f3ad87647 + if (message.getInputStickerSet() == null) { + int photoHeight = 0; + int photoWidth = 0; + TLRPC.Document document = message.getDocument(); + for (int a = 0, N = document.attributes.size(); a < N; a++) { + TLRPC.DocumentAttribute attribute = document.attributes.get(a); + if (attribute instanceof TLRPC.TL_documentAttributeImageSize) { + photoWidth = attribute.w; + photoHeight = attribute.h; + break; + } + } + if (photoWidth > 512 || photoHeight > 512) { + openPhotoViewerForMessage(cell, message); + } + return; + } + StickersAlert alert = new StickersAlert(getParentActivity(), ChatActivity.this, message.getInputStickerSet(), null, bottomOverlayChat.getVisibility() != View.VISIBLE && (currentChat == null || ChatObject.canSendStickers(currentChat)) ? 
chatActivityEnterView : null, themeDelegate); + alert.setCalcMandatoryInsets(isKeyboardVisible()); + showDialog(alert); + } else if (message.isVideo() || message.type == MessageObject.TYPE_PHOTO || message.type == MessageObject.TYPE_TEXT && !message.isWebpageDocument() || message.isGif()) { + openPhotoViewerForMessage(cell, message); + } else if (message.type == MessageObject.TYPE_VIDEO) { + sendSecretMessageRead(message, true); + try { + File f = null; + if (message.messageOwner.attachPath != null && message.messageOwner.attachPath.length() != 0) { + f = new File(message.messageOwner.attachPath); + } + if (f == null || !f.exists()) { + f = getFileLoader().getPathToMessage(message.messageOwner); + } + Intent intent = new Intent(Intent.ACTION_VIEW); + if (Build.VERSION.SDK_INT >= 24) { + intent.setFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION); + intent.setDataAndType(FileProvider.getUriForFile(getParentActivity(), ApplicationLoader.getApplicationId() + ".provider", f), "video/mp4"); + } else { + intent.setDataAndType(Uri.fromFile(f), "video/mp4"); + } + getParentActivity().startActivityForResult(intent, 500); + } catch (Exception e) { + FileLog.e(e); + alertUserOpenError(message); + } + } else if (message.type == MessageObject.TYPE_GEO) { + if (!AndroidUtilities.isMapsInstalled(ChatActivity.this)) { + return; + } + if (message.isLiveLocation()) { + LocationActivity fragment = new LocationActivity(currentChat == null || ChatObject.canSendMessages(currentChat) || currentChat.megagroup ? 2 : LocationActivity.LOCATION_TYPE_LIVE_VIEW); + fragment.setDelegate(ChatActivity.this); + fragment.setMessageObject(message); + presentFragment(fragment); + } else { + LocationActivity fragment = new LocationActivity(currentEncryptedChat == null ? 3 : 0); + fragment.setDelegate(ChatActivity.this); + fragment.setMessageObject(message); + presentFragment(fragment); + } + } else if (message.type == MessageObject.TYPE_FILE || message.type == MessageObject.TYPE_TEXT) { + File locFile = null; + if (message.messageOwner.attachPath != null && message.messageOwner.attachPath.length() != 0) { + File f = new File(message.messageOwner.attachPath); + if (f.exists()) { + locFile = f; + } + } + if (locFile == null) { + File f = getFileLoader().getPathToMessage(message.messageOwner); + if (f.exists()) { + locFile = f; + } + } + if (message.getDocumentName().toLowerCase().endsWith("attheme")) { + Theme.ThemeInfo themeInfo = Theme.applyThemeFile(locFile, message.getDocumentName(), null, true); + if (themeInfo != null) { + presentFragment(new ThemePreviewActivity(themeInfo)); + return; + } else { + scrollToPositionOnRecreate = -1; + } + boolean handled = false; + if (message.canPreviewDocument()) { + PhotoViewer.getInstance().setParentActivity(getParentActivity()); + PhotoViewer.getInstance().openPhoto(message, message.type != 0 ? dialog_id : 0, message.type != 0 ? 
mergeDialogId : 0, 0, photoViewerProvider, false); + handled = true; + } + if (!handled) { + try { + AndroidUtilities.openForView(message, getParentActivity()); + } catch (Exception e) { + FileLog.e(e); + alertUserOpenError(message); + } + } + } else if (locFile == null || !locFile.isFile()) { + + AlertUtil.showToast("FILE_NOT_FOUND"); + + } else if (message.getDocumentName().toLowerCase().endsWith(".nekox.json")) { + + File finalLocFile = locFile; + AlertUtil.showConfirm(getParentActivity(), + LocaleController.getString("ImportProxyList", R.string.ImportProxyList), + R.drawable.baseline_security_24, LocaleController.getString("Import", R.string.Import), + false, () -> { + String status = ProxyListActivity.processProxyListFile(getParentActivity(), finalLocFile); + if (!StrUtil.isBlank(status)) { + presentFragment(new ProxyListActivity(status)); + } + }); + + } else if (message.getDocumentName().toLowerCase().endsWith(".nekox-stickers.json")) { + + File finalLocFile = locFile; + AlertUtil.showConfirm(getParentActivity(), + LocaleController.getString("ImportStickersList", R.string.ImportStickersList), + R.drawable.deproko_baseline_stickers_filled_24, LocaleController.getString("Import", R.string.Import), false, () -> { + presentFragment(new StickersActivity(finalLocFile)); + }); + + + } else if (message.getDocumentName().toLowerCase().endsWith(".nekox-settings.json")) { + + File finalLocFile = locFile; + NekoSettingsActivity.importSettings(getParentActivity(), finalLocFile); - if (!wasManualScroll && unreadMessageObject != null) { - int n = chatListView.getChildCount(); - for (int i = 0; i < n; i++) { - View child = chatListView.getChildAt(i); - if (child instanceof ChatMessageCell && ((ChatMessageCell) child).getMessageObject() == unreadMessageObject) { - int unreadMessageIndex = messages.indexOf(unreadMessageObject); - if (unreadMessageIndex >= 0) { - lastVisibleItem = messagesStartRow + messages.indexOf(unreadMessageObject); - top = chatListView.getMeasuredHeight() - child.getBottom() - chatListView.getPaddingBottom(); - } - break; + } else { + boolean handled = false; + if (message.canPreviewDocument()) { + PhotoViewer.getInstance().setParentActivity(ChatActivity.this, themeDelegate); + PhotoViewer.getInstance().openPhoto(message, ChatActivity.this, message.type != 0 ? dialog_id : 0, message.type != 0 ? mergeDialogId : 0, message.type != 0 ? 
getTopicId() : 0, photoViewerProvider); + handled = true; + } + if (!handled) { + try { + AndroidUtilities.openForView(message, getParentActivity(), themeDelegate); + } catch (Exception e) { + FileLog.e(e); + alertUserOpenError(message); } } } - notifyItemChanged(index); - if (lastVisibleItem != RecyclerView.NO_POSITION) { - chatLayoutManager.scrollToPositionWithOffset(lastVisibleItem, top); - } } - public void invalidateRowWithMessageObject(MessageObject messageObject) { - int count = chatListView.getChildCount(); - for (int a = 0; a < count; a++) { - View child = chatListView.getChildAt(a); - if (child instanceof ChatMessageCell) { - ChatMessageCell cell = (ChatMessageCell) child; - if (cell.getMessageObject() == messageObject) { - cell.invalidate(); - return; - } - } - } } - public View updateRowWithMessageObject(MessageObject messageObject, boolean allowInPlace) { - if (allowInPlace) { - int count = chatListView.getChildCount(); - for (int a = 0; a < count; a++) { - View child = chatListView.getChildAt(a); - if (child instanceof ChatMessageCell) { - ChatMessageCell cell = (ChatMessageCell) child; - if (cell.getMessageObject() == messageObject && !cell.isAdminLayoutChanged()) { - cell.setMessageObject(messageObject, cell.getCurrentMessagesGroup(), cell.isPinnedBottom(), cell.isPinnedTop()); - return cell; + + @Override + public void didPressInstantButton(ChatMessageCell cell, int type) { + MessageObject messageObject = cell.getMessageObject(); + if (type == 8) { + PollVotesAlert.showForPoll(ChatActivity.this, messageObject); + } else if (type == 0) { + if (messageObject.messageOwner.media != null && messageObject.messageOwner.media.webpage != null && messageObject.messageOwner.media.webpage.cached_page != null) { + ArticleViewer.getInstance().setParentActivity(getParentActivity(), ChatActivity.this); + ArticleViewer.getInstance().open(messageObject); + } + } else if (type == 5) { + long uid = messageObject.messageOwner.media.user_id; + TLRPC.User user = null; + if (uid != 0) { + user = MessagesController.getInstance(currentAccount).getUser(uid); + } + openVCard(user, messageObject.messageOwner.media.phone_number, messageObject.messageOwner.media.vcard, messageObject.messageOwner.media.first_name, messageObject.messageOwner.media.last_name); + } else { + if (messageObject.isSponsored()) { + Bundle args = new Bundle();if (messageObject.sponsoredChatInvite != null) { + showDialog(new JoinGroupAlert(getContext(), messageObject.sponsoredChatInvite, messageObject.sponsoredChatInviteHash, ChatActivity.this, themeDelegate)); + } else { + long peerId = MessageObject.getPeerId(messageObject.messageOwner.from_id); + if (peerId < 0) { + args.putLong("chat_id", -peerId); + } else { + args.putLong("user_id", peerId); + } + if (messageObject.sponsoredChannelPost != 0) { + args.putInt("message_id", messageObject.sponsoredChannelPost); + } + if (messageObject.botStartParam != null) { + args.putString("inline_query", messageObject.botStartParam); + } + if (getMessagesController().checkCanOpenChat(args, ChatActivity.this)) { + presentFragment(new ChatActivity(args)); } } - } - } - ArrayList messages = isFrozen ? 
frozenMessages : ChatActivity.this.messages; + } else if (messageObject.messageOwner.media != null && messageObject.messageOwner.media.webpage != null) { + if (!openLinkInternally(messageObject.messageOwner.media.webpage.url, cell, null, messageObject.getId(), PROGRESS_INSTANT)) { + if (progressDialogCurrent != null) { + progressDialogCurrent.cancel(true); + } + progressDialogCurrent = cell == null || cell.getMessageObject() == null ? null : new Browser.Progress() { + @Override + public void init() { + progressDialogAtMessageId = cell.getMessageObject().getId(); + progressDialogAtMessageType = PROGRESS_INSTANT; + progressDialogLinkSpan = null; + cell.invalidate(); + } - int index = messages.indexOf(messageObject); - if (index == -1) { - return null; + @Override + public void end(boolean replaced) { + if (!replaced) { + AndroidUtilities.runOnUIThread(ChatActivity.this::resetProgressDialogLoading, 250); + } + } + }; + Browser.openUrl(getParentActivity(), Uri.parse(messageObject.messageOwner.media.webpage.url), true, true, progressDialogCurrent); + } + } } - updateRowAtPosition(index + messagesStartRow); - return null; } - public void notifyDataSetChanged(boolean animated) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("notify data set changed fragmentOpened=" + fragmentOpened); - } - if (animated && fragmentOpened) { - if (chatListView.getItemAnimator() != chatListItemAnimator) { - chatListView.setItemAnimator(chatListItemAnimator); - } + @Override + public void didPressCommentButton(ChatMessageCell cell) { + MessageObject.GroupedMessages group = cell.getCurrentMessagesGroup(); + MessageObject message; + if (group != null && !group.messages.isEmpty()) { + message = group.messages.get(0); } else { - chatListView.setItemAnimator(null); + message = cell.getMessageObject(); } - updateRowsInternal(); - try { - super.notifyDataSetChanged(); - } catch (Exception e) { - FileLog.e(e); + int maxReadId; + long linkedChatId; + if (message.messageOwner.replies != null) { + maxReadId = message.messageOwner.replies.read_max_id; + linkedChatId = message.messageOwner.replies.channel_id; + } else { + maxReadId = -1; + linkedChatId = 0; } - boolean hideSkeletons = false; - for (int i = messages.size() - 1; i >= 0; i--) { - MessageObject message = messages.get(i); - if (message.isDateObject) { - continue; - } - if (message.messageOwner != null && (message.messageOwner.action instanceof TLRPC.TL_messageActionTopicCreate || message.messageOwner.action instanceof TLRPC.TL_messageActionChannelCreate)) { - hideSkeletons = true; + openDiscussionMessageChat(currentChat.id, message, message.getId(), linkedChatId, maxReadId, 0, null); + } + + @Override + public String getAdminRank(long uid) { + if (ChatObject.isChannel(currentChat) && currentChat.megagroup) { + String rank = getMessagesController().getAdminRank(currentChat.id, uid); + if (rank != null) { + return rank; } - break; } - if ((endReached[0] && (mergeDialogId == 0 || endReached[1])) || hideSkeletons) { - checkDispatchHideSkeletons(fragmentBeginToShow); + if (forumTopic != null && forumTopic.from_id != null && (forumTopic.from_id.user_id == uid || forumTopic.from_id.channel_id == uid || forumTopic.from_id.chat_id == uid)) { + return LocaleController.getString("TopicCreator", R.string.TopicCreator); } + return null; } + @Override + public boolean shouldRepeatSticker(MessageObject message) { + return !alreadyPlayedStickers.containsKey(message); + } @Override - public void notifyDataSetChanged() { - notifyDataSetChanged(false); + public void 
setShouldNotRepeatSticker(MessageObject message) { + alreadyPlayedStickers.put(message, true); } @Override - public void notifyItemChanged(int position) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("notify item changed " + position); - } - if (!fragmentBeginToShow) { - chatListView.setItemAnimator(null); - } else if (chatListView.getItemAnimator() != chatListItemAnimator) { - chatListView.setItemAnimator(chatListItemAnimator); - } - updateRowsInternal(); - try { - super.notifyItemChanged(position); - } catch (Exception e) { - FileLog.e(e); - } + public TextSelectionHelper.ChatListTextSelectionHelper getTextSelectionHelper() { + return textSelectionHelper; } @Override - public void notifyItemRangeChanged(int positionStart, int itemCount) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("notify item range changed " + positionStart + ":" + itemCount); - } - if (!fragmentBeginToShow) { - chatListView.setItemAnimator(null); - } else if (chatListView.getItemAnimator() != chatListItemAnimator) { - chatListView.setItemAnimator(chatListItemAnimator); - } - updateRowsInternal(); - try { - super.notifyItemRangeChanged(positionStart, itemCount); - } catch (Exception e) { - FileLog.e(e); - } + public boolean hasSelectedMessages() { + return selectedMessagesIds[0].size() + selectedMessagesIds[1].size() > 0; } @Override - public void notifyItemInserted(int position) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("notify item inserted " + position); - } - if (!fragmentBeginToShow) { - chatListView.setItemAnimator(null); - } else if (chatListView.getItemAnimator() != chatListItemAnimator) { - chatListView.setItemAnimator(chatListItemAnimator); + public void onDiceFinished() { + if (fireworksOverlay.isStarted()) { + return; } - updateRowsInternal(); - try { - super.notifyItemInserted(position); - } catch (Exception e) { - FileLog.e(e); + fireworksOverlay.start(); + if (!NekoConfig.disableVibration.Bool()) { + fireworksOverlay.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); } } @Override - public void notifyItemMoved(int fromPosition, int toPosition) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("notify item moved" + fromPosition + ":" + toPosition); - } - if (!fragmentBeginToShow) { - chatListView.setItemAnimator(null); - } else if (chatListView.getItemAnimator() != chatListItemAnimator) { - chatListView.setItemAnimator(chatListItemAnimator); - } - updateRowsInternal(); - try { - super.notifyItemMoved(fromPosition, toPosition); - } catch (Exception e) { - FileLog.e(e); - } + public PinchToZoomHelper getPinchToZoomHelper() { + return pinchToZoomHelper; } @Override - public void notifyItemRangeInserted(int positionStart, int itemCount) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("notify item range inserted" + positionStart + ":" + itemCount); - } - if (!fragmentBeginToShow) { - chatListView.setItemAnimator(null); - } else if (chatListView.getItemAnimator() != chatListItemAnimator) { - chatListView.setItemAnimator(chatListItemAnimator); - } - updateRowsInternal(); - if (positionStart == 1 && itemCount > 0) { - int lastPosition = positionStart + itemCount; - if (lastPosition >= messagesStartRow && lastPosition < messagesEndRow) { - MessageObject m1 = messages.get(lastPosition - messagesStartRow); - MessageObject m2 = messages.get(lastPosition - messagesStartRow - 1); - if (currentChat != null && m1.getFromChatId() == m2.getFromChatId() || currentUser != null && m1.isOutOwner() == m2.isOutOwner()) { - notifyItemChanged(positionStart); - } - } - } - 
try { - super.notifyItemRangeInserted(positionStart, itemCount); - } catch (Exception e) { - FileLog.e(e); - } + public boolean keyboardIsOpened() { + return contentView.getKeyboardHeight() + chatEmojiViewPadding >= AndroidUtilities.dp(20); + } + + public boolean isLandscape() { + return contentView.getMeasuredWidth() > contentView.getMeasuredHeight(); } @Override - public void notifyItemRemoved(int position) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("notify item removed " + position); - } - if (!fragmentBeginToShow) { - chatListView.setItemAnimator(null); - } else if (chatListView.getItemAnimator() != chatListItemAnimator) { - chatListView.setItemAnimator(chatListItemAnimator); - } - updateRowsInternal(); - try { - super.notifyItemRemoved(position); - } catch (Exception e) { - FileLog.e(e); - } + public void invalidateBlur() { + contentView.invalidateBlur(); } @Override - public void notifyItemRangeRemoved(int positionStart, int itemCount) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("notify item range removed" + positionStart + ":" + itemCount); - } - if (!fragmentBeginToShow) { - chatListView.setItemAnimator(null); - } else if (chatListView.getItemAnimator() != chatListItemAnimator) { - chatListView.setItemAnimator(chatListItemAnimator); - } - updateRowsInternal(); - try { - super.notifyItemRangeRemoved(positionStart, itemCount); - } catch (Exception e) { - FileLog.e(e); - } + public boolean canDrawOutboundsContent() { + return false; } @Override - public boolean isEnabled(RecyclerView.ViewHolder holder) { + public boolean onAccessibilityAction(int action, Bundle arguments) { + if (action == AccessibilityNodeInfo.ACTION_CLICK || action == R.id.acc_action_small_button || action == R.id.acc_action_msg_options) { + if (inPreviewMode && allowExpandPreviewByClick) { + if (parentLayout != null) { + parentLayout.expandPreviewFragment(); + } + return true; + } + return !canPerformActions(); + } return false; } - } + }; private void openUserProfile(long uid) { if (uid < 0) { @@ -30565,12 +31594,9 @@ public ArrayList getThemeDescriptions() { themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, Theme.chat_gamePaint, null, null, Theme.key_chat_previewGameText)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inPreviewInstantText)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_outPreviewInstantText)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inPreviewInstantSelectedText)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_outPreviewInstantSelectedText)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, Theme.chat_deleteProgressPaint, null, null, Theme.key_chat_secretTimeText)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_stickerNameText)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, getThemedPaint(Theme.key_paint_chatBotButton), null, null, Theme.key_chat_botButtonText)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, Theme.chat_botProgressPaint, null, null, Theme.key_chat_botProgress)); 
themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, getThemedPaint(Theme.key_paint_chatTimeBackground), null, null, Theme.key_chat_mediaTimeBackground)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inForwardedNameText)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_outForwardedNameText)); @@ -30670,27 +31696,12 @@ public ArrayList getThemeDescriptions() { themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inMediaIcon)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inLoaderSelected)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inMediaIconSelected)); - themeDescriptions.add(new ThemeDescription(chatListView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[0][0], Theme.chat_photoStatesDrawables[1][0], Theme.chat_photoStatesDrawables[2][0], Theme.chat_photoStatesDrawables[3][0]}, null, Theme.key_chat_mediaLoaderPhoto)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[0][0], Theme.chat_photoStatesDrawables[1][0], Theme.chat_photoStatesDrawables[2][0], Theme.chat_photoStatesDrawables[3][0]}, null, Theme.key_chat_mediaLoaderPhotoIcon)); - themeDescriptions.add(new ThemeDescription(chatListView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[0][1], Theme.chat_photoStatesDrawables[1][1], Theme.chat_photoStatesDrawables[2][1], Theme.chat_photoStatesDrawables[3][1]}, null, Theme.key_chat_mediaLoaderPhotoSelected)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[0][1], Theme.chat_photoStatesDrawables[1][1], Theme.chat_photoStatesDrawables[2][1], Theme.chat_photoStatesDrawables[3][1]}, null, Theme.key_chat_mediaLoaderPhotoIconSelected)); - themeDescriptions.add(new ThemeDescription(chatListView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[7][0], Theme.chat_photoStatesDrawables[8][0]}, null, Theme.key_chat_outLoaderPhoto)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[7][0], Theme.chat_photoStatesDrawables[8][0]}, null, Theme.key_chat_outLoaderPhotoIcon)); - themeDescriptions.add(new ThemeDescription(chatListView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[7][1], Theme.chat_photoStatesDrawables[8][1]}, null, Theme.key_chat_outLoaderPhotoSelected)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[7][1], Theme.chat_photoStatesDrawables[8][1]}, null, Theme.key_chat_outLoaderPhotoIconSelected)); - themeDescriptions.add(new ThemeDescription(chatListView, ThemeDescription.FLAG_BACKGROUNDFILTER, new 
Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[10][0], Theme.chat_photoStatesDrawables[11][0]}, null, Theme.key_chat_inLoaderPhoto)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[10][0], Theme.chat_photoStatesDrawables[11][0]}, null, Theme.key_chat_inLoaderPhotoIcon)); - themeDescriptions.add(new ThemeDescription(chatListView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[10][1], Theme.chat_photoStatesDrawables[11][1]}, null, Theme.key_chat_inLoaderPhotoSelected)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[10][1], Theme.chat_photoStatesDrawables[11][1]}, null, Theme.key_chat_inLoaderPhotoIconSelected)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[12][0]}, null, Theme.key_chat_inFileIcon)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_photoStatesDrawables[12][1]}, null, Theme.key_chat_inFileSelectedIcon)); themeDescriptions.add(new ThemeDescription(chatListView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_contactDrawable[0]}, null, Theme.key_chat_inContactBackground)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_contactDrawable[0]}, null, Theme.key_chat_inContactIcon)); themeDescriptions.add(new ThemeDescription(chatListView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_contactDrawable[1]}, null, Theme.key_chat_outContactBackground)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_contactDrawable[1]}, null, Theme.key_chat_outContactIcon)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inLocationBackground)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_locationDrawable[0]}, null, Theme.key_chat_inLocationIcon)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_outLocationBackground)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_locationDrawable[1]}, null, Theme.key_chat_outLocationIcon)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_inPollCorrectAnswer)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_outPollCorrectAnswer)); @@ -30747,7 +31758,6 @@ public ArrayList getThemeDescriptions() { themeDescriptions.add(new ThemeDescription(chatActivityEnterView, ThemeDescription.FLAG_USEBACKGROUNDDRAWABLE | ThemeDescription.FLAG_DRAWABLESELECTEDSTATE, new Class[]{ChatActivityEnterView.class}, new String[]{"attachButton"}, null, null, null, Theme.key_listSelector)); // themeDescriptions.add(new 
ThemeDescription(chatActivityEnterView, 0, new Class[]{ChatActivityEnterView.class}, new String[]{"audioSendButton"}, null, null, null, Theme.key_chat_messagePanelIcons)); // themeDescriptions.add(new ThemeDescription(chatActivityEnterView, 0, new Class[]{ChatActivityEnterView.class}, new String[]{"videoSendButton"}, null, null, null, Theme.key_chat_messagePanelIcons)); - themeDescriptions.add(new ThemeDescription(chatActivityEnterView, 0, new Class[]{ChatActivityEnterView.class}, new String[]{"notifyButton"}, null, null, null, Theme.key_chat_messagePanelVideoFrame)); themeDescriptions.add(new ThemeDescription(chatActivityEnterView, ThemeDescription.FLAG_USEBACKGROUNDDRAWABLE | ThemeDescription.FLAG_DRAWABLESELECTEDSTATE, new Class[]{ChatActivityEnterView.class}, new String[]{"notifyButton"}, null, null, null, Theme.key_listSelector)); themeDescriptions.add(new ThemeDescription(chatActivityEnterView, 0, new Class[]{ChatActivityEnterView.class}, new String[]{"videoTimelineView"}, null, null, null, Theme.key_chat_messagePanelSend)); //themeDescriptions.add(new ThemeDescription(chatActivityEnterView, ThemeDescription.FLAG_IMAGECOLOR, new Class[]{ChatActivityEnterView.class}, new String[]{"doneButtonImage"}, null, null, null, Theme.key_chat_messagePanelBackground)); @@ -30783,8 +31793,6 @@ public ArrayList getThemeDescriptions() { themeDescriptions.add(new ThemeDescription(chatActivityEnterView != null ? chatActivityEnterView.getEmojiView() : null, 0, new Class[]{EmojiView.class}, null, null, null, selectedBackgroundDelegate, Theme.key_chat_emojiPanelBackspace)); themeDescriptions.add(new ThemeDescription(chatActivityEnterView != null ? chatActivityEnterView.getEmojiView() : null, 0, new Class[]{EmojiView.class}, null, null, null, selectedBackgroundDelegate, Theme.key_chat_emojiPanelTrendingTitle)); themeDescriptions.add(new ThemeDescription(chatActivityEnterView != null ? chatActivityEnterView.getEmojiView() : null, 0, new Class[]{EmojiView.class}, null, null, null, selectedBackgroundDelegate, Theme.key_chat_emojiPanelTrendingDescription)); - themeDescriptions.add(new ThemeDescription(chatActivityEnterView != null ? chatActivityEnterView.getEmojiView() : null, 0, new Class[]{EmojiView.class}, null, null, null, selectedBackgroundDelegate, Theme.key_chat_emojiPanelBadgeText)); - themeDescriptions.add(new ThemeDescription(chatActivityEnterView != null ? chatActivityEnterView.getEmojiView() : null, 0, new Class[]{EmojiView.class}, null, null, null, selectedBackgroundDelegate, Theme.key_chat_emojiPanelBadgeBackground)); themeDescriptions.add(new ThemeDescription(chatActivityEnterView != null ? chatActivityEnterView.getEmojiView() : null, 0, new Class[]{EmojiView.class}, null, null, null, selectedBackgroundDelegate, Theme.key_chat_messagePanelIcons)); themeDescriptions.add(new ThemeDescription(chatActivityEnterView != null ? chatActivityEnterView.getEmojiView() : null, 0, new Class[]{EmojiView.class}, null, null, null, selectedBackgroundDelegate, Theme.key_chat_emojiSearchIcon)); themeDescriptions.add(new ThemeDescription(chatActivityEnterView != null ? 
chatActivityEnterView.getEmojiView() : null, 0, new Class[]{EmojiView.class}, null, null, null, selectedBackgroundDelegate, Theme.key_chat_emojiPanelStickerSetNameHighlight)); @@ -30903,8 +31911,6 @@ public ArrayList getThemeDescriptions() { themeDescriptions.add(new ThemeDescription(gifHintTextView, ThemeDescription.FLAG_BACKGROUNDFILTER, null, null, null, null, Theme.key_chat_gifSaveHintBackground)); themeDescriptions.add(new ThemeDescription(gifHintTextView, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_chat_gifSaveHintText)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachMediaBanBackground)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachMediaBanText)); themeDescriptions.add(new ThemeDescription(noSoundHintView, ThemeDescription.FLAG_TEXTCOLOR, new Class[]{HintView.class}, new String[]{"textView"}, null, null, null, Theme.key_chat_gifSaveHintText)); themeDescriptions.add(new ThemeDescription(noSoundHintView, ThemeDescription.FLAG_IMAGECOLOR, new Class[]{HintView.class}, new String[]{"imageView"}, null, null, null, Theme.key_chat_gifSaveHintText)); @@ -30916,39 +31922,34 @@ public ArrayList getThemeDescriptions() { themeDescriptions.add(new ThemeDescription(pagedownButtonCounter, ThemeDescription.FLAG_BACKGROUNDFILTER, null, null, null, null, Theme.key_chat_goDownButtonCounterBackground)); themeDescriptions.add(new ThemeDescription(pagedownButtonCounter, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_chat_goDownButtonCounter)); themeDescriptions.add(new ThemeDescription(pagedownButtonImage, ThemeDescription.FLAG_BACKGROUNDFILTER, null, null, null, null, Theme.key_chat_goDownButton)); - themeDescriptions.add(new ThemeDescription(pagedownButtonImage, ThemeDescription.FLAG_BACKGROUNDFILTER | ThemeDescription.FLAG_DRAWABLESELECTEDSTATE, null, null, null, null, Theme.key_chat_goDownButtonShadow)); themeDescriptions.add(new ThemeDescription(pagedownButtonImage, ThemeDescription.FLAG_IMAGECOLOR, null, null, null, null, Theme.key_chat_goDownButtonIcon)); themeDescriptions.add(new ThemeDescription(mentiondownButtonCounter, ThemeDescription.FLAG_BACKGROUNDFILTER, null, null, null, null, Theme.key_chat_goDownButtonCounterBackground)); themeDescriptions.add(new ThemeDescription(mentiondownButtonCounter, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, Theme.key_chat_goDownButtonCounter)); themeDescriptions.add(new ThemeDescription(mentiondownButtonImage, ThemeDescription.FLAG_BACKGROUNDFILTER, null, null, null, null, Theme.key_chat_goDownButton)); - themeDescriptions.add(new ThemeDescription(mentiondownButtonImage, ThemeDescription.FLAG_BACKGROUNDFILTER | ThemeDescription.FLAG_DRAWABLESELECTEDSTATE, null, null, null, null, Theme.key_chat_goDownButtonShadow)); themeDescriptions.add(new ThemeDescription(mentiondownButtonImage, ThemeDescription.FLAG_IMAGECOLOR, null, null, null, null, Theme.key_chat_goDownButtonIcon)); - themeDescriptions.add(new ThemeDescription(avatarContainer != null ? avatarContainer.getTimeItem() : null, 0, null, null, null, null, Theme.key_chat_secretTimerBackground)); - themeDescriptions.add(new ThemeDescription(avatarContainer != null ? 
avatarContainer.getTimeItem() : null, 0, null, null, null, null, Theme.key_chat_secretTimerText)); - themeDescriptions.add(new ThemeDescription(floatingDateView, 0, null, null, null, null, Theme.key_chat_serviceText)); themeDescriptions.add(new ThemeDescription(floatingDateView, 0, null, null, null, null, Theme.key_chat_serviceBackground)); themeDescriptions.add(new ThemeDescription(infoTopView, 0, null, null, null, null, Theme.key_chat_serviceText)); themeDescriptions.add(new ThemeDescription(infoTopView, 0, null, null, null, null, Theme.key_chat_serviceBackground)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachGalleryIcon)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachIcon)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachGalleryBackground)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachGalleryText)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachAudioIcon)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachIcon)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachAudioBackground)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachAudioText)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachFileIcon)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachIcon)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachFileBackground)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachFileText)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachContactIcon)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachIcon)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachContactBackground)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachContactText)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachLocationIcon)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachIcon)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachLocationBackground)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachLocationText)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachPollIcon)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachIcon)); themeDescriptions.add(new 
ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachPollBackground)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_attachPollText)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, new Drawable[]{Theme.chat_attachEmptyDrawable}, null, Theme.key_chat_attachEmptyImage)); @@ -30996,7 +31997,7 @@ public ArrayList getThemeDescriptions() { themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_chat_inReactionButtonTextSelected)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, selectedBackgroundDelegate, Theme.key_chat_BlurAlpha)); - if (chatActivityEnterView != null) { + if (chatActivityEnterView != null && chatActivityEnterView.botCommandsMenuContainer != null) { themeDescriptions.add(new ThemeDescription(chatActivityEnterView.botCommandsMenuContainer.listView, ThemeDescription.FLAG_TEXTCOLOR, new Class[]{BotCommandsMenuView.BotCommandView.class}, new String[]{"description"}, null, null, null, Theme.key_windowBackgroundWhiteBlackText)); themeDescriptions.add(new ThemeDescription(chatActivityEnterView.botCommandsMenuContainer.listView, ThemeDescription.FLAG_TEXTCOLOR, new Class[]{BotCommandsMenuView.BotCommandView.class}, new String[]{"command"}, null, null, null, Theme.key_windowBackgroundWhiteGrayText)); } @@ -31190,8 +32191,12 @@ public AnimatorSet onCustomTransitionAnimation(boolean isOpen, Runnable callback if (useAlphaForContextView) { previousChat.fragmentContextView.setAlpha(1f - progress); } - previousChat.pinnedMessageView.setAlpha(1f - progress); - previousChat.topChatPanelView.setAlpha(1f - progress); + if (previousChat.pinnedMessageView != null) { + previousChat.pinnedMessageView.setAlpha(1f - progress); + } + if (previousChat.topChatPanelView != null) { + previousChat.topChatPanelView.setAlpha(1f - progress); + } }); updateChatListViewTopPadding(); @@ -31233,8 +32238,12 @@ public void onAnimationEnd(Animator animation) { previousChat.avatarContainer.getAvatarImageView().setScaleY(1f); previousChat.avatarContainer.getAvatarImageView().setAlpha(1f); - previousChat.pinnedMessageView.setAlpha(1f); - previousChat.topChatPanelView.setAlpha(1f); + if (previousChat.pinnedMessageView != null) { + previousChat.pinnedMessageView.setAlpha(1f); + } + if (previousChat.topChatPanelView != null) { + previousChat.topChatPanelView.setAlpha(1f); + } } }); fragmentTransition.setDuration(300); @@ -32046,6 +33055,14 @@ private int getCurrentColorOrDefault(String key, boolean ignoreAnimation) { } } + private void updateBotHelpCellClick(BotHelpCell cell) { + final boolean translateButtonEnabled = MessagesController.getInstance(currentAccount).getTranslateController().isContextTranslateEnabled(); + // NekoX: remove Language Detector + { + cell.setClickable(false); + } + } + @Override protected boolean allowPresentFragment() { return !inPreviewMode; @@ -32101,6 +33118,7 @@ private final static class MessageSkeleton { } private void nkbtn_onclick_actionbar(int id) { + createUndoView(); // from ActionBar & Header ( without text_* ) // should hide shit action bar after done if (id == nkbtn_forward_noquote) { @@ -32226,7 +33244,7 @@ private void nkbtn_onclick_actionbar(int id) { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); 
} } else if (id == nkheaderbtn_upgrade) { AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity()); @@ -32389,7 +33407,7 @@ private void nkbtn_onclick(int id) { } if (messageCell != null) { - checkAutoDownloadMessage(so); +// checkAutoDownloadMessage(so); messageCell.updateButtonState(false, true, false); } @@ -32446,17 +33464,17 @@ private void nkbtn_onclick(int id) { break; } case nkbtn_translate: { - if (NekoConfig.useTelegramTranslateInChat.Bool()) { + if (NekoConfig.useTelegramTranslateInChat.Bool() && !selectedObject.isPoll()) { String toLang = LocaleController.getInstance().getCurrentLocale().getLanguage(); int[] messageIdToTranslate = new int[] { selectedObject.getId() }; final CharSequence finalMessageText = getMessageCaption(selectedObject, selectedObjectGroup, messageIdToTranslate); - TranslateAlert.OnLinkPress onLinkPress = (link) -> { + Utilities.CallbackReturn onLinkPress = (link) -> { didPressMessageUrl(link, false, selectedObject, null); return true; }; TLRPC.InputPeer inputPeer = selectedObject != null && (selectedObject.isPoll() || selectedObject.isVoiceTranscriptionOpen() || selectedObject.isSponsored()) ? null : getMessagesController().getInputPeer(dialog_id); - TranslateAlert alert = TranslateAlert.showAlert(getParentActivity(), this, currentAccount, inputPeer, messageIdToTranslate[0], "und", toLang, finalMessageText, false, onLinkPress, () -> dimBehindView(false)); - alert.showDim(false); + TranslateAlert2 alert = TranslateAlert2.showAlert(getParentActivity(), this, currentAccount, inputPeer, messageIdToTranslate[0], "und", toLang, finalMessageText, selectedObject.messageOwner.entities, false, onLinkPress, () -> dimBehindView(false)); + alert.setDimBehind(true); closeMenu(false); } else { MessageTransKt.translateMessages(this); @@ -32663,10 +33681,8 @@ private void doRepeatMessage(boolean isLongClick, ArrayList messa // If selected message contains `replyTo`: // When longClick it will reply to the `replyMessage` of selectedMessage // When not LongClick but in a threadchat: reply to the Thread - MessageObject replyTo = selectedObject.replyMessageObject != null ? isLongClick ? - selectedObject.replyMessageObject : getThreadMessage() : getThreadMessage(); - if (selectedObject.type == 0 || selectedObject.isAnimatedEmoji() || getMessageCaption(selectedObject, -selectedObjectGroup) != null) { + MessageObject replyTo = selectedObject.replyMessageObject != null ? isLongClick ? 
selectedObject.replyMessageObject : getThreadMessage() : getThreadMessage(); + if (selectedObject.type == 0 || selectedObject.isAnimatedEmoji() || getMessageCaption(selectedObject, selectedObjectGroup) != null) { CharSequence caption = getMessageCaption(selectedObject, selectedObjectGroup); if (caption == null) { caption = getMessageContent(selectedObject, 0, false); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChatEditActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChatEditActivity.java index 9f6e5675de..4de4e1b725 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChatEditActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChatEditActivity.java @@ -83,6 +83,7 @@ import java.util.List; import java.util.concurrent.CountDownLatch; +import tw.nekomimi.nekogram.NekoConfig; import tw.nekomimi.nekogram.utils.VibrateUtil; public class ChatEditActivity extends BaseFragment implements ImageUpdater.ImageUpdaterDelegate, NotificationCenter.NotificationCenterDelegate { @@ -202,8 +203,9 @@ public void openPhotoForEdit(String file, String thumb, boolean isVideo) { public ChatEditActivity(Bundle args) { super(args); avatarDrawable = new AvatarDrawable(); - imageUpdater = new ImageUpdater(true); chatId = args.getLong("chat_id", 0); + TLRPC.Chat chat = getMessagesController().getChat(chatId); + imageUpdater = new ImageUpdater(true, chat != null && ChatObject.isChannelAndNotMegaGroup(chat) ? ImageUpdater.FOR_TYPE_CHANNEL : ImageUpdater.FOR_TYPE_GROUP, true); } @Override @@ -608,7 +610,7 @@ protected void onDraw(Canvas canvas) { setAvatarCell.setOnClickListener(v -> { imageUpdater.openMenu(avatar != null, () -> { avatar = null; - MessagesController.getInstance(currentAccount).changeChatAvatar(chatId, null, null, null, 0, null, null, null, null); + MessagesController.getInstance(currentAccount).changeChatAvatar(chatId, null, null, null, null, 0, null, null, null, null); showAvatarProgress(false, true); avatarImage.setImage(null, null, avatarDrawable, currentChat); cameraDrawable.setCurrentFrame(0); @@ -1115,11 +1117,11 @@ public void didStartUpload(boolean isVideo) { } @Override - public void didUploadPhoto(final TLRPC.InputFile photo, final TLRPC.InputFile video, double videoStartTimestamp, String videoPath, final TLRPC.PhotoSize bigSize, final TLRPC.PhotoSize smallSize, boolean isVideo) { + public void didUploadPhoto(final TLRPC.InputFile photo, final TLRPC.InputFile video, double videoStartTimestamp, String videoPath, final TLRPC.PhotoSize bigSize, final TLRPC.PhotoSize smallSize, boolean isVideo, TLRPC.VideoSize emojiMarkup) { AndroidUtilities.runOnUIThread(() -> { avatar = smallSize.location; - if (photo != null || video != null) { - getMessagesController().changeChatAvatar(chatId, null, photo, video, videoStartTimestamp, videoPath, smallSize.location, bigSize.location, null); + if (photo != null || video != null || emojiMarkup != null) { + getMessagesController().changeChatAvatar(chatId, null, photo, video, emojiMarkup, videoStartTimestamp, videoPath, smallSize.location, bigSize.location, null); if (createAfterUpload) { try { if (progressDialog != null && progressDialog.isShowing()) { @@ -1510,27 +1512,16 @@ private void updateFields(boolean updateChat, boolean animated) { } else { int count = 0; if (currentChat.default_banned_rights != null) { - if (!currentChat.default_banned_rights.send_stickers) { + if (!currentChat.default_banned_rights.send_plain) { count++; } if (!currentChat.default_banned_rights.send_gifs) { count++; } - if 
(!currentChat.default_banned_rights.send_media) { - count++; - } - if (!currentChat.default_banned_rights.embed_links) { - count++; - } - if (!currentChat.default_banned_rights.send_messages) { - count++; - } + count += ChatUsersActivity.getSendMediaSelectedCount(currentChat.default_banned_rights); if (!currentChat.default_banned_rights.pin_messages) { count++; } - if (!currentChat.default_banned_rights.send_polls) { - count++; - } if (!currentChat.default_banned_rights.invite_users) { count++; } @@ -1547,10 +1538,9 @@ private void updateFields(boolean updateChat, boolean animated) { count++; } } else { - count = forum ? 12 : 11; + count = forum ? 14 : 13; } -// blockCell.setTextAndValue(LocaleController.getString("ChannelPermissions", R.string.ChannelPermissions), count + " / 11", true); - blockCell.setTextAndValueAndIcon(LocaleController.getString("ChannelPermissions", R.string.ChannelPermissions), String.format("%d/%d", count, forum ? 12 : 11), R.drawable.msg_permissions, true); + blockCell.setTextAndValueAndIcon(LocaleController.getString("ChannelPermissions", R.string.ChannelPermissions), String.format("%d/%d", count, forum ? 14 : 13), animated, R.drawable.msg_permissions, true); } if (memberRequestsCell != null) { memberRequestsCell.setTextAndValueAndIcon(LocaleController.getString("MemberRequests", R.string.MemberRequests), String.format("%d", info.requests_pending), R.drawable.msg_requests, logCell != null && logCell.getVisibility() == View.VISIBLE); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChatLinkActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChatLinkActivity.java index 8247265f5b..85bb50bca5 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChatLinkActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChatLinkActivity.java @@ -465,7 +465,7 @@ public void didFailChatCreation() { showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChatRightsEditActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChatRightsEditActivity.java index a2fbfd64fc..9fde40bc51 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChatRightsEditActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChatRightsEditActivity.java @@ -42,6 +42,7 @@ import org.telegram.messenger.AccountInstance; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.BotWebViewVibrationEffect; import org.telegram.messenger.ChatObject; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; @@ -59,6 +60,7 @@ import org.telegram.ui.ActionBar.SimpleTextView; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ActionBar.ThemeDescription; +import org.telegram.ui.Cells.CheckBoxCell; import org.telegram.ui.Cells.DialogRadioCell; import org.telegram.ui.Cells.HeaderCell; import org.telegram.ui.Cells.PollEditTextCell; @@ -73,12 +75,14 @@ import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.CircularProgressDrawable; import org.telegram.ui.Components.CrossfadeDrawable; +import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.Premium.LimitReachedBottomSheet; import org.telegram.ui.Components.RecyclerListView; import java.util.ArrayList; 
import java.util.Calendar;
+import java.util.Locale;
 import tw.nekomimi.nekogram.utils.VibrateUtil;
@@ -97,7 +101,6 @@ public class ChatRightsEditActivity extends BaseFragment {
     private long chatId;
     private TLRPC.User currentUser;
     private TLRPC.Chat currentChat;
-    private TLObject participant;
     private int currentType;
     private boolean isChannel;
     private boolean isForum;
@@ -143,12 +146,18 @@ public class ChatRightsEditActivity extends BaseFragment {
     private int sendMessagesRow;
     private int sendMediaRow;
+    private int sendPhotosRow;
+    private int sendVideosRow;
+    private int sendMusicRow;
+    private int sendFilesRow;
+    private int sendVoiceRow;
+    private int sendRoundRow;
     private int sendStickersRow;
-    private int sendGamesRow;
-    private int sendInlineRow;
-
-    private int sendGifsRow;
+//    private int sendGamesRow;
+//    private int sendInlineRow;
+//
+//    private int sendGifsRow;
     private int sendPollsRow;
     private int embedLinksRow;
     private int startVoiceChatRow;
@@ -166,6 +175,7 @@ public class ChatRightsEditActivity extends BaseFragment {
     public static final int TYPE_ADD_BOT = 2;
     private boolean closingKeyboardAfterFinish = false;
+    private boolean sendMediaExpanded;
     public interface ChatRightsEditActivityDelegate {
         void didSetRights(int rights, TLRPC.TL_chatAdminRights rightsAdmin, TLRPC.TL_chatBannedRights rightsBanned, String rank);
@@ -177,7 +187,6 @@ public interface ChatRightsEditActivityDelegate {
     public ChatRightsEditActivity(long userId, long channelId, TLRPC.TL_chatAdminRights rightsAdmin, TLRPC.TL_chatBannedRights rightsBannedDefault, TLRPC.TL_chatBannedRights rightsBanned, String rank, int type, boolean edit, boolean addingNew, String addingNewBotHash ,TLObject part) {
         this(userId, channelId, rightsAdmin, rightsBannedDefault, rightsBanned, rank, type, edit, addingNew, addingNewBotHash);
-        participant = part;
     }
     public ChatRightsEditActivity(long userId, long channelId, TLRPC.TL_chatAdminRights rightsAdmin, TLRPC.TL_chatBannedRights rightsBannedDefault, TLRPC.TL_chatBannedRights rightsBanned, String rank, int type, boolean edit, boolean addingNew, String addingNewBotHash) {
@@ -279,13 +288,15 @@ public ChatRightsEditActivity(long userId, long channelId, TLRPC.TL_chatAdminRig
         if (defaultBannedRights == null) {
             defaultBannedRights = new TLRPC.TL_chatBannedRights();
             defaultBannedRights.view_messages = defaultBannedRights.send_media = defaultBannedRights.send_messages =
-                defaultBannedRights.embed_links = defaultBannedRights.send_stickers = defaultBannedRights.send_gifs =
-                defaultBannedRights.send_games = defaultBannedRights.send_inline = defaultBannedRights.send_polls =
-                defaultBannedRights.invite_users = defaultBannedRights.change_info = defaultBannedRights.pin_messages =
-                defaultBannedRights.manage_topics = true;
+                    defaultBannedRights.embed_links = defaultBannedRights.send_stickers = defaultBannedRights.send_gifs =
+                    defaultBannedRights.send_games = defaultBannedRights.send_inline = defaultBannedRights.send_polls =
+                    defaultBannedRights.invite_users = defaultBannedRights.change_info = defaultBannedRights.pin_messages =
+                    defaultBannedRights.manage_topics = defaultBannedRights.send_plain = defaultBannedRights.send_videos =
+                    defaultBannedRights.send_photos = defaultBannedRights.send_audios = defaultBannedRights.send_docs =
+                    defaultBannedRights.send_voices = defaultBannedRights.send_roundvideos = false;
         }
-        if (!defaultBannedRights.change_info) {
+        if (!defaultBannedRights.change_info && !isChannel) {
             adminRights.change_info = true;
         }
         if (!defaultBannedRights.pin_messages) {
@@
-299,7 +310,9 @@ public ChatRightsEditActivity(long userId, long channelId, TLRPC.TL_chatAdminRig defaultBannedRights.embed_links = defaultBannedRights.send_stickers = defaultBannedRights.send_gifs = defaultBannedRights.send_games = defaultBannedRights.send_inline = defaultBannedRights.send_polls = defaultBannedRights.invite_users = defaultBannedRights.change_info = defaultBannedRights.pin_messages = - defaultBannedRights.manage_topics = false; + defaultBannedRights.manage_topics = defaultBannedRights.send_plain = defaultBannedRights.send_videos = + defaultBannedRights.send_photos = defaultBannedRights.send_audios = defaultBannedRights.send_docs = + defaultBannedRights.send_voices = defaultBannedRights.send_roundvideos = false; } bannedRights = new TLRPC.TL_chatBannedRights(); @@ -308,7 +321,7 @@ public ChatRightsEditActivity(long userId, long channelId, TLRPC.TL_chatAdminRig bannedRights.embed_links = bannedRights.send_stickers = bannedRights.send_gifs = bannedRights.send_games = bannedRights.send_inline = bannedRights.send_polls = bannedRights.invite_users = bannedRights.change_info = bannedRights.pin_messages = - bannedRights.manage_topics = false; + bannedRights.manage_topics = false; } else { bannedRights.view_messages = rightsBanned.view_messages; bannedRights.send_messages = rightsBanned.send_messages; @@ -324,6 +337,13 @@ public ChatRightsEditActivity(long userId, long channelId, TLRPC.TL_chatAdminRig bannedRights.pin_messages = rightsBanned.pin_messages; bannedRights.until_date = rightsBanned.until_date; bannedRights.manage_topics = rightsBanned.manage_topics; + bannedRights.send_photos = rightsBanned.send_photos; + bannedRights.send_videos = rightsBanned.send_videos; + bannedRights.send_roundvideos = rightsBanned.send_roundvideos; + bannedRights.send_audios = rightsBanned.send_audios; + bannedRights.send_voices = rightsBanned.send_voices; + bannedRights.send_docs = rightsBanned.send_docs; + bannedRights.send_plain = rightsBanned.send_plain; } if (defaultBannedRights.view_messages) { bannedRights.view_messages = true; @@ -364,6 +384,27 @@ public ChatRightsEditActivity(long userId, long channelId, TLRPC.TL_chatAdminRig if (defaultBannedRights.manage_topics) { bannedRights.manage_topics = true; } + if (defaultBannedRights.send_photos) { + bannedRights.send_photos = true; + } + if (defaultBannedRights.send_videos) { + bannedRights.send_videos = true; + } + if (defaultBannedRights.send_audios) { + bannedRights.send_audios = true; + } + if (defaultBannedRights.send_docs) { + bannedRights.send_docs = true; + } + if (defaultBannedRights.send_voices) { + bannedRights.send_voices = true; + } + if (defaultBannedRights.send_roundvideos) { + bannedRights.send_roundvideos = true; + } + if (defaultBannedRights.send_plain) { + bannedRights.send_plain = true; + } currentBannedRights = ChatObject.getBannedRightsString(bannedRights); @@ -372,15 +413,30 @@ public ChatRightsEditActivity(long userId, long channelId, TLRPC.TL_chatAdminRig updateRows(false); } + public static TLRPC.TL_chatAdminRights rightsOR(TLRPC.TL_chatAdminRights a, TLRPC.TL_chatAdminRights b) { + TLRPC.TL_chatAdminRights adminRights = new TLRPC.TL_chatAdminRights(); + adminRights.change_info = a.change_info || b.change_info; + adminRights.post_messages = a.post_messages || b.post_messages; + adminRights.edit_messages = a.edit_messages || b.edit_messages; + adminRights.delete_messages = a.delete_messages || b.delete_messages; + adminRights.ban_users = a.ban_users || b.ban_users; + adminRights.invite_users = a.invite_users || 
b.invite_users; + adminRights.pin_messages = a.pin_messages || b.pin_messages; + adminRights.add_admins = a.add_admins || b.add_admins; + adminRights.manage_call = a.manage_call || b.manage_call; + adminRights.manage_topics = a.manage_topics || b.manage_topics; + return adminRights; + } + + public static TLRPC.TL_chatAdminRights emptyAdminRights(boolean value) { TLRPC.TL_chatAdminRights adminRights = new TLRPC.TL_chatAdminRights(); adminRights.change_info = adminRights.post_messages = adminRights.edit_messages = - adminRights.delete_messages = adminRights.ban_users = adminRights.invite_users = - adminRights.pin_messages = adminRights.add_admins = adminRights.manage_call = adminRights.manage_topics = value; + adminRights.delete_messages = adminRights.ban_users = adminRights.invite_users = + adminRights.pin_messages = adminRights.add_admins = adminRights.manage_call = adminRights.manage_topics = value; return adminRights; } - @Override public View createView(Context context) { actionBar.setBackButtonImage(R.drawable.ic_ab_back); @@ -418,6 +474,7 @@ public void onItemClick(int id) { fragmentView = new FrameLayout(context) { private int previousHeight = -1; + @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { super.onLayout(changed, left, top, right, bottom); @@ -463,7 +520,10 @@ protected int getExtraLayoutSpace(RecyclerView.State state) { if (currentType == TYPE_ADD_BOT) { listView.setResetSelectorOnChanged(false); } + itemAnimator.setSupportsChangeAnimations(false); itemAnimator.setDelayAnimations(false); + itemAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + itemAnimator.setDurations(350); listView.setItemAnimator(itemAnimator); listView.setVerticalScrollbarPosition(LocaleController.isRTL ? 
RecyclerListView.SCROLLBAR_POSITION_LEFT : RecyclerListView.SCROLLBAR_POSITION_RIGHT); frameLayout.addView(listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); @@ -478,15 +538,35 @@ public void onScrollStateChanged(RecyclerView recyclerView, int newState) { }); listView.setOnItemClickListener((view, position) -> { + if (!canEdit && (!currentChat.creator || currentType != TYPE_ADMIN || position != anonymousRow)) { + return; + } + if (position == sendMediaRow) { +// if (allDefaultMediaBanned()) { +// new AlertDialog.Builder(getParentActivity()) +// .setTitle(LocaleController.getString("UserRestrictionsCantModify", R.string.UserRestrictionsCantModify)) +// .setMessage(LocaleController.getString("UserRestrictionsCantModifyEnabled", R.string.UserRestrictionsCantModifyEnabled)) +// .setPositiveButton(LocaleController.getString("OK", R.string.OK), null) +// .create() +// .show(); +// return; +// } + sendMediaExpanded = !sendMediaExpanded; + updateRows(false); + if (sendMediaExpanded) { + listViewAdapter.notifyItemRangeInserted(sendMediaRow + 1, 9); + } else { + listViewAdapter.notifyItemRangeRemoved(sendMediaRow + 1, 9); + } + return; + } if (position == 0) { Bundle args = new Bundle(); args.putLong("user_id", currentUser.id); presentFragment(new ProfileActivity(args)); - } else if (!canEdit && (!currentChat.creator || currentType != TYPE_ADMIN || position != anonymousRow)) { - return; } else if (position == removeAdminRow) { if (currentType == TYPE_ADMIN) { - MessagesController.getInstance(currentAccount).setUserAdminRole(chatId, currentUser, new TLRPC.TL_chatAdminRights(), currentRank, isChannel, getFragmentForAlert(0), isAddingNew, false, null,null); + MessagesController.getInstance(currentAccount).setUserAdminRole(chatId, currentUser, new TLRPC.TL_chatAdminRights(), currentRank, isChannel, getFragmentForAlert(0), isAddingNew, false, null, null); if (delegate != null) { delegate.didSetRights(0, adminRights, bannedRights, currentRank); } @@ -647,30 +727,78 @@ public void onScrollStateChanged(RecyclerView recyclerView, int newState) { } builder.setCustomView(linearLayout); showDialog(builder.create()); + } else if (view instanceof CheckBoxCell) { + CheckBoxCell checkBoxCell = (CheckBoxCell) view; + if (currentType == TYPE_BANNED && bannedRights != null) { + boolean disabled = !checkBoxCell.isChecked(); + boolean value = false; + + if (checkBoxCell.hasIcon()) { + if (currentType != TYPE_ADD_BOT) { + new AlertDialog.Builder(getParentActivity()) + .setTitle(LocaleController.getString("UserRestrictionsCantModify", R.string.UserRestrictionsCantModify)) + .setMessage(LocaleController.getString("UserRestrictionsCantModifyDisabled", R.string.UserRestrictionsCantModifyDisabled)) + .setPositiveButton(LocaleController.getString("OK", R.string.OK), null) + .create() + .show(); + } + return; + } + + if (position == sendPhotosRow) { + value = bannedRights.send_photos = !bannedRights.send_photos; + } else if (position == sendVideosRow) { + value = bannedRights.send_videos = !bannedRights.send_videos; + } else if (position == sendMusicRow) { + value = bannedRights.send_audios = !bannedRights.send_audios; + } else if (position == sendFilesRow) { + value = bannedRights.send_docs = !bannedRights.send_docs; + } else if (position == sendRoundRow) { + value = bannedRights.send_roundvideos = !bannedRights.send_roundvideos; + } else if (position == sendVoiceRow) { + value = bannedRights.send_voices = !bannedRights.send_voices; + } else if (position == sendStickersRow) { + value = 
bannedRights.send_stickers = bannedRights.send_games = bannedRights.send_gifs = bannedRights.send_inline = !bannedRights.send_stickers; + } else if (position == embedLinksRow) { + if (bannedRights.send_plain || defaultBannedRights.send_plain) { + View senMessagesView = linearLayoutManager.findViewByPosition(sendMessagesRow); + if (senMessagesView != null) { + AndroidUtilities.shakeViewSpring(senMessagesView); + BotWebViewVibrationEffect.APP_ERROR.vibrate(); + return; + } + } + value = bannedRights.embed_links = !bannedRights.embed_links; + } else if (position == sendPollsRow) { + value = bannedRights.send_polls = !bannedRights.send_polls; + } + listViewAdapter.notifyItemChanged(sendMediaRow); + checkBoxCell.setChecked(!value, true); + } } else if (view instanceof TextCheckCell2) { TextCheckCell2 checkCell = (TextCheckCell2) view; if (checkCell.hasIcon()) { if (currentType != TYPE_ADD_BOT) { new AlertDialog.Builder(getParentActivity()) - .setTitle(LocaleController.getString("UserRestrictionsCantModify", R.string.UserRestrictionsCantModify)) - .setMessage(LocaleController.getString("UserRestrictionsCantModifyDisabled", R.string.UserRestrictionsCantModifyDisabled)) - .setPositiveButton(LocaleController.getString("OK", R.string.OK), null) - .create() - .show(); + .setTitle(LocaleController.getString("UserRestrictionsCantModify", R.string.UserRestrictionsCantModify)) + .setMessage(LocaleController.getString("UserRestrictionsCantModifyDisabled", R.string.UserRestrictionsCantModifyDisabled)) + .setPositiveButton(LocaleController.getString("OK", R.string.OK), null) + .create() + .show(); } return; } if (!checkCell.isEnabled()) { if ((currentType == TYPE_ADD_BOT || currentType == TYPE_ADMIN) && - (position == changeInfoRow && defaultBannedRights != null && !defaultBannedRights.change_info || - position == pinMessagesRow && defaultBannedRights != null && !defaultBannedRights.pin_messages)) { + (position == changeInfoRow && defaultBannedRights != null && !defaultBannedRights.change_info || + position == pinMessagesRow && defaultBannedRights != null && !defaultBannedRights.pin_messages)) { new AlertDialog.Builder(getParentActivity()) - .setTitle(LocaleController.getString("UserRestrictionsCantModify", R.string.UserRestrictionsCantModify)) - .setMessage(LocaleController.getString("UserRestrictionsCantModifyEnabled", R.string.UserRestrictionsCantModifyEnabled)) - .setPositiveButton(LocaleController.getString("OK", R.string.OK), null) - .create() - .show(); + .setTitle(LocaleController.getString("UserRestrictionsCantModify", R.string.UserRestrictionsCantModify)) + .setMessage(LocaleController.getString("UserRestrictionsCantModifyEnabled", R.string.UserRestrictionsCantModifyEnabled)) + .setPositiveButton(LocaleController.getString("OK", R.string.OK), null) + .create() + .show(); } return; } @@ -722,90 +850,18 @@ public void onScrollStateChanged(RecyclerView recyclerView, int newState) { } else if (currentType == TYPE_BANNED && bannedRights != null) { boolean disabled = !checkCell.isChecked(); if (position == sendMessagesRow) { - value = bannedRights.send_messages = !bannedRights.send_messages; - } else if (position == sendMediaRow) { - value = bannedRights.send_media = !bannedRights.send_media; - } else if (position == sendStickersRow) { - value = bannedRights.send_stickers = !bannedRights.send_stickers; - } else if (position == sendGamesRow) { - bannedRights.send_games = !bannedRights.send_games; - } else if (position == sendInlineRow) { - bannedRights.send_inline = !bannedRights.send_inline; - } else if 
(position == sendGifsRow) { - bannedRights.send_gifs = !bannedRights.send_gifs; - } else if (position == embedLinksRow) { - value = bannedRights.embed_links = !bannedRights.embed_links; - } else if (position == sendPollsRow) { - value = bannedRights.send_polls = !bannedRights.send_polls; + value = bannedRights.send_plain = !bannedRights.send_plain; } - if (disabled) { - if (bannedRights.view_messages && !bannedRights.send_messages) { - bannedRights.send_messages = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendMessagesRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - if ((bannedRights.view_messages || bannedRights.send_messages) && !bannedRights.send_media) { - bannedRights.send_media = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendMediaRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - if ((bannedRights.view_messages || bannedRights.send_messages) && !bannedRights.send_polls) { - bannedRights.send_polls = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendPollsRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - if ((bannedRights.view_messages || bannedRights.send_messages) && !bannedRights.send_stickers) { - bannedRights.send_stickers = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendStickersRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - if ((bannedRights.view_messages || bannedRights.send_messages) && !bannedRights.send_games) { - bannedRights.send_games = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendGamesRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - if ((bannedRights.view_messages || bannedRights.send_inline) && !bannedRights.send_inline) { - bannedRights.send_inline = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendInlineRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - if ((bannedRights.view_messages || bannedRights.send_messages) && !bannedRights.send_gifs) { - bannedRights.send_gifs = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendGifsRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - if ((bannedRights.view_messages || bannedRights.send_messages) && !bannedRights.embed_links) { - bannedRights.embed_links = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(embedLinksRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - } else { - if ((!bannedRights.send_messages || !bannedRights.embed_links || !bannedRights.send_inline || !bannedRights.send_media || !bannedRights.send_polls) && bannedRights.view_messages) { + if (!disabled) { + if ((!bannedRights.send_plain || !bannedRights.embed_links || !bannedRights.send_inline || !bannedRights.send_photos || !bannedRights.send_videos || !bannedRights.send_audios || !bannedRights.send_docs || !bannedRights.send_voices || !bannedRights.send_roundvideos || !bannedRights.send_polls) && bannedRights.view_messages) { bannedRights.view_messages = false; } - if ((!bannedRights.embed_links || !bannedRights.send_inline || !bannedRights.send_media || 
!bannedRights.send_polls) && bannedRights.send_messages) { - bannedRights.send_messages = false; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendMessagesRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(true); - } - } + } + if (embedLinksRow >= 0) { + listViewAdapter.notifyItemChanged(embedLinksRow); + } + if (sendMediaRow >= 0) { + listViewAdapter.notifyItemChanged(sendMediaRow); } } if (currentType == TYPE_ADD_BOT) { @@ -979,7 +1035,7 @@ private void initTransfer(TLRPC.InputCheckPasswordSRP srp, TwoStepVerificationAc }), ConnectionsManager.RequestFlagWithoutLogin); } else if (error.text.equals("CHANNELS_TOO_MUCH")) { if (getParentActivity() != null && !AccountInstance.getInstance(currentAccount).getUserConfig().isPremium()) { - showDialog(new LimitReachedBottomSheet(this, getParentActivity(), LimitReachedBottomSheet.TYPE_TO_MANY_COMMUNITIES, currentAccount)); + showDialog(new LimitReachedBottomSheet(this, getParentActivity(), LimitReachedBottomSheet.TYPE_TO0_MANY_COMMUNITIES, currentAccount)); } else { presentFragment(new TooManyCommunitiesActivity(TooManyCommunitiesActivity.TYPE_EDIT)); } @@ -1026,10 +1082,14 @@ private void updateRows(boolean update) { sendMessagesRow = -1; sendMediaRow = -1; + + sendPhotosRow = -1; + sendVideosRow = -1; + sendMusicRow = -1; + sendFilesRow = -1; + sendVoiceRow = -1; + sendRoundRow = -1; sendStickersRow = -1; - sendGamesRow = -1; - sendInlineRow = -1; - sendGifsRow = -1; sendPollsRow = -1; embedLinksRow = -1; startVoiceChatRow = -1; @@ -1068,12 +1128,18 @@ private void updateRows(boolean update) { } else if (currentType == TYPE_BANNED) { sendMessagesRow = rowCount++; sendMediaRow = rowCount++; - sendStickersRow = rowCount++; - sendGamesRow = rowCount++; - sendInlineRow = rowCount++; - sendGifsRow = rowCount++; - sendPollsRow = rowCount++; - embedLinksRow = rowCount++; + if (sendMediaExpanded) { + sendPhotosRow = rowCount++; + sendVideosRow = rowCount++; + sendFilesRow = rowCount++; + sendMusicRow = rowCount++; + sendVoiceRow = rowCount++; + sendRoundRow = rowCount++; + sendStickersRow = rowCount++; + sendPollsRow = rowCount++; + embedLinksRow = rowCount++; + } + addUsersRow = rowCount++; pinMessagesRow = rowCount++; changeInfoRow = rowCount++; @@ -1183,10 +1249,10 @@ private void onDonePressed() { MessagesController.getInstance(currentAccount).setUserAdminRole(chatId, currentUser, adminRights, currentRank, isChannel, this, isAddingNew, false, null, () -> { if (delegate != null) { delegate.didSetRights( - adminRights.change_info || adminRights.post_messages || adminRights.edit_messages || - adminRights.delete_messages || adminRights.ban_users || adminRights.invite_users || (isForum && adminRights.manage_topics) || - adminRights.pin_messages || adminRights.add_admins || adminRights.anonymous || adminRights.manage_call || - adminRights.other ? 1 : 0, adminRights, bannedRights, currentRank); + adminRights.change_info || adminRights.post_messages || adminRights.edit_messages || + adminRights.delete_messages || adminRights.ban_users || adminRights.invite_users || (isForum && adminRights.manage_topics) || + adminRights.pin_messages || adminRights.add_admins || adminRights.anonymous || adminRights.manage_call || + adminRights.other ? 
1 : 0, adminRights, bannedRights, currentRank); finishFragment(); } }, err -> { @@ -1209,17 +1275,17 @@ private void onDonePressed() { } else if (currentType == TYPE_ADD_BOT) { AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity()); builder.setTitle(asAdmin ? - LocaleController.getString("AddBotAdmin", R.string.AddBotAdmin) : - LocaleController.getString("AddBot", R.string.AddBot) + LocaleController.getString("AddBotAdmin", R.string.AddBotAdmin) : + LocaleController.getString("AddBot", R.string.AddBot) ); boolean isChannel = ChatObject.isChannel(currentChat) && !currentChat.megagroup; String chatName = currentChat == null ? "" : currentChat.title; builder.setMessage(AndroidUtilities.replaceTags( - asAdmin ? ( - isChannel ? - LocaleController.formatString("AddBotMessageAdminChannel", R.string.AddBotMessageAdminChannel, chatName) : - LocaleController.formatString("AddBotMessageAdminGroup", R.string.AddBotMessageAdminGroup, chatName) - ) : LocaleController.formatString("AddMembersAlertNamesText", R.string.AddMembersAlertNamesText, UserObject.getUserName(currentUser), chatName) + asAdmin ? ( + isChannel ? + LocaleController.formatString("AddBotMessageAdminChannel", R.string.AddBotMessageAdminChannel, chatName) : + LocaleController.formatString("AddBotMessageAdminGroup", R.string.AddBotMessageAdminGroup, chatName) + ) : LocaleController.formatString("AddMembersAlertNamesText", R.string.AddMembersAlertNamesText, UserObject.getUserName(currentUser), chatName) )); builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); builder.setPositiveButton(asAdmin ? LocaleController.getString("AddAsAdmin", R.string.AddAsAdmin) : LocaleController.getString("AddBot", R.string.AddBot), (di, i) -> { @@ -1348,6 +1414,8 @@ private class ListAdapter extends RecyclerListView.SelectionAdapter { private final int VIEW_TYPE_UNTIL_DATE_CELL = 6; private final int VIEW_TYPE_RANK_CELL = 7; private final int VIEW_TYPE_ADD_BOT_CELL = 8; + private final int VIEW_TYPE_EXPANDABLE_SWITCH = 9; + private final int VIEW_TYPE_INNER_CHECK = 10; private Context mContext; private boolean ignoreTextChange; @@ -1382,7 +1450,7 @@ public long getItemId(int position) { if (position == rankRow) return 18; if (position == rankInfoRow) return 19; if (position == sendMessagesRow) return 20; - if (position == sendMediaRow) return 21; + if (position == sendPhotosRow) return 21; if (position == sendStickersRow) return 22; if (position == sendPollsRow) return 23; if (position == embedLinksRow) return 24; @@ -1391,6 +1459,12 @@ public long getItemId(int position) { if (position == untilDateRow) return 27; if (position == addBotButtonRow) return 28; if (position == manageTopicsRow) return 29; + if (position == sendVideosRow) return 30; + if (position == sendFilesRow) return 31; + if (position == sendMusicRow) return 32; + if (position == sendVoiceRow) return 33; + if (position == sendRoundRow) return 34; + if (position == sendMediaRow) return 35; return 0; } else { return super.getItemId(position); @@ -1403,7 +1477,7 @@ public boolean isEnabled(RecyclerView.ViewHolder holder) { if (currentChat.creator && (currentType == TYPE_ADMIN || currentType == TYPE_ADD_BOT && asAdmin) && type == VIEW_TYPE_SWITCH_CELL && holder.getAdapterPosition() == anonymousRow) { return true; } - if (!canEdit && holder.getAdapterPosition() != 0) { + if (!canEdit) { return false; } if ((currentType == TYPE_ADMIN || currentType == TYPE_ADD_BOT) && type == VIEW_TYPE_SWITCH_CELL) { @@ -1415,7 +1489,7 @@ public boolean 
isEnabled(RecyclerView.ViewHolder holder) { return false; } if (position == changeInfoRow) { - return myAdminRights.change_info && (defaultBannedRights == null || defaultBannedRights.change_info); + return myAdminRights.change_info && (defaultBannedRights == null || defaultBannedRights.change_info || isChannel); } else if (position == postMessagesRow) { return myAdminRights.post_messages; } else if (position == editMesagesRow) { @@ -1468,6 +1542,7 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType view = new HeaderCell(mContext, Theme.key_windowBackgroundWhiteBlueHeader, 21, 15, true); view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); break; + case VIEW_TYPE_EXPANDABLE_SWITCH: case VIEW_TYPE_SWITCH_CELL: view = new TextCheckCell2(mContext); view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); @@ -1529,6 +1604,14 @@ public void afterTextChanged(Editable s) { }); view = cell; break; + case VIEW_TYPE_INNER_CHECK: + CheckBoxCell checkBoxCell = new CheckBoxCell(mContext, 4, 21, getResourceProvider()); + checkBoxCell.getCheckBoxRound().setDrawBackgroundAsArc(14); + checkBoxCell.getCheckBoxRound().setColor(Theme.key_switch2TrackChecked, Theme.key_radioBackground, Theme.key_checkboxCheck); + checkBoxCell.setEnabled(true); + view = checkBoxCell; + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + break; } return new RecyclerListView.Holder(view); } @@ -1536,6 +1619,39 @@ public void afterTextChanged(Editable s) { @Override public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { switch (holder.getItemViewType()) { + case VIEW_TYPE_INNER_CHECK: + CheckBoxCell checkBoxCell = (CheckBoxCell) holder.itemView; + boolean animated = checkBoxCell.getTag() != null && (Integer) checkBoxCell.getTag() == position; + checkBoxCell.setTag(position); + if (position == sendStickersRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPermissionStickersGifs", R.string.SendMediaPermissionStickersGifs), "", !bannedRights.send_stickers && !defaultBannedRights.send_stickers, true, animated); + checkBoxCell.setIcon(defaultBannedRights.send_stickers ? R.drawable.permission_locked : 0); + } else if (position == embedLinksRow) { + checkBoxCell.setText(LocaleController.getString("UserRestrictionsEmbedLinks", R.string.UserRestrictionsEmbedLinks), "", !bannedRights.embed_links && !defaultBannedRights.embed_links && !bannedRights.send_plain && !defaultBannedRights.send_plain, true, animated); + checkBoxCell.setIcon(defaultBannedRights.embed_links ? R.drawable.permission_locked : 0); + } else if (position == sendPollsRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPolls", R.string.SendMediaPolls), "", !bannedRights.send_polls && !defaultBannedRights.send_polls, true, animated); + checkBoxCell.setIcon(defaultBannedRights.send_polls ? R.drawable.permission_locked : 0); + } else if (position == sendPhotosRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPermissionPhotos", R.string.SendMediaPermissionPhotos), "", !bannedRights.send_photos && !defaultBannedRights.send_photos, true, animated); + checkBoxCell.setIcon(defaultBannedRights.send_photos ? 
R.drawable.permission_locked : 0); + } else if (position == sendVideosRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPermissionVideos", R.string.SendMediaPermissionVideos), "", !bannedRights.send_videos && !defaultBannedRights.send_videos, true, animated); + checkBoxCell.setIcon(defaultBannedRights.send_videos ? R.drawable.permission_locked : 0); + } else if (position == sendMusicRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPermissionMusic", R.string.SendMediaPermissionMusic), "", !bannedRights.send_audios && !defaultBannedRights.send_audios, true, animated); + checkBoxCell.setIcon(defaultBannedRights.send_audios ? R.drawable.permission_locked : 0); + } else if (position == sendFilesRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPermissionFiles", R.string.SendMediaPermissionFiles), "", !bannedRights.send_docs && !defaultBannedRights.send_docs, true, animated); + checkBoxCell.setIcon(defaultBannedRights.send_docs ? R.drawable.permission_locked : 0); + } else if (position == sendVoiceRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPermissionVoice", R.string.SendMediaPermissionVoice), "", !bannedRights.send_voices && !defaultBannedRights.send_voices, true, animated); + checkBoxCell.setIcon(defaultBannedRights.send_voices ? R.drawable.permission_locked : 0); + } else if (position == sendRoundRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPermissionRound", R.string.SendMediaPermissionRound), "", !bannedRights.send_roundvideos && !defaultBannedRights.send_roundvideos, true, animated); + checkBoxCell.setIcon(defaultBannedRights.send_roundvideos ? R.drawable.permission_locked : 0); + } + break; case VIEW_TYPE_USER_CELL: UserCell2 userCell2 = (UserCell2) holder.itemView; String status = null; @@ -1592,17 +1708,36 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { headerCell.setText(LocaleController.getString("EditAdminRank", R.string.EditAdminRank)); } break; + case VIEW_TYPE_EXPANDABLE_SWITCH: case VIEW_TYPE_SWITCH_CELL: TextCheckCell2 checkCell = (TextCheckCell2) holder.itemView; boolean asAdminValue = currentType != TYPE_ADD_BOT || asAdmin; boolean isCreator = (currentChat != null && currentChat.creator); - if (position == manageRow) { + if (position == sendMediaRow) { + int sentMediaCount = getSendMediaSelectedCount(); + checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendMedia", R.string.UserRestrictionsSendMedia), sentMediaCount > 0, true, true); + checkCell.setCollapseArrow(String.format(Locale.US, "%d/9", sentMediaCount), !sendMediaExpanded, () -> { + if (allDefaultMediaBanned()) { + new AlertDialog.Builder(getParentActivity()) + .setTitle(LocaleController.getString("UserRestrictionsCantModify", R.string.UserRestrictionsCantModify)) + .setMessage(LocaleController.getString("UserRestrictionsCantModifyEnabled", R.string.UserRestrictionsCantModifyEnabled)) + .setPositiveButton(LocaleController.getString("OK", R.string.OK), null) + .create() + .show(); + return; + } + boolean checked = !checkCell.isChecked(); + checkCell.setChecked(checked); + setSendMediaEnabled(checked); + }); + checkCell.setIcon(allDefaultMediaBanned() ? R.drawable.permission_locked : 0); + } else if (position == manageRow) { checkCell.setTextAndCheck(LocaleController.getString("ManageGroup", R.string.ManageGroup), asAdmin, true); checkCell.setIcon(myAdminRights.add_admins || isCreator ? 
0 : R.drawable.permission_locked); } else if (position == changeInfoRow) { if (currentType == TYPE_ADMIN || currentType == TYPE_ADD_BOT) { if (isChannel) { - checkCell.setTextAndCheck(LocaleController.getString("EditAdminChangeChannelInfo", R.string.EditAdminChangeChannelInfo), asAdminValue && adminRights.change_info || !defaultBannedRights.change_info, true); + checkCell.setTextAndCheck(LocaleController.getString("EditAdminChangeChannelInfo", R.string.EditAdminChangeChannelInfo), asAdminValue && adminRights.change_info, true); } else { checkCell.setTextAndCheck(LocaleController.getString("EditAdminChangeGroupInfo", R.string.EditAdminChangeGroupInfo), asAdminValue && adminRights.change_info || !defaultBannedRights.change_info, true); } @@ -1687,37 +1822,17 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { checkCell.setIcon(defaultBannedRights.pin_messages ? R.drawable.permission_locked : 0); } } else if (position == sendMessagesRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSend", R.string.UserRestrictionsSend), !bannedRights.send_messages && !defaultBannedRights.send_messages, true); - checkCell.setIcon(defaultBannedRights.send_messages ? R.drawable.permission_locked : 0); - } else if (position == sendMediaRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendMedia", R.string.UserRestrictionsSendMedia), !bannedRights.send_media && !defaultBannedRights.send_media, true); - checkCell.setIcon(defaultBannedRights.send_media ? R.drawable.permission_locked : 0); - } else if (position == sendStickersRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendStickers2", R.string.UserRestrictionsSendStickers2), !bannedRights.send_stickers && !defaultBannedRights.send_stickers, true); - checkCell.setIcon(defaultBannedRights.send_stickers ? R.drawable.permission_locked : 0); - } else if (position == sendGamesRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendGames", R.string.UserRestrictionsSendGames), !bannedRights.send_games && !defaultBannedRights.send_games, true); - checkCell.setIcon(defaultBannedRights.send_stickers ? R.drawable.permission_locked : 0); - } else if (position == sendInlineRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendInline", R.string.UserRestrictionsSendInline), !bannedRights.send_inline && !defaultBannedRights.send_inline, true); - checkCell.setIcon(defaultBannedRights.send_stickers ? R.drawable.permission_locked : 0); - } else if (position == sendGifsRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendGifs", R.string.UserRestrictionsSendGifs), !bannedRights.send_gifs && !defaultBannedRights.send_gifs, true); - checkCell.setIcon(defaultBannedRights.send_gifs ? R.drawable.permission_locked : 0); - } else if (position == embedLinksRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsEmbedLinks", R.string.UserRestrictionsEmbedLinks), !bannedRights.embed_links && !defaultBannedRights.embed_links, true); - checkCell.setIcon(defaultBannedRights.embed_links ? R.drawable.permission_locked : 0); - } else if (position == sendPollsRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendPolls", R.string.UserRestrictionsSendPolls), !bannedRights.send_polls && !defaultBannedRights.send_polls, true); - checkCell.setIcon(defaultBannedRights.send_polls ? 
R.drawable.permission_locked : 0); + checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSend", R.string.UserRestrictionsSend), !bannedRights.send_plain && !defaultBannedRights.send_plain, true); + checkCell.setIcon(defaultBannedRights.send_plain ? R.drawable.permission_locked : 0); } if (currentType == TYPE_ADD_BOT) { // checkCell.setEnabled((asAdmin || position == manageRow) && !checkCell.hasIcon(), false); } else { - if (position == sendMediaRow || position == sendStickersRow || position == sendGamesRow || position == sendInlineRow || position == sendGifsRow || position == embedLinksRow || position == sendPollsRow) { - checkCell.setEnabled(!bannedRights.send_messages && !bannedRights.view_messages && !defaultBannedRights.send_messages && !defaultBannedRights.view_messages); - } else if (position == sendMessagesRow) { +// if (position == sendMediaRow || position == sendStickersRow || position == embedLinksRow || position == sendPollsRow) { +// checkCell.setEnabled(!bannedRights.send_messages && !bannedRights.view_messages && !defaultBannedRights.send_messages && !defaultBannedRights.view_messages); +// } else + if (position == sendMessagesRow) { checkCell.setEnabled(!bannedRights.view_messages && !defaultBannedRights.view_messages); } } @@ -1785,7 +1900,11 @@ public void onViewDetachedFromWindow(RecyclerView.ViewHolder holder) { @Override public int getItemViewType(int position) { - if (position == 0) { + if (isExpandableSendMediaRow(position)) { + return VIEW_TYPE_INNER_CHECK; + } else if (position == sendMediaRow) { + return VIEW_TYPE_EXPANDABLE_SWITCH; + } else if (position == 0) { return VIEW_TYPE_USER_CELL; } else if (position == 1 || position == rightsShadowRow || position == removeAdminShadowRow || position == untilSectionRow || position == transferOwnerShadowRow) { return VIEW_TYPE_SHADOW_CELL; @@ -1793,8 +1912,7 @@ public int getItemViewType(int position) { return VIEW_TYPE_HEADER_CELL; } else if (position == changeInfoRow || position == postMessagesRow || position == editMesagesRow || position == deleteMessagesRow || position == addAdminsRow || position == banUsersRow || position == addUsersRow || position == pinMessagesRow || - position == sendMessagesRow || position == sendMediaRow || position == sendStickersRow || position == sendGamesRow || position == sendInlineRow || - position == sendGifsRow || position == embedLinksRow || position == sendPollsRow || position == anonymousRow || position == startVoiceChatRow || position == manageRow || position == manageTopicsRow) { + position == sendMessagesRow || position == anonymousRow || position == startVoiceChatRow || position == manageRow || position == manageTopicsRow) { return VIEW_TYPE_SWITCH_CELL; } else if (position == cantEditInfoRow || position == rankInfoRow) { return VIEW_TYPE_INFO_CELL; @@ -1810,7 +1928,69 @@ public int getItemViewType(int position) { } } + private void setSendMediaEnabled(boolean enabled) { + bannedRights.send_media = !enabled; + bannedRights.send_photos = !enabled; + bannedRights.send_videos = !enabled; + bannedRights.send_stickers = !enabled; + bannedRights.send_audios = !enabled; + bannedRights.send_docs = !enabled; + bannedRights.send_voices = !enabled; + bannedRights.send_roundvideos = !enabled; + bannedRights.embed_links = !enabled; + bannedRights.send_polls = !enabled; + AndroidUtilities.updateVisibleRows(listView); + } + + private int getSendMediaSelectedCount() { + int i = 0; + if (!bannedRights.send_photos && !defaultBannedRights.send_photos) { + i++; + } + if 
(!bannedRights.send_videos && !defaultBannedRights.send_videos) { + i++; + } + if (!bannedRights.send_stickers && !defaultBannedRights.send_stickers) { + i++; + } + if (!bannedRights.send_audios && !defaultBannedRights.send_audios) { + i++; + } + if (!bannedRights.send_docs && !defaultBannedRights.send_docs) { + i++; + } + if (!bannedRights.send_voices && !defaultBannedRights.send_voices) { + i++; + } + if (!bannedRights.send_roundvideos && !defaultBannedRights.send_roundvideos) { + i++; + } + if (!bannedRights.embed_links && !defaultBannedRights.embed_links && !bannedRights.send_plain && !defaultBannedRights.send_plain) { + i++; + } + if (!bannedRights.send_polls && !defaultBannedRights.send_polls) { + i++; + } + return i; + } + + private boolean allDefaultMediaBanned() { + return defaultBannedRights.send_photos && defaultBannedRights.send_videos && defaultBannedRights.send_stickers + && defaultBannedRights.send_audios && defaultBannedRights.send_docs && defaultBannedRights.send_voices && + defaultBannedRights.send_roundvideos && defaultBannedRights.embed_links && defaultBannedRights.send_polls; + } + + private boolean isExpandableSendMediaRow(int position) { + if (position == sendStickersRow || position == embedLinksRow || position == sendPollsRow || + position == sendPhotosRow || position == sendVideosRow || position == sendFilesRow || + position == sendMusicRow || position == sendRoundRow || position == sendVoiceRow) { + return true; + } + return false; + } + private ValueAnimator asAdminAnimator; + private void updateAsAdmin(boolean animated) { if (addBotButton != null) { addBotButton.invalidate(); @@ -1822,7 +2002,7 @@ private void updateAsAdmin(boolean animated) { if (child instanceof TextCheckCell2) { if (!asAdmin) { if (childPosition == changeInfoRow && !defaultBannedRights.change_info || - childPosition == pinMessagesRow && !defaultBannedRights.pin_messages) { + childPosition == pinMessagesRow && !defaultBannedRights.pin_messages) { ((TextCheckCell2) child).setChecked(true); ((TextCheckCell2) child).setEnabled(false, false); } else { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChatUsersActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChatUsersActivity.java index 522c509842..e12288555b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChatUsersActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChatUsersActivity.java @@ -19,7 +19,6 @@ import android.text.Spanned; import android.text.TextUtils; import android.text.style.ForegroundColorSpan; -import android.util.Log; import android.util.SparseIntArray; import android.view.Gravity; import android.view.View; @@ -35,6 +34,7 @@ import androidx.recyclerview.widget.RecyclerView; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.BotWebViewVibrationEffect; import org.telegram.messenger.ChatObject; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; @@ -54,6 +54,7 @@ import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ActionBar.ThemeDescription; import org.telegram.ui.Adapters.SearchAdapterHelper; +import org.telegram.ui.Cells.CheckBoxCell; import org.telegram.ui.Cells.GraySectionCell; import org.telegram.ui.Cells.HeaderCell; import org.telegram.ui.Cells.LoadingCell; @@ -65,6 +66,7 @@ import org.telegram.ui.Cells.TextInfoPrivacyCell; import org.telegram.ui.Cells.TextSettingsCell; import org.telegram.ui.Components.BulletinFactory; +import org.telegram.ui.Components.CubicBezierInterpolator; import 
org.telegram.ui.Components.FlickerLoadingView; import org.telegram.ui.Components.GigagroupConvertAlert; import org.telegram.ui.Components.LayoutHelper; @@ -76,10 +78,14 @@ import java.util.ArrayList; import java.util.Collections; +import java.util.Locale; import java.util.concurrent.atomic.AtomicInteger; public class ChatUsersActivity extends BaseFragment implements NotificationCenter.NotificationCenterDelegate { + private static final int VIEW_TYPE_INNER_CHECK = 13; + private static final int VIEW_TYPE_EXPANDABLE_SWITCH = 14; + private ListAdapter listViewAdapter; private StickerEmptyView emptyView; private RecyclerListView listView; @@ -113,11 +119,17 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente private int permissionsSectionRow; private int sendMessagesRow; private int sendMediaRow; - private int sendStickersRow; - private int sendGamesRow; - private int sendInlineRow; - private int sendGifsRow; + private int sendMediaPhotosRow; + private int sendMediaVideosRow; + private int sendMediaStickerGifsRow; + private int sendMediaMusicRow; + private int sendMediaFilesRow; + private int sendMediaVoiceMessagesRow; + private int sendMediaVideoMessagesRow; + private int sendMediaEmbededLinksRow; private int sendPollsRow; + + private int sendStickersRow; private int embedLinksRow; private int changeInfoRow; private int addUsersRow; @@ -167,6 +179,8 @@ public class ChatUsersActivity extends BaseFragment implements NotificationCente private int delayResults; + private boolean sendMediaExpanded; + private ChatUsersActivityDelegate delegate; private boolean needOpenSearch; @@ -232,6 +246,21 @@ public ChatUsersActivity(Bundle args) { defaultBannedRights.invite_users = currentChat.default_banned_rights.invite_users; defaultBannedRights.manage_topics = currentChat.default_banned_rights.manage_topics; defaultBannedRights.change_info = currentChat.default_banned_rights.change_info; + defaultBannedRights.send_photos = currentChat.default_banned_rights.send_photos; + defaultBannedRights.send_videos = currentChat.default_banned_rights.send_videos; + defaultBannedRights.send_roundvideos = currentChat.default_banned_rights.send_roundvideos; + defaultBannedRights.send_audios = currentChat.default_banned_rights.send_audios; + defaultBannedRights.send_voices = currentChat.default_banned_rights.send_voices; + defaultBannedRights.send_docs = currentChat.default_banned_rights.send_docs; + defaultBannedRights.send_plain = currentChat.default_banned_rights.send_plain; + if (!defaultBannedRights.send_media && defaultBannedRights.send_docs && defaultBannedRights.send_voices && defaultBannedRights.send_audios && defaultBannedRights.send_roundvideos && defaultBannedRights.send_videos && defaultBannedRights.send_photos) { + defaultBannedRights.send_photos = false; + defaultBannedRights.send_videos = false; + defaultBannedRights.send_roundvideos = false; + defaultBannedRights.send_audios = false; + defaultBannedRights.send_voices = false; + defaultBannedRights.send_docs = false; + } } initialBannedRights = ChatObject.getBannedRightsString(defaultBannedRights); isChannel = ChatObject.isChannel(currentChat) && !currentChat.megagroup; @@ -265,9 +294,6 @@ private void updateRows() { sendMessagesRow = -1; sendMediaRow = -1; sendStickersRow = -1; - sendGamesRow = -1; - sendInlineRow = -1; - sendGifsRow = -1; sendPollsRow = -1; embedLinksRow = -1; addUsersRow = -1; @@ -288,18 +314,31 @@ private void updateRows() { loadingProgressRow = -1; loadingUserCellRow = -1; loadingHeaderRow = -1; + 
sendMediaPhotosRow = -1; + sendMediaVideosRow = -1; + sendMediaStickerGifsRow = -1; + sendMediaMusicRow = -1; + sendMediaFilesRow = -1; + sendMediaVoiceMessagesRow = -1; + sendMediaVideoMessagesRow = -1; + sendMediaEmbededLinksRow = -1; rowCount = 0; if (type == TYPE_KICKED) { permissionsSectionRow = rowCount++; sendMessagesRow = rowCount++; sendMediaRow = rowCount++; - sendStickersRow = rowCount++; - sendGamesRow = rowCount++; - sendInlineRow = rowCount++; - sendGifsRow = rowCount++; - sendPollsRow = rowCount++; - embedLinksRow = rowCount++; + if (sendMediaExpanded) { + sendMediaPhotosRow = rowCount++; + sendMediaVideosRow = rowCount++; + sendMediaStickerGifsRow = rowCount++; + sendMediaMusicRow = rowCount++; + sendMediaFilesRow = rowCount++; + sendMediaVoiceMessagesRow = rowCount++; + sendMediaVideoMessagesRow = rowCount++; + sendMediaEmbededLinksRow = rowCount++; + sendPollsRow = rowCount++; + } addUsersRow = rowCount++; pinMessagesRow = rowCount++; changeInfoRow = rowCount++; @@ -601,7 +640,7 @@ protected void dispatchDraw(Canvas canvas) { emptyView.showProgress(true, false); frameLayout.addView(emptyView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - emptyView.addView(progressLayout,0); + emptyView.addView(progressLayout, 0); listView = new RecyclerListView(context) { @Override @@ -611,6 +650,14 @@ public void invalidate() { fragmentView.invalidate(); } } + + @Override + protected void dispatchDraw(Canvas canvas) { + if (permissionsSectionRow >= 0 && participantsDivider2Row >= 0) { + drawSectionBackground(canvas, permissionsSectionRow, Math.max(0, participantsDivider2Row - 1), getThemedColor(Theme.key_windowBackgroundWhite)); + } + super.dispatchDraw(canvas); + } }; listView.setLayoutManager(layoutManager = new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false) { @Override @@ -623,31 +670,6 @@ public int scrollVerticallyBy(int dy, RecyclerView.Recycler recycler, RecyclerVi }); DefaultItemAnimator itemAnimator = new DefaultItemAnimator() { - @Override - protected long getAddAnimationDelay(long removeDuration, long moveDuration, long changeDuration) { - return 0; - } - - @Override - protected long getMoveAnimationDelay() { - return 0; - } - - @Override - public long getMoveDuration() { - return 220; - } - - @Override - public long getRemoveDuration() { - return 220; - } - - @Override - public long getAddDuration() { - return 220; - } - int animationIndex = -1; @Override @@ -667,7 +689,24 @@ public void runPendingAnimations() { } super.runPendingAnimations(); } + + @Override + protected void onMoveAnimationUpdate(RecyclerView.ViewHolder holder) { + super.onMoveAnimationUpdate(holder); + listView.invalidate(); + } + + @Override + protected void onChangeAnimationUpdate(RecyclerView.ViewHolder holder) { + super.onChangeAnimationUpdate(holder); + listView.invalidate(); + } }; + itemAnimator.setDurations(320); + itemAnimator.setMoveDelay(0); + itemAnimator.setAddDelay(0); + itemAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + itemAnimator.setDelayAnimations(false); listView.setItemAnimator(itemAnimator); itemAnimator.setSupportsChangeAnimations(false); listView.setAnimateEmptyView(true, RecyclerListView.EMPTY_VIEW_ANIMATION_TYPE_ALPHA); @@ -675,10 +714,42 @@ public void runPendingAnimations() { listView.setVerticalScrollbarPosition(LocaleController.isRTL ? 
RecyclerListView.SCROLLBAR_POSITION_LEFT : RecyclerListView.SCROLLBAR_POSITION_RIGHT); frameLayout.addView(listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - listView.setOnItemClickListener((view, position) -> { + listView.setOnItemClickListener((view, position, x, y) -> { boolean listAdapter = listView.getAdapter() == listViewAdapter; if (listAdapter) { - if (position == addNewRow) { + if (isExpandableSendMediaRow(position)) { + CheckBoxCell checkBoxCell = (CheckBoxCell) view; + if (position == sendMediaPhotosRow) { + defaultBannedRights.send_photos = !defaultBannedRights.send_photos; + } else if (position == sendMediaVideosRow) { + defaultBannedRights.send_videos = !defaultBannedRights.send_videos; + } else if (position == sendMediaStickerGifsRow) { + defaultBannedRights.send_stickers = !defaultBannedRights.send_stickers; + } else if (position == sendMediaMusicRow) { + defaultBannedRights.send_audios = !defaultBannedRights.send_audios; + } else if (position == sendMediaFilesRow) { + defaultBannedRights.send_docs = !defaultBannedRights.send_docs; + } else if (position == sendMediaVoiceMessagesRow) { + defaultBannedRights.send_voices = !defaultBannedRights.send_voices; + } else if (position == sendMediaVideoMessagesRow) { + defaultBannedRights.send_roundvideos = !defaultBannedRights.send_roundvideos; + } else if (position == sendMediaEmbededLinksRow) { + if (defaultBannedRights.send_plain) { + View senMessagesView = layoutManager.findViewByPosition(sendMessagesRow); + if (senMessagesView != null) { + AndroidUtilities.shakeViewSpring(senMessagesView); + BotWebViewVibrationEffect.APP_ERROR.vibrate(); + return; + } + } + defaultBannedRights.embed_links = !defaultBannedRights.embed_links; + } else if (position == sendPollsRow) { + defaultBannedRights.send_polls = !defaultBannedRights.send_polls; + } + + checkBoxCell.setChecked(!checkBoxCell.isChecked(), true); + AndroidUtilities.updateVisibleRows(listView); + } else if (position == addNewRow) { if (type == TYPE_BANNED || type == TYPE_KICKED) { Bundle bundle = new Bundle(); bundle.putLong("chat_id", chatId); @@ -686,6 +757,7 @@ public void runPendingAnimations() { bundle.putInt("selectType", type == TYPE_BANNED ? ChatUsersActivity.SELECT_TYPE_BLOCK : ChatUsersActivity.SELECT_TYPE_EXCEPTION); ChatUsersActivity fragment = new ChatUsersActivity(bundle); fragment.setInfo(info); + fragment.setBannedRights(defaultBannedRights); fragment.setDelegate(new ChatUsersActivityDelegate() { @Override @@ -784,79 +856,37 @@ public void didSelectUser(long uid) { fragment.setDelegate(new GroupCreateActivity.ContactsAddActivityDelegate() { @Override public void didSelectUsers(ArrayList users, int fwdCount) { - final int count = users.size(); - final int[] processed = new int[1]; - final ArrayList userRestrictedPrivacy = new ArrayList<>(); - processed[0] = 0; - final Runnable showUserRestrictedPrivacyAlert = () -> { - CharSequence title, description; - if (userRestrictedPrivacy.size() == 1) { - if (count > 1) { - title = LocaleController.getString("InviteToGroupErrorTitleAUser", R.string.InviteToGroupErrorTitleAUser); + if (fragment.getParentActivity() == null) { + return; + } + getMessagesController().addUsersToChat(currentChat, ChatUsersActivity.this, users, fwdCount, user -> { + ChatUsersActivity.DiffCallback savedState = saveState(); + ArrayList array = contactsMap != null && contactsMap.size() != 0 ? contacts : participants; + LongSparseArray map = contactsMap != null && contactsMap.size() != 0 ? 
contactsMap : participantsMap; + if (map.get(user.id) == null) { + if (ChatObject.isChannel(currentChat)) { + TLRPC.TL_channelParticipant channelParticipant1 = new TLRPC.TL_channelParticipant(); + channelParticipant1.inviter_id = getUserConfig().getClientUserId(); + channelParticipant1.peer = new TLRPC.TL_peerUser(); + channelParticipant1.peer.user_id = user.id; + channelParticipant1.date = getConnectionsManager().getCurrentTime(); + array.add(0, channelParticipant1); + map.put(user.id, channelParticipant1); } else { - title = LocaleController.getString("InviteToGroupErrorTitleThisUser", R.string.InviteToGroupErrorTitleThisUser); + TLRPC.ChatParticipant participant = new TLRPC.TL_chatParticipant(); + participant.user_id = user.id; + participant.inviter_id = getUserConfig().getClientUserId(); + array.add(0, participant); + map.put(user.id, participant); } - description = AndroidUtilities.replaceTags(LocaleController.formatString("InviteToGroupErrorMessageSingle", R.string.InviteToGroupErrorMessageSingle, UserObject.getFirstName(userRestrictedPrivacy.get(0)))); - } else if (userRestrictedPrivacy.size() == 2) { - title = LocaleController.getString("InviteToGroupErrorTitleSomeUsers", R.string.InviteToGroupErrorTitleSomeUsers); - description = AndroidUtilities.replaceTags(LocaleController.formatString("InviteToGroupErrorMessageDouble", R.string.InviteToGroupErrorMessageDouble, UserObject.getFirstName(userRestrictedPrivacy.get(0)), UserObject.getFirstName(userRestrictedPrivacy.get(1)))); - } else if (userRestrictedPrivacy.size() == count) { - title = LocaleController.getString("InviteToGroupErrorTitleTheseUsers", R.string.InviteToGroupErrorTitleTheseUsers); - description = LocaleController.getString("InviteToGroupErrorMessageMultipleAll", R.string.InviteToGroupErrorMessageMultipleAll); - } else { - title = LocaleController.getString("InviteToGroupErrorTitleSomeUsers", R.string.InviteToGroupErrorTitleSomeUsers); - description = LocaleController.getString("InviteToGroupErrorMessageMultipleSome", R.string.InviteToGroupErrorMessageMultipleSome); } - new AlertDialog.Builder(context) - .setTitle(title) - .setMessage(description) - .setPositiveButton(LocaleController.getString("OK", R.string.OK), null) - .show(); - }; - for (int a = 0; a < count; a++) { - final TLRPC.User user = users.get(a); - getMessagesController().addUserToChat(chatId, user, fwdCount, null, ChatUsersActivity.this, false, () -> { - processed[0]++; - if (processed[0] >= count && userRestrictedPrivacy.size() > 0) { - showUserRestrictedPrivacyAlert.run(); - } - DiffCallback savedState = saveState(); - ArrayList array = contactsMap != null && contactsMap.size() != 0 ? contacts : participants; - LongSparseArray map = contactsMap != null && contactsMap.size() != 0 ? 
contactsMap : participantsMap; - if (map.get(user.id) == null) { - if (ChatObject.isChannel(currentChat)) { - TLRPC.TL_channelParticipant channelParticipant1 = new TLRPC.TL_channelParticipant(); - channelParticipant1.inviter_id = getUserConfig().getClientUserId(); - channelParticipant1.peer = new TLRPC.TL_peerUser(); - channelParticipant1.peer.user_id = user.id; - channelParticipant1.date = getConnectionsManager().getCurrentTime(); - array.add(0, channelParticipant1); - map.put(user.id, channelParticipant1); - } else { - TLRPC.ChatParticipant participant = new TLRPC.TL_chatParticipant(); - participant.user_id = user.id; - participant.inviter_id = getUserConfig().getClientUserId(); - array.add(0, participant); - map.put(user.id, participant); - } - } - if (array == participants) { - sortAdmins(participants); - } - updateListAnimated(savedState); - }, err -> { - processed[0]++; - boolean privacyRestricted; - if (privacyRestricted = err != null && "USER_PRIVACY_RESTRICTED".equals(err.text)) { - userRestrictedPrivacy.add(user); - } - if (processed[0] >= count && userRestrictedPrivacy.size() > 0) { - showUserRestrictedPrivacyAlert.run(); - } - return !privacyRestricted; - }); - getMessagesController().putUser(user, false); - } + if (array == participants) { + sortAdmins(participants); + } + updateListAnimated(savedState); + }, user -> { + + }, null); } @Override @@ -989,6 +1019,14 @@ protected void onCancel() { } return; } + if (position == sendMediaRow) { + //defaultBannedRights.send_media = !defaultBannedRights.send_media; + DiffCallback diffCallback = saveState(); + sendMediaExpanded = !sendMediaExpanded; + AndroidUtilities.updateVisibleRows(listView); + updateListAnimated(diffCallback); + return; + } checkCell.setChecked(!checkCell.isChecked()); if (position == changeInfoRow) { defaultBannedRights.change_info = !defaultBannedRights.change_info; @@ -999,90 +1037,26 @@ protected void onCancel() { } else if (position == pinMessagesRow) { defaultBannedRights.pin_messages = !defaultBannedRights.pin_messages; } else { - boolean disabled = !checkCell.isChecked(); if (position == sendMessagesRow) { - defaultBannedRights.send_messages = !defaultBannedRights.send_messages; + defaultBannedRights.send_plain = !defaultBannedRights.send_plain; + if (sendMediaEmbededLinksRow >= 0) { + listViewAdapter.notifyItemChanged(sendMediaEmbededLinksRow); + } + if (sendMediaRow >= 0) { + listViewAdapter.notifyItemChanged(sendMediaRow); + } } else if (position == sendMediaRow) { - defaultBannedRights.send_media = !defaultBannedRights.send_media; + DiffCallback diffCallback = saveState(); + sendMediaExpanded = !sendMediaExpanded; + AndroidUtilities.updateVisibleRows(listView); + updateListAnimated(diffCallback); } else if (position == sendStickersRow) { - defaultBannedRights.send_stickers = !defaultBannedRights.send_stickers; - } else if (position == sendGamesRow) { - defaultBannedRights.send_games = !defaultBannedRights.send_games; - } else if (position == sendInlineRow) { - defaultBannedRights.send_inline = !defaultBannedRights.send_inline; - } else if (position == sendGifsRow) { - defaultBannedRights.send_gifs = !defaultBannedRights.send_gifs; + defaultBannedRights.send_stickers = defaultBannedRights.send_games = defaultBannedRights.send_gifs = defaultBannedRights.send_inline = !defaultBannedRights.send_stickers; } else if (position == embedLinksRow) { defaultBannedRights.embed_links = !defaultBannedRights.embed_links; } else if (position == sendPollsRow) { defaultBannedRights.send_polls = 
!defaultBannedRights.send_polls; } - if (disabled) { - if (defaultBannedRights.view_messages && !defaultBannedRights.send_messages) { - defaultBannedRights.send_messages = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendMessagesRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - if ((defaultBannedRights.view_messages || defaultBannedRights.send_messages) && !defaultBannedRights.send_media) { - defaultBannedRights.send_media = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendMediaRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - if ((defaultBannedRights.view_messages || defaultBannedRights.send_messages) && !defaultBannedRights.send_polls) { - defaultBannedRights.send_polls = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendPollsRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - if ((defaultBannedRights.view_messages || defaultBannedRights.send_messages) && !defaultBannedRights.send_stickers) { - defaultBannedRights.send_stickers = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendStickersRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - if ((defaultBannedRights.view_messages || defaultBannedRights.send_messages) && !defaultBannedRights.send_games) { - defaultBannedRights.send_games = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendGamesRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - if ((defaultBannedRights.view_messages || defaultBannedRights.send_messages) && !defaultBannedRights.send_inline) { - defaultBannedRights.send_inline = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendInlineRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - if ((defaultBannedRights.view_messages || defaultBannedRights.send_messages) && !defaultBannedRights.send_gifs) { - defaultBannedRights.send_gifs = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendGifsRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - if ((defaultBannedRights.view_messages || defaultBannedRights.send_messages) && !defaultBannedRights.embed_links) { - defaultBannedRights.embed_links = true; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(embedLinksRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(false); - } - } - } else { - if ((!defaultBannedRights.embed_links || !defaultBannedRights.send_inline || !defaultBannedRights.send_media || !defaultBannedRights.send_polls) && defaultBannedRights.send_messages) { - defaultBannedRights.send_messages = false; - RecyclerListView.ViewHolder holder = listView.findViewHolderForAdapterPosition(sendMessagesRow); - if (holder != null) { - ((TextCheckCell2) holder.itemView).setChecked(true); - } - } - } } return; } @@ -1109,8 +1083,8 @@ protected void onCancel() { if (adminRights == null) { adminRights = new TLRPC.TL_chatAdminRights(); adminRights.change_info = adminRights.post_messages = adminRights.edit_messages = - adminRights.delete_messages = adminRights.ban_users = adminRights.invite_users = - adminRights.manage_topics = 
adminRights.pin_messages = adminRights.add_admins = true; + adminRights.delete_messages = adminRights.ban_users = adminRights.invite_users = + adminRights.manage_topics = adminRights.pin_messages = adminRights.add_admins = true; if (!isChannel) { adminRights.manage_call = true; } @@ -1123,8 +1097,8 @@ protected void onCancel() { if (participant instanceof TLRPC.TL_chatParticipantCreator) { adminRights = new TLRPC.TL_chatAdminRights(); adminRights.change_info = adminRights.post_messages = adminRights.edit_messages = - adminRights.delete_messages = adminRights.ban_users = adminRights.invite_users = - adminRights.manage_topics = adminRights.pin_messages = adminRights.add_admins = true; + adminRights.delete_messages = adminRights.ban_users = adminRights.invite_users = + adminRights.manage_topics = adminRights.pin_messages = adminRights.add_admins = true; if (!isChannel) { adminRights.manage_call = true; } @@ -1203,7 +1177,14 @@ protected void onCancel() { bannedRights.view_messages = true; bannedRights.send_stickers = true; bannedRights.send_media = true; + bannedRights.send_photos = true; + bannedRights.send_videos = true; + bannedRights.send_roundvideos = true; + bannedRights.send_audios = true; + bannedRights.send_voices = true; + bannedRights.send_docs = true; bannedRights.embed_links = true; + bannedRights.send_plain = true; bannedRights.send_messages = true; bannedRights.send_games = true; bannedRights.send_inline = true; @@ -1214,7 +1195,7 @@ protected void onCancel() { bannedRights.manage_topics = true; bannedRights.change_info = true; } - ChatRightsEditActivity fragment = new ChatRightsEditActivity(peerId, chatId, adminRights, defaultBannedRights, bannedRights, rank, type == TYPE_ADMIN ? ChatRightsEditActivity.TYPE_ADMIN : ChatRightsEditActivity.TYPE_BANNED, canEdit, participant == null, null, participant); + ChatRightsEditActivity fragment = new ChatRightsEditActivity(peerId, chatId, adminRights, defaultBannedRights, bannedRights, rank, type == TYPE_ADMIN ? 
ChatRightsEditActivity.TYPE_ADMIN : ChatRightsEditActivity.TYPE_BANNED, canEdit, participant == null, null); fragment.setDelegate(new ChatRightsEditActivity.ChatRightsEditActivityDelegate() { @Override public void didSetRights(int rights, TLRPC.TL_chatAdminRights rightsAdmin, TLRPC.TL_chatBannedRights rightsBanned, String rank) { @@ -1270,6 +1251,12 @@ public void onScrolled(RecyclerView recyclerView, int dx, int dy) { return fragmentView; } + private void setBannedRights(TLRPC.TL_chatBannedRights defaultBannedRights) { + if (defaultBannedRights != null) { + this.defaultBannedRights = defaultBannedRights; + } + } + private void sortAdmins(ArrayList participants) { Collections.sort(participants, (lhs, rhs) -> { int type1 = getChannelAdminParticipantType(lhs); @@ -1395,8 +1382,8 @@ private void onOwnerChaged(TLRPC.User user) { admin.date = (int) (System.currentTimeMillis() / 1000); admin.admin_rights = new TLRPC.TL_chatAdminRights(); admin.admin_rights.change_info = admin.admin_rights.post_messages = admin.admin_rights.edit_messages = - admin.admin_rights.delete_messages = admin.admin_rights.ban_users = admin.admin_rights.invite_users = - admin.admin_rights.manage_topics = admin.admin_rights.pin_messages = admin.admin_rights.add_admins = true; + admin.admin_rights.delete_messages = admin.admin_rights.ban_users = admin.admin_rights.invite_users = + admin.admin_rights.manage_topics = admin.admin_rights.pin_messages = admin.admin_rights.add_admins = true; if (!isChannel) { admin.admin_rights.manage_call = true; } @@ -1439,7 +1426,7 @@ private void onOwnerChaged(TLRPC.User user) { private void openRightsEdit2(long peerId, int date, TLObject participant, TLRPC.TL_chatAdminRights adminRights, TLRPC.TL_chatBannedRights bannedRights, String rank, boolean canEditAdmin, int type, boolean removeFragment) { boolean[] needShowBulletin = new boolean[1]; final boolean isAdmin = participant instanceof TLRPC.TL_channelParticipantAdmin || participant instanceof TLRPC.TL_chatParticipantAdmin; - ChatRightsEditActivity fragment = new ChatRightsEditActivity(peerId, chatId, adminRights, defaultBannedRights, bannedRights, rank, type, true, false, null, participant) { + ChatRightsEditActivity fragment = new ChatRightsEditActivity(peerId, chatId, adminRights, defaultBannedRights, bannedRights, rank, type, true, false, null) { @Override public void onTransitionAnimationEnd(boolean isOpen, boolean backward) { if (!isOpen && backward && needShowBulletin[0] && BulletinFactory.canShowBulletin(ChatUsersActivity.this)) { @@ -1530,7 +1517,7 @@ public boolean canBeginSlide() { } private void openRightsEdit(long user_id, TLObject participant, TLRPC.TL_chatAdminRights adminRights, TLRPC.TL_chatBannedRights bannedRights, String rank, boolean canEditAdmin, int type, boolean removeFragment) { - ChatRightsEditActivity fragment = new ChatRightsEditActivity(user_id, chatId, adminRights, defaultBannedRights, bannedRights, rank, type, canEditAdmin, participant == null, null, participant); + ChatRightsEditActivity fragment = new ChatRightsEditActivity(user_id, chatId, adminRights, defaultBannedRights, bannedRights, rank, type, canEditAdmin, participant == null, null); fragment.setDelegate(new ChatRightsEditActivity.ChatRightsEditActivityDelegate() { @Override public void didSetRights(int rights, TLRPC.TL_chatAdminRights rightsAdmin, TLRPC.TL_chatBannedRights rightsBanned, String rank) { @@ -1774,7 +1761,7 @@ private boolean createMenuForParticipant(final TLObject participant, boolean res AlertDialog alertDialog = builder.create(); 
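// --- Editor's note (annotation, not part of the patch) --------------------------------------
// The hunks above and below replace the old single send_media / send_stickers / send_games /
// send_inline / send_gifs flags with the granular TLRPC.TL_chatBannedRights fields used in
// this diff (send_photos, send_videos, send_audios, send_docs, send_voices, send_roundvideos,
// send_plain, plus the existing send_stickers, embed_links and send_polls). The "%d/9"
// collapse-arrow label counts how many of the nine media sub-toggles are still allowed; see
// getSendMediaSelectedCount(...) further down in ChatUsersActivity. Below is a minimal
// standalone sketch of that counting rule only — the class name is hypothetical, the field
// names are the ones that appear in this diff, and it is not part of the patch itself.
class SendMediaCountSketch {
    // Returns how many of the nine "send media" sub-permissions are currently enabled.
    static int countAllowed(org.telegram.tgnet.TLRPC.TL_chatBannedRights rights) {
        boolean[] allowed = {
                !rights.send_photos,
                !rights.send_videos,
                !rights.send_stickers,                      // stickers and GIFs share one toggle
                !rights.send_audios,                        // music
                !rights.send_docs,                          // files
                !rights.send_voices,                        // voice messages
                !rights.send_roundvideos,                   // video messages
                !rights.embed_links && !rights.send_plain,  // links also require plain text
                !rights.send_polls
        };
        int count = 0;
        for (boolean b : allowed) {
            if (b) {
                count++;
            }
        }
        return count;
    }
}
// ---------------------------------------------------------------------------------------------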
showDialog(alertDialog); if (hasRemove) { - alertDialog.setItemColor(items.size() - 1, Theme.getColor(Theme.key_dialogTextRed2), Theme.getColor(Theme.key_dialogRedIcon)); + alertDialog.setItemColor(items.size() - 1, Theme.getColor(Theme.key_dialogTextRed), Theme.getColor(Theme.key_dialogRedIcon)); } } else { CharSequence[] items; @@ -1827,7 +1814,7 @@ private boolean createMenuForParticipant(final TLObject participant, boolean res builder.setItems(items, icons, (dialogInterface, i) -> { if (type == TYPE_ADMIN) { if (i == 0 && items.length == 2) { - ChatRightsEditActivity fragment = new ChatRightsEditActivity(peerId, chatId, adminRights, null, null, rank, ChatRightsEditActivity.TYPE_ADMIN, true, false, null, participant); + ChatRightsEditActivity fragment = new ChatRightsEditActivity(peerId, chatId, adminRights, null, null, rank, ChatRightsEditActivity.TYPE_ADMIN, true, false, null); fragment.setDelegate(new ChatRightsEditActivity.ChatRightsEditActivityDelegate() { @Override public void didSetRights(int rights, TLRPC.TL_chatAdminRights rightsAdmin, TLRPC.TL_chatBannedRights rightsBanned, String rank) { @@ -1847,13 +1834,13 @@ public void didChangeOwner(TLRPC.User user) { }); presentFragment(fragment); } else { - getMessagesController().setUserAdminRole(chatId, getMessagesController().getUser(peerId), new TLRPC.TL_chatAdminRights(), "", !isChannel, ChatUsersActivity.this, false, false, null,null); + getMessagesController().setUserAdminRole(chatId, getMessagesController().getUser(peerId), new TLRPC.TL_chatAdminRights(), "", !isChannel, ChatUsersActivity.this, false, false, null, null); removeParticipants(peerId); } } else if (type == TYPE_BANNED || type == TYPE_KICKED) { if (i == 0) { if (type == TYPE_KICKED) { - ChatRightsEditActivity fragment = new ChatRightsEditActivity(peerId, chatId, null, defaultBannedRights, bannedRights, rank, ChatRightsEditActivity.TYPE_BANNED, true, false, null, participant); + ChatRightsEditActivity fragment = new ChatRightsEditActivity(peerId, chatId, null, defaultBannedRights, bannedRights, rank, ChatRightsEditActivity.TYPE_BANNED, true, false, null); fragment.setDelegate(new ChatRightsEditActivity.ChatRightsEditActivityDelegate() { @Override public void didSetRights(int rights, TLRPC.TL_chatAdminRights rightsAdmin, TLRPC.TL_chatBannedRights rightsBanned, String rank) { @@ -1917,7 +1904,7 @@ public void didChangeOwner(TLRPC.User user) { AlertDialog alertDialog = builder.create(); showDialog(alertDialog); if (type == TYPE_ADMIN) { - alertDialog.setItemColor(items.length - 1, Theme.getColor(Theme.key_dialogTextRed2), Theme.getColor(Theme.key_dialogRedIcon)); + alertDialog.setItemColor(items.length - 1, Theme.getColor(Theme.key_dialogTextRed), Theme.getColor(Theme.key_dialogRedIcon)); } } return true; @@ -2024,17 +2011,54 @@ private String formatUserPermissions(TLRPC.TL_chatBannedRights rights) { if (rights.view_messages && defaultBannedRights.view_messages != rights.view_messages) { builder.append(LocaleController.getString("UserRestrictionsNoRead", R.string.UserRestrictionsNoRead)); } - if (rights.send_messages && defaultBannedRights.send_messages != rights.send_messages) { + if (rights.send_messages && defaultBannedRights.send_plain != rights.send_plain) { if (builder.length() != 0) { builder.append(", "); } - builder.append(LocaleController.getString("UserRestrictionsNoSend", R.string.UserRestrictionsNoSend)); + builder.append(LocaleController.getString("UserRestrictionsNoSendText", R.string.UserRestrictionsNoSendText)); } if (rights.send_media && 
defaultBannedRights.send_media != rights.send_media) { if (builder.length() != 0) { builder.append(", "); } builder.append(LocaleController.getString("UserRestrictionsNoSendMedia", R.string.UserRestrictionsNoSendMedia)); + } else { + if (rights.send_photos && defaultBannedRights.send_photos != rights.send_photos) { + if (builder.length() != 0) { + builder.append(", "); + } + builder.append(LocaleController.getString("UserRestrictionsNoSendPhotos", R.string.UserRestrictionsNoSendPhotos)); + } + if (rights.send_videos && defaultBannedRights.send_videos != rights.send_videos) { + if (builder.length() != 0) { + builder.append(", "); + } + builder.append(LocaleController.getString("UserRestrictionsNoSendVideos", R.string.UserRestrictionsNoSendVideos)); + } + if (rights.send_audios && defaultBannedRights.send_audios != rights.send_audios) { + if (builder.length() != 0) { + builder.append(", "); + } + builder.append(LocaleController.getString("UserRestrictionsNoSendMusic", R.string.UserRestrictionsNoSendMusic)); + } + if (rights.send_docs && defaultBannedRights.send_docs != rights.send_docs) { + if (builder.length() != 0) { + builder.append(", "); + } + builder.append(LocaleController.getString("UserRestrictionsNoSendDocs", R.string.UserRestrictionsNoSendDocs)); + } + if (rights.send_voices && defaultBannedRights.send_voices != rights.send_voices) { + if (builder.length() != 0) { + builder.append(", "); + } + builder.append(LocaleController.getString("UserRestrictionsNoSendVoice", R.string.UserRestrictionsNoSendVoice)); + } + if (rights.send_roundvideos && defaultBannedRights.send_roundvideos != rights.send_roundvideos) { + if (builder.length() != 0) { + builder.append(", "); + } + builder.append(LocaleController.getString("UserRestrictionsNoSendRound", R.string.UserRestrictionsNoSendRound)); + } } if (rights.send_stickers && defaultBannedRights.send_stickers != rights.send_stickers) { if (builder.length() != 0) { @@ -2048,7 +2072,7 @@ private String formatUserPermissions(TLRPC.TL_chatBannedRights rights) { } builder.append(LocaleController.getString("UserRestrictionsNoSendPolls", R.string.UserRestrictionsNoSendPolls)); } - if (rights.embed_links && defaultBannedRights.embed_links != rights.embed_links) { + if (rights.embed_links && !rights.send_plain && defaultBannedRights.embed_links != rights.embed_links) { if (builder.length() != 0) { builder.append(", "); } @@ -2126,7 +2150,7 @@ private int getChannelAdminParticipantType(TLObject participant) { return 0; } else if (participant instanceof TLRPC.TL_channelParticipantAdmin || participant instanceof TLRPC.TL_channelParticipant) { return 1; - } else { + } else { return 2; } } @@ -2976,7 +3000,7 @@ public ListAdapter(Context context) { @Override public boolean isEnabled(RecyclerView.ViewHolder holder) { int viewType = holder.getItemViewType(); - if (viewType == 7) { + if (viewType == 7 || viewType == VIEW_TYPE_EXPANDABLE_SWITCH) { return ChatObject.canBlockUsers(currentChat); } else if (viewType == 0) { ManageChatUserCell cell = (ManageChatUserCell) holder.itemView; @@ -3000,6 +3024,9 @@ public boolean isEnabled(RecyclerView.ViewHolder holder) { return ChatObject.canUserDoAdminAction(currentChat, ChatObject.ACTION_BLOCK_USERS); } } + if (viewType == VIEW_TYPE_INNER_CHECK) { + return true; + } return false; } @@ -3054,6 +3081,7 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType view = new TextSettingsCell(mContext); view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); break; + case 
VIEW_TYPE_EXPANDABLE_SWITCH: case 7: view = new TextCheckCell2(mContext); view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); @@ -3087,23 +3115,31 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType view = chooseView; chooseView.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); chooseView.setOptions( - selectedSlowmode, - LocaleController.getString("SlowmodeOff", R.string.SlowmodeOff), - LocaleController.formatString("SlowmodeSeconds", R.string.SlowmodeSeconds, 10), - LocaleController.formatString("SlowmodeSeconds", R.string.SlowmodeSeconds, 30), - LocaleController.formatString("SlowmodeMinutes", R.string.SlowmodeMinutes, 1), - LocaleController.formatString("SlowmodeMinutes", R.string.SlowmodeMinutes, 5), - LocaleController.formatString("SlowmodeMinutes", R.string.SlowmodeMinutes, 15), - LocaleController.formatString("SlowmodeHours", R.string.SlowmodeHours, 1) + selectedSlowmode, + LocaleController.getString("SlowmodeOff", R.string.SlowmodeOff), + LocaleController.formatString("SlowmodeSeconds", R.string.SlowmodeSeconds, 10), + LocaleController.formatString("SlowmodeSeconds", R.string.SlowmodeSeconds, 30), + LocaleController.formatString("SlowmodeMinutes", R.string.SlowmodeMinutes, 1), + LocaleController.formatString("SlowmodeMinutes", R.string.SlowmodeMinutes, 5), + LocaleController.formatString("SlowmodeMinutes", R.string.SlowmodeMinutes, 15), + LocaleController.formatString("SlowmodeHours", R.string.SlowmodeHours, 1) ); chooseView.setCallback(which -> { if (info == null) { return; } selectedSlowmode = which; -// listViewAdapter.notifyItemChanged(slowmodeInfoRow); + listViewAdapter.notifyItemChanged(slowmodeInfoRow); }); break; + case VIEW_TYPE_INNER_CHECK: + CheckBoxCell checkBoxCell = new CheckBoxCell(mContext, 4, 21, getResourceProvider()); + checkBoxCell.getCheckBoxRound().setDrawBackgroundAsArc(14); + checkBoxCell.getCheckBoxRound().setColor(Theme.key_switch2TrackChecked, Theme.key_radioBackground, Theme.key_checkboxCheck); + checkBoxCell.setEnabled(true); + view = checkBoxCell; + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + break; } return new RecyclerListView.Holder(view); } @@ -3315,38 +3351,39 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { TextSettingsCell settingsCell = (TextSettingsCell) holder.itemView; settingsCell.setTextAndValue(LocaleController.getString("ChannelBlacklist", R.string.ChannelBlacklist), String.format("%d", info != null ? 
info.kicked_count : 0), false); break; + case VIEW_TYPE_EXPANDABLE_SWITCH: case 7: TextCheckCell2 checkCell = (TextCheckCell2) holder.itemView; + boolean animated = checkCell.getTag() != null && (Integer) checkCell.getTag() == position; + checkCell.setTag(position); if (position == changeInfoRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsChangeInfo", R.string.UserRestrictionsChangeInfo), !defaultBannedRights.change_info && !ChatObject.isPublic(currentChat), manageTopicsRow != -1); + checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsChangeInfo", R.string.UserRestrictionsChangeInfo), !defaultBannedRights.change_info && !ChatObject.isPublic(currentChat), manageTopicsRow != -1, animated); } else if (position == addUsersRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsInviteUsers", R.string.UserRestrictionsInviteUsers), !defaultBannedRights.invite_users, true); + checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsInviteUsers", R.string.UserRestrictionsInviteUsers), !defaultBannedRights.invite_users, true, animated); } else if (position == pinMessagesRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsPinMessages", R.string.UserRestrictionsPinMessages), !defaultBannedRights.pin_messages && !ChatObject.isPublic(currentChat), true); + checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsPinMessages", R.string.UserRestrictionsPinMessages), !defaultBannedRights.pin_messages && !ChatObject.isPublic(currentChat), true, animated); } else if (position == sendMessagesRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSend", R.string.UserRestrictionsSend), !defaultBannedRights.send_messages, true); + checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendText", R.string.UserRestrictionsSendText), !defaultBannedRights.send_plain, true, animated); } else if (position == sendMediaRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendMedia", R.string.UserRestrictionsSendMedia), !defaultBannedRights.send_media, true); + int sentMediaCount = getSendMediaSelectedCount(); + checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendMedia", R.string.UserRestrictionsSendMedia), sentMediaCount > 0, true, animated); + checkCell.setCollapseArrow(String.format(Locale.US, "%d/9", sentMediaCount), !sendMediaExpanded, new Runnable() { + @Override + public void run() { + boolean checked = !checkCell.isChecked(); + checkCell.setChecked(checked); + setSendMediaEnabled(checked); + + } + }); } else if (position == sendStickersRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendStickers2", R.string.UserRestrictionsSendStickers2), !defaultBannedRights.send_stickers, true); - } else if (position == sendGamesRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendGames", R.string.UserRestrictionsSendGames), !defaultBannedRights.send_games, true); - } else if (position == sendInlineRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendInline", R.string.UserRestrictionsSendInline), !defaultBannedRights.send_inline, true); - } else if (position == sendGifsRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendGifs", R.string.UserRestrictionsSendGifs), !defaultBannedRights.send_gifs, true); + checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendStickers", 
R.string.UserRestrictionsSendStickers), !defaultBannedRights.send_stickers, true, animated); } else if (position == embedLinksRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsEmbedLinks", R.string.UserRestrictionsEmbedLinks), !defaultBannedRights.embed_links, true); + checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsEmbedLinks", R.string.UserRestrictionsEmbedLinks), !defaultBannedRights.embed_links, true, animated); } else if (position == sendPollsRow) { - checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendPolls", R.string.UserRestrictionsSendPolls), !defaultBannedRights.send_polls, true); + checkCell.setTextAndCheck(LocaleController.getString("UserRestrictionsSendPollsShort", R.string.UserRestrictionsSendPollsShort), !defaultBannedRights.send_polls, true); } else if (position == manageTopicsRow) { - checkCell.setTextAndCheck(LocaleController.getString("CreateTopicsPermission", R.string.CreateTopicsPermission), !defaultBannedRights.manage_topics, false); - } - - if (position == sendMediaRow || position == sendStickersRow || position == sendGamesRow || position == sendInlineRow || position == sendGifsRow || position == embedLinksRow || position == sendPollsRow) { - checkCell.setEnabled(!defaultBannedRights.send_messages && !defaultBannedRights.view_messages); - } else if (position == sendMessagesRow) { - checkCell.setEnabled(!defaultBannedRights.view_messages); + checkCell.setTextAndCheck(LocaleController.getString("CreateTopicsPermission", R.string.CreateTopicsPermission), !defaultBannedRights.manage_topics, false, animated); } if (ChatObject.canBlockUsers(currentChat)) { if (position == addUsersRow && !ChatObject.canUserDoAdminAction(currentChat, ChatObject.ACTION_INVITE) || @@ -3400,6 +3437,32 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { textCell.setTextAndCheck(LocaleController.getString("ChannelHideMembers", R.string.ChannelHideMembers), info != null && info.participants_hidden, false); } break; + case VIEW_TYPE_INNER_CHECK: + CheckBoxCell checkBoxCell = (CheckBoxCell) holder.itemView; + animated = checkBoxCell.getTag() != null && (Integer) checkBoxCell.getTag() == position; + checkBoxCell.setTag(position); + if (position == sendMediaPhotosRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPermissionPhotos", R.string.SendMediaPermissionPhotos), "", !defaultBannedRights.send_photos, true, animated); + } else if (position == sendMediaVideosRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPermissionVideos", R.string.SendMediaPermissionVideos), "", !defaultBannedRights.send_videos, true, animated); + } else if (position == sendMediaStickerGifsRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPermissionStickersGifs", R.string.SendMediaPermissionStickersGifs), "", !defaultBannedRights.send_stickers, true, animated); + } else if (position == sendMediaMusicRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPermissionMusic", R.string.SendMediaPermissionMusic), "", !defaultBannedRights.send_audios, true, animated); + } else if (position == sendMediaFilesRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPermissionFiles", R.string.SendMediaPermissionFiles), "", !defaultBannedRights.send_docs, true, animated); + } else if (position == sendMediaVoiceMessagesRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPermissionVoice", R.string.SendMediaPermissionVoice), "", 
!defaultBannedRights.send_voices, true, animated); + } else if (position == sendMediaVideoMessagesRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPermissionRound", R.string.SendMediaPermissionRound), "", !defaultBannedRights.send_roundvideos, true, animated); + } else if (position == sendMediaEmbededLinksRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaEmbededLinks", R.string.SendMediaEmbededLinks), "", !defaultBannedRights.embed_links && !defaultBannedRights.send_plain, false, animated); + }else if (position == sendPollsRow) { + checkBoxCell.setText(LocaleController.getString("SendMediaPolls", R.string.SendMediaPolls), "", !defaultBannedRights.send_polls, false, animated); + } + // checkBoxCell.setText(getCheckBoxTitle(item.headerName, percents[item.index < 0 ? 8 : item.index], item.index < 0), AndroidUtilities.formatFileSize(item.size), selected, item.index < 0 ? !collapsed : !item.last); + checkBoxCell.setPad(1); + break; } } @@ -3429,8 +3492,7 @@ public int getItemViewType(int position) { } else if (position == removedUsersRow) { return 6; } else if (position == changeInfoRow || position == addUsersRow || position == pinMessagesRow || position == sendMessagesRow || - position == sendMediaRow || position == sendStickersRow || position == sendGamesRow || position == sendInlineRow || - position == sendGifsRow || position == embedLinksRow || position == sendPollsRow || position == manageTopicsRow) { + position == sendStickersRow || position == embedLinksRow || position == manageTopicsRow) { return 7; } else if (position == membersHeaderRow || position == contactsHeaderRow || position == botHeaderRow || position == loadingHeaderRow) { return 8; @@ -3442,6 +3504,10 @@ public int getItemViewType(int position) { return 11; } else if (position == antiSpamRow || position == hideMembersRow) { return 12; + } else if (isExpandableSendMediaRow(position)) { + return VIEW_TYPE_INNER_CHECK; + } else if (position == sendMediaRow) { + return VIEW_TYPE_EXPANDABLE_SWITCH; } return 0; } @@ -3458,6 +3524,26 @@ public TLObject getItem(int position) { } } + private void setSendMediaEnabled(boolean enabled) { + defaultBannedRights.send_media = !enabled; + defaultBannedRights.send_photos = !enabled; + defaultBannedRights.send_videos = !enabled; + defaultBannedRights.send_stickers = !enabled; + defaultBannedRights.send_audios = !enabled; + defaultBannedRights.send_docs = !enabled; + defaultBannedRights.send_voices = !enabled; + defaultBannedRights.send_roundvideos = !enabled; + defaultBannedRights.embed_links = !enabled; + defaultBannedRights.send_polls = !enabled; + AndroidUtilities.updateVisibleRows(listView); + } + + private boolean isExpandableSendMediaRow(int position) { + return position == sendMediaPhotosRow || position == sendMediaVideosRow || position == sendMediaStickerGifsRow || + position == sendMediaMusicRow || position == sendMediaFilesRow || position == sendMediaVoiceMessagesRow || + position == sendMediaVideoMessagesRow || position == sendMediaEmbededLinksRow || position == sendPollsRow; + } + public DiffCallback saveState() { DiffCallback diffCallback = new DiffCallback(); diffCallback.oldRowCount = rowCount; @@ -3601,6 +3687,42 @@ private void put(int id, int position, SparseIntArray sparseIntArray) { } } + private int getSendMediaSelectedCount() { + return getSendMediaSelectedCount(defaultBannedRights); + } + + public static int getSendMediaSelectedCount(TLRPC.TL_chatBannedRights bannedRights) { + int i = 0; + if (!bannedRights.send_photos) { + i++; + } + if 
(!bannedRights.send_videos) { + i++; + } + if (!bannedRights.send_stickers) { + i++; + } + if (!bannedRights.send_audios) { + i++; + } + if (!bannedRights.send_docs) { + i++; + } + if (!bannedRights.send_voices) { + i++; + } + if (!bannedRights.send_roundvideos) { + i++; + } + if (!bannedRights.embed_links && !bannedRights.send_plain) { + i++; + } + if (!bannedRights.send_polls) { + i++; + } + return i; + } + @Override public ArrayList getThemeDescriptions() { ArrayList themeDescriptions = new ArrayList<>(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChatsWidgetConfigActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChatsWidgetConfigActivity.java index 5698a6d740..bc87d74f8e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChatsWidgetConfigActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChatsWidgetConfigActivity.java @@ -23,7 +23,7 @@ protected boolean handleIntent(Intent intent, boolean isNew, boolean restore, bo if (creatingAppWidgetId != AppWidgetManager.INVALID_APPWIDGET_ID) { Bundle args = new Bundle(); args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 10); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_WIDGET); args.putBoolean("allowSwitchAccount", true); EditWidgetActivity fragment = new EditWidgetActivity(EditWidgetActivity.TYPE_CHATS, creatingAppWidgetId); fragment.setDelegate(dialogs -> { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChooseSpeedLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/ChooseSpeedLayout.java index a58e10b54f..de2350010e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChooseSpeedLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChooseSpeedLayout.java @@ -1,17 +1,34 @@ package org.telegram.ui; import android.content.Context; +import android.view.Gravity; +import android.view.View; +import android.widget.FrameLayout; +import android.widget.LinearLayout; +import androidx.core.graphics.ColorUtils; +import androidx.core.math.MathUtils; + +import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.LocaleController; +import org.telegram.messenger.MediaController; import org.telegram.messenger.R; import org.telegram.ui.ActionBar.ActionBarMenuItem; +import org.telegram.ui.ActionBar.ActionBarMenuSlider; import org.telegram.ui.ActionBar.ActionBarMenuSubItem; import org.telegram.ui.ActionBar.ActionBarPopupWindow; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.PopupSwipeBackLayout; +import org.telegram.ui.Components.SpeedIconDrawable; public class ChooseSpeedLayout { ActionBarPopupWindow.ActionBarPopupWindowLayout speedSwipeBackLayout; + ActionBarMenuSlider.SpeedSlider slider; + + private static final float MIN_SPEED = 0.2f; + private static final float MAX_SPEED = 2.5f; ActionBarMenuSubItem[] speedItems = new ActionBarMenuSubItem[5]; public ChooseSpeedLayout(Context context, PopupSwipeBackLayout swipeBackLayout, Callback callback) { @@ -25,62 +42,111 @@ public ChooseSpeedLayout(Context context, PopupSwipeBackLayout swipeBackLayout, backItem.setColors(0xfffafafa, 0xfffafafa); backItem.setSelectorColor(0x0fffffff); + FrameLayout gap = new FrameLayout(context) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + }; + gap.setMinimumWidth(AndroidUtilities.dp(196)); + gap.setBackgroundColor(0xff181818); + speedSwipeBackLayout.addView(gap); + LinearLayout.LayoutParams 
layoutParams = (LinearLayout.LayoutParams) gap.getLayoutParams(); + if (LocaleController.isRTL) { + layoutParams.gravity = Gravity.RIGHT; + } + layoutParams.width = LayoutHelper.MATCH_PARENT; + layoutParams.height = AndroidUtilities.dp(8); + gap.setLayoutParams(layoutParams); + + slider = new ActionBarMenuSlider.SpeedSlider(context, null); + slider.setMinimumWidth(AndroidUtilities.dp(196)); + slider.setDrawShadow(false); + slider.setBackgroundColor(0xff222222); + slider.setTextColor(0xffffffff); + slider.setOnValueChange((value, isFinal) -> { + final float speed = MIN_SPEED + (MAX_SPEED - MIN_SPEED) * value; + callback.onSpeedSelected(speed, isFinal, false); + }); + speedSwipeBackLayout.addView(slider, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 44)); + + gap = new FrameLayout(context) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + }; + gap.setMinimumWidth(AndroidUtilities.dp(196)); + gap.setBackgroundColor(0xff181818); + speedSwipeBackLayout.addView(gap); + layoutParams = (LinearLayout.LayoutParams) gap.getLayoutParams(); + if (LocaleController.isRTL) { + layoutParams.gravity = Gravity.RIGHT; + } + layoutParams.width = LayoutHelper.MATCH_PARENT; + layoutParams.height = AndroidUtilities.dp(8); + gap.setLayoutParams(layoutParams); + ActionBarMenuSubItem item = ActionBarMenuItem.addItem(speedSwipeBackLayout, R.drawable.msg_speed_0_2, LocaleController.getString("SpeedVerySlow", R.string.SpeedVerySlow), false, null); item.setColors(0xfffafafa, 0xfffafafa); item.setOnClickListener((view) -> { - callback.onSpeedSelected(0.25f); + callback.onSpeedSelected(0.2f, true, true); }); item.setSelectorColor(0x0fffffff); speedItems[0] = item; - item = ActionBarMenuItem.addItem(speedSwipeBackLayout, R.drawable.msg_speed_0_5, LocaleController.getString("SpeedSlow", R.string.SpeedSlow), false, null); + item = ActionBarMenuItem.addItem(speedSwipeBackLayout, R.drawable.msg_speed_slow, LocaleController.getString("SpeedSlow", R.string.SpeedSlow), false, null); item.setColors(0xfffafafa, 0xfffafafa); item.setOnClickListener((view) -> { - callback.onSpeedSelected(0.5f); + callback.onSpeedSelected(0.5f, true, true); }); item.setSelectorColor(0x0fffffff); speedItems[1] = item; - item = ActionBarMenuItem.addItem(speedSwipeBackLayout, R.drawable.msg_speed_1, LocaleController.getString("SpeedNormal", R.string.SpeedNormal), false, null); + item = ActionBarMenuItem.addItem(speedSwipeBackLayout, R.drawable.msg_speed_normal, LocaleController.getString("SpeedNormal", R.string.SpeedNormal), false, null); item.setColors(0xfffafafa, 0xfffafafa); item.setOnClickListener((view) -> { - callback.onSpeedSelected(1f); + callback.onSpeedSelected(1f, true, true); }); item.setSelectorColor(0x0fffffff); speedItems[2] = item; - item = ActionBarMenuItem.addItem(speedSwipeBackLayout, R.drawable.msg_speed_1_5, LocaleController.getString("SpeedFast", R.string.SpeedFast), false, null); + item = ActionBarMenuItem.addItem(speedSwipeBackLayout, R.drawable.msg_speed_fast, LocaleController.getString("SpeedFast", R.string.SpeedFast), false, null); item.setColors(0xfffafafa, 0xfffafafa); item.setOnClickListener((view) -> { - callback.onSpeedSelected(1.5f); + callback.onSpeedSelected(1.5f, true, true); }); item.setSelectorColor(0x0fffffff); speedItems[3] = item; - item = ActionBarMenuItem.addItem(speedSwipeBackLayout, R.drawable.msg_speed_2, LocaleController.getString("SpeedVeryFast", R.string.SpeedVeryFast), false, null); + item = 
ActionBarMenuItem.addItem(speedSwipeBackLayout, R.drawable.msg_speed_superfast, LocaleController.getString("SpeedVeryFast", R.string.SpeedVeryFast), false, null); item.setColors(0xfffafafa, 0xfffafafa); item.setOnClickListener((view) -> { - callback.onSpeedSelected(2f); + callback.onSpeedSelected(2f, true, true); }); item.setSelectorColor(0x0fffffff); speedItems[4] = item; } - public void update(float currentVideoSpeed) { + public void update(float currentVideoSpeed, boolean isFinal) { for (int a = 0; a < speedItems.length; a++) { - if (a == 0 && Math.abs(currentVideoSpeed - 0.25f) < 0.001f || - a == 1 && Math.abs(currentVideoSpeed - 0.5f) < 0.001f || - a == 2 && Math.abs(currentVideoSpeed - 1.0f) < 0.001f || - a == 3 && Math.abs(currentVideoSpeed - 1.5f) < 0.001f || - a == 4 && Math.abs(currentVideoSpeed - 2.0f) < 0.001f) { + if (isFinal && ( + a == 0 && Math.abs(currentVideoSpeed - 0.2f) < 0.01f || + a == 1 && Math.abs(currentVideoSpeed - 0.5f) < 0.1f || + a == 2 && Math.abs(currentVideoSpeed - 1.0f) < 0.1f || + a == 3 && Math.abs(currentVideoSpeed - 1.5f) < 0.1f || + a == 4 && Math.abs(currentVideoSpeed - 2.0f) < 0.1f + )) { speedItems[a].setColors(0xff6BB6F9, 0xff6BB6F9); } else { speedItems[a].setColors(0xfffafafa, 0xfffafafa); } } + + slider.setSpeed(currentVideoSpeed, true); } public interface Callback { - void onSpeedSelected(float speed); + void onSpeedSelected(float speed, boolean isFinal, boolean closeMenu); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/CodeFieldContainer.java b/TMessagesProj/src/main/java/org/telegram/ui/CodeFieldContainer.java index 2353ad3ac4..3271e21bd2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/CodeFieldContainer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/CodeFieldContainer.java @@ -109,6 +109,11 @@ protected boolean drawChild(Canvas canvas, View child, long drawingTime) { public void setNumbersCount(int length, int currentType) { if (codeField == null || codeField.length != length) { + if (codeField != null) { + for (CodeNumberField f : codeField) { + removeView(f); + } + } codeField = new CodeNumberField[length]; for (int a = 0; a < length; a++) { final int num = a; @@ -119,6 +124,9 @@ public boolean dispatchKeyEvent(KeyEvent event) { return false; } int keyCode = event.getKeyCode(); + if (num >= codeField.length) { + return false; + } if (event.getAction() == KeyEvent.ACTION_UP) { if (keyCode == KeyEvent.KEYCODE_DEL && codeField[num].length() == 1) { codeField[num].startExitAnimation(); @@ -215,14 +223,16 @@ public void afterTextChanged(Editable s) { s.replace(0, len, text.substring(a, a + 1)); } else { n++; - codeField[num + a].setText(text.substring(a, a + 1)); + if (num + a < codeField.length) { + codeField[num + a].setText(text.substring(a, a + 1)); + } } } ignoreOnTextChange = false; } - if (n != length - 1) { + if (n + 1 >= 0 && n + 1 < codeField.length) { codeField[n + 1].setSelection(codeField[n + 1].length()); codeField[n + 1].requestFocus(); } @@ -271,6 +281,9 @@ public void setText(String code) { } public void setText(String code, boolean fromPaste) { + if (codeField == null) { + return; + } int startFrom = 0; if (fromPaste) { for (int i = 0; i < codeField.length; i++) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java index d00ba25819..2a274c443d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java @@ -178,6 +178,7 @@ public static Dialog createBackgroundActivityDialog(Context ctx) { } }) .setNegativeButton(LocaleController.getString("ContactsPermissionAlertNotNow", R.string.ContactsPermissionAlertNotNow), null) + .setOnDismissListener(dialog -> SharedConfig.BackgroundActivityPrefs.increaseDismissedCount()) .create(); } @@ -298,7 +299,7 @@ public static Dialog processError(int currentAccount, TLRPC.TL_error error, Base request instanceof TLRPC.TL_phone_inviteToGroupCall) { if (fragment != null && error.text.equals("CHANNELS_TOO_MUCH")) { if (fragment.getParentActivity() != null) { - fragment.showDialog(new LimitReachedBottomSheet(fragment, fragment.getParentActivity(), LimitReachedBottomSheet.TYPE_TO_MANY_COMMUNITIES, currentAccount)); + fragment.showDialog(new LimitReachedBottomSheet(fragment, fragment.getParentActivity(), LimitReachedBottomSheet.TYPE_TO0_MANY_COMMUNITIES, currentAccount)); } else { if (request instanceof TLRPC.TL_channels_joinChannel || request instanceof TLRPC.TL_channels_inviteToChannel) { fragment.presentFragment(new TooManyCommunitiesActivity(TooManyCommunitiesActivity.TYPE_JOIN)); @@ -317,7 +318,7 @@ public static Dialog processError(int currentAccount, TLRPC.TL_error error, Base } else if (request instanceof TLRPC.TL_messages_createChat) { if (error.text.equals("CHANNELS_TOO_MUCH")) { if (fragment.getParentActivity() != null) { - fragment.showDialog(new LimitReachedBottomSheet(fragment, fragment.getParentActivity(), LimitReachedBottomSheet.TYPE_TO_MANY_COMMUNITIES, currentAccount)); + fragment.showDialog(new LimitReachedBottomSheet(fragment, fragment.getParentActivity(), LimitReachedBottomSheet.TYPE_TO0_MANY_COMMUNITIES, currentAccount)); } else { fragment.presentFragment(new TooManyCommunitiesActivity(TooManyCommunitiesActivity.TYPE_CREATE)); } @@ -330,7 +331,7 @@ public static Dialog processError(int currentAccount, TLRPC.TL_error error, Base } else if (request instanceof TLRPC.TL_channels_createChannel) { if (error.text.equals("CHANNELS_TOO_MUCH")) { if (fragment.getParentActivity() != null) { - fragment.showDialog(new LimitReachedBottomSheet(fragment, fragment.getParentActivity(), LimitReachedBottomSheet.TYPE_TO_MANY_COMMUNITIES, currentAccount)); + fragment.showDialog(new LimitReachedBottomSheet(fragment, fragment.getParentActivity(), LimitReachedBottomSheet.TYPE_TO0_MANY_COMMUNITIES, currentAccount)); } else { fragment.presentFragment(new TooManyCommunitiesActivity(TooManyCommunitiesActivity.TYPE_CREATE)); } @@ -354,16 +355,69 @@ public static Dialog processError(int currentAccount, TLRPC.TL_error error, Base request instanceof TLRPC.TL_messages_forwardMessages || request instanceof TLRPC.TL_messages_sendMultiMedia || request instanceof TLRPC.TL_messages_sendScheduledMessages) { - switch (error.text) { - case "PEER_FLOOD": - NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.needShowAlert, 0); - break; - case "USER_BANNED_IN_CHANNEL": - NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.needShowAlert, 5); - break; - case "SCHEDULE_TOO_MUCH": - showSimpleToast(fragment, LocaleController.getString("MessageScheduledLimitReached", R.string.MessageScheduledLimitReached)); - break; + long dialogId = 0; + if (request instanceof TLRPC.TL_messages_sendMessage) { + dialogId = DialogObject.getPeerDialogId(((TLRPC.TL_messages_sendMessage) request).peer); + } else if (request instanceof TLRPC.TL_messages_sendMedia) { + 
dialogId = DialogObject.getPeerDialogId(((TLRPC.TL_messages_sendMedia) request).peer); + } else if (request instanceof TLRPC.TL_messages_sendInlineBotResult) { + dialogId = DialogObject.getPeerDialogId(((TLRPC.TL_messages_sendInlineBotResult) request).peer); + } else if (request instanceof TLRPC.TL_messages_forwardMessages) { + dialogId = DialogObject.getPeerDialogId(((TLRPC.TL_messages_forwardMessages) request).to_peer); + } else if (request instanceof TLRPC.TL_messages_sendMultiMedia) { + dialogId = DialogObject.getPeerDialogId(((TLRPC.TL_messages_sendMultiMedia) request).peer); + } else if (request instanceof TLRPC.TL_messages_sendScheduledMessages) { + dialogId = DialogObject.getPeerDialogId(((TLRPC.TL_messages_sendScheduledMessages) request).peer); + } + if (error.text != null && error.text.startsWith("CHAT_SEND_") && error.text.endsWith("FORBIDDEN")) { + String errorText = error.text; + TLRPC.Chat chat = dialogId < 0 ? MessagesController.getInstance(currentAccount).getChat(-dialogId) : null; + switch (error.text) { + case "CHAT_SEND_PLAIN_FORBIDDEN": + errorText = ChatObject.getRestrictedErrorText(chat, ChatObject.ACTION_SEND_PLAIN); + break; + case "CHAT_SEND_PHOTOS_FORBIDDEN": + errorText = ChatObject.getRestrictedErrorText(chat, ChatObject.ACTION_SEND_PHOTO); + break; + case "CHAT_SEND_VIDEOS_FORBIDDEN": + errorText = ChatObject.getRestrictedErrorText(chat, ChatObject.ACTION_SEND_VIDEO); + break; + case "CHAT_SEND_ROUNDVIDEOS_FORBIDDEN": + errorText = ChatObject.getRestrictedErrorText(chat, ChatObject.ACTION_SEND_ROUND); + break; + case "CHAT_SEND_DOCS_FORBIDDEN": + errorText = ChatObject.getRestrictedErrorText(chat, ChatObject.ACTION_SEND_DOCUMENTS); + break; + case "CHAT_SEND_VOICES_FORBIDDEN": + errorText = ChatObject.getRestrictedErrorText(chat, ChatObject.ACTION_SEND_VOICE); + break; + case "CHAT_SEND_AUDIOS_FORBIDDEN": + errorText = ChatObject.getRestrictedErrorText(chat, ChatObject.ACTION_SEND_MUSIC); + break; + case "CHAT_SEND_STICKERS_FORBIDDEN": + errorText = ChatObject.getRestrictedErrorText(chat, ChatObject.ACTION_SEND_STICKERS); + break; + case "CHAT_SEND_GIFS_FORBIDDEN": + errorText = ChatObject.getRestrictedErrorText(chat, ChatObject.ACTION_SEND_GIFS); + break; + case "CHAT_SEND_POLL_FORBIDDEN": + errorText = ChatObject.getRestrictedErrorText(chat, ChatObject.ACTION_SEND_POLLS); + break; + + } + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.showBulletin, Bulletin.TYPE_ERROR, errorText); + } else { + switch (error.text) { + case "PEER_FLOOD": + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.needShowAlert, 0); + break; + case "USER_BANNED_IN_CHANNEL": + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.needShowAlert, 5); + break; + case "SCHEDULE_TOO_MUCH": + showSimpleToast(fragment, LocaleController.getString("MessageScheduledLimitReached", R.string.MessageScheduledLimitReached)); + break; + } } } else if (request instanceof TLRPC.TL_messages_importChatInvite) { if (error.text.startsWith("FLOOD_WAIT")) { @@ -372,7 +426,7 @@ public static Dialog processError(int currentAccount, TLRPC.TL_error error, Base showSimpleAlert(fragment, LocaleController.getString("JoinToGroupErrorFull", R.string.JoinToGroupErrorFull)); } else if (error.text.equals("CHANNELS_TOO_MUCH")) { if (fragment.getParentActivity() != null) { - fragment.showDialog(new LimitReachedBottomSheet(fragment, fragment.getParentActivity(), LimitReachedBottomSheet.TYPE_TO_MANY_COMMUNITIES, 
currentAccount)); + fragment.showDialog(new LimitReachedBottomSheet(fragment, fragment.getParentActivity(), LimitReachedBottomSheet.TYPE_TO0_MANY_COMMUNITIES, currentAccount)); } else { fragment.presentFragment(new TooManyCommunitiesActivity(TooManyCommunitiesActivity.TYPE_JOIN)); } @@ -755,7 +809,6 @@ public static void showBlockReportSpamReplyAlert(ChatActivity fragment, MessageO cells[num].setChecked(!cells[num].isChecked(), true); }); - builder.setCustomViewOffset(12); builder.setView(linearLayout); builder.setPositiveButton(LocaleController.getString("BlockAndDeleteReplies", R.string.BlockAndDeleteReplies), (dialogInterface, i) -> { @@ -772,7 +825,10 @@ public static void showBlockReportSpamReplyAlert(ChatActivity fragment, MessageO request.report_spam = true; if (fragment.getParentActivity() != null) { if (fragment instanceof ChatActivity) { - fragment.getUndoView().showWithAction(0, UndoView.ACTION_REPORT_SENT, null); + UndoView undoView = fragment.getUndoView(); + if (undoView != null) { + undoView.showWithAction(0, UndoView.ACTION_REPORT_SENT, null); + } } else if (fragment != null) { BulletinFactory.of(fragment).createReportSent(resourcesProvider).show(); } else { @@ -791,7 +847,7 @@ public static void showBlockReportSpamReplyAlert(ChatActivity fragment, MessageO fragment.showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } @@ -832,7 +888,6 @@ public static void showBlockReportSpamAlert(BaseFragment fragment, long dialog_i cells[num].setChecked(!cells[num].isChecked(), true); }); } - builder.setCustomViewOffset(12); builder.setView(linearLayout); } else { cells = null; @@ -881,7 +936,7 @@ public static void showBlockReportSpamAlert(BaseFragment fragment, long dialog_i fragment.showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } @@ -1131,7 +1186,7 @@ public static void showOpenUrlAlert(BaseFragment fragment, String url, boolean p // String host = IDN.toASCII(uri.getHost(), IDN.ALLOW_UNASSIGNED); // urlFinal = uri.getScheme() + "://" + host + uri.getPath(); // } catch (Exception e) { -// FileLog.e(e); +// FileLog.e(e, false); // urlFinal = url; // } // } else { @@ -1402,6 +1457,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { } } }; + builder.setCustomViewOffset(6); builder.setView(frameLayout); AvatarDrawable avatarDrawable = new AvatarDrawable(); @@ -1648,7 +1704,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { fragment.showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } @@ -1759,7 +1815,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { fragment.showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } @@ -2220,6 +2276,36 @@ public static ActionBarPopupWindow createSimplePopup(BaseFragment 
fragment, View return popupWindow; } + public static void checkRestrictedInviteUsers(int currentAccount, TLRPC.Chat currentChat, TLRPC.Updates updates) { + if (updates == null || updates.updates == null || currentChat == null) { + return; + } + ArrayList arrayList = null; + for (int i = 0; i < updates.updates.size(); i++) { + if (updates.updates.get(i) instanceof TLRPC.TL_updateGroupInvitePrivacyForbidden) { + TLRPC.TL_updateGroupInvitePrivacyForbidden restrictedUpdate = (TLRPC.TL_updateGroupInvitePrivacyForbidden) updates.updates.get(i); + TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(restrictedUpdate.user_id); + if (user != null) { + if (arrayList == null) { + arrayList = new ArrayList<>(); + } + arrayList.add(user); + } + } + } + if (arrayList != null) { + ArrayList finalArrayList = arrayList; + AndroidUtilities.runOnUIThread(() -> { + BaseFragment lastFragment = LaunchActivity.getLastFragment(); + if (lastFragment != null && lastFragment.getParentActivity() != null) { + LimitReachedBottomSheet restricterdUsersBottomSheet = new LimitReachedBottomSheet(lastFragment, lastFragment.getParentActivity(), LimitReachedBottomSheet.TYPE_ADD_MEMBERS_RESTRICTED, currentAccount); + restricterdUsersBottomSheet.setRestrictedUsers(currentChat, finalArrayList); + restricterdUsersBottomSheet.show(); + } + }, 200); + } + } + public interface BlockDialogCallback { void run(boolean report, boolean delete); } @@ -2278,7 +2364,7 @@ public static void createBlockDialogAlert(BaseFragment fragment, int count, bool fragment.showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } @@ -3077,10 +3163,11 @@ public CharSequence getAccessibilityClassName() { } }); final NumberPicker.OnValueChangeListener onValueChangeListener = (picker, oldVal, newVal) -> { - if (!NekoConfig.disableVibration.Bool()) { - try { - container.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignore) {} + try { + if (!NekoConfig.disableVibration.Bool()) + container.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } catch (Exception ignore) { + } checkScheduleDate(null, null, 0, dayPicker, hourPicker, minutePicker); }; @@ -3272,15 +3359,16 @@ public CharSequence getAccessibilityClassName() { buttonTextView.setText(LocaleController.getString("DisableAutoDeleteTimer", R.string.DisableAutoDeleteTimer)); final NumberPicker.OnValueChangeListener onValueChangeListener = (picker, oldVal, newVal) -> { - if (!NekoConfig.disableVibration.Bool()) { - try { + try { if (newVal == 0) { buttonTextView.setText(LocaleController.getString("DisableAutoDeleteTimer", R.string.DisableAutoDeleteTimer)); } else { buttonTextView.setText(LocaleController.getString("SetAutoDeleteTimer", R.string.SetAutoDeleteTimer)); } + if (!NekoConfig.disableVibration.Bool()) container.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignore) {} + } catch (Exception ignore) { + } }; numberPicker.setOnValueChangedListener(onValueChangeListener); @@ -3416,10 +3504,11 @@ public CharSequence getAccessibilityClassName() { container.addView(buttonTextView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 48, Gravity.LEFT | 
Gravity.BOTTOM, 16, 15, 16, 16)); final NumberPicker.OnValueChangeListener onValueChangeListener = (picker, oldVal, newVal) -> { - if (!NekoConfig.disableVibration.Bool()) { - try { + try { + if (!NekoConfig.disableVibration.Bool()) container.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignore) {} + } catch (Exception ignore) { + } }; times.setOnValueChangedListener(onValueChangeListener); @@ -3568,10 +3657,11 @@ public CharSequence getAccessibilityClassName() { linearLayout.addView(numberPicker, LayoutHelper.createLinear(0, 54 * 5, 1f)); final NumberPicker.OnValueChangeListener onValueChangeListener = (picker, oldVal, newVal) -> { - if (!NekoConfig.disableVibration.Bool()) { - try { + try { + if (!NekoConfig.disableVibration.Bool()) container.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignore) {} + } catch (Exception ignore) { + } }; numberPicker.setOnValueChangedListener(onValueChangeListener); @@ -4043,7 +4133,10 @@ protected void onSend(int type, String message) { TLRPC.InputPeer peer = MessagesController.getInstance(UserConfig.selectedAccount).getInputPeer(dialog_id); sendReport(peer, type, message, ids); if (parentFragment instanceof ChatActivity) { - ((ChatActivity) parentFragment).getUndoView().showWithAction(0, UndoView.ACTION_REPORT_SENT, null); + UndoView undoView = ((ChatActivity) parentFragment).getUndoView(); + if (undoView != null) { + undoView.showWithAction(0, UndoView.ACTION_REPORT_SENT, null); + } } } }); @@ -4093,7 +4186,10 @@ protected void onSend(int type, String message) { } ConnectionsManager.getInstance(UserConfig.selectedAccount).sendRequest(req, (response, error) -> AlertUtil.showToast(error)); if (parentFragment instanceof ChatActivity) { - ((ChatActivity) parentFragment).getUndoView().showWithAction(0, UndoView.ACTION_REPORT_SENT, null); + UndoView undoView = ((ChatActivity) parentFragment).getUndoView(); + if (undoView != null) { + undoView.showWithAction(0, UndoView.ACTION_REPORT_SENT, null); + } } else { BulletinFactory.of(parentFragment).createReportSent(resourcesProvider).show(); @@ -4141,7 +4237,7 @@ public static void showSendMediaAlert(int result, final BaseFragment fragment) { } public static void showSendMediaAlert(int result, final BaseFragment fragment, Theme.ResourcesProvider resourcesProvider) { - if (result == 0) { + if (result == 0 || fragment == null || fragment.getParentActivity() == null) { return; } AlertDialog.Builder builder = new AlertDialog.Builder(fragment.getParentActivity(), resourcesProvider); @@ -4162,6 +4258,30 @@ public static void showSendMediaAlert(int result, final BaseFragment fragment, T builder.setMessage(LocaleController.getString("ErrorSendRestrictedPrivacyVoiceMessages", R.string.ErrorSendRestrictedPrivacyVoiceMessages)); } else if (result == 8) { builder.setMessage(LocaleController.getString("ErrorSendRestrictedPrivacyVideoMessages", R.string.ErrorSendRestrictedPrivacyVideoMessages)); + } else if (result == 9) { + builder.setMessage(LocaleController.getString("ErrorSendRestrictedPrivacyVideo", R.string.ErrorSendRestrictedVideoAll)); + } else if (result == 10) { + builder.setMessage(LocaleController.getString("ErrorSendRestrictedPrivacyPhoto", R.string.ErrorSendRestrictedPhotoAll)); + } else if (result == 11) { + builder.setMessage(LocaleController.getString("ErrorSendRestrictedVideo", R.string.ErrorSendRestrictedVideo)); + } else if (result == 
12) { + builder.setMessage(LocaleController.getString("ErrorSendRestrictedPhoto", R.string.ErrorSendRestrictedPhoto)); + } else if (result == 13) { + builder.setMessage(LocaleController.getString("ErrorSendRestrictedVoiceAll", R.string.ErrorSendRestrictedVoiceAll)); + } else if (result == 14) { + builder.setMessage(LocaleController.getString("ErrorSendRestrictedVoice", R.string.ErrorSendRestrictedVoice)); + } else if (result == 15) { + builder.setMessage(LocaleController.getString("ErrorSendRestrictedRoundAll", R.string.ErrorSendRestrictedRoundAll)); + } else if (result == 16) { + builder.setMessage(LocaleController.getString("ErrorSendRestrictedRound", R.string.ErrorSendRestrictedRound)); + } else if (result == 17) { + builder.setMessage(LocaleController.getString("ErrorSendRestrictedDocumentsAll", R.string.ErrorSendRestrictedDocumentsAll)); + } else if (result == 18) { + builder.setMessage(LocaleController.getString("ErrorSendRestrictedDocuments", R.string.ErrorSendRestrictedDocuments)); + } else if (result == 19) { + builder.setMessage(LocaleController.getString("ErrorSendRestrictedMusicAll", R.string.ErrorSendRestrictedMusicAll)); + } else if (result == 20) { + builder.setMessage(LocaleController.getString("ErrorSendRestrictedMusic", R.string.ErrorSendRestrictedMusic)); } builder.setPositiveButton(LocaleController.getString("OK", R.string.OK), null); @@ -5468,7 +5588,7 @@ public static void createDeleteMessagesAlert(BaseFragment fragment, TLRPC.User u fragment.showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } @@ -5479,7 +5599,7 @@ public static void createThemeCreateDialog(BaseFragment fragment, int type, Them Context context = fragment.getParentActivity(); final EditTextBoldCursor editText = new EditTextBoldCursor(context); editText.setBackground(null); - editText.setLineColors(Theme.getColor(Theme.key_dialogInputField), Theme.getColor(Theme.key_dialogInputFieldActivated), Theme.getColor(Theme.key_dialogTextRed2)); + editText.setLineColors(Theme.getColor(Theme.key_dialogInputField), Theme.getColor(Theme.key_dialogInputFieldActivated), Theme.getColor(Theme.key_dialogTextRed)); AlertDialog.Builder builder = new AlertDialog.Builder(context); builder.setTitle(LocaleController.getString("NewTheme", R.string.NewTheme)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedEmojiDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedEmojiDrawable.java index 75729d7395..c91221cbf1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedEmojiDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedEmojiDrawable.java @@ -10,6 +10,9 @@ import android.graphics.drawable.Drawable; import android.os.Looper; import android.text.TextUtils; +import android.util.Log; +import android.util.LongSparseArray; +import android.util.SparseArray; import android.view.View; import android.view.animation.OvershootInterpolator; @@ -28,6 +31,7 @@ import org.telegram.messenger.ImageLoader; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesStorage; import org.telegram.messenger.SharedConfig; @@ -38,6 +42,7 @@ import org.telegram.tgnet.TLRPC; import 
org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.Premium.PremiumLockIconView; +import org.telegram.ui.SelectAnimatedEmojiDialog; import java.io.File; import java.util.ArrayList; @@ -61,10 +66,14 @@ public class AnimatedEmojiDrawable extends Drawable { public static final int CACHE_TYPE_FORUM_TOPIC = 10; public static final int CACHE_TYPE_FORUM_TOPIC_LARGE = 11; public static final int CACHE_TYPE_RENDERING_VIDEO = 12; + public static final int CACHE_TYPE_ALERT_PREVIEW_STATIC = 13; + public static final int CACHE_TYPE_AVATAR_CONSTRUCTOR_PREVIEW = 14; + public static final int CACHE_TYPE_AVATAR_CONSTRUCTOR_PREVIEW2 = 15; public int rawDrawIndex; - private static HashMap> globalEmojiCache; + private static SparseArray> globalEmojiCache; + private static boolean LOG_MEMORY_LEAK = false; @NonNull public static AnimatedEmojiDrawable make(int account, int cacheType, long documentId) { @@ -74,12 +83,12 @@ public static AnimatedEmojiDrawable make(int account, int cacheType, long docume @NonNull public static AnimatedEmojiDrawable make(int account, int cacheType, long documentId, String absolutePath) { if (globalEmojiCache == null) { - globalEmojiCache = new HashMap<>(); + globalEmojiCache = new SparseArray<>(); } final int key = Objects.hash(account, cacheType); - HashMap cache = globalEmojiCache.get(key); + LongSparseArray cache = globalEmojiCache.get(key); if (cache == null) { - globalEmojiCache.put(key, cache = new HashMap<>()); + globalEmojiCache.put(key, cache = new LongSparseArray<>()); } AnimatedEmojiDrawable drawable = cache.get(documentId); if (drawable == null) { @@ -91,12 +100,12 @@ public static AnimatedEmojiDrawable make(int account, int cacheType, long docume @NonNull public static AnimatedEmojiDrawable make(int account, int cacheType, @NonNull TLRPC.Document document) { if (globalEmojiCache == null) { - globalEmojiCache = new HashMap<>(); + globalEmojiCache = new SparseArray<>(); } final int key = Objects.hash(account, cacheType); - HashMap cache = globalEmojiCache.get(key); + LongSparseArray cache = globalEmojiCache.get(key); if (cache == null) { - globalEmojiCache.put(key, cache = new HashMap<>()); + globalEmojiCache.put(key, cache = new LongSparseArray<>()); } AnimatedEmojiDrawable drawable = cache.get(document.id); if (drawable == null) { @@ -163,7 +172,9 @@ public void fetchDocument(long id, ReceivedDocument onDone) { if (emojiDocumentsCache != null) { TLRPC.Document cacheDocument = emojiDocumentsCache.get(id); if (cacheDocument != null) { - onDone.run(cacheDocument); + if (onDone != null) { + onDone.run(cacheDocument); + } return; } } @@ -171,19 +182,18 @@ public void fetchDocument(long id, ReceivedDocument onDone) { if (!checkThread()) { return; } - if (onDone != null) { - if (loadingDocuments == null) { - loadingDocuments = new HashMap<>(); - } - ArrayList callbacks = loadingDocuments.get(id); - if (callbacks != null) { - callbacks.add(onDone); - return; - } - callbacks = new ArrayList<>(1); + if (loadingDocuments == null) { + loadingDocuments = new HashMap<>(); + } + ArrayList callbacks = loadingDocuments.get(id); + if (callbacks != null) { callbacks.add(onDone); - loadingDocuments.put(id, callbacks); + return; } + callbacks = new ArrayList<>(1); + callbacks.add(onDone); + loadingDocuments.put(id, callbacks); + if (toFetchDocuments == null) { toFetchDocuments = new HashSet<>(); } @@ -210,8 +220,12 @@ private boolean checkThread() { } private void loadFromDatabase(ArrayList emojiToLoad) { - 
MessagesStorage.getInstance(currentAccount).getStorageQueue().postRunnable(() -> { - SQLiteDatabase database = MessagesStorage.getInstance(currentAccount).getDatabase(); + MessagesStorage messagesStorage = MessagesStorage.getInstance(currentAccount); + messagesStorage.getStorageQueue().postRunnable(() -> { + SQLiteDatabase database = messagesStorage.getDatabase(); + if (database == null) { + return; + } try { String idsStr = TextUtils.join(",", emojiToLoad); SQLiteCursor cursor = database.queryFinalized(String.format(Locale.US, "SELECT data FROM animated_emoji WHERE document_id IN (%s)", idsStr)); @@ -241,7 +255,7 @@ private void loadFromDatabase(ArrayList emojiToLoad) { }); cursor.dispose(); } catch (SQLiteException e) { - FileLog.e(e); + messagesStorage.checkSQLException(e); } }); } @@ -312,7 +326,10 @@ public void processDocuments(ArrayList documents) { ArrayList loadingCallbacks = loadingDocuments.remove(document.id); if (loadingCallbacks != null) { for (int j = 0; j < loadingCallbacks.size(); ++j) { - loadingCallbacks.get(j).run(document); + ReceivedDocument callback = loadingCallbacks.get(j); + if (callback != null) { + callback.run(document); + } } loadingCallbacks.clear(); } @@ -413,6 +430,8 @@ private void updateSize() { sizedp = (int) ((Math.abs(Theme.chat_msgTextPaintEmoji[2].ascent()) + Math.abs(Theme.chat_msgTextPaintEmoji[2].descent())) * 1.15f / AndroidUtilities.density); } else if (this.cacheType == STANDARD_LOTTIE_FRAME) { sizedp = (int) ((Math.abs(Theme.chat_msgTextPaintEmoji[0].ascent()) + Math.abs(Theme.chat_msgTextPaintEmoji[0].descent())) * 1.15f / AndroidUtilities.density); + } else if (cacheType == CACHE_TYPE_AVATAR_CONSTRUCTOR_PREVIEW || cacheType == CACHE_TYPE_AVATAR_CONSTRUCTOR_PREVIEW2) { + sizedp = 100; } else { sizedp = 34; } @@ -458,16 +477,18 @@ protected boolean setImageBitmapByKey(Drawable drawable, String key, int type, b imageReceiver.setUniqKeyPrefix(cacheType + "_"); } imageReceiver.setVideoThumbIsSame(true); - boolean onlyStaticPreview = SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW && (cacheType == CACHE_TYPE_KEYBOARD || cacheType == CACHE_TYPE_ALERT_PREVIEW || cacheType == CACHE_TYPE_ALERT_PREVIEW_TAB_STRIP); - + boolean onlyStaticPreview = SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW && cacheType == CACHE_TYPE_ALERT_PREVIEW_TAB_STRIP || cacheType == CACHE_TYPE_KEYBOARD && !LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_KEYBOARD) || cacheType == CACHE_TYPE_ALERT_PREVIEW && !LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_REACTIONS); + if (cacheType == CACHE_TYPE_ALERT_PREVIEW_STATIC) { + onlyStaticPreview = true; + } String filter = sizedp + "_" + sizedp; if (cacheType == CACHE_TYPE_RENDERING_VIDEO) { filter += "_d_nostream"; } - if (cacheType != STANDARD_LOTTIE_FRAME && (cacheType != CACHE_TYPE_MESSAGES_LARGE || SharedConfig.getDevicePerformanceClass() < SharedConfig.PERFORMANCE_CLASS_HIGH) && cacheType != CACHE_TYPE_RENDERING_VIDEO) { + if (cacheType != CACHE_TYPE_AVATAR_CONSTRUCTOR_PREVIEW2 && cacheType != CACHE_TYPE_AVATAR_CONSTRUCTOR_PREVIEW && cacheType != STANDARD_LOTTIE_FRAME && (cacheType != CACHE_TYPE_MESSAGES_LARGE || SharedConfig.getDevicePerformanceClass() < SharedConfig.PERFORMANCE_CLASS_HIGH) && cacheType != CACHE_TYPE_RENDERING_VIDEO) { filter += "_pcache"; } - if (cacheType != CACHE_TYPE_MESSAGES && cacheType != CACHE_TYPE_MESSAGES_LARGE) { + if (cacheType != CACHE_TYPE_MESSAGES && cacheType != CACHE_TYPE_MESSAGES_LARGE && cacheType != 
CACHE_TYPE_AVATAR_CONSTRUCTOR_PREVIEW && cacheType != CACHE_TYPE_AVATAR_CONSTRUCTOR_PREVIEW2) { filter += "_compress"; } if (cacheType == STANDARD_LOTTIE_FRAME) { @@ -481,8 +502,7 @@ protected boolean setImageBitmapByKey(Drawable drawable, String key, int type, b if ("video/webm".equals(document.mime_type)) { mediaLocation = ImageLocation.getForDocument(document); mediaFilter = filter + "_" + ImageLoader.AUTOPLAY_FILTER; - SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(document.thumbs, Theme.key_windowBackgroundWhiteGrayIcon, 0.2f); - thumbDrawable = svgThumb; + thumbDrawable = DocumentObject.getSvgThumb(document.thumbs, Theme.key_windowBackgroundWhiteGrayIcon, 0.2f); } else if ("application/x-tgsticker".equals(document.mime_type)) { String probableCacheKey = (cacheType != 0 ? cacheType + "_" : "") + documentId + "@" + filter; if (SharedConfig.getDevicePerformanceClass() != SharedConfig.PERFORMANCE_CLASS_LOW || (cacheType == CACHE_TYPE_KEYBOARD || !ImageLoader.getInstance().hasLottieMemCache(probableCacheKey))) { @@ -503,41 +523,35 @@ protected boolean setImageBitmapByKey(Drawable drawable, String key, int type, b mediaLocation = null; mediaFilter = filter; } - if (onlyStaticPreview) { - mediaLocation = null; - } if (absolutePath != null) { - imageReceiver.setImageBitmap(new AnimatedFileDrawable(new File(absolutePath), true, 0, null, null, null, 0, currentAccount, true, 512, 512, null)); + imageReceiver.setImageBitmap(new AnimatedFileDrawable(new File(absolutePath), true, 0, 0, null, null, null, 0, currentAccount, true, 512, 512, null)); } else if (cacheType == STANDARD_LOTTIE_FRAME) { imageReceiver.setImage(null, null, mediaLocation, mediaFilter, null, null, thumbDrawable, document.size, null, document, 1); } else { - if (SharedConfig.getLiteMode().enabled()) { + if (onlyStaticPreview || (!LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_KEYBOARD) && cacheType != CACHE_TYPE_AVATAR_CONSTRUCTOR_PREVIEW)) { if ("video/webm".equals(document.mime_type)) { imageReceiver.setImage(null, null, ImageLocation.getForDocument(thumb, document), sizedp + "_" + sizedp, null, null, thumbDrawable, document.size, null, document, 1); + } else if (MessageObject.isAnimatedStickerDocument(document, true)){ + imageReceiver.setImage(mediaLocation, mediaFilter + "_firstframe", null, null, thumbDrawable, document.size, null, document, 1); } else { - imageReceiver.setImage(mediaLocation, mediaFilter + "_firstframe", ImageLocation.getForDocument(thumb, document), sizedp + "_" + sizedp, null, null, thumbDrawable, document.size, null, document, 1); + imageReceiver.setImage(ImageLocation.getForDocument(thumb, document), sizedp + "_" + sizedp, null, null, thumbDrawable, document.size, null, document, 1); } } else { imageReceiver.setImage(mediaLocation, mediaFilter, ImageLocation.getForDocument(thumb, document), sizedp + "_" + sizedp, null, null, thumbDrawable, document.size, null, document, 1); } } + updateAutoRepeat(imageReceiver); - if (cacheType == CACHE_TYPE_EMOJI_STATUS || cacheType == CACHE_TYPE_ALERT_EMOJI_STATUS || cacheType == CACHE_TYPE_FORUM_TOPIC) { - imageReceiver.setAutoRepeatCount(2); - } else if (cacheType == CACHE_TYPE_FORUM_TOPIC_LARGE) { - imageReceiver.setAutoRepeatCount(1); - } - - if (cacheType == CACHE_TYPE_ALERT_PREVIEW || cacheType == CACHE_TYPE_ALERT_PREVIEW_TAB_STRIP || cacheType == CACHE_TYPE_ALERT_PREVIEW_LARGE) { + if (cacheType == CACHE_TYPE_ALERT_PREVIEW_STATIC || cacheType == CACHE_TYPE_ALERT_PREVIEW || cacheType == CACHE_TYPE_ALERT_PREVIEW_TAB_STRIP || cacheType == 
CACHE_TYPE_ALERT_PREVIEW_LARGE) { imageReceiver.setLayerNum(7); } if (cacheType == CACHE_TYPE_ALERT_EMOJI_STATUS) { imageReceiver.setLayerNum(6656); } imageReceiver.setAspectFit(true); - if (cacheType == CACHE_TYPE_RENDERING_VIDEO || cacheType == STANDARD_LOTTIE_FRAME) { + if (cacheType == CACHE_TYPE_RENDERING_VIDEO || cacheType == STANDARD_LOTTIE_FRAME || cacheType == CACHE_TYPE_TAB_STRIP || cacheType == CACHE_TYPE_ALERT_PREVIEW_TAB_STRIP) { imageReceiver.setAllowStartAnimation(false); imageReceiver.setAllowStartLottieAnimation(false); imageReceiver.setAutoRepeat(0); @@ -556,6 +570,14 @@ protected boolean setImageBitmapByKey(Drawable drawable, String key, int type, b invalidate(); } + private void updateAutoRepeat(ImageReceiver imageReceiver) { + if (cacheType == CACHE_TYPE_EMOJI_STATUS || cacheType == CACHE_TYPE_ALERT_EMOJI_STATUS || cacheType == CACHE_TYPE_FORUM_TOPIC) { + imageReceiver.setAutoRepeatCount(2); + } else if (cacheType == CACHE_TYPE_FORUM_TOPIC_LARGE || cacheType == CACHE_TYPE_AVATAR_CONSTRUCTOR_PREVIEW || cacheType == CACHE_TYPE_TAB_STRIP || cacheType == CACHE_TYPE_ALERT_PREVIEW_TAB_STRIP) { + imageReceiver.setAutoRepeatCount(1); + } + } + void invalidate() { if (views != null) { for (int i = 0; i < views.size(); ++i) { @@ -637,6 +659,9 @@ public void draw(Canvas canvas, ImageReceiver.BackgroundThreadDrawHolder backgro } public void addView(View callback) { + if (callback instanceof SelectAnimatedEmojiDialog.EmojiListView) { + throw new RuntimeException(); + } if (views == null) { views = new ArrayList<>(10); } @@ -670,6 +695,8 @@ public void removeView(View view) { updateAttachState(); } + public static int attachedCount = 0; + public static ArrayList attachedDrawable; private void updateAttachState() { if (imageReceiver == null) { return; @@ -682,6 +709,19 @@ private void updateAttachState() { } else { imageReceiver.onDetachedFromWindow(); } + if (LOG_MEMORY_LEAK) { + if (attachedDrawable == null) { + attachedDrawable = new ArrayList<>(); + } + if (attached) { + attachedCount++; + attachedDrawable.add(this); + } else { + attachedCount--; + attachedDrawable.remove(this); + } + Log.d("animatedDrawable", "attached count " + attachedCount); + } } // if (globalEmojiCache != null && (views == null || views.size() <= 0) && (holders == null || holders.size() <= 0) && globalEmojiCache.size() > 50) { @@ -852,9 +892,11 @@ public static class SwapAnimatedEmojiDrawable extends Drawable implements Animat private AnimatedFloat changeProgress = new AnimatedFloat((View) null, 300, CubicBezierInterpolator.EASE_OUT); private Drawable[] drawables = new Drawable[2]; private View parentView; + private View secondParent; private boolean invalidateParent; private int size; private int alpha = 255; + boolean attached; public SwapAnimatedEmojiDrawable(View parentView, int size) { this(parentView, false, size, CACHE_TYPE_EMOJI_STATUS); @@ -876,40 +918,16 @@ public SwapAnimatedEmojiDrawable(View parentView, boolean invalidateParent, int } public void setParentView(View parentView) { - removeParentView(this.parentView); - addParentView(this.parentView = parentView); changeProgress.setParent(parentView); this.parentView = parentView; } - public void addParentView(View parentView) { - if (drawables[0] instanceof AnimatedEmojiDrawable) { - ((AnimatedEmojiDrawable) drawables[0]).addView(parentView); - } - if (drawables[1] instanceof AnimatedEmojiDrawable) { - ((AnimatedEmojiDrawable) drawables[1]).addView(parentView); - } - } - - public void removeParentView(View parentView) { - if (drawables[0] 
instanceof AnimatedEmojiDrawable) { - ((AnimatedEmojiDrawable) drawables[0]).removeView(parentView); - } - if (drawables[1] instanceof AnimatedEmojiDrawable) { - ((AnimatedEmojiDrawable) drawables[1]).removeView(parentView); - } - } - public void play() { if (getDrawable() instanceof AnimatedEmojiDrawable) { AnimatedEmojiDrawable drawable = (AnimatedEmojiDrawable) getDrawable(); ImageReceiver imageReceiver = drawable.getImageReceiver(); if (imageReceiver != null) { - if (drawable.cacheType == CACHE_TYPE_EMOJI_STATUS || drawable.cacheType == CACHE_TYPE_ALERT_EMOJI_STATUS || drawable.cacheType == CACHE_TYPE_FORUM_TOPIC) { - imageReceiver.setAutoRepeatCount(2); - } else if (cacheType == CACHE_TYPE_FORUM_TOPIC_LARGE) { - imageReceiver.setAutoRepeatCount(1); - } + drawable.updateAutoRepeat(imageReceiver); imageReceiver.startAnimation(); } } @@ -1013,19 +1031,26 @@ public void set(long documentId, int cacheType, boolean animated) { if (animated) { changeProgress.set(0, true); if (drawables[1] != null) { - if (drawables[1] instanceof AnimatedEmojiDrawable) { + if (attached && drawables[1] instanceof AnimatedEmojiDrawable) { ((AnimatedEmojiDrawable) drawables[1]).removeView(this); } drawables[1] = null; } drawables[1] = drawables[0]; drawables[0] = AnimatedEmojiDrawable.make(UserConfig.selectedAccount, cacheType, documentId); - ((AnimatedEmojiDrawable) drawables[0]).addView(this); + if (attached) { + ((AnimatedEmojiDrawable) drawables[0]).addView(this); + } } else { changeProgress.set(1, true); - detach(); + boolean attachedLocal = attached; + if (attachedLocal) { + detach(); + } drawables[0] = AnimatedEmojiDrawable.make(UserConfig.selectedAccount, cacheType, documentId); - ((AnimatedEmojiDrawable) drawables[0]).addView(this); + if (attachedLocal) { + attach(); + } } lastColor = 0xffffffff; colorFilter = null; @@ -1052,19 +1077,26 @@ public void set(TLRPC.Document document, int cacheType, boolean animated) { drawables[1] = drawables[0]; if (document != null) { drawables[0] = AnimatedEmojiDrawable.make(UserConfig.selectedAccount, cacheType, document); - ((AnimatedEmojiDrawable) drawables[0]).addView(this); + if (attached) { + ((AnimatedEmojiDrawable) drawables[0]).addView(this); + } } else { drawables[0] = null; } } else { changeProgress.set(1, true); - detach(); + boolean attachedLocal = attached; + if (attachedLocal) { + detach(); + } if (document != null) { drawables[0] = AnimatedEmojiDrawable.make(UserConfig.selectedAccount, cacheType, document); - ((AnimatedEmojiDrawable) drawables[0]).addView(this); } else { drawables[0] = null; } + if (attachedLocal) { + attach(); + } } lastColor = 0xffffffff; colorFilter = null; @@ -1079,7 +1111,7 @@ public void set(Drawable drawable, boolean animated) { if (animated) { changeProgress.set(0, true); if (drawables[1] != null) { - if (drawables[1] instanceof AnimatedEmojiDrawable) { + if (attached && drawables[1] instanceof AnimatedEmojiDrawable) { ((AnimatedEmojiDrawable) drawables[1]).removeView(this); } drawables[1] = null; @@ -1088,8 +1120,14 @@ public void set(Drawable drawable, boolean animated) { drawables[0] = drawable; } else { changeProgress.set(1, true); - detach(); + boolean attachedLocal = attached; + if (attachedLocal) { + detach(); + } drawables[0] = drawable; + if (attachedLocal) { + attach(); + } } lastColor = 0xffffffff; colorFilter = null; @@ -1098,6 +1136,10 @@ public void set(Drawable drawable, boolean animated) { } public void detach() { + if (!attached) { + return; + } + attached = false; if (drawables[0] instanceof 
AnimatedEmojiDrawable) { ((AnimatedEmojiDrawable) drawables[0]).removeView(this); } @@ -1107,6 +1149,10 @@ public void detach() { } public void attach() { + if (attached) { + return; + } + attached = true; if (drawables[0] instanceof AnimatedEmojiDrawable) { ((AnimatedEmojiDrawable) drawables[0]).addView(this); } @@ -1145,15 +1191,27 @@ public void invalidate() { parentView.invalidate(); } } + if (secondParent != null) { + secondParent.invalidate(); + } + invalidateSelf(); + } + + public void setSecondParent(View secondParent) { + this.secondParent = secondParent; } } - public static void lightModeChanged() { - for (HashMap map :globalEmojiCache.values()) { - ArrayList set = new ArrayList(map.keySet()); - for (Long documentId : set) { + public static void updateAll() { + if (globalEmojiCache == null) { + return; + } + for (int i = 0; i < globalEmojiCache.size(); i++) { + LongSparseArray map = globalEmojiCache.valueAt(i); + for (int j = 0; j < map.size(); j++) { + long documentId = map.keyAt(j); AnimatedEmojiDrawable animatedEmojiDrawable = map.get(documentId); - if (animatedEmojiDrawable.attached) { + if (animatedEmojiDrawable != null && animatedEmojiDrawable.attached) { animatedEmojiDrawable.initDocument(true); } else { map.remove(documentId); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedEmojiSpan.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedEmojiSpan.java index f4ced2de1f..84f71f12d5 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedEmojiSpan.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedEmojiSpan.java @@ -20,8 +20,8 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.Emoji; import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.MessageObject; -import org.telegram.messenger.SharedConfig; import org.telegram.messenger.UserConfig; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; @@ -730,7 +730,7 @@ public void remove(AnimatedEmojiHolder holder) { } private void checkBackgroundRendering() { - if (allowBackgroundRendering && holders.size() >= 10 && backgroundThreadDrawable == null && !SharedConfig.getLiteMode().enabled()) { + if (allowBackgroundRendering && holders.size() >= 10 && backgroundThreadDrawable == null && LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_KEYBOARD)) { backgroundThreadDrawable = new DrawingInBackgroundThreadDrawable() { private final ArrayList backgroundHolders = new ArrayList<>(); @@ -798,8 +798,7 @@ public void onResume() { }; backgroundThreadDrawable.padding = AndroidUtilities.dp(3); backgroundThreadDrawable.onAttachToWindow(); - } - else if (holders.size() < 10 && backgroundThreadDrawable != null) { + } else if (holders.size() < 10 && backgroundThreadDrawable != null) { backgroundThreadDrawable.onDetachFromWindow(); backgroundThreadDrawable = null; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedFileDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedFileDrawable.java index b0e2614bc1..bd9b285d5a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedFileDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedFileDrawable.java @@ -94,6 +94,7 @@ public class AnimatedFileDrawable extends BitmapDrawable implements Animatable, private boolean forceDecodeAfterNextFrame; private File path; private long streamFileSize; + private int 
streamLoadingPriority; private int currentAccount; private boolean recycleWithSecond; private volatile long pendingSeekTo = -1; @@ -420,13 +421,14 @@ private void updateScaleFactor() { } }; - public AnimatedFileDrawable(File file, boolean createDecoder, long streamSize, TLRPC.Document document, ImageLocation location, Object parentObject, long seekTo, int account, boolean preview, BitmapsCache.CacheOptions cacheOptions) { - this(file, createDecoder, streamSize, document, location, parentObject, seekTo, account, preview, 0, 0, cacheOptions); + public AnimatedFileDrawable(File file, boolean createDecoder, long streamSize, int streamLoadingPriority, TLRPC.Document document, ImageLocation location, Object parentObject, long seekTo, int account, boolean preview, BitmapsCache.CacheOptions cacheOptions) { + this(file, createDecoder, streamSize, streamLoadingPriority, document, location, parentObject, seekTo, account, preview, 0, 0, cacheOptions); } - public AnimatedFileDrawable(File file, boolean createDecoder, long streamSize, TLRPC.Document document, ImageLocation location, Object parentObject, long seekTo, int account, boolean preview, int w, int h, BitmapsCache.CacheOptions cacheOptions) { + public AnimatedFileDrawable(File file, boolean createDecoder, long streamSize, int streamLoadingPriority, TLRPC.Document document, ImageLocation location, Object parentObject, long seekTo, int account, boolean preview, int w, int h, BitmapsCache.CacheOptions cacheOptions) { path = file; streamFileSize = streamSize; + this.streamLoadingPriority = streamLoadingPriority; currentAccount = account; renderingHeight = h; renderingWidth = w; @@ -434,7 +436,7 @@ public AnimatedFileDrawable(File file, boolean createDecoder, long streamSize, T this.document = document; getPaint().setFlags(Paint.ANTI_ALIAS_FLAG | Paint.FILTER_BITMAP_FLAG); if (streamSize != 0 && (document != null || location != null)) { - stream = new AnimatedFileDrawableStream(document, location, parentObject, account, preview); + stream = new AnimatedFileDrawableStream(document, location, parentObject, account, preview, streamLoadingPriority); } if (createDecoder && !this.precache) { nativePtr = createDecoder(file.getAbsolutePath(), metaData, currentAccount, streamFileSize, stream, preview); @@ -481,7 +483,7 @@ public Bitmap getFrameAtTime(long ms, boolean precise) { if (!precise) { seekToMs(nativePtr, ms, precise); } - Bitmap backgroundBitmap = Bitmap.createBitmap((int) (metaData[0] * scaleFactor), (int) (metaData[1] * scaleFactor), Bitmap.Config.ARGB_8888); + Bitmap backgroundBitmap = Bitmap.createBitmap(metaData[0], metaData[1], Bitmap.Config.ARGB_8888); int result; if (precise) { result = getFrameAtTime(nativePtr, ms, backgroundBitmap, metaData, backgroundBitmap.getRowBytes()); @@ -631,6 +633,7 @@ public void recycle() { } if (stream != null) { stream.cancel(true); + stream = null; } invalidateInternal(); } @@ -970,9 +973,9 @@ public int getOrientation() { public AnimatedFileDrawable makeCopy() { AnimatedFileDrawable drawable; if (stream != null) { - drawable = new AnimatedFileDrawable(path, false, streamFileSize, stream.getDocument(), stream.getLocation(), stream.getParentObject(), pendingSeekToUI, currentAccount, stream != null && stream.isPreview(), null); + drawable = new AnimatedFileDrawable(path, false, streamFileSize, streamLoadingPriority, stream.getDocument(), stream.getLocation(), stream.getParentObject(), pendingSeekToUI, currentAccount, stream != null && stream.isPreview(), null); } else { - drawable = new AnimatedFileDrawable(path, 
false, streamFileSize, document, null, null, pendingSeekToUI, currentAccount, stream != null && stream.isPreview(), null); + drawable = new AnimatedFileDrawable(path, false, streamFileSize, streamLoadingPriority, document, null, null, pendingSeekToUI, currentAccount, stream != null && stream.isPreview(), null); } drawable.metaData[0] = metaData[0]; drawable.metaData[1] = metaData[1]; @@ -1072,6 +1075,9 @@ public int getNextFrame(Bitmap bitmap) { @Override public Bitmap getFirstFrame(Bitmap bitmap) { + if (bitmap == null) { + bitmap = Bitmap.createBitmap(renderingWidth, renderingHeight, Bitmap.Config.ARGB_8888); + } Canvas canvas = new Canvas(bitmap); if (generatingCacheBitmap == null) { generatingCacheBitmap = Bitmap.createBitmap(metaData[0], metaData[1], Bitmap.Config.ARGB_8888); @@ -1127,15 +1133,9 @@ public void updateCurrentFrame(long now, boolean b) { if (renderingBitmap == null && nextRenderingBitmap == null) { scheduleNextGetFrame(); } else if (nextRenderingBitmap != null && (renderingBitmap == null || (Math.abs(now - lastFrameTime) >= invalidateAfter && !skipFrameUpdate))) { - //if (precache) { - backgroundBitmap = renderingBitmap; - // } renderingBitmap = nextRenderingBitmap; renderingBitmapTime = nextRenderingBitmapTime; for (int i = 0; i < backgroundShader.length; i++) { - // if (precache) { - backgroundShader[i] = renderingShader[i]; - // } renderingShader[i] = nextRenderingShader[i]; nextRenderingShader[i] = null; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedFloat.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedFloat.java index 92b50debd0..40f6bfdb3f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedFloat.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedFloat.java @@ -77,6 +77,14 @@ public AnimatedFloat(Runnable invalidate, long transitionDuration, TimeInterpola this.firstSet = true; } + public AnimatedFloat(Runnable invalidate, long transitionDelay, long transitionDuration, TimeInterpolator transitionInterpolator) { + this.invalidate = invalidate; + this.transitionDelay = transitionDelay; + this.transitionDuration = transitionDuration; + this.transitionInterpolator = transitionInterpolator; + this.firstSet = true; + } + public AnimatedFloat(float initialValue, View parentToInvalidate) { this.parent = parentToInvalidate; this.value = targetValue = initialValue; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedTextView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedTextView.java index afa54897ec..12a6a7a9d4 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedTextView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AnimatedTextView.java @@ -6,6 +6,7 @@ import android.animation.ValueAnimator; import android.content.Context; import android.graphics.Canvas; +import android.graphics.Color; import android.graphics.ColorFilter; import android.graphics.Paint; import android.graphics.PixelFormat; @@ -39,18 +40,29 @@ public static class AnimatedTextDrawable extends Drawable { private boolean isRTL = false; - private int currentWidth, currentHeight; - private Integer[] currentLayoutOffsets; - private Integer[] currentLayoutToOldIndex; - private StaticLayout[] currentLayout; + private float currentWidth, currentHeight; + private Part[] currentParts; private CharSequence currentText; - private int oldWidth, oldHeight; - private Integer[] oldLayoutOffsets; - private Integer[] 
oldLayoutToCurrentIndex; - private StaticLayout[] oldLayout; + private float oldWidth, oldHeight; + private Part[] oldParts; private CharSequence oldText; + private class Part { + StaticLayout layout; + float offset; + int toOppositeIndex; + float left, width; + + public Part(StaticLayout layout, float offset, int toOppositeIndex) { + this.layout = layout; + this.offset = offset; + this.toOppositeIndex = toOppositeIndex; + this.left = layout == null || layout.getLineCount() <= 0 ? 0 : layout.getLineLeft(0); + this.width = layout == null || layout.getLineCount() <= 0 ? 0 : layout.getLineWidth(0); + } + } + private float t = 0; private boolean moveDown = true; private ValueAnimator animator; @@ -97,26 +109,35 @@ public void draw(@NonNull Canvas canvas) { canvas.translate(bounds.left, bounds.top); int fullWidth = bounds.width(); int fullHeight = bounds.height(); - if (currentLayout != null && oldLayout != null && t != 1) { - int width = AndroidUtilities.lerp(oldWidth, currentWidth, t); - int height = AndroidUtilities.lerp(oldHeight, currentHeight, t); + if (currentParts != null && oldParts != null && t != 1) { + float width = AndroidUtilities.lerp(oldWidth, currentWidth, t); + float height = AndroidUtilities.lerp(oldHeight, currentHeight, t); canvas.translate(0, (fullHeight - height) / 2f); - for (int i = 0; i < currentLayout.length; ++i) { - int j = currentLayoutToOldIndex[i]; - float x = currentLayoutOffsets[i], y = 0; + for (int i = 0; i < currentParts.length; ++i) { + Part current = currentParts[i]; + int j = current.toOppositeIndex; + float x = current.offset, y = 0; if (j >= 0) { - float oldX = oldLayoutOffsets[j]; - x = AndroidUtilities.lerp(oldX, x, t); + if (isRTL && !ignoreRTL) { + x = currentWidth - (x + current.width); + } + Part old = oldParts[j]; + float oldX = old.offset; + if (isRTL && !ignoreRTL) { + oldX = oldWidth - (oldX + old.width); + } + x = AndroidUtilities.lerp(oldX - old.left, x - current.left, t); textPaint.setAlpha(alpha); } else { + if (isRTL && !ignoreRTL) { + x = currentWidth - (x + current.width); + } + x -= current.left; y = -textPaint.getTextSize() * moveAmplitude * (1f - t) * (moveDown ? 1f : -1f); textPaint.setAlpha((int) (alpha * t)); } canvas.save(); - int lwidth = j >= 0 ? width : currentWidth; - if (isRTL && !ignoreRTL) { - x = -x + 2 * lwidth - currentLayout[i].getWidth() - fullWidth; - } + float lwidth = j >= 0 ? width : currentWidth; if ((gravity | ~Gravity.LEFT) != ~0) { if ((gravity | ~Gravity.RIGHT) == ~0) { x += fullWidth - lwidth; @@ -127,21 +148,23 @@ public void draw(@NonNull Canvas canvas) { } } canvas.translate(x, y); - currentLayout[i].draw(canvas); + current.layout.draw(canvas); canvas.restore(); } - for (int i = 0; i < oldLayout.length; ++i) { - int j = oldLayoutToCurrentIndex[i]; + for (int i = 0; i < oldParts.length; ++i) { + Part old = oldParts[i]; + int j = old.toOppositeIndex; if (j >= 0) { continue; } - float x = oldLayoutOffsets[i]; + float x = old.offset; float y = textPaint.getTextSize() * moveAmplitude * t * (moveDown ? 
1f : -1f); textPaint.setAlpha((int) (alpha * (1f - t))); canvas.save(); if (isRTL && !ignoreRTL) { - x = -x + 2 * oldWidth - oldLayout[i].getWidth() - fullWidth; + x = oldWidth - (x + old.width); } + x -= old.left; if ((gravity | ~Gravity.LEFT) != ~0) { if ((gravity | ~Gravity.RIGHT) == ~0) { x += fullWidth - oldWidth; @@ -152,19 +175,21 @@ public void draw(@NonNull Canvas canvas) { } } canvas.translate(x, y); - oldLayout[i].draw(canvas); + old.layout.draw(canvas); canvas.restore(); } } else { canvas.translate(0, (fullHeight - currentHeight) / 2f); - if (currentLayout != null) { - for (int i = 0; i < currentLayout.length; ++i) { - textPaint.setAlpha(alpha); + if (currentParts != null) { + textPaint.setAlpha(alpha); + for (int i = 0; i < currentParts.length; ++i) { canvas.save(); - float x = currentLayoutOffsets[i]; + Part current = currentParts[i]; + float x = current.offset; if (isRTL && !ignoreRTL) { - x = -x + 2 * currentWidth - currentLayout[i].getWidth() - fullWidth; + x = currentWidth - (x + current.width); } + x -= current.left; if ((gravity | ~Gravity.LEFT) != ~0) { if ((gravity | ~Gravity.RIGHT) == ~0) { x += fullWidth - currentWidth; @@ -174,8 +199,9 @@ public void draw(@NonNull Canvas canvas) { x += fullWidth - currentWidth; } } +// boolean isAppeared = currentLayoutToOldIndex != null && i < currentLayoutToOldIndex.length && currentLayoutToOldIndex[i] < 0; canvas.translate(x, 0); - currentLayout[i].draw(canvas); + current.layout.draw(canvas); canvas.restore(); } } @@ -226,48 +252,45 @@ public void setText(CharSequence text, boolean animated, boolean moveDown) { oldText = currentText; currentText = text; - currentLayout = null; - oldLayout = null; - ArrayList currentLayoutOffsets = new ArrayList<>(); - ArrayList currentLayoutToOldIndex = new ArrayList<>(); - ArrayList currentLayoutList = new ArrayList<>(); - ArrayList oldLayoutOffsets = new ArrayList<>(); - ArrayList oldLayoutToCurrentIndex = new ArrayList<>(); - ArrayList oldLayoutList = new ArrayList<>(); +// ArrayList currentLayoutOffsets = new ArrayList<>(); +// ArrayList currentLayoutToOldIndex = new ArrayList<>(); +// ArrayList currentLayoutList = new ArrayList<>(); +// ArrayList oldLayoutOffsets = new ArrayList<>(); +// ArrayList oldLayoutToCurrentIndex = new ArrayList<>(); +// ArrayList oldLayoutList = new ArrayList<>(); + ArrayList currentParts = new ArrayList<>(); + ArrayList oldParts = new ArrayList<>(); currentWidth = currentHeight = 0; oldWidth = oldHeight = 0; + isRTL = AndroidUtilities.isRTL(currentText); // order execution matters RegionCallback onEqualRegion = (part, from, to) -> { - StaticLayout layout = makeLayout(part, bounds.width() - Math.min(currentWidth, oldWidth)); - oldLayoutToCurrentIndex.add(currentLayoutList.size()); - currentLayoutToOldIndex.add(oldLayoutList.size()); - currentLayoutOffsets.add(currentWidth); - currentLayoutList.add(layout); - oldLayoutOffsets.add(oldWidth); - oldLayoutList.add(layout); - float partWidth = layout.getLineWidth(0); + StaticLayout layout = makeLayout(part, bounds.width() - (int) Math.ceil(Math.min(currentWidth, oldWidth))); + final Part currentPart = new Part(layout, currentWidth, oldParts.size()); + final Part oldPart = new Part(layout, oldWidth, oldParts.size()); + currentParts.add(currentPart); + oldParts.add(oldPart); + float partWidth = currentPart.width; currentWidth += partWidth; oldWidth += partWidth; currentHeight = Math.max(currentHeight, layout.getHeight()); oldHeight = Math.max(oldHeight, layout.getHeight()); }; RegionCallback onNewPart = (part, from, to) -> 
{ - StaticLayout layout = makeLayout(part, bounds.width() - currentWidth); - currentLayoutOffsets.add(currentWidth); - currentLayoutList.add(layout); - currentLayoutToOldIndex.add(-1); - currentWidth += layout.getLineWidth(0); + StaticLayout layout = makeLayout(part, bounds.width() - (int) Math.ceil(currentWidth)); + final Part currentPart = new Part(layout, currentWidth, -1); + currentParts.add(currentPart); + currentWidth += currentPart.width; currentHeight = Math.max(currentHeight, layout.getHeight()); }; RegionCallback onOldPart = (part, from, to) -> { - StaticLayout layout = makeLayout(part, bounds.width() - oldWidth); - oldLayoutOffsets.add(oldWidth); - oldLayoutList.add(layout); - oldLayoutToCurrentIndex.add(-1); - oldWidth += layout.getLineWidth(0); + StaticLayout layout = makeLayout(part, bounds.width() - (int) Math.ceil(oldWidth)); + final Part oldPart = new Part(layout, oldWidth, -1); + oldParts.add(oldPart); + oldWidth += oldPart.width; oldHeight = Math.max(oldHeight, layout.getHeight()); }; @@ -276,38 +299,14 @@ public void setText(CharSequence text, boolean animated, boolean moveDown) { diff(from, to, onEqualRegion, onNewPart, onOldPart); - if (this.currentLayout == null || this.currentLayout.length != currentLayoutList.size()) { - this.currentLayout = new StaticLayout[currentLayoutList.size()]; - } - currentLayoutList.toArray(currentLayout); - if (this.currentLayoutOffsets == null || this.currentLayoutOffsets.length != currentLayoutOffsets.size()) { - this.currentLayoutOffsets = new Integer[currentLayoutOffsets.size()]; - } - currentLayoutOffsets.toArray(this.currentLayoutOffsets); - if (this.currentLayoutToOldIndex == null || this.currentLayoutToOldIndex.length != currentLayoutToOldIndex.size()) { - this.currentLayoutToOldIndex = new Integer[currentLayoutToOldIndex.size()]; - } - currentLayoutToOldIndex.toArray(this.currentLayoutToOldIndex); - - if (this.oldLayout == null || this.oldLayout.length != oldLayoutList.size()) { - this.oldLayout = new StaticLayout[oldLayoutList.size()]; - } - oldLayoutList.toArray(oldLayout); - if (this.oldLayoutOffsets == null || this.oldLayoutOffsets.length != oldLayoutOffsets.size()) { - this.oldLayoutOffsets = new Integer[oldLayoutOffsets.size()]; + if (this.currentParts == null || this.currentParts.length != currentParts.size()) { + this.currentParts = new Part[currentParts.size()]; } - oldLayoutOffsets.toArray(this.oldLayoutOffsets); - if (this.oldLayoutToCurrentIndex == null || this.oldLayoutToCurrentIndex.length != oldLayoutToCurrentIndex.size()) { - this.oldLayoutToCurrentIndex = new Integer[oldLayoutToCurrentIndex.size()]; - } - oldLayoutToCurrentIndex.toArray(this.oldLayoutToCurrentIndex); - - if (this.currentLayout.length > 0) { - isRTL = this.currentLayout[0].isRtlCharAt(0); - } else if (this.oldLayout.length > 0) { - isRTL = this.oldLayout[0].isRtlCharAt(0); + currentParts.toArray(this.currentParts); + if (this.oldParts == null || this.oldParts.length != oldParts.size()) { + this.oldParts = new Part[oldParts.size()]; } - + oldParts.toArray(this.oldParts); if (animator != null) { animator.cancel(); } @@ -322,9 +321,7 @@ public void setText(CharSequence text, boolean animated, boolean moveDown) { @Override public void onAnimationEnd(Animator animation) { super.onAnimationEnd(animation); - oldLayout = null; - AnimatedTextDrawable.this.oldLayoutOffsets = null; - AnimatedTextDrawable.this.oldLayoutToCurrentIndex = null; + AnimatedTextDrawable.this.oldParts = null; oldText = null; oldWidth = 0; t = 0; @@ -353,22 +350,15 @@ public void 
onAnimationEnd(Animator animation) { toSetTextMoveDown = false; t = 0; - currentLayout = new StaticLayout[1]; - currentLayout[0] = makeLayout(currentText = text, bounds.width()); - currentWidth = (int) currentLayout[0].getLineWidth(0); - currentHeight = currentLayout[0].getHeight(); - currentLayoutOffsets = new Integer[1]; - currentLayoutOffsets[0] = 0; - currentLayoutToOldIndex = new Integer[1]; - currentLayoutToOldIndex[0] = -1; - - if (this.currentLayout.length > 0) { - isRTL = this.currentLayout[0].isRtlCharAt(0); + if (!text.equals(currentText)) { + currentParts = new Part[1]; + currentParts[0] = new Part(makeLayout(currentText = text, bounds.width()), 0, -1); + currentWidth = currentParts[0].width; + currentHeight = currentParts[0].layout.getHeight(); + isRTL = AndroidUtilities.isRTL(currentText); } - oldLayout = null; - oldLayoutOffsets = null; - oldLayoutToCurrentIndex = null; + oldParts = null; oldText = null; oldWidth = 0; oldHeight = 0; @@ -381,18 +371,22 @@ public CharSequence getText() { return currentText; } - public int getWidth() { + public float getWidth() { return Math.max(currentWidth, oldWidth); } - public int getCurrentWidth() { - if (currentLayout != null && oldLayout != null) { + public float getCurrentWidth() { + if (currentParts != null && oldParts != null) { return AndroidUtilities.lerp(oldWidth, currentWidth, t); } return currentWidth; } - public int getHeight() { + public float getAnimateToWidth() { + return currentWidth; + } + + public float getHeight() { return currentHeight; } @@ -654,6 +648,11 @@ public float getTextSize() { public void setTextColor(int color) { textPaint.setColor(color); + alpha = Color.alpha(color); + } + + public int getTextColor() { + return textPaint.getColor(); } public void setTypeface(Typeface typeface) { @@ -720,10 +719,11 @@ public Rect getDirtyBounds() { } private AnimatedTextDrawable drawable; - private int lastMaxWidth; + private int lastMaxWidth, maxWidth; private CharSequence toSetText; private boolean toSetMoveDown; + public boolean adaptWidth = true; public AnimatedTextView(Context context) { this(context, false, false, false); @@ -743,18 +743,25 @@ public AnimatedTextView(Context context, boolean splitByWords, boolean preserveI }); } + public void setMaxWidth(int width) { + maxWidth = width; + } + @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { int width = MeasureSpec.getSize(widthMeasureSpec); int height = MeasureSpec.getSize(heightMeasureSpec); + if (maxWidth > 0) { + width = Math.min(width, maxWidth); + } if (lastMaxWidth != width && getLayoutParams().width != 0) { drawable.setBounds(getPaddingLeft(), getPaddingTop(), width - getPaddingRight(), height - getPaddingBottom()); lastMaxWidth = width; setText(drawable.getText(), false); } lastMaxWidth = width; - if (MeasureSpec.getMode(widthMeasureSpec) == MeasureSpec.AT_MOST) { - width = getPaddingLeft() + drawable.getWidth() + getPaddingRight(); + if (adaptWidth && MeasureSpec.getMode(widthMeasureSpec) == MeasureSpec.AT_MOST) { + width = getPaddingLeft() + (int) Math.ceil(drawable.getWidth()) + getPaddingRight(); } setMeasuredDimension(width, height); } @@ -781,7 +788,7 @@ public boolean isAnimating() { return drawable.isAnimating(); } - private void setIgnoreRTL(boolean value) { + public void setIgnoreRTL(boolean value) { drawable.ignoreRTL = value; } @@ -801,7 +808,7 @@ public void setText(CharSequence text, boolean animated, boolean moveDown) { return; } } - int wasWidth = drawable.getWidth(); + int wasWidth = (int) drawable.getWidth(); 
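Note on the hunks above: the patch replaces AnimatedTextDrawable's parallel arrays (StaticLayout[] plus Integer[] offsets and index maps) with a single Part[] per text state and moves widths from int to float. The Part class itself is declared elsewhere in the file and is not shown in this excerpt; the sketch below is only an inferred shape based on the constructor calls new Part(layout, offset, toOldIndex) and the fields the hunks read (layout, offset, width, left). It is an assumption, not the patch's actual implementation.

    // Hypothetical sketch of the per-part holder the new code indexes (shape inferred, not from the patch).
    private static class Part {
        final StaticLayout layout;
        final float offset;     // x offset of this part within the whole string
        final int toOldIndex;   // index of the matching part in oldParts, or -1 if the part just appeared
        final float width;      // assumed to cache the first line's measured width
        final float left;       // assumed to cache the first line's left inset, subtracted before drawing

        Part(StaticLayout layout, float offset, int toOldIndex) {
            this.layout = layout;
            this.offset = offset;
            this.toOldIndex = toOldIndex;
            this.width = layout.getLineCount() > 0 ? layout.getLineWidth(0) : 0;
            this.left = layout.getLineCount() > 0 ? layout.getLineLeft(0) : 0;
        }
    }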
drawable.setBounds(getPaddingLeft(), getPaddingTop(), lastMaxWidth - getPaddingRight(), getMeasuredHeight() - getPaddingBottom()); drawable.setText(text, animated, moveDown); if (wasWidth < drawable.getWidth() || !animated && wasWidth != drawable.getWidth()) { @@ -810,7 +817,7 @@ public void setText(CharSequence text, boolean animated, boolean moveDown) { } public int width() { - return getPaddingLeft() + drawable.getCurrentWidth() + getPaddingRight(); + return getPaddingLeft() + (int) Math.ceil(drawable.getCurrentWidth()) + getPaddingRight(); } public CharSequence getText() { @@ -830,6 +837,10 @@ public void setTextColor(int color) { invalidate(); } + public int getTextColor() { + return drawable.getTextColor(); + } + public void setTypeface(Typeface typeface) { drawable.setTypeface(typeface); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AttachBotIntroTopView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AttachBotIntroTopView.java index 47fb79d14d..71f5fd8677 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AttachBotIntroTopView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AttachBotIntroTopView.java @@ -82,8 +82,8 @@ protected void onDetachedFromWindow() { protected void onDraw(Canvas canvas) { super.onDraw(canvas); - AndroidUtilities.rectTmp.set(0, 0, getWidth(), getHeight() + AndroidUtilities.dp(6)); - canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(6), AndroidUtilities.dp(6), backgroundPaint); + AndroidUtilities.rectTmp.set(0, 0, getWidth(), getHeight() + AndroidUtilities.dp(10)); + canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(10), AndroidUtilities.dp(10), backgroundPaint); imageReceiver.setImageCoords(getWidth() / 2f - AndroidUtilities.dp(ICONS_SIDE_PADDING + ICONS_SIZE_DP), getHeight() / 2f - AndroidUtilities.dp(ICONS_SIZE_DP) / 2f, AndroidUtilities.dp(ICONS_SIZE_DP), AndroidUtilities.dp(ICONS_SIZE_DP)); imageReceiver.draw(canvas); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AttachableDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AttachableDrawable.java new file mode 100644 index 0000000000..c98b50cf01 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AttachableDrawable.java @@ -0,0 +1,8 @@ +package org.telegram.ui.Components; + +import org.telegram.messenger.ImageReceiver; + +public interface AttachableDrawable { + void onAttachedToWindow(ImageReceiver parent); + void onDetachedFromWindow(ImageReceiver parent); +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioPlayerAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioPlayerAlert.java index ae99306c94..d13e6917fc 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioPlayerAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioPlayerAlert.java @@ -85,6 +85,7 @@ import org.telegram.ui.ActionBar.ActionBar; import org.telegram.ui.ActionBar.ActionBarMenu; import org.telegram.ui.ActionBar.ActionBarMenuItem; +import org.telegram.ui.ActionBar.ActionBarMenuSlider; import org.telegram.ui.ActionBar.ActionBarMenuSubItem; import org.telegram.ui.ActionBar.AlertDialog; import org.telegram.ui.ActionBar.BaseFragment; @@ -129,7 +130,10 @@ public class AudioPlayerAlert extends BottomSheet implements NotificationCenter. 
private SeekBarView seekBarView; private SimpleTextView timeTextView; private ActionBarMenuItem playbackSpeedButton; - private ActionBarMenuSubItem[] speedItems = new ActionBarMenuSubItem[4]; + private SpeedIconDrawable speedIcon; + private ActionBarMenuSlider.SpeedSlider speedSlider; + private boolean slidingSpeed; + private ActionBarMenuSubItem[] speedItems = new ActionBarMenuSubItem[6]; private TextView durationTextView; private ActionBarMenuItem repeatButton; private ActionBarMenuSubItem repeatSongItem; @@ -182,10 +186,9 @@ public class AudioPlayerAlert extends BottomSheet implements NotificationCenter. private boolean wasLight; - private final static int menu_speed_slow = 1; - private final static int menu_speed_normal = 2; - private final static int menu_speed_fast = 3; - private final static int menu_speed_veryfast = 4; + private final static float[] speeds = new float[] { + .5f, 1f, 1.2f, 1.5f, 1.7f, 2f + }; private final Runnable forwardSeek = new Runnable() { @Override @@ -255,6 +258,7 @@ public AudioPlayerAlert(final Context context, Theme.ResourcesProvider resources NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileLoadProgressChanged); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.musicDidLoad); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.moreMusicDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.messagePlayingSpeedChanged); containerView = new FrameLayout(context) { @@ -431,7 +435,7 @@ public void setAlpha(float alpha) { containerView.invalidate(); } }; - actionBar.setBackgroundColor(getThemedColor(Theme.key_player_actionBar)); + actionBar.setBackgroundColor(getThemedColor(Theme.key_dialogBackground)); actionBar.setBackButtonImage(R.drawable.ic_ab_back); actionBar.setItemsColor(getThemedColor(Theme.key_player_actionBarTitle), false); actionBar.setItemsBackgroundColor(getThemedColor(Theme.key_player_actionBarSelector), false); @@ -673,48 +677,68 @@ public CharSequence getContentDescription() { durationTextView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO); playerLayout.addView(durationTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.RIGHT, 0, 96, 20, 0)); - playbackSpeedButton = new ActionBarMenuItem(context, null, 0, getThemedColor(Theme.key_dialogTextBlack), false, resourcesProvider); + playbackSpeedButton = new ActionBarMenuItem(context, null, 0, getThemedColor(Theme.key_player_time), false, resourcesProvider); playbackSpeedButton.setLongClickEnabled(false); playbackSpeedButton.setShowSubmenuByMove(false); playbackSpeedButton.setAdditionalYOffset(-AndroidUtilities.dp(224)); playbackSpeedButton.setContentDescription(LocaleController.getString("AccDescrPlayerSpeed", R.string.AccDescrPlayerSpeed)); playbackSpeedButton.setDelegate(id -> { - float oldSpeed = MediaController.getInstance().getPlaybackSpeed(true); - if (id == menu_speed_slow) { - MediaController.getInstance().setPlaybackSpeed(true, 0.5f); - } else if (id == menu_speed_normal) { - MediaController.getInstance().setPlaybackSpeed(true, 1.0f); - } else if (id == menu_speed_fast) { - MediaController.getInstance().setPlaybackSpeed(true, 1.5f); - } else { - MediaController.getInstance().setPlaybackSpeed(true, 1.8f); + if (id < 0 || id >= speeds.length) { + return; } - updatePlaybackButton(); + MediaController.getInstance().setPlaybackSpeed(true, speeds[id]); + updatePlaybackButton(true); }); 
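The playback-speed button above now drives a float presets array instead of four fixed menu ids: each submenu id indexes straight into speeds, and a short click (added in the following hunk) cycles through 1x, 1.5x and 2x via toggleSpeeds. A minimal standalone sketch of that cycling rule, with an illustrative helper name that is not part of the patch:

    // Illustrative only: pick the next toggle speed, wrapping back to 1x after 2x.
    static float nextToggleSpeed(float current) {
        final float[] toggleSpeeds = {1.0f, 1.5f, 2.0f};
        int index = -1;
        for (int i = 0; i < toggleSpeeds.length; i++) {
            // small tolerance so e.g. 1.48x still maps to the 1.5x slot
            if (current - 0.1f <= toggleSpeeds[i]) {
                index = i;
                break;
            }
        }
        index++;                          // advance to the next preset
        if (index >= toggleSpeeds.length) {
            index = 0;                    // wrap around to 1x
        }
        return toggleSpeeds[index];
    }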
- speedItems[0] = playbackSpeedButton.addSubItem(menu_speed_slow, R.drawable.msg_speed_0_5, LocaleController.getString("SpeedSlow", R.string.SpeedSlow)); - speedItems[1] = playbackSpeedButton.addSubItem(menu_speed_normal, R.drawable.msg_speed_1, LocaleController.getString("SpeedNormal", R.string.SpeedNormal)); - speedItems[2] = playbackSpeedButton.addSubItem(menu_speed_fast, R.drawable.msg_speed_1_5, LocaleController.getString("SpeedFast", R.string.SpeedFast)); - speedItems[3] = playbackSpeedButton.addSubItem(menu_speed_veryfast, R.drawable.msg_speed_2, LocaleController.getString("SpeedVeryFast", R.string.SpeedVeryFast)); + playbackSpeedButton.setIcon(speedIcon = new SpeedIconDrawable(true)); + final float[] toggleSpeeds = new float[] { 1.0F, 1.5F, 2F }; + speedSlider = new ActionBarMenuSlider.SpeedSlider(getContext(), resourcesProvider); + speedSlider.setRoundRadiusDp(6); + speedSlider.setDrawShadow(true); + speedSlider.setOnValueChange((value, isFinal) -> { + slidingSpeed = !isFinal; + MediaController.getInstance().setPlaybackSpeed(true, speedSlider.getSpeed(value)); + }); + speedItems[0] = playbackSpeedButton.addSubItem(0, R.drawable.msg_speed_slow, LocaleController.getString("SpeedSlow", R.string.SpeedSlow)); + speedItems[1] = playbackSpeedButton.addSubItem(1, R.drawable.msg_speed_normal, LocaleController.getString("SpeedNormal", R.string.SpeedNormal)); + speedItems[2] = playbackSpeedButton.addSubItem(2, R.drawable.msg_speed_medium, LocaleController.getString("SpeedMedium", R.string.SpeedMedium)); + speedItems[3] = playbackSpeedButton.addSubItem(3, R.drawable.msg_speed_fast, LocaleController.getString("SpeedFast", R.string.SpeedFast)); + speedItems[4] = playbackSpeedButton.addSubItem(4, R.drawable.msg_speed_veryfast, LocaleController.getString("SpeedVeryFast", R.string.SpeedVeryFast)); + speedItems[5] = playbackSpeedButton.addSubItem(5, R.drawable.msg_speed_superfast, LocaleController.getString("SpeedSuperFast", R.string.SpeedSuperFast)); if (AndroidUtilities.density >= 3.0f) { playbackSpeedButton.setPadding(0, 1, 0, 0); } playbackSpeedButton.setAdditionalXOffset(AndroidUtilities.dp(8)); + playbackSpeedButton.setAdditionalYOffset(-AndroidUtilities.dp(400)); playbackSpeedButton.setShowedFromBottom(true); playerLayout.addView(playbackSpeedButton, LayoutHelper.createFrame(36, 36, Gravity.TOP | Gravity.RIGHT, 0, 86, 20, 0)); playbackSpeedButton.setOnClickListener(v -> { float currentPlaybackSpeed = MediaController.getInstance().getPlaybackSpeed(true); - if (Math.abs(currentPlaybackSpeed - 1.0f) > 0.001f) { - MediaController.getInstance().setPlaybackSpeed(true, 1.0f); - } else { - MediaController.getInstance().setPlaybackSpeed(true, MediaController.getInstance().getFastPlaybackSpeed(true)); + int index = -1; + for (int i = 0; i < toggleSpeeds.length; ++i) { + if (currentPlaybackSpeed - 0.1F <= toggleSpeeds[i]) { + index = i; + break; + } + } + index++; + if (index >= toggleSpeeds.length) { + index = 0; } - updatePlaybackButton(); + MediaController.getInstance().setPlaybackSpeed(true, toggleSpeeds[index]); + + checkSpeedHint(); }); playbackSpeedButton.setOnLongClickListener(view -> { - playbackSpeedButton.toggleSubMenu(); + final float speed = MediaController.getInstance().getPlaybackSpeed(true); + speedSlider.setSpeed(speed, false); + speedSlider.setBackgroundColor(Theme.getColor(Theme.key_actionBarDefaultSubmenuBackground, resourcesProvider)); + updatePlaybackButton(false); + playbackSpeedButton.setDimMenu(.15f); + playbackSpeedButton.toggleSubMenu(speedSlider, null); + 
MessagesController.getGlobalNotificationsSettings().edit().putInt("speedhint", -15).apply(); return true; }); - updatePlaybackButton(); + updatePlaybackButton(false); FrameLayout bottomView = new FrameLayout(context) { @Override @@ -1345,6 +1369,45 @@ private void setMenuItemChecked(ActionBarMenuSubItem item, boolean checked) { } } + private HintView speedHintView; + private long lastPlaybackClick; + + private void checkSpeedHint() { + final long now = System.currentTimeMillis(); + if (now - lastPlaybackClick > 300) { + int hintValue = MessagesController.getGlobalNotificationsSettings().getInt("speedhint", 0); + hintValue++; + if (hintValue > 2) { + hintValue = -10; + } + MessagesController.getGlobalNotificationsSettings().edit().putInt("speedhint", hintValue).apply(); + if (hintValue >= 0) { + showSpeedHint(); + } + } + lastPlaybackClick = now; + } + + private void showSpeedHint() { + if (containerView != null) { + speedHintView = new HintView(getContext(), 5, false) { + @Override + public void setVisibility(int visibility) { + super.setVisibility(visibility); + if (visibility != View.VISIBLE) { + try { + ((ViewGroup) getParent()).removeView(this); + } catch (Exception e) {} + } + } + }; + speedHintView.setExtraTranslationY(AndroidUtilities.dp(6)); + speedHintView.setText(LocaleController.getString("SpeedHint")); + playerLayout.addView(speedHintView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP, 0, 0, 6, 0)); + speedHintView.showForView(playbackSpeedButton, true); + } + } + private void updateSubMenu() { setMenuItemChecked(shuffleListItem, SharedConfig.shuffleMusic); setMenuItemChecked(reverseOrderItem, SharedConfig.playOrderReversed); @@ -1352,35 +1415,40 @@ private void updateSubMenu() { setMenuItemChecked(repeatSongItem, SharedConfig.repeatMode == 2); } - private void updatePlaybackButton() { - float currentPlaybackSpeed = MediaController.getInstance().getPlaybackSpeed(true); - String key; - if (Math.abs(currentPlaybackSpeed - 1.0f) > 0.001f) { - key = Theme.key_inappPlayerPlayPause; - } else { - key = Theme.key_inappPlayerClose; - } - playbackSpeedButton.setTag(key); - float speed = MediaController.getInstance().getFastPlaybackSpeed(true); - if (Math.abs(speed - 1.8f) < 0.001f) { - playbackSpeedButton.setIcon(R.drawable.voice_mini_2_0); - } else if (Math.abs(speed - 1.5f) < 0.001f) { - playbackSpeedButton.setIcon(R.drawable.voice_mini_1_5); - } else { - playbackSpeedButton.setIcon(R.drawable.voice_mini_0_5); - } - playbackSpeedButton.setIconColor(getThemedColor(key)); - if (Build.VERSION.SDK_INT >= 21) { - playbackSpeedButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(key) & 0x19ffffff, 1, AndroidUtilities.dp(14))); + private boolean equals(float a, float b) { + return Math.abs(a - b) < 0.05f; + } + + private void updatePlaybackButton(boolean animated) { + if (playbackSpeedButton == null) { + return; } + float currentPlaybackSpeed = MediaController.getInstance().getPlaybackSpeed(true); + speedIcon.setValue(currentPlaybackSpeed, animated); + speedSlider.setSpeed(currentPlaybackSpeed, animated); + updateColors(); + + boolean isFinal = !slidingSpeed; + slidingSpeed = false; + for (int a = 0; a < speedItems.length; a++) { - if (a == 0 && Math.abs(currentPlaybackSpeed - 0.5f) < 0.001f || - a == 1 && Math.abs(currentPlaybackSpeed - 1.0f) < 0.001f || - a == 2 && Math.abs(currentPlaybackSpeed - 1.5f) < 0.001f || - a == 3 && Math.abs(currentPlaybackSpeed - 1.8f) < 0.001f) { - 
speedItems[a].setColors(getThemedColor(Theme.key_inappPlayerPlayPause), getThemedColor(Theme.key_inappPlayerPlayPause)); + if (isFinal && equals(currentPlaybackSpeed, speeds[a])) { + speedItems[a].setColors(getThemedColor(Theme.key_featuredStickers_addButtonPressed), getThemedColor(Theme.key_featuredStickers_addButtonPressed)); } else { - speedItems[a].setColors(getThemedColor(Theme.key_actionBarDefaultSubmenuItem), getThemedColor(Theme.key_actionBarDefaultSubmenuItemIcon)); + speedItems[a].setColors(getThemedColor(Theme.key_actionBarDefaultSubmenuItem), getThemedColor(Theme.key_actionBarDefaultSubmenuItem)); + } + } + } + + public void updateColors() { + if (playbackSpeedButton != null) { + float currentPlaybackSpeed = MediaController.getInstance().getPlaybackSpeed(true); + final int color = getThemedColor(!equals(currentPlaybackSpeed, 1.0f) ? Theme.key_featuredStickers_addButtonPressed : Theme.key_inappPlayerClose); + if (speedIcon != null) { + speedIcon.setColor(color); + } + if (Build.VERSION.SDK_INT >= 21) { + playbackSpeedButton.setBackground(Theme.createSelectorDrawable(color & 0x19ffffff, 1, AndroidUtilities.dp(14))); } } } @@ -1396,11 +1464,11 @@ private void onSubItemClick(int id) { } Bundle args = new Bundle(); args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 3); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_FORWARD); DialogsActivity fragment = new DialogsActivity(args); final ArrayList<MessageObject> fmessages = new ArrayList<>(); fmessages.add(messageObject); - fragment.setDelegate((fragment1, dids, message, param) -> { + fragment.setDelegate((fragment1, dids, message, param, topicsFragment) -> { if (dids.size() > 1 || dids.get(0).dialogId == UserConfig.getInstance(currentAccount).getClientUserId() || message != null) { for (int a = 0; a < dids.size(); a++) { long did = dids.get(a).dialogId; @@ -1429,6 +1497,7 @@ private void onSubItemClick(int id) { fragment1.finishFragment(); } } + return true; }); parentActivity.presentFragment(fragment); dismiss(); @@ -1622,6 +1691,8 @@ public void didReceivedNotification(int id, int account, Object...
args) { if (messageObject != null && (messageObject.isMusic() || messageObject.isVoice())) { updateProgress(messageObject); } + } else if (id == NotificationCenter.messagePlayingSpeedChanged) { + updatePlaybackButton(true); } else if (id == NotificationCenter.musicDidLoad) { playlist = MediaController.getInstance().getPlaylist(); listAdapter.notifyDataSetChanged(); @@ -1755,6 +1826,7 @@ public void dismiss() { NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoadProgressChanged); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.musicDidLoad); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.moreMusicDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.messagePlayingSpeedChanged); DownloadController.getInstance(currentAccount).removeLoadingFileObserver(this); } @@ -2244,7 +2316,7 @@ public ArrayList getThemeDescriptions() { optionsButton.redrawPopup(getThemedColor(Theme.key_actionBarDefaultSubmenuBackground)); }; - themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_player_actionBar)); + themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_dialogBackground)); themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_ITEMSCOLOR, null, null, null, delegate, Theme.key_player_actionBarTitle)); themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_TITLECOLOR, null, null, null, null, Theme.key_player_actionBarTitle)); themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_SUBTITLECOLOR, null, null, null, null, Theme.key_player_actionBarTitle)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioVisualizerDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioVisualizerDrawable.java index fe6b32ff88..1ffb415b19 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioVisualizerDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioVisualizerDrawable.java @@ -5,6 +5,7 @@ import android.view.View; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.SharedConfig; import org.telegram.ui.ActionBar.Theme; @@ -52,7 +53,7 @@ public AudioVisualizerDrawable() { public void setWaveform(boolean playing, boolean animate, float[] waveform) { - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(LiteMode.FLAG_CHAT_BACKGROUND)) { return; } if (!playing && !animate) { @@ -114,7 +115,7 @@ public void setWaveform(boolean playing, boolean animate, float[] waveform) { float rotation; public void draw(Canvas canvas, float cx, float cy, boolean outOwner, float alpha, Theme.ResourcesProvider resourcesProvider) { - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(LiteMode.FLAG_CHAT_BACKGROUND)) { return; } if (outOwner) { @@ -128,7 +129,7 @@ public void draw(Canvas canvas, float cx, float cy, boolean outOwner, float alph } public void draw(Canvas canvas, float cx, float cy, boolean outOwner, Theme.ResourcesProvider resourcesProvider) { - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(LiteMode.FLAG_CHAT_BACKGROUND)) { return; } if (outOwner) { @@ -142,7 +143,7 @@ public void draw(Canvas canvas, float cx, float cy, boolean outOwner, Theme.Reso } public void 
draw(Canvas canvas, float cx, float cy) { - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(LiteMode.FLAG_CHAT_BACKGROUND)) { return; } for (int i = 0; i < 8; i++) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AutoDeletePopupWrapper.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AutoDeletePopupWrapper.java index 0c3ad1450b..0409725fa8 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AutoDeletePopupWrapper.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AutoDeletePopupWrapper.java @@ -72,7 +72,7 @@ public AutoDeletePopupWrapper(Context context, PopupSwipeBackLayout swipeBackLay callback.setAutoDeleteHistory(0, UndoView.ACTION_AUTO_DELETE_OFF); }); if (type != TYPE_GROUP_CREATE) { - disableItem.setColors(Theme.getColor(Theme.key_dialogTextRed2), Theme.getColor(Theme.key_dialogTextRed2)); + disableItem.setColors(Theme.getColor(Theme.key_dialogTextRed), Theme.getColor(Theme.key_dialogTextRed)); } if (type != TYPE_GROUP_CREATE) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarConstructorFragment.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarConstructorFragment.java new file mode 100644 index 0000000000..56737fea88 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarConstructorFragment.java @@ -0,0 +1,1441 @@ +package org.telegram.ui.Components; + +import static org.telegram.ui.Components.ImageUpdater.FOR_TYPE_CHANNEL; +import static org.telegram.ui.Components.ImageUpdater.FOR_TYPE_GROUP; +import static org.telegram.ui.Components.ImageUpdater.TYPE_SUGGEST_PHOTO_FOR_USER; + +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.ValueAnimator; +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.Path; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.Rect; +import android.graphics.drawable.BitmapDrawable; +import android.graphics.drawable.Drawable; +import android.os.Build; +import android.util.TypedValue; +import android.view.Gravity; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.widget.FrameLayout; +import android.widget.LinearLayout; +import android.widget.TextView; + +import androidx.annotation.NonNull; +import androidx.core.content.ContextCompat; +import androidx.core.graphics.ColorUtils; +import androidx.core.view.NestedScrollingParent; +import androidx.core.view.NestedScrollingParentHelper; +import androidx.recyclerview.widget.LinearLayoutManager; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.DocumentObject; +import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.MediaDataController; +import org.telegram.messenger.NotificationCenter; +import org.telegram.messenger.R; +import org.telegram.messenger.SvgHelper; +import org.telegram.messenger.Utilities; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.ActionBar; +import org.telegram.ui.ActionBar.ActionBarMenu; +import org.telegram.ui.ActionBar.ActionBarMenuItem; +import org.telegram.ui.ActionBar.AdjustPanLayoutHelper; +import org.telegram.ui.ActionBar.AlertDialog; +import org.telegram.ui.ActionBar.BackDrawable; +import 
org.telegram.ui.ActionBar.BaseFragment; +import org.telegram.ui.ActionBar.BottomSheet; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.SelectAnimatedEmojiDialog; + +import java.util.ArrayList; +import java.util.Objects; + +public class AvatarConstructorFragment extends BaseFragment { + + public final static float STICKER_DEFAULT_SCALE = 0.7f; + public final static float STICKER_DEFAULT_ROUND_RADIUS = 0.13f; + PreviewView previewView; + private SelectAnimatedEmojiDialog selectAnimatedEmojiDialog; + + int collapsedHeight; + int expandedHeight; + View colorPickerPreviewView; + boolean colorPickerInAnimatoin; + boolean drawForBlur; + boolean wasChanged; + LinearLayout linearLayout; + + boolean forGroup; + private FrameLayout button; + + float progressToExpand; + boolean expandWithKeyboard; + ValueAnimator expandAnimator; + + protected ActionBar overlayActionBar; + + Delegate delegate; + private BackgroundSelectView backgroundSelectView; + CanvasButton avatarClickableArea; + boolean keyboardVisible; + + ValueAnimator keyboardVisibilityAnimator; + float keyboardVisibleProgress; + Paint actionBarPaint = new Paint(); + private int gradientBackgroundItemWidth; + + public static final int[][] defaultColors = new int[][]{ + new int[]{0xFF4D8DFF, 0xFF2BBFFF, 0xFF20E2CD, 0xFF0EE1F1}, + new int[]{0xFF5EB6FB, 0xFF1FCEEB, 0xFF45F7B7, 0xFF1FF1D9}, + new int[]{0xFF09D260, 0xFF5EDC40, 0xFFC1E526, 0xFF80DF2B}, + new int[]{0xFFF5694E, 0xFFF5772C, 0xFFFFD412, 0xFFFFA743}, + new int[]{0xFFF64884, 0xFFEF5B41, 0xFFF6A730, 0xFFFF7742}, + new int[]{0xFFF94BA0, 0xFFFB5C80, 0xFFFFB23A, 0xFFFE7E62}, + new int[]{0xFF837CFF, 0xFFB063FF, 0xFFFF72A9, 0xFFE269FF} + }; + public boolean finishOnDone = true; + private ActionBarMenuItem setPhotoItem; + private BottomSheet bottomSheet; + final ImageUpdater.AvatarFor avatarFor; + boolean isLandscapeMode; + private TextView chooseEmojiHint; + private TextView chooseBackgroundHint; + ImageUpdater imageUpdater; + + public AvatarConstructorFragment(ImageUpdater imageUpdater, ImageUpdater.AvatarFor avatarFor) { + this.imageUpdater = imageUpdater; + this.avatarFor = avatarFor; + } + + @Override + public View createView(Context context) { + hasOwnBackground = true; + actionBar.setBackgroundDrawable(null); + actionBar.setCastShadows(false); + actionBar.setAddToContainer(false); + actionBar.setOccupyStatusBar(true); + actionBar.setTitleColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); + actionBar.setItemsColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText), false); + actionBar.setItemsBackgroundColor(Theme.getColor(Theme.key_listSelector), false); + actionBar.setBackButtonDrawable(new BackDrawable(false)); + actionBar.setAllowOverlayTitle(false); + actionBar.setTitle(LocaleController.getString("PhotoEditor", R.string.PhotoEditor)); + actionBar.setActionBarMenuOnItemClick(new ActionBar.ActionBarMenuOnItemClick() { + @Override + public void onItemClick(int id) { + if (id == -1) { + discardEditor(); + } + } + }); + actionBar.getTitleTextView().setAlpha(0); + + overlayActionBar = new ActionBar(getContext()); + overlayActionBar.setCastShadows(false); + overlayActionBar.setAddToContainer(false); + overlayActionBar.setOccupyStatusBar(true); + overlayActionBar.setClipChildren(false); + int selectorColor = ColorUtils.setAlphaComponent(Color.WHITE, 60); + overlayActionBar.setItemsColor(Color.WHITE, false); + + overlayActionBar.setBackButtonDrawable(new BackDrawable(false)); + overlayActionBar.setAllowOverlayTitle(false); + 
overlayActionBar.setItemsBackgroundColor(selectorColor, false); + ActionBarMenu menuOverlay = overlayActionBar.createMenu(); + menuOverlay.setClipChildren(false); + setPhotoItem = menuOverlay.addItem(1, avatarFor != null && avatarFor.type == TYPE_SUGGEST_PHOTO_FOR_USER ? + LocaleController.getString("SuggestPhoto", R.string.SuggestPhoto) : + LocaleController.getString("SetPhoto", R.string.SetPhoto) + ); + setPhotoItem.setBackground(Theme.createSelectorDrawable(selectorColor, Theme.RIPPLE_MASK_CIRCLE_TO_BOUND_EDGE)); + overlayActionBar.setActionBarMenuOnItemClick(new ActionBar.ActionBarMenuOnItemClick() { + @Override + public void onItemClick(int id) { + if (id == -1) { + discardEditor(); + } + if (id == 1) { + onDonePressed(); + } + } + }); + + linearLayout = new LinearLayout(getContext()) { + @Override + protected boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (child == previewView) { + return true; + } + return super.drawChild(canvas, child, drawingTime); + } + }; + + ContainerLayout nestedSizeNotifierLayout = new ContainerLayout(context) { + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + measureKeyboardHeight(); + boolean isLandscapeModeLocal = MeasureSpec.getSize(widthMeasureSpec) > (MeasureSpec.getSize(heightMeasureSpec) + keyboardHeight); + if (isLandscapeModeLocal != isLandscapeMode) { + isLandscapeMode = isLandscapeModeLocal; + AndroidUtilities.removeFromParent(previewView); + AndroidUtilities.requestAdjustNothing(getParentActivity(), getClassGuid()); + if (isLandscapeMode) { + setProgressToExpand(0, false); + previewView.setExpanded(false); + addView(previewView, 0, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + } else { + linearLayout.addView(previewView, 0, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + } + AndroidUtilities.requestAdjustResize(getParentActivity(), getClassGuid()); + } + if (isLandscapeMode) { + int avatarWidth = (int) (MeasureSpec.getSize(widthMeasureSpec) * 0.45f); + int contentWidth = (int) (MeasureSpec.getSize(widthMeasureSpec) * 0.55f); + ((MarginLayoutParams) linearLayout.getLayoutParams()).bottomMargin = 0; + ((MarginLayoutParams) linearLayout.getLayoutParams()).leftMargin = avatarWidth; + ((MarginLayoutParams) previewView.getLayoutParams()).rightMargin = contentWidth; + ((MarginLayoutParams) button.getLayoutParams()).rightMargin = contentWidth + AndroidUtilities.dp(16); + ((MarginLayoutParams) chooseBackgroundHint.getLayoutParams()).topMargin = 0; + ((MarginLayoutParams) chooseEmojiHint.getLayoutParams()).topMargin = AndroidUtilities.dp(10); + } else { + ((MarginLayoutParams) linearLayout.getLayoutParams()).bottomMargin = AndroidUtilities.dp(64); + ((MarginLayoutParams) linearLayout.getLayoutParams()).leftMargin = 0; + ((MarginLayoutParams) previewView.getLayoutParams()).rightMargin = 0; + ((MarginLayoutParams) button.getLayoutParams()).rightMargin = AndroidUtilities.dp(16); + ((MarginLayoutParams) chooseBackgroundHint.getLayoutParams()).topMargin = AndroidUtilities.dp(10); + ((MarginLayoutParams) chooseEmojiHint.getLayoutParams()).topMargin = AndroidUtilities.dp(18); + } + boolean oldKeyboardVisible = keyboardVisible; + keyboardVisible = keyboardHeight >= AndroidUtilities.dp(20); + + if (oldKeyboardVisible != keyboardVisible) { + int newMargin; + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + if (keyboardVisible) { + newMargin = -selectAnimatedEmojiDialog.getTop() + actionBar.getMeasuredHeight() + 
AndroidUtilities.dp(8); + } else { + newMargin = 0; + } + linearLayout.setTranslationY(linearLayout.getTranslationY() + ((MarginLayoutParams) linearLayout.getLayoutParams()).topMargin - newMargin); + ((MarginLayoutParams) linearLayout.getLayoutParams()).topMargin = newMargin; + createKeyboardVisibleAnimator(keyboardVisible); + } + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + + collapsedHeight = previewView.getMeasuredHeight(); + expandedHeight = previewView.getMeasuredWidth(); + } + + @Override + protected boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (child == overlayActionBar) { + return true; + } + if (child == actionBar && keyboardVisibleProgress > 0) { + actionBarPaint.setColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + actionBarPaint.setAlpha((int) (255 * keyboardVisibleProgress)); + canvas.drawRect(0, 0, child.getMeasuredWidth(), child.getMeasuredHeight(), actionBarPaint); + getParentLayout().drawHeaderShadow(canvas, (int) (255 * keyboardVisibleProgress), child.getMeasuredHeight()); + } + return super.drawChild(canvas, child, drawingTime); + } + + @Override + protected void dispatchDraw(Canvas canvas) { + int count = canvas.save(); + super.dispatchDraw(canvas); + if (!isLandscapeMode) { + if (!drawForBlur) { + canvas.save(); + float x = linearLayout.getX() + previewView.getX(); + float y = linearLayout.getY() + previewView.getY(); + int additionalH = expandedHeight - collapsedHeight; + int yKeyboardVisible = AndroidUtilities.statusBarHeight + ((ActionBar.getCurrentActionBarHeight() - collapsedHeight) >> 1); + y = AndroidUtilities.lerp(y, yKeyboardVisible, keyboardVisibleProgress); + canvas.translate(x, y); + previewView.draw(canvas); + AndroidUtilities.rectTmp.set(x, y - additionalH / 2f * progressToExpand, x + previewView.getMeasuredWidth(), y + previewView.getMeasuredHeight() + additionalH / 2f * progressToExpand); + float cx = x + previewView.cx; + float cy = y + previewView.cy; + avatarClickableArea.setRect((int) (cx - previewView.size), (int) (cy - previewView.size), (int) (cx + previewView.size), (int) (cy + previewView.size)); + canvas.restore(); + } + canvas.restoreToCount(count); + + + float alpha = previewView.expandProgress.get() * (1f - (colorPickerPreviewView.getVisibility() == View.VISIBLE ? 
colorPickerPreviewView.getAlpha() : 0)); + if (alpha != 0) { + overlayActionBar.setVisibility(View.VISIBLE); + count = canvas.save(); + canvas.translate(overlayActionBar.getX(), overlayActionBar.getY()); + + if (alpha != 1) { + canvas.saveLayerAlpha(0, 0, overlayActionBar.getMeasuredWidth(), overlayActionBar.getMeasuredHeight(), (int) (255 * alpha), Canvas.ALL_SAVE_FLAG); + } + overlayActionBar.draw(canvas); + canvas.restoreToCount(count); + } else { + overlayActionBar.setVisibility(View.GONE); + } + } + if (colorPickerInAnimatoin) { + invalidate(); + } + } + + @Override + public boolean onInterceptTouchEvent(MotionEvent ev) { + if (keyboardVisibleProgress == 0 && AndroidUtilities.findClickableView(this, ev.getX(), ev.getY())) { + return false; + } + return onTouchEvent(ev); + } + + boolean maybeScroll; + boolean isScrolling; + float startFromProgressToExpand; + float scrollFromX, scrollFromY; + + @Override + public boolean onTouchEvent(MotionEvent event) { + if (avatarClickableArea.checkTouchEvent(event)) { + return true; + } + + if (!isLandscapeMode) { + if (event.getAction() == MotionEvent.ACTION_DOWN) { + selectAnimatedEmojiDialog.getHitRect(AndroidUtilities.rectTmp2); + AndroidUtilities.rectTmp2.offset(0, (int) linearLayout.getY()); + if (keyboardVisibleProgress == 0 && !AndroidUtilities.rectTmp2.contains((int) event.getX(), (int) event.getY())) { + maybeScroll = true; + scrollFromX = event.getX(); + scrollFromY = event.getY(); + } + } else if (event.getAction() == MotionEvent.ACTION_MOVE && (maybeScroll || isScrolling)) { + if (maybeScroll) { + if (Math.abs(scrollFromY - event.getY()) > AndroidUtilities.touchSlop) { + maybeScroll = false; + isScrolling = true; + startFromProgressToExpand = progressToExpand; + scrollFromX = event.getX(); + scrollFromY = event.getY(); + } + } else { + float dy = scrollFromY - event.getY(); + float progressToExpand = startFromProgressToExpand + (-dy / (float) expandedHeight); + progressToExpand = Utilities.clamp(progressToExpand, 1f, 0f); + setProgressToExpand(progressToExpand, true); + } + } else if (event.getAction() == MotionEvent.ACTION_UP || event.getAction() == MotionEvent.ACTION_CANCEL) { + if (isScrolling) { + setExpanded(progressToExpand > 0.5f, false, false); + } + maybeScroll = false; + isScrolling = false; + } + } + return isScrolling || super.onTouchEvent(event) || maybeScroll; + } + }; + nestedSizeNotifierLayout.setFitsSystemWindows(true); + nestedSizeNotifierLayout.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundGray)); + + + linearLayout.setClipChildren(false); + linearLayout.setClipToPadding(false); + linearLayout.setPadding(0, AndroidUtilities.statusBarHeight, 0, 0); + linearLayout.setOrientation(LinearLayout.VERTICAL); + linearLayout.addView(previewView = new PreviewView(getContext()) { + @Override + public void invalidate() { + super.invalidate(); + nestedSizeNotifierLayout.invalidate(); + } + }); + + chooseBackgroundHint = new TextView(getContext()); + chooseBackgroundHint.setText(LocaleController.getString("ChooseBackground", R.string.ChooseBackground)); + chooseBackgroundHint.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); + chooseBackgroundHint.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + chooseBackgroundHint.setGravity(Gravity.CENTER); + linearLayout.addView(chooseBackgroundHint, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 21, 10, 21, 10)); + + FrameLayout backgroundContainer = new FrameLayout(getContext()) { + + private Path path = new Path(); + 
private Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + + @Override + protected void dispatchDraw(Canvas canvas) { + Theme.applyDefaultShadow(paint); + paint.setColor(Theme.getColor(Theme.key_actionBarDefaultSubmenuBackground, getResourceProvider())); + paint.setAlpha((int) (255 * getAlpha())); + + AndroidUtilities.rectTmp.set( + 0, 0, getMeasuredWidth(), getMeasuredHeight() + ); + path.rewind(); + path.addRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(12), AndroidUtilities.dp(12), Path.Direction.CW); + canvas.drawPath(path, paint); + super.dispatchDraw(canvas); + } + }; + backgroundContainer.addView(backgroundSelectView = new BackgroundSelectView(getContext())); + linearLayout.addView(backgroundContainer, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 48, 0, 12, 0, 12, 0)); + + chooseEmojiHint = new TextView(getContext()); + chooseEmojiHint.setText(LocaleController.getString("ChooseEmojiOrSticker", R.string.ChooseEmojiOrSticker)); + chooseEmojiHint.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); + chooseEmojiHint.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + chooseEmojiHint.setGravity(Gravity.CENTER); + linearLayout.addView(chooseEmojiHint, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 21, 18, 21, 10)); + + selectAnimatedEmojiDialog = new SelectAnimatedEmojiDialog(this, getContext(), false, null, SelectAnimatedEmojiDialog.TYPE_AVATAR_CONSTRUCTOR, null) { + + private boolean firstLayout = true; + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + if (firstLayout) { + firstLayout = false; + selectAnimatedEmojiDialog.onShow(null); + } + } + + protected void onEmojiSelected(View view, Long documentId, TLRPC.Document document, Integer until) { + long docId = documentId == null ? 
0 : documentId; + setPreview(docId, document); + } + }; + selectAnimatedEmojiDialog.forUser = !forGroup; + + selectAnimatedEmojiDialog.setAnimationsEnabled(fragmentBeginToShow); + selectAnimatedEmojiDialog.setClipChildren(false); + linearLayout.addView(selectAnimatedEmojiDialog, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, 0, 12, 0, 12, 12)); + + linearLayout.setClipChildren(false); + nestedSizeNotifierLayout.addView(linearLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, 0, 0, 0, 0, 64)); + + colorPickerPreviewView = new View(getContext()); + colorPickerPreviewView.setVisibility(View.GONE); + + + button = new FrameLayout(getContext()); + button.setBackground(Theme.AdaptiveRipple.filledRect(Theme.key_featuredStickers_addButton, 8)); + + TextView textView = new TextView(getContext()); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + if (imageUpdater.setForType == FOR_TYPE_CHANNEL) { + textView.setText(LocaleController.getString("SetChannelPhoto", R.string.SetChannelPhoto)); + } else if (imageUpdater.setForType == FOR_TYPE_GROUP) { + textView.setText(LocaleController.getString("SetGroupPhoto", R.string.SetGroupPhoto)); + } else if (avatarFor != null && avatarFor.type == TYPE_SUGGEST_PHOTO_FOR_USER) { + textView.setText(LocaleController.getString("SuggestPhoto", R.string.SuggestPhoto)); + } else { + textView.setText(LocaleController.getString("SetProfilePhotoAvatarConstructor", R.string.SetProfilePhotoAvatarConstructor)); + } + textView.setGravity(Gravity.CENTER); + textView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + textView.setTextColor(Theme.getColor(Theme.key_featuredStickers_buttonText)); + button.addView(textView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + button.setOnClickListener(v -> onDonePressed()); + + nestedSizeNotifierLayout.addView(button, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.BOTTOM, 16, 16, 16, 16)); + nestedSizeNotifierLayout.addView(actionBar); + + nestedSizeNotifierLayout.addView(overlayActionBar); + nestedSizeNotifierLayout.addView(colorPickerPreviewView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + + avatarClickableArea = new CanvasButton(nestedSizeNotifierLayout); + avatarClickableArea.setDelegate(() -> { + onPreviewClick(); + }); + fragmentView = nestedSizeNotifierLayout; + return fragmentView; + } + + private void setPreview(long docId, TLRPC.Document document) { + previewView.documentId = docId; + previewView.document = document; + if (docId == 0) { + previewView.backupImageView.setAnimatedEmojiDrawable(null); + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(document, Theme.key_windowBackgroundWhiteGrayIcon, 0.2f); + previewView.backupImageView.getImageReceiver().setImage(ImageLocation.getForDocument(document), "100_100", null, null, svgThumb, 0, "tgs", document, 0); + } else { + previewView.backupImageView.setAnimatedEmojiDrawable(new AnimatedEmojiDrawable(AnimatedEmojiDrawable.CACHE_TYPE_AVATAR_CONSTRUCTOR_PREVIEW, currentAccount, docId)); + previewView.backupImageView.getImageReceiver().clearImage(); + } + if (previewView.getImageReceiver() != null && previewView.getImageReceiver().getAnimation() != null) { + previewView.getImageReceiver().getAnimation().seekTo(0, true); + } + if (previewView.getImageReceiver() != null && previewView.getImageReceiver().getLottieAnimation() != null) { + 
previewView.getImageReceiver().getLottieAnimation().setCurrentFrame(0, false, true); + } + wasChanged = true; + } + + private void discardEditor() { + if (getParentActivity() == null) { + return; + } + if (wasChanged) { + AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity()); + builder.setMessage(LocaleController.getString("PhotoEditorDiscardAlert", R.string.PhotoEditorDiscardAlert)); + builder.setTitle(LocaleController.getString("DiscardChanges", R.string.DiscardChanges)); + builder.setPositiveButton(LocaleController.getString("PassportDiscard", R.string.PassportDiscard), (dialogInterface, i) -> finishFragment()); + builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); + AlertDialog dialog = builder.create(); + showDialog(dialog); + dialog.redPositive(); + } else { + finishFragment(); + } + } + + private void createKeyboardVisibleAnimator(boolean keyboardVisible) { + if (isLandscapeMode) { + return; + } + keyboardVisibilityAnimator = ValueAnimator.ofFloat(keyboardVisibleProgress, keyboardVisible ? 1f : 0f); + float offsetY = (expandedHeight - collapsedHeight - AndroidUtilities.statusBarHeight) * progressToExpand; + float translationYFrom, translationYTo; + if (keyboardVisible) { + previewView.setExpanded(false); + translationYFrom = linearLayout.getTranslationY(); + translationYTo = 0; + } else { + translationYFrom = offsetY; + translationYTo = linearLayout.getTranslationY(); + } + if (expandWithKeyboard && !keyboardVisible) { + previewView.setExpanded(true); + } else { + expandWithKeyboard = false; + } + keyboardVisibilityAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { + @Override + public void onAnimationUpdate(ValueAnimator animation) { + keyboardVisibleProgress = (float) animation.getAnimatedValue(); + float offset = AndroidUtilities.lerp(translationYFrom, translationYTo, keyboardVisibleProgress); + actionBar.getTitleTextView().setAlpha(keyboardVisibleProgress); + if (expandWithKeyboard && !keyboardVisible) { + setProgressToExpand(1f - keyboardVisibleProgress, false); + } + linearLayout.setTranslationY(offset); + button.setTranslationY(offset); + fragmentView.invalidate(); + actionBar.invalidate(); + } + }); + keyboardVisibilityAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + setProgressToExpand(expandWithKeyboard ? 1f : 0f, false); + expandWithKeyboard = false; + } + }); + keyboardVisibilityAnimator.setDuration(AdjustPanLayoutHelper.keyboardDuration); + keyboardVisibilityAnimator.setInterpolator(AdjustPanLayoutHelper.keyboardInterpolator); + keyboardVisibilityAnimator.start(); + } + + private void onDonePressed() { + if (previewView.getImageReceiver() == null || !previewView.getImageReceiver().hasImageLoaded()) { + return; + } + if (delegate != null) { + delegate.onDone(previewView.backgroundGradient, previewView.documentId, previewView.document, previewView); + } + if (finishOnDone) { + finishFragment(); + } + } + + + private void setExpanded(boolean expanded, boolean fromClick, boolean withColorPicker) { + if (isLandscapeMode) { + return; + } +// if (this.expanded != expanded) { +// this.expanded = expanded; + cancelExpandAnimator(); + expandAnimator = ValueAnimator.ofFloat(progressToExpand, expanded ? 
1f : 0f); + if (fromClick) { + previewView.overrideExpandProgress = progressToExpand; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.needCheckSystemBarColors); + } + } + expandAnimator.addUpdateListener(animation -> { + float progress = (float) animation.getAnimatedValue(); + setProgressToExpand(progress, false); + if (fromClick) { + previewView.overrideExpandProgress = progress; + previewView.invalidate(); + } + }); + expandAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + expandAnimator = null; + setProgressToExpand(expanded ? 1f : 0f, false); + if (fromClick) { + previewView.overrideExpandProgress = -1; + previewView.setExpanded(expanded); + } + + } + }); + if (withColorPicker) { + expandAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + expandAnimator.setDuration(350); + expandAnimator.setStartDelay(150); + } else { + expandAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + expandAnimator.setDuration(250); + } + expandAnimator.start(); + // } + } + + void cancelExpandAnimator() { + if (expandAnimator != null) { + expandAnimator.removeAllListeners(); + expandAnimator.cancel(); + expandAnimator = null; + } + } + + private void setProgressToExpand(float progressToExpand, boolean fromScroll) { + this.progressToExpand = progressToExpand; + + float offsetY = (expandedHeight - collapsedHeight - AndroidUtilities.statusBarHeight) * progressToExpand; + if (keyboardVisibleProgress == 0) { + linearLayout.setTranslationY(offsetY); + button.setTranslationY(offsetY); + } + previewView.setTranslationY(-(expandedHeight - collapsedHeight) / 2f * progressToExpand); + fragmentView.invalidate(); + if (fromScroll) { + previewView.setExpanded(progressToExpand > 0.5f); + } + } + + public void startFrom(AvatarConstructorPreviewCell previewCell) { + BackgroundGradient gradient = previewCell.getBackgroundGradient(); + if (previewView == null) { + return; + } + previewView.setGradient(gradient); + if (previewCell.getAnimatedEmoji() != null) { + long docId = previewCell.getAnimatedEmoji().getDocumentId(); + previewView.documentId = docId; + previewView.backupImageView.setAnimatedEmojiDrawable(new AnimatedEmojiDrawable(AnimatedEmojiDrawable.CACHE_TYPE_AVATAR_CONSTRUCTOR_PREVIEW, currentAccount, docId)); + } + backgroundSelectView.selectGradient(gradient); + selectAnimatedEmojiDialog.setForUser(previewCell.forUser); + } + + public void startFrom(TLRPC.VideoSize emojiMarkup) { + BackgroundGradient gradient = new BackgroundGradient(); + gradient.color1 = ColorUtils.setAlphaComponent(emojiMarkup.background_colors.get(0), 255); + gradient.color2 = emojiMarkup.background_colors.size() > 1 ? ColorUtils.setAlphaComponent(emojiMarkup.background_colors.get(1), 255) : 0; + gradient.color3 = emojiMarkup.background_colors.size() > 2 ? ColorUtils.setAlphaComponent(emojiMarkup.background_colors.get(2), 255) : 0; + gradient.color4 = emojiMarkup.background_colors.size() > 3 ? 
ColorUtils.setAlphaComponent(emojiMarkup.background_colors.get(3), 255) : 0; + previewView.setGradient(gradient); + + + if (emojiMarkup instanceof TLRPC.TL_videoSizeEmojiMarkup) { + setPreview(((TLRPC.TL_videoSizeEmojiMarkup) emojiMarkup).emoji_id, null); + } else { + TLRPC.TL_videoSizeStickerMarkup stickerMarkup = new TLRPC.TL_videoSizeStickerMarkup(); + TLRPC.TL_messages_stickerSet set = MediaDataController.getInstance(currentAccount).getStickerSet(stickerMarkup.stickerset, false); + TLRPC.Document document = null; + if (set != null) { + for (int i = 0; i < set.documents.size(); i++) { + if (set.documents.get(i).id == stickerMarkup.sticker_id) { + document = set.documents.get(i); + } + } + } + setPreview(0, document); + } + backgroundSelectView.selectGradient(gradient); + selectAnimatedEmojiDialog.setForUser(true); + } + + public class PreviewView extends FrameLayout { + + public long documentId; + public TLRPC.Document document; + BackupImageView backupImageView; + GradientTools gradientTools = new GradientTools(); + GradientTools outGradientTools = new GradientTools(); + float changeBackgroundProgress = 1f; + BackgroundGradient backgroundGradient; + + AnimatedFloat expandProgress = new AnimatedFloat(this, 200, CubicBezierInterpolator.EASE_OUT); + boolean expanded; + float overrideExpandProgress = -1f; + private float size; + private float cx, cy; + + public PreviewView(Context context) { + super(context); + backupImageView = new BackupImageView(context) { + @Override + public void invalidate() { + super.invalidate(); + PreviewView.this.invalidate(); + } + + @Override + public void invalidate(Rect dirty) { + super.invalidate(dirty); + PreviewView.this.invalidate(); + } + + @Override + public void invalidate(int l, int t, int r, int b) { + super.invalidate(l, t, r, b); + PreviewView.this.invalidate(); + } + }; + backupImageView.getImageReceiver().setAutoRepeatCount(1); + backupImageView.getImageReceiver().setAspectFit(true); + setClipChildren(false); + addView(backupImageView, LayoutHelper.createFrame(70, 70, Gravity.CENTER)); + } + + public void setExpanded(boolean expanded) { + if (this.expanded == expanded) { + return; + } + this.expanded = expanded; + if (expanded) { + if (backupImageView.animatedEmojiDrawable != null && backupImageView.animatedEmojiDrawable.getImageReceiver() != null) { + backupImageView.animatedEmojiDrawable.getImageReceiver().startAnimation(); + } + backupImageView.imageReceiver.startAnimation(); + } + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.needCheckSystemBarColors); + } + invalidate(); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + if (isLandscapeMode) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } else { + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(140), MeasureSpec.EXACTLY)); + } + } + + @Override + protected void dispatchDraw(Canvas canvas) { + cx = getMeasuredWidth() / 2f; + cy = getMeasuredHeight() / 2f; + float radius = isLandscapeMode ? getMeasuredWidth() * 0.3f : AndroidUtilities.dp(50); + expandProgress.set(expanded ? 
1f : 0f); + if (overrideExpandProgress >= 0) { + expandProgress.set(overrideExpandProgress, true); + } + size = AndroidUtilities.lerp(radius, getMeasuredWidth() / 2f, expandProgress.get()); + size = AndroidUtilities.lerp(size, AndroidUtilities.dp(21), keyboardVisibleProgress); + cx = AndroidUtilities.lerp(cx, getMeasuredWidth() - AndroidUtilities.dp(12) - AndroidUtilities.dp(21), keyboardVisibleProgress); + + + canvas.save(); + int additionalH = expandedHeight - collapsedHeight; + canvas.clipRect(0, -additionalH / 2f, getMeasuredWidth(), getMeasuredHeight() + additionalH / 2f * progressToExpand); + if (backgroundGradient != null) { + gradientTools.setColors(backgroundGradient.color1, backgroundGradient.color2, backgroundGradient.color3, backgroundGradient.color4); + gradientTools.setBounds(cx - size, cy - size, cx + size, cy + size); + if (changeBackgroundProgress != 1f) { + outGradientTools.setBounds(cx - size, cy - size, cx + size, cy + size); + outGradientTools.paint.setAlpha(255); + drawBackround(canvas, cx, cy, radius, size, outGradientTools.paint); + gradientTools.paint.setAlpha((int) (255 * changeBackgroundProgress)); + drawBackround(canvas, cx, cy, radius, size, gradientTools.paint); + changeBackgroundProgress += 16 / 250f; + if (changeBackgroundProgress > 1f) { + changeBackgroundProgress = 1f; + } + invalidate(); + } else { + gradientTools.paint.setAlpha(255); + drawBackround(canvas, cx, cy, radius, size, gradientTools.paint); + } + } + int imageHeight = isLandscapeMode ? (int) (radius * 2 * STICKER_DEFAULT_SCALE) : AndroidUtilities.dp(70); + int imageHeightExpanded = (int) (getMeasuredWidth() * STICKER_DEFAULT_SCALE); + int imageHeightKeyboardVisible = (int) (AndroidUtilities.dp(42) * STICKER_DEFAULT_SCALE); + float imageSize = AndroidUtilities.lerp(imageHeight, imageHeightExpanded, expandProgress.get()); + imageSize = AndroidUtilities.lerp(imageSize, imageHeightKeyboardVisible, keyboardVisibleProgress); + imageSize /= 2; + if (backupImageView.animatedEmojiDrawable != null) { + if (backupImageView.animatedEmojiDrawable.getImageReceiver() != null) { + backupImageView.animatedEmojiDrawable.getImageReceiver().setRoundRadius((int) (imageSize * 2 * STICKER_DEFAULT_ROUND_RADIUS)); + } + backupImageView.animatedEmojiDrawable.setBounds((int) (cx - imageSize), (int) (cy - imageSize), (int) (cx + imageSize), (int) (cy + imageSize)); + backupImageView.animatedEmojiDrawable.draw(canvas); + + } else { + backupImageView.imageReceiver.setImageCoords(cx - imageSize, cy - imageSize, imageSize * 2, imageSize * 2); + backupImageView.imageReceiver.setRoundRadius((int) (imageSize * 2 * STICKER_DEFAULT_ROUND_RADIUS)); + backupImageView.imageReceiver.draw(canvas); + } + } + + private void drawBackround(Canvas canvas, float cx, float cy, float radius, float size, Paint paint) { + float p = expandProgress.get(); + if (p == 0) { + canvas.drawCircle(cx, cy, size, paint); + } else { + float roundRadius = AndroidUtilities.lerp(radius, 0, p); + AndroidUtilities.rectTmp.set(cx - size, cy - size, cx + size, cy + size); + canvas.drawRoundRect(AndroidUtilities.rectTmp, roundRadius, roundRadius, paint); + } + } + + public void setGradient(BackgroundGradient backgroundGradient) { + if (this.backgroundGradient != null) { + outGradientTools.setColors(this.backgroundGradient.color1, this.backgroundGradient.color2, this.backgroundGradient.color3, this.backgroundGradient.color4); + changeBackgroundProgress = 0f; + wasChanged = true; + } + this.backgroundGradient = backgroundGradient; + if (Build.VERSION.SDK_INT >= 
Build.VERSION_CODES.M) {
+                NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.needCheckSystemBarColors);
+            }
+            invalidate();
+        }
+
+        public long getDuration() {
+            ImageReceiver imageReceiver = backupImageView.getImageReceiver();
+            if (backupImageView.animatedEmojiDrawable != null) {
+                imageReceiver = backupImageView.animatedEmojiDrawable.getImageReceiver();
+            }
+            if (imageReceiver == null) {
+                return 5000;
+            }
+            if (imageReceiver.getLottieAnimation() != null) {
+                return imageReceiver.getLottieAnimation().getDuration();
+            }
+            return 5000;
+        }
+
+        public ImageReceiver getImageReceiver() {
+            ImageReceiver imageReceiver = backupImageView.getImageReceiver();
+            if (backupImageView.animatedEmojiDrawable != null) {
+                imageReceiver = backupImageView.animatedEmojiDrawable.getImageReceiver();
+            }
+            return imageReceiver;
+        }
+
+        public boolean hasAnimation() {
+            return getImageReceiver().getAnimation() != null || getImageReceiver().getLottieAnimation() != null;
+        }
+
+        @Override
+        public void invalidate() {
+            super.invalidate();
+            fragmentView.invalidate();
+        }
+    }
+
+    private class BackgroundSelectView extends RecyclerListView {
+
+        ArrayList<BackgroundGradient> gradients = new ArrayList<>();
+
+        int stableIdPointer = 200;
+
+        int selectedItemId = -1;
+
+        Adapter adapter;
+        BackgroundGradient customSelectedGradient;
+
+        public BackgroundSelectView(Context context) {
+            super(context);
+            LinearLayoutManager layoutManager = new LinearLayoutManager(context);
+            layoutManager.setOrientation(LinearLayoutManager.HORIZONTAL);
+            setLayoutManager(layoutManager);
+            for (int i = 0; i < defaultColors.length; i++) {
+                BackgroundGradient backgroundGradient = new BackgroundGradient();
+                backgroundGradient.stableId = stableIdPointer++;
+                backgroundGradient.color1 = defaultColors[i][0];
+                backgroundGradient.color2 = defaultColors[i][1];
+                backgroundGradient.color3 = defaultColors[i][2];
+                backgroundGradient.color4 = defaultColors[i][3];
+                gradients.add(backgroundGradient);
+            }
+            setOnItemClickListener((view, position) -> {
+                if (view instanceof GradientSelectorView && !((GradientSelectorView) view).isCustom) {
+                    selectedItemId = ((GradientSelectorView) view).backgroundGradient.stableId;
+                    previewView.setGradient(((GradientSelectorView) view).backgroundGradient);
+                    if (adapter != null) {
+                        adapter.notifyDataSetChanged();
+                    }
+                } else {
+                    if (selectedItemId != 1 && customSelectedGradient != null) {
+                        selectedItemId = 1;
+                        previewView.setGradient(customSelectedGradient);
+                        if (adapter != null) {
+                            adapter.notifyDataSetChanged();
+                        }
+                    } else {
+                        showColorPicker();
+                    }
+                }
+            });
+            setAdapter(adapter = new Adapter() {
+
+                private final static int VIEW_TYPE_GRADIENT = 0;
+                private final static int VIEW_TYPE_ADD_CUSTOM = 1;
+
+                @NonNull
+                @Override
+                public ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
+                    View view;
+                    switch (viewType) {
+                        case VIEW_TYPE_ADD_CUSTOM:
+                        case VIEW_TYPE_GRADIENT:
+                        default:
+                            view = new GradientSelectorView(getContext());
+                            break;
+                    }
+                    return new Holder(view);
+                }
+
+                @Override
+                public void onBindViewHolder(@NonNull ViewHolder holder, int position) {
+                    if (holder.getItemViewType() == VIEW_TYPE_GRADIENT) {
+                        GradientSelectorView gradientSelectorView = (GradientSelectorView) holder.itemView;
+                        gradientSelectorView.setGradient(gradients.get(position));
+                        gradientSelectorView.setSelectedInternal(selectedItemId == gradients.get(position).stableId, true);
+                    } else {
+                        GradientSelectorView gradientSelectorView = (GradientSelectorView) holder.itemView;
+
gradientSelectorView.setCustom(true); + gradientSelectorView.setGradient(customSelectedGradient); + gradientSelectorView.setSelectedInternal(selectedItemId == 1, true); + } + } + + @Override + public int getItemCount() { + return gradients.size() + 1; + } + + @Override + public long getItemId(int position) { + if (position >= gradients.size()) { + return 1; + } + return gradients.get(position).stableId; + } + + @Override + public int getItemViewType(int position) { + if (position >= gradients.size()) { + return VIEW_TYPE_ADD_CUSTOM; + } + return VIEW_TYPE_GRADIENT; + } + }); + setOverScrollMode(OVER_SCROLL_IF_CONTENT_SCROLLS); + } + + @Override + protected void onMeasure(int widthSpec, int heightSpec) { + int availableWidth = MeasureSpec.getSize(widthSpec); + int itemsCount = adapter.getItemCount(); + gradientBackgroundItemWidth = availableWidth / itemsCount; + if (gradientBackgroundItemWidth < AndroidUtilities.dp(36)) { + gradientBackgroundItemWidth = AndroidUtilities.dp(36); + } else if (gradientBackgroundItemWidth > AndroidUtilities.dp(150)) { + gradientBackgroundItemWidth = AndroidUtilities.dp(48); + } + super.onMeasure(widthSpec, heightSpec); + } + + public void selectGradient(BackgroundGradient gradient) { + boolean isDefault = false; + for (int i = 0; i < gradients.size(); i++) { + if (gradients.get(i).equals(gradient)) { + selectedItemId = gradients.get(i).stableId; + isDefault = true; + break; + } + } + if (!isDefault) { + customSelectedGradient = gradient; + selectedItemId = 1; + } + adapter.notifyDataSetChanged(); + } + } + + BackgroundGradient colorPickerGradient; + + private void showColorPicker() { + if (bottomSheet != null) { + return; + } + if (!previewView.expanded) { + setExpanded(true, true, true); + } + + BackgroundGradient prevGradient = null; + if (previewView.backgroundGradient != null) { + prevGradient = previewView.backgroundGradient; + } + boolean[] onDoneButtonPressed = new boolean[]{false}; + BackgroundGradient finalPrevGradient = prevGradient; + AndroidUtilities.requestAdjustNothing(getParentActivity(), getClassGuid()); + bottomSheet = new BottomSheet(getContext(), true) { + @Override + public void dismiss() { + super.dismiss(); + backgroundSelectView.selectGradient(colorPickerGradient); + colorPickerInAnimatoin = true; + fragmentView.invalidate(); + colorPickerPreviewView.animate().setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + colorPickerInAnimatoin = false; + colorPickerPreviewView.setVisibility(View.GONE); + } + }).alpha(0f).setDuration(200).start(); + } + + @Override + public void dismissInternal() { + super.dismissInternal(); + AndroidUtilities.requestAdjustResize(getParentActivity(), getClassGuid()); + bottomSheet = null; + } + }; + bottomSheet.fixNavigationBar(); + bottomSheet.pauseAllHeavyOperations = false; + + drawForBlur = true; + colorPickerPreviewView.setBackground(new BitmapDrawable(getContext().getResources(), AndroidUtilities.makeBlurBitmap(fragmentView, 12f, 10))); + drawForBlur = false; + colorPickerPreviewView.setVisibility(View.VISIBLE); + colorPickerPreviewView.setAlpha(0); + colorPickerInAnimatoin = true; + fragmentView.invalidate(); + colorPickerPreviewView.animate().setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + super.onAnimationEnd(animation); + colorPickerInAnimatoin = false; + } + }).alpha(1f).setDuration(200).start(); + + colorPickerGradient = new BackgroundGradient(); + ColorPicker colorPicker = new 
ColorPicker(getContext(), false, (color, num, applyNow) -> { + switch (num) { + case 0: + if (colorPickerGradient.color1 != color && (colorPickerGradient.color1 == 0 || color == 0)) { + colorPickerGradient = colorPickerGradient.copy(); + previewView.setGradient(colorPickerGradient); + } + colorPickerGradient.color1 = color; + break; + case 1: + if (colorPickerGradient.color2 != color && (colorPickerGradient.color2 == 0 || color == 0)) { + colorPickerGradient = colorPickerGradient.copy(); + previewView.setGradient(colorPickerGradient); + } + colorPickerGradient.color2 = color; + break; + case 2: + if (colorPickerGradient.color3 != color && (colorPickerGradient.color3 == 0 || color == 0)) { + colorPickerGradient = colorPickerGradient.copy(); + previewView.setGradient(colorPickerGradient); + } + colorPickerGradient.color3 = color; + break; + case 3: + if (colorPickerGradient.color4 != color && (colorPickerGradient.color4 == 0 || color == 0)) { + colorPickerGradient = colorPickerGradient.copy(); + previewView.setGradient(colorPickerGradient); + } + colorPickerGradient.color4 = color; + break; + } + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.needCheckSystemBarColors); + } + previewView.invalidate(); + }) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(300), MeasureSpec.EXACTLY)); + } + }; + + if (previewView.backgroundGradient != null) { + colorPicker.setColor(colorPickerGradient.color4 = previewView.backgroundGradient.color4, 3); + colorPicker.setColor(colorPickerGradient.color3 = previewView.backgroundGradient.color3, 2); + colorPicker.setColor(colorPickerGradient.color2 = previewView.backgroundGradient.color2, 1); + colorPicker.setColor(colorPickerGradient.color1 = previewView.backgroundGradient.color1, 0); + } + + colorPicker.setType(-1, true, 4, colorPickerGradient.colorsCount(), false, 0, false); + + previewView.setGradient(colorPickerGradient); + + LinearLayout colorPickerContainer = new LinearLayout(getContext()); + colorPickerContainer.setOrientation(LinearLayout.VERTICAL); + colorPickerContainer.setPadding(0, AndroidUtilities.dp(8), 0, 0); + colorPickerContainer.addView(colorPicker); + + FrameLayout button = new FrameLayout(getContext()); + button.setBackground(Theme.AdaptiveRipple.filledRect(Theme.key_featuredStickers_addButton, 8)); + + TextView textView = new TextView(getContext()); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + textView.setText(LocaleController.getString("SetColor", R.string.SetColor)); + textView.setGravity(Gravity.CENTER); + textView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + textView.setTextColor(Theme.getColor(Theme.key_featuredStickers_buttonText)); + button.addView(textView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + + colorPickerContainer.addView(button, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, 0, 16, -8, 16, 16)); + button.setOnClickListener(v -> { + onDoneButtonPressed[0] = true; + backgroundSelectView.selectGradient(colorPickerGradient); + bottomSheet.dismiss(); + }); + bottomSheet.setCustomView(colorPickerContainer); + bottomSheet.smoothKeyboardAnimationEnabled = true; + bottomSheet.setDimBehind(false); + bottomSheet.show(); + isLightStatusBar(); + } + + public static class BackgroundGradient { + + public int stableId; + + int color1; + 
int color2; + int color3; + int color4; + + public BackgroundGradient copy() { + BackgroundGradient backgroundGradient = new BackgroundGradient(); + backgroundGradient.color1 = color1; + backgroundGradient.color2 = color2; + backgroundGradient.color3 = color3; + backgroundGradient.color4 = color4; + return backgroundGradient; + } + + public int colorsCount() { + if (color4 != 0) { + return 4; + } + if (color3 != 0) { + return 3; + } + if (color2 != 0) { + return 2; + } + return 1; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof BackgroundGradient)) return false; + BackgroundGradient that = (BackgroundGradient) o; + return color1 == that.color1 && color2 == that.color2 && color3 == that.color3 && color4 == that.color4; + } + + @Override + public int hashCode() { + return Objects.hash(stableId, color1, color2, color3, color4); + } + + public int getAverageColor() { + int color = color1; + if (color2 != 0) { + color = ColorUtils.blendARGB(color, color2, 0.5f); + } + if (color3 != 0) { + color = ColorUtils.blendARGB(color, color3, 0.5f); + } + if (color4 != 0) { + color = ColorUtils.blendARGB(color, color4, 0.5f); + } + return color; + } + } + + private class GradientSelectorView extends View { + + BackgroundGradient backgroundGradient; + + AnimatedFloat progressToSelect = new AnimatedFloat(400, AndroidUtilities.overshootInterpolator); + boolean selected; + boolean isCustom; + + GradientTools gradientTools = new GradientTools(); + Drawable addIcon; + Paint optionsPaint; + Paint defaultPaint; + + public GradientSelectorView(Context context) { + super(context); + progressToSelect.setParent(this); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(MeasureSpec.makeMeasureSpec(gradientBackgroundItemWidth, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(48), MeasureSpec.EXACTLY)); + } + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + progressToSelect.set(selected ? 
1f : 0, false); + float cx = getMeasuredWidth() / 2f; + float cy = getMeasuredHeight() / 2f; + + Paint paint; + if (backgroundGradient != null) { + gradientTools.setColors(backgroundGradient.color1, backgroundGradient.color2, backgroundGradient.color3, backgroundGradient.color4); + gradientTools.setBounds(0, 0, getMeasuredWidth(), getMeasuredHeight()); + paint = gradientTools.paint; + } else { + if (defaultPaint == null) { + defaultPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + defaultPaint.setColor(Theme.getColor(Theme.key_chat_emojiPanelBackground)); + } + paint = defaultPaint; + } + if (progressToSelect.get() == 0) { + canvas.drawCircle(cx, cy, AndroidUtilities.dp(15), paint); + } else { + paint.setStyle(Paint.Style.STROKE); + paint.setStrokeWidth(AndroidUtilities.dp(2)); + canvas.drawCircle(cx, cy, AndroidUtilities.dpf2(13.5f), paint); + paint.setStyle(Paint.Style.FILL); + canvas.drawCircle(cx, cy, AndroidUtilities.dp(10) + AndroidUtilities.dp(5) * (1f - progressToSelect.get()), paint); + } + + if (isCustom) { + if (backgroundGradient == null) { + if (addIcon == null) { + addIcon = ContextCompat.getDrawable(getContext(), R.drawable.msg_filled_plus); + addIcon.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_emojiSearchIcon), PorterDuff.Mode.MULTIPLY)); + } + addIcon.setBounds((int) (cx - addIcon.getIntrinsicWidth() / 2f), (int) (cy - addIcon.getIntrinsicHeight() / 2f), + (int) (cx + addIcon.getIntrinsicWidth() / 2f), (int) (cy + addIcon.getIntrinsicHeight() / 2f)); + addIcon.draw(canvas); + } else { + if (optionsPaint == null) { + optionsPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + optionsPaint.setColor(0xffffffff); + } + optionsPaint.setAlpha(Math.round(255f * Utilities.clamp(progressToSelect.get(), 1f, 0f))); + canvas.drawCircle(cx, cy, AndroidUtilities.dp(1.5f), optionsPaint); + canvas.drawCircle(cx - AndroidUtilities.dp(5) * progressToSelect.get(), cy, AndroidUtilities.dp(1.5f), optionsPaint); + canvas.drawCircle(cx + AndroidUtilities.dp(5) * progressToSelect.get(), cy, AndroidUtilities.dp(1.5f), optionsPaint); + + } + } + } + + void setGradient(BackgroundGradient backgroundGradient) { + this.backgroundGradient = backgroundGradient; + } + + void setSelectedInternal(boolean selected, boolean animated) { + if (this.selected != selected) { + this.selected = selected; + invalidate(); + } + if (!animated) { + progressToSelect.set(selected ? 1f : 0, false); + } + } + + public void setCustom(boolean b) { + isCustom = b; + } + } + + boolean isLightInternal = false; + float progressToLightStatusBar = 0f; + ValueAnimator lightProgressAnimator; + + @Override + public boolean isLightStatusBar() { + boolean isLight; + if (previewView != null && (previewView.expanded || previewView.overrideExpandProgress >= 0 && previewView.backgroundGradient != null)) { + int averageColor = previewView.backgroundGradient.getAverageColor(); + isLight = AndroidUtilities.computePerceivedBrightness(averageColor) > 0.721f; + } else { + isLight = AndroidUtilities.computePerceivedBrightness(Theme.getColor(Theme.key_windowBackgroundGray)) > 0.721f; + } + if (isLightInternal != isLight) { + isLightInternal = isLight; + if (actionBar.getAlpha() == 0) { + setProgressToLightStatusBar(isLight ? 0f : 1f); + } else { + if (lightProgressAnimator != null) { + lightProgressAnimator.removeAllListeners(); + lightProgressAnimator.cancel(); + } + lightProgressAnimator = ValueAnimator.ofFloat(progressToLightStatusBar, isLight ? 
0f : 1f); + lightProgressAnimator.addUpdateListener(animation -> { + setProgressToLightStatusBar((Float) animation.getAnimatedValue()); + }); + lightProgressAnimator.setDuration(150).start(); + } + } + if (bottomSheet != null) { + AndroidUtilities.setLightStatusBar(bottomSheet.getWindow(), isLight); + } + return isLight; + } + + private void setProgressToLightStatusBar(float value) { + if (progressToLightStatusBar != value) { + progressToLightStatusBar = value; + int color = ColorUtils.blendARGB(Color.BLACK, Color.WHITE, progressToLightStatusBar); + int selectorColor = ColorUtils.setAlphaComponent(color, 60); + overlayActionBar.setItemsColor(color, false); + setPhotoItem.setBackground(Theme.createSelectorDrawable(selectorColor, Theme.RIPPLE_MASK_CIRCLE_TO_BOUND_EDGE)); + } + } + + public void setDelegate(Delegate delegate) { + this.delegate = delegate; + } + + public void onPreviewClick() { + if (isLandscapeMode) { + return; + } + if (keyboardVisibleProgress > 0) { + if (keyboardVisibilityAnimator != null) { + progressToExpand = 1f; + expandWithKeyboard = true; + } + AndroidUtilities.hideKeyboard(fragmentView); + return; + } + setExpanded(!previewView.expanded, true, false); + } + + private class ContainerLayout extends SizeNotifierFrameLayout implements NestedScrollingParent { + + private NestedScrollingParentHelper nestedScrollingParentHelper; + + public ContainerLayout(Context context) { + super(context); + nestedScrollingParentHelper = new NestedScrollingParentHelper(this); + } + + @Override + public boolean onStartNestedScroll(View child, View target, int nestedScrollAxes) { + if (keyboardVisibleProgress > 0 || isLandscapeMode) { + return false; + } + return true; + } + + @Override + public void onNestedScrollAccepted(View child, View target, int nestedScrollAxes) { + nestedScrollingParentHelper.onNestedScrollAccepted(child, target, nestedScrollAxes); + cancelExpandAnimator(); + } + + @Override + public void onStopNestedScroll(View target) { + nestedScrollingParentHelper.onStopNestedScroll(target); + setExpanded(progressToExpand > 0.5f, false, false); + } + + @Override + public void onNestedScroll(View target, int dxConsumed, int dyConsumed, int dxUnconsumed, int dyUnconsumed) { + if (keyboardVisibleProgress > 0 || isLandscapeMode) { + return; + } + if (dyUnconsumed != 0) { + cancelExpandAnimator(); + float progressToExpand = AvatarConstructorFragment.this.progressToExpand - dyUnconsumed / (float) expandedHeight; + progressToExpand = Utilities.clamp(progressToExpand, 1f, 0f); + setProgressToExpand(progressToExpand, true); + } + } + + @Override + public void onNestedPreScroll(View target, int dx, int dy, int[] consumed) { + if (keyboardVisibleProgress > 0 || isLandscapeMode) { + return; + } + if (dy > 0 && AvatarConstructorFragment.this.progressToExpand > 0) { + cancelExpandAnimator(); + float progressToExpand = AvatarConstructorFragment.this.progressToExpand - dy / (float) expandedHeight; + progressToExpand = Utilities.clamp(progressToExpand, 1f, 0f); + setProgressToExpand(progressToExpand, true); + consumed[1] = dy; + } + } + + @Override + public boolean onNestedFling(View target, float velocityX, float velocityY, boolean consumed) { + return false; + } + + @Override + public boolean onNestedPreFling(View target, float velocityX, float velocityY) { + return false; + } + + @Override + public int getNestedScrollAxes() { + return nestedScrollingParentHelper.getNestedScrollAxes(); + } + } + + @Override + public boolean isSwipeBackEnabled(MotionEvent event) { + return false; + } + + 
@Override + public boolean onBackPressed() { + discardEditor(); + return false; + } + + public interface Delegate { + void onDone(BackgroundGradient backgroundGradient, long documentId, TLRPC.Document document, PreviewView previewView); + } + + @Override + public void onResume() { + super.onResume(); + AndroidUtilities.requestAdjustResize(getParentActivity(), getClassGuid()); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarConstructorPreviewCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarConstructorPreviewCell.java new file mode 100644 index 0000000000..d844a7bd77 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarConstructorPreviewCell.java @@ -0,0 +1,228 @@ +package org.telegram.ui.Components; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.drawable.Drawable; +import android.graphics.drawable.GradientDrawable; +import android.util.TypedValue; +import android.view.Gravity; +import android.widget.FrameLayout; +import android.widget.TextView; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.MediaDataController; +import org.telegram.messenger.R; +import org.telegram.messenger.UserConfig; +import org.telegram.messenger.Utilities; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.Theme; + +import java.util.ArrayList; + +public class AvatarConstructorPreviewCell extends FrameLayout { + + private AnimatedEmojiDrawable animatedEmojiDrawable; + BackupImageView currentImage; + BackupImageView nextImage; + + GradientTools currentBackgroundDrawable; + GradientTools nextBackgroundDrawable; + TextView textView; + + TLRPC.TL_emojiList emojiList; + + public final boolean forUser; + private final int currentAccount = UserConfig.selectedAccount; + + int backgroundIndex = 0; + int emojiIndex = 0; + + float progressToNext = 1f; + + Runnable scheduleSwitchToNextRunnable = new Runnable() { + @Override + public void run() { + AndroidUtilities.runOnUIThread(scheduleSwitchToNextRunnable, 1000); + if (emojiList == null || emojiList.document_id.isEmpty() || progressToNext != 1f) { + + return; + } + emojiIndex++; + backgroundIndex++; + + if (emojiIndex > emojiList.document_id.size() - 1) { + emojiIndex = 0; + } + if (backgroundIndex > AvatarConstructorFragment.defaultColors.length - 1) { + backgroundIndex = 0; + } + animatedEmojiDrawable = new AnimatedEmojiDrawable(AnimatedEmojiDrawable.CACHE_TYPE_ALERT_PREVIEW_LARGE, currentAccount, emojiList.document_id.get(emojiIndex)); + nextImage.setAnimatedEmojiDrawable(animatedEmojiDrawable); + + + int color1 = AvatarConstructorFragment.defaultColors[backgroundIndex][0]; + int color2 = AvatarConstructorFragment.defaultColors[backgroundIndex][1]; + int color3 = AvatarConstructorFragment.defaultColors[backgroundIndex][2]; + int color4 = AvatarConstructorFragment.defaultColors[backgroundIndex][3]; + + nextBackgroundDrawable = new GradientTools(); + nextBackgroundDrawable.setColors(color1, color2, color3, color4); + + progressToNext = 0f; + invalidate(); + } + }; + + public AvatarConstructorPreviewCell(Context context, boolean forUser) { + super(context); + this.forUser = forUser; + if (forUser) { + emojiList = MediaDataController.getInstance(currentAccount).profileAvatarConstructorDefault; + } else { + emojiList = MediaDataController.getInstance(currentAccount).groupAvatarConstructorDefault; + } + + if (emojiList == null || 
emojiList.document_id.isEmpty()) {
+            ArrayList<TLRPC.TL_messages_stickerSet> installedEmojipacks = MediaDataController.getInstance(currentAccount).getStickerSets(MediaDataController.TYPE_EMOJIPACKS);
+            emojiList = new TLRPC.TL_emojiList();
+            if (installedEmojipacks.isEmpty()) {
+                ArrayList<TLRPC.StickerSetCovered> featured = MediaDataController.getInstance(currentAccount).getFeaturedEmojiSets();
+                for (int i = 0; i < featured.size(); i++) {
+                    TLRPC.StickerSetCovered set = featured.get(i);
+                    if (set.cover != null) {
+                        emojiList.document_id.add(set.cover.id);
+                    } else if (set instanceof TLRPC.TL_stickerSetFullCovered) {
+                        TLRPC.TL_stickerSetFullCovered setFullCovered = ((TLRPC.TL_stickerSetFullCovered) set);
+                        if (!setFullCovered.documents.isEmpty()) {
+                            emojiList.document_id.add(setFullCovered.documents.get(0).id);
+                        }
+                    }
+                }
+            } else {
+                for (int i = 0; i < installedEmojipacks.size(); i++) {
+                    TLRPC.TL_messages_stickerSet set = installedEmojipacks.get(i);
+                    if (!set.documents.isEmpty()) {
+                        int index = Math.abs(Utilities.fastRandom.nextInt() % set.documents.size());
+                        emojiList.document_id.add(set.documents.get(index).id);
+                    }
+                }
+            }
+
+        }
+        currentImage = new BackupImageView(context);
+        nextImage = new BackupImageView(context);
+        addView(currentImage, LayoutHelper.createFrame(50, 50, Gravity.CENTER_HORIZONTAL));
+        addView(nextImage, LayoutHelper.createFrame(50, 50, Gravity.CENTER_HORIZONTAL));
+
+        if (emojiList != null && !emojiList.document_id.isEmpty()) {
+            animatedEmojiDrawable = new AnimatedEmojiDrawable(AnimatedEmojiDrawable.CACHE_TYPE_ALERT_PREVIEW_LARGE, currentAccount, emojiList.document_id.get(0));
+            currentImage.setAnimatedEmojiDrawable(animatedEmojiDrawable);
+        }
+
+        int color1 = AvatarConstructorFragment.defaultColors[backgroundIndex][0];
+        int color2 = AvatarConstructorFragment.defaultColors[backgroundIndex][1];
+        int color3 = AvatarConstructorFragment.defaultColors[backgroundIndex][2];
+        int color4 = AvatarConstructorFragment.defaultColors[backgroundIndex][3];
+
+        currentBackgroundDrawable = new GradientTools();
+        currentBackgroundDrawable.setColors(color1, color2, color3, color4);
+
+        textView = new TextView(context);
+        textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 12);
+        textView.setTextColor(Theme.getColor(Theme.key_avatar_text));
+        textView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf"));
+        textView.setGravity(Gravity.CENTER);
+        textView.setText(LocaleController.getString("UseEmoji", R.string.UseEmoji));
+
+        addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 28, Gravity.BOTTOM, 10, 10, 10, 10));
+
+    }
+
+    @Override
+    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
+
+        int availableHeight = textView.getTop();
+        int imageHeight = (int) (availableHeight * 0.7f);
+        int padding = (int) ((availableHeight - imageHeight) * 0.7f);
+
+        currentImage.getLayoutParams().width = currentImage.getLayoutParams().height = imageHeight;
+        nextImage.getLayoutParams().width = nextImage.getLayoutParams().height = imageHeight;
+        ((LayoutParams) currentImage.getLayoutParams()).topMargin = padding;
+        ((LayoutParams) nextImage.getLayoutParams()).topMargin = padding;
+    }
+
+    @Override
+    protected void dispatchDraw(Canvas canvas) {
+        if (currentBackgroundDrawable != null) {
+            currentBackgroundDrawable.setBounds(0, 0, getMeasuredWidth(), getMeasuredHeight());
+        }
+        if (nextBackgroundDrawable != null) {
+            nextBackgroundDrawable.setBounds(0, 0, getMeasuredWidth(), getMeasuredHeight());
+        }
+        if (progressToNext == 1f) {
+
currentBackgroundDrawable.paint.setAlpha(255); + canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), currentBackgroundDrawable.paint); + currentImage.setAlpha(1f); + currentImage.setScaleX(1f); + currentImage.setScaleY(1f); + nextImage.setAlpha(0f); + } else { + float progressInternal = CubicBezierInterpolator.DEFAULT.getInterpolation(progressToNext); + + currentBackgroundDrawable.paint.setAlpha(255); + canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), currentBackgroundDrawable.paint); + nextBackgroundDrawable.paint.setAlpha((int) (255 * progressInternal)); + canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), nextBackgroundDrawable.paint); + + progressToNext += 16 / 250f; + + currentImage.setAlpha(1f - progressInternal); + currentImage.setScaleX(1f - progressInternal); + currentImage.setScaleY(1f - progressInternal); + currentImage.setPivotY(0); + nextImage.setAlpha(progressInternal); + nextImage.setScaleX(progressInternal); + nextImage.setScaleY(progressInternal); + nextImage.setPivotY(nextImage.getMeasuredHeight()); + if (progressToNext > 1f) { + progressToNext = 1f; + currentBackgroundDrawable = nextBackgroundDrawable; + + BackupImageView tmp = currentImage; + currentImage = nextImage; + nextImage = tmp; + } + invalidate(); + } + super.dispatchDraw(canvas); + } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + AndroidUtilities.runOnUIThread(scheduleSwitchToNextRunnable, 1000); + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + AndroidUtilities.cancelRunOnUIThread(scheduleSwitchToNextRunnable); + } + + public AvatarConstructorFragment.BackgroundGradient getBackgroundGradient() { + AvatarConstructorFragment.BackgroundGradient backgroundGradient = new AvatarConstructorFragment.BackgroundGradient(); + + backgroundGradient.color1 = AvatarConstructorFragment.defaultColors[backgroundIndex][0]; + backgroundGradient.color2 = AvatarConstructorFragment.defaultColors[backgroundIndex][1]; + backgroundGradient.color3 = AvatarConstructorFragment.defaultColors[backgroundIndex][2]; + backgroundGradient.color4 = AvatarConstructorFragment.defaultColors[backgroundIndex][3]; + + return backgroundGradient; + } + + public AnimatedEmojiDrawable getAnimatedEmoji() { + return animatedEmojiDrawable; + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarDrawable.java index 6e2b75ce20..2b168534c7 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarDrawable.java @@ -20,6 +20,7 @@ import android.text.Layout; import android.text.StaticLayout; import android.text.TextPaint; +import android.text.TextUtils; import androidx.core.graphics.ColorUtils; @@ -79,6 +80,7 @@ public class AvatarDrawable extends Drawable { private int alpha = 255; private Theme.ResourcesProvider resourcesProvider; + private boolean invalidateTextLayout; public AvatarDrawable() { this((Theme.ResourcesProvider) null); @@ -291,6 +293,7 @@ private String takeFirstCharacter(String text) { public void setInfo(long id, String firstName, String lastName, String custom) { hasGradient = true; + invalidateTextLayout = true; color = getThemedColor(Theme.keys_avatar_background[getColorIndex(id)]); color2 = getThemedColor(Theme.keys_avatar_background2[getColorIndex(id)]); needApplyColorAccent = id == 5; // Tinting manually set blue color @@ 
-335,23 +338,6 @@ public void setInfo(long id, String firstName, String lastName, String custom) { } } } - - if (stringBuilder.length() > 0) { - CharSequence text = stringBuilder.toString().toUpperCase(); - text = Emoji.replaceEmoji(text, namePaint.getFontMetricsInt(), AndroidUtilities.dp(16), true); - try { - textLayout = new StaticLayout(text, namePaint, AndroidUtilities.dp(100), Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, false); - if (textLayout.getLineCount() > 0) { - textLeft = textLayout.getLineLeft(0); - textWidth = textLayout.getLineWidth(0); - textHeight = textLayout.getLineBottom(0); - } - } catch (Exception e) { - FileLog.e(e); - } - } else { - textLayout = null; - } } @Override @@ -469,6 +455,27 @@ public void draw(Canvas canvas) { Theme.avatarDrawables[1].setBounds(x, y, x + w, y + h); Theme.avatarDrawables[1].draw(canvas); } else { + if (invalidateTextLayout) { + invalidateTextLayout = false; + if (stringBuilder.length() > 0) { + CharSequence text = stringBuilder.toString().toUpperCase(); + text = Emoji.replaceEmoji(text, namePaint.getFontMetricsInt(), AndroidUtilities.dp(16), true); + if (textLayout == null || !TextUtils.equals(text, textLayout.getText())) { + try { + textLayout = new StaticLayout(text, namePaint, AndroidUtilities.dp(100), Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, false); + if (textLayout.getLineCount() > 0) { + textLeft = textLayout.getLineLeft(0); + textWidth = textLayout.getLineWidth(0); + textHeight = textLayout.getLineBottom(0); + } + } catch (Exception e) { + FileLog.e(e); + } + } + } else { + textLayout = null; + } + } if (textLayout != null) { float scale = size / (float) AndroidUtilities.dp(50); canvas.scale(scale, scale, size / 2f, size / 2f) ; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarsDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarsDrawable.java index ca86c32906..22cc04190b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarsDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarsDrawable.java @@ -549,7 +549,13 @@ private int getSize() { return AndroidUtilities.dp(bigAvatars ? 
32 : 24); } + private boolean attached; + public void onDetachedFromWindow() { + if (!attached) { + return; + } + attached = false; wasDraw = false; for (int a = 0; a < 3; a++) { currentStates[a].imageReceiver.onDetachedFromWindow(); @@ -561,6 +567,10 @@ public void onDetachedFromWindow() { } public void onAttachedToWindow() { + if (attached) { + return; + } + attached = true; for (int a = 0; a < 3; a++) { currentStates[a].imageReceiver.onAttachedToWindow(); animatingStates[a].imageReceiver.onAttachedToWindow(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BackupImageView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BackupImageView.java index 1e2e62a3ce..1c0750ac82 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/BackupImageView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BackupImageView.java @@ -176,8 +176,12 @@ public void setImage(ImageLocation imageLocation, String imageFilter, ImageLocat onNewImageSet(); } - public void setImageMedia(ImageLocation mediaLocation, String mediaFilter, ImageLocation imageLocation, String imageFilter, ImageLocation thumbLocation, String thumbFilter, String ext, int size, int cacheType, Object parentObject) { - imageReceiver.setImage(mediaLocation, mediaFilter, imageLocation, imageFilter, thumbLocation, thumbFilter, null, size, ext, parentObject, cacheType); + public void setImageMedia(VectorAvatarThumbDrawable vectorAvatar, ImageLocation mediaLocation, String mediaFilter, ImageLocation imageLocation, String imageFilter, ImageLocation thumbLocation, String thumbFilter, String ext, int size, int cacheType, Object parentObject) { + if (vectorAvatar != null) { + imageReceiver.setImageBitmap(vectorAvatar); + } else { + imageReceiver.setImage(mediaLocation, mediaFilter, imageLocation, imageFilter, thumbLocation, thumbFilter, null, size, ext, parentObject, cacheType); + } onNewImageSet(); } @@ -321,6 +325,7 @@ public void setAnimatedEmojiDrawable(AnimatedEmojiDrawable animatedEmojiDrawable if (attached && animatedEmojiDrawable != null) { animatedEmojiDrawable.addView(this); } + invalidate(); } ValueAnimator roundRadiusAnimator; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BatteryDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BatteryDrawable.java new file mode 100644 index 0000000000..3253d90741 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BatteryDrawable.java @@ -0,0 +1,192 @@ +package org.telegram.ui.Components; + +import static org.telegram.messenger.AndroidUtilities.dp; +import static org.telegram.messenger.AndroidUtilities.dpf2; + +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.ValueAnimator; +import android.graphics.Canvas; +import android.graphics.ColorFilter; +import android.graphics.Paint; +import android.graphics.PixelFormat; +import android.graphics.RectF; +import android.graphics.drawable.Drawable; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; + +public class BatteryDrawable extends Drawable { + + private Paint paintReference; + private Paint strokePaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private Paint connectorPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private Paint fillPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + + private float scale = 1f, translateY = 0; + private float fillValue = 1f; + + private RectF rectTmp = new RectF(); + + public BatteryDrawable() { + 
strokePaint.setStyle(Paint.Style.STROKE); + } + + public BatteryDrawable(float value) { + this(); + setFillValue(value, false); + } + + public BatteryDrawable(float value, int color) { + this(); + setFillValue(value, false); + setColor(color); + } + + public BatteryDrawable(float value, int color, int fillColor) { + this(); + setFillValue(value, false); + setColor(color, fillColor); + } + + public BatteryDrawable(float value, int color, int fillColor, float scale) { + this(); + setFillValue(value, false); + setColor(color, fillColor); + setScale(scale); + } + + public void setScale(float scale) { + this.scale = scale; + invalidateSelf(); + } + + public void setColor(int color) { + setColor(color, color); + } + + public void setColor(int color, int fillColor) { + strokePaint.setColor(color); + connectorPaint.setColor(color); + fillPaint.setColor(fillColor); + } + + private ValueAnimator fillValueAnimator; + + public void setFillValue(float newValue, boolean animated) { + final float value = Math.max(Math.min(newValue, 1), 0); + + if (fillValueAnimator != null) { + fillValueAnimator.cancel(); + fillValueAnimator = null; + } + + if (!animated) { + fillValue = value; + invalidateSelf(); + } else { + fillValueAnimator = ValueAnimator.ofFloat(fillValue, value); + fillValueAnimator.addUpdateListener(anm -> { + fillValue = (float) anm.getAnimatedValue(); + invalidateSelf(); + }); + fillValueAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + fillValue = value; + invalidateSelf(); + } + }); + fillValueAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + fillValueAnimator.setDuration(200); + fillValueAnimator.start(); + } + } + + public void colorFromPaint(Paint paintReference) { + this.paintReference = paintReference; + } + + @Override + public void draw(@NonNull Canvas canvas) { + if (getBounds() == null) { + return; + } + + final int x = getBounds().left, y = getBounds().top + (int) translateY; + final int w = getBounds().width(), h = getBounds().height(); + final int cx = getBounds().centerX(), cy = getBounds().centerY() + (int) translateY; + + if (paintReference != null) { + setColor(paintReference.getColor()); + } + + if (scale != 1) { + canvas.save(); + canvas.scale(scale, scale, cx, cy); + } + + strokePaint.setStrokeWidth(dpf2(1.1f)); + + rectTmp.set( + x + (w - dpf2(16.33f)) / 2f - dpf2(1.33f), + y + (h - dpf2(10.33f)) / 2f, + x + (w + dpf2(16.33f)) / 2f - dpf2(1.33f), + y + (h + dpf2(10.33f)) / 2f + ); + canvas.drawRoundRect(rectTmp, dpf2(2.33f), dpf2(2.33f), strokePaint); + + rectTmp.set( + x + (w - dpf2(13f)) / 2f - dpf2(1.66f), + y + (h - dpf2(7.33f)) / 2f, + x + (w - dpf2(13f)) / 2f - dpf2(1.66f) + Math.max(dpf2(1.1f), fillValue * dpf2(13)), + y + (h + dpf2(7.33f)) / 2f + ); + canvas.drawRoundRect(rectTmp, dpf2(0.83f), dpf2(0.83f), fillPaint); + + rectTmp.set( + x + (w + dpf2(17.5f) - dpf2(4.66f)) / 2f, + cy - dpf2(2.65f), + x + (w + dpf2(17.5f) + dpf2(4.66f)) / 2f, + cy + dpf2(2.65f) + ); + canvas.drawArc(rectTmp, -90, 180, false, connectorPaint); + + if (scale != 1) { + canvas.restore(); + } + } + + public void setTranslationY(float translateY) { + this.translateY = translateY; + } + + @Override + public void setAlpha(int alpha) { + strokePaint.setAlpha(alpha); + connectorPaint.setAlpha(alpha); + fillPaint.setAlpha(alpha); + } + + @Override + public void setColorFilter(@Nullable ColorFilter colorFilter) { + strokePaint.setColorFilter(colorFilter); + connectorPaint.setColorFilter(colorFilter); + 
fillPaint.setColorFilter(colorFilter); + } + + @Override + public int getOpacity() { + return PixelFormat.TRANSPARENT; + } + + @Override + public int getIntrinsicWidth() { + return dp(24 * scale); + } + + @Override + public int getIntrinsicHeight() { + return dp(24 * scale); + } +} \ No newline at end of file diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BlobDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BlobDrawable.java index ef38e7284a..743d114c12 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/BlobDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BlobDrawable.java @@ -5,6 +5,7 @@ import android.graphics.Paint; import android.graphics.Path; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.SharedConfig; import java.util.Random; @@ -58,7 +59,13 @@ public class BlobDrawable { private final Matrix m = new Matrix(); + private final int liteFlag; + public BlobDrawable(int n) { + this(n, LiteMode.FLAG_CALLS_ANIMATIONS); + } + + public BlobDrawable(int n, int liteFlag) { N = n; L = (float) ((4.0 / 3.0) * Math.tan(Math.PI / (2 * N))); radius = new float[n]; @@ -74,6 +81,8 @@ public BlobDrawable(int n) { generateBlob(radiusNext, angleNext, i); progress[i] = 0; } + + this.liteFlag = liteFlag; } private void generateBlob(float[] radius, float[] angle, int i) { @@ -85,7 +94,7 @@ private void generateBlob(float[] radius, float[] angle, int i) { } public void update(float amplitude, float speedScale) { - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(liteFlag)) { return; } for (int i = 0; i < N; i++) { @@ -100,7 +109,7 @@ public void update(float amplitude, float speedScale) { } public void draw(float cX, float cY, Canvas canvas, Paint paint) { - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(liteFlag)) { return; } path.reset(); @@ -171,7 +180,7 @@ public void generateBlob() { public void setValue(float value, boolean isBig) { animateToAmplitude = value; - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(liteFlag)) { return; } if (isBig) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BlurredFrameLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BlurredFrameLayout.java index b8788aa227..dd77a898dc 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/BlurredFrameLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BlurredFrameLayout.java @@ -5,6 +5,7 @@ import android.graphics.Color; import android.graphics.Paint; import android.view.View; +import android.view.ViewParent; import android.widget.FrameLayout; import androidx.annotation.NonNull; @@ -39,7 +40,13 @@ protected void dispatchDraw(Canvas canvas) { View view = this; while (view != sizeNotifierFrameLayout) { y += view.getY(); - view = (View) view.getParent(); + ViewParent parent = view.getParent(); + if (parent instanceof View) { + view = (View) parent; + } else { + super.dispatchDraw(canvas); + return; + } } sizeNotifierFrameLayout.drawBlurRect(canvas, y, AndroidUtilities.rectTmp2, backgroundPaint, isTopView); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BotCommandsMenuView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotCommandsMenuView.java index e78c556517..68de90c07b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/BotCommandsMenuView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotCommandsMenuView.java @@ -4,6 
+4,8 @@ import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; import android.graphics.RectF; import android.graphics.drawable.Drawable; import android.os.Build; @@ -109,9 +111,12 @@ public void setWebView(boolean webView) { private void updateColors() { paint.setColor(Theme.getColor(Theme.key_chat_messagePanelVoiceBackground)); - int textColor = Theme.getColor(Theme.key_chat_messagePanelVoicePressed); + int textColor = Theme.getColor(Theme.key_chat_messagePanelVoiceDuration); backDrawable.setBackColor(textColor); backDrawable.setIconColor(textColor); + if (webViewAnimation != null) { + webViewAnimation.setColorFilter(new PorterDuffColorFilter(textColor, PorterDuff.Mode.SRC_IN)); + } textPaint.setColor(textColor); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BotKeyboardView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotKeyboardView.java index fec45178bb..b6037b7e49 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/BotKeyboardView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotKeyboardView.java @@ -120,12 +120,7 @@ public void setButtons(TLRPC.TL_replyKeyboardMarkup buttons) { float weight = 1.0f / row.buttons.size(); for (int b = 0; b < row.buttons.size(); b++) { TLRPC.KeyboardButton button = row.buttons.get(b); - TextView textView = new EmojiTextView(getContext()); - textView.setTag(button); - textView.setTextColor(getThemedColor(Theme.key_chat_botKeyboardButtonText)); - textView.setBackground(Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(4), getThemedColor(Theme.key_chat_botKeyboardButtonBackground), getThemedColor(Theme.key_chat_botKeyboardButtonBackgroundPressed))); - textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); - textView.setGravity(Gravity.CENTER); + Button textView = new Button(getContext(), button); FrameLayout frame = new FrameLayout(getContext()); frame.addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); @@ -151,6 +146,20 @@ public void setButtons(TLRPC.TL_replyKeyboardMarkup buttons) { } } + private class Button extends EmojiTextView { + public Button(Context context, TLRPC.KeyboardButton button) { + super(context); + + setTag(button); + setTextColor(getThemedColor(Theme.key_chat_botKeyboardButtonText)); + setBackground(Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(4), getThemedColor(Theme.key_chat_botKeyboardButtonBackground), getThemedColor(Theme.key_chat_botKeyboardButtonBackgroundPressed))); + setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); + setGravity(Gravity.CENTER); + setPadding(AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4), 0); + setText(Emoji.replaceEmoji(button.text, getPaint().getFontMetricsInt(), false)); + } + } + public int getKeyboardHeight() { if (botButtons == null) { return 0; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BotWebViewContainer.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotWebViewContainer.java index c60b9fa397..d6a54c0c2d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/BotWebViewContainer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotWebViewContainer.java @@ -939,6 +939,21 @@ private void onEventReceived(String eventType, String eventData) { delegate.onCloseRequested(null); break; } + case "web_app_switch_inline_query": { + try { + JSONObject jsonObject = new 
JSONObject(eventData); + List types = new ArrayList<>(); + JSONArray arr = jsonObject.getJSONArray("chat_types"); + for (int i = 0; i < arr.length(); i++) { + types.add(arr.getString(i)); + } + + delegate.onWebAppSwitchInlineQuery(botUser, jsonObject.getString("query"), types); + } catch (JSONException e) { + FileLog.e(e); + } + break; + } case "web_app_read_text_from_clipboard": { try { JSONObject jsonObject = new JSONObject(eventData); @@ -1489,6 +1504,15 @@ default void onSendWebViewData(String data) {} */ void onWebAppExpand(); + /** + * Called when web apps requests to switch to inline mode picker + * + * @param botUser Bot user + * @param query Inline query + * @param chatTypes Chat types + */ + void onWebAppSwitchInlineQuery(TLRPC.User botUser, String query, List chatTypes); + /** * Called when web app attempts to open invoice * diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BotWebViewMenuContainer.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotWebViewMenuContainer.java index b1777fd71b..fc5649f213 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/BotWebViewMenuContainer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotWebViewMenuContainer.java @@ -8,6 +8,7 @@ import android.graphics.Canvas; import android.graphics.Paint; import android.os.Build; +import android.os.Bundle; import android.text.Editable; import android.view.Gravity; import android.view.MotionEvent; @@ -25,12 +26,14 @@ import org.json.JSONObject; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ContactsController; +import org.telegram.messenger.DialogObject; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; +import org.telegram.messenger.UserObject; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLObject; import org.telegram.tgnet.TLRPC; @@ -39,9 +42,12 @@ import org.telegram.ui.ActionBar.ActionBarMenuItem; import org.telegram.ui.ActionBar.ActionBarMenuSubItem; import org.telegram.ui.ActionBar.AlertDialog; +import org.telegram.ui.ActionBar.INavigationLayout; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ChatActivity; +import org.telegram.ui.DialogsActivity; +import java.util.List; import java.util.Locale; import java.util.Objects; @@ -121,17 +127,22 @@ public class BotWebViewMenuContainer extends FrameLayout implements Notification } }; + private void checkBotMenuItem() { + if (botMenuItem == null) { + ActionBarMenu menu = parentEnterView.getParentFragment().getActionBar().createMenu(); + botMenuItem = menu.addItem(1000, R.drawable.ic_ab_other); + botMenuItem.setVisibility(GONE); + + botMenuItem.addSubItem(R.id.menu_reload_page, R.drawable.msg_retry, LocaleController.getString(R.string.BotWebViewReloadPage)); + } + } + public BotWebViewMenuContainer(@NonNull Context context, ChatActivityEnterView parentEnterView) { super(context); this.parentEnterView = parentEnterView; ChatActivity chatActivity = parentEnterView.getParentFragment(); ActionBar actionBar = chatActivity.getActionBar(); - ActionBarMenu menu = actionBar.createMenu(); - botMenuItem = menu.addItem(1000, R.drawable.ic_ab_other); - botMenuItem.setVisibility(GONE); - - botMenuItem.addSubItem(R.id.menu_reload_page, R.drawable.msg_retry, LocaleController.getString(R.string.BotWebViewReloadPage)); 
actionBarOnItemClick = actionBar.getActionBarMenuOnItemClick(); webViewContainer = new BotWebViewContainer(context, parentEnterView.getParentFragment().getResourceProvider(), getColor(Theme.key_windowBackgroundWhite)); @@ -192,6 +203,45 @@ public void onWebAppExpand() { swipeContainer.stickTo(-swipeContainer.getOffsetY() + swipeContainer.getTopActionBarOffsetY()); } + @Override + public void onWebAppSwitchInlineQuery(TLRPC.User botUser, String query, List chatTypes) { + if (chatTypes.isEmpty()) { + parentEnterView.setFieldText("@" + UserObject.getPublicUsername(botUser) + " " + query); + dismiss(); + } else { + Bundle args = new Bundle(); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_START_ATTACH_BOT); + args.putBoolean("onlySelect", true); + + args.putBoolean("allowGroups", chatTypes.contains("groups")); + args.putBoolean("allowUsers", chatTypes.contains("users")); + args.putBoolean("allowChannels", chatTypes.contains("channels")); + args.putBoolean("allowBots", chatTypes.contains("bots")); + + DialogsActivity dialogsActivity = new DialogsActivity(args); + dialogsActivity.setDelegate((fragment, dids, message1, param, topicsFragment) -> { + long did = dids.get(0).dialogId; + + Bundle args1 = new Bundle(); + args1.putBoolean("scrollToTopOnResume", true); + if (DialogObject.isEncryptedDialog(did)) { + args1.putInt("enc_id", DialogObject.getEncryptedChatId(did)); + } else if (DialogObject.isUserDialog(did)) { + args1.putLong("user_id", did); + } else { + args1.putLong("chat_id", -did); + } + args1.putString("inline_query_input", "@" + UserObject.getPublicUsername(botUser) + " " + query); + + if (MessagesController.getInstance(currentAccount).checkCanOpenChat(args1, fragment)) { + fragment.presentFragment(new INavigationLayout.NavigationParams(new ChatActivity(args1)).setRemoveLast(true)); + } + return true; + }); + parentEnterView.getParentFragment().presentFragment(dialogsActivity); + } + } + @Override public void onWebAppOpenInvoice(String slug, TLObject response) { Toast.makeText(getContext(), LocaleController.getString("nekoXPaymentRemovedToast", R.string.nekoXPaymentRemovedToast), Toast.LENGTH_LONG).show(); @@ -432,6 +482,7 @@ public void onAttachedToWindow() { ActionBar actionBar = chatActivity.getActionBar(); if (value == 100 && parentEnterView.hasBotWebView()) { chatActivity.showHeaderItem(false); + checkBotMenuItem(); botMenuItem.setVisibility(VISIBLE); actionBar.setActionBarMenuOnItemClick(new ActionBar.ActionBarMenuOnItemClick() { @Override @@ -461,7 +512,9 @@ public void onItemClick(int id) { }); } else { chatActivity.showHeaderItem(true); - botMenuItem.setVisibility(GONE); + if (botMenuItem != null) { + botMenuItem.setVisibility(GONE); + } actionBar.setActionBarMenuOnItemClick(actionBarOnItemClick); } }); @@ -624,7 +677,7 @@ public void show(int currentAccount, long botId, String botUrl) { this.botId = botId; this.botUrl = botUrl; - savedEditText = parentEnterView.getEditField().getText(); + savedEditText = parentEnterView.getEditText(); parentEnterView.getEditField().setText(null); savedReplyMessageObject = parentEnterView.getReplyingMessageObject(); savedEditMessageObject = parentEnterView.getEditingMessageObject(); @@ -794,7 +847,7 @@ public void onAnimationEnd(Animator animation) { } AndroidUtilities.runOnUIThread(()->{ - if (savedEditText != null) { + if (savedEditText != null && parentEnterView.getEditField() != null) { parentEnterView.getEditField().setText(savedEditText); savedEditText = null; } diff --git 
a/TMessagesProj/src/main/java/org/telegram/ui/Components/BotWebViewSheet.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotWebViewSheet.java index e6cd9cb066..6e7bae60a5 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/BotWebViewSheet.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotWebViewSheet.java @@ -7,12 +7,14 @@ import android.app.Activity; import android.app.Dialog; import android.content.Context; +import android.content.ContextWrapper; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Bundle; +import android.text.TextUtils; import android.util.TypedValue; import android.view.Gravity; import android.view.MotionEvent; @@ -34,6 +36,7 @@ import org.json.JSONObject; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ContactsController; +import org.telegram.messenger.DialogObject; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessagesController; @@ -50,22 +53,28 @@ import org.telegram.ui.ActionBar.ActionBarMenuSubItem; import org.telegram.ui.ActionBar.AlertDialog; import org.telegram.ui.ActionBar.BaseFragment; +import org.telegram.ui.ActionBar.INavigationLayout; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ChatActivity; +import org.telegram.ui.DialogsActivity; import org.telegram.ui.LaunchActivity; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.util.List; import java.util.Locale; public class BotWebViewSheet extends Dialog implements NotificationCenter.NotificationCenterDelegate { - public final static int TYPE_WEB_VIEW_BUTTON = 0, TYPE_SIMPLE_WEB_VIEW_BUTTON = 1, TYPE_BOT_MENU_BUTTON = 2; + public final static int TYPE_WEB_VIEW_BUTTON = 0, TYPE_SIMPLE_WEB_VIEW_BUTTON = 1, TYPE_BOT_MENU_BUTTON = 2, TYPE_WEB_VIEW_BOT_APP = 3; + + public final static int FLAG_FROM_INLINE_SWITCH = 1; @Retention(RetentionPolicy.SOURCE) @IntDef(value = { TYPE_WEB_VIEW_BUTTON, TYPE_SIMPLE_WEB_VIEW_BUTTON, - TYPE_BOT_MENU_BUTTON + TYPE_BOT_MENU_BUTTON, + TYPE_WEB_VIEW_BOT_APP }) public @interface WebViewType {} @@ -127,6 +136,8 @@ public class BotWebViewSheet extends Dialog implements NotificationCenter.Notifi private VerticalPositionAutoAnimator mainButtonAutoAnimator, radialProgressAutoAnimator; + private PasscodeView passcodeView; + private Runnable pollRunnable = () -> { if (!dismissed) { TLRPC.TL_messages_prolongWebView prolongWebView = new TLRPC.TL_messages_prolongWebView(); @@ -270,6 +281,66 @@ public void onWebAppExpand() { swipeContainer.stickTo(-swipeContainer.getOffsetY() + swipeContainer.getTopActionBarOffsetY()); } + @Override + public void onWebAppSwitchInlineQuery(TLRPC.User botUser, String query, List<String> chatTypes) { + if (chatTypes.isEmpty()) { + if (parentActivity instanceof LaunchActivity) { + BaseFragment lastFragment = ((LaunchActivity) parentActivity).getActionBarLayout().getLastFragment(); + if (lastFragment instanceof ChatActivity) { + ((ChatActivity) lastFragment).getChatActivityEnterView().setFieldText("@" + UserObject.getPublicUsername(botUser) + " " + query); + dismiss(); + } + } + } else { + Bundle args = new Bundle(); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_START_ATTACH_BOT); + args.putBoolean("onlySelect", true); + + args.putBoolean("allowGroups", chatTypes.contains("groups")); + args.putBoolean("allowUsers",
chatTypes.contains("users")); + args.putBoolean("allowChannels", chatTypes.contains("channels")); + args.putBoolean("allowBots", chatTypes.contains("bots")); + + DialogsActivity dialogsActivity = new DialogsActivity(args); + AndroidUtilities.hideKeyboard(frameLayout); + OverlayActionBarLayoutDialog overlayActionBarLayoutDialog = new OverlayActionBarLayoutDialog(context, resourcesProvider); + dialogsActivity.setDelegate((fragment, dids, message1, param, topicsFragment) -> { + long did = dids.get(0).dialogId; + + Bundle args1 = new Bundle(); + args1.putBoolean("scrollToTopOnResume", true); + if (DialogObject.isEncryptedDialog(did)) { + args1.putInt("enc_id", DialogObject.getEncryptedChatId(did)); + } else if (DialogObject.isUserDialog(did)) { + args1.putLong("user_id", did); + } else { + args1.putLong("chat_id", -did); + } + args1.putString("inline_query_input", "@" + UserObject.getPublicUsername(botUser) + " " + query); + + if (parentActivity instanceof LaunchActivity) { + BaseFragment lastFragment = ((LaunchActivity) parentActivity).getActionBarLayout().getLastFragment(); + if (MessagesController.getInstance(currentAccount).checkCanOpenChat(args1, lastFragment)) { + overlayActionBarLayoutDialog.dismiss(); + + dismissed = true; + AndroidUtilities.cancelRunOnUIThread(pollRunnable); + + webViewContainer.destroyWebView(); + NotificationCenter.getInstance(currentAccount).removeObserver(BotWebViewSheet.this, NotificationCenter.webViewResultSent); + NotificationCenter.getGlobalInstance().removeObserver(BotWebViewSheet.this, NotificationCenter.didSetNewTheme); + BotWebViewSheet.super.dismiss(); + + lastFragment.presentFragment(new INavigationLayout.NavigationParams(new ChatActivity(args1)).setRemoveLast(true)); + } + } + return true; + }); + overlayActionBarLayoutDialog.show(); + overlayActionBarLayoutDialog.addFragment(dialogsActivity); + } + } + @Override public void onSetupMainButton(boolean isVisible, boolean isActive, String text, int color, int textColor, boolean isProgressVisible) { mainButton.setClickable(isActive); @@ -332,19 +403,21 @@ public void onAnimationEnd(Animator animation) { protected void onDraw(Canvas canvas) { super.onDraw(canvas); - if (!overrideBackgroundColor) { - backgroundPaint.setColor(getColor(Theme.key_windowBackgroundWhite)); - } - AndroidUtilities.rectTmp.set(0, 0, getWidth(), getHeight()); - canvas.drawRect(AndroidUtilities.rectTmp, dimPaint); + if (passcodeView.getVisibility() != View.VISIBLE) { + if (!overrideBackgroundColor) { + backgroundPaint.setColor(getColor(Theme.key_windowBackgroundWhite)); + } + AndroidUtilities.rectTmp.set(0, 0, getWidth(), getHeight()); + canvas.drawRect(AndroidUtilities.rectTmp, dimPaint); - actionBarPaint.setColor(ColorUtils.blendARGB(actionBarColor, getColor(Theme.key_windowBackgroundWhite), actionBarTransitionProgress)); - float radius = AndroidUtilities.dp(16) * (AndroidUtilities.isTablet() ? 1f : 1f - actionBarTransitionProgress); - AndroidUtilities.rectTmp.set(swipeContainer.getLeft(), AndroidUtilities.lerp(swipeContainer.getTranslationY(), 0, actionBarTransitionProgress), swipeContainer.getRight(), swipeContainer.getTranslationY() + AndroidUtilities.dp(24) + radius); - canvas.drawRoundRect(AndroidUtilities.rectTmp, radius, radius, actionBarPaint); + actionBarPaint.setColor(ColorUtils.blendARGB(actionBarColor, getColor(Theme.key_windowBackgroundWhite), actionBarTransitionProgress)); + float radius = AndroidUtilities.dp(16) * (AndroidUtilities.isTablet() ? 
1f : 1f - actionBarTransitionProgress); + AndroidUtilities.rectTmp.set(swipeContainer.getLeft(), AndroidUtilities.lerp(swipeContainer.getTranslationY(), 0, actionBarTransitionProgress), swipeContainer.getRight(), swipeContainer.getTranslationY() + AndroidUtilities.dp(24) + radius); + canvas.drawRoundRect(AndroidUtilities.rectTmp, radius, radius, actionBarPaint); - AndroidUtilities.rectTmp.set(swipeContainer.getLeft(), swipeContainer.getTranslationY() + AndroidUtilities.dp(24), swipeContainer.getRight(), getHeight()); - canvas.drawRect(AndroidUtilities.rectTmp, backgroundPaint); + AndroidUtilities.rectTmp.set(swipeContainer.getLeft(), swipeContainer.getTranslationY() + AndroidUtilities.dp(24), swipeContainer.getRight(), getHeight()); + canvas.drawRect(AndroidUtilities.rectTmp, backgroundPaint); + } } @Override @@ -511,9 +584,38 @@ public void onAnimationEnd(Animator animation) { swipeContainer.setTopActionBarOffsetY(ActionBar.getCurrentActionBarHeight() + AndroidUtilities.statusBarHeight - AndroidUtilities.dp(24)); swipeContainer.setIsKeyboardVisible(obj -> frameLayout.getKeyboardHeight() >= AndroidUtilities.dp(20)); + passcodeView = new PasscodeView(context); + frameLayout.addView(passcodeView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + setContentView(frameLayout, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); } + @Override + protected void onStart() { + super.onStart(); + + Context context = getContext(); + if (context instanceof ContextWrapper && !(context instanceof LaunchActivity)) { + context = ((ContextWrapper) context).getBaseContext(); + } + if (context instanceof LaunchActivity) { + ((LaunchActivity) context).addOverlayPasscodeView(passcodeView); + } + } + + @Override + protected void onStop() { + super.onStop(); + + Context context = getContext(); + if (context instanceof ContextWrapper && !(context instanceof LaunchActivity)) { + context = ((ContextWrapper) context).getBaseContext(); + } + if (context instanceof LaunchActivity) { + ((LaunchActivity) context).removeOverlayPasscodeView(passcodeView); + } + } + public void setParentActivity(Activity parentActivity) { this.parentActivity = parentActivity; } @@ -615,7 +717,19 @@ public void onDetachedFromWindow() { } } + public void requestWebView(int currentAccount, long peerId, long botId, String buttonText, String buttonUrl, @WebViewType int type, int replyToMsgId, boolean silent, int flags) { + requestWebView(currentAccount, peerId, botId, buttonText, buttonUrl, type, replyToMsgId, silent, null, null, false, null, null, flags); + } + public void requestWebView(int currentAccount, long peerId, long botId, String buttonText, String buttonUrl, @WebViewType int type, int replyToMsgId, boolean silent) { + requestWebView(currentAccount, peerId, botId, buttonText, buttonUrl, type, replyToMsgId, silent, null, null, false, null, null, 0); + } + + public void requestWebView(int currentAccount, long peerId, long botId, String buttonText, String buttonUrl, @WebViewType int type, int replyToMsgId, boolean silent, BaseFragment lastFragment, TLRPC.BotApp app, boolean allowWrite, String startParam, TLRPC.User botUser) { + requestWebView(currentAccount, peerId, botId, buttonText, buttonUrl, type, replyToMsgId, silent, lastFragment, app, allowWrite, startParam, botUser, 0); + } + + public void requestWebView(int currentAccount, long peerId, long botId, String buttonText, String buttonUrl, @WebViewType int type, int replyToMsgId, boolean silent, 
BaseFragment lastFragment, TLRPC.BotApp app, boolean allowWrite, String startParam, TLRPC.User botUser, int flags) { this.currentAccount = currentAccount; this.peerId = peerId; this.botId = botId; @@ -714,6 +828,7 @@ public void onItemClick(int id) { } case TYPE_SIMPLE_WEB_VIEW_BUTTON: { TLRPC.TL_messages_requestSimpleWebView req = new TLRPC.TL_messages_requestSimpleWebView(); + req.from_switch_webview = (flags & FLAG_FROM_INLINE_SWITCH) != 0; req.bot = MessagesController.getInstance(currentAccount).getInputUser(botId); req.platform = "android"; if (hasThemeParams) { @@ -767,6 +882,40 @@ public void onItemClick(int id) { NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.webViewResultSent); break; } + case TYPE_WEB_VIEW_BOT_APP: { + TLRPC.TL_messages_requestAppWebView req = new TLRPC.TL_messages_requestAppWebView(); + TLRPC.TL_inputBotAppID botApp = new TLRPC.TL_inputBotAppID(); + botApp.id = app.id; + botApp.access_hash = app.access_hash; + + req.app = botApp; + req.write_allowed = allowWrite; + req.platform = "android"; + req.peer = lastFragment instanceof ChatActivity ? ((ChatActivity) lastFragment).getCurrentUser() != null ? MessagesController.getInputPeer(((ChatActivity) lastFragment).getCurrentUser()) : MessagesController.getInputPeer(((ChatActivity) lastFragment).getCurrentChat()) + : MessagesController.getInputPeer(botUser); + + if (!TextUtils.isEmpty(startParam)) { + req.start_param = startParam; + req.flags |= 2; + } + + if (hasThemeParams) { + req.theme_params = new TLRPC.TL_dataJSON(); + req.theme_params.data = themeParams; + req.flags |= 4; + } + + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response2, error2) -> AndroidUtilities.runOnUIThread(() -> { + if (error2 == null) { + TLRPC.TL_appWebViewResultUrl result = (TLRPC.TL_appWebViewResultUrl) response2; + queryId = 0; + webViewContainer.loadUrl(currentAccount, result.url); + swipeContainer.setWebView(webViewContainer.getWebView()); + + AndroidUtilities.runOnUIThread(pollRunnable, POLL_PERIOD); + } + }), ConnectionsManager.RequestFlagInvokeAfter | ConnectionsManager.RequestFlagFailOnServerErrors); + } } } @@ -803,6 +952,12 @@ public void onLayoutChange(View v, int left, int top, int right, int bottom, int @Override public void onBackPressed() { + if (passcodeView.getVisibility() == View.VISIBLE) { + if (getOwnerActivity() != null) { + getOwnerActivity().finish(); + } + return; + } if (webViewContainer.onBackPressed()) { return; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Bulletin.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Bulletin.java index 92f0c8eace..3d4f452feb 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Bulletin.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Bulletin.java @@ -41,6 +41,7 @@ import androidx.annotation.IntDef; import androidx.annotation.NonNull; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import androidx.core.util.Consumer; import androidx.core.view.ViewCompat; import androidx.dynamicanimation.animation.DynamicAnimation; @@ -180,8 +181,9 @@ public static void hideVisible() { } } - public void setDuration(int duration) { + public Bulletin setDuration(int duration) { this.duration = duration; + return this; } public Bulletin show() { @@ -209,6 +211,9 @@ public Bulletin show(boolean top) { layout.onAttach(this); containerLayout.addOnLayoutChangeListener(containerLayoutListener = (v, left, top1, right, bottom, oldLeft, oldTop, oldRight, 
oldBottom) -> { + if (currentDelegate != null && !currentDelegate.allowLayoutChanges()) { + return; + } if (!top) { int newOffset = currentDelegate != null ? currentDelegate.getBottomOffset(tag) : 0; if (lastBottomOffset != newOffset) { @@ -580,6 +585,10 @@ default void onShow(Bulletin bulletin) { default void onHide(Bulletin bulletin) { } + + default boolean allowLayoutChanges() { + return true; + } } //endregion @@ -1115,6 +1124,7 @@ public static class TwoLineLayout extends ButtonLayout { public final BackupImageView imageView; public final TextView titleTextView; public final TextView subtitleTextView; + private final LinearLayout linearLayout; public TwoLineLayout(@NonNull Context context, Theme.ResourcesProvider resourcesProvider) { super(context, resourcesProvider); @@ -1123,7 +1133,7 @@ public TwoLineLayout(@NonNull Context context, Theme.ResourcesProvider resources addView(imageView = new BackupImageView(context), LayoutHelper.createFrameRelatively(29, 29, Gravity.START | Gravity.CENTER_VERTICAL, 12, 12, 12, 12)); - final LinearLayout linearLayout = new LinearLayout(context); + linearLayout = new LinearLayout(context); linearLayout.setOrientation(LinearLayout.VERTICAL); addView(linearLayout, LayoutHelper.createFrameRelatively(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.START | Gravity.CENTER_VERTICAL, 54, 8, 12, 8)); @@ -1151,6 +1161,11 @@ protected void onShow() { public CharSequence getAccessibilityText() { return titleTextView.getText() + ".\n" + subtitleTextView.getText(); } + + public void hideImage() { + imageView.setVisibility(GONE); + ((MarginLayoutParams) linearLayout.getLayoutParams()).setMarginStart(AndroidUtilities.dp(12)); + } } public static class TwoLineLottieLayout extends ButtonLayout { @@ -1158,6 +1173,7 @@ public static class TwoLineLottieLayout extends ButtonLayout { public final RLottieImageView imageView; public final LinkSpanDrawable.LinksTextView titleTextView; public final LinkSpanDrawable.LinksTextView subtitleTextView; + private final LinearLayout linearLayout; private final int textColor; @@ -1173,7 +1189,7 @@ public TwoLineLottieLayout(@NonNull Context context, Theme.ResourcesProvider res final int undoInfoColor = getThemedColor(Theme.key_undo_infoColor); final int undoLinkColor = getThemedColor(Theme.key_undo_cancelColor); - final LinearLayout linearLayout = new LinearLayout(context); + linearLayout = new LinearLayout(context); linearLayout.setOrientation(LinearLayout.VERTICAL); addView(linearLayout, LayoutHelper.createFrameRelatively(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.START | Gravity.CENTER_VERTICAL, 52, 8, 8, 8)); @@ -1215,6 +1231,11 @@ public void setAnimation(int resId, int w, int h, String... layers) { public CharSequence getAccessibilityText() { return titleTextView.getText() + ".\n" + subtitleTextView.getText(); } + + public void hideImage() { + imageView.setVisibility(GONE); + ((MarginLayoutParams) linearLayout.getLayoutParams()).setMarginStart(AndroidUtilities.dp(10)); + } } public static class LottieLayout extends ButtonLayout { @@ -1282,6 +1303,7 @@ public void setAnimation(int resId, int w, int h, String... layers) { } public void setAnimation(TLRPC.Document document, int w, int h, String... 
layers) { + imageView.setAutoRepeat(true); imageView.setAnimation(document, w, h); for (String layer : layers) { imageView.setLayerColor(layer + ".**", textColor); @@ -1374,7 +1396,7 @@ public void setText(CharSequence text, BufferType type) { } else { linearLayout = new LinearLayout(getContext()); linearLayout.setOrientation(LinearLayout.VERTICAL); - addView(linearLayout, LayoutHelper.createFrameRelatively(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.START | Gravity.CENTER_VERTICAL, 18 + 56 + 2, 0, 8, 0)); + addView(linearLayout, LayoutHelper.createFrameRelatively(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.START | Gravity.CENTER_VERTICAL, 18 + 56 + 2, 0, 8, 0)); textView = new LinkSpanDrawable.LinksTextView(context) { @Override @@ -1699,7 +1721,11 @@ public void addView(View child) { @Override public void removeView(View child) { super.removeView(child); - BulletinWindow.this.dismiss(); + try { + BulletinWindow.this.dismiss(); + } catch (Exception ignore) { + + } removeDelegate(container); } }, @@ -1764,6 +1790,7 @@ public int getTopOffset(int tag) { } catch (Exception ignore) {} } + @RequiresApi(api = Build.VERSION_CODES.KITKAT_WATCH) private void applyInsets(WindowInsets insets) { if (container != null) { container.setPadding( diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BulletinFactory.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BulletinFactory.java index 860f5936c3..adb07b60a8 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/BulletinFactory.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BulletinFactory.java @@ -16,6 +16,7 @@ import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; +import android.widget.ImageView; import androidx.annotation.CheckResult; import androidx.annotation.NonNull; @@ -37,6 +38,7 @@ import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.LaunchActivity; import org.telegram.ui.PremiumPreviewFragment; import java.util.ArrayList; @@ -61,6 +63,14 @@ public static boolean canShowBulletin(BaseFragment fragment) { public static final int ICON_TYPE_NOT_FOUND = 0; public static final int ICON_TYPE_WARNING = 1; + public static BulletinFactory global() { + BaseFragment baseFragment = LaunchActivity.getLastFragment(); + if (baseFragment == null) { + return null; + } + return BulletinFactory.of(baseFragment); + } + public enum FileType { PHOTO("PhotoSavedHint", R.string.PhotoSavedHint, Icon.SAVED_TO_GALLERY), @@ -158,6 +168,15 @@ public Bulletin createSimpleBulletin(int iconRawId, CharSequence text) { return create(layout, text.length() < 20 ? Bulletin.DURATION_SHORT : Bulletin.DURATION_LONG); } + public Bulletin createSimpleBulletin(int iconRawId, CharSequence text, int maxLines) { + final Bulletin.LottieLayout layout = new Bulletin.LottieLayout(getContext(), resourcesProvider); + layout.setAnimation(iconRawId, 36, 36); + layout.textView.setText(text); + layout.textView.setSingleLine(false); + layout.textView.setMaxLines(maxLines); + return create(layout, text.length() < 20 ? 
Bulletin.DURATION_SHORT : Bulletin.DURATION_LONG); + } + public Bulletin createSimpleBulletin(int iconRawId, CharSequence text, CharSequence subtext) { final Bulletin.TwoLineLottieLayout layout = new Bulletin.TwoLineLottieLayout(getContext(), resourcesProvider); layout.setAnimation(iconRawId, 36, 36); @@ -170,10 +189,20 @@ public Bulletin createSimpleBulletin(int iconRawId, CharSequence text, CharSeque return createSimpleBulletin(iconRawId, text, button, text.length() < 20 ? Bulletin.DURATION_SHORT : Bulletin.DURATION_LONG, onButtonClick); } + public Bulletin createSimpleBulletin(int iconRawId, CharSequence text, CharSequence subtext, CharSequence button, Runnable onButtonClick) { + final Bulletin.TwoLineLottieLayout layout = new Bulletin.TwoLineLottieLayout(getContext(), resourcesProvider); + layout.setAnimation(iconRawId, 36, 36); + layout.titleTextView.setText(text); + layout.subtitleTextView.setText(subtext); + layout.setButton(new Bulletin.UndoButton(getContext(), true, resourcesProvider).setText(button).setUndoAction(onButtonClick)); + return create(layout, Bulletin.DURATION_PROLONG); + } + public Bulletin createSimpleBulletin(int iconRawId, CharSequence text, CharSequence button, int duration, Runnable onButtonClick) { final Bulletin.LottieLayout layout = new Bulletin.LottieLayout(getContext(), resourcesProvider); layout.setAnimation(iconRawId, 36, 36); layout.textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + layout.textView.setTextDirection(View.TEXT_DIRECTION_LOCALE); layout.textView.setSingleLine(false); layout.textView.setMaxLines(3); layout.textView.setText(text); @@ -191,6 +220,49 @@ public Bulletin createSimpleBulletin(Drawable drawable, CharSequence text, Strin return create(layout, Bulletin.DURATION_LONG); } + public Bulletin createSimpleBulletin(Drawable drawable, CharSequence text) { + final Bulletin.LottieLayout layout = new Bulletin.LottieLayout(getContext(), resourcesProvider); + layout.imageView.setImageDrawable(drawable); + layout.textView.setText(text); + layout.textView.setSingleLine(false); + layout.textView.setMaxLines(2); + return create(layout, Bulletin.DURATION_LONG); + } + + public Bulletin createSimpleBulletin(Drawable drawable, CharSequence text, CharSequence subtitle) { + final Bulletin.TwoLineLottieLayout layout = new Bulletin.TwoLineLottieLayout(getContext(), resourcesProvider); + layout.imageView.setImageDrawable(drawable); + layout.titleTextView.setText(text); + layout.subtitleTextView.setText(subtitle); + return create(layout, Bulletin.DURATION_LONG); + } + + public Bulletin createSimpleBulletin(CharSequence text, CharSequence subtitle) { + final Bulletin.TwoLineLottieLayout layout = new Bulletin.TwoLineLottieLayout(getContext(), resourcesProvider); + layout.hideImage(); + layout.titleTextView.setText(text); + layout.subtitleTextView.setText(subtitle); + return create(layout, Bulletin.DURATION_PROLONG); + } + + public Bulletin createSimpleBulletin(Drawable drawable, CharSequence text, CharSequence subtitle, String button, Runnable onButtonClick) { + final Bulletin.TwoLineLottieLayout layout = new Bulletin.TwoLineLottieLayout(getContext(), resourcesProvider); + layout.imageView.setImageDrawable(drawable); + layout.titleTextView.setText(text); + layout.subtitleTextView.setText(subtitle); + layout.setButton(new Bulletin.UndoButton(getContext(), true, resourcesProvider).setText(button).setUndoAction(onButtonClick)); + return create(layout, Bulletin.DURATION_LONG); + } + + public Bulletin createSimpleBulletin(CharSequence text, CharSequence 
subtitle, String button, Runnable onButtonClick) { + final Bulletin.TwoLineLottieLayout layout = new Bulletin.TwoLineLottieLayout(getContext(), resourcesProvider); + layout.hideImage(); + layout.titleTextView.setText(text); + layout.subtitleTextView.setText(subtitle); + layout.setButton(new Bulletin.UndoButton(getContext(), true, resourcesProvider).setText(button).setUndoAction(onButtonClick)); + return create(layout, Bulletin.DURATION_PROLONG); + } + public Bulletin createUndoBulletin(CharSequence text, Runnable onUndo, Runnable onAction) { final Bulletin.LottieLayout layout = new Bulletin.LottieLayout(getContext(), resourcesProvider); layout.textView.setText(text); @@ -276,7 +348,7 @@ public Bulletin createUsersAddedBulletin(ArrayList users, TLRPC.Chat if (ChatObject.isChannelAndNotMegaGroup(chat)) { text = AndroidUtilities.replaceTags(LocaleController.formatPluralString("AddedMembersToChannel", users.size())); } else { - text = AndroidUtilities.replaceTags(LocaleController.formatPluralString("AddedMembersToGroup", users.size())); + text = AndroidUtilities.replaceTags(LocaleController.formatPluralString("AddedSubscribersToChannel", users.size())); } } return createUsersBulletin(users, text); @@ -295,12 +367,28 @@ public Bulletin createEmojiBulletin(String emoji, String text, String button, Ru return createEmojiBulletin(MediaDataController.getInstance(UserConfig.selectedAccount).getEmojiAnimatedSticker(emoji), text, button, onButtonClick); } + public Bulletin createEmojiBulletin(TLRPC.Document document, CharSequence text) { + final Bulletin.LottieLayout layout = new Bulletin.LottieLayout(getContext(), resourcesProvider); + if (MessageObject.isTextColorEmoji(document)) { + layout.imageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_undo_infoColor), PorterDuff.Mode.SRC_IN)); + } + layout.setAnimation(document, 36, 36); + layout.textView.setText(text); + layout.textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + layout.textView.setSingleLine(false); + layout.textView.setMaxLines(3); + return create(layout, Bulletin.DURATION_LONG); + } + public Bulletin createEmojiBulletin(TLRPC.Document document, CharSequence text, CharSequence button, Runnable onButtonClick) { final Bulletin.LottieLayout layout = new Bulletin.LottieLayout(getContext(), resourcesProvider); if (MessageObject.isTextColorEmoji(document)) { layout.imageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_undo_infoColor), PorterDuff.Mode.SRC_IN)); } layout.setAnimation(document, 36, 36); + if (layout.imageView.getImageReceiver() != null) { + layout.imageView.getImageReceiver().setRoundRadius(AndroidUtilities.dp(4)); + } layout.textView.setText(text); layout.textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); layout.textView.setSingleLine(false); @@ -482,19 +570,21 @@ public Bulletin createRestrictVoiceMessagesPremiumBulletin() { String str = LocaleController.getString(R.string.PrivacyVoiceMessagesPremiumOnly); SpannableStringBuilder spannable = new SpannableStringBuilder(str); int indexStart = str.indexOf('*'), indexEnd = str.lastIndexOf('*'); - spannable.replace(indexStart, indexEnd + 1, str.substring(indexStart + 1, indexEnd)); - spannable.setSpan(new ClickableSpan() { - @Override - public void onClick(@NonNull View widget) { - fragment.presentFragment(new PremiumPreviewFragment("settings")); - } + if (indexStart >= 0) { + spannable.replace(indexStart, indexEnd + 1, str.substring(indexStart + 1, indexEnd)); + spannable.setSpan(new ClickableSpan() { + @Override + public void 
onClick(@NonNull View widget) { + fragment.presentFragment(new PremiumPreviewFragment("settings")); + } - @Override - public void updateDrawState(@NonNull TextPaint ds) { - super.updateDrawState(ds); - ds.setUnderlineText(false); - } - }, indexStart, indexEnd - 1, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE); + @Override + public void updateDrawState(@NonNull TextPaint ds) { + super.updateDrawState(ds); + ds.setUnderlineText(false); + } + }, indexStart, indexEnd - 1, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE); + } layout.textView.setText(spannable); layout.textView.setSingleLine(false); layout.textView.setMaxLines(2); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/CacheChart.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/CacheChart.java index d1ca26bcdb..b5e132054c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/CacheChart.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/CacheChart.java @@ -5,7 +5,9 @@ import static org.telegram.messenger.AndroidUtilities.lerp; import android.content.Context; +import android.content.res.Configuration; import android.graphics.Bitmap; +import android.graphics.BitmapFactory; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.LinearGradient; @@ -26,6 +28,7 @@ import com.google.zxing.common.detector.MathUtils; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.R; import org.telegram.messenger.SvgHelper; import org.telegram.ui.ActionBar.Theme; @@ -35,11 +38,15 @@ public class CacheChart extends View { + public static final int TYPE_CACHE = 0; + public static final int TYPE_NETWORK = 1; + + private RectF chartMeasureBounds = new RectF(); private RectF chartBounds = new RectF(); private RectF chartInnerBounds = new RectF(); - private static final int SECTIONS_COUNT = 9; - private static final String[] colorKeys = new String[] { + private static final int DEFAULT_SECTIONS_COUNT = 9; + private static final String[] DEFAULT_COLORS = new String[] { Theme.key_statisticChartLine_lightblue, Theme.key_statisticChartLine_blue, Theme.key_statisticChartLine_green, @@ -51,7 +58,7 @@ public class CacheChart extends View { Theme.key_statisticChartLine_golden }; - private static final int[] particles = new int[] { + private static final int[] DEFAULT_PARTICLES = new int[] { R.raw.cache_photos, R.raw.cache_videos, R.raw.cache_documents, @@ -63,13 +70,19 @@ public class CacheChart extends View { R.raw.cache_other }; + private final int sectionsCount; + private final String[] colorKeys; + private final int type; + private final boolean svgParticles; + private final int[] particles; + private boolean loading = true; public AnimatedFloat loadingFloat = new AnimatedFloat(this, 750, CubicBezierInterpolator.EASE_OUT_QUINT); private boolean complete = false; - private AnimatedFloat completeFloat = new AnimatedFloat(this, 750, CubicBezierInterpolator.EASE_OUT_QUINT); + private AnimatedFloat completeFloat = new AnimatedFloat(this, 650, CubicBezierInterpolator.EASE_OUT_QUINT); - private Sector[] sectors = new Sector[SECTIONS_COUNT]; + private Sector[] sectors; private float[] segmentsTmp = new float[2]; private RectF roundingRect = new RectF(); @@ -79,12 +92,15 @@ public class CacheChart extends View { private Path completePath = new Path(); private Paint completePaintStroke = new Paint(Paint.ANTI_ALIAS_FLAG); private Paint completePaint = new Paint(Paint.ANTI_ALIAS_FLAG); - private LinearGradient completeGradient; - private Matrix 
completeGradientMatrix; + private LinearGradient completeGradient, completeTextGradient; + private Matrix completeGradientMatrix, completeTextGradientMatrix; private AnimatedTextView.AnimatedTextDrawable topText = new AnimatedTextView.AnimatedTextDrawable(false, true, true); private AnimatedTextView.AnimatedTextDrawable bottomText = new AnimatedTextView.AnimatedTextDrawable(false, true, true); + private AnimatedTextView.AnimatedTextDrawable topCompleteText = new AnimatedTextView.AnimatedTextDrawable(false, true, true); + private AnimatedTextView.AnimatedTextDrawable bottomCompleteText = new AnimatedTextView.AnimatedTextDrawable(false, true, true); + private StarParticlesView.Drawable completeDrawable; private static long particlesStart = -1; @@ -233,7 +249,7 @@ private void drawParticles( float innerRadius, float outerRadius, float textAlpha, float alpha ) { - if (alpha <= 0) { + if (alpha <= 0 || !LiteMode.isEnabled(LiteMode.FLAGS_CHAT)) { return; } long now = System.currentTimeMillis(); @@ -244,6 +260,7 @@ private void drawParticles( float time = (now - particlesStart) / 10000f; if (particle != null) { int sz = particle.getWidth(); + float szs = AndroidUtilities.dpf2(15) / sz; float stepangle = 7f; @@ -272,7 +289,7 @@ private void drawParticles( particleAlpha = Math.max(0, Math.min(1, particleAlpha)); particlePaint.setAlpha((int) (0xFF * particleAlpha)); - float s = (float) (.75f * (.25f * (float) (Math.sin(t * Math.PI) - 1) + 1) * (.8f + (Math.sin(angle) + 1) * .25f)); + float s = szs * (float) (.75f * (.25f * (float) (Math.sin(t * Math.PI) - 1) + 1) * (.8f + (Math.sin(angle) + 1) * .25f)); canvas.save(); canvas.translate(x, y); @@ -336,14 +353,28 @@ void draw( } public CacheChart(Context context) { + this(context, DEFAULT_SECTIONS_COUNT, DEFAULT_COLORS, TYPE_CACHE, DEFAULT_PARTICLES); + } + + public CacheChart(Context context, int count, String[] colorKeys, int type, int[] particles) { super(context); + setLayerType(LAYER_TYPE_HARDWARE, null); + + this.sectionsCount = count; + this.colorKeys = colorKeys; + this.particles = particles; + this.type = type; + this.svgParticles = type == TYPE_CACHE; + this.sectors = new Sector[this.sectionsCount]; loadingBackgroundPaint.setStyle(Paint.Style.STROKE); loadingBackgroundPaint.setColor(Theme.getColor(Theme.key_listSelector)); completePaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_IN)); completeGradient = new LinearGradient(0, 0, 0, AndroidUtilities.dp(200), new int[] { 0x006ED556, 0xFF6ED556, 0xFF41BA71, 0x0041BA71 }, new float[] { 0, .07f, .93f, 1 }, Shader.TileMode.CLAMP); + completeTextGradient = new LinearGradient(0, 0, 0, AndroidUtilities.dp(200), new int[] { 0x006ED556, 0xFF6ED556, 0xFF41BA71, 0x0041BA71 }, new float[] { 0, .07f, .93f, 1 }, Shader.TileMode.CLAMP); completeGradientMatrix = new Matrix(); + completeTextGradientMatrix = new Matrix(); completePaintStroke.setShader(completeGradient); completePaint.setShader(completeGradient); completePaintStroke.setStyle(Paint.Style.STROKE); @@ -361,7 +392,19 @@ public CacheChart(Context context) { bottomText.setTextSize(AndroidUtilities.dp(12)); bottomText.setGravity(Gravity.CENTER); - for (int i = 0; i < SECTIONS_COUNT; ++i) { + topCompleteText.setAnimationProperties(.2f, 0, 450, CubicBezierInterpolator.EASE_OUT_QUINT); + topCompleteText.getPaint().setShader(completeTextGradient); + topCompleteText.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + topCompleteText.setTextSize(AndroidUtilities.dp(32)); + topCompleteText.setGravity(Gravity.CENTER); + + 
bottomCompleteText.setAnimationProperties(.6f, 0, 450, CubicBezierInterpolator.EASE_OUT_QUINT); + bottomCompleteText.getPaint().setShader(completeTextGradient); + bottomCompleteText.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + bottomCompleteText.setTextSize(AndroidUtilities.dp(12)); + bottomCompleteText.setGravity(Gravity.CENTER); + + for (int i = 0; i < sectors.length; ++i) { Sector sector = sectors[i] = new Sector(); final int color2 = Theme.blendOver(Theme.getColor(colorKeys[i]), 0x03000000); final int color1 = Theme.blendOver(Theme.getColor(colorKeys[i]), 0x30ffffff); @@ -369,7 +412,40 @@ public CacheChart(Context context) { sector.gradient = new RadialGradient(0, 0, dp(86), new int[]{ color1, color2 }, new float[] { .3f, 1 }, Shader.TileMode.CLAMP); sector.gradient.setLocalMatrix(sector.gradientMatrix = new Matrix()); sector.paint.setShader(sector.gradient); - sector.particle = SvgHelper.getBitmap(particles[i], AndroidUtilities.dp(16), AndroidUtilities.dp(16), 0xffffffff); + } + } + + private boolean interceptTouch = true; + public void setInterceptTouch(boolean value) { + this.interceptTouch = value; + } + + private boolean isAttached; + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + isAttached = true; + for (int i = 0; i < sectors.length; ++i) { + if (sectors[i].particle == null) { + if (svgParticles) { + sectors[i].particle = SvgHelper.getBitmap(particles[i], AndroidUtilities.dp(16), AndroidUtilities.dp(16), 0xffffffff); + } else { + sectors[i].particle = BitmapFactory.decodeResource(getContext().getResources(), particles[i]); + } + } + } + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + isAttached = false; + for (int i = 0; i < sectors.length; ++i) { + if (sectors[i].particle != null) { + sectors[i].particle.recycle(); + sectors[i].particle = null; + } } } @@ -399,7 +475,7 @@ public boolean dispatchTouchEvent(MotionEvent event) { setSelected(index); if (index >= 0) { onSectionDown(index, index != -1); - if (getParent() != null) { + if (getParent() != null && interceptTouch) { getParent().requestDisallowInterceptTouchEvent(true); } } @@ -459,8 +535,12 @@ public void setSelected(int index) { public static class SegmentSize { int index; - boolean selected; - long size; + public boolean selected; + public long size; + + public static SegmentSize of(long size) { + return of(size, true); + } public static SegmentSize of(long size, boolean selected) { SegmentSize segment = new SegmentSize(); @@ -470,20 +550,35 @@ public static SegmentSize of(long size, boolean selected) { } } - public void setSegments(long totalSize, SegmentSize ...segments) { + public void setSegments(long totalSize, boolean animated, SegmentSize ...segments) { if (segments == null || segments.length == 0) { - loading = true; + loading = false; complete = totalSize == 0; - topText.setText(""); - bottomText.setText(""); + if (!animated) { + loadingFloat.set(loading ? 1 : 0, true); + completeFloat.set(complete ? 
1 : 0, true); + } + topCompleteText.setText(topText.getText(), false); + topText.setText("0", animated); + topCompleteText.setText("0", animated); + + bottomCompleteText.setText(bottomText.getText(), false); + bottomText.setText("KB", animated); + bottomCompleteText.setText("KB", animated); for (int i = 0; i < sectors.length; ++i) { sectors[i].textAlpha = 0; + if (!animated) { + sectors[i].textAlphaAnimated.set(0, true); + } } invalidate(); return; } loading = false; + if (!animated) { + loadingFloat.set(0, true); + } SpannableString percent = new SpannableString("%"); // percent.setSpan(new RelativeSizeSpan(0.733f), 0, percent.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); @@ -505,12 +600,23 @@ public void setSegments(long totalSize, SegmentSize ...segments) { } if (segmentsSum <= 0) { - loading = true; + loading = false; complete = totalSize <= 0; - topText.setText(""); - bottomText.setText(""); + if (!animated) { + loadingFloat.set(loading ? 1 : 0, true); + completeFloat.set(complete ? 1 : 0, true); + } + topCompleteText.setText(topText.getText(), false); + topText.setText("0", animated); + topCompleteText.setText("0", animated); + bottomCompleteText.setText(bottomText.getText(), false); + bottomText.setText("KB", animated); + bottomCompleteText.setText("KB", animated); for (int i = 0; i < sectors.length; ++i) { sectors[i].textAlpha = 0; + if (!animated) { + sectors[i].textAlphaAnimated.set(0, true); + } } invalidate(); return; @@ -537,14 +643,16 @@ public void setSegments(long totalSize, SegmentSize ...segments) { tempFloat[i] = segments[i] == null || !segments[i].selected ? 0 : segments[i].size / (float) segmentsSum; } AndroidUtilities.roundPercents(tempFloat, tempPercents); - Arrays.sort(segments, (a, b) -> Long.compare(a.size, b.size)); - for (int i = 0; i < segments.length - 1; ++i) { - if (segments[i].index == segments.length - 1) { - int from = i, to = 0; - SegmentSize temp = segments[to]; - segments[to] = segments[from]; - segments[from] = temp; - break; + if (type == TYPE_CACHE) { // putting "other" section to being the first one + Arrays.sort(segments, (a, b) -> Long.compare(a.size, b.size)); + for (int i = 0; i < segments.length - 1; ++i) { + if (segments[i].index == segments.length - 1) { + int from = i, to = 0; + SegmentSize temp = segments[to]; + segments[to] = segments[from]; + segments[from] = temp; + break; + } } } @@ -559,8 +667,13 @@ public void setSegments(long totalSize, SegmentSize ...segments) { sectors[i].textAlpha = progress > .05 && progress < 1 ? 1f : 0f; sectors[i].textScale = progress < .08f || tempPercents[i] >= 100 ? 
.85f : 1f; sectors[i].particlesAlpha = 1; + if (!animated) { + sectors[i].textAlphaAnimated.set(sectors[i].textAlpha, true); + sectors[i].textScaleAnimated.set(sectors[i].textScale, true); + sectors[i].particlesAlphaAnimated.set(sectors[i].particlesAlpha, true); + } if (sectors[i].textAlpha > 0) { - sectors[i].text.setText(string); + sectors[i].text.setText(string, animated); } if (progress < .02f && progress > 0) { progress = .02f; @@ -573,10 +686,19 @@ public void setSegments(long totalSize, SegmentSize ...segments) { sectors[i].angleCenter = (angleFrom + angleTo) / 2; sectors[i].angleSize = Math.abs(angleTo - angleFrom) / 2; sectors[i].textAlpha = 0; + if (!animated) { + sectors[i].angleCenterAnimated.set(sectors[i].angleCenter, true); + sectors[i].angleSizeAnimated.set(sectors[i].angleSize, true); + sectors[i].textAlphaAnimated.set(sectors[i].textAlpha, true); + } continue; } sectors[i].angleCenter = (angleFrom + angleTo) / 2; sectors[i].angleSize = Math.abs(angleTo - angleFrom) / 2; + if (!animated) { + sectors[i].angleCenterAnimated.set(sectors[i].angleCenter, true); + sectors[i].angleSizeAnimated.set(sectors[i].angleSize, true); + } prev += progress; k++; } @@ -586,9 +708,17 @@ public void setSegments(long totalSize, SegmentSize ...segments) { if (top.length() >= 4 && segmentsSum < 1024L * 1024L * 1024L) { top = top.split("\\.")[0]; } - topText.setText(top); - bottomText.setText(fileSize.length > 1 ? fileSize[1] : ""); + topText.setText(top, animated); + bottomText.setText(fileSize.length > 1 ? fileSize[1] : "", animated); + if (completeFloat.get() > 0) { + topCompleteText.setText(topText.getText(), animated); + bottomCompleteText.setText(bottomText.getText(), animated); + } + complete = false; + if (!animated) { + completeFloat.set(complete ? 1 : 0, true); + } invalidate(); } @@ -618,8 +748,12 @@ protected void dispatchDraw(Canvas canvas) { final float loading = loadingFloat.set(this.loading ? 1f : 0f); final float complete = completeFloat.set(this.complete ? 
1f : 0f); + chartBounds.set(chartMeasureBounds); + final float minusDp = lerp(0, dpf2(padInsideDp()), complete); + chartBounds.inset(minusDp, minusDp); + chartInnerBounds.set(chartBounds); - final float thickness = lerp(dpf2(38), dpf2(10), loading); + final float thickness = lerp(dpf2(38), dpf2(10), Math.max(loading, complete)); chartInnerBounds.inset(thickness, thickness); final float rounding = lerp(0, dp(60), loading); @@ -647,7 +781,7 @@ protected void dispatchDraw(Canvas canvas) { boolean wouldUpdate = loading > 0 || complete > 0; - for (int i = 0; i < SECTIONS_COUNT; ++i) { + for (int i = 0; i < sectors.length; ++i) { Sector sector = sectors[i]; CircularProgressDrawable.getSegments((loadingTime + i * 80) % 5400, segmentsTmp); @@ -672,30 +806,47 @@ protected void dispatchDraw(Canvas canvas) { sector.draw(canvas, chartBounds, chartInnerBounds, angleCenter, angleSize, rounding, 1f - complete, 1f - loading); } - topText.setAlpha((int) (255 * (1f - loading) * (1f - complete))); - topText.setBounds((int) (chartBounds.centerX()), (int) (chartBounds.centerY() - AndroidUtilities.dp(5)), (int) (chartBounds.centerX()), (int) (chartBounds.centerY() - AndroidUtilities.dp(3))); - topText.draw(canvas); - wouldUpdate = topText.isAnimating() || wouldUpdate; - - bottomText.setAlpha((int) (255 * (1f - loading) * (1f - complete))); - bottomText.setBounds((int) (chartBounds.centerX()), (int) (chartBounds.centerY() + AndroidUtilities.dp(22)), (int) (chartBounds.centerX()), (int) (chartBounds.centerY() + AndroidUtilities.dp(22))); - bottomText.draw(canvas); - wouldUpdate = bottomText.isAnimating() || wouldUpdate; + if (type == TYPE_CACHE) { + float textAlpha = (1f - loading) * (1f - complete); + float topTextX = chartBounds.centerX(); + float topTextY = chartBounds.centerY() - dpf2(5); + wouldUpdate = drawAnimatedText(canvas, topText, topTextX, topTextY, 1f, textAlpha) || wouldUpdate; + + float bottomTextX = chartBounds.centerX(); + float bottomTextY = chartBounds.centerY() + dpf2(22); + wouldUpdate = drawAnimatedText(canvas, bottomText, bottomTextX, bottomTextY, 1f, textAlpha) || wouldUpdate; + } else if (type == TYPE_NETWORK) { + float textAlpha = 1f - loading; + float topTextX = chartBounds.centerX() - AndroidUtilities.lerp(0, dpf2(4), complete); + float topTextY = chartBounds.centerY() - AndroidUtilities.lerp(dpf2(5), 0, complete); + float topTextScale = AndroidUtilities.lerp(1f, 2.25f, complete); + wouldUpdate = drawAnimatedText(canvas, topCompleteText, topTextX, topTextY, topTextScale, textAlpha * complete) || wouldUpdate; + wouldUpdate = drawAnimatedText(canvas, topText, topTextX, topTextY, topTextScale, textAlpha * (1f - complete)) || wouldUpdate; + + float bottomTextX = chartBounds.centerX() + AndroidUtilities.lerp(0, dpf2(26), complete); + float bottomTextY = chartBounds.centerY() + AndroidUtilities.lerp(dpf2(22), -dpf2(18), complete); + float bottomTextScale = AndroidUtilities.lerp(1f, 1.4f, complete); + wouldUpdate = drawAnimatedText(canvas, bottomCompleteText, bottomTextX, bottomTextY, bottomTextScale, textAlpha * complete) || wouldUpdate; + wouldUpdate = drawAnimatedText(canvas, bottomText, bottomTextX, bottomTextY, bottomTextScale, textAlpha * (1f - complete)) || wouldUpdate; + } if (complete > 0) { + boolean init = false; if (completeDrawable == null) { completeDrawable = new StarParticlesView.Drawable(25); completeDrawable.type = 100; - completeDrawable.roundEffect = false; + completeDrawable.roundEffect = true; completeDrawable.useRotate = true; completeDrawable.useBlur = false; 
completeDrawable.checkBounds = true; completeDrawable.size1 = 18; completeDrawable.distributionAlgorithm = false; completeDrawable.excludeRadius = AndroidUtilities.dp(80); - completeDrawable.k1 = completeDrawable.k2 = completeDrawable.k3 = 0.7f; + completeDrawable.k1 = completeDrawable.k2 = completeDrawable.k3 = .85f; completeDrawable.init(); - + init = true; + } + if (init || completePathBounds == null || !completePathBounds.equals(chartMeasureBounds)) { float d = Math.min(getMeasuredHeight(), Math.min(getMeasuredWidth(), AndroidUtilities.dp(150))); completeDrawable.rect.set(0, 0, d, d); completeDrawable.rect.offset((getMeasuredWidth() - completeDrawable.rect.width()) / 2, (getMeasuredHeight() - completeDrawable.rect.height()) / 2); @@ -713,36 +864,71 @@ protected void dispatchDraw(Canvas canvas) { completePaintStroke.setAlpha((int) (0xFF * complete)); canvas.drawCircle(chartBounds.centerX(), chartBounds.centerY(), (chartBounds.width() - thickness) / 2, completePaintStroke); - if (completePathBounds == null || completePathBounds.equals(chartBounds)) { + if (completePathBounds == null || !completePathBounds.equals(chartMeasureBounds)) { if (completePathBounds == null) { completePathBounds = new RectF(); } - completePathBounds.set(chartBounds); + completePathBounds.set(chartMeasureBounds); completePath.rewind(); - completePath.moveTo(chartBounds.left + chartBounds.width() * .348f, chartBounds.top + chartBounds.height() * .538f); - completePath.lineTo(chartBounds.left + chartBounds.width() * .447f, chartBounds.top + chartBounds.height() * .636f); - completePath.lineTo(chartBounds.left + chartBounds.width() * .678f, chartBounds.top + chartBounds.height() * .402f); + if (type == TYPE_CACHE) { + completePath.moveTo(chartBounds.width() * .348f, chartBounds.height() * .538f); + completePath.lineTo(chartBounds.width() * .447f, chartBounds.height() * .636f); + completePath.lineTo(chartBounds.width() * .678f, chartBounds.height() * .402f); + } else if (type == TYPE_NETWORK) { + completePath.moveTo(chartBounds.width() * .2929f, chartBounds.height() * .4369f); + completePath.lineTo(chartBounds.width() * .381f, chartBounds.height() * .35f); + completePath.lineTo(chartBounds.width() * .4691f, chartBounds.height() * .4369f); + completePath.moveTo(chartBounds.width() * .381f, chartBounds.height() * .35f); + completePath.lineTo(chartBounds.width() * .381f, chartBounds.height() * .6548f); + + completePath.moveTo(chartBounds.width() * .5214f, chartBounds.height() * .5821f); + completePath.lineTo(chartBounds.width() * .6095f, chartBounds.height() * .669f); + completePath.lineTo(chartBounds.width() * .6976f, chartBounds.height() * .5821f); + completePath.moveTo(chartBounds.width() * .6095f, chartBounds.height() * .669f); + completePath.lineTo(chartBounds.width() * .6095f, chartBounds.height() * .3643f); + } + completePath.offset(chartBounds.left, chartBounds.top); + } + if (type == TYPE_CACHE) { + completePaintStroke.setStrokeWidth(dpf2(10)); + canvas.drawPath(completePath, completePaintStroke); } - completePaintStroke.setStrokeWidth(AndroidUtilities.dp(10)); - canvas.drawPath(completePath, completePaintStroke); } - if (wouldUpdate || true) { + if ((wouldUpdate || true) && isAttached) { invalidate(); } } + private boolean drawAnimatedText(Canvas canvas, AnimatedTextView.AnimatedTextDrawable textDrawable, float x, float y, float scale, float alpha) { + if (alpha <= 0) { + return false; + } + textDrawable.setAlpha((int) (0xFF * alpha)); + textDrawable.setBounds(0, 0, 0, 0); + canvas.save(); + canvas.translate(x, 
y); + canvas.scale(scale, scale); + textDrawable.draw(canvas); + canvas.restore(); + return textDrawable.isAnimating(); + } + protected int heightDp() { return 200; } + protected int padInsideDp() { + return 0; + } + @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { final int width = MeasureSpec.getSize(widthMeasureSpec); final int height = dp(heightDp()); final int d = dp(172); - chartBounds.set( + chartMeasureBounds.set( (width - d) / 2f, (height - d) / 2f, (width + d) / 2f, @@ -750,8 +936,11 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { ); completeGradientMatrix.reset(); - completeGradientMatrix.setTranslate(chartBounds.left, 0); + completeGradientMatrix.setTranslate(chartMeasureBounds.left, 0); completeGradient.setLocalMatrix(completeGradientMatrix); + completeTextGradientMatrix.reset(); + completeTextGradientMatrix.setTranslate(chartMeasureBounds.left, -chartMeasureBounds.centerY()); + completeTextGradient.setLocalMatrix(completeTextGradientMatrix); if (completeDrawable != null) { completeDrawable.rect.set(0, 0, AndroidUtilities.dp(140), AndroidUtilities.dp(140)); @@ -765,4 +954,10 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY) ); } + + @Override + protected void onConfigurationChanged(Configuration newConfig) { + super.onConfigurationChanged(newConfig); + requestLayout(); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/CanvasButton.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/CanvasButton.java index 97303f65a1..344056a319 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/CanvasButton.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/CanvasButton.java @@ -25,7 +25,7 @@ public class CanvasButton { - Path drawingPath = new Path(); + Path drawingPath; ArrayList drawingRects = new ArrayList<>(); int usingRectCount; boolean buttonPressed; @@ -110,7 +110,11 @@ public void draw(Canvas canvas) { private void drawInternal(Canvas canvas, Paint paint) { if (usingRectCount > 1) { if (!pathCreated) { - drawingPath.rewind(); + if (drawingPath == null) { + drawingPath = new Path(); + } else { + drawingPath.rewind(); + } int left = 0, top = 0, right = 0, bottom = 0; for (int i = 0; i < usingRectCount; i++) { if (i + 1 < usingRectCount) { @@ -140,7 +144,9 @@ private void drawInternal(Canvas canvas, Paint paint) { pathCreated = true; } paint.setPathEffect(pathEffect); - canvas.drawPath(drawingPath, paint); + if (drawingPath != null) { + canvas.drawPath(drawingPath, paint); + } } else if (usingRectCount == 1) { if (selectorDrawable != null) { selectorDrawable.setBounds((int) drawingRects.get(0).left, (int) drawingRects.get(0).top, (int) drawingRects.get(0).right, (int) drawingRects.get(0).bottom); @@ -260,4 +266,9 @@ public void cancelRipple() { } } + + public void setRect(int x, int y, int x1, int y1) { + AndroidUtilities.rectTmp.set(x, y, x1, y1); + setRect(AndroidUtilities.rectTmp); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatActivityEnterView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatActivityEnterView.java index 44b1770596..2eb2387618 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatActivityEnterView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatActivityEnterView.java @@ -87,7 +87,6 @@ import android.widget.ImageView; import android.widget.LinearLayout; import 
android.widget.TextView; -import android.widget.Toast; import androidx.annotation.Keep; import androidx.annotation.NonNull; @@ -115,16 +114,19 @@ import org.telegram.messenger.AccountInstance; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; +import org.telegram.messenger.BuildVars; import org.telegram.messenger.ChatObject; import org.telegram.messenger.ContactsController; import org.telegram.messenger.DialogObject; import org.telegram.messenger.Emoji; import org.telegram.messenger.FileLog; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaController; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; +import org.telegram.messenger.MessagesStorage; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.NotificationsController; import org.telegram.messenger.R; @@ -138,6 +140,7 @@ import org.telegram.messenger.browser.Browser; import org.telegram.messenger.camera.CameraController; import org.telegram.tgnet.ConnectionsManager; +import org.telegram.tgnet.SerializedData; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.ActionBar; import org.telegram.ui.ActionBar.ActionBarMenuSubItem; @@ -151,6 +154,7 @@ import org.telegram.ui.BasePermissionsActivity; import org.telegram.ui.Cells.TextCheckCell; import org.telegram.ui.ChatActivity; +import org.telegram.ui.Components.Premium.GiftPremiumBottomSheet; import org.telegram.ui.Components.Premium.PremiumFeatureBottomSheet; import org.telegram.ui.ContentPreviewViewer; import org.telegram.ui.DialogsActivity; @@ -160,6 +164,7 @@ import org.telegram.ui.PremiumPreviewFragment; import org.telegram.ui.ProfileActivity; import org.telegram.ui.StickersActivity; +import org.telegram.ui.TopicsFragment; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -193,6 +198,9 @@ public class ChatActivityEnterView extends BlurredFrameLayout implements NotificationCenter.NotificationCenterDelegate, SizeNotifierFrameLayout.SizeNotifierFrameLayoutDelegate, StickersAlert.StickersAlertDelegate { + private int commonInputType; + private boolean stickersEnabled; + public interface ChatActivityEnterViewDelegate { default void beforeMessageSend(CharSequence message, boolean notify, int scheduleDate) { } @@ -331,6 +339,7 @@ default TLRPC.TL_channels_sendAsPeers getSendAsPeers() { private Runnable showTopViewRunnable; private Runnable setTextFieldRunnable; public boolean preventInput; + @Nullable private NumberTextView captionLimitView; private int currentLimit = -1; private int codePointCount; @@ -347,6 +356,7 @@ default TLRPC.TL_channels_sendAsPeers getSendAsPeers() { private BotWebViewMenuContainer botWebViewMenuContainer; private ChatActivityBotWebViewButton botWebViewButton; + @Nullable private BotCommandsMenuView botCommandsMenuButton; public BotCommandsMenuContainer botCommandsMenuContainer; private BotCommandsMenuView.BotCommandsAdapter botCommandsAdapter; @@ -354,6 +364,7 @@ default TLRPC.TL_channels_sendAsPeers getSendAsPeers() { private boolean captionLimitBulletinShown; // Send as... 
stuff + @Nullable private SenderSelectView senderSelectView; private SenderSelectPopup senderSelectPopupWindow; private Runnable onEmojiSearchClosed; @@ -364,6 +375,12 @@ default TLRPC.TL_channels_sendAsPeers getSendAsPeers() { private float searchToOpenProgress; private float chatSearchExpandOffset; + private boolean sendRoundEnabled = true; + private boolean sendVoiceEnabled = true; + private boolean sendPlainEnabled = true; + private boolean emojiButtonRestricted; + + private HashMap animationParamsX = new HashMap<>(); private class SeekBarWaveformView extends View { @@ -430,6 +447,7 @@ public void onInitializeAccessibilityNodeInfo(View host, AccessibilityNodeInfo i } }; + @Nullable protected EditTextCaption messageEditText; private SimpleTextView slowModeButton; private int slowModeTimer; @@ -442,6 +460,7 @@ public void onInitializeAccessibilityNodeInfo(View host, AccessibilityNodeInfo i private ActionBarPopupWindow.ActionBarPopupWindowLayout sendPopupLayout; private ImageView cancelBotButton; private ChatActivityEnterViewAnimatedIconView emojiButton; + @Nullable private ImageView expandStickersButton; private EmojiView emojiView; private AnimatorSet panelAnimation; @@ -451,37 +470,53 @@ public void onInitializeAccessibilityNodeInfo(View host, AccessibilityNodeInfo i private FrameLayout audioVideoButtonContainer; private ChatActivityEnterViewAnimatedIconView audioVideoSendButton; private boolean isInVideoMode; + @Nullable private FrameLayout recordPanel; + @Nullable private FrameLayout recordedAudioPanel; + @Nullable private VideoTimelineView videoTimelineView; @SuppressWarnings("FieldCanBeLocal") private RLottieImageView recordDeleteImageView; + @Nullable private SeekBarWaveformView recordedAudioSeekBar; + @Nullable private View recordedAudioBackground; + @Nullable private ImageView recordedAudioPlayButton; + @Nullable private TextView recordedAudioTimeTextView; + @Nullable private SlideTextView slideText; - @SuppressWarnings("FieldCanBeLocal") - private LinearLayout recordTimeContainer; + @Nullable private RecordDot recordDot; private SizeNotifierFrameLayout sizeNotifierLayout; private int originalViewHeight; private LinearLayout attachLayout; private ImageView attachButton; + @Nullable private ImageView botButton; + private FrameLayout messageEditTextContainer; private FrameLayout textFieldContainer; private FrameLayout sendButtonContainer; + @Nullable private FrameLayout doneButtonContainer; + @Nullable private ImageView doneButtonImage; private AnimatorSet doneButtonAnimation; + @Nullable private ContextProgressView doneButtonProgress; protected View topView; protected View topLineView; private BotKeyboardView botKeyboardView; private ImageView notifyButton; + @Nullable private ImageView scheduledButton; + @Nullable + private ImageView giftButton; private boolean scheduleButtonHidden; private AnimatorSet scheduledButtonAnimation; + @Nullable private RecordCircle recordCircle; private CloseProgressDrawable2 progressDrawable; private Paint dotPaint; @@ -491,6 +526,7 @@ public void onInitializeAccessibilityNodeInfo(View host, AccessibilityNodeInfo i protected float topViewEnterProgress; protected int animatedTop; public ValueAnimator currentTopViewAnimation; + @Nullable private ReplaceableIconDrawable botButtonDrawable; private CharSequence draftMessage; @@ -579,8 +615,6 @@ public void onInitializeAccessibilityNodeInfo(View host, AccessibilityNodeInfo i private MessageObject pendingMessageObject; private TLRPC.KeyboardButton pendingLocationButton; - private boolean 
configAnimationsEnabled; - private boolean waitingForKeyboardOpen; private boolean waitingForKeyboardOpenAfterAnimation; private boolean wasSendTyping; @@ -590,7 +624,8 @@ public void onInitializeAccessibilityNodeInfo(View host, AccessibilityNodeInfo i private boolean clearBotButtonsOnKeyboardOpen; private boolean expandStickersWithKeyboard; private float doneButtonEnabledProgress = 1f; - private final Drawable doneCheckDrawable; + @Nullable + private Drawable doneCheckDrawable; boolean doneButtonEnabled = true; private ValueAnimator doneButtonColorAnimator; @@ -698,8 +733,12 @@ public void run() { delegate.onPreAudioVideoRecord(); calledRecordRunnable = true; recordAudioVideoRunnableStarted = false; - slideText.setAlpha(1.0f); - slideText.setTranslationY(0); + if (slideText != null) { + slideText.setAlpha(1.0f); + slideText.setTranslationY(0); + } + audioToSendPath = null; + audioToSend = null; if (isInVideoMode()) { if (Build.VERSION.SDK_INT >= 23) { boolean hasAudio = parentActivity.checkSelfPermission(Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED; @@ -726,8 +765,12 @@ public void run() { if (!recordingAudioVideo) { recordingAudioVideo = true; updateRecordInterface(RECORD_STATE_ENTER); - recordCircle.showWaves(false, false); - recordTimerView.reset(); + if (recordCircle != null) { + recordCircle.showWaves(false, false); + } + if (recordTimerView != null) { + recordTimerView.reset(); + } } } else { if (parentFragment != null) { @@ -739,13 +782,19 @@ public void run() { delegate.needStartRecordAudio(1); startedDraggingX = -1; - MediaController.getInstance().startRecording(currentAccount, dialog_id, replyingMessageObject, getThreadMessage(), recordingGuid); + MediaController.getInstance().startRecording(currentAccount, dialog_id, replyingMessageObject, getThreadMessage(), recordingGuid, true); recordingAudioVideo = true; updateRecordInterface(RECORD_STATE_ENTER); - recordTimerView.start(); - recordDot.enterAnimation = false; + if (recordTimerView != null) { + recordTimerView.start(); + } + if (recordDot != null) { + recordDot.enterAnimation = false; + } audioVideoButtonContainer.getParent().requestDisallowInterceptTouchEvent(true); - recordCircle.showWaves(true, false); + if (recordCircle != null) { + recordCircle.showWaves(true, false); + } } } }; @@ -877,6 +926,8 @@ public void playDeleteAnimation() { private Drawable lockShadowDrawable; private final Theme.ResourcesProvider resourcesProvider; + private final boolean isChat; + private Runnable runEmojiPanelAnimation = new Runnable() { @Override public void run() { @@ -905,8 +956,8 @@ public class RecordCircle extends View { public float iconScale; - BlobDrawable tinyWaveDrawable = new BlobDrawable(11); - BlobDrawable bigWaveDrawable = new BlobDrawable(12); + BlobDrawable tinyWaveDrawable = new BlobDrawable(11, LiteMode.FLAGS_CHAT); + BlobDrawable bigWaveDrawable = new BlobDrawable(12, LiteMode.FLAGS_CHAT); private Drawable tooltipBackground; private Drawable tooltipBackgroundArrow; @@ -955,25 +1006,10 @@ public class RecordCircle extends View { public RecordCircle(Context context) { super(context); - micDrawable = getResources().getDrawable(R.drawable.input_mic_pressed).mutate(); - micDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelVoicePressed), PorterDuff.Mode.SRC_IN)); - - cameraDrawable = getResources().getDrawable(R.drawable.input_video_pressed).mutate(); - cameraDrawable.setColorFilter(new 
PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelVoicePressed), PorterDuff.Mode.SRC_IN)); - - sendDrawable = getResources().getDrawable(R.drawable.attach_send).mutate(); - sendDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelVoicePressed), PorterDuff.Mode.SRC_IN)); - - micOutline = getResources().getDrawable(R.drawable.input_mic).mutate(); - micOutline.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.SRC_IN)); - - cameraOutline = getResources().getDrawable(R.drawable.input_video).mutate(); - cameraOutline.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.SRC_IN)); virtualViewHelper = new VirtualViewHelper(this); ViewCompat.setAccessibilityDelegate(this, virtualViewHelper); - tinyWaveDrawable.minRadius = AndroidUtilities.dp(47); tinyWaveDrawable.maxRadius = AndroidUtilities.dp(55); tinyWaveDrawable.generateBlob(); @@ -1002,6 +1038,26 @@ public RecordCircle(Context context) { updateColors(); } + private void checkDrawables() { + if (micDrawable != null) { + return; + } + micDrawable = getResources().getDrawable(R.drawable.input_mic_pressed).mutate(); + micDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelVoicePressed), PorterDuff.Mode.MULTIPLY)); + + cameraDrawable = getResources().getDrawable(R.drawable.input_video_pressed).mutate(); + cameraDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelVoicePressed), PorterDuff.Mode.MULTIPLY)); + + sendDrawable = getResources().getDrawable(R.drawable.attach_send).mutate(); + sendDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelVoicePressed), PorterDuff.Mode.MULTIPLY)); + + micOutline = getResources().getDrawable(R.drawable.input_mic).mutate(); + micOutline.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.MULTIPLY)); + + cameraOutline = getResources().getDrawable(R.drawable.input_video).mutate(); + cameraOutline.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.MULTIPLY)); + } + public void setAmplitude(double value) { bigWaveDrawable.setValue((float) (Math.min(WaveDrawable.MAX_AMPLITUDE, value) / WaveDrawable.MAX_AMPLITUDE), true); tinyWaveDrawable.setValue((float) (Math.min(WaveDrawable.MAX_AMPLITUDE, value) / WaveDrawable.MAX_AMPLITUDE), false); @@ -1102,7 +1158,9 @@ public boolean onTouchEvent(MotionEvent event) { MediaController.getInstance().stopRecording(2, true, 0); delegate.needStartRecordAudio(0); } - slideText.setEnabled(false); + if (slideText != null) { + slideText.setEnabled(false); + } } } return true; @@ -1200,7 +1258,7 @@ protected void onDraw(Canvas canvas) { float progressToSeekbarStep2 = 0; float exitProgress2 = 0f; - if (transformToSeekbar != 0) { + if (transformToSeekbar != 0 && recordedAudioBackground != null) { float step1Time = 0.38f; float step2Time = 0.25f; float step3Time = 1f - step1Time - step2Time; @@ -1230,7 +1288,7 @@ protected void onDraw(Canvas canvas) { radius = radius + AndroidUtilities.dp(16) * progressToSeekbarStep1; radius *= (1f - exitProgress2); - if (configAnimationsEnabled && exitTransition > 0.6f) { + if (LiteMode.isEnabled(LiteMode.FLAGS_CHAT) && exitTransition > 0.6f) { circleAlpha = Math.max(0, 1f - (exitTransition - 0.6f) / 0.4f); } } @@ -1247,6 +1305,7 @@ protected void onDraw(Canvas canvas) { 
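The RecordCircle hunks above do two things: drawable setup moves out of the constructor into the new checkDrawables(), which the draw paths call before the icons are used, and the old configAnimationsEnabled flag (read from the "view_animations" preference, removed later in this patch) is replaced by LiteMode.isEnabled(LiteMode.FLAGS_CHAT), matching the org.telegram.messenger.LiteMode import added at the top of the file. A rough sketch of both patterns combined; IconCircleView, R.drawable.ic_icon and drawWaves/drawStaticCircle are illustrative placeholders, not identifiers from this patch:

    import android.content.Context;
    import android.graphics.Canvas;
    import android.graphics.PorterDuff;
    import android.graphics.PorterDuffColorFilter;
    import android.graphics.drawable.Drawable;
    import android.view.View;
    import org.telegram.messenger.LiteMode;
    import org.telegram.messenger.R;

    // Sketch only: defer drawable loading until the first draw, and gate the
    // animated waves behind the LiteMode chat-effects flag.
    class IconCircleView extends View {
        private Drawable icon;

        IconCircleView(Context context) {
            super(context); // constructor stays cheap; nothing is decoded here
        }

        private void checkDrawables() {
            if (icon != null) {
                return; // already loaded
            }
            icon = getResources().getDrawable(R.drawable.ic_icon).mutate(); // placeholder resource
            icon.setColorFilter(new PorterDuffColorFilter(0xFFFFFFFF, PorterDuff.Mode.MULTIPLY));
        }

        @Override
        protected void onDraw(Canvas canvas) {
            checkDrawables(); // same guard the patch adds to onDraw/drawIcon/drawIconInternal
            if (LiteMode.isEnabled(LiteMode.FLAGS_CHAT)) {
                drawWaves(canvas);        // animated BlobDrawable-style waves
                invalidate();             // keep the animation ticking
            } else {
                drawStaticCircle(canvas); // cheap path when chat effects are off
            }
            int cx = getWidth() / 2, cy = getHeight() / 2, r = icon.getIntrinsicWidth() / 2;
            icon.setBounds(cx - r, cy - r, cx + r, cy + r);
            icon.draw(canvas);
        }

        private void drawWaves(Canvas canvas) { /* placeholder */ }
        private void drawStaticCircle(Canvas canvas) { /* placeholder */ }
    }
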
Drawable drawable; Drawable replaceDrawable = null; + checkDrawables(); if (isSendButtonVisible()) { if (progressToSendButton != 1f) { progressToSendButton += dt / 150f; @@ -1289,7 +1348,7 @@ protected void onDraw(Canvas canvas) { } } - if (configAnimationsEnabled) { + if (LiteMode.isEnabled(LiteMode.FLAGS_CHAT)) { tinyWaveDrawable.minRadius = AndroidUtilities.dp(47); tinyWaveDrawable.maxRadius = AndroidUtilities.dp(47) + AndroidUtilities.dp(15) * BlobDrawable.FORM_SMALL_MAX; @@ -1307,7 +1366,7 @@ protected void onDraw(Canvas canvas) { lastUpdateTime = System.currentTimeMillis(); float slideToCancelProgress1 = slideToCancelProgress > 0.7f ? 1f : slideToCancelProgress / 0.7f; - if (configAnimationsEnabled && progressToSeekbarStep2 != 1 && exitProgress2 < 0.4f && slideToCancelProgress1 > 0 && !canceledByGesture) { + if (LiteMode.isEnabled(LiteMode.FLAGS_CHAT) && progressToSeekbarStep2 != 1 && exitProgress2 < 0.4f && slideToCancelProgress1 > 0 && !canceledByGesture) { if (showWaves && wavesEnterAnimation != 1f) { wavesEnterAnimation += 0.04f; if (wavesEnterAnimation > 1f) { @@ -1334,7 +1393,7 @@ protected void onDraw(Canvas canvas) { paint.setAlpha((int) (paintAlpha * circleAlpha)); if (this.scale == 1f) { if (transformToSeekbar != 0) { - if (progressToSeekbarStep3 > 0) { + if (progressToSeekbarStep3 > 0 && recordedAudioBackground != null) { float circleB = cy + radius; float circleT = cy - radius; float circleR = cx + slideDelta + radius; @@ -1561,6 +1620,7 @@ protected void onDraw(Canvas canvas) { public void drawIcon(Canvas canvas, int cx, int cy, float alpha) { Drawable drawable; Drawable replaceDrawable = null; + checkDrawables(); if (isSendButtonVisible()) { if (progressToSendButton != 1f) { replaceDrawable = isInVideoMode() ? cameraDrawable : micDrawable; @@ -1579,6 +1639,7 @@ public void drawIcon(Canvas canvas, int cx, int cy, float alpha) { } private void drawIconInternal(Canvas canvas, Drawable drawable, Drawable replaceDrawable, float progressToSendButton, int alpha) { + checkDrawables(); if (progressToSendButton == 0 || progressToSendButton == 1 || replaceDrawable == null) { if (canceledByGesture && slideToCancelProgress == 1f) { View v = audioVideoSendButton; @@ -1723,7 +1784,7 @@ public VirtualViewHelper(@NonNull View host) { @Override protected int getVirtualViewAt(float x, float y) { - if (isSendButtonVisible()) { + if (isSendButtonVisible() && recordCircle != null) { if (sendRect.contains((int) x, (int) y)) { return 1; } else if (pauseRect.contains(x, y)) { @@ -1760,7 +1821,7 @@ protected void onPopulateNodeForVirtualView(int id, @NonNull AccessibilityNodeIn rect.set((int) pauseRect.left, (int) pauseRect.top, (int) pauseRect.right, (int) pauseRect.bottom); info.setBoundsInParent(rect); info.setText(LocaleController.getString("Stop", R.string.Stop)); - } else if (id == 3) { + } else if (id == 3 && recordCircle != null) { if (slideText != null && slideText.cancelRect != null) { AndroidUtilities.rectTmp2.set(slideText.cancelRect); slideText.getLocationOnScreen(coords); @@ -1790,8 +1851,9 @@ public ChatActivityEnterView(Activity context, SizeNotifierFrameLayout parent, C this.resourcesProvider = resourcesProvider; this.backgroundColor = getThemedColor(Theme.key_chat_messagePanelBackground); this.drawBlur = false; + this.isChat = isChat; - smoothKeyboard = isChat && SharedConfig.smoothKeyboard && !AndroidUtilities.isInMultiwindow && (fragment == null || !fragment.isInBubbleMode()); + smoothKeyboard = isChat && !AndroidUtilities.isInMultiwindow && (fragment == null || 
!fragment.isInBubbleMode()); dotPaint = new Paint(Paint.ANTI_ALIAS_FLAG); dotPaint.setColor(getThemedColor(Theme.key_chat_emojiPanelNewTrending)); setFocusable(true); @@ -1814,6 +1876,7 @@ public ChatActivityEnterView(Activity context, SizeNotifierFrameLayout parent, C NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.sendingMessagesChanged); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.audioRecordTooShort); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.updateBotMenuButton); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.didUpdatePremiumGiftFieldIcon); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); parentActivity = context; @@ -1825,12 +1888,11 @@ public ChatActivityEnterView(Activity context, SizeNotifierFrameLayout parent, C sizeNotifierLayout.setDelegate(this); SharedPreferences preferences = MessagesController.getGlobalMainSettings(); sendByEnter = preferences.getBoolean("send_by_enter", false); - configAnimationsEnabled = preferences.getBoolean("view_animations", true); textFieldContainer = new FrameLayout(context) { @Override public boolean dispatchTouchEvent(MotionEvent ev) { - if (botWebViewButton.getVisibility() == VISIBLE) { + if (botWebViewButton != null && botWebViewButton.getVisibility() == VISIBLE) { return botWebViewButton.dispatchTouchEvent(ev); } return super.dispatchTouchEvent(ev); @@ -1841,7 +1903,7 @@ public boolean dispatchTouchEvent(MotionEvent ev) { textFieldContainer.setPadding(0, AndroidUtilities.dp(1), 0, 0); addView(textFieldContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.BOTTOM, 0, 1, 0, 0)); - FrameLayout frameLayout = new FrameLayout(context) { + FrameLayout frameLayout = messageEditTextContainer = new FrameLayout(context) { @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { super.onLayout(changed, left, top, right, bottom); @@ -1900,21 +1962,29 @@ protected void onDraw(Canvas canvas) { if (adjustPanLayoutHelper != null && adjustPanLayoutHelper.animationInProgress()) { return; } + if (emojiButtonRestricted) { + showRestrictedHint(); + return; + } if (hasBotWebView() && botCommandsMenuIsShowing()) { - botWebViewMenuContainer.dismiss(v::callOnClick); + if (botWebViewMenuContainer != null) { + botWebViewMenuContainer.dismiss(v::callOnClick); + } return; } if (!isPopupShowing() || currentPopupContentType != 0) { showPopup(1, 0); - emojiView.onOpen(messageEditText.length() > 0); + emojiView.onOpen(messageEditText != null && messageEditText.length() > 0); } else { if (searchingType != 0) { setSearchingTypeInternal(0, true); if (emojiView != null) { emojiView.closeSearch(false); } - messageEditText.requestFocus(); + if (messageEditText != null) { + messageEditText.requestFocus(); + } } if (stickersExpanded) { setStickersExpanded(false, true, false); @@ -1928,873 +1998,885 @@ protected void onDraw(Canvas canvas) { } } }); - frameLayout.addView(emojiButton, LayoutHelper.createFrame(48, 48, Gravity.BOTTOM | Gravity.LEFT, 3, 0, 0, 0)); + messageEditTextContainer.addView(emojiButton, LayoutHelper.createFrame(48, 48, Gravity.BOTTOM | Gravity.LEFT, 3, 0, 0, 0)); setEmojiButtonImage(false, false); - captionLimitView = new NumberTextView(context); - captionLimitView.setVisibility(View.GONE); - captionLimitView.setTextSize(15); - 
captionLimitView.setTextColor(getThemedColor(Theme.key_windowBackgroundWhiteGrayText)); - captionLimitView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - captionLimitView.setCenterAlign(true); - addView(captionLimitView, LayoutHelper.createFrame(48, 20, Gravity.BOTTOM | Gravity.RIGHT, 3, 0, 0, 48)); - - messageEditText = new EditTextCaption(context, resourcesProvider) { - - @Override - protected void onScrollChanged(int horiz, int vert, int oldHoriz, int oldVert) { - super.onScrollChanged(horiz, vert, oldHoriz, oldVert); - if (delegate != null) { - delegate.onEditTextScroll(); - } - } - - @Override - protected void onContextMenuOpen() { - if (delegate != null) { - delegate.onContextMenuOpen(); - } - } - - @Override - protected void onContextMenuClose() { - if (delegate != null) { - delegate.onContextMenuClose(); - } - } - private void send(InputContentInfoCompat inputContentInfo, boolean notify, int scheduleDate) { - if (delegate != null) { - delegate.beforeMessageSend(null, true, scheduleDate); - } - ClipDescription description = inputContentInfo.getDescription(); - if (description.hasMimeType("image/gif")) { - SendMessagesHelper.prepareSendingDocument(accountInstance, null, null, inputContentInfo.getContentUri(), null, "image/gif", dialog_id, replyingMessageObject, getThreadMessage(), inputContentInfo, null, notify, 0); - } else { - SendMessagesHelper.prepareSendingPhoto(accountInstance, null, inputContentInfo.getContentUri(), dialog_id, replyingMessageObject, getThreadMessage(), null, null, null, inputContentInfo, 0, null, notify, 0); - } - if (delegate != null) { - delegate.onMessageSend(null, true, scheduleDate); - } - } + if (isChat) { + attachLayout = new LinearLayout(context); + attachLayout.setOrientation(LinearLayout.HORIZONTAL); + attachLayout.setEnabled(false); + attachLayout.setPivotX(AndroidUtilities.dp(48)); + attachLayout.setClipChildren(false); + messageEditTextContainer.addView(attachLayout, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 48, Gravity.BOTTOM | Gravity.RIGHT)); - @Override - public InputConnection onCreateInputConnection(EditorInfo editorInfo) { - final InputConnection ic = super.onCreateInputConnection(editorInfo); - if (ic == null) { - return null; - } - try { - EditorInfoCompat.setContentMimeTypes(editorInfo, new String[]{"image/gif", "image/*", "image/jpg", "image/png", "image/webp"}); - final InputConnectionCompat.OnCommitContentListener callback = (inputContentInfo, flags, opts) -> { - if (BuildCompat.isAtLeastNMR1() && (flags & InputConnectionCompat.INPUT_CONTENT_GRANT_READ_URI_PERMISSION) != 0) { - try { - inputContentInfo.requestPermission(); - } catch (Exception e) { - return false; - } - } - if (inputContentInfo.getDescription().hasMimeType("image/gif") || SendMessagesHelper.shouldSendWebPAsSticker(null, inputContentInfo.getContentUri())) { - if (isInScheduleMode()) { - AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), (notify, scheduleDate) -> send(inputContentInfo, notify, scheduleDate), resourcesProvider); - } else { - send(inputContentInfo, true, 0); - } - } else { - editPhoto(inputContentInfo.getContentUri(), inputContentInfo.getDescription().getMimeType(0)); - } - return true; - }; - return InputConnectionCompat.createWrapper(ic, editorInfo, callback); - } catch (Throwable e) { - FileLog.e(e); - } - return ic; + notifyButton = new ImageView(context); + notifySilentDrawable = new CrossOutDrawable(context, R.drawable.input_notify_on, Theme.key_chat_messagePanelIcons); + 
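Much of this restructuring follows one pattern: views that used to be built unconditionally (captionLimitView, botCommandsMenuButton, scheduledButton, the recorded-audio panel, and so on) become @Nullable fields that are created on demand, for example the createRecordAudioPanel() call above and createCaptionLimitView() further down, while every other caller null-checks first. A bare-bones sketch of that idiom; PanelHost, panel and createPanel() are illustrative names, not part of this patch:

    import android.content.Context;
    import android.view.View;
    import android.widget.FrameLayout;
    import androidx.annotation.Nullable;

    // Sketch only: build the view the first time it is needed, null-guard everywhere else.
    class PanelHost {
        private final Context context;
        @Nullable
        private FrameLayout panel; // used to be created eagerly in the constructor

        PanelHost(Context context) {
            this.context = context; // nothing inflated yet
        }

        private void createPanel() {
            if (panel != null) {
                return; // already built, same early-return guard as createCaptionLimitView()
            }
            panel = new FrameLayout(context);
        }

        void showPanel() {
            createPanel(); // lazily create right before first use
            panel.setVisibility(View.VISIBLE);
        }

        void hidePanelIfShown() {
            if (panel != null) { // mirrors the "if (slideText != null)" style checks above
                panel.setVisibility(View.GONE);
            }
        }
    }
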
notifyButton.setImageDrawable(notifySilentDrawable); + notifySilentDrawable.setCrossOut(silent, false); + notifyButton.setContentDescription(silent ? LocaleController.getString("AccDescrChanSilentOn", R.string.AccDescrChanSilentOn) : LocaleController.getString("AccDescrChanSilentOff", R.string.AccDescrChanSilentOff)); + notifyButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.SRC_IN)); + notifyButton.setScaleType(ImageView.ScaleType.CENTER); + if (Build.VERSION.SDK_INT >= 21) { + notifyButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); } - - @Override - public boolean onTouchEvent(MotionEvent event) { - if (stickersDragging || stickersExpansionAnim != null) { - return false; - } - if (isPopupShowing() && event.getAction() == MotionEvent.ACTION_DOWN) { - if (searchingType != 0) { - setSearchingTypeInternal(0, false); - emojiView.closeSearch(false); - requestFocus(); + notifyButton.setVisibility(canWriteToChannel && (delegate == null || !delegate.hasScheduledMessages()) ? VISIBLE : GONE); + attachLayout.addView(notifyButton, LayoutHelper.createLinear(48, 48)); + notifyButton.setOnClickListener(new OnClickListener() { + @Override + public void onClick(View v) { + silent = !silent; + if (notifySilentDrawable == null) { + notifySilentDrawable = new CrossOutDrawable(context, R.drawable.input_notify_on, Theme.key_chat_messagePanelIcons); } - showPopup(AndroidUtilities.usingHardwareInput ? 0 : 2, 0); - if (stickersExpanded) { - setStickersExpanded(false, true, false); - waitingForKeyboardOpenAfterAnimation = true; - AndroidUtilities.runOnUIThread(() -> { - waitingForKeyboardOpenAfterAnimation = false; - openKeyboardInternal(); - }, 200); - } else { - openKeyboardInternal(); + notifySilentDrawable.setCrossOut(silent, true); + notifyButton.setImageDrawable(notifySilentDrawable); + MessagesController.getNotificationsSettings(currentAccount).edit().putBoolean("silent_" + dialog_id, silent).commit(); + NotificationsController.getInstance(currentAccount).updateServerNotificationsSettings(dialog_id, fragment == null ? 0 :fragment.getTopicId()); + UndoView undoView = fragment.getUndoView(); + if (undoView != null) { + undoView.showWithAction(0, !silent ? UndoView.ACTION_NOTIFY_ON : UndoView.ACTION_NOTIFY_OFF, null); } - return true; - } - try { - return super.onTouchEvent(event); - } catch (Exception e) { - FileLog.e(e); - } - return false; - } - - /*@Override - public boolean dispatchKeyEvent(KeyEvent event) { - if (preventInput) { - return false; + notifyButton.setContentDescription(silent ? 
LocaleController.getString("AccDescrChanSilentOn", R.string.AccDescrChanSilentOn) : LocaleController.getString("AccDescrChanSilentOff", R.string.AccDescrChanSilentOff)); + updateFieldHint(true); } - return super.dispatchKeyEvent(event); - }*/ + }); - @Override - protected void onSelectionChanged(int selStart, int selEnd) { - super.onSelectionChanged(selStart, selEnd); - if (delegate != null) { - delegate.onTextSelectionChanged(selStart, selEnd); - } + attachButton = new ImageView(context); + attachButton.setScaleType(ImageView.ScaleType.CENTER); + attachButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.SRC_IN)); + attachButton.setImageResource(R.drawable.deproko_baseline_attach_26); + if (Build.VERSION.SDK_INT >= 21) { + attachButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); } - - @Override - protected void extendActionMode(ActionMode actionMode, Menu menu) { - if (parentFragment != null) { - parentFragment.extendActionMode(menu); + attachLayout.addView(attachButton, LayoutHelper.createLinear(48, 48)); + attachButton.setOnClickListener(v -> { + if (adjustPanLayoutHelper != null && adjustPanLayoutHelper.animationInProgress()) { + return; } - } + delegate.didPressAttachButton(); + }); + attachButton.setContentDescription(LocaleController.getString("AccDescrAttachButton", R.string.AccDescrAttachButton)); + } - @Override - public boolean requestRectangleOnScreen(Rect rectangle) { - rectangle.bottom += AndroidUtilities.dp(1000); - return super.requestRectangleOnScreen(rectangle); - } + if (audioToSend != null) { + createRecordAudioPanel(); + } + sendButtonContainer = new FrameLayout(context) { @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - isInitLineCount = getMeasuredWidth() == 0 && getMeasuredHeight() == 0; - super.onMeasure(widthMeasureSpec, heightMeasureSpec); - if (isInitLineCount) { - lineCount = getLineCount(); + protected boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (child == sendButton && textTransitionIsRunning) { + return true; } - isInitLineCount = false; + return super.drawChild(canvas, child, drawingTime); } + }; + sendButtonContainer.setClipChildren(false); + sendButtonContainer.setClipToPadding(false); + textFieldContainer.addView(sendButtonContainer, LayoutHelper.createFrame(48, 48, Gravity.BOTTOM | Gravity.RIGHT)); - @Override - public boolean onTextContextMenuItem(int id) { - if (id == android.R.id.paste) { - isPaste = true; - - ClipboardManager clipboard = (ClipboardManager) getContext().getSystemService(Context.CLIPBOARD_SERVICE); - ClipData clipData = clipboard.getPrimaryClip(); - if (clipData != null) { - if (clipData.getItemCount() == 1 && clipData.getDescription().hasMimeType("image/*")) { - editPhoto(clipData.getItemAt(0).getUri(), clipData.getDescription().getMimeType(0)); + audioVideoButtonContainer = new FrameLayout(context); + audioVideoButtonContainer.setSoundEffectsEnabled(false); + sendButtonContainer.addView(audioVideoButtonContainer, LayoutHelper.createFrame(48, 48)); + if (NekoConfig.useChatAttachMediaMenu.Bool()) { + audioVideoButtonContainer.setOnClickListener(v -> { + createRecordAudioPanel(); + createRecordCircle(); + if (recordCircle.isSendButtonVisible()) { + if (!hasRecordVideo || calledRecordRunnable) { + startedDraggingX = -1; + if (hasRecordVideo && isInVideoMode) { + delegate.needStartRecordVideo(1, true, 0); + } else { + if (recordingAudioVideo && isInScheduleMode()) { + 
AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), (notify, scheduleDate) -> MediaController.getInstance().stopRecording(1, notify, scheduleDate), () -> MediaController.getInstance().stopRecording(0, false, 0), null); + } + MediaController.getInstance().stopRecording(isInScheduleMode() ? 3 : 1, true, 0); + delegate.needStartRecordAudio(0); } + recordingAudioVideo = false; + updateRecordInterface(RECORD_STATE_SENDING); } - } - return super.onTextContextMenuItem(id); - } - - private void editPhoto(Uri uri, String mime) { - final File file = AndroidUtilities.generatePicturePath(fragment != null && fragment.isSecretChat(), MimeTypeMap.getSingleton().getExtensionFromMimeType(mime)); - Utilities.globalQueue.postRunnable(() -> { - try { - InputStream in = context.getContentResolver().openInputStream(uri); - FileOutputStream fos = new FileOutputStream(file); - byte[] buffer = new byte[1024]; - int lengthRead; - while ((lengthRead = in.read(buffer)) > 0) { - fos.write(buffer, 0, lengthRead); - fos.flush(); - } - in.close(); - fos.close(); - MediaController.PhotoEntry photoEntry = new MediaController.PhotoEntry(0, -1, 0, file.getAbsolutePath(), 0, false, 0, 0, 0); - ArrayList entries = new ArrayList<>(); - entries.add(photoEntry); - AndroidUtilities.runOnUIThread(() -> { - openPhotoViewerForEdit(entries, file); - }); - } catch (Throwable e) { - e.printStackTrace(); - } - }); - } - - private void openPhotoViewerForEdit(ArrayList entries, File sourceFile) { - MediaController.PhotoEntry photoEntry = (MediaController.PhotoEntry) entries.get(0); - if (keyboardVisible) { - AndroidUtilities.hideKeyboard(messageEditText); - AndroidUtilities.runOnUIThread(new Runnable() { - @Override - public void run() { - openPhotoViewerForEdit(entries, sourceFile); - } - }, 100); return; } - - PhotoViewer.getInstance().setParentActivity(parentFragment, resourcesProvider); - PhotoViewer.getInstance().openPhotoForSelect(entries, 0, 2, false, new PhotoViewer.EmptyPhotoViewerProvider() { - boolean sending; - - @Override - public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { - ArrayList photos = new ArrayList<>(); - SendMessagesHelper.SendingMediaInfo info = new SendMessagesHelper.SendingMediaInfo(); - if (!photoEntry.isVideo && photoEntry.imagePath != null) { - info.path = photoEntry.imagePath; - } else if (photoEntry.path != null) { - info.path = photoEntry.path; - } - info.thumbPath = photoEntry.thumbPath; - info.isVideo = photoEntry.isVideo; - info.caption = photoEntry.caption != null ? 
photoEntry.caption.toString() : null; - info.entities = photoEntry.entities; - info.masks = photoEntry.stickers; - info.ttl = photoEntry.ttl; - info.videoEditedInfo = videoEditedInfo; - info.canDeleteAfter = true; - info.hasMediaSpoilers = photoEntry.hasSpoiler; - photos.add(info); - photoEntry.reset(); - sending = true; - boolean updateStickersOrder = SendMessagesHelper.checkUpdateStickersOrder(info.caption); - SendMessagesHelper.prepareSendingMedia(accountInstance, photos, dialog_id, replyingMessageObject, getThreadMessage(), null, forceDocument, false, editingMessageObject, notify, scheduleDate, updateStickersOrder); - if (delegate != null) { - delegate.onMessageSend(null, true, scheduleDate); - } - } - - @Override - public void willHidePhotoViewer() { - if (!sending) { - try { - sourceFile.delete(); - } catch (Throwable ignore) { - + onMenuClick(v); + }); + } else { + audioVideoButtonContainer.setOnTouchListener((view, motionEvent) -> { + audioVideoButtonContainer.setFocusable(true); + audioVideoButtonContainer.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_YES); + createRecordCircle(); + if (motionEvent.getAction() == MotionEvent.ACTION_DOWN) { + if (recordCircle.isSendButtonVisible()) { + if (!hasRecordVideo || calledRecordRunnable) { + startedDraggingX = -1; + if (hasRecordVideo && isInVideoMode()) { + delegate.needStartRecordVideo(1, true, 0); + } else { + if (recordingAudioVideo && isInScheduleMode()) { + AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), (notify, scheduleDate) -> MediaController.getInstance().stopRecording(1, notify, scheduleDate), () -> MediaController.getInstance().stopRecording(0, false, 0), resourcesProvider); + } + MediaController.getInstance().stopRecording(isInScheduleMode() ? 
3 : 1, true, 0); + delegate.needStartRecordAudio(0); } + recordingAudioVideo = false; + messageTransitionIsRunning = false; + AndroidUtilities.runOnUIThread(moveToSendStateRunnable = () -> { + moveToSendStateRunnable = null; + updateRecordInterface(RECORD_STATE_SENDING); + }, 200); + }getParent().requestDisallowInterceptTouchEvent(true); + return true; + } + if (parentFragment != null) { + TLRPC.Chat chat = parentFragment.getCurrentChat(); + TLRPC.UserFull userFull = parentFragment.getCurrentUserInfo(); + if (chat != null && !(ChatObject.canSendVoice(chat) || (ChatObject.canSendRoundVideo(chat) && hasRecordVideo)) ||userFull != null && userFull.voice_messages_forbidden) { + delegate.needShowMediaBanHint(); + return true; } } - - @Override - public boolean canCaptureMorePhotos() { - return false; + if (hasRecordVideo) { + calledRecordRunnable = false; + recordAudioVideoRunnableStarted = true; + AndroidUtilities.runOnUIThread(recordAudioVideoRunnable, 150); + } else { + recordAudioVideoRunnable.run(); } - - }, parentFragment); - } - - @Override - protected Theme.ResourcesProvider getResourcesProvider() { - return resourcesProvider; - } - }; - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { - messageEditText.setFallbackLineSpacing(false); - } - messageEditText.setDelegate(new EditTextCaption.EditTextCaptionDelegate() { - - @Override - public void onSpansChanged() { - messageEditText.invalidateEffects(); - if (delegate != null) { - delegate.onTextSpansChanged(messageEditText.getText()); - } - } - - @Override - public long getCurrentChat() { - - long chatId; - if (parentFragment.getCurrentChat() != null) { - chatId = parentFragment.getCurrentChat().id; - } else if (parentFragment.getCurrentUser() != null) { - chatId = parentFragment.getCurrentUser().id; - } else { - chatId = -1; - } - - return chatId; - } - - }); - messageEditText.setIncludeFontPadding(false); - messageEditText.setWindowView(parentActivity.getWindow().getDecorView()); - TLRPC.EncryptedChat encryptedChat = parentFragment != null ? 
parentFragment.getCurrentEncryptedChat() : null; - messageEditText.setAllowTextEntitiesIntersection(supportsSendingNewEntities()); - updateFieldHint(false); - int flags = EditorInfo.IME_FLAG_NO_EXTRACT_UI; - if (encryptedChat != null) { - flags |= 0x01000000; //EditorInfo.IME_FLAG_NO_PERSONALIZED_LEARNING; - } - messageEditText.setImeOptions(flags); - messageEditText.setInputType(messageEditText.getInputType() | EditorInfo.TYPE_TEXT_FLAG_CAP_SENTENCES | EditorInfo.TYPE_TEXT_FLAG_MULTI_LINE); - messageEditText.setSingleLine(false); - messageEditText.setMaxLines(6); - messageEditText.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 18); - messageEditText.setGravity(Gravity.BOTTOM); - messageEditText.setPadding(0, AndroidUtilities.dp(11), 0, AndroidUtilities.dp(12)); - messageEditText.setBackgroundDrawable(null); - messageEditText.setTextColor(getThemedColor(Theme.key_chat_messagePanelText)); - messageEditText.setLinkTextColor(getThemedColor(Theme.key_windowBackgroundWhiteLinkText)); - messageEditText.setHighlightColor(getThemedColor(Theme.key_chat_inTextSelectionHighlight)); - messageEditText.setHintColor(getThemedColor(Theme.key_chat_messagePanelHint)); - messageEditText.setHintTextColor(getThemedColor(Theme.key_chat_messagePanelHint)); - messageEditText.setCursorColor(getThemedColor(Theme.key_chat_messagePanelCursor)); - messageEditText.setHandlesColor(getThemedColor(Theme.key_chat_TextSelectionCursor)); - frameLayout.addView(messageEditText, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM, 52, 0, isChat ? 50 : 2, 0)); - messageEditText.setOnKeyListener(new OnKeyListener() { - - boolean ctrlPressed = false; - - @Override - public boolean onKey(View view, int keyCode, KeyEvent keyEvent) { - if (keyCode == KeyEvent.KEYCODE_BACK && !keyboardVisible && isPopupShowing() && keyEvent.getAction() == KeyEvent.ACTION_UP) { - if (ContentPreviewViewer.hasInstance() && ContentPreviewViewer.getInstance().isVisible()) { - ContentPreviewViewer.getInstance().closeWithMenu(); - return true; + return true; + } else if (motionEvent.getAction() == MotionEvent.ACTION_UP || motionEvent.getAction() == MotionEvent.ACTION_CANCEL) { + if (motionEvent.getAction() == MotionEvent.ACTION_CANCEL && recordingAudioVideo) { + if (recordCircle.slideToCancelProgress < 0.7f) { + if (hasRecordVideo && isInVideoMode()) { + CameraController.getInstance().cancelOnInitRunnable(onFinishInitCameraRunnable); + delegate.needStartRecordVideo(2, true, 0); + } else { + delegate.needStartRecordAudio(0); + MediaController.getInstance().stopRecording(0, false, 0); + } + recordingAudioVideo = false; + updateRecordInterface(RECORD_STATE_CANCEL_BY_GESTURE); + } else { + recordCircle.sendButtonVisible = true; + startLockTransition(); + } + return false; } - if (currentPopupContentType == POPUP_CONTENT_BOT_KEYBOARD && botButtonsMessageObject != null) { + if (recordCircle != null && recordCircle.isSendButtonVisible() || recordedAudioPanel != null &&recordedAudioPanel.getVisibility() == VISIBLE) { + if (recordAudioVideoRunnableStarted) { + AndroidUtilities.cancelRunOnUIThread(recordAudioVideoRunnable); + } return false; } - if (keyEvent.getAction() == 1) { - if (currentPopupContentType == POPUP_CONTENT_BOT_KEYBOARD && botButtonsMessageObject != null) { - SharedPreferences preferences = MessagesController.getMainSettings(currentAccount); - preferences.edit().putInt("hidekeyboard_" + dialog_id, botButtonsMessageObject.getId()).apply(); + + float x = motionEvent.getX() + audioVideoButtonContainer.getX(); + float 
dist = (x - startedDraggingX); + float alpha = 1.0f + dist / distCanMove; + if (alpha < 0.45) { + if (hasRecordVideo && isInVideoMode()) { + CameraController.getInstance().cancelOnInitRunnable(onFinishInitCameraRunnable); + delegate.needStartRecordVideo(2, true, 0); + } else { + delegate.needStartRecordAudio(0); + MediaController.getInstance().stopRecording(0, false, 0); } - if (searchingType != 0) { - setSearchingTypeInternal(0, true); - if (emojiView != null) { - emojiView.closeSearch(true); + recordingAudioVideo = false; + updateRecordInterface(RECORD_STATE_CANCEL_BY_GESTURE); + } else { + if (recordAudioVideoRunnableStarted) { + AndroidUtilities.cancelRunOnUIThread(recordAudioVideoRunnable); + if (sendVoiceEnabled && sendRoundEnabled) {delegate.onSwitchRecordMode(!isInVideoMode()); + setRecordVideoButtonVisible(!isInVideoMode(), true);} else { + delegate.needShowMediaBanHint(); + } + if (!NekoConfig.disableVibration.Bool()) { + performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP); } - messageEditText.requestFocus(); - } else { - if (stickersExpanded) { - setStickersExpanded(false, true, false); + sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_CLICKED); + } else if (!hasRecordVideo || calledRecordRunnable) { + startedDraggingX = -1; + if (hasRecordVideo && isInVideoMode()) { + CameraController.getInstance().cancelOnInitRunnable(onFinishInitCameraRunnable); + delegate.needStartRecordVideo(NekoConfig.confirmAVMessage.Bool() ? 3 : 1, true, 0);} else if (!sendVoiceEnabled) { + delegate.needShowMediaBanHint(); } else { - if (stickersExpansionAnim == null) { - if (botButtonsMessageObject != null && currentPopupContentType != POPUP_CONTENT_BOT_KEYBOARD && TextUtils.isEmpty(messageEditText.getText())) { - showPopup(1, POPUP_CONTENT_BOT_KEYBOARD); - } else { - showPopup(0, 0); + if (!NekoConfig.confirmAVMessage.Bool()) { + if (recordingAudioVideo && isInScheduleMode()) { + AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), (notify, scheduleDate) -> MediaController.getInstance().stopRecording(1, notify, scheduleDate), () -> MediaController.getInstance().stopRecording(0, false, 0), resourcesProvider); } } + delegate.needStartRecordAudio(0); + if (!NekoConfig.confirmAVMessage.Bool()) { + MediaController.getInstance().stopRecording(isInScheduleMode() ? 
3 : 1, true, 0); + } else { + MediaController.getInstance().stopRecording(2, true, 0); + } + } + if (!NekoConfig.confirmAVMessage.Bool()) { + recordingAudioVideo = false; + messageTransitionIsRunning = false; + AndroidUtilities.runOnUIThread(moveToSendStateRunnable = () -> { + moveToSendStateRunnable = null; + updateRecordInterface(RECORD_STATE_SENDING); + }, 500); } } } return true; - } else if (keyCode == KeyEvent.KEYCODE_ENTER && (ctrlPressed || sendByEnter) && keyEvent.getAction() == KeyEvent.ACTION_DOWN && editingMessageObject == null) { - sendMessage(); - return true; - } else if (keyCode == KeyEvent.KEYCODE_CTRL_LEFT || keyCode == KeyEvent.KEYCODE_CTRL_RIGHT) { - ctrlPressed = keyEvent.getAction() == KeyEvent.ACTION_DOWN; - return true; - } - return false; - } - }); - messageEditText.setOnEditorActionListener(new TextView.OnEditorActionListener() { - - boolean ctrlPressed = false; + } else if (motionEvent.getAction() == MotionEvent.ACTION_MOVE && recordingAudioVideo) { + float x = motionEvent.getX(); + float y = motionEvent.getY(); + if (recordCircle.isSendButtonVisible()) { + return false; + } + if (recordCircle.setLockTranslation(y) == 2) { + startLockTransition(); + return false; + } else { + recordCircle.setMovingCords(x, y); + } - @Override - public boolean onEditorAction(TextView textView, int i, KeyEvent keyEvent) { - if (i == EditorInfo.IME_ACTION_SEND) { - sendMessage(); - return true; - } else if (keyEvent != null && i == EditorInfo.IME_NULL) { - if ((ctrlPressed || sendByEnter) && keyEvent.getAction() == KeyEvent.ACTION_DOWN && editingMessageObject == null) { - sendMessage(); - return true; + if (startedDraggingX == -1) { + startedDraggingX = x; + distCanMove = (float) (sizeNotifierLayout.getMeasuredWidth() * 0.35); + if (distCanMove > AndroidUtilities.dp(140)) { + distCanMove = AndroidUtilities.dp(140); + } } - } - return false; - } - }); - messageEditText.addTextChangedListener(new TextWatcher() { - private boolean processChange; - private boolean nextChangeIsSend; - private CharSequence prevText; - private boolean ignorePrevTextChange; - boolean heightShouldBeChanged; + x = x + audioVideoButtonContainer.getX(); + float dist = (x - startedDraggingX); + float alpha = 1.0f + dist / distCanMove; + if (startedDraggingX != -1) { + if (alpha > 1) { + alpha = 1; + } else if (alpha < 0) { + alpha = 0; + } + if (slideText != null) { + slideText.setSlideX(alpha); + } + if (recordCircle != null) { + recordCircle.setSlideToCancelProgress(alpha); + }} - @Override - public void beforeTextChanged(CharSequence charSequence, int i, int i2, int i3) { - if (ignorePrevTextChange) { - return; - } - if (recordingAudioVideo) { - prevText = charSequence.toString(); + if (alpha == 0) { + if (hasRecordVideo && isInVideoMode()) { + CameraController.getInstance().cancelOnInitRunnable(onFinishInitCameraRunnable); + delegate.needStartRecordVideo(2, true, 0); + } else { + delegate.needStartRecordAudio(0); + MediaController.getInstance().stopRecording(0, false, 0); + } + recordingAudioVideo = false; + updateRecordInterface(RECORD_STATE_CANCEL_BY_GESTURE); + } + return true; } - } + view.onTouchEvent(motionEvent); + return true; + }); + } - @Override - public void onTextChanged(CharSequence charSequence, int start, int before, int count) { - if (ignorePrevTextChange) { - return; - } + audioVideoSendButton = new ChatActivityEnterViewAnimatedIconView(context, this); + audioVideoSendButton.setFocusable(true); + audioVideoSendButton.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_YES); + 
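The touch handling above reduces slide-to-cancel to two numbers: distCanMove, 35% of the panel width capped at 140dp, and alpha = 1 + dist / distCanMove clamped to [0, 1], where dist is negative while the finger slides left. alpha drives slideText and recordCircle, recording is cancelled the moment alpha hits 0 during a move, and a release below 0.45 also cancels. The same computation as a standalone function (slideToCancelProgress is an illustrative name):

    // Sketch only: 1.0 = no slide yet, 0.0 = slid far enough left to cancel.
    static float slideToCancelProgress(float touchX, float startedDraggingX, float panelWidthPx, float maxDistPx) {
        float distCanMove = Math.min(panelWidthPx * 0.35f, maxDistPx); // maxDistPx plays the role of AndroidUtilities.dp(140)
        float dist = touchX - startedDraggingX;                        // negative while sliding left
        float alpha = 1.0f + dist / distCanMove;
        return Math.max(0f, Math.min(1f, alpha));                      // clamp exactly like the ACTION_MOVE branch
    }

For example, sliding left by 40% of distCanMove gives 0.6, which is still above the 0.45 release threshold; sliding more than 55% leaves alpha below 0.45, so releasing there cancels the recording.
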
audioVideoSendButton.setAccessibilityDelegate(mediaMessageButtonsDelegate); + padding = AndroidUtilities.dp(9.5f); + audioVideoSendButton.setPadding(padding, padding, padding, padding); + audioVideoSendButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.SRC_IN)); - boolean allowChangeToSmile = true; - int currentPage; - if (emojiView == null) { - currentPage = MessagesController.getGlobalEmojiSettings().getInt("selected_page", 0); - } else { - currentPage = emojiView.getCurrentPage(); - } - if (currentPage == 0 || !allowStickers && !allowGifs) { - allowChangeToSmile = false; - } + if (Build.VERSION.SDK_INT >= 21 && NekoConfig.useChatAttachMediaMenu.Bool()) { + audioVideoSendButton.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_listSelector))); + } - if ((before == 0 && !TextUtils.isEmpty(charSequence) || before != 0 && TextUtils.isEmpty(charSequence)) && allowChangeToSmile) { - setEmojiButtonImage(false, true); - } - if (lineCount != messageEditText.getLineCount()) { - heightShouldBeChanged = (messageEditText.getLineCount() >= 4) != (lineCount >= 4); - if (!isInitLineCount && messageEditText.getMeasuredWidth() > 0) { - onLineCountChanged(lineCount, messageEditText.getLineCount()); - } - lineCount = messageEditText.getLineCount(); - } else { - heightShouldBeChanged = false; - } + audioVideoButtonContainer.addView(audioVideoSendButton, LayoutHelper.createFrame(48, 48)); - if (innerTextChange == 1) { - return; - } - if (sendByEnter && !isPaste && editingMessageObject == null && count > before && charSequence.length() > 0 && charSequence.length() == start + count && charSequence.charAt(charSequence.length() - 1) == '\n') { - nextChangeIsSend = true; - } - isPaste = false; - checkSendButton(true); - CharSequence message = AndroidUtilities.getTrimmedString(charSequence.toString()); - if (delegate != null) { - if (!ignoreTextChange) { - if ((before > count + 1 || (count - before) > 2 || TextUtils.isEmpty(charSequence)) && delegate.getDisableLinkPreviewStatus() == 1) { - messageWebPageSearch = true; - } - delegate.onTextChanged(charSequence, before > count + 1 || (count - before) > 2); - } - } - if (innerTextChange != 2 && (count - before) > 1) { - processChange = true; - } - if (editingMessageObject == null && !canWriteToChannel && message.length() != 0 && lastTypingTimeSend < System.currentTimeMillis() - 5000 && !ignoreTextChange) { - lastTypingTimeSend = System.currentTimeMillis(); - if (delegate != null) { - delegate.needSendTyping(); - } - } + cancelBotButton = new ImageView(context); + cancelBotButton.setVisibility(INVISIBLE); + cancelBotButton.setScaleType(ImageView.ScaleType.CENTER_INSIDE); + cancelBotButton.setImageDrawable(progressDrawable = new CloseProgressDrawable2() { + @Override + protected int getCurrentColor() { + return Theme.getColor(Theme.key_chat_messagePanelCancelInlineBot); + } + }); + cancelBotButton.setContentDescription(LocaleController.getString("Cancel", R.string.Cancel)); + cancelBotButton.setSoundEffectsEnabled(false); + cancelBotButton.setScaleX(0.1f); + cancelBotButton.setScaleY(0.1f); + cancelBotButton.setAlpha(0.0f); + if (Build.VERSION.SDK_INT >= 21) { + cancelBotButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); + } + sendButtonContainer.addView(cancelBotButton, LayoutHelper.createFrame(48, 48)); + cancelBotButton.setOnClickListener(view -> { + String text = messageEditText != null ? 
messageEditText.getText().toString() : ""; + int idx = text.indexOf(' '); + if (idx == -1 || idx == text.length() - 1) { + setFieldText(""); + } else { + setFieldText(text.substring(0, idx + 1)); } + }); + + if (isInScheduleMode()) { + sendButtonDrawable = context.getResources().getDrawable(R.drawable.input_schedule).mutate(); + sendButtonInverseDrawable = context.getResources().getDrawable(R.drawable.input_schedule).mutate(); + inactinveSendButtonDrawable = context.getResources().getDrawable(R.drawable.input_schedule).mutate(); + } else { + sendButtonDrawable = context.getResources().getDrawable(R.drawable.ic_send).mutate(); + sendButtonInverseDrawable = context.getResources().getDrawable(R.drawable.ic_send).mutate(); + inactinveSendButtonDrawable = context.getResources().getDrawable(R.drawable.ic_send).mutate(); + } + + sendButton = new View(context) { + + private int drawableColor; + private float animationProgress; + private float animateBounce; + private long lastAnimationTime; + private float animationDuration; + private int prevColorType; @Override - public void afterTextChanged(Editable editable) { - if (ignorePrevTextChange) { - return; + protected void onDraw(Canvas canvas) { + int x = (getMeasuredWidth() - sendButtonDrawable.getIntrinsicWidth()) / 2; + int y = (getMeasuredHeight() - sendButtonDrawable.getIntrinsicHeight()) / 2; + if (isInScheduleMode()) { + y -= AndroidUtilities.dp(1); + } else { + x += AndroidUtilities.dp(2); } - if (prevText != null) { - ignorePrevTextChange = true; - editable.replace(0, editable.length(), prevText); - prevText = null; - ignorePrevTextChange = false; - return; + + int color; + boolean showingPopup; + int colorType; + if (showingPopup = (sendPopupWindow != null && sendPopupWindow.isShowing())) { + color = getThemedColor(Theme.key_chat_messagePanelVoicePressed); + colorType = 1; + } else { + color = getThemedColor(Theme.key_chat_messagePanelSend); + colorType = 2; } - if (innerTextChange == 0) { - if (nextChangeIsSend) { - sendMessage(); - nextChangeIsSend = false; - } - if (processChange) { - ImageSpan[] spans = editable.getSpans(0, editable.length(), ImageSpan.class); - for (int i = 0; i < spans.length; i++) { - editable.removeSpan(spans[i]); + if (color != drawableColor) { + lastAnimationTime = SystemClock.elapsedRealtime(); + if (prevColorType != 0 && prevColorType != colorType) { + animationProgress = 0.0f; + if (showingPopup) { + animationDuration = 200.0f; + } else { + animationDuration = 120.0f; } - Emoji.replaceEmoji(editable, messageEditText.getPaint().getFontMetricsInt(), AndroidUtilities.dp(20), false, null); - processChange = false; - } - } - - int beforeLimit; - codePointCount = Character.codePointCount(editable, 0, editable.length()); - boolean doneButtonEnabledLocal = true; - if (currentLimit > 0 && (beforeLimit = currentLimit - codePointCount) <= 100) { - if (beforeLimit < -9999) { - beforeLimit = -9999; - } - captionLimitView.setNumber(beforeLimit, captionLimitView.getVisibility() == View.VISIBLE); - if (captionLimitView.getVisibility() != View.VISIBLE) { - captionLimitView.setVisibility(View.VISIBLE); - captionLimitView.setAlpha(0); - captionLimitView.setScaleX(0.5f); - captionLimitView.setScaleY(0.5f); - } - captionLimitView.animate().setListener(null).cancel(); - captionLimitView.animate().alpha(1f).scaleX(1f).scaleY(1f).setDuration(100).start(); - if (beforeLimit < 0) { - doneButtonEnabledLocal = false; - captionLimitView.setTextColor(getThemedColor(Theme.key_windowBackgroundWhiteRedText)); } else { - 
captionLimitView.setTextColor(getThemedColor(Theme.key_windowBackgroundWhiteGrayText)); + animationProgress = 1.0f; } - } else { - captionLimitView.animate().alpha(0).scaleX(0.5f).scaleY(0.5f).setDuration(100).setListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - captionLimitView.setVisibility(View.GONE); - } - }); + prevColorType = colorType; + drawableColor = color; + sendButtonDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelSend), PorterDuff.Mode.SRC_IN)); + int c = getThemedColor(Theme.key_chat_messagePanelIcons); + inactinveSendButtonDrawable.setColorFilter(new PorterDuffColorFilter(Color.argb(0xb4, Color.red(c), Color.green(c), Color.blue(c)), PorterDuff.Mode.SRC_IN)); + sendButtonInverseDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelVoicePressed), PorterDuff.Mode.SRC_IN)); } - - if (doneButtonEnabled != doneButtonEnabledLocal) { - doneButtonEnabled = doneButtonEnabledLocal; - if (doneButtonColorAnimator != null) { - doneButtonColorAnimator.cancel(); + if (animationProgress < 1.0f) { + long newTime = SystemClock.elapsedRealtime(); + long dt = newTime - lastAnimationTime; + animationProgress += dt / animationDuration; + if (animationProgress > 1.0f) { + animationProgress = 1.0f; } - doneButtonColorAnimator = ValueAnimator.ofFloat(doneButtonEnabled ? 0 : 1f, doneButtonEnabled ? 1f : 0); - doneButtonColorAnimator.addUpdateListener(valueAnimator -> { - int color = getThemedColor(Theme.key_chat_messagePanelVoicePressed); - int defaultAlpha = Color.alpha(color); - doneButtonEnabledProgress = (float) valueAnimator.getAnimatedValue(); - doneCheckDrawable.setColorFilter(new PorterDuffColorFilter(ColorUtils.setAlphaComponent(color, (int) (defaultAlpha * (0.58f + 0.42f * doneButtonEnabledProgress))), PorterDuff.Mode.MULTIPLY)); - doneButtonImage.invalidate(); - }); - doneButtonColorAnimator.setDuration(150).start(); - } - if (botCommandsMenuContainer != null) { - botCommandsMenuContainer.dismiss(); + lastAnimationTime = newTime; + invalidate(); } - checkBotMenu(); - - if (editingCaption && !captionLimitBulletinShown && !MessagesController.getInstance(currentAccount).premiumLocked && !UserConfig.getInstance(currentAccount).isPremium() && codePointCount > MessagesController.getInstance(currentAccount).captionLengthLimitDefault && codePointCount < MessagesController.getInstance(currentAccount).captionLengthLimitPremium) { - captionLimitBulletinShown = true; - if (heightShouldBeChanged) { - AndroidUtilities.runOnUIThread(() -> showCaptionLimitBulletin(), 300); + if (!showingPopup) { + if (slowModeTimer == Integer.MAX_VALUE && !isInScheduleMode()) { + inactinveSendButtonDrawable.setBounds(x, y, x + sendButtonDrawable.getIntrinsicWidth(), y + sendButtonDrawable.getIntrinsicHeight()); + inactinveSendButtonDrawable.draw(canvas); } else { - showCaptionLimitBulletin(); - } - } - } - }); - - if (isChat) { - if (parentFragment != null) { - Drawable drawable1 = context.getResources().getDrawable(R.drawable.input_calendar1).mutate(); - Drawable drawable2 = context.getResources().getDrawable(R.drawable.input_calendar2).mutate(); - drawable1.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.SRC_IN)); - drawable2.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_recordedVoiceDot), PorterDuff.Mode.SRC_IN)); - CombinedDrawable combinedDrawable = new CombinedDrawable(drawable1, drawable2); - - 
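The afterTextChanged block above drives the caption counter from code points rather than chars: beforeLimit = currentLimit - Character.codePointCount(text), the counter only appears once beforeLimit <= 100, its display value is clamped at -9999, and a negative value turns it red and disables the done button. A worked example of just that arithmetic, with CaptionCounter and State as illustrative names:

    // Sketch only: mirrors the counter/enable logic, minus the animations.
    final class CaptionCounter {
        static final class State {
            final boolean counterVisible;
            final int displayValue;     // clamped at -9999 like the patch
            final boolean doneEnabled;  // false once the limit is exceeded

            State(boolean counterVisible, int displayValue, boolean doneEnabled) {
                this.counterVisible = counterVisible;
                this.displayValue = displayValue;
                this.doneEnabled = doneEnabled;
            }
        }

        static State evaluate(CharSequence text, int currentLimit) {
            int codePoints = Character.codePointCount(text, 0, text.length()); // surrogate pairs count once
            int beforeLimit = currentLimit - codePoints;
            if (currentLimit <= 0 || beforeLimit > 100) {
                return new State(false, beforeLimit, true);                    // plenty of room: counter hidden
            }
            return new State(true, Math.max(beforeLimit, -9999), beforeLimit >= 0);
        }
    }

With currentLimit = 1024 and a caption of 1,030 code points, this yields a visible counter at -6 and a disabled done button, matching the red state in the hunk above.
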
scheduledButton = new ImageView(context); - scheduledButton.setImageDrawable(combinedDrawable); - scheduledButton.setVisibility(GONE); - scheduledButton.setContentDescription(LocaleController.getString("ScheduledMessages", R.string.ScheduledMessages)); - scheduledButton.setScaleType(ImageView.ScaleType.CENTER); - if (Build.VERSION.SDK_INT >= 21) { - scheduledButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); - } - frameLayout.addView(scheduledButton, LayoutHelper.createFrame(48, 48, Gravity.BOTTOM | Gravity.RIGHT)); - scheduledButton.setOnClickListener(v -> { - if (delegate != null) { - delegate.openScheduledMessages(); - } - }); - } - - attachLayout = new LinearLayout(context); - attachLayout.setOrientation(LinearLayout.HORIZONTAL); - attachLayout.setEnabled(false); - attachLayout.setPivotX(AndroidUtilities.dp(48)); - attachLayout.setClipChildren(false); - frameLayout.addView(attachLayout, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 48, Gravity.BOTTOM | Gravity.RIGHT)); - - - botCommandsMenuButton = new BotCommandsMenuView(getContext()); - botCommandsMenuButton.setOnClickListener(view -> { - boolean open = !botCommandsMenuButton.isOpened(); - botCommandsMenuButton.setOpened(open); - if (!NekoConfig.disableVibration.Bool()) { - try { - performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignore) { + sendButtonDrawable.setBounds(x, y, x + sendButtonDrawable.getIntrinsicWidth(), y + sendButtonDrawable.getIntrinsicHeight()); + sendButtonDrawable.draw(canvas); } } - if (hasBotWebView()) { - if (open) { - if (emojiViewVisible || botKeyboardViewVisible) { - AndroidUtilities.runOnUIThread(this::openWebViewMenu, 275); - hidePopup(false); - return; + if (showingPopup || animationProgress != 1.0f) { + Theme.dialogs_onlineCirclePaint.setColor(getThemedColor(Theme.key_chat_messagePanelSend)); + int rad = AndroidUtilities.dp(20); + if (showingPopup) { + sendButtonInverseDrawable.setAlpha(255); + float p = animationProgress; + if (p <= 0.25f) { + float progress = p / 0.25f; + rad += AndroidUtilities.dp(2) * CubicBezierInterpolator.EASE_IN.getInterpolation(progress); + } else { + p -= 0.25f; + if (p <= 0.5f) { + float progress = p / 0.5f; + rad += AndroidUtilities.dp(2) - AndroidUtilities.dp(3) * CubicBezierInterpolator.EASE_IN.getInterpolation(progress); + } else { + p -= 0.5f; + float progress = p / 0.25f; + rad += -AndroidUtilities.dp(1) + AndroidUtilities.dp(1) * CubicBezierInterpolator.EASE_IN.getInterpolation(progress); + } } - - openWebViewMenu(); } else { - botWebViewMenuContainer.dismiss(); + int alpha = (int) (255 * (1.0f - animationProgress)); + Theme.dialogs_onlineCirclePaint.setAlpha(alpha); + sendButtonInverseDrawable.setAlpha(alpha); } - return; + canvas.drawCircle(getMeasuredWidth() / 2, getMeasuredHeight() / 2, rad, Theme.dialogs_onlineCirclePaint); + sendButtonInverseDrawable.setBounds(x, y, x + sendButtonDrawable.getIntrinsicWidth(), y + sendButtonDrawable.getIntrinsicHeight()); + sendButtonInverseDrawable.draw(canvas); } + } - if (open) { - botCommandsMenuContainer.show(); - } else { - botCommandsMenuContainer.dismiss(); + @Override + public boolean onTouchEvent(MotionEvent event) { + if (getAlpha() <= 0f) { // for accessibility + return false; } - }); - frameLayout.addView(botCommandsMenuButton, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 32, Gravity.BOTTOM | Gravity.LEFT, 10, 8, 10, 8)); - 
AndroidUtilities.updateViewVisibilityAnimated(botCommandsMenuButton, false, 1f, false); - botCommandsMenuButton.setExpanded(true, false); - - LinearLayoutManager layoutManager = new LinearLayoutManager(context); + return super.onTouchEvent(event); + } + }; + sendButton.setVisibility(INVISIBLE); + int color = getThemedColor(Theme.key_chat_messagePanelSend); + sendButton.setContentDescription(LocaleController.getString("Send", R.string.Send)); + sendButton.setSoundEffectsEnabled(false); + sendButton.setScaleX(0.1f); + sendButton.setScaleY(0.1f); + sendButton.setAlpha(0.0f); + if (Build.VERSION.SDK_INT >= 21) { + sendButton.setBackgroundDrawable(Theme.createSelectorDrawable(Color.argb(24, Color.red(color), Color.green(color), Color.blue(color)), 1)); + } + sendButtonContainer.addView(sendButton, LayoutHelper.createFrame(48, 48)); + sendButton.setOnClickListener(view -> { + if ((sendPopupWindow != null && sendPopupWindow.isShowing()) || (runningAnimationAudio != null && runningAnimationAudio.isRunning()) || moveToSendStateRunnable != null) { + return; + } + sendMessage(); + }); + sendButton.setOnLongClickListener(this::onSendLongClick); - botCommandsMenuContainer = new BotCommandsMenuContainer(context) { - @Override - protected void onDismiss() { - super.onDismiss(); - botCommandsMenuButton.setOpened(false); - } - }; - botCommandsMenuContainer.listView.setLayoutManager(layoutManager); - botCommandsMenuContainer.listView.setAdapter(botCommandsAdapter = new BotCommandsMenuView.BotCommandsAdapter()); - botCommandsMenuContainer.listView.setOnItemClickListener(new RecyclerListView.OnItemClickListener() { - @Override - public void onItemClick(View view, int position) { - if (view instanceof BotCommandsMenuView.BotCommandView) { - String command = ((BotCommandsMenuView.BotCommandView) view).getCommand(); - if (TextUtils.isEmpty(command)) { - return; - } - if (isInScheduleMode()) { - AlertsCreator.createScheduleDatePickerDialog(parentActivity, dialog_id, (notify, scheduleDate) -> { - SendMessagesHelper.getInstance(currentAccount).sendMessage(command, dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, notify, scheduleDate, null, false); - setFieldText(""); - botCommandsMenuContainer.dismiss(); - }, resourcesProvider); - } else { - if (fragment != null && fragment.checkSlowMode(view)) { - return; - } - SendMessagesHelper.getInstance(currentAccount).sendMessage(command, dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, true, 0, null, false); - setFieldText(""); - botCommandsMenuContainer.dismiss(); - } + slowModeButton = new SimpleTextView(context); + slowModeButton.setTextSize(18); + slowModeButton.setVisibility(INVISIBLE); + slowModeButton.setSoundEffectsEnabled(false); + slowModeButton.setScaleX(0.1f); + slowModeButton.setScaleY(0.1f); + slowModeButton.setAlpha(0.0f); + slowModeButton.setPadding(0, 0, AndroidUtilities.dp(13), 0); + slowModeButton.setGravity(Gravity.RIGHT | Gravity.CENTER_VERTICAL); + slowModeButton.setTextColor(getThemedColor(Theme.key_chat_messagePanelIcons)); + sendButtonContainer.addView(slowModeButton, LayoutHelper.createFrame(64, 48, Gravity.RIGHT | Gravity.TOP)); + slowModeButton.setOnClickListener(v -> { + if (delegate != null) { + delegate.onUpdateSlowModeButton(slowModeButton, true, slowModeButton.getText()); + } + }); + slowModeButton.setOnLongClickListener(v -> { + if (messageEditText == null || messageEditText.length() <= 0) { + return false; + } + return onSendLongClick(v); + }); - } - } - }); - 
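The constructor code above also seeds the emoji-panel sizing from the last observed soft-keyboard height, stored under "kbd_height" and "kbd_height_land3" with a 200dp fallback, so the panel can be laid out at a sensible size before the keyboard has ever been shown. A minimal sketch of that restore-with-fallback step; KeyboardHeights and defaultPx are illustrative, the keys and default come from this hunk:

    import android.content.SharedPreferences;

    // Sketch only: restore the remembered keyboard heights, falling back to ~200dp
    // (passed in here as defaultPx instead of calling AndroidUtilities.dp(200)).
    final class KeyboardHeights {
        final int portraitPx;
        final int landscapePx;

        KeyboardHeights(SharedPreferences prefs, int defaultPx) {
            portraitPx = prefs.getInt("kbd_height", defaultPx);
            landscapePx = prefs.getInt("kbd_height_land3", defaultPx);
        }
    }
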
botCommandsMenuContainer.listView.setOnItemLongClickListener(new RecyclerListView.OnItemLongClickListener() { - @Override - public boolean onItemClick(View view, int position) { - if (view instanceof BotCommandsMenuView.BotCommandView) { - String command = ((BotCommandsMenuView.BotCommandView) view).getCommand(); - setFieldText(command + " "); - botCommandsMenuContainer.dismiss(); - return true; - } - return false; - } - }); - botCommandsMenuContainer.setClipToPadding(false); - sizeNotifierLayout.addView(botCommandsMenuContainer, 14, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.BOTTOM)); - botCommandsMenuContainer.setVisibility(View.GONE); + SharedPreferences sharedPreferences = MessagesController.getGlobalEmojiSettings(); + keyboardHeight = sharedPreferences.getInt("kbd_height", AndroidUtilities.dp(200)); + keyboardHeightLand = sharedPreferences.getInt("kbd_height_land3", AndroidUtilities.dp(200)); - botWebViewMenuContainer = new BotWebViewMenuContainer(context, this) { - @Override - public void onDismiss() { - super.onDismiss(); - botCommandsMenuButton.setOpened(false); - } - }; - sizeNotifierLayout.addView(botWebViewMenuContainer, 15, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.BOTTOM)); - botWebViewMenuContainer.setVisibility(GONE); - botWebViewMenuContainer.setOnDismissGlobalListener(() -> { - if (botButtonsMessageObject != null && TextUtils.isEmpty(messageEditText.getText()) && !botWebViewMenuContainer.hasSavedText()) { - showPopup(1, POPUP_CONTENT_BOT_KEYBOARD); - } - }); + setRecordVideoButtonVisible(false, false); + checkSendButton(false); + checkChannelRights(); - botButton = new ImageView(context); - botButton.setImageDrawable(botButtonDrawable = new ReplaceableIconDrawable(context)); - botButtonDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.SRC_IN)); - botButtonDrawable.setIcon(R.drawable.input_bot2, false); - botButton.setScaleType(ImageView.ScaleType.CENTER); - if (Build.VERSION.SDK_INT >= 21) { - botButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); - } - botButton.setVisibility(GONE); - AndroidUtilities.updateViewVisibilityAnimated(botButton, false, 0.1f, false); - attachLayout.addView(botButton, LayoutHelper.createLinear(48, 48)); - botButton.setOnClickListener(v -> { - if (hasBotWebView() && botCommandsMenuIsShowing()) { - botWebViewMenuContainer.dismiss(v::callOnClick); - return; - } - if (searchingType != 0) { - setSearchingTypeInternal(0, false); - emojiView.closeSearch(false); - messageEditText.requestFocus(); - } - if (botReplyMarkup != null) { - if (!isPopupShowing() || currentPopupContentType != POPUP_CONTENT_BOT_KEYBOARD) { - showPopup(1, POPUP_CONTENT_BOT_KEYBOARD); - } - } else if (hasBotCommands) { - setFieldText("/"); - messageEditText.requestFocus(); - openKeyboard(); - } - if (stickersExpanded) { - setStickersExpanded(false, false, false); - } - }); + createMessageEditText(); + } - notifyButton = new ImageView(context); - notifySilentDrawable = new CrossOutDrawable(context, R.drawable.input_notify_on, Theme.key_chat_messagePanelIcons); - notifyButton.setImageDrawable(notifySilentDrawable); - notifySilentDrawable.setCrossOut(silent, false); - notifyButton.setContentDescription(silent ? 
LocaleController.getString("AccDescrChanSilentOn", R.string.AccDescrChanSilentOn) : LocaleController.getString("AccDescrChanSilentOff", R.string.AccDescrChanSilentOff)); - notifyButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.SRC_IN)); - notifyButton.setScaleType(ImageView.ScaleType.CENTER); - if (Build.VERSION.SDK_INT >= 21) { - notifyButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); - } - notifyButton.setVisibility(canWriteToChannel && (delegate == null || !delegate.hasScheduledMessages()) ? VISIBLE : GONE); - attachLayout.addView(notifyButton, LayoutHelper.createLinear(48, 48)); - notifyButton.setOnClickListener(new OnClickListener() { + private void createCaptionLimitView() { + if (captionLimitView != null) { + return; + } - private Toast visibleToast; + captionLimitView = new NumberTextView(getContext()); + captionLimitView.setVisibility(View.GONE); + captionLimitView.setTextSize(15); + captionLimitView.setTextColor(getThemedColor(Theme.key_windowBackgroundWhiteGrayText)); + captionLimitView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + captionLimitView.setCenterAlign(true); + addView(captionLimitView, 3, LayoutHelper.createFrame(48, 20, Gravity.BOTTOM | Gravity.RIGHT, 3, 0, 0, 48)); + } - @Override - public void onClick(View v) { - silent = !silent; - if (notifySilentDrawable == null) { - notifySilentDrawable = new CrossOutDrawable(context, R.drawable.input_notify_on, Theme.key_chat_messagePanelIcons); - } - notifySilentDrawable.setCrossOut(silent, true); - notifyButton.setImageDrawable(notifySilentDrawable); - MessagesController.getNotificationsSettings(currentAccount).edit().putBoolean("silent_" + dialog_id, silent).commit(); - NotificationsController.getInstance(currentAccount).updateServerNotificationsSettings(dialog_id, fragment == null ? 0 : fragment.getTopicId()); - try { - if (visibleToast != null) { - visibleToast.cancel(); - } - } catch (Exception e) { - FileLog.e(e); - } - fragment.getUndoView().showWithAction(0, !silent ? UndoView.ACTION_NOTIFY_ON : UndoView.ACTION_NOTIFY_OFF, null); - notifyButton.setContentDescription(silent ? 
LocaleController.getString("AccDescrChanSilentOn", R.string.AccDescrChanSilentOn) : LocaleController.getString("AccDescrChanSilentOff", R.string.AccDescrChanSilentOff)); - updateFieldHint(true); - } - }); + private void createScheduledButton() { + if (scheduledButton != null || parentFragment == null) { + return; + } - attachButton = new ImageView(context); - attachButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.SRC_IN)); - attachButton.setImageResource(R.drawable.input_attach); - attachButton.setScaleType(ImageView.ScaleType.CENTER); - if (Build.VERSION.SDK_INT >= 21) { - attachButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); + Drawable drawable1 = getContext().getResources().getDrawable(R.drawable.input_calendar1).mutate(); + Drawable drawable2 = getContext().getResources().getDrawable(R.drawable.input_calendar2).mutate(); + drawable1.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.SRC_IN)); + drawable2.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_recordedVoiceDot), PorterDuff.Mode.SRC_IN)); + CombinedDrawable combinedDrawable = new CombinedDrawable(drawable1, drawable2); + + scheduledButton = new ImageView(getContext()); + scheduledButton.setImageDrawable(combinedDrawable); + scheduledButton.setVisibility(GONE); + scheduledButton.setContentDescription(LocaleController.getString("ScheduledMessages", R.string.ScheduledMessages)); + scheduledButton.setScaleType(ImageView.ScaleType.CENTER); + if (Build.VERSION.SDK_INT >= 21) { + scheduledButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); + } + messageEditTextContainer.addView(scheduledButton, 2, LayoutHelper.createFrame(48, 48, Gravity.BOTTOM | Gravity.RIGHT)); + scheduledButton.setOnClickListener(v -> { + if (delegate != null) { + delegate.openScheduledMessages(); } - attachLayout.addView(attachButton, LayoutHelper.createLinear(48, 48)); - attachButton.setOnClickListener(v -> { - if (adjustPanLayoutHelper != null && adjustPanLayoutHelper.animationInProgress()) { - return; - } - delegate.didPressAttachButton(); - }); - attachButton.setContentDescription(LocaleController.getString("AccDescrAttachButton", R.string.AccDescrAttachButton)); + }); + } + + private void createGiftButton() { + if (giftButton != null || parentFragment == null) { + return; } - senderSelectView = new SenderSelectView(getContext()); - senderSelectView.setOnClickListener(v -> { - if (getTranslationY() != 0) { - onEmojiSearchClosed = () -> senderSelectView.callOnClick(); - hidePopup(true, true); + giftButton = new ImageView(getContext()); + giftButton.setImageResource(R.drawable.msg_input_gift); + giftButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.MULTIPLY)); + giftButton.setVisibility(GONE); + giftButton.setContentDescription(LocaleController.getString(R.string.GiftPremium)); + giftButton.setScaleType(ImageView.ScaleType.CENTER); + if (Build.VERSION.SDK_INT >= 21) { + giftButton.setBackground(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); + } + attachLayout.addView(giftButton, 0, LayoutHelper.createFrame(48, 48, Gravity.CENTER_VERTICAL | Gravity.RIGHT)); + giftButton.setOnClickListener(v -> { + MessagesController.getInstance(currentAccount).getMainSettings().edit().putBoolean("show_gift_for_" + parentFragment.getDialogId(), 
false).apply(); + AndroidUtilities.updateViewVisibilityAnimated(giftButton, false); + new GiftPremiumBottomSheet(getParentFragment(), getParentFragment().getCurrentUser()).show(); + }); + } + + private void createBotButton() { + if (botButton != null) { + return; + } + botButton = new ImageView(getContext()); + botButton.setImageDrawable(botButtonDrawable = new ReplaceableIconDrawable(getContext())); + botButtonDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.SRC_IN)); + botButtonDrawable.setIcon(R.drawable.input_bot2, false); + botButton.setScaleType(ImageView.ScaleType.CENTER); + if (Build.VERSION.SDK_INT >= 21) { + botButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); + } + botButton.setVisibility(GONE); + AndroidUtilities.updateViewVisibilityAnimated(botButton, false, 0.1f, false); + attachLayout.addView(botButton, 0, LayoutHelper.createLinear(48, 48)); + botButton.setOnClickListener(v -> { + if (hasBotWebView() && botCommandsMenuIsShowing()) { + botWebViewMenuContainer.dismiss(v::callOnClick); return; } - if (delegate.measureKeyboardHeight() > AndroidUtilities.dp(20)) { - int totalHeight = delegate.getContentViewHeight(); - int keyboard = delegate.measureKeyboardHeight(); - if (keyboard <= AndroidUtilities.dp(20)) { - totalHeight += keyboard; - } - if (emojiViewVisible) { - totalHeight -= getEmojiPadding(); - } - - if (totalHeight < AndroidUtilities.dp(200)) { - onKeyboardClosed = () -> senderSelectView.callOnClick(); - closeKeyboard(); - return; + if (searchingType != 0) { + setSearchingTypeInternal(0, false); + emojiView.closeSearch(false); + if (messageEditText != null) { + messageEditText.requestFocus(); } } - if (delegate.getSendAsPeers() != null) { - if (!NekoConfig.disableVibration.Bool()) { - try { - v.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignored) { - } - } - if (senderSelectPopupWindow != null) { - senderSelectPopupWindow.setPauseNotifications(false); - senderSelectPopupWindow.startDismissAnimation(); - return; + if (botReplyMarkup != null) { + if (!isPopupShowing() || currentPopupContentType != POPUP_CONTENT_BOT_KEYBOARD) { + showPopup(1, POPUP_CONTENT_BOT_KEYBOARD); + } else if (isPopupShowing() && currentPopupContentType == POPUP_CONTENT_BOT_KEYBOARD) { + showPopup(0, POPUP_CONTENT_BOT_KEYBOARD); } - MessagesController controller = MessagesController.getInstance(currentAccount); - TLRPC.ChatFull chatFull = controller.getChatFull(-dialog_id); - if (chatFull == null) { - return; + } else if (hasBotCommands) { + setFieldText("/"); + if (messageEditText != null) { + messageEditText.requestFocus(); } + openKeyboard(); + } + if (stickersExpanded) { + setStickersExpanded(false, false, false); + } + }); + } - ViewGroup fl = parentFragment.getParentLayout().getOverlayContainerView(); + private void createDoneButton() { + if (doneButtonContainer != null) { + return; + } - senderSelectPopupWindow = new SenderSelectPopup(context, parentFragment, controller, chatFull, delegate.getSendAsPeers(), (recyclerView, senderView, peer) -> { - if (senderSelectPopupWindow == null) return; - if (chatFull != null) { - var chat = controller.getChat(chatFull.id); - if (chat != null && chat.creator) { - var self = UserConfig.getInstance(currentAccount).getCurrentUser(); - - if (peer.channel_id == chat.id) { - var rights = chat.admin_rights; - rights.anonymous = true; - var rank = 
MessagesController.getInstance(currentAccount).getAdminRank(chat.id, self.id); - MessagesController.getInstance(currentAccount).setUserAdminRole(chat.id, self, rights, rank, false, parentFragment, false, false, null, null); - } else if (peer.user_id == self.id) { - var rights = chat.admin_rights; - rights.anonymous = false; - var rank = MessagesController.getInstance(currentAccount).getAdminRank(chat.id, self.id); - MessagesController.getInstance(currentAccount).setUserAdminRole(chat.id, self, rights, rank, false, parentFragment, false, false, null, null); - } - } + doneButtonContainer = new FrameLayout(getContext()); + doneButtonContainer.setVisibility(GONE); + textFieldContainer.addView(doneButtonContainer, LayoutHelper.createFrame(48, 48, Gravity.BOTTOM | Gravity.RIGHT)); + doneButtonContainer.setOnClickListener(view -> doneEditingMessage()); - chatFull.default_send_as = peer; - updateSendAsButton(); - } + Drawable doneCircleDrawable = Theme.createCircleDrawable(AndroidUtilities.dp(16), getThemedColor(Theme.key_chat_messagePanelSend)); + doneCheckDrawable = getContext().getResources().getDrawable(R.drawable.input_done).mutate(); + doneCheckDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelVoicePressed), PorterDuff.Mode.MULTIPLY)); + CombinedDrawable combinedDrawable = new CombinedDrawable(doneCircleDrawable, doneCheckDrawable, 0, AndroidUtilities.dp(1)); + combinedDrawable.setCustomSize(AndroidUtilities.dp(32), AndroidUtilities.dp(32)); - parentFragment.getMessagesController().setDefaultSendAs(dialog_id, peer.user_id != 0 ? peer.user_id : -peer.channel_id); + doneButtonImage = new ImageView(getContext()); + doneButtonImage.setScaleType(ImageView.ScaleType.CENTER); + doneButtonImage.setImageDrawable(combinedDrawable); + doneButtonImage.setContentDescription(LocaleController.getString("Done", R.string.Done)); + doneButtonContainer.addView(doneButtonImage, LayoutHelper.createFrame(48, 48)); - int[] loc = new int[2]; - boolean wasSelected = senderView.avatar.isSelected(); - senderView.avatar.getLocationInWindow(loc); - senderView.avatar.setSelected(true, true); + doneButtonProgress = new ContextProgressView(getContext(), 0); + doneButtonProgress.setVisibility(View.INVISIBLE); + doneButtonContainer.addView(doneButtonProgress, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + } - SimpleAvatarView avatar = new SimpleAvatarView(getContext()); - if (peer.channel_id != 0) { - TLRPC.Chat chat = controller.getChat(peer.channel_id); - if (chat != null) { - avatar.setAvatar(chat); - } - } else if (peer.user_id != 0) { - TLRPC.User user = controller.getUser(peer.user_id); - if (user != null) { - avatar.setAvatar(user); - } - } - for (int i = 0; i < recyclerView.getChildCount(); i++) { - View ch = recyclerView.getChildAt(i); + private void createExpandStickersButton() { + if (expandStickersButton != null) { + return; + } + expandStickersButton = new ImageView(getContext()) { + @Override + public boolean onTouchEvent(MotionEvent event) { + if (getAlpha() <= 0f) { // for accessibility + return false; + } + return super.onTouchEvent(event); + } + }; + expandStickersButton.setScaleType(ImageView.ScaleType.CENTER); + expandStickersButton.setImageDrawable(stickersArrow = new AnimatedArrowDrawable(getThemedColor(Theme.key_chat_messagePanelIcons), false)); + expandStickersButton.setVisibility(GONE); + expandStickersButton.setScaleX(0.1f); + expandStickersButton.setScaleY(0.1f); + expandStickersButton.setAlpha(0.0f); + if 
(Build.VERSION.SDK_INT >= 21) { + expandStickersButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); + } + sendButtonContainer.addView(expandStickersButton, LayoutHelper.createFrame(48, 48)); + expandStickersButton.setOnClickListener(v -> { + if (expandStickersButton.getVisibility() != VISIBLE || expandStickersButton.getAlpha() != 1.0f || waitingForKeyboardOpen || (keyboardVisible && messageEditText != null && messageEditText.isFocused())) { + return; + } + if (stickersExpanded) { + if (searchingType != 0) { + setSearchingTypeInternal(0, true); + emojiView.closeSearch(true); + emojiView.hideSearchKeyboard(); + if (emojiTabOpen) { + checkSendButton(true); + } + } else if (!stickersDragging) { + if (emojiView != null) { + emojiView.showSearchField(false); + } + } + } else if (!stickersDragging) { + emojiView.showSearchField(true); + } + if (!stickersDragging) { + setStickersExpanded(!stickersExpanded, true, false); + } + }); + expandStickersButton.setContentDescription(LocaleController.getString("AccDescrExpandPanel", R.string.AccDescrExpandPanel)); + } + + private void createRecordAudioPanel() { + if (recordedAudioPanel != null) { + return; + } + + recordedAudioPanel = new FrameLayout(getContext()) { + @Override + public void setVisibility(int visibility) { + super.setVisibility(visibility); + updateSendAsButton(); + } + }; + recordedAudioPanel.setVisibility(audioToSend == null ? GONE : VISIBLE); + recordedAudioPanel.setFocusable(true); + recordedAudioPanel.setFocusableInTouchMode(true); + recordedAudioPanel.setClickable(true); + messageEditTextContainer.addView(recordedAudioPanel, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.BOTTOM)); + + recordDeleteImageView = new RLottieImageView(getContext()); + recordDeleteImageView.setScaleType(ImageView.ScaleType.CENTER); + recordDeleteImageView.setAnimation(R.raw.chat_audio_record_delete_2, 28, 28); + recordDeleteImageView.getAnimatedDrawable().setInvalidateOnProgressSet(true); + updateRecordedDeleteIconColors(); + recordDeleteImageView.setContentDescription(LocaleController.getString("Delete", R.string.Delete)); + if (Build.VERSION.SDK_INT >= 21) { + recordDeleteImageView.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); + } + recordedAudioPanel.addView(recordDeleteImageView, LayoutHelper.createFrame(48, 48)); + recordDeleteImageView.setOnClickListener(v -> { + if (runningAnimationAudio != null && runningAnimationAudio.isRunning()) { + return; + } + if (videoToSendMessageObject != null) { + CameraController.getInstance().cancelOnInitRunnable(onFinishInitCameraRunnable); + delegate.needStartRecordVideo(2, true, 0); + } else { + MessageObject playing = MediaController.getInstance().getPlayingMessageObject(); + if (playing != null && playing == audioToSendMessageObject) { + MediaController.getInstance().cleanupPlayer(true, true); + } + } + if (audioToSendPath != null) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("delete file " + audioToSendPath); + } + new File(audioToSendPath).delete(); + } + hideRecordedAudioPanel(false); + checkSendButton(true); + }); + + videoTimelineView = new VideoTimelineView(getContext()); + videoTimelineView.setRoundFrames(true); + videoTimelineView.setDelegate(new VideoTimelineView.VideoTimelineViewDelegate() { + @Override + public void onLeftProgressChanged(float progress) { + if (videoToSendMessageObject == null) { + return; + } + videoToSendMessageObject.startTime = (long) (progress * 
videoToSendMessageObject.estimatedDuration); + delegate.needChangeVideoPreviewState(2, progress); + } + + @Override + public void onRightProgressChanged(float progress) { + if (videoToSendMessageObject == null) { + return; + } + videoToSendMessageObject.endTime = (long) (progress * videoToSendMessageObject.estimatedDuration); + delegate.needChangeVideoPreviewState(2, progress); + } + + @Override + public void didStartDragging() { + delegate.needChangeVideoPreviewState(1, 0); + } + + @Override + public void didStopDragging() { + delegate.needChangeVideoPreviewState(0, 0); + } + }); + recordedAudioPanel.addView(videoTimelineView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER_VERTICAL | Gravity.LEFT, 56, 0, 8, 0)); + + VideoTimelineView.TimeHintView videoTimeHintView = new VideoTimelineView.TimeHintView(getContext()); + videoTimelineView.setTimeHintView(videoTimeHintView); + sizeNotifierLayout.addView(videoTimeHintView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM, 0, 0, 0, 52)); + + recordedAudioBackground = new View(getContext()); + recordedAudioBackground.setBackgroundDrawable(Theme.createRoundRectDrawable(AndroidUtilities.dp(18), getThemedColor(Theme.key_chat_recordedVoiceBackground))); + recordedAudioPanel.addView(recordedAudioBackground, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.CENTER_VERTICAL | Gravity.LEFT, 48, 0, 0, 0)); + + LinearLayout waveFormTimerLayout = new LinearLayout(getContext()); + waveFormTimerLayout.setOrientation(LinearLayout.HORIZONTAL); + recordedAudioPanel.addView(waveFormTimerLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 32, Gravity.CENTER_VERTICAL | Gravity.LEFT, 48 + 44, 0, 13, 0)); + + recordedAudioPlayButton = new ImageView(getContext()); + Matrix matrix = new Matrix(); + matrix.postScale(0.8f, 0.8f, AndroidUtilities.dpf2(24), AndroidUtilities.dpf2(24)); + recordedAudioPlayButton.setImageMatrix(matrix); + recordedAudioPlayButton.setImageDrawable(playPauseDrawable = new MediaActionDrawable()); + recordedAudioPlayButton.setScaleType(ImageView.ScaleType.MATRIX); + recordedAudioPlayButton.setContentDescription(LocaleController.getString("AccActionPlay", R.string.AccActionPlay)); + recordedAudioPanel.addView(recordedAudioPlayButton, LayoutHelper.createFrame(48, 48, Gravity.LEFT | Gravity.BOTTOM, 48, 0, 13, 0)); + recordedAudioPlayButton.setOnClickListener(v -> { + if (audioToSend == null) { + return; + } + if (MediaController.getInstance().isPlayingMessage(audioToSendMessageObject) && !MediaController.getInstance().isMessagePaused()) { + MediaController.getInstance().pauseMessage(audioToSendMessageObject); + playPauseDrawable.setIcon(MediaActionDrawable.ICON_PLAY, true); + recordedAudioPlayButton.setContentDescription(LocaleController.getString("AccActionPlay", R.string.AccActionPlay)); + } else { + playPauseDrawable.setIcon(MediaActionDrawable.ICON_PAUSE, true); + MediaController.getInstance().playMessage(audioToSendMessageObject); + recordedAudioPlayButton.setContentDescription(LocaleController.getString("AccActionPause", R.string.AccActionPause)); + } + }); + + recordedAudioSeekBar = new SeekBarWaveformView(getContext()); + waveFormTimerLayout.addView(recordedAudioSeekBar, LayoutHelper.createLinear(0, 32, 1f, Gravity.CENTER_VERTICAL, 0, 0, 4, 0)); + + recordedAudioTimeTextView = new TextView(getContext()); + recordedAudioTimeTextView.setTextColor(getThemedColor(Theme.key_chat_messagePanelVoiceDuration)); + 
recordedAudioTimeTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); + waveFormTimerLayout.addView(recordedAudioTimeTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, 0f, Gravity.CENTER_VERTICAL)); + } + + private void createSenderSelectView() { + if (senderSelectView != null) { + return; + } + senderSelectView = new SenderSelectView(getContext()); + senderSelectView.setOnClickListener(v -> { + if (getTranslationY() != 0) { + onEmojiSearchClosed = () -> senderSelectView.callOnClick(); + hidePopup(true, true); + return; + } + if (delegate.measureKeyboardHeight() > AndroidUtilities.dp(20)) { + int totalHeight = delegate.getContentViewHeight(); + int keyboard = delegate.measureKeyboardHeight(); + if (keyboard <= AndroidUtilities.dp(20)) { + totalHeight += keyboard; + } + if (emojiViewVisible) { + totalHeight -= getEmojiPadding(); + } + + if (totalHeight < AndroidUtilities.dp(200)) { + onKeyboardClosed = () -> senderSelectView.callOnClick(); + closeKeyboard(); + return; + } + } + if (delegate.getSendAsPeers() != null) { + try { + v.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } catch (Exception ignored) { + } + if (senderSelectPopupWindow != null) { + senderSelectPopupWindow.setPauseNotifications(false); + senderSelectPopupWindow.startDismissAnimation(); + return; + } + MessagesController controller = MessagesController.getInstance(currentAccount); + TLRPC.ChatFull chatFull = controller.getChatFull(-dialog_id); + if (chatFull == null) { + return; + } + + ViewGroup fl = parentFragment.getParentLayout().getOverlayContainerView(); + + senderSelectPopupWindow = new SenderSelectPopup(getContext(), parentFragment, controller, chatFull, delegate.getSendAsPeers(), (recyclerView, senderView, peer) -> { + if (senderSelectPopupWindow == null) return; + if (chatFull != null) { + chatFull.default_send_as = peer; + updateSendAsButton(); + } + + parentFragment.getMessagesController().setDefaultSendAs(dialog_id, peer.user_id != 0 ? 
peer.user_id : -peer.channel_id); + + int[] loc = new int[2]; + boolean wasSelected = senderView.avatar.isSelected(); + senderView.avatar.getLocationInWindow(loc); + senderView.avatar.setSelected(true, true); + + SimpleAvatarView avatar = new SimpleAvatarView(getContext()); + if (peer.channel_id != 0) { + TLRPC.Chat chat = controller.getChat(peer.channel_id); + if (chat != null) { + avatar.setAvatar(chat); + } + } else if (peer.user_id != 0) { + TLRPC.User user = controller.getUser(peer.user_id); + if (user != null) { + avatar.setAvatar(user); + } + } + for (int i = 0; i < recyclerView.getChildCount(); i++) { + View ch = recyclerView.getChildAt(i); if (ch instanceof SenderSelectPopup.SenderView && ch != senderView) { SenderSelectPopup.SenderView childSenderView = (SenderSelectPopup.SenderView) ch; childSenderView.avatar.setSelected(false, true); @@ -2823,7 +2905,7 @@ public void onClick(View v) { d.getWindow().setNavigationBarColor(0); int color = Theme.getColor(Theme.key_actionBarDefault, null, true); - AndroidUtilities.setLightStatusBar(d.getWindow(), ColorUtils.calculateLuminance(color) > 0.7f); + AndroidUtilities.setLightStatusBar(d.getWindow(), color == Color.WHITE); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { int color2 = Theme.getColor(Theme.key_windowBackgroundGray, null, true); @@ -2919,11 +3001,9 @@ public boolean onPreDraw() { public void onAnimationUpdate(DynamicAnimation animation, float value, float velocity) { if (!performedHapticFeedback && value >= endY) { performedHapticFeedback = true; - if (!NekoConfig.disableVibration.Bool()) { - try { - avatar.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignored) { - } + try { + avatar.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } catch (Exception ignored) { } } } @@ -3028,1336 +3108,1415 @@ public void dismiss() { } }); senderSelectView.setVisibility(GONE); - frameLayout.addView(senderSelectView, LayoutHelper.createFrame(32, 32, Gravity.BOTTOM | Gravity.LEFT, 10, 8, 10, 8)); + messageEditTextContainer.addView(senderSelectView, LayoutHelper.createFrame(32, 32, Gravity.BOTTOM | Gravity.LEFT, 10, 8, 10, 8)); + } - recordedAudioPanel = new FrameLayout(context) { - @Override - public void setVisibility(int visibility) { - super.setVisibility(visibility); - updateSendAsButton(); + private void createBotCommandsMenuButton() { + if (botCommandsMenuButton != null) { + return; + } + botCommandsMenuButton = new BotCommandsMenuView(getContext()); + botCommandsMenuButton.setOnClickListener(view -> { + boolean open = !botCommandsMenuButton.isOpened(); + botCommandsMenuButton.setOpened(open); + try { + performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } catch (Exception ignore) { } - }; - recordedAudioPanel.setVisibility(audioToSend == null ? 
GONE : VISIBLE); - recordedAudioPanel.setFocusable(true); - recordedAudioPanel.setFocusableInTouchMode(true); - recordedAudioPanel.setClickable(true); - frameLayout.addView(recordedAudioPanel, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.BOTTOM)); + if (hasBotWebView()) { + if (open) { + if (emojiViewVisible || botKeyboardViewVisible) { + AndroidUtilities.runOnUIThread(this::openWebViewMenu, 275); + hidePopup(false); + return; + } - recordDeleteImageView = new RLottieImageView(context); - recordDeleteImageView.setScaleType(ImageView.ScaleType.CENTER); - recordDeleteImageView.setAnimation(R.raw.chat_audio_record_delete_2, 28, 28); - recordDeleteImageView.getAnimatedDrawable().setInvalidateOnProgressSet(true); - updateRecordedDeleteIconColors(); + openWebViewMenu(); + } else if (botWebViewMenuContainer != null) { + botWebViewMenuContainer.dismiss(); + } + return; + } - recordDeleteImageView.setContentDescription(LocaleController.getString("Delete", R.string.Delete)); - if (Build.VERSION.SDK_INT >= 21) { - recordDeleteImageView.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); - } - recordedAudioPanel.addView(recordDeleteImageView, LayoutHelper.createFrame(48, 48)); - recordDeleteImageView.setOnClickListener(v -> { - if (runningAnimationAudio != null && runningAnimationAudio.isRunning()) { - return; - } - if (videoToSendMessageObject != null) { - CameraController.getInstance().cancelOnInitRunnable(onFinishInitCameraRunnable); - delegate.needStartRecordVideo(2, true, 0); - } else { - MessageObject playing = MediaController.getInstance().getPlayingMessageObject(); - if (playing != null && playing == audioToSendMessageObject) { - MediaController.getInstance().cleanupPlayer(true, true); - } - } - if (audioToSendPath != null) { - new File(audioToSendPath).delete(); + if (open) { + createBotCommandsMenuContainer(); + botCommandsMenuContainer.show(); + } else if (botCommandsMenuContainer != null) { + botCommandsMenuContainer.dismiss(); } - hideRecordedAudioPanel(false); - checkSendButton(true); }); + messageEditTextContainer.addView(botCommandsMenuButton, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 32, Gravity.BOTTOM | Gravity.LEFT, 10, 8, 10, 8)); + AndroidUtilities.updateViewVisibilityAnimated(botCommandsMenuButton, false, 1f, false); + botCommandsMenuButton.setExpanded(true, false); + } - videoTimelineView = new VideoTimelineView(context); - videoTimelineView.setColor(getThemedColor(Theme.key_chat_messagePanelVideoFrame)); - videoTimelineView.setRoundFrames(true); - videoTimelineView.setDelegate(new VideoTimelineView.VideoTimelineViewDelegate() { - @Override - public void onLeftProgressChanged(float progress) { - if (videoToSendMessageObject == null) { - return; - } - videoToSendMessageObject.startTime = (long) (progress * videoToSendMessageObject.estimatedDuration); - delegate.needChangeVideoPreviewState(2, progress); - } + private void createBotWebViewButton() { + if (botWebViewButton != null) { + return; + } + botWebViewButton = new ChatActivityBotWebViewButton(getContext()); + botWebViewButton.setVisibility(GONE); + createBotCommandsMenuButton(); + botWebViewButton.setBotMenuButton(botCommandsMenuButton); + messageEditTextContainer.addView(botWebViewButton, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.BOTTOM)); + } - @Override - public void onRightProgressChanged(float progress) { - if (videoToSendMessageObject == null) { - return; + private void createRecordCircle() { + if 
(recordCircle != null) { + return; + } + recordCircle = new RecordCircle(getContext()); + recordCircle.setVisibility(GONE); + sizeNotifierLayout.addView(recordCircle, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM, 0, 0, 0, 0)); + } + + private void showRestrictedHint() { + if (DialogObject.isChatDialog(dialog_id)) { + TLRPC.Chat chat = accountInstance.getMessagesController().getChat(-dialog_id); + BulletinFactory.of(parentFragment).createSimpleBulletin(R.raw.passcode_lock_close, LocaleController.formatString("SendPlainTextRestrictionHint", R.string.SendPlainTextRestrictionHint, ChatObject.getAllowedSendString(chat)), 3).show(); + } + } + + private void openWebViewMenu() { + createBotWebViewMenuContainer(); + Runnable onRequestWebView = () -> { + AndroidUtilities.hideKeyboard(this); + if (AndroidUtilities.isTablet()) { + BotWebViewSheet webViewSheet = new BotWebViewSheet(getContext(), parentFragment.getResourceProvider()); + webViewSheet.setParentActivity(parentActivity); + webViewSheet.requestWebView(currentAccount, dialog_id, dialog_id, botMenuWebViewTitle, botMenuWebViewUrl, BotWebViewSheet.TYPE_BOT_MENU_BUTTON, 0, false); + webViewSheet.show(); + + if (botCommandsMenuButton != null) { + botCommandsMenuButton.setOpened(false); } - videoToSendMessageObject.endTime = (long) (progress * videoToSendMessageObject.estimatedDuration); - delegate.needChangeVideoPreviewState(2, progress); + } else { + botWebViewMenuContainer.show(currentAccount, dialog_id, botMenuWebViewUrl); } + }; - @Override - public void didStartDragging() { - delegate.needChangeVideoPreviewState(1, 0); - } + if (SharedPrefsHelper.isWebViewConfirmShown(currentAccount, dialog_id)) { + onRequestWebView.run(); + } else { + new AlertDialog.Builder(parentFragment.getParentActivity()) + .setTitle(LocaleController.getString(R.string.BotOpenPageTitle)) + .setMessage(AndroidUtilities.replaceTags(LocaleController.formatString(R.string.BotOpenPageMessage, UserObject.getUserName(MessagesController.getInstance(currentAccount).getUser(dialog_id))))) + .setPositiveButton(LocaleController.getString(R.string.OK), (dialog, which) -> { + onRequestWebView.run(); + SharedPrefsHelper.setWebViewConfirmShown(currentAccount, dialog_id, true); + }) + .setNegativeButton(LocaleController.getString(R.string.Cancel), null) + .setOnDismissListener(dialog -> { + if (botCommandsMenuButton != null && !SharedPrefsHelper.isWebViewConfirmShown(currentAccount, dialog_id)) { + botCommandsMenuButton.setOpened(false); + } + }) + .show(); + } + } - @Override - public void didStopDragging() { - delegate.needChangeVideoPreviewState(0, 0); - } - }); - recordedAudioPanel.addView(videoTimelineView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER_VERTICAL | Gravity.LEFT, 56, 0, 8, 0)); - VideoTimelineView.TimeHintView videoTimeHintView = new VideoTimelineView.TimeHintView(context); - videoTimelineView.setTimeHintView(videoTimeHintView); - sizeNotifierLayout.addView(videoTimeHintView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM, 0, 0, 0, 52)); + public void setBotWebViewButtonOffsetX(float offset) { + emojiButton.setTranslationX(offset); + if (messageEditText != null) { + messageEditText.setTranslationX(offset); + } + attachButton.setTranslationX(offset); + audioVideoSendButton.setTranslationX(offset); + if (botButton != null) { + botButton.setTranslationX(offset); + } + } - recordedAudioBackground = new View(context); - 
recordedAudioBackground.setBackgroundDrawable(Theme.createRoundRectDrawable(AndroidUtilities.dp(18), getThemedColor(Theme.key_chat_recordedVoiceBackground))); - recordedAudioPanel.addView(recordedAudioBackground, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.CENTER_VERTICAL | Gravity.LEFT, 48, 0, 0, 0)); + public void setComposeShadowAlpha(float alpha) { + composeShadowAlpha = alpha; + invalidate(); + } - recordedAudioSeekBar = new SeekBarWaveformView(context); + public ChatActivityBotWebViewButton getBotWebViewButton() { + createBotWebViewButton(); + return botWebViewButton; + } - LinearLayout waveFormTimerLayout = new LinearLayout(context); - waveFormTimerLayout.setOrientation(LinearLayout.HORIZONTAL); - recordedAudioPanel.addView(waveFormTimerLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 32, Gravity.CENTER_VERTICAL | Gravity.LEFT, 48 + 44, 0, 13, 0)); - playPauseDrawable = new MediaActionDrawable(); - recordedAudioPlayButton = new ImageView(context); - Matrix matrix = new Matrix(); - matrix.postScale(0.8f, 0.8f, AndroidUtilities.dpf2(24), AndroidUtilities.dpf2(24)); - recordedAudioPlayButton.setImageMatrix(matrix); - recordedAudioPlayButton.setImageDrawable(playPauseDrawable); - recordedAudioPlayButton.setScaleType(ImageView.ScaleType.MATRIX); - recordedAudioPlayButton.setContentDescription(LocaleController.getString("AccActionPlay", R.string.AccActionPlay)); - recordedAudioPanel.addView(recordedAudioPlayButton, LayoutHelper.createFrame(48, 48, Gravity.LEFT | Gravity.BOTTOM, 48, 0, 13, 0)); - recordedAudioPlayButton.setOnClickListener(v -> { - if (audioToSend == null) { - return; - } - if (MediaController.getInstance().isPlayingMessage(audioToSendMessageObject) && !MediaController.getInstance().isMessagePaused()) { - MediaController.getInstance().pauseMessage(audioToSendMessageObject); - playPauseDrawable.setIcon(MediaActionDrawable.ICON_PLAY, true); - recordedAudioPlayButton.setContentDescription(LocaleController.getString("AccActionPlay", R.string.AccActionPlay)); - } else { - playPauseDrawable.setIcon(MediaActionDrawable.ICON_PAUSE, true); - MediaController.getInstance().playMessage(audioToSendMessageObject); - recordedAudioPlayButton.setContentDescription(LocaleController.getString("AccActionPause", R.string.AccActionPause)); - } - }); + public ChatActivity getParentFragment() { + return parentFragment; + } - recordedAudioTimeTextView = new TextView(context); - recordedAudioTimeTextView.setTextColor(getThemedColor(Theme.key_chat_messagePanelVoiceDuration)); - recordedAudioTimeTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); - waveFormTimerLayout.addView(recordedAudioSeekBar, LayoutHelper.createLinear(0, 32, 1f, Gravity.CENTER_VERTICAL, 0, 0, 4, 0)); - waveFormTimerLayout.addView(recordedAudioTimeTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, 0f, Gravity.CENTER_VERTICAL)); - recordedAudioTimeTextView.setOnClickListener(v -> { - if ((recordedAudioTimeTextView.getAlpha() < 1f) - || (recordedAudioTimeTextView.getVisibility() == GONE)) { - return; + private void checkBotMenu() { + final boolean shouldBeExpanded = (messageEditText == null || TextUtils.isEmpty(messageEditText.getText())) && !(keyboardVisible || waitingForKeyboardOpen || isPopupShowing()); + if (shouldBeExpanded) { + createBotCommandsMenuButton(); + } + if (botCommandsMenuButton != null) { + boolean wasExpanded = botCommandsMenuButton.expanded; + botCommandsMenuButton.setExpanded(shouldBeExpanded, true); + if (wasExpanded != 
botCommandsMenuButton.expanded) { + beginDelayedTransition(); } - Dialogs.CreateVoiceCaptionAlert( - getContext(), - recordTimerView.timestamps, - (String caption) -> { - voiceCaption = caption; - sendMessage(); - return null; - }); - }); + } + } - recordPanel = new FrameLayout(context); - recordPanel.setClipChildren(false); - recordPanel.setVisibility(GONE); - frameLayout.addView(recordPanel, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48)); - recordPanel.setOnTouchListener((v, event) -> true); + public void forceSmoothKeyboard(boolean smoothKeyboard) { + this.smoothKeyboard = smoothKeyboard && !AndroidUtilities.isInMultiwindow && (parentFragment == null || !parentFragment.isInBubbleMode()); + } - slideText = new SlideTextView(context); - recordPanel.addView(slideText, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.NO_GRAVITY, 45, 0, 0, 0)); + protected void onLineCountChanged(int oldLineCount, int newLineCount) { - recordTimeContainer = new LinearLayout(context); - recordTimeContainer.setOrientation(LinearLayout.HORIZONTAL); - recordTimeContainer.setPadding(AndroidUtilities.dp(13), 0, 0, 0); - recordTimeContainer.setFocusable(false); - recordPanel.addView(recordTimeContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER_VERTICAL)); - slideText.bringToFront(); + } - recordDot = new RecordDot(context); - recordTimeContainer.addView(recordDot, LayoutHelper.createLinear(28, 28, Gravity.CENTER_VERTICAL, 0, 0, 0, 0)); + private void startLockTransition() { + startLockTransition(true); + } - recordTimerView = new TimerView(context); - recordTimeContainer.addView(recordTimerView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER_VERTICAL, 6, 0, 0, 0)); + private void startLockTransition(boolean animate) { + AnimatorSet animatorSet = new AnimatorSet(); + if (!NekoConfig.disableVibration.Bool() && animate) { + performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } - sendButtonContainer = new FrameLayout(context) { - @Override - protected boolean drawChild(Canvas canvas, View child, long drawingTime) { - if (child == sendButton && textTransitionIsRunning) { - return true; - } - return super.drawChild(canvas, child, drawingTime); - } - }; - sendButtonContainer.setClipChildren(false); - sendButtonContainer.setClipToPadding(false); - textFieldContainer.addView(sendButtonContainer, LayoutHelper.createFrame(48, 48, Gravity.BOTTOM | Gravity.RIGHT)); + ObjectAnimator translate = ObjectAnimator.ofFloat(recordCircle, "lockAnimatedTranslation", recordCircle.startTranslation); + translate.setStartDelay(animate ? 100 : 1); + translate.setDuration(animate ? 
350 : 1); - audioVideoButtonContainer = new FrameLayout(context); - audioVideoButtonContainer.setSoundEffectsEnabled(false); - sendButtonContainer.addView(audioVideoButtonContainer, LayoutHelper.createFrame(48, 48)); - if (NekoConfig.useChatAttachMediaMenu.Bool()) { - audioVideoButtonContainer.setOnClickListener(v -> { - if (recordCircle.isSendButtonVisible()) { - if (!hasRecordVideo || calledRecordRunnable) { - startedDraggingX = -1; - if (hasRecordVideo && isInVideoMode) { - delegate.needStartRecordVideo(1, true, 0); - } else { - if (recordingAudioVideo && isInScheduleMode()) { - AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), (notify, scheduleDate) -> MediaController.getInstance().stopRecording(1, notify, scheduleDate), () -> MediaController.getInstance().stopRecording(0, false, 0), null); - } - MediaController.getInstance().stopRecording(isInScheduleMode() ? 3 : 1, true, 0); - delegate.needStartRecordAudio(0); - } - recordingAudioVideo = false; - updateRecordInterface(RECORD_STATE_SENDING); - } - return; - } - onMenuClick(v); - }); - } else { - audioVideoButtonContainer.setOnTouchListener((view, motionEvent) -> { - audioVideoButtonContainer.setFocusable(true); - audioVideoButtonContainer.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_YES); - if (motionEvent.getAction() == MotionEvent.ACTION_DOWN) { - if (recordCircle.isSendButtonVisible()) { - if (!hasRecordVideo || calledRecordRunnable) { - startedDraggingX = -1; - if (hasRecordVideo && isInVideoMode()) { - delegate.needStartRecordVideo(1, true, 0); - } else { - if (recordingAudioVideo && isInScheduleMode()) { - AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), (notify, scheduleDate) -> MediaController.getInstance().stopRecording(1, notify, scheduleDate), () -> MediaController.getInstance().stopRecording(0, false, 0), resourcesProvider); - } - MediaController.getInstance().stopRecording(isInScheduleMode() ? 
3 : 1, true, 0); - delegate.needStartRecordAudio(0); - } - recordingAudioVideo = false; - messageTransitionIsRunning = false; - AndroidUtilities.runOnUIThread(moveToSendStateRunnable = () -> { - moveToSendStateRunnable = null; - updateRecordInterface(RECORD_STATE_SENDING); - }, 200); - } - return false; - } - if (parentFragment != null) { - TLRPC.Chat chat = parentFragment.getCurrentChat(); - TLRPC.UserFull userFull = parentFragment.getCurrentUserInfo(); - if (chat != null && !ChatObject.canSendMedia(chat) || userFull != null && userFull.voice_messages_forbidden) { - delegate.needShowMediaBanHint(); - return true; - } - } - if (hasRecordVideo) { - calledRecordRunnable = false; - recordAudioVideoRunnableStarted = true; - AndroidUtilities.runOnUIThread(recordAudioVideoRunnable, 150); - } else { - recordAudioVideoRunnable.run(); - } - return true; - } else if (motionEvent.getAction() == MotionEvent.ACTION_UP || motionEvent.getAction() == MotionEvent.ACTION_CANCEL) { - if (motionEvent.getAction() == MotionEvent.ACTION_CANCEL && recordingAudioVideo) { - if (recordCircle.slideToCancelProgress < 0.7f) { - if (hasRecordVideo && isInVideoMode()) { - CameraController.getInstance().cancelOnInitRunnable(onFinishInitCameraRunnable); - delegate.needStartRecordVideo(2, true, 0); - } else { - delegate.needStartRecordAudio(0); - MediaController.getInstance().stopRecording(0, false, 0); - } - recordingAudioVideo = false; - updateRecordInterface(RECORD_STATE_CANCEL_BY_GESTURE); - } else { - recordCircle.sendButtonVisible = true; - startLockTransition(); - } - return false; - } - if (recordCircle.isSendButtonVisible() || recordedAudioPanel.getVisibility() == VISIBLE) { - if (recordAudioVideoRunnableStarted) { - AndroidUtilities.cancelRunOnUIThread(recordAudioVideoRunnable); - } - return false; - } - - float x = motionEvent.getX() + audioVideoButtonContainer.getX(); - float dist = (x - startedDraggingX); - float alpha = 1.0f + dist / distCanMove; - if (alpha < 0.45) { - if (hasRecordVideo && isInVideoMode()) { - CameraController.getInstance().cancelOnInitRunnable(onFinishInitCameraRunnable); - delegate.needStartRecordVideo(2, true, 0); - } else { - delegate.needStartRecordAudio(0); - MediaController.getInstance().stopRecording(0, false, 0); - } - recordingAudioVideo = false; - updateRecordInterface(RECORD_STATE_CANCEL_BY_GESTURE); - } else { - if (recordAudioVideoRunnableStarted) { - AndroidUtilities.cancelRunOnUIThread(recordAudioVideoRunnable); - delegate.onSwitchRecordMode(!isInVideoMode()); - setRecordVideoButtonVisible(!isInVideoMode(), true); - if (!NekoConfig.disableVibration.Bool()) { - performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP); - } - sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_CLICKED); - } else if (!hasRecordVideo || calledRecordRunnable) { - startedDraggingX = -1; - if (hasRecordVideo && isInVideoMode()) { - CameraController.getInstance().cancelOnInitRunnable(onFinishInitCameraRunnable); - delegate.needStartRecordVideo(NekoConfig.confirmAVMessage.Bool() ? 
3 : 1, true, 0); - } else { - if (!NekoConfig.confirmAVMessage.Bool()) { - if (recordingAudioVideo && isInScheduleMode()) { - AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), (notify, scheduleDate) -> MediaController.getInstance().stopRecording(1, notify, scheduleDate), () -> MediaController.getInstance().stopRecording(0, false, 0), resourcesProvider); - } - } - delegate.needStartRecordAudio(0); - if (!NekoConfig.confirmAVMessage.Bool()) { - MediaController.getInstance().stopRecording(isInScheduleMode() ? 3 : 1, true, 0); - } else { - MediaController.getInstance().stopRecording(2, true, 0); - } - } - if (!NekoConfig.confirmAVMessage.Bool()) { - recordingAudioVideo = false; - messageTransitionIsRunning = false; - AndroidUtilities.runOnUIThread(moveToSendStateRunnable = () -> { - moveToSendStateRunnable = null; - updateRecordInterface(RECORD_STATE_SENDING); - }, 500); - } - } - } - return true; - } else if (motionEvent.getAction() == MotionEvent.ACTION_MOVE && recordingAudioVideo) { - float x = motionEvent.getX(); - float y = motionEvent.getY(); - if (recordCircle.isSendButtonVisible()) { - return false; - } - if (recordCircle.setLockTranslation(y) == 2) { - startLockTransition(); - return false; - } else { - recordCircle.setMovingCords(x, y); - } - - if (startedDraggingX == -1) { - startedDraggingX = x; - distCanMove = (float) (sizeNotifierLayout.getMeasuredWidth() * 0.35); - if (distCanMove > AndroidUtilities.dp(140)) { - distCanMove = AndroidUtilities.dp(140); - } - } - - x = x + audioVideoButtonContainer.getX(); - float dist = (x - startedDraggingX); - float alpha = 1.0f + dist / distCanMove; - if (startedDraggingX != -1) { - if (alpha > 1) { - alpha = 1; - } else if (alpha < 0) { - alpha = 0; - } - slideText.setSlideX(alpha); - recordCircle.setSlideToCancelProgress(alpha); - } - - if (alpha == 0) { - if (hasRecordVideo && isInVideoMode()) { - CameraController.getInstance().cancelOnInitRunnable(onFinishInitCameraRunnable); - delegate.needStartRecordVideo(2, true, 0); - } else { - delegate.needStartRecordAudio(0); - MediaController.getInstance().stopRecording(0, false, 0); - } - recordingAudioVideo = false; - updateRecordInterface(RECORD_STATE_CANCEL_BY_GESTURE); - } - return true; - } - view.onTouchEvent(motionEvent); - return true; - }); - } - - audioVideoSendButton = new ChatActivityEnterViewAnimatedIconView(context, this); - audioVideoSendButton.setFocusable(true); - audioVideoSendButton.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_YES); - audioVideoSendButton.setAccessibilityDelegate(mediaMessageButtonsDelegate); - padding = AndroidUtilities.dp(9.5f); - audioVideoSendButton.setPadding(padding, padding, padding, padding); - audioVideoSendButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.SRC_IN)); + ObjectAnimator snap = ObjectAnimator.ofFloat(recordCircle, "snapAnimationProgress", 1f); + snap.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + snap.setDuration(animate ? 
250 : 1); - if (Build.VERSION.SDK_INT >= 21 && NekoConfig.useChatAttachMediaMenu.Bool()) { - audioVideoSendButton.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_listSelector))); - } - audioVideoButtonContainer.addView(audioVideoSendButton, LayoutHelper.createFrame(48, 48)); + SharedConfig.removeLockRecordAudioVideoHint(); - recordCircle = new RecordCircle(context); - recordCircle.setVisibility(GONE); - sizeNotifierLayout.addView(recordCircle, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM, 0, 0, 0, 0)); + animatorSet.playTogether( + snap, + translate, + ObjectAnimator.ofFloat(recordCircle, "slideToCancelProgress", 1f).setDuration(animate ? 200 : 1), + ObjectAnimator.ofFloat(slideText, "cancelToProgress", 1f) + ); - cancelBotButton = new ImageView(context); - cancelBotButton.setVisibility(INVISIBLE); - cancelBotButton.setScaleType(ImageView.ScaleType.CENTER_INSIDE); - cancelBotButton.setImageDrawable(progressDrawable = new CloseProgressDrawable2() { - @Override - protected int getCurrentColor() { - return Theme.getColor(Theme.key_chat_messagePanelCancelInlineBot); - } - }); - cancelBotButton.setContentDescription(LocaleController.getString("Cancel", R.string.Cancel)); - cancelBotButton.setSoundEffectsEnabled(false); - cancelBotButton.setScaleX(0.1f); - cancelBotButton.setScaleY(0.1f); - cancelBotButton.setAlpha(0.0f); - if (Build.VERSION.SDK_INT >= 21) { - cancelBotButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); - } - sendButtonContainer.addView(cancelBotButton, LayoutHelper.createFrame(48, 48)); - cancelBotButton.setOnClickListener(view -> { - String text = messageEditText.getText().toString(); - int idx = text.indexOf(' '); - if (idx == -1 || idx == text.length() - 1) { - setFieldText(""); - } else { - setFieldText(text.substring(0, idx + 1)); - } - }); + animatorSet.start(); + } - if (isInScheduleMode()) { - sendButtonDrawable = context.getResources().getDrawable(R.drawable.input_schedule).mutate(); - sendButtonInverseDrawable = context.getResources().getDrawable(R.drawable.input_schedule).mutate(); - inactinveSendButtonDrawable = context.getResources().getDrawable(R.drawable.input_schedule).mutate(); - } else { - sendButtonDrawable = context.getResources().getDrawable(R.drawable.ic_send).mutate(); - sendButtonInverseDrawable = context.getResources().getDrawable(R.drawable.ic_send).mutate(); - inactinveSendButtonDrawable = context.getResources().getDrawable(R.drawable.ic_send).mutate(); + public int getBackgroundTop() { + int t = getTop(); + if (topView != null && topView.getVisibility() == View.VISIBLE) { + t += topView.getLayoutParams().height; } - sendButton = new View(context) { - - private int drawableColor; - private float animationProgress; - private float animateBounce; - private long lastAnimationTime; - private float animationDuration; - private int prevColorType; - - @Override - protected void onDraw(Canvas canvas) { - int x = (getMeasuredWidth() - sendButtonDrawable.getIntrinsicWidth()) / 2; - int y = (getMeasuredHeight() - sendButtonDrawable.getIntrinsicHeight()) / 2; - if (isInScheduleMode()) { - y -= AndroidUtilities.dp(1); - } else { - x += AndroidUtilities.dp(2); - } - - int color; - boolean showingPopup; - int colorType; - if (showingPopup = (sendPopupWindow != null && sendPopupWindow.isShowing())) { - color = getThemedColor(Theme.key_chat_messagePanelVoicePressed); - colorType = 1; - } else { - color = getThemedColor(Theme.key_chat_messagePanelSend); 
- colorType = 2; - } - if (color != drawableColor) { - lastAnimationTime = SystemClock.elapsedRealtime(); - if (prevColorType != 0 && prevColorType != colorType) { - animationProgress = 0.0f; - if (showingPopup) { - animationDuration = 200.0f; - } else { - animationDuration = 120.0f; - } - } else { - animationProgress = 1.0f; - } - prevColorType = colorType; - drawableColor = color; - sendButtonDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelSend), PorterDuff.Mode.SRC_IN)); - int c = getThemedColor(Theme.key_chat_messagePanelIcons); - inactinveSendButtonDrawable.setColorFilter(new PorterDuffColorFilter(Color.argb(0xb4, Color.red(c), Color.green(c), Color.blue(c)), PorterDuff.Mode.SRC_IN)); - sendButtonInverseDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelVoicePressed), PorterDuff.Mode.SRC_IN)); - } - if (animationProgress < 1.0f) { - long newTime = SystemClock.elapsedRealtime(); - long dt = newTime - lastAnimationTime; - animationProgress += dt / animationDuration; - if (animationProgress > 1.0f) { - animationProgress = 1.0f; - } - lastAnimationTime = newTime; - invalidate(); - } - if (!showingPopup) { - if (slowModeTimer == Integer.MAX_VALUE && !isInScheduleMode()) { - inactinveSendButtonDrawable.setBounds(x, y, x + sendButtonDrawable.getIntrinsicWidth(), y + sendButtonDrawable.getIntrinsicHeight()); - inactinveSendButtonDrawable.draw(canvas); - } else { - sendButtonDrawable.setBounds(x, y, x + sendButtonDrawable.getIntrinsicWidth(), y + sendButtonDrawable.getIntrinsicHeight()); - sendButtonDrawable.draw(canvas); - } - } - if (showingPopup || animationProgress != 1.0f) { - Theme.dialogs_onlineCirclePaint.setColor(getThemedColor(Theme.key_chat_messagePanelSend)); - int rad = AndroidUtilities.dp(20); - if (showingPopup) { - sendButtonInverseDrawable.setAlpha(255); - float p = animationProgress; - if (p <= 0.25f) { - float progress = p / 0.25f; - rad += AndroidUtilities.dp(2) * CubicBezierInterpolator.EASE_IN.getInterpolation(progress); - } else { - p -= 0.25f; - if (p <= 0.5f) { - float progress = p / 0.5f; - rad += AndroidUtilities.dp(2) - AndroidUtilities.dp(3) * CubicBezierInterpolator.EASE_IN.getInterpolation(progress); - } else { - p -= 0.5f; - float progress = p / 0.25f; - rad += -AndroidUtilities.dp(1) + AndroidUtilities.dp(1) * CubicBezierInterpolator.EASE_IN.getInterpolation(progress); - } - } - } else { - int alpha = (int) (255 * (1.0f - animationProgress)); - Theme.dialogs_onlineCirclePaint.setAlpha(alpha); - sendButtonInverseDrawable.setAlpha(alpha); - } - canvas.drawCircle(getMeasuredWidth() / 2, getMeasuredHeight() / 2, rad, Theme.dialogs_onlineCirclePaint); - sendButtonInverseDrawable.setBounds(x, y, x + sendButtonDrawable.getIntrinsicWidth(), y + sendButtonDrawable.getIntrinsicHeight()); - sendButtonInverseDrawable.draw(canvas); - } - } + return t; + } - @Override - public boolean onTouchEvent(MotionEvent event) { - if (getAlpha() <= 0f) { // for accessibility - return false; + @Override + protected boolean drawChild(Canvas canvas, View child, long drawingTime) { + boolean clip = child == topView || child == textFieldContainer; + if (clip) { + canvas.save(); + if (child == textFieldContainer) { + int top = (int) (animatedTop + AndroidUtilities.dp(2) + chatSearchExpandOffset); + if (topView != null && topView.getVisibility() == View.VISIBLE) { + top += topView.getHeight(); } - return super.onTouchEvent(event); + canvas.clipRect(0, top, getMeasuredWidth(), getMeasuredHeight()); + } else { 
+ canvas.clipRect(0, animatedTop, getMeasuredWidth(), animatedTop + child.getLayoutParams().height + AndroidUtilities.dp(2)); } - }; - sendButton.setVisibility(INVISIBLE); - int color = getThemedColor(Theme.key_chat_messagePanelSend); - sendButton.setContentDescription(LocaleController.getString("Send", R.string.Send)); - sendButton.setSoundEffectsEnabled(false); - sendButton.setScaleX(0.1f); - sendButton.setScaleY(0.1f); - sendButton.setAlpha(0.0f); - if (Build.VERSION.SDK_INT >= 21) { - sendButton.setBackgroundDrawable(Theme.createSelectorDrawable(Color.argb(24, Color.red(color), Color.green(color), Color.blue(color)), 1)); } - sendButtonContainer.addView(sendButton, LayoutHelper.createFrame(48, 48)); - sendButton.setOnClickListener(view -> { - if ((sendPopupWindow != null && sendPopupWindow.isShowing()) || (runningAnimationAudio != null && runningAnimationAudio.isRunning()) || moveToSendStateRunnable != null) { - return; + boolean result = super.drawChild(canvas, child, drawingTime); + if (clip) { + canvas.restore(); + } + return result; + } + + public boolean allowBlur = true; + Paint backgroundPaint = new Paint(); + private float composeShadowAlpha = 1f; + + @Override + protected void onDraw(Canvas canvas) { + int top = animatedTop; + top += Theme.chat_composeShadowDrawable.getIntrinsicHeight() * (1f - composeShadowAlpha); + if (topView != null && topView.getVisibility() == View.VISIBLE) { + top += (1f - topViewEnterProgress) * topView.getLayoutParams().height; + } + int bottom = top + Theme.chat_composeShadowDrawable.getIntrinsicHeight(); + + Theme.chat_composeShadowDrawable.setAlpha((int) (composeShadowAlpha * 0xFF)); + Theme.chat_composeShadowDrawable.setBounds(0, top, getMeasuredWidth(), bottom); + Theme.chat_composeShadowDrawable.draw(canvas); + bottom += chatSearchExpandOffset; + if (allowBlur) { + backgroundPaint.setColor(getThemedColor(Theme.key_chat_messagePanelBackground)); + if (SharedConfig.chatBlurEnabled() && sizeNotifierLayout != null) { + AndroidUtilities.rectTmp2.set(0, bottom, getWidth(), getHeight()); + sizeNotifierLayout.drawBlurRect(canvas, getTop(), AndroidUtilities.rectTmp2, backgroundPaint, false); + } else { + canvas.drawRect(0, bottom, getWidth(), getHeight(), backgroundPaint); } - sendMessage(); - }); - sendButton.setOnLongClickListener(this::onSendLongClick); + } else { + canvas.drawRect(0, bottom, getWidth(), getHeight(), getThemedPaint(Theme.key_paint_chatComposeBackground)); + } + } - slowModeButton = new SimpleTextView(context); - slowModeButton.setTextSize(18); - slowModeButton.setVisibility(INVISIBLE); - slowModeButton.setSoundEffectsEnabled(false); - slowModeButton.setScaleX(0.1f); - slowModeButton.setScaleY(0.1f); - slowModeButton.setAlpha(0.0f); - slowModeButton.setPadding(0, 0, AndroidUtilities.dp(13), 0); - slowModeButton.setGravity(Gravity.RIGHT | Gravity.CENTER_VERTICAL); - slowModeButton.setTextColor(getThemedColor(Theme.key_chat_messagePanelIcons)); - sendButtonContainer.addView(slowModeButton, LayoutHelper.createFrame(64, 48, Gravity.RIGHT | Gravity.TOP)); - slowModeButton.setOnClickListener(v -> { - if (delegate != null) { - delegate.onUpdateSlowModeButton(slowModeButton, true, slowModeButton.getText()); + @Override + public boolean hasOverlappingRendering() { + return false; + } + + private boolean isInInput; + private ActionBarPopupWindow menuPopupWindow; + + private boolean checkMenuPermissions() { + if (Build.VERSION.SDK_INT < 23) { + return true; + } + if (isInVideoMode()) { + boolean hasAudio = 
parentActivity.checkSelfPermission(Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED; + boolean hasVideo = parentActivity.checkSelfPermission(Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED; + if (!hasAudio || !hasVideo) { + String[] permissions = new String[!hasAudio && !hasVideo ? 2 : 1]; + if (!hasAudio && !hasVideo) { + permissions[0] = Manifest.permission.RECORD_AUDIO; + permissions[1] = Manifest.permission.CAMERA; + } else if (!hasAudio) { + permissions[0] = Manifest.permission.RECORD_AUDIO; + } else { + permissions[0] = Manifest.permission.CAMERA; + } + parentActivity.requestPermissions(permissions, BasePermissionsActivity.REQUEST_CODE_VIDEO_MESSAGE); + return false; } - }); - slowModeButton.setOnLongClickListener(v -> { - if (messageEditText.length() == 0) { + } else { + if (parentActivity.checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) { + parentActivity.requestPermissions(new String[]{Manifest.permission.RECORD_AUDIO}, 3); return false; } - return onSendLongClick(v); - }); + } + return true; + } - expandStickersButton = new ImageView(context) { - @Override - public boolean onTouchEvent(MotionEvent event) { - if (getAlpha() <= 0f) { // for accessibility - return false; - } - return super.onTouchEvent(event); - } - }; - expandStickersButton.setScaleType(ImageView.ScaleType.CENTER); - expandStickersButton.setImageDrawable(stickersArrow = new AnimatedArrowDrawable(getThemedColor(Theme.key_chat_messagePanelIcons), false)); - expandStickersButton.setVisibility(GONE); - expandStickersButton.setScaleX(0.1f); - expandStickersButton.setScaleY(0.1f); - expandStickersButton.setAlpha(0.0f); - if (Build.VERSION.SDK_INT >= 21) { - expandStickersButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); + private void onMenuClick(View view) { + if (parentFragment == null) { + return; } - sendButtonContainer.addView(expandStickersButton, LayoutHelper.createFrame(48, 48)); - expandStickersButton.setOnClickListener(v -> { - if (expandStickersButton.getVisibility() != VISIBLE || expandStickersButton.getAlpha() != 1.0f || waitingForKeyboardOpen || (keyboardVisible && messageEditText.isFocused())) { - return; - } - if (stickersExpanded) { - if (searchingType != 0) { - setSearchingTypeInternal(0, true); - emojiView.closeSearch(true); - emojiView.hideSearchKeyboard(); - if (emojiTabOpen) { - checkSendButton(true); - } - } else if (!stickersDragging) { - if (emojiView != null) { - emojiView.showSearchField(false); + ActionBarPopupWindow.ActionBarPopupWindowLayout menuPopupLayout = new ActionBarPopupWindow.ActionBarPopupWindowLayout(parentActivity); + + + menuPopupLayout.setAnimationEnabled(false); + menuPopupLayout.setOnTouchListener(new OnTouchListener() { + + private android.graphics.Rect popupRect = new android.graphics.Rect(); + + @Override + public boolean onTouch(View v, MotionEvent event) { + if (event.getActionMasked() == MotionEvent.ACTION_DOWN) { + if (menuPopupWindow != null && menuPopupWindow.isShowing()) { + v.getHitRect(popupRect); + if (!popupRect.contains((int) event.getX(), (int) event.getY())) { + menuPopupWindow.dismiss(); + } } } - } else if (!stickersDragging) { - emojiView.showSearchField(true); - } - if (!stickersDragging) { - setStickersExpanded(!stickersExpanded, true, false); + return false; } }); - expandStickersButton.setContentDescription(LocaleController.getString("AccDescrExpandPanel", R.string.AccDescrExpandPanel)); - doneButtonContainer = new 
FrameLayout(context); - doneButtonContainer.setVisibility(GONE); - textFieldContainer.addView(doneButtonContainer, LayoutHelper.createFrame(48, 48, Gravity.BOTTOM | Gravity.RIGHT)); - doneButtonContainer.setOnClickListener(view -> doneEditingMessage()); - doneButtonContainer.setOnLongClickListener(view -> { - if (messageEditText.getText().length() == 0 || !containsMarkdown(messageEditText.getText())) { - return false; - } - sendPopupLayout = new ActionBarPopupWindow.ActionBarPopupWindowLayout(parentActivity, resourcesProvider); - sendPopupLayout.setAnimationEnabled(false); - sendPopupLayout.setOnTouchListener(new OnTouchListener() { - private final android.graphics.Rect popupRect = new android.graphics.Rect(); + int a = 0; - @Override - public boolean onTouch(View v, MotionEvent event) { - if (event.getActionMasked() == MotionEvent.ACTION_DOWN) { - if (sendPopupWindow != null && sendPopupWindow.isShowing()) { - v.getHitRect(popupRect); - if (!popupRect.contains((int) event.getX(), (int) event.getY())) { - sendPopupWindow.dismiss(); - sendPopupLayout = null; - } - } - } - return false; + ActionBarMenuSubItem cell = new ActionBarMenuSubItem(getContext(), true, false); + int dlps = delegate.getDisableLinkPreviewStatus(); + + if (!isInInput) { + + cell.setTextAndIcon(LocaleController.getString("ChatAttachEnterMenuRecordAudio", R.string.ChatAttachEnterMenuRecordAudio), R.drawable.input_mic); + cell.setOnClickListener(v -> { + if (menuPopupWindow != null && menuPopupWindow.isShowing()) { + menuPopupWindow.dismiss(); } - }); - sendPopupLayout.setDispatchKeyEventListener(keyEvent -> { - if (keyEvent.getKeyCode() == KeyEvent.KEYCODE_BACK && keyEvent.getRepeatCount() == 0 && sendPopupWindow != null && sendPopupWindow.isShowing()) { - sendPopupWindow.dismiss(); + + if (parentFragment != null) { + TLRPC.Chat chat = parentFragment.getCurrentChat(); + if (chat != null && !ChatObject.canSendVoice(chat)) { + delegate.needShowMediaBanHint(); + return; + } } - }); - sendPopupLayout.setShownFromBottom(false); - ActionBarMenuSubItem sendWithoutMarkdownButton = new ActionBarMenuSubItem(getContext(), false, false, resourcesProvider); - sendWithoutMarkdownButton.setTextAndIcon(LocaleController.getString("SaveWithoutMarkdown", R.string.SaveWithoutMarkdown), R.drawable.round_code_off_white); - sendWithoutMarkdownButton.setMinimumWidth(AndroidUtilities.dp(196)); - sendWithoutMarkdownButton.setOnClickListener(v -> { - if (sendPopupWindow != null && sendPopupWindow.isShowing()) { - sendPopupWindow.dismiss(); + + isInVideoMode = false; + if (checkMenuPermissions()) { + recordAudioVideoRunnable.run(); + delegate.onSwitchRecordMode(isInVideoMode); + setRecordVideoButtonVisible(isInVideoMode, true); + if (!NekoConfig.disableVibration.Bool()) { + performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP); + } + recordCircle.sendButtonVisible = true; + startLockTransition(false); } - doneEditingMessage(false); - sendPopupLayout = null; }); - sendPopupLayout.addView(sendWithoutMarkdownButton, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 48)); - sendPopupLayout.setupRadialSelectors(getThemedColor(Theme.key_dialogButtonSelector)); - sendPopupWindow = new ActionBarPopupWindow(sendPopupLayout, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT) { - @Override - public void dismiss() { - super.dismiss(); - doneButtonContainer.invalidate(); - sendPopupLayout = null; - } - }; - sendPopupWindow.setAnimationEnabled(false); - sendPopupWindow.setAnimationStyle(R.style.PopupContextAnimation2); - 
sendPopupWindow.setOutsideTouchable(true); - sendPopupWindow.setClippingEnabled(true); - sendPopupWindow.setInputMethodMode(ActionBarPopupWindow.INPUT_METHOD_NOT_NEEDED); - sendPopupWindow.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_UNSPECIFIED); - sendPopupWindow.getContentView().setFocusableInTouchMode(true); - sendPopupLayout.measure(MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), MeasureSpec.AT_MOST)); - sendPopupWindow.setFocusable(true); - view.getLocationInWindow(location); - int y; - if (keyboardVisible && ChatActivityEnterView.this.getMeasuredHeight() > AndroidUtilities.dp(topView != null && topView.getVisibility() == VISIBLE ? 48 + 58 : 58)) { - y = location[1] + view.getMeasuredHeight(); - } else { - y = location[1] - sendPopupLayout.getMeasuredHeight() - AndroidUtilities.dp(2); - } - sendPopupWindow.showAtLocation(view, Gravity.LEFT | Gravity.TOP, location[0] + view.getMeasuredWidth() - sendPopupLayout.getMeasuredWidth() + AndroidUtilities.dp(8), y); - sendPopupWindow.dimBehind(); - doneButtonContainer.invalidate(); - if (!NekoConfig.disableVibration.Bool()) { - try { - view.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignore) { - } - } - return false; - }); - Drawable doneCircleDrawable = Theme.createCircleDrawable(AndroidUtilities.dp(16), getThemedColor(Theme.key_chat_messagePanelSend)); - doneCheckDrawable = context.getResources().getDrawable(R.drawable.input_done).mutate(); - doneCheckDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_messagePanelVoicePressed), PorterDuff.Mode.SRC_IN)); - CombinedDrawable combinedDrawable = new CombinedDrawable(doneCircleDrawable, doneCheckDrawable, 0, AndroidUtilities.dp(1)); - combinedDrawable.setCustomSize(AndroidUtilities.dp(32), AndroidUtilities.dp(32)); + cell.setMinimumWidth(AndroidUtilities.dp(196)); + menuPopupLayout.addView(cell, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, LocaleController.isRTL ? 
Gravity.RIGHT : Gravity.LEFT, 0, 48 * a++, 0, 0)); + + if (SharedConfig.inappCamera) { + + cell = new ActionBarMenuSubItem(getContext(), false, dlps == 0); + + cell.setTextAndIcon(LocaleController.getString("ChatAttachEnterMenuRecordVideo", R.string.ChatAttachEnterMenuRecordVideo), R.drawable.input_video); + cell.setOnClickListener(v -> { + if (menuPopupWindow != null && menuPopupWindow.isShowing()) { + menuPopupWindow.dismiss(); + } + + if (parentFragment != null) { + TLRPC.Chat chat = parentFragment.getCurrentChat(); + if (chat != null && !ChatObject.canSendRoundVideo(chat)) { + delegate.needShowMediaBanHint(); + return; + } + } + + isInVideoMode = true; + if (checkMenuPermissions()) { + recordAudioVideoRunnable.run(); + delegate.onSwitchRecordMode(isInVideoMode); + setRecordVideoButtonVisible(isInVideoMode, true); + if (!NekoConfig.disableVibration.Bool()) { + performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP); + } + recordCircle.sendButtonVisible = true; + startLockTransition(false); + } + }); - doneButtonImage = new ImageView(context); - doneButtonImage.setScaleType(ImageView.ScaleType.CENTER); - doneButtonImage.setImageDrawable(combinedDrawable); - doneButtonImage.setContentDescription(LocaleController.getString("Done", R.string.Done)); - doneButtonContainer.addView(doneButtonImage, LayoutHelper.createFrame(48, 48)); + cell.setMinimumWidth(AndroidUtilities.dp(196)); + menuPopupLayout.addView(cell, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT, 0, 48 * a++, 0, 0)); - doneButtonProgress = new ContextProgressView(context, 0); - doneButtonProgress.setVisibility(View.INVISIBLE); - doneButtonContainer.addView(doneButtonProgress, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + } - SharedPreferences sharedPreferences = MessagesController.getGlobalEmojiSettings(); - keyboardHeight = sharedPreferences.getInt("kbd_height", AndroidUtilities.dp(200)); - keyboardHeightLand = sharedPreferences.getInt("kbd_height_land3", AndroidUtilities.dp(200)); + } else { - setRecordVideoButtonVisible(false, false); - checkSendButton(false); - checkChannelRights(); + if (StrUtil.isNotBlank(NekoConfig.openPGPApp.String())) { - botWebViewButton = new ChatActivityBotWebViewButton(context); - botWebViewButton.setVisibility(GONE); - botWebViewButton.setBotMenuButton(botCommandsMenuButton); - frameLayout.addView(botWebViewButton, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.BOTTOM)); - } + cell.setTextAndIcon(LocaleController.getString("Sign", R.string.Sign), R.drawable.baseline_vpn_key_24); + cell.setOnClickListener(v -> { + if (menuPopupWindow != null && menuPopupWindow.isShowing()) { + menuPopupWindow.dismiss(); + } + signComment(true); + }); + cell.setOnLongClickListener(v -> { + if (menuPopupWindow != null && menuPopupWindow.isShowing()) { + menuPopupWindow.dismiss(); + } + signComment(false); + return true; + }); + cell.setMinimumWidth(AndroidUtilities.dp(196)); + menuPopupLayout.addView(cell, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, LocaleController.isRTL ? 
Gravity.RIGHT : Gravity.LEFT, 0, 48 * a++, 0, 0)); - private void openWebViewMenu() { - Runnable onRequestWebView = () -> { - AndroidUtilities.hideKeyboard(this); - if (AndroidUtilities.isTablet()) { - BotWebViewSheet webViewSheet = new BotWebViewSheet(getContext(), parentFragment.getResourceProvider()); - webViewSheet.setParentActivity(parentActivity); - webViewSheet.requestWebView(currentAccount, dialog_id, dialog_id, botMenuWebViewTitle, botMenuWebViewUrl, BotWebViewSheet.TYPE_BOT_MENU_BUTTON, 0, false); - webViewSheet.show(); + cell = new ActionBarMenuSubItem(getContext(), false, false); + + } - botCommandsMenuButton.setOpened(false); + TLRPC.Chat chat = parentFragment.getCurrentChat(); + TLRPC.User user = parentFragment.getCurrentUser(); + + long chatId; + if (chat != null) { + chatId = chat.id; + } else if (user != null) { + chatId = user.id; } else { - botWebViewMenuContainer.show(currentAccount, dialog_id, botMenuWebViewUrl); + chatId = -1; } - }; - if (SharedPrefsHelper.isWebViewConfirmShown(currentAccount, dialog_id)) { - onRequestWebView.run(); - } else { - new AlertDialog.Builder(parentFragment.getParentActivity()) - .setTitle(LocaleController.getString(R.string.BotOpenPageTitle)) - .setMessage(AndroidUtilities.replaceTags(LocaleController.formatString(R.string.BotOpenPageMessage, UserObject.getUserName(MessagesController.getInstance(currentAccount).getUser(dialog_id))))) - .setPositiveButton(LocaleController.getString(R.string.OK), (dialog, which) -> { - onRequestWebView.run(); - SharedPrefsHelper.setWebViewConfirmShown(currentAccount, dialog_id, true); - }) - .setNegativeButton(LocaleController.getString(R.string.Cancel), null) - .setOnDismissListener(dialog -> { - if (!SharedPrefsHelper.isWebViewConfirmShown(currentAccount, dialog_id)) { - botCommandsMenuButton.setOpened(false); + cell.setTextAndIcon(LocaleController.getString("Translate", R.string.Translate), R.drawable.ic_translate); + cell.setOnClickListener(v -> { + if (menuPopupWindow != null && menuPopupWindow.isShowing()) { + menuPopupWindow.dismiss(); + } + translateComment(TranslateDb.getChatLanguage(chatId, TranslatorKt.getCode2Locale(NekoConfig.translateInputLang.String()))); + }); + ActionBarMenuSubItem finalCell = cell; + cell.setOnLongClickListener(v -> { + Translator.showTargetLangSelect(finalCell, true, (locale) -> { + if (menuPopupWindow != null && menuPopupWindow.isShowing()) { + menuPopupWindow.dismiss(); + } + translateComment(locale); + TranslateDb.saveChatLanguage(chatId, locale); + return Unit.INSTANCE; + }); + return true; + }); + cell.setMinimumWidth(AndroidUtilities.dp(196)); + menuPopupLayout.addView(cell, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, LocaleController.isRTL ? 
Gravity.RIGHT : Gravity.LEFT, 0, 48 * a++, 0, 0)); + cell = new ActionBarMenuSubItem(getContext(), false, dlps == 0); + + cell.setTextAndIcon(LocaleController.getString("Translate", R.string.OpenCC), R.drawable.ic_translate); + ActionBarMenuSubItem finalCell1 = cell; + cell.setOnClickListener(v -> { + String ccTarget = TranslateDb.getChatCCTarget(chatId, NekoConfig.ccInputLang.String()); + if (ccTarget == null || StringsKt.isBlank(ccTarget)) { + Translator.showCCTargetSelect(finalCell1, (target) -> { + if (menuPopupWindow != null && menuPopupWindow.isShowing()) { + menuPopupWindow.dismiss(); } - }) - .show(); - } - } + ccComment(target); + TranslateDb.saveChatCCTarget(chatId, target); + return Unit.INSTANCE; + }); + return; + } + if (menuPopupWindow != null && menuPopupWindow.isShowing()) { + menuPopupWindow.dismiss(); + } + ccComment(ccTarget); + }); + cell.setOnLongClickListener(v -> { + Translator.showCCTargetSelect(finalCell1, (target) -> { + if (menuPopupWindow != null && menuPopupWindow.isShowing()) { + menuPopupWindow.dismiss(); + } + ccComment(target); + TranslateDb.saveChatCCTarget(chatId, target); + return Unit.INSTANCE; + }); + return true; + }); + cell.setMinimumWidth(AndroidUtilities.dp(196)); + menuPopupLayout.addView(cell, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT, 0, 48 * a++, 0, 0)); + cell = new ActionBarMenuSubItem(getContext(), false, dlps == 0); - public void setBotWebViewButtonOffsetX(float offset) { - emojiButton.setTranslationX(offset); - messageEditText.setTranslationX(offset); - attachButton.setTranslationX(offset); - audioVideoSendButton.setTranslationX(offset); - if (botButton != null) { - botButton.setTranslationX(offset); - } - } - public void setComposeShadowAlpha(float alpha) { - composeShadowAlpha = alpha; - invalidate(); - } + cell.setTextAndIcon(LocaleController.getString("ReplaceText", R.string.ReplaceText), R.drawable.msg_edit); + cell.setOnClickListener(v -> { + if (menuPopupWindow != null && menuPopupWindow.isShowing()) { + menuPopupWindow.dismiss(); + } + showReplace(); + }); + cell.setMinimumWidth(AndroidUtilities.dp(196)); + menuPopupLayout.addView(cell, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT, 0, 48 * a++, 0, 0)); - public ChatActivityBotWebViewButton getBotWebViewButton() { - return botWebViewButton; - } + } - public ChatActivity getParentFragment() { - return parentFragment; - } + if (dlps > 0) { + cell = new ActionBarMenuSubItem(getContext(), false, true); - private void checkBotMenu() { - if (botCommandsMenuButton != null) { - boolean wasExpanded = botCommandsMenuButton.expanded; - botCommandsMenuButton.setExpanded(TextUtils.isEmpty(messageEditText.getText()) && !(keyboardVisible || waitingForKeyboardOpen || isPopupShowing()), true); - if (wasExpanded != botCommandsMenuButton.expanded) { - beginDelayedTransition(); - } - } - } + cell.setTextAndIcon(dlps != 1 ? 
+ LocaleController.getString("ChatAttachEnterMenuEnableLinkPreview", R.string.ChatAttachEnterMenuEnableLinkPreview) : + LocaleController.getString("ChatAttachEnterMenuDisableLinkPreview", R.string.ChatAttachEnterMenuDisableLinkPreview), R.drawable.msg_link); - public void forceSmoothKeyboard(boolean smoothKeyboard) { - this.smoothKeyboard = smoothKeyboard && SharedConfig.smoothKeyboard && !AndroidUtilities.isInMultiwindow && (parentFragment == null || !parentFragment.isInBubbleMode()); - } + ActionBarMenuSubItem finalCell = cell; + cell.setOnClickListener(v -> { + if (menuPopupWindow != null && menuPopupWindow.isShowing()) { + menuPopupWindow.dismiss(); + } - protected void onLineCountChanged(int oldLineCount, int newLineCount) { + delegate.toggleDisableLinkPreview(); + messageWebPageSearch = delegate.getDisableLinkPreviewStatus() == 1; - } + finalCell.setTextAndIcon(delegate.getDisableLinkPreviewStatus() != 1 ? + LocaleController.getString("ChatAttachEnterMenuEnableLinkPreview", R.string.ChatAttachEnterMenuEnableLinkPreview) : + LocaleController.getString("ChatAttachEnterMenuDisableLinkPreview", R.string.ChatAttachEnterMenuDisableLinkPreview), R.drawable.msg_link); - private void startLockTransition() { - startLockTransition(true); - } + }); - private void startLockTransition(boolean animate) { - AnimatorSet animatorSet = new AnimatorSet(); - if (!NekoConfig.disableVibration.Bool() && animate) { - performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + cell.setMinimumWidth(AndroidUtilities.dp(196)); + menuPopupLayout.addView(cell, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT, 0, 48 * a++, 0, 0)); } - ObjectAnimator translate = ObjectAnimator.ofFloat(recordCircle, "lockAnimatedTranslation", recordCircle.startTranslation); - translate.setStartDelay(animate ? 100 : 1); - translate.setDuration(animate ? 350 : 1); + menuPopupLayout.setupRadialSelectors(Theme.getColor(Theme.key_dialogButtonSelector)); - ObjectAnimator snap = ObjectAnimator.ofFloat(recordCircle, "snapAnimationProgress", 1f); - snap.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); - snap.setDuration(animate ? 250 : 1); + menuPopupWindow = new ActionBarPopupWindow(menuPopupLayout, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT); + menuPopupWindow.setAnimationEnabled(false); + menuPopupWindow.setAnimationStyle(R.style.PopupContextAnimation2); + menuPopupWindow.setOutsideTouchable(true); + menuPopupWindow.setClippingEnabled(true); + menuPopupWindow.setInputMethodMode(ActionBarPopupWindow.INPUT_METHOD_NOT_NEEDED); + menuPopupWindow.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_UNSPECIFIED); + menuPopupWindow.getContentView().setFocusableInTouchMode(true); + if (delegate != null) { + delegate.onSendLongClick(); + } - SharedConfig.removeLockRecordAudioVideoHint(); - animatorSet.playTogether( - snap, - translate, - ObjectAnimator.ofFloat(recordCircle, "slideToCancelProgress", 1f).setDuration(animate ? 
200 : 1), - ObjectAnimator.ofFloat(slideText, "cancelToProgress", 1f) - ); + menuPopupLayout.measure(MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), MeasureSpec.AT_MOST)); + menuPopupWindow.setFocusable(true); + int[] location = new int[2]; + view.getLocationInWindow(location); + int y; + if (keyboardVisible && ChatActivityEnterView.this.getMeasuredHeight() > AndroidUtilities.dp(topView != null && topView.getVisibility() == VISIBLE ? 48 + 58 : 58)) { + y = location[1] + view.getMeasuredHeight(); + } else { + y = location[1] - menuPopupLayout.getMeasuredHeight() - AndroidUtilities.dp(2); + } + y += AndroidUtilities.dp(48); + int x = location[0] + view.getMeasuredWidth() - menuPopupLayout.getMeasuredWidth() + AndroidUtilities.dp(8); + if (isInInput) { + x += view.getMeasuredWidth(); + } + menuPopupWindow.showAtLocation(view, Gravity.LEFT | Gravity.TOP, x, y); + menuPopupWindow.dimBehind(); - animatorSet.start(); } - public int getBackgroundTop() { - int t = getTop(); - if (topView != null && topView.getVisibility() == View.VISIBLE) { - t += topView.getLayoutParams().height; + private boolean onSendLongClick(View view) { + if (isInScheduleMode()) { + return false; } - return t; - } - @Override - protected boolean drawChild(Canvas canvas, View child, long drawingTime) { - boolean clip = child == topView || child == textFieldContainer; - if (clip) { - canvas.save(); - if (child == textFieldContainer) { - int top = (int) (animatedTop + AndroidUtilities.dp(2) + chatSearchExpandOffset); - if (topView != null && topView.getVisibility() == View.VISIBLE) { - top += topView.getHeight(); + boolean self = parentFragment != null && UserObject.isUserSelf(parentFragment.getCurrentUser()); + + if (sendPopupLayout == null) { + sendPopupLayout = new ActionBarPopupWindow.ActionBarPopupWindowLayout(parentActivity, resourcesProvider); + sendPopupLayout.setAnimationEnabled(false); + sendPopupLayout.setOnTouchListener(new OnTouchListener() { + + private android.graphics.Rect popupRect = new android.graphics.Rect(); + + @Override + public boolean onTouch(View v, MotionEvent event) { + if (event.getActionMasked() == MotionEvent.ACTION_DOWN) { + if (sendPopupWindow != null && sendPopupWindow.isShowing()) { + v.getHitRect(popupRect); + if (!popupRect.contains((int) event.getX(), (int) event.getY())) { + sendPopupWindow.dismiss(); + } + } + } + return false; + } + }); + sendPopupLayout.setDispatchKeyEventListener(keyEvent -> { + if (keyEvent.getKeyCode() == KeyEvent.KEYCODE_BACK && keyEvent.getRepeatCount() == 0 && sendPopupWindow != null && sendPopupWindow.isShowing()) { + sendPopupWindow.dismiss(); + } + }); + sendPopupLayout.setShownFromBottom(false); + + boolean scheduleButtonValue = parentFragment != null && parentFragment.canScheduleMessage(); + boolean sendWithoutSoundButtonValue = !(self || slowModeTimer > 0 && !isInScheduleMode()); + if (scheduleButtonValue) { + ActionBarMenuSubItem scheduleButton = new ActionBarMenuSubItem(getContext(), true, !sendWithoutSoundButtonValue, resourcesProvider); + if (self) { + scheduleButton.setTextAndIcon(LocaleController.getString("SetReminder", R.string.SetReminder), R.drawable.baseline_date_range_24); + } else { + scheduleButton.setTextAndIcon(LocaleController.getString("ScheduleMessage", R.string.ScheduleMessage), R.drawable.baseline_date_range_24); + } + scheduleButton.setMinimumWidth(AndroidUtilities.dp(196)); + scheduleButton.setOnClickListener(v -> { + if (sendPopupWindow != null && 
sendPopupWindow.isShowing()) { + sendPopupWindow.dismiss(); + } + AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), this::sendMessageInternal, resourcesProvider); + }); + sendPopupLayout.addView(scheduleButton, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 48)); + } + if (sendWithoutSoundButtonValue) { + ActionBarMenuSubItem sendWithoutSoundButton = new ActionBarMenuSubItem(getContext(), !scheduleButtonValue, true, resourcesProvider); + sendWithoutSoundButton.setTextAndIcon(LocaleController.getString("SendWithoutSound", R.string.SendWithoutSound), R.drawable.input_notify_off); + sendWithoutSoundButton.setMinimumWidth(AndroidUtilities.dp(196)); + sendWithoutSoundButton.setOnClickListener(v -> { + if (sendPopupWindow != null && sendPopupWindow.isShowing()) { + sendPopupWindow.dismiss(); + } + sendMessageInternal(false, 0); + }); + sendPopupLayout.addView(sendWithoutSoundButton, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 48)); + } + if (sendWithoutSoundButtonValue) { + if (containsMarkdown(messageEditText.getText())) { + ActionBarMenuSubItem sendWithoutMarkdownButton = new ActionBarMenuSubItem(getContext(), false, false, resourcesProvider); + sendWithoutMarkdownButton.setTextAndIcon(LocaleController.getString("SendWithoutMarkdown", R.string.SendWithoutMarkdown), R.drawable.round_code_off_white); + sendWithoutMarkdownButton.setMinimumWidth(AndroidUtilities.dp(196)); + sendWithoutMarkdownButton.setOnClickListener(v -> { + if (sendPopupWindow != null && sendPopupWindow.isShowing()) { + sendPopupWindow.dismiss(); + } + sendMessageInternal(true, 0, false, true); + }); + sendPopupLayout.addView(sendWithoutMarkdownButton, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 48)); + } else if (canSendAsDice(messageEditText.getText().toString(), parentFragment, dialog_id)) { + ActionBarMenuSubItem sendWithoutMarkdownButton = new ActionBarMenuSubItem(getContext(), false, false, resourcesProvider); + sendWithoutMarkdownButton.setTextAndIcon(LocaleController.getString("SendAsEmoji", R.string.SendAsEmoji), R.drawable.casino_icon); + sendWithoutMarkdownButton.setMinimumWidth(AndroidUtilities.dp(196)); + sendWithoutMarkdownButton.setOnClickListener(v -> { + if (sendPopupWindow != null && sendPopupWindow.isShowing()) { + sendPopupWindow.dismiss(); + } + sendMessageInternal(true, 0, true, false); + }); + sendPopupLayout.addView(sendWithoutMarkdownButton, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 48)); } - canvas.clipRect(0, top, getMeasuredWidth(), getMeasuredHeight()); - } else { - canvas.clipRect(0, animatedTop, getMeasuredWidth(), animatedTop + child.getLayoutParams().height + AndroidUtilities.dp(2)); } - } - boolean result = super.drawChild(canvas, child, drawingTime); - if (clip) { - canvas.restore(); - } - return result; - } - public boolean allowBlur = true; - Paint backgroundPaint = new Paint(); - private float composeShadowAlpha = 1f; + sendPopupLayout.setupRadialSelectors(getThemedColor(Theme.key_dialogButtonSelector)); - @Override - protected void onDraw(Canvas canvas) { - int top = animatedTop; - top += Theme.chat_composeShadowDrawable.getIntrinsicHeight() * (1f - composeShadowAlpha); - if (topView != null && topView.getVisibility() == View.VISIBLE) { - top += (1f - topViewEnterProgress) * topView.getLayoutParams().height; - } - int bottom = top + Theme.chat_composeShadowDrawable.getIntrinsicHeight(); + sendPopupWindow = new ActionBarPopupWindow(sendPopupLayout, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT) { + @Override 
+ public void dismiss() { + super.dismiss(); + sendButton.invalidate(); + } + }; + sendPopupWindow.setAnimationEnabled(false); + sendPopupWindow.setAnimationStyle(R.style.PopupContextAnimation2); + sendPopupWindow.setOutsideTouchable(true); + sendPopupWindow.setClippingEnabled(true); + sendPopupWindow.setInputMethodMode(ActionBarPopupWindow.INPUT_METHOD_NOT_NEEDED); + sendPopupWindow.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_UNSPECIFIED); + sendPopupWindow.getContentView().setFocusableInTouchMode(true); + SharedConfig.removeScheduledOrNoSoundHint(); - Theme.chat_composeShadowDrawable.setAlpha((int) (composeShadowAlpha * 0xFF)); - Theme.chat_composeShadowDrawable.setBounds(0, top, getMeasuredWidth(), bottom); - Theme.chat_composeShadowDrawable.draw(canvas); - bottom += chatSearchExpandOffset; - if (allowBlur) { - backgroundPaint.setColor(getThemedColor(Theme.key_chat_messagePanelBackground)); - if (SharedConfig.chatBlurEnabled() && sizeNotifierLayout != null) { - AndroidUtilities.rectTmp2.set(0, bottom, getWidth(), getHeight()); - sizeNotifierLayout.drawBlurRect(canvas, getTop(), AndroidUtilities.rectTmp2, backgroundPaint, false); - } else { - canvas.drawRect(0, bottom, getWidth(), getHeight(), backgroundPaint); + if (delegate != null) { + delegate.onSendLongClick(); } + } + + sendPopupLayout.measure(MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), MeasureSpec.AT_MOST)); + sendPopupWindow.setFocusable(true); + view.getLocationInWindow(location); + int y; + if (keyboardVisible && ChatActivityEnterView.this.getMeasuredHeight() > AndroidUtilities.dp(topView != null && topView.getVisibility() == VISIBLE ? 48 + 58 : 58)) { + y = location[1] + view.getMeasuredHeight(); } else { - canvas.drawRect(0, bottom, getWidth(), getHeight(), getThemedPaint(Theme.key_paint_chatComposeBackground)); + y = location[1] - sendPopupLayout.getMeasuredHeight() - AndroidUtilities.dp(2); + } + sendPopupWindow.showAtLocation(view, Gravity.LEFT | Gravity.TOP, location[0] + view.getMeasuredWidth() - sendPopupLayout.getMeasuredWidth() + AndroidUtilities.dp(8), y); + sendPopupWindow.dimBehind(); + sendButton.invalidate(); + if (!NekoConfig.disableVibration.Bool()) { + try { + view.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } catch (Exception ignore) { + } } - } - @Override - public boolean hasOverlappingRendering() { return false; } - private boolean isInInput; - private ActionBarPopupWindow menuPopupWindow; - - private boolean checkMenuPermissions() { - if (Build.VERSION.SDK_INT < 23) { - return true; + private void createBotCommandsMenuContainer() { + if (botCommandsMenuContainer != null) { + return; } - if (isInVideoMode()) { - boolean hasAudio = parentActivity.checkSelfPermission(Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED; - boolean hasVideo = parentActivity.checkSelfPermission(Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED; - if (!hasAudio || !hasVideo) { - String[] permissions = new String[!hasAudio && !hasVideo ? 
2 : 1]; - if (!hasAudio && !hasVideo) { - permissions[0] = Manifest.permission.RECORD_AUDIO; - permissions[1] = Manifest.permission.CAMERA; - } else if (!hasAudio) { - permissions[0] = Manifest.permission.RECORD_AUDIO; - } else { - permissions[0] = Manifest.permission.CAMERA; + botCommandsMenuContainer = new BotCommandsMenuContainer(getContext()) { + @Override + protected void onDismiss() { + super.onDismiss(); + if (botCommandsMenuButton != null) { + botCommandsMenuButton.setOpened(false); } - parentActivity.requestPermissions(permissions, BasePermissionsActivity.REQUEST_CODE_VIDEO_MESSAGE); - return false; } - } else { - if (parentActivity.checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) { - parentActivity.requestPermissions(new String[]{Manifest.permission.RECORD_AUDIO}, 3); + }; + botCommandsMenuContainer.listView.setLayoutManager(new LinearLayoutManager(getContext())); + botCommandsMenuContainer.listView.setAdapter(botCommandsAdapter = new BotCommandsMenuView.BotCommandsAdapter()); + botCommandsMenuContainer.listView.setOnItemClickListener(new RecyclerListView.OnItemClickListener() { + @Override + public void onItemClick(View view, int position) { + if (view instanceof BotCommandsMenuView.BotCommandView) { + String command = ((BotCommandsMenuView.BotCommandView) view).getCommand(); + if (TextUtils.isEmpty(command)) { + return; + } + if (isInScheduleMode()) { + AlertsCreator.createScheduleDatePickerDialog(parentActivity, dialog_id, (notify, scheduleDate) -> { + SendMessagesHelper.getInstance(currentAccount).sendMessage(command, dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, notify, scheduleDate, null, false); + setFieldText(""); + botCommandsMenuContainer.dismiss(); + }, resourcesProvider); + } else { + if (parentFragment != null && parentFragment.checkSlowMode(view)) { + return; + } + SendMessagesHelper.getInstance(currentAccount).sendMessage(command, dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, true, 0, null, false); + setFieldText(""); + botCommandsMenuContainer.dismiss(); + } + } + } + }); + botCommandsMenuContainer.listView.setOnItemLongClickListener(new RecyclerListView.OnItemLongClickListener() { + @Override + public boolean onItemClick(View view, int position) { + if (view instanceof BotCommandsMenuView.BotCommandView) { + String command = ((BotCommandsMenuView.BotCommandView) view).getCommand(); + setFieldText(command + " "); + botCommandsMenuContainer.dismiss(); + return true; + } return false; } + }); + botCommandsMenuContainer.setClipToPadding(false); + sizeNotifierLayout.addView(botCommandsMenuContainer, 14, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.BOTTOM)); + botCommandsMenuContainer.setVisibility(View.GONE); + + if (lastBotInfo != null) { + botCommandsAdapter.setBotInfo(lastBotInfo); } - return true; + updateBotCommandsMenuContainerTopPadding(); } - private void onMenuClick(View view) { - if (parentFragment == null) { + private void updateBotCommandsMenuContainerTopPadding() { + if (botCommandsMenuContainer == null) { return; } - ActionBarPopupWindow.ActionBarPopupWindowLayout menuPopupLayout = new ActionBarPopupWindow.ActionBarPopupWindowLayout(parentActivity); - - - menuPopupLayout.setAnimationEnabled(false); - menuPopupLayout.setOnTouchListener(new OnTouchListener() { - - private android.graphics.Rect popupRect = new android.graphics.Rect(); + int padding; + if (botCommandsAdapter.getItemCount() > 4) { + 
padding = Math.max(0, sizeNotifierLayout.getMeasuredHeight() - AndroidUtilities.dp(8 + 36 * 4.3f)); + } else { + padding = Math.max(0, sizeNotifierLayout.getMeasuredHeight() - AndroidUtilities.dp(8 + 36 * Math.max(1, Math.min(4, botCommandsAdapter.getItemCount())))); + } - @Override - public boolean onTouch(View v, MotionEvent event) { - if (event.getActionMasked() == MotionEvent.ACTION_DOWN) { - if (menuPopupWindow != null && menuPopupWindow.isShowing()) { - v.getHitRect(popupRect); - if (!popupRect.contains((int) event.getX(), (int) event.getY())) { - menuPopupWindow.dismiss(); - } + if (botCommandsMenuContainer.listView.getPaddingTop() != padding) { + botCommandsMenuContainer.listView.setTopGlowOffset(padding); + if (botCommandLastPosition == -1 && botCommandsMenuContainer.getVisibility() == View.VISIBLE && botCommandsMenuContainer.listView.getLayoutManager() != null) { + LinearLayoutManager layoutManager = (LinearLayoutManager) botCommandsMenuContainer.listView.getLayoutManager(); + int p = layoutManager.findFirstVisibleItemPosition(); + if (p >= 0) { + View view = layoutManager.findViewByPosition(p); + if (view != null) { + botCommandLastPosition = p; + botCommandLastTop = view.getTop() - botCommandsMenuContainer.listView.getPaddingTop(); } } - return false; } - }); - - int a = 0; - - ActionBarMenuSubItem cell = new ActionBarMenuSubItem(getContext(), true, false); - int dlps = delegate.getDisableLinkPreviewStatus(); - - if (!isInInput) { + botCommandsMenuContainer.listView.setPadding(0, padding, 0, AndroidUtilities.dp(8)); + } + } - cell.setTextAndIcon(LocaleController.getString("ChatAttachEnterMenuRecordAudio", R.string.ChatAttachEnterMenuRecordAudio), R.drawable.input_mic); - cell.setOnClickListener(v -> { - if (menuPopupWindow != null && menuPopupWindow.isShowing()) { - menuPopupWindow.dismiss(); + private void createBotWebViewMenuContainer() { + if (botWebViewMenuContainer != null) { + return; + } + botWebViewMenuContainer = new BotWebViewMenuContainer(getContext(), this) { + @Override + public void onDismiss() { + super.onDismiss(); + if (botCommandsMenuButton != null) { + botCommandsMenuButton.setOpened(false); } + } + }; + sizeNotifierLayout.addView(botWebViewMenuContainer, 15, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.BOTTOM)); + botWebViewMenuContainer.setVisibility(GONE); + botWebViewMenuContainer.setOnDismissGlobalListener(()->{ + if (botButtonsMessageObject != null && (messageEditText == null || TextUtils.isEmpty(messageEditText.getText())) && !botWebViewMenuContainer.hasSavedText()) { + showPopup(1, POPUP_CONTENT_BOT_KEYBOARD); + } + }); + } - if (parentFragment != null) { - TLRPC.Chat chat = parentFragment.getCurrentChat(); - if (chat != null && !ChatObject.canSendMedia(chat)) { - delegate.needShowMediaBanHint(); - return; - } - } + private ArrayList messageEditTextWatchers; + private boolean messageEditTextEnabled = true; - isInVideoMode = false; - if (checkMenuPermissions()) { - recordAudioVideoRunnable.run(); - delegate.onSwitchRecordMode(isInVideoMode); - setRecordVideoButtonVisible(isInVideoMode, true); - if (!NekoConfig.disableVibration.Bool()) { - performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP); - } - recordCircle.sendButtonVisible = true; - startLockTransition(false); - } - }); + private class ChatActivityEditTextCaption extends EditTextCaption { + public ChatActivityEditTextCaption(Context context, Theme.ResourcesProvider resourcesProvider) { + super(context, resourcesProvider); + } - 
cell.setMinimumWidth(AndroidUtilities.dp(196)); - menuPopupLayout.addView(cell, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT, 0, 48 * a++, 0, 0)); + CanvasButton canvasButton; - if (SharedConfig.inappCamera) { + @Override + protected void onScrollChanged(int horiz, int vert, int oldHoriz, int oldVert) { + super.onScrollChanged(horiz, vert, oldHoriz, oldVert); + if (delegate != null) { + delegate.onEditTextScroll(); + } + } - cell = new ActionBarMenuSubItem(getContext(), false, dlps == 0); + @Override + protected void onContextMenuOpen() { + if (delegate != null) { + delegate.onContextMenuOpen(); + } + } - cell.setTextAndIcon(LocaleController.getString("ChatAttachEnterMenuRecordVideo", R.string.ChatAttachEnterMenuRecordVideo), R.drawable.input_video); - cell.setOnClickListener(v -> { - if (menuPopupWindow != null && menuPopupWindow.isShowing()) { - menuPopupWindow.dismiss(); - } + @Override + protected void onContextMenuClose() { + if (delegate != null) { + delegate.onContextMenuClose(); + } + } - if (parentFragment != null) { - TLRPC.Chat chat = parentFragment.getCurrentChat(); - if (chat != null && !ChatObject.canSendMedia(chat)) { - delegate.needShowMediaBanHint(); - return; + private void send(InputContentInfoCompat inputContentInfo, boolean notify, int scheduleDate) { + if (delegate != null) { + delegate.beforeMessageSend(null, true, scheduleDate); + } + ClipDescription description = inputContentInfo.getDescription(); + if (description.hasMimeType("image/gif")) { + SendMessagesHelper.prepareSendingDocument(accountInstance, null, null, inputContentInfo.getContentUri(), null, "image/gif", dialog_id, replyingMessageObject, getThreadMessage(), inputContentInfo, null, notify, 0); + } else { + SendMessagesHelper.prepareSendingPhoto(accountInstance, null, inputContentInfo.getContentUri(), dialog_id, replyingMessageObject, getThreadMessage(), null, null, null, inputContentInfo, 0, null, notify, 0); + } + if (delegate != null) { + delegate.onMessageSend(null, true, scheduleDate); + } + } + + @Override + public InputConnection onCreateInputConnection(EditorInfo editorInfo) { + final InputConnection ic = super.onCreateInputConnection(editorInfo); + if (ic == null) { + return null; + } + try { + EditorInfoCompat.setContentMimeTypes(editorInfo, new String[]{"image/gif", "image/*", "image/jpg", "image/png", "image/webp"}); + final InputConnectionCompat.OnCommitContentListener callback = (inputContentInfo, flags, opts) -> { + if (BuildCompat.isAtLeastNMR1() && (flags & InputConnectionCompat.INPUT_CONTENT_GRANT_READ_URI_PERMISSION) != 0) { + try { + inputContentInfo.requestPermission(); + } catch (Exception e) { + return false; } } - - isInVideoMode = true; - if (checkMenuPermissions()) { - recordAudioVideoRunnable.run(); - delegate.onSwitchRecordMode(isInVideoMode); - setRecordVideoButtonVisible(isInVideoMode, true); - if (!NekoConfig.disableVibration.Bool()) { - performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP); + if (inputContentInfo.getDescription().hasMimeType("image/gif") || SendMessagesHelper.shouldSendWebPAsSticker(null, inputContentInfo.getContentUri())) { + if (isInScheduleMode()) { + AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), (notify, scheduleDate) -> send(inputContentInfo, notify, scheduleDate), resourcesProvider); + } else { + send(inputContentInfo, true, 0); } - recordCircle.sendButtonVisible = true; - startLockTransition(false); + } else { + 
editPhoto(inputContentInfo.getContentUri(), inputContentInfo.getDescription().getMimeType(0)); } - }); - - cell.setMinimumWidth(AndroidUtilities.dp(196)); - menuPopupLayout.addView(cell, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT, 0, 48 * a++, 0, 0)); + return true; + }; + return InputConnectionCompat.createWrapper(ic, editorInfo, callback); + } catch (Throwable e) { + FileLog.e(e); + } + return ic; + } + @Override + public boolean onTouchEvent(MotionEvent event) { + if (stickersDragging || stickersExpansionAnim != null) { + return false; + } + if (!sendPlainEnabled && !isEditingMessage()) { + if (canvasButton == null) { + canvasButton = new CanvasButton(this); + canvasButton.setDelegate(() -> { + showRestrictedHint(); + }); + } + canvasButton.setRect(0, 0, getMeasuredWidth(), getMeasuredHeight()); + return canvasButton.checkTouchEvent(event); + } + if (isPopupShowing() && event.getAction() == MotionEvent.ACTION_DOWN) { + if (searchingType != 0) { + setSearchingTypeInternal(0, false); + emojiView.closeSearch(false); + requestFocus(); + } + showPopup(AndroidUtilities.usingHardwareInput ? 0 : 2, 0); + if (stickersExpanded) { + setStickersExpanded(false, true, false); + waitingForKeyboardOpenAfterAnimation = true; + AndroidUtilities.runOnUIThread(() -> { + waitingForKeyboardOpenAfterAnimation = false; + openKeyboardInternal(); + }, 200); + } else { + openKeyboardInternal(); + } + return true; + } + try { + return super.onTouchEvent(event); + } catch (Exception e) { + FileLog.e(e); } + return false; + } - } else { + @Override + public boolean dispatchKeyEvent(KeyEvent event) { + if (preventInput) { + return false; + } + return super.dispatchKeyEvent(event); + } - if (StrUtil.isNotBlank(NekoConfig.openPGPApp.String())) { + @Override + protected void onSelectionChanged(int selStart, int selEnd) { + super.onSelectionChanged(selStart, selEnd); + if (delegate != null) { + delegate.onTextSelectionChanged(selStart, selEnd); + } + } - cell.setTextAndIcon(LocaleController.getString("Sign", R.string.Sign), R.drawable.baseline_vpn_key_24); - cell.setOnClickListener(v -> { - if (menuPopupWindow != null && menuPopupWindow.isShowing()) { - menuPopupWindow.dismiss(); - } - signComment(true); - }); - cell.setOnLongClickListener(v -> { - if (menuPopupWindow != null && menuPopupWindow.isShowing()) { - menuPopupWindow.dismiss(); - } - signComment(false); - return true; - }); - cell.setMinimumWidth(AndroidUtilities.dp(196)); - menuPopupLayout.addView(cell, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, LocaleController.isRTL ? 
Gravity.RIGHT : Gravity.LEFT, 0, 48 * a++, 0, 0)); + @Override + protected void extendActionMode(ActionMode actionMode, Menu menu) { + if (parentFragment != null) { + parentFragment.extendActionMode(menu); + } + } - cell = new ActionBarMenuSubItem(getContext(), false, false); + @Override + public boolean requestRectangleOnScreen(Rect rectangle) { + rectangle.bottom += AndroidUtilities.dp(1000); + return super.requestRectangleOnScreen(rectangle); + } + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + isInitLineCount = getMeasuredWidth() == 0 && getMeasuredHeight() == 0; + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + if (isInitLineCount) { + lineCount = getLineCount(); } + isInitLineCount = false; + } - TLRPC.Chat chat = parentFragment.getCurrentChat(); - TLRPC.User user = parentFragment.getCurrentUser(); + @Override + public boolean onTextContextMenuItem(int id) { + if (id == android.R.id.paste) { + isPaste = true; - long chatId; - if (chat != null) { - chatId = chat.id; - } else if (user != null) { - chatId = user.id; - } else { - chatId = -1; + ClipboardManager clipboard = (ClipboardManager) getContext().getSystemService(Context.CLIPBOARD_SERVICE); + ClipData clipData = clipboard.getPrimaryClip(); + if (clipData != null) { + if (clipData.getItemCount() == 1 && clipData.getDescription().hasMimeType("image/*")) { + editPhoto(clipData.getItemAt(0).getUri(), clipData.getDescription().getMimeType(0)); + } + } } + return super.onTextContextMenuItem(id); + } - cell.setTextAndIcon(LocaleController.getString("Translate", R.string.Translate), R.drawable.ic_translate); - cell.setOnClickListener(v -> { - if (menuPopupWindow != null && menuPopupWindow.isShowing()) { - menuPopupWindow.dismiss(); + private void editPhoto(Uri uri, String mime) { + final File file = AndroidUtilities.generatePicturePath(parentFragment != null && parentFragment.isSecretChat(), MimeTypeMap.getSingleton().getExtensionFromMimeType(mime)); + Utilities.globalQueue.postRunnable(() -> { + try { + InputStream in = getContext().getContentResolver().openInputStream(uri); + FileOutputStream fos = new FileOutputStream(file); + byte[] buffer = new byte[1024]; + int lengthRead; + while ((lengthRead = in.read(buffer)) > 0) { + fos.write(buffer, 0, lengthRead); + fos.flush(); + } + in.close(); + fos.close(); + MediaController.PhotoEntry photoEntry = new MediaController.PhotoEntry(0, -1, 0, file.getAbsolutePath(), 0, false, 0, 0, 0); + ArrayList entries = new ArrayList<>(); + entries.add(photoEntry); + AndroidUtilities.runOnUIThread(() -> { + openPhotoViewerForEdit(entries, file); + }); + } catch (Throwable e) { + e.printStackTrace(); } - translateComment(TranslateDb.getChatLanguage(chatId, TranslatorKt.getCode2Locale(NekoConfig.translateInputLang.String()))); }); - ActionBarMenuSubItem finalCell = cell; - cell.setOnLongClickListener(v -> { - Translator.showTargetLangSelect(finalCell, true, (locale) -> { - if (menuPopupWindow != null && menuPopupWindow.isShowing()) { - menuPopupWindow.dismiss(); + } + + private void openPhotoViewerForEdit(ArrayList entries, File sourceFile) { + if (parentFragment == null || parentFragment.getParentActivity() == null) { + return; + } + MediaController.PhotoEntry photoEntry = (MediaController.PhotoEntry) entries.get(0); + if (keyboardVisible) { + AndroidUtilities.hideKeyboard(this); + AndroidUtilities.runOnUIThread(new Runnable() { + @Override + public void run() { + openPhotoViewerForEdit(entries, sourceFile); } - translateComment(locale); - 
TranslateDb.saveChatLanguage(chatId, locale); - return Unit.INSTANCE; - }); - return true; - }); - cell.setMinimumWidth(AndroidUtilities.dp(196)); - menuPopupLayout.addView(cell, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT, 0, 48 * a++, 0, 0)); - cell = new ActionBarMenuSubItem(getContext(), false, dlps == 0); + }, 100); + return; + } + + PhotoViewer.getInstance().setParentActivity(parentFragment, resourcesProvider); + PhotoViewer.getInstance().openPhotoForSelect(entries, 0, 2, false, new PhotoViewer.EmptyPhotoViewerProvider() { + boolean sending; + @Override + public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { + ArrayList photos = new ArrayList<>(); + SendMessagesHelper.SendingMediaInfo info = new SendMessagesHelper.SendingMediaInfo(); + if (!photoEntry.isVideo && photoEntry.imagePath != null) { + info.path = photoEntry.imagePath; + } else if (photoEntry.path != null) { + info.path = photoEntry.path; + } + info.thumbPath = photoEntry.thumbPath; + info.isVideo = photoEntry.isVideo; + info.caption = photoEntry.caption != null ? photoEntry.caption.toString() : null; + info.entities = photoEntry.entities; + info.masks = photoEntry.stickers; + info.ttl = photoEntry.ttl; + info.videoEditedInfo = videoEditedInfo; + info.canDeleteAfter = true; + photos.add(info); + photoEntry.reset(); + sending = true; + boolean updateStickersOrder = SendMessagesHelper.checkUpdateStickersOrder(info.caption); + SendMessagesHelper.prepareSendingMedia(accountInstance, photos, dialog_id, replyingMessageObject, getThreadMessage(), null, false, false, editingMessageObject, notify, scheduleDate, updateStickersOrder); + if (delegate != null) { + delegate.onMessageSend(null, true, scheduleDate); + } + } + + @Override + public void willHidePhotoViewer() { + if (!sending) { + try { + sourceFile.delete(); + } catch (Throwable ignore) { - cell.setTextAndIcon(LocaleController.getString("Translate", R.string.OpenCC), R.drawable.ic_translate); - ActionBarMenuSubItem finalCell1 = cell; - cell.setOnClickListener(v -> { - String ccTarget = TranslateDb.getChatCCTarget(chatId, NekoConfig.ccInputLang.String()); - if (ccTarget == null || StringsKt.isBlank(ccTarget)) { - Translator.showCCTargetSelect(finalCell1, (target) -> { - if (menuPopupWindow != null && menuPopupWindow.isShowing()) { - menuPopupWindow.dismiss(); } - ccComment(target); - TranslateDb.saveChatCCTarget(chatId, target); - return Unit.INSTANCE; - }); - return; + } } - if (menuPopupWindow != null && menuPopupWindow.isShowing()) { - menuPopupWindow.dismiss(); + + @Override + public boolean canCaptureMorePhotos() { + return false; } - ccComment(ccTarget); - }); - cell.setOnLongClickListener(v -> { - Translator.showCCTargetSelect(finalCell1, (target) -> { - if (menuPopupWindow != null && menuPopupWindow.isShowing()) { - menuPopupWindow.dismiss(); - } - ccComment(target); - TranslateDb.saveChatCCTarget(chatId, target); - return Unit.INSTANCE; - }); - return true; - }); - cell.setMinimumWidth(AndroidUtilities.dp(196)); - menuPopupLayout.addView(cell, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, LocaleController.isRTL ? 
Gravity.RIGHT : Gravity.LEFT, 0, 48 * a++, 0, 0)); - cell = new ActionBarMenuSubItem(getContext(), false, dlps == 0); + }, parentFragment); + } + @Override + protected Theme.ResourcesProvider getResourcesProvider() { + return resourcesProvider; + } - cell.setTextAndIcon(LocaleController.getString("ReplaceText", R.string.ReplaceText), R.drawable.msg_edit); - cell.setOnClickListener(v -> { - if (menuPopupWindow != null && menuPopupWindow.isShowing()) { - menuPopupWindow.dismiss(); - } - showReplace(); - }); - cell.setMinimumWidth(AndroidUtilities.dp(196)); - menuPopupLayout.addView(cell, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT, 0, 48 * a++, 0, 0)); + @Override + public boolean requestFocus(int direction, Rect previouslyFocusedRect) { + if (!sendPlainEnabled && !isEditingMessage()) { + return false; + } + return super.requestFocus(direction, previouslyFocusedRect); + } + @Override + public void setOffsetY(float offset) { + super.setOffsetY(offset); + if (sizeNotifierLayout.getForeground() != null) { + sizeNotifierLayout.invalidateDrawable(sizeNotifierLayout.getForeground()); + } } + } - if (dlps > 0) { - cell = new ActionBarMenuSubItem(getContext(), false, true); + private void createMessageEditText() { + if (messageEditText != null) { + return; + } - cell.setTextAndIcon(dlps != 1 ? - LocaleController.getString("ChatAttachEnterMenuEnableLinkPreview", R.string.ChatAttachEnterMenuEnableLinkPreview) : - LocaleController.getString("ChatAttachEnterMenuDisableLinkPreview", R.string.ChatAttachEnterMenuDisableLinkPreview), R.drawable.msg_link); + messageEditText = new ChatActivityEditTextCaption(getContext(), resourcesProvider); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { + messageEditText.setFallbackLineSpacing(false); + } + messageEditText.setDelegate(new EditTextCaption.EditTextCaptionDelegate() { - ActionBarMenuSubItem finalCell = cell; - cell.setOnClickListener(v -> { - if (menuPopupWindow != null && menuPopupWindow.isShowing()) { - menuPopupWindow.dismiss(); + @Override + public void onSpansChanged() { + messageEditText.invalidateEffects(); + if (delegate != null) { + delegate.onTextSpansChanged(messageEditText.getText()); } + } - delegate.toggleDisableLinkPreview(); - messageWebPageSearch = delegate.getDisableLinkPreviewStatus() == 1; + @Override + public long getCurrentChat() { - finalCell.setTextAndIcon(delegate.getDisableLinkPreviewStatus() != 1 ? - LocaleController.getString("ChatAttachEnterMenuEnableLinkPreview", R.string.ChatAttachEnterMenuEnableLinkPreview) : - LocaleController.getString("ChatAttachEnterMenuDisableLinkPreview", R.string.ChatAttachEnterMenuDisableLinkPreview), R.drawable.msg_link); + long chatId; + if (parentFragment.getCurrentChat() != null) { + chatId = parentFragment.getCurrentChat().id; + } else if (parentFragment.getCurrentUser() != null) { + chatId = parentFragment.getCurrentUser().id; + } else { + chatId = -1; + } - }); + return chatId; + } - cell.setMinimumWidth(AndroidUtilities.dp(196)); - menuPopupLayout.addView(cell, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT, 0, 48 * a++, 0, 0)); + }); + messageEditText.setWindowView(parentActivity.getWindow().getDecorView()); + TLRPC.EncryptedChat encryptedChat = parentFragment != null ? 
parentFragment.getCurrentEncryptedChat() : null; + messageEditText.setAllowTextEntitiesIntersection(supportsSendingNewEntities()); + int flags = EditorInfo.IME_FLAG_NO_EXTRACT_UI; + if (encryptedChat != null) { + flags |= 0x01000000; // EditorInfo.IME_FLAG_NO_PERSONALIZED_LEARNING; } + messageEditText.setIncludeFontPadding(false); + messageEditText.setImeOptions(flags); + messageEditText.setInputType(commonInputType = (messageEditText.getInputType() | EditorInfo.TYPE_TEXT_FLAG_CAP_SENTENCES | EditorInfo.TYPE_TEXT_FLAG_MULTI_LINE)); + updateFieldHint(false); + messageEditText.setSingleLine(false); + messageEditText.setMaxLines(6); + messageEditText.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 18); + messageEditText.setGravity(Gravity.BOTTOM); + messageEditText.setPadding(0, AndroidUtilities.dp(11), 0, AndroidUtilities.dp(12)); + messageEditText.setBackgroundDrawable(null); + messageEditText.setTextColor(getThemedColor(Theme.key_chat_messagePanelText)); + messageEditText.setLinkTextColor(getThemedColor(Theme.key_chat_messageLinkOut)); + messageEditText.setHighlightColor(getThemedColor(Theme.key_chat_inTextSelectionHighlight)); + messageEditText.setHintColor(getThemedColor(Theme.key_chat_messagePanelHint)); + messageEditText.setHintTextColor(getThemedColor(Theme.key_chat_messagePanelHint)); + messageEditText.setCursorColor(getThemedColor(Theme.key_chat_messagePanelCursor)); + messageEditText.setHandlesColor(getThemedColor(Theme.key_chat_TextSelectionCursor)); + messageEditTextContainer.addView(messageEditText, 1, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM, 52, 0, isChat ? 50 : 2, 1.5f)); + messageEditText.setOnKeyListener(new OnKeyListener() { - menuPopupLayout.setupRadialSelectors(Theme.getColor(Theme.key_dialogButtonSelector)); + boolean ctrlPressed = false; - menuPopupWindow = new ActionBarPopupWindow(menuPopupLayout, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT); - menuPopupWindow.setAnimationEnabled(false); - menuPopupWindow.setAnimationStyle(R.style.PopupContextAnimation2); - menuPopupWindow.setOutsideTouchable(true); - menuPopupWindow.setClippingEnabled(true); - menuPopupWindow.setInputMethodMode(ActionBarPopupWindow.INPUT_METHOD_NOT_NEEDED); - menuPopupWindow.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_UNSPECIFIED); - menuPopupWindow.getContentView().setFocusableInTouchMode(true); + @Override + public boolean onKey(View view, int keyCode, KeyEvent keyEvent) { + if (keyCode == KeyEvent.KEYCODE_BACK && !keyboardVisible && isPopupShowing() && keyEvent.getAction() == KeyEvent.ACTION_UP) { + if (ContentPreviewViewer.hasInstance() && ContentPreviewViewer.getInstance().isVisible()) { + ContentPreviewViewer.getInstance().closeWithMenu(); + return true; + } + if (currentPopupContentType == POPUP_CONTENT_BOT_KEYBOARD && botButtonsMessageObject != null) { + return false; + } + if (keyEvent.getAction() == 1) { + if (currentPopupContentType == POPUP_CONTENT_BOT_KEYBOARD && botButtonsMessageObject != null) { + SharedPreferences preferences = MessagesController.getMainSettings(currentAccount); + preferences.edit().putInt("hidekeyboard_" + dialog_id, botButtonsMessageObject.getId()).commit(); + } + if (searchingType != 0) { + setSearchingTypeInternal(0, true); + if (emojiView != null) { + emojiView.closeSearch(true); + } + messageEditText.requestFocus(); + } else { + if (stickersExpanded) { + setStickersExpanded(false, true, false); + } else { + if (stickersExpansionAnim == null) { + if (botButtonsMessageObject != null && 
currentPopupContentType != POPUP_CONTENT_BOT_KEYBOARD && TextUtils.isEmpty(messageEditText.getText())) { + showPopup(1, POPUP_CONTENT_BOT_KEYBOARD); + } else { + showPopup(0, 0); + } + } + } + } + } + return true; + } else if (keyCode == KeyEvent.KEYCODE_ENTER && (ctrlPressed || sendByEnter) && keyEvent.getAction() == KeyEvent.ACTION_DOWN && editingMessageObject == null) { + sendMessage(); + return true; + } else if (keyCode == KeyEvent.KEYCODE_CTRL_LEFT || keyCode == KeyEvent.KEYCODE_CTRL_RIGHT) { + ctrlPressed = keyEvent.getAction() == KeyEvent.ACTION_DOWN; + return true; + } + return false; + } + }); + messageEditText.setOnEditorActionListener(new TextView.OnEditorActionListener() { - if (delegate != null) { - delegate.onSendLongClick(); - } + boolean ctrlPressed = false; + @Override + public boolean onEditorAction(TextView textView, int i, KeyEvent keyEvent) { + if (i == EditorInfo.IME_ACTION_SEND) { + sendMessage(); + return true; + } else if (keyEvent != null && i == EditorInfo.IME_NULL) { + if ((ctrlPressed || sendByEnter) && keyEvent.getAction() == KeyEvent.ACTION_DOWN && editingMessageObject == null) { + sendMessage(); + return true; + } + } + return false; + } + }); + messageEditText.addTextChangedListener(new TextWatcher() { - menuPopupLayout.measure(MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), MeasureSpec.AT_MOST)); - menuPopupWindow.setFocusable(true); - int[] location = new int[2]; - view.getLocationInWindow(location); - int y; - if (keyboardVisible && ChatActivityEnterView.this.getMeasuredHeight() > AndroidUtilities.dp(topView != null && topView.getVisibility() == VISIBLE ? 48 + 58 : 58)) { - y = location[1] + view.getMeasuredHeight(); - } else { - y = location[1] - menuPopupLayout.getMeasuredHeight() - AndroidUtilities.dp(2); - } - y += AndroidUtilities.dp(48); - int x = location[0] + view.getMeasuredWidth() - menuPopupLayout.getMeasuredWidth() + AndroidUtilities.dp(8); - if (isInInput) { - x += view.getMeasuredWidth(); - } - menuPopupWindow.showAtLocation(view, Gravity.LEFT | Gravity.TOP, x, y); - menuPopupWindow.dimBehind(); + private boolean processChange; + private boolean nextChangeIsSend; + private CharSequence prevText; + private boolean ignorePrevTextChange; + boolean heightShouldBeChanged; - } + @Override + public void beforeTextChanged(CharSequence charSequence, int i, int i2, int i3) { + if (ignorePrevTextChange) { + return; + } + if (recordingAudioVideo) { + prevText = charSequence.toString(); + } + } - private boolean onSendLongClick(View view) { - if (isInScheduleMode()) { - return false; - } + @Override + public void onTextChanged(CharSequence charSequence, int start, int before, int count) { + if (ignorePrevTextChange) { + return; + } - boolean self = parentFragment != null && UserObject.isUserSelf(parentFragment.getCurrentUser()); + boolean allowChangeToSmile = true; + int currentPage; + if (emojiView == null) { + currentPage = MessagesController.getGlobalEmojiSettings().getInt("selected_page", 0); + } else { + currentPage = emojiView.getCurrentPage(); + } + if (currentPage == 0 || !allowStickers && !allowGifs) { + allowChangeToSmile = false; + } - if (sendPopupLayout == null) { - sendPopupLayout = new ActionBarPopupWindow.ActionBarPopupWindowLayout(parentActivity, resourcesProvider); - sendPopupLayout.setAnimationEnabled(false); - sendPopupLayout.setOnTouchListener(new OnTouchListener() { + if ((before == 0 && !TextUtils.isEmpty(charSequence) || before != 0 && 
TextUtils.isEmpty(charSequence)) && allowChangeToSmile) { + setEmojiButtonImage(false, true); + } + if (lineCount != messageEditText.getLineCount()) { + heightShouldBeChanged = (messageEditText.getLineCount() >= 4) != (lineCount >= 4); + if (!isInitLineCount && messageEditText.getMeasuredWidth() > 0) { + onLineCountChanged(lineCount, messageEditText.getLineCount()); + } + lineCount = messageEditText.getLineCount(); + } else { + heightShouldBeChanged = false; + } - private android.graphics.Rect popupRect = new android.graphics.Rect(); + if (innerTextChange == 1) { + return; + } + if (sendByEnter && !isPaste && editingMessageObject == null && count > before && charSequence.length() > 0 && charSequence.length() == start + count && charSequence.charAt(charSequence.length() - 1) == '\n') { + nextChangeIsSend = true; + } + isPaste = false; + checkSendButton(true); + CharSequence message = AndroidUtilities.getTrimmedString(charSequence.toString()); + if (delegate != null) { + if (!ignoreTextChange) { + if ((before > count + 1 || (count - before) > 2 || TextUtils.isEmpty(charSequence)) && delegate.getDisableLinkPreviewStatus() == 1) { + messageWebPageSearch = true; + } + delegate.onTextChanged(charSequence, before > count + 1 || (count - before) > 2); + } + } + if (innerTextChange != 2 && (count - before) > 1) { + processChange = true; + } + if (editingMessageObject == null && !canWriteToChannel && message.length() != 0 && lastTypingTimeSend < System.currentTimeMillis() - 5000 && !ignoreTextChange) { + lastTypingTimeSend = System.currentTimeMillis(); + if (delegate != null) { + delegate.needSendTyping(); + } + } + } - @Override - public boolean onTouch(View v, MotionEvent event) { - if (event.getActionMasked() == MotionEvent.ACTION_DOWN) { - if (sendPopupWindow != null && sendPopupWindow.isShowing()) { - v.getHitRect(popupRect); - if (!popupRect.contains((int) event.getX(), (int) event.getY())) { - sendPopupWindow.dismiss(); - } + @Override + public void afterTextChanged(Editable editable) { + if (ignorePrevTextChange) { + return; + } + if (prevText != null) { + ignorePrevTextChange = true; + editable.replace(0, editable.length(), prevText); + prevText = null; + ignorePrevTextChange = false; + return; + } + if (innerTextChange == 0) { + if (nextChangeIsSend) { + sendMessage(); + nextChangeIsSend = false; + } + if (processChange) { + ImageSpan[] spans = editable.getSpans(0, editable.length(), ImageSpan.class); + for (int i = 0; i < spans.length; i++) { + editable.removeSpan(spans[i]); } + Emoji.replaceEmoji(editable, messageEditText.getPaint().getFontMetricsInt(), AndroidUtilities.dp(20), false, null); + processChange = false; } - return false; - } - }); - sendPopupLayout.setDispatchKeyEventListener(keyEvent -> { - if (keyEvent.getKeyCode() == KeyEvent.KEYCODE_BACK && keyEvent.getRepeatCount() == 0 && sendPopupWindow != null && sendPopupWindow.isShowing()) { - sendPopupWindow.dismiss(); } - }); - sendPopupLayout.setShownFromBottom(false); - boolean scheduleButtonValue = parentFragment != null && parentFragment.canScheduleMessage(); - boolean sendWithoutSoundButtonValue = !(self || slowModeTimer > 0 && !isInScheduleMode()); - if (scheduleButtonValue) { - ActionBarMenuSubItem scheduleButton = new ActionBarMenuSubItem(getContext(), true, !sendWithoutSoundButtonValue, resourcesProvider); - if (self) { - scheduleButton.setTextAndIcon(LocaleController.getString("SetReminder", R.string.SetReminder), R.drawable.msg_schedule); - } else { - 
scheduleButton.setTextAndIcon(LocaleController.getString("ScheduleMessage", R.string.ScheduleMessage), R.drawable.msg_schedule); - } - scheduleButton.setMinimumWidth(AndroidUtilities.dp(196)); - scheduleButton.setOnClickListener(v -> { - if (sendPopupWindow != null && sendPopupWindow.isShowing()) { - sendPopupWindow.dismiss(); + int beforeLimit; + codePointCount = Character.codePointCount(editable, 0, editable.length()); + boolean doneButtonEnabledLocal = true; + if (currentLimit > 0 && (beforeLimit = currentLimit - codePointCount) <= 100) { + if (beforeLimit < -9999) { + beforeLimit = -9999; } - AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), this::sendMessageInternal, resourcesProvider); - }); - sendPopupLayout.addView(scheduleButton, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 48)); - } - if (sendWithoutSoundButtonValue) { - ActionBarMenuSubItem sendWithoutSoundButton = new ActionBarMenuSubItem(getContext(), !scheduleButtonValue, true, resourcesProvider); - sendWithoutSoundButton.setTextAndIcon(LocaleController.getString("SendWithoutSound", R.string.SendWithoutSound), R.drawable.input_notify_off); - sendWithoutSoundButton.setMinimumWidth(AndroidUtilities.dp(196)); - sendWithoutSoundButton.setOnClickListener(v -> { - if (sendPopupWindow != null && sendPopupWindow.isShowing()) { - sendPopupWindow.dismiss(); + createCaptionLimitView(); + captionLimitView.setNumber(beforeLimit, captionLimitView.getVisibility() == View.VISIBLE); + if (captionLimitView.getVisibility() != View.VISIBLE) { + captionLimitView.setVisibility(View.VISIBLE); + captionLimitView.setAlpha(0); + captionLimitView.setScaleX(0.5f); + captionLimitView.setScaleY(0.5f); } - sendMessageInternal(false, 0); - }); - sendPopupLayout.addView(sendWithoutSoundButton, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 48)); - } - if (sendWithoutSoundButtonValue) { - if (containsMarkdown(messageEditText.getText())) { - ActionBarMenuSubItem sendWithoutMarkdownButton = new ActionBarMenuSubItem(getContext(), false, false, resourcesProvider); - sendWithoutMarkdownButton.setTextAndIcon(LocaleController.getString("SendWithoutMarkdown", R.string.SendWithoutMarkdown), R.drawable.round_code_off_white); - sendWithoutMarkdownButton.setMinimumWidth(AndroidUtilities.dp(196)); - sendWithoutMarkdownButton.setOnClickListener(v -> { - if (sendPopupWindow != null && sendPopupWindow.isShowing()) { - sendPopupWindow.dismiss(); + captionLimitView.animate().setListener(null).cancel(); + captionLimitView.animate().alpha(1f).scaleX(1f).scaleY(1f).setDuration(100).start(); + if (beforeLimit < 0) { + doneButtonEnabledLocal = false; + captionLimitView.setTextColor(getThemedColor(Theme.key_windowBackgroundWhiteRedText)); + } else { + captionLimitView.setTextColor(getThemedColor(Theme.key_windowBackgroundWhiteGrayText)); + } + } else if (captionLimitView != null) { + captionLimitView.animate().alpha(0).scaleX(0.5f).scaleY(0.5f).setDuration(100).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + captionLimitView.setVisibility(View.GONE); } - sendMessageInternal(true, 0, false, true); }); - sendPopupLayout.addView(sendWithoutMarkdownButton, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 48)); - } else if (canSendAsDice(messageEditText.getText().toString(), parentFragment, dialog_id)) { - ActionBarMenuSubItem sendWithoutMarkdownButton = new ActionBarMenuSubItem(getContext(), false, false, resourcesProvider); - 
sendWithoutMarkdownButton.setTextAndIcon(LocaleController.getString("SendAsEmoji", R.string.SendAsEmoji), R.drawable.casino_icon); - sendWithoutMarkdownButton.setMinimumWidth(AndroidUtilities.dp(196)); - sendWithoutMarkdownButton.setOnClickListener(v -> { - if (sendPopupWindow != null && sendPopupWindow.isShowing()) { - sendPopupWindow.dismiss(); + } + + if (doneButtonEnabled != doneButtonEnabledLocal && (doneButtonImage != null || doneCheckDrawable != null)) { + doneButtonEnabled = doneButtonEnabledLocal; + if (doneButtonColorAnimator != null) { + doneButtonColorAnimator.cancel(); + } + doneButtonColorAnimator = ValueAnimator.ofFloat(doneButtonEnabled ? 0 : 1f, doneButtonEnabled ? 1f : 0); + doneButtonColorAnimator.addUpdateListener(valueAnimator -> { + int color = getThemedColor(Theme.key_chat_messagePanelVoicePressed); + int defaultAlpha = Color.alpha(color); + doneButtonEnabledProgress = (float) valueAnimator.getAnimatedValue(); + if (doneCheckDrawable != null) { + doneCheckDrawable.setColorFilter(new PorterDuffColorFilter(ColorUtils.setAlphaComponent(color, (int) (defaultAlpha * (0.58f + 0.42f * doneButtonEnabledProgress))), PorterDuff.Mode.SRC_IN)); + } + if (doneButtonImage != null) { + doneButtonImage.invalidate(); } - sendMessageInternal(true, 0, true, false); }); - sendPopupLayout.addView(sendWithoutMarkdownButton, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 48)); + doneButtonColorAnimator.setDuration(150).start(); } - } - - sendPopupLayout.setupRadialSelectors(getThemedColor(Theme.key_dialogButtonSelector)); - - sendPopupWindow = new ActionBarPopupWindow(sendPopupLayout, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT) { - @Override - public void dismiss() { - super.dismiss(); - sendButton.invalidate(); + if (botCommandsMenuContainer != null) { + botCommandsMenuContainer.dismiss(); } - }; - sendPopupWindow.setAnimationEnabled(false); - sendPopupWindow.setAnimationStyle(R.style.PopupContextAnimation2); - sendPopupWindow.setOutsideTouchable(true); - sendPopupWindow.setClippingEnabled(true); - sendPopupWindow.setInputMethodMode(ActionBarPopupWindow.INPUT_METHOD_NOT_NEEDED); - sendPopupWindow.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_UNSPECIFIED); - sendPopupWindow.getContentView().setFocusableInTouchMode(true); - SharedConfig.removeScheduledOrNoSoundHint(); + checkBotMenu(); - if (delegate != null) { - delegate.onSendLongClick(); + if (editingCaption && !captionLimitBulletinShown && !MessagesController.getInstance(currentAccount).premiumLocked && !UserConfig.getInstance(currentAccount).isPremium() && codePointCount > MessagesController.getInstance(currentAccount).captionLengthLimitDefault && codePointCount < MessagesController.getInstance(currentAccount).captionLengthLimitPremium) { + captionLimitBulletinShown = true; + if (heightShouldBeChanged) { + AndroidUtilities.runOnUIThread(()->showCaptionLimitBulletin(), 300); + } else { + showCaptionLimitBulletin(); + } + } + } + }); + messageEditText.setEnabled(messageEditTextEnabled); + if (messageEditTextWatchers != null) { + for (TextWatcher textWatcher : messageEditTextWatchers) { + messageEditText.addTextChangedListener(textWatcher); } + messageEditTextWatchers.clear(); } + updateFieldHint(false); + updateSendAsButton(parentFragment != null && parentFragment.getFragmentBeginToShow()); + if (parentFragment != null) { + parentFragment.applyDraftMaybe(false); + } + } - sendPopupLayout.measure(MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), MeasureSpec.AT_MOST), 
MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), MeasureSpec.AT_MOST)); - sendPopupWindow.setFocusable(true); - view.getLocationInWindow(location); - int y; - if (keyboardVisible && ChatActivityEnterView.this.getMeasuredHeight() > AndroidUtilities.dp(topView != null && topView.getVisibility() == VISIBLE ? 48 + 58 : 58)) { - y = location[1] + view.getMeasuredHeight(); + public void addTextChangedListener(TextWatcher textWatcher) { + if (messageEditText != null) { + messageEditText.addTextChangedListener(textWatcher); } else { - y = location[1] - sendPopupLayout.getMeasuredHeight() - AndroidUtilities.dp(2); - } - sendPopupWindow.showAtLocation(view, Gravity.LEFT | Gravity.TOP, location[0] + view.getMeasuredWidth() - sendPopupLayout.getMeasuredWidth() + AndroidUtilities.dp(8), y); - sendPopupWindow.dimBehind(); - sendButton.invalidate(); - if (!NekoConfig.disableVibration.Bool()) { - try { - view.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignore) { + if (messageEditTextWatchers == null) { + messageEditTextWatchers = new ArrayList<>(); } + messageEditTextWatchers.add(textWatcher); } - - return false; } private void signComment(boolean save) { @@ -4763,12 +4922,12 @@ public void showTopView(boolean animated, final boolean openKeyboard) { private void showTopView(boolean animated, final boolean openKeyboard, boolean skipAwait) { if (topView == null || topViewShowed || getVisibility() != VISIBLE) { - if (recordedAudioPanel.getVisibility() != VISIBLE && (!forceShowSendButton || openKeyboard)) { + if ((recordedAudioPanel == null || recordedAudioPanel.getVisibility() != VISIBLE) && (!forceShowSendButton || openKeyboard)) { openKeyboard(); } return; } - boolean openKeyboardInternal = recordedAudioPanel.getVisibility() != VISIBLE && (!forceShowSendButton || openKeyboard) && (botReplyMarkup == null || editingMessageObject != null); + boolean openKeyboardInternal = (recordedAudioPanel == null || recordedAudioPanel.getVisibility() != VISIBLE) && (!forceShowSendButton || openKeyboard) && (botReplyMarkup == null || editingMessageObject != null); if (!skipAwait && animated && openKeyboardInternal && !(keyboardVisible || isPopupShowing())) { openKeyboard(); if (showTopViewRunnable != null) { @@ -4815,17 +4974,24 @@ public void onAnimationEnd(Animator animation) { topLineView.setAlpha(1.0f); } if (openKeyboardInternal) { - messageEditText.requestFocus(); + if (messageEditText != null) { + messageEditText.requestFocus(); + } openKeyboard(); } } } public void onEditTimeExpired() { - doneButtonContainer.setVisibility(View.GONE); + if (doneButtonContainer != null) { + doneButtonContainer.setVisibility(View.GONE); + } } public void showEditDoneProgress(final boolean show, boolean animated) { + if (doneButtonContainer == null) { + return; + } if (doneButtonAnimation != null) { doneButtonAnimation.cancel(); } @@ -4958,11 +5124,15 @@ public boolean isTopViewVisible() { } public void onAdjustPanTransitionUpdate(float y, float progress, boolean keyboardVisible) { - botWebViewMenuContainer.setTranslationY(y); + if (botWebViewMenuContainer != null) { + botWebViewMenuContainer.setTranslationY(y); + } } public void onAdjustPanTransitionEnd() { - botWebViewMenuContainer.onPanTransitionEnd(); + if (botWebViewMenuContainer != null) { + botWebViewMenuContainer.onPanTransitionEnd(); + } if (onKeyboardClosed != null) { onKeyboardClosed.run(); onKeyboardClosed = null; @@ -4970,7 +5140,9 @@ public void onAdjustPanTransitionEnd() { } 
public void onAdjustPanTransitionStart(boolean keyboardVisible, int contentHeight) { - botWebViewMenuContainer.onPanTransitionStart(keyboardVisible, contentHeight); + if (botWebViewMenuContainer != null) { + botWebViewMenuContainer.onPanTransitionStart(keyboardVisible, contentHeight); + } if (keyboardVisible && showTopViewRunnable != null) { AndroidUtilities.cancelRunOnUIThread(showTopViewRunnable); showTopViewRunnable.run(); @@ -4981,7 +5153,7 @@ public void onAdjustPanTransitionStart(boolean keyboardVisible, int contentHeigh setTextFieldRunnable.run(); } - if (keyboardVisible && messageEditText.hasFocus() && hasBotWebView() && botCommandsMenuIsShowing()) { + if (keyboardVisible && messageEditText != null && messageEditText.hasFocus() && hasBotWebView() && botCommandsMenuIsShowing() && botWebViewMenuContainer != null) { botWebViewMenuContainer.dismiss(); } } @@ -5054,6 +5226,7 @@ public void onDestroy() { NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.sendingMessagesChanged); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.audioRecordTooShort); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.updateBotMenuButton); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.didUpdatePremiumGiftFieldIcon); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); if (emojiView != null) { emojiView.onDestroy(); @@ -5085,14 +5258,42 @@ public void checkChannelRights() { } TLRPC.Chat chat = parentFragment.getCurrentChat(); TLRPC.UserFull userFull = parentFragment.getCurrentUserInfo(); + emojiButtonRestricted = false; + stickersEnabled = true; + sendPlainEnabled = true; + sendRoundEnabled = true; + sendVoiceEnabled = true; if (chat != null) { - audioVideoButtonContainer.setAlpha(ChatObject.canSendMedia(chat) ? 1.0f : 0.5f); - if (emojiView != null) { - emojiView.setStickersBanned(!ChatObject.canSendStickers(chat), chat.id); + audioVideoButtonContainer.setAlpha(ChatObject.canSendVoice(chat) || (ChatObject.canSendRoundVideo(chat) && hasRecordVideo)? 1.0f : 0.5f); + + stickersEnabled = ChatObject.canSendStickers(chat); + sendPlainEnabled = ChatObject.canSendPlain(chat); + sendPlainEnabled = ChatObject.canSendPlain(chat); + emojiButtonRestricted = !stickersEnabled && !sendPlainEnabled; + emojiButton.setAlpha(emojiButtonRestricted ? 0.5f : 1.0f); + if (!emojiButtonRestricted) { + if (emojiView != null) { + emojiView.setStickersBanned(!ChatObject.canSendPlain(chat), !ChatObject.canSendStickers(chat), chat.id); + } } + sendRoundEnabled = ChatObject.canSendRoundVideo(chat); + sendVoiceEnabled = ChatObject.canSendVoice(chat); } else if (userFull != null) { audioVideoButtonContainer.setAlpha(userFull.voice_messages_forbidden ? 
0.5f : 1.0f); } + updateFieldHint(false); + boolean currentModeVideo = isInVideoMode; + if (!sendRoundEnabled && currentModeVideo) { + currentModeVideo = false; + } + if (!sendVoiceEnabled && !currentModeVideo) { + if (hasRecordVideo) { + currentModeVideo = true; + } else { + currentModeVideo = false; + } + } + setRecordVideoButtonVisible(currentModeVideo, false); } public void onBeginHide() { @@ -5144,7 +5345,7 @@ public void onResume() { int visibility = getVisibility(); if (showKeyboardOnResume && parentFragment != null && parentFragment.isLastFragment()) { showKeyboardOnResume = false; - if (searchingType == 0) { + if (searchingType == 0 && messageEditText != null) { messageEditText.requestFocus(); } AndroidUtilities.showKeyboard(messageEditText); @@ -5159,7 +5360,10 @@ public void onResume() { @Override public void setVisibility(int visibility) { super.setVisibility(visibility); - messageEditText.setEnabled(visibility == VISIBLE); + messageEditTextEnabled = visibility == VISIBLE; + if (messageEditText != null) { + messageEditText.setEnabled(messageEditTextEnabled); + } } public void setDialogId(long id, int account) { @@ -5196,10 +5400,20 @@ public void setDialogId(long id, int account) { NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.sendingMessagesChanged); } + sendPlainEnabled = true; + if (DialogObject.isChatDialog(dialog_id)) { + TLRPC.Chat chat = accountInstance.getMessagesController().getChat(-dialog_id); + sendPlainEnabled = ChatObject.canSendPlain(chat); + } + updateScheduleButton(false); + updateGiftButton(false); checkRoundVideo(); + checkChannelRights(); updateFieldHint(false); - updateSendAsButton(parentFragment != null && parentFragment.getFragmentBeginToShow()); + if (messageEditText != null) { + updateSendAsButton(parentFragment != null && parentFragment.getFragmentBeginToShow()); + } } public void setChatInfo(TLRPC.ChatFull chatInfo) { @@ -5220,6 +5434,8 @@ public void checkRoundVideo() { return; } hasRecordVideo = true; + sendRoundEnabled = true; + sendVoiceEnabled = true; boolean isChannel = false; if (DialogObject.isChatDialog(dialog_id)) { TLRPC.Chat chat = accountInstance.getMessagesController().getChat(-dialog_id); @@ -5227,20 +5443,32 @@ public void checkRoundVideo() { if (isChannel && !chat.creator && (chat.admin_rights == null || !chat.admin_rights.post_messages)) { hasRecordVideo = false; } + sendRoundEnabled = ChatObject.canSendRoundVideo(chat); + sendVoiceEnabled = ChatObject.canSendVoice(chat); } if (!SharedConfig.inappCamera) { hasRecordVideo = false; } + boolean currentModeVideo = false; if (hasRecordVideo) { if (SharedConfig.hasCameraCache) { CameraController.getInstance().initCamera(null); } SharedPreferences preferences = MessagesController.getGlobalMainSettings(); - boolean currentModeVideo = preferences.getBoolean(isChannel ? "currentModeVideoChannel" : "currentModeVideo", isChannel); - setRecordVideoButtonVisible(currentModeVideo, false); - } else { - setRecordVideoButtonVisible(false, false); + currentModeVideo = preferences.getBoolean(isChannel ? 
"currentModeVideoChannel" : "currentModeVideo", isChannel); + } + + if (!sendRoundEnabled && currentModeVideo) { + currentModeVideo = false; + } + if (!sendVoiceEnabled && !currentModeVideo) { + if (hasRecordVideo) { + currentModeVideo = true; + } else { + currentModeVideo = false; + } } + setRecordVideoButtonVisible(currentModeVideo, false); } public boolean isInVideoMode() { @@ -5256,6 +5484,21 @@ public MessageObject getReplyingMessageObject() { } public void updateFieldHint(boolean animated) { + if (messageEditText == null) { + return; + } + if (!sendPlainEnabled && !isEditingMessage()) { + SpannableStringBuilder spannableStringBuilder = new SpannableStringBuilder(" d " + LocaleController.getString("PlainTextRestrictedHint", R.string.PlainTextRestrictedHint)); + spannableStringBuilder.setSpan(new ColoredImageSpan(R.drawable.msg_mini_lock3), 1, 2, 0); + messageEditText.setHintText(spannableStringBuilder, animated); + messageEditText.setText(null); + messageEditText.setEnabled(false); + messageEditText.setInputType(EditorInfo.IME_ACTION_NONE); + return; + } else { + messageEditText.setEnabled(true); + messageEditText.setInputType(commonInputType); + } if (replyingMessageObject != null && replyingMessageObject.messageOwner.reply_markup != null && !TextUtils.isEmpty(replyingMessageObject.messageOwner.reply_markup.placeholder)) { messageEditText.setHintText(replyingMessageObject.messageOwner.reply_markup.placeholder, animated); } else if (editingMessageObject != null) { @@ -5335,7 +5578,9 @@ public void setReplyingMessageObject(MessageObject messageObject) { botMessageObject = botButtonsMessageObject; } replyingMessageObject = messageObject; - setButtons(replyingMessageObject, true); + if (!(parentFragment != null && parentFragment.isTopic && parentFragment.getThreadMessage() == replyingMessageObject)) { + setButtons(replyingMessageObject, true); + } } else if (replyingMessageObject == botButtonsMessageObject) { replyingMessageObject = null; setButtons(botMessageObject, false); @@ -5436,7 +5681,9 @@ private void hideRecordedAudioPanel(boolean wasSent) { audioToSend = null; audioToSendMessageObject = null; videoToSendMessageObject = null; - videoTimelineView.destroy(); + if (videoTimelineView != null) { + videoTimelineView.destroy(); + } if (audioVideoSendButton != null) { audioVideoSendButton.setVisibility(View.VISIBLE); @@ -5454,7 +5701,7 @@ private void hideRecordedAudioPanel(boolean wasSent) { recordPannelAnimation = new AnimatorSet(); recordPannelAnimation.playTogether( - ObjectAnimator.ofFloat(emojiButton, View.ALPHA, 1.0f), + ObjectAnimator.ofFloat(emojiButton, View.ALPHA, emojiButtonRestricted ? 
0.5f : 1.0f), ObjectAnimator.ofFloat(emojiButton, View.SCALE_X, 1.0f), ObjectAnimator.ofFloat(emojiButton, View.SCALE_Y, 1.0f), ObjectAnimator.ofFloat(recordDeleteImageView, View.ALPHA, 0.0f), @@ -5484,13 +5731,19 @@ private void hideRecordedAudioPanel(boolean wasSent) { recordPannelAnimation.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { - recordedAudioPanel.setVisibility(GONE); - messageEditText.requestFocus(); + if (recordedAudioPanel != null) { + recordedAudioPanel.setVisibility(GONE); + } + if (messageEditText != null) { + messageEditText.requestFocus(); + } } }); } else { - recordDeleteImageView.playAnimation(); + if (recordDeleteImageView != null) { + recordDeleteImageView.playAnimation(); + } AnimatorSet exitAnimation = new AnimatorSet(); if (isInVideoMode()) { @@ -5501,8 +5754,10 @@ public void onAnimationEnd(Animator animation) { ObjectAnimator.ofFloat(messageEditText, View.TRANSLATION_X, 0) ); } else { - messageEditText.setAlpha(1f); - messageEditText.setTranslationX(0); + if (messageEditText != null) { + messageEditText.setAlpha(1f); + messageEditText.setTranslationX(0); + } exitAnimation.playTogether( ObjectAnimator.ofFloat(recordedAudioSeekBar, View.ALPHA, 0.0f), ObjectAnimator.ofFloat(recordedAudioPlayButton, View.ALPHA, 0.0f), @@ -5560,7 +5815,7 @@ public void onAnimationEnd(Animator animation) { ObjectAnimator.ofFloat(recordDeleteImageView, View.SCALE_Y, 0.0f), ObjectAnimator.ofFloat(recordDeleteImageView, View.ALPHA, 0.0f), - ObjectAnimator.ofFloat(emojiButton, View.ALPHA, 1.0f), + ObjectAnimator.ofFloat(emojiButton, View.ALPHA, emojiButtonRestricted ? 0.5f : 1.0f), ObjectAnimator.ofFloat(emojiButton, View.SCALE_X, 1.0f), ObjectAnimator.ofFloat(emojiButton, View.SCALE_Y, 1.0f) ); @@ -5606,21 +5861,34 @@ public void onAnimationEnd(Animator animation) { recordPannelAnimation.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { - recordedAudioSeekBar.setAlpha(1f); - recordedAudioSeekBar.setTranslationX(0); - recordedAudioPlayButton.setAlpha(1f); - recordedAudioPlayButton.setTranslationX(0); - recordedAudioBackground.setAlpha(1f); - recordedAudioBackground.setTranslationX(0); - recordedAudioTimeTextView.setAlpha(1f); - recordedAudioTimeTextView.setTranslationX(0); - videoTimelineView.setAlpha(1f); - videoTimelineView.setTranslationX(0); - messageEditText.setAlpha(1f); - messageEditText.setTranslationX(0); - messageEditText.requestFocus(); - recordedAudioPanel.setVisibility(GONE); - + if (recordedAudioSeekBar != null) { + recordedAudioSeekBar.setAlpha(1f); + recordedAudioSeekBar.setTranslationX(0); + } + if (recordedAudioPlayButton != null) { + recordedAudioPlayButton.setAlpha(1f); + recordedAudioPlayButton.setTranslationX(0); + } + if (recordedAudioBackground != null) { + recordedAudioBackground.setAlpha(1f); + recordedAudioBackground.setTranslationX(0); + } + if (recordedAudioTimeTextView != null) { + recordedAudioTimeTextView.setAlpha(1f); + recordedAudioTimeTextView.setTranslationX(0); + } + if (videoTimelineView != null) { + videoTimelineView.setAlpha(1f); + videoTimelineView.setTranslationX(0); + } + if (messageEditText != null) { + messageEditText.setAlpha(1f); + messageEditText.setTranslationX(0); + messageEditText.requestFocus(); + } + if (recordedAudioPanel != null) { + recordedAudioPanel.setVisibility(GONE); + } } }); } @@ -5684,7 +5952,7 @@ private void sendMessageInternal(boolean notify, int scheduleDate, boolean withM checkSendButton(true); return; } - 
CharSequence message = messageEditText.getText(); + CharSequence message = messageEditText == null ? "" : messageEditText.getText(); if (parentFragment != null) { TLRPC.Chat chat = parentFragment.getCurrentChat(); if (chat != null && chat.slowmode_enabled && !ChatObject.hasAdminRights(chat)) { @@ -5706,7 +5974,9 @@ private void sendMessageInternal(boolean notify, int scheduleDate, boolean withM } if (processSendingText(message, notify, scheduleDate, withMarkdown, withGame)) { if (delegate.hasForwardingMessages() || (scheduleDate != 0 && !isInScheduleMode()) || isInScheduleMode()) { + if (messageEditText != null) { messageEditText.setText(""); + } if (delegate != null) { delegate.onMessageSend(message, notify, scheduleDate); } @@ -5715,7 +5985,9 @@ private void sendMessageInternal(boolean notify, int scheduleDate, boolean withM AndroidUtilities.runOnUIThread(moveToSendStateRunnable = () -> { moveToSendStateRunnable = null; hideTopView(true); + if (messageEditText != null) { messageEditText.setText(""); + } if (delegate != null) { delegate.onMessageSend(message, notify, scheduleDate); } @@ -5735,9 +6007,6 @@ public static boolean checkPremiumAnimatedEmoji(int currentAccount, long dialogI if (message == null || parentFragment == null) { return false; } - if (container == null) { - container = parentFragment.getLayoutContainer(); - } final boolean isPremium = UserConfig.getInstance(currentAccount).isPremium(); if (!isPremium && UserConfig.getInstance(currentAccount).getClientUserId() != dialogId && message instanceof Spanned) { AnimatedEmojiSpan[] animatedEmojis = ((Spanned) message).getSpans(0, message.length(), AnimatedEmojiSpan.class); @@ -5748,20 +6017,75 @@ public static boolean checkPremiumAnimatedEmoji(int currentAccount, long dialogI if (emoji == null) { emoji = AnimatedEmojiDrawable.findDocument(currentAccount, animatedEmojis[i].getDocumentId()); } - if (emoji != null && !MessageObject.isFreeEmoji(emoji)) { - BulletinFactory.of(container, parentFragment.getResourceProvider()) - .createEmojiBulletin( - emoji, - AndroidUtilities.replaceTags(LocaleController.getString("UnlockPremiumEmojiHint", R.string.UnlockPremiumEmojiHint)), - LocaleController.getString("PremiumMore", R.string.PremiumMore), - () -> { - if (parentFragment != null) { - new PremiumFeatureBottomSheet(parentFragment, PremiumPreviewFragment.PREMIUM_FEATURE_ANIMATED_EMOJI, false).show(); - } else if (parentFragment.getContext() instanceof LaunchActivity) { - ((LaunchActivity) parentFragment.getContext()).presentFragment(new PremiumPreviewFragment(null)); - } - } - ).show(); + long documentId = animatedEmojis[i].getDocumentId(); + if (emoji == null) { + ArrayList sets1 = MediaDataController.getInstance(currentAccount).getStickerSets(MediaDataController.TYPE_EMOJIPACKS); + for (TLRPC.TL_messages_stickerSet set : sets1) { + if (set != null && set.documents != null && !set.documents.isEmpty()) { + for (TLRPC.Document document : set.documents) { + if (document.id == documentId) { + emoji = document; + break; + } + } + } + if (emoji != null) { + break; + } + } + } + if (emoji == null) { + ArrayList sets2 = MediaDataController.getInstance(currentAccount).getFeaturedEmojiSets(); + for (TLRPC.StickerSetCovered set : sets2) { + if (set != null && set.covers != null && !set.covers.isEmpty()) { + for (TLRPC.Document document : set.covers) { + if (document.id == documentId) { + emoji = document; + break; + } + } + } + if (emoji != null) { + break; + } + ArrayList documents = null; + if (set instanceof TLRPC.TL_stickerSetFullCovered) { + 
documents = ((TLRPC.TL_stickerSetFullCovered) set).documents; + } else if (set instanceof TLRPC.TL_stickerSetNoCovered && set.set != null) { + TLRPC.TL_inputStickerSetID inputStickerSetID = new TLRPC.TL_inputStickerSetID(); + inputStickerSetID.id = set.set.id; + TLRPC.TL_messages_stickerSet fullSet = MediaDataController.getInstance(currentAccount).getStickerSet(inputStickerSetID, true); + if (fullSet != null && fullSet.documents != null) { + documents = fullSet.documents; + } + } + if (documents != null && !documents.isEmpty()) { + for (TLRPC.Document document : documents) { + if (document.id == documentId) { + emoji = document; + break; + } + } + } + if (emoji != null) { + break; + } + } + } + if (emoji == null || !MessageObject.isFreeEmoji(emoji)) { + BulletinFactory.of(parentFragment) + .createEmojiBulletin( + emoji, + AndroidUtilities.replaceTags(LocaleController.getString("UnlockPremiumEmojiHint", R.string.UnlockPremiumEmojiHint)), + LocaleController.getString("PremiumMore", R.string.PremiumMore), + () -> { + if (parentFragment != null) { + new PremiumFeatureBottomSheet(parentFragment, PremiumPreviewFragment.PREMIUM_FEATURE_ANIMATED_EMOJI, false).show(); + } else if (parentFragment.getContext() instanceof LaunchActivity) { + ((LaunchActivity) parentFragment.getContext()).presentFragment(new PremiumPreviewFragment(null)); + } + } + ).show(); return true; } } @@ -5791,10 +6115,11 @@ public void doneEditingMessage(boolean withMarkdown) { return; } if (currentLimit - codePointCount < 0) { - AndroidUtilities.shakeView(captionLimitView); - try { - captionLimitView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignored) { + if (captionLimitView != null) { + AndroidUtilities.shakeViewSpring(captionLimitView, 3.5f); + try { + captionLimitView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } catch (Exception ignored) {} } if (!MessagesController.getInstance(currentAccount).premiumLocked && MessagesController.getInstance(currentAccount).captionLengthLimitPremium > codePointCount) { @@ -5815,13 +6140,13 @@ public void doneEditingMessage(boolean withMarkdown) { }, 200); } } - CharSequence text = messageEditText.getText(); + CharSequence text = messageEditText == null ? "" : messageEditText.getText(); if (editingMessageObject == null || editingMessageObject.type != MessageObject.TYPE_EMOJIS) { text = AndroidUtilities.getTrimmedString(text); } CharSequence[] message = new CharSequence[]{text}; ArrayList entities = MediaDataController.getInstance(currentAccount).getEntities(message, supportsSendingNewEntities()); - if (!TextUtils.equals(message[0], editingMessageObject.messageText) || entities != null && !entities.isEmpty() || (entities == null || entities.isEmpty()) && !editingMessageObject.messageOwner.entities.isEmpty() || editingMessageObject.messageOwner.media instanceof TLRPC.TL_messageMediaWebPage) { + if (!TextUtils.equals(message[0], editingMessageObject.messageText) || entities != null && !entities.isEmpty() || !editingMessageObject.messageOwner.entities.isEmpty() || editingMessageObject.messageOwner.media instanceof TLRPC.TL_messageMediaWebPage) { editingMessageObject.editingMessage = withMarkdown ? message[0] : messageEditText.getText().toString(); editingMessageObject.editingMessageEntities = withMarkdown ? 
entities : new ArrayList<>(); editingMessageObject.editingMessageSearchWebPage = messageWebPageSearch; @@ -5892,9 +6217,14 @@ public boolean processSendingText(CharSequence text, boolean notify, int schedul if (!delegate.hasForwardingMessages()) { sendAnimationData = new MessageObject.SendAnimationData(); sendAnimationData.width = sendAnimationData.height = AndroidUtilities.dp(22); - messageEditText.getLocationInWindow(location); - sendAnimationData.x = location[0] + AndroidUtilities.dp(11); - sendAnimationData.y = location[1] + AndroidUtilities.dp(8 + 11); + if (messageEditText != null) { + messageEditText.getLocationInWindow(location); + sendAnimationData.x = location[0] + AndroidUtilities.dp(11); + sendAnimationData.y = location[1] + AndroidUtilities.dp(8 + 11); + } else { + sendAnimationData.x = AndroidUtilities.dp(48 + 11); + sendAnimationData.y = AndroidUtilities.displaySize.y - AndroidUtilities.dp(8 + 11); + } } if (!withMarkdown) { message[0] = text.toString(); @@ -5925,7 +6255,7 @@ private void checkSendButton(boolean animated) { if (isPaused) { animated = false; } - CharSequence message = AndroidUtilities.getTrimmedString(messageEditText.getText()); + CharSequence message = messageEditText == null ? "" : AndroidUtilities.getTrimmedString(messageEditText.getText()); if (slowModeTimer > 0 && slowModeTimer != Integer.MAX_VALUE && !isInScheduleMode()) { if (slowModeButton.getVisibility() != VISIBLE) { if (animated) { @@ -5949,17 +6279,20 @@ private void checkSendButton(boolean animated) { animators.add(ObjectAnimator.ofFloat(attachLayout, View.SCALE_X, 0.0f)); scheduleButtonHidden = false; boolean hasScheduled = delegate != null && delegate.hasScheduledMessages(); - if (scheduledButton != null) { + if (hasScheduled) { + createScheduledButton(); + } + if (scheduledButton != null) { scheduledButton.setScaleY(1.0f); if (hasScheduled) { scheduledButton.setVisibility(VISIBLE); scheduledButton.setTag(1); scheduledButton.setPivotX(AndroidUtilities.dp(48)); - animators.add(ObjectAnimator.ofFloat(scheduledButton, View.TRANSLATION_X, AndroidUtilities.dp(botButton != null && botButton.getVisibility() == VISIBLE ? 96 : 48))); + animators.add(ObjectAnimator.ofFloat(scheduledButton, View.TRANSLATION_X, AndroidUtilities.dp(botButton != null && botButton.getVisibility() == VISIBLE ? 96 : 48) - AndroidUtilities.dp(giftButton != null && giftButton.getVisibility() == VISIBLE ? 48 : 0))); animators.add(ObjectAnimator.ofFloat(scheduledButton, View.ALPHA, 1.0f)); animators.add(ObjectAnimator.ofFloat(scheduledButton, View.SCALE_X, 1.0f)); } else { - scheduledButton.setTranslationX(AndroidUtilities.dp(botButton != null && botButton.getVisibility() == VISIBLE ? 96 : 48)); + scheduledButton.setTranslationX(AndroidUtilities.dp(botButton != null && botButton.getVisibility() == VISIBLE ? 96 : 48) - AndroidUtilities.dp(giftButton != null && giftButton.getVisibility() == VISIBLE ? 
48 : 0)); scheduledButton.setAlpha(1.0f); scheduledButton.setScaleX(1.0f); } @@ -6007,7 +6340,7 @@ public void onAnimationCancel(Animator animation) { animators.add(ObjectAnimator.ofFloat(audioVideoButtonContainer, View.SCALE_Y, 0.1f)); animators.add(ObjectAnimator.ofFloat(audioVideoButtonContainer, View.ALPHA, 0.0f)); } - if (expandStickersButton.getVisibility() == VISIBLE) { + if (expandStickersButton != null &&expandStickersButton.getVisibility() == VISIBLE) { animators.add(ObjectAnimator.ofFloat(expandStickersButton, View.SCALE_X, 0.1f)); animators.add(ObjectAnimator.ofFloat(expandStickersButton, View.SCALE_Y, 0.1f)); animators.add(ObjectAnimator.ofFloat(expandStickersButton, View.ALPHA, 0.0f)); @@ -6035,7 +6368,9 @@ public void onAnimationEnd(Animator animation) { sendButton.setVisibility(GONE); cancelBotButton.setVisibility(GONE); audioVideoButtonContainer.setVisibility(GONE); + if (expandStickersButton != null) { expandStickersButton.setVisibility(GONE); + } runningAnimation = null; runningAnimationType = 0; } @@ -6070,7 +6405,7 @@ public void onAnimationCancel(Animator animation) { cancelBotButton.setAlpha(0.0f); cancelBotButton.setVisibility(GONE); - if (expandStickersButton.getVisibility() == VISIBLE) { + if (expandStickersButton != null && expandStickersButton.getVisibility() == VISIBLE) { expandStickersButton.setScaleX(0.1f); expandStickersButton.setScaleY(0.1f); expandStickersButton.setAlpha(0.0f); @@ -6089,12 +6424,16 @@ public void onAnimationCancel(Animator animation) { } } scheduleButtonHidden = false; + final boolean hasScheduled = delegate != null && delegate.hasScheduledMessages(); + if (hasScheduled) { + createScheduledButton(); + } if (scheduledButton != null) { - if (delegate != null && delegate.hasScheduledMessages()) { + if (hasScheduled) { scheduledButton.setVisibility(VISIBLE); scheduledButton.setTag(1); } - scheduledButton.setTranslationX(AndroidUtilities.dp(botButton != null && botButton.getVisibility() == VISIBLE ? 96 : 48)); + scheduledButton.setTranslationX(AndroidUtilities.dp(botButton != null && botButton.getVisibility() == VISIBLE ? 96 : 48) - AndroidUtilities.dp(giftButton != null && giftButton.getVisibility() == VISIBLE ? 48 : 0)); scheduledButton.setAlpha(1.0f); scheduledButton.setScaleX(1.0f); scheduledButton.setScaleY(1.0f); @@ -6103,9 +6442,9 @@ public void onAnimationCancel(Animator animation) { } } } else if (message.length() > 0 || forceShowSendButton || audioToSend != null || videoToSendMessageObject != null || slowModeTimer == Integer.MAX_VALUE && !isInScheduleMode()) { - final String caption = messageEditText.getCaption(); - boolean showBotButton = caption != null && (sendButton.getVisibility() == VISIBLE || expandStickersButton.getVisibility() == VISIBLE); - boolean showSendButton = caption == null && (cancelBotButton.getVisibility() == VISIBLE || expandStickersButton.getVisibility() == VISIBLE); + final String caption = messageEditText == null ? 
null : messageEditText.getCaption(); + boolean showBotButton = caption != null && (sendButton.getVisibility() == VISIBLE || expandStickersButton != null && expandStickersButton.getVisibility() == VISIBLE); + boolean showSendButton = caption == null && (cancelBotButton.getVisibility() == VISIBLE || expandStickersButton != null && expandStickersButton.getVisibility() == VISIBLE); int color; if (slowModeTimer == Integer.MAX_VALUE && !isInScheduleMode()) { color = getThemedColor(Theme.key_chat_messagePanelIcons); @@ -6115,7 +6454,7 @@ public void onAnimationCancel(Animator animation) { Theme.setSelectorDrawableColor(sendButton.getBackground(), Color.argb(24, Color.red(color), Color.green(color), Color.blue(color)), true); if (audioVideoButtonContainer.getVisibility() == VISIBLE || slowModeButton.getVisibility() == VISIBLE || showBotButton || showSendButton) { if (animated) { - if (runningAnimationType == 1 && messageEditText.getCaption() == null || runningAnimationType == 3 && caption != null) { + if (runningAnimationType == 1 && caption == null || runningAnimationType == 3 && caption != null) { return; } if (runningAnimation != null) { @@ -6142,11 +6481,11 @@ public void onAnimationCancel(Animator animation) { scheduledButton.setTag(null); animators.add(ObjectAnimator.ofFloat(scheduledButton, View.ALPHA, 0.0f)); animators.add(ObjectAnimator.ofFloat(scheduledButton, View.SCALE_X, 0.0f)); - animators.add(ObjectAnimator.ofFloat(scheduledButton, View.TRANSLATION_X, AndroidUtilities.dp(botButton == null || botButton.getVisibility() == GONE ? 48 : 96))); + animators.add(ObjectAnimator.ofFloat(scheduledButton, View.TRANSLATION_X, AndroidUtilities.dp(botButton != null && botButton.getVisibility() == VISIBLE ? 96 : 48) - AndroidUtilities.dp(giftButton != null && giftButton.getVisibility() == VISIBLE ? 48 : 0))); } else { scheduledButton.setAlpha(0.0f); scheduledButton.setScaleX(0.0f); - scheduledButton.setTranslationX(AndroidUtilities.dp(botButton == null || botButton.getVisibility() == GONE ? 48 : 96)); + scheduledButton.setTranslationX(AndroidUtilities.dp(botButton != null && botButton.getVisibility() == VISIBLE ? 96 : 48) - AndroidUtilities.dp(giftButton != null && giftButton.getVisibility() == VISIBLE ? 
48 : 0)); } } runningAnimation2.playTogether(animators); @@ -6156,7 +6495,7 @@ public void onAnimationCancel(Animator animation) { public void onAnimationEnd(Animator animation) { if (animation.equals(runningAnimation2)) { attachLayout.setVisibility(GONE); - if (hasScheduled) { + if (hasScheduled && scheduledButton != null) { scheduledButton.setVisibility(GONE); } runningAnimation2 = null; @@ -6183,7 +6522,7 @@ public void onAnimationCancel(Animator animation) { runningAnimation = new AnimatorSet(); ArrayList animators = new ArrayList<>(); - if (NekoConfig.useChatAttachMediaMenu.Bool() && botButton.getVisibility() == VISIBLE) { + if (botButton != null && NekoConfig.useChatAttachMediaMenu.Bool() && botButton.getVisibility() == VISIBLE) { animators.add(ObjectAnimator.ofFloat(botButton, View.SCALE_X, 0.1f)); animators.add(ObjectAnimator.ofFloat(botButton, View.SCALE_Y, 0.1f)); animators.add(ObjectAnimator.ofFloat(botButton, View.ALPHA, 0.0f)); @@ -6193,7 +6532,7 @@ public void onAnimationCancel(Animator animation) { animators.add(ObjectAnimator.ofFloat(audioVideoButtonContainer, View.SCALE_Y, 0.1f)); animators.add(ObjectAnimator.ofFloat(audioVideoButtonContainer, View.ALPHA, 0.0f)); } - if (expandStickersButton.getVisibility() == VISIBLE) { + if (expandStickersButton != null && expandStickersButton.getVisibility() == VISIBLE) { animators.add(ObjectAnimator.ofFloat(expandStickersButton, View.SCALE_X, 0.1f)); animators.add(ObjectAnimator.ofFloat(expandStickersButton, View.SCALE_Y, 0.1f)); animators.add(ObjectAnimator.ofFloat(expandStickersButton, View.ALPHA, 0.0f)); @@ -6244,7 +6583,9 @@ public void onAnimationEnd(Animator animation) { cancelBotButton.setVisibility(GONE); } audioVideoButtonContainer.setVisibility(GONE); - expandStickersButton.setVisibility(GONE); + if (expandStickersButton != null) { + expandStickersButton.setVisibility(GONE); + } setSlowModeButtonVisible(false); runningAnimation = null; runningAnimationType = 0; @@ -6289,7 +6630,7 @@ public void onAnimationCancel(Animator animation) { sendButton.setAlpha(1.0f); cancelBotButton.setVisibility(GONE); } - if (expandStickersButton.getVisibility() == VISIBLE) { + if (expandStickersButton != null && expandStickersButton.getVisibility() == VISIBLE) { expandStickersButton.setScaleX(0.1f); expandStickersButton.setScaleY(0.1f); expandStickersButton.setAlpha(0.0f); @@ -6316,7 +6657,7 @@ public void onAnimationCancel(Animator animation) { scheduledButton.setAlpha(0.0f); scheduledButton.setScaleX(0.0f); scheduledButton.setScaleY(1.0f); - scheduledButton.setTranslationX(AndroidUtilities.dp(botButton == null || botButton.getVisibility() == GONE ? 48 : 96)); + scheduledButton.setTranslationX(AndroidUtilities.dp(botButton != null && botButton.getVisibility() == VISIBLE ? 96 : 48) - AndroidUtilities.dp(giftButton != null && giftButton.getVisibility() == VISIBLE ? 
48 : 0)); } } } @@ -6343,6 +6684,9 @@ public void onAnimationCancel(Animator animation) { animators.add(ObjectAnimator.ofFloat(attachLayout, View.SCALE_X, 1.0f)); boolean hasScheduled = delegate != null && delegate.hasScheduledMessages(); scheduleButtonHidden = false; + if (hasScheduled) { + createScheduledButton(); + } if (scheduledButton != null) { scheduledButton.setScaleY(1.0f); if (hasScheduled) { @@ -6383,6 +6727,7 @@ public void onAnimationCancel(Animator animation) { } } + createExpandStickersButton(); expandStickersButton.setVisibility(VISIBLE); runningAnimation = new AnimatorSet(); runningAnimationType = 4; @@ -6450,6 +6795,7 @@ public void onAnimationCancel(Animator animation) { audioVideoButtonContainer.setScaleY(0.1f); audioVideoButtonContainer.setAlpha(0.0f); audioVideoButtonContainer.setVisibility(GONE); + createExpandStickersButton(); expandStickersButton.setScaleX(1.0f); expandStickersButton.setScaleY(1.0f); expandStickersButton.setAlpha(1.0f); @@ -6463,8 +6809,12 @@ public void onAnimationCancel(Animator animation) { updateFieldRight(1); } scheduleButtonHidden = false; + final boolean hasScheduled = delegate != null && delegate.hasScheduledMessages(); + if (hasScheduled) { + createScheduledButton(); + } if (scheduledButton != null) { - if (delegate != null && delegate.hasScheduledMessages()) { + if (hasScheduled) { scheduledButton.setVisibility(VISIBLE); scheduledButton.setTag(1); } @@ -6474,7 +6824,7 @@ public void onAnimationCancel(Animator animation) { scheduledButton.setTranslationX(0); } } - } else if (sendButton.getVisibility() == VISIBLE || cancelBotButton.getVisibility() == VISIBLE || expandStickersButton.getVisibility() == VISIBLE || slowModeButton.getVisibility() == VISIBLE) { + } else if (sendButton.getVisibility() == VISIBLE || cancelBotButton.getVisibility() == VISIBLE || expandStickersButton != null && expandStickersButton.getVisibility() == VISIBLE || slowModeButton.getVisibility() == VISIBLE) { if (animated) { if (runningAnimationType == 2) { return; @@ -6501,6 +6851,9 @@ public void onAnimationCancel(Animator animation) { animators.add(ObjectAnimator.ofFloat(attachLayout, View.SCALE_X, 1.0f)); boolean hasScheduled = delegate != null && delegate.hasScheduledMessages(); scheduleButtonHidden = false; + if (hasScheduled) { + createScheduledButton(); + } if (scheduledButton != null) { if (hasScheduled) { scheduledButton.setVisibility(VISIBLE); @@ -6508,7 +6861,7 @@ public void onAnimationCancel(Animator animation) { scheduledButton.setPivotX(AndroidUtilities.dp(48)); animators.add(ObjectAnimator.ofFloat(scheduledButton, View.ALPHA, 1.0f)); animators.add(ObjectAnimator.ofFloat(scheduledButton, View.SCALE_X, 1.0f)); - animators.add(ObjectAnimator.ofFloat(scheduledButton, View.TRANSLATION_X, 0)); + animators.add(ObjectAnimator.ofFloat(scheduledButton, View.TRANSLATION_X, giftButton != null && giftButton.getVisibility() == VISIBLE ? -AndroidUtilities.dp(48) : 0)); } else { scheduledButton.setAlpha(1.0f); scheduledButton.setScaleX(1.0f); @@ -6562,7 +6915,7 @@ public void onAnimationCancel(Animator animation) { TLRPC.Chat chat = parentFragment.getCurrentChat(); TLRPC.UserFull userFull = parentFragment.getCurrentUserInfo(); if (chat != null) { - alpha = ChatObject.canSendMedia(chat) ? 1.0f : 0.5f; + alpha = (ChatObject.canSendVoice(chat) || ChatObject.canSendRoundVideo(chat)) ? 1.0f : 0.5f; } else if (userFull != null) { alpha = userFull.voice_messages_forbidden ? 
0.5f : 1.0f; }
@@ -6572,7 +6925,7 @@ public void onAnimationCancel(Animator animation) {
                 animators.add(ObjectAnimator.ofFloat(cancelBotButton, View.SCALE_X, 0.1f));
                 animators.add(ObjectAnimator.ofFloat(cancelBotButton, View.SCALE_Y, 0.1f));
                 animators.add(ObjectAnimator.ofFloat(cancelBotButton, View.ALPHA, 0.0f));
-            } else if (expandStickersButton.getVisibility() == VISIBLE) {
+            } else if (expandStickersButton != null && expandStickersButton.getVisibility() == VISIBLE) {
                 animators.add(ObjectAnimator.ofFloat(expandStickersButton, View.SCALE_X, 0.1f));
                 animators.add(ObjectAnimator.ofFloat(expandStickersButton, View.SCALE_Y, 0.1f));
                 animators.add(ObjectAnimator.ofFloat(expandStickersButton, View.ALPHA, 0.0f));
@@ -6623,10 +6976,12 @@ public void onAnimationCancel(Animator animation) {
                     cancelBotButton.setScaleY(0.1f);
                     cancelBotButton.setAlpha(0.0f);
                     cancelBotButton.setVisibility(GONE);
-                    expandStickersButton.setScaleX(0.1f);
-                    expandStickersButton.setScaleY(0.1f);
-                    expandStickersButton.setAlpha(0.0f);
-                    expandStickersButton.setVisibility(GONE);
+                    if (expandStickersButton != null) {
+                        expandStickersButton.setScaleX(0.1f);
+                        expandStickersButton.setScaleY(0.1f);
+                        expandStickersButton.setAlpha(0.0f);
+                        expandStickersButton.setVisibility(GONE);
+                    }
                     audioVideoButtonContainer.setScaleX(1.0f);
                     audioVideoButtonContainer.setScaleY(1.0f);
                     audioVideoButtonContainer.setAlpha(1.0f);
@@ -6642,6 +6997,10 @@ public void onAnimationCancel(Animator animation) {
                 updateFieldRight(1);
             }
             scheduleButtonHidden = false;
+            final boolean hasScheduled = delegate != null && delegate.hasScheduledMessages();
+            if (hasScheduled) {
+                createScheduledButton();
+            }
             if (scheduledButton != null) {
                 if (delegate != null && delegate.hasScheduledMessages()) {
                     scheduledButton.setVisibility(VISIBLE);
@@ -6659,7 +7018,7 @@ public void onAnimationCancel(Animator animation) {
     private void setSlowModeButtonVisible(boolean visible) {
         slowModeButton.setVisibility(visible ? VISIBLE : GONE);
         int padding = visible ? AndroidUtilities.dp(16) : 0;
-        if (messageEditText.getPaddingRight() != padding) {
+        if (messageEditText != null && messageEditText.getPaddingRight() != padding) {
             messageEditText.setPadding(0, AndroidUtilities.dp(11), padding, AndroidUtilities.dp(12));
         }
     }
@@ -6718,12 +7077,13 @@ private void updateRecordInterface(int recordState) {
             AndroidUtilities.cancelRunOnUIThread(moveToSendStateRunnable);
             moveToSendStateRunnable = null;
         }
-        recordCircle.voiceEnterTransitionInProgress = false;
-
+        if (recordCircle != null)
+            recordCircle.voiceEnterTransitionInProgress = false;
         if (recordingAudioVideo) {
             if (recordInterfaceState == 1) {
                 return;
             }
+            createRecordAudioPanel();
             recordInterfaceState = 1;
             if (emojiView != null) {
                 emojiView.setEnabled(false);
@@ -6751,16 +7111,26 @@ private void updateRecordInterface(int recordState) {
                 recordPannelAnimation.cancel();
            }

-            recordPanel.setVisibility(VISIBLE);
-            recordCircle.setVisibility(VISIBLE);
-            recordCircle.setAmplitude(0);
-            recordDot.resetAlpha();
+            createRecordPanel();
+            if (recordPanel != null) {
+                recordPanel.setVisibility(VISIBLE);
+            }
+            createRecordCircle();
+            if (recordCircle != null) {
+                recordCircle.voiceEnterTransitionInProgress = false;
+                recordCircle.setVisibility(VISIBLE);
+                recordCircle.setAmplitude(0);
+            }
+            if (recordDot != null) {
+                recordDot.resetAlpha();
+
+                recordDot.setScaleX(0);
+                recordDot.setScaleY(0);
+                recordDot.enterAnimation = true;
+            }

             runningAnimationAudio = new AnimatorSet();
-            recordDot.setScaleX(0);
-            recordDot.setScaleY(0);
-            recordDot.enterAnimation = true;
             recordTimerView.setTranslationX(AndroidUtilities.dp(20));
             recordTimerView.setAlpha(0);
             slideText.setTranslationX(AndroidUtilities.dp(20));
@@ -6831,7 +7201,9 @@ public void onAnimationEnd(Animator animator) {
                     slideText.setTranslationX(0);
                     recordCircle.showTooltipIfNeed();
-                    messageEditText.setAlpha(0f);
+                    if (messageEditText != null) {
+                        messageEditText.setAlpha(0f);
+                    }
                 }
             });
             runningAnimationAudio.setInterpolator(new DecelerateInterpolator());
@@ -6874,7 +7246,9 @@ public void onAnimationEnd(Animator animator) {
             if (recordPannelAnimation != null) {
                 recordPannelAnimation.cancel();
             }
-            messageEditText.setVisibility(View.VISIBLE);
+            if (messageEditText != null) {
+                messageEditText.setVisibility(View.VISIBLE);
+            }

             runningAnimationAudio = new AnimatorSet();
             //EXIT TRANSITION
@@ -6886,7 +7260,7 @@ public void onAnimationEnd(Animator animator) {
                 runningAnimationAudio.playTogether(
                         ObjectAnimator.ofFloat(emojiButton, View.SCALE_Y, 1),
                         ObjectAnimator.ofFloat(emojiButton, View.SCALE_X, 1),
-                        ObjectAnimator.ofFloat(emojiButton, View.ALPHA, 1),
+                        ObjectAnimator.ofFloat(emojiButton, View.ALPHA, emojiButtonRestricted ? 0.5f : 1.0f),
                         ObjectAnimator.ofFloat(recordDot, View.SCALE_Y, 0),
                         ObjectAnimator.ofFloat(recordDot, View.SCALE_X, 0),
                         ObjectAnimator.ofFloat(recordCircle, recordCircleScale, 0.0f),
@@ -6928,36 +7302,64 @@ public void onAnimationEnd(Animator animator) {
                 recordIsCanceled = true;
                 runningAnimationAudio.setDuration(150);
             } else if (recordState == RECORD_STATE_PREPARING) {
-                slideText.setEnabled(false);
+                if (slideText != null) {
+                    slideText.setEnabled(false);
+                }
+                createRecordAudioPanel();
                 if (isInVideoMode()) {
-                    recordedAudioBackground.setVisibility(GONE);
-                    recordedAudioTimeTextView.setVisibility(GONE);
-                    recordedAudioPlayButton.setVisibility(GONE);
-                    recordedAudioSeekBar.setVisibility(GONE);
-                    recordedAudioPanel.setAlpha(1.0f);
-                    recordedAudioPanel.setVisibility(VISIBLE);
-                    recordDeleteImageView.setProgress(0);
-                    recordDeleteImageView.stopAnimation();
+                    if (recordedAudioBackground != null) {
+                        recordedAudioBackground.setVisibility(GONE);
+                    }
+                    if (recordedAudioTimeTextView != null) {
+                        recordedAudioTimeTextView.setVisibility(GONE);
+                    }
+                    if (recordedAudioPlayButton != null) {
+                        recordedAudioPlayButton.setVisibility(GONE);
+                    }
+                    if (recordedAudioSeekBar != null) {
+                        recordedAudioSeekBar.setVisibility(GONE);
+                    }
+                    if (recordedAudioPanel != null) {
+                        recordedAudioPanel.setAlpha(1.0f);
+                        recordedAudioPanel.setVisibility(VISIBLE);
+                    }
+                    if (recordDeleteImageView != null) {
+                        recordDeleteImageView.setProgress(0);
+                        recordDeleteImageView.stopAnimation();
+                    }
                 } else {
-                    videoTimelineView.setVisibility(GONE);
-                    recordedAudioBackground.setVisibility(VISIBLE);
-                    recordedAudioTimeTextView.setVisibility(VISIBLE);
-                    recordedAudioPlayButton.setVisibility(VISIBLE);
-                    recordedAudioSeekBar.setVisibility(VISIBLE);
-
-                    recordedAudioPanel.setAlpha(1.0f);
-                    recordedAudioBackground.setAlpha(0f);
-                    recordedAudioTimeTextView.setAlpha(0f);
-                    recordedAudioPlayButton.setAlpha(0f);
-                    recordedAudioSeekBar.setAlpha(0f);
-                    recordedAudioPanel.setVisibility(VISIBLE);
+                    if (videoTimelineView != null) {
+                        videoTimelineView.setVisibility(GONE);
+                    }
+                    if (recordedAudioTimeTextView != null) {
+                        recordedAudioTimeTextView.setVisibility(VISIBLE);
+                        recordedAudioTimeTextView.setAlpha(0f);
+                    }
+                    if (recordedAudioPanel != null) {
+                        recordedAudioPanel.setVisibility(VISIBLE);
+                        recordedAudioPanel.setAlpha(1.0f);
+                    }
+                    if (recordedAudioBackground != null) {
+                        recordedAudioBackground.setVisibility(VISIBLE);
+                        recordedAudioBackground.setAlpha(0f);
+                    }
+                    if (recordedAudioPlayButton != null) {
+                        recordedAudioPlayButton.setVisibility(VISIBLE);
+                        recordedAudioPlayButton.setAlpha(0f);
+                    }
+                    if (recordedAudioSeekBar != null) {
+                        recordedAudioSeekBar.setVisibility(VISIBLE);
+                        recordedAudioSeekBar.setAlpha(0f);
+                    }
                 }
-                recordDeleteImageView.setAlpha(0f);
-                recordDeleteImageView.setScaleX(0f);
-                recordDeleteImageView.setScaleY(0f);
-                recordDeleteImageView.setProgress(0);
-                recordDeleteImageView.stopAnimation();
+                if (recordDeleteImageView != null) {
+                    recordDeleteImageView.setAlpha(0f);
+                    recordDeleteImageView.setScaleX(0f);
+                    recordDeleteImageView.setScaleY(0f);
+                    recordDeleteImageView.setProgress(0);
+                    recordDeleteImageView.stopAnimation();
+                }

                 ValueAnimator transformToSeekbar = ValueAnimator.ofFloat(0, 1f);
                 transformToSeekbar.addUpdateListener(animation -> {
@@ -6965,12 +7367,12 @@ public void onAnimationEnd(Animator animator) {
                     if (!isInVideoMode()) {
                         recordCircle.setTransformToSeekbar(value);
                         seekBarWaveform.setWaveScaling(recordCircle.getTransformToSeekbarProgressStep3());
-                        recordedAudioSeekBar.invalidate();
recordedAudioTimeTextView.setAlpha(recordCircle.getTransformToSeekbarProgressStep3()); recordedAudioPlayButton.setAlpha(recordCircle.getTransformToSeekbarProgressStep3()); recordedAudioPlayButton.setScaleX(recordCircle.getTransformToSeekbarProgressStep3()); recordedAudioPlayButton.setScaleY(recordCircle.getTransformToSeekbarProgressStep3()); recordedAudioSeekBar.setAlpha(recordCircle.getTransformToSeekbarProgressStep3()); + recordedAudioSeekBar.invalidate(); } else { recordCircle.setExitTransition(value); } @@ -6991,9 +7393,11 @@ public void onAnimationEnd(Animator animator) { videoTimelineView.setVisibility(VISIBLE); } - recordDeleteImageView.setAlpha(0f); - recordDeleteImageView.setScaleX(0f); - recordDeleteImageView.setScaleY(0f); + if (recordDeleteImageView != null) { + recordDeleteImageView.setAlpha(0f); + recordDeleteImageView.setScaleX(0f); + recordDeleteImageView.setScaleY(0f); + } AnimatorSet iconsAnimator = new AnimatorSet(); @@ -7099,7 +7503,7 @@ public void onAnimationEnd(Animator animation) { iconsAnimator.playTogether( ObjectAnimator.ofFloat(emojiButton, View.SCALE_Y, 1), ObjectAnimator.ofFloat(emojiButton, View.SCALE_X, 1), - ObjectAnimator.ofFloat(emojiButton, View.ALPHA, 1), + ObjectAnimator.ofFloat(emojiButton, View.ALPHA, emojiButtonRestricted ? 0.5f : 1.0f), ObjectAnimator.ofFloat(recordDot, View.SCALE_Y, 0), ObjectAnimator.ofFloat(recordDot, View.SCALE_X, 0) ); @@ -7209,8 +7613,9 @@ public void onAnimationEnd(Animator animation) { recordTimer.setDuration(200); recordTimer.setStartDelay(200); - - messageEditText.setTranslationX(0f); + if (messageEditText != null) { + messageEditText.setTranslationX(0f); + } ObjectAnimator messageEditTextAniamtor = ObjectAnimator.ofFloat(messageEditText, View.ALPHA, 1); messageEditTextAniamtor.setStartDelay(300); messageEditTextAniamtor.setDuration(200); @@ -7235,7 +7640,9 @@ public void onAnimationEnd(Animator animation) { recordCircleAnimator ); } - recordDot.playDeleteAnimation(); + if (recordDot != null) { + recordDot.playDeleteAnimation(); + } } else { if (audioVideoSendButton != null) { @@ -7246,7 +7653,7 @@ public void onAnimationEnd(Animator animation) { iconsAnimator.playTogether( ObjectAnimator.ofFloat(emojiButton, View.SCALE_Y, 1), ObjectAnimator.ofFloat(emojiButton, View.SCALE_X, 1), - ObjectAnimator.ofFloat(emojiButton, View.ALPHA, 1), + ObjectAnimator.ofFloat(emojiButton, View.ALPHA, emojiButtonRestricted ? 0.5f : 1.0f), ObjectAnimator.ofFloat(recordDot, View.SCALE_Y, 0), ObjectAnimator.ofFloat(recordDot, View.SCALE_X, 0), ObjectAnimator.ofFloat(audioVideoButtonContainer, View.ALPHA, 1.0f) @@ -7293,7 +7700,9 @@ public void onAnimationEnd(Animator animation) { Animator recordCircleAnimator = ObjectAnimator.ofFloat(recordCircle, "exitTransition", 1.0f); recordCircleAnimator.setDuration(messageTransitionIsRunning ? 
220 : 360); - messageEditText.setTranslationX(0f); + if (messageEditText != null) { + messageEditText.setTranslationX(0f); + } ObjectAnimator messageEditTextAniamtor = ObjectAnimator.ofFloat(messageEditText, View.ALPHA, 1); messageEditTextAniamtor.setStartDelay(150); messageEditTextAniamtor.setDuration(200); @@ -7309,18 +7718,26 @@ public void onAnimationEnd(Animator animation) { @Override public void onAnimationEnd(Animator animator) { if (animator.equals(runningAnimationAudio)) { - recordPanel.setVisibility(GONE); - recordCircle.setVisibility(GONE); - recordCircle.setSendButtonInvisible(); + if (recordPanel != null) { + recordPanel.setVisibility(GONE); + } + if (recordCircle != null) { + recordCircle.setVisibility(GONE); + recordCircle.setSendButtonInvisible(); + } runningAnimationAudio = null; - if (recordState != RECORD_STATE_PREPARING) { + if (recordState != RECORD_STATE_PREPARING && messageEditText != null) { messageEditText.requestFocus(); } - recordedAudioBackground.setAlpha(1f); + if (recordedAudioBackground != null) { + recordedAudioBackground.setAlpha(1f); + } if (attachLayout != null) { attachLayout.setTranslationX(0); } - slideText.setCancelToProgress(0f); + if (slideText != null) { + slideText.setCancelToProgress(0f); + } delegate.onAudioVideoInterfaceUpdated(); updateSendAsButton(); @@ -7328,12 +7745,37 @@ public void onAnimationEnd(Animator animator) { } }); runningAnimationAudio.start(); - recordTimerView.stop(); + if (recordTimerView != null) { + recordTimerView.stop(); + } } delegate.onAudioVideoInterfaceUpdated(); updateSendAsButton(); } + private void createRecordPanel() { + if (recordPanel != null || getContext() == null) { + return; + } + + recordPanel = new FrameLayout(getContext()); + recordPanel.setClipChildren(false); + recordPanel.setVisibility(GONE); + messageEditTextContainer.addView(recordPanel, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48)); + recordPanel.setOnTouchListener((v, event) -> true); + recordPanel.addView(slideText = new SlideTextView(getContext()), LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.NO_GRAVITY, 45, 0, 0, 0)); + + LinearLayout recordTimeContainer = new LinearLayout(getContext()); + recordTimeContainer.setOrientation(LinearLayout.HORIZONTAL); + recordTimeContainer.setPadding(AndroidUtilities.dp(13), 0, 0, 0); + recordTimeContainer.setFocusable(false); + + recordTimeContainer.addView(recordDot = new RecordDot(getContext()), LayoutHelper.createLinear(28, 28, Gravity.CENTER_VERTICAL, 0, 0, 0, 0)); + recordTimeContainer.addView(recordTimerView = new TimerView(getContext()), LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER_VERTICAL, 6, 0, 0, 0)); + + recordPanel.addView(recordTimeContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER_VERTICAL)); + } + @Override public boolean onInterceptTouchEvent(MotionEvent ev) { if (recordingAudioVideo) { @@ -7347,7 +7789,7 @@ public void setDelegate(ChatActivityEnterViewDelegate chatActivityEnterViewDeleg } public void setCommand(MessageObject messageObject, String command, boolean longPress, boolean username) { - if (command == null || getVisibility() != VISIBLE) { + if (command == null || getVisibility() != VISIBLE || messageEditText == null) { return; } if (longPress) { @@ -7388,6 +7830,7 @@ public void setEditingMessageObject(MessageObject messageObject, boolean caption if (audioToSend != null || videoToSendMessageObject != null || editingMessageObject == 
messageObject) { return; } + createMessageEditText(); boolean hadEditingMessage = editingMessageObject != null; editingMessageObject = messageObject; editingCaption = caption; @@ -7397,6 +7840,7 @@ public void setEditingMessageObject(MessageObject messageObject, boolean caption doneButtonAnimation.cancel(); doneButtonAnimation = null; } + createDoneButton(); doneButtonContainer.setVisibility(View.VISIBLE); doneButtonImage.setScaleX(0.1f); doneButtonImage.setScaleY(0.1f); @@ -7412,6 +7856,17 @@ public void setEditingMessageObject(MessageObject messageObject, boolean caption editingText = editingMessageObject.messageText; } if (editingText != null) { + final Paint.FontMetricsInt fontMetricsInt; + Paint paint = null; + if (messageEditText != null) { + paint = messageEditText.getPaint(); + } + if (paint == null) { + paint = new TextPaint(); + paint.setTextSize(AndroidUtilities.dp(18)); + } + fontMetricsInt = paint.getFontMetricsInt(); + ArrayList entities = editingMessageObject.messageOwner.entities; MediaDataController.sortEntities(entities); SpannableStringBuilder stringBuilder = new SpannableStringBuilder(editingText); @@ -7472,9 +7927,9 @@ public void setEditingMessageObject(MessageObject messageObject, boolean caption TLRPC.TL_messageEntityCustomEmoji emojiEntity = (TLRPC.TL_messageEntityCustomEmoji) entity; AnimatedEmojiSpan span; if (emojiEntity.document != null) { - span = new AnimatedEmojiSpan(emojiEntity.document, messageEditText.getPaint().getFontMetricsInt()); + span = new AnimatedEmojiSpan(emojiEntity.document, fontMetricsInt); } else { - span = new AnimatedEmojiSpan(emojiEntity.document_id, messageEditText.getPaint().getFontMetricsInt()); + span = new AnimatedEmojiSpan(emojiEntity.document_id, fontMetricsInt); } stringBuilder.setSpan(span, entity.offset, entity.offset + entity.length, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } @@ -7483,12 +7938,12 @@ public void setEditingMessageObject(MessageObject messageObject, boolean caption FileLog.e(e); } } - textToSetWithKeyboard = Emoji.replaceEmoji(new SpannableStringBuilder(stringBuilder), messageEditText.getPaint().getFontMetricsInt(), AndroidUtilities.dp(20), false, null); + textToSetWithKeyboard = Emoji.replaceEmoji(new SpannableStringBuilder(stringBuilder), fontMetricsInt, AndroidUtilities.dp(20), false, null); } else { textToSetWithKeyboard = ""; } if (draftMessage == null && !hadEditingMessage) { - draftMessage = messageEditText.length() > 0 ? messageEditText.getText() : null; + draftMessage = messageEditText != null && messageEditText.length() > 0 ? 
messageEditText.getText() : null; draftSearchWebpage = messageWebPageSearch; } messageWebPageSearch = editingMessageObject.messageOwner.media instanceof TLRPC.TL_messageMediaWebPage; @@ -7504,11 +7959,15 @@ public void setEditingMessageObject(MessageObject messageObject, boolean caption } setFieldText(textToSetWithKeyboard); } - messageEditText.requestFocus(); + if (messageEditText != null) { + messageEditText.requestFocus(); + } openKeyboard(); - FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) messageEditText.getLayoutParams(); - layoutParams.rightMargin = AndroidUtilities.dp(4); - messageEditText.setLayoutParams(layoutParams); + if (messageEditText != null) { + FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) messageEditText.getLayoutParams(); + layoutParams.rightMargin = AndroidUtilities.dp(4); + messageEditText.setLayoutParams(layoutParams); + } sendButton.setVisibility(GONE); setSlowModeButtonVisible(false); cancelBotButton.setVisibility(GONE); @@ -7523,8 +7982,9 @@ public void setEditingMessageObject(MessageObject messageObject, boolean caption AndroidUtilities.cancelRunOnUIThread(setTextFieldRunnable); setTextFieldRunnable = null; } - doneButtonContainer.setVisibility(View.GONE); - doneButtonContainer.setVisibility(View.GONE); + if (doneButtonContainer != null) { + doneButtonContainer.setVisibility(View.GONE); + } currentLimit = -1; delegate.onMessageEditEnd(false); sendButtonContainer.setVisibility(VISIBLE); @@ -7576,16 +8036,20 @@ public void setEditingMessageObject(MessageObject messageObject, boolean caption audioVideoButtonContainer.setAlpha(1.0f); audioVideoButtonContainer.setVisibility(VISIBLE); } - if (scheduledButton.getTag() != null) { + createScheduledButton(); + if (scheduledButton != null && scheduledButton.getTag() != null) { scheduledButton.setScaleX(1.0f); scheduledButton.setScaleY(1.0f); scheduledButton.setAlpha(1.0f); scheduledButton.setVisibility(VISIBLE); } - messageEditText.setText(draftMessage); + createMessageEditText(); + if (messageEditText != null) { + messageEditText.setText(draftMessage); + messageEditText.setSelection(messageEditText.length()); + } draftMessage = null; messageWebPageSearch = draftSearchWebpage; - messageEditText.setSelection(messageEditText.length()); if (getVisibility() == VISIBLE) { delegate.onAttachButtonShow(); } @@ -7637,11 +8101,21 @@ public void updateColors() { } updateRecordedDeleteIconColors(); - recordCircle.updateColors(); - recordDot.updateColors(); - slideText.updateColors(); - recordTimerView.updateColors(); - videoTimelineView.updateColors(); + if (recordCircle != null) { + recordCircle.updateColors(); + } + if (recordDot != null) { + recordDot.updateColors(); + } + if (slideText != null) { + slideText.updateColors(); + } + if (recordTimerView != null) { + recordTimerView.updateColors(); + } + if (videoTimelineView != null) { + videoTimelineView.updateColors(); + } if (captionLimitView != null && messageEditText != null) { if (codePointCount - currentLimit < 0) { @@ -7652,7 +8126,9 @@ public void updateColors() { } int color = getThemedColor(Theme.key_chat_messagePanelVoicePressed); int defaultAlpha = Color.alpha(color); - doneCheckDrawable.setColorFilter(new PorterDuffColorFilter(ColorUtils.setAlphaComponent(color, (int) (defaultAlpha * (0.58f + 0.42f * doneButtonEnabledProgress))), PorterDuff.Mode.MULTIPLY)); + if (doneCheckDrawable != null) { + doneCheckDrawable.setColorFilter(new PorterDuffColorFilter(ColorUtils.setAlphaComponent(color, (int) (defaultAlpha * (0.58f + 0.42f * 
doneButtonEnabledProgress))), PorterDuff.Mode.SRC_IN)); + } if (botCommandsMenuContainer != null) { botCommandsMenuContainer.updateColors(); } @@ -7672,14 +8148,16 @@ private void updateRecordedDeleteIconColors() { int background = getThemedColor(Theme.key_chat_messagePanelBackground); int greyColor = getThemedColor(Theme.key_chat_messagePanelVoiceDelete); - recordDeleteImageView.setLayerColor("Cup Red.**", dotColor); - recordDeleteImageView.setLayerColor("Box Red.**", dotColor); - recordDeleteImageView.setLayerColor("Cup Grey.**", greyColor); - recordDeleteImageView.setLayerColor("Box Grey.**", greyColor); + if (recordDeleteImageView != null) { + recordDeleteImageView.setLayerColor("Cup Red.**", dotColor); + recordDeleteImageView.setLayerColor("Box Red.**", dotColor); + recordDeleteImageView.setLayerColor("Cup Grey.**", greyColor); + recordDeleteImageView.setLayerColor("Box Grey.**", greyColor); - recordDeleteImageView.setLayerColor("Line 1.**", background); - recordDeleteImageView.setLayerColor("Line 2.**", background); - recordDeleteImageView.setLayerColor("Line 3.**", background); + recordDeleteImageView.setLayerColor("Line 1.**", background); + recordDeleteImageView.setLayerColor("Line 2.**", background); + recordDeleteImageView.setLayerColor("Line 3.**", background); + } } public void setFieldText(CharSequence text) { @@ -7726,6 +8204,9 @@ public int getSelectionLength() { } public void replaceWithText(int start, int len, CharSequence text, boolean parseEmoji) { + if (messageEditText == null) { + return; + } try { SpannableStringBuilder builder = new SpannableStringBuilder(messageEditText.getText()); builder.replace(start, start + len, text); @@ -7756,14 +8237,14 @@ public void setFieldFocused(boolean focus) { return; } if (focus) { - if (searchingType == 0 && !messageEditText.isFocused()) { + if (searchingType == 0 && !messageEditText.isFocused() && (botWebViewMenuContainer == null || botWebViewMenuContainer.getVisibility() == View.GONE)) { AndroidUtilities.runOnUIThread(focusRunnable = () -> { focusRunnable = null; boolean allowFocus; if (AndroidUtilities.isTablet()) { if (parentActivity instanceof LaunchActivity) { LaunchActivity launchActivity = (LaunchActivity) parentActivity; - View layout = launchActivity.getLayersActionBarLayout().getView(); + View layout = launchActivity != null && launchActivity.getLayersActionBarLayout() != null ? launchActivity.getLayersActionBarLayout().getView() : null; allowFocus = layout == null || layout.getVisibility() != View.VISIBLE; } else { allowFocus = true; @@ -7791,27 +8272,58 @@ public boolean hasText() { return messageEditText != null && messageEditText.length() > 0; } + @Nullable public EditTextCaption getEditField() { return messageEditText; } + public Editable getEditText() { + if (messageEditText == null) { + return null; + } + return messageEditText.getText(); + } + public CharSequence getDraftMessage() { if (editingMessageObject != null) { return TextUtils.isEmpty(draftMessage) ? 
null : draftMessage; } - if (hasText()) { + if (messageEditText != null && hasText()) { return messageEditText.getText(); } return null; } public CharSequence getFieldText() { - if (hasText()) { + if (messageEditText != null && hasText()) { return messageEditText.getText(); } return null; } + public void updateGiftButton(boolean animated) { + boolean visible = !MessagesController.getInstance(currentAccount).premiumLocked && MessagesController.getInstance(currentAccount).giftAttachMenuIcon && + MessagesController.getInstance(currentAccount).giftTextFieldIcon && getParentFragment() != null && getParentFragment().getCurrentUser() != null && + !BuildVars.IS_BILLING_UNAVAILABLE && !getParentFragment().getCurrentUser().self && !getParentFragment().getCurrentUser().premium && + getParentFragment().getCurrentUserInfo() != null && !getParentFragment().getCurrentUserInfo().premium_gifts.isEmpty() && !isInScheduleMode() && + MessagesController.getInstance(currentAccount).getMainSettings().getBoolean("show_gift_for_" + parentFragment.getDialogId(), true); + + if (!visible && giftButton == null) { + return; + } + createGiftButton(); + + AndroidUtilities.updateViewVisibilityAnimated(giftButton, visible, 1f, animated); + if (scheduledButton != null && scheduledButton.getVisibility() == View.VISIBLE) { + float tX = (visible ? -AndroidUtilities.dp(48) : 0) + AndroidUtilities.dp(botButton != null && botButton.getVisibility() == VISIBLE ? 48 : 0); + if (animated) { + scheduledButton.animate().translationX(tX).setDuration(150).start(); + } else { + scheduledButton.setTranslationX(tX); + } + } + } + public void updateScheduleButton(boolean animated) { boolean notifyVisible = false; /*if (DialogObject.isChatDialog(dialog_id)) { @@ -7832,6 +8344,7 @@ public void updateScheduleButton(boolean animated) { }*/ boolean hasScheduled = delegate != null && !isInScheduleMode() && delegate.hasScheduledMessages(); boolean visible = hasScheduled && !scheduleButtonHidden && !recordingAudioVideo; + createScheduledButton(); if (scheduledButton != null) { if (scheduledButton.getTag() != null && visible || scheduledButton.getTag() == null && !visible) { if (notifyButton != null) { @@ -7860,6 +8373,9 @@ public void updateScheduleButton(boolean animated) { if (notifyButton != null) { notifyButton.setVisibility(notifyVisible && scheduledButton.getVisibility() != VISIBLE ? 
VISIBLE : GONE); } + if (giftButton != null && giftButton.getVisibility() == VISIBLE) { + scheduledButton.setTranslationX(-AndroidUtilities.dp(48)); + } } } else if (scheduledButton != null) { if (visible) { @@ -7895,6 +8411,7 @@ public void updateSendAsButton(boolean animated) { if (parentFragment == null || delegate == null) { return; } + createMessageEditText(); if (NekoConfig.hideSendAsChannel.Bool()) return; TLRPC.ChatFull full = parentFragment.getMessagesController().getChatFull(-dialog_id); @@ -7902,33 +8419,41 @@ public void updateSendAsButton(boolean animated) { if (defPeer == null && delegate.getSendAsPeers() != null && !delegate.getSendAsPeers().peers.isEmpty()) { defPeer = delegate.getSendAsPeers().peers.get(0).peer; } + boolean isVisible = defPeer != null && (delegate.getSendAsPeers() == null || delegate.getSendAsPeers().peers.size() > 1) && + !isEditingMessage() && !isRecordingAudioVideo() && (recordedAudioPanel == null || recordedAudioPanel.getVisibility() != View.VISIBLE); + if (isVisible) { + createSenderSelectView(); + } if (defPeer != null) { if (defPeer.channel_id != 0) { TLRPC.Chat ch = MessagesController.getInstance(currentAccount).getChat(defPeer.channel_id); - if (ch != null) { + if (ch != null && senderSelectView != null) { senderSelectView.setAvatar(ch); senderSelectView.setContentDescription(LocaleController.formatString(R.string.AccDescrSendAs, ch.title)); } } else { TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(defPeer.user_id); - if (user != null) { + if (user != null && senderSelectView != null) { senderSelectView.setAvatar(user); senderSelectView.setContentDescription(LocaleController.formatString(R.string.AccDescrSendAs, ContactsController.formatName(user.first_name, user.last_name))); } } } - boolean wasVisible = senderSelectView.getVisibility() == View.VISIBLE; - boolean isVisible = defPeer != null && (delegate.getSendAsPeers() == null || delegate.getSendAsPeers().peers.size() > 1) && - !isEditingMessage() && !isRecordingAudioVideo() && recordedAudioPanel.getVisibility() != View.VISIBLE; + boolean wasVisible = senderSelectView != null && senderSelectView.getVisibility() == View.VISIBLE; int pad = AndroidUtilities.dp(2); - MarginLayoutParams params = (MarginLayoutParams) senderSelectView.getLayoutParams(); float startAlpha = isVisible ? 0 : 1; - float startX = isVisible ? -senderSelectView.getLayoutParams().width - params.leftMargin - pad : 0; float endAlpha = isVisible ? 1 : 0; - float endX = isVisible ? 0 : -senderSelectView.getLayoutParams().width - params.leftMargin - pad; + final float startX, endX; + if (senderSelectView != null) { + MarginLayoutParams params = (MarginLayoutParams) senderSelectView.getLayoutParams(); + startX = isVisible ? -senderSelectView.getLayoutParams().width - params.leftMargin - pad : 0; + endX = isVisible ? 0 : -senderSelectView.getLayoutParams().width - params.leftMargin - pad; + } else { + startX = endX = 0; + } if (wasVisible != isVisible) { - ValueAnimator animator = (ValueAnimator) senderSelectView.getTag(); + ValueAnimator animator = senderSelectView == null ? 
null : (ValueAnimator) senderSelectView.getTag(); if (animator != null) { animator.cancel(); senderSelectView.setTag(null); @@ -7936,28 +8461,37 @@ public void updateSendAsButton(boolean animated) { if (parentFragment.getOtherSameChatsDiff() == 0 && parentFragment.fragmentOpened && animated) { ValueAnimator anim = ValueAnimator.ofFloat(0, 1).setDuration(150); - senderSelectView.setTranslationX(startX); - messageEditText.setTranslationX(senderSelectView.getTranslationX()); + if (senderSelectView != null) { + senderSelectView.setTranslationX(startX); + } + messageEditText.setTranslationX(startX); anim.addUpdateListener(animation -> { - float val = (float) animation.getAnimatedValue(); - - senderSelectView.setAlpha(startAlpha + (endAlpha - startAlpha) * val); - senderSelectView.setTranslationX(startX + (endX - startX) * val); - emojiButton.setTranslationX(senderSelectView.getTranslationX()); - messageEditText.setTranslationX(senderSelectView.getTranslationX()); + final float val = (float) animation.getAnimatedValue(); + final float tx = startX + (endX - startX) * val; + if (senderSelectView != null) { + senderSelectView.setAlpha(startAlpha + (endAlpha - startAlpha) * val); + senderSelectView.setTranslationX(tx); + } + emojiButton.setTranslationX(tx); + messageEditText.setTranslationX(tx); }); anim.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationStart(Animator animation) { if (isVisible) { + createSenderSelectView(); senderSelectView.setVisibility(VISIBLE); } - senderSelectView.setAlpha(startAlpha); - senderSelectView.setTranslationX(startX); - emojiButton.setTranslationX(senderSelectView.getTranslationX()); - messageEditText.setTranslationX(senderSelectView.getTranslationX()); + float tx = 0; + if (senderSelectView != null) { + senderSelectView.setAlpha(startAlpha); + senderSelectView.setTranslationX(startX); + tx = senderSelectView.getTranslationX(); + } + emojiButton.setTranslationX(tx); + messageEditText.setTranslationX(tx); - if (botCommandsMenuButton.getTag() == null) { + if (botCommandsMenuButton != null && botCommandsMenuButton.getTag() == null) { animationParamsX.clear(); } } @@ -7965,7 +8499,9 @@ public void onAnimationStart(Animator animation) { @Override public void onAnimationEnd(Animator animation) { if (!isVisible) { - senderSelectView.setVisibility(GONE); + if (senderSelectView != null) { + senderSelectView.setVisibility(GONE); + } emojiButton.setTranslationX(0); messageEditText.setTranslationX(0); } @@ -7973,29 +8509,40 @@ public void onAnimationEnd(Animator animation) { @Override public void onAnimationCancel(Animator animation) { + float tx = 0; if (isVisible) { - senderSelectView.setVisibility(VISIBLE); - } else { - senderSelectView.setVisibility(GONE); + createSenderSelectView(); } - senderSelectView.setAlpha(endAlpha); - senderSelectView.setTranslationX(endX); - emojiButton.setTranslationX(senderSelectView.getTranslationX()); - messageEditText.setTranslationX(senderSelectView.getTranslationX()); - + if (senderSelectView != null) { + senderSelectView.setVisibility(isVisible ? VISIBLE : GONE); + senderSelectView.setAlpha(endAlpha); + senderSelectView.setTranslationX(endX); + tx = senderSelectView.getTranslationX(); + } + emojiButton.setTranslationX(tx); + messageEditText.setTranslationX(tx); requestLayout(); } }); anim.start(); - senderSelectView.setTag(anim); + if (senderSelectView != null) { + senderSelectView.setTag(anim); + } } else { - senderSelectView.setVisibility(isVisible ? 
VISIBLE : GONE); - senderSelectView.setTranslationX(endX); + if (isVisible) { + createSenderSelectView(); + } + if (senderSelectView != null) { + senderSelectView.setVisibility(isVisible ? VISIBLE : GONE); + senderSelectView.setTranslationX(endX); + } float translationX = isVisible ? endX : 0; emojiButton.setTranslationX(translationX); messageEditText.setTranslationX(translationX); - senderSelectView.setAlpha(endAlpha); - senderSelectView.setTag(null); + if (senderSelectView != null) { + senderSelectView.setAlpha(endAlpha); + senderSelectView.setTag(null); + } } } } @@ -8009,7 +8556,7 @@ public boolean hasBotWebView() { } private void updateBotButton(boolean animated) { - if (botButton == null) { + if (!isChat) { return; } if (!parentFragment.openAnimationEnded) { @@ -8017,14 +8564,15 @@ private void updateBotButton(boolean animated) { } boolean hasBotWebView = hasBotWebView(); boolean canShowBotsMenu = botMenuButtonType != BotMenuButtonType.NO_BUTTON && dialog_id > 0; - boolean wasVisible = botButton.getVisibility() == VISIBLE; + boolean wasVisible = botButton != null && botButton.getVisibility() == VISIBLE; if (hasBotWebView || hasBotCommands || botReplyMarkup != null) { if (botReplyMarkup != null) { - if (isPopupShowing() && currentPopupContentType == POPUP_CONTENT_BOT_KEYBOARD) { - if (botButton.getVisibility() != GONE) { + if (isPopupShowing() && currentPopupContentType == POPUP_CONTENT_BOT_KEYBOARD && botReplyMarkup.is_persistent) { + if (botButton != null && botButton.getVisibility() != GONE) { botButton.setVisibility(GONE); } } else { + createBotButton(); if (botButton.getVisibility() != VISIBLE) { botButton.setVisibility(VISIBLE); } @@ -8034,26 +8582,33 @@ private void updateBotButton(boolean animated) { } } else { if (!canShowBotsMenu && !NaConfig.INSTANCE.getHideBotButtonInInputField().Bool()) { - botButtonDrawable.setIcon(R.drawable.input_bot1, true); + createBotButton(); + botButtonDrawable.setIcon(R.drawable.deproko_baseline_bots_command_26, true); botButton.setContentDescription(LocaleController.getString("AccDescrBotCommands", R.string.AccDescrBotCommands)); botButton.setVisibility(VISIBLE); - } else { + } else if (botButton != null) { botButton.setVisibility(GONE); } } - } else { + } else if (botButton != null) { botButton.setVisibility(GONE); } - boolean wasWebView = botCommandsMenuButton.isWebView; - botCommandsMenuButton.setWebView(botMenuButtonType == BotMenuButtonType.WEB_VIEW); - boolean textChanged = botCommandsMenuButton.setMenuText(botMenuButtonType == BotMenuButtonType.COMMANDS ? LocaleController.getString(R.string.BotsMenuTitle) : botMenuWebViewTitle); - AndroidUtilities.updateViewVisibilityAnimated(botCommandsMenuButton, canShowBotsMenu, 0.5f, animated); - boolean changed = (botButton.getVisibility() == VISIBLE) != wasVisible || textChanged || wasWebView != botCommandsMenuButton.isWebView; + if (canShowBotsMenu) { + createBotCommandsMenuButton(); + } + boolean changed = (botButton != null && botButton.getVisibility() == VISIBLE) != wasVisible; + if (botCommandsMenuButton != null) { + boolean wasWebView = botCommandsMenuButton.isWebView; + botCommandsMenuButton.setWebView(botMenuButtonType == BotMenuButtonType.WEB_VIEW); + boolean textChanged = botCommandsMenuButton.setMenuText(botMenuButtonType == BotMenuButtonType.COMMANDS ? 
LocaleController.getString(R.string.BotsMenuTitle) : botMenuWebViewTitle); + AndroidUtilities.updateViewVisibilityAnimated(botCommandsMenuButton, canShowBotsMenu, 0.5f, animated); + changed = changed || textChanged || wasWebView != botCommandsMenuButton.isWebView; + } if (changed && animated) { beginDelayedTransition(); - boolean show = botButton.getVisibility() == VISIBLE; - if (show != wasVisible) { + boolean show = botButton != null && botButton.getVisibility() == VISIBLE; + if (show != wasVisible && botButton != null) { botButton.setVisibility(VISIBLE); if (show) { botButton.setAlpha(0f); @@ -8107,7 +8662,7 @@ private boolean checkBotButton() { public boolean isRtlText() { try { - return messageEditText.getLayout().getParagraphDirection(0) == Layout.DIR_RIGHT_TO_LEFT; + return messageEditText != null && messageEditText.getLayout().getParagraphDirection(0) == Layout.DIR_RIGHT_TO_LEFT; } catch (Throwable ignore) { } @@ -8115,7 +8670,12 @@ public boolean isRtlText() { } public void updateBotWebView(boolean animated) { - botCommandsMenuButton.setWebView(hasBotWebView()); + if (botMenuButtonType != BotMenuButtonType.NO_BUTTON && dialog_id > 0) { + createBotCommandsMenuButton(); + } + if (botCommandsMenuButton != null) { + botCommandsMenuButton.setWebView(hasBotWebView()); + } updateBotButton(animated); } @@ -8136,7 +8696,7 @@ public void setButtons(MessageObject messageObject, boolean openKeyboard) { botMessageObject = messageObject; return; } - if (botButton == null || botButtonsMessageObject != null && botButtonsMessageObject == messageObject || botButtonsMessageObject == null && messageObject == null) { + if (botButtonsMessageObject != null && botButtonsMessageObject == messageObject || botButtonsMessageObject == null && messageObject == null) { return; } if (botKeyboardView == null) { @@ -8152,12 +8712,13 @@ public void setTranslationY(float translationY) { botKeyboardView.setVisibility(GONE); botKeyboardViewVisible = false; botKeyboardView.setDelegate(button -> { - MessageObject object = replyingMessageObject != null ? replyingMessageObject : (DialogObject.isChatDialog(dialog_id) ? botButtonsMessageObject : null); + boolean replyingIsTopicStarter = replyingMessageObject != null && parentFragment != null && parentFragment.isTopic && parentFragment.getTopicId() == replyingMessageObject.getId(); + MessageObject object = replyingMessageObject != null && !replyingIsTopicStarter ? replyingMessageObject : (DialogObject.isChatDialog(dialog_id) ? botButtonsMessageObject : null); if (delegate != null) { delegate.beforeMessageSend(null, true, 0); } - boolean open = didPressedBotButton(button, object, replyingMessageObject != null ? replyingMessageObject : botButtonsMessageObject); - if (replyingMessageObject != null) { + boolean open = didPressedBotButton(button, object, replyingMessageObject != null && !replyingIsTopicStarter ? 
replyingMessageObject : botButtonsMessageObject); + if (replyingMessageObject != null && !replyingIsTopicStarter) { openKeyboardInternal(); setButtons(botMessageObject, false); } else if (botButtonsMessageObject != null && botButtonsMessageObject.messageOwner.reply_markup.single_use) { @@ -8167,7 +8728,7 @@ public void setTranslationY(float translationY) { showPopup(0, 0); } SharedPreferences preferences = MessagesController.getMainSettings(currentAccount); - preferences.edit().putInt("answered_" + dialog_id, botButtonsMessageObject.getId()).commit(); + preferences.edit().putInt("answered_" + getTopicKeyString(), botButtonsMessageObject.getId()).commit(); } if (delegate != null) { delegate.onMessageSend(null, true, 0); @@ -8183,13 +8744,16 @@ public void setTranslationY(float translationY) { if (botReplyMarkup != null) { SharedPreferences preferences = MessagesController.getMainSettings(currentAccount); boolean showPopup = true; - if (botButtonsMessageObject != replyingMessageObject && botReplyMarkup.single_use) { - if (preferences.getInt("answered_" + dialog_id, 0) == messageObject.getId()) { + if (botButtonsMessageObject != replyingMessageObject) { + if (messageObject != null && ( + botReplyMarkup.single_use && preferences.getInt("answered_" + getTopicKeyString(), 0) == messageObject.getId() || + !botReplyMarkup.is_persistent && preferences.getInt("closed_botkeyboard_" + getTopicKeyString(), 0) == messageObject.getId() + )) { showPopup = false; } } botKeyboardView.setButtons(botReplyMarkup); - if (showPopup && messageEditText.length() == 0 && !isPopupShowing()) { + if (showPopup && (messageEditText == null || messageEditText.length() == 0) && !isPopupShowing()) { showPopup(1, 1); } } else { @@ -8291,9 +8855,9 @@ public void run() { } else { Bundle args = new Bundle(); args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 1); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_BOT_SHARE); DialogsActivity fragment = new DialogsActivity(args); - fragment.setDelegate((fragment1, dids, message, param) -> { + fragment.setDelegate((fragment1, dids, message, param, topicsFragment) -> { long uid = messageObject.messageOwner.from_id.user_id; if (messageObject.messageOwner.via_bot_id != 0) { uid = messageObject.messageOwner.via_bot_id; @@ -8301,7 +8865,7 @@ public void run() { TLRPC.User user = accountInstance.getMessagesController().getUser(uid); if (user == null) { fragment1.finishFragment(); - return; + return true; } long did = dids.get(0).dialogId; MediaDataController.getInstance(currentAccount).saveDraft(did, 0, "@" + UserObject.getPublicUsername(user) + " " + button.query, null, null, true); @@ -8314,7 +8878,7 @@ public void run() { args1.putLong("chat_id", -did); } if (!accountInstance.getMessagesController().checkCanOpenChat(args1, fragment1)) { - return; + return true; } ChatActivity chatActivity = new ChatActivity(args1); if (parentFragment.presentFragment(chatActivity, true)) { @@ -8330,6 +8894,7 @@ public void run() { } else { fragment1.finishFragment(); } + return true; }); parentFragment.presentFragment(fragment); } @@ -8340,6 +8905,44 @@ public void run() { ProfileActivity fragment = new ProfileActivity(args); parentFragment.presentFragment(fragment); } + } else if (button instanceof TLRPC.TL_keyboardButtonRequestPeer) { + TLRPC.TL_keyboardButtonRequestPeer btn = (TLRPC.TL_keyboardButtonRequestPeer) button; + if (btn.peer_type != null && messageObject != null && messageObject.messageOwner != null) { + Bundle args = new Bundle(); + args.putBoolean("onlySelect", 
true); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER); + if (messageObject != null && messageObject.messageOwner != null && messageObject.messageOwner.from_id instanceof TLRPC.TL_peerUser) { + args.putLong("requestPeerBotId", messageObject.messageOwner.from_id.user_id); + } + try { + SerializedData buffer = new SerializedData(btn.peer_type.getObjectSize()); + btn.peer_type.serializeToStream(buffer); + args.putByteArray("requestPeerType", buffer.toByteArray()); + buffer.cleanup(); + } catch (Exception e) { + FileLog.e(e); + } + DialogsActivity fragment = new DialogsActivity(args); + fragment.setDelegate(new DialogsActivity.DialogsActivityDelegate() { + @Override + public boolean didSelectDialogs(DialogsActivity fragment, ArrayList dids, CharSequence message, boolean param, TopicsFragment topicsFragment) { + if (dids != null && !dids.isEmpty()) { + TLRPC.TL_messages_sendBotRequestedPeer req = new TLRPC.TL_messages_sendBotRequestedPeer(); + req.peer = MessagesController.getInstance(currentAccount).getInputPeer(messageObject.messageOwner.peer_id); + req.msg_id = messageObject.getId(); + req.button_id = btn.button_id; + req.requested_peer = MessagesController.getInstance(currentAccount).getInputPeer(dids.get(0).dialogId); + ConnectionsManager.getInstance(currentAccount).sendRequest(req, null); + } + fragment.finishFragment(); + return true; + } + }); + parentFragment.presentFragment(fragment); + return false; + } else { + FileLog.e("button.peer_type is null"); + } } return true; } @@ -8385,7 +8988,7 @@ public boolean isUserSelf() { @Override public boolean onBackspace() { - if (messageEditText.length() == 0) { + if (messageEditText == null || messageEditText.length() == 0) { return false; } messageEditText.dispatchKeyEvent(new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DEL)); @@ -8394,6 +8997,9 @@ public boolean onBackspace() { @Override public void onEmojiSelected(String symbol) { + if (messageEditText == null) { + return; + } int i = messageEditText.getSelectionEnd(); if (i < 0) { i = 0; @@ -8413,6 +9019,9 @@ public void onEmojiSelected(String symbol) { public void onCustomEmojiSelected(long documentId, TLRPC.Document document, String emoticon, boolean isRecent) { AndroidUtilities.runOnUIThread(() -> { + if (messageEditText == null) { + return; + } int i = messageEditText.getSelectionEnd(); if (i < 0) { i = 0; @@ -8758,7 +9367,7 @@ public void onDrag(int offset) { } private boolean allowDragging() { - return stickersTabOpen && !(!stickersExpanded && messageEditText.length() > 0) && emojiView.areThereAnyStickers() && !waitingForKeyboardOpen; + return stickersTabOpen && !(!stickersExpanded && messageEditText != null && messageEditText.length() > 0) && emojiView.areThereAnyStickers() && !waitingForKeyboardOpen; } }); sizeNotifierLayout.addView(emojiView, sizeNotifierLayout.getChildCount() - 5); @@ -8873,6 +9482,7 @@ private void showPopup(int show, int contentType, boolean allowAnimation) { botKeyboardView.setVisibility(VISIBLE); currentView = botKeyboardView; animatingContentType = POPUP_CONTENT_BOT_KEYBOARD; + MessagesController.getMainSettings(currentAccount).edit().remove("closed_botkeyboard_" + getTopicKeyString()).apply(); } currentPopupContentType = contentType; @@ -8892,9 +9502,11 @@ private void showPopup(int show, int contentType, boolean allowAnimation) { if (botKeyboardView != null) { botKeyboardView.setPanelHeight(currentHeight); } - FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) currentView.getLayoutParams(); - 
layoutParams.height = currentHeight; - currentView.setLayoutParams(layoutParams); + if (currentView != null) { + FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) currentView.getLayoutParams(); + layoutParams.height = currentHeight; + currentView.setLayoutParams(layoutParams); + } if (!AndroidUtilities.isInMultiwindow) { AndroidUtilities.hideKeyboard(messageEditText); } @@ -9024,12 +9636,8 @@ public void onAnimationEnd(Animator animation) { } botKeyboardViewVisible = false; } - if (sizeNotifierLayout != null) { - if (!SharedConfig.smoothKeyboard && show == 0) { - emojiPadding = 0; - sizeNotifierLayout.requestLayout(); - onWindowSizeChanged(); - } + if (contentType == POPUP_CONTENT_BOT_KEYBOARD && botButtonsMessageObject != null) { + MessagesController.getMainSettings(currentAccount).edit().putInt("closed_botkeyboard_" + getTopicKeyString(), botButtonsMessageObject.getId()).apply(); } updateBotButton(true); } @@ -9044,6 +9652,13 @@ public void onAnimationEnd(Animator animation) { checkBotMenu(); } + private String getTopicKeyString() { + if (parentFragment != null && parentFragment.isTopic) { + return dialog_id + "_" + parentFragment.getTopicId(); + } + return "" + dialog_id; + } + private void setEmojiButtonImage(boolean byOpen, boolean animated) { if (emojiButton == null) { return; @@ -9057,6 +9672,9 @@ private void setEmojiButtonImage(boolean byOpen, boolean animated) { } ChatActivityEnterViewAnimatedIconView.State nextIcon; if (byOpen && currentPopupContentType == 0) { + if (!sendPlainEnabled) { + return; + } nextIcon = ChatActivityEnterViewAnimatedIconView.State.KEYBOARD; } else { int currentPage; @@ -9077,6 +9695,12 @@ private void setEmojiButtonImage(boolean byOpen, boolean animated) { } } } + if (!sendPlainEnabled && nextIcon == ChatActivityEnterViewAnimatedIconView.State.SMILE) { + nextIcon = ChatActivityEnterViewAnimatedIconView.State.GIF; + } else if (!stickersEnabled && nextIcon != ChatActivityEnterViewAnimatedIconView.State.SMILE) { + nextIcon = ChatActivityEnterViewAnimatedIconView.State.SMILE; + } + emojiButton.setState(nextIcon, animated); onEmojiIconChanged(nextIcon); } @@ -9097,15 +9721,20 @@ public boolean hidePopup(boolean byBackButton) { public boolean hidePopup(boolean byBackButton, boolean forceAnimate) { if (isPopupShowing()) { - if (currentPopupContentType == POPUP_CONTENT_BOT_KEYBOARD && byBackButton && botButtonsMessageObject != null) { - return false; + if (currentPopupContentType == POPUP_CONTENT_BOT_KEYBOARD && botReplyMarkup != null && byBackButton && botButtonsMessageObject != null) { + if (botReplyMarkup.is_persistent) { + return false; + } + MessagesController.getMainSettings(currentAccount).edit().putInt("closed_botkeyboard_" + getTopicKeyString(), botButtonsMessageObject.getId()).apply(); } if (byBackButton && searchingType != 0 || forceAnimate) { setSearchingTypeInternal(0, true); if (emojiView != null) { emojiView.closeSearch(true); } - messageEditText.requestFocus(); + if (messageEditText != null) { + messageEditText.requestFocus(); + } setStickersExpanded(false, true, false); if (emojiTabOpen) { checkSendButton(true); @@ -9114,7 +9743,9 @@ public boolean hidePopup(boolean byBackButton, boolean forceAnimate) { if (searchingType != 0) { setSearchingTypeInternal(0, false); emojiView.closeSearch(false); - messageEditText.requestFocus(); + if (messageEditText != null) { + messageEditText.requestFocus(); + } } showPopup(0, 0); } @@ -9167,7 +9798,9 @@ private void openKeyboardInternal() { return; } 
showPopup(AndroidUtilities.usingHardwareInput || AndroidUtilities.isInMultiwindow || parentFragment != null && parentFragment.isInBubbleMode() || isPaused ? 0 : 2, 0); - messageEditText.requestFocus(); + if (messageEditText != null) { + messageEditText.requestFocus(); + } AndroidUtilities.showKeyboard(messageEditText); if (isPaused) { showKeyboardOnResume = true; @@ -9201,7 +9834,7 @@ public void openKeyboard() { if (hasBotWebView() && botCommandsMenuIsShowing()) { return; } - if (!AndroidUtilities.showKeyboard(messageEditText)) { + if (messageEditText != null && !AndroidUtilities.showKeyboard(messageEditText)) { messageEditText.clearFocus(); messageEditText.requestFocus(); } @@ -9234,7 +9867,9 @@ protected void onSizeChanged(int w, int h, int oldw, int oldh) { emojiView.closeSearch(false); setStickersExpanded(false, false, false); } - videoTimelineView.clearFrames(); + if (videoTimelineView != null) { + videoTimelineView.clearFrames(); + } } public boolean isStickersExpanded() { @@ -9330,7 +9965,7 @@ public void onAnimationEnd(Animator animation) { checkBotMenu(); if (keyboardVisible && isPopupShowing() && stickersExpansionAnim == null) { showPopup(0, currentPopupContentType); - } else if (!keyboardVisible && !isPopupShowing() && botButtonsMessageObject != null && replyingMessageObject != botButtonsMessageObject && (!hasBotWebView() || !botCommandsMenuIsShowing()) && TextUtils.isEmpty(messageEditText.getText()) && botReplyMarkup != null && !botReplyMarkup.rows.isEmpty()) { + } else if (!keyboardVisible && !isPopupShowing() && botButtonsMessageObject != null && replyingMessageObject != botButtonsMessageObject && (!hasBotWebView() || !botCommandsMenuIsShowing()) && (messageEditText == null || TextUtils.isEmpty(messageEditText.getText())) && botReplyMarkup != null && !botReplyMarkup.rows.isEmpty()) { if (sizeNotifierLayout.adjustPanLayoutHelper.animationInProgress()) { sizeNotifierLayout.adjustPanLayoutHelper.stopTransition(); } else { @@ -9442,16 +10077,21 @@ public void didReceivedNotification(int id, int account, Object... args) { } boolean audio = (Boolean) args[1]; isInVideoMode = !audio; - if (!NekoConfig.useChatAttachMediaMenu.Bool()) + if (audioVideoSendButton != null && !NekoConfig.useChatAttachMediaMenu.Bool()) { audioVideoSendButton.setState(audio ? ChatActivityEnterViewAnimatedIconView.State.VOICE : ChatActivityEnterViewAnimatedIconView.State.VIDEO, true); + } if (!recordingAudioVideo) { recordingAudioVideo = true; updateRecordInterface(RECORD_STATE_ENTER); - } else { + } else if (recordCircle != null) { recordCircle.showWaves(true, true); } - recordTimerView.start(); - recordDot.enterAnimation = false; + if (recordTimerView != null) { + recordTimerView.start(); + } + if (recordDot != null) { + recordDot.enterAnimation = false; + } } else if (id == NotificationCenter.audioDidSent) { int guid = (Integer) args[0]; if (guid != recordingGuid) { @@ -9464,16 +10104,19 @@ public void didReceivedNotification(int id, int account, Object... 
args) { audioToSendPath = (String) args[2]; ArrayList keyframes = (ArrayList) args[3]; - videoTimelineView.setVideoPath(audioToSendPath); - videoTimelineView.setKeyframes(keyframes); - videoTimelineView.setVisibility(VISIBLE); - videoTimelineView.setMinProgressDiff(1000.0f / videoToSendMessageObject.estimatedDuration); + if (videoTimelineView != null) { + videoTimelineView.setVideoPath(audioToSendPath); + videoTimelineView.setKeyframes(keyframes); + videoTimelineView.setVisibility(VISIBLE); + videoTimelineView.setMinProgressDiff(1000.0f / videoToSendMessageObject.estimatedDuration); + } updateRecordInterface(RECORD_STATE_PREPARING); checkSendButton(false); } else { audioToSend = (TLRPC.TL_document) args[1]; audioToSendPath = (String) args[2]; if (audioToSend != null) { + createRecordAudioPanel(); if (recordedAudioPanel == null) { return; } @@ -9538,9 +10181,15 @@ public void didReceivedNotification(int id, int account, Object... args) { } } else if (id == NotificationCenter.messagePlayingDidReset) { if (audioToSendMessageObject != null && !MediaController.getInstance().isPlayingMessage(audioToSendMessageObject)) { - playPauseDrawable.setIcon(MediaActionDrawable.ICON_PLAY, true); - recordedAudioPlayButton.setContentDescription(LocaleController.getString("AccActionPlay", R.string.AccActionPlay)); - recordedAudioSeekBar.setProgress(0); + if (playPauseDrawable != null) { + playPauseDrawable.setIcon(MediaActionDrawable.ICON_PLAY, true); + } + if (recordedAudioPlayButton != null) { + recordedAudioPlayButton.setContentDescription(LocaleController.getString("AccActionPlay", R.string.AccActionPlay)); + } + if (recordedAudioSeekBar != null) { + recordedAudioSeekBar.setProgress(0); + } } } else if (id == NotificationCenter.messagePlayingProgressDidChanged) { Integer mid = (Integer) args[0]; @@ -9594,6 +10243,8 @@ public void didReceivedNotification(int id, int account, Object... 
args) { updateBotButton(false); } + } else if (id == NotificationCenter.didUpdatePremiumGiftFieldIcon) { + updateGiftButton(true); } } @@ -9652,10 +10303,13 @@ public void onAnimationEnd(Animator animation) { } else { emojiView.getLayoutParams().height = stickersExpandedHeight; sizeNotifierLayout.requestLayout(); - int start = messageEditText.getSelectionStart(); - int end = messageEditText.getSelectionEnd(); - messageEditText.setText(messageEditText.getText()); // dismiss action mode, if any - messageEditText.setSelection(start, end); + int start = 0, end = 0; + if (messageEditText != null) { + start = messageEditText.getSelectionStart(); + end = messageEditText.getSelectionEnd(); + messageEditText.setText(messageEditText.getText()); // dismiss action mode, if any + messageEditText.setSelection(start, end); + } AnimatorSet anims = new AnimatorSet(); anims.playTogether( ObjectAnimator.ofInt(this, roundedTranslationYProperty, -(stickersExpandedHeight - origHeight)), @@ -9709,10 +10363,13 @@ public void setStickersExpanded(boolean expanded, boolean animated, boolean byDr emojiView.getLayoutParams().height = stickersExpandedHeight; sizeNotifierLayout.requestLayout(); sizeNotifierLayout.setForeground(new ScrimDrawable()); - int start = messageEditText.getSelectionStart(); - int end = messageEditText.getSelectionEnd(); - messageEditText.setText(messageEditText.getText()); // dismiss action mode, if any - messageEditText.setSelection(start, end); + int start = 0, end = 0; + if (messageEditText != null) { + start = messageEditText.getSelectionStart(); + end = messageEditText.getSelectionEnd(); + messageEditText.setText(messageEditText.getText()); // dismiss action mode, if any + messageEditText.setSelection(start, end); + } if (animated) { AnimatorSet anims = new AnimatorSet(); anims.playTogether( @@ -9805,10 +10462,12 @@ public void onAnimationEnd(Animator animation) { stickersArrow.setAnimationProgress(0); } } - if (expanded) { - expandStickersButton.setContentDescription(LocaleController.getString("AccDescrCollapsePanel", R.string.AccDescrCollapsePanel)); - } else { - expandStickersButton.setContentDescription(LocaleController.getString("AccDescrExpandPanel", R.string.AccDescrExpandPanel)); + if (expandStickersButton != null) { + if (stickersExpanded) { + expandStickersButton.setContentDescription(LocaleController.getString("AccDescrCollapsePanel", R.string.AccDescrCollapsePanel)); + } else { + expandStickersButton.setContentDescription(LocaleController.getString("AccDescrExpandPanel", R.string.AccDescrExpandPanel)); + } } } @@ -9875,7 +10534,7 @@ public void draw(Canvas canvas) { return; } paint.setAlpha(Math.round(102 * stickersExpansionProgress)); - canvas.drawRect(0, 0, getWidth(), emojiView.getY() - getHeight() + Theme.chat_composeShadowDrawable.getIntrinsicHeight(), paint); + canvas.drawRect(0, 0, getWidth(), emojiView.getY() - getHeight() + Theme.chat_composeShadowDrawable.getIntrinsicHeight() + (messageEditText == null ? 0 : messageEditText.getOffsetY()), paint); } @Override @@ -10106,7 +10765,7 @@ protected void onDraw(Canvas canvas) { if (cancelToProgress != 1) { int slideDelta = (int) (-getMeasuredWidth() / 4 * (1f - slideProgress)); canvas.save(); - canvas.clipRect(recordTimerView.getLeftProperty() + AndroidUtilities.dp(4), 0, getMeasuredWidth(), getMeasuredHeight()); + canvas.clipRect((recordTimerView == null ? 0 : recordTimerView.getLeftProperty()) + AndroidUtilities.dp(4), 0, getMeasuredWidth(), getMeasuredHeight()); canvas.save(); canvas.translate((int) x - (smallSize ? 
AndroidUtilities.dp(7) : AndroidUtilities.dp(10)) + slideDelta, offsetY);
            canvas.drawPath(arrowPath, arrowPaint);
@@ -10183,27 +10842,12 @@ public class TimerView extends View {

        float replaceTransition;

-       final TextPaint textPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG);
+       TextPaint textPaint;
        final float replaceDistance = AndroidUtilities.dp(15);
        float left;

-       public ArrayList timestamps = new ArrayList();
-
-
        public TimerView(Context context) {
            super(context);
-           textPaint.setTextSize(AndroidUtilities.dp(15));
-           textPaint.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf"));
-           updateColors();
-
-           setOnClickListener((v) -> {
-               final String current = oldString.substring(0, oldString.indexOf(','));
-               timestamps.add(current);
-               Toast.makeText(
-                   parentActivity,
-                   "Saved timestamp at " + current + ".",
-                   Toast.LENGTH_SHORT).show();
-           });
        }

        public void start() {
@@ -10211,8 +10855,6 @@ public void start() {
            startTime = System.currentTimeMillis();
            lastSendTypingTime = startTime;
            invalidate();
-           timestamps.clear();
-           timestamps.add("0:00");
        }

        public void stop() {
@@ -10229,6 +10871,12 @@ public void stop() {
        @SuppressLint("DrawAllocation")
        @Override
        protected void onDraw(Canvas canvas) {
+           if (textPaint == null) {
+               textPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG);
+               textPaint.setTextSize(AndroidUtilities.dp(15));
+               textPaint.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf"));
+               textPaint.setColor(getThemedColor(Theme.key_chat_recordTime));
+           }
            long currentTimeMillis = System.currentTimeMillis();
            long t = isRunning ? (currentTimeMillis - startTime) : stopTime - startTime;
            long time = t / 1000;
@@ -10374,7 +11022,9 @@ protected void onDraw(Canvas canvas) {
        }

        public void updateColors() {
-           textPaint.setColor(getThemedColor(Theme.key_chat_recordTime));
+           if (textPaint != null) {
+               textPaint.setColor(getThemedColor(Theme.key_chat_recordTime));
+           }
        }

        public float getLeftProperty() {
@@ -10392,7 +11042,8 @@ protected boolean pannelAnimationEnabled() {
        return true;
    }

-   public RecordCircle getRecordCicle() {
+   @Nullable
+   public RecordCircle getRecordCircle() {
        return recordCircle;
    }
@@ -10403,41 +11054,24 @@ public RecordCircle getRecordCicle() {
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        if (botCommandsMenuButton != null && botCommandsMenuButton.getTag() != null) {
            botCommandsMenuButton.measure(widthMeasureSpec, heightMeasureSpec);
-           ((MarginLayoutParams) emojiButton.getLayoutParams()).leftMargin = AndroidUtilities.dp(10) + botCommandsMenuButton.getMeasuredWidth();
-           ((MarginLayoutParams) messageEditText.getLayoutParams()).leftMargin = AndroidUtilities.dp(57) + botCommandsMenuButton.getMeasuredWidth();
+           ((MarginLayoutParams) emojiButton.getLayoutParams()).leftMargin = AndroidUtilities.dp(10) + (botCommandsMenuButton == null ? 0 : botCommandsMenuButton.getMeasuredWidth());
+           if (messageEditText != null) {
+               ((MarginLayoutParams) messageEditText.getLayoutParams()).leftMargin = AndroidUtilities.dp(57) + (botCommandsMenuButton == null ? 0 : botCommandsMenuButton.getMeasuredWidth());
+           }
        } else if (senderSelectView != null && senderSelectView.getVisibility() == View.VISIBLE) {
            int width = senderSelectView.getLayoutParams().width, height = senderSelectView.getLayoutParams().height;
            senderSelectView.measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));
            ((MarginLayoutParams) emojiButton.getLayoutParams()).leftMargin = AndroidUtilities.dp(16) + width;
-           ((MarginLayoutParams) messageEditText.getLayoutParams()).leftMargin = AndroidUtilities.dp(63) + width;
+           if (messageEditText != null) {
+               ((MarginLayoutParams) messageEditText.getLayoutParams()).leftMargin = AndroidUtilities.dp(63) + width;
+           }
        } else {
            ((MarginLayoutParams) emojiButton.getLayoutParams()).leftMargin = AndroidUtilities.dp(3);
-           ((MarginLayoutParams) messageEditText.getLayoutParams()).leftMargin = AndroidUtilities.dp(50);
-       }
-       if (botCommandsMenuContainer != null) {
-           int padding;
-           if (botCommandsAdapter.getItemCount() > 4) {
-               padding = Math.max(0, sizeNotifierLayout.getMeasuredHeight() - AndroidUtilities.dp(8 + 36 * 4.3f));
-           } else {
-               padding = Math.max(0, sizeNotifierLayout.getMeasuredHeight() - AndroidUtilities.dp(8 + 36 * Math.max(1, Math.min(4, botCommandsAdapter.getItemCount()))));
-           }
-
-           if (botCommandsMenuContainer.listView.getPaddingTop() != padding) {
-               botCommandsMenuContainer.listView.setTopGlowOffset(padding);
-               if (botCommandLastPosition == -1 && botCommandsMenuContainer.getVisibility() == View.VISIBLE && botCommandsMenuContainer.listView.getLayoutManager() != null) {
-                   LinearLayoutManager layoutManager = (LinearLayoutManager) botCommandsMenuContainer.listView.getLayoutManager();
-                   int p = layoutManager.findFirstVisibleItemPosition();
-                   if (p >= 0) {
-                       View view = layoutManager.findViewByPosition(p);
-                       if (view != null) {
-                           botCommandLastPosition = p;
-                           botCommandLastTop = view.getTop() - botCommandsMenuContainer.listView.getPaddingTop();
-                       }
-                   }
-               }
-               botCommandsMenuContainer.listView.setPadding(0, padding, 0, AndroidUtilities.dp(8));
+           if (messageEditText != null) {
+               ((MarginLayoutParams) messageEditText.getLayoutParams()).leftMargin = AndroidUtilities.dp(50);
            }
        }
+       updateBotCommandsMenuContainerTopPadding();
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        if (botWebViewButton != null) {
@@ -10449,7 +11083,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        }
        if (botWebViewMenuContainer != null) {
            MarginLayoutParams params = (MarginLayoutParams) botWebViewMenuContainer.getLayoutParams();
-           params.bottomMargin = messageEditText.getMeasuredHeight();
+           params.bottomMargin = messageEditText == null ? 0 : messageEditText.getMeasuredHeight();
            measureChild(botWebViewMenuContainer, widthMeasureSpec, heightMeasureSpec);
        }
    }
@@ -10457,7 +11091,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        super.onLayout(changed, left, top, right, bottom);
-       if (botCommandLastPosition != -1) {
+       if (botCommandLastPosition != -1 && botCommandsMenuContainer != null) {
            LinearLayoutManager layoutManager = (LinearLayoutManager) botCommandsMenuContainer.listView.getLayoutManager();
            if (layoutManager != null) {
                layoutManager.scrollToPositionWithOffset(botCommandLastPosition, botCommandLastTop);
@@ -10468,14 +11102,20 @@ protected void onLayout(boolean changed, int left, int top, int right, int botto

    private void beginDelayedTransition() {
        animationParamsX.put(emojiButton, emojiButton.getX());
-       animationParamsX.put(messageEditText, messageEditText.getX());
+       if (messageEditText != null) {
+           animationParamsX.put(messageEditText, messageEditText.getX());
+       }
    }

+   private LongSparseArray lastBotInfo;
+
    public void setBotInfo(LongSparseArray botInfo) {
        setBotInfo(botInfo, true);
    }

    public void setBotInfo(LongSparseArray botInfo, boolean animate) {
+       lastBotInfo = botInfo;
+
        if (botInfo.size() == 1 && botInfo.valueAt(0).user_id == dialog_id) {
            TLRPC.BotInfo info = botInfo.valueAt(0);
            TLRPC.BotMenuButton menuButton = info.menu_button;
@@ -10505,12 +11145,18 @@ public boolean botCommandsMenuIsShowing() {
    }

    public void hideBotCommands() {
-       botCommandsMenuButton.setOpened(false);
+       if (botCommandsMenuButton != null) {
+           botCommandsMenuButton.setOpened(false);
+       }
        if (hasBotWebView()) {
-           botWebViewMenuContainer.dismiss();
+           if (botWebViewMenuContainer != null) {
+               botWebViewMenuContainer.dismiss();
+           }
        } else {
-           botCommandsMenuContainer.dismiss();
+           if (botCommandsMenuContainer != null) {
+               botCommandsMenuContainer.dismiss();
+           }
        }
    }
diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlert.java
index 446f47a94c..2528c7beaf 100644
--- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlert.java
+++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlert.java
@@ -16,6 +16,7 @@ import android.animation.ValueAnimator;
import android.annotation.SuppressLint;
import android.content.Context;
+import android.content.ContextWrapper;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
@@ -71,6 +72,7 @@ import org.telegram.messenger.AndroidUtilities;
import org.telegram.messenger.ChatObject;
import org.telegram.messenger.ContactsController;
+import org.telegram.messenger.DialogObject;
import org.telegram.messenger.DocumentObject;
import org.telegram.messenger.Emoji;
import org.telegram.messenger.ImageLocation;
@@ -82,7 +84,6 @@ import org.telegram.messenger.NotificationCenter;
import org.telegram.messenger.R;
import org.telegram.messenger.SendMessagesHelper;
-import org.telegram.messenger.SharedConfig;
import org.telegram.messenger.UserConfig;
import org.telegram.messenger.UserObject;
import org.telegram.tgnet.ConnectionsManager;
@@ -96,10 +97,13 @@ import org.telegram.ui.ActionBar.AlertDialog;
import org.telegram.ui.ActionBar.BaseFragment;
import org.telegram.ui.ActionBar.BottomSheet;
+import org.telegram.ui.ActionBar.INavigationLayout;
import org.telegram.ui.ActionBar.Theme;
import
org.telegram.ui.ActionBar.ThemeDescription; import org.telegram.ui.BasePermissionsActivity; import org.telegram.ui.ChatActivity; +import org.telegram.ui.DialogsActivity; +import org.telegram.ui.LaunchActivity; import org.telegram.ui.PassportActivity; import org.telegram.ui.PhotoPickerActivity; import org.telegram.ui.PhotoPickerSearchActivity; @@ -127,6 +131,7 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N public ChatActivity.ThemeDelegate parentThemeDelegate; private final NumberTextView captionLimitView; + public boolean forUser; private int currentLimit; private int codepointCount; @@ -135,6 +140,10 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N private boolean isEmojiPicker = false; private ImageUpdater.AvatarFor setAvatarFor; + private PasscodeView passcodeView; + private ChatAttachRestrictedLayout restrictedLayout; + public ImageUpdater parentImageUpdater; + public void setCanOpenPreview(boolean canOpenPreview) { this.canOpenPreview = canOpenPreview; selectedArrowImageView.setVisibility(canOpenPreview && avatarPicker != 2 ? View.VISIBLE : View.GONE); @@ -210,9 +219,56 @@ public void onWebAppExpand() { } } + @Override + public void onWebAppSwitchInlineQuery(TLRPC.User botUser, String query, List chatTypes) { + if (chatTypes.isEmpty()) { + if (baseFragment instanceof ChatActivity) { + ((ChatActivity) baseFragment).getChatActivityEnterView().setFieldText("@" + UserObject.getPublicUsername(botUser) + " " + query); + } + dismiss(true); + } else { + Bundle args = new Bundle(); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_START_ATTACH_BOT); + args.putBoolean("onlySelect", true); + + args.putBoolean("allowGroups", chatTypes.contains("groups")); + args.putBoolean("allowUsers", chatTypes.contains("users")); + args.putBoolean("allowChannels", chatTypes.contains("channels")); + args.putBoolean("allowBots", chatTypes.contains("bots")); + + DialogsActivity dialogsActivity = new DialogsActivity(args); + OverlayActionBarLayoutDialog overlayActionBarLayoutDialog = new OverlayActionBarLayoutDialog(getContext(), resourcesProvider); + dialogsActivity.setDelegate((fragment, dids, message1, param, topicsFragment) -> { + long did = dids.get(0).dialogId; + + Bundle args1 = new Bundle(); + args1.putBoolean("scrollToTopOnResume", true); + if (DialogObject.isEncryptedDialog(did)) { + args1.putInt("enc_id", DialogObject.getEncryptedChatId(did)); + } else if (DialogObject.isUserDialog(did)) { + args1.putLong("user_id", did); + } else { + args1.putLong("chat_id", -did); + } + args1.putString("inline_query_input", "@" + UserObject.getPublicUsername(botUser) + " " + query); + + BaseFragment lastFragment = baseFragment; + if (MessagesController.getInstance(currentAccount).checkCanOpenChat(args1, lastFragment)) { + overlayActionBarLayoutDialog.dismiss(); + dismiss(true); + + lastFragment.presentFragment(new INavigationLayout.NavigationParams(new ChatActivity(args1)).setRemoveLast(true)); + } + return true; + }); + overlayActionBarLayoutDialog.show(); + overlayActionBarLayoutDialog.addFragment(dialogsActivity); + } + } + @Override public void onSetupMainButton(boolean isVisible, boolean isActive, String text, int color, int textColor, boolean isProgressVisible) { - if (currentAttachLayout != webViewLayout || !webViewLayout.isBotButtonAvailable()) { + if (currentAttachLayout != webViewLayout || !webViewLayout.isBotButtonAvailable() && startCommand == null) { return; } botMainButtonTextView.setClickable(isActive); @@ -331,6 +387,10 @@ 
public ImageUpdater.AvatarFor getAvatarFor() { return setAvatarFor; } + public void setImageUpdater(ImageUpdater imageUpdater) { + parentImageUpdater = imageUpdater; + } + public interface ChatAttachViewDelegate { void didPressedButton(int button, boolean arg, boolean notify, int scheduleDate, boolean forceDocument); @@ -499,7 +559,7 @@ void onOpenAnimationEnd() { } - void onInit(boolean mediaEnabled) { + void onInit(boolean hasVideo, boolean hasPhoto, boolean hasDocuments) { } @@ -605,6 +665,7 @@ public void onPanTransitionEnd() { private FrameLayout frameLayout2; protected EditTextEmoji commentTextView; + private int[] commentTextViewLocation = new int[2]; private FrameLayout writeButtonContainer; private ImageView writeButton; private Drawable writeButtonDrawable; @@ -665,8 +726,12 @@ public void onPanTransitionEnd() { protected int currentAccount = UserConfig.selectedAccount; - private boolean mediaEnabled = true; + private boolean documentsEnabled = true; + private boolean photosEnabled = true; + private boolean videosEnabled = true; + private boolean musicEnabled = true; private boolean pollsEnabled = true; + private boolean plainTextEnabled = true; protected int maxSelectedPhotos = -1; protected boolean allowOrder = true; @@ -1041,7 +1106,7 @@ public void setAttachBot(TLRPC.User user, TLRPC.TL_attachMenuBot bot) { } imageView.setSize(AndroidUtilities.dp(28), AndroidUtilities.dp(28)); - imageView.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_attachContactIcon), PorterDuff.Mode.SRC_IN)); + imageView.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_attachIcon), PorterDuff.Mode.SRC_IN)); attachMenuBot = bot; selector.setVisibility(GONE); updateMargins(); @@ -1080,7 +1145,7 @@ public ChatAttachAlert(Context context, final BaseFragment parentFragment, boole private Bulletin.Delegate bulletinDelegate = new Bulletin.Delegate() { @Override public int getBottomOffset(int tag) { - return getHeight() - frameLayout2.getTop(); + return getHeight() - frameLayout2.getTop() + AndroidUtilities.dp(52); } }; private int lastNotifyWidth; @@ -1150,6 +1215,8 @@ protected void onPanTranslationUpdate(float y, float progress, boolean keyboardV invalidate(); frameLayout2.invalidate(); + updateCommentTextViewPosition(); + if (currentAttachLayout != null) { currentAttachLayout.onContainerTranslationUpdated(currentPanTranslationY); } @@ -1231,7 +1298,7 @@ private void onMeasureInternal(int widthMeasureSpec, int heightMeasureSpec) { setMeasuredDimension(widthSize, heightSize); widthSize -= backgroundPaddingLeft * 2; - int keyboardSize = SharedConfig.smoothKeyboard ? 
0 : measureKeyboardHeight(); + int keyboardSize = 0; if (!commentTextView.isWaitingForKeyboardOpen() && keyboardSize <= AndroidUtilities.dp(20) && !commentTextView.isPopupShowing() && !commentTextView.isAnimatePopupClosing()) { ignoreLayout = true; commentTextView.hideEmojiView(); @@ -1240,7 +1307,7 @@ private void onMeasureInternal(int widthMeasureSpec, int heightMeasureSpec) { if (keyboardSize <= AndroidUtilities.dp(20)) { int paddingBottom; - if (SharedConfig.smoothKeyboard && keyboardVisible) { + if (keyboardVisible) { paddingBottom = 0; } else { paddingBottom = commentTextView.getEmojiPadding(); @@ -1298,7 +1365,7 @@ protected void onLayout(boolean changed, int l, int t, int r, int b) { int keyboardSize = measureKeyboardHeight(); int paddingBottom = getPaddingBottom(); - if (SharedConfig.smoothKeyboard && keyboardVisible) { + if (keyboardVisible) { paddingBottom += 0; } else { paddingBottom += keyboardSize <= AndroidUtilities.dp(20) && !AndroidUtilities.isInMultiwindow && !AndroidUtilities.isTablet() ? commentTextView.getEmojiPadding() : 0; @@ -1650,14 +1717,14 @@ protected void onAttachedToWindow() { adjustPanLayoutHelper.setResizableView(this); adjustPanLayoutHelper.onAttach(); commentTextView.setAdjustPanLayoutHelper(adjustPanLayoutHelper); - Bulletin.addDelegate(this, bulletinDelegate); + // Bulletin.addDelegate(this, bulletinDelegate); } @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); adjustPanLayoutHelper.onDetach(); - Bulletin.removeDelegate(this); + // Bulletin.removeDelegate(this); } }; sizeNotifierFrameLayout.setDelegate(new SizeNotifierFrameLayout.SizeNotifierFrameLayoutDelegate() { @@ -1942,6 +2009,9 @@ public void setTranslationY(float translationY) { if (view instanceof AttachButton) { int num = (Integer) view.getTag(); if (num == 1) { + if (!photosEnabled && !videosEnabled) { + showLayout(restrictedLayout = new ChatAttachRestrictedLayout(1, this, getContext(), resourcesProvider)); + } showLayout(photoLayout); } else if (num == 3) { if (Build.VERSION.SDK_INT >= 23 && baseFragment.getParentActivity().checkSelfPermission(Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { @@ -1956,7 +2026,7 @@ public void setTranslationY(float translationY) { } openDocumentsLayout(true); } else if (num == 5) { - if (Build.VERSION.SDK_INT >= 23) { + if (Build.VERSION.SDK_INT >= 23 && plainTextEnabled) { if (baseFragment.getParentActivity().checkSelfPermission(Manifest.permission.READ_CONTACTS) != PackageManager.PERMISSION_GRANTED) { baseFragment.getParentActivity().requestPermissions(new String[]{Manifest.permission.READ_CONTACTS}, BasePermissionsActivity.REQUEST_CODE_ATTACH_CONTACT); return; @@ -1967,17 +2037,27 @@ public void setTranslationY(float translationY) { if (!AndroidUtilities.isMapsInstalled(baseFragment)) { return; } - if (locationLayout == null) { - layouts[5] = locationLayout = new ChatAttachAlertLocationLayout(this, getContext(), resourcesProvider); - locationLayout.setDelegate((location, live, notify, scheduleDate) -> ((ChatActivity) baseFragment).didSelectLocation(location, live, notify, scheduleDate)); + if (!plainTextEnabled) { + restrictedLayout = new ChatAttachRestrictedLayout(6, this, getContext(), resourcesProvider); + showLayout(restrictedLayout); + } else { + if (locationLayout == null) { + layouts[5] = locationLayout = new ChatAttachAlertLocationLayout(this, getContext(), resourcesProvider); + locationLayout.setDelegate((location, live, notify, scheduleDate) -> ((ChatActivity) 
baseFragment).didSelectLocation(location, live, notify, scheduleDate)); + } + showLayout(locationLayout); } - showLayout(locationLayout); } else if (num == 9) { - if (pollLayout == null) { - layouts[1] = pollLayout = new ChatAttachAlertPollLayout(this, getContext(), resourcesProvider); - pollLayout.setDelegate((poll, params, notify, scheduleDate) -> ((ChatActivity) baseFragment).sendPoll(poll, params, notify, scheduleDate)); + if (!pollsEnabled) { + restrictedLayout = new ChatAttachRestrictedLayout(9, this, getContext(), resourcesProvider); + showLayout(restrictedLayout); + } else { + if (pollLayout == null) { + layouts[1] = pollLayout = new ChatAttachAlertPollLayout(this, getContext(), resourcesProvider); + pollLayout.setDelegate((poll, params, notify, scheduleDate) -> ((ChatActivity) baseFragment).sendPoll(poll, params, notify, scheduleDate)); + } + showLayout(pollLayout); } - showLayout(pollLayout); } else { delegate.didPressedButton((Integer) view.getTag(), true, true, 0, false); } @@ -2135,7 +2215,13 @@ protected void dispatchDraw(Canvas canvas) { float dy = (messageEditTextPredrawHeigth - editText.getMeasuredHeight()) + (messageEditTextPredrawScrollY - editText.getScrollY()); editText.setOffsetY(editText.getOffsetY() - dy); ValueAnimator a = ValueAnimator.ofFloat(editText.getOffsetY(), 0); - a.addUpdateListener(animation -> editText.setOffsetY((float) animation.getAnimatedValue())); + a.addUpdateListener(animation -> { + editText.setOffsetY((float) animation.getAnimatedValue()); + updateCommentTextViewPosition(); + if (currentAttachLayout == photoLayout) { + photoLayout.onContainerTranslationUpdated(currentPanTranslationY); + } + }); if (messageEditTextAnimator != null) { messageEditTextAnimator.cancel(); } @@ -2162,6 +2248,7 @@ protected void onLineCountChanged(int oldLineCount, int newLineCount) { } chatActivityEnterViewAnimateFromTop = frameLayout2.getTop() + captionEditTextTopOffset; frameLayout2.invalidate(); + updateCommentTextViewPosition(); } @Override @@ -2179,6 +2266,12 @@ protected void bottomPanelTranslationY(float translation) { protected void closeParent() { ChatAttachAlert.super.dismiss(); } + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + updateCommentTextViewPosition(); + } }; commentTextView.setHint(LocaleController.getString("AddCaption", R.string.AddCaption)); commentTextView.onResume(); @@ -2509,6 +2602,43 @@ protected void onDraw(Canvas canvas) { checkColors(); navBarColorKey = null; } + + passcodeView = new PasscodeView(context); + containerView.addView(passcodeView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + } + + @Override + protected void onStart() { + super.onStart(); + + Context context = getContext(); + if (context instanceof ContextWrapper && !(context instanceof LaunchActivity)) { + context = ((ContextWrapper) context).getBaseContext(); + } + if (context instanceof LaunchActivity) { + ((LaunchActivity) context).addOverlayPasscodeView(passcodeView); + } + } + + @Override + protected void onStop() { + super.onStop(); + + Context context = getContext(); + if (context instanceof ContextWrapper && !(context instanceof LaunchActivity)) { + context = ((ContextWrapper) context).getBaseContext(); + } + if (context instanceof LaunchActivity) { + ((LaunchActivity) context).removeOverlayPasscodeView(passcodeView); + } + } + + public void updateCommentTextViewPosition() { + 
commentTextView.getLocationOnScreen(commentTextViewLocation); + } + + public int getCommentTextViewTop() { + return commentTextViewLocation[1]; } private void showCaptionLimitBulletin(BaseFragment parentFragment) { @@ -2681,7 +2811,9 @@ private void sendPressed(boolean notify, int scheduleDate) { private void showLayout(AttachAlertLayout layout) { long newId = selectedId; - if (layout == photoLayout) { + if (layout == restrictedLayout) { + newId = restrictedLayout.id; + } else if (layout == photoLayout) { newId = 1; } else if (layout == audioLayout) { newId = 3; @@ -2901,6 +3033,10 @@ public void onRequestPermissionsResultFragment(int requestCode, String[] permiss } private void openContactsLayout() { + if (!plainTextEnabled) { + restrictedLayout = new ChatAttachRestrictedLayout(5, this, getContext(), resourcesProvider); + showLayout(restrictedLayout); + } if (contactsLayout == null) { layouts[2] = contactsLayout = new ChatAttachAlertContactsLayout(this, getContext(), resourcesProvider); contactsLayout.setDelegate((user, notify, scheduleDate) -> ((ChatActivity) baseFragment).sendContact(user, notify, scheduleDate)); @@ -2909,6 +3045,12 @@ private void openContactsLayout() { } private void openAudioLayout(boolean show) { + if (!musicEnabled) { + if (show) { + restrictedLayout = new ChatAttachRestrictedLayout(3, this, getContext(), resourcesProvider); + showLayout(restrictedLayout); + } + } if (audioLayout == null) { layouts[3] = audioLayout = new ChatAttachAlertAudioLayout(this, getContext(), resourcesProvider); audioLayout.setDelegate((audios, caption, notify, scheduleDate) -> ((ChatActivity) baseFragment).sendAudio(audios, caption, notify, scheduleDate)); @@ -2924,6 +3066,12 @@ private void openAudioLayout(boolean show) { } private void openDocumentsLayout(boolean show) { + if (!documentsEnabled) { + if (show) { + restrictedLayout = new ChatAttachRestrictedLayout(4, this, getContext(), resourcesProvider); + showLayout(restrictedLayout); + } + } if (documentLayout == null) { int type = isEmojiPicker ? ChatAttachAlertDocumentLayout.TYPE_EMOJI : isSoundPicker ? 
ChatAttachAlertDocumentLayout.TYPE_RINGTONE : ChatAttachAlertDocumentLayout.TYPE_DEFAULT; layouts[4] = documentLayout = new ChatAttachAlertDocumentLayout(this, getContext(), type, resourcesProvider); @@ -3499,8 +3647,13 @@ private void updateActionBarVisibility(boolean show, boolean animated) { actionBarAnimation.cancel(); actionBarAnimation = null; } - boolean needsSearchItem = avatarSearch || currentAttachLayout == photoLayout && !menuShowed && baseFragment instanceof ChatActivity && ((ChatActivity) baseFragment).allowSendGifs(); - boolean needMoreItem = avatarPicker != 0 || !menuShowed && currentAttachLayout == photoLayout && mediaEnabled; + + boolean needsSearchItem = avatarSearch || currentAttachLayout == photoLayout && !menuShowed && baseFragment instanceof ChatActivity && ((ChatActivity) baseFragment).allowSendGifs() && ((ChatActivity) baseFragment).allowSendPhotos(); + boolean needMoreItem = avatarPicker != 0 || !menuShowed && currentAttachLayout == photoLayout && (photosEnabled || videosEnabled); + if (currentAttachLayout == restrictedLayout) { + needsSearchItem = false; + needMoreItem = false; + } if (show) { if (needsSearchItem) { searchItem.setVisibility(View.VISIBLE); @@ -3731,15 +3884,20 @@ public void init() { TLRPC.Chat chat = ((ChatActivity) baseFragment).getCurrentChat(); TLRPC.User user = ((ChatActivity) baseFragment).getCurrentUser(); if (chat != null) { - mediaEnabled = ChatObject.canSendMedia(chat); + // mediaEnabled = ChatObject.canSendMedia(chat); + photosEnabled = ChatObject.canSendPhoto(chat); + videosEnabled = ChatObject.canSendVideo(chat); + musicEnabled = ChatObject.canSendMusic(chat); pollsEnabled = ChatObject.canSendPolls(chat); + plainTextEnabled = ChatObject.canSendPlain(chat); + documentsEnabled = ChatObject.canSendDocument(chat); } else { pollsEnabled = user != null && user.bot; } } else { commentTextView.setVisibility(View.INVISIBLE); } - photoLayout.onInit(mediaEnabled); + photoLayout.onInit(videosEnabled, photosEnabled, documentsEnabled); commentTextView.hidePopup(true); enterCommentEventSent = false; setFocusable(false); @@ -3861,6 +4019,9 @@ public void setAvatarPicker(int type, boolean search) { } else { typeButtonsAvailable = true; } + if (photoLayout != null) { + photoLayout.updateAvatarPicker(); + } } public TextView getSelectedTextView() { @@ -4024,9 +4185,8 @@ public void notifyDataSetChanged() { musicButton = buttonsCount++; } } else { - if (mediaEnabled) { - galleryButton = buttonsCount++; - + galleryButton = buttonsCount++; + if (photosEnabled || videosEnabled) { if (baseFragment instanceof ChatActivity && !((ChatActivity) baseFragment).isInScheduleMode() && !((ChatActivity) baseFragment).isSecretChat()) { ChatActivity chatActivity = (ChatActivity) baseFragment; @@ -4041,18 +4201,20 @@ public void notifyDataSetChanged() { buttonsCount += attachMenuBots.size(); attachBotsEndRow = buttonsCount; } + } + documentButton = buttonsCount++; - documentButton = buttonsCount++; + if (plainTextEnabled) { + locationButton = buttonsCount++; } - locationButton = buttonsCount++; + if (pollsEnabled) { pollButton = buttonsCount++; - } else { + } else if (plainTextEnabled) { contactButton = buttonsCount++; } - if (mediaEnabled) { - musicButton = buttonsCount++; - } + musicButton = buttonsCount++; + TLRPC.User user = baseFragment instanceof ChatActivity ? 
((ChatActivity) baseFragment).getCurrentUser() : null; if (user != null && user.bot) { contactButton = buttonsCount++; @@ -4112,6 +4274,12 @@ private void removeFromRoot() { @Override public void onBackPressed() { + if (passcodeView.getVisibility() == View.VISIBLE) { + if (getOwnerActivity() != null) { + getOwnerActivity().finish(); + } + return; + } if (actionBar.isSearchFieldVisible()) { actionBar.closeSearchField(); return; @@ -4193,7 +4361,7 @@ public void dismiss() { dialog.show(); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(getThemedColor(Theme.key_dialogTextRed2)); + button.setTextColor(getThemedColor(Theme.key_dialogTextRed)); } return; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertBotWebViewLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertBotWebViewLayout.java index 20a0fda2ce..9e6a6dc58b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertBotWebViewLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertBotWebViewLayout.java @@ -696,7 +696,7 @@ public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float d ev.recycle(); return true; - } else if (webView != null && webView.canScrollHorizontally(distanceX >= 0 ? 1 : -1)) { + } else if (webView != null && webView.canScrollHorizontally(distanceX >= 0 ? 1 : -1) || Math.abs(distanceX) >= touchSlop && Math.abs(distanceX) * 1.5f >= Math.abs(distanceY)) { isSwipeDisallowed = true; } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertDocumentLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertDocumentLayout.java index 894f00671b..b31358841f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertDocumentLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertDocumentLayout.java @@ -1827,7 +1827,7 @@ private void searchGlobal(long dialogId, long minDate, long maxDate, FiltersView resultArray = new ArrayList<>(); ArrayList resultArrayNames = new ArrayList<>(); ArrayList encUsers = new ArrayList<>(); - accountInstance.getMessagesStorage().localSearch(0, query, resultArray, resultArrayNames, encUsers, -1); + accountInstance.getMessagesStorage().localSearch(0, query, resultArray, resultArrayNames, encUsers, null, -1); } final TLRPC.TL_messages_searchGlobal req = new TLRPC.TL_messages_searchGlobal(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertPhotoLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertPhotoLayout.java index 98c22991c2..3cf681f73e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertPhotoLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertPhotoLayout.java @@ -8,6 +8,8 @@ package org.telegram.ui.Components; +import static android.view.WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON; + import android.Manifest; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; @@ -20,11 +22,13 @@ import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Canvas; +import android.graphics.Color; import android.graphics.Outline; import android.graphics.Paint; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.Rect; +import 
android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.hardware.Camera; import android.media.MediaMetadataRetriever; @@ -33,6 +37,7 @@ import android.provider.MediaStore; import android.provider.Settings; import android.text.TextUtils; +import android.util.Log; import android.util.TypedValue; import android.view.Gravity; import android.view.KeyEvent; @@ -71,6 +76,7 @@ import org.telegram.messenger.FileLoader; import org.telegram.messenger.FileLog; import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaController; import org.telegram.messenger.MediaDataController; @@ -98,8 +104,10 @@ import org.telegram.ui.ChatActivity; import org.telegram.ui.PhotoViewer; +import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; +import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -113,6 +121,8 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayout implements NotificationCenter.NotificationCenterDelegate { + private static final int VIEW_TYPE_AVATAR_CONSTRUCTOR = 4; + private RecyclerListView cameraPhotoRecyclerView; private LinearLayoutManager cameraPhotoLayoutManager; private PhotoAttachAdapter cameraAttachAdapter; @@ -177,6 +187,9 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou private boolean checkCameraWhenShown; private boolean mediaEnabled; + private boolean videoEnabled; + private boolean photoEnabled; + private boolean documentsEnabled; private float pinchStartDistance; private float cameraZoom; @@ -224,6 +237,11 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou boolean forceDarkTheme; private int animationIndex = -1; + private boolean showAvatarConstructor; + + public void updateAvatarPicker() { + showAvatarConstructor = parentAlert.avatarPicker != 0; + } private class BasePhotoProvider extends PhotoViewer.EmptyPhotoViewerProvider { @Override @@ -246,6 +264,9 @@ public int setPhotoChecked(int index, VideoEditedInfo videoEditedInfo) { if (photoEntry == null) { return -1; } + if (checkSendMediaEnabled(photoEntry)) { + return -1; + } boolean add = true; int num; if ((num = addToSelectedPhotos(photoEntry, -1)) == -1) { @@ -557,6 +578,7 @@ public ChatAttachAlertPhotoLayout(ChatAttachAlert alert, Context context, boolea NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.albumsDidLoad); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.cameraInitied); FrameLayout container = alert.getContainer(); + showAvatarConstructor = parentAlert.avatarPicker != 0; cameraDrawable = context.getResources().getDrawable(R.drawable.instant_camera).mutate(); @@ -594,8 +616,7 @@ public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { parentAlert.selectedMenuItem.addSubItem(group, R.drawable.msg_ungroup, LocaleController.getString("SendWithoutGrouping", R.string.SendWithoutGrouping)); spoilerItem = parentAlert.selectedMenuItem.addSubItem(spoiler, R.drawable.msg_spoiler, LocaleController.getString("EnablePhotoSpoiler", R.string.EnablePhotoSpoiler)); parentAlert.selectedMenuItem.addSubItem(open_in, R.drawable.msg_openin, LocaleController.getString("OpenInExternalApp", R.string.OpenInExternalApp)); - View gap = parentAlert.selectedMenuItem.addGap(preview_gap); - 
gap.setBackgroundColor(Theme.getColor(Theme.key_actionBarDefaultSubmenuSeparator, resourcesProvider)); + parentAlert.selectedMenuItem.addColoredGap(preview_gap); previewItem = parentAlert.selectedMenuItem.addSubItem(preview, R.drawable.msg_view_file, LocaleController.getString("AttachMediaPreviewButton", R.string.AttachMediaPreviewButton)); parentAlert.selectedMenuItem.setFitSubItems(true); @@ -722,6 +743,16 @@ public int getSpanSize(int position) { if (selectedAlbumEntry == galleryAlbumEntry) { position--; } + if (showAvatarConstructor) { + if (position == 0) { + if (!(view instanceof AvatarConstructorPreviewCell)) { + return; + } + showAvatarConstructorFragment((AvatarConstructorPreviewCell) view, null); + parentAlert.dismiss(); + } + position--; + } ArrayList arrayList = getAllPhotosArray(); if (position < 0 || position >= arrayList.size()) { return; @@ -765,7 +796,13 @@ public int getSpanSize(int position) { } boolean hasSpoiler = arrayList.get(position) instanceof MediaController.PhotoEntry && ((MediaController.PhotoEntry) arrayList.get(position)).hasSpoiler; - + Object object = arrayList.get(position); + if (object instanceof MediaController.PhotoEntry) { + MediaController.PhotoEntry photoEntry = (MediaController.PhotoEntry) object; + if (checkSendMediaEnabled(photoEntry)) { + return; + } + } if (hasSpoiler) { setCurrentSpoilerVisible(position, false); } @@ -973,6 +1010,10 @@ public boolean shutterLongPressed() { if (parentAlert.avatarPicker != 2 && !(parentAlert.baseFragment instanceof ChatActivity) || takingPhoto || parentAlert.baseFragment == null || parentAlert.baseFragment.getParentActivity() == null || cameraView == null) { return false; } + if (!videoEnabled) { + BulletinFactory.of(cameraView, resourcesProvider).createErrorBulletin(LocaleController.getString(R.string.GlobalAttachVideoRestricted)).show(); + return false; + } if (Build.VERSION.SDK_INT >= 23) { if (parentAlert.baseFragment.getParentActivity().checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) { requestingPermissions = true; @@ -1048,6 +1089,10 @@ public void shutterReleased() { shutterButton.setState(ShutterButton.State.DEFAULT, true); return; } + if (!photoEnabled) { + BulletinFactory.of(cameraView, resourcesProvider).createErrorBulletin(LocaleController.getString(R.string.GlobalAttachPhotoRestricted)).show(); + return; + } final File cameraFile = AndroidUtilities.generatePicturePath(parentAlert.baseFragment instanceof ChatActivity && ((ChatActivity) parentAlert.baseFragment).isSecretChat(), null); final boolean sameTakePictureOrientation = cameraView.getCameraSession().isSameTakePictureOrientation(); cameraView.getCameraSession().setFlipFront(parentAlert.baseFragment instanceof ChatActivity || parentAlert.avatarPicker == 2); @@ -1222,6 +1267,189 @@ public boolean supportsPredictiveItemAnimations() { }); } + public void showAvatarConstructorFragment(AvatarConstructorPreviewCell view, TLRPC.VideoSize emojiMarkupStrat) { + AvatarConstructorFragment avatarConstructorFragment = new AvatarConstructorFragment(parentAlert.parentImageUpdater, parentAlert.getAvatarFor()); + avatarConstructorFragment.finishOnDone = !(parentAlert.getAvatarFor() != null && parentAlert.getAvatarFor().type == ImageUpdater.TYPE_SUGGEST_PHOTO_FOR_USER); + parentAlert.baseFragment.presentFragment(avatarConstructorFragment); + if (view != null) { + avatarConstructorFragment.startFrom(view); + } + if (emojiMarkupStrat != null) { + avatarConstructorFragment.startFrom(emojiMarkupStrat); + } + 
avatarConstructorFragment.setDelegate((gradient, documentId, document, previewView) -> { + selectedPhotos.clear(); + Bitmap bitmap = Bitmap.createBitmap(800, 800, Bitmap.Config.ARGB_8888); + Canvas canvas = new Canvas(bitmap); + GradientTools gradientTools = new GradientTools(); + if (gradient != null) { + gradientTools.setColors(gradient.color1, gradient.color2, gradient.color3, gradient.color4); + } else { + gradientTools.setColors(AvatarConstructorFragment.defaultColors[0][0], AvatarConstructorFragment.defaultColors[0][1], AvatarConstructorFragment.defaultColors[0][2], AvatarConstructorFragment.defaultColors[0][3]); + } + gradientTools.setBounds(0, 0, 800, 800); + canvas.drawRect(0, 0, 800, 800, gradientTools.paint); + + File file = new File(FileLoader.getDirectory(FileLoader.MEDIA_DIR_CACHE), SharedConfig.getLastLocalId() + "avatar_background.png"); + try { + file.createNewFile(); + + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + bitmap.compress(Bitmap.CompressFormat.PNG, 0, bos); + byte[] bitmapdata = bos.toByteArray(); + + FileOutputStream fos = new FileOutputStream(file); + fos.write(bitmapdata); + fos.flush(); + fos.close(); + } catch (IOException e) { + e.printStackTrace(); + } + + float scale = AvatarConstructorFragment.STICKER_DEFAULT_SCALE; + int imageX, imageY; + imageX = imageY = (int) (800 * (1f - scale) / 2f); + int imageSize = (int) (800 * scale); + + ImageReceiver imageReceiver = previewView.getImageReceiver(); + if (imageReceiver.getAnimation() != null) { + Bitmap firstFrame = imageReceiver.getAnimation().getFirstFrame(null); + ImageReceiver firstFrameReceiver = new ImageReceiver(); + firstFrameReceiver.setImageBitmap(firstFrame); + firstFrameReceiver.setImageCoords(imageX, imageY, imageSize, imageSize); + firstFrameReceiver.setRoundRadius((int) (imageSize * AvatarConstructorFragment.STICKER_DEFAULT_ROUND_RADIUS)); + firstFrameReceiver.draw(canvas); + firstFrameReceiver.clearImage(); + firstFrame.recycle(); + } else { + if (imageReceiver.getLottieAnimation() != null) { + imageReceiver.getLottieAnimation().setCurrentFrame(0, false, true); + } + imageReceiver.setImageCoords(imageX, imageY, imageSize, imageSize); + imageReceiver.setRoundRadius((int) (imageSize * AvatarConstructorFragment.STICKER_DEFAULT_ROUND_RADIUS)); + imageReceiver.draw(canvas); + } + + File thumb = new File(FileLoader.getDirectory(FileLoader.MEDIA_DIR_CACHE), SharedConfig.getLastLocalId() + "avatar_background.png"); + try { + thumb.createNewFile(); + + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + bitmap.compress(Bitmap.CompressFormat.PNG, 0, bos); + byte[] bitmapdata = bos.toByteArray(); + + FileOutputStream fos = new FileOutputStream(thumb); + fos.write(bitmapdata); + fos.flush(); + fos.close(); + } catch (IOException e) { + e.printStackTrace(); + } + + MediaController.PhotoEntry photoEntry; + if (previewView.hasAnimation()) { + photoEntry = new MediaController.PhotoEntry(0, 0, 0, file.getPath(), 0, false, 0, 0, 0); + photoEntry.thumbPath = thumb.getPath(); + + if (previewView.documentId != 0) { + TLRPC.TL_videoSizeEmojiMarkup emojiMarkup = new TLRPC.TL_videoSizeEmojiMarkup(); + emojiMarkup.emoji_id = previewView.documentId; + emojiMarkup.background_colors.add(previewView.backgroundGradient.color1); + if (previewView.backgroundGradient.color2 != 0) { + emojiMarkup.background_colors.add(previewView.backgroundGradient.color2); + } + if (previewView.backgroundGradient.color3 != 0) { + emojiMarkup.background_colors.add(previewView.backgroundGradient.color3); + } + if 
(previewView.backgroundGradient.color4 != 0) { + emojiMarkup.background_colors.add(previewView.backgroundGradient.color4); + } + photoEntry.emojiMarkup = emojiMarkup; + } else if (previewView.document != null) { + TLRPC.TL_videoSizeStickerMarkup emojiMarkup = new TLRPC.TL_videoSizeStickerMarkup(); + emojiMarkup.sticker_id = previewView.document.id; + emojiMarkup.stickerset = MessageObject.getInputStickerSet(previewView.document); + emojiMarkup.background_colors.add(previewView.backgroundGradient.color1); + if (previewView.backgroundGradient.color2 != 0) { + emojiMarkup.background_colors.add(previewView.backgroundGradient.color2); + } + if (previewView.backgroundGradient.color3 != 0) { + emojiMarkup.background_colors.add(previewView.backgroundGradient.color3); + } + if (previewView.backgroundGradient.color4 != 0) { + emojiMarkup.background_colors.add(previewView.backgroundGradient.color4); + } + photoEntry.emojiMarkup = emojiMarkup; + } + + photoEntry.editedInfo = new VideoEditedInfo(); + photoEntry.editedInfo.originalPath = file.getPath(); + photoEntry.editedInfo.resultWidth = 800; + photoEntry.editedInfo.resultHeight = 800; + photoEntry.editedInfo.originalWidth = 800; + photoEntry.editedInfo.originalHeight = 800; + photoEntry.editedInfo.isPhoto = true; + photoEntry.editedInfo.bitrate = -1; + photoEntry.editedInfo.muted = true; + + photoEntry.editedInfo.start = photoEntry.editedInfo.startTime = 0; + photoEntry.editedInfo.endTime = previewView.getDuration(); + photoEntry.editedInfo.framerate = 30; + + photoEntry.editedInfo.avatarStartTime = 0; + photoEntry.editedInfo.estimatedSize = (int) (photoEntry.editedInfo.endTime / 1000.0f * 115200); + photoEntry.editedInfo.estimatedDuration = photoEntry.editedInfo.endTime; + + VideoEditedInfo.MediaEntity mediaEntity = new VideoEditedInfo.MediaEntity(); + mediaEntity.type = 0; + + if (document == null) { + document = AnimatedEmojiDrawable.findDocument(UserConfig.selectedAccount, documentId); + } + if (document == null) { + return; + } + mediaEntity.viewWidth = (int) (800 * scale); + mediaEntity.viewHeight = (int) (800 * scale); + mediaEntity.width = scale; + mediaEntity.height = scale; + mediaEntity.x = (1f - scale) / 2f; + mediaEntity.y = (1f - scale) / 2f; + mediaEntity.document = document; + mediaEntity.parentObject = null; + mediaEntity.text = FileLoader.getInstance(UserConfig.selectedAccount).getPathToAttach(document, true).getAbsolutePath(); + mediaEntity.roundRadius = AvatarConstructorFragment.STICKER_DEFAULT_ROUND_RADIUS; + if (MessageObject.isAnimatedStickerDocument(document, true) || MessageObject.isVideoStickerDocument(document)) { + boolean isAnimatedSticker = MessageObject.isAnimatedStickerDocument(document, true); + mediaEntity.subType |= isAnimatedSticker ? 
1 : 4; + } + + photoEntry.editedInfo.mediaEntities = new ArrayList<>(); + photoEntry.editedInfo.mediaEntities.add(mediaEntity); + } else { + photoEntry = new MediaController.PhotoEntry(0, 0, 0, thumb.getPath(), 0, false, 0, 0, 0); + } + selectedPhotos.put(-1, photoEntry); + selectedPhotosOrder.add(-1); + parentAlert.delegate.didPressedButton(7, true, false, 0, false); + }); + } + + private boolean checkSendMediaEnabled(MediaController.PhotoEntry photoEntry) { + if (!videoEnabled && photoEntry.isVideo) { + BulletinFactory.of(parentAlert.sizeNotifierFrameLayout, resourcesProvider).createErrorBulletin( + LocaleController.getString("GlobalAttachVideoRestricted", R.string.GlobalAttachVideoRestricted) + ).show(); + return true; + } else if (!photoEnabled && !photoEntry.isVideo) { + BulletinFactory.of(parentAlert.sizeNotifierFrameLayout, resourcesProvider).createErrorBulletin( + LocaleController.getString("GlobalAttachPhotoRestricted", R.string.GlobalAttachPhotoRestricted) + ).show(); + return true; + } + return false; + } + private int addToSelectedPhotos(MediaController.PhotoEntry object, int index) { Object key = object.imageId; if (selectedPhotos.containsKey(key)) { @@ -1579,7 +1807,11 @@ public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolea } if (!(parentAlert.baseFragment instanceof ChatActivity) || !((ChatActivity) parentAlert.baseFragment).isSecretChat()) { for (int a = 0, size = cameraPhotos.size(); a < size; a++) { - AndroidUtilities.addMediaToGallery(((MediaController.PhotoEntry) cameraPhotos.get(a)).path); + MediaController.PhotoEntry entry = (MediaController.PhotoEntry) cameraPhotos.get(a); + if (entry.ttl > 0) { + continue; + } + AndroidUtilities.addMediaToGallery(entry.path); } } parentAlert.applyCaption(); @@ -1710,7 +1942,7 @@ private PhotoAttachPhotoCell getCellForIndex(int index) { } if (view instanceof PhotoAttachPhotoCell) { PhotoAttachPhotoCell cell = (PhotoAttachPhotoCell) view; - if ((Integer) cell.getImageView().getTag() == index) { + if (cell.getImageView().getTag() != null && (Integer) cell.getImageView().getTag() == index) { return cell; } } @@ -1782,9 +2014,10 @@ public void checkCamera(boolean request) { boolean cameraExpanded; private void openCamera(boolean animated) { - if (cameraView == null || cameraInitAnimation != null || !cameraView.isInited() || parentAlert.isDismissed()) { + if (cameraView == null || cameraInitAnimation != null || parentAlert.isDismissed()) { return; } + cameraView.initTexture(); if (parentAlert.avatarPicker == 2 || parentAlert.baseFragment instanceof ChatActivity) { tooltipTextView.setVisibility(VISIBLE); } else { @@ -1809,9 +2042,12 @@ private void openCamera(boolean animated) { animateCameraValues[2] = itemSize; additionCloseCameraY = 0; cameraExpanded = true; - cameraView.setFpsLimit(-1); + if (cameraView != null) { + cameraView.setFpsLimit(-1); + } AndroidUtilities.hideKeyboard(this); AndroidUtilities.setLightNavigationBar(parentAlert.getWindow(), false); + parentAlert.getWindow().addFlags(FLAG_KEEP_SCREEN_ON); if (animated) { setCameraOpenProgress(0); cameraAnimationInProgress = true; @@ -1836,10 +2072,12 @@ private void openCamera(boolean animated) { public void onAnimationEnd(Animator animator) { NotificationCenter.getInstance(parentAlert.currentAccount).onAnimationFinish(animationIndex); cameraAnimationInProgress = false; - if (Build.VERSION.SDK_INT >= 21 && cameraView != null) { - cameraView.invalidateOutline(); - } else if (cameraView != null) { - cameraView.invalidate(); + if (cameraView != null) { + 
if (Build.VERSION.SDK_INT >= 21) { + cameraView.invalidateOutline(); + } else { + cameraView.invalidate(); + } } if (cameraOpened) { parentAlert.delegate.onCameraOpened(); @@ -1862,15 +2100,21 @@ public void onAnimationEnd(Animator animator) { } } parentAlert.delegate.onCameraOpened(); - if (Build.VERSION.SDK_INT >= 21) { + if (cameraView != null && Build.VERSION.SDK_INT >= 21) { cameraView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN | View.SYSTEM_UI_FLAG_FULLSCREEN); } } cameraOpened = true; - cameraView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO); + if (cameraView != null) { + cameraView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO); + } if (Build.VERSION.SDK_INT >= 19) { gridView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO_HIDE_DESCENDANTS); } + + if (!LiteMode.isEnabled(LiteMode.FLAGS_CHAT) && cameraView != null && cameraView.isInited()) { + cameraView.showTexture(true, animated); + } } public void loadGalleryPhotos() { @@ -1890,18 +2134,27 @@ public void showCamera() { return; } if (cameraView == null) { - cameraView = new CameraView(parentAlert.baseFragment.getParentActivity(), parentAlert.openWithFrontFaceCamera) { + final boolean lazy = !LiteMode.isEnabled(LiteMode.FLAGS_CHAT); + cameraView = new CameraView(parentAlert.baseFragment.getParentActivity(), parentAlert.openWithFrontFaceCamera, lazy) { + + Bulletin.Delegate bulletinDelegate = new Bulletin.Delegate() { + @Override + public int getBottomOffset(int tag) { + return AndroidUtilities.dp(126) + parentAlert.getBottomInset(); + } + }; @Override protected void dispatchDraw(Canvas canvas) { if (Build.VERSION.SDK_INT >= 21) { super.dispatchDraw(canvas); } else { + int maxY = (int) Math.min(parentAlert.getCommentTextViewTop() + currentPanTranslationY + parentAlert.getContainerView().getTranslationY() - cameraView.getTranslationY(), getMeasuredHeight()); if (cameraAnimationInProgress) { - AndroidUtilities.rectTmp.set(animationClipLeft + cameraViewOffsetX * (1f - cameraOpenProgress), animationClipTop + cameraViewOffsetY * (1f - cameraOpenProgress), animationClipRight, animationClipBottom); + AndroidUtilities.rectTmp.set(animationClipLeft + cameraViewOffsetX * (1f - cameraOpenProgress), animationClipTop + cameraViewOffsetY * (1f - cameraOpenProgress), animationClipRight, Math.min(maxY, animationClipBottom)); } else if (!cameraAnimationInProgress && !cameraOpened) { - AndroidUtilities.rectTmp.set(cameraViewOffsetX, cameraViewOffsetY, getMeasuredWidth(), getMeasuredHeight()); + AndroidUtilities.rectTmp.set(cameraViewOffsetX, cameraViewOffsetY, getMeasuredWidth(), Math.min(maxY, getMeasuredHeight())); } else { - AndroidUtilities.rectTmp.set(0 , 0, getMeasuredWidth(), getMeasuredHeight()); + AndroidUtilities.rectTmp.set(0 , 0, getMeasuredWidth(), Math.min(maxY, getMeasuredHeight())); } canvas.save(); canvas.clipRect(AndroidUtilities.rectTmp); @@ -1909,7 +2162,22 @@ protected void dispatchDraw(Canvas canvas) { canvas.restore(); } } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + Bulletin.addDelegate(cameraView, bulletinDelegate); + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + Bulletin.removeDelegate(cameraView); + } }; + if (cameraCell != null && lazy) { + cameraView.setThumbDrawable(cameraCell.getDrawable()); + } cameraView.setRecordFile(AndroidUtilities.generateVideoPath(parentAlert.baseFragment instanceof ChatActivity && ((ChatActivity) 
parentAlert.baseFragment).isSecretChat())); cameraView.setFocusable(true); cameraView.setFpsLimit(30); @@ -1917,14 +2185,20 @@ protected void dispatchDraw(Canvas canvas) { cameraView.setOutlineProvider(new ViewOutlineProvider() { @Override public void getOutline(View view, Outline outline) { + int maxY = (int) Math.min(parentAlert.getCommentTextViewTop() + currentPanTranslationY + parentAlert.getContainerView().getTranslationY() - cameraView.getTranslationY(), view.getMeasuredHeight()); + if (cameraOpened) { + maxY = view.getMeasuredHeight(); + } else if (cameraAnimationInProgress) { + maxY = AndroidUtilities.lerp(maxY, view.getMeasuredHeight(), cameraOpenProgress); + } if (cameraAnimationInProgress) { AndroidUtilities.rectTmp.set(animationClipLeft + cameraViewOffsetX * (1f - cameraOpenProgress), animationClipTop + cameraViewOffsetY * (1f - cameraOpenProgress), animationClipRight, animationClipBottom); - outline.setRect((int) AndroidUtilities.rectTmp.left,(int) AndroidUtilities.rectTmp.top, (int) AndroidUtilities.rectTmp.right, (int) AndroidUtilities.rectTmp.bottom); + outline.setRect((int) AndroidUtilities.rectTmp.left,(int) AndroidUtilities.rectTmp.top, (int) AndroidUtilities.rectTmp.right, Math.min(maxY, (int) AndroidUtilities.rectTmp.bottom)); } else if (!cameraAnimationInProgress && !cameraOpened) { int rad = AndroidUtilities.dp(8 * parentAlert.cornerRadius); - outline.setRoundRect((int) cameraViewOffsetX, (int) cameraViewOffsetY, view.getMeasuredWidth() + rad, view.getMeasuredHeight() + rad, rad); + outline.setRoundRect((int) cameraViewOffsetX, (int) cameraViewOffsetY, view.getMeasuredWidth() + rad, Math.min(maxY, view.getMeasuredHeight()) + rad, rad); } else { - outline.setRect(0, 0, view.getMeasuredWidth(), view.getMeasuredHeight()); + outline.setRect(0, 0, view.getMeasuredWidth(), Math.min(maxY, view.getMeasuredHeight())); } } }); @@ -1992,6 +2266,12 @@ public void onAnimationCancel(Animator animation) { cameraIcon = new FrameLayout(parentAlert.baseFragment.getParentActivity()) { @Override protected void onDraw(Canvas canvas) { + int maxY = (int) Math.min(parentAlert.getCommentTextViewTop() + currentPanTranslationY + parentAlert.getContainerView().getTranslationY() - cameraView.getTranslationY(), getMeasuredHeight()); + if (cameraOpened) { + maxY = getMeasuredHeight(); + } else if (cameraAnimationInProgress) { + maxY = AndroidUtilities.lerp(maxY, getMeasuredHeight(), cameraOpenProgress); + } int w = cameraDrawable.getIntrinsicWidth(); int h = cameraDrawable.getIntrinsicHeight(); int x = (itemSize - w) / 2; @@ -1999,8 +2279,16 @@ protected void onDraw(Canvas canvas) { if (cameraViewOffsetY != 0) { y -= cameraViewOffsetY; } + boolean clip = maxY < getMeasuredHeight(); + if (clip) { + canvas.save(); + canvas.clipRect(0, 0, getMeasuredWidth(), maxY); + } cameraDrawable.setBounds(x, y, x + w, y + h); cameraDrawable.draw(canvas); + if (clip) { + canvas.restore(); + } } }; cameraIcon.setWillNotDraw(false); @@ -2240,12 +2528,15 @@ public void closeCamera(boolean animated) { public void onAnimationEnd(Animator animator) { NotificationCenter.getInstance(parentAlert.currentAccount).onAnimationFinish(animationIndex); cameraExpanded = false; + parentAlert.getWindow().clearFlags(FLAG_KEEP_SCREEN_ON); setCameraOpenProgress(0f); cameraAnimationInProgress = false; - if (Build.VERSION.SDK_INT >= 21 && cameraView != null) { - cameraView.invalidateOutline(); - } else if (cameraView != null){ - cameraView.invalidate(); + if (cameraView != null) { + if (Build.VERSION.SDK_INT >= 21) { + 
cameraView.invalidateOutline(); + } else { + cameraView.invalidate(); + } } cameraOpened = false; @@ -2270,6 +2561,7 @@ public void onAnimationEnd(Animator animator) { animatorSet.start(); } else { cameraExpanded = false; + parentAlert.getWindow().clearFlags(FLAG_KEEP_SCREEN_ON); setCameraOpenProgress(0f); animateCameraValues[0] = 0; setCameraOpenProgress(0); @@ -2288,15 +2580,23 @@ public void onAnimationEnd(Animator animator) { } } cameraOpened = false; - cameraView.setFpsLimit(30); - if (Build.VERSION.SDK_INT >= 21) { - cameraView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN); + if (cameraView != null) { + cameraView.setFpsLimit(30); + if (Build.VERSION.SDK_INT >= 21) { + cameraView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN); + } } } - cameraView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_AUTO); + if (cameraView != null) { + cameraView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_AUTO); + } if (Build.VERSION.SDK_INT >= 19) { gridView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_AUTO); } + + if (!LiteMode.isEnabled(LiteMode.FLAGS_CHAT) && cameraView != null) { + cameraView.showTexture(false, animated); + } } float animationClipTop; @@ -2337,21 +2637,18 @@ public void setCameraOpenProgress(float value) { float fromScaleY = startHeight / endHeight; float fromScaleX = startWidth/ endWidth; - float scaleOffsetX = 0; - float scaleOffsetY = 0; - if (cameraExpanded) { cameraViewW = (int) endWidth; cameraViewH = (int) endHeight; - float s = fromScale * (1f - value) + value; + final float s = fromScale * (1f - value) + value; cameraView.getTextureView().setScaleX(s); cameraView.getTextureView().setScaleY(s); - float sX = fromScaleX * (1f - value) + value; - float sY = fromScaleY * (1f - value) + value; + final float sX = fromScaleX * (1f - value) + value; + final float sY = fromScaleY * (1f - value) + value; - scaleOffsetY = (1 - sY) * endHeight / 2; - scaleOffsetX = (1 - sX) * endWidth / 2; + final float scaleOffsetY = (1 - sY) * endHeight / 2; + final float scaleOffsetX = (1 - sX) * endWidth / 2; cameraView.setTranslationX(fromX * (1f - value) + toX * value - scaleOffsetX); cameraView.setTranslationY(fromY * (1f - value) + toY * value - scaleOffsetY); @@ -2513,16 +2810,16 @@ private void applyCameraViewPosition() { int finalWidth = itemSize; int finalHeight = itemSize; - ViewGroup.LayoutParams layoutParams; + LayoutParams layoutParams; if (!cameraOpened) { cameraView.setClipTop((int) cameraViewOffsetY); cameraView.setClipBottom((int) cameraViewOffsetBottomY); - layoutParams = cameraView.getLayoutParams(); + layoutParams = (LayoutParams) cameraView.getLayoutParams(); if (layoutParams.height != finalHeight || layoutParams.width != finalWidth) { layoutParams.width = finalWidth; layoutParams.height = finalHeight; cameraView.setLayoutParams(layoutParams); - final ViewGroup.LayoutParams layoutParamsFinal = layoutParams; + final LayoutParams layoutParamsFinal = layoutParams; AndroidUtilities.runOnUIThread(() -> { if (cameraView != null) { cameraView.setLayoutParams(layoutParamsFinal); @@ -2534,12 +2831,12 @@ private void applyCameraViewPosition() { finalWidth = (int) (itemSize - cameraViewOffsetX); finalHeight = (int) (itemSize - cameraViewOffsetY - cameraViewOffsetBottomY); - layoutParams = cameraIcon.getLayoutParams(); + layoutParams = (LayoutParams) cameraIcon.getLayoutParams(); if (layoutParams.height != finalHeight || layoutParams.width != finalWidth) { layoutParams.width = finalWidth; layoutParams.height = 
finalHeight; cameraIcon.setLayoutParams(layoutParams); - final ViewGroup.LayoutParams layoutParamsFinal = layoutParams; + final LayoutParams layoutParamsFinal = layoutParams; AndroidUtilities.runOnUIThread(() -> { if (cameraIcon != null) { cameraIcon.setLayoutParams(layoutParamsFinal); @@ -2766,6 +3063,7 @@ void onSelectedItemsCountChanged(int count) { if (count != 0) { parentAlert.selectedMenuItem.hideSubItem(open_in); } + compressItem.setVisibility(documentsEnabled ? View.VISIBLE : View.GONE); if (count > 1) { parentAlert.selectedMenuItem.showSubItem(preview_gap); parentAlert.selectedMenuItem.showSubItem(preview); @@ -2912,8 +3210,11 @@ void checkColors() { } @Override - void onInit(boolean hasMedia) { - mediaEnabled = hasMedia; + void onInit(boolean hasVideo, boolean hasPhoto, boolean hasDocuments) { + mediaEnabled = hasVideo || hasPhoto; + videoEnabled = hasVideo; + photoEnabled = hasPhoto; + documentsEnabled = hasDocuments; if (cameraView != null) { cameraView.setAlpha(mediaEnabled ? 1.0f : 0.2f); cameraView.setEnabled(mediaEnabled); @@ -3235,10 +3536,36 @@ boolean canDismissWithTouchOutside() { return !cameraOpened; } + @Override + public void onPanTransitionStart(boolean keyboardVisible, int contentHeight) { + super.onPanTransitionStart(keyboardVisible, contentHeight); + checkCameraViewPosition(); + if (cameraView != null) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + cameraView.invalidateOutline(); + } else { + cameraView.invalidate(); + } + } + if (cameraIcon != null) { + cameraIcon.invalidate(); + } + } + @Override void onContainerTranslationUpdated(float currentPanTranslationY) { this.currentPanTranslationY = currentPanTranslationY; checkCameraViewPosition(); + if (cameraView != null) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + cameraView.invalidateOutline(); + } else { + cameraView.invalidate(); + } + } + if (cameraIcon != null) { + cameraIcon.invalidate(); + } invalidate(); } @@ -3471,10 +3798,16 @@ public RecyclerListView.Holder createHolder() { @Override public void getOutline(View view, Outline outline) { PhotoAttachPhotoCell photoCell = (PhotoAttachPhotoCell) view; + if (photoCell.getTag() == null) { + return; + } int position = (Integer) photoCell.getTag(); if (needCamera && selectedAlbumEntry == galleryAlbumEntry) { position++; } + if (showAvatarConstructor) { + position++; + } if (position == 0) { int rad = AndroidUtilities.dp(8 * parentAlert.cornerRadius); outline.setRoundRect(0, 0, view.getMeasuredWidth() + rad, view.getMeasuredHeight() + rad, rad); @@ -3494,6 +3827,9 @@ public void getOutline(View view, Outline outline) { } int index = (Integer) v.getTag(); MediaController.PhotoEntry photoEntry = v.getPhotoEntry(); + if (checkSendMediaEnabled(photoEntry)) { + return; + } boolean added = !selectedPhotos.containsKey(photoEntry.imageId); if (added && parentAlert.maxSelectedPhotos >= 0 && selectedPhotos.size() >= parentAlert.maxSelectedPhotos) { if (parentAlert.allowOrder && parentAlert.baseFragment instanceof ChatActivity) { @@ -3546,6 +3882,9 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { if (needCamera && selectedAlbumEntry == galleryAlbumEntry) { position--; } + if (showAvatarConstructor) { + position--; + } PhotoAttachPhotoCell cell = (PhotoAttachPhotoCell) holder.itemView; if (this == adapter) { cell.setItemSize(itemSize); @@ -3557,12 +3896,22 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } MediaController.PhotoEntry photoEntry = 
getPhotoEntryAtPosition(position); + if (photoEntry == null) { + return; + } cell.setPhotoEntry(photoEntry, needCamera && selectedAlbumEntry == galleryAlbumEntry, position == getItemCount() - 1); if (parentAlert.baseFragment instanceof ChatActivity && parentAlert.allowOrder) { cell.setChecked(selectedPhotosOrder.indexOf(photoEntry.imageId), selectedPhotos.containsKey(photoEntry.imageId), false); } else { cell.setChecked(-1, selectedPhotos.containsKey(photoEntry.imageId), false); } + if (!videoEnabled && photoEntry.isVideo) { + cell.setAlpha(0.3f); + } else if (!photoEnabled && !photoEntry.isVideo) { + cell.setAlpha(0.3f); + } else { + cell.setAlpha(1f); + } cell.getImageView().setTag(position); cell.setTag(position); break; @@ -3631,6 +3980,15 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { default: holder = new RecyclerListView.Holder(new PhotoAttachPermissionCell(mContext, resourcesProvider)); break; + case 4: + AvatarConstructorPreviewCell avatarConstructorPreviewCell = new AvatarConstructorPreviewCell(mContext, parentAlert.forUser) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(MeasureSpec.makeMeasureSpec(itemSize, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(itemSize, MeasureSpec.EXACTLY)); + } + }; + holder = new RecyclerListView.Holder(avatarConstructorPreviewCell); + break; } return holder; } @@ -3652,6 +4010,9 @@ public int getItemCount() { if (needCamera && selectedAlbumEntry == galleryAlbumEntry) { count++; } + if (showAvatarConstructor) { + count++; + } if (noGalleryPermissions && this == adapter) { count++; count++; // NekoX: Additional Open In picker @@ -3671,6 +4032,7 @@ public int getItemViewType(int position) { if (!mediaEnabled) { return 2; } + int localPosition = position; if (needCamera && position == 0 && selectedAlbumEntry == galleryAlbumEntry) { if (noCameraPermissions) { return 3; @@ -3678,6 +4040,12 @@ public int getItemViewType(int position) { return 1; } } + if (needCamera) { + localPosition--; + } + if (showAvatarConstructor && localPosition == 0) { + return VIEW_TYPE_AVATAR_CONSTRUCTOR; + } if (this == adapter && position == itemsCount - 1) { return 2; } else if (noGalleryPermissions) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachRestrictedLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachRestrictedLayout.java new file mode 100644 index 0000000000..133099d367 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachRestrictedLayout.java @@ -0,0 +1,154 @@ +package org.telegram.ui.Components; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.view.View; +import android.view.ViewGroup; + +import androidx.annotation.NonNull; +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.LinearSmoothScroller; +import androidx.recyclerview.widget.RecyclerView; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ChatObject; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.R; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.ActionBar; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.ChatActivity; + +public class ChatAttachRestrictedLayout extends ChatAttachAlert.AttachAlertLayout { + + private final EmptyTextProgressView progressView; + private final RecyclerListView 
listView; + public final int id; + private final RecyclerView.Adapter adapter; + private int gridExtraSpace; + + public ChatAttachRestrictedLayout(int id, ChatAttachAlert alert, Context context, Theme.ResourcesProvider resourcesProvider) { + super(alert, context, resourcesProvider); + this.id = id; + progressView = new EmptyTextProgressView(context, null, resourcesProvider); + progressView.setText(LocaleController.getString("NoPhotos", R.string.NoPhotos)); + progressView.setOnTouchListener(null); + progressView.setTextSize(16); + addView(progressView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + + progressView.setLottie(R.raw.media_forbidden, 150, 150); + TLRPC.Chat chat = ((ChatActivity) parentAlert.baseFragment).getCurrentChat(); + if (id == 1) { + progressView.setText(ChatObject.getRestrictedErrorText(chat, ChatObject.ACTION_SEND_MEDIA)); + } else if (id == 3) { + progressView.setText(ChatObject.getRestrictedErrorText(chat, ChatObject.ACTION_SEND_MUSIC)); + } else if (id == 4) { + progressView.setText(ChatObject.getRestrictedErrorText(chat, ChatObject.ACTION_SEND_DOCUMENTS)); + } else { + progressView.setText(ChatObject.getRestrictedErrorText(chat, ChatObject.ACTION_SEND_PLAIN)); + } + progressView.showTextView(); + + listView = new RecyclerListView(context, resourcesProvider); + listView.setSectionsType(RecyclerListView.SECTIONS_TYPE_DATE); + listView.setVerticalScrollBarEnabled(false); + listView.setLayoutManager(new LinearLayoutManager(context)); + listView.setClipToPadding(false); + listView.setAdapter(adapter = new RecyclerView.Adapter() { + @NonNull + @Override + public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { + View view = new View(getContext()) { + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(gridExtraSpace, MeasureSpec.EXACTLY)); + } + }; + return new RecyclerListView.Holder(view); + } + + @Override + public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int position) { + + } + + @Override + public int getItemCount() { + return 1; + } + }); + listView.setPadding(0, 0, 0, AndroidUtilities.dp(48)); + listView.setOnScrollListener(new RecyclerView.OnScrollListener() { + @Override + public void onScrolled(RecyclerView recyclerView, int dx, int dy) { + parentAlert.updateLayout(ChatAttachRestrictedLayout.this, true, dy); + } + }); + addView(listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + } + + + @Override + int getCurrentItemTop() { + if (listView.getChildCount() <= 0) { + return Integer.MAX_VALUE; + } + View child = listView.getChildAt(0); + RecyclerListView.Holder holder = (RecyclerListView.Holder) listView.findContainingViewHolder(child); + int top = child.getTop() - AndroidUtilities.dp(8); + int newOffset = top > 0 && holder != null && holder.getAdapterPosition() == 0 ? 
top : 0; + if (top >= 0 && holder != null && holder.getAdapterPosition() == 0) { + newOffset = top; + // runShadowAnimation(false); + } else { +// runShadowAnimation(true); + } + progressView.setTranslationY(newOffset + (getMeasuredHeight() - newOffset - AndroidUtilities.dp(50) - progressView.getMeasuredHeight()) / 2); +// frameLayout.setTranslationY(newOffset); + return newOffset + AndroidUtilities.dp(12); + } + + @Override + public void setTranslationY(float translationY) { + super.setTranslationY(translationY); + parentAlert.getSheetContainer().invalidate(); + } + + @Override + int getFirstOffset() { + return getListTopPadding() + AndroidUtilities.dp(4); + } + + @Override + int getListTopPadding() { + return listView.getPaddingTop(); + } + + @Override + void onPreMeasure(int availableWidth, int availableHeight) { + super.onPreMeasure(availableWidth, availableHeight); + int newSize = Math.max(0, availableHeight - ActionBar.getCurrentActionBarHeight()); + if (gridExtraSpace != newSize) { + gridExtraSpace = newSize; + adapter.notifyDataSetChanged(); + } + int paddingTop; + if (!AndroidUtilities.isTablet() && AndroidUtilities.displaySize.x > AndroidUtilities.displaySize.y) { + paddingTop = (int) (availableHeight / 3.5f); + } else { + paddingTop = (availableHeight / 5 * 2); + } + paddingTop -= AndroidUtilities.dp(52); + if (paddingTop < 0) { + paddingTop = 0; + } + if (listView.getPaddingTop() != paddingTop) { + listView.setPadding(AndroidUtilities.dp(6), paddingTop, AndroidUtilities.dp(6), AndroidUtilities.dp(48)); + } + } + + +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAvatarContainer.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAvatarContainer.java index c6f1ff9052..674af29b85 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAvatarContainer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAvatarContainer.java @@ -53,15 +53,17 @@ import org.telegram.ui.ProfileActivity; import org.telegram.ui.TopicsFragment; +import java.util.concurrent.atomic.AtomicReference; + import xyz.nextalone.nagram.helper.MessageHelper; public class ChatAvatarContainer extends FrameLayout implements NotificationCenter.NotificationCenterDelegate { private BackupImageView avatarImageView; private SimpleTextView titleTextView; - private SimpleTextView titleTextLargerCopyView; + private AtomicReference titleTextLargerCopyView = new AtomicReference<>(); private SimpleTextView subtitleTextView; - private SimpleTextView subtitleTextLargerCopyView; + private AtomicReference subtitleTextLargerCopyView = new AtomicReference<>(); private ImageView timeItem; private TimerDrawable timerDrawable; private ChatActivity parentFragment; @@ -97,6 +99,37 @@ public class ChatAvatarContainer extends FrameLayout implements NotificationCent private AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable emojiStatusDrawable; + private class SimpleTextConnectedView extends SimpleTextView { + + private AtomicReference reference; + public SimpleTextConnectedView(Context context, AtomicReference reference) { + super(context); + this.reference = reference; + } + + @Override + public void setTranslationY(float translationY) { + if (reference != null) { + SimpleTextView connected = reference.get(); + if (connected != null) { + connected.setTranslationY(translationY); + } + } + super.setTranslationY(translationY); + } + + @Override + public boolean setText(CharSequence value) { + if (reference != null) { + SimpleTextView connected = reference.get(); + if 
(connected != null) { + connected.setText(value); + } + } + return super.setText(value); + } + } + public ChatAvatarContainer(Context context, BaseFragment baseFragment, boolean needTime) { this(context, baseFragment, needTime, null); } @@ -140,23 +173,7 @@ public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { }); } - titleTextView = new SimpleTextView(context) { - @Override - public boolean setText(CharSequence value) { - if (titleTextLargerCopyView != null) { - titleTextLargerCopyView.setText(value); - } - return super.setText(value); - } - - @Override - public void setTranslationY(float translationY) { - if (titleTextLargerCopyView != null) { - titleTextLargerCopyView.setTranslationY(translationY); - } - super.setTranslationY(translationY); - } - }; + titleTextView = new SimpleTextConnectedView(context, titleTextLargerCopyView); titleTextView.setEllipsizeByGradient(true); titleTextView.setTextColor(getThemedColor(Theme.key_actionBarDefaultTitle)); titleTextView.setTextSize(18); @@ -168,23 +185,7 @@ public void setTranslationY(float translationY) { titleTextView.setPadding(0, AndroidUtilities.dp(6), 0, AndroidUtilities.dp(12)); addView(titleTextView); - subtitleTextView = new SimpleTextView(context) { - @Override - public boolean setText(CharSequence value) { - if (subtitleTextLargerCopyView != null) { - subtitleTextLargerCopyView.setText(value); - } - return super.setText(value); - } - - @Override - public void setTranslationY(float translationY) { - if (subtitleTextLargerCopyView != null) { - subtitleTextLargerCopyView.setTranslationY(translationY); - } - super.setTranslationY(translationY); - } - }; + subtitleTextView = new SimpleTextConnectedView(context, subtitleTextLargerCopyView); subtitleTextView.setEllipsizeByGradient(true); subtitleTextView.setTextColor(getThemedColor(Theme.key_actionBarDefaultSubtitle)); subtitleTextView.setTag(Theme.key_actionBarDefaultSubtitle); @@ -294,7 +295,10 @@ public void setAutoDeleteHistory(int time, int action) { TLRPC.ChatFull chatInfo = parentFragment.getCurrentChatInfo(); TLRPC.UserFull userInfo = parentFragment.getCurrentUserInfo(); if (userInfo != null || chatInfo != null) { - parentFragment.getUndoView().showWithAction(parentFragment.getDialogId(), action, parentFragment.getCurrentUser(), userInfo != null ? userInfo.ttl_period : chatInfo.ttl_period, null, null); + UndoView undoView = parentFragment.getUndoView(); + if (undoView != null) { + undoView.showWithAction(parentFragment.getDialogId(), action, parentFragment.getCurrentUser(), userInfo != null ? userInfo.ttl_period : chatInfo.ttl_period, null, null); + } } } @@ -410,6 +414,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { if (lastWidth != -1 && lastWidth != width && lastWidth > width) { fadeOutToLessWidth(lastWidth); } + SimpleTextView titleTextLargerCopyView = this.titleTextLargerCopyView.get(); if (titleTextLargerCopyView != null) { int largerAvailableWidth = largerWidth - AndroidUtilities.dp((avatarImageView.getVisibility() == VISIBLE ? 
54 : 0) + 16); titleTextLargerCopyView.measure(MeasureSpec.makeMeasureSpec(largerAvailableWidth, MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(24), MeasureSpec.AT_MOST)); @@ -419,10 +424,12 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { private void fadeOutToLessWidth(int largerWidth) { this.largerWidth = largerWidth; + SimpleTextView titleTextLargerCopyView = this.titleTextLargerCopyView.get(); if (titleTextLargerCopyView != null) { removeView(titleTextLargerCopyView); } titleTextLargerCopyView = new SimpleTextView(getContext()); + this.titleTextLargerCopyView.set(titleTextLargerCopyView); titleTextLargerCopyView.setTextColor(getThemedColor(Theme.key_actionBarDefaultTitle)); titleTextLargerCopyView.setTextSize(18); titleTextLargerCopyView.setGravity(Gravity.LEFT); @@ -433,23 +440,30 @@ private void fadeOutToLessWidth(int largerWidth) { titleTextLargerCopyView.setLeftDrawable(titleTextView.getLeftDrawable()); titleTextLargerCopyView.setText(titleTextView.getText()); titleTextLargerCopyView.animate().alpha(0).setDuration(350).setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT).withEndAction(() -> { - if (titleTextLargerCopyView != null) { - removeView(titleTextLargerCopyView); - titleTextLargerCopyView = null; + SimpleTextView titleTextLargerCopyView2 = this.titleTextLargerCopyView.get(); + if (titleTextLargerCopyView2 != null) { + removeView(titleTextLargerCopyView2); + this.titleTextLargerCopyView.set(null); } }).start(); addView(titleTextLargerCopyView); + SimpleTextView subtitleTextLargerCopyView = this.subtitleTextLargerCopyView.get(); + if (subtitleTextLargerCopyView != null) { + removeView(subtitleTextLargerCopyView); + } subtitleTextLargerCopyView = new SimpleTextView(getContext()); + this.subtitleTextLargerCopyView.set(subtitleTextLargerCopyView); subtitleTextLargerCopyView.setTextColor(getThemedColor(Theme.key_actionBarDefaultSubtitle)); subtitleTextLargerCopyView.setTag(Theme.key_actionBarDefaultSubtitle); subtitleTextLargerCopyView.setTextSize(14); subtitleTextLargerCopyView.setGravity(Gravity.LEFT); subtitleTextLargerCopyView.setText(subtitleTextView.getText()); subtitleTextLargerCopyView.animate().alpha(0).setDuration(350).setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT).withEndAction(() -> { - if (subtitleTextLargerCopyView != null) { - removeView(subtitleTextLargerCopyView); - subtitleTextLargerCopyView = null; + SimpleTextView subtitleTextLargerCopyView2 = this.subtitleTextLargerCopyView.get(); + if (subtitleTextLargerCopyView2 != null) { + removeView(subtitleTextLargerCopyView2); + this.subtitleTextLargerCopyView.set(null); setClipChildren(true); } }).start(); @@ -464,6 +478,7 @@ protected void onLayout(boolean changed, int left, int top, int right, int botto int viewTop = (actionBarHeight - AndroidUtilities.dp(42)) / 2 + (Build.VERSION.SDK_INT >= 21 && occupyStatusBar ? AndroidUtilities.statusBarHeight : 0); avatarImageView.layout(leftPadding, viewTop + 1, leftPadding + AndroidUtilities.dp(42), viewTop + 1 + AndroidUtilities.dp(42)); int l = leftPadding + (avatarImageView.getVisibility() == VISIBLE ? 
AndroidUtilities.dp(54) : 0); + SimpleTextView titleTextLargerCopyView = this.titleTextLargerCopyView.get(); if (subtitleTextView.getVisibility() != GONE) { titleTextView.layout(l, viewTop + AndroidUtilities.dp(1.3f) - titleTextView.getPaddingTop(), l + titleTextView.getMeasuredWidth(), viewTop + titleTextView.getTextHeight() + AndroidUtilities.dp(1.3f) - titleTextView.getPaddingTop() + titleTextView.getPaddingBottom()); if (titleTextLargerCopyView != null) { @@ -479,6 +494,7 @@ protected void onLayout(boolean changed, int left, int top, int right, int botto timeItem.layout(leftPadding + AndroidUtilities.dp(16), viewTop + AndroidUtilities.dp(15), leftPadding + AndroidUtilities.dp(16 + 34), viewTop + AndroidUtilities.dp(15 + 34)); } subtitleTextView.layout(l, viewTop + AndroidUtilities.dp(24), l + subtitleTextView.getMeasuredWidth(), viewTop + subtitleTextView.getTextHeight() + AndroidUtilities.dp(24)); + SimpleTextView subtitleTextLargerCopyView = this.subtitleTextLargerCopyView.get(); if (subtitleTextLargerCopyView != null) { subtitleTextLargerCopyView.layout(l, viewTop + AndroidUtilities.dp(24), l + subtitleTextLargerCopyView.getMeasuredWidth(), viewTop + subtitleTextLargerCopyView.getTextHeight() + AndroidUtilities.dp(24)); } @@ -762,7 +778,8 @@ public void onAnimationEnd(Animator animation) { } } else { int[] result = new int[1]; - String shortNumber = LocaleController.formatShortNumber(info.participants_count, result); + boolean ignoreShort = AndroidUtilities.isAccessibilityScreenReaderEnabled(); + String shortNumber = ignoreShort ? String.valueOf(result[0] = info.participants_count) : LocaleController.formatShortNumber(info.participants_count, result); if (chat.megagroup) { newSubtitle = LocaleController.formatPluralString("Members", result[0]).replace(String.format("%d", result[0]), shortNumber); } else { @@ -942,7 +959,7 @@ public void checkAndUpdateAvatar() { } else { avatarDrawable.setScaleSize(1f); if (avatarImageView != null) { - avatarImageView.imageReceiver.setForUserOrChat(user, avatarDrawable, null, true); + avatarImageView.imageReceiver.setForUserOrChat(user, avatarDrawable, null, true, VectorAvatarThumbDrawable.TYPE_STATIC); } } } else if (chat != null) { @@ -986,6 +1003,9 @@ protected void onAttachedToWindow() { currentConnectionState = ConnectionsManager.getInstance(currentAccount).getConnectionState(); updateCurrentConnectionState(); } + if (emojiStatusDrawable != null) { + emojiStatusDrawable.attach(); + } } @Override @@ -995,6 +1015,9 @@ protected void onDetachedFromWindow() { NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.didUpdateConnectionState); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); } + if (emojiStatusDrawable != null) { + emojiStatusDrawable.detach(); + } } @Override @@ -1064,6 +1087,7 @@ public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { sb.append("\n"); sb.append(subtitleTextView.getText()); info.setContentDescription(sb); + setContentDescription(sb); if (info.isClickable() && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { info.addAction(new AccessibilityNodeInfo.AccessibilityAction(AccessibilityNodeInfo.ACTION_CLICK, LocaleController.getString("OpenProfile", R.string.OpenProfile))); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatNotificationsPopupWrapper.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatNotificationsPopupWrapper.java index 1db79549fe..b8411b7eca 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatNotificationsPopupWrapper.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatNotificationsPopupWrapper.java @@ -119,14 +119,12 @@ protected boolean drawChild(Canvas canvas, View child, long drawingTime) { callback.showCustomize(); }); - muteUnmuteButton = ActionBarMenuItem.addItem(windowLayout, 0, "", false, resourcesProvider); muteUnmuteButton.setOnClickListener(view -> { dismiss(); AndroidUtilities.runOnUIThread(() -> { callback.toggleMute(); }); - }); gap = new FrameLayout(context); @@ -173,7 +171,7 @@ public void update(long dialogId, int topicId, HashSet topicExceptions) int color; if (muted) { muteUnmuteButton.setTextAndIcon(LocaleController.getString("UnmuteNotifications", R.string.UnmuteNotifications), R.drawable.msg_unmute); - color = Theme.getColor(Theme.key_wallet_greenText); + color = Theme.getColor(Theme.key_windowBackgroundWhiteGreenText2); soundToggle.setVisibility(View.GONE); } else { muteUnmuteButton.setTextAndIcon(LocaleController.getString("MuteNotifications", R.string.MuteNotifications), R.drawable.msg_mute); @@ -220,6 +218,7 @@ public void update(long dialogId, int topicId, HashSet topicExceptions) } muteUnmuteButton.setColors(color, color); + muteUnmuteButton.setSelectorColor(Theme.multAlpha(color, .1f)); if (topicExceptions == null || topicExceptions.isEmpty()) { gap.setVisibility(View.GONE); @@ -280,6 +279,9 @@ public void showAsOptions(BaseFragment parentFragment, View anchorView, float to float x = touchedX, y = touchedY; View view = anchorView; while (view != parentFragment.getFragmentView()) { + if (view.getParent() == null) { + return; + } x += view.getX(); y += view.getY(); view = (View) view.getParent(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatScrimPopupContainerLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatScrimPopupContainerLayout.java index 6188e9d4c8..839af67bb9 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatScrimPopupContainerLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatScrimPopupContainerLayout.java @@ -18,6 +18,7 @@ public class ChatScrimPopupContainerLayout extends LinearLayout { private float progressToSwipeBack; private float bottomViewYOffset; private float expandSize; + private float bottomViewReactionsOffset; public ChatScrimPopupContainerLayout(Context context) { super(context); @@ -106,6 +107,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { } else { super.onMeasure(widthMeasureSpec, heightMeasureSpec); } + maxHeight = getMeasuredHeight(); } private void updatePopupTranslation() { @@ -148,7 +150,7 @@ public void setPopupWindowLayout(ActionBarPopupWindow.ActionBarPopupWindowLayout private void updateBottomViewPosition() { if (bottomView != null) { - bottomView.setTranslationY(bottomViewYOffset + expandSize); + bottomView.setTranslationY(bottomViewYOffset + expandSize + bottomViewReactionsOffset); } } @@ -168,4 +170,18 @@ public void setPopupAlpha(float aplha) { bottomView.setAlpha(aplha); } } + + public void setReactionsTransitionProgress(float v) { + popupWindowLayout.setReactionsTransitionProgress(v); + if (bottomView != null) { + bottomView.setAlpha(v); + float scale = 0.5f + v * 0.5f; + bottomView.setPivotX(bottomView.getMeasuredWidth()); + bottomView.setPivotY(0); + bottomViewReactionsOffset = -popupWindowLayout.getMeasuredHeight() * (1f - v); + updateBottomViewPosition(); + bottomView.setScaleX(scale); + 
bottomView.setScaleY(scale); + } + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatThemeBottomSheet.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatThemeBottomSheet.java index 8f4b621622..5af5b2361f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatThemeBottomSheet.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatThemeBottomSheet.java @@ -104,6 +104,8 @@ public ChatThemeBottomSheet(final ChatActivity chatActivity, ChatActivity.ThemeD setApplyBottomPadding(false); drawNavigationBar = true; + fixNavigationBar(); + rootLayout = new FrameLayout(getContext()); setCustomView(rootLayout); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/CheckBox2.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/CheckBox2.java index 6626ca270d..b2fd95340d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/CheckBox2.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/CheckBox2.java @@ -3,16 +3,23 @@ import android.content.Context; import android.graphics.Canvas; import android.graphics.Paint; +import android.graphics.PorterDuff; +import android.graphics.drawable.Drawable; import android.view.View; import android.view.accessibility.AccessibilityNodeInfo; import android.widget.CheckBox; +import androidx.core.content.ContextCompat; + +import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.GenericProvider; import org.telegram.ui.ActionBar.Theme; public class CheckBox2 extends View { private CheckBoxBase checkBoxBase; + Drawable iconDrawable; + int currentIcon; public CheckBox2(Context context, int sz) { this(context, sz, null); @@ -93,14 +100,42 @@ protected void onLayout(boolean changed, int left, int top, int right, int botto @Override protected void onDraw(Canvas canvas) { - checkBoxBase.draw(canvas); + if (iconDrawable != null) { + int cx = getMeasuredWidth() >> 1; + int cy = getMeasuredHeight() >> 1; + iconDrawable.setBounds(cx - iconDrawable.getIntrinsicWidth() / 2, cy - iconDrawable.getIntrinsicHeight() / 2, cx + iconDrawable.getIntrinsicWidth() / 2, cy + iconDrawable.getIntrinsicHeight() / 2); + iconDrawable.draw(canvas); + Paint paint = new Paint(); + paint.setStyle(Paint.Style.STROKE); + paint.setStrokeWidth(AndroidUtilities.dp(1.2f)); + paint.setColor(Theme.getColor(Theme.key_switch2Track)); + canvas.drawCircle(cx, cy, cx - AndroidUtilities.dp(1.5f), paint); + } else { + checkBoxBase.draw(canvas); + } } @Override public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { super.onInitializeAccessibilityNodeInfo(info); - info.setClassName(CheckBox.class.getName()); - info.setChecked(isChecked()); + info.setClassName("android.widget.Switch"); info.setCheckable(true); + info.setChecked(isChecked()); + } + + public void setIcon(int icon) { + if (icon != currentIcon) { + currentIcon = icon; + if (icon == 0) { + iconDrawable = null; + } else { + iconDrawable = ContextCompat.getDrawable(getContext(), icon).mutate(); + iconDrawable.setColorFilter(Theme.getColor(Theme.key_switch2Track), PorterDuff.Mode.MULTIPLY); + } + } + } + + public boolean hasIcon() { + return iconDrawable != null; } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/CheckBoxBase.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/CheckBoxBase.java index 296f5e061e..c2200985b6 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/CheckBoxBase.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/CheckBoxBase.java @@ -63,7 +63,7 @@ public class CheckBoxBase { private ProgressDelegate progressDelegate; private Theme.MessageDrawable messageDrawable; - private final Theme.ResourcesProvider resourcesProvider; + private Theme.ResourcesProvider resourcesProvider; private GenericProvider circlePaintProvider = obj -> paint; @@ -93,6 +93,10 @@ public CheckBoxBase(View parent, int sz, Theme.ResourcesProvider resourcesProvid backgroundPaint.setStrokeWidth(AndroidUtilities.dp(1.2f)); } + public void setResourcesProvider(Theme.ResourcesProvider resourcesProvider) { + this.resourcesProvider = resourcesProvider; + } + public void onAttachedToWindow() { attachedToWindow = true; } @@ -354,7 +358,7 @@ public void draw(Canvas canvas) { if (backgroundType == 9) { paint.setColor(getThemedColor(background2ColorKey)); - } else if (backgroundType == 11 || backgroundType == 6 || backgroundType == 7 || backgroundType == 10 || !drawUnchecked && backgroundColorKey != null) { + } else if (backgroundType == 11 || backgroundType == 6 || backgroundType == 7 || backgroundType == 10 || !drawUnchecked && backgroundColorKey != null || backgroundType == 14) { paint.setColor(getThemedColor(backgroundColorKey)); } else { paint.setColor(getThemedColor(enabled ? Theme.key_checkbox : Theme.key_checkboxDisabled)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/CircularProgressDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/CircularProgressDrawable.java index dcf35cb2ea..5d189da015 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/CircularProgressDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/CircularProgressDrawable.java @@ -41,7 +41,7 @@ private void updateSegment() { } public static void getSegments(float t, float[] segments) { - segments[0] = 1520 * t / 5400f - 20; + segments[0] = Math.max(0, 1520 * t / 5400f - 20); segments[1] = 1520 * t / 5400f; for (int i = 0; i < 4; ++i) { segments[1] += interpolator.getInterpolation((t - i * 1350) / 667f) * 250; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ColorPicker.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ColorPicker.java index 7e729e3b82..12112e8247 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ColorPicker.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ColorPicker.java @@ -95,6 +95,7 @@ public class ColorPicker extends FrameLayout { private boolean colorPressed; private int selectedColor; + private int prevSelectedColor; private float pressedMoveProgress = 1.0f; private long lastUpdateTime; @@ -252,6 +253,7 @@ protected void onDraw(Canvas canvas) { boolean checked = radioButton[b] == radioButton1; radioButton[b].setChecked(checked, true); if (checked) { + prevSelectedColor = selectedColor; selectedColor = b; } } @@ -529,7 +531,11 @@ public void setAlpha(float alpha) { } radioButton[3] = button; } - radioButton[0].callOnClick(); + if (prevSelectedColor >= 0 && prevSelectedColor < selectedColor) { + radioButton[prevSelectedColor].callOnClick(); + } else { + radioButton[colorsCount - 1].callOnClick(); + } for (int a = 0; a < radioButton.length; a++) { if (a < colorsCount) { delegate.setColor(radioButton[a].getColor(), a, a == radioButton.length - 1); @@ -907,6 +913,7 @@ public void onAnimationEnd(Animator animation) { public void setType(int resetType, boolean hasChanges, int maxColorsCount, int newColorsCount, boolean myMessages, int angle, boolean 
animated) { if (resetType != currentResetType) { + prevSelectedColor = 0; selectedColor = 0; for (int i = 0; i < 4; i++) { radioButton[i].setChecked(i == selectedColor, true); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/CounterView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/CounterView.java index c4492c1584..dcba7af667 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/CounterView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/CounterView.java @@ -45,7 +45,7 @@ protected void onDraw(Canvas canvas) { } - public void setColors(String textKey, String circleKey){ + public void setColors(String textKey, String circleKey) { counterDrawable.textColorKey = textKey; counterDrawable.circleColorKey = circleKey; } @@ -73,6 +73,7 @@ public static class CounterDrawable { private final static int ANIMATION_TYPE_OUT = 1; private final static int ANIMATION_TYPE_REPLACE = 2; public boolean shortFormat; + public float circleScale = 1f; int animationType = -1; @@ -103,6 +104,7 @@ public static class CounterDrawable { public int gravity = Gravity.CENTER; float countLeft; float x; + public float radius = 11.5f; private boolean reverseAnimation; public float horizontalPadding; @@ -143,13 +145,23 @@ public void setSize(int h, int w) { private void drawInternal(Canvas canvas) { - float countTop = (lastH - AndroidUtilities.dp(23)) / 2f; + float size = radius * 2; + float countTop = (lastH - AndroidUtilities.dp(size)) / 2f; updateX(countWidth); - rectF.set(x, countTop, x + countWidth + AndroidUtilities.dp(11), countTop + AndroidUtilities.dp(23)); + rectF.set(x, countTop, x + countWidth + AndroidUtilities.dp(radius - 0.5f), countTop + AndroidUtilities.dp(size)); if (circlePaint != null && drawBackground) { - canvas.drawRoundRect(rectF, 11.5f * AndroidUtilities.density, 11.5f * AndroidUtilities.density, circlePaint); + boolean needRestore = false; + if (circleScale != 1f) { + canvas.save(); + canvas.scale(circleScale, circleScale, rectF.centerX(), rectF.centerY()); + needRestore = true; + } + canvas.drawRoundRect(rectF, radius * AndroidUtilities.density, radius * AndroidUtilities.density, circlePaint); if (addServiceGradient && Theme.hasGradientService()) { - canvas.drawRoundRect(rectF, 11.5f * AndroidUtilities.density, 11.5f * AndroidUtilities.density, Theme.chat_actionBackgroundGradientDarkenPaint); + canvas.drawRoundRect(rectF, radius * AndroidUtilities.density, radius * AndroidUtilities.density, Theme.chat_actionBackgroundGradientDarkenPaint); + } + if (needRestore) { + canvas.restore(); } } if (countLayout != null) { @@ -187,6 +199,7 @@ public void setCount(int count, boolean animated) { if (parent != null) { parent.invalidate(); } + return; } String newStr = getStringOfCCount(count); @@ -306,7 +319,7 @@ public void draw(Canvas canvas) { progressHalf = 1f; } - float countTop = (lastH - AndroidUtilities.dp(23)) / 2f; + float countTop = (lastH - AndroidUtilities.dp(radius * 2)) / 2f; float countWidth; if (this.countWidth == this.countWidthOld) { countWidth = this.countWidth; @@ -324,15 +337,24 @@ public void draw(Canvas canvas) { } } - rectF.set(x, countTop, x + countWidth + AndroidUtilities.dp(11), countTop + AndroidUtilities.dp(23)); + rectF.set(x, countTop, x + countWidth + AndroidUtilities.dp(radius - 0.5f), countTop + AndroidUtilities.dp(radius * 2)); canvas.save(); canvas.scale(scale, scale, rectF.centerX(), rectF.centerY()); + boolean needRestore = false; + if (circleScale != 1f) { + needRestore = true; + canvas.save(); + 
canvas.scale(circleScale, circleScale, rectF.centerX(), rectF.centerY()); + } if (drawBackground && circlePaint != null) { - canvas.drawRoundRect(rectF, 11.5f * AndroidUtilities.density, 11.5f * AndroidUtilities.density, circlePaint); + canvas.drawRoundRect(rectF, radius * AndroidUtilities.density, radius * AndroidUtilities.density, circlePaint); if (addServiceGradient && Theme.hasGradientService()) { - canvas.drawRoundRect(rectF, 11.5f * AndroidUtilities.density, 11.5f * AndroidUtilities.density, Theme.chat_actionBackgroundGradientDarkenPaint); + canvas.drawRoundRect(rectF, radius * AndroidUtilities.density, radius * AndroidUtilities.density, Theme.chat_actionBackgroundGradientDarkenPaint); } } + if (needRestore) { + canvas.restore(); + } canvas.clipRect(rectF); boolean increment = reverseAnimation != countAnimationIncrement; @@ -377,14 +399,14 @@ public void updateBackgroundRect() { if (countChangeProgress != 1f) { if (animationType == ANIMATION_TYPE_IN || animationType == ANIMATION_TYPE_OUT) { updateX(countWidth); - float countTop = (lastH - AndroidUtilities.dp(23)) / 2f; + float countTop = (lastH - AndroidUtilities.dp(radius * 2)) / 2f; rectF.set(x, countTop, x + countWidth + AndroidUtilities.dp(11), countTop + AndroidUtilities.dp(23)); } else { float progressHalf = countChangeProgress * 2; if (progressHalf > 1f) { progressHalf = 1f; } - float countTop = (lastH - AndroidUtilities.dp(23)) / 2f; + float countTop = (lastH - AndroidUtilities.dp(radius * 2)) / 2f; float countWidth; if (this.countWidth == this.countWidthOld) { countWidth = this.countWidth; @@ -396,7 +418,7 @@ public void updateBackgroundRect() { } } else { updateX(countWidth); - float countTop = (lastH - AndroidUtilities.dp(23)) / 2f; + float countTop = (lastH - AndroidUtilities.dp(radius * 2)) / 2f; rectF.set(x, countTop, x + countWidth + AndroidUtilities.dp(11), countTop + AndroidUtilities.dp(23)); } } @@ -435,6 +457,10 @@ private int getThemedColor(String key) { Integer color = resourcesProvider != null ? resourcesProvider.getColor(key) : null; return color != null ? color : Theme.getColor(key); } + + public int getWidth() { + return currentCount == 0 ? 0 : (countWidth + AndroidUtilities.dp(radius - 0.5f)); + } } public float getEnterProgress() { @@ -452,4 +478,5 @@ public float getEnterProgress() { public boolean isInOutAnimation() { return counterDrawable.animationType == CounterDrawable.ANIMATION_TYPE_IN || counterDrawable.animationType == CounterDrawable.ANIMATION_TYPE_OUT; } + } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/CustomPhoneKeyboardView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/CustomPhoneKeyboardView.java index 301c11a0e9..1cbed5d512 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/CustomPhoneKeyboardView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/CustomPhoneKeyboardView.java @@ -37,9 +37,12 @@ public class CustomPhoneKeyboardView extends ViewGroup { private EditText editText; private View[] views = new View[12]; + private View viewToFindFocus; + private boolean dispatchBackWhenEmpty; private boolean runningLongClick; private Runnable onBackButton = () -> { + checkFindEditText(); if (editText == null || editText.length() == 0 && !dispatchBackWhenEmpty) return; if (!NekoConfig.disableVibration.Bool()) { @@ -104,6 +107,7 @@ public CustomPhoneKeyboardView(Context context) { String num = String.valueOf(i != 10 ? 
i + 1 : 0); views[i] = new NumberButtonView(context, num, symbols); views[i].setOnClickListener(v -> { + checkFindEditText(); if (editText == null) return; if (!NekoConfig.disableVibration.Bool()) { @@ -192,6 +196,19 @@ public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float d }); } + public void setViewToFindFocus(View viewToFindFocus) { + this.viewToFindFocus = viewToFindFocus; + } + + public void checkFindEditText() { + if (editText == null && viewToFindFocus != null) { + View focus = viewToFindFocus.findFocus(); + if (focus instanceof EditText) { + editText = (EditText) focus; + } + } + } + public void setEditText(EditText editText) { this.editText = editText; dispatchBackWhenEmpty = false; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/DrawingInBackgroundThreadDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/DrawingInBackgroundThreadDrawable.java index b6b686484b..64d825ba7f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/DrawingInBackgroundThreadDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/DrawingInBackgroundThreadDrawable.java @@ -28,7 +28,7 @@ public class DrawingInBackgroundThreadDrawable implements NotificationCenter.Not private boolean bitmapUpdating; - public int currentLayerNum = 1; + private int currentLayerNum = 1; private int currentOpenedLayerFlags; protected boolean paused; @@ -141,7 +141,7 @@ public void draw(Canvas canvas, long time, int w, int h, float alpha) { } bitmapCanvas.save(); bitmapCanvas.translate(0, padding); - drawInUiThread(bitmapCanvas, 1f); + drawInUiThread(bitmapCanvas, alpha); bitmapCanvas.restore(); } @@ -154,7 +154,7 @@ public void draw(Canvas canvas, long time, int w, int h, float alpha) { if (bitmap != null ) { Bitmap drawingBitmap = bitmap; - paint.setAlpha((int) (255 * alpha)); + paint.setAlpha((int) (0xFF * alpha)); canvas.save(); canvas.translate(0, -padding); this.drawBitmap(canvas, drawingBitmap, paint); @@ -299,6 +299,13 @@ public DispatchQueue getNextQueue() { } return queue; } + } + public void setLayerNum(int value) { + currentLayerNum = value; + if (attachedToWindow) { + currentOpenedLayerFlags = NotificationCenter.getGlobalInstance().getCurrentHeavyOperationFlags(); + currentOpenedLayerFlags &= ~currentLayerNum; + } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextBoldCursor.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextBoldCursor.java index cf1c0aff03..d5cd5cdee7 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextBoldCursor.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextBoldCursor.java @@ -503,12 +503,15 @@ public void setLineColors(int color, int active, int error) { invalidate(); } - public void setHintVisible(boolean value) { + public void setHintVisible(boolean value, boolean animated) { if (hintVisible == value) { return; } hintLastUpdateTime = System.currentTimeMillis(); hintVisible = value; + if (!animated) { + hintAlpha = hintVisible ? 
1f : 0; + } invalidate(); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextCaption.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextCaption.java index 15491ce24e..c0075fad4e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextCaption.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextCaption.java @@ -816,6 +816,7 @@ public boolean onTextContextMenuItem(int id) { stringBuilder.append(getText().subSequence(end, getText().length())); } setText(stringBuilder); + setSelection(start, start); return true; } catch (Exception e) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextEmoji.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextEmoji.java index 57ced78c87..e54f0ced35 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextEmoji.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextEmoji.java @@ -334,7 +334,7 @@ public void hidePopup(boolean byBackButton) { showPopup(0); } if (byBackButton) { - if (SharedConfig.smoothKeyboard && emojiView != null && emojiView.getVisibility() == View.VISIBLE && !waitingForKeyboardOpen) { + if (emojiView != null && emojiView.getVisibility() == View.VISIBLE && !waitingForKeyboardOpen) { int height = emojiView.getMeasuredHeight(); ValueAnimator animator = ValueAnimator.ofFloat(0, height); animator.addUpdateListener(animation -> { @@ -433,24 +433,23 @@ protected void showPopup(int show) { } if (!keyboardVisible && !emojiWasVisible) { - if (SharedConfig.smoothKeyboard) { - ValueAnimator animator = ValueAnimator.ofFloat(emojiPadding, 0); - animator.addUpdateListener(animation -> { - float v = (float) animation.getAnimatedValue(); - emojiView.setTranslationY(v); - bottomPanelTranslationY(v); - }); - animator.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - emojiView.setTranslationY(0); - bottomPanelTranslationY(0); - } - }); - animator.setDuration(AdjustPanLayoutHelper.keyboardDuration); - animator.setInterpolator(AdjustPanLayoutHelper.keyboardInterpolator); - animator.start(); - } + ValueAnimator animator = ValueAnimator.ofFloat(emojiPadding, 0); + animator.addUpdateListener(animation -> { + float v = (float) animation.getAnimatedValue(); + emojiView.setTranslationY(v); + bottomPanelTranslationY(v); + }); + animator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + emojiView.setTranslationY(0); + bottomPanelTranslationY(0); + } + }); + animator.setDuration(AdjustPanLayoutHelper.keyboardDuration); + animator.setInterpolator(AdjustPanLayoutHelper.keyboardInterpolator); + animator.start(); + } } else { if (emojiButton != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiPacksAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiPacksAlert.java index dcdd2113d0..accf16b093 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiPacksAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiPacksAlert.java @@ -56,6 +56,7 @@ import org.telegram.messenger.R; import org.telegram.messenger.SharedConfig; import org.telegram.messenger.UserConfig; +import org.telegram.messenger.UserObject; import org.telegram.messenger.Utilities; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLObject; @@ -69,6 +70,7 @@ import org.telegram.ui.ChatActivity; import 
org.telegram.ui.Components.Premium.PremiumButtonView; import org.telegram.ui.Components.Premium.PremiumFeatureBottomSheet; +import org.telegram.ui.ContentPreviewViewer; import org.telegram.ui.LaunchActivity; import org.telegram.ui.PremiumPreviewFragment; import org.telegram.ui.ProfileActivity; @@ -87,14 +89,16 @@ public class EmojiPacksAlert extends BottomSheet implements NotificationCenter.N private View paddingView; private EmojiPacksLoader customEmojiPacks; - + private ContentView contentView; private RecyclerListView listView; + private Adapter adapter; private View shadowView; private FrameLayout buttonsView; private TextView addButtonView; private TextView removeButtonView; private PremiumButtonView premiumButtonView; private GridLayoutManager gridLayoutManager; + private RecyclerAnimationScrollHelper scrollHelper; private CircularProgressDrawable progressDrawable; private ActionBarPopupWindow popupWindow; @@ -104,6 +108,113 @@ public class EmojiPacksAlert extends BottomSheet implements NotificationCenter.N private float lastY; private Float fromY; + int highlightStartPosition = -1, highlightEndPosition = -1; + private AnimatedFloat highlightAlpha; + + private ContentPreviewViewer.ContentPreviewViewerDelegate previewDelegate = new ContentPreviewViewer.ContentPreviewViewerDelegate() { + @Override + public boolean can() { + return true; + } + + @Override + public boolean needSend(int contentType) { + return fragment instanceof ChatActivity && ((ChatActivity) fragment).canSendMessage() && (UserConfig.getInstance(UserConfig.selectedAccount).isPremium() || ((ChatActivity) fragment).getCurrentUser() != null && UserObject.isUserSelf(((ChatActivity) fragment).getCurrentUser())); + } + + @Override + public void sendEmoji(TLRPC.Document emoji) { + if (fragment instanceof ChatActivity) { + ((ChatActivity) fragment).sendAnimatedEmoji(emoji, true, 0); + } + onCloseByLink(); + dismiss(); + } + + @Override + public boolean needCopy() { + return UserConfig.getInstance(UserConfig.selectedAccount).isPremium(); + } + + @Override + public void copyEmoji(TLRPC.Document document) { + Spannable spannable = SpannableStringBuilder.valueOf(MessageObject.findAnimatedEmojiEmoticon(document)); + spannable.setSpan(new AnimatedEmojiSpan(document, null), 0, spannable.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + if (AndroidUtilities.addToClipboard(spannable)) { + BulletinFactory.of((FrameLayout) containerView, resourcesProvider).createCopyBulletin(LocaleController.getString("EmojiCopied", R.string.EmojiCopied)).show(); + } + } + + @Override + public Boolean canSetAsStatus(TLRPC.Document document) { + if (!UserConfig.getInstance(UserConfig.selectedAccount).isPremium()) { + return null; + } + TLRPC.User user = UserConfig.getInstance(UserConfig.selectedAccount).getCurrentUser(); + if (user == null) { + return null; + } + Long emojiStatusId = UserObject.getEmojiStatusDocumentId(user); + return document != null && (emojiStatusId == null || emojiStatusId != document.id); + } + + @Override + public void setAsEmojiStatus(TLRPC.Document document, Integer until) { + TLRPC.EmojiStatus status; + if (document == null) { + status = new TLRPC.TL_emojiStatusEmpty(); + } else if (until != null) { + status = new TLRPC.TL_emojiStatusUntil(); + ((TLRPC.TL_emojiStatusUntil) status).document_id = document.id; + ((TLRPC.TL_emojiStatusUntil) status).until = until; + } else { + status = new TLRPC.TL_emojiStatus(); + ((TLRPC.TL_emojiStatus) status).document_id = document.id; + } + TLRPC.User user = 
UserConfig.getInstance(UserConfig.selectedAccount).getCurrentUser(); + final TLRPC.EmojiStatus previousEmojiStatus = user == null ? new TLRPC.TL_emojiStatusEmpty() : user.emoji_status; + MessagesController.getInstance(currentAccount).updateEmojiStatus(status); + + Runnable undoAction = () -> MessagesController.getInstance(currentAccount).updateEmojiStatus(previousEmojiStatus); + if (document == null) { + final Bulletin.SimpleLayout layout = new Bulletin.SimpleLayout(getContext(), resourcesProvider); + layout.textView.setText(LocaleController.getString("RemoveStatusInfo", R.string.RemoveStatusInfo)); + layout.imageView.setImageResource(R.drawable.msg_settings_premium); + Bulletin.UndoButton undoButton = new Bulletin.UndoButton(getContext(), true, resourcesProvider); + undoButton.setUndoAction(undoAction); + layout.setButton(undoButton); + Bulletin.make((FrameLayout) containerView, layout, Bulletin.DURATION_SHORT).show(); + } else { + BulletinFactory.of((FrameLayout) containerView, resourcesProvider).createEmojiBulletin(document, LocaleController.getString("SetAsEmojiStatusInfo", R.string.SetAsEmojiStatusInfo), LocaleController.getString("Undo", R.string.Undo), undoAction).show(); + } + } + + @Override + public boolean canSchedule() { + return false; +// return delegate != null && delegate.canSchedule(); + } + + @Override + public boolean isInScheduleMode() { + if (fragment instanceof ChatActivity) { + return ((ChatActivity) fragment).isInScheduleMode(); + } + return false; + } + + @Override + public void openSet(TLRPC.InputStickerSet set, boolean clearsInputField) { + + } + + @Override + public long getDialogId() { + + return 0; + } + }; + @Override protected boolean canDismissWithSwipe() { return false; @@ -118,7 +229,7 @@ public EmojiPacksAlert(BaseFragment fragment, Context context, Theme.ResourcesPr } private EmojiPacksAlert(BaseFragment fragment, Context context, Theme.ResourcesProvider resourceProvider, ArrayList stickerSets, TLObject parentObject) { - super(context, false, resourceProvider); + super(context, false, resourceProvider = fragment != null && fragment.getResourceProvider() != null ? 
fragment.getResourceProvider() : resourceProvider); this.fragment = fragment; fixNavigationBar(); @@ -135,300 +246,7 @@ protected void onUpdate() { progressDrawable = new CircularProgressDrawable(AndroidUtilities.dp(32), AndroidUtilities.dp(3.5f), getThemedColor(Theme.key_featuredStickers_addButton)); final ColorFilter colorFilter = new PorterDuffColorFilter(ColorUtils.setAlphaComponent(getThemedColor(Theme.key_windowBackgroundWhiteLinkText), 178), PorterDuff.Mode.MULTIPLY); - containerView = new FrameLayout(context) { - - private Paint paint = new Paint(); - private Path path = new Path(); - private Boolean lastOpen = null; - boolean attached; - SparseArray> viewsGroupedByLines = new SparseArray<>(); - ArrayList lineDrawables = new ArrayList<>(); - ArrayList lineDrawablesTmp = new ArrayList<>(); - ArrayList> unusedArrays = new ArrayList<>(); - ArrayList unusedLineDrawables = new ArrayList<>(); - - @Override - protected void dispatchDraw(Canvas canvas) { - if (!attached) { - return; - } - paint.setColor(getThemedColor(Theme.key_dialogBackground)); - paint.setShadowLayer(dp(2), 0, dp(-0.66f), 0x1e000000); - path.reset(); - float y = lastY = getListTop(); - float pad = 0; - if (fromY != null) { - float wasY = y; - y = AndroidUtilities.lerp(fromY, y + containerView.getY(), loadT) - containerView.getY(); - pad = y - wasY; - } - float stickToTop = (1f - MathUtils.clamp((y - containerView.getPaddingTop()) / AndroidUtilities.dp(32), 0, 1)); - y -= stickToTop * containerView.getPaddingTop(); - float r = dp((1f - stickToTop) * 14); - AndroidUtilities.rectTmp.set(getPaddingLeft(), y, getWidth() - getPaddingRight(), getBottom() + r); - path.addRoundRect(AndroidUtilities.rectTmp, r, r, Path.Direction.CW); - canvas.drawPath(path, paint); - - boolean open = stickToTop > .75f; - if (lastOpen == null || open != lastOpen) { - updateLightStatusBar(lastOpen = open); - } - - Theme.dialogs_onlineCirclePaint.setColor(getThemedColor(Theme.key_sheet_scrollUp)); - Theme.dialogs_onlineCirclePaint.setAlpha((int) (MathUtils.clamp(y / (float) AndroidUtilities.dp(20), 0, 1) * Theme.dialogs_onlineCirclePaint.getAlpha())); - int w = AndroidUtilities.dp(36); - y += AndroidUtilities.dp(10); - AndroidUtilities.rectTmp.set((getMeasuredWidth() - w) / 2, y, (getMeasuredWidth() + w) / 2, y + AndroidUtilities.dp(4)); - canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(2), AndroidUtilities.dp(2), Theme.dialogs_onlineCirclePaint); - - shadowView.setVisibility(listView.canScrollVertically(1) || removeButtonView.getVisibility() == View.VISIBLE ? 
View.VISIBLE : View.INVISIBLE); - if (listView != null) { - canvas.save(); - canvas.translate(listView.getLeft(), listView.getTop() + pad); - canvas.clipRect(0, 0, listView.getWidth(), listView.getHeight()); - canvas.saveLayerAlpha(0, 0, listView.getWidth(), listView.getHeight(), (int) (255 * listView.getAlpha()), Canvas.ALL_SAVE_FLAG); - - for (int i = 0; i < viewsGroupedByLines.size(); i++) { - ArrayList arrayList = viewsGroupedByLines.valueAt(i); - arrayList.clear(); - unusedArrays.add(arrayList); - } - viewsGroupedByLines.clear(); - for (int i = 0; i < listView.getChildCount(); ++i) { - View child = listView.getChildAt(i); - if (child instanceof EmojiImageView) { - ((EmojiImageView) child).updatePressedProgress(); - if (animatedEmojiDrawables == null) { - animatedEmojiDrawables = new LongSparseArray<>(); - } - AnimatedEmojiSpan span = ((EmojiImageView) child).span; - if (span == null) { - continue; - } - long documentId = span.getDocumentId(); - AnimatedEmojiDrawable drawable = animatedEmojiDrawables.get(documentId); - if (drawable == null) { - animatedEmojiDrawables.put(documentId, drawable = AnimatedEmojiDrawable.make(currentAccount, AnimatedEmojiDrawable.CACHE_TYPE_ALERT_PREVIEW, documentId)); - } - drawable.setColorFilter(Theme.chat_animatedEmojiTextColorFilter); - drawable.addView(this); - ArrayList arrayList = viewsGroupedByLines.get(child.getTop()); - if (arrayList == null) { - if (!unusedArrays.isEmpty()) { - arrayList = unusedArrays.remove(unusedArrays.size() - 1); - } else { - arrayList = new ArrayList<>(); - } - viewsGroupedByLines.put(child.getTop(), arrayList); - } - arrayList.add((EmojiImageView) child); - } else { - canvas.save(); - canvas.translate(child.getLeft(), child.getTop()); - child.draw(canvas); - canvas.restore(); - } - } - - lineDrawablesTmp.clear(); - lineDrawablesTmp.addAll(lineDrawables); - lineDrawables.clear(); - - long time = System.currentTimeMillis(); - for (int i = 0; i < viewsGroupedByLines.size(); i++) { - ArrayList arrayList = viewsGroupedByLines.valueAt(i); - View firstView = arrayList.get(0); - int position = listView.getChildAdapterPosition(firstView); - DrawingInBackgroundLine drawable = null; - for (int k = 0; k < lineDrawablesTmp.size(); k++) { - if (lineDrawablesTmp.get(k).position == position) { - drawable = lineDrawablesTmp.get(k); - lineDrawablesTmp.remove(k); - break; - } - } - if (drawable == null) { - if (!unusedLineDrawables.isEmpty()) { - drawable = unusedLineDrawables.remove(unusedLineDrawables.size() - 1); - } else { - drawable = new DrawingInBackgroundLine(); - drawable.currentLayerNum = 7; - } - drawable.position = position; - drawable.onAttachToWindow(); - } - lineDrawables.add(drawable); - drawable.imageViewEmojis = arrayList; - canvas.save(); - canvas.translate(0, firstView.getY() + firstView.getPaddingTop()); - drawable.draw(canvas, time, getMeasuredWidth(), firstView.getMeasuredHeight() - firstView.getPaddingBottom(), 1f); - canvas.restore(); - } - - for (int i = 0; i < lineDrawablesTmp.size(); i++) { - if (unusedLineDrawables.size() < 3) { - unusedLineDrawables.add(lineDrawablesTmp.get(i)); - lineDrawablesTmp.get(i).imageViewEmojis = null; - lineDrawablesTmp.get(i).reset(); - - } else { - lineDrawablesTmp.get(i).onDetachFromWindow(); - } - } - lineDrawablesTmp.clear(); - canvas.restore(); - canvas.restore(); - - if (listView.getAlpha() < 1) { - int cx = getWidth() / 2; - int cy = ((int) y + getHeight()) / 2; - int R = AndroidUtilities.dp(16); - progressDrawable.setAlpha((int) (255 * (1f - listView.getAlpha()))); - 
progressDrawable.setBounds(cx - R, cy - R, cx + R, cy + R); - progressDrawable.draw(canvas); - invalidate(); - } - } - super.dispatchDraw(canvas); - } - - @Override - public boolean dispatchTouchEvent(MotionEvent event) { - if (event.getAction() == MotionEvent.ACTION_DOWN && event.getY() < getListTop() - AndroidUtilities.dp(6)) { - dismiss(); - } - return super.dispatchTouchEvent(event); - } - - class DrawingInBackgroundLine extends DrawingInBackgroundThreadDrawable { - public int position; - ArrayList imageViewEmojis; - ArrayList drawInBackgroundViews = new ArrayList<>(); - - - @Override - public void prepareDraw(long time) { - drawInBackgroundViews.clear(); - for (int i = 0; i < imageViewEmojis.size(); i++) { - EmojiImageView imageView = imageViewEmojis.get(i); - AnimatedEmojiSpan span = imageView.span; - if (span == null) { - continue; - } - AnimatedEmojiDrawable drawable = animatedEmojiDrawables.get(imageView.span.getDocumentId()); - if (drawable == null || drawable.getImageReceiver() == null) { - continue; - } - - drawable.update(time); - imageView.backgroundThreadDrawHolder[threadIndex] = drawable.getImageReceiver().setDrawInBackgroundThread(imageView.backgroundThreadDrawHolder[threadIndex], threadIndex); - imageView.backgroundThreadDrawHolder[threadIndex].time = time; - drawable.setAlpha(255); - AndroidUtilities.rectTmp2.set(imageView.getLeft() + imageView.getPaddingLeft(), imageView.getPaddingTop(), imageView.getRight() - imageView.getPaddingRight(), imageView.getMeasuredHeight() - imageView.getPaddingBottom()); - imageView.backgroundThreadDrawHolder[threadIndex].setBounds(AndroidUtilities.rectTmp2); - drawable.setColorFilter(Theme.chat_animatedEmojiTextColorFilter); - imageView.imageReceiver = drawable.getImageReceiver();; - drawInBackgroundViews.add(imageView); - } - } - - @Override - public void draw(Canvas canvas, long time, int w, int h, float alpha) { - if (imageViewEmojis == null) { - return; - } - boolean drawInUi = imageViewEmojis.size() <= 3 || SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW; - if (!drawInUi) { - for (int i = 0; i < imageViewEmojis.size(); i++) { - EmojiImageView img = imageViewEmojis.get(i); - if (img.pressedProgress != 0 || img.backAnimator != null || img.getTranslationX() != 0 || img.getTranslationY() != 0 || img.getAlpha() != 1) { - drawInUi = true; - break; - } - } - } - if (drawInUi) { - prepareDraw(System.currentTimeMillis()); - drawInUiThread(canvas, alpha); - reset(); - } else { - super.draw(canvas, time, w, h, alpha); - } - } - - @Override - public void drawInBackground(Canvas canvas) { - for (int i = 0; i < drawInBackgroundViews.size(); i++) { - EmojiImageView imageView = drawInBackgroundViews.get(i); - imageView.imageReceiver.draw(canvas, imageView.backgroundThreadDrawHolder[threadIndex]); - } - } - - @Override - protected void drawInUiThread(Canvas canvas, float alpha) { - if (imageViewEmojis != null) { - for (int i = 0; i < imageViewEmojis.size(); i++) { - EmojiImageView imageView = imageViewEmojis.get(i); - AnimatedEmojiSpan span = imageView.span; - if (span == null) { - continue; - } - AnimatedEmojiDrawable drawable = animatedEmojiDrawables.get(imageView.span.getDocumentId()); - if (drawable == null || drawable.getImageReceiver() == null) { - continue; - } - if (imageView.imageReceiver != null) { - drawable.setAlpha((int) (255 * alpha * imageView.getAlpha())); - float hw = (imageView.getWidth() - imageView.getPaddingLeft() - imageView.getPaddingRight()) / 2f; - float hh = (imageView.getHeight() - 
imageView.getPaddingTop() - imageView.getPaddingBottom()) / 2f; - float cx = (imageView.getLeft() + imageView.getRight()) / 2f; - float cy = imageView.getPaddingTop() + hh; - float scale = 1f; - if (imageView.pressedProgress != 0) { - scale *= 0.8f + 0.2f * (1f - imageView.pressedProgress); - } - drawable.setBounds( - (int) (cx - hw * imageView.getScaleX() * scale), - (int) (cy - hh * imageView.getScaleY() * scale), - (int) (cx + hw * imageView.getScaleX() * scale), - (int) (cy + hh * imageView.getScaleY() * scale) - ); - drawable.draw(canvas); - } - } - } - } - - @Override - public void onFrameReady() { - super.onFrameReady(); - for (int i = 0; i < drawInBackgroundViews.size(); i++) { - EmojiImageView imageView = drawInBackgroundViews.get(i); - imageView.backgroundThreadDrawHolder[threadIndex].release(); - } - containerView.invalidate(); - } - } - - @Override - protected void onAttachedToWindow() { - super.onAttachedToWindow(); - attached = true; - } - - @Override - protected void onDetachedFromWindow() { - super.onDetachedFromWindow(); - attached = false; - for (int i = 0; i < lineDrawables.size(); i++) { - lineDrawables.get(i).onDetachFromWindow(); - } - for (int i = 0; i < unusedLineDrawables.size(); i++) { - unusedLineDrawables.get(i).onDetachFromWindow(); - } - lineDrawables.clear(); - } - }; + containerView = contentView = new ContentView(context); paddingView = new View(context) { @Override @@ -439,6 +257,12 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { }; listView = new RecyclerListView(context) { + @Override + public boolean onInterceptTouchEvent(MotionEvent event) { + boolean result = ContentPreviewViewer.getInstance().onInterceptTouchEvent(event, listView, 0, previewDelegate, resourcesProvider); + return super.onInterceptTouchEvent(event) || result; + } + @Override protected void onMeasure(int widthSpec, int heightSpec) { int width = MeasureSpec.getSize(widthSpec); @@ -449,9 +273,16 @@ protected void onMeasure(int widthSpec, int heightSpec) { @Override public void onScrolled(int dx, int dy) { super.onScrolled(dx, dy); + contentView.updateEmojiDrawables(); containerView.invalidate(); } + @Override + protected void onLayout(boolean changed, int l, int t, int r, int b) { + super.onLayout(changed, l, t, r, b); + contentView.updateEmojiDrawables(); + } + @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); @@ -462,15 +293,47 @@ protected void onDetachedFromWindow() { public boolean drawChild(Canvas canvas, View child, long drawingTime) { return false; } + + private Paint highlightPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + + @Override + protected void dispatchDraw(Canvas canvas) { + if (highlightAlpha != null && highlightStartPosition >= 0 && highlightEndPosition >= 0 && adapter != null && isAttachedToWindow()) { + float alpha = highlightAlpha.set(0); + + if (alpha > 0) { + int top = Integer.MAX_VALUE, bottom = Integer.MIN_VALUE; + for (int i = 0; i < getChildCount(); ++i) { + View child = getChildAt(i); + int position = getChildAdapterPosition(child); + + if (position != NO_POSITION && position >= highlightStartPosition && position <= highlightEndPosition) { + top = Math.min(top, child.getTop() + (int) child.getTranslationY()); + bottom = Math.max(bottom, child.getBottom() + (int) child.getTranslationY()); + } + } + + if (top < bottom) { + highlightPaint.setColor(Theme.multAlpha(getThemedColor(Theme.key_chat_linkSelectBackground), alpha)); + canvas.drawRect(0, top, getMeasuredWidth(), bottom, highlightPaint); + } + + 
invalidate(); + } + } + + super.dispatchDraw(canvas); + } }; + highlightAlpha = new AnimatedFloat(0, listView, 0, 1250, CubicBezierInterpolator.EASE_IN); containerView.setPadding(backgroundPaddingLeft, AndroidUtilities.statusBarHeight, backgroundPaddingLeft, 0); containerView.setClipChildren(false); containerView.setClipToPadding(false); containerView.setWillNotDraw(false); + listView.setWillNotDraw(false); listView.setSelectorRadius(AndroidUtilities.dp(6)); listView.setSelectorDrawableColor(Theme.getColor(Theme.key_listSelector, resourceProvider)); listView.setPadding(AndroidUtilities.dp(8), 0, AndroidUtilities.dp(8), AndroidUtilities.dp(68)); - listView.setAdapter(new Adapter()); listView.setLayoutManager(gridLayoutManager = new GridLayoutManager(context, 8)); listView.addItemDecoration(new RecyclerView.ItemDecoration() { @Override @@ -483,7 +346,9 @@ public void getItemOffsets(@NonNull Rect outRect, @NonNull View view, @NonNull R } } }); - listView.setOnItemClickListener((view, position) -> { + final Theme.ResourcesProvider finalResourceProvider = resourceProvider; + RecyclerListView.OnItemClickListener stickersOnItemClickListener; + listView.setOnItemClickListener(stickersOnItemClickListener = (view, position) -> { if (stickerSets == null || stickerSets.size() <= 1) { if (popupWindow != null) { popupWindow.dismiss(); @@ -496,7 +361,6 @@ public void getItemOffsets(@NonNull Rect outRect, @NonNull View view, @NonNull R SpannableString text = new SpannableString(MessageObject.findAnimatedEmojiEmoticon(span.document == null ? AnimatedEmojiDrawable.findDocument(currentAccount, span.getDocumentId()) : span.document)); text.setSpan(span, 0, text.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); ((Editable) ((ChatActivity) fragment).getChatActivityEnterView().messageEditText.getText()).append(text); - ((ChatActivity) fragment).showEmojiHint(); onCloseByLink(); dismiss(); } catch (Exception ignore) {} @@ -531,7 +395,7 @@ public void getItemOffsets(@NonNull Rect outRect, @NonNull View view, @NonNull R inputStickerSet.id = stickerSet.set.id; inputStickerSet.access_hash = stickerSet.set.access_hash; inputStickerSets.add(inputStickerSet); - new EmojiPacksAlert(fragment, getContext(), resourceProvider, inputStickerSets) { + new EmojiPacksAlert(fragment, getContext(), finalResourceProvider, inputStickerSets) { @Override protected void onCloseByLink() { EmojiPacksAlert.this.dismiss(); @@ -594,6 +458,7 @@ protected void onCloseByLink() { } return false; }); + listView.setOnTouchListener((v, event) -> ContentPreviewViewer.getInstance().onTouch(event, listView, 0, stickersOnItemClickListener, previewDelegate, resourcesProvider)); gridLayoutManager.setReverseLayout(false); gridLayoutManager.setSpanSizeLookup(new GridLayoutManager.SpanSizeLookup() { @Override @@ -601,6 +466,7 @@ public int getSpanSize(int position) { return listView.getAdapter() != null && listView.getAdapter().getItemViewType(position) != 1 ? 
gridLayoutManager.getSpanCount() : 1; } }); + scrollHelper = new RecyclerAnimationScrollHelper(listView, gridLayoutManager); containerView.addView(listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.TOP | Gravity.LEFT)); @@ -637,16 +503,26 @@ public int getSpanSize(int position) { premiumButtonView.setIcon(R.raw.unlock_icon); premiumButtonView.buttonLayout.setClickable(true); buttonsView.addView(premiumButtonView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.BOTTOM, 12, 10, 12, 10)); + } - updateButton(); - int currentAccount = fragment == null ? UserConfig.selectedAccount : fragment.getCurrentAccount(); - MediaDataController.getInstance(currentAccount).checkStickers(MediaDataController.TYPE_EMOJIPACKS); + @Override + public void onBackPressed() { + if (ContentPreviewViewer.getInstance().isVisible()) { + ContentPreviewViewer.getInstance().closeWithMenu(); + return; + } + super.onBackPressed(); } protected void onButtonClicked(boolean install) { } + private int highlightIndex = -1; + public void highlight(int setIndex) { + highlightIndex = setIndex; + } + private boolean shown = false; private void updateShowButton(boolean show) { boolean animated = !shown && show; @@ -665,6 +541,323 @@ private void updateShowButton(boolean show) { shown = show; } + private class ContentView extends FrameLayout { + public ContentView(Context context) { + super(context); + } + + private Paint paint = new Paint(); + private Path path = new Path(); + private Boolean lastOpen = null; + boolean attached; + SparseArray> viewsGroupedByLines = new SparseArray<>(); + ArrayList lineDrawables = new ArrayList<>(); + ArrayList lineDrawablesTmp = new ArrayList<>(); + ArrayList> unusedArrays = new ArrayList<>(); + ArrayList unusedLineDrawables = new ArrayList<>(); + + @Override + protected void dispatchDraw(Canvas canvas) { + if (!attached) { + return; + } + paint.setColor(getThemedColor(Theme.key_dialogBackground)); + Theme.applyDefaultShadow(paint); + path.reset(); + float y = lastY = getListTop(); + float pad = 0; + if (fromY != null) { + float wasY = y; + y = AndroidUtilities.lerp(fromY, y + containerView.getY(), loadT) - containerView.getY(); + pad = y - wasY; + } + float stickToTop = (1f - MathUtils.clamp((y - containerView.getPaddingTop()) / AndroidUtilities.dp(32), 0, 1)); + y -= stickToTop * containerView.getPaddingTop(); + float r = dp((1f - stickToTop) * 14); + AndroidUtilities.rectTmp.set(getPaddingLeft(), y, getWidth() - getPaddingRight(), getBottom() + r); + path.addRoundRect(AndroidUtilities.rectTmp, r, r, Path.Direction.CW); + canvas.drawPath(path, paint); + + boolean open = stickToTop > .75f; + if (lastOpen == null || open != lastOpen) { + updateLightStatusBar(lastOpen = open); + } + + Theme.dialogs_onlineCirclePaint.setColor(getThemedColor(Theme.key_sheet_scrollUp)); + Theme.dialogs_onlineCirclePaint.setAlpha((int) (MathUtils.clamp(y / (float) AndroidUtilities.dp(20), 0, 1) * Theme.dialogs_onlineCirclePaint.getAlpha())); + int w = AndroidUtilities.dp(36); + y += AndroidUtilities.dp(10); + AndroidUtilities.rectTmp.set((getMeasuredWidth() - w) / 2, y, (getMeasuredWidth() + w) / 2, y + AndroidUtilities.dp(4)); + canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(2), AndroidUtilities.dp(2), Theme.dialogs_onlineCirclePaint); + + shadowView.setVisibility(listView.canScrollVertically(1) || removeButtonView.getVisibility() == View.VISIBLE ? 
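ContentView.dispatchDraw above eases the sheet background as it approaches the top: over the last 32dp of travel the status-bar padding is folded away and the corner radius collapses to zero. A small plain-Java sketch of that easing, with dp sizes passed in as pixel floats (names are illustrative):

final class SheetTopEasing {
    // stickToTop ramps 0 -> 1 over the final 32dp; the padding is removed proportionally
    // and the 14dp corner radius shrinks to 0 as the sheet docks against the status bar.
    static float[] ease(float listTopY, float containerPaddingTop, float px32dp, float px14dp) {
        float stickToTop = 1f - Math.max(0f, Math.min(1f, (listTopY - containerPaddingTop) / px32dp));
        float adjustedY = listTopY - stickToTop * containerPaddingTop;
        float cornerRadius = (1f - stickToTop) * px14dp;
        return new float[]{adjustedY, cornerRadius};
    }
}

The same value drives the light status bar toggle once it passes 0.75.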
View.VISIBLE : View.INVISIBLE); + if (listView != null) { + canvas.save(); + canvas.translate(listView.getLeft(), listView.getTop() + pad); + canvas.clipRect(0, 0, listView.getWidth(), listView.getHeight()); + canvas.saveLayerAlpha(0, 0, listView.getWidth(), listView.getHeight(), (int) (255 * listView.getAlpha()), Canvas.ALL_SAVE_FLAG); + + for (int i = 0; i < viewsGroupedByLines.size(); i++) { + ArrayList arrayList = viewsGroupedByLines.valueAt(i); + arrayList.clear(); + unusedArrays.add(arrayList); + } + viewsGroupedByLines.clear(); + for (int i = 0; i < listView.getChildCount(); ++i) { + View child = listView.getChildAt(i); + if (child instanceof EmojiImageView) { + ((EmojiImageView) child).updatePressedProgress(); + if (animatedEmojiDrawables == null) { + continue; + } + AnimatedEmojiSpan span = ((EmojiImageView) child).span; + if (span == null) { + continue; + } + long documentId = span.getDocumentId(); + AnimatedEmojiDrawable drawable = animatedEmojiDrawables.get(documentId); + if (drawable == null) { + continue; + } + drawable.setColorFilter(Theme.chat_animatedEmojiTextColorFilter); +// drawable.addView(this); + ArrayList arrayList = viewsGroupedByLines.get(child.getTop()); + if (arrayList == null) { + if (!unusedArrays.isEmpty()) { + arrayList = unusedArrays.remove(unusedArrays.size() - 1); + } else { + arrayList = new ArrayList<>(); + } + viewsGroupedByLines.put(child.getTop(), arrayList); + } + arrayList.add((EmojiImageView) child); + } else { + canvas.save(); + canvas.translate(child.getLeft(), child.getTop()); + child.draw(canvas); + canvas.restore(); + } + } + + lineDrawablesTmp.clear(); + lineDrawablesTmp.addAll(lineDrawables); + lineDrawables.clear(); + + long time = System.currentTimeMillis(); + for (int i = 0; i < viewsGroupedByLines.size(); i++) { + ArrayList arrayList = viewsGroupedByLines.valueAt(i); + View firstView = arrayList.get(0); + int position = listView.getChildAdapterPosition(firstView); + DrawingInBackgroundLine drawable = null; + for (int k = 0; k < lineDrawablesTmp.size(); k++) { + if (lineDrawablesTmp.get(k).position == position) { + drawable = lineDrawablesTmp.get(k); + lineDrawablesTmp.remove(k); + break; + } + } + if (drawable == null) { + if (!unusedLineDrawables.isEmpty()) { + drawable = unusedLineDrawables.remove(unusedLineDrawables.size() - 1); + } else { + drawable = new DrawingInBackgroundLine(); + drawable.setLayerNum(7); + } + drawable.position = position; + drawable.onAttachToWindow(); + } + lineDrawables.add(drawable); + drawable.imageViewEmojis = arrayList; + canvas.save(); + canvas.translate(0, firstView.getY() + firstView.getPaddingTop()); + drawable.draw(canvas, time, getMeasuredWidth(), firstView.getMeasuredHeight() - firstView.getPaddingBottom(), 1f); + canvas.restore(); + } + + for (int i = 0; i < lineDrawablesTmp.size(); i++) { + if (unusedLineDrawables.size() < 3) { + unusedLineDrawables.add(lineDrawablesTmp.get(i)); + lineDrawablesTmp.get(i).imageViewEmojis = null; + lineDrawablesTmp.get(i).reset(); + + } else { + lineDrawablesTmp.get(i).onDetachFromWindow(); + } + } + lineDrawablesTmp.clear(); + canvas.restore(); + canvas.restore(); + + if (listView.getAlpha() < 1) { + int cx = getWidth() / 2; + int cy = ((int) y + getHeight()) / 2; + int R = AndroidUtilities.dp(16); + progressDrawable.setAlpha((int) (255 * (1f - listView.getAlpha()))); + progressDrawable.setBounds(cx - R, cy - R, cx + R, cy + R); + progressDrawable.draw(canvas); + invalidate(); + } + } + super.dispatchDraw(canvas); + } + + private AnimatedEmojiSpan[] 
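The drawing loop above batches emoji cells by row before handing them to the background drawers: children are keyed by child.getTop(), the per-row ArrayLists are recycled between frames, and each row is matched to a pooled DrawingInBackgroundLine by its first adapter position. A minimal plain-Java sketch of that pooling and grouping idea, using generic types and a LinkedHashMap in place of the project's SparseArray:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;

// Groups items by a row key (the child's top offset in the real code) while reusing
// the ArrayLists from the previous frame, so steady-state drawing allocates nothing.
final class RowBatcher<V> {
    interface RowKey<T> { int keyOf(T item); }

    private final Map<Integer, ArrayList<V>> rows = new LinkedHashMap<>();
    private final ArrayList<ArrayList<V>> pool = new ArrayList<>();

    Map<Integer, ArrayList<V>> group(Iterable<V> items, RowKey<V> key) {
        for (ArrayList<V> row : rows.values()) {   // recycle last frame's lists
            row.clear();
            pool.add(row);
        }
        rows.clear();
        for (V item : items) {
            int k = key.keyOf(item);
            ArrayList<V> row = rows.get(k);
            if (row == null) {
                row = pool.isEmpty() ? new ArrayList<>() : pool.remove(pool.size() - 1);
                rows.put(k, row);
            }
            row.add(item);
        }
        return rows;
    }
}

Rows that disappear keep at most three of their drawers in unusedLineDrawables; anything beyond that is detached immediately.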
getAnimatedEmojiSpans() { + if (listView == null) { + return new AnimatedEmojiSpan[0]; + } + AnimatedEmojiSpan[] spans = new AnimatedEmojiSpan[listView.getChildCount()]; + for (int i = 0; i < listView.getChildCount(); ++i) { + View child = listView.getChildAt(i); + if (child instanceof EmojiImageView) { + spans[i] = ((EmojiImageView) child).span; + } + } + return spans; + } + + public void updateEmojiDrawables() { + animatedEmojiDrawables = AnimatedEmojiSpan.update(AnimatedEmojiDrawable.CACHE_TYPE_ALERT_PREVIEW, this, getAnimatedEmojiSpans(), animatedEmojiDrawables); + } + + @Override + public boolean dispatchTouchEvent(MotionEvent event) { + if (event.getAction() == MotionEvent.ACTION_DOWN && event.getY() < getListTop() - AndroidUtilities.dp(6)) { + dismiss(); + } + return super.dispatchTouchEvent(event); + } + + class DrawingInBackgroundLine extends DrawingInBackgroundThreadDrawable { + public int position; + ArrayList imageViewEmojis; + ArrayList drawInBackgroundViews = new ArrayList<>(); + + + @Override + public void prepareDraw(long time) { + drawInBackgroundViews.clear(); + for (int i = 0; i < imageViewEmojis.size(); i++) { + EmojiImageView imageView = imageViewEmojis.get(i); + AnimatedEmojiSpan span = imageView.span; + if (span == null) { + continue; + } + AnimatedEmojiDrawable drawable = animatedEmojiDrawables.get(imageView.span.getDocumentId()); + if (drawable == null || drawable.getImageReceiver() == null) { + continue; + } + + drawable.update(time); + imageView.backgroundThreadDrawHolder[threadIndex] = drawable.getImageReceiver().setDrawInBackgroundThread(imageView.backgroundThreadDrawHolder[threadIndex], threadIndex); + imageView.backgroundThreadDrawHolder[threadIndex].time = time; + drawable.setAlpha(255); + AndroidUtilities.rectTmp2.set(imageView.getLeft() + imageView.getPaddingLeft(), imageView.getPaddingTop(), imageView.getRight() - imageView.getPaddingRight(), imageView.getMeasuredHeight() - imageView.getPaddingBottom()); + imageView.backgroundThreadDrawHolder[threadIndex].setBounds(AndroidUtilities.rectTmp2); + drawable.setColorFilter(Theme.chat_animatedEmojiTextColorFilter); + imageView.imageReceiver = drawable.getImageReceiver();; + drawInBackgroundViews.add(imageView); + } + } + + @Override + public void draw(Canvas canvas, long time, int w, int h, float alpha) { + if (imageViewEmojis == null) { + return; + } + boolean drawInUi = imageViewEmojis.size() <= 3 || SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW; + if (!drawInUi) { + for (int i = 0; i < imageViewEmojis.size(); i++) { + EmojiImageView img = imageViewEmojis.get(i); + if (img.pressedProgress != 0 || img.backAnimator != null || img.getTranslationX() != 0 || img.getTranslationY() != 0 || img.getAlpha() != 1) { + drawInUi = true; + break; + } + } + } + if (drawInUi) { + prepareDraw(System.currentTimeMillis()); + drawInUiThread(canvas, alpha); + reset(); + } else { + super.draw(canvas, time, w, h, alpha); + } + } + + @Override + public void drawInBackground(Canvas canvas) { + for (int i = 0; i < drawInBackgroundViews.size(); i++) { + EmojiImageView imageView = drawInBackgroundViews.get(i); + imageView.imageReceiver.draw(canvas, imageView.backgroundThreadDrawHolder[threadIndex]); + } + } + + @Override + protected void drawInUiThread(Canvas canvas, float alpha) { + if (imageViewEmojis != null) { + for (int i = 0; i < imageViewEmojis.size(); i++) { + EmojiImageView imageView = imageViewEmojis.get(i); + AnimatedEmojiSpan span = imageView.span; + if (span == null) { + continue; + } + 
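DrawingInBackgroundLine.draw above decides per frame whether a row can be rendered on the background thread. A plain-Java restatement of that predicate, with illustrative parameter names:

final class RowDrawPolicy {
    // Small rows, low-end devices, or any cell that is pressed, animating back, translated
    // or faded must be drawn synchronously so the per-cell transforms stay in sync.
    static boolean shouldDrawInUiThread(int cellCount, boolean lowPerformanceDevice,
                                        boolean anyCellPressedOrTransformed) {
        return cellCount <= 3 || lowPerformanceDevice || anyCellPressedOrTransformed;
    }
}

When it does fall back, the row calls prepareDraw with the current time, draws via drawInUiThread, and resets, so the background path never runs for that frame.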
AnimatedEmojiDrawable drawable = animatedEmojiDrawables.get(imageView.span.getDocumentId()); + if (drawable == null || drawable.getImageReceiver() == null) { + continue; + } + if (imageView.imageReceiver != null) { + drawable.setAlpha((int) (255 * alpha * imageView.getAlpha())); + float hw = (imageView.getWidth() - imageView.getPaddingLeft() - imageView.getPaddingRight()) / 2f; + float hh = (imageView.getHeight() - imageView.getPaddingTop() - imageView.getPaddingBottom()) / 2f; + float cx = (imageView.getLeft() + imageView.getRight()) / 2f; + float cy = imageView.getPaddingTop() + hh; + float scale = 1f; + if (imageView.pressedProgress != 0) { + scale *= 0.8f + 0.2f * (1f - imageView.pressedProgress); + } + drawable.setBounds( + (int) (cx - hw * imageView.getScaleX() * scale), + (int) (cy - hh * imageView.getScaleY() * scale), + (int) (cx + hw * imageView.getScaleX() * scale), + (int) (cy + hh * imageView.getScaleY() * scale) + ); + drawable.draw(canvas); + } + } + } + } + + @Override + public void onFrameReady() { + super.onFrameReady(); + for (int i = 0; i < drawInBackgroundViews.size(); i++) { + EmojiImageView imageView = drawInBackgroundViews.get(i); + imageView.backgroundThreadDrawHolder[threadIndex].release(); + } + containerView.invalidate(); + } + } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + attached = true; + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + attached = false; + for (int i = 0; i < lineDrawables.size(); i++) { + lineDrawables.get(i).onDetachFromWindow(); + } + for (int i = 0; i < unusedLineDrawables.size(); i++) { + unusedLineDrawables.get(i).onDetachFromWindow(); + } + lineDrawables.clear(); + AnimatedEmojiSpan.release(this, animatedEmojiDrawables); + } + } + protected void onCloseByLink() { } @@ -840,13 +1033,13 @@ private void updateButton() { } } - boolean mePremium = UserConfig.getInstance(currentAccount).isPremium(); +// boolean mePremium = UserConfig.getInstance(currentAccount).isPremium(); ArrayList canInstallPacks = new ArrayList<>(notInstalledPacks); - for (int i = 0; i < canInstallPacks.size(); ++i) { - if (MessageObject.isPremiumEmojiPack(canInstallPacks.get(i)) && !mePremium) { - canInstallPacks.remove(i--); - } - } +// for (int i = 0; i < canInstallPacks.size(); ++i) { +// if (MessageObject.isPremiumEmojiPack(canInstallPacks.get(i)) && !mePremium) { +// canInstallPacks.remove(i--); +// } +// } boolean loadedNow = customEmojiPacks.inputStickerSets != null && allPacks.size() == customEmojiPacks.inputStickerSets.size(); if (!loaded && loadedNow) { @@ -855,6 +1048,20 @@ private void updateButton() { loaded = loadedNow; if (!loaded) { listView.setAlpha(0); + } else if (highlightIndex >= 0) { + int currentPosition = gridLayoutManager.findFirstVisibleItemPosition(); + int position = adapter.getSetHeaderPosition(highlightIndex); + if (Math.abs(currentPosition - position) > (1 + 16 + 1) * 3) { + scrollHelper.setScrollDirection(currentPosition < position ? 
RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN : RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP); + scrollHelper.scrollToPosition(position, AndroidUtilities.displaySize.y / 2 - AndroidUtilities.dp(170), false, true); + } else { + listView.smoothScrollToPosition(position); + } + highlightStartPosition = adapter.getSetHeaderPosition(highlightIndex); + highlightEndPosition = adapter.getSetEndPosition(highlightIndex); + highlightAlpha.set(1, true); + listView.invalidate(); + highlightIndex = -1; } if (!loaded) { @@ -862,11 +1069,11 @@ private void updateButton() { addButtonView.setVisibility(View.GONE); removeButtonView.setVisibility(View.GONE); updateShowButton(false); - } else if (canInstallPacks.size() <= 0 && notInstalledPacks.size() >= 0 && !mePremium || !loaded) { - premiumButtonView.setVisibility(View.VISIBLE); - addButtonView.setVisibility(View.GONE); - removeButtonView.setVisibility(View.GONE); - updateShowButton(true); +// } else if (canInstallPacks.size() <= 0 && notInstalledPacks.size() >= 0 && !mePremium || !loaded) { +// premiumButtonView.setVisibility(View.VISIBLE); +// addButtonView.setVisibility(View.GONE); +// removeButtonView.setVisibility(View.GONE); +// updateShowButton(true); } else { premiumButtonView.setVisibility(View.INVISIBLE); if (canInstallPacks.size() > 0) { @@ -875,7 +1082,7 @@ private void updateButton() { if (canInstallPacks.size() == 1) { addButtonView.setText(LocaleController.formatPluralString("AddManyEmojiCount", canInstallPacks.get(0).documents.size())); } else { - addButtonView.setText(LocaleController.formatPluralString("AddManyEmojiCount", canInstallPacks.size())); + addButtonView.setText(LocaleController.formatPluralString("AddManyEmojiPacksCount", canInstallPacks.size())); } addButtonView.setOnClickListener(ev -> { final int count = canInstallPacks.size(); @@ -948,7 +1155,14 @@ private int getListTop() { @Override public void show() { super.show(); + listView.setAdapter(adapter = new Adapter()); + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.stopAllHeavyOperations, 4); + + customEmojiPacks.start(); + updateButton(); + int currentAccount = fragment == null ? 
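The highlight branch in updateButton above picks between an animated jump and a smooth scroll depending on how far away the target set's header is; the threshold is roughly three sets' worth of positions (header, up to two capped grid rows of 8, separator). A plain-Java sketch of that decision, assuming a span count of 8:

final class HighlightScrollPolicy {
    // header (1) + capped items (8 spans * 2 rows = 16) + separator (1), times three sets
    static boolean useAnimatedJump(int currentPosition, int targetPosition) {
        final int aboutThreeSets = (1 + 16 + 1) * 3;
        return Math.abs(currentPosition - targetPosition) > aboutThreeSets;
    }
}

Once the scroll is queued, the set's header and end positions are stored and highlightAlpha is snapped to 1, so the band is fully visible before it starts to fade.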
UserConfig.selectedAccount : fragment.getCurrentAccount(); + MediaDataController.getInstance(currentAccount).checkStickers(MediaDataController.TYPE_EMOJIPACKS); } @Override @@ -977,6 +1191,12 @@ public SeparatorView(Context context) { private class Adapter extends RecyclerListView.SelectionAdapter { + private final int VIEW_TYPE_PADDING = 0; + private final int VIEW_TYPE_EMOJI = 1; + private final int VIEW_TYPE_HEADER = 2; + private final int VIEW_TYPE_TEXT = 3; + private final int VIEW_TYPE_SEPARATOR = 4; + @Override public boolean isEnabled(RecyclerView.ViewHolder holder) { return holder.getItemViewType() == 1; @@ -986,15 +1206,15 @@ public boolean isEnabled(RecyclerView.ViewHolder holder) { @Override public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { View view = null; - if (viewType == 0) { + if (viewType == VIEW_TYPE_PADDING) { view = paddingView; - } else if (viewType == 1) { + } else if (viewType == VIEW_TYPE_EMOJI) { view = new EmojiImageView(getContext()); - } else if (viewType == 2) { + } else if (viewType == VIEW_TYPE_HEADER) { view = new EmojiPackHeader(getContext(), customEmojiPacks.data.length <= 1); - } else if (viewType == 3) { + } else if (viewType == VIEW_TYPE_TEXT) { view = new TextView(getContext()); - } else if (viewType == 4) { + } else if (viewType == VIEW_TYPE_SEPARATOR) { view = new SeparatorView(getContext()); } return new RecyclerListView.Holder(view); @@ -1004,14 +1224,14 @@ public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int position) { position--; switch (holder.getItemViewType()) { - case 3: + case VIEW_TYPE_TEXT: TextView textView = (TextView) holder.itemView; textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); textView.setTextColor(getThemedColor(Theme.key_chat_emojiPanelTrendingDescription)); textView.setText(AndroidUtilities.replaceTags(LocaleController.getString("PremiumPreviewEmojiPack", R.string.PremiumPreviewEmojiPack))); textView.setPadding(AndroidUtilities.dp(14), 0, AndroidUtilities.dp(30), AndroidUtilities.dp(14)); break; - case 1: + case VIEW_TYPE_EMOJI: if (hasDescription) { position--; } @@ -1036,11 +1256,16 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi inputStickerSet.id = customEmoji.stickerSet.set.id; inputStickerSet.short_name = customEmoji.stickerSet.set.short_name; inputStickerSet.access_hash = customEmoji.stickerSet.set.access_hash; - view.span = new AnimatedEmojiSpan(customEmoji.documentId, null); + TLRPC.Document document = customEmoji.getDocument(); + if (document != null) { + view.span = new AnimatedEmojiSpan(document, null); + } else { + view.span = new AnimatedEmojiSpan(customEmoji.documentId, null); + } } } break; - case 2: + case VIEW_TYPE_HEADER: if (hasDescription && position > 0) { position--; } @@ -1075,19 +1300,19 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi @Override public int getItemViewType(int position) { if (position == 0) { - return 0; + return VIEW_TYPE_PADDING; } position--; if (hasDescription) { if (position == 1) { - return 3; + return VIEW_TYPE_TEXT; } else if (position > 0) { position--; } } for (int i = 0, j = 0; i < customEmojiPacks.data.length; ++i) { if (position == j) { - return 2; + return VIEW_TYPE_HEADER; } int count = customEmojiPacks.data[i].size(); if (customEmojiPacks.data.length > 1) { @@ -1095,11 +1320,47 @@ public int getItemViewType(int position) { } j += 1 + count; if 
(position == j) { - return 4; + return VIEW_TYPE_SEPARATOR; } j++; } - return 1; + return VIEW_TYPE_EMOJI; + } + + public int getSetHeaderPosition(int setIndex) { + int position = 1; + if (hasDescription) { + position++; + } + for (int i = 0; i < customEmojiPacks.data.length; ++i) { + if (i == setIndex) { + return position; + } + int count = customEmojiPacks.data[i].size(); + if (customEmojiPacks.data.length > 1) { + count = Math.min(gridLayoutManager.getSpanCount() * 2, count); + } + position += 1 + count + 1; + } + return position; + } + + public int getSetEndPosition(int setIndex) { + int position = 1; + if (hasDescription) { + position++; + } + for (int i = 0; i < customEmojiPacks.data.length; ++i) { + int count = customEmojiPacks.data[i].size(); + if (customEmojiPacks.data.length > 1) { + count = Math.min(gridLayoutManager.getSpanCount() * 2, count); + } + if (i == setIndex) { + return position + count + 1; + } + position += 1 + count + 1; + } + return position; } @Override @@ -1164,7 +1425,9 @@ protected void onSend(androidx.collection.LongSparseArray dids, in } } } - private class EmojiImageView extends View { + + public static class EmojiImageView extends View { + public ImageReceiver.BackgroundThreadDrawHolder[] backgroundThreadDrawHolder = new ImageReceiver.BackgroundThreadDrawHolder[DrawingInBackgroundThreadDrawable.THREAD_COUNT]; public ImageReceiver imageReceiver; @@ -1174,6 +1437,18 @@ public EmojiImageView(Context context) { public AnimatedEmojiSpan span; + public TLRPC.Document getDocument() { + TLRPC.Document document = null; + if (span != null) { + document = span.document; + if (document == null) { + long documentId = span.getDocumentId(); + document = AnimatedEmojiDrawable.findDocument(UserConfig.selectedAccount, documentId); + } + } + return document; + } + @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { setPadding(AndroidUtilities.dp(2), AndroidUtilities.dp(2), AndroidUtilities.dp(2), AndroidUtilities.dp(2)); @@ -1197,7 +1472,10 @@ public void setPressed(boolean pressed) { backAnimator = ValueAnimator.ofFloat(pressedProgress, 0); backAnimator.addUpdateListener(animation -> { pressedProgress = (float) animation.getAnimatedValue(); - containerView.invalidate(); + if (getParent() instanceof View) { + ((View) getParent()).invalidate(); + } +// containerView.invalidate(); }); backAnimator.addListener(new AnimatorListenerAdapter() { @Override @@ -1509,6 +1787,7 @@ class EmojiPacksLoader implements NotificationCenter.NotificationCenterDelegate public ArrayList stickerSets; public ArrayList[] data; private int currentAccount; + private boolean started = false; public EmojiPacksLoader(int currentAccount, ArrayList inputStickerSets, TLObject parentObject) { this.currentAccount = currentAccount; @@ -1517,10 +1796,17 @@ public EmojiPacksLoader(int currentAccount, ArrayList inp } this.inputStickerSets = inputStickerSets; this.parentObject = parentObject; + } + + public void start() { + if (started) { + return; + } + started = true; init(); } - private void init() { + public void init() { if ((parentObject instanceof TLRPC.Photo || parentObject instanceof TLRPC.Document) && (this.inputStickerSets == null || this.inputStickerSets.isEmpty())) { data = new ArrayList[2]; putStickerSet(0, null); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiTabsStrip.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiTabsStrip.java index 55d76f9dad..1f8c612ab2 100644 --- 
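getSetHeaderPosition and getSetEndPosition above walk the same layout the adapter exposes: one padding row, an optional description row, then header + items + separator per pack, with the item count capped at two grid rows when more than one pack is shown. A self-contained plain-Java sketch of that arithmetic (class and parameter names are illustrative):

final class PackPositions {
    // position 0 is the padding view; an optional description row follows it
    static int headerPosition(int[] packSizes, int spanCount, boolean hasDescription, int setIndex) {
        int position = 1;
        if (hasDescription) position++;
        for (int i = 0; i < packSizes.length; ++i) {
            if (i == setIndex) return position;
            position += 1 + cappedCount(packSizes, i, spanCount) + 1;  // header + items + separator
        }
        return position;
    }

    static int endPosition(int[] packSizes, int spanCount, boolean hasDescription, int setIndex) {
        int position = 1;
        if (hasDescription) position++;
        for (int i = 0; i < packSizes.length; ++i) {
            int count = cappedCount(packSizes, i, spanCount);
            if (i == setIndex) return position + count + 1;            // separator after the last item
            position += 1 + count + 1;
        }
        return position;
    }

    private static int cappedCount(int[] packSizes, int i, int spanCount) {
        int count = packSizes[i];
        return packSizes.length > 1 ? Math.min(spanCount * 2, count) : count;
    }
}

These are the same offsets getItemViewType walks, which is why the highlight range computed in updateButton lines up with the header and separator rows.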
a/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiTabsStrip.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiTabsStrip.java @@ -9,6 +9,7 @@ import android.graphics.ColorFilter; import android.graphics.Paint; import android.graphics.Path; +import android.graphics.PixelFormat; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.Rect; @@ -18,11 +19,11 @@ import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; -import android.view.animation.OvershootInterpolator; import android.widget.HorizontalScrollView; import android.widget.ImageView; import android.widget.LinearLayout; +import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.core.graphics.ColorUtils; import androidx.core.math.MathUtils; @@ -41,7 +42,6 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.Map; -import java.util.Objects; import tw.nekomimi.nekogram.NekoConfig; @@ -49,41 +49,47 @@ public class EmojiTabsStrip extends ScrollableHorizontalScrollView { private int recentDrawableId = R.drawable.msg_emoji_recent; private static int[] emojiTabsDrawableIds = { - R.drawable.msg_emoji_smiles, - R.drawable.msg_emoji_cat, - R.drawable.msg_emoji_food, - R.drawable.msg_emoji_activities, - R.drawable.msg_emoji_travel, - R.drawable.msg_emoji_objects, - R.drawable.msg_emoji_other, - R.drawable.msg_emoji_flags + R.drawable.msg_emoji_smiles, + R.drawable.msg_emoji_cat, + R.drawable.msg_emoji_food, + R.drawable.msg_emoji_activities, + R.drawable.msg_emoji_travel, + R.drawable.msg_emoji_objects, + R.drawable.msg_emoji_other, + R.drawable.msg_emoji_flags }; private static int[] emojiTabsAnimatedDrawableIds = { - R.raw.msg_emoji_smiles, - R.raw.msg_emoji_cat, - R.raw.msg_emoji_food, - R.raw.msg_emoji_activities, - R.raw.msg_emoji_travel, - R.raw.msg_emoji_objects, - R.raw.msg_emoji_other, - R.raw.msg_emoji_flags + R.raw.msg_emoji_smiles, + R.raw.msg_emoji_cat, + R.raw.msg_emoji_food, + R.raw.msg_emoji_activities, + R.raw.msg_emoji_travel, + R.raw.msg_emoji_objects, + R.raw.msg_emoji_other, + R.raw.msg_emoji_flags }; private int settingsDrawableId = R.drawable.smiles_tab_settings; + private boolean forceTabsShow = !UserConfig.getInstance(UserConfig.selectedAccount).isPremium(); + private boolean showSelected = true; + private AnimatedFloat showSelectedAlpha; + private Theme.ResourcesProvider resourcesProvider; private boolean includeAnimated; - public LinearLayout contentView; + public EmojiTabButton toggleEmojiStickersTab; public EmojiTabButton recentTab; private EmojiTabButton settingsTab; private EmojiTabsView emojiTabs; - private ArrayList emojipackTabs; private HashMap removingViews = new HashMap<>(); + private int packsIndexStart; + private ValueAnimator selectAnimator; private float selectT = 0f; private float selectAnimationT = 0f; private int selected = 0; + private int selectedFullIndex = 0; private int wasIndex = 0; public boolean animateAppear = true; @@ -91,15 +97,15 @@ public class EmojiTabsStrip extends ScrollableHorizontalScrollView { private Runnable onSettingsOpenRunnable; private boolean wasDrawn; private int animatedEmojiCacheType = AnimatedEmojiDrawable.CACHE_TYPE_TAB_STRIP; - + private int currentType; public boolean updateButtonDrawables = true; - public EmojiTabsStrip(Context context, Theme.ResourcesProvider resourcesProvider, boolean includeStandard, boolean includeAnimated, int type, Runnable onSettingsOpen) { super(context); this.includeAnimated = 
includeAnimated; this.resourcesProvider = resourcesProvider; this.onSettingsOpenRunnable = onSettingsOpen; + this.currentType = type; contentView = new LinearLayout(context) { @@ -188,6 +194,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { private RectF to = new RectF(); private RectF rect = new RectF(); private Path path = new Path(); + @Override protected void dispatchDraw(Canvas canvas) { for (Map.Entry entry : removingViews.entrySet()) { @@ -202,6 +209,11 @@ protected void dispatchDraw(Canvas canvas) { } } + if (showSelectedAlpha == null) { + showSelectedAlpha = new AnimatedFloat(this, 350, CubicBezierInterpolator.EASE_OUT_QUINT); + } + float alpha = showSelectedAlpha.set(showSelected ? 1 : 0); + int selectFrom = (int) Math.floor(selectT), selectTo = (int) Math.ceil(selectT); getChildBounds(selectFrom, from); getChildBounds(selectTo, to); @@ -213,11 +225,23 @@ protected void dispatchDraw(Canvas canvas) { rect.set(rect.centerX() - hw, rect.centerY() - hh, rect.centerX() + hw, rect.centerY() + hh); float r = AndroidUtilities.dp(AndroidUtilities.lerp(8f, 16f, isEmojiTabs)); paint.setColor(selectorColor()); + if (forceTabsShow) { + paint.setAlpha((int) (paint.getAlpha() * alpha * (1f - isEmojiTabs * .5f))); + } path.rewind(); path.addRoundRect(rect, r, r, Path.Direction.CW); canvas.drawPath(path, paint); + if (forceTabsShow) { + path.rewind(); + getChildBounds(1, rect); + path.addRoundRect(rect, AndroidUtilities.dpf2(16), AndroidUtilities.dpf2(16), Path.Direction.CW); + paint.setColor(selectorColor()); + paint.setAlpha((int) (paint.getAlpha() * .5f)); + canvas.drawPath(path, paint); + } + if (emojiTabs != null) { path.addCircle(emojiTabs.getLeft() + AndroidUtilities.dp(15), (emojiTabs.getTop() + emojiTabs.getBottom()) / 2f, AndroidUtilities.dp(15), Path.Direction.CW); } @@ -242,10 +266,10 @@ private void getChildBounds(int i, RectF out) { View child = getChildAt(MathUtils.clamp(i, 0, getChildCount() - 1)); out.set(child.getLeft(), child.getTop(), child.getRight(), child.getBottom()); out.set( - out.centerX() - out.width() / 2f * child.getScaleX(), - out.centerY() - out.height() / 2f * child.getScaleY(), - out.centerX() + out.width() / 2f * child.getScaleX(), - out.centerY() + out.height() / 2f * child.getScaleY() + out.centerX() - out.width() / 2f * child.getScaleX(), + out.centerY() - out.height() / 2f * child.getScaleY(), + out.centerX() + out.width() / 2f * child.getScaleX(), + out.centerY() + out.height() / 2f * child.getScaleY() ); // out.offset(recentIsShown ? 
recentTab.getTranslationX() : AndroidUtilities.dp(30 + 3) - recentTab.getTranslationX(), 0); } @@ -256,6 +280,9 @@ private void getChildBounds(int i, RectF out) { setHorizontalScrollBarEnabled(false); addView(contentView); + if (type == SelectAnimatedEmojiDialog.TYPE_AVATAR_CONSTRUCTOR) { + contentView.addView(toggleEmojiStickersTab = new EmojiTabButton(context, R.drawable.msg_emoji_stickers, false, false)); + } if (type == SelectAnimatedEmojiDialog.TYPE_TOPIC_ICON) { recentDrawableId = R.drawable.msg_emoji_smiles; } @@ -271,6 +298,7 @@ private void getChildBounds(int i, RectF out) { contentView.addView(emojiTabs = new EmojiTabsView(context)); emojiTabs.id = "tabs".hashCode(); } + packsIndexStart = contentView.getChildCount(); if (onSettingsOpen != null) { contentView.addView(settingsTab = new EmojiTabButton(context, settingsDrawableId, false, true)); settingsTab.id = "settings".hashCode(); @@ -280,8 +308,14 @@ private void getChildBounds(int i, RectF out) { } } + public void showSelected(boolean show) { + this.showSelected = show; + this.contentView.invalidate(); + } + private boolean recentFirstChange = true; private boolean recentIsShown = true; + public void showRecent(boolean show) { if (recentIsShown == show) { return; @@ -299,6 +333,10 @@ public void showRecent(boolean show) { recentFirstChange = false; } + protected boolean doIncludeFeatured() { + return true; + } + private boolean isFreeEmojiPack(TLRPC.StickerSet set, ArrayList documents) { if (set == null || documents == null) { return false; @@ -329,6 +367,122 @@ private TLRPC.Document getThumbDocument(TLRPC.StickerSet set, ArrayList emojiPacks) { if (!includeAnimated) { return; @@ -359,151 +514,205 @@ public void updateEmojiPacks(ArrayList emojiPacks) { if (NekoConfig.disableTrending.Bool() && !isPremium) { return; } - if (emojipackTabs == null) { - emojipackTabs = new ArrayList<>(); - } - boolean first = emojipackTabs.size() == 0 && emojiPacks.size() > 0 && appearCount != emojiPacks.size() && wasDrawn; + int childCount = contentView.getChildCount() - packsIndexStart - (settingsTab != null ? 
1 : 0); + boolean first = childCount == 0 && emojiPacks.size() > 0 && appearCount != emojiPacks.size() && wasDrawn; boolean doAppearAnimation = false; // emojipackTabs.size() == 0 && emojiPacks.size() > 0 && appearCount != emojiPacks.size() && wasDrawn; if (appearAnimation != null && appearCount != emojiPacks.size()) { appearAnimation.cancel(); appearAnimation = null; } appearCount = emojiPacks.size(); - for (int i = 0; i < emojipackTabs.size(); ++i) { - EmojiTabButton emojipackTab = emojipackTabs.get(i); - EmojiView.EmojiPack pack = null; - if (emojipackTab != null && emojipackTab.id != null) { - for (int j = 0; j < emojiPacks.size(); ++j) { - EmojiView.EmojiPack p = emojiPacks.get(j); - final int id = Objects.hash(p.set.id, p.featured); - if (id == emojipackTab.id) { - pack = p; - break; - } - } + final boolean includeFeatured = doIncludeFeatured(); + + ArrayList attachedEmojiPacks = new ArrayList<>(); + + for (int i = 0; i < Math.max(emojiPacks.size(), childCount); ++i) { + EmojiTabButton currentPackButton = null; + if (i < childCount) { + currentPackButton = (EmojiTabButton) contentView.getChildAt(i + packsIndexStart); + } + EmojiView.EmojiPack newPack = null; + if (i < emojiPacks.size()) { + newPack = emojiPacks.get(i); } - if (pack == null && emojipackTab != null) { - Rect bounds = new Rect(); - bounds.set(emojipackTab.getLeft(), emojipackTab.getTop(), emojipackTab.getRight(), emojipackTab.getBottom()); - removingViews.put(emojipackTab, bounds); - ValueAnimator anm = ValueAnimator.ofFloat(emojipackTab.getAlpha(), 0f); - anm.addUpdateListener(a -> { - float alpha = (float) a.getAnimatedValue(); - emojipackTab.setAlpha(alpha); - emojipackTab.setScaleX(alpha); - emojipackTab.setScaleY(alpha); - contentView.invalidate(); - }); - anm.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - removingViews.remove(emojipackTab); - contentView.invalidate(); - } - }); - anm.setDuration(200); - anm.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); - anm.start(); - emojipackTabs.remove(i--); - } - contentView.removeView(emojipackTab); - } - for (int i = 0; i < emojiPacks.size(); ++i) { - EmojiView.EmojiPack pack = emojiPacks.get(i); - final int id = Objects.hash(pack.set.id, pack.featured); - EmojiTabButton emojipackTab = null; - for (int j = 0; j < emojipackTabs.size(); ++j) { - EmojiTabButton tab = emojipackTabs.get(j); - if (tab != null && tab.id != null && tab.id == id) { - emojipackTab = tab; - break; + if (newPack == null) { + if (currentPackButton != null) { + contentView.removeView(currentPackButton); } - } - final boolean free = isFreeEmojiPack(pack.set, pack.documents); - AnimatedEmojiDrawable drawable = emojipackTab == null ? null : (AnimatedEmojiDrawable) emojipackTab.getDrawable(); - TLRPC.Document thumbDocument = getThumbDocument(pack.set, pack.documents); - if (thumbDocument != null && (drawable == null || drawable.getDocumentId() != thumbDocument.id)) { - drawable = AnimatedEmojiDrawable.make(UserConfig.selectedAccount, animatedEmojiCacheType, thumbDocument); - } - if (emojipackTab == null) { - emojipackTab = new EmojiTabButton(getContext(), drawable, free, false,false); - emojipackTab.id = id; - if (drawable != null) { - drawable.addView(emojipackTab.imageView); + } else { + final boolean free = isFreeEmojiPack(newPack.set, newPack.documents); + DelayedAnimatedEmojiDrawable drawable = currentPackButton == null ? 
null : (DelayedAnimatedEmojiDrawable) currentPackButton.getDrawable(); + TLRPC.Document thumbDocument = getThumbDocument(newPack.set, newPack.documents); + if (thumbDocument != null && (drawable == null || !drawable.equals(thumbDocument.id))) { + drawable = new DelayedAnimatedEmojiDrawable(UserConfig.selectedAccount, animatedEmojiCacheType, thumbDocument); } - onTabCreate(emojipackTab); - emojipackTabs.add(emojipackTab); - } else if (emojipackTab.getDrawable() != drawable) { - if (emojipackTab.getDrawable() instanceof AnimatedEmojiDrawable) { - ((AnimatedEmojiDrawable) emojipackTab.getDrawable()).removeView(emojipackTab.imageView); + if (currentPackButton == null) { + currentPackButton = new EmojiTabButton(getContext(), drawable, free, false, false); + currentPackButton.setDrawable(drawable); + onTabCreate(currentPackButton); + contentView.addView(currentPackButton, packsIndexStart + i); + } else if (currentPackButton.getDrawable() != drawable) { + currentPackButton.setDrawable(drawable); } - emojipackTab.setDrawable(drawable); - if (drawable != null) { - drawable.addView(emojipackTab.imageView); + if (currentType == SelectAnimatedEmojiDialog.TYPE_AVATAR_CONSTRUCTOR) { + currentPackButton.setLock(null); + } else if (!isPremium && !free) { + currentPackButton.setLock(true); + } else if (!this.isInstalled(newPack)) { + currentPackButton.setLock(false); + } else { + currentPackButton.setLock(null); + } + if (doAppearAnimation && !first) { + currentPackButton.newly = false; } } - if (!isPremium && !free) { - emojipackTab.setLock(true); - } else if (!this.isInstalled(pack)) { - emojipackTab.setLock(false); - } else { - emojipackTab.setLock(null); - } - if (doAppearAnimation && !first) { - emojipackTab.newly = false; - } - if (emojipackTab.getParent() instanceof ViewGroup) { - ((ViewGroup) emojipackTab.getParent()).removeView(emojipackTab); - } - contentView.addView(emojipackTab); } + +// for (int i = 0; i < emojipackTabs.size(); ++i) { +// EmojiTabButton emojipackTab = emojipackTabs.get(i); +// EmojiView.EmojiPack pack = null; +// if (emojipackTab != null && emojipackTab.id != null) { +// for (int j = 0; j < emojiPacks.size(); ++j) { +// EmojiView.EmojiPack p = emojiPacks.get(j); +// if (!includeFeatured && p.featured) { +// continue; +// } +// final int id = Objects.hash(p.set.id, p.featured); +// if (id == emojipackTab.id) { +// pack = p; +// break; +// } +// } +// } +// +// if (pack == null && emojipackTab != null) { +// Rect bounds = new Rect(); +// bounds.set(emojipackTab.getLeft(), emojipackTab.getTop(), emojipackTab.getRight(), emojipackTab.getBottom()); +// removingViews.put(emojipackTab, bounds); +// ValueAnimator anm = ValueAnimator.ofFloat(emojipackTab.getAlpha(), 0f); +// anm.addUpdateListener(a -> { +// float alpha = (float) a.getAnimatedValue(); +// emojipackTab.setAlpha(alpha); +// emojipackTab.setScaleX(alpha); +// emojipackTab.setScaleY(alpha); +// contentView.invalidate(); +// }); +// anm.addListener(new AnimatorListenerAdapter() { +// @Override +// public void onAnimationEnd(Animator animation) { +// removingViews.remove(emojipackTab); +// contentView.invalidate(); +// } +// }); +// anm.setDuration(200); +// anm.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); +// anm.start(); +// emojipackTabs.remove(i--); +// } +// if (emojipackTab != null) { +// emojipackTab.keepAttached = true; +// attachedEmojiPacks.add(emojipackTab); +// } +// contentView.removeView(emojipackTab); +// } +// for (int i = 0; i < emojiPacks.size(); ++i) { +// EmojiView.EmojiPack pack = 
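The rewritten updateEmojiPacks above no longer keeps a separate emojipackTabs list; it reconciles the pack list against the tab views sitting in contentView from packsIndexStart onward, creating, updating or removing one button per index. A generic plain-Java sketch of that reconcile-by-index pattern (the Ops callbacks stand in for the real create/setDrawable/removeView calls and are not project API):

import java.util.List;

final class TabReconciler {
    interface Ops<P, V> {
        V create(P pack);
        void update(V view, P pack);
        void remove(V view);
    }

    static <P, V> void sync(List<P> packs, List<V> views, Ops<P, V> ops) {
        for (int i = 0; i < Math.max(packs.size(), views.size()); ++i) {
            V view = i < views.size() ? views.get(i) : null;
            P pack = i < packs.size() ? packs.get(i) : null;
            if (pack == null) {
                if (view != null) {
                    ops.remove(view);            // stale tab: drop it
                    views.remove(i--);
                }
            } else if (view == null) {
                views.add(ops.create(pack));     // new pack: append a tab
            } else {
                ops.update(view, pack);          // existing tab: refresh drawable/lock
            }
        }
    }
}

Lock badges are then decided per pack: hidden in the avatar constructor, "premium" for paid packs when the user is not premium, and "add" when the pack is simply not installed.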
emojiPacks.get(i); +// if (!includeFeatured && pack.featured) { +// continue; +// } +// final int id = Objects.hash(pack.set.id, pack.featured); +// EmojiTabButton emojipackTab = null; +// for (int j = 0; j < emojipackTabs.size(); ++j) { +// EmojiTabButton tab = emojipackTabs.get(j); +// if (tab != null && tab.id != null && tab.id == id) { +// emojipackTab = tab; +// break; +// } +// } +// final boolean free = isFreeEmojiPack(pack.set, pack.documents); +// DelayedAnimatedEmojiDrawable drawable = emojipackTab == null ? null : (DelayedAnimatedEmojiDrawable) emojipackTab.getDrawable(); +// TLRPC.Document thumbDocument = getThumbDocument(pack.set, pack.documents); +// if (thumbDocument != null && (drawable == null || !drawable.equals(thumbDocument.id))) { +// drawable = new DelayedAnimatedEmojiDrawable(UserConfig.selectedAccount, animatedEmojiCacheType, thumbDocument); +// } +// if (emojipackTab == null) { +// emojipackTab = new EmojiTabButton(getContext(), drawable, free, false, false); +// emojipackTab.id = id; +// emojipackTab.setDrawable(drawable); +// onTabCreate(emojipackTab); +// emojipackTabs.add(emojipackTab); +// } else if (emojipackTab.getDrawable() != drawable) { +// emojipackTab.setDrawable(drawable); +// } +// if (currentType == SelectAnimatedEmojiDialog.TYPE_AVATAR_CONSTRUCTOR) { +// emojipackTab.setLock(null); +// } else if (!isPremium && !free) { +// emojipackTab.setLock(true); +// } else if (!this.isInstalled(pack)) { +// emojipackTab.setLock(false); +// } else { +// emojipackTab.setLock(null); +// } +// if (doAppearAnimation && !first) { +// emojipackTab.newly = false; +// } +// if (emojipackTab.getParent() instanceof ViewGroup) { +// ((ViewGroup) emojipackTab.getParent()).removeView(emojipackTab); +// } +// contentView.addView(emojipackTab); +// } if (settingsTab != null) { settingsTab.bringToFront(); if (settingsTab.getAlpha() < 1) { settingsTab.animate().alpha(1f).setDuration(200).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); } } - if (doAppearAnimation) { - if (emojipackTabs != null) { - for (int i = 0; i < emojipackTabs.size(); ++i) { - emojipackTabs.get(i).setScaleX(0); - emojipackTabs.get(i).setScaleY(0); - } - } - appearAnimation = ValueAnimator.ofFloat(0, 1); - final OvershootInterpolator innerInterpolator = new OvershootInterpolator(3f); - appearAnimation.addUpdateListener(anm -> { - if (emojipackTabs == null) { - return; - } - final float t = (float) anm.getAnimatedValue(); - final int count = emojipackTabs.size(); - final float dur = 1f / count * 4.5f; - for (int i = 0; i < count; ++i) { - final float off = i / (float) count * (1f - dur); - final float T = MathUtils.clamp((t - off) / dur, 0, 1); - final float scale = innerInterpolator.getInterpolation(T); - emojipackTabs.get(i).setScaleX(scale); - emojipackTabs.get(i).setScaleY(scale); - } - }); - appearAnimation.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationCancel(Animator animation) { - if (emojipackTabs == null) { - return; - } - for (int i = 0; i < emojipackTabs.size(); ++i) { - emojipackTabs.get(i).setScaleX(1); - emojipackTabs.get(i).setScaleY(1); - } - } - }); - appearAnimation.setStartDelay(150); - appearAnimation.setDuration((emojipackTabs == null ? 
0 : emojipackTabs.size()) * 75L); - appearAnimation.setInterpolator(CubicBezierInterpolator.EASE_OUT); - appearAnimation.start(); +// if (doAppearAnimation) { +// if (emojipackTabs != null) { +// for (int i = 0; i < emojipackTabs.size(); ++i) { +// emojipackTabs.get(i).setScaleX(0); +// emojipackTabs.get(i).setScaleY(0); +// } +// } +// appearAnimation = ValueAnimator.ofFloat(0, 1); +// final OvershootInterpolator innerInterpolator = new OvershootInterpolator(3f); +// appearAnimation.addUpdateListener(anm -> { +// if (emojipackTabs == null) { +// return; +// } +// final float t = (float) anm.getAnimatedValue(); +// final int count = emojipackTabs.size(); +// final float dur = 1f / count * 4.5f; +// for (int i = 0; i < count; ++i) { +// final float off = i / (float) count * (1f - dur); +// final float T = MathUtils.clamp((t - off) / dur, 0, 1); +// final float scale = innerInterpolator.getInterpolation(T); +// emojipackTabs.get(i).setScaleX(scale); +// emojipackTabs.get(i).setScaleY(scale); +// } +// }); +// appearAnimation.addListener(new AnimatorListenerAdapter() { +// @Override +// public void onAnimationCancel(Animator animation) { +// if (emojipackTabs == null) { +// return; +// } +// for (int i = 0; i < emojipackTabs.size(); ++i) { +// emojipackTabs.get(i).setScaleX(1); +// emojipackTabs.get(i).setScaleY(1); +// } +// } +// }); +// appearAnimation.setStartDelay(150); +// appearAnimation.setDuration((emojipackTabs == null ? 0 : emojipackTabs.size()) * 75L); +// appearAnimation.setInterpolator(CubicBezierInterpolator.EASE_OUT); +// appearAnimation.start(); +// } + for (int i = 0; i < attachedEmojiPacks.size(); i++) { + attachedEmojiPacks.get(i).keepAttached = false; + attachedEmojiPacks.get(i).updateAttachState(); } updateClickListeners(); } @@ -541,6 +750,7 @@ protected boolean onTabClick(int index) { } private float paddingLeftDp = 5 + 6; + public void setPaddingLeft(float paddingLeftDp) { this.paddingLeftDp = paddingLeftDp; } @@ -563,9 +773,13 @@ public void select(int index) { public void select(int index, boolean animated) { animated = animated && !first; - if (!recentIsShown) { + if (toggleEmojiStickersTab != null) { + index++; + } + if (!recentIsShown || toggleEmojiStickersTab != null) { index = Math.max(1, index); } + selectedFullIndex = index; final int wasSelected = selected; for (int i = 0, j = 0; i < contentView.getChildCount(); ++i, ++j) { View child = contentView.getChildAt(i); @@ -608,7 +822,7 @@ public void select(int index, boolean animated) { } if (emojiTabs != null) { - emojiTabs.show(selected == 1, animated); + emojiTabs.show(selected == 1 || forceTabsShow, animated); } View child = contentView.getChildAt(selected); @@ -644,12 +858,15 @@ public class EmojiTabButton extends ViewGroup { public Integer id; public boolean newly; + public boolean keepAttached; private boolean isAnimatedEmoji; private ImageView imageView; private RLottieDrawable lottieDrawable; private PremiumLockIconView lockView; private boolean round, forceSelector; + DelayedAnimatedEmojiDrawable animatedEmoji; + boolean attached; public EmojiTabButton(Context context, int drawableId, int lottieId, boolean roundSelector, boolean forceSelector) { super(context); @@ -750,21 +967,61 @@ public void setImageDrawable(@Nullable Drawable drawable) { @Override protected void dispatchDraw(Canvas canvas) { super.dispatchDraw(canvas); - if (lottieDrawable != null) { + if (lottieDrawable != null && wasVisible) { lottieDrawable.draw(canvas); } } + @Override + protected boolean drawChild(Canvas canvas, View child, 
long drawingTime) { + if (!wasVisible) { + return true; + } + return super.drawChild(canvas, child, drawingTime); + } + + @Override + protected void onDraw(Canvas canvas) { + if (!wasVisible) { + return; + } + super.onDraw(canvas); + } + @Override public boolean performClick() { -// if (lottieDrawable != null) { -// lottieDrawable.setProgress(0); -// AndroidUtilities.runOnUIThread(() -> lottieDrawable.start(), 75); -// } + playAnimation(); return super.performClick(); } + private void playAnimation() { + if (animatedEmoji != null && animatedEmoji.drawable != null) { + ImageReceiver imageReceiver = animatedEmoji.drawable.getImageReceiver(); + if (imageReceiver != null) { + if (imageReceiver.getAnimation() != null) { + imageReceiver.getAnimation().seekTo(0, true); + } + imageReceiver.startAnimation(); + } + } + } + + private void stopAnimation() { + if (animatedEmoji != null && animatedEmoji.drawable != null) { + ImageReceiver imageReceiver = animatedEmoji.drawable.getImageReceiver(); + if (imageReceiver != null) { + if (imageReceiver.getLottieAnimation() != null) { + imageReceiver.getLottieAnimation().setCurrentFrame(0); + imageReceiver.getLottieAnimation().stop(); + } else if (imageReceiver.getAnimation() != null) { + imageReceiver.getAnimation().stop(); + } + } + } + } + private boolean wasVisible; + public void updateVisibilityInbounds(boolean visible, boolean ignore) { if (!wasVisible && visible) { if (lottieDrawable != null && !lottieDrawable.isRunning() && !ignore) { @@ -772,7 +1029,34 @@ public void updateVisibilityInbounds(boolean visible, boolean ignore) { lottieDrawable.start(); } } - wasVisible = visible; + if (wasVisible != visible) { + wasVisible = visible; + if (visible) { + invalidate(); + if (lockView != null) { + lockView.invalidate(); + } + if (imageView != null && imageView.getDrawable() instanceof DelayedAnimatedEmojiDrawable) { + ((DelayedAnimatedEmojiDrawable) imageView.getDrawable()).load(); + } + initLock(); + if (imageView != null) { + imageView.invalidate(); + } + } else { + stopAnimation(); + } + updateAttachState(); + } + } + + private void initLock() { + if (lockView != null && animatedEmoji != null && animatedEmoji.drawable != null) { + ImageReceiver imageReceiver = animatedEmoji.drawable.getImageReceiver(); + if (imageReceiver != null) { + lockView.setImageReceiver(imageReceiver); + } + } } public void setLock(Boolean lock) { @@ -795,6 +1079,7 @@ public void setLock(Boolean lock) { private float lockT; private ValueAnimator lockAnimator; + private void updateLock(boolean enable) { if (lockAnimator != null) { lockAnimator.cancel(); @@ -838,14 +1123,14 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { setMeasuredDimension(AndroidUtilities.dp(30), AndroidUtilities.dp(30)); if (imageView != null) { imageView.measure( - MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(24), MeasureSpec.EXACTLY), - MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(24), MeasureSpec.EXACTLY) + MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(24), MeasureSpec.EXACTLY), + MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(24), MeasureSpec.EXACTLY) ); } if (lockView != null) { lockView.measure( - MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(12), MeasureSpec.EXACTLY), - MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(12), MeasureSpec.EXACTLY) + MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(12), MeasureSpec.EXACTLY), + MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(12), MeasureSpec.EXACTLY) ); } } @@ -866,20 +1151,56 @@ public Drawable getDrawable() { } public void 
setDrawable(Drawable drawable) { - if (lockView != null && drawable instanceof AnimatedEmojiDrawable) { - ImageReceiver imageReceiver = ((AnimatedEmojiDrawable) drawable).getImageReceiver(); - if (imageReceiver != null) { - lockView.setImageReceiver(imageReceiver); + DelayedAnimatedEmojiDrawable newEmoji = null; + if (drawable instanceof DelayedAnimatedEmojiDrawable) { + newEmoji = (DelayedAnimatedEmojiDrawable) drawable; + } + if (animatedEmoji != newEmoji) { + if (animatedEmoji != null && attached && wasVisible) { + animatedEmoji.removeView(); + } + animatedEmoji = newEmoji; + if (animatedEmoji != null && attached && wasVisible) { + animatedEmoji.updateView(imageView); + } + if (wasVisible) { + animatedEmoji.load(); } + initLock(); } if (imageView != null) { imageView.setImageDrawable(drawable); } } + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + attached = true; + updateAttachState(); + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + attached = false; + updateAttachState(); + } + + private void updateAttachState() { + if (animatedEmoji != null) { + if ((keepAttached || attached) && wasVisible) { + animatedEmoji.updateView(imageView); + } else { + animatedEmoji.removeView(); + } + } + } + private float selectT; private boolean selected; private ValueAnimator selectAnimator; + public void updateSelect(boolean selected, boolean animated) { if (imageView != null && imageView.getDrawable() == null) { return; @@ -893,6 +1214,10 @@ public void updateSelect(boolean selected, boolean animated) { selectAnimator = null; } + if (!selected) { + stopAnimation(); + } + if (animated) { selectAnimator = ValueAnimator.ofFloat(selectT, selected ? 1f : 0f); selectAnimator.addUpdateListener(a -> { @@ -925,11 +1250,11 @@ public void onAnimationEnd(Animator animation) { public void updateColor() { Theme.setSelectorDrawableColor(getBackground(), selectorColor(), false); setColor( - ColorUtils.blendARGB( - Theme.getColor(Theme.key_chat_emojiPanelIcon, resourcesProvider), - Theme.getColor(Theme.key_chat_emojiPanelIconSelected, resourcesProvider), - selectT - ) + ColorUtils.blendARGB( + Theme.getColor(Theme.key_chat_emojiPanelIcon, resourcesProvider), + Theme.getColor(Theme.key_chat_emojiPanelIconSelected, resourcesProvider), + selectT + ) ); } @@ -949,8 +1274,6 @@ private void setColor(int color) { private class EmojiTabsView extends ScrollableHorizontalScrollView { public int id; - private LinearLayout contentView; - public EmojiTabsView(Context context) { super(context); setSmoothScrollingEnabled(true); @@ -978,8 +1301,8 @@ protected void onLayout(boolean changed, int l, int t, int r, int b) { @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure( - Math.max(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp((30 + 2) * contentView.getChildCount()), MeasureSpec.EXACTLY)), - heightMeasureSpec + Math.max(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp((30 + 2) * contentView.getChildCount()), MeasureSpec.EXACTLY)), + heightMeasureSpec ); } }; @@ -1000,38 +1323,16 @@ public boolean onTouchEvent(MotionEvent ev) { @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure( - MeasureSpec.makeMeasureSpec(AndroidUtilities.lerp(AndroidUtilities.dp(30), maxWidth(), showT), MeasureSpec.EXACTLY), - MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(30), MeasureSpec.EXACTLY) + 
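setDrawable and the attach callbacks above funnel into updateAttachState, which only binds the delayed emoji drawable to the image view while the button is attached to a window (or deliberately kept attached across a list update) and visible inside the scroll viewport. A one-method plain-Java restatement of that gate, with illustrative names:

final class TabDrawableBinding {
    // True when the drawable should hold a reference to the tab's image view; otherwise
    // it is released so off-screen tabs don't keep animations and bitmaps alive.
    static boolean shouldBind(boolean attachedToWindow, boolean keepAttached, boolean visibleInBounds) {
        return (keepAttached || attachedToWindow) && visibleInBounds;
    }
}

The same visibility flag gates load(): the delayed drawable only starts fetching its document once the tab has actually scrolled into view.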
MeasureSpec.makeMeasureSpec(AndroidUtilities.lerp(AndroidUtilities.dp(30), maxWidth(), showT), MeasureSpec.EXACTLY), + MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(30), MeasureSpec.EXACTLY) ); } public int maxWidth() { +// return AndroidUtilities.dp((30 + 2) * (clip() ? Math.min(5.7f, contentView.getChildCount()) : contentView.getChildCount())); return AndroidUtilities.dp((30 + 2) * Math.min(5.7f, contentView.getChildCount())); } - private boolean touching = false; - - @Override - protected void onScrollChanged(int l, int t, int oldl, int oldt) { - super.onScrollChanged(l, t, oldl, oldt); - if (Math.abs(t - oldt) < 2 || t >= getMeasuredHeight() || t == 0) { - if (!touching) { - EmojiTabsStrip.this.requestDisallowInterceptTouchEvent(false); - } - } - updateButtonsVisibility(); - } - - private void updateButtonsVisibility() { - final int count = contentView.getChildCount(); - for (int i = 0; i < count; ++i) { - View child = contentView.getChildAt(i); - if (child instanceof EmojiTabButton) { - ((EmojiTabButton) child).updateVisibilityInbounds(child.getRight() - getScrollX() > 0 && child.getLeft() - getScrollX() < getMeasuredWidth(), scrollingAnimation && !(showAnimator != null && showAnimator.isRunning())); - } - } - } - private void intercept(MotionEvent ev) { if (shown && !scrollingAnimation) { switch (ev.getAction()) { @@ -1056,9 +1357,9 @@ public boolean onTouchEvent(MotionEvent ev) { return super.onTouchEvent(ev); } - private boolean shown = false; - private float showT = 0f; - private ValueAnimator showAnimator; + private boolean shown = forceTabsShow; + private float showT = forceTabsShow ? 1f : 0f; + public void show(boolean show, boolean animated) { if (show == shown) { return; @@ -1095,11 +1396,17 @@ public void show(boolean show, boolean animated) { } class ScrollableHorizontalScrollView extends HorizontalScrollView { + + boolean touching; + public LinearLayout contentView; + ValueAnimator showAnimator; + public ScrollableHorizontalScrollView(Context context) { super(context); } protected boolean scrollingAnimation; + public boolean isScrolling() { return scrollingAnimation; } @@ -1124,6 +1431,7 @@ public boolean scrollToVisible(int left, int right) { private int scrollingTo = -1; private ValueAnimator scrollAnimator; + public void scrollTo(int x) { if (scrollingTo == x) { return; @@ -1159,4 +1467,31 @@ public void onAnimationStart(Animator animation) { public void resetScrollTo() { scrollingTo = -1; } + + @Override + protected void onLayout(boolean changed, int l, int t, int r, int b) { + super.onLayout(changed, l, t, r, b); + updateButtonsVisibility(); + } + + @Override + protected void onScrollChanged(int l, int t, int oldl, int oldt) { + super.onScrollChanged(l, t, oldl, oldt); + if (Math.abs(t - oldt) < 2 || t >= getMeasuredHeight() || t == 0) { + if (!touching) { + requestDisallowInterceptTouchEvent(false); + } + } + updateButtonsVisibility(); + } + + void updateButtonsVisibility() { + final int count = contentView.getChildCount(); + for (int i = 0; i < count; ++i) { + View child = contentView.getChildAt(i); + if (child instanceof EmojiTabsStrip.EmojiTabButton) { + ((EmojiTabsStrip.EmojiTabButton) child).updateVisibilityInbounds(child.getRight() - getScrollX() > 0 && child.getLeft() - getScrollX() < getMeasuredWidth(), scrollingAnimation && !(showAnimator != null && showAnimator.isRunning())); + } + } + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiView.java index 
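updateButtonsVisibility, now hoisted into ScrollableHorizontalScrollView in the hunk above, uses a simple overlap test against the horizontal scroll offset. A plain-Java sketch of that test (parameter names are illustrative):

final class TabVisibility {
    // A tab is "in bounds" while any part of it overlaps the viewport of the
    // horizontally scrolled container (coordinates are relative to the content view).
    static boolean isVisibleInBounds(int childLeft, int childRight, int scrollX, int viewportWidth) {
        return childRight - scrollX > 0 && childLeft - scrollX < viewportWidth;
    }
}

The second argument passed to updateVisibilityInbounds suppresses the intro animation while a programmatic scroll or the show animation is still running.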
77e55a9c48..116a3dcd03 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiView.java @@ -8,6 +8,8 @@ package org.telegram.ui.Components; +import static org.telegram.messenger.AndroidUtilities.dp; + import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.animation.AnimatorSet; @@ -38,9 +40,12 @@ import android.os.Bundle; import android.os.SystemClock; import android.text.Editable; +import android.text.Spannable; import android.text.SpannableStringBuilder; +import android.text.Spanned; import android.text.TextUtils; import android.text.TextWatcher; +import android.util.Log; import android.util.LongSparseArray; import android.util.SparseArray; import android.util.SparseIntArray; @@ -93,6 +98,7 @@ import org.telegram.messenger.FileLog; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.MessageObject; @@ -103,6 +109,7 @@ import org.telegram.messenger.SharedConfig; import org.telegram.messenger.SvgHelper; import org.telegram.messenger.UserConfig; +import org.telegram.messenger.UserObject; import org.telegram.messenger.Utilities; import org.telegram.messenger.browser.Browser; import org.telegram.tgnet.ConnectionsManager; @@ -125,6 +132,7 @@ import org.telegram.ui.Components.Premium.PremiumButtonView; import org.telegram.ui.Components.Premium.PremiumGradient; import org.telegram.ui.ContentPreviewViewer; +import org.telegram.ui.StickersActivity; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; @@ -133,6 +141,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Objects; @@ -148,6 +157,7 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific private ArrayList allTabs = new ArrayList<>(); private ArrayList currentTabs = new ArrayList<>(); + private boolean ignorePagerScroll; private ViewPager pager; private FrameLayout bottomTabContainer; private FrameLayout bulletinContainer; @@ -294,6 +304,8 @@ public void allowEmojisForNonPremium(boolean allow) { private EmojiViewDelegate delegate; private long currentChatId; + boolean emojiBanned; + boolean stickersBanned; private TLRPC.StickerSetCovered[] primaryInstallingStickerSets = new TLRPC.StickerSetCovered[10]; private LongSparseArray installingStickerSets = new LongSparseArray<>(); @@ -472,13 +484,117 @@ public interface DragListener { } private ContentPreviewViewer.ContentPreviewViewerDelegate contentPreviewViewerDelegate = new ContentPreviewViewer.ContentPreviewViewerDelegate() { + @Override + public boolean can() { + return fragment != null; + } + @Override public void sendSticker(TLRPC.Document sticker, String query, Object parent, boolean notify, int scheduleDate) { delegate.onStickerSelected(null, sticker, query, parent, null, notify, scheduleDate); } @Override - public boolean needSend() { + public void resetTouch() { + if (emojiGridView != null) { + emojiGridView.clearAllTouches(); + } + } + + @Override + public void sendEmoji(TLRPC.Document emoji) { + if (fragment instanceof ChatActivity) { + ((ChatActivity) fragment).sendAnimatedEmoji(emoji, true, 0); + } + } + + @Override + public void 
setAsEmojiStatus(TLRPC.Document document, Integer until) { + TLRPC.EmojiStatus status; + if (document == null) { + status = new TLRPC.TL_emojiStatusEmpty(); + } else if (until != null) { + status = new TLRPC.TL_emojiStatusUntil(); + ((TLRPC.TL_emojiStatusUntil) status).document_id = document.id; + ((TLRPC.TL_emojiStatusUntil) status).until = until; + } else { + status = new TLRPC.TL_emojiStatus(); + ((TLRPC.TL_emojiStatus) status).document_id = document.id; + } + TLRPC.User user = UserConfig.getInstance(UserConfig.selectedAccount).getCurrentUser(); + final TLRPC.EmojiStatus previousEmojiStatus = user == null ? new TLRPC.TL_emojiStatusEmpty() : user.emoji_status; + MessagesController.getInstance(currentAccount).updateEmojiStatus(status); + + Runnable undoAction = () -> MessagesController.getInstance(currentAccount).updateEmojiStatus(previousEmojiStatus); + if (document == null) { + final Bulletin.SimpleLayout layout = new Bulletin.SimpleLayout(getContext(), resourcesProvider); + layout.textView.setText(LocaleController.getString("RemoveStatusInfo", R.string.RemoveStatusInfo)); + layout.imageView.setImageResource(R.drawable.msg_settings_premium); + layout.imageView.setScaleX(.8f); + layout.imageView.setScaleY(.8f); + layout.imageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chats_verifiedBackground, resourcesProvider), PorterDuff.Mode.MULTIPLY)); + Bulletin.UndoButton undoButton = new Bulletin.UndoButton(getContext(), true, resourcesProvider); + undoButton.setUndoAction(undoAction); + layout.setButton(undoButton); + if (fragment != null) { + Bulletin.make(fragment, layout, Bulletin.DURATION_SHORT).show(); + } else { + Bulletin.make(bulletinContainer, layout, Bulletin.DURATION_SHORT).show(); + } + } else { + BulletinFactory factory = fragment != null ? BulletinFactory.of(fragment) : BulletinFactory.of(bulletinContainer, resourcesProvider); + factory.createEmojiBulletin(document, LocaleController.getString("SetAsEmojiStatusInfo", R.string.SetAsEmojiStatusInfo), LocaleController.getString("Undo", R.string.Undo), undoAction).show(); + } + } + + @Override + public void copyEmoji(TLRPC.Document document) { + Spannable spannable = SpannableStringBuilder.valueOf(MessageObject.findAnimatedEmojiEmoticon(document)); + spannable.setSpan(new AnimatedEmojiSpan(document, null), 0, spannable.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + if (AndroidUtilities.addToClipboard(spannable)) { + BulletinFactory factory = fragment != null ? 
BulletinFactory.of(fragment) : BulletinFactory.of(bulletinContainer, resourcesProvider); + factory.createCopyBulletin(LocaleController.getString("EmojiCopied", R.string.EmojiCopied)).show(); + } + } + + @Override + public boolean needCopy() { + return true; + } + + @Override + public boolean needRemoveFromRecent(TLRPC.Document document) { + return document != null && Emoji.recentEmoji.contains("animated_" + document.id); + } + + @Override + public void removeFromRecent(TLRPC.Document document) { + if (document != null) { + Emoji.removeRecentEmoji("animated_" + document.id); + if (emojiAdapter != null) { + emojiAdapter.notifyDataSetChanged(); + } + } + } + + @Override + public Boolean canSetAsStatus(TLRPC.Document document) { + if (!UserConfig.getInstance(UserConfig.selectedAccount).isPremium()) { + return null; + } + TLRPC.User user = UserConfig.getInstance(UserConfig.selectedAccount).getCurrentUser(); + if (user == null) { + return null; + } + Long emojiStatusId = UserObject.getEmojiStatusDocumentId(user); + return document != null && (emojiStatusId == null || emojiStatusId != document.id); + } + + @Override + public boolean needSend(int contentType) { + if (contentType == ContentPreviewViewer.CONTENT_TYPE_EMOJI) { + return fragment instanceof ChatActivity && ((ChatActivity) fragment).canSendMessage() && (UserConfig.getInstance(UserConfig.selectedAccount).isPremium() || ((ChatActivity) fragment).getCurrentUser() != null && UserObject.isUserSelf(((ChatActivity) fragment).getCurrentUser())); + } return true; } @@ -561,55 +677,81 @@ public void setEnabled(boolean enabled) { private class SearchField extends FrameLayout { - private View searchBackground; - private ImageView searchIconImageView; - private ImageView clearSearchImageView; - private CloseProgressDrawable2 progressDrawable; + private int type; + private ImageView searchImageView; + private SearchStateDrawable searchStateDrawable; private EditTextBoldCursor searchEditText; private View shadowView; private View backgroundView; + private ImageView clear; + private FrameLayout box; private AnimatorSet shadowAnimator; + private StickerCategoriesListView categoriesListView; + private FrameLayout inputBox; + private View inputBoxGradient; + private StickerCategoriesListView.EmojiCategory recent; + private StickerCategoriesListView.EmojiCategory trending; + + @SuppressLint("ClickableViewAccessibility") public SearchField(Context context, int type) { super(context); + this.type = type; shadowView = new View(context); shadowView.setAlpha(0.0f); shadowView.setTag(1); shadowView.setBackgroundColor(getThemedColor(Theme.key_chat_emojiPanelShadowLine)); - addView(shadowView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, AndroidUtilities.getShadowHeight(), Gravity.BOTTOM | Gravity.LEFT)); + addView(shadowView, new LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, AndroidUtilities.getShadowHeight(), Gravity.BOTTOM | Gravity.LEFT)); backgroundView = new View(context); backgroundView.setBackgroundColor(getThemedColor(Theme.key_chat_emojiPanelBackground)); - addView(backgroundView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, searchFieldHeight)); + addView(backgroundView, new LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, searchFieldHeight)); - searchBackground = new View(context); - searchBackground.setBackgroundDrawable(Theme.createRoundRectDrawable(AndroidUtilities.dp(18), getThemedColor(Theme.key_chat_emojiSearchBackground))); - addView(searchBackground, 
LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.LEFT | Gravity.TOP, 14, 14, 14, 0)); - - searchIconImageView = new ImageView(context); - searchIconImageView.setScaleType(ImageView.ScaleType.CENTER); - searchIconImageView.setImageResource(R.drawable.smiles_inputsearch); - searchIconImageView.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_emojiSearchIcon), PorterDuff.Mode.SRC_IN)); - addView(searchIconImageView, LayoutHelper.createFrame(36, 36, Gravity.LEFT | Gravity.TOP, 16, 14, 0, 0)); - - clearSearchImageView = new ImageView(context); - clearSearchImageView.setScaleType(ImageView.ScaleType.CENTER); - clearSearchImageView.setImageDrawable(progressDrawable = new CloseProgressDrawable2() { - @Override - protected int getCurrentColor() { - return getThemedColor(Theme.key_chat_emojiSearchIcon); + box = new FrameLayout(context); + box.setBackground(Theme.createRoundRectDrawable(dp(18), getThemedColor(Theme.key_chat_emojiSearchBackground))); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + box.setClipToOutline(true); + box.setOutlineProvider(new ViewOutlineProvider() { + @Override + public void getOutline(View view, Outline outline) { + outline.setRoundRect(0, 0, view.getWidth(), view.getHeight(), (int) dp(18)); + } + }); + } + if (type == 2) { + addView(box, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.FILL, 10, 8, 10, 8)); + } else { + addView(box, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.FILL, 10, 6, 10, 8)); + } + + inputBox = new FrameLayout(context); + box.addView(inputBox, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 40, Gravity.LEFT | Gravity.TOP, 38, 0, 0, 0)); + + searchImageView = new ImageView(context); + searchStateDrawable = new SearchStateDrawable(); + searchStateDrawable.setIconState(SearchStateDrawable.State.STATE_SEARCH, false); + searchStateDrawable.setColor(getThemedColor(Theme.key_chat_emojiSearchIcon)); + searchImageView.setScaleType(ImageView.ScaleType.CENTER); + searchImageView.setImageDrawable(searchStateDrawable); + searchImageView.setOnClickListener(e -> { + if (searchStateDrawable.getIconState() == SearchStateDrawable.State.STATE_BACK) { + searchEditText.setText(""); + search(null, false); + if (categoriesListView != null) { + categoriesListView.scrollToStart(); + categoriesListView.selectCategory(null); + categoriesListView.updateCategoriesShown(true, true); + } + toggleClear(false); + if (searchEditText != null) { + searchEditText.clearAnimation(); + searchEditText.animate().translationX(0).setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT).start(); + } + showInputBoxGradient(false); } }); - progressDrawable.setSide(AndroidUtilities.dp(7)); - clearSearchImageView.setScaleX(0.1f); - clearSearchImageView.setScaleY(0.1f); - clearSearchImageView.setAlpha(0.0f); - addView(clearSearchImageView, LayoutHelper.createFrame(36, 36, Gravity.RIGHT | Gravity.TOP, 14, 14, 14, 0)); - clearSearchImageView.setOnClickListener(v -> { - searchEditText.setText(""); - AndroidUtilities.showKeyboard(searchEditText); - }); + box.addView(searchImageView, LayoutHelper.createFrame(36, 36, Gravity.LEFT | Gravity.TOP)); searchEditText = new EditTextBoldCursor(context) { @Override @@ -637,49 +779,207 @@ public boolean onTouchEvent(MotionEvent event) { searchEditText.setLines(1); searchEditText.setSingleLine(true); searchEditText.setImeOptions(EditorInfo.IME_ACTION_SEARCH | EditorInfo.IME_FLAG_NO_EXTRACT_UI); - if (type == 0) { - 
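The search box above clips its children to a rounded rectangle with setClipToOutline() and a ViewOutlineProvider. A self-contained sketch of that pattern, with the corner radius passed in (the hunk uses dp(18)); the helper name is illustrative.

import android.graphics.Outline;
import android.os.Build;
import android.view.View;
import android.view.ViewOutlineProvider;

// Minimal sketch of the rounded-corner clipping used for the search box:
// on API 21+ the view's outline is a round rect and children are clipped to it.
public final class RoundedClip {
    private RoundedClip() {}

    public static void apply(View view, final float radiusPx) {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
            return; // outline clipping is unavailable below API 21
        }
        view.setOutlineProvider(new ViewOutlineProvider() {
            @Override
            public void getOutline(View v, Outline outline) {
                outline.setRoundRect(0, 0, v.getWidth(), v.getHeight(), radiusPx);
            }
        });
        view.setClipToOutline(true);
    }
}

Applied to the box container, RoundedClip.apply(box, radiusPx) with an 18dp radius in pixels would reproduce the corners above.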
searchEditText.setHint(LocaleController.getString("SearchStickersHint", R.string.SearchStickersHint)); - } else if (type == 1) { - searchEditText.setHint(LocaleController.getString("SearchEmojiHint", R.string.SearchEmojiHint)); - } else if (type == 2) { - searchEditText.setHint(LocaleController.getString("SearchGifsTitle", R.string.SearchGifsTitle)); - } + searchEditText.setHint(LocaleController.getString("Search", R.string.Search)); searchEditText.setCursorColor(getThemedColor(Theme.key_featuredStickers_addedIcon)); - searchEditText.setCursorSize(AndroidUtilities.dp(20)); + searchEditText.setCursorSize(dp(20)); searchEditText.setCursorWidth(1.5f); - addView(searchEditText, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 40, Gravity.LEFT | Gravity.TOP, 16 + 38, 12, 16 + 30, 0)); + searchEditText.setTranslationY(dp(-2)); + inputBox.addView(searchEditText, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 40, Gravity.LEFT | Gravity.TOP, 0, 0, 28, 0)); searchEditText.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { - } @Override public void onTextChanged(CharSequence s, int start, int before, int count) { - } @Override public void afterTextChanged(Editable s) { - boolean show = searchEditText.length() > 0; - boolean showed = clearSearchImageView.getAlpha() != 0; - if (show != showed) { - clearSearchImageView.animate() - .alpha(show ? 1.0f : 0.0f) - .setDuration(150) - .scaleX(show ? 1.0f : 0.1f) - .scaleY(show ? 1.0f : 0.1f) - .start(); - } - if (type == 0) { - stickersSearchGridAdapter.search(searchEditText.getText().toString()); - } else if (type == 1) { - emojiSearchAdapter.search(searchEditText.getText().toString()); - } else if (type == 2) { - gifSearchAdapter.search(searchEditText.getText().toString()); + updateButton(); + final String query = searchEditText.getText().toString(); + search(query, true); + if (categoriesListView != null) { + categoriesListView.selectCategory(null); + categoriesListView.updateCategoriesShown(TextUtils.isEmpty(query), true); } + toggleClear(!TextUtils.isEmpty(query)); + if (searchEditText != null) { + searchEditText.clearAnimation(); + searchEditText.animate().translationX(0).setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT).start(); + } + showInputBoxGradient(false); + } + }); + + inputBoxGradient = new View(context); + Drawable gradientDrawable = context.getResources().getDrawable(R.drawable.gradient_right).mutate(); + gradientDrawable.setColorFilter(new PorterDuffColorFilter(Theme.blendOver(getThemedColor(Theme.key_chat_emojiPanelBackground), getThemedColor(Theme.key_chat_emojiSearchBackground)), PorterDuff.Mode.MULTIPLY)); + inputBoxGradient.setBackground(gradientDrawable); + inputBoxGradient.setAlpha(0f); + inputBox.addView(inputBoxGradient, LayoutHelper.createFrame(18, LayoutHelper.MATCH_PARENT, Gravity.LEFT)); + + clear = new ImageView(context); + clear.setScaleType(ImageView.ScaleType.CENTER); + clear.setImageDrawable(new CloseProgressDrawable2(1.25f) { + { setSide(AndroidUtilities.dp(7)); } + @Override + protected int getCurrentColor() { + return Theme.getColor(Theme.key_chat_emojiSearchIcon, resourcesProvider); } }); + clear.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_listSelector, resourcesProvider), Theme.RIPPLE_MASK_CIRCLE_20DP, AndroidUtilities.dp(15))); + clear.setAlpha(0f); + clear.setOnClickListener(e -> { + searchEditText.setText(""); + search(null, false); + if (categoriesListView != null) { + 
categoriesListView.scrollToStart(); + categoriesListView.selectCategory(null); + categoriesListView.updateCategoriesShown(true, true); + } + toggleClear(false); + if (searchEditText != null) { + searchEditText.clearAnimation(); + searchEditText.animate().translationX(0).setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT).start(); + } + showInputBoxGradient(false); + }); + box.addView(clear, LayoutHelper.createFrame(36, 36, Gravity.RIGHT | Gravity.TOP)); + + if (type != 1 || allowAnimatedEmoji && UserConfig.getInstance(UserConfig.selectedAccount).isPremium()) { + categoriesListView = new StickerCategoriesListView(context, null, StickerCategoriesListView.CategoriesType.DEFAULT, resourcesProvider) { + @Override + public void selectCategory(int categoryIndex) { + super.selectCategory(categoryIndex); + showBottomTab(categoriesListView.getSelectedCategory() == null, true); + if (type == 1 && emojiTabs != null) { + emojiTabs.showSelected(categoriesListView.getSelectedCategory() == null); + } else if (type == 0 && stickersTab != null) { + stickersTab.showSelected(categoriesListView.getSelectedCategory() == null); + } + updateButton(); + } + + @Override + protected boolean isTabIconsAnimationEnabled(boolean loaded) { + return LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_REACTIONS); + } + }; + categoriesListView.setDontOccupyWidth((int) (searchEditText.getPaint().measureText(searchEditText.getHint() + "")) + dp(16)); + categoriesListView.setBackgroundColor(Theme.blendOver(getThemedColor(Theme.key_chat_emojiPanelBackground), getThemedColor(Theme.key_chat_emojiSearchBackground))); + categoriesListView.setOnScrollIntoOccupiedWidth(scrolled -> { + searchEditText.setTranslationX(-Math.max(0, scrolled)); + showInputBoxGradient(scrolled > 0); + updateButton(); + }); + categoriesListView.setOnTouchListener(new OnTouchListener() { + @Override + public boolean onTouch(View v, MotionEvent event) { + if (event.getAction() == MotionEvent.ACTION_DOWN) { + ignorePagerScroll = true; + } else if (event.getAction() == MotionEvent.ACTION_UP || event.getAction() == MotionEvent.ACTION_CANCEL) { + ignorePagerScroll = false; + } + return false; + } + }); + categoriesListView.setOnCategoryClick(category -> { + if (category == recent) { + showInputBoxGradient(false); + categoriesListView.selectCategory(recent); + gifSearchField.searchEditText.setText(""); + gifLayoutManager.scrollToPositionWithOffset(0, 0); + return; + } else if (category == trending) { + showInputBoxGradient(false); + gifSearchField.searchEditText.setText(""); + gifLayoutManager.scrollToPositionWithOffset(gifAdapter.trendingSectionItem, -dp(4)); + categoriesListView.selectCategory(trending); + final ArrayList gifSearchEmojies = MessagesController.getInstance(currentAccount).gifSearchEmojies; + if (!gifSearchEmojies.isEmpty()) { + gifSearchPreloader.preload(gifSearchEmojies.get(0)); + } + return; + } + if (categoriesListView.getSelectedCategory() == category) { + search(null, false); + categoriesListView.selectCategory(null); + } else { + search(category.emojis, false); + categoriesListView.selectCategory(category); + } + }); + box.addView(categoriesListView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.LEFT | Gravity.TOP, 36, 0, 0, 0)); + } + } + + public boolean isCategorySelected() { + return categoriesListView != null && categoriesListView.getSelectedCategory() != null; + } + + public void search(String text, boolean delay) { + if (type == 0) { + stickersSearchGridAdapter.search(text, delay); + } else if (type == 1) { + 
emojiSearchAdapter.search(text, delay); + } else if (type == 2) { + gifSearchAdapter.search(text, delay); + } + } + + private boolean inputBoxShown = false; + + private void showInputBoxGradient(boolean show) { + if (show == inputBoxShown || inputBoxGradient == null) { + return; + } + inputBoxShown = show; + inputBoxGradient.clearAnimation(); + inputBoxGradient.animate().alpha(show ? 1 : 0).setDuration(120).setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT).start(); + } + + public boolean isInProgress() { + return isprogress; + } + + private Runnable delayedToggle; + private void toggleClear(boolean enabled) { + if (enabled) { + if (delayedToggle == null) { + AndroidUtilities.runOnUIThread(delayedToggle = () -> { + AndroidUtilities.updateViewShow(clear, true); + }, 340); + } + } else { + if (delayedToggle != null) { + AndroidUtilities.cancelRunOnUIThread(delayedToggle); + delayedToggle = null; + } + AndroidUtilities.updateViewShow(clear, false); + } + } + + private boolean isprogress; + public void showProgress(boolean progress) { + isprogress = progress; + if (progress) { + searchStateDrawable.setIconState(SearchStateDrawable.State.STATE_PROGRESS); + } else { + updateButton(true); + } + } + + private void updateButton() { + updateButton(false); + } + + private void updateButton(boolean force) { + if (!isInProgress() || searchEditText.length() == 0 && (categoriesListView == null || categoriesListView.getSelectedCategory() == null) || force) { + boolean backButton = searchEditText.length() > 0 || categoriesListView != null && categoriesListView.isCategoriesShown() && (categoriesListView.isScrolledIntoOccupiedWidth() || categoriesListView.getSelectedCategory() != null); + searchStateDrawable.setIconState(backButton ? SearchStateDrawable.State.STATE_BACK : SearchStateDrawable.State.STATE_SEARCH); + isprogress = false; + } } public void hideKeyboard() { @@ -934,65 +1234,48 @@ public boolean onTouchEvent(MotionEvent ev) { } } - private boolean premiumBulletin = true; - private class ImageViewEmoji extends ImageView { - public int position; - - public ImageReceiver imageReceiver; - public AnimatedEmojiDrawable drawable; - public boolean ignoring; - private boolean isRecent; - private AnimatedEmojiSpan span; - private EmojiPack pack; - private ImageReceiver.BackgroundThreadDrawHolder[] backgroundThreadDrawHolder = new ImageReceiver.BackgroundThreadDrawHolder[DrawingInBackgroundThreadDrawable.THREAD_COUNT]; - float pressedProgress; - ValueAnimator backAnimator; - - public ImageViewEmoji(Context context) { - super(context); - setScaleType(ImageView.ScaleType.CENTER); - setBackground(Theme.createRadSelectorDrawable(getThemedColor(Theme.key_listSelector), AndroidUtilities.dp(2), AndroidUtilities.dp(2))); + private void sendEmoji(ImageViewEmoji imageViewEmoji, String override) { + if (imageViewEmoji == null) { + return; } - - private void sendEmoji(String override) { - if (getSpan() != null) { + if (imageViewEmoji.getSpan() != null) { // if (pack != null && pack.set != null && (pack.free || UserConfig.getInstance(currentAccount).isPremium())) { // openEmojiPackAlert(pack.set); // return; // } - if (delegate != null) { - long documentId = getSpan().documentId; - TLRPC.Document document = getSpan().document; - String emoticon = null; - if (document == null) { - for (int i = 0; i < emojipacksProcessed.size(); ++i) { - EmojiPack pack = emojipacksProcessed.get(i); - for (int j = 0; pack.documents != null && j < pack.documents.size(); ++j) { - if (pack.documents.get(j).id == documentId) { - document = 
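toggleClear() above defers showing the clear button by roughly 340 ms and cancels the pending show when the field empties again, so the button does not flicker on the first keystrokes. A sketch of the same debounce using plain View.postDelayed/removeCallbacks instead of the project's AndroidUtilities helpers; the fade and class name are illustrative.

import android.view.View;

// Illustrative debounce: showing is deferred briefly, hiding cancels any pending
// show and takes effect immediately.
final class DelayedToggle {
    private final View target;
    private Runnable pendingShow;

    DelayedToggle(View target) {
        this.target = target;
    }

    void setShown(boolean shown) {
        if (shown) {
            if (pendingShow == null) {
                pendingShow = () -> target.animate().alpha(1f).setDuration(150).start();
                target.postDelayed(pendingShow, 340);
            }
        } else {
            if (pendingShow != null) {
                target.removeCallbacks(pendingShow);
                pendingShow = null;
            }
            target.animate().alpha(0f).setDuration(150).start();
        }
    }
}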
pack.documents.get(j); - break; - } + if (delegate != null) { + long documentId = imageViewEmoji.getSpan().documentId; + TLRPC.Document document = imageViewEmoji.getSpan().document; + String emoticon = null; + if (document == null) { + for (int i = 0; i < emojipacksProcessed.size(); ++i) { + EmojiPack pack = emojipacksProcessed.get(i); + for (int j = 0; pack.documents != null && j < pack.documents.size(); ++j) { + if (pack.documents.get(j).id == documentId) { + document = pack.documents.get(j); + break; } } } - if (document == null) { - document = AnimatedEmojiDrawable.findDocument(currentAccount, documentId); - } - if (emoticon == null && document != null) { - emoticon = MessageObject.findAnimatedEmojiEmoticon(document); - } - if (!MessageObject.isFreeEmoji(document) && !UserConfig.getInstance(currentAccount).isPremium() && !(delegate != null && delegate.isUserSelf()) && !allowEmojisForNonPremium) { - showBottomTab(false, true); - BulletinFactory factory = fragment != null ? BulletinFactory.of(fragment) : BulletinFactory.of(bulletinContainer, resourcesProvider); - if (premiumBulletin || fragment == null) { - factory.createEmojiBulletin( + } + if (document == null) { + document = AnimatedEmojiDrawable.findDocument(currentAccount, documentId); + } + if (emoticon == null && document != null) { + emoticon = MessageObject.findAnimatedEmojiEmoticon(document); + } + if (!MessageObject.isFreeEmoji(document) && !UserConfig.getInstance(currentAccount).isPremium() && !(delegate != null && delegate.isUserSelf()) && !allowEmojisForNonPremium) { + showBottomTab(false, true); + BulletinFactory factory = fragment != null ? BulletinFactory.of(fragment) : BulletinFactory.of(bulletinContainer, resourcesProvider); + if (premiumBulletin || fragment == null) { + factory.createEmojiBulletin( document, AndroidUtilities.replaceTags(LocaleController.getString("UnlockPremiumEmojiHint", R.string.UnlockPremiumEmojiHint)), LocaleController.getString("PremiumMore", R.string.PremiumMore), EmojiView.this::openPremiumAnimatedEmojiFeature - ).show(); - } else { - factory.createSimpleBulletin( + ).show(); + } else { + factory.createSimpleBulletin( R.raw.saved_messages, AndroidUtilities.replaceTags(LocaleController.getString("UnlockPremiumEmojiHint2", R.string.UnlockPremiumEmojiHint2)), LocaleController.getString("Open", R.string.Open), @@ -1014,39 +1297,59 @@ public void onTransitionAnimationEnd(boolean isOpen, boolean backward) { } }); } - ).show(); - } - premiumBulletin = !premiumBulletin; - return; + ).show(); } - shownBottomTabAfterClick = SystemClock.elapsedRealtime(); - showBottomTab(true, true); - addEmojiToRecent("animated_" + documentId); - delegate.onCustomEmojiSelected(documentId, document, emoticon, isRecent); + premiumBulletin = !premiumBulletin; + return; } - return; + shownBottomTabAfterClick = SystemClock.elapsedRealtime(); + showBottomTab(true, true); + addEmojiToRecent("animated_" + documentId); + delegate.onCustomEmojiSelected(documentId, document, emoticon, imageViewEmoji.isRecent); } - shownBottomTabAfterClick = SystemClock.elapsedRealtime(); - showBottomTab(true, true); - String code = override != null ? 
override : (String) getTag(); - SpannableStringBuilder builder = new SpannableStringBuilder(); - builder.append(code); - if (override == null) { - if (!isRecent) { - String color = Emoji.emojiColor.get(code); - if (color != null) { - code = addColorToCode(code, color); - } - } - addEmojiToRecent(code); - if (delegate != null) { - delegate.onEmojiSelected(Emoji.fixEmoji(code)); - } - } else { - if (delegate != null) { - delegate.onEmojiSelected(Emoji.fixEmoji(override)); + return; + } + shownBottomTabAfterClick = SystemClock.elapsedRealtime(); + showBottomTab(true, true); + String code = override != null ? override : (String) imageViewEmoji.getTag(); + SpannableStringBuilder builder = new SpannableStringBuilder(); + builder.append(code); + if (override == null) { + if (!imageViewEmoji.isRecent) { + String color = Emoji.emojiColor.get(code); + if (color != null) { + code = addColorToCode(code, color); } } + addEmojiToRecent(code); + if (delegate != null) { + delegate.onEmojiSelected(Emoji.fixEmoji(code)); + } + } else { + if (delegate != null) { + delegate.onEmojiSelected(Emoji.fixEmoji(override)); + } + } + } + + private boolean premiumBulletin = true; + public static class ImageViewEmoji extends ImageView { + public int position; + + public ImageReceiver imageReceiver; + public AnimatedEmojiDrawable drawable; + public boolean ignoring; + private boolean isRecent; + private AnimatedEmojiSpan span; + private EmojiPack pack; + private ImageReceiver.BackgroundThreadDrawHolder[] backgroundThreadDrawHolder = new ImageReceiver.BackgroundThreadDrawHolder[DrawingInBackgroundThreadDrawable.THREAD_COUNT]; + float pressedProgress; + ValueAnimator backAnimator; + + public ImageViewEmoji(Context context) { + super(context); + setScaleType(ImageView.ScaleType.CENTER); + setBackground(Theme.createRadSelectorDrawable(Theme.getColor(Theme.key_listSelector), AndroidUtilities.dp(2), AndroidUtilities.dp(2))); } public void setImageDrawable(Drawable drawable, boolean recent) { @@ -1339,7 +1642,7 @@ public EmojiView(BaseFragment fragment, boolean needAnimatedEmoji, boolean needS int color = getThemedColor(Theme.key_chat_emojiBottomPanelIcon); color = Color.argb(30, Color.red(color), Color.green(color), Color.blue(color)); - searchFieldHeight = AndroidUtilities.dp(64); + searchFieldHeight = AndroidUtilities.dp(50); needEmojiSearch = needSearch; tabIcons = new Drawable[]{ @@ -1409,6 +1712,7 @@ public void getOutline(View view, Outline outline) { emojiItemAnimator.setChangeDuration(160); emojiItemAnimator.setMoveInterpolator(CubicBezierInterpolator.EASE_OUT); emojiGridView.setItemAnimator(emojiItemAnimator); + emojiGridView.setOnTouchListener((v, event) -> ContentPreviewViewer.getInstance().onTouch(event, emojiGridView, EmojiView.this.getMeasuredHeight(), null, contentPreviewViewerDelegate, resourcesProvider)); emojiGridView.setOnItemLongClickListener(new RecyclerListView.OnItemLongClickListener() { @Override public boolean onItemClick(View view, int position) { @@ -1551,6 +1855,7 @@ public int getSpanSize(int position) { needEmojiSearch && position == 0 || position == emojiAdapter.trendingRow || position == emojiAdapter.trendingHeaderRow || + position == emojiAdapter.recentlyUsedHeaderRow || emojiAdapter.positionToSection.indexOfKey(position) >= 0 || emojiAdapter.positionToUnlock.indexOfKey(position) >= 0 ) { @@ -1630,11 +1935,22 @@ public void setTranslationY(float translationY) { } } + @Override + protected boolean doIncludeFeatured() { + return !(featuredEmojiSets.size() > 0 && featuredEmojiSets.get(0).set != 
null && MessagesController.getEmojiSettings(currentAccount).getLong("emoji_featured_hidden", 0) != featuredEmojiSets.get(0).set.id && UserConfig.getInstance(UserConfig.selectedAccount).isPremium()); + } + @Override protected boolean onTabClick(int index) { if (emojiSmoothScrolling) { return false; } + if (emojiSearchAdapter != null) { + emojiSearchAdapter.search(null); + } + if (emojiSearchField != null && emojiSearchField.categoriesListView != null) { + emojiSearchField.categoriesListView.selectCategory(null); + } Integer position = null; int offset = 0; if (index == 0) { @@ -1739,7 +2055,7 @@ protected void onMeasure(int widthSpec, int heightSpec) { protected void onLayout(boolean changed, int l, int t, int r, int b) { if (firstGifAttach && gifAdapter.getItemCount() > 1) { ignoreLayout = true; - gifLayoutManager.scrollToPositionWithOffset(1, 0); + gifLayoutManager.scrollToPositionWithOffset(0, 0); gifSearchField.setVisibility(VISIBLE); gifTabs.onPageScrolled(0, 0); firstGifAttach = false; @@ -1769,17 +2085,17 @@ public void getItemOffsets(android.graphics.Rect outRect, View view, RecyclerVie return; } - if (position != 0) { + if (position != 0 || !gifAdapter.addSearch) { outRect.left = 0; outRect.bottom = 0; outRect.top = AndroidUtilities.dp(2); - outRect.right = gifLayoutManager.isLastInRow(position - 1) ? 0 : AndroidUtilities.dp(2); + outRect.right = gifLayoutManager.isLastInRow(position - (gifAdapter.addSearch ? 1 : 0)) ? 0 : AndroidUtilities.dp(2); } else { outRect.set(0, 0, 0, 0); } } }); - gifGridView.setPadding(0, AndroidUtilities.dp(36 + 4), 0, AndroidUtilities.dp(44)); + gifGridView.setPadding(0, searchFieldHeight, 0, AndroidUtilities.dp(44)); gifGridView.setOverScrollMode(RecyclerListView.OVER_SCROLL_NEVER); ((SimpleItemAnimator) gifGridView.getItemAnimator()).setSupportsChangeAnimations(false); gifGridView.setAdapter(gifAdapter = new GifAdapter(context, true)); @@ -1790,7 +2106,9 @@ public void getItemOffsets(android.graphics.Rect outRect, View view, RecyclerVie if (delegate == null) { return; } - position--; + if (gifAdapter.addSearch) { + position--; + } if (gifGridView.getAdapter() == gifAdapter) { if (position < 0) { return; @@ -1820,7 +2138,7 @@ public void getItemOffsets(android.graphics.Rect outRect, View view, RecyclerVie gifContainer.addView(gifGridView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); gifSearchField = new SearchField(context, 2); - gifSearchField.setVisibility(INVISIBLE); +// gifSearchField.setVisibility(INVISIBLE); gifContainer.addView(gifSearchField, new FrameLayout.LayoutParams(LayoutHelper.MATCH_PARENT, searchFieldHeight + AndroidUtilities.getShadowHeight())); gifTabs = new DraggableScrollSlidingTabStrip(context, resourcesProvider); @@ -1829,7 +2147,7 @@ public void getItemOffsets(android.graphics.Rect outRect, View view, RecyclerVie gifTabs.setIndicatorColor(getThemedColor(Theme.key_chat_emojiPanelStickerPackSelectorLine)); gifTabs.setUnderlineColor(getThemedColor(Theme.key_chat_emojiPanelShadowLine)); gifTabs.setBackgroundColor(getThemedColor(Theme.key_chat_emojiPanelBackground)); - gifContainer.addView(gifTabs, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, StickerTabView.SMALL_HEIGHT, Gravity.LEFT | Gravity.TOP)); +// gifContainer.addView(gifTabs, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, StickerTabView.SMALL_HEIGHT, Gravity.LEFT | Gravity.TOP)); updateGifTabs(); gifTabs.setDelegate(page -> { @@ -1911,7 +2229,7 @@ public void setVisibility(int visibility) { protected void onLayout(boolean 
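The GIF grid's getItemOffsets() above subtracts one from the adapter position whenever the optional search row is present, so spacing math runs on a "content position". A generic sketch of that bookkeeping; hasHeader, spacingPx and the isLastInRow() placeholder are illustrative stand-ins, not the project's layout manager logic.

import android.graphics.Rect;
import android.view.View;
import androidx.recyclerview.widget.RecyclerView;

// Illustrative decoration: spacing decisions use a content position with the
// optional leading header subtracted out, mirroring position - (addSearch ? 1 : 0).
class HeaderAwareSpacing extends RecyclerView.ItemDecoration {
    private final boolean hasHeader;
    private final int spacingPx;

    HeaderAwareSpacing(boolean hasHeader, int spacingPx) {
        this.hasHeader = hasHeader;
        this.spacingPx = spacingPx;
    }

    @Override
    public void getItemOffsets(Rect outRect, View view, RecyclerView parent, RecyclerView.State state) {
        int position = parent.getChildAdapterPosition(view);
        if (position == RecyclerView.NO_POSITION || (hasHeader && position == 0)) {
            outRect.set(0, 0, 0, 0); // the header row itself gets no extra spacing
            return;
        }
        int contentPosition = position - (hasHeader ? 1 : 0);
        outRect.top = spacingPx;
        outRect.right = isLastInRow(contentPosition) ? 0 : spacingPx;
    }

    private boolean isLastInRow(int contentPosition) {
        // Placeholder: real row logic belongs to the layout manager, as in the hunk above.
        return false;
    }
}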
changed, int l, int t, int r, int b) { if (firstStickersAttach && stickersGridAdapter.getItemCount() > 0) { ignoreLayout = true; - stickersLayoutManager.scrollToPositionWithOffset(1, 0); + stickersLayoutManager.scrollToPositionWithOffset(0, 0); firstStickersAttach = false; ignoreLayout = false; } @@ -1926,8 +2244,15 @@ public void requestLayout() { } super.requestLayout(); } - }; + @Override + public void onScrolled(int dx, int dy) { + super.onScrolled(dx, dy); + if (stickersTabContainer != null) { + stickersTab.setUnderlineHeight(stickersGridView.canScrollVertically(-1) ? AndroidUtilities.getShadowHeight() : 0); + } + } + }; stickersGridView.setLayoutManager(stickersLayoutManager = new GridLayoutManager(context, 5) { @Override public void smoothScrollToPosition(RecyclerView recyclerView, RecyclerView.State state, int position) { @@ -1979,7 +2304,7 @@ public int getSpanSize(int position) { } } }); - stickersGridView.setPadding(0, AndroidUtilities.dp(4 + 36), 0, AndroidUtilities.dp(44)); + stickersGridView.setPadding(0, AndroidUtilities.dp(36), 0, AndroidUtilities.dp(44)); stickersGridView.setClipToPadding(false); Tab stickersTabHolder = new Tab(); @@ -2050,12 +2375,33 @@ protected void stickerSetPositionChanged(int fromPosition, int toPosition) { } return 0; }); + if (frozenStickerSets != null) { + frozenStickerSets.clear(); + frozenStickerSets.addAll(stickerSets); + } reloadStickersAdapter(); AndroidUtilities.cancelRunOnUIThread(checkExpandStickerTabsRunnable); AndroidUtilities.runOnUIThread(checkExpandStickerTabsRunnable, 1500); sendReorder(); updateStickerTabs(true); + + if (SharedConfig.updateStickersOrderOnSend) { + SharedConfig.toggleUpdateStickersOrderOnSend(); + if (fragment != null) { + BulletinFactory.of(fragment).createSimpleBulletin( + R.raw.filter_reorder, + LocaleController.getString("DynamicPackOrderOff", R.string.DynamicPackOrderOff), + LocaleController.getString("DynamicPackOrderOffInfo", R.string.DynamicPackOrderOffInfo), + LocaleController.getString("Settings"), + () -> fragment.presentFragment(new StickersActivity(MediaDataController.TYPE_IMAGE, null)) + ).show(); + } else if (bulletinContainer != null) { + BulletinFactory.of(bulletinContainer, EmojiView.this.resourcesProvider).createSimpleBulletin(R.raw.filter_reorder, LocaleController.getString("DynamicPackOrderOff", R.string.DynamicPackOrderOff), LocaleController.getString("DynamicPackOrderOffInfo", R.string.DynamicPackOrderOffInfo)).show(); + } else { + return; + } + } } private void swapListElements(List list, int index1, int index2) { @@ -2083,7 +2429,7 @@ protected void invalidateOverlays() { stickersTab.setDragEnabled(true); stickersTab.setWillNotDraw(false); stickersTab.setType(ScrollSlidingTabStrip.Type.TAB); - stickersTab.setUnderlineHeight(AndroidUtilities.getShadowHeight()); + stickersTab.setUnderlineHeight(stickersGridView.canScrollVertically(-1) ? 
AndroidUtilities.getShadowHeight() : 0); stickersTab.setIndicatorColor(getThemedColor(Theme.key_chat_emojiPanelStickerPackSelectorLine)); stickersTab.setUnderlineColor(getThemedColor(Theme.key_chat_emojiPanelShadowLine)); @@ -2128,7 +2474,14 @@ protected void onLayout(boolean changed, int left, int top, int right, int botto if (page == trendingTabNum) { openTrendingStickers(null); return; - } else if (page == recentTabNum) { + } + + if (stickersSearchField != null && stickersSearchField.isCategorySelected()) { + stickersSearchField.search(null, false); + stickersSearchField.categoriesListView.selectCategory(null); + } + + if (page == recentTabNum) { stickersGridView.stopScroll(); scrollStickersToPosition(stickersGridAdapter.getPositionForPack("recent"), 0); resetTabsY(Type.STICKERS); @@ -2182,6 +2535,9 @@ protected void onLayout(boolean changed, int left, int top, int right, int botto pager = new ViewPager(context) { @Override public boolean onInterceptTouchEvent(MotionEvent ev) { + if (ignorePagerScroll) { + return false; + } if (getParent() != null) { getParent().requestDisallowInterceptTouchEvent(canScrollHorizontally(-1)); } @@ -2202,12 +2558,12 @@ public void setCurrentItem(int item, boolean smoothScroll) { animator.setDuration(150); animator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); animator.start(); - scrollEmojisToPosition(0, 0); + scrollEmojisToPosition(1, 0); if (emojiTabs != null) { emojiTabs.select(0); } } else if (item == 1) { - gifGridView.smoothScrollToPosition(1); + gifGridView.smoothScrollToPosition(0); } else { stickersGridView.smoothScrollToPosition(1); } @@ -2257,15 +2613,8 @@ public void onClick(View v) { addView(bulletinContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 100, Gravity.BOTTOM | Gravity.FILL_HORIZONTAL, 0, 0, 0, 0)); } - bottomTabContainer = new FrameLayout(context) { - @Override - public boolean onInterceptTouchEvent(MotionEvent ev) { - if (getParent() != null) { - getParent().requestDisallowInterceptTouchEvent(true); - } - return super.onInterceptTouchEvent(ev); - } - }; + bottomTabContainer = new FrameLayout(context); + bottomTabContainer.setClickable(true); shadowLine = new View(context); shadowLine.setBackgroundColor(getThemedColor(Theme.key_chat_emojiPanelShadowLine)); @@ -2303,9 +2652,10 @@ public void onClick(View v) { typeTabs = new PagerSlidingTabStrip(context, resourcesProvider); typeTabs.setViewPager(pager); typeTabs.setShouldExpand(false); - typeTabs.setIndicatorHeight(0); + typeTabs.setIndicatorHeight(AndroidUtilities.dp(3)); + typeTabs.setIndicatorColor(getThemedColor(Theme.key_chat_emojiPanelIconSelected)); typeTabs.setUnderlineHeight(0); - typeTabs.setTabPaddingLeftRight(AndroidUtilities.dp(10)); + typeTabs.setTabPaddingLeftRight(AndroidUtilities.dp(13)); bottomTabContainer.addView(typeTabs, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 40, Gravity.CENTER_HORIZONTAL | Gravity.BOTTOM)); typeTabs.setOnPageChangeListener(new ViewPager.OnPageChangeListener() { @Override @@ -2376,6 +2726,7 @@ public void onPageScrollStateChanged(int state) { searchButton.setScaleType(ImageView.ScaleType.CENTER); searchButton.setContentDescription(LocaleController.getString("Search", R.string.Search)); searchButton.setFocusable(true); + searchButton.setVisibility(View.GONE); if (Build.VERSION.SDK_INT >= 21) { searchButton.setBackground(Theme.createSelectorDrawable(color, Theme.RIPPLE_MASK_CIRCLE_20DP, AndroidUtilities.dp(18))); } @@ -2439,9 +2790,9 @@ public void getOutline(View view, Outline outline) { addView(pager, 0, 
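The stickers grid above toggles the tab strip's underline from onScrolled() based on canScrollVertically(-1), so the divider only appears once rows are actually hidden behind the tabs. The same idea expressed with a standard scroll listener, using elevation as a stand-in for the project's setUnderlineHeight(); names are illustrative.

import android.view.View;
import androidx.recyclerview.widget.RecyclerView;

// Illustrative: show a divider (here: elevation, API 21+) under the tab strip only
// while the list can still scroll back up.
final class ScrollDivider {
    private ScrollDivider() {}

    static void bind(RecyclerView list, View tabStrip, float dividerElevationPx) {
        list.addOnScrollListener(new RecyclerView.OnScrollListener() {
            @Override
            public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
                boolean scrolled = recyclerView.canScrollVertically(-1);
                tabStrip.setElevation(scrolled ? dividerElevationPx : 0f);
            }
        });
    }
}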
LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP)); mediaBanTooltip = new CorrectlyMeasuringTextView(context); - mediaBanTooltip.setBackgroundDrawable(Theme.createRoundRectDrawable(AndroidUtilities.dp(3), getThemedColor(Theme.key_chat_gifSaveHintBackground))); + mediaBanTooltip.setBackground(Theme.createRoundRectDrawable(AndroidUtilities.dp(6), getThemedColor(Theme.key_chat_gifSaveHintBackground))); mediaBanTooltip.setTextColor(getThemedColor(Theme.key_chat_gifSaveHintText)); - mediaBanTooltip.setPadding(AndroidUtilities.dp(8), AndroidUtilities.dp(7), AndroidUtilities.dp(8), AndroidUtilities.dp(7)); + mediaBanTooltip.setPadding(AndroidUtilities.dp(12), AndroidUtilities.dp(7), AndroidUtilities.dp(12), AndroidUtilities.dp(7)); mediaBanTooltip.setGravity(Gravity.CENTER_VERTICAL); mediaBanTooltip.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); mediaBanTooltip.setVisibility(INVISIBLE); @@ -2479,6 +2830,12 @@ public EmojiGridView(Context context) { super(context); } + @Override + public boolean onInterceptTouchEvent(MotionEvent event) { + boolean result = ContentPreviewViewer.getInstance().onInterceptTouchEvent(event, this, 0, contentPreviewViewerDelegate, resourcesProvider); + return super.onInterceptTouchEvent(event) || result; + } + private boolean ignoreLayout; SparseArray> viewsGroupedByLines = new SparseArray<>(); @@ -2515,7 +2872,7 @@ protected void onMeasure(int widthSpec, int heightSpec) { protected void onLayout(boolean changed, int l, int t, int r, int b) { if (needEmojiSearch && firstEmojiAttach) { ignoreLayout = true; - emojiLayoutManager.scrollToPositionWithOffset(1, 0); + emojiLayoutManager.scrollToPositionWithOffset(0, 0); firstEmojiAttach = false; ignoreLayout = false; } @@ -2566,7 +2923,7 @@ public boolean onTouchEvent(MotionEvent event) { Emoji.emojiColor.remove(code); } emojiTouchedView.setImageDrawable(Emoji.getEmojiBigDrawable(code), emojiTouchedView.isRecent); - emojiTouchedView.sendEmoji(null); + sendEmoji(emojiTouchedView, null); try { performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); } catch (Exception ignore) {} @@ -2578,11 +2935,10 @@ public boolean onTouchEvent(MotionEvent event) { .replace("\uD83C\uDFFD", "") .replace("\uD83C\uDFFE", "") .replace("\uD83C\uDFFF", ""); - if (color != null) { - emojiTouchedView.sendEmoji(addColorToCode(code, color)); + sendEmoji(emojiTouchedView, addColorToCode(code, color)); } else { - emojiTouchedView.sendEmoji(code); + sendEmoji(emojiTouchedView, code); } } } @@ -2692,7 +3048,6 @@ protected void dispatchDraw(Canvas canvas) { } } } - } } @@ -2767,7 +3122,7 @@ protected void onDetachedFromWindow() { lineDrawables.clear(); } - private HashMap touches; + private SparseArray touches; class TouchDownInfo { float x, y; long time; @@ -2775,9 +3130,11 @@ class TouchDownInfo { } public void clearTouchesFor(View view) { if (touches != null) { - for (Map.Entry e : touches.entrySet()) { - if (e != null && e.getValue().view == view) { - TouchDownInfo touch = touches.remove(e.getKey()); + for (int i = 0; i < touches.size(); i++) { + TouchDownInfo touch = touches.valueAt(i); + if (touch.view == view) { + touches.removeAt(i); + i--; if (touch != null) { if (touch.view != null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && touch.view.getBackground() instanceof RippleDrawable) { touch.view.getBackground().setState(new int[]{}); @@ -2790,6 +3147,23 @@ public void clearTouchesFor(View view) { } } } + public void 
clearAllTouches() { + if (touches != null) { + for (int i = 0; i < touches.size(); i++) { + TouchDownInfo touch = touches.valueAt(i); + touches.removeAt(i); + i--; + if (touch != null) { + if (touch.view != null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && touch.view.getBackground() instanceof RippleDrawable) { + touch.view.getBackground().setState(new int[]{}); + } + if (touch.view != null) { + touch.view.setPressed(false); + } + } + } + } + } public long animateExpandDuration() { return animateExpandAppearDuration() + animateExpandCrossfadeDuration() + 150; @@ -2816,7 +3190,7 @@ public void draw(Canvas canvas, long time, int w, int h, float alpha) { if (imageViewEmojis == null) { return; } - boolean drawInUi = imageViewEmojis.size() <= 4 || SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW || SharedConfig.getLiteMode().enabled(); + boolean drawInUi = imageViewEmojis.size() <= 4 || SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW || !LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_KEYBOARD); if (!drawInUi) { boolean animatedExpandIn = animateExpandStartTime > 0 && (SystemClock.elapsedRealtime() - animateExpandStartTime) < animateExpandDuration(); for (int i = 0; i < imageViewEmojis.size(); i++) { @@ -2953,7 +3327,7 @@ public boolean dispatchTouchEvent(MotionEvent ev) { int index = ev.getActionIndex(); int id = ev.getPointerId(index); if (touches == null) { - touches = new HashMap<>(); + touches = new SparseArray<>(); } float x = ev.getX(index), y = ev.getY(index); @@ -2975,7 +3349,8 @@ public boolean dispatchTouchEvent(MotionEvent ev) { stopScroll(); } } else { - touch = touches.remove(id); + touch = touches.get(id); + touches.remove(id); if ( touchChild != null && touch != null && Math.sqrt(Math.pow(x - touch.x, 2) + Math.pow(y - touch.y, 2)) < AndroidUtilities.touchSlop * 3 && @@ -2984,15 +3359,16 @@ public boolean dispatchTouchEvent(MotionEvent ev) { (!pickerViewPopup.isShowing() || SystemClock.elapsedRealtime() - touch.time < ViewConfiguration.getLongPressTimeout()) ) { View view = touch.view; + int position = getChildAdapterPosition(touch.view); if (view instanceof ImageViewEmoji) { ImageViewEmoji viewEmoji = (ImageViewEmoji) view; - viewEmoji.sendEmoji(null); + sendEmoji(viewEmoji, null); try { performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); } catch (Exception ignore) {} } else if (view instanceof EmojiPackExpand) { EmojiPackExpand button = (EmojiPackExpand) view; - emojiAdapter.expand(getChildAdapterPosition(button), button); + emojiAdapter.expand(position, button); try { performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); } catch (Exception ignore) {} @@ -3008,7 +3384,7 @@ public boolean dispatchTouchEvent(MotionEvent ev) { } } } - return super.dispatchTouchEvent(ev) || !cancel && !touches.isEmpty(); + return super.dispatchTouchEvent(ev) || !cancel && touches.size() > 0; } private Path lockPath; @@ -3166,7 +3542,7 @@ private void updateEmojiTabsPosition() { private void updateEmojiTabsPosition(int position) { if (!emojiSmoothScrolling && position != RecyclerView.NO_POSITION) { int tab = 0; - int count = getRecentEmoji().size() + (needEmojiSearch ? 1 : 0); + int count = getRecentEmoji().size() + (needEmojiSearch ? 1 : 0) + (emojiAdapter.trendingHeaderRow >= 0 ? 
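The touch bookkeeping above moves from HashMap<Integer, TouchDownInfo> to SparseArray, which avoids boxing the small integer pointer ids; removal while scanning uses removeAt() followed by an index step-back, as in clearTouchesFor()/clearAllTouches(). A minimal standalone sketch with a placeholder value type.

import android.util.SparseArray;

// Illustrative tracker: SparseArray keyed by pointer id, with safe removal
// during iteration (removeAt + i-- so the shifted element is not skipped).
final class PointerTracker {
    static final class TouchInfo {
        float x, y;
        long downTime;
    }

    private final SparseArray<TouchInfo> touches = new SparseArray<>();

    void onDown(int pointerId, float x, float y, long time) {
        TouchInfo info = new TouchInfo();
        info.x = x;
        info.y = y;
        info.downTime = time;
        touches.put(pointerId, info);
    }

    TouchInfo onUp(int pointerId) {
        TouchInfo info = touches.get(pointerId);
        touches.remove(pointerId);
        return info;
    }

    void clearOlderThan(long cutoffTime) {
        for (int i = 0; i < touches.size(); i++) {
            if (touches.valueAt(i).downTime < cutoffTime) {
                touches.removeAt(i);
                i--; // compensate for the element that just shifted into slot i
            }
        }
    }
}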
3 : 0); if (position >= count) { tab = -1; for (int a = 0; a < EmojiData.dataColored.length; a++) { @@ -3775,8 +4151,6 @@ public void getItemOffsets(@NonNull Rect outRect, @NonNull View view, @NonNull R // if (parent.getChildAdapterPosition(view) == emojiAdapter.firstTrendingRow) { // outRect.top = AndroidUtilities.dp(32); // } - } else { - outRect.bottom = AndroidUtilities.dp(6); } } else if (view instanceof BackupImageView) { outRect.bottom = AndroidUtilities.dp(12); @@ -3866,22 +4240,21 @@ public void setTranslationY(float translationY) { updateBottomTabContainerPosition(); } private void updateBottomTabContainerPosition() { - if (bottomTabContainer.getTag() == null && (delegate == null || !delegate.isSearchOpened()) && (pager == null || pager.getCurrentItem() != 0)) { - View parent = (View) getParent(); - if (parent != null) { - float y = getY() - parent.getHeight(); - if (getLayoutParams().height > 0) { - y += getLayoutParams().height; - } else { - y += getMeasuredHeight(); - } - if (bottomTabContainer.getTop() - y < 0) { - y = bottomTabContainer.getTop(); - } - bottomTabContainer.setTranslationY(-y); - if (needEmojiSearch) { - bulletinContainer.setTranslationY(-y); - } + View parent = (View) getParent(); + if (parent != null) { + float y = getY() - parent.getHeight(); + if (getLayoutParams().height > 0) { + y += getLayoutParams().height; + } else { + y += getMeasuredHeight(); + } + if (bottomTabContainer.getTop() - y < 0) { + y = 0; + } + bottomTabMainTranslation = -y; + bottomTabContainer.setTranslationY(bottomTabMainTranslation + bottomTabAdditionalTranslation); + if (needEmojiSearch) { + bulletinContainer.setTranslationY(bottomTabMainTranslation + bottomTabAdditionalTranslation); } } } @@ -4018,7 +4391,7 @@ public void showSearchField(boolean show) { } } else { if (position == 0) { - layoutManager.scrollToPositionWithOffset(1, 0); + layoutManager.scrollToPositionWithOffset(0, 0); } } } @@ -4079,7 +4452,7 @@ private void openSearch(SearchField searchField) { ObjectAnimator.ofFloat(currentField, View.TRANSLATION_Y, AndroidUtilities.dp(0))); } else { searchAnimation.playTogether( - ObjectAnimator.ofFloat(gridView, View.TRANSLATION_Y, -AndroidUtilities.dp(36)), + ObjectAnimator.ofFloat(gridView, View.TRANSLATION_Y, a == 2 ? 
0 : -AndroidUtilities.dp(36)), ObjectAnimator.ofFloat(currentField, View.TRANSLATION_Y, AndroidUtilities.dp(0))); } searchAnimation.setDuration(220); @@ -4090,9 +4463,11 @@ public void onAnimationEnd(Animator animation) { if (animation.equals(searchAnimation)) { gridView.setTranslationY(0); if (gridView == stickersGridView) { - gridView.setPadding(0, AndroidUtilities.dp(4), 0, 0); - } else if (gridView == emojiGridView || gridView == gifGridView) { gridView.setPadding(0, 0, 0, 0); + } else if (gridView == emojiGridView) { + gridView.setPadding(AndroidUtilities.dp(5), 0, AndroidUtilities.dp(5), 0); + } else if (gridView == gifGridView) { + gridView.setPadding(0, searchFieldHeight, 0, 0); } searchAnimation = null; } @@ -4113,8 +4488,10 @@ public void onAnimationCancel(Animator animation) { } if (gridView == stickersGridView) { gridView.setPadding(0, AndroidUtilities.dp(4), 0, 0); - } else if (gridView == emojiGridView || gridView == gifGridView) { - gridView.setPadding(0, 0, 0, 0); + } else if (gridView == emojiGridView) { + gridView.setPadding(AndroidUtilities.dp(5), 0, AndroidUtilities.dp(5), 0); + } else if (gridView == gifGridView) { + gridView.setPadding(0, searchFieldHeight, 0, 0); } if (gridView == gifGridView) { if (gifSearchAdapter.showTrendingWhenSearchEmpty = gifAdapter.results.size() > 0) { @@ -4127,6 +4504,7 @@ public void onAnimationCancel(Animator animation) { layoutManager.scrollToPositionWithOffset(0, 0); } } + showBottomTab(false, true); } private void showEmojiShadow(boolean show, boolean animated) { @@ -4260,18 +4638,23 @@ public void closeSearch(boolean animated, long scrollToSet) { } currentField.searchEditText.setText(""); + if (currentField.categoriesListView != null) { + currentField.categoriesListView.selectCategory(null); + currentField.categoriesListView.scrollToStart(); + } if (a == currentItem && animated) { searchAnimation = new AnimatorSet(); - if (tabStrip != null && a != 2) { + if (tabStrip != null && a != 1) { searchAnimation.playTogether( - ObjectAnimator.ofFloat(tabStrip, View.TRANSLATION_Y, 0), - ObjectAnimator.ofFloat(gridView, View.TRANSLATION_Y, AndroidUtilities.dp(36) - searchFieldHeight), - ObjectAnimator.ofFloat(currentField, View.TRANSLATION_Y, AndroidUtilities.dp(36) - searchFieldHeight)); + ObjectAnimator.ofFloat(tabStrip, View.TRANSLATION_Y, 0), + ObjectAnimator.ofFloat(gridView, View.TRANSLATION_Y, AndroidUtilities.dp(36)), + ObjectAnimator.ofFloat(currentField, View.TRANSLATION_Y, AndroidUtilities.dp(36)) + ); } else { searchAnimation.playTogether( - ObjectAnimator.ofFloat(gridView, View.TRANSLATION_Y, AndroidUtilities.dp(36) - searchFieldHeight), - ObjectAnimator.ofFloat(currentField, View.TRANSLATION_Y, -searchFieldHeight)); + ObjectAnimator.ofFloat(gridView, View.TRANSLATION_Y, AndroidUtilities.dp(36) - searchFieldHeight) + ); } searchAnimation.setDuration(200); searchAnimation.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); @@ -4280,21 +4663,16 @@ public void closeSearch(boolean animated, long scrollToSet) { public void onAnimationEnd(Animator animation) { if (animation.equals(searchAnimation)) { int firstVisPos = layoutManager.findFirstVisibleItemPosition(); - int top = 0; - if (firstVisPos != RecyclerView.NO_POSITION) { - View firstVisView = layoutManager.findViewByPosition(firstVisPos); - top = (int) (firstVisView.getTop() + gridView.getTranslationY()); - } gridView.setTranslationY(0); if (gridView == stickersGridView) { - gridView.setPadding(0, AndroidUtilities.dp(36 + 4), 0, AndroidUtilities.dp(44)); + gridView.setPadding(0, 
AndroidUtilities.dp(36), 0, AndroidUtilities.dp(44)); } else if (gridView == gifGridView) { - gridView.setPadding(0, AndroidUtilities.dp(36 + 4), 0, AndroidUtilities.dp(44)); + gridView.setPadding(0, searchFieldHeight, 0, AndroidUtilities.dp(44)); } else if (gridView == emojiGridView) { - gridView.setPadding(0, AndroidUtilities.dp(36), 0, AndroidUtilities.dp(44)); + gridView.setPadding(AndroidUtilities.dp(5), AndroidUtilities.dp(36), AndroidUtilities.dp(5), AndroidUtilities.dp(44)); } if (firstVisPos != RecyclerView.NO_POSITION) { - layoutManager.scrollToPositionWithOffset(firstVisPos, top - gridView.getPaddingTop()); + layoutManager.scrollToPositionWithOffset(firstVisPos, 0); } searchAnimation = null; } @@ -4309,24 +4687,25 @@ public void onAnimationCancel(Animator animation) { }); searchAnimation.start(); } else { - currentField.setTranslationY(AndroidUtilities.dp(36) - searchFieldHeight); + if (currentField != gifSearchField) { + currentField.setTranslationY(AndroidUtilities.dp(36) - searchFieldHeight); + } if (tabStrip != null && a != 2) { tabStrip.setTranslationY(0); } if (gridView == stickersGridView) { - gridView.setPadding(0, AndroidUtilities.dp(36 + 4), 0, AndroidUtilities.dp(44)); + gridView.setPadding(0, AndroidUtilities.dp(36), 0, AndroidUtilities.dp(44)); } else if (gridView == gifGridView) { gridView.setPadding(0, AndroidUtilities.dp(36 + 4), 0, AndroidUtilities.dp(44)); } else if (gridView == emojiGridView) { - gridView.setPadding(0, AndroidUtilities.dp(36), 0, AndroidUtilities.dp(44)); + gridView.setPadding(AndroidUtilities.dp(5), AndroidUtilities.dp(36), AndroidUtilities.dp(5), AndroidUtilities.dp(44)); } - layoutManager.scrollToPositionWithOffset(1, 0); + layoutManager.scrollToPositionWithOffset(0, 0); } } if (!animated) { delegate.onSearchOpenClose(0); } - showBottomTab(true, animated); } private void checkStickersSearchFieldScroll(boolean isLayout) { @@ -4446,33 +4825,45 @@ public void onAnimationEnd(Animator animation) { } } + private ValueAnimator bottomTabContainerAnimator; + private float bottomTabMainTranslation, bottomTabAdditionalTranslation; private long shownBottomTabAfterClick; private void showBottomTab(boolean show, boolean animated) { lastBottomScrollDy = 0; - if (show && bottomTabContainer.getTag() == null || !show && bottomTabContainer.getTag() != null || delegate != null && delegate.isSearchOpened()) { + if (delegate != null && delegate.isSearchOpened()) { + show = false; + } + if (show && bottomTabContainer.getTag() == null || !show && bottomTabContainer.getTag() != null) { return; } - if (bottomTabContainerAnimation != null) { - bottomTabContainerAnimation.cancel(); - bottomTabContainerAnimation = null; + if (bottomTabContainerAnimator != null) { + bottomTabContainerAnimator.cancel(); + bottomTabContainerAnimator = null; } bottomTabContainer.setTag(show ? null : 1); if (animated) { - bottomTabContainerAnimation = new AnimatorSet(); - bottomTabContainerAnimation.playTogether( - ObjectAnimator.ofFloat(bottomTabContainer, View.TRANSLATION_Y, show ? 0 : AndroidUtilities.dp(needEmojiSearch ? 45 : 50)), - ObjectAnimator.ofFloat(bulletinContainer, View.TRANSLATION_Y, needEmojiSearch ? (show ? 0 : AndroidUtilities.dp(needEmojiSearch ? 45 : 50)) : bulletinContainer.getTranslationY()), - ObjectAnimator.ofFloat(shadowLine, View.TRANSLATION_Y, show ? 
0 : AndroidUtilities.dp(45)) - ); - bottomTabContainerAnimation.setDuration(200); - bottomTabContainerAnimation.setInterpolator(CubicBezierInterpolator.EASE_OUT); - bottomTabContainerAnimation.start(); + bottomTabContainerAnimator = ValueAnimator.ofFloat(bottomTabAdditionalTranslation, show ? 0 : AndroidUtilities.dp(needEmojiSearch ? 45 : 50)); + bottomTabContainerAnimator.addUpdateListener(anm -> { + bottomTabAdditionalTranslation = (float) anm.getAnimatedValue(); + updateBottomTabContainerPosition(); + }); + bottomTabContainerAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (bottomTabContainerAnimator != animation) { + return; + } + bottomTabAdditionalTranslation = (float) bottomTabContainerAnimator.getAnimatedValue(); + updateBottomTabContainerPosition(); + bottomTabContainerAnimator = null; + } + }); + bottomTabContainerAnimator.setDuration(380); + bottomTabContainerAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + bottomTabContainerAnimator.start(); } else { - bottomTabContainer.setTranslationY(show ? 0 : AndroidUtilities.dp(needEmojiSearch ? 45 : 50)); - if (needEmojiSearch) { - bulletinContainer.setTranslationY(show ? 0 : AndroidUtilities.dp(needEmojiSearch ? 45 : 50)); - } - shadowLine.setTranslationY(show ? 0 : AndroidUtilities.dp(45)); + bottomTabAdditionalTranslation = show ? 0 : AndroidUtilities.dp(needEmojiSearch ? 45 : 50); + updateBottomTabContainerPosition(); } } @@ -4558,6 +4949,9 @@ private void animateSearchField(@Type int type) { } private void animateSearchField(@Type int type, boolean visible, int tabsMinusDy) { + if (type == Type.GIFS) { + return; + } if (getListViewForType(type).findViewHolderForAdapterPosition(0) == null) { return; } @@ -4714,17 +5108,17 @@ private void checkGifSearchFieldScroll(boolean isLayout) { if (gifSearchField == null || gifGridView == null) { return; } - RecyclerView.ViewHolder holder = gifGridView.findViewHolderForAdapterPosition(0); - if (holder != null) { - gifSearchField.setTranslationY(holder.itemView.getTop()); - } else { - gifSearchField.setTranslationY(-searchFieldHeight); - } - gifSearchField.showShadow(false, !isLayout); +// RecyclerView.ViewHolder holder = gifGridView.findViewHolderForAdapterPosition(0); +// if (holder != null) { +// gifSearchField.setTranslationY(holder.itemView.getTop()); +// } else { +// gifSearchField.setTranslationY(-searchFieldHeight); +// } + gifSearchField.showShadow(true, !isLayout); } private void scrollGifsToTop() { - gifLayoutManager.scrollToPositionWithOffset(delegate != null && delegate.isExpanded() ? 
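showBottomTab() above now splits the bar's translation into a layout-derived part (bottomTabMainTranslation) and an animated part (bottomTabAdditionalTranslation), and a single ValueAnimator drives only the latter while their sum is applied in one place. A standalone sketch of that split; class and method names are illustrative, the 380 ms duration mirrors the hunk, and the interpolator is omitted.

import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ValueAnimator;
import android.view.View;

// Illustrative: layout supplies the "main" offset, the show/hide animation supplies
// the "additional" offset, and the view only ever sees their sum.
final class TwoPartTranslation {
    private final View target;
    private float mainTranslation;        // updated from layout passes
    private float additionalTranslation;  // updated from the show/hide animation
    private ValueAnimator animator;

    TwoPartTranslation(View target) {
        this.target = target;
    }

    void setMainTranslation(float value) {
        mainTranslation = value;
        apply();
    }

    void animateAdditional(float to) {
        if (animator != null) {
            animator.cancel();
        }
        animator = ValueAnimator.ofFloat(additionalTranslation, to);
        animator.addUpdateListener(a -> {
            additionalTranslation = (float) a.getAnimatedValue();
            apply();
        });
        animator.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                if (animation == animator) {
                    animator = null;
                }
            }
        });
        animator.setDuration(380);
        animator.start();
    }

    private void apply() {
        target.setTranslationY(mainTranslation + additionalTranslation);
    }
}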
0 : 1, 0); + gifLayoutManager.scrollToPositionWithOffset(0, 0); resetTabsY(Type.GIFS); } @@ -4923,13 +5317,13 @@ private void updateStickerTabs(boolean updateStickerSets) { stickerSets.add(pack); } - if (!premiumStickers.isEmpty()) { - premiumTabNum = stickersTabOffset; - stickersTabOffset++; - StickerTabView stickerTabView = stickersTab.addStickerIconTab(4, PremiumGradient.getInstance().premiumStarMenuDrawable2); - stickerTabView.textView.setText(LocaleController.getString("PremiumStickersShort", R.string.PremiumStickersShort)); - stickerTabView.setContentDescription(LocaleController.getString("PremiumStickers", R.string.PremiumStickers)); - } +// if (!premiumStickers.isEmpty()) { +// premiumTabNum = stickersTabOffset; +// stickersTabOffset++; +// StickerTabView stickerTabView = stickersTab.addStickerIconTab(4, PremiumGradient.getInstance().premiumStarMenuDrawable2); +// stickerTabView.textView.setText(LocaleController.getString("PremiumStickersShort", R.string.PremiumStickersShort)); +// stickerTabView.setContentDescription(LocaleController.getString("PremiumStickers", R.string.PremiumStickers)); +// } if (info != null && (!NekoConfig.hideGroupSticker.Bool())) { long hiddenStickerSetId = MessagesController.getEmojiSettings(currentAccount).getLong("group_hide_stickers_" + info.id, -1); @@ -4978,9 +5372,21 @@ private void updateStickerTabs(boolean updateStickerSets) { } } else { TLRPC.TL_messages_stickerSet stickerSet = stickerSets.get(a); - TLRPC.Document document = stickerSet.documents.get(0); + TLRPC.Document document = null; + if (stickerSet.set != null && stickerSet.set.thumb_document_id != 0) { + for (int i = 0; i < stickerSet.documents.size(); ++i) { + TLRPC.Document d = stickerSet.documents.get(i); + if (d != null && stickerSet.set.thumb_document_id == d.id) { + document = d; + break; + } + } + } + if (document == null) { + document = stickerSet.documents.get(0); + } TLObject thumb = FileLoader.getClosestPhotoSizeWithSize(stickerSet.set.thumbs, 90); - if (thumb == null || stickerSet.set.gifs) { + if (thumb == null || stickerSet.set.gifs) { thumb = document; } stickersTab.addStickerTab(thumb, document, stickerSet).setContentDescription(stickerSet.set.title + ", " + LocaleController.getString("AccDescrStickerSet", R.string.AccDescrStickerSet)); @@ -5057,6 +5463,9 @@ private void updateGifTabs() { if (wasRecentTabSelected && !hasRecent) { gifTabs.selectTab(gifTrendingTabNum); + if (gifSearchField != null && gifSearchField.categoriesListView != null) { + gifSearchField.categoriesListView.selectCategory(gifSearchField.trending); + } } else if (ViewCompat.isLaidOut(gifTabs)) { if (hasRecent && !hadRecent) { gifTabs.onPageScrolled(lastPosition + 1, 0); @@ -5134,10 +5543,9 @@ public void updateColors() { } searchField.backgroundView.setBackgroundColor(getThemedColor(Theme.key_chat_emojiPanelBackground)); searchField.shadowView.setBackgroundColor(getThemedColor(Theme.key_chat_emojiPanelShadowLine)); - searchField.clearSearchImageView.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_emojiSearchIcon), PorterDuff.Mode.SRC_IN)); - searchField.searchIconImageView.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_emojiSearchIcon), PorterDuff.Mode.SRC_IN)); - Theme.setDrawableColorByKey(searchField.searchBackground.getBackground(), Theme.key_chat_emojiSearchBackground); - searchField.searchBackground.invalidate(); + searchField.searchStateDrawable.setColor(getThemedColor(Theme.key_chat_emojiSearchIcon)); + 
Theme.setDrawableColorByKey(searchField.box.getBackground(), Theme.key_chat_emojiSearchBackground); + searchField.box.invalidate(); searchField.searchEditText.setHintTextColor(getThemedColor(Theme.key_chat_emojiSearchIcon)); searchField.searchEditText.setTextColor(getThemedColor(Theme.key_windowBackgroundWhiteBlackText)); } @@ -5261,31 +5669,8 @@ protected void onLayout(boolean changed, int left, int top, int right, int botto lastNotifyWidth = right - left; reloadStickersAdapter(); } - View parent = (View) getParent(); - if (parent != null) { - int newHeight = bottom - top; - int newHeight2 = parent.getHeight(); - if (lastNotifyHeight != newHeight || lastNotifyHeight2 != newHeight2) { - if (delegate != null && delegate.isSearchOpened()) { - bottomTabContainer.setTranslationY(AndroidUtilities.dp(49)); - if (needEmojiSearch) { - bulletinContainer.setTranslationY(AndroidUtilities.dp(49)); - } - } else { - if (bottomTabContainer.getTag() == null) { - if (newHeight <= lastNotifyHeight) { - bottomTabContainer.setTranslationY(0); - if (needEmojiSearch) { - bulletinContainer.setTranslationY(0); - } - } - } - } - lastNotifyHeight = newHeight; - lastNotifyHeight2 = newHeight2; - } - } super.onLayout(changed, left, top, right, bottom); + updateBottomTabContainerPosition(); updateStickerTabsPosition(); } @@ -5324,9 +5709,12 @@ public void setForseMultiwindowLayout(boolean value) { } public void onOpen(boolean forceEmoji) { - if (currentPage != 0 && currentChatId != 0) { + if (currentPage != 0 && stickersBanned) { currentPage = 0; } + if (currentPage == 0 && emojiBanned) { + currentPage = 1; + } if (currentPage == 0 || forceEmoji || currentTabs.size() == 1) { showBackspaceButton(true, false); showStickerSettingsButton(false, false); @@ -5349,7 +5737,7 @@ public void onOpen(boolean forceEmoji) { stickersTab.selectTab(stickersTabOffset); } firstTabUpdate = false; - stickersLayoutManager.scrollToPositionWithOffset(1, 0); + stickersLayoutManager.scrollToPositionWithOffset(0, 0); } } else if (currentPage == 2) { showBackspaceButton(false, false); @@ -5360,7 +5748,11 @@ public void onOpen(boolean forceEmoji) { if (gifTabs != null) { gifTabs.selectTab(0); } + if (gifSearchField != null && gifSearchField.categoriesListView != null) { + gifSearchField.categoriesListView.selectCategory(gifSearchField.recent); + } } + showBottomTab(true, true); } @Override @@ -5495,91 +5887,97 @@ private void updateRecentGifs() { } } - public void setStickersBanned(boolean value, long chatId) { + public void setStickersBanned(boolean emojiBanned, boolean stickersBanned, long chatId) { if (typeTabs == null) { return; } - if (value) { + this.emojiBanned = emojiBanned; + this.stickersBanned = stickersBanned; + if (stickersBanned || emojiBanned) { currentChatId = chatId; } else { currentChatId = 0; } - View view = typeTabs.getTab(2); + View view = typeTabs.getTab(stickersBanned ? 2 : 0); if (view != null) { - view.setAlpha(currentChatId != 0 ? 0.5f : 1.0f); - if (currentChatId != 0 && pager.getCurrentItem() != 0) { - showBackspaceButton(true, true); - showStickerSettingsButton(false, true); - pager.setCurrentItem(0, false); + view.setAlpha(currentChatId != 0 ? 
0.15f : 1.0f); + if (stickersBanned) { + if (currentChatId != 0 && pager.getCurrentItem() != 0) { + showBackspaceButton(true, true); + showStickerSettingsButton(false, true); + pager.setCurrentItem(0, false); + } + } else { + if (currentChatId != 0 && pager.getCurrentItem() != 1) { + showBackspaceButton(false, true); + showStickerSettingsButton(false, true); + pager.setCurrentItem(1, false); + } } } } - public void showStickerBanHint(boolean gif) { - if (mediaBanTooltip.getVisibility() == VISIBLE) { - return; - } + private AnimatorSet showStickersBanAnimator; + private Runnable hideStickersBan; + public void showStickerBanHint(boolean show, boolean emoji, boolean gif) { TLRPC.Chat chat = MessagesController.getInstance(currentAccount).getChat(currentChatId); if (chat == null) { return; } - String text; - if (!ChatObject.hasAdminRights(chat) && chat.default_banned_rights != null && chat.default_banned_rights.send_stickers) { - if (gif) { - mediaBanTooltip.setText(LocaleController.getString("GlobalAttachGifRestricted", R.string.GlobalAttachGifRestricted)); - } else { - mediaBanTooltip.setText(LocaleController.getString("GlobalAttachStickersRestricted", R.string.GlobalAttachStickersRestricted)); - } - } else { - if (chat.banned_rights == null) { - return; - } - if (AndroidUtilities.isBannedForever(chat.banned_rights)) { - if (gif) { - mediaBanTooltip.setText(LocaleController.getString("AttachGifRestrictedForever", R.string.AttachGifRestrictedForever)); + if (show) { + if (!ChatObject.hasAdminRights(chat) && chat.default_banned_rights != null && (chat.default_banned_rights.send_stickers || (emoji && chat.default_banned_rights.send_plain))) { + if (emoji) { + mediaBanTooltip.setText(LocaleController.getString("GlobalAttachEmojiRestricted", R.string.GlobalAttachEmojiRestricted)); + } else if (gif) { + mediaBanTooltip.setText(LocaleController.getString("GlobalAttachGifRestricted", R.string.GlobalAttachGifRestricted)); } else { - mediaBanTooltip.setText(LocaleController.getString("AttachStickersRestrictedForever", R.string.AttachStickersRestrictedForever)); + mediaBanTooltip.setText(LocaleController.getString("GlobalAttachStickersRestricted", R.string.GlobalAttachStickersRestricted)); } } else { - if (gif) { - mediaBanTooltip.setText(LocaleController.formatString("AttachGifRestricted", R.string.AttachGifRestricted, LocaleController.formatDateForBan(chat.banned_rights.until_date))); + if (chat.banned_rights == null) { + return; + } + if (AndroidUtilities.isBannedForever(chat.banned_rights)) { + if (emoji) { + mediaBanTooltip.setText(LocaleController.getString("AttachPlainRestrictedForever", R.string.AttachPlainRestrictedForever)); + } else if (gif) { + mediaBanTooltip.setText(LocaleController.getString("AttachGifRestrictedForever", R.string.AttachGifRestrictedForever)); + } else { + mediaBanTooltip.setText(LocaleController.getString("AttachStickersRestrictedForever", R.string.AttachStickersRestrictedForever)); + } } else { - mediaBanTooltip.setText(LocaleController.formatString("AttachStickersRestricted", R.string.AttachStickersRestricted, LocaleController.formatDateForBan(chat.banned_rights.until_date))); + if (emoji) { + mediaBanTooltip.setText(LocaleController.formatString("AttachPlainRestricted", R.string.AttachPlainRestricted, LocaleController.formatDateForBan(chat.banned_rights.until_date))); + } if (gif) { + mediaBanTooltip.setText(LocaleController.formatString("AttachGifRestricted", R.string.AttachGifRestricted, LocaleController.formatDateForBan(chat.banned_rights.until_date))); + } else { + 
mediaBanTooltip.setText(LocaleController.formatString("AttachStickersRestricted", R.string.AttachStickersRestricted, LocaleController.formatDateForBan(chat.banned_rights.until_date))); + } } } + mediaBanTooltip.setVisibility(View.VISIBLE); } - mediaBanTooltip.setVisibility(View.VISIBLE); - AnimatorSet AnimatorSet = new AnimatorSet(); - AnimatorSet.playTogether( - ObjectAnimator.ofFloat(mediaBanTooltip, View.ALPHA, 0.0f, 1.0f) + + if (showStickersBanAnimator != null) { + showStickersBanAnimator.cancel(); + showStickersBanAnimator = null; + } + + showStickersBanAnimator = new AnimatorSet(); + showStickersBanAnimator.playTogether( + ObjectAnimator.ofFloat(mediaBanTooltip, View.ALPHA, show ? mediaBanTooltip.getAlpha() : 1f, show ? 1f : 0f), + ObjectAnimator.ofFloat(mediaBanTooltip, View.TRANSLATION_Y, show ? AndroidUtilities.dp(12) : mediaBanTooltip.getTranslationY(), show ? 0 : AndroidUtilities.dp(12)) ); - AnimatorSet.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - AndroidUtilities.runOnUIThread(() -> { - if (mediaBanTooltip == null) { - return; - } - AnimatorSet AnimatorSet1 = new AnimatorSet(); - AnimatorSet1.playTogether( - ObjectAnimator.ofFloat(mediaBanTooltip, View.ALPHA, 0.0f) - ); - AnimatorSet1.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation1) { - if (mediaBanTooltip != null) { - mediaBanTooltip.setVisibility(View.INVISIBLE); - } - } - }); - AnimatorSet1.setDuration(300); - AnimatorSet1.start(); - }, 5000); - } - }); - AnimatorSet.setDuration(300); - AnimatorSet.start(); + if (hideStickersBan != null) { + AndroidUtilities.cancelRunOnUIThread(hideStickersBan); + } + if (show) { + AndroidUtilities.runOnUIThread(hideStickersBan = () -> showStickerBanHint(false, emoji, gif), 3500); + } + showStickersBanAnimator.setDuration(320); + showStickersBanAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + showStickersBanAnimator.start(); } private void updateVisibleTrendingSets() { @@ -5716,7 +6114,7 @@ public void didReceivedNotification(int id, int account, Object... 
args) { gifTabs.invalidateTabs(); } } else if (id == NotificationCenter.newEmojiSuggestionsAvailable) { - if (emojiGridView != null && needEmojiSearch && (emojiSearchField.progressDrawable.isAnimating() || emojiGridView.getAdapter() == emojiSearchAdapter) && !TextUtils.isEmpty(emojiSearchAdapter.lastSearchEmojiString)) { + if (emojiGridView != null && needEmojiSearch && (emojiSearchField.searchStateDrawable.getIconState() == SearchStateDrawable.State.STATE_PROGRESS || emojiGridView.getAdapter() == emojiSearchAdapter) && !TextUtils.isEmpty(emojiSearchAdapter.lastSearchEmojiString)) { emojiSearchAdapter.search(emojiSearchAdapter.lastSearchEmojiString); } } else if (id == NotificationCenter.currentUserPremiumStatusChanged) { @@ -5751,18 +6149,20 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); - TLRPC.StickerSetCovered set = (TLRPC.StickerSetCovered) getTag(); - if (MediaDataController.getInstance(currentAccount).isStickerPackUnread(emoji, set.set.id) && dotPaint != null) { - int x = canvas.getWidth() - AndroidUtilities.dp(8); - int y = AndroidUtilities.dp(14); - canvas.drawCircle(x, y, AndroidUtilities.dp(3), dotPaint); + if (!emoji) { + TLRPC.StickerSetCovered set = (TLRPC.StickerSetCovered) getTag(); + if (MediaDataController.getInstance(currentAccount).isStickerPackUnread(emoji, set.set.id) && dotPaint != null) { + int x = canvas.getWidth() - AndroidUtilities.dp(8); + int y = AndroidUtilities.dp(14); + canvas.drawCircle(x, y, AndroidUtilities.dp(3), dotPaint); + } } } }; - imageView.setSize(AndroidUtilities.dp(emoji ? 36 : 30), AndroidUtilities.dp(emoji ? 36 : 30)); + imageView.setSize(AndroidUtilities.dp(emoji ? 24 : 30), AndroidUtilities.dp(emoji ? 24 : 30)); imageView.setLayerNum(1); imageView.setAspectFit(true); - imageView.setLayoutParams(new RecyclerView.LayoutParams(AndroidUtilities.dp(42), AndroidUtilities.dp(42))); + imageView.setLayoutParams(new RecyclerView.LayoutParams(AndroidUtilities.dp(emoji ? 34 : 42), AndroidUtilities.dp(emoji ? 34 : 42))); return new RecyclerListView.Holder(imageView); } @@ -5772,10 +6172,44 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { TLRPC.StickerSetCovered set = (emoji ? featuredEmojiSets : featuredStickerSets).get(position); imageView.setTag(set); - TLRPC.Document document = set.cover; - if (!set.covers.isEmpty()) { - document = set.covers.get(0); + ArrayList setDocuments; + if (set instanceof TLRPC.TL_stickerSetFullCovered) { + setDocuments = ((TLRPC.TL_stickerSetFullCovered) set).documents; + } else if (set instanceof TLRPC.TL_stickerSetNoCovered) { + TLRPC.TL_messages_stickerSet fullSet = MediaDataController.getInstance(currentAccount).getStickerSet(MediaDataController.getInputStickerSet(set.set), false); + if (fullSet == null) { + setDocuments = null; + } else { + setDocuments = fullSet.documents; + } + } else { + setDocuments = set.covers; + } + + TLRPC.Document document = null; + if (set.cover != null) { + document = set.cover; + } else if (setDocuments != null && !setDocuments.isEmpty()) { + if (set.set != null) { + for (int i = 0; i < setDocuments.size(); ++i) { + if (setDocuments.get(i).id == set.set.thumb_document_id) { + document = setDocuments.get(i); + break; + } + } + } + if (document == null) { + document = setDocuments.get(0); + } + } + if (document == null) { + return; + } + + if (this.emoji) { + imageView.setColorFilter(MessageObject.isTextColorEmoji(document) ? 
Theme.chat_animatedEmojiTextColorFilter : null); } + TLObject object = FileLoader.getClosestPhotoSizeWithSize(set.set.thumbs, 90); SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(set.set.thumbs, Theme.key_emptyListPlaceholder, 0.2f); if (svgThumb != null) { @@ -5796,10 +6230,11 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } else { return; } + if (imageLocation == null) { return; } - String filter = SharedConfig.getLiteMode().enabled() ? "30_30_firstframe" : "30_30"; + String filter = !LiteMode.isEnabled(emoji ? LiteMode.FLAG_ANIMATED_EMOJI_KEYBOARD : LiteMode.FLAG_ANIMATED_STICKERS_KEYBOARD) ? "30_30_firstframe" : "30_30"; if (object instanceof TLRPC.Document && (MessageObject.isAnimatedStickerDocument(document, true) || MessageObject.isVideoSticker(document))) { if (svgThumb != null) { imageView.setImage(ImageLocation.getForDocument(document), filter, svgThumb, 0, set); @@ -6005,7 +6440,7 @@ public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { alertDialog.show(); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } } @@ -6143,7 +6578,7 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } case 5: { StickerSetNameCell cell = (StickerSetNameCell) holder.itemView; - cell.setText(MediaDataController.getInstance(currentAccount).loadFeaturedPremium ? LocaleController.getString("FeaturedStickersPremium", R.string.FeaturedStickersPremium) : LocaleController.getString("FeaturedStickers", R.string.FeaturedStickers), R.drawable.msg_close); + cell.setText(MediaDataController.getInstance(currentAccount).loadFeaturedPremium ? 
LocaleController.getString("FeaturedStickersPremium", R.string.FeaturedStickersPremium) : LocaleController.getString("FeaturedStickers", R.string.FeaturedStickers), R.drawable.msg_close, LocaleController.getString("AccDescrCloseTrendingStickers", R.string.AccDescrCloseTrendingStickers)); break; } } @@ -6189,8 +6624,9 @@ private void updateItems() { documents = recentStickers; packStartPosition.put(key = "recent", totalItems); } else if (a == -1) { - documents = premiumStickers; - packStartPosition.put(key = "premium", totalItems); + continue; +// documents = premiumStickers; +// packStartPosition.put(key = "premium", totalItems); } else { key = null; pack = packs.get(a); @@ -6317,6 +6753,7 @@ private class EmojiGridAdapter extends RecyclerListView.SelectionAdapter { private int trendingHeaderRow = -1; private int trendingRow = -1; private int firstTrendingRow = -1; + private int recentlyUsedHeaderRow = -1; private ArrayList frozenEmojiPacks; private ArrayList rowHashCodes = new ArrayList<>(); @@ -6328,10 +6765,6 @@ private class EmojiGridAdapter extends RecyclerListView.SelectionAdapter { private int itemCount; public int plainEmojisCount; - void EmojiGridAdapter() { - MediaDataController mediaDataController = MediaDataController.getInstance(currentAccount); - } - @Override public int getItemCount() { return itemCount; @@ -6357,37 +6790,64 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType break; case VIEW_TYPE_HEADER: view = new StickerSetNameCell(getContext(), true, resourcesProvider); + ((StickerSetNameCell) view).setOnIconClickListener(e -> { + if (featuredEmojiSets == null || featuredEmojiSets.isEmpty() || featuredEmojiSets.get(0).set == null) { + return; + } + long lastSetId = featuredEmojiSets.get(0).set.id; + MessagesController.getEmojiSettings(currentAccount).edit().putLong("emoji_featured_hidden", lastSetId).commit(); + if (emojiAdapter != null) { + emojiAdapter.notifyItemRangeRemoved(1, 3); + } + if (emojiTabs != null) { + emojiTabs.updateEmojiPacks(getEmojipacks()); + } + updateRows(); + }); break; case VIEW_TYPE_PACK_HEADER: view = new EmojiPackHeader(getContext()); break; case VIEW_TYPE_TRENDING: TrendingListView listView = new TrendingListView(getContext(), trendingEmojiAdapter = new TrendingAdapter(true)); - listView.setPadding(AndroidUtilities.dp(8), 0, AndroidUtilities.dp(8), 0); + listView.setPadding(AndroidUtilities.dp(8), AndroidUtilities.dp(4), AndroidUtilities.dp(8), 0); listView.setClipToPadding(false); listView.addItemDecoration(new RecyclerView.ItemDecoration() { @Override public void getItemOffsets(@NonNull Rect outRect, @NonNull View view, @NonNull RecyclerView parent, @NonNull RecyclerView.State state) { - outRect.right = AndroidUtilities.dp(8); + outRect.right = AndroidUtilities.dp(2); } }); listView.setOnItemClickListener((item, position) -> { - ArrayList inputStickerSets = new ArrayList<>(); - List featuredStickerSets = MediaDataController.getInstance(currentAccount).getFeaturedEmojiSets(); - for (int i = 0; featuredStickerSets != null && i < featuredStickerSets.size(); ++i) { - TLRPC.StickerSetCovered set = featuredStickerSets.get(i); - if (set != null && set.set != null) { - TLRPC.TL_inputStickerSetID inputStickerSet = new TLRPC.TL_inputStickerSetID(); - inputStickerSet.id = set.set.id; - inputStickerSet.access_hash = set.set.access_hash; - inputStickerSets.add(inputStickerSet); + if (item.getTag() instanceof TLRPC.StickerSetCovered) { + TLRPC.StickerSetCovered highlightSet = (TLRPC.StickerSetCovered) item.getTag(); + 
ArrayList inputStickerSets = new ArrayList<>(); + ArrayList sets = MediaDataController.getInstance(currentAccount).getFeaturedEmojiSets(); + int highlight = -1; + for (int i = 0; i < sets.size(); ++i) { + TLRPC.StickerSetCovered set = sets.get(i); + if (set != null && set.set != null) { + TLRPC.TL_inputStickerSetID inputStickerSet = new TLRPC.TL_inputStickerSetID(); + inputStickerSet.id = set.set.id; + inputStickerSet.access_hash = set.set.access_hash; + inputStickerSets.add(inputStickerSet); + + if (highlightSet != null && highlightSet.set != null && highlightSet.set.id == set.set.id) { + highlight = i; + } + } + } + + MediaDataController.getInstance(currentAccount).markFeaturedStickersAsRead(true, true); + EmojiPacksAlert alert = new EmojiPacksAlert(fragment, getContext(), fragment == null ? null : fragment.getResourceProvider(), inputStickerSets); + if (highlight >= 0) { + alert.highlight(highlight); + } + if (fragment != null) { + fragment.showDialog(alert); + } else { + alert.show(); } - } - MediaDataController.getInstance(currentAccount).markFeaturedStickersAsRead(true, true); - if (fragment != null) { - fragment.showDialog(new EmojiPacksAlert(fragment, getContext(), fragment.getResourceProvider(), inputStickerSets)); - } else { - new EmojiPacksAlert(null, getContext(), resourcesProvider, inputStickerSets).show(); } }); view = listView; @@ -6424,6 +6884,9 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, @SuppressLint("Recy if (needEmojiSearch) { position--; } + if (recentlyUsedHeaderRow >= 0) { + position--; + } if (trendingRow >= 0) { position -= 2; } @@ -6497,19 +6960,19 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, @SuppressLint("Recy StickerSetNameCell cell = (StickerSetNameCell) holder.itemView; cell.position = position; index = positionToSection.get(position); - String text; if (position == trendingHeaderRow) { - text = LocaleController.getString("FeaturedEmojiPacks", R.string.FeaturedEmojiPacks); + cell.setText(LocaleController.getString("FeaturedEmojiPacks", R.string.FeaturedEmojiPacks), R.drawable.msg_close, LocaleController.getString("AccDescrCloseTrendingEmoji", R.string.AccDescrCloseTrendingEmoji)); + } else if (position == recentlyUsedHeaderRow) { + cell.setText(LocaleController.getString("RecentlyUsed", R.string.RecentlyUsed), 0); } else if (index >= emojiTitles.length) { try { - text = emojipacksProcessed.get(index - emojiTitles.length).set.title; + cell.setText(emojipacksProcessed.get(index - emojiTitles.length).set.title, 0); } catch (Exception ignore) { - text = ""; + cell.setText("", 0); } } else { - text = emojiTitles[index]; + cell.setText(emojiTitles[index], 0); } - cell.setText(text, 0); break; } case VIEW_TYPE_EXPAND: @@ -6574,7 +7037,7 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, @SuppressLint("Recy public int getItemViewType(int position) { if (position == trendingRow) { return VIEW_TYPE_TRENDING; - } else if (position == trendingHeaderRow) { + } else if (position == trendingHeaderRow || position == recentlyUsedHeaderRow) { return VIEW_TYPE_HEADER; } else if (positionToSection.indexOfKey(position) >= 0) { return positionToSection.get(position) >= EmojiData.dataColored.length ? 
VIEW_TYPE_PACK_HEADER : VIEW_TYPE_HEADER; @@ -6769,17 +7232,20 @@ public void updateRows() { packStartPosition.clear(); rowHashCodes.clear(); itemCount = 0; + boolean isPremium = UserConfig.getInstance(currentAccount).isPremium() || allowEmojisForNonPremium; if (needEmojiSearch) { itemCount++; rowHashCodes.add(-1); } -// if (featuredEmojiSets.size() > 0) { -// trendingHeaderRow = itemCount++; -// trendingRow = itemCount++; -// } else { -// trendingHeaderRow = -1; -// trendingRow = -1; -// } + if (isPremium && allowAnimatedEmoji && featuredEmojiSets.size() > 0 && featuredEmojiSets.get(0).set != null && MessagesController.getEmojiSettings(currentAccount).getLong("emoji_featured_hidden", 0) != featuredEmojiSets.get(0).set.id) { + trendingHeaderRow = itemCount++; + trendingRow = itemCount++; + recentlyUsedHeaderRow = itemCount++; + } else { + trendingHeaderRow = -1; + trendingRow = -1; + recentlyUsedHeaderRow = -1; + } ArrayList recent = getRecentEmoji(); if (emojiTabs != null) { emojiTabs.showRecent(!recent.isEmpty()); @@ -6799,7 +7265,6 @@ public void updateRows() { } } - boolean isPremium = UserConfig.getInstance(currentAccount).isPremium() || allowEmojisForNonPremium; int maxlen = emojiLayoutManager.getSpanCount() * 3; plainEmojisCount = itemCount; firstTrendingRow = -1; @@ -7049,6 +7514,7 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { switch (holder.getItemViewType()) { case 0: { ImageViewEmoji imageView = (ImageViewEmoji) holder.itemView; + imageView.position = position; imageView.pack = null; String code; @@ -7104,6 +7570,10 @@ public int getItemViewType(int position) { } public void search(String text) { + search(text, true); + } + + public void search(String text, boolean delay) { if (TextUtils.isEmpty(text)) { lastSearchEmojiString = null; if (emojiGridView.getAdapter() != emojiAdapter) { @@ -7118,11 +7588,11 @@ public void search(String text) { AndroidUtilities.cancelRunOnUIThread(searchRunnable); } if (!TextUtils.isEmpty(lastSearchEmojiString)) { - AndroidUtilities.runOnUIThread(searchRunnable = new Runnable() { - @Override - public void run() { - emojiSearchField.progressDrawable.startAnimation(); - String query = lastSearchEmojiString; + emojiSearchField.showProgress(true); + AndroidUtilities.runOnUIThread(searchRunnable = () -> { + final LinkedHashSet documentIds = new LinkedHashSet<>(); + final String query = lastSearchEmojiString; + final Runnable fullSearch = () -> { String[] newLanguage = AndroidUtilities.getCurrentKeyboardLanguage(); if (!Arrays.equals(lastSearchKeyboardLanguage, newLanguage)) { MediaDataController.getInstance(currentAccount).fetchNewEmojiKeywords(newLanguage); @@ -7133,18 +7603,92 @@ public void run() { public void run(ArrayList param, String alias) { if (query.equals(lastSearchEmojiString)) { lastSearchAlias = alias; - emojiSearchField.progressDrawable.stopAnimation(); + emojiSearchField.showProgress(false); searchWas = true; if (emojiGridView.getAdapter() != emojiSearchAdapter) { emojiGridView.setAdapter(emojiSearchAdapter); } - result = param; + result.clear(); + searchByPackname(query, documentIds); + for (long documentId : documentIds) { + MediaDataController.KeywordResult r = new MediaDataController.KeywordResult(); + r.keyword = ""; + r.emoji = "animated_" + documentId; + result.add(r); + } + for (int i = 0; i < param.size(); ++i) { + MediaDataController.KeywordResult r = param.get(i); + if (r != null && r.emoji != null && (!r.emoji.startsWith("animated_") || 
!documentIds.contains(Long.parseLong(r.emoji.substring(9))))) { + result.add(r); + } + } notifyDataSetChanged(); } } - }, true); + }, null, true, false, true, 25); + }; + if (Emoji.fullyConsistsOfEmojis(query)) { + StickerCategoriesListView.search.fetch(UserConfig.selectedAccount, query, list -> { + if (list != null) { + documentIds.addAll(list.document_id); + } + fullSearch.run(); + }); + } else { + fullSearch.run(); + } + }, delay ? 300 : 0); + } + } + + private ArrayList addedSets = new ArrayList<>(); + + private void searchByPackname(String query, LinkedHashSet documentIds) { + if (query == null || query.length() <= 3 || !UserConfig.getInstance(currentAccount).isPremium()) { + return; + } + String translitQuery = LocaleController.getInstance().getTranslitString(query).toLowerCase(); + + ArrayList sets = MediaDataController.getInstance(currentAccount).getStickerSets(MediaDataController.TYPE_EMOJIPACKS); + ArrayList featuredSets = MediaDataController.getInstance(currentAccount).getFeaturedEmojiSets(); + addedSets.clear(); + + for (int i = 0; i < sets.size(); ++i) { + TLRPC.TL_messages_stickerSet fullSet = sets.get(i); + if (fullSet == null || fullSet.set == null) { + continue; + } + checkAddPackToResults(fullSet.set, fullSet.documents, translitQuery, documentIds); + } + for (int i = 0; i < featuredSets.size(); ++i) { + TLRPC.StickerSetCovered coveredSet = featuredSets.get(i); + if (coveredSet == null || coveredSet.set == null) { + continue; + } + if (coveredSet instanceof TLRPC.TL_stickerSetFullCovered) { + checkAddPackToResults(coveredSet.set, ((TLRPC.TL_stickerSetFullCovered) coveredSet).documents, translitQuery, documentIds); + } else if (coveredSet instanceof TLRPC.TL_stickerSetNoCovered) { + TLRPC.TL_inputStickerSetID inputStickerSetID = new TLRPC.TL_inputStickerSetID(); + inputStickerSetID.id = coveredSet.set.id; + TLRPC.TL_messages_stickerSet fullSet = MediaDataController.getInstance(currentAccount).getStickerSet(inputStickerSetID, true); + if (fullSet != null) { + checkAddPackToResults(fullSet.set, fullSet.documents, translitQuery, documentIds); + } + } else { + checkAddPackToResults(coveredSet.set, coveredSet.covers, translitQuery, documentIds); + } + } + } + + private void checkAddPackToResults(TLRPC.StickerSet set, ArrayList documents, String translitQuery, LinkedHashSet documentIds) { + if (set.title != null && !addedSets.contains(set.id) && LocaleController.getInstance().getTranslitString(set.title.toLowerCase()).contains(translitQuery)) { + for (TLRPC.Document document : documents) { + if (document == null) { + continue; } - }, 300); + documentIds.add(document.id); + } + addedSets.add(set.id); } } } @@ -7157,8 +7701,12 @@ public void destroyItem(ViewGroup viewGroup, int position, Object object) { @Override public boolean canScrollToTab(int position) { - if ((position == 1 || position == 2) && currentChatId != 0) { - showStickerBanHint(position == 1); + if ((position == 1 || position == 2) && stickersBanned) { + showStickerBanHint(true, false, position == 1); + return false; + } + if (position == 0 && emojiBanned) { + showStickerBanHint(true, true, false); return false; } return true; @@ -7169,7 +7717,8 @@ public int getCount() { } public Drawable getPageIconDrawable(int position) { - return tabIcons[position]; + return null; +// return tabIcons[position]; } public CharSequence getPageTitle(int position) { @@ -7185,14 +7734,26 @@ public CharSequence getPageTitle(int position) { } @Override - public void customOnDraw(Canvas canvas, int position) { - if (position == 2 && 
!MediaDataController.getInstance(currentAccount).getUnreadStickerSets().isEmpty() && dotPaint != null) { - int x = canvas.getWidth() / 2 + AndroidUtilities.dp(4 + 5); - int y = canvas.getHeight() / 2 - AndroidUtilities.dp(13 - 5); - canvas.drawCircle(x, y, AndroidUtilities.dp(5), dotPaint); + public int getTabPadding(int position) { + switch (position) { + case 0: + return AndroidUtilities.dp(18); + case 1: + case 2: + default: + return AndroidUtilities.dp(12); } } + @Override + public void customOnDraw(Canvas canvas, View view, int position) { +// if (position == 2 && !MediaDataController.getInstance(currentAccount).getUnreadStickerSets().isEmpty() && dotPaint != null) { +// int x = canvas.getWidth() - view.getPaddingRight(); +// int y = canvas.getHeight() / 2 - AndroidUtilities.dp(13 - 5); +// canvas.drawCircle(x, y, AndroidUtilities.dp(5), dotPaint); +// } + } + public Object instantiateItem(ViewGroup viewGroup, int position) { View view = currentTabs.get(position).view; viewGroup.addView(view); @@ -7230,6 +7791,8 @@ private class GifAdapter extends RecyclerListView.SelectionAdapter { private int trendingSectionItem = -1; private int firstResultItem = -1; + private boolean addSearch; + private boolean showTrendingWhenSearchEmpty; public GifAdapter(Context context) { @@ -7259,7 +7822,7 @@ public int getItemCount() { @Override public int getItemViewType(int position) { - if (position == 0) { + if (position == 0 && addSearch) { return 1; // search field } else if (withRecent && position == trendingSectionItem) { return 2; // trending section @@ -7275,6 +7838,7 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType switch (viewType) { case 0: ContextLinkCell cell = new ContextLinkCell(context); + cell.setIsKeyboard(true); cell.setCanPreviewGif(true); view = cell; break; @@ -7308,7 +7872,7 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { if (firstResultItem >= 0 && position >= firstResultItem) { cell.setLink(results.get(position - firstResultItem), bot, true, false, false, true); } else { - cell.setGif(recentGifs.get(position - 1), false); + cell.setGif(recentGifs.get(position - (addSearch ? 1 : 0)), false); } break; } @@ -7326,7 +7890,10 @@ private void updateItems() { trendingSectionItem = -1; firstResultItem = -1; - itemsCount = 1; // search field + itemsCount = 0; + if (addSearch) { + itemsCount++;// search field + } if (withRecent) { itemsCount += recentItemsCount; @@ -7413,6 +7980,10 @@ private void searchBotUser() { } public void search(String text) { + search(text, true); + } + + public void search(String text, boolean delay) { if (withRecent) { return; } @@ -7450,13 +8021,13 @@ public void search(String text) { if (!TextUtils.isEmpty(lastSearchImageString)) { AndroidUtilities.runOnUIThread(searchRunnable = () -> { search(text, "", true); - }, 300); + }, delay ? 
300 : 0); } } public void searchEmoji(String emoji) { if (lastSearchIsEmoji && TextUtils.equals(lastSearchImageString, emoji)) { - gifLayoutManager.scrollToPositionWithOffset(1, 0); + gifLayoutManager.scrollToPositionWithOffset(0, 0); return; } search(emoji, "", true, true, true); @@ -7486,13 +8057,13 @@ protected void search(final String query, final String offset, boolean searchUse if (searchUser) { searchBotUser(); if (!withRecent) { - gifSearchField.progressDrawable.startAnimation(); + gifSearchField.showProgress(true); } } return; } if (!withRecent && TextUtils.isEmpty(offset)) { - gifSearchField.progressDrawable.startAnimation(); + gifSearchField.showProgress(true); } bot = (TLRPC.User) object; @@ -7527,7 +8098,7 @@ protected void search(final String query, final String offset, boolean searchUse req.bot = MessagesController.getInstance(currentAccount).getInputUser(bot); req.offset = offset; req.peer = new TLRPC.TL_inputPeerEmpty(); - reqId = ConnectionsManager.getInstance(currentAccount).sendRequest(req, requestDelegate, ConnectionsManager.RequestFlagFailOnServerErrors); + reqId = ConnectionsManager.getInstance(currentAccount).sendRequest(req, requestDelegate); } } @@ -7546,7 +8117,7 @@ private void processResponse(final String query, final String offset, boolean se if (TextUtils.isEmpty(offset)) { results.clear(); resultsMap.clear(); - gifSearchField.progressDrawable.stopAnimation(); + gifSearchField.showProgress(false); } } @@ -7577,16 +8148,16 @@ private void processResponse(final String query, final String offset, boolean se updateItems(); if (withRecent) { if (oldCount != 0) { - notifyItemChanged(recentItemsCount + 1 + oldCount); - notifyItemRangeInserted(recentItemsCount + 1 + oldCount + 1, addedCount); + notifyItemChanged(recentItemsCount + (gifAdapter.addSearch ? 1 : 0) + oldCount); + notifyItemRangeInserted(recentItemsCount + (gifAdapter.addSearch ? 1 : 0) + oldCount + 1, addedCount); } else { - notifyItemRangeInserted(recentItemsCount + 1, addedCount + 1); + notifyItemRangeInserted(recentItemsCount + (gifAdapter.addSearch ? 1 : 0), addedCount + 1); } } else { if (oldCount != 0) { notifyItemChanged(oldCount); } - notifyItemRangeInserted(oldCount + 1, addedCount); + notifyItemRangeInserted(oldCount + (gifAdapter.addSearch ? 1 : 0), addedCount); } } else { notifyDataSetChanged(); @@ -7673,10 +8244,10 @@ public GifLayoutManager(Context context) { setSpanSizeLookup(new GridLayoutManager.SpanSizeLookup() { @Override public int getSpanSize(int position) { - if (position == 0 || gifGridView.getAdapter() == gifSearchAdapter && gifSearchAdapter.results.isEmpty()) { + if (position == 0 && gifAdapter.addSearch || gifGridView.getAdapter() == gifSearchAdapter && gifSearchAdapter.results.isEmpty()) { return getSpanCount(); } - return getSpanSizeForItem(position - 1); + return getSpanSizeForItem(position - (gifAdapter.addSearch ? 
1 : 0)); } }); } @@ -7773,13 +8344,13 @@ public GifProgressEmptyView(@NonNull Context context) { imageView.setScaleType(ImageView.ScaleType.CENTER); imageView.setImageResource(R.drawable.gif_empty); imageView.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_emojiPanelEmptyText), PorterDuff.Mode.SRC_IN)); - addView(imageView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 0, 0, 59)); + addView(imageView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 8, 0, 0)); textView = new TextView(getContext()); textView.setText(LocaleController.getString("NoGIFsFound", R.string.NoGIFsFound)); textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); textView.setTextColor(getThemedColor(Theme.key_chat_emojiPanelEmptyText)); - addView(textView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 0, 0, 9)); + addView(textView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 42, 0, 0)); progressView = new RadialProgressView(context, resourcesProvider); progressView.setVisibility(GONE); @@ -7839,34 +8410,43 @@ private class StickersSearchGridAdapter extends RecyclerListView.SelectionAdapte boolean cleared; private String searchQuery; private Runnable searchRunnable = new Runnable() { + String query; + int lastId; + + final ArrayList serverPacks = new ArrayList<>(); + final ArrayList localPacks = new ArrayList<>(); + final HashMap localPacksByShortName = new HashMap<>(); + final HashMap localPacksByName = new HashMap<>(); + final HashMap, String> emojiStickers = new HashMap<>(); + final ArrayList> emojiArrays = new ArrayList<>(); - private void clear() { - if (cleared) { + final ArrayList emojiStickersArray = new ArrayList<>(0); + final LongSparseArray emojiStickersMap = new LongSparseArray<>(0); + + private void searchFinish() { + if (emojiSearchId != lastId) { return; } - cleared = true; - emojiStickers.clear(); - emojiArrays.clear(); - localPacks.clear(); - serverPacks.clear(); - localPacksByShortName.clear(); - localPacksByName.clear(); - } - @Override - public void run() { - if (TextUtils.isEmpty(searchQuery)) { - return; + StickersSearchGridAdapter.this.localPacks = localPacks; + StickersSearchGridAdapter.this.serverPacks = serverPacks; + StickersSearchGridAdapter.this.localPacksByShortName = localPacksByShortName; + StickersSearchGridAdapter.this.localPacksByName = localPacksByName; + StickersSearchGridAdapter.this.emojiStickers = emojiStickers; + StickersSearchGridAdapter.this.emojiArrays = emojiArrays; + stickersSearchField.showProgress(false); + + if (stickersGridView.getAdapter() != stickersSearchGridAdapter) { + stickersGridView.setAdapter(stickersSearchGridAdapter); } - stickersSearchField.progressDrawable.startAnimation(); - cleared = false; - int lastId = ++emojiSearchId; + notifyDataSetChanged(); + } - final ArrayList emojiStickersArray = new ArrayList<>(0); - final LongSparseArray emojiStickersMap = new LongSparseArray<>(0); - HashMap> allStickers = MediaDataController.getInstance(currentAccount).getAllStickers(); - if (searchQuery.length() <= 14) { - CharSequence emoji = searchQuery; + private void addFromAllStickers(Runnable finished) { + final HashMap> allStickers = MediaDataController.getInstance(currentAccount).getAllStickers(); + + if (query.length() <= 14) { + CharSequence emoji = query; int length = emoji.length(); for (int a = 0; a < length; a++) { if (a < length - 1 && 
(emoji.charAt(a) == 0xD83C && emoji.charAt(a + 1) >= 0xDFFB && emoji.charAt(a + 1) <= 0xDFFF || emoji.charAt(a) == 0x200D && (emoji.charAt(a + 1) == 0x2640 || emoji.charAt(a + 1) == 0x2642))) { @@ -7881,7 +8461,6 @@ public void run() { } ArrayList newStickers = allStickers != null ? allStickers.get(emoji.toString()) : null; if (newStickers != null && !newStickers.isEmpty()) { - clear(); emojiStickersArray.addAll(newStickers); for (int a = 0, size = newStickers.size(); a < size; a++) { TLRPC.Document document = newStickers.get(a); @@ -7891,37 +8470,41 @@ public void run() { emojiArrays.add(emojiStickersArray); } } - if (allStickers != null && !allStickers.isEmpty() && searchQuery.length() > 1) { + finished.run(); + } + + private void addFromSuggestions(Runnable finished) { + final HashMap> allStickers = MediaDataController.getInstance(currentAccount).getAllStickers(); + + if (allStickers != null && !allStickers.isEmpty() && query.length() > 1) { String[] newLanguage = AndroidUtilities.getCurrentKeyboardLanguage(); if (!Arrays.equals(lastSearchKeyboardLanguage, newLanguage)) { MediaDataController.getInstance(currentAccount).fetchNewEmojiKeywords(newLanguage); } lastSearchKeyboardLanguage = newLanguage; - MediaDataController.getInstance(currentAccount).getEmojiSuggestions(lastSearchKeyboardLanguage, searchQuery, false, new MediaDataController.KeywordResultCallback() { - @Override - public void run(ArrayList param, String alias) { - if (lastId != emojiSearchId) { - return; - } - boolean added = false; - for (int a = 0, size = param.size(); a < size; a++) { - String emoji = param.get(a).emoji; - ArrayList newStickers = allStickers != null ? allStickers.get(emoji) : null; - if (newStickers != null && !newStickers.isEmpty()) { - clear(); - if (!emojiStickers.containsKey(newStickers)) { - emojiStickers.put(newStickers, emoji); - emojiArrays.add(newStickers); - added = true; - } + MediaDataController.getInstance(currentAccount).getEmojiSuggestions(lastSearchKeyboardLanguage, searchQuery, false, (param, alias) -> { + if (emojiSearchId != lastId) { + return; + } + + for (int a = 0, size = param.size(); a < size; a++) { + String emoji = param.get(a).emoji; + ArrayList newStickers = allStickers.get(emoji); + if (newStickers != null && !newStickers.isEmpty()) { + if (!emojiStickers.containsKey(newStickers)) { + emojiStickers.put(newStickers, emoji); + emojiArrays.add(newStickers); } } - if (added) { - notifyDataSetChanged(); - } } + finished.run(); }, false); + } else { + finished.run(); } + } + + private void addLocalPacks(Runnable finished) { ArrayList local = MediaDataController.getInstance(currentAccount).getStickerSets(MediaDataController.TYPE_IMAGE); MessagesController.getInstance(currentAccount).filterPremiumStickers(local); int index; @@ -7929,13 +8512,11 @@ public void run(ArrayList param, String alias TLRPC.TL_messages_stickerSet set = local.get(a); if ((index = AndroidUtilities.indexOfIgnoreCase(set.set.title, searchQuery)) >= 0) { if (index == 0 || set.set.title.charAt(index - 1) == ' ') { - clear(); localPacks.add(set); localPacksByName.put(set, index); } } else if (set.set.short_name != null && (index = AndroidUtilities.indexOfIgnoreCase(set.set.short_name, searchQuery)) >= 0) { if (index == 0 || set.set.short_name.charAt(index - 1) == ' ') { - clear(); localPacks.add(set); localPacksByShortName.put(set, true); } @@ -7947,48 +8528,50 @@ public void run(ArrayList param, String alias TLRPC.TL_messages_stickerSet set = local.get(a); if ((index = 
AndroidUtilities.indexOfIgnoreCase(set.set.title, searchQuery)) >= 0) { if (index == 0 || set.set.title.charAt(index - 1) == ' ') { - clear(); localPacks.add(set); localPacksByName.put(set, index); } } else if (set.set.short_name != null && (index = AndroidUtilities.indexOfIgnoreCase(set.set.short_name, searchQuery)) >= 0) { if (index == 0 || set.set.short_name.charAt(index - 1) == ' ') { - clear(); localPacks.add(set); localPacksByShortName.put(set, true); } } } - if ((!localPacks.isEmpty() || !emojiStickers.isEmpty()) && stickersGridView.getAdapter() != stickersSearchGridAdapter) { - stickersGridView.setAdapter(stickersSearchGridAdapter); - } + finished.run(); + } + + private void searchStickerSets(Runnable finished) { final TLRPC.TL_messages_searchStickerSets req = new TLRPC.TL_messages_searchStickerSets(); - req.q = searchQuery; - reqId = ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { + req.q = query; + reqId = ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + if (emojiSearchId != lastId) { + return; + } + if (response instanceof TLRPC.TL_messages_foundStickerSets) { - AndroidUtilities.runOnUIThread(() -> { - if (req.q.equals(searchQuery)) { - clear(); - stickersSearchField.progressDrawable.stopAnimation(); - reqId = 0; - if (stickersGridView.getAdapter() != stickersSearchGridAdapter) { - stickersGridView.setAdapter(stickersSearchGridAdapter); - } - TLRPC.TL_messages_foundStickerSets res = (TLRPC.TL_messages_foundStickerSets) response; - serverPacks.addAll(res.sets); - notifyDataSetChanged(); - } - }); + reqId = 0; + TLRPC.TL_messages_foundStickerSets res = (TLRPC.TL_messages_foundStickerSets) response; + serverPacks.addAll(res.sets); } - }); - if (Emoji.isValidEmoji(searchQuery)) { + finished.run(); + })); + } + + private void searchStickers(Runnable finished) { + if (Emoji.fullyConsistsOfEmojis(searchQuery)) { final TLRPC.TL_messages_getStickers req2 = new TLRPC.TL_messages_getStickers(); - req2.emoticon = searchQuery; + req2.emoticon = query; req2.hash = 0; reqId2 = ConnectionsManager.getInstance(currentAccount).sendRequest(req2, (response, error) -> AndroidUtilities.runOnUIThread(() -> { - if (req2.emoticon.equals(searchQuery)) { - reqId2 = 0; + if (emojiSearchId != lastId) { + return; + } + + reqId2 = 0; + if (req2.emoticon.equals(query)) { if (!(response instanceof TLRPC.TL_messages_stickers)) { + finished.run(); return; } TLRPC.TL_messages_stickers res = (TLRPC.TL_messages_stickers) response; @@ -8006,12 +8589,48 @@ public void run(ArrayList param, String alias if (oldCount == 0) { emojiArrays.add(emojiStickersArray); } - notifyDataSetChanged(); } } + finished.run(); })); + } else { + finished.run(); } - notifyDataSetChanged(); + } + + @Override + public void run() { + if (TextUtils.isEmpty(searchQuery)) { + if (stickersGridView.getAdapter() != stickersGridAdapter) { + stickersGridView.setAdapter(stickersGridAdapter); + } + notifyDataSetChanged(); + return; + } + lastId = ++emojiSearchId; + query = searchQuery; + + serverPacks.clear(); + localPacks.clear(); + localPacksByShortName.clear(); + localPacksByName.clear(); + emojiStickers.clear(); + emojiArrays.clear(); + + emojiStickersArray.clear(); + emojiStickersMap.clear(); + + stickersSearchField.showProgress(true); + + Utilities.raceCallbacks( + this::searchFinish, + + this::addFromAllStickers, + this::addFromSuggestions, + this::addLocalPacks, + this::searchStickerSets, + this::searchStickers + ); } }; @@ -8038,6 
+8657,10 @@ public Object getItem(int i) { } public void search(String text) { + search(text, true); + } + + public void search(String text, boolean delay) { if (reqId != 0) { ConnectionsManager.getInstance(currentAccount).cancelRequest(reqId, true); reqId = 0; @@ -8055,8 +8678,10 @@ public void search(String text) { stickersGridView.setAdapter(stickersGridAdapter); } notifyDataSetChanged(); + stickersSearchField.showProgress(false); } else { searchQuery = text.toLowerCase(); + stickersSearchField.showProgress(true); } AndroidUtilities.cancelRunOnUIThread(searchRunnable); AndroidUtilities.runOnUIThread(searchRunnable, 300); @@ -8134,13 +8759,14 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { imageView.setScaleType(ImageView.ScaleType.CENTER); imageView.setImageResource(R.drawable.stickers_empty); imageView.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_emojiPanelEmptyText), PorterDuff.Mode.SRC_IN)); - frameLayout.addView(imageView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 0, 0, 59)); + imageView.setTranslationY(-AndroidUtilities.dp(24)); + frameLayout.addView(imageView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 42, 0, 28)); TextView textView = new TextView(context); textView.setText(LocaleController.getString("NoStickersFound", R.string.NoStickersFound)); textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); textView.setTextColor(getThemedColor(Theme.key_chat_emojiPanelEmptyText)); - frameLayout.addView(textView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 0, 0, 9)); + frameLayout.addView(textView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 42, 0, 9)); view = frameLayout; view.setLayoutParams(new RecyclerView.LayoutParams(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FlickerLoadingView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FlickerLoadingView.java index 011733813a..ab95d7b6ee 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FlickerLoadingView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FlickerLoadingView.java @@ -821,7 +821,7 @@ private int getCellHeight(int width) { return AndroidUtilities.dp(107); case REACTED_TYPE_WITH_EMOJI_HINT: case REACTED_TYPE: - return AndroidUtilities.dp(48); + return AndroidUtilities.dp(ReactedUsersListView.ITEM_HEIGHT_DP); case LIMIT_REACHED_GROUPS: return AndroidUtilities.dp(58); case LIMIT_REACHED_LINKS: diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FloatingDebug/FloatingDebugView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FloatingDebug/FloatingDebugView.java index c4b27b6e11..1bf4067bc5 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FloatingDebug/FloatingDebugView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FloatingDebug/FloatingDebugView.java @@ -489,13 +489,6 @@ private List getBuiltInDebugItems() { Toast.makeText(getContext(), LocaleController.getString(SharedConfig.debugWebView ? R.string.DebugMenuWebViewDebugEnabled : R.string.DebugMenuWebViewDebugDisabled), Toast.LENGTH_SHORT).show(); })); } - items.add(new FloatingDebugController.DebugItem(LocaleController.getString(SharedConfig.useLNavigation ? 
R.string.AltNavigationDisable : R.string.AltNavigationEnable), () -> { - SharedConfig.useLNavigation = !SharedConfig.useLNavigation; - SharedConfig.saveConfig(); - if (getContext() instanceof Activity) { - ((Activity) getContext()).recreate(); - } - })); items.add(new FloatingDebugController.DebugItem(Theme.isCurrentThemeDark() ? "Switch to day theme" : "Switch to dark theme", () -> { boolean toDark; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Forum/ForumUtilities.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Forum/ForumUtilities.java index 26f24d1c19..894383b31b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Forum/ForumUtilities.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Forum/ForumUtilities.java @@ -228,7 +228,7 @@ public static CharSequence getTopicSpannedName(TLRPC.ForumTopic topic, Paint pai AnimatedEmojiSpan span; sb.setSpan(span = new AnimatedEmojiSpan(forumTopic.icon_emoji_id, .95f, paint == null ? null : paint.getFontMetricsInt()), 0, 1, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); span.top = true; - span.cacheType = AnimatedEmojiDrawable.CACHE_TYPE_EMOJI_STATUS; + span.cacheType = AnimatedEmojiDrawable.CACHE_TYPE_ALERT_PREVIEW_STATIC; } else { sb.append(" "); Drawable drawable = ForumUtilities.createTopicDrawable(forumTopic); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ForwardingPreviewView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ForwardingPreviewView.java index c94c0c9c4f..5bd4e387e9 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ForwardingPreviewView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ForwardingPreviewView.java @@ -166,7 +166,7 @@ public boolean drawChild(Canvas canvas, View child, long drawingTime) { cell.drawCheckBox(canvas); canvas.save(); canvas.translate(cell.getX(), cell.getY()); - cell.drawMessageText(canvas, cell.getMessageObject().textLayoutBlocks, true, 1f, false); + cell.drawMessageText(canvas, cell.getMessageObject().textLayoutBlocks, cell.getMessageObject().textXOffset, true, 1f, false); if (cell.getCurrentMessagesGroup() != null || cell.getTransitionParams().animateBackgroundBoundsInner) { cell.drawNamesLayout(canvas, 1f); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java index a34ca97bb5..a49d5f8e43 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java @@ -45,16 +45,22 @@ import android.widget.Button; import android.widget.FrameLayout; import android.widget.ImageView; +import android.widget.LinearLayout; import android.widget.TextView; import androidx.annotation.IntDef; import androidx.annotation.Keep; +import androidx.core.graphics.ColorUtils; +import androidx.core.math.MathUtils; + +//import com.google.android.gms.vision.Frame; import org.telegram.messenger.AccountInstance; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ChatObject; import org.telegram.messenger.ContactsController; import org.telegram.messenger.DialogObject; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.LocationController; import org.telegram.messenger.MediaController; @@ -69,9 +75,8 @@ import org.telegram.messenger.voip.VoIPService; import org.telegram.tgnet.ConnectionsManager; 
import org.telegram.tgnet.TLRPC; -import org.telegram.ui.ActionBar.ActionBar; import org.telegram.ui.ActionBar.ActionBarMenuItem; -import org.telegram.ui.ActionBar.ActionBarMenuSubItem; +import org.telegram.ui.ActionBar.ActionBarMenuSlider; import org.telegram.ui.ActionBar.AlertDialog; import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.Theme; @@ -127,11 +132,14 @@ public class FragmentContextView extends FrameLayout implements NotificationCent private RLottieDrawable muteDrawable; private ImageView closeButton; private ActionBarMenuItem playbackSpeedButton; - private ActionBarMenuSubItem[] speedItems = new ActionBarMenuSubItem[4]; + private SpeedIconDrawable speedIcon; + private ActionBarMenuSlider.SpeedSlider speedSlider; + private ActionBarMenuItem.Item[] speedItems = new ActionBarMenuItem.Item[6]; private FrameLayout silentButton; private ImageView silentButtonImage; private FragmentContextView additionalContextView; private TextView joinButton; + private int joinButtonWidth; private CellFlickerDrawable joinButtonFlicker; private boolean isMuted; @@ -206,10 +214,9 @@ public void run() { private boolean checkPlayerAfterAnimation; private boolean checkImportAfterAnimation; - private final static int menu_speed_slow = 1; - private final static int menu_speed_normal = 2; - private final static int menu_speed_fast = 3; - private final static int menu_speed_veryfast = 4; + private final static float[] speeds = new float[] { + .5f, 1f, 1.2f, 1.5f, 1.7f, 2f + }; @Override public void onAudioSettingsChanged() { @@ -251,10 +258,6 @@ public FragmentContextView(Context context, BaseFragment parentFragment, View pa if (parentFragment instanceof ChatActivityInterface) { chatActivity = (ChatActivityInterface) parentFragment; } - SizeNotifierFrameLayout sizeNotifierFrameLayout = null; - if (fragment.getFragmentView() instanceof SizeNotifierFrameLayout) { - sizeNotifierFrameLayout = (SizeNotifierFrameLayout) fragment.getFragmentView(); - } applyingView = paddingView; visible = true; isLocation = location; @@ -263,6 +266,26 @@ public FragmentContextView(Context context, BaseFragment parentFragment, View pa } setTag(1); + } + + public void setSupportsCalls(boolean value) { + supportsCalls = value; + } + + public void setDelegate(FragmentContextViewDelegate fragmentContextViewDelegate) { + delegate = fragmentContextViewDelegate; + } + + private void checkCreateView() { + if (frameLayout != null) { + return; + } + + final Context context = getContext(); + SizeNotifierFrameLayout sizeNotifierFrameLayout = null; + if (fragment.getFragmentView() instanceof SizeNotifierFrameLayout) { + sizeNotifierFrameLayout = (SizeNotifierFrameLayout) fragment.getFragmentView(); + } frameLayout = new BlurredFrameLayout(context, sizeNotifierFrameLayout) { @Override @@ -413,6 +436,33 @@ protected void onSizeChanged(int w, int h, int oldw, int oldh) { joinButtonFlicker.setParentWidth(getWidth()); } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + updateJoinButtonWidth(getMeasuredWidth()); + } + + @Override + public void setVisibility(int visibility) { + super.setVisibility(visibility); + if (visibility != View.VISIBLE) { + updateJoinButtonWidth(0); + joinButtonWidth = 0; + } + } + + private void updateJoinButtonWidth(int width) { + if (joinButtonWidth != width) { + titleTextView.setPadding( + titleTextView.getPaddingLeft(), + titleTextView.getPaddingTop(), + titleTextView.getPaddingRight() - 
joinButtonWidth + width, + titleTextView.getPaddingBottom() + ); + joinButtonWidth = width; + } + } }; joinButton.setText(LocaleController.getString("VoipChatJoin", R.string.VoipChatJoin)); joinButton.setTextColor(getThemedColor(Theme.key_featuredStickers_buttonText)); @@ -439,54 +489,8 @@ protected void onSizeChanged(int w, int h, int oldw, int oldh) { silentButton.setVisibility(View.GONE); addView(silentButton, LayoutHelper.createFrame(36, 36, Gravity.RIGHT | Gravity.TOP, 0, 0, 36, 0)); - if (!location) { - playbackSpeedButton = new ActionBarMenuItem(context, null, 0, getThemedColor(Theme.key_dialogTextBlack), resourcesProvider); - playbackSpeedButton.setLongClickEnabled(false); - playbackSpeedButton.setVisibility(GONE); - playbackSpeedButton.setTag(null); - playbackSpeedButton.setShowSubmenuByMove(false); - playbackSpeedButton.setContentDescription(LocaleController.getString("AccDescrPlayerSpeed", R.string.AccDescrPlayerSpeed)); - playbackSpeedButton.setDelegate(id -> { - float oldSpeed = MediaController.getInstance().getPlaybackSpeed(isMusic); - if (id == menu_speed_slow) { - MediaController.getInstance().setPlaybackSpeed(isMusic, 0.5f); - } else if (id == menu_speed_normal) { - MediaController.getInstance().setPlaybackSpeed(isMusic, 1.0f); - } else if (id == menu_speed_fast) { - MediaController.getInstance().setPlaybackSpeed(isMusic, 1.5f); - } else { - MediaController.getInstance().setPlaybackSpeed(isMusic, 1.8f); - } - float newSpeed = MediaController.getInstance().getPlaybackSpeed(isMusic); - if (oldSpeed != newSpeed) { - playbackSpeedChanged(newSpeed); - } - updatePlaybackButton(); - }); - speedItems[0] = playbackSpeedButton.addSubItem(menu_speed_slow, R.drawable.msg_speed_0_5, LocaleController.getString("SpeedSlow", R.string.SpeedSlow)); - speedItems[1] = playbackSpeedButton.addSubItem(menu_speed_normal, R.drawable.msg_speed_1, LocaleController.getString("SpeedNormal", R.string.SpeedNormal)); - speedItems[2] = playbackSpeedButton.addSubItem(menu_speed_fast, R.drawable.msg_speed_1_5, LocaleController.getString("SpeedFast", R.string.SpeedFast)); - speedItems[3] = playbackSpeedButton.addSubItem(menu_speed_veryfast, R.drawable.msg_speed_2, LocaleController.getString("SpeedVeryFast", R.string.SpeedVeryFast)); - if (AndroidUtilities.density >= 3.0f) { - playbackSpeedButton.setPadding(0, 1, 0, 0); - } - playbackSpeedButton.setAdditionalXOffset(AndroidUtilities.dp(8)); - addView(playbackSpeedButton, LayoutHelper.createFrame(36, 36, Gravity.TOP | Gravity.RIGHT, 0, 0, 36, 0)); - playbackSpeedButton.setOnClickListener(v -> { - float currentPlaybackSpeed = MediaController.getInstance().getPlaybackSpeed(isMusic); - float newSpeed; - if (Math.abs(currentPlaybackSpeed - 1.0f) > 0.001f) { - MediaController.getInstance().setPlaybackSpeed(isMusic, newSpeed = 1.0f); - } else { - MediaController.getInstance().setPlaybackSpeed(isMusic, newSpeed = MediaController.getInstance().getFastPlaybackSpeed(isMusic)); - } - playbackSpeedChanged(newSpeed); - }); - playbackSpeedButton.setOnLongClickListener(view -> { - playbackSpeedButton.toggleSubMenu(); - return true; - }); - updatePlaybackButton(); + if (!isLocation) { + createPlaybackSpeedButton(); } avatars = new AvatarsImageView(context, false); @@ -668,7 +672,7 @@ public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { builder.show(); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(getThemedColor(Theme.key_dialogTextRed2)); + 
button.setTextColor(getThemedColor(Theme.key_dialogTextRed)); } } else { MediaController.getInstance().cleanupPlayer(true, true); @@ -744,7 +748,7 @@ public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { } VoIPHelper.startCall(fragment.getMessagesController().getChat(call.chatId), null, null, false, call.call != null && !call.call.rtmp_stream, fragment.getParentActivity(), fragment, fragment.getAccountInstance()); } else if (currentStyle == STYLE_IMPORTING_MESSAGES) { - SendMessagesHelper.ImportingHistory importingHistory = parentFragment.getSendMessagesHelper().getImportingHistory(((ChatActivity) parentFragment).getDialogId()); + SendMessagesHelper.ImportingHistory importingHistory = fragment.getSendMessagesHelper().getImportingHistory(((ChatActivity) fragment).getDialogId()); if (importingHistory == null) { return; } @@ -756,55 +760,168 @@ public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { }); } - public void setSupportsCalls(boolean value) { - supportsCalls = value; + private boolean slidingSpeed; + + private void createPlaybackSpeedButton() { + if (playbackSpeedButton != null) { + return; + } + playbackSpeedButton = new ActionBarMenuItem(getContext(), null, 0, getThemedColor(Theme.key_dialogTextBlack), resourcesProvider); + playbackSpeedButton.setAdditionalYOffset(AndroidUtilities.dp(24 + 6)); + playbackSpeedButton.setLongClickEnabled(false); + playbackSpeedButton.setVisibility(GONE); + playbackSpeedButton.setTag(null); + playbackSpeedButton.setShowSubmenuByMove(false); + playbackSpeedButton.setContentDescription(LocaleController.getString("AccDescrPlayerSpeed", R.string.AccDescrPlayerSpeed)); + playbackSpeedButton.setDelegate(id -> { + if (id < 0 || id >= speeds.length) { + return; + } + float oldSpeed = MediaController.getInstance().getPlaybackSpeed(isMusic), newSpeed = speeds[id]; + MediaController.getInstance().setPlaybackSpeed(isMusic, newSpeed); + if (oldSpeed != newSpeed) { + playbackSpeedChanged(false, oldSpeed, newSpeed); + } + }); + playbackSpeedButton.setIcon(speedIcon = new SpeedIconDrawable(true)); + final float[] toggleSpeeds = new float[] { 1.0F, 1.5F, 2F }; + speedSlider = new ActionBarMenuSlider.SpeedSlider(getContext(), resourcesProvider); + speedSlider.setRoundRadiusDp(6); + speedSlider.setDrawShadow(true); + speedSlider.setOnValueChange((value, isFinal) -> { + slidingSpeed = !isFinal; + MediaController.getInstance().setPlaybackSpeed(isMusic, speedSlider.getSpeed(value)); + }); + speedItems[0] = playbackSpeedButton.lazilyAddSubItem(0, R.drawable.msg_speed_slow, LocaleController.getString("SpeedSlow", R.string.SpeedSlow)); + speedItems[1] = playbackSpeedButton.lazilyAddSubItem(1, R.drawable.msg_speed_normal, LocaleController.getString("SpeedNormal", R.string.SpeedNormal)); + speedItems[2] = playbackSpeedButton.lazilyAddSubItem(2, R.drawable.msg_speed_medium, LocaleController.getString("SpeedMedium", R.string.SpeedMedium)); + speedItems[3] = playbackSpeedButton.lazilyAddSubItem(3, R.drawable.msg_speed_fast, LocaleController.getString("SpeedFast", R.string.SpeedFast)); + speedItems[4] = playbackSpeedButton.lazilyAddSubItem(4, R.drawable.msg_speed_veryfast, LocaleController.getString("SpeedVeryFast", R.string.SpeedVeryFast)); + speedItems[5] = playbackSpeedButton.lazilyAddSubItem(5, R.drawable.msg_speed_superfast, LocaleController.getString("SpeedSuperFast", R.string.SpeedSuperFast)); + if (AndroidUtilities.density >= 3.0f) { + playbackSpeedButton.setPadding(0, 1, 0, 0); + } + 
playbackSpeedButton.setAdditionalXOffset(AndroidUtilities.dp(8)); + addView(playbackSpeedButton, LayoutHelper.createFrame(36, 36, Gravity.TOP | Gravity.RIGHT, 0, 0, 36, 0)); + playbackSpeedButton.setOnClickListener(v -> { + float currentPlaybackSpeed = MediaController.getInstance().getPlaybackSpeed(isMusic); + float newSpeed; + int index = -1; + for (int i = 0; i < toggleSpeeds.length; ++i) { + if (currentPlaybackSpeed - 0.1F <= toggleSpeeds[i]) { + index = i; + break; + } + } + index++; + if (index >= toggleSpeeds.length) { + index = 0; + } + newSpeed = toggleSpeeds[index]; + MediaController.getInstance().setPlaybackSpeed(isMusic, newSpeed); + playbackSpeedChanged(true, currentPlaybackSpeed, newSpeed); + + checkSpeedHint(); + }); + playbackSpeedButton.setOnLongClickListener(view -> { + final float speed = MediaController.getInstance().getPlaybackSpeed(isMusic); + speedSlider.setSpeed(speed, false); + speedSlider.setBackgroundColor(Theme.getColor(Theme.key_actionBarDefaultSubmenuBackground, resourcesProvider)); + speedSlider.invalidateBlur(fragment instanceof ChatActivity); + playbackSpeedButton.redrawPopup(Theme.getColor(Theme.key_actionBarDefaultSubmenuBackground)); + playbackSpeedButton.updateColor(); + updatePlaybackButton(false); + playbackSpeedButton.setDimMenu(.3f); + playbackSpeedButton.toggleSubMenu(speedSlider, null); + playbackSpeedButton.setOnMenuDismiss(byButton -> { + if (!byButton) { + playbackSpeedChanged(false, speed, MediaController.getInstance().getPlaybackSpeed(isMusic)); + } + }); + MessagesController.getGlobalNotificationsSettings().edit().putInt("speedhint", -15).apply(); + return true; + }); + updatePlaybackButton(false); } - public void setDelegate(FragmentContextViewDelegate fragmentContextViewDelegate) { - delegate = fragmentContextViewDelegate; + private HintView speedHintView; + private long lastPlaybackClick; + + private void checkSpeedHint() { + final long now = System.currentTimeMillis(); + if (now - lastPlaybackClick > 300) { + int hintValue = MessagesController.getGlobalNotificationsSettings().getInt("speedhint", 0); + hintValue++; + if (hintValue > 2) { + hintValue = -10; + } + MessagesController.getGlobalNotificationsSettings().edit().putInt("speedhint", hintValue).apply(); + if (hintValue >= 0) { + showSpeedHint(); + } + } + lastPlaybackClick = now; } - private void updatePlaybackButton() { - if (playbackSpeedButton == null) { + private void showSpeedHint() { + if (fragment != null && getParent() instanceof ViewGroup) { + speedHintView = new HintView(getContext(), 6, true) { + @Override + public void setVisibility(int visibility) { + super.setVisibility(visibility); + if (visibility != View.VISIBLE) { + try { + ((ViewGroup) getParent()).removeView(this); + } catch (Exception e) {} + } + } + }; + speedHintView.setExtraTranslationY(AndroidUtilities.dp(-12)); + speedHintView.setText(LocaleController.getString("SpeedHint")); + MarginLayoutParams params = new MarginLayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT); + params.rightMargin = AndroidUtilities.dp(3); + ((ViewGroup) getParent()).addView(speedHintView, params); + speedHintView.showForView(playbackSpeedButton, true); + } + } + + public void onPanTranslationUpdate(float y) { + if (speedHintView != null) { + speedHintView.setExtraTranslationY(AndroidUtilities.dp(64 + 8) + y); + } + } + + private void updatePlaybackButton(boolean animated) { + if (speedIcon == null) { return; } float currentPlaybackSpeed = MediaController.getInstance().getPlaybackSpeed(isMusic); - 
float speed = MediaController.getInstance().getFastPlaybackSpeed(isMusic); - if (Math.abs(speed - 1.8f) < 0.001f) { - playbackSpeedButton.setIcon(R.drawable.voice_mini_2_0); - } else if (Math.abs(speed - 1.5f) < 0.001f) { - playbackSpeedButton.setIcon(R.drawable.voice_mini_1_5); - } else { - playbackSpeedButton.setIcon(R.drawable.voice_mini_0_5); - } + speedIcon.setValue(currentPlaybackSpeed, animated); updateColors(); + + boolean isFinal = !slidingSpeed; + slidingSpeed = false; + for (int a = 0; a < speedItems.length; a++) { - if (a == 0 && Math.abs(currentPlaybackSpeed - 0.5f) < 0.001f || - a == 1 && Math.abs(currentPlaybackSpeed - 1.0f) < 0.001f || - a == 2 && Math.abs(currentPlaybackSpeed - 1.5f) < 0.001f || - a == 3 && Math.abs(currentPlaybackSpeed - 1.8f) < 0.001f) { - speedItems[a].setColors(getThemedColor(Theme.key_inappPlayerPlayPause), getThemedColor(Theme.key_inappPlayerPlayPause)); + if (isFinal && Math.abs(currentPlaybackSpeed - speeds[a]) < 0.05f) { + speedItems[a].setColors(getThemedColor(Theme.key_featuredStickers_addButtonPressed), getThemedColor(Theme.key_featuredStickers_addButtonPressed)); } else { - speedItems[a].setColors(getThemedColor(Theme.key_actionBarDefaultSubmenuItem), getThemedColor(Theme.key_actionBarDefaultSubmenuItemIcon)); + speedItems[a].setColors(getThemedColor(Theme.key_actionBarDefaultSubmenuItem), getThemedColor(Theme.key_actionBarDefaultSubmenuItem)); } } + + speedSlider.setSpeed(currentPlaybackSpeed, animated); } public void updateColors() { - if (playbackSpeedButton != null) { - String key; - float currentPlaybackSpeed = MediaController.getInstance().getPlaybackSpeed(isMusic); - if (Math.abs(currentPlaybackSpeed - 1.0f) > 0.001f) { - key = Theme.key_inappPlayerPlayPause; - } else { - key = Theme.key_inappPlayerClose; - } - playbackSpeedButton.setIconColor(getThemedColor(key)); - if (Build.VERSION.SDK_INT >= 21) { - playbackSpeedButton.setBackgroundDrawable(Theme.createSelectorDrawable(getThemedColor(key) & 0x19ffffff, 1, AndroidUtilities.dp(14))); - } + float currentPlaybackSpeed = MediaController.getInstance().getPlaybackSpeed(isMusic); + final int color = getThemedColor(!equals(currentPlaybackSpeed, 1.0f) ? Theme.key_featuredStickers_addButtonPressed : Theme.key_inappPlayerClose); + if (speedIcon != null) { + speedIcon.setColor(color); + } + if (playbackSpeedButton != null && Build.VERSION.SDK_INT >= 21) { + playbackSpeedButton.setBackground(Theme.createSelectorDrawable(color & 0x19ffffff, 1, AndroidUtilities.dp(14))); } - } public void setAdditionalContextView(FragmentContextView contextView) { @@ -854,6 +971,9 @@ private void checkVisibility() { } } } + if (show) { + checkCreateView(); + } setVisibility(show ? 
VISIBLE : GONE); } @@ -872,8 +992,41 @@ public void setTopPadding(float value) { } } - protected void playbackSpeedChanged(float value) { + private boolean equals(float a, float b) { + return Math.abs(a - b) < 0.05f; + } + + private void playbackSpeedChanged(boolean byTap, float oldValue, float newValue) { + if (equals(oldValue, newValue)) { + return; + } + final String text; + final int resId; + if (Math.abs(newValue - 1f) < 0.05f) { + if (oldValue < newValue) { + return; + } + text = LocaleController.getString("AudioSpeedNormal", R.string.AudioSpeedNormal); + if (Math.abs(oldValue - 2f) < 0.05f) { + resId = R.raw.speed_2to1; + } else if (newValue < oldValue) { + resId = R.raw.speed_slow; + } else { + resId = R.raw.speed_fast; + } + } else if (byTap && equals(newValue, 1.5f) && equals(oldValue, 1f)) { + text = LocaleController.formatString("AudioSpeedCustom", R.string.AudioSpeedCustom, SpeedIconDrawable.formatNumber(newValue)); + resId = R.raw.speed_1to15; + } else if (byTap && equals(newValue, 2f) && equals(oldValue, 1.5f)) { + text = LocaleController.getString("AudioSpeedFast", R.string.AudioSpeedFast); + resId = R.raw.speed_15to2; + } else { + text = LocaleController.formatString("AudioSpeedCustom", R.string.AudioSpeedCustom, SpeedIconDrawable.formatNumber(newValue)); + resId = newValue < 1 ? R.raw.speed_slow : R.raw.speed_fast; + } + Bulletin bulletin = BulletinFactory.of(fragment).createSimpleBulletin(resId, text); + bulletin.show(); } private void updateSilent() { @@ -891,6 +1044,7 @@ private void updateStyle(@Style int style) { if (currentStyle == style) { return; } + checkCreateView(); if (currentStyle == STYLE_ACTIVE_GROUP_CALL || currentStyle == STYLE_CONNECTING_GROUP_CALL) { Theme.getFragmentContextViewWavesDrawable().removeParent(this); if (VoIPService.getSharedInstance() != null) { @@ -971,6 +1125,7 @@ private void updateStyle(@Style int style) { if (style == STYLE_AUDIO_PLAYER) { playButton.setLayoutParams(LayoutHelper.createFrame(36, 36, Gravity.TOP | Gravity.LEFT, 0, 0, 0, 0)); titleTextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.LEFT | Gravity.TOP, 35, 0, 36, 0)); + createPlaybackSpeedButton(); if (playbackSpeedButton != null) { playbackSpeedButton.setVisibility(VISIBLE); playbackSpeedButton.setTag(1); @@ -999,7 +1154,7 @@ private void updateStyle(@Style int style) { textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); } titleTextView.setTag(Theme.key_inappPlayerPerformer); - titleTextView.setPadding(0, 0, 0, 0); + titleTextView.setPadding(0, 0, joinButtonWidth, 0); importingImageView.setVisibility(GONE); importingImageView.stopAnimation(); @@ -1070,7 +1225,7 @@ private void updateStyle(@Style int style) { joinButton.setVisibility(GONE); titleTextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 0, 0, 2)); - titleTextView.setPadding(AndroidUtilities.dp(112), 0, AndroidUtilities.dp(112), 0); + titleTextView.setPadding(AndroidUtilities.dp(112), 0, AndroidUtilities.dp(112) + joinButtonWidth, 0); if (playbackSpeedButton != null) { playbackSpeedButton.setVisibility(GONE); playbackSpeedButton.setTag(null); @@ -1159,7 +1314,7 @@ protected void onAttachedToWindow() { } else { checkCall(true); checkPlayer(true); - updatePlaybackButton(); + updatePlaybackButton(false); } } @@ -1169,7 +1324,7 @@ protected void onAttachedToWindow() { VoIPService.getSharedInstance().registerStateListener(this); } boolean newMuted = VoIPService.getSharedInstance() != null && 
VoIPService.getSharedInstance().isMicMute(); - if (isMuted != newMuted) { + if (isMuted != newMuted && muteButton != null) { isMuted = newMuted; muteDrawable.setCustomEndFrame(isMuted ? 15 : 29); muteDrawable.setCurrentFrame(muteDrawable.getCustomEndFrame() - 1, false, true); @@ -1223,7 +1378,7 @@ public void didReceivedNotification(int id, int account, Object... args) { int currentCallState = sharedInstance.getCallState(); if (currentCallState == VoIPService.STATE_WAIT_INIT || currentCallState == VoIPService.STATE_WAIT_INIT_ACK || currentCallState == VoIPService.STATE_CREATING || currentCallState == VoIPService.STATE_RECONNECTING) { - } else { + } else if (muteButton != null) { TLRPC.TL_groupCallParticipant participant = sharedInstance.groupCall.participants.get(sharedInstance.getSelfId()); if (participant != null && !participant.can_self_unmute && participant.muted && !ChatObject.canManageCalls(sharedInstance.getChat())) { sharedInstance.setMicMute(true, false, false); @@ -1235,9 +1390,10 @@ public void didReceivedNotification(int id, int account, Object... args) { } } } else if (id == NotificationCenter.groupCallTypingsUpdated) { + checkCreateView(); if (visible && currentStyle == STYLE_INACTIVE_GROUP_CALL) { ChatObject.Call call = chatActivity.getGroupCall(); - if (call != null) { + if (call != null && subtitleTextView != null) { if (call.isScheduled()) { subtitleTextView.setText(LocaleController.formatStartsTime(call.call.schedule_date, 4), false); } else if (call.call.participants_count == 0) { @@ -1254,7 +1410,7 @@ public void didReceivedNotification(int id, int account, Object... args) { } checkImport(false); } else if (id == NotificationCenter.messagePlayingSpeedChanged) { - updatePlaybackButton(); + updatePlaybackButton(true); } else if (id == NotificationCenter.webRtcMicAmplitudeEvent) { if (VoIPService.getSharedInstance() == null || VoIPService.getSharedInstance().isMicMute()) { micAmplitude = 0; @@ -1265,6 +1421,7 @@ public void didReceivedNotification(int id, int account, Object... 
args) { Theme.getFragmentContextViewWavesDrawable().setAmplitude(Math.max(speakerAmplitude, micAmplitude)); } } else if (id == NotificationCenter.webRtcSpeakerAmplitudeEvent) { + checkCreateView(); float a = (float) args[0] * 15f / 80f; speakerAmplitude = Math.max(0, Math.min(a, 1)); if (VoIPService.getSharedInstance() == null || VoIPService.getSharedInstance().isMicMute()) { @@ -1332,6 +1489,7 @@ public void onAnimationEnd(Animator animation) { } } } else { + checkCreateView(); updateStyle(STYLE_LIVE_LOCATION); playButton.setImageDrawable(new ShareLocationDrawable(getContext(), 1)); if (create && topPadding == 0) { @@ -1413,6 +1571,7 @@ private void checkLocationString() { if (chatActivity == null || titleTextView == null) { return; } + checkCreateView(); long dialogId = chatActivity.getDialogId(); int currentAccount = fragment.getCurrentAccount(); ArrayList messages = LocationController.getInstance(currentAccount).locationsCache.get(dialogId); @@ -1563,6 +1722,7 @@ public void onAnimationEnd(Animator animation) { setVisibility(View.GONE); } } else { + checkCreateView(); if (currentStyle != STYLE_AUDIO_PLAYER && animatorSet != null && !create) { checkPlayerAfterAnimation = true; return; @@ -1638,7 +1798,7 @@ public void onAnimationEnd(Animator animation) { playbackSpeedButton.setAlpha(1.0f); playbackSpeedButton.setEnabled(true); } - titleTextView.setPadding(0, 0, AndroidUtilities.dp(44), 0); + titleTextView.setPadding(0, 0, AndroidUtilities.dp(44) + joinButtonWidth, 0); stringBuilder = new SpannableStringBuilder(String.format("%s %s", messageObject.getMusicAuthor(), messageObject.getMusicTitle())); for (int i = 0; i < 2; i++) { @@ -1649,22 +1809,22 @@ public void onAnimationEnd(Animator animation) { textView.setEllipsize(TextUtils.TruncateAt.MIDDLE); } - updatePlaybackButton(); + updatePlaybackButton(false); } else { isMusic = true; if (playbackSpeedButton != null) { if (messageObject.getDuration() >= 10 * 60) { playbackSpeedButton.setAlpha(1.0f); playbackSpeedButton.setEnabled(true); - titleTextView.setPadding(0, 0, AndroidUtilities.dp(44), 0); - updatePlaybackButton(); + titleTextView.setPadding(0, 0, AndroidUtilities.dp(44) + joinButtonWidth, 0); + updatePlaybackButton(false); } else { playbackSpeedButton.setAlpha(0.0f); playbackSpeedButton.setEnabled(false); - titleTextView.setPadding(0, 0, 0, 0); + titleTextView.setPadding(0, 0, joinButtonWidth, 0); } } else { - titleTextView.setPadding(0, 0, 0, 0); + titleTextView.setPadding(0, 0, joinButtonWidth, 0); } stringBuilder = new SpannableStringBuilder(String.format("%s - %s", messageObject.getMusicAuthor(), messageObject.getMusicTitle())); for (int i = 0; i < 2; i++) { @@ -1686,6 +1846,7 @@ public void checkImport(boolean create) { if (chatActivity == null || visible && (currentStyle == STYLE_CONNECTING_GROUP_CALL || currentStyle == STYLE_ACTIVE_GROUP_CALL)) { return; } + checkCreateView(); SendMessagesHelper.ImportingHistory importingHistory = fragment.getSendMessagesHelper().getImportingHistory(chatActivity.getDialogId()); View fragmentView = fragment.getFragmentView(); if (!create && fragmentView != null) { @@ -1896,6 +2057,7 @@ public void onAnimationEnd(Animator animation) { BulletinFactory.of(fragment).createSimpleBulletin(R.raw.linkbroken, LocaleController.getString("InviteExpired", R.string.InviteExpired)).show(); } } else { + checkCreateView(); int newStyle; if (groupActive) { newStyle = STYLE_INACTIVE_GROUP_CALL; @@ -1969,7 +2131,9 @@ public void onAnimationEnd(Animator animation) { } else { timeLayout = null; 
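Illustrative sketch, not part of the patch: the FragmentContextView changes above keep inserting checkCreateView() so the bar's heavy child views are only built once it actually has something to show, with null guards where a view may not exist yet. A minimal version of that lazy-inflation pattern, with hypothetical names, could look like this:

import android.content.Context;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.TextView;

// Hypothetical stand-in for FragmentContextView's lazy child creation.
class LazyContextBar extends FrameLayout {

    private TextView titleTextView; // stays null until the bar is first shown

    LazyContextBar(Context context) {
        super(context); // the constructor builds nothing expensive
    }

    private void checkCreateView() {
        if (titleTextView != null) {
            return; // already inflated
        }
        titleTextView = new TextView(getContext());
        addView(titleTextView);
    }

    void show(CharSequence text) {
        checkCreateView();            // inflate on first use only
        titleTextView.setText(text);
        setVisibility(View.VISIBLE);
    }
}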
joinButton.setVisibility(VISIBLE); - if (call.call.rtmp_stream) { + if (!TextUtils.isEmpty(call.call.title)) { + titleTextView.setText(call.call.title, false); + } else if (call.call.rtmp_stream) { titleTextView.setText(LocaleController.getString(R.string.VoipChannelVoiceChat), false); } else if (ChatObject.isChannelOrGiga(chat)) { titleTextView.setText(LocaleController.getString("VoipChannelVoiceChat", R.string.VoipChannelVoiceChat), false); @@ -2045,7 +2209,7 @@ public void onAnimationEnd(Animator animation) { } private void startJoinFlickerAnimation() { - if (joinButtonFlicker.getProgress() > 1) { + if (joinButtonFlicker != null && joinButtonFlicker.getProgress() > 1) { AndroidUtilities.runOnUIThread(() -> { joinButtonFlicker.setProgress(0); joinButton.invalidate(); @@ -2054,6 +2218,7 @@ private void startJoinFlickerAnimation() { } private void updateAvatars(boolean animated) { + checkCreateView(); if (!animated) { if (avatars.avatarsDrawable.transitionProgressAnimator != null) { avatars.avatarsDrawable.transitionProgressAnimator.cancel(); @@ -2144,12 +2309,15 @@ public void setCollapseTransition(boolean show, float extraHeight, float progres @Override protected void dispatchDraw(Canvas canvas) { + if (frameLayout == null) { + return; + } if (drawOverlay && getVisibility() != View.VISIBLE) { return; } boolean clipped = false; if (currentStyle == STYLE_ACTIVE_GROUP_CALL || currentStyle == STYLE_CONNECTING_GROUP_CALL) { - boolean mutedByAdmin = GroupCallActivity.groupCallInstance == null && Theme.getFragmentContextViewWavesDrawable().getState() == FragmentContextViewWavesDrawable.MUTE_BUTTON_STATE_MUTED_BY_ADMIN; +// boolean mutedByAdmin = GroupCallActivity.groupCallInstance == null && Theme.getFragmentContextViewWavesDrawable().getState() == FragmentContextViewWavesDrawable.MUTE_BUTTON_STATE_MUTED_BY_ADMIN; Theme.getFragmentContextViewWavesDrawable().updateState(wasDraw); float progress = topPadding / AndroidUtilities.dp((getStyleHeight())); @@ -2228,6 +2396,7 @@ public void onStateChanged(int state) { } private void updateCallTitle() { + checkCreateView(); VoIPService service = VoIPService.getSharedInstance(); if (service != null && (currentStyle == STYLE_CONNECTING_GROUP_CALL || currentStyle == STYLE_ACTIVE_GROUP_CALL)) { int currentCallState = service.getCallState(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextViewWavesDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextViewWavesDrawable.java index ab2db19caa..42c355f185 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextViewWavesDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextViewWavesDrawable.java @@ -10,8 +10,11 @@ import android.os.SystemClock; import android.view.View; +import androidx.core.graphics.ColorUtils; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ChatObject; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.Utilities; import org.telegram.messenger.voip.VoIPService; import org.telegram.tgnet.TLRPC; @@ -69,7 +72,7 @@ public void draw(float left, float top, float right, float bottom, Canvas canvas return; } long dt = 0; - boolean rippleTransition = currentState != null && previousState != null && ((currentState.currentState == MUTE_BUTTON_STATE_MUTE && previousState.currentState == MUTE_BUTTON_STATE_UNMUTE) || (previousState.currentState == MUTE_BUTTON_STATE_MUTE && currentState.currentState == MUTE_BUTTON_STATE_UNMUTE)); + 
boolean rippleTransition = currentState != null && previousState != null && ((currentState.currentState == MUTE_BUTTON_STATE_MUTE && previousState.currentState == MUTE_BUTTON_STATE_UNMUTE) || (previousState.currentState == MUTE_BUTTON_STATE_MUTE && currentState.currentState == MUTE_BUTTON_STATE_UNMUTE)); if (update) { long newTime = SystemClock.elapsedRealtime(); @@ -155,37 +158,13 @@ public void draw(float left, float top, float right, float bottom, Canvas canvas lineBlobDrawable2.update(amplitude, 0.7f); } -// if (rippleTransition) { -// paint.setAlpha(76); -// canvas.save(); -// float cx = right - AndroidUtilities.dp(18); -// float r = (right - left) * 1.1f * progressToState; -// float offset = (float) Math.sqrt(r * r - (bottom - top) * (bottom - top)); -// if (i == 0) { -// if (cx - offset > left) { -// canvas.clipRect(left, top - AndroidUtilities.dp(20), cx - offset, bottom); -// float top1 = AndroidUtilities.dp(6) * amplitude2; -// float top2 = AndroidUtilities.dp(6) * amplitude2; -// lineBlobDrawable1.draw(left, top - top1, right, bottom, canvas, paint, top, progress); -// lineBlobDrawable2.draw(left, top - top2, right, bottom, canvas, paint, top, progress); -// } -// } else { -// if (cx - offset > left) { -// canvas.clipRect(cx - offset, top - AndroidUtilities.dp(20), right, bottom); -// } -// float top1 = AndroidUtilities.dp(6) * amplitude2; -// float top2 = AndroidUtilities.dp(6) * amplitude2; -// lineBlobDrawable1.draw(left, top - top1, right, bottom, canvas, paint, top, progress); -// lineBlobDrawable2.draw(left, top - top2, right, bottom, canvas, paint, top, progress); -// } -// canvas.restore(); -// } else { + if (LiteMode.isEnabled(LiteMode.FLAG_CALLS_ANIMATIONS)) { paint.setAlpha((int) (76 * alpha)); float top1 = AndroidUtilities.dp(6) * amplitude2; float top2 = AndroidUtilities.dp(6) * amplitude2; lineBlobDrawable1.draw(left, top - top1, right, bottom, canvas, paint, top, progress); lineBlobDrawable2.draw(left, top - top2, right, bottom, canvas, paint, top, progress); - //} + } if (i == 1 && rippleTransition) { paint.setAlpha(255); @@ -209,37 +188,6 @@ public void draw(float left, float top, float right, float bottom, Canvas canvas lineBlobDrawable.draw(left, top, right, bottom, canvas, paint, top, progress); } } - -// if (Build.VERSION.SDK_INT > 21 && parentView != null && (parentView.isPressed() || pressedRemoveProgress != 0)) { -// if (parentView.isPressed()) { -// pressedRemoveProgress = 1f; -// } -// if (pressedProgress != 1f) { -// pressedProgress += 16f / 150f; -// if (pressedProgress > 1f) { -// pressedProgress = 1f; -// } -// } else if (!parentView.isPressed() && pressedRemoveProgress != 0) { -// pressedRemoveProgress -= 16f / 150f; -// if (pressedRemoveProgress < 0) { -// pressedRemoveProgress = 0; -// pressedProgress = 0; -// } -// } -// rect.set(left, top - AndroidUtilities.dp(20), right, bottom); -// canvas.saveLayerAlpha(rect, 255, Canvas.ALL_SAVE_FLAG); -// Theme.getColor(Theme.key_listSelector); -// selectedPaint.setColor(ColorUtils.setAlphaComponent(Color.BLACK, (int) (16 * pressedRemoveProgress))); -// -// float hotspotX = left + parentView.hotspotX; -// float rad = Math.max(right - hotspotX, hotspotX - left) * 0.8f; -// canvas.drawCircle(hotspotX, top + parentView.hotspotY, rad * 1.3f * CubicBezierInterpolator.DEFAULT.getInterpolation(pressedProgress), selectedPaint); -// -// lineBlobDrawable.path.toggleInverseFillType(); -// canvas.drawPath(lineBlobDrawable.path, xRefP); -// lineBlobDrawable.path.toggleInverseFillType(); -// canvas.restore(); -// } } 
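Illustrative sketch, not part of the patch: the hunk above gates the animated wave blobs behind LiteMode.FLAG_CALLS_ANIMATIONS; when the flag is off the drawable can skip the shader and blob work and paint a flat blend of its gradient colors, as the WeavingState.setToPaint change below does. A condensed version of that pattern, with a hypothetical helper class:

import android.graphics.Canvas;
import android.graphics.Paint;

import androidx.core.graphics.ColorUtils;

import org.telegram.messenger.LiteMode;

// Hypothetical condensed form of the LiteMode fallback used by the waves drawable.
final class LiteAwareWaves {

    private final Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);

    void draw(Canvas canvas, float left, float top, float right, float bottom, int color1, int color2) {
        if (!LiteMode.isEnabled(LiteMode.FLAG_CALLS_ANIMATIONS)) {
            paint.setShader(null);
            paint.setColor(ColorUtils.blendARGB(color1, color2, 0.5f)); // flat average of the gradient
            canvas.drawRect(left, top, right, bottom, paint);           // no blob animation
            return;
        }
        // full path: gradient shader plus the animated LineBlobDrawable passes, as in the hunk above
    }
}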
float pressedProgress; @@ -333,6 +281,7 @@ public static class WeavingState { private float time; public Shader shader; + public int averageColor; private final Matrix matrix = new Matrix(); private final int currentState; @@ -428,7 +377,16 @@ public void checkColor() { public void setToPaint(Paint paint) { if (currentState == MUTE_BUTTON_STATE_UNMUTE || currentState == MUTE_BUTTON_STATE_MUTE || currentState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { - paint.setShader(shader); + if (!LiteMode.isEnabled(LiteMode.FLAG_CALLS_ANIMATIONS)) { + paint.setShader(null); + if (currentState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + paint.setColor(ColorUtils.blendARGB(ColorUtils.blendARGB(color1, color2, 0.5f), color3, 0.5f)); + } else { + paint.setColor(ColorUtils.blendARGB(color1, color2, 0.5f)); + } + } else { + paint.setShader(shader); + } } else { paint.setShader(null); paint.setColor(Theme.getColor(Theme.key_voipgroup_topPanelGray)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/GestureDetector2.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/GestureDetector2.java index 13690d4bab..52bba9adb7 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/GestureDetector2.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/GestureDetector2.java @@ -285,7 +285,7 @@ public boolean onTouchEvent(MotionEvent ev) { if (mIsLongpressEnabled) { mHandler.removeMessages(LONG_PRESS); - mHandler.sendMessageAtTime(mHandler.obtainMessage(LONG_PRESS, 0, 0), mCurrentDownEvent.getDownTime() + ViewConfiguration.getLongPressTimeout()); + mHandler.sendMessageDelayed(mHandler.obtainMessage(LONG_PRESS, 0, 0), ViewConfiguration.getLongPressTimeout()); } mHandler.sendEmptyMessageAtTime(SHOW_PRESS, mCurrentDownEvent.getDownTime() + TAP_TIMEOUT); handled |= mListener.onDown(ev); @@ -316,7 +316,7 @@ public boolean onTouchEvent(MotionEvent ev) { if (distance > slopSquare) { mHandler.removeMessages(LONG_PRESS); final long longPressTimeout = ViewConfiguration.getLongPressTimeout(); - mHandler.sendMessageAtTime(mHandler.obtainMessage(LONG_PRESS, 0, 0), ev.getDownTime() + (long) (longPressTimeout * multiplier)); + mHandler.sendMessageDelayed(mHandler.obtainMessage(LONG_PRESS, 0, 0), (long) (longPressTimeout * multiplier)); } slopSquare *= multiplier * multiplier; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/GradientTools.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/GradientTools.java new file mode 100644 index 0000000000..e3ed8cbfdc --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/GradientTools.java @@ -0,0 +1,106 @@ +package org.telegram.ui.Components; + +import android.graphics.Bitmap; +import android.graphics.BitmapShader; +import android.graphics.LinearGradient; +import android.graphics.Matrix; +import android.graphics.Paint; +import android.graphics.RectF; +import android.graphics.Shader; + +import androidx.core.graphics.ColorUtils; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.Utilities; + +public class GradientTools { + + Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + int color1; + int color2; + int color3; + int color4; + + private final static int INTERNAL_WIDTH = 60; + private final static int INTERNAL_HEIGHT = 80; + + RectF bounds = new RectF(); + Shader shader; + Matrix matrix = new Matrix(); + + Bitmap gradientBitmap = null; + + int[] colors = new int[4]; + + public void setColors(int color1, int color2) { + setColors(color1, color2, 0, 0); + } + + public void 
setColors(int color1, int color2, int color3) { + setColors(color1, color2, color3, 0); + } + + public void setColors(int color1, int color2, int color3, int color4) { + if (shader != null && this.color1 == color1 && this.color2 == color2 && this.color3 == color3 && this.color4 == color4) { + return; + } + colors[0] = this.color1 = color1; + colors[1] = this.color2 = color2; + colors[2] = this.color3 = color3; + colors[3] = this.color4 = color4; + if (color2 == 0) { + paint.setShader(shader = null); + paint.setColor(color1); + } else if (color3 == 0) { + paint.setShader(shader = new LinearGradient(0, 0, 0, INTERNAL_HEIGHT, new int[]{color1, color2}, null, Shader.TileMode.CLAMP)); + } else { + if (gradientBitmap == null) { + gradientBitmap = Bitmap.createBitmap(INTERNAL_WIDTH, INTERNAL_HEIGHT, Bitmap.Config.ARGB_8888); + } + Utilities.generateGradient(gradientBitmap, true, 0, 0, gradientBitmap.getWidth(), gradientBitmap.getHeight(), gradientBitmap.getRowBytes(), colors); + paint.setShader(shader = new BitmapShader(gradientBitmap, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP)); + } + updateBounds(); + } + + public void setBounds(RectF bounds) { + if (this.bounds.top == bounds.top && this.bounds.bottom == bounds.bottom && this.bounds.left == bounds.left && this.bounds.right == bounds.right) { + return; + } + this.bounds.set(bounds); + updateBounds(); + } + + private void updateBounds() { + if (shader == null) { + return; + } + float sx = bounds.width() / (float) INTERNAL_WIDTH; + float sy = bounds.height() / (float) INTERNAL_HEIGHT; + + matrix.reset(); + matrix.postTranslate(bounds.left, bounds.top); + matrix.preScale(sx, sy); + + shader.setLocalMatrix(matrix); + } + + public void setBounds(float left, float top, float right, float bottom) { + AndroidUtilities.rectTmp.set(left, top, right, bottom); + setBounds(AndroidUtilities.rectTmp); + } + + public int getAverageColor() { + int color = color1; + if (color2 != 0) { + color = ColorUtils.blendARGB(color, color2, 0.5f); + } + if (color3 != 0) { + color = ColorUtils.blendARGB(color, color3, 0.5f); + } + if (color4 != 0) { + color = ColorUtils.blendARGB(color, color4, 0.5f); + } + return color; + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCreateSpan.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCreateSpan.java index 292d276788..49133e36b2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCreateSpan.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCreateSpan.java @@ -275,7 +275,9 @@ protected void onDraw(Canvas canvas) { rect.set(0, 0, getMeasuredWidth(), AndroidUtilities.dp(32)); backPaint.setColor(Color.argb(colors[6] + (int) ((colors[7] - colors[6]) * progress), colors[0] + (int) ((colors[1] - colors[0]) * progress), colors[2] + (int) ((colors[3] - colors[2]) * progress), colors[4] + (int) ((colors[5] - colors[4]) * progress))); canvas.drawRoundRect(rect, AndroidUtilities.dp(16), AndroidUtilities.dp(16), backPaint); - imageReceiver.draw(canvas); + if (progress != 1f) { + imageReceiver.draw(canvas); + } if (progress != 0) { int color = avatarDrawable.getColor(); float alpha = Color.alpha(color) / 255.0f; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupVoipInviteAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupVoipInviteAlert.java index f180a24b0f..8425b6cb89 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupVoipInviteAlert.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupVoipInviteAlert.java @@ -207,8 +207,8 @@ private void fillContacts() { int currentTime = ConnectionsManager.getInstance(currentAccount).getCurrentTime(); MessagesController messagesController = MessagesController.getInstance(currentAccount); Collections.sort(contacts, (o1, o2) -> { - TLRPC.User user1 = messagesController.getUser(((TLRPC.TL_contact) o2).user_id); - TLRPC.User user2 = messagesController.getUser(((TLRPC.TL_contact) o1).user_id); + TLRPC.User user1 = o2 instanceof TLRPC.TL_contact ? messagesController.getUser(((TLRPC.TL_contact) o2).user_id) : null; + TLRPC.User user2 = o1 instanceof TLRPC.TL_contact ? messagesController.getUser(((TLRPC.TL_contact) o1).user_id) : null; int status1 = 0; int status2 = 0; if (user1 != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ImageUpdater.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ImageUpdater.java index 8e6bd18d7b..1842ad599a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ImageUpdater.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ImageUpdater.java @@ -29,6 +29,7 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; +import org.telegram.messenger.BuildVars; import org.telegram.messenger.FileLoader; import org.telegram.messenger.FileLog; import org.telegram.messenger.ImageLoader; @@ -37,6 +38,7 @@ import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaController; import org.telegram.messenger.MessageObject; +import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; @@ -75,6 +77,10 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega ID_RECORD_VIDEO = 4, ID_OPEN_AVATAR = 5; + public final static int FOR_TYPE_USER = 0; + public final static int FOR_TYPE_CHANNEL = 1; + public final static int FOR_TYPE_GROUP = 2; + public BaseFragment parentFragment; private ImageUpdaterDelegate delegate; private ChatAttachAlert chatAttachAlert; @@ -94,19 +100,23 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega private boolean clearAfterUpdate; private boolean useAttachMenu; private boolean openWithFrontfaceCamera; + private boolean supportEmojiMarkup; private boolean searchAvailable = true; private boolean uploadAfterSelect = true; private TLRPC.User user; + private boolean isUser; private TLRPC.InputFile uploadedPhoto; private TLRPC.InputFile uploadedVideo; + private TLRPC.VideoSize vectorMarkup; private double videoTimestamp; private boolean canSelectVideo; private boolean forceDarkTheme; private boolean showingFromDialog; private boolean canceled; + private boolean forUser; private final static int attach_photo = 0; @@ -115,6 +125,7 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega public final static int TYPE_SUGGEST_PHOTO_FOR_USER = 2; private int type; + public final int setForType; public void processEntry(MediaController.PhotoEntry photoEntry) { String path = null; @@ -135,6 +146,7 @@ public void processEntry(MediaController.PhotoEntry photoEntry) { avatarObject = new MessageObject(UserConfig.selectedAccount, message, false, false); avatarObject.messageOwner.attachPath = new File(FileLoader.getDirectory(FileLoader.MEDIA_DIR_CACHE), SharedConfig.getLastLocalId() + "_avatar.mp4").getAbsolutePath(); 
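Illustrative sketch, not part of the patch: the GroupVoipInviteAlert comparator above now guards the cast with instanceof, so list entries that are not TL_contact simply get a neutral rank instead of throwing a ClassCastException during the sort. The same guard-before-cast idea in plain Java:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

class GuardedSortDemo {

    // Entries that are not Integers get a neutral rank instead of crashing the comparator.
    static int rank(Object o) {
        return (o instanceof Integer) ? (Integer) o : 0;
    }

    public static void main(String[] args) {
        List<Object> items = new ArrayList<>(Arrays.asList(3, "stale entry", 7, 1));
        items.sort((o1, o2) -> Integer.compare(rank(o2), rank(o1))); // descending by rank
        System.out.println(items); // [7, 3, 1, stale entry]
    }
}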
avatarObject.videoEditedInfo = photoEntry.editedInfo; + avatarObject.emojiMarkup = photoEntry.emojiMarkup; bitmap = ImageLoader.loadBitmap(photoEntry.thumbPath, null, 800, 800, true); } else { bitmap = ImageLoader.loadBitmap(path, null, 800, 800, true); @@ -159,8 +171,13 @@ public boolean isCanceled() { return canceled; } + public void showAvatarConstructor(TLRPC.VideoSize emojiMarkup) { + createChatAttachView(); + chatAttachAlert.getPhotoLayout().showAvatarConstructorFragment(null, emojiMarkup); + } + public interface ImageUpdaterDelegate { - void didUploadPhoto(TLRPC.InputFile photo, TLRPC.InputFile video, double videoStartTimestamp, String videoPath, TLRPC.PhotoSize bigSize, TLRPC.PhotoSize smallSize, boolean isVideo); + void didUploadPhoto(TLRPC.InputFile photo, TLRPC.InputFile video, double videoStartTimestamp, String videoPath, TLRPC.PhotoSize bigSize, TLRPC.PhotoSize smallSize, boolean isVideo, TLRPC.VideoSize emojiMarkup); default String getInitialSearchString() { return null; @@ -205,9 +222,11 @@ public void setOpenWithFrontfaceCamera(boolean value) { openWithFrontfaceCamera = value; } - public ImageUpdater(boolean allowVideo) { + public ImageUpdater(boolean allowVideo, int setForType, boolean supportEmojiMarkup) { imageReceiver = new ImageReceiver(null); canSelectVideo = allowVideo; + this.supportEmojiMarkup = supportEmojiMarkup; + this.setForType = setForType; } public void setCanSelectVideo(boolean canSelectVideo) { @@ -313,7 +332,7 @@ public void openMenu(boolean hasAvatar, Runnable onDeleteAvatar, DialogInterface sheet.setOnHideListener(onDismiss); parentFragment.showDialog(sheet); if (hasAvatar) { - sheet.setItemColor(items.size() - 1, Theme.getColor(Theme.key_dialogTextRed2), Theme.getColor(Theme.key_dialogRedIcon)); + sheet.setItemColor(items.size() - 1, Theme.getColor(Theme.key_dialogTextRed), Theme.getColor(Theme.key_dialogRedIcon)); } } @@ -441,6 +460,7 @@ private void openAttachMenu(DialogInterface.OnDismissListener onDismissListener) if (type != 0) { chatAttachAlert.avatarFor(new AvatarFor(user, type)); } + chatAttachAlert.forUser = forUser; parentFragment.showDialog(chatAttachAlert); } @@ -481,6 +501,7 @@ public void didPressedButton(int button, boolean arg, boolean notify, int schedu info.entities = photoEntry.entities; info.masks = photoEntry.stickers; info.ttl = photoEntry.ttl; + info.emojiMarkup = photoEntry.emojiMarkup; } else if (object instanceof MediaController.SearchImage) { MediaController.SearchImage searchImage = (MediaController.SearchImage) object; if (searchImage.imagePath != null) { @@ -550,6 +571,7 @@ public void openAvatarsSearch() { openSearch(); } }); + chatAttachAlert.setImageUpdater(this); } if (type == TYPE_SET_PHOTO_FOR_USER) { chatAttachAlert.getSelectedTextView().setText(LocaleController.formatString("SetPhotoFor", R.string.SetPhotoFor, user.first_name)); @@ -573,6 +595,7 @@ private void didSelectPhotos(ArrayList phot avatarObject = new MessageObject(UserConfig.selectedAccount, message, false, false); avatarObject.messageOwner.attachPath = new File(FileLoader.getDirectory(FileLoader.MEDIA_DIR_CACHE), SharedConfig.getLastLocalId() + "_avatar.mp4").getAbsolutePath(); avatarObject.videoEditedInfo = info.videoEditedInfo; + avatarObject.emojiMarkup = info.emojiMarkup; bitmap = ImageLoader.loadBitmap(info.thumbPath, null, 800, 800, true); } else if (info.path != null) { bitmap = ImageLoader.loadBitmap(info.path, null, 800, 800, true); @@ -819,6 +842,7 @@ private void processBitmap(Bitmap bitmap, MessageObject avatarObject) { uploadedPhoto = null; 
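Illustrative sketch, not part of the patch: the ImageUpdater constructor above now takes the target type and whether emoji-markup (vector) avatars are supported, so call sites pass those alongside allowVideo. A hypothetical call site, assuming an enclosing fragment:

// Inside a BaseFragment that lets the user change a profile photo (hypothetical call site).
ImageUpdater imageUpdater = new ImageUpdater(
        /* allowVideo */ true,
        /* setForType */ ImageUpdater.FOR_TYPE_USER,   // or FOR_TYPE_CHANNEL / FOR_TYPE_GROUP
        /* supportEmojiMarkup */ true);
imageUpdater.parentFragment = this; // public field, unchanged by this patch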
convertingVideo = null; videoPath = null; + vectorMarkup = avatarObject == null ? null : avatarObject.emojiMarkup; bigPhoto = ImageLoader.scaleAndSaveImage(bitmap, 800, 800, 80, false, 320, 320); smallPhoto = ImageLoader.scaleAndSaveImage(bitmap, 150, 150, 80, false, 150, 150); if (smallPhoto != null) { @@ -836,6 +860,18 @@ private void processBitmap(Bitmap bitmap, MessageObject avatarObject) { uploadingImage = FileLoader.getDirectory(FileLoader.MEDIA_DIR_CACHE) + "/" + bigPhoto.location.volume_id + "_" + bigPhoto.location.local_id + ".jpg"; if (uploadAfterSelect) { if (avatarObject != null && avatarObject.videoEditedInfo != null) { + if (supportEmojiMarkup && !MessagesController.getInstance(currentAccount).uploadMarkupVideo) { + if (delegate != null) { + delegate.didStartUpload(true); + } + if (delegate != null) { + //skip upload step + delegate.didUploadPhoto(null, null, 0, null, bigPhoto, smallPhoto, isVideo, null); + delegate.didUploadPhoto(null, null, videoTimestamp, videoPath, bigPhoto, smallPhoto, isVideo, vectorMarkup); + cleanup(); + } + return; + } convertingVideo = avatarObject; long startTime = avatarObject.videoEditedInfo.startTime < 0 ? 0 : avatarObject.videoEditedInfo.startTime; videoTimestamp = (avatarObject.videoEditedInfo.avatarStartTime - startTime) / 1000000.0; @@ -863,7 +899,7 @@ private void processBitmap(Bitmap bitmap, MessageObject avatarObject) { } } if (delegate != null) { - delegate.didUploadPhoto(null, null, 0, null, bigPhoto, smallPhoto, isVideo); + delegate.didUploadPhoto(null, null, 0, null, bigPhoto, smallPhoto, isVideo, null); } } } @@ -911,7 +947,7 @@ public void didReceivedNotification(int id, int account, Object... args) { NotificationCenter.getInstance(currentAccount).removeObserver(ImageUpdater.this, NotificationCenter.fileUploadFailed); if (id == NotificationCenter.fileUploaded) { if (delegate != null) { - delegate.didUploadPhoto(uploadedPhoto, uploadedVideo, videoTimestamp, videoPath, bigPhoto, smallPhoto, isVideo); + delegate.didUploadPhoto(uploadedPhoto, uploadedVideo, videoTimestamp, videoPath, bigPhoto, smallPhoto, isVideo, vectorMarkup); } } cleanup(); @@ -974,10 +1010,16 @@ public void didReceivedNotification(int id, int account, Object... 
args) { if (bitmap != null) { File path = FileLoader.getInstance(currentAccount).getPathToAttach(smallPhoto, true); if (path != null) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("delete file " + path); + } path.delete(); } path = FileLoader.getInstance(currentAccount).getPathToAttach(bigPhoto, true); if (path != null) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("delete file " + path); + } path.delete(); } bigPhoto = ImageLoader.scaleAndSaveImage(bitmap, 800, 800, 80, false, 320, 320); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/InstantCameraView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/InstantCameraView.java index a67cd815cd..3ce0428abf 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/InstantCameraView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/InstantCameraView.java @@ -36,6 +36,7 @@ import android.media.AudioFormat; import android.media.AudioManager; import android.media.AudioRecord; +import android.media.AudioTimestamp; import android.media.MediaCodec; import android.media.MediaCodecInfo; import android.media.MediaFormat; @@ -63,6 +64,7 @@ import android.widget.FrameLayout; import android.widget.ImageView; +import androidx.annotation.RequiresApi; import androidx.core.content.ContextCompat; import androidx.core.graphics.ColorUtils; @@ -70,6 +72,7 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; +import org.telegram.messenger.AutoDeleteMediaTask; import org.telegram.messenger.BuildVars; import org.telegram.messenger.DispatchQueue; import org.telegram.messenger.FileLoader; @@ -114,7 +117,6 @@ import javax.microedition.khronos.egl.EGLContext; import javax.microedition.khronos.egl.EGLDisplay; import javax.microedition.khronos.egl.EGLSurface; -import javax.microedition.khronos.opengles.GL; import tw.nekomimi.nekogram.NekoConfig; @@ -172,9 +174,9 @@ public class InstantCameraView extends FrameLayout implements NotificationCenter private float panTranslationY; private float animationTranslationY; - private float[] mMVPMatrix = new float[16]; - private float[] mSTMatrix = new float[16]; - private float[] moldSTMatrix = new float[16]; + private final float[] mMVPMatrix = new float[16]; + private final float[] mSTMatrix = new float[16]; + private final float[] moldSTMatrix = new float[16]; private static final String VERTEX_SHADER = "uniform mat4 uMVPMatrix;\n" + "uniform mat4 uSTMatrix;\n" + @@ -225,6 +227,12 @@ public class InstantCameraView extends FrameLayout implements NotificationCenter private final static int audioSampleRate = 48000; + private static final int[] ALLOW_BIG_CAMERA_WHITELIST = { + 285904780, // XIAOMI (Redmi Note 7) + -1394191079 // samsung a31 + }; + + @SuppressLint("ClickableViewAccessibility") public InstantCameraView(Context context, ChatActivity parentFragment, Theme.ResourcesProvider resourcesProvider) { super(context); @@ -592,11 +600,21 @@ public void showCamera() { } MediaController.getInstance().pauseMessage(MediaController.getInstance().getPlayingMessageObject()); - cameraFile = new File(FileLoader.getDirectory(FileLoader.MEDIA_DIR_CACHE), SharedConfig.getLastLocalId() + ".mp4"); + cameraFile = new File(FileLoader.getDirectory(FileLoader.MEDIA_DIR_DOCUMENT), System.currentTimeMillis() + "_" + SharedConfig.getLastLocalId() + ".mp4") { + @Override + public boolean delete() { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("delete camera file"); + } + return super.delete(); + } + }; + SharedConfig.saveConfig(); + 
AutoDeleteMediaTask.lockFile(cameraFile); if (BuildVars.LOGS_ENABLED) { - FileLog.d("show round camera"); + FileLog.d("show round camera " + cameraFile.getAbsolutePath()); } textureView = new TextureView(getContext()); @@ -754,6 +772,9 @@ public void send(int state, boolean notify, int scheduleDate) { videoPlayer = null; } if (state == 4) { + if (BuildVars.DEBUG_VERSION && !cameraFile.exists()) { + FileLog.e(new RuntimeException("file not found :( round video")); + } if (videoEditedInfo.needConvert()) { file = null; encryptedFile = null; @@ -851,7 +872,11 @@ public void cancel(boolean byGesture) { cameraThread = null; } if (cameraFile != null) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("delete camera file by cancel"); + } cameraFile.delete(); + AutoDeleteMediaTask.unlockFile(cameraFile); cameraFile = null; } MediaController.getInstance().requestAudioFocus(false); @@ -981,12 +1006,18 @@ private boolean initCamera() { private Size chooseOptimalSize(ArrayList previewSizes) { ArrayList sortedSizes = new ArrayList<>(); + boolean allowBigSizeCamera = allowBigSizeCamera(); + int maxVideoSize = allowBigSizeCamera ? 1440 : 1200; + if (Build.MANUFACTURER.equalsIgnoreCase("Samsung")) { + //1440 lead to gl crashes on samsung s9 + maxVideoSize = 1200; + } for (int i = 0; i < previewSizes.size(); i++) { - if (Math.max(previewSizes.get(i).mHeight, previewSizes.get(i).mWidth) <= 1200 && Math.min(previewSizes.get(i).mHeight, previewSizes.get(i).mWidth) >= 320) { + if (Math.max(previewSizes.get(i).mHeight, previewSizes.get(i).mWidth) <= maxVideoSize && Math.min(previewSizes.get(i).mHeight, previewSizes.get(i).mWidth) >= 320) { sortedSizes.add(previewSizes.get(i)); } } - if (sortedSizes.isEmpty() || SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW || SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_AVERAGE) { + if (sortedSizes.isEmpty() || !allowBigSizeCamera()) { ArrayList sizes = sortedSizes; if (!sortedSizes.isEmpty()) { sizes = sortedSizes; @@ -1013,6 +1044,40 @@ private Size chooseOptimalSize(ArrayList previewSizes) { return sortedSizes.get(0); } + private boolean allowBigSizeCamera() { + if (SharedConfig.bigCameraForRound) { + return true; + } + if (SharedConfig.deviceIsAboveAverage()) { + return true; + } + int devicePerformanceClass = Math.max(SharedConfig.getDevicePerformanceClass(), SharedConfig.getLegacyDevicePerformanceClass()); + if (devicePerformanceClass == SharedConfig.PERFORMANCE_CLASS_HIGH) { + return true; + } + int hash = (Build.MANUFACTURER + " " + Build.DEVICE).toUpperCase().hashCode(); + for (int i = 0; i < ALLOW_BIG_CAMERA_WHITELIST.length; ++i) { + if (ALLOW_BIG_CAMERA_WHITELIST[i] == hash) { + return true; + } + } + return false; + } + + public static boolean allowBigSizeCameraDebug() { + int devicePerformanceClass = Math.max(SharedConfig.getDevicePerformanceClass(), SharedConfig.getLegacyDevicePerformanceClass()); + if (devicePerformanceClass == SharedConfig.PERFORMANCE_CLASS_HIGH) { + return true; + } + int hash = (Build.MANUFACTURER + " " + Build.DEVICE).toUpperCase().hashCode(); + for (int i = 0; i < ALLOW_BIG_CAMERA_WHITELIST.length; ++i) { + if (ALLOW_BIG_CAMERA_WHITELIST[i] == hash) { + return true; + } + } + return false; + } + private void createCamera(final SurfaceTexture surfaceTexture) { AndroidUtilities.runOnUIThread(() -> { if (cameraThread == null) { @@ -1058,7 +1123,11 @@ private void createCamera(final SurfaceTexture surfaceTexture) { } } } - }, () -> cameraThread.setCurrentSession(cameraSession)); + }, () 
-> { + if (cameraThread != null) { + cameraThread.setCurrentSession(cameraSession); + } + }); }); } @@ -1298,7 +1367,6 @@ private boolean initGL() { finish(); return false; } - GL gl = eglContext.getGL(); float tX = 1.0f / scaleX / 2.0f; float tY = 1.0f / scaleY / 2.0f; @@ -1716,11 +1784,15 @@ private class VideoRecorder implements Runnable { private Runnable recorderRunnable = new Runnable() { + @RequiresApi(api = Build.VERSION_CODES.N) @Override public void run() { long audioPresentationTimeUs = -1; int readResult; boolean done = false; + AudioTimestamp audioTimestamp = new AudioTimestamp(); + boolean shouldUseTimestamp = Build.VERSION.SDK_INT >= Build.VERSION_CODES.N; + while (!done) { if (!running && audioRecorder.getRecordingState() != AudioRecord.RECORDSTATE_STOPPED) { try { @@ -1741,14 +1813,13 @@ public void run() { buffer.lastWroteBuffer = 0; buffer.results = AudioBufferInfo.MAX_SAMPLES; for (int a = 0; a < AudioBufferInfo.MAX_SAMPLES; a++) { - if (audioPresentationTimeUs == -1) { + if (audioPresentationTimeUs == -1 && !shouldUseTimestamp) { audioPresentationTimeUs = System.nanoTime() / 1000; } ByteBuffer byteBuffer = buffer.buffer[a]; byteBuffer.rewind(); readResult = audioRecorder.read(byteBuffer, 2048); - if (readResult > 0 && a % 2 == 0) { byteBuffer.limit(readResult); double s = 0; @@ -1767,10 +1838,17 @@ public void run() { } break; } - buffer.offset[a] = audioPresentationTimeUs; + if (shouldUseTimestamp) { + audioRecorder.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC); + buffer.offset[a] = audioTimestamp.nanoTime / 1000; + } else { + buffer.offset[a] = audioPresentationTimeUs; + } buffer.read[a] = readResult; int bufferDurationUs = 1000000 * readResult / audioSampleRate / 2; - audioPresentationTimeUs += bufferDurationUs; + if (!shouldUseTimestamp) { + audioPresentationTimeUs += bufferDurationUs; + } } if (buffer.results >= 0 || buffer.last) { if (!running && buffer.results < AudioBufferInfo.MAX_SAMPLES) { @@ -2038,16 +2116,25 @@ private void handleVideoFrameAvailable(long timestampNanos, Integer cameraId) { } videoLast = timestampNanos; - GLES20.glUseProgram(drawProgram); - GLES20.glUniformMatrix4fv(vertexMatrixHandle, 1, false, mMVPMatrix, 0); + FloatBuffer textureBuffer = InstantCameraView.this.textureBuffer; + FloatBuffer vertexBuffer = InstantCameraView.this.vertexBuffer; + FloatBuffer oldTextureBuffer = oldTextureTextureBuffer; + if (textureBuffer == null || vertexBuffer == null) { + FileLog.d("handleVideoFrameAvailable skip frame " + textureBuffer + " " + vertexBuffer); + return; + } + GLES20.glUseProgram(drawProgram); GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glVertexAttribPointer(positionHandle, 3, GLES20.GL_FLOAT, false, 12, vertexBuffer); GLES20.glEnableVertexAttribArray(positionHandle); + GLES20.glVertexAttribPointer(textureHandle, 2, GLES20.GL_FLOAT, false, 8, textureBuffer); GLES20.glEnableVertexAttribArray(textureHandle); + GLES20.glUniformMatrix4fv(vertexMatrixHandle, 1, false, mMVPMatrix, 0); GLES20.glUniform2f(resolutionHandle, videoWidth, videoHeight); - if (oldCameraTexture[0] != 0 && oldTextureTextureBuffer != null) { + if (oldCameraTexture[0] != 0 && oldTextureBuffer != null) { if (!blendEnabled) { GLES20.glEnable(GLES20.GL_BLEND); blendEnabled = true; @@ -2055,7 +2142,7 @@ private void handleVideoFrameAvailable(long timestampNanos, Integer cameraId) { if (oldTexturePreviewSize != null) { GLES20.glUniform2f(previewSizeHandle, oldTexturePreviewSize.getWidth(), oldTexturePreviewSize.getHeight()); } - 
GLES20.glVertexAttribPointer(textureHandle, 2, GLES20.GL_FLOAT, false, 8, oldTextureTextureBuffer); + GLES20.glVertexAttribPointer(textureHandle, 2, GLES20.GL_FLOAT, false, 8, oldTextureBuffer); GLES20.glUniformMatrix4fv(textureMatrixHandle, 1, false, moldSTMatrix, 0); GLES20.glUniform1f(alphaHandle, 1.0f); @@ -2063,14 +2150,10 @@ private void handleVideoFrameAvailable(long timestampNanos, Integer cameraId) { GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); } - if (previewSize != null) { GLES20.glUniform2f(previewSizeHandle, previewSize.getWidth(), previewSize.getHeight()); } - GLES20.glVertexAttribPointer(positionHandle, 3, GLES20.GL_FLOAT, false, 12, vertexBuffer); - GLES20.glVertexAttribPointer(textureHandle, 2, GLES20.GL_FLOAT, false, 8, textureBuffer); - GLES20.glUniformMatrix4fv(textureMatrixHandle, 1, false, mSTMatrix, 0); GLES20.glUniform1f(alphaHandle, cameraTextureAlpha); GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexture[0]); @@ -2660,7 +2743,7 @@ protected void finalize() throws Throwable { } private String createFragmentShader(Size previewSize) { - if (SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW || SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_AVERAGE || Math.max(previewSize.getHeight(), previewSize.getWidth()) * 0.7f < MessagesController.getInstance(currentAccount).roundVideoSize) { + if (!allowBigSizeCamera() || Math.max(previewSize.getHeight(), previewSize.getWidth()) * 0.7f < MessagesController.getInstance(currentAccount).roundVideoSize) { return "#extension GL_OES_EGL_image_external : require\n" + "precision highp float;\n" + "varying vec2 vTextureCoord;\n" + @@ -2685,27 +2768,27 @@ private String createFragmentShader(Size previewSize) { "uniform vec2 resolution;\n" + "uniform vec2 preview;\n" + "uniform float alpha;\n" + - "const float kernel = 1.0;\n" + "uniform samplerExternalOES sTexture;\n" + "void main() {\n" + - " float pixelSizeX = 1.0 / preview.x;\n" + - " float pixelSizeY = 1.0 / preview.y;\n" + - " vec3 accumulation = vec3(0);\n" + - " vec3 weightsum = vec3(0);\n" + - " for (float x = -kernel; x < kernel; x++){\n" + - " for (float y = -kernel; y < kernel; y++){\n" + - " accumulation += texture2D(sTexture, vTextureCoord + vec2(x * pixelSizeX, y * pixelSizeY)).xyz;\n" + - " weightsum += 1.0;\n" + - " }\n" + - " }\n" + - " vec4 textColor = vec4(accumulation / weightsum, 1.0);\n" + " vec2 coord = resolution * 0.5;\n" + " float radius = 0.51 * resolution.x;\n" + " float d = length(coord - gl_FragCoord.xy) - radius;\n" + " float t = clamp(d, 0.0, 1.0);\n" + - " vec3 color = mix(textColor.rgb, vec3(1, 1, 1), t);\n" + - " gl_FragColor = vec4(color * alpha, alpha);\n" + + " if (t == 0.0) {\n" + + " float pixelSizeX = 1.0 / preview.x;\n" + + " float pixelSizeY = 1.0 / preview.y;\n" + + " vec3 accumulation = vec3(0);\n" + + " for (float x = 0.0; x < 2.0; x++){\n" + + " for (float y = 0.0; y < 2.0; y++){\n" + + " accumulation += texture2D(sTexture, vTextureCoord + vec2(x * pixelSizeX, y * pixelSizeY)).xyz;\n" + + " }\n" + + " }\n" + + " vec4 textColor = vec4(accumulation / vec3(4, 4, 4), 1);\n" + + " gl_FragColor = textColor * alpha;\n" + + " } else {\n" + + " gl_FragColor = vec4(1, 1, 1, alpha);\n" + + " }\n" + "}\n"; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/LazyView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/LazyView.java new file mode 100644 index 0000000000..dfb64f034c --- /dev/null +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/LazyView.java @@ -0,0 +1,46 @@ +package org.telegram.ui.Components; + +import static android.view.View.GONE; +import static android.view.View.VISIBLE; + +import android.view.View; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; + +public abstract class LazyView { + private View view; + private int visibility = GONE; + + @NonNull + public View makeView() { + return null; + } + + @Nullable + public View view() { + return view; + } + + @NonNull + public View forceView() { + if (view == null) { + view = makeView(); + } + return view; + } + + public void setVisibility(int visibility) { + this.visibility = visibility; + if (visibility == VISIBLE && view == null) { + view = makeView(); + } + if (view != null) { + view.setVisibility(visibility); + } + } + + public int getVisibility() { + return visibility; + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/LineBlobDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/LineBlobDrawable.java index 7b4a67415f..787cb45f4e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/LineBlobDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/LineBlobDrawable.java @@ -7,6 +7,7 @@ import android.graphics.Path; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LiteMode; import java.util.Random; @@ -60,6 +61,11 @@ public void update(float amplitude, float speedScale) { } public void draw(float left, float top, float right, float bottom, Canvas canvas, Paint paint, float pinnedTop, float progressToPinned) { + if (!LiteMode.isEnabled(LiteMode.FLAG_CALLS_ANIMATIONS)) { + canvas.drawRect(left, top, right, bottom, paint); + return; + } + path.reset(); path.moveTo(right, bottom); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/LinkPath.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/LinkPath.java index 377f61b612..7c9ff72586 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/LinkPath.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/LinkPath.java @@ -14,6 +14,7 @@ import android.text.Layout; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LiteMode; public class LinkPath extends Path { @@ -137,7 +138,7 @@ public void addRect(float left, float top, float right, float bottom, Direction } centerX = (right + left) / 2; centerY = (y2 + y) / 2; - if (useRoundRect) { + if (useRoundRect && LiteMode.isEnabled(LiteMode.FLAGS_CHAT)) { // final CharSequence text = currentLayout.getText(); // int startOffset = currentLayout.getOffsetForHorizontal(currentLine, left), endOffset = currentLayout.getOffsetForHorizontal(currentLine, right) + 1; boolean startsWithWhitespace = false; // startOffset >= 0 && startOffset < text.length() && text.charAt(startOffset) == ' '; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/LinkSpanDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/LinkSpanDrawable.java index de688ad174..2121c112b0 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/LinkSpanDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/LinkSpanDrawable.java @@ -23,6 +23,8 @@ import androidx.core.graphics.ColorUtils; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LiteMode; +import org.telegram.messenger.SharedConfig; import org.telegram.ui.ActionBar.SimpleTextView; import 
org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ArticleViewer; @@ -61,6 +63,8 @@ public class LinkSpanDrawable { private final float selectionAlpha = 0.2f; private final float rippleAlpha = 0.8f; + private final boolean isLite = !LiteMode.isEnabled(LiteMode.FLAGS_CHAT); + public LinkSpanDrawable(S span, Theme.ResourcesProvider resourcesProvider, float touchX, float touchY) { this(span, resourcesProvider, touchX, touchY, true); } @@ -120,7 +124,8 @@ public S getSpan() { } public boolean draw(Canvas canvas) { - boolean cornerRadiusUpdate = cornerRadius != AndroidUtilities.dp(CORNER_RADIUS_DP); + final int radius = isLite ? 0 : AndroidUtilities.dp(CORNER_RADIUS_DP); + boolean cornerRadiusUpdate = cornerRadius != radius; if (mSelectionPaint == null) { mSelectionPaint = new Paint(Paint.ANTI_ALIAS_FLAG); mSelectionPaint.setStyle(Paint.Style.FILL_AND_STROKE); @@ -134,9 +139,14 @@ public boolean draw(Canvas canvas) { mRippleAlpha = Color.alpha(color); } if (cornerRadiusUpdate) { - cornerRadius = AndroidUtilities.dp(CORNER_RADIUS_DP); - mSelectionPaint.setPathEffect(new CornerPathEffect(cornerRadius)); - mRipplePaint.setPathEffect(new CornerPathEffect(cornerRadius)); + cornerRadius = radius; + if (radius <= 0) { + mSelectionPaint.setPathEffect(null); + mRipplePaint.setPathEffect(null); + } else { + mSelectionPaint.setPathEffect(new CornerPathEffect(cornerRadius)); + mRipplePaint.setPathEffect(new CornerPathEffect(cornerRadius)); + } } if (mBounds == null && mPathesCount > 0) { mPathes.get(0).computeBounds(AndroidUtilities.rectTmp, false); @@ -167,6 +177,13 @@ public boolean draw(Canvas canvas) { ); } + if (isLite) { + for (int i = 0; i < mPathesCount; ++i) { + canvas.drawPath(mPathes.get(i), mRipplePaint); + } + return false; + } + final long now = SystemClock.elapsedRealtime(); if (mStart < 0) { mStart = now; @@ -582,14 +599,14 @@ public boolean onTouchEvent(MotionEvent event) { } else if (pressedLink.getSpan() != null) { pressedLink.getSpan().onClick(this); } + pressedLink = null; + return true; } pressedLink = null; - return true; } if (event.getAction() == MotionEvent.ACTION_CANCEL) { links.clear(); pressedLink = null; - return true; } } return pressedLink != null || super.onTouchEvent(event); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ListView/AdapterWithDiffUtils.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ListView/AdapterWithDiffUtils.java index 61a772beca..d049902b0d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ListView/AdapterWithDiffUtils.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ListView/AdapterWithDiffUtils.java @@ -18,7 +18,7 @@ public void setItems(ArrayList oldItems, ArrayList= scrollOffsetY + frameLayout.getMeasuredHeight(); - } - @Override protected void setTranslationY(int newOffset) { super.setTranslationY(newOffset); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/MentionsContainerView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/MentionsContainerView.java index 8f91b692cb..0a29ae632e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/MentionsContainerView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/MentionsContainerView.java @@ -83,7 +83,7 @@ protected Size getSizeForItem(int i) { } else { i--; } - if (adapter.getBotContextSwitch() != null) { + if (adapter.getBotContextSwitch() != null || adapter.getBotWebViewSwitch() != null) { i++; } size.width = 0; @@ -134,7 +134,7 @@ protected Size getSizeForItem(int i) 
{ @Override protected int getFlowItemCount() { - if (adapter.getBotContextSwitch() != null) { + if (adapter.getBotContextSwitch() != null || adapter.getBotWebViewSwitch() != null) { return getItemCount() - 2; } return super.getFlowItemCount() - 1; @@ -154,7 +154,7 @@ public int getSpanSize(int position) { } else if (object instanceof TLRPC.Document) { return 20; } else { - if (adapter.getBotContextSwitch() != null) { + if (adapter.getBotContextSwitch() != null || adapter.getBotWebViewSwitch() != null) { position--; } return gridLayoutManager.getSpanSizeForItem(position); @@ -183,10 +183,14 @@ public void onItemCountUpdate(int oldCount, int newCount) { @Override public void needChangePanelVisibility(boolean show) { - if (getNeededLayoutManager() != getCurrentLayoutManager() && canOpen() && adapter.getItemCountInternal() > 0) { - switchLayoutManagerOnEnd = true; - updateVisibility(false); - return; + if (getNeededLayoutManager() != getCurrentLayoutManager() && canOpen()) { + if (adapter.getLastItemCount() > 0) { + switchLayoutManagerOnEnd = true; + updateVisibility(false); + return; + } else { + listView.setLayoutManager(getNeededLayoutManager()); + } } if (show && !canOpen()) { show = false; @@ -288,7 +292,7 @@ public float clipTop() { @Override public void dispatchDraw(Canvas canvas) { boolean reversed = isReversed(); - boolean topPadding = (adapter.isStickers() || adapter.isBotContext()) && adapter.isMediaLayout() && adapter.getBotContextSwitch() == null; + boolean topPadding = (adapter.isStickers() || adapter.isBotContext()) && adapter.isMediaLayout() && adapter.getBotContextSwitch() == null && adapter.getBotWebViewSwitch() == null; containerPadding = AndroidUtilities.dp(2 + (topPadding ? 2 : 0)); float r = AndroidUtilities.dp(4); @@ -414,12 +418,15 @@ public boolean isOpen() { private boolean listViewHiding = false; private float hideT = 0; private boolean switchLayoutManagerOnEnd = false; + private int scrollRangeUpdateTries; private void updateListViewTranslation(boolean forceZeroHeight, boolean animated) { if (listView == null || paddedAdapter == null) { + scrollRangeUpdateTries = 0; return; } if (listViewHiding && listViewTranslationAnimator != null && listViewTranslationAnimator.isRunning() && forceZeroHeight) { + scrollRangeUpdateTries = 0; return; } boolean reversed = isReversed(); @@ -427,8 +434,15 @@ private void updateListViewTranslation(boolean forceZeroHeight, boolean animated if (forceZeroHeight) { itemHeight = - containerPadding - AndroidUtilities.dp(6); } else { - itemHeight = listView.computeVerticalScrollRange() - paddedAdapter.getPadding() + containerPadding; + int scrollRange = listView.computeVerticalScrollRange(); + itemHeight = scrollRange - paddedAdapter.getPadding() + containerPadding; + if (scrollRange <= 0 && adapter.getItemCountInternal() > 0 && scrollRangeUpdateTries < 3) { + scrollRangeUpdateTries++; + updateVisibility(true); + return; + } } + scrollRangeUpdateTries = 0; float newTranslationY = (reversed ? 
-Math.max(0, listViewPadding - itemHeight) : -listViewPadding + Math.max(0, listViewPadding - itemHeight)); if (forceZeroHeight && !reversed) { newTranslationY += listView.computeVerticalScrollOffset(); @@ -538,7 +552,7 @@ public void getItemOffsets(Rect outRect, View view, RecyclerView parent, Recycle position--; if (adapter.isStickers()) { return; - } else if (adapter.getBotContextSwitch() != null) { + } else if (adapter.getBotContextSwitch() != null || adapter.getBotWebViewSwitch() != null) { if (position == 0) { return; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/MessageContainsEmojiButton.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/MessageContainsEmojiButton.java index 4e6b94d20a..8cdac15708 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/MessageContainsEmojiButton.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/MessageContainsEmojiButton.java @@ -165,6 +165,9 @@ public void draw(@NonNull Canvas canvas, CharSequence charSequence, int start, i private int lastSecondPartTextWidth; private CharSequence lastSecondPartText; private int updateLayout(int width, boolean full) { + if (width <= 0) { + return 0; + } if (mainText != lastMainTextText || lastMainTextWidth != width) { if (mainText != null) { mainTextLayout = new StaticLayout(mainText, 0, mainText.length(), textPaint, Math.max(width, 0), Layout.Alignment.ALIGN_NORMAL, 1, 0, false); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/MessageSeenCheckDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/MessageSeenCheckDrawable.java new file mode 100644 index 0000000000..2506b974c4 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/MessageSeenCheckDrawable.java @@ -0,0 +1,71 @@ +package org.telegram.ui.Components; + +import android.content.Context; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.drawable.Drawable; +import android.text.SpannableStringBuilder; +import android.text.Spanned; +import android.text.style.DynamicDrawableSpan; +import android.text.style.ImageSpan; +import android.util.Log; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.R; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Cells.DialogCell; + +public class MessageSeenCheckDrawable { + + private CharSequence lastSpanned; + private int lastColor; + private Drawable drawable; + private float lastDensity; + + private int resId; + private String colorKey; + + private int w = -1, h = -1; + private float oy = 4.66f; + + public MessageSeenCheckDrawable(int resId, String colorKey) { + this.resId = resId; + this.colorKey = colorKey; + } + + public MessageSeenCheckDrawable(int resId, String colorKey, int w, int h) { + this(resId, colorKey); + this.w = w; + this.h = h; + } + + public MessageSeenCheckDrawable(int resId, String colorKey, int w, int h, float oy) { + this(resId, colorKey); + this.w = w; + this.h = h; + this.oy = oy; + } + + public CharSequence getSpanned(Context context) { + if (lastSpanned != null && drawable != null && AndroidUtilities.density == lastDensity) { + if (lastColor != Theme.getColor(colorKey)) { + drawable.setColorFilter(new PorterDuffColorFilter(lastColor = Theme.getColor(colorKey), PorterDuff.Mode.SRC_IN)); + } + return lastSpanned; + } + if (context == null) { + return null; + } + SpannableStringBuilder str = new SpannableStringBuilder("v "); + lastDensity = AndroidUtilities.density; + drawable = 
context.getResources().getDrawable(resId).mutate(); + drawable.setColorFilter(new PorterDuffColorFilter(lastColor = Theme.getColor(colorKey), PorterDuff.Mode.SRC_IN)); + final int w = this.w <= 0 ? drawable.getIntrinsicWidth() : AndroidUtilities.dp(this.w); + final int h = this.h <= 0 ? drawable.getIntrinsicHeight() : AndroidUtilities.dp(this.h); + final int oy = AndroidUtilities.dp(this.oy); + drawable.setBounds(0, oy, w, oy + h); + str.setSpan(new ImageSpan(drawable, DynamicDrawableSpan.ALIGN_CENTER), 0, 1, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + str.setSpan(new DialogCell.FixedWidthSpan(AndroidUtilities.dp(2)), 1, 2, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + return lastSpanned = str; + } +} \ No newline at end of file diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/MotionBackgroundDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/MotionBackgroundDrawable.java index f2bbb287f6..b2b5461263 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/MotionBackgroundDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/MotionBackgroundDrawable.java @@ -28,6 +28,7 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.FileLog; import org.telegram.messenger.GenericProvider; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.SharedConfig; import org.telegram.messenger.Utilities; @@ -53,6 +54,7 @@ public class MotionBackgroundDrawable extends Drawable { private long lastUpdateTime; private WeakReference parentView; + private boolean ignoreInterpolator; private final CubicBezierInterpolator interpolator = new CubicBezierInterpolator(0.33, 0.0, 0.0, 1.0); private int translationY; @@ -164,6 +166,10 @@ public Bitmap getBitmap() { return currentBitmap; } + public Bitmap getPatternBitmap() { + return patternBitmap; + } + public int getIntensity() { return intensity; } @@ -252,7 +258,7 @@ public void switchToNextPosition() { } public void switchToNextPosition(boolean fast) { - if (posAnimationProgress < 1.0f || SharedConfig.getLiteMode().enabled()) { + if (posAnimationProgress < 1.0f || !LiteMode.isEnabled(LiteMode.FLAG_CHAT_BACKGROUND)) { return; } rotatingPreview = false; @@ -268,7 +274,7 @@ public void switchToNextPosition(boolean fast) { generateNextGradient(); } - private void generateNextGradient() { + public void generateNextGradient() { if (useLegacyBitmap && intensity < 0) { try { if (legacyBitmap != null) { @@ -294,9 +300,9 @@ private void generateNextGradient() { Utilities.generateGradient(currentBitmap, true, phase, 1f, currentBitmap.getWidth(), currentBitmap.getHeight(), currentBitmap.getRowBytes(), colors); invalidateLegacy = true; } - for (int i = 0; i < ANIMATION_CACHE_BITMAPS_COUNT; i++) { + for (int i = -1; i < ANIMATION_CACHE_BITMAPS_COUNT; i++) { float p = (i + 1) / (float) ANIMATION_CACHE_BITMAPS_COUNT; - Utilities.generateGradient(gradientToBitmap[i], true, phase, p, currentBitmap.getWidth(), currentBitmap.getHeight(), currentBitmap.getRowBytes(), colors); + Utilities.generateGradient(i < 0 ? 
gradientFromBitmap : gradientToBitmap[i], true, phase, p, currentBitmap.getWidth(), currentBitmap.getHeight(), currentBitmap.getRowBytes(), colors); } } @@ -892,11 +898,14 @@ public void updateAnimation(boolean invalidate) { if (posAnimationProgress > 1.0f) { posAnimationProgress = 1.0f; } - if (animationProgressProvider == null) { + if (animationProgressProvider == null && !ignoreInterpolator) { progress = interpolator.getInterpolation(posAnimationProgress); } else { progress = posAnimationProgress; } + if (ignoreInterpolator && (progress == 0 || progress == 1)) { + ignoreInterpolator = false; + } if (stageBefore == 0 && progress > 0.25f || stageBefore == 1 && progress > 0.5f || stageBefore == 2 && progress > 0.75f) { @@ -941,11 +950,14 @@ public void updateAnimation(boolean invalidate) { if (posAnimationProgress > 1.0f) { posAnimationProgress = 1.0f; } - if (animationProgressProvider == null) { + if (animationProgressProvider == null && !ignoreInterpolator) { progress = interpolator.getInterpolation(posAnimationProgress); } else { progress = posAnimationProgress; } + if (ignoreInterpolator && (progress == 0 || progress == 1)) { + ignoreInterpolator = false; + } if (rotationBack) { progress = 1.0f - progress; if (posAnimationProgress >= 1.0f) { @@ -1022,6 +1034,12 @@ public boolean isIndeterminateAnimation() { } public void setIndeterminateAnimation(boolean isIndeterminateAnimation) { + if (!isIndeterminateAnimation && this.isIndeterminateAnimation) { + float progressPerPhase = 1f / 8f; + int phase = (int) (posAnimationProgress / progressPerPhase); + posAnimationProgress = 1f - (posAnimationProgress - phase * progressPerPhase) / progressPerPhase; + ignoreInterpolator = true; + } this.isIndeterminateAnimation = isIndeterminateAnimation; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/OptionsSpeedIconDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/OptionsSpeedIconDrawable.java new file mode 100644 index 0000000000..75822d4379 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/OptionsSpeedIconDrawable.java @@ -0,0 +1,151 @@ +package org.telegram.ui.Components; + + +import static org.telegram.messenger.AndroidUtilities.dp; +import static org.telegram.messenger.AndroidUtilities.dpf2; + +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.ColorFilter; +import android.graphics.Paint; +import android.graphics.PixelFormat; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffXfermode; +import android.graphics.Xfermode; +import android.graphics.drawable.Drawable; +import android.text.TextUtils; +import android.view.Gravity; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; + +import org.telegram.messenger.AndroidUtilities; + +public class OptionsSpeedIconDrawable extends Drawable { + + private Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + + private final Drawable.Callback callback = new Callback() { + @Override + public void invalidateDrawable(@NonNull Drawable who) { + OptionsSpeedIconDrawable.this.invalidateSelf(); + } + @Override + public void scheduleDrawable(@NonNull Drawable who, @NonNull Runnable what, long when) { + OptionsSpeedIconDrawable.this.scheduleSelf(what, when); + } + @Override + public void unscheduleDrawable(@NonNull Drawable who, @NonNull Runnable what) { + OptionsSpeedIconDrawable.this.unscheduleSelf(what); + } + }; + + private AnimatedTextView.AnimatedTextDrawable textDrawable; + private boolean textDrawableVisible; + 
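    // The speed badge is drawn as a small rounded pill in draw() below; its digits are
    // rendered by textDrawable with a DST_OUT xfermode (set in setSpeed()), so the label is
    // punched out of the pill rather than painted over it. The AnimatedFloat declared next
    // fades the pill in and out over 250ms with an ease-out-quint curve.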
private AnimatedFloat textDrawableAlpha = new AnimatedFloat(this::invalidateSelf, 250, CubicBezierInterpolator.EASE_OUT_QUINT); + + public OptionsSpeedIconDrawable() { + paint.setColor(Color.WHITE); + } + + public void setSpeed(Float speed, boolean animated) { + if (speed == null && textDrawable == null) { + return; + } + if (textDrawable == null) { + textDrawable = new AnimatedTextView.AnimatedTextDrawable(); + textDrawable.setCallback(callback); + textDrawable.setAnimationProperties(.3f, 0, 165, CubicBezierInterpolator.EASE_OUT_QUINT); + textDrawable.setGravity(Gravity.CENTER_HORIZONTAL); + textDrawable.setTypeface(AndroidUtilities.getTypeface(AndroidUtilities.TYPEFACE_ROBOTO_MEDIUM)); + textDrawable.setTextSize(dp(7)); + textDrawable.setTextColor(0xFFFFFFFF); + textDrawable.getPaint().setStyle(Paint.Style.FILL_AND_STROKE); + textDrawable.getPaint().setStrokeWidth(dpf2(.1f)); + textDrawable.getPaint().setXfermode(new PorterDuffXfermode(PorterDuff.Mode.DST_OUT)); + } + if (speed == null) { + textDrawable.cancelAnimation(); + textDrawable.setText("", animated); + textDrawableVisible = false; + } else { + String string = SpeedIconDrawable.formatNumber(speed); + if (string.length() <= 1) { + string += "X"; + } + if (!TextUtils.equals(string, textDrawable.getText())) { + textDrawable.cancelAnimation(); + textDrawable.setText(string, animated); + textDrawableVisible = !TextUtils.isEmpty(string); + } + } + invalidateSelf(); + } + + @Override + public void draw(@NonNull Canvas canvas) { + if (getBounds() == null) { + return; + } + + int cx = getBounds().centerX(), cy = getBounds().centerY(); + + canvas.drawCircle(cx, cy - dpf2(6), dpf2(2), paint); + canvas.drawCircle(cx, cy, dpf2(2), paint); + canvas.drawCircle(cx, cy + dpf2(6), dpf2(2), paint); + + if (textDrawable != null) { + canvas.save(); + + int tcx = cx - dp(11.6f), tcy = cy + dp(4); + + float alpha = textDrawableAlpha.set(textDrawableVisible ? 
1 : 0); + int wasAlpha = paint.getAlpha(); + if (alpha < 1) { + paint.setAlpha((int) (0xFF * alpha)); + } + + AndroidUtilities.rectTmp.set( + tcx - dpf2(1.5f) - textDrawable.getCurrentWidth() / 2f, + tcy - dpf2(4), + tcx + dpf2(1.5f) + textDrawable.getCurrentWidth() / 2f, + tcy + dpf2(5) + ); + canvas.drawRoundRect(AndroidUtilities.rectTmp, dpf2(2), dpf2(2), paint); + + canvas.save(); + textDrawable.setBounds(tcx, tcy, tcx, tcy); + textDrawable.draw(canvas); + canvas.restore(); + + paint.setAlpha(wasAlpha); + + canvas.restore(); + } + } + + @Override + public void setAlpha(int alpha) { + paint.setAlpha(alpha); + } + + @Override + public void setColorFilter(@Nullable ColorFilter colorFilter) { + paint.setColorFilter(colorFilter); + } + + @Override + public int getOpacity() { + return PixelFormat.TRANSPARENT; + } + + @Override + public int getIntrinsicWidth() { + return dp(45); + } + + @Override + public int getIntrinsicHeight() { + return dp(45); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/OutlineEditText.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/OutlineEditText.java index f82238dbae..4676050ada 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/OutlineEditText.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/OutlineEditText.java @@ -7,6 +7,7 @@ import android.util.TypedValue; import android.view.Gravity; +import org.telegram.messenger.AndroidUtilities; import org.telegram.ui.ActionBar.Theme; public class OutlineEditText extends OutlineTextContainerView { @@ -32,6 +33,9 @@ protected void onFocusChanged(boolean focused, int direction, Rect previouslyFoc editText.setTypeface(Typeface.DEFAULT); editText.setCursorColor(Theme.getColor(Theme.key_windowBackgroundWhiteInputFieldActivated)); editText.setCursorWidth(1.5f); + editText.setPadding( + AndroidUtilities.dp(15), 0, AndroidUtilities.dp(15), 0 + ); attachEditText(editText); addView(editText, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/OverlayActionBarLayoutDialog.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/OverlayActionBarLayoutDialog.java index 7b09d5c4f0..06468b6349 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/OverlayActionBarLayoutDialog.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/OverlayActionBarLayoutDialog.java @@ -2,6 +2,7 @@ import android.app.Dialog; import android.content.Context; +import android.content.ContextWrapper; import android.graphics.Color; import android.os.Build; import android.os.Bundle; @@ -20,6 +21,7 @@ import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.INavigationLayout; import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.LaunchActivity; import java.util.ArrayList; @@ -27,6 +29,7 @@ public class OverlayActionBarLayoutDialog extends Dialog implements INavigationL private Theme.ResourcesProvider resourcesProvider; private INavigationLayout actionBarLayout; private FrameLayout frameLayout; + private PasscodeView passcodeView; public OverlayActionBarLayoutDialog(@NonNull Context context, Theme.ResourcesProvider resourcesProvider) { super(context, R.style.TransparentDialog); @@ -46,9 +49,38 @@ public OverlayActionBarLayoutDialog(@NonNull Context context, Theme.ResourcesPro actionBarLayout.setRemoveActionBarExtraHeight(true); VerticalPositionAutoAnimator.attach(actionBarLayout.getView()); } + passcodeView = 
new PasscodeView(context); + frameLayout.addView(passcodeView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + setContentView(frameLayout); } + @Override + protected void onStart() { + super.onStart(); + + Context context = getContext(); + if (context instanceof ContextWrapper && !(context instanceof LaunchActivity)) { + context = ((ContextWrapper) context).getBaseContext(); + } + if (context instanceof LaunchActivity) { + ((LaunchActivity) context).addOverlayPasscodeView(passcodeView); + } + } + + @Override + protected void onStop() { + super.onStop(); + + Context context = getContext(); + if (context instanceof ContextWrapper && !(context instanceof LaunchActivity)) { + context = ((ContextWrapper) context).getBaseContext(); + } + if (context instanceof LaunchActivity) { + ((LaunchActivity) context).removeOverlayPasscodeView(passcodeView); + } + } + @Override public void onMeasureOverride(int[] measureSpec) { if (AndroidUtilities.isTablet() && !AndroidUtilities.isInMultiwindow && !AndroidUtilities.isSmallTablet()) { @@ -105,6 +137,13 @@ public void addFragment(BaseFragment fragment) { @Override public void onBackPressed() { + if (passcodeView.getVisibility() == View.VISIBLE) { + if (getOwnerActivity() != null) { + getOwnerActivity().finish(); + } + return; + } + actionBarLayout.onBackPressed(); if (actionBarLayout.getFragmentStack().size() <= 1) { dismiss(); @@ -137,13 +176,21 @@ public boolean needCloseLastFragment(INavigationLayout layout) { @Override public void onRebuildAllFragments(INavigationLayout layout, boolean last) {} - private static final class EmptyFragment extends BaseFragment { + private final class EmptyFragment extends BaseFragment { @Override public View createView(Context context) { + hasOwnBackground = true; actionBar.setAddToContainer(false); View v = new View(context); v.setBackgroundColor(Color.TRANSPARENT); return v; } + + @Override + public void onTransitionAnimationEnd(boolean isOpen, boolean backward) { + if (isOpen && backward) { + dismiss(); + } + } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/PagerSlidingTabStrip.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/PagerSlidingTabStrip.java index e865c33775..d2ce07a7d9 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/PagerSlidingTabStrip.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/PagerSlidingTabStrip.java @@ -16,6 +16,8 @@ import android.graphics.drawable.Drawable; import android.graphics.drawable.RippleDrawable; import android.os.Build; +import android.util.TypedValue; +import android.view.Gravity; import android.view.View; import android.view.ViewGroup; import android.view.ViewTreeObserver.OnGlobalLayoutListener; @@ -23,6 +25,7 @@ import android.widget.HorizontalScrollView; import android.widget.ImageView; import android.widget.LinearLayout; +import android.widget.TextView; import androidx.viewpager.widget.ViewPager; import androidx.viewpager.widget.ViewPager.OnPageChangeListener; @@ -34,8 +37,9 @@ public class PagerSlidingTabStrip extends HorizontalScrollView { public interface IconTabProvider { Drawable getPageIconDrawable(int position); - void customOnDraw(Canvas canvas, int position); + void customOnDraw(Canvas canvas, View view, int position); boolean canScrollToTab(int position); + int getTabPadding(int position); } private LinearLayout.LayoutParams defaultTabLayoutParams; @@ -104,7 +108,14 @@ public void notifyDataSetChanged() { tabCount = pager.getAdapter().getCount(); for (int i = 0; i 
< tabCount; i++) { if (pager.getAdapter() instanceof IconTabProvider) { - addIconTab(i, ((IconTabProvider) pager.getAdapter()).getPageIconDrawable(i), pager.getAdapter().getPageTitle(i)); + Drawable drawable = ((IconTabProvider) pager.getAdapter()).getPageIconDrawable(i); + if (drawable != null) { + addIconTab(i, drawable, pager.getAdapter().getPageTitle(i)); + } else { + addTab(i, pager.getAdapter().getPageTitle(i)); + } + } else { + addTab(i, pager.getAdapter().getPageTitle(i)); } } updateTabStyles(); @@ -131,7 +142,7 @@ private void addIconTab(final int position, Drawable drawable, CharSequence cont protected void onDraw(Canvas canvas) { super.onDraw(canvas); if (pager.getAdapter() instanceof IconTabProvider) { - ((IconTabProvider) pager.getAdapter()).customOnDraw(canvas, position); + ((IconTabProvider) pager.getAdapter()).customOnDraw(canvas, this, position); } } @@ -166,6 +177,58 @@ public void setSelected(boolean selected) { tab.setContentDescription(contentDescription); } + private void addTab(final int position, CharSequence text) { + TextView tab = new TextView(getContext()) { + + @Override + public void setAlpha(float alpha) { + super.setAlpha(alpha); + } + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + if (pager.getAdapter() instanceof IconTabProvider) { + ((IconTabProvider) pager.getAdapter()).customOnDraw(canvas, this, position); + } + } + + @Override + public void setSelected(boolean selected) { + super.setSelected(selected); + Drawable background = getBackground(); + if (Build.VERSION.SDK_INT >= 21 && background != null) { + int color = getThemedColor(selected ? Theme.key_chat_emojiPanelIconSelected : Theme.key_chat_emojiBottomPanelIcon); + Theme.setSelectorDrawableColor(background, Color.argb(30, Color.red(color), Color.green(color), Color.blue(color)), true); + } + setTextColor(getThemedColor(selected ? 
Theme.key_chat_emojiPanelIconSelected : Theme.key_chat_emojiPanelBackspace)); + } + }; + tab.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + tab.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + tab.setTextColor(getThemedColor(Theme.key_chat_emojiPanelBackspace)); + tab.setFocusable(true); + tab.setGravity(Gravity.CENTER); + if (Build.VERSION.SDK_INT >= 21) { + RippleDrawable rippleDrawable = (RippleDrawable) Theme.createSelectorDrawable(getThemedColor(Theme.key_chat_emojiBottomPanelIcon), Theme.RIPPLE_MASK_CIRCLE_TO_BOUND_EDGE); + Theme.setRippleDrawableForceSoftware(rippleDrawable); + tab.setBackground(rippleDrawable); + } + tab.setText(text); + tab.setOnClickListener(v -> { + if (pager.getAdapter() instanceof IconTabProvider) { + if (!((IconTabProvider) pager.getAdapter()).canScrollToTab(position)) { + return; + } + } + pager.setCurrentItem(position, false); + }); + tab.setPadding(AndroidUtilities.dp(18), 0, AndroidUtilities.dp(18), 0); + tabsContainer.addView(tab, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, 10, 0, 10, 0)); + tab.setSelected(position == currentPosition); + } + + private void updateTabStyles() { for (int i = 0; i < tabCount; i++) { View v = tabsContainer.getChildAt(i); @@ -173,6 +236,9 @@ private void updateTabStyles() { if (shouldExpand) { v.setPadding(0, 0, 0, 0); v.setLayoutParams(new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT, 1.0F)); + } else if (pager.getAdapter() instanceof IconTabProvider) { + int padding = ((IconTabProvider) pager.getAdapter()).getTabPadding(i); + v.setPadding(padding, 0, padding, 0); } else { v.setPadding(tabPadding, 0, tabPadding, 0); } @@ -207,6 +273,9 @@ private void scrollToChild(int position, int offset) { } } + private AnimatedFloat lineLeftAnimated = new AnimatedFloat(this, 350, CubicBezierInterpolator.EASE_OUT_QUINT); + private AnimatedFloat lineRightAnimated = new AnimatedFloat(this, 350, CubicBezierInterpolator.EASE_OUT_QUINT); + @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); @@ -219,26 +288,34 @@ protected void onDraw(Canvas canvas) { if (underlineHeight != 0) { rectPaint.setColor(underlineColor); - canvas.drawRect(0, height - underlineHeight, tabsContainer.getWidth(), height, rectPaint); + AndroidUtilities.rectTmp.set(0, height - underlineHeight, tabsContainer.getWidth(), height); + canvas.drawRoundRect(AndroidUtilities.rectTmp, underlineHeight / 2f, underlineHeight / 2f, rectPaint); } View currentTab = tabsContainer.getChildAt(currentPosition); if (currentTab != null) { - float lineLeft = currentTab.getLeft(); - float lineRight = currentTab.getRight(); + float lineLeft = currentTab.getLeft() + currentTab.getPaddingLeft(); + float lineRight = currentTab.getRight() - currentTab.getPaddingRight(); if (currentPositionOffset > 0f && currentPosition < tabCount - 1) { View nextTab = tabsContainer.getChildAt(currentPosition + 1); - final float nextTabLeft = nextTab.getLeft(); - final float nextTabRight = nextTab.getRight(); + final float nextTabLeft = nextTab.getLeft() + nextTab.getPaddingLeft(); + final float nextTabRight = nextTab.getRight() - nextTab.getPaddingRight(); lineLeft = (currentPositionOffset * nextTabLeft + (1f - currentPositionOffset) * lineLeft); lineRight = (currentPositionOffset * nextTabRight + (1f - currentPositionOffset) * lineRight); + + lineLeftAnimated.set(lineLeft, true); + lineRightAnimated.set(lineRight, true); + } else { + lineLeft = lineLeftAnimated.set(lineLeft); + lineRight = 
lineRightAnimated.set(lineRight); } if (indicatorHeight != 0) { rectPaint.setColor(indicatorColor); - canvas.drawRect(lineLeft, height - indicatorHeight, lineRight, height, rectPaint); + AndroidUtilities.rectTmp.set(lineLeft, height - indicatorHeight, lineRight, height); + canvas.drawRoundRect(AndroidUtilities.rectTmp, indicatorHeight / 2f, indicatorHeight / 2f, rectPaint); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/EntityView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/EntityView.java index c9e845358e..8332074a0c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/EntityView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/EntityView.java @@ -517,7 +517,7 @@ public void deselect() { selectionView.animate().alpha(0f).scaleX(0.9f).scaleY(0.9f).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).setListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { - ((ViewGroup) selectionView.getParent()).removeView(selectionView); + AndroidUtilities.removeFromParent(selectionView); selectionView = null; } }).start(); @@ -620,8 +620,10 @@ public boolean onTouchEvent(MotionEvent event) { delta *= -1; } - float scaleDelta = 1 + (delta * 2) / getMeasuredWidth(); - scale(scaleDelta); + if (getMeasuredWidth() != 0) { + float scaleDelta = 1 + (delta * 2) / getMeasuredWidth(); + scale(scaleDelta); + } int[] pos = delegate.getCenterLocation(EntityView.this); float angle = 0; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/LPhotoPaintView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/LPhotoPaintView.java index d98141e307..d63c96f04b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/LPhotoPaintView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/LPhotoPaintView.java @@ -785,12 +785,12 @@ public void draw(Canvas c) { cancelButton = new PaintCancelView(context); cancelButton.setPadding(AndroidUtilities.dp(8), AndroidUtilities.dp(8), AndroidUtilities.dp(8), AndroidUtilities.dp(8)); - cancelButton.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_listSelector, resourcesProvider))); + cancelButton.setBackground(Theme.createSelectorDrawable(Theme.ACTION_BAR_WHITE_SELECTOR_COLOR)); bottomLayout.addView(cancelButton, LayoutHelper.createFrame(32, 32, Gravity.BOTTOM | Gravity.LEFT, 12, 0, 0, 4)); doneButton = new PaintDoneView(context); doneButton.setPadding(AndroidUtilities.dp(8), AndroidUtilities.dp(8), AndroidUtilities.dp(8), AndroidUtilities.dp(8)); - doneButton.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_listSelector, resourcesProvider))); + doneButton.setBackground(Theme.createSelectorDrawable(Theme.ACTION_BAR_WHITE_SELECTOR_COLOR)); doneButton.setOnClickListener(v -> { if (isColorListShown) { new ColorPickerBottomSheet(context, this.resourcesProvider).setColor(colorSwatch.color).setPipetteDelegate(new ColorPickerBottomSheet.PipetteDelegate() { @@ -1166,6 +1166,7 @@ protected void onDraw(Canvas canvas) { drawTab.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); drawTab.setGravity(Gravity.CENTER_HORIZONTAL); drawTab.setTypeface(AndroidUtilities.getTypeface(AndroidUtilities.TYPEFACE_ROBOTO_MEDIUM)); + drawTab.setSingleLine(); drawTab.setOnClickListener(v -> { if (editingText) { selectEntity(null); @@ -1185,6 +1186,7 @@ protected void onDraw(Canvas canvas) { 
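            // stickerTab and textTab below mirror drawTab: medium typeface, centered and
            // single-line, so overly long localized tab labels are clipped instead of wrapping.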
stickerTab.setGravity(Gravity.CENTER_HORIZONTAL); stickerTab.setTypeface(AndroidUtilities.getTypeface(AndroidUtilities.TYPEFACE_ROBOTO_MEDIUM)); stickerTab.setAlpha(0.6f); + stickerTab.setSingleLine(); tabsLayout.addView(stickerTab, LayoutHelper.createLinear(0, LayoutHelper.WRAP_CONTENT, 1f)); textTab = new TextView(context); @@ -1196,6 +1198,7 @@ protected void onDraw(Canvas canvas) { textTab.setGravity(Gravity.CENTER_HORIZONTAL); textTab.setTypeface(AndroidUtilities.getTypeface(AndroidUtilities.TYPEFACE_ROBOTO_MEDIUM)); textTab.setAlpha(0.6f); + textTab.setSingleLine(); textTab.setOnClickListener(v -> { switchTab(2); if (!(currentEntityView instanceof TextPaintView)) { @@ -1371,7 +1374,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { measureChild(topLayout, widthMeasureSpec, heightMeasureSpec); ignoreLayout = false; - int keyboardSize = SharedConfig.smoothKeyboard ? 0 : measureKeyboardHeight(); + int keyboardSize = 0; if (!waitingForKeyboardOpen && keyboardSize <= AndroidUtilities.dp(20) && !emojiViewVisible && !isAnimatePopupClosing) { ignoreLayout = true; hideEmojiView(); @@ -2832,37 +2835,36 @@ private void showEmojiPopup(int show) { onWindowSizeChanged(); if (!emojiWasVisible) { - if (SharedConfig.smoothKeyboard) { - if (keyboardVisible) { - translateBottomPanelAfterResize = true; - weightChooserView.startPanTransition(AndroidUtilities.displaySize.y, AndroidUtilities.displaySize.y - emojiPadding); + if (keyboardVisible) { + translateBottomPanelAfterResize = true; + weightChooserView.startPanTransition(AndroidUtilities.displaySize.y, AndroidUtilities.displaySize.y - emojiPadding); // weightChooserView.updatePanTransition(0, 1); // weightChooserView.stopPanTransition(); - } else { - ValueAnimator animator = ValueAnimator.ofFloat(emojiPadding, 0); - weightChooserView.startPanTransition(AndroidUtilities.displaySize.y, AndroidUtilities.displaySize.y - emojiPadding); - animator.addUpdateListener(animation -> { - float v = (float) animation.getAnimatedValue(); - emojiView.setTranslationY(v); + } else { + ValueAnimator animator = ValueAnimator.ofFloat(emojiPadding, 0); + weightChooserView.startPanTransition(AndroidUtilities.displaySize.y, AndroidUtilities.displaySize.y - emojiPadding); + animator.addUpdateListener(animation -> { + float v = (float) animation.getAnimatedValue(); + emojiView.setTranslationY(v); + if (!ignore) { + bottomPanelTranslationY(v, 1f - v / emojiPadding); + } + }); + animator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + emojiView.setTranslationY(0); if (!ignore) { - bottomPanelTranslationY(v, 1f - v / emojiPadding); + bottomPanelTranslationY(0, 1); } - }); - animator.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - emojiView.setTranslationY(0); - if (!ignore) { - bottomPanelTranslationY(0, 1); - } - weightChooserView.stopPanTransition(); - } - }); - animator.setDuration(AdjustPanLayoutHelper.keyboardDuration); - animator.setInterpolator(AdjustPanLayoutHelper.keyboardInterpolator); - animator.start(); - } + weightChooserView.stopPanTransition(); + } + }); + animator.setDuration(AdjustPanLayoutHelper.keyboardDuration); + animator.setInterpolator(AdjustPanLayoutHelper.keyboardInterpolator); + animator.start(); } + } } else { ChatActivityEnterViewAnimatedIconView emojiButton = textOptionsView.getEmojiButton(); @@ -2895,7 +2897,7 @@ private void hideEmojiPopup(boolean byBackButton) { showEmojiPopup(0); } if (byBackButton) { 
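                // when dismissed via the back button, the visible emoji panel is animated away
                // over its measured height with a ValueAnimator before being hidden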
- if (SharedConfig.smoothKeyboard && emojiView != null && emojiView.getVisibility() == View.VISIBLE && !waitingForKeyboardOpen) { + if (emojiView != null && emojiView.getVisibility() == View.VISIBLE && !waitingForKeyboardOpen) { int height = emojiView.getMeasuredHeight(); ValueAnimator animator = ValueAnimator.ofFloat(0, height); final boolean ignore = bottomPanelIgnoreOnce; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/PipettePickerView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/PipettePickerView.java index 3a31ba7f9d..174a353ee1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/PipettePickerView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Paint/Views/PipettePickerView.java @@ -19,6 +19,7 @@ import androidx.core.util.Consumer; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.Utilities; import org.telegram.ui.Components.CubicBezierInterpolator; public class PipettePickerView extends View { @@ -163,7 +164,10 @@ protected void onDraw(Canvas canvas) { float cx = positionX * getWidth(), cy = positionY * getHeight(); int bx = Math.round(positionX * bitmap.getWidth()), by = Math.round(positionY * bitmap.getHeight()); - mColor = bitmap.getPixel(bx, by); + mColor = bitmap.getPixel( + Utilities.clamp(bx, bitmap.getWidth(), 0), + Utilities.clamp(by, bitmap.getHeight(), 0) + ); colorPaint.setColor(mColor); if (appearProgress != 0f && appearProgress != 1f) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/PasscodeView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/PasscodeView.java index 57f350be4e..cc6a02b862 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/PasscodeView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/PasscodeView.java @@ -76,6 +76,7 @@ import org.telegram.messenger.support.fingerprint.FingerprintManagerCompat; import org.telegram.ui.ActionBar.AlertDialog; import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.LaunchActivity; import java.util.ArrayList; import java.util.Collections; @@ -100,11 +101,19 @@ public void didReceivedNotification(int id, int account, Object... 
args) { if ((boolean) args[0] && SharedConfig.appLocked) { checkFingerprint(); } + } else if (id == NotificationCenter.passcodeDismissed) { + if (args[0] != this) { + setVisibility(GONE); + + if (fingerprintDialog != null) { + fingerprintDialog.dismiss(); + } + } } } public interface PasscodeViewDelegate { - void didAcceptedPassword(); + void didAcceptedPassword(PasscodeView view); } private static class AnimatingTextView extends FrameLayout { @@ -630,7 +639,9 @@ public void beforeTextChanged(CharSequence s, int start, int count, int after) { backgroundSpringQueue.remove(callback); } for (int i : removeIndex) { - backgroundSpringNextQueue.remove(i); + if (i < backgroundSpringNextQueue.size()) { + backgroundSpringNextQueue.remove(i); + } } } } @@ -989,7 +1000,7 @@ private void processDone(boolean fingerprint) { NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didSetPasscode); setOnTouchListener(null); if (delegate != null) { - delegate.didAcceptedPassword(); + delegate.didAcceptedPassword(this); } AndroidUtilities.runOnUIThread(() -> { @@ -1124,6 +1135,7 @@ protected void onAttachedToWindow() { super.onAttachedToWindow(); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.didGenerateFingerprintKeyPair); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.passcodeDismissed); } @Override @@ -1131,6 +1143,7 @@ protected void onDetachedFromWindow() { super.onDetachedFromWindow(); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didGenerateFingerprintKeyPair); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.passcodeDismissed); } private void checkFingerprint() { @@ -1138,7 +1151,7 @@ private void checkFingerprint() { return; } Activity parentActivity = (Activity) getContext(); - if (parentActivity != null && fingerprintView.getVisibility() == VISIBLE && !ApplicationLoader.mainInterfacePaused) { + if (parentActivity != null && fingerprintView.getVisibility() == VISIBLE && !ApplicationLoader.mainInterfacePaused && (!(parentActivity instanceof LaunchActivity) || ((LaunchActivity) parentActivity).allowShowFingerprintDialog(this))) { if (Build.VERSION.SDK_INT >= 28 && NaConfig.INSTANCE.getUseSystemUnlock().Bool()) { try { boolean useBiometric; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/PhonebookShareAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/PhonebookShareAlert.java index cc7ea429d2..5f984365f4 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/PhonebookShareAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/PhonebookShareAlert.java @@ -240,10 +240,18 @@ protected void onDraw(Canvas canvas) { } public PhonebookShareAlert(BaseFragment parent, ContactsController.Contact contact, TLRPC.User user, Uri uri, File file, String firstName, String lastName) { - this(parent, contact, user, uri, file, firstName, lastName, null); + this(parent, contact, user, uri, file, null, firstName, lastName); + } + + public PhonebookShareAlert(BaseFragment parent, ContactsController.Contact contact, TLRPC.User user, Uri uri, File file, String phone, String firstName, String lastName) { + this(parent, contact, user, uri, file, phone, firstName, lastName, null); } public PhonebookShareAlert(BaseFragment parent, ContactsController.Contact contact, TLRPC.User user, Uri uri, File file, String firstName, String lastName, Theme.ResourcesProvider resourcesProvider) { + this(parent, contact, user, uri, 
file, null, firstName, lastName, resourcesProvider); + } + + public PhonebookShareAlert(BaseFragment parent, ContactsController.Contact contact, TLRPC.User user, Uri uri, File file, String phone, String firstName, String lastName, Theme.ResourcesProvider resourcesProvider) { super(parent.getParentActivity(), false, resourcesProvider); String name = ContactsController.formatName(firstName, lastName); @@ -256,6 +264,12 @@ public PhonebookShareAlert(BaseFragment parent, ContactsController.Contact conta result = AndroidUtilities.loadVCardFromStream(Uri.fromFile(file), currentAccount, false, items, name); file.delete(); isImport = true; + } else if (phone != null) { + AndroidUtilities.VcardItem item = new AndroidUtilities.VcardItem(); + item.type = 0; + item.vcardData.add(item.fullData = "TEL;MOBILE:+" + phone); + phones.add(item); + isImport = true; } else if (contact.key != null) { uri = Uri.withAppendedPath(ContactsContract.Contacts.CONTENT_VCARD_URI, contact.key); result = AndroidUtilities.loadVCardFromStream(uri, currentAccount, true, items, name); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/PhotoViewerCaptionEnterView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/PhotoViewerCaptionEnterView.java index 7eb22c1629..0da2e783c1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/PhotoViewerCaptionEnterView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/PhotoViewerCaptionEnterView.java @@ -833,7 +833,7 @@ private void showPopup(int show, boolean animated) { emojiIconDrawable.setIcon(R.drawable.input_smile, true); } if (sizeNotifierLayout != null) { - if (animated && SharedConfig.smoothKeyboard && show == 0 && emojiView != null) { + if (animated && show == 0 && emojiView != null) { ValueAnimator animator = ValueAnimator.ofFloat(emojiPadding, 0); float animateFrom = emojiPadding; popupAnimating = true; @@ -867,10 +867,6 @@ public void onAnimationEnd(Animator animation) { emojiView.setVisibility(GONE); } emojiPadding = 0; - } else { - if (!SharedConfig.smoothKeyboard && emojiView != null) { - emojiView.setVisibility(GONE); - } } sizeNotifierLayout.requestLayout(); onWindowSizeChanged(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/PipVideoOverlay.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/PipVideoOverlay.java index cf4a7970db..70c2054bb5 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/PipVideoOverlay.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/PipVideoOverlay.java @@ -353,7 +353,7 @@ private void dismissInternal(boolean animate) { } // Animate is a flag for PhotoViewer transition, not ours - if (animate) { + if (animate || contentView == null) { AndroidUtilities.runOnUIThread(this::onDismissedInternal, 100); } else { AnimatorSet set = new AnimatorSet(); @@ -376,10 +376,10 @@ public void onAnimationEnd(Animator animation) { private void onDismissedInternal() { try { - if (controlsView.getParent() != null) { + if (contentView != null && contentView.getParent() != null) { windowManager.removeViewImmediate(contentView); } - } catch (IllegalArgumentException ignored) {} + } catch (Exception ignored) {} if (photoViewerWebView != null) { photoViewerWebView.showControls(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/PopupAudioView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/PopupAudioView.java index ee9cc32dd3..daf0a6159f 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/Components/PopupAudioView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/PopupAudioView.java @@ -151,7 +151,7 @@ protected void onDraw(Canvas canvas) { } canvas.restore(); - int state = buttonState + 5; + int state = buttonState; timePaint.setColor(0xffa1aab3); Drawable buttonDrawable = Theme.chat_fileStatesDrawable[state][buttonPressed]; int side = AndroidUtilities.dp(36); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/PopupSwipeBackLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/PopupSwipeBackLayout.java index c1b166ea0f..8e893f9ef9 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/PopupSwipeBackLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/PopupSwipeBackLayout.java @@ -10,6 +10,7 @@ import android.graphics.Path; import android.graphics.Rect; import android.graphics.RectF; +import android.util.Log; import android.util.SparseIntArray; import android.view.GestureDetector; import android.view.Gravity; @@ -25,6 +26,7 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.UserConfig; +import org.telegram.ui.ActionBar.ActionBarMenuSlider; import org.telegram.ui.ActionBar.ActionBarPopupWindow; import org.telegram.ui.ActionBar.Theme; @@ -432,7 +434,7 @@ public void setOnHeightUpdateListener(IntCallback onHeightUpdateListener) { */ private boolean isDisallowedView(MotionEvent e, View v) { v.getHitRect(hitRect); - if (hitRect.contains((int) e.getX(), (int) e.getY()) && v.canScrollHorizontally(-1)) + if (hitRect.contains((int) e.getX(), (int) e.getY()) && (v.canScrollHorizontally(-1) || v instanceof ActionBarMenuSlider)) return true; if (v instanceof ViewGroup) { ViewGroup vg = (ViewGroup) v; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/DoubleLimitsPageView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/DoubleLimitsPageView.java index 58d48b7698..a645375e8a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/DoubleLimitsPageView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/DoubleLimitsPageView.java @@ -44,7 +44,9 @@ protected void dispatchDraw(Canvas canvas) { public void setOffset(float translationX) { float progress = Math.abs(translationX / getMeasuredWidth()); if (progress == 1f) { - recyclerListView.scrollToPosition(0); + if (recyclerListView.findViewHolderForAdapterPosition(0) == null || recyclerListView.findViewHolderForAdapterPosition(0).itemView.getTop() != recyclerListView.getPaddingTop()) { + recyclerListView.scrollToPosition(0); + } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/DoubledLimitsBottomSheet.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/DoubledLimitsBottomSheet.java index 8735878a42..947def6fbf 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/DoubledLimitsBottomSheet.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/DoubledLimitsBottomSheet.java @@ -255,7 +255,7 @@ public static class Adapter extends RecyclerListView.SelectionAdapter { final ArrayList limits = new ArrayList<>(); - PremiumGradient.GradientTools gradientTools; + PremiumGradient.PremiumGradientTools gradientTools; private int totalGradientHeight; ViewGroup containerView; @@ -263,7 +263,7 @@ public static class Adapter extends RecyclerListView.SelectionAdapter { public 
Adapter(int currentAccount, boolean drawHeader) { this.drawHeader = drawHeader; - gradientTools = new PremiumGradient.GradientTools(Theme.key_premiumGradient1, Theme.key_premiumGradient2, Theme.key_premiumGradient3, Theme.key_premiumGradient4); + gradientTools = new PremiumGradient.PremiumGradientTools(Theme.key_premiumGradient1, Theme.key_premiumGradient2, Theme.key_premiumGradient3, Theme.key_premiumGradient4); gradientTools.x1 = 0; gradientTools.y1 = 0; gradientTools.x2 = 0; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/GiftPremiumBottomSheet.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/GiftPremiumBottomSheet.java index 3c33a905f8..357330752f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/GiftPremiumBottomSheet.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/GiftPremiumBottomSheet.java @@ -49,8 +49,8 @@ import java.util.concurrent.atomic.AtomicReference; public class GiftPremiumBottomSheet extends BottomSheetWithRecyclerListView { - private PremiumGradient.GradientTools gradientTools; - private PremiumGradient.GradientTools outlineGradient; + private PremiumGradient.PremiumGradientTools gradientTools; + private PremiumGradient.PremiumGradientTools outlineGradient; private PremiumButtonView premiumButtonView; private PremiumGiftTierCell dummyCell; @@ -73,7 +73,7 @@ public GiftPremiumBottomSheet(BaseFragment fragment, TLRPC.User user) { super(fragment, false, true); this.user = user; - gradientTools = new PremiumGradient.GradientTools(Theme.key_premiumGradient1, Theme.key_premiumGradient2, null, null); + gradientTools = new PremiumGradient.PremiumGradientTools(Theme.key_premiumGradient1, Theme.key_premiumGradient2, null, null); gradientTools.exactly = true; gradientTools.x1 = 0; gradientTools.y1 = 0f; @@ -82,7 +82,7 @@ public GiftPremiumBottomSheet(BaseFragment fragment, TLRPC.User user) { gradientTools.cx = 0; gradientTools.cy = 0; - outlineGradient = new PremiumGradient.GradientTools(Theme.key_premiumGradient1, Theme.key_premiumGradient2, Theme.key_premiumGradient3, Theme.key_premiumGradient4); + outlineGradient = new PremiumGradient.PremiumGradientTools(Theme.key_premiumGradient1, Theme.key_premiumGradient2, Theme.key_premiumGradient3, Theme.key_premiumGradient4); outlineGradient.paint.setStyle(Paint.Style.STROKE); outlineGradient.paint.setStrokeWidth(AndroidUtilities.dp(1.5f)); @@ -185,8 +185,11 @@ public GiftPremiumBottomSheet(BaseFragment fragment, TLRPC.User user) { } private void updateButtonText(boolean animated) { + if (LocaleController.isRTL) { + animated = false; + } if (!BuildVars.useInvoiceBilling() && (!BillingController.getInstance().isReady() || giftTiers.get(selectedTierIndex).googlePlayProductDetails == null)) { - premiumButtonView.setButton(LocaleController.getString(R.string.Loading), v -> {}, true); + premiumButtonView.setButton(LocaleController.getString(R.string.Loading), v -> {}, !LocaleController.isRTL); premiumButtonView.setFlickerDisabled(true); return; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/HelloParticles.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/HelloParticles.java new file mode 100644 index 0000000000..236cdfe089 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/HelloParticles.java @@ -0,0 +1,221 @@ +package org.telegram.ui.Components.Premium; + +import android.graphics.Bitmap; +import android.graphics.Canvas; +import android.graphics.Color; +import 
android.graphics.Paint;
+import android.graphics.RectF;
+import android.text.Layout;
+import android.text.StaticLayout;
+import android.text.TextPaint;
+
+import org.telegram.messenger.AndroidUtilities;
+import org.telegram.messenger.SharedConfig;
+import org.telegram.messenger.Utilities;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+
+public class HelloParticles {
+
+    private static String[] hellos = new String[] {
+            "Hello", "Привіт", "Привет", "Bonjour", "Hola", "Ciao", "Olá", "여보세요", "你好", "Salve",
+            "Sveiki", "Halo", "გამარჯობა", "Hallå", "Salam", "Tere", "Dia dhuit", "こんにちは", "Сайн уу",
+            "Bongu", "Ahoj", "γεια", "Zdravo", "नमस्ते", "Habari", "Hallo", "ជំរាបសួរ", "مرحبًا", "ನಮಸ್ಕಾರ",
+            "Салам", "Silav li wir", "سڵاو", "Kif inti", "Talofa", "Thobela", "हॅलो", "ሰላም", "Здраво",
+            "ഹലോ", "ہیلو", "ꯍꯦꯜꯂꯣ", "Alô", "வணக்கம்", "Mhoro", "Moni", "Alo", "สวัสดี", "Salom", "Բարեւ"
+    };
+
+    public static class Drawable {
+
+        private TextPaint textPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG);
+
+        private float bitmapScale = 1;
+        private HashMap<String, Bitmap> bitmaps = new HashMap<>();
+
+        public RectF rect = new RectF();
+        public RectF screenRect = new RectF();
+        public boolean paused;
+        private Paint paint = new Paint();
+
+        ArrayList<Particle> particles = new ArrayList<>();
+        public float speedScale = 1f;
+
+        public final int count;
+        public boolean useGradient;
+        public int size1 = 14, size2 = 12, size3 = 10;
+        public long minLifeTime = 2000;
+        private int lastColor;
+        private final float dt = 1000 / AndroidUtilities.screenRefreshRate;
+
+        public Drawable(int count) {
+            this.count = count;
+            textPaint.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf"));
+            textPaint.setColor(Color.WHITE);
+            switch (SharedConfig.getDevicePerformanceClass()) {
+                case SharedConfig.PERFORMANCE_CLASS_LOW:
+                    bitmapScale = .25f;
+                    break;
+                case SharedConfig.PERFORMANCE_CLASS_AVERAGE:
+                    bitmapScale = .5f;
+                    break;
+                case SharedConfig.PERFORMANCE_CLASS_HIGH:
+                default:
+                    bitmapScale = .75f;
+                    break;
+            }
+            textPaint.setTextSize(AndroidUtilities.dp(24 * bitmapScale));
+            paint.setColor(Color.WHITE);
+        }
+
+        public void init() {
+            if (particles.isEmpty()) {
+                for (int i = 0; i < count; i++) {
+                    particles.add(new Drawable.Particle());
+                }
+            }
+        }
+
+        public void resetPositions() {
+            long time = System.currentTimeMillis();
+            for (int i = 0; i < particles.size(); i++) {
+                particles.get(i).genPosition(time, i, true);
+            }
+        }
+
+        public void onDraw(Canvas canvas) {
+            long time = System.currentTimeMillis();
+            for (int i = 0; i < particles.size(); i++) {
+                Drawable.Particle particle = particles.get(i);
+                if (paused) {
+                    particle.draw(canvas, i, pausedTime);
+                } else {
+                    particle.draw(canvas, i, time);
+                }
+                if (particle.inProgress >= 1) {
+                    particle.genPosition(time, i, false);
+                }
+            }
+        }
+
+        public void recycle() {
+            for (Bitmap bitmap : bitmaps.values()) {
+                bitmap.recycle();
+            }
+            bitmaps.clear();
+        }
+
+        long pausedTime;
+
+        private class Particle {
+            private boolean set;
+            private float x, y;
+            private float vecX, vecY;
+            private int alpha;
+            private StaticLayout staticLayout;
+            private Bitmap bitmap;
+            private int l, w, h;
+            private long duration;
+            private float scale;
+            float inProgress;
+
+            public void draw(Canvas canvas, int index, long time) {
+
+                if (!paused) {
+                    float speed = AndroidUtilities.dp(4) * (dt / 660f) * speedScale;
+//                    x += vecX * speed;
+//                    y += vecY * speed;
+
+                    if (inProgress != 1f) {
+                        inProgress += dt / duration;
+                        if (inProgress > 1f) {
+                            inProgress = 1f;
+                        }
+                    }
+                }
+
+
+                if (bitmap != null) {
canvas.save(); + float t = 1f - 4f * (float) Math.pow(inProgress - .5f, 2f); + float s = scale / bitmapScale * (.7f + .4f * t); + canvas.translate(x - w / 2f, y - h / 2f); + canvas.scale(s, s, w / 2f, h / 2f); + paint.setAlpha((int) (alpha * t)); + canvas.drawBitmap(bitmap, 0, 0, paint); + canvas.restore(); + } + } + + public void genPosition(long time, int index, boolean reset) { + duration = 2250 + Math.abs(Utilities.fastRandom.nextLong() % 2250); + scale = .6f + .45f * Math.abs(Utilities.fastRandom.nextFloat()); + + String string = hellos[Math.abs(Utilities.fastRandom.nextInt() % hellos.length)]; + if (string.length() > 7) { + scale *= .6f; + } else if (string.length() > 5) { + scale *= .75f; + } + staticLayout = new StaticLayout(string, textPaint, AndroidUtilities.displaySize.x, Layout.Alignment.ALIGN_NORMAL, 1f, 0, false); + if (staticLayout.getLineCount() <= 0) { + l = w = h = 0; + } else { + l = (int) staticLayout.getLineLeft(0); + w = (int) staticLayout.getLineWidth(0); + h = staticLayout.getHeight(); + } + bitmap = bitmaps.get(string); + if (bitmap == null) { + bitmap = Bitmap.createBitmap(Math.max(1, w - Math.max(0, l)), Math.max(1, h), Bitmap.Config.ARGB_8888); + Canvas canvas = new Canvas(bitmap); + canvas.translate(-l, 0); + staticLayout.draw(canvas); + bitmaps.put(string, bitmap); + } + + float bestDistance = 0; + float minX = rect.left + w / 4f, maxX = rect.right - w / 4f; + if (index % 2 == 0) { + maxX = rect.centerX() - w / 2f; + } else { + minX = rect.centerX() + w / 2f; + } + float bestX = minX + Math.abs(Utilities.fastRandom.nextInt() % (maxX - minX)); + float bestY = rect.top + Math.abs(Utilities.fastRandom.nextInt() % rect.height()); + for (int k = 0; k < 10; k++) { + float randX = minX + Math.abs(Utilities.fastRandom.nextInt() % (maxX - minX)); + float randY = rect.top + Math.abs(Utilities.fastRandom.nextInt() % rect.height()); + float minDistance = Integer.MAX_VALUE; + for (int j = 0; j < particles.size(); j++) { + Particle p = particles.get(j); + if (!p.set) { + continue; + } + float rx = Math.min(Math.abs(p.x + p.w * (scale / bitmapScale) * 1.1f - randX), Math.abs(p.x - randX)); + float ry = p.y - randY; + float distance = rx * rx + ry * ry; + if (distance < minDistance) { + minDistance = distance; + } + } + if (minDistance > bestDistance) { + bestDistance = minDistance; + bestX = randX; + bestY = randY; + } + } + x = bestX; + y = bestY; + + double a = Math.atan2(x - rect.centerX(), y - rect.centerY()); + vecX = (float) Math.sin(a); + vecY = (float) Math.cos(a); + alpha = (int) (255 * ((50 + Utilities.fastRandom.nextInt(50)) / 100f)); + + inProgress = reset ? 
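The genPosition code above spreads the greeting labels by drawing up to ten random candidate points and keeping the one whose nearest already-placed neighbour is farthest away. A self-contained illustration of that best-of-k placement, in plain Java with no Telegram types:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

final class FarthestCandidatePlacement {

    private static final Random RANDOM = new Random();

    // Sample `attempts` random points inside the box and keep the one farthest
    // (by squared distance) from everything already placed.
    static float[] pickPosition(List<float[]> placed, float left, float top,
                                float right, float bottom, int attempts) {
        float bestX = left + RANDOM.nextFloat() * (right - left);
        float bestY = top + RANDOM.nextFloat() * (bottom - top);
        float bestDistance = 0;
        for (int k = 0; k < attempts; k++) {
            float x = left + RANDOM.nextFloat() * (right - left);
            float y = top + RANDOM.nextFloat() * (bottom - top);
            float minDistance = Float.MAX_VALUE;
            for (float[] p : placed) {
                float dx = p[0] - x, dy = p[1] - y;
                minDistance = Math.min(minDistance, dx * dx + dy * dy);
            }
            if (minDistance > bestDistance) {
                bestDistance = minDistance;
                bestX = x;
                bestY = y;
            }
        }
        return new float[]{bestX, bestY};
    }

    public static void main(String[] args) {
        List<float[]> placed = new ArrayList<>();
        for (int i = 0; i < 5; i++) {
            placed.add(pickPosition(placed, 0, 0, 1080, 400, 10));
        }
        placed.forEach(p -> System.out.printf("(%.0f, %.0f)%n", p[0], p[1]));
    }
}
```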
Math.abs((Utilities.fastRandom.nextFloat() % 1f) * .9f) : 0; + set = true; + } + } + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/LimitPreviewView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/LimitPreviewView.java index 95178df813..84cc7ad269 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/LimitPreviewView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/LimitPreviewView.java @@ -54,7 +54,7 @@ public class LimitPreviewView extends LinearLayout { TextView defaultCount; private float position; private View parentVideForGradient; - PremiumGradient.GradientTools staticGradient; + PremiumGradient.PremiumGradientTools staticGradient; int gradientYOffset; boolean wasHaptic; boolean animationCanPlay = true; @@ -272,6 +272,13 @@ public void setType(int type) { limitIcon.setText(spannableStringBuilder); } premiumCount.setText("4 GB"); + } else if (type == LimitReachedBottomSheet.TYPE_ADD_MEMBERS_RESTRICTED) { + if (limitIcon != null) { + SpannableStringBuilder spannableStringBuilder = new SpannableStringBuilder(); + spannableStringBuilder.append("d").setSpan(new ColoredImageSpan(icon), 0, 1, 0); + limitIcon.setText(spannableStringBuilder); + } + premiumCount.setText(""); } } @@ -283,7 +290,7 @@ public void setParentViewForGradien(ViewGroup containerView) { parentVideForGradient = containerView; } - public void setStaticGradinet(PremiumGradient.GradientTools gradientTools) { + public void setStaticGradinet(PremiumGradient.PremiumGradientTools gradientTools) { staticGradient = gradientTools; } @@ -368,7 +375,7 @@ protected void onDraw(Canvas canvas) { h = getMeasuredHeight(); PremiumGradient.getInstance().updateMainGradientMatrix(0, 0, LimitPreviewView.this.getMeasuredWidth(), LimitPreviewView.this.getMeasuredHeight(), getGlobalXOffset() - getX(), -getTop()); AndroidUtilities.rectTmp.set(0, AndroidUtilities.dp(3), getMeasuredWidth(), h - AndroidUtilities.dp(3)); - canvas.drawRoundRect(AndroidUtilities.rectTmp, h / 2f, h / 2f, PremiumGradient.getInstance().getMainGradientPaint()); + canvas.drawRoundRect(AndroidUtilities.rectTmp, h / 2f, h / 2f, PremiumGradient.getInstance().getPremiumLocakedPaint()); } else { if (invalidatePath) { invalidatePath = false; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/LimitReachedBottomSheet.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/LimitReachedBottomSheet.java index f522e9fc7e..327e65d10e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/LimitReachedBottomSheet.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/LimitReachedBottomSheet.java @@ -19,9 +19,11 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ChatObject; +import org.telegram.messenger.ContactsController; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessagesController; import org.telegram.messenger.R; +import org.telegram.messenger.SendMessagesHelper; import org.telegram.messenger.UserConfig; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; @@ -33,6 +35,7 @@ import org.telegram.ui.Cells.HeaderCell; import org.telegram.ui.Cells.ShadowSectionCell; import org.telegram.ui.Components.BottomSheetWithRecyclerListView; +import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.FlickerLoadingView; import org.telegram.ui.Components.LayoutHelper; import 
org.telegram.ui.Components.RecyclerItemsEnterAnimator; @@ -48,7 +51,7 @@ public class LimitReachedBottomSheet extends BottomSheetWithRecyclerListView { public static final int TYPE_PUBLIC_LINKS = 2; public static final int TYPE_FOLDERS = 3; public static final int TYPE_CHATS_IN_FOLDER = 4; - public static final int TYPE_TO_MANY_COMMUNITIES = 5; + public static final int TYPE_TO0_MANY_COMMUNITIES = 5; public static final int TYPE_LARGE_FILE = 6; public static final int TYPE_ACCOUNTS = 7; @@ -56,11 +59,16 @@ public class LimitReachedBottomSheet extends BottomSheetWithRecyclerListView { public static final int TYPE_GIFS = 9; public static final int TYPE_STICKERS = 10; + + public static final int TYPE_ADD_MEMBERS_RESTRICTED = 11; + private boolean canSendLink; + private TLRPC.TL_webPage linkPreview; + public static String limitTypeToServerString(int type) { switch (type) { case TYPE_PIN_DIALOGS: return "double_limits__dialog_pinned"; - case TYPE_TO_MANY_COMMUNITIES: + case TYPE_TO0_MANY_COMMUNITIES: return "double_limits__channels"; case TYPE_PUBLIC_LINKS: return "double_limits__channels_public"; @@ -88,15 +96,18 @@ public static String limitTypeToServerString(int type) { int dividerRow = -1; int chatsTitleRow = -1; int chatStartRow = -1; + int chatEndRow = -1; int loadingRow = -1; + int emptyViewDividerRow = -1; public boolean parentIsChannel; private int currentValue = -1; LimitPreviewView limitPreviewView; - HashSet selectedChats = new HashSet<>(); + HashSet selectedChats = new HashSet<>(); private ArrayList inactiveChats = new ArrayList<>(); private ArrayList inactiveChatsSignatures = new ArrayList<>(); + private ArrayList restrictedUsers = new ArrayList<>(); PremiumButtonView premiumButtonView; public Runnable onSuccessRunnable; @@ -107,17 +118,19 @@ public static String limitTypeToServerString(int type) { View divider; LimitParams limitParams; private boolean isVeryLargeFile; + private TLRPC.Chat fromChat; public LimitReachedBottomSheet(BaseFragment fragment, Context context, int type, int currentAccount) { super(fragment, false, hasFixedSize(type)); fixNavigationBar(); parentFragment = fragment; this.type = type; + updateTitle(); this.currentAccount = currentAccount; updateRows(); if (type == TYPE_PUBLIC_LINKS) { loadAdminedChannels(); - } else if (type == TYPE_TO_MANY_COMMUNITIES) { + } else if (type == TYPE_TO0_MANY_COMMUNITIES) { loadInactiveChannels(); } updatePremiumButtonText(); @@ -135,7 +148,9 @@ public void onViewCreated(FrameLayout containerView) { @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); - canvas.drawRect(0, 0, getMeasuredWidth(), 1, Theme.dividerPaint); + if (chatEndRow - chatStartRow > 1) { + canvas.drawRect(0, 0, getMeasuredWidth(), 1, Theme.dividerPaint); + } } }; divider.setBackgroundColor(Theme.getColor(Theme.key_dialogBackground)); @@ -155,14 +170,17 @@ protected void onDraw(Canvas canvas) { adminedChannelCell.setChecked(selectedChats.contains(chat), true); updateButton(); } else if (view instanceof GroupCreateUserCell) { + if (!canSendLink && type == TYPE_ADD_MEMBERS_RESTRICTED) { + return; + } GroupCreateUserCell cell = (GroupCreateUserCell) view; - TLRPC.Chat chat = (TLRPC.Chat) cell.getObject(); - if (selectedChats.contains(chat)) { - selectedChats.remove(chat); + Object object = cell.getObject(); + if (selectedChats.contains(object)) { + selectedChats.remove(object); } else { - selectedChats.add(chat); + selectedChats.add(object); } - cell.setChecked(selectedChats.contains(chat), true); + 
cell.setChecked(selectedChats.contains(object), true); updateButton(); } }); @@ -172,6 +190,9 @@ protected void onDraw(Canvas canvas) { return false; }); premiumButtonView.buttonLayout.setOnClickListener(v -> { + if (type == TYPE_ADD_MEMBERS_RESTRICTED) { + return; + } if (UserConfig.getInstance(currentAccount).isPremium() || MessagesController.getInstance(currentAccount).premiumLocked || isVeryLargeFile) { dismiss(); return; @@ -189,18 +210,63 @@ protected void onDraw(Canvas canvas) { dismiss(); }); premiumButtonView.overlayTextView.setOnClickListener(v -> { + if (type == TYPE_ADD_MEMBERS_RESTRICTED) { + if (selectedChats.isEmpty()) { + dismiss(); + return; + } + sendInviteMessages(); + return; + } if (selectedChats.isEmpty()) { return; } if (type == TYPE_PUBLIC_LINKS) { revokeSelectedLinks(); - } else if (type == TYPE_TO_MANY_COMMUNITIES) { + } else if (type == TYPE_TO0_MANY_COMMUNITIES) { leaveFromSelectedGroups(); } }); enterAnimator = new RecyclerItemsEnterAnimator(recyclerListView, true); } + private void sendInviteMessages() { + String link = null; + TLRPC.ChatFull chatFull = MessagesController.getInstance(currentAccount).getChatFull(fromChat.id); + if (chatFull == null) { + dismiss(); + return; + } + if (fromChat.username != null) { + link = "@" + fromChat.username; + } else if (chatFull.exported_invite != null) { + link = chatFull.exported_invite.link; + } else { + dismiss(); + return; + } + for (Object obj : selectedChats) { + TLRPC.User user = (TLRPC.User) obj; + SendMessagesHelper.getInstance(currentAccount).sendMessage(link, user.id, null, null, linkPreview, false, null, null, null, false, 0, null, false); + } + AndroidUtilities.runOnUIThread(() -> { + BulletinFactory factory = BulletinFactory.global(); + if (factory != null) { + if (selectedChats.size() == 1) { + TLRPC.User user = (TLRPC.User) selectedChats.iterator().next(); + factory.createSimpleBulletin(R.raw.voip_invite, + AndroidUtilities.replaceTags(LocaleController.formatString("InviteLinkSentSingle", R.string.InviteLinkSentSingle, ContactsController.formatName(user))) + ).show(); + } else { + factory.createSimpleBulletin(R.raw.voip_invite, + AndroidUtilities.replaceTags(LocaleController.formatPluralString("InviteLinkSent", selectedChats.size(), selectedChats.size())) + ).show(); + } + } + }); + dismiss(); + } + public void updatePremiumButtonText() { if (UserConfig.getInstance(currentAccount).isPremium() || MessagesController.getInstance(currentAccount).premiumLocked || isVeryLargeFile) { premiumButtonView.buttonTextView.setText(LocaleController.getString(R.string.OK)); @@ -213,7 +279,10 @@ public void updatePremiumButtonText() { private void leaveFromSelectedGroups() { TLRPC.User currentUser = MessagesController.getInstance(currentAccount).getUser(UserConfig.getInstance(currentAccount).getClientUserId()); - ArrayList chats = new ArrayList<>(selectedChats); + ArrayList chats = new ArrayList<>(); + for (Object obj : selectedChats) { + chats.add((TLRPC.Chat) obj); + } AlertDialog.Builder builder = new AlertDialog.Builder(getContext()); builder.setTitle(LocaleController.formatPluralString("LeaveCommunities", chats.size())); if (chats.size() == 1) { @@ -235,21 +304,34 @@ private void leaveFromSelectedGroups() { alertDialog.show(); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } private void updateButton() { - if (selectedChats.size() 
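sendInviteMessages above only proceeds when it can build a link: the chat's public @username first, then the exported invite from ChatFull, otherwise it just dismisses. A hedged sketch of that fallback order; ChatInfo and ChatFullInfo are simplified stand-ins for TLRPC.Chat and TLRPC.ChatFull:

```java
final class InviteLinkResolver {

    static final class ChatInfo { String username; }
    static final class ChatFullInfo { String exportedInviteLink; }

    /** Returns the link to forward, or null when neither a username nor an invite exists. */
    static String resolveInviteLink(ChatInfo chat, ChatFullInfo full) {
        if (chat != null && chat.username != null) {
            return "@" + chat.username;          // public chat: the handle itself is the link
        }
        if (full != null && full.exportedInviteLink != null) {
            return full.exportedInviteLink;      // private chat: fall back to the exported invite
        }
        return null;                             // nothing to send; the sheet just dismisses
    }
}
```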
> 0) { - String str = null; - if (type == TYPE_PUBLIC_LINKS) { - str = LocaleController.formatPluralString("RevokeLinks", selectedChats.size()); - } else if (type == TYPE_TO_MANY_COMMUNITIES) { - str = LocaleController.formatPluralString("LeaveCommunities", selectedChats.size()); + if (type == TYPE_ADD_MEMBERS_RESTRICTED) { + premiumButtonView.checkCounterView(); + if (!canSendLink) { + premiumButtonView.setOverlayText(LocaleController.getString("Close", R.string.Close), true, true); + } else if (selectedChats.size() > 0) { + premiumButtonView.setOverlayText(LocaleController.getString("SendInviteLink", R.string.SendInviteLink), true, true); + } else { + premiumButtonView.setOverlayText(LocaleController.getString("ActionSkip", R.string.ActionSkip), true, true); } - premiumButtonView.setOverlayText(str, true, true); + premiumButtonView.counterView.setCount(selectedChats.size(), true); + premiumButtonView.invalidate(); } else { - premiumButtonView.clearOverlayText(); + if (selectedChats.size() > 0) { + String str = null; + if (type == TYPE_PUBLIC_LINKS) { + str = LocaleController.formatPluralString("RevokeLinks", selectedChats.size()); + } else if (type == TYPE_TO0_MANY_COMMUNITIES) { + str = LocaleController.formatPluralString("LeaveCommunities", selectedChats.size()); + } + premiumButtonView.setOverlayText(str, true, true); + } else { + premiumButtonView.clearOverlayText(); + } } } @@ -262,6 +344,9 @@ private static boolean hasFixedSize(int type) { @Override public CharSequence getTitle() { + if (type == TYPE_ADD_MEMBERS_RESTRICTED) { + return LocaleController.getString("ChannelInviteViaLink", R.string.ChannelInviteViaLink); + } return LocaleController.getString("LimitReached", R.string.LimitReached); } @@ -270,6 +355,9 @@ public RecyclerListView.SelectionAdapter createAdapter() { return new RecyclerListView.SelectionAdapter() { @Override public boolean isEnabled(RecyclerView.ViewHolder holder) { + if (type == TYPE_ADD_MEMBERS_RESTRICTED && !canSendLink) { + return false; + } return holder.getItemViewType() == 1 || holder.getItemViewType() == 4; } @@ -312,6 +400,14 @@ public void onClick(View v) { flickerLoadingView.setItemsCount(10); view = flickerLoadingView; break; + case 6: + view = new View(getContext()) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(16), MeasureSpec.EXACTLY)); + } + }; + break; } view.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT)); return new RecyclerListView.Holder(view); @@ -320,11 +416,18 @@ public void onClick(View v) { @Override public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int position) { if (holder.getItemViewType() == 4) { - TLRPC.Chat chat = inactiveChats.get(position - chatStartRow); GroupCreateUserCell cell = (GroupCreateUserCell) holder.itemView; - String signature = inactiveChatsSignatures.get(position - chatStartRow); - cell.setObject(chat, chat.title, signature, true); - cell.setChecked(selectedChats.contains(chat), false); + if (type == TYPE_TO0_MANY_COMMUNITIES) { + TLRPC.Chat chat = inactiveChats.get(position - chatStartRow); + String signature = inactiveChatsSignatures.get(position - chatStartRow); + cell.setObject(chat, chat.title, signature, position != chatEndRow - 1f); + cell.setChecked(selectedChats.contains(chat), false); + } else if (type == TYPE_ADD_MEMBERS_RESTRICTED) { + TLRPC.User user = restrictedUsers.get(position - 
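For TYPE_ADD_MEMBERS_RESTRICTED, updateButton above picks the overlay label from three cases: Close when no link can be sent, SendInviteLink when users are selected, ActionSkip otherwise. The decision reduces to a small pure function (plain string literals here; the real code resolves keys through LocaleController):

```java
final class RestrictedSheetButtonLabel {
    static String overlayLabel(boolean canSendLink, int selectedCount) {
        if (!canSendLink) {
            return "Close";              // nothing can be sent, the button only dismisses
        }
        if (selectedCount > 0) {
            return "Send Invite Link";   // forward the link to the selected users
        }
        return "Skip";                   // a link could be sent, but nobody is selected
    }
}
```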
chatStartRow); + String signature = LocaleController.formatUserStatus(currentAccount, user, null, null); + cell.setObject(user, ContactsController.formatName(user.first_name, user.last_name), signature, position != chatEndRow - 1f); + cell.setChecked(selectedChats.contains(user), false); + } } else if (holder.getItemViewType() == 1) { TLRPC.Chat chat = chats.get(position - chatStartRow); AdminedChannelCell adminedChannelCell = (AdminedChannelCell) holder.itemView; @@ -333,7 +436,17 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi adminedChannelCell.setChecked(selectedChats.contains(chat), oldChat == chat); } else if (holder.getItemViewType() == 3) { HeaderCell headerCell = (HeaderCell) holder.itemView; - if (type == TYPE_PUBLIC_LINKS) { + if (type == TYPE_ADD_MEMBERS_RESTRICTED) { + if (canSendLink) { + headerCell.setText(LocaleController.getString("ChannelInviteViaLink", R.string.ChannelInviteViaLink)); + } else { + if (restrictedUsers.size() == 1) { + headerCell.setText(LocaleController.getString("ChannelInviteViaLinkRestricted2", R.string.ChannelInviteViaLinkRestricted2)); + } else { + headerCell.setText(LocaleController.getString("ChannelInviteViaLinkRestricted3", R.string.ChannelInviteViaLinkRestricted3)); + } + } + } else if (type == TYPE_PUBLIC_LINKS) { headerCell.setText(LocaleController.getString("YourPublicCommunities", R.string.YourPublicCommunities)); } else { headerCell.setText(LocaleController.getString("LastActiveCommunities", R.string.LastActiveCommunities)); @@ -351,8 +464,10 @@ public int getItemViewType(int position) { return 3; } else if (loadingRow == position) { return 5; + } else if (emptyViewDividerRow == position) { + return 6; } - if (type == TYPE_TO_MANY_COMMUNITIES) { + if (type == TYPE_TO0_MANY_COMMUNITIES || type == TYPE_ADD_MEMBERS_RESTRICTED) { return 4; } else { return 1; @@ -375,6 +490,35 @@ public void setVeryLargeFile(boolean b) { updatePremiumButtonText(); } + public void setRestrictedUsers(TLRPC.Chat chat, ArrayList userRestrictedPrivacy) { + fromChat = chat; + canSendLink = ChatObject.canUserDoAdminAction(chat, ChatObject.ACTION_INVITE); + restrictedUsers = new ArrayList<>(userRestrictedPrivacy); + selectedChats.clear(); + if (canSendLink) { + selectedChats.addAll(restrictedUsers); + } + updateRows(); + updateButton(); + + TLRPC.ChatFull chatFull = MessagesController.getInstance(currentAccount).getChatFull(fromChat.id); + String link; + if (fromChat.username == null && chatFull != null && chatFull.exported_invite != null) { + link = chatFull.exported_invite.link; + + TLRPC.TL_messages_getWebPage webPagePreview = new TLRPC.TL_messages_getWebPage(); + webPagePreview.url = link; + ConnectionsManager.getInstance(currentAccount).sendRequest(webPagePreview,(response, error) -> AndroidUtilities.runOnUIThread(() -> { + if (response != null) { + if (response instanceof TLRPC.TL_webPage) { + linkPreview = (TLRPC.TL_webPage) response; + } + } + })); + } + + } + private class HeaderView extends LinearLayout { @@ -388,10 +532,43 @@ public HeaderView(Context context) { int icon = limitParams.icon; String descriptionStr; boolean premiumLocked = MessagesController.getInstance(currentAccount).premiumLocked; - if (premiumLocked) { - descriptionStr = limitParams.descriptionStrLocked; + if (type == TYPE_ADD_MEMBERS_RESTRICTED) { + premiumLocked = true; + if (!canSendLink) { + if (ChatObject.isChannelAndNotMegaGroup(fromChat)) { + if (restrictedUsers.size() == 1) { + descriptionStr = 
LocaleController.formatString("InviteChannelRestrictedUsers2One", R.string.InviteChannelRestrictedUsers2One, ContactsController.formatName(restrictedUsers.get(0))); + } else { + descriptionStr = LocaleController.formatPluralString("InviteChannelRestrictedUsers2", restrictedUsers.size(), restrictedUsers.size()); + } + } else { + if (restrictedUsers.size() == 1) { + descriptionStr = LocaleController.formatString("InviteRestrictedUsers2One", R.string.InviteRestrictedUsers2One, ContactsController.formatName(restrictedUsers.get(0))); + } else { + descriptionStr = LocaleController.formatPluralString("InviteRestrictedUsers2", restrictedUsers.size(), restrictedUsers.size()); + } + } + } else { + if (ChatObject.isChannelAndNotMegaGroup(fromChat)) { + if (restrictedUsers.size() == 1) { + descriptionStr = LocaleController.formatString("InviteChannelRestrictedUsersOne", R.string.InviteChannelRestrictedUsersOne, ContactsController.formatName(restrictedUsers.get(0))); + } else { + descriptionStr = LocaleController.formatPluralString("InviteChannelRestrictedUsers", restrictedUsers.size(), restrictedUsers.size()); + } + } else { + if (restrictedUsers.size() == 1) { + descriptionStr = LocaleController.formatString("InviteRestrictedUsersOne", R.string.InviteRestrictedUsersOne, ContactsController.formatName(restrictedUsers.get(0))); + } else { + descriptionStr = LocaleController.formatPluralString("InviteRestrictedUsers", restrictedUsers.size(), restrictedUsers.size()); + } + } + } } else { - descriptionStr = (UserConfig.getInstance(currentAccount).isPremium() || isVeryLargeFile) ? limitParams.descriptionStrPremium : limitParams.descriptionStr; + if (premiumLocked) { + descriptionStr = limitParams.descriptionStrLocked; + } else { + descriptionStr = (UserConfig.getInstance(currentAccount).isPremium() || isVeryLargeFile) ? limitParams.descriptionStrPremium : limitParams.descriptionStr; + } } int defaultLimit = limitParams.defaultLimit; int premiumLimit = limitParams.premiumLimit; @@ -452,7 +629,7 @@ public HeaderView(Context context) { } } - if (type == TYPE_PUBLIC_LINKS || type == TYPE_TO_MANY_COMMUNITIES) { + if (type == TYPE_PUBLIC_LINKS || type == TYPE_TO0_MANY_COMMUNITIES) { limitPreviewView.setDelayedAnimation(); } @@ -461,14 +638,20 @@ public HeaderView(Context context) { TextView title = new TextView(context); title.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - if (type == TYPE_LARGE_FILE) { + if (type == TYPE_ADD_MEMBERS_RESTRICTED) { + if (canSendLink) { + title.setText(LocaleController.getString("ChannelInviteViaLink", R.string.ChannelInviteViaLink)); + } else { + title.setText(LocaleController.getString("ChannelInviteViaLinkRestricted", R.string.ChannelInviteViaLinkRestricted)); + } + } else if (type == TYPE_LARGE_FILE) { title.setText(LocaleController.getString("FileTooLarge", R.string.FileTooLarge)); } else { title.setText(LocaleController.getString("LimitReached", R.string.LimitReached)); } title.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); title.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); - addView(title, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL, 0, 22, 0, 10)); + addView(title, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL, 0, premiumLocked ? 
8 : 22, 0, 10)); TextView description = new TextView(context); description.setText(AndroidUtilities.replaceTags(descriptionStr)); @@ -509,7 +692,7 @@ private static LimitParams getLimitParams(int type, int currentAccount) { limitParams.descriptionStr = LocaleController.formatString("LimitReachedChatInFolders", R.string.LimitReachedChatInFolders, limitParams.defaultLimit, limitParams.premiumLimit); limitParams.descriptionStrPremium = LocaleController.formatString("LimitReachedChatInFoldersPremium", R.string.LimitReachedChatInFoldersPremium, limitParams.premiumLimit); limitParams.descriptionStrLocked = LocaleController.formatString("LimitReachedChatInFoldersLocked", R.string.LimitReachedChatInFoldersLocked, limitParams.defaultLimit); - } else if (type == TYPE_TO_MANY_COMMUNITIES) { + } else if (type == TYPE_TO0_MANY_COMMUNITIES) { limitParams.defaultLimit = MessagesController.getInstance(currentAccount).channelsLimitDefault; limitParams.premiumLimit = MessagesController.getInstance(currentAccount).channelsLimitPremium; limitParams.icon = R.drawable.msg_limit_groups; @@ -530,6 +713,13 @@ private static LimitParams getLimitParams(int type, int currentAccount) { limitParams.descriptionStr = LocaleController.formatString("LimitReachedAccounts", R.string.LimitReachedAccounts, limitParams.defaultLimit, limitParams.premiumLimit); limitParams.descriptionStrPremium = LocaleController.formatString("LimitReachedAccountsPremium", R.string.LimitReachedAccountsPremium, limitParams.premiumLimit); limitParams.descriptionStrLocked = LocaleController.formatString("LimitReachedAccountsPremium", R.string.LimitReachedAccountsPremium, limitParams.defaultLimit); + } else if (type == TYPE_ADD_MEMBERS_RESTRICTED) { + limitParams.defaultLimit = 0; + limitParams.premiumLimit = 0; + limitParams.icon = R.drawable.msg_limit_links; + limitParams.descriptionStr = LocaleController.formatString("LimitReachedAccounts", R.string.LimitReachedAccounts, limitParams.defaultLimit, limitParams.premiumLimit); + limitParams.descriptionStrPremium = ""; + limitParams.descriptionStrLocked = ""; } return limitParams; } @@ -573,7 +763,9 @@ private void updateRows() { rowCount = 0; dividerRow = -1; chatStartRow = -1; + chatEndRow = -1; loadingRow = -1; + emptyViewDividerRow = -1; headerRow = rowCount++; if (!hasFixedSize(type)) { dividerRow = rowCount++; @@ -582,11 +774,17 @@ private void updateRows() { loadingRow = rowCount++; } else { chatStartRow = rowCount; - if (type == TYPE_TO_MANY_COMMUNITIES) { + if (type == TYPE_ADD_MEMBERS_RESTRICTED) { + rowCount += restrictedUsers.size(); + } else if (type == TYPE_TO0_MANY_COMMUNITIES) { rowCount += inactiveChats.size(); } else { rowCount += chats.size(); } + chatEndRow = rowCount; + if (chatEndRow - chatStartRow > 1) { + emptyViewDividerRow = rowCount++; + } } } notifyDataSetChanged(); @@ -594,7 +792,10 @@ private void updateRows() { private void revokeSelectedLinks() { - final ArrayList channels = new ArrayList<>(selectedChats); + final ArrayList channels = new ArrayList<>(); + for (Object obj : selectedChats) { + chats.add((TLRPC.Chat) obj); + } revokeLinks(channels); } @@ -634,7 +835,7 @@ private void revokeLinks(ArrayList channels) { alertDialog.show(); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } @@ -693,9 +894,11 @@ private void loadInactiveChannels() { limitParams = getLimitParams(type, 
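updateRows above rebuilds the row indices from scratch, the usual pattern in these sheets: every section records its start position as rowCount advances, and the new chatEndRow/emptyViewDividerRow markers let the adapter append a 16dp spacer only when more than one chat row is shown. A simplified model of that bookkeeping (the loading state is omitted):

```java
final class SheetRows {
    int rowCount;
    int headerRow = -1, dividerRow = -1, chatStartRow = -1, chatEndRow = -1, emptyViewDividerRow = -1;

    void update(boolean hasFixedSize, int itemCount) {
        rowCount = 0;
        headerRow = rowCount++;
        if (!hasFixedSize) {
            dividerRow = rowCount++;
            chatStartRow = rowCount;
            rowCount += itemCount;
            chatEndRow = rowCount;
            if (chatEndRow - chatStartRow > 1) {
                emptyViewDividerRow = rowCount++; // trailing spacer only for real, multi-row lists
            }
        }
    }

    boolean isChatRow(int position) {
        return chatStartRow != -1 && position >= chatStartRow && position < chatEndRow;
    }
}
```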
currentAccount); } int currentValue = Math.max(inactiveChats.size(), limitParams.defaultLimit); - limitPreviewView.setIconValue(currentValue); - limitPreviewView.setBagePosition(currentValue / (float) limitParams.premiumLimit); - limitPreviewView.startDelayedAnimation(); + if (limitPreviewView != null) { + limitPreviewView.setIconValue(currentValue); + limitPreviewView.setBagePosition(currentValue / (float) limitParams.premiumLimit); + limitPreviewView.startDelayedAnimation(); + } }); } })); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumButtonView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumButtonView.java index b4583c07aa..198901b400 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumButtonView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumButtonView.java @@ -21,7 +21,9 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.BuildVars; import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.AnimatedFloat; import org.telegram.ui.Components.AnimatedTextView; +import org.telegram.ui.Components.CounterView; import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.RLottieImageView; @@ -34,7 +36,7 @@ public class PremiumButtonView extends FrameLayout { private float progress; private boolean inc; public AnimatedTextView buttonTextView; - public TextView overlayTextView; + public AnimatedTextView overlayTextView; private int radius; private boolean showOverlay; @@ -51,6 +53,7 @@ public class PremiumButtonView extends FrameLayout { private boolean isButtonTextSet; private boolean isFlickerDisabled; + CounterView counterView; public PremiumButtonView(@NonNull Context context, boolean createOverlayTextView) { this(context, AndroidUtilities.dp(8), createOverlayTextView); @@ -85,12 +88,13 @@ public PremiumButtonView(@NonNull Context context, int radius, boolean createOve addView(buttonLayout); if (createOverlayTextView) { - overlayTextView = new TextView(context); + overlayTextView = new AnimatedTextView(context, true, true, true); overlayTextView.setPadding(AndroidUtilities.dp(34), 0, AndroidUtilities.dp(34), 0); overlayTextView.setGravity(Gravity.CENTER); overlayTextView.setTextColor(Theme.getColor(Theme.key_featuredStickers_buttonText)); - overlayTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + overlayTextView.setTextSize(AndroidUtilities.dp(14)); overlayTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + overlayTextView.getDrawable().setAllowCancel(true); overlayTextView.setBackground(Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(8), Color.TRANSPARENT, ColorUtils.setAlphaComponent(Color.WHITE, 120))); addView(overlayTextView); @@ -106,6 +110,9 @@ public AnimatedTextView getTextView() { return buttonTextView; } + AnimatedFloat counterOffset = new AnimatedFloat(this); + AnimatedFloat counterOffset2 = new AnimatedFloat(this); + @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, heightMeasureSpec); @@ -113,6 +120,16 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { @Override protected void dispatchDraw(Canvas canvas) { + if (counterView != null) { + counterOffset.set((counterView.counterDrawable.getWidth() * 0.85f + AndroidUtilities.dp(3)) / 2f); + counterOffset2.set(getMeasuredWidth() / 2f + 
(overlayTextView.getDrawable().getWidth()) / 2f + AndroidUtilities.dp(3)); + overlayTextView.setTranslationX(-counterOffset.get()); + counterView.setTranslationX(counterOffset2.get() - counterOffset.get()); + } else { + if (overlayTextView != null) { + overlayTextView.setTranslationX(0); + } + } AndroidUtilities.rectTmp.set(0, 0, getMeasuredWidth(), getMeasuredHeight()); if (overlayProgress != 1f || !drawOverlayColor) { if (inc) { @@ -157,7 +174,7 @@ protected void dispatchDraw(Canvas canvas) { public void setOverlayText(String text, boolean drawOverlayColor, boolean animated) { showOverlay = true; this.drawOverlayColor = drawOverlayColor; - overlayTextView.setText(text); + overlayTextView.setText(text, animated); updateOverlay(animated); } @@ -254,4 +271,15 @@ public void setButton(String text, View.OnClickListener clickListener, boolean a buttonTextView.setText(text, animated); buttonLayout.setOnClickListener(clickListener); } + + public void checkCounterView() { + if (counterView == null) { + counterView = new CounterView(getContext(), null); + counterView.setGravity(Gravity.LEFT); + counterView.setColors(Theme.key_featuredStickers_addButton, Theme.key_featuredStickers_buttonText); + counterView.counterDrawable.circleScale = 0.8f; + setClipChildren(false); + addView(counterView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 24, Gravity.CENTER_VERTICAL)); + } + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumFeatureBottomSheet.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumFeatureBottomSheet.java index 31c7514639..f525d6ef8c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumFeatureBottomSheet.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumFeatureBottomSheet.java @@ -136,9 +136,10 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { PremiumPreviewFragment.PremiumFeatureData featureData = premiumFeatures.get(selectedPosition); + setApplyTopPadding(false); setApplyBottomPadding(false); useBackgroundTopPadding = false; - PremiumGradient.GradientTools gradientTools = new PremiumGradient.GradientTools(Theme.key_premiumGradientBottomSheet1, Theme.key_premiumGradientBottomSheet2, Theme.key_premiumGradientBottomSheet3, null); + PremiumGradient.PremiumGradientTools gradientTools = new PremiumGradient.PremiumGradientTools(Theme.key_premiumGradientBottomSheet1, Theme.key_premiumGradientBottomSheet2, Theme.key_premiumGradientBottomSheet3, null); gradientTools.x1 = 0; gradientTools.y1 = 1.1f; gradientTools.x2 = 1.5f; @@ -234,7 +235,7 @@ public boolean isViewFromObject(@NonNull View view, @NonNull Object object) { viewPager.setCurrentItem(selectedPosition); frameLayout.addView(viewPager, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 100, 0, 0, 18, 0, 0)); - frameLayout.addView(closeLayout, LayoutHelper.createFrame(52, 52, Gravity.RIGHT | Gravity.TOP, 0, 16, 0, 0)); + frameLayout.addView(closeLayout, LayoutHelper.createFrame(52, 52, Gravity.RIGHT | Gravity.TOP, 0, 24, 0, 0)); BottomPagesView bottomPages = new BottomPagesView(getContext(), viewPager, premiumFeatures.size()); viewPager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() { @@ -290,7 +291,9 @@ private void checkPage() { if (localGradientAlpha != gradientAlpha) { gradientAlpha = localGradientAlpha; content.invalidate(); - checkTopOffset(); + AndroidUtilities.runOnUIThread(() -> { + checkTopOffset(); + }); } } @@ -381,7 +384,7 @@ protected void onMeasure(int 
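dispatchDraw above keeps the overlay text and the new CounterView centred as a pair: the text shifts left by half of the counter's footprint, and the counter sits just past the text's right edge, with two AnimatedFloat values smoothing the motion. The underlying arithmetic with the animation stripped out (method and parameter names are illustrative; 0.85f and the 3dp gap are the constants used in the patch):

```java
final class ButtonCounterLayout {

    /** How far the overlay text moves left to make room for the counter. */
    static float textTranslationX(float counterWidth, float gap) {
        return -(counterWidth * 0.85f + gap) / 2f;
    }

    /** Where the counter goes: just past the (already shifted) text's right edge. */
    static float counterTranslationX(float buttonWidth, float textWidth, float counterWidth, float gap) {
        float counterOffset = (counterWidth * 0.85f + gap) / 2f;
        return buttonWidth / 2f + textWidth / 2f + gap - counterOffset;
    }
}
```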
widthMeasureSpec, int heightMeasureSpec) { @Override protected void dispatchDraw(Canvas canvas) { - shadowDrawable.setBounds(0, topCurrentOffset - backgroundPaddingTop + AndroidUtilities.dp(2), getMeasuredWidth(), getMeasuredHeight()); + shadowDrawable.setBounds(0, topCurrentOffset + backgroundPaddingTop - AndroidUtilities.dp(2) + 1, getMeasuredWidth(), getMeasuredHeight()); shadowDrawable.draw(canvas); super.dispatchDraw(canvas); if (actionBar != null && actionBar.getVisibility() == View.VISIBLE && actionBar.getAlpha() != 0) { @@ -622,6 +625,9 @@ void setFeatureDate(PremiumPreviewFragment.PremiumFeatureData featureData) { } else if (startType == PremiumPreviewFragment.PREMIUM_FEATURE_VOICE_TO_TEXT) { title.setText(LocaleController.getString(R.string.PremiumPreviewVoiceToText)); description.setText(LocaleController.getString(R.string.PremiumPreviewVoiceToTextDescription2)); + } else if (startType == PremiumPreviewFragment.PREMIUM_FEATURE_TRANSLATIONS) { + title.setText(LocaleController.getString(R.string.PremiumPreviewTranslations)); + description.setText(LocaleController.getString(R.string.PremiumPreviewTranslationsDescription)); } topViewOnFullHeight = false; } else { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumGradient.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumGradient.java index 4b24fc2fe2..441bafe95e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumGradient.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumGradient.java @@ -23,7 +23,7 @@ public class PremiumGradient { - private final GradientTools mainGradient = new GradientTools(Theme.key_premiumGradient1, Theme.key_premiumGradient2, Theme.key_premiumGradient3, Theme.key_premiumGradient4); + private final PremiumGradientTools mainGradient = new PremiumGradientTools(Theme.key_premiumGradient1, Theme.key_premiumGradient2, Theme.key_premiumGradient3, Theme.key_premiumGradient4); // private final GradientTools grayGradient = new GradientTools(Theme.key_windowBackgroundWhiteGrayText7, Theme.key_windowBackgroundWhiteGrayText7, Theme.key_windowBackgroundWhiteGrayText7); private final Paint mainGradientPaint = mainGradient.paint; Paint lockedPremiumPaint; @@ -62,7 +62,7 @@ public InternalDrawable createGradientDrawable(Drawable drawable) { return createGradientDrawable(drawable, mainGradient); } - public InternalDrawable createGradientDrawable(Drawable drawable, PremiumGradient.GradientTools gradient) { + public InternalDrawable createGradientDrawable(Drawable drawable, PremiumGradientTools gradient) { if (drawable == null) { return null; } @@ -101,6 +101,14 @@ public void updateMainGradientMatrix(int x, int y, int width, int height, float mainGradient.gradientMatrix(x, y, width, height, xOffset, yOffset); } + public Paint getPremiumLocakedPaint() { + if (lockedPremiumPaint == null) { + lockedPremiumPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + } + lockedPremiumPaint.setColor(Theme.getColor(Theme.key_featuredStickers_addButton)); + return lockedPremiumPaint; + } + public static class InternalDrawable extends BitmapDrawable { public int[] colors; @@ -135,7 +143,9 @@ public Paint getMainGradientPaint() { return mainGradientPaint; } } - public static class GradientTools { + + //help with update colors and position + public static class PremiumGradientTools { public float cx = 0.5f; public float cy = 0.5f; @@ -149,15 +159,15 @@ public static class GradientTools { public float x1 = 0f, y1 = 1f, x2 = 1.5f, y2 = 
0f; - public GradientTools(String colorKey1, String colorKey2, String colorKey3) { + public PremiumGradientTools(String colorKey1, String colorKey2, String colorKey3) { this(colorKey1, colorKey2, colorKey3, null, null); } - public GradientTools(String colorKey1, String colorKey2, String colorKey3, String colorKey4) { + public PremiumGradientTools(String colorKey1, String colorKey2, String colorKey3, String colorKey4) { this(colorKey1, colorKey2, colorKey3, colorKey4, null); } - public GradientTools(String colorKey1, String colorKey2, String colorKey3, String colorKey4, String colorKey5) { + public PremiumGradientTools(String colorKey1, String colorKey2, String colorKey3, String colorKey4, String colorKey5) { this.colorKey1 = colorKey1; this.colorKey2 = colorKey2; this.colorKey3 = colorKey3; @@ -165,7 +175,6 @@ public GradientTools(String colorKey1, String colorKey2, String colorKey3, Strin this.colorKey5 = colorKey5; } - public void gradientMatrix(int x, int y, int x1, int y1, float xOffset, float yOffset) { chekColors(); if (exactly) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumLockIconView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumLockIconView.java index f132347f46..bd43995327 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumLockIconView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumLockIconView.java @@ -10,6 +10,7 @@ import android.graphics.Shader; import android.widget.ImageView; +import androidx.annotation.Nullable; import androidx.core.graphics.ColorUtils; import org.telegram.messenger.AndroidUtilities; @@ -63,7 +64,8 @@ public PremiumLockIconView(Context context, int type, Theme.ResourcesProvider re boolean waitingImage; boolean wasDrawn; - CellFlickerDrawable cellFlickerDrawable = new CellFlickerDrawable(); + @Nullable + CellFlickerDrawable cellFlickerDrawable; @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { @@ -114,6 +116,9 @@ protected void onDraw(Canvas canvas) { PremiumGradient.getInstance().updateMainGradientMatrix(0, 0, getMeasuredWidth(), getMeasuredHeight(), -AndroidUtilities.dp(24), 0); canvas.drawPath(path, PremiumGradient.getInstance().getMainGradientPaint()); } + if (cellFlickerDrawable == null) { + cellFlickerDrawable = new CellFlickerDrawable(); + } cellFlickerDrawable.setParentWidth(getMeasuredWidth() / 2); cellFlickerDrawable.drawFrame = false; cellFlickerDrawable.draw(canvas, path, this); @@ -226,8 +231,10 @@ public boolean ready() { public void play(int delay) { isEnter = true; - cellFlickerDrawable.progress = 0; - cellFlickerDrawable.repeatEnabled = false; + if (cellFlickerDrawable != null) { + cellFlickerDrawable.progress = 0; + cellFlickerDrawable.repeatEnabled = false; + } invalidate(); animate().scaleX(1.1f).scaleY(1.1f).setStartDelay(delay).setInterpolator(AndroidUtilities.overshootInterpolator).setDuration(300); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumPreviewBottomSheet.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumPreviewBottomSheet.java index 2ae1870e2c..18e7580e1f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumPreviewBottomSheet.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumPreviewBottomSheet.java @@ -67,7 +67,7 @@ public class PremiumPreviewBottomSheet extends BottomSheetWithRecyclerListView i ArrayList premiumFeatures = new 
ArrayList<>(); int currentAccount; TLRPC.User user; -// GiftPremiumBottomSheet.GiftTier giftTier; + GiftPremiumBottomSheet.GiftTier giftTier; boolean isOutboundGift; PremiumFeatureCell dummyCell; @@ -82,7 +82,7 @@ public class PremiumPreviewBottomSheet extends BottomSheetWithRecyclerListView i int buttonRow; FireworksOverlay fireworksOverlay; - PremiumGradient.GradientTools gradientTools; + PremiumGradient.PremiumGradientTools gradientTools; StarParticlesView starParticlesView; GLIconTextureView iconTextureView; ViewGroup iconContainer; @@ -108,13 +108,17 @@ public class PremiumPreviewBottomSheet extends BottomSheetWithRecyclerListView i FrameLayout bulletinContainer; public PremiumPreviewBottomSheet(BaseFragment fragment, int currentAccount, TLRPC.User user, Theme.ResourcesProvider resourcesProvider) { + this(fragment, currentAccount, user, null, resourcesProvider); + } + + public PremiumPreviewBottomSheet(BaseFragment fragment, int currentAccount, TLRPC.User user, GiftPremiumBottomSheet.GiftTier gift, Theme.ResourcesProvider resourcesProvider) { super(fragment, false, false, false, resourcesProvider); fixNavigationBar(); this.fragment = fragment; topPadding = 0.26f; this.user = user; this.currentAccount = currentAccount; -// this.giftTier = gift; + this.giftTier = gift; dummyCell = new PremiumFeatureCell(getContext()); PremiumPreviewFragment.fillPremiumFeaturesList(premiumFeatures, currentAccount); @@ -122,7 +126,7 @@ public PremiumPreviewBottomSheet(BaseFragment fragment, int currentAccount, TLRP buttonContainer.setVisibility(View.GONE); } - gradientTools = new PremiumGradient.GradientTools(Theme.key_premiumGradient1, Theme.key_premiumGradient2, Theme.key_premiumGradient3, Theme.key_premiumGradient4); + gradientTools = new PremiumGradient.PremiumGradientTools(Theme.key_premiumGradient1, Theme.key_premiumGradient2, Theme.key_premiumGradient3, Theme.key_premiumGradient4); gradientTools.exactly = true; gradientTools.x1 = 0; gradientTools.y1 = 1f; @@ -351,7 +355,7 @@ protected void onCloseByLink() { } else if (isEmojiStatus) { titleView[0].setText(AndroidUtilities.replaceTags(LocaleController.formatString(R.string.TelegramPremiumUserStatusDefaultDialogTitle, ContactsController.formatName(user.first_name, user.last_name)))); subtitleView.setText(AndroidUtilities.replaceTags(LocaleController.formatString(R.string.TelegramPremiumUserStatusDialogSubtitle, ContactsController.formatName(user.first_name, user.last_name)))); - } else /*if (giftTier != null) { + } else if (giftTier != null) { if (isOutboundGift) { titleView[0].setText(AndroidUtilities.replaceSingleTag(LocaleController.formatString(R.string.TelegramPremiumUserGiftedPremiumOutboundDialogTitleWithPlural, user != null ? user.first_name : "", LocaleController.formatPluralString("GiftMonths", giftTier.getMonths())), Theme.key_windowBackgroundWhiteBlueButton, AndroidUtilities.REPLACING_TAG_TYPE_LINK, null)); subtitleView.setText(AndroidUtilities.replaceSingleTag(LocaleController.formatString(R.string.TelegramPremiumUserGiftedPremiumOutboundDialogSubtitle, user != null ? user.first_name : ""), Theme.key_windowBackgroundWhiteBlueButton, AndroidUtilities.REPLACING_TAG_TYPE_LINK, null)); @@ -359,7 +363,7 @@ protected void onCloseByLink() { titleView[0].setText(AndroidUtilities.replaceSingleTag(LocaleController.formatString(R.string.TelegramPremiumUserGiftedPremiumDialogTitleWithPlural, user != null ? 
user.first_name : "", LocaleController.formatPluralString("GiftMonths", giftTier.getMonths())), Theme.key_windowBackgroundWhiteBlueButton, AndroidUtilities.REPLACING_TAG_TYPE_LINK, null)); subtitleView.setText(AndroidUtilities.replaceTags(LocaleController.getString(R.string.TelegramPremiumUserGiftedPremiumDialogSubtitle))); } - } else */ { + } else { titleView[0].setText(AndroidUtilities.replaceSingleTag(LocaleController.formatString(R.string.TelegramPremiumUserDialogTitle, ContactsController.formatName(user.first_name, user.last_name)), Theme.key_windowBackgroundWhiteBlueButton, AndroidUtilities.REPLACING_TAG_TYPE_LINK, null)); subtitleView.setText(AndroidUtilities.replaceTags(LocaleController.getString(R.string.TelegramPremiumUserDialogSubtitle))); } @@ -603,13 +607,11 @@ public void show() { NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.stopAllHeavyOperations, 4); if (animateConfetti) { AndroidUtilities.runOnUIThread(()->{ - if (!NekoConfig.disableVibration.Bool()) { - try { + try { + if (!NekoConfig.disableVibration.Bool()) container.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignored) { - } - fireworksOverlay.start(); - } + } catch (Exception ignored) {} + fireworksOverlay.start(); }, 200); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumTierCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumTierCell.java index 94ef24badf..4c90d1c40c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumTierCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/PremiumTierCell.java @@ -107,6 +107,15 @@ public PremiumTierCell(@NonNull Context context) { setWillNotDraw(false); } + @Override + public void setEnabled(boolean enabled) { + super.setEnabled(enabled); + + titleView.setAlpha(enabled ? 1 : 0.6f); + pricePerMonthView.setAlpha(enabled ? 1 : 0.6f); + checkBox.setAlpha(enabled ? 1 : 0.6f); + } + @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); @@ -142,7 +151,7 @@ protected void onLayout(boolean changed, int l, int t, int r, int b) { checkRtlAndLayout(checkBox); int y = (int) ((getMeasuredHeight() - pricePerMonthView.getMeasuredHeight()) / 2f); - if (AndroidUtilities.dp(leftPaddingToCheckboxDp + leftPaddingToTextDp + 24) + checkBox.getMeasuredWidth() + pricePerYearStrikeView.getMeasuredWidth() + pricePerYearView.getMeasuredWidth() + getPaddingLeft() > getMeasuredWidth() - pricePerMonthView.getMeasuredWidth() && discountView.getVisibility() == VISIBLE) { + if (AndroidUtilities.dp(leftPaddingToCheckboxDp + leftPaddingToTextDp + 24) + checkBox.getMeasuredWidth() + (pricePerYearStrikeView.getVisibility() == VISIBLE ? 
pricePerYearStrikeView.getMeasuredWidth() : 0) + pricePerYearView.getMeasuredWidth() + getPaddingLeft() > getMeasuredWidth() - pricePerMonthView.getMeasuredWidth() && discountView.getVisibility() == VISIBLE) { y = getPaddingTop() + AndroidUtilities.dp(2); } AndroidUtilities.rectTmp2.set(getMeasuredWidth() - pricePerMonthView.getMeasuredWidth() - AndroidUtilities.dp(16) - getPaddingRight(), y, 0, 0); @@ -159,7 +168,7 @@ protected void onLayout(boolean changed, int l, int t, int r, int b) { AndroidUtilities.rectTmp2.set(AndroidUtilities.dp(leftPaddingToCheckboxDp + leftPaddingToTextDp) + checkBox.getMeasuredWidth() + getPaddingLeft(), getMeasuredHeight() - pricePerYearStrikeView.getMeasuredHeight() - getPaddingBottom(), 0, 0); checkRtlAndLayout(pricePerYearStrikeView); - AndroidUtilities.rectTmp2.set(AndroidUtilities.dp(leftPaddingToCheckboxDp + leftPaddingToTextDp + 6) + checkBox.getMeasuredWidth() + pricePerYearStrikeView.getMeasuredWidth() + getPaddingLeft(), getMeasuredHeight() - pricePerYearView.getMeasuredHeight() - getPaddingBottom(), 0, 0); + AndroidUtilities.rectTmp2.set(AndroidUtilities.dp(leftPaddingToCheckboxDp + leftPaddingToTextDp) + checkBox.getMeasuredWidth() + (pricePerYearStrikeView.getVisibility() == VISIBLE ? pricePerYearStrikeView.getMeasuredWidth() + AndroidUtilities.dp(6) : 0) + getPaddingLeft(), getMeasuredHeight() - pricePerYearView.getMeasuredHeight() - getPaddingBottom(), 0, 0); checkRtlAndLayout(pricePerYearView); } @@ -197,8 +206,8 @@ private void checkRtlAndLayout(View v) { rect.bottom = rect.top + v.getMeasuredHeight(); if (LocaleController.isRTL) { int right = rect.right; - rect.right = rect.left; - rect.left = right; + rect.right = getWidth() - rect.left; + rect.left = getWidth() - right; } v.layout(AndroidUtilities.rectTmp2.left, AndroidUtilities.rectTmp2.top, AndroidUtilities.rectTmp2.right, AndroidUtilities.rectTmp2.bottom); } @@ -217,7 +226,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { discountView.measure(MeasureSpec.makeMeasureSpec(0, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(0, MeasureSpec.EXACTLY)); } pricePerYearStrikeView.measure(MeasureSpec.makeMeasureSpec(width - checkBox.getMeasuredWidth(), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(height, MeasureSpec.AT_MOST)); - pricePerYearView.measure(MeasureSpec.makeMeasureSpec(width - checkBox.getMeasuredWidth() - pricePerYearStrikeView.getMeasuredWidth() - AndroidUtilities.dp(6), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(height, MeasureSpec.AT_MOST)); + pricePerYearView.measure(MeasureSpec.makeMeasureSpec(width - checkBox.getMeasuredWidth() - (pricePerYearStrikeView.getVisibility() == VISIBLE ? 
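The checkRtlAndLayout fix above replaces a plain left/right swap, which produced an inverted rect, with a mirror of the rect about the parent's width. The same transform in isolation, with a tiny check of the expected result:

```java
final class RtlMirror {

    /** rect = {left, top, right, bottom}; mirrors it horizontally inside parentWidth. */
    static int[] mirror(int[] rect, int parentWidth) {
        int left = rect[0], right = rect[2];
        return new int[]{parentWidth - right, rect[1], parentWidth - left, rect[3]};
    }

    public static void main(String[] args) {
        // A 100px-wide child at x=16 in a 400px parent lands at x=284..384 when mirrored.
        int[] mirrored = mirror(new int[]{16, 0, 116, 48}, 400);
        System.out.println(mirrored[0] + ".." + mirrored[2]); // prints 284..384
    }
}
```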
pricePerYearStrikeView.getMeasuredWidth() : 0) - AndroidUtilities.dp(6), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(height, MeasureSpec.AT_MOST)); if (pricePerYearView.getVisibility() != VISIBLE) { height -= AndroidUtilities.dp(8); @@ -269,6 +278,11 @@ public void bind(PremiumPreviewFragment.SubscriptionTier tier, boolean hasDivide pricePerYearStrikeView.setText(tier.getFormattedPricePerYearRegular()); pricePerYearView.setText(LocaleController.formatString(R.string.PricePerYear, tier.getFormattedPricePerYear())); pricePerMonthView.setText(LocaleController.formatString(R.string.PricePerMonthMe, tier.getFormattedPricePerMonth())); + + if (tier.subscriptionOption.current) { + pricePerYearView.setVisibility(VISIBLE); + pricePerYearView.setText(LocaleController.getString(R.string.YourCurrentPlan)); + } } else { discountView.setText(LocaleController.formatString(R.string.GiftPremiumOptionDiscount, 10)); discountView.setVisibility(VISIBLE); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/VideoScreenPreview.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/VideoScreenPreview.java index f59fac62f3..3e94dfdc8c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/VideoScreenPreview.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Premium/VideoScreenPreview.java @@ -67,12 +67,16 @@ private void checkVideo() { return; } - MediaMetadataRetriever retriever = new MediaMetadataRetriever(); - retriever.setDataSource(ApplicationLoader.applicationContext, Uri.fromFile(file)); - int width = Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH)); - int height = Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT)); - retriever.release(); - aspectRatio = width / (float) height; + try { + MediaMetadataRetriever retriever = new MediaMetadataRetriever(); + retriever.setDataSource(ApplicationLoader.applicationContext, Uri.fromFile(file)); + int width = Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH)); + int height = Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT)); + retriever.release(); + aspectRatio = width / (float) height; + } catch (Exception e) { + aspectRatio = 0.671f; + } } else { aspectRatio = 0.671f; } @@ -102,6 +106,7 @@ private void checkVideo() { private float roundRadius; StarParticlesView.Drawable starDrawable; SpeedLineParticles.Drawable speedLinesDrawable; + HelloParticles.Drawable helloParticlesDrawable; private final static float[] speedScaleVideoTimestamps = new float[]{0.02f, 1f, 1f, 1f, 1f, 1f, 1f, 1f, 1f, 0.02f}; private MatrixParticlesDrawable matrixParticlesDrawable; @@ -147,6 +152,9 @@ public VideoScreenPreview(Context context, SvgHelper.SvgDrawable svgDrawable, in } else if (type == PremiumPreviewFragment.PREMIUM_FEATURE_DOWNLOAD_SPEED) { speedLinesDrawable = new SpeedLineParticles.Drawable(200); speedLinesDrawable.init(); + } else if (type == PremiumPreviewFragment.PREMIUM_FEATURE_TRANSLATIONS) { + helloParticlesDrawable = new HelloParticles.Drawable(25); + helloParticlesDrawable.init(); } else { int particlesCount = 100; if (SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_HIGH) { @@ -188,7 +196,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { AndroidUtilities.rectTmp.set(0, 0, getMeasuredWidth(), (int) (getMeasuredHeight() + roundRadius)); } float rad = roundRadius - 
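checkVideo now guards MediaMetadataRetriever with a try/catch, since corrupt or partially downloaded promo files can make it throw, and falls back to the 0.671f aspect ratio used elsewhere in the class. The same defensive pattern as a small helper (release() is wrapped as well so cleanup failures are ignored):

```java
import android.content.Context;
import android.media.MediaMetadataRetriever;
import android.net.Uri;

import java.io.File;

final class SafeVideoAspectRatio {

    static float aspectRatio(Context context, File file, float fallback) {
        MediaMetadataRetriever retriever = new MediaMetadataRetriever();
        try {
            retriever.setDataSource(context, Uri.fromFile(file));
            int width = Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH));
            int height = Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT));
            return width / (float) height;
        } catch (Exception e) {
            return fallback; // unreadable metadata: use the known aspect ratio instead of crashing
        } finally {
            try {
                retriever.release();
            } catch (Exception ignored) {
            }
        }
    }
}
```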
AndroidUtilities.dp(3); - clipPath.addRoundRect(AndroidUtilities.rectTmp, new float[]{rad, rad, rad, rad, rad, rad, rad, rad}, Path.Direction.CW); + clipPath.addRoundRect(AndroidUtilities.rectTmp, rad, rad, Path.Direction.CW); } @Override @@ -339,13 +347,19 @@ protected void onLayout(boolean changed, int left, int top, int right, int botto speedLinesDrawable.rect.offset(0, getMeasuredHeight() * 0.1f); speedLinesDrawable.resetPositions(); } + if (helloParticlesDrawable != null) { + helloParticlesDrawable.rect.set(0, 0, getMeasuredWidth(), getMeasuredHeight()); + helloParticlesDrawable.screenRect.set(0, 0, getMeasuredWidth(), getMeasuredHeight()); + helloParticlesDrawable.rect.inset(AndroidUtilities.dp(0), getMeasuredHeight() * 0.1f); + helloParticlesDrawable.resetPositions(); + } } } @Override protected void dispatchDraw(Canvas canvas) { - if ((starDrawable != null || speedLinesDrawable != null || matrixParticlesDrawable != null) && progress < 0.5f) { + if ((starDrawable != null || speedLinesDrawable != null || helloParticlesDrawable != null || matrixParticlesDrawable != null) && progress < 0.5f) { float s = (float) Math.pow(1f - progress, 2f); canvas.save(); canvas.scale(s, s, getMeasuredWidth() / 2f, getMeasuredHeight() / 2f); @@ -372,6 +386,8 @@ protected void dispatchDraw(Canvas canvas) { float progressSpeedScale = 0.1f + 0.9f * (1f - Utilities.clamp(progress / 0.1f, 1f, 0)); speedLinesDrawable.speedScale = 150 * progressSpeedScale * videoSpeedScale; speedLinesDrawable.onDraw(canvas); + } else if (helloParticlesDrawable != null) { + helloParticlesDrawable.onDraw(canvas); } canvas.restore(); invalidate(); @@ -486,6 +502,10 @@ protected void onDetachedFromWindow() { attached = false; updateAttachState(); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoaded); + if (helloParticlesDrawable != null) { + helloParticlesDrawable.recycle(); + helloParticlesDrawable = null; + } } @Override diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ProfileGalleryView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ProfileGalleryView.java index 6cfd171d68..8cbcd39817 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ProfileGalleryView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ProfileGalleryView.java @@ -25,6 +25,7 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.DialogObject; import org.telegram.messenger.FileLoader; +import org.telegram.messenger.FileLog; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.ImageReceiver; import org.telegram.messenger.MessagesController; @@ -54,7 +55,7 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio private TLRPC.ChatFull chatInfo; private final Callback callback; - private boolean scrolledByUser; + public boolean scrolledByUser; private boolean isDownReleased; private final boolean isProfileFragment; private ImageLocation uploadingImageLocation; @@ -66,12 +67,15 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio float[] radii = new float[8]; private ImageLocation prevImageLocation; + private ImageLocation prevThumbLocation; + private VectorAvatarThumbDrawable prevVectorAvatarThumbDrawable; private ArrayList videoFileNames = new ArrayList<>(); private ArrayList thumbsFileNames = new ArrayList<>(); private ArrayList photos = new ArrayList<>(); private ArrayList videoLocations = new ArrayList<>(); private ArrayList imagesLocations = new 
ArrayList<>(); private ArrayList thumbsLocations = new ArrayList<>(); + private ArrayList vectorAvatars = new ArrayList<>(); private ArrayList imagesLocationsSizes = new ArrayList<>(); private ArrayList imagesUploadProgress = new ArrayList<>(); @@ -108,6 +112,10 @@ public View findVideoActiveView() { return null; } + public void clearPrevImages() { + prevImageLocation = null; + } + private static class Item { boolean isActiveVideo; private View textureViewStubView; @@ -115,6 +123,10 @@ private static class Item { } public interface Callback { + void onDown(boolean left); + + void onRelease(); + void onClick(); void onPhotosLoaded(); @@ -375,12 +387,120 @@ public void setAnimatedFileMaybe(AnimatedFileDrawable drawable) { } } + @Override + public boolean onTouchEvent(MotionEvent ev) { + if (adapter == null) { + return false; + } + if (parentListView.getScrollState() != RecyclerView.SCROLL_STATE_IDLE && !isScrollingListView && isSwipingViewPager) { + isSwipingViewPager = false; + final MotionEvent cancelEvent = MotionEvent.obtain(ev); + cancelEvent.setAction(MotionEvent.ACTION_CANCEL); + super.onTouchEvent(cancelEvent); + cancelEvent.recycle(); + return false; + } + final int action = ev.getAction(); + + if (pinchToZoomHelper != null && getCurrentItemView() != null) { + if (action != MotionEvent.ACTION_DOWN && isDownReleased && !pinchToZoomHelper.isInOverlayMode()) { + pinchToZoomHelper.checkPinchToZoom(MotionEvent.obtain(0, 0, MotionEvent.ACTION_CANCEL, 0, 0, 0), this, getCurrentItemView().getImageReceiver(), null); + } else if (pinchToZoomHelper.checkPinchToZoom(ev, this, getCurrentItemView().getImageReceiver(), null)) { + if (!isDownReleased) { + isDownReleased = true; + callback.onRelease(); + } + return true; + } + } + + + if (action == MotionEvent.ACTION_DOWN) { + isScrollingListView = true; + isSwipingViewPager = true; + scrolledByUser = true; + downPoint.set(ev.getX(), ev.getY()); + if (adapter.getCount() > 1) { + callback.onDown(ev.getX() < getWidth() / 3f); + } + isDownReleased = false; + } else if (action == MotionEvent.ACTION_UP) { + if (!isDownReleased) { + final int itemsCount = adapter.getCount(); + int currentItem = getCurrentItem(); + if (itemsCount > 1) { + if (ev.getX() > getWidth() / 3f) { + final int extraCount = adapter.getExtraCount(); + if (++currentItem >= itemsCount - extraCount) { + currentItem = extraCount; + } + } else { + final int extraCount = adapter.getExtraCount(); + if (--currentItem < extraCount) { + currentItem = itemsCount - extraCount - 1; + } + } + callback.onRelease(); + setCurrentItem(currentItem, false); + } + } + } else if (action == MotionEvent.ACTION_MOVE) { + final float dx = ev.getX() - downPoint.x; + final float dy = ev.getY() - downPoint.y; + boolean move = Math.abs(dy) >= touchSlop || Math.abs(dx) >= touchSlop; + if (move) { + isDownReleased = true; + callback.onRelease(); + } + if (isSwipingViewPager && isScrollingListView) { + if (move) { + if (Math.abs(dy) > Math.abs(dx)) { + isSwipingViewPager = false; + final MotionEvent cancelEvent = MotionEvent.obtain(ev); + cancelEvent.setAction(MotionEvent.ACTION_CANCEL); + super.onTouchEvent(cancelEvent); + cancelEvent.recycle(); + } else { + isScrollingListView = false; + final MotionEvent cancelEvent = MotionEvent.obtain(ev); + cancelEvent.setAction(MotionEvent.ACTION_CANCEL); + parentListView.onTouchEvent(cancelEvent); + cancelEvent.recycle(); + } + } + } else if (isSwipingViewPager && !canScrollHorizontally(-1) && dx > touchSlop) { + return false; + } + } + + boolean result = false; + + if 
(isScrollingListView) { + result = parentListView.onTouchEvent(ev); + } + + if (isSwipingViewPager) { + try { + result |= super.onTouchEvent(ev); + } catch (Exception e) { + FileLog.e(e); + } + } + + if (action == MotionEvent.ACTION_UP || action == MotionEvent.ACTION_CANCEL) { + isScrollingListView = false; + isSwipingViewPager = false; + } + + return result; + } + public void setChatInfo(TLRPC.ChatFull chatFull) { chatInfo = chatFull; if (!photos.isEmpty() && photos.get(0) == null && chatInfo != null && FileLoader.isSamePhoto(imagesLocations.get(0).location, chatInfo.chat_photo)) { photos.set(0, chatInfo.chat_photo); if (!chatInfo.chat_photo.video_sizes.isEmpty()) { - final TLRPC.VideoSize videoSize = chatInfo.chat_photo.video_sizes.get(0); + final TLRPC.VideoSize videoSize = FileLoader.getClosestVideoSizeWithSize(chatInfo.chat_photo.video_sizes, 1000); videoLocations.set(0, ImageLocation.getForPhoto(videoSize, chatInfo.chat_photo)); videoFileNames.set(0, FileLoader.getAttachFileName(videoSize)); callback.onPhotosLoaded(); @@ -393,7 +513,7 @@ public void setChatInfo(TLRPC.ChatFull chatFull) { } } - public boolean initIfEmpty(ImageLocation imageLocation, ImageLocation thumbLocation, boolean reload) { + public boolean initIfEmpty(VectorAvatarThumbDrawable vectorAvatar, ImageLocation imageLocation, ImageLocation thumbLocation, boolean reload) { if (imageLocation == null || thumbLocation == null || settingMainPhoto != 0) { return false; } @@ -414,16 +534,19 @@ public boolean initIfEmpty(ImageLocation imageLocation, ImageLocation thumbLocat return false; } prevImageLocation = imageLocation; + prevThumbLocation = thumbLocation; + prevVectorAvatarThumbDrawable = vectorAvatar; thumbsFileNames.add(null); videoFileNames.add(null); imagesLocations.add(imageLocation); thumbsLocations.add(thumbLocation); + vectorAvatars.add(vectorAvatar); videoLocations.add(null); photos.add(null); imagesLocationsSizes.add(-1); imagesUploadProgress.add(null); getAdapter().notifyDataSetChanged(); - // resetCurrentItem(); + resetCurrentItem(); return true; } @@ -436,6 +559,7 @@ public void addUploadingImage(ImageLocation imageLocation, ImageLocation thumbLo videoFileNames.add(0, null); imagesLocations.add(0, imageLocation); thumbsLocations.add(0, thumbLocation); + vectorAvatars.add(0, null); videoLocations.add(0, null); photos.add(0, null); imagesLocationsSizes.add(0, -1); @@ -616,6 +740,10 @@ public void startMovePhotoToBegin(int index) { thumbsLocations.remove(index); thumbsLocations.add(0, location); + VectorAvatarThumbDrawable vectorAvatar = vectorAvatars.get(index); + vectorAvatars.remove(index); + vectorAvatars.add(0, vectorAvatar); + Integer size = imagesLocationsSizes.get(index); imagesLocationsSizes.remove(index); imagesLocationsSizes.add(0, size); @@ -642,11 +770,14 @@ public boolean removePhotoAtIndex(int index) { videoLocations.remove(index); imagesLocations.remove(index); thumbsLocations.remove(index); + vectorAvatars.remove(index); imagesLocationsSizes.remove(index); radialProgresses.delete(index); imagesUploadProgress.remove(index); if (index == 0 && !imagesLocations.isEmpty()) { prevImageLocation = imagesLocations.get(0); + prevThumbLocation = null; + prevVectorAvatarThumbDrawable = null; } adapter.notifyDataSetChanged(); return photos.isEmpty(); @@ -682,6 +813,9 @@ public void didReceivedNotification(int id, int account, Object... 
args) { if (did == dialogId && parentClassGuid == guid && adapter != null) { boolean fromCache = (Boolean) args[2]; ArrayList arrayList = new ArrayList<>((ArrayList) args[4]); + if (arrayList.isEmpty() && fromCache) { + return; + } customAvatarIndex = -1; fallbackPhotoIndex = -1; @@ -702,6 +836,7 @@ public void didReceivedNotification(int id, int account, Object... args) { imagesLocations.clear(); videoLocations.clear(); thumbsLocations.clear(); + vectorAvatars.clear(); photos.clear(); imagesLocationsSizes.clear(); imagesUploadProgress.clear(); @@ -712,11 +847,12 @@ public void didReceivedNotification(int id, int account, Object... args) { if (currentImageLocation != null) { imagesLocations.add(currentImageLocation); thumbsLocations.add(ImageLocation.getForUserOrChat(chat, ImageLocation.TYPE_SMALL)); + vectorAvatars.add(null); thumbsFileNames.add(null); if (chatInfo != null && FileLoader.isSamePhoto(currentImageLocation.location, chatInfo.chat_photo)) { photos.add(chatInfo.chat_photo); if (!chatInfo.chat_photo.video_sizes.isEmpty()) { - final TLRPC.VideoSize videoSize = chatInfo.chat_photo.video_sizes.get(0); + final TLRPC.VideoSize videoSize = FileLoader.getClosestVideoSizeWithSize(chatInfo.chat_photo.video_sizes, 1000); videoLocations.add(ImageLocation.getForPhoto(videoSize, chatInfo.chat_photo)); videoFileNames.add(FileLoader.getAttachFileName(videoSize)); } else { @@ -752,7 +888,7 @@ public void didReceivedNotification(int id, int account, Object... args) { if (size.location != null && size.location.local_id == currentImageLocation.location.local_id && size.location.volume_id == currentImageLocation.location.volume_id) { photos.set(0, photo); if (!photo.video_sizes.isEmpty()) { - videoLocations.set(0, ImageLocation.getForPhoto(photo.video_sizes.get(0), photo)); + videoLocations.set(0, ImageLocation.getForPhoto(FileLoader.getClosestVideoSizeWithSize(photo.video_sizes, 1000), photo)); } cont = true; break; @@ -770,20 +906,62 @@ public void didReceivedNotification(int id, int account, Object... args) { } ImageLocation location = ImageLocation.getForPhoto(sizeFull, photo); if (location != null) { - imagesLocations.add(location); - thumbsFileNames.add(FileLoader.getAttachFileName(sizeThumb instanceof TLRPC.TL_photoStrippedSize ? 
sizeFull : sizeThumb)); - thumbsLocations.add(ImageLocation.getForPhoto(sizeThumb, photo)); - if (!photo.video_sizes.isEmpty()) { - final TLRPC.VideoSize videoSize = photo.video_sizes.get(0); - videoLocations.add(ImageLocation.getForPhoto(videoSize, photo)); - videoFileNames.add(FileLoader.getAttachFileName(videoSize)); + if (prevImageLocation != null && prevImageLocation.photoId == location.photoId && !isProfileFragment && dialogId != UserConfig.getInstance(currentAccount).getClientUserId()) { + thumbsFileNames.add(null); + + imagesLocations.add(prevImageLocation); + ImageLocation thumbLocation = prevThumbLocation; + if (thumbLocation == null) { + thumbLocation = ImageLocation.getForPhoto(sizeThumb, photo); + } + thumbsLocations.add(thumbLocation); + + if (!photo.video_sizes.isEmpty()) { + final TLRPC.VideoSize videoSize = FileLoader.getClosestVideoSizeWithSize(photo.video_sizes, 1000); + final TLRPC.VideoSize vectorMarkupVideoSize = FileLoader.getVectorMarkupVideoSize(photo); + if (vectorMarkupVideoSize != null) { + vectorAvatars.add(new VectorAvatarThumbDrawable(vectorMarkupVideoSize, user != null && user.premium, VectorAvatarThumbDrawable.TYPE_PROFILE)); + videoLocations.add(null); + videoFileNames.add(null); + } else { + vectorAvatars.add(null); + videoLocations.add(ImageLocation.getForPhoto(videoSize, photo)); + videoFileNames.add(FileLoader.getAttachFileName(videoSize)); + } + } else { + vectorAvatars.add(prevVectorAvatarThumbDrawable); + videoLocations.add(null); + videoFileNames.add(null); + } + photos.add(null); + imagesLocationsSizes.add(-1); + imagesUploadProgress.add(null); } else { - videoLocations.add(null); - videoFileNames.add(null); + imagesLocations.add(location); + thumbsFileNames.add(FileLoader.getAttachFileName(sizeThumb instanceof TLRPC.TL_photoStrippedSize ? sizeFull : sizeThumb)); + thumbsLocations.add(ImageLocation.getForPhoto(sizeThumb, photo)); + + if (!photo.video_sizes.isEmpty()) { + final TLRPC.VideoSize videoSize = FileLoader.getClosestVideoSizeWithSize(photo.video_sizes, 1000); + final TLRPC.VideoSize vectorMarkupVideoSize = FileLoader.getVectorMarkupVideoSize(photo); + if (vectorMarkupVideoSize != null) { + vectorAvatars.add(new VectorAvatarThumbDrawable(vectorMarkupVideoSize, user != null && user.premium, VectorAvatarThumbDrawable.TYPE_PROFILE)); + videoLocations.add(null); + videoFileNames.add(null); + } else { + vectorAvatars.add(null); + videoLocations.add(ImageLocation.getForPhoto(videoSize, photo)); + videoFileNames.add(FileLoader.getAttachFileName(videoSize)); + } + } else { + videoLocations.add(null); + videoFileNames.add(null); + vectorAvatars.add(null); + } + photos.add(photo); + imagesLocationsSizes.add(sizeFull.size); + imagesUploadProgress.add(null); } - photos.add(photo); - imagesLocationsSizes.add(sizeFull.size); - imagesUploadProgress.add(null); } } } @@ -937,7 +1115,7 @@ public Item instantiateItem(ViewGroup container, int position) { } else { ImageLocation videoLocation = videoLocations.get(imageLocationPosition); item.imageView.isVideo = videoLocation != null; - needProgress = true; + needProgress = vectorAvatars.get(imageLocationPosition) == null; String filter; if (isProfileFragment && videoLocation != null && videoLocation.imageType == FileLoader.IMAGE_TYPE_ANIMATION) { filter = "avatar"; @@ -947,23 +1125,23 @@ public Item instantiateItem(ViewGroup container, int position) { ImageLocation location = thumbsLocations.get(imageLocationPosition); Bitmap thumb = (parentAvatarImageView == null || !createThumbFromParent) ? 
null : parentAvatarImageView.getImageReceiver().getBitmap(); String parent = "avatar_" + dialogId; - if (thumb != null) { + if (thumb != null && vectorAvatars.get(imageLocationPosition) == null) { item.imageView.setImageMedia(videoLocations.get(imageLocationPosition), filter, imagesLocations.get(imageLocationPosition), null, thumb, imagesLocationsSizes.get(imageLocationPosition), 1, parent); } else if (uploadingImageLocation != null) { - item.imageView.setImageMedia(videoLocations.get(imageLocationPosition), filter, imagesLocations.get(imageLocationPosition), null, uploadingImageLocation, null, null, imagesLocationsSizes.get(imageLocationPosition), 1, parent); + item.imageView.setImageMedia(vectorAvatars.get(imageLocationPosition), videoLocations.get(imageLocationPosition), filter, imagesLocations.get(imageLocationPosition), null, uploadingImageLocation, null, null, imagesLocationsSizes.get(imageLocationPosition), 1, parent); } else { String thumbFilter = location.photoSize instanceof TLRPC.TL_photoStrippedSize ? "b" : null; - item.imageView.setImageMedia(videoLocation, null, imagesLocations.get(imageLocationPosition), null, thumbsLocations.get(imageLocationPosition), thumbFilter, null, imagesLocationsSizes.get(imageLocationPosition), 1, parent); + item.imageView.setImageMedia(vectorAvatars.get(imageLocationPosition), videoLocation, null, imagesLocations.get(imageLocationPosition), null, thumbsLocations.get(imageLocationPosition), thumbFilter, null, imagesLocationsSizes.get(imageLocationPosition), 1, parent); } } } else { final ImageLocation videoLocation = videoLocations.get(imageLocationPosition); item.imageView.isVideo = videoLocation != null; - needProgress = true; + needProgress = vectorAvatars.get(imageLocationPosition) == null; ImageLocation location = thumbsLocations.get(imageLocationPosition); - String filter = location.photoSize instanceof TLRPC.TL_photoStrippedSize ? "b" : null; + String filter = (location != null && location.photoSize instanceof TLRPC.TL_photoStrippedSize) ? 
"b" : null; String parent = "avatar_" + dialogId; - item.imageView.setImageMedia(videoLocation, null, imagesLocations.get(imageLocationPosition), null, thumbsLocations.get(imageLocationPosition), filter, null, imagesLocationsSizes.get(imageLocationPosition), 1, parent); + item.imageView.setImageMedia(vectorAvatars.get(imageLocationPosition), videoLocation, null, imagesLocations.get(imageLocationPosition), null, thumbsLocations.get(imageLocationPosition), filter, null, imagesLocationsSizes.get(imageLocationPosition), 1, parent); } if (imagesUploadProgress.get(imageLocationPosition) != null) { needProgress = true; @@ -1076,12 +1254,15 @@ public void setData(long dialogId, boolean forceReset) { return; } forceResetPosition = true; - adapter.notifyDataSetChanged(); reset(); this.dialogId = dialogId; - if (dialogId != 0) { - MessagesController.getInstance(currentAccount).loadDialogPhotos(dialogId, 80, 0, true, parentClassGuid); - } +// if (dialogId != 0) { +// MessagesController.getInstance(currentAccount).loadDialogPhotos(dialogId, 80, 0, true, parentClassGuid); +// } + } + + public long getDialogId() { + return dialogId; } private void reset() { @@ -1094,7 +1275,10 @@ private void reset() { imagesLocationsSizes.clear(); imagesUploadProgress.clear(); adapter.notifyDataSetChanged(); + setCurrentItem(0 , false); + selectedPage = 0; uploadingImageLocation = null; + prevImageLocation = null; } public void setRoundRadius(int topRadius, int bottomRadius) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java index 9f80b8f410..c5d13ab1f2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java @@ -24,6 +24,8 @@ import android.view.HapticFeedbackConstants; import android.view.View; +import com.google.gson.Gson; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.DispatchQueue; @@ -37,6 +39,7 @@ import java.io.File; import java.io.FileInputStream; +import java.io.FileReader; import java.io.InputStream; import java.lang.ref.WeakReference; import java.util.ArrayList; @@ -74,6 +77,7 @@ public class RLottieDrawable extends BitmapDrawable implements Animatable, Bitma private HashMap vibrationPattern; private boolean resetVibrationAfterRestart = false; private boolean allowVibration = true; + public static Gson gson; private WeakReference frameReadyCallback; protected WeakReference onFinishCallback; @@ -109,6 +113,7 @@ public class RLottieDrawable extends BitmapDrawable implements Animatable, Bitma private boolean applyingLayerColors; protected int currentFrame; private boolean shouldLimitFps; + private boolean createdForFirstFrame; private float scaleX = 1.0f; private float scaleY = 1.0f; @@ -233,12 +238,7 @@ protected void decodeFrameFinishedInternal() { if (destroyWhenDone) { checkRunningTasks(); if (loadFrameTask == null && cacheGenerateTask == null && nativePtr != 0) { - destroy(nativePtr); - nativePtr = 0; - if (secondNativePtr != 0) { - destroy(secondNativePtr); - secondNativePtr = 0; - } + recycleNativePtr(true); } } if ((nativePtr == 0 || fallbackCache) && secondNativePtr == 0 && bitmapsCache == null) { @@ -249,7 +249,38 @@ protected void decodeFrameFinishedInternal() { if (!hasParentView()) { stop(); } - scheduleNextGetFrame(); + if (isRunning) { + scheduleNextGetFrame(); + } + } + + private void 
recycleNativePtr(boolean uiThread) { + long nativePtrFinal = nativePtr; + long secondNativePtrFinal = secondNativePtr; + + nativePtr = 0; + secondNativePtr = 0; + if (nativePtrFinal != 0 || secondNativePtrFinal != 0) { + if (uiThread) { + DispatchQueuePoolBackground.execute(() -> { + if (nativePtrFinal != 0) { + destroy(nativePtrFinal); + } + if (secondNativePtrFinal != 0) { + destroy(secondNativePtrFinal); + } + }); + } else { + Utilities.globalQueue.postRunnable(() ->{ + if (nativePtrFinal != 0) { + destroy(nativePtrFinal); + } + if (secondNativePtrFinal != 0) { + destroy(secondNativePtrFinal); + } + }); + } + } } protected void recycleResources() { @@ -449,6 +480,7 @@ public RLottieDrawable(File file, int w, int h, BitmapsCache.CacheOptions cacheO shouldLimitFps = limitFps; this.precache = cacheOptions != null; this.fallbackCache = cacheOptions != null && cacheOptions.fallback; + this.createdForFirstFrame = cacheOptions != null && cacheOptions.firstFrame; getPaint().setFlags(Paint.FILTER_BITMAP_FLAG); this.file = file; @@ -456,31 +488,29 @@ public RLottieDrawable(File file, int w, int h, BitmapsCache.CacheOptions cacheO createCacheGenQueue(); } if (precache) { - bitmapsCache = new BitmapsCache(file, this, cacheOptions, w, h, !limitFps); args = new NativePtrArgs(); args.file = file.getAbsoluteFile(); args.json = null; args.colorReplacement = colorReplacement; args.fitzModifier = fitzModifier; - nativePtr = create(file.getAbsolutePath(), null, w, h, metaData, precache, colorReplacement, shouldLimitFps, fitzModifier); - if (fallbackCache) { - if (nativePtr == 0) { - file.delete(); - } - } else { - destroy(nativePtr); - nativePtr = 0; + if (createdForFirstFrame) { + return; + } + parseLottieMetadata(file, null, metaData); + if (shouldLimitFps && metaData[1] < 60) { + shouldLimitFps = false; } + bitmapsCache = new BitmapsCache(file, this, cacheOptions, w, h, !limitFps); } else { nativePtr = create(file.getAbsolutePath(), null, w, h, metaData, precache, colorReplacement, shouldLimitFps, fitzModifier); if (nativePtr == 0) { file.delete(); } + if (shouldLimitFps && metaData[1] < 60) { + shouldLimitFps = false; + } } - if (shouldLimitFps && metaData[1] < 60) { - shouldLimitFps = false; - } timeBetweenFrames = Math.max(shouldLimitFps ? 
33 : 16, (int) (1000.0f / metaData[1])); } @@ -489,41 +519,68 @@ public RLottieDrawable(File file, String json, int w, int h, BitmapsCache.CacheO height = h; shouldLimitFps = limitFps; this.precache = options != null; + this.createdForFirstFrame = options != null && options.firstFrame; getPaint().setFlags(Paint.FILTER_BITMAP_FLAG); if (precache && lottieCacheGenerateQueue == null) { createCacheGenQueue(); } if (precache) { - bitmapsCache = new BitmapsCache(file, this, options, w, h, !limitFps); args = new NativePtrArgs(); args.file = file.getAbsoluteFile(); args.json = json; args.colorReplacement = colorReplacement; args.fitzModifier = fitzModifier; - nativePtr = create(file.getAbsolutePath(), json, w, h, metaData, precache, colorReplacement, shouldLimitFps, fitzModifier); - if (fallbackCache) { - if (nativePtr == 0) { - file.delete(); - } - } else { - if (nativePtr != 0) { - destroy(nativePtr); - } - nativePtr = 0; + if (createdForFirstFrame) { + return; + } + parseLottieMetadata(file, json, metaData); + if (shouldLimitFps && metaData[1] < 60) { + shouldLimitFps = false; } + bitmapsCache = new BitmapsCache(file, this, options, w, h, !limitFps); } else { nativePtr = create(file.getAbsolutePath(), json, w, h, metaData, precache, colorReplacement, shouldLimitFps, fitzModifier); if (nativePtr == 0) { file.delete(); } + if (shouldLimitFps && metaData[1] < 60) { + shouldLimitFps = false; + } } - if (shouldLimitFps && metaData[1] < 60) { - shouldLimitFps = false; - } + timeBetweenFrames = Math.max(shouldLimitFps ? 33 : 16, (int) (1000.0f / metaData[1])); } + private void parseLottieMetadata(File file, String json, int[] metaData) { + if (gson == null) { + gson = new Gson(); + } + try { + LottieMetadata lottieMetadata; + if (file != null) { + FileReader reader = new FileReader(file.getAbsolutePath()); + lottieMetadata = gson.fromJson(reader, LottieMetadata.class); + try { + reader.close(); + } catch (Exception e) { + + } + } else { + lottieMetadata = gson.fromJson(json, LottieMetadata.class); + } + metaData[0] = (int) (lottieMetadata.op - lottieMetadata.ip); + metaData[1] = (int) lottieMetadata.fr; + } catch (Exception e) { + // ignore app center, try handle by old method + FileLog.e(e, false); + long nativePtr = create(file.getAbsolutePath(), json, width, height, metaData, false, args.colorReplacement, shouldLimitFps, args.fitzModifier); + if (nativePtr != 0) { + destroy(nativePtr); + } + } + } + public RLottieDrawable(int rawRes, String name, int w, int h) { this(rawRes, name, w, h, true, null); } @@ -579,7 +636,7 @@ public boolean setBaseDice(File path) { AndroidUtilities.runOnUIThread(() -> { loadingInBackground = false; if (!secondLoadingInBackground && destroyAfterLoading) { - recycle(); + recycle(true); return; } timeBetweenFrames = Math.max(16, (int) (1000.0f / metaData[1])); @@ -613,7 +670,7 @@ public boolean setDiceNumber(File path, boolean instant) { AndroidUtilities.runOnUIThread(() -> { secondLoadingInBackground = false; if (!loadingInBackground && destroyAfterLoading) { - recycle(); + recycle(true); } }); return; @@ -623,7 +680,7 @@ public boolean setDiceNumber(File path, boolean instant) { AndroidUtilities.runOnUIThread(() -> { secondLoadingInBackground = false; if (!secondLoadingInBackground && destroyAfterLoading) { - recycle(); + recycle(true); return; } secondFramesCount = metaData2[0]; @@ -741,8 +798,6 @@ public void removeParentView(ImageReceiver parent) { checkCacheCancel(); } - private Runnable cancelCache; - public void checkCacheCancel() { if (bitmapsCache == null || 
lottieCacheGenerateQueue == null || cacheGenerateTask == null) { return; @@ -769,7 +824,10 @@ protected boolean hasParentView() { } protected void invalidateInternal() { - for (int i = 0; i < parentViews.size(); i++) { + if (isRecycled) { + return; + } + for (int i = 0, N = parentViews.size(); i < N; i++) { parentViews.get(i).invalidate(); } if (masterParent != null) { @@ -787,21 +845,14 @@ public void setAllowDecodeSingleFrame(boolean value) { } } - public void recycle() { + public void recycle(boolean uiThread) { isRunning = false; isRecycled = true; checkRunningTasks(); if (loadingInBackground || secondLoadingInBackground) { destroyAfterLoading = true; } else if (loadFrameTask == null && cacheGenerateTask == null && !generatingCache) { - if (nativePtr != 0) { - destroy(nativePtr); - nativePtr = 0; - } - if (secondNativePtr != 0) { - destroy(secondNativePtr); - secondNativePtr = 0; - } + recycleNativePtr(uiThread); if (bitmapsCache != null) { bitmapsCache.recycle(); bitmapsCache = null; @@ -830,7 +881,7 @@ public void setAutoRepeatTimeout(long timeout) { @Override protected void finalize() throws Throwable { try { - recycle(); + recycle(false); } finally { super.finalize(); } @@ -858,7 +909,11 @@ public void start() { } public boolean restart() { - if ((autoRepeat < 2 || autoRepeatPlayCount == 0) && autoRepeatCount < 0) { + return restart(false); + } + + public boolean restart(boolean force) { + if (!force && (autoRepeat < 2 || autoRepeatPlayCount == 0) && autoRepeatCount < 0) { return false; } autoRepeatPlayCount = 0; @@ -1258,12 +1313,16 @@ public boolean isLastFrame() { @Override public void prepareForGenerateCache() { - generateCacheNativePtr = create(args.file.toString(), args.json, width, height, new int[3], false, args.colorReplacement, false, args.fitzModifier); + generateCacheNativePtr = create(args.file.toString(), args.json, width, height, createdForFirstFrame ? 
metaData : new int[3], false, args.colorReplacement, false, args.fitzModifier); if (generateCacheNativePtr == 0 && file != null) { file.delete(); } } + public void setGeneratingFrame(int i) { + generateCacheFramePointer = i; + } + @Override public int getNextFrame(Bitmap bitmap) { if (generateCacheNativePtr == 0) { @@ -1368,4 +1427,10 @@ public void checkCache(Runnable onReady) { }); } } + + private class LottieMetadata { + float fr; + float op; + float ip; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieImageView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieImageView.java index ee890df898..4b461405bd 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieImageView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieImageView.java @@ -31,6 +31,10 @@ public class RLottieImageView extends ImageView { private boolean attachedToWindow; private boolean playing; private boolean startOnAttach; + private Integer layerNum; + private boolean onlyLastFrame; + public boolean cached; + private boolean reverse; public RLottieImageView(Context context) { super(context); @@ -40,6 +44,13 @@ public void clearLayerColors() { layerColors.clear(); } + public void setLayerNum(Integer layerNum) { + this.layerNum = layerNum; + if (this.imageReceiver != null) { + this.imageReceiver.setLayerNum(layerNum); + } + } + public void setLayerColor(String layer, int color) { if (layerColors == null) { layerColors = new HashMap<>(); @@ -95,6 +106,18 @@ public void setAnimation(RLottieDrawable lottieDrawable) { } + public void setOnlyLastFrame(boolean onlyLastFrame) { + this.onlyLastFrame = onlyLastFrame; + } + + public void setReverse() { + if (drawable != null) { + drawable.setPlayInDirectionOfCustomEndFrame(true); + drawable.setCurrentFrame(drawable.getFramesCount()); + drawable.setCustomEndFrame(0); + } + } + public void setAnimation(TLRPC.Document document, int w, int h) { if (imageReceiver != null) { imageReceiver.onDetachedFromWindow(); @@ -103,36 +126,50 @@ public void setAnimation(TLRPC.Document document, int w, int h) { if (document == null) { return; } - imageReceiver = new ImageReceiver(); - if ("video/webm".equals(document.mime_type)) { + imageReceiver = new ImageReceiver() { + @Override + protected boolean setImageBitmapByKey(Drawable drawable, String key, int type, boolean memCache, int guid) { + if (drawable != null) { + onLoaded(); + } + return super.setImageBitmapByKey(drawable, key, type, memCache, guid); + } + }; + if (onlyLastFrame) { + imageReceiver.setImage(ImageLocation.getForDocument(document), w + "_" + h + "_lastframe", null, null, null, null, null, 0, null, document, 1); + } else if ("video/webm".equals(document.mime_type)) { TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(document.thumbs, 90); - imageReceiver.setImage(ImageLocation.getForDocument(document), w + "_" + h + "_pcache_" + ImageLoader.AUTOPLAY_FILTER, ImageLocation.getForDocument(thumb, document), null, null, document.size, null, document, 1); + imageReceiver.setImage(ImageLocation.getForDocument(document), w + "_" + h + (cached ? 
"_pcache" : "") + "_" + ImageLoader.AUTOPLAY_FILTER, ImageLocation.getForDocument(thumb, document), null, null, document.size, null, document, 1); } else { - Drawable thumbDrawable = null; - String probableCacheKey = document.id + "@" + w + "_" + h; - if (!ImageLoader.getInstance().hasLottieMemCache(probableCacheKey)) { - SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(document.thumbs, Theme.key_windowBackgroundWhiteGrayIcon, 0.2f); - if (svgThumb != null) { - svgThumb.overrideWidthAndHeight(512, 512); - } - thumbDrawable = svgThumb; + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(document.thumbs, Theme.key_windowBackgroundWhiteGrayIcon, 0.2f); + if (svgThumb != null) { + svgThumb.overrideWidthAndHeight(512, 512); } TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(document.thumbs, 90); - imageReceiver.setImage(ImageLocation.getForDocument(document), w + "_" + h, ImageLocation.getForDocument(thumb, document), null, null, null, thumbDrawable, 0, null, document, 1); + imageReceiver.setImage(ImageLocation.getForDocument(document), w + "_" + h + (cached ? "_pcache" : ""), ImageLocation.getForDocument(thumb, document), null, null, null, svgThumb, 0, null, document, 1); } imageReceiver.setAspectFit(true); imageReceiver.setParentView(this); - imageReceiver.setAutoRepeat(1); - imageReceiver.setAllowStartLottieAnimation(true); - imageReceiver.setAllowStartAnimation(true); + if (autoRepeat) { + imageReceiver.setAutoRepeat(1); + imageReceiver.setAllowStartLottieAnimation(true); + imageReceiver.setAllowStartAnimation(true); + } else { + imageReceiver.setAutoRepeat(0); + } + imageReceiver.setLayerNum(layerNum != null ? layerNum : 7); imageReceiver.clip = false; setImageDrawable(new Drawable() { @Override public void draw(@NonNull Canvas canvas) { - AndroidUtilities.rectTmp2.set(getBounds()); - AndroidUtilities.rectTmp2.inset(AndroidUtilities.dp(11), AndroidUtilities.dp(11)); + AndroidUtilities.rectTmp2.set( + getBounds().centerX() - AndroidUtilities.dp(w) / 2, + getBounds().centerY() - AndroidUtilities.dp(h) / 2, + getBounds().centerX() + AndroidUtilities.dp(w) / 2, + getBounds().centerY() + AndroidUtilities.dp(h) / 2 + ); imageReceiver.setImageCoords(AndroidUtilities.rectTmp2); imageReceiver.draw(canvas); } @@ -158,6 +195,10 @@ public int getOpacity() { } } + protected void onLoaded() { + + } + public void clearAnimationDrawable() { if (drawable != null) { drawable.stop(); @@ -176,6 +217,9 @@ protected void onAttachedToWindow() { attachedToWindow = true; if (imageReceiver != null) { imageReceiver.onAttachedToWindow(); + if (playing) { + imageReceiver.startAnimation(); + } } if (drawable != null) { drawable.setCallback(this); @@ -194,7 +238,6 @@ protected void onDetachedFromWindow() { } if (imageReceiver != null) { imageReceiver.onDetachedFromWindow(); - imageReceiver = null; } } @@ -207,10 +250,13 @@ public void setAutoRepeat(boolean repeat) { } public void setProgress(float progress) { - if (drawable == null) { - return; + if (drawable != null) { + drawable.setProgress(progress); } - drawable.setProgress(progress); + } + + public ImageReceiver getImageReceiver() { + return imageReceiver; } @Override @@ -220,12 +266,14 @@ public void setImageResource(int resId) { } public void playAnimation() { - if (drawable == null) { + if (drawable == null && imageReceiver == null) { return; } playing = true; if (attachedToWindow) { - drawable.start(); + if (drawable != null) { + drawable.start(); + } if (imageReceiver != null) { imageReceiver.startAnimation(); } @@ -235,12 
+283,14 @@ public void playAnimation() {
     }
 
     public void stopAnimation() {
-        if (drawable == null) {
+        if (drawable == null && imageReceiver == null) {
             return;
         }
         playing = false;
         if (attachedToWindow) {
-            drawable.stop();
+            if (drawable != null) {
+                drawable.stop();
+            }
             if (imageReceiver != null) {
                 imageReceiver.stopAnimation();
             }
diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/RadialProgress2.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/RadialProgress2.java
index cc3b886c32..aaf0311da9 100644
--- a/TMessagesProj/src/main/java/org/telegram/ui/Components/RadialProgress2.java
+++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/RadialProgress2.java
@@ -66,7 +66,7 @@ public class RadialProgress2 {
     private Canvas miniDrawCanvas;
 
     private float overrideAlpha = 1.0f;
-    private final Theme.ResourcesProvider resourcesProvider;
+    private Theme.ResourcesProvider resourcesProvider;
     private int maxIconSize;
 
     private float overlayImageAlpha = 1f;
@@ -97,6 +97,10 @@ public RadialProgress2(View parentView, Theme.ResourcesProvider resourcesProvide
         overlayPaint.setColor(0x64000000);
     }
 
+    public void setResourcesProvider(Theme.ResourcesProvider resourcesProvider) {
+        this.resourcesProvider = resourcesProvider;
+    }
+
     public void setAsMini() {
         mediaActionDrawable.setMini(true);
     }
diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactedHeaderView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactedHeaderView.java
index 740c65edd5..b15634d0e4 100644
--- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactedHeaderView.java
+++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactedHeaderView.java
@@ -5,6 +5,7 @@
 import android.graphics.PorterDuffColorFilter;
 import android.graphics.drawable.Drawable;
 import android.text.TextUtils;
+import android.util.Pair;
 import android.util.TypedValue;
 import android.view.Gravity;
 import android.view.View;
@@ -40,15 +41,15 @@ public class ReactedHeaderView extends FrameLayout {
     private int currentAccount;
     private boolean ignoreLayout;
 
-    private List<TLRPC.User> seenUsers = new ArrayList<>();
-    private List<TLRPC.User> users = new ArrayList<>();
+    private List<UserSeen> seenUsers = new ArrayList<>();
+    private List<UserSeen> users = new ArrayList<>();
     private long dialogId;
     private MessageObject message;
     private int fixedWidth;
     private boolean isLoaded;
 
-    private Consumer<List<TLRPC.User>> seenCallback;
+    private Consumer<List<UserSeen>> seenCallback;
 
     public ReactedHeaderView(@NonNull Context context, int currentAccount, MessageObject message, long dialogId) {
         super(context);
@@ -71,6 +72,7 @@ public ReactedHeaderView(@NonNull Context context, int currentAccount, MessageOb
 
         avatarsImageView = new AvatarsImageView(context, false);
         avatarsImageView.setStyle(AvatarsDrawable.STYLE_MESSAGE_SEEN);
+        avatarsImageView.setAvatarsTextSize(AndroidUtilities.dp(22));
         addView(avatarsImageView, LayoutHelper.createFrameRelatively(24 + 12 + 12 + 8, LayoutHelper.MATCH_PARENT, Gravity.END | Gravity.CENTER_VERTICAL, 0, 0, 0, 0));
 
         iconView = new ImageView(context);
@@ -89,10 +91,23 @@ public ReactedHeaderView(@NonNull Context context, int currentAccount, MessageOb
         setBackground(Theme.getSelectorDrawable(false));
     }
 
-    public void setSeenCallback(Consumer<List<TLRPC.User>> seenCallback) {
+    public void setSeenCallback(Consumer<List<UserSeen>> seenCallback) {
         this.seenCallback = seenCallback;
     }
 
+    public static class UserSeen {
+        public TLRPC.User user;
+        public int date = 0;
+
+        public UserSeen(TLRPC.User user) {
+            this.user = user;
+        }
+        public UserSeen(TLRPC.User user, int date) {
+            this.user = user;
+            this.date = date;
+        }
+    }
+
     @Override
     protected void onAttachedToWindow() {
         super.onAttachedToWindow();
@@ -111,29 +126,43 @@ protected void onAttachedToWindow() {
             ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> {
                 if (response instanceof TLRPC.Vector) {
                     List<Long> usersToRequest = new ArrayList<>();
+                    List<Integer> dates = new ArrayList<>();
                     TLRPC.Vector v = (TLRPC.Vector) response;
                     for (Object obj : v.objects) {
                         if (obj instanceof Long) {
                             long l = (long) obj;
-                            if (fromId != l)
+                            if (fromId != l) {
                                 usersToRequest.add(l);
+                                dates.add(0);
+                            }
+                        } else if (obj instanceof TLRPC.TL_readParticipantDate) {
+                            long userId = ((TLRPC.TL_readParticipantDate) obj).user_id;
+                            int date = ((TLRPC.TL_readParticipantDate) obj).date;
+                            if (fromId != userId) {
+                                usersToRequest.add(userId);
+                                dates.add(date);
+                            }
                         }
                     }
                     usersToRequest.add(fromId);
+                    dates.add(0);
 
-                    List<TLRPC.User> usersRes = new ArrayList<>();
+                    List<UserSeen> usersRes = new ArrayList<>();
                     Runnable callback = () -> {
                         seenUsers.addAll(usersRes);
-                        for (TLRPC.User u : usersRes) {
+                        for (UserSeen p : usersRes) {
                             boolean hasSame = false;
                             for (int i = 0; i < users.size(); i++) {
-                                if (users.get(i).id == u.id) {
+                                if (users.get(i).user.id == p.user.id) {
                                     hasSame = true;
+                                    if (p.date > 0) {
+                                        users.get(i).date = p.date;
+                                    }
                                     break;
                                 }
                             }
                             if (!hasSame) {
-                                users.add(u);
+                                users.add(p);
                             }
                         }
                         if (seenCallback != null)
@@ -152,8 +181,10 @@ protected void onAttachedToWindow() {
                                 for (int i = 0; i < users.users.size(); i++) {
                                     TLRPC.User user = users.users.get(i);
                                     MessagesController.getInstance(currentAccount).putUser(user, false);
-                                    if (!user.self && usersToRequest.contains(user.id))
-                                        usersRes.add(user);
+                                    int index = usersToRequest.indexOf(user.id);
+                                    if (!user.self && index >= 0) {
+                                        usersRes.add(new UserSeen(user, dates.get(index)));
+                                    }
                                 }
                             }
                             callback.run();
@@ -167,8 +198,10 @@ protected void onAttachedToWindow() {
                                 for (int i = 0; i < chatFull.users.size(); i++) {
                                     TLRPC.User user = chatFull.users.get(i);
                                     MessagesController.getInstance(currentAccount).putUser(user, false);
-                                    if (!user.self && usersToRequest.contains(user.id))
-                                        usersRes.add(user);
+                                    int index = usersToRequest.indexOf(user.id);
+                                    if (!user.self && index >= 0) {
+                                        usersRes.add(new UserSeen(user, dates.get(index)));
+                                    }
                                 }
                             }
                             callback.run();
@@ -192,18 +225,20 @@ private void loadReactions() {
                 if (response instanceof TLRPC.TL_messages_messageReactionsList) {
                     TLRPC.TL_messages_messageReactionsList list = (TLRPC.TL_messages_messageReactionsList) response;
                     int c = list.count;
+                    int ic = list.users.size();
                     post(() -> {
                         String str;
                         if (seenUsers.isEmpty() || seenUsers.size() < c) {
                             str = LocaleController.formatPluralString("ReactionsCount", c);
                         } else {
                             String countStr;
+                            int n;
                             if (c == seenUsers.size()) {
-                                countStr = String.valueOf(c);
+                                countStr = String.valueOf(n = c);
                             } else {
-                                countStr = c + "/" + seenUsers.size();
+                                countStr = (n = c) + "/" + seenUsers.size();
                             }
-                            str = String.format(LocaleController.getPluralString("Reacted", c), countStr);
+                            str = String.format(LocaleController.getPluralString("Reacted", n), countStr);
                         }
 
                         if (getMeasuredWidth() > 0) {
@@ -214,7 +249,7 @@ private void loadReactions() {
                     if (message.messageOwner.reactions != null && message.messageOwner.reactions.results.size() == 1 && !list.reactions.isEmpty()) {
                         for (TLRPC.TL_availableReaction r : MediaDataController.getInstance(currentAccount).getReactionsList()) {
                             if (r.reaction.equals(list.reactions.get(0).reaction)) {
-                                reactView.setImage(ImageLocation.getForDocument(r.center_icon), "40_40_lastframe", "webp", null, r);
+                                reactView.setImage(ImageLocation.getForDocument(r.center_icon), "40_40_lastreactframe", "webp", null, r);
                                 reactView.setVisibility(VISIBLE);
                                 reactView.setAlpha(0);
                                 reactView.animate().alpha(1f).start();
@@ -233,13 +268,13 @@ private void loadReactions() {
                     if (message.messageOwner.from_id != null && u.id != message.messageOwner.from_id.user_id) {
                         boolean hasSame = false;
                         for (int i = 0; i < users.size(); i++) {
-                            if (users.get(i).id == u.id) {
+                            if (users.get(i).user.id == u.id) {
                                 hasSame = true;
                                 break;
                             }
                         }
                         if (!hasSame) {
-                            users.add(u);
+                            users.add(new UserSeen(u, 0));
                         }
                     }
                 }
@@ -250,7 +285,7 @@ private void loadReactions() {
         }, ConnectionsManager.RequestFlagInvokeAfter);
     }
 
-    public List<TLRPC.User> getSeenUsers() {
+    public List<UserSeen> getSeenUsers() {
         return seenUsers;
     }
 
@@ -258,7 +293,7 @@ private void updateView() {
         setEnabled(users.size() > 0);
         for (int i = 0; i < 3; i++) {
             if (i < users.size()) {
-                avatarsImageView.setObject(i, currentAccount, users.get(i));
+                avatarsImageView.setObject(i, currentAccount, users.get(i).user);
             } else {
                 avatarsImageView.setObject(i, currentAccount, null);
             }
diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactedUsersListView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactedUsersListView.java
index bd0d869714..72035ab066 100644
--- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactedUsersListView.java
+++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactedUsersListView.java
@@ -8,7 +8,10 @@
 import android.graphics.drawable.ColorDrawable;
 import android.graphics.drawable.Drawable;
 import android.os.Build;
+import android.text.TextUtils;
+import android.util.Log;
 import android.util.LongSparseArray;
+import android.util.Pair;
 import android.view.Gravity;
 import android.view.View;
 import android.view.ViewGroup;
@@ -47,7 +50,7 @@ public class ReactedUsersListView extends FrameLayout {
 
     public final static int VISIBLE_ITEMS = 6;
-    public final static int ITEM_HEIGHT_DP = 48;
+    public final static int ITEM_HEIGHT_DP = 50;
 
     private final static int USER_VIEW_TYPE = 0;
     private final static int CUSTOM_EMOJI_VIEW_TYPE = 1;
 
@@ -93,7 +96,7 @@ protected void onMeasure(int widthSpec, int heightSpec) {
                 updateHeight();
             }
         };
-        LinearLayoutManager llm = new LinearLayoutManager(context);
+        final LinearLayoutManager llm = new LinearLayoutManager(context);
         listView.setLayoutManager(llm);
         if (addPadding) {
             listView.setPadding(0, 0, 0, AndroidUtilities.dp(8));
@@ -110,7 +113,7 @@ public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int
                 View view = null;
                 switch (viewType) {
                     case USER_VIEW_TYPE:
-                        view = new ReactedUserHolderView(context);
+                        view = new ReactedUserHolderView(currentAccount, context);
                         break;
                     default:
                     case CUSTOM_EMOJI_VIEW_TYPE:
@@ -199,17 +202,34 @@ public int getAdditionalHeight() {
     }
 
     @SuppressLint("NotifyDataSetChanged")
-    public ReactedUsersListView setSeenUsers(List<TLRPC.User> users) {
+    public ReactedUsersListView setSeenUsers(List<ReactedHeaderView.UserSeen> users) {
+        if (userReactions != null && !userReactions.isEmpty()) {
+            for (ReactedHeaderView.UserSeen p : users) {
+                TLRPC.User user = p.user;
+                if (user != null && p.date > 0) {
+                    for (int i = 0; i < userReactions.size(); ++i) {
+                        TLRPC.MessagePeerReaction react = userReactions.get(i);
+                        if (react != null && react.date <= 0 && react.peer_id.user_id == user.id) {
+                            react.date = p.date;
+                            react.dateIsSeen = true;
+                            break;
+                        }
+                    }
+                }
+            }
+        }
         List<TLRPC.TL_messagePeerReaction> nr = new ArrayList<>(users.size());
-        for (TLRPC.User u : users) {
-            ArrayList<TLRPC.MessagePeerReaction> userReactions = peerReactionMap.get(u.id);
+        for
(ReactedHeaderView.UserSeen p : users) { + ArrayList userReactions = peerReactionMap.get(p.user.id); if (userReactions != null) { continue; } TLRPC.TL_messagePeerReaction r = new TLRPC.TL_messagePeerReaction(); r.reaction = null; r.peer_id = new TLRPC.TL_peerUser(); - r.peer_id.user_id = u.id; + r.peer_id.user_id = p.user.id; + r.date = p.date; + r.dateIsSeen = true; userReactions = new ArrayList<>(); userReactions.add(r); peerReactionMap.put(MessageObject.getPeerId(r.peer_id), userReactions); @@ -219,7 +239,7 @@ public ReactedUsersListView setSeenUsers(List users) { onlySeenNow = true; } userReactions.addAll(nr); - Collections.sort(userReactions, Comparator.comparingInt(o -> o.reaction != null ? 0 : 1)); + Collections.sort(userReactions, Comparator.comparingInt(o -> o.date <= 0 || o.reaction != null ? Integer.MIN_VALUE : -o.date)); adapter.notifyDataSetChanged(); updateHeight(); @@ -288,8 +308,7 @@ private void load() { updateCustomReactionsButton(); } - Collections.sort(userReactions, Comparator.comparingInt(o -> o.reaction != null ? 0 : 1)); - + Collections.sort(userReactions, Comparator.comparingInt(o -> o.date <= 0 || o.reaction != null ? Integer.MIN_VALUE : -o.date)); adapter.notifyDataSetChanged(); @@ -367,20 +386,28 @@ private int getLoadCount() { return filter == null ? 100 : 50; } - private final class ReactedUserHolderView extends FrameLayout { + private static final class ReactedUserHolderView extends FrameLayout { + int currentAccount; + BackupImageView avatarView; SimpleTextView titleView; + SimpleTextView subtitleView; BackupImageView reactView; AvatarDrawable avatarDrawable = new AvatarDrawable(); View overlaySelectorView; + AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable rightDrawable; + + private static final MessageSeenCheckDrawable seenDrawable = new MessageSeenCheckDrawable(R.drawable.msg_mini_checks, Theme.key_windowBackgroundWhiteGrayText); + private static final MessageSeenCheckDrawable reactDrawable = new MessageSeenCheckDrawable(R.drawable.msg_reactions, Theme.key_windowBackgroundWhiteGrayText, 16, 16, 5.66f); - ReactedUserHolderView(@NonNull Context context) { + ReactedUserHolderView(int currentAccount, @NonNull Context context) { super(context); - setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, AndroidUtilities.dp(48))); + this.currentAccount = currentAccount; + setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, AndroidUtilities.dp(ITEM_HEIGHT_DP))); avatarView = new BackupImageView(context); - avatarView.setRoundRadius(AndroidUtilities.dp(32)); - addView(avatarView, LayoutHelper.createFrameRelatively(36, 36, Gravity.START | Gravity.CENTER_VERTICAL, 8, 0, 0, 0)); + avatarView.setRoundRadius(AndroidUtilities.dp(34)); + addView(avatarView, LayoutHelper.createFrameRelatively(34, 34, Gravity.START | Gravity.CENTER_VERTICAL, 10, 0, 0, 0)); titleView = new SimpleTextView(context) { @Override @@ -394,10 +421,22 @@ public boolean setText(CharSequence value) { titleView.setTextColor(Theme.getColor(Theme.key_actionBarDefaultSubmenuItem)); titleView.setEllipsizeByGradient(true); titleView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); - titleView.setPadding(0, AndroidUtilities.dp(12), 0, AndroidUtilities.dp(12)); titleView.setRightPadding(AndroidUtilities.dp(30)); titleView.setTranslationX(LocaleController.isRTL ? 
AndroidUtilities.dp(30) : 0); - addView(titleView, LayoutHelper.createFrameRelatively(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.FILL_HORIZONTAL | Gravity.CENTER_VERTICAL, 58, 0, 12, 0)); + titleView.setRightDrawableOutside(true); + addView(titleView, LayoutHelper.createFrameRelatively(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.FILL_HORIZONTAL | Gravity.TOP, 55, 5.33f, 12, 0)); + + rightDrawable = new AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable(this, AndroidUtilities.dp(18)); + titleView.setDrawablePadding(AndroidUtilities.dp(3)); + titleView.setRightDrawable(rightDrawable); + + subtitleView = new SimpleTextView(context); + subtitleView.setTextSize(13); + subtitleView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); + subtitleView.setEllipsizeByGradient(true); + subtitleView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); + subtitleView.setTranslationX(LocaleController.isRTL ? AndroidUtilities.dp(30) : 0); + addView(subtitleView, LayoutHelper.createFrameRelatively(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.FILL_HORIZONTAL | Gravity.TOP, 55, 19f, 20, 0)); reactView = new BackupImageView(context); addView(reactView, LayoutHelper.createFrameRelatively(24, 24, Gravity.END | Gravity.CENTER_VERTICAL, 0, 0, 12, 0)); @@ -408,18 +447,32 @@ public boolean setText(CharSequence value) { } void setUserReaction(TLRPC.MessagePeerReaction reaction) { + if (reaction == null) { + return; + } + TLRPC.User u = MessagesController.getInstance(currentAccount).getUser(MessageObject.getPeerId(reaction.peer_id)); if (u == null) { return; } + + Long documentId = UserObject.getEmojiStatusDocumentId(u); + if (documentId == null) { + rightDrawable.set((Drawable) null, false); + } else { + rightDrawable.set(documentId, false); + } + avatarDrawable.setInfo(u); titleView.setText(UserObject.getUserName(u)); + Drawable thumb = avatarDrawable; if (u.photo != null && u.photo.strippedBitmap != null) { thumb = u.photo.strippedBitmap; } avatarView.setImage(ImageLocation.getForUser(u, ImageLocation.TYPE_SMALL), "50_50", thumb, u); + String contentDescription; boolean hasReactImage = false; if (reaction.reaction != null) { ReactionsLayoutInBubble.VisibleReaction visibleReaction = ReactionsLayoutInBubble.VisibleReaction.fromTLReaction(reaction.reaction); @@ -427,7 +480,7 @@ void setUserReaction(TLRPC.MessagePeerReaction reaction) { TLRPC.TL_availableReaction r = MediaDataController.getInstance(currentAccount).getReactionsMap().get(visibleReaction.emojicon); if (r != null) { SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(r.static_icon.thumbs, Theme.key_windowBackgroundGray, 1.0f); - reactView.setImage(ImageLocation.getForDocument(r.center_icon), "40_40_lastframe", "webp", svgThumb, r); + reactView.setImage(ImageLocation.getForDocument(r.center_icon), "40_40_lastreactframe", "webp", svgThumb, r); hasReactImage = true; } else { reactView.setImageDrawable(null); @@ -438,13 +491,32 @@ void setUserReaction(TLRPC.MessagePeerReaction reaction) { reactView.setAnimatedEmojiDrawable(drawable); hasReactImage = true; } - setContentDescription(LocaleController.formatString("AccDescrReactedWith", R.string.AccDescrReactedWith, UserObject.getUserName(u), reaction.reaction)); + contentDescription = LocaleController.formatString("AccDescrReactedWith", R.string.AccDescrReactedWith, UserObject.getUserName(u), visibleReaction.emojicon != null ? 
visibleReaction.emojicon : reaction.reaction); } else { reactView.setImageDrawable(null); - setContentDescription(LocaleController.formatString("AccDescrPersonHasSeen", R.string.AccDescrPersonHasSeen, UserObject.getUserName(u))); + contentDescription = LocaleController.formatString("AccDescrPersonHasSeen", R.string.AccDescrPersonHasSeen, UserObject.getUserName(u)); + } + + if (reaction.date != 0) { + contentDescription += " " + LocaleController.formatSeenDate(reaction.date); } + setContentDescription(contentDescription); + + if (reaction.date != 0) { + subtitleView.setVisibility(View.VISIBLE); + CharSequence icon = reaction.dateIsSeen ? seenDrawable.getSpanned(getContext()) : reactDrawable.getSpanned(getContext()); + subtitleView.setText(TextUtils.concat(icon, LocaleController.formatSeenDate(reaction.date))); + subtitleView.setTranslationY(!reaction.dateIsSeen ? AndroidUtilities.dp(-1) : 0); + titleView.setTranslationY(0); + } else { + subtitleView.setVisibility(View.GONE); + titleView.setTranslationY(AndroidUtilities.dp(9)); + } + titleView.setRightPadding(AndroidUtilities.dp(hasReactImage ? 30 : 0)); titleView.setTranslationX(hasReactImage && LocaleController.isRTL ? AndroidUtilities.dp(30) : 0); + ((MarginLayoutParams) subtitleView.getLayoutParams()).rightMargin = AndroidUtilities.dp(hasReactImage && !LocaleController.isRTL ? 12 + 24 : 12); + subtitleView.setTranslationX(hasReactImage && LocaleController.isRTL ? AndroidUtilities.dp(30) : 0); } @Override @@ -457,6 +529,22 @@ public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { super.onInitializeAccessibilityNodeInfo(info); info.setEnabled(true); } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + if (rightDrawable != null) { + rightDrawable.attach(); + } + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + if (rightDrawable != null) { + rightDrawable.detach(); + } + } } public ReactedUsersListView setOnProfileSelectedListener(OnProfileSelectedListener onProfileSelectedListener) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactionTabHolderView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactionTabHolderView.java index 2a789dc3a8..1ac08904cc 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactionTabHolderView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactionTabHolderView.java @@ -113,7 +113,7 @@ public void setCounter(int currentAccount, TLRPC.ReactionCount counter) { for (TLRPC.TL_availableReaction r : MediaDataController.getInstance(currentAccount).getReactionsList()) { if (r.reaction.equals(reaction.emojicon)) { SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(r.static_icon, Theme.key_windowBackgroundGray, 1.0f); - reactView.setImage(ImageLocation.getForDocument(r.center_icon), "40_40_lastframe", "webp", svgThumb, r); + reactView.setImage(ImageLocation.getForDocument(r.center_icon), "40_40_lastreactframe", "webp", svgThumb, r); reactView.setVisibility(VISIBLE); iconView.setVisibility(GONE); break; @@ -150,7 +150,7 @@ public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { if (reaction != null) { info.setText(LocaleController.formatPluralString("AccDescrNumberOfPeopleReactions", count, reaction)); } else { - info.setText(LocaleController.formatPluralString("AccDescrNumberOfReactions", count)); + info.setText(LocaleController.formatPluralString("ReactionsCount", count)); } } } \ No newline at end of file diff --git 
a/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/AnimatedEmojiEffect.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/AnimatedEmojiEffect.java
index 6a310d1ea0..8378ae4516 100644
--- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/AnimatedEmojiEffect.java
+++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/AnimatedEmojiEffect.java
@@ -7,6 +7,7 @@
 import org.telegram.messenger.AndroidUtilities;
 import org.telegram.messenger.ImageLocation;
 import org.telegram.messenger.ImageReceiver;
+import org.telegram.messenger.LiteMode;
 import org.telegram.messenger.MediaDataController;
 import org.telegram.messenger.MessageObject;
 import org.telegram.messenger.R;
@@ -43,7 +44,7 @@ private AnimatedEmojiEffect(AnimatedEmojiDrawable animatedEmojiDrawable, int cur
         this.currentAccount = currentAccount;
         this.showGeneric = showGeneric;
         startTime = System.currentTimeMillis();
-        if (!longAnimation && showGeneric && !SharedConfig.getLiteMode().enabled()) {
+        if (!longAnimation && showGeneric && LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_CHAT)) {
             effectImageReceiver = new ImageReceiver();
         }
     }
diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/ChatSelectionReactionMenuOverlay.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/ChatSelectionReactionMenuOverlay.java
new file mode 100644
index 0000000000..9926104d16
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/ChatSelectionReactionMenuOverlay.java
@@ -0,0 +1,391 @@
+package org.telegram.ui.Components.Reactions;
+
+import android.animation.Animator;
+import android.animation.AnimatorListenerAdapter;
+import android.animation.ValueAnimator;
+import android.content.Context;
+import android.graphics.Canvas;
+import android.view.Gravity;
+import android.view.View;
+import android.widget.FrameLayout;
+
+import androidx.annotation.NonNull;
+import androidx.core.math.MathUtils;
+import androidx.recyclerview.widget.RecyclerView;
+
+import org.telegram.messenger.AndroidUtilities;
+import org.telegram.messenger.LocaleController;
+import org.telegram.messenger.MessageObject;
+import org.telegram.tgnet.TLRPC;
+import org.telegram.ui.Cells.ChatMessageCell;
+import org.telegram.ui.ChatActivity;
+import org.telegram.ui.Components.CubicBezierInterpolator;
+import org.telegram.ui.Components.FragmentContextView;
+import org.telegram.ui.Components.LayoutHelper;
+import org.telegram.ui.Components.ReactionsContainerLayout;
+import org.telegram.ui.Components.RecyclerListView;
+
+import java.util.Collections;
+import java.util.List;
+
+public class ChatSelectionReactionMenuOverlay extends FrameLayout {
+    private ChatActivity parentFragment;
+    private ReactionsContainerLayout reactionsContainerLayout;
+
+    private List<MessageObject> selectedMessages = Collections.emptyList();
+    private boolean isVisible;
+    private MessageObject currentPrimaryObject;
+
+    private int mPadding = 22;
+    private int mSidePadding = 24;
+
+    private float currentOffsetY;
+    private float toOffsetY;
+    private float translationOffsetY;
+    private long lastUpdate;
+    private boolean hiddenByScroll;
+
+    private boolean messageSet;
+
+    public ChatSelectionReactionMenuOverlay(ChatActivity fragment, Context context) {
+        super(context);
+        setVisibility(GONE);
+
+        this.parentFragment = fragment;
+
+        setClipToPadding(false);
+        setClipChildren(false);
+
+        fragment.getChatListView().addOnScrollListener(new RecyclerView.OnScrollListener() {
+            @Override
+            public void
onScrolled(@NonNull RecyclerView recyclerView, int dx, int dy) { + invalidatePosition(); + } + }); + } + + private void checkCreateReactionsLayout() { + if (reactionsContainerLayout == null) { + reactionsContainerLayout = new ReactionsContainerLayout(parentFragment, getContext(), parentFragment.getCurrentAccount(), parentFragment.getResourceProvider()) { + float enabledAlpha = 1f; + long lastUpdate; + + { + setWillNotDraw(false); + } + + @Override + public void draw(Canvas canvas) { + long dt = Math.min(16, System.currentTimeMillis() - lastUpdate); + lastUpdate = System.currentTimeMillis(); + + AndroidUtilities.rectTmp.set(0, 0, getWidth(), getHeight()); + canvas.saveLayerAlpha(AndroidUtilities.rectTmp, (int) (0xFF * enabledAlpha), Canvas.ALL_SAVE_FLAG); + super.draw(canvas); + canvas.restore(); + + if (!isEnabled() && enabledAlpha != 0f) { + enabledAlpha = Math.max(0, enabledAlpha - dt / 150f); + invalidate(); + + if (enabledAlpha == 0) { + setVisibility(GONE); + } + } else if (isEnabled() && enabledAlpha != 1f) { + enabledAlpha = Math.min(1, enabledAlpha + dt / 150f); + invalidate(); + } + } + + @Override + public void setVisibility(int visibility) { + super.setVisibility(visibility); + + if (visibility == View.GONE && enabledAlpha != 0) { + enabledAlpha = 0; + } + } + }; + reactionsContainerLayout.setPadding(AndroidUtilities.dp(4) + (LocaleController.isRTL ? 0 : mSidePadding), AndroidUtilities.dp(4), AndroidUtilities.dp(4) + (LocaleController.isRTL ? mSidePadding : 0), AndroidUtilities.dp(mPadding)); + reactionsContainerLayout.setDelegate(new ReactionsContainerLayout.ReactionsContainerDelegate() { + @Override + public void onReactionClicked(View view, ReactionsLayoutInBubble.VisibleReaction visibleReaction, boolean longpress, boolean addToRecent) { + parentFragment.selectReaction(currentPrimaryObject, reactionsContainerLayout, view, 0, 0, visibleReaction, false, longpress, addToRecent); + AndroidUtilities.runOnUIThread(() -> { + if (reactionsContainerLayout != null) { + reactionsContainerLayout.dismissParent(true); + } + hideMenu(); + }); + } + + @Override + public void hideMenu() { + parentFragment.clearSelectionMode(true); + } + }); + reactionsContainerLayout.setClipChildren(false); + reactionsContainerLayout.setClipToPadding(false); + addView(reactionsContainerLayout, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 70 + mPadding, Gravity.RIGHT)); + } + } + + public boolean isVisible() { + return isVisible && !hiddenByScroll; + } + + public void invalidatePosition() { + invalidatePosition(true); + } + + private int[] pos = new int[2]; + public void invalidatePosition(boolean animate) { + if (!isVisible || currentPrimaryObject == null || reactionsContainerLayout == null) { + return; + } + + long dt = Math.min(16, System.currentTimeMillis() - lastUpdate); + lastUpdate = System.currentTimeMillis(); + if (currentOffsetY != toOffsetY) { + float a = dt / 220f; + if (toOffsetY > currentOffsetY) { + currentOffsetY = Math.min(currentOffsetY + a, toOffsetY); + } else if (toOffsetY < currentOffsetY) { + currentOffsetY = Math.max(currentOffsetY - a, toOffsetY); + } + AndroidUtilities.runOnUIThread(this::invalidatePosition); + } + + RecyclerListView listView = parentFragment.getChatListView(); + listView.getLocationInWindow(pos); + float listY = pos[1]; + getLocationInWindow(pos); + float offsetY = listY - pos[1] - parentFragment.getPullingDownOffset(); + + for (int i = 0; i < listView.getChildCount(); i++) { + View ch = listView.getChildAt(i); + if (ch instanceof ChatMessageCell) { + 
ChatMessageCell cell = (ChatMessageCell) ch; + + MessageObject obj = cell.getMessageObject(); + if (obj.getId() == currentPrimaryObject.getId()) { + boolean mirrorX = obj.isOutOwner(); + if (reactionsContainerLayout != null) { + reactionsContainerLayout.setMirrorX(mirrorX); + reactionsContainerLayout.setPadding(AndroidUtilities.dp(4) + (LocaleController.isRTL || mirrorX ? 0 : mSidePadding), AndroidUtilities.dp(mPadding), AndroidUtilities.dp(4) + (LocaleController.isRTL || mirrorX ? mSidePadding : 0), AndroidUtilities.dp(mPadding)); + } + int height = getHeight() != 0 ? getHeight() : listView.getHeight(); + int groupHeight; + + if (cell.getCurrentMessagesGroup() != null) { + MessageObject.GroupedMessages group = cell.getCurrentMessagesGroup(); + groupHeight = group.transitionParams.bottom - group.transitionParams.top; + } else { + groupHeight = cell.getHeight(); + } + + float y = cell.getY() + offsetY - AndroidUtilities.dp(74); + float min = AndroidUtilities.dp(14), max = height - AndroidUtilities.dp(218); + FragmentContextView fragmentContextView = parentFragment.getFragmentContextView(); + if (fragmentContextView != null && fragmentContextView.getVisibility() == View.VISIBLE) { + min += fragmentContextView.getHeight(); + } + boolean newVisibleOffset; + boolean flippedVertically; + if (y > min - groupHeight / 2f && y < max) { + newVisibleOffset = true; + flippedVertically = false; + toOffsetY = 0f; + } else if (y < min - groupHeight - AndroidUtilities.dp(92) || y > max) { + newVisibleOffset = false; + flippedVertically = false; + } else { + newVisibleOffset = true; + translationOffsetY = groupHeight + AndroidUtilities.dp(56); + flippedVertically = true; + toOffsetY = 1f; + } + if (!animate) { + currentOffsetY = toOffsetY; + } + + y += CubicBezierInterpolator.DEFAULT.getInterpolation(currentOffsetY) * translationOffsetY; + if (reactionsContainerLayout == null) { + return; + } + if (flippedVertically != reactionsContainerLayout.isFlippedVertically()) { + reactionsContainerLayout.setFlippedVertically(flippedVertically); + AndroidUtilities.runOnUIThread(this::invalidatePosition); + } + if (newVisibleOffset != reactionsContainerLayout.isEnabled()) { + reactionsContainerLayout.setEnabled(newVisibleOffset); + reactionsContainerLayout.invalidate(); + if (newVisibleOffset) { + reactionsContainerLayout.setVisibility(VISIBLE); + if (!messageSet) { + messageSet = true; + reactionsContainerLayout.setMessage(currentPrimaryObject, parentFragment.getCurrentChatInfo()); + } + } + } + reactionsContainerLayout.setTranslationY(MathUtils.clamp(y, min, max)); + reactionsContainerLayout.setTranslationX(cell.getNonAnimationTranslationX(true)); + + boolean invalidate = false; + LayoutParams params = (LayoutParams) reactionsContainerLayout.getLayoutParams(); + int left = Math.max(0, cell.getBackgroundDrawableLeft() - AndroidUtilities.dp(32)); + int right = Math.max((int) cell.getNonAnimationTranslationX(true), cell.getWidth() - cell.getBackgroundDrawableRight() - AndroidUtilities.dp(32)); + + int minWidth = AndroidUtilities.dp(40) * 8; + if (getWidth() - right - left < minWidth) { + if (mirrorX) { + right = 0; + left = Math.min(left, getWidth() - right - minWidth); + } else { + left = 0; + right = Math.min(right, getWidth() - left - minWidth); + } + } + + int gravity = mirrorX ? 
Gravity.RIGHT : Gravity.LEFT; + if (gravity != params.gravity) { + params.gravity = gravity; + invalidate = true; + } + if (left != params.leftMargin) { + params.leftMargin = left; + invalidate = true; + } + if (right != params.rightMargin) { + params.rightMargin = right; + invalidate = true; + } + if (invalidate) { + reactionsContainerLayout.requestLayout(); + } + return; + } + } + } + + if (reactionsContainerLayout != null && reactionsContainerLayout.isEnabled()) { + reactionsContainerLayout.setEnabled(false); + } + } + + private MessageObject findPrimaryObject() { + if (isVisible && !selectedMessages.isEmpty()) { + MessageObject msg = selectedMessages.get(0); + + if (msg.getGroupId() != 0) { + MessageObject.GroupedMessages groupedMessages = parentFragment.getGroup(msg.getGroupId()); + if (groupedMessages != null && groupedMessages.messages != null) { + for (MessageObject obj : groupedMessages.messages) { + if (obj.messageOwner != null && obj.messageOwner.reactions != null && obj.messageOwner.reactions.results != null && + !obj.messageOwner.reactions.results.isEmpty()) { + return obj; + } + } + } + } + + return msg; + } + return null; + } + + private boolean isMessageTypeAllowed(MessageObject obj) { + return MessageObject.isPhoto(obj.messageOwner) && MessageObject.getMedia(obj.messageOwner).webpage == null || obj.getDocument() != null && (MessageObject.isVideoDocument(obj.getDocument()) || MessageObject.isGifDocument(obj.getDocument())); + } + + public void setSelectedMessages(List messages) { + this.selectedMessages = messages; + + boolean visible = false; + + if (parentFragment.isSecretChat() || parentFragment.getCurrentChatInfo() != null && parentFragment.getCurrentChatInfo().available_reactions instanceof TLRPC.TL_chatReactionsNone) { + visible = false; + } else if (!messages.isEmpty()) { + visible = true; + + boolean hasGroupId = false; + long groupId = 0; + for (MessageObject obj : messages) { + if (!isMessageTypeAllowed(obj)) { + visible = false; + break; + } + if (!hasGroupId) { + hasGroupId = true; + groupId = obj.getGroupId(); + } else if (groupId != obj.getGroupId() || groupId == 0) { + visible = false; + break; + } + } + } + + if (visible != isVisible) { + isVisible = visible; + hiddenByScroll = false; + animateVisible(visible); + } else if (visible) { + currentPrimaryObject = findPrimaryObject(); + } + } + + private void animateVisible(boolean visible) { + if (visible) { + currentPrimaryObject = findPrimaryObject(); + checkCreateReactionsLayout(); + invalidatePosition(false); + + setVisibility(VISIBLE); + if (reactionsContainerLayout.isEnabled()) { + messageSet = true; + reactionsContainerLayout.setMessage(currentPrimaryObject, parentFragment.getCurrentChatInfo()); + reactionsContainerLayout.startEnterAnimation(false); + } else { + messageSet = false; + reactionsContainerLayout.setTransitionProgress(1f); + } + } else { + messageSet = false; + ValueAnimator animator = ValueAnimator.ofFloat(1, 0).setDuration(150); + animator.addUpdateListener(animation -> { + float val = (float) animation.getAnimatedValue(); + if (reactionsContainerLayout != null) { + reactionsContainerLayout.setAlpha(val); + } + }); + animator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + setVisibility(GONE); + if (reactionsContainerLayout != null) { + removeView(reactionsContainerLayout); + reactionsContainerLayout = null; + } + currentPrimaryObject = null; + } + }); + animator.start(); + } + } + + public boolean onBackPressed() { + if 
(reactionsContainerLayout != null && reactionsContainerLayout.getReactionsWindow() != null) { + reactionsContainerLayout.dismissWindow(); + return false; + } + return true; + } + + public void setHiddenByScroll(boolean hiddenByScroll) { + this.hiddenByScroll = hiddenByScroll; + + if (hiddenByScroll) { + animateVisible(false); + } + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/CustomEmojiReactionsWindow.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/CustomEmojiReactionsWindow.java index 76c4318d67..954eb7cab4 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/CustomEmojiReactionsWindow.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/CustomEmojiReactionsWindow.java @@ -18,6 +18,7 @@ import android.view.KeyEvent; import android.view.View; import android.view.WindowManager; +import android.view.animation.OvershootInterpolator; import android.widget.FrameLayout; import androidx.annotation.NonNull; @@ -25,16 +26,17 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; +import org.telegram.messenger.SharedConfig; import org.telegram.messenger.UserConfig; import org.telegram.messenger.Utilities; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ChatActivity; -import org.telegram.ui.Components.BackupImageView; import org.telegram.ui.Components.Bulletin; import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.CubicBezierInterpolator; @@ -44,6 +46,7 @@ import org.telegram.ui.PremiumPreviewFragment; import org.telegram.ui.SelectAnimatedEmojiDialog; +import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -59,6 +62,7 @@ public class CustomEmojiReactionsWindow { public RectF drawingRect = new RectF(); float enterTransitionProgress; boolean enterTransitionFinished; + boolean isShowing; SelectAnimatedEmojiDialog selectAnimatedEmojiDialog; ReactionsContainerLayout reactionsContainerLayout; @@ -75,6 +79,9 @@ public class CustomEmojiReactionsWindow { float yTranslation; float keyboardHeight; private boolean wasFocused; + private int account; + private boolean cascadeAnimation; + private ValueAnimator valueAnimator; public CustomEmojiReactionsWindow(BaseFragment baseFragment, List reactions, HashSet selectedReactions, ReactionsContainerLayout reactionsContainerLayout, Theme.ResourcesProvider resourcesProvider) { this.reactions = reactions; @@ -150,6 +157,11 @@ protected void onEmojiSelected(View emojiView, Long documentId, TLRPC.Document d reactionsContainerLayout.onReactionClicked(emojiView, ReactionsLayoutInBubble.VisibleReaction.fromCustomEmoji(documentId), false); AndroidUtilities.hideKeyboard(windowView); } + + @Override + protected void invalidateParent() { + containerView.invalidate(); + } }; selectAnimatedEmojiDialog.setOnLongPressedListener(new SelectAnimatedEmojiDialog.onLongPressedListener() { @Override @@ -181,14 +193,12 @@ public void onRecentCleared() { this.reactionsContainerLayout = reactionsContainerLayout; reactionsContainerLayout.prepareAnimation(true); - containerView.addOnLayoutChangeListener(new View.OnLayoutChangeListener() { - @Override - public void onLayoutChange(View v, int left, int top, 
int right, int bottom, int oldLeft, int oldTop, int oldRight, int oldBottom) { - containerView.removeOnLayoutChangeListener(this); - reactionsContainerLayout.prepareAnimation(false); - createTransition(true); - } - }); + AndroidUtilities.runOnUIThread(() -> { + isShowing = true; + containerView.invalidate(); + reactionsContainerLayout.prepareAnimation(false); + createTransition(true); + }, 50); NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.stopAllHeavyOperations, 7); } @@ -238,7 +248,7 @@ private void createTransition(boolean enter) { reactionsContainerLayout.getLocationOnScreen(location); } windowView.getLocationOnScreen(windowLocation); - float y = location[1] - windowLocation[1] - AndroidUtilities.dp(44); + float y = location[1] - windowLocation[1] - AndroidUtilities.dp(44) - AndroidUtilities.dp(52) - (selectAnimatedEmojiDialog.includeHint ? AndroidUtilities.dp(26) : 0); if (y + containerView.getMeasuredHeight() > windowView.getMeasuredHeight() - AndroidUtilities.dp(32)) { y = windowView.getMeasuredHeight() - AndroidUtilities.dp(32) - containerView.getMeasuredHeight(); } @@ -259,16 +269,28 @@ private void createTransition(boolean enter) { reactionsContainerLayout.setCustomEmojiEnterProgress(enterTransitionProgress); if (enter) { + cascadeAnimation = SharedConfig.getDevicePerformanceClass() >= SharedConfig.PERFORMANCE_CLASS_HIGH && LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_REACTIONS); enterTransitionFinished = false; + } else { + cascadeAnimation = false; } - int account = UserConfig.selectedAccount; + if (cascadeAnimation) { + updateCascadeEnter(0); + } + selectAnimatedEmojiDialog.setEnterAnimationInProgress(true); + account = UserConfig.selectedAccount; animationIndex = NotificationCenter.getInstance(account).setAnimationInProgress(animationIndex, null); - ValueAnimator valueAnimator = ValueAnimator.ofFloat(enterTransitionProgress, enter ? 1f : 0); + valueAnimator = ValueAnimator.ofFloat(enterTransitionProgress, enter ? 1f : 0); valueAnimator.addUpdateListener(animation -> { + valueAnimator = null; enterTransitionProgress = (float) animation.getAnimatedValue(); - reactionsContainerLayout.setCustomEmojiEnterProgress(enterTransitionProgress); + reactionsContainerLayout.setCustomEmojiEnterProgress(Utilities.clamp(enterTransitionProgress,1f, 0)); invalidatePath = true; containerView.invalidate(); + + if (cascadeAnimation) { + updateCascadeEnter(enterTransitionProgress); + } }); if (!enter) { syncReactionFrames(enter); @@ -276,7 +298,7 @@ private void createTransition(boolean enter) { valueAnimator.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { - NotificationCenter.getInstance(account).onAnimationFinish(animationIndex); + checkAnimationEnd(); enterTransitionProgress = enter ? 
1f : 0f; if (enter) { enterTransitionFinished = true; @@ -284,7 +306,7 @@ public void onAnimationEnd(Animator animation) { reactionsContainerLayout.onCustomEmojiWindowOpened(); containerView.invalidate(); } - reactionsContainerLayout.setCustomEmojiEnterProgress(enterTransitionProgress); + reactionsContainerLayout.setCustomEmojiEnterProgress(Utilities.clamp(enterTransitionProgress, 1f, 0f)); if (enter) { syncReactionFrames(enter); } @@ -297,9 +319,100 @@ public void onAnimationEnd(Animator animation) { } }); valueAnimator.setStartDelay(30); - valueAnimator.setDuration(350); - valueAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + if (cascadeAnimation) { + valueAnimator.setDuration(450); + valueAnimator.setInterpolator(new OvershootInterpolator(0.5f)); + } else { + valueAnimator.setDuration(350); + valueAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + } valueAnimator.start(); + containerView.invalidate(); + } + + HashSet animatingEnterChild = new HashSet<>(); + ArrayList animators = new ArrayList<>(); + + private void updateCascadeEnter(float progress) { + int fullHeight = selectAnimatedEmojiDialog.contentView.getHeight(); + int parentTop = (int) (selectAnimatedEmojiDialog.getY() + selectAnimatedEmojiDialog.contentView.getY() + selectAnimatedEmojiDialog.emojiGridView.getY()); + ArrayList animatedViews = null; + boolean updated = false; + for (int i = 0; i < selectAnimatedEmojiDialog.emojiGridView.getChildCount(); i++) { + View child = selectAnimatedEmojiDialog.emojiGridView.getChildAt(i); + if (animatingEnterChild.contains(child)) { + continue; + } + float cy = parentTop + child.getTop() + child.getMeasuredHeight() / 2f; + if (cy < drawingRect.bottom && cy > drawingRect.top && progress != 0) { + if (animatedViews == null) { + animatedViews = new ArrayList<>(); + } + animatedViews.add(child); + animatingEnterChild.add(child); + } else { + child.setScaleX(0f); + child.setScaleY(0f); + updated = true; + } + } + parentTop = (int) (selectAnimatedEmojiDialog.getY() + selectAnimatedEmojiDialog.contentView.getY() + selectAnimatedEmojiDialog.emojiTabs.getY()); + for (int i = 0; i < selectAnimatedEmojiDialog.emojiTabs.contentView.getChildCount(); i++) { + View child = selectAnimatedEmojiDialog.emojiTabs.contentView.getChildAt(i); + if (animatingEnterChild.contains(child)) { + continue; + } + float cy = parentTop + child.getTop() + child.getMeasuredHeight() / 2f; + if (cy < drawingRect.bottom && cy > drawingRect.top && progress != 0) { + if (animatedViews == null) { + animatedViews = new ArrayList<>(); + } + animatedViews.add(child); + animatingEnterChild.add(child); + } else { + child.setScaleX(0f); + child.setScaleY(0f); + updated = true; + } + } + if (updated) { + selectAnimatedEmojiDialog.emojiGridView.invalidate(); + selectAnimatedEmojiDialog.contentView.invalidate(); + selectAnimatedEmojiDialog.emojiTabs.contentView.invalidate(); + } + if (animatedViews != null) { + ValueAnimator valueAnimator = ValueAnimator.ofFloat(0, 1f); + ArrayList finalAnimatedViews = animatedViews; + valueAnimator.addUpdateListener(animation -> { + float s = (float) animation.getAnimatedValue(); + for (int i = 0; i < finalAnimatedViews.size(); i++) { + finalAnimatedViews.get(i).setScaleX(s); + finalAnimatedViews.get(i).setScaleY(s); + } + selectAnimatedEmojiDialog.emojiGridView.invalidate(); + selectAnimatedEmojiDialog.contentView.invalidate(); + selectAnimatedEmojiDialog.emojiTabs.contentView.invalidate(); + }); + animators.add(valueAnimator); + valueAnimator.addListener(new 
AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + super.onAnimationEnd(animation); + animators.remove(valueAnimator); + checkAnimationEnd(); + } + }); + valueAnimator.setDuration(350); + valueAnimator.setInterpolator(new OvershootInterpolator(1f)); + valueAnimator.start(); + } + } + + private void checkAnimationEnd() { + if (animators.isEmpty()) { + NotificationCenter.getInstance(account).onAnimationFinish(animationIndex); + selectAnimatedEmojiDialog.setEnterAnimationInProgress(false); + } } private void syncReactionFrames(boolean enter) { @@ -333,7 +446,7 @@ public void removeView() { }); } - private void dismiss() { + public void dismiss() { if (dismissed) { return; } @@ -382,7 +495,6 @@ private class ContainerView extends FrameLayout { Drawable shadow; Rect shadowPad = new Rect(); Paint backgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG); - private Paint dimPaint = new Paint(Paint.ANTI_ALIAS_FLAG); int[] radiusTmp = new int[4]; @@ -412,13 +524,15 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { @Override protected void dispatchDraw(Canvas canvas) { - dimPaint.setAlpha((int) (0.2f * enterTransitionProgress * 255)); - canvas.drawPaint(dimPaint); + if (!isShowing) { + return; + } + float progressClpamped = Utilities.clamp(enterTransitionProgress,1f, 0f); AndroidUtilities.rectTmp.set(0, 0, getMeasuredWidth(), getMeasuredHeight()); AndroidUtilities.lerp(fromRect, AndroidUtilities.rectTmp, enterTransitionProgress, drawingRect); float radius = AndroidUtilities.lerp(fromRadius, AndroidUtilities.dp(8), enterTransitionProgress); - shadow.setAlpha((int) (Utilities.clamp(enterTransitionProgress / 0.05f, 1f, 0f) * 255)); + shadow.setAlpha((int) (Utilities.clamp(progressClpamped / 0.05f, 1f, 0f) * 255)); shadow.setBounds((int) drawingRect.left - shadowPad.left, (int) drawingRect.top - shadowPad.top, (int) drawingRect.right + shadowPad.right, (int) drawingRect.bottom + shadowPad.bottom); shadow.draw(canvas); @@ -572,7 +686,7 @@ protected void dispatchDraw(Canvas canvas) { } else { if (holderView.hasEnterAnimation && holderView.loopImageView.getImageReceiver().getLottieAnimation() == null) { float oldAlpha = holderView.enterImageView.getImageReceiver().getAlpha(); - holderView.enterImageView.getImageReceiver().setAlpha(oldAlpha * (1f - enterTransitionProgress)); + holderView.enterImageView.getImageReceiver().setAlpha(oldAlpha * (1f - progressClpamped)); holderView.enterImageView.draw(canvas); holderView.enterImageView.getImageReceiver().setAlpha(oldAlpha); } else { @@ -581,14 +695,14 @@ protected void dispatchDraw(Canvas canvas) { imageReceiver = holderView.loopImageView.animatedEmojiDrawable.getImageReceiver(); } float oldAlpha = imageReceiver.getAlpha(); - imageReceiver.setAlpha(oldAlpha * (1f - enterTransitionProgress)); + imageReceiver.setAlpha(oldAlpha * (1f - progressClpamped)); holderView.loopImageView.draw(canvas); imageReceiver.setAlpha(oldAlpha); } } } else { canvas.translate(child.getX() + drawingRect.width() - reactionsContainerLayout.rect.width(), child.getY() + fromRect.top - drawingRect.top); - canvas.saveLayerAlpha(0, 0, child.getMeasuredWidth(), child.getMeasuredHeight(), (int) (255 * (1f - enterTransitionProgress)), Canvas.ALL_SAVE_FLAG); + canvas.saveLayerAlpha(0, 0, child.getMeasuredWidth(), child.getMeasuredHeight(), (int) (255 * (1f - progressClpamped)), Canvas.ALL_SAVE_FLAG); canvas.scale(1f - enterTransitionProgress, 1f - enterTransitionProgress, child.getMeasuredWidth() >> 1, child.getMeasuredHeight() >> 1); 
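/*
 * Illustrative, framework-free sketch (not part of the patch): the hunks above switch the
 * window's enter animation to an OvershootInterpolator for the cascade effect and start
 * feeding alpha math a clamped copy of the progress (Utilities.clamp(enterTransitionProgress,
 * 1f, 0f), the "progressClpamped" local). An overshooting interpolator intentionally produces
 * values above 1, which is fine for scale and translation but yields invalid alpha values.
 * The class below only demonstrates that interaction; clamp01 and overshoot are names made
 * up for this sketch.
 */
final class OvershootAlphaSketch {

    // Mirrors the clamp-to-[0, 1] step applied before alpha is derived from the progress.
    static float clamp01(float value) {
        return Math.max(0f, Math.min(1f, value));
    }

    // Same curve shape as android.view.animation.OvershootInterpolator with the given tension.
    static float overshoot(float t, float tension) {
        t -= 1.0f;
        return t * t * ((tension + 1) * t + tension) + 1.0f;
    }

    public static void main(String[] args) {
        for (int step = 0; step <= 10; step++) {
            float t = step / 10f;
            float eased = overshoot(t, 0.5f);                       // exceeds 1.0 shortly before the end
            int rawAlpha = (int) (255 * (1f - eased));              // goes negative while overshooting
            int clampedAlpha = (int) (255 * (1f - clamp01(eased))); // stays inside [0, 255]
            System.out.printf("t=%.1f eased=%.3f raw=%d clamped=%d%n", t, eased, rawAlpha, clampedAlpha);
        }
    }
}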
child.draw(canvas); canvas.restore(); @@ -605,9 +719,11 @@ protected void dispatchDraw(Canvas canvas) { } canvas.save(); canvas.clipPath(pathToClip); - canvas.translate(enterTransitionOffsetX, enterTransitionOffsetY); + canvas.translate(cascadeAnimation ? 0 : enterTransitionOffsetX, enterTransitionOffsetY); canvas.scale(enterTransitionScale, enterTransitionScale, enterTransitionScalePx, enterTransitionScalePy); - selectAnimatedEmojiDialog.setAlpha(enterTransitionProgress); + if (!cascadeAnimation) { + selectAnimatedEmojiDialog.setAlpha(enterTransitionProgress); + } super.dispatchDraw(canvas); canvas.restore(); @@ -619,18 +735,10 @@ protected void dispatchDraw(Canvas canvas) { } selectAnimatedEmojiDialog.drawBigReaction(canvas, this); - invalidate(); - } - } - - private boolean imageIsEquals(BackupImageView loopImageView, SelectAnimatedEmojiDialog.ImageViewEmoji toImageView) { - if (toImageView.span == null && loopImageView.getImageReceiver().getLottieAnimation() != null && toImageView.imageReceiver.getLottieAnimation() != null) { - return true; - } - if (loopImageView.animatedEmojiDrawable != null) { - return toImageView.span.getDocumentId() == loopImageView.animatedEmojiDrawable.getDocumentId(); + if (valueAnimator != null) { + invalidate(); + } } - return false; } public void setRecentReactions(List reactions) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/ReactionsEffectOverlay.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/ReactionsEffectOverlay.java index 338ac6f21b..0c0f8c1ad3 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/ReactionsEffectOverlay.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/ReactionsEffectOverlay.java @@ -16,6 +16,7 @@ import org.telegram.messenger.FileLoader; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; @@ -206,9 +207,9 @@ private ReactionsEffectOverlay(Context context, BaseFragment fragment, Reactions fromHeight = holderView.loopImageView.getWidth() * holderView.getScaleX(); } else if (reactionButton != null) { cell.getLocationInWindow(loc); - fromX = loc[0] + cell.reactionsLayoutInBubble.x + reactionButton.x + reactionButton.imageReceiver.getImageX(); - fromY = loc[1] + cell.reactionsLayoutInBubble.y + reactionButton.y + reactionButton.imageReceiver.getImageY(); - fromHeight = reactionButton.imageReceiver.getImageHeight(); + fromX = loc[0] + cell.reactionsLayoutInBubble.x + reactionButton.x + (reactionButton.imageReceiver == null ? 0 : reactionButton.imageReceiver.getImageX()); + fromY = loc[1] + cell.reactionsLayoutInBubble.y + reactionButton.y + (reactionButton.imageReceiver == null ? 0 : reactionButton.imageReceiver.getImageY()); + fromHeight = reactionButton.imageReceiver == null ? 
0 : reactionButton.imageReceiver.getImageHeight(); } else { ((View) cell.getParent()).getLocationInWindow(loc); fromX = loc[0] + x; @@ -303,7 +304,7 @@ protected void dispatchDraw(Canvas canvas) { toY = lastDrawnToY; } - if (fragment.getParentActivity() != null && fragment.getFragmentView().getParent() != null && fragment.getFragmentView().getVisibility() == View.VISIBLE && fragment.getFragmentView() != null) { + if (fragment.getParentActivity() != null && fragment.getFragmentView() != null && fragment.getFragmentView().getParent() != null && fragment.getFragmentView().getVisibility() == View.VISIBLE && fragment.getFragmentView() != null) { fragment.getFragmentView().getLocationOnScreen(loc); setAlpha(((View) fragment.getFragmentView().getParent()).getAlpha()); } else { @@ -521,7 +522,7 @@ protected void onDetachedFromWindow() { if (availableReaction != null || visibleReaction.documentId != 0) { if (availableReaction != null) { if (animationType != ONLY_MOVE_ANIMATION) { - if ((animationType == SHORT_ANIMATION && !SharedConfig.getLiteMode().enabled()) || animationType == LONG_ANIMATION) { + if ((animationType == SHORT_ANIMATION && LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_CHAT)) || animationType == LONG_ANIMATION) { TLRPC.Document document = animationType == SHORT_ANIMATION ? availableReaction.around_animation : availableReaction.effect_animation; String filer = animationType == SHORT_ANIMATION ? getFilterForAroundAnimation() : sizeForFilter + "_" + sizeForFilter; effectImageView.getImageReceiver().setUniqKeyPrefix((uniqPrefix++) + "_" + cell.getMessageObject().getId() + "_"); @@ -593,7 +594,7 @@ protected void onDetachedFromWindow() { if (animationType != SHORT_ANIMATION) { if (availableReaction != null) { - emojiStaticImageView.getImageReceiver().setImage(ImageLocation.getForDocument(availableReaction.center_icon), "40_40_lastframe", null, "webp", availableReaction, 1); + emojiStaticImageView.getImageReceiver().setImage(ImageLocation.getForDocument(availableReaction.center_icon), "40_40_lastreactframe", null, "webp", availableReaction, 1); } container.addView(emojiStaticImageView); emojiStaticImageView.getLayoutParams().width = emojiSize; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/ReactionsLayoutInBubble.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/ReactionsLayoutInBubble.java index c6af97580a..bfc413a1fc 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/ReactionsLayoutInBubble.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/ReactionsLayoutInBubble.java @@ -25,6 +25,7 @@ import org.telegram.messenger.MessagesController; import org.telegram.messenger.SvgHelper; import org.telegram.messenger.UserConfig; +import org.telegram.messenger.UserObject; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Cells.ChatMessageCell; @@ -116,9 +117,7 @@ public void setMessage(MessageObject messageObject, boolean isSmall, Theme.Resou this.resourcesProvider = resourcesProvider; this.isSmall = isSmall; this.messageObject = messageObject; - for (int i = 0; i < reactionButtons.size(); i++) { - reactionButtons.get(i).detach(); - } + ArrayList oldButtons = new ArrayList<>(reactionButtons); hasUnreadReactions = false; reactionButtons.clear(); if (messageObject != null) { @@ -130,21 +129,39 @@ public void setMessage(MessageObject messageObject, boolean isSmall, Theme.Resou } for (int i = 0; i < messageObject.messageOwner.reactions.results.size(); 
i++) { TLRPC.ReactionCount reactionCount = messageObject.messageOwner.reactions.results.get(i); - ReactionButton button = new ReactionButton(reactionCount, isSmall); + ReactionButton old = null; +// for (int j = 0; j < oldButtons.size(); ++j) { +// ReactionButton btn = oldButtons.get(j); +// if (btn.reaction.equals(reactionCount.reaction)) { +// old = btn; +// break; +// } +// } + ReactionButton button = new ReactionButton(old, reactionCount, isSmall); reactionButtons.add(button); if (!isSmall && messageObject.messageOwner.reactions.recent_reactions != null) { ArrayList users = null; - if (messageObject.getDialogId() > 0) { + if (messageObject.getDialogId() > 0 && !UserObject.isReplyUser(messageObject.getDialogId())) { users = new ArrayList<>(); + TLRPC.User me = UserConfig.getInstance(currentAccount).getCurrentUser(); + TLRPC.User dialogUser = MessagesController.getInstance(currentAccount).getUser(messageObject.getDialogId()); if (reactionCount.count == 2) { - users.add(UserConfig.getInstance(currentAccount).getCurrentUser()); - users.add(MessagesController.getInstance(currentAccount).getUser(messageObject.getDialogId())); + if (me != null) { + users.add(me); + } + if (dialogUser != null) { + users.add(dialogUser); + } } else { if (reactionCount.chosen) { - users.add(UserConfig.getInstance(currentAccount).getCurrentUser()); + if (me != null) { + users.add(me); + } } else { - users.add(MessagesController.getInstance(currentAccount).getUser(messageObject.getDialogId())); + if (dialogUser != null) { + users.add(dialogUser); + } } } button.setUsers(users); @@ -154,14 +171,17 @@ public void setMessage(MessageObject messageObject, boolean isSmall, Theme.Resou } } else if (reactionCount.count <= 3 && totalCount <= 3) { for (int j = 0; j < messageObject.messageOwner.reactions.recent_reactions.size(); j++) { - TLRPC.MessagePeerReaction reccent = messageObject.messageOwner.reactions.recent_reactions.get(j); - VisibleReaction visibleReactionPeer = VisibleReaction.fromTLReaction(reccent.reaction); + TLRPC.MessagePeerReaction recent = messageObject.messageOwner.reactions.recent_reactions.get(j); + VisibleReaction visibleReactionPeer = VisibleReaction.fromTLReaction(recent.reaction); VisibleReaction visibleReactionCount = VisibleReaction.fromTLReaction(reactionCount.reaction); - if (visibleReactionPeer.equals(visibleReactionCount) && MessagesController.getInstance(currentAccount).getUser(MessageObject.getPeerId(reccent.peer_id)) != null) { - if (users == null) { - users = new ArrayList<>(); + if (visibleReactionPeer.equals(visibleReactionCount) && MessagesController.getInstance(currentAccount).getUser(MessageObject.getPeerId(recent.peer_id)) != null) { + TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(MessageObject.getPeerId(recent.peer_id)); + if (user != null) { + if (users == null) { + users = new ArrayList<>(); + } + users.add(user); } - users.add(MessagesController.getInstance(currentAccount).getUser(MessageObject.getPeerId(reccent.peer_id))); } } button.setUsers(users); @@ -172,7 +192,8 @@ public void setMessage(MessageObject messageObject, boolean isSmall, Theme.Resou } } if (isSmall && reactionCount.count > 1 && reactionCount.chosen) { - reactionButtons.add(new ReactionButton(reactionCount, isSmall)); + // TODO: also reuse here + reactionButtons.add(new ReactionButton(null, reactionCount, isSmall)); reactionButtons.get(0).isSelected = false; reactionButtons.get(1).isSelected = true; reactionButtons.get(0).realCount = 1; @@ -197,6 +218,9 @@ public void 
setMessage(MessageObject messageObject, boolean isSmall, Theme.Resou } hasUnreadReactions = MessageObject.hasUnreadReactions(messageObject.messageOwner); } + for (int i = 0; i < oldButtons.size(); i++) { + oldButtons.get(i).detach(); + } isEmpty = reactionButtons.isEmpty(); } @@ -416,11 +440,13 @@ public boolean animateChange() { } private boolean equalsUsersList(ArrayList users, ArrayList users1) { - if (users.size() != users1.size()) { + if (users == null || users1 == null || users.size() != users1.size()) { return false; } for (int i = 0; i < users.size(); i++) { - if (users.get(i).id != users.get(i).id) { + TLRPC.User user1 = users.get(i); + TLRPC.User user2 = users1.get(i); + if (user1 == null || user2 == null || user1.id != user2.id) { return false; } } @@ -483,10 +509,10 @@ public class ReactionButton { public int y; public int width; public int height; - ImageReceiver imageReceiver = new ImageReceiver(); + ImageReceiver imageReceiver; AnimatedEmojiDrawable animatedEmojiDrawable; int animatedEmojiDrawableColor; - CounterView.CounterDrawable counterDrawable = new CounterView.CounterDrawable(parentView, false, null); + CounterView.CounterDrawable counterDrawable; int backgroundColor; int textColor; int serviceBackgroundColor; @@ -498,7 +524,16 @@ public class ReactionButton { AvatarsDrawable avatarsDrawable; ArrayList users; - public ReactionButton(TLRPC.ReactionCount reactionCount, boolean isSmall) { + public ReactionButton(ReactionButton reuseFrom, TLRPC.ReactionCount reactionCount, boolean isSmall) { + if (reuseFrom != null) { + counterDrawable = reuseFrom.counterDrawable; + } + if (imageReceiver == null) { + imageReceiver = new ImageReceiver(); + } + if (counterDrawable == null) { + counterDrawable = new CounterView.CounterDrawable(parentView, false, null); + } this.reactionCount = reactionCount; this.reaction = reactionCount.reaction; this.visibleReaction = VisibleReaction.fromTLReaction(reactionCount.reaction); @@ -524,9 +559,9 @@ public ReactionButton(TLRPC.ReactionCount reactionCount, boolean isSmall) { if (visibleReaction.emojicon != null) { TLRPC.TL_availableReaction r = MediaDataController.getInstance(currentAccount).getReactionsMap().get(visibleReaction.emojicon); if (r != null) { - SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(r.static_icon, Theme.key_windowBackgroundGray, 1.0f); //imageReceiver.setImage(ImageLocation.getForDocument(r.static_icon), "40_40", svgThumb, "webp", r, 1); - imageReceiver.setImage(ImageLocation.getForDocument(r.center_icon), "40_40_lastframe", svgThumb, "webp", r, 1); + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(r.static_icon, Theme.key_windowBackgroundGray, 1.0f); + imageReceiver.setImage(ImageLocation.getForDocument(r.center_icon), "40_40_lastreactframe", svgThumb, "webp", r, 1); } } else if (visibleReaction.documentId != 0) { animatedEmojiDrawable = new AnimatedEmojiDrawable(AnimatedEmojiDrawable.CACHE_TYPE_ALERT_PREVIEW, currentAccount, visibleReaction.documentId); @@ -622,7 +657,7 @@ public void draw(Canvas canvas, float progress, float alpha, boolean drawOverlay drawImage(canvas, alpha); } - if (count != 0 || counterDrawable.countChangeProgress != 1f) { + if (counterDrawable != null && (count != 0 || counterDrawable.countChangeProgress != 1f)) { canvas.save(); canvas.translate(AndroidUtilities.dp(8) + AndroidUtilities.dp(20) + AndroidUtilities.dp(2), 0); counterDrawable.draw(canvas); @@ -701,9 +736,9 @@ public void setUsers(ArrayList users) { avatarsDrawable.width = AndroidUtilities.dp(100); 
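/*
 * Illustrative, framework-free sketch (not code from the patch): the equalsUsersList hunk above
 * replaces a self-comparison (users.get(i).id != users.get(i).id, which can never be true, so any
 * two same-sized lists used to compare as equal) with an element-by-element comparison of the two
 * lists' ids, and adds null guards for the lists and their entries. The helper below restates that
 * corrected logic with invented names (IdListEqualitySketch, User, sameIds).
 */
import java.util.Arrays;
import java.util.List;

final class IdListEqualitySketch {

    static final class User {
        final long id;
        User(long id) { this.id = id; }
    }

    // Null-safe, order-sensitive comparison by id, mirroring the corrected check above.
    static boolean sameIds(List<User> a, List<User> b) {
        if (a == null || b == null || a.size() != b.size()) {
            return false;
        }
        for (int i = 0; i < a.size(); i++) {
            User x = a.get(i);
            User y = b.get(i);
            if (x == null || y == null || x.id != y.id) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        List<User> first = Arrays.asList(new User(1), new User(2));
        List<User> second = Arrays.asList(new User(1), new User(3));
        System.out.println(sameIds(first, second)); // false: ids differ at index 1
        System.out.println(sameIds(first, first));  // true: same ids in the same order
    }
}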
avatarsDrawable.height = height; avatarsDrawable.setAvatarsTextSize(AndroidUtilities.dp(22)); - if (attached) { - avatarsDrawable.onAttachedToWindow(); - } + } + if (attached) { + avatarsDrawable.onAttachedToWindow(); } for (int i = 0; i < users.size(); i++) { if (i == 3) { @@ -893,12 +928,16 @@ public static class VisibleReaction { public String emojicon; public long documentId; + public long hash; + public static VisibleReaction fromTLReaction(TLRPC.Reaction reaction) { VisibleReaction visibleReaction = new VisibleReaction(); if (reaction instanceof TLRPC.TL_reactionEmoji) { visibleReaction.emojicon = ((TLRPC.TL_reactionEmoji) reaction).emoticon; + visibleReaction.hash = visibleReaction.emojicon.hashCode(); } else if (reaction instanceof TLRPC.TL_reactionCustomEmoji) { visibleReaction.documentId = ((TLRPC.TL_reactionCustomEmoji) reaction).document_id; + visibleReaction.hash = visibleReaction.documentId; } return visibleReaction; @@ -907,19 +946,27 @@ public static VisibleReaction fromTLReaction(TLRPC.Reaction reaction) { public static VisibleReaction fromEmojicon(TLRPC.TL_availableReaction reaction) { VisibleReaction visibleReaction = new VisibleReaction(); visibleReaction.emojicon = reaction.reaction; + visibleReaction.hash = visibleReaction.emojicon.hashCode(); return visibleReaction; } public static VisibleReaction fromEmojicon(String reaction) { + if (reaction == null) { + reaction = ""; + } + VisibleReaction visibleReaction = new VisibleReaction(); if (reaction.startsWith("animated_")) { try { visibleReaction.documentId = Long.parseLong(reaction.substring(9)); + visibleReaction.hash = visibleReaction.documentId; } catch (Exception ignore) { visibleReaction.emojicon = reaction; + visibleReaction.hash = visibleReaction.emojicon.hashCode(); } } else { visibleReaction.emojicon = reaction; + visibleReaction.hash = visibleReaction.emojicon.hashCode(); } return visibleReaction; } @@ -927,6 +974,7 @@ public static VisibleReaction fromEmojicon(String reaction) { public static VisibleReaction fromCustomEmoji(Long documentId) { VisibleReaction visibleReaction = new VisibleReaction(); visibleReaction.documentId = documentId; + visibleReaction.hash = visibleReaction.documentId; return visibleReaction; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactionsContainerLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactionsContainerLayout.java index e52f214b98..3bc275c0a3 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactionsContainerLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ReactionsContainerLayout.java @@ -18,6 +18,7 @@ import android.graphics.RectF; import android.graphics.Shader; import android.graphics.drawable.Drawable; +import android.provider.Settings; import android.util.Property; import android.view.Gravity; import android.view.HapticFeedbackConstants; @@ -39,10 +40,12 @@ import androidx.recyclerview.widget.RecyclerView; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.BuildVars; import org.telegram.messenger.ChatObject; import org.telegram.messenger.DocumentObject; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.MessageObject; @@ -92,6 +95,7 @@ public void set(ReactionsContainerLayout object, Float value) { private final static float SCALE_PROGRESS = 
0.75f; private final static float CLIP_PROGRESS = 0.25f; public final RecyclerListView recyclerListView; + public final float durationScale; private Paint bgPaint = new Paint(Paint.ANTI_ALIAS_FLAG); private Paint leftShadowPaint = new Paint(Paint.ANTI_ALIAS_FLAG), @@ -108,6 +112,11 @@ public void set(ReactionsContainerLayout object, Float value) { private int currentAccount; private long waitingLoadingChatId; + private boolean mirrorX; + private boolean isFlippedVertically; + private float flipVerticalProgress; + private long lastUpdate; + ValueAnimator cancelPressedAnimation; FrameLayout premiumLockContainer; FrameLayout customReactionsContainer; @@ -155,9 +164,12 @@ public void set(ReactionsContainerLayout object, Float value) { private boolean allReactionsAvailable; private boolean allReactionsIsDefault; private Paint selectedPaint; + ChatScrimPopupContainerLayout parentLayout; + private boolean animatePopup; public ReactionsContainerLayout(BaseFragment fragment, @NonNull Context context, int currentAccount, Theme.ResourcesProvider resourcesProvider) { super(context); + durationScale = Settings.Global.getFloat(context.getContentResolver(), Settings.Global.ANIMATOR_DURATION_SCALE, 1.0f); selectedPaint = new Paint(Paint.ANTI_ALIAS_FLAG); selectedPaint.setColor(Theme.getColor(Theme.key_listSelector, resourcesProvider)); this.resourcesProvider = resourcesProvider; @@ -321,6 +333,7 @@ public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int customEmojiReactionsIconView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_dialogBackground), PorterDuff.Mode.MULTIPLY)); customEmojiReactionsIconView.setBackground(Theme.createSimpleSelectorCircleDrawable(AndroidUtilities.dp(28), Color.TRANSPARENT, ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_listSelector), 40))); customEmojiReactionsIconView.setPadding(AndroidUtilities.dp(2), AndroidUtilities.dp(2), AndroidUtilities.dp(2), AndroidUtilities.dp(2)); + customEmojiReactionsIconView.setContentDescription(LocaleController.getString(R.string.AccDescrExpandPanel)); customReactionsContainer.addView(customEmojiReactionsIconView, LayoutHelper.createFrame(30, 30, Gravity.CENTER)); customEmojiReactionsIconView.setOnClickListener(v -> { showCustomEmojiReactionDialog(); @@ -487,12 +500,19 @@ private void animatePullingBack() { } } + public void dismissWindow() { + reactionsWindow.dismiss(); + } + + public CustomEmojiReactionsWindow getReactionsWindow() { + return reactionsWindow; + } + private void showCustomEmojiReactionDialog() { if (reactionsWindow != null) { return; } reactionsWindow = new CustomEmojiReactionsWindow(fragment, allReactionsList, selectedReactions, this, resourcesProvider); - reactionsWindow.onDismissListener(() -> { reactionsWindow = null; }); @@ -525,6 +545,20 @@ public void setDelegate(ReactionsContainerDelegate delegate) { this.delegate = delegate; } + public boolean isFlippedVertically() { + return isFlippedVertically; + } + + public void setFlippedVertically(boolean flippedVertically) { + isFlippedVertically = flippedVertically; + invalidate(); + } + + public void setMirrorX(boolean mirrorX) { + this.mirrorX = mirrorX; + invalidate(); + } + @SuppressLint("NotifyDataSetChanged") private void setVisibleReactionsList(List visibleReactionsList) { this.visibleReactionsList.clear(); @@ -566,11 +600,19 @@ private void setVisibleReactionsList(List lastReactionX) { + lastReactionX = child.getLeft(); + } if (view.hasEnterAnimation && view.enterImageView.getImageReceiver().getLottieAnimation() == null) 
{ continue; } @@ -660,13 +710,13 @@ protected void dispatchDraw(Canvas canvas) { } else if (!view.isEnter) { view.resetAnimation(); } - if (view.getLeft() > lastReactionX) { - lastReactionX = view.getLeft(); - } } else { if (child == premiumLockContainer) { if (child.getX() + child.getMeasuredWidth() / 2f > 0 && child.getX() + child.getMeasuredWidth() / 2f < recyclerListView.getWidth()) { if (!lastVisibleViewsTmp.contains(child)) { + if (transitionProgress != 1f) { + premiumLockIconView.resetAnimation(); + } premiumLockIconView.play(delay); delay += 30; } @@ -678,7 +728,10 @@ protected void dispatchDraw(Canvas canvas) { if (child == customReactionsContainer) { if (child.getX() + child.getMeasuredWidth() / 2f > 0 && child.getX() + child.getMeasuredWidth() / 2f < recyclerListView.getWidth()) { if (!lastVisibleViewsTmp.contains(child)) { - customEmojiReactionsIconView.play(delay); + if (transitionProgress != 1f) { + customEmojiReactionsIconView.resetAnimation(); + } + customEmojiReactionsIconView.play(delay, LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_REACTIONS) || SharedConfig.getDevicePerformanceClass() >= SharedConfig.PERFORMANCE_CLASS_AVERAGE); delay += 30; } lastVisibleViews.add(child); @@ -703,7 +756,7 @@ protected void dispatchDraw(Canvas canvas) { float scale = Utilities.clamp(progress, 1f, 0f); nextRecentReaction.setScaleX(scale); nextRecentReaction.setScaleY(scale); - nextRecentReaction.setTranslationX(recyclerListView.getLeft() + left - pullingOffsetX - AndroidUtilities.dp(20)); + nextRecentReaction.setTranslationX(recyclerListView.getX() + left - pullingOffsetX - AndroidUtilities.dp(20)); nextRecentReaction.setVisibility(View.VISIBLE); } else { nextRecentReaction.setVisibility(View.GONE); @@ -716,16 +769,14 @@ protected void dispatchDraw(Canvas canvas) { if (skipDraw && reactionsWindow != null) { int alpha = (int) (Utilities.clamp(1f - (customEmojiReactionsEnterProgress / 0.2f), 1f, 0f) * (1f - customEmojiReactionsEnterProgress) * 255); canvas.save(); - //canvas.translate(rect.left - reactionsWindow.drawingRect.left + (rect.width() - reactionsWindow.drawingRect.width()), rect.top - reactionsWindow.drawingRect.top + (rect.height() - reactionsWindow.drawingRect.height())); - - // canvas.translate(rect.width() - reactionsWindow.drawingRect.width(), (reactionsWindow.drawingRect.bottom() - rect.height())); drawBubbles(canvas, br, cPr, sr, alpha); canvas.restore(); return; } canvas.clipPath(mPath); - canvas.translate((LocaleController.isRTL ? -1 : 1) * getWidth() * (1f - transitionProgress), 0); + canvas.translate((LocaleController.isRTL || mirrorX ? -1 : 1) * getWidth() * (1f - transitionProgress), 0); + recyclerListView.setTranslationX(-transitionLeftOffset); super.dispatchDraw(canvas); if (leftShadowPaint != null) { @@ -753,9 +804,11 @@ public void drawBubbles(Canvas canvas) { private void drawBubbles(Canvas canvas, float br, float cPr, float sr, int alpha) { canvas.save(); - canvas.clipRect(0, rect.bottom, getMeasuredWidth(), getMeasuredHeight() + AndroidUtilities.dp(8)); - float cx = LocaleController.isRTL ? 
bigCircleOffset : getWidth() - bigCircleOffset; + float scale = transitionProgress; + canvas.clipRect(0, AndroidUtilities.lerp(rect.bottom, 0, CubicBezierInterpolator.DEFAULT.getInterpolation(flipVerticalProgress)) - (int) Math.ceil(rect.height() / 2f * (1f - transitionProgress)), getMeasuredWidth(), AndroidUtilities.lerp(getMeasuredHeight() + AndroidUtilities.dp(8), getPaddingTop() - expandSize(), CubicBezierInterpolator.DEFAULT.getInterpolation(flipVerticalProgress))); + float cx = LocaleController.isRTL || mirrorX ? bigCircleOffset : getWidth() - bigCircleOffset; float cy = getHeight() - getPaddingBottom() + expandSize(); + cy = AndroidUtilities.lerp(cy, getPaddingTop() - expandSize(), CubicBezierInterpolator.DEFAULT.getInterpolation(flipVerticalProgress)); int sPad = AndroidUtilities.dp(3); shadow.setAlpha(alpha); bgPaint.setAlpha(alpha); @@ -763,8 +816,9 @@ private void drawBubbles(Canvas canvas, float br, float cPr, float sr, int alpha shadow.draw(canvas); canvas.drawCircle(cx, cy, br, bgPaint); - cx = LocaleController.isRTL ? bigCircleOffset - bigCircleRadius : getWidth() - bigCircleOffset + bigCircleRadius; + cx = LocaleController.isRTL || mirrorX ? bigCircleOffset - bigCircleRadius : getWidth() - bigCircleOffset + bigCircleRadius; cy = getHeight() - smallCircleRadius - sPad + expandSize(); + cy = AndroidUtilities.lerp(cy, smallCircleRadius + sPad - expandSize(), CubicBezierInterpolator.DEFAULT.getInterpolation(flipVerticalProgress)); sPad = -AndroidUtilities.dp(1); shadow.setBounds((int) (cx - br - sPad * cPr), (int) (cy - br - sPad * cPr), (int) (cx + br + sPad * cPr), (int) (cy + br + sPad * cPr)); shadow.draw(canvas); @@ -791,7 +845,6 @@ private void checkPressedProgressForOtherViews(View view) { view.setScaleY(otherViewsScale); } - private void checkPressedProgress(Canvas canvas, ReactionHolderView view) { float pullingOffsetX = 0; if (pullingLeftOffset != 0) { @@ -888,10 +941,12 @@ private void invalidateShaders() { public void setTransitionProgress(float transitionProgress) { this.transitionProgress = transitionProgress; + if (parentLayout != null) { + parentLayout.setReactionsTransitionProgress(animatePopup && allowSmoothEnterTransition() ? 
transitionProgress : 1); + } invalidate(); } - public void setMessage(MessageObject message, TLRPC.ChatFull chatFull) { this.messageObject = message; TLRPC.ChatFull reactionsChat = chatFull; @@ -928,13 +983,15 @@ public void setMessage(MessageObject message, TLRPC.ChatFull chatFull) { } } } else { - throw new RuntimeException("Unknow chat reactions type"); + if (BuildVars.DEBUG_PRIVATE_VERSION) { + throw new RuntimeException("Unknown chat reactions type: " + reactionsChat.available_reactions); + } } } else { allReactionsAvailable = true; fillRecentReactionsList(visibleReactions); } - + filterReactions(visibleReactions); setVisibleReactionsList(visibleReactions); if (message.messageOwner.reactions != null && message.messageOwner.reactions.results != null) { @@ -946,6 +1003,16 @@ public void setMessage(MessageObject message, TLRPC.ChatFull chatFull) { } } + private void filterReactions(List visibleReactions) { + HashSet set = new HashSet<>(); + for (int i = 0; i < visibleReactions.size(); i++) { + if (set.contains(visibleReactions.get(i))) { + i--; + visibleReactions.remove(i); + } + set.add(visibleReactions.get(i)); + } + } private void fillRecentReactionsList(List visibleReactions) { if (!allReactionsAvailable) { @@ -1009,12 +1076,19 @@ private void checkPremiumReactions(List reactions) { } } - public void startEnterAnimation() { + public void startEnterAnimation(boolean animatePopup) { + this.animatePopup = animatePopup; setTransitionProgress(0); setAlpha(1f); - ObjectAnimator animator = ObjectAnimator.ofFloat(this, ReactionsContainerLayout.TRANSITION_PROGRESS_VALUE, 0f, 1f).setDuration(400); - animator.setInterpolator(new OvershootInterpolator(1.004f)); - animator.start(); + if (allowSmoothEnterTransition()) { + ObjectAnimator animator = ObjectAnimator.ofFloat(this, ReactionsContainerLayout.TRANSITION_PROGRESS_VALUE, 0f, 1f).setDuration(250); + animator.setInterpolator(new OvershootInterpolator(0.5f)); + animator.start(); + } else { + ObjectAnimator animator = ObjectAnimator.ofFloat(this, ReactionsContainerLayout.TRANSITION_PROGRESS_VALUE, 0f, 1f).setDuration(250); + animator.setInterpolator(new OvershootInterpolator(0.5f)); + animator.start(); + } } public int getTotalWidth() { @@ -1032,7 +1106,9 @@ public int getItemsCount() { public void setCustomEmojiEnterProgress(float progress) { customEmojiReactionsEnterProgress = progress; - chatScrimPopupContainerLayout.setPopupAlpha(1f - progress); + if (chatScrimPopupContainerLayout != null) { + chatScrimPopupContainerLayout.setPopupAlpha(1f - progress); + } invalidate(); } @@ -1101,7 +1177,7 @@ public void clearRecentReactions() { alertDialog.show(); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } @@ -1171,6 +1247,7 @@ public final class ReactionHolderView extends FrameLayout { public boolean selected; public boolean drawSelected = true; public int position; + public boolean waitingAnimation; Runnable playRunnable = new Runnable() { @Override @@ -1178,6 +1255,7 @@ public void run() { if (enterImageView.getImageReceiver().getLottieAnimation() != null && !enterImageView.getImageReceiver().getLottieAnimation().isRunning() && !enterImageView.getImageReceiver().getLottieAnimation().isGeneratingCache()) { enterImageView.getImageReceiver().getLottieAnimation().start(); } + waitingAnimation = false; } }; @@ -1203,7 +1281,7 @@ public void 
onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { @Override protected void dispatchDraw(Canvas canvas) { super.dispatchDraw(canvas); - if (imageReceiver.getLottieAnimation() != null) { + if (imageReceiver.getLottieAnimation() != null && !waitingAnimation) { imageReceiver.getLottieAnimation().start(); } if (shouldSwitchToLoopView && !switchedToLoopView && imageReceiver.getLottieAnimation() != null && imageReceiver.getLottieAnimation().isLastFrame() && loopImageView.imageReceiver.getLottieAnimation() != null && loopImageView.imageReceiver.getLottieAnimation().hasBitmap()) { @@ -1257,7 +1335,7 @@ private void setReaction(ReactionsLayoutInBubble.VisibleReaction react, int posi resetAnimation(); currentReaction = react; selected = selectedReactions.contains(react); - hasEnterAnimation = currentReaction.emojicon != null && (!showCustomEmojiReaction() || allReactionsIsDefault) && !SharedConfig.getLiteMode().enabled(); + hasEnterAnimation = currentReaction.emojicon != null && (showCustomEmojiReaction() || allReactionsIsDefault) && LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_REACTIONS); if (currentReaction.emojicon != null) { updateImage(react); @@ -1299,16 +1377,21 @@ private void updateImage(ReactionsLayoutInBubble.VisibleReaction react) { if (currentReaction.emojicon != null) { TLRPC.TL_availableReaction defaultReaction = MediaDataController.getInstance(currentAccount).getReactionsMap().get(currentReaction.emojicon); if (defaultReaction != null) { - SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(defaultReaction.activate_animation, Theme.key_windowBackgroundGray, 1.0f); - if (SharedConfig.getLiteMode().enabled()) { - enterImageView.getImageReceiver().clearImage(); - loopImageView.getImageReceiver().setImage(ImageLocation.getForDocument(defaultReaction.select_animation), "60_60_firstframe", null, null, hasEnterAnimation ? null : svgThumb, 0, "tgs", currentReaction, 0); + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(defaultReaction.activate_animation, Theme.key_windowBackgroundWhiteGrayIcon, 0.2f); + if (!LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_REACTIONS)) { + if (SharedConfig.getDevicePerformanceClass() <= SharedConfig.PERFORMANCE_CLASS_LOW) { + loopImageView.getImageReceiver().setImage(ImageLocation.getForDocument(defaultReaction.select_animation), "60_60_firstframe", null, null, hasEnterAnimation ? null : svgThumb, 0, "tgs", currentReaction, 0); + } else { + enterImageView.getImageReceiver().setImage(ImageLocation.getForDocument(defaultReaction.appear_animation), ReactionsUtils.APPEAR_ANIMATION_FILTER, null, null, svgThumb, 0, "tgs", react, 0); + loopImageView.getImageReceiver().setImage(ImageLocation.getForDocument(defaultReaction.select_animation), "60_60_firstframe", null, null, hasEnterAnimation ? null : svgThumb, 0, "tgs", currentReaction, 0); + } } else { enterImageView.getImageReceiver().setImage(ImageLocation.getForDocument(defaultReaction.appear_animation), ReactionsUtils.APPEAR_ANIMATION_FILTER, null, null, svgThumb, 0, "tgs", react, 0); loopImageView.getImageReceiver().setImage(ImageLocation.getForDocument(defaultReaction.select_animation), ReactionsUtils.SELECT_ANIMATION_FILTER, null, null, hasEnterAnimation ? 
null : svgThumb, 0, "tgs", currentReaction, 0); } pressedBackupImageView.getImageReceiver().setImage(ImageLocation.getForDocument(defaultReaction.select_animation), ReactionsUtils.SELECT_ANIMATION_FILTER, null, null, svgThumb, 0, "tgs", react, 0); + preloadImageReceiver.setAllowStartLottieAnimation(false); MediaDataController.getInstance(currentAccount).preloadImage(preloadImageReceiver, ImageLocation.getForDocument(defaultReaction.around_animation), ReactionsEffectOverlay.getFilterForAroundAnimation()); } } @@ -1343,10 +1426,13 @@ public boolean play(int delay) { if (enterImageView.getImageReceiver().getLottieAnimation() != null && !enterImageView.getImageReceiver().getLottieAnimation().isGeneratingCache() && !isEnter) { isEnter = true; if (delay == 0) { + waitingAnimation = false; enterImageView.getImageReceiver().getLottieAnimation().stop(); enterImageView.getImageReceiver().getLottieAnimation().setCurrentFrame(0, false); playRunnable.run(); + } else { + waitingAnimation = true; enterImageView.getImageReceiver().getLottieAnimation().stop(); enterImageView.getImageReceiver().getLottieAnimation().setCurrentFrame(0, false); AndroidUtilities.runOnUIThread(playRunnable, delay); @@ -1362,7 +1448,7 @@ public boolean play(int delay) { if (!isEnter) { loopImageView.setScaleY(0); loopImageView.setScaleX(0); - loopImageView.animate().scaleX(1f).scaleY(1).setDuration(150).setStartDelay(delay).start(); + loopImageView.animate().scaleX(1f).scaleY(1).setDuration(150).setStartDelay((long) (delay * durationScale)).start(); isEnter = true; } } @@ -1516,11 +1602,19 @@ public void didReceivedNotification(int id, int account, Object... args) { if (chatFull.id == waitingLoadingChatId && getVisibility() != View.VISIBLE && !(chatFull.available_reactions instanceof TLRPC.TL_chatReactionsNone)) { setMessage(messageObject, null); setVisibility(View.VISIBLE); - startEnterAnimation(); + startEnterAnimation(false); } } } + @Override + public boolean dispatchTouchEvent(MotionEvent ev) { + if (getAlpha() < 0.5f) { + return false; + } + return super.dispatchTouchEvent(ev); + } + @Override public void setAlpha(float alpha) { if (getAlpha() != alpha && alpha == 0) { @@ -1551,7 +1645,7 @@ public InternalImageView(Context context) { super(context); } - public void play(int delay) { + public void play(int delay, boolean animated) { isEnter = true; invalidate(); if (valueAnimator != null) { @@ -1559,19 +1653,22 @@ public void play(int delay) { valueAnimator.cancel(); } - valueAnimator = ValueAnimator.ofFloat(getScaleX(), 1f); - valueAnimator.setInterpolator(AndroidUtilities.overshootInterpolator); - valueAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { - @Override - public void onAnimationUpdate(ValueAnimator animation) { + if (animated) { + valueAnimator = ValueAnimator.ofFloat(getScaleX(), 1f); + valueAnimator.setInterpolator(AndroidUtilities.overshootInterpolator); + valueAnimator.addUpdateListener(animation -> { float s = (float) animation.getAnimatedValue(); setScaleX(s); setScaleY(s); customReactionsContainer.invalidate(); - } - }); - valueAnimator.setDuration(300); - valueAnimator.start(); + }); + valueAnimator.setStartDelay((long) (delay * durationScale)); + valueAnimator.setDuration(300); + valueAnimator.start(); + } else { + setScaleX(1f); + setScaleY(1f); + } } public void resetAnimation() { @@ -1622,4 +1719,12 @@ protected void dispatchDraw(Canvas canvas) { public float expandSize() { return (int) (getPullingLeftProgress() * AndroidUtilities.dp(6)); } + + public void 
setParentLayout(ChatScrimPopupContainerLayout layout) { + parentLayout = layout; + } + + public static boolean allowSmoothEnterTransition() { + return SharedConfig.deviceIsHigh(); + } } \ No newline at end of file diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerListView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerListView.java index f4325abb24..b3b9f8939e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerListView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerListView.java @@ -1621,12 +1621,12 @@ public void setSelectorDrawableColor(int color) { selectorDrawable = Theme.createRadSelectorDrawable(color, selectorRadius, 0); } else if (topBottomSelectorRadius > 0) { selectorDrawable = Theme.createRadSelectorDrawable(color, topBottomSelectorRadius, topBottomSelectorRadius); - } else if (selectorRadius > 0) { + } else if (selectorRadius > 0 && selectorType != Theme.RIPPLE_MASK_CIRCLE_20DP) { selectorDrawable = Theme.createSimpleSelectorRoundRectDrawable(selectorRadius, 0, color, 0xff000000); } else if (selectorType == 2) { selectorDrawable = Theme.getSelectorDrawable(color, false); } else { - selectorDrawable = Theme.createSelectorDrawable(color, selectorType); + selectorDrawable = Theme.createSelectorDrawable(color, selectorType, selectorRadius); } selectorDrawable.setCallback(this); } @@ -1853,6 +1853,10 @@ public void setListSelectorColor(int color) { Theme.setSelectorDrawableColor(selectorDrawable, color, true); } + public Integer getSelectorColor(int position) { + return null; + } + public void setOnItemClickListener(OnItemClickListener listener) { onItemClickListener = listener; } @@ -2271,6 +2275,10 @@ private void positionSelector(int position, View sel, boolean manageHotspot, flo selectorDrawable.setVisible(false, false); selectorDrawable.setState(StateSet.NOTHING); } + Integer color = getSelectorColor(position); + if (color != null) { + setListSelectorColor(color); + } selectorDrawable.setBounds(selectorRect); if (positionChanged) { if (getVisibility() == VISIBLE) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/RoundVideoPlayingDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/RoundVideoPlayingDrawable.java index 7c0499ed1c..f49f6c10d9 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/RoundVideoPlayingDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/RoundVideoPlayingDrawable.java @@ -36,7 +36,7 @@ public class RoundVideoPlayingDrawable extends Drawable { public int timeColor; int alpha = 255; - private final Theme.ResourcesProvider resourcesProvider; + private Theme.ResourcesProvider resourcesProvider; public RoundVideoPlayingDrawable(View view, Theme.ResourcesProvider resourcesProvider) { super(); @@ -44,6 +44,10 @@ public RoundVideoPlayingDrawable(View view, Theme.ResourcesProvider resourcesPro parentView = view; } + public void setResourcesProvider(Theme.ResourcesProvider resourcesProvider) { + this.resourcesProvider = resourcesProvider; + } + private void update() { long newTime = System.currentTimeMillis(); long dt = newTime - lastUpdateTime; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTabStrip.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTabStrip.java index 121b600d67..c6881f24e6 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTabStrip.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTabStrip.java @@ -44,6 +44,7 @@ import org.telegram.messenger.Emoji; import org.telegram.messenger.FileLoader; import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.MessageObject; import org.telegram.messenger.R; import org.telegram.messenger.SharedConfig; @@ -280,43 +281,6 @@ private void checkViewIndex(String key, View view, int index) { futureTabsPositions.put(index, view); } - public TextView addIconTabWithCounter(int id, Drawable drawable) { - String key = "textTab" + id; - final int position = tabCount++; - - FrameLayout tab = (FrameLayout) prevTypes.get(key); - TextView textView; - if (tab != null) { - textView = (TextView) tab.getChildAt(1); - checkViewIndex(key, tab, position); - } else { - tab = new FrameLayout(getContext()); - tab.setFocusable(true); - tabsContainer.addView(tab, position); - - ImageView imageView = new ImageView(getContext()); - imageView.setImageDrawable(drawable); - imageView.setScaleType(ImageView.ScaleType.CENTER); - tab.setOnClickListener(v -> delegate.onPageSelected((Integer) v.getTag(R.id.index_tag))); - tab.addView(imageView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - - textView = new TextView(getContext()); - textView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 12); - textView.setTextColor(getThemedColor(Theme.key_chat_emojiPanelBadgeText)); - textView.setGravity(Gravity.CENTER); - textView.setBackgroundDrawable(Theme.createRoundRectDrawable(AndroidUtilities.dp(9), getThemedColor(Theme.key_chat_emojiPanelBadgeBackground))); - textView.setMinWidth(AndroidUtilities.dp(18)); - textView.setPadding(AndroidUtilities.dp(5), 0, AndroidUtilities.dp(5), AndroidUtilities.dp(1)); - tab.addView(textView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 18, Gravity.TOP | Gravity.LEFT, 26, 6, 0, 0)); - } - tab.setTag(R.id.index_tag, position); - tab.setSelected(position == currentPosition); - - tabTypes.put(key, tab); - return textView; - } - public FrameLayout addIconTab(int id, Drawable drawable) { String key = "tab" + id; final int position = tabCount++; @@ -655,7 +619,7 @@ public void setImages() { thumbDrawable = (Drawable) thumb; } if (sticker instanceof TLRPC.Document) { - tabView.imageView.setImage(ImageLocation.getForDocument((TLRPC.Document) sticker), SharedConfig.getLiteMode().enabled() ? "36_36_firstframe" : "36_36_nolimit", (Drawable) null, null); + tabView.imageView.setImage(ImageLocation.getForDocument((TLRPC.Document) sticker), !LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_STICKERS_KEYBOARD) ? 
"36_36_firstframe" : "36_36_nolimit", (Drawable) null, null); } else { tabView.imageView.setImageDrawable(thumbDrawable); } @@ -666,11 +630,10 @@ public void setImages() { ImageLocation imageLocation; if (object instanceof TLRPC.Document) { - TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(sticker.thumbs, 90); if (!tabView.inited) { tabView.svgThumb = DocumentObject.getSvgThumb((TLRPC.Document) object, Theme.key_emptyListPlaceholder, 0.2f); } - imageLocation = ImageLocation.getForDocument(thumb, sticker); + imageLocation = ImageLocation.getForDocument(sticker); } else if (object instanceof TLRPC.PhotoSize) { TLRPC.PhotoSize thumb = (TLRPC.PhotoSize) object; int thumbVersion = 0; @@ -692,9 +655,10 @@ public void setImages() { tabView.inited = true; SvgHelper.SvgDrawable svgThumb = tabView.svgThumb; BackupImageView imageView = tabView.imageView; - String imageFilter = SharedConfig.getLiteMode().enabled() ? "40_40_firstframe" : "40_40"; + final boolean lite = !LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_STICKERS_KEYBOARD); + String imageFilter = lite ? "40_40_firstframe" : "40_40"; if (MessageObject.isVideoSticker(sticker) && sticker.thumbs != null && sticker.thumbs.size() > 0) { - if (SharedConfig.getLiteMode().enabled()) { + if (lite) { TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(sticker.thumbs, 90); imageView.setImage(ImageLocation.getForDocument(thumb, sticker), "40_40", svgThumb, 0, parentObject); } else if (svgThumb != null) { @@ -704,9 +668,9 @@ public void setImages() { } } else if (MessageObject.isAnimatedStickerDocument(sticker, true)) { if (svgThumb != null) { - imageView.setImage(ImageLocation.getForDocument(sticker), imageFilter, svgThumb, 0, parentObject); + imageView.setImage(imageLocation, imageFilter, svgThumb, 0, parentObject); } else { - imageView.setImage(ImageLocation.getForDocument(sticker), imageFilter, imageLocation, null, 0, parentObject); + imageView.setImage(imageLocation, imageFilter, imageLocation, null, 0, parentObject); } } else if (imageLocation.imageType == FileLoader.IMAGE_TYPE_LOTTIE) { imageView.setImage(imageLocation, imageFilter, "tgs", svgThumb, parentObject); @@ -744,6 +708,14 @@ protected void onScrollChanged(int l, int t, int oldl, int oldt) { private Paint selectorPaint = new Paint(); + private boolean showSelected = true; + private AnimatedFloat showSelectedAlpha = new AnimatedFloat(this, 350, CubicBezierInterpolator.EASE_OUT_QUINT); + + public void showSelected(boolean show) { + this.showSelected = show; + invalidate(); + } + @Override protected void dispatchDraw(Canvas canvas) { final float dif = (stickerTabWidth - stickerTabExpandedWidth); @@ -765,6 +737,8 @@ protected void dispatchDraw(Canvas canvas) { height = getHeight() - AndroidUtilities.dp(50) * (1f - expandProgress); } + float selectedAlpha = showSelectedAlpha.set(showSelected ? 
1 : 0); + if (!(isInEditMode() || tabCount == 0) && indicatorHeight >= 0) { float position = currentPositionAnimated.set(currentPosition); int floorPosition = (int) Math.floor(position); @@ -812,6 +786,7 @@ protected void dispatchDraw(Canvas canvas) { tabBounds.set(cx - w / 2, cy - h / 2, cx + w / 2, cy + h / 2); selectorPaint.setColor(0x2effffff & getThemedColor(Theme.key_chat_emojiPanelIcon)); + selectorPaint.setAlpha((int) (selectorPaint.getAlpha() * selectedAlpha)); canvas.drawRoundRect(tabBounds, AndroidUtilities.dp(8), AndroidUtilities.dp(8), selectorPaint); } @@ -913,8 +888,10 @@ public void setUnderlineColorResource(int resId) { } public void setUnderlineHeight(int value) { - underlineHeight = value; - invalidate(); + if (underlineHeight != value) { + underlineHeight = value; + invalidate(); + } } protected void invalidateOverlays() { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchDownloadsContainer.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchDownloadsContainer.java index 4feb1e6323..17d16234ad 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchDownloadsContainer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchDownloadsContainer.java @@ -141,11 +141,12 @@ public void onScrollStateChanged(RecyclerView recyclerView, int newState) { boolean openInPhotoViewer = message.canPreviewDocument(); if (!openInPhotoViewer) { boolean noforwards = message.messageOwner != null && message.messageOwner.noforwards; - if (message.isFromChat()) { - TLRPC.Chat chat = MessagesController.getInstance(currentAccount).getChat(-message.getFromChatId()); - if (chat != null) { - noforwards = chat.noforwards; - } + TLRPC.Chat chatTo = messageObject.messageOwner.peer_id.channel_id != 0 ? MessagesController.getInstance(UserConfig.selectedAccount).getChat(messageObject.messageOwner.peer_id.channel_id) : null; + if (chatTo == null) { + chatTo = messageObject.messageOwner.peer_id.chat_id != 0 ? 
MessagesController.getInstance(UserConfig.selectedAccount).getChat(messageObject.messageOwner.peer_id.chat_id) : null; + } + if (chatTo != null) { + noforwards = chatTo.noforwards; } openInPhotoViewer = openInPhotoViewer || noforwards; } @@ -301,7 +302,8 @@ public void update(boolean animated) { } for (int i = 0; i < recentLoadingFilesTmp.size(); i++) { - if (FileLoader.getDocumentFileName(recentLoadingFilesTmp.get(i).getDocument()).toLowerCase().contains(q)) { + String documentName = FileLoader.getDocumentFileName(recentLoadingFilesTmp.get(i).getDocument()); + if (documentName != null && documentName.toLowerCase().contains(q)) { MessageObject messageObject = new MessageObject(currentAccount, recentLoadingFilesTmp.get(i).messageOwner, false, false); messageObject.mediaExists = recentLoadingFilesTmp.get(i).mediaExists; messageObject.setQuery(searchQuery); @@ -390,7 +392,7 @@ public boolean areItemsTheSame(int oldItemPosition, int newItemPosition) { } else if (newItemPosition >= recentFilesStartRow && newItemPosition < recentFilesEndRow) { newItem = recentLoadingFiles.get(newItemPosition - recentFilesStartRow); } - if (newItem != null && oldItem != null) { + if (newItem != null && oldItem != null && newItem.getDocument() != null && oldItem.getDocument() != null) { return newItem.getDocument().id == oldItem.getDocument().id; } return false; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchStateDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchStateDrawable.java new file mode 100644 index 0000000000..7cb4bb7d98 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchStateDrawable.java @@ -0,0 +1,339 @@ +package org.telegram.ui.Components; + +import static org.telegram.messenger.AndroidUtilities.lerp; + +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.ColorFilter; +import android.graphics.Paint; +import android.graphics.Path; +import android.graphics.PixelFormat; +import android.graphics.Rect; +import android.graphics.RectF; +import android.graphics.drawable.Drawable; + +import androidx.annotation.IntDef; +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; + +import com.google.zxing.common.detector.MathUtils; + +import org.telegram.messenger.AndroidUtilities; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +public class SearchStateDrawable extends Drawable { + + @IntDef({ State.STATE_SEARCH, State.STATE_BACK, State.STATE_PROGRESS }) + @Retention(RetentionPolicy.SOURCE) + public @interface State { + int STATE_SEARCH = 0; + int STATE_BACK = 1; + int STATE_PROGRESS = 2; + } + + private int alpha = 0xFF; + private Paint paint; + + private Path path = new Path(); + + private RectF progressRect = new RectF(); + private final float progressRadius = .25f; + private long progressStart = -1; + private boolean progressStartedWithOverTo; + private float progressAngleFrom = 0, progressAngleTo = 0; + private float[] progressSegments = new float[2]; + + private @State int fromState; + private @State int toState = State.STATE_SEARCH; + private boolean waitingForProgressToEnd = false, wereNotWaitingForProgressToEnd; + + private AnimatedFloat progress = new AnimatedFloat(1, this::invalidateSelf, 0, 350, CubicBezierInterpolator.EASE_OUT_QUINT); + private Runnable delaySetProgress; + + public SearchStateDrawable() { + paint = new Paint(Paint.ANTI_ALIAS_FLAG); + paint.setColor(Color.WHITE); + paint.setStyle(Paint.Style.STROKE); + 
paint.setStrokeJoin(Paint.Join.ROUND); + paint.setStrokeCap(Paint.Cap.ROUND); + paint.setStrokeWidth(AndroidUtilities.dp(1.333f)); + } + + public @State int getIconState() { + return this.toState; + } + + public void setIconState(@State int state) { + setIconState(state, true); + } + public void setIconState(@State int state, boolean animated) { + setIconState(state, animated, false); + } + + private void setIconState(@State int state, boolean animated, boolean skipProgressDelay) { + if (getIconState() == state) { + if (state != State.STATE_PROGRESS) { + AndroidUtilities.cancelRunOnUIThread(delaySetProgress); + delaySetProgress = null; + } + return; + } + + if (!skipProgressDelay && state == State.STATE_PROGRESS) { + if (delaySetProgress == null) { + AndroidUtilities.runOnUIThread(delaySetProgress = () -> { + delaySetProgress = null; + setIconState(state, animated, true); + }, 65); + } + return; + } else if (delaySetProgress != null) { + AndroidUtilities.cancelRunOnUIThread(delaySetProgress); + } + + if (progress.get() < 1 && animated) { + setIconState(toState, false); + } + + if (state == State.STATE_PROGRESS) { + progressAngleFrom = 180; + progressStart = -1; + } else if (toState == State.STATE_PROGRESS) { + if (state == State.STATE_SEARCH) { + progressAngleTo = -45; + } else { + progressAngleTo = 0; + } + } + + if (animated) { + fromState = toState; + toState = state; + waitingForProgressToEnd = fromState == State.STATE_PROGRESS && state != State.STATE_PROGRESS; + progress.set(0, true); + } else { + fromState = toState = state; + waitingForProgressToEnd = false; + progress.set(1, true); + } + invalidateSelf(); + } + + public void setStrokeWidth(float strokeWidth) { + paint.setStrokeWidth(strokeWidth); + } + + public void setColor(int color) { + paint.setColor(color); + } + + @Override + public void draw(@NonNull Canvas canvas) { + final Rect bounds = getBounds(); + this.mn = Math.min(bounds.width(), bounds.height()); + this.cx = bounds.centerX(); + this.cy = bounds.centerY(); + + if (alpha < 0xFF) { + canvas.saveLayerAlpha(bounds.left, bounds.top, bounds.right, bounds.bottom, alpha, Canvas.ALL_SAVE_FLAG); + } + + float value = progress.set(waitingForProgressToEnd ? 0 : 1); + + float searchValue = toState == State.STATE_SEARCH ? fromState == State.STATE_SEARCH ? 1 : value : fromState == State.STATE_SEARCH ? 1f - value : 0; + float backValue = toState == State.STATE_BACK ? fromState == State.STATE_BACK ? 1 : value : fromState == State.STATE_BACK ? 1f - value : 0; + float progressValue = toState == State.STATE_PROGRESS ? fromState == State.STATE_PROGRESS ? 1 : value : fromState == State.STATE_PROGRESS ? 1f - value : 0; + + if (searchValue > 0) { + // o + drawCircle( + canvas, + lerp(x(.25f), x(.444f), searchValue), + lerp(y(.5f), y(.444f), searchValue), + lerp(0, w(.208f), searchValue) + ); + } + + if (searchValue > 0 || backValue > 0) { + // — + canvas.save(); + canvas.rotate(searchValue * 45, cx, cy); + drawLine( + canvas, + lerp3( + x(.914f), x(.7638f), fromState == State.STATE_PROGRESS ? x(.5f + progressRadius) : x(.2409f), + searchValue, backValue, progressValue + ), y(.5f), + lerp3( + x(.658f), x(.2409f), fromState == State.STATE_PROGRESS ? x(.5f + progressRadius) : x(.2409f), + searchValue, backValue, progressValue + ), y(.5f) + ); + canvas.restore(); + } + + if (backValue > 0) { + // < + float ax = fromState == State.STATE_PROGRESS ? 
lerp(x(.5f + progressRadius), x(.2409f), backValue) : x(.2409f); + + canvas.save(); + canvas.rotate(searchValue * 45, cx, cy); + drawLines( + canvas, + + ax + x(.2452f) * backValue, + lerp(y(.5f), y(.25f), backValue), + + ax, y(.5f), + + ax + x(.2452f) * backValue, + lerp(y(.5f), y(.75f), backValue) + ); + canvas.restore(); + } + + if (progressValue > 0) { + if (progressStart < 0 && progressValue > .8f) { + progressStart = System.currentTimeMillis(); + wereNotWaitingForProgressToEnd = waitingForProgressToEnd; + } + if (progressStart > 0) { + + CircularProgressDrawable.getSegments( + (System.currentTimeMillis() - progressStart) % 5400f, + progressSegments + ); + float fromAngle = progressSegments[0], toAngle = progressSegments[1]; + if (getIconState() != State.STATE_PROGRESS && !waitingForProgressToEnd) { + float m = Math.max(0, (float) Math.floor((fromAngle - 180) / 360f) * 360f + 180); + toAngle = Math.min(toAngle, m + progressAngleTo); + fromAngle = Math.min(fromAngle, m + progressAngleTo); + fromAngle = lerp(toAngle, fromAngle, progressValue); + } + + boolean progressOverTo = containsAngle(progressAngleTo, progressAngleFrom + fromAngle, progressAngleFrom + toAngle); + if (waitingForProgressToEnd && !wereNotWaitingForProgressToEnd) { + wereNotWaitingForProgressToEnd = waitingForProgressToEnd; + progressStartedWithOverTo = progressOverTo; + } + if (progressStartedWithOverTo && !progressOverTo) { + progressStartedWithOverTo = false; + } + if (waitingForProgressToEnd && progressOverTo && !progressStartedWithOverTo) { + waitingForProgressToEnd = false; + } + + progressRect.set(x(.5f - progressRadius), y(.5f - progressRadius), x(.5f + progressRadius), y(.5f + progressRadius)); + canvas.drawArc( + progressRect, + progressAngleFrom + fromAngle, + toAngle - fromAngle, + false, + paint + ); + + invalidateSelf(); + } + } + + if (alpha < 0xFF) { + canvas.restore(); + } + + if (value < 1) { + invalidateSelf(); + } + } + + private boolean containsAngle(float angle, float angleFrom, float angleTo) { + angleFrom = angleFrom % 360; + if (angleFrom < 0) { + angleFrom = 360 + angleFrom; + } + angleTo = angleTo % 360; + if (angleTo < 0) { + angleTo = 360 + angleTo; + } + if (angleFrom > angleTo) + return angle >= angleFrom || angle <= angleTo; + return angle >= angleFrom && angle <= angleTo; + } + + private void drawCircle(Canvas canvas, float x, float y, float r) { + if (r < w(.075f)) { + return; + } + canvas.drawCircle(x, y, r, paint); + } + + private void drawLine(Canvas canvas, float x1, float y1, float x2, float y2) { + if (MathUtils.distance(x1, y1, x2, y2) <= w(.075f)) { + return; + } + canvas.drawLine(x1, y1, x2, y2, paint); + } + + private void drawLines(Canvas canvas, float x1, float y1, float x2, float y2, float x3, float y3) { + if (Math.max(MathUtils.distance(x1, y1, x2, y2), MathUtils.distance(x3, y3, x2, y2)) <= w(.075f)) { + return; + } + path.rewind(); + path.moveTo(x1, y1); + path.lineTo(x2, y2); + path.lineTo(x3, y3); + canvas.drawPath(path, paint); + } + + private float lerp3( + float x1, + float x2, + float x3, + // t1 + t2 + t3 = 1 + float t1, + float t2, + float t3 + ) { + return x1 * t1 + x2 * t2 + x3 * t3; + } + + private float mn, cx, cy; + + private float x(float t) { + return cx - mn * (.5f - t); + } + + private float y(float t) { + return cy - mn * (.5f - t); + } + + private float w(float t) { + return mn * t; + } + + @Override + public void setAlpha(int alpha) { + this.alpha = alpha; + } + + @Override + public void setColorFilter(@Nullable ColorFilter colorFilter) { + 
paint.setColorFilter(colorFilter); + } + + @Override + public int getOpacity() { + return PixelFormat.TRANSPARENT; + } + + @Override + public int getIntrinsicWidth() { + return AndroidUtilities.dp(24); + } + + @Override + public int getIntrinsicHeight() { + return AndroidUtilities.dp(24); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchViewPager.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchViewPager.java index 72433b117a..41442d18ce 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchViewPager.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchViewPager.java @@ -145,6 +145,14 @@ public void notifyDataSetChanged() { } } }; + if (initialDialogsType == DialogsActivity.DIALOGS_TYPE_BOT_REQUEST_PEER) { + ArrayList dialogs = fragment.getDialogsArray(currentAccount, initialDialogsType, folderId, true); + ArrayList dialogIds = new ArrayList<>(); + for (int i = 0; i < dialogs.size(); ++i) { + dialogIds.add(dialogs.get(i).id); + } + dialogsSearchAdapter.setFilterDialogIds(dialogIds); + } fragmentView = (SizeNotifierFrameLayout) fragment.getFragmentView(); searchListView = new BlurredRecyclerView(context) { @@ -492,7 +500,7 @@ public void onActionBarItemClick(int id) { AlertDialog alertDialog = builder.show(); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } else if (id == speedItemId) { @@ -510,9 +518,9 @@ public void onActionBarItemClick(int id) { } else if (id == forwardItemId) { Bundle args = new Bundle(); args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 3); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_FORWARD); DialogsActivity fragment = new DialogsActivity(args); - fragment.setDelegate((fragment1, dids, message, param) -> { + fragment.setDelegate((fragment1, dids, message, param, topicsFragment) -> { ArrayList fmessages = new ArrayList<>(); Iterator idIterator = selectedFiles.keySet().iterator(); while (idIterator.hasNext()) { @@ -545,13 +553,14 @@ public void onActionBarItemClick(int id) { args1.putLong("chat_id", -did); } if (!AccountInstance.getInstance(currentAccount).getMessagesController().checkCanOpenChat(args1, fragment1)) { - return; + return true; } } ChatActivity chatActivity = new ChatActivity(args1); fragment1.presentFragment(chatActivity, true); chatActivity.showFieldPanelForForward(true, fmessages); } + return true; }); parent.presentFragment(fragment); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SeekBarView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SeekBarView.java index c6210db571..a6bfff9e53 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SeekBarView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SeekBarView.java @@ -66,7 +66,7 @@ public class SeekBarView extends FrameLayout { private float transitionProgress = 1f; private int transitionThumbX; private int separatorsCount; - private int lineWidthDp = 2; + private int lineWidthDp = 3; private boolean twoSided; private final Theme.ResourcesProvider resourcesProvider; @@ -474,7 +474,7 @@ protected void onDraw(Canvas canvas) { private ArrayList> timestamps; private CharSequence lastCaption; - private long lastVideoDuration; + private long lastDuration; private float timestampsAppearing = 0; private long lastTimestampsAppearingUpdate; 
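Side note for readers skimming the SeekBarView.java change: the hunk that follows generalizes the timestamp handling from a video-only duration (lastVideoDuration) to any media duration (lastDuration), while keeping the existing mapping of caption links of the form "audio?<seconds>" onto normalized seek positions (seconds * 1000 / duration). The standalone sketch below illustrates that mapping outside the Telegram codebase; the class name, method name, and regex are invented for the example and are not part of the patch, which instead uses URLSpan labels, substring(6) and Utilities.parseInt.

// Illustrative sketch only, not part of the patch: converts "audio?<seconds>"
// link URLs into normalized seek-bar positions, the same arithmetic the
// updateTimestamps() hunk below performs with its renamed `duration` argument.
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class TimestampPositionSketch {

    // Matches link URLs of the form "audio?<seconds>" (hypothetical parsing for this example).
    private static final Pattern AUDIO_LINK = Pattern.compile("^audio\\?(\\d+)$");

    // durationMs plays the role of the renamed lastDuration field (milliseconds).
    public static List<Float> toPositions(List<String> urls, long durationMs) {
        List<Float> positions = new ArrayList<>();
        if (durationMs <= 0) {
            return positions; // mirrors the early clearTimestamps() path for invalid durations
        }
        for (String url : urls) {
            Matcher m = AUDIO_LINK.matcher(url);
            if (m.matches()) {
                long seconds = Long.parseLong(m.group(1));
                float position = seconds * 1000L / (float) durationMs;
                if (position >= 0f && position <= 1f) {
                    positions.add(position); // keep only marks that fall inside the track
                }
            }
        }
        return positions;
    }

    public static void main(String[] args) {
        // A 3-minute track with chapter links at 0:30 and 2:00.
        System.out.println(toPositions(List.of("audio?30", "audio?120"), 180_000L));
        // -> [0.16666667, 0.6666667]
    }
}

The point of the rename in the patch is that the same fraction works for music and voice messages, not just videos; the drawing code only ever consumes the normalized position.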
@@ -500,18 +500,18 @@ public void clearTimestamps() { timestampLabel[0] = timestampLabel[1] = null; } lastCaption = null; - lastVideoDuration = -1; + lastDuration = -1; } - public void updateTimestamps(MessageObject messageObject, Long videoDuration) { + public void updateTimestamps(MessageObject messageObject, Long duration) { if (messageObject == null) { clearTimestamps(); return; } - if (videoDuration == null) { - videoDuration = (long) messageObject.getDuration() * 1000L; + if (duration == null) { + duration = (long) messageObject.getDuration() * 1000L; } - if (videoDuration == null || videoDuration < 0) { + if (duration == null || duration < 0) { clearTimestamps(); return; } @@ -519,15 +519,15 @@ public void updateTimestamps(MessageObject messageObject, Long videoDuration) { if (messageObject.isYouTubeVideo()) { if (messageObject.youtubeDescription == null && messageObject.messageOwner.media.webpage.description != null) { messageObject.youtubeDescription = SpannableString.valueOf(messageObject.messageOwner.media.webpage.description); - MessageObject.addUrlsByPattern(messageObject.isOut(), messageObject.youtubeDescription, false, 3, (int) (long) videoDuration, false); + MessageObject.addUrlsByPattern(messageObject.isOut(), messageObject.youtubeDescription, false, 3, (int) (long) duration, false); } text = messageObject.youtubeDescription; } - if (text == lastCaption && lastVideoDuration == videoDuration) { + if (text == lastCaption && lastDuration == duration) { return; } lastCaption = text; - lastVideoDuration = videoDuration; + lastDuration = duration; if (!(text instanceof Spanned)) { timestamps = null; currentTimestamp = -1; @@ -563,7 +563,7 @@ public void updateTimestamps(MessageObject messageObject, Long videoDuration) { if (link != null && link.getURL() != null && link.label != null && link.getURL().startsWith("audio?")) { Integer seconds = Utilities.parseInt(link.getURL().substring(6)); if (seconds != null && seconds >= 0) { - float position = seconds * 1000L / (float) videoDuration; + float position = seconds * 1000L / (float) duration; String label = link.label; SpannableStringBuilder builder = new SpannableStringBuilder(label); Emoji.replaceEmoji(builder, timestampLabelPaint.getFontMetricsInt(), AndroidUtilities.dp(14), false); @@ -683,7 +683,7 @@ private void drawTimestampLabel(Canvas canvas) { float left = selectorWidth / 2f; float right = getMeasuredWidth() - selectorWidth / 2f; - float rightPadded = right; + float rightPadded = right - (lastDuration > 1000L * 60 * 10 ? 
AndroidUtilities.dp(36) : 0); float width = Math.abs(left - rightPadded) - AndroidUtilities.dp(16 + 50); if (lastWidth > 0 && Math.abs(lastWidth - width) > 0.01f) { @@ -742,7 +742,7 @@ private void drawTimestampLabel(Canvas canvas) { float changeT = CubicBezierInterpolator.DEFAULT.getInterpolation(timestampChangeT); canvas.save(); - float bottom = getMeasuredHeight() / 2f + AndroidUtilities.dp(13); + float bottom = getMeasuredHeight() / 2f + AndroidUtilities.dp(14); canvas.translate(left + AndroidUtilities.dp(25), bottom); timestampLabelPaint.setColor(getThemedColor(Theme.key_player_time)); if (timestampLabel[1] != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ShareAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ShareAlert.java index 1b9c0ef64c..894caa3825 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ShareAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ShareAlert.java @@ -684,7 +684,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { ignoreLayout = false; } fullHeight = contentSize >= totalHeight; - topOffset = (fullHeight || !SharedConfig.smoothKeyboard) ? 0 : totalHeight - contentSize; + topOffset = fullHeight ? 0 : totalHeight - contentSize; ignoreLayout = true; checkCurrentList(false); ignoreLayout = false; @@ -699,7 +699,7 @@ private void onMeasureInternal(int widthMeasureSpec, int heightMeasureSpec) { widthSize -= backgroundPaddingLeft * 2; - int keyboardSize = SharedConfig.smoothKeyboard ? 0 : measureKeyboardHeight(); + int keyboardSize = 0; if (!commentTextView.isWaitingForKeyboardOpen() && keyboardSize <= AndroidUtilities.dp(20) && !commentTextView.isPopupShowing() && !commentTextView.isAnimatePopupClosing()) { ignoreLayout = true; commentTextView.hideEmojiView(); @@ -710,7 +710,7 @@ private void onMeasureInternal(int widthMeasureSpec, int heightMeasureSpec) { if (keyboardSize <= AndroidUtilities.dp(20)) { if (!AndroidUtilities.isInMultiwindow) { int paddingBottom; - if (SharedConfig.smoothKeyboard && keyboardVisible) { + if (keyboardVisible) { paddingBottom = 0; } else { paddingBottom = commentTextView.getEmojiPadding(); @@ -764,7 +764,7 @@ protected void onLayout(boolean changed, int l, int t, int r, int b) { int keyboardSize = measureKeyboardHeight(); int paddingBottom; - if (SharedConfig.smoothKeyboard && keyboardVisible) { + if (keyboardVisible) { paddingBottom = 0; } else { paddingBottom = keyboardSize <= AndroidUtilities.dp(20) && !AndroidUtilities.isInMultiwindow && !AndroidUtilities.isTablet() ? 
commentTextView.getEmojiPadding() : 0; @@ -1942,9 +1942,8 @@ public boolean onTouch(View v, MotionEvent event) { } sendPopupWindow.showAtLocation(view, Gravity.LEFT | Gravity.TOP, location[0] + view.getMeasuredWidth() - layout.getMeasuredWidth() + AndroidUtilities.dp(8), y); sendPopupWindow.dimBehind(); - if (!NekoConfig.disableVibration.Bool()) { + if (!NekoConfig.disableVibration.Bool()) view.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } return true; } @@ -2036,6 +2035,10 @@ protected void onSend(LongSparseArray dids, int count, TLRPC.TL_fo } + protected boolean doSend(LongSparseArray dids, TLRPC.TL_forumTopic topic) { + return false; + } + private int getCurrentTop() { if (gridView.getChildCount() != 0) { View child = gridView.getChildAt(0); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SharedMediaLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SharedMediaLayout.java index 3a10a75a69..53d37281b1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SharedMediaLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SharedMediaLayout.java @@ -2201,11 +2201,17 @@ public void onScrolled(RecyclerView recyclerView, int dx, int dy) { } if (mediaPage.selectedType == 7 && view instanceof UserCell) { final TLRPC.ChatParticipant participant; + int index = position; if (!chatUsersAdapter.sortedUsers.isEmpty()) { - participant = chatUsersAdapter.chatInfo.participants.participants.get(chatUsersAdapter.sortedUsers.get(position)); - } else { - participant = chatUsersAdapter.chatInfo.participants.participants.get(position); + if (position >= chatUsersAdapter.sortedUsers.size()) { + return false; + } + index = chatUsersAdapter.sortedUsers.get(position); + } + if (index < 0 || index >= chatUsersAdapter.chatInfo.participants.participants.size()) { + return false; } + participant = chatUsersAdapter.chatInfo.participants.participants.get(index); return onMemberClick(participant, true); } else if (mediaPage.selectedType == 1 && view instanceof SharedDocumentCell) { return onItemLongClick(((SharedDocumentCell) view).getMessage(), view, 0); @@ -3155,7 +3161,9 @@ private void loadFastScrollData(boolean force) { req.limit = 100; req.peer = MessagesController.getInstance(profileActivity.getCurrentAccount()).getInputPeer(dialog_id); int reqIndex = sharedMediaData[type].requestIndex; - int reqId = ConnectionsManager.getInstance(profileActivity.getCurrentAccount()).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + int reqId = ConnectionsManager.getInstance(profileActivity.getCurrentAccount()).sendRequest(req, (response, error) -> + AndroidUtilities.runOnUIThread(() -> + NotificationCenter.getInstance(profileActivity.getCurrentAccount()).doOnIdle(() -> { if (error != null) { return; } @@ -3183,7 +3191,7 @@ private void loadFastScrollData(boolean force) { } } photoVideoAdapter.notifyDataSetChanged(); - })); + }))); ConnectionsManager.getInstance(profileActivity.getCurrentAccount()).bindRequestToGuid(reqId, profileActivity.getClassGuid()); } } @@ -3276,9 +3284,9 @@ public void onActionBarItemClick(View v, int id) { Bundle args = new Bundle(); args.putBoolean("onlySelect", true); args.putBoolean("canSelectTopics", true); - args.putInt("dialogsType", 3); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_FORWARD); DialogsActivity fragment = new DialogsActivity(args); - fragment.setDelegate((fragment1, dids, message, param) -> { + 
fragment.setDelegate((fragment1, dids, message, param, topicsFragment) -> { ArrayList fmessages = new ArrayList<>(); for (int a = 1; a >= 0; a--) { ArrayList ids = new ArrayList<>(); @@ -3338,7 +3346,7 @@ public void onActionBarItemClick(View v, int id) { args1.putLong("chat_id", -did); } if (!profileActivity.getMessagesController().checkCanOpenChat(args1, fragment1)) { - return; + return true; } } @@ -3349,6 +3357,7 @@ public void onActionBarItemClick(View v, int id) { fragment1.presentFragment(chatActivity, true); chatActivity.showFieldPanelForForward(true, fmessages); } + return true; }); profileActivity.presentFragment(fragment); } else if (id == gotochat) { @@ -3858,7 +3867,11 @@ public void didReceivedNotification(int id, int account, Object... args) { photoVideoAdapter.notifyDataSetChanged(); } } else { - adapter.notifyDataSetChanged(); + try { + adapter.notifyDataSetChanged(); + } catch (Throwable e) { + + } } if (sharedMediaData[type].messages.isEmpty() && !sharedMediaData[type].loading) { if (listView != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SimpleThemeDescription.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SimpleThemeDescription.java index 39efa4c62c..7c79b6cc4e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SimpleThemeDescription.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SimpleThemeDescription.java @@ -18,4 +18,8 @@ public static ArrayList createThemeDescriptions(ThemeDescripti } return l; } + + public static void add(ArrayList descriptions, Runnable upd, String... keys) { + descriptions.addAll(SimpleThemeDescription.createThemeDescriptions(upd::run, keys)); + } } \ No newline at end of file diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SizeNotifierFrameLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SizeNotifierFrameLayout.java index f5f20dd780..cdcaca27eb 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SizeNotifierFrameLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SizeNotifierFrameLayout.java @@ -31,6 +31,7 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.DispatchQueue; import org.telegram.messenger.FileLog; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.SharedConfig; import org.telegram.messenger.Utilities; import org.telegram.ui.ActionBar.ActionBar; @@ -121,129 +122,134 @@ public SizeNotifierFrameLayout(Context context, INavigationLayout layout) { setWillNotDraw(false); parentLayout = layout; adjustPanLayoutHelper = createAdjustPanLayoutHelper(); - backgroundView = new View(context) { - @Override - protected void onDraw(Canvas canvas) { - if (backgroundDrawable == null || skipBackgroundDrawing) { - return; + addView(backgroundView = new BackgroundView(context), LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + checkLayerType(); + } + + private class BackgroundView extends View { + public BackgroundView(Context context) { + super(context); + } + + @Override + protected void onDraw(Canvas canvas) { + if (backgroundDrawable == null || skipBackgroundDrawing) { + return; + } + Drawable newDrawable = getNewDrawable(); + if (newDrawable != backgroundDrawable && newDrawable != null) { + if (Theme.isAnimatingColor()) { + oldBackgroundDrawable = backgroundDrawable; } - Drawable newDrawable = getNewDrawable(); - if (newDrawable != backgroundDrawable && newDrawable != null) { - if (Theme.isAnimatingColor()) { - 
oldBackgroundDrawable = backgroundDrawable; - } - if (newDrawable instanceof MotionBackgroundDrawable) { - MotionBackgroundDrawable motionBackgroundDrawable = (MotionBackgroundDrawable) newDrawable; - motionBackgroundDrawable.setParentView(backgroundView); - } - backgroundDrawable = newDrawable; + if (newDrawable instanceof MotionBackgroundDrawable) { + MotionBackgroundDrawable motionBackgroundDrawable = (MotionBackgroundDrawable) newDrawable; + motionBackgroundDrawable.setParentView(backgroundView); } - float themeAnimationValue = parentLayout != null ? parentLayout.getThemeAnimationValue() : 1.0f; - for (int a = 0; a < 2; a++) { - Drawable drawable = a == 0 ? oldBackgroundDrawable : backgroundDrawable; - if (drawable == null) { - continue; - } - if (a == 1 && oldBackgroundDrawable != null && parentLayout != null) { - drawable.setAlpha((int) (255 * themeAnimationValue)); - } else { - drawable.setAlpha(255); - } - if (drawable instanceof MotionBackgroundDrawable) { - MotionBackgroundDrawable motionBackgroundDrawable = (MotionBackgroundDrawable) drawable; - if (motionBackgroundDrawable.hasPattern()) { - int actionBarHeight = (isActionBarVisible() ? ActionBar.getCurrentActionBarHeight() : 0) + (Build.VERSION.SDK_INT >= 21 && occupyStatusBar ? AndroidUtilities.statusBarHeight : 0); - int viewHeight = getRootView().getMeasuredHeight() - actionBarHeight; - float scaleX = (float) getMeasuredWidth() / (float) drawable.getIntrinsicWidth(); - float scaleY = (float) (viewHeight) / (float) drawable.getIntrinsicHeight(); - float scale = Math.max(scaleX, scaleY); - int width = (int) Math.ceil(drawable.getIntrinsicWidth() * scale * parallaxScale); - int height = (int) Math.ceil(drawable.getIntrinsicHeight() * scale * parallaxScale); - int x = (getMeasuredWidth() - width) / 2 + (int) translationX; - int y = backgroundTranslationY + (viewHeight - height) / 2 + actionBarHeight + (int) translationY; - canvas.save(); - canvas.clipRect(0, actionBarHeight, width, getMeasuredHeight() - bottomClip); - drawable.setBounds(x, y, x + width, y + height); - drawable.draw(canvas); - checkSnowflake(canvas); - canvas.restore(); - } else { - if (bottomClip != 0) { - canvas.save(); - canvas.clipRect(0, 0, getMeasuredWidth(), getRootView().getMeasuredHeight() - bottomClip); - } - motionBackgroundDrawable.setTranslationY(backgroundTranslationY); - int bottom = (int) (getRootView().getMeasuredHeight() - backgroundTranslationY + translationY); - if (animationInProgress) { - bottom -= emojiOffset; - } else if (emojiHeight != 0) { - bottom -= emojiHeight; - } - drawable.setBounds(0, 0, getMeasuredWidth(), bottom); - drawable.draw(canvas); - if (bottomClip != 0) { - canvas.restore(); - } - } - } else if (drawable instanceof ColorDrawable) { - if (bottomClip != 0) { - canvas.save(); - canvas.clipRect(0, 0, getMeasuredWidth(), getMeasuredHeight() - bottomClip); - } - drawable.setBounds(0, 0, getMeasuredWidth(), getRootView().getMeasuredHeight()); + backgroundDrawable = newDrawable; + } + float themeAnimationValue = parentLayout != null ? parentLayout.getThemeAnimationValue() : 1.0f; + for (int a = 0; a < 2; a++) { + Drawable drawable = a == 0 ? 
oldBackgroundDrawable : backgroundDrawable; + if (drawable == null) { + continue; + } + if (a == 1 && oldBackgroundDrawable != null && parentLayout != null) { + drawable.setAlpha((int) (255 * themeAnimationValue)); + } else { + drawable.setAlpha(255); + } + if (drawable instanceof MotionBackgroundDrawable) { + MotionBackgroundDrawable motionBackgroundDrawable = (MotionBackgroundDrawable) drawable; + if (motionBackgroundDrawable.hasPattern()) { + int actionBarHeight = (isActionBarVisible() ? ActionBar.getCurrentActionBarHeight() : 0) + (Build.VERSION.SDK_INT >= 21 && occupyStatusBar ? AndroidUtilities.statusBarHeight : 0); + int viewHeight = getRootView().getMeasuredHeight() - actionBarHeight; + float scaleX = (float) getMeasuredWidth() / (float) drawable.getIntrinsicWidth(); + float scaleY = (float) (viewHeight) / (float) drawable.getIntrinsicHeight(); + float scale = Math.max(scaleX, scaleY); + int width = (int) Math.ceil(drawable.getIntrinsicWidth() * scale * parallaxScale); + int height = (int) Math.ceil(drawable.getIntrinsicHeight() * scale * parallaxScale); + int x = (getMeasuredWidth() - width) / 2 + (int) translationX; + int y = backgroundTranslationY + (viewHeight - height) / 2 + actionBarHeight + (int) translationY; + canvas.save(); + canvas.clipRect(0, actionBarHeight, width, getMeasuredHeight() - bottomClip); + drawable.setBounds(x, y, x + width, y + height); drawable.draw(canvas); checkSnowflake(canvas); - if (bottomClip != 0) { - canvas.restore(); - } - } else if (drawable instanceof GradientDrawable) { + canvas.restore(); + } else { if (bottomClip != 0) { canvas.save(); canvas.clipRect(0, 0, getMeasuredWidth(), getRootView().getMeasuredHeight() - bottomClip); } - drawable.setBounds(0, backgroundTranslationY, getMeasuredWidth(), backgroundTranslationY + getRootView().getMeasuredHeight()); + motionBackgroundDrawable.setTranslationY(backgroundTranslationY); + int bottom = (int) (getRootView().getMeasuredHeight() - backgroundTranslationY + translationY); + if (animationInProgress) { + bottom -= emojiOffset; + } else if (emojiHeight != 0) { + bottom -= emojiHeight; + } + drawable.setBounds(0, 0, getMeasuredWidth(), bottom); drawable.draw(canvas); - checkSnowflake(canvas); if (bottomClip != 0) { canvas.restore(); } - } else if (drawable instanceof BitmapDrawable) { - BitmapDrawable bitmapDrawable = (BitmapDrawable) drawable; - if (bitmapDrawable.getTileModeX() == Shader.TileMode.REPEAT) { - canvas.save(); - float scale = 2.0f / AndroidUtilities.density; - canvas.scale(scale, scale); - drawable.setBounds(0, 0, (int) Math.ceil(getMeasuredWidth() / scale), (int) Math.ceil(getRootView().getMeasuredHeight() / scale)); - drawable.draw(canvas); - checkSnowflake(canvas); - canvas.restore(); - } else { - int actionBarHeight = (isActionBarVisible() ? ActionBar.getCurrentActionBarHeight() : 0) + (Build.VERSION.SDK_INT >= 21 && occupyStatusBar ? 
AndroidUtilities.statusBarHeight : 0); - int viewHeight = getRootView().getMeasuredHeight() - actionBarHeight; - float scaleX = (float) getMeasuredWidth() / (float) drawable.getIntrinsicWidth(); - float scaleY = (float) (viewHeight) / (float) drawable.getIntrinsicHeight(); - float scale = Math.max(scaleX, scaleY); - int width = (int) Math.ceil(drawable.getIntrinsicWidth() * scale * parallaxScale); - int height = (int) Math.ceil(drawable.getIntrinsicHeight() * scale * parallaxScale); - int x = (getMeasuredWidth() - width) / 2 + (int) translationX; - int y = backgroundTranslationY + (viewHeight - height) / 2 + actionBarHeight + (int) translationY; - canvas.save(); - canvas.clipRect(0, actionBarHeight, width, getMeasuredHeight() - bottomClip); - drawable.setBounds(x, y, x + width, y + height); - drawable.draw(canvas); - checkSnowflake(canvas); - canvas.restore(); - } } - if (a == 0 && oldBackgroundDrawable != null && themeAnimationValue >= 1.0f) { - oldBackgroundDrawable = null; - backgroundView.invalidate(); + } else if (drawable instanceof ColorDrawable) { + if (bottomClip != 0) { + canvas.save(); + canvas.clipRect(0, 0, getMeasuredWidth(), getMeasuredHeight() - bottomClip); + } + drawable.setBounds(0, 0, getMeasuredWidth(), getRootView().getMeasuredHeight()); + drawable.draw(canvas); + checkSnowflake(canvas); + if (bottomClip != 0) { + canvas.restore(); + } + } else if (drawable instanceof GradientDrawable) { + if (bottomClip != 0) { + canvas.save(); + canvas.clipRect(0, 0, getMeasuredWidth(), getRootView().getMeasuredHeight() - bottomClip); } + drawable.setBounds(0, backgroundTranslationY, getMeasuredWidth(), backgroundTranslationY + getRootView().getMeasuredHeight()); + drawable.draw(canvas); + checkSnowflake(canvas); + if (bottomClip != 0) { + canvas.restore(); + } + } else if (drawable instanceof BitmapDrawable) { + BitmapDrawable bitmapDrawable = (BitmapDrawable) drawable; + if (bitmapDrawable.getTileModeX() == Shader.TileMode.REPEAT) { + canvas.save(); + float scale = 2.0f / AndroidUtilities.density; + canvas.scale(scale, scale); + drawable.setBounds(0, 0, (int) Math.ceil(getMeasuredWidth() / scale), (int) Math.ceil(getRootView().getMeasuredHeight() / scale)); + drawable.draw(canvas); + checkSnowflake(canvas); + canvas.restore(); + } else { + int actionBarHeight = (isActionBarVisible() ? ActionBar.getCurrentActionBarHeight() : 0) + (Build.VERSION.SDK_INT >= 21 && occupyStatusBar ? 
AndroidUtilities.statusBarHeight : 0); + int viewHeight = getRootView().getMeasuredHeight() - actionBarHeight; + float scaleX = (float) getMeasuredWidth() / (float) drawable.getIntrinsicWidth(); + float scaleY = (float) (viewHeight) / (float) drawable.getIntrinsicHeight(); + float scale = Math.max(scaleX, scaleY); + int width = (int) Math.ceil(drawable.getIntrinsicWidth() * scale * parallaxScale); + int height = (int) Math.ceil(drawable.getIntrinsicHeight() * scale * parallaxScale); + int x = (getMeasuredWidth() - width) / 2 + (int) translationX; + int y = backgroundTranslationY + (viewHeight - height) / 2 + actionBarHeight + (int) translationY; + canvas.save(); + canvas.clipRect(0, actionBarHeight, width, getMeasuredHeight() - bottomClip); + drawable.setBounds(x, y, x + width, y + height); + drawable.draw(canvas); + checkSnowflake(canvas); + canvas.restore(); + } + } + if (a == 0 && oldBackgroundDrawable != null && themeAnimationValue >= 1.0f) { + oldBackgroundDrawable = null; + backgroundView.invalidate(); } } - }; - addView(backgroundView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - checkLayerType(); + } } public void setBackgroundImage(Drawable bitmap, boolean motion) { @@ -416,7 +422,7 @@ public void setEmojiOffset(boolean animInProgress, float offset) { } private void checkSnowflake(Canvas canvas) { - if (((Theme.canStartHolidayAnimation() && !SharedConfig.getLiteMode().enabled()) || NaConfig.INSTANCE.getChatDecoration().Int() == 1) && NaConfig.INSTANCE.getChatDecoration().Int() != 2) { + if (((Theme.canStartHolidayAnimation() && LiteMode.isEnabled(LiteMode.FLAG_CHAT_BACKGROUND)) || NaConfig.INSTANCE.getChatDecoration().Int() == 1) && NaConfig.INSTANCE.getChatDecoration().Int() != 2) { if (snowflakesEffect == null) { snowflakesEffect = new SnowflakesEffect(1); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SlotsDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SlotsDrawable.java index 3b5769182a..e2043b417b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SlotsDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SlotsDrawable.java @@ -196,7 +196,7 @@ public boolean setBaseDice(ChatMessageCell messageCell, TLRPC.TL_messages_sticke AndroidUtilities.runOnUIThread(() -> { loadingInBackground = false; if (!secondLoadingInBackground && destroyAfterLoading) { - recycle(); + recycle(true); } }); return; @@ -240,7 +240,7 @@ public boolean setBaseDice(ChatMessageCell messageCell, TLRPC.TL_messages_sticke AndroidUtilities.runOnUIThread(() -> { loadingInBackground = false; if (!secondLoadingInBackground && destroyAfterLoading) { - recycle(); + recycle(true); return; } nativePtr = nativePtrs[0]; @@ -268,7 +268,7 @@ public boolean setDiceNumber(ChatMessageCell messageCell, int number, TLRPC.TL_m AndroidUtilities.runOnUIThread(() -> { secondLoadingInBackground = false; if (!loadingInBackground && destroyAfterLoading) { - recycle(); + recycle(true); } }); return; @@ -359,7 +359,7 @@ public boolean setDiceNumber(ChatMessageCell messageCell, int number, TLRPC.TL_m } secondLoadingInBackground = false; if (!loadingInBackground && destroyAfterLoading) { - recycle(); + recycle(true); return; } secondNativePtr = secondNativePtrs[0]; @@ -373,7 +373,7 @@ public boolean setDiceNumber(ChatMessageCell messageCell, int number, TLRPC.TL_m } @Override - public void recycle() { + public void recycle(boolean uiThread) { isRunning = false; isRecycled = true; checkRunningTasks(); diff --git 
a/TMessagesProj/src/main/java/org/telegram/ui/Components/SnowflakesEffect.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SnowflakesEffect.java index 549b1dfccd..a7c16946f6 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SnowflakesEffect.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SnowflakesEffect.java @@ -15,6 +15,7 @@ import android.view.View; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.SharedConfig; import org.telegram.messenger.Utilities; import org.telegram.ui.ActionBar.Theme; @@ -169,7 +170,7 @@ private void updateParticles(long dt) { } public void onDraw(View parent, Canvas canvas) { - if (parent == null || canvas == null || SharedConfig.getLiteMode().enabled()) { + if (parent == null || canvas == null || !LiteMode.isEnabled(LiteMode.FLAG_CHAT_BACKGROUND)) { return; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SpeedIconDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SpeedIconDrawable.java new file mode 100644 index 0000000000..11c4fca4c7 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SpeedIconDrawable.java @@ -0,0 +1,146 @@ +package org.telegram.ui.Components; + +import android.graphics.Canvas; +import android.graphics.ColorFilter; +import android.graphics.Paint; +import android.graphics.PixelFormat; +import android.graphics.drawable.Drawable; +import android.text.TextUtils; +import android.util.Log; +import android.view.Gravity; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.R; + +import java.text.DecimalFormat; +import java.text.DecimalFormatSymbols; +import java.util.Locale; + +public class SpeedIconDrawable extends Drawable { + + private final AnimatedTextView.AnimatedTextDrawable textDrawable; + private final Drawable.Callback callback = new Callback() { + @Override + public void invalidateDrawable(@NonNull Drawable who) { + SpeedIconDrawable.this.invalidateSelf(); + } + @Override + public void scheduleDrawable(@NonNull Drawable who, @NonNull Runnable what, long when) { + SpeedIconDrawable.this.scheduleSelf(what, when); + } + @Override + public void unscheduleDrawable(@NonNull Drawable who, @NonNull Runnable what) { + SpeedIconDrawable.this.unscheduleSelf(what); + } + }; + private final Paint outlinePaint; + + public SpeedIconDrawable() { + this(true); + } + + public SpeedIconDrawable(boolean outline) { + textDrawable = new AnimatedTextView.AnimatedTextDrawable(false, true, true); + textDrawable.setCallback(callback); + textDrawable.setAnimationProperties(.3f, 0, 165, CubicBezierInterpolator.EASE_OUT_QUINT); + textDrawable.setGravity(Gravity.CENTER_HORIZONTAL); + textDrawable.setTypeface(AndroidUtilities.getTypeface(AndroidUtilities.TYPEFACE_ROBOTO_MEDIUM)); + textDrawable.setTextSize(AndroidUtilities.dp(10)); + textDrawable.getPaint().setStyle(Paint.Style.FILL_AND_STROKE); + textDrawable.getPaint().setStrokeWidth(AndroidUtilities.dpf2(.6f)); + + if (outline) { + outlinePaint = new Paint(Paint.ANTI_ALIAS_FLAG); + outlinePaint.setStyle(Paint.Style.STROKE); + } else { + outlinePaint = null; + } + } + +// private static Locale decimalFormatLocale; +// private static DecimalFormat decimalFormat; + public static String formatNumber(float value) { + final float precision = Math.abs(value - .25f) < 0.001f && false ? 
100F : 10F; + float roundedValue = Math.round(value * precision) / precision; + if (roundedValue == (long) roundedValue) { + return "" + (long) roundedValue; + } else { + return "" + roundedValue; + } +// if (decimalFormat == null || decimalFormatLocale != Locale.getDefault()) { +// DecimalFormatSymbols symbols = DecimalFormatSymbols.getInstance(decimalFormatLocale = Locale.getDefault()); +// symbols.setDecimalSeparator('.'); +// decimalFormat = new DecimalFormat("###,##0.0", symbols); +// } +// return decimalFormat.format(value); + } + + public void setValue(float value, boolean animated) { + String text = formatNumber(value) + "X"; + if (!animated || !TextUtils.equals(textDrawable.getText(), text)) { + textDrawable.cancelAnimation(); + textDrawable.setText(text, animated); + invalidateSelf(); + } + } + + @Override + public void draw(@NonNull Canvas canvas) { + if (outlinePaint != null) { + outlinePaint.setStrokeWidth(AndroidUtilities.dpf2(1.6f)); + AndroidUtilities.rectTmp.set( + (getIntrinsicWidth() - textDrawable.getCurrentWidth()) / 2f - AndroidUtilities.dpf2(3f), + (getIntrinsicHeight() - textDrawable.getHeight()) / 2f + AndroidUtilities.dpf2(0.2f), + (getIntrinsicWidth() + textDrawable.getCurrentWidth()) / 2f + AndroidUtilities.dpf2(3f), + (getIntrinsicHeight() + textDrawable.getHeight()) / 2f + ); + canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dpf2(3f), AndroidUtilities.dpf2(3f), outlinePaint); + } + + textDrawable.getPaint().setStrokeWidth(AndroidUtilities.dpf2(.3f)); + textDrawable.setBounds(0, (int) ((getIntrinsicHeight() - textDrawable.getHeight()) / 2F), getIntrinsicWidth(), (int) ((getIntrinsicHeight() + textDrawable.getHeight()) / 2F)); + textDrawable.draw(canvas); + } + + @Override + public int getIntrinsicWidth() { + return AndroidUtilities.dp(24); + } + + @Override + public int getIntrinsicHeight() { + return AndroidUtilities.dp(24); + } + + @Override + public void setAlpha(int alpha) { + textDrawable.setAlpha(alpha); + if (outlinePaint != null) { + outlinePaint.setAlpha(alpha); + } + } + + public void setColor(int color) { + textDrawable.setTextColor(color); + if (outlinePaint != null) { + outlinePaint.setColor(color); + } + } + + @Override + public void setColorFilter(@Nullable ColorFilter colorFilter) { +// textDrawable.setColorFilter(colorFilter); +// if (outlinePaint != null) { +// outlinePaint.setColorFilter(colorFilter); +// } + } + + @Override + public int getOpacity() { + return PixelFormat.TRANSPARENT; + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerCategoriesListView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerCategoriesListView.java new file mode 100644 index 0000000000..6eeb1a79bb --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerCategoriesListView.java @@ -0,0 +1,960 @@ +package org.telegram.ui.Components; + +import static org.telegram.messenger.AndroidUtilities.dp; + +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.ValueAnimator; +import android.content.Context; +import android.content.res.Configuration; +import android.graphics.Canvas; +import android.graphics.Paint; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.RectF; +import android.graphics.drawable.Drawable; +import android.text.TextUtils; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import 
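The new SpeedIconDrawable above renders the current playback speed as text inside an optional rounded outline: formatNumber() rounds to one decimal and drops a trailing ".0", and setValue() appends "X" and animates the text change through AnimatedTextDrawable. A small usage sketch, assuming only the constructor and setters defined in this file; the view the drawable is attached to is not part of this diff:

// Sketch: drive the drawable from playback-speed changes; attach it wherever a Drawable icon is accepted.
SpeedIconDrawable speedIcon = new SpeedIconDrawable(true);   // true = draw the rounded outline
speedIcon.setColor(0xFFFFFFFF);                              // any ARGB icon colour

speedIcon.setValue(1.0f, false);   // shows "1X" (trailing ".0" is dropped)
speedIcon.setValue(1.5f, true);    // animates to "1.5X"
speedIcon.setValue(2.0f, true);    // animates to "2X"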
android.view.animation.OvershootInterpolator; + +import androidx.annotation.IntDef; +import androidx.annotation.NonNull; +import androidx.core.graphics.ColorUtils; +import androidx.recyclerview.widget.LinearLayoutManager; + +import org.telegram.SQLite.SQLiteCursor; +import org.telegram.SQLite.SQLiteDatabase; +import org.telegram.SQLite.SQLitePreparedStatement; +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.Fetcher; +import org.telegram.messenger.FileLog; +import org.telegram.messenger.LiteMode; +import org.telegram.messenger.MessagesStorage; +import org.telegram.messenger.R; +import org.telegram.messenger.SharedConfig; +import org.telegram.messenger.UserConfig; +import org.telegram.messenger.Utilities; +import org.telegram.tgnet.ConnectionsManager; +import org.telegram.tgnet.NativeByteBuffer; +import org.telegram.tgnet.TLObject; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.Theme; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.util.HashSet; +import java.util.Set; + +public class StickerCategoriesListView extends RecyclerListView { + + @IntDef({CategoriesType.DEFAULT, CategoriesType.STATUS, CategoriesType.PROFILE_PHOTOS}) + @Retention(RetentionPolicy.SOURCE) + public static @interface CategoriesType { + int DEFAULT = 0; + int STATUS = 1; + int PROFILE_PHOTOS = 2; + } + + private float shownButtonsAtStart = 6.5f; + + private static EmojiGroupFetcher fetcher = new EmojiGroupFetcher(); + public static Fetcher search = new EmojiSearch(); + private EmojiCategory[] categories = null; + + private Adapter adapter; + private LinearLayoutManager layoutManager; + + private AnimatedFloat leftBoundAlpha = new AnimatedFloat(this, 360, CubicBezierInterpolator.EASE_OUT_QUINT); + private AnimatedFloat rightBoundAlpha = new AnimatedFloat(this, 360, CubicBezierInterpolator.EASE_OUT_QUINT); + private Drawable leftBoundDrawable; + private Drawable rightBoundDrawable; + private Paint backgroundPaint; + + @CategoriesType + private int categoriesType; + private static Set loadedIconsType = new HashSet<>(); + + private Paint selectedPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + + private int paddingWidth; + private int dontOccupyWidth; + private Utilities.Callback onScrollIntoOccupiedWidth; + private Utilities.Callback onScrollFully; + private boolean scrolledIntoOccupiedWidth; + private boolean scrolledFully; + + private View paddingView; + + public Integer layerNum; + + private int selectedCategoryIndex = -1; + private Utilities.Callback onCategoryClick; + + public static void preload(int account, @CategoriesType int type) { + fetcher.fetch(account, type, emojiGroups -> { + if (emojiGroups.groups == null) { + return; + } + for (TLRPC.TL_emojiGroup group : emojiGroups.groups) { + AnimatedEmojiDrawable.getDocumentFetcher(account).fetchDocument(group.icon_emoji_id, null); + } + }); + } + + public StickerCategoriesListView(Context context, @CategoriesType int categoriesType) { + this(context, null, categoriesType, null); + } + + public StickerCategoriesListView(Context context, @CategoriesType int categoriesType, Theme.ResourcesProvider resourcesProvider) { + this(context, null, categoriesType, resourcesProvider); + } + + public StickerCategoriesListView(Context context, EmojiCategory[] additionalCategories, @CategoriesType int categoriesType) { + this(context, additionalCategories, categoriesType, null); + } + + public StickerCategoriesListView(Context context, EmojiCategory[] additionalCategories, 
@CategoriesType int categoriesType, Theme.ResourcesProvider resourcesProvider) { + super(context, resourcesProvider); + + this.categoriesType = categoriesType; + setPadding(0, 0, dp(2), 0); + + setAdapter(adapter = new Adapter()); + setLayoutManager(layoutManager = new LinearLayoutManager(context)); + layoutManager.setOrientation(HORIZONTAL); + +// setSelectorRadius(dp(15)); +// setSelectorType(Theme.RIPPLE_MASK_CIRCLE_20DP); +// setSelectorDrawableColor(getThemedColor(Theme.key_listSelector)); + selectedPaint.setColor(getThemedColor(Theme.key_listSelector)); + setSelectorDrawableColor(0); + + setWillNotDraw(false); + + setOnItemClickListener((view, position) -> onItemClick(position, view)); + + long start = System.currentTimeMillis(); + fetcher.fetch(UserConfig.selectedAccount, categoriesType, (emojiGroups) -> { + if (emojiGroups != null) { + categories = new EmojiCategory[(additionalCategories == null ? 0 : additionalCategories.length) + emojiGroups.groups.size()]; + int i = 0; + if (additionalCategories != null) { + for (; i < additionalCategories.length; ++i) { + categories[i] = additionalCategories[i]; + } + } + for (int j = 0; j < emojiGroups.groups.size(); ++j) { + categories[i + j] = EmojiCategory.remote(emojiGroups.groups.get(j)); + } + adapter.notifyDataSetChanged(); + setCategoriesShownT(0); + updateCategoriesShown(categoriesShouldShow, System.currentTimeMillis() - start > 16); + } + }); + } + + public void setShownButtonsAtStart(float buttonsCount) { + shownButtonsAtStart = buttonsCount; + } + + private void onItemClick(int position, View view) { + if (position < 1) { + return; + } + + if (categories == null) { + return; + } + + EmojiCategory category = categories[position - 1]; + int minimumPadding = dp(64); + if (getMeasuredWidth() - view.getRight() < minimumPadding) { + smoothScrollBy((minimumPadding - (getMeasuredWidth() - view.getRight())), 0, CubicBezierInterpolator.EASE_OUT_QUINT); + } else if (view.getLeft() < minimumPadding) { + smoothScrollBy(-(minimumPadding - view.getLeft()), 0, CubicBezierInterpolator.EASE_OUT_QUINT); + } + if (onCategoryClick != null) { + onCategoryClick.run(category); + } + } + + private int getScrollToStartWidth() { + if (getChildCount() > 0) { + View child = getChildAt(0); + if (child instanceof CategoryButton) { + return paddingWidth + Math.max(0, (getChildAdapterPosition(child) - 1) * getHeight()) + (-child.getLeft()); + } else { + return -child.getLeft(); + } + } + return 0; + } + + public void scrollToStart() { + smoothScrollBy(-getScrollToStartWidth(), 0, CubicBezierInterpolator.EASE_OUT_QUINT); + } + + public void selectCategory(EmojiCategory category) { + int index = -1; + if (categories != null) { + for (int i = 0; i < categories.length; ++i) { + if (categories[i] == category) { + index = i; + break; + } + } + } + selectCategory(index); + } + + public void selectCategory(int categoryIndex) { + if (selectedCategoryIndex < 0) { + selectedIndex.set(categoryIndex, true); + } + this.selectedCategoryIndex = categoryIndex; + for (int i = 0; i < getChildCount(); ++i) { + View child = getChildAt(i); + if (child instanceof CategoryButton) { + final int position = getChildAdapterPosition(child); + ((CategoryButton) child).setSelected(selectedCategoryIndex == position - 1, true); + } + } + invalidate(); + } + + public EmojiCategory getSelectedCategory() { + if (categories == null || selectedCategoryIndex < 0 || selectedCategoryIndex >= categories.length) { + return null; + } + return categories[selectedCategoryIndex]; + } + + @Override + protected 
void onAttachedToWindow() { + super.onAttachedToWindow(); + updateCategoriesShown(categoriesShouldShow, false); + } + + @Override + protected void onConfigurationChanged(Configuration newConfig) { + super.onConfigurationChanged(newConfig); + + if (paddingView != null) { + paddingView.requestLayout(); + } + } + + private float categoriesShownT = 0; + private ValueAnimator categoriesShownAnimator; + private boolean categoriesShouldShow = true; + public void updateCategoriesShown(boolean show, boolean animated) { + categoriesShouldShow = show; + if (categories == null) { + show = false; + } + + if (categoriesShownT == (show ? 1 : 0)) { + return; + } + + if (categoriesShownAnimator != null) { + categoriesShownAnimator.cancel(); + categoriesShownAnimator = null; + } + + if (animated) { + categoriesShownAnimator = ValueAnimator.ofFloat(categoriesShownT, show ? 1 : 0); + categoriesShownAnimator.addUpdateListener(anm -> { + setCategoriesShownT((float) anm.getAnimatedValue()); + }); + categoriesShownAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + setCategoriesShownT((float) categoriesShownAnimator.getAnimatedValue()); + categoriesShownAnimator = null; + } + }); + categoriesShownAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + categoriesShownAnimator.setDuration((categories == null ? 5 : categories.length) * 120L); + categoriesShownAnimator.start(); + } else { + setCategoriesShownT(show ? 1 : 0); + } + } + + private void setCategoriesShownT(float t) { + categoriesShownT = t; + + for (int i = 0; i < getChildCount(); ++i) { + View child = getChildAt(i); + if (child instanceof CategoryButton) { + int position = getChildAdapterPosition(child); + float childT = AndroidUtilities.cascade(t, getChildCount() - 1 - position, getChildCount() - 1, 3f); + if (childT > 0 && child.getAlpha() <= 0) { + ((CategoryButton) child).play(); + } + child.setAlpha(childT); + child.setScaleX(childT); + child.setScaleY(childT); + } + } + + invalidate(); + } + + public boolean isCategoriesShown() { + return categoriesShownT > .5f; + } + + @Override + public void onScrolled(int dx, int dy) { + super.onScrolled(dx, dy); + + boolean scrolledIntoOccupiedWidth = false; + boolean scrolledFully = false; + if (getChildCount() > 0) { + View child = getChildAt(0); + if (child instanceof CategoryButton) { + scrolledIntoOccupiedWidth = true; + scrolledFully = true; + } else { + scrolledIntoOccupiedWidth = child.getRight() <= dontOccupyWidth; + } + } + if (this.scrolledIntoOccupiedWidth != scrolledIntoOccupiedWidth) { + this.scrolledIntoOccupiedWidth = scrolledIntoOccupiedWidth; + if (onScrollIntoOccupiedWidth != null) { + onScrollIntoOccupiedWidth.run(this.scrolledIntoOccupiedWidth ? 
Math.max(0, getScrollToStartWidth() - (paddingWidth - dontOccupyWidth)) : 0); + } + invalidate(); + } else if (this.scrolledIntoOccupiedWidth && onScrollIntoOccupiedWidth != null) { + onScrollIntoOccupiedWidth.run(Math.max(0, getScrollToStartWidth() - (paddingWidth - dontOccupyWidth))); + } + if (this.scrolledFully != scrolledFully) { + this.scrolledFully = scrolledFully; + if (onScrollFully != null) { + onScrollFully.run(this.scrolledFully); + } + invalidate(); + } + } + + public void setDontOccupyWidth(int dontOccupyWidth) { + this.dontOccupyWidth = dontOccupyWidth; + } + + public void setOnScrollIntoOccupiedWidth(Utilities.Callback onScrollIntoOccupiedWidth) { + this.onScrollIntoOccupiedWidth = onScrollIntoOccupiedWidth; + } + + public void setOnScrollFully(Utilities.Callback onScrollFully) { + this.onScrollFully = onScrollFully; + } + + public void setOnCategoryClick(Utilities.Callback onCategoryClick) { + this.onCategoryClick = onCategoryClick; + } + + public boolean isScrolledIntoOccupiedWidth() { + return scrolledIntoOccupiedWidth; + } + + @Override + public void setBackgroundColor(int color) { + if (backgroundPaint == null) { + backgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + } + backgroundPaint.setColor(color); + leftBoundDrawable = getContext().getResources().getDrawable(R.drawable.gradient_right).mutate(); + leftBoundDrawable.setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY)); + rightBoundDrawable = getContext().getResources().getDrawable(R.drawable.gradient_left).mutate(); + rightBoundDrawable.setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY)); + } + + private AnimatedFloat selectedAlpha = new AnimatedFloat(this, 350, CubicBezierInterpolator.EASE_OUT_QUINT); + private AnimatedFloat selectedIndex = new AnimatedFloat(this, 350, CubicBezierInterpolator.EASE_OUT_QUINT); + + @Override + public void draw(Canvas canvas) { + + if (backgroundPaint != null) { + int left = Integer.MAX_VALUE; + int right = Integer.MIN_VALUE; + + for (int i = 0; i < getChildCount(); ++i) { + final View child = getChildAt(i); + if (child instanceof CategoryButton) { + left = Math.min(left, child.getLeft()); + right = Math.max(right, child.getRight()); + } + } + + if (left < right) { + left += (getWidth() + dp(32)) * (1f - categoriesShownT); + right += (getWidth() + dp(32)) * (1f - categoriesShownT); + +// if (left > 0 && rightBoundDrawable != null) { +// rightBoundDrawable.setAlpha(0xFF); +// rightBoundDrawable.setBounds(left - rightBoundDrawable.getIntrinsicWidth(), 0, left, getHeight()); +// rightBoundDrawable.draw(canvas); +// } + canvas.drawRect(left, 0, right, getHeight(), backgroundPaint); + if (right < getWidth() && leftBoundDrawable != null) { + leftBoundDrawable.setAlpha(0xFF); + leftBoundDrawable.setBounds(right, 0, right + leftBoundDrawable.getIntrinsicWidth(), getHeight()); + leftBoundDrawable.draw(canvas); + } + } + } + + drawSelectedHighlight(canvas); + + super.draw(canvas); + + if (leftBoundDrawable != null) { + leftBoundDrawable.setAlpha((int) (0xFF * leftBoundAlpha.set(canScrollHorizontally(-1) && scrolledFully ? 1 : 0) * categoriesShownT)); + if (leftBoundDrawable.getAlpha() > 0) { + leftBoundDrawable.setBounds(0, 0, leftBoundDrawable.getIntrinsicWidth(), getHeight()); + leftBoundDrawable.draw(canvas); + } + } + +// if (rightBoundDrawable != null) { +// rightBoundDrawable.setAlpha((int) (0xFF * rightBoundAlpha.set(canScrollHorizontally(1) ? 
1 : 0) * categoriesShownT)); +// if (rightBoundDrawable.getAlpha() > 0) { +// rightBoundDrawable.setBounds(getWidth() - rightBoundDrawable.getIntrinsicWidth(), 0, getWidth(), getHeight()); +// rightBoundDrawable.draw(canvas); +// } +// } + } + + private RectF rect1 = new RectF(), rect2 = new RectF(), rect3 = new RectF(); + private void drawSelectedHighlight(Canvas canvas) { + float alpha = selectedAlpha.set(selectedCategoryIndex >= 0 ? 1 : 0); + float index = selectedCategoryIndex >= 0 ? selectedIndex.set(selectedCategoryIndex) : selectedIndex.get(); + + if (alpha <= 0) { + return; + } + + int fromPosition = Math.max(1, (int) Math.floor(index + 1)); + int toPosition = Math.max(1, (int) Math.ceil(index + 1)); + + View fromChild = null, toChild = null; + + for (int i = 0; i < getChildCount(); ++i) { + View child = getChildAt(i); + int position = getChildAdapterPosition(child); + + if (position == fromPosition) { + fromChild = child; + } + if (position == toPosition) { + toChild = child; + } + + if (fromChild != null && toChild != null) { + break; + } + } + + int wasAlpha = selectedPaint.getAlpha(); + selectedPaint.setAlpha((int) (wasAlpha * alpha)); + if (fromChild != null && toChild != null) { + float t = fromPosition == toPosition ? .5f : (index + 1 - fromPosition) / (toPosition - fromPosition); + getChildBounds(fromChild, rect1); + getChildBounds(toChild, rect2); + AndroidUtilities.lerp(rect1, rect2, t, rect3); +// float T = selectedIndex.getTransitionProgress(); +// float isMiddle = 4f * T * (1f - T); +// float hw = rect3.width() / 2 * (1f + isMiddle * .05f); +// float hh = rect3.height() / 2 * (1f - isMiddle * .1f); +// rect3.set(rect3.centerX() - hw, rect3.centerY() - hh, rect3.centerX() + hw, rect3.centerY() + hh); + canvas.drawRoundRect(rect3, AndroidUtilities.dp(15), AndroidUtilities.dp(15), selectedPaint); + } + selectedPaint.setAlpha(wasAlpha); + } + + private void getChildBounds(View child, RectF rect) { + float cx = (child.getRight() + child.getLeft()) / 2f; + float cy = (child.getBottom() + child.getTop()) / 2f; + float r = child.getWidth() / 2f - dp(1); + float s = child instanceof CategoryButton ? ((CategoryButton) child).getScale() : 1f; + rect.set( + cx - r * s, cy - r * s, + cx + r * s, cy + r * s + ); + } + + @Override + public boolean dispatchTouchEvent(MotionEvent ev) { + if (ev.getAction() == MotionEvent.ACTION_DOWN) { + final View child = findChildViewUnder(ev.getX(), ev.getY()); + if (!(child instanceof CategoryButton) || child.getAlpha() < .5f) { + return false; + } + } + return super.dispatchTouchEvent(ev); + } + + private class Adapter extends RecyclerListView.SelectionAdapter { + + private static final int VIEW_TYPE_PADDING = 0; + private static final int VIEW_TYPE_CATEGORY = 1; + + @NonNull + @Override + public ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { + View view; + if (viewType == VIEW_TYPE_PADDING) { + view = paddingView = new View(getContext()) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + int fullWidth = MeasureSpec.getSize(widthMeasureSpec); + if (fullWidth <= 0) { + fullWidth = ((View) getParent()).getMeasuredWidth(); + } + final int BUTTON_WIDTH = MeasureSpec.getSize(heightMeasureSpec) - dp(4); + super.onMeasure( + MeasureSpec.makeMeasureSpec( + paddingWidth = Math.max( + dontOccupyWidth > 0 ? 
dontOccupyWidth + dp(4) : 0, + (int) (fullWidth - Math.min((getItemCount() - 1) * BUTTON_WIDTH + dp(4), shownButtonsAtStart * BUTTON_WIDTH)) + ), + MeasureSpec.EXACTLY + ), + heightMeasureSpec + ); + } + }; + } else { + view = new CategoryButton(getContext()); + } + return new RecyclerListView.Holder(view); + } + + @Override + public void onBindViewHolder(@NonNull ViewHolder holder, int position) { + if (holder.getItemViewType() == VIEW_TYPE_CATEGORY && categories != null) { + final EmojiCategory category = categories[position - 1]; + final CategoryButton button = (CategoryButton) holder.itemView; + button.set(category, position - 1, selectedCategoryIndex == position - 1); + button.setAlpha(categoriesShownT); + button.setScaleX(categoriesShownT); + button.setScaleY(categoriesShownT); + button.play(); + } + } + + @Override + public void onViewAttachedToWindow(@NonNull ViewHolder holder) { + if (holder.getItemViewType() == VIEW_TYPE_CATEGORY) { + final CategoryButton button = (CategoryButton) holder.itemView; + final int position = holder.getAdapterPosition(); + button.setSelected(selectedCategoryIndex == position - 1, false); + button.play(); + } + } + + @Override + public int getItemViewType(int position) { + return position == 0 ? VIEW_TYPE_PADDING : VIEW_TYPE_CATEGORY; + } + + private int lastItemCount; + + @Override + public int getItemCount() { + final int itemCount = 1 + (categories == null ? 0 : categories.length); + if (itemCount != lastItemCount) { + if (paddingView != null) { + paddingView.requestLayout(); + } + lastItemCount = itemCount; + } + return itemCount; + } + + @Override + public boolean isEnabled(ViewHolder holder) { + return holder.getItemViewType() == VIEW_TYPE_CATEGORY; + } + } + + protected boolean isTabIconsAnimationEnabled(boolean loaded) { + return LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_KEYBOARD) && !loaded; + } + + static int loadedCategoryIcons = 0; + + private class CategoryButton extends RLottieImageView { + + private int imageColor; + private float selectedT; + private ValueAnimator selectedAnimator; + private int index; + + public CategoryButton(Context context) { + super(context); + + setImageColor(getThemedColor(Theme.key_chat_emojiPanelIcon)); + setScaleType(ScaleType.CENTER); + + setBackground(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector), Theme.RIPPLE_MASK_CIRCLE_20DP, dp(15))); + + setLayerNum(layerNum); + } + + public void set(EmojiCategory category, int index, boolean selected) { + this.index = index; + if (loadAnimator != null) { + loadAnimator.cancel(); + loadAnimator = null; + } + if (category.remote) { + setImageResource(0); +// cached = true; + clearAnimationDrawable(); + boolean animated = isTabIconsAnimationEnabled(true); + loaded = false; + loadProgress = 1; + AnimatedEmojiDrawable.getDocumentFetcher(UserConfig.selectedAccount) + .fetchDocument(category.documentId, document -> { + setOnlyLastFrame(!animated); + setAnimation(document, 24, 24); + playAnimation(); + }); + AndroidUtilities.runOnUIThread(() -> { + if (!loaded) { + loadProgress = 0; + } + }, 60); + } else if (category.animated) { + cached = false; + setImageResource(0); + setAnimation(category.iconResId, 24, 24); + playAnimation(); + loadProgress = 1; + } else { + clearAnimationDrawable(); + setImageResource(category.iconResId); + loadProgress = 1; + } + setSelected(selected, false); + } + + private boolean loaded = false; + + @Override + protected void onLoaded() { + loaded = true; + if (loadProgress < 1) { + if (loadAnimator != null) { + 
loadAnimator.cancel(); + loadAnimator = null; + } + loadAnimator = ValueAnimator.ofFloat(loadProgress, 1f); + loadAnimator.addUpdateListener(anm -> { + loadProgress = (float) anm.getAnimatedValue(); + invalidate(); + }); + loadAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + loadProgress = 1f; + invalidate(); + loadAnimator = null; + } + }); + loadAnimator.setDuration(320); + loadAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + loadAnimator.start(); + } + } + + public void setSelected(boolean selected, boolean animated) { + if (Math.abs(selectedT - (selected ? 1 : 0)) > .01f) { + if (selectedAnimator != null) { + selectedAnimator.cancel(); + selectedAnimator = null; + } + + if (animated) { + selectedAnimator = ValueAnimator.ofFloat(selectedT, selected ? 1 : 0); + selectedAnimator.addUpdateListener(anm -> { + updateSelectedT((float) anm.getAnimatedValue()); + }); + selectedAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + updateSelectedT((float) selectedAnimator.getAnimatedValue()); + selectedAnimator = null; + } + }); + selectedAnimator.setDuration(350); + selectedAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + selectedAnimator.start(); + } else { + updateSelectedT(selected ? 1 : 0); + } + } + } + + private void updateSelectedT(float t) { + selectedT = t; + setImageColor( + ColorUtils.blendARGB( + getThemedColor(Theme.key_chat_emojiPanelIcon), + getThemedColor(Theme.key_chat_emojiPanelIconSelected), + selectedT + ) + ); + invalidate(); + } + + public void setImageColor(int color) { + if (imageColor != color) { + setColorFilter(new PorterDuffColorFilter(imageColor = color, PorterDuff.Mode.MULTIPLY)); + } + } + + @Override + public void draw(Canvas canvas) { + updatePressedProgress(); + float scale = getScale(); + if (scale != 1) { + canvas.save(); + canvas.scale(scale, scale, getMeasuredWidth() / 2f, getMeasuredHeight() / 2f); + } + super.draw(canvas); + if (scale != 1) { + canvas.restore(); + } + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + final int size = MeasureSpec.getSize(heightMeasureSpec); + super.onMeasure( + MeasureSpec.makeMeasureSpec(size - dp(4), MeasureSpec.EXACTLY), + MeasureSpec.makeMeasureSpec(size, MeasureSpec.EXACTLY) + ); + } + + private long lastPlayed; + public void play() { + if (System.currentTimeMillis() - lastPlayed > 250) { + lastPlayed = System.currentTimeMillis(); + RLottieDrawable drawable = getAnimatedDrawable(); + if (drawable == null && getImageReceiver() != null) { + drawable = getImageReceiver().getLottieAnimation(); + } + if (drawable != null) { + drawable.stop(); + drawable.setCurrentFrame(0); + drawable.restart(true); + } else if (drawable == null) { + setProgress(0); + playAnimation(); + } + } + } + + float loadProgress = 1f; + float pressedProgress; + ValueAnimator backAnimator; + ValueAnimator loadAnimator; + + public void updatePressedProgress() { + if (isPressed() && pressedProgress != 1f) { + pressedProgress = Utilities.clamp(pressedProgress + (1000f / AndroidUtilities.screenRefreshRate) / 100f, 1f, 0); + invalidate(); + StickerCategoriesListView.this.invalidate(); + } + } + + public float getScale() { + return (0.85f + 0.15f * (1f - pressedProgress)) * loadProgress; + } + + @Override + public void setPressed(boolean pressed) { + if (isPressed() != pressed) { + super.setPressed(pressed); + invalidate(); + 
StickerCategoriesListView.this.invalidate(); + if (pressed) { + if (backAnimator != null) { + backAnimator.removeAllListeners(); + backAnimator.cancel(); + } + } + if (!pressed && pressedProgress != 0) { + backAnimator = ValueAnimator.ofFloat(pressedProgress, 0); + backAnimator.addUpdateListener(animation -> { + pressedProgress = (float) animation.getAnimatedValue(); + invalidate(); + }); + backAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + super.onAnimationEnd(animation); + backAnimator = null; + } + }); + backAnimator.setInterpolator(new OvershootInterpolator(3.0f)); + backAnimator.setDuration(350); + backAnimator.start(); + } + } + } + } + + public static class EmojiCategory { + + public boolean animated; + public int iconResId; + public String emojis; + + public boolean remote; + public long documentId; + + public String title; + + public static EmojiCategory withAnimatedIcon(int animatedIconResId, String emojis) { + EmojiCategory category = new EmojiCategory(); + category.animated = true; + category.iconResId = animatedIconResId; + category.emojis = emojis; + return category; + } + + public static EmojiCategory withIcon(int iconResId, String emojis) { + EmojiCategory category = new EmojiCategory(); + category.animated = false; + category.iconResId = iconResId; + category.emojis = emojis; + return category; + } + + public static EmojiCategory remote(TLRPC.TL_emojiGroup group) { + EmojiCategory category = new EmojiCategory(); + category.remote = true; + category.documentId = group.icon_emoji_id; + category.emojis = TextUtils.concat(group.emoticons.toArray(new String[0])).toString(); + category.title = group.title; + return category; + } + } + + private static class EmojiGroupFetcher extends Fetcher { + + @Override + protected void getRemote(int currentAccount, @CategoriesType Integer type, long hash, Utilities.Callback3 onResult) { + TLObject req; + if (type == CategoriesType.STATUS) { + req = new TLRPC.TL_messages_getEmojiStatusGroups(); + ((TLRPC.TL_messages_getEmojiStatusGroups) req).hash = (int) hash; + } else if (type == CategoriesType.PROFILE_PHOTOS) { + req = new TLRPC.TL_messages_getEmojiProfilePhotoGroups(); + ((TLRPC.TL_messages_getEmojiProfilePhotoGroups) req).hash = (int) hash; + } else { + req = new TLRPC.TL_messages_getEmojiGroups(); + ((TLRPC.TL_messages_getEmojiGroups) req).hash = (int) hash; + } + + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (res, err) -> { + if (res instanceof TLRPC.TL_messages_emojiGroupsNotModified) { + onResult.run(true, null, 0L); + } else if (res instanceof TLRPC.TL_messages_emojiGroups) { + TLRPC.TL_messages_emojiGroups result = (TLRPC.TL_messages_emojiGroups) res; + onResult.run(false, result, (long) result.hash); + } else { + onResult.run(false, null, 0L); + } + }); + } + + @Override + protected void getLocal(int currentAccount, Integer type, Utilities.Callback2 onResult) { + MessagesStorage.getInstance(currentAccount).getStorageQueue().postRunnable(() -> { + SQLiteCursor cursor = null; + try { + SQLiteDatabase database = MessagesStorage.getInstance(currentAccount).getDatabase(); + if (database != null) { + TLRPC.messages_EmojiGroups maybeResult = null; + cursor = database.queryFinalized("SELECT data FROM emoji_groups WHERE type = ?", type); + if (cursor.next()) { + NativeByteBuffer data = cursor.byteBufferValue(0); + if (data != null) { + maybeResult = TLRPC.messages_EmojiGroups.TLdeserialize(data, data.readInt32(false), true); + data.reuse(); + } + } + 
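The EmojiGroupFetcher in this section implements the three hooks of the Fetcher helper: getLocal() reads a cached TL_messages_emojiGroups blob from the emoji_groups table, getRemote() asks the server with the cached hash so it can answer "not modified", and setLocal() writes a fresh result back. Fetcher.fetch() itself is not part of this diff, so the flow below is an assumption about how a caller combines the three hooks, written against the callback arities visible in these hunks:

// Assumed flow of Fetcher<K, R>.fetch(); sketch only, not the library implementation.
void fetch(int account, Integer type, Utilities.Callback<TLRPC.TL_messages_emojiGroups> onResult) {
    getLocal(account, type, (hash, cached) -> {
        if (cached != null) {
            onResult.run(cached);                       // serve the cached groups immediately
        }
        getRemote(account, type, hash, (notModified, fresh, newHash) -> {
            if (notModified || fresh == null) {
                return;                                  // cache still valid, or request failed
            }
            setLocal(account, type, fresh, newHash);     // persist for the next cold start
            onResult.run(fresh);                         // deliver the updated groups
        });
    });
}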
+ if (!(maybeResult instanceof TLRPC.TL_messages_emojiGroups)) { + onResult.run(0L, null); + } else { + TLRPC.TL_messages_emojiGroups result = (TLRPC.TL_messages_emojiGroups) maybeResult; + onResult.run((long) result.hash, result); + } + } + } catch (Exception e) { + FileLog.e(e); + onResult.run(0L, null); + } finally { + if (cursor != null) { + cursor.dispose(); + } + } + }); + } + + @Override + protected void setLocal(int currentAccount, Integer type, TLRPC.TL_messages_emojiGroups data, long hash) { + MessagesStorage.getInstance(currentAccount).getStorageQueue().postRunnable(() -> { + try { + SQLiteDatabase database = MessagesStorage.getInstance(currentAccount).getDatabase(); + if (database != null) { + if (data == null) { + database.executeFast("DELETE FROM emoji_groups WHERE type = " + type).stepThis().dispose(); + } else { + SQLitePreparedStatement state = database.executeFast("REPLACE INTO emoji_groups VALUES(?, ?)"); + state.requery(); + NativeByteBuffer buffer = new NativeByteBuffer(data.getObjectSize()); + data.serializeToStream(buffer); + state.bindInteger(1, type); + state.bindByteBuffer(2, buffer); + state.step(); + buffer.reuse(); + state.dispose(); + } + } + } catch (Exception e) { + FileLog.e(e); + } + }); + } + } + + private static class EmojiSearch extends Fetcher { + @Override + protected void getRemote(int currentAccount, String query, long hash, Utilities.Callback3 onResult) { + TLRPC.TL_messages_searchCustomEmoji req = new TLRPC.TL_messages_searchCustomEmoji(); + req.emoticon = query; + req.hash = hash; + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (res, err) -> { + if (res instanceof TLRPC.TL_emojiListNotModified) { + onResult.run(true, null, 0L); + } else if (res instanceof TLRPC.TL_emojiList) { + TLRPC.TL_emojiList list = (TLRPC.TL_emojiList) res; + onResult.run(false, list, list.hash); + } else { + onResult.run(false, null, 0L); + } + }); + } + } +} \ No newline at end of file diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerEmptyView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerEmptyView.java index 63b520f5f5..2784390c57 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerEmptyView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerEmptyView.java @@ -15,6 +15,7 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.DocumentObject; import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.SvgHelper; @@ -79,7 +80,9 @@ public StickerEmptyView(@NonNull Context context, View progressView, int type, T public void setVisibility(int visibility) { if (getVisibility() == View.GONE && visibility == View.VISIBLE) { setSticker(); - stickerView.getImageReceiver().startAnimation(); + if (LiteMode.isEnabled(LiteMode.FLAGS_ANIMATED_STICKERS)) { + stickerView.getImageReceiver().startAnimation(); + } } else if (visibility == View.GONE) { stickerView.getImageReceiver().clearImage(); } @@ -235,6 +238,10 @@ private void setSticker() { imageFilter = "130_130"; } + if (!LiteMode.isEnabled(LiteMode.FLAGS_ANIMATED_STICKERS)) { + imageFilter += "_firstframe"; + } + if (document != null) { SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(document.thumbs, colorKey1, 0.2f); if (svgThumb != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerMasksAlert.java 
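The StickerEmptyView hunks below gate the placeholder sticker on LiteMode.FLAGS_ANIMATED_STICKERS: when the flag group is off, startAnimation() is never called and "_firstframe" is appended to the image filter so only a static frame is decoded. A compressed sketch of that pattern, using only the calls that appear in those hunks:

// Sketch: request only the first frame of an animated sticker when animations are disabled.
String imageFilter = "130_130";
if (!LiteMode.isEnabled(LiteMode.FLAGS_ANIMATED_STICKERS)) {
    imageFilter += "_firstframe";   // ask the image loader for a single static frame
}
// ... set the image on stickerView with this filter, as in setSticker() ...
if (LiteMode.isEnabled(LiteMode.FLAGS_ANIMATED_STICKERS)) {
    stickerView.getImageReceiver().startAnimation();   // only animate when allowed
}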
b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerMasksAlert.java index 6a51342bab..b4a078ca12 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerMasksAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerMasksAlert.java @@ -45,6 +45,7 @@ import org.telegram.messenger.FileLoader; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.MessageObject; @@ -142,7 +143,7 @@ public void sendSticker(TLRPC.Document sticker, String query, Object parent, boo } @Override - public boolean needSend() { + public boolean needSend(int contentType) { return false; } @@ -988,7 +989,7 @@ public void draw(Canvas canvas, long time, int w, int h, float alpha) { if (imageViewEmojis == null) { return; } - boolean drawInUi = imageViewEmojis.size() <= 4 || SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW || SharedConfig.getLiteMode().enabled(); + boolean drawInUi = imageViewEmojis.size() <= 4 || SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW || !LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_KEYBOARD); if (!drawInUi) { // boolean animatedExpandIn = animateExpandStartTime > 0 && (SystemClock.elapsedRealtime() - animateExpandStartTime) < animateExpandDuration(); for (int i = 0; i < imageViewEmojis.size(); i++) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerSetBulletinLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerSetBulletinLayout.java index af281cb38c..e911a5c4c7 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerSetBulletinLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerSetBulletinLayout.java @@ -3,6 +3,9 @@ import android.annotation.SuppressLint; import android.app.Activity; import android.content.Context; +import android.graphics.Color; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; import androidx.annotation.IntDef; import androidx.annotation.NonNull; @@ -122,6 +125,10 @@ public StickerSetBulletinLayout(@NonNull Context context, TLObject setObject, in imageView.setImage(null, null, "webp", null, setObject); } + if (MessageObject.isTextColorEmoji(sticker)) { + imageView.setColorFilter(new PorterDuffColorFilter(Color.WHITE, PorterDuff.Mode.SRC_IN)); + } + switch (type) { case TYPE_ADDED: if (stickerSet != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickersAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickersAlert.java index ba53922858..d3d84c407b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickersAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickersAlert.java @@ -224,7 +224,7 @@ public void remove(SendMessagesHelper.ImportingSticker importingSticker) { } @Override - public boolean needSend() { + public boolean needSend(int contentType) { return delegate != null; } @@ -1290,7 +1290,7 @@ private void showNameEnterAlert() { EditTextBoldCursor editText = new EditTextBoldCursor(context); editText.setBackground(null); - editText.setLineColors(Theme.getColor(Theme.key_dialogInputField), Theme.getColor(Theme.key_dialogInputFieldActivated), Theme.getColor(Theme.key_dialogTextRed2)); + 
editText.setLineColors(Theme.getColor(Theme.key_dialogInputField), Theme.getColor(Theme.key_dialogInputFieldActivated), Theme.getColor(Theme.key_dialogTextRed)); editText.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); editText.setTextColor(getThemedColor(Theme.key_dialogTextBlack)); editText.setMaxLines(1); @@ -1700,7 +1700,7 @@ private void setButton(View.OnClickListener onClickListener, String title, Strin params.leftMargin = params.topMargin = params.rightMargin = params.bottomMargin = AndroidUtilities.dp(8); emptyParams.bottomMargin = gridParams.bottomMargin = shadowParams.bottomMargin = AndroidUtilities.dp(64); } else { - pickerBottomLayout.setBackground(Theme.createSelectorWithBackgroundDrawable(getThemedColor(Theme.key_dialogBackground), getThemedColor(Theme.key_listSelector))); + pickerBottomLayout.setBackground(Theme.createSelectorWithBackgroundDrawable(getThemedColor(Theme.key_dialogBackground), Theme.multAlpha(getThemedColor(Theme.key_dialogTextRed), .1f))); pickerBottomFrameLayout.setBackgroundColor(Color.TRANSPARENT); params.leftMargin = params.topMargin = params.rightMargin = params.bottomMargin = 0; emptyParams.bottomMargin = gridParams.bottomMargin = shadowParams.bottomMargin = AndroidUtilities.dp(48); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/StorageDiagramView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/StorageDiagramView.java index d6a9beb594..e36918ef49 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/StorageDiagramView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/StorageDiagramView.java @@ -271,10 +271,10 @@ protected void onDraw(Canvas canvas) { text1.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); text2.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); if (dialogId != null) { - int textWidth = text1.getCurrentWidth() + AndroidUtilities.dp(4) + text2.getCurrentWidth(); - int leftpad = (getWidth() - textWidth) / 2; - text1.setBounds(0, AndroidUtilities.dp(115), leftpad + text1.getCurrentWidth(), AndroidUtilities.dp(115 + 30)); - text2.setBounds(leftpad + textWidth - text2.getCurrentWidth(), AndroidUtilities.dp(115 + 3), getWidth(), AndroidUtilities.dp(115 + 3 + 30)); + float textWidth = text1.getCurrentWidth() + AndroidUtilities.dp(4) + text2.getCurrentWidth(); + float leftpad = (getWidth() - textWidth) / 2; + text1.setBounds(0, AndroidUtilities.dp(115), (int) (leftpad + text1.getCurrentWidth()), AndroidUtilities.dp(115 + 30)); + text2.setBounds((int) (leftpad + textWidth - text2.getCurrentWidth()), AndroidUtilities.dp(115 + 3), getWidth(), AndroidUtilities.dp(115 + 3 + 30)); } text1.draw(canvas); text2.draw(canvas); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SuggestEmojiView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SuggestEmojiView.java index 919000dbc2..f904d3772d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SuggestEmojiView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SuggestEmojiView.java @@ -11,6 +11,7 @@ import android.text.Editable; import android.text.Spannable; import android.text.SpannableString; +import android.text.SpannableStringBuilder; import android.text.Spanned; import android.text.TextWatcher; import android.view.Gravity; @@ -29,12 +30,20 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.Emoji; +import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaDataController; import 
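The StorageDiagramView hunk below switches textWidth and leftpad from int to float: AnimatedTextDrawable.getCurrentWidth() is fractional while the text animates, so truncating it before computing the centring offset shifted the two labels. A reduced sketch of the corrected centring, keeping the fixed y-coordinates from the hunk:

// Sketch: centre two animated text drawables side by side using float widths.
float textWidth = text1.getCurrentWidth() + AndroidUtilities.dp(4) + text2.getCurrentWidth(); // float, not int
float leftpad = (getWidth() - textWidth) / 2f;
text1.setBounds(0, AndroidUtilities.dp(115),
        (int) (leftpad + text1.getCurrentWidth()), AndroidUtilities.dp(115 + 30));
text2.setBounds((int) (leftpad + textWidth - text2.getCurrentWidth()), AndroidUtilities.dp(115 + 3),
        getWidth(), AndroidUtilities.dp(115 + 3 + 30));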
org.telegram.messenger.MessageObject; +import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; +import org.telegram.messenger.R; import org.telegram.messenger.SharedConfig; +import org.telegram.messenger.UserConfig; +import org.telegram.messenger.UserObject; import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.ChatActivity; +import org.telegram.ui.ContentPreviewViewer; import java.util.ArrayList; import java.util.Arrays; @@ -45,42 +54,131 @@ public class SuggestEmojiView extends FrameLayout implements NotificationCenter. private final Theme.ResourcesProvider resourcesProvider; private final ChatActivityEnterView enterView; - private final FrameLayout containerView = new FrameLayout(getContext()) { - @Override - protected void dispatchDraw(Canvas canvas) { - SuggestEmojiView.this.drawContainerBegin(canvas); - super.dispatchDraw(canvas); - SuggestEmojiView.this.drawContainerEnd(canvas); - } + @Nullable + private FrameLayout containerView; + @Nullable + private RecyclerListView listView; + @Nullable + private Adapter adapter; + + private ContentPreviewViewer.ContentPreviewViewerDelegate previewDelegate; + private ContentPreviewViewer.ContentPreviewViewerDelegate getPreviewDelegate() { + if (previewDelegate == null) { + previewDelegate = new ContentPreviewViewer.ContentPreviewViewerDelegate() { + @Override + public boolean can() { + return true; + } - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - this.setPadding(AndroidUtilities.dp(10), AndroidUtilities.dp(8), AndroidUtilities.dp(10), AndroidUtilities.dp(6.66f)); - super.onMeasure(widthMeasureSpec, heightMeasureSpec); - } + @Override + public boolean needSend(int contentType) { + if (enterView == null) { + return false; + } + ChatActivity fragment = enterView.getParentFragment(); + return fragment != null && fragment.canSendMessage() && (UserConfig.getInstance(UserConfig.selectedAccount).isPremium() || fragment.getCurrentUser() != null && UserObject.isUserSelf(fragment.getCurrentUser())); + } - @Override - public void setVisibility(int visibility) { - boolean same = getVisibility() == visibility; - super.setVisibility(visibility); - if (!same) { - boolean visible = visibility == View.VISIBLE; - for (int i = 0; i < listView.getChildCount(); ++i) { - if (visible) { - ((Adapter.EmojiImageView) listView.getChildAt(i)).attach(); + @Override + public void sendEmoji(TLRPC.Document emoji) { + if (enterView == null) { + return; + } + ChatActivity fragment = enterView.getParentFragment(); + fragment.sendAnimatedEmoji(emoji, true, 0); + enterView.setFieldText(""); + } + + @Override + public boolean needCopy() { + return UserConfig.getInstance(UserConfig.selectedAccount).isPremium(); + } + + @Override + public void copyEmoji(TLRPC.Document document) { + Spannable spannable = SpannableStringBuilder.valueOf(MessageObject.findAnimatedEmojiEmoticon(document)); + spannable.setSpan(new AnimatedEmojiSpan(document, null), 0, spannable.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + if (AndroidUtilities.addToClipboard(spannable) && enterView != null) { + BulletinFactory.of(enterView.getParentFragment()).createCopyBulletin(LocaleController.getString("EmojiCopied", R.string.EmojiCopied)).show(); + } + } + + @Override + public Boolean canSetAsStatus(TLRPC.Document document) { + if (!UserConfig.getInstance(UserConfig.selectedAccount).isPremium()) { + return null; + } + TLRPC.User user = 
UserConfig.getInstance(UserConfig.selectedAccount).getCurrentUser(); + if (user == null) { + return null; + } + Long emojiStatusId = UserObject.getEmojiStatusDocumentId(user); + return document != null && (emojiStatusId == null || emojiStatusId != document.id); + } + + @Override + public void setAsEmojiStatus(TLRPC.Document document, Integer until) { + TLRPC.EmojiStatus status; + if (document == null) { + status = new TLRPC.TL_emojiStatusEmpty(); + } else if (until != null) { + status = new TLRPC.TL_emojiStatusUntil(); + ((TLRPC.TL_emojiStatusUntil) status).document_id = document.id; + ((TLRPC.TL_emojiStatusUntil) status).until = until; } else { - ((Adapter.EmojiImageView) listView.getChildAt(i)).detach(); + status = new TLRPC.TL_emojiStatus(); + ((TLRPC.TL_emojiStatus) status).document_id = document.id; + } + TLRPC.User user = UserConfig.getInstance(UserConfig.selectedAccount).getCurrentUser(); + final TLRPC.EmojiStatus previousEmojiStatus = user == null ? new TLRPC.TL_emojiStatusEmpty() : user.emoji_status; + MessagesController.getInstance(currentAccount).updateEmojiStatus(status); + + Runnable undoAction = () -> MessagesController.getInstance(currentAccount).updateEmojiStatus(previousEmojiStatus); + BaseFragment fragment = enterView == null ? null : enterView.getParentFragment(); + if (fragment != null) { + if (document == null) { + final Bulletin.SimpleLayout layout = new Bulletin.SimpleLayout(getContext(), resourcesProvider); + layout.textView.setText(LocaleController.getString("RemoveStatusInfo", R.string.RemoveStatusInfo)); + layout.imageView.setImageResource(R.drawable.msg_settings_premium); + Bulletin.UndoButton undoButton = new Bulletin.UndoButton(getContext(), true, resourcesProvider); + undoButton.setUndoAction(undoAction); + layout.setButton(undoButton); + Bulletin.make(fragment, layout, Bulletin.DURATION_SHORT).show(); + } else { + BulletinFactory.of(fragment).createEmojiBulletin(document, LocaleController.getString("SetAsEmojiStatusInfo", R.string.SetAsEmojiStatusInfo), LocaleController.getString("Undo", R.string.Undo), undoAction).show(); + } } } - } + + @Override + public boolean canSchedule() { + return false; + } + + @Override + public boolean isInScheduleMode() { + if (enterView == null) { + return false; + } + ChatActivity fragment = enterView.getParentFragment(); + return fragment.isInScheduleMode(); + } + + @Override + public void openSet(TLRPC.InputStickerSet set, boolean clearsInputField) {} + + @Override + public long getDialogId() { + return 0; + } + }; } - }; - private final RecyclerListView listView; - private final Adapter adapter; - private final LinearLayoutManager layout; + return previewDelegate; + } private boolean show, forceClose; - private ArrayList keywordResults = new ArrayList<>(); + @Nullable + private ArrayList keywordResults; private boolean clear; public SuggestEmojiView(Context context, int currentAccount, ChatActivityEnterView enterView, Theme.ResourcesProvider resourcesProvider) { @@ -89,7 +187,60 @@ public SuggestEmojiView(Context context, int currentAccount, ChatActivityEnterVi this.enterView = enterView; this.resourcesProvider = resourcesProvider; - this.listView = new RecyclerListView(context) { + postDelayed(() -> MediaDataController.getInstance(currentAccount).checkStickers(MediaDataController.TYPE_EMOJIPACKS), 260); + } + + private void createListView() { + if (listView != null) { + return; + } + + path = new Path(); + circlePath = new Path(); + + containerView = new FrameLayout(getContext()) { + @Override + protected void 
dispatchDraw(Canvas canvas) { + SuggestEmojiView.this.drawContainerBegin(canvas); + super.dispatchDraw(canvas); + SuggestEmojiView.this.drawContainerEnd(canvas); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + this.setPadding(AndroidUtilities.dp(10), AndroidUtilities.dp(8), AndroidUtilities.dp(10), AndroidUtilities.dp(6.66f)); + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + + @Override + public void setVisibility(int visibility) { + boolean same = getVisibility() == visibility; + super.setVisibility(visibility); + if (!same) { + boolean visible = visibility == View.VISIBLE; + if (listView != null) { + for (int i = 0; i < listView.getChildCount(); ++i) { + if (visible) { + ((EmojiImageView) listView.getChildAt(i)).attach(); + } else { + ((EmojiImageView) listView.getChildAt(i)).detach(); + } + } + } + } + } + }; + + showFloat1 = new AnimatedFloat(containerView, 120, 350, CubicBezierInterpolator.EASE_OUT_QUINT); + showFloat2 = new AnimatedFloat(containerView, 150, 600, CubicBezierInterpolator.EASE_OUT_QUINT); + overshootInterpolator = new OvershootInterpolator(.4f); + leftGradientAlpha = new AnimatedFloat(containerView, 300, CubicBezierInterpolator.EASE_OUT_QUINT); + rightGradientAlpha = new AnimatedFloat(containerView, 300, CubicBezierInterpolator.EASE_OUT_QUINT); + arrowXAnimated = new AnimatedFloat(containerView, 200, CubicBezierInterpolator.EASE_OUT_QUINT); + listViewCenterAnimated = new AnimatedFloat(containerView, 350, CubicBezierInterpolator.EASE_OUT_QUINT); + listViewWidthAnimated = new AnimatedFloat(containerView, 350, CubicBezierInterpolator.EASE_OUT_QUINT); + + listView = new RecyclerListView(getContext()) { private boolean left, right; @Override public void onScrolled(int dx, int dy) { @@ -97,29 +248,38 @@ public void onScrolled(int dx, int dy) { boolean left = canScrollHorizontally(-1); boolean right = canScrollHorizontally(1); if (this.left != left || this.right != right) { - containerView.invalidate(); + if (containerView != null) { + containerView.invalidate(); + } this.left = left; this.right = right; } } + + @Override + public boolean onInterceptTouchEvent(MotionEvent event) { + boolean result = ContentPreviewViewer.getInstance().onInterceptTouchEvent(event, listView, 0, getPreviewDelegate(), resourcesProvider); + return super.onInterceptTouchEvent(event) || result; + } }; - this.listView.setAdapter(this.adapter = new Adapter()); - this.layout = new LinearLayoutManager(context); - this.layout.setOrientation(RecyclerView.HORIZONTAL); - this.listView.setLayoutManager(this.layout); + listView.setAdapter(adapter = new Adapter(this)); + LinearLayoutManager layout = new LinearLayoutManager(getContext()); + layout.setOrientation(RecyclerView.HORIZONTAL); + listView.setLayoutManager(layout); DefaultItemAnimator itemAnimator = new DefaultItemAnimator(); itemAnimator.setDurations(45); itemAnimator.setTranslationInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); - this.listView.setItemAnimator(itemAnimator); - this.listView.setSelectorDrawableColor(Theme.getColor(Theme.key_listSelector, resourcesProvider)); - this.listView.setOnItemClickListener((view, position) -> { - onClick(((Adapter.EmojiImageView) view).emoji); + listView.setItemAnimator(itemAnimator); + listView.setSelectorDrawableColor(Theme.getColor(Theme.key_listSelector, resourcesProvider)); + RecyclerListView.OnItemClickListener onItemClickListener; + listView.setOnItemClickListener(onItemClickListener = (view, position) -> { + onClick(((EmojiImageView) 
view).emoji); }); + listView.setOnTouchListener((v, event) -> ContentPreviewViewer.getInstance().onTouch(event, listView, 0, onItemClickListener, getPreviewDelegate(), resourcesProvider)); - this.containerView.addView(this.listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 44 + 8)); - this.addView(this.containerView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 8 + 44 + 8 + 6.66f, Gravity.BOTTOM)); - - this.enterView.getEditField().addTextChangedListener(new TextWatcher() { + containerView.addView(listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 44 + 8)); + addView(containerView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 8 + 44 + 8 + 6.66f, Gravity.BOTTOM)); + enterView.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {} @Override @@ -131,8 +291,6 @@ public void afterTextChanged(Editable editable) { } } }); - - MediaDataController.getInstance(currentAccount).checkStickers(MediaDataController.TYPE_EMOJIPACKS); } public void onTextSelectionChanged(int start, int end) { @@ -158,14 +316,15 @@ public void forceClose() { } show = false; forceClose = true; - containerView.invalidate(); + if (containerView != null) { + containerView.invalidate(); + } } private Runnable updateRunnable; public void fireUpdate() { if (updateRunnable != null) { AndroidUtilities.cancelRunOnUIThread(updateRunnable); - updateRunnable = null; } AndroidUtilities.runOnUIThread(updateRunnable = this::update, 16); } @@ -175,14 +334,18 @@ private void update() { if (enterView == null || enterView.getEditField() == null || enterView.getFieldText() == null) { show = false; forceClose = true; - containerView.invalidate(); + if (containerView != null) { + containerView.invalidate(); + } return; } int selectionStart = enterView.getEditField().getSelectionStart(); int selectionEnd = enterView.getEditField().getSelectionEnd(); if (selectionStart != selectionEnd) { show = false; - containerView.invalidate(); + if (containerView != null) { + containerView.invalidate(); + } return; } CharSequence text = enterView.getFieldText(); @@ -195,11 +358,14 @@ private void update() { if (selectionStart == emojiEnd) { String emoji = text.toString().substring(emojiStart, emojiEnd); show = true; + createListView(); // containerView.setVisibility(View.VISIBLE); arrowToSpan = lastEmoji; arrowToStart = arrowToEnd = null; searchAnimated(emoji); - containerView.invalidate(); + if (containerView != null) { + containerView.invalidate(); + } return; } } @@ -207,10 +373,13 @@ private void update() { AnimatedEmojiSpan[] aspans = (text instanceof Spanned) ? 
((Spanned) text).getSpans(Math.max(0, selectionEnd), selectionEnd, AnimatedEmojiSpan.class) : null; if ((aspans == null || aspans.length == 0) && selectionEnd < 52) { show = true; + createListView(); // containerView.setVisibility(View.VISIBLE); arrowToSpan = null; searchKeywords(text.toString().substring(0, selectionEnd)); - containerView.invalidate(); + if (containerView != null) { + containerView.invalidate(); + } return; } } @@ -219,7 +388,9 @@ private void update() { searchRunnable = null; } show = false; - containerView.invalidate(); + if (containerView != null) { + containerView.invalidate(); + } } private int lastQueryType; @@ -231,8 +402,9 @@ private void searchKeywords(String query) { if (query == null) { return; } - if (lastQuery != null && lastQueryType == 1 && lastQuery.equals(query) && !clear && !keywordResults.isEmpty()) { + if (lastQuery != null && lastQueryType == 1 && lastQuery.equals(query) && !clear && keywordResults != null && !keywordResults.isEmpty()) { forceClose = false; + createListView(); containerView.setVisibility(View.VISIBLE); lastSpanY = AndroidUtilities.dp(10); containerView.invalidate(); @@ -258,13 +430,20 @@ private void searchKeywords(String query) { if (param != null && !param.isEmpty()) { clear = false; forceClose = false; - containerView.setVisibility(View.VISIBLE); + createListView(); + if (containerView != null) { + containerView.setVisibility(View.VISIBLE); + } lastSpanY = AndroidUtilities.dp(10); keywordResults = param; arrowToStart = 0; arrowToEnd = query.length(); - containerView.invalidate(); - adapter.notifyDataSetChanged(); + if (containerView != null) { + containerView.invalidate(); + } + if (adapter != null) { + adapter.notifyDataSetChanged(); + } } else { clear = true; forceClose(); @@ -272,7 +451,7 @@ private void searchKeywords(String query) { } }, true); }; - if (keywordResults.isEmpty()) { + if (keywordResults == null || keywordResults.isEmpty()) { AndroidUtilities.runOnUIThread(searchRunnable, 600); } else { searchRunnable.run(); @@ -283,17 +462,19 @@ private void searchAnimated(String emoji) { if (emoji == null) { return; } - if (lastQuery != null && lastQueryType == 2 && lastQuery.equals(emoji) && !clear && !keywordResults.isEmpty()) { + if (lastQuery != null && lastQueryType == 2 && lastQuery.equals(emoji) && !clear && keywordResults != null && !keywordResults.isEmpty()) { forceClose = false; - containerView.setVisibility(View.VISIBLE); - containerView.invalidate(); + createListView(); + if (containerView != null) { + containerView.setVisibility(View.VISIBLE); + containerView.invalidate(); + } return; } final int id = ++lastQueryId; if (searchRunnable != null) { AndroidUtilities.cancelRunOnUIThread(searchRunnable); - searchRunnable = null; } searchRunnable = () -> { @@ -307,10 +488,15 @@ private void searchAnimated(String emoji) { if (!standard.isEmpty()) { clear = false; forceClose = false; - containerView.setVisibility(View.VISIBLE); + createListView(); + if (containerView != null) { + containerView.setVisibility(View.VISIBLE); + containerView.invalidate(); + } keywordResults = standard; - adapter.notifyDataSetChanged(); - containerView.invalidate(); + if (adapter != null) { + adapter.notifyDataSetChanged(); + } } else { clear = true; forceClose(); @@ -318,7 +504,7 @@ private void searchAnimated(String emoji) { } }); }; - if (keywordResults.isEmpty()) { + if (keywordResults == null || keywordResults.isEmpty()) { AndroidUtilities.runOnUIThread(searchRunnable, 600); } else { searchRunnable.run(); @@ -326,7 +512,15 @@ private void 
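The SuggestEmojiView refactor in this section makes the suggestion strip lazy: containerView, listView, the adapter and the AnimatedFloat helpers are only built inside createListView(), which is called right before show is set, and every other path null-checks containerView before touching it. A minimal sketch of that guard pattern; the method names here are hypothetical and only the fields introduced in this diff are assumed:

// Sketch: lazily build the suggestion strip and tolerate it not existing yet.
private void showSuggestions() {
    show = true;
    createListView();                  // builds containerView/listView on first use only
    if (containerView != null) {
        containerView.setVisibility(View.VISIBLE);
        containerView.invalidate();
    }
}

private void hideSuggestions() {
    show = false;
    if (containerView != null) {       // may never have been created
        containerView.invalidate();
    }
}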
searchAnimated(String emoji) { } private CharSequence makeEmoji(String emojiSource) { - Paint.FontMetricsInt fontMetricsInt = enterView.getEditField().getPaint().getFontMetricsInt(); + Paint.FontMetricsInt fontMetricsInt = null; + if (enterView.getEditField() != null) { + fontMetricsInt = enterView.getEditField().getPaint().getFontMetricsInt(); + } + if (fontMetricsInt == null) { + Paint paint = new Paint(); + paint.setTextSize(AndroidUtilities.dp(18)); + fontMetricsInt = paint.getFontMetricsInt(); + } CharSequence emoji; if (emojiSource != null && emojiSource.startsWith("animated_")) { try { @@ -365,7 +559,7 @@ private void onClick(String emojiSource) { } else { return; } - Editable editable = enterView.getEditField().getText(); + Editable editable = enterView.getEditText(); if (editable == null || start < 0 || end < 0 || start > editable.length() || end > editable.length()) { return; } @@ -407,26 +601,28 @@ private void onClick(String emojiSource) { show = false; forceClose = true; lastQueryType = 0; - containerView.invalidate(); + if (containerView != null) { + containerView.invalidate(); + } } - private Path path = new Path(), circlePath = new Path(); + private Path path, circlePath; private Paint backgroundPaint; - private AnimatedFloat showFloat1 = new AnimatedFloat(containerView, 120, 350, CubicBezierInterpolator.EASE_OUT_QUINT); - private AnimatedFloat showFloat2 = new AnimatedFloat(containerView, 150, 600, CubicBezierInterpolator.EASE_OUT_QUINT); - private OvershootInterpolator overshootInterpolator = new OvershootInterpolator(.4f); + private AnimatedFloat showFloat1; + private AnimatedFloat showFloat2; + private OvershootInterpolator overshootInterpolator; - private AnimatedFloat leftGradientAlpha = new AnimatedFloat(containerView, 300, CubicBezierInterpolator.EASE_OUT_QUINT); - private AnimatedFloat rightGradientAlpha = new AnimatedFloat(containerView, 300, CubicBezierInterpolator.EASE_OUT_QUINT); + private AnimatedFloat leftGradientAlpha; + private AnimatedFloat rightGradientAlpha; private Emoji.EmojiSpan arrowToSpan; private float lastSpanY; private Integer arrowToStart, arrowToEnd; private float arrowX; - private AnimatedFloat arrowXAnimated = new AnimatedFloat(containerView, 200, CubicBezierInterpolator.EASE_OUT_QUINT); + private AnimatedFloat arrowXAnimated; - private AnimatedFloat listViewCenterAnimated = new AnimatedFloat(containerView, 350, CubicBezierInterpolator.EASE_OUT_QUINT); - private AnimatedFloat listViewWidthAnimated = new AnimatedFloat(containerView, 350, CubicBezierInterpolator.EASE_OUT_QUINT); + private AnimatedFloat listViewCenterAnimated; + private AnimatedFloat listViewWidthAnimated; private void drawContainerBegin(Canvas canvas) { if (enterView != null && enterView.getEditField() != null) { @@ -438,7 +634,7 @@ private void drawContainerBegin(Canvas canvas) { } } - final boolean show = this.show && !forceClose && !keywordResults.isEmpty() && !clear; + final boolean show = this.show && !forceClose && keywordResults != null && !keywordResults.isEmpty() && !clear; final float showT1 = showFloat1.set(show ? 1f : 0f); final float showT2 = showFloat2.set(show ? 1f : 0f); final float arrowX = arrowXAnimated.set(this.arrowX); @@ -450,7 +646,7 @@ private void drawContainerBegin(Canvas canvas) { path.rewind(); float listViewLeft = listView.getLeft(); - float listViewRight = listView.getLeft() + keywordResults.size() * AndroidUtilities.dp(44); + float listViewRight = listView.getLeft() + (keywordResults == null ? 
0 : keywordResults.size()) * AndroidUtilities.dp(44); boolean force = listViewWidthAnimated.get() <= 0; float width = listViewRight - listViewLeft <= 0 ? listViewWidthAnimated.get() : listViewWidthAnimated.set(listViewRight - listViewLeft, force); @@ -522,15 +718,9 @@ private void drawContainerBegin(Canvas canvas) { canvas.drawPath(path, backgroundPaint); canvas.save(); canvas.clipPath(path); - -// final int count = listView.getChildCount(); -// for (int i = 0; i < count; ++i) { -// listView.getChildAt(i).setTranslationY((1f - overshootInterpolator.getInterpolation(AndroidUtilities.cascade(showT2, i, count + 1, 4))) * AndroidUtilities.dp(16)); -// } } public void drawContainerEnd(Canvas canvas) { - final float width = listViewWidthAnimated.get(); final float center = listViewCenterAnimated.get(); @@ -562,6 +752,9 @@ public void drawContainerEnd(Canvas canvas) { @Override public boolean dispatchTouchEvent(MotionEvent ev) { + if (listView == null) { + return super.dispatchTouchEvent(ev); + } final float width = listViewWidthAnimated.get(); final float center = listViewCenterAnimated.get(); @@ -605,124 +798,129 @@ protected void onDetachedFromWindow() { @Override public void didReceivedNotification(int id, int account, Object... args) { if (id == NotificationCenter.newEmojiSuggestionsAvailable) { - if (!keywordResults.isEmpty()) { + if (keywordResults != null && !keywordResults.isEmpty()) { fireUpdate(); } } else if (id == NotificationCenter.emojiLoaded) { - for (int i = 0; i < listView.getChildCount(); ++i) { - listView.getChildAt(i).invalidate(); + if (listView != null) { + for (int i = 0; i < listView.getChildCount(); ++i) { + listView.getChildAt(i).invalidate(); + } } } } public void invalidateContent() { - containerView.invalidate(); + if (containerView != null) { + containerView.invalidate(); + } } - private class Adapter extends RecyclerListView.SelectionAdapter { + public static class EmojiImageView extends View { - private class EmojiImageView extends View { + private String emoji; + public Drawable drawable; + private boolean attached; - private String emoji; - private Drawable drawable; - private boolean attached; + private AnimatedFloat pressed = new AnimatedFloat(this, 350, new OvershootInterpolator(5.0f)); - private AnimatedFloat pressed = new AnimatedFloat(this, 350, new OvershootInterpolator(5.0f)); - - public EmojiImageView(Context context) { - super(context); - } + public EmojiImageView(Context context) { + super(context); + } - private final int paddingDp = 3; - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - setPadding(AndroidUtilities.dp(paddingDp), AndroidUtilities.dp(paddingDp), AndroidUtilities.dp(paddingDp), AndroidUtilities.dp(paddingDp + 6.66f)); - super.onMeasure( + private final int paddingDp = 3; + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + setPadding(AndroidUtilities.dp(paddingDp), AndroidUtilities.dp(paddingDp), AndroidUtilities.dp(paddingDp), AndroidUtilities.dp(paddingDp + 6.66f)); + super.onMeasure( MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(44), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(44 + 8), MeasureSpec.EXACTLY) - ); - } + ); + } - private void setEmoji(String emoji) { - this.emoji = emoji; - if (emoji != null && emoji.startsWith("animated_")) { - try { - long documentId = Long.parseLong(emoji.substring(9)); - if (!(drawable instanceof AnimatedEmojiDrawable) || ((AnimatedEmojiDrawable) drawable).getDocumentId() != documentId) { - 
setImageDrawable(AnimatedEmojiDrawable.make(currentAccount, AnimatedEmojiDrawable.CACHE_TYPE_KEYBOARD, documentId)); - } - } catch (Exception ignore) { - setImageDrawable(null); + private void setEmoji(String emoji) { + this.emoji = emoji; + if (emoji != null && emoji.startsWith("animated_")) { + try { + long documentId = Long.parseLong(emoji.substring(9)); + if (!(drawable instanceof AnimatedEmojiDrawable) || ((AnimatedEmojiDrawable) drawable).getDocumentId() != documentId) { + setImageDrawable(AnimatedEmojiDrawable.make(UserConfig.selectedAccount, AnimatedEmojiDrawable.CACHE_TYPE_KEYBOARD, documentId)); } - } else { - setImageDrawable(Emoji.getEmojiBigDrawable(emoji)); + } catch (Exception ignore) { + setImageDrawable(null); } + } else { + setImageDrawable(Emoji.getEmojiBigDrawable(emoji)); } + } - public void setImageDrawable(@Nullable Drawable drawable) { - if (this.drawable instanceof AnimatedEmojiDrawable) { - ((AnimatedEmojiDrawable) this.drawable).removeView(this); - } - this.drawable = drawable; - if (drawable instanceof AnimatedEmojiDrawable && attached) { - ((AnimatedEmojiDrawable) drawable).addView(this); - } + public void setImageDrawable(@Nullable Drawable drawable) { + if (this.drawable instanceof AnimatedEmojiDrawable) { + ((AnimatedEmojiDrawable) this.drawable).removeView(this); } - - @Override - public void setPressed(boolean pressed) { - super.setPressed(pressed); - invalidate(); + this.drawable = drawable; + if (drawable instanceof AnimatedEmojiDrawable && attached) { + ((AnimatedEmojiDrawable) drawable).addView(this); } + } - @Override - protected void dispatchDraw(Canvas canvas) { - float scale = 0.8f + 0.2f * (1f - pressed.set(isPressed() ? 1f : 0f)); - if (drawable != null) { - int cx = getWidth() / 2; - int cy = (getHeight() - getPaddingBottom() + getPaddingTop()) / 2; - drawable.setBounds(getPaddingLeft(), getPaddingTop(), getWidth() - getPaddingRight(), getHeight() - getPaddingBottom()); - canvas.scale(scale, scale, cx, cy); - if (drawable instanceof AnimatedEmojiDrawable) { - ((AnimatedEmojiDrawable) drawable).setTime(System.currentTimeMillis()); - } - drawable.draw(canvas); + @Override + public void setPressed(boolean pressed) { + super.setPressed(pressed); + invalidate(); + } + + @Override + protected void dispatchDraw(Canvas canvas) { + float scale = 0.8f + 0.2f * (1f - pressed.set(isPressed() ? 
1f : 0f)); + if (drawable != null) { + int cx = getWidth() / 2; + int cy = (getHeight() - getPaddingBottom() + getPaddingTop()) / 2; + drawable.setBounds(getPaddingLeft(), getPaddingTop(), getWidth() - getPaddingRight(), getHeight() - getPaddingBottom()); + canvas.scale(scale, scale, cx, cy); + if (drawable instanceof AnimatedEmojiDrawable) { + ((AnimatedEmojiDrawable) drawable).setTime(System.currentTimeMillis()); } + drawable.draw(canvas); } + } - @Override - protected void onAttachedToWindow() { - super.onAttachedToWindow(); - attach(); - } + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + attach(); + } - @Override - protected void onDetachedFromWindow() { - super.onDetachedFromWindow(); - detach(); - } + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + detach(); + } - public void detach() { - if (drawable instanceof AnimatedEmojiDrawable) { - ((AnimatedEmojiDrawable) drawable).removeView(this); - } - attached = false; + public void detach() { + if (drawable instanceof AnimatedEmojiDrawable) { + ((AnimatedEmojiDrawable) drawable).removeView(this); } - public void attach() { - if (drawable instanceof AnimatedEmojiDrawable) { - ((AnimatedEmojiDrawable) drawable).addView(this); - } - attached = true; + attached = false; + } + public void attach() { + if (drawable instanceof AnimatedEmojiDrawable) { + ((AnimatedEmojiDrawable) drawable).addView(this); } + attached = true; } + } + + private static class Adapter extends RecyclerListView.SelectionAdapter { - public Adapter() { -// setHasStableIds(true); + SuggestEmojiView suggestEmojiView; + public Adapter(SuggestEmojiView suggestEmojiView) { + this.suggestEmojiView = suggestEmojiView; } @Override public long getItemId(int position) { - return keywordResults.get(position).emoji.hashCode(); + return suggestEmojiView.keywordResults == null ? 0 : suggestEmojiView.keywordResults.get(position).emoji.hashCode(); } @Override @@ -733,17 +931,17 @@ public boolean isEnabled(RecyclerView.ViewHolder holder) { @NonNull @Override public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { - return new RecyclerListView.Holder(new EmojiImageView(getContext())); + return new RecyclerListView.Holder(new EmojiImageView(suggestEmojiView.getContext())); } @Override public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int position) { - ((EmojiImageView) holder.itemView).setEmoji(keywordResults.get(position).emoji); + ((EmojiImageView) holder.itemView).setEmoji(suggestEmojiView.keywordResults == null ? null : suggestEmojiView.keywordResults.get(position).emoji); } @Override public int getItemCount() { - return keywordResults.size(); + return suggestEmojiView.keywordResults == null ? 
0 : suggestEmojiView.keywordResults.size(); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SwipeGestureSettingsView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SwipeGestureSettingsView.java index feb7ab555a..3557a41cd1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SwipeGestureSettingsView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SwipeGestureSettingsView.java @@ -109,9 +109,8 @@ protected void onDraw(Canvas canvas) { SharedConfig.updateChatListSwipeSetting(newVal); invalidate(); - if (!NekoConfig.disableVibration.Bool()) { + if (!NekoConfig.disableVibration.Bool()) picker.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } }); picker.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO); picker.setValue(SharedConfig.getChatSwipeAction(currentAccount)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemeEditorView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemeEditorView.java index d6a034bb42..2ccc6f5f41 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemeEditorView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemeEditorView.java @@ -115,7 +115,7 @@ public void destroy() { windowManager.removeViewImmediate(windowView); windowView = null; } catch (Exception e) { - FileLog.e(e); + FileLog.e(e, false); } try { if (editorAlert != null) { @@ -344,7 +344,7 @@ public ColorPicker(Context context) { colorEditText[a].setCursorWidth(1.5f); colorEditText[a].setTextSize(TypedValue.COMPLEX_UNIT_DIP, 18); colorEditText[a].setBackground(null); - colorEditText[a].setLineColors(Theme.getColor(Theme.key_dialogInputField), Theme.getColor(Theme.key_dialogInputFieldActivated), Theme.getColor(Theme.key_dialogTextRed2)); + colorEditText[a].setLineColors(Theme.getColor(Theme.key_dialogInputField), Theme.getColor(Theme.key_dialogInputFieldActivated), Theme.getColor(Theme.key_dialogTextRed)); colorEditText[a].setMaxLines(1); colorEditText[a].setTag(a); colorEditText[a].setGravity(Gravity.CENTER); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemeSmallPreviewView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemeSmallPreviewView.java index e9a90736e6..9c24e29ef1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemeSmallPreviewView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemeSmallPreviewView.java @@ -238,6 +238,7 @@ public void setItem(ChatThemeBottomSheet.ChatThemeItem item, boolean animated) { final TLRPC.PhotoSize thumbSize = FileLoader.getClosestPhotoSizeWithSize(wallpaperDocument.thumbs, PATTERN_BITMAP_MAXWIDTH); ImageLocation imageLocation = ImageLocation.getForDocument(thumbSize, wallpaperDocument); ImageReceiver imageReceiver = new ImageReceiver(); + imageReceiver.setAllowLoadingOnAttachedOnly(false); imageReceiver.setImage(imageLocation, PATTERN_BITMAP_MAXWIDTH + "_" + PATTERN_BITMAP_MAXHEIGHT, null, null, null, 1); imageReceiver.setDelegate((receiver, set, thumb, memCache) -> { ImageReceiver.BitmapHolder holder = receiver.getBitmapSafe(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/TranslateAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/TranslateAlert.java index ba3bc8558c..e69de29bb2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/TranslateAlert.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/TranslateAlert.java @@ -1,1856 +0,0 @@ -package org.telegram.ui.Components; - -import static org.telegram.messenger.AndroidUtilities.displayMetrics; -import static org.telegram.messenger.AndroidUtilities.dp; -import static org.telegram.messenger.AndroidUtilities.lerp; - -import android.animation.Animator; -import android.animation.AnimatorListenerAdapter; -import android.animation.ValueAnimator; -import android.app.Dialog; -import android.content.Context; -import android.graphics.Canvas; -import android.graphics.LinearGradient; -import android.graphics.Paint; -import android.graphics.Path; -import android.graphics.PorterDuff; -import android.graphics.PorterDuffColorFilter; -import android.graphics.Rect; -import android.graphics.RectF; -import android.graphics.Region; -import android.graphics.Shader; -import android.graphics.drawable.ColorDrawable; -import android.graphics.drawable.Drawable; -import android.os.Build; -import android.os.Bundle; -import android.os.SystemClock; -import android.text.Layout; -import android.text.Spannable; -import android.text.SpannableStringBuilder; -import android.text.Spanned; -import android.text.TextPaint; -import android.text.TextUtils; -import android.text.method.LinkMovementMethod; -import android.text.style.ClickableSpan; -import android.text.style.URLSpan; -import android.text.util.Linkify; -import android.util.TypedValue; -import android.view.Gravity; -import android.view.MotionEvent; -import android.view.View; -import android.view.ViewGroup; -import android.view.ViewParent; -import android.view.Window; -import android.view.WindowManager; -import android.widget.FrameLayout; -import android.widget.ImageView; -import android.widget.LinearLayout; -import android.widget.TextView; -import android.widget.Toast; - -import androidx.annotation.NonNull; -import androidx.core.widget.NestedScrollView; - -import org.telegram.messenger.AndroidUtilities; -import org.telegram.messenger.ApplicationLoader; -import org.telegram.messenger.DispatchQueue; -import org.telegram.messenger.Emoji; -import org.telegram.messenger.FileLog; -import org.telegram.messenger.LocaleController; -import org.telegram.messenger.MessageObject; -import org.telegram.messenger.R; -import org.telegram.messenger.Utilities; -import org.telegram.messenger.XiaomiUtilities; -import org.telegram.tgnet.ConnectionsManager; -import org.telegram.tgnet.TLRPC; -import org.telegram.ui.ActionBar.BaseFragment; -import org.telegram.ui.ActionBar.Theme; - -import java.util.ArrayList; - -public class TranslateAlert extends Dialog { - -// public static volatile DispatchQueue translateQueue = new DispatchQueue("translateQueue", false); - - private FrameLayout bulletinContainer; - private FrameLayout contentView; - private FrameLayout container; - private TextView titleView; - private LinearLayout subtitleView; - private InlineLoadingTextView subtitleFromView; - private ImageView subtitleArrowView; - private TextView subtitleToView; - private ImageView backButton; - private FrameLayout header; - private FrameLayout headerShadowView; - private NestedScrollView scrollView; - private TextBlocksLayout textsView; - private TextView buttonTextView; - private FrameLayout buttonView; - private FrameLayout buttonShadowView; - private TextView allTextsView; - private FrameLayout textsContainerView; - - private FrameLayout.LayoutParams titleLayout; - private FrameLayout.LayoutParams subtitleLayout; - private FrameLayout.LayoutParams headerLayout; - private 
FrameLayout.LayoutParams scrollViewLayout; - - private int blockIndex = 0; - private ArrayList textBlocks; - - private float containerOpenAnimationT = 0f; - - private void openAnimation(float t) { - t = Math.min(Math.max(t, 0f), 1f); - if (containerOpenAnimationT == t) { - return; - } - containerOpenAnimationT = t; - - titleView.setScaleX(lerp(1f, 0.9473f, t)); - titleView.setScaleY(lerp(1f, 0.9473f, t)); - titleLayout.setMargins( - dp(lerp(22, 72, t)), - dp(lerp(22, 8, t)), - titleLayout.rightMargin, - titleLayout.bottomMargin - ); - titleView.setLayoutParams(titleLayout); - subtitleLayout.setMargins( - dp(lerp(22, 72, t)) - LoadingTextView2.paddingHorizontal, - dp(lerp(47, 30, t)) - LoadingTextView2.paddingVertical, - subtitleLayout.rightMargin, - subtitleLayout.bottomMargin - ); - subtitleView.setLayoutParams(subtitleLayout); - - backButton.setAlpha(t); - backButton.setScaleX(.75f + .25f * t); - backButton.setScaleY(.75f + .25f * t); - backButton.setClickable(t > .5f); - headerShadowView.setAlpha(scrollView.getScrollY() > 0 ? 1f : t); - - headerLayout.height = (int) lerp(dp(70), dp(56), t); - header.setLayoutParams(headerLayout); - - scrollViewLayout.setMargins( - scrollViewLayout.leftMargin, - (int) lerp(dp(70), dp(56), t), - scrollViewLayout.rightMargin, - scrollViewLayout.bottomMargin - ); - scrollView.setLayoutParams(scrollViewLayout); - } - - - private boolean openAnimationToAnimatorPriority = false; - private ValueAnimator openAnimationToAnimator = null; - - private void openAnimationTo(float to, boolean priority) { - openAnimationTo(to, priority, null); - } - - private void openAnimationTo(float to, boolean priority, Runnable onAnimationEnd) { - if (openAnimationToAnimatorPriority && !priority) { - return; - } - openAnimationToAnimatorPriority = priority; - to = Math.min(Math.max(to, 0), 1); - if (openAnimationToAnimator != null) { - openAnimationToAnimator.cancel(); - } - openAnimationToAnimator = ValueAnimator.ofFloat(containerOpenAnimationT, to); - openAnimationToAnimator.addUpdateListener(a -> openAnimation((float) a.getAnimatedValue())); - openAnimationToAnimator.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animator) { - openAnimationToAnimatorPriority = false; - if (onAnimationEnd != null) - onAnimationEnd.run(); - } - - @Override - public void onAnimationCancel(Animator animator) { - openAnimationToAnimatorPriority = false; - } - }); - openAnimationToAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT); - openAnimationToAnimator.setDuration(220); - openAnimationToAnimator.start(); - if (to >= .5 && blockIndex <= 1) { - fetchNext(); - } - } - - private int firstMinHeight = -1; - - private int minHeight() { - return minHeight(false); - } - - private int minHeight(boolean full) { - int textsViewHeight = textsView == null ? 
0 : textsView.getMeasuredHeight(); - int height = - textsViewHeight + - dp( - 66 + // header - 1 + // button separator - 16 + // button top padding - 48 + // button - 16 // button bottom padding - ); - if (firstMinHeight < 0 && textsViewHeight > 0) - firstMinHeight = height; - if (firstMinHeight > 0 && textBlocks.size() > 1 && !full) - return firstMinHeight; - return height; - } - - private boolean canExpand() { - return ( - textsView.getBlocksCount() < textBlocks.size() || - minHeight(true) >= (AndroidUtilities.displayMetrics.heightPixels * heightMaxPercent) - ); - } - - private void updateCanExpand() { - boolean canExpand = canExpand(); - if (containerOpenAnimationT > 0f && !canExpand) { - openAnimationTo(0f, false); - } - - buttonShadowView.animate().alpha(canExpand ? 1f : 0f).setDuration((long) (Math.abs(buttonShadowView.getAlpha() - (canExpand ? 1f : 0f)) * 220)).start(); - } - - public interface OnLinkPress { - public boolean run(URLSpan urlSpan); - } - - private int currentAccount; - private TLRPC.InputPeer peer; - private int msgId; - private boolean allowScroll = true; - private String fromLanguage, toLanguage; - private CharSequence text; - private BaseFragment fragment; - private boolean noforwards; - private OnLinkPress onLinkPress; - private Runnable onDismiss; - - public TranslateAlert(BaseFragment fragment, Context context, int currentAccount, String fromLanguage, String toLanguage, CharSequence text, boolean noforwards, OnLinkPress onLinkPress, Runnable onDismiss) { - this(fragment, context, currentAccount, null, -1, fromLanguage, toLanguage, text, noforwards, onLinkPress, onDismiss); - } - - public TranslateAlert(BaseFragment fragment, Context context, int currentAccount, TLRPC.InputPeer peer, int msgId, String fromLanguage, String toLanguage, CharSequence text, boolean noforwards, OnLinkPress onLinkPress, Runnable onDismiss) { - super(context, R.style.TransparentDialog); - - this.onLinkPress = onLinkPress; - this.noforwards = noforwards; - this.fragment = fragment; - this.fromLanguage = fromLanguage != null && fromLanguage.equals("und") ? 
"auto" : fromLanguage; - this.toLanguage = toLanguage; - this.text = text; - this.textBlocks = new ArrayList<>(); - this.textBlocks.add(text); -// cutInBlocks(text, 1024); - this.onDismiss = onDismiss; - - this.currentAccount = currentAccount; - this.peer = peer; - this.msgId = msgId; - - if (Build.VERSION.SDK_INT >= 30) { - getWindow().addFlags(WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN | WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS); - } else if (Build.VERSION.SDK_INT >= 21) { - getWindow().addFlags(WindowManager.LayoutParams.FLAG_LAYOUT_INSET_DECOR | WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN | WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS); - } - - if (noforwards) { - getWindow().addFlags(WindowManager.LayoutParams.FLAG_SECURE); - } - - contentView = new FrameLayout(context); - contentView.setBackground(backDrawable); - contentView.setClipChildren(false); - contentView.setClipToPadding(false); - if (Build.VERSION.SDK_INT >= 21) { - contentView.setFitsSystemWindows(true); - if (Build.VERSION.SDK_INT >= 30) { - contentView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_STABLE | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION); - } else { - contentView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_STABLE | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN); - } - } - - Paint containerPaint = new Paint(); - containerPaint.setColor(Theme.getColor(Theme.key_dialogBackground)); - containerPaint.setShadowLayer(dp(2), 0, dp(-0.66f), 0x1e000000); - container = new FrameLayout(context) { - private int contentHeight = Integer.MAX_VALUE; - - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - int fullWidth = MeasureSpec.getSize(widthMeasureSpec); - int fullHeight = MeasureSpec.getSize(widthMeasureSpec); - int minHeight = (int) (AndroidUtilities.displayMetrics.heightPixels * heightMaxPercent); - if (textsView != null && textsView.getMeasuredHeight() <= 0) { - textsView.measure( - MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec) - textsView.getPaddingLeft() - textsView.getPaddingRight() - textsContainerView.getPaddingLeft() - textsContainerView.getPaddingRight(), MeasureSpec.EXACTLY), - 0 - ); - } - int fromHeight = Math.min(minHeight, minHeight()); - int height = (int) (fromHeight + (AndroidUtilities.displayMetrics.heightPixels - fromHeight) * containerOpenAnimationT); - updateCanExpand(); - super.onMeasure( - MeasureSpec.makeMeasureSpec( - (int) Math.max(fullWidth * 0.8f, Math.min(dp(480), fullWidth)), - MeasureSpec.getMode(widthMeasureSpec) - ), - MeasureSpec.makeMeasureSpec( - height, - MeasureSpec.EXACTLY - ) - ); - } - - @Override - protected void onLayout(boolean changed, int left, int top, int right, int bottom) { - super.onLayout(changed, left, top, right, bottom); - contentHeight = Math.min(contentHeight, bottom - top); - } - - private Path containerPath = new Path(); - private RectF containerRect = new RectF(); - private RectF rectF = new RectF(); - - @Override - protected void onDraw(Canvas canvas) { - int w = getWidth(), h = getHeight(), r = dp(12 * (1f - containerOpenAnimationT)); - canvas.clipRect(0, 0, w, h); - - containerRect.set(0, 0, w, h + r); - canvas.translate(0, (1f - openingT) * h); - - canvas.drawRoundRect(containerRect, r, r, containerPaint); - super.onDraw(canvas); - } - }; - container.setWillNotDraw(false); - - header = new FrameLayout(context); - - titleView = new TextView(context); - titleView.setPivotX(LocaleController.isRTL ? 
titleView.getWidth() : 0); - titleView.setPivotY(0); - titleView.setLines(1); - titleView.setText(LocaleController.getString("AutomaticTranslation", R.string.AutomaticTranslation)); - titleView.setGravity(LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT); - titleView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - titleView.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); - titleView.setTextSize(TypedValue.COMPLEX_UNIT_PX, dp(19)); - header.addView(titleView, titleLayout = LayoutHelper.createFrame( - LayoutHelper.MATCH_PARENT, - LayoutHelper.WRAP_CONTENT, - Gravity.FILL_HORIZONTAL | Gravity.TOP, - 22, 22, 22, 0 - )); - titleView.post(() -> { - titleView.setPivotX(LocaleController.isRTL ? titleView.getWidth() : 0); - }); - - subtitleView = new LinearLayout(context); - subtitleView.setOrientation(LinearLayout.HORIZONTAL); - if (Build.VERSION.SDK_INT >= 17) { - subtitleView.setLayoutDirection(LocaleController.isRTL ? View.LAYOUT_DIRECTION_RTL : View.LAYOUT_DIRECTION_LTR); - } - subtitleView.setGravity(LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT); - - String fromLanguageName = languageName(fromLanguage); - subtitleFromView = new InlineLoadingTextView(context, fromLanguageName == null ? languageName(toLanguage) : fromLanguageName, dp(14), Theme.getColor(Theme.key_player_actionBarSubtitle)) { - @Override - protected void onLoadAnimation(float t) { - MarginLayoutParams lp = (MarginLayoutParams) subtitleFromView.getLayoutParams(); - if (lp != null) { - if (LocaleController.isRTL) { - lp.leftMargin = dp(2f - t * 6f); - } else { - lp.rightMargin = dp(2f - t * 6f); - } - subtitleFromView.setLayoutParams(lp); - } - } - }; - subtitleFromView.showLoadingText = false; - subtitleArrowView = new ImageView(context); - subtitleArrowView.setImageResource(R.drawable.search_arrow); - subtitleArrowView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_player_actionBarSubtitle), PorterDuff.Mode.MULTIPLY)); - if (LocaleController.isRTL) { - subtitleArrowView.setScaleX(-1f); - } - - subtitleToView = new TextView(context); - subtitleToView.setLines(1); - subtitleToView.setTextColor(Theme.getColor(Theme.key_player_actionBarSubtitle)); - subtitleToView.setTextSize(TypedValue.COMPLEX_UNIT_PX, dp(14)); - subtitleToView.setText(languageName(toLanguage)); - - if (LocaleController.isRTL) { - subtitleView.setPadding(InlineLoadingTextView.paddingHorizontal, 0, 0, 0); - subtitleView.addView(subtitleToView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL)); - subtitleView.addView(subtitleArrowView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 3, 1, 0, 0)); - subtitleView.addView(subtitleFromView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 2, 0, 0, 0)); - } else { - subtitleView.setPadding(0, 0, InlineLoadingTextView.paddingHorizontal, 0); - subtitleView.addView(subtitleFromView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 0, 0, 2, 0)); - subtitleView.addView(subtitleArrowView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 0, 1, 3, 0)); - subtitleView.addView(subtitleToView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL)); - } - if (fromLanguageName != null) { - subtitleFromView.set(fromLanguageName); - } - - 
header.addView(subtitleView, subtitleLayout = LayoutHelper.createFrame( - LayoutHelper.MATCH_PARENT, - LayoutHelper.WRAP_CONTENT, - Gravity.TOP | (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT), - 22 - LoadingTextView2.paddingHorizontal / AndroidUtilities.density, - 47 - LoadingTextView2.paddingVertical / AndroidUtilities.density, - 22 - LoadingTextView2.paddingHorizontal / AndroidUtilities.density, - 0 - )); - - backButton = new ImageView(context); - backButton.setImageResource(R.drawable.ic_ab_back); - backButton.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_dialogTextBlack), PorterDuff.Mode.MULTIPLY)); - backButton.setScaleType(ImageView.ScaleType.FIT_CENTER); - backButton.setPadding(AndroidUtilities.dp(16), 0, AndroidUtilities.dp(16), 0); - backButton.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_dialogButtonSelector))); - backButton.setClickable(false); - backButton.setAlpha(0f); - backButton.setOnClickListener(e -> dismiss()); - header.addView(backButton, LayoutHelper.createFrame(56, 56, Gravity.LEFT | Gravity.CENTER_HORIZONTAL)); - - headerShadowView = new FrameLayout(context); - headerShadowView.setBackgroundColor(Theme.getColor(Theme.key_dialogShadowLine)); - headerShadowView.setAlpha(0); - header.addView(headerShadowView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 1, Gravity.BOTTOM | Gravity.FILL_HORIZONTAL)); - - header.setClipChildren(false); - container.addView(header, headerLayout = LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 70, Gravity.FILL_HORIZONTAL | Gravity.TOP)); - - scrollView = new NestedScrollView(context) { - @Override - public boolean onInterceptTouchEvent(MotionEvent ev) { - return allowScroll && containerOpenAnimationT >= 1f && canExpand() && super.onInterceptTouchEvent(ev); - } - - @Override - public void onNestedScroll(@NonNull View target, int dxConsumed, int dyConsumed, int dxUnconsumed, int dyUnconsumed) { - super.onNestedScroll(target, dxConsumed, dyConsumed, dxUnconsumed, dyUnconsumed); - } - - @Override - protected void onScrollChanged(int l, int t, int oldl, int oldt) { - super.onScrollChanged(l, t, oldl, oldt); - if (checkForNextLoading()) { - openAnimationTo(1f, true); - } - } - }; - scrollView.setClipChildren(true); - - allTextsView = new TextView(context) { - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - super.onMeasure(widthMeasureSpec, MOST_SPEC); - } - - @Override - protected void onDraw(Canvas canvas) { - super.onDraw(canvas); - canvas.translate(getPaddingLeft(), getPaddingTop()); - if (links != null && links.draw(canvas)) { - invalidate(); - } - } - - @Override - public boolean onTextContextMenuItem(int id) { - if (id == android.R.id.copy && isFocused()) { - android.content.ClipboardManager clipboard = (android.content.ClipboardManager) ApplicationLoader.applicationContext.getSystemService(Context.CLIPBOARD_SERVICE); - android.content.ClipData clip = android.content.ClipData.newPlainText( - "label", - getText().subSequence( - Math.max(0, Math.min(getSelectionStart(), getSelectionEnd())), - Math.max(0, Math.max(getSelectionStart(), getSelectionEnd())) - ) - ); - clipboard.setPrimaryClip(clip); - BulletinFactory.of(bulletinContainer, null).createCopyBulletin(LocaleController.getString("TextCopied", R.string.TextCopied)).show(); - clearFocus(); - return true; - } else { - return super.onTextContextMenuItem(id); - } - } - }; - links = new LinkSpanDrawable.LinkCollector(allTextsView); - allTextsView.setTextColor(0x00000000); - 
allTextsView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); - allTextsView.setTextIsSelectable(!noforwards); - allTextsView.setHighlightColor(Theme.getColor(Theme.key_chat_inTextSelectionHighlight)); - int handleColor = Theme.getColor(Theme.key_chat_TextSelectionCursor); - try { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q && !XiaomiUtilities.isMIUI()) { - Drawable left = allTextsView.getTextSelectHandleLeft(); - left.setColorFilter(handleColor, PorterDuff.Mode.SRC_IN); - allTextsView.setTextSelectHandleLeft(left); - - Drawable right = allTextsView.getTextSelectHandleRight(); - right.setColorFilter(handleColor, PorterDuff.Mode.SRC_IN); - allTextsView.setTextSelectHandleRight(right); - } - } catch (Exception e) { - } - allTextsView.setFocusable(true); - allTextsView.setMovementMethod(new LinkMovementMethod()); - - textsView = new TextBlocksLayout(context, dp(16), Theme.getColor(Theme.key_dialogTextBlack), allTextsView); - textsView.setPadding( - dp(22) - LoadingTextView2.paddingHorizontal, - dp(12) - LoadingTextView2.paddingVertical, - dp(22) - LoadingTextView2.paddingHorizontal, - dp(12) - LoadingTextView2.paddingVertical - ); - for (CharSequence blockText : textBlocks) - textsView.addBlock(blockText); - - textsContainerView = new FrameLayout(context); - textsContainerView.addView(textsView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); - - scrollView.addView(textsContainerView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 1f)); - - container.addView(scrollView, scrollViewLayout = LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.FILL, 0, 70, 0, 81)); - - fetchNext(); - - buttonShadowView = new FrameLayout(context); - buttonShadowView.setBackgroundColor(Theme.getColor(Theme.key_dialogShadowLine)); - container.addView(buttonShadowView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 1, Gravity.BOTTOM | Gravity.FILL_HORIZONTAL, 0, 0, 0, 80)); - - buttonTextView = new TextView(context); - buttonTextView.setLines(1); - buttonTextView.setSingleLine(true); - buttonTextView.setGravity(Gravity.CENTER_HORIZONTAL); - buttonTextView.setEllipsize(TextUtils.TruncateAt.END); - buttonTextView.setGravity(Gravity.CENTER); - buttonTextView.setTextColor(Theme.getColor(Theme.key_featuredStickers_buttonText)); - buttonTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - buttonTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); - buttonTextView.setText(LocaleController.getString("CloseTranslation", R.string.CloseTranslation)); - - buttonView = new FrameLayout(context); -// buttonView.setBackground(Theme.AdaptiveRipple.filledRect(Theme.key_featuredStickers_addButton, 4)); - buttonView.setBackground(Theme.AdaptiveRipple.filledRect(Theme.getColor(Theme.key_featuredStickers_addButton), 4)); - buttonView.addView(buttonTextView); - buttonView.setOnClickListener(e -> dismiss()); - - container.addView(buttonView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.BOTTOM, 16, 16, 16, 16)); - contentView.addView(container, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM | Gravity.CENTER_HORIZONTAL)); - - bulletinContainer = new FrameLayout(context); - contentView.addView(bulletinContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.FILL, 0, 0, 0, 81)); - } - - public void showDim(boolean enable) { - contentView.setBackground(enable ? 
backDrawable : null); - } - - private boolean scrollAtBottom() { - View view = (View) scrollView.getChildAt(scrollView.getChildCount() - 1); - int bottom = view.getBottom(); - LoadingTextView2 lastUnloadedBlock = textsView.getFirstUnloadedBlock(); - if (lastUnloadedBlock != null) { - bottom = lastUnloadedBlock.getTop(); - } - int diff = (bottom - (scrollView.getHeight() + scrollView.getScrollY())); - return diff <= textsContainerView.getPaddingBottom(); - } - - private void setScrollY(float t) { - openAnimation(t); - openingT = Math.max(Math.min(1f + t, 1), 0); - backDrawable.setAlpha((int) (openingT * 51)); - container.invalidate(); - bulletinContainer.setTranslationY((1f - openingT) * Math.min(minHeight(), displayMetrics.heightPixels * heightMaxPercent)); - } - - private void scrollYTo(float t) { - scrollYTo(t, null); - } - - private void scrollYTo(float t, Runnable onAnimationEnd) { - openAnimationTo(t, false, onAnimationEnd); - openTo(1f + t, false); - } - - private float fromScrollY = 0; - - private float getScrollY() { - return Math.max(Math.min(containerOpenAnimationT - (1 - openingT), 1), 0); - } - - private boolean hasSelection() { - return allTextsView.hasSelection(); - } - - private Rect containerRect = new Rect(); - private Rect textRect = new Rect(); - private Rect translateMoreRect = new Rect(); - private Rect buttonRect = new Rect(); - private Rect backRect = new Rect(); - private Rect scrollRect = new Rect(); - private float fromY = 0; - private boolean pressedOutside = false; - private boolean maybeScrolling = false; - private boolean scrolling = false; - private boolean fromScrollRect = false; - private boolean fromTranslateMoreView = false; - private float fromScrollViewY = 0; - private Spannable allTexts = null; - private LinkSpanDrawable pressedLink; - private LinkSpanDrawable.LinkCollector links; - - @Override - public boolean dispatchTouchEvent(@NonNull MotionEvent event) { - try { - float x = event.getX(); - float y = event.getY(); - - container.getGlobalVisibleRect(containerRect); - if (!containerRect.contains((int) x, (int) y)) { - if (event.getAction() == MotionEvent.ACTION_DOWN) { - pressedOutside = true; - return true; - } else if (event.getAction() == MotionEvent.ACTION_UP) { - if (pressedOutside) { - pressedOutside = false; - dismiss(); - return true; - } - } - } - - try { - allTextsView.getGlobalVisibleRect(textRect); - if (textRect.contains((int) x, (int) y) && !maybeScrolling) { - Layout allTextsLayout = allTextsView.getLayout(); - int tx = (int) (x - allTextsView.getLeft() - container.getLeft()), - ty = (int) (y - allTextsView.getTop() - container.getTop() - scrollView.getTop() + scrollView.getScrollY()); - final int line = allTextsLayout.getLineForVertical(ty); - final int off = allTextsLayout.getOffsetForHorizontal(line, tx); - - final float left = allTextsLayout.getLineLeft(line); - if (allTexts instanceof Spannable && left <= tx && left + allTextsLayout.getLineWidth(line) >= tx) { - ClickableSpan[] linkSpans = allTexts.getSpans(off, off, ClickableSpan.class); - if (linkSpans != null && linkSpans.length >= 1) { - if (event.getAction() == MotionEvent.ACTION_UP && pressedLink.getSpan() == linkSpans[0]) { - ((ClickableSpan) pressedLink.getSpan()).onClick(allTextsView); - if (links != null) { - links.removeLink(pressedLink); - } - pressedLink = null; - allTextsView.setTextIsSelectable(!noforwards); - } else if (event.getAction() == MotionEvent.ACTION_DOWN) { - pressedLink = new LinkSpanDrawable(linkSpans[0], fragment.getResourceProvider(), tx, ty, false); 
- if (links != null) { - links.addLink(pressedLink); - } - LinkPath path = pressedLink.obtainNewPath(); - int start = allTexts.getSpanStart(pressedLink.getSpan()); - int end = allTexts.getSpanEnd(pressedLink.getSpan()); - path.setCurrentLayout(allTextsLayout, start, 0); - allTextsLayout.getSelectionPath(start, end, path); - } - allTextsView.invalidate(); - return true; - } - } - } - if (pressedLink != null) { - if (links != null) { - links.clear(); - } - pressedLink = null; - } - } catch (Exception e2) { - e2.printStackTrace(); - } - - scrollView.getGlobalVisibleRect(scrollRect); - backButton.getGlobalVisibleRect(backRect); - buttonView.getGlobalVisibleRect(buttonRect); - if (pressedLink == null && /*!(scrollRect.contains((int) x, (int) y) && !canExpand() && containerOpenAnimationT < .5f && !scrolling) &&*/ !hasSelection()) { - if ( - !backRect.contains((int) x, (int) y) && - !buttonRect.contains((int) x, (int) y) && - event.getAction() == MotionEvent.ACTION_DOWN - ) { - fromScrollRect = scrollRect.contains((int) x, (int) y) && (containerOpenAnimationT > 0 || !canExpand()); - maybeScrolling = true; - scrolling = scrollRect.contains((int) x, (int) y) && textsView.getBlocksCount() > 0 && !((LoadingTextView2) textsView.getBlockAt(0)).loaded; - fromY = y; - fromScrollY = getScrollY(); - fromScrollViewY = scrollView.getScrollY(); - return super.dispatchTouchEvent(event) || true; - } else if (maybeScrolling && (event.getAction() == MotionEvent.ACTION_MOVE || event.getAction() == MotionEvent.ACTION_UP)) { - float dy = fromY - y; - if (fromScrollRect) { - dy = -Math.max(0, -(fromScrollViewY + dp(48)) - dy); - if (dy < 0) { - scrolling = true; - allTextsView.setTextIsSelectable(false); - } - } else if (Math.abs(dy) > dp(4) && !fromScrollRect) { - scrolling = true; - allTextsView.setTextIsSelectable(false); - scrollView.stopNestedScroll(); - allowScroll = false; - } - float fullHeight = AndroidUtilities.displayMetrics.heightPixels, - minHeight = Math.min(minHeight(), fullHeight * heightMaxPercent); - float scrollYPx = minHeight * (1f - -Math.min(Math.max(fromScrollY, -1), 0)) + (fullHeight - minHeight) * Math.min(1, Math.max(fromScrollY, 0)) + dy; - float scrollY = scrollYPx > minHeight ? (scrollYPx - minHeight) / (fullHeight - minHeight) : -(1f - scrollYPx / minHeight); - if (!canExpand()) { - scrollY = Math.min(scrollY, 0); - } - updateCanExpand(); - - if (scrolling) { - setScrollY(scrollY); - if (event.getAction() == MotionEvent.ACTION_UP) { - scrolling = false; - allTextsView.setTextIsSelectable(!noforwards); - maybeScrolling = false; - allowScroll = true; - scrollYTo( - Math.abs(dy) > dp(16) ? - Math.round(fromScrollY) + (scrollY > fromScrollY ? 
1f : -1f) * (float) Math.ceil(Math.abs(fromScrollY - scrollY)) : - Math.round(fromScrollY), - () -> { - contentView.post(this::checkForNextLoading); - } - ); - } - return true; - } - } - } - if (hasSelection() && maybeScrolling) { - scrolling = false; - allTextsView.setTextIsSelectable(!noforwards); - maybeScrolling = false; - allowScroll = true; - scrollYTo(Math.round(fromScrollY)); - } - return super.dispatchTouchEvent(event); - } catch (Exception e) { - e.printStackTrace(); - return super.dispatchTouchEvent(event); - } - } - - private float openingT = 0f; - private ValueAnimator openingAnimator; - - @Override - protected void onCreate(Bundle savedInstanceState) { - super.onCreate(savedInstanceState); - - contentView.setPadding(0, 0, 0, 0); - setContentView(contentView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); - - Window window = getWindow(); - - window.setWindowAnimations(R.style.DialogNoAnimation); - WindowManager.LayoutParams params = window.getAttributes(); - params.width = ViewGroup.LayoutParams.MATCH_PARENT; - params.gravity = Gravity.TOP | Gravity.LEFT; - params.dimAmount = 0; - params.flags &= ~WindowManager.LayoutParams.FLAG_DIM_BEHIND; - params.flags |= WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM; - if (Build.VERSION.SDK_INT >= 21) { - params.flags |= - WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN | - WindowManager.LayoutParams.FLAG_LAYOUT_INSET_DECOR | - WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS; - } - params.flags |= WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN; - params.height = ViewGroup.LayoutParams.MATCH_PARENT; - window.setAttributes(params); - - int navigationbarColor = Theme.getColor(Theme.key_windowBackgroundWhite); - AndroidUtilities.setNavigationBarColor(window, navigationbarColor); - AndroidUtilities.setLightNavigationBar(window, AndroidUtilities.computePerceivedBrightness(navigationbarColor) > .721); - - container.forceLayout(); - } - - protected ColorDrawable backDrawable = new ColorDrawable(0xff000000) { - @Override - public void setAlpha(int alpha) { - super.setAlpha(alpha); - container.invalidate(); - } - }; - - @Override - public void show() { - super.show(); - - openAnimation(0); - openTo(1, true, true); - } - - private boolean dismissed = false; - - @Override - public void dismiss() { - if (dismissed) - return; - dismissed = true; - - openTo(0, true); - } - - private void openTo(float t, boolean priority) { - openTo(t, priority, false); - } - - private void openTo(float t) { - openTo(t, false); - } - - private float heightMaxPercent = .85f; - - private boolean fastHide = false; - private boolean openingAnimatorPriority = false; - - private void openTo(float t, boolean priority, boolean setAfter) { - final float T = Math.min(Math.max(t, 0), 1); - if (openingAnimatorPriority && !priority) { - return; - } - openingAnimatorPriority = priority; - if (openingAnimator != null) { - openingAnimator.cancel(); - } - openingAnimator = ValueAnimator.ofFloat(openingT, T); - backDrawable.setAlpha((int) (openingT * 51)); - openingAnimator.addUpdateListener(a -> { - openingT = (float) a.getAnimatedValue(); - container.invalidate(); - backDrawable.setAlpha((int) (openingT * 51)); - bulletinContainer.setTranslationY((1f - openingT) * Math.min(minHeight(), displayMetrics.heightPixels * heightMaxPercent)); - }); - if (T <= 0f) { - if (onDismiss != null) { - onDismiss.run(); - } - } - openingAnimator.addListener(new AnimatorListenerAdapter() { - @Override - public void 
onAnimationEnd(Animator animator) { - if (T <= 0f) { - dismissInternal(); - } else if (setAfter) { - allTextsView.setTextIsSelectable(!noforwards); - allTextsView.invalidate(); - scrollView.stopNestedScroll(); - openAnimation(T - 1f); - } - openingAnimatorPriority = false; - } - }); - openingAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); - openingAnimator.setDuration((long) (Math.abs(openingT - T) * (fastHide ? 200 : 380))); - openingAnimator.setStartDelay(setAfter ? 60 : 0); - openingAnimator.start(); - } - - public void dismissInternal() { - try { - super.dismiss(); - } catch (Exception e) { - FileLog.e(e); - } - } - - public String languageName(String locale) { - // sorry, no more vodka - if (locale == null || locale.equals("und") || locale.equals("auto")) { - return null; - } - LocaleController.LocaleInfo thisLanguageInfo = LocaleController.getInstance().getBuiltinLanguageByPlural(locale), - currentLanguageInfo = LocaleController.getInstance().getCurrentLocaleInfo(); - if (thisLanguageInfo == null) { - return null; - } - boolean isCurrentLanguageEnglish = currentLanguageInfo != null && "en".equals(currentLanguageInfo.pluralLangCode); - if (isCurrentLanguageEnglish) { - // trying to show this language in a language of the interface, but there are only names in english and its own - return thisLanguageInfo.nameEnglish; - } else { - return thisLanguageInfo.name; - } - } - - public void updateSourceLanguage() { - if (languageName(fromLanguage) != null) { - subtitleView.setAlpha(1); - if (!subtitleFromView.loaded) { - subtitleFromView.loaded(languageName(fromLanguage)); - } - } else if (loaded) { - subtitleView.animate().alpha(0).setDuration(150).start(); - } - } - - private ArrayList cutInBlocks(CharSequence full, int maxBlockSize) { - ArrayList blocks = new ArrayList<>(); - if (full == null) { - return blocks; - } - while (full.length() > maxBlockSize) { - String maxBlockStr = full.subSequence(0, maxBlockSize).toString(); - int n = -1; - if (n == -1) n = maxBlockStr.lastIndexOf("\n\n"); - if (n == -1) n = maxBlockStr.lastIndexOf("\n"); - if (n == -1) n = maxBlockStr.lastIndexOf(". 
"); - if (n == -1) n = Math.min(maxBlockStr.length(), maxBlockSize); - blocks.add(full.subSequence(0, n + 1)); - full = full.subSequence(n + 1, full.length()); - } - if (full.length() > 0) { - blocks.add(full); - } - return blocks; - } - - private boolean loading = false; - private boolean loaded = false; - - private boolean fetchNext() { - if (loading) { - return false; - } - loading = true; - - if (blockIndex >= textBlocks.size()) { - return false; - } - - fetchTranslation( - textBlocks.get(blockIndex), - Math.min((blockIndex + 1) * 1000, 3500), - (String translatedText, String sourceLanguage) -> { - loaded = true; - Spannable spannable = new SpannableStringBuilder(translatedText); - try { - MessageObject.addUrlsByPattern(false, spannable, false, 0, 0, true); - URLSpan[] urlSpans = spannable.getSpans(0, spannable.length(), URLSpan.class); - for (int i = 0; i < urlSpans.length; ++i) { - URLSpan urlSpan = urlSpans[i]; - int start = spannable.getSpanStart(urlSpan), - end = spannable.getSpanEnd(urlSpan); - if (start == -1 || end == -1) { - continue; - } - spannable.removeSpan(urlSpan); - spannable.setSpan( - new ClickableSpan() { - @Override - public void onClick(@NonNull View view) { - if (onLinkPress != null) { - if (onLinkPress.run(urlSpan)) { - fastHide = true; - dismiss(); - } - } else { - AlertsCreator.showOpenUrlAlert(fragment, urlSpan.getURL(), false, false); - } - } - - @Override - public void updateDrawState(@NonNull TextPaint ds) { - int alpha = Math.min(ds.getAlpha(), ds.getColor() >> 24 & 0xff); - if (!(urlSpan instanceof URLSpanNoUnderline)) { - ds.setUnderlineText(true); - } - ds.setColor(Theme.getColor(Theme.key_dialogTextLink)); - ds.setAlpha(alpha); - } - }, - start, end, - Spanned.SPAN_EXCLUSIVE_EXCLUSIVE - ); - } - - AndroidUtilities.addLinks(spannable, Linkify.WEB_URLS); - urlSpans = spannable.getSpans(0, spannable.length(), URLSpan.class); - for (int i = 0; i < urlSpans.length; ++i) { - URLSpan urlSpan = urlSpans[i]; - int start = spannable.getSpanStart(urlSpan), - end = spannable.getSpanEnd(urlSpan); - if (start == -1 || end == -1) { - continue; - } - spannable.removeSpan(urlSpan); - spannable.setSpan( - new ClickableSpan() { - @Override - public void onClick(@NonNull View view) { - AlertsCreator.showOpenUrlAlert(fragment, urlSpan.getURL(), false, false); - } - - @Override - public void updateDrawState(@NonNull TextPaint ds) { - int alpha = Math.min(ds.getAlpha(), ds.getColor() >> 24 & 0xff); - if (!(urlSpan instanceof URLSpanNoUnderline)) - ds.setUnderlineText(true); - ds.setColor(Theme.getColor(Theme.key_dialogTextLink)); - ds.setAlpha(alpha); - } - }, - start, end, - Spanned.SPAN_EXCLUSIVE_EXCLUSIVE - ); - } - - spannable = (Spannable) Emoji.replaceEmoji(spannable, allTextsView.getPaint().getFontMetricsInt(), dp(14), false); - } catch (Exception e) { - e.printStackTrace(); - } - - SpannableStringBuilder allTextsBuilder = new SpannableStringBuilder(allTexts == null ? 
"" : allTexts); - if (blockIndex != 0) { - allTextsBuilder.append("\n"); - } - allTextsBuilder.append(spannable); - allTexts = allTextsBuilder; - textsView.setWholeText(allTexts); - - LoadingTextView2 block = textsView.getBlockAt(blockIndex); - if (block != null) { - block.loaded(spannable, () -> contentView.post(this::checkForNextLoading)); - } - - if (sourceLanguage != null) { - fromLanguage = sourceLanguage; - } - updateSourceLanguage(); - - if (blockIndex == 0 && AndroidUtilities.isAccessibilityScreenReaderEnabled()) { - if (allTextsView != null) { - allTextsView.requestFocus(); - } - } - - blockIndex++; - loading = false; - }, - (boolean rateLimit) -> { - if (rateLimit) { - Toast.makeText(getContext(), LocaleController.getString("TranslationFailedAlert1", R.string.TranslationFailedAlert1), Toast.LENGTH_SHORT).show(); - } else { - Toast.makeText(getContext(), LocaleController.getString("TranslationFailedAlert2", R.string.TranslationFailedAlert2), Toast.LENGTH_SHORT).show(); - } - - if (blockIndex == 0) { - dismiss(); - } - } - ); - return true; - } - - private boolean checkForNextLoading() { - if (scrollAtBottom()) { - fetchNext(); - return true; - } - return false; - } - - public interface OnTranslationSuccess { - public void run(String translated, String sourceLanguage); - } - - public interface OnTranslationFail { - public void run(boolean rateLimit); - } - - private void fetchTranslation(CharSequence text, long minDuration, OnTranslationSuccess onSuccess, OnTranslationFail onFail) { - final long start = System.currentTimeMillis(); - Utilities.Callback onDone = (string) -> { - AndroidUtilities.runOnUIThread(() -> { - if (string != null) { - onSuccess.run(string, null); - } else { - onFail.run(false); - } - }, Math.max((System.currentTimeMillis() - start) - minDuration, 1)); - }; - if (peer != null) { - translateText(currentAccount, peer, msgId, fromLanguage, toLanguage, onDone); - } else if (text != null) { - translateText(currentAccount, text.toString(), fromLanguage, toLanguage, onDone); - } else { - onFail.run(false); - } - } - - private static void translateText(int currentAccount, TLRPC.InputPeer peer, int msg_id, String from_lang, String to_lang, Utilities.Callback onDone) { - if (onDone == null) { - return; - } - if (from_lang == null || from_lang.equals("und")) { - from_lang = null; - } - - TLRPC.TL_messages_translateText req = new TLRPC.TL_messages_translateText(); - req.peer = peer; - req.msg_id = msg_id; - req.flags |= 1; - if (from_lang != null) { - req.from_lang = from_lang; - req.flags |= 4; - } - req.to_lang = to_lang; - - try { - ConnectionsManager.getInstance(currentAccount).sendRequest(req, (res, err) -> { - if (res instanceof TLRPC.TL_messages_translateResultText) { - onDone.run(((TLRPC.TL_messages_translateResultText) res).text); - return; - } - onDone.run(null); - }); - } catch (Exception e) { - FileLog.e(e); - } - } - - private static void translateText(int currentAccount, String text, String from_lang, String to_lang, Utilities.Callback onDone) { - if (onDone == null) { - return; - } - if (from_lang == null || from_lang.equals("und")) { - from_lang = null; - } - - TLRPC.TL_messages_translateText req = new TLRPC.TL_messages_translateText(); - req.flags |= 2; - req.text = text; - if (from_lang != null) { - req.from_lang = from_lang; - req.flags |= 4; - } - req.to_lang = to_lang; - - try { - ConnectionsManager.getInstance(currentAccount).sendRequest(req, (res, err) -> { - if (res instanceof TLRPC.TL_messages_translateResultText) { - 
onDone.run(((TLRPC.TL_messages_translateResultText) res).text); - return; - } - onDone.run(null); - }); - } catch (Exception e) { - FileLog.e(e); - } - } - - public static TranslateAlert showAlert(Context context, BaseFragment fragment, int currentAccount, TLRPC.InputPeer peer, int msgId, String fromLanguage, String toLanguage, CharSequence text, boolean noforwards, OnLinkPress onLinkPress, Runnable onDismiss) { - TranslateAlert alert = new TranslateAlert(fragment, context, currentAccount, peer, msgId, fromLanguage, toLanguage, text, noforwards, onLinkPress, onDismiss); - if (fragment != null) { - if (fragment.getParentActivity() != null) { - fragment.showDialog(alert); - } - } else { - alert.show(); - } - return alert; - } - - public static TranslateAlert showAlert(Context context, BaseFragment fragment, int currentAccount, String fromLanguage, String toLanguage, CharSequence text, boolean noforwards, OnLinkPress onLinkPress, Runnable onDismiss) { - TranslateAlert alert = new TranslateAlert(fragment, context, currentAccount, fromLanguage, toLanguage, text, noforwards, onLinkPress, onDismiss); - if (fragment != null) { - if (fragment.getParentActivity() != null) { - fragment.showDialog(alert); - } - } else { - alert.show(); - } - return alert; - } - - private static final int MOST_SPEC = View.MeasureSpec.makeMeasureSpec(999999, View.MeasureSpec.AT_MOST); - - public static class TextBlocksLayout extends ViewGroup { - - private TextView wholeTextView; - private final int fontSize; - private final int textColor; - - public TextBlocksLayout(Context context, int fontSize, int textColor, TextView wholeTextView) { - super(context); - - this.fontSize = fontSize; - this.textColor = textColor; - - if (wholeTextView != null) { - wholeTextView.setPadding(LoadingTextView2.paddingHorizontal, LoadingTextView2.paddingVertical, LoadingTextView2.paddingHorizontal, LoadingTextView2.paddingVertical); - addView(this.wholeTextView = wholeTextView); - } - } - - public void setWholeText(CharSequence wholeText) { - // having focus on that text view can cause jumping scroll to the top after loading a new block - // TODO(dkaraush): preserve selection after setting a new text - wholeTextView.clearFocus(); - wholeTextView.setText(wholeText); - } - - public LoadingTextView2 addBlock(CharSequence fromText) { - LoadingTextView2 textView = new LoadingTextView2(getContext(), fromText, getBlocksCount() > 0, fontSize, textColor); - textView.setFocusable(false); - addView(textView); - if (wholeTextView != null) { - wholeTextView.bringToFront(); - } - return textView; - } - - public int getBlocksCount() { - return getChildCount() - (wholeTextView != null ? 
1 : 0); - } - - public LoadingTextView2 getBlockAt(int i) { - View child = getChildAt(i); - if (child instanceof LoadingTextView2) { - return (LoadingTextView2) child; - } - return null; - } - - public LoadingTextView2 getFirstUnloadedBlock() { - final int count = getBlocksCount(); - for (int i = 0; i < count; ++i) { - LoadingTextView2 block = getBlockAt(i); - if (block != null && !block.loaded) - return block; - } - return null; - } - - private static final int gap = -LoadingTextView2.paddingVertical * 4 + dp(.48f); - - public int height() { - int height = 0; - final int count = getBlocksCount(); - for (int i = 0; i < count; ++i) { - height += getBlockAt(i).height(); - } - return getPaddingTop() + height + getPaddingBottom(); - } - - protected void onHeightUpdated(int height) { - } - - public void updateHeight() { - boolean updated; - int newHeight = height(); - FrameLayout.LayoutParams lp = (FrameLayout.LayoutParams) getLayoutParams(); - if (lp == null) { - lp = new FrameLayout.LayoutParams(LayoutParams.MATCH_PARENT, newHeight); - updated = true; - } else { - updated = lp.height != newHeight; - lp.height = newHeight; - } - - if (updated) { - this.setLayoutParams(lp); - onHeightUpdated(newHeight); - } - } - - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - final int count = getBlocksCount(); - final int innerWidthMeasureSpec = MeasureSpec.makeMeasureSpec( - MeasureSpec.getSize(widthMeasureSpec) - getPaddingLeft() - getPaddingRight(), - MeasureSpec.getMode(widthMeasureSpec) - ); - for (int i = 0; i < count; ++i) { - LoadingTextView2 block = getBlockAt(i); - block.measure(innerWidthMeasureSpec, MOST_SPEC); - } - super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(height(), MeasureSpec.EXACTLY)); - } - - @Override - protected void onLayout(boolean changed, int l, int t, int r, int b) { - int y = 0; - final int count = getBlocksCount(); - for (int i = 0; i < count; ++i) { - LoadingTextView2 block = getBlockAt(i); - final int blockHeight = block.height(); - final int translationY = i > 0 ? 
gap : 0; - block.layout(getPaddingLeft(), getPaddingTop() + y + translationY, r - l - getPaddingRight(), getPaddingTop() + y + blockHeight + translationY); - y += blockHeight; - if (i > 0 && i < count - 1) { - y += gap; - } - } - - wholeTextView.measure( - MeasureSpec.makeMeasureSpec(r - l - getPaddingLeft() - getPaddingRight(), MeasureSpec.EXACTLY), - MeasureSpec.makeMeasureSpec(b - t - getPaddingTop() - getPaddingBottom(), MeasureSpec.EXACTLY) - ); - wholeTextView.layout( - getPaddingLeft(), - getPaddingTop(), - (r - l) - getPaddingRight(), - getPaddingTop() + wholeTextView.getMeasuredHeight() - ); - } - } - - public static class InlineLoadingTextView extends ViewGroup { - - public static final int paddingHorizontal = dp(6), - paddingVertical = 0; - - - public boolean showLoadingText = true; - - private final TextView fromTextView; - private final TextView toTextView; - - private final ValueAnimator loadingAnimator; - - private final long start = SystemClock.elapsedRealtime(); - - public InlineLoadingTextView(Context context, CharSequence fromText, int fontSize, int textColor) { - super(context); - - setPadding(paddingHorizontal, paddingVertical, paddingHorizontal, paddingVertical); - setClipChildren(false); - setWillNotDraw(false); - - fromTextView = new TextView(context) { - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - super.onMeasure(MOST_SPEC, MOST_SPEC); - } - }; - fromTextView.setTextSize(TypedValue.COMPLEX_UNIT_PX, fontSize); - fromTextView.setTextColor(textColor); - fromTextView.setText(fromText); - fromTextView.setLines(1); - fromTextView.setMaxLines(1); - fromTextView.setSingleLine(true); - fromTextView.setEllipsize(null); - fromTextView.setFocusable(false); - fromTextView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO); - addView(fromTextView); - - toTextView = new TextView(context) { - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - super.onMeasure(MOST_SPEC, MOST_SPEC); - } - }; - toTextView.setTextSize(TypedValue.COMPLEX_UNIT_PX, fontSize); - toTextView.setTextColor(textColor); - toTextView.setLines(1); - toTextView.setMaxLines(1); - toTextView.setSingleLine(true); - toTextView.setEllipsize(null); - toTextView.setFocusable(true); - addView(toTextView); - - int c1 = Theme.getColor(Theme.key_dialogBackground), - c2 = Theme.getColor(Theme.key_dialogBackgroundGray); - LinearGradient gradient = new LinearGradient(0, 0, gradientWidth, 0, new int[]{c1, c2, c1}, new float[]{0, 0.67f, 1f}, Shader.TileMode.REPEAT); - loadingPaint.setShader(gradient); - - loadingAnimator = ValueAnimator.ofFloat(0f, 1f); - loadingAnimator.addUpdateListener(a -> invalidate()); - loadingAnimator.setDuration(Long.MAX_VALUE); - loadingAnimator.start(); - } - - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - fromTextView.measure(0, 0); - toTextView.measure(0, 0); - super.onMeasure( - MeasureSpec.makeMeasureSpec( - (int) AndroidUtilities.lerp(fromTextView.getMeasuredWidth(), toTextView.getMeasuredWidth(), loadingT) + getPaddingLeft() + getPaddingRight(), - MeasureSpec.EXACTLY - ), - MeasureSpec.makeMeasureSpec( - Math.max(fromTextView.getMeasuredHeight(), toTextView.getMeasuredHeight()), - MeasureSpec.EXACTLY - ) - ); - } - - @Override - protected void onLayout(boolean changed, int l, int t, int r, int b) { - fromTextView.layout(getPaddingLeft(), getPaddingTop(), getPaddingLeft() + fromTextView.getMeasuredWidth(), getPaddingTop() + fromTextView.getMeasuredHeight()); - 
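The removed `InlineLoadingTextView` crossfades between the untranslated and translated labels by interpolating its width with the load progress `loadingT`. A standalone sketch of that width interpolation (plain Java, no Android dependencies):

```java
// Standalone sketch of the width crossfade used by the removed InlineLoadingTextView:
// the view's width is a linear interpolation between the "from" and "to" text widths,
// driven by the load progress loadingT in [0, 1], plus the horizontal padding.
public class LoadingWidthSketch {
    static float lerp(float a, float b, float t) {
        return a + (b - a) * t;
    }

    static int interpolatedWidth(int fromTextWidth, int toTextWidth, float loadingT, int horizontalPadding) {
        return (int) lerp(fromTextWidth, toTextWidth, loadingT) + 2 * horizontalPadding;
    }

    public static void main(String[] args) {
        // e.g. a 120px source label animating toward a 180px translation, 6px padding per side
        for (float t = 0f; t <= 1f; t += 0.25f) {
            System.out.println("t=" + t + " width=" + interpolatedWidth(120, 180, t, 6));
        }
    }
}
```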
toTextView.layout(getPaddingLeft(), getPaddingTop(), getPaddingLeft() + toTextView.getMeasuredWidth(), getPaddingTop() + toTextView.getMeasuredHeight()); - updateWidth(); - } - - private void updateWidth() { - boolean updated; - - int newWidth = (int) AndroidUtilities.lerp(fromTextView.getMeasuredWidth(), toTextView.getMeasuredWidth(), loadingT) + getPaddingLeft() + getPaddingRight(); - int newHeight = Math.max(fromTextView.getMeasuredHeight(), toTextView.getMeasuredHeight()); - LayoutParams lp = getLayoutParams(); - if (lp == null) { - lp = new LinearLayout.LayoutParams(newWidth, newHeight); - updated = true; - } else { - updated = lp.width != newWidth || lp.height != newHeight; - lp.width = newWidth; - lp.height = newHeight; - } - - if (updated) - setLayoutParams(lp); - } - - protected void onLoadAnimation(float t) { - } - - public boolean loaded = false; - public float loadingT = 0f; - private ValueAnimator loadedAnimator = null; - - public void loaded(CharSequence loadedText) { - loaded(loadedText, 350, null); - } - - public void loaded(CharSequence loadedText, Runnable onLoadEnd) { - loaded(loadedText, 350, onLoadEnd); - } - - public void loaded(CharSequence loadedText, long duration, Runnable onLoadEnd) { - loaded = true; - toTextView.setText(loadedText); - - if (loadingAnimator.isRunning()) { - loadingAnimator.cancel(); - } - if (loadedAnimator == null) { - loadedAnimator = ValueAnimator.ofFloat(0f, 1f); - loadedAnimator.addUpdateListener(a -> { - loadingT = (float) a.getAnimatedValue(); - updateWidth(); - invalidate(); - onLoadAnimation(loadingT); - }); - loadedAnimator.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - if (onLoadEnd != null) - onLoadEnd.run(); - } - }); - loadedAnimator.setDuration(duration); - loadedAnimator.setInterpolator(CubicBezierInterpolator.EASE_BOTH); - loadedAnimator.start(); - } - } - - public void set(CharSequence loadedText) { - loaded = true; - toTextView.setText(loadedText); - - if (loadingAnimator.isRunning()) { - loadingAnimator.cancel(); - } - if (loadedAnimator != null) { - loadedAnimator.cancel(); - loadedAnimator = null; - } - loadingT = 1f; - requestLayout(); - updateWidth(); - invalidate(); - onLoadAnimation(1f); - } - - private final RectF rect = new RectF(); - private final Path inPath = new Path(), - tempPath = new Path(), - loadingPath = new Path(), - shadePath = new Path(); - private final Paint loadingPaint = new Paint(); - private final float gradientWidth = dp(350f); - - @Override - protected void onDraw(Canvas canvas) { - float w = getWidth(), h = getHeight(); - - float cx = LocaleController.isRTL ? 
Math.max(w / 2f, w - 8f) : Math.min(w / 2f, 8f), - cy = Math.min(h / 2f, 8f), - R = (float) Math.sqrt(Math.max( - Math.max(cx * cx + cy * cy, (w - cx) * (w - cx) + cy * cy), - Math.max(cx * cx + (h - cy) * (h - cy), (w - cx) * (w - cx) + (h - cy) * (h - cy)) - )), - r = loadingT * R; - inPath.reset(); - inPath.addCircle(cx, cy, r, Path.Direction.CW); - - canvas.save(); - canvas.clipPath(inPath, Region.Op.DIFFERENCE); - - loadingPaint.setAlpha((int) ((1f - loadingT) * 255)); - float dx = gradientWidth - (((SystemClock.elapsedRealtime() - start) / 1000f * gradientWidth) % gradientWidth); - shadePath.reset(); - shadePath.addRect(0, 0, w, h, Path.Direction.CW); - - loadingPath.reset(); - rect.set(0, 0, w, h); - loadingPath.addRoundRect(rect, dp(4), dp(4), Path.Direction.CW); - canvas.clipPath(loadingPath); - canvas.translate(-dx, 0); - shadePath.offset(dx, 0f, tempPath); - canvas.drawPath(tempPath, loadingPaint); - canvas.translate(dx, 0); - canvas.restore(); - - if (showLoadingText && fromTextView != null) { - canvas.save(); - rect.set(0, 0, w, h); - canvas.clipPath(inPath, Region.Op.DIFFERENCE); - canvas.translate(paddingHorizontal, paddingVertical); - canvas.saveLayerAlpha(rect, (int) (255 * .08f), Canvas.ALL_SAVE_FLAG); - fromTextView.draw(canvas); - canvas.restore(); - canvas.restore(); - } - - if (toTextView != null) { - canvas.save(); - canvas.clipPath(inPath); - canvas.translate(paddingHorizontal, paddingVertical); - canvas.saveLayerAlpha(rect, (int) (255 * loadingT), Canvas.ALL_SAVE_FLAG); - toTextView.draw(canvas); - if (loadingT < 1f) { - canvas.restore(); - } - canvas.restore(); - } - } - - @Override - protected boolean drawChild(Canvas canvas, View child, long drawingTime) { - return false; - } - } - - public static class LoadingTextView2 extends ViewGroup { - - public static final int paddingHorizontal = dp(6), - paddingVertical = dp(1.5f); - - public boolean showLoadingText = true; - - private final TextView fromTextView; - private final TextView toTextView; - - private final boolean scaleFromZero; - private final ValueAnimator loadingAnimator; - - private final long start = SystemClock.elapsedRealtime(); - private float scaleT = 1f; - - public LoadingTextView2(Context context, CharSequence fromText, boolean scaleFromZero, int fontSize, int textColor) { - super(context); - - setPadding(paddingHorizontal, paddingVertical, paddingHorizontal, paddingVertical); - setClipChildren(false); - setWillNotDraw(false); - setFocusable(false); - - fromTextView = new TextView(context) { - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - super.onMeasure(widthMeasureSpec, MOST_SPEC); - } - }; - fromTextView.setTextSize(TypedValue.COMPLEX_UNIT_PX, fontSize); - fromTextView.setTextColor(textColor); - fromTextView.setText(fromText); - fromTextView.setLines(0); - fromTextView.setMaxLines(0); - fromTextView.setSingleLine(false); - fromTextView.setEllipsize(null); - fromTextView.setFocusable(false); - fromTextView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO); - addView(fromTextView); - - toTextView = new TextView(context) { - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - super.onMeasure(widthMeasureSpec, MOST_SPEC); - } - }; - toTextView.setTextSize(TypedValue.COMPLEX_UNIT_PX, fontSize); - toTextView.setTextColor(textColor); - toTextView.setLines(0); - toTextView.setMaxLines(0); - toTextView.setSingleLine(false); - toTextView.setEllipsize(null); - toTextView.setFocusable(false); - 
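Both removed loading views reveal the translated text through a circle that grows from a point near the top corner; the animation is complete once the radius reaches the farthest corner of the view. A standalone sketch of that radius computation:

```java
// Standalone sketch of the circular-reveal radius used by the removed loading views:
// the circle is centered near the top-left corner (top-right in RTL) and must reach
// the farthest corner of the view for the reveal to cover everything.
public class RevealRadiusSketch {
    static float farthestCornerDistance(float cx, float cy, float w, float h) {
        float topLeft     = (float) Math.hypot(cx, cy);
        float topRight    = (float) Math.hypot(w - cx, cy);
        float bottomLeft  = (float) Math.hypot(cx, h - cy);
        float bottomRight = (float) Math.hypot(w - cx, h - cy);
        return Math.max(Math.max(topLeft, topRight), Math.max(bottomLeft, bottomRight));
    }

    public static void main(String[] args) {
        float w = 320f, h = 48f;
        float cx = Math.min(w / 2f, 8f);   // LTR: a point 8px from the left edge
        float cy = Math.min(h / 2f, 8f);   // 8px from the top edge
        float fullRadius = farthestCornerDistance(cx, cy, w, h);
        for (float loadingT = 0f; loadingT <= 1f; loadingT += 0.5f) {
            System.out.println("loadingT=" + loadingT + " radius=" + loadingT * fullRadius);
        }
    }
}
```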
toTextView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO); - addView(toTextView); - - int c1 = Theme.getColor(Theme.key_dialogBackground), - c2 = Theme.getColor(Theme.key_dialogBackgroundGray); - LinearGradient gradient = new LinearGradient(0, 0, gradientWidth, 0, new int[]{c1, c2, c1}, new float[]{0, 0.67f, 1f}, Shader.TileMode.REPEAT); - loadingPaint.setShader(gradient); - - this.scaleFromZero = scaleFromZero; - loadingAnimator = ValueAnimator.ofFloat(0f, 1f); - if (scaleFromZero) - scaleT = 0; - loadingAnimator.addUpdateListener(a -> { - invalidate(); - if (scaleFromZero) { - boolean scaleTWasNoFull = scaleT < 1f; - scaleT = Math.min(1, (SystemClock.elapsedRealtime() - start) / 400f); - if (scaleTWasNoFull) { - updateHeight(); - } - } - }); - loadingAnimator.setDuration(Long.MAX_VALUE); - loadingAnimator.start(); - } - - public int innerHeight() { - return (int) (AndroidUtilities.lerp(fromTextView.getMeasuredHeight(), toTextView.getMeasuredHeight(), loadingT) * scaleT); - } - - public int height() { - return getPaddingTop() + innerHeight() + getPaddingBottom(); - } - - private void updateHeight() { - ViewParent parent = getParent(); - if (parent instanceof TextBlocksLayout) { - ((TextBlocksLayout) parent).updateHeight(); - } - } - - public boolean loaded = false; - private float loadingT = 0f; - private ValueAnimator loadedAnimator = null; - - public void loaded(CharSequence loadedText, Runnable onLoadEnd) { - loaded = true; - toTextView.setText(loadedText); - layout(); - - if (loadingAnimator.isRunning()) { - loadingAnimator.cancel(); - } - if (loadedAnimator == null) { - loadedAnimator = ValueAnimator.ofFloat(0f, 1f); - loadedAnimator.addUpdateListener(a -> { - loadingT = (float) a.getAnimatedValue(); - updateHeight(); - invalidate(); - }); - loadedAnimator.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - if (onLoadEnd != null) - onLoadEnd.run(); - } - }); - loadedAnimator.setDuration(350); - loadedAnimator.setInterpolator(CubicBezierInterpolator.EASE_BOTH); - loadedAnimator.start(); - } - } - - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - int width = MeasureSpec.getSize(widthMeasureSpec), - innerWidth = width - getPaddingLeft() - getPaddingRight(); - if (fromTextView.getMeasuredWidth() <= 0 || lastWidth != innerWidth) { - measureChild(fromTextView, innerWidth); - updateLoadingPath(); - } - if (toTextView.getMeasuredWidth() <= 0 || lastWidth != innerWidth) { - measureChild(toTextView, innerWidth); - } - lastWidth = innerWidth; - super.onMeasure( - MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), - MeasureSpec.makeMeasureSpec(height(), MeasureSpec.EXACTLY) - ); - } - - int lastWidth = 0; - - @Override - protected void onLayout(boolean changed, int l, int t, int r, int b) { - layout(r - l - getPaddingLeft() - getPaddingRight(), true); - } - - private void layout(int width, boolean force) { - if (lastWidth != width || force) { - layout(lastWidth = width); - } - } - - private void layout(int width) { - measureChild(fromTextView, width); - layoutChild(fromTextView, width); - updateLoadingPath(); - measureChild(toTextView, width); - layoutChild(toTextView, width); - updateHeight(); - } - - private void layout() { - layout(lastWidth); - } - - private void measureChild(View view, int width) { - view.measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MOST_SPEC); - } - - private void layoutChild(View view, int width) { - view.layout(getPaddingLeft(), 
getPaddingTop(), getPaddingLeft() + width, getPaddingTop() + view.getMeasuredHeight()); - } - - private RectF fetchedPathRect = new RectF(); - - private void updateLoadingPath() { - if (fromTextView != null && fromTextView.getMeasuredWidth() > 0) { - loadingPath.reset(); - Layout loadingLayout = fromTextView.getLayout(); - if (loadingLayout != null) { - CharSequence text = loadingLayout.getText(); - final int lineCount = loadingLayout.getLineCount(); - for (int i = 0; i < lineCount; ++i) { - float s = loadingLayout.getLineLeft(i), - e = loadingLayout.getLineRight(i), - l = Math.min(s, e), - r = Math.max(s, e); - int start = loadingLayout.getLineStart(i), - end = loadingLayout.getLineEnd(i); - boolean hasNonEmptyChar = false; - for (int j = start; j < end; ++j) { - char c = text.charAt(j); - if (c != '\n' && c != '\t' && c != ' ') { - hasNonEmptyChar = true; - break; - } - } - if (!hasNonEmptyChar) - continue; - fetchedPathRect.set( - l - paddingHorizontal, - loadingLayout.getLineTop(i) - paddingVertical, - r + paddingHorizontal, - loadingLayout.getLineBottom(i) + paddingVertical - ); - loadingPath.addRoundRect(fetchedPathRect, dp(4), dp(4), Path.Direction.CW); - } - } - } - } - - private final RectF rect = new RectF(); - private final Path inPath = new Path(), - tempPath = new Path(), - loadingPath = new Path(), - shadePath = new Path(); - private final Paint loadingPaint = new Paint(); - private final float gradientWidth = dp(350f); - - @Override - protected void onDraw(Canvas canvas) { - float w = getWidth(), h = getHeight(); - - float cx = LocaleController.isRTL ? Math.max(w / 2f, w - 8f) : Math.min(w / 2f, 8f), - cy = Math.min(h / 2f, 8f), - R = (float) Math.sqrt(Math.max( - Math.max(cx * cx + cy * cy, (w - cx) * (w - cx) + cy * cy), - Math.max(cx * cx + (h - cy) * (h - cy), (w - cx) * (w - cx) + (h - cy) * (h - cy)) - )), - r = loadingT * R; - inPath.reset(); - inPath.addCircle(cx, cy, r, Path.Direction.CW); - - canvas.save(); - canvas.clipPath(inPath, Region.Op.DIFFERENCE); - - loadingPaint.setAlpha((int) ((1f - loadingT) * 255)); - float dx = gradientWidth - (((SystemClock.elapsedRealtime() - start) / 1000f * gradientWidth) % gradientWidth); - shadePath.reset(); - shadePath.addRect(0, 0, w, h, Path.Direction.CW); - - canvas.translate(paddingHorizontal, paddingVertical); - canvas.clipPath(loadingPath); - canvas.translate(-paddingHorizontal, -paddingVertical); - canvas.translate(-dx, 0); - shadePath.offset(dx, 0f, tempPath); - canvas.drawPath(tempPath, loadingPaint); - canvas.translate(dx, 0); - canvas.restore(); - - if (showLoadingText && fromTextView != null) { - canvas.save(); - rect.set(0, 0, w, h); - canvas.clipPath(inPath, Region.Op.DIFFERENCE); - canvas.translate(paddingHorizontal, paddingVertical); - canvas.saveLayerAlpha(rect, (int) (255 * .08f), Canvas.ALL_SAVE_FLAG); - fromTextView.draw(canvas); - canvas.restore(); - canvas.restore(); - } - - if (toTextView != null) { - canvas.save(); - canvas.clipPath(inPath); - canvas.translate(paddingHorizontal, paddingVertical); - canvas.saveLayerAlpha(rect, (int) (255 * loadingT), Canvas.ALL_SAVE_FLAG); - toTextView.draw(canvas); - if (loadingT < 1f) { - canvas.restore(); - } - canvas.restore(); - } - } - - @Override - protected boolean drawChild(Canvas canvas, View child, long drawingTime) { - return false; - } - } -} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/TranslateAlert2.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/TranslateAlert2.java new file mode 100644 index 0000000000..8e97508c10 --- 
/dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/TranslateAlert2.java @@ -0,0 +1,1161 @@ +package org.telegram.ui.Components; + +import static org.telegram.messenger.AndroidUtilities.dp; +import static org.telegram.messenger.AndroidUtilities.dpf2; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Paint; +import android.graphics.Path; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.Rect; +import android.graphics.drawable.Drawable; +import android.os.Build; +import android.text.Spannable; +import android.text.SpannableStringBuilder; +import android.text.Spanned; +import android.text.TextPaint; +import android.text.TextUtils; +import android.text.style.ClickableSpan; +import android.text.style.URLSpan; +import android.util.TypedValue; +import android.view.Gravity; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.view.WindowManager; +import android.view.animation.LinearInterpolator; +import android.widget.FrameLayout; +import android.widget.ImageView; +import android.widget.LinearLayout; +import android.widget.TextView; + +import androidx.annotation.NonNull; +import androidx.core.content.ContextCompat; +import androidx.core.math.MathUtils; +import androidx.recyclerview.widget.DefaultItemAnimator; +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.RecyclerView; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.Emoji; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.MessageObject; +import org.telegram.messenger.MessagesController; +import org.telegram.messenger.NotificationCenter; +import org.telegram.messenger.R; +import org.telegram.messenger.SharedConfig; +import org.telegram.messenger.TranslateController; +import org.telegram.messenger.Utilities; +import org.telegram.messenger.XiaomiUtilities; +import org.telegram.tgnet.ConnectionsManager; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.ActionBarMenuSubItem; +import org.telegram.ui.ActionBar.ActionBarPopupWindow; +import org.telegram.ui.ActionBar.BaseFragment; +import org.telegram.ui.ActionBar.BottomSheet; +import org.telegram.ui.ActionBar.Theme; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Locale; + +public class TranslateAlert2 extends BottomSheet implements NotificationCenter.NotificationCenterDelegate { + + private Integer reqId; + private CharSequence reqText; + private ArrayList reqMessageEntities; + private TLRPC.InputPeer reqPeer; + private int reqMessageId; + + private String fromLanguage, toLanguage; + private String prevToLanguage; + + private HeaderView headerView; + private LoadingTextView loadingTextView; + private FrameLayout textViewContainer; + private LinkSpanDrawable.LinksTextView textView; + + private boolean sheetTopNotAnimate; + private RecyclerListView listView; + private LinearLayoutManager layoutManager; + private PaddedAdapter adapter; + + private View buttonShadowView; + private FrameLayout buttonView; + private TextView buttonTextView; + + private BaseFragment fragment; + private Utilities.CallbackReturn onLinkPress; + private boolean firstTranslation = true; + + public TranslateAlert2( + Context context, + String fromLanguage, String toLanguage, + CharSequence text, ArrayList entities, + Theme.ResourcesProvider resourcesProvider + ) { + this(context, fromLanguage, 
toLanguage, text, entities, null, 0, resourcesProvider); + } + + private TranslateAlert2( + Context context, + String fromLanguage, String toLanguage, + CharSequence text, ArrayList entities, TLRPC.InputPeer peer, int messageId, + Theme.ResourcesProvider resourcesProvider + ) { + super(context, false, resourcesProvider); + + backgroundPaddingLeft = 0; + + fixNavigationBar(); + + this.reqText = text; + this.reqPeer = peer; + this.reqMessageId = messageId; + + this.fromLanguage = fromLanguage; + this.toLanguage = toLanguage; + + containerView = new ContainerView(context); + sheetTopAnimated = new AnimatedFloat(containerView, 320, CubicBezierInterpolator.EASE_OUT_QUINT); + + loadingTextView = new LoadingTextView(context); + loadingTextView.setPadding(dp(22), dp(12), dp(22), dp(6)); + loadingTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, SharedConfig.fontSize); + loadingTextView.setTextColor(getThemedColor(Theme.key_dialogTextBlack)); + loadingTextView.setLinkTextColor(Theme.multAlpha(getThemedColor(Theme.key_dialogTextBlack), .2f)); + loadingTextView.setText(Emoji.replaceEmoji(text == null ? "" : text.toString(), loadingTextView.getPaint().getFontMetricsInt(), true)); + + textViewContainer = new FrameLayout(context) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), heightMeasureSpec); + } + }; + textView = new LinkSpanDrawable.LinksTextView(context, resourcesProvider); + textView.setDisablePaddingsOffsetY(true); + textView.setPadding(dp(22), dp(12), dp(22), dp(6)); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, SharedConfig.fontSize); + textView.setTextColor(getThemedColor(Theme.key_dialogTextBlack)); + textView.setLinkTextColor(getThemedColor(Theme.key_chat_messageLinkIn)); + textView.setTextIsSelectable(true); + textView.setHighlightColor(getThemedColor(Theme.key_chat_inTextSelectionHighlight)); + int handleColor = getThemedColor(Theme.key_chat_TextSelectionCursor); + try { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q && !XiaomiUtilities.isMIUI()) { + Drawable left = textView.getTextSelectHandleLeft(); + left.setColorFilter(handleColor, PorterDuff.Mode.SRC_IN); + textView.setTextSelectHandleLeft(left); + + Drawable right = textView.getTextSelectHandleRight(); + right.setColorFilter(handleColor, PorterDuff.Mode.SRC_IN); + textView.setTextSelectHandleRight(right); + } + } catch (Exception e) {} + textViewContainer.addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + + listView = new RecyclerListView(context) { + @Override + public boolean dispatchTouchEvent(MotionEvent ev) { + if (ev.getAction() == MotionEvent.ACTION_DOWN && ev.getY() < getSheetTop() - getTop()) { + dismiss(); + return true; + } + return super.dispatchTouchEvent(ev); + } + + @Override + protected boolean onRequestFocusInDescendants(int direction, Rect previouslyFocusedRect) { + return true; + } + + @Override + public void requestChildFocus(View child, View focused) {} + }; + listView.setOverScrollMode(View.OVER_SCROLL_IF_CONTENT_SCROLLS); + listView.setPadding(0, AndroidUtilities.statusBarHeight + dp(56), 0, dp(80)); + listView.setClipToPadding(true); + listView.setLayoutManager(layoutManager = new LinearLayoutManager(context)); + listView.setAdapter(adapter = new PaddedAdapter(context, loadingTextView)); + listView.setOnScrollListener(new RecyclerView.OnScrollListener() { + @Override + public void onScrolled(@NonNull 
RecyclerView recyclerView, int dx, int dy) { + containerView.invalidate(); + updateButtonShadow(listView.canScrollVertically(1)); + } + + @Override + public void onScrollStateChanged(@NonNull RecyclerView recyclerView, int newState) { + if (newState == RecyclerView.SCROLL_STATE_IDLE) { + sheetTopNotAnimate = false; + } + if ((newState == RecyclerView.SCROLL_STATE_IDLE || newState == RecyclerView.SCROLL_STATE_SETTLING) && getSheetTop(false) > 0 && getSheetTop(false) < dp(64 + 32) && listView.canScrollVertically(1) && hasEnoughHeight()) { + sheetTopNotAnimate = true; + listView.smoothScrollBy(0, (int) getSheetTop(false)); + } + } + }); + DefaultItemAnimator itemAnimator = new DefaultItemAnimator() { + @Override + protected void onChangeAnimationUpdate(RecyclerView.ViewHolder holder) { + containerView.invalidate(); + } + + @Override + protected void onMoveAnimationUpdate(RecyclerView.ViewHolder holder) { + containerView.invalidate(); + } + }; + itemAnimator.setDurations(180); + itemAnimator.setInterpolator(new LinearInterpolator()); + listView.setItemAnimator(itemAnimator); + containerView.addView(listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM)); + + headerView = new HeaderView(context); + containerView.addView(headerView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 78, Gravity.TOP | Gravity.FILL_HORIZONTAL)); + + buttonView = new FrameLayout(context); + buttonView.setBackgroundColor(getThemedColor(Theme.key_dialogBackground)); + + buttonShadowView = new View(context); + buttonShadowView.setBackgroundColor(getThemedColor(Theme.key_dialogShadowLine)); + buttonShadowView.setAlpha(0); + buttonView.addView(buttonShadowView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, AndroidUtilities.getShadowHeight() / dpf2(1), Gravity.TOP | Gravity.FILL_HORIZONTAL)); + + buttonTextView = new TextView(context); + buttonTextView.setLines(1); + buttonTextView.setSingleLine(true); + buttonTextView.setGravity(Gravity.CENTER_HORIZONTAL); + buttonTextView.setEllipsize(TextUtils.TruncateAt.END); + buttonTextView.setGravity(Gravity.CENTER); + buttonTextView.setTextColor(Theme.getColor(Theme.key_featuredStickers_buttonText)); + buttonTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + buttonTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + buttonTextView.setText(LocaleController.getString("CloseTranslation", R.string.CloseTranslation)); + buttonTextView.setBackground(Theme.AdaptiveRipple.filledRect(Theme.getColor(Theme.key_featuredStickers_addButton), 6)); + buttonTextView.setOnClickListener(e -> dismiss()); + buttonView.addView(buttonTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.BOTTOM | Gravity.FILL_HORIZONTAL, 16, 16, 16, 16)); + + containerView.addView(buttonView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM | Gravity.FILL_HORIZONTAL)); + + translate(); + } + + private boolean hasEnoughHeight() { + float height = 0; + for (int i = 0; i < listView.getChildCount(); ++i) { + View child = listView.getChildAt(i); + if (listView.getChildAdapterPosition(child) == 1) + height += child.getHeight(); + } + return height >= listView.getHeight() - listView.getPaddingTop() - listView.getPaddingBottom(); + } + + public void translate() { + if (reqId != null) { + ConnectionsManager.getInstance(currentAccount).cancelRequest(reqId, true); + reqId = null; + } + TLRPC.TL_messages_translateText req = new TLRPC.TL_messages_translateText(); + 
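The `translate()` request assembled here (continued below) is sent once for the whole text; before sending, the target language code is normalized by dropping any region suffix and mapping the legacy `nb` code to `no`. A standalone sketch of that normalization:

```java
// Standalone sketch of the target-language normalization applied in translate():
// drop any region suffix ("pt_BR" -> "pt") and map the legacy Norwegian Bokmål code
// "nb" to the "no" code expected by the translation backend.
public class ToLanguageSketch {
    static String normalizeToLanguage(String lang) {
        if (lang == null) {
            return null;
        }
        lang = lang.split("_")[0];
        if ("nb".equals(lang)) {
            lang = "no";
        }
        return lang;
    }

    public static void main(String[] args) {
        System.out.println(normalizeToLanguage("pt_BR")); // pt
        System.out.println(normalizeToLanguage("nb"));    // no
        System.out.println(normalizeToLanguage("en"));    // en
    }
}
```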
TLRPC.TL_textWithEntities textWithEntities = new TLRPC.TL_textWithEntities(); + textWithEntities.text = reqText == null ? "" : reqText.toString(); + if (reqMessageEntities != null) { + textWithEntities.entities = reqMessageEntities; + } + if (reqPeer != null) { + req.flags |= 1; + req.peer = reqPeer; + req.id.add(reqMessageId); + } else { + req.flags |= 2; + req.text.add(textWithEntities); + } +// if (fromLanguage != null && !"und".equals(fromLanguage)) { +// req.flags |= 4; +// req.from_lang = fromLanguage; +// } + String lang = toLanguage; + if (lang != null) { + lang = lang.split("_")[0]; + } + if ("nb".equals(lang)) { + lang = "no"; + } + req.to_lang = lang; + reqId = ConnectionsManager.getInstance(currentAccount).sendRequest(req, (res, err) -> { + AndroidUtilities.runOnUIThread(() -> { + reqId = null; + if (res instanceof TLRPC.TL_messages_translateResult && + !((TLRPC.TL_messages_translateResult) res).result.isEmpty() && + ((TLRPC.TL_messages_translateResult) res).result.get(0) != null && + ((TLRPC.TL_messages_translateResult) res).result.get(0).text != null + ) { + firstTranslation = false; + TLRPC.TL_textWithEntities text = preprocess(textWithEntities, ((TLRPC.TL_messages_translateResult) res).result.get(0)); + CharSequence translated = SpannableStringBuilder.valueOf(text.text); + MessageObject.addEntitiesToText(translated, text.entities, false, true, false, false); + translated = preprocessText(translated); + textView.setText(translated); + adapter.updateMainView(textViewContainer); + } else if (firstTranslation) { + dismiss(); + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.showBulletin, Bulletin.TYPE_ERROR, LocaleController.getString("TranslationFailedAlert2", R.string.TranslationFailedAlert2)); + } else { + BulletinFactory.of((FrameLayout) containerView, resourcesProvider).createErrorBulletin(LocaleController.getString("TranslationFailedAlert2", R.string.TranslationFailedAlert2)).show(); + headerView.toLanguageTextView.setText(languageName(toLanguage = prevToLanguage)); + adapter.updateMainView(textViewContainer); + } + }); + }); + } + + public static TLRPC.TL_textWithEntities preprocess(TLRPC.TL_textWithEntities source, TLRPC.TL_textWithEntities received) { + if (received == null || received.text == null) { + return null; + } + for (int i = 0; i < received.entities.size(); ++i) { + TLRPC.MessageEntity entity = received.entities.get(i); + if (entity instanceof TLRPC.TL_messageEntityTextUrl) { + if (entity.url == null) { + continue; + } + String text = received.text.substring(entity.offset, entity.offset + entity.length); + if (TextUtils.equals(text, entity.url)) { + TLRPC.TL_messageEntityUrl newEntity = new TLRPC.TL_messageEntityUrl(); + newEntity.offset = entity.offset; + newEntity.length = entity.length; + received.entities.set(i, newEntity); + } else if ( + entity.url.startsWith("https://t.me/") && + text.startsWith("@") && + TextUtils.equals(text.substring(1), entity.url.substring(13)) + ) { + TLRPC.TL_messageEntityMention newEntity = new TLRPC.TL_messageEntityMention(); + newEntity.offset = entity.offset; + newEntity.length = entity.length; + received.entities.set(i, newEntity); + } + } + } + if (source != null && source.text != null && !source.entities.isEmpty()) { + + HashMap> srcIndexes = groupEmojiRanges(source.text); + HashMap> destIndexes = groupEmojiRanges(received.text); + + for (int i = 0; i < source.entities.size(); ++i) { + TLRPC.MessageEntity entity = source.entities.get(i); + if (entity instanceof 
TLRPC.TL_messageEntityCustomEmoji) { + String code = source.text.substring(entity.offset, entity.offset + entity.length); + if (TextUtils.isEmpty(code)) { + continue; + } + ArrayList srcRanges = srcIndexes.get(code); + ArrayList destRanges = destIndexes.get(code); + if (srcRanges == null || destRanges == null) { + continue; + } + int srcIndex = -1; + for (int j = 0; j < srcRanges.size(); ++j) { + Emoji.EmojiSpanRange range = srcRanges.get(j); + if (range.start == entity.offset && range.end == entity.offset + entity.length) { + srcIndex = j; + break; + } + } + if (srcIndex < 0 || srcIndex >= destRanges.size()) { + continue; + } + Emoji.EmojiSpanRange destRange = destRanges.get(srcIndex); + if (destRange == null) { + continue; + } + + boolean alreadyContainsOne = false; + for (int j = 0; j < received.entities.size(); ++j) { + TLRPC.MessageEntity destEntity = received.entities.get(j); + if ( + destEntity instanceof TLRPC.TL_messageEntityCustomEmoji && + AndroidUtilities.intersect1d(destRange.start, destRange.end, destEntity.offset, destEntity.offset + destEntity.length) + ) { + alreadyContainsOne = true; + break; + } + } + if (alreadyContainsOne) { + continue; + } + + TLRPC.TL_messageEntityCustomEmoji newEntity = new TLRPC.TL_messageEntityCustomEmoji(); + newEntity.document_id = ((TLRPC.TL_messageEntityCustomEmoji) entity).document_id; + newEntity.document = ((TLRPC.TL_messageEntityCustomEmoji) entity).document; + newEntity.offset = destRange.start; + newEntity.length = destRange.end - destRange.start; + received.entities.add(newEntity); + } + } + } + return received; + } + + private static HashMap> groupEmojiRanges(CharSequence text) { + HashMap> result = new HashMap<>(); + if (text == null) { + return result; + } + ArrayList ranges = Emoji.parseEmojis(text); + if (ranges == null) { + return result; + } + String string = text.toString(); + for (int i = 0; i < ranges.size(); ++i) { + Emoji.EmojiSpanRange range = ranges.get(i); + if (range == null || range.code == null) { + continue; + } + String code = string.substring(range.start, range.end); + ArrayList codeRanges = result.get(code); + if (codeRanges == null) { + result.put(code, codeRanges = new ArrayList<>()); + } + codeRanges.add(range); + } + return result; + } + + public static ArrayList preprocess(ArrayList received) { + if (received == null) { + return null; + } + for (int i = 0; i < received.size(); ++i) { + received.set(i, preprocess(null, received.get(i))); + } + return received; + } + + private CharSequence preprocessText(CharSequence text) { + Spannable spannable = new SpannableStringBuilder(text); + URLSpan[] urlSpans; + if (onLinkPress != null || fragment != null) { + urlSpans = spannable.getSpans(0, spannable.length(), URLSpan.class); + for (int i = 0; i < urlSpans.length; ++i) { + URLSpan urlSpan = urlSpans[i]; + int start = spannable.getSpanStart(urlSpan), + end = spannable.getSpanEnd(urlSpan); + if (start == -1 || end == -1) { + continue; + } + spannable.removeSpan(urlSpan); + spannable.setSpan( + new ClickableSpan() { + @Override + public void onClick(@NonNull View view) { + if (onLinkPress != null) { + if (onLinkPress.run(urlSpan)) { + dismiss(); + } + } else if (fragment != null) { + AlertsCreator.showOpenUrlAlert(fragment, urlSpan.getURL(), false, false); + } + } + + @Override + public void updateDrawState(@NonNull TextPaint ds) { + int alpha = Math.min(ds.getAlpha(), ds.getColor() >> 24 & 0xff); + if (!(urlSpan instanceof URLSpanNoUnderline)) { + ds.setUnderlineText(true); + } + 
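`preprocess()` above re-attaches custom-emoji entities to the translated text by grouping emoji occurrences by their code in both the source and the translated string, then matching them up by index. A standalone sketch of the grouping step, with `int[]{start, end}` standing in for `Emoji.EmojiSpanRange`:

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

// Standalone sketch of the grouping step in preprocess(): collect the character ranges of
// each emoji code so occurrences in the source and in the translated text can be matched
// up by index. int[]{start, end} stands in for Emoji.EmojiSpanRange here.
public class EmojiRangeGroupingSketch {
    static HashMap<String, List<int[]>> groupRangesByCode(String text, List<int[]> emojiRanges) {
        HashMap<String, List<int[]>> result = new HashMap<>();
        for (int[] range : emojiRanges) {
            String code = text.substring(range[0], range[1]);
            result.computeIfAbsent(code, k -> new ArrayList<>()).add(range);
        }
        return result;
    }

    public static void main(String[] args) {
        String text = "hi :) bye :) ok :(";
        List<int[]> ranges = new ArrayList<>();
        ranges.add(new int[]{3, 5});    // first ":)"
        ranges.add(new int[]{10, 12});  // second ":)"
        ranges.add(new int[]{16, 18});  // ":("
        HashMap<String, List<int[]>> grouped = groupRangesByCode(text, ranges);
        System.out.println(":) occurrences = " + grouped.get(":)").size()); // 2
        System.out.println(":( occurrences = " + grouped.get(":(").size()); // 1
    }
}
```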
ds.setColor(Theme.getColor(Theme.key_dialogTextLink)); + ds.setAlpha(alpha); + } + }, + start, end, + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE + ); + } + } + return Emoji.replaceEmoji(spannable, textView.getPaint().getFontMetricsInt(), true); + } + + @Override + public void dismissInternal() { + if (reqId != null) { + ConnectionsManager.getInstance(currentAccount).cancelRequest(reqId, true); + reqId = null; + } + super.dismissInternal(); + } + + public void setFragment(BaseFragment fragment) { + this.fragment = fragment; + } + + public void setOnLinkPress(Utilities.CallbackReturn onLinkPress) { + this.onLinkPress = onLinkPress; + } + + public void setNoforwards(boolean noforwards) { + if (textView != null) { + textView.setTextIsSelectable(!noforwards); + } + if (noforwards) { + getWindow().addFlags(WindowManager.LayoutParams.FLAG_SECURE); + } else { + getWindow().clearFlags(WindowManager.LayoutParams.FLAG_SECURE); + } + } + + @Override + protected boolean canDismissWithSwipe() { + return false; + } + + private class LoadingTextView extends TextView { + + private final LinkPath path = new LinkPath(true); + private final LoadingDrawable loadingDrawable = new LoadingDrawable(); + + public LoadingTextView(Context context) { + super(context); + loadingDrawable.usePath(path); + loadingDrawable.setSpeed(.65f); + loadingDrawable.setRadiiDp(4); + setBackground(loadingDrawable); + } + + @Override + public void setTextColor(int color) { + super.setTextColor(Theme.multAlpha(color, .2f)); + loadingDrawable.setColors( + Theme.multAlpha(color, 0.03f), + Theme.multAlpha(color, 0.175f), + Theme.multAlpha(color, 0.2f), + Theme.multAlpha(color, 0.45f) + ); + } + + private void updateDrawable() { + if (path == null || loadingDrawable == null) { + return; + } + + path.rewind(); + if (getLayout() != null && getLayout().getText() != null) { + path.setCurrentLayout(getLayout(), 0, getPaddingLeft(), getPaddingTop()); + getLayout().getSelectionPath(0, getLayout().getText().length(), path); + } + loadingDrawable.updateBounds(); + } + + @Override + public void setText(CharSequence text, BufferType type) { + super.setText(text, type); + updateDrawable(); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + updateDrawable(); + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + loadingDrawable.reset(); + } + } + + private static class PaddedAdapter extends RecyclerListView.Adapter { + + private Context mContext; + private View mMainView; + + public PaddedAdapter(Context context, View mainView) { + mContext = context; + mMainView = mainView; + } + + private int mainViewType = 1; + + public void updateMainView(View newMainView) { + if (mMainView == newMainView) { + return; + } + mainViewType++; + mMainView = newMainView; + notifyItemChanged(1); + } + + @NonNull + @Override + public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { + if (viewType == 0) { + return new RecyclerListView.Holder(new View(mContext) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure( + MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), + MeasureSpec.makeMeasureSpec((int) (AndroidUtilities.displaySize.y * .4f), MeasureSpec.EXACTLY) + ); + } + }); + } else { + return new RecyclerListView.Holder(mMainView); + } + } + + @Override + public void onBindViewHolder(@NonNull RecyclerView.ViewHolder 
holder, int position) {} + + @Override + public int getItemViewType(int position) { + if (position == 0) { + return 0; + } else { + return mainViewType; + } + } + + @Override + public int getItemCount() { + return 2; + } + } + + private AnimatedFloat sheetTopAnimated; + private float getSheetTop() { + return getSheetTop(true); + } + private float getSheetTop(boolean animated) { + float top = listView.getTop(); + if (listView.getChildCount() >= 1) { + top += Math.max(0, listView.getChildAt(listView.getChildCount() - 1).getTop()); + } + top = Math.max(0, top - dp(78)); + if (animated && sheetTopAnimated != null) { + if (!listView.scrollingByUser && !sheetTopNotAnimate) { + top = sheetTopAnimated.set(top); + } else { + sheetTopAnimated.set(top, true); + } + } + return top; + } + + private class HeaderView extends FrameLayout { + + private ImageView backButton; + private TextView titleTextView; + private LinearLayout subtitleView; + private TextView fromLanguageTextView; + private ImageView arrowView; + private AnimatedTextView toLanguageTextView; + + private View backgroundView; + + private View shadow; + + public HeaderView(Context context) { + super(context); + + backgroundView = new View(context); + backgroundView.setBackgroundColor(getThemedColor(Theme.key_dialogBackground)); + addView(backgroundView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 44, Gravity.TOP | Gravity.FILL_HORIZONTAL, 0, 12, 0, 0)); + + backButton = new ImageView(context); + backButton.setScaleType(ImageView.ScaleType.CENTER); + backButton.setImageResource(R.drawable.ic_ab_back); + backButton.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_dialogTextBlack), PorterDuff.Mode.MULTIPLY)); + backButton.setBackground(Theme.createSelectorDrawable(getThemedColor(Theme.key_listSelector))); + backButton.setAlpha(0f); + backButton.setOnClickListener(e -> dismiss()); + addView(backButton, LayoutHelper.createFrame(54, 54, Gravity.TOP, 1, 1, 1, 1)); + + titleTextView = new TextView(context) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + if (LocaleController.isRTL) { + titleTextView.setPivotX(getMeasuredWidth()); + } + } + }; + titleTextView.setTextColor(getThemedColor(Theme.key_dialogTextBlack)); + titleTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); + titleTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + titleTextView.setText(LocaleController.getString("AutomaticTranslation", R.string.AutomaticTranslation)); + titleTextView.setPivotX(0); + titleTextView.setPivotY(0); + addView(titleTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.FILL_HORIZONTAL, 22, 20, 22, 0)); + + subtitleView = new LinearLayout(context) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + if (LocaleController.isRTL) { + subtitleView.setPivotX(getMeasuredWidth()); + } + } + }; + if (LocaleController.isRTL) { + subtitleView.setGravity(Gravity.RIGHT); + } + subtitleView.setPivotX(0); + subtitleView.setPivotY(0); + if (!TextUtils.isEmpty(fromLanguage) && !"und".equals(fromLanguage)) { + fromLanguageTextView = new TextView(context); + fromLanguageTextView.setLines(1); + fromLanguageTextView.setTextColor(getThemedColor(Theme.key_player_actionBarSubtitle)); + fromLanguageTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + 
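`PaddedAdapter` above backs the sheet with exactly two items: a spacer sized to 40% of the screen and a main view that is swapped between the loading placeholder and the translated text; bumping the item view type on every swap forces `RecyclerView` to create a fresh holder rather than rebind the stale one. A minimal sketch of that adapter shape, assuming androidx `RecyclerView` on the classpath (class and field names here are illustrative, not the ones used in `TranslateAlert2`):

```java
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;

// Minimal sketch of a two-item adapter whose second item wraps a swappable view.
class SwappableMainViewAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {
    static class Holder extends RecyclerView.ViewHolder {
        Holder(@NonNull View itemView) { super(itemView); }
    }

    private final View spacer;   // position 0: pushes the content down
    private View mainView;       // position 1: loading view or translated text
    private int mainViewType = 1;

    SwappableMainViewAdapter(View spacer, View mainView) {
        this.spacer = spacer;
        this.mainView = mainView;
    }

    // Changing the view type makes RecyclerView create a fresh holder for position 1
    // instead of rebinding the old holder that still wraps the previous view.
    void updateMainView(View newMainView) {
        if (mainView == newMainView) return;
        mainViewType++;
        mainView = newMainView;
        notifyItemChanged(1);
    }

    @Override public int getItemViewType(int position) { return position == 0 ? 0 : mainViewType; }

    @NonNull @Override
    public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
        return new Holder(viewType == 0 ? spacer : mainView);
    }

    @Override public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int position) {}
    @Override public int getItemCount() { return 2; }
}
```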
fromLanguageTextView.setText(capitalFirst(languageName(fromLanguage))); + fromLanguageTextView.setPadding(0, dp(2), 0, dp(2)); + } + + arrowView = new ImageView(context); + arrowView.setImageResource(R.drawable.search_arrow); + arrowView.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_player_actionBarSubtitle), PorterDuff.Mode.MULTIPLY)); + if (LocaleController.isRTL) { + arrowView.setScaleX(-1f); + } + + toLanguageTextView = new AnimatedTextView(context) { + private Paint bgPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private LinkSpanDrawable.LinkCollector links = new LinkSpanDrawable.LinkCollector(); + + @Override + protected void onDraw(Canvas canvas) { + if (LocaleController.isRTL) { + AndroidUtilities.rectTmp.set(getWidth() - width(), (getHeight() - dp(18)) / 2f, getWidth(), (getHeight() + dp(18)) / 2f); + } else { + AndroidUtilities.rectTmp.set(0, (getHeight() - dp(18)) / 2f, width(), (getHeight() + dp(18)) / 2f); + } + bgPaint.setColor(Theme.multAlpha(getThemedColor(Theme.key_player_actionBarSubtitle), .1175f)); + canvas.drawRoundRect(AndroidUtilities.rectTmp, dp(4), dp(4), bgPaint); + if (links.draw(canvas)) { + invalidate(); + } + + super.onDraw(canvas); + } + + @Override + public boolean onTouchEvent(MotionEvent event) { + if (event.getAction() == MotionEvent.ACTION_DOWN) { + LinkSpanDrawable link = new LinkSpanDrawable(null, resourcesProvider, event.getX(), event.getY()); + link.setColor(Theme.multAlpha(getThemedColor(Theme.key_player_actionBarSubtitle), .1175f)); + LinkPath path = link.obtainNewPath(); + if (LocaleController.isRTL) { + AndroidUtilities.rectTmp.set(getWidth() - width(), (getHeight() - dp(18)) / 2f, getWidth(), (getHeight() + dp(18)) / 2f); + } else { + AndroidUtilities.rectTmp.set(0, (getHeight() - dp(18)) / 2f, width(), (getHeight() + dp(18)) / 2f); + } + path.addRect(AndroidUtilities.rectTmp, Path.Direction.CW); + links.addLink(link); + invalidate(); + return true; + } else if (event.getAction() == MotionEvent.ACTION_UP || event.getAction() == MotionEvent.ACTION_CANCEL) { + if (event.getAction() == MotionEvent.ACTION_UP) { + performClick(); + } + links.clear(); + invalidate(); + } + return super.onTouchEvent(event); + } + }; + if (LocaleController.isRTL) { + toLanguageTextView.setGravity(Gravity.RIGHT); + } + toLanguageTextView.setAnimationProperties(.25f, 0, 350, CubicBezierInterpolator.EASE_OUT_QUINT); + toLanguageTextView.setTextColor(getThemedColor(Theme.key_player_actionBarSubtitle)); + toLanguageTextView.setTextSize(dp(14)); + toLanguageTextView.setText(capitalFirst(languageName(toLanguage))); + toLanguageTextView.setPadding(dp(4), dp(2), dp(4), dp(2)); + toLanguageTextView.setOnClickListener(e -> openLanguagesSelect()); + + if (LocaleController.isRTL) { + subtitleView.addView(toLanguageTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 0, 0, fromLanguageTextView != null ? 
3 : 0, 0)); + if (fromLanguageTextView != null) { + subtitleView.addView(arrowView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 0, 1, 0, 0)); + subtitleView.addView(fromLanguageTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 4, 0, 0, 0)); + } + } else { + if (fromLanguageTextView != null) { + subtitleView.addView(fromLanguageTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 0, 0, 4, 0)); + subtitleView.addView(arrowView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 0, 1, 0, 0)); + } + subtitleView.addView(toLanguageTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, fromLanguageTextView != null ? 3 : 0, 0, 0, 0)); + } + + addView(subtitleView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.FILL_HORIZONTAL, 22, 43, 22, 0)); + + shadow = new View(context); + shadow.setBackgroundColor(getThemedColor(Theme.key_dialogShadowLine)); + shadow.setAlpha(0); + addView(shadow, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, AndroidUtilities.getShadowHeight() / dpf2(1), Gravity.TOP | Gravity.FILL_HORIZONTAL, 0, 56, 0, 0)); + } + + public void openLanguagesSelect() { + ActionBarPopupWindow.ActionBarPopupWindowLayout layout = new ActionBarPopupWindow.ActionBarPopupWindowLayout(getContext()) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, + MeasureSpec.makeMeasureSpec(Math.min((int) (AndroidUtilities.displaySize.y * .33f), MeasureSpec.getSize(heightMeasureSpec)), MeasureSpec.EXACTLY) + ); + } + }; + + Drawable shadowDrawable2 = ContextCompat.getDrawable(getContext(), R.drawable.popup_fixed_alert).mutate(); + shadowDrawable2.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_actionBarDefaultSubmenuBackground), PorterDuff.Mode.MULTIPLY)); + layout.setBackground(shadowDrawable2); + + final Runnable[] dismiss = new Runnable[1]; + + ArrayList locales = TranslateController.getLocales(); + boolean first = true; + for (int i = 0; i < locales.size(); ++i) { + LocaleController.LocaleInfo localeInfo = locales.get(i); + + if ( + localeInfo.pluralLangCode.equals(fromLanguage) || + !"remote".equals(localeInfo.pathToFile) + ) { + continue; + } + + ActionBarMenuSubItem button = new ActionBarMenuSubItem(getContext(), 2, first, i == locales.size() - 1, resourcesProvider); + button.setText(capitalFirst(languageName(localeInfo.pluralLangCode))); + button.setChecked(TextUtils.equals(toLanguage, localeInfo.pluralLangCode)); + button.setOnClickListener(e -> { + if (dismiss[0] != null) { + dismiss[0].run(); + } + + if (TextUtils.equals(toLanguage, localeInfo.pluralLangCode)) { + return; + } + + if (adapter.mMainView == textViewContainer) { + prevToLanguage = toLanguage; + } + toLanguageTextView.setText(capitalFirst(languageName(toLanguage = localeInfo.pluralLangCode))); + adapter.updateMainView(loadingTextView); + setToLanguage(toLanguage); + translate(); + }); + layout.addView(button); + + first = false; + } + + ActionBarPopupWindow window = new ActionBarPopupWindow(layout, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT); + dismiss[0] = () -> window.dismiss(); + window.setPauseNotifications(true); + window.setDismissAnimationDuration(220); + 
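The language picker built in `openLanguagesSelect()` above only lists selectable targets: the detected source language is skipped, and so is any `LocaleInfo` whose `pathToFile` is not `"remote"`. A standalone sketch of that filter, with a small stand-in for `LocaleController.LocaleInfo`:

```java
import java.util.ArrayList;
import java.util.List;

// Standalone sketch of the locale filter in openLanguagesSelect(): skip the detected
// source language and anything that is not a "remote" language pack. LocaleInfo here
// is a tiny stand-in for LocaleController.LocaleInfo.
public class TargetLanguageFilterSketch {
    static class LocaleInfo {
        final String pluralLangCode;
        final String pathToFile;
        LocaleInfo(String pluralLangCode, String pathToFile) {
            this.pluralLangCode = pluralLangCode;
            this.pathToFile = pathToFile;
        }
    }

    static List<LocaleInfo> selectableTargets(List<LocaleInfo> locales, String fromLanguage) {
        List<LocaleInfo> result = new ArrayList<>();
        for (LocaleInfo info : locales) {
            if (info.pluralLangCode.equals(fromLanguage) || !"remote".equals(info.pathToFile)) {
                continue;
            }
            result.add(info);
        }
        return result;
    }

    public static void main(String[] args) {
        List<LocaleInfo> locales = new ArrayList<>();
        locales.add(new LocaleInfo("en", "remote"));
        locales.add(new LocaleInfo("de", "remote"));
        locales.add(new LocaleInfo("fr", "unofficial"));
        for (LocaleInfo info : selectableTargets(locales, "en")) {
            System.out.println(info.pluralLangCode); // de
        }
    }
}
```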
window.setOutsideTouchable(true); + window.setClippingEnabled(true); + window.setAnimationStyle(R.style.PopupContextAnimation); + window.setFocusable(true); + int[] location = new int[2]; + toLanguageTextView.getLocationInWindow(location); + layout.measure(MeasureSpec.makeMeasureSpec(AndroidUtilities.displaySize.x, MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.displaySize.y, MeasureSpec.AT_MOST)); + int height = layout.getMeasuredHeight(); + int y = location[1] > AndroidUtilities.displaySize.y * .9f - height ? location[1] - height + dp(8) : location[1] + toLanguageTextView.getMeasuredHeight() - dp(8); + window.showAtLocation(containerView, Gravity.TOP | Gravity.LEFT, location[0] - dp(8), y); + } + + @Override + public void setTranslationY(float translationY) { + super.setTranslationY(translationY); + + float t = MathUtils.clamp((translationY - AndroidUtilities.statusBarHeight) / dp(64), 0, 1); + if (!hasEnoughHeight()) { + t = 1; + } + t = CubicBezierInterpolator.EASE_OUT.getInterpolation(t); + + titleTextView.setScaleX(AndroidUtilities.lerp(.85f, 1f, t)); + titleTextView.setScaleY(AndroidUtilities.lerp(.85f, 1f, t)); + titleTextView.setTranslationY(AndroidUtilities.lerp(dpf2(-12), 0, t)); + if (!LocaleController.isRTL) { + titleTextView.setTranslationX(AndroidUtilities.lerp(dpf2(50), 0, t)); + subtitleView.setTranslationX(AndroidUtilities.lerp(dpf2(50), 0, t)); + } + + subtitleView.setTranslationY(AndroidUtilities.lerp(dpf2(-22), 0, t)); + + backButton.setTranslationX(AndroidUtilities.lerp(0, dpf2(-25), t)); + backButton.setAlpha(1f - t); + + shadow.setTranslationY(AndroidUtilities.lerp(0, dpf2(22), t)); + shadow.setAlpha(1f - t); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure( + MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), + MeasureSpec.makeMeasureSpec(dp(78), MeasureSpec.EXACTLY) + ); + } + } + + private class ContainerView extends FrameLayout { + public ContainerView(Context context) { + super(context); + + bgPaint.setColor(getThemedColor(Theme.key_dialogBackground)); + Theme.applyDefaultShadow(bgPaint); + } + + private Path bgPath = new Path(); + private Paint bgPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + + @Override + protected void dispatchDraw(Canvas canvas) { + + float top = getSheetTop(); + final float R = AndroidUtilities.lerp(0, dp(12), MathUtils.clamp(top / dpf2(24), 0, 1)); + headerView.setTranslationY(Math.max(AndroidUtilities.statusBarHeight, top)); + updateLightStatusBar(top <= AndroidUtilities.statusBarHeight / 2f); + + bgPath.rewind(); + AndroidUtilities.rectTmp.set(0, top, getWidth(), getHeight() + R); + bgPath.addRoundRect(AndroidUtilities.rectTmp, R, R, Path.Direction.CW); + canvas.drawPath(bgPath, bgPaint); + + super.dispatchDraw(canvas); + } + + private Boolean lightStatusBarFull; + private void updateLightStatusBar(boolean full) { + if (lightStatusBarFull == null || lightStatusBarFull != full) { + lightStatusBarFull = full; + AndroidUtilities.setLightStatusBar(getWindow(), AndroidUtilities.computePerceivedBrightness( + full ? 
+ getThemedColor(Theme.key_dialogBackground) : + Theme.blendOver( + getThemedColor(Theme.key_actionBarDefault), + 0x33000000 + ) + ) > .721f); + } + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(heightMeasureSpec), MeasureSpec.EXACTLY)); + } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + Bulletin.addDelegate(this, new Bulletin.Delegate() { + @Override + public int getBottomOffset(int tag) { + return AndroidUtilities.dp(16 + 48 + 16); + } + }); + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + Bulletin.removeDelegate(this); + } + } + + public static String capitalFirst(String text) { + if (text == null || text.length() <= 0) { + return null; + } + return text.substring(0, 1).toUpperCase() + text.substring(1); + } + + public static CharSequence capitalFirst(CharSequence text) { + if (text == null || text.length() <= 0) { + return null; + } + SpannableStringBuilder builder = text instanceof SpannableStringBuilder ? (SpannableStringBuilder) text : SpannableStringBuilder.valueOf(text); + String string = builder.toString(); + builder.replace(0, 1, string.substring(0, 1).toUpperCase()); + return builder; + } + + public static String languageName(String locale) { + return languageName(locale, null); + } + + public static String languageName(String locale, boolean[] accusative) { + if (locale == null || locale.equals(TranslateController.UNKNOWN_LANGUAGE) || locale.equals("auto")) { + return null; + } + + String simplifiedLocale = locale.split("_")[0]; + if ("nb".equals(simplifiedLocale)) { + simplifiedLocale = "no"; + } + + // getting localized language name in accusative case + if (accusative != null) { + String localed = LocaleController.getString("TranslateLanguage" + simplifiedLocale.toUpperCase()); + if (accusative[0] = (localed != null && !localed.startsWith("LOC_ERR"))) { + return localed; + } + } + + // getting language name from system + String systemLangName = systemLanguageName(locale); + if (systemLangName == null) { + systemLangName = systemLanguageName(simplifiedLocale); + } + if (systemLangName != null) { + return systemLangName; + } + + // getting language name from lang packs + if ("no".equals(locale)) { + locale = "nb"; + } + final LocaleController.LocaleInfo currentLanguageInfo = LocaleController.getInstance().getCurrentLocaleInfo(); + final LocaleController.LocaleInfo thisLanguageInfo = LocaleController.getInstance().getBuiltinLanguageByPlural(locale); + if (thisLanguageInfo == null) { + return null; + } + boolean isCurrentLanguageEnglish = currentLanguageInfo != null && "en".equals(currentLanguageInfo.pluralLangCode); + if (isCurrentLanguageEnglish) { + return thisLanguageInfo.nameEnglish; + } else { + return thisLanguageInfo.name; + } + } + + public static String systemLanguageName(String langCode) { + return systemLanguageName(langCode, false); + } + + private static HashMap localesByCode; + public static String systemLanguageName(String langCode, boolean inItsOwnLocale) { + if (langCode == null) { + return null; + } + if (localesByCode == null) { + localesByCode = new HashMap<>(); + try { + Locale[] allLocales = Locale.getAvailableLocales(); + for (int i = 0; i < allLocales.length; ++i) { + localesByCode.put(allLocales[i].getLanguage(), allLocales[i]); + String region = allLocales[i].getCountry(); + if (region != null && region.length() > 0) { + 
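`languageName()` above resolves a display name in three steps: a localized (accusative) string from the language pack, then the name known to `java.util.Locale`, then the built-in language list. The `Locale` step is plain Java; a standalone sketch of it (simplified to always format in the default locale):

```java
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

// Standalone sketch of the java.util.Locale step in languageName()/systemLanguageName():
// cache the available locales by language code (and by "language-region") and ask the
// default locale for a display name, appending the region where one is present.
public class SystemLanguageNameSketch {
    private static Map<String, Locale> localesByCode;

    static String systemLanguageName(String langCode) {
        if (langCode == null) {
            return null;
        }
        if (localesByCode == null) {
            localesByCode = new HashMap<>();
            for (Locale locale : Locale.getAvailableLocales()) {
                localesByCode.put(locale.getLanguage(), locale);
                String region = locale.getCountry();
                if (region != null && region.length() > 0) {
                    localesByCode.put(locale.getLanguage() + "-" + region.toLowerCase(), locale);
                }
            }
        }
        langCode = langCode.replace("_", "-").toLowerCase();
        Locale locale = localesByCode.get(langCode);
        if (locale == null) {
            return null;
        }
        String name = locale.getDisplayLanguage(Locale.getDefault());
        if (langCode.contains("-")) {
            String region = locale.getDisplayCountry(Locale.getDefault());
            if (!region.isEmpty()) {
                name += " (" + region + ")";
            }
        }
        return name;
    }

    public static void main(String[] args) {
        System.out.println(systemLanguageName("de"));    // e.g. "German"
        System.out.println(systemLanguageName("pt_br")); // e.g. "Portuguese (Brazil)"
    }
}
```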
localesByCode.put(allLocales[i].getLanguage() + "-" + region.toLowerCase(), allLocales[i]); + } + } + } catch (Exception ignore) {} + } + langCode = langCode.replace("_", "-").toLowerCase(); + try { + Locale locale = localesByCode.get(langCode); + if (locale != null) { + String name = locale.getDisplayLanguage(inItsOwnLocale ? locale : Locale.getDefault()); + if (langCode.contains("-")) { + String region = locale.getDisplayCountry(inItsOwnLocale ? locale : Locale.getDefault()); + if (!TextUtils.isEmpty(region)) { + name += " (" + region + ")"; + } + } + return name; + } + } catch (Exception ignore) {} + return null; + } + + + @Override + public void show() { + super.show(); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); + } + + @Override + public void dismiss() { + super.dismiss(); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); + } + + @Override + public void didReceivedNotification(int id, int account, Object... args) { + if (id == NotificationCenter.emojiLoaded) { + loadingTextView.invalidate(); + textView.invalidate(); + } + } + + private Boolean buttonShadowShown; + private void updateButtonShadow(boolean show) { + if (buttonShadowShown == null || buttonShadowShown != show) { + buttonShadowShown = show; + buttonShadowView.animate().cancel(); + buttonShadowView.animate().alpha(show ? 1f : 0f).setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT).setDuration(320).start(); + } + } + + public static TranslateAlert2 showAlert(Context context, BaseFragment fragment, int currentAccount, TLRPC.InputPeer peer, int msgId, String fromLanguage, String toLanguage, CharSequence text, ArrayList entities, boolean noforwards, Utilities.CallbackReturn onLinkPress, Runnable onDismiss) { + TranslateAlert2 alert = new TranslateAlert2(context, fromLanguage, toLanguage, text, entities, peer, msgId, null) { + @Override + public void dismiss() { + super.dismiss(); + if (onDismiss != null) { + onDismiss.run(); + } + } + }; + alert.setNoforwards(noforwards); + alert.setFragment(fragment); + alert.setOnLinkPress(onLinkPress); + if (fragment != null) { + if (fragment.getParentActivity() != null) { + fragment.showDialog(alert); + } + } else { + alert.show(); + } + return alert; + } + + public static TranslateAlert2 showAlert(Context context, BaseFragment fragment, int currentAccount, String fromLanguage, String toLanguage, CharSequence text, ArrayList entities, boolean noforwards, Utilities.CallbackReturn onLinkPress, Runnable onDismiss) { + TranslateAlert2 alert = new TranslateAlert2(context, fromLanguage, toLanguage, text, entities, null) { + @Override + public void dismiss() { + super.dismiss(); + if (onDismiss != null) { + onDismiss.run(); + } + } + }; + alert.setNoforwards(noforwards); + alert.setFragment(fragment); + alert.setOnLinkPress(onLinkPress); + if (fragment != null) { + if (fragment.getParentActivity() != null) { + fragment.showDialog(alert); + } + } else { + alert.show(); + } + return alert; + } + + public static String getToLanguage() { + return MessagesController.getGlobalMainSettings().getString("translate_to_language", LocaleController.getInstance().getCurrentLocale().getLanguage()); + } + + public static void setToLanguage(String toLang) { + MessagesController.getGlobalMainSettings().edit().putString("translate_to_language", toLang).apply(); + } + + public static void resetToLanguage() { + MessagesController.getGlobalMainSettings().edit().remove("translate_to_language").apply(); + } +} diff --git 
a/TMessagesProj/src/main/java/org/telegram/ui/Components/TranslateButton.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/TranslateButton.java new file mode 100644 index 0000000000..80e4847ccd --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/TranslateButton.java @@ -0,0 +1,318 @@ +package org.telegram.ui.Components; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.drawable.Drawable; +import android.text.SpannableString; +import android.text.Spanned; +import android.text.TextUtils; +import android.text.style.DynamicDrawableSpan; +import android.text.style.ImageSpan; +import android.view.Gravity; +import android.view.View; +import android.view.WindowManager; +import android.widget.FrameLayout; +import android.widget.ImageView; +import android.widget.LinearLayout; +import android.widget.ScrollView; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ChatObject; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.MessagesController; +import org.telegram.messenger.R; +import org.telegram.messenger.TranslateController; +import org.telegram.messenger.UserConfig; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.ActionBarMenuSubItem; +import org.telegram.ui.ActionBar.ActionBarPopupWindow; +import org.telegram.ui.ActionBar.BaseFragment; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.ChatActivity; +import org.telegram.ui.RestrictedLanguagesSelectActivity; + +import java.util.ArrayList; + +public class TranslateButton extends FrameLayout { + + private final int currentAccount; + private final long dialogId; + private final BaseFragment fragment; + + private Theme.ResourcesProvider resourcesProvider; + + private AnimatedTextView textView; + public final SpannableString translateIcon; + + private ImageView menuView; + + private boolean[] accusative = new boolean[1]; + + public TranslateButton(Context context, ChatActivity chatActivity, Theme.ResourcesProvider resourcesProvider) { + this(context, chatActivity.getCurrentAccount(), chatActivity.getDialogId(), chatActivity, resourcesProvider); + } + + public TranslateButton(Context context, int currentAccount, long dialogId, BaseFragment fragment, Theme.ResourcesProvider resourcesProvider) { + super(context); + + this.currentAccount = currentAccount; + this.dialogId = dialogId; + this.fragment = fragment; + this.resourcesProvider = resourcesProvider; + + textView = new AnimatedTextView(context, true, true, false); + textView.setAnimationProperties(.3f, 0, 450, CubicBezierInterpolator.EASE_OUT_QUINT); + textView.setTextColor(Theme.getColor(Theme.key_chat_addContact, resourcesProvider)); + textView.setTextSize(AndroidUtilities.dp(15)); + textView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + textView.setPadding(AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4), 0); + textView.setGravity(Gravity.CENTER_HORIZONTAL); + textView.setIgnoreRTL(!LocaleController.isRTL); + textView.adaptWidth = false; + textView.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_chat_addContact, resourcesProvider) & 0x19ffffff, 3)); + textView.setOnClickListener(e -> onButtonClick()); + addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + + final Drawable translateDrawable = 
getContext().getResources().getDrawable(R.drawable.msg_translate).mutate(); + translateDrawable.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_addContact, resourcesProvider), PorterDuff.Mode.MULTIPLY)); + translateDrawable.setBounds(0, AndroidUtilities.dp(-8), AndroidUtilities.dp(20), AndroidUtilities.dp(20 - 8)); + translateIcon = new SpannableString("x"); + translateIcon.setSpan(new ImageSpan(translateDrawable, DynamicDrawableSpan.ALIGN_BOTTOM), 0, 1, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + + menuView = new ImageView(context); + menuView.setScaleType(ImageView.ScaleType.CENTER); + menuView.setImageResource(R.drawable.msg_mini_customize); + menuView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_addContact, resourcesProvider), PorterDuff.Mode.MULTIPLY)); + menuView.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_chat_addContact, resourcesProvider) & 0x19ffffff, Theme.RIPPLE_MASK_ROUNDRECT_6DP)); + menuView.setOnClickListener(e -> { + if (UserConfig.getInstance(currentAccount).isPremium()) { + onMenuClick(); + } else { + onCloseClick(); + } + }); + addView(menuView, LayoutHelper.createFrame(32, 32, Gravity.RIGHT | Gravity.CENTER_VERTICAL, 0, 0, 8, 0)); + } + + protected void onButtonClick() { + + } + + protected void onCloseClick() { + + } + + protected void onMenuClick() { + TranslateController translateController = MessagesController.getInstance(currentAccount).getTranslateController(); + + final ActionBarPopupWindow.ActionBarPopupWindowLayout popupLayout = new ActionBarPopupWindow.ActionBarPopupWindowLayout(getContext(), R.drawable.popup_fixed_alert2, resourcesProvider, ActionBarPopupWindow.ActionBarPopupWindowLayout.FLAG_USE_SWIPEBACK); + final ActionBarPopupWindow popupWindow = new ActionBarPopupWindow(popupLayout, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT); + popupLayout.setBackgroundColor(Theme.getColor(Theme.key_actionBarDefaultSubmenuBackground, resourcesProvider)); + + LinearLayout swipeBack = new LinearLayout(getContext()); + swipeBack.setOrientation(LinearLayout.VERTICAL); + ScrollView swipeBackScrollView = new ScrollView(getContext()) { + Drawable topShadowDrawable; + AnimatedFloat alphaFloat = new AnimatedFloat(this, 350, CubicBezierInterpolator.EASE_OUT_QUINT); + private boolean wasCanScrollVertically; + + @Override + public void onNestedScroll(View target, int dxConsumed, int dyConsumed, int dxUnconsumed, int dyUnconsumed) { + super.onNestedScroll(target, dxConsumed, dyConsumed, dxUnconsumed, dyUnconsumed); + boolean canScrollVertically = canScrollVertically(-1); + if (wasCanScrollVertically != canScrollVertically) { + invalidate(); + wasCanScrollVertically = canScrollVertically; + } + } + + @Override + protected void dispatchDraw(Canvas canvas) { + super.dispatchDraw(canvas); + + float alpha = .5f * alphaFloat.set(canScrollVertically(-1) ? 
1 : 0); + if (alpha > 0) { + if (topShadowDrawable == null) { + topShadowDrawable = getContext().getResources().getDrawable(R.drawable.header_shadow); + } + topShadowDrawable.setBounds( + 0, getScrollY(), getWidth(), getScrollY() + topShadowDrawable.getIntrinsicHeight() + ); + topShadowDrawable.setAlpha((int) (0xFF * alpha)); + topShadowDrawable.draw(canvas); + } + } + }; + LinearLayout swipeBackScroll = new LinearLayout(getContext()); + swipeBackScrollView.addView(swipeBackScroll); + swipeBackScroll.setOrientation(LinearLayout.VERTICAL); + popupLayout.swipeBackGravityRight = true; + final int swipeBackIndex = popupLayout.addViewToSwipeBack(swipeBack); + + ActionBarMenuSubItem translateToButton = new ActionBarMenuSubItem(getContext(), true, false, resourcesProvider); + translateToButton.setTextAndIcon(LocaleController.getString("TranslateTo", R.string.TranslateTo), R.drawable.msg_translate); + translateToButton.setSubtext(TranslateAlert2.capitalFirst(TranslateAlert2.languageName(translateController.getDialogTranslateTo(dialogId)))); + translateToButton.setItemHeight(56); + translateToButton.setOnClickListener(e -> popupLayout.getSwipeBack().openForeground(swipeBackIndex)); + popupLayout.addView(translateToButton); + + ActionBarMenuSubItem backButton = new ActionBarMenuSubItem(getContext(), true, false, resourcesProvider); + backButton.setTextAndIcon(LocaleController.getString("Back", R.string.Back), R.drawable.ic_ab_back); + backButton.setOnClickListener(e -> popupLayout.getSwipeBack().closeForeground()); + swipeBack.addView(backButton); + + swipeBack.addView(swipeBackScrollView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 420)); + + String detectedLanguage = translateController.getDialogDetectedLanguage(dialogId); + String detectedLanguageName = TranslateAlert2.languageName(detectedLanguage); + String detectedLanguageNameAccusative = TranslateAlert2.languageName(detectedLanguage, accusative); + String currentTranslateTo = translateController.getDialogTranslateTo(dialogId); + + ArrayList suggestedLanguages = TranslateController.getSuggestedLanguages(currentTranslateTo); + ArrayList allLanguages = TranslateController.getLanguages(); + swipeBackScroll.addView(new ActionBarPopupWindow.GapView(getContext(), resourcesProvider), LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 8)); + if (currentTranslateTo != null) { + String displayName = TranslateAlert2.capitalFirst(TranslateAlert2.languageName(currentTranslateTo)); + if (displayName != null) { + ActionBarMenuSubItem button = new ActionBarMenuSubItem(getContext(), 2, false, false, resourcesProvider); + button.setChecked(true); + button.setText(displayName); + swipeBackScroll.addView(button); + } + } + for (TranslateController.Language lng : suggestedLanguages) { + final String code = lng.code; + if (TextUtils.equals(code, detectedLanguage)) { + continue; + } + + ActionBarMenuSubItem button = new ActionBarMenuSubItem(getContext(), 2, false, false, resourcesProvider); + final boolean checked = currentTranslateTo != null && currentTranslateTo.equals(code); + button.setChecked(checked); + button.setText(lng.displayName); + if (!checked) { + button.setOnClickListener(e -> { + translateController.setDialogTranslateTo(dialogId, code); + popupWindow.dismiss(); + updateText(); + }); + } + swipeBackScroll.addView(button); + } + swipeBackScroll.addView(new ActionBarPopupWindow.GapView(getContext(), resourcesProvider), LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 8)); + for (TranslateController.Language lng : allLanguages) { + final 
String code = lng.code; + if (TextUtils.equals(code, detectedLanguage)) { + continue; + } + + ActionBarMenuSubItem button = new ActionBarMenuSubItem(getContext(), 2, false, false, resourcesProvider); + final boolean checked = currentTranslateTo != null && currentTranslateTo.equals(code); + button.setChecked(checked); + button.setText(lng.displayName); + if (!checked) { + button.setOnClickListener(e -> { + translateController.setDialogTranslateTo(dialogId, code); + popupWindow.dismiss(); + updateText(); + }); + } + swipeBackScroll.addView(button); + } + +// if (detectedLanguage != null) { +// ActionBarMenuSubItem translateFromButton = new ActionBarMenuSubItem(getContext(), true, false, resourcesProvider); +// translateFromButton.setTextAndIcon(LocaleController.getString("DetectedLanguage", R.string.DetectedLanguage), R.drawable.msg_language); +// translateFromButton.setSubtext(TranslateAlert2.languageName(detectedLanguage)); +// translateFromButton.setItemHeight(56); +// popupLayout.addView(translateFromButton); +// } + + popupLayout.addView(new ActionBarPopupWindow.GapView(getContext(), resourcesProvider), LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 8)); + + if (detectedLanguageNameAccusative != null) { + ActionBarMenuSubItem dontTranslateButton = new ActionBarMenuSubItem(getContext(), true, false, resourcesProvider); + String text; + if (accusative[0]) { + text = LocaleController.formatString("DoNotTranslateLanguage", R.string.DoNotTranslateLanguage, detectedLanguageNameAccusative); + } else { + text = LocaleController.formatString("DoNotTranslateLanguageOther", R.string.DoNotTranslateLanguageOther, detectedLanguageNameAccusative); + } + dontTranslateButton.setTextAndIcon(text, R.drawable.msg_block2); + dontTranslateButton.setOnClickListener(e -> { + RestrictedLanguagesSelectActivity.toggleLanguage(detectedLanguage, true); + translateController.checkRestrictedLanguagesUpdate(); + translateController.setHideTranslateDialog(dialogId, true); + String bulletinTextString; + if (accusative[0]) { + bulletinTextString = LocaleController.formatString("AddedToDoNotTranslate", R.string.AddedToDoNotTranslate, detectedLanguageNameAccusative); + } else { + bulletinTextString = LocaleController.formatString("AddedToDoNotTranslateOther", R.string.AddedToDoNotTranslateOther, detectedLanguageNameAccusative); + } + CharSequence bulletinText = AndroidUtilities.replaceTags(bulletinTextString); + bulletinText = TranslateAlert2.capitalFirst(bulletinText); + BulletinFactory.of(fragment).createSimpleBulletin( + R.raw.msg_translate, + bulletinText, + LocaleController.getString("Settings", R.string.Settings), + () -> fragment.presentFragment(new RestrictedLanguagesSelectActivity()) + ).show(); + popupWindow.dismiss(); + }); + popupLayout.addView(dontTranslateButton); + } + + ActionBarMenuSubItem hideButton = new ActionBarMenuSubItem(getContext(), true, false, resourcesProvider); + hideButton.setTextAndIcon(LocaleController.getString("Hide", R.string.Hide), R.drawable.msg_cancel); + hideButton.setOnClickListener(e -> { + translateController.setHideTranslateDialog(dialogId, true); + TLRPC.Chat chat = MessagesController.getInstance(currentAccount).getChat(-dialogId); + final boolean isChannel = chat != null && ChatObject.isChannelAndNotMegaGroup(chat); + final CharSequence message = AndroidUtilities.replaceTags( + isChannel ? + LocaleController.getString("TranslationBarHiddenForChannel", R.string.TranslationBarHiddenForChannel) : + chat != null ? 
+ LocaleController.getString("TranslationBarHiddenForGroup", R.string.TranslationBarHiddenForGroup) : + LocaleController.getString("TranslationBarHiddenForChat", R.string.TranslationBarHiddenForChat) + ); + BulletinFactory.of(fragment).createSimpleBulletin(R.raw.msg_translate, message, LocaleController.getString("Undo", R.string.Undo), () -> { + translateController.setHideTranslateDialog(dialogId, false); + }).show(); + popupWindow.dismiss(); + }); + popupLayout.addView(hideButton); + + popupWindow.setPauseNotifications(true); + popupWindow.setDismissAnimationDuration(220); + popupWindow.setOutsideTouchable(true); + popupWindow.setClippingEnabled(true); + popupWindow.setAnimationStyle(R.style.PopupContextAnimation); + popupWindow.setFocusable(true); + popupWindow.setInputMethodMode(ActionBarPopupWindow.INPUT_METHOD_NOT_NEEDED); + popupWindow.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_UNSPECIFIED); + popupWindow.showAsDropDown(menuView, 0, -menuView.getMeasuredHeight() - AndroidUtilities.dp(8)); + } + + public void updateText() { + TranslateController translateController = MessagesController.getInstance(currentAccount).getTranslateController(); + if (translateController.isTranslatingDialog(dialogId)) { + textView.setText(TextUtils.concat(translateIcon, " ", LocaleController.getString("ShowOriginalButton", R.string.ShowOriginalButton))); + } else { + String lng = translateController.getDialogTranslateTo(dialogId); + if (lng == null) { + lng = "en"; + } + String text; + String lang = TranslateAlert2.languageName(lng, accusative); + if (accusative[0]) { + text = LocaleController.formatString("TranslateToButton", R.string.TranslateToButton, lang); + } else { + text = LocaleController.formatString("TranslateToButtonOther", R.string.TranslateToButtonOther, lang); + } + textView.setText(TextUtils.concat(translateIcon, " ", text)); + } + menuView.setImageResource(UserConfig.getInstance(currentAccount).isPremium() ? 
R.drawable.msg_mini_customize : R.drawable.msg_close); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/UndoView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/UndoView.java index 9d63f90abc..600ef657b2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/UndoView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/UndoView.java @@ -154,8 +154,6 @@ public class UndoView extends FrameLayout { public final static int ACTION_IMPORT_GROUP_NOT_ADMIN = 46; public final static int ACTION_IMPORT_INFO = 47; - public final static int ACTION_PLAYBACK_SPEED_ENABLED = 50; - public final static int ACTION_PLAYBACK_SPEED_DISABLED = 51; public final static int ACTION_MESSAGE_COPIED = 52; public final static int ACTION_FWD_MESSAGES = 53; public final static int ACTION_NOTIFY_ON = 54; @@ -939,7 +937,7 @@ public void showWithAction(ArrayList dialogIds, int action, Object infoObj undoButton.setVisibility(GONE); } else if (currentAction == ACTION_IMPORT_NOT_MUTUAL || currentAction == ACTION_IMPORT_GROUP_NOT_ADMIN || currentAction == ACTION_IMPORT_INFO || - currentAction == ACTION_PLAYBACK_SPEED_DISABLED || currentAction == ACTION_PLAYBACK_SPEED_ENABLED || currentAction == ACTION_MESSAGE_COPIED || + currentAction == ACTION_MESSAGE_COPIED || currentAction == ACTION_FWD_MESSAGES || currentAction == ACTION_NOTIFY_ON || currentAction == ACTION_NOTIFY_OFF || currentAction == ACTION_USERNAME_COPIED || currentAction == ACTION_HASHTAG_COPIED || currentAction == ACTION_TEXT_COPIED || currentAction == ACTION_LINK_COPIED || currentAction == ACTION_PHONE_COPIED || currentAction == ACTION_AUTO_DELETE_OFF || currentAction == ACTION_AUTO_DELETE_ON || currentAction == ACTION_GIGAGROUP_CANCEL || currentAction == ACTION_GIGAGROUP_SUCCESS || @@ -997,16 +995,6 @@ public void showWithAction(ArrayList dialogIds, int action, Object infoObj infoOnly = true; layoutParams.topMargin = AndroidUtilities.dp(9); infoTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); - } else if (currentAction == ACTION_PLAYBACK_SPEED_DISABLED) { - infoTextView.setText(LocaleController.getString("AudioSpeedNormal", R.string.AudioSpeedNormal)); - leftImageView.setAnimation(R.raw.audio_stop_speed, 36, 36); - timeLeft = 3000; - infoTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); - } else if (currentAction == ACTION_PLAYBACK_SPEED_ENABLED) { - infoTextView.setText(LocaleController.getString("AudioSpeedFast", R.string.AudioSpeedFast)); - leftImageView.setAnimation(R.raw.audio_speed, 36, 36); - timeLeft = 3000; - infoTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); } else if (currentAction == ACTION_MESSAGE_COPIED || currentAction == ACTION_USERNAME_COPIED || currentAction == ACTION_HASHTAG_COPIED || currentAction == ACTION_TEXT_COPIED || currentAction == ACTION_LINK_COPIED || currentAction == ACTION_PHONE_COPIED || currentAction == ACTION_EMAIL_COPIED) { if (!AndroidUtilities.shouldShowClipboardToast()) { return; @@ -1135,9 +1123,8 @@ public void showWithAction(ArrayList dialogIds, int action, Object infoObj leftImageView.playAnimation(); if (hapticDelay > 0) { leftImageView.postDelayed(() -> { - if (!NekoConfig.disableVibration.Bool()) { + if (!NekoConfig.disableVibration.Bool()) leftImageView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } }, hapticDelay); } } else if (currentAction == ACTION_PROXIMITY_SET || currentAction == ACTION_PROXIMITY_REMOVED) { diff --git 
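// Wiring sketch for the new TranslateButton (an illustrative assumption, not part of the
// patch): a host screen subclasses it and reacts to taps. hostContext, hostFragment,
// currentAccount, dialogId and resourcesProvider are hypothetical fields of that host;
// only methods that appear in this diff are called on the Telegram classes.
TranslateButton translateButton = new TranslateButton(hostContext, currentAccount, dialogId, hostFragment, resourcesProvider) {
    @Override
    protected void onButtonClick() {
        // Host-specific: switch the dialog between translated and original messages
        // (ChatActivity does this outside this diff), then refresh the label.
        updateText();
    }

    @Override
    protected void onCloseClick() {
        // Non-premium users see a close icon instead of the settings menu; hide the bar for this dialog.
        MessagesController.getInstance(currentAccount).getTranslateController().setHideTranslateDialog(dialogId, true);
    }
};
translateButton.updateText(); // picks "Translate to X" vs "Show original" and the matching corner icon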
a/TMessagesProj/src/main/java/org/telegram/ui/Components/UsersAlertBase.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/UsersAlertBase.java index 60ea5042a2..0094c5be7d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/UsersAlertBase.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/UsersAlertBase.java @@ -117,10 +117,6 @@ public UsersAlertBase(Context context, boolean needFocus, int account, Theme.Res containerView.addView(emptyView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP, 0, 58 + 4, 0, 0)); listView = new RecyclerListView(context) { - @Override - protected boolean allowSelectChildAtPosition(float x, float y) { - return isAllowSelectChildAtPosition(x, y); - } @Override public void setTranslationY(float translationY) { @@ -194,10 +190,6 @@ protected ContainerView createContainerView(Context context) { return new ContainerView(context); } - protected boolean isAllowSelectChildAtPosition(float x, float y) { - return y >= AndroidUtilities.dp(58) + (Build.VERSION.SDK_INT >= 21 ? AndroidUtilities.statusBarHeight : 0); - } - protected void updateColorKeys() { } @@ -246,7 +238,7 @@ protected int getCurrentColor() { @Override public boolean dispatchTouchEvent(MotionEvent event) { MotionEvent e = MotionEvent.obtain(event); - e.setLocation(e.getRawX(), e.getRawY() - containerView.getTranslationY()); + e.setLocation(e.getRawX(), e.getRawY() - listView.getMeasuredHeight()); if (e.getAction() == MotionEvent.ACTION_UP) { e.setAction(MotionEvent.ACTION_CANCEL); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/VectorAvatarThumbDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/VectorAvatarThumbDrawable.java new file mode 100644 index 0000000000..28cb79dcd5 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/VectorAvatarThumbDrawable.java @@ -0,0 +1,239 @@ +package org.telegram.ui.Components; + +import android.graphics.Canvas; +import android.graphics.ColorFilter; +import android.graphics.drawable.Drawable; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.core.graphics.ColorUtils; + +import com.google.android.exoplayer2.util.Log; + +import org.telegram.messenger.DocumentObject; +import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.MediaDataController; +import org.telegram.messenger.NotificationCenter; +import org.telegram.messenger.SvgHelper; +import org.telegram.messenger.UserConfig; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.Theme; + +import java.util.HashSet; + +public class VectorAvatarThumbDrawable extends Drawable implements AnimatedEmojiSpan.InvalidateHolder, AttachableDrawable, NotificationCenter.NotificationCenterDelegate { + + public static final int TYPE_SMALL = 1; + public static final int TYPE_PROFILE = 2; + public static final int TYPE_STATIC = 3; + public final GradientTools gradientTools = new GradientTools(); + private final int type; + float roundRadius; + boolean isPremium; + + ImageReceiver currentParent; + HashSet parents = new HashSet<>(); + + AnimatedEmojiDrawable animatedEmojiDrawable; + ImageReceiver imageReceiver; + ImageReceiver stickerPreloadImageReceiver = new ImageReceiver(); + final int currentAccount = UserConfig.selectedAccount; + boolean imageSeted; + TLRPC.TL_videoSizeStickerMarkup sizeStickerMarkup; + + public VectorAvatarThumbDrawable(TLRPC.VideoSize 
vectorImageMarkup, boolean isPremiumUser, int type) { + this.type = type; + this.isPremium = isPremiumUser; + int color1 = ColorUtils.setAlphaComponent(vectorImageMarkup.background_colors.get(0), 255); + int color2 = vectorImageMarkup.background_colors.size() > 1 ? ColorUtils.setAlphaComponent(vectorImageMarkup.background_colors.get(1), 255) : 0; + int color3 = vectorImageMarkup.background_colors.size() > 2 ? ColorUtils.setAlphaComponent(vectorImageMarkup.background_colors.get(2), 255) : 0; + int color4 = vectorImageMarkup.background_colors.size() > 3 ? ColorUtils.setAlphaComponent(vectorImageMarkup.background_colors.get(3), 255) : 0; + gradientTools.setColors(color1, color2, color3, color4); + if (vectorImageMarkup instanceof TLRPC.TL_videoSizeEmojiMarkup) { + TLRPC.TL_videoSizeEmojiMarkup emojiMarkup = (TLRPC.TL_videoSizeEmojiMarkup) vectorImageMarkup; + int cacheType = AnimatedEmojiDrawable.STANDARD_LOTTIE_FRAME; + if (type == TYPE_SMALL && isPremiumUser) { + cacheType = AnimatedEmojiDrawable.CACHE_TYPE_EMOJI_STATUS; + } else if (type == TYPE_PROFILE) { + cacheType = AnimatedEmojiDrawable.CACHE_TYPE_AVATAR_CONSTRUCTOR_PREVIEW2; + } + + animatedEmojiDrawable = new AnimatedEmojiDrawable(cacheType, UserConfig.selectedAccount, emojiMarkup.emoji_id); + } else if (vectorImageMarkup instanceof TLRPC.TL_videoSizeStickerMarkup) { + sizeStickerMarkup = (TLRPC.TL_videoSizeStickerMarkup) vectorImageMarkup; + imageReceiver = new ImageReceiver() { + @Override + public void invalidate() { + VectorAvatarThumbDrawable.this.invalidate(); + } + }; + imageReceiver.setInvalidateAll(true); + if (type == TYPE_SMALL) { + imageReceiver.setAutoRepeatCount(2); + } + setImage(); + } + } + + private void setImage() { + TLRPC.TL_messages_stickerSet set = MediaDataController.getInstance(currentAccount).getStickerSet(sizeStickerMarkup.stickerset, false); + if (set != null) { + imageSeted = true; + for (int i = 0; i < set.documents.size(); i++) { + if (set.documents.get(i).id == sizeStickerMarkup.sticker_id) { + TLRPC.Document document = set.documents.get(i); + TLRPC.Document thumb = null; + String filter = "50_50_firstframe"; + String thumbFilter = null; + if (isPremium && type == TYPE_SMALL) { + filter = "50_50"; + thumbFilter = "50_50_firstframe"; + thumb = document; + } else if (type == TYPE_PROFILE) { + filter = "100_100"; + thumbFilter = "50_50_firstframe"; + thumb = document; + } + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(document, Theme.key_windowBackgroundWhiteGrayIcon, 0.2f); + imageReceiver.setImage(ImageLocation.getForDocument(document), filter, ImageLocation.getForDocument(thumb), thumbFilter, null, null, svgThumb, 0, "tgs", document, 0); + if (type == TYPE_STATIC) { + stickerPreloadImageReceiver.setImage(ImageLocation.getForDocument(document), "100_100", null, null, null, 0, "tgs", document, 0); + } + break; + } + } + } + } + + @Override + public void draw(@NonNull Canvas canvas) { + gradientTools.setBounds(getBounds().left, getBounds().top, getBounds().right, getBounds().bottom); + + if (currentParent != null) { + roundRadius = currentParent.getRoundRadius()[0]; + } + if (roundRadius == 0) { + canvas.drawRect(getBounds(), gradientTools.paint); + } else { + canvas.drawRoundRect(gradientTools.bounds, roundRadius, roundRadius, gradientTools.paint); + } + int cx = getBounds().centerX(); + int cy = getBounds().centerY(); + int size = (int) (getBounds().width() * AvatarConstructorFragment.STICKER_DEFAULT_SCALE) >> 1; + if (animatedEmojiDrawable != null) { + if 
(animatedEmojiDrawable.getImageReceiver() != null) { + animatedEmojiDrawable.getImageReceiver().setRoundRadius((int) (size * 2 * AvatarConstructorFragment.STICKER_DEFAULT_ROUND_RADIUS)); + } + animatedEmojiDrawable.setBounds(cx - size, cy - size, cx + size, cy + size); + animatedEmojiDrawable.draw(canvas); + } + if (imageReceiver != null) { + imageReceiver.setRoundRadius((int) (size * 2 * AvatarConstructorFragment.STICKER_DEFAULT_ROUND_RADIUS)); + imageReceiver.setImageCoords(cx - size, cy - size, size * 2, size * 2); + imageReceiver.draw(canvas); + } + } + + @Override + public void setAlpha(int alpha) { + gradientTools.paint.setAlpha(alpha); + if (animatedEmojiDrawable != null) { + animatedEmojiDrawable.setAlpha(alpha); + } + } + + @Override + public void setColorFilter(@Nullable ColorFilter colorFilter) { + + } + + @Override + public int getOpacity() { + return 0; + } + + public void setRoundRadius(float roundRadius) { + this.roundRadius = roundRadius; + } + + static int attachedToWindowCount = 0; + @Override + public void onAttachedToWindow(ImageReceiver parent) { + if (parent == null) { + return; + } + roundRadius = parent.getRoundRadius()[0]; + if (parents.isEmpty()) { + if (animatedEmojiDrawable != null) { + animatedEmojiDrawable.addView(this); + } + if (imageReceiver != null) { + imageReceiver.onAttachedToWindow(); + } + if (stickerPreloadImageReceiver != null) { + stickerPreloadImageReceiver.onAttachedToWindow(); + } + } + parents.add(parent); + if (sizeStickerMarkup != null) { + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.groupStickersDidLoad); + } + } + + @Override + public void onDetachedFromWindow(ImageReceiver parent) { + parents.remove(parent); + if (parents.isEmpty()) { + if (animatedEmojiDrawable != null) { + animatedEmojiDrawable.removeView(this); + } + if (imageReceiver != null) { + imageReceiver.onDetachedFromWindow(); + } + if (stickerPreloadImageReceiver != null) { + stickerPreloadImageReceiver.onDetachedFromWindow(); + } + } + if (sizeStickerMarkup != null) { + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.groupStickersDidLoad); + } + } + + @Override + public void invalidate() { + for (ImageReceiver parent : parents) { + parent.invalidate(); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + VectorAvatarThumbDrawable that = (VectorAvatarThumbDrawable) o; + if (type == that.type && gradientTools.color1 == that.gradientTools.color1 && gradientTools.color2 == that.gradientTools.color2 && gradientTools.color3 == that.gradientTools.color3 && gradientTools.color4 == that.gradientTools.color4) { + if (animatedEmojiDrawable != null && that.animatedEmojiDrawable != null) { + return animatedEmojiDrawable.getDocumentId() == that.animatedEmojiDrawable.getDocumentId(); + } + if (sizeStickerMarkup != null && that.sizeStickerMarkup != null) { + return sizeStickerMarkup.stickerset.id == that.sizeStickerMarkup.stickerset.id && sizeStickerMarkup.sticker_id == that.sizeStickerMarkup.sticker_id; + } + } + return false; + } + + @Override + public void didReceivedNotification(int id, int account, Object... 
args) { + if (id == NotificationCenter.groupStickersDidLoad) { + if (!imageSeted) { + setImage(); + return; + } + } + } + + public void setParent(ImageReceiver imageReceiver) { + currentParent = imageReceiver; + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoPlayer.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoPlayer.java index 70ff328644..e254b82c2f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoPlayer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoPlayer.java @@ -11,6 +11,7 @@ import android.annotation.SuppressLint; import android.content.Context; import android.graphics.SurfaceTexture; +import android.media.AudioManager; import android.net.Uri; import android.os.Handler; import android.os.Looper; @@ -25,41 +26,38 @@ import com.google.android.exoplayer2.DefaultRenderersFactory; import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.ExoPlayer; -import com.google.android.exoplayer2.ExoPlayerFactory; +import com.google.android.exoplayer2.MediaItem; +import com.google.android.exoplayer2.PlaybackException; import com.google.android.exoplayer2.PlaybackParameters; import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.Renderer; import com.google.android.exoplayer2.SimpleExoPlayer; -import com.google.android.exoplayer2.Timeline; import com.google.android.exoplayer2.analytics.AnalyticsListener; +import com.google.android.exoplayer2.audio.AudioAttributes; +import com.google.android.exoplayer2.audio.AudioCapabilities; import com.google.android.exoplayer2.audio.AudioProcessor; import com.google.android.exoplayer2.audio.AudioRendererEventListener; +import com.google.android.exoplayer2.audio.AudioSink; +import com.google.android.exoplayer2.audio.DefaultAudioSink; import com.google.android.exoplayer2.audio.TeeAudioProcessor; -import com.google.android.exoplayer2.drm.DrmSessionManager; -import com.google.android.exoplayer2.drm.FrameworkMediaCrypto; -import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory; import com.google.android.exoplayer2.mediacodec.MediaCodecRenderer; import com.google.android.exoplayer2.mediacodec.MediaCodecSelector; -import com.google.android.exoplayer2.source.ExtractorMediaSource; import com.google.android.exoplayer2.source.LoopingMediaSource; import com.google.android.exoplayer2.source.MediaSource; -import com.google.android.exoplayer2.source.TrackGroupArray; +import com.google.android.exoplayer2.source.ProgressiveMediaSource; import com.google.android.exoplayer2.source.dash.DashMediaSource; -import com.google.android.exoplayer2.source.dash.DefaultDashChunkSource; import com.google.android.exoplayer2.source.hls.HlsMediaSource; -import com.google.android.exoplayer2.source.smoothstreaming.DefaultSsChunkSource; import com.google.android.exoplayer2.source.smoothstreaming.SsMediaSource; -import com.google.android.exoplayer2.trackselection.AdaptiveTrackSelection; import com.google.android.exoplayer2.trackselection.DefaultTrackSelector; import com.google.android.exoplayer2.trackselection.MappingTrackSelector; -import com.google.android.exoplayer2.trackselection.TrackSelection; -import com.google.android.exoplayer2.trackselection.TrackSelectionArray; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DefaultAllocator; import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter; -import 
com.google.android.exoplayer2.upstream.DefaultHttpDataSourceFactory; import com.google.android.exoplayer2.video.SurfaceNotValidException; +import com.google.android.exoplayer2.video.VideoListener; +import com.google.android.exoplayer2.video.VideoSize; +import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.FourierTransform; import org.telegram.messenger.NotificationCenter; @@ -69,7 +67,7 @@ import java.util.ArrayList; @SuppressLint("NewApi") -public class VideoPlayer implements ExoPlayer.EventListener, SimpleExoPlayer.VideoListener, AnalyticsListener, NotificationCenter.NotificationCenterDelegate { +public class VideoPlayer implements Player.Listener, VideoListener, AnalyticsListener, NotificationCenter.NotificationCenterDelegate { public interface VideoPlayerDelegate { void onStateChanged(boolean playWhenReady, int playbackState); @@ -94,10 +92,9 @@ public interface AudioVisualizerDelegate { boolean needUpdate(); } - private SimpleExoPlayer player; - private SimpleExoPlayer audioPlayer; + private ExoPlayer player; + private ExoPlayer audioPlayer; private MappingTrackSelector trackSelector; - private Handler mainHandler; private DataSource.Factory mediaDataSourceFactory; private TextureView textureView; private Surface surface; @@ -118,10 +115,6 @@ public interface AudioVisualizerDelegate { private int lastReportedPlaybackState; private boolean lastReportedPlayWhenReady; - private static final int RENDERER_BUILDING_STATE_IDLE = 1; - private static final int RENDERER_BUILDING_STATE_BUILDING = 2; - private static final int RENDERER_BUILDING_STATE_BUILT = 3; - private Uri videoUri, audioUri; private String videoType, audioType; private boolean loopingMediaSource; @@ -129,23 +122,20 @@ public interface AudioVisualizerDelegate { private int repeatCount; private boolean shouldPauseOther; + MediaSource.Factory dashMediaSourceFactory; + HlsMediaSource.Factory hlsMediaSourceFactory; + SsMediaSource.Factory ssMediaSourceFactory; + ProgressiveMediaSource.Factory progressiveMediaSourceFactory; Handler audioUpdateHandler = new Handler(Looper.getMainLooper()); - private static final DefaultBandwidthMeter BANDWIDTH_METER = new DefaultBandwidthMeter(); - public VideoPlayer() { this(true); } public VideoPlayer(boolean pauseOther) { - mediaDataSourceFactory = new ExtendedDefaultDataSourceFactory(ApplicationLoader.applicationContext, BANDWIDTH_METER, new DefaultHttpDataSourceFactory("Mozilla/5.0 (X11; Linux x86_64; rv:10.0) Gecko/20150101 Firefox/47.0 (Chrome)", BANDWIDTH_METER)); - - mainHandler = new Handler(); - - TrackSelection.Factory videoTrackSelectionFactory = new AdaptiveTrackSelection.Factory(BANDWIDTH_METER); - trackSelector = new DefaultTrackSelector(videoTrackSelectionFactory); - + mediaDataSourceFactory = new ExtendedDefaultDataSourceFactory(ApplicationLoader.applicationContext, "Mozilla/5.0 (X11; Linux x86_64; rv:10.0) Gecko/20150101 Firefox/47.0 (Chrome)"); + trackSelector = new DefaultTrackSelector(ApplicationLoader.applicationContext); lastReportedPlaybackState = ExoPlayer.STATE_IDLE; shouldPauseOther = pauseOther; if (pauseOther) { @@ -171,7 +161,9 @@ private void ensurePlayerCreated() { 100, DefaultLoadControl.DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS, DefaultLoadControl.DEFAULT_TARGET_BUFFER_BYTES, - DefaultLoadControl.DEFAULT_PRIORITIZE_TIME_OVER_SIZE_THRESHOLDS); + DefaultLoadControl.DEFAULT_PRIORITIZE_TIME_OVER_SIZE_THRESHOLDS, + DefaultLoadControl.DEFAULT_BACK_BUFFER_DURATION_MS, + 
DefaultLoadControl.DEFAULT_RETAIN_BACK_BUFFER_FROM_KEYFRAME); if (player == null) { DefaultRenderersFactory factory; if (audioVisualizerDelegate != null) { @@ -180,11 +172,13 @@ private void ensurePlayerCreated() { factory = new DefaultRenderersFactory(ApplicationLoader.applicationContext); } factory.setExtensionRendererMode(DefaultRenderersFactory.EXTENSION_RENDERER_MODE_PREFER); - player = ExoPlayerFactory.newSimpleInstance(ApplicationLoader.applicationContext, factory, trackSelector, loadControl, null); + player = new ExoPlayer.Builder(ApplicationLoader.applicationContext).setRenderersFactory(factory) + .setTrackSelector(trackSelector) + .setLoadControl(loadControl).build(); player.addAnalyticsListener(this); player.addListener(this); - player.setVideoListener(this); + player.addVideoListener(this); if (textureView != null) { player.setVideoTextureView(textureView); } else if (surface != null) { @@ -195,38 +189,10 @@ private void ensurePlayerCreated() { } if (mixedAudio) { if (audioPlayer == null) { - audioPlayer = ExoPlayerFactory.newSimpleInstance(ApplicationLoader.applicationContext, trackSelector, loadControl, null, DefaultRenderersFactory.EXTENSION_RENDERER_MODE_PREFER); - audioPlayer.addListener(new Player.EventListener() { - - @Override - public void onTracksChanged(TrackGroupArray trackGroups, TrackSelectionArray trackSelections) { - - } - - @Override - public void onLoadingChanged(boolean isLoading) { - - } - - @Override - public void onTimelineChanged(Timeline timeline, Object manifest, int reason) { - - } - - @Override - public void onShuffleModeEnabledChanged(boolean shuffleModeEnabled) { - - } - - @Override - public void onPositionDiscontinuity(int reason) { - - } - - @Override - public void onSeekProcessed() { - - } + audioPlayer = new ExoPlayer.Builder(ApplicationLoader.applicationContext) + .setTrackSelector(trackSelector) + .setLoadControl(loadControl).buildSimpleExoPlayer(); + audioPlayer.addListener(new Player.Listener() { @Override public void onPlayerStateChanged(boolean playWhenReady, int playbackState) { @@ -235,21 +201,6 @@ public void onPlayerStateChanged(boolean playWhenReady, int playbackState) { checkPlayersReady(); } } - - @Override - public void onRepeatModeChanged(int repeatMode) { - - } - - @Override - public void onPlayerError(ExoPlaybackException error) { - - } - - @Override - public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) { - - } }); audioPlayer.setPlayWhenReady(autoplay); } @@ -279,20 +230,7 @@ public void preparePlayerLoop(Uri videoUri, String videoType, Uri audioUri, Stri type = audioType; uri = audioUri; } - switch (type) { - case "dash": - mediaSource = new DashMediaSource(uri, mediaDataSourceFactory, new DefaultDashChunkSource.Factory(mediaDataSourceFactory), mainHandler, null); - break; - case "hls": - mediaSource = new HlsMediaSource.Factory(mediaDataSourceFactory).createMediaSource(uri); - break; - case "ss": - mediaSource = new SsMediaSource(uri, mediaDataSourceFactory, new DefaultSsChunkSource.Factory(mediaDataSourceFactory), mainHandler, null); - break; - default: - mediaSource = new ExtractorMediaSource(uri, mediaDataSourceFactory, new DefaultExtractorsFactory(), mainHandler, null); - break; - } + mediaSource = mediaSourceFromUri(uri, type); mediaSource = new LoopingMediaSource(mediaSource); if (a == 0) { mediaSource1 = mediaSource; @@ -300,8 +238,36 @@ public void preparePlayerLoop(Uri videoUri, String videoType, Uri audioUri, Stri mediaSource2 = mediaSource; } } - player.prepare(mediaSource1, true, true); 
- audioPlayer.prepare(mediaSource2, true, true); + player.setMediaSource(mediaSource1, true); + player.prepare(); + audioPlayer.setMediaSource(mediaSource2, true); + audioPlayer.prepare(); + } + + private MediaSource mediaSourceFromUri(Uri uri, String type) { + MediaItem mediaItem = new MediaItem.Builder().setUri(uri).build(); + switch (type) { + case "dash": + if (dashMediaSourceFactory == null) { + dashMediaSourceFactory = new DashMediaSource.Factory(mediaDataSourceFactory); + } + return dashMediaSourceFactory.createMediaSource(mediaItem); + case "hls": + if (hlsMediaSourceFactory == null) { + hlsMediaSourceFactory = new HlsMediaSource.Factory(mediaDataSourceFactory); + } + return hlsMediaSourceFactory.createMediaSource(mediaItem); + case "ss": + if (ssMediaSourceFactory == null) { + ssMediaSourceFactory = new SsMediaSource.Factory(mediaDataSourceFactory); + } + return ssMediaSourceFactory.createMediaSource(mediaItem); + default: + if (progressiveMediaSourceFactory == null) { + progressiveMediaSourceFactory = new ProgressiveMediaSource.Factory(mediaDataSourceFactory); + } + return progressiveMediaSourceFactory.createMediaSource(mediaItem); + } } public void preparePlayer(Uri uri, String type) { @@ -317,22 +283,9 @@ public void preparePlayer(Uri uri, String type) { String scheme = uri.getScheme(); isStreaming = scheme != null && !scheme.startsWith("file"); ensurePlayerCreated(); - MediaSource mediaSource; - switch (type) { - case "dash": - mediaSource = new DashMediaSource(uri, mediaDataSourceFactory, new DefaultDashChunkSource.Factory(mediaDataSourceFactory), mainHandler, null); - break; - case "hls": - mediaSource = new HlsMediaSource.Factory(mediaDataSourceFactory).createMediaSource(uri); - break; - case "ss": - mediaSource = new SsMediaSource(uri, mediaDataSourceFactory, new DefaultSsChunkSource.Factory(mediaDataSourceFactory), mainHandler, null); - break; - default: - mediaSource = new ExtractorMediaSource(uri, mediaDataSourceFactory, new DefaultExtractorsFactory(), mainHandler, null); - break; - } - player.prepare(mediaSource, true, true); + MediaSource mediaSource = mediaSourceFromUri(uri, type); + player.setMediaSource(mediaSource, true); + player.prepare(); } public boolean isPlayerPrepared() { @@ -341,11 +294,11 @@ public boolean isPlayerPrepared() { public void releasePlayer(boolean async) { if (player != null) { - player.release(async); + player.release(); player = null; } if (audioPlayer != null) { - audioPlayer.release(async); + audioPlayer.release(); audioPlayer = null; } if (shouldPauseOther) { @@ -368,7 +321,7 @@ public void onSeekProcessed(EventTime eventTime) { } @Override - public void onRenderedFirstFrame(EventTime eventTime, Surface surface) { + public void onRenderedFirstFrame(EventTime eventTime, Object output, long renderTimeMs) { if (delegate != null) { delegate.onRenderedFirstFrame(eventTime); } @@ -548,10 +501,14 @@ public boolean isBuffering() { public void setStreamType(int type) { if (player != null) { - player.setAudioStreamType(type); + player.setAudioAttributes(new AudioAttributes.Builder() + .setUsage(type == AudioManager.STREAM_VOICE_CALL ? C.USAGE_VOICE_COMMUNICATION : C.USAGE_MEDIA) + .build(), false); } if (audioPlayer != null) { - audioPlayer.setAudioStreamType(type); + audioPlayer.setAudioAttributes(new AudioAttributes.Builder() + .setUsage(type == AudioManager.STREAM_VOICE_CALL ? 
C.USAGE_VOICE_COMMUNICATION : C.USAGE_MEDIA) + .build(), true); } } @@ -574,11 +531,6 @@ private void checkPlayersReady() { } } - @Override - public void onLoadingChanged(boolean isLoading) { - - } - @Override public void onPlayerStateChanged(boolean playWhenReady, int playbackState) { maybeReportPlayerState(); @@ -598,61 +550,44 @@ public void onPlayerStateChanged(boolean playWhenReady, int playbackState) { } @Override - public void onTimelineChanged(Timeline timeline, Object manifest, int reason) { - - } - - @Override - public void onShuffleModeEnabledChanged(boolean shuffleModeEnabled) { - - } - - @Override - public void onPositionDiscontinuity(int reason) { - if (reason == Player.DISCONTINUITY_REASON_PERIOD_TRANSITION) { + public void onPositionDiscontinuity(Player.PositionInfo oldPosition, Player.PositionInfo newPosition, @Player.DiscontinuityReason int reason) { + if (reason == Player.DISCONTINUITY_REASON_AUTO_TRANSITION) { repeatCount++; } } @Override - public void onSeekProcessed() { - - } - - @Override - public void onPlayerError(ExoPlaybackException error) { - Throwable cause = error.getCause(); - if (textureView != null && (!triedReinit && cause instanceof MediaCodecRenderer.DecoderInitializationException || cause instanceof SurfaceNotValidException)) { - triedReinit = true; - if (player != null) { - ViewGroup parent = (ViewGroup) textureView.getParent(); - if (parent != null) { - int i = parent.indexOfChild(textureView); - parent.removeView(textureView); - parent.addView(textureView, i); - } - player.clearVideoTextureView(textureView); - player.setVideoTextureView(textureView); - if (loopingMediaSource) { - preparePlayerLoop(videoUri, videoType, audioUri, audioType); - } else { - preparePlayer(videoUri, videoType); + public void onPlayerError(PlaybackException error) { + AndroidUtilities.runOnUIThread(() -> { + Throwable cause = error.getCause(); + if (textureView != null && (!triedReinit && cause instanceof MediaCodecRenderer.DecoderInitializationException || cause instanceof SurfaceNotValidException)) { + triedReinit = true; + if (player != null) { + ViewGroup parent = (ViewGroup) textureView.getParent(); + if (parent != null) { + int i = parent.indexOfChild(textureView); + parent.removeView(textureView); + parent.addView(textureView, i); + } + player.clearVideoTextureView(textureView); + player.setVideoTextureView(textureView); + if (loopingMediaSource) { + preparePlayerLoop(videoUri, videoType, audioUri, audioType); + } else { + preparePlayer(videoUri, videoType); + } + play(); } - play(); + } else { + delegate.onError(this, error); } - } else { - delegate.onError(this, error); - } - } - - @Override - public void onTracksChanged(TrackGroupArray trackGroups, TrackSelectionArray trackSelections) { - + }); } @Override - public void onVideoSizeChanged(int width, int height, int unappliedRotationDegrees, float pixelWidthHeightRatio) { - delegate.onVideoSizeChanged(width, height, unappliedRotationDegrees, pixelWidthHeightRatio); + public void onVideoSizeChanged(VideoSize videoSize) { + delegate.onVideoSizeChanged(videoSize.width, videoSize.height, videoSize.unappliedRotationDegrees, videoSize.pixelWidthHeightRatio); + Player.Listener.super.onVideoSizeChanged(videoSize); } @Override @@ -698,10 +633,19 @@ public AudioVisualizerRenderersFactory(Context context) { super(context); } + @Nullable @Override - protected void buildAudioRenderers(Context context, int extensionRendererMode, MediaCodecSelector mediaCodecSelector, @Nullable DrmSessionManager drmSessionManager, boolean 
playClearSamplesWithoutKeys, boolean enableDecoderFallback, AudioProcessor[] audioProcessors, Handler eventHandler, AudioRendererEventListener eventListener, ArrayList out) { - AudioProcessor audioProcessor = new TeeAudioProcessor(new VisualizerBufferSink()); - super.buildAudioRenderers(context, extensionRendererMode, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys, enableDecoderFallback, new AudioProcessor[]{audioProcessor}, eventHandler, eventListener, out); + protected AudioSink buildAudioSink(Context context, boolean enableFloatOutput, boolean enableAudioTrackPlaybackParams, boolean enableOffload) { + return new DefaultAudioSink.Builder() + .setAudioCapabilities(AudioCapabilities.getCapabilities(context)) + .setEnableFloatOutput(enableFloatOutput) + .setEnableAudioTrackPlaybackParams(enableAudioTrackPlaybackParams) + .setAudioProcessors(new AudioProcessor[] {new TeeAudioProcessor(new VisualizerBufferSink())}) + .setOffloadMode( + enableOffload + ? DefaultAudioSink.OFFLOAD_MODE_ENABLED_GAPLESS_REQUIRED + : DefaultAudioSink.OFFLOAD_MODE_DISABLED) + .build(); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoSeekPreviewImage.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoSeekPreviewImage.java index edb5393b8f..40f6dda0f8 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoSeekPreviewImage.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoSeekPreviewImage.java @@ -286,10 +286,10 @@ public void open(Uri uri) { } else { path = FileLoader.getInstance(currentAccount).getPathToAttach(document, false).getAbsolutePath(); } - fileDrawable = new AnimatedFileDrawable(new File(path), true, document.size, document, null, parentObject, 0, currentAccount, true, null); + fileDrawable = new AnimatedFileDrawable(new File(path), true, document.size, FileLoader.PRIORITY_NORMAL, document, null, parentObject, 0, currentAccount, true, null); } else { path = uri.getPath(); - fileDrawable = new AnimatedFileDrawable(new File(path), true, 0, null, null, null, 0, 0, true, null); + fileDrawable = new AnimatedFileDrawable(new File(path), true, 0, 0, null, null, null, 0, 0, true, null); } duration = fileDrawable.getDurationMs(); if (pendingProgress != 0.0f) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoTimelineView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoTimelineView.java index 3f91f1940c..ce80e38c96 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoTimelineView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoTimelineView.java @@ -40,9 +40,8 @@ public class VideoTimelineView extends View { private long videoLength; private float progressLeft; private float progressRight = 1; - private Paint paint; - private Paint paint2; - private Paint backgroundGrayPaint; + private final Paint paint2 = new Paint(); + private final Paint backgroundGrayPaint = new Paint(); private boolean pressedLeft; private boolean pressedRight; private float pressDx; @@ -69,7 +68,6 @@ public class VideoTimelineView extends View { private TimeHintView timeHintView; Paint thumbPaint = new Paint(Paint.ANTI_ALIAS_FLAG); - Paint thumbRipplePaint = new Paint(Paint.ANTI_ALIAS_FLAG); public void setKeyframes(ArrayList keyframes) { this.keyframes.clear(); @@ -85,14 +83,9 @@ public interface VideoTimelineViewDelegate { public VideoTimelineView(Context context) { super(context); - paint = new Paint(Paint.ANTI_ALIAS_FLAG); - paint.setColor(0xffffffff); - 
paint2 = new Paint(); paint2.setColor(0x7f000000); - backgroundGrayPaint = new Paint(); - thumbPaint.setColor(Color.WHITE); thumbPaint.setStrokeWidth(AndroidUtilities.dpf2(2f)); thumbPaint.setStyle(Paint.Style.STROKE); @@ -103,7 +96,6 @@ public VideoTimelineView(Context context) { public void updateColors() { backgroundGrayPaint.setColor(Theme.getColor(Theme.key_windowBackgroundGray)); - thumbRipplePaint.setColor(Theme.getColor(Theme.key_chat_recordedVoiceHighlight)); roundCornersSize = 0; if (timeHintView != null) { timeHintView.updateColors(); @@ -254,11 +246,6 @@ public boolean onTouchEvent(MotionEvent event) { return false; } - public void setColor(int color) { - paint.setColor(color); - invalidate(); - } - public void setVideoPath(String path) { destroy(); mediaMetadataRetriever = new MediaMetadataRetriever(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ViewPagerFixed.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ViewPagerFixed.java index 6d85da83f2..dade1b05cf 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ViewPagerFixed.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ViewPagerFixed.java @@ -150,11 +150,11 @@ public void onPageSelected(int page, boolean forward) { updateViewForIndex(1); onTabPageSelected(page); - + int trasnlationX = viewPages[0] != null ? viewPages[0].getMeasuredWidth() : 0; if (forward) { - viewPages[1].setTranslationX(viewPages[0].getMeasuredWidth()); + viewPages[1].setTranslationX(trasnlationX); } else { - viewPages[1].setTranslationX(-viewPages[0].getMeasuredWidth()); + viewPages[1].setTranslationX(-trasnlationX); } } @@ -812,6 +812,7 @@ private static class Tab { public String title; public int titleWidth; public int counter; + public float alpha = 1f; public Tab(int i, String t) { id = i; @@ -851,6 +852,7 @@ public void setTab(Tab tab, int position) { currentTab = tab; currentPosition = position; setContentDescription(tab.title); + setAlpha(tab.alpha); requestLayout(); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/WaveDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/WaveDrawable.java index 0427d51d3c..c0be56c576 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/WaveDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/WaveDrawable.java @@ -12,6 +12,7 @@ import android.view.animation.LinearInterpolator; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.SharedConfig; public class WaveDrawable { @@ -167,7 +168,7 @@ public void setValue(float value) { } private void startFling(float delta) { - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(LiteMode.FLAG_CALLS_ANIMATIONS)) { return; } if (flingAnimator != null) { @@ -196,7 +197,7 @@ private void startFling(float delta) { boolean wasFling; public void tick(float circleRadius) { - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(LiteMode.FLAG_CALLS_ANIMATIONS)) { return; } long newTime = SystemClock.elapsedRealtime(); @@ -330,7 +331,7 @@ public void tick(float circleRadius) { } public void draw(float cx, float cy, float scale, Canvas canvas) { - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(LiteMode.FLAG_CALLS_ANIMATIONS)) { return; } float waveAmplitude = amplitude < 0.3f ? 
amplitude / 0.3f : 1f;
diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/spoilers/SpoilerEffect.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/spoilers/SpoilerEffect.java
index c6d24da039..b7134697ca 100644
--- a/TMessagesProj/src/main/java/org/telegram/ui/Components/spoilers/SpoilerEffect.java
+++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/spoilers/SpoilerEffect.java
@@ -5,7 +5,6 @@
 import android.animation.TimeInterpolator;
 import android.animation.ValueAnimator;
 import android.annotation.SuppressLint;
-import android.graphics.Bitmap;
 import android.graphics.Canvas;
 import android.graphics.Color;
 import android.graphics.ColorFilter;
@@ -40,6 +39,7 @@
 import org.telegram.messenger.AndroidUtilities;
 import org.telegram.messenger.Emoji;
+import org.telegram.messenger.LiteMode;
 import org.telegram.messenger.LocaleController;
 import org.telegram.messenger.SharedConfig;
 import org.telegram.messenger.Utilities;
@@ -49,7 +49,6 @@
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Stack;
@@ -62,7 +61,6 @@ public class SpoilerEffect extends Drawable {
     public final static int PARTICLES_PER_CHARACTER = measureParticlesPerCharacter();
     private final static float VERTICAL_PADDING_DP = 2.5f;
     private final static int RAND_REPEAT = 14;
-    private final static float KEYPOINT_DELTA = 5f;
     private final static int FPS = 30;
     private final static int renderDelayMs = 1000 / FPS + 1;
     public final static float[] ALPHAS = {
@@ -94,7 +92,6 @@ public class SpoilerEffect extends Drawable {
     private ValueAnimator rippleAnimator;
 
     private List spaces = new ArrayList<>();
-    private List keyPoints;
     private int mAlpha = 0xFF;
 
     private TimeInterpolator rippleInterpolator = input -> input;
@@ -250,16 +247,6 @@ public void setRippleInterpolator(@NonNull TimeInterpolator rippleInterpolator)
         this.rippleInterpolator = rippleInterpolator;
     }
 
-    /**
-     * Sets new keypoints
-     *
-     * @param keyPoints New keypoints
-     */
-    public void setKeyPoints(List keyPoints) {
-        this.keyPoints = keyPoints;
-        invalidateSelf();
-    }
-
     /**
      * Gets ripple path
      */
@@ -405,7 +392,7 @@ public void draw(@NonNull Canvas canvas) {
             Paint shaderPaint = SpoilerEffectBitmapFactory.getInstance().getPaint();
             shaderPaint.setColorFilter(new PorterDuffColorFilter(lastColor, PorterDuff.Mode.SRC_IN));
             canvas.drawRect(getBounds().left, getBounds().top, getBounds().right, getBounds().bottom, SpoilerEffectBitmapFactory.getInstance().getPaint());
-            if (!SharedConfig.getLiteMode().enabled()) {
+            if (LiteMode.isEnabled(LiteMode.FLAG_CHAT_SPOILER)) {
                 invalidateSelf();
                 SpoilerEffectBitmapFactory.getInstance().checkUpdate();
             }
@@ -441,15 +428,8 @@ private boolean isOutOfBounds(int left, int top, int right, int bottom, float x,
     }
 
     private void generateRandomLocation(Particle newParticle, int i) {
-        if (keyPoints != null && !keyPoints.isEmpty()) {
-            float rf = particleRands[i % RAND_REPEAT];
-            long kp = keyPoints.get(Utilities.fastRandom.nextInt(keyPoints.size()));
-            newParticle.x = getBounds().left + (kp >> 16) + rf * AndroidUtilities.dp(KEYPOINT_DELTA) - AndroidUtilities.dp(KEYPOINT_DELTA / 2f);
-            newParticle.y = getBounds().top + (kp & 0xFFFF) + rf * AndroidUtilities.dp(KEYPOINT_DELTA) - AndroidUtilities.dp(KEYPOINT_DELTA / 2f);
-        } else {
-            newParticle.x = getBounds().left + Utilities.fastRandom.nextFloat() * getBounds().width();
-            newParticle.y = getBounds().top + Utilities.fastRandom.nextFloat() * getBounds().height();
-        }
+        newParticle.x = getBounds().left + Utilities.fastRandom.nextFloat() * getBounds().width();
+        newParticle.y = getBounds().top + Utilities.fastRandom.nextFloat() * getBounds().height();
     }
 
     @Override
@@ -523,42 +503,6 @@ public int getOpacity() {
         return PixelFormat.TRANSPARENT;
     }
 
-    /**
-     * @param textLayout Text layout to measure
-     * @return Measured key points
-     */
-    public static synchronized List measureKeyPoints(Layout textLayout) {
-        int w = textLayout.getWidth();
-        int h = textLayout.getHeight();
-
-        if (w <= 0 || h <= 0)
-            return Collections.emptyList();
-
-        Bitmap measureBitmap = Bitmap.createBitmap(Math.round(w), Math.round(h), Bitmap.Config.ARGB_4444); // We can use 4444 as we don't need accuracy here
-        Canvas measureCanvas = new Canvas(measureBitmap);
-        textLayout.draw(measureCanvas);
-
-        int[] pixels = new int[measureBitmap.getWidth() * measureBitmap.getHeight()];
-        measureBitmap.getPixels(pixels, 0, measureBitmap.getWidth(), 0, 0, w, h);
-
-        int sX = -1;
-        ArrayList keyPoints = new ArrayList<>(pixels.length);
-        for (int x = 0; x < w; x++) {
-            for (int y = 0; y < h; y++) {
-                int clr = pixels[y * measureBitmap.getWidth() + x];
-                if (Color.alpha(clr) >= 0x80) {
-                    if (sX == -1) {
-                        sX = x;
-                    }
-                    keyPoints.add(((long) (x - sX) << 16) + y);
-                }
-            }
-        }
-        keyPoints.trimToSize();
-        measureBitmap.recycle();
-        return keyPoints;
-    }
-
     /**
      * @return Max particles count
      */
@@ -665,8 +609,6 @@ private static void addSpoilersInternal(View v, Spanned spannable, Layout textLa
                 spoilerEffect.setBounds((int) Math.min(ps, pe), (int) lineTop, (int) Math.max(ps, pe), (int) lineBottom);
                 spoilerEffect.setColor(textLayout.getPaint().getColor());
                 spoilerEffect.setRippleInterpolator(Easings.easeInQuad);
-                if (!spoilerEffect.isLowDevice)
-                    spoilerEffect.setKeyPoints(SpoilerEffect.measureKeyPoints(newLayout));
                 spoilerEffect.updateMaxParticles();
                 if (v != null) {
                     spoilerEffect.setParentView(v);
diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/spoilers/SpoilersTextView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/spoilers/SpoilersTextView.java
index d3a47c640b..5c2e1e3dec 100644
--- a/TMessagesProj/src/main/java/org/telegram/ui/Components/spoilers/SpoilersTextView.java
+++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/spoilers/SpoilersTextView.java
@@ -126,6 +126,12 @@ protected void onDraw(Canvas canvas) {
         }
     }
 
+    @Override
+    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
+        super.onLayout(changed, left, top, right, bottom);
+        invalidateSpoilers();
+    }
+
     private void invalidateSpoilers() {
         if (spoilers == null) return; // Check for a super constructor
         spoilersPool.addAll(spoilers);
diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/DarkTheme.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/DarkTheme.java
index a43b73451b..3a6b7e7082 100644
--- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/DarkTheme.java
+++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/DarkTheme.java
@@ -18,8 +18,6 @@ public static int getColor(String key){
         switch(key){
             case "avatar_subtitleInProfilePink":
                 return 0xFF8A8A8A;
-            case "chat_secretTimerBackground":
-                return 0xB61E1E1E;
             case "chat_emojiPanelTrendingDescription":
                 return 0xFF717171;
             case "chat_inFileBackground":
@@ -32,16 +30,12 @@ public static int getColor(String key){
                 return 0xFF828282;
             case "chat_inTimeText":
                 return 0xD98091A0;
-            case "avatar_backgroundGroupCreateSpanBlue":
-                return 0xFF2D5E8C;
             case "windowBackgroundGray":
                 return 0xFF0D0D0D;
case "windowBackgroundWhiteGreenText2": return 0xFF42C366; case "chat_emojiPanelBackspace": return 0xFF727272; - case "chat_outPreviewInstantSelectedText": - return 0xFFFFFFFF; case "chat_inBubble": return 0xFF253442; case "chat_outFileInfoSelectedText": @@ -106,22 +100,14 @@ public static int getColor(String key){ return 0xFF282E33; case "chat_messageTextIn": return 0xFFFAFAFA; - case "chat_outLoaderPhoto": - return 0xFF3872A4; - case "chat_outFileIcon": - return 0xFF37729C; case "chat_serviceBackgroundSelected": return 0x60495154; case "inappPlayerBackground": return 0xD82B2B2B; case "chat_topPanelLine": return 0xFF5680A9; - case "player_actionBar": - return 0xFF1C1C1C; case "chat_outFileInfoText": return 0xFFAACFEE; - case "chat_outLoaderPhotoIcon": - return 0xFF72A5D0; case "chat_unreadMessagesStartArrowIcon": return 0xFF5A6B7A; case "chat_outAudioProgress": @@ -136,8 +122,6 @@ public static int getColor(String key){ return 0xFFFAFAFA; case "chat_outAudioTitleText": return 0xFFD1EBFF; - case "chat_inLoaderPhotoSelected": - return 0xFF1C4063; case "inappPlayerPerformer": return 0xFFFAFAFA; case "actionBarActionModeDefaultTop": @@ -152,28 +136,18 @@ public static int getColor(String key){ return 0xFF6CB55B; case "chat_outAudioSeekbarFill": return 0xFFC4E1F7; - case "player_placeholder": - return 0xFF2B2B2B; case "chat_inReplyNameText": return 0xFF55A2DB; case "chat_messagePanelIcons": return 0xFF696969; case "graySection": return 0xFF222222; - case "chats_nameIcon": - return 0xFFDDDDDD; case "avatar_backgroundActionBarViolet": return 0xFF212426; - case "chat_emojiPanelIconSelector": - return 0xFF5598DB; - case "chat_replyPanelMessage": - return 0xFF939393; case "chat_outPreviewInstantText": return 0xFFD1EBFF; case "chat_emojiPanelTrendingTitle": return 0xFFF4F4F4; - case "chat_inPreviewInstantSelectedText": - return 0xFF56A2DB; case "chat_inFileInfoSelectedText": return 0xFFA9CFEE; case "avatar_subtitleInProfileRed": @@ -198,8 +172,6 @@ public static int getColor(String key){ return 0xFFFFFFFF; case "chat_outTimeSelectedText": return 0xFFFFFFFF; - case "chat_outFileSelectedIcon": - return 0xFF2B83CB; case "chats_secretIcon": return 0xFF71D756; case "dialogIcon": @@ -234,8 +206,6 @@ public static int getColor(String key){ return 0xFFFFFFFF; case "avatar_actionBarSelectorGreen": return 0xFF495154; - case "chat_inFileIcon": - return 0xFF233442; case "chat_inAudioTitleText": return 0xFF56A3DB; case "chat_inAudioDurationSelectedText": @@ -314,8 +284,6 @@ public static int getColor(String key){ return 0x6628323D; case "windowBackgroundWhiteGrayText2": return 0xFF797979; - case "chat_inFileSelectedIcon": - return 0xFF1A4063; case "profile_actionIcon": return 0xFFFFFFFF; case "chat_secretChatStatusText": @@ -398,12 +366,8 @@ public static int getColor(String key){ return 0xFF549CDD; case "chat_outInstantSelected": return 0xFFFFFFFF; - case "chat_outLoaderPhotoIconSelected": - return 0xFFFFFFFF; case "chat_outAudioSeekbar": return 0x9672A5D0; - case "chat_inLoaderPhotoIcon": - return 0xFF596F80; case "windowBackgroundWhiteRedText5": return 0xFFFF4C56; case "avatar_actionBarSelectorViolet": @@ -420,8 +384,6 @@ public static int getColor(String key){ return 0xFF5EA4E0; case "chat_inMenu": return 0x795C6F80; - case "player_seekBarBackground": - return 0x47525252; case "chats_sentClock": return 0xFF6082BD; case "chat_messageLinkOut": @@ -446,18 +408,12 @@ public static int getColor(String key){ return 0x11FFFFFF; case "chat_goDownButtonIcon": return 0xFFE4E4E4; - case "chats_menuCloudBackgroundCats": 
- return 0xFF549CDD; - case "chat_inLoaderPhotoIconSelected": - return 0xFFA9CFEE; case "windowBackgroundWhiteBlueText4": return 0xFF4A8FCD; case "chat_inContactNameText": return 0xFF56A3DB; case "chat_topPanelTitle": return 0xFF55A3DB; - case "chat_outLoaderPhotoSelected": - return 0xFF3672A4; case "avatar_actionBarSelectorPink": return 0xFF495154; case "chat_outContactNameText": @@ -502,8 +458,6 @@ public static int getColor(String key){ return 0xFF515151; case "chat_inFileProgress": return 0xFF5D6F80; - case "chat_outLocationBackground": - return 0xFFA0DCF5; case "chats_muteIcon": return 0xFF5B5B5B; case "groupcreate_spanText": diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/RTMPStreamPipOverlay.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/RTMPStreamPipOverlay.java index 13c23a592b..53d5547e5d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/RTMPStreamPipOverlay.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/RTMPStreamPipOverlay.java @@ -662,7 +662,9 @@ private void bindTextureView() { boundParticipant = participant; } } else if (boundParticipant != null) { - VoIPService.getSharedInstance().removeRemoteSink(boundParticipant, false); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().removeRemoteSink(boundParticipant, false); + } boundParticipant = null; } boolean showPlaceholder = !firstFrameRendered || boundParticipant == null || boundParticipant.video == null && boundParticipant.presentation == null || diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java index e60161c9b6..3b274472db 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java @@ -507,7 +507,7 @@ public static void showRateAlert(final Context context, final Runnable onDismiss commentBox.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); commentBox.setHintTextColor(Theme.getColor(Theme.key_dialogTextHint)); commentBox.setBackground(null); - commentBox.setLineColors(Theme.getColor(Theme.key_dialogInputField), Theme.getColor(Theme.key_dialogInputFieldActivated), Theme.getColor(Theme.key_dialogTextRed2)); + commentBox.setLineColors(Theme.getColor(Theme.key_dialogInputField), Theme.getColor(Theme.key_dialogInputFieldActivated), Theme.getColor(Theme.key_dialogTextRed)); commentBox.setPadding(0, AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4)); commentBox.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 18); commentBox.setVisibility(View.GONE); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ContactAddActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ContactAddActivity.java index ab7f056cd9..c1c41aab2c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ContactAddActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ContactAddActivity.java @@ -115,13 +115,13 @@ public interface ContactAddActivityDelegate { public ContactAddActivity(Bundle args) { super(args); - imageUpdater = new ImageUpdater(true); + imageUpdater = new ImageUpdater(true, ImageUpdater.FOR_TYPE_USER, true); } public ContactAddActivity(Bundle args, Theme.ResourcesProvider resourcesProvider) { super(args); this.resourcesProvider = resourcesProvider; - imageUpdater = new ImageUpdater(true); + imageUpdater = new ImageUpdater(true, ImageUpdater.FOR_TYPE_USER, true); } @Override @@ -453,7 
+453,7 @@ protected void onLayout(boolean changed, int left, int top, int right, int botto LocaleController.formatString("ResetToOriginalPhotoMessage", R.string.ResetToOriginalPhotoMessage, user.first_name), LocaleController.getString("Reset", R.string.Reset), () -> { avatar = null; - sendPhotoChangedRequest(null, null,null, null, 0, TYPE_SET); + sendPhotoChangedRequest(null, null,null, null, null, 0, TYPE_SET); TLRPC.User user1 = getMessagesController().getUser(user_id); user1.photo.personal = false; @@ -673,7 +673,7 @@ public boolean canFinishFragment() { } @Override - public void didUploadPhoto(TLRPC.InputFile photo, TLRPC.InputFile video, double videoStartTimestamp, String videoPath, TLRPC.PhotoSize bigSize, TLRPC.PhotoSize smallSize, boolean isVideo) { + public void didUploadPhoto(TLRPC.InputFile photo, TLRPC.InputFile video, double videoStartTimestamp, String videoPath, TLRPC.PhotoSize bigSize, TLRPC.PhotoSize smallSize, boolean isVideo, TLRPC.VideoSize emojiMarkup) { AndroidUtilities.runOnUIThread(() -> { if (imageUpdater.isCanceled()) { return; @@ -704,7 +704,7 @@ public void didUploadPhoto(TLRPC.InputFile photo, TLRPC.InputFile video, double getNotificationCenter().postNotificationName(NotificationCenter.reloadDialogPhotos); getNotificationCenter().postNotificationName(NotificationCenter.updateInterfaces, MessagesController.UPDATE_MASK_AVATAR); } - sendPhotoChangedRequest(avatar, bigSize.location, photo, video, videoStartTimestamp, photoSelectedTypeFinal); + sendPhotoChangedRequest(avatar, bigSize.location, photo, video, emojiMarkup, videoStartTimestamp, photoSelectedTypeFinal); showAvatarProgress(false, true); } else { avatarImage.setImage(ImageLocation.getForLocal(avatar), "50_50", avatarDrawable, getMessagesController().getUser(user_id)); @@ -764,7 +764,7 @@ private void createServiceMessageLocal(TLRPC.PhotoSize smallSize, TLRPC.PhotoSiz getMessagesController().photoSuggestion.put(message.local_id, imageUpdater); } - private void sendPhotoChangedRequest(TLRPC.FileLocation avatar, TLRPC.FileLocation bigAvatar, TLRPC.InputFile photo, TLRPC.InputFile video, double videoStartTimestamp, int photoSelectedTypeFinal) { + private void sendPhotoChangedRequest(TLRPC.FileLocation avatar, TLRPC.FileLocation bigAvatar, TLRPC.InputFile photo, TLRPC.InputFile video, TLRPC.VideoSize emojiMarkup, double videoStartTimestamp, int photoSelectedTypeFinal) { TLRPC.TL_photos_uploadContactProfilePhoto req = new TLRPC.TL_photos_uploadContactProfilePhoto(); req.user_id = getMessagesController().getInputUser(user_id); @@ -778,6 +778,10 @@ private void sendPhotoChangedRequest(TLRPC.FileLocation avatar, TLRPC.FileLocati req.video_start_ts = videoStartTimestamp; req.flags |= 4; } + if (emojiMarkup != null) { + req.flags |= 32; + req.video_emoji_markup = emojiMarkup; + } if (photoSelectedTypeFinal == TYPE_SUGGEST) { req.suggest = true; req.flags |= 8; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ContactsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ContactsActivity.java index 1a1d294d07..46095d2d9d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ContactsActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ContactsActivity.java @@ -620,7 +620,14 @@ public void onScrolled(RecyclerView recyclerView, int dx, int dy) { floatingButtonContainer = new FrameLayout(context); frameLayout.addView(floatingButtonContainer, LayoutHelper.createFrame((Build.VERSION.SDK_INT >= 21 ? 56 : 60) + 20, (Build.VERSION.SDK_INT >= 21 ? 56 : 60) + 20, (LocaleController.isRTL ? 
Gravity.LEFT : Gravity.RIGHT) | Gravity.BOTTOM, LocaleController.isRTL ? 4 : 0, 0, LocaleController.isRTL ? 0 : 4, 0)); floatingButtonContainer.setOnClickListener(v -> { - new NewContactBottomSheet(ContactsActivity.this, getContext()).show(); + AndroidUtilities.requestAdjustNothing(getParentActivity(), getClassGuid()); + new NewContactBottomSheet(ContactsActivity.this, getContext()) { + @Override + public void dismissInternal() { + super.dismissInternal(); + AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); + } + }.show(); }); floatingButton = new RLottieImageView(context); @@ -681,9 +688,9 @@ private void didSelectResult(final TLRPC.User user, boolean useAlert, String par TLRPC.Chat chat = getMessagesController().getChat(channelId); AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity()); if (ChatObject.canAddAdmins(chat)) { - builder.setTitle(LocaleController.getString("AppName", R.string.AppName)); + builder.setTitle(LocaleController.getString("AddBotAdminAlert", R.string.AddBotAdminAlert)); builder.setMessage(LocaleController.getString("AddBotAsAdmin", R.string.AddBotAsAdmin)); - builder.setPositiveButton(LocaleController.getString("MakeAdmin", R.string.MakeAdmin), (dialogInterface, i) -> { + builder.setPositiveButton(LocaleController.getString("AddAsAdmin", R.string.AddAsAdmin), (dialogInterface, i) -> { if (delegate != null) { delegate.didSelectContact(user, param, this); delegate = null; @@ -931,7 +938,7 @@ public void didReceivedNotification(int id, int account, Object... args) { } } else if (id == NotificationCenter.closeChats) { if (!creatingChat) { - removeSelfFromStack(); + removeSelfFromStack(true); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ContactsWidgetConfigActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ContactsWidgetConfigActivity.java index 981dd27a30..c8b3d60dc0 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ContactsWidgetConfigActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ContactsWidgetConfigActivity.java @@ -23,7 +23,7 @@ protected boolean handleIntent(Intent intent, boolean isNew, boolean restore, bo if (creatingAppWidgetId != AppWidgetManager.INVALID_APPWIDGET_ID) { Bundle args = new Bundle(); args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 10); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_WIDGET); args.putBoolean("allowSwitchAccount", true); EditWidgetActivity fragment = new EditWidgetActivity(EditWidgetActivity.TYPE_CONTACTS, creatingAppWidgetId); fragment.setDelegate(dialogs -> { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ContentPreviewViewer.java b/TMessagesProj/src/main/java/org/telegram/ui/ContentPreviewViewer.java index b48d4fcee7..f15ccbe6a6 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ContentPreviewViewer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ContentPreviewViewer.java @@ -65,9 +65,14 @@ import org.telegram.ui.Cells.StickerCell; import org.telegram.ui.Cells.StickerEmojiCell; import org.telegram.ui.Components.AlertsCreator; +import org.telegram.ui.Components.AnimatedEmojiDrawable; +import org.telegram.ui.Components.AnimatedEmojiSpan; import org.telegram.ui.Components.CubicBezierInterpolator; +import org.telegram.ui.Components.EmojiPacksAlert; +import org.telegram.ui.Components.EmojiView; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.RecyclerListView; +import org.telegram.ui.Components.SuggestEmojiView; import java.io.FileOutputStream; import 
java.util.ArrayList; @@ -95,14 +100,14 @@ default boolean can() { return true; } - void sendSticker(TLRPC.Document sticker, String query, Object parent, boolean notify, int scheduleDate); - void openSet(TLRPC.InputStickerSet set, boolean clearInputField); - boolean needSend(); + boolean needSend(int contentType); + default void sendSticker(TLRPC.Document sticker, String query, Object parent, boolean notify, int scheduleDate) {} + default void sendGif(Object gif, Object parent, boolean notify, int scheduleDate) {} + default void sendEmoji(TLRPC.Document emoji) {} boolean canSchedule(); - boolean isInScheduleMode(); long getDialogId(); @@ -110,7 +115,6 @@ default boolean can() { default boolean needRemove() { return false; } - default void remove(SendMessagesHelper.ImportingSticker sticker) { } @@ -123,22 +127,34 @@ default boolean needOpen() { return true; } - default void sendGif(Object gif, Object parent, boolean notify, int scheduleDate) { + default void gifAddedOrDeleted() {} + default boolean needMenu() { + return true; } - default void gifAddedOrDeleted() { + default Boolean canSetAsStatus(TLRPC.Document document) { + return null; + } + default void setAsEmojiStatus(TLRPC.Document document, Integer until) {} + default boolean needCopy() { + return false; } + default void copyEmoji(TLRPC.Document document) {} - default boolean needMenu() { - return true; + default void resetTouch() {} + + default boolean needRemoveFromRecent(TLRPC.Document document) { + return false; } + default void removeFromRecent(TLRPC.Document document) {} } - private final static int CONTENT_TYPE_NONE = -1; - private final static int CONTENT_TYPE_STICKER = 0; - private final static int CONTENT_TYPE_GIF = 1; + public final static int CONTENT_TYPE_NONE = -1; + public final static int CONTENT_TYPE_STICKER = 0; + public final static int CONTENT_TYPE_GIF = 1; + public final static int CONTENT_TYPE_EMOJI = 2; private final static int nkbtn_send_without_sound = 100; private final static int nkbtn_stickerdl = 110; @@ -210,19 +226,19 @@ public void run() { menuVisible = true; containerView.invalidate(); if (delegate != null) { - if (delegate.needSend() && !delegate.isInScheduleMode()) { + if (delegate.needSend(currentContentType) && !delegate.isInScheduleMode()) { items.add(LocaleController.getString("SendStickerPreview", R.string.SendStickerPreview)); icons.add(R.drawable.outline_send); actions.add(0); } - if (delegate.needSend() && !delegate.isInScheduleMode()) { + if (delegate.needSend(currentContentType) && !delegate.isInScheduleMode()) { items.add(LocaleController.getString("SendWithoutSound", R.string.SendWithoutSound)); icons.add(R.drawable.input_notify_off); actions.add(nkbtn_send_without_sound); } if (delegate.canSchedule()) { items.add(LocaleController.getString("Schedule", R.string.Schedule)); - icons.add(R.drawable.msg_timer); + icons.add(R.drawable.baseline_timer_24); actions.add(3); } if (currentStickerSet != null && delegate.needOpen()) { @@ -280,6 +296,9 @@ public void onClick(View v) { Object parent = parentObject; String query = currentQuery; ContentPreviewViewerDelegate stickerPreviewViewerDelegate = delegate; + if (stickerPreviewViewerDelegate == null) { + return; + } AlertsCreator.createScheduleDatePickerDialog(parentActivity, stickerPreviewViewerDelegate.getDialogId(), (notify, scheduleDate) -> stickerPreviewViewerDelegate.sendSticker(sticker, query, parent, notify, scheduleDate)); } else if (actions.get(which) == 4) { 
MediaDataController.getInstance(currentAccount).addRecentSticker(MediaDataController.TYPE_IMAGE, parentObject, currentDocument, (int) (System.currentTimeMillis() / 1000), true); @@ -293,7 +312,7 @@ public void onClick(View v) { } } }; - ActionBarPopupWindow.ActionBarPopupWindowLayout previewMenu = new ActionBarPopupWindow.ActionBarPopupWindowLayout(containerView.getContext(), R.drawable.popup_fixed_alert2, resourcesProvider); + ActionBarPopupWindow.ActionBarPopupWindowLayout previewMenu = new ActionBarPopupWindow.ActionBarPopupWindowLayout(containerView.getContext(), R.drawable.popup_fixed_alert3, resourcesProvider); for (int i = 0; i < items.size(); i++) { View item = ActionBarMenuItem.addItem(previewMenu, icons.get(i), items.get(i), false, resourcesProvider); @@ -348,28 +367,159 @@ public void dismiss() { } popupWindow.showAtLocation(containerView, 0, (int) ((containerView.getMeasuredWidth() - previewMenu.getMeasuredWidth()) / 2f), y); + if (!NekoConfig.disableVibration.Bool()) + containerView.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); + } else if (currentContentType == CONTENT_TYPE_EMOJI && delegate != null) { + menuVisible = true; + containerView.invalidate(); + ArrayList items = new ArrayList<>(); + final ArrayList actions = new ArrayList<>(); + ArrayList icons = new ArrayList<>(); + + if (delegate.needSend(currentContentType)) { + items.add(LocaleController.getString("SendEmojiPreview", R.string.SendEmojiPreview)); + icons.add(R.drawable.msg_send); + actions.add(0); + } + Boolean canSetAsStatus = delegate.canSetAsStatus(currentDocument); + if (canSetAsStatus != null) { + if (canSetAsStatus) { + items.add(LocaleController.getString("SetAsEmojiStatus", R.string.SetAsEmojiStatus)); + icons.add(R.drawable.msg_smile_status); + actions.add(1); + } else { + items.add(LocaleController.getString("RemoveStatus", R.string.RemoveStatus)); + icons.add(R.drawable.msg_smile_status); + actions.add(2); + } + } + if (delegate.needCopy()) { + items.add(LocaleController.getString("CopyEmojiPreview", R.string.CopyEmojiPreview)); + icons.add(R.drawable.msg_copy); + actions.add(3); + } + if (delegate.needRemoveFromRecent(currentDocument)) { + items.add(LocaleController.getString("RemoveFromRecent", R.string.RemoveFromRecent)); + icons.add(R.drawable.msg_delete); + actions.add(4); + } + if (items.isEmpty()) { + return; + } + + int[] ic = new int[icons.size()]; + for (int a = 0; a < icons.size(); a++) { + ic[a] = icons.get(a); + } + + ActionBarPopupWindow.ActionBarPopupWindowLayout previewMenu = new ActionBarPopupWindow.ActionBarPopupWindowLayout(containerView.getContext(), R.drawable.popup_fixed_alert2, resourcesProvider); + + View.OnClickListener onItemClickListener = v -> { + if (parentActivity == null || delegate == null) { + return; + } + int which = (int) v.getTag(); + int action = actions.get(which); + if (action == 0) { + delegate.sendEmoji(currentDocument); + } else if (action == 1) { + delegate.setAsEmojiStatus(currentDocument, null); + } else if (action == 2) { + delegate.setAsEmojiStatus(null, null); + } else if (action == 3) { + delegate.copyEmoji(currentDocument); + } else if (action == 4) { + delegate.removeFromRecent(currentDocument); + } + if (popupWindow != null) { + popupWindow.dismiss(); + } + }; + + for (int i = 0; i < items.size(); i++) { + ActionBarMenuSubItem item = ActionBarMenuItem.addItem(i == 0, i == items.size() - 1, previewMenu, icons.get(i), items.get(i), false, resourcesProvider); + if (actions.get(i) == 4) { + 
item.setIconColor(getThemedColor(Theme.key_dialogRedIcon)); + item.setTextColor(getThemedColor(Theme.key_dialogTextRed)); + } + item.setTag(i); + item.setOnClickListener(onItemClickListener); + } + popupWindow = new ActionBarPopupWindow(previewMenu, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT) { + @Override + public void dismiss() { + super.dismiss(); + popupWindow = null; + menuVisible = false; + if (closeOnDismiss) { + close(); + } + } + }; + popupWindow.setPauseNotifications(true); + popupWindow.setDismissAnimationDuration(150); + popupWindow.setScaleOut(true); + popupWindow.setOutsideTouchable(true); + popupWindow.setClippingEnabled(true); + popupWindow.setAnimationStyle(R.style.PopupContextAnimation); + popupWindow.setFocusable(true); + previewMenu.measure(View.MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), View.MeasureSpec.AT_MOST), View.MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), View.MeasureSpec.AT_MOST)); + popupWindow.setInputMethodMode(ActionBarPopupWindow.INPUT_METHOD_NOT_NEEDED); + popupWindow.getContentView().setFocusableInTouchMode(true); + + int insets = 0; + int top; + if (Build.VERSION.SDK_INT >= 21 && lastInsets != null) { + insets = lastInsets.getStableInsetBottom() + lastInsets.getStableInsetTop(); + top = lastInsets.getStableInsetTop(); + } else { + top = AndroidUtilities.statusBarHeight; + } + int size = Math.min(containerView.getWidth(), containerView.getHeight() - insets) - AndroidUtilities.dp(40f); + + int y = (int) (moveY + Math.max(size / 2 + top + (stickerEmojiLayout != null ? AndroidUtilities.dp(40) : 0), (containerView.getHeight() - insets - keyboardHeight) / 2) + size / 2); + y += AndroidUtilities.dp(24) - moveY; + popupWindow.showAtLocation(containerView, 0, (int) ((containerView.getMeasuredWidth() - previewMenu.getMeasuredWidth()) / 2f), y); + ActionBarPopupWindow.startAnimation(previewMenu); + if (!NekoConfig.disableVibration.Bool()) containerView.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); + if (moveY != 0) { + if (finalMoveY == 0) { + finalMoveY = 0; + startMoveY = moveY; + } + ValueAnimator valueAnimator = ValueAnimator.ofFloat(0f, 1f); + valueAnimator.addUpdateListener(animation -> { + currentMoveYProgress = (float) animation.getAnimatedValue(); + moveY = startMoveY + (finalMoveY - startMoveY) * currentMoveYProgress; + ContentPreviewViewer.this.containerView.invalidate(); + }); + valueAnimator.setDuration(350); + valueAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + valueAnimator.start(); + } } else if (delegate != null) { menuVisible = true; + containerView.invalidate(); ArrayList items = new ArrayList<>(); final ArrayList actions = new ArrayList<>(); ArrayList icons = new ArrayList<>(); - if (delegate.needSend() && !delegate.isInScheduleMode()) { + if (delegate.needSend(currentContentType) && !delegate.isInScheduleMode()) { items.add(LocaleController.getString("SendGifPreview", R.string.SendGifPreview)); icons.add(R.drawable.outline_send); actions.add(0); } - if (delegate.needSend() && !delegate.isInScheduleMode()) { + if (delegate.needSend(currentContentType) && !delegate.isInScheduleMode()) { items.add(LocaleController.getString("SendWithoutSound", R.string.SendWithoutSound)); icons.add(R.drawable.input_notify_off); - actions.add(nkbtn_send_without_sound); + actions.add(4); } if (delegate.canSchedule()) { items.add(LocaleController.getString("Schedule", R.string.Schedule)); - icons.add(R.drawable.msg_timer); + icons.add(R.drawable.baseline_timer_24); actions.add(3); } @@ -387,6 +537,9 @@ 
public void dismiss() { } else { canDelete = false; } + if (items.isEmpty()) { + return; + } int[] ic = new int[icons.size()]; for (int a = 0; a < icons.size(); a++) { @@ -402,6 +555,8 @@ public void dismiss() { int which = (int) v.getTag(); if (actions.get(which) == 0 || actions.get(which) == nkbtn_send_without_sound) { delegate.sendGif(currentDocument != null ? currentDocument : inlineResult, parentObject, actions.get(which) == 0, 0); + } else if (actions.get(which) == 4) { + delegate.sendGif(currentDocument != null ? currentDocument : inlineResult, parentObject, false, 0); } else if (actions.get(which) == 1) { MediaDataController.getInstance(currentAccount).removeRecentGif(currentDocument); delegate.gifAddedOrDeleted(); @@ -427,7 +582,7 @@ public void dismiss() { item.setOnClickListener(onItemClickListener); if (canDelete && i == items.size() - 1) { - item.setColors(getThemedColor(Theme.key_dialogTextRed2), getThemedColor(Theme.key_dialogRedIcon)); + item.setColors(getThemedColor(Theme.key_dialogTextRed), getThemedColor(Theme.key_dialogRedIcon)); } } popupWindow = new ActionBarPopupWindow(previewMenu, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT) { @@ -654,10 +809,19 @@ public boolean onTouch(MotionEvent event, final RecyclerListView listView, final contentType = CONTENT_TYPE_GIF; centerImage.setRoundRadius(AndroidUtilities.dp(6)); } + } else if (view instanceof EmojiPacksAlert.EmojiImageView) { + contentType = CONTENT_TYPE_EMOJI; + centerImage.setRoundRadius(0); + } else if (view instanceof EmojiView.ImageViewEmoji && ((EmojiView.ImageViewEmoji) view).getSpan() != null) { + contentType = CONTENT_TYPE_EMOJI; + centerImage.setRoundRadius(0); } if (contentType == CONTENT_TYPE_NONE || view == currentPreviewCell) { break; } + if (delegate != null) { + delegate.resetTouch(); + } if (currentPreviewCell instanceof StickerEmojiCell) { ((StickerEmojiCell) currentPreviewCell).setScaled(false); } else if (currentPreviewCell instanceof StickerCell) { @@ -688,6 +852,38 @@ public boolean onTouch(MotionEvent event, final RecyclerListView listView, final if (contentType != CONTENT_TYPE_GIF) { contextLinkCell.setScaled(true); } + } else if (currentPreviewCell instanceof EmojiPacksAlert.EmojiImageView) { + EmojiPacksAlert.EmojiImageView imageView = (EmojiPacksAlert.EmojiImageView) currentPreviewCell; + TLRPC.Document document = imageView.getDocument(); + if (document != null) { + open(document, null, MessageObject.findAnimatedEmojiEmoticon(document, null), null, null, contentType, false, null, resourcesProvider); + } + } else if (currentPreviewCell instanceof EmojiView.ImageViewEmoji) { + EmojiView.ImageViewEmoji imageView = (EmojiView.ImageViewEmoji) currentPreviewCell; + AnimatedEmojiSpan span = imageView.getSpan(); + TLRPC.Document document = null; + if (span != null) { + document = span.document; + if (document == null) { + document = AnimatedEmojiDrawable.findDocument(currentAccount, span.getDocumentId()); + } + } + if (document != null) { + open(document, null, MessageObject.findAnimatedEmojiEmoticon(document, null), null, null, contentType, false, null, resourcesProvider); + } else { + return false; + } + } else if (currentPreviewCell instanceof SuggestEmojiView.EmojiImageView) { + SuggestEmojiView.EmojiImageView emojiImageView = (SuggestEmojiView.EmojiImageView) currentPreviewCell; + Drawable drawable = emojiImageView.drawable; + TLRPC.Document document = null; + if (drawable instanceof AnimatedEmojiDrawable) { + document = ((AnimatedEmojiDrawable) drawable).getDocument(); + } + if 
(document == null) { + return false; + } + open(document, null, MessageObject.findAnimatedEmojiEmoticon(document, null), null, null, contentType, false, null, resourcesProvider); } runSmoothHaptic(); @@ -770,6 +966,19 @@ public boolean onInterceptTouchEvent(MotionEvent event, final RecyclerListView l centerImage.setRoundRadius(AndroidUtilities.dp(6)); } } + } else if (view instanceof EmojiPacksAlert.EmojiImageView) { + contentType = CONTENT_TYPE_EMOJI; + centerImage.setRoundRadius(0); + } else if (view instanceof EmojiView.ImageViewEmoji && ((EmojiView.ImageViewEmoji) view).getSpan() != null) { + contentType = CONTENT_TYPE_EMOJI; + centerImage.setRoundRadius(0); + } else if (view instanceof SuggestEmojiView.EmojiImageView) { + SuggestEmojiView.EmojiImageView emojiImageView = (SuggestEmojiView.EmojiImageView) view; + Drawable drawable = emojiImageView.drawable; + if (drawable instanceof AnimatedEmojiDrawable) { + contentType = CONTENT_TYPE_EMOJI; + centerImage.setRoundRadius(0); + } } if (contentType == CONTENT_TYPE_NONE) { return false; @@ -782,6 +991,7 @@ public boolean onInterceptTouchEvent(MotionEvent event, final RecyclerListView l if (openPreviewRunnable == null) { return; } + boolean opened = false; listView.setOnItemClickListener((RecyclerListView.OnItemClickListener) null); listView.requestDisallowInterceptTouchEvent(true); openPreviewRunnable = null; @@ -791,21 +1001,61 @@ public boolean onInterceptTouchEvent(MotionEvent event, final RecyclerListView l if (currentPreviewCell instanceof StickerEmojiCell) { StickerEmojiCell stickerEmojiCell = (StickerEmojiCell) currentPreviewCell; open(stickerEmojiCell.getSticker(), stickerEmojiCell.getStickerPath(), stickerEmojiCell.getEmoji(), delegate != null ? delegate.getQuery(false) : null, null, contentTypeFinal, stickerEmojiCell.isRecent(), stickerEmojiCell.getParentObject(), resourcesProvider); + opened = true; stickerEmojiCell.setScaled(true); } else if (currentPreviewCell instanceof StickerCell) { StickerCell stickerCell = (StickerCell) currentPreviewCell; open(stickerCell.getSticker(), null, null, delegate != null ? delegate.getQuery(false) : null, null, contentTypeFinal, false, stickerCell.getParentObject(), resourcesProvider); + opened = true; stickerCell.setScaled(true); clearsInputField = stickerCell.isClearsInputField(); } else if (currentPreviewCell instanceof ContextLinkCell) { ContextLinkCell contextLinkCell = (ContextLinkCell) currentPreviewCell; open(contextLinkCell.getDocument(), null, null, delegate != null ? delegate.getQuery(true) : null, contextLinkCell.getBotInlineResult(), contentTypeFinal, false, contextLinkCell.getBotInlineResult() != null ? 
contextLinkCell.getInlineBot() : contextLinkCell.getParentObject(), resourcesProvider); + opened = true; if (contentTypeFinal != CONTENT_TYPE_GIF) { contextLinkCell.setScaled(true); } + } else if (currentPreviewCell instanceof EmojiPacksAlert.EmojiImageView) { + EmojiPacksAlert.EmojiImageView imageView = (EmojiPacksAlert.EmojiImageView) currentPreviewCell; + TLRPC.Document document = imageView.getDocument(); + if (document != null) { + open(document, null, MessageObject.findAnimatedEmojiEmoticon(document, null), null, null, contentTypeFinal, false, null, resourcesProvider); + opened = true; + } + } else if (currentPreviewCell instanceof EmojiView.ImageViewEmoji) { + EmojiView.ImageViewEmoji imageView = (EmojiView.ImageViewEmoji) currentPreviewCell; + AnimatedEmojiSpan span = imageView.getSpan(); + TLRPC.Document document = null; + if (span != null) { + document = span.document; + if (document == null) { + document = AnimatedEmojiDrawable.findDocument(currentAccount, span.getDocumentId()); + } + } + if (document != null) { + open(document, null, MessageObject.findAnimatedEmojiEmoticon(document, null), null, null, contentTypeFinal, false, null, resourcesProvider); + opened = true; + } + } else if (currentPreviewCell instanceof SuggestEmojiView.EmojiImageView) { + SuggestEmojiView.EmojiImageView emojiImageView = (SuggestEmojiView.EmojiImageView) currentPreviewCell; + Drawable drawable = emojiImageView.drawable; + TLRPC.Document document = null; + if (drawable instanceof AnimatedEmojiDrawable) { + document = ((AnimatedEmojiDrawable) drawable).getDocument(); + } + if (document != null) { + open(document, null, MessageObject.findAnimatedEmojiEmoticon(document, null), null, null, contentTypeFinal, false, null, resourcesProvider); + opened = true; + } + } + if (opened) { + if (!NekoConfig.disableVibration.Bool()) + currentPreviewCell.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + if (delegate != null) { + delegate.resetTouch(); + } } - if (!NekoConfig.disableVibration.Bool()) - currentPreviewCell.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); }; AndroidUtilities.runOnUIThread(openPreviewRunnable, 200); return true; @@ -903,7 +1153,8 @@ public void open(TLRPC.Document document, SendMessagesHelper.ImportingSticker st stickerEmojiLayout = null; backgroundDrawable.setColor(Theme.getActiveTheme().isDark() ? 
0x71000000 : 0x64E6E6E6); drawEffect = false; - if (contentType == CONTENT_TYPE_STICKER) { + centerImage.setColorFilter(null); + if (contentType == CONTENT_TYPE_STICKER || contentType == CONTENT_TYPE_EMOJI) { if (document == null && sticker == null) { return; } @@ -923,7 +1174,11 @@ public void open(TLRPC.Document document, SendMessagesHelper.ImportingSticker st break; } } - if (newSet != null && (delegate == null || delegate.needMenu())) { + if (contentType == CONTENT_TYPE_EMOJI && emojiPath != null) { + CharSequence emoji = Emoji.replaceEmoji(emojiPath, textPaint.getFontMetricsInt(), AndroidUtilities.dp(24), false); + stickerEmojiLayout = new StaticLayout(emoji, textPaint, AndroidUtilities.dp(100), Layout.Alignment.ALIGN_CENTER, 1.0f, 0.0f, false); + } + if ((newSet != null || contentType == CONTENT_TYPE_EMOJI) && (delegate == null || delegate.needMenu())) { AndroidUtilities.cancelRunOnUIThread(showSheetRunnable); AndroidUtilities.runOnUIThread(showSheetRunnable, 1300); } @@ -938,6 +1193,9 @@ public void open(TLRPC.Document document, SendMessagesHelper.ImportingSticker st effectImage.setImage(ImageLocation.getForDocument(MessageObject.getPremiumStickerAnimation(document), document), null, null, null, "tgs", currentStickerSet, 1); } } + if (MessageObject.isTextColorEmoji(document)) { + centerImage.setColorFilter(Theme.chat_animatedEmojiTextColorFilter); + } for (int a = 0; a < document.attributes.size(); a++) { TLRPC.DocumentAttribute attribute = document.attributes.get(a); if (attribute instanceof TLRPC.TL_documentAttributeSticker) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/CountrySelectActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/CountrySelectActivity.java index f26e242e13..89e67d0c0d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/CountrySelectActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/CountrySelectActivity.java @@ -11,6 +11,7 @@ import android.content.Context; import android.graphics.Canvas; import android.graphics.Paint; +import android.os.Build; import android.text.SpannableStringBuilder; import android.text.TextUtils; import android.text.style.ReplacementSpan; @@ -49,11 +50,14 @@ import java.io.BufferedReader; import java.io.InputStream; import java.io.InputStreamReader; +import java.text.Collator; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Timer; @@ -74,6 +78,8 @@ public interface CountrySelectActivityDelegate { private boolean searching; private boolean needPhoneCode; + private boolean disableAnonymousNumbers; + private CountrySelectActivityDelegate delegate; private ArrayList existingCountries; @@ -88,6 +94,10 @@ public CountrySelectActivity(boolean phoneCode, ArrayList existingCount needPhoneCode = phoneCode; } + public void setDisableAnonymousNumbers(boolean disableAnonymousNumbers) { + this.disableAnonymousNumbers = disableAnonymousNumbers; + } + @Override public boolean onFragmentCreate() { return super.onFragmentCreate(); @@ -166,7 +176,7 @@ public void onTextChanged(EditText editText) { searching = false; searchWas = false; - listViewAdapter = new CountryAdapter(context, existingCountries); + listViewAdapter = new CountryAdapter(context, existingCountries, disableAnonymousNumbers); searchListViewAdapter = new CountrySearchAdapter(context, listViewAdapter.getCountries()); fragmentView = new 
FrameLayout(context); @@ -236,6 +246,7 @@ public void setCountrySelectActivityDelegate(CountrySelectActivityDelegate deleg public static class Country { public String name; + public String defaultName; public String code; public String shortname; @@ -260,7 +271,7 @@ public class CountryAdapter extends RecyclerListView.SectionsAdapter { private HashMap> countries = new HashMap<>(); private ArrayList sortedCountries = new ArrayList<>(); - public CountryAdapter(Context context, ArrayList exisitingCountries) { + public CountryAdapter(Context context, ArrayList exisitingCountries, boolean disableAnonymousNumbers) { mContext = context; if (exisitingCountries != null) { @@ -286,6 +297,9 @@ public CountryAdapter(Context context, ArrayList exisitingCountries) { c.name = args[2]; c.code = args[0]; c.shortname = args[1]; + if (c.shortname.equals("FT") && disableAnonymousNumbers) { + continue; + } String n = c.name.substring(0, 1).toUpperCase(); ArrayList arr = countries.get(n); if (arr == null) { @@ -301,10 +315,17 @@ public CountryAdapter(Context context, ArrayList exisitingCountries) { FileLog.e(e); } } - Collections.sort(sortedCountries, String::compareTo); + Comparator comparator; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { + Collator collator = Collator.getInstance(LocaleController.getInstance().getCurrentLocale() != null ? LocaleController.getInstance().getCurrentLocale() : Locale.getDefault()); + comparator = collator::compare; + } else { + comparator = String::compareTo; + } + Collections.sort(sortedCountries, comparator); for (ArrayList arr : countries.values()) { - Collections.sort(arr, (country, country2) -> country.name.compareTo(country2.name)); + Collections.sort(arr, (country, country2) -> comparator.compare(country.name, country2.name)); } } @@ -411,7 +432,11 @@ public CountrySearchAdapter(Context context, HashMap> for (List list : countries.values()) { for (Country country : list) { countryList.add(country); - countrySearchMap.put(country, Arrays.asList(country.name.split(" "))); + List keys = new ArrayList<>(Arrays.asList(country.name.split(" "))); + if (country.defaultName != null) { + keys.addAll(Arrays.asList(country.defaultName.split(" "))); + } + countrySearchMap.put(country, keys); } } } @@ -492,7 +517,7 @@ public int getItemCount() { } public Country getItem(int i) { - if (i < 0 || i >= searchResult.size()) { + if (searchResult == null || i < 0 || i >= searchResult.size()) { return null; } return searchResult.get(i); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/DataSettingsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/DataSettingsActivity.java index 1a565bed78..64cf8120d2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/DataSettingsActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/DataSettingsActivity.java @@ -12,12 +12,16 @@ import android.content.Context; import android.content.DialogInterface; import android.content.SharedPreferences; +import android.os.Build; +import android.os.Bundle; +import android.text.TextUtils; import android.view.Gravity; import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; import android.widget.TextView; +import androidx.recyclerview.widget.DefaultItemAnimator; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.RecyclerView; @@ -28,7 +32,9 @@ import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessagesController; import org.telegram.messenger.R; +import 
org.telegram.messenger.SaveToGallerySettingsHelper; import org.telegram.messenger.SharedConfig; +import org.telegram.messenger.StatsController; import org.telegram.messenger.voip.Instance; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.ActionBar; @@ -37,12 +43,15 @@ import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ActionBar.ThemeDescription; import org.telegram.ui.Cells.HeaderCell; +import org.telegram.ui.Cells.LanguageCell; import org.telegram.ui.Cells.NotificationsCheckCell; import org.telegram.ui.Cells.ShadowSectionCell; +import org.telegram.ui.Cells.TextCell; import org.telegram.ui.Cells.TextCheckCell; import org.telegram.ui.Cells.TextInfoPrivacyCell; import org.telegram.ui.Cells.TextSettingsCell; import org.telegram.ui.Components.AlertsCreator; +import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.Components.voip.VoIPHelper; @@ -63,14 +72,14 @@ public class DataSettingsActivity extends BaseFragment { @SuppressWarnings("FieldCanBeLocal") private LinearLayoutManager layoutManager; - private ArrayList storageDirs; +// private ArrayList storageDirs; private int mediaDownloadSectionRow; private int mobileRow; private int roamingRow; private int wifiRow; private int storageNumRow; - private int resetDownloadRow; + private int resetDownloadRow = -1; private int mediaDownloadSection2Row; private int usageSectionRow; private int storageUsageRow; @@ -82,10 +91,10 @@ public class DataSettingsActivity extends BaseFragment { private int enableAllStreamRow; private int enableMkvRow; private int enableAllStreamInfoRow; - private int autoplayHeaderRow; - private int autoplayGifsRow; - private int autoplayVideoRow; - private int autoplaySectionRow; + private int autoplayHeaderRow = -1; + private int autoplayGifsRow = -1; + private int autoplayVideoRow = -1; + private int autoplaySectionRow = -1; private int callsSectionRow; private int useLessDataForCallsRow; private int quickRepliesRow; @@ -105,11 +114,21 @@ public class DataSettingsActivity extends BaseFragment { private boolean updateVoipUseLessData; + private boolean updateStorageUsageAnimated; + private boolean storageUsageLoading; + private long storageUsageSize; + @Override public boolean onFragmentCreate() { super.onFragmentCreate(); DownloadController.getInstance(currentAccount).loadAutoDownloadConfig(true); + updateRows(true); + + return true; + } + + private void updateRows(boolean fullNotify) { rowCount = 0; usageSectionRow = rowCount++; @@ -121,7 +140,25 @@ public boolean onFragmentCreate() { mobileRow = rowCount++; wifiRow = rowCount++; roamingRow = rowCount++; - resetDownloadRow = rowCount++; + DownloadController dc = getDownloadController(); + boolean isDefault = !( + !dc.lowPreset.equals(dc.getCurrentRoamingPreset()) || dc.lowPreset.isEnabled() != dc.roamingPreset.enabled || + !dc.mediumPreset.equals(dc.getCurrentMobilePreset()) || dc.mediumPreset.isEnabled() != dc.mobilePreset.enabled || + !dc.highPreset.equals(dc.getCurrentWiFiPreset()) || dc.highPreset.isEnabled() != dc.wifiPreset.enabled + ); + int wasResetDownloadRow = resetDownloadRow; + resetDownloadRow = isDefault ? 
-1 : rowCount++; + if (listAdapter != null && !fullNotify) { + if (wasResetDownloadRow < 0 && resetDownloadRow >= 0) { + listAdapter.notifyItemChanged(roamingRow); + listAdapter.notifyItemInserted(resetDownloadRow); + } else if (wasResetDownloadRow >= 0 && resetDownloadRow < 0) { + listAdapter.notifyItemChanged(roamingRow); + listAdapter.notifyItemRemoved(wasResetDownloadRow); + } else { + fullNotify = true; + } + } mediaDownloadSection2Row = rowCount++; saveToGallerySectionRow = rowCount++; @@ -130,10 +167,10 @@ public boolean onFragmentCreate() { saveToGalleryChannelsRow = rowCount++; saveToGalleryDividerRow = rowCount++; - autoplayHeaderRow = rowCount++; - autoplayGifsRow = rowCount++; - autoplayVideoRow = rowCount++; - autoplaySectionRow = rowCount++; +// autoplayHeaderRow = rowCount++; +// autoplayGifsRow = rowCount++; +// autoplayVideoRow = rowCount++; +// autoplaySectionRow = rowCount++; streamSectionRow = rowCount++; enableStreamRow = rowCount++; if (BuildVars.DEBUG_VERSION) { @@ -156,7 +193,64 @@ public boolean onFragmentCreate() { clearDraftsRow = rowCount++; clearDraftsSectionRow = rowCount++; - return true; + if (listAdapter != null && fullNotify) { + listAdapter.notifyDataSetChanged(); + } + } + + private void loadCacheSize() { + final Runnable fireLoading = () -> { + storageUsageLoading = true; + if (listAdapter != null && storageUsageRow >= 0) { + rebind(storageUsageRow); + } + }; + AndroidUtilities.runOnUIThread(fireLoading, 100); + + final long start = System.currentTimeMillis(); + CacheControlActivity.calculateTotalSize(size -> { + AndroidUtilities.cancelRunOnUIThread(fireLoading); + updateStorageUsageAnimated = updateStorageUsageAnimated || (System.currentTimeMillis() - start) > 120; + storageUsageSize = size; + storageUsageLoading = false; + if (listAdapter != null && storageUsageRow >= 0) { + rebind(storageUsageRow); + } + }); + + } + + private void rebind(int position) { + if (listView == null || listAdapter == null) { + return; + } + for (int i = 0; i < listView.getChildCount(); ++i) { + View child = listView.getChildAt(i); + RecyclerView.ViewHolder holder = listView.getChildViewHolder(child); + if (holder != null && holder.getAdapterPosition() == position) { + listAdapter.onBindViewHolder(holder, position); + return; + } + } + } + + private void rebindAll() { + if (listView == null || listAdapter == null) { + return; + } + for (int i = 0; i < listView.getChildCount(); ++i) { + View child = listView.getChildAt(i); + RecyclerView.ViewHolder holder = listView.getChildViewHolder(child); + if (holder != null) { + listAdapter.onBindViewHolder(holder, listView.getChildAdapterPosition(child)); + } + } + } + + @Override + public void onFragmentDestroy() { + super.onFragmentDestroy(); + CacheControlActivity.canceled = true; } @Override @@ -182,7 +276,15 @@ public void onItemClick(int id) { fragmentView.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundGray)); FrameLayout frameLayout = (FrameLayout) fragmentView; - listView = new RecyclerListView(context); + listView = new RecyclerListView(context) { + @Override + public Integer getSelectorColor(int position) { + if (position == resetDownloadRow) { + return Theme.multAlpha(getThemedColor(Theme.key_windowBackgroundWhiteRedText2), .1f); + } + return getThemedColor(Theme.key_listSelector); + } + }; listView.setVerticalScrollBarEnabled(false); listView.setLayoutManager(layoutManager = new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false)); frameLayout.addView(listView, 
LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.TOP | Gravity.LEFT)); @@ -197,9 +299,15 @@ public void onItemClick(int id) { } else { flag = SharedConfig.SAVE_TO_GALLERY_FLAG_PEER; } - SharedConfig.toggleSaveToGalleryFlag(flag); - TextCheckCell textCheckCell = (TextCheckCell) view; - textCheckCell.setChecked((SharedConfig.saveToGalleryFlags & flag) != 0); + if (LocaleController.isRTL && x <= AndroidUtilities.dp(76) || !LocaleController.isRTL && x >= view.getMeasuredWidth() - AndroidUtilities.dp(76)) { + SaveToGallerySettingsHelper.getSettings(flag).toggle(); + AndroidUtilities.updateVisibleRows(listView); + } else { + Bundle bundle = new Bundle(); + bundle.putInt("type", flag); + presentFragment(new SaveToGallerySettingsActivity(bundle)); + } + } else if (position == mobileRow || position == roamingRow || position == wifiRow) { if (LocaleController.isRTL && x <= AndroidUtilities.dp(76) || !LocaleController.isRTL && x >= view.getMeasuredWidth() - AndroidUtilities.dp(76)) { boolean wasEnabled = listAdapter.isRowEnabled(resetDownloadRow); @@ -248,9 +356,7 @@ public void onItemClick(int id) { } DownloadController.getInstance(currentAccount).checkAutodownloadSettings(); DownloadController.getInstance(currentAccount).savePresetToServer(num); - if (wasEnabled != listAdapter.isRowEnabled(resetDownloadRow)) { - listAdapter.notifyItemChanged(resetDownloadRow); - } + updateRows(false); } else { int type; if (position == mobileRow) { @@ -302,13 +408,14 @@ public void onItemClick(int id) { DownloadController.getInstance(currentAccount).savePresetToServer(a); } listAdapter.notifyItemRangeChanged(mobileRow, 4); + updateRows(false); }); builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); AlertDialog dialog = builder.create(); showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } else if (position == storageUsageRow) { presentFragment(new CacheControlActivity()); @@ -361,7 +468,7 @@ public void onItemClick(int id) { setVisibleDialog(dlg); dlg.show(); } else if (position == dataUsageRow) { - presentFragment(new DataUsageActivity()); + presentFragment(new DataUsage2Activity()); } else if (position == storageNumRow) { BottomBuilder builder = new BottomBuilder(getParentActivity()); @@ -418,12 +525,12 @@ public void onItemClick(int id) { } else if (position == autoplayGifsRow) { SharedConfig.toggleAutoplayGifs(); if (view instanceof TextCheckCell) { - ((TextCheckCell) view).setChecked(SharedConfig.autoplayGifs); + ((TextCheckCell) view).setChecked(SharedConfig.isAutoplayGifs()); } } else if (position == autoplayVideoRow) { SharedConfig.toggleAutoplayVideo(); if (view instanceof TextCheckCell) { - ((TextCheckCell) view).setChecked(SharedConfig.autoplayVideo); + ((TextCheckCell) view).setChecked(SharedConfig.isAutoplayVideo()); } } else if (position == clearDraftsRow) { AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity()); @@ -438,14 +545,33 @@ public void onItemClick(int id) { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } }); + DefaultItemAnimator itemAnimator = new DefaultItemAnimator(); + 
itemAnimator.setDurations(350); + itemAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + itemAnimator.setDelayAnimations(false); + itemAnimator.setSupportsChangeAnimations(false); + listView.setItemAnimator(itemAnimator); return fragmentView; } + private void setStorageDirectory(String storageDir) { + SharedConfig.storageCacheDir = storageDir; + SharedConfig.saveConfig(); + if (storageDir != null) { + SharedConfig.readOnlyStorageDirAlertShowed = false; + } + rebind(storageNumRow); + ImageLoader.getInstance().checkMediaPaths(() -> { + CacheControlActivity.resetCalculatedTotalSIze(); + loadCacheSize(); + }); + } + @Override protected void onDialogDismiss(Dialog dialog) { DownloadController.getInstance(currentAccount).checkAutodownloadSettings(); @@ -454,9 +580,9 @@ protected void onDialogDismiss(Dialog dialog) { @Override public void onResume() { super.onResume(); - if (listAdapter != null) { - listAdapter.notifyDataSetChanged(); - } + loadCacheSize(); + rebindAll(); + updateRows(false); } private class ListAdapter extends RecyclerListView.SelectionAdapter { @@ -483,14 +609,39 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } break; } + case 6: { + TextCell textCell = (TextCell) holder.itemView; + if (position == storageUsageRow) { + if (storageUsageLoading) { + textCell.setTextAndValueAndColorfulIcon(LocaleController.getString("StorageUsage", R.string.StorageUsage), "", false, R.drawable.msg_filled_storageusage, getThemedColor(Theme.key_color_lightblue), true); + textCell.setDrawLoading(true, 45, updateStorageUsageAnimated); + } else { + textCell.setTextAndValueAndColorfulIcon(LocaleController.getString("StorageUsage", R.string.StorageUsage), storageUsageSize <= 0 ? "" : AndroidUtilities.formatFileSize(storageUsageSize), true, R.drawable.msg_filled_storageusage, getThemedColor(Theme.key_color_lightblue), true); + textCell.setDrawLoading(false, 45, updateStorageUsageAnimated); + } + updateStorageUsageAnimated = false; + } else if (position == dataUsageRow) { + StatsController statsController = StatsController.getInstance(currentAccount); + long size = ( + statsController.getReceivedBytesCount(0, StatsController.TYPE_TOTAL) + + statsController.getReceivedBytesCount(1, StatsController.TYPE_TOTAL) + + statsController.getReceivedBytesCount(2, StatsController.TYPE_TOTAL) + + statsController.getSentBytesCount(0, StatsController.TYPE_TOTAL) + + statsController.getSentBytesCount(1, StatsController.TYPE_TOTAL) + + statsController.getSentBytesCount(2, StatsController.TYPE_TOTAL) + ); + textCell.setTextAndValueAndColorfulIcon(LocaleController.getString("NetworkUsage", R.string.NetworkUsage), AndroidUtilities.formatFileSize(size), true, R.drawable.msg_filled_datausage, getThemedColor(Theme.key_color_green), storageNumRow != -1); + } else if (position == storageNumRow) { + String value = NekoConfig.cachePath.String(); + textCell.setTextAndValueAndColorfulIcon(LocaleController.getString("StoragePath", R.string.StoragePath), value, true, R.drawable.msg_filled_sdcard, getThemedColor(Theme.key_color_yellow), false); + } + break; + } case 1: { TextSettingsCell textCell = (TextSettingsCell) holder.itemView; textCell.setCanDisable(false); textCell.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); - if (position == storageUsageRow) { - textCell.setIcon(R.drawable.msg_storage_usage); - textCell.setText(LocaleController.getString("StorageUsage", R.string.StorageUsage), true); - } else if (position == useLessDataForCallsRow) { + if (position == 
useLessDataForCallsRow) { textCell.setIcon(0); SharedPreferences preferences = MessagesController.getGlobalMainSettings(); String value = null; @@ -510,12 +661,6 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } textCell.setTextAndValue(LocaleController.getString("VoipUseLessData", R.string.VoipUseLessData), value, updateVoipUseLessData, true); updateVoipUseLessData = false; - } else if (position == dataUsageRow) { - textCell.setIcon(R.drawable.msg_data_usage); - textCell.setText(LocaleController.getString("NetworkUsage", R.string.NetworkUsage), storageNumRow != -1); - } else if (position == storageNumRow) { - textCell.setIcon(R.drawable.msg_storage_path); - textCell.setTextAndValue(LocaleController.getString("StoragePath", R.string.StoragePath), NekoConfig.cachePath.String(), false); } else if (position == proxyRow) { textCell.setIcon(0); textCell.setText(LocaleController.getString("ProxySettings", R.string.ProxySettings), false); @@ -548,7 +693,7 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } else if (position == autoplayHeaderRow) { headerCell.setText(LocaleController.getString("AutoplayMedia", R.string.AutoplayMedia)); } else if (position == saveToGallerySectionRow) { - headerCell.setText(LocaleController.getString("SaveToGallery", R.string.SaveToGallery)); + headerCell.setText(LocaleController.getString("SaveToGallerySettings", R.string.SaveToGallerySettings)); } break; } @@ -563,15 +708,9 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } else if (position == enableAllStreamRow) { checkCell.setTextAndCheck("(beta only) Stream All Videos", SharedConfig.streamAllVideo, false); } else if (position == autoplayGifsRow) { - checkCell.setTextAndCheck(LocaleController.getString("AutoplayGIF", R.string.AutoplayGIF), SharedConfig.autoplayGifs, true); + checkCell.setTextAndCheck(LocaleController.getString("AutoplayGIF", R.string.AutoplayGIF), SharedConfig.isAutoplayGifs(), true); } else if (position == autoplayVideoRow) { - checkCell.setTextAndCheck(LocaleController.getString("AutoplayVideo", R.string.AutoplayVideo), SharedConfig.autoplayVideo, false); - } else if (position == saveToGalleryPeerRow) { - checkCell.setTextAndCheck(LocaleController.getString("SaveToGalleryPrivate", R.string.SaveToGalleryPrivate), (SharedConfig.saveToGalleryFlags & SharedConfig.SAVE_TO_GALLERY_FLAG_PEER) != 0, true); - } else if (position == saveToGalleryGroupsRow) { - checkCell.setTextAndCheck(LocaleController.getString("SaveToGalleryGroups", R.string.SaveToGalleryGroups), (SharedConfig.saveToGalleryFlags & SharedConfig.SAVE_TO_GALLERY_FLAG_GROUP) != 0, true); - } else if (position == saveToGalleryChannelsRow) { - checkCell.setTextAndCheck(LocaleController.getString("SaveToGalleryChannels", R.string.SaveToGalleryChannels), (SharedConfig.saveToGalleryFlags & SharedConfig.SAVE_TO_GALLERY_FLAG_CHANNELS) != 0, false); + checkCell.setTextAndCheck(LocaleController.getString("AutoplayVideo", R.string.AutoplayVideo), SharedConfig.isAutoplayVideo(), false); } break; } @@ -586,10 +725,23 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { NotificationsCheckCell checkCell = (NotificationsCheckCell) holder.itemView; String text; - StringBuilder builder = new StringBuilder(); - DownloadController.Preset preset; - boolean enabled; - if (position == mobileRow) { + CharSequence description = null; + DownloadController.Preset preset = null; + boolean enabled, divider = true; + if (position == 
saveToGalleryPeerRow) { + text = LocaleController.getString("SaveToGalleryPrivate", R.string.SaveToGalleryPrivate); + description = SaveToGallerySettingsHelper.user.createDescription(currentAccount); + enabled = SaveToGallerySettingsHelper.user.enabled(); + } else if (position == saveToGalleryGroupsRow) { + text = LocaleController.getString("SaveToGalleryGroups", R.string.SaveToGalleryGroups); + description = SaveToGallerySettingsHelper.groups.createDescription(currentAccount); + enabled = SaveToGallerySettingsHelper.groups.enabled(); + } else if (position == saveToGalleryChannelsRow) { + text = LocaleController.getString("SaveToGalleryChannels", R.string.SaveToGalleryChannels); + description = SaveToGallerySettingsHelper.channels.createDescription(currentAccount); + enabled = SaveToGallerySettingsHelper.channels.enabled(); + divider = false; + } else if (position == mobileRow) { text = LocaleController.getString("WhenUsingMobileData", R.string.WhenUsingMobileData); enabled = DownloadController.getInstance(currentAccount).mobilePreset.enabled; preset = DownloadController.getInstance(currentAccount).getCurrentMobilePreset(); @@ -601,48 +753,57 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { text = LocaleController.getString("WhenRoaming", R.string.WhenRoaming); enabled = DownloadController.getInstance(currentAccount).roamingPreset.enabled; preset = DownloadController.getInstance(currentAccount).getCurrentRoamingPreset(); + divider = resetDownloadRow >= 0; } - - boolean photos = false; - boolean videos = false; - boolean files = false; - int count = 0; - for (int a = 0; a < preset.mask.length; a++) { - if (!photos && (preset.mask[a] & DownloadController.AUTODOWNLOAD_TYPE_PHOTO) != 0) { - photos = true; - count++; - } - if (!videos && (preset.mask[a] & DownloadController.AUTODOWNLOAD_TYPE_VIDEO) != 0) { - videos = true; - count++; - } - if (!files && (preset.mask[a] & DownloadController.AUTODOWNLOAD_TYPE_DOCUMENT) != 0) { - files = true; - count++; - } - } - if (preset.enabled && count != 0) { - if (photos) { - builder.append(LocaleController.getString("AutoDownloadPhotosOn", R.string.AutoDownloadPhotosOn)); - } - if (videos) { - if (builder.length() > 0) { - builder.append(", "); + boolean checked; + if (preset != null) { + StringBuilder builder = new StringBuilder(); + boolean photos = false; + boolean videos = false; + boolean files = false; + int count = 0; + for (int a = 0; a < preset.mask.length; a++) { + if (!photos && (preset.mask[a] & DownloadController.AUTODOWNLOAD_TYPE_PHOTO) != 0) { + photos = true; + count++; + } + if (!videos && (preset.mask[a] & DownloadController.AUTODOWNLOAD_TYPE_VIDEO) != 0) { + videos = true; + count++; + } + if (!files && (preset.mask[a] & DownloadController.AUTODOWNLOAD_TYPE_DOCUMENT) != 0) { + files = true; + count++; } - builder.append(LocaleController.getString("AutoDownloadVideosOn", R.string.AutoDownloadVideosOn)); - builder.append(String.format(" (%1$s)", AndroidUtilities.formatFileSize(preset.sizes[DownloadController.typeToIndex(DownloadController.AUTODOWNLOAD_TYPE_VIDEO)], true))); } - if (files) { - if (builder.length() > 0) { - builder.append(", "); + if (preset.enabled && count != 0) { + if (photos) { + builder.append(LocaleController.getString("AutoDownloadPhotosOn", R.string.AutoDownloadPhotosOn)); + } + if (videos) { + if (builder.length() > 0) { + builder.append(", "); + } + builder.append(LocaleController.getString("AutoDownloadVideosOn", R.string.AutoDownloadVideosOn)); + builder.append(String.format(" 
(%1$s)", AndroidUtilities.formatFileSize(preset.sizes[DownloadController.typeToIndex(DownloadController.AUTODOWNLOAD_TYPE_VIDEO)], true))); + } + if (files) { + if (builder.length() > 0) { + builder.append(", "); + } + builder.append(LocaleController.getString("AutoDownloadFilesOn", R.string.AutoDownloadFilesOn)); + builder.append(String.format(" (%1$s)", AndroidUtilities.formatFileSize(preset.sizes[DownloadController.typeToIndex(DownloadController.AUTODOWNLOAD_TYPE_DOCUMENT)], true))); } - builder.append(LocaleController.getString("AutoDownloadFilesOn", R.string.AutoDownloadFilesOn)); - builder.append(String.format(" (%1$s)", AndroidUtilities.formatFileSize(preset.sizes[DownloadController.typeToIndex(DownloadController.AUTODOWNLOAD_TYPE_DOCUMENT)], true))); + } else { + builder.append(LocaleController.getString("NoMediaAutoDownload", R.string.NoMediaAutoDownload)); } + checked = (photos || videos || files) && enabled; + description = builder; } else { - builder.append(LocaleController.getString("NoMediaAutoDownload", R.string.NoMediaAutoDownload)); + checked = enabled; } - checkCell.setTextAndValueAndCheck(text, builder, (photos || videos || files) && enabled, 0, true, true); + checkCell.setAnimationsEnabled(true); + checkCell.setTextAndValueAndCheck(text, description, checked, 0, true, divider); break; } } @@ -663,23 +824,17 @@ public void onViewAttachedToWindow(RecyclerView.ViewHolder holder) { } else if (position == enableMkvRow) { checkCell.setChecked(SharedConfig.streamMkv); } else if (position == autoplayGifsRow) { - checkCell.setChecked(SharedConfig.autoplayGifs); + checkCell.setChecked(SharedConfig.isAutoplayGifs()); } else if (position == autoplayVideoRow) { - checkCell.setChecked(SharedConfig.autoplayVideo); + checkCell.setChecked(SharedConfig.isAutoplayVideo()); } } } public boolean isRowEnabled(int position) { - if (position == resetDownloadRow) { - DownloadController controller = DownloadController.getInstance(currentAccount); - return !controller.lowPreset.equals(controller.getCurrentRoamingPreset()) || controller.lowPreset.isEnabled() != controller.roamingPreset.enabled || - !controller.mediumPreset.equals(controller.getCurrentMobilePreset()) || controller.mediumPreset.isEnabled() != controller.mobilePreset.enabled || - !controller.highPreset.equals(controller.getCurrentWiFiPreset()) || controller.highPreset.isEnabled() != controller.wifiPreset.enabled; - } return position == mobileRow || position == roamingRow || position == wifiRow || position == storageUsageRow || position == useLessDataForCallsRow || position == dataUsageRow || position == proxyRow || position == clearDraftsRow || position == enableCacheStreamRow || position == enableStreamRow || position == enableAllStreamRow || position == enableMkvRow || position == quickRepliesRow || position == autoplayVideoRow || position == autoplayGifsRow || - position == storageNumRow || position == saveToGalleryGroupsRow || position == saveToGalleryPeerRow || position == saveToGalleryChannelsRow; + position == storageNumRow || position == saveToGalleryGroupsRow || position == saveToGalleryPeerRow || position == saveToGalleryChannelsRow || position == resetDownloadRow; } @Override @@ -699,7 +854,7 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); break; case 2: - view = new HeaderCell(mContext); + view = new HeaderCell(mContext, 22); view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); break; case 3: 
@@ -711,10 +866,14 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType
                     view.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow));
                     break;
                 case 5:
-                default:
                     view = new NotificationsCheckCell(mContext);
                     view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite));
                     break;
+                case 6:
+                default:
+                    view = new TextCell(mContext);
+                    view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite));
+                    break;
             }
             view.setLayoutParams(new RecyclerView.LayoutParams(RecyclerView.LayoutParams.MATCH_PARENT, RecyclerView.LayoutParams.WRAP_CONTENT));
             return new RecyclerListView.Holder(view);
@@ -726,12 +885,14 @@ public int getItemViewType(int position) {
                 return 0;
            } else if (position == mediaDownloadSectionRow || position == streamSectionRow || position == callsSectionRow || position == usageSectionRow || position == proxySectionRow || position == autoplayHeaderRow || position == saveToGallerySectionRow) {
                 return 2;
-            } else if (position == enableCacheStreamRow || position == enableStreamRow || position == enableAllStreamRow || position == enableMkvRow || position == autoplayGifsRow || position == autoplayVideoRow || position == saveToGalleryGroupsRow || position == saveToGalleryPeerRow || position == saveToGalleryChannelsRow) {
+            } else if (position == enableCacheStreamRow || position == enableStreamRow || position == enableAllStreamRow || position == enableMkvRow || position == autoplayGifsRow || position == autoplayVideoRow) {
                 return 3;
             } else if (position == enableAllStreamInfoRow) {
                 return 4;
-            } else if (position == mobileRow || position == wifiRow || position == roamingRow) {
+            } else if (position == mobileRow || position == wifiRow || position == roamingRow || position == saveToGalleryGroupsRow || position == saveToGalleryPeerRow || position == saveToGalleryChannelsRow) {
                 return 5;
+            } else if (position == storageUsageRow || position == dataUsageRow || position == storageNumRow) {
+                return 6;
             } else {
                 return 1;
             }
diff --git a/TMessagesProj/src/main/java/org/telegram/ui/DataUsage2Activity.java b/TMessagesProj/src/main/java/org/telegram/ui/DataUsage2Activity.java
new file mode 100644
index 0000000000..6cd8c82434
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/ui/DataUsage2Activity.java
@@ -0,0 +1,1108 @@
+package org.telegram.ui;
+
+import static org.telegram.messenger.AndroidUtilities.dp;
+
+import android.content.Context;
+import android.content.DialogInterface;
+import android.content.res.Configuration;
+import android.graphics.Canvas;
+import android.graphics.Paint;
+import android.graphics.Path;
+import android.graphics.PorterDuff;
+import android.graphics.PorterDuffColorFilter;
+import android.graphics.drawable.Drawable;
+import android.text.SpannableString;
+import android.text.Spanned;
+import android.text.TextPaint;
+import android.text.TextUtils;
+import android.text.style.DynamicDrawableSpan;
+import android.text.style.ImageSpan;
+import android.text.style.MetricAffectingSpan;
+import android.text.style.RelativeSizeSpan;
+import android.util.TypedValue;
+import android.view.Gravity;
+import android.view.MotionEvent;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.FrameLayout;
+import android.widget.ImageView;
+import android.widget.LinearLayout;
+import android.widget.TextView;
+
+import androidx.annotation.NonNull;
+import androidx.recyclerview.widget.DefaultItemAnimator;
+import androidx.recyclerview.widget.LinearLayoutManager;
+import androidx.recyclerview.widget.RecyclerView; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.NotificationCenter; +import org.telegram.messenger.R; +import org.telegram.messenger.StatsController; +import org.telegram.ui.ActionBar.ActionBar; +import org.telegram.ui.ActionBar.AlertDialog; +import org.telegram.ui.ActionBar.BaseFragment; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Cells.HeaderCell; +import org.telegram.ui.Cells.TextCell; +import org.telegram.ui.Cells.TextInfoPrivacyCell; +import org.telegram.ui.Components.CacheChart; +import org.telegram.ui.Components.CubicBezierInterpolator; +import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.ListView.AdapterWithDiffUtils; +import org.telegram.ui.Components.RecyclerListView; +import org.telegram.ui.Components.TypefaceSpan; +import org.telegram.ui.Components.ViewPagerFixed; + +import java.util.ArrayList; +import java.util.Arrays; + +public class DataUsage2Activity extends BaseFragment { + + private Theme.ResourcesProvider resourcesProvider; + + public DataUsage2Activity() { + this(null); + } + + public DataUsage2Activity(Theme.ResourcesProvider resourcesProvider) { + super(); + this.resourcesProvider = resourcesProvider; + } + + private ViewPagerFixed pager; + private ViewPagerFixed.Adapter pageAdapter; + private ViewPagerFixed.TabsView tabsView; + + @Override + public View createView(Context context) { + actionBar.setBackButtonImage(R.drawable.ic_ab_back); + actionBar.setTitle(LocaleController.getString("NetworkUsage", R.string.NetworkUsage)); + actionBar.setBackgroundColor(getThemedColor(Theme.key_actionBarActionModeDefault)); + actionBar.setTitleColor(getThemedColor(Theme.key_windowBackgroundWhiteBlackText)); + actionBar.setItemsColor(getThemedColor(Theme.key_windowBackgroundWhiteBlackText), false); + actionBar.setItemsBackgroundColor(getThemedColor(Theme.key_listSelector), false); + actionBar.setCastShadows(false); + actionBar.setActionBarMenuOnItemClick(new ActionBar.ActionBarMenuOnItemClick() { + @Override + public void onItemClick(int id) { + if (id == -1) { + finishFragment(); + } + } + }); + + FrameLayout frameLayout = new FrameLayout(context) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(heightMeasureSpec), MeasureSpec.EXACTLY)); + } + + @Override + protected void dispatchDraw(Canvas canvas) { + super.dispatchDraw(canvas); + if (getParentLayout() != null && tabsView != null) { + float y = tabsView.getMeasuredHeight(); + canvas.drawLine(0, y, getWidth(), y, Theme.dividerPaint); + } + } + }; + frameLayout.setBackgroundColor(getThemedColor(Theme.key_windowBackgroundGray)); + + pager = new ViewPagerFixed(context); + pager.setAdapter(pageAdapter = new PageAdapter()); + + tabsView = pager.createTabsView(true, 8); + tabsView.setBackgroundColor(getThemedColor(Theme.key_actionBarActionModeDefault)); + frameLayout.addView(tabsView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.TOP | Gravity.FILL_HORIZONTAL)); + + frameLayout.addView(pager, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.FILL, 0, 48, 0, 0)); + + return fragmentView = frameLayout; + } + + @Override + public Theme.ResourcesProvider getResourceProvider() { + return resourcesProvider; + } + + private class PageAdapter extends ViewPagerFixed.Adapter { + + @Override 
+ public int getItemCount() { + return 4; + } + + @Override + public View createView(int viewType) { + return new ListView(getContext()); + } + + @Override + public void bindView(View view, int position, int viewType) { + ((ListView) view).setType(position); + ((ListView) view).scrollToPosition(0); + } + + @Override + public String getItemTitle(int position) { + switch (position) { + case ListView.TYPE_ALL: return LocaleController.getString("NetworkUsageAllTab", R.string.NetworkUsageAllTab); + case ListView.TYPE_MOBILE: return LocaleController.getString("NetworkUsageMobileTab", R.string.NetworkUsageMobileTab); + case ListView.TYPE_WIFI: return LocaleController.getString("NetworkUsageWiFiTab", R.string.NetworkUsageWiFiTab); + case ListView.TYPE_ROAMING: return LocaleController.getString("NetworkUsageRoamingTab", R.string.NetworkUsageRoamingTab); + default: return ""; + } + } + } + + private static String[] colors = { + Theme.key_statisticChartLine_blue, + Theme.key_statisticChartLine_green, + Theme.key_statisticChartLine_lightblue, + Theme.key_statisticChartLine_golden, + Theme.key_statisticChartLine_red, + Theme.key_statisticChartLine_purple, + Theme.key_statisticChartLine_cyan + }; + + private static int[] particles = { + R.drawable.msg_filled_data_videos, + R.drawable.msg_filled_data_files, + R.drawable.msg_filled_data_photos, + R.drawable.msg_filled_data_messages, + R.drawable.msg_filled_data_music, + R.drawable.msg_filled_data_voice, + R.drawable.msg_filled_data_calls + }; + + private static int[] titles = { + R.string.LocalVideoCache, + R.string.LocalDocumentCache, + R.string.LocalPhotoCache, + R.string.MessagesSettings, + R.string.LocalMusicCache, + R.string.LocalAudioCache, + R.string.CallsDataUsage + }; + + private static int[] stats = { + StatsController.TYPE_VIDEOS, + StatsController.TYPE_FILES, + StatsController.TYPE_PHOTOS, + StatsController.TYPE_MESSAGES, + StatsController.TYPE_MUSIC, + StatsController.TYPE_AUDIOS, + StatsController.TYPE_CALLS, + }; + + + class ListView extends RecyclerListView { + + public static final int TYPE_ALL = 0; + public static final int TYPE_MOBILE = 1; + public static final int TYPE_WIFI = 2; + public static final int TYPE_ROAMING = 3; + + private boolean animateChart = false; + + int currentType = TYPE_ALL; + + LinearLayoutManager layoutManager; + Adapter adapter; + + public ListView(Context context) { + super(context); + setLayoutManager(layoutManager = new LinearLayoutManager(context)); + setAdapter(adapter = new Adapter()); + setOnItemClickListener((view, position) -> { + if (view instanceof Cell && position >= 0 && position < itemInners.size()) { + ItemInner item = itemInners.get(position); + if (item != null) { + if (item.index >= 0) { + collapsed[item.index] = !collapsed[item.index]; + updateRows(true); + } else if (item.index == -2) { + presentFragment(new DataAutoDownloadActivity(currentType - 1)); + } + } + } else if (view instanceof TextCell) { + AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity()); + builder.setTitle(LocaleController.getString("ResetStatisticsAlertTitle", R.string.ResetStatisticsAlertTitle)); + builder.setMessage(LocaleController.getString("ResetStatisticsAlert", R.string.ResetStatisticsAlert)); + builder.setPositiveButton(LocaleController.getString("Reset", R.string.Reset), (dialogInterface, j) -> { + removedSegments.clear(); + for (int i = 0; i < segments.length; ++i) { + long size = segments[i].size; + if (size > 0) { + removedSegments.add(segments[i].index); + } + } + + 
StatsController.getInstance(currentAccount).resetStats(0); + StatsController.getInstance(currentAccount).resetStats(1); + StatsController.getInstance(currentAccount).resetStats(2); + + animateChart = true; + setup(); + updateRows(true); + }); + builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); + AlertDialog dialog = builder.create(); + showDialog(dialog); + TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); + if (button != null) { + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); + } + } + }); + + DefaultItemAnimator itemAnimator = new DefaultItemAnimator(); + itemAnimator.setDurations(220); + itemAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + itemAnimator.setDelayAnimations(false); + itemAnimator.setSupportsChangeAnimations(false); + setItemAnimator(itemAnimator); + } + + public void setType(int type) { + this.currentType = type; + + removedSegments.clear(); + empty = getBytesCount(StatsController.TYPE_TOTAL) <= 0; + setup(); + updateRows(false); + } + + private void setup() { + totalSize = getBytesCount(StatsController.TYPE_TOTAL); + totalSizeIn = getReceivedBytesCount(StatsController.TYPE_TOTAL); + totalSizeOut = getSentBytesCount(StatsController.TYPE_TOTAL); + if (segments == null) { + segments = new Size[7]; + } + if (chartSegments == null) { + chartSegments = new Size[7]; + } + for (int i = 0; i < stats.length; ++i) { + long size = getBytesCount(stats[i]); + chartSegments[i] = segments[i] = new Size( + i, + size, + getReceivedBytesCount(stats[i]), + getSentBytesCount(stats[i]), + getReceivedItemsCount(stats[i]), + getSentItemsCount(stats[i]) + ); + tempSizes[i] = size / (float) totalSize; + } + Arrays.sort(segments, (a, b) -> Long.compare(b.size, a.size)); + AndroidUtilities.roundPercents(tempSizes, tempPercents); + Arrays.fill(collapsed, true); + } + + private ArrayList oldItems = new ArrayList<>(); + private ArrayList itemInners = new ArrayList<>(); + + private float[] tempSizes = new float[7]; + private int[] tempPercents = new int[7]; + + private ArrayList removedSegments = new ArrayList<>(); + private Size[] segments, chartSegments; + private boolean[] collapsed = new boolean[7]; + private long totalSize, totalSizeIn, totalSizeOut; + private boolean empty; + + private CacheChart chart; + + private String formatPercent(int percent) { + return percent <= 0 ? String.format("<%d%%", 1) : String.format("%d%%", percent); + } + + class Size extends CacheChart.SegmentSize { + + int index; + long inSize, outSize; + int inCount, outCount; + + public Size(int index, long size, long inSize, long outSize, int inCount, int outCount) { + this.index = index; + + this.size = size; + this.selected = true; + + this.inSize = inSize; + this.inCount = inCount; + this.outSize = outSize; + this.outCount = outCount; + } + } + + private void updateRows(boolean animated) { + oldItems.clear(); + oldItems.addAll(itemInners); + + itemInners.clear(); + + itemInners.add(new ItemInner(VIEW_TYPE_CHART)); + final String sinceText = totalSize > 0 ? 
+ LocaleController.formatString("YourNetworkUsageSince", R.string.YourNetworkUsageSince, LocaleController.getInstance().formatterStats.format(getResetStatsDate())) : + LocaleController.formatString("NoNetworkUsageSince", R.string.NoNetworkUsageSince, LocaleController.getInstance().formatterStats.format(getResetStatsDate())); + itemInners.add(ItemInner.asSubtitle(sinceText)); + + ArrayList sections = new ArrayList<>(); + for (int i = 0; i < segments.length; ++i) { + long size = segments[i].size; + int index = segments[i].index; + boolean emptyButShown = empty || removedSegments.contains(index); + if (size <= 0 && !emptyButShown) { + continue; + } + SpannableString percent = new SpannableString(formatPercent(tempPercents[index])); + percent.setSpan(new TypefaceSpan(AndroidUtilities.getTypeface("fonts/rmedium.ttf")), 0, percent.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + percent.setSpan(new RelativeSizeSpan(.8f), 0, percent.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + percent.setSpan(new CustomCharacterSpan(.1), 0, percent.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + sections.add(ItemInner.asCell( + i, + particles[index], + getThemedColor(colors[index]), + size == 0 ? + LocaleController.getString(titles[index]) : + TextUtils.concat(LocaleController.getString(titles[index]), " ", percent), + AndroidUtilities.formatFileSize(size) + )); + } + + if (!sections.isEmpty()) { + + SpannableString sentIcon = new SpannableString("^"); + Drawable sentIconDrawable = getContext().getResources().getDrawable(R.drawable.msg_mini_upload).mutate(); + sentIconDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_windowBackgroundWhiteBlackText), PorterDuff.Mode.MULTIPLY)); + sentIconDrawable.setBounds(0, AndroidUtilities.dp(2), AndroidUtilities.dp(16), AndroidUtilities.dp(2 + 16)); + sentIcon.setSpan(new ImageSpan(sentIconDrawable, DynamicDrawableSpan.ALIGN_CENTER), 0, 1, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + + SpannableString receivedIcon = new SpannableString("v"); + Drawable receivedIconDrawable = getContext().getResources().getDrawable(R.drawable.msg_mini_download).mutate(); + receivedIconDrawable.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_windowBackgroundWhiteBlackText), PorterDuff.Mode.MULTIPLY)); + receivedIconDrawable.setBounds(0, AndroidUtilities.dp(2), AndroidUtilities.dp(16), AndroidUtilities.dp(2 + 16)); + receivedIcon.setSpan(new ImageSpan(receivedIconDrawable, DynamicDrawableSpan.ALIGN_CENTER), 0, 1, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + + for (int i = 0; i < sections.size(); ++i) { + int index = sections.get(i).index; + if (index >= 0 && !collapsed[index]) { + Size size = segments[index]; + if (stats[size.index] == StatsController.TYPE_CALLS) { + if (size.outSize > 0 || size.outCount > 0) { + sections.add(++i, ItemInner.asCell( + -1, 0, 0, + LocaleController.formatPluralStringComma("OutgoingCallsCount", size.outCount), + AndroidUtilities.formatFileSize(size.outSize) + )); + } + if (size.inSize > 0 || size.inCount > 0) { + sections.add(++i, ItemInner.asCell( + -1, 0, 0, + LocaleController.formatPluralStringComma("IncomingCallsCount", size.inCount), + AndroidUtilities.formatFileSize(size.inSize) + )); + } + } else if (stats[size.index] != StatsController.TYPE_MESSAGES) { + if (size.outSize > 0 || size.outCount > 0) { + sections.add(++i, ItemInner.asCell( + -1, 0, 0, + TextUtils.concat(sentIcon, " ", AndroidUtilities.replaceTags(LocaleController.formatPluralStringComma("FilesSentCount", size.outCount))), + AndroidUtilities.formatFileSize(size.outSize) + 
)); + } + if (size.inSize > 0 || size.inCount > 0) { + sections.add(++i, ItemInner.asCell( + -1, 0, 0, + TextUtils.concat(receivedIcon, " ", AndroidUtilities.replaceTags(LocaleController.formatPluralStringComma("FilesReceivedCount", size.inCount))), + AndroidUtilities.formatFileSize(size.inSize) + )); + } + } else { + if (size.outSize > 0 || size.outCount > 0) { + sections.add(++i, ItemInner.asCell( + -1, 0, 0, + TextUtils.concat(sentIcon, " ", LocaleController.getString("BytesSent", R.string.BytesSent)), + AndroidUtilities.formatFileSize(size.outSize) + )); + } + if (size.inSize > 0 || size.inCount > 0) { + sections.add(++i, ItemInner.asCell( + -1, 0, 0, + TextUtils.concat(receivedIcon, " ", LocaleController.getString("BytesReceived", R.string.BytesReceived)), + AndroidUtilities.formatFileSize(size.inSize) + )); + } + } + } + } +// itemInners.add(new ItemInner(VIEW_TYPE_ROUNDING)); + itemInners.addAll(sections); +// itemInners.add(new ItemInner(VIEW_TYPE_END)); + if (!empty) { + itemInners.add(ItemInner.asSeparator(LocaleController.getString("DataUsageSectionsInfo", R.string.DataUsageSectionsInfo))); + } + } + + if (!empty) { + itemInners.add(ItemInner.asHeader(LocaleController.getString("TotalNetworkUsage", R.string.TotalNetworkUsage))); + itemInners.add(ItemInner.asCell( + -1, + R.drawable.msg_filled_data_sent, + getThemedColor(Theme.key_statisticChartLine_lightblue), + LocaleController.getString("BytesSent", R.string.BytesSent), + AndroidUtilities.formatFileSize(totalSizeOut) + )); + itemInners.add(ItemInner.asCell( + -1, + R.drawable.msg_filled_data_received, + getThemedColor(Theme.key_statisticChartLine_green), + LocaleController.getString("BytesReceived", R.string.BytesReceived), + AndroidUtilities.formatFileSize(totalSizeIn) + )); + } + + if (!sections.isEmpty()) { + itemInners.add(ItemInner.asSeparator(sinceText)); + } + + if (currentType != TYPE_ALL) { + if (sections.isEmpty()) { + itemInners.add(ItemInner.asSeparator()); + } + itemInners.add(ItemInner.asCell( + -2, + R.drawable.msg_download_settings, + getThemedColor(Theme.key_statisticChartLine_lightblue), + LocaleController.getString("AutomaticDownloadSettings", R.string.AutomaticDownloadSettings), + null + )); + String info; + switch (currentType) { + case TYPE_MOBILE: + info = LocaleController.getString("AutomaticDownloadSettingsInfoMobile", R.string.AutomaticDownloadSettingsInfoMobile); + break; + case TYPE_ROAMING: + info = LocaleController.getString("AutomaticDownloadSettingsInfoRoaming", R.string.AutomaticDownloadSettingsInfoRoaming); + break; + default: + case TYPE_WIFI: + info = LocaleController.getString("AutomaticDownloadSettingsInfoWiFi", R.string.AutomaticDownloadSettingsInfoWiFi); + break; + } + itemInners.add(ItemInner.asSeparator(info)); + } + + if (!sections.isEmpty()) { + itemInners.add(new ItemInner(VIEW_TYPE_RESET_BUTTON, LocaleController.getString("ResetStatistics", R.string.ResetStatistics))); + } + itemInners.add(ItemInner.asSeparator()); + + if (adapter != null) { + if (animated) { + adapter.setItems(oldItems, itemInners); + } else { + adapter.notifyDataSetChanged(); + } + } + } + + private CharSequence bold(CharSequence text) { + SpannableString string = new SpannableString(text); + string.setSpan(new TypefaceSpan(AndroidUtilities.getTypeface("fonts/rmedium.ttf")), 0, string.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + return string; + } + + private class Adapter extends AdapterWithDiffUtils { + + @NonNull + @Override + public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int 
viewType) { + View view; + switch (viewType) { + case VIEW_TYPE_CHART: + chart = new CacheChart(getContext(), colors.length, colors, CacheChart.TYPE_NETWORK, particles) { + + @Override + protected int heightDp() { + return 216; + } + + @Override + protected int padInsideDp() { + return 10; + } + + @Override + protected void onSectionDown(int index, boolean down) { + if (!down) { + ListView.this.removeHighlightRow(); + return; + } + if (index < 0 || index >= segments.length) { + return; + } + int pos = -1; + for (int i = 0; i < segments.length; ++i) { + if (segments[i].index == index) { + pos = i; + break; + } + } + int position = -1; + for (int i = 0; i < itemInners.size(); ++i) { + ItemInner item2 = itemInners.get(i); + if (item2 != null && item2.viewType == VIEW_TYPE_SECTION && item2.index == pos) { + position = i; + break; + } + } + + if (position >= 0) { + final int finalPosition = position; + ListView.this.highlightRow(() -> finalPosition, 0); + } else { + ListView.this.removeHighlightRow(); + } + } + }; + chart.setInterceptTouch(false); + view = chart; + break; + case VIEW_TYPE_SUBTITLE: + view = new SubtitleCell(getContext()); + break; + case VIEW_TYPE_SEPARATOR: + view = new TextInfoPrivacyCell(getContext()); + break; + case VIEW_TYPE_HEADER: + view = new HeaderCell(getContext()); + view.setBackgroundColor(getThemedColor(Theme.key_windowBackgroundWhite)); + break; + case VIEW_TYPE_RESET_BUTTON: + TextCell textCell = new TextCell(getContext()); + textCell.setTextColor(getThemedColor(Theme.key_windowBackgroundWhiteRedText5)); + textCell.setBackgroundColor(getThemedColor(Theme.key_windowBackgroundWhite)); + view = textCell; + break; + case VIEW_TYPE_END: + view = new View(getContext()) { + { setBackgroundColor(getThemedColor(Theme.key_windowBackgroundWhite)); } + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(4), MeasureSpec.EXACTLY)); + } + }; + break; + case VIEW_TYPE_ROUNDING: + view = new RoundingCell(getContext()); + break; + case VIEW_TYPE_SECTION: + default: + view = new Cell(getContext()); + break; + } + return new RecyclerListView.Holder(view); + } + + @Override + public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int position) { + ItemInner item = itemInners.get(holder.getAdapterPosition()); + int viewType = holder.getItemViewType(); + if (viewType == VIEW_TYPE_CHART) { + CacheChart chart = (CacheChart) holder.itemView; + if (segments != null) { + chart.setSegments(totalSize, animateChart, chartSegments); + } + animateChart = false; + } else if (viewType == VIEW_TYPE_SUBTITLE) { + SubtitleCell subtitleCell = (SubtitleCell) holder.itemView; + subtitleCell.setText(item.text); + int bottomViewType; + boolean bottom = position + 1 < itemInners.size() && (bottomViewType = itemInners.get(position + 1).viewType) != item.viewType && bottomViewType != VIEW_TYPE_SEPARATOR && bottomViewType != VIEW_TYPE_ROUNDING; + if (bottom) { + subtitleCell.setBackground(Theme.getThemedDrawable(getContext(), R.drawable.greydivider_top, Theme.key_windowBackgroundGrayShadow)); + } else { + subtitleCell.setBackground(null); + } + } else if (viewType == VIEW_TYPE_SECTION) { + Cell cell = (Cell) holder.itemView; + cell.set(item.imageColor, item.imageResId, item.text, item.valueText, position + 1 < getItemCount() && itemInners.get(position + 1).viewType == viewType); + cell.setArrow(item.pad || item.index < 0 || item.index < segments.length && 
segments[item.index].size <= 0 ? null : collapsed[item.index]); + } else if (viewType == VIEW_TYPE_SEPARATOR) { + TextInfoPrivacyCell view = (TextInfoPrivacyCell) holder.itemView; + boolean top = position > 0 && item.viewType != itemInners.get(position - 1).viewType; + boolean bottom = position + 1 < itemInners.size() && itemInners.get(position + 1).viewType != item.viewType; + if (top && bottom) { + view.setBackground(Theme.getThemedDrawable(getContext(), R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); + } else if (top) { + view.setBackground(Theme.getThemedDrawable(getContext(), R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); + } else if (bottom) { + view.setBackground(Theme.getThemedDrawable(getContext(), R.drawable.greydivider_top, Theme.key_windowBackgroundGrayShadow)); + } else { + view.setBackground(null); + } + view.setText(item.text); + } else if (viewType == VIEW_TYPE_HEADER) { + HeaderCell header = (HeaderCell) holder.itemView; + header.setText(item.text); + } else if (viewType == VIEW_TYPE_RESET_BUTTON) { + TextCell textCell = (TextCell) holder.itemView; + textCell.setText(item.text.toString(), false); + } else if (viewType == VIEW_TYPE_ROUNDING) { + ((RoundingCell) holder.itemView).setTop(true); + } + } + + @Override + public int getItemCount() { + return itemInners.size(); + } + + @Override + public int getItemViewType(int position) { + return itemInners.get(position).viewType; + } + + @Override + public boolean isEnabled(RecyclerView.ViewHolder holder) { + ItemInner item = itemInners.get(holder.getAdapterPosition()); + return item.viewType == VIEW_TYPE_RESET_BUTTON || item.viewType == VIEW_TYPE_SECTION && item.index != -1; + } + } + + private int getSentItemsCount(int dataType) { + switch (currentType) { + case TYPE_MOBILE: + case TYPE_WIFI: + case TYPE_ROAMING: + return StatsController.getInstance(currentAccount).getSentItemsCount(currentType - 1, dataType); + case TYPE_ALL: + default: + return ( + StatsController.getInstance(currentAccount).getSentItemsCount(0, dataType) + + StatsController.getInstance(currentAccount).getSentItemsCount(1, dataType) + + StatsController.getInstance(currentAccount).getSentItemsCount(2, dataType) + ); + } + } + + private int getReceivedItemsCount(int dataType) { + switch (currentType) { + case TYPE_MOBILE: + case TYPE_WIFI: + case TYPE_ROAMING: + return StatsController.getInstance(currentAccount).getRecivedItemsCount(currentType - 1, dataType); + case TYPE_ALL: + default: + return ( + StatsController.getInstance(currentAccount).getRecivedItemsCount(0, dataType) + + StatsController.getInstance(currentAccount).getRecivedItemsCount(1, dataType) + + StatsController.getInstance(currentAccount).getRecivedItemsCount(2, dataType) + ); + } + } + + private long getBytesCount(int dataType) { + return getSentBytesCount(dataType) + getReceivedBytesCount(dataType); + } + + private long getSentBytesCount(int dataType) { + switch (currentType) { + case TYPE_MOBILE: + case TYPE_WIFI: + case TYPE_ROAMING: + return StatsController.getInstance(currentAccount).getSentBytesCount(currentType - 1, dataType); + case TYPE_ALL: + default: + return ( + StatsController.getInstance(currentAccount).getSentBytesCount(0, dataType) + + StatsController.getInstance(currentAccount).getSentBytesCount(1, dataType) + + StatsController.getInstance(currentAccount).getSentBytesCount(2, dataType) + ); + } + } + + private long getReceivedBytesCount(int dataType) { + switch (currentType) { + case TYPE_MOBILE: + case TYPE_WIFI: + case TYPE_ROAMING: + 
return StatsController.getInstance(currentAccount).getReceivedBytesCount(currentType - 1, dataType); + case TYPE_ALL: + default: + return ( + StatsController.getInstance(currentAccount).getReceivedBytesCount(0, dataType) + + StatsController.getInstance(currentAccount).getReceivedBytesCount(1, dataType) + + StatsController.getInstance(currentAccount).getReceivedBytesCount(2, dataType) + ); + } + } + + private long getResetStatsDate() { + switch (currentType) { + case TYPE_MOBILE: + case TYPE_WIFI: + case TYPE_ROAMING: + return StatsController.getInstance(currentAccount).getResetStatsDate(currentType - 1); + case TYPE_ALL: + default: + return min( + StatsController.getInstance(currentAccount).getResetStatsDate(0), + StatsController.getInstance(currentAccount).getResetStatsDate(1), + StatsController.getInstance(currentAccount).getResetStatsDate(2) + ); + } + } + + private long min(long... numbers) { + long min = Long.MAX_VALUE; + for (int i = 0; i < numbers.length; ++i) { + if (min > numbers[i]) + min = numbers[i]; + } + return min; + } + + @Override + protected void onMeasure(int widthSpec, int heightSpec) { + super.onMeasure(widthSpec, MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(heightSpec), MeasureSpec.EXACTLY)); + } + } + + private static final int VIEW_TYPE_CHART = 0; + private static final int VIEW_TYPE_SUBTITLE = 1; + private static final int VIEW_TYPE_SECTION = 2; + private static final int VIEW_TYPE_SEPARATOR = 3; + private static final int VIEW_TYPE_HEADER = 4; + private static final int VIEW_TYPE_RESET_BUTTON = 5; + private static final int VIEW_TYPE_ROUNDING = 6; + private static final int VIEW_TYPE_END = 7; + + private static class ItemInner extends AdapterWithDiffUtils.Item { + + public int imageResId; + public int imageColor; + public CharSequence text; + public CharSequence valueText; + + public int index; + public boolean pad; + + public int key; + + public ItemInner(int viewType) { + super(viewType, false); + } + + public ItemInner(int viewType, int key) { + super(viewType, false); + this.key = key; + } + + private ItemInner(int viewType, CharSequence text) { + super(viewType, false); + this.text = text; + } + + private ItemInner(int viewType, CharSequence text, CharSequence valueText) { + super(viewType, false); + this.text = text; + this.valueText = valueText; + } + + private ItemInner(int viewType, int index, CharSequence text, CharSequence valueText) { + super(viewType, false); + this.index = index; + this.text = text; + this.valueText = valueText; + } + + private ItemInner(int viewType, int index, int imageResId, int imageColor, CharSequence text, CharSequence valueText) { + super(viewType, false); + this.index = index; + this.imageResId = imageResId; + this.imageColor = imageColor; + this.text = text; + this.valueText = valueText; + } + + public static ItemInner asSeparator() { + return new ItemInner(VIEW_TYPE_SEPARATOR); + } + + public static ItemInner asSeparator(String hint) { + return new ItemInner(VIEW_TYPE_SEPARATOR, hint); + } + + public static ItemInner asHeader(String text) { + return new ItemInner(VIEW_TYPE_HEADER, text); + } + + public static ItemInner asSubtitle(String text) { + return new ItemInner(VIEW_TYPE_SUBTITLE, text); + } + + public static ItemInner asCell(int index, int imageResId, int imageColor, CharSequence text, CharSequence valueText) { + return new ItemInner(VIEW_TYPE_SECTION, index, imageResId, imageColor, text, valueText); + } + + public static ItemInner asCell(String text, CharSequence valueText) { + return new 
ItemInner(VIEW_TYPE_SECTION, text, valueText); + } + + @Override + public boolean equals(Object object) { + if (!(object instanceof ItemInner)) { + return false; + } + ItemInner item = (ItemInner) object; + if (item.viewType != viewType) { + return false; + } + if (viewType == VIEW_TYPE_SUBTITLE || viewType == VIEW_TYPE_HEADER || viewType == VIEW_TYPE_SEPARATOR || viewType == VIEW_TYPE_RESET_BUTTON) { + return TextUtils.equals(text, item.text); + } + if (viewType == VIEW_TYPE_SECTION) { + return item.index == index && TextUtils.equals(text, item.text) && item.imageColor == imageColor && item.imageResId == imageResId; + } + return item.key == key; + } + } + + class SubtitleCell extends FrameLayout { + + TextView textView; + + public SubtitleCell(Context context) { + super(context); + + textView = new TextView(context); + textView.setGravity(Gravity.CENTER); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); + textView.setTextColor(getThemedColor(Theme.key_windowBackgroundWhiteGrayText)); + + addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.FILL, 24, 0, 24, 14)); + } + + public void setText(CharSequence text) { + textView.setText(text); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), heightMeasureSpec); + } + } + + public static class RoundingCell extends View { + Path path = new Path(); + Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + + public RoundingCell(Context context) { + super(context); + paint.setShadowLayer(dp(1), 0, dp(-0.66f), 0x0f000000); + paint.setColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + } + + private boolean top = true; + + public void setTop(boolean top) { + path.rewind(); + float r; + if (this.top = top) { + r = AndroidUtilities.dp(14); + AndroidUtilities.rectTmp.set(0, AndroidUtilities.dp(4), getMeasuredWidth(), AndroidUtilities.dp(4) + getMeasuredHeight() * 2); + path.addRoundRect(AndroidUtilities.rectTmp, r, r, Path.Direction.CW); + } else { + r = AndroidUtilities.dp(8); + AndroidUtilities.rectTmp.set(0, -getMeasuredHeight() * 2 - AndroidUtilities.dp(4), getMeasuredWidth(), getMeasuredHeight() - AndroidUtilities.dp(4)); + path.addRoundRect(AndroidUtilities.rectTmp, r, r, Path.Direction.CW); + } + } + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + canvas.drawPath(path, paint); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(13), MeasureSpec.EXACTLY)); + setTop(this.top); + } + + @Override + protected void onConfigurationChanged(Configuration newConfig) { + super.onConfigurationChanged(newConfig); + requestLayout(); + } + } + + class Cell extends FrameLayout { + + ImageView imageView; + LinearLayout linearLayout, linearLayout2; + TextView textView; + ImageView arrowView; + TextView valueTextView; + boolean divider; + + public Cell(Context context) { + super(context); + + setBackgroundColor(getThemedColor(Theme.key_windowBackgroundWhite)); + + imageView = new ImageView(context); + imageView.setScaleType(ImageView.ScaleType.CENTER); +// imageView.setColorFilter(new PorterDuffColorFilter(Color.WHITE, PorterDuff.Mode.MULTIPLY)); + addView(imageView, LayoutHelper.createFrame(28, 28, Gravity.CENTER_VERTICAL | (LocaleController.isRTL ? 
Gravity.RIGHT : Gravity.LEFT), 18, 0, 18, 0)); + + linearLayout = new LinearLayout(context); + linearLayout.setOrientation(LinearLayout.HORIZONTAL); + linearLayout.setWeightSum(2); + addView(linearLayout, LayoutHelper.createFrameRelatively(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL | (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT), 64, 0, 20, 0)); + + linearLayout2 = new LinearLayout(context); + linearLayout2.setOrientation(LinearLayout.HORIZONTAL); + if (LocaleController.isRTL) { + linearLayout2.setGravity(Gravity.RIGHT); + } + linearLayout2.setWeightSum(2); + + textView = new TextView(context); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); + textView.setTextColor(getThemedColor(Theme.key_windowBackgroundWhiteBlackText)); + textView.setEllipsize(TextUtils.TruncateAt.END); + textView.setSingleLine(); + textView.setLines(1); + + arrowView = new ImageView(context); + arrowView.setScaleType(ImageView.ScaleType.FIT_CENTER); + arrowView.setImageResource(R.drawable.arrow_more); + arrowView.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_windowBackgroundWhiteBlackText), PorterDuff.Mode.MULTIPLY)); + arrowView.setTranslationY(AndroidUtilities.dp(1)); + arrowView.setVisibility(View.GONE); + + if (LocaleController.isRTL) { + linearLayout2.addView(arrowView, LayoutHelper.createLinear(16, 16, Gravity.CENTER_VERTICAL | Gravity.RIGHT, 3, 0, 0, 0)); + linearLayout2.addView(textView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL | Gravity.RIGHT)); + } else { + linearLayout2.addView(textView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL)); + linearLayout2.addView(arrowView, LayoutHelper.createLinear(16, 16, Gravity.CENTER_VERTICAL, 3, 0, 0, 0)); + } + + valueTextView = new TextView(context); + valueTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); + valueTextView.setTextColor(getThemedColor(Theme.key_windowBackgroundWhiteBlueText2)); + valueTextView.setGravity(LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT); + + if (LocaleController.isRTL) { + linearLayout.addView(valueTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL | Gravity.LEFT)); + linearLayout.addView(linearLayout2, LayoutHelper.createLinear(0, LayoutHelper.WRAP_CONTENT, 2, Gravity.CENTER_VERTICAL | Gravity.RIGHT)); + } else { + linearLayout.addView(linearLayout2, LayoutHelper.createLinear(0, LayoutHelper.WRAP_CONTENT, 2, Gravity.CENTER_VERTICAL)); + linearLayout.addView(valueTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL | Gravity.RIGHT)); + } + } + + public void set( + int imageColor, + int imageResId, + CharSequence title, + CharSequence value, + boolean divider + ) { + if (imageResId == 0) { + imageView.setVisibility(View.GONE); + } else { + imageView.setVisibility(View.VISIBLE); + imageView.setBackground(Theme.createRoundRectDrawable(AndroidUtilities.dp(9), imageColor)); + imageView.setImageResource(imageResId); + } + + textView.setText(title); + valueTextView.setText(value); + + setWillNotDraw(!(this.divider = divider)); + } + + public void setArrow(Boolean value) { + if (value == null) { + arrowView.setVisibility(View.GONE); + } else { + arrowView.setVisibility(View.VISIBLE); + arrowView.animate().rotation(value ? 
0 : 180).setDuration(360).setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT).start(); + } + } + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + + if (divider) { + canvas.drawLine(LocaleController.isRTL ? 0 : AndroidUtilities.dp(64), getMeasuredHeight() - 1, getMeasuredWidth() - (LocaleController.isRTL ? AndroidUtilities.dp(64) : 0), getMeasuredHeight() - 1, Theme.dividerPaint); + } + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure( + MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), + MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(48), MeasureSpec.EXACTLY) + ); + } + } + + public class CustomCharacterSpan extends MetricAffectingSpan { + double ratio = 0.5; + + public CustomCharacterSpan() { + } + + public CustomCharacterSpan(double ratio) { + this.ratio = ratio; + } + + @Override + public void updateDrawState(TextPaint paint) { + paint.baselineShift += (int) (paint.ascent() * ratio); + } + + @Override + public void updateMeasureState(TextPaint paint) { + paint.baselineShift += (int) (paint.ascent() * ratio); + } + } + + private boolean changeStatusBar; + + @Override + public void onTransitionAnimationProgress(boolean isOpen, float progress) { + if (progress > .5f && !changeStatusBar) { + changeStatusBar = true; + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.needCheckSystemBarColors); + } + super.onTransitionAnimationProgress(isOpen, progress); + } + + @Override + public boolean isLightStatusBar() { + if (!changeStatusBar) { + return super.isLightStatusBar(); + } + return AndroidUtilities.computePerceivedBrightness(Theme.getColor(Theme.key_actionBarActionModeDefault)) > 0.721f; + } + + @Override + public boolean isSwipeBackEnabled(MotionEvent event) { + if (event.getY() <= ActionBar.getCurrentActionBarHeight() + AndroidUtilities.dp(48)) { + return true; + } + return pager.getCurrentPosition() == 0; + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/DataUsageActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/DataUsageActivity.java index a80f046c86..3f078d9de9 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/DataUsageActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/DataUsageActivity.java @@ -27,6 +27,9 @@ import android.widget.FrameLayout; import android.widget.TextView; +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.RecyclerView; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.LocaleController; import org.telegram.messenger.R; @@ -46,9 +49,6 @@ import java.util.ArrayList; -import androidx.recyclerview.widget.LinearLayoutManager; -import androidx.recyclerview.widget.RecyclerView; - public class DataUsageActivity extends BaseFragment { private class ViewPage extends FrameLayout { @@ -483,7 +483,7 @@ public boolean supportsPredictiveItemAnimations() { showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } }); @@ -806,7 +806,7 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } case 3: { TextInfoPrivacyCell cell = (TextInfoPrivacyCell) holder.itemView; - cell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, 
Theme.key_windowBackgroundGrayShadow)); + cell.setBackground(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); cell.setText(LocaleController.formatString("NetworkUsageSince", R.string.NetworkUsageSince, LocaleController.getInstance().formatterStats.format(StatsController.getInstance(currentAccount).getResetStatsDate(currentType)))); break; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/DefaultThemesPreviewCell.java b/TMessagesProj/src/main/java/org/telegram/ui/DefaultThemesPreviewCell.java index b8a172b4b5..7793fed943 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/DefaultThemesPreviewCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/DefaultThemesPreviewCell.java @@ -129,6 +129,7 @@ public DefaultThemesPreviewCell(Context context, BaseFragment parentFragment, in editor.commit(); } + Theme.turnOffAutoNight(parentFragment); }); progressView = new FlickerLoadingView(getContext(), null); @@ -206,7 +207,7 @@ public void onClick(View view) { pos[0] += dayNightCell.getImageView().getMeasuredWidth() / 2; pos[1] += dayNightCell.getImageView().getMeasuredHeight() / 2 + AndroidUtilities.dp(3); - Runnable then = () -> AndroidUtilities.runOnUIThread(() -> { + Runnable then = () -> { updateDayNightMode(); updateSelectedPosition(); @@ -264,7 +265,10 @@ public void onAnimationEnd(Animator animation) { } else { dayNightCell.setTextAndIcon(LocaleController.getString("SettingsSwitchToDayMode", R.string.SettingsSwitchToDayMode), darkThemeDrawable, true); } - }); + + Theme.turnOffAutoNight(parentFragment); + }; + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.needSetDayNightTheme, themeInfo, false, pos, -1, toDark, dayNightCell.getImageView(), dayNightCell, then); } }); @@ -433,15 +437,6 @@ public void selectTheme(Theme.ThemeInfo themeInfo) { } NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.needSetDayNightTheme, themeInfo, false, null, -1); } - //updateRows(); - - int count = getChildCount(); -// for (int a = 0; a < count; a++) { -// View child = getChildAt(a); -// if (child instanceof ThemesHorizontalListCell.InnerThemeView) { -// ((ThemesHorizontalListCell.InnerThemeView) child).updateCurrentThemeCheck(); -// } -// } } public void updateColors() { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Delegates/MemberRequestsDelegate.java b/TMessagesProj/src/main/java/org/telegram/ui/Delegates/MemberRequestsDelegate.java index 742166ceca..68e338b6b2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Delegates/MemberRequestsDelegate.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Delegates/MemberRequestsDelegate.java @@ -43,6 +43,7 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ChatObject; +import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MemberRequestsController; import org.telegram.messenger.MessagesController; @@ -464,6 +465,9 @@ private void hideChatJoinRequest(TLRPC.TL_chatInviteImporter importer, boolean i MessagesController.getInstance(currentAccount).processUpdates(updates, false); } AndroidUtilities.runOnUIThread(() -> { + if (fragment == null || fragment.getParentActivity() == null) { + return; + } if (error == null) { TLRPC.TL_updates updates = (TLRPC.TL_updates) response; if (!updates.chats.isEmpty()) { @@ -620,12 +624,34 @@ public int getItemViewType(int position) { @SuppressLint("NotifyDataSetChanged") public void setItems(List 
newItems) { + for (int i = 0; i < newItems.size(); ++i) { + long id = newItems.get(i).user_id; + for (int j = i + 1; j < newItems.size(); ++j) { + long iid = newItems.get(j).user_id; + if (iid == id) { + newItems.remove(i); + i--; + break; + } + } + } currentImporters.clear(); currentImporters.addAll(newItems); notifyDataSetChanged(); } public void appendItems(List newItems) { + for (int i = 0; i < newItems.size(); ++i) { + long id = newItems.get(i).user_id; + for (int j = 0; j < currentImporters.size(); ++j) { + long iid = currentImporters.get(j).user_id; + if (iid == id) { + newItems.remove(i); + i--; + break; + } + } + } currentImporters.addAll(newItems); if (currentImporters.size() > newItems.size()) { notifyItemChanged(currentImporters.size() - newItems.size() - 1); @@ -749,8 +775,9 @@ protected void onDraw(Canvas canvas) { }); popupLayout.addView(sendMsgCell); - ActionBarMenuSubItem dismissCell = new ActionBarMenuSubItem(context, false, false); - dismissCell.setColors(Theme.getColor(Theme.key_dialogTextRed2, resourcesProvider), Theme.getColor(Theme.key_dialogRedIcon, resourcesProvider)); + ActionBarMenuSubItem dismissCell = new ActionBarMenuSubItem(context, false, true); + dismissCell.setColors(Theme.getColor(Theme.key_dialogTextRed, resourcesProvider), Theme.getColor(Theme.key_dialogRedIcon, resourcesProvider)); + dismissCell.setSelectorColor(Theme.getColor(Theme.key_dialogButtonSelector, resourcesProvider)); dismissCell.setTextAndIcon(LocaleController.getString("DismissRequest", R.string.DismissRequest), R.drawable.msg_remove); dismissCell.setOnClickListener((v) -> { @@ -762,7 +789,7 @@ protected void onDraw(Canvas canvas) { popupLayout.addView(dismissCell); ActionBarMenuSubItem banCell = new ActionBarMenuSubItem(context, false, true); - banCell.setColors(Theme.getColor(Theme.key_dialogTextRed2, resourcesProvider), Theme.getColor(Theme.key_dialogRedIcon, resourcesProvider)); + banCell.setColors(Theme.getColor(Theme.key_dialogTextRed, resourcesProvider), Theme.getColor(Theme.key_dialogRedIcon, resourcesProvider)); banCell.setSelectorColor(Theme.getColor(Theme.key_dialogButtonSelector, resourcesProvider)); banCell.setTextAndIcon(LocaleController.getString("KickFromGroup", R.string.KickFromGroup), R.drawable.profile_ban); banCell.setOnClickListener((v) -> { @@ -800,8 +827,19 @@ protected void onCreate(Bundle savedInstanceState) { public void setImporter(TLRPC.TL_chatInviteImporter importer, BackupImageView imageView) { this.importer = importer; this.imageView = imageView; + + final ImageLocation imageLocation; + final ImageLocation thumbLocation; + TLRPC.User currentUser = MessagesController.getInstance(currentAccount).getUser(importer.user_id); + imageLocation = ImageLocation.getForUserOrChat(currentUser, ImageLocation.TYPE_BIG); + thumbLocation = ImageLocation.getForUserOrChat(currentUser, ImageLocation.TYPE_SMALL); + final TLRPC.UserFull userFull = MessagesController.getInstance(currentAccount).getUserFull(importer.user_id); + if (userFull == null) { + MessagesController.getInstance(currentAccount).loadUserInfo(currentUser, false, 0); + } viewPager.setParentAvatarImage(imageView); viewPager.setData(importer.user_id, true); + viewPager.initIfEmpty(null, imageLocation, thumbLocation, true); TLRPC.User user = users.get(importer.user_id); nameText.setText(UserObject.getUserName(user)); bioText.setText(importer.about); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/DialogOrContactPickerActivity.java 
b/TMessagesProj/src/main/java/org/telegram/ui/DialogOrContactPickerActivity.java index 11301655a9..afed1c94b4 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/DialogOrContactPickerActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/DialogOrContactPickerActivity.java @@ -87,18 +87,19 @@ public DialogOrContactPickerActivity() { args.putBoolean("onlySelect", true); args.putBoolean("checkCanWrite", false); args.putBoolean("resetDelegate", false); - args.putInt("dialogsType", 9); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_BLOCK); dialogsActivity = new DialogsActivity(args); - dialogsActivity.setDelegate((fragment, dids, message, param) -> { + dialogsActivity.setDelegate((fragment, dids, message, param, topicsFragment) -> { if (dids.isEmpty()) { - return; + return true; } long did = dids.get(0).dialogId; if (!DialogObject.isUserDialog(did)) { - return; + return true; } TLRPC.User user = getMessagesController().getUser(did); showBlockAlert(user); + return true; }); dialogsActivity.onFragmentCreate(); @@ -637,7 +638,7 @@ private void showBlockAlert(TLRPC.User user) { showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java index eb092935f6..118f666469 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java @@ -96,6 +96,7 @@ import org.telegram.messenger.FileLog; import org.telegram.messenger.ImageLoader; import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.MessageObject; @@ -110,6 +111,7 @@ import org.telegram.messenger.Utilities; import org.telegram.messenger.XiaomiUtilities; import org.telegram.tgnet.ConnectionsManager; +import org.telegram.tgnet.SerializedData; import org.telegram.tgnet.TLObject; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.ActionBar; @@ -135,6 +137,7 @@ import org.telegram.ui.Cells.ArchiveHintInnerCell; import org.telegram.ui.Cells.DialogCell; import org.telegram.ui.Cells.DialogsEmptyCell; +import org.telegram.ui.Cells.DialogsHintCell; import org.telegram.ui.Cells.DividerCell; import org.telegram.ui.Cells.DrawerActionCell; import org.telegram.ui.Cells.DrawerActionCheckCell; @@ -147,6 +150,7 @@ import org.telegram.ui.Cells.HintDialogCell; import org.telegram.ui.Cells.LoadingCell; import org.telegram.ui.Cells.ProfileSearchCell; +import org.telegram.ui.Cells.RequestPeerRequirementsCell; import org.telegram.ui.Cells.ShadowSectionCell; import org.telegram.ui.Cells.TextCell; import org.telegram.ui.Cells.TextInfoPrivacyCell; @@ -202,6 +206,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.concurrent.ConcurrentHashMap; import java.util.Random; import kotlin.Unit; @@ -218,7 +223,7 @@ import xyz.nextalone.nagram.NaConfig; public class DialogsActivity extends BaseFragment implements NotificationCenter.NotificationCenterDelegate, FloatingDebugProvider { - public final static int DIALOGS_TYPE_START_ATTACH_BOT = 14; + public final static boolean DISPLAY_SPEEDOMETER_IN_DOWNLOADS_SEARCH = true; private 
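Reviewer note: DialogsActivityDelegate.didSelectDialogs changes shape across this patch — it now takes a TopicsFragment argument and returns a boolean (the lambdas above switch from a bare return to return true). The hunks do not spell out what the return value means, so the sketch below only mirrors the pattern the patch uses; DialogsActivity, TopicKey and TopicsFragment are simplified stand-ins for the real classes:

import java.util.ArrayList;

public class DelegateSignatureSketch {
    // Simplified stand-ins for the framework types used by the interface.
    static class DialogsActivity {}
    static class TopicsFragment {}
    static class TopicKey { long dialogId; TopicKey(long id) { dialogId = id; } }

    // New shape: extra TopicsFragment parameter and a boolean result.
    interface DialogsActivityDelegate {
        boolean didSelectDialogs(DialogsActivity fragment, ArrayList<TopicKey> dids,
                                 CharSequence message, boolean param, TopicsFragment topicsFragment);
    }

    public static void main(String[] args) {
        DialogsActivityDelegate delegate = (fragment, dids, message, param, topicsFragment) -> {
            if (dids.isEmpty()) {
                return true; // mirrors the patch: early exits now return true instead of plain return
            }
            System.out.println("picked dialog " + dids.get(0).dialogId);
            return true;
        };
        ArrayList<TopicKey> dids = new ArrayList<>();
        dids.add(new TopicKey(42L));
        delegate.didSelectDialogs(new DialogsActivity(), dids, null, false, null);
    }
}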
boolean canShowFilterTabsView; @@ -230,6 +235,9 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. private boolean rightFragmentTransitionInProgress; private boolean allowGlobalSearch = true; + private TLRPC.RequestPeerType requestPeerType; + private long requestPeerBotId; + public MessagesStorage.TopicKey getOpenedDialogId() { return openedDialogId; } @@ -259,12 +267,12 @@ public ViewPage(Context context) { } public boolean isDefaultDialogType() { - return dialogsType == 0 || dialogsType == 7 || dialogsType == 8; + return dialogsType == DIALOGS_TYPE_DEFAULT || dialogsType == 7 || dialogsType == 8; } boolean updating; Runnable updateListRunnable = () -> { - dialogsAdapter.updateList(listView, dialogsType == 0 && hasHiddenArchive() && archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN, actionBar.getTranslationY()); + dialogsAdapter.updateList(listView, dialogsType == DIALOGS_TYPE_DEFAULT && hasHiddenArchive() && archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN, actionBar.getTranslationY()); listView.updateDialogsOnNextDraw = true; updating = false; }; @@ -396,6 +404,8 @@ public void updateList(boolean animated) { private FragmentContextView fragmentLocationContextView; private FragmentContextView fragmentContextView; + private DialogsHintCell dialogsHintCell; + private Long cacheSize, deviceSize; private ArrayList frozenDialogsList; private boolean dialogsListFrozen; @@ -527,6 +537,8 @@ public void updateList(boolean animated) { private int debugLastUpdateAction = -1; private boolean slowedReloadAfterDialogClick; + private boolean isPremiumHintUpgrade; + private AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable statusDrawable; private DrawerProfileCell.AnimatedStatusView animatedStatusView; public RightSlidingDialogContainer rightSlidingDialogContainer; @@ -650,22 +662,29 @@ protected boolean drawChild(Canvas canvas, View child, long drawingTime) { return true; } boolean result; - if (child == viewPages[0] || (viewPages.length > 1 && child == viewPages[1]) || child == fragmentContextView || child == fragmentLocationContextView) { + if (child == viewPages[0] || (viewPages.length > 1 && child == viewPages[1]) || child == fragmentContextView || child == fragmentLocationContextView || child == dialogsHintCell) { canvas.save(); canvas.clipRect(0, -getY() + actionBar.getY() + getActionBarFullHeight(), getMeasuredWidth(), getMeasuredHeight()); if (slideFragmentProgress != 1f) { - float s = 1f - 0.05f * (1f - slideFragmentProgress); - canvas.translate((isDrawerTransition ? AndroidUtilities.dp(4) : -AndroidUtilities.dp(4)) * (1f - slideFragmentProgress), 0); - canvas.scale(s, s, isDrawerTransition ? getMeasuredWidth() : 0, -getY() + actionBar.getY() + getActionBarFullHeight()); + if (slideFragmentLite) { + canvas.translate((isDrawerTransition ? 1 : -1) * AndroidUtilities.dp(slideAmplitudeDp) * (1f - slideFragmentProgress), 0); + } else { + final float s = 1f - 0.05f * (1f - slideFragmentProgress); + canvas.translate((isDrawerTransition ? AndroidUtilities.dp(4) : -AndroidUtilities.dp(4)) * (1f - slideFragmentProgress), 0); + canvas.scale(s, s, isDrawerTransition ? getMeasuredWidth() : 0, -getY() + actionBar.getY() + getActionBarFullHeight()); + } } - result = super.drawChild(canvas, child, drawingTime); canvas.restore(); } else if (child == actionBar && slideFragmentProgress != 1f) { canvas.save(); - float s = 1f - 0.05f * (1f - slideFragmentProgress); - canvas.translate((isDrawerTransition ? 
AndroidUtilities.dp(4) : -AndroidUtilities.dp(4)) * (1f - slideFragmentProgress), 0); - canvas.scale(s, s, isDrawerTransition ? getMeasuredWidth() : 0, (actionBar.getOccupyStatusBar() ? AndroidUtilities.statusBarHeight : 0) + ActionBar.getCurrentActionBarHeight() / 2f); + if (slideFragmentLite) { + canvas.translate((isDrawerTransition ? 1 : -1) * AndroidUtilities.dp(slideAmplitudeDp) * (1f - slideFragmentProgress), 0); + } else { + float s = 1f - 0.05f * (1f - slideFragmentProgress); + canvas.translate((isDrawerTransition ? AndroidUtilities.dp(4) : -AndroidUtilities.dp(4)) * (1f - slideFragmentProgress), 0); + canvas.scale(s, s, isDrawerTransition ? getMeasuredWidth() : 0, (actionBar.getOccupyStatusBar() ? AndroidUtilities.statusBarHeight : 0) + ActionBar.getCurrentActionBarHeight() / 2f); + } result = super.drawChild(canvas, child, drawingTime); canvas.restore(); } else { @@ -783,9 +802,13 @@ protected void dispatchDraw(Canvas canvas) { canvas.save(); canvas.translate(fragmentContextView.getX(), fragmentContextView.getY()); if (slideFragmentProgress != 1f) { - float s = 1f - 0.05f * (1f - slideFragmentProgress); - canvas.translate((isDrawerTransition ? AndroidUtilities.dp(4) : -AndroidUtilities.dp(4)) * (1f - slideFragmentProgress), 0); - canvas.scale(s, 1f, isDrawerTransition ? getMeasuredWidth() : 0, fragmentContextView.getY()); + if (slideFragmentLite) { + canvas.translate((isDrawerTransition ? 1 : -1) * AndroidUtilities.dp(slideAmplitudeDp) * (1f - slideFragmentProgress), 0); + } else { + final float s = 1f - 0.05f * (1f - slideFragmentProgress); + canvas.translate((isDrawerTransition ? AndroidUtilities.dp(4) : -AndroidUtilities.dp(4)) * (1f - slideFragmentProgress), 0); + canvas.scale(s, 1f, isDrawerTransition ? getMeasuredWidth() : 0, fragmentContextView.getY()); + } } fragmentContextView.setDrawOverlay(true); fragmentContextView.draw(canvas); @@ -866,7 +889,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { inputFieldHeight = 0; } - if (SharedConfig.smoothKeyboard && commentView.isPopupShowing()) { + if (commentView.isPopupShowing()) { fragmentView.setTranslationY(0); for (int a = 0; a < viewPages.length; a++) { if (viewPages[a] != null) { @@ -897,12 +920,12 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { int contentWidthSpec = View.MeasureSpec.makeMeasureSpec(widthSize, View.MeasureSpec.EXACTLY); int h; if (filterTabsView != null && filterTabsView.getVisibility() == VISIBLE) { - h = heightSize - inputFieldHeight + AndroidUtilities.dp(2) - AndroidUtilities.dp(44) - topPadding; + h = heightSize - inputFieldHeight + AndroidUtilities.dp(2) - AndroidUtilities.dp(44) - topPadding - (dialogsHintCell != null ? dialogsHintCell.height() : 0); if (rightSlidingDialogContainer.hasFragment()) { h += AndroidUtilities.dp(44); } } else { - h = heightSize - inputFieldHeight + AndroidUtilities.dp(2) - ((onlySelect && !(initialDialogsType == 3 && NekoConfig.showTabsOnForward.Bool())) ? 0 : actionBar.getMeasuredHeight()) - topPadding; + h = heightSize - inputFieldHeight + AndroidUtilities.dp(2) - ((onlySelect && !(initialDialogsType == 3 && NekoConfig.showTabsOnForward.Bool())) ? 0 : actionBar.getMeasuredHeight()) - topPadding - (dialogsHintCell != null ? 
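Reviewer note: drawChild and dispatchDraw gain a slideFragmentLite branch that only translates the content by slideAmplitudeDp instead of the translate-plus-scale transform. A runnable sketch of the two parameterizations, assuming the 0.95 scale factor and amplitude behave as shown in the hunks; dp conversion is faked with a constant density and the 32 dp amplitude is an arbitrary example value:

public class SlideTransitionSketch {
    static final float DENSITY = 3f; // stand-in for AndroidUtilities.density

    static float dp(float value) { return value * DENSITY; }

    // Returns {translationX, scale} for a given slide progress in [0..1].
    static float[] slideTransform(boolean lite, boolean drawerTransition, float progress, int amplitudeDp) {
        float direction = drawerTransition ? 1f : -1f;
        if (lite) {
            // Lite path: translation only, no scale, cheaper to rasterize every frame.
            return new float[]{ direction * dp(amplitudeDp) * (1f - progress), 1f };
        }
        float scale = 1f - 0.05f * (1f - progress);
        return new float[]{ direction * dp(4) * (1f - progress), scale };
    }

    public static void main(String[] args) {
        float[] full = slideTransform(false, false, 0f, 32);
        float[] lite = slideTransform(true, false, 0f, 32);
        System.out.printf("full: dx=%.1f scale=%.2f%n", full[0], full[1]); // dx=-12.0 scale=0.95
        System.out.printf("lite: dx=%.1f scale=%.2f%n", lite[0], lite[1]); // dx=-96.0 scale=1.00
    }
}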
dialogsHintCell.height() : 0); } if (filtersTabAnimator != null && filterTabsView != null && filterTabsView.getVisibility() == VISIBLE) { h += filterTabsMoveFrom; @@ -1038,6 +1061,11 @@ protected void onLayout(boolean changed, int l, int t, int r, int b) { } } childTop += topPadding; + if (dialogsHintCell != null) { + childTop += dialogsHintCell.height(); + } + } else if (child instanceof DialogsHintCell) { + childTop += actionBar.getMeasuredHeight() + (filterTabsView != null && filterTabsView.getVisibility() == View.VISIBLE ? filterTabsView.getMeasuredHeight() : 0); } else if (child instanceof FragmentContextView) { childTop += actionBar.getMeasuredHeight(); } else if (child == floatingButtonContainer && selectAnimatedEmojiDialog != null) { @@ -1075,7 +1103,7 @@ public void requestDisallowInterceptTouchEvent(boolean disallowIntercept) { public boolean onTouchEvent(MotionEvent ev) { if (parentLayout != null && filterTabsView != null && !filterTabsView.isEditing() && !searching && !rightSlidingDialogContainer.hasFragment() && !parentLayout.checkTransitionAnimation() && !parentLayout.isInPreviewMode() && !parentLayout.isPreviewOpenAnimationInProgress() && !parentLayout.getDrawerLayoutContainer().isDrawerOpened() && - (ev == null || startedTracking || ev.getY() > actionBar.getMeasuredHeight() + actionBar.getTranslationY()) && SharedConfig.getChatSwipeAction(currentAccount) == SwipeGestureSettingsView.SWIPE_GESTURE_FOLDERS) { + (ev == null || startedTracking || ev.getY() > actionBar.getMeasuredHeight() + actionBar.getTranslationY()) && (SharedConfig.getChatSwipeAction(currentAccount) == SwipeGestureSettingsView.SWIPE_GESTURE_FOLDERS || SharedConfig.getChatSwipeAction(currentAccount) == SwipeGestureSettingsView.SWIPE_GESTURE_ARCHIVE && viewPages[0] != null && (viewPages[0].dialogsAdapter.getDialogsType() == 7 || viewPages[0].dialogsAdapter.getDialogsType() == 8))) { if (ev != null) { if (velocityTracker == null) { velocityTracker = VelocityTracker.obtain(); @@ -1313,9 +1341,11 @@ protected void drawList(Canvas blurCanvas, boolean top) { blurCanvas.translate(viewPages[i].getX(), viewPages[i].getY() + viewPages[i].listView.getY() + child.getY()); if (child instanceof DialogCell) { DialogCell cell = (DialogCell) child; - cell.drawingForBlur = true; - cell.draw(blurCanvas); - cell.drawingForBlur = false; + if (!(cell.isFolderCell() && SharedConfig.archiveHidden)) { + cell.drawingForBlur = true; + cell.draw(blurCanvas); + cell.drawingForBlur = false; + } } else { child.draw(blurCanvas); } @@ -1326,6 +1356,22 @@ protected void drawList(Canvas blurCanvas, boolean top) { } } } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + if (statusDrawable != null) { + statusDrawable.attach(); + } + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + if (statusDrawable != null) { + statusDrawable.detach(); + } + } } public static float viewOffset = 0.0f; @@ -1663,7 +1709,7 @@ protected void onMeasure(int widthSpec, int heightSpec) { RecyclerView.ViewHolder holder = parentPage.listView.findViewHolderForAdapterPosition(pos); if (holder != null) { int top = holder.itemView.getTop(); - if (parentPage.dialogsType == 0 && hasHiddenArchive() && parentPage.archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN) { + if (parentPage.dialogsType == DIALOGS_TYPE_DEFAULT && hasHiddenArchive() && parentPage.archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN) { pos = Math.max(1, pos); } ignoreLayout = true; @@ -1685,7 +1731,7 @@ protected void onMeasure(int 
widthSpec, int heightSpec) { } if (firstLayout && getMessagesController().dialogsLoaded) { - if (parentPage.dialogsType == 0 && hasHiddenArchive()) { + if (parentPage.dialogsType == DIALOGS_TYPE_DEFAULT && hasHiddenArchive()) { ignoreLayout = true; LinearLayoutManager layoutManager = (LinearLayoutManager) getLayoutManager(); layoutManager.scrollToPositionWithOffset(1, 0); @@ -1809,7 +1855,7 @@ public boolean onTouchEvent(MotionEvent e) { } } boolean result = super.onTouchEvent(e); - if (parentPage.dialogsType == 0 && (action == MotionEvent.ACTION_UP || action == MotionEvent.ACTION_CANCEL) && parentPage.archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN && hasHiddenArchive()) { + if (parentPage.dialogsType == DIALOGS_TYPE_DEFAULT && (action == MotionEvent.ACTION_UP || action == MotionEvent.ACTION_CANCEL) && parentPage.archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN && hasHiddenArchive()) { LinearLayoutManager layoutManager = (LinearLayoutManager) getLayoutManager(); int currentPosition = layoutManager.findFirstVisibleItemPosition(); if (currentPosition == 0) { @@ -1840,6 +1886,9 @@ public boolean onTouchEvent(MotionEvent e) { } ((DialogCell) view).startOutAnimation(); parentPage.archivePullViewState = ARCHIVE_ITEM_STATE_SHOWED; + if (AndroidUtilities.isAccessibilityScreenReaderEnabled()) { + AndroidUtilities.makeAccessibilityAnnouncement(LocaleController.getString(R.string.AccDescrArchivedChatsShown)); + } if (NekoConfig.openArchiveOnPull.Bool()) { AndroidUtilities.runOnUIThread(() -> { @@ -1937,7 +1986,7 @@ public void setAnimationSupportView(RecyclerListView animationSupportListView, f int p = adapter.findDialogPosition(anchorView.getDialogId()); int offset = (int) (anchorView.getTop() - anchorListView.getPaddingTop() + scrollOffset); if (p >= 0) { - ((LinearLayoutManager) animationSupportListView.getLayoutManager()).scrollToPositionWithOffset(p, adapter.fixScrollGap(this, p, offset, parentPage.dialogsType == 0 && parentPage.archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN && hasHiddenArchive(), opened)); + ((LinearLayoutManager) animationSupportListView.getLayoutManager()).scrollToPositionWithOffset(p, adapter.fixScrollGap(this, p, offset, parentPage.dialogsType == DIALOGS_TYPE_DEFAULT && parentPage.archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN && hasHiddenArchive(), opened)); } } } @@ -1988,7 +2037,11 @@ public int getMovementFlags(RecyclerView recyclerView, RecyclerView.ViewHolder v swipeFolderBack = false; return makeMovementFlags(ItemTouchHelper.UP | ItemTouchHelper.DOWN, 0); } else { - if ((filterTabsView != null && filterTabsView.getVisibility() == View.VISIBLE && SharedConfig.getChatSwipeAction(currentAccount) == SwipeGestureSettingsView.SWIPE_GESTURE_FOLDERS) || !allowSwipeDuringCurrentTouch || ((dialogId == getUserConfig().clientUserId || dialogId == 777000) && SharedConfig.getChatSwipeAction(currentAccount) == SwipeGestureSettingsView.SWIPE_GESTURE_ARCHIVE) || getMessagesController().isPromoDialog(dialogId, false) && getMessagesController().promoDialogType != MessagesController.PROMO_TYPE_PSA) { + int currentDialogsType = initialDialogsType; + try { + currentDialogsType = parentPage.dialogsAdapter.getDialogsType(); + } catch (Exception ignore) {} + if ((filterTabsView != null && filterTabsView.getVisibility() == View.VISIBLE && SharedConfig.getChatSwipeAction(currentAccount) == SwipeGestureSettingsView.SWIPE_GESTURE_FOLDERS) || !allowSwipeDuringCurrentTouch || ((dialogId == getUserConfig().clientUserId || dialogId == 777000 || currentDialogsType == 7 || currentDialogsType 
== 8) && SharedConfig.getChatSwipeAction(currentAccount) == SwipeGestureSettingsView.SWIPE_GESTURE_ARCHIVE) || getMessagesController().isPromoDialog(dialogId, false) && getMessagesController().promoDialogType != MessagesController.PROMO_TYPE_PSA) { return 0; } boolean canSwipeBack = folderId == 0 && (SharedConfig.getChatSwipeAction(currentAccount) == SwipeGestureSettingsView.SWIPE_GESTURE_MUTE || SharedConfig.getChatSwipeAction(currentAccount) == SwipeGestureSettingsView.SWIPE_GESTURE_READ || SharedConfig.getChatSwipeAction(currentAccount) == SwipeGestureSettingsView.SWIPE_GESTURE_PIN || SharedConfig.getChatSwipeAction(currentAccount) == SwipeGestureSettingsView.SWIPE_GESTURE_DELETE) && !rightSlidingDialogContainer.hasFragment(); @@ -2120,6 +2173,7 @@ public void onSwiped(RecyclerView.ViewHolder viewHolder, int direction) { cell.checkCurrentDialogIndex(true); cell.animateArchiveAvatar(); } + AndroidUtilities.runOnUIThread(() -> setDialogsListFrozen(false), 300); } SharedPreferences preferences = MessagesController.getGlobalMainSettings(); boolean hintShowed = preferences.getBoolean("archivehint_l", false) || SharedConfig.archiveHidden; @@ -2227,7 +2281,7 @@ public float getSwipeVelocityThreshold(float defaultValue) { } public interface DialogsActivityDelegate { - void didSelectDialogs(DialogsActivity fragment, ArrayList dids, CharSequence message, boolean param); + boolean didSelectDialogs(DialogsActivity fragment, ArrayList dids, CharSequence message, boolean param, TopicsFragment topicsFragment); } public DialogsActivity(Bundle args) { @@ -2241,11 +2295,11 @@ public boolean onFragmentCreate() { getConnectionsManager().updateDcSettings(); getMessagesController().getBlockedPeers(true); - if (getArguments() != null) { + if (arguments != null) { onlySelect = arguments.getBoolean("onlySelect", false); - canSelectTopics = arguments.getBoolean("canSelectTopics", false); + canSelectTopics = arguments.getBoolean("canSelectTopics", false); cantSendToChannels = arguments.getBoolean("cantSendToChannels", false); - initialDialogsType = arguments.getInt("dialogsType", 0); + initialDialogsType = arguments.getInt("dialogsType", DIALOGS_TYPE_DEFAULT); selectAlertString = arguments.getString("selectAlertString"); selectAlertStringGroup = arguments.getString("selectAlertStringGroup"); addToGroupAlertString = arguments.getString("addToGroupAlertString"); @@ -2265,9 +2319,19 @@ public boolean onFragmentCreate() { allowBots = arguments.getBoolean("allowBots", true); closeFragment = arguments.getBoolean("closeFragment", true); allowGlobalSearch = arguments.getBoolean("allowGlobalSearch", true); + + byte[] requestPeerTypeBytes = arguments.getByteArray("requestPeerType"); + if (requestPeerTypeBytes != null) { + try { + SerializedData buffer = new SerializedData(requestPeerTypeBytes); + requestPeerType = TLRPC.RequestPeerType.TLdeserialize(buffer, buffer.readInt32(true), true); + buffer.cleanup(); + } catch (Exception e) {} + } + requestPeerBotId = arguments.getLong("requestPeerBotId", 0); } - if (initialDialogsType == 0) { + if (initialDialogsType == DIALOGS_TYPE_DEFAULT) { askAboutContacts = MessagesController.getGlobalNotificationsSettings().getBoolean("askAboutContacts", true); } @@ -2316,6 +2380,7 @@ public boolean onFragmentCreate() { getNotificationCenter().addObserver(this, NotificationCenter.onDatabaseMigration); getNotificationCenter().addObserver(this, NotificationCenter.onDatabaseOpened); getNotificationCenter().addObserver(this, NotificationCenter.didClearDatabase); + 
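Reviewer note: onFragmentCreate now pulls a TLRPC.RequestPeerType out of the arguments Bundle by deserializing a byte array with SerializedData. For context, the caller side is expected to do the inverse; a hedged sketch of that write path, assuming the usual TLObject getObjectSize()/serializeToStream() pattern used elsewhere in the tree (this snippet depends on the Telegram classes and is not standalone-runnable):

// Caller-side sketch (depends on org.telegram.tgnet.SerializedData and TLRPC from the tree).
Bundle args = new Bundle();
TLRPC.TL_requestPeerTypeBroadcast peerType = new TLRPC.TL_requestPeerTypeBroadcast();
long botId = 0L; // id of the bot that issued the requestPeer button (placeholder)

SerializedData data = new SerializedData(peerType.getObjectSize());
peerType.serializeToStream(data);        // writes the constructor id first, matching readInt32(true) on the read side
args.putByteArray("requestPeerType", data.toByteArray());
args.putLong("requestPeerBotId", botId);

DialogsActivity picker = new DialogsActivity(args);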
getNotificationCenter().addObserver(this, NotificationCenter.onDatabaseReset); loadDialogs(getAccountInstance()); getMessagesController().loadPinnedDialogs(folderId, 0, null); @@ -2326,6 +2391,7 @@ public boolean onFragmentCreate() { } databaseMigrationHint = null; } + return true; } @@ -2353,12 +2419,9 @@ public void updateStatus(TLRPC.User user, boolean animated) { if (statusDrawable == null || actionBar == null) { return; } - if (user != null && user.emoji_status instanceof TLRPC.TL_emojiStatusUntil && ((TLRPC.TL_emojiStatusUntil) user.emoji_status).until > (int) (System.currentTimeMillis() / 1000)) { - statusDrawable.set(((TLRPC.TL_emojiStatusUntil) user.emoji_status).document_id, animated); - actionBar.setRightDrawableOnClick(e -> showSelectStatusDialog()); - SelectAnimatedEmojiDialog.preload(currentAccount); - } else if (user != null && user.emoji_status instanceof TLRPC.TL_emojiStatus) { - statusDrawable.set(((TLRPC.TL_emojiStatus) user.emoji_status).document_id, animated); + Long emojiStatusId = UserObject.getEmojiStatusDocumentId(user); + if (emojiStatusId != null) { + statusDrawable.set(emojiStatusId, animated); actionBar.setRightDrawableOnClick(e -> showSelectStatusDialog()); SelectAnimatedEmojiDialog.preload(currentAccount); } else if (user != null && MessagesController.getInstance(currentAccount).isPremiumUser(user)) { @@ -2438,6 +2501,7 @@ public void onFragmentDestroy() { getNotificationCenter().removeObserver(this, NotificationCenter.onDatabaseMigration); getNotificationCenter().removeObserver(this, NotificationCenter.onDatabaseOpened); getNotificationCenter().removeObserver(this, NotificationCenter.didClearDatabase); + getNotificationCenter().removeObserver(this, NotificationCenter.onDatabaseReset); if (commentView != null) { commentView.onDestroy(); } @@ -2577,7 +2641,6 @@ public void onPreToggleSearch() { } } - @Override public void onSearchExpand() { searching = true; @@ -2690,17 +2753,31 @@ public boolean canToggleSearch() { } }); - if (initialDialogsType == 2 || initialDialogsType == DIALOGS_TYPE_START_ATTACH_BOT) { + if (initialDialogsType == DIALOGS_TYPE_ADD_USERS_TO || initialDialogsType == DIALOGS_TYPE_START_ATTACH_BOT) { searchItem.setVisibility(View.GONE); } searchItem.setSearchFieldHint(LocaleController.getString("Search", R.string.Search)); searchItem.setContentDescription(LocaleController.getString("Search", R.string.Search)); if (onlySelect) { actionBar.setBackButtonImage(R.drawable.ic_ab_back); - if (initialDialogsType == 3 && selectAlertString == null) { + if (initialDialogsType == DIALOGS_TYPE_FORWARD && selectAlertString == null) { actionBar.setTitle(LocaleController.getString("ForwardTo", R.string.ForwardTo)); - } else if (initialDialogsType == 10) { + } else if (initialDialogsType == DIALOGS_TYPE_WIDGET) { actionBar.setTitle(LocaleController.getString("SelectChats", R.string.SelectChats)); + } else if (requestPeerType instanceof TLRPC.TL_requestPeerTypeUser) { + if (((TLRPC.TL_requestPeerTypeUser) requestPeerType).bot != null) { + if (((TLRPC.TL_requestPeerTypeUser) requestPeerType).bot) { + actionBar.setTitle(LocaleController.getString("ChooseBot", R.string.ChooseBot)); + } else { + actionBar.setTitle(LocaleController.getString("ChooseUser", R.string.ChooseUser)); + } + } else { + actionBar.setTitle(LocaleController.getString("ChooseUser", R.string.ChooseUser)); + } + } else if (requestPeerType instanceof TLRPC.TL_requestPeerTypeBroadcast) { + actionBar.setTitle(LocaleController.getString("ChooseChannel", R.string.ChooseChannel)); + } else if 
(requestPeerType instanceof TLRPC.TL_requestPeerTypeChat) { + actionBar.setTitle(LocaleController.getString("ChooseGroup", R.string.ChooseGroup)); } else { actionBar.setTitle(LocaleController.getString("SelectChat", R.string.SelectChat)); } @@ -2751,7 +2828,7 @@ public boolean canToggleSearch() { scrollToTop(); }); - if ((initialDialogsType == 3 && NekoConfig.showTabsOnForward.Bool()) || initialDialogsType == 0 && folderId == 0 && !onlySelect && TextUtils.isEmpty(searchString)) { + if ((initialDialogsType == DIALOGS_TYPE_FORWARD && NekoConfig.showTabsOnForward.Bool()) || initialDialogsType == DIALOGS_TYPE_DEFAULT && folderId == 0 && !onlySelect && TextUtils.isEmpty(searchString)) { scrimPaint = new Paint() { @Override public void setAlpha(int a) { @@ -2794,7 +2871,8 @@ protected void onLayout(boolean changed, int l, int t, int r, int b) { protected void onDefaultTabMoved() { if (!getMessagesController().premiumLocked) { try { - performHapticFeedback(HapticFeedbackConstants.KEYBOARD_PRESS, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); + if (!NekoConfig.disableVibration.Bool()) + performHapticFeedback(HapticFeedbackConstants.KEYBOARD_PRESS, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); } catch (Exception ignore) {} topBulletin = BulletinFactory.of(DialogsActivity.this).createSimpleBulletin(R.raw.filter_reorder, AndroidUtilities.replaceTags(LocaleController.formatString("LimitReachedReorderFolder", R.string.LimitReachedReorderFolder, LocaleController.getString(R.string.FilterAllChats))), LocaleController.getString("PremiumMore", R.string.PremiumMore), Bulletin.DURATION_PROLONG, () -> { showDialog(new PremiumFeatureBottomSheet(DialogsActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_ADVANCED_CHAT_MANAGEMENT, true)); @@ -2827,7 +2905,7 @@ private void showDeleteAlert(MessagesController.DialogFilter dialogFilter) { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } @@ -2992,37 +3070,61 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { linearLayout.setMinimumWidth(AndroidUtilities.dp(200)); linearLayout.setOrientation(LinearLayout.VERTICAL); scrimPopupWindowItems = new ActionBarMenuSubItem[4]; - for (int a = 0, N = (tabView.getId() == filterTabsView.getDefaultTabId() ? 3 : 4); a < 4; a++) { + + + boolean defaultTab = tabView.getId() == filterTabsView.getDefaultTabId(); + boolean hasUnread = false; + + + ArrayList dialogs = new ArrayList<>(defaultTab ? getMessagesController().getDialogs(folderId) : getMessagesController().getAllDialogs()); + if (!defaultTab) { + MessagesController.DialogFilter filter = getMessagesController().dialogFilters.get(tabView.getId()); + for (int i = 0; i < dialogs.size(); i++) { + if (!filter.includesDialog(getAccountInstance(), dialogs.get(i).id)) { + dialogs.remove(i); + i--; + } + } + } + for (int i = 0; i < dialogs.size(); i++) { + if (dialogs.get(i).unread_mark || dialogs.get(i).unread_count > 0) { + hasUnread = true; + } + } + for (int a = 0, N = 2 + (!defaultTab ? 1 : 0) + (hasUnread ? 
1 : 0); a < N; a++) { ActionBarMenuSubItem cell = new ActionBarMenuSubItem(getParentActivity(), a == 0, a == N - 1); if (a == 0) { if (getMessagesController().dialogFilters.size() <= 1) continue; cell.setTextAndIcon(LocaleController.getString("FilterReorder", R.string.FilterReorder), R.drawable.tabs_reorder); } else if (a == 1) { - if (N == 3) { + if (defaultTab) { cell.setTextAndIcon(LocaleController.getString("FilterEditAll", R.string.FilterEditAll), R.drawable.msg_edit); } else { cell.setTextAndIcon(LocaleController.getString("FilterEdit", R.string.FilterEdit), R.drawable.msg_edit); } - } else if (a == 2) { + } else if (a == 2 && hasUnread) { if (N == 3) continue; - cell.setTextAndIcon(LocaleController.getString("FilterDeleteItem", R.string.FilterDeleteItem), R.drawable.msg_delete); - } else { cell.setTextAndIcon(LocaleController.getString("MarkAllAsRead", R.string.MarkAllAsRead), R.drawable.msg_markread); + } else { + cell.setTextAndIcon(LocaleController.getString("FilterDeleteItem", R.string.FilterDeleteItem), R.drawable.msg_delete); } scrimPopupWindowItems[a] = cell; linearLayout.addView(cell); final int i = a; + boolean finalHasUnread = hasUnread; cell.setOnClickListener(v1 -> { if (i == 0) { resetScroll(); filterTabsView.setIsEditing(true); showDoneItem(true); } else if (i == 1) { - if (N == 3) { + if (defaultTab) { presentFragment(new FiltersSetupActivity()); } else { presentFragment(new FilterCreateActivity(dialogFilter)); } + } else if (i == 2 && finalHasUnread) { + markDialogsAsRead(dialogs); } else if (i == 2) { showDeleteAlert(dialogFilter); } else { @@ -3177,7 +3279,7 @@ public void onDeletePressed(int id) { ContentView contentView = new ContentView(context); fragmentView = contentView; - int pagesCount = (initialDialogsType == 3 && NekoConfig.showTabsOnForward.Bool()) || (folderId == 0 && initialDialogsType == 0 && !onlySelect) ? 2 : 1; + int pagesCount = (initialDialogsType == 3 && NekoConfig.showTabsOnForward.Bool()) || (folderId == 0 && initialDialogsType == DIALOGS_TYPE_DEFAULT && !onlySelect) ? 
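Reviewer note: the filter-tab context menu is now built dynamically — two base rows (reorder, edit), a delete row only for non-default tabs, and a mark-all-as-read row only when the tab's filtered dialog list actually contains unread chats. A self-contained sketch of that counting logic; Dialog below is a stand-in for TLRPC.Dialog with just the two fields the unread scan reads:

import java.util.List;

public class FilterMenuCountSketch {
    // Stand-in for TLRPC.Dialog with only the fields the unread scan looks at.
    static final class Dialog {
        final boolean unreadMark;
        final int unreadCount;
        Dialog(boolean unreadMark, int unreadCount) { this.unreadMark = unreadMark; this.unreadCount = unreadCount; }
    }

    static boolean hasUnread(List<Dialog> dialogsInTab) {
        for (Dialog d : dialogsInTab) {
            if (d.unreadMark || d.unreadCount > 0) {
                return true;
            }
        }
        return false;
    }

    // Mirrors: N = 2 + (!defaultTab ? 1 : 0) + (hasUnread ? 1 : 0)
    static int menuItemCount(boolean defaultTab, boolean hasUnread) {
        return 2 + (defaultTab ? 0 : 1) + (hasUnread ? 1 : 0);
    }

    public static void main(String[] args) {
        List<Dialog> dialogs = List.of(new Dialog(false, 0), new Dialog(false, 3));
        boolean unread = hasUnread(dialogs);
        System.out.println(menuItemCount(false, unread)); // non-default tab with unread chats -> 4 rows
    }
}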
2 : 1; viewPages = new ViewPage[pagesCount]; for (int a = 0; a < pagesCount; a++) { final ViewPage viewPage = new ViewPage(context) { @@ -3210,6 +3312,9 @@ public void setTranslationX(float translationX) { viewPage.listView.setAnimateEmptyView(true, RecyclerListView.EMPTY_VIEW_ANIMATION_TYPE_ALPHA); viewPage.listView.setClipToPadding(false); viewPage.listView.setPivotY(0); + if (initialDialogsType == DIALOGS_TYPE_BOT_REQUEST_PEER) { + viewPage.listView.setBackgroundColor(getThemedColor(Theme.key_windowBackgroundGray)); + } viewPage.dialogsItemAnimator = new DialogsItemAnimator(viewPage.listView) { @Override public void onRemoveStarting(RecyclerView.ViewHolder item) { @@ -3232,6 +3337,14 @@ public void onRemoveStarting(RecyclerView.ViewHolder item) { viewPage.listView.setInstantClick(true); viewPage.layoutManager = new LinearLayoutManager(context) { + @Override + protected int firstPosition() { + if (viewPage.dialogsType == DIALOGS_TYPE_DEFAULT && hasHiddenArchive() && viewPage.archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN) { + return 1; + } + return 0; + } + private boolean fixOffset; @Override @@ -3269,7 +3382,7 @@ public int scrollVerticallyBy(int dy, RecyclerView.Recycler recycler, RecyclerVi int measuredDy = dy; int pTop = viewPage.listView.getPaddingTop(); - if (viewPage.dialogsType == 0 && !onlySelect && folderId == 0 && dy < 0 && getMessagesController().hasHiddenArchive() && viewPage.archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN) { + if (viewPage.dialogsType == DIALOGS_TYPE_DEFAULT && !onlySelect && folderId == 0 && dy < 0 && getMessagesController().hasHiddenArchive() && viewPage.archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN) { viewPage.listView.setOverScrollMode(View.OVER_SCROLL_ALWAYS); int currentPosition = viewPage.layoutManager.findFirstVisibleItemPosition(); if (currentPosition == 0) { @@ -3317,7 +3430,7 @@ public int scrollVerticallyBy(int dy, RecyclerView.Recycler recycler, RecyclerVi viewPage.listView.setViewsOffset(ty); } - if (viewPage.dialogsType == 0 && viewPage.archivePullViewState != ARCHIVE_ITEM_STATE_PINNED && hasHiddenArchive()) { + if (viewPage.dialogsType == DIALOGS_TYPE_DEFAULT && viewPage.archivePullViewState != ARCHIVE_ITEM_STATE_PINNED && hasHiddenArchive()) { int usedDy = super.scrollVerticallyBy(measuredDy, recycler, state); if (viewPage.pullForegroundDrawable != null) { viewPage.pullForegroundDrawable.scrollDy = usedDy; @@ -3367,7 +3480,11 @@ public int scrollVerticallyBy(int dy, RecyclerView.Recycler recycler, RecyclerVi } else { startArchivePullingTime = 0; canShowHiddenArchive = false; + boolean changed = viewPage.archivePullViewState != ARCHIVE_ITEM_STATE_HIDDEN; viewPage.archivePullViewState = ARCHIVE_ITEM_STATE_HIDDEN; + if (changed && AndroidUtilities.isAccessibilityScreenReaderEnabled()) { + AndroidUtilities.makeAccessibilityAnnouncement(LocaleController.getString(R.string.AccDescrArchivedChatsHidden)); + } if (viewPage.pullForegroundDrawable != null) { viewPage.pullForegroundDrawable.resetText(); viewPage.pullForegroundDrawable.pullProgress = 0f; @@ -3405,10 +3522,13 @@ public void onLayoutChildren(RecyclerView.Recycler recycler, RecyclerView.State viewPage.listView.setVerticalScrollbarPosition(LocaleController.isRTL ? 
RecyclerListView.SCROLLBAR_POSITION_LEFT : RecyclerListView.SCROLLBAR_POSITION_RIGHT); viewPage.addView(viewPage.listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); viewPage.listView.setOnItemClickListener((view, position) -> { - if (initialDialogsType == 10) { + if (initialDialogsType == DIALOGS_TYPE_BOT_REQUEST_PEER && view instanceof TextCell) { + viewPage.dialogsAdapter.onCreateGroupForThisClick(); + return; + } else if (initialDialogsType == DIALOGS_TYPE_WIDGET) { onItemLongClick(viewPage.listView, view, position, 0, 0, viewPage.dialogsType, viewPage.dialogsAdapter); return; - } else if ((initialDialogsType == 11 || initialDialogsType == 13) && position == 1) { + } else if ((initialDialogsType == DIALOGS_TYPE_IMPORT_HISTORY_GROUPS || initialDialogsType == DIALOGS_TYPE_IMPORT_HISTORY) && position == 1) { Bundle args = new Bundle(); args.putBoolean("forImport", true); long[] array = new long[]{getUserConfig().getClientUserId()}; @@ -3433,7 +3553,7 @@ public void didFinishChatCreation(GroupCreateFinalActivity fragment, long chatId if (closeFragment) { removeSelfFromStack(); } - dialogsActivityDelegate.didSelectDialogs(DialogsActivity.this, arrayList, null, true); + dialogsActivityDelegate.didSelectDialogs(DialogsActivity.this, arrayList, null, true, null); } @Override @@ -3506,7 +3626,9 @@ public void onScrollStateChanged(RecyclerView recyclerView, int newState) { int actionBarHeight = ActionBar.getCurrentActionBarHeight(); if (scrollY != 0 && scrollY != actionBarHeight) { if (scrollY < actionBarHeight / 2) { - recyclerView.smoothScrollBy(0, -scrollY); + if (viewPages[0].listView.canScrollVertically(-1)) { + recyclerView.smoothScrollBy(0, -scrollY); + } } else if (viewPages[0].listView.canScrollVertically(1)) { recyclerView.smoothScrollBy(0, actionBarHeight - scrollY); } @@ -3547,7 +3669,7 @@ public void onScrolled(RecyclerView recyclerView, int dx, int dy) { } } if (filterTabsView != null && filterTabsView.getVisibility() == View.VISIBLE && recyclerView == viewPages[0].listView && !searching && !actionBar.isActionModeShowed() && !disableActionBarScrolling && filterTabsViewIsVisible && !rightSlidingDialogContainer.hasFragment()) { - if (dy > 0 && hasHiddenArchive() && viewPages[0].dialogsType == 0) { + if (dy > 0 && hasHiddenArchive() && viewPages[0].dialogsType == DIALOGS_TYPE_DEFAULT) { View child = recyclerView.getChildAt(0); if (child != null) { RecyclerView.ViewHolder holder = recyclerView.getChildViewHolder(child); @@ -3600,7 +3722,7 @@ protected float getViewOffset() { viewPage.pullForegroundDrawable.setWillDraw(viewPage.archivePullViewState != ARCHIVE_ITEM_STATE_PINNED); } - viewPage.dialogsAdapter = new DialogsAdapter(this, context, viewPage.dialogsType, folderId, onlySelect, selectedDialogs, currentAccount) { + viewPage.dialogsAdapter = new DialogsAdapter(this, context, viewPage.dialogsType, folderId, onlySelect, selectedDialogs, currentAccount, requestPeerType) { @Override public void notifyDataSetChanged() { viewPage.lastItemsCount = getItemCount(); @@ -3609,6 +3731,9 @@ public void notifyDataSetChanged() { } catch (Exception e) { FileLog.e(e); } + if (initialDialogsType == DIALOGS_TYPE_BOT_REQUEST_PEER) { + searchItem.setVisibility(isEmpty ? 
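Reviewer note: the patch replaces magic dialogsType literals with named constants throughout DialogsActivity. As a reading aid, the mapping implied by the literal-to-constant swaps in these hunks is listed below; it is reconstructed from the diff, not copied from the source, so the authoritative definitions remain the ones in DialogsActivity:

// Inferred from the literal -> constant swaps in this patch; authoritative values live in DialogsActivity.
public final class DialogsTypeSketch {
    public static final int DIALOGS_TYPE_DEFAULT = 0;                // replaces dialogsType == 0 checks
    public static final int DIALOGS_TYPE_ADD_USERS_TO = 2;           // replaces initialDialogsType == 2
    public static final int DIALOGS_TYPE_FORWARD = 3;                // replaces initialDialogsType == 3
    public static final int DIALOGS_TYPE_BLOCK = 9;                  // replaces putInt("dialogsType", 9)
    public static final int DIALOGS_TYPE_WIDGET = 10;                // replaces initialDialogsType == 10
    public static final int DIALOGS_TYPE_IMPORT_HISTORY_GROUPS = 11; // replaces initialDialogsType == 11
    public static final int DIALOGS_TYPE_IMPORT_HISTORY = 13;        // replaces initialDialogsType == 13
    public static final int DIALOGS_TYPE_START_ATTACH_BOT = 14;      // value visible in the removed declaration
    // DIALOGS_TYPE_BOT_REQUEST_PEER is new in this patch; its value is not visible in these hunks.

    private DialogsTypeSketch() {}
}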
View.GONE : View.VISIBLE); + } } @Override public void onButtonClicked(DialogCell dialogCell) { @@ -3628,6 +3753,11 @@ public void onButtonClicked(DialogCell dialogCell) { public void onButtonLongPress(DialogCell dialogCell) { onItemLongClick(viewPage.listView, dialogCell, viewPage.listView.getChildAdapterPosition(dialogCell), 0, 0, viewPage.dialogsType, viewPage.dialogsAdapter); } + + @Override + public void onCreateGroupForThisClick() { + createGroupForThis(); + } }; viewPage.dialogsAdapter.setRecyclerListView(viewPage.listView); viewPage.dialogsAdapter.setForceShowEmptyCell(afterSignup); @@ -3783,7 +3913,7 @@ public void needRemoveHint(long did) { showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } @@ -3812,7 +3942,7 @@ public void needClearList() { showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } @@ -3838,7 +3968,7 @@ public long getSearchForumDialogId() { }); searchViewPager.searchListView.setOnItemClickListener((view, position) -> { - if (initialDialogsType == 10) { + if (initialDialogsType == DIALOGS_TYPE_WIDGET) { onItemLongClick(searchViewPager.searchListView, view, position, 0, 0, -1, searchViewPager.dialogsSearchAdapter); return; } @@ -3884,7 +4014,7 @@ public void onLongClickRelease() { finishPreviewFragment(); return; } - if (initialDialogsType == 10) { + if (initialDialogsType == DIALOGS_TYPE_WIDGET) { if (delegate == null || selectedDialogs.isEmpty()) { return; } @@ -3892,7 +4022,7 @@ public void onLongClickRelease() { for (int i = 0; i < selectedDialogs.size(); i++) { topicKeys.add(MessagesStorage.TopicKey.of(selectedDialogs.get(i), 0)); } - delegate.didSelectDialogs(DialogsActivity.this, topicKeys, null, false); + delegate.didSelectDialogs(DialogsActivity.this, topicKeys, null, false, null); } else { if (floatingButton.getVisibility() != View.VISIBLE) { return; @@ -3907,7 +4037,7 @@ public void onLongClickRelease() { floatingButton = new RLottieImageView(context); floatingButton.setScaleType(ImageView.ScaleType.CENTER); floatingButton.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chats_actionIcon), PorterDuff.Mode.SRC_IN)); - if (initialDialogsType == 10) { + if (initialDialogsType == DIALOGS_TYPE_WIDGET) { floatingButton.setImageResource(R.drawable.floating_check); floatingButtonContainer.setContentDescription(LocaleController.getString("Done", R.string.Done)); } else { @@ -3954,20 +4084,25 @@ public void getOutline(View view, Outline outline) { fragmentLocationContextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 38, Gravity.TOP | Gravity.LEFT, 0, -36, 0, 0)); contentView.addView(fragmentLocationContextView); - fragmentContextView = new FragmentContextView(context, this, false) { - @Override - protected void playbackSpeedChanged(float value) { - if (Math.abs(value - 1.0f) > 0.001f || Math.abs(value - 1.8f) > 0.001f) { - getUndoView().showWithAction(0, Math.abs(value - 1.0f) > 0.001f ? 
UndoView.ACTION_PLAYBACK_SPEED_ENABLED : UndoView.ACTION_PLAYBACK_SPEED_DISABLED, value, null, null); - } - } - }; + fragmentContextView = new FragmentContextView(context, this, false); fragmentContextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 38, Gravity.TOP | Gravity.LEFT, 0, -36, 0, 0)); contentView.addView(fragmentContextView); fragmentContextView.setAdditionalContextView(fragmentLocationContextView); fragmentLocationContextView.setAdditionalContextView(fragmentContextView); - } else if (initialDialogsType == 3) { + + dialogsHintCell = new DialogsHintCell(context); + updateDialogsHint(); + CacheControlActivity.calculateTotalSize(size -> { + cacheSize = size; + updateDialogsHint(); + }); + CacheControlActivity.getDeviceTotalSize((totalSize, totalFreeSize) -> { + deviceSize = totalSize; + updateDialogsHint(); + }); + contentView.addView(dialogsHintCell); + } else if (initialDialogsType == DIALOGS_TYPE_FORWARD) { if (commentView != null) { commentView.onDestroy(); } @@ -4019,7 +4154,7 @@ public void onMessageSend(CharSequence message, boolean notify, int scheduleDate for (int i = 0; i < selectedDialogs.size(); i++) { topicKeys.add(MessagesStorage.TopicKey.of(selectedDialogs.get(i), 0)); } - delegate.didSelectDialogs(DialogsActivity.this, topicKeys, message, false); + delegate.didSelectDialogs(DialogsActivity.this, topicKeys, message, false, null); } @Override @@ -4212,7 +4347,7 @@ public void getOutline(View view, Outline outline) { for (int i = 0; i < selectedDialogs.size(); i++) { topicKeys.add(MessagesStorage.TopicKey.of(selectedDialogs.get(i), 0)); } - delegate.didSelectDialogs(DialogsActivity.this, topicKeys, commentView.getFieldText(), false); + delegate.didSelectDialogs(DialogsActivity.this, topicKeys, commentView.getFieldText(), false, null); }); writeButtonBackground.setOnLongClickListener(v -> { if (isNextButton) { @@ -4249,7 +4384,7 @@ public void getOutline(View view, Outline outline) { contentView.addView(animatedStatusView, LayoutHelper.createFrame(20, 20, Gravity.LEFT | Gravity.TOP)); } - if (searchString == null && initialDialogsType == 0) { + if (searchString == null && initialDialogsType == DIALOGS_TYPE_DEFAULT) { // NekoX: Remove UPDATE NOW Bottom View in DialogsActivity } @@ -4326,7 +4461,7 @@ protected void onRemoveDialogAction(long currentDialogId, int action) { actionBar.setSearchTextColor(Theme.getColor(Theme.key_actionBarDefaultArchivedSearchPlaceholder), true); } - if (!onlySelect && initialDialogsType == 0) { + if (!onlySelect && initialDialogsType == DIALOGS_TYPE_DEFAULT) { blurredView = new View(context) { @Override public void setAlpha(float alpha) { @@ -4495,7 +4630,7 @@ public boolean onTouchEvent(MotionEvent e) { LinearLayoutManager layoutManager = new LinearLayoutManager(context); layoutManager.setNeedFixEndGap(false); transitionPage.animationSupportListView.setLayoutManager(layoutManager); - transitionPage.animationSupportDialogsAdapter = new DialogsAdapter(DialogsActivity.this, context, transitionPage.dialogsType, folderId, onlySelect, selectedDialogs, currentAccount); + transitionPage.animationSupportDialogsAdapter = new DialogsAdapter(DialogsActivity.this, context, transitionPage.dialogsType, folderId, onlySelect, selectedDialogs, currentAccount, requestPeerType); transitionPage.animationSupportDialogsAdapter.setIsTransitionSupport(); transitionPage.animationSupportListView.setAdapter(transitionPage.animationSupportDialogsAdapter); transitionPage.addView(transitionPage.animationSupportListView); @@ -4535,7 +4670,7 @@ 
public void openAnimationFinished() { rightFragmentTransitionInProgress = false; actionBar.setAllowOverlayTitle(!hasFragment()); contentView.requestLayout(); - transitionPage.layoutManager.setNeedFixEndGap(!hasFragment()); + // transitionPage.layoutManager.setNeedFixEndGap(!hasFragment()); DialogsActivity.this.setScrollY(0); searchViewPager.updateTabs(); updateDrawerSwipeEnabled(); @@ -4606,6 +4741,46 @@ else if (new Random().nextInt(100) < 20) return fragmentView; } + public boolean isPremiumHintVisible() { + if (!MessagesController.getInstance(currentAccount).premiumLocked && folderId == 0) { + if (MessagesController.getInstance(currentAccount).pendingSuggestions.contains("PREMIUM_UPGRADE") && getUserConfig().isPremium() || MessagesController.getInstance(currentAccount).pendingSuggestions.contains("PREMIUM_ANNUAL") && !getUserConfig().isPremium()) { + if (UserConfig.getInstance(currentAccount).isPremium() ? !BuildVars.useInvoiceBilling() && MediaDataController.getInstance(currentAccount).getPremiumHintAnnualDiscount(true) != null : MediaDataController.getInstance(currentAccount).getPremiumHintAnnualDiscount(false) != null) { + isPremiumHintUpgrade = MessagesController.getInstance(currentAccount).pendingSuggestions.contains("PREMIUM_UPGRADE"); + return true; + } + } + } + return false; + } + + private boolean isCacheHintVisible() { + if (cacheSize == null || deviceSize == null) { + return false; + } + if ((cacheSize / (float) deviceSize) < 0.30F) { + clearCacheHintVisible(); + return false; + } + SharedPreferences prefs = MessagesController.getGlobalMainSettings(); + return System.currentTimeMillis() > prefs.getLong("cache_hint_showafter", 0L); + } + + private void resetCacheHintVisible() { + SharedPreferences prefs = MessagesController.getGlobalMainSettings(); + final long week = 1000L * 60L * 60L * 24L * 7L; + final long month = 1000L * 60L * 60L * 24L * 30L; + long period = prefs.getLong("cache_hint_period", week); + if (period <= week) { + period = month; + } + long showafter = System.currentTimeMillis() + period; + prefs.edit().putLong("cache_hint_showafter", showafter).putLong("cache_hint_period", period).apply(); + } + + private void clearCacheHintVisible() { + MessagesController.getGlobalMainSettings().edit().remove("cache_hint_showafter").remove("cache_hint_period").apply(); + } + // @Override // public ActionBar getActionBar() { // return rightSlidingDialogContainer != null && rightSlidingDialogContainer.currentActionBarView != null && rightSlidingDialogContainer.isOpenned ? 
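Reviewer note: the new dialogs-hint plumbing gates a "clear storage" banner on cache size: it is only eligible when the cache occupies at least 30% of total device storage (total, not free space), and dismissing it snoozes it via cache_hint_showafter — the stored period defaults to a week but is bumped to a month before use, so each dismissal hides the hint for roughly 30 days. A self-contained sketch of that logic with a plain map standing in for SharedPreferences:

import java.util.HashMap;
import java.util.Map;

public class CacheHintSketch {
    static final long WEEK = 1000L * 60L * 60L * 24L * 7L;
    static final long MONTH = 1000L * 60L * 60L * 24L * 30L;

    // Stand-in for the "cache_hint_*" entries in global main settings.
    static final Map<String, Long> prefs = new HashMap<>();

    // Eligible only when the cache takes at least 30% of total device storage
    // and any previous snooze has expired.
    static boolean isCacheHintVisible(long cacheSize, long deviceSize, long now) {
        if (deviceSize <= 0 || (cacheSize / (float) deviceSize) < 0.30f) {
            return false;
        }
        return now > prefs.getOrDefault("cache_hint_showafter", 0L);
    }

    // Mirrors resetCacheHintVisible(): the stored period starts at a week but is bumped
    // to a month before being applied.
    static void snooze(long now) {
        long period = prefs.getOrDefault("cache_hint_period", WEEK);
        if (period <= WEEK) {
            period = MONTH;
        }
        prefs.put("cache_hint_showafter", now + period);
        prefs.put("cache_hint_period", period);
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        System.out.println(isCacheHintVisible(40, 100, now)); // true: 40% of storage, never snoozed
        snooze(now);
        System.out.println(isCacheHintVisible(40, 100, now)); // false: snoozed for a month
    }
}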
rightSlidingDialogContainer.currentActionBarView : super.getActionBar(); @@ -4634,31 +4809,21 @@ public void showSelectStatusDialog() { SelectAnimatedEmojiDialog popupLayout = new SelectAnimatedEmojiDialog(this, getContext(), true, xoff, SelectAnimatedEmojiDialog.TYPE_EMOJI_STATUS, getResourceProvider()) { @Override protected void onEmojiSelected(View emojiView, Long documentId, TLRPC.Document document, Integer until) { - TLRPC.TL_account_updateEmojiStatus req = new TLRPC.TL_account_updateEmojiStatus(); + TLRPC.EmojiStatus emojiStatus; if (documentId == null) { - req.emoji_status = new TLRPC.TL_emojiStatusEmpty(); + emojiStatus = new TLRPC.TL_emojiStatusEmpty(); } else if (until != null) { - req.emoji_status = new TLRPC.TL_emojiStatusUntil(); - ((TLRPC.TL_emojiStatusUntil) req.emoji_status).document_id = documentId; - ((TLRPC.TL_emojiStatusUntil) req.emoji_status).until = until; + emojiStatus = new TLRPC.TL_emojiStatusUntil(); + ((TLRPC.TL_emojiStatusUntil) emojiStatus).document_id = documentId; + ((TLRPC.TL_emojiStatusUntil) emojiStatus).until = until; } else { - req.emoji_status = new TLRPC.TL_emojiStatus(); - ((TLRPC.TL_emojiStatus) req.emoji_status).document_id = documentId; - } - TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(UserConfig.getInstance(currentAccount).getClientUserId()); - if (user != null) { - user.emoji_status = req.emoji_status; - NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.userEmojiStatusUpdated, user); - getMessagesController().updateEmojiStatusUntilUpdate(user.id, user.emoji_status); + emojiStatus = new TLRPC.TL_emojiStatus(); + ((TLRPC.TL_emojiStatus) emojiStatus).document_id = documentId; } + getMessagesController().updateEmojiStatus(emojiStatus); if (documentId != null) { animatedStatusView.animateChange(ReactionsLayoutInBubble.VisibleReaction.fromCustomEmoji(documentId)); } - ConnectionsManager.getInstance(currentAccount).sendRequest(req, (res, err) -> { - if (!(res instanceof TLRPC.TL_boolTrue)) { - // TODO: reject - } - }); if (popup[0] != null) { selectAnimatedEmojiDialog = null; popup[0].dismiss(); @@ -4711,6 +4876,225 @@ private void updateCommentView() { commentViewPreviousTop = top; } + private void updateDialogsHint() { + if (dialogsHintCell == null) { + return; + } + if (isPremiumHintVisible()) { + dialogsHintCell.setVisibility(View.VISIBLE); + dialogsHintCell.setOnClickListener(v -> { + presentFragment(new PremiumPreviewFragment("dialogs_hint").setSelectAnnualByDefault()); + AndroidUtilities.runOnUIThread(() -> { + MessagesController.getInstance(currentAccount).removeSuggestion(0, isPremiumHintUpgrade ? "PREMIUM_UPGRADE" : "PREMIUM_ANNUAL"); + updateDialogsHint(); + }, 250); + }); + dialogsHintCell.setText( + AndroidUtilities.replaceSingleTag( + LocaleController.formatString(isPremiumHintUpgrade ? R.string.SaveOnAnnualPremiumTitle : R.string.UpgradePremiumTitle, MediaDataController.getInstance(currentAccount).getPremiumHintAnnualDiscount(false)), + Theme.key_windowBackgroundWhiteValueText, + 0, + null + ), + LocaleController.getString(isPremiumHintUpgrade ? 
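Reviewer note: the emoji-status handling is simplified in two ways — updateStatus now asks UserObject.getEmojiStatusDocumentId(user) instead of checking TL_emojiStatus/TL_emojiStatusUntil by hand, and onEmojiSelected delegates the network update to MessagesController.updateEmojiStatus instead of building TL_account_updateEmojiStatus itself. The helper presumably centralizes the expiry check; a self-contained sketch of that behaviour with stand-in status types (the real helper lives in UserObject):

public class EmojiStatusSketch {
    // Stand-ins for the TLRPC emoji status variants referenced in the hunks.
    interface EmojiStatus {}
    static final class EmojiStatusEmpty implements EmojiStatus {}
    static final class EmojiStatusPlain implements EmojiStatus {
        final long documentId;
        EmojiStatusPlain(long documentId) { this.documentId = documentId; }
    }
    static final class EmojiStatusUntil implements EmojiStatus {
        final long documentId;
        final int until; // unix seconds
        EmojiStatusUntil(long documentId, int until) { this.documentId = documentId; this.until = until; }
    }

    // What UserObject.getEmojiStatusDocumentId presumably folds together:
    // the document id for a plain status, or for a timed status that has not expired yet.
    static Long getEmojiStatusDocumentId(EmojiStatus status, int nowSeconds) {
        if (status instanceof EmojiStatusPlain) {
            return ((EmojiStatusPlain) status).documentId;
        }
        if (status instanceof EmojiStatusUntil && ((EmojiStatusUntil) status).until > nowSeconds) {
            return ((EmojiStatusUntil) status).documentId;
        }
        return null; // empty, expired or unknown -> fall back to the premium star / plain icon
    }

    public static void main(String[] args) {
        int now = (int) (System.currentTimeMillis() / 1000);
        System.out.println(getEmojiStatusDocumentId(new EmojiStatusUntil(123L, now + 60), now)); // 123
        System.out.println(getEmojiStatusDocumentId(new EmojiStatusUntil(123L, now - 60), now)); // null
    }
}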
R.string.UpgradePremiumMessage : R.string.SaveOnAnnualPremiumMessage) + ); + } else if (isCacheHintVisible()) { + dialogsHintCell.setVisibility(View.VISIBLE); + dialogsHintCell.setOnClickListener(v -> { + presentFragment(new CacheControlActivity()); + AndroidUtilities.runOnUIThread(() -> { + resetCacheHintVisible(); + updateDialogsHint(); + }, 250); + }); + dialogsHintCell.setText( + AndroidUtilities.replaceSingleTag( + LocaleController.formatString(R.string.ClearStorageHintTitle, AndroidUtilities.formatFileSize(cacheSize)), + Theme.key_windowBackgroundWhiteValueText, + 0, + null + ), + LocaleController.getString(R.string.ClearStorageHintMessage) + ); + } else { + dialogsHintCell.setVisibility(View.GONE); + } + } + + private void createGroupForThis() { + AlertDialog progress = new AlertDialog(getContext(), AlertDialog.ALERT_TYPE_SPINNER); + if (requestPeerType instanceof TLRPC.TL_requestPeerTypeBroadcast) { + Bundle args = new Bundle(); + args.putInt("step", 0); + if (requestPeerType.has_username != null) { + args.putBoolean("forcePublic", requestPeerType.has_username); + } + ChannelCreateActivity fragment = new ChannelCreateActivity(args); + fragment.setOnFinishListener((fragment2, chatId) -> { + Utilities.doCallbacks( + next -> { + TLRPC.Chat chat = getMessagesController().getChat(chatId); + showSendToBotAlert(chat, next, () -> { + DialogsActivity.this.removeSelfFromStack(); + fragment.removeSelfFromStack(); + fragment2.finishFragment(); + }); + }, + next -> { + progress.showDelayed(150); + if (requestPeerType.bot_participant != null && requestPeerType.bot_participant) { + TLRPC.User bot = getMessagesController().getUser(requestPeerBotId); + getMessagesController().addUserToChat(chatId, bot, 0, null, DialogsActivity.this, false, next, err -> { + next.run(); + return true; + }); + } else { + next.run(); + } + }, + next -> { + if (requestPeerType.bot_admin_rights != null) { + TLRPC.User bot = getMessagesController().getUser(requestPeerBotId); + getMessagesController().setUserAdminRole(chatId, bot, requestPeerType.bot_admin_rights, null, false, DialogsActivity.this, !(requestPeerType.bot_participant != null && requestPeerType.bot_participant), true, null, next, err -> { + next.run(); + return true; + }); + } else { + next.run(); + } + }, + next -> { + if (requestPeerType.user_admin_rights != null) { + TLRPC.Chat chat = getMessagesController().getChat(chatId); + getMessagesController().setUserAdminRole(chatId, getAccountInstance().getUserConfig().getCurrentUser(), ChatRightsEditActivity.rightsOR(chat.admin_rights, requestPeerType.user_admin_rights), null, true, DialogsActivity.this, false, true, null, next, err -> { + next.run(); + return true; + }); + } else { + next.run(); + } + }, + next -> { + progress.dismiss(); + getMessagesController().loadChannelParticipants(chatId); + DialogsActivityDelegate delegate = DialogsActivity.this.delegate; + DialogsActivity.this.removeSelfFromStack(); + fragment.removeSelfFromStack(); + fragment2.finishFragment(); + if (delegate != null) { + ArrayList keys = new ArrayList<>(); + keys.add(MessagesStorage.TopicKey.of(-chatId, 0)); + delegate.didSelectDialogs(DialogsActivity.this, keys, null, false, null); + } + } + ); + }); + presentFragment(fragment); + } else if (requestPeerType instanceof TLRPC.TL_requestPeerTypeChat) { + Bundle args = new Bundle(); + long[] array; + if (requestPeerType.bot_participant != null && requestPeerType.bot_participant) { + array = new long[]{ getUserConfig().getClientUserId(), requestPeerBotId }; + } else { + array = new long[]{ 
getUserConfig().getClientUserId() }; + } + args.putLongArray("result", array); + args.putInt("chatType", requestPeerType.forum != null && requestPeerType.forum ? ChatObject.CHAT_TYPE_FORUM : ChatObject.CHAT_TYPE_MEGAGROUP); + args.putBoolean("canToggleTopics", false); + GroupCreateFinalActivity activity = new GroupCreateFinalActivity(args); + activity.setDelegate(new GroupCreateFinalActivity.GroupCreateFinalActivityDelegate() { + @Override + public void didStartChatCreation() {} + @Override + public void didFailChatCreation() {} + @Override + public void didFinishChatCreation(GroupCreateFinalActivity fragment, long chatId) { + BaseFragment[] lastFragments = new BaseFragment[] { fragment, null }; + Utilities.doCallbacks( + next -> { + if (requestPeerType.has_username != null && requestPeerType.has_username) { + Bundle args = new Bundle(); + args.putInt("step", 1); + args.putLong("chat_id", chatId); + args.putBoolean("forcePublic", requestPeerType.has_username); + ChannelCreateActivity fragment2 = new ChannelCreateActivity(args); + fragment2.setOnFinishListener((_fragment, _chatId) -> next.run()); + presentFragment(fragment2); + lastFragments[1] = fragment2; + } else { + next.run(); + } + }, + next -> { + TLRPC.Chat chat = getMessagesController().getChat(chatId); + showSendToBotAlert(chat, next, () -> { + DialogsActivity.this.removeSelfFromStack(); + if (lastFragments[1] != null) { + lastFragments[0].removeSelfFromStack(); + lastFragments[1].finishFragment(); + } else { + lastFragments[0].finishFragment(); + } + }); + }, + next -> { + progress.showDelayed(150); + if (requestPeerType.bot_participant != null && requestPeerType.bot_participant) { + TLRPC.User bot = getMessagesController().getUser(requestPeerBotId); + getMessagesController().addUserToChat(chatId, bot, 0, null, DialogsActivity.this, false, next, err -> { + next.run(); + return true; + }); + } else { + next.run(); + } + }, + next -> { + if (requestPeerType.bot_admin_rights != null) { + TLRPC.User bot = getMessagesController().getUser(requestPeerBotId); + getMessagesController().setUserAdminRole(chatId, bot, requestPeerType.bot_admin_rights, null, false, DialogsActivity.this, !(requestPeerType.bot_participant != null && requestPeerType.bot_participant), true, null, next, err -> { + next.run(); + return true; + }); + } else { + next.run(); + } + }, + next -> { + if (requestPeerType.user_admin_rights != null) { + TLRPC.Chat chat = getMessagesController().getChat(chatId); + getMessagesController().setUserAdminRole(chatId, getAccountInstance().getUserConfig().getCurrentUser(), ChatRightsEditActivity.rightsOR(chat.admin_rights, requestPeerType.user_admin_rights), null, false, DialogsActivity.this, false, true, null, next, err -> { + next.run(); + return true; + }); + } else { + next.run(); + } + }, + next -> { + progress.dismiss(); + getMessagesController().loadChannelParticipants(chatId); + DialogsActivityDelegate delegate = DialogsActivity.this.delegate; + DialogsActivity.this.removeSelfFromStack(); + if (lastFragments[1] != null) { + lastFragments[0].removeSelfFromStack(); + lastFragments[1].finishFragment(); + } else { + lastFragments[0].finishFragment(); + } + if (delegate != null) { + ArrayList keys = new ArrayList<>(); + keys.add(MessagesStorage.TopicKey.of(-chatId, 0)); + delegate.didSelectDialogs(DialogsActivity.this, keys, null, false, null); + } + } + ); + } + }); + presentFragment(activity); + } + } + private void updateAppUpdateViews(boolean animated) { // NekoX: Remove UPDATE NOW Bottom View in DialogsActivity } @@ -4969,6 
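Reviewer note: createGroupForThis sequences its asynchronous steps (create the chat, add the bot, grant bot admin rights, grant the user's rights, hand the result back to the delegate) with Utilities.doCallbacks, where each step receives a "next" runnable and decides when to advance. A self-contained sketch of that chaining pattern — a simplified re-implementation for illustration, not the real Utilities helper:

import java.util.function.Consumer;

public class CallbackChainSketch {
    // Runs each step in order; a step continues the chain by invoking the `next` it is given.
    @SafeVarargs
    static void doCallbacks(Consumer<Runnable>... steps) {
        run(0, steps);
    }

    private static void run(int index, Consumer<Runnable>[] steps) {
        if (index >= steps.length) {
            return;
        }
        steps[index].accept(() -> run(index + 1, steps));
    }

    public static void main(String[] args) {
        doCallbacks(
                next -> { System.out.println("create chat");        next.run(); },
                next -> { System.out.println("add bot to chat");    next.run(); },
                next -> { System.out.println("grant bot rights");   next.run(); },
                next -> { System.out.println("return chat to bot"); /* last step: nothing after it */ }
        );
    }
}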
+5353,7 @@ public void onItemClick(int id) { int[] position = new int[2]; passcodeItem.getLocationInWindow(position); ((LaunchActivity) getParentActivity()).showPasscodeActivity(false, true, position[0] + passcodeItem.getMeasuredWidth() / 2, position[1] + passcodeItem.getMeasuredHeight() / 2, () -> passcodeItem.setAlpha(1.0f), () -> passcodeItem.setAlpha(0.0f)); + getNotificationsController().showNotifications(); updatePasscodeButton(); } else if (id == 2) { presentFragment(new ProxyListActivity()); @@ -5106,7 +5491,7 @@ private void switchToCurrentSelectedMode(boolean animated) { viewPages[1].isLocked = filter.locked; viewPages[a].dialogsAdapter.setDialogsType(viewPages[a].dialogsType); - viewPages[a].layoutManager.scrollToPositionWithOffset(viewPages[a].dialogsType == 0 && hasHiddenArchive() && viewPages[a].archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN ? 1 : 0, (int) actionBar.getTranslationY()); + viewPages[a].layoutManager.scrollToPositionWithOffset(viewPages[a].dialogsType == DIALOGS_TYPE_DEFAULT && hasHiddenArchive() && viewPages[a].archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN ? 1 : 0, (int) actionBar.getTranslationY()); checkListLoad(viewPages[a]); } @@ -5219,7 +5604,7 @@ private void updateFilterTabs(boolean force, boolean animated) { canShowFilterTabsView = false; updateFilterTabsVisibility(animated); for (int a = 0; a < viewPages.length; a++) { - if (viewPages[a].dialogsType == 0 && viewPages[a].archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN && hasHiddenArchive()) { + if (viewPages[a].dialogsType == DIALOGS_TYPE_DEFAULT && viewPages[a].archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN && hasHiddenArchive()) { int p = viewPages[a].layoutManager.findFirstVisibleItemPosition(); if (p == 0 || p == 1) { viewPages[a].layoutManager.scrollToPositionWithOffset(1, (int) actionBar.getTranslationY()); @@ -5406,7 +5791,7 @@ public void onShow(Bulletin bulletin) { @Override public int getTopOffset(int tag) { - return (actionBar != null ? actionBar.getMeasuredHeight() + (int) actionBar.getTranslationY() : 0) + (filterTabsView != null && filterTabsView.getVisibility() == View.VISIBLE ? filterTabsView.getMeasuredHeight() : 0) + (fragmentContextView != null && fragmentContextView.isCallTypeVisible() ? AndroidUtilities.dp(fragmentContextView.getStyleHeight()) : 0); + return (actionBar != null ? actionBar.getMeasuredHeight() + (int) actionBar.getTranslationY() : 0) + (filterTabsView != null && filterTabsView.getVisibility() == View.VISIBLE ? filterTabsView.getMeasuredHeight() : 0) + (fragmentContextView != null && fragmentContextView.isCallTypeVisible() ? AndroidUtilities.dp(fragmentContextView.getStyleHeight()) : 0) + (dialogsHintCell != null && dialogsHintCell.getVisibility() == View.VISIBLE ? 
dialogsHintCell.getHeight() : 0); } }); if (searchIsShowed) { @@ -5518,31 +5903,6 @@ public void onBecomeFullyHidden() { } } - @Override - public void onBecomeFullyVisible() { - super.onBecomeFullyVisible(); - if (SharedConfig.useLNavigation) { - if (viewPages != null) { - for (int a = 0; a < viewPages.length; a++) { - if (viewPages[a].dialogsType == 0 && viewPages[a].archivePullViewState == ARCHIVE_ITEM_STATE_HIDDEN && viewPages[a].layoutManager.findFirstVisibleItemPosition() == 0 && hasHiddenArchive()) { - viewPages[a].layoutManager.scrollToPositionWithOffset(1, (int) actionBar.getTranslationY()); - } - if (a == 0) { - viewPages[a].dialogsAdapter.resume(); - } else { - viewPages[a].dialogsAdapter.pause(); - } - } - } - if (searchIsShowed) { - AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); - } - updateVisibleRows(0, false); - updateProxyButton(false, true); - checkSuggestClearDatabase(); - } - } - @Override public void setInPreviewMode(boolean value) { super.setInPreviewMode(value); @@ -5675,6 +6035,7 @@ private void showSearch(boolean show, boolean startFromDownloads, boolean animat if (!show && filterTabsView != null && canShowFilterTabsView) { filterTabsView.setVisibility(View.VISIBLE); } + final boolean budget = SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW || !LiteMode.isEnabled(LiteMode.FLAG_CHAT_SCALE); if (animated) { if (show) { searchViewPager.setVisibility(View.VISIBLE); @@ -5695,15 +6056,25 @@ private void showSearch(boolean show, boolean startFromDownloads, boolean animat searchAnimator = new AnimatorSet(); ArrayList animators = new ArrayList<>(); animators.add(ObjectAnimator.ofFloat(viewPages[0], View.ALPHA, show ? 0.0f : 1.0f)); - animators.add(ObjectAnimator.ofFloat(viewPages[0], View.SCALE_X, show ? 0.9f : 1.0f)); - animators.add(ObjectAnimator.ofFloat(viewPages[0], View.SCALE_Y, show ? 0.9f : 1.0f)); + if (!budget) { + animators.add(ObjectAnimator.ofFloat(viewPages[0], View.SCALE_X, show ? 0.9f : 1.0f)); + animators.add(ObjectAnimator.ofFloat(viewPages[0], View.SCALE_Y, show ? 0.9f : 1.0f)); + } else { + viewPages[0].setScaleX(1); + viewPages[0].setScaleY(1); + } if (rightSlidingDialogContainer != null) { rightSlidingDialogContainer.setVisibility(View.VISIBLE); animators.add(ObjectAnimator.ofFloat(rightSlidingDialogContainer, View.ALPHA, show ? 0.0f : 1.0f)); } animators.add(ObjectAnimator.ofFloat(searchViewPager, View.ALPHA, show ? 1.0f : 0.0f)); - animators.add(ObjectAnimator.ofFloat(searchViewPager, View.SCALE_X, show ? 1.0f : 1.05f)); - animators.add(ObjectAnimator.ofFloat(searchViewPager, View.SCALE_Y, show ? 1.0f : 1.05f)); + if (!budget) { + animators.add(ObjectAnimator.ofFloat(searchViewPager, View.SCALE_X, show ? 1.0f : 1.05f)); + animators.add(ObjectAnimator.ofFloat(searchViewPager, View.SCALE_Y, show ? 1.0f : 1.05f)); + } else { + searchViewPager.setScaleX(1); + searchViewPager.setScaleY(1); + } if (passcodeItem != null) { animators.add(ObjectAnimator.ofFloat(passcodeItem.getIconView(), View.ALPHA, show ? 0 : 1f)); } @@ -5751,7 +6122,6 @@ public void onAnimationEnd(Animator animation) { tabsAlphaAnimator.setDuration(100); } else { tabsAlphaAnimator.setDuration(show ? 200 : 180); - } } } @@ -5835,12 +6205,22 @@ public void onAnimationCancel(Animator animation) { viewPages[0].listView.show(); } viewPages[0].setAlpha(show ? 0.0f : 1.0f); - viewPages[0].setScaleX(show ? 0.9f : 1.0f); - viewPages[0].setScaleY(show ? 0.9f : 1.0f); + if (!budget) { + viewPages[0].setScaleX(show ? 
0.9f : 1.0f); + viewPages[0].setScaleY(show ? 0.9f : 1.0f); + } else { + viewPages[0].setScaleX(1); + viewPages[0].setScaleY(1); + } searchViewPager.setAlpha(show ? 1.0f : 0.0f); filtersView.setAlpha(show ? 1.0f : 0.0f); - searchViewPager.setScaleX(show ? 1.0f : 1.1f); - searchViewPager.setScaleY(show ? 1.0f : 1.1f); + if (!budget) { + searchViewPager.setScaleX(show ? 1.0f : 1.1f); + searchViewPager.setScaleY(show ? 1.0f : 1.1f); + } else { + searchViewPager.setScaleX(1); + searchViewPager.setScaleY(1); + } if (filterTabsView != null && filterTabsView.getVisibility() == View.VISIBLE) { filterTabsView.setTranslationY(show ? -AndroidUtilities.dp(44) : 0); filterTabsView.getTabsContainer().setAlpha(show ? 0.0f : 1.0f); @@ -5878,6 +6258,9 @@ public boolean onlyDialogsAdapter() { } private void updateFilterTabsVisibility(boolean animated) { + if (fragmentView == null) { + return; + } if (isPaused || databaseMigrationHint != null) { animated = false; } @@ -5959,33 +6342,20 @@ public void setSearchAnimationProgress(float progress, boolean full) { if (fragmentView != null) { fragmentView.invalidate(); } + if (dialogsHintCell != null) { + dialogsHintCell.setAlpha(1f - progress); + } + final boolean budget = SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW || !LiteMode.isEnabled(LiteMode.FLAG_CHAT_SCALE); if (full) { -// -// if (show) { -// searchViewPager.setVisibility(View.VISIBLE); -// searchViewPager.reset(); -// updateFiltersView(true, null, null, false, false); -// if (searchTabsView != null) { -// searchTabsView.hide(false, false); -// searchTabsView.setVisibility(View.VISIBLE); -// } -// } else { -// viewPages[0].listView.setVisibility(View.VISIBLE); -// viewPages[0].setVisibility(View.VISIBLE); -// } - -// setDialogsListFrozen(true); -// viewPages[0].listView.setVerticalScrollBarEnabled(false); -// searchViewPager.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); -// searchAnimator = new AnimatorSet(); -// ArrayList animators = new ArrayList<>(); if (viewPages[0] != null) { if (progress < 1f) { viewPages[0].setVisibility(View.VISIBLE); } viewPages[0].setAlpha(1f - progress); - viewPages[0].setScaleX(.9f + .1f * progress); - viewPages[0].setScaleY(.9f + .1f * progress); + if (!budget) { + viewPages[0].setScaleX(.9f + .1f * progress); + viewPages[0].setScaleY(.9f + .1f * progress); + } } if (rightSlidingDialogContainer != null) { if (progress >= 1f) { @@ -5997,8 +6367,10 @@ public void setSearchAnimationProgress(float progress, boolean full) { } if (searchViewPager != null) { searchViewPager.setAlpha(progress); - searchViewPager.setScaleX(1f + .05f * (1f - progress)); - searchViewPager.setScaleY(1f + .05f * (1f - progress)); + if (!budget) { + searchViewPager.setScaleX(1f + .05f * (1f - progress)); + searchViewPager.setScaleY(1f + .05f * (1f - progress)); + } } if (passcodeItem != null) { passcodeItem.getIconView().setAlpha(1f - progress); @@ -6012,99 +6384,6 @@ public void setSearchAnimationProgress(float progress, boolean full) { if (filterTabsView != null && filterTabsView.getVisibility() == View.VISIBLE) { filterTabsView.getTabsContainer().setAlpha(1f - progress); } - -// if (filterTabsViewIsVisible) { -// int backgroundColor1 = Theme.getColor(folderId == 0 ? 
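
For reference (not part of the patch): several hunks above gate the 0.9/1.05 scale animations behind a "budget" flag (low device performance class, or LiteMode.FLAG_CHAT_SCALE disabled) and fall back to alpha-only transitions with the scale pinned at 1. A rough Android sketch of that guard, assuming a boolean computed the same way as in the patch; only the alpha/scale handling is shown and the class name is illustrative.

import android.animation.Animator;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.view.View;

import java.util.ArrayList;

public final class SearchTransition {

    // Builds the show/hide animator set; on "budget" devices the scale
    // animators are skipped and the views are snapped to scale 1.
    public static AnimatorSet build(View page, View searchPager, boolean show, boolean budget) {
        ArrayList<Animator> animators = new ArrayList<>();
        animators.add(ObjectAnimator.ofFloat(page, View.ALPHA, show ? 0f : 1f));
        animators.add(ObjectAnimator.ofFloat(searchPager, View.ALPHA, show ? 1f : 0f));
        if (!budget) {
            animators.add(ObjectAnimator.ofFloat(page, View.SCALE_X, show ? 0.9f : 1f));
            animators.add(ObjectAnimator.ofFloat(page, View.SCALE_Y, show ? 0.9f : 1f));
            animators.add(ObjectAnimator.ofFloat(searchPager, View.SCALE_X, show ? 1f : 1.05f));
            animators.add(ObjectAnimator.ofFloat(searchPager, View.SCALE_Y, show ? 1f : 1.05f));
        } else {
            page.setScaleX(1f);
            page.setScaleY(1f);
            searchPager.setScaleX(1f);
            searchPager.setScaleY(1f);
        }
        AnimatorSet set = new AnimatorSet();
        set.playTogether(animators);
        return set;
    }
}
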
Theme.key_actionBarDefault : Theme.key_actionBarDefaultArchived); -// int backgroundColor2 = Theme.getColor(Theme.key_windowBackgroundWhite); -// int sum = Math.abs(Color.red(backgroundColor1) - Color.red(backgroundColor2)) + Math.abs(Color.green(backgroundColor1) - Color.green(backgroundColor2)) + Math.abs(Color.blue(backgroundColor1) - Color.blue(backgroundColor2)); -// searchAnimationTabsDelayedCrossfade = sum / 255f > 0.3f; -// } else { -// searchAnimationTabsDelayedCrossfade = true; -// } -// if (!show) { -// searchAnimator.setStartDelay(20); -// if (tabsAlphaAnimator != null) { -// if (searchAnimationTabsDelayedCrossfade) { -// tabsAlphaAnimator.setStartDelay(80); -// tabsAlphaAnimator.setDuration(100); -// } else { -// tabsAlphaAnimator.setDuration(show ? 200 : 180); -// -// } -// } -// } -// searchAnimator.addListener(new AnimatorListenerAdapter() { -// @Override -// public void onAnimationEnd(Animator animation) { -// getNotificationCenter().onAnimationFinish(animationIndex); -// if (searchAnimator != animation) { -// return; -// } -// setDialogsListFrozen(false); -// if (show) { -// viewPages[0].listView.hide(); -// if (filterTabsView != null) { -// filterTabsView.setVisibility(View.GONE); -// } -// searchWasFullyShowed = true; -// AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); -// searchItem.setVisibility(View.GONE); -// if (rightSlidingDialogContainer != null) { -// rightSlidingDialogContainer.setVisibility(View.GONE); -// } -// } else { -// searchItem.collapseSearchFilters(); -// whiteActionBar = false; -// searchViewPager.setVisibility(View.GONE); -// if (searchTabsView != null) { -// searchTabsView.setVisibility(View.GONE); -// } -// searchItem.clearSearchFilters(); -// searchViewPager.clear(); -// filtersView.setVisibility(View.GONE); -// viewPages[0].listView.show(); -// if (!onlySelect) { -// hideFloatingButton(false); -// } -// searchWasFullyShowed = false; -// if (rightSlidingDialogContainer != null) { -// rightSlidingDialogContainer.setVisibility(View.VISIBLE); -// } -// } -// -// if (fragmentView != null) { -// fragmentView.requestLayout(); -// } -// -// setSearchAnimationProgress(show ? 1f : 0, false); -// -// viewPages[0].listView.setVerticalScrollBarEnabled(true); -// searchViewPager.setBackground(null); -// searchAnimator = null; -// -// if (downloadsItem != null) { -// downloadsItem.setAlpha(show ? 
0 : 1f); -// } -// } -// -// @Override -// public void onAnimationCancel(Animator animation) { -// getNotificationCenter().onAnimationFinish(animationIndex); -// if (searchAnimator == animation) { -// if (show) { -// viewPages[0].listView.hide(); -// } else { -// viewPages[0].listView.show(); -// } -// searchAnimator = null; -// } -// } -// }); -// animationIndex = getNotificationCenter().setAnimationInProgress(animationIndex, null); -// searchAnimator.start(); -// if (tabsAlphaAnimator != null) { -// tabsAlphaAnimator.start(); -// } } updateContextViewPosition(); } @@ -6324,7 +6603,7 @@ private void onItemClick(View view, int position, RecyclerListView.Adapter adapt if (!validateSlowModeDialog(dialogId)) { return; } - if (!getMessagesController().isForum(dialogId) && (!selectedDialogs.isEmpty() || (initialDialogsType == 3 && selectAlertString != null))) { + if (!getMessagesController().isForum(dialogId) && (!selectedDialogs.isEmpty() || (initialDialogsType == DIALOGS_TYPE_FORWARD && selectAlertString != null))) { if (!selectedDialogs.contains(dialogId) && !checkCanWrite(dialogId)) { return; } @@ -6374,7 +6653,7 @@ private void onItemClick(View view, int position, RecyclerListView.Adapter adapt searchObject = null; } } - boolean canOpenInRightSlidingView = !(LocaleController.isRTL || searching || (AndroidUtilities.isTablet() && folderId != 0)); + boolean canOpenInRightSlidingView = !(LocaleController.isRTL || searching || (AndroidUtilities.isTablet() && folderId != 0)) && LiteMode.isEnabled(LiteMode.FLAG_CHAT_FORUM_TWOCOLUMN); args.putInt("dialog_folder_id", folderId); args.putInt("dialog_filter_id", filterId); if (AndroidUtilities.isTablet() && (!getMessagesController().isForum(dialogId) || !canOpenInRightSlidingView)) { @@ -6411,7 +6690,7 @@ private void onItemClick(View view, int position, RecyclerListView.Adapter adapt if (getMessagesController().checkCanOpenChat(args, DialogsActivity.this)) { TLRPC.Chat chat = getMessagesController().getChat(-dialogId); if (chat != null && chat.forum && topicId == 0 && !NaConfig.INSTANCE.getShowForumAsNormalChat().Bool()) { - if (SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW) { + if (!LiteMode.isEnabled(LiteMode.FLAG_CHAT_FORUM_TWOCOLUMN)) { presentFragment(new TopicsFragment(args)); } else { if (!canOpenInRightSlidingView) { @@ -6506,7 +6785,7 @@ private boolean onItemLongClick(RecyclerListView listView, View view, int positi showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } return true; } @@ -6806,9 +7085,17 @@ public boolean showChatPreview(DialogCell cell) { if (containsFilter && filter != null) { maxPinnedCount = 100 - filter.alwaysShow.size(); } else if (folderId != 0 || filter != null) { - maxPinnedCount = getMessagesController().maxFolderPinnedDialogsCount; + if (getUserConfig().isPremium()) { + maxPinnedCount = getMessagesController().maxFolderPinnedDialogsCountPremium; + } else { + maxPinnedCount = getMessagesController().maxFolderPinnedDialogsCountDefault; + } } else { - maxPinnedCount = getMessagesController().maxPinnedDialogsCount; + if (getUserConfig().isPremium()) { + maxPinnedCount = getMessagesController().maxPinnedDialogsCountPremium; + } else { + maxPinnedCount = getMessagesController().maxPinnedDialogsCountDefault; + } } hasPinAction[0] = !(newPinnedSecretCount + pinnedSecretCount > 
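
For reference (not part of the patch): the pin-limit hunks above stop reading a single maxPinnedDialogsCount and instead choose between a premium and a default limit, and between the folder and top-level variants. A small sketch of that selection with the surrounding types simplified to plain Java; the method and parameter names are illustrative, while in the patch the values come from MessagesController fields of the corresponding names.

// Mirrors the selection the patch makes: folder vs. top level, then
// premium vs. default limit.
public final class PinLimits {

    public static int maxPinnedCount(boolean inFolder, boolean premium,
                                     int pinnedDefault, int pinnedPremium,
                                     int folderPinnedDefault, int folderPinnedPremium) {
        if (inFolder) {
            return premium ? folderPinnedPremium : folderPinnedDefault;
        }
        return premium ? pinnedPremium : pinnedDefault;
    }

    public static void main(String[] args) {
        // Example values only; the real limits come from the server config.
        System.out.println(maxPinnedCount(false, true, 5, 10, 100, 200)); // prints 10
    }
}
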
maxPinnedCount || newPinnedCount + pinnedCount - alreadyAdded > maxPinnedCount); } @@ -6894,6 +7181,7 @@ public boolean showChatPreview(DialogCell cell) { ActionBarMenuSubItem deleteItem = new ActionBarMenuSubItem(getParentActivity(), false, true); deleteItem.setIconColor(getThemedColor(Theme.key_dialogRedIcon)); deleteItem.setTextColor(getThemedColor(Theme.key_dialogTextRed)); + deleteItem.setSelectorColor(Theme.multAlpha(getThemedColor(Theme.key_dialogTextRed), .12f)); deleteItem.setTextAndIcon(LocaleController.getString("Delete", R.string.Delete), R.drawable.msg_delete); deleteItem.setMinimumWidth(160); deleteItem.setOnClickListener(e -> { @@ -6964,6 +7252,9 @@ private void setScrollY(float value) { if (filterTabsView != null) { filterTabsView.setTranslationY(value); } + if (dialogsHintCell != null) { + dialogsHintCell.setTranslationY(value); + } if (animatedStatusView != null) { animatedStatusView.translateY2((int) value); animatedStatusView.setAlpha(1f - -value / ActionBar.getCurrentActionBarHeight()); @@ -6995,7 +7286,9 @@ private void prepareBlurBitmap() { @Override public void onTransitionAnimationProgress(boolean isOpen, float progress) { - if (blurredView != null && blurredView.getVisibility() == View.VISIBLE) { + if (rightSlidingDialogContainer != null && rightSlidingDialogContainer.hasFragment()) { + rightSlidingDialogContainer.getFragment().onTransitionAnimationProgress(isOpen, progress); + } else if (blurredView != null && blurredView.getVisibility() == View.VISIBLE) { if (isOpen) { blurredView.setAlpha(1.0f - progress); } else { @@ -7006,18 +7299,22 @@ public void onTransitionAnimationProgress(boolean isOpen, float progress) { @Override public void onTransitionAnimationEnd(boolean isOpen, boolean backward) { - if (isOpen && blurredView != null && blurredView.getVisibility() == View.VISIBLE) { - blurredView.setVisibility(View.GONE); - blurredView.setBackground(null); - } - if (isOpen && afterSignup) { - if (!NekoConfig.disableVibration.Bool()) { - try { - fragmentView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignored) {} + if (rightSlidingDialogContainer != null && rightSlidingDialogContainer.hasFragment()) { + rightSlidingDialogContainer.getFragment().onTransitionAnimationEnd(isOpen, backward); + } else { + if (isOpen && blurredView != null && blurredView.getVisibility() == View.VISIBLE) { + blurredView.setVisibility(View.GONE); + blurredView.setBackground(null); } - if (getParentActivity() instanceof LaunchActivity) { - ((LaunchActivity) getParentActivity()).getFireworksOverlay().start(); + if (isOpen && afterSignup) { + try { + if (!NekoConfig.disableVibration.Bool()) + fragmentView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } catch (Exception ignored) { + } + if (getParentActivity() instanceof LaunchActivity) { + ((LaunchActivity) getParentActivity()).getFireworksOverlay().start(); + } } } } @@ -7206,7 +7503,11 @@ private void performSelectedDialogsAction(ArrayList selectedDialogs, int a if (containsFilter) { maxPinnedCount = 100 - filter.alwaysShow.size(); } else if (NekoConfig.unlimitedPinnedDialogs.Bool() || folderId != 0 || filter != null) { - maxPinnedCount = getMessagesController().maxFolderPinnedDialogsCount; + if (UserConfig.getInstance(currentAccount).isPremium()) { + maxPinnedCount = getMessagesController().maxFolderPinnedDialogsCountPremium; + } else { + maxPinnedCount = 
getMessagesController().maxFolderPinnedDialogsCountDefault; + } } else { maxPinnedCount = getUserConfig().isPremium() ? getMessagesController().dialogFiltersPinnedLimitPremium : getMessagesController().dialogFiltersPinnedLimitDefault; } @@ -7262,7 +7563,7 @@ private void performSelectedDialogsAction(ArrayList selectedDialogs, int a showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } return; } else if (action == block && alert) { @@ -7517,6 +7818,28 @@ private void markAsUnread(long did) { getMessagesController().markDialogAsUnread(did, null, 0); } + private void markDialogsAsRead(ArrayList dialogs) { + debugLastUpdateAction = 2; + int selectedDialogIndex = -1; + + setDialogsListFrozen(true); + checkAnimationFinished(); + for (int i = 0; i < dialogs.size(); i++) { + long did = dialogs.get(i).id; + TLRPC.Dialog dialog = dialogs.get(i); + if (getMessagesController().isForum(did)) { + getMessagesController().markAllTopicsAsRead(did); + } + getMessagesController().markMentionsAsRead(did, 0); + getMessagesController().markDialogAsRead(did, dialog.top_message, dialog.top_message, dialog.last_message_date, false, 0, 0, true, 0); + } + if (selectedDialogIndex >= 0) { + frozenDialogsList.remove(selectedDialogIndex); + viewPages[0].dialogsItemAnimator.prepareForRemove(); + viewPages[0].updateList(true); + } + } + private void performDeleteOrClearDialogAction(int action, long selectedDialog, TLRPC.Chat chat, boolean isBot, boolean revoke) { if (action == clear) { getMessagesController().deleteDialog(selectedDialog, 1, revoke); @@ -7852,7 +8175,14 @@ private void updateCounters(boolean hide) { } else { blockItem.setVisibility(View.VISIBLE); } - if (filterTabsView == null || filterTabsView.getVisibility() != View.VISIBLE || filterTabsView.currentTabIsDefault()) { + boolean cantRemoveFromFolder = filterTabsView == null || filterTabsView.getVisibility() != View.VISIBLE || filterTabsView.currentTabIsDefault(); + if (!cantRemoveFromFolder) { + try { + final int dialogsCount = getDialogsArray(currentAccount, viewPages[0].dialogsAdapter.getDialogsType(), folderId, dialogsListFrozen).size(); + cantRemoveFromFolder = count >= dialogsCount; + } catch (Exception ignore) {} + } + if (cantRemoveFromFolder) { removeFromFolderItem.setVisibility(View.GONE); } else { removeFromFolderItem.setVisibility(View.VISIBLE); @@ -8205,7 +8535,7 @@ public void onAnimationEnd(Animator animation) { } actionBar.setTitle(LocaleController.formatPluralString("Recipient", selectedDialogs.size())); } - } else if (initialDialogsType == 10) { + } else if (initialDialogsType == DIALOGS_TYPE_WIDGET) { hideFloatingButton(selectedDialogs.isEmpty()); } @@ -8404,7 +8734,17 @@ public void didReceivedNotification(int id, int account, Object... 
args) { } else if (id == NotificationCenter.dialogsUnreadReactionsCounterChanged) { updateVisibleRows(0); } else if (id == NotificationCenter.emojiLoaded) { - updateVisibleRows(0); + for (int i = 0; i < viewPages.length; ++i) { + final RecyclerListView listView = viewPages[i].listView; + if (listView != null) { + for (int a = 0; a < listView.getChildCount(); ++a) { + View child = listView.getChildAt(a); + if (child != null) { + child.invalidate(); + } + } + } + } if (filterTabsView != null) { filterTabsView.getTabsContainer().invalidateViews(); } @@ -8549,12 +8889,15 @@ public void didReceivedNotification(int id, int account, Object... args) { showFiltersHint(); } else if (id == NotificationCenter.newSuggestionsAvailable) { showNextSupportedSuggestion(); + updateDialogsHint(); } else if (id == NotificationCenter.forceImportContactsStart) { setFloatingProgressVisible(true, true); - for (ViewPage page : viewPages) { - page.dialogsAdapter.setForceShowEmptyCell(false); - page.dialogsAdapter.setForceUpdatingContacts(true); - page.dialogsAdapter.notifyDataSetChanged(); + if (viewPages != null) { + for (ViewPage page : viewPages) { + page.dialogsAdapter.setForceShowEmptyCell(false); + page.dialogsAdapter.setForceUpdatingContacts(true); + page.dialogsAdapter.notifyDataSetChanged(); + } } } else if (id == NotificationCenter.messagesDeleted) { if (searchIsShowed && searchViewPager != null) { @@ -8613,6 +8956,10 @@ public void onAnimationEnd(Animator animation) { updateStatus((TLRPC.User) args[0], true); } else if (id == NotificationCenter.currentUserPremiumStatusChanged) { updateStatus(UserConfig.getInstance(account).getCurrentUser(), true); + } else if (id == NotificationCenter.onDatabaseReset) { + dialogsLoaded.set(currentAccount, false); + loadDialogs(getAccountInstance()); + getMessagesController().loadPinnedDialogs(folderId, 0, null); } } @@ -8731,17 +9078,34 @@ public DialogsHeader(int type) { } } + public static final int DIALOGS_TYPE_DEFAULT = 0; + public static final int DIALOGS_TYPE_BOT_SHARE = 1; // selecting group to write with inline bot query, including sharing a game + public static final int DIALOGS_TYPE_ADD_USERS_TO = 2; // Chats + My channels + My groups + public static final int DIALOGS_TYPE_FORWARD = 3; + public static final int DIALOGS_TYPE_USERS_ONLY = 4; + public static final int DIALOGS_TYPE_CHANNELS_ONLY = 5; + public static final int DIALOGS_TYPE_GROUPS_ONLY = 6; + public static final int DIALOGS_TYPE_7 = 7; + public static final int DIALOGS_TYPE_8 = 8; + public static final int DIALOGS_TYPE_BLOCK = 9; + public static final int DIALOGS_TYPE_WIDGET = 10; + public static final int DIALOGS_TYPE_IMPORT_HISTORY_GROUPS = 11; // groups only + public static final int DIALOGS_TYPE_IMPORT_HISTORY_USERS = 12; // users only + public static final int DIALOGS_TYPE_IMPORT_HISTORY = 13; + public static final int DIALOGS_TYPE_START_ATTACH_BOT = 14; + public static final int DIALOGS_TYPE_BOT_REQUEST_PEER = 15; + @NonNull public ArrayList getDialogsArray(int currentAccount, int dialogsType, int folderId, boolean frozen) { if (frozen && frozenDialogsList != null) { return frozenDialogsList; } MessagesController messagesController = AccountInstance.getInstance(currentAccount).getMessagesController(); - if (dialogsType == 0) { + if (dialogsType == DIALOGS_TYPE_DEFAULT) { return messagesController.getDialogs(folderId); - } else if (dialogsType == 1 || dialogsType == 10 || dialogsType == 13) { + } else if (dialogsType == DIALOGS_TYPE_BOT_SHARE || dialogsType == DIALOGS_TYPE_WIDGET || dialogsType 
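
For reference (not part of the patch): the emojiLoaded handler above now walks the attached cells of each page's list and invalidates them instead of running a full updateVisibleRows pass, since a freshly loaded emoji only needs a redraw, not a rebind. A generic RecyclerView version of that loop, assuming you hold a reference to the list (the patch does this per viewPage); the helper class name is illustrative.

import android.view.View;

import androidx.recyclerview.widget.RecyclerView;

public final class ListInvalidator {

    // Redraws the currently attached children of a RecyclerView without
    // notifying the adapter, which avoids a relayout/rebind cycle.
    public static void invalidateVisibleChildren(RecyclerView listView) {
        if (listView == null) {
            return;
        }
        for (int i = 0; i < listView.getChildCount(); i++) {
            View child = listView.getChildAt(i);
            if (child != null) {
                child.invalidate();
            }
        }
    }
}
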
== DIALOGS_TYPE_IMPORT_HISTORY) { return messagesController.dialogsServerOnly; - } else if (dialogsType == 2) { + } else if (dialogsType == DIALOGS_TYPE_ADD_USERS_TO) { ArrayList dialogs = new ArrayList<>(messagesController.dialogsCanAddUsers.size() + messagesController.dialogsMyChannels.size() + messagesController.dialogsMyGroups.size() + 2); if (messagesController.dialogsMyChannels.size() > 0 && allowChannels) { dialogs.add(new DialogsHeader(DialogsHeader.HEADER_TYPE_MY_CHANNELS)); @@ -8767,13 +9131,13 @@ public ArrayList getDialogsArray(int currentAccount, int dialogsTy } } return dialogs; - } else if (dialogsType == 3) { + } else if (dialogsType == DIALOGS_TYPE_FORWARD) { return messagesController.dialogsForward; - } else if (dialogsType == 4 || dialogsType == 12) { + } else if (dialogsType == DIALOGS_TYPE_USERS_ONLY || dialogsType == DIALOGS_TYPE_IMPORT_HISTORY_USERS) { return messagesController.dialogsUsersOnly; - } else if (dialogsType == 5) { + } else if (dialogsType == DIALOGS_TYPE_CHANNELS_ONLY) { return messagesController.dialogsChannelsOnly; - } else if (dialogsType == 6 || dialogsType == 11) { + } else if (dialogsType == DIALOGS_TYPE_GROUPS_ONLY || dialogsType == DIALOGS_TYPE_IMPORT_HISTORY_GROUPS) { return messagesController.dialogsGroupsOnly; } else if (dialogsType == 7 || dialogsType == 8) { MessagesController.DialogFilter dialogFilter = messagesController.selectedDialogFilter[dialogsType == 7 ? 0 : 1]; @@ -8782,13 +9146,14 @@ public ArrayList getDialogsArray(int currentAccount, int dialogsTy } else { return dialogFilter.dialogs; } - } else if (dialogsType == 9) { + } else if (dialogsType == DIALOGS_TYPE_BLOCK) { return messagesController.dialogsForBlock; } else if (dialogsType == DIALOGS_TYPE_START_ATTACH_BOT) { ArrayList dialogs = new ArrayList<>(); if (allowUsers || allowBots) { for (TLRPC.Dialog d : messagesController.dialogsUsersOnly) { - if (messagesController.getUser(d.id).bot ? allowBots : allowUsers) { + TLRPC.User user = messagesController.getUser(d.id); + if (user != null && !UserObject.isUserSelf(user) && (user.bot ? allowBots : allowUsers)) { dialogs.add(d); } } @@ -8800,10 +9165,79 @@ public ArrayList getDialogsArray(int currentAccount, int dialogsTy dialogs.addAll(messagesController.dialogsChannelsOnly); } return dialogs; + } else if (dialogsType == DIALOGS_TYPE_BOT_REQUEST_PEER) { + ArrayList dialogs = new ArrayList<>(); + TLRPC.User bot = messagesController.getUser(requestPeerBotId); + if (requestPeerType instanceof TLRPC.TL_requestPeerTypeUser) { + ConcurrentHashMap users = messagesController.getUsers(); + for (TLRPC.Dialog dialog : messagesController.dialogsUsersOnly) { + TLRPC.User user = getMessagesController().getUser(dialog.id); + if (meetRequestPeerRequirements(user)) { + dialogs.add(dialog); + } + } + for (TLRPC.User user : users.values()) { + if (user != null && !messagesController.dialogs_dict.containsKey(user.id) && meetRequestPeerRequirements(user)) { + TLRPC.Dialog d = new TLRPC.TL_dialog(); + d.peer = new TLRPC.TL_peerUser(); + d.peer.user_id = user.id; + d.id = user.id; + dialogs.add(d); + } + } + } else if (requestPeerType instanceof TLRPC.TL_requestPeerTypeChat || requestPeerType instanceof TLRPC.TL_requestPeerTypeBroadcast) { + ConcurrentHashMap chats = messagesController.getChats(); + ArrayList sourceDialogs = requestPeerType instanceof TLRPC.TL_requestPeerTypeChat ? 
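
For reference (not part of the patch): the patch replaces the bare dialogsType integers (0, 1, 3, 10, ...) with named DIALOGS_TYPE_* constants and uses them throughout getDialogsArray and the click handlers. A compact sketch of the same idea -- the constants plus a switch-style dispatch -- with simplified return values; the real method returns lists held by MessagesController, and the describe() helper here is illustrative.

public final class DialogsTypes {

    // Same values the patch assigns; names make call sites self-describing.
    public static final int DIALOGS_TYPE_DEFAULT = 0;
    public static final int DIALOGS_TYPE_BOT_SHARE = 1;
    public static final int DIALOGS_TYPE_FORWARD = 3;
    public static final int DIALOGS_TYPE_WIDGET = 10;
    public static final int DIALOGS_TYPE_IMPORT_HISTORY = 13;
    public static final int DIALOGS_TYPE_BOT_REQUEST_PEER = 15;

    // Dispatch on the named constant instead of a magic number.
    public static String describe(int dialogsType) {
        switch (dialogsType) {
            case DIALOGS_TYPE_DEFAULT:
                return "all dialogs of the folder";
            case DIALOGS_TYPE_BOT_SHARE:
            case DIALOGS_TYPE_WIDGET:
            case DIALOGS_TYPE_IMPORT_HISTORY:
                return "server-only dialogs";
            case DIALOGS_TYPE_FORWARD:
                return "forward targets";
            case DIALOGS_TYPE_BOT_REQUEST_PEER:
                return "peers matching the bot's requestPeer requirements";
            default:
                return "other";
        }
    }
}
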
messagesController.dialogsGroupsOnly : messagesController.dialogsChannelsOnly; + for (TLRPC.Dialog dialog : sourceDialogs) { + TLRPC.Chat chat = getMessagesController().getChat(-dialog.id); + if (meetRequestPeerRequirements(bot, chat)) { + dialogs.add(dialog); + } + } + for (TLRPC.Chat chat : chats.values()) { + if (chat != null && !messagesController.dialogs_dict.containsKey(-chat.id) && meetRequestPeerRequirements(bot, chat)) { + TLRPC.Dialog d = new TLRPC.TL_dialog(); + if (ChatObject.isChannel(chat)) { + d.peer = new TLRPC.TL_peerChannel(); + d.peer.channel_id = chat.id; + } else { + d.peer = new TLRPC.TL_peerChat(); + d.peer.chat_id = chat.id; + } + d.id = -chat.id; + dialogs.add(d); + } + } + } + return dialogs; } return new ArrayList<>(); } + private boolean meetRequestPeerRequirements(TLRPC.User user) { + TLRPC.TL_requestPeerTypeUser type = (TLRPC.TL_requestPeerTypeUser) requestPeerType; + return ( + user != null && + !UserObject.isReplyUser(user) && + !UserObject.isDeleted(user) && + (type.bot == null || type.bot == user.bot) && + (type.premium == null || type.premium == user.premium) + ); + } + + private boolean meetRequestPeerRequirements(TLRPC.User bot, TLRPC.Chat chat) { + return ( + chat != null && + ChatObject.isChannelAndNotMegaGroup(chat) == requestPeerType instanceof TLRPC.TL_requestPeerTypeBroadcast && + (requestPeerType.creator == null || !requestPeerType.creator || chat.creator) && + (requestPeerType.bot_participant == null || !requestPeerType.bot_participant || getMessagesController().isInChatCached(chat, bot) || ChatObject.canAddBotsToChat(chat)) && + (requestPeerType.has_username == null || requestPeerType.has_username == (ChatObject.getPublicUsername(chat) != null)) && + (requestPeerType.forum == null || requestPeerType.forum == ChatObject.isForum(chat)) && + (requestPeerType.user_admin_rights == null || getMessagesController().matchesAdminRights(chat, getUserConfig().getCurrentUser(), requestPeerType.user_admin_rights)) && + (requestPeerType.bot_admin_rights == null || getMessagesController().matchesAdminRights(chat, bot, requestPeerType.bot_admin_rights) || ChatObject.canAddAdmins(chat)) + ); + } + public void setSideMenu(RecyclerView recyclerView) { sideMenu = recyclerView; sideMenu.setBackgroundColor(Theme.getColor(Theme.key_chats_menuBackground)); @@ -9120,7 +9554,7 @@ public void setInitialSearchType(int type) { } private boolean checkCanWrite(final long dialogId) { - if (addToGroupAlertString == null && checkCanWrite) { + if (addToGroupAlertString == null && initialDialogsType != DIALOGS_TYPE_BOT_REQUEST_PEER && checkCanWrite) { if (DialogObject.isChatDialog(dialogId)) { TLRPC.Chat chat = getMessagesController().getChat(-dialogId); if (ChatObject.isChannel(chat) && !chat.megagroup && ((cantSendToChannels || !ChatObject.isCanWriteToChannel(-dialogId, currentAccount)) || hasPoll == 2)) { @@ -9152,10 +9586,14 @@ private boolean checkCanWrite(final long dialogId) { } public void didSelectResult(final long dialogId, int topicId, boolean useAlert, final boolean param) { + didSelectResult(dialogId, topicId, useAlert, param, null); + } + + public void didSelectResult(final long dialogId, int topicId, boolean useAlert, final boolean param, TopicsFragment topicsFragment) { if (!checkCanWrite(dialogId)) { return; } - if (initialDialogsType == 11 || initialDialogsType == 12 || initialDialogsType == 13) { + if (initialDialogsType == DIALOGS_TYPE_IMPORT_HISTORY_GROUPS || initialDialogsType == DIALOGS_TYPE_IMPORT_HISTORY_USERS || initialDialogsType == 
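
For reference (not part of the patch): meetRequestPeerRequirements above treats each flag of the bot's request as tri-state -- a null Boolean means "don't care", while true/false must match the peer. A self-contained sketch of that matching style; the field names mirror the TL_requestPeerTypeUser flags used in the patch, but the User and Requirements classes here are illustrative stand-ins.

public final class RequestPeerFilter {

    // Illustrative stand-ins; in the patch these are TLRPC.User and
    // TLRPC.TL_requestPeerTypeUser.
    public static final class User {
        final boolean bot;
        final boolean premium;
        User(boolean bot, boolean premium) { this.bot = bot; this.premium = premium; }
    }

    public static final class Requirements {
        final Boolean bot;      // null = any
        final Boolean premium;  // null = any
        Requirements(Boolean bot, Boolean premium) { this.bot = bot; this.premium = premium; }
    }

    // A null requirement never filters; a non-null one must equal the user's flag.
    public static boolean matches(User user, Requirements req) {
        return user != null
                && (req.bot == null || req.bot == user.bot)
                && (req.premium == null || req.premium == user.premium);
    }

    public static void main(String[] args) {
        Requirements anyNonBot = new Requirements(false, null);
        System.out.println(matches(new User(false, true), anyNonBot));  // true
        System.out.println(matches(new User(true, false), anyNonBot));  // false
    }
}
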
DIALOGS_TYPE_IMPORT_HISTORY) { if (checkingImportDialog) { return; } @@ -9192,7 +9630,7 @@ public void didSelectResult(final long dialogId, int topicId, boolean useAlert, setDialogsListFrozen(true); ArrayList dids = new ArrayList<>(); dids.add(MessagesStorage.TopicKey.of(dialogId, 0)); - delegate.didSelectDialogs(DialogsActivity.this, dids, null, param); + delegate.didSelectDialogs(DialogsActivity.this, dids, null, param, null); }); } else { AlertsCreator.processError(currentAccount, error, this, req); @@ -9260,15 +9698,47 @@ public void didSelectResult(final long dialogId, int topicId, boolean useAlert, } builder.setTitle(title); builder.setMessage(AndroidUtilities.replaceTags(message)); - builder.setPositiveButton(buttonText, (dialogInterface, i) -> didSelectResult(dialogId, topicId,false, false)); + builder.setPositiveButton(buttonText, (dialogInterface, i) -> didSelectResult(dialogId, topicId,false, false, topicsFragment)); builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); - showDialog(builder.create()); + Dialog dialog = builder.create(); + if (showDialog(dialog) == null) { + dialog.show(); + } + } else if (initialDialogsType == DIALOGS_TYPE_BOT_REQUEST_PEER) { + Runnable send = () -> { + if (delegate != null) { + ArrayList dids = new ArrayList<>(); + dids.add(MessagesStorage.TopicKey.of(dialogId, topicId)); + delegate.didSelectDialogs(DialogsActivity.this, dids, null, param, topicsFragment); + if (resetDelegate) { + delegate = null; + } + } else { + finishFragment(); + } + }; + Runnable checkBotRightsAndSend = () -> { + if (requestPeerType.bot_admin_rights != null) { + TLRPC.User bot = getMessagesController().getUser(requestPeerBotId); + getMessagesController().setUserAdminRole(-dialogId, bot, requestPeerType.bot_admin_rights, null, false, DialogsActivity.this, true, true, null, send, err -> { + send.run(); + return true; + }); + } else { + send.run(); + } + }; + if (dialogId < 0) { + showSendToBotAlert(getMessagesController().getChat(-dialogId), checkBotRightsAndSend, null); + } else { + showSendToBotAlert(getMessagesController().getUser(dialogId), checkBotRightsAndSend, null); + } } else { if (delegate != null) { ArrayList dids = new ArrayList<>(); dids.add(MessagesStorage.TopicKey.of(dialogId, topicId)); - delegate.didSelectDialogs(DialogsActivity.this, dids, null, param); - if (resetDelegate) { + boolean res = delegate.didSelectDialogs(DialogsActivity.this, dids, null, param, topicsFragment); + if (res && resetDelegate) { delegate = null; } } else { @@ -9277,6 +9747,52 @@ public void didSelectResult(final long dialogId, int topicId, boolean useAlert, } } + private void showSendToBotAlert(TLRPC.User user, Runnable ok, Runnable cancel) { + TLRPC.User bot = getMessagesController().getUser(requestPeerBotId); + + showDialog( + new AlertDialog.Builder(getContext()) + .setTitle(LocaleController.formatString(R.string.AreYouSureSendChatToBotTitle, UserObject.getFirstName(user), UserObject.getFirstName(bot))) + .setMessage(TextUtils.concat( + AndroidUtilities.replaceTags(LocaleController.formatString(R.string.AreYouSureSendChatToBotMessage, UserObject.getFirstName(user), UserObject.getFirstName(bot))) + )) + .setPositiveButton(LocaleController.formatString("Send", R.string.Send), (di, p) -> ok.run()) + .setNegativeButton(LocaleController.formatString("Cancel", R.string.Cancel), (di, p) -> { + if (cancel != null) { + cancel.run(); + } + }).create() + ); + } + + private void showSendToBotAlert(TLRPC.Chat chat, Runnable ok, Runnable cancel) { + final 
TLRPC.User bot = getMessagesController().getUser(requestPeerBotId); + final boolean isChannel = ChatObject.isChannelAndNotMegaGroup(chat); + + showDialog( + new AlertDialog.Builder(getContext()) + .setTitle(LocaleController.formatString(R.string.AreYouSureSendChatToBotTitle, chat.title, UserObject.getFirstName(bot))) + .setMessage(TextUtils.concat( + AndroidUtilities.replaceTags(LocaleController.formatString(R.string.AreYouSureSendChatToBotMessage, chat.title, UserObject.getFirstName(bot))), + ( + requestPeerType.bot_participant != null && requestPeerType.bot_participant && !getMessagesController().isInChatCached(chat, bot) || + requestPeerType.bot_admin_rights != null + ) ? + TextUtils.concat("\n\n", AndroidUtilities.replaceTags( + (requestPeerType.bot_admin_rights == null) ? + LocaleController.formatString(R.string.AreYouSureSendChatToBotAdd, UserObject.getFirstName(bot), chat.title) : + LocaleController.formatString(R.string.AreYouSureSendChatToBotAddRights, UserObject.getFirstName(bot), chat.title, RequestPeerRequirementsCell.rightsToString(requestPeerType.bot_admin_rights, isChannel)) + )) : "" + )) + .setPositiveButton(LocaleController.formatString("Send", R.string.Send), (di, p) -> ok.run()) + .setNegativeButton(LocaleController.formatString("Cancel", R.string.Cancel), (di, p) -> { + if (cancel != null) { + cancel.run(); + } + }).create() + ); + } + public RLottieImageView getFloatingButton() { return floatingButton; } @@ -9334,7 +9850,7 @@ public boolean onTouch(View v, MotionEvent event) { for (int i = 0; i < selectedDialogs.size(); i++) { topicKeys.add(MessagesStorage.TopicKey.of(selectedDialogs.get(i), 0)); } - delegate.didSelectDialogs(DialogsActivity.this, topicKeys, commentView.getFieldText(), false); + delegate.didSelectDialogs(DialogsActivity.this, topicKeys, commentView.getFieldText(), false, null); }); ActionBarMenuSubItem showSendersNameView = new ActionBarMenuSubItem(parentActivity, true, true, false, resourcesProvider); @@ -9382,9 +9898,8 @@ public boolean onTouch(View v, MotionEvent event) { int y = location[1] - layout.getMeasuredHeight() - AndroidUtilities.dp(2); sendPopupWindow.showAtLocation(view, Gravity.LEFT | Gravity.TOP, location[0] + view.getMeasuredWidth() - layout.getMeasuredWidth() + AndroidUtilities.dp(8), y); sendPopupWindow.dimBehind(); - if (!NekoConfig.disableVibration.Bool()) { + if (!NekoConfig.disableVibration.Bool()) view.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } return false; } @@ -9670,6 +10185,7 @@ public ArrayList getThemeDescriptions() { arrayList.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundPink)); arrayList.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundSaved)); arrayList.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundRed)); + arrayList.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_background2Red)); arrayList.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_background2Orange)); arrayList.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_background2Violet)); arrayList.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_background2Green)); @@ -9756,7 +10272,6 @@ public ArrayList getThemeDescriptions() { arrayList.add(new ThemeDescription(sideMenu, 0, new Class[]{DrawerProfileCell.class}, null, null, 
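
For reference (not part of the patch): showSendToBotAlert above wraps the final "send this chat to the bot" confirmation in an alert whose positive button runs an ok runnable and whose negative button runs an optional cancel. A stripped-down sketch of that shape using the stock android.app.AlertDialog.Builder; the patch itself uses Telegram's own AlertDialog class and localized strings, which are not reproduced here, and the ConfirmDialogs class is illustrative.

import android.app.AlertDialog;
import android.content.Context;

public final class ConfirmDialogs {

    // Shows a confirmation whose buttons forward to the given callbacks;
    // cancel may be null when nothing needs to happen on dismissal.
    public static void show(Context context, CharSequence title, CharSequence message,
                            Runnable ok, Runnable cancel) {
        new AlertDialog.Builder(context)
                .setTitle(title)
                .setMessage(message)
                .setPositiveButton(android.R.string.ok, (dialog, which) -> ok.run())
                .setNegativeButton(android.R.string.cancel, (dialog, which) -> {
                    if (cancel != null) {
                        cancel.run();
                    }
                })
                .show();
    }
}
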
null, Theme.key_chats_menuName)); arrayList.add(new ThemeDescription(sideMenu, 0, new Class[]{DrawerProfileCell.class}, null, null, null, Theme.key_chats_menuPhone)); arrayList.add(new ThemeDescription(sideMenu, 0, new Class[]{DrawerProfileCell.class}, null, null, null, Theme.key_chats_menuPhoneCats)); - arrayList.add(new ThemeDescription(sideMenu, 0, new Class[]{DrawerProfileCell.class}, null, null, null, Theme.key_chats_menuCloudBackgroundCats)); arrayList.add(new ThemeDescription(sideMenu, 0, new Class[]{DrawerProfileCell.class}, null, null, null, Theme.key_chat_serviceBackground)); arrayList.add(new ThemeDescription(sideMenu, 0, new Class[]{DrawerProfileCell.class}, null, null, null, Theme.key_chats_menuTopShadow)); arrayList.add(new ThemeDescription(sideMenu, 0, new Class[]{DrawerProfileCell.class}, null, null, null, Theme.key_chats_menuTopShadowCats)); @@ -9827,10 +10342,8 @@ public ArrayList getThemeDescriptions() { arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogLinkSelection)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogTextBlue)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogTextBlue2)); - arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogTextBlue3)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogTextBlue4)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogTextRed)); - arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogTextRed2)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogTextGray)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogTextGray2)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogTextGray3)); @@ -9846,14 +10359,11 @@ public ArrayList getThemeDescriptions() { arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogCheckboxSquareDisabled)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogRadioBackground)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogRadioBackgroundChecked)); - arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogProgressCircle)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogButton)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogButtonSelector)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogScrollGlow)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogRoundCheckBox)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogRoundCheckBoxCheck)); - arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogBadgeBackground)); - arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogBadgeText)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogLineProgress)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogLineProgressBackground)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_dialogGrayLine)); @@ -9870,10 +10380,8 @@ public ArrayList getThemeDescriptions() { arrayList.add(new ThemeDescription(null, 0, null, null, null, null, 
Theme.key_sheet_scrollUp)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_sheet_other)); - arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_player_actionBar)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_player_actionBarSelector)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_player_actionBarTitle)); - arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_player_actionBarTop)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_player_actionBarSubtitle)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_player_actionBarItems)); arrayList.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_player_background)); @@ -9893,7 +10401,6 @@ public ArrayList getThemeDescriptions() { // arrayList.add(new ThemeDescription(commentView, ThemeDescription.FLAG_IMAGECOLOR, new Class[]{ChatActivityEnterView.class}, new String[]{"sendButton"}, null, null, null, Theme.key_chat_messagePanelSend)); } - arrayList.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_actionBarTipBackground)); arrayList.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_windowBackgroundWhiteBlackText)); arrayList.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_player_time)); arrayList.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_chat_messagePanelCursor)); @@ -9935,6 +10442,10 @@ public ArrayList getThemeDescriptions() { }, Theme.key_actionBarActionModeDefaultIcon, Theme.key_actionBarActionModeDefaultSelector)); } + if (dialogsHintCell != null) { + SimpleThemeDescription.add(arrayList, dialogsHintCell::updateColors, Theme.key_windowBackgroundWhite, Theme.key_windowBackgroundWhiteBlackText, Theme.key_windowBackgroundWhiteGrayText); + } + return arrayList; } @@ -9954,6 +10465,8 @@ private void updateFloatingButtonColor() { } float slideFragmentProgress = 1f; + final int slideAmplitudeDp = 40; + boolean slideFragmentLite; boolean isSlideBackTransition; boolean isDrawerTransition; ValueAnimator slideBackTransitionAnimator; @@ -9964,7 +10477,10 @@ protected Animator getCustomSlideTransition(boolean topFragment, boolean backAni slideBackTransitionAnimator = ValueAnimator.ofFloat(slideFragmentProgress, 1f); return slideBackTransitionAnimator; } - int duration = (int) (Math.max((int) (200.0f / getLayoutContainer().getMeasuredWidth() * distanceToMove), 80) * 1.2f); + int duration = 150; + if (getLayoutContainer() != null) { + duration = (int) (Math.max((int) (200.0f / getLayoutContainer().getMeasuredWidth() * distanceToMove), 80) * 1.2f); + } slideBackTransitionAnimator = ValueAnimator.ofFloat(slideFragmentProgress, 1f); slideBackTransitionAnimator.addUpdateListener(valueAnimator -> setSlideTransitionProgress((float) valueAnimator.getAnimatedValue())); slideBackTransitionAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT); @@ -9987,22 +10503,22 @@ public void prepareFragmentToSlide(boolean topFragment, boolean beginSlide) { } private void setFragmentIsSliding(boolean sliding) { - if (SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW) { + if (SharedConfig.getDevicePerformanceClass() <= SharedConfig.PERFORMANCE_CLASS_AVERAGE || !LiteMode.isEnabled(LiteMode.FLAG_CHAT_SCALE)) { return; } if (sliding) { if (viewPages != null && viewPages[0] != null) { - // 
viewPages[0].setLayerType(View.LAYER_TYPE_HARDWARE, null); + viewPages[0].setLayerType(View.LAYER_TYPE_HARDWARE, null); viewPages[0].setClipChildren(false); viewPages[0].setClipToPadding(false); viewPages[0].listView.setClipChildren(false); } if (actionBar != null) { - // actionBar.setLayerType(View.LAYER_TYPE_HARDWARE, null); + actionBar.setLayerType(View.LAYER_TYPE_HARDWARE, null); } if (filterTabsView != null) { - // filterTabsView.getListView().setLayerType(View.LAYER_TYPE_HARDWARE, null); + filterTabsView.getListView().setLayerType(View.LAYER_TYPE_HARDWARE, null); } if (fragmentView != null) { ((ViewGroup) fragmentView).setClipChildren(false); @@ -10036,7 +10552,7 @@ private void setFragmentIsSliding(boolean sliding) { @Override public void onSlideProgress(boolean isOpen, float progress) { - if (SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW) { + if (SharedConfig.getDevicePerformanceClass() <= SharedConfig.PERFORMANCE_CLASS_LOW) { return; } if (isSlideBackTransition && slideBackTransitionAnimator == null) { @@ -10045,31 +10561,45 @@ public void onSlideProgress(boolean isOpen, float progress) { } private void setSlideTransitionProgress(float progress) { - if (SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW) { + if (SharedConfig.getDevicePerformanceClass() <= SharedConfig.PERFORMANCE_CLASS_LOW) { return; } + + slideFragmentLite = SharedConfig.getDevicePerformanceClass() <= SharedConfig.PERFORMANCE_CLASS_AVERAGE || !LiteMode.isEnabled(LiteMode.FLAG_CHAT_SCALE); slideFragmentProgress = progress; if (fragmentView != null) { fragmentView.invalidate(); } - float s = 1f - 0.05f * (1f - slideFragmentProgress); - if (filterTabsView != null) { - filterTabsView.getListView().setScaleX(s); - filterTabsView.getListView().setScaleY(s); - filterTabsView.getListView().setTranslationX((isDrawerTransition ? AndroidUtilities.dp(4) : -AndroidUtilities.dp(4)) * (1f - slideFragmentProgress)); - filterTabsView.getListView().setPivotX(isDrawerTransition ? filterTabsView.getMeasuredWidth() : 0); - filterTabsView.getListView().setPivotY(0); - filterTabsView.invalidate(); - } - if (rightSlidingDialogContainer != null && rightSlidingDialogContainer.getFragmentView() != null) { - if (!rightFragmentTransitionInProgress) { - rightSlidingDialogContainer.getFragmentView().setScaleX(s); - rightSlidingDialogContainer.getFragmentView().setScaleY(s); - rightSlidingDialogContainer.getFragmentView().setTranslationX((isDrawerTransition ? AndroidUtilities.dp(4) : -AndroidUtilities.dp(4)) * (1f - slideFragmentProgress)); + if (slideFragmentLite) { + if (filterTabsView != null) { + filterTabsView.getListView().setTranslationX((isDrawerTransition ? 1 : -1) * AndroidUtilities.dp(slideAmplitudeDp) * (1f - slideFragmentProgress)); + filterTabsView.invalidate(); + } + if (rightSlidingDialogContainer != null && rightSlidingDialogContainer.getFragmentView() != null) { + if (!rightFragmentTransitionInProgress) { + rightSlidingDialogContainer.getFragmentView().setTranslationX((isDrawerTransition ? 1 : -1) * AndroidUtilities.dp(slideAmplitudeDp) * (1f - slideFragmentProgress)); + } + } + } else { + final float s = 1f - 0.05f * (1f - slideFragmentProgress); + if (filterTabsView != null) { + filterTabsView.getListView().setScaleX(s); + filterTabsView.getListView().setScaleY(s); + filterTabsView.getListView().setTranslationX((isDrawerTransition ? 
AndroidUtilities.dp(4) : -AndroidUtilities.dp(4)) * (1f - slideFragmentProgress)); + filterTabsView.getListView().setPivotX(isDrawerTransition ? filterTabsView.getMeasuredWidth() : 0); + filterTabsView.getListView().setPivotY(0); + filterTabsView.invalidate(); + } + if (rightSlidingDialogContainer != null && rightSlidingDialogContainer.getFragmentView() != null) { + if (!rightFragmentTransitionInProgress) { + rightSlidingDialogContainer.getFragmentView().setScaleX(s); + rightSlidingDialogContainer.getFragmentView().setScaleY(s); + rightSlidingDialogContainer.getFragmentView().setTranslationX((isDrawerTransition ? AndroidUtilities.dp(4) : -AndroidUtilities.dp(4)) * (1f - slideFragmentProgress)); + } + rightSlidingDialogContainer.getFragmentView().setPivotX(isDrawerTransition ? rightSlidingDialogContainer.getMeasuredWidth() : 0); + rightSlidingDialogContainer.getFragmentView().setPivotY(0); } - rightSlidingDialogContainer.getFragmentView().setPivotX(isDrawerTransition ? rightSlidingDialogContainer.getMeasuredWidth() : 0); - rightSlidingDialogContainer.getFragmentView().setPivotY(0); } } @@ -10080,7 +10610,7 @@ public INavigationLayout.BackButtonState getBackButtonState() { @Override public void setProgressToDrawerOpened(float progress) { - if (SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW || isSlideBackTransition) { + if (SharedConfig.getDevicePerformanceClass() <= SharedConfig.PERFORMANCE_CLASS_LOW || isSlideBackTransition) { return; } boolean drawerTransition = progress > 0; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/DocumentSelectActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/DocumentSelectActivity.java index 0ad55552a6..281e5feb69 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/DocumentSelectActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/DocumentSelectActivity.java @@ -342,7 +342,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { setMeasuredDimension(widthSize, heightSize); int kbHeight = getKeyboardHeight(); - int keyboardSize = SharedConfig.smoothKeyboard ? 0 : kbHeight; + int keyboardSize = kbHeight; if (keyboardSize <= AndroidUtilities.dp(20)) { if (!AndroidUtilities.isInMultiwindow && commentTextView != null && frameLayout2.getParent() == this) { heightSize -= commentTextView.getEmojiPadding(); @@ -388,7 +388,7 @@ protected void onLayout(boolean changed, int l, int t, int r, int b) { } final int count = getChildCount(); - int keyboardSize = SharedConfig.smoothKeyboard ? 0 : getKeyboardHeight(); + int keyboardSize = getKeyboardHeight(); int paddingBottom = commentTextView != null && frameLayout2.getParent() == this && keyboardSize <= AndroidUtilities.dp(20) && !AndroidUtilities.isInMultiwindow && !AndroidUtilities.isTablet() ? 
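
For reference (not part of the patch): the slide-transition hunks above add a slideFragmentLite path -- on weaker devices, or with FLAG_CHAT_SCALE off, the filter tabs and the right-hand fragment are only translated by up to 40dp instead of being scaled and pivoted. A sketch of the two progress mappings; the class name is illustrative and the dp() helper stands in for AndroidUtilities.dp.

import android.view.View;

public final class SlideTransition {

    // progress runs toward 1 as the slide completes; isDrawer flips the direction.
    public static void apply(View target, float progress, boolean isDrawer,
                             boolean lite, float density) {
        float remaining = 1f - progress;
        if (lite) {
            // Lite path: translation only, up to 40dp, no scaling.
            target.setTranslationX((isDrawer ? 1 : -1) * dp(40, density) * remaining);
        } else {
            // Full path: slight scale-down plus a 4dp shift, pivoted at the leading edge.
            float scale = 1f - 0.05f * remaining;
            target.setScaleX(scale);
            target.setScaleY(scale);
            target.setTranslationX((isDrawer ? 1 : -1) * dp(4, density) * remaining);
            target.setPivotX(isDrawer ? target.getMeasuredWidth() : 0);
            target.setPivotY(0);
        }
    }

    private static float dp(float value, float density) {
        return value * density; // stand-in for AndroidUtilities.dp()
    }
}
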
commentTextView.getEmojiPadding() : 0; setBottomClip(paddingBottom); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/EmojiAnimationsOverlay.java b/TMessagesProj/src/main/java/org/telegram/ui/EmojiAnimationsOverlay.java index 23321a4085..f3a285c277 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/EmojiAnimationsOverlay.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/EmojiAnimationsOverlay.java @@ -10,6 +10,7 @@ import org.json.JSONException; import org.json.JSONObject; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.Emoji; import org.telegram.messenger.EmojiData; import org.telegram.messenger.FileLog; import org.telegram.messenger.ImageLocation; @@ -221,8 +222,9 @@ private void findViewAndShowAnimation(int messageId, int animation) { if (bestView != null) { chatActivity.restartSticker(bestView); - if (!EmojiData.hasEmojiSupportVibration(bestView.getMessageObject().getStickerEmoji()) && !bestView.getMessageObject().isPremiumSticker() && !bestView.getMessageObject().isAnimatedAnimatedEmoji() && !NekoConfig.disableVibration.Bool()) { - bestView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP); + if (!EmojiData.hasEmojiSupportVibration(bestView.getMessageObject().getStickerEmoji()) && !bestView.getMessageObject().isPremiumSticker() && !bestView.getMessageObject().isAnimatedAnimatedEmoji()) { + if (!NekoConfig.disableVibration.Bool()) + bestView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP); } showAnimationForCell(bestView, animation, false, true); } @@ -345,8 +347,9 @@ public boolean onTapItem(ChatMessageCell view, ChatActivity chatActivity, boolea } boolean show = showAnimationForCell(view, -1, userTapped, false); - if (userTapped && show && !EmojiData.hasEmojiSupportVibration(view.getMessageObject().getStickerEmoji()) && !view.getMessageObject().isPremiumSticker() && !view.getMessageObject().isAnimatedAnimatedEmoji() && !NekoConfig.disableVibration.Bool()) { - view.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP); + if (userTapped && show && !EmojiData.hasEmojiSupportVibration(view.getMessageObject().getStickerEmoji()) && !view.getMessageObject().isPremiumSticker() && !view.getMessageObject().isAnimatedAnimatedEmoji()) { + if (!NekoConfig.disableVibration.Bool()) + view.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP); } if (view.getMessageObject().isPremiumSticker() || (!userTapped && view.getMessageObject().isAnimatedEmojiStickerSingle())) { view.getMessageObject().forcePlayEffect = false; @@ -369,7 +372,7 @@ public boolean onTapItem(ChatMessageCell view, ChatActivity chatActivity, boolea } StickerSetBulletinLayout layout = new StickerSetBulletinLayout(chatActivity.getParentActivity(), null, StickerSetBulletinLayout.TYPE_EMPTY, document, chatActivity.getResourceProvider()); layout.subtitleTextView.setVisibility(View.GONE); - layout.titleTextView.setText(AndroidUtilities.replaceTags(LocaleController.formatString("EmojiInteractionTapHint", R.string.EmojiInteractionTapHint, chatActivity.currentUser.first_name))); + layout.titleTextView.setText(Emoji.replaceEmoji(AndroidUtilities.replaceTags(LocaleController.formatString("EmojiInteractionTapHint", R.string.EmojiInteractionTapHint, chatActivity.currentUser.first_name)), layout.titleTextView.getPaint().getFontMetricsInt(), false)); layout.titleTextView.setTypeface(null); layout.titleTextView.setMaxLines(3); layout.titleTextView.setSingleLine(false); @@ -619,8 +622,9 @@ private boolean showAnimationForCell(ChatMessageCell view, int animation, 
boolea public void didSetImage(ImageReceiver imageReceiver, boolean set, boolean thumb, boolean memCache) {} @Override public void onAnimationReady(ImageReceiver imageReceiver) { - if (sendTap && messageObject.isAnimatedAnimatedEmoji() && imageReceiver.getLottieAnimation() != null && !imageReceiver.getLottieAnimation().hasVibrationPattern() && !NekoConfig.disableVibration.Bool()) { - view.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); + if (sendTap && messageObject.isAnimatedAnimatedEmoji() && imageReceiver.getLottieAnimation() != null && !imageReceiver.getLottieAnimation().hasVibrationPattern()) { + if (!NekoConfig.disableVibration.Bool()) + view.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); } } }); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ExternalActionActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ExternalActionActivity.java index 9cd5a324be..10f7ce9026 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ExternalActionActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ExternalActionActivity.java @@ -217,7 +217,7 @@ private void showPasscodeActivity() { passcodeView.onShow(true, false); SharedConfig.isWaitingForPasscodeEnter = true; drawerLayoutContainer.setAllowOpenDrawer(false, false); - passcodeView.setDelegate(() -> { + passcodeView.setDelegate(view -> { SharedConfig.isWaitingForPasscodeEnter = false; if (passcodeSaveIntent != null) { handleIntent(passcodeSaveIntent, passcodeSaveIntentIsNew, passcodeSaveIntentIsRestore, true, passcodeSaveIntentAccount, passcodeSaveIntentState); @@ -228,6 +228,8 @@ private void showPasscodeActivity() { if (AndroidUtilities.isTablet()) { layersActionBarLayout.showLastFragment(); } + + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.passcodeDismissed, view); }); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/FeedWidgetConfigActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/FeedWidgetConfigActivity.java index 3556a038c5..bf19667a53 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/FeedWidgetConfigActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/FeedWidgetConfigActivity.java @@ -27,11 +27,11 @@ protected boolean handleIntent(Intent intent, boolean isNew, boolean restore, bo if (creatingAppWidgetId != AppWidgetManager.INVALID_APPWIDGET_ID) { Bundle args = new Bundle(); args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 5); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_CHANNELS_ONLY); args.putBoolean("allowSwitchAccount", true); args.putBoolean("checkCanWrite", false); DialogsActivity fragment = new DialogsActivity(args); - fragment.setDelegate((fragment1, dids, message, param) -> { + fragment.setDelegate((fragment1, dids, message, param, topicsFragment) -> { AccountInstance.getInstance(fragment1.getCurrentAccount()).getMessagesStorage().putWidgetDialogs(creatingAppWidgetId, dids); SharedPreferences preferences = FeedWidgetConfigActivity.this.getSharedPreferences("shortcut_widget", Activity.MODE_PRIVATE); @@ -47,6 +47,7 @@ protected boolean handleIntent(Intent intent, boolean isNew, boolean restore, bo resultValue.putExtra(AppWidgetManager.EXTRA_APPWIDGET_ID, creatingAppWidgetId); setResult(RESULT_OK, resultValue); finish(); + return true; }); if (AndroidUtilities.isTablet()) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/FilterCreateActivity.java 
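
For reference (not part of the patch): the FeedWidgetConfigActivity hunk reflects the wider delegate change in this patch -- didSelectDialogs now also receives the originating topics screen and returns a boolean, and callers such as didSelectResult only clear the delegate when it returns true. A minimal sketch of that interface shape; the SelectionDelegate interface below is a simplified stand-in for DialogsActivityDelegate, not the real declaration.

import java.util.Arrays;
import java.util.List;

public final class DelegateExample {

    // Simplified stand-in for the updated delegate: the extra last parameter is
    // the originating topics screen (nullable) and the boolean result tells the
    // caller whether the selection was consumed.
    public interface SelectionDelegate {
        boolean didSelectDialogs(Object fragment, List<Long> dids,
                                 CharSequence message, boolean param, Object topicsFragment);
    }

    public static void main(String[] args) {
        SelectionDelegate delegate = (fragment, dids, message, param, topicsFragment) -> {
            System.out.println("selected " + dids.size() + " dialog(s)");
            return true; // handled -> the caller may clear the delegate
        };
        boolean handled = delegate.didSelectDialogs(null, Arrays.asList(123L), null, false, null);
        System.out.println(handled);
    }
}
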
b/TMessagesProj/src/main/java/org/telegram/ui/FilterCreateActivity.java
index b49fd4faa8..803350a200 100644
--- a/TMessagesProj/src/main/java/org/telegram/ui/FilterCreateActivity.java
+++ b/TMessagesProj/src/main/java/org/telegram/ui/FilterCreateActivity.java
@@ -394,7 +394,7 @@ public boolean requestFocus(int direction, Rect previouslyFocusedRect) {
                 showDialog(alertDialog);
                 TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE);
                 if (button != null) {
-                    button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2));
+                    button.setTextColor(Theme.getColor(Theme.key_dialogTextRed));
                 }
             } else if (position == nameRow) {
                 PollEditTextCell cell = (PollEditTextCell) view;
@@ -523,7 +523,7 @@ private void showRemoveAlert(int position, CharSequence name, Object object, boo
         showDialog(alertDialog);
         TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE);
         if (button != null) {
-            button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2));
+            button.setTextColor(Theme.getColor(Theme.key_dialogTextRed));
         }
     }
diff --git a/TMessagesProj/src/main/java/org/telegram/ui/FilteredSearchView.java b/TMessagesProj/src/main/java/org/telegram/ui/FilteredSearchView.java
index 32962312da..e14ad13cb4 100644
--- a/TMessagesProj/src/main/java/org/telegram/ui/FilteredSearchView.java
+++ b/TMessagesProj/src/main/java/org/telegram/ui/FilteredSearchView.java
@@ -558,7 +558,7 @@ public void search(long dialogId, long minDate, long maxDate, FiltersView.MediaF
                 resultArray = new ArrayList<>();
                 ArrayList resultArrayNames = new ArrayList<>();
                 ArrayList encUsers = new ArrayList<>();
-                MessagesStorage.getInstance(currentAccount).localSearch(0, query, resultArray, resultArrayNames, encUsers, includeFolder ? 1 : 0);
+                MessagesStorage.getInstance(currentAccount).localSearch(0, query, resultArray, resultArrayNames, encUsers, null, includeFolder ?
1 : 0); } final TLRPC.TL_messages_searchGlobal req = new TLRPC.TL_messages_searchGlobal(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/FiltersSetupActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/FiltersSetupActivity.java index b19b7b8da0..2845a93f6c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/FiltersSetupActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/FiltersSetupActivity.java @@ -49,6 +49,8 @@ import org.telegram.ui.Components.Bulletin; import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.CombinedDrawable; +import org.telegram.ui.Components.EmojiTextView; +import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.Premium.LimitReachedBottomSheet; import org.telegram.ui.Components.Premium.PremiumFeatureBottomSheet; @@ -60,7 +62,7 @@ import tw.nekomimi.nekogram.folder.FolderIconHelper; -import tw.nekomimi.nekogram.folder.FolderIconHelper; +import tw.nekomimi.nekogram.NekoConfig; public class FiltersSetupActivity extends BaseFragment implements NotificationCenter.NotificationCenterDelegate { @@ -531,6 +533,12 @@ protected void dispatchDraw(Canvas canvas) { super.dispatchDraw(canvas); } }; + DefaultItemAnimator itemAnimator = new DefaultItemAnimator(); + itemAnimator.setDurations(350); + itemAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + itemAnimator.setDelayAnimations(false); + itemAnimator.setSupportsChangeAnimations(false); + listView.setItemAnimator(itemAnimator); ((DefaultItemAnimator) listView.getItemAnimator()).setDelayAnimations(false); listView.setLayoutManager(layoutManager = new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false)); listView.setVerticalScrollBarEnabled(false); @@ -710,13 +718,13 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } }); final AlertDialog dialog = builder1.create(); showDialog(dialog); - dialog.setItemColor(items.length - 1, Theme.getColor(Theme.key_dialogTextRed2), Theme.getColor(Theme.key_dialogRedIcon)); + dialog.setItemColor(items.length - 1, Theme.getColor(Theme.key_dialogTextRed), Theme.getColor(Theme.key_dialogRedIcon)); }); view = filterCell; break; @@ -739,6 +747,7 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType while (getMessagesController().dialogFiltersById.get(filter.id) != null) { filter.id++; } + filter.order = getMessagesController().dialogFilters.size(); filter.pendingUnreadCount = filter.unreadCount = -1; for (int b = 0; b < 2; b++) { ArrayList fromArray = b == 0 ? 
suggested.filter.include_peers : suggested.filter.exclude_peers; @@ -853,9 +862,9 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } case 3: { if (position == createSectionRow) { - holder.itemView.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); + holder.itemView.setBackground(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); } else { - holder.itemView.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); + holder.itemView.setBackground(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); } break; } @@ -875,7 +884,7 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } case 5: { SuggestedFilterCell filterCell = (SuggestedFilterCell) holder.itemView; - filterCell.setFilter(getMessagesController().suggestedFilters.get(position - recommendedStartRow), recommendedStartRow != recommendedEndRow - 1); + filterCell.setFilter(getMessagesController().suggestedFilters.get(position - recommendedStartRow), position < recommendedEndRow - 1); break; } } @@ -1016,7 +1025,8 @@ public void clearView(RecyclerView recyclerView, RecyclerView.ViewHolder viewHol protected void onDefaultTabMoved() { try { - fragmentView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_PRESS, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); + if (!NekoConfig.disableVibration.Bool()) + fragmentView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_PRESS, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); } catch (Exception ignore) {} BulletinFactory.of(this).createSimpleBulletin(R.raw.filter_reorder, AndroidUtilities.replaceTags(LocaleController.formatString("LimitReachedReorderFolder", R.string.LimitReachedReorderFolder, LocaleController.getString(R.string.FilterAllChats))), LocaleController.getString("PremiumMore", R.string.PremiumMore), Bulletin.DURATION_PROLONG, () -> { showDialog(new PremiumFeatureBottomSheet(FiltersSetupActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_ADVANCED_CHAT_MANAGEMENT, true)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/GroupCallActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/GroupCallActivity.java index d798d05f97..8057cd2b2c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/GroupCallActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/GroupCallActivity.java @@ -84,6 +84,7 @@ import org.telegram.messenger.FileLog; import org.telegram.messenger.ImageLoader; import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; @@ -1715,7 +1716,9 @@ public static void create(LaunchActivity activity, AccountInstance account, TLRP } groupCallInstance.parentActivity = activity; AndroidUtilities.runOnUIThread(() -> { - groupCallInstance.show(); + if (groupCallInstance != null) { + groupCallInstance.show(); + } }); } @@ -2054,7 +2057,8 @@ public void afterTextChanged(Editable s) { ignoreTextChange = true; s.delete(40, s.length()); AndroidUtilities.shakeView(editText); - editText.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + if (!NekoConfig.disableVibration.Bool()) + 
editText.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); ignoreTextChange = false; } } @@ -3956,7 +3960,7 @@ protected void dispatchDraw(Canvas canvas) { // cy += translation; float scale = BlobDrawable.SCALE_BIG_MIN + BlobDrawable.SCALE_BIG * amplitude * 0.5f; canvas.scale(scale * showLightingProgress, scale * showLightingProgress, cx, cy); - if (i == 1 && !SharedConfig.getLiteMode().enabled()) { + if (i == 1 && LiteMode.isEnabled(LiteMode.FLAG_CALLS_ANIMATIONS)) { float scaleLight = 0.7f + BlobDrawable.LIGHT_GRADIENT_SIZE * scheduleButtonsScale; canvas.save(); canvas.scale(scaleLight, scaleLight, cx, cy); @@ -4243,9 +4247,8 @@ public void onClick(View v) { if (startingGroupCall) { return; } - if (!NekoConfig.disableVibration.Bool()) { + if (!NekoConfig.disableVibration.Bool()) v.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } startingGroupCall = true; TLRPC.TL_phone_startScheduledGroupCall req = new TLRPC.TL_phone_startScheduledGroupCall(); req.call = call.getInputGroupCall(); @@ -4280,7 +4283,8 @@ public void onClick(View v) { } playingHandAnimation = true; AndroidUtilities.shakeView(muteLabel[0]); - v.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + if (!NekoConfig.disableVibration.Bool()) + v.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); int num = Utilities.random.nextInt(100); int endFrame; int startFrame; @@ -4666,7 +4670,8 @@ public void onScrolled(@NonNull RecyclerView recyclerView, int dx, int dy) { }); fullscreenUsersListView.setOnItemLongClickListener((view, position) -> { if (showMenuForCell(view)) { - listView.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); + if (!NekoConfig.disableVibration.Bool()) + listView.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); } return false; }); @@ -4819,7 +4824,7 @@ public void invalidate() { containerView.invalidate(); } }; - avatarsViewPager.setImagesLayerNum(8192); + avatarsViewPager.setImagesLayerNum(Integer.MAX_VALUE); avatarsViewPager.setInvalidateWithParent(true); avatarPagerIndicator.setProfileGalleryView(avatarsViewPager); avatarPreviewContainer = new FrameLayout(context) { @@ -5134,10 +5139,11 @@ public void requestLayout() { } }); final NumberPicker.OnValueChangeListener onValueChangeListener = (picker, oldVal, newVal) -> { - if (!NekoConfig.disableVibration.Bool()) { - try { + try { + if (!NekoConfig.disableVibration.Bool()) container.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignore) {} + } catch (Exception ignore) { + } AlertsCreator.checkScheduleDate(scheduleButtonTextView, scheduleInfoTextView, 7 * 24 * 60 * 60, 2, dayPicker, hourPicker, minutePicker); }; @@ -6933,7 +6939,6 @@ public static void onLeaveClick(Context context, Runnable onLeave, boolean fromO cells[num].setChecked(!cells[num].isChecked(), true); }); - builder.setCustomViewOffset(12); builder.setView(linearLayout); builder.setDialogButtonColorKey(Theme.key_voipgroup_listeningText); builder.setPositiveButton(LocaleController.getString("VoipGroupLeave", R.string.VoipGroupLeave), (dialogInterface, position) -> processOnLeave(call, cells[0].isChecked(), selfId, onLeave)); @@ -7112,7 +7117,9 @@ private void processSelectedOption(TLRPC.TL_groupCallParticipant participant, lo if 
(currentAvatarUpdater != null && currentAvatarUpdater.isUploadingImage()) { return; } - currentAvatarUpdater = new ImageUpdater(true); + TLRPC.User user = accountInstance.getUserConfig().getCurrentUser(); + + currentAvatarUpdater = new ImageUpdater(true, ImageUpdater.FOR_TYPE_USER, true); currentAvatarUpdater.setOpenWithFrontfaceCamera(true); currentAvatarUpdater.setForceDarkTheme(true); currentAvatarUpdater.setSearchAvailable(true, true); @@ -7120,7 +7127,7 @@ private void processSelectedOption(TLRPC.TL_groupCallParticipant participant, lo currentAvatarUpdater.parentFragment = parentActivity.getActionBarLayout().getLastFragment(); currentAvatarUpdater.setDelegate(avatarUpdaterDelegate = new AvatarUpdaterDelegate(peerId)); - TLRPC.User user = accountInstance.getUserConfig().getCurrentUser(); + currentAvatarUpdater.openMenu(user.photo != null && user.photo.photo_big != null && !(user.photo instanceof TLRPC.TL_userProfilePhotoEmpty), () -> accountInstance.getMessagesController().deleteUserPhoto(null), dialog -> { }, 0); @@ -7451,6 +7458,10 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { TLRPC.User currentUser = accountInstance.getMessagesController().getUser(peerId); imageLocation = ImageLocation.getForUserOrChat(currentUser, ImageLocation.TYPE_BIG); thumbLocation = ImageLocation.getForUserOrChat(currentUser, ImageLocation.TYPE_SMALL); + final TLRPC.UserFull userFull = MessagesController.getInstance(currentAccount).getUserFull(peerId); + if (userFull == null) { + MessagesController.getInstance(currentAccount).loadUserInfo(currentUser, false, 0); + } } else { TLRPC.Chat currentChat = accountInstance.getMessagesController().getChat(-peerId); imageLocation = ImageLocation.getForUserOrChat(currentChat, ImageLocation.TYPE_BIG); @@ -7465,7 +7476,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { avatarsViewPager.setHasActiveVideo(hasAttachedRenderer); avatarsViewPager.setData(peerId, true); avatarsViewPager.setCreateThumbFromParent(true); - avatarsViewPager.initIfEmpty(imageLocation, thumbLocation, true); + avatarsViewPager.initIfEmpty(null, imageLocation, thumbLocation, true); if (scrimRenderer != null) { scrimRenderer.setShowingAsScrimView(true, true); } @@ -8366,9 +8377,9 @@ private AvatarUpdaterDelegate(long peerId) { } @Override - public void didUploadPhoto(TLRPC.InputFile photo, TLRPC.InputFile video, double videoStartTimestamp, String videoPath, TLRPC.PhotoSize bigSize, TLRPC.PhotoSize smallSize, boolean isVideo) { + public void didUploadPhoto(TLRPC.InputFile photo, TLRPC.InputFile video, double videoStartTimestamp, String videoPath, TLRPC.PhotoSize bigSize, TLRPC.PhotoSize smallSize, boolean isVideo, TLRPC.VideoSize emojiMarkup) { AndroidUtilities.runOnUIThread(() -> { - if (photo != null || video != null) { + if (photo != null || video != null || emojiMarkup != null) { if (peerId > 0) { TLRPC.TL_photos_uploadProfilePhoto req = new TLRPC.TL_photos_uploadProfilePhoto(); if (photo != null) { @@ -8381,6 +8392,10 @@ public void didUploadPhoto(TLRPC.InputFile photo, TLRPC.InputFile video, double req.video_start_ts = videoStartTimestamp; req.flags |= 4; } + if (emojiMarkup != null) { + req.video_emoji_markup = emojiMarkup; + req.flags |= 16; + } accountInstance.getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { if (uploadingImageLocation != null) { @@ -8450,7 +8465,7 @@ public void didUploadPhoto(TLRPC.InputFile photo, TLRPC.InputFile video, double thumb = thumbLocation; } 
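Aside (not part of the patch): the didUploadPhoto hunk above threads the new TLRPC.VideoSize emojiMarkup argument into TL_photos_uploadProfilePhoto by setting the field and raising a flag bit (flags |= 16), mirroring how video_start_ts already raises flags |= 4. A minimal, self-contained sketch of that optional-field/flag-bit convention follows; the class and field names are illustrative stand-ins, not the real TLRPC types.

// Hedged sketch of the "optional field + flag bit" pattern used by TL request objects.
final class OptionalFieldsSketch {
    static final int FLAG_VIDEO_START_TS = 4;   // bit used for video_start_ts in the hunk above
    static final int FLAG_EMOJI_MARKUP = 16;    // bit the patch raises for video_emoji_markup

    int flags;
    Double videoStartTs;      // stand-in for the double video_start_ts field
    Object videoEmojiMarkup;  // stand-in for TLRPC.VideoSize

    void setVideoStartTs(double ts) {
        videoStartTs = ts;
        flags |= FLAG_VIDEO_START_TS;  // mark the optional field as present
    }

    void setEmojiMarkup(Object markup) {
        if (markup == null) {
            return;                    // absent field: leave its bit unset
        }
        videoEmojiMarkup = markup;
        flags |= FLAG_EMOJI_MARKUP;
    }
}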
avatarsViewPager.setCreateThumbFromParent(false); - avatarsViewPager.initIfEmpty(imageLocation, thumb, true); + avatarsViewPager.initIfEmpty(null, imageLocation, thumb, true); avatar = null; avatarBig = null; AndroidUtilities.updateVisibleRows(listView); @@ -8462,7 +8477,7 @@ public void didUploadPhoto(TLRPC.InputFile photo, TLRPC.InputFile video, double accountInstance.getUserConfig().saveConfig(true); })); } else { - accountInstance.getMessagesController().changeChatAvatar(-peerId, null, photo, video, videoStartTimestamp, videoPath, smallSize.location, bigSize.location, () -> { + accountInstance.getMessagesController().changeChatAvatar(-peerId, null, photo, video, emojiMarkup, videoStartTimestamp, videoPath, smallSize.location, bigSize.location, () -> { if (uploadingImageLocation != null) { avatarsViewPager.removeUploadingImage(uploadingImageLocation); uploadingImageLocation = null; @@ -8478,7 +8493,7 @@ public void didUploadPhoto(TLRPC.InputFile photo, TLRPC.InputFile video, double thumb = thumbLocation; } avatarsViewPager.setCreateThumbFromParent(false); - avatarsViewPager.initIfEmpty(imageLocation, thumb, true); + avatarsViewPager.initIfEmpty(null, imageLocation, thumb, true); avatar = null; avatarBig = null; AndroidUtilities.updateVisibleRows(listView); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/GroupCreateActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/GroupCreateActivity.java index a0bc3fc5eb..ec4949ba94 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/GroupCreateActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/GroupCreateActivity.java @@ -283,8 +283,8 @@ public void addSpan(final GroupCreateSpan span) { allSpans.add(span); selectedContacts.put(span.getUid(), span); - editText.setHintVisible(false); - if (currentAnimation != null) { + editText.setHintVisible(false, TextUtils.isEmpty(editText.getText())); + if (currentAnimation != null && currentAnimation.isRunning()) { currentAnimation.setupEndValues(); currentAnimation.cancel(); } @@ -329,7 +329,7 @@ public void onAnimationEnd(Animator animator) { animationStarted = false; editText.setAllowDrawCursor(true); if (allSpans.isEmpty()) { - editText.setHintVisible(true); + editText.setHintVisible(true, true); } } }); @@ -406,7 +406,11 @@ public View createView(Context context) { allSpans.clear(); selectedContacts.clear(); currentDeletingSpan = null; - doneButtonVisible = chatType == ChatObject.CHAT_TYPE_CHANNEL; + if (chatType == ChatObject.CHAT_TYPE_CHANNEL) { + doneButtonVisible = true; + } else { + doneButtonVisible = !addToGroup; + } actionBar.setBackButtonImage(R.drawable.ic_ab_back); actionBar.setAllowOverlayTitle(true); @@ -729,9 +733,9 @@ public void afterTextChanged(Editable editable) { TLRPC.Chat chat = getMessagesController().getChat(channelId); AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity()); if (ChatObject.canAddAdmins(chat)) { - builder.setTitle(LocaleController.getString("AppName", R.string.AppName)); + builder.setTitle(LocaleController.getString("AddBotAdminAlert", R.string.AddBotAdminAlert)); builder.setMessage(LocaleController.getString("AddBotAsAdmin", R.string.AddBotAsAdmin)); - builder.setPositiveButton(LocaleController.getString("MakeAdmin", R.string.MakeAdmin), (dialogInterface, i) -> { + builder.setPositiveButton(LocaleController.getString("AddAsAdmin", R.string.AddAsAdmin), (dialogInterface, i) -> { delegate2.needAddBot(user); if (editText.length() > 0) { editText.setText(null); @@ -812,6 +816,12 @@ public void getOutline(View view, 
Outline outline) { } frameLayout.addView(floatingButton); floatingButton.setOnClickListener(v -> onDonePressed(true)); + if (!doneButtonVisible) { + floatingButton.setVisibility(View.INVISIBLE); + floatingButton.setScaleX(0.0f); + floatingButton.setScaleY(0.0f); + floatingButton.setAlpha(0.0f); + } floatingButton.setContentDescription(LocaleController.getString("Next", R.string.Next)); updateHint(); @@ -959,6 +969,9 @@ private void onAddToGroupDone(int count) { } private boolean onDonePressed(boolean alert) { + if (selectedContacts.size() == 0 && (chatType != ChatObject.CHAT_TYPE_CHANNEL && addToGroup)) { + return false; + } if (alert && addToGroup) { if (getParentActivity() == null) { return false; @@ -1006,7 +1019,6 @@ private boolean onDonePressed(boolean alert) { linearLayout.addView(cells[0], LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); cells[0].setOnClickListener(v -> cells[0].setChecked(!cells[0].isChecked(), true)); - builder.setCustomViewOffset(12); builder.setView(linearLayout); } builder.setPositiveButton(LocaleController.getString("Add", R.string.Add), (dialogInterface, i) -> onAddToGroupDone(cells[0] != null && cells[0].isChecked() ? 100 : 0)); @@ -1028,6 +1040,9 @@ private boolean onDonePressed(boolean alert) { args2.putBoolean("just_created_chat", true); presentFragment(new ChatActivity(args2), true); } else { + if (!doneButtonVisible) { + return false; + } if (addToGroup) { onAddToGroupDone(0); } else { @@ -1082,6 +1097,38 @@ private void updateHint() { } } } + if (chatType != ChatObject.CHAT_TYPE_CHANNEL && addToGroup) { + if (doneButtonVisible && allSpans.isEmpty()) { + if (currentDoneButtonAnimation != null) { + currentDoneButtonAnimation.cancel(); + } + currentDoneButtonAnimation = new AnimatorSet(); + currentDoneButtonAnimation.playTogether(ObjectAnimator.ofFloat(floatingButton, View.SCALE_X, 0.0f), + ObjectAnimator.ofFloat(floatingButton, View.SCALE_Y, 0.0f), + ObjectAnimator.ofFloat(floatingButton, View.ALPHA, 0.0f)); + currentDoneButtonAnimation.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + floatingButton.setVisibility(View.INVISIBLE); + } + }); + currentDoneButtonAnimation.setDuration(180); + currentDoneButtonAnimation.start(); + doneButtonVisible = false; + } else if (!doneButtonVisible && !allSpans.isEmpty()) { + if (currentDoneButtonAnimation != null) { + currentDoneButtonAnimation.cancel(); + } + currentDoneButtonAnimation = new AnimatorSet(); + floatingButton.setVisibility(View.VISIBLE); + currentDoneButtonAnimation.playTogether(ObjectAnimator.ofFloat(floatingButton, View.SCALE_X, 1.0f), + ObjectAnimator.ofFloat(floatingButton, View.SCALE_Y, 1.0f), + ObjectAnimator.ofFloat(floatingButton, View.ALPHA, 1.0f)); + currentDoneButtonAnimation.setDuration(180); + currentDoneButtonAnimation.start(); + doneButtonVisible = true; + } + } } public void setDelegate(GroupCreateActivityDelegate groupCreateActivityDelegate) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/GroupCreateFinalActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/GroupCreateFinalActivity.java index 945d295f7a..8fa47d32fd 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/GroupCreateFinalActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/GroupCreateFinalActivity.java @@ -67,6 +67,7 @@ import org.telegram.ui.Components.CombinedDrawable; import org.telegram.ui.Components.ContextProgressView; import org.telegram.ui.Components.EditTextEmoji; +import 
org.telegram.ui.Components.FillLastLinearLayoutManager; import org.telegram.ui.Components.ImageUpdater; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.ListView.AdapterWithDiffUtils; @@ -115,6 +116,7 @@ public class GroupCreateFinalActivity extends BaseFragment implements Notificati private TLRPC.FileLocation avatarBig; private TLRPC.InputFile inputPhoto; private TLRPC.InputFile inputVideo; + private TLRPC.VideoSize inputEmojiMarkup; private String inputVideoPath; private double videoTimestamp; private ArrayList selectedContacts; @@ -123,6 +125,7 @@ public class GroupCreateFinalActivity extends BaseFragment implements Notificati private ImageUpdater imageUpdater; private String nameToSet; private int chatType; + private boolean canToggleTopics; private RLottieDrawable cameraDrawable; @@ -135,6 +138,7 @@ public class GroupCreateFinalActivity extends BaseFragment implements Notificati private int ttlPeriod; private final static int done_button = 1; + private FillLastLinearLayoutManager linearLayoutManager; public interface GroupCreateFinalActivityDelegate { void didStartChatCreation(); @@ -152,6 +156,7 @@ public GroupCreateFinalActivity(Bundle args) { currentGroupCreateLocation = args.getParcelable("location"); forImport = args.getBoolean("forImport", false); nameToSet = args.getString("title", null); + canToggleTopics = args.getBoolean("canToggleTopics", true); } @Override @@ -159,7 +164,7 @@ public boolean onFragmentCreate() { NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.updateInterfaces); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.chatDidCreated); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.chatDidFailCreate); - imageUpdater = new ImageUpdater(true); + imageUpdater = new ImageUpdater(true, ImageUpdater.FOR_TYPE_GROUP, true); imageUpdater.parentFragment = this; imageUpdater.setDelegate(this); long[] contacts = getArguments().getLongArray("result"); @@ -452,7 +457,7 @@ public void invalidate(int l, int t, int r, int b) { super.invalidate(l, t, r, b); } }; - avatarImage.setRoundRadius(AndroidUtilities.dp(32)); + avatarImage.setRoundRadius(AndroidUtilities.dp(chatType == ChatObject.CHAT_TYPE_FORUM ? 
16 : 32)); avatarDrawable.setInfo(5, null, null); avatarImage.setImageDrawable(avatarDrawable); avatarImage.setContentDescription(LocaleController.getString("ChoosePhoto", R.string.ChoosePhoto)); @@ -464,7 +469,7 @@ public void invalidate(int l, int t, int r, int b) { avatarOverlay = new View(context) { @Override protected void onDraw(Canvas canvas) { - if (avatarImage != null && avatarProgressView.getVisibility() == VISIBLE) { + if (avatarImage != null && avatarProgressView.getVisibility() == VISIBLE && avatarImage.getImageReceiver().hasNotThumb()) { paint.setAlpha((int) (0x55 * avatarImage.getImageReceiver().getCurrentAlpha() * avatarProgressView.getAlpha())); canvas.drawCircle(getMeasuredWidth() / 2.0f, getMeasuredHeight() / 2.0f, getMeasuredWidth() / 2.0f, paint); } @@ -478,6 +483,7 @@ protected void onDraw(Canvas canvas) { inputPhoto = null; inputVideo = null; inputVideoPath = null; + inputEmojiMarkup = null; videoTimestamp = 0; showAvatarProgress(false, true); avatarImage.setImage(null, null, avatarDrawable, null); @@ -533,7 +539,7 @@ public void setAlpha(float alpha) { showAvatarProgress(false, false); editText = new EditTextEmoji(context, sizeNotifierFrameLayout, this, EditTextEmoji.STYLE_FRAGMENT, false); - editText.setHint(chatType == ChatObject.CHAT_TYPE_CHAT || chatType == ChatObject.CHAT_TYPE_MEGAGROUP ? LocaleController.getString("EnterGroupNamePlaceholder", R.string.EnterGroupNamePlaceholder) : LocaleController.getString("EnterListName", R.string.EnterListName)); + editText.setHint(chatType == ChatObject.CHAT_TYPE_CHAT || chatType == ChatObject.CHAT_TYPE_MEGAGROUP || chatType == ChatObject.CHAT_TYPE_FORUM ? LocaleController.getString("EnterGroupNamePlaceholder", R.string.EnterGroupNamePlaceholder) : LocaleController.getString("EnterListName", R.string.EnterListName)); if (nameToSet != null) { editText.setText(nameToSet); nameToSet = null; @@ -543,9 +549,10 @@ public void setAlpha(float alpha) { editText.setFilters(inputFilters); editTextContainer.addView(editText, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, LocaleController.isRTL ? 5 : 96, 0, LocaleController.isRTL ? 
96 : 5, 0)); - LinearLayoutManager linearLayoutManager = new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false); listView = new RecyclerListView(context); + linearLayoutManager = new FillLastLinearLayoutManager(context, LinearLayoutManager.VERTICAL, listView); + listView.setAdapter(adapter = new GroupCreateAdapter(context)); listView.setLayoutManager(linearLayoutManager); listView.setVerticalScrollBarEnabled(false); @@ -573,7 +580,7 @@ public void onScrollStateChanged(RecyclerView recyclerView, int newState) { }); presentFragment(fragment); } - if (view instanceof TextCell) { + if (view instanceof TextCell && chatType != ChatObject.CHAT_TYPE_FORUM) { if (popupWindow != null && popupWindow.isShowing()) { return; } @@ -689,11 +696,12 @@ public void didStartUpload(boolean isVideo) { } @Override - public void didUploadPhoto(final TLRPC.InputFile photo, final TLRPC.InputFile video, double videoStartTimestamp, String videoPath, final TLRPC.PhotoSize bigSize, final TLRPC.PhotoSize smallSize, boolean isVideo) { + public void didUploadPhoto(final TLRPC.InputFile photo, final TLRPC.InputFile video, double videoStartTimestamp, String videoPath, final TLRPC.PhotoSize bigSize, final TLRPC.PhotoSize smallSize, boolean isVideo, TLRPC.VideoSize emojiMarkup) { AndroidUtilities.runOnUIThread(() -> { - if (photo != null || video != null) { + if (photo != null || video != null || emojiMarkup != null) { inputPhoto = photo; inputVideo = video; + inputEmojiMarkup = emojiMarkup; inputVideoPath = videoPath; videoTimestamp = videoStartTimestamp; if (createAfterUpload) { @@ -857,8 +865,8 @@ public void didReceivedNotification(int id, int account, Object... args) { args2.putBoolean("just_created_chat", true); presentFragment(new ChatActivity(args2), true); } - if (inputPhoto != null || inputVideo != null) { - getMessagesController().changeChatAvatar(chatId, null, inputPhoto, inputVideo, videoTimestamp, inputVideoPath, avatar, avatarBig, null); + if (inputPhoto != null || inputVideo != null || inputEmojiMarkup != null) { + getMessagesController().changeChatAvatar(chatId, null, inputPhoto, inputVideo, inputEmojiMarkup, videoTimestamp, inputVideoPath, avatar, avatarBig, null); } } } @@ -926,6 +934,8 @@ public class GroupCreateAdapter extends RecyclerListView.SelectionAdapter { private final static int VIEW_TYPE_TEXT_SETTINGS = 3; private final static int VIEW_TYPE_AUTO_DELETE = 4; private final static int VIEW_TYPE_TEXT_INFO_CELL = 5; + private final static int VIEW_TYPE_TOPICS = 6; + private final static int VIEW_TYPE_LAST_EMPTY_VIEW = 7; ArrayList items = new ArrayList<>(); @@ -937,18 +947,26 @@ public GroupCreateAdapter(Context ctx) { public void notifyDataSetChanged() { items.clear(); items.add(new InnerItem(VIEW_TYPE_SHADOW_SECTION_CELL)); - items.add(new InnerItem(VIEW_TYPE_AUTO_DELETE)); - items.add(new InnerItem(VIEW_TYPE_TEXT_INFO_CELL, LocaleController.getString("GroupCreateAutodeleteDescription", R.string.GroupCreateAutodeleteDescription))); + if (chatType == ChatObject.CHAT_TYPE_FORUM) { + items.add(new InnerItem(VIEW_TYPE_TOPICS)); + items.add(new InnerItem(VIEW_TYPE_TEXT_INFO_CELL, LocaleController.getString("ForumToggleDescription", R.string.ForumToggleDescription))); + } else { + items.add(new InnerItem(VIEW_TYPE_AUTO_DELETE)); + items.add(new InnerItem(VIEW_TYPE_TEXT_INFO_CELL, LocaleController.getString("GroupCreateAutodeleteDescription", R.string.GroupCreateAutodeleteDescription))); + } if (currentGroupCreateAddress != null) { items.add(new InnerItem(VIEW_TYPE_HEADER_CELL)); 
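Aside (not part of the patch): the GroupCreateAdapter change that continues below rebuilds a flat list of typed items on every notifyDataSetChanged() (topics toggle or auto-delete row, optional contacts header, one user cell per contact, and a trailing empty filler row consumed by FillLastLinearLayoutManager) and then dispatches on each item's view type. A minimal sketch of that item-list pattern, with illustrative names only:

import java.util.ArrayList;
import java.util.List;

// Hedged sketch: build an ordered, typed item list once per data change, then map types to views.
final class ItemListSketch {
    static final int TYPE_SECTION = 0;
    static final int TYPE_TOGGLE = 1;
    static final int TYPE_USER = 2;
    static final int TYPE_LAST_EMPTY = 3;

    static final class Item {
        final int viewType;
        final String payload;
        Item(int viewType, String payload) {
            this.viewType = viewType;
            this.payload = payload;
        }
    }

    static List<Item> buildItems(boolean isForum, List<String> selectedContacts) {
        List<Item> items = new ArrayList<>();
        items.add(new Item(TYPE_SECTION, null));
        items.add(new Item(TYPE_TOGGLE, isForum ? "topics" : "auto-delete"));
        if (!selectedContacts.isEmpty()) {
            for (String contact : selectedContacts) {
                items.add(new Item(TYPE_USER, contact));
            }
        }
        items.add(new Item(TYPE_LAST_EMPTY, null)); // filler row so the list fills the screen
        return items;
    }
}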
items.add(new InnerItem(VIEW_TYPE_TEXT_SETTINGS)); items.add(new InnerItem(VIEW_TYPE_SHADOW_SECTION_CELL)); } - items.add(new InnerItem(VIEW_TYPE_HEADER_CELL)); - usersStartRow = items.size(); - for (int i = 0; i < selectedContacts.size(); i++) { - items.add(new InnerItem(VIEW_TYPE_USER_CELL)); + if (selectedContacts.size() > 0) { + items.add(new InnerItem(VIEW_TYPE_HEADER_CELL)); + usersStartRow = items.size(); + for (int i = 0; i < selectedContacts.size(); i++) { + items.add(new InnerItem(VIEW_TYPE_USER_CELL)); + } } + items.add(new InnerItem(VIEW_TYPE_LAST_EMPTY_VIEW)); super.notifyDataSetChanged(); } @@ -960,7 +978,7 @@ public int getItemCount() { @Override public boolean isEnabled(RecyclerView.ViewHolder holder) { - return holder.getItemViewType() == VIEW_TYPE_TEXT_SETTINGS || holder.getItemViewType() == VIEW_TYPE_AUTO_DELETE; + return holder.getItemViewType() == VIEW_TYPE_TEXT_SETTINGS || holder.getItemViewType() == VIEW_TYPE_AUTO_DELETE || holder.getItemViewType() == VIEW_TYPE_TOPICS && canToggleTopics; } @Override @@ -988,15 +1006,24 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType break; case VIEW_TYPE_TEXT_INFO_CELL: view = new TextInfoPrivacyCell(context); - Drawable drawable = Theme.getThemedDrawable(context, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow); + Drawable drawable = Theme.getThemedDrawable(context, selectedContacts.size() == 0 ? R.drawable.greydivider_bottom : R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow); CombinedDrawable combinedDrawable = new CombinedDrawable(new ColorDrawable(Theme.getColor(Theme.key_windowBackgroundGray)), drawable); combinedDrawable.setFullsize(true); view.setBackgroundDrawable(combinedDrawable); break; + case VIEW_TYPE_TOPICS: + view = new TextCell(context, 23, false, true, getResourceProvider()); + break; case 3: default: view = new TextSettingsCell(context); break; + case VIEW_TYPE_LAST_EMPTY_VIEW: + view = new View(context); + if (selectedContacts.isEmpty()) { + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundGray)); + } + break; } return new RecyclerListView.Holder(view); } @@ -1036,6 +1063,12 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { textCell.setTextAndValueAndIcon(LocaleController.getString("AutoDeleteMessages", R.string.AutoDeleteMessages), value, fragmentBeginToShow, R.drawable.msg_autodelete, false); break; } + case VIEW_TYPE_TOPICS: { + TextCell textCell = (TextCell) holder.itemView; + textCell.setTextAndCheckAndIcon(LocaleController.getString("ChannelTopics", R.string.ChannelTopics), true, R.drawable.msg_topics, false); + textCell.getCheckBox().setAlpha(.75f); + break; + } case VIEW_TYPE_TEXT_INFO_CELL: TextInfoPrivacyCell textInfoPrivacyCell = (TextInfoPrivacyCell) holder.itemView; textInfoPrivacyCell.setText(items.get(position).string); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/InviteContactsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/InviteContactsActivity.java index 47c789f12c..6357380b6a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/InviteContactsActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/InviteContactsActivity.java @@ -17,12 +17,9 @@ import android.graphics.Canvas; import android.graphics.Rect; import android.net.Uri; -import androidx.annotation.Keep; -import androidx.recyclerview.widget.LinearLayoutManager; -import androidx.recyclerview.widget.RecyclerView; - import android.text.Editable; import android.text.InputType; +import 
android.text.TextUtils; import android.text.TextWatcher; import android.util.TypedValue; import android.view.ActionMode; @@ -39,6 +36,10 @@ import android.widget.ScrollView; import android.widget.TextView; +import androidx.annotation.Keep; +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.RecyclerView; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ContactsController; import org.telegram.messenger.FileLog; @@ -210,7 +211,7 @@ public void addSpan(final GroupCreateSpan span) { allSpans.add(span); selectedContacts.put(span.getKey(), span); - editText.setHintVisible(false); + editText.setHintVisible(false, TextUtils.isEmpty(editText.getText())); if (currentAnimation != null) { currentAnimation.setupEndValues(); currentAnimation.cancel(); @@ -241,7 +242,7 @@ public void removeSpan(final GroupCreateSpan span) { allSpans.remove(span); span.setOnClickListener(null); - if (currentAnimation != null) { + if (currentAnimation != null && currentAnimation.isRunning()) { currentAnimation.setupEndValues(); currentAnimation.cancel(); } @@ -256,7 +257,7 @@ public void onAnimationEnd(Animator animator) { animationStarted = false; editText.setAllowDrawCursor(true); if (allSpans.isEmpty()) { - editText.setHintVisible(true); + editText.setHintVisible(true, true); } } }); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/KeepMediaPopupView.java b/TMessagesProj/src/main/java/org/telegram/ui/KeepMediaPopupView.java index 2abe5c21d5..d4a6395f35 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/KeepMediaPopupView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/KeepMediaPopupView.java @@ -90,15 +90,15 @@ public KeepMediaPopupView(BaseFragment baseFragment, Context context) { args.putBoolean("onlySelect", true); args.putBoolean("checkCanWrite", false); if (currentType == CacheControlActivity.KEEP_MEDIA_TYPE_GROUP) { - args.putInt("dialogsType", 6); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_GROUPS_ONLY); } else if (currentType == CacheControlActivity.KEEP_MEDIA_TYPE_CHANNEL) { - args.putInt("dialogsType", 5); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_CHANNELS_ONLY); } else { - args.putInt("dialogsType", 4); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_USERS_ONLY); } args.putBoolean("allowGlobalSearch", false); DialogsActivity activity = new DialogsActivity(args); - activity.setDelegate((fragment, dids, message, param) -> { + activity.setDelegate((fragment, dids, message, param, topicsFragment) -> { CacheByChatsController.KeepMediaException newException = null; for (int i = 0; i < dids.size(); i++) { exceptions.add(newException = new CacheByChatsController.KeepMediaException(dids.get(i).dialogId, CacheByChatsController.KEEP_MEDIA_ONE_DAY)); @@ -120,6 +120,7 @@ public void onTransitionAnimationEnd(boolean isOpen, boolean backward) { cacheChatsExceptionsFragment.setExceptions(exceptions); parentFragment.presentFragment(cacheChatsExceptionsFragment); AndroidUtilities.runOnUIThread(() -> cacheChatsExceptionsFragment.showPopupFor(finalNewException), 150); + return true; }); baseFragment.presentFragment(activity); } else { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LNavigation/LNavigation.java b/TMessagesProj/src/main/java/org/telegram/ui/LNavigation/LNavigation.java deleted file mode 100644 index 1502fa9c94..0000000000 --- a/TMessagesProj/src/main/java/org/telegram/ui/LNavigation/LNavigation.java +++ /dev/null @@ -1,2369 +0,0 @@ -package org.telegram.ui.LNavigation; - 
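Aside (not part of the patch): the LNavigation class whose deletion starts in this hunk drove its swipe-back gesture by animating a 0..1 progress value with androidx dynamic-animation springs (see its animateReset() further below, which scales the value by SPRING_MULTIPLIER = 1000 and defaults to stiffness 1000 with damping ratio 1). A minimal sketch of that pattern using the same androidx API; the listener interface and method names here are placeholders:

import androidx.dynamicanimation.animation.FloatValueHolder;
import androidx.dynamicanimation.animation.SpringAnimation;
import androidx.dynamicanimation.animation.SpringForce;

// Hedged sketch: drive a [0..1] progress value with a spring, as the deleted class did.
final class SwipeProgressSpringSketch {
    private static final float MULTIPLIER = 1000f; // work in a larger range, like SPRING_MULTIPLIER

    interface ProgressListener {
        void onProgress(float progress); // called on every spring frame
        void onSettled();                // called once the spring comes to rest
    }

    static SpringAnimation animateTo(float fromProgress, float toProgress, ProgressListener listener) {
        SpringAnimation spring = new SpringAnimation(new FloatValueHolder(fromProgress * MULTIPLIER))
                .setSpring(new SpringForce(toProgress * MULTIPLIER)
                        .setStiffness(1000f)    // default the deleted class used (tunable via its debug slider)
                        .setDampingRatio(1f));  // critically damped: settles without overshoot
        spring.addUpdateListener((animation, value, velocity) -> listener.onProgress(value / MULTIPLIER));
        spring.addEndListener((animation, canceled, value, velocity) -> {
            if (!canceled) {
                listener.onSettled();
            }
        });
        spring.start(); // must be called on the main thread
        return spring;
    }
}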
-import android.animation.Animator; -import android.animation.AnimatorListenerAdapter; -import android.animation.AnimatorSet; -import android.animation.ValueAnimator; -import android.annotation.SuppressLint; -import android.app.Activity; -import android.content.Context; -import android.content.Intent; -import android.graphics.Bitmap; -import android.graphics.Canvas; -import android.graphics.Color; -import android.graphics.Paint; -import android.graphics.Path; -import android.graphics.Rect; -import android.graphics.drawable.ColorDrawable; -import android.graphics.drawable.Drawable; -import android.os.Build; -import android.util.AttributeSet; -import android.view.GestureDetector; -import android.view.Gravity; -import android.view.HapticFeedbackConstants; -import android.view.KeyEvent; -import android.view.Menu; -import android.view.MotionEvent; -import android.view.View; -import android.view.ViewConfiguration; -import android.view.ViewGroup; -import android.widget.FrameLayout; -import android.widget.LinearLayout; -import android.widget.TextView; - -import androidx.annotation.NonNull; -import androidx.annotation.Nullable; -import androidx.core.graphics.ColorUtils; -import androidx.core.math.MathUtils; -import androidx.core.view.GestureDetectorCompat; -import androidx.dynamicanimation.animation.FloatValueHolder; -import androidx.dynamicanimation.animation.SpringAnimation; -import androidx.dynamicanimation.animation.SpringForce; -import androidx.viewpager.widget.ViewPager; - -import org.telegram.messenger.AndroidUtilities; -import org.telegram.messenger.ImageLoader; -import org.telegram.messenger.LocaleController; -import org.telegram.messenger.MessagesController; -import org.telegram.messenger.R; -import org.telegram.messenger.Utilities; -import org.telegram.ui.ActionBar.ActionBar; -import org.telegram.ui.ActionBar.ActionBarMenuSubItem; -import org.telegram.ui.ActionBar.ActionBarPopupWindow; -import org.telegram.ui.ActionBar.AlertDialog; -import org.telegram.ui.ActionBar.BaseFragment; -import org.telegram.ui.ActionBar.BottomSheet; -import org.telegram.ui.ActionBar.DrawerLayoutContainer; -import org.telegram.ui.ActionBar.INavigationLayout; -import org.telegram.ui.ActionBar.MenuDrawable; -import org.telegram.ui.ActionBar.Theme; -import org.telegram.ui.ActionBar.ThemeDescription; -import org.telegram.ui.Cells.CheckBoxCell; -import org.telegram.ui.Components.BackButtonMenu; -import org.telegram.ui.Components.FloatingDebug.FloatingDebugController; -import org.telegram.ui.Components.FloatingDebug.FloatingDebugProvider; -import org.telegram.ui.Components.GroupCallPip; -import org.telegram.ui.Components.LayoutHelper; -import org.telegram.ui.Components.SeekBarView; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Locale; - -public class LNavigation extends FrameLayout implements INavigationLayout, FloatingDebugProvider { - private final static boolean ALLOW_OPEN_STIFFNESS_CONTROL = false; - private final static boolean USE_ACTIONBAR_CROSSFADE = false; - private static float SPRING_STIFFNESS = 1000f; - private static float SPRING_DAMPING_RATIO = 1f; - private final static float SPRING_STIFFNESS_PREVIEW = 650f; - private final static float SPRING_STIFFNESS_PREVIEW_OUT = 800f; - private final static float SPRING_STIFFNESS_PREVIEW_EXPAND = 750f; - private final static float SPRING_MULTIPLIER = 1000f; - private List pulledDialogs = new ArrayList<>(); - - /** - * Temp rect to calculate if it's ignored view - */ - private Rect ignoreRect = new Rect(); - - 
/** - * Temp path for clipping - */ - private Path path = new Path(); - - /** - * Darker paint - */ - private Paint dimmPaint = new Paint(Paint.ANTI_ALIAS_FLAG); - - /** - * Flag if we should remove extra height for action bar - */ - private boolean removeActionBarExtraHeight; - - /** - * Current fragment stack - */ - private List fragmentStack = new ArrayList<>(); - - /** - * Unmodifiable fragment stack for {@link LNavigation#getFragmentStack()} - */ - private List unmodifiableFragmentStack = Collections.unmodifiableList(fragmentStack); - - /** - * Delegate for this view - */ - private INavigationLayoutDelegate delegate; - - /** - * A listener when fragment stack is being changed - */ - private Runnable onFragmentStackChangedListener; - - /** - * Drawer layout container (For the swipe-back-to-drawer feature) - */ - private DrawerLayoutContainer drawerLayoutContainer; - - /** - * Currently running spring animation - */ - private SpringAnimation currentSpringAnimation; - - /** - * Overlay layout for containers like shared ActionBar - */ - private FrameLayout overlayLayout; - - /** - * Current swipe progress - */ - private float swipeProgress; - - /** - * Start scroll offset - */ - private float startScroll; - - /** - * Header shadow drawable - */ - private Drawable headerShadowDrawable; - - /** - * Front view shadow drawable - */ - private Drawable layerShadowDrawable; - - /** - * Gesture detector for scroll - */ - private GestureDetectorCompat gestureDetector; - - /** - * If there's currently scroll in progress - */ - private boolean isSwipeInProgress; - - /** - * If swipe back should be disallowed - */ - private boolean isSwipeDisallowed; - - /** - * If set, should be canceled if trying to open another fragment - */ - private Runnable delayedPresentAnimation; - - /** - * If navigation is used in bubble mode - */ - private boolean isInBubbleMode; - - /** - * If device is currently showing action mode over our ActionBar - */ - private boolean isInActionMode; - - /** - * If menu buttons in preview should be highlighted - */ - private boolean highlightActionButtons = false; - - /** - * Custom animation in progress - */ - private AnimatorSet customAnimation; - - /** - * Preview fragment's menu - */ - private ActionBarPopupWindow.ActionBarPopupWindowLayout previewMenu; - - /** - * A blurred snapshot of background fragment - */ - private Bitmap blurredBackFragmentForPreview; - - /** - * Snapshot of a small preview fragment - */ - private Bitmap previewFragmentSnapshot; - - /** - * Bounds of small preview fragment - */ - private Rect previewFragmentRect = new Rect(); - - /** - * Preview expand progress - */ - private float previewExpandProgress; - - /** - * Paint for blurred snapshot - */ - private Paint blurPaint = new Paint(Paint.DITHER_FLAG | Paint.ANTI_ALIAS_FLAG); - - /** - * Back button drawable - */ - private MenuDrawable menuDrawable = new MenuDrawable(MenuDrawable.TYPE_DEFAULT); - - /** - * View that captured current touch input - */ - private View touchCapturedView; - - /** - * Flag if layout was portrait - */ - private boolean wasPortrait; - - /** - * Callback after preview fragment is opened - */ - private Runnable previewOpenCallback; - - /** - * Flag if navigation view should disappear when last fragment closes - */ - private boolean useAlphaAnimations; - - /** - * Background view for tablets - */ - private View backgroundView; - - /** - * Flag that indicates that user can press button of the preview menu - */ - private boolean allowToPressByHover; - - /** - * Flag if menu hover 
should be allowed (Only first time opening preview) - */ - private boolean isFirstHoverAllowed; - - // TODO: Split theme logic to another component - private ValueAnimator themeAnimator; - private StartColorsProvider startColorsProvider = new StartColorsProvider(); - private Theme.MessageDrawable messageDrawableOutStart; - private Theme.MessageDrawable messageDrawableOutMediaStart; - private ThemeAnimationSettings.onAnimationProgress animationProgressListener; - private ArrayList themeAnimatorDelegate = new ArrayList<>(); - private ArrayList presentingFragmentDescriptions; - - private float themeAnimationValue; - private ArrayList> themeAnimatorDescriptions = new ArrayList<>(); - private ArrayList animateStartColors = new ArrayList<>(); - private ArrayList animateEndColors = new ArrayList<>(); - - private int fromBackgroundColor; - - private LinearLayout stiffnessControl; - private CheckBoxCell openChatCheckbox; - - private String titleOverlayTitle; - private int titleOverlayTitleId; - private Runnable titleOverlayAction; - - public LNavigation(@NonNull Context context) { - this(context, null); - } - - public LNavigation(@NonNull Context context, @Nullable AttributeSet attrs) { - super(context, attrs); - - overlayLayout = new FrameLayout(context); - addView(overlayLayout); - - headerShadowDrawable = getResources().getDrawable(R.drawable.header_shadow).mutate(); - layerShadowDrawable = getResources().getDrawable(R.drawable.layer_shadow).mutate(); - - dimmPaint.setColor(0x7a000000); - setWillNotDraw(false); - - menuDrawable.setRoundCap(); - - int touchSlop = ViewConfiguration.get(context).getScaledTouchSlop(); - gestureDetector = new GestureDetectorCompat(context, new GestureDetector.SimpleOnGestureListener() { - @Override - public boolean onDown(MotionEvent e) { - return true; - } - - @Override - public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) { - if (highlightActionButtons && !allowToPressByHover && isFirstHoverAllowed && isInPreviewMode() && (Math.abs(distanceX) >= touchSlop || Math.abs(distanceY) >= touchSlop) && !isSwipeInProgress && previewMenu != null) { - allowToPressByHover = true; - } - - if (allowToPressByHover && previewMenu != null && (previewMenu.getSwipeBack() == null || previewMenu.getSwipeBack().isForegroundOpen())) { - for (int i = 0; i < previewMenu.getItemsCount(); ++i) { - ActionBarMenuSubItem button = (ActionBarMenuSubItem) previewMenu.getItemAt(i); - if (button != null) { - Drawable ripple = button.getBackground(); - button.getGlobalVisibleRect(AndroidUtilities.rectTmp2); - boolean shouldBeEnabled = AndroidUtilities.rectTmp2.contains((int) e2.getX(), (int) e2.getY()), enabled = ripple.getState().length == 2; - if (shouldBeEnabled != enabled) { - ripple.setState(shouldBeEnabled ? 
new int[]{android.R.attr.state_pressed, android.R.attr.state_enabled} : new int[]{}); - if (shouldBeEnabled && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) { - try { - button.performHapticFeedback(HapticFeedbackConstants.TEXT_HANDLE_MOVE, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); - } catch (Exception ignore) {} - } - } - } - } - } - - if (!isSwipeInProgress && !isSwipeDisallowed) { - if (Math.abs(distanceX) >= Math.abs(distanceY) * 1.5f && distanceX <= -touchSlop && !isIgnoredView(getForegroundView(), e2, ignoreRect) && - getLastFragment() != null && getLastFragment().canBeginSlide() && getLastFragment().isSwipeBackEnabled(e2) && fragmentStack.size() >= 2 && !isInActionMode && - !isInPreviewMode()) { - isSwipeInProgress = true; - - startScroll = swipeProgress - MathUtils.clamp((e2.getX() - e1.getX()) / getWidth(), 0, 1); - - if (getParentActivity().getCurrentFocus() != null) { - AndroidUtilities.hideKeyboard(getParentActivity().getCurrentFocus()); - } - - if (getBackgroundView() != null) { - getBackgroundView().setVisibility(VISIBLE); - } - getLastFragment().prepareFragmentToSlide(true, true); - getLastFragment().onBeginSlide(); - BaseFragment bgFragment = getBackgroundFragment(); - if (bgFragment != null) { - bgFragment.setPaused(false); - bgFragment.prepareFragmentToSlide(false, true); - bgFragment.onBeginSlide(); - } - - MotionEvent e = MotionEvent.obtain(0, 0, MotionEvent.ACTION_CANCEL, 0, 0, 0); - for (int i = 0; i < getChildCount(); i++) { - getChildAt(i).dispatchTouchEvent(e); - } - e.recycle(); - - invalidateActionBars(); - } else { - isSwipeDisallowed = true; - } - } - - if (isSwipeInProgress) { - swipeProgress = MathUtils.clamp(startScroll + (e2.getX() - e1.getX()) / getWidth(), 0, 1); - invalidateTranslation(); - } - return isSwipeInProgress; - } - - @Override - public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) { - if (isSwipeInProgress) { - if (velocityX >= 800) { - closeLastFragment(true, false, velocityX / 15f); - clearTouchFlags(); - return true; - } - } - return false; - } - }); - gestureDetector.setIsLongpressEnabled(false); - - stiffnessControl = new LinearLayout(context); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - stiffnessControl.setElevation(AndroidUtilities.dp(12)); - } - stiffnessControl.setOrientation(LinearLayout.VERTICAL); - stiffnessControl.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); - - TextView titleView = new TextView(context); - titleView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); - titleView.setGravity(Gravity.CENTER); - titleView.setText(String.format(Locale.ROOT, "Stiffness: %f", SPRING_STIFFNESS)); - stiffnessControl.addView(titleView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 36)); - - SeekBarView seekBarView = new SeekBarView(context); - seekBarView.setReportChanges(true); - seekBarView.setDelegate(new SeekBarView.SeekBarViewDelegate() { - @Override - public void onSeekBarDrag(boolean stop, float progress) { - titleView.setText(String.format(Locale.ROOT, "Stiffness: %f", 500f + progress * 1000f)); - if (stop) { - SPRING_STIFFNESS = 500f + progress * 1000f; - } - } - - @Override - public void onSeekBarPressed(boolean pressed) { - - } - }); - seekBarView.setProgress((SPRING_STIFFNESS - 500f) / 1000f); - stiffnessControl.addView(seekBarView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 38)); - - TextView dampingTitle = new TextView(context); - 
dampingTitle.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); - dampingTitle.setGravity(Gravity.CENTER); - dampingTitle.setText(String.format(Locale.ROOT, "Damping ratio: %f", SPRING_DAMPING_RATIO)); - stiffnessControl.addView(dampingTitle, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 36)); - - seekBarView = new SeekBarView(context); - seekBarView.setReportChanges(true); - seekBarView.setDelegate(new SeekBarView.SeekBarViewDelegate() { - @Override - public void onSeekBarDrag(boolean stop, float progress) { - dampingTitle.setText(String.format(Locale.ROOT, "Damping ratio: %f", 0.2f + progress * 0.8f)); - if (stop) { - SPRING_DAMPING_RATIO = 0.2f + progress * 0.8f; - } - } - - @Override - public void onSeekBarPressed(boolean pressed) { - - } - }); - seekBarView.setProgress((SPRING_DAMPING_RATIO - 0.2f) / 0.8f); - stiffnessControl.addView(seekBarView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 38)); - - openChatCheckbox = new CheckBoxCell(context, 1); - openChatCheckbox.setText("Show chat open measurement", null, false, false); - openChatCheckbox.setOnClickListener(v -> openChatCheckbox.setChecked(!openChatCheckbox.isChecked(), true)); - stiffnessControl.addView(openChatCheckbox, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 36)); - - stiffnessControl.setVisibility(GONE); - overlayLayout.addView(stiffnessControl, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM)); - } - - @Override - public void addView(View child, int index, ViewGroup.LayoutParams params) { - if (getChildCount() >= 3) { - throw new IllegalStateException("LNavigation must have no more than 3 child views!"); - } - - super.addView(child, index, params); - } - - public boolean doShowOpenChat() { - return openChatCheckbox.isChecked(); - } - - public LinearLayout getStiffnessControl() { - return stiffnessControl; - } - - @Override - public void requestDisallowInterceptTouchEvent(boolean disallowIntercept) { - super.requestDisallowInterceptTouchEvent(disallowIntercept); - - if (disallowIntercept && isSwipeInProgress) { - isSwipeInProgress = false; - onReleaseTouch(); - } - isSwipeDisallowed = disallowIntercept; - } - - private void animateReset() { - BaseFragment fragment = getLastFragment(); - BaseFragment bgFragment = getBackgroundFragment(); - if (fragment == null) { - return; - } - - fragment.onTransitionAnimationStart(true, true); - - FloatValueHolder valueHolder = new FloatValueHolder(swipeProgress * SPRING_MULTIPLIER); - currentSpringAnimation = new SpringAnimation(valueHolder) - .setSpring(new SpringForce(0f) - .setStiffness(SPRING_STIFFNESS) - .setDampingRatio(SPRING_DAMPING_RATIO)); - currentSpringAnimation.addUpdateListener((animation, value, velocity) -> { - swipeProgress = value / SPRING_MULTIPLIER; - invalidateTranslation(); - fragment.onTransitionAnimationProgress(true, 1f - swipeProgress); - }); - Runnable onEnd = ()->{ - fragment.onTransitionAnimationEnd(true, true); - fragment.prepareFragmentToSlide(true, false); - - swipeProgress = 0f; - invalidateTranslation(); - if (getBackgroundView() != null) { - getBackgroundView().setVisibility(GONE); - } - - fragment.onBecomeFullyVisible(); - if (bgFragment != null) { - bgFragment.setPaused(true); - bgFragment.onBecomeFullyHidden(); - bgFragment.prepareFragmentToSlide(false, false); - } - - currentSpringAnimation = null; - invalidateActionBars(); - }; - currentSpringAnimation.addEndListener((animation, canceled, value, velocity) -> { - if (animation == currentSpringAnimation) { - 
onEnd.run(); - } - }); - if (swipeProgress != 0f) { - currentSpringAnimation.start(); - } else { - onEnd.run(); - } - } - - private void invalidateActionBars() { - if (getLastFragment() != null && getLastFragment().getActionBar() != null) { - getLastFragment().getActionBar().invalidate(); - } - if (getBackgroundFragment() != null && getBackgroundFragment().getActionBar() != null) { - getBackgroundFragment().getActionBar().invalidate(); - } - } - - private boolean processTouchEvent(MotionEvent ev) { - int act = ev.getActionMasked(); - if (isTransitionAnimationInProgress()) { - return true; - } - - if (!gestureDetector.onTouchEvent(ev)) { - switch (act) { - case MotionEvent.ACTION_DOWN: - break; - case MotionEvent.ACTION_CANCEL: - case MotionEvent.ACTION_UP: - if (isFirstHoverAllowed && !allowToPressByHover) { - clearTouchFlags(); - } else if (allowToPressByHover && previewMenu != null) { - for (int i = 0; i < previewMenu.getItemsCount(); ++i) { - ActionBarMenuSubItem button = (ActionBarMenuSubItem) previewMenu.getItemAt(i); - if (button != null) { - button.getGlobalVisibleRect(AndroidUtilities.rectTmp2); - boolean shouldBeEnabled = AndroidUtilities.rectTmp2.contains((int) ev.getX(), (int) ev.getY()); - if (shouldBeEnabled) { - button.performClick(); - } - } - } - - clearTouchFlags(); - } else if (isSwipeInProgress) { - clearTouchFlags(); - onReleaseTouch(); - } else if (isSwipeDisallowed) { - clearTouchFlags(); - } - return false; - } - } - return isSwipeInProgress; - } - - private void onReleaseTouch() { - if (swipeProgress < 0.5f) { - animateReset(); - } else { - closeLastFragment(true, false); - } - } - - private void clearTouchFlags() { - isSwipeDisallowed = false; - isSwipeInProgress = false; - allowToPressByHover = false; - isFirstHoverAllowed = false; - } - - @SuppressLint("ClickableViewAccessibility") - @Override - public boolean onTouchEvent(MotionEvent event) { - processTouchEvent(event); - - return true; - } - - @Override - public boolean dispatchTouchEvent(MotionEvent ev) { - if (processTouchEvent(ev) && touchCapturedView == null) { - return true; - } - - if (getChildCount() < 1) { - return false; - } - - if (getForegroundView() != null) { - View capturedView = touchCapturedView; - View fg = getForegroundView(); - ev.offsetLocation(-getPaddingLeft(), -getPaddingTop()); - boolean overlay = overlayLayout.dispatchTouchEvent(ev) || capturedView == overlayLayout; - if (overlay) { - if (ev.getAction() == MotionEvent.ACTION_DOWN) { - touchCapturedView = overlayLayout; - - MotionEvent e = MotionEvent.obtain(0, 0, MotionEvent.ACTION_CANCEL, 0, 0, 0); - for (int i = 0; i < getChildCount() - 1; i++) { - getChildAt(i).dispatchTouchEvent(e); - } - e.recycle(); - } - } - if (ev.getAction() == MotionEvent.ACTION_UP || ev.getAction() == MotionEvent.ACTION_CANCEL) { - touchCapturedView = null; - } - if (overlay) { - return true; - } - if (capturedView != null) { - return capturedView.dispatchTouchEvent(ev) || ev.getActionMasked() == MotionEvent.ACTION_DOWN; - } - - boolean foreground = fg.dispatchTouchEvent(ev); - if (foreground) { - if (ev.getAction() == MotionEvent.ACTION_DOWN) { - touchCapturedView = fg; - } - } - return foreground || ev.getActionMasked() == MotionEvent.ACTION_DOWN; - } - return super.dispatchTouchEvent(ev); - } - - @Override - public boolean hasIntegratedBlurInPreview() { - return true; - } - - @Override - public boolean presentFragment(NavigationParams params) { - BaseFragment fragment = params.fragment; - if (!params.isFromDelay && (fragment == null || 
checkTransitionAnimation() || delegate != null && params.checkPresentFromDelegate && - !delegate.needPresentFragment(this, params) || !fragment.onFragmentCreate() || delayedPresentAnimation != null)) { - return false; - } - - if (!fragmentStack.isEmpty() && getChildCount() < 2) { - rebuildFragments(REBUILD_FLAG_REBUILD_LAST); - } - - if (getParentActivity().getCurrentFocus() != null) { - AndroidUtilities.hideKeyboard(getParentActivity().getCurrentFocus()); - } - - if (!params.isFromDelay) { - fragment.setInPreviewMode(params.preview); - if (previewMenu != null) { - if (previewMenu.getParent() != null) { - ((ViewGroup) previewMenu.getParent()).removeView(previewMenu); - } - } - previewMenu = params.menuView; - fragment.setInMenuMode(previewMenu != null); - fragment.setParentLayout(this); - } - boolean animate = params.preview || MessagesController.getGlobalMainSettings().getBoolean("view_animations", true) && - !params.noAnimation && (useAlphaAnimations || fragmentStack.size() >= 1); - - BaseFragment prevFragment = params.isFromDelay ? getBackgroundFragment() : getLastFragment(); - Runnable onFragmentOpened = ()->{ - if (params.removeLast && prevFragment != null) { - removeFragmentFromStack(prevFragment); - } - invalidateActionBars(); - }; - if (animate) { - if (!params.isFromDelay) { - if (params.preview) { - View bgView = getForegroundView(); - if (bgView != null) { - float scaleFactor = 8; - int w = (int) (bgView.getMeasuredWidth() / scaleFactor); - int h = (int) (bgView.getMeasuredHeight() / scaleFactor); - Bitmap bitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888); - Canvas canvas = new Canvas(bitmap); - canvas.scale(1.0f / scaleFactor, 1.0f / scaleFactor); - canvas.drawColor(Theme.getColor(Theme.key_windowBackgroundWhite)); - bgView.draw(canvas); - Utilities.stackBlurBitmap(bitmap, Math.max(8, Math.max(w, h) / 150)); - blurredBackFragmentForPreview = bitmap; - } - - if (getParent() != null) { - getParent().requestDisallowInterceptTouchEvent(true); - } - isFirstHoverAllowed = true; - } - - FragmentHolderView holderView = onCreateHolderView(fragment); - if (params.preview) { - MarginLayoutParams layoutParams = (MarginLayoutParams) holderView.getLayoutParams(); - layoutParams.leftMargin = layoutParams.topMargin = layoutParams.rightMargin = layoutParams.bottomMargin = AndroidUtilities.dp(8); - - if (previewMenu != null) { - previewMenu.measure(MeasureSpec.makeMeasureSpec(getWidth(), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec((int) (getHeight() * 0.5f), MeasureSpec.AT_MOST)); - layoutParams = (MarginLayoutParams) fragment.getFragmentView().getLayoutParams(); - layoutParams.bottomMargin += AndroidUtilities.dp(8) + previewMenu.getMeasuredHeight(); - - if (LocaleController.isRTL) { - previewMenu.setTranslationX(getWidth() - previewMenu.getMeasuredWidth() - AndroidUtilities.dp(8)); - } else { - previewMenu.setTranslationX(-AndroidUtilities.dp(8)); - } - previewMenu.setTranslationY(getHeight() - AndroidUtilities.dp(24) - previewMenu.getMeasuredHeight()); - holderView.addView(previewMenu, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, (LocaleController.isRTL ? 
Gravity.RIGHT : Gravity.LEFT), 0, 0, 0, 8)); - } else { - layoutParams.topMargin += AndroidUtilities.dp(52); - } - - holderView.setOnClickListener(v -> finishPreviewFragment()); - } - addView(holderView, getChildCount() - 1); - fragmentStack.add(fragment); - notifyFragmentStackChanged(); - fragment.setPaused(false); - - swipeProgress = 1f; - invalidateTranslation(); - } - - if (fragment.needDelayOpenAnimation() && !params.delayDone) { - AndroidUtilities.runOnUIThread(delayedPresentAnimation = ()->{ - delayedPresentAnimation = null; - - params.isFromDelay = true; - params.delayDone = true; - presentFragment(params); - }, 200); - return true; - } - - fragment.onTransitionAnimationStart(true, false); - if (prevFragment != null) { - prevFragment.onTransitionAnimationStart(false, false); - } - - customAnimation = fragment.onCustomTransitionAnimation(true, ()-> { - customAnimation = null; - fragment.onTransitionAnimationEnd(true, false); - if (prevFragment != null) { - prevFragment.onTransitionAnimationEnd(false, false); - } - - swipeProgress = 0f; - invalidateTranslation(); - if (getBackgroundView() != null) { - getBackgroundView().setVisibility(GONE); - } - - fragment.onBecomeFullyVisible(); - if (prevFragment != null) { - prevFragment.onBecomeFullyHidden(); - } - onFragmentOpened.run(); - - }); - if (customAnimation != null) { - getForegroundView().setTranslationX(0); - return true; - } - - invalidateActionBars(); - FloatValueHolder valueHolder = new FloatValueHolder(SPRING_MULTIPLIER); - currentSpringAnimation = new SpringAnimation(valueHolder) - .setSpring(new SpringForce(0f) - .setStiffness(params.preview ? SPRING_STIFFNESS_PREVIEW : SPRING_STIFFNESS) - .setDampingRatio(params.preview ? 0.6f : SPRING_DAMPING_RATIO)); - currentSpringAnimation.addUpdateListener((animation, value, velocity) -> { - swipeProgress = value / SPRING_MULTIPLIER; - invalidateTranslation(); - fragment.onTransitionAnimationProgress(true, 1f - swipeProgress); - }); - currentSpringAnimation.addEndListener((animation, canceled, value, velocity) -> { - if (animation == currentSpringAnimation) { - fragment.onTransitionAnimationEnd(true, false); - if (prevFragment != null) { - prevFragment.onTransitionAnimationEnd(false, false); - } - - swipeProgress = 0f; - invalidateTranslation(); - if (!params.preview && getBackgroundView() != null) { - getBackgroundView().setVisibility(GONE); - } - - fragment.onBecomeFullyVisible(); - if (prevFragment != null) { - prevFragment.onBecomeFullyHidden(); - prevFragment.setPaused(true); - } - onFragmentOpened.run(); - - currentSpringAnimation = null; - - if (params.preview && previewOpenCallback != null) { - previewOpenCallback.run(); - } - previewOpenCallback = null; - } - }); - currentSpringAnimation.start(); - } else if (!params.preview) { - if (fragment.needDelayOpenAnimation() && !params.delayDone && params.needDelayWithoutAnimation) { - AndroidUtilities.runOnUIThread(delayedPresentAnimation = ()->{ - delayedPresentAnimation = null; - - params.isFromDelay = true; - params.delayDone = true; - presentFragment(params); - }, 200); - return true; - } - addFragmentToStack(fragment, -1, true); - onFragmentOpened.run(); - } - - return true; - } - - /** - * Invalidates current fragment and action bar translation - */ - private void invalidateTranslation() { - if (useAlphaAnimations && fragmentStack.size() == 1) { - backgroundView.setAlpha(1f - swipeProgress); - setAlpha(1f - swipeProgress); - return; - } - - FragmentHolderView bgView = getBackgroundView(); - FragmentHolderView fgView = 
getForegroundView(); - - boolean preview = isInPreviewMode(); - - float widthNoPaddings = getWidth() - getPaddingLeft() - getPaddingRight(); - float heightNoPadding = getHeight() - getPaddingTop() - getPaddingBottom(); - if (preview) { - if (bgView != null) { - bgView.setTranslationX(0); - bgView.invalidate(); - } - if (fgView != null) { - fgView.setPivotX(widthNoPaddings / 2f); - fgView.setPivotY(heightNoPadding / 2f); - - fgView.setTranslationX(0); - fgView.setTranslationY(0); - - float scale = 0.5f + (1f - swipeProgress) * 0.5f; - fgView.setScaleX(scale); - fgView.setScaleY(scale); - fgView.setAlpha(1f - Math.max(swipeProgress, 0f)); - - fgView.invalidate(); - } - } else { - if (bgView != null) { - bgView.setTranslationX(-(1f - swipeProgress) * 0.35f * widthNoPaddings); - } - if (fgView != null) { - fgView.setTranslationX(swipeProgress * widthNoPaddings); - } - } - invalidate(); - - try { - if (bgView != null && fgView != null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - int navColor = ColorUtils.blendARGB(fgView.fragment.getNavigationBarColor(), bgView.fragment.getNavigationBarColor(), swipeProgress); - getParentActivity().getWindow().setNavigationBarColor(navColor); - AndroidUtilities.setLightNavigationBar(getParentActivity().getWindow(), AndroidUtilities.computePerceivedBrightness(navColor) > 0.721f); - } - } catch (Exception ignore) {} - - if (getLastFragment() != null) { - getLastFragment().onSlideProgressFront(false, swipeProgress); - } - if (getBackgroundFragment() != null) { - getBackgroundFragment().onSlideProgress(false, swipeProgress); - } - } - - @Override - public List onGetDebugItems() { - List items = new ArrayList<>(); - BaseFragment fragment = getLastFragment(); - if (fragment != null) { - if (fragment instanceof FloatingDebugProvider) { - items.addAll(((FloatingDebugProvider) fragment).onGetDebugItems()); - } - observeDebugItemsFromView(items, fragment.getFragmentView()); - } - if (ALLOW_OPEN_STIFFNESS_CONTROL) { - items.add(new FloatingDebugController.DebugItem(LocaleController.getString(R.string.DebugAltNavigation))); - items.add(new FloatingDebugController.DebugItem(LocaleController.getString(R.string.DebugAltNavigationToggleControls), () -> getStiffnessControl().setVisibility(getStiffnessControl().getVisibility() == VISIBLE ? GONE : VISIBLE))); - } - return items; - } - - private void observeDebugItemsFromView(List items, View v) { - if (v instanceof FloatingDebugProvider) { - items.addAll(((FloatingDebugProvider) v).onGetDebugItems()); - } - if (v instanceof ViewGroup) { - ViewGroup vg = (ViewGroup) v; - for (int i = 0; i < vg.getChildCount(); i++) { - observeDebugItemsFromView(items, vg.getChildAt(i)); - } - } - } - - - private FragmentHolderView getForegroundView() { - if (getChildCount() >= 2) { - return (FragmentHolderView) getChildAt(getChildCount() >= 3 ? 
1 : 0); - } - return null; - } - - private FragmentHolderView getBackgroundView() { - if (getChildCount() >= 3) { - return (FragmentHolderView) getChildAt(0); - } - return null; - } - - @Override - public boolean checkTransitionAnimation() { - return isTransitionAnimationInProgress(); - } - - @Override - public boolean addFragmentToStack(BaseFragment fragment, int position) { - return addFragmentToStack(fragment, position, false); - } - - public boolean addFragmentToStack(BaseFragment fragment, int position, boolean fromPresent) { - if (!fromPresent && (delegate != null && !delegate.needAddFragmentToStack(fragment, this) || !fragment.onFragmentCreate())) { - return false; - } - if (!fragmentStack.isEmpty() && getChildCount() < 2) { - rebuildFragments(REBUILD_FLAG_REBUILD_LAST); - } - fragment.setParentLayout(this); - if (position == -1 || position >= fragmentStack.size()) { - BaseFragment lastFragment = getLastFragment(); - if (lastFragment != null) { - lastFragment.setPaused(true); - lastFragment.onTransitionAnimationStart(false, true); - lastFragment.onTransitionAnimationEnd(false, true); - lastFragment.onBecomeFullyHidden(); - } - - fragmentStack.add(fragment); - notifyFragmentStackChanged(); - - FragmentHolderView holderView = onCreateHolderView(fragment); - addView(holderView, getChildCount() - 1); - - fragment.setPaused(false); - fragment.onTransitionAnimationStart(true, false); - fragment.onTransitionAnimationEnd(true, false); - fragment.onBecomeFullyVisible(); - - if (getBackgroundView() != null) { - getBackgroundView().setVisibility(GONE); - } - getForegroundView().setVisibility(VISIBLE); - } else { - fragmentStack.add(position, fragment); - notifyFragmentStackChanged(); - - if (position == fragmentStack.size() - 2) { - FragmentHolderView holderView = onCreateHolderView(fragment); - addView(holderView, getChildCount() - 2); - getBackgroundView().setVisibility(GONE); - getForegroundView().setVisibility(VISIBLE); - } - } - invalidateTranslation(); - return true; - } - - private FragmentHolderView onCreateHolderView(BaseFragment fragment) { - FragmentHolderView holderView; - if (getChildCount() >= 3) { - holderView = getBackgroundView(); - } else { - holderView = new FragmentHolderView(getContext()); - } - holderView.setFragment(fragment); - if (holderView.getParent() != null) { - holderView.setVisibility(VISIBLE); - removeView(holderView); - } - holderView.setOnClickListener(null); - resetViewProperties(holderView); - resetViewProperties(fragment.getFragmentView()); - if (fragment.getActionBar() != null) { - fragment.getActionBar().setTitleOverlayText(titleOverlayTitle, titleOverlayTitleId, titleOverlayAction); - } - return holderView; - } - - private void resetViewProperties(View v) { - if (v == null) { - return; - } - - v.setLayoutParams(new LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); - v.setAlpha(1f); - v.setPivotX(0); - v.setPivotY(0); - v.setScaleX(1); - v.setScaleY(1); - v.setTranslationX(0); - v.setTranslationY(0); - } - - /** - * Called to notify ImageLoader and listeners about fragments stack changed - */ - private void notifyFragmentStackChanged() { - if (onFragmentStackChangedListener != null) { - onFragmentStackChangedListener.run(); - } - if (useAlphaAnimations) { - if (fragmentStack.isEmpty()) { - setVisibility(GONE); - backgroundView.setVisibility(GONE); - } else { - setVisibility(VISIBLE); - backgroundView.setVisibility(VISIBLE); - } - if (drawerLayoutContainer != null) { - 
drawerLayoutContainer.setAllowOpenDrawer(fragmentStack.isEmpty(), false); - } - } - ImageLoader.getInstance().onFragmentStackChanged(); - } - - @Override - public void removeFragmentFromStack(BaseFragment fragment) { - int i = fragmentStack.indexOf(fragment); - if (i == -1) { - return; - } - - int wasSize = fragmentStack.size(); - - fragment.setRemovingFromStack(true); - fragment.onFragmentDestroy(); - fragment.setParentLayout(null); - fragmentStack.remove(i); - notifyFragmentStackChanged(); - - if (i == wasSize - 1) { - BaseFragment newLastFragment = getLastFragment(); - if (newLastFragment != null) { - newLastFragment.setPaused(false); - newLastFragment.onBecomeFullyVisible(); - } - - FragmentHolderView holderView = getForegroundView(); - if (holderView != null) { - removeView(holderView); - resetViewProperties(holderView); - } - - if (getForegroundView() != null) { - getForegroundView().setVisibility(VISIBLE); - } - - if (fragmentStack.size() >= 2) { - BaseFragment bgFragment = getBackgroundFragment(); - bgFragment.setParentLayout(this); - if (holderView != null) { - holderView.setFragment(bgFragment); - } else { - holderView = onCreateHolderView(bgFragment); - } - bgFragment.onBecomeFullyHidden(); - holderView.setVisibility(GONE); - addView(holderView, getChildCount() - 2); - } - } else if (i == wasSize - 2) { - FragmentHolderView holderView = getBackgroundView(); - if (holderView != null) { - removeView(holderView); - resetViewProperties(holderView); - } - - if (fragmentStack.size() >= 2) { - BaseFragment bgFragment = getBackgroundFragment(); - bgFragment.setParentLayout(this); - if (holderView != null) { - holderView.setFragment(bgFragment); - } else { - holderView = onCreateHolderView(bgFragment); - } - bgFragment.onBecomeFullyHidden(); - holderView.setVisibility(GONE); - addView(holderView, getChildCount() - 2); - } - } - - invalidateTranslation(); - } - - @Override - public List getFragmentStack() { - return unmodifiableFragmentStack; - } - - @Override - public void setFragmentStack(List fragmentStack) { - this.fragmentStack = fragmentStack; - unmodifiableFragmentStack = Collections.unmodifiableList(fragmentStack); - } - - @Override - public void showLastFragment() { - rebuildFragments(REBUILD_FLAG_REBUILD_LAST); - } - - @Override - public void rebuildFragments(int flags) { - if (currentSpringAnimation != null && currentSpringAnimation.isRunning()) { - currentSpringAnimation.addEndListener((animation, canceled, value, velocity) -> AndroidUtilities.runOnUIThread(()-> rebuildFragments(flags))); - return; - } else if (customAnimation != null && customAnimation.isRunning()) { - customAnimation.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - AndroidUtilities.runOnUIThread(()-> rebuildFragments(flags)); - } - }); - return; - } - if (fragmentStack.isEmpty()) { - while (getChildCount() > 1) { - removeViewAt(0); - } - return; - } - - boolean rebuildLast = (flags & REBUILD_FLAG_REBUILD_LAST) != 0; - boolean rebuildBeforeLast = (flags & REBUILD_FLAG_REBUILD_ONLY_LAST) == 0 || rebuildLast && (getBackgroundView() != null && getBackgroundView().fragment != getBackgroundFragment() || getForegroundView() != null && getForegroundView().fragment == getLastFragment()); - - if (rebuildBeforeLast) { - if (getChildCount() >= 3) { - View child = getChildAt(0); - if (child instanceof FragmentHolderView) { - ((FragmentHolderView) child).fragment.setPaused(true); - } - removeViewAt(0); - } - } - if (rebuildLast) { - if (getChildCount() >= 2) { - 
View child = getChildAt(0); - if (child instanceof FragmentHolderView) { - ((FragmentHolderView) child).fragment.setPaused(true); - } - removeViewAt(0); - } - } - for (int i = rebuildBeforeLast ? 0 : fragmentStack.size() - 1; i < fragmentStack.size() - (rebuildLast ? 0 : 1); i++) { - BaseFragment fragment = fragmentStack.get(i); - fragment.clearViews(); - fragment.setParentLayout(this); - FragmentHolderView holderView = new FragmentHolderView(getContext()); - holderView.setFragment(fragment); - - if (i >= fragmentStack.size() - 2) { - addView(holderView, getChildCount() - 1); - } - } - if (delegate != null) { - delegate.onRebuildAllFragments(this, rebuildLast); - } - if (getLastFragment() != null) { - getLastFragment().setPaused(false); - } - } - - @Override - public void setDelegate(INavigationLayoutDelegate delegate) { - this.delegate = delegate; - } - - @Override - public boolean isActionBarInCrossfade() { - boolean crossfadeNoFragments = USE_ACTIONBAR_CROSSFADE && !isInPreviewMode() && (isSwipeInProgress() || isTransitionAnimationInProgress()) && customAnimation == null; - return crossfadeNoFragments && getLastFragment() != null && getLastFragment().isActionBarCrossfadeEnabled() && getBackgroundFragment() != null && getBackgroundFragment().isActionBarCrossfadeEnabled(); - } - - @Override - public void draw(Canvas canvas) { - boolean crossfade = isActionBarInCrossfade(); - if (useAlphaAnimations) { - canvas.save(); - path.rewind(); - AndroidUtilities.rectTmp.set(getPaddingLeft(), getPaddingTop(), getWidth() - getPaddingRight(), getHeight() - getPaddingBottom()); - path.addRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(12), AndroidUtilities.dp(12), Path.Direction.CW); - canvas.clipPath(path); - } - super.draw(canvas); - - if (!isInPreviewMode() && !(useAlphaAnimations && fragmentStack.size() <= 1) && (isSwipeInProgress() || isTransitionAnimationInProgress()) && swipeProgress != 0) { - int top = getPaddingTop(); - if (crossfade) { - top += AndroidUtilities.lerp(getBackgroundFragment().getActionBar().getHeight(), getLastFragment().getActionBar().getHeight(), 1f - swipeProgress); - } - int widthNoPaddings = getWidth() - getPaddingLeft() - getPaddingRight(); - dimmPaint.setAlpha((int) (0x7a * (1f - swipeProgress))); - canvas.drawRect(getPaddingLeft(), top, widthNoPaddings * swipeProgress + getPaddingLeft(), getHeight() - getPaddingBottom(), dimmPaint); - - layerShadowDrawable.setAlpha((int) (0xFF * (1f - swipeProgress))); - layerShadowDrawable.setBounds((int) (widthNoPaddings * swipeProgress - layerShadowDrawable.getIntrinsicWidth()) + getPaddingLeft(), top, (int) (widthNoPaddings * swipeProgress) + getPaddingLeft(), getHeight() - getPaddingBottom()); - layerShadowDrawable.draw(canvas); - } - if (useAlphaAnimations) { - canvas.restore(); - } - - if (previewFragmentSnapshot != null) { - canvas.save(); - path.rewind(); - AndroidUtilities.rectTmp.set(previewFragmentRect.left * (1f - previewExpandProgress), previewFragmentRect.top * (1f - previewExpandProgress), AndroidUtilities.lerp(previewFragmentRect.right, getWidth(), previewExpandProgress), AndroidUtilities.lerp(previewFragmentRect.bottom, getHeight(), previewExpandProgress)); - path.addRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(8), AndroidUtilities.dp(8), Path.Direction.CW); - canvas.clipPath(path); - - canvas.translate(previewFragmentRect.left * (1f - previewExpandProgress), previewFragmentRect.top * (1f - previewExpandProgress)); - canvas.scale(AndroidUtilities.lerp(1f, (float) getWidth() / 
previewFragmentRect.width(), previewExpandProgress), AndroidUtilities.lerp(1f, (float) getHeight() / previewFragmentRect.height(), previewExpandProgress)); - blurPaint.setAlpha((int) (0xFF * (1f - Math.min(1f, previewExpandProgress)))); - canvas.drawBitmap(previewFragmentSnapshot, 0, 0, blurPaint); - canvas.restore(); - } - - if (crossfade) { - BaseFragment foregroundFragment = getLastFragment(); - BaseFragment backgroundFragment = getBackgroundFragment(); - - ActionBar fgActionBar = foregroundFragment.getActionBar(); - ActionBar bgActionBar = backgroundFragment.getActionBar(); - - boolean useBackDrawable = false; - boolean backDrawableReverse = false; - Float backDrawableForcedProgress = null; - - if (backgroundFragment.getBackButtonState() == BackButtonState.MENU && foregroundFragment.getBackButtonState() == BackButtonState.BACK) { - useBackDrawable = true; - backDrawableReverse = false; - } else if (backgroundFragment.getBackButtonState() == BackButtonState.BACK && foregroundFragment.getBackButtonState() == BackButtonState.MENU) { - useBackDrawable = true; - backDrawableReverse = true; - } else if (backgroundFragment.getBackButtonState() == BackButtonState.BACK && foregroundFragment.getBackButtonState() == BackButtonState.BACK) { - useBackDrawable = true; - backDrawableForcedProgress = 0f; - } else if (backgroundFragment.getBackButtonState() == BackButtonState.MENU && foregroundFragment.getBackButtonState() == BackButtonState.MENU) { - useBackDrawable = true; - backDrawableForcedProgress = 1f; - } - - AndroidUtilities.rectTmp.set(0, 0, getWidth(), bgActionBar.getY() + bgActionBar.getHeight()); - canvas.saveLayerAlpha(AndroidUtilities.rectTmp, (int) (swipeProgress * 0xFF), Canvas.ALL_SAVE_FLAG); - bgActionBar.onDrawCrossfadeBackground(canvas); - canvas.restore(); - - canvas.saveLayerAlpha(AndroidUtilities.rectTmp, (int) ((1 - swipeProgress) * 0xFF), Canvas.ALL_SAVE_FLAG); - fgActionBar.onDrawCrossfadeBackground(canvas); - canvas.restore(); - - if (useBackDrawable) { - AndroidUtilities.rectTmp.set(0, 0, getWidth(), bgActionBar.getY() + bgActionBar.getHeight()); - float progress = backDrawableForcedProgress != null ? backDrawableForcedProgress : swipeProgress; - float bgAlpha = 1f - (bgActionBar.getY() / -(bgActionBar.getHeight() - AndroidUtilities.statusBarHeight)); - float fgAlpha = 1f - (fgActionBar.getY() / -(fgActionBar.getHeight() - AndroidUtilities.statusBarHeight)); - canvas.saveLayerAlpha(AndroidUtilities.rectTmp, (int) (AndroidUtilities.lerp(bgAlpha, fgAlpha, 1f - swipeProgress) * 0xFF), Canvas.ALL_SAVE_FLAG); - canvas.translate(AndroidUtilities.dp(16) - AndroidUtilities.dp(1) * (1f - progress), AndroidUtilities.dp(16) + (fgActionBar.getOccupyStatusBar() ? AndroidUtilities.statusBarHeight : 0)); - menuDrawable.setRotation(backDrawableReverse ? 
progress : 1f - progress, false); - menuDrawable.draw(canvas); - canvas.restore(); - } - - AndroidUtilities.rectTmp.set(0, AndroidUtilities.statusBarHeight, getWidth(), bgActionBar.getY() + bgActionBar.getHeight()); - canvas.saveLayerAlpha(AndroidUtilities.rectTmp, (int) (swipeProgress * 0xFF), Canvas.ALL_SAVE_FLAG); - canvas.translate(0, bgActionBar.getY()); - bgActionBar.onDrawCrossfadeContent(canvas, false, useBackDrawable, swipeProgress); - canvas.restore(); - - canvas.saveLayerAlpha(AndroidUtilities.rectTmp, (int) ((1 - swipeProgress) * 0xFF), Canvas.ALL_SAVE_FLAG); - canvas.translate(0, fgActionBar.getY()); - fgActionBar.onDrawCrossfadeContent(canvas, true, useBackDrawable, swipeProgress); - canvas.restore(); - } - } - - @Override - public void resumeDelayedFragmentAnimation() { - if (delayedPresentAnimation != null) { - AndroidUtilities.cancelRunOnUIThread(delayedPresentAnimation); - delayedPresentAnimation.run(); - } - } - - @Override - public void setUseAlphaAnimations(boolean useAlphaAnimations) { - this.useAlphaAnimations = useAlphaAnimations; - } - - @Override - public void setBackgroundView(View backgroundView) { - this.backgroundView = backgroundView; - } - - @Override - public void closeLastFragment(boolean animated, boolean forceNoAnimation) { - closeLastFragment(animated, forceNoAnimation, 0); - } - - public void closeLastFragment(boolean animated, boolean forceNoAnimation, float velocityX) { - BaseFragment fragment = getLastFragment(); - if (fragment != null && fragment.closeLastFragment()) { - return; - } - if (fragmentStack.isEmpty() || checkTransitionAnimation() || delegate != null && !delegate.needCloseLastFragment(this)) { - return; - } - - boolean animate = animated && !forceNoAnimation && MessagesController.getGlobalMainSettings().getBoolean("view_animations", true) && (useAlphaAnimations || fragmentStack.size() >= 2); - if (animate) { - AndroidUtilities.hideKeyboard(this); - - BaseFragment lastFragment = getLastFragment(); - - BaseFragment newLastFragment = getBackgroundFragment(); - - if (getBackgroundView() != null) { - getBackgroundView().setVisibility(VISIBLE); - } - - lastFragment.onTransitionAnimationStart(false, true); - if (newLastFragment != null) { - newLastFragment.setPaused(false); - } - - if (swipeProgress == 0) { - customAnimation = lastFragment.onCustomTransitionAnimation(false, () -> { - onCloseAnimationEnd(lastFragment, newLastFragment); - - customAnimation = null; - }); - if (customAnimation != null) { - getForegroundView().setTranslationX(0); - if (getBackgroundView() != null) { - getBackgroundView().setTranslationX(0); - } - return; - } - } - - FloatValueHolder valueHolder = new FloatValueHolder(swipeProgress * SPRING_MULTIPLIER); - currentSpringAnimation = new SpringAnimation(valueHolder) - .setSpring(new SpringForce(SPRING_MULTIPLIER) - .setStiffness(isInPreviewMode() ? 
SPRING_STIFFNESS_PREVIEW_OUT : SPRING_STIFFNESS) - .setDampingRatio(SPRING_DAMPING_RATIO)); - if (velocityX != 0) { - currentSpringAnimation.setStartVelocity(velocityX); - } - currentSpringAnimation.addUpdateListener((animation, value, velocity) -> { - swipeProgress = value / SPRING_MULTIPLIER; - invalidateTranslation(); - lastFragment.onTransitionAnimationProgress(false, swipeProgress); - - if (newLastFragment != null) { - lastFragment.onTransitionAnimationProgress(true, swipeProgress); - } - }); - currentSpringAnimation.addEndListener((animation, canceled, value, velocity) -> { - if (animation == currentSpringAnimation) { - onCloseAnimationEnd(lastFragment, newLastFragment); - - currentSpringAnimation = null; - } - }); - currentSpringAnimation.start(); - } else { - swipeProgress = 0f; - removeFragmentFromStack(getLastFragment()); - } - } - - private void onCloseAnimationEnd(BaseFragment lastFragment, BaseFragment newLastFragment) { - lastFragment.setPaused(true); - lastFragment.setRemovingFromStack(true); - lastFragment.onTransitionAnimationEnd(false, true); - lastFragment.prepareFragmentToSlide(true, false); - lastFragment.onBecomeFullyHidden(); - lastFragment.onFragmentDestroy(); - lastFragment.setParentLayout(null); - fragmentStack.remove(lastFragment); - notifyFragmentStackChanged(); - - FragmentHolderView holderView = getForegroundView(); - if (holderView != null) { - holderView.setFragment(null); - removeView(holderView); - resetViewProperties(holderView); - } - - if (newLastFragment != null) { - newLastFragment.prepareFragmentToSlide(false, false); - newLastFragment.onTransitionAnimationEnd(true, true); - newLastFragment.onBecomeFullyVisible(); - } - - if (fragmentStack.size() >= 2) { - BaseFragment prevFragment = getBackgroundFragment(); - prevFragment.setParentLayout(this); - - if (holderView == null) { - holderView = onCreateHolderView(prevFragment); - } else { - holderView.setFragment(prevFragment); - } - holderView.setVisibility(GONE); - addView(holderView, getChildCount() - 2); - } - swipeProgress = 0f; - invalidateTranslation(); - - previewMenu = null; - if (blurredBackFragmentForPreview != null) { - blurredBackFragmentForPreview.recycle(); - blurredBackFragmentForPreview = null; - } - previewOpenCallback = null; - invalidateActionBars(); - } - - @Override - public DrawerLayoutContainer getDrawerLayoutContainer() { - return drawerLayoutContainer; - } - - @Override - public void setDrawerLayoutContainer(DrawerLayoutContainer drawerLayoutContainer) { - this.drawerLayoutContainer = drawerLayoutContainer; - } - - @Override - public void setRemoveActionBarExtraHeight(boolean removeExtraHeight) { - this.removeActionBarExtraHeight = removeExtraHeight; - } - - private ActionBar getCurrentActionBar() { - return getLastFragment() != null ? 
getLastFragment().getActionBar() : null; - } - - @Override - public void setTitleOverlayText(String title, int titleId, Runnable action) { - titleOverlayTitle = title; - titleOverlayTitleId = titleId; - titleOverlayAction = action; - for (BaseFragment fragment : fragmentStack) { - if (fragment.getActionBar() != null) { - fragment.getActionBar().setTitleOverlayText(title, titleId, action); - } - } - } - - private void addStartDescriptions(ArrayList descriptions) { - if (descriptions == null) { - return; - } - themeAnimatorDescriptions.add(descriptions); - int[] startColors = new int[descriptions.size()]; - animateStartColors.add(startColors); - for (int a = 0, N = descriptions.size(); a < N; a++) { - ThemeDescription description = descriptions.get(a); - startColors[a] = description.getSetColor(); - ThemeDescription.ThemeDescriptionDelegate delegate = description.setDelegateDisabled(); - if (delegate != null && !themeAnimatorDelegate.contains(delegate)) { - themeAnimatorDelegate.add(delegate); - } - } - } - - private void addEndDescriptions(ArrayList descriptions) { - if (descriptions == null) { - return; - } - int[] endColors = new int[descriptions.size()]; - animateEndColors.add(endColors); - for (int a = 0, N = descriptions.size(); a < N; a++) { - endColors[a] = descriptions.get(a).getSetColor(); - } - } - - @Override - public void animateThemedValues(ThemeAnimationSettings settings, Runnable onDone) { - if (themeAnimator != null) { - themeAnimator.cancel(); - themeAnimator = null; - } - int fragmentCount = settings.onlyTopFragment ? 1 : fragmentStack.size(); - Runnable next = () -> { - boolean startAnimation = false; - for (int i = 0; i < fragmentCount; i++) { - BaseFragment fragment; - if (i == 0) { - fragment = getLastFragment(); - } else { - if (!isInPreviewMode() && !isPreviewOpenAnimationInProgress() || fragmentStack.size() <= 1) { - continue; - } - fragment = fragmentStack.get(fragmentStack.size() - 2); - } - if (fragment != null) { - startAnimation = true; - if (settings.resourcesProvider != null) { - if (messageDrawableOutStart == null) { - messageDrawableOutStart = new Theme.MessageDrawable(Theme.MessageDrawable.TYPE_TEXT, true, false, startColorsProvider); - messageDrawableOutStart.isCrossfadeBackground = true; - messageDrawableOutMediaStart = new Theme.MessageDrawable(Theme.MessageDrawable.TYPE_MEDIA, true, false, startColorsProvider); - messageDrawableOutMediaStart.isCrossfadeBackground = true; - } - startColorsProvider.saveColors(settings.resourcesProvider); - } - ArrayList descriptions = fragment.getThemeDescriptions(); - addStartDescriptions(descriptions); - if (fragment.getVisibleDialog() instanceof BottomSheet) { - BottomSheet sheet = (BottomSheet) fragment.getVisibleDialog(); - addStartDescriptions(sheet.getThemeDescriptions()); - } else if (fragment.getVisibleDialog() instanceof AlertDialog) { - AlertDialog dialog = (AlertDialog) fragment.getVisibleDialog(); - addStartDescriptions(dialog.getThemeDescriptions()); - } - if (i == 0) { - if (settings.afterStartDescriptionsAddedRunnable != null) { - settings.afterStartDescriptionsAddedRunnable.run(); - } - } - addEndDescriptions(descriptions); - if (fragment.getVisibleDialog() instanceof BottomSheet) { - addEndDescriptions(((BottomSheet) fragment.getVisibleDialog()).getThemeDescriptions()); - } else if (fragment.getVisibleDialog() instanceof AlertDialog) { - addEndDescriptions(((AlertDialog) fragment.getVisibleDialog()).getThemeDescriptions()); - } - } - } - if (startAnimation) { - if (!settings.onlyTopFragment) { - int 
count = fragmentStack.size() - (isInPreviewMode() || isPreviewOpenAnimationInProgress() ? 2 : 1); - boolean needRebuild = false; - for (int i = 0; i < count; i++) { - BaseFragment fragment = fragmentStack.get(i); - fragment.clearViews(); - fragment.setParentLayout(this); - - if (i == fragmentStack.size() - 1) { - if (getForegroundView() != null) { - getForegroundView().setFragment(fragment); - } else { - needRebuild = true; - } - } else if (i == fragmentStack.size() - 2) { - if (getBackgroundView() != null) { - getBackgroundView().setFragment(fragment); - } else { - needRebuild = true; - } - } - } - if (needRebuild) { - rebuildFragments(REBUILD_FLAG_REBUILD_LAST); - } - } - if (settings.instant) { - setThemeAnimationValue(1.0f); - themeAnimatorDescriptions.clear(); - animateStartColors.clear(); - animateEndColors.clear(); - themeAnimatorDelegate.clear(); - presentingFragmentDescriptions = null; - if (settings.afterAnimationRunnable != null) { - settings.afterAnimationRunnable.run(); - } - if (onDone != null) { - onDone.run(); - } - return; - } - Theme.setAnimatingColor(true); - if (settings.beforeAnimationRunnable != null) { - settings.beforeAnimationRunnable.run(); - } - animationProgressListener = settings.animationProgress; - if (animationProgressListener != null) { - animationProgressListener.setProgress(0); - } - fromBackgroundColor = getBackground() instanceof ColorDrawable ? ((ColorDrawable) getBackground()).getColor() : 0; - themeAnimator = ValueAnimator.ofFloat(0, 1).setDuration(settings.duration); - themeAnimator.addUpdateListener(animation -> setThemeAnimationValue((float) animation.getAnimatedValue())); - themeAnimator.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - if (animation.equals(themeAnimator)) { - themeAnimatorDescriptions.clear(); - animateStartColors.clear(); - animateEndColors.clear(); - themeAnimatorDelegate.clear(); - Theme.setAnimatingColor(false); - presentingFragmentDescriptions = null; - themeAnimator = null; - if (settings.afterAnimationRunnable != null) { - settings.afterAnimationRunnable.run(); - } - } - } - - @Override - public void onAnimationCancel(Animator animation) { - if (animation.equals(themeAnimator)) { - themeAnimatorDescriptions.clear(); - animateStartColors.clear(); - animateEndColors.clear(); - themeAnimatorDelegate.clear(); - Theme.setAnimatingColor(false); - presentingFragmentDescriptions = null; - themeAnimator = null; - if (settings.afterAnimationRunnable != null) { - settings.afterAnimationRunnable.run(); - } - } - } - }); - themeAnimator.start(); - } - if (onDone != null) { - onDone.run(); - } - }; - if (fragmentCount >= 1 && settings.applyTheme) { - if (settings.accentId != -1 && settings.theme != null) { - settings.theme.setCurrentAccentId(settings.accentId); - Theme.saveThemeAccents(settings.theme, true, false, true, false); - } - if (onDone == null) { - Theme.applyTheme(settings.theme, settings.nightTheme); - next.run(); - } else { - Theme.applyThemeInBackground(settings.theme, settings.nightTheme, () -> AndroidUtilities.runOnUIThread(next)); - } - } else { - next.run(); - } - } - - private void setThemeAnimationValue(float value) { - themeAnimationValue = value; - for (int j = 0, N = themeAnimatorDescriptions.size(); j < N; j++) { - ArrayList descriptions = themeAnimatorDescriptions.get(j); - int[] startColors = animateStartColors.get(j); - int[] endColors = animateEndColors.get(j); - int rE, gE, bE, aE, rS, gS, bS, aS, a, r, g, b; - for (int i = 0, N2 = 
descriptions.size(); i < N2; i++) { - rE = Color.red(endColors[i]); - gE = Color.green(endColors[i]); - bE = Color.blue(endColors[i]); - aE = Color.alpha(endColors[i]); - - rS = Color.red(startColors[i]); - gS = Color.green(startColors[i]); - bS = Color.blue(startColors[i]); - aS = Color.alpha(startColors[i]); - - a = Math.min(255, (int) (aS + (aE - aS) * value)); - r = Math.min(255, (int) (rS + (rE - rS) * value)); - g = Math.min(255, (int) (gS + (gE - gS) * value)); - b = Math.min(255, (int) (bS + (bE - bS) * value)); - int color = Color.argb(a, r, g, b); - ThemeDescription description = descriptions.get(i); - description.setAnimatedColor(color); - description.setColor(color, false, false); - } - } - for (int j = 0, N = themeAnimatorDelegate.size(); j < N; j++) { - ThemeDescription.ThemeDescriptionDelegate delegate = themeAnimatorDelegate.get(j); - if (delegate != null) { - delegate.didSetColor(); - delegate.onAnimationProgress(value); - } - } - if (presentingFragmentDescriptions != null) { - for (int i = 0, N = presentingFragmentDescriptions.size(); i < N; i++) { - ThemeDescription description = presentingFragmentDescriptions.get(i); - String key = description.getCurrentKey(); - description.setColor(Theme.getColor(key), false, false); - } - } - if (animationProgressListener != null) { - animationProgressListener.setProgress(value); - } - if (delegate != null) { - delegate.onThemeProgress(value); - } - } - - @Override - public float getThemeAnimationValue() { - return themeAnimationValue; - } - - @Override - public void setFragmentStackChangedListener(Runnable onFragmentStackChanged) { - this.onFragmentStackChangedListener = onFragmentStackChanged; - } - - @Override - public boolean isTransitionAnimationInProgress() { - return currentSpringAnimation != null || customAnimation != null; - } - - @Override - public boolean isInPassivePreviewMode() { - return (isInPreviewMode() && previewMenu == null) || isTransitionAnimationInProgress(); - } - - @Override - public void setInBubbleMode(boolean bubbleMode) { - this.isInBubbleMode = bubbleMode; - } - - @Override - public boolean isInBubbleMode() { - return isInBubbleMode; - } - - @Override - public boolean isInPreviewMode() { - return getLastFragment() != null && getLastFragment().isInPreviewMode() || blurredBackFragmentForPreview != null; - } - - @Override - public boolean isPreviewOpenAnimationInProgress() { - return isInPreviewMode() && isTransitionAnimationInProgress(); - } - - @Override - public void movePreviewFragment(float dy) { - if (!isInPreviewMode() || previewMenu != null || isTransitionAnimationInProgress() || getForegroundView() == null) { - return; - } - float currentTranslation = getForegroundView().getTranslationY(); - float nextTranslation = -dy; - if (nextTranslation > 0) { - nextTranslation = 0; - } else if (nextTranslation < -AndroidUtilities.dp(60)) { - nextTranslation = 0; - expandPreviewFragment(); - } - if (currentTranslation != nextTranslation) { - getForegroundView().setTranslationY(nextTranslation); - invalidate(); - } - } - - @Override - public void expandPreviewFragment() { - if (!isInPreviewMode() || isTransitionAnimationInProgress() || fragmentStack.isEmpty()) { - return; - } - - try { - performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignored) {} - - BaseFragment fragment = getLastFragment(); - View bgView = getBackgroundView(); - View fgView = getForegroundView(); - View fragmentView = fragment.getFragmentView(); - 
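// The expand animation below follows the same spring-driven progress pattern used throughout this
// class: a FloatValueHolder is animated over an inflated range (the SPRING_MULTIPLIER trick keeps
// the spring's stop threshold meaningful for a 0..1 progress), and on every frame the normalized
// value drives view properties. A minimal, self-contained sketch of that pattern follows, assuming
// only the androidx.dynamicanimation dependency this code already uses; the class, method and
// constant names in the sketch are illustrative assumptions, not identifiers from this patch.
import android.view.View;

import androidx.dynamicanimation.animation.FloatValueHolder;
import androidx.dynamicanimation.animation.SpringAnimation;
import androidx.dynamicanimation.animation.SpringForce;

final class SpringProgressSketch {
    private static final float MULTIPLIER = 1000f; // assumed value, mirrors the SPRING_MULTIPLIER idea

    // Spring-scales `target` from `fromScale` up to 1f and returns the running animation.
    static SpringAnimation animateScaleUp(View target, float fromScale) {
        FloatValueHolder holder = new FloatValueHolder(0f);
        SpringAnimation spring = new SpringAnimation(holder)
                .setSpring(new SpringForce(MULTIPLIER)
                        .setStiffness(SpringForce.STIFFNESS_LOW)
                        .setDampingRatio(0.6f));
        spring.addUpdateListener((animation, value, velocity) -> {
            float progress = value / MULTIPLIER; // back to the 0..1 range
            float scale = fromScale + (1f - fromScale) * progress;
            target.setScaleX(scale);
            target.setScaleY(scale);
        });
        spring.addEndListener((animation, canceled, value, velocity) -> {
            // Snap to the final state so rounding in the last spring step cannot leave a visible offset.
            target.setScaleX(1f);
            target.setScaleY(1f);
        });
        spring.start();
        return spring;
    }
}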
previewFragmentRect.set(fragmentView.getLeft(), fragmentView.getTop(), fragmentView.getRight(), fragmentView.getBottom()); - previewFragmentSnapshot = AndroidUtilities.snapshotView(fgView); - - resetViewProperties(fgView); - resetViewProperties(fragment.getFragmentView()); - fragment.setInPreviewMode(false); - swipeProgress = 0f; - invalidateTranslation(); - - float fromMenuY; - if (previewMenu != null) { - fromMenuY = previewMenu.getTranslationY(); - } else { - fromMenuY = 0; - } - - FloatValueHolder valueHolder = new FloatValueHolder(0); - currentSpringAnimation = new SpringAnimation(valueHolder) - .setSpring(new SpringForce(SPRING_MULTIPLIER) - .setStiffness(SPRING_STIFFNESS_PREVIEW_EXPAND) - .setDampingRatio(0.6f)); - currentSpringAnimation.addUpdateListener((animation, value, velocity) -> { - previewExpandProgress = value / SPRING_MULTIPLIER; - bgView.invalidate(); - - fgView.setPivotX(previewFragmentRect.centerX()); - fgView.setPivotY(previewFragmentRect.centerY()); - fgView.setScaleX(AndroidUtilities.lerp(previewFragmentRect.width() / (float) fgView.getWidth(), 1f, previewExpandProgress)); - fgView.setScaleY(AndroidUtilities.lerp(previewFragmentRect.height() / (float) fgView.getHeight(), 1f, previewExpandProgress)); - fgView.invalidate(); - - if (previewMenu != null) { - previewMenu.setTranslationY(AndroidUtilities.lerp(fromMenuY, getHeight(), previewExpandProgress)); - } - - invalidate(); - }); - currentSpringAnimation.addEndListener((animation, canceled, value, velocity) -> { - if (animation == currentSpringAnimation) { - currentSpringAnimation = null; - fragment.onPreviewOpenAnimationEnd(); - - previewFragmentSnapshot.recycle(); - previewFragmentSnapshot = null; - - if (blurredBackFragmentForPreview != null) { - blurredBackFragmentForPreview.recycle(); - blurredBackFragmentForPreview = null; - } - - if (previewMenu != null && previewMenu.getParent() != null) { - ((ViewGroup) previewMenu.getParent()).removeView(previewMenu); - } - previewMenu = null; - previewOpenCallback = null; - previewExpandProgress = 0; - - if (getBackgroundView() != null) { - getBackgroundView().setVisibility(GONE); - } - } - }); - currentSpringAnimation.start(); - } - - @Override - public void finishPreviewFragment() { - if (isInPreviewMode()) { - Runnable callback = () -> { - if (delayedPresentAnimation != null) { - AndroidUtilities.cancelRunOnUIThread(delayedPresentAnimation); - delayedPresentAnimation = null; - } - - closeLastFragment(); - }; - if (!isTransitionAnimationInProgress()) { - callback.run(); - } else { - previewOpenCallback = callback; - } - } - } - - @Override - public void setFragmentPanTranslationOffset(int offset) { - FragmentHolderView holderView = getForegroundView(); - if (holderView != null) { - holderView.setFragmentPanTranslationOffset(offset); - } - } - - @Override - public ViewGroup getOverlayContainerView() { - return overlayLayout; - } - - @Override - public void setHighlightActionButtons(boolean highlightActionButtons) { - this.highlightActionButtons = highlightActionButtons; - } - - @Override - public float getCurrentPreviewFragmentAlpha() { - return isInPreviewMode() ? 
getForegroundView().getAlpha() : 0f; - } - - @Override - protected boolean drawChild(Canvas canvas, View child, long drawingTime) { - int index = indexOfChild(child); - if (drawerLayoutContainer != null && drawerLayoutContainer.isDrawCurrentPreviewFragmentAbove() && isInPreviewMode() && index == 1) { - drawerLayoutContainer.invalidate(); - return false; - } - - boolean clipBackground = getChildCount() >= 3 && index == 0 && customAnimation == null && !isInPreviewMode(); - if (clipBackground) { - canvas.save(); - AndroidUtilities.rectTmp.set(getPaddingLeft(), getPaddingTop(), getPaddingLeft() + (getWidth() - getPaddingLeft() - getPaddingRight()) * swipeProgress, getHeight() - getPaddingBottom()); - canvas.clipRect(AndroidUtilities.rectTmp); - } - if (index == 1 && isInPreviewMode()) { - drawPreviewDrawables(canvas, (ViewGroup) child); - } - boolean draw = super.drawChild(canvas, child, drawingTime); - if (index == 0 && isInPreviewMode() && blurredBackFragmentForPreview != null) { - canvas.save(); - - if (previewFragmentSnapshot != null) { - blurPaint.setAlpha((int) (0xFF * (1f - Math.min(previewExpandProgress, 1f)))); - } else { - blurPaint.setAlpha((int) (0xFF * (1f - Math.max(swipeProgress, 0f)))); - } - - canvas.scale(child.getWidth() / (float)blurredBackFragmentForPreview.getWidth(), child.getHeight() / (float)blurredBackFragmentForPreview.getHeight()); - canvas.drawBitmap(blurredBackFragmentForPreview, 0, 0, blurPaint); - canvas.restore(); - } - if (clipBackground) { - canvas.restore(); - } - return draw; - } - - @Override - public void drawCurrentPreviewFragment(Canvas canvas, Drawable foregroundDrawable) { - if (isInPreviewMode()) { - FragmentHolderView v = getForegroundView(); - drawPreviewDrawables(canvas, v); - if (v.getAlpha() < 1f) { - canvas.saveLayerAlpha(0, 0, getWidth(), getHeight(), (int) (v.getAlpha() * 255), Canvas.ALL_SAVE_FLAG); - } else { - canvas.save(); - } - canvas.concat(v.getMatrix()); - MarginLayoutParams params = (MarginLayoutParams) v.getLayoutParams(); - canvas.translate(params.leftMargin, params.topMargin); - path.rewind(); - AndroidUtilities.rectTmp.set(0, previewExpandProgress != 0 ? 0 : AndroidUtilities.statusBarHeight, v.getWidth(), v.getHeight()); - path.addRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(8), AndroidUtilities.dp(8), Path.Direction.CW); - canvas.clipPath(path); - v.draw(canvas); - if (foregroundDrawable != null) { - View child = v.getChildAt(0); - if (child != null) { - MarginLayoutParams lp = (MarginLayoutParams) child.getLayoutParams(); - Rect rect = new Rect(); - child.getLocalVisibleRect(rect); - rect.offset(lp.leftMargin, lp.topMargin); - rect.top += Build.VERSION.SDK_INT >= 21 ? 
AndroidUtilities.statusBarHeight - 1 : 0; - foregroundDrawable.setAlpha((int) (v.getAlpha() * 255)); - foregroundDrawable.setBounds(rect); - foregroundDrawable.draw(canvas); - } - } - canvas.restore(); - } - } - - private void drawPreviewDrawables(Canvas canvas, ViewGroup containerView) { - View view = containerView.getChildAt(0); - if (view != null) { - MarginLayoutParams params = (MarginLayoutParams) containerView.getLayoutParams(); - - float alpha = 1f - Math.max(swipeProgress, 0); - if (previewFragmentSnapshot != null) { - alpha = 1f - Math.min(previewExpandProgress, 1f); - } - canvas.drawColor(Color.argb((int)(0x2e * alpha), 0, 0, 0)); - if (previewMenu == null) { - int width = AndroidUtilities.dp(32), height = width / 2; - int x = (getMeasuredWidth() - width) / 2; - int y = (int) (params.topMargin + containerView.getTranslationY() - AndroidUtilities.dp(12 + (Build.VERSION.SDK_INT < 21 ? 20 : 0))); - Theme.moveUpDrawable.setAlpha((int) (alpha * 0xFF)); - Theme.moveUpDrawable.setBounds(x, y, x + width, y + height); - Theme.moveUpDrawable.draw(canvas); - } - } - } - - @Override - public void drawHeaderShadow(Canvas canvas, int alpha, int y) { - if (headerShadowDrawable != null) { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { - if (headerShadowDrawable.getAlpha() != alpha) { - headerShadowDrawable.setAlpha(alpha); - } - } else { - headerShadowDrawable.setAlpha(alpha); - } - headerShadowDrawable.setBounds(0, y, getMeasuredWidth(), y + headerShadowDrawable.getIntrinsicHeight()); - headerShadowDrawable.draw(canvas); - } - } - - @Override - public boolean isSwipeInProgress() { - return isSwipeInProgress; - } - - @Override - public void onPause() { - BaseFragment fragment = getLastFragment(); - if (fragment != null) { - fragment.setPaused(true); - } - } - - @Override - public void onResume() { - BaseFragment fragment = getLastFragment(); - if (fragment != null) { - fragment.setPaused(false); - } - } - - @Override - public void onUserLeaveHint() { - BaseFragment fragment = getLastFragment(); - if (fragment != null) { - fragment.onUserLeaveHint(); - } - } - - @Override - public void onLowMemory() { - for (BaseFragment fragment : fragmentStack) { - fragment.onLowMemory(); - } - } - - @Override - public void onBackPressed() { - if (isSwipeInProgress() || checkTransitionAnimation() || fragmentStack.isEmpty()) { - return; - } - if (GroupCallPip.onBackPressed()) { - return; - } - if (getCurrentActionBar() != null && !getCurrentActionBar().isActionModeShowed() && getCurrentActionBar().isSearchFieldVisible()) { - getCurrentActionBar().closeSearchField(); - return; - } - BaseFragment lastFragment = getLastFragment(); - if (lastFragment.onBackPressed()) { - closeLastFragment(true); - } - } - - @Override - public boolean extendActionMode(Menu menu) { - BaseFragment lastFragment = getLastFragment(); - return lastFragment != null && lastFragment.extendActionMode(menu); - } - - @Override - public void onActionModeStarted(Object mode) { - if (getCurrentActionBar() != null) { - getCurrentActionBar().setVisibility(GONE); - } - isInActionMode = true; - } - - @Override - public void onActionModeFinished(Object mode) { - if (getCurrentActionBar() != null) { - getCurrentActionBar().setVisibility(VISIBLE); - } - isInActionMode = false; - } - - @Override - public void startActivityForResult(Intent intent, int requestCode) { - Activity parentActivity = getParentActivity(); - if (parentActivity == null) { - return; - } - // Maybe reset current animation? 
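// Note on the question recorded just above: startActivityForResult hands control to an external
// activity without touching swipeProgress or currentSpringAnimation, so any swipe-back or spring
// transition that was in flight simply stays where it was until the user returns.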
- - if (intent != null) { - parentActivity.startActivityForResult(intent, requestCode); - } - } - - @Override - public Theme.MessageDrawable getMessageDrawableOutStart() { - return messageDrawableOutStart; - } - - @Override - public Theme.MessageDrawable getMessageDrawableOutMediaStart() { - return messageDrawableOutMediaStart; - } - - @Override - public List getPulledDialogs() { - return pulledDialogs; - } - - @Override - public void setPulledDialogs(List pulledDialogs) { - this.pulledDialogs = pulledDialogs; - } - - @Override - public boolean onKeyUp(int keyCode, KeyEvent event) { - if (keyCode == KeyEvent.KEYCODE_MENU && !checkTransitionAnimation() && !isSwipeInProgress() && getCurrentActionBar() != null) { - getCurrentActionBar().onMenuButtonPressed(); - } - return super.onKeyUp(keyCode, event); - } - - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - super.onMeasure(widthMeasureSpec, heightMeasureSpec); - - int width = MeasureSpec.getSize(widthMeasureSpec); - int height = MeasureSpec.getSize(heightMeasureSpec); - - boolean isPortrait = height > width; - if (wasPortrait != isPortrait && isInPreviewMode()) { - finishPreviewFragment(); - } - wasPortrait = isPortrait; - } - - private final class FragmentHolderView extends FrameLayout { - private BaseFragment fragment; - private int fragmentPanTranslationOffset; - private Paint backgroundPaint = new Paint(); - private int backgroundColor; - - public FragmentHolderView(@NonNull Context context) { - super(context); - setWillNotDraw(false); - } - - public void invalidateBackgroundColor() { - if (fragment == null || fragment.hasOwnBackground()) { - setBackground(null); - } else { - setBackgroundColor(fragment.getThemedColor(Theme.key_windowBackgroundWhite)); - } - } - - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - int width = MeasureSpec.getSize(widthMeasureSpec); - int height = MeasureSpec.getSize(heightMeasureSpec); - - int actionBarHeight = 0; - for (int i = 0; i < getChildCount(); i++) { - View child = getChildAt(i); - if (child instanceof ActionBar) { - child.measure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED)); - actionBarHeight = child.getMeasuredHeight(); - } - } - for (int i = 0; i < getChildCount(); i++) { - View child = getChildAt(i); - if (!(child instanceof ActionBar)) { - if (child.getFitsSystemWindows()) { - measureChildWithMargins(child, widthMeasureSpec, 0, heightMeasureSpec, 0); - } else { - measureChildWithMargins(child, widthMeasureSpec, 0, heightMeasureSpec, actionBarHeight); - } - } - } - setMeasuredDimension(width, height); - } - - @Override - protected void onLayout(boolean changed, int left, int top, int right, int bottom) { - int actionBarHeight = 0; - for (int i = 0; i < getChildCount(); i++) { - View child = getChildAt(i); - if (child instanceof ActionBar) { - child.layout(0, 0, child.getMeasuredWidth(), child.getMeasuredHeight()); - actionBarHeight = child.getMeasuredHeight(); - } - } - for (int i = 0; i < getChildCount(); i++) { - View child = getChildAt(i); - if (!(child instanceof ActionBar)) { - FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) child.getLayoutParams(); - if (child.getFitsSystemWindows()) { - child.layout(layoutParams.leftMargin, layoutParams.topMargin, layoutParams.leftMargin + child.getMeasuredWidth(), layoutParams.topMargin + child.getMeasuredHeight()); - } else { - child.layout(layoutParams.leftMargin, layoutParams.topMargin + actionBarHeight, layoutParams.leftMargin + 
child.getMeasuredWidth(), layoutParams.topMargin + actionBarHeight + child.getMeasuredHeight()); - } - } - } - } - - public void setFragmentPanTranslationOffset(int fragmentPanTranslationOffset) { - this.fragmentPanTranslationOffset = fragmentPanTranslationOffset; - invalidate(); - } - - @Override - protected void onDraw(Canvas canvas) { - super.onDraw(canvas); - if (fragmentPanTranslationOffset != 0) { - int color = Theme.getColor(Theme.key_windowBackgroundWhite); - if (backgroundColor != color) { - backgroundPaint.setColor(backgroundColor = Theme.getColor(Theme.key_windowBackgroundWhite)); - } - canvas.drawRect(0, getMeasuredHeight() - fragmentPanTranslationOffset - 3, getMeasuredWidth(), getMeasuredHeight(), backgroundPaint); - } - } - - @Override - protected void dispatchDraw(Canvas canvas) { - super.dispatchDraw(canvas); - fragment.drawOverlay(canvas, this); - } - - @Override - protected boolean drawChild(Canvas canvas, View child, long drawingTime) { - if (child instanceof ActionBar) { - return super.drawChild(canvas, child, drawingTime); - } else { - int actionBarHeight = 0; - int actionBarY = 0; - int childCount = getChildCount(); - for (int i = 0; i < childCount; i++) { - View view = getChildAt(i); - if (view == child) { - continue; - } - if (view instanceof ActionBar && view.getVisibility() == VISIBLE) { - if (((ActionBar) view).getCastShadows()) { - actionBarHeight = (int) (view.getMeasuredHeight() * view.getScaleY()); - actionBarY = (int) view.getY(); - } - break; - } - } - - boolean clipRoundForeground = indexOfChild(child) == 0 && fragment.isInPreviewMode(); - if (clipRoundForeground) { - canvas.save(); - path.rewind(); - AndroidUtilities.rectTmp.set(child.getLeft(), child.getTop() + AndroidUtilities.statusBarHeight, child.getRight(), child.getBottom()); - path.addRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(8), AndroidUtilities.dp(8), Path.Direction.CW); - canvas.clipPath(path); - } - boolean result = super.drawChild(canvas, child, drawingTime); - if (clipRoundForeground) { - canvas.restore(); - } - if (actionBarHeight != 0 && headerShadowDrawable != null) { - headerShadowDrawable.setBounds(0, actionBarY + actionBarHeight, getMeasuredWidth(), actionBarY + actionBarHeight + headerShadowDrawable.getIntrinsicHeight()); - headerShadowDrawable.draw(canvas); - } - return result; - } - } - - public void setFragment(BaseFragment fragment) { - this.fragment = fragment; - fragmentPanTranslationOffset = 0; - invalidate(); - - removeAllViews(); - - if (fragment == null) { - invalidateBackgroundColor(); - return; - } - - View v = fragment.getFragmentView(); - if (v == null) { - v = fragment.createView(getContext()); - fragment.setFragmentView(v); - } - if (v != null && v.getParent() instanceof ViewGroup) { - ((ViewGroup) v.getParent()).removeView(v); - } - addView(v); - - if (removeActionBarExtraHeight) { - fragment.getActionBar().setOccupyStatusBar(false); - } - if (fragment.getActionBar() != null && fragment.getActionBar().shouldAddToContainer()) { - ViewGroup parent = (ViewGroup) fragment.getActionBar().getParent(); - if (parent != null) { - parent.removeView(fragment.getActionBar()); - } - addView(fragment.getActionBar()); - } - - invalidateBackgroundColor(); - } - } - - private boolean isIgnoredView(ViewGroup root, MotionEvent e, Rect rect) { - if (root == null) return false; - for (int i = 0; i < root.getChildCount(); i++) { - View ch = root.getChildAt(i); - if (isIgnoredView0(ch, e, rect)) { - return true; - } - - if (ch instanceof ViewGroup) { - if 
(isIgnoredView((ViewGroup) ch, e, rect)) { - return true; - } - } - } - return isIgnoredView0(root, e, rect); - } - - private boolean isIgnoredView0(View v, MotionEvent e, Rect rect) { - v.getGlobalVisibleRect(rect); - if (v.getVisibility() != View.VISIBLE || !rect.contains((int)e.getX(), (int)e.getY())) { - return false; - } - - if (v instanceof ViewPager) { - ViewPager vp = (ViewPager) v; - return vp.getCurrentItem() != 0; - } - - return v.canScrollHorizontally(-1) || v instanceof SeekBarView; - } -} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LanguageSelectActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LanguageSelectActivity.java index 122943dc32..85f6aea8c7 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/LanguageSelectActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LanguageSelectActivity.java @@ -8,33 +8,31 @@ package org.telegram.ui; -import android.animation.Animator; -import android.animation.AnimatorListenerAdapter; -import android.animation.ValueAnimator; import android.content.Context; import android.content.DialogInterface; import android.content.SharedPreferences; import android.content.res.Configuration; import android.graphics.Canvas; import android.view.Gravity; +import android.text.TextUtils; import android.view.View; import android.view.ViewGroup; import android.widget.EditText; import android.widget.FrameLayout; -import android.widget.LinearLayout; import android.widget.TextView; +import androidx.annotation.NonNull; import androidx.recyclerview.widget.DefaultItemAnimator; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.RecyclerView; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.FileLog; -//import org.telegram.messenger.LanguageDetector; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; +import org.telegram.messenger.TranslateController; import org.telegram.messenger.Utilities; import org.telegram.tgnet.ConnectionsManager; import org.telegram.ui.ActionBar.ActionBar; @@ -54,12 +52,15 @@ import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.EmptyTextProgressView; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.Premium.PremiumFeatureBottomSheet; import org.telegram.ui.Components.RecyclerListView; +import org.telegram.ui.Components.TranslateAlert2; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; +import java.util.Iterator; import java.util.Timer; public class LanguageSelectActivity extends BaseFragment implements NotificationCenter.NotificationCenterDelegate { @@ -188,18 +189,60 @@ protected void onMoveAnimationUpdate(RecyclerView.ViewHolder holder) { } }; itemAnimator.setDurations(400); + itemAnimator.setDelayAnimations(false); itemAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); listView.setItemAnimator(itemAnimator); frameLayout.addView(listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); listView.setOnItemClickListener((view, position) -> { try { + if (view instanceof TextCheckCell) { + final boolean prevFullValue = getContextValue() || getChatValue(); + if (position == 1) { + boolean value = !getContextValue(); + getMessagesController().getTranslateController().setContextTranslateEnabled(value); + ((TextCheckCell) 
view).setChecked(value); + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.updateSearchSettings); + } else if (position == 2) { + boolean value = !getChatValue(); + if (value && !getUserConfig().isPremium()) { + showDialog(new PremiumFeatureBottomSheet(LanguageSelectActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_TRANSLATIONS, false)); + return; + } + MessagesController.getMainSettings(currentAccount).edit().putBoolean("translate_chat_button", value).apply(); + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.updateSearchSettings); + ((TextCheckCell) view).setChecked(value); + } + final boolean currentFullValue = getContextValue() || getChatValue(); + if (currentFullValue != prevFullValue) { + int start = 1 + (!getMessagesController().premiumLocked ? 1 : 0); + TextCheckCell last = null; + for (int i = 0; i < listView.getChildCount(); ++i) { + View child = listView.getChildAt(i); + if (listView.getChildAdapterPosition(child) == start && child instanceof TextCheckCell) { + last = (TextCheckCell) child; + } + } + if (last != null) { + last.setDivider(currentFullValue); + } + if (currentFullValue) { + listAdapter.notifyItemInserted(start + 1); + } else { + listAdapter.notifyItemRemoved(start + 1); + } + } + return; + } else if (view instanceof TextSettingsCell) { + presentFragment(new RestrictedLanguagesSelectActivity()); + return; + } if (getParentActivity() == null || parentLayout == null || !(view instanceof TextRadioCell)) { return; } boolean search = listView.getAdapter() == searchListViewAdapter; if (!search) { - position -= 2; + position -= (7 - (!(getChatValue() || getContextValue()) ? 1 : 0) - (getMessagesController().premiumLocked ? 1 : 0)); } LocaleController.LocaleInfo localeInfo; if (search) { @@ -217,11 +260,16 @@ protected void onMoveAnimationUpdate(RecyclerView.ViewHolder holder) { boolean sameLang = prevLocale == localeInfo; final AlertDialog progressDialog = new AlertDialog(getContext(), AlertDialog.ALERT_TYPE_SPINNER); + if (!sameLang) { + progressDialog.showDelayed(500); + } int reqId = LocaleController.getInstance().applyLanguage(localeInfo, true, false, false, true, currentAccount, () -> { progressDialog.dismiss(); if (!sameLang) { - actionBar.closeSearchField(); - updateLanguage(); + AndroidUtilities.runOnUIThread(() -> { + actionBar.closeSearchField(); + updateLanguage(); + }, 10); } }); if (reqId != 0) { @@ -236,16 +284,17 @@ protected void onMoveAnimationUpdate(RecyclerView.ViewHolder holder) { HashSet selectedLanguages = RestrictedLanguagesSelectActivity.getRestrictedLanguages(); HashSet newSelectedLanguages = new HashSet(selectedLanguages); - if (selectedLanguages.contains(langCode)) { - newSelectedLanguages.removeIf(s -> s != null && s.equals(langCode)); - if (!selectedLanguages.contains(prevLangCode)) - newSelectedLanguages.add(prevLangCode); + if (selectedLanguages.contains(prevLangCode) && !selectedLanguages.contains(langCode)) { + newSelectedLanguages.removeIf(s -> s != null && s.equals(prevLangCode)); + } + if (langCode != null && !"null".equals(langCode)) { + newSelectedLanguages.add(langCode); } preferences.edit().putStringSet("translate_button_restricted_languages", newSelectedLanguages).apply(); + MessagesController.getInstance(currentAccount).getTranslateController().checkRestrictedLanguagesUpdate(); + MessagesController.getInstance(currentAccount).getTranslateController().cleanup(); - if (!sameLang) { - progressDialog.showDelayed(500); - } + 
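The click handler above (and the adapter methods later in this file) convert an adapter position into a language-list index by subtracting 7 - (!(getChatValue() || getContextValue()) ? 1 : 0) - (getMessagesController().premiumLocked ? 1 : 0). The helper below is only an illustration of what that expression counts, assuming the row order this patch introduces; it is not part of the change.

    final class LanguageRowsOffsetSketch {
        // Illustrative, not part of the patch: how many rows precede the language
        // list in the reworked settings layout (header, "show translate button"
        // switch, optional chat-translation switch, optional "Do not translate"
        // row, two info cells, and the "Language" header).
        static int rowsBeforeLanguageList(boolean premiumLocked, boolean translateEnabled) {
            int rows = 5;                   // header + first switch + two info cells + "Language" header
            if (!premiumLocked) rows++;     // second switch ("translate entire chats")
            if (translateEnabled) rows++;   // "Do not translate" row
            return rows;                    // equals 7 - (!enabled ? 1 : 0) - (premiumLocked ? 1 : 0)
        }
    }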
TranslateController.invalidateSuggestedLanguageCodes(); } } catch (Exception e) { FileLog.e(e); @@ -259,7 +308,7 @@ protected void onMoveAnimationUpdate(RecyclerView.ViewHolder holder) { } boolean search = listView.getAdapter() == searchListViewAdapter; if (!search) { - position -= 2; + position -= (7 - (!(getChatValue() || getContextValue()) ? 1 : 0) - (getMessagesController().premiumLocked ? 1 : 0)); } LocaleController.LocaleInfo localeInfo; if (search) { @@ -298,7 +347,7 @@ protected void onMoveAnimationUpdate(RecyclerView.ViewHolder holder) { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } catch (Exception e) { FileLog.e(e); @@ -362,6 +411,16 @@ private void fillLanguages() { Collections.sort(unofficialLanguages, comparator); } + @Override + public void onBecomeFullyVisible() { + super.onBecomeFullyVisible(); + LocaleController.getInstance().checkForcePatchLangpack(currentAccount, () -> { + if (!isPaused) { + updateLanguage(); + } + }); + } + @Override public void onResume() { super.onResume(); @@ -379,43 +438,29 @@ public void search(final String query) { listView.setAdapter(listAdapter); } } else { -// try { -// if (searchTimer != null) { -// searchTimer.cancel(); -// } -// } catch (Exception e) { -// FileLog.e(e); -// } -// searchTimer = new Timer(); -// searchTimer.schedule(new TimerTask() { -// @Override -// public void run() { -// try { -// searchTimer.cancel(); -// searchTimer = null; -// } catch (Exception e) { -// FileLog.e(e); -// } processSearch(query); -// } -// }, 100, 300); } } private void updateLanguage() { if (actionBar != null) { - actionBar.setTitleAnimated(LocaleController.getString("Language", R.string.Language), true, 350, CubicBezierInterpolator.EASE_OUT_QUINT); - } - if (listView != null) { - for (int i = 0; i < listView.getChildCount(); ++i) { - View child = listView.getChildAt(i); - if (child instanceof HeaderCell) { - listAdapter.notifyItemChanged(listView.getChildAdapterPosition(child)); - } else { - listAdapter.onBindViewHolder(listView.getChildViewHolder(child), listView.getChildAdapterPosition(child)); - } + String newTitle = LocaleController.getString("Language", R.string.Language); + if (!TextUtils.equals(actionBar.getTitle(), newTitle)) { + actionBar.setTitleAnimated(newTitle, true, 350, CubicBezierInterpolator.EASE_OUT_QUINT); } } + if (listAdapter != null) { + listAdapter.notifyItemRangeChanged(0, listAdapter.getItemCount()); +// for (int i = 0; i < listView.getChildCount(); ++i) { +// View child = listView.getChildAt(i); +// listAdapter.onBindViewHolder(listView.getChildViewHolder(child), listView.getChildAdapterPosition(child)); +// if (child instanceof TextRadioCell) { +// ((TextRadioCell) child).updateRTL(); +// } else if (child instanceof TextInfoPrivacyCell) { +// ((TextInfoPrivacyCell) child).updateRTL(); +// } +// } + } } private void processSearch(final String query) { @@ -454,7 +499,20 @@ private void updateSearchResults(final ArrayList ar }); } - // NekoX: Merge 8.4.1, remove TranslateSettings + private boolean getContextValue() { + return getMessagesController().getTranslateController().isContextTranslateEnabled(); + } + + private boolean getChatValue() { + return getMessagesController().getTranslateController().isFeatureAvailable(); + } + + public static final int VIEW_TYPE_LANGUAGE = 0; + public static final int 
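When the interface language is switched, the hunk above also rewrites the "do not translate" set before notifying TranslateController. A minimal standalone sketch of that set update (class and method names here are illustrative; the real code persists the set under translate_button_restricted_languages):

    import java.util.HashSet;

    final class RestrictedLanguagesSketch {
        // Mirrors the update performed above: drop the previous UI language from
        // the restricted set (only when the newly selected one was not already
        // restricted) and always add the new language code.
        static HashSet<String> afterLanguageSwitch(HashSet<String> selected, String prevLangCode, String langCode) {
            HashSet<String> updated = new HashSet<>(selected);
            if (selected.contains(prevLangCode) && !selected.contains(langCode)) {
                updated.removeIf(s -> s != null && s.equals(prevLangCode));
            }
            if (langCode != null && !"null".equals(langCode)) {
                updated.add(langCode);
            }
            return updated;
        }
    }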
VIEW_TYPE_SHADOW = 1; + public static final int VIEW_TYPE_SWITCH = 2; + public static final int VIEW_TYPE_HEADER = 3; + public static final int VIEW_TYPE_SETTINGS = 4; + public static final int VIEW_TYPE_INFO = 5; private class ListAdapter extends RecyclerListView.SelectionAdapter { @@ -468,7 +526,8 @@ public ListAdapter(Context context, boolean isSearch) { @Override public boolean isEnabled(RecyclerView.ViewHolder holder) { - return holder.getItemViewType() == 0; + final int viewType = holder.getItemViewType(); + return viewType == VIEW_TYPE_LANGUAGE || viewType == VIEW_TYPE_SETTINGS || viewType == VIEW_TYPE_SWITCH; } @Override @@ -486,7 +545,7 @@ public int getItemCount() { if (!unofficialLanguages.isEmpty()) { count += unofficialLanguages.size() + 1; } - return 2 + count; + return 4 + (getMessagesController().premiumLocked ? 0 : 1) + (getChatValue() || getContextValue() ? 1 : 0) + 1 + count; } } @@ -494,22 +553,31 @@ public int getItemCount() { public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { View view; switch (viewType) { - case 0: { -// view = new LanguageCell(mContext, false); + case VIEW_TYPE_LANGUAGE: { view = new TextRadioCell(mContext); view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); break; } -// case 2: -// TranslateSettings translateSettings = new TranslateSettings(mContext); -// view = translateSettings; -// break; - case 3: + case VIEW_TYPE_SWITCH: + TextCheckCell switchCell = new TextCheckCell(mContext); + switchCell.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + view = switchCell; + break; + case VIEW_TYPE_SETTINGS: + TextSettingsCell settingsCell = new TextSettingsCell(mContext); + settingsCell.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + view = settingsCell; + break; + case VIEW_TYPE_HEADER: HeaderCell header = new HeaderCell(mContext); header.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); view = header; break; - case 1: + case VIEW_TYPE_INFO: + TextInfoPrivacyCell infoCell = new TextInfoPrivacyCell(mContext); + view = infoCell; + break; + case VIEW_TYPE_SHADOW: default: { view = new ShadowSectionCell(mContext); break; @@ -518,14 +586,22 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType return new RecyclerListView.Holder(view); } + @Override + public void onViewAttachedToWindow(@NonNull RecyclerView.ViewHolder holder) { + if (holder.itemView instanceof TextRadioCell) { + ((TextRadioCell) holder.itemView).updateRTL(); + } + } + @Override public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { switch (holder.getItemViewType()) { - case 0: { + case VIEW_TYPE_LANGUAGE: { if (!search) { - position -= 2; + position -= (7 - (!(getChatValue() || getContextValue()) ? 1 : 0) - (getMessagesController().premiumLocked ? 
1 : 0)); } TextRadioCell textSettingsCell = (TextRadioCell) holder.itemView; + textSettingsCell.updateRTL(); LocaleController.LocaleInfo localeInfo = null; boolean last; if (search) { @@ -555,7 +631,7 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { textSettingsCell.setChecked(localeInfo == LocaleController.getInstance().getCurrentLocaleInfo()); break; } - case 1: { + case VIEW_TYPE_SHADOW: { if (!search) position--; ShadowSectionCell sectionCell = (ShadowSectionCell) holder.itemView; @@ -566,15 +642,75 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } break; } -// case 2: { -// TranslateSettings translateSettings = (TranslateSettings) holder.itemView; -// translateSettings.setVisibility(searching ? View.GONE : View.VISIBLE); -// translateSettings.updateTranslations(); -// break; -// } - case 3: { + case VIEW_TYPE_SETTINGS: { + TextSettingsCell settingsCell = (TextSettingsCell) holder.itemView; + settingsCell.updateRTL(); + HashSet langCodes = RestrictedLanguagesSelectActivity.getRestrictedLanguages(); + final String doNotTranslateCellName = LocaleController.getString("DoNotTranslate", R.string.DoNotTranslate); + String doNotTranslateCellValue = null; + try { + boolean[] accusative = new boolean[1]; + if (langCodes.size() == 0) { + doNotTranslateCellValue = ""; + } else if (langCodes.size() == 1) { + doNotTranslateCellValue = TranslateAlert2.capitalFirst(TranslateAlert2.languageName(langCodes.iterator().next(), accusative)); + } else { + Iterator iterator = langCodes.iterator(); + boolean first = true; + StringBuilder string = new StringBuilder(); + while (iterator.hasNext()) { + String lang = iterator.next(); + if (!first) { + string.append(", "); + } + String langName = TranslateAlert2.capitalFirst(TranslateAlert2.languageName(lang, accusative)); + if (langName != null) { + string.append(langName); + first = false; + } + } + doNotTranslateCellValue = string.toString(); + if (settingsCell.getValueTextView().getPaint().measureText(doNotTranslateCellValue) > Math.min((AndroidUtilities.displaySize.x - AndroidUtilities.dp(34)) / 2f, AndroidUtilities.displaySize.x - AndroidUtilities.dp(21 * 4) - settingsCell.getTextView().getPaint().measureText(doNotTranslateCellName))) { + doNotTranslateCellValue = null; + } + } + } catch (Exception ignore) {} + if (doNotTranslateCellValue == null) { + doNotTranslateCellValue = String.format(LocaleController.getPluralString("Languages", langCodes.size()), langCodes.size()); + } + settingsCell.setTextAndValue(doNotTranslateCellName, doNotTranslateCellValue, true, false); + break; + } + case VIEW_TYPE_SWITCH: { + TextCheckCell cell = (TextCheckCell) holder.itemView; + cell.updateRTL(); + if (position == 1) { + cell.setTextAndCheck(LocaleController.getString("ShowTranslateButton", R.string.ShowTranslateButton), getContextValue(), true); + } else if (position == 2) { + cell.setTextAndCheck(LocaleController.getString("ShowTranslateChatButton", R.string.ShowTranslateChatButton), getChatValue(), getContextValue() || getChatValue()); + cell.setCheckBoxIcon(!getUserConfig().isPremium() ? R.drawable.permission_locked : 0); + } + break; + } + case VIEW_TYPE_INFO: { + TextInfoPrivacyCell infoCell = (TextInfoPrivacyCell) holder.itemView; + infoCell.updateRTL(); + if (position == (!getMessagesController().premiumLocked && (getContextValue() || getChatValue()) ? 
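The VIEW_TYPE_SETTINGS binding above builds the value text for the "Do not translate" row by joining the restricted language names and falling back to a plural "Languages" label when the joined text would not fit beside the title. The sketch below reduces that to a standalone function; the measure callback and fallback string stand in for TextPaint.measureText and the plural resource, and the single-language capitalization path is skipped.

    import java.util.Collection;
    import java.util.function.ToDoubleFunction;

    final class DoNotTranslateValueSketch {
        // Join the names, but fall back to a short "N languages" label when the
        // joined string would overflow the space next to the row title.
        static String summarize(Collection<String> names, ToDoubleFunction<String> measure,
                                double maxWidth, String pluralFallback) {
            if (names.isEmpty()) {
                return "";
            }
            StringBuilder joined = new StringBuilder();
            for (String name : names) {
                if (joined.length() > 0) joined.append(", ");
                joined.append(name);
            }
            String value = joined.toString();
            return measure.applyAsDouble(value) > maxWidth ? pluralFallback : value;
        }
    }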
4 : 3)) { + infoCell.setText(LocaleController.getString("TranslateMessagesInfo1", R.string.TranslateMessagesInfo1)); + infoCell.setBackground(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); + infoCell.setTopPadding(11); + infoCell.setBottomPadding(16); + } else { + infoCell.setTopPadding(0); + infoCell.setBottomPadding(16); + infoCell.setText(LocaleController.getString("TranslateMessagesInfo2", R.string.TranslateMessagesInfo2)); + infoCell.setBackground(Theme.getThemedDrawable(mContext, R.drawable.greydivider_top, Theme.key_windowBackgroundGrayShadow)); + } + break; + } + case VIEW_TYPE_HEADER: { HeaderCell header = (HeaderCell) holder.itemView; - header.setText(LocaleController.getString("Language", R.string.Language)); + header.setText(position == 0 ? LocaleController.getString("TranslateMessages", R.string.TranslateMessages) : LocaleController.getString("Language", R.string.Language)); break; } } @@ -589,12 +725,24 @@ public int getItemViewType(int i) { if (i == -1) return 3; if (search) { - return 0; - } - if (!unofficialLanguages.isEmpty() && (i == unofficialLanguages.size() || i == unofficialLanguages.size() + sortedLanguages.size() + 1) || unofficialLanguages.isEmpty() && i == sortedLanguages.size()) { - return 1; + return VIEW_TYPE_LANGUAGE; + } else { + if (i-- == 0) return VIEW_TYPE_HEADER; + if (i-- == 0) return VIEW_TYPE_SWITCH; + if (!getMessagesController().premiumLocked) { + if (i-- == 0) return VIEW_TYPE_SWITCH; + } + if (getChatValue() || getContextValue()) { + if (i-- == 0) return VIEW_TYPE_SETTINGS; + } + if (i-- == 0) return VIEW_TYPE_INFO; + if (i-- == 0) return VIEW_TYPE_INFO; + if (i-- == 0) return VIEW_TYPE_HEADER; + if (!unofficialLanguages.isEmpty() && (i == unofficialLanguages.size() || i == unofficialLanguages.size() + sortedLanguages.size() + 1) || unofficialLanguages.isEmpty() && i == sortedLanguages.size()) { + return VIEW_TYPE_SHADOW; + } + return VIEW_TYPE_LANGUAGE; } - return 0; } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java index 06735a5f46..37b4313761 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java @@ -27,6 +27,7 @@ import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.Canvas; +import android.graphics.Color; import android.graphics.LinearGradient; import android.graphics.Matrix; import android.graphics.Paint; @@ -47,7 +48,11 @@ import android.os.SystemClock; import android.provider.ContactsContract; import android.provider.Settings; +import android.text.Spannable; +import android.text.SpannableStringBuilder; +import android.text.TextPaint; import android.text.TextUtils; +import android.text.style.ClickableSpan; import android.util.Base64; import android.util.SparseArray; import android.util.SparseIntArray; @@ -71,7 +76,6 @@ import android.widget.Toast; import androidx.annotation.NonNull; -import androidx.annotation.Nullable; import androidx.arch.core.util.Function; import androidx.core.app.ActivityCompat; import androidx.core.content.pm.ShortcutInfoCompat; @@ -87,6 +91,8 @@ import org.telegram.messenger.AccountInstance; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; +import org.telegram.messenger.AutoDeleteMediaTask; +import org.telegram.messenger.BackupAgent; import org.telegram.messenger.BotWebViewVibrationEffect; 
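getItemViewType above maps positions to row types with a chain of i-- == 0 checks, so optional rows shift everything below them without any extra index bookkeeping. A standalone copy of that cascade for the fixed rows (the shadow handling for the language sections is omitted here):

    final class LanguageViewTypeSketch {
        static final int VIEW_TYPE_LANGUAGE = 0, VIEW_TYPE_SHADOW = 1, VIEW_TYPE_SWITCH = 2,
                         VIEW_TYPE_HEADER = 3, VIEW_TYPE_SETTINGS = 4, VIEW_TYPE_INFO = 5;

        // Each fixed row consumes one position; optional rows consume one only
        // when they are visible; whatever index is left over belongs to the
        // language list.
        static int viewTypeFor(int i, boolean premiumLocked, boolean translateEnabled) {
            if (i-- == 0) return VIEW_TYPE_HEADER;                        // "Translate messages" header
            if (i-- == 0) return VIEW_TYPE_SWITCH;                        // show translate button
            if (!premiumLocked && i-- == 0) return VIEW_TYPE_SWITCH;      // translate entire chats
            if (translateEnabled && i-- == 0) return VIEW_TYPE_SETTINGS;  // "Do not translate"
            if (i-- == 0) return VIEW_TYPE_INFO;
            if (i-- == 0) return VIEW_TYPE_INFO;
            if (i-- == 0) return VIEW_TYPE_HEADER;                        // "Language" header
            return VIEW_TYPE_LANGUAGE;                                    // language rows (and shadows) follow
        }
    }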
import org.telegram.messenger.BuildVars; import org.telegram.messenger.ChatObject; @@ -98,6 +104,7 @@ import org.telegram.messenger.FingerprintController; import org.telegram.messenger.GenericProvider; import org.telegram.messenger.ImageLoader; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaController; import org.telegram.messenger.MediaDataController; @@ -105,6 +112,7 @@ import org.telegram.messenger.MessagesController; import org.telegram.messenger.MessagesStorage; import org.telegram.messenger.NotificationCenter; +import org.telegram.messenger.NotificationsController; import org.telegram.messenger.PushListenerController; import org.telegram.messenger.R; import org.telegram.messenger.SendMessagesHelper; @@ -139,7 +147,9 @@ import org.telegram.ui.Components.AppIconBulletinLayout; import org.telegram.ui.Components.AttachBotIntroTopView; import org.telegram.ui.Components.AudioPlayerAlert; +import org.telegram.ui.Components.BatteryDrawable; import org.telegram.ui.Components.BlockingUpdateView; +import org.telegram.ui.Components.BotWebViewSheet; import org.telegram.ui.Components.Bulletin; import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.CubicBezierInterpolator; @@ -259,6 +269,7 @@ public class LaunchActivity extends BasePermissionsActivity implements INavigati public DrawerLayoutContainer drawerLayoutContainer; private DrawerLayoutAdapter drawerLayoutAdapter; private PasscodeView passcodeView; + private List overlayPasscodeViews = new ArrayList<>(); private TermsOfServiceView termsOfServiceView; private BlockingUpdateView blockingUpdateView; private AlertDialog visibleDialog; @@ -315,6 +326,8 @@ public void accept(Boolean aBoolean) { } }; + public static LaunchActivity instance; + @Override protected void onCreate(Bundle savedInstanceState) { if (BuildVars.DEBUG_VERSION) { @@ -322,6 +335,7 @@ protected void onCreate(Bundle savedInstanceState) { .detectLeakedClosableObjects() .build()); } + instance = this; ApplicationLoader.postInitApplication(); AndroidUtilities.checkDisplaySize(this, getResources().getConfiguration()); currentAccount = UserConfig.selectedAccount; @@ -763,7 +777,8 @@ public void onPreviewOpenAnimationEnd() { NotificationCenter.getGlobalInstance().addObserver(drawerLayoutAdapter, NotificationCenter.proxySettingsChanged); NotificationCenter.getGlobalInstance().addObserver(drawerLayoutAdapter, NotificationCenter.updateUserStatus); - if (actionBarLayout.getFragmentStack().isEmpty()) { + LiteMode.addOnPowerSaverAppliedListener(this::onPowerSaver); + if (actionBarLayout.getFragmentStack().isEmpty() && (layersActionBarLayout == null || layersActionBarLayout.getFragmentStack().isEmpty())) { if (!UserConfig.getInstance(currentAccount).isClientActivated()) { actionBarLayout.addFragmentToStack(getClientNotActivatedFragment()); drawerLayoutContainer.setAllowOpenDrawer(false, false); @@ -832,7 +847,7 @@ public void onPreviewOpenAnimationEnd() { FileLog.e(e); } } else { - BaseFragment fragment = actionBarLayout.getFragmentStack().get(0); + BaseFragment fragment = actionBarLayout.getFragmentStack().size() > 0 ? 
actionBarLayout.getFragmentStack().get(0) : layersActionBarLayout.getFragmentStack().get(0); if (fragment instanceof DialogsActivity) { ((DialogsActivity) fragment).setSideMenu(sideMenu); } @@ -912,7 +927,7 @@ public void onPreviewOpenAnimationEnd() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { ActivityManager am = (ActivityManager) getSystemService(Context.ACTIVITY_SERVICE); - if (am.isBackgroundRestricted() && System.currentTimeMillis() - SharedConfig.BackgroundActivityPrefs.getLastCheckedBackgroundActivity() >= 86400000L) { + if (am.isBackgroundRestricted() && System.currentTimeMillis() - SharedConfig.BackgroundActivityPrefs.getLastCheckedBackgroundActivity() >= 86400000L && SharedConfig.BackgroundActivityPrefs.getDismissedCount() < 3) { AlertsCreator.createBackgroundActivityDialog(this).show(); SharedConfig.BackgroundActivityPrefs.setLastCheckedBackgroundActivity(System.currentTimeMillis()); } @@ -932,6 +947,9 @@ public void onViewDetachedFromWindow(View v) { }); } EmojiHelper.getInstance().checkEmojiPacks(); + BackupAgent.requestBackup(this); + + RestrictedLanguagesSelectActivity.checkRestrictedLanguages(false); } @Override @@ -1116,7 +1134,7 @@ public void removeOnUserLeaveHintListener(Runnable callback) { } private BaseFragment getClientNotActivatedFragment() { - if (LoginActivity.loadCurrentState(false).getInt("currentViewNum", 0) != 0) { + if (LoginActivity.loadCurrentState(false, currentAccount).getInt("currentViewNum", 0) != 0) { return new LoginActivity(); } return new IntroActivity(); @@ -1165,22 +1183,20 @@ public void onSettings() { @Override protected void onEmojiSelected(View emojiView, Long documentId, TLRPC.Document document, Integer until) { - TLRPC.TL_account_updateEmojiStatus req = new TLRPC.TL_account_updateEmojiStatus(); + TLRPC.EmojiStatus emojiStatus; if (documentId == null) { - req.emoji_status = new TLRPC.TL_emojiStatusEmpty(); + emojiStatus = new TLRPC.TL_emojiStatusEmpty(); } else if (until != null) { - req.emoji_status = new TLRPC.TL_emojiStatusUntil(); - ((TLRPC.TL_emojiStatusUntil) req.emoji_status).document_id = documentId; - ((TLRPC.TL_emojiStatusUntil) req.emoji_status).until = until; + emojiStatus = new TLRPC.TL_emojiStatusUntil(); + ((TLRPC.TL_emojiStatusUntil) emojiStatus).document_id = documentId; + ((TLRPC.TL_emojiStatusUntil) emojiStatus).until = until; } else { - req.emoji_status = new TLRPC.TL_emojiStatus(); - ((TLRPC.TL_emojiStatus) req.emoji_status).document_id = documentId; + emojiStatus = new TLRPC.TL_emojiStatus(); + ((TLRPC.TL_emojiStatus) emojiStatus).document_id = documentId; } - TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(UserConfig.getInstance(currentAccount).getClientUserId()); + MessagesController.getInstance(currentAccount).updateEmojiStatus(emojiStatus); + TLRPC.User user = UserConfig.getInstance(currentAccount).getCurrentUser(); if (user != null) { - user.emoji_status = req.emoji_status; - NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.userEmojiStatusUpdated, user); - MessagesController.getInstance(currentAccount).updateEmojiStatusUntilUpdate(user.id, user.emoji_status); for (int i = 0; i < sideMenu.getChildCount(); ++i) { View child = sideMenu.getChildAt(i); if (child instanceof DrawerUserCell) { @@ -1207,11 +1223,6 @@ protected void onEmojiSelected(View emojiView, Long documentId, TLRPC.Document d } } } - ConnectionsManager.getInstance(currentAccount).sendRequest(req, (res, err) -> { - if (!(res instanceof TLRPC.TL_boolTrue)) { - // TODO: reject - } - 
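onEmojiSelected above no longer sends TL_account_updateEmojiStatus itself; it builds a TLRPC.EmojiStatus and delegates to MessagesController.updateEmojiStatus. A sketch of just the status construction, using the same TLRPC types that appear in the hunk (the method name is illustrative and not part of the patch):

    // Build the status object that is handed to MessagesController.updateEmojiStatus(...).
    static TLRPC.EmojiStatus buildEmojiStatus(Long documentId, Integer until) {
        if (documentId == null) {
            return new TLRPC.TL_emojiStatusEmpty();            // clear the current status
        }
        if (until != null) {
            TLRPC.TL_emojiStatusUntil status = new TLRPC.TL_emojiStatusUntil();
            status.document_id = documentId;
            status.until = until;                              // status with an expiry time
            return status;
        }
        TLRPC.TL_emojiStatus status = new TLRPC.TL_emojiStatus();
        status.document_id = documentId;                       // permanent custom-emoji status
        return status;
    }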
}); if (popup[0] != null) { selectAnimatedEmojiDialog = null; popup[0].dismiss(); @@ -1372,6 +1383,7 @@ private void switchToAvailableAccountOrLogout() { if (drawerLayoutAdapter != null) { drawerLayoutAdapter.notifyDataSetChanged(); } + RestrictedLanguagesSelectActivity.checkRestrictedLanguages(true); clearFragments(); actionBarLayout.rebuildLogout(); if (AndroidUtilities.isTablet()) { @@ -1451,14 +1463,17 @@ private void checkLayout() { if (!AndroidUtilities.isInMultiwindow && (!AndroidUtilities.isSmallTablet() || getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE)) { tabletFullSize = false; - if (actionBarLayout.getFragmentStack().size() >= 2) { - for (int a = 1; a < actionBarLayout.getFragmentStack().size(); a++) { - BaseFragment chatFragment = actionBarLayout.getFragmentStack().get(a); + List fragmentStack = actionBarLayout.getFragmentStack(); + if (fragmentStack.size() >= 2) { + for (int a = 1; a < fragmentStack.size(); a++) { + BaseFragment chatFragment = fragmentStack.get(a); if (chatFragment instanceof ChatActivity) { ((ChatActivity) chatFragment).setIgnoreAttachOnPause(true); } chatFragment.onPause(); - actionBarLayout.removeFragmentFromStack(a); + chatFragment.onFragmentDestroy(); + chatFragment.setParentLayout(null); + fragmentStack.remove(chatFragment); rightActionBarLayout.addFragmentToStack(chatFragment); a--; } @@ -1472,14 +1487,17 @@ private void checkLayout() { shadowTabletSide.setVisibility(!actionBarLayout.getFragmentStack().isEmpty() ? View.VISIBLE : View.GONE); } else { tabletFullSize = true; - if (!rightActionBarLayout.getFragmentStack().isEmpty()) { - for (int a = 0; a < rightActionBarLayout.getFragmentStack().size(); a++) { - BaseFragment chatFragment = rightActionBarLayout.getFragmentStack().get(a); + List fragmentStack = rightActionBarLayout.getFragmentStack(); + if (!fragmentStack.isEmpty()) { + for (int a = 0; a < fragmentStack.size(); a++) { + BaseFragment chatFragment = fragmentStack.get(a); if (chatFragment instanceof ChatActivity) { ((ChatActivity) chatFragment).setIgnoreAttachOnPause(true); } chatFragment.onPause(); - rightActionBarLayout.removeFragmentFromStack(a); + chatFragment.onFragmentDestroy(); + chatFragment.setParentLayout(null); + fragmentStack.remove(chatFragment); actionBarLayout.addFragmentToStack(chatFragment); a--; } @@ -1573,7 +1591,7 @@ public void showPasscodeActivity(boolean fingerprint, boolean animated, int x, i if (messageObject != null && messageObject.isRoundVideo()) { MediaController.getInstance().cleanupPlayer(true, true); } - passcodeView.onShow(fingerprint, animated, x, y, () -> { + passcodeView.onShow(overlayPasscodeViews.isEmpty() && fingerprint, animated, x, y, () -> { actionBarLayout.getView().setVisibility(View.INVISIBLE); if (AndroidUtilities.isTablet()) { if (layersActionBarLayout != null && layersActionBarLayout.getView() != null && layersActionBarLayout.getView().getVisibility() == View.VISIBLE) { @@ -1587,9 +1605,13 @@ public void showPasscodeActivity(boolean fingerprint, boolean animated, int x, i onShow.run(); } }, onStart); + for (int i = 0; i < overlayPasscodeViews.size(); i++) { + PasscodeView overlay = overlayPasscodeViews.get(i); + overlay.onShow(fingerprint && i == overlayPasscodeViews.size() - 1, animated, x, y, null, null); + } SharedConfig.isWaitingForPasscodeEnter = true; drawerLayoutContainer.setAllowOpenDrawer(false, false); - passcodeView.setDelegate(() -> { + PasscodeView.PasscodeViewDelegate delegate = view -> { SharedConfig.isWaitingForPasscodeEnter = false; if 
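showPasscodeActivity above now forwards onShow to every overlay passcode view but passes fingerprint == true only to the top-most one, which is the same rule the new allowShowFingerprintDialog helper applies further down. A generic sketch of that rule (PasscodeView itself is the app's class, so the sketch uses a type parameter):

    import java.util.List;

    final class PasscodeOverlaySketch {
        // Only the top-most overlay may drive the fingerprint prompt; with no
        // overlays attached, the main passcode view owns it.
        static <V> boolean mayShowFingerprint(List<V> overlays, V mainView, V candidate) {
            if (overlays.isEmpty()) {
                return candidate == mainView;
            }
            return overlays.get(overlays.size() - 1) == candidate;
        }
    }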
(passcodeSaveIntent != null) { handleIntent(passcodeSaveIntent, passcodeSaveIntentIsNew, passcodeSaveIntentIsRestore, true); @@ -1606,7 +1628,26 @@ public void showPasscodeActivity(boolean fingerprint, boolean animated, int x, i } rightActionBarLayout.getView().setVisibility(View.VISIBLE); } - }); + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.passcodeDismissed, view); + try { + NotificationsController.getInstance(UserConfig.selectedAccount).showNotifications(); + } catch (Exception e) { + FileLog.e(e); + } + }; + passcodeView.setDelegate(delegate); + for (PasscodeView overlay : overlayPasscodeViews) { + overlay.setDelegate(delegate); + } + try { + NotificationsController.getInstance(UserConfig.selectedAccount).showNotifications(); + } catch (Exception e) { + FileLog.e(e); + } + } + + public boolean allowShowFingerprintDialog(PasscodeView passcodeView) { + return overlayPasscodeViews.isEmpty() ? passcodeView == this.passcodeView : overlayPasscodeViews.get(overlayPasscodeViews.size() - 1) == passcodeView; } private boolean handleIntent(Intent intent, boolean isNew, boolean restore, boolean fromPassword) { @@ -1976,6 +2017,8 @@ private boolean handleIntent(Intent intent, boolean isNew, boolean restore, bool String emoji = null; HashMap auth = null; String unsupportedUrl = null; + String botAppMaybe = null; + String startApp = null; String botUser = null; String botChat = null; String botChannel = null; @@ -2196,6 +2239,7 @@ private boolean handleIntent(Intent intent, boolean isNew, boolean restore, bool } else if (path.startsWith("contact/")) { contactToken = path.substring(8); } else if (path.length() >= 1) { + botAppMaybe = null; ArrayList segments = new ArrayList<>(data.getPathSegments()); if (segments.size() > 0 && segments.get(0).equals("s")) { segments.remove(0); @@ -2203,8 +2247,14 @@ private boolean handleIntent(Intent intent, boolean isNew, boolean restore, bool if (segments.size() > 0) { username = segments.get(0); if (segments.size() > 1) { - messageId = Utilities.parseInt(segments.get(1)); - if (messageId == 0) { + botAppMaybe = segments.get(1); + startApp = data.getQueryParameter("startapp"); + try { + messageId = Utilities.parseInt(segments.get(1)); + if (messageId == 0) { + messageId = null; + } + } catch (NumberFormatException ignored) { messageId = null; } } @@ -2265,6 +2315,8 @@ private boolean handleIntent(Intent intent, boolean isNew, boolean restore, bool username = username.substring(1); } } + botAppMaybe = data.getQueryParameter("appname"); + startApp = data.getQueryParameter("startapp"); if ("telegrampassport".equals(username)) { username = null; auth = new HashMap<>(); @@ -2524,6 +2576,12 @@ private boolean handleIntent(Intent intent, boolean isNew, boolean restore, bool open_settings = 4; } else if (url.contains("change_number")) { open_settings = 5; + } else if (url.contains("?enablelogs")) { + open_settings = 7; + } else if (url.contains("?sendlogs")) { + open_settings = 8; + } else if (url.contains("?disablelogs")) { + open_settings = 9; } else if (url.contains("neko")) { open_settings = 100; } else { @@ -2641,7 +2699,7 @@ private boolean handleIntent(Intent intent, boolean isNew, boolean restore, bool req.hash = phoneHash; req.settings = new TLRPC.TL_codeSettings(); req.settings.allow_flashcall = false; - req.settings.allow_app_hash = PushListenerController.getProvider().hasServices(); + req.settings.allow_app_hash = req.settings.allow_firebase = PushListenerController.getProvider().hasServices(); Bundle params = new Bundle(); 
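For t.me/<username>/<segment> links, handleIntent above now keeps the second path segment as a possible bot-app short name (botAppMaybe, together with the startapp query parameter) and only treats it as a message id when it parses as a non-zero integer. A standalone sketch of that parse, with Integer.parseInt standing in for Utilities.parseInt:

    final class BotAppLinkSketch {
        // The same segment is remembered as "botAppMaybe" either way; it only
        // doubles as a message id when it is a non-zero number.
        static Integer messageIdOrNull(String segment) {
            try {
                int id = Integer.parseInt(segment);
                return id != 0 ? id : null;
            } catch (NumberFormatException ignored) {
                return null;                 // e.g. "myapp" in t.me/somebot/myapp?startapp=...
            }
        }
    }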
params.putString("phone", phone); @@ -2659,7 +2717,7 @@ private boolean handleIntent(Intent intent, boolean isNew, boolean restore, bool if (message != null && message.startsWith("@")) { message = " " + message; } - runLinkRequest(intentAccount[0], username, group, sticker, emoji, botUser, botChat, botChannel, botChatAdminParams, message, contactToken, hasUrl, messageId, channelId, threadId, commentId, game, auth, lang, unsupportedUrl, code, login, wallPaper, inputInvoiceSlug, theme, voicechat, livestream, 0, videoTimestamp, setAsAttachBot, attachMenuBotToOpen, attachMenuBotChoose, progress); + runLinkRequest(intentAccount[0], username, group, sticker, emoji, botUser, botChat, botChannel, botChatAdminParams, message, contactToken, hasUrl, messageId, channelId, threadId, commentId, game, auth, lang, unsupportedUrl, code, login, wallPaper, inputInvoiceSlug, theme, voicechat, livestream, 0, videoTimestamp, setAsAttachBot, attachMenuBotToOpen, attachMenuBotChoose, botAppMaybe, startApp, progress); } else { try (Cursor cursor = getContentResolver().query(intent.getData(), null, null, null, null)) { if (cursor != null) { @@ -2784,12 +2842,19 @@ private boolean handleIntent(Intent intent, boolean isNew, boolean restore, bool if (push_topic_id > 0) { TLRPC.TL_forumTopic topic = MessagesController.getInstance(currentAccount).getTopicsController().findTopic(push_chat_id, push_topic_id); + FileLog.d(push_chat_id + " " + push_topic_id + " TL_forumTopic " + topic); if (topic != null) { TLRPC.Message message = topic.topicStartMessage; ArrayList messageObjects = new ArrayList<>(); TLRPC.Chat chatLocal = MessagesController.getInstance(currentAccount).getChat(push_chat_id); messageObjects.add(new MessageObject(currentAccount, message, false, false)); fragment.setThreadMessages(messageObjects, chatLocal, topic.id, topic.read_inbox_max_id, topic.read_outbox_max_id, topic); + } else { + boolean finalIsNew = isNew; + MessagesController.getInstance(currentAccount).getTopicsController().loadTopic(push_chat_id, push_topic_id, () -> { + handleIntent(intent, finalIsNew, restore, fromPassword, progress); + }); + return true; } } if (actionBarLayout.presentFragment(new INavigationLayout.NavigationParams(fragment).setNoAnimation(true))) { @@ -2860,7 +2925,28 @@ private boolean handleIntent(Intent intent, boolean isNew, boolean restore, bool } else { ArrayList dids = new ArrayList<>(); dids.add(MessagesStorage.TopicKey.of(dialogId, 0)); - didSelectDialogs(null, dids, null, false); + didSelectDialogs(null, dids, null, false, null); + } + } else if (open_settings == 7 || open_settings == 8 || open_settings == 9) { + CharSequence bulletinText = null; + boolean can = BuildVars.DEBUG_PRIVATE_VERSION; // TODO: check source + if (!can) { + bulletinText = "Locked in release."; + } else if (open_settings == 7) { + bulletinText = "Logs enabled."; + ApplicationLoader.applicationContext.getSharedPreferences("systemConfig", Context.MODE_PRIVATE).edit().putBoolean("logsEnabled", BuildVars.LOGS_ENABLED = true).commit(); + } else if (open_settings == 8) { + ProfileActivity.sendLogs(LaunchActivity.this, false); + } else if (open_settings == 9) { + bulletinText = "Logs disabled."; + ApplicationLoader.applicationContext.getSharedPreferences("systemConfig", Context.MODE_PRIVATE).edit().putBoolean("logsEnabled", BuildVars.LOGS_ENABLED = false).commit(); + } + + if (bulletinText != null) { + BaseFragment fragment = actionBarLayout.getLastFragment(); + if (fragment != null) { + BulletinFactory.of(fragment).createSimpleBulletin(R.raw.info, 
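The handler above wires three new tg://settings deep links to log management and reports the outcome in a bulletin; on non-debug builds it only shows that the feature is locked. A sketch of the URL-to-action mapping, using the same open_settings codes as the hunk:

    final class LogsDeepLinkSketch {
        // Map a settings URL to the open_settings code handled above.
        static int openSettingsFor(String url) {
            if (url.contains("?enablelogs"))  return 7;   // enable file logging
            if (url.contains("?sendlogs"))    return 8;   // share the collected logs
            if (url.contains("?disablelogs")) return 9;   // disable file logging
            return 0;                                     // not a logs link
        }
    }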
bulletinText).show(); + } } } else if (open_settings != 0) { BaseFragment fragment; @@ -3024,7 +3110,7 @@ private boolean handleIntent(Intent intent, boolean isNew, boolean restore, bool if (AndroidUtilities.isTablet()) { if (!UserConfig.getInstance(currentAccount).isClientActivated()) { if (layersActionBarLayout.getFragmentStack().isEmpty()) { - layersActionBarLayout.addFragmentToStack(getClientNotActivatedFragment()); + layersActionBarLayout.addFragmentToStack(getClientNotActivatedFragment(), INavigationLayout.FORCE_NOT_ATTACH_VIEW); drawerLayoutContainer.setAllowOpenDrawer(false, false); } } else { @@ -3034,14 +3120,14 @@ private boolean handleIntent(Intent intent, boolean isNew, boolean restore, bool if (searchQuery != null) { dialogsActivity.setInitialSearchString(searchQuery); } - actionBarLayout.addFragmentToStack(dialogsActivity); + actionBarLayout.addFragmentToStack(dialogsActivity, INavigationLayout.FORCE_NOT_ATTACH_VIEW); drawerLayoutContainer.setAllowOpenDrawer(true, false); } } } else { if (actionBarLayout.getFragmentStack().isEmpty()) { if (!UserConfig.getInstance(currentAccount).isClientActivated()) { - actionBarLayout.addFragmentToStack(getClientNotActivatedFragment()); + actionBarLayout.addFragmentToStack(getClientNotActivatedFragment(), INavigationLayout.FORCE_NOT_ATTACH_VIEW); drawerLayoutContainer.setAllowOpenDrawer(false, false); } else { DialogsActivity dialogsActivity = new DialogsActivity(null); @@ -3049,7 +3135,7 @@ private boolean handleIntent(Intent intent, boolean isNew, boolean restore, bool if (searchQuery != null) { dialogsActivity.setInitialSearchString(searchQuery); } - actionBarLayout.addFragmentToStack(dialogsActivity); + actionBarLayout.addFragmentToStack(dialogsActivity, INavigationLayout.FORCE_NOT_ATTACH_VIEW); drawerLayoutContainer.setAllowOpenDrawer(true, false); } } @@ -3105,7 +3191,7 @@ private void openDialogsToSend(boolean animated) { Bundle args = new Bundle(); args.putBoolean("onlySelect", true); args.putBoolean("canSelectTopics", true); - args.putInt("dialogsType", 3); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_FORWARD); args.putBoolean("allowSwitchAccount", true); if (contactsToSend != null) { if (contactsToSend.size() != 1) { @@ -3313,16 +3399,16 @@ private void runImportRequest(final Uri importUri, args.putBoolean("allowSwitchAccount", true); if (res.pm) { - args.putInt("dialogsType", 12); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY_USERS); } else if (res.group) { - args.putInt("dialogsType", 11); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY_GROUPS); } else { String uri = importUri.toString(); Set uris = MessagesController.getInstance(intentAccount).exportPrivateUri; boolean ok = false; for (String u : uris) { if (uri.contains(u)) { - args.putInt("dialogsType", 12); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY_USERS); ok = true; break; } @@ -3331,13 +3417,13 @@ private void runImportRequest(final Uri importUri, uris = MessagesController.getInstance(intentAccount).exportGroupUri; for (String u : uris) { if (uri.contains(u)) { - args.putInt("dialogsType", 11); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY_GROUPS); ok = true; break; } } if (!ok) { - args.putInt("dialogsType", 13); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_IMPORT_HISTORY); } } } @@ -3436,9 +3522,11 @@ private void runLinkRequest(final int intentAccount, final String setAsAttachBot, final String attachMenuBotToOpen, final String 
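Throughout this diff, numeric dialogsType arguments are replaced by named DialogsActivity constants. The sketch below only restates the pairs implied by those one-for-one substitutions; the canonical declarations live in DialogsActivity, not here.

    final class DialogsTypeSketch {
        // Values as implied by the literal-to-constant replacements in this diff.
        static final int DIALOGS_TYPE_BOT_SHARE             = 1;
        static final int DIALOGS_TYPE_ADD_USERS_TO          = 2;
        static final int DIALOGS_TYPE_FORWARD               = 3;
        static final int DIALOGS_TYPE_IMPORT_HISTORY_GROUPS = 11;
        static final int DIALOGS_TYPE_IMPORT_HISTORY_USERS  = 12;
        static final int DIALOGS_TYPE_IMPORT_HISTORY        = 13;
    }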
attachMenuBotChoose, + final String botAppMaybe, + final String botAppStartParam, final Browser.Progress progress) { if (state == 0 && ChatActivity.SCROLL_DEBUG_DELAY && progress != null) { - Runnable runnable = () -> runLinkRequest(intentAccount, username, group, sticker, emoji, botUser, botChat, botChannel, botChatAdminParams, message, contactToken, hasUrl, messageId, channelId, threadId, commentId, game, auth, lang, unsupportedUrl, code, loginToken, wallPaper, inputInvoiceSlug, theme, voicechat, livestream, 1, videoTimestamp, setAsAttachBot, attachMenuBotToOpen, attachMenuBotChoose, progress); + Runnable runnable = () -> runLinkRequest(intentAccount, username, group, sticker, emoji, botUser, botChat, botChannel, botChatAdminParams, message, contactToken, hasUrl, messageId, channelId, threadId, commentId, game, auth, lang, unsupportedUrl, code, loginToken, wallPaper, inputInvoiceSlug, theme, voicechat, livestream, 1, videoTimestamp, setAsAttachBot, attachMenuBotToOpen, attachMenuBotChoose, botAppMaybe, botAppStartParam, progress); progress.init(); progress.onCancel(() -> AndroidUtilities.cancelRunOnUIThread(runnable)); AndroidUtilities.runOnUIThread(runnable, 7500); @@ -3448,7 +3536,7 @@ private void runLinkRequest(final int intentAccount, if (account != intentAccount) { switchToAccount(account, true); } - runLinkRequest(account, username, group, sticker, emoji, botUser, botChat, botChannel, botChatAdminParams, message, contactToken, hasUrl, messageId, channelId, threadId, commentId, game, auth, lang, unsupportedUrl, code, loginToken, wallPaper, inputInvoiceSlug, theme, voicechat, livestream, 1, videoTimestamp, setAsAttachBot, attachMenuBotToOpen, attachMenuBotChoose, progress); + runLinkRequest(account, username, group, sticker, emoji, botUser, botChat, botChannel, botChatAdminParams, message, contactToken, hasUrl, messageId, channelId, threadId, commentId, game, auth, lang, unsupportedUrl, code, loginToken, wallPaper, inputInvoiceSlug, theme, voicechat, livestream, 1, videoTimestamp, setAsAttachBot, attachMenuBotToOpen, attachMenuBotChoose, botAppMaybe, botAppStartParam, progress); }).show(); return; } else if (code != null) { @@ -3547,6 +3635,66 @@ private void runLinkRequest(final int intentAccount, MessagesController.getInstance(intentAccount).putUsers(res.users, false); MessagesController.getInstance(intentAccount).putChats(res.chats, false); MessagesStorage.getInstance(intentAccount).putUsersAndChats(res.users, res.chats, false, true); + + if (!TextUtils.isEmpty(botAppMaybe)) { + TLRPC.User user = MessagesController.getInstance(intentAccount).getUser(res.peer.user_id); + if (user != null && user.bot) { + TLRPC.TL_messages_getBotApp getBotApp = new TLRPC.TL_messages_getBotApp(); + TLRPC.TL_inputBotAppShortName app = new TLRPC.TL_inputBotAppShortName(); + app.bot_id = MessagesController.getInstance(currentAccount).getInputUser(user); + app.short_name = botAppMaybe; + getBotApp.app = app; + ConnectionsManager.getInstance(currentAccount).sendRequest(getBotApp, (response1, error1) -> { + if (error1 != null) { + AndroidUtilities.runOnUIThread(()-> runLinkRequest(currentAccount, username, group, sticker, emoji, botUser, botChat, botChannel, botChatAdminParams, message, contactToken, hasUrl, messageId, channelId, threadId, commentId, game, auth, lang, unsupportedUrl, code, loginToken, wallPaper, inputInvoiceSlug, theme, voicechat, livestream, state, videoTimestamp, setAsAttachBot, attachMenuBotToOpen, attachMenuBotChoose, null, null, progress)); + } else { + TLRPC.TL_messages_botApp 
botApp = (TLRPC.TL_messages_botApp) response1; + AndroidUtilities.runOnUIThread(()->{ + dismissLoading.run(); + + AtomicBoolean allowWrite = new AtomicBoolean(); + BaseFragment lastFragment = mainFragmentsStack.get(mainFragmentsStack.size() - 1); + Runnable loadBotSheet = ()->{ + BotWebViewSheet sheet = new BotWebViewSheet(LaunchActivity.this, lastFragment.getResourceProvider()); + sheet.setParentActivity(LaunchActivity.this); + sheet.requestWebView(currentAccount, user.id, user.id, null, null, BotWebViewSheet.TYPE_WEB_VIEW_BOT_APP, 0, false, lastFragment, botApp.app, allowWrite.get(), botAppStartParam, user); + sheet.show(); + }; + + if (botApp.inactive) { + AlertDialog.Builder builder = new AlertDialog.Builder(LaunchActivity.this) + .setTopAnimation(R.raw.permission_request_apk, AlertsCreator.PERMISSIONS_REQUEST_TOP_ICON_SIZE, false, Theme.getColor(Theme.key_dialogTopBackground)) + .setMessage(AndroidUtilities.replaceTags(LocaleController.formatString(R.string.BotStartAppPermission, botApp.app.title, UserObject.getUserName(user)))) + .setPositiveButton(LocaleController.getString(R.string.Start), (dialog, which) -> loadBotSheet.run()) + .setNegativeButton(LocaleController.getString(R.string.Cancel), null); + + if (botApp.request_write_access) { + allowWrite.set(true); + + CheckBoxCell cell = new CheckBoxCell(LaunchActivity.this, 5, lastFragment.getResourceProvider()); + cell.setBackground(Theme.getSelectorDrawable(false)); + cell.setMultiline(true); + cell.setText(AndroidUtilities.replaceTags(LocaleController.formatString("OpenUrlOption2", R.string.OpenUrlOption2, UserObject.getUserName(user))), "", true, false); + cell.setPadding(LocaleController.isRTL ? AndroidUtilities.dp(16) : AndroidUtilities.dp(8), 0, LocaleController.isRTL ? AndroidUtilities.dp(8) : AndroidUtilities.dp(16), 0); + cell.setOnClickListener(v -> { + boolean allow = !cell.isChecked(); + cell.setChecked(allow, true); + allowWrite.set(allow); + }); + + builder.setView(cell); + } + builder.show(); + } else { + loadBotSheet.run(); + } + }); + } + }); + return; + } + } + if (setAsAttachBot != null && attachMenuBotToOpen == null) { TLRPC.User user = MessagesController.getInstance(intentAccount).getUser(res.peer.user_id); if (user != null && user.bot) { @@ -3581,7 +3729,7 @@ private void runLinkRequest(final int intentAccount, args.putBoolean("allowBots", chooserTargets.contains("bots")); dialogsActivity = new DialogsActivity(args); - dialogsActivity.setDelegate((fragment, dids, message1, param) -> { + dialogsActivity.setDelegate((fragment, dids, message1, param, topicsFragment) -> { long did = dids.get(0).dialogId; Bundle args1 = new Bundle(); @@ -3601,6 +3749,7 @@ private void runLinkRequest(final int intentAccount, NotificationCenter.getInstance(intentAccount).postNotificationName(NotificationCenter.closeChats); actionBarLayout.presentFragment(new ChatActivity(args1), true, false, true, false); } + return true; }); } else { dialogsActivity = null; @@ -3621,7 +3770,7 @@ private void runLinkRequest(final int intentAccount, } } else { AttachBotIntroTopView introTopView = new AttachBotIntroTopView(LaunchActivity.this); - introTopView.setColor(Theme.getColor(Theme.key_chat_attachContactIcon)); + introTopView.setColor(Theme.getColor(Theme.key_chat_attachIcon)); introTopView.setBackgroundColor(Theme.getColor(Theme.key_dialogTopBackground)); introTopView.setAttachBot(attachMenuBot); @@ -3663,7 +3812,6 @@ private void runLinkRequest(final int intentAccount, allowWrite.set(allow); }); - builder.setCustomViewOffset(12); 
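After messages.getBotApp returns, the branch above either opens the BotWebViewSheet directly or, for an app the user has not launched before (botApp.inactive), shows a confirmation dialog first, adding an "allow the bot to message you" checkbox when request_write_access is set. A sketch of that decision only (the enum and method names are illustrative):

    final class BotAppPromptSketch {
        enum Prompt { OPEN_DIRECTLY, CONFIRM, CONFIRM_WITH_WRITE_ACCESS }

        // Decide how to present a bot web app based on the flags returned by
        // messages.getBotApp.
        static Prompt promptFor(boolean inactive, boolean requestsWriteAccess) {
            if (!inactive) {
                return Prompt.OPEN_DIRECTLY;
            }
            return requestsWriteAccess ? Prompt.CONFIRM_WITH_WRITE_ACCESS : Prompt.CONFIRM;
        }
    }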
builder.setView(cell); } builder.show(); @@ -3687,11 +3835,11 @@ private void runLinkRequest(final int intentAccount, Bundle args = new Bundle(); args.putBoolean("onlySelect", true); args.putBoolean("cantSendToChannels", true); - args.putInt("dialogsType", 1); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_BOT_SHARE); args.putString("selectAlertString", LocaleController.getString("SendGameToText", R.string.SendGameToText)); args.putString("selectAlertStringGroup", LocaleController.getString("SendGameToGroupText", R.string.SendGameToGroupText)); DialogsActivity fragment = new DialogsActivity(args); - fragment.setDelegate((fragment1, dids, message1, param) -> { + fragment.setDelegate((fragment1, dids, message1, param, topicsFragment) -> { long did = dids.get(0).dialogId; TLRPC.TL_inputMediaGame inputMediaGame = new TLRPC.TL_inputMediaGame(); inputMediaGame.id = new TLRPC.TL_inputGameShortName(); @@ -3712,6 +3860,7 @@ private void runLinkRequest(final int intentAccount, NotificationCenter.getInstance(intentAccount).postNotificationName(NotificationCenter.closeChats); actionBarLayout.presentFragment(new ChatActivity(args1), true, false, true, false); } + return true; }); boolean removeLast; if (AndroidUtilities.isTablet()) { @@ -3751,7 +3900,7 @@ private void runLinkRequest(final int intentAccount, } Bundle args = new Bundle(); args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 2); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_ADD_USERS_TO); args.putBoolean("resetDelegate", false); args.putBoolean("closeFragment", false); args.putBoolean("allowGroups", botChat != null); @@ -3759,7 +3908,7 @@ private void runLinkRequest(final int intentAccount, final String botHash = TextUtils.isEmpty(botChat) ? (TextUtils.isEmpty(botChannel) ? null : botChannel) : botChat; // args.putString("addToGroupAlertString", LocaleController.formatString("AddToTheGroupAlertText", R.string.AddToTheGroupAlertText, UserObject.getUserName(user), "%1$s")); DialogsActivity fragment = new DialogsActivity(args); - fragment.setDelegate((fragment12, dids, message1, param) -> { + fragment.setDelegate((fragment12, dids, message1, param, topicsFragment) -> { long did = dids.get(0).dialogId; TLRPC.Chat chat = MessagesController.getInstance(currentAccount).getChat(-did); @@ -3882,6 +4031,7 @@ public void didChangeOwner(TLRPC.User user) { }); builder.show(); } + return true; }); presentFragment(fragment); } else { @@ -4245,9 +4395,9 @@ public void onError() { } else if (message != null) { Bundle args = new Bundle(); args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 3); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_FORWARD); DialogsActivity fragment = new DialogsActivity(args); - fragment.setDelegate((fragment13, dids, m, param) -> { + fragment.setDelegate((fragment13, dids, m, param, topicsFragment) -> { long did = dids.get(0).dialogId; Bundle args13 = new Bundle(); args13.putBoolean("scrollToTopOnResume", true); @@ -4264,6 +4414,7 @@ public void onError() { MediaDataController.getInstance(intentAccount).saveDraft(did, 0, message, null, null, false); actionBarLayout.presentFragment(new ChatActivity(args13), true, false, true, false); } + return true; }); presentFragment(fragment, false, true); } else if (auth != null) { @@ -4935,9 +5086,20 @@ public AlertDialog showAlertDialog(AlertDialog.Builder builder) { visibleDialog.setOnDismissListener(dialog -> { if (visibleDialog != null) { if (visibleDialog == localeDialog) { + BaseFragment fragment = actionBarLayout == null ? 
null : actionBarLayout.getLastFragment(); try { String shorname = LocaleController.getInstance().getCurrentLocaleInfo().shortName; - Toast.makeText(LaunchActivity.this, getStringForLanguageAlert(shorname.equals("en") ? englishLocaleStrings : systemLocaleStrings, "ChangeLanguageLater", R.string.ChangeLanguageLater), Toast.LENGTH_LONG).show(); + if (fragment != null) { + BulletinFactory.of(fragment).createSimpleBulletin( + R.raw.msg_translate, + getStringForLanguageAlert(shorname.equals("en") ? englishLocaleStrings : systemLocaleStrings, "ChangeLanguageLater", R.string.ChangeLanguageLater) + ).setDuration(Bulletin.DURATION_PROLONG).show(); + } else { + BulletinFactory.of(Bulletin.BulletinWindow.make(LaunchActivity.this), null).createSimpleBulletin( + R.raw.msg_translate, + getStringForLanguageAlert(shorname.equals("en") ? englishLocaleStrings : systemLocaleStrings, "ChangeLanguageLater", R.string.ChangeLanguageLater) + ).setDuration(Bulletin.DURATION_PROLONG).show(); + } } catch (Exception e) { FileLog.e(e); } @@ -4991,7 +5153,7 @@ public void onNewIntent(Intent intent, Browser.Progress progress) { } @Override - public void didSelectDialogs(DialogsActivity dialogsFragment, ArrayList dids, CharSequence message, boolean param) { + public boolean didSelectDialogs(DialogsActivity dialogsFragment, ArrayList dids, CharSequence message, boolean param, TopicsFragment topicsFragment) { final int account = dialogsFragment != null ? dialogsFragment.getCurrentAccount() : currentAccount; if (exportingChatUri != null) { @@ -5051,7 +5213,7 @@ public void didSelectDialogs(DialogsActivity dialogsFragment, ArrayList 1)) { - return; + return false; } } + if (topicsFragment != null) { + topicsFragment.removeSelfFromStack(); + } + boolean presentedFragmentWithRemoveLast = false; if (contactsToSend != null && contactsToSend.size() == 1 && !mainFragmentsStack.isEmpty()) { + presentedFragmentWithRemoveLast = true; PhonebookShareAlert alert = new PhonebookShareAlert(mainFragmentsStack.get(mainFragmentsStack.size() - 1), null, null, contactsToSendUri, null, null, null); alert.setDelegate((user, notify2, scheduleDate) -> { if (fragment != null) { @@ -5095,7 +5262,17 @@ public void didSelectDialogs(DialogsActivity dialogsFragment, ArrayList 0)); actionBarLayout.presentFragment(fragment, dialogsFragment != null, withoutAnimation, true, false); - if (videoPath != null) { + presentedFragmentWithRemoveLast = dialogsFragment != null; + if (videoPath != null && topicId == 0) { fragment.openVideoEditor(videoPath, sendingText); videoEditorOpened = true; sendingText = null; - } else if (photoPathsArray != null && photoPathsArray.size() > 0) { + } else if (photoPathsArray != null && photoPathsArray.size() > 0 && topicId == 0) { photosEditorOpened = fragment.openPhotosEditor(photoPathsArray, message == null || message.length() == 0 ? 
sendingText : message); if (photosEditorOpened) { sendingText = null; @@ -5153,7 +5332,7 @@ public void didSelectDialogs(DialogsActivity dialogsFragment, ArrayList= 100) { + return; + } + BaseFragment lastFragment = actionBarLayout.getLastFragment(); + if (lastFragment == null || lastFragment instanceof LiteModeSettingsActivity) { + return; + } + int percent = LiteMode.getBatteryLevel(); + BulletinFactory.of(lastFragment).createSimpleBulletin( + new BatteryDrawable(percent / 100F, Color.WHITE, lastFragment.getThemedColor(Theme.key_dialogSwipeRemove), 1.3f), + LocaleController.getString("LowPowerEnabledTitle", R.string.LowPowerEnabledTitle), + LocaleController.formatString("LowPowerEnabledSubtitle", R.string.LowPowerEnabledSubtitle, String.format("%d%%", percent)), + LocaleController.getString("Disable", R.string.Disable), + () -> presentFragment(new LiteModeSettingsActivity()) + ).setDuration(Bulletin.DURATION_PROLONG).show(); } public void presentFragment(INavigationLayout.NavigationParams params) { @@ -5362,21 +5564,6 @@ public void onRequestPermissionsResult(int requestCode, String[] permissions, in } } - @Override - public void onRestoreInstanceState(@Nullable Bundle savedInstanceState) { - super.onRestoreInstanceState(savedInstanceState); - - if (actionBarLayout != null) { - actionBarLayout.rebuildFragments(INavigationLayout.REBUILD_FLAG_REBUILD_LAST); - } - if (rightActionBarLayout != null) { - rightActionBarLayout.rebuildFragments(INavigationLayout.REBUILD_FLAG_REBUILD_LAST); - } - if (layersActionBarLayout != null) { - layersActionBarLayout.rebuildFragments(INavigationLayout.REBUILD_FLAG_REBUILD_LAST); - } - } - @Override protected void onPause() { super.onPause(); @@ -5408,6 +5595,9 @@ protected void onPause() { } } catch (Exception ignored) { } + for (PasscodeView overlay : overlayPasscodeViews) { + overlay.onPause(); + } ConnectionsManager.getInstance(currentAccount).setAppPaused(true, false); if (PhotoViewer.hasInstance() && PhotoViewer.getInstance().isVisible()) { PhotoViewer.getInstance().onPause(); @@ -5504,7 +5694,9 @@ protected void onUserLeaveHint() { for (Runnable callback : onUserLeaveHintListeners) { callback.run(); } - actionBarLayout.onUserLeaveHint(); + if (actionBarLayout != null) { + actionBarLayout.onUserLeaveHint(); + } } View feedbackView; @@ -5525,6 +5717,7 @@ protected void onResume() { NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.startAllHeavyOperations, 4096); MediaController.getInstance().setFeedbackView(feedbackView = actionBarLayout.getView(), true); ApplicationLoader.mainInterfacePaused = false; + MessagesController.getInstance(currentAccount).sortDialogs(null); showLanguageAlert(false); Utilities.stageQueue.postRunnable(() -> { ApplicationLoader.mainInterfacePausedStageQueue = false; @@ -5554,6 +5747,10 @@ protected void onResume() { } } passcodeView.onResume(); + + for (PasscodeView overlay : overlayPasscodeViews) { + overlay.onResume(); + } } if (NaConfig.INSTANCE.getDisableProxyWhenVpnEnabled().Bool()) { @@ -5736,8 +5933,28 @@ public void didReceivedNotification(int id, final int account, Object... 
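The new onPowerSaver callback above shows a bulletin with the current battery level and a shortcut to the power-saving settings, but bails out early in two cases. A sketch of that guard, assuming the >= 100 comparison in the hunk is against the reported battery level and that the instanceof check means the settings screen is already on top:

    final class PowerSaverBulletinSketch {
        // Skip the bulletin on a full battery or while LiteModeSettingsActivity is
        // already the top fragment.
        static boolean shouldShow(int batteryPercent, boolean liteModeScreenOnTop) {
            return batteryPercent < 100 && !liteModeScreenOnTop;
        }
    }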
args) { builder.setMessage(LocaleController.getString("NobodyLikesSpam2", R.string.NobodyLikesSpam2)); builder.setPositiveButton(LocaleController.getString("OK", R.string.OK), null); } else if (reason == 2) { - builder.setMessage((String) args[1]); + SpannableStringBuilder span = SpannableStringBuilder.valueOf((String) args[1]); String type = (String) args[2]; + if (type.startsWith("PREMIUM_GIFT_SELF_REQUIRED_")) { + String msg = (String) args[1]; + int start = msg.indexOf('*'), end = msg.indexOf('*', start + 1); + if (start != -1 && end != -1 && start != end) { + span.replace(start, end + 1, msg.substring(start + 1, end)); + span.setSpan(new ClickableSpan() { + @Override + public void onClick(@NonNull View widget) { + getActionBarLayout().presentFragment(new PremiumPreviewFragment("gift")); + } + + @Override + public void updateDrawState(@NonNull TextPaint ds) { + super.updateDrawState(ds); + ds.setUnderlineText(false); + } + }, start, end - 1, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE); + } + } + builder.setMessage(span); if (type.startsWith("AUTH_KEY_DROP_")) { builder.setPositiveButton(LocaleController.getString("Cancel", R.string.Cancel), null); builder.setNegativeButton(LocaleController.getString("LogOut", R.string.LogOut), (dialog, which) -> MessagesController.getInstance(currentAccount).performLogout(2)); @@ -6103,13 +6320,13 @@ public void onAnimationEnd(Animator animation) { switch (type) { case Bulletin.TYPE_NAME_CHANGED: { long peerId = (long) args[1]; - String text = peerId > 0 ? LocaleController.getString("YourNameChanged", R.string.YourNameChanged) : LocaleController.getString("CannelTitleChanged", R.string.ChannelTitleChanged); + String text = peerId > 0 ? LocaleController.getString("YourNameChanged", R.string.YourNameChanged) : LocaleController.getString("ChannelTitleChanged", R.string.ChannelTitleChanged); (container != null ? BulletinFactory.of(container, null) : BulletinFactory.of(fragment)).createErrorBulletin(text).show(); break; } case Bulletin.TYPE_BIO_CHANGED: { long peerId = (long) args[1]; - String text = peerId > 0 ? LocaleController.getString("YourBioChanged", R.string.YourBioChanged) : LocaleController.getString("CannelDescriptionChanged", R.string.ChannelDescriptionChanged); + String text = peerId > 0 ? LocaleController.getString("YourBioChanged", R.string.YourBioChanged) : LocaleController.getString("ChannelDescriptionChanged", R.string.ChannelDescriptionChanged); (container != null ? 
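For PREMIUM_GIFT_SELF_REQUIRED_* errors, the dialog text above is promoted from a plain string to a SpannableStringBuilder: the part between the first pair of '*' characters loses the asterisks and becomes a ClickableSpan that opens the premium gift screen. A standalone sketch of the text manipulation (attaching the span itself is left to the real code):

    final class PremiumGiftSpanSketch {
        // Strip the first pair of '*' markers in place and return the range that
        // should become clickable, or null when the message carries no markup.
        static int[] stripStars(StringBuilder text) {
            int start = text.indexOf("*");
            int end = text.indexOf("*", start + 1);
            if (start == -1 || end == -1 || start == end) {
                return null;
            }
            text.replace(start, end + 1, text.substring(start + 1, end));
            return new int[]{start, end - 1};    // clickable bounds after the stars are removed
        }
    }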
BulletinFactory.of(container, null) : BulletinFactory.of(fragment)).createErrorBulletin(text).show(); break; } @@ -6263,25 +6480,27 @@ private void showVoiceChatTooltip(int action) { if (voIPService == null || mainFragmentsStack.isEmpty() || voIPService.groupCall == null) { return; } - if (!mainFragmentsStack.isEmpty()) { - TLRPC.Chat chat = voIPService.getChat(); - BaseFragment fragment = actionBarLayout.getFragmentStack().get(actionBarLayout.getFragmentStack().size() - 1); - if (fragment instanceof ChatActivity) { - ChatActivity chatActivity = (ChatActivity) fragment; - if (chatActivity.getDialogId() == -chat.id) { - chat = null; - } - chatActivity.getUndoView().showWithAction(0, action, chat); - } else if (fragment instanceof DialogsActivity) { - DialogsActivity dialogsActivity = (DialogsActivity) fragment; - dialogsActivity.getUndoView().showWithAction(0, action, chat); - } else if (fragment instanceof ProfileActivity) { - ProfileActivity profileActivity = (ProfileActivity) fragment; - profileActivity.getUndoView().showWithAction(0, action, chat); + TLRPC.Chat chat = voIPService.getChat(); + BaseFragment fragment = actionBarLayout.getFragmentStack().get(actionBarLayout.getFragmentStack().size() - 1); + if (fragment instanceof ChatActivity) { + ChatActivity chatActivity = (ChatActivity) fragment; + if (chatActivity.getDialogId() == -chat.id) { + chat = null; } - if (action == UndoView.ACTION_VOIP_CAN_NOW_SPEAK && VoIPService.getSharedInstance() != null) { - VoIPService.getSharedInstance().playAllowTalkSound(); + UndoView undoView = chatActivity.getUndoView(); + if (undoView != null) { + undoView.showWithAction(0, action, chat); } + } else if (fragment instanceof DialogsActivity) { + DialogsActivity dialogsActivity = (DialogsActivity) fragment; + dialogsActivity.getUndoView().showWithAction(0, action, chat); + } else if (fragment instanceof ProfileActivity) { + ProfileActivity profileActivity = (ProfileActivity) fragment; + profileActivity.getUndoView().showWithAction(0, action, chat); + } + + if (action == UndoView.ACTION_VOIP_CAN_NOW_SPEAK && VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().playAllowTalkSound(); } } @@ -6319,10 +6538,11 @@ private void onThemeLoadFinish() { private boolean checkFreeDiscSpaceShown; private long alreadyShownFreeDiscSpaceAlertForced; + private long lastSpaceAlert; private static LaunchActivity staticInstanceForAlerts; private void checkFreeDiscSpace(final int force) { staticInstanceForAlerts = this; - SharedConfig.checkKeepMedia(); + AutoDeleteMediaTask.run(); SharedConfig.checkLogsToDelete(); if (Build.VERSION.SDK_INT >= 26 && force == 0 || checkFreeDiscSpaceShown) { return; @@ -6333,7 +6553,7 @@ private void checkFreeDiscSpace(final int force) { } try { SharedPreferences preferences = MessagesController.getGlobalMainSettings(); - if (force == 2 || force == 1 && Math.abs(alreadyShownFreeDiscSpaceAlertForced - System.currentTimeMillis()) > 1000 * 60 * 4 || Math.abs(preferences.getLong("last_space_check", 0) - System.currentTimeMillis()) >= 3 * 24 * 3600 * 1000) { + if ((force == 2 || force == 1) && Math.abs(alreadyShownFreeDiscSpaceAlertForced - System.currentTimeMillis()) > 1000 * 60 * 4 || Math.abs(preferences.getLong("last_space_check", 0) - System.currentTimeMillis()) >= 3 * 24 * 3600 * 1000) { File path = FileLoader.getDirectory(FileLoader.MEDIA_DIR_CACHE); if (path == null) { return; @@ -6577,6 +6797,11 @@ public void run() { FileLog.d("lock app"); } showPasscodeActivity(true, false, -1, -1, null, null); + try { + 
NotificationsController.getInstance(UserConfig.selectedAccount).showNotifications(); + } catch (Exception e) { + FileLog.e(e); + } } else { if (BuildVars.LOGS_ENABLED) { FileLog.d("didn't pass lock check"); @@ -6603,6 +6828,14 @@ public void run() { SharedConfig.saveConfig(); } + public void addOverlayPasscodeView(PasscodeView overlay) { + overlayPasscodeViews.add(overlay); + } + + public void removeOverlayPasscodeView(PasscodeView overlay) { + overlayPasscodeViews.remove(overlay); + } + private void onPasscodeResume() { if (lockRunnable != null) { if (BuildVars.LOGS_ENABLED) { @@ -6869,7 +7102,7 @@ public boolean dispatchKeyEvent(KeyEvent event) { } } try { - super.dispatchKeyEvent(event); + return super.dispatchKeyEvent(event); } catch (Exception e) { FileLog.e(e); } @@ -6920,7 +7153,7 @@ public boolean needPresentFragment(INavigationLayout layout, INavigationLayout.N ArticleViewer.getInstance().close(false, true); } if (AndroidUtilities.isTablet() && layersActionBarLayout != null) { - drawerLayoutContainer.setAllowOpenDrawer(!(fragment instanceof LoginActivity || fragment instanceof IntroActivity || fragment instanceof CountrySelectActivity) && layersActionBarLayout.getView().getVisibility() != View.VISIBLE, true); + drawerLayoutContainer.setAllowOpenDrawer(!(fragment instanceof LoginActivity || fragment instanceof IntroActivity || fragment instanceof CountrySelectActivity) && (layersActionBarLayout == null || layersActionBarLayout.getView().getVisibility() != View.VISIBLE), true); if (fragment instanceof DialogsActivity) { DialogsActivity dialogsActivity = (DialogsActivity) fragment; if (dialogsActivity.isMainDialogList() && layout != actionBarLayout) { @@ -7154,4 +7387,12 @@ public void onRebuildAllFragments(INavigationLayout layout, boolean last) { } drawerLayoutAdapter.notifyDataSetChanged(); } + + public static BaseFragment getLastFragment() { + if (instance != null && instance.getActionBarLayout() != null) { + return instance.getActionBarLayout().getLastFragment(); + } + return null; + } + } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LightModeSettingsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LightModeSettingsActivity.java deleted file mode 100644 index b2145f4f56..0000000000 --- a/TMessagesProj/src/main/java/org/telegram/ui/LightModeSettingsActivity.java +++ /dev/null @@ -1,112 +0,0 @@ -package org.telegram.ui; - -import android.annotation.SuppressLint; -import android.content.Context; -import android.view.View; -import android.widget.LinearLayout; - -import org.telegram.messenger.AndroidUtilities; -import org.telegram.messenger.LocaleController; -import org.telegram.messenger.R; -import org.telegram.messenger.SharedConfig; -import org.telegram.ui.ActionBar.ActionBar; -import org.telegram.ui.ActionBar.BaseFragment; -import org.telegram.ui.ActionBar.Theme; -import org.telegram.ui.Cells.TextCheckCell; -import org.telegram.ui.Cells.TextInfoPrivacyCell; -import org.telegram.ui.Components.LayoutHelper; - -import java.util.ArrayList; - -public class LightModeSettingsActivity extends BaseFragment { - - TextCheckCell enableMode; - - ArrayList checkBoxViews = new ArrayList<>(); - - @Override - public View createView(Context context) { - actionBar.setBackButtonImage(R.drawable.ic_ab_back); - actionBar.setAllowOverlayTitle(true); - actionBar.setTitle(LocaleController.getString("LightMode", R.string.LightMode)); - actionBar.setActionBarMenuOnItemClick(new ActionBar.ActionBarMenuOnItemClick() { - @Override - public void onItemClick(int id) { - if (id == -1) { - 
finishFragment(); - } - } - }); - LinearLayout contentView = new LinearLayout(context); - contentView.setOrientation(LinearLayout.VERTICAL); - fragmentView = contentView; - - enableMode = new TextCheckCell(context); - enableMode.setHeight(56); - enableMode.setTextAndCheck(LocaleController.getString("EnableLightMode", R.string.EnableLightMode), SharedConfig.getLiteMode().enabled(), false); - enableMode.setBackgroundColor(Theme.getColor(enableMode.isChecked() ? Theme.key_windowBackgroundChecked : Theme.key_windowBackgroundUnchecked)); - enableMode.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - enableMode.setOnClickListener(v -> { - SharedConfig.getLiteMode().toggleMode(); - updateEnableMode(); - }); - contentView.addView(enableMode, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); - - TextInfoPrivacyCell textInfoPrivacyCell = new TextInfoPrivacyCell(context); - textInfoPrivacyCell.setBackground(Theme.getThemedDrawable(context, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); - contentView.addView(textInfoPrivacyCell, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); - - TextCheckCell animatedEmoji = new TextCheckCell(context); - animatedEmoji.setTextAndCheck("Animated Emoji", true, true); - animatedEmoji.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); - contentView.addView(animatedEmoji, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); - - TextCheckCell animatedBackground = new TextCheckCell(context); - animatedBackground.setTextAndCheck("Animated Backgrounds", true, true); - animatedBackground.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); - contentView.addView(animatedBackground, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); - - - checkBoxViews.add(animatedEmoji); - checkBoxViews.add(animatedBackground); - - for (int i = 0; i < checkBoxViews.size(); i++) { - TextCheckCell view = checkBoxViews.get(i); - checkBoxViews.get(i).setOnClickListener(new View.OnClickListener() { - @Override - public void onClick(View v) { - view.setChecked(!view.isChecked()); - } - }); - } - updateEnableMode(); - - updateColors(); - return fragmentView; - } - - private void updateEnableMode() { - boolean checked = SharedConfig.getLiteMode().enabled(); - enableMode.setChecked(checked); - int color = Theme.getColor(checked ? Theme.key_windowBackgroundChecked : Theme.key_windowBackgroundUnchecked); - if (checked) { - enableMode.setBackgroundColorAnimated(checked, color); - } else { - enableMode.setBackgroundColorAnimatedReverse(color); - } - for (int i = 0; i < checkBoxViews.size(); i++) { - checkBoxViews.get(i).setVisibility(checked ? 
View.VISIBLE : View.GONE); - } - } - - @SuppressLint("NotifyDataSetChanged") - private void updateColors() { - enableMode.setColors(Theme.key_windowBackgroundCheckText, Theme.key_switchTrackBlue, Theme.key_switchTrackBlueChecked, Theme.key_switchTrackBlueThumb, Theme.key_switchTrackBlueThumbChecked); - fragmentView.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundGray)); - } - - @Override - public void onFragmentDestroy() { - super.onFragmentDestroy(); - } -} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LinkEditActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LinkEditActivity.java index 36f5ab34c3..4afb14a467 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/LinkEditActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LinkEditActivity.java @@ -436,15 +436,7 @@ public boolean onTouchEvent(MotionEvent event) { @Override public void onTextChanged(CharSequence s, int start, int before, int count) { } @Override public void afterTextChanged(Editable s) { - SpannableStringBuilder builder = new SpannableStringBuilder(s); - Emoji.replaceEmoji(builder, nameEditText.getPaint().getFontMetricsInt(), (int) nameEditText.getPaint().getTextSize(), false); - int selection = nameEditText.getSelectionStart(); - nameEditText.removeTextChangedListener(this); - nameEditText.setText(builder); - if (selection >= 0) { - nameEditText.setSelection(selection); - } - nameEditText.addTextChangedListener(this); + Emoji.replaceEmoji(s, nameEditText.getPaint().getFontMetricsInt(), (int) nameEditText.getPaint().getTextSize(), false); } }); nameEditText.setCursorVisible(false); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LiteModeSettingsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LiteModeSettingsActivity.java new file mode 100644 index 0000000000..c7812f962d --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/LiteModeSettingsActivity.java @@ -0,0 +1,1016 @@ +package org.telegram.ui; + +import static org.telegram.messenger.AndroidUtilities.dp; + +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.ValueAnimator; +import android.content.Context; +import android.content.SharedPreferences; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.drawable.Drawable; +import android.os.Build; +import android.os.Bundle; +import android.text.SpannableStringBuilder; +import android.text.Spanned; +import android.text.TextUtils; +import android.text.style.DynamicDrawableSpan; +import android.text.style.ImageSpan; +import android.util.TypedValue; +import android.view.Gravity; +import android.view.HapticFeedbackConstants; +import android.view.View; +import android.view.ViewGroup; +import android.view.accessibility.AccessibilityEvent; +import android.view.accessibility.AccessibilityNodeInfo; +import android.widget.FrameLayout; +import android.widget.ImageView; +import android.widget.LinearLayout; +import android.widget.TextView; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.core.graphics.ColorUtils; +import androidx.recyclerview.widget.DefaultItemAnimator; +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.RecyclerView; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LiteMode; +import org.telegram.messenger.LocaleController; +import 
org.telegram.messenger.MessagesController; +import org.telegram.messenger.R; +import org.telegram.messenger.SharedConfig; +import org.telegram.messenger.Utilities; +import org.telegram.ui.ActionBar.ActionBar; +import org.telegram.ui.ActionBar.BaseFragment; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Cells.HeaderCell; +import org.telegram.ui.Cells.TextCell; +import org.telegram.ui.Cells.TextInfoPrivacyCell; +import org.telegram.ui.Components.AnimatedEmojiDrawable; +import org.telegram.ui.Components.AnimatedTextView; +import org.telegram.ui.Components.BatteryDrawable; +import org.telegram.ui.Components.Bulletin; +import org.telegram.ui.Components.BulletinFactory; +import org.telegram.ui.Components.CheckBox2; +import org.telegram.ui.Components.CubicBezierInterpolator; +import org.telegram.ui.Components.IntSeekBarAccessibilityDelegate; +import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.ListView.AdapterWithDiffUtils; +import org.telegram.ui.Components.RecyclerListView; +import org.telegram.ui.Components.SeekBarAccessibilityDelegate; +import org.telegram.ui.Components.SeekBarView; +import org.telegram.ui.Components.Switch; + +import java.util.ArrayList; + +public class LiteModeSettingsActivity extends BaseFragment { + + FrameLayout contentView; + + RecyclerListView listView; + LinearLayoutManager layoutManager; + Adapter adapter; + + Bulletin restrictBulletin; + + @Override + public View createView(Context context) { + actionBar.setBackButtonImage(R.drawable.ic_ab_back); + actionBar.setAllowOverlayTitle(true); + actionBar.setTitle(LocaleController.getString("PowerUsage", R.string.PowerUsage)); + actionBar.setActionBarMenuOnItemClick(new ActionBar.ActionBarMenuOnItemClick() { + @Override + public void onItemClick(int id) { + if (id == -1) { + finishFragment(); + } + } + }); + + contentView = new FrameLayout(context); + contentView.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundGray)); + + listView = new RecyclerListView(context); + listView.setLayoutManager(layoutManager = new LinearLayoutManager(context)); + listView.setAdapter(adapter = new Adapter()); + DefaultItemAnimator itemAnimator = new DefaultItemAnimator(); + itemAnimator.setDurations(350); + itemAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + itemAnimator.setDelayAnimations(false); + itemAnimator.setSupportsChangeAnimations(false); + listView.setItemAnimator(itemAnimator); + contentView.addView(listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + listView.setOnItemClickListener((view, position, x, y) -> { + if (view == null || position < 0 || position >= items.size()) { + return; + } + final Item item = items.get(position); + + if (item.viewType == VIEW_TYPE_SWITCH || item.viewType == VIEW_TYPE_CHECKBOX) { + if (LiteMode.isPowerSaverApplied()) { + restrictBulletin = BulletinFactory.of(this).createSimpleBulletin(new BatteryDrawable(.1f, Color.WHITE, Theme.getColor(Theme.key_dialogSwipeRemove), 1.3f), LocaleController.getString("LiteBatteryRestricted", R.string.LiteBatteryRestricted)).show(); + return; + } + if (item.viewType == VIEW_TYPE_SWITCH && item.getFlagsCount() > 1 && (LocaleController.isRTL ? 
x > dp(19 + 37 + 19) : x < view.getMeasuredWidth() - dp(19 + 37 + 19))) { + int index = getExpandedIndex(item.flags); + if (index != -1) { + expanded[index] = !expanded[index]; + updateValues(); + updateItems(); + return; + } + } + boolean value = LiteMode.isEnabledSetting(item.flags); + LiteMode.toggleFlag(item.flags, !value); + updateValues(); + } else if (item.viewType == VIEW_TYPE_SWITCH2) { + if (item.type == SWITCH_TYPE_SMOOTH_TRANSITIONS) { + SharedPreferences preferences = MessagesController.getGlobalMainSettings(); + boolean animations = preferences.getBoolean("view_animations", true); + SharedPreferences.Editor editor = preferences.edit(); + editor.putBoolean("view_animations", !animations); + SharedConfig.setAnimationsEnabled(!animations); + editor.commit(); + ((TextCell) view).setChecked(!animations); + } + } + }); + + fragmentView = contentView; + + updateItems(); + + return fragmentView; + } + + @Override + public void onBecomeFullyVisible() { + super.onBecomeFullyVisible(); + LiteMode.addOnPowerSaverAppliedListener(onPowerAppliedChange); + } + + @Override + public void onBecomeFullyHidden() { + super.onBecomeFullyHidden(); + LiteMode.removeOnPowerSaverAppliedListener(onPowerAppliedChange); + } + + private Utilities.Callback<Boolean> onPowerAppliedChange = applied -> updateValues(); + + private boolean[] expanded = new boolean[3]; + private int getExpandedIndex(int flags) { + if (flags == LiteMode.FLAGS_ANIMATED_STICKERS) { + return 0; + } else if (flags == LiteMode.FLAGS_ANIMATED_EMOJI) { + return 1; + } else if (flags == LiteMode.FLAGS_CHAT) { + return 2; + } + return -1; + } + + + private ArrayList<Item> oldItems = new ArrayList<>(); + private ArrayList<Item> items = new ArrayList<>(); + + private void updateItems() { + oldItems.clear(); + oldItems.addAll(items); + + items.clear(); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + items.add(Item.asSlider()); + items.add(Item.asInfo( + LiteMode.getPowerSaverLevel() <= 0 ? + LocaleController.getString(R.string.LiteBatteryInfoDisabled) : + LiteMode.getPowerSaverLevel() >= 100 ?
+ LocaleController.getString(R.string.LiteBatteryInfoEnabled) : + LocaleController.formatString(R.string.LiteBatteryInfoBelow, String.format("%d%%", LiteMode.getPowerSaverLevel())) + )); + } + + items.add(Item.asHeader(LocaleController.getString("LiteOptionsTitle"))); + items.add(Item.asSwitch(R.drawable.msg2_sticker, LocaleController.getString("LiteOptionsStickers", R.string.LiteOptionsStickers), LiteMode.FLAGS_ANIMATED_STICKERS)); + if (expanded[0]) { + items.add(Item.asCheckbox(LocaleController.getString("LiteOptionsAutoplayKeyboard"), LiteMode.FLAG_ANIMATED_STICKERS_KEYBOARD)); + items.add(Item.asCheckbox(LocaleController.getString("LiteOptionsAutoplayChat"), LiteMode.FLAG_ANIMATED_STICKERS_CHAT)); + } + items.add(Item.asSwitch(R.drawable.msg2_smile_status, LocaleController.getString("LiteOptionsEmoji", R.string.LiteOptionsEmoji), LiteMode.FLAGS_ANIMATED_EMOJI)); + if (expanded[1]) { + items.add(Item.asCheckbox(LocaleController.getString("LiteOptionsAutoplayKeyboard"), LiteMode.FLAG_ANIMATED_EMOJI_KEYBOARD)); + items.add(Item.asCheckbox(LocaleController.getString("LiteOptionsAutoplayReactions"), LiteMode.FLAG_ANIMATED_EMOJI_REACTIONS)); + items.add(Item.asCheckbox(LocaleController.getString("LiteOptionsAutoplayChat"), LiteMode.FLAG_ANIMATED_EMOJI_CHAT)); + } + items.add(Item.asSwitch(R.drawable.msg2_ask_question, LocaleController.getString("LiteOptionsChat"), LiteMode.FLAGS_CHAT)); + if (expanded[2]) { + items.add(Item.asCheckbox(LocaleController.getString("LiteOptionsBackground"), LiteMode.FLAG_CHAT_BACKGROUND)); + items.add(Item.asCheckbox(LocaleController.getString("LiteOptionsTopics"), LiteMode.FLAG_CHAT_FORUM_TWOCOLUMN)); + items.add(Item.asCheckbox(LocaleController.getString("LiteOptionsSpoiler"), LiteMode.FLAG_CHAT_SPOILER)); + if (SharedConfig.getDevicePerformanceClass() >= SharedConfig.PERFORMANCE_CLASS_AVERAGE) { + items.add(Item.asCheckbox(LocaleController.getString("LiteOptionsBlur"), LiteMode.FLAG_CHAT_BLUR)); + } + items.add(Item.asCheckbox(LocaleController.getString("LiteOptionsScale"), LiteMode.FLAG_CHAT_SCALE)); + } + items.add(Item.asSwitch(R.drawable.msg2_call_earpiece, LocaleController.getString("LiteOptionsCalls"), LiteMode.FLAG_CALLS_ANIMATIONS)); + items.add(Item.asSwitch(R.drawable.msg2_videocall, LocaleController.getString("LiteOptionsAutoplayVideo"), LiteMode.FLAG_AUTOPLAY_VIDEOS)); + items.add(Item.asSwitch(R.drawable.msg2_gif, LocaleController.getString("LiteOptionsAutoplayGifs"), LiteMode.FLAG_AUTOPLAY_GIFS)); + items.add(Item.asInfo("")); + + items.add(Item.asSwitch(LocaleController.getString("LiteSmoothTransitions"), SWITCH_TYPE_SMOOTH_TRANSITIONS)); + items.add(Item.asInfo(LocaleController.getString("LiteSmoothTransitionsInfo"))); + + adapter.setItems(oldItems, items); + } + + private void updateInfo() { + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) { + return; + } + + if (items.isEmpty()) { + updateItems(); + } else if (items.size() >= 2) { + items.set(1, Item.asInfo( + LiteMode.getPowerSaverLevel() <= 0 ? + LocaleController.getString(R.string.LiteBatteryInfoDisabled) : + LiteMode.getPowerSaverLevel() >= 100 ? 
+ LocaleController.getString(R.string.LiteBatteryInfoEnabled) : + LocaleController.formatString(R.string.LiteBatteryInfoBelow, String.format("%d%%", LiteMode.getPowerSaverLevel())) + )); + adapter.notifyItemChanged(1); + } + } + + private void updateValues() { + if (listView == null) { + return; + } + for (int i = 0; i < listView.getChildCount(); ++i) { + View child = listView.getChildAt(i); + if (child == null) { + continue; + } + int position = listView.getChildAdapterPosition(child); + if (position < 0 || position >= items.size()) { + continue; + } + Item item = items.get(position); + if (item.viewType == VIEW_TYPE_SWITCH || item.viewType == VIEW_TYPE_CHECKBOX) { + ((SwitchCell) child).update(item); + } else if (item.viewType == VIEW_TYPE_SLIDER) { + ((PowerSaverSlider) child).update(); + } + } + + if (restrictBulletin != null && !LiteMode.isPowerSaverApplied()) { + restrictBulletin.hide(); + restrictBulletin = null; + } + } + + private static final int VIEW_TYPE_HEADER = 0; + private static final int VIEW_TYPE_SLIDER = 1; + private static final int VIEW_TYPE_INFO = 2; + private static final int VIEW_TYPE_SWITCH = 3; + private static final int VIEW_TYPE_CHECKBOX = 4; + private static final int VIEW_TYPE_SWITCH2 = 5; + + private static final int SWITCH_TYPE_SMOOTH_TRANSITIONS = 0; + + private class Adapter extends AdapterWithDiffUtils { + + @NonNull + @Override + public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { + final Context context = parent.getContext(); + View view = null; + if (viewType == VIEW_TYPE_HEADER) { + view = new HeaderCell(context); + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + } else if (viewType == VIEW_TYPE_SLIDER) { + PowerSaverSlider powerSaverSlider = new PowerSaverSlider(context); + view = powerSaverSlider; + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + } else if (viewType == VIEW_TYPE_INFO) { + view = new TextInfoPrivacyCell(context) { + @Override + public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { + super.onInitializeAccessibilityNodeInfo(info); + + info.setEnabled(true); + } + + @Override + public void onPopulateAccessibilityEvent(AccessibilityEvent event) { + super.onPopulateAccessibilityEvent(event); + + event.setContentDescription(getTextView().getText()); + setContentDescription(getTextView().getText()); + } + }; + } else if (viewType == VIEW_TYPE_SWITCH || viewType == VIEW_TYPE_CHECKBOX) { + view = new SwitchCell(context); + } else if (viewType == VIEW_TYPE_SWITCH2) { + view = new TextCell(context, 23, false, true, null); + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + } + return new RecyclerListView.Holder(view); + } + + @Override + public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int position) { + if (position < 0 || position >= items.size()) { + return; + } + + final LiteModeSettingsActivity.Item item = items.get(position); + final int viewType = holder.getItemViewType(); + if (viewType == VIEW_TYPE_HEADER) { + HeaderCell headerCell = (HeaderCell) holder.itemView; + headerCell.setText(item.text); + } else if (viewType == VIEW_TYPE_SLIDER) { + PowerSaverSlider powerSaverSlider = (PowerSaverSlider) holder.itemView; + powerSaverSlider.update(); +// updateSlider(slideChooseView); + } else if (viewType == VIEW_TYPE_INFO) { + TextInfoPrivacyCell textInfoPrivacyCell = (TextInfoPrivacyCell) holder.itemView; + if (TextUtils.isEmpty(item.text)) { + 
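For reference, the updateItems()/adapter.setItems(oldItems, items) flow above follows the usual rebuild-and-diff pattern: the whole item list is rebuilt and the adapter works out the change set so only affected rows are rebound and animated. A minimal standalone sketch of the same pattern with the stock androidx DiffUtil, assuming illustrative Row/RowAdapter names (the patch itself uses the app's own AdapterWithDiffUtils helper):

    import androidx.recyclerview.widget.DiffUtil;
    import androidx.recyclerview.widget.RecyclerView;

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Objects;

    final class Row {
        final int viewType;
        final CharSequence text;
        Row(int viewType, CharSequence text) { this.viewType = viewType; this.text = text; }
    }

    abstract class RowAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {
        private final List<Row> rows = new ArrayList<>();

        // Rebuild the backing list, then dispatch only the computed changes to the RecyclerView.
        void setRows(List<Row> newRows) {
            final List<Row> old = new ArrayList<>(rows);
            rows.clear();
            rows.addAll(newRows);
            DiffUtil.calculateDiff(new DiffUtil.Callback() {
                @Override public int getOldListSize() { return old.size(); }
                @Override public int getNewListSize() { return rows.size(); }
                @Override public boolean areItemsTheSame(int oldPos, int newPos) {
                    return old.get(oldPos).viewType == rows.get(newPos).viewType
                            && Objects.equals(old.get(oldPos).text, rows.get(newPos).text);
                }
                @Override public boolean areContentsTheSame(int oldPos, int newPos) {
                    return areItemsTheSame(oldPos, newPos);
                }
            }, false).dispatchUpdatesTo(this);
        }

        @Override public int getItemCount() { return rows.size(); }
        @Override public int getItemViewType(int position) { return rows.get(position).viewType; }
    }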
textInfoPrivacyCell.setFixedSize(12); + } else { + textInfoPrivacyCell.setFixedSize(0); + } + textInfoPrivacyCell.setText(item.text); + textInfoPrivacyCell.setContentDescription(item.text); + boolean top = position > 0 && items.get(position - 1).viewType != VIEW_TYPE_INFO; + boolean bottom = position + 1 < items.size() && items.get(position + 1).viewType != VIEW_TYPE_INFO; + if (top && bottom) { + textInfoPrivacyCell.setBackground(Theme.getThemedDrawable(getContext(), R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); + } else if (top) { + textInfoPrivacyCell.setBackground(Theme.getThemedDrawable(getContext(), R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); + } else if (bottom) { + textInfoPrivacyCell.setBackground(Theme.getThemedDrawable(getContext(), R.drawable.greydivider_top, Theme.key_windowBackgroundGrayShadow)); + } else { + textInfoPrivacyCell.setBackground(null); + } + } else if (viewType == VIEW_TYPE_SWITCH || viewType == VIEW_TYPE_CHECKBOX) { + final boolean divider = position + 1 < items.size() && items.get(position + 1).viewType != VIEW_TYPE_INFO; + SwitchCell switchCell = (SwitchCell) holder.itemView; + switchCell.set(item, divider); + } else if (viewType == VIEW_TYPE_SWITCH2) { + TextCell textCell = (TextCell) holder.itemView; + if (item.type == SWITCH_TYPE_SMOOTH_TRANSITIONS) { + SharedPreferences preferences = MessagesController.getGlobalMainSettings(); + boolean animations = preferences.getBoolean("view_animations", true); + textCell.setTextAndCheck(item.text, animations, false); + } + } + } + + @Override + public int getItemViewType(int position) { + if (position < 0 || position >= items.size()) { + return VIEW_TYPE_INFO; + } + return items.get(position).viewType; + } + + @Override + public int getItemCount() { + return items.size(); + } + + @Override + public boolean isEnabled(RecyclerView.ViewHolder holder) { + return holder.getItemViewType() == VIEW_TYPE_CHECKBOX || holder.getItemViewType() == VIEW_TYPE_SWITCH || holder.getItemViewType() == VIEW_TYPE_SWITCH2; + } + } + + private class SwitchCell extends FrameLayout { + + private ImageView imageView; + private LinearLayout textViewLayout; + private TextView textView; + private AnimatedTextView countTextView; + private ImageView arrowView; + private Switch switchView; + private CheckBox2 checkBoxView; + + private boolean needDivider, needLine; + + public SwitchCell(Context context) { + super(context); + + setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_YES); + setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + + imageView = new ImageView(context); + imageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_windowBackgroundWhiteGrayIcon), PorterDuff.Mode.MULTIPLY)); + imageView.setVisibility(View.GONE); + addView(imageView, LayoutHelper.createFrame(24, 24, Gravity.CENTER_VERTICAL | (LocaleController.isRTL ? 
Gravity.RIGHT : Gravity.LEFT), 20, 0, 20, 0)); + + textView = new TextView(context) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + if (MeasureSpec.getMode(widthMeasureSpec) == MeasureSpec.AT_MOST) { + widthMeasureSpec = MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec) - AndroidUtilities.dp(52), MeasureSpec.AT_MOST); + } + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + }; + textView.setLines(1); + textView.setSingleLine(true); + textView.setEllipsize(TextUtils.TruncateAt.END); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); + textView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); + textView.setGravity(LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT); + textView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); + + countTextView = new AnimatedTextView(context, false, true, true); + countTextView.setAnimationProperties(.35f, 0, 200, CubicBezierInterpolator.EASE_OUT_QUINT); + countTextView.setTypeface(AndroidUtilities.getTypeface(AndroidUtilities.TYPEFACE_ROBOTO_MEDIUM)); + countTextView.setTextSize(dp(14)); + countTextView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); + countTextView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); + + arrowView = new ImageView(context); + arrowView.setVisibility(GONE); + arrowView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText), PorterDuff.Mode.MULTIPLY)); + arrowView.setImageResource(R.drawable.arrow_more); + + textViewLayout = new LinearLayout(context); + textViewLayout.setOrientation(LinearLayout.HORIZONTAL); + textViewLayout.setGravity(LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT); + if (LocaleController.isRTL) { + textViewLayout.addView(arrowView, LayoutHelper.createLinear(16, 16, 0, Gravity.CENTER_VERTICAL, 0, 0, 6, 0)); + textViewLayout.addView(countTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, 0, Gravity.CENTER_VERTICAL, 0, 0, 6, 0)); + textViewLayout.addView(textView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL)); + } else { + textViewLayout.addView(textView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL)); + textViewLayout.addView(countTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, 0, Gravity.CENTER_VERTICAL, 6, 0, 0, 0)); + textViewLayout.addView(arrowView, LayoutHelper.createLinear(16, 16, 0, Gravity.CENTER_VERTICAL, 2, 0, 0, 0)); + } + addView(textViewLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL | (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT), 64, 0, 8, 0)); + + switchView = new Switch(context); + switchView.setVisibility(GONE); + switchView.setColors(Theme.key_switchTrack, Theme.key_switchTrackChecked, Theme.key_windowBackgroundWhite, Theme.key_windowBackgroundWhite); + switchView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); + addView(switchView, LayoutHelper.createFrame(37, 50, Gravity.CENTER_VERTICAL | (LocaleController.isRTL ? 
Gravity.LEFT : Gravity.RIGHT), 19, 0, 19, 0)); + + checkBoxView = new CheckBox2(context, 21); + checkBoxView.setColor(Theme.key_radioBackgroundChecked, Theme.key_checkboxDisabled, Theme.key_checkboxCheck); + checkBoxView.setDrawUnchecked(true); + checkBoxView.setChecked(true, false); + checkBoxView.setDrawBackgroundAsArc(10); + checkBoxView.setVisibility(GONE); + checkBoxView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); + addView(checkBoxView, LayoutHelper.createFrame(21, 21, Gravity.CENTER_VERTICAL | (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT), LocaleController.isRTL ? 0 : 64, 0, LocaleController.isRTL ? 64 : 0, 0)); + + setFocusable(true); + } + + private boolean disabled; + public void setDisabled(boolean disabled, boolean animated) { + if (this.disabled != disabled) { + this.disabled = disabled; + if (animated) { + imageView.animate().alpha(disabled ? .5f : 1f).setDuration(220).start(); + textViewLayout.animate().alpha(disabled ? .5f : 1f).setDuration(220).start(); + switchView.animate().alpha(disabled ? .5f : 1f).setDuration(220).start(); + checkBoxView.animate().alpha(disabled ? .5f : 1f).setDuration(220).start(); + } else { + imageView.setAlpha(disabled ? .5f : 1f); + textViewLayout.setAlpha(disabled ? .5f : 1f); + switchView.setAlpha(disabled ? .5f : 1f); + checkBoxView.setAlpha(disabled ? .5f : 1f); + } + setEnabled(!disabled); + } + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure( + MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), + MeasureSpec.makeMeasureSpec(dp(50), MeasureSpec.EXACTLY) + ); + } + + public void set(Item item, boolean divider) { + if (item.viewType == VIEW_TYPE_SWITCH) { + checkBoxView.setVisibility(GONE); + imageView.setVisibility(VISIBLE); + imageView.setImageResource(item.iconResId); + textView.setText(item.text); + if (containing = item.getFlagsCount() > 1) { + updateCount(item, false); + countTextView.setVisibility(VISIBLE); + arrowView.setVisibility(VISIBLE); + } else { + countTextView.setVisibility(GONE); + arrowView.setVisibility(GONE); + } + textView.setTranslationX(0); + switchView.setVisibility(VISIBLE); + switchView.setChecked(LiteMode.isEnabled(item.flags), false); + needLine = item.getFlagsCount() > 1; + } else { + checkBoxView.setVisibility(VISIBLE); + checkBoxView.setChecked(LiteMode.isEnabled(item.flags), false); + imageView.setVisibility(GONE); + switchView.setVisibility(GONE); + countTextView.setVisibility(GONE); + arrowView.setVisibility(GONE); + textView.setText(item.text); + textView.setTranslationX(dp(41) * (LocaleController.isRTL ? -2.2f : 1)); + containing = false; + needLine = false; + } + + ((MarginLayoutParams) textViewLayout.getLayoutParams()).rightMargin = AndroidUtilities.dp(item.viewType == VIEW_TYPE_SWITCH ? (LocaleController.isRTL ? 64 : 75) + 4 : 8); + + setWillNotDraw(!((needDivider = divider) || needLine)); + setDisabled(LiteMode.isPowerSaverApplied(), false); + } + + public void update(Item item) { + if (item.viewType == VIEW_TYPE_SWITCH) { + if (containing = item.getFlagsCount() > 1) { + updateCount(item, true); + int index = getExpandedIndex(item.flags); + arrowView.clearAnimation(); + arrowView.animate().rotation(index >= 0 && expanded[index] ? 
180 : 0).setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT).setDuration(240).start(); + } + switchView.setChecked(LiteMode.isEnabled(item.flags), true); + } else { + checkBoxView.setChecked(LiteMode.isEnabled(item.flags), true); + } + + setDisabled(LiteMode.isPowerSaverApplied(), true); + } + + private boolean containing; + private int enabled, all; + + private void updateCount(Item item, boolean animated) { + enabled = preprocessFlagsCount(LiteMode.getValue(true) & item.flags); + all = preprocessFlagsCount(item.flags); + countTextView.setText(String.format("%d/%d", enabled, all), animated && !LocaleController.isRTL); + } + + private int preprocessFlagsCount(int flags) { + boolean isPremium = getUserConfig().isPremium(); + int count = Integer.bitCount(flags); + if (isPremium) { + if ((flags & LiteMode.FLAG_ANIMATED_EMOJI_CHAT_NOT_PREMIUM) > 0) + count--; + if ((flags & LiteMode.FLAG_ANIMATED_EMOJI_REACTIONS_NOT_PREMIUM) > 0) + count--; + if ((flags & LiteMode.FLAG_ANIMATED_EMOJI_KEYBOARD_NOT_PREMIUM) > 0) + count--; + } else { + if ((flags & LiteMode.FLAG_ANIMATED_EMOJI_CHAT_PREMIUM) > 0) + count--; + if ((flags & LiteMode.FLAG_ANIMATED_EMOJI_REACTIONS_PREMIUM) > 0) + count--; + if ((flags & LiteMode.FLAG_ANIMATED_EMOJI_KEYBOARD_PREMIUM) > 0) + count--; + } + if (SharedConfig.getDevicePerformanceClass() < SharedConfig.PERFORMANCE_CLASS_AVERAGE && (flags & LiteMode.FLAG_CHAT_BLUR) > 0) { + count--; + } + return count; + } + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + if (LocaleController.isRTL) { + if (needLine) { + float x = dp(19 + 37 + 19); + canvas.drawRect(x - dp(0.66f), (getMeasuredHeight() - dp(20)) / 2f, x, (getMeasuredHeight() + dp(20)) / 2f, Theme.dividerPaint); + } + if (needDivider) { + canvas.drawLine(getMeasuredWidth() - dp(64) + (textView.getTranslationX() < 0 ? dp(-32) : 0), getMeasuredHeight() - 1, 0, getMeasuredHeight() - 1, Theme.dividerPaint); + } + } else { + if (needLine) { + float x = getMeasuredWidth() - dp(19 + 37 + 19); + canvas.drawRect(x - dp(0.66f), (getMeasuredHeight() - dp(20)) / 2f, x, (getMeasuredHeight() + dp(20)) / 2f, Theme.dividerPaint); + } + if (needDivider) { + canvas.drawLine(dp(64) + textView.getTranslationX(), getMeasuredHeight() - 1, getMeasuredWidth(), getMeasuredHeight() - 1, Theme.dividerPaint); + } + } + } + + @Override + public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { + super.onInitializeAccessibilityNodeInfo(info); + info.setClassName(checkBoxView.getVisibility() == View.VISIBLE ? 
"android.widget.CheckBox" : "android.widget.Switch"); + info.setCheckable(true); + info.setEnabled(true); + if (checkBoxView.getVisibility() == View.VISIBLE) { + info.setChecked(checkBoxView.isChecked()); + } else { + info.setChecked(switchView.isChecked()); + } + StringBuilder sb = new StringBuilder(); + sb.append(textView.getText()); + if (containing) { + sb.append('\n'); + sb.append(LocaleController.formatString("Of", R.string.Of, enabled, all)); + } + info.setContentDescription(sb); + } + } + + private class PowerSaverSlider extends FrameLayout { + + BatteryDrawable batteryIcon; + SpannableStringBuilder batteryText; + + LinearLayout headerLayout; + TextView headerTextView; + AnimatedTextView headerOnView; + FrameLayout valuesView; + TextView leftTextView; + AnimatedTextView middleTextView; + TextView rightTextView; + SeekBarView seekBarView; + + private SeekBarAccessibilityDelegate seekBarAccessibilityDelegate; + + public PowerSaverSlider(Context context) { + super(context); + + headerLayout = new LinearLayout(context); + headerLayout.setGravity(LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT); + headerLayout.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO_HIDE_DESCENDANTS); + + headerTextView = new TextView(context); + headerTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + headerTextView.setTypeface(AndroidUtilities.getTypeface(AndroidUtilities.TYPEFACE_ROBOTO_MEDIUM)); + headerTextView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlueHeader)); + headerTextView.setGravity(LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT); + headerTextView.setText(LocaleController.getString("LiteBatteryTitle")); + headerLayout.addView(headerTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL)); + + headerOnView = new AnimatedTextView(context, true, false, false) { + Drawable backgroundDrawable = Theme.createRoundRectDrawable(AndroidUtilities.dp(4), Theme.multAlpha(Theme.getColor(Theme.key_windowBackgroundWhiteBlueHeader), 0.15f)); + + @Override + protected void onDraw(Canvas canvas) { + backgroundDrawable.setBounds(0, 0, (int) (getPaddingLeft() + getDrawable().getCurrentWidth() + getPaddingRight()), getMeasuredHeight()); + backgroundDrawable.draw(canvas); + + super.onDraw(canvas); + } + }; + headerOnView.setTypeface(AndroidUtilities.getTypeface(AndroidUtilities.TYPEFACE_ROBOTO_MEDIUM)); + headerOnView.setPadding(AndroidUtilities.dp(5.33f), AndroidUtilities.dp(2), AndroidUtilities.dp(5.33f), AndroidUtilities.dp(2)); + headerOnView.setTextSize(AndroidUtilities.dp(12)); + headerOnView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlueHeader)); + headerLayout.addView(headerOnView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, 17, Gravity.CENTER_VERTICAL, 6, 1, 0, 0)); + + addView(headerLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.FILL_HORIZONTAL, 21, 17, 21, 0)); + + seekBarView = new SeekBarView(context, true, null); + seekBarView.setReportChanges(true); + seekBarView.setDelegate(new SeekBarView.SeekBarViewDelegate() { + @Override + public void onSeekBarDrag(boolean stop, float progress) { + int newValue = Math.round(progress * 100F); + if (newValue != LiteMode.getPowerSaverLevel()) { + LiteMode.setPowerSaverLevel(newValue); + updateValues(); + updateInfo(); + + if (newValue <= 0 || newValue >= 100) { + try { + performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, 
HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); + } catch (Exception e) {} + } + } + } + @Override + public void onSeekBarPressed(boolean pressed) {} + @Override + public CharSequence getContentDescription() { + return " "; + } + }); + seekBarView.setProgress(LiteMode.getPowerSaverLevel() / 100F); + seekBarView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); + addView(seekBarView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 38 + 6, Gravity.TOP, 6, 68, 6, 0)); + + valuesView = new FrameLayout(context); + valuesView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO_HIDE_DESCENDANTS); + + leftTextView = new TextView(context); + leftTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); + leftTextView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); + leftTextView.setGravity(Gravity.LEFT); + leftTextView.setText(LocaleController.getString("LiteBatteryDisabled", R.string.LiteBatteryDisabled)); + valuesView.addView(leftTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.CENTER_VERTICAL)); + + middleTextView = new AnimatedTextView(context, false, true, true) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + int fullWidth = MeasureSpec.getSize(widthMeasureSpec); + if (fullWidth <= 0) { + fullWidth = AndroidUtilities.displaySize.x - dp(20); + } + float leftTextViewWidth = leftTextView.getPaint().measureText(leftTextView.getText().toString()); + float rightTextViewWidth = rightTextView.getPaint().measureText(rightTextView.getText().toString()); + super.onMeasure(MeasureSpec.makeMeasureSpec((int) (fullWidth - leftTextViewWidth - rightTextViewWidth), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(dp(24), MeasureSpec.EXACTLY)); + } + }; + middleTextView.setAnimationProperties(.45f, 0, 240, CubicBezierInterpolator.EASE_OUT_QUINT); + middleTextView.setGravity(Gravity.CENTER_HORIZONTAL); + middleTextView.setTextSize(dp(13)); + middleTextView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlueText)); + valuesView.addView(middleTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + + batteryText = new SpannableStringBuilder("b"); + batteryIcon = new BatteryDrawable(); + batteryIcon.colorFromPaint(middleTextView.getPaint()); + batteryIcon.setTranslationY(dp(1.5f)); + batteryIcon.setBounds(dp(3), dp(-20), dp(20 + 3), 0); + batteryText.setSpan(new ImageSpan(batteryIcon, DynamicDrawableSpan.ALIGN_BOTTOM), 0, batteryText.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + + rightTextView = new TextView(context); + rightTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); + rightTextView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); + rightTextView.setGravity(Gravity.RIGHT); + rightTextView.setText(LocaleController.getString("LiteBatteryEnabled", R.string.LiteBatteryEnabled)); + valuesView.addView(rightTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.RIGHT | Gravity.CENTER_VERTICAL)); + + addView(valuesView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.FILL_HORIZONTAL, 21, 52, 21, 0)); + + seekBarAccessibilityDelegate = new IntSeekBarAccessibilityDelegate() { + @Override + protected int getProgress() { + return LiteMode.getPowerSaverLevel(); + } + + @Override + protected void setProgress(int progress) { + seekBarView.delegate.onSeekBarDrag(true, progress / 100f); + 
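The slider above stores the power-saver threshold as a whole percent (0 to 100) and converts to and from the SeekBar's 0..1 fraction: drags go through Math.round(progress * 100F), accessibility actions go back through progress / 100f, and the two endpoints carry special meaning (0 means never, 100 means always, which is why reaching them fires the haptic tick). A minimal sketch of that mapping under those assumptions; PowerSaverPolicy is an illustrative stand-in, not LiteMode's actual implementation:

    final class PowerSaverPolicy {
        private int level = 10; // whole percent: 0 = never enable, 100 = always enable, otherwise a battery threshold

        // The SeekBar reports a 0..1 fraction; store it as a clamped whole percent.
        void onSeekBarDrag(float progress) {
            level = Math.max(0, Math.min(100, Math.round(progress * 100f)));
        }

        // Going the other way for setProgress() and restoring state.
        float toSeekBarProgress() {
            return level / 100f;
        }

        // The three cases the settings screen distinguishes.
        boolean isApplied(int batteryPercent) {
            if (level <= 0) return false;   // "always disabled"
            if (level >= 100) return true;  // "always enabled"
            return batteryPercent < level;  // "when below X%" (approximate; the exact comparison lives in LiteMode)
        }
    }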
seekBarView.setProgress(progress / 100f); + } + + @Override + protected int getMaxValue() { + return 100; + } + + @Override + protected int getDelta() { + return 5; + } + + @Override + public void onInitializeAccessibilityNodeInfoInternal(View host, AccessibilityNodeInfo info) { + super.onInitializeAccessibilityNodeInfoInternal(host, info); + + info.setEnabled(true); + } + + @Override + public void onPopulateAccessibilityEvent(@NonNull View host, @NonNull AccessibilityEvent event) { + super.onPopulateAccessibilityEvent(host, event); + + StringBuilder sb = new StringBuilder(LocaleController.getString(R.string.LiteBatteryTitle)).append(", "); + int percent = LiteMode.getPowerSaverLevel(); + if (percent <= 0) { + sb.append(LocaleController.getString(R.string.LiteBatteryAlwaysDisabled)); + } else if (percent >= 100) { + sb.append(LocaleController.getString(R.string.LiteBatteryAlwaysEnabled)); + } else { + sb.append(LocaleController.formatString(R.string.AccDescrLiteBatteryWhenBelow, Math.round(percent))); + } + + event.setContentDescription(sb); + setContentDescription(sb); + } + }; + + update(); + } + + @Override + public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { + super.onInitializeAccessibilityNodeInfo(info); + + seekBarAccessibilityDelegate.onInitializeAccessibilityNodeInfo(this, info); + } + + @Override + public void onPopulateAccessibilityEvent(AccessibilityEvent event) { + super.onPopulateAccessibilityEvent(event); + + seekBarAccessibilityDelegate.onPopulateAccessibilityEvent(this, event); + } + + @Override + public boolean performAccessibilityAction(int action, @Nullable Bundle arguments) { + return seekBarAccessibilityDelegate.performAccessibilityAction(this, action, arguments); + } + + public void update() { + final int percent = LiteMode.getPowerSaverLevel(); + + middleTextView.cancelAnimation(); + if (percent <= 0) { + middleTextView.setText(LocaleController.getString("LiteBatteryAlwaysDisabled", R.string.LiteBatteryAlwaysDisabled), !LocaleController.isRTL); + } else if (percent >= 100) { + middleTextView.setText(LocaleController.getString("LiteBatteryAlwaysEnabled", R.string.LiteBatteryAlwaysEnabled), !LocaleController.isRTL); + } else { + batteryIcon.setFillValue(percent / 100F, true); + middleTextView.setText(AndroidUtilities.replaceCharSequence("%s", LocaleController.getString("LiteBatteryWhenBelow", R.string.LiteBatteryWhenBelow), TextUtils.concat(String.format("%d%% ", Math.round(percent)), batteryText)), !LocaleController.isRTL); + } + + headerOnView.setText((LiteMode.isPowerSaverApplied() ? LocaleController.getString("LiteBatteryEnabled", R.string.LiteBatteryEnabled) : LocaleController.getString("LiteBatteryDisabled", R.string.LiteBatteryDisabled)).toUpperCase()); + updateHeaderOnVisibility(percent > 0 && percent < 100); + + updateOnActive(percent >= 100); + updateOffActive(percent <= 0); + } + + private boolean headerOnVisible; + private void updateHeaderOnVisibility(boolean visible) { + if (visible != headerOnVisible) { + headerOnVisible = visible; + headerOnView.clearAnimation(); + headerOnView.animate().alpha(visible ? 1f : 0f).setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT).setDuration(220).start(); + } + } + + private float onActiveT; + private ValueAnimator onActiveAnimator; + private void updateOnActive(boolean active) { + final float activeT = active ? 
1f : 0f; + if (onActiveT != activeT) { + onActiveT = activeT; + + if (onActiveAnimator != null) { + onActiveAnimator.cancel(); + onActiveAnimator = null; + } + + onActiveAnimator = ValueAnimator.ofFloat(onActiveT, activeT); + onActiveAnimator.addUpdateListener(anm -> { + rightTextView.setTextColor(ColorUtils.blendARGB( + Theme.getColor(Theme.key_windowBackgroundWhiteGrayText), + Theme.getColor(Theme.key_windowBackgroundWhiteBlueText), + onActiveT = (float) anm.getAnimatedValue() + )); + }); + onActiveAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + rightTextView.setTextColor(ColorUtils.blendARGB( + Theme.getColor(Theme.key_windowBackgroundWhiteGrayText), + Theme.getColor(Theme.key_windowBackgroundWhiteBlueText), + onActiveT = (float) activeT + )); + } + }); + onActiveAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + onActiveAnimator.setDuration(320); + onActiveAnimator.start(); + } + } + + private float offActiveT; + private ValueAnimator offActiveAnimator; + private void updateOffActive(boolean active) { + final float activeT = active ? 1f : 0f; + if (offActiveT != activeT) { + offActiveT = activeT; + + if (offActiveAnimator != null) { + offActiveAnimator.cancel(); + offActiveAnimator = null; + } + + offActiveAnimator = ValueAnimator.ofFloat(offActiveT, activeT); + offActiveAnimator.addUpdateListener(anm -> { + leftTextView.setTextColor(ColorUtils.blendARGB( + Theme.getColor(Theme.key_windowBackgroundWhiteGrayText), + Theme.getColor(Theme.key_windowBackgroundWhiteBlueText), + offActiveT = (float) anm.getAnimatedValue() + )); + }); + offActiveAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + leftTextView.setTextColor(ColorUtils.blendARGB( + Theme.getColor(Theme.key_windowBackgroundWhiteGrayText), + Theme.getColor(Theme.key_windowBackgroundWhiteBlueText), + offActiveT = (float) activeT + )); + } + }); + offActiveAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + offActiveAnimator.setDuration(320); + offActiveAnimator.start(); + } + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure( + MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), + MeasureSpec.makeMeasureSpec(dp(112), MeasureSpec.EXACTLY) + ); + } + } + + private static class Item extends AdapterWithDiffUtils.Item { + public CharSequence text; + public int iconResId; + public int flags; + public int type; + + private Item(int viewType, CharSequence text, int iconResId, int flags, int type) { + super(viewType, false); + this.text = text; + this.iconResId = iconResId; + this.flags = flags; + this.type = type; + } + + public static Item asHeader(CharSequence text) { + return new Item(VIEW_TYPE_HEADER, text, 0, 0, 0); + } + public static Item asSlider() { + return new Item(VIEW_TYPE_SLIDER, null, 0, 0, 0); + } + public static Item asInfo(CharSequence text) { + return new Item(VIEW_TYPE_INFO, text, 0, 0, 0); + } + public static Item asSwitch(int iconResId, CharSequence text, int flags) { + return new Item(VIEW_TYPE_SWITCH, text, iconResId, flags, 0); + } + public static Item asCheckbox(CharSequence text, int flags) { + return new Item(VIEW_TYPE_CHECKBOX, text, 0, flags, 0); + } + public static Item asSwitch(CharSequence text, int type) { + return new Item(VIEW_TYPE_SWITCH2, text, 0, 0, type); + } + + public int getFlagsCount() { + return Integer.bitCount(flags); + } + + 
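getFlagsCount() above, together with the enabled/all counter built in updateCount(), relies on Integer.bitCount over bit-flag masks: a group row's total is the number of bits in its combined mask, and the enabled count is how many of those bits are also set in the current settings value. A small self-contained illustration with made-up flag values (the real constants come from LiteMode):

    final class FlagCounterDemo {
        static final int FLAG_KEYBOARD  = 1;      // 0b001
        static final int FLAG_REACTIONS = 1 << 1; // 0b010
        static final int FLAG_CHAT      = 1 << 2; // 0b100
        static final int GROUP_EMOJI = FLAG_KEYBOARD | FLAG_REACTIONS | FLAG_CHAT;

        // "n/m" text for a group row: bits enabled inside the group vs. bits in the group mask.
        static String summary(int enabledMask, int groupMask) {
            int all = Integer.bitCount(groupMask);
            int enabled = Integer.bitCount(enabledMask & groupMask);
            return enabled + "/" + all;
        }

        public static void main(String[] args) {
            int enabledMask = FLAG_KEYBOARD | FLAG_CHAT;            // two of the three sub-options on
            System.out.println(summary(enabledMask, GROUP_EMOJI));  // prints 2/3
        }
    }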
@Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof Item)) { + return false; + } + Item item = (Item) o; + if (item.viewType != viewType) { + return false; + } + if (viewType == VIEW_TYPE_SWITCH) { + if (item.iconResId != iconResId) { + return false; + } + } + if (viewType == VIEW_TYPE_SWITCH2) { + if (item.type != type) { + return false; + } + } + if (viewType == VIEW_TYPE_SWITCH || viewType == VIEW_TYPE_CHECKBOX) { + if (item.flags != flags) { + return false; + } + } + if (viewType == VIEW_TYPE_HEADER || viewType == VIEW_TYPE_INFO || viewType == VIEW_TYPE_SWITCH || viewType == VIEW_TYPE_CHECKBOX || viewType == VIEW_TYPE_SWITCH2) { + if (!TextUtils.equals(item.text, text)) { + return false; + } + } + return true; + } + } + + @Override + public void onFragmentDestroy() { + super.onFragmentDestroy(); + LiteMode.savePreference(); + AnimatedEmojiDrawable.updateAll(); + Theme.reloadWallpaper(); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LocationActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LocationActivity.java index 086826f3d2..25bf447de3 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/LocationActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LocationActivity.java @@ -2472,7 +2472,7 @@ private double bearingBetweenLocations(IMapsProvider.LatLng latLng1, IMapsProvid @Override public void didReceivedNotification(int id, int account, Object... args) { if (id == NotificationCenter.closeChats) { - removeSelfFromStack(); + removeSelfFromStack(true); } else if (id == NotificationCenter.locationPermissionGranted) { locationDenied = false; if (adapter != null) { @@ -2602,12 +2602,11 @@ public boolean onBackPressed() { } @Override - public void finishFragment(boolean animated) { + public boolean finishFragment(boolean animated) { if (onCheckGlScreenshot()) { - return; + return false; } - - super.finishFragment(animated); + return super.finishFragment(animated); } private boolean onCheckGlScreenshot() { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java index 7882e1d87c..57e08c3ac8 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java @@ -40,10 +40,12 @@ import android.net.Uri; import android.os.Build; import android.os.Bundle; +import android.os.Looper; import android.telephony.PhoneNumberUtils; import android.text.Editable; import android.text.InputFilter; import android.text.InputType; +import android.text.Layout; import android.text.Spannable; import android.text.SpannableStringBuilder; import android.text.Spanned; @@ -52,6 +54,7 @@ import android.text.TextWatcher; import android.text.method.PasswordTransformationMethod; import android.text.style.ClickableSpan; +import android.text.style.ForegroundColorSpan; import android.text.style.ImageSpan; import android.text.style.ReplacementSpan; import android.util.Base64; @@ -89,11 +92,16 @@ //import com.google.android.gms.auth.api.signin.GoogleSignInClient; //import com.google.android.gms.auth.api.signin.GoogleSignInOptions; //import com.google.android.gms.common.api.ApiException; +//import com.google.android.gms.safetynet.SafetyNet; +import org.json.JSONException; +import org.json.JSONObject; import org.telegram.PhoneFormat.PhoneFormat; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; +import 
org.telegram.messenger.AuthTokensHelper; import org.telegram.messenger.BuildVars; +import org.telegram.messenger.CallReceiver; import org.telegram.messenger.ContactsController; import org.telegram.messenger.Emoji; import org.telegram.messenger.FileLog; @@ -135,6 +143,8 @@ import org.telegram.ui.Components.EditTextBoldCursor; import org.telegram.ui.Components.ImageUpdater; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.LinkPath; +import org.telegram.ui.Components.LoadingDrawable; import org.telegram.ui.Components.LoginOrView; import org.telegram.ui.Components.OutlineTextContainerView; import org.telegram.ui.Components.RLottieDrawable; @@ -154,6 +164,7 @@ import java.io.InputStreamReader; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -172,6 +183,7 @@ import cn.hutool.core.util.StrUtil; import kotlin.Unit; import tw.nekomimi.nekogram.DataCenter; +import tw.nekomimi.nekogram.NekoConfig; import tw.nekomimi.nekogram.NekoXConfig; import tw.nekomimi.nekogram.helpers.PasscodeHelper; import tw.nekomimi.nekogram.ui.BottomBuilder; @@ -334,6 +346,8 @@ public class LoginActivity extends BaseFragment implements NotificationCenter.No private Runnable[] editDoneCallback = new Runnable[2]; private boolean[] postedEditDoneCallback = new boolean[2]; + private boolean forceDisableSafetyNet; + // NekoX Definitions ActionBarMenuItem menu = null; @@ -588,6 +602,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { }; keyboardLinearLayout.addView(slideViewsContainer, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 0, 1f)); keyboardView = new CustomPhoneKeyboardView(context); + keyboardView.setViewToFindFocus(slideViewsContainer); keyboardLinearLayout.addView(keyboardView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, CustomPhoneKeyboardView.KEYBOARD_HEIGHT_DP)); views[VIEW_PHONE_INPUT] = new PhoneView(context); @@ -612,7 +627,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { slideViewsContainer.addView(views[a], LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER, AndroidUtilities.isTablet() ? 26 : 18, 30, AndroidUtilities.isTablet() ? 26 : 18, 0)); } - Bundle savedInstanceState = activityMode == MODE_LOGIN ? loadCurrentState(newAccount) : null; + Bundle savedInstanceState = activityMode == MODE_LOGIN ? 
loadCurrentState(newAccount, currentAccount) : null; if (savedInstanceState != null) { currentViewNum = savedInstanceState.getInt("currentViewNum", 0); syncContacts = savedInstanceState.getInt("syncContacts", 0) == 1; @@ -881,7 +896,9 @@ public void onResume() { ConnectionsManager.getInstance(currentAccount).setAppPaused(false, false); } AndroidUtilities.requestAdjustResize(getParentActivity(), classGuid); - fragmentView.requestLayout(); + if (fragmentView != null) { + fragmentView.requestLayout(); + } try { if (currentViewNum >= VIEW_CODE_MESSAGE && currentViewNum <= VIEW_CODE_CALL && views[currentViewNum] instanceof LoginActivitySmsView) { int time = ((LoginActivitySmsView) views[currentViewNum]).openTime; @@ -943,13 +960,10 @@ public void onRequestPermissionsResultFragment(int requestCode, String[] permiss } } - public static Bundle loadCurrentState(boolean newAccount) { - if (newAccount) { - return null; - } + public static Bundle loadCurrentState(boolean newAccount, int currentAccount) { try { Bundle bundle = new Bundle(); - SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("logininfo2", Context.MODE_PRIVATE); + SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("logininfo2" + (newAccount ? "_" + currentAccount : ""), Context.MODE_PRIVATE); Map params = preferences.getAll(); for (Map.Entry entry : params.entrySet()) { String key = entry.getKey(); @@ -986,7 +1000,7 @@ public static Bundle loadCurrentState(boolean newAccount) { } private void clearCurrentState() { - SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("logininfo2", Context.MODE_PRIVATE); + SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("logininfo2" + (newAccount ? 
"_" + currentAccount : ""), Context.MODE_PRIVATE); SharedPreferences.Editor editor = preferences.edit(); editor.clear(); editor.commit(); @@ -1099,7 +1113,8 @@ private void needShowAlert(String title, String text) { } private void onFieldError(View view, boolean allowErrorSelection) { - view.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + if (!NekoConfig.disableVibration.Bool()) + view.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); AndroidUtilities.shakeViewSpring(view, 3.5f); if (allowErrorSelection) { @@ -1342,6 +1357,10 @@ private void showEditDoneProgress(boolean show, boolean animated, boolean fromCa if (animated && doneProgressVisible[currentDoneType] == show && !fromCallback) { return; } + if (Looper.myLooper() != Looper.getMainLooper()) { + AndroidUtilities.runOnUIThread(() -> showEditDoneProgress(show, animated, fromCallback)); + return; + } boolean floating = currentDoneType == DONE_TYPE_FLOATING; if (!fromCallback && !floating) { @@ -1515,6 +1534,10 @@ private void needHideProgress(boolean cancel, boolean animated) { public void setPage(@ViewNumber int page, boolean animated, Bundle params, boolean back) { boolean needFloatingButton = page == VIEW_PHONE_INPUT || page == VIEW_REGISTER || page == VIEW_PASSWORD || page == VIEW_NEW_PASSWORD_STAGE_1 || page == VIEW_NEW_PASSWORD_STAGE_2 || page == VIEW_ADD_EMAIL; + if (page == currentViewNum) { + animated = false; + } + if (needFloatingButton) { if (page == VIEW_PHONE_INPUT) { checkPermissions = true; @@ -1557,6 +1580,7 @@ public void onAnimationEnd(Animator animation) { showDoneButton(true, true); } outView.setVisibility(View.GONE); + outView.onHide(); outView.setX(0); } }); @@ -1571,6 +1595,7 @@ public void onAnimationEnd(Animator animation) { } else { backButtonView.setVisibility(views[page].needBackButton() || newAccount ? View.VISIBLE : View.GONE); views[currentViewNum].setVisibility(View.GONE); + views[currentViewNum].onHide(); currentViewNum = page; views[page].setParams(params, false); views[page].setVisibility(View.VISIBLE); @@ -1593,7 +1618,7 @@ public void saveSelfArgs(Bundle outState) { v.saveStateParams(bundle); } } - SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("logininfo2", Context.MODE_PRIVATE); + SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("logininfo2" + (newAccount ? 
"_" + currentAccount : ""), Context.MODE_PRIVATE); SharedPreferences.Editor editor = preferences.edit(); editor.clear(); putBundleToEditor(bundle, editor, null); @@ -1635,6 +1660,7 @@ private void needFinishActivity(boolean afterSignup, boolean showSetPasswordConf NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.mainUserInfoChanged); LocaleController.getInstance().loadRemoteLanguages(currentAccount); + RestrictedLanguagesSelectActivity.checkRestrictedLanguages(true); } } else if (getParentActivity() instanceof ExternalActionActivity) { ((ExternalActionActivity) getParentActivity()).onFinishLogin(); @@ -1663,6 +1689,12 @@ private void onAuthSuccess(TLRPC.TL_auth_authorization res, boolean afterSignup) MessagesController.getInstance(currentAccount).checkPromoInfo(true); ConnectionsManager.getInstance(currentAccount).updateDcSettings(); + if (res.future_auth_token != null) { + AuthTokensHelper.saveLogInToken(res); + } else { + FileLog.d("onAuthSuccess future_auth_token is empty"); + } + if (afterSignup) { MessagesController.getInstance(currentAccount).putDialogsEndReachedAfterRegistration(); } @@ -1677,11 +1709,112 @@ private void fillNextCodeParams(Bundle params, TLRPC.TL_account_sentEmailCode re setPage(VIEW_CODE_EMAIL_SETUP, true, params, false); } - private void fillNextCodeParams(Bundle params, TLRPC.TL_auth_sentCode res) { + private void fillNextCodeParams(Bundle params, TLRPC.auth_SentCode res) { fillNextCodeParams(params, res, true); } - private void fillNextCodeParams(Bundle params, TLRPC.TL_auth_sentCode res, boolean animate) { + private void resendCodeFromSafetyNet(Bundle params, TLRPC.auth_SentCode res) { + if (!isRequestingFirebaseSms) { + return; + } + needHideProgress(false); + isRequestingFirebaseSms = false; + + TLRPC.TL_auth_resendCode req = new TLRPC.TL_auth_resendCode(); + req.phone_number = params.getString("phoneFormated"); + req.phone_code_hash = res.phone_code_hash; + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { + if (response != null) { + AndroidUtilities.runOnUIThread(() -> fillNextCodeParams(params, (TLRPC.auth_SentCode) response)); + } else { + AndroidUtilities.runOnUIThread(() -> { + if (getParentActivity() == null || getParentActivity().isFinishing() || getContext() == null) { + return; + } + new AlertDialog.Builder(getContext()) + .setTitle(LocaleController.getString(R.string.RestorePasswordNoEmailTitle)) + .setMessage(LocaleController.getString(R.string.SafetyNetErrorOccurred)) + .setPositiveButton(LocaleController.getString(R.string.OK), (dialog, which) -> { + forceDisableSafetyNet = true; + if (currentViewNum != VIEW_PHONE_INPUT) { + setPage(VIEW_PHONE_INPUT, true, null, true); + } + }) + .show(); + }); + } + }, ConnectionsManager.RequestFlagFailOnServerErrors | ConnectionsManager.RequestFlagWithoutLogin); + } + + private boolean isRequestingFirebaseSms; + private void fillNextCodeParams(Bundle params, TLRPC.auth_SentCode res, boolean animate) { + if (res.type instanceof TLRPC.TL_auth_sentCodeTypeFirebaseSms && !res.type.verifiedFirebase && !isRequestingFirebaseSms) { + // NekoX: disable SafetyNet + /* + if (PushListenerController.GooglePushListenerServiceProvider.INSTANCE.hasServices()) { + needShowProgress(0); + isRequestingFirebaseSms = true; + SafetyNet.getClient(ApplicationLoader.applicationContext).attest(res.type.nonce, BuildVars.SAFETYNET_KEY) + .addOnSuccessListener(attestationResponse -> { + String jws = attestationResponse.getJwsResult(); + + if (jws != null) { + 
TLRPC.TL_auth_requestFirebaseSms req = new TLRPC.TL_auth_requestFirebaseSms(); + req.phone_number = params.getString("phoneFormated"); + req.phone_code_hash = res.phone_code_hash; + req.safety_net_token = jws; + req.flags |= 1; + + String[] spl = jws.split("\\."); + if (spl.length > 0) { + try { + JSONObject obj = new JSONObject(new String(Base64.decode(spl[1].getBytes(StandardCharsets.UTF_8), 0))); + + if (obj.optBoolean("basicIntegrity") && obj.optBoolean("ctsProfileMatch")) { + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { + needHideProgress(false); + isRequestingFirebaseSms = false; + if (response instanceof TLRPC.TL_boolTrue) { + res.type.verifiedFirebase = true; + AndroidUtilities.runOnUIThread(() -> fillNextCodeParams(params, res, animate)); + } else { + FileLog.d("Resend firebase sms because auth.requestFirebaseSms = false"); + resendCodeFromSafetyNet(params, res); + } + }, ConnectionsManager.RequestFlagFailOnServerErrors | ConnectionsManager.RequestFlagWithoutLogin); + } else { + FileLog.d("Resend firebase sms because ctsProfileMatch or basicIntegrity = false"); + resendCodeFromSafetyNet(params, res); + } + } catch (JSONException e) { + FileLog.e(e); + + FileLog.d("Resend firebase sms because of exception"); + resendCodeFromSafetyNet(params, res); + } + } else { + FileLog.d("Resend firebase sms because can't split JWS token"); + resendCodeFromSafetyNet(params, res); + } + } else { + FileLog.d("Resend firebase sms because JWS = null"); + resendCodeFromSafetyNet(params, res); + } + }) + .addOnFailureListener(e -> { + FileLog.e(e); + + FileLog.d("Resend firebase sms because of safetynet exception"); + resendCodeFromSafetyNet(params, res); + }); + } else */ + { + FileLog.d("Resend firebase sms because firebase is not available"); + resendCodeFromSafetyNet(params, res); + } + return; + } + params.putString("phoneHash", res.phone_code_hash); if (res.next_type instanceof TLRPC.TL_auth_codeTypeCall) { params.putInt("nextType", AUTH_TYPE_CALL); @@ -1711,9 +1844,10 @@ private void fillNextCodeParams(Bundle params, TLRPC.TL_auth_sentCode res, boole params.putInt("type", AUTH_TYPE_FLASH_CALL); params.putString("pattern", res.type.pattern); setPage(VIEW_CODE_FLASH_CALL, animate, params, false); - } else if (res.type instanceof TLRPC.TL_auth_sentCodeTypeSms) { + } else if (res.type instanceof TLRPC.TL_auth_sentCodeTypeSms || res.type instanceof TLRPC.TL_auth_sentCodeTypeFirebaseSms) { params.putInt("type", AUTH_TYPE_SMS); params.putInt("length", res.type.length); + params.putBoolean("firebase", res.type instanceof TLRPC.TL_auth_sentCodeTypeFirebaseSms); setPage(VIEW_CODE_SMS, animate, params, false); } else if (res.type instanceof TLRPC.TL_auth_sentCodeTypeFragmentSms) { params.putInt("type", AUTH_TYPE_FRAGMENT_SMS); @@ -1733,6 +1867,8 @@ private void fillNextCodeParams(Bundle params, TLRPC.TL_auth_sentCode res, boole params.putString("emailPattern", res.type.email_pattern); params.putInt("length", res.type.length); params.putInt("nextPhoneLoginDate", res.type.next_phone_login_date); + params.putInt("resetAvailablePeriod", res.type.reset_available_period); + params.putInt("resetPendingDate", res.type.reset_pending_date); setPage(VIEW_CODE_EMAIL, animate, params, false); } } @@ -1946,6 +2082,9 @@ public void afterTextChanged(Editable editable) { if (c.code.startsWith(text)) { matchedCountries++; if (c.code.equals(text)) { + if (lastMatchedCountry != null && lastMatchedCountry.code.equals(c.code)) { + matchedCountries--; + } lastMatchedCountry = c; } } @@ 
-2225,7 +2364,7 @@ public void afterTextChanged(Editable s) { testBackend = !testBackend; cell.setChecked(testBackend, true); - boolean testBackend = BuildVars.DEBUG_PRIVATE_VERSION && getConnectionsManager().isTestBackend(); + boolean testBackend = BuildVars.DEBUG_VERSION && getConnectionsManager().isTestBackend(); if (testBackend != LoginActivity.this.testBackend) { getConnectionsManager().switchBackend(false); } @@ -2314,93 +2453,95 @@ public void afterTextChanged(Editable s) { private void loadCountries() { TLRPC.TL_help_getCountriesList req = new TLRPC.TL_help_getCountriesList(); - req.lang_code = ""; + req.lang_code = LocaleController.getInstance().getCurrentLocaleInfo() != null ? LocaleController.getInstance().getCurrentLocaleInfo().getLangCode() : Locale.getDefault().getCountry(); getConnectionsManager().sendRequest(req, (response, error) -> { AndroidUtilities.runOnUIThread(() -> { - AndroidUtilities.runOnUIThread(() -> { - if (error == null) { - countriesArray.clear(); - codesMap.clear(); - phoneFormatMap.clear(); - - TLRPC.TL_help_countriesList help_countriesList = (TLRPC.TL_help_countriesList) response; - for (int i = 0; i < help_countriesList.countries.size(); i++) { - TLRPC.TL_help_country c = help_countriesList.countries.get(i); - for (int k = 0; k < c.country_codes.size(); k++) { - TLRPC.TL_help_countryCode countryCode = c.country_codes.get(k); - if (countryCode != null) { - CountrySelectActivity.Country countryWithCode = new CountrySelectActivity.Country(); - countryWithCode.name = c.default_name; - countryWithCode.code = countryCode.country_code; - countryWithCode.shortname = c.iso2; - - countriesArray.add(countryWithCode); - List countryList = codesMap.get(countryCode.country_code); - if (countryList == null) { - codesMap.put(countryCode.country_code, countryList = new ArrayList<>()); - } - countryList.add(countryWithCode); - if (countryCode.patterns.size() > 0) { - phoneFormatMap.put(countryCode.country_code, countryCode.patterns); - } + if (error == null) { + countriesArray.clear(); + codesMap.clear(); + phoneFormatMap.clear(); + + TLRPC.TL_help_countriesList help_countriesList = (TLRPC.TL_help_countriesList) response; + for (int i = 0; i < help_countriesList.countries.size(); i++) { + TLRPC.TL_help_country c = help_countriesList.countries.get(i); + for (int k = 0; k < c.country_codes.size(); k++) { + TLRPC.TL_help_countryCode countryCode = c.country_codes.get(k); + if (countryCode != null) { + CountrySelectActivity.Country countryWithCode = new CountrySelectActivity.Country(); + countryWithCode.name = c.name; + countryWithCode.defaultName = c.default_name; + if (countryWithCode.name == null && countryWithCode.defaultName != null) { + countryWithCode.name = countryWithCode.defaultName; + } + countryWithCode.code = countryCode.country_code; + countryWithCode.shortname = c.iso2; + + countriesArray.add(countryWithCode); + List countryList = codesMap.get(countryCode.country_code); + if (countryList == null) { + codesMap.put(countryCode.country_code, countryList = new ArrayList<>()); + } + countryList.add(countryWithCode); + if (countryCode.patterns.size() > 0) { + phoneFormatMap.put(countryCode.country_code, countryCode.patterns); } } } + } - if (activityMode == MODE_CHANGE_PHONE_NUMBER) { - String number = PhoneFormat.stripExceptNumbers(UserConfig.getInstance(currentAccount).getClientPhone()); - boolean ok = false; - if (!TextUtils.isEmpty(number)) { - if (number.length() > 4) { - for (int a = 4; a >= 1; a--) { - String sub = number.substring(0, a); - - 
CountrySelectActivity.Country country2; - List list = codesMap.get(sub); - if (list == null) { - country2 = null; - } else if (list.size() > 1) { - SharedPreferences preferences = MessagesController.getGlobalMainSettings(); - String lastMatched = preferences.getString("phone_code_last_matched_" + sub, null); - - if (lastMatched != null) { - country2 = list.get(list.size() - 1); - for (CountrySelectActivity.Country c : countriesArray) { - if (Objects.equals(c.shortname, lastMatched)) { - country2 = c; - break; - } + if (activityMode == MODE_CHANGE_PHONE_NUMBER) { + String number = PhoneFormat.stripExceptNumbers(UserConfig.getInstance(currentAccount).getClientPhone()); + boolean ok = false; + if (!TextUtils.isEmpty(number)) { + if (number.length() > 4) { + for (int a = 4; a >= 1; a--) { + String sub = number.substring(0, a); + + CountrySelectActivity.Country country2; + List list = codesMap.get(sub); + if (list == null) { + country2 = null; + } else if (list.size() > 1) { + SharedPreferences preferences = MessagesController.getGlobalMainSettings(); + String lastMatched = preferences.getString("phone_code_last_matched_" + sub, null); + + if (lastMatched != null) { + country2 = list.get(list.size() - 1); + for (CountrySelectActivity.Country c : countriesArray) { + if (Objects.equals(c.shortname, lastMatched)) { + country2 = c; + break; } - } else { - country2 = list.get(list.size() - 1); } } else { - country2 = list.get(0); - } - - if (country2 != null) { - ok = true; - codeField.setText(sub); - break; + country2 = list.get(list.size() - 1); } + } else { + country2 = list.get(0); } - if (!ok) { - codeField.setText(number.substring(0, 1)); + + if (country2 != null) { + ok = true; + codeField.setText(sub); + break; } } + if (!ok) { + codeField.setText(number.substring(0, 1)); + } } } - CountrySelectActivity.Country countryWithCode = new CountrySelectActivity.Country(); - String test_code = "999"; - countryWithCode.name = "Test Number"; - countryWithCode.code = test_code; - countryWithCode.shortname = "YL"; - - countriesArray.add(countryWithCode); - codesMap.put(test_code, new ArrayList<>(Collections.singletonList(countryWithCode))); - phoneFormatMap.put(test_code, Collections.singletonList("XX X XXXX")); } - }); + CountrySelectActivity.Country countryWithCode = new CountrySelectActivity.Country(); + String test_code = "999"; + countryWithCode.name = "Test Number"; + countryWithCode.code = test_code; + countryWithCode.shortname = "YL"; + + countriesArray.add(countryWithCode); + codesMap.put(test_code, new ArrayList<>(Collections.singletonList(countryWithCode))); + phoneFormatMap.put(test_code, Collections.singletonList("XX X XXXX")); + } }); }, ConnectionsManager.RequestFlagWithoutLogin | ConnectionsManager.RequestFlagFailOnServerErrors); } @@ -2590,7 +2731,7 @@ public void onNothingSelected(AdapterView adapterView) { @Override public void onNextPressed(String code) { - if (getParentActivity() == null || nextPressed) { + if (getParentActivity() == null || nextPressed || isRequestingFirebaseSms) { return; } @@ -2816,61 +2957,92 @@ private void onConfirm(PhoneNumberConfirmView confirmView) { } } - ConnectionsManager.getInstance(currentAccount).cleanup(false); - final TLRPC.TL_auth_sendCode req = new TLRPC.TL_auth_sendCode(); - req.api_hash = NekoXConfig.currentAppHash(); - req.api_id = NekoXConfig.currentAppId(); - req.phone_number = phone; - req.settings = new TLRPC.TL_codeSettings(); - req.settings.allow_flashcall = simcardAvailable && allowCall && allowCancelCall && allowReadCallLog; - 
req.settings.allow_missed_call = simcardAvailable && allowCall; - req.settings.allow_app_hash = PushListenerController.getProvider().hasServices(); - ArrayList tokens = MessagesController.getSavedLogOutTokens(); + TLRPC.TL_codeSettings settings = new TLRPC.TL_codeSettings(); + settings.allow_flashcall = simcardAvailable && allowCall && allowCancelCall && allowReadCallLog; + settings.allow_missed_call = simcardAvailable && allowCall; + settings.allow_app_hash = settings.allow_firebase = false; + // NekoX: disable app_hash and firebase login + if (forceDisableSafetyNet || TextUtils.isEmpty(BuildVars.SAFETYNET_KEY)) { + settings.allow_firebase = false; + } + + ArrayList loginTokens = AuthTokensHelper.getSavedLogInTokens(); + if (loginTokens != null) { + for (int i = 0; i < loginTokens.size(); i++) { + if (loginTokens.get(i).future_auth_token == null) { + continue; + } + if (settings.logout_tokens == null) { + settings.logout_tokens = new ArrayList<>(); + } + if (BuildVars.DEBUG_VERSION) { + FileLog.d("login token to check " + new String(loginTokens.get(i).future_auth_token, StandardCharsets.UTF_8)); + } + settings.logout_tokens.add(loginTokens.get(i).future_auth_token); + if (settings.logout_tokens.size() >= 20) { + break; + } + } + } + ArrayList tokens = AuthTokensHelper.getSavedLogOutTokens(); if (tokens != null) { for (int i = 0; i < tokens.size(); i++) { - if (req.settings.logout_tokens == null) { - req.settings.logout_tokens = new ArrayList<>(); + if (settings.logout_tokens == null) { + settings.logout_tokens = new ArrayList<>(); + } + settings.logout_tokens.add(tokens.get(i).future_auth_token); + if (settings.logout_tokens.size() >= 20) { + break; } - req.settings.logout_tokens.add(tokens.get(i).future_auth_token); } - MessagesController.saveLogOutTokens(tokens); + AuthTokensHelper.saveLogOutTokens(tokens); } - if (req.settings.logout_tokens != null) { - req.settings.flags |= 64; + if (settings.logout_tokens != null) { + settings.flags |= 64; } SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("mainconfig", Activity.MODE_PRIVATE); preferences.edit().remove("sms_hash_code").apply(); - req.settings.allow_app_hash = PushListenerController.getProvider().hasServices(); - if (req.settings.allow_flashcall) { + if (settings.allow_app_hash) { +// preferences.edit().putString("sms_hash", BuildVars.SMS_HASH).apply(); + } else { + preferences.edit().remove("sms_hash").apply(); + } + if (settings.allow_flashcall) { try { String number = ""; if (!TextUtils.isEmpty(number)) { - req.settings.current_number = PhoneNumberUtils.compare(phone, number); - if (!req.settings.current_number) { - req.settings.allow_flashcall = false; + settings.current_number = PhoneNumberUtils.compare(phone, number); + if (!settings.current_number) { + settings.allow_flashcall = false; } } else { if (UserConfig.getActivatedAccountsCount() > 0) { - req.settings.allow_flashcall = false; + settings.allow_flashcall = false; } else { - req.settings.current_number = false; + settings.current_number = false; } } } catch (Exception e) { - req.settings.allow_flashcall = false; + settings.allow_flashcall = false; FileLog.e(e); } } - TLObject reqFinal; + TLObject req; if (activityMode == MODE_CHANGE_PHONE_NUMBER) { TLRPC.TL_account_sendChangePhoneCode changePhoneCode = new TLRPC.TL_account_sendChangePhoneCode(); changePhoneCode.phone_number = phone; - changePhoneCode.settings = req.settings; - reqFinal = changePhoneCode; + changePhoneCode.settings = settings; + req = changePhoneCode; } else { 
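+                // For a normal login (not a phone-number change), the connection is reset and the settings assembled above are wrapped in a fresh TL_auth_sendCode request.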
ConnectionsManager.getInstance(currentAccount).cleanup(false); - reqFinal = req; + + TLRPC.TL_auth_sendCode sendCode = new TLRPC.TL_auth_sendCode(); + sendCode.api_hash = NekoXConfig.currentAppHash(); + sendCode.api_id = NekoXConfig.currentAppId(); + sendCode.phone_number = phone; + sendCode.settings = settings; + req = sendCode; } Bundle params = new Bundle(); @@ -2887,86 +3059,98 @@ private void onConfirm(PhoneNumberConfirmView confirmView) { phoneInputData.phoneNumber = "+" + codeField.getText() + " " + phoneField.getText(); phoneInputData.country = currentCountry; phoneInputData.patterns = phoneFormatMap.get(codeField.getText().toString()); - int reqId = ConnectionsManager.getInstance(currentAccount).sendRequest(reqFinal, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + int reqId = ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { nextPressed = false; if (error == null) { - fillNextCodeParams(params, (TLRPC.TL_auth_sentCode) response); - if (phone.startsWith("99966")) { - fillNextCodeParamsSilent(params, (TLRPC.TL_auth_sentCode) response); - String phoneHash = ((TLRPC.TL_auth_sentCode) response).phone_code_hash; - String dcId = phone.substring(5, 6); - final TLRPC.TL_auth_signIn reqI = new TLRPC.TL_auth_signIn(); - reqI.phone_number = phone; - reqI.phone_code = dcId + dcId + dcId + dcId + dcId; - reqI.phone_code_hash = phoneHash; - int reqIdI = ConnectionsManager.getInstance(currentAccount).sendRequest(reqI, (responseI, errorI) -> AndroidUtilities.runOnUIThread(() -> { - boolean ok = false; - if (errorI == null) { - nextPressed = false; - ok = true; - showDoneButton(false, true); - if (responseI instanceof TLRPC.TL_auth_authorizationSignUpRequired) { - TLRPC.TL_auth_authorizationSignUpRequired authorization = (TLRPC.TL_auth_authorizationSignUpRequired) responseI; - if (authorization.terms_of_service != null) { - currentTermsOfService = authorization.terms_of_service; + if (response instanceof TLRPC.TL_auth_sentCodeSuccess) { + TLRPC.auth_Authorization auth = ((TLRPC.TL_auth_sentCodeSuccess) response).authorization; + if (auth instanceof TLRPC.TL_auth_authorizationSignUpRequired) { + TLRPC.TL_auth_authorizationSignUpRequired authorization = (TLRPC.TL_auth_authorizationSignUpRequired) response; + if (authorization.terms_of_service != null) { + currentTermsOfService = authorization.terms_of_service; + } + setPage(VIEW_REGISTER, true, params, false); + } else { + onAuthSuccess((TLRPC.TL_auth_authorization) auth); + } + } else { + fillNextCodeParams(params, (TLRPC.auth_SentCode) response); + if (phone.startsWith("99966")) { + fillNextCodeParamsSilent(params, (TLRPC.TL_auth_sentCode) response); + String phoneHash = ((TLRPC.TL_auth_sentCode) response).phone_code_hash; + String dcId = phone.substring(5, 6); + final TLRPC.TL_auth_signIn reqI = new TLRPC.TL_auth_signIn(); + reqI.phone_number = phone; + reqI.phone_code = dcId + dcId + dcId + dcId + dcId; + reqI.phone_code_hash = phoneHash; + int reqIdI = ConnectionsManager.getInstance(currentAccount).sendRequest(reqI, (responseI, errorI) -> AndroidUtilities.runOnUIThread(() -> { + boolean ok = false; + if (errorI == null) { + nextPressed = false; + ok = true; + showDoneButton(false, true); + if (responseI instanceof TLRPC.TL_auth_authorizationSignUpRequired) { + TLRPC.TL_auth_authorizationSignUpRequired authorization = (TLRPC.TL_auth_authorizationSignUpRequired) responseI; + if (authorization.terms_of_service != null) { + currentTermsOfService = 
authorization.terms_of_service; + } + Bundle paramsI = new Bundle(); + paramsI.putString("phoneFormated", phone); + paramsI.putString("phoneHash", phoneHash); + paramsI.putString("code", reqI.phone_code); + setPage(VIEW_REGISTER, true, params, false); + } else { + onAuthSuccess((TLRPC.TL_auth_authorization) responseI); } - Bundle paramsI = new Bundle(); - paramsI.putString("phoneFormated", phone); - paramsI.putString("phoneHash", phoneHash); - paramsI.putString("code", reqI.phone_code); - setPage(VIEW_REGISTER, true, params, false); } else { - onAuthSuccess((TLRPC.TL_auth_authorization) responseI); - } - } else { - if (errorI.text.contains("SESSION_PASSWORD_NEEDED")) { - ok = true; - TLRPC.TL_account_getPassword req2 = new TLRPC.TL_account_getPassword(); - ConnectionsManager.getInstance(currentAccount).sendRequest(req2, (response1, error1) -> AndroidUtilities.runOnUIThread(() -> { + if (errorI.text.contains("SESSION_PASSWORD_NEEDED")) { + ok = true; + TLRPC.TL_account_getPassword req2 = new TLRPC.TL_account_getPassword(); + ConnectionsManager.getInstance(currentAccount).sendRequest(req2, (response1, error1) -> AndroidUtilities.runOnUIThread(() -> { + nextPressed = false; + showDoneButton(false, true); + if (error1 == null) { + TLRPC.TL_account_password password = (TLRPC.TL_account_password) response1; + if (!TwoStepVerificationActivity.canHandleCurrentPassword(password, true)) { + AlertsCreator.showUpdateAppAlert(getParentActivity(), LocaleController.getString("UpdateAppAlert", R.string.UpdateAppAlert), true); + return; + } + Bundle bundle = new Bundle(); + SerializedData data = new SerializedData(password.getObjectSize()); + password.serializeToStream(data); + bundle.putString("password", Utilities.bytesToHex(data.toByteArray())); + bundle.putString("phoneFormated", phone); + bundle.putString("phoneHash", phoneHash); + bundle.putString("code", reqI.phone_code); + setPage(LoginActivity.VIEW_PASSWORD, true, bundle, false); + } else { + needShowAlert(LocaleController.getString("NekoX", R.string.NekoX), error1.text); + } + }), ConnectionsManager.RequestFlagFailOnServerErrors | ConnectionsManager.RequestFlagWithoutLogin); + } else { nextPressed = false; showDoneButton(false, true); - if (error1 == null) { - TLRPC.TL_account_password password = (TLRPC.TL_account_password) response1; - if (!TwoStepVerificationActivity.canHandleCurrentPassword(password, true)) { - AlertsCreator.showUpdateAppAlert(getParentActivity(), LocaleController.getString("UpdateAppAlert", R.string.UpdateAppAlert), true); - return; - } - Bundle bundle = new Bundle(); - SerializedData data = new SerializedData(password.getObjectSize()); - password.serializeToStream(data); - bundle.putString("password", Utilities.bytesToHex(data.toByteArray())); - bundle.putString("phoneFormated", phone); - bundle.putString("phoneHash", phoneHash); - bundle.putString("code", reqI.phone_code); - - setPage(VIEW_PASSWORD, true, bundle, false); + + if (errorI.text.contains("PHONE_NUMBER_INVALID")) { + needShowAlert(LocaleController.getString("NekoX", R.string.NekoX), LocaleController.getString("InvalidPhoneNumber", R.string.InvalidPhoneNumber)); + } else if (errorI.text.contains("PHONE_CODE_EMPTY") || errorI.text.contains("PHONE_CODE_INVALID")) { + needShowAlert(LocaleController.getString("NekoX", R.string.NekoX), LocaleController.getString("InvalidCode", R.string.InvalidCode)); + } else if (errorI.text.contains("PHONE_CODE_EXPIRED")) { + onBackPressed(true); + setPage(VIEW_PHONE_INPUT, true, null, true); + 
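+                                            // The confirmation code expired: drop back to the phone-number screen before surfacing the alert.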
needShowAlert(LocaleController.getString("NekoX", R.string.NekoX), LocaleController.getString("CodeExpired", R.string.CodeExpired)); + } else if (errorI.text.startsWith("FLOOD_WAIT")) { + needShowAlert(LocaleController.getString("NekoX", R.string.NekoX), LocaleController.getString("FloodWait", R.string.FloodWait)); } else { - needShowAlert(LocaleController.getString("NekoX", R.string.NekoX), error1.text); + needShowAlert(LocaleController.getString("NekoX", R.string.NekoX), LocaleController.getString("ErrorOccurred", R.string.ErrorOccurred) + "\n" + errorI.text); } - }), ConnectionsManager.RequestFlagFailOnServerErrors | ConnectionsManager.RequestFlagWithoutLogin); - } else { - nextPressed = false; - showDoneButton(false, true); - - if (errorI.text.contains("PHONE_NUMBER_INVALID")) { - needShowAlert(LocaleController.getString("NekoX", R.string.NekoX), LocaleController.getString("InvalidPhoneNumber", R.string.InvalidPhoneNumber)); - } else if (errorI.text.contains("PHONE_CODE_EMPTY") || errorI.text.contains("PHONE_CODE_INVALID")) { - needShowAlert(LocaleController.getString("NekoX", R.string.NekoX), LocaleController.getString("InvalidCode", R.string.InvalidCode)); - } else if (errorI.text.contains("PHONE_CODE_EXPIRED")) { - onBackPressed(true); - setPage(VIEW_PHONE_INPUT, true, null, true); - needShowAlert(LocaleController.getString("NekoX", R.string.NekoX), LocaleController.getString("CodeExpired", R.string.CodeExpired)); - } else if (errorI.text.startsWith("FLOOD_WAIT")) { - needShowAlert(LocaleController.getString("NekoX", R.string.NekoX), LocaleController.getString("FloodWait", R.string.FloodWait)); - } else { - needShowAlert(LocaleController.getString("NekoX", R.string.NekoX), LocaleController.getString("ErrorOccurred", R.string.ErrorOccurred) + "\n" + errorI.text); } } - } - }), ConnectionsManager.RequestFlagFailOnServerErrors | ConnectionsManager.RequestFlagWithoutLogin); - needShowProgress(reqIdI, false); - showDoneButton(true, true); - return; + }), ConnectionsManager.RequestFlagFailOnServerErrors | ConnectionsManager.RequestFlagWithoutLogin); + needShowProgress(reqIdI, false); + showDoneButton(true, true); + return; + } } } else { if (error.text != null) { @@ -3002,6 +3186,8 @@ private void onConfirm(PhoneNumberConfirmView confirmView) { } else if (error.text.contains("PHONE_CODE_EMPTY") || error.text.contains("PHONE_CODE_INVALID")) { needShowAlert(LocaleController.getString(R.string.RestorePasswordNoEmailTitle), LocaleController.getString("InvalidCode", R.string.InvalidCode)); } else if (error.text.contains("PHONE_CODE_EXPIRED")) { + onBackPressed(true); + setPage(VIEW_PHONE_INPUT, true, null, true); needShowAlert(LocaleController.getString(R.string.RestorePasswordNoEmailTitle), LocaleController.getString("CodeExpired", R.string.CodeExpired)); } else if (error.text.startsWith("FLOOD_WAIT")) { needShowAlert(LocaleController.getString(R.string.RestorePasswordNoEmailTitle), LocaleController.getString("FloodWait", R.string.FloodWait)); @@ -3010,7 +3196,9 @@ private void onConfirm(PhoneNumberConfirmView confirmView) { } } } - needHideProgress(false); + if (!isRequestingFirebaseSms) { + needHideProgress(false); + } }), ConnectionsManager.RequestFlagFailOnServerErrors | ConnectionsManager.RequestFlagWithoutLogin | ConnectionsManager.RequestFlagTryDifferentDc | ConnectionsManager.RequestFlagEnableUnauthorized); needShowProgress(reqId); } @@ -3253,6 +3441,8 @@ public class LoginActivitySmsView extends SlideView implements NotificationCente @AuthType private int nextType; + private boolean 
isResendingCode = false; + private String pattern = "*"; private String prefix = ""; private String catchedPhone; @@ -3417,13 +3607,84 @@ protected void processNextPressed() { problemFrame = new FrameLayout(context); - timeText = new TextView(context); + timeText = new TextView(context) { + private LoadingDrawable loadingDrawable = new LoadingDrawable(); + + { + loadingDrawable.setAppearByGradient(true); + } + + @Override + public void setText(CharSequence text, BufferType type) { + super.setText(text, type); + + updateLoadingLayout(); + } + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + + updateLoadingLayout(); + } + + private void updateLoadingLayout() { + Layout layout = getLayout(); + if (layout == null) { + return; + } + CharSequence text = layout.getText(); + if (text == null) { + return; + } + LinkPath path = new LinkPath(true); + int start = 0; + int end = text.length(); + path.setCurrentLayout(layout, start, 0); + layout.getSelectionPath(start, end, path); + loadingDrawable.usePath(path); + loadingDrawable.setRadiiDp(4); + + int color = getThemedColor(Theme.key_chat_linkSelectBackground); + loadingDrawable.setColors( + Theme.multAlpha(color, 0.85f), + Theme.multAlpha(color, 2f), + Theme.multAlpha(color, 3.5f), + Theme.multAlpha(color, 6f) + ); + + loadingDrawable.updateBounds(); + } + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + + if (isResendingCode) { + canvas.save(); + canvas.translate(getPaddingLeft(), getPaddingTop()); + loadingDrawable.draw(canvas); + canvas.restore(); + invalidate(); + } + } + }; timeText.setLineSpacing(AndroidUtilities.dp(2), 1.0f); - timeText.setPadding(0, AndroidUtilities.dp(2), 0, AndroidUtilities.dp(10)); + timeText.setPadding(AndroidUtilities.dp(6), AndroidUtilities.dp(8), AndroidUtilities.dp(6), AndroidUtilities.dp(16)); timeText.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); timeText.setGravity(Gravity.TOP | Gravity.CENTER_HORIZONTAL); timeText.setOnClickListener(v-> { - if (nextType == AUTH_TYPE_CALL || nextType == AUTH_TYPE_SMS || nextType == AUTH_TYPE_MISSED_CALL) { +// if (isRequestingFirebaseSms || isResendingCode) { +// return; +// } + if (time > 0 && timeTimer != null) { + return; + } + isResendingCode = true; + timeText.invalidate(); + timeText.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteValueText)); + + if (nextType == AUTH_TYPE_CALL || nextType == AUTH_TYPE_SMS || nextType == AUTH_TYPE_MISSED_CALL || nextType == AUTH_TYPE_FRAGMENT_SMS) { timeText.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText6)); if (nextType == AUTH_TYPE_CALL || nextType == AUTH_TYPE_MISSED_CALL) { timeText.setText(LocaleController.getString("Calling", R.string.Calling)); @@ -3472,11 +3733,72 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { anim.setInterpolator(Easings.easeInOutQuad); errorViewSwitcher.setOutAnimation(anim); - problemText = new TextView(context); + problemText = new TextView(context) { + private LoadingDrawable loadingDrawable = new LoadingDrawable(); + + { + loadingDrawable.setAppearByGradient(true); + } + + @Override + public void setText(CharSequence text, BufferType type) { + super.setText(text, type); + + updateLoadingLayout(); + } + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + + updateLoadingLayout(); + } + + private void updateLoadingLayout() 
{ + Layout layout = getLayout(); + if (layout == null) { + return; + } + CharSequence text = layout.getText(); + if (text == null) { + return; + } + LinkPath path = new LinkPath(true); + int start = 0; + int end = text.length(); + path.setCurrentLayout(layout, start, 0); + layout.getSelectionPath(start, end, path); + loadingDrawable.usePath(path); + loadingDrawable.setRadiiDp(4); + + int color = getThemedColor(Theme.key_chat_linkSelectBackground); + loadingDrawable.setColors( + Theme.multAlpha(color, 0.85f), + Theme.multAlpha(color, 2f), + Theme.multAlpha(color, 3.5f), + Theme.multAlpha(color, 6f) + ); + + loadingDrawable.updateBounds(); + } + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + + if (isResendingCode) { + canvas.save(); + canvas.translate(getPaddingLeft(), getPaddingTop()); + loadingDrawable.draw(canvas); + canvas.restore(); + invalidate(); + } + } + }; problemText.setLineSpacing(AndroidUtilities.dp(2), 1.0f); problemText.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); problemText.setGravity(Gravity.CENTER_HORIZONTAL | Gravity.TOP); - problemText.setPadding(0, AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4)); + problemText.setPadding(AndroidUtilities.dp(6), AndroidUtilities.dp(8), AndroidUtilities.dp(6), AndroidUtilities.dp(16)); problemFrame.addView(problemText, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); errorViewSwitcher.addView(problemFrame, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); } else { @@ -3545,7 +3867,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { if (currentType != AUTH_TYPE_FRAGMENT_SMS) { problemText.setOnClickListener(v -> { - if (nextPressed) { + if (nextPressed || timeText.getVisibility() != View.GONE || isResendingCode) { return; } boolean email = nextType == 0; @@ -3632,6 +3954,14 @@ public void onCancelPressed() { } private void resendCode() { + if (nextPressed || isResendingCode || isRequestingFirebaseSms) { + return; + } + + isResendingCode = true; + timeText.invalidate(); + problemText.invalidate(); + final Bundle params = new Bundle(); params.putString("phone", phone); params.putString("ephone", emailPhone); @@ -3768,6 +4098,9 @@ public void setParams(Bundle params, boolean restore) { } else if (currentType == AUTH_TYPE_FLASH_CALL) { AndroidUtilities.setWaitingForCall(true); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.didReceiveCall); + AndroidUtilities.runOnUIThread(() -> { + CallReceiver.checkLastReceivedCall(); + }); } currentParams = params; @@ -3866,9 +4199,13 @@ public void afterTextChanged(Editable s) {} if (currentType == AUTH_TYPE_MESSAGE) { setProblemTextVisible(true); timeText.setVisibility(GONE); + if (problemText != null) { + problemText.setVisibility(VISIBLE); + } } else if (currentType == AUTH_TYPE_FLASH_CALL && (nextType == AUTH_TYPE_CALL || nextType == AUTH_TYPE_SMS)) { setProblemTextVisible(false); timeText.setVisibility(VISIBLE); + problemText.setVisibility(GONE); if (nextType == AUTH_TYPE_CALL || nextType == AUTH_TYPE_MISSED_CALL) { timeText.setText(LocaleController.formatString("CallAvailableIn", R.string.CallAvailableIn, 1, 0)); } else if (nextType == AUTH_TYPE_SMS) { @@ -3886,6 +4223,9 @@ public void afterTextChanged(Editable s) {} timeText.setText(LocaleController.formatString("CallAvailableIn", R.string.CallAvailableIn, 2, 0)); setProblemTextVisible(time < 1000); timeText.setVisibility(time < 1000 ? 
GONE : VISIBLE); + if (problemText != null) { + problemText.setVisibility(time < 1000 ? VISIBLE : GONE); + } SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("mainconfig", Activity.MODE_PRIVATE); String hash = preferences.getString("sms_hash", null); @@ -3908,9 +4248,15 @@ public void afterTextChanged(Editable s) {} timeText.setText(LocaleController.formatString("SmsAvailableIn", R.string.SmsAvailableIn, 2, 0)); setProblemTextVisible(time < 1000); timeText.setVisibility(time < 1000 ? GONE : VISIBLE); + if (problemText != null) { + problemText.setVisibility(time < 1000 ? VISIBLE : GONE); + } createTimer(); } else { timeText.setVisibility(GONE); + if (problemText != null) { + problemText.setVisibility(VISIBLE); + } setProblemTextVisible(false); createCodeTimer(); } @@ -3962,6 +4308,9 @@ public void run() { if (codeTime <= 1000) { setProblemTextVisible(true); timeText.setVisibility(GONE); + if (problemText != null) { + problemText.setVisibility(VISIBLE); + } destroyCodeTimer(); } }); @@ -4120,7 +4469,8 @@ public void onNextPressed(String code) { animateSuccess(()-> { try { - fragmentView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + if (!NekoConfig.disableVibration.Bool()) + fragmentView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); } catch (Exception ignored) {} new AlertDialog.Builder(getContext()) .setTitle(LocaleController.getString(R.string.YourPasswordSuccess)) @@ -4370,7 +4720,8 @@ private void animateSuccess(Runnable callback) { private void shakeWrongCode() { try { - codeFieldContainer.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + if (!NekoConfig.disableVibration.Bool()) + codeFieldContainer.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); } catch (Exception ignore) {} for (int a = 0; a < codeFieldContainer.codeField.length; a++) { @@ -4508,9 +4859,17 @@ public void didReceivedNotification(int id, int account, Object... args) { AndroidUtilities.endIncomingCall(); } onNextPressed(num); + CallReceiver.clearLastCall(); } } + @Override + public void onHide() { + super.onHide(); + isResendingCode = false; + nextPressed = false; + } + @Override public void saveStateParams(Bundle bundle) { String code = codeFieldContainer.getCode(); @@ -5222,7 +5581,7 @@ public void setParams(Bundle params, boolean restore) { requestPhone = currentParams.getString("phoneFormated"); phoneHash = currentParams.getString("phoneHash"); - int v = params.getBoolean("googleSignInAllowed") ? VISIBLE : GONE; +// int v = params.getBoolean("googleSignInAllowed") && PushListenerController.GooglePushListenerServiceProvider.INSTANCE.hasServices() ? 
VISIBLE : GONE; // loginOrView.setVisibility(v); // signInWithGoogleView.setVisibility(v); @@ -5235,7 +5594,8 @@ private void onPasscodeError(boolean clear) { return; } try { - emailOutlineView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + if (!NekoConfig.disableVibration.Bool()) + emailOutlineView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); } catch (Exception ignore) {} if (clear) { emailField.setText(""); @@ -5299,6 +5659,8 @@ public void onNextPressed(String code) { } else if (error.text.contains("PHONE_CODE_EMPTY") || error.text.contains("PHONE_CODE_INVALID")) { needShowAlert(LocaleController.getString(R.string.RestorePasswordNoEmailTitle), LocaleController.getString("InvalidCode", R.string.InvalidCode)); } else if (error.text.contains("PHONE_CODE_EXPIRED")) { + onBackPressed(true); + setPage(VIEW_PHONE_INPUT, true, null, true); needShowAlert(LocaleController.getString(R.string.RestorePasswordNoEmailTitle), LocaleController.getString("CodeExpired", R.string.CodeExpired)); } else if (error.text.startsWith("FLOOD_WAIT")) { needShowAlert(LocaleController.getString(R.string.RestorePasswordNoEmailTitle), LocaleController.getString("FloodWait", R.string.FloodWait)); @@ -5352,14 +5714,19 @@ public class LoginActivityEmailCodeView extends SlideView { // private TextView signInWithGoogleView; private FrameLayout resendFrameLayout; private TextView resendCodeView; + private FrameLayout cantAccessEmailFrameLayout; + private TextView cantAccessEmailView; + private TextView emailResetInView; private TextView wrongCodeView; // private LoginOrView loginOrView; private RLottieImageView inboxImageView; + private boolean resetRequestPending; private Bundle currentParams; private boolean nextPressed; // private GoogleSignInAccount googleAccount; + private int resetAvailablePeriod, resetPendingDate; private String phone, emailPhone, email; private String requestPhone, phoneHash; private boolean isFromSetup; @@ -5377,9 +5744,11 @@ public class LoginActivityEmailCodeView extends SlideView { if (errorViewSwitcher.getCurrentView() != resendFrameLayout) { errorViewSwitcher.showNext(); + AndroidUtilities.updateViewVisibilityAnimated(cantAccessEmailFrameLayout, resendCodeView.getVisibility() != VISIBLE && activityMode != MODE_CHANGE_LOGIN_EMAIL && !isSetup, 1f, true); } }; private Runnable resendCodeTimeout = () -> showResendCodeView(true); + private Runnable updateResetPendingDateCallback = this::updateResetPendingDate; public LoginActivityEmailCodeView(Context context, boolean setup) { super(context); @@ -5424,6 +5793,83 @@ protected void processNextPressed() { // NekoX: Remove signinWithGoogle + cantAccessEmailFrameLayout = new FrameLayout(context); + AndroidUtilities.updateViewVisibilityAnimated(cantAccessEmailFrameLayout, activityMode != MODE_CHANGE_LOGIN_EMAIL && !isSetup, 1f, false); + + cantAccessEmailView = new TextView(context) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(100), MeasureSpec.AT_MOST)); + } + }; + cantAccessEmailView.setText(LocaleController.getString(R.string.LoginCantAccessThisEmail)); + cantAccessEmailView.setGravity(Gravity.CENTER); + cantAccessEmailView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + cantAccessEmailView.setPadding(AndroidUtilities.dp(16), AndroidUtilities.dp(16), AndroidUtilities.dp(16), 
AndroidUtilities.dp(16)); + cantAccessEmailView.setMaxLines(2); + cantAccessEmailView.setOnClickListener(v -> { + String rawPattern = currentParams.getString("emailPattern"); + SpannableStringBuilder email = new SpannableStringBuilder(rawPattern); + int startIndex = rawPattern.indexOf('*'), endIndex = rawPattern.lastIndexOf('*'); + if (startIndex != endIndex && startIndex != -1 && endIndex != -1) { + TextStyleSpan.TextStyleRun run = new TextStyleSpan.TextStyleRun(); + run.flags |= TextStyleSpan.FLAG_STYLE_SPOILER; + run.start = startIndex; + run.end = endIndex + 1; + email.setSpan(new TextStyleSpan(run), startIndex, endIndex + 1, 0); + } + + new AlertDialog.Builder(context) + .setTitle(LocaleController.getString(R.string.LoginEmailResetTitle)) + .setMessage(AndroidUtilities.formatSpannable(AndroidUtilities.replaceTags(LocaleController.getString(R.string.LoginEmailResetMessage)), email, getTimePattern(resetAvailablePeriod))) + .setPositiveButton(LocaleController.getString(R.string.LoginEmailResetButton), (dialog, which) -> { + Bundle params = new Bundle(); + params.putString("phone", phone); + params.putString("ephone", emailPhone); + params.putString("phoneFormated", requestPhone); + + TLRPC.TL_auth_resetLoginEmail req = new TLRPC.TL_auth_resetLoginEmail(); + req.phone_number = requestPhone; + req.phone_code_hash = phoneHash; + getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + if (response instanceof TLRPC.TL_auth_sentCode) { + TLRPC.TL_auth_sentCode sentCode = (TLRPC.TL_auth_sentCode) response; + if (sentCode.type instanceof TLRPC.TL_auth_sentCodeTypeEmailCode) { + sentCode.type.email_pattern = currentParams.getString("emailPattern"); + resetRequestPending = true; + } + fillNextCodeParams(params, sentCode); + } else if (error != null && error.text != null) { + if (error.text.contains("PHONE_CODE_EXPIRED")) { + onBackPressed(true); + setPage(VIEW_PHONE_INPUT, true, null, true); + needShowAlert(LocaleController.getString(R.string.RestorePasswordNoEmailTitle), LocaleController.getString("CodeExpired", R.string.CodeExpired)); + } else { + AlertsCreator.processError(currentAccount, error, LoginActivity.this, req); + } + } + }), ConnectionsManager.RequestFlagFailOnServerErrors | ConnectionsManager.RequestFlagWithoutLogin); + }) + .setNegativeButton(LocaleController.getString(R.string.Cancel), null) + .show(); + }); + cantAccessEmailFrameLayout.addView(cantAccessEmailView); + + emailResetInView = new TextView(context) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(Math.max(MeasureSpec.getSize(heightMeasureSpec), AndroidUtilities.dp(100)), MeasureSpec.AT_MOST)); + } + }; + emailResetInView.setGravity(Gravity.CENTER); + emailResetInView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + emailResetInView.setLineSpacing(AndroidUtilities.dp(2), 1.0f); + emailResetInView.setMaxLines(3); + emailResetInView.setOnClickListener(v -> requestEmailReset()); + emailResetInView.setPadding(0, AndroidUtilities.dp(16), 0, AndroidUtilities.dp(16)); + emailResetInView.setVisibility(GONE); + cantAccessEmailFrameLayout.addView(emailResetInView); + resendCodeView = new TextView(context); resendCodeView.setGravity(Gravity.CENTER); resendCodeView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); @@ -5432,6 +5878,10 @@ protected void processNextPressed() { resendCodeView.setMaxLines(2); resendCodeView.setText(LocaleController.getString(R.string.ResendCode)); 
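+            // The click handler below ignores taps while the resend link is hidden or still animating in (visibility != VISIBLE or alpha != 1).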
resendCodeView.setOnClickListener(v -> { + if (resendCodeView.getVisibility() != View.VISIBLE || resendCodeView.getAlpha() != 1f) { + return; + } + showResendCodeView(false); TLRPC.TL_auth_resendCode req = new TLRPC.TL_auth_resendCode(); @@ -5482,20 +5932,59 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { wrongCodeView.setLineSpacing(AndroidUtilities.dp(2), 1.0f); wrongCodeView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); wrongCodeView.setGravity(Gravity.CENTER_HORIZONTAL | Gravity.TOP); - wrongCodeView.setPadding(0, AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4)); + wrongCodeView.setPadding(AndroidUtilities.dp(16), AndroidUtilities.dp(16), AndroidUtilities.dp(16), AndroidUtilities.dp(16)); errorViewSwitcher.addView(wrongCodeView); FrameLayout bottomContainer = new FrameLayout(context); if (setup) { bottomContainer.addView(errorViewSwitcher, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM, 0, 0, 0, 32)); } else { - bottomContainer.addView(errorViewSwitcher, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP, 0, 8, 0, 0)); + bottomContainer.addView(errorViewSwitcher, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP)); + bottomContainer.addView(cantAccessEmailFrameLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP)); // bottomContainer.addView(loginOrView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 16, Gravity.CENTER, 0, 0, 0, 16)); // bottomContainer.addView(signInWithGoogleView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM, 0, 0, 0, 16)); } addView(bottomContainer, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 0, 1f)); } + private boolean requestingEmailReset; + private void requestEmailReset() { + if (requestingEmailReset) { + return; + } + requestingEmailReset = true; + + Bundle params = new Bundle(); + params.putString("phone", phone); + params.putString("ephone", emailPhone); + params.putString("phoneFormated", requestPhone); + + TLRPC.TL_auth_resetLoginEmail req = new TLRPC.TL_auth_resetLoginEmail(); + req.phone_number = requestPhone; + req.phone_code_hash = phoneHash; + getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + requestingEmailReset = false; + if (response instanceof TLRPC.TL_auth_sentCode) { + TLRPC.TL_auth_sentCode sentCode = (TLRPC.TL_auth_sentCode) response; + fillNextCodeParams(params, sentCode); + } else if (error != null && error.text != null) { + if (error.text.contains("TASK_ALREADY_EXISTS")) { + new AlertDialog.Builder(getContext()) + .setTitle(LocaleController.getString(R.string.LoginEmailResetPremiumRequiredTitle)) + .setMessage(AndroidUtilities.replaceTags(LocaleController.formatString(R.string.LoginEmailResetPremiumRequiredMessage, LocaleController.addNbsp(PhoneFormat.getInstance().format("+" + requestPhone))))) + .setPositiveButton(LocaleController.getString(R.string.OK), null) + .show(); + } else if (error.text.contains("PHONE_CODE_EXPIRED")) { + onBackPressed(true); + setPage(VIEW_PHONE_INPUT, true, null, true); + needShowAlert(LocaleController.getString(R.string.RestorePasswordNoEmailTitle), LocaleController.getString("CodeExpired", R.string.CodeExpired)); + } else { + AlertsCreator.processError(currentAccount, error, LoginActivity.this, req); + } + } + }), ConnectionsManager.RequestFlagFailOnServerErrors | 
ConnectionsManager.RequestFlagWithoutLogin); + } + @Override public void updateColors() { titleView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); @@ -5503,6 +5992,8 @@ public void updateColors() { // signInWithGoogleView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlueText4)); // loginOrView.updateColors(); resendCodeView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlueText4)); + cantAccessEmailView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlueText4)); + emailResetInView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText6)); wrongCodeView.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); codeFieldContainer.invalidate(); @@ -5517,6 +6008,7 @@ protected void onDetachedFromWindow() { private void showResendCodeView(boolean show) { AndroidUtilities.updateViewVisibilityAnimated(resendCodeView, show); + AndroidUtilities.updateViewVisibilityAnimated(cantAccessEmailFrameLayout, !show && activityMode != MODE_CHANGE_LOGIN_EMAIL && !isSetup); // if (loginOrView.getVisibility() != GONE) { // loginOrView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 16, Gravity.CENTER, 0, 0, 0, show ? 8 : 16)); @@ -5553,11 +6045,23 @@ public void setParams(Bundle params, boolean restore) { isFromSetup = currentParams.getBoolean("setup"); length = currentParams.getInt("length"); email = currentParams.getString("email"); + resetAvailablePeriod = currentParams.getInt("resetAvailablePeriod"); + resetPendingDate = currentParams.getInt("resetPendingDate"); if (activityMode == MODE_CHANGE_LOGIN_EMAIL) { confirmTextView.setText(LocaleController.formatString(R.string.CheckYourNewEmailSubtitle, email)); + AndroidUtilities.updateViewVisibilityAnimated(cantAccessEmailFrameLayout, false, 1f, false); } else if (isSetup) { confirmTextView.setText(LocaleController.formatString(R.string.VerificationCodeSubtitle, email)); + AndroidUtilities.updateViewVisibilityAnimated(cantAccessEmailFrameLayout, false, 1f, false); + } else { + AndroidUtilities.updateViewVisibilityAnimated(cantAccessEmailFrameLayout, true, 1f, false); + + cantAccessEmailView.setVisibility(resetPendingDate == 0 ? VISIBLE : GONE); + emailResetInView.setVisibility(resetPendingDate != 0 ? VISIBLE : GONE); + if (resetPendingDate != 0) { + updateResetPendingDate(); + } } codeFieldContainer.setNumbersCount(length, AUTH_TYPE_MESSAGE); @@ -5602,12 +6106,95 @@ public void afterTextChanged(Editable s) {} confirmTextView.setText(AndroidUtilities.formatSpannable(LocaleController.getString(R.string.CheckYourEmailSubtitle), confirmText)); } -// int v = params.getBoolean("googleSignInAllowed") ? VISIBLE : GONE; +// int v = params.getBoolean("googleSignInAllowed") && PushListenerController.GooglePushListenerServiceProvider.INSTANCE.hasServices() ? 
VISIBLE : GONE; // loginOrView.setVisibility(v); // signInWithGoogleView.setVisibility(v); showKeyboard(codeFieldContainer.codeField[0]); codeFieldContainer.requestFocus(); + + if (!restore && params.containsKey("nextType")) { + AndroidUtilities.runOnUIThread(resendCodeTimeout, params.getInt("timeout")); + } + + if (resetPendingDate != 0) { + AndroidUtilities.runOnUIThread(updateResetPendingDateCallback, 1000); + } + } + + @Override + public void onHide() { + super.onHide(); + + if (resetPendingDate != 0) { + AndroidUtilities.cancelRunOnUIThread(updateResetPendingDateCallback); + } + } + + private String getTimePatternForTimer(int timeRemaining) { + int days = timeRemaining / 86400; + int hours = (timeRemaining % 86400) / 3600; + int minutes = ((timeRemaining % 86400) % 3600) / 60; + int seconds = ((timeRemaining % 86400) % 3600) % 60; + + if (hours >= 16) { + days++; + } + + String time; + if (days != 0) { + time = LocaleController.formatString(R.string.LoginEmailResetInSinglePattern, LocaleController.formatPluralString("Days", days)); + } else { + String timer = (hours != 0 ? String.format(Locale.ROOT, "%02d:", hours) : "") + String.format(Locale.ROOT, "%02d:", minutes) + String.format(Locale.ROOT, "%02d", seconds); + time = LocaleController.formatString(R.string.LoginEmailResetInSinglePattern, timer); + } + return time; + } + + private String getTimePattern(int timeRemaining) { + int days = timeRemaining / 86400; + int hours = (timeRemaining % 86400) / 3600; + int minutes = ((timeRemaining % 86400) % 3600) / 60; + + if (days == 0 && hours == 0) { + minutes = Math.max(1, minutes); + } + + String time; + if (days != 0 && hours != 0) { + time = LocaleController.formatString(R.string.LoginEmailResetInDoublePattern, LocaleController.formatPluralString("Days", days), LocaleController.formatPluralString("Hours", hours)); + } else if (hours != 0 && minutes != 0) { + time = LocaleController.formatString(R.string.LoginEmailResetInDoublePattern, LocaleController.formatPluralString("Hours", hours), LocaleController.formatPluralString("Minutes", minutes)); + } else if (days != 0) { + time = LocaleController.formatString(R.string.LoginEmailResetInSinglePattern, LocaleController.formatPluralString("Days", days)); + } else if (hours != 0) { + time = LocaleController.formatString(R.string.LoginEmailResetInSinglePattern, LocaleController.formatPluralString("Hours", hours)); + } else { + time = LocaleController.formatString(R.string.LoginEmailResetInSinglePattern, LocaleController.formatPluralString("Minutes", minutes)); + } + return time; + } + + private void updateResetPendingDate() { + int timeRemaining = (int) (resetPendingDate - System.currentTimeMillis() / 1000L); + if (resetPendingDate <= 0 || timeRemaining <= 0) { + emailResetInView.setVisibility(VISIBLE); + emailResetInView.setText(LocaleController.getString(R.string.LoginEmailResetPleaseWait)); + AndroidUtilities.runOnUIThread(this::requestEmailReset, 1000); + return; + } + String str = LocaleController.formatString(R.string.LoginEmailResetInTime, getTimePatternForTimer(timeRemaining)); + SpannableStringBuilder ssb = SpannableStringBuilder.valueOf(str); + int startIndex = str.indexOf('*'), endIndex = str.lastIndexOf('*'); + if (startIndex != endIndex && startIndex != -1 && endIndex != -1) { + ssb.replace(endIndex, endIndex + 1, ""); + ssb.replace(startIndex, startIndex + 1, ""); + ssb.setSpan(new ForegroundColorSpan(getThemedColor(Theme.key_windowBackgroundWhiteBlueText4)), startIndex, endIndex - 1, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + } + +
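+            // Show the countdown with the highlighted time span, then reschedule this update in one second (runOnUIThread below).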
emailResetInView.setText(ssb); + + AndroidUtilities.runOnUIThread(updateResetPendingDateCallback, 1000); } private void onPasscodeError(boolean clear) { @@ -5615,7 +6202,8 @@ private void onPasscodeError(boolean clear) { return; } try { - codeFieldContainer.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + if (!NekoConfig.disableVibration.Bool()) + codeFieldContainer.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); } catch (Exception ignore) {} if (clear) { for (CodeNumberField f : codeFieldContainer.codeField) { @@ -5650,8 +6238,10 @@ public void onNextPressed(String code) { AndroidUtilities.cancelRunOnUIThread(resendCodeTimeout); codeFieldContainer.isFocusSuppressed = true; - for (CodeNumberField f : codeFieldContainer.codeField) { - f.animateFocusedProgress(0); + if (codeFieldContainer.codeField != null) { + for (CodeNumberField f : codeFieldContainer.codeField) { + f.animateFocusedProgress(0); + } } code = codeFieldContainer.getCode(); @@ -5699,8 +6289,10 @@ public void onNextPressed(String code) { } codeFieldContainer.isFocusSuppressed = true; - for (CodeNumberField f : codeFieldContainer.codeField) { - f.animateFocusedProgress(0); + if (codeFieldContainer.codeField != null) { + for (CodeNumberField f : codeFieldContainer.codeField) { + f.animateFocusedProgress(0); + } } String finalCode = code; @@ -5785,12 +6377,14 @@ public void onNextPressed(String code) { } if (!isWrongCode) { - for (int a = 0; a < codeFieldContainer.codeField.length; a++) { - codeFieldContainer.codeField[a].setText(""); + if (codeFieldContainer.codeField != null) { + for (int a = 0; a < codeFieldContainer.codeField.length; a++) { + codeFieldContainer.codeField[a].setText(""); + } + codeFieldContainer.codeField[0].requestFocus(); } codeFieldContainer.isFocusSuppressed = false; - codeFieldContainer.codeField[0].requestFocus(); } } } @@ -5819,7 +6413,8 @@ private void animateSuccess(Runnable callback) { private void shakeWrongCode() { try { - codeFieldContainer.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + if (!NekoConfig.disableVibration.Bool()) + codeFieldContainer.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); } catch (Exception ignore) {} for (int a = 0; a < codeFieldContainer.codeField.length; a++) { @@ -5828,6 +6423,7 @@ private void shakeWrongCode() { } if (errorViewSwitcher.getCurrentView() == resendFrameLayout) { errorViewSwitcher.showNext(); + AndroidUtilities.updateViewVisibilityAnimated(cantAccessEmailFrameLayout, false, 1f, true); } codeFieldContainer.codeField[0].requestFocus(); AndroidUtilities.shakeViewSpring(codeFieldContainer, 10f, () -> { @@ -5848,16 +6444,18 @@ private void shakeWrongCode() { @Override public void onShow() { super.onShow(); + if (resetRequestPending) { + resetRequestPending = false; + return; + } AndroidUtilities.runOnUIThread(() -> { inboxImageView.getAnimatedDrawable().setCurrentFrame(0, false); inboxImageView.playAnimation(); - if (codeFieldContainer != null) { + if (codeFieldContainer != null && codeFieldContainer.codeField != null) { codeFieldContainer.setText(""); codeFieldContainer.codeField[0].requestFocus(); } - - AndroidUtilities.runOnUIThread(resendCodeTimeout, 60000); }, SHOW_DELAY); } @@ -6062,7 +6660,8 @@ private void onPasscodeError(boolean clear) { return; } try { - 
codeFieldContainer.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + if (!NekoConfig.disableVibration.Bool()) + codeFieldContainer.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); } catch (Exception ignore) {} if (clear) { for (CodeNumberField f : codeFieldContainer.codeField) { @@ -6399,7 +6998,8 @@ private void onPasscodeError(boolean clear, int num) { return; } try { - codeField[num].performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + if (!NekoConfig.disableVibration.Bool()) + codeField[num].performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); } catch (Exception ignore) {} AndroidUtilities.shakeView(codeField[num]); } @@ -6636,7 +7236,7 @@ public LoginActivityRegisterView(Context context) { setOrientation(VERTICAL); - imageUpdater = new ImageUpdater(false); + imageUpdater = new ImageUpdater(false, ImageUpdater.FOR_TYPE_USER, false); imageUpdater.setOpenWithFrontfaceCamera(true); imageUpdater.setSearchAvailable(false); imageUpdater.setUploadAfterSelect(false); @@ -6937,7 +7537,7 @@ private void buildEditTextLayout(boolean small) { } @Override - public void didUploadPhoto(final TLRPC.InputFile photo, final TLRPC.InputFile video, double videoStartTimestamp, String videoPath, final TLRPC.PhotoSize bigSize, final TLRPC.PhotoSize smallSize, boolean isVideo) { + public void didUploadPhoto(final TLRPC.InputFile photo, final TLRPC.InputFile video, double videoStartTimestamp, String videoPath, final TLRPC.PhotoSize bigSize, final TLRPC.PhotoSize smallSize, boolean isVideo, TLRPC.VideoSize emojiMarkup) { AndroidUtilities.runOnUIThread(() -> { avatar = smallSize.location; avatarBig = bigSize.location; @@ -7116,6 +7716,8 @@ public void onNextPressed(String code) { } else if (error.text.contains("PHONE_CODE_EMPTY") || error.text.contains("PHONE_CODE_INVALID")) { needShowAlert(LocaleController.getString(R.string.RestorePasswordNoEmailTitle), LocaleController.getString("InvalidCode", R.string.InvalidCode)); } else if (error.text.contains("PHONE_CODE_EXPIRED")) { + onBackPressed(true); + setPage(VIEW_PHONE_INPUT, true, null, true); needShowAlert(LocaleController.getString(R.string.RestorePasswordNoEmailTitle), LocaleController.getString("CodeExpired", R.string.CodeExpired)); } else if (error.text.contains("FIRSTNAME_INVALID")) { needShowAlert(LocaleController.getString(R.string.RestorePasswordNoEmailTitle), LocaleController.getString("InvalidFirstName", R.string.InvalidFirstName)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LogoutActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LogoutActivity.java index 17e9ce3098..72da07dea9 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/LogoutActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LogoutActivity.java @@ -155,7 +155,7 @@ public static AlertDialog makeLogOutDialog(Context context, int currentAccount) AlertDialog alertDialog = builder.create(); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } return alertDialog; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LongPressListenerWithMovingGesture.java 
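Several hunks above, and more below, wrap performHapticFeedback in the same guard: skip it when the fork's disable-vibration setting is on, and swallow the occasional device-specific exception. A hypothetical helper like the following would keep that pattern in one place; VibrationGate and its vibrationDisabled field are invented names standing in for NekoConfig.disableVibration.Bool(), not part of the actual API.

import android.view.HapticFeedbackConstants;
import android.view.View;

// Hypothetical wrapper for the repeated haptic-feedback guard in the patch.
public final class VibrationGate {

    // Stand-in for the fork's NekoConfig.disableVibration.Bool() flag.
    public static volatile boolean vibrationDisabled = false;

    private VibrationGate() {}

    public static void keyboardTap(View view) {
        if (view == null || vibrationDisabled) {
            return;
        }
        try {
            view.performHapticFeedback(
                    HapticFeedbackConstants.KEYBOARD_TAP,
                    HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING);
        } catch (Exception ignore) {
            // Some devices throw here; the original call sites swallow this too.
        }
    }
}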
b/TMessagesProj/src/main/java/org/telegram/ui/LongPressListenerWithMovingGesture.java index f9686d8a74..6d450a8d61 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/LongPressListenerWithMovingGesture.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LongPressListenerWithMovingGesture.java @@ -5,6 +5,7 @@ import android.view.MotionEvent; import android.view.View; +import org.telegram.messenger.AndroidUtilities; import org.telegram.ui.ActionBar.ActionBarPopupWindow; import org.telegram.ui.Components.GestureDetector2; @@ -15,7 +16,7 @@ public class LongPressListenerWithMovingGesture implements View.OnTouchListener ActionBarPopupWindow submenu; Rect rect = new Rect(); boolean subItemClicked; - + boolean tapConfirmedOrCanceled; GestureDetector2 gestureDetector2 = new GestureDetector2(new GestureDetector2.OnGestureListener() { @Override public boolean onDown(MotionEvent e) { @@ -54,8 +55,13 @@ public void onShowPress(MotionEvent e) { @Override public boolean onSingleTapUp(MotionEvent e) { + if (tapConfirmedOrCanceled) { + return false; + } if (view != null) { view.callOnClick(); + tapConfirmedOrCanceled = true; + return true; } return false; } @@ -85,10 +91,16 @@ public LongPressListenerWithMovingGesture() { gestureDetector2.setIsLongpressEnabled(true); } - + float startFromX; + float startFromY; @Override public boolean onTouch(View v, MotionEvent event) { view = v; + if (event.getAction() == MotionEvent.ACTION_DOWN) { + startFromX = event.getX(); + startFromY = event.getY(); + tapConfirmedOrCanceled = false; + } gestureDetector2.onTouchEvent(event); if (submenu != null && !subItemClicked && event.getAction() == MotionEvent.ACTION_MOVE) { view.getLocationOnScreen(location); @@ -124,9 +136,18 @@ public boolean onTouch(View v, MotionEvent event) { } } } - if (event.getAction() == MotionEvent.ACTION_UP && !subItemClicked && selectedMenuView != null) { - selectedMenuView.callOnClick(); - subItemClicked = true; + if (event.getAction() == MotionEvent.ACTION_MOVE && Math.abs(event.getX() - startFromX) > AndroidUtilities.touchSlop * 2 || Math.abs(event.getY() - startFromY) > AndroidUtilities.touchSlop * 2) { + tapConfirmedOrCanceled = true; + view.setPressed(false); + view.setSelected(false); + } + if (event.getAction() == MotionEvent.ACTION_UP && !subItemClicked && !tapConfirmedOrCanceled) { + if (selectedMenuView != null) { + selectedMenuView.callOnClick(); + subItemClicked = true; + } else if (submenu == null && view != null) { + view.callOnClick(); + } } return true; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ManageLinksActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ManageLinksActivity.java index 64c292a792..4d290c5fc6 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ManageLinksActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ManageLinksActivity.java @@ -1125,7 +1125,7 @@ public LinkCell(@NonNull Context context) { AlertDialog alert = builder.create(); builder.show(); if (redLastItem) { - alert.setItemColor(items.size() - 1, Theme.getColor(Theme.key_dialogTextRed2), Theme.getColor(Theme.key_dialogRedIcon)); + alert.setItemColor(items.size() - 1, Theme.getColor(Theme.key_dialogTextRed), Theme.getColor(Theme.key_dialogRedIcon)); } }); optionsView.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_listSelector), 1)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/MessageSeenView.java b/TMessagesProj/src/main/java/org/telegram/ui/MessageSeenView.java index d45ab4a821..5e33ed9312 100644 --- 
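The LongPressListenerWithMovingGesture change above records the ACTION_DOWN coordinates and cancels the pending tap once the finger travels more than twice the touch slop, clearing the pressed and selected state as it does so. One detail worth noting: in the patched condition, action == ACTION_MOVE && |dx| > slop * 2 || |dy| > slop * 2, the && binds tighter than ||, so the vertical test runs for every event, not only moves. Below is a framework-free sketch of the same tap-versus-drag tracking with the movement test parenthesized; TapTracker is an illustrative name, not the listener class itself.

// Minimal tap-vs-drag tracker mirroring the slop logic in the hunk above.
public class TapTracker {

    private final float touchSlop;
    private float startX, startY;
    private boolean tapCanceled;

    public TapTracker(float touchSlop) {
        this.touchSlop = touchSlop;
    }

    public void onDown(float x, float y) {
        startX = x;
        startY = y;
        tapCanceled = false;
    }

    // Returns true once the gesture should no longer count as a tap.
    public boolean onMove(float x, float y) {
        if (Math.abs(x - startX) > touchSlop * 2 || Math.abs(y - startY) > touchSlop * 2) {
            tapCanceled = true;
        }
        return tapCanceled;
    }

    public boolean onUp() {
        return !tapCanceled; // true means the click should still be delivered
    }

    public static void main(String[] args) {
        TapTracker t = new TapTracker(8f);
        t.onDown(100, 100);
        t.onMove(104, 103);           // within slop, still a tap
        System.out.println(t.onUp()); // true
        t.onDown(100, 100);
        t.onMove(140, 100);           // dragged past the threshold
        System.out.println(t.onUp()); // false
    }
}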
a/TMessagesProj/src/main/java/org/telegram/ui/MessageSeenView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/MessageSeenView.java @@ -1,11 +1,20 @@ package org.telegram.ui; import android.content.Context; +import android.graphics.Canvas; +import android.graphics.ColorFilter; +import android.graphics.PixelFormat; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.Rect; import android.graphics.drawable.Drawable; +import android.text.SpannableStringBuilder; +import android.text.Spanned; import android.text.TextUtils; +import android.text.style.DynamicDrawableSpan; +import android.text.style.ImageSpan; +import android.util.Log; +import android.util.Pair; import android.util.TypedValue; import android.view.Gravity; import android.view.View; @@ -16,6 +25,7 @@ import android.widget.TextView; import androidx.annotation.NonNull; +import androidx.annotation.Nullable; import androidx.core.content.ContextCompat; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.RecyclerView; @@ -27,11 +37,15 @@ import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; +import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; +import org.telegram.messenger.UserConfig; +import org.telegram.messenger.UserObject; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.SimpleTextView; import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.AnimatedEmojiDrawable; import org.telegram.ui.Components.AvatarDrawable; import org.telegram.ui.Components.AvatarsDrawable; import org.telegram.ui.Components.AvatarsImageView; @@ -39,6 +53,7 @@ import org.telegram.ui.Components.FlickerLoadingView; import org.telegram.ui.Components.HideViewAfterAnimation; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.MessageSeenCheckDrawable; import org.telegram.ui.Components.RecyclerListView; import java.util.ArrayList; @@ -47,6 +62,7 @@ public class MessageSeenView extends FrameLayout { ArrayList peerIds = new ArrayList<>(); + ArrayList dates = new ArrayList<>(); public ArrayList users = new ArrayList<>(); AvatarsImageView avatarsImageView; SimpleTextView titleView; @@ -102,16 +118,29 @@ public MessageSeenView(@NonNull Context context, int currentAccount, MessageObje TLRPC.Vector vector = (TLRPC.Vector) response; ArrayList unknownUsers = new ArrayList<>(); HashMap usersLocal = new HashMap<>(); - ArrayList allPeers = new ArrayList<>(); + ArrayList> allPeers = new ArrayList<>(); for (int i = 0, n = vector.objects.size(); i < n; i++) { Object object = vector.objects.get(i); - if (object instanceof Long) { + if (object instanceof TLRPC.TL_readParticipantDate) { + int date = ((TLRPC.TL_readParticipantDate) object).date; + Long peerId = ((TLRPC.TL_readParticipantDate) object).user_id; + if (finalFromId == peerId) { + continue; + } + TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(peerId); + allPeers.add(new Pair<>(peerId, date)); + if (true || user == null) { + unknownUsers.add(peerId); + } else { + usersLocal.put(peerId, user); + } + } else if (object instanceof Long) { Long peerId = (Long) object; if (finalFromId == peerId) { continue; } TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(peerId); - allPeers.add(peerId); + allPeers.add(new Pair<>(peerId, 0)); if (true || user == null) { 
unknownUsers.add(peerId); } else { @@ -122,8 +151,10 @@ public MessageSeenView(@NonNull Context context, int currentAccount, MessageObje if (unknownUsers.isEmpty()) { for (int i = 0; i < allPeers.size(); i++) { - peerIds.add(allPeers.get(i)); - users.add(usersLocal.get(allPeers.get(i))); + Pair pair = allPeers.get(i); + peerIds.add(pair.first); + dates.add(pair.second); + users.add(usersLocal.get(pair.first)); } updateView(); } else { @@ -142,8 +173,10 @@ public MessageSeenView(@NonNull Context context, int currentAccount, MessageObje usersLocal.put(user.id, user); } for (int i = 0; i < allPeers.size(); i++) { - peerIds.add(allPeers.get(i)); - this.users.add(usersLocal.get(allPeers.get(i))); + Pair pair = allPeers.get(i); + peerIds.add(pair.first); + dates.add(pair.second); + this.users.add(usersLocal.get(pair.first)); } } updateView(); @@ -160,8 +193,10 @@ public MessageSeenView(@NonNull Context context, int currentAccount, MessageObje usersLocal.put(user.id, user); } for (int i = 0; i < allPeers.size(); i++) { - peerIds.add(allPeers.get(i)); - this.users.add(usersLocal.get(allPeers.get(i))); + Pair pair = allPeers.get(i); + peerIds.add(pair.first); + dates.add(pair.second); + this.users.add(usersLocal.get(pair.first)); } } updateView(); @@ -242,14 +277,23 @@ private void updateView() { titleView.animate().alpha(1f).setDuration(220).start(); avatarsImageView.animate().alpha(1f).setDuration(220).start(); flickerLoadingView.animate().alpha(0f).setDuration(220).setListener(new HideViewAfterAnimation(flickerLoadingView)).start(); + + if (listView != null && listView.getAdapter() != null) { +// listView.getAdapter().notifyDataSetChanged(); + } } + private RecyclerListView listView; + public RecyclerListView createListView() { - RecyclerListView recyclerListView = new RecyclerListView(getContext()) { + if (listView != null) { + return listView; + } + listView = new RecyclerListView(getContext()) { @Override protected void onMeasure(int widthSpec, int heightSpec) { int height = MeasureSpec.getSize(heightSpec); - int listViewTotalHeight = AndroidUtilities.dp(8) + AndroidUtilities.dp(44) * getAdapter().getItemCount(); + int listViewTotalHeight = AndroidUtilities.dp(4) + AndroidUtilities.dp(50) * getAdapter().getItemCount(); if (listViewTotalHeight > height) { listViewTotalHeight = height; @@ -258,20 +302,17 @@ protected void onMeasure(int widthSpec, int heightSpec) { super.onMeasure(widthSpec, MeasureSpec.makeMeasureSpec(listViewTotalHeight, MeasureSpec.EXACTLY)); } }; - recyclerListView.setLayoutManager(new LinearLayoutManager(getContext())); - recyclerListView.addItemDecoration(new RecyclerView.ItemDecoration() { + listView.setLayoutManager(new LinearLayoutManager(getContext())); + listView.addItemDecoration(new RecyclerView.ItemDecoration() { @Override public void getItemOffsets(@NonNull Rect outRect, @NonNull View view, @NonNull RecyclerView parent, @NonNull RecyclerView.State state) { int p = parent.getChildAdapterPosition(view); - if (p == 0) { - outRect.top = AndroidUtilities.dp(4); - } if (p == users.size() - 1) { outRect.bottom = AndroidUtilities.dp(4); } } }); - recyclerListView.setAdapter(new RecyclerListView.SelectionAdapter() { + listView.setAdapter(new RecyclerListView.SelectionAdapter() { @Override public boolean isEnabled(RecyclerView.ViewHolder holder) { @@ -281,14 +322,14 @@ public boolean isEnabled(RecyclerView.ViewHolder holder) { @Override public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { UserCell userCell = new UserCell(parent.getContext()); 
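The seen-by list above now measures itself as 4dp of padding plus 50dp per row, clamped to the height the parent offers, and passes the result down as an EXACTLY spec. A simplified sketch of that measurement is below; ClampedListView is an invented name, and the real code overrides onMeasure on a RecyclerListView and uses AndroidUtilities.dp.

import android.content.Context;
import android.view.View;

// Sketch of "grow with the content, but never past the available height".
public class ClampedListView extends View {

    private final float density;
    private int itemCount;

    public ClampedListView(Context context) {
        super(context);
        density = context.getResources().getDisplayMetrics().density;
    }

    public void setItemCount(int count) {
        itemCount = count;
        requestLayout();
    }

    private int dp(float value) {
        return (int) Math.ceil(density * value);
    }

    @Override
    protected void onMeasure(int widthSpec, int heightSpec) {
        int available = MeasureSpec.getSize(heightSpec);
        int wanted = dp(4) + dp(50) * itemCount;  // bottom padding plus one row per item
        int height = Math.min(wanted, available); // never taller than the parent allows
        super.onMeasure(widthSpec, MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));
    }
}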
- userCell.setLayoutParams(new RecyclerView.LayoutParams(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + userCell.setLayoutParams(new RecyclerView.LayoutParams(LayoutHelper.MATCH_PARENT, AndroidUtilities.dp(50))); return new RecyclerListView.Holder(userCell); } @Override public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { UserCell cell = (UserCell) holder.itemView; - cell.setUser(users.get(position)); + cell.setUser(users.get(position), dates.get(position)); } @Override @@ -297,48 +338,121 @@ public int getItemCount() { } }); - return recyclerListView; + return listView; } - private static class UserCell extends FrameLayout { + private static class UserCell extends FrameLayout implements NotificationCenter.NotificationCenterDelegate { + + private int currentAccount = UserConfig.selectedAccount; BackupImageView avatarImageView; - TextView nameView; + SimpleTextView nameView; + TextView readView; AvatarDrawable avatarDrawable = new AvatarDrawable(); + AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable rightDrawable; + + TLRPC.User user; + + private static MessageSeenCheckDrawable seenDrawable = new MessageSeenCheckDrawable(R.drawable.msg_mini_checks, Theme.key_windowBackgroundWhiteGrayText); public UserCell(Context context) { super(context); avatarImageView = new BackupImageView(context); - addView(avatarImageView, LayoutHelper.createFrame(32, 32, Gravity.CENTER_VERTICAL, 13, 0, 0, 0)); - avatarImageView.setRoundRadius(AndroidUtilities.dp(16)); - nameView = new TextView(context); - nameView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); - nameView.setLines(1); - nameView.setEllipsize(TextUtils.TruncateAt.END); - nameView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO); - addView(nameView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.CENTER_VERTICAL, 59, 0, 13, 0)); + avatarImageView.setRoundRadius(AndroidUtilities.dp(18)); + addView(avatarImageView, LayoutHelper.createFrame(34, 34, Gravity.CENTER_VERTICAL, 10f, 0, 0, 0)); + nameView = new SimpleTextView(context); + nameView.setTextSize(16); + nameView.setEllipsizeByGradient(true); + nameView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO); nameView.setTextColor(Theme.getColor(Theme.key_actionBarDefaultSubmenuItem)); + addView(nameView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 55, 6.33f, 8, 0)); + + rightDrawable = new AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable(this, AndroidUtilities.dp(18)); + nameView.setDrawablePadding(AndroidUtilities.dp(3)); + nameView.setRightDrawable(rightDrawable); + + readView = new TextView(context); + readView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 13); + readView.setLines(1); + readView.setEllipsize(TextUtils.TruncateAt.END); + readView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO); + readView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); + addView(readView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 55, 20, 13, 0)); } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(44), View.MeasureSpec.EXACTLY)); + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(50), View.MeasureSpec.EXACTLY)); } - public void setUser(TLRPC.User user) { + public void setUser(TLRPC.User user, int 
date) { + this.user = user; + updateStatus(false); + if (user != null) { avatarDrawable.setInfo(user); ImageLocation imageLocation = ImageLocation.getForUser(user, ImageLocation.TYPE_SMALL); avatarImageView.setImage(imageLocation, "50_50", avatarDrawable, user); nameView.setText(ContactsController.formatName(user.first_name, user.last_name)); } + + if (date <= 0) { + readView.setVisibility(GONE); + nameView.setTranslationY(AndroidUtilities.dp(9)); + } else { + readView.setText(TextUtils.concat(seenDrawable.getSpanned(getContext()), LocaleController.formatSeenDate(date))); + readView.setVisibility(VISIBLE); + nameView.setTranslationY(0); + } } @Override public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { super.onInitializeAccessibilityNodeInfo(info); - info.setText(LocaleController.formatString("AccDescrPersonHasSeen", R.string.AccDescrPersonHasSeen, nameView.getText())); + String text = LocaleController.formatString("AccDescrPersonHasSeen", R.string.AccDescrPersonHasSeen, nameView.getText()); + if (readView.getVisibility() == VISIBLE) { + text += " " + readView.getText(); + } + info.setText(text); + } + + @Override + public void didReceivedNotification(int id, int account, Object... args) { + if (id == NotificationCenter.userEmojiStatusUpdated) { + TLRPC.User user = (TLRPC.User) args[0]; + if (this.user != null && user != null && this.user.id == user.id) { + this.user = user; + updateStatus(true); + } + } + } + + private void updateStatus(boolean animated) { + Long documentId = UserObject.getEmojiStatusDocumentId(user); + if (documentId == null) { + rightDrawable.set((Drawable) null, animated); + } else { + rightDrawable.set(documentId, animated); + } + } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + if (rightDrawable != null) { + rightDrawable.attach(); + } + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.userEmojiStatusUpdated); + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + if (rightDrawable != null) { + rightDrawable.detach(); + } + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.userEmojiStatusUpdated); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/NewContactBottomSheet.java b/TMessagesProj/src/main/java/org/telegram/ui/NewContactBottomSheet.java index 6bcf59d8b3..5a2b889ff4 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/NewContactBottomSheet.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/NewContactBottomSheet.java @@ -40,6 +40,7 @@ import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; +import org.telegram.messenger.UserConfig; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.BaseFragment; @@ -104,6 +105,7 @@ public class NewContactBottomSheet extends BottomSheet implements AdapterView.On public NewContactBottomSheet(BaseFragment parentFragment, Context context) { super(context, true); + waitingKeyboard = true; smoothKeyboardAnimationEnabled = true; classGuid = ConnectionsManager.generateClassGuid(); this.parentFragment = parentFragment; @@ -628,7 +630,6 @@ public void afterTextChanged(Editable s) { plusTextView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); codeDividerView.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhiteInputField)); - return fragmentView; } @@ -699,7 +700,7 @@ public 
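The rewritten UserCell above attaches its animated emoji-status drawable and subscribes to userEmojiStatusUpdated when the cell enters the window, and undoes both in onDetachedFromWindow so recycled cells do not leak observers. A generic sketch of that attach/detach pairing follows; the Listener interface and EMOJI_STATUS_BUS set are invented for illustration and are not the NotificationCenter API.

import android.content.Context;
import android.view.View;

import java.util.Collections;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative attach/detach pairing, mirroring the UserCell lifecycle change above.
public class StatusAwareCell extends View {

    interface Listener {
        void onStatusChanged(long userId);
    }

    // Made-up process-wide bus standing in for NotificationCenter.
    public static final Set<Listener> EMOJI_STATUS_BUS =
            Collections.newSetFromMap(new ConcurrentHashMap<>());

    private final Listener listener = userId -> postInvalidateOnAnimation();

    public StatusAwareCell(Context context) {
        super(context);
    }

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        EMOJI_STATUS_BUS.add(listener);    // receive updates only while on screen
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        EMOJI_STATUS_BUS.remove(listener); // avoid leaking the recycled cell
    }
}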
void show() { firstNameField.getEditText().requestFocus(); AndroidUtilities.runOnUIThread(() -> { AndroidUtilities.showKeyboard(firstNameField.getEditText()); - }, 200); + }, 50); } private void showEditDoneProgress(boolean show, boolean animated) { @@ -740,6 +741,27 @@ public static String getPhoneNumber(Context context, TLRPC.User user, String num public void setInitialPhoneNumber(String value, boolean withCoutryCode) { initialPhoneNumber = value; initialPhoneNumberWithCountryCode = withCoutryCode; + + if (!TextUtils.isEmpty(initialPhoneNumber)) { + TLRPC.User user = UserConfig.getInstance(currentAccount).getCurrentUser(); + if (initialPhoneNumber.startsWith("+")) { + codeField.setText(initialPhoneNumber.substring(1)); + } else if (initialPhoneNumberWithCountryCode || user == null || TextUtils.isEmpty(user.phone)) { + codeField.setText(initialPhoneNumber); + } else { + String phone = user.phone; + for (int a = 4; a >= 1; a--) { + String sub = phone.substring(0, a); + List country = codesMap.get(sub); + if (country != null && country.size() > 0) { + codeField.setText(country.get(0).code); + break; + } + } + phoneField.setText(initialPhoneNumber); + } + initialPhoneNumber = null; + } } public void setInitialName(String firstName, String lastName) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/NotificationsCustomSettingsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/NotificationsCustomSettingsActivity.java index f78031ed19..07ed8f7706 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/NotificationsCustomSettingsActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/NotificationsCustomSettingsActivity.java @@ -397,14 +397,14 @@ private void setDefault() { args.putBoolean("onlySelect", true); args.putBoolean("checkCanWrite", false); if (currentType == NotificationsController.TYPE_GROUP) { - args.putInt("dialogsType", 6); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_GROUPS_ONLY); } else if (currentType == NotificationsController.TYPE_CHANNEL) { - args.putInt("dialogsType", 5); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_CHANNELS_ONLY); } else { - args.putInt("dialogsType", 4); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_USERS_ONLY); } DialogsActivity activity = new DialogsActivity(args); - activity.setDelegate((fragment, dids, message, param) -> { + activity.setDelegate((fragment, dids, message, param, topicsFragment) -> { Bundle args2 = new Bundle(); args2.putLong("dialog_id", dids.get(0).dialogId); args2.putBoolean("exception", true); @@ -414,6 +414,7 @@ private void setDefault() { updateRows(true); }); presentFragment(profileNotificationsActivity, true); + return true; }); presentFragment(activity); } else if (position == deleteAllRow) { @@ -448,7 +449,7 @@ private void setDefault() { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } else if (position == alertRow) { enabled = getNotificationsController().isGlobalNotificationsEnabled(currentType); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/NotificationsSettingsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/NotificationsSettingsActivity.java index a794c0de59..bf9029b2d6 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/NotificationsSettingsActivity.java +++ 
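setInitialPhoneNumber above guesses the dial code for a bare number by testing the first four down to the first one digits of the user's own phone against codesMap (which, in the real code, maps a prefix to a list of country entries and takes the first match). Below is a framework-free sketch of that longest-prefix lookup; the sample entries and the simplified String-to-String map are illustrative only.

import java.util.HashMap;
import java.util.Map;

// Longest-prefix dial-code guess, mirroring the loop in the hunk above.
public class DialCodeGuesser {

    static String guessDialCode(String ownPhone, Map<String, String> dialCodes) {
        for (int len = Math.min(4, ownPhone.length()); len >= 1; len--) {
            String code = dialCodes.get(ownPhone.substring(0, len));
            if (code != null) {
                return code;  // longest matching prefix wins
            }
        }
        return null;          // no match: leave the code field untouched
    }

    public static void main(String[] args) {
        Map<String, String> dialCodes = new HashMap<>();
        dialCodes.put("1", "1");
        dialCodes.put("44", "44");
        dialCodes.put("998", "998");
        System.out.println(guessDialCode("447900000000", dialCodes)); // 44
        System.out.println(guessDialCode("12125550100", dialCodes));  // 1
    }
}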
b/TMessagesProj/src/main/java/org/telegram/ui/NotificationsSettingsActivity.java @@ -472,7 +472,7 @@ public boolean supportsPredictiveItemAnimations() { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } else if (position == inappSoundRow) { SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/NotificationsSoundActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/NotificationsSoundActivity.java index 81710096a0..ca27ef4f74 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/NotificationsSoundActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/NotificationsSoundActivity.java @@ -70,6 +70,8 @@ import java.util.ArrayList; import java.util.HashMap; +import tw.nekomimi.nekogram.NekoConfig; + public class NotificationsSoundActivity extends BaseFragment implements ChatAttachAlertDocumentLayout.DocumentSelectActivityDelegate, NotificationCenter.NotificationCenterDelegate { ArrayList serverTones = new ArrayList<>(); @@ -202,7 +204,7 @@ public void onItemClick(int id) { AlertDialog dialog = builder.show(); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2, resourcesProvider)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed, resourcesProvider)); } } else if (id == shareId) { if (selectedTones.size() == 1) { @@ -401,7 +403,8 @@ private void deleteSelectedMessages() { if (view instanceof ToneCell) { ToneCell cell = (ToneCell) view; checkSelection(cell.tone); - cell.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); + if (!NekoConfig.disableVibration.Bool()) + cell.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); } return false; }); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PasscodeActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PasscodeActivity.java index 3af587a9c6..585ae795d2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PasscodeActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PasscodeActivity.java @@ -848,8 +848,10 @@ public void onConfigurationChanged(Configuration newConfig) { if (lockImageView != null) { lockImageView.setVisibility(!AndroidUtilities.isSmallScreen() && AndroidUtilities.displaySize.x < AndroidUtilities.displaySize.y ? 
View.VISIBLE : View.GONE); } - for (CodeNumberField f : codeFieldContainer.codeField) { - f.setShowSoftInputOnFocusCompat(!isCustomKeyboardVisible()); + if (codeFieldContainer != null && codeFieldContainer.codeField != null) { + for (CodeNumberField f : codeFieldContainer.codeField) { + f.setShowSoftInputOnFocusCompat(!isCustomKeyboardVisible()); + } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PassportActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PassportActivity.java index 7e00760331..bd55c43ce2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PassportActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PassportActivity.java @@ -608,7 +608,7 @@ public void updateButtonState(boolean animated) { DownloadController.getInstance(currentAccount).addLoadingFileObserver(currentSecureDocument.path, this); buttonState = 1; Float progress = ImageLoader.getInstance().getFileProgress(currentSecureDocument.path); - radialProgress.setBackground(Theme.chat_photoStatesDrawables[5][0], true, animated); + radialProgress.setBackground(getResources().getDrawable(R.drawable.circle), true, animated); radialProgress.setProgress(progress != null ? progress : 0, false); invalidate(); } @@ -622,7 +622,7 @@ public void updateButtonState(boolean animated) { DownloadController.getInstance(currentAccount).addLoadingFileObserver(fileName, this); buttonState = 1; Float progress = ImageLoader.getInstance().getFileProgress(fileName); - radialProgress.setBackground(Theme.chat_photoStatesDrawables[5][0], true, animated); + radialProgress.setBackground(getResources().getDrawable(R.drawable.circle), true, animated); radialProgress.setProgress(progress != null ? progress : 0, animated); invalidate(); } @@ -2471,7 +2471,7 @@ private void createManageInterface(Context context) { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } }); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PhotoAlbumPickerActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PhotoAlbumPickerActivity.java index cb9f8c232f..a368954062 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PhotoAlbumPickerActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PhotoAlbumPickerActivity.java @@ -217,7 +217,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { setMeasuredDimension(widthSize, heightSize); - int keyboardSize = SharedConfig.smoothKeyboard ? 0 : measureKeyboardHeight(); + int keyboardSize = 0; if (keyboardSize <= AndroidUtilities.dp(20)) { if (!AndroidUtilities.isInMultiwindow) { heightSize -= commentTextView.getEmojiPadding(); @@ -261,7 +261,7 @@ protected void onLayout(boolean changed, int l, int t, int r, int b) { } final int count = getChildCount(); - int keyboardSize = SharedConfig.smoothKeyboard ? 0 : measureKeyboardHeight(); + int keyboardSize = 0; int paddingBottom = keyboardSize <= AndroidUtilities.dp(20) && !AndroidUtilities.isInMultiwindow && !AndroidUtilities.isTablet() ? commentTextView.getEmojiPadding() : 0; setBottomClip(paddingBottom); @@ -705,7 +705,7 @@ public void didReceivedNotification(int id, int account, Object... 
args) { loading = false; } } else if (id == NotificationCenter.closeChats) { - removeSelfFromStack(); + removeSelfFromStack(true); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerActivity.java index bc1af35f03..8eba66ca4c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerActivity.java @@ -650,7 +650,7 @@ private void onMeasureInternal(int widthMeasureSpec, int heightMeasureSpec) { setMeasuredDimension(widthSize, heightSize); int kbHeight = measureKeyboardHeight(); - int keyboardSize = SharedConfig.smoothKeyboard ? 0 : kbHeight; + int keyboardSize = 0; if (keyboardSize <= AndroidUtilities.dp(20)) { if (!AndroidUtilities.isInMultiwindow && commentTextView != null && frameLayout2.getParent() == this) { heightSize -= commentTextView.getEmojiPadding(); @@ -663,7 +663,7 @@ private void onMeasureInternal(int widthMeasureSpec, int heightMeasureSpec) { ignoreLayout = false; } - if (SharedConfig.smoothKeyboard && commentTextView != null && commentTextView.isPopupShowing()) { + if (commentTextView != null && commentTextView.isPopupShowing()) { fragmentView.setTranslationY(0); listView.setTranslationY(0); emptyView.setTranslationY(0); @@ -704,7 +704,7 @@ protected void onLayout(boolean changed, int l, int t, int r, int b) { } final int count = getChildCount(); - int keyboardSize = SharedConfig.smoothKeyboard ? 0 : measureKeyboardHeight(); + int keyboardSize = 0; int paddingBottom = commentTextView != null && frameLayout2.getParent() == this && keyboardSize <= AndroidUtilities.dp(20) && !AndroidUtilities.isInMultiwindow && !AndroidUtilities.isTablet() ? commentTextView.getEmojiPadding() : 0; setBottomClip(paddingBottom); @@ -832,7 +832,7 @@ public int getSpanSize(int position) { showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } return; } @@ -1373,7 +1373,7 @@ public void onResume() { initialSearchString = null; processSearch(searchItem.getSearchField()); } - getParentActivity().getWindow().setSoftInputMode(SharedConfig.smoothKeyboard ? WindowManager.LayoutParams.SOFT_INPUT_ADJUST_PAN : WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE); + getParentActivity().getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_PAN); } } @@ -1385,7 +1385,7 @@ public void onPause() { @Override public void didReceivedNotification(int id, int account, Object... 
args) { if (id == NotificationCenter.closeChats) { - removeSelfFromStack(); + removeSelfFromStack(true); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerSearchActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerSearchActivity.java index 16590290ec..44ebe57707 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerSearchActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerSearchActivity.java @@ -21,10 +21,12 @@ import android.widget.FrameLayout; import android.widget.TextView; +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.RecyclerView; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.LocaleController; import org.telegram.messenger.R; -import org.telegram.messenger.SharedConfig; import org.telegram.ui.ActionBar.ActionBar; import org.telegram.ui.ActionBar.ActionBarMenu; import org.telegram.ui.ActionBar.ActionBarMenuItem; @@ -41,9 +43,6 @@ import java.util.ArrayList; import java.util.HashMap; -import androidx.recyclerview.widget.LinearLayoutManager; -import androidx.recyclerview.widget.RecyclerView; - public class PhotoPickerSearchActivity extends BaseFragment { private static class ViewPage extends FrameLayout { @@ -241,7 +240,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { setMeasuredDimension(widthSize, heightSize); measureChildWithMargins(actionBar, widthMeasureSpec, 0, heightMeasureSpec, 0); - int keyboardSize = SharedConfig.smoothKeyboard ? 0 : measureKeyboardHeight(); + int keyboardSize = 0; if (keyboardSize <= AndroidUtilities.dp(20)) { if (!AndroidUtilities.isInMultiwindow) { heightSize -= commentTextView.getEmojiPadding(); @@ -291,7 +290,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { protected void onLayout(boolean changed, int l, int t, int r, int b) { final int count = getChildCount(); - int keyboardSize = SharedConfig.smoothKeyboard ? 0 : measureKeyboardHeight(); + int keyboardSize = 0; int paddingBottom = keyboardSize <= AndroidUtilities.dp(20) && !AndroidUtilities.isInMultiwindow && !AndroidUtilities.isTablet() ? commentTextView.getEmojiPadding() : 0; setBottomClip(paddingBottom); @@ -675,7 +674,7 @@ public void onResume() { super.onResume(); if (searchItem != null) { searchItem.openSearch(true); - getParentActivity().getWindow().setSoftInputMode(SharedConfig.smoothKeyboard ? 
WindowManager.LayoutParams.SOFT_INPUT_ADJUST_PAN : WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE); + getParentActivity().getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_PAN); } if (imagesSearch != null) { imagesSearch.onResume(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PhotoViewer.java b/TMessagesProj/src/main/java/org/telegram/ui/PhotoViewer.java index 4ccc4e539b..d26fc8f503 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PhotoViewer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PhotoViewer.java @@ -145,6 +145,7 @@ import org.telegram.messenger.ImageLoader; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaController; import org.telegram.messenger.MediaDataController; @@ -199,6 +200,7 @@ import org.telegram.ui.Components.FadingTextViewLayout; import org.telegram.ui.Components.FilterShaders; import org.telegram.ui.Components.FloatSeekBarAccessibilityDelegate; +import org.telegram.ui.Components.Forum.ForumUtilities; import org.telegram.ui.Components.GestureDetector2; import org.telegram.ui.Components.GroupedPhotosListView; import org.telegram.ui.Components.HideViewAfterAnimation; @@ -208,6 +210,7 @@ import org.telegram.ui.Components.LinkSpanDrawable; import org.telegram.ui.Components.MediaActivity; import org.telegram.ui.Components.NumberPicker; +import org.telegram.ui.Components.OptionsSpeedIconDrawable; import org.telegram.ui.Components.OtherDocumentPlaceholderDrawable; import org.telegram.ui.Components.Paint.Views.LPhotoPaintView; import org.telegram.ui.Components.PaintingOverlay; @@ -225,6 +228,7 @@ import org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.Components.ShareAlert; import org.telegram.ui.Components.SizeNotifierFrameLayoutPhoto; +import org.telegram.ui.Components.SpeedIconDrawable; import org.telegram.ui.Components.StickersAlert; import org.telegram.ui.Components.TextViewSwitcher; import org.telegram.ui.Components.Tooltip; @@ -332,6 +336,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat private TextView docInfoTextView; private TextView doneButtonFullWidth; private ActionBarMenuItem menuItem; + private OptionsSpeedIconDrawable menuItemIcon; private ActionBarMenuSubItem allMediaItem; private ActionBarMenuItem sendNoQuoteItem; private ActionBarMenuSubItem speedItem; @@ -792,11 +797,10 @@ private void onLinkLongPress(URLSpan link, TextView widget, Runnable onDismiss) BottomSheet bottomSheet = builder.create(); bottomSheet.scrollNavBar = true; bottomSheet.show(); - if (!NekoConfig.disableVibration.Bool()) { - try { + try { + if (!NekoConfig.disableVibration.Bool()) containerView.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignore) {} - } + } catch (Exception ignore) {} bottomSheet.setItemColor(0,0xffffffff, 0xffffffff); bottomSheet.setItemColor(1,0xffffffff, 0xffffffff); bottomSheet.setBackgroundColor(0xff1C2229); @@ -1028,7 +1032,9 @@ public void run() { pipVideoOverlayAnimateFlag = true; changedTextureView.setVisibility(View.INVISIBLE); - aspectRatioFrameLayout.removeView(videoTextureView); + if (aspectRatioFrameLayout != null) { + aspectRatioFrameLayout.removeView(videoTextureView); + } } }; @@ -3395,13 +3401,14 @@ public void didReceivedNotification(int id, int account, Object... 
args) { imagesArrLocationsVideo.clear(); imagesArrMessages.clear(); avatarsArr.clear(); + for (int a = 0; a < photos.size(); a++) { TLRPC.Photo photo = photos.get(a); if (photo == null || photo instanceof TLRPC.TL_photoEmpty || photo.sizes == null) { continue; } TLRPC.PhotoSize sizeFull = FileLoader.getClosestPhotoSizeWithSize(photo.sizes, 640); - TLRPC.VideoSize videoSize = photo.video_sizes.isEmpty() ? null : photo.video_sizes.get(0); + TLRPC.VideoSize videoSize = photo.video_sizes.isEmpty() ? null : FileLoader.getClosestVideoSizeWithSize(photo.video_sizes, 1000); if (sizeFull != null) { if (setToImage == -1 && currentFileLocation != null) { for (int b = 0; b < photo.sizes.size(); b++) { @@ -3412,6 +3419,15 @@ public void didReceivedNotification(int id, int account, Object... args) { } } } + if (setToImage == -1 && currentFileLocation != null) { + for (int b = 0; b < photo.video_sizes.size(); b++) { + TLRPC.VideoSize size = photo.video_sizes.get(b); + if (size.location != null && size.location.local_id == currentFileLocation.location.local_id && size.location.volume_id == currentFileLocation.location.volume_id) { + setToImage = imagesArrLocations.size(); + break; + } + } + } if (photo.dc_id != 0) { sizeFull.location.dc_id = photo.dc_id; sizeFull.location.file_reference = photo.file_reference; @@ -4039,7 +4055,7 @@ protected void dispatchDraw(Canvas canvas) { super.dispatchDraw(canvas); if (parentChatActivity != null) { View undoView = parentChatActivity.getUndoView(); - if (undoView.getVisibility() == View.VISIBLE) { + if (undoView != null && undoView.getVisibility() == View.VISIBLE) { canvas.save(); View parent = (View) undoView.getParent(); canvas.clipRect(parent.getX(), parent.getY(), parent.getX() + parent.getWidth(), parent.getY() + parent.getHeight()); @@ -4455,12 +4471,13 @@ public void onItemClick(int id) { } else { Bundle args = new Bundle(); args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 3); + args.putBoolean("canSelectTopics", true); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_FORWARD); DialogsActivity fragment = new DialogsActivity(args); final ArrayList fmessages = new ArrayList<>(); fmessages.add(currentMessageObject); final ChatActivity parentChatActivityFinal = parentChatActivity; - fragment.setDelegate((fragment1, dids, message, param) -> { + fragment.setDelegate((fragment1, dids, message, param, topicsFragment) -> { if (dids.size() > 1 || dids.get(0).dialogId == UserConfig.getInstance(currentAccount).getClientUserId() || message != null) { for (int a = 0; a < dids.size(); a++) { long did = dids.get(a).dialogId; @@ -4476,14 +4493,17 @@ public void onItemClick(int id) { } fragment1.finishFragment(); if (parentChatActivityFinal != null) { - if (dids.size() == 1) { - parentChatActivityFinal.getUndoView().showWithAction(dids.get(0).dialogId, UndoView.ACTION_FWD_MESSAGES, fmessages.size()); + UndoView undoView = parentChatActivityFinal.getUndoView(); + if (undoView != null) {if (dids.size() == 1) { + undoView.showWithAction(dids.get(0).dialogId, UndoView.ACTION_FWD_MESSAGES, fmessages.size()); } else { - parentChatActivityFinal.getUndoView().showWithAction(0, UndoView.ACTION_FWD_MESSAGES, fmessages.size(), dids.size(), null, null); + undoView.showWithAction(0, UndoView.ACTION_FWD_MESSAGES, fmessages.size(), dids.size(), null, null); + } } } } else { - long did = dids.get(0).dialogId; + MessagesStorage.TopicKey topicKey = dids.get(0); + long did = topicKey.dialogId; Bundle args1 = new Bundle(); if (id == gallery_menu_send_noquote) { 
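PhotoViewer above stops taking video_sizes.get(0) blindly and instead asks FileLoader for the variant closest to 1000 px. The selection idea, reduced to plain Java, looks like the sketch below; the Variant type and the plain nearest-distance rule are simplifications, not FileLoader.getClosestVideoSizeWithSize itself.

import java.util.Arrays;
import java.util.List;

// Pick the candidate whose size is nearest to the requested target.
public class ClosestSize {

    static class Variant {
        final int side; // e.g. the larger of width and height
        Variant(int side) { this.side = side; }
    }

    static Variant closestTo(List<Variant> variants, int target) {
        Variant best = null;
        int bestDiff = Integer.MAX_VALUE;
        for (Variant v : variants) {
            int diff = Math.abs(v.side - target);
            if (diff < bestDiff) {
                bestDiff = diff;
                best = v;
            }
        }
        return best;
    }

    public static void main(String[] args) {
        List<Variant> sizes = Arrays.asList(new Variant(320), new Variant(800), new Variant(1440));
        System.out.println(closestTo(sizes, 1000).side); // 800
    }
}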
args1.putBoolean("forward_noquote", true); @@ -4496,14 +4516,17 @@ public void onItemClick(int id) { } else { args1.putLong("chat_id", -did); } - NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.closeChats); ChatActivity chatActivity = new ChatActivity(args1); + if (topicKey.topicId != 0) { + ForumUtilities.applyTopic(chatActivity, topicKey); + } if (((LaunchActivity) parentActivity).presentFragment(chatActivity, true, false)) { chatActivity.showFieldPanelForForward(true, fmessages); } else { fragment1.finishFragment(); } } + return true; }); ((LaunchActivity) parentActivity).presentFragment(fragment, false, true); closePhoto(false, false); @@ -4636,7 +4659,7 @@ public void onItemClick(int id) { if (avatarsDialogId > 0) { MessagesController.getInstance(currentAccount).deleteUserPhoto(null); } else { - MessagesController.getInstance(currentAccount).changeChatAvatar(-avatarsDialogId, null, null, null, 0, null, null, null, null); + MessagesController.getInstance(currentAccount).changeChatAvatar(-avatarsDialogId, null, null, null, null, 0, null, null, null, null); } closePhoto(false, false); } else { @@ -4697,7 +4720,7 @@ public void onItemClick(int id) { showAlertDialog(builder); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(getThemedColor(Theme.key_dialogTextRed2)); + button.setTextColor(getThemedColor(Theme.key_dialogTextRed)); } } else if (id == gallery_menu_share || id == gallery_menu_share2) { onSharePressed(); @@ -4850,7 +4873,7 @@ public void dismiss() { inputChatPhoto.id.id = photo.id; inputChatPhoto.id.access_hash = photo.access_hash; inputChatPhoto.id.file_reference = photo.file_reference; - MessagesController.getInstance(currentAccount).changeChatAvatar(-avatarsDialogId, inputChatPhoto, null, null, 0, null, null, null, null); + MessagesController.getInstance(currentAccount).changeChatAvatar(-avatarsDialogId, inputChatPhoto, null, null, null, 0, null, null, null, null); chat.photo.dc_id = photo.dc_id; chat.photo.photo_small = smallSize.location; chat.photo.photo_big = bigSize.location; @@ -4996,14 +5019,14 @@ public boolean canOpenMenu() { shareItem.setContentDescription(LocaleController.getString("ShareFile", R.string.ShareFile)); shareItem.setIconColor(0xfffafafa); - menuItem = menu.addItem(0, R.drawable.ic_ab_other); + menuItem = menu.addItem(0, menuItemIcon = new OptionsSpeedIconDrawable()); + menuItem.setOnMenuDismiss(byClick -> checkProgress(0, false, false)); menuItem.getPopupLayout().swipeBackGravityRight = true; chooseSpeedLayout = new ChooseSpeedLayout(activityContext, menuItem.getPopupLayout().getSwipeBack(), new ChooseSpeedLayout.Callback() { @Override - public void onSpeedSelected(float speed) { - menuItem.toggleSubMenu(); + public void onSpeedSelected(float speed, boolean isFinal, boolean closeMenu) { if (speed != currentVideoSpeed) { currentVideoSpeed = speed; if (currentMessageObject != null) { @@ -5020,7 +5043,10 @@ public void onSpeedSelected(float speed) { if (photoViewerWebView != null) { photoViewerWebView.setPlaybackSpeed(currentVideoSpeed); } - setMenuItemIcon(); + } + setMenuItemIcon(true, isFinal); + if (closeMenu) { + menuItem.toggleSubMenu(); } } }); @@ -5050,7 +5076,7 @@ public void onSpeedSelected(float speed) { menuItem.addSubItem(gallery_menu_delete, R.drawable.msg_delete, LocaleController.getString("Delete", R.string.Delete)).setColors(0xfffafafa, 0xfffafafa); menuItem.addSubItem(gallery_menu_cancel_loading, R.drawable.msg_cancel, 
LocaleController.getString("StopDownload", R.string.StopDownload)).setColors(0xfffafafa, 0xfffafafa); menuItem.redrawPopup(0xf9222222); - setMenuItemIcon(); + setMenuItemIcon(false, true); menuItem.setPopupItemsSelectorColor(0x0fffffff); menuItem.setSubMenuDelegate(new ActionBarMenuItem.ActionBarSubMenuItemDelegate() { @@ -5685,7 +5711,8 @@ public void setAlpha(float alpha) { if (captionEditText.getCaptionLimitOffset() < 0) { AndroidUtilities.shakeView(captionLimitView); try { - captionLimitView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + if (!NekoConfig.disableVibration.Bool()) + captionLimitView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); } catch (Exception ignored) {} if (!MessagesController.getInstance(currentAccount).premiumLocked && MessagesController.getInstance(currentAccount).captionLengthLimitPremium > captionEditText.getCodePointCount()) { @@ -5785,9 +5812,9 @@ public void setAlpha(float alpha) { ActionBarMenuSubItem cell = new ActionBarMenuSubItem(parentActivity, a == 0, a == 3, resourcesProvider); if (a == 0) { if (UserObject.isUserSelf(user)) { - cell.setTextAndIcon(LocaleController.getString("SetReminder", R.string.SetReminder), R.drawable.msg_schedule); + cell.setTextAndIcon(LocaleController.getString("SetReminder", R.string.SetReminder), R.drawable.baseline_timer_24); } else { - cell.setTextAndIcon(LocaleController.getString("ScheduleMessage", R.string.ScheduleMessage), R.drawable.msg_schedule); + cell.setTextAndIcon(LocaleController.getString("ScheduleMessage", R.string.ScheduleMessage), R.drawable.baseline_timer_24); } } else if (a == 1) { cell.setTextAndIcon(LocaleController.getString("SendWithoutSound", R.string.SendWithoutSound), R.drawable.input_notify_off); @@ -5796,7 +5823,11 @@ public void setAlpha(float alpha) { } else if (a == 3) { cell.setTextAndIcon(LocaleController.getString("SendAsNewPhoto", R.string.SendAsNewPhoto), R.drawable.msg_send); } else if (a == 4) { - cell.setTextAndIcon(LocaleController.getString("SendWithoutCompression", R.string.SendWithoutCompression), R.drawable.msg_sendfile); + if (placeProvider != null && placeProvider.getSelectedCount() > 1) { + cell.setTextAndIcon(LocaleController.getString(R.string.SendAsFiles), R.drawable.msg_sendfile); + } else { + cell.setTextAndIcon(LocaleController.getString(R.string.SendAsFile), R.drawable.msg_sendfile); + } } else if (a == 5) { cell.setTextAndIcon(LocaleController.getString("Translate", R.string.Translate), R.drawable.ic_translate); } else if (a == 6) { @@ -5874,6 +5905,7 @@ public void setAlpha(float alpha) { view.getLocationInWindow(location); sendPopupWindow.showAtLocation(view, Gravity.LEFT | Gravity.TOP, location[0] + view.getMeasuredWidth() - sendPopupLayout.getMeasuredWidth() + AndroidUtilities.dp(14), location[1] - sendPopupLayout.getMeasuredHeight() - AndroidUtilities.dp(18)); if (!NekoConfig.disableVibration.Bool()) { + if (!NekoConfig.disableVibration.Bool()) view.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); } @@ -7015,6 +7047,17 @@ public void onDismiss(DialogInterface dialog) { ((MediaController.MediaEditState) entry).editedInfo = videoEditedInfo; } } + if (parentChatActivity != null && parentChatActivity.getCurrentChat() != null) { + boolean isVideo = (isCurrentVideo || videoEditedInfo != null); + if (isVideo && !ChatObject.canSendVideo(parentChatActivity.getCurrentChat())) { + 
BulletinFactory.of(containerView, resourcesProvider).createErrorBulletin(LocaleController.getString(R.string.GlobalAttachVideoRestricted)).show(); + return; + } + if (!isVideo && !ChatObject.canSendPhoto(parentChatActivity.getCurrentChat())) { + BulletinFactory.of(containerView, resourcesProvider).createErrorBulletin(LocaleController.getString(R.string.GlobalAttachPhotoRestricted)).show(); + return; + } + } doneButtonPressed = true; if (videoEditedInfo != null) { long sizeToCheck = (long) (videoEditedInfo.estimatedSize * 0.9f); @@ -7083,28 +7126,28 @@ public void dismissInternal() { alert.show(); } - private void setMenuItemIcon() { + private void setMenuItemIcon(boolean animated, boolean isFinal) { if (speedItem.getVisibility() != View.VISIBLE) { - menuItem.setIcon(R.drawable.ic_ab_other); + menuItemIcon.setSpeed(null, animated); return; } - if (Math.abs(currentVideoSpeed - 0.25f) < 0.001f) { - menuItem.setIcon(R.drawable.msg_more_0_2); - speedItem.setSubtext(LocaleController.getString("SpeedVerySlow", R.string.SpeedVerySlow)); - } else if (Math.abs(currentVideoSpeed - 0.5f) < 0.001f) { - menuItem.setIcon(R.drawable.msg_more_0_5); - speedItem.setSubtext(LocaleController.getString("SpeedSlow", R.string.SpeedSlow)); - } else if (Math.abs(currentVideoSpeed - 1.0f) < 0.001f) { - menuItem.setIcon(R.drawable.ic_ab_other); - speedItem.setSubtext(LocaleController.getString("SpeedNormal", R.string.SpeedNormal)); - } else if (Math.abs(currentVideoSpeed - 1.5f) < 0.001f) { - menuItem.setIcon(R.drawable.msg_more_1_5); - speedItem.setSubtext(LocaleController.getString("SpeedFast", R.string.SpeedFast)); - } else { - menuItem.setIcon(R.drawable.msg_more_2); - speedItem.setSubtext(LocaleController.getString("SpeedVeryFast", R.string.SpeedVeryFast)); + menuItemIcon.setSpeed(Math.abs(currentVideoSpeed - 1f) < 0.001f ? 
null : currentVideoSpeed, animated); + if (isFinal) { + if (Math.abs(currentVideoSpeed - 0.2f) < 0.05f) { + speedItem.setSubtext(LocaleController.getString("VideoSpeedVerySlow", R.string.VideoSpeedVerySlow)); + } else if (Math.abs(currentVideoSpeed - 0.5f) < 0.05f) { + speedItem.setSubtext(LocaleController.getString("VideoSpeedSlow", R.string.VideoSpeedSlow)); + } else if (Math.abs(currentVideoSpeed - 1.0f) < 0.05f) { + speedItem.setSubtext(LocaleController.getString("VideoSpeedNormal", R.string.VideoSpeedNormal)); + } else if (Math.abs(currentVideoSpeed - 1.5f) < 0.05f) { + speedItem.setSubtext(LocaleController.getString("VideoSpeedFast", R.string.VideoSpeedFast)); + } else if (Math.abs(currentVideoSpeed - 2f) < 0.05f) { + speedItem.setSubtext(LocaleController.getString("VideoSpeedVeryFast", R.string.VideoSpeedVeryFast)); + } else { + speedItem.setSubtext(LocaleController.formatString("VideoSpeedCustom", R.string.VideoSpeedCustom, SpeedIconDrawable.formatNumber(currentVideoSpeed) + "x")); + } } - chooseSpeedLayout.update(currentVideoSpeed); + chooseSpeedLayout.update(currentVideoSpeed, isFinal); } public float getCurrentVideoSpeed() { @@ -7170,7 +7213,9 @@ private TextView createCaptionTextView() { @Override public boolean onTouchEvent(MotionEvent event) { - + if (getLayout() == null) { + return false; + } boolean linkResult = false; if (event.getAction() == MotionEvent.ACTION_DOWN || pressedLink != null && event.getAction() == MotionEvent.ACTION_UP) { int x = (int) (event.getX() - getPaddingLeft()); @@ -7429,14 +7474,14 @@ public void getOutline(View view, Outline outline) { videoTextureView.setTranslationX(fromX * (1f - xValue) + (toX2) * xValue); videoTextureView.setTranslationY(fromY * (1f - yValue) + (toY2) * yValue); videoTextureView.invalidateOutline(); - } - if (firstFrameView != null) { - firstFrameView.setTranslationX(videoTextureView.getTranslationX()); - firstFrameView.setTranslationY(videoTextureView.getTranslationY()); - firstFrameView.setScaleX(videoTextureView.getScaleX()); - firstFrameView.setScaleY(videoTextureView.getScaleY()); - firstFrameView.invalidateOutline(); + if (firstFrameView != null) { + firstFrameView.setTranslationX(videoTextureView.getTranslationX()); + firstFrameView.setTranslationY(videoTextureView.getTranslationY()); + firstFrameView.setScaleX(videoTextureView.getScaleX()); + firstFrameView.setScaleY(videoTextureView.getScaleY()); + firstFrameView.invalidateOutline(); + } } }); @@ -7485,7 +7530,7 @@ public void onAnimationEnd(Animator animation) { switchToInlineRunnable.run(); dismissInternal(); } - if (parentChatActivity != null) { + if (parentChatActivity != null && parentChatActivity.getFragmentView() != null) { parentChatActivity.getFragmentView().invalidate(); } } @@ -8202,12 +8247,12 @@ private void updateVideoPlayerTime() { videoPlayerTotalTime[1] = (int) (total % 60); } String current, total; - if (videoPlayerCurrentTime[0] > 60) { + if (videoPlayerCurrentTime[0] >= 60) { current = String.format(Locale.ROOT, "%02d:%02d:%02d", videoPlayerCurrentTime[0] / 60, videoPlayerCurrentTime[0] % 60, videoPlayerCurrentTime[1]); } else { current = String.format(Locale.ROOT, "%02d:%02d", videoPlayerCurrentTime[0], videoPlayerCurrentTime[1]); } - if (videoPlayerTotalTime[0] > 60) { + if (videoPlayerTotalTime[0] >= 60) { total = String.format(Locale.ROOT, "%02d:%02d:%02d", videoPlayerTotalTime[0] / 60, videoPlayerTotalTime[0] % 60, videoPlayerTotalTime[1]); } else { total = String.format(Locale.ROOT, "%02d:%02d", videoPlayerTotalTime[0], 
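The rewritten setMenuItemIcon above matches the current playback speed against the presets with a 0.05 tolerance and falls back to a custom "Nx" subtext for anything in between. A stand-alone sketch of that mapping follows; the English labels and the trailing-zero trimming stand in for the string resources and SpeedIconDrawable.formatNumber used by the real code.

import java.util.Locale;

// Preset matching with tolerance, mirroring the speed subtext logic above.
public class SpeedLabel {

    static String label(float speed) {
        if (Math.abs(speed - 0.2f) < 0.05f) return "Very slow";
        if (Math.abs(speed - 0.5f) < 0.05f) return "Slow";
        if (Math.abs(speed - 1.0f) < 0.05f) return "Normal";
        if (Math.abs(speed - 1.5f) < 0.05f) return "Fast";
        if (Math.abs(speed - 2.0f) < 0.05f) return "Very fast";
        // Trim trailing zeros so 1.20 renders as "1.2x".
        String number = String.format(Locale.ROOT, "%.2f", speed)
                .replaceAll("0+$", "")
                .replaceAll("\\.$", "");
        return number + "x";
    }

    public static void main(String[] args) {
        System.out.println(label(1.0f));  // Normal
        System.out.println(label(0.22f)); // Very slow
        System.out.println(label(1.2f));  // 1.2x
    }
}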
videoPlayerTotalTime[1]); @@ -8886,7 +8931,7 @@ public void onAnimationEnd(Animator animation) { videoPosition = item; } } - if (currentMessageObject.forceSeekTo < 0 && videoPosition != null) { + if (currentMessageObject != null && currentMessageObject.forceSeekTo < 0 && videoPosition != null) { float pos = videoPosition.position; if (pos > 0 && pos < 0.999f) { currentMessageObject.forceSeekTo = pos; @@ -11271,7 +11316,9 @@ private void onPhotoShow(final MessageObject messageObject, final TLRPC.FileLoca avatarsArr.clear(); secureDocuments.clear(); imagesArrLocals.clear(); - actionBar.setElevation(0); + if (Build.VERSION.SDK_INT > Build.VERSION_CODES.LOLLIPOP) { + actionBar.setElevation(0); + } for (int a = 0; a < 2; a++) { imagesByIds[a].clear(); imagesByIdsTemp[a].clear(); @@ -11405,7 +11452,7 @@ private void onPhotoShow(final MessageObject messageObject, final TLRPC.FileLoca } else { currentVideoSpeed = 1.0f; } - setMenuItemIcon(); + setMenuItemIcon(false, true); boolean noforwards = messageObject != null && (MessagesController.getInstance(currentAccount).isChatNoForwards(messageObject.getChatId()) || (messageObject.messageOwner != null && messageObject.messageOwner.noforwards) || messageObject.hasRevealedExtendedMedia()); if (messageObject != null && messages == null) { @@ -11688,7 +11735,7 @@ private void onPhotoShow(final MessageObject messageObject, final TLRPC.FileLoca } private boolean canSendMediaToParentChatActivity() { - return parentChatActivity != null && (parentChatActivity.currentUser != null || parentChatActivity.currentChat != null && !ChatObject.isNotInChat(parentChatActivity.currentChat) && ChatObject.canSendMedia(parentChatActivity.currentChat)); + return parentChatActivity != null && (parentChatActivity.currentUser != null || parentChatActivity.currentChat != null && !ChatObject.isNotInChat(parentChatActivity.currentChat) && (ChatObject.canSendPhoto(parentChatActivity.currentChat) || ChatObject.canSendVideo(parentChatActivity.currentChat))); } private void setDoubleTapEnabled(boolean value) { @@ -11735,6 +11782,7 @@ private void setIsAboutToSwitchToIndex(int index, boolean init, boolean animated return; } newMessageObject = imagesArr.get(switchingToIndex); + newMessageObject.updateTranslation(); isVideo = newMessageObject.isVideo(); boolean isInvoice = newMessageObject.isInvoice(); boolean noforwards = MessagesController.getInstance(currentAccount).isChatNoForwards(newMessageObject.getChatId()) || (newMessageObject.messageOwner != null && newMessageObject.messageOwner.noforwards) || newMessageObject.hasRevealedExtendedMedia(); @@ -11751,7 +11799,7 @@ private void setIsAboutToSwitchToIndex(int index, boolean init, boolean animated dateTextView.setText(""); } else { allowShare = !noforwardsOverride; - if (newMessageObject.isNewGif() && allowShare) { + if (newMessageObject.isNewGif() && allowShare && !DialogObject.isEncryptedDialog(newMessageObject.getDialogId())) { menuItem.showSubItem(gallery_menu_savegif); } if (newMessageObject.canDeleteMessage(parentChatActivity != null && parentChatActivity.isInScheduleMode(), null) && slideshowMessageId == 0) { @@ -12430,6 +12478,9 @@ private void setImageIndex(int index, boolean init, boolean animateCaption) { } MessageObject newMessageObject = imagesArr.get(currentIndex); sameImage = init && currentMessageObject != null && currentMessageObject.getId() == newMessageObject.getId(); + if (sameImage) { + newMessageObject.putInDownloadsStore = currentMessageObject.putInDownloadsStore; + } currentMessageObject = newMessageObject; 
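Aside on the updateVideoPlayerTime hunk above: changing the hour check from "> 60" to ">= 60" makes exactly 60 minutes render as "01:00:00" instead of "60:00". A minimal standalone sketch of the corrected boundary logic; the class and method names are invented for the example, and the minutes/seconds split is assumed to match how videoPlayerCurrentTime is filled:

    import java.util.Locale;

    public class DurationFormatSketch {
        // minutes/seconds are assumed to be pre-split the same way videoPlayerCurrentTime is
        static String format(int minutes, int seconds) {
            if (minutes >= 60) { // ">" here would miss the exact 60-minute boundary
                return String.format(Locale.ROOT, "%02d:%02d:%02d", minutes / 60, minutes % 60, seconds);
            }
            return String.format(Locale.ROOT, "%02d:%02d", minutes, seconds);
        }

        public static void main(String[] args) {
            System.out.println(format(59, 30)); // 59:30
            System.out.println(format(60, 0));  // 01:00:00 (the old check printed "60:00")
            System.out.println(format(75, 5));  // 01:15:05
        }
    }
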
isVideo = newMessageObject.isVideo(); if (sharedMediaType == MediaDataController.MEDIA_FILE) { @@ -12587,7 +12638,7 @@ private void setImageIndex(int index, boolean init, boolean animateCaption) { currentPageBlock = pageBlock; isVideo = pageBlocksAdapter.isVideo(currentIndex); } - setMenuItemIcon(); + setMenuItemIcon(false, true); if (currentPlaceObject != null) { if (animationInProgress == 0) { @@ -13115,7 +13166,7 @@ private void checkProgress(int a, boolean scroll, boolean animated) { photoProgressViews[a].setBackgroundState(PROGRESS_NONE, animated, true); } } - if (a == 0) { + if (a == 0 && !menuItem.isSubMenuShowing()) { if (!existsFinal) { if (!FileLoader.getInstance(currentAccount).isLoadingFile(currentFileNames[a])) { menuItem.hideSubItem(gallery_menu_cancel_loading); @@ -14294,7 +14345,10 @@ public void onAnimationEnd(Animator animation) { object.imageReceiver.setVisible(false, true); }; if (parentChatActivity != null && parentChatActivity.getFragmentView() != null) { - parentChatActivity.getUndoView().hide(false, 1); + UndoView undoView = parentChatActivity.getUndoView(); + if (undoView != null) { + undoView.hide(false, 1); + } parentChatActivity.getFragmentView().invalidate(); } return true; @@ -14313,7 +14367,10 @@ public void onAnimationEnd(Animator animation) { initCropView(); setCropBitmap(); if (parentChatActivity != null) { - parentChatActivity.getUndoView().hide(false, 1); + UndoView undoView = parentChatActivity.getUndoView(); + if (undoView != null) { + undoView.hide(false, 1); + } parentChatActivity.getFragmentView().invalidate(); } windowView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { @@ -14422,7 +14479,7 @@ protected void drawBlackBackground(Canvas canvas, int w, int h) { @Override protected void processTouch(MotionEvent event) { - gestureDetector.onTouchEvent(event); + // gestureDetector.onTouchEvent(event); } }; photoViewerWebView.init(embedSeekTime, MessageObject.getMedia(currentMessageObject.messageOwner).webpage); @@ -14990,6 +15047,9 @@ private void onPhotoClosed(PlaceProviderObject object) { if (doneButtonPressed) { releasePlayer(true); } + if (currentMessageObject != null && !currentMessageObject.putInDownloadsStore) { + FileLoader.getInstance(currentAccount).cancelLoadFile(currentMessageObject.getDocument()); + } isVisible = false; cropInitied = false; disableShowCheck = true; @@ -15677,12 +15737,12 @@ private void switchToNextIndex(int add, boolean init) { } private boolean shouldMessageObjectAutoPlayed(MessageObject messageObject) { - return messageObject != null && messageObject.isVideo() && (messageObject.mediaExists || messageObject.attachPathExists || messageObject.canStreamVideo() && SharedConfig.streamMedia) && SharedConfig.autoplayVideo; + return messageObject != null && messageObject.isVideo() && (messageObject.mediaExists || messageObject.attachPathExists || messageObject.canStreamVideo() && SharedConfig.streamMedia) && SharedConfig.isAutoplayVideo(); } private boolean shouldIndexAutoPlayed(int index) { if (pageBlocksAdapter != null) { - if (pageBlocksAdapter.isVideo(index) && SharedConfig.autoplayVideo) { + if (pageBlocksAdapter.isVideo(index) && SharedConfig.isAutoplayVideo()) { final File mediaFile = pageBlocksAdapter.getFile(index); if (mediaFile != null && mediaFile.exists()) { return true; @@ -15724,20 +15784,27 @@ private void onDraw(Canvas canvas) { progress = progress + (1f - progress) * clippingImageProgress; } float scale = 1f + (1f - Utilities.clamp(progress, 1, 0)) * ZOOM_SCALE; + if 
(!LiteMode.isEnabled(LiteMode.FLAG_CHAT_SCALE)) { + scale = 1f; + } View view = parentFragment.getFragmentView(); - view.setPivotX(view.getWidth() / 2f); - view.setPivotY(view.getHeight() / 2f); - view.setScaleX(scale); - view.setScaleY(scale); - - if (parentAlert != null) { - view = parentAlert.getContainer(); + if (view.getScaleX() != scale || view.getScaleY() != scale) { view.setPivotX(view.getWidth() / 2f); view.setPivotY(view.getHeight() / 2f); view.setScaleX(scale); view.setScaleY(scale); } + if (parentAlert != null) { + view = parentAlert.getContainer(); + if (view.getScaleX() != scale || view.getScaleY() != scale) { + view.setPivotX(view.getWidth() / 2f); + view.setPivotY(view.getHeight() / 2f); + view.setScaleX(scale); + view.setScaleY(scale); + } + } + if (animationInProgress == 1 || animationInProgress == 2 || animationInProgress == 3 || pipAnimationInProgress) { containerView.invalidate(); } @@ -16932,6 +16999,9 @@ public boolean onDoubleTap(MotionEvent e) { if (animationStartTime != 0 || animationInProgress != 0) { return false; } + if (photoProgressViews[0] != null && photoProgressViews[0].isVisible() && photoProgressViews[0].backgroundState != PROGRESS_NONE && Math.sqrt(Math.pow(AndroidUtilities.displaySize.x / 2f - e.getX(), 2) + Math.pow((AndroidUtilities.displaySize.y + AndroidUtilities.statusBarHeight) / 2f - e.getY(), 2)) < AndroidUtilities.dp(40)) { + return false; // play button + } if (scale == 1.0f) { float atx = (e.getX() - getContainerViewWidth() / 2) - ((e.getX() - getContainerViewWidth() / 2) - translationX) * (3.0f / scale); float aty = (e.getY() - getContainerViewHeight() / 2) - ((e.getY() - getContainerViewHeight() / 2) - translationY) * (3.0f / scale); @@ -17770,7 +17840,7 @@ public void checkFromPlayer(VideoPlayer videoPlayer) { final int index = ++gettingFrameIndex; Utilities.globalQueue.postRunnable(() -> { try { - final AnimatedFileDrawable drawable = new AnimatedFileDrawable(new File(uri.getPath()), true, 0, null, null, null, 0, UserConfig.selectedAccount, false, AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y, null); + final AnimatedFileDrawable drawable = new AnimatedFileDrawable(new File(uri.getPath()), true, 0, 0, null, null, null, 0, UserConfig.selectedAccount, false, AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y, null); final Bitmap bitmap = drawable.getFrameAtTime(0); drawable.recycle(); AndroidUtilities.runOnUIThread(() -> { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PinchToZoomHelper.java b/TMessagesProj/src/main/java/org/telegram/ui/PinchToZoomHelper.java index 6ea2e3d4c2..08b0a7c26e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PinchToZoomHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PinchToZoomHelper.java @@ -750,7 +750,9 @@ public boolean checkPinchToZoom(MotionEvent ev, View child, ImageReceiver image, invalidateViews(); } else if ((ev.getActionMasked() == MotionEvent.ACTION_UP || (ev.getActionMasked() == MotionEvent.ACTION_POINTER_UP && checkPointerIds(ev)) || ev.getActionMasked() == MotionEvent.ACTION_CANCEL) && isInPinchToZoomTouchMode) { isInPinchToZoomTouchMode = false; - child.getParent().requestDisallowInterceptTouchEvent(false); + if (child != null && child.getParent() != null) { + child.getParent().requestDisallowInterceptTouchEvent(false); + } finishZoom(); } return isInOverlayModeFor(child); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PremiumPreviewFragment.java b/TMessagesProj/src/main/java/org/telegram/ui/PremiumPreviewFragment.java index 
1be7ccaca2..b147666002 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PremiumPreviewFragment.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PremiumPreviewFragment.java @@ -2,6 +2,7 @@ import android.animation.Animator; import android.animation.AnimatorListenerAdapter; +import android.animation.ValueAnimator; import android.annotation.SuppressLint; import android.app.Activity; import android.app.Dialog; @@ -90,14 +91,17 @@ import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Objects; public class PremiumPreviewFragment extends BaseFragment implements NotificationCenter.NotificationCenterDelegate { - private final static boolean IS_PREMIUM_TIERS_UNAVAILABLE = true; + public final static String TRANSACTION_PATTERN = "^(.*?)(?:\\.\\.\\d*|)$"; + private final static boolean IS_PREMIUM_TIERS_UNAVAILABLE = false; RecyclerListView listView; ArrayList premiumFeatures = new ArrayList<>(); ArrayList subscriptionTiers = new ArrayList<>(); int selectedTierIndex = 0; + SubscriptionTier currentSubscriptionTier; int rowCount; int paddingRow; @@ -139,6 +143,7 @@ public class PremiumPreviewFragment extends BaseFragment implements Notification public final static int PREMIUM_FEATURE_APPLICATION_ICONS = 10; public final static int PREMIUM_FEATURE_ANIMATED_EMOJI = 11; public final static int PREMIUM_FEATURE_EMOJI_STATUS = 12; + public final static int PREMIUM_FEATURE_TRANSLATIONS = 13; private int statusBarHeight; private int firstViewHeight; private boolean isDialogVisible; @@ -151,10 +156,12 @@ public class PremiumPreviewFragment extends BaseFragment implements Notification float totalProgress; private String source; + private boolean selectAnnualByDefault; + final Bitmap gradientTextureBitmap = Bitmap.createBitmap(100, 100, Bitmap.Config.ARGB_8888); final Canvas gradientCanvas = new Canvas(gradientTextureBitmap); - PremiumGradient.GradientTools gradientTools = new PremiumGradient.GradientTools(Theme.key_premiumGradientBackground1, Theme.key_premiumGradientBackground2, Theme.key_premiumGradientBackground3, Theme.key_premiumGradientBackground4); - PremiumGradient.GradientTools tiersGradientTools; + PremiumGradient.PremiumGradientTools gradientTools = new PremiumGradient.PremiumGradientTools(Theme.key_premiumGradientBackground1, Theme.key_premiumGradientBackground2, Theme.key_premiumGradientBackground3, Theme.key_premiumGradientBackground4); + PremiumGradient.PremiumGradientTools tiersGradientTools; private boolean forcePremium; float progressToFull; @@ -187,6 +194,8 @@ public static int serverStringToFeatureType(String s) { return PREMIUM_FEATURE_ANIMATED_EMOJI; case "emoji_status": return PREMIUM_FEATURE_EMOJI_STATUS; + case "translations": + return PREMIUM_FEATURE_TRANSLATIONS; } return -1; } @@ -219,6 +228,8 @@ public static String featureTypeToServerString(int type) { return "app_icons"; case PREMIUM_FEATURE_EMOJI_STATUS: return "emoji_status"; + case PREMIUM_FEATURE_TRANSLATIONS: + return "translations"; } return null; } @@ -234,7 +245,7 @@ public PremiumPreviewFragment(String source) { } { - tiersGradientTools = new PremiumGradient.GradientTools(Theme.key_premiumGradient1, Theme.key_premiumGradient2, null, null); + tiersGradientTools = new PremiumGradient.PremiumGradientTools(Theme.key_premiumGradient1, Theme.key_premiumGradient2, null, null); tiersGradientTools.exactly = true; tiersGradientTools.x1 = 0; tiersGradientTools.y1 = 0f; @@ -244,6 +255,11 @@ public PremiumPreviewFragment(String source) { tiersGradientTools.cy = 0; } + public 
PremiumPreviewFragment setSelectAnnualByDefault() { + this.selectAnnualByDefault = true; + return this; + } + @SuppressLint("NotifyDataSetChanged") @Override public View createView(Context context) { @@ -286,8 +302,8 @@ public View createView(Context context) { @Override public boolean dispatchTouchEvent(MotionEvent ev) { - float iconX = backgroundView.getX() + backgroundView.imageView.getX(); - float iconY = backgroundView.getY() + backgroundView.imageView.getY(); + float iconX = backgroundView.getX() + backgroundView.imageFrameLayout.getX(); + float iconY = backgroundView.getY() + backgroundView.imageFrameLayout.getY(); AndroidUtilities.rectTmp.set(iconX, iconY, iconX + backgroundView.imageView.getMeasuredWidth(), iconY + backgroundView.imageView.getMeasuredHeight()); if ((AndroidUtilities.rectTmp.contains(ev.getX(), ev.getY()) || iconInterceptedTouch) && !listView.scrollingByUser) { ev.offsetLocation(-iconX, -iconY); @@ -330,7 +346,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { } backgroundView.measure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED)); particlesView.getLayoutParams().height = backgroundView.getMeasuredHeight(); - int buttonHeight = (getUserConfig().isPremium() || forcePremium ? 0 : AndroidUtilities.dp(68)); + int buttonHeight = (buttonContainer == null || buttonContainer.getVisibility() == View.GONE ? 0 : AndroidUtilities.dp(68)); layoutManager.setAdditionalHeight(buttonHeight + statusBarHeight - AndroidUtilities.dp(16)); layoutManager.setMinimumLastViewHeight(buttonHeight); super.onMeasure(widthMeasureSpec, heightMeasureSpec); @@ -415,13 +431,13 @@ protected void dispatchDraw(Canvas canvas) { backgroundView.tierListView.setAlpha(alpha); particlesView.setAlpha(1f - totalProgress); - particlesView.setTranslationY(-(particlesView.getMeasuredHeight() - backgroundView.imageView.getMeasuredWidth()) / 2f + backgroundView.getY() + backgroundView.imageView.getY()); + particlesView.setTranslationY(-(particlesView.getMeasuredHeight() - backgroundView.imageView.getMeasuredWidth()) / 2f + backgroundView.getY() + backgroundView.imageFrameLayout.getY()); float toX = AndroidUtilities.dp(72) - backgroundView.titleView.getLeft(); float f = totalProgress > 0.3f ? (totalProgress - 0.3f) / 0.7f : 0f; backgroundView.titleView.setTranslationX(toX * (1f - CubicBezierInterpolator.EASE_OUT_QUINT.getInterpolation(1 - f))); - backgroundView.imageView.mRenderer.gradientStartX = (backgroundView.getX() + backgroundView.imageView.getX() + getMeasuredWidth() * 0.1f * progress) / getMeasuredWidth(); - backgroundView.imageView.mRenderer.gradientStartY = (backgroundView.getY() + backgroundView.imageView.getY()) / getMeasuredHeight(); + backgroundView.imageView.mRenderer.gradientStartX = (backgroundView.getX() + backgroundView.imageFrameLayout.getX() + getMeasuredWidth() * 0.1f * progress) / getMeasuredWidth(); + backgroundView.imageView.mRenderer.gradientStartY = (backgroundView.getY() + backgroundView.imageFrameLayout.getY()) / getMeasuredHeight(); if (!isDialogVisible) { invalidate(); @@ -511,8 +527,10 @@ public boolean onInterceptTouchEvent(MotionEvent ev) { // if (subscriptionTiers.isEmpty()) { // return; // } -// showDialog(new PremiumFeatureBottomSheet(PremiumPreviewFragment.this, cell.data.type, false, subscriptionTiers.get(selectedTierIndex))); // } + + SubscriptionTier tier = selectedTierIndex < 0 || selectedTierIndex >= subscriptionTiers.size() ? 
null : subscriptionTiers.get(selectedTierIndex); + showDialog(new PremiumFeatureBottomSheet(PremiumPreviewFragment.this, cell.data.type, false, tier)); } }); contentView.addView(listView); @@ -583,6 +601,7 @@ public static void fillPremiumFeaturesList(ArrayList premium premiumFeatures.add(new PremiumFeatureData(PREMIUM_FEATURE_ANIMATED_AVATARS, R.drawable.msg_premium_avatar, LocaleController.getString("PremiumPreviewAnimatedProfiles", R.string.PremiumPreviewAnimatedProfiles), LocaleController.getString("PremiumPreviewAnimatedProfilesDescription", R.string.PremiumPreviewAnimatedProfilesDescription))); premiumFeatures.add(new PremiumFeatureData(PREMIUM_FEATURE_APPLICATION_ICONS, R.drawable.msg_premium_icons, LocaleController.getString("PremiumPreviewAppIcon", R.string.PremiumPreviewAppIcon), LocaleController.getString("PremiumPreviewAppIconDescription", R.string.PremiumPreviewAppIconDescription))); premiumFeatures.add(new PremiumFeatureData(PREMIUM_FEATURE_EMOJI_STATUS, R.drawable.msg_premium_status, LocaleController.getString("PremiumPreviewEmojiStatus", R.string.PremiumPreviewEmojiStatus), LocaleController.getString("PremiumPreviewEmojiStatusDescription", R.string.PremiumPreviewEmojiStatusDescription))); + premiumFeatures.add(new PremiumFeatureData(PREMIUM_FEATURE_TRANSLATIONS, R.drawable.msg_premium_translate, LocaleController.getString("PremiumPreviewTranslations", R.string.PremiumPreviewTranslations), LocaleController.getString("PremiumPreviewTranslationsDescription", R.string.PremiumPreviewTranslationsDescription))); if (messagesController.premiumFeaturesTypesToPosition.size() > 0) { for (int i = 0; i < premiumFeatures.size(); i++) { @@ -617,10 +636,23 @@ private void checkButtonDivider() { } public static void buyPremium(BaseFragment fragment, String source) { - buyPremium(fragment, null, source); + buyPremium(fragment, null, source, true); + } + + public static void buyPremium(BaseFragment fragment, String source, boolean forcePremium) { + buyPremium(fragment, null, source, forcePremium); } public static void buyPremium(BaseFragment fragment, SubscriptionTier tier, String source) { + buyPremium(fragment, tier, source, true); + } + + public static void buyPremium(BaseFragment fragment, SubscriptionTier tier, String source, boolean forcePremium) { + buyPremium(fragment, tier, source, forcePremium, null); + } + +// public static void buyPremium(BaseFragment fragment, SubscriptionTier tier, String source, boolean forcePremium, BillingFlowParams.SubscriptionUpdateParams updateParams) { + public static void buyPremium(BaseFragment fragment, SubscriptionTier tier, String source, boolean forcePremium, Object updateParams) { if (BuildVars.IS_BILLING_UNAVAILABLE) { fragment.showDialog(new PremiumNotAvailableBottomSheet(fragment)); return; @@ -660,11 +692,8 @@ private void updateRows() { featuresEndRow = rowCount; statusRow = rowCount++; lastPaddingRow = rowCount++; - if (getUserConfig().isPremium() || forcePremium) { - buttonContainer.setVisibility(View.GONE); - } else { - buttonContainer.setVisibility(View.VISIBLE); - } + + AndroidUtilities.updateViewVisibilityAnimated(buttonContainer, !getUserConfig().isPremium() || currentSubscriptionTier != null && currentSubscriptionTier.getMonths() < subscriptionTiers.get(selectedTierIndex).getMonths() && !forcePremium, 1f, false); int buttonHeight = buttonContainer.getVisibility() == View.VISIBLE ? 
AndroidUtilities.dp(64) : 0; layoutManager.setAdditionalHeight(buttonHeight + statusBarHeight - AndroidUtilities.dp(16)); @@ -708,6 +737,7 @@ public void onFragmentDestroy() { public void didReceivedNotification(int id, int account, Object... args) { if (id == NotificationCenter.billingProductDetailsUpdated || id == NotificationCenter.premiumPromoUpdated) { updateButtonText(false); + backgroundView.updatePremiumTiers(); } if (id == NotificationCenter.currentUserPremiumStatusChanged || id == NotificationCenter.premiumPromoUpdated) { backgroundView.updateText(); @@ -908,6 +938,7 @@ private class BackgroundView extends LinearLayout { TextView titleView; private final TextView subtitleView; + private final FrameLayout imageFrameLayout; private final GLIconTextureView imageView; private RecyclerListView tierListView; @@ -915,6 +946,8 @@ private class BackgroundView extends LinearLayout { public BackgroundView(Context context) { super(context); setOrientation(VERTICAL); + imageFrameLayout = new FrameLayout(context); + addView(imageFrameLayout, LayoutHelper.createLinear(190, 190, Gravity.CENTER_HORIZONTAL)); imageView = new GLIconTextureView(context, GLIconRenderer.FRAGMENT_STYLE) { @Override public void onLongPress() { @@ -937,7 +970,9 @@ public void onLongPress() { settingsView.animate().translationY(1).setDuration(300); } }; - addView(imageView, LayoutHelper.createLinear(190, 190, Gravity.CENTER_HORIZONTAL)); + imageFrameLayout.addView(imageView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + imageFrameLayout.setClipChildren(false); + setClipChildren(false); titleView = new TextView(context); titleView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 22); @@ -1006,7 +1041,7 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi @Override public boolean isEnabled(RecyclerView.ViewHolder holder) { - return true; + return !subscriptionTiers.get(holder.getAdapterPosition()).subscriptionOption.current; } @Override @@ -1015,6 +1050,9 @@ public int getItemCount() { } }); tierListView.setOnItemClickListener((view, position) -> { + if (!view.isEnabled()) { + return; + } if (view instanceof PremiumTierCell) { PremiumTierCell tierCell = (PremiumTierCell) view; selectedTierIndex = subscriptionTiers.indexOf(tierCell.getTier()); @@ -1060,6 +1098,8 @@ public int getItemCount() { } } } + + AndroidUtilities.updateViewVisibilityAnimated(buttonContainer, !getUserConfig().isPremium() || currentSubscriptionTier != null && currentSubscriptionTier.getMonths() < subscriptionTiers.get(selectedTierIndex).getMonths() && !forcePremium); } }); Path path = new Path(); @@ -1083,8 +1123,8 @@ public int getItemCount() { }); addView(tierListView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 12, 16, 12, 0)); - updateText(); updatePremiumTiers(); + updateText(); } private void measureGradient(int w, int h) { @@ -1102,11 +1142,25 @@ private void measureGradient(int w, int h) { @SuppressLint("NotifyDataSetChanged") public void updatePremiumTiers() { subscriptionTiers.clear(); + selectedTierIndex = -1; + currentSubscriptionTier = null; long pricePerYearMax = 0; if (getMediaDataController().getPremiumPromo() != null) { for (TLRPC.TL_premiumSubscriptionOption option : getMediaDataController().getPremiumPromo().period_options) { + if (getUserConfig().isPremium() && !option.can_purchase_upgrade && !option.current) { + continue; + } + SubscriptionTier subscriptionTier = new SubscriptionTier(option); subscriptionTiers.add(subscriptionTier); + if 
(selectAnnualByDefault) { + if (option.months == 12) { + selectedTierIndex = subscriptionTiers.size() - 1; + } + } + if (option.current) { + currentSubscriptionTier = subscriptionTier; + } if (BuildVars.useInvoiceBilling()) { if (subscriptionTier.getPricePerYear() > pricePerYearMax) { pricePerYearMax = subscriptionTier.getPricePerYear(); @@ -1114,42 +1168,121 @@ public void updatePremiumTiers() { } } } + if (BuildVars.useInvoiceBilling() && getUserConfig().isPremium()) { + subscriptionTiers.clear(); + currentSubscriptionTier = null; + } else if (!BuildVars.useInvoiceBilling() && currentSubscriptionTier != null && !Objects.equals(BillingController.getInstance().getLastPremiumTransaction(), + currentSubscriptionTier.subscriptionOption != null ? currentSubscriptionTier.subscriptionOption.transaction != null ? + currentSubscriptionTier.subscriptionOption.transaction.replaceAll(TRANSACTION_PATTERN, "$1") : null : null) || + currentSubscriptionTier != null && currentSubscriptionTier.getMonths() == 12) { + subscriptionTiers.clear(); + currentSubscriptionTier = null; + } + if (BuildVars.useInvoiceBilling()) { for (SubscriptionTier tier : subscriptionTiers) { tier.setPricePerYearRegular(pricePerYearMax); } } // NekoX: remove Google billing - for (int i = 0; i < subscriptionTiers.size(); i++) { - SubscriptionTier tier = subscriptionTiers.get(i); - if (tier.getMonths() == 1) { - selectedTierIndex = i; - break; + if (selectedTierIndex == -1) { + for (int i = 0; i < subscriptionTiers.size(); i++) { + SubscriptionTier tier = subscriptionTiers.get(i); + if (tier.getMonths() == 12) { + selectedTierIndex = i; + break; + } + } + if (selectedTierIndex == -1) { + selectedTierIndex = 0; } } updateButtonText(false); tierListView.getAdapter().notifyDataSetChanged(); } + private boolean setTierListViewVisibility; + private boolean tierListViewVisible; public void updateText() { titleView.setText(LocaleController.getString(forcePremium ? R.string.TelegramPremiumSubscribedTitle : R.string.TelegramPremium)); subtitleView.setText(AndroidUtilities.replaceTags(LocaleController.getString(getUserConfig().isPremium() || forcePremium ? R.string.TelegramPremiumSubscribedSubtitle : R.string.TelegramPremiumSubtitle))); - tierListView.setVisibility(getUserConfig().isPremium() || forcePremium || BuildVars.IS_BILLING_UNAVAILABLE || IS_PREMIUM_TIERS_UNAVAILABLE ? GONE : VISIBLE); + boolean tierNotVisible = forcePremium || BuildVars.IS_BILLING_UNAVAILABLE || IS_PREMIUM_TIERS_UNAVAILABLE || subscriptionTiers.size() <= 1; + if (!setTierListViewVisibility || !tierNotVisible) { + tierListView.setVisibility(tierNotVisible ? 
GONE : VISIBLE); + setTierListViewVisibility = true; + } else if (tierListView.getVisibility() == VISIBLE && tierNotVisible && tierListViewVisible == tierNotVisible) { + View v = tierListView; + ValueAnimator animator = ValueAnimator.ofFloat(1, 0).setDuration(250); + animator.addUpdateListener(animation -> { + float val = (float) animation.getAnimatedValue(); + v.setAlpha(val); + v.setScaleX(val); + v.setScaleY(val); + + float f = animator.getAnimatedFraction(); + for (int i = 0; i < backgroundView.getChildCount(); i++) { + View ch = backgroundView.getChildAt(i); + if (ch != tierListView) { + float offset = 0; + if (ch == imageFrameLayout) { + offset -= AndroidUtilities.dp(15) * f; + } else { + offset += AndroidUtilities.dp(8) * f; + } + ch.setTranslationY(f * v.getMeasuredHeight() + offset); + } + } + }); + animator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + v.setVisibility(GONE); + + for (int i = 0; i < backgroundView.getChildCount(); i++) { + View ch = backgroundView.getChildAt(i); + if (ch != tierListView) { + ch.setTranslationY(0); + } + } + } + }); + animator.setInterpolator(CubicBezierInterpolator.DEFAULT); + animator.start(); + } + tierListViewVisible = !tierNotVisible; } } private void updateButtonText(boolean animated) { - if (premiumButtonView == null) { + if (premiumButtonView == null || getUserConfig().isPremium() && currentSubscriptionTier != null && subscriptionTiers.get(selectedTierIndex).getMonths() < currentSubscriptionTier.getMonths()) { return; } + if (LocaleController.isRTL) { + animated = false; + } if (BuildVars.IS_BILLING_UNAVAILABLE) { - premiumButtonView.setButton(getPremiumButtonText(currentAccount), v -> buyPremium(this, subscriptionTiers.get(selectedTierIndex), "settings"), animated); + premiumButtonView.setButton(getPremiumButtonText(currentAccount, subscriptionTiers.get(selectedTierIndex)), v -> buyPremium(this), animated); return; } - if (!subscriptionTiers.isEmpty()) { - premiumButtonView.setButton(getPremiumButtonText(currentAccount, subscriptionTiers.get(selectedTierIndex)), v -> buyPremium(this, subscriptionTiers.get(selectedTierIndex), "settings"), animated); - premiumButtonView.setFlickerDisabled(false); + if (!BuildVars.useInvoiceBilling() && (!BillingController.getInstance().isReady() || subscriptionTiers.isEmpty() || selectedTierIndex >= subscriptionTiers.size() || subscriptionTiers.get(selectedTierIndex).googlePlayProductDetails == null)) { + premiumButtonView.setButton(LocaleController.getString(R.string.Loading), v -> {}, animated); + premiumButtonView.setFlickerDisabled(true); + return; } +// if (!subscriptionTiers.isEmpty()) { +// premiumButtonView.setButton(getPremiumButtonText(currentAccount, subscriptionTiers.get(selectedTierIndex)), v -> { +// SubscriptionTier tier = subscriptionTiers.get(selectedTierIndex); +// BillingFlowParams.SubscriptionUpdateParams updateParams = null; +// if (currentSubscriptionTier != null && currentSubscriptionTier.subscriptionOption != null && currentSubscriptionTier.subscriptionOption.transaction != null) { +// updateParams = BillingFlowParams.SubscriptionUpdateParams.newBuilder() +// .setOldPurchaseToken(BillingController.getInstance().getLastPremiumToken()) +// .setReplaceProrationMode(BillingFlowParams.ProrationMode.IMMEDIATE_AND_CHARGE_FULL_PRICE) +// .build(); +// } +// buyPremium(this, tier, "settings", true, updateParams); +// }, animated); +// premiumButtonView.setFlickerDisabled(false); +// } } @Override @@ -1261,11 +1394,17 @@ public 
static void sentShowScreenStat(String source) { event.data = data; TLRPC.TL_jsonObjectValue sourceObj = new TLRPC.TL_jsonObjectValue(); - TLRPC.TL_jsonString jsonString = new TLRPC.TL_jsonString(); - jsonString.value = source; + TLRPC.JSONValue sourceVal; + if (source != null) { + TLRPC.TL_jsonString jsonString = new TLRPC.TL_jsonString(); + jsonString.value = source; + sourceVal = jsonString; + } else { + sourceVal = new TLRPC.TL_jsonNull(); + } sourceObj.key = "source"; - sourceObj.value = jsonString; + sourceObj.value = sourceVal; data.value.add(sourceObj); req.events.add(event); @@ -1309,13 +1448,17 @@ public static void sentShowFeaturePreview(int currentAccount, int type) { TLRPC.TL_jsonObject data = new TLRPC.TL_jsonObject(); event.data = data; TLRPC.TL_jsonObjectValue item = new TLRPC.TL_jsonObjectValue(); - TLRPC.TL_jsonString jsonString = new TLRPC.TL_jsonString(); - jsonString.value = PremiumPreviewFragment.featureTypeToServerString(type); + String value = PremiumPreviewFragment.featureTypeToServerString(type); + if (value != null) { + TLRPC.TL_jsonString jsonString = new TLRPC.TL_jsonString(); + jsonString.value = value; + item.value = jsonString; + } else { + item.value = new TLRPC.TL_jsonNull(); + } item.key = "item"; - item.value = jsonString; data.value.add(item); req.events.add(event); - event.data = data; ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PrivacyControlActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PrivacyControlActivity.java index a0440988a1..1b919d33fe 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PrivacyControlActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PrivacyControlActivity.java @@ -148,7 +148,7 @@ public class PrivacyControlActivity extends BaseFragment implements Notification private TLRPC.Photo avatarForRestPhoto; @Override - public void didUploadPhoto(TLRPC.InputFile photo, TLRPC.InputFile video, double videoStartTimestamp, String videoPath, TLRPC.PhotoSize bigSize, TLRPC.PhotoSize smallSize, boolean isVideo) { + public void didUploadPhoto(TLRPC.InputFile photo, TLRPC.InputFile video, double videoStartTimestamp, String videoPath, TLRPC.PhotoSize bigSize, TLRPC.PhotoSize smallSize, boolean isVideo, TLRPC.VideoSize emojiMarkup) { AndroidUtilities.runOnUIThread(() -> { avatarForRest = smallSize; avatarForRestPhoto = null; @@ -165,6 +165,10 @@ public void didUploadPhoto(TLRPC.InputFile photo, TLRPC.InputFile video, double req.video_start_ts = videoStartTimestamp; req.flags |= 4; } + if (emojiMarkup != null) { + req.video_emoji_markup = emojiMarkup; + req.flags |= 16; + } req.fallback = true; req.flags |= 8; @@ -389,7 +393,7 @@ public PrivacyControlActivity(int type, boolean load) { ContactsController.getInstance(currentAccount).loadPrivacySettings(); } if (rulesType == PRIVACY_RULES_TYPE_PHOTO) { - imageUpdater = new ImageUpdater(false); + imageUpdater = new ImageUpdater(false, ImageUpdater.FOR_TYPE_USER, true); imageUpdater.parentFragment = this; imageUpdater.setDelegate(this); TLRPC.UserFull userFull = getMessagesController().getUserFull(getUserConfig().clientUserId); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PrivacySettingsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PrivacySettingsActivity.java index b569de5eef..b42cabfab1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PrivacySettingsActivity.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/PrivacySettingsActivity.java @@ -88,7 +88,7 @@ public class PrivacySettingsActivity extends BaseFragment implements Notificatio private int sessionsRow; private int passcodeRow; private int autoDeleteMesages; - private int autoDeleteDetailRow; + private int sessionsDetailRow; private int newChatsHeaderRow; private int newChatsRow; private int newChatsSectionRow; @@ -335,12 +335,11 @@ public boolean supportsPredictiveItemAnimations() { presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_FORWARDS)); } else if (position == voicesRow) { if (!getUserConfig().isPremium()) { - if (!NekoConfig.disableVibration.Bool()) { - try { - fragmentView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception e) { - FileLog.e(e); - } + try { + if (!NekoConfig.disableVibration.Bool()) + fragmentView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } catch (Exception e) { + FileLog.e(e); } BulletinFactory.of(this).createRestrictVoiceMessagesPremiumBulletin().show(); return; @@ -372,11 +371,12 @@ public boolean supportsPredictiveItemAnimations() { layout.textView.setText(LocaleController.getString(R.string.YourLoginEmailChangedSuccess)); int duration = Bulletin.DURATION_SHORT; Bulletin.make(PrivacySettingsActivity.this, layout, duration).show(); - if (!NekoConfig.disableVibration.Bool()) { - try { - fragmentView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignored) {} - } + + try { + if (!NekoConfig.disableVibration.Bool()) + fragmentView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } catch (Exception ignored) {} + loadPasswordSettings(); }))) .setNegativeButton(LocaleController.getString(R.string.Cancel), null) @@ -436,7 +436,7 @@ public boolean supportsPredictiveItemAnimations() { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } else if (position == contactsSuggestRow) { final TextCheckCell cell = (TextCheckCell) view; @@ -460,7 +460,7 @@ public boolean supportsPredictiveItemAnimations() { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } else { cell.setChecked(newSuggest = true); @@ -548,7 +548,7 @@ public boolean supportsPredictiveItemAnimations() { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } }); builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); @@ -558,7 +558,7 @@ public boolean supportsPredictiveItemAnimations() { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + 
button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } else if (position == passportRow) { presentFragment(new PassportActivity(PassportActivity.TYPE_PASSWORD, 0, "", "", null, null, null, null, null)); @@ -606,16 +606,15 @@ private void updateRows(boolean notify) { rowCount = 0; securitySectionRow = rowCount++; - blockedRow = rowCount++; - sessionsRow = rowCount++; - passcodeRow = rowCount++; passwordRow = rowCount++; - + autoDeleteMesages = rowCount++; + passcodeRow = rowCount++; if (currentPassword != null ? currentPassword.login_email_pattern != null : SharedConfig.hasEmailLogin) { emailLoginRow = rowCount++; } else { emailLoginRow = -1; } + blockedRow = rowCount++; if (currentPassword != null) { boolean hasEmail = currentPassword.login_email_pattern != null; if (SharedConfig.hasEmailLogin != hasEmail) { @@ -623,8 +622,8 @@ private void updateRows(boolean notify) { SharedConfig.saveConfig(); } } - autoDeleteMesages = rowCount++; - autoDeleteDetailRow = rowCount++; + sessionsRow = rowCount++; + sessionsDetailRow = rowCount++; privacySectionRow = rowCount++; phoneNumberRow = rowCount++; @@ -1026,21 +1025,18 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { break; case 1: TextInfoPrivacyCell privacyCell = (TextInfoPrivacyCell) holder.itemView; + boolean last = position == getItemCount() - 1; + privacyCell.setBackground(Theme.getThemedDrawable(mContext, last ? R.drawable.greydivider_bottom : R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); if (position == deleteAccountDetailRow) { privacyCell.setText(LocaleController.getString("DeleteAccountHelp", R.string.DeleteAccountHelp)); - privacyCell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); } else if (position == groupsDetailRow) { privacyCell.setText(LocaleController.getString("GroupsAndChannelsHelp", R.string.GroupsAndChannelsHelp)); - privacyCell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); - } else if (position == autoDeleteDetailRow) { - privacyCell.setText(LocaleController.getString("AutoDeleteSettingsInfo", R.string.AutoDeleteSettingsInfo)); - privacyCell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); + } else if (position == sessionsDetailRow) { + privacyCell.setText(LocaleController.getString("SessionsSettingsInfo", R.string.SessionsSettingsInfo)); } else if (position == secretDetailRow) { privacyCell.setText(LocaleController.getString("SecretWebPageInfo", R.string.SecretWebPageInfo)); - privacyCell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); } else if (position == botsDetailRow) { privacyCell.setText(LocaleController.getString("PrivacyBotsInfo", R.string.PrivacyBotsInfo)); - privacyCell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); } else if (position == contactsDetailRow) { /*if (newSync) { privacyCell.setText(LocaleController.getString("SyncContactsInfoOn", R.string.SyncContactsInfoOn)); @@ -1048,10 +1044,8 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { privacyCell.setText(LocaleController.getString("SyncContactsInfoOff", R.string.SyncContactsInfoOff)); }*/ privacyCell.setText(LocaleController.getString("SuggestContactsInfo", R.string.SuggestContactsInfo)); - 
privacyCell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); } else if (position == newChatsSectionRow) { privacyCell.setText(LocaleController.getString("ArchiveAndMuteInfo", R.string.ArchiveAndMuteInfo)); - privacyCell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); } break; case 2: @@ -1091,6 +1085,7 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { showLoading = false; loadingLen = 16; value = null; + textCell2.setPrioritizeTitleOverValue(false); if (position == autoDeleteMesages) { int ttl = getUserConfig().getGlobalTTl(); if (ttl == -1) { @@ -1100,23 +1095,39 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } else { value = LocaleController.getString("PasswordOff", R.string.PasswordOff); } - textCell2.setTextAndValueAndIcon(LocaleController.getString("AutoDeleteMessages", R.string.AutoDeleteMessages), value, R.drawable.msg_autodelete, false); + textCell2.setTextAndValueAndIcon(LocaleController.getString("AutoDeleteMessages", R.string.AutoDeleteMessages), value, true, R.drawable.msg2_autodelete, true); } else if (position == sessionsRow) { String count = ""; - if (sessionsActivityPreload.getSessionsCount() == 0) { if (getMessagesController().lastKnownSessionsCount == 0) { showLoading = true; } else { - count = Integer.toString(getMessagesController().lastKnownSessionsCount); + count = String.format(LocaleController.getInstance().getCurrentLocale(), "%d", getMessagesController().lastKnownSessionsCount); } } else { - count = Integer.toString(sessionsActivityPreload.getSessionsCount()); + count = String.format(LocaleController.getInstance().getCurrentLocale(), "%d", sessionsActivityPreload.getSessionsCount()); } getMessagesController().lastKnownSessionsCount = sessionsActivityPreload.getSessionsCount(); - textCell2.setTextAndValueAndIcon(LocaleController.getString("SessionsTitle", R.string.SessionsTitle), count, R.drawable.msg_devices, true); + textCell2.setTextAndValueAndIcon(LocaleController.getString("SessionsTitle", R.string.SessionsTitle), count, true, R.drawable.msg2_devices, false); } else if (position == emailLoginRow) { - textCell2.setTextAndValueAndIcon(LocaleController.getString(R.string.EmailLogin), "", R.drawable.msg_email, true); + CharSequence val = ""; + if (currentPassword == null) { + showLoading = true; + } else { + SpannableStringBuilder spannable = SpannableStringBuilder.valueOf(currentPassword.login_email_pattern); + int startIndex = currentPassword.login_email_pattern.indexOf('*'); + int endIndex = currentPassword.login_email_pattern.lastIndexOf('*'); + if (startIndex != endIndex && startIndex != -1 && endIndex != -1) { + TextStyleSpan.TextStyleRun run = new TextStyleSpan.TextStyleRun(); + run.flags |= TextStyleSpan.FLAG_STYLE_SPOILER; + run.start = startIndex; + run.end = endIndex + 1; + spannable.setSpan(new TextStyleSpan(run), startIndex, endIndex + 1, 0); + } + val = spannable; + } + textCell2.setPrioritizeTitleOverValue(true); + textCell2.setTextAndSpoilersValueAndIcon(LocaleController.getString(R.string.EmailLogin), val, R.drawable.msg2_email, true); } else if (position == passwordRow) { value = ""; if (currentPassword == null) { @@ -1126,24 +1137,28 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } else { value = LocaleController.getString("PasswordOff", R.string.PasswordOff); } - 
textCell2.setTextAndValueAndIcon(LocaleController.getString("TwoStepVerification", R.string.TwoStepVerification), value, R.drawable.msg_secret, true); + textCell2.setTextAndValueAndIcon(LocaleController.getString("TwoStepVerification", R.string.TwoStepVerification), value, true, R.drawable.msg2_permissions, true); } else if (position == passcodeRow) { + int icon; if (SharedConfig.passcodeHash.length() != 0) { value = LocaleController.getString("PasswordOn", R.string.PasswordOn); + icon = R.drawable.msg2_secret; } else { value = LocaleController.getString("PasswordOff", R.string.PasswordOff); + icon = R.drawable.msg2_secret; } - textCell2.setTextAndValueAndIcon(LocaleController.getString("Passcode", R.string.Passcode), value, R.drawable.msg_permissions, true); + textCell2.setTextAndValueAndIcon(LocaleController.getString("Passcode", R.string.Passcode), value, true, icon, true); } else if (position == blockedRow) { int totalCount = getMessagesController().totalBlockedCount; if (totalCount == 0) { - textCell2.setTextAndValueAndIcon(LocaleController.getString("BlockedUsers", R.string.BlockedUsers), LocaleController.getString("BlockedEmpty", R.string.BlockedEmpty), R.drawable.msg_block2, true); + value = LocaleController.getString("BlockedEmpty", R.string.BlockedEmpty); } else if (totalCount > 0) { - textCell2.setTextAndValueAndIcon(LocaleController.getString("BlockedUsers", R.string.BlockedUsers), String.format("%d", totalCount), R.drawable.msg_block2, true); + value = String.format(LocaleController.getInstance().getCurrentLocale(), "%d", totalCount); } else { showLoading = true; - textCell2.setTextAndValueAndIcon(LocaleController.getString("BlockedUsers", R.string.BlockedUsers), "", R.drawable.msg_block2, true); + value = ""; } + textCell2.setTextAndValueAndIcon(LocaleController.getString("BlockedUsers", R.string.BlockedUsers), value, true, R.drawable.msg2_block2, true); } textCell2.setDrawLoading(showLoading, loadingLen, animated); break; @@ -1156,7 +1171,7 @@ public int getItemViewType(int position) { position == deleteAccountRow || position == webSessionsRow || position == groupsRow || position == paymentsClearRow || position == secretMapRow || position == contactsDeleteRow) { return 0; - } else if (position == deleteAccountDetailRow || position == groupsDetailRow || position == autoDeleteDetailRow || position == secretDetailRow || position == botsDetailRow || position == contactsDetailRow || position == newChatsSectionRow) { + } else if (position == deleteAccountDetailRow || position == groupsDetailRow || position == sessionsDetailRow || position == secretDetailRow || position == botsDetailRow || position == contactsDetailRow || position == newChatsSectionRow) { return 1; } else if (position == securitySectionRow || position == advancedSectionRow || position == privacySectionRow || position == secretSectionRow || position == botsSectionRow || position == contactsSectionRow || position == newChatsHeaderRow) { return 2; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java index d50a5bffdf..9f5c7ce65d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java @@ -17,12 +17,15 @@ import android.animation.ObjectAnimator; import android.animation.ValueAnimator; import android.app.Activity; +import android.app.ActivityManager; import android.app.Dialog; import android.content.ActivityNotFoundException; import 
android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; +import android.content.pm.ConfigurationInfo; +import android.content.pm.PackageInfo; import android.content.pm.PackageManager; import android.content.res.Configuration; import android.database.DataSetObserver; @@ -62,7 +65,6 @@ import android.view.View; import android.view.ViewGroup; import android.view.ViewTreeObserver; -import android.view.WindowManager; import android.view.accessibility.AccessibilityNodeInfo; import android.view.animation.AccelerateInterpolator; import android.view.animation.DecelerateInterpolator; @@ -80,6 +82,7 @@ import androidx.annotation.RequiresApi; import androidx.collection.LongSparseArray; import androidx.core.content.ContextCompat; +import androidx.core.content.FileProvider; import androidx.core.graphics.ColorUtils; import androidx.core.math.MathUtils; import androidx.core.view.NestedScrollingParent3; @@ -190,6 +193,7 @@ import org.telegram.ui.Components.HintView; import org.telegram.ui.Components.IdenticonDrawable; import org.telegram.ui.Components.ImageUpdater; +import org.telegram.ui.Components.InstantCameraView; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.LinkSpanDrawable; import org.telegram.ui.Components.Premium.PremiumFeatureBottomSheet; @@ -208,11 +212,17 @@ import org.telegram.ui.Components.SizeNotifierFrameLayout; import org.telegram.ui.Components.StickerEmptyView; import org.telegram.ui.Components.TimerDrawable; +import org.telegram.ui.Components.TranslateAlert2; import org.telegram.ui.Components.TypefaceSpan; import org.telegram.ui.Components.UndoView; +import org.telegram.ui.Components.VectorAvatarThumbDrawable; import org.telegram.ui.Components.voip.VoIPHelper; +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -224,6 +234,9 @@ import java.util.Locale; import java.util.Set; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; import java.util.concurrent.atomic.AtomicReference; import cn.hutool.core.thread.ThreadUtil; @@ -323,6 +336,7 @@ public void setAlpha(int a) { private int lastMeasuredContentHeight; private int listContentHeight; private boolean openingAvatar; + private boolean fragmentViewAttached; private boolean doNotSetForeground; @@ -483,6 +497,7 @@ public void setAlpha(int a) { private int dataRow; private int chatRow; private int filtersRow; + private int liteModeRow; private int stickersRow; private int devicesRow; private int devicesSectionRow; @@ -658,6 +673,7 @@ public static class AvatarImageView extends BackupImageView { private ImageReceiver foregroundImageReceiver; private float foregroundAlpha; private ImageReceiver.BitmapHolder drawableHolder; + boolean drawForeground = true; ProfileGalleryView avatarsViewPager; @@ -738,11 +754,11 @@ public void setRoundRadius(int value) { @Override protected void onDraw(Canvas canvas) { ImageReceiver imageReceiver = animatedEmojiDrawable != null ? 
animatedEmojiDrawable.getImageReceiver() : this.imageReceiver; - if (imageReceiver != null && foregroundAlpha < 1f) { + if (imageReceiver != null && (foregroundAlpha < 1f || !drawForeground)) { imageReceiver.setImageCoords(0, 0, getMeasuredWidth(), getMeasuredHeight()); imageReceiver.draw(canvas); } - if (foregroundAlpha > 0f) { + if (foregroundAlpha > 0f && drawForeground) { if (foregroundImageReceiver.getDrawable() != null) { foregroundImageReceiver.setImageCoords(0, 0, getMeasuredWidth(), getMeasuredHeight()); foregroundImageReceiver.setAlpha(foregroundAlpha); @@ -763,6 +779,10 @@ public void invalidate() { avatarsViewPager.invalidate(); } } + + public void drawForeground(boolean drawForeground) { + this.drawForeground = drawForeground; + } } private class TopView extends View { @@ -1120,6 +1140,18 @@ protected void onDraw(Canvas canvas) { } } + @Override + public void onDown(boolean left) { + pressedOverlayVisible[left ? 0 : 1] = true; + postInvalidateOnAnimation(); + } + + @Override + public void onRelease() { + Arrays.fill(pressedOverlayVisible, false); + postInvalidateOnAnimation(); + } + @Override public void onClick() { if (imageUpdater != null) { @@ -1161,13 +1193,27 @@ public NestedFrameLayout(Context context) { @Override public void onNestedScroll(View target, int dxConsumed, int dyConsumed, int dxUnconsumed, int dyUnconsumed, int type, int[] consumed) { - if (target == listView && sharedMediaLayoutAttached) { - RecyclerListView innerListView = sharedMediaLayout.getCurrentListView(); - int top = sharedMediaLayout.getTop(); - if (top == 0) { - consumed[1] = dyUnconsumed; - innerListView.scrollBy(0, dyUnconsumed); + try { + if (target == listView && sharedMediaLayoutAttached) { + RecyclerListView innerListView = sharedMediaLayout.getCurrentListView(); + int top = sharedMediaLayout.getTop(); + if (top == 0) { + consumed[1] = dyUnconsumed; + innerListView.scrollBy(0, dyUnconsumed); + } } + } catch (Throwable e) { + FileLog.e(e); + AndroidUtilities.runOnUIThread(() -> { + try { + RecyclerListView innerListView = sharedMediaLayout.getCurrentListView(); + if (innerListView != null && innerListView.getAdapter() != null) { + innerListView.getAdapter().notifyDataSetChanged(); + } + } catch (Throwable e2) { + + } + }); } } @@ -1542,7 +1588,7 @@ public boolean onFragmentCreate() { participantsMap = null; if (UserObject.isUserSelf(user)) { - imageUpdater = new ImageUpdater(true); + imageUpdater = new ImageUpdater(true, ImageUpdater.FOR_TYPE_USER, true); imageUpdater.setOpenWithFrontfaceCamera(true); imageUpdater.parentFragment = this; imageUpdater.setDelegate(this); @@ -1727,12 +1773,6 @@ public void onFragmentDestroy() { if (pinchToZoomHelper != null) { pinchToZoomHelper.clear(); } - - for (int a = 0; a < 2; ++a) { - if (emojiStatusDrawable[a] != null) { - emojiStatusDrawable[a].detach(); - } - } } @Override @@ -1845,7 +1885,7 @@ public void onItemClick(final int id) { showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(getThemedColor(Theme.key_dialogTextRed2)); + button.setTextColor(getThemedColor(Theme.key_dialogTextRed)); } } } @@ -1867,7 +1907,7 @@ public void onItemClick(final int id) { } else if (id == share_contact) { Bundle args = new Bundle(); args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 3); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_FORWARD); args.putString("selectAlertString", LocaleController.getString("SendContactToText", R.string.SendContactToText)); 
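Aside: the share_contact hunk just above replaces the bare dialogsType literal with DialogsActivity.DIALOGS_TYPE_FORWARD (a later hunk does the same with DIALOGS_TYPE_ADD_USERS_TO), and the DialogsActivity delegate below gains a topicsFragment parameter plus a boolean result (the added "return true;"). A rough model of that new delegate shape, with an invented interface and placeholder parameter types standing in for the real ones:

    import java.util.List;

    interface DialogsSelectionDelegateSketch {
        // returns true when the selection was handled and the picker may close itself
        boolean didSelectDialogs(Object fragment, List<Long> dialogIds,
                                 CharSequence message, boolean param, Object topicsFragment);
    }

    public class DelegateSketch {
        public static void main(String[] args) {
            DialogsSelectionDelegateSketch delegate =
                    (fragment, dids, message, param, topicsFragment) -> {
                        long did = dids.get(0); // the real code reads dids.get(0).dialogId
                        System.out.println("selected dialog " + did);
                        return true;            // the boolean result added by this patch
                    };
            delegate.didSelectDialogs(null, List.of(123L), "hello", false, null);
        }
    }
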
args.putString("selectAlertStringGroup", LocaleController.getString("SendContactToGroupText", R.string.SendContactToGroupText)); DialogsActivity fragment = new DialogsActivity(args); @@ -1895,7 +1935,7 @@ public void onItemClick(final int id) { showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(getThemedColor(Theme.key_dialogTextRed2)); + button.setTextColor(getThemedColor(Theme.key_dialogTextRed)); } } else if (id == leave_group) { leaveChatPressed(); @@ -1925,7 +1965,10 @@ public void onClick(DialogInterface dialog, int which) { } finishFragment(); - BulletinFactory.of(Bulletin.BulletinWindow.make(getContext()), resourcesProvider).createSimpleBulletin(R.raw.ic_delete, LocaleController.getPluralString("TopicsDeleted", 1)).show(); + Context context = getContext(); + if (context != null) { + BulletinFactory.of(Bulletin.BulletinWindow.make(context), resourcesProvider).createSimpleBulletin(R.raw.ic_delete, LocaleController.getPluralString("TopicsDeleted", 1)).show(); + } dialog.dismiss(); } }); @@ -1939,7 +1982,7 @@ public void onClick(DialogInterface dialog, int which) { alertDialog.show(); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } else if (id == edit_channel) { if (isTopic) { @@ -1961,12 +2004,12 @@ public void onClick(DialogInterface dialog, int which) { } Bundle args = new Bundle(); args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 2); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_ADD_USERS_TO); args.putBoolean("resetDelegate", false); args.putBoolean("closeFragment", false); // args.putString("addToGroupAlertString", LocaleController.formatString("AddToTheGroupAlertText", R.string.AddToTheGroupAlertText, UserObject.getUserName(user), "%1$s")); DialogsActivity fragment = new DialogsActivity(args); - fragment.setDelegate((fragment1, dids, message, param) -> { + fragment.setDelegate((fragment1, dids, message, param, topicsFragment) -> { long did = dids.get(0).dialogId; TLRPC.Chat chat = MessagesController.getInstance(currentAccount).getChat(-did); @@ -2011,6 +2054,7 @@ public void didChangeOwner(TLRPC.User user) { }); showDialog(builder.create()); } + return true; }); presentFragment(fragment); } else if (id == share || id == qr_code) { @@ -2224,7 +2268,25 @@ public void didChangeOwner(TLRPC.User user) { int position = avatarsViewPager.getRealPosition(); TLRPC.Photo photo = avatarsViewPager.getPhoto(position); TLRPC.UserFull userFull = getUserInfo(); - if (hasFallbackPhoto && userFull != null && userFull.fallback_photo != null && userFull.fallback_photo.id == photo.id) { + if (avatar != null && position == 0) { + imageUpdater.cancel(); + if (avatarUploadingRequest != 0) { + getConnectionsManager().cancelRequest(avatarUploadingRequest, true); + } + allowPullingDown = !AndroidUtilities.isTablet() && !isInLandscapeMode && avatarImage.getImageReceiver().hasNotThumb() && !AndroidUtilities.isAccessibilityScreenReaderEnabled(); + avatar = null; + avatarBig = null; + avatarsViewPager.scrolledByUser = true; + avatarsViewPager.removeUploadingImage(uploadingImageLocation); + avatarsViewPager.setCreateThumbFromParent(false); + updateProfileData(true); + showAvatarProgress(false, true); + getNotificationCenter().postNotificationName(NotificationCenter.updateInterfaces, 
MessagesController.UPDATE_MASK_ALL); + getNotificationCenter().postNotificationName(NotificationCenter.mainUserInfoChanged); + getUserConfig().saveConfig(true); + return; + } + if (hasFallbackPhoto && photo != null && userFull != null && userFull.fallback_photo != null && userFull.fallback_photo.id == photo.id) { userFull.fallback_photo = null; userFull.flags &= ~4194304; getMessagesStorage().updateUserInfo(userFull, true); @@ -2234,6 +2296,18 @@ public void didChangeOwner(TLRPC.User user) { setForegroundImage(true); } if (photo == null || avatarsViewPager.getRealPosition() == 0) { + TLRPC.Photo nextPhoto = avatarsViewPager.getPhoto(1); + if (nextPhoto != null) { + getUserConfig().getCurrentUser().photo =new TLRPC.TL_userProfilePhoto(); + TLRPC.PhotoSize smallSize = FileLoader.getClosestPhotoSizeWithSize(nextPhoto.sizes, 90); + TLRPC.PhotoSize bigSize = FileLoader.getClosestPhotoSizeWithSize(nextPhoto.sizes, 1000); + if (smallSize != null && bigSize != null) { + getUserConfig().getCurrentUser().photo.photo_small = smallSize.location; + getUserConfig().getCurrentUser().photo.photo_big = bigSize.location; + } + } else { + getUserConfig().getCurrentUser().photo = new TLRPC.TL_userProfilePhotoEmpty(); + } getMessagesController().deleteUserPhoto(null); } else { TLRPC.TL_inputPhoto inputPhoto = new TLRPC.TL_inputPhoto(); @@ -2262,7 +2336,7 @@ public void didChangeOwner(TLRPC.User user) { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(getThemedColor(Theme.key_dialogTextRed2)); + button.setTextColor(getThemedColor(Theme.key_dialogTextRed)); } } else if (id == add_photo) { onWriteButtonClick(); @@ -2532,6 +2606,11 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { }); wasPortrait = portrait; } + +// if (searchItem != null && qrItem != null) { +// float translation = AndroidUtilities.dp(48) * currentExpandAnimatorValue; +// // qrItem.setTranslationX(translation); +// } } @Override @@ -2678,6 +2757,28 @@ protected boolean drawChild(Canvas canvas, View child, long drawingTime) { } return super.drawChild(canvas, child, drawingTime); } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + fragmentViewAttached = true; + for (int i = 0; i < emojiStatusDrawable.length; i++) { + if (emojiStatusDrawable[i] != null) { + emojiStatusDrawable[i].attach(); + } + } + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + fragmentViewAttached = false; + for (int i = 0; i < emojiStatusDrawable.length; i++) { + if (emojiStatusDrawable[i] != null) { + emojiStatusDrawable[i].detach(); + } + } + } }; ArrayList users = chatInfo != null && chatInfo.participants != null && chatInfo.participants.participants.size() > 5 ? 
sortedUsers : null; @@ -2694,9 +2795,8 @@ protected boolean canShowSearchItem() { @Override protected void onSearchStateChanged(boolean expanded) { - if (SharedConfig.smoothKeyboard) { - AndroidUtilities.removeAdjustResize(getParentActivity(), classGuid); - } + AndroidUtilities.removeAdjustResize(getParentActivity(), classGuid); + listView.stopScroll(); avatarContainer2.setPivotY(avatarContainer.getPivotY() + avatarContainer.getMeasuredHeight() / 2f); avatarContainer2.setPivotX(avatarContainer2.getMeasuredWidth() / 2f); @@ -3044,7 +3144,6 @@ public int scrollVerticallyBy(int dy, RecyclerView.Recycler recycler, RecyclerVi cells[0].setOnClickListener(v -> { cells[0].setChecked(!cells[0].isChecked(), true); }); - builder.setCustomViewOffset(12); builder.setView(linearLayout); } @@ -3078,7 +3177,7 @@ public void onClick(DialogInterface dialog, int which) { AlertDialog dialog = builder.show(); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } if (position == settingsKeyRow) { Bundle args = new Bundle(); @@ -3190,7 +3289,9 @@ public void showCustomize() { public void toggleMute() { boolean muted = getMessagesController().isDialogMuted(did, topicId); getNotificationsController().muteDialog(did, topicId, !muted); - BulletinFactory.createMuteBulletin(ProfileActivity.this, !muted, null).show(); + if (ProfileActivity.this.fragmentView != null) { + BulletinFactory.createMuteBulletin(ProfileActivity.this, !muted, null).show(); + } updateExceptions(); if (notificationsRow >= 0 && listAdapter != null) { listAdapter.notifyItemChanged(notificationsRow); @@ -3284,6 +3385,8 @@ public void openExceptions() { presentFragment(new FiltersSetupActivity()); } else if (position == stickersRow) { presentFragment(new StickersActivity(MediaDataController.TYPE_IMAGE, null)); + } else if (position == liteModeRow) { + presentFragment(new LiteModeSettingsActivity()); } else if (position == devicesRow) { presentFragment(new SessionsActivity(0)); } else if (position == nekoRow) { @@ -3295,7 +3398,9 @@ public void openExceptions() { } else if (position == policyRow) { Browser.openUrl(getParentActivity(), "https://github.com/NekoX-Dev/NekoX/wiki/Privacy-Policy"); } else if (position == sendLogsRow) { - sendLogs(); + sendLogs(getParentActivity(), false); + } else if (position == sendLastLogsRow) { + sendLogs(getParentActivity(), true); } else if (position == clearLogsRow) { AlertDialog pro = AlertUtil.showProgress(getParentActivity()); pro.show(); @@ -3566,22 +3671,25 @@ public boolean onItemClick(View view, int position) { LocaleController.getString("DebugMenuCallSettings", R.string.DebugMenuCallSettings), null, BuildVars.DEBUG_PRIVATE_VERSION || AndroidUtilities.isStandaloneApp() ? LocaleController.getString("DebugMenuCheckAppUpdate", R.string.DebugMenuCheckAppUpdate) : null, - LocaleController.getString("DebugMenuReadAllDialogs", R.string.DebugMenuReadAllDialogs), + LocaleController.getString("DebugMenuReadAllDialogs", R.string.DebugMenuReadAllDialogs), /* No. 10 */ SharedConfig.pauseMusicOnRecord ? LocaleController.getString("DebugMenuDisablePauseMusic", R.string.DebugMenuDisablePauseMusic) : LocaleController.getString("DebugMenuEnablePauseMusic", R.string.DebugMenuEnablePauseMusic), - BuildVars.DEBUG_VERSION && !AndroidUtilities.isTablet() && Build.VERSION.SDK_INT >= 23 ? (SharedConfig.smoothKeyboard ? 
LocaleController.getString("DebugMenuDisableSmoothKeyboard", R.string.DebugMenuDisableSmoothKeyboard) : LocaleController.getString("DebugMenuEnableSmoothKeyboard", R.string.DebugMenuEnableSmoothKeyboard)) : null, + null, BuildVars.DEBUG_PRIVATE_VERSION ? (SharedConfig.disableVoiceAudioEffects ? "Enable voip audio effects" : "Disable voip audio effects") : null, // Build.VERSION.SDK_INT >= 21 ? (SharedConfig.noStatusBar ? "Show status bar background" : "Hide status bar background") : null, null, - "Scan accounts", + "Scan accounts", /* No. 15 */ BuildVars.DEBUG_PRIVATE_VERSION ? "Clean app update" : null, BuildVars.DEBUG_PRIVATE_VERSION ? "Reset suggestions" : null, "Reset all notification channels", - BuildVars.DEBUG_PRIVATE_VERSION ? LocaleController.getString(SharedConfig.forceRtmpStream ? R.string.DebugMenuDisableForceRtmpStreamFlag : R.string.DebugMenuEnableForceRtmpStreamFlag) : null, - BuildVars.DEBUG_PRIVATE_VERSION ? LocaleController.getString(R.string.DebugMenuClearWebViewCache) : null, + null, + BuildVars.DEBUG_PRIVATE_VERSION ? LocaleController.getString(R.string.DebugMenuClearWebViewCache) : null, /* No. 20 */ Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT ? LocaleController.getString(SharedConfig.debugWebView ? R.string.DebugMenuDisableWebViewDebug : R.string.DebugMenuEnableWebViewDebug) : null, (AndroidUtilities.isTabletInternal() && BuildVars.DEBUG_PRIVATE_VERSION) ? (SharedConfig.forceDisableTabletMode ? "Enable tablet mode" : "Disable tablet mode") : null, - LocaleController.getString(SharedConfig.useLNavigation ? R.string.AltNavigationDisable : R.string.AltNavigationEnable), BuildVars.DEBUG_PRIVATE_VERSION ? LocaleController.getString(SharedConfig.isFloatingDebugActive ? R.string.FloatingDebugDisable : R.string.FloatingDebugEnable) : null, + BuildVars.DEBUG_PRIVATE_VERSION ? "Force remove premium suggestions" : null, + BuildVars.DEBUG_PRIVATE_VERSION ? "Share device info" : null, /* No. 25 */ + "Force performance class", + BuildVars.DEBUG_PRIVATE_VERSION && !InstantCameraView.allowBigSizeCameraDebug() ? (!SharedConfig.bigCameraForRound ? 
"Force big camera for round" : "Disable big camera for round") : null }; builder.setItems(items, (dialog, which) -> { @@ -3606,8 +3714,8 @@ public boolean onItemClick(View view, int position) { getMessagesStorage().clearSentMedia(); SharedConfig.setNoSoundHintShowed(false); SharedPreferences.Editor editor = MessagesController.getGlobalMainSettings().edit(); - editor.remove("archivehint").remove("proximityhint").remove("archivehint_l").remove("gifhint").remove("reminderhint").remove("soundHint").remove("themehint").remove("bganimationhint").remove("filterhint").commit(); - MessagesController.getEmojiSettings(currentAccount).edit().remove("featured_hidden").commit(); + editor.remove("archivehint").remove("proximityhint").remove("archivehint_l").remove("speedhint").remove("gifhint").remove("reminderhint").remove("soundHint").remove("themehint").remove("bganimationhint").remove("filterhint").commit(); + MessagesController.getEmojiSettings(currentAccount).edit().remove("featured_hidden").remove("emoji_featured_hidden").commit(); SharedConfig.textSelectionHintShows = 0; SharedConfig.lockRecordAudioVideoHint = 0; SharedConfig.stickersReorderingHintUsed = false; @@ -3617,6 +3725,8 @@ public boolean onItemClick(View view, int position) { SharedConfig.dayNightThemeSwitchHintCount = 3; SharedConfig.fastScrollHintCount = 3; ChatThemeController.getInstance(currentAccount).clearCache(); + getNotificationCenter().postNotificationName(NotificationCenter.newSuggestionsAvailable); + RestrictedLanguagesSelectActivity.cleanup(); } else if (which == 7) { VoIPHelper.showCallDebugSettings(getParentActivity()); } else if (which == 8) { @@ -3626,12 +3736,12 @@ public boolean onItemClick(View view, int position) { } else if (which == 10) { getMessagesStorage().readAllDialogs(-1); } else if (which == 11) { - SharedConfig.togglePauseMusicOnRecord(); + SharedConfig.toggleDisableVoiceAudioEffects(); } else if (which == 12) { - SharedConfig.toggleSmoothKeyboard(); - if (SharedConfig.smoothKeyboard && getParentActivity() != null) { - getParentActivity().getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE); - } +// SharedConfig.toggleSmoothKeyboard(); +// if (SharedConfig.smoothKeyboard && getParentActivity() != null) { +// getParentActivity().getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE); +// } } else if (which == 13) { SharedConfig.toggleDisableVoiceAudioEffects(); } else if (which == 14) { @@ -3683,15 +3793,15 @@ public boolean onItemClick(View view, int position) { } else if (which == 18) { getNotificationsController().cleanupNotificationChannels(); } else if (which == 19) { - SharedConfig.toggleForceRTMPStream(); - } else if (which == 18) { +// SharedConfig.toggleForceRTMPStream(); + } else if (which == 20) { ApplicationLoader.applicationContext.deleteDatabase("webview.db"); ApplicationLoader.applicationContext.deleteDatabase("webviewCache.db"); WebStorage.getInstance().deleteAllData(); - } else if (which == 19) { + } else if (which == 21) { SharedConfig.toggleDebugWebView(); Toast.makeText(getParentActivity(), LocaleController.getString(SharedConfig.debugWebView ? 
R.string.DebugMenuWebViewDebugEnabled : R.string.DebugMenuWebViewDebugDisabled), Toast.LENGTH_SHORT).show(); - } else if (which == 20) { + } else if (which == 22) { SharedConfig.toggleForceDisableTabletMode(); Activity activity = AndroidUtilities.findActivity(context); @@ -3700,19 +3810,146 @@ public boolean onItemClick(View view, int position) { activity.finishAffinity(); // Finishes all activities. activity.startActivity(intent); // Start the launch activity System.exit(0); - } else if (which == 21) { - SharedConfig.useLNavigation = !SharedConfig.useLNavigation; - SharedConfig.saveConfig(); - getParentActivity().recreate(); - } else if (which == 22) { + } else if (which == 23) { FloatingDebugController.setActive((LaunchActivity) getParentActivity(), !FloatingDebugController.isActive()); + } else if (which == 24) { + getMessagesController().loadAppConfig(); + TLRPC.TL_help_dismissSuggestion req = new TLRPC.TL_help_dismissSuggestion(); + req.suggestion = "VALIDATE_PHONE_NUMBER"; + req.peer = new TLRPC.TL_inputPeerEmpty(); + getConnectionsManager().sendRequest(req, (response, error) -> { + TLRPC.TL_help_dismissSuggestion req2 = new TLRPC.TL_help_dismissSuggestion(); + req2.suggestion = "VALIDATE_PASSWORD"; + req2.peer = new TLRPC.TL_inputPeerEmpty(); + getConnectionsManager().sendRequest(req2, (res2, err2) -> { + getMessagesController().loadAppConfig(); + }); + }); + } else if (which == 25) { + int androidVersion = Build.VERSION.SDK_INT; + int cpuCount = ConnectionsManager.CPU_COUNT; + int memoryClass = ((ActivityManager) ApplicationLoader.applicationContext.getSystemService(Context.ACTIVITY_SERVICE)).getMemoryClass(); + long minFreqSum = 0, minFreqCount = 0; + long maxFreqSum = 0, maxFreqCount = 0; + long curFreqSum = 0, curFreqCount = 0; + long capacitySum = 0, capacityCount = 0; + StringBuilder cpusInfo = new StringBuilder(); + for (int i = 0; i < cpuCount; i++) { + Long minFreq = AndroidUtilities.getSysInfoLong("/sys/devices/system/cpu/cpu" + i + "/cpufreq/cpuinfo_min_freq"); + Long curFreq = AndroidUtilities.getSysInfoLong("/sys/devices/system/cpu/cpu" + i + "/cpufreq/cpuinfo_cur_freq"); + Long maxFreq = AndroidUtilities.getSysInfoLong("/sys/devices/system/cpu/cpu" + i + "/cpufreq/cpuinfo_max_freq"); + Long capacity = AndroidUtilities.getSysInfoLong("/sys/devices/system/cpu/cpu" + i + "/cpu_capacity"); + cpusInfo.append("#").append(i).append(" "); + if (minFreq != null) { + cpusInfo.append("min=").append(minFreq / 1000L).append(" "); + minFreqSum += (minFreq / 1000L); + minFreqCount++; + } + if (curFreq != null) { + cpusInfo.append("cur=").append(curFreq / 1000L).append(" "); + curFreqSum += (curFreq / 1000L); + curFreqCount++; + } + if (maxFreq != null) { + cpusInfo.append("max=").append(maxFreq / 1000L).append(" "); + maxFreqSum += (maxFreq / 1000L); + maxFreqCount++; + } + if (capacity != null) { + cpusInfo.append("cpc=").append(capacity).append(" "); + capacitySum += capacity; + capacityCount++; + } + cpusInfo.append("\n"); + } + StringBuilder info = new StringBuilder(); + info.append(Build.MANUFACTURER).append(", ").append(Build.MODEL).append(" (").append(Build.PRODUCT).append(", ").append(Build.DEVICE).append(") ").append(" (android ").append(Build.VERSION.SDK_INT).append(")\n"); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { + info.append("SoC: ").append(Build.SOC_MANUFACTURER).append(", ").append(Build.SOC_MODEL).append("\n"); + } + String gpuModel = AndroidUtilities.getSysInfoString("/sys/kernel/gpu/gpu_model"); + if (gpuModel != null) { + info.append("GPU: 
").append(gpuModel); + Long minClock = AndroidUtilities.getSysInfoLong("/sys/kernel/gpu/gpu_min_clock"); + Long mminClock = AndroidUtilities.getSysInfoLong("/sys/kernel/gpu/gpu_mm_min_clock"); + Long maxClock = AndroidUtilities.getSysInfoLong("/sys/kernel/gpu/gpu_max_clock"); + if (minClock != null) { + info.append(", min=").append(minClock / 1000L); + } + if (mminClock != null) { + info.append(", mmin=").append(mminClock / 1000L); + } + if (maxClock != null) { + info.append(", max=").append(maxClock / 1000L); + } + info.append("\n"); + } + ConfigurationInfo configurationInfo = ((ActivityManager) ApplicationLoader.applicationContext.getSystemService(Context.ACTIVITY_SERVICE)).getDeviceConfigurationInfo(); + info.append("GLES Version: ").append(configurationInfo.getGlEsVersion()).append("\n"); + info.append("Memory: class=").append(AndroidUtilities.formatFileSize(memoryClass * 1024L * 1024L)); + ActivityManager.MemoryInfo memoryInfo = new ActivityManager.MemoryInfo(); + ((ActivityManager) ApplicationLoader.applicationContext.getSystemService(Context.ACTIVITY_SERVICE)).getMemoryInfo(memoryInfo); + info.append(", total=").append(AndroidUtilities.formatFileSize(memoryInfo.totalMem)); + info.append(", avail=").append(AndroidUtilities.formatFileSize(memoryInfo.availMem)); + info.append(", low?=").append(memoryInfo.lowMemory); + info.append(" (threshold=").append(AndroidUtilities.formatFileSize(memoryInfo.threshold)).append(")"); + info.append("\n"); + info.append("Current class: ").append(SharedConfig.performanceClassName(SharedConfig.getDevicePerformanceClass())).append(", measured: ").append(SharedConfig.performanceClassName(SharedConfig.measureDevicePerformanceClass())); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { + info.append(", suggest=").append(Build.VERSION.MEDIA_PERFORMANCE_CLASS); + } + info.append("\n"); + info.append(cpuCount).append(" CPUs"); + if (minFreqCount > 0) { + info.append(", avgMinFreq=").append(minFreqSum / minFreqCount); + } + if (curFreqCount > 0) { + info.append(", avgCurFreq=").append(curFreqSum / curFreqCount); + } + if (maxFreqCount > 0) { + info.append(", avgMaxFreq=").append(maxFreqSum / maxFreqCount); + } + if (capacityCount > 0) { + info.append(", avgCapacity=").append(capacitySum / capacityCount); + } + info.append("\n").append(cpusInfo); + + showDialog(new ShareAlert(getParentActivity(), null, info.toString(), false, null, false) { + @Override + protected void onSend(LongSparseArray dids, int count, TLRPC.TL_forumTopic topic) { + AndroidUtilities.runOnUIThread(() -> { + BulletinFactory.createInviteSentBulletin(getParentActivity(), contentView, dids.size(), dids.size() == 1 ? dids.valueAt(0).id : 0, count, getThemedColor(Theme.key_undo_background), getThemedColor(Theme.key_undo_infoColor)).show(); + }, 250); + } + }); + } else if (which == 26) { + AlertDialog.Builder builder2 = new AlertDialog.Builder(getParentActivity(), resourcesProvider); + builder2.setTitle("Force performance class"); + int currentClass = SharedConfig.getDevicePerformanceClass(); + int trueClass = SharedConfig.measureDevicePerformanceClass(); + builder2.setItems(new CharSequence[] { + AndroidUtilities.replaceTags((currentClass == SharedConfig.PERFORMANCE_CLASS_HIGH ? "**HIGH**" : "HIGH") + (trueClass == SharedConfig.PERFORMANCE_CLASS_HIGH ? " (measured)" : "")), + AndroidUtilities.replaceTags((currentClass == SharedConfig.PERFORMANCE_CLASS_AVERAGE ? "**AVERAGE**" : "AVERAGE") + (trueClass == SharedConfig.PERFORMANCE_CLASS_AVERAGE ? 
" (measured)" : "")), + AndroidUtilities.replaceTags((currentClass == SharedConfig.PERFORMANCE_CLASS_LOW ? "**LOW**" : "LOW") + (trueClass == SharedConfig.PERFORMANCE_CLASS_LOW ? " (measured)" : "")) + }, (dialog2, which2) -> { + int newClass = 2 - which2; + if (newClass == trueClass) { + SharedConfig.overrideDevicePerformanceClass(-1); + } else { + SharedConfig.overrideDevicePerformanceClass(newClass); + } + }); + builder2.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); + builder2.show(); + } else if (which == 27) { + SharedConfig.toggleRoundCamera(); } }); builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); showDialog(builder.create()); } else { try { - Toast.makeText(getParentActivity(), "¯\\_(ツ)_/¯", Toast.LENGTH_SHORT).show(); + Toast.makeText(getParentActivity(), LocaleController.getString("DebugMenuLongPress", R.string.DebugMenuLongPress), Toast.LENGTH_SHORT).show(); } catch (Exception e) { FileLog.e(e); } @@ -3796,7 +4033,7 @@ public boolean onItemClick(View view, int position) { showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } return true; }); @@ -3981,7 +4218,7 @@ public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { @Override protected void dispatchDraw(Canvas canvas) { super.dispatchDraw(canvas); - if (animatedEmojiDrawable != null) { + if (animatedEmojiDrawable != null && animatedEmojiDrawable.getImageReceiver() != null) { animatedEmojiDrawable.getImageReceiver().startAnimation(); } } @@ -4538,10 +4775,7 @@ public void onZoomStarted(MessageObject messageObject) { if (sharedMediaLayout != null && sharedMediaLayout.getCurrentListView() != null) { sharedMediaLayout.getCurrentListView().cancelClickRunnables(true); } - Bitmap bitmap = pinchToZoomHelper.getPhotoImage() == null ? 
null : pinchToZoomHelper.getPhotoImage().getBitmap(); - if (bitmap != null) { - topView.setBackgroundColor(ColorUtils.blendARGB(AndroidUtilities.calcBitmapColor(bitmap), getThemedColor(Theme.key_windowBackgroundWhite), 0.1f)); - } + topView.setBackgroundColor(ColorUtils.blendARGB(getAverageColor(pinchToZoomHelper.getPhotoImage()), getThemedColor(Theme.key_windowBackgroundWhite), 0.1f)); } }); avatarsViewPager.setPinchToZoomHelper(pinchToZoomHelper); @@ -4637,6 +4871,7 @@ private void setAvatarExpandProgress(float animatedFracture) { nameTextView[1].setTextColor(ColorUtils.blendARGB(getThemedColor(Theme.key_profile_title), Color.WHITE, value)); actionBar.setItemsColor(ColorUtils.blendARGB(getThemedColor(Theme.key_actionBarDefaultIcon), Color.WHITE, value), false); + actionBar.setMenuOffsetSuppressed(true); avatarImage.setForegroundAlpha(value); @@ -5024,7 +5259,7 @@ public boolean onMemberClick(TLRPC.ChatParticipant participant, boolean isLong, AlertDialog alertDialog = builder.create(); showDialog(alertDialog); if (hasRemove) { - alertDialog.setItemColor(items.size() - 1, getThemedColor(Theme.key_dialogTextRed2), getThemedColor(Theme.key_dialogRedIcon)); + alertDialog.setItemColor(items.size() - 1, getThemedColor(Theme.key_dialogTextRed), getThemedColor(Theme.key_dialogRedIcon)); } } else { if (participant.user_id == getUserConfig().getClientUserId()) { @@ -5288,6 +5523,9 @@ protected boolean drawChild(Canvas canvas, View child, long drawingTime) { break; case PHONE_OPTION_TELEGRAM_CALL: case PHONE_OPTION_TELEGRAM_VIDEO_CALL: + if (getParentActivity() == null) { + return; + } VoIPHelper.startCall(user, action == PHONE_OPTION_TELEGRAM_VIDEO_CALL, userInfo != null && userInfo.video_calls_available, getParentActivity(), userInfo, getAccountInstance()); break; } @@ -5713,47 +5951,53 @@ private void openAddMember() { } fragment.setDelegate((users, fwdCount) -> { HashSet currentParticipants = new HashSet<>(); + ArrayList addedUsers = new ArrayList<>(); if (chatInfo.participants.participants != null) { for (int i = 0; i < chatInfo.participants.participants.size(); i++) { currentParticipants.add(chatInfo.participants.participants.get(i).user_id); } } - int N = users.size(); - int[] finished = new int[1]; - for (int a = 0; a < N; a++) { - TLRPC.User user = users.get(a); - getMessagesController().addUserToChat(chatId, user, fwdCount, null, ProfileActivity.this, () -> { - if (++finished[0] == N) { - if (fragmentView == null || getParentActivity() == null) { - return; - } - BulletinFactory.of(ProfileActivity.this).createUsersAddedBulletin(users, currentChat).show(); - } - }); - if (!currentParticipants.contains(user.id)) { - if (chatInfo.participants == null) { - chatInfo.participants = new TLRPC.TL_chatParticipants(); + getMessagesController().addUsersToChat(currentChat, ProfileActivity.this, users, fwdCount, user -> { + addedUsers.add(user); + }, restrictedUser -> { + for (int i = 0; i < chatInfo.participants.participants.size(); i++) { + if (chatInfo.participants.participants.get(i).user_id == restrictedUser.id) { + chatInfo.participants.participants.remove(i); + updateListAnimated(true); + break; } - if (ChatObject.isChannel(currentChat)) { - TLRPC.TL_chatChannelParticipant channelParticipant1 = new TLRPC.TL_chatChannelParticipant(); - channelParticipant1.channelParticipant = new TLRPC.TL_channelParticipant(); - channelParticipant1.channelParticipant.inviter_id = getUserConfig().getClientUserId(); - channelParticipant1.channelParticipant.peer = new TLRPC.TL_peerUser(); - 
channelParticipant1.channelParticipant.peer.user_id = user.id; - channelParticipant1.channelParticipant.date = getConnectionsManager().getCurrentTime(); - channelParticipant1.user_id = user.id; - chatInfo.participants.participants.add(channelParticipant1); - } else { - TLRPC.ChatParticipant participant = new TLRPC.TL_chatParticipant(); - participant.user_id = user.id; - participant.inviter_id = getAccountInstance().getUserConfig().clientUserId; - chatInfo.participants.participants.add(participant); + } + }, () -> { + int N = addedUsers.size(); + int[] finished = new int[1]; + for (int a = 0; a < N; a++) { + TLRPC.User user = addedUsers.get(a); + if (!currentParticipants.contains(user.id)) { + if (chatInfo.participants == null) { + chatInfo.participants = new TLRPC.TL_chatParticipants(); + } + if (ChatObject.isChannel(currentChat)) { + TLRPC.TL_chatChannelParticipant channelParticipant1 = new TLRPC.TL_chatChannelParticipant(); + channelParticipant1.channelParticipant = new TLRPC.TL_channelParticipant(); + channelParticipant1.channelParticipant.inviter_id = getUserConfig().getClientUserId(); + channelParticipant1.channelParticipant.peer = new TLRPC.TL_peerUser(); + channelParticipant1.channelParticipant.peer.user_id = user.id; + channelParticipant1.channelParticipant.date = getConnectionsManager().getCurrentTime(); + channelParticipant1.user_id = user.id; + chatInfo.participants.participants.add(channelParticipant1); + } else { + TLRPC.ChatParticipant participant = new TLRPC.TL_chatParticipant(); + participant.user_id = user.id; + participant.inviter_id = getAccountInstance().getUserConfig().clientUserId; + chatInfo.participants.participants.add(participant); + } + chatInfo.participants_count++; + getMessagesController().putUser(user, false); } - chatInfo.participants_count++; - getMessagesController().putUser(user, false); } - } - updateListAnimated(true); + updateListAnimated(true); + }); + }); presentFragment(fragment); } @@ -6031,7 +6275,12 @@ public void onAnimationEnd(Animator animation) { if (!doNotSetForeground) { BackupImageView imageView = avatarsViewPager.getCurrentItemView(); if (imageView != null) { - avatarImage.setForegroundImageDrawable(imageView.getImageReceiver().getDrawableSafe()); + if (imageView.getImageReceiver().getDrawable() instanceof VectorAvatarThumbDrawable) { + avatarImage.drawForeground(false); + } else { + avatarImage.drawForeground(true); + avatarImage.setForegroundImageDrawable(imageView.getImageReceiver().getDrawableSafe()); + } } } avatarImage.setForegroundAlpha(1f); @@ -6173,7 +6422,9 @@ public void updateQrItemVisibility(boolean animated) { private void setForegroundImage(boolean secondParent) { Drawable drawable = avatarImage.getImageReceiver().getDrawable(); - if (drawable instanceof AnimatedFileDrawable) { + if (drawable instanceof VectorAvatarThumbDrawable) { + avatarImage.setForegroundImage(null, null, drawable); + } else if (drawable instanceof AnimatedFileDrawable) { AnimatedFileDrawable fileDrawable = (AnimatedFileDrawable) drawable; avatarImage.setForegroundImage(null, null, fileDrawable); if (secondParent) { @@ -6439,7 +6690,7 @@ public void didReceivedNotification(int id, int account, final Object... 
args) { updateTtlIcon(); } } else if (id == NotificationCenter.closeChats) { - removeSelfFromStack(); + removeSelfFromStack(true); } else if (id == NotificationCenter.botInfoDidLoad) { TLRPC.BotInfo info = (TLRPC.BotInfo) args[0]; if (info.user_id == userId) { @@ -6616,7 +6867,7 @@ public void onResume() { } if (userId != 0) { final TLRPC.User user = getMessagesController().getUser(userId); - if (user.photo == null) { + if (user != null && user.photo == null) { if (extraHeight >= AndroidUtilities.dp(88f)) { expandAnimator.cancel(); expandAnimatorValues[0] = 1f; @@ -6910,7 +7161,7 @@ public AnimatorSet onCustomTransitionAnimation(final boolean isOpen, final Runna animators.add(ObjectAnimator.ofFloat(writeButton, View.ALPHA, 1.0f)); } if (playProfileAnimation == 2) { - avatarColor = AndroidUtilities.calcBitmapColor(avatarImage.getImageReceiver().getBitmap()); + avatarColor = getAverageColor(avatarImage.getImageReceiver()); nameTextView[1].setTextColor(Color.WHITE); onlineTextView[1].setTextColor(Color.argb(179, 255, 255, 255)); idTextView.setAlpha(0); @@ -7028,12 +7279,20 @@ public AnimatorSet onCustomTransitionAnimation(final boolean isOpen, final Runna } profileTransitionInProgress = true; ValueAnimator valueAnimator = ValueAnimator.ofFloat(0, 1f); - valueAnimator.addUpdateListener(valueAnimator1 -> fragmentView.invalidate()); + valueAnimator.addUpdateListener(valueAnimator1 -> { + if (fragmentView != null) { + fragmentView.invalidate(); + } + }); animatorSet.playTogether(valueAnimator); animatorSet.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { + if (fragmentView == null) { + callback.run(); + return; + } listView.setLayerType(View.LAYER_TYPE_NONE, null); if (animatingItem != null) { ActionBarMenu menu = actionBar.createMenu(); @@ -7064,6 +7323,13 @@ public void onAnimationEnd(Animator animation) { return null; } + private int getAverageColor(ImageReceiver imageReceiver) { + if (imageReceiver.getDrawable() instanceof VectorAvatarThumbDrawable) { + return ((VectorAvatarThumbDrawable)imageReceiver.getDrawable()).gradientTools.getAverageColor(); + } + return AndroidUtilities.calcBitmapColor(avatarImage.getImageReceiver().getBitmap()); + } + private void updateOnlineCount(boolean notify) { onlineCount = 0; int currentTime = getConnectionsManager().getCurrentTime(); @@ -7225,6 +7491,7 @@ private void updateRowsIds() { dataRow = -1; chatRow = -1; filtersRow = -1; + liteModeRow = -1; stickersRow = -1; devicesRow = -1; devicesSectionRow = -1; @@ -7330,6 +7597,7 @@ private void updateRowsIds() { stickersRow = rowCount++; devicesRow = rowCount++; filtersRow = rowCount++; + liteModeRow = rowCount++; nekoRow = rowCount++; languageRow = rowCount++; devicesSectionRow = rowCount++; @@ -7346,7 +7614,7 @@ private void updateRowsIds() { } if (BuildVars.LOGS_ENABLED) { sendLogsRow = rowCount++; - sendLastLogsRow = -1; + sendLastLogsRow = rowCount++; clearLogsRow = rowCount++; } versionRow = rowCount++; @@ -7603,6 +7871,9 @@ private Drawable getPremiumCrossfadeDrawable() { private Drawable getEmojiStatusDrawable(TLRPC.EmojiStatus emojiStatus, boolean switchable, boolean animated, int a) { if (emojiStatusDrawable[a] == null) { emojiStatusDrawable[a] = new AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable(nameTextView[a], AndroidUtilities.dp(24), a == 0 ? 
AnimatedEmojiDrawable.CACHE_TYPE_EMOJI_STATUS : AnimatedEmojiDrawable.CACHE_TYPE_KEYBOARD); + if (fragmentViewAttached) { + emojiStatusDrawable[a].attach(); + } } if (emojiStatus instanceof TLRPC.TL_emojiStatus) { emojiStatusDrawable[a].set(((TLRPC.TL_emojiStatus) emojiStatus).document_id, animated); @@ -7645,7 +7916,7 @@ private void updateEmojiStatusEffectPosition() { } private void updateProfileData(boolean reload) { - if (avatarContainer == null || nameTextView == null) { + if (avatarContainer == null || nameTextView == null || getParentActivity() == null) { return; } String onlineTextOverride; @@ -7683,11 +7954,23 @@ private void updateProfileData(boolean reload) { final ImageLocation imageLocation = ImageLocation.getForUserOrChat(user, ImageLocation.TYPE_BIG); final ImageLocation thumbLocation = ImageLocation.getForUserOrChat(user, ImageLocation.TYPE_SMALL); - final ImageLocation videoThumbLocation = ImageLocation.getForUserOrChat(user, ImageLocation.TYPE_VIDEO_THUMB); + final ImageLocation videoThumbLocation = ImageLocation.getForUserOrChat(user, ImageLocation.TYPE_VIDEO_BIG); + VectorAvatarThumbDrawable vectorAvatarThumbDrawable = null; + TLRPC.VideoSize vectorAvatar = null; + if (userInfo != null) { + vectorAvatar = FileLoader.getVectorMarkupVideoSize(user.photo != null && user.photo.personal ? userInfo.personal_photo : userInfo.profile_photo); + if (vectorAvatar != null) { + vectorAvatarThumbDrawable = new VectorAvatarThumbDrawable(vectorAvatar, user.premium, VectorAvatarThumbDrawable.TYPE_PROFILE); + } + } final ImageLocation videoLocation = avatarsViewPager.getCurrentVideoLocation(thumbLocation, imageLocation); - avatarsViewPager.initIfEmpty(imageLocation, thumbLocation, reload); + if (avatar == null) { + avatarsViewPager.initIfEmpty(vectorAvatarThumbDrawable, imageLocation, thumbLocation, reload); + } if (avatarBig == null) { - if (videoThumbLocation != null && !user.photo.personal) { + if (vectorAvatar != null) { + avatarImage.setImageDrawable(vectorAvatarThumbDrawable); + } else if (videoThumbLocation != null && !user.photo.personal) { avatarImage.getImageReceiver().setVideoThumbIsSame(true); avatarImage.setImage(videoThumbLocation, "avatar", thumbLocation, "50_50", avatarDrawable, user); } else { @@ -8044,7 +8327,8 @@ private void updateProfileData(boolean reload) { onlineTextView[a].setText(a == 0 ? statusString : profileStatusString); } else if (a == 0 && ChatObject.isChannel(currentChat) && chatInfo != null && chatInfo.participants_count != 0 && (currentChat.megagroup || currentChat.broadcast)) { int[] result = new int[1]; - String shortNumber = LocaleController.formatShortNumber(chatInfo.participants_count, result); + boolean ignoreShort = AndroidUtilities.isAccessibilityScreenReaderEnabled(); + String shortNumber = ignoreShort ? 
String.valueOf(result[0] = chatInfo.participants_count) : LocaleController.formatShortNumber(chatInfo.participants_count, result); if (currentChat.megagroup) { if (chatInfo.participants_count == 0) { if (chat.has_geo) { @@ -8098,7 +8382,7 @@ private void updateProfileData(boolean reload) { videoLocation = avatarsViewPager.getCurrentVideoLocation(thumbLocation, imageLocation); } - boolean initied = avatarsViewPager.initIfEmpty(imageLocation, thumbLocation, reload); + boolean initied = avatarsViewPager.initIfEmpty(null, imageLocation, thumbLocation, reload); if ((imageLocation == null || initied) && isPulledDown) { final View view = layoutManager.findViewByPosition(0); if (view != null) { @@ -8503,7 +8787,8 @@ protected void onDialogDismiss(Dialog dialog) { } @Override - public void didSelectDialogs(DialogsActivity fragment, ArrayList dids, CharSequence message, boolean param) { + public boolean didSelectDialogs(DialogsActivity + fragment, ArrayList dids, CharSequence message, boolean param, TopicsFragment topicsFragment) { long did = dids.get(0).dialogId; Bundle args = new Bundle(); args.putBoolean("scrollToTopOnResume", true); @@ -8515,7 +8800,7 @@ public void didSelectDialogs(DialogsActivity fragment, ArrayList { - if (photo != null || video != null) { + if (photo != null || video != null || emojiMarkup != null) { + if (avatar == null) { + return; + } TLRPC.TL_photos_uploadProfilePhoto req = new TLRPC.TL_photos_uploadProfilePhoto(); if (photo != null) { req.file = photo; @@ -8789,8 +9079,11 @@ public void didUploadPhoto(final TLRPC.InputFile photo, final TLRPC.InputFile vi req.video_start_ts = videoStartTimestamp; req.flags |= 4; } - getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { - avatarsViewPager.removeUploadingImage(uploadingImageLocation); + if (emojiMarkup != null) { + req.video_emoji_markup = emojiMarkup; + req.flags |= 16; + } + avatarUploadingRequest = getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { if (error == null) { TLRPC.User user = getMessagesController().getUser(getUserConfig().getClientUserId()); if (user == null) { @@ -8802,11 +9095,12 @@ public void didUploadPhoto(final TLRPC.InputFile photo, final TLRPC.InputFile vi } else { getUserConfig().setCurrentUser(user); } + TLRPC.TL_photos_photo photos_photo = (TLRPC.TL_photos_photo) response; ArrayList sizes = photos_photo.photo.sizes; TLRPC.PhotoSize small = FileLoader.getClosestPhotoSizeWithSize(sizes, 150); TLRPC.PhotoSize big = FileLoader.getClosestPhotoSizeWithSize(sizes, 800); - TLRPC.VideoSize videoSize = photos_photo.photo.video_sizes.isEmpty() ? null : photos_photo.photo.video_sizes.get(0); + TLRPC.VideoSize videoSize = photos_photo.photo.video_sizes.isEmpty() ? 
null : FileLoader.getClosestVideoSizeWithSize(photos_photo.photo.video_sizes, 1000); user.photo = new TLRPC.TL_userProfilePhoto(); user.photo.photo_id = photos_photo.photo.id; if (small != null) { @@ -8824,32 +9118,37 @@ public void didUploadPhoto(final TLRPC.InputFile photo, final TLRPC.InputFile vi String newKey = small.location.volume_id + "_" + small.location.local_id + "@50_50"; ImageLoader.getInstance().replaceImageInCache(oldKey, newKey, ImageLocation.getForUserOrChat(user, ImageLocation.TYPE_SMALL), false); } - if (big != null && avatarBig != null) { - File destFile = FileLoader.getInstance(currentAccount).getPathToAttach(big, true); - File src = FileLoader.getInstance(currentAccount).getPathToAttach(avatarBig, true); - src.renameTo(destFile); - } + if (videoSize != null && videoPath != null) { File destFile = FileLoader.getInstance(currentAccount).getPathToAttach(videoSize, "mp4", true); File src = new File(videoPath); src.renameTo(destFile); + } else if (big != null && avatarBig != null) { + File destFile = FileLoader.getInstance(currentAccount).getPathToAttach(big, true); + File src = FileLoader.getInstance(currentAccount).getPathToAttach(avatarBig, true); + src.renameTo(destFile); } - - getMessagesStorage().clearUserPhotos(user.id); + getMessagesStorage().addDialogPhoto(user.id, ((TLRPC.TL_photos_photo) response).photo); ArrayList users = new ArrayList<>(); users.add(user); getMessagesStorage().putUsersAndChats(users, null, false, true); + TLRPC.UserFull userFull = getMessagesController().getUserFull(userId); + userFull.profile_photo = photos_photo.photo; + getMessagesStorage().updateUserInfo(userFull, false); } allowPullingDown = !AndroidUtilities.isTablet() && !isInLandscapeMode && avatarImage.getImageReceiver().hasNotThumb() && !AndroidUtilities.isAccessibilityScreenReaderEnabled(); avatar = null; avatarBig = null; + avatarsViewPager.scrolledByUser = true; + avatarsViewPager.removeUploadingImage(uploadingImageLocation); avatarsViewPager.setCreateThumbFromParent(false); updateProfileData(true); showAvatarProgress(false, true); getNotificationCenter().postNotificationName(NotificationCenter.updateInterfaces, MessagesController.UPDATE_MASK_ALL); getNotificationCenter().postNotificationName(NotificationCenter.mainUserInfoChanged); getUserConfig().saveConfig(true); + })); } else { avatar = smallSize.location; @@ -8904,29 +9203,101 @@ public void restoreSelfArgs(Bundle args) { } } - private void sendLogs() { - - File path = new File(EnvUtil.getShareCachePath(), "logs"); + public static void sendLogs(Activity activity, boolean last) { + if (activity == null) { + return; + } + AlertDialog progressDialog = new AlertDialog(activity, AlertDialog.ALERT_TYPE_SPINNER); + progressDialog.setCanCancel(false); + progressDialog.show(); + Utilities.globalQueue.postRunnable(() -> { + try { + File dir = AndroidUtilities.getLogsDir(); + if (dir == null) { + return; + } - path.mkdirs(); + File zipFile = new File(dir, "logs.zip"); + if (zipFile.exists()) { + zipFile.delete(); + } - File logcatFile = new File(path, "Nagram-" + System.currentTimeMillis() + ".log"); + ArrayList files = new ArrayList<>(); - FileUtil.delete(logcatFile); + File[] logFiles = dir.listFiles(); + for (File f : logFiles) { + files.add(f); + } - try { + File filesDir = ApplicationLoader.getFilesDirFixed(); + filesDir = new File(filesDir, "malformed_database/"); + if (filesDir.exists() && filesDir.isDirectory()) { + File[] malformedDatabaseFiles = filesDir.listFiles(); + for (File file : malformedDatabaseFiles) { + 
files.add(file); + } + } - RuntimeUtil.exec("logcat", "-df", logcatFile.getPath()).waitFor(); + boolean[] finished = new boolean[1]; + long currentDate = System.currentTimeMillis(); - RuntimeUtil.exec("logcat", "-c").waitFor(); + BufferedInputStream origin = null; + ZipOutputStream out = null; + try { + FileOutputStream dest = new FileOutputStream(zipFile); + out = new ZipOutputStream(new BufferedOutputStream(dest)); + byte[] data = new byte[1024 * 64]; - ShareUtil.shareFile(getParentActivity(), logcatFile); + for (int i = 0; i < files.size(); i++) { + File file = files.get(i); + if (!file.getName().contains("cache4") && (last || file.getName().contains("_mtproto")) && (currentDate - file.lastModified()) > 24 * 60 * 60 * 1000) { + continue; + } + if (!file.exists()) { + continue; + } + FileInputStream fi = new FileInputStream(file); + origin = new BufferedInputStream(fi, data.length); - } catch (Exception e) { + ZipEntry entry = new ZipEntry(file.getName()); + out.putNextEntry(entry); + int count; + while ((count = origin.read(data, 0, data.length)) != -1) { + out.write(data, 0, count); + } + origin.close(); + origin = null; + } + finished[0] = true; + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (origin != null) { + origin.close(); + } + if (out != null) { + out.close(); + } + } - AlertUtil.showToast(e); + AndroidUtilities.runOnUIThread(() -> { + try { + progressDialog.dismiss(); + } catch (Exception ignore) { - } + } + if (finished[0]) { + ShareUtil.shareFile(activity, zipFile); + } else { + if (activity != null) { + Toast.makeText(activity, LocaleController.getString("ErrorOccurred", R.string.ErrorOccurred), Toast.LENGTH_SHORT).show(); + } + } + }); + } catch (Exception e) { + e.printStackTrace(); + } + }); } private class ListAdapter extends RecyclerListView.SelectionAdapter { @@ -9413,7 +9784,7 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } else if (position == privacyRow) { textCell.setTextAndIcon(LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, true); } else if (position == dataRow) { - textCell.setTextAndIcon(LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, true); + textCell.setTextAndIcon(LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.baseline_data_usage_24, true); } else if (position == chatRow) { textCell.setTextAndIcon(LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, true); } else if (position == stickersRow) { @@ -9422,6 +9793,8 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { textCell.setTextAndIcon(LocaleController.getString("NekoSettings", R.string.NekoSettings), R.drawable.menu_settings, true); } else if (position == filtersRow) { textCell.setTextAndIcon(LocaleController.getString("Filters", R.string.Filters), R.drawable.msg_folders, true); + } else if (position == liteModeRow) { + textCell.setTextAndIcon(LocaleController.getString(R.string.PowerUsage), R.drawable.msg2_battery, true); } else if (position == questionRow) { textCell.setTextAndIcon(LocaleController.getString("NekoXUpdatesChannel", R.string.NekoXUpdatesChannel), R.drawable.menu_channel_ny, true); } else if (position == faqRow) { @@ -9431,13 +9804,13 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } else if (position == sendLogsRow) { textCell.setTextAndIcon(LocaleController.getString("DebugSendLogs", R.string.DebugSendLogs), 
R.drawable.menu_support2, true); } else if (position == sendLastLogsRow) { - textCell.setText(LocaleController.getString("DebugSendLastLogs", R.string.DebugSendLastLogs), true); + textCell.setTextAndIcon(LocaleController.getString("DebugSendLastLogs", R.string.DebugSendLastLogs), R.drawable.baseline_bug_report_24 ,true); } else if (position == clearLogsRow) { textCell.setTextAndIcon(LocaleController.getString("DebugClearLogs", R.string.DebugClearLogs), R.drawable.menu_clearcache, switchBackendRow != -1); } else if (position == switchBackendRow) { textCell.setText("Switch Backend", false); } else if (position == devicesRow) { - textCell.setTextAndIcon(LocaleController.getString("Devices", R.string.Devices), R.drawable.menu_devices, true); + textCell.setTextAndIcon(LocaleController.getString("Devices", R.string.Devices), R.drawable.msg2_devices, true); } else if (position == setAvatarRow) { cellCameraDrawable.setCustomEndFrame(86); cellCameraDrawable.setCurrentFrame(85, false); @@ -9655,7 +10028,8 @@ public boolean isEnabled(RecyclerView.ViewHolder holder) { position == versionRow || position == dataRow || position == chatRow || position == questionRow || position == devicesRow || position == filtersRow || position == stickersRow || position == faqRow || position == policyRow || position == sendLogsRow || position == sendLastLogsRow || - position == clearLogsRow || position == switchBackendRow || position == setAvatarRow || position == addToGroupButtonRow || position == premiumRow; + position == clearLogsRow || position == switchBackendRow || position == setAvatarRow || + position == addToGroupButtonRow || position == premiumRow || position == liteModeRow; } if (holder.itemView instanceof UserCell) { UserCell userCell = (UserCell) holder.itemView; @@ -9697,7 +10071,7 @@ public int getItemViewType(int position) { position == questionRow || position == devicesRow || position == filtersRow || position == stickersRow || position == faqRow || position == policyRow || position == sendLogsRow || position == sendLastLogsRow || position == clearLogsRow || position == switchBackendRow || position == setAvatarRow || position == addToGroupButtonRow || - position == addToContactsRow) { + position == addToContactsRow || position == liteModeRow) { return VIEW_TYPE_TEXT; } else if (position == notificationsDividerRow) { return VIEW_TYPE_DIVIDER; @@ -9880,115 +10254,132 @@ private SearchResult[] onCreateSearchArray() { if (freeAccount >= 0) { presentFragment(new LoginActivity(freeAccount)); } - }), - new SearchResult(503, LocaleController.getString("UserBio", R.string.UserBio), 0, () -> { - if (userInfo != null) { - presentFragment(new ChangeBioActivity()); - } - }), - new SearchResult(504, LocaleController.getString(R.string.AddPhoto), 0, ProfileActivity.this::onWriteButtonClick), - - new SearchResult(1, LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), - new SearchResult(2, LocaleController.getString("NotificationsPrivateChats", R.string.NotificationsPrivateChats), LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsCustomSettingsActivity(NotificationsController.TYPE_PRIVATE, new ArrayList<>(), true))), - new SearchResult(3, LocaleController.getString("NotificationsGroups", R.string.NotificationsGroups), LocaleController.getString("NotificationsAndSounds", 
R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsCustomSettingsActivity(NotificationsController.TYPE_GROUP, new ArrayList<>(), true))), - new SearchResult(4, LocaleController.getString("NotificationsChannels", R.string.NotificationsChannels), LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsCustomSettingsActivity(NotificationsController.TYPE_CHANNEL, new ArrayList<>(), true))), - new SearchResult(5, LocaleController.getString("VoipNotificationSettings", R.string.VoipNotificationSettings), "callsSectionRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), - new SearchResult(6, LocaleController.getString("BadgeNumber", R.string.BadgeNumber), "badgeNumberSection", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), - new SearchResult(7, LocaleController.getString("InAppNotifications", R.string.InAppNotifications), "inappSectionRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), - new SearchResult(8, LocaleController.getString("ContactJoined", R.string.ContactJoined), "contactJoinedRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), - new SearchResult(9, LocaleController.getString("PinnedMessages", R.string.PinnedMessages), "pinnedMessageRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), - new SearchResult(10, LocaleController.getString("ResetAllNotifications", R.string.ResetAllNotifications), "resetNotificationsRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), - new SearchResult(11, LocaleController.getString(R.string.NotificationsService), "notificationsServiceRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), - new SearchResult(12, LocaleController.getString(R.string.NotificationsServiceConnection), "notificationsServiceConnectionRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), - new SearchResult(13, LocaleController.getString(R.string.RepeatNotifications), "repeatRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), - - new SearchResult(100, LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacySettingsActivity())), - new SearchResult(101, LocaleController.getString("BlockedUsers", R.string.BlockedUsers), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), 
R.drawable.msg_secret, () -> presentFragment(new PrivacyUsersActivity())), - new SearchResult(105, LocaleController.getString("PrivacyPhone", R.string.PrivacyPhone), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_PHONE, true))), - new SearchResult(102, LocaleController.getString("PrivacyLastSeen", R.string.PrivacyLastSeen), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_LASTSEEN, true))), - new SearchResult(103, LocaleController.getString("PrivacyProfilePhoto", R.string.PrivacyProfilePhoto), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_PHOTO, true))), - new SearchResult(104, LocaleController.getString("PrivacyForwards", R.string.PrivacyForwards), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_FORWARDS, true))), - new SearchResult(122, LocaleController.getString("PrivacyP2P", R.string.PrivacyP2P), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_P2P, true))), - new SearchResult(106, LocaleController.getString("Calls", R.string.Calls), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_CALLS, true))), - new SearchResult(107, LocaleController.getString("GroupsAndChannels", R.string.GroupsAndChannels), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_INVITE, true))), - new SearchResult(123, LocaleController.getString("PrivacyVoiceMessages", R.string.PrivacyVoiceMessages), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> { - if (!getUserConfig().isPremium()) { - try { - fragmentView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception e) { - FileLog.e(e); - } - BulletinFactory.of(ProfileActivity.this).createRestrictVoiceMessagesPremiumBulletin().show(); - return; + }), + new SearchResult(503, LocaleController.getString("UserBio", R.string.UserBio), 0, () -> { + if (userInfo != null) { + presentFragment(new ChangeBioActivity()); + } + }), + new SearchResult(504, LocaleController.getString(R.string.AddPhoto), 0, ProfileActivity.this::onWriteButtonClick), + + new SearchResult(1, LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), + new SearchResult(2, LocaleController.getString("NotificationsPrivateChats", R.string.NotificationsPrivateChats), LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsCustomSettingsActivity(NotificationsController.TYPE_PRIVATE, new ArrayList<>(), true))), + new SearchResult(3, 
LocaleController.getString("NotificationsGroups", R.string.NotificationsGroups), LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsCustomSettingsActivity(NotificationsController.TYPE_GROUP, new ArrayList<>(), true))), + new SearchResult(4, LocaleController.getString("NotificationsChannels", R.string.NotificationsChannels), LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsCustomSettingsActivity(NotificationsController.TYPE_CHANNEL, new ArrayList<>(), true))), + new SearchResult(5, LocaleController.getString("VoipNotificationSettings", R.string.VoipNotificationSettings), "callsSectionRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), + new SearchResult(6, LocaleController.getString("BadgeNumber", R.string.BadgeNumber), "badgeNumberSection", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), + new SearchResult(7, LocaleController.getString("InAppNotifications", R.string.InAppNotifications), "inappSectionRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), + new SearchResult(8, LocaleController.getString("ContactJoined", R.string.ContactJoined), "contactJoinedRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), + new SearchResult(9, LocaleController.getString("PinnedMessages", R.string.PinnedMessages), "pinnedMessageRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), + new SearchResult(10, LocaleController.getString("ResetAllNotifications", R.string.ResetAllNotifications), "resetNotificationsRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), + new SearchResult(11, LocaleController.getString(R.string.NotificationsService), "notificationsServiceRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), + new SearchResult(12, LocaleController.getString(R.string.NotificationsServiceConnection), "notificationsServiceConnectionRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), + new SearchResult(13, LocaleController.getString(R.string.RepeatNotifications), "repeatRow", LocaleController.getString("NotificationsAndSounds", R.string.NotificationsAndSounds), R.drawable.msg_notifications, () -> presentFragment(new NotificationsSettingsActivity())), + + new SearchResult(100, LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacySettingsActivity())), + new SearchResult(101, 
LocaleController.getString("BlockedUsers", R.string.BlockedUsers), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacyUsersActivity())), + new SearchResult(105, LocaleController.getString("PrivacyPhone", R.string.PrivacyPhone), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_PHONE, true))), + new SearchResult(102, LocaleController.getString("PrivacyLastSeen", R.string.PrivacyLastSeen), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_LASTSEEN, true))), + new SearchResult(103, LocaleController.getString("PrivacyProfilePhoto", R.string.PrivacyProfilePhoto), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_PHOTO, true))), + new SearchResult(104, LocaleController.getString("PrivacyForwards", R.string.PrivacyForwards), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_FORWARDS, true))), + new SearchResult(122, LocaleController.getString("PrivacyP2P", R.string.PrivacyP2P), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_P2P, true))), + new SearchResult(106, LocaleController.getString("Calls", R.string.Calls), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_CALLS, true))), + new SearchResult(107, LocaleController.getString("GroupsAndChannels", R.string.GroupsAndChannels), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_INVITE, true))), + new SearchResult(123, LocaleController.getString("PrivacyVoiceMessages", R.string.PrivacyVoiceMessages), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> { + if (!getUserConfig().isPremium()) { + try { + if (!NekoConfig.disableVibration.Bool()) + fragmentView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } catch (Exception e) { + FileLog.e(e); } - presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_VOICE_MESSAGES, true)); - }), - new SearchResult(108, LocaleController.getString("Passcode", R.string.Passcode), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(PasscodeActivity.determineOpenFragment())), - new SearchResult(109, LocaleController.getString("TwoStepVerification", R.string.TwoStepVerification), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new TwoStepVerificationActivity())), - new SearchResult(110, LocaleController.getString("SessionsTitle", R.string.SessionsTitle), R.drawable.msg_secret, () -> presentFragment(new SessionsActivity(0))), - 
getMessagesController().autoarchiveAvailable ? new SearchResult(121, LocaleController.getString("ArchiveAndMute", R.string.ArchiveAndMute), "newChatsRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacySettingsActivity())) : null, - new SearchResult(112, LocaleController.getString("DeleteAccountIfAwayFor2", R.string.DeleteAccountIfAwayFor2), "deleteAccountRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacySettingsActivity())), - new SearchResult(113, LocaleController.getString("PrivacyPaymentsClear", R.string.PrivacyPaymentsClear), "paymentsClearRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacySettingsActivity())), - new SearchResult(114, LocaleController.getString("WebSessionsTitle", R.string.WebSessionsTitle), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new SessionsActivity(1))), - new SearchResult(115, LocaleController.getString("SyncContactsDelete", R.string.SyncContactsDelete), "contactsDeleteRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacySettingsActivity())), - new SearchResult(116, LocaleController.getString("SyncContacts", R.string.SyncContacts), "contactsSyncRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacySettingsActivity())), - new SearchResult(117, LocaleController.getString("SuggestContacts", R.string.SuggestContacts), "contactsSuggestRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacySettingsActivity())), - new SearchResult(118, LocaleController.getString("MapPreviewProvider", R.string.MapPreviewProvider), "secretMapRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacySettingsActivity())), - new SearchResult(119, LocaleController.getString("SecretWebPage", R.string.SecretWebPage), "secretWebpageRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg_secret, () -> presentFragment(new PrivacySettingsActivity())), - - new SearchResult(120, LocaleController.getString(R.string.Devices), R.drawable.msg_devices, () -> presentFragment(new SessionsActivity(0))), - new SearchResult(121, LocaleController.getString(R.string.TerminateAllSessions), "terminateAllSessionsRow", LocaleController.getString(R.string.Devices), R.drawable.msg_devices, () -> presentFragment(new SessionsActivity(0))), - new SearchResult(122, LocaleController.getString(R.string.LinkDesktopDevice), LocaleController.getString(R.string.Devices), R.drawable.msg_devices, () -> presentFragment(new SessionsActivity(0).setHighlightLinkDesktopDevice())), - - new SearchResult(200, LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(201, LocaleController.getString("DataUsage", R.string.DataUsage), "usageSectionRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(202, LocaleController.getString("StorageUsage", 
R.string.StorageUsage), LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new CacheControlActivity())), - new SearchResult(203, LocaleController.getString("KeepMedia", R.string.KeepMedia), "keepMediaRow", LocaleController.getString("DataSettings", R.string.DataSettings), LocaleController.getString("StorageUsage", R.string.StorageUsage), R.drawable.msg_data, () -> presentFragment(new CacheControlActivity())), - new SearchResult(204, LocaleController.getString("ClearMediaCache", R.string.ClearMediaCache), "cacheRow", LocaleController.getString("DataSettings", R.string.DataSettings), LocaleController.getString("StorageUsage", R.string.StorageUsage), R.drawable.msg_data, () -> presentFragment(new CacheControlActivity())), - new SearchResult(205, LocaleController.getString("LocalDatabase", R.string.LocalDatabase), "databaseRow", LocaleController.getString("DataSettings", R.string.DataSettings), LocaleController.getString("StorageUsage", R.string.StorageUsage), R.drawable.msg_data, () -> presentFragment(new CacheControlActivity())), - new SearchResult(206, LocaleController.getString("NetworkUsage", R.string.NetworkUsage), LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataUsageActivity())), - new SearchResult(207, LocaleController.getString("AutomaticMediaDownload", R.string.AutomaticMediaDownload), "mediaDownloadSectionRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(208, LocaleController.getString("WhenUsingMobileData", R.string.WhenUsingMobileData), LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataAutoDownloadActivity(0))), - new SearchResult(209, LocaleController.getString("WhenConnectedOnWiFi", R.string.WhenConnectedOnWiFi), LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataAutoDownloadActivity(1))), - new SearchResult(210, LocaleController.getString("WhenRoaming", R.string.WhenRoaming), LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataAutoDownloadActivity(2))), - new SearchResult(211, LocaleController.getString("ResetAutomaticMediaDownload", R.string.ResetAutomaticMediaDownload), "resetDownloadRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(212, LocaleController.getString("AutoplayMedia", R.string.AutoplayMedia), "autoplayHeaderRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(213, LocaleController.getString("AutoplayGIF", R.string.AutoplayGIF), "autoplayGifsRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(214, LocaleController.getString("AutoplayVideo", R.string.AutoplayVideo), "autoplayVideoRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(215, LocaleController.getString("Streaming", R.string.Streaming), "streamSectionRow", LocaleController.getString("DataSettings", R.string.DataSettings), 
R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(216, LocaleController.getString("EnableStreaming", R.string.EnableStreaming), "enableStreamRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(217, LocaleController.getString("Calls", R.string.Calls), "callsSectionRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(218, LocaleController.getString("VoipUseLessData", R.string.VoipUseLessData), "useLessDataForCallsRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(219, LocaleController.getString("VoipQuickReplies", R.string.VoipQuickReplies), "quickRepliesRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(220, LocaleController.getString("ProxySettings", R.string.ProxySettings), LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new ProxyListActivity())), - new SearchResult(221, LocaleController.getString("UseProxyForCalls", R.string.UseProxyForCalls), "callsRow", LocaleController.getString("DataSettings", R.string.DataSettings), LocaleController.getString("ProxySettings", R.string.ProxySettings), R.drawable.msg_data, () -> presentFragment(new ProxyListActivity())), - new SearchResult(111, LocaleController.getString("PrivacyDeleteCloudDrafts", R.string.PrivacyDeleteCloudDrafts), "clearDraftsRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(222, LocaleController.getString(R.string.SaveToGallery), "saveToGallerySectionRow", LocaleController.getString(R.string.DataSettings), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(223, LocaleController.getString(R.string.SaveToGalleryPrivate), "saveToGalleryPeerRow", LocaleController.getString(R.string.DataSettings), LocaleController.getString(R.string.SaveToGallery), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(224, LocaleController.getString(R.string.SaveToGalleryGroups), "saveToGalleryGroupsRow", LocaleController.getString(R.string.DataSettings), LocaleController.getString(R.string.SaveToGallery), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - new SearchResult(225, LocaleController.getString(R.string.SaveToGalleryChannels), "saveToGalleryChannelsRow", LocaleController.getString(R.string.DataSettings), LocaleController.getString(R.string.SaveToGallery), R.drawable.msg_data, () -> presentFragment(new DataSettingsActivity())), - - new SearchResult(300, LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), - new SearchResult(301, LocaleController.getString("TextSizeHeader", R.string.TextSizeHeader), "textSizeHeaderRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), - new SearchResult(302, LocaleController.getString(R.string.ChangeChatBackground), 
LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new WallpapersListActivity(WallpapersListActivity.TYPE_ALL))), - new SearchResult(303, LocaleController.getString("SetColor", R.string.SetColor), null, LocaleController.getString("ChatSettings", R.string.ChatSettings), LocaleController.getString("ChatBackground", R.string.ChatBackground), R.drawable.msg_msgbubble3, () -> presentFragment(new WallpapersListActivity(WallpapersListActivity.TYPE_COLOR))), - new SearchResult(304, LocaleController.getString("ResetChatBackgrounds", R.string.ResetChatBackgrounds), "resetRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), LocaleController.getString("ChatBackground", R.string.ChatBackground), R.drawable.msg_msgbubble3, () -> presentFragment(new WallpapersListActivity(WallpapersListActivity.TYPE_ALL))), - new SearchResult(306, LocaleController.getString("ColorTheme", R.string.ColorTheme), "themeHeaderRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), - new SearchResult(319, LocaleController.getString(R.string.BrowseThemes), null, LocaleController.getString(R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_THEMES_BROWSER))), - new SearchResult(320, LocaleController.getString(R.string.CreateNewTheme), "createNewThemeRow", LocaleController.getString(R.string.ChatSettings), LocaleController.getString(R.string.BrowseThemes), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_THEMES_BROWSER))), - new SearchResult(321, LocaleController.getString(R.string.BubbleRadius), "bubbleRadiusHeaderRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), - new SearchResult(322, LocaleController.getString(R.string.ChatList), "chatListHeaderRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), - new SearchResult(323, LocaleController.getString(R.string.ChatListSwipeGesture), "swipeGestureHeaderRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), - new SearchResult(324, LocaleController.getString(R.string.AppIcon), "appIconHeaderRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), - new SearchResult(305, LocaleController.getString("AutoNightTheme", R.string.AutoNightTheme), LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_NIGHT))), - new SearchResult(307, LocaleController.getString("ChromeCustomTabs", R.string.ChromeCustomTabs), "customTabsRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), - new SearchResult(308, LocaleController.getString("DirectShare", R.string.DirectShare), "directShareRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> 
presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), - new SearchResult(309, LocaleController.getString("EnableAnimations", R.string.EnableAnimations), "enableAnimationsRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), - new SearchResult(310, LocaleController.getString("RaiseToSpeak", R.string.RaiseToSpeak), "raiseToSpeakRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), - new SearchResult(325, LocaleController.getString(R.string.MicrophoneForVoiceMessages), "bluetoothScoRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), - new SearchResult(311, LocaleController.getString("SendByEnter", R.string.SendByEnter), "sendByEnterRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), - SharedConfig.canBlurChat() ? new SearchResult(326, LocaleController.getString(R.string.BlurInChat), "chatBlurRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))) : null, - new SearchResult(318, LocaleController.getString("DistanceUnits", R.string.DistanceUnits), "distanceRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg_msgbubble3, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), + BulletinFactory.of(ProfileActivity.this).createRestrictVoiceMessagesPremiumBulletin().show(); + return; + } + presentFragment(new PrivacyControlActivity(ContactsController.PRIVACY_RULES_TYPE_VOICE_MESSAGES, true)); + }), + new SearchResult(108, LocaleController.getString("Passcode", R.string.Passcode), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg2_secret, () -> presentFragment(PasscodeActivity.determineOpenFragment())), + new SearchResult(109, LocaleController.getString("TwoStepVerification", R.string.TwoStepVerification), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg2_secret, () -> presentFragment(new TwoStepVerificationActivity())), + new SearchResult(110, LocaleController.getString("SessionsTitle", R.string.SessionsTitle), R.drawable.msg2_secret, () -> presentFragment(new SessionsActivity(0))), + getMessagesController().autoarchiveAvailable ? 
new SearchResult(121, LocaleController.getString("ArchiveAndMute", R.string.ArchiveAndMute), "newChatsRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg2_secret, () -> presentFragment(new PrivacySettingsActivity())) : null, + new SearchResult(112, LocaleController.getString("DeleteAccountIfAwayFor2", R.string.DeleteAccountIfAwayFor2), "deleteAccountRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg2_secret, () -> presentFragment(new PrivacySettingsActivity())), + new SearchResult(113, LocaleController.getString("PrivacyPaymentsClear", R.string.PrivacyPaymentsClear), "paymentsClearRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg2_secret, () -> presentFragment(new PrivacySettingsActivity())), + new SearchResult(114, LocaleController.getString("WebSessionsTitle", R.string.WebSessionsTitle), LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg2_secret, () -> presentFragment(new SessionsActivity(1))), + new SearchResult(115, LocaleController.getString("SyncContactsDelete", R.string.SyncContactsDelete), "contactsDeleteRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg2_secret, () -> presentFragment(new PrivacySettingsActivity())), + new SearchResult(116, LocaleController.getString("SyncContacts", R.string.SyncContacts), "contactsSyncRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg2_secret, () -> presentFragment(new PrivacySettingsActivity())), + new SearchResult(117, LocaleController.getString("SuggestContacts", R.string.SuggestContacts), "contactsSuggestRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg2_secret, () -> presentFragment(new PrivacySettingsActivity())), + new SearchResult(118, LocaleController.getString("MapPreviewProvider", R.string.MapPreviewProvider), "secretMapRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg2_secret, () -> presentFragment(new PrivacySettingsActivity())), + new SearchResult(119, LocaleController.getString("SecretWebPage", R.string.SecretWebPage), "secretWebpageRow", LocaleController.getString("PrivacySettings", R.string.PrivacySettings), R.drawable.msg2_secret, () -> presentFragment(new PrivacySettingsActivity())), + + new SearchResult(120, LocaleController.getString(R.string.Devices), R.drawable.msg2_devices, () -> presentFragment(new SessionsActivity(0))), + new SearchResult(121, LocaleController.getString(R.string.TerminateAllSessions), "terminateAllSessionsRow", LocaleController.getString(R.string.Devices), R.drawable.msg2_devices, () -> presentFragment(new SessionsActivity(0))), + new SearchResult(122, LocaleController.getString(R.string.LinkDesktopDevice), LocaleController.getString(R.string.Devices), R.drawable.msg2_devices, () -> presentFragment(new SessionsActivity(0).setHighlightLinkDesktopDevice())), + + new SearchResult(200, LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(201, LocaleController.getString("DataUsage", R.string.DataUsage), "usageSectionRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(202, LocaleController.getString("StorageUsage", R.string.StorageUsage), 
LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new CacheControlActivity())), + new SearchResult(203, LocaleController.getString("KeepMedia", R.string.KeepMedia), "keepMediaRow", LocaleController.getString("DataSettings", R.string.DataSettings), LocaleController.getString("StorageUsage", R.string.StorageUsage), R.drawable.msg2_data, () -> presentFragment(new CacheControlActivity())), + new SearchResult(204, LocaleController.getString("ClearMediaCache", R.string.ClearMediaCache), "cacheRow", LocaleController.getString("DataSettings", R.string.DataSettings), LocaleController.getString("StorageUsage", R.string.StorageUsage), R.drawable.msg2_data, () -> presentFragment(new CacheControlActivity())), + new SearchResult(205, LocaleController.getString("LocalDatabase", R.string.LocalDatabase), "databaseRow", LocaleController.getString("DataSettings", R.string.DataSettings), LocaleController.getString("StorageUsage", R.string.StorageUsage), R.drawable.msg2_data, () -> presentFragment(new CacheControlActivity())), + new SearchResult(206, LocaleController.getString("NetworkUsage", R.string.NetworkUsage), LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataUsage2Activity())), + new SearchResult(207, LocaleController.getString("AutomaticMediaDownload", R.string.AutomaticMediaDownload), "mediaDownloadSectionRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(208, LocaleController.getString("WhenUsingMobileData", R.string.WhenUsingMobileData), LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataAutoDownloadActivity(0))), + new SearchResult(209, LocaleController.getString("WhenConnectedOnWiFi", R.string.WhenConnectedOnWiFi), LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataAutoDownloadActivity(1))), + new SearchResult(210, LocaleController.getString("WhenRoaming", R.string.WhenRoaming), LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataAutoDownloadActivity(2))), + new SearchResult(211, LocaleController.getString("ResetAutomaticMediaDownload", R.string.ResetAutomaticMediaDownload), "resetDownloadRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(212, LocaleController.getString("AutoplayMedia", R.string.AutoplayMedia), "autoplayHeaderRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(213, LocaleController.getString("AutoplayGIF", R.string.AutoplayGIF), "autoplayGifsRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(214, LocaleController.getString("AutoplayVideo", R.string.AutoplayVideo), "autoplayVideoRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(215, LocaleController.getString("Streaming", R.string.Streaming), "streamSectionRow", LocaleController.getString("DataSettings", R.string.DataSettings), 
R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(216, LocaleController.getString("EnableStreaming", R.string.EnableStreaming), "enableStreamRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(217, LocaleController.getString("Calls", R.string.Calls), "callsSectionRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(218, LocaleController.getString("VoipUseLessData", R.string.VoipUseLessData), "useLessDataForCallsRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(219, LocaleController.getString("VoipQuickReplies", R.string.VoipQuickReplies), "quickRepliesRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(220, LocaleController.getString("ProxySettings", R.string.ProxySettings), LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new ProxyListActivity())), + new SearchResult(221, LocaleController.getString("UseProxyForCalls", R.string.UseProxyForCalls), "callsRow", LocaleController.getString("DataSettings", R.string.DataSettings), LocaleController.getString("ProxySettings", R.string.ProxySettings), R.drawable.msg2_data, () -> presentFragment(new ProxyListActivity())), + new SearchResult(111, LocaleController.getString("PrivacyDeleteCloudDrafts", R.string.PrivacyDeleteCloudDrafts), "clearDraftsRow", LocaleController.getString("DataSettings", R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(222, LocaleController.getString(R.string.SaveToGallery), "saveToGallerySectionRow", LocaleController.getString(R.string.DataSettings), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(223, LocaleController.getString(R.string.SaveToGalleryPrivate), "saveToGalleryPeerRow", LocaleController.getString(R.string.DataSettings), LocaleController.getString(R.string.SaveToGallery), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(224, LocaleController.getString(R.string.SaveToGalleryGroups), "saveToGalleryGroupsRow", LocaleController.getString(R.string.DataSettings), LocaleController.getString(R.string.SaveToGallery), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + new SearchResult(225, LocaleController.getString(R.string.SaveToGalleryChannels), "saveToGalleryChannelsRow", LocaleController.getString(R.string.DataSettings), LocaleController.getString(R.string.SaveToGallery), R.drawable.msg2_data, () -> presentFragment(new DataSettingsActivity())), + + new SearchResult(300, LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), + new SearchResult(301, LocaleController.getString("TextSizeHeader", R.string.TextSizeHeader), "textSizeHeaderRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), + new SearchResult(302, 
LocaleController.getString(R.string.ChangeChatBackground), LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new WallpapersListActivity(WallpapersListActivity.TYPE_ALL))), + new SearchResult(303, LocaleController.getString("SetColor", R.string.SetColor), null, LocaleController.getString("ChatSettings", R.string.ChatSettings), LocaleController.getString("ChatBackground", R.string.ChatBackground), R.drawable.msg2_discussion, () -> presentFragment(new WallpapersListActivity(WallpapersListActivity.TYPE_COLOR))), + new SearchResult(304, LocaleController.getString("ResetChatBackgrounds", R.string.ResetChatBackgrounds), "resetRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), LocaleController.getString("ChatBackground", R.string.ChatBackground), R.drawable.msg2_discussion, () -> presentFragment(new WallpapersListActivity(WallpapersListActivity.TYPE_ALL))), + new SearchResult(306, LocaleController.getString("ColorTheme", R.string.ColorTheme), "themeHeaderRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), + new SearchResult(319, LocaleController.getString(R.string.BrowseThemes), null, LocaleController.getString(R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_THEMES_BROWSER))), + new SearchResult(320, LocaleController.getString(R.string.CreateNewTheme), "createNewThemeRow", LocaleController.getString(R.string.ChatSettings), LocaleController.getString(R.string.BrowseThemes), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_THEMES_BROWSER))), + new SearchResult(321, LocaleController.getString(R.string.BubbleRadius), "bubbleRadiusHeaderRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), + new SearchResult(322, LocaleController.getString(R.string.ChatList), "chatListHeaderRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), + new SearchResult(323, LocaleController.getString(R.string.ChatListSwipeGesture), "swipeGestureHeaderRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), + new SearchResult(324, LocaleController.getString(R.string.AppIcon), "appIconHeaderRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), + new SearchResult(305, LocaleController.getString("AutoNightTheme", R.string.AutoNightTheme), LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_NIGHT))), + new SearchResult(307, LocaleController.getString("ChromeCustomTabs", R.string.ChromeCustomTabs), "customTabsRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), + new SearchResult(308, LocaleController.getString("DirectShare", R.string.DirectShare), "directShareRow", LocaleController.getString("ChatSettings", 
R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), + new SearchResult(309, LocaleController.getString("EnableAnimations", R.string.EnableAnimations), "enableAnimationsRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), + new SearchResult(310, LocaleController.getString("RaiseToSpeak", R.string.RaiseToSpeak), "raiseToSpeakRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), + new SearchResult(325, LocaleController.getString(R.string.MicrophoneForVoiceMessages), "bluetoothScoRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), + new SearchResult(311, LocaleController.getString("SendByEnter", R.string.SendByEnter), "sendByEnterRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), + SharedConfig.canBlurChat() ? new SearchResult(326, LocaleController.getString(R.string.BlurInChat), "chatBlurRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))) : null, + new SearchResult(318, LocaleController.getString("DistanceUnits", R.string.DistanceUnits), "distanceRow", LocaleController.getString("ChatSettings", R.string.ChatSettings), R.drawable.msg2_discussion, () -> presentFragment(new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC))), + + new SearchResult(600, LocaleController.getString(R.string.StickersName), R.drawable.msg2_sticker, () -> presentFragment(new StickersActivity(MediaDataController.TYPE_IMAGE, null))), + new SearchResult(601, LocaleController.getString("SuggestStickers", R.string.SuggestStickers), "suggestRow", LocaleController.getString(R.string.StickersName), R.drawable.msg2_sticker, () -> presentFragment(new StickersActivity(MediaDataController.TYPE_IMAGE, null))), + new SearchResult(602, LocaleController.getString("FeaturedStickers", R.string.FeaturedStickers), "featuredStickersHeaderRow", LocaleController.getString(R.string.StickersName), R.drawable.msg2_sticker, () -> presentFragment(new StickersActivity(MediaDataController.TYPE_IMAGE, null))), + new SearchResult(603, LocaleController.getString("Masks", R.string.Masks), null, LocaleController.getString(R.string.StickersName), R.drawable.msg2_sticker, () -> presentFragment(new StickersActivity(MediaDataController.TYPE_MASK, null))), + new SearchResult(604, LocaleController.getString("ArchivedStickers", R.string.ArchivedStickers), null, LocaleController.getString(R.string.StickersName), R.drawable.msg2_sticker, () -> presentFragment(new ArchivedStickersActivity(MediaDataController.TYPE_IMAGE))), + new SearchResult(605, LocaleController.getString("ArchivedMasks", R.string.ArchivedMasks), null, LocaleController.getString(R.string.StickersName), R.drawable.msg2_sticker, () -> presentFragment(new ArchivedStickersActivity(MediaDataController.TYPE_MASK))), + new SearchResult(606, LocaleController.getString(R.string.LargeEmoji), "largeEmojiRow", LocaleController.getString(R.string.StickersName), R.drawable.msg2_sticker, () -> presentFragment(new 
StickersActivity(MediaDataController.TYPE_IMAGE, null))), + new SearchResult(607, LocaleController.getString(R.string.LoopAnimatedStickers), "loopRow", LocaleController.getString(R.string.StickersName), R.drawable.msg2_sticker, () -> presentFragment(new StickersActivity(MediaDataController.TYPE_IMAGE, null))), + new SearchResult(608, LocaleController.getString(R.string.Emoji), null, LocaleController.getString(R.string.StickersName), R.drawable.input_smile, () -> presentFragment(new StickersActivity(MediaDataController.TYPE_EMOJIPACKS, null))), + new SearchResult(609, LocaleController.getString(R.string.SuggestAnimatedEmoji), "suggestAnimatedEmojiRow", LocaleController.getString(R.string.StickersName), LocaleController.getString(R.string.Emoji), R.drawable.input_smile, () -> presentFragment(new StickersActivity(MediaDataController.TYPE_EMOJIPACKS, null))), + new SearchResult(610, LocaleController.getString(R.string.FeaturedEmojiPacks), "featuredStickersHeaderRow", LocaleController.getString(R.string.StickersName), LocaleController.getString(R.string.Emoji), R.drawable.input_smile, () -> presentFragment(new StickersActivity(MediaDataController.TYPE_EMOJIPACKS, null))), + new SearchResult(611, LocaleController.getString(R.string.DoubleTapSetting), null, LocaleController.getString(R.string.StickersName), R.drawable.msg2_sticker, () -> presentFragment(new ReactionsDoubleTapManageActivity())), + + new SearchResult(700, LocaleController.getString(R.string.Filters), null, R.drawable.msg2_folder, () -> presentFragment(new FiltersSetupActivity())), + new SearchResult(701, LocaleController.getString(R.string.CreateNewFilter), "createFilterRow", LocaleController.getString(R.string.Filters), R.drawable.msg2_folder, () -> presentFragment(new FiltersSetupActivity())), new SearchResult(600, LocaleController.getString(R.string.StickersName), R.drawable.msg_sticker, () -> presentFragment(new StickersActivity(MediaDataController.TYPE_IMAGE, null))), new SearchResult(601, LocaleController.getString("SuggestStickers", R.string.SuggestStickers), "suggestRow", LocaleController.getString(R.string.StickersName), R.drawable.msg_sticker, () -> presentFragment(new StickersActivity(MediaDataController.TYPE_IMAGE, null))), @@ -10003,31 +10394,13 @@ private SearchResult[] onCreateSearchArray() { new SearchResult(610, LocaleController.getString(R.string.FeaturedEmojiPacks), "featuredStickersHeaderRow", LocaleController.getString(R.string.StickersName), LocaleController.getString(R.string.Emoji), R.drawable.input_smile, () -> presentFragment(new StickersActivity(MediaDataController.TYPE_EMOJIPACKS, null))), new SearchResult(611, LocaleController.getString(R.string.DoubleTapSetting), null, LocaleController.getString(R.string.StickersName), R.drawable.msg_sticker, () -> presentFragment(new ReactionsDoubleTapManageActivity())), - new SearchResult(700, LocaleController.getString(R.string.Filters), null, R.drawable.msg_folder, () -> presentFragment(new FiltersSetupActivity())), - new SearchResult(701, LocaleController.getString(R.string.CreateNewFilter), "createFilterRow", LocaleController.getString(R.string.Filters), R.drawable.msg_folder, () -> presentFragment(new FiltersSetupActivity())), - - isPremiumFeatureAvailable(-1) ? new SearchResult(800, LocaleController.getString(R.string.TelegramPremium), R.drawable.msg_settings_premium, () -> presentFragment(new PremiumPreviewFragment("settings"))) : null, - isPremiumFeatureAvailable(PremiumPreviewFragment.PREMIUM_FEATURE_LIMITS) ? 
new SearchResult(801, LocaleController.getString(R.string.PremiumPreviewLimits), LocaleController.getString(R.string.TelegramPremium), R.drawable.msg_settings_premium, () -> showDialog(new PremiumFeatureBottomSheet(ProfileActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_LIMITS, false).setForceAbout())) : null, - isPremiumFeatureAvailable(PremiumPreviewFragment.PREMIUM_FEATURE_ANIMATED_EMOJI) ? new SearchResult(802, LocaleController.getString(R.string.PremiumPreviewEmoji), LocaleController.getString(R.string.TelegramPremium), R.drawable.msg_settings_premium, () -> showDialog(new PremiumFeatureBottomSheet(ProfileActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_ANIMATED_EMOJI, false).setForceAbout())) : null, - isPremiumFeatureAvailable(PremiumPreviewFragment.PREMIUM_FEATURE_UPLOAD_LIMIT) ? new SearchResult(803, LocaleController.getString(R.string.PremiumPreviewUploads), LocaleController.getString(R.string.TelegramPremium), R.drawable.msg_settings_premium, () -> showDialog(new PremiumFeatureBottomSheet(ProfileActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_UPLOAD_LIMIT, false).setForceAbout())) : null, - isPremiumFeatureAvailable(PremiumPreviewFragment.PREMIUM_FEATURE_DOWNLOAD_SPEED) ? new SearchResult(804, LocaleController.getString(R.string.PremiumPreviewDownloadSpeed), LocaleController.getString(R.string.TelegramPremium), R.drawable.msg_settings_premium, () -> showDialog(new PremiumFeatureBottomSheet(ProfileActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_DOWNLOAD_SPEED, false).setForceAbout())) : null, - isPremiumFeatureAvailable(PremiumPreviewFragment.PREMIUM_FEATURE_VOICE_TO_TEXT) ? new SearchResult(805, LocaleController.getString(R.string.PremiumPreviewVoiceToText), LocaleController.getString(R.string.TelegramPremium), R.drawable.msg_settings_premium, () -> showDialog(new PremiumFeatureBottomSheet(ProfileActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_VOICE_TO_TEXT, false).setForceAbout())) : null, - isPremiumFeatureAvailable(PremiumPreviewFragment.PREMIUM_FEATURE_ADS) ? new SearchResult(806, LocaleController.getString(R.string.PremiumPreviewNoAds), LocaleController.getString(R.string.TelegramPremium), R.drawable.msg_settings_premium, () -> showDialog(new PremiumFeatureBottomSheet(ProfileActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_ADS, false).setForceAbout())) : null, - isPremiumFeatureAvailable(PremiumPreviewFragment.PREMIUM_FEATURE_REACTIONS) ? new SearchResult(807, LocaleController.getString(R.string.PremiumPreviewReactions), LocaleController.getString(R.string.TelegramPremium), R.drawable.msg_settings_premium, () -> showDialog(new PremiumFeatureBottomSheet(ProfileActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_REACTIONS, false).setForceAbout())) : null, - isPremiumFeatureAvailable(PremiumPreviewFragment.PREMIUM_FEATURE_STICKERS) ? new SearchResult(808, LocaleController.getString(R.string.PremiumPreviewStickers), LocaleController.getString(R.string.TelegramPremium), R.drawable.msg_settings_premium, () -> showDialog(new PremiumFeatureBottomSheet(ProfileActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_STICKERS, false).setForceAbout())) : null, - isPremiumFeatureAvailable(PremiumPreviewFragment.PREMIUM_FEATURE_ADVANCED_CHAT_MANAGEMENT) ? 
new SearchResult(809, LocaleController.getString(R.string.PremiumPreviewAdvancedChatManagement), LocaleController.getString(R.string.TelegramPremium), R.drawable.msg_settings_premium, () -> showDialog(new PremiumFeatureBottomSheet(ProfileActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_ADVANCED_CHAT_MANAGEMENT, false).setForceAbout())) : null, - isPremiumFeatureAvailable(PremiumPreviewFragment.PREMIUM_FEATURE_PROFILE_BADGE) ? new SearchResult(810, LocaleController.getString(R.string.PremiumPreviewProfileBadge), LocaleController.getString(R.string.TelegramPremium), R.drawable.msg_settings_premium, () -> showDialog(new PremiumFeatureBottomSheet(ProfileActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_PROFILE_BADGE, false).setForceAbout())) : null, - isPremiumFeatureAvailable(PremiumPreviewFragment.PREMIUM_FEATURE_ANIMATED_AVATARS) ? new SearchResult(811, LocaleController.getString(R.string.PremiumPreviewAnimatedProfiles), LocaleController.getString(R.string.TelegramPremium), R.drawable.msg_settings_premium, () -> showDialog(new PremiumFeatureBottomSheet(ProfileActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_ANIMATED_AVATARS, false).setForceAbout())) : null, - isPremiumFeatureAvailable(PremiumPreviewFragment.PREMIUM_FEATURE_APPLICATION_ICONS) ? new SearchResult(812, LocaleController.getString(R.string.PremiumPreviewAppIcon), LocaleController.getString(R.string.TelegramPremium), R.drawable.msg_settings_premium, () -> showDialog(new PremiumFeatureBottomSheet(ProfileActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_APPLICATION_ICONS, false).setForceAbout())) : null, - isPremiumFeatureAvailable(PremiumPreviewFragment.PREMIUM_FEATURE_EMOJI_STATUS) ? new SearchResult(813, LocaleController.getString(R.string.PremiumPreviewEmojiStatus), LocaleController.getString(R.string.TelegramPremium), R.drawable.msg_settings_premium, () -> showDialog(new PremiumFeatureBottomSheet(ProfileActivity.this, PremiumPreviewFragment.PREMIUM_FEATURE_EMOJI_STATUS, false).setForceAbout())) : null, - - new SearchResult(400, LocaleController.getString("Language", R.string.Language), R.drawable.msg_language, () -> presentFragment(new LanguageSelectActivity())), - new SearchResult(405, LocaleController.getString(R.string.ShowTranslateButton), LocaleController.getString(R.string.Language), R.drawable.msg_language, () -> presentFragment(new LanguageSelectActivity())), - MessagesController.getGlobalMainSettings().getBoolean("translate_button", false) ? 
new SearchResult(406, LocaleController.getString(R.string.DoNotTranslate), LocaleController.getString(R.string.Language), R.drawable.msg_language, () -> presentFragment(new LanguageSelectActivity())) : null, - - new SearchResult(402, LocaleController.getString("AskAQuestion", R.string.AskAQuestion), LocaleController.getString("SettingsHelp", R.string.SettingsHelp), R.drawable.msg_help, () -> showDialog(AlertsCreator.createSupportAlert(ProfileActivity.this, null))), - new SearchResult(403, LocaleController.getString("TelegramFAQ", R.string.TelegramFAQ), LocaleController.getString("SettingsHelp", R.string.SettingsHelp), R.drawable.msg_help, () -> Browser.openUrl(getParentActivity(), LocaleController.getString("TelegramFaqUrl", R.string.TelegramFaqUrl))), - new SearchResult(404, LocaleController.getString("PrivacyPolicy", R.string.PrivacyPolicy), LocaleController.getString("SettingsHelp", R.string.SettingsHelp), R.drawable.msg_help, () -> Browser.openUrl(getParentActivity(), LocaleController.getString("PrivacyPolicyUrl", R.string.PrivacyPolicyUrl))), + new SearchResult(400, LocaleController.getString("Language", R.string.Language), R.drawable.msg2_language, () -> presentFragment(new LanguageSelectActivity())), + new SearchResult(405, LocaleController.getString(R.string.ShowTranslateButton), LocaleController.getString(R.string.Language), R.drawable.msg2_language, () -> presentFragment(new LanguageSelectActivity())), + MessagesController.getInstance(currentAccount).getTranslateController().isContextTranslateEnabled() ? new SearchResult(406, LocaleController.getString(R.string.DoNotTranslate), LocaleController.getString(R.string.Language), R.drawable.msg2_language, () -> presentFragment(new LanguageSelectActivity())) : null, + + new SearchResult(402, LocaleController.getString("AskAQuestion", R.string.AskAQuestion), LocaleController.getString("SettingsHelp", R.string.SettingsHelp), R.drawable.msg2_help, () -> showDialog(AlertsCreator.createSupportAlert(ProfileActivity.this, null))), + new SearchResult(403, LocaleController.getString("TelegramFAQ", R.string.TelegramFAQ), LocaleController.getString("SettingsHelp", R.string.SettingsHelp), R.drawable.msg2_help, () -> Browser.openUrl(getParentActivity(), LocaleController.getString("TelegramFaqUrl", R.string.TelegramFaqUrl))), + new SearchResult(404, LocaleController.getString("PrivacyPolicy", R.string.PrivacyPolicy), LocaleController.getString("SettingsHelp", R.string.SettingsHelp), R.drawable.msg2_help, () -> Browser.openUrl(getParentActivity(), LocaleController.getString("PrivacyPolicyUrl", R.string.PrivacyPolicyUrl))), }; } @@ -10774,6 +11147,7 @@ public void fillPositions(SparseIntArray sparseIntArray) { put(++pointer, premiumSectionsRow, sparseIntArray); put(++pointer, privacyRow, sparseIntArray); put(++pointer, dataRow, sparseIntArray); + put(++pointer, liteModeRow, sparseIntArray); put(++pointer, chatRow, sparseIntArray); put(++pointer, filtersRow, sparseIntArray); put(++pointer, stickersRow, sparseIntArray); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ProfileNotificationsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ProfileNotificationsActivity.java index ee7e880dcf..9ea76815f5 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ProfileNotificationsActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ProfileNotificationsActivity.java @@ -381,7 +381,7 @@ public boolean supportsPredictiveItemAnimations() { showDialog(dialog); TextView button = (TextView) 
dialog.getButton(DialogInterface.BUTTON_POSITIVE);
                 if (button != null) {
-                    button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2));
+                    button.setTextColor(Theme.getColor(Theme.key_dialogTextRed));
                 }
             } else if (position == soundRow) {
                 Bundle bundle = new Bundle();
@@ -899,6 +899,11 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) {
                     }
                     break;
                 }
+                case VIEW_TYPE_SHADOW: {
+                    ShadowSectionCell shadowCell = (ShadowSectionCell) holder.itemView;
+                    shadowCell.setTopBottom(position > 0, position < getItemCount() - 1);
+                    break;
+                }
             }
         }
diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ProxyListActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ProxyListActivity.java
index 2683857370..5b71434d0b 100644
--- a/TMessagesProj/src/main/java/org/telegram/ui/ProxyListActivity.java
+++ b/TMessagesProj/src/main/java/org/telegram/ui/ProxyListActivity.java
@@ -10,11 +10,12 @@
 import android.Manifest;
 import android.annotation.SuppressLint;
-import android.app.Dialog;
-import android.content.Context;
 import android.animation.Animator;
 import android.animation.AnimatorListenerAdapter;
 import android.animation.ValueAnimator;
+import android.app.Dialog;
+import android.content.Context;
+import android.content.Intent;
 import android.content.DialogInterface;
 import android.content.Intent;
 import android.content.SharedPreferences;
@@ -40,6 +41,7 @@
 import androidx.core.content.FileProvider;
 import androidx.recyclerview.widget.DefaultItemAnimator;
 import androidx.recyclerview.widget.LinearLayoutManager;
+import androidx.recyclerview.widget.ListAdapter;
 import androidx.recyclerview.widget.RecyclerView;

 import com.v2ray.ang.V2RayConfig;
@@ -61,6 +63,7 @@
 import org.telegram.messenger.LocaleController;
 import org.telegram.messenger.MessagesController;
 import org.telegram.messenger.NotificationCenter;
+import org.telegram.messenger.ProxyRotationController;
 import org.telegram.messenger.R;
 import org.telegram.messenger.SendMessagesHelper;
 import org.telegram.messenger.SharedConfig;
@@ -84,16 +87,17 @@
 import org.telegram.ui.Components.LayoutHelper;
 import org.telegram.ui.Components.NumberTextView;
 import org.telegram.ui.Components.RecyclerListView;
+import org.telegram.ui.Components.SlideChooseView;
 import org.telegram.ui.Components.URLSpanNoUnderline;

 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.LinkedList;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeSet;
@@ -124,6 +128,7 @@
 import tw.nekomimi.nekogram.NekoConfig;

 public class ProxyListActivity extends BaseFragment implements NotificationCenter.NotificationCenterDelegate {

+    private final static boolean IS_PROXY_ROTATION_AVAILABLE = true;
     private static final int MENU_DELETE = 0;
     private static final int MENU_SHARE = 1;
@@ -140,13 +145,27 @@ public class ProxyListActivity extends BaseFragment implements NotificationCente
     private int rowCount;
     private int useProxyRow;
     private int enablePublicProxyRow;
-    private int useProxyDetailRow;
+    private int useProxyShadowRow;
     private int connectionsHeaderRow;
     private int proxyStartRow;
     private int proxyEndRow;
-    private int proxyDetailRow;
+    private int proxyAddRow;
+    private int proxyShadowRow;
     private int callsRow;
+    private int rotationRow;
+    private int rotationTimeoutRow;
+    private int rotationTimeoutInfoRow;
     private int callsDetailRow;
+    private int deleteAllRow;
+
+    private ItemTouchHelper itemTouchHelper;
+    private NumberTextView selectedCountTextView;
+    private ActionBarMenuItem shareMenuItem;
+    private ActionBarMenuItem deleteMenuItem;
+
+    private List selectedItems = new ArrayList<>();
+    private List proxyList = new ArrayList<>();
+    private boolean wasCheckedAllList;

     private ActionBarMenuItem otherItem;
@@ -154,9 +173,14 @@ public class TextDetailProxyCell extends FrameLayout {
         private TextView textView;
         private TextView valueTextView;
+        private ImageView checkImageView;
         private SharedConfig.ProxyInfo currentInfo;
         private Drawable checkDrawable;
+        private CheckBox2 checkBox;
+        private boolean isSelected;
+        private boolean isSelectionEnabled;
+        private int color;

         private Pattern urlPattern;
@@ -184,6 +208,41 @@ public TextDetailProxyCell(Context context) {
             valueTextView.setPadding(0, 0, 0, 0);
             addView(valueTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.TOP, (LocaleController.isRTL ? 56 : 21), 35, (LocaleController.isRTL ? 21 : 56), 0));

+            checkImageView = new ImageView(context);
+            checkImageView.setImageResource(R.drawable.msg_info);
+            checkImageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText3), PorterDuff.Mode.MULTIPLY));
+            checkImageView.setScaleType(ImageView.ScaleType.CENTER);
+            checkImageView.setContentDescription(LocaleController.getString("Edit", R.string.Edit));
+            addView(checkImageView, LayoutHelper.createFrame(48, 48, (LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT) | Gravity.TOP, 8, 8, 8, 0));
+            checkImageView.setOnClickListener(v -> {
+                SharedConfig.ProxyInfo info = currentInfo;
+                if (info instanceof SharedConfig.VmessProxy) {
+                    if (((SharedConfig.VmessProxy) info).bean.getConfigType() == V2RayConfig.EConfigType.Trojan) {
+                        presentFragment(new TrojanSettingsActivity((SharedConfig.VmessProxy) info));
+                    } else {
+                        presentFragment(new VmessSettingsActivity((SharedConfig.VmessProxy) info));
+                    }
+                } else if (info instanceof SharedConfig.ShadowsocksProxy) {
+                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+                        presentFragment(new ShadowsocksSettingsActivity((SharedConfig.ShadowsocksProxy) info));
+                    }
+                } else if (info instanceof SharedConfig.ShadowsocksRProxy) {
+                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+                        presentFragment(new ShadowsocksRSettingsActivity((SharedConfig.ShadowsocksRProxy) info));
+                    }
+                } else if (info instanceof SharedConfig.WsProxy) {
+                    presentFragment(new WsSettingsActivity((SharedConfig.WsProxy) info));
+                } else {
+                    presentFragment(new ProxySettingsActivity(info));
+                }
+            });
+
+            checkBox = new CheckBox2(context, 21);
+            checkBox.setColor(Theme.key_checkbox, Theme.key_radioBackground, Theme.key_checkboxCheck);
+            checkBox.setDrawBackgroundAsArc(14);
+            checkBox.setVisibility(GONE);
+            addView(checkBox, LayoutHelper.createFrame(24, 24, (LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.CENTER_VERTICAL, 16, 0, 8, 0));
+            setWillNotDraw(false);
         }
@@ -271,6 +330,81 @@ public void updateStatus() {
         }

+        public void setSelectionEnabled(boolean enabled, boolean animated) {
+            if (isSelectionEnabled == enabled && animated) {
+                return;
+            }
+            isSelectionEnabled = enabled;
+
+            float fromX = 0, toX = LocaleController.isRTL ? -AndroidUtilities.dp(32) : AndroidUtilities.dp(32);
+            if (!animated) {
+                float x = enabled ? toX : fromX;
+                textView.setTranslationX(x);
+                valueTextView.setTranslationX(x);
+                checkImageView.setTranslationX(x);
+                checkBox.setTranslationX((LocaleController.isRTL ? AndroidUtilities.dp(32) : -AndroidUtilities.dp(32)) + x);
+                checkImageView.setVisibility(enabled ? GONE : VISIBLE);
+                checkImageView.setAlpha(1f);
+                checkImageView.setScaleX(1f);
+                checkImageView.setScaleY(1f);
+                checkBox.setVisibility(enabled ? VISIBLE : GONE);
+                checkBox.setAlpha(1f);
+                checkBox.setScaleX(1f);
+                checkBox.setScaleY(1f);
+            } else {
+                ValueAnimator animator = ValueAnimator.ofFloat(enabled ? 0 : 1, enabled ? 1 : 0).setDuration(200);
+                animator.setInterpolator(CubicBezierInterpolator.DEFAULT);
+                animator.addUpdateListener(animation -> {
+                    float val = (float) animation.getAnimatedValue();
+                    float x = AndroidUtilities.lerp(fromX, toX, val);
+                    textView.setTranslationX(x);
+                    valueTextView.setTranslationX(x);
+                    checkImageView.setTranslationX(x);
+                    checkBox.setTranslationX((LocaleController.isRTL ? AndroidUtilities.dp(32) : -AndroidUtilities.dp(32)) + x);
+
+                    float scale = 0.5f + val * 0.5f;
+                    checkBox.setScaleX(scale);
+                    checkBox.setScaleY(scale);
+                    checkBox.setAlpha(val);
+
+                    scale = 0.5f + (1f - val) * 0.5f;
+                    checkImageView.setScaleX(scale);
+                    checkImageView.setScaleY(scale);
+                    checkImageView.setAlpha(1f - val);
+                });
+                animator.addListener(new AnimatorListenerAdapter() {
+                    @Override
+                    public void onAnimationStart(Animator animation) {
+                        if (enabled) {
+                            checkBox.setAlpha(0f);
+                            checkBox.setVisibility(VISIBLE);
+                        } else {
+                            checkImageView.setAlpha(0f);
+                            checkImageView.setVisibility(VISIBLE);
+                        }
+                    }
+
+                    @Override
+                    public void onAnimationEnd(Animator animation) {
+                        if (enabled) {
+                            checkImageView.setVisibility(GONE);
+                        } else {
+                            checkBox.setVisibility(GONE);
+                        }
+                    }
+                });
+                animator.start();
+            }
+        }
+
+        public void setItemSelected(boolean selected, boolean animated) {
+            if (selected == isSelected && animated) {
+                return;
+            }
+            isSelected = selected;
+            checkBox.setChecked(selected, animated);
+        }
+
         public void setChecked(boolean checked) {
             if (checked) {
                 if (checkDrawable == null) {
@@ -314,14 +448,13 @@ public ProxyListActivity(String alert) {
         this.alert = alert;
     }

-    private LinkedList proxyList = SharedConfig.getProxyList();
-
     @Override
     public boolean onFragmentCreate() {
         super.onFragmentCreate();

         currentConnectionState = ConnectionsManager.getInstance(currentAccount).getConnectionState();

+        NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.proxyChangedByRotation);
         NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.proxySettingsChanged);
         NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.proxyCheckDone);
         NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.didUpdateConnectionState);
@@ -338,6 +471,7 @@ public boolean onFragmentCreate() {
     @Override
     public void onFragmentDestroy() {
         super.onFragmentDestroy();
+        NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.proxyChangedByRotation);
         NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.proxySettingsChanged);
         NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.proxyCheckDone);
         NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.didUpdateConnectionState);
@@ -585,7 +719,7 @@ public void startDocumentSelectActivity() {
         } else if (id == menu_delete_all) {
             AlertUtil.showConfirm(getParentActivity(), LocaleController.getString("DeleteAllServer",
R.string.DeleteAllServer), - R.drawable.msg_delete, LocaleController.getString("Delete", R.string.Delete), + R.drawable.baseline_delete_24, LocaleController.getString("Delete", R.string.Delete), true, () -> { SharedConfig.deleteAllProxy(); updateRows(true); @@ -593,7 +727,7 @@ public void startDocumentSelectActivity() { } else if (id == menu_delete_unavailable) { AlertUtil.showConfirm(getParentActivity(), LocaleController.getString("DeleteUnavailableServer", R.string.DeleteUnavailableServer), - R.drawable.msg_delete, LocaleController.getString("Delete", R.string.Delete), + R.drawable.baseline_delete_24, LocaleController.getString("Delete", R.string.Delete), true, () -> { deleteUnavailableProxy(); }); @@ -622,7 +756,7 @@ public void startDocumentSelectActivity() { } } - CameraScanActivity.showAsSheet(this, false, CameraScanActivity.TYPE_QR, new CameraScanActivity.CameraScanActivityDelegate() { + CameraScanActivity.showAsSheet(this, new CameraScanActivity.CameraScanActivityDelegate() { @Override public void didFindQr(String text) { @@ -701,6 +835,7 @@ protected void dispatchDraw(Canvas canvas) { } useProxySettings = !useProxySettings; + updateRows(true); TextCheckCell textCheckCell = (TextCheckCell) view; textCheckCell.setChecked(useProxySettings); @@ -739,6 +874,14 @@ protected void dispatchDraw(Canvas canvas) { }); }); + updateRows(true); + } else if (position == rotationRow) { + SharedConfig.proxyRotationEnabled = !SharedConfig.proxyRotationEnabled; + TextCheckCell textCheckCell = (TextCheckCell) view; + textCheckCell.setChecked(SharedConfig.proxyRotationEnabled); + SharedConfig.saveConfig(); + + updateRows(true); } else if (position == callsRow) { useProxyForCalls = !useProxyForCalls; TextCheckCell textCheckCell = (TextCheckCell) view; @@ -747,6 +890,10 @@ protected void dispatchDraw(Canvas canvas) { editor.putBoolean("proxy_enabled_calls", useProxyForCalls); editor.apply(); } else if (position >= proxyStartRow && position < proxyEndRow) { + if (!selectedItems.isEmpty()) { + listAdapter.toggleSelected(position); + return; + } SharedConfig.ProxyInfo info = proxyList.get(position - proxyStartRow); useProxySettings = true; SharedConfig.setCurrentProxy(info); @@ -774,11 +921,11 @@ protected void dispatchDraw(Canvas canvas) { }, new int[]{ R.drawable.group_edit, - R.drawable.msg_shareout, - R.drawable.msg_qrcode, - R.drawable.msg_link, - R.drawable.msg_delete, - R.drawable.msg_cancel + R.drawable.baseline_share_24, + R.drawable.wallet_qr, + R.drawable.baseline_link_24, + R.drawable.baseline_delete_24, + R.drawable.baseline_cancel_24 }, (i, text, cell) -> { @@ -811,7 +958,7 @@ protected void dispatchDraw(Canvas canvas) { } else if (i == 4) { AlertUtil.showConfirm(getParentActivity(), LocaleController.getString("DeleteProxy", R.string.DeleteProxy), - R.drawable.msg_delete, LocaleController.getString("Delete", R.string.Delete), + R.drawable.baseline_delete_24, LocaleController.getString("Delete", R.string.Delete), true, () -> { SharedConfig.deleteProxy(info); @@ -836,6 +983,15 @@ protected void dispatchDraw(Canvas canvas) { return fragmentView; } + @Override + public boolean onBackPressed() { + if (!selectedItems.isEmpty()) { + listAdapter.clearSelected(); + return false; + } + return true; + } + @SuppressLint("NewApi") private void addProxy() { BottomBuilder builder = new BottomBuilder(getParentActivity()); @@ -878,7 +1034,7 @@ private void addProxy() { } } - CameraScanActivity.showAsSheet(this, false, CameraScanActivity.TYPE_QR, new CameraScanActivity.CameraScanActivityDelegate() { + 
CameraScanActivity.showAsSheet(this, new CameraScanActivity.CameraScanActivityDelegate() { @Override public void didFindQr(String text) { @@ -905,28 +1061,78 @@ private void updateRows(boolean notify) { proxyList = SharedConfig.getProxyList(); rowCount = 0; useProxyRow = rowCount++; + if (useProxySettings && SharedConfig.currentProxy != null && SharedConfig.proxyList.size() > 1 && IS_PROXY_ROTATION_AVAILABLE) { + rotationRow = rowCount++; + if (SharedConfig.proxyRotationEnabled) { + rotationTimeoutRow = rowCount++; + rotationTimeoutInfoRow = rowCount++; + } else { + rotationTimeoutRow = -1; + rotationTimeoutInfoRow = -1; + } + } else { + rotationRow = -1; + rotationTimeoutRow = -1; + rotationTimeoutInfoRow = -1; + } + if (rotationTimeoutInfoRow == -1) { + useProxyShadowRow = rowCount++; + } else { + useProxyShadowRow = -1; + } + connectionsHeaderRow = rowCount++; + + if (notify) { + proxyList.clear(); + proxyList.addAll(SharedConfig.proxyList); + + boolean checking = false; + if (!wasCheckedAllList) { + for (SharedConfig.ProxyInfo info : proxyList) { + if (info.checking || info.availableCheckTime == 0) { + checking = true; + break; + } + } + if (!checking) { + wasCheckedAllList = true; + } + } + + boolean isChecking = checking; + Collections.sort(proxyList, (o1, o2) -> { + long bias1 = SharedConfig.currentProxy == o1 ? -200000 : 0; + if (!o1.available) { + bias1 += 100000; + } + long bias2 = SharedConfig.currentProxy == o2 ? -200000 : 0; + if (!o2.available) { + bias2 += 100000; + } + return Long.compare(isChecking && o1 != SharedConfig.currentProxy ? SharedConfig.proxyList.indexOf(o1) * 10000L : o1.ping + bias1, + isChecking && o2 != SharedConfig.currentProxy ? SharedConfig.proxyList.indexOf(o2) * 10000L : o2.ping + bias2); + }); + } + enablePublicProxyRow = rowCount++; if (!proxyList.isEmpty()) { - useProxyDetailRow = rowCount++; - connectionsHeaderRow = rowCount++; proxyStartRow = rowCount; rowCount += proxyList.size(); proxyEndRow = rowCount; } else { - useProxyDetailRow = -1; - connectionsHeaderRow = -1; proxyStartRow = -1; proxyEndRow = -1; } - proxyDetailRow = rowCount++; + proxyAddRow = rowCount++; + proxyShadowRow = rowCount++; if (SharedConfig.currentProxy == null || SharedConfig.currentProxy.secret.isEmpty()) { boolean change = callsRow == -1; callsRow = rowCount++; callsDetailRow = rowCount++; UIUtil.runOnUIThread(() -> { if (!notify && change) { - listAdapter.notifyItemChanged(proxyDetailRow); - listAdapter.notifyItemRangeInserted(proxyDetailRow + 1, 2); + listAdapter.notifyItemChanged(proxyShadowRow); + listAdapter.notifyItemRangeInserted(proxyShadowRow + 1, 2); } }); } else { @@ -935,11 +1141,16 @@ private void updateRows(boolean notify) { callsDetailRow = -1; if (!notify && change) { UIUtil.runOnUIThread(() -> { - listAdapter.notifyItemChanged(proxyDetailRow); - listAdapter.notifyItemRangeRemoved(proxyDetailRow + 1, 2); + listAdapter.notifyItemChanged(proxyShadowRow); + listAdapter.notifyItemRangeRemoved(proxyShadowRow + 1, 2); }); } } + if (proxyList.size() >= 10) { + deleteAllRow = rowCount++; + } else { + deleteAllRow = -1; + } checkProxyList(false); if (notify && listAdapter != null) { UIUtil.runOnUIThread(() -> { @@ -1128,7 +1339,18 @@ public void onResume() { @Override public void didReceivedNotification(int id, int account, Object... 
args) { - if (id == NotificationCenter.proxySettingsChanged) { + if (id == NotificationCenter.proxyChangedByRotation) { + listView.forAllChild(view -> { + RecyclerView.ViewHolder holder = listView.getChildViewHolder(view); + if (holder.itemView instanceof TextDetailProxyCell) { + TextDetailProxyCell cell = (TextDetailProxyCell) holder.itemView; + cell.setChecked(cell.currentInfo == SharedConfig.currentProxy); + cell.updateStatus(); + } + }); + + updateRows(false); + } else if (id == NotificationCenter.proxySettingsChanged) { updateRows(true); } else if (id == NotificationCenter.didUpdateConnectionState) { int state = ConnectionsManager.getInstance(account).getConnectionState(); @@ -1164,11 +1386,34 @@ public void didReceivedNotification(int id, int account, Object... args) { } } } + + boolean checking = false; + if (!wasCheckedAllList) { + for (SharedConfig.ProxyInfo info : proxyList) { + if (info.checking || info.availableCheckTime == 0) { + checking = true; + break; + } + } + if (!checking) { + wasCheckedAllList = true; + } + } + if (!checking) { + updateRows(true); + } } } } private class ListAdapter extends RecyclerListView.SelectionAdapter { + private final static int VIEW_TYPE_SHADOW = 0, + VIEW_TYPE_TEXT_SETTING = 1, + VIEW_TYPE_HEADER = 2, + VIEW_TYPE_TEXT_CHECK = 3, + VIEW_TYPE_INFO = 4, + VIEW_TYPE_PROXY_DETAIL = 5, + VIEW_TYPE_SLIDE_CHOOSER = 6; public static final int PAYLOAD_CHECKED_CHANGED = 0; public static final int PAYLOAD_SELECTION_CHANGED = 1; @@ -1178,6 +1423,43 @@ private class ListAdapter extends RecyclerListView.SelectionAdapter { public ListAdapter(Context context) { mContext = context; + + setHasStableIds(true); + } + + public void toggleSelected(int position) { + if (position < proxyStartRow || position >= proxyEndRow) { + return; + } + SharedConfig.ProxyInfo info = proxyList.get(position - proxyStartRow); + if (selectedItems.contains(info)) { + selectedItems.remove(info); + } else { + selectedItems.add(info); + } + notifyItemChanged(position, PAYLOAD_SELECTION_CHANGED); + checkActionMode(); + } + + public void clearSelected() { + selectedItems.clear(); + notifyItemRangeChanged(proxyStartRow, proxyEndRow - proxyStartRow, PAYLOAD_SELECTION_CHANGED); + checkActionMode(); + } + + private void checkActionMode() { + int selectedCount = selectedItems.size(); + boolean actionModeShowed = actionBar.isActionModeShowed(); + if (selectedCount > 0) { + selectedCountTextView.setNumber(selectedCount, actionModeShowed); + if (!actionModeShowed) { + actionBar.showActionMode(); + notifyItemRangeChanged(proxyStartRow, proxyEndRow - proxyStartRow, PAYLOAD_SELECTION_MODE_CHANGED); + } + } else if (actionModeShowed) { + actionBar.hideActionMode(); + notifyItemRangeChanged(proxyStartRow, proxyEndRow - proxyStartRow, PAYLOAD_SELECTION_MODE_CHANGED); + } } @Override @@ -1188,41 +1470,57 @@ public int getItemCount() { @Override public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { switch (holder.getItemViewType()) { - case 0: { - if (position == proxyDetailRow && callsRow == -1) { + case VIEW_TYPE_SHADOW: { + if (position == proxyShadowRow && callsRow == -1) { holder.itemView.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); } else { holder.itemView.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); } break; } - case 2: { + case VIEW_TYPE_TEXT_SETTING: { + TextSettingsCell textCell = (TextSettingsCell) holder.itemView; + 
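+                    // Cells are recycled, so reset the text color up front; the deleteAllRow
+                    // branch below re-tints the same cell red.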
textCell.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); + if (position == proxyAddRow) { + textCell.setText(LocaleController.getString("AddProxy", R.string.AddProxy), deleteAllRow != -1); + } else if (position == deleteAllRow) { + textCell.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteRedText4)); + textCell.setText(LocaleController.getString(R.string.DeleteAllProxies), false); + } + break; + } + case VIEW_TYPE_HEADER: { HeaderCell headerCell = (HeaderCell) holder.itemView; if (position == connectionsHeaderRow) { headerCell.setText(LocaleController.getString("ProxyConnections", R.string.ProxyConnections)); } break; } - case 3: { + case VIEW_TYPE_TEXT_CHECK: { TextCheckCell checkCell = (TextCheckCell) holder.itemView; if (position == useProxyRow) { - checkCell.setTextAndCheck(LocaleController.getString("UseProxySettings", R.string.UseProxySettings), useProxySettings, true); + checkCell.setTextAndCheck(LocaleController.getString("UseProxySettings", R.string.UseProxySettings), useProxySettings, rotationRow != -1); } else if (position == callsRow) { checkCell.setTextAndCheck(LocaleController.getString("UseProxyForCalls", R.string.UseProxyForCalls), useProxyForCalls, false); } else if (position == enablePublicProxyRow) { checkCell.setTextAndCheck(LocaleController.getString("enablePublicProxy", R.string.enablePublicProxy), NekoConfig.enablePublicProxy.Bool(), false); + } else if (position == rotationRow) { + checkCell.setTextAndCheck(LocaleController.getString(R.string.UseProxyRotation), SharedConfig.proxyRotationEnabled, true); } break; } - case 4: { + case VIEW_TYPE_INFO: { TextInfoPrivacyCell cell = (TextInfoPrivacyCell) holder.itemView; if (position == callsDetailRow) { cell.setText(LocaleController.getString("UseProxyForCallsInfo", R.string.UseProxyForCallsInfo)); - cell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); + cell.setBackground(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); + } else if (position == rotationTimeoutInfoRow) { + cell.setText(LocaleController.getString(R.string.ProxyRotationTimeoutInfo)); + cell.setBackground(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); } break; } - case 5: { + case VIEW_TYPE_PROXY_DETAIL: { TextDetailProxyCell cell = (TextDetailProxyCell) holder.itemView; try { SharedConfig.ProxyInfo info = proxyList.get(position - proxyStartRow); @@ -1232,13 +1530,37 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } break; } + case VIEW_TYPE_SLIDE_CHOOSER: { + if (position == rotationTimeoutRow) { + SlideChooseView chooseView = (SlideChooseView) holder.itemView; + ArrayList options = new ArrayList<>(ProxyRotationController.ROTATION_TIMEOUTS); + String[] values = new String[options.size()]; + for (int i = 0; i < options.size(); i++) { + values[i] = LocaleController.formatString(R.string.ProxyRotationTimeoutSeconds, options.get(i)); + } + chooseView.setCallback(i -> { + SharedConfig.proxyRotationTimeout = i; + SharedConfig.saveConfig(); + }); + chooseView.setOptions(SharedConfig.proxyRotationTimeout, values); + } + break; + } } } @SuppressWarnings("unchecked") @Override public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int position, @NonNull List payloads) { - if (holder.getItemViewType() == 3 && payloads.contains(PAYLOAD_CHECKED_CHANGED)) { + if (holder.getItemViewType() == 
VIEW_TYPE_PROXY_DETAIL && !payloads.isEmpty()) { + TextDetailProxyCell cell = (TextDetailProxyCell) holder.itemView; + if (payloads.contains(PAYLOAD_SELECTION_CHANGED)) { + cell.setItemSelected(selectedItems.contains(proxyList.get(position - proxyStartRow)), true); + } + if (payloads.contains(PAYLOAD_SELECTION_MODE_CHANGED)) { + cell.setSelectionEnabled(!selectedItems.isEmpty(), true); + } + } else if (holder.getItemViewType() == VIEW_TYPE_TEXT_CHECK && payloads.contains(PAYLOAD_CHECKED_CHANGED)) { TextCheckCell checkCell = (TextCheckCell) holder.itemView; if (position == useProxyRow) { checkCell.setChecked(useProxySettings); @@ -1246,6 +1568,8 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi checkCell.setChecked(useProxyForCalls); } else if (position == enablePublicProxyRow) { checkCell.setChecked(NekoConfig.enablePublicProxy.Bool()); + } else if (position == rotationRow) { + checkCell.setChecked(SharedConfig.proxyRotationEnabled); } } else { super.onBindViewHolder(holder, position, payloads); @@ -1255,7 +1579,7 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi @Override public void onViewAttachedToWindow(RecyclerView.ViewHolder holder) { int viewType = holder.getItemViewType(); - if (viewType == 3) { + if (viewType == VIEW_TYPE_TEXT_CHECK) { TextCheckCell checkCell = (TextCheckCell) holder.itemView; int position = holder.getAdapterPosition(); if (position == useProxyRow) { @@ -1264,6 +1588,8 @@ public void onViewAttachedToWindow(RecyclerView.ViewHolder holder) { checkCell.setChecked(useProxyForCalls); } else if (position == enablePublicProxyRow) { checkCell.setChecked(NekoConfig.enablePublicProxy.Bool()); + } else if (position == rotationRow) { + checkCell.setChecked(SharedConfig.proxyRotationEnabled); } } } @@ -1271,33 +1597,37 @@ public void onViewAttachedToWindow(RecyclerView.ViewHolder holder) { @Override public boolean isEnabled(RecyclerView.ViewHolder holder) { int position = holder.getAdapterPosition(); - return position == useProxyRow || position == callsRow || position == enablePublicProxyRow || position >= proxyStartRow && position < proxyEndRow; + return position == useProxyRow || position == rotationRow || position == callsRow || position == enablePublicProxyRow || position == deleteAllRow || position >= proxyStartRow && position < proxyEndRow; } @Override public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { View view; switch (viewType) { - case 0: + case VIEW_TYPE_SHADOW: view = new ShadowSectionCell(mContext); break; - case 1: + case VIEW_TYPE_TEXT_SETTING: view = new TextSettingsCell(mContext); view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); break; - case 2: + case VIEW_TYPE_HEADER: view = new HeaderCell(mContext); view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); break; - case 3: + case VIEW_TYPE_TEXT_CHECK: view = new TextCheckCell(mContext); view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); break; - case 4: + case VIEW_TYPE_INFO: view = new TextInfoPrivacyCell(mContext); - view.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); + view.setBackground(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); break; - case 5: + case VIEW_TYPE_SLIDE_CHOOSER: + view = new SlideChooseView(mContext); + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + break; + case 
VIEW_TYPE_PROXY_DETAIL: default: view = new TextDetailProxyCell(mContext); view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); @@ -1307,18 +1637,52 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType return new RecyclerListView.Holder(view); } + @Override + public long getItemId(int position) { + // Random stable ids, could be anything non-repeating + if (position == useProxyShadowRow) { + return -1; + } else if (position == proxyShadowRow) { + return -2; +// } else if (position == proxyAddRow) { +// return -3; + } else if (position == useProxyRow) { + return -4; + } else if (position == callsRow) { + return -5; + } else if (position == connectionsHeaderRow) { + return -6; + } else if (position == deleteAllRow) { + return -8; + } else if (position == rotationRow) { + return -9; + } else if (position == rotationTimeoutRow) { + return -10; + } else if (position == rotationTimeoutInfoRow) { + return -11; + } else if (position >= proxyStartRow && position < proxyEndRow) { + return proxyList.get(position - proxyStartRow).hashCode(); + } else { + return -7; + } + } + @Override public int getItemViewType(int position) { - if (position == useProxyDetailRow || position == proxyDetailRow) { - return 0; - } else if (position == useProxyRow || position == callsRow || position == enablePublicProxyRow) { - return 3; + if (position == useProxyShadowRow || position == proxyShadowRow) { + return VIEW_TYPE_SHADOW; + } else if (position == proxyAddRow || position == deleteAllRow) { + return VIEW_TYPE_TEXT_SETTING; + } else if (position == useProxyRow || position == rotationRow || position == callsRow || position == enablePublicProxyRow) { + return VIEW_TYPE_TEXT_CHECK; } else if (position == connectionsHeaderRow) { - return 2; + return VIEW_TYPE_HEADER; + } else if (position == rotationTimeoutRow) { + return VIEW_TYPE_SLIDE_CHOOSER; } else if (position >= proxyStartRow && position < proxyEndRow) { - return 5; + return VIEW_TYPE_PROXY_DETAIL; } else { - return 4; + return VIEW_TYPE_INFO; } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/QrActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/QrActivity.java index bf48fba10a..9e1dfc26ec 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/QrActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/QrActivity.java @@ -102,6 +102,8 @@ import java.util.HashMap; import java.util.List; +import tw.nekomimi.nekogram.NekoConfig; + public class QrActivity extends BaseFragment { private static final ArrayMap qrColorsMap = new ArrayMap<>(); @@ -374,7 +376,9 @@ protected void setDarkTheme(boolean isDark) { fragmentView = rootLayout; Utilities.themeQueue.postRunnable(() -> { homeTheme.loadPreviewColors(currentAccount); - + if (fragmentView == null) { + return; + } fragmentView.postDelayed(() -> { onItemSelected(currentTheme, 0, true); }, 17); @@ -1053,6 +1057,7 @@ void setData(String link, String username, boolean isPhone, boolean isTimer) { } } catch (Exception ignore) { try { + if (!NekoConfig.disableVibration.Bool()) performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); } catch (Exception ignore2) {} } @@ -1263,7 +1268,7 @@ protected void onDetachedFromWindow() { super.onDetachedFromWindow(); if (loadingMatrix != null) { loadingMatrix.stop(); - loadingMatrix.recycle(); + loadingMatrix.recycle(false); loadingMatrix = null; } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/RestrictedLanguagesSelectActivity.java 
b/TMessagesProj/src/main/java/org/telegram/ui/RestrictedLanguagesSelectActivity.java index a742664245..958b549f84 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/RestrictedLanguagesSelectActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/RestrictedLanguagesSelectActivity.java @@ -8,56 +8,56 @@ package org.telegram.ui; -import android.animation.ValueAnimator; import android.content.Context; -import android.content.DialogInterface; import android.content.SharedPreferences; +import android.content.res.Resources; +import android.text.TextUtils; import android.view.View; import android.view.ViewGroup; +import android.view.inputmethod.InputMethodInfo; +import android.view.inputmethod.InputMethodManager; +import android.view.inputmethod.InputMethodSubtype; import android.widget.EditText; import android.widget.FrameLayout; -import android.widget.LinearLayout; -import android.widget.TextView; + +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.RecyclerView; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ApplicationLoader; +import org.telegram.messenger.BotWebViewVibrationEffect; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; +import org.telegram.messenger.SharedConfig; +import org.telegram.messenger.TranslateController; +import org.telegram.messenger.UserConfig; import org.telegram.messenger.Utilities; -import org.telegram.ui.ActionBar.AlertDialog; -import org.telegram.ui.ActionBar.Theme; -import org.telegram.ui.ActionBar.ThemeDescription; -import org.telegram.ui.Cells.CheckBoxCell; -import org.telegram.ui.Cells.HeaderCell; -import org.telegram.ui.Cells.LanguageCell; import org.telegram.ui.ActionBar.ActionBar; import org.telegram.ui.ActionBar.ActionBarMenu; import org.telegram.ui.ActionBar.ActionBarMenuItem; import org.telegram.ui.ActionBar.BaseFragment; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.ActionBar.ThemeDescription; +import org.telegram.ui.Cells.HeaderCell; +import org.telegram.ui.Cells.LanguageCell; import org.telegram.ui.Cells.ShadowSectionCell; import org.telegram.ui.Cells.TextCheckbox2Cell; -import org.telegram.ui.Cells.TextCheckCell; -import org.telegram.ui.Cells.TextInfoPrivacyCell; -import org.telegram.ui.Cells.TextRadioCell; -import org.telegram.ui.Cells.TextSettingsCell; -import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.EmptyTextProgressView; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.RecyclerListView; +import org.telegram.ui.Components.TranslateAlert2; +import java.io.BufferedReader; +import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; +import java.util.HashMap; import java.util.HashSet; -import java.util.Set; +import java.util.List; import java.util.Timer; -import java.util.TimerTask; - -import androidx.recyclerview.widget.LinearLayoutManager; -import androidx.recyclerview.widget.RecyclerView; public class RestrictedLanguagesSelectActivity extends BaseFragment implements NotificationCenter.NotificationCenterDelegate { @@ -70,54 +70,25 @@ public class RestrictedLanguagesSelectActivity extends BaseFragment implements N private boolean searching; private Timer searchTimer; - private ArrayList searchResult; - private 
ArrayList sortedLanguages; -// private ArrayList unofficialLanguages; + private int separatorRow = -1; + private ArrayList searchResult; + private ArrayList allLanguages; private SharedPreferences preferences; - private SharedPreferences.OnSharedPreferenceChangeListener listener; - private HashSet selectedLanguages = null; + private HashSet firstSelectedLanguages; + private HashSet selectedLanguages; public static HashSet getRestrictedLanguages() { -// String currentLangCode = LocaleController.getInstance().getCurrentLocaleInfo().pluralLangCode; -// String[] onlyCurrentLang = new String[] { currentLangCode }; - return new HashSet<>(MessagesController.getGlobalMainSettings().getStringSet("translate_button_restricted_languages", new HashSet(/*Arrays.asList(onlyCurrentLang)*/))); + String currentLangCode = LocaleController.getInstance().getCurrentLocaleInfo().pluralLangCode; + String[] onlyCurrentLang = new String[] { currentLangCode }; + return new HashSet<>(MessagesController.getGlobalMainSettings().getStringSet("translate_button_restricted_languages", new HashSet(Arrays.asList(onlyCurrentLang)))); } @Override public boolean onFragmentCreate() { preferences = MessagesController.getGlobalMainSettings(); + firstSelectedLanguages = getRestrictedLanguages(); selectedLanguages = getRestrictedLanguages(); - preferences.registerOnSharedPreferenceChangeListener(listener = new SharedPreferences.OnSharedPreferenceChangeListener() { - public int langPos(String lng) { - if (lng == null) - return -1; - ArrayList arr = (searching ? searchResult : sortedLanguages); - if (arr == null) - return -1; - for (int i = 0; i < arr.size(); ++i) - if (lng.equals(arr.get(i).pluralLangCode)) - return i; - return -1; - } - - @Override - public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String s) { - preferences = sharedPreferences; - HashSet newSelectedLanguages = getRestrictedLanguages(); - if (listView != null && listView.getAdapter() != null) { - RecyclerView.Adapter adapter = listView.getAdapter(); - int offset = !searching ? 
1 : 0; - for (String lng : selectedLanguages) - if (!newSelectedLanguages.contains(lng)) - adapter.notifyItemChanged(langPos(lng) + offset); - for (String lng : newSelectedLanguages) - if (!selectedLanguages.contains(lng)) - adapter.notifyItemChanged(langPos(lng) + offset); - } - selectedLanguages = newSelectedLanguages; - } - }); fillLanguages(); LocaleController.getInstance().loadRemoteLanguages(currentAccount); @@ -125,13 +96,57 @@ public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, Strin return super.onFragmentCreate(); } + private void rebind(int position) { + RecyclerView.Adapter adapter = listView.getAdapter(); + for (int i = 0; i < listView.getChildCount(); ++i) { + View child = listView.getChildAt(i); + RecyclerView.ViewHolder holder = listView.getChildViewHolder(child); + if (holder == null) { + continue; + } + int childPosition = holder.getAdapterPosition(); + if (childPosition == RecyclerView.NO_POSITION) { + continue; + } + if (childPosition == position) { + adapter.onBindViewHolder(holder, position); + return; + } + } + } + @Override public void onFragmentDestroy() { super.onFragmentDestroy(); - preferences.unregisterOnSharedPreferenceChangeListener(listener); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.suggestedLangpack); } + public static boolean toggleLanguage(String language, boolean doNotTranslate) { + if (language == null) { + return false; + } + language = language.toLowerCase(); + LocaleController.LocaleInfo currentLocaleInfo = LocaleController.getInstance().getCurrentLocaleInfo(); + HashSet selectedLanguages = getRestrictedLanguages(); +// if (language != null && language.equals(currentLocaleInfo.pluralLangCode) && doNotTranslate) { +//// AndroidUtilities.shakeViewSpring(view); +//// BotWebViewVibrationEffect.APP_ERROR.vibrate(); +// return false; +// } + if (!doNotTranslate) { + selectedLanguages.remove(language); + } else { + selectedLanguages.add(language); + } + if (selectedLanguages.size() == 1 && selectedLanguages.contains(currentLocaleInfo.pluralLangCode)) { + MessagesController.getGlobalMainSettings().edit().remove("translate_button_restricted_languages").commit(); + } else { + MessagesController.getGlobalMainSettings().edit().putStringSet("translate_button_restricted_languages", selectedLanguages).commit(); + } + TranslateController.invalidateSuggestedLanguageCodes(); + return true; + } + @Override public View createView(Context context) { searching = false; @@ -214,76 +229,57 @@ public void onTextChanged(EditText editText) { return; } boolean search = listView.getAdapter() == searchListViewAdapter; - if (!search) - position--; - LocaleController.LocaleInfo localeInfo; - if (search) { - localeInfo = searchResult.get(position); + final int realPosition = position; + TranslateController.Language language = null; + if (search && searchResult != null) { + language = searchResult.get(position); } else { - localeInfo = sortedLanguages.get(position); + if (separatorRow >= 0 && position > separatorRow) { + position--; + } + if (position >= 0 && position < allLanguages.size()) { + language = allLanguages.get(position); + } } - if (localeInfo != null) { + if (language != null && language.code != null) { LocaleController.LocaleInfo currentLocaleInfo = LocaleController.getInstance().getCurrentLocaleInfo(); - String langCode = localeInfo.pluralLangCode; - if (langCode != null && langCode.equals(currentLocaleInfo.pluralLangCode)) { - AndroidUtilities.shakeView(((TextCheckbox2Cell) view).checkbox); - return; - } + 
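+                // Toggle this language in the do-not-translate set and persist it. When the set
+                // collapses back to just the current UI language, the preference is removed
+                // instead of stored, mirroring the default returned by getRestrictedLanguages().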
String langCode = language.code; boolean value = selectedLanguages.contains(langCode); - HashSet newSelectedLanguages = new HashSet(selectedLanguages); - if (value) - newSelectedLanguages.removeIf(s -> s != null && s.equals(langCode)); - else - newSelectedLanguages.add(langCode); - if (newSelectedLanguages.size() == 1 && newSelectedLanguages.contains(currentLocaleInfo.pluralLangCode)) - preferences.edit().remove("translate_button_restricted_languages").apply(); - else - preferences.edit().putStringSet("translate_button_restricted_languages", newSelectedLanguages).apply(); - } - }); +// if (langCode != null && langCode.equals(currentLocaleInfo.pluralLangCode) && value) { +// AndroidUtilities.shakeViewSpring(view); +// BotWebViewVibrationEffect.APP_ERROR.vibrate(); +// return; +// } + if (value) { + selectedLanguages.removeIf(s -> s != null && s.equals(langCode)); + } else { + selectedLanguages.add(langCode); + } + if (selectedLanguages.size() == 1 && selectedLanguages.contains(currentLocaleInfo.pluralLangCode)) { + preferences.edit().remove("translate_button_restricted_languages").remove("translate_button_restricted_languages_changed").apply(); + } else { + preferences.edit().putStringSet("translate_button_restricted_languages", selectedLanguages).putBoolean("translate_button_restricted_languages_changed", true).apply(); + } - listView.setOnItemLongClickListener((view, position) -> { - if (getParentActivity() == null || parentLayout == null || !(view instanceof TextCheckbox2Cell)) { - return false; - } - boolean search = listView.getAdapter() == searchListViewAdapter; - if (!search) - position--; - LocaleController.LocaleInfo localeInfo; - if (search) { - localeInfo = searchResult.get(position); - } else { - localeInfo = sortedLanguages.get(position); - } - if (localeInfo == null || localeInfo.pathToFile == null || localeInfo.isRemote() && localeInfo.serverIndex != Integer.MAX_VALUE) { - return false; - } - final LocaleController.LocaleInfo finalLocaleInfo = localeInfo; - AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity()); - builder.setTitle(LocaleController.getString("DeleteLocalizationTitle", R.string.DeleteLocalizationTitle)); - builder.setMessage(AndroidUtilities.replaceTags(LocaleController.formatString("DeleteLocalizationText", R.string.DeleteLocalizationText, localeInfo.name))); - builder.setPositiveButton(LocaleController.getString("Delete", R.string.Delete), (dialogInterface, i) -> { - if (LocaleController.getInstance().deleteLanguage(finalLocaleInfo, currentAccount)) { - fillLanguages(); - if (searchResult != null) { - searchResult.remove(finalLocaleInfo); + if (search) { + for (int i = 0, p = 0; i < searchResult.size(); ++i, ++p) { + if (TextUtils.equals(langCode, searchResult.get(i).code)) { + rebind(p); + } } - if (listAdapter != null) { - listAdapter.notifyDataSetChanged(); - } - if (searchListViewAdapter != null) { - searchListViewAdapter.notifyDataSetChanged(); + } else { + for (int i = 0, p = 0; i < allLanguages.size(); ++i, ++p) { + if (p == separatorRow) { + p++; + } + if (TextUtils.equals(langCode, allLanguages.get(i).code)) { + rebind(p); + } } } - }); - builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); - AlertDialog alertDialog = builder.create(); - showDialog(alertDialog); - TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); - if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + + 
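+                // Ask this account's TranslateController to re-evaluate the updated
+                // restricted-language set.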
MessagesController.getInstance(currentAccount).getTranslateController().checkRestrictedLanguagesUpdate(); } - return true; }); listView.setOnScrollListener(new RecyclerView.OnScrollListener() { @@ -309,33 +305,45 @@ public void didReceivedNotification(int id, int account, Object... args) { } private void fillLanguages() { - final LocaleController.LocaleInfo currentLocale = LocaleController.getInstance().getCurrentLocaleInfo(); - Comparator comparator = (o, o2) -> { - if (o == currentLocale) { - return -1; - } else if (o2 == currentLocale) { - return 1; - } else if (o.serverIndex == o2.serverIndex) { - return o.name.compareTo(o2.name); - } - if (o.serverIndex > o2.serverIndex) { - return 1; - } else if (o.serverIndex < o2.serverIndex) { - return -1; + allLanguages = TranslateController.getLanguages(); + + final String currentLanguageCode = LocaleController.getInstance().getCurrentLocaleInfo().pluralLangCode; + TranslateController.Language currentLanguage = null; + ArrayList selectedLanguages = new ArrayList<>(); + ArrayList notAddedSelectedLanguages = new ArrayList<>(firstSelectedLanguages); + for (int i = 0; i < allLanguages.size(); ++i) { + TranslateController.Language l = allLanguages.get(i); + if (TextUtils.equals(l.code, currentLanguageCode)) { + currentLanguage = l; + notAddedSelectedLanguages.remove(l.code); + allLanguages.remove(i); + i--; + } else if (firstSelectedLanguages.contains(l.code)) { + selectedLanguages.add(l); + notAddedSelectedLanguages.remove(l.code); + allLanguages.remove(i); + i--; } - return 0; - }; + } - sortedLanguages = new ArrayList<>(); + for (int i = 0; i < notAddedSelectedLanguages.size(); ++i) { + TranslateController.Language lang = new TranslateController.Language(); + lang.code = notAddedSelectedLanguages.get(i); + lang.ownDisplayName = lang.displayName = lang.code.toUpperCase(); + lang.q = lang.code.toLowerCase(); + selectedLanguages.add(lang); + } - ArrayList arrayList = LocaleController.getInstance().languages; - for (int a = 0, size = arrayList.size(); a < size; a++) { - LocaleController.LocaleInfo info = arrayList.get(a); - if (info != null && info.serverIndex != Integer.MAX_VALUE/* && (info.pluralLangCode == null || !info.pluralLangCode.equals(currentLocale.pluralLangCode))*/) { - sortedLanguages.add(info); - } + separatorRow = 0; + allLanguages.addAll(0, selectedLanguages); + separatorRow += selectedLanguages.size(); + if (currentLanguage != null) { + allLanguages.add(0, currentLanguage); + separatorRow++; + } + if (separatorRow <= 0) { + separatorRow = -1; } - Collections.sort(sortedLanguages, comparator); } @Override @@ -357,56 +365,28 @@ public void search(final String query) { } catch (Exception e) { FileLog.e(e); } -// searchTimer = new Timer(); -// searchTimer.schedule(new TimerTask() { -// @Override -// public void run() { -// try { -// searchTimer.cancel(); -// searchTimer = null; -// } catch (Exception e) { -// FileLog.e(e); -// } - processSearch(query); -// } -// }, 100, 300); + processSearch(query); } } private void processSearch(final String query) { -// Utilities.searchQueue.postRunnable(() -> { - - String q = query.trim().toLowerCase(); - if (q.length() == 0) { - updateSearchResults(new ArrayList<>()); - return; - } - long time = System.currentTimeMillis(); - ArrayList resultArray = new ArrayList<>(); + String q = query.trim().toLowerCase(); -// for (int a = 0, N = unofficialLanguages.size(); a < N; a++) { -// LocaleController.LocaleInfo c = unofficialLanguages.get(a); -// if (c.name.toLowerCase().startsWith(query) || 
c.nameEnglish.toLowerCase().startsWith(query)) { -// resultArray.add(c); -// } -// } - - for (int a = 0, N = sortedLanguages.size(); a < N; a++) { - LocaleController.LocaleInfo c = sortedLanguages.get(a); - if (c.name.toLowerCase().startsWith(query) || c.nameEnglish.toLowerCase().startsWith(query)) { - resultArray.add(c); - } + if (searchResult == null) { + searchResult = new ArrayList<>(); + } else { + searchResult.clear(); + } + for (int i = 0; i < allLanguages.size(); ++i) { + TranslateController.Language l = allLanguages.get(i); + if (l.q.startsWith(q)) { + searchResult.add(0, l); + } else if (l.q.contains(q)) { + searchResult.add(l); } + } - updateSearchResults(resultArray); -// }); - } - - private void updateSearchResults(final ArrayList arrCounties) { - AndroidUtilities.runOnUIThread(() -> { - searchResult = arrCounties; - searchListViewAdapter.notifyDataSetChanged(); - }); + searchListViewAdapter.notifyDataSetChanged(); } private class ListAdapter extends RecyclerListView.SelectionAdapter { @@ -432,8 +412,7 @@ public int getItemCount() { } return searchResult.size(); } else { - int count = sortedLanguages.size(); - return 1 + count; + return (separatorRow >= 0 ? 1 : 0) + allLanguages.size(); } } @@ -465,46 +444,34 @@ public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { switch (holder.getItemViewType()) { case 0: { - if (!search) - position--; -// LanguageCell textSettingsCell = (LanguageCell) holder.itemView; TextCheckbox2Cell textSettingsCell = (TextCheckbox2Cell) holder.itemView; - LocaleController.LocaleInfo localeInfo; - boolean last; + TranslateController.Language language = null; + boolean last = false; if (search) { - localeInfo = searchResult.get(position); + if (position >= 0 && position < searchResult.size()) { + language = searchResult.get(position); + } last = position == searchResult.size() - 1; - } /*else if (!unofficialLanguages.isEmpty() && position >= 0 && position < unofficialLanguages.size()) { - localeInfo = unofficialLanguages.get(position); - last = position == unofficialLanguages.size() - 1; - } */else { -// if (!unofficialLanguages.isEmpty()) { -// position -= unofficialLanguages.size() + 1; -// } - localeInfo = sortedLanguages.get(position); - last = position == sortedLanguages.size() - 1; - } - String langCode = localeInfo.pluralLangCode; - boolean value = selectedLanguages.contains(langCode); - if (localeInfo.isLocal()) { - textSettingsCell.setTextAndValue(String.format("%1$s (%2$s)", localeInfo.name, LocaleController.getString("LanguageCustom", R.string.LanguageCustom)), localeInfo.nameEnglish, false, !last); } else { - textSettingsCell.setTextAndValue(localeInfo.name, localeInfo.nameEnglish, false, !last); + if (separatorRow >= 0 && position > separatorRow) { + position--; + } + if (position >= 0 && position < allLanguages.size()) { + language = allLanguages.get(position); + last = position == allLanguages.size() - 1; + } } - - boolean isCurrent = langCode != null && langCode.equals(LocaleController.getInstance().getCurrentLocaleInfo().pluralLangCode); - textSettingsCell.setChecked(value || isCurrent); + if (language == null) { + return; + } + String ownDisplayName = language.ownDisplayName == null ? 
language.displayName : language.ownDisplayName; + textSettingsCell.setTextAndValue(ownDisplayName, language.displayName, false, !last); + textSettingsCell.setChecked(selectedLanguages.contains(language.code)); break; } case 1: { - if (!search) - position--; ShadowSectionCell sectionCell = (ShadowSectionCell) holder.itemView; -// if (!unofficialLanguages.isEmpty() && position == unofficialLanguages.size()) { -// sectionCell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); -// } else { - sectionCell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); -// } + sectionCell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); break; } case 2: { @@ -515,17 +482,13 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { @Override public int getItemViewType(int i) { - if (!search) - i--; - if (i == -1) - return 2; if (search) { return 0; + } else if (i == separatorRow) { + return 1; + } else { + return 0; } -// if (!unofficialLanguages.isEmpty() && (i == unofficialLanguages.size() || i == unofficialLanguages.size() + sortedLanguages.size() + 1) || unofficialLanguages.isEmpty() && i == sortedLanguages.size()) { -// return 1; -// } - return 0; } } @@ -558,4 +521,207 @@ public ArrayList getThemeDescriptions() { return themeDescriptions; } + + public static void cleanup() { + MessagesController.getGlobalMainSettings().edit() + .remove("translate_button_restricted_languages_changed") + .remove("translate_button_restricted_languages_version") + .remove("translate_button_restricted_languages") + .apply(); + checkRestrictedLanguages(false); + } + + public static final int LAST_DO_NOT_TRANSLATE_VERSION = 2; + public static void checkRestrictedLanguages(boolean accountsChanged) { + boolean manualChanged = MessagesController.getGlobalMainSettings().getBoolean("translate_button_restricted_languages_changed", false); + int version = MessagesController.getGlobalMainSettings().getInt("translate_button_restricted_languages_version", 0); + + if (version != LAST_DO_NOT_TRANSLATE_VERSION || accountsChanged && !manualChanged) { + getExtendedDoNotTranslate(languages -> { + final String currentLangCode = LocaleController.getInstance().getCurrentLocaleInfo().pluralLangCode; + + languages.addAll(getRestrictedLanguages()); + SharedPreferences.Editor edit = MessagesController.getGlobalMainSettings().edit(); + if (languages.size() == 1 && TextUtils.equals(languages.iterator().next(), currentLangCode)) { + edit.remove("translate_button_restricted_languages"); + } else { + edit.putStringSet("translate_button_restricted_languages", languages); + } + edit.putInt("translate_button_restricted_languages_version", LAST_DO_NOT_TRANSLATE_VERSION).apply(); + + for (int i : SharedConfig.activeAccounts) { + final int account = i; + try { + MessagesController.getInstance(account).getTranslateController().checkRestrictedLanguagesUpdate(); + } catch (Exception ignore) {} + } + }); + } + } + + public static void getExtendedDoNotTranslate(Utilities.Callback> onDone) { + if (onDone == null) { + return; + } + + final HashSet result = new HashSet<>(); + + final HashMap countries = new HashMap<>(); +// final HashMap languages = new HashMap<>(); + final HashMap uniquePhoneCodes = new HashMap<>(); + +// final Utilities.Callback pushCountry = countryCode -> { +// if (countryCode == null) { +// return; +// } +// 
String[] countryLanguages = languages.get(countryCode.toUpperCase()); +// if (countryLanguages == null) { +// return; +// } +// for (int j = 1; j < Math.min(2, countryLanguages.length); ++j) { +// String language = countryLanguages[j]; +// if (language.contains("-")) { +// language = language.split("-")[0]; +// } +// if (TranslateAlert2.languageName(language) != null) { +// result.add(language); +// } +// } +// }; + + Utilities.doCallbacks( + next -> { + try { + String language = LocaleController.getInstance().getCurrentLocaleInfo().pluralLangCode; + if (TranslateAlert2.languageName(language) != null) { + result.add(language); + } + } catch (Exception e0) { + FileLog.e(e0); + } + next.run(); + }, + next -> { + try { + String language = Resources.getSystem().getConfiguration().locale.getLanguage(); + if (TranslateAlert2.languageName(language) != null) { + result.add(language); + } + } catch (Exception e1) { + FileLog.e(e1); + } + next.run(); + }, + next -> { + try { + BufferedReader reader = new BufferedReader(new InputStreamReader(ApplicationLoader.applicationContext.getResources().getAssets().open("countries.txt"))); + ArrayList multipleCodes = new ArrayList<>(); + String line; + while ((line = reader.readLine()) != null) { + String[] args = line.split(";"); + if (args.length >= 3) { + countries.put(args[2], args[1]); + if (uniquePhoneCodes.containsKey(args[0]) && !"7".equals(args[0])) { + multipleCodes.add(args[0]); + uniquePhoneCodes.remove(args[0]); + } else if (!multipleCodes.contains(args[0])) { + uniquePhoneCodes.put(args[0], args[1]); + } + } + } + reader.close(); + +// reader = new BufferedReader(new InputStreamReader(ApplicationLoader.applicationContext.getResources().getAssets().open("languages.txt"))); +// while ((line = reader.readLine()) != null) { +// String[] args = line.split(","); +// if (args.length >= 2) { +// languages.put(args[0], args); +// } +// } +// reader.close(); + } catch (Exception e) { + FileLog.e(e); + } + next.run(); + }, +// next -> { +// ArrayList> getAuthorizationsCallbacks = new ArrayList<>(); +// for (int i = 0; i < UserConfig.MAX_ACCOUNT_COUNT; ++i) { +// final int account = i; +// if (UserConfig.getInstance(account).getClientUserId() != 0 && !ConnectionsManager.getInstance(account).isTestBackend()) { +// getAuthorizationsCallbacks.add(nextInternal -> { +// try { +// ConnectionsManager.getInstance(account).sendRequest(new TLRPC.TL_account_getAuthorizations(), (response, error) -> AndroidUtilities.runOnUIThread(() -> { +// if (error == null) { +// TLRPC.TL_account_authorizations res = (TLRPC.TL_account_authorizations) response; +// if (!res.authorizations.isEmpty()) { +// TLRPC.TL_authorization auth = res.authorizations.get(0); +// String[] separated = auth.country.split(", "); +// if (separated.length > 0) { +// pushCountry.run(countries.get(separated[separated.length - 1])); +// } +// } +// } +// nextInternal.run(); +// })); +// } catch (Exception e2) { +// FileLog.e(e2); +// nextInternal.run(); +// } +// }); +// } +// } +// getAuthorizationsCallbacks.add(n -> next.run()); +// Utilities.doCallbacks(getAuthorizationsCallbacks.toArray(new Utilities.Callback[0])); +// }, +// next -> { +// for (int i = 0; i < UserConfig.MAX_ACCOUNT_COUNT; ++i) { +// final int account = i; +// try { +// TLRPC.User user = UserConfig.getInstance(account).getCurrentUser(); +// if (user != null && user.phone != null) { +// for (int j = 4; j > 0; j--) { +// String code = user.phone.substring(0, j); +// String countryCode = uniquePhoneCodes.get(code); +// if (countryCode != 
null) { +// pushCountry.run(countryCode); +// break; +// } +// } +// } +// } catch (Exception e3) { +// FileLog.e(e3); +// } +// } +// next.run(); +// }, + next -> { + try { + InputMethodManager imm = (InputMethodManager) ApplicationLoader.applicationContext.getSystemService(Context.INPUT_METHOD_SERVICE); + List ims = imm.getEnabledInputMethodList(); + + for (InputMethodInfo method : ims) { + List submethods = imm.getEnabledInputMethodSubtypeList(method, true); + for (InputMethodSubtype submethod : submethods) { + if ("keyboard".equals(submethod.getMode())) { + String currentLocale = submethod.getLocale(); + if (currentLocale != null && currentLocale.contains("_")) { + currentLocale = currentLocale.split("_")[0]; + } + + if (TranslateAlert2.languageName(currentLocale) != null) { + result.add(currentLocale); + } + } + } + } + } catch (Exception e4) { + FileLog.e(e4); + } + + next.run(); + }, + next -> onDone.run(result) + ); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/RightSlidingDialogContainer.java b/TMessagesProj/src/main/java/org/telegram/ui/RightSlidingDialogContainer.java index 31b8942be9..064c875312 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/RightSlidingDialogContainer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/RightSlidingDialogContainer.java @@ -119,16 +119,11 @@ public void onAnimationEnd(Animator animation) { }); openAnimator.setDuration(250); openAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); - openAnimator.setStartDelay(50); + openAnimator.setStartDelay(SharedConfig.getDevicePerformanceClass() >= SharedConfig.PERFORMANCE_CLASS_HIGH ? 50 : 150); openAnimator.start(); } - fragment.setPreviewDelegate(new BaseFragment.PreviewDelegate() { - @Override - public void finishFragment() { - finishPreview(); - } - }); + fragment.setPreviewDelegate(() -> finishPreview()); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/SaveToGallerySettingsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/SaveToGallerySettingsActivity.java new file mode 100644 index 0000000000..3bea750482 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/SaveToGallerySettingsActivity.java @@ -0,0 +1,663 @@ +package org.telegram.ui; + +import static org.telegram.messenger.SharedConfig.SAVE_TO_GALLERY_FLAG_CHANNELS; +import static org.telegram.messenger.SharedConfig.SAVE_TO_GALLERY_FLAG_GROUP; +import static org.telegram.messenger.SharedConfig.SAVE_TO_GALLERY_FLAG_PEER; + +import android.content.Context; +import android.graphics.Canvas; +import android.os.Bundle; +import android.util.LongSparseArray; +import android.util.TypedValue; +import android.view.Gravity; +import android.view.View; +import android.view.ViewGroup; +import android.widget.FrameLayout; +import android.widget.LinearLayout; +import android.widget.TextView; + +import androidx.annotation.NonNull; +import androidx.core.graphics.ColorUtils; +import androidx.recyclerview.widget.DefaultItemAnimator; +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.RecyclerView; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ContactsController; +import org.telegram.messenger.DialogObject; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.MessagesController; +import org.telegram.messenger.R; +import org.telegram.messenger.SaveToGallerySettingsHelper; +import org.telegram.messenger.UserConfig; +import org.telegram.tgnet.TLObject; +import org.telegram.tgnet.TLRPC; +import 
org.telegram.ui.ActionBar.ActionBar; +import org.telegram.ui.ActionBar.ActionBarMenuItem; +import org.telegram.ui.ActionBar.ActionBarMenuSubItem; +import org.telegram.ui.ActionBar.ActionBarPopupWindow; +import org.telegram.ui.ActionBar.AlertDialog; +import org.telegram.ui.ActionBar.BackDrawable; +import org.telegram.ui.ActionBar.BaseFragment; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Cells.HeaderCell; +import org.telegram.ui.Cells.ShadowSectionCell; +import org.telegram.ui.Cells.TextCell; +import org.telegram.ui.Cells.TextCheckCell; +import org.telegram.ui.Cells.TextInfoPrivacyCell; +import org.telegram.ui.Cells.UserCell; +import org.telegram.ui.Cells.UserCell2; +import org.telegram.ui.Components.AlertsCreator; +import org.telegram.ui.Components.AnimatedFloat; +import org.telegram.ui.Components.AnimatedTextView; +import org.telegram.ui.Components.CubicBezierInterpolator; +import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.ListView.AdapterWithDiffUtils; +import org.telegram.ui.Components.RecyclerListView; +import org.telegram.ui.Components.SeekBarView; + +import java.util.ArrayList; +import java.util.Objects; + +public class SaveToGallerySettingsActivity extends BaseFragment { + + int type; + long dialogId; + SaveToGallerySettingsHelper.DialogException dialogException; + boolean isNewException; + + public SaveToGallerySettingsActivity(Bundle bundle) { + super(bundle); + } + + @Override + public boolean onFragmentCreate() { + type = getArguments().getInt("type"); + exceptionsDialogs = getUserConfig().getSaveGalleryExceptions(type); + dialogId = getArguments().getLong("dialog_id"); + if (dialogId != 0) { + dialogException = UserConfig.getInstance(currentAccount).getSaveGalleryExceptions(type).get(dialogId); + if (dialogException == null) { + isNewException = true; + dialogException = new SaveToGallerySettingsHelper.DialogException(); + SaveToGallerySettingsHelper.Settings globalSettings = SaveToGallerySettingsHelper.getSettings(type); + + dialogException.savePhoto = globalSettings.savePhoto; + dialogException.saveVideo = globalSettings.saveVideo; + dialogException.limitVideo = globalSettings.limitVideo; + + dialogException.dialogId = dialogId; + } + } + return super.onFragmentCreate(); + } + + private final int VIEW_TYPE_ADD_EXCEPTION = 1; + private final int VIEW_TYPE_CHAT = 2; + private final int VIEW_TYPE_DIVIDER = 3; + private final int VIEW_TYPE_DELETE_ALL = 4; + private final int VIEW_TYPE_HEADER = 5; + private final int VIEW_TYPE_TOGGLE = 6; + private final int VIEW_TYPE_DIVIDER_INFO = 7; + private final int VIEW_TYPE_CHOOSER = 8; + private static final int VIEW_TYPE_USER_INFO = 9; + private final int VIEW_TYPE_DIVIDER_LAST = 10; + + + int savePhotosRow; + int saveVideosRow; + int videoDividerRow; + + Adapter adapter; + + RecyclerListView recyclerListView; + + ArrayList items = new ArrayList<>(); + LongSparseArray exceptionsDialogs = new LongSparseArray<>(); + + @Override + public View createView(Context context) { + FrameLayout frameLayout = new FrameLayout(context); + fragmentView = frameLayout; + + actionBar.setBackButtonDrawable(new BackDrawable(false)); + actionBar.setActionBarMenuOnItemClick(new ActionBar.ActionBarMenuOnItemClick() { + @Override + public void onItemClick(int id) { + if (id == -1) { + finishFragment(); + return; + } + } + }); + if (dialogException != null) { + if (isNewException) { + actionBar.setTitle(LocaleController.getString(R.string.NotificationsNewException)); + } else { + 
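+                // Editing an existing per-chat exception: keep the generic exception title.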
actionBar.setTitle(LocaleController.getString(R.string.SaveToGalleryException)); + } + } else { + if (type == SAVE_TO_GALLERY_FLAG_PEER) { + actionBar.setTitle(LocaleController.getString(R.string.SaveToGalleryPrivate)); + } else if (type == SAVE_TO_GALLERY_FLAG_GROUP) { + actionBar.setTitle(LocaleController.getString(R.string.SaveToGalleryGroups)); + } else { + actionBar.setTitle(LocaleController.getString(R.string.SaveToGalleryChannels)); + } + } + + recyclerListView = new RecyclerListView(context); + DefaultItemAnimator defaultItemAnimator = new DefaultItemAnimator(); + defaultItemAnimator.setDurations(400); + defaultItemAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + defaultItemAnimator.setDelayAnimations(false); + defaultItemAnimator.setSupportsChangeAnimations(false); + recyclerListView.setItemAnimator(defaultItemAnimator); + recyclerListView.setLayoutManager(new LinearLayoutManager(context)); + recyclerListView.setAdapter(adapter = new Adapter()); + recyclerListView.setOnItemClickListener((view, position, x, y) -> { + if (position == savePhotosRow) { + SaveToGallerySettingsHelper.Settings settings = getSettings(); + settings.savePhoto = !settings.savePhoto; + onSettingsUpdated(); + updateRows(); + } else if (position == saveVideosRow) { + SaveToGallerySettingsHelper.Settings settings = getSettings(); + settings.saveVideo = !settings.saveVideo; + onSettingsUpdated(); + updateRows(); + } else if (items.get(position).viewType == VIEW_TYPE_ADD_EXCEPTION) { + Bundle args = new Bundle(); + args.putBoolean("onlySelect", true); + args.putBoolean("checkCanWrite", false); + if (type == SAVE_TO_GALLERY_FLAG_GROUP) { + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_GROUPS_ONLY); + } else if (type == SAVE_TO_GALLERY_FLAG_CHANNELS) { + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_CHANNELS_ONLY); + } else { + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_USERS_ONLY); + } + args.putBoolean("allowGlobalSearch", false); + DialogsActivity activity = new DialogsActivity(args); + activity.setDelegate((fragment, dids, message, param, topicsFragment) -> { + Bundle args2 = new Bundle(); + args2.putLong("dialog_id", dids.get(0).dialogId); + args2.putInt("type", type); + SaveToGallerySettingsActivity addExceptionActivity = new SaveToGallerySettingsActivity(args2); + presentFragment(addExceptionActivity, true); + return true; + }); + presentFragment(activity); + } else if (items.get(position).viewType == VIEW_TYPE_CHAT) { + Bundle args2 = new Bundle(); + args2.putLong("dialog_id", items.get(position).exception.dialogId); + args2.putInt("type", type); + SaveToGallerySettingsActivity addExceptionActivity = new SaveToGallerySettingsActivity(args2); + presentFragment(addExceptionActivity); + } else if (items.get(position).viewType == VIEW_TYPE_DELETE_ALL) { + AlertDialog alertDialog = AlertsCreator.createSimpleAlert(getContext(), + LocaleController.getString("NotificationsDeleteAllExceptionTitle", R.string.NotificationsDeleteAllExceptionTitle), + LocaleController.getString("NotificationsDeleteAllExceptionAlert", R.string.NotificationsDeleteAllExceptionAlert), + LocaleController.getString("Delete", R.string.Delete), + () -> { + exceptionsDialogs.clear(); + getUserConfig().updateSaveGalleryExceptions(type, exceptionsDialogs); + updateRows(); + }, null).create(); + alertDialog.show(); + alertDialog.redPositive(); + } + }); + recyclerListView.setOnItemLongClickListener((view, position, x, y) -> { + if (items.get(position).viewType == VIEW_TYPE_CHAT) { + + 
SaveToGallerySettingsHelper.DialogException exception = items.get(position).exception; + ActionBarPopupWindow.ActionBarPopupWindowLayout actionBarPopupWindowLayout = new ActionBarPopupWindow.ActionBarPopupWindowLayout(getContext()); + ActionBarMenuSubItem edit = ActionBarMenuItem.addItem(actionBarPopupWindowLayout, R.drawable.msg_customize, LocaleController.getString("EditException", R.string.EditException), false, null); + ActionBarMenuSubItem delete = ActionBarMenuItem.addItem(actionBarPopupWindowLayout, R.drawable.msg_delete, LocaleController.getString("DeleteException", R.string.DeleteException), false, null); + delete.setColors(Theme.getColor(Theme.key_windowBackgroundWhiteRedText), Theme.getColor(Theme.key_windowBackgroundWhiteRedText)); + ActionBarPopupWindow popupWindow = AlertsCreator.createSimplePopup(SaveToGallerySettingsActivity.this, actionBarPopupWindowLayout, view, x, y); + actionBarPopupWindowLayout.setParentWindow(popupWindow); + + edit.setOnClickListener(v -> { + popupWindow.dismiss(); + Bundle args2 = new Bundle(); + args2.putLong("dialog_id", items.get(position).exception.dialogId); + args2.putInt("type", type); + SaveToGallerySettingsActivity addExceptionActivity = new SaveToGallerySettingsActivity(args2); + presentFragment(addExceptionActivity); + }); + delete.setOnClickListener(v -> { + popupWindow.dismiss(); + LongSparseArray allExceptions = getUserConfig().getSaveGalleryExceptions(type); + allExceptions.remove(exception.dialogId); + getUserConfig().updateSaveGalleryExceptions(type, allExceptions); + updateRows(); + }); + return true; + } + return false; + }); + frameLayout.addView(recyclerListView); + frameLayout.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundGray)); + + if (dialogException != null) { + // ((ViewGroup.MarginLayoutParams)recyclerListView.getLayoutParams()).bottomMargin = AndroidUtilities.dp() + + FrameLayout button = new FrameLayout(getContext()); + button.setBackground(Theme.AdaptiveRipple.filledRect(Theme.key_featuredStickers_addButton, 8)); + + TextView textView = new TextView(getContext()); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + textView.setText(isNewException ? 
LocaleController.getString("AddException", R.string.AddException) : LocaleController.getString("SaveException", R.string.SaveException)); + textView.setGravity(Gravity.CENTER); + textView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + textView.setTextColor(Theme.getColor(Theme.key_featuredStickers_buttonText)); + button.addView(textView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + button.setOnClickListener(v -> { + if (isNewException) { + LongSparseArray allExceptions = getUserConfig().getSaveGalleryExceptions(type); + allExceptions.put(dialogException.dialogId, dialogException); + getUserConfig().updateSaveGalleryExceptions(type, allExceptions); + } + finishFragment(); + }); + frameLayout.addView(button, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.BOTTOM, 16, 16, 16, 16)); + + } + updateRows(); + return fragmentView; + } + + private void updateRows() { + boolean animated = !isPaused && adapter != null; + ArrayList oldItems = null; + if (animated) { + oldItems = new ArrayList(); + oldItems.addAll(items); + } + + items.clear(); + + if (dialogException != null) { + items.add(new Item(VIEW_TYPE_USER_INFO)); + items.add(new Item(VIEW_TYPE_DIVIDER)); + } + items.add(new Item(VIEW_TYPE_HEADER, LocaleController.getString("SaveToGallery", R.string.SaveToGallery))); + savePhotosRow = items.size(); + items.add(new Item(VIEW_TYPE_TOGGLE)); + saveVideosRow = items.size(); + items.add(new Item(VIEW_TYPE_TOGGLE)); + String text = null; + if (dialogException != null) { + text = LocaleController.getString("SaveToGalleryHintCurrent", R.string.SaveToGalleryHintCurrent); + } else if (type == SAVE_TO_GALLERY_FLAG_PEER) { + text = LocaleController.getString("SaveToGalleryHintUser", R.string.SaveToGalleryHintUser); + } else if (type == SAVE_TO_GALLERY_FLAG_CHANNELS) { + text = LocaleController.getString("SaveToGalleryHintChannels", R.string.SaveToGalleryHintChannels); + } else if (type == SAVE_TO_GALLERY_FLAG_GROUP) { + text = LocaleController.getString("SaveToGalleryHintGroup", R.string.SaveToGalleryHintGroup); + } + items.add(new Item(VIEW_TYPE_DIVIDER_INFO, text)); + + if (getSettings().saveVideo) { + items.add(new Item(VIEW_TYPE_HEADER, LocaleController.getString("MaxVideoSize", R.string.MaxVideoSize))); + items.add(new Item(VIEW_TYPE_CHOOSER)); + videoDividerRow = items.size(); + items.add(new Item(VIEW_TYPE_DIVIDER_INFO)); + } else { + videoDividerRow = -1; + } + + if (dialogException == null) { + exceptionsDialogs = getUserConfig().getSaveGalleryExceptions(type); + items.add(new Item(VIEW_TYPE_ADD_EXCEPTION)); + boolean added = false; + for (int i = 0; i < exceptionsDialogs.size(); i++) { + items.add(new Item(VIEW_TYPE_CHAT, exceptionsDialogs.valueAt(i))); + added = true; + } + + if (added) { + items.add(new Item(VIEW_TYPE_DIVIDER)); + items.add(new Item(VIEW_TYPE_DELETE_ALL)); + } + items.add(new Item(VIEW_TYPE_DIVIDER_LAST)); + } + + if (adapter != null) { + if (oldItems != null) { + adapter.setItems(oldItems, items); + } else { + adapter.notifyDataSetChanged(); + } + } + } + + private class Adapter extends AdapterWithDiffUtils { + @NonNull + @Override + public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { + View view = null; + switch (viewType) { + case VIEW_TYPE_USER_INFO: + UserCell2 userCell2 = new UserCell2(getContext(), 4, 0, getResourceProvider()); + TLObject object; + if (DialogObject.isUserDialog(dialogId)) { + object = 
MessagesController.getInstance(currentAccount).getUser(dialogId); + } else { + object = MessagesController.getInstance(currentAccount).getChat(-dialogId); + } + userCell2.setData(object, null, null, 0); + view = userCell2; + view.setBackgroundColor(getThemedColor(Theme.key_windowBackgroundWhite)); + break; + case VIEW_TYPE_CHAT: + view = new UserCell(parent.getContext(), 4, 0, false, false); + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + break; + case VIEW_TYPE_ADD_EXCEPTION: + TextCell textCell = new TextCell(parent.getContext()); + textCell.setTextAndIcon(LocaleController.getString("NotificationsAddAnException", R.string.NotificationsAddAnException), R.drawable.msg_contact_add, true); + textCell.setColors(Theme.key_windowBackgroundWhiteBlueIcon, Theme.key_windowBackgroundWhiteBlueButton); + view = textCell; + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + break; + case VIEW_TYPE_DIVIDER_LAST: + view = new ShadowSectionCell(parent.getContext()); + view.setBackgroundDrawable(Theme.getThemedDrawable(getContext(), R.drawable.greydivider_bottom, Theme.getColor(Theme.key_windowBackgroundGrayShadow, getResourceProvider()))); + break; + case VIEW_TYPE_DIVIDER: + view = new ShadowSectionCell(parent.getContext()); + break; + case VIEW_TYPE_DELETE_ALL: + textCell = new TextCell(parent.getContext()); + textCell.setText(LocaleController.getString("NotificationsDeleteAllException", R.string.NotificationsDeleteAllException), false); + textCell.setColors(null, Theme.key_windowBackgroundWhiteRedText5); + view = textCell; + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + break; + case VIEW_TYPE_HEADER: + HeaderCell headerCell = new HeaderCell(parent.getContext()); + view = headerCell; + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + break; + case VIEW_TYPE_TOGGLE: + TextCheckCell textCheckCell = new TextCheckCell(parent.getContext()); + view = textCheckCell; + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + break; + case VIEW_TYPE_DIVIDER_INFO: + TextInfoPrivacyCell textInfoPrivacyCell = new TextInfoPrivacyCell(parent.getContext()); + view = textInfoPrivacyCell; + break; + case VIEW_TYPE_CHOOSER: + LinearLayout linearLayout = new LinearLayout(getContext()); + linearLayout.setOrientation(LinearLayout.VERTICAL); + SeekBarView slideChooseView = new SeekBarView(getContext()); + FrameLayout textContainer = new FrameLayout(getContext()); + + SelectableAnimatedTextView lowerTextView = new SelectableAnimatedTextView(getContext()); + lowerTextView.setTextSize(AndroidUtilities.dp(13)); + lowerTextView.setText(AndroidUtilities.formatFileSize(1024 * 512, true)); + textContainer.addView(lowerTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.BOTTOM)); + + SelectableAnimatedTextView midTextView = new SelectableAnimatedTextView(getContext()); + midTextView.setTextSize(AndroidUtilities.dp(13)); + // midTextView.setText(AndroidUtilities.formatFileSize(1024 * 512, true)); + textContainer.addView(midTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL | Gravity.BOTTOM)); + + SelectableAnimatedTextView topTextView = new SelectableAnimatedTextView(getContext()); + topTextView.setTextSize(AndroidUtilities.dp(13)); + topTextView.setText(AndroidUtilities.formatFileSize(SaveToGallerySettingsHelper.MAX_VIDEO_LIMIT, true)); + textContainer.addView(topTextView, 
LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.RIGHT | Gravity.BOTTOM)); + + + linearLayout.addView(textContainer, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 20, 0, 21, 10, 21, 0)); + linearLayout.addView(slideChooseView, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 38, 0, 5, 0, 5, 4)); + SaveToGallerySettingsHelper.Settings settings = getSettings(); + long currentValue = settings.limitVideo; + long maxValue = 4L * 1000 * 1024 * 1024; + if (currentValue < 0 || currentValue > SaveToGallerySettingsHelper.MAX_VIDEO_LIMIT) { + currentValue = SaveToGallerySettingsHelper.MAX_VIDEO_LIMIT; + } + slideChooseView.setReportChanges(true); + slideChooseView.setDelegate(new SeekBarView.SeekBarViewDelegate() { + @Override + public void onSeekBarDrag(boolean stop, float progress) { + boolean animated = slideChooseView.isAttachedToWindow(); + long limitExtremum = 100 * 1024 * 1024; + float limitExtremumF = 0.7f; + float limitExtremumK = 1f - limitExtremumF; + long value; + if (progress > limitExtremumF) { + float p = (progress - limitExtremumF) / limitExtremumK; + value = (long) (limitExtremum + (SaveToGallerySettingsHelper.MAX_VIDEO_LIMIT - limitExtremum) * p); + } else { + float p = progress / limitExtremumF; + value = (long) (1024 * 512 + (limitExtremum - 1024 * 512) * p); + } + if (progress >= 1f) { + lowerTextView.setSelectedInternal(false, animated); + midTextView.setSelectedInternal(false, animated); + topTextView.setSelectedInternal(true, animated); + AndroidUtilities.updateViewVisibilityAnimated(midTextView, false, 0.8f, animated); + } else if (progress == 0f) { + lowerTextView.setSelectedInternal(true, animated); + midTextView.setSelectedInternal(false, animated); + topTextView.setSelectedInternal(false, animated); + AndroidUtilities.updateViewVisibilityAnimated(midTextView, false, 0.8f, animated); + } else { + midTextView.setText( + LocaleController.formatString("UpToFileSize", R.string.UpToFileSize, + AndroidUtilities.formatFileSize(value, true) + ), false); + lowerTextView.setSelectedInternal(false, animated); + midTextView.setSelectedInternal(true, animated); + topTextView.setSelectedInternal(false, animated); + AndroidUtilities.updateViewVisibilityAnimated(midTextView, true, 0.8f, animated); + } + if (stop) { + getSettings().limitVideo = value; + onSettingsUpdated(); + } + + } + + @Override + public void onSeekBarPressed(boolean pressed) { + + } + }); + + long limitExtremum = 100 * 1024 * 1024; + float limitExtremumF = 0.7f; + float limitExtremumK = 1f - limitExtremumF; + long mimValue = 1024 * 512; + float currentProgress; + if (currentValue > limitExtremum * limitExtremumF) { + float p = (currentValue - limitExtremum) / (float) (maxValue - limitExtremum); + currentProgress = limitExtremumF + limitExtremumK * p; + } else { + float p = (currentValue - mimValue) / (float) (limitExtremum - mimValue); + currentProgress = limitExtremumF * p; + } + slideChooseView.setProgress(currentProgress); + slideChooseView.delegate.onSeekBarDrag(false, slideChooseView.getProgress()); + + view = linearLayout; + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + break; + } + view.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT)); + return new RecyclerListView.Holder(view); + } + + @Override + public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int position) { + if (items.get(position).viewType == VIEW_TYPE_ADD_EXCEPTION) { + TextCell cell = 
(TextCell) holder.itemView; + cell.setNeedDivider(exceptionsDialogs.size() > 0); + } else if (items.get(position).viewType == VIEW_TYPE_TOGGLE) { + TextCheckCell cell = (TextCheckCell) holder.itemView; + SaveToGallerySettingsHelper.Settings settings = getSettings(); + if (position == savePhotosRow) { + cell.setTextAndCheck(LocaleController.getString(R.string.SaveToGalleryPhotos), settings.savePhoto, true); + cell.setColorfullIcon(getThemedColor(Theme.key_statisticChartLine_lightblue), R.drawable.msg_filled_data_photos); + } else { + cell.setTextAndCheck(LocaleController.getString(R.string.SaveToGalleryVideos), settings.saveVideo, false); + cell.setColorfullIcon(getThemedColor(Theme.key_statisticChartLine_green), R.drawable.msg_filled_data_videos); + } + + } else if (items.get(position).viewType == VIEW_TYPE_DIVIDER_INFO) { + TextInfoPrivacyCell cell = (TextInfoPrivacyCell) holder.itemView; + if (position == videoDividerRow) { + long limit = getSettings().limitVideo; + if (limit == -1) { + limit = 4L * 1000 * 1024 * 1024; + } + if (dialogException != null) { + cell.setText(LocaleController.formatString("SaveToGalleryVideoHintCurrent", R.string.SaveToGalleryVideoHintCurrent)); + } else if (type == SAVE_TO_GALLERY_FLAG_PEER) { + cell.setText(LocaleController.formatString("SaveToGalleryVideoHintUser", R.string.SaveToGalleryVideoHintUser)); + } else if (type == SAVE_TO_GALLERY_FLAG_CHANNELS) { + cell.setText(LocaleController.formatString("SaveToGalleryVideoHintChannels", R.string.SaveToGalleryVideoHintChannels)); + } else if (type == SAVE_TO_GALLERY_FLAG_GROUP) { + cell.setText(LocaleController.formatString("SaveToGalleryVideoHintGroup", R.string.SaveToGalleryVideoHintGroup)); + } + } else { + cell.setText(items.get(position).title); + } + } else if (items.get(position).viewType == VIEW_TYPE_HEADER) { + HeaderCell cell = (HeaderCell) holder.itemView; + cell.setText(items.get(position).title); + } else if (items.get(position).viewType == VIEW_TYPE_CHAT) { + UserCell cell = (UserCell) holder.itemView; + SaveToGallerySettingsHelper.DialogException exception = items.get(position).exception; + TLObject object = getMessagesController().getUserOrChat(exception.dialogId); + String title = null; + if (object instanceof TLRPC.User) { + TLRPC.User user = (TLRPC.User) object; + if (user.self) { + title = LocaleController.getString("SavedMessages", R.string.SavedMessages); + } else { + title = ContactsController.formatName(user.first_name, user.last_name); + } + } else if (object instanceof TLRPC.Chat) { + TLRPC.Chat chat = (TLRPC.Chat) object; + title = chat.title; + } + cell.setSelfAsSavedMessages(true); + cell.setData(object, title, exception.createDescription(currentAccount), 0, !(position != items.size() - 1 && items.get(position + 1).viewType != VIEW_TYPE_CHAT)); + } + } + + @Override + public int getItemCount() { + return items.size(); + } + + @Override + public int getItemViewType(int position) { + return items.get(position).viewType; + } + + @Override + public boolean isEnabled(RecyclerView.ViewHolder holder) { + return holder.getItemViewType() == VIEW_TYPE_ADD_EXCEPTION || holder.getItemViewType() == VIEW_TYPE_CHAT + || holder.getItemViewType() == VIEW_TYPE_DELETE_ALL || holder.getItemViewType() == VIEW_TYPE_TOGGLE; + } + } + + private class Item extends AdapterWithDiffUtils.Item { + final SaveToGallerySettingsHelper.DialogException exception; + String title; + + + private Item(int viewType) { + super(viewType, false); + exception = null; + } + + private Item(int viewType, 
SaveToGallerySettingsHelper.DialogException exception) {
+            super(viewType, false);
+            this.exception = exception;
+        }
+
+        private Item(int viewType, String title) {
+            super(viewType, false);
+            this.title = title;
+            exception = null;
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+            Item item = (Item) o;
+            if (viewType != item.viewType) {
+                return false;
+            }
+            if (title != null) {
+                return Objects.equals(title, item.title);
+            }
+            if (exception != null && item.exception != null) {
+                return exception.dialogId == item.exception.dialogId;
+            }
+            return true;
+        }
+    }
+
+    SaveToGallerySettingsHelper.Settings getSettings() {
+        if (dialogException != null) {
+            return dialogException;
+        }
+        return SaveToGallerySettingsHelper.getSettings(type);
+    }
+
+    @Override
+    public void onResume() {
+        super.onResume();
+        updateRows();
+    }
+
+    private void onSettingsUpdated() {
+        if (isNewException) {
+            return;
+        }
+        if (dialogException != null) {
+            LongSparseArray<SaveToGallerySettingsHelper.DialogException> allExceptions = getUserConfig().getSaveGalleryExceptions(type);
+            allExceptions.put(dialogException.dialogId, dialogException);
+            getUserConfig().updateSaveGalleryExceptions(type, allExceptions);
+            return;
+        } else {
+            SaveToGallerySettingsHelper.saveSettings(type);
+        }
+    }
+
+    private class SelectableAnimatedTextView extends AnimatedTextView {
+
+        boolean selected;
+        AnimatedFloat progressToSelect = new AnimatedFloat(this);
+
+        public SelectableAnimatedTextView(Context context) {
+            super(context, true, true, false);
+            getDrawable().setAllowCancel(true);
+        }
+
+        @Override
+        protected void dispatchDraw(Canvas canvas) {
+            progressToSelect.set(selected ? 1f : 0);
+            setTextColor(ColorUtils.blendARGB(getThemedColor(Theme.key_windowBackgroundWhiteGrayText), getThemedColor(Theme.key_windowBackgroundWhiteBlueText), progressToSelect.get()));
+            super.dispatchDraw(canvas);
+        }
+
+        public void setSelectedInternal(boolean selected, boolean animated) {
+            if (this.selected != selected) {
+                this.selected = selected;
+                progressToSelect.set(selected ? 1f : 0, animated);
+                invalidate();
+            }
+        }
+    }
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/ui/SecretMediaViewer.java b/TMessagesProj/src/main/java/org/telegram/ui/SecretMediaViewer.java
index 113527b1dc..5c4e57a0c4 100644
--- a/TMessagesProj/src/main/java/org/telegram/ui/SecretMediaViewer.java
+++ b/TMessagesProj/src/main/java/org/telegram/ui/SecretMediaViewer.java
@@ -801,7 +801,13 @@ public void openMedia(MessageObject messageObject, PhotoViewer.PhotoViewerProvid
         if (document != null) {
             if (MessageObject.isGifDocument(document)) {
                 actionBar.setTitle(LocaleController.getString("DisappearingGif", R.string.DisappearingGif));
-                centerImage.setImage(ImageLocation.getForDocument(document), null, currentThumb != null ? new BitmapDrawable(currentThumb.bitmap) : null, -1, null, messageObject, 1);
+                ImageLocation location;
+                if (messageObject.messageOwner.attachPath != null && messageObject.attachPathExists) {
+                    location = ImageLocation.getForPath(messageObject.messageOwner.attachPath);
+                } else {
+                    location = ImageLocation.getForDocument(document);
+                }
+                centerImage.setImage(location, null, currentThumb != null ?
new BitmapDrawable(currentThumb.bitmap) : null, -1, null, messageObject, 1); secretDeleteTimer.setDestroyTime((long) messageObject.messageOwner.destroyTime * 1000, messageObject.messageOwner.ttl, false); } else { playerRetryPlayCount = 1; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/SelectAnimatedEmojiDialog.java b/TMessagesProj/src/main/java/org/telegram/ui/SelectAnimatedEmojiDialog.java index d0cb2350f1..82de7c0c5f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/SelectAnimatedEmojiDialog.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/SelectAnimatedEmojiDialog.java @@ -26,6 +26,7 @@ import android.text.Editable; import android.text.TextUtils; import android.text.TextWatcher; +import android.util.Log; import android.util.LongSparseArray; import android.util.SparseArray; import android.util.SparseIntArray; @@ -50,6 +51,7 @@ import android.widget.TextView; import androidx.annotation.NonNull; +import androidx.core.content.ContextCompat; import androidx.core.graphics.ColorUtils; import androidx.core.math.MathUtils; import androidx.recyclerview.widget.DefaultItemAnimator; @@ -67,6 +69,7 @@ import org.telegram.messenger.ImageLoader; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.MessageObject; @@ -76,6 +79,7 @@ import org.telegram.messenger.SharedConfig; import org.telegram.messenger.SvgHelper; import org.telegram.messenger.UserConfig; +import org.telegram.messenger.UserObject; import org.telegram.messenger.Utilities; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; @@ -108,6 +112,8 @@ import org.telegram.ui.Components.Reactions.ReactionsUtils; import org.telegram.ui.Components.RecyclerAnimationScrollHelper; import org.telegram.ui.Components.RecyclerListView; +import org.telegram.ui.Components.SearchStateDrawable; +import org.telegram.ui.Components.StickerCategoriesListView; import java.lang.reflect.Field; import java.util.ArrayList; @@ -115,15 +121,23 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Objects; +import tw.nekomimi.nekogram.NekoConfig; + public class SelectAnimatedEmojiDialog extends FrameLayout implements NotificationCenter.NotificationCenterDelegate { public final static int TYPE_EMOJI_STATUS = 0; public final static int TYPE_REACTIONS = 1; public final static int TYPE_SET_DEFAULT_REACTION = 2; public static final int TYPE_TOPIC_ICON = 3; + public static final int TYPE_AVATAR_CONSTRUCTOR = 4; + + private final int SPAN_COUNT_FOR_EMOJI = 8; + private final int SPAN_COUNT_FOR_STICKER = 5; + private final int SPAN_COUNT = 40; private final int RECENT_MAX_LINES = 5; private final int EXPAND_MAX_LINES = 3; @@ -138,7 +152,7 @@ public class SelectAnimatedEmojiDialog extends FrameLayout implements Notificati private int longtapHintRow; private int defaultTopicIconRow; private int topicEmojiHeaderRow; - + private EmojiPackExpand recentExpandButton; public onLongPressedListener bigReactionListener; @@ -151,6 +165,10 @@ public class SelectAnimatedEmojiDialog extends FrameLayout implements Notificati private Drawable forumIconDrawable; private ImageViewEmoji forumIconImage; private boolean animationsEnabled; + private boolean showStickers; + public boolean forUser; + private ArrayList stickerSets = new ArrayList<>(); 
+ private boolean enterAnimationInProgress; public void putAnimatedEmojiToCache(AnimatedEmojiDrawable animatedEmojiDrawable) { emojiGridView.animatedEmojiDrawables.put(animatedEmojiDrawable.getDocumentId(), animatedEmojiDrawable); @@ -167,6 +185,11 @@ public void setSelectedReactions(HashSet rowHashCodes = new ArrayList<>(); + private ArrayList rowHashCodes = new ArrayList<>(); private SparseIntArray positionToSection = new SparseIntArray(); private SparseIntArray sectionToPosition = new SparseIntArray(); private SparseIntArray positionToExpand = new SparseIntArray(); @@ -337,13 +361,14 @@ public void dismiss() { private ArrayList installedEmojiSets = new ArrayList<>(); private boolean recentExpanded = false; private ArrayList recent = new ArrayList<>(); + private ArrayList recentStickers = new ArrayList<>(); private ArrayList topReactions = new ArrayList<>(); private ArrayList recentReactions = new ArrayList<>(); private ArrayList defaultStatuses = new ArrayList<>(); private ArrayList frozenEmojiPacks = new ArrayList<>(); private ArrayList packs = new ArrayList<>(); private boolean includeEmpty = false; - private boolean includeHint = false; + public boolean includeHint = false; private Integer hintExpireDate; private boolean drawBackground = true; private List recentReactionsToSet; @@ -363,6 +388,10 @@ public void dismiss() { private int topMarginDp; DefaultItemAnimator emojiItemAnimator; + protected void invalidateParent() { + + } + public SelectAnimatedEmojiDialog(BaseFragment baseFragment, Context context, boolean includeEmpty, Theme.ResourcesProvider resourcesProvider) { this(baseFragment, context, includeEmpty, null, TYPE_EMOJI_STATUS, resourcesProvider); } @@ -377,7 +406,7 @@ public SelectAnimatedEmojiDialog(BaseFragment baseFragment, Context context, boo this.type = type; this.includeEmpty = includeEmpty; this.baseFragment = baseFragment; - this.includeHint = MessagesController.getGlobalMainSettings().getInt("emoji"+(type==TYPE_EMOJI_STATUS?"status":"reaction")+"usehint", 0) < 3; + this.includeHint = MessagesController.getGlobalMainSettings().getInt("emoji" + (type == TYPE_EMOJI_STATUS ? "status" : "reaction") + "usehint", 0) < 3; selectorPaint.setColor(Theme.getColor(Theme.key_listSelector, resourcesProvider)); selectorAccentPaint.setColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_windowBackgroundWhiteBlueIcon, resourcesProvider), 30)); @@ -433,7 +462,7 @@ protected void dispatchDraw(Canvas canvas) { return; } canvas.save(); - paint.setShadowLayer(dp(2), 0, dp(-0.66f), 0x1e000000); + Theme.applyDefaultShadow(paint); paint.setColor(Theme.getColor(Theme.key_actionBarDefaultSubmenuBackground, resourcesProvider)); paint.setAlpha((int) (255 * getAlpha())); float px = (bubbleX == null ? getWidth() / 2f : bubbleX) + AndroidUtilities.dp(20); @@ -448,7 +477,7 @@ protected void dispatchDraw(Canvas canvas) { path.rewind(); path.addRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(12), AndroidUtilities.dp(12), Path.Direction.CW); canvas.drawPath(path, paint); -// if (showAnimator != null && showAnimator.isRunning()) { +// if (enterAnimationInProgress() { canvas.clipPath(path); // } super.dispatchDraw(canvas); @@ -475,55 +504,85 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { addView(bubble2View, LayoutHelper.createFrame(17, 9, Gravity.TOP | Gravity.LEFT, bubbleX / AndroidUtilities.density + (bubbleRight ? 
-25 : 10), 6 + 8 - 9 + topMarginDp, 0, 0)); } - boolean showSettings = baseFragment != null && type != TYPE_TOPIC_ICON; - emojiTabs = new EmojiTabsStrip(context, null, false, true, type, showSettings ? () -> { - onSettings(); - baseFragment.presentFragment(new StickersActivity(MediaDataController.TYPE_EMOJIPACKS, frozenEmojiPacks)); - if (dismiss != null) { - dismiss.run(); - } - } : null) { + boolean showSettings = baseFragment != null && type != TYPE_TOPIC_ICON && type != TYPE_AVATAR_CONSTRUCTOR; + for (int i = 0; i < 2; i++) { + EmojiTabsStrip emojiTabs = new EmojiTabsStrip(context, null, false, true, type, showSettings ? () -> { + search(null, false, false); + onSettings(); + baseFragment.presentFragment(new StickersActivity(MediaDataController.TYPE_EMOJIPACKS, frozenEmojiPacks)); + if (dismiss != null) { + dismiss.run(); + } + } : null) { - @Override - protected ColorFilter getEmojiColorFilter() { - return premiumStarColorFilter; - } + @Override + protected ColorFilter getEmojiColorFilter() { + return premiumStarColorFilter; + } - @Override - protected boolean onTabClick(int index) { - if (smoothScrolling) { - return false; + @Override + protected boolean onTabClick(int index) { + if (smoothScrolling) { + return false; + } + if (type == TYPE_AVATAR_CONSTRUCTOR) { + if (index == 0) { + showStickers = !showStickers; + SelectAnimatedEmojiDialog.this.emojiTabs.setVisibility(View.GONE); + SelectAnimatedEmojiDialog.this.emojiTabs = cachedEmojiTabs[showStickers ? 1 : 0]; + SelectAnimatedEmojiDialog.this.emojiTabs.setVisibility(View.VISIBLE); + SelectAnimatedEmojiDialog.this.emojiTabs.toggleEmojiStickersTab.setDrawable(ContextCompat.getDrawable(getContext(), showStickers ? R.drawable.msg_emoji_stickers : R.drawable.msg_emoji_smiles)); + updateRows(true, false, false); + layoutManager.scrollToPositionWithOffset(0, 0); + return true; + } + index--; + } + int position = 0; + if (index > 0 && sectionToPosition.indexOfKey(index - 1) >= 0) { + position = sectionToPosition.get(index - 1); + } + scrollToPosition(position, AndroidUtilities.dp(-2)); + SelectAnimatedEmojiDialog.this.emojiTabs.select(index); + emojiGridView.scrolledByUserOnce = true; + search(null); + if (searchBox != null && searchBox.categoriesListView != null) { + searchBox.categoriesListView.selectCategory(null); + } + return true; } - int position = searchRow == -1 ? 1 : 0; - if (index > 0 && sectionToPosition.indexOfKey(index - 1) >= 0) { - position = sectionToPosition.get(index - 1); + + @Override + protected void onTabCreate(EmojiTabsStrip.EmojiTabButton button) { + if (showAnimator == null || showAnimator.isRunning()) { + button.setScaleX(0); + button.setScaleY(0); + } + } + }; + emojiTabs.recentTab.setOnLongClickListener(e -> { + onRecentLongClick(); + try { + if (!NekoConfig.disableVibration.Bool()) + performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); + } catch (Exception ignore) { } - scrollToPosition(position, AndroidUtilities.dp(-2)); - emojiTabs.select(index); - emojiGridView.scrolledByUserOnce = true; return true; + }); + emojiTabs.updateButtonDrawables = false; + if (type == TYPE_AVATAR_CONSTRUCTOR) { + emojiTabs.setAnimatedEmojiCacheType(AnimatedEmojiDrawable.CACHE_TYPE_ALERT_PREVIEW_STATIC); + } else { + emojiTabs.setAnimatedEmojiCacheType(type == TYPE_EMOJI_STATUS || type == TYPE_SET_DEFAULT_REACTION ? 
AnimatedEmojiDrawable.CACHE_TYPE_TAB_STRIP : AnimatedEmojiDrawable.CACHE_TYPE_ALERT_PREVIEW_TAB_STRIP); } + emojiTabs.animateAppear = bubbleX == null; + emojiTabs.setPaddingLeft(5); + contentView.addView(emojiTabs, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36)); + cachedEmojiTabs[i] = emojiTabs; + } - @Override - protected void onTabCreate(EmojiTabsStrip.EmojiTabButton button) { - if (showAnimator == null || showAnimator.isRunning()) { - button.setScaleX(0); - button.setScaleY(0); - } - } - }; - emojiTabs.recentTab.setOnLongClickListener(e -> { - onRecentLongClick(); - try { - performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); - } catch (Exception ignore) {} - return true; - }); - emojiTabs.updateButtonDrawables = false; - emojiTabs.setAnimatedEmojiCacheType(type == TYPE_EMOJI_STATUS || type == TYPE_SET_DEFAULT_REACTION ? AnimatedEmojiDrawable.CACHE_TYPE_TAB_STRIP : AnimatedEmojiDrawable.CACHE_TYPE_ALERT_PREVIEW_TAB_STRIP); - emojiTabs.animateAppear = bubbleX == null; - emojiTabs.setPaddingLeft(5); - contentView.addView(emojiTabs, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36)); + emojiTabs = cachedEmojiTabs[0]; + cachedEmojiTabs[1].setVisibility(View.GONE); emojiTabsShadow = new View(context) { @Override @@ -546,7 +605,8 @@ public void onScrolled(int dx, int dy) { updateTabsPosition(layoutManager.findFirstCompletelyVisibleItemPosition()); } updateSearchBox(); - AndroidUtilities.updateViewVisibilityAnimated(emojiTabsShadow, emojiGridView.computeVerticalScrollOffset() != 0, 1f, true); + AndroidUtilities.updateViewVisibilityAnimated(emojiTabsShadow, emojiGridView.computeVerticalScrollOffset() != 0 || type == TYPE_EMOJI_STATUS || type == TYPE_REACTIONS, 1f, true); + invalidateParent(); } @Override @@ -575,7 +635,7 @@ protected float animateByScale(View view) { emojiGridView.setItemAnimator(emojiItemAnimator); emojiGridView.setPadding(dp(5), dp(2), dp(5), dp(2 + 36)); emojiGridView.setAdapter(adapter = new Adapter()); - emojiGridView.setLayoutManager(layoutManager = new GridLayoutManager(context, 8) { + emojiGridView.setLayoutManager(layoutManager = new GridLayoutManager(context, SPAN_COUNT) { @Override public void smoothScrollToPosition(RecyclerView recyclerView, RecyclerView.State state, int position) { try { @@ -595,7 +655,11 @@ public void onEnd() { layoutManager.setSpanSizeLookup(new GridLayoutManager.SpanSizeLookup() { @Override public int getSpanSize(int position) { - return (positionToSection.indexOfKey(position) >= 0 || positionToButton.indexOfKey(position) >= 0 || position == recentReactionsSectionRow || position == popularSectionRow || position == longtapHintRow || position == searchRow || position == topicEmojiHeaderRow) ? layoutManager.getSpanCount() : 1; + if (positionToSection.indexOfKey(position) >= 0 || positionToButton.indexOfKey(position) >= 0 || position == recentReactionsSectionRow || position == popularSectionRow || position == longtapHintRow || position == searchRow || position == topicEmojiHeaderRow) { + return layoutManager.getSpanCount(); + } else { + return showStickers ? 
8 : 5; + } } }); @@ -619,7 +683,9 @@ public void onScrolled(int dx, int dy) { emojiSearchGridView.getItemAnimator().setMoveInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); } TextView emptyViewText = new TextView(context); - if (type == TYPE_EMOJI_STATUS) { + if (type == TYPE_AVATAR_CONSTRUCTOR) { + emptyViewText.setText(LocaleController.getString("NoEmojiOrStickersFound", R.string.NoEmojiOrStickersFound)); + } else if (type == TYPE_EMOJI_STATUS) { emptyViewText.setText(LocaleController.getString("NoEmojiFound", R.string.NoEmojiFound)); } else if (type == TYPE_REACTIONS || type == TYPE_SET_DEFAULT_REACTION) { emptyViewText.setText(LocaleController.getString("NoReactionsFound", R.string.NoReactionsFound)); @@ -636,9 +702,9 @@ public void onScrolled(int dx, int dy) { emojiSearchEmptyView.setVisibility(View.GONE); emojiSearchEmptyView.setAlpha(0); gridViewContainer.addView(emojiSearchEmptyView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 0, 0, 0, 0)); - emojiSearchGridView.setPadding(dp(5), dp(52 + 2), dp(5), dp(2)); + emojiSearchGridView.setPadding(dp(5), dp(52 + 2), dp(5), dp(36 + 2)); emojiSearchGridView.setAdapter(searchAdapter = new SearchAdapter()); - emojiSearchGridView.setLayoutManager(searchLayoutManager = new GridLayoutManager(context, 8) { + emojiSearchGridView.setLayoutManager(searchLayoutManager = new GridLayoutManager(context, SPAN_COUNT) { @Override public void smoothScrollToPosition(RecyclerView recyclerView, RecyclerView.State state, int position) { try { @@ -655,13 +721,20 @@ public void onEnd() { } } }); -// searchLayoutManager.setSpanSizeLookup(new GridLayoutManager.SpanSizeLookup() { -// @Override -// public int getSpanSize(int position) { -// return position == 0 ? layoutManager.getSpanCount() : 1; -// } -// }); + searchLayoutManager.setSpanSizeLookup(new GridLayoutManager.SpanSizeLookup() { + @Override + public int getSpanSize(int position) { + int viewType = searchAdapter.getItemViewType(position); + if (viewType == SearchAdapter.VIEW_TYPE_HEADER) { + return layoutManager.getSpanCount(); + } + if (viewType == SearchAdapter.VIEW_TYPE_STICKER) { + return 8; + } + return 5; + } + }); emojiSearchGridView.setVisibility(View.GONE); gridViewContainer.addView(emojiSearchGridView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.FILL, 0, 0, 0, 0)); contentView.addView(gridViewContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.TOP, 0, 36 + (1 / AndroidUtilities.density), 0, 0)); @@ -678,12 +751,16 @@ public void onEndAnimation() { smoothScrolling = false; } }); + scrollHelper.setScrollListener(() -> { + invalidateParent(); + }); RecyclerListView.OnItemLongClickListenerExtended onItemLongClick = new RecyclerListView.OnItemLongClickListenerExtended() { @Override public boolean onItemClick(View view, int position, float x, float y) { if (view instanceof ImageViewEmoji && type == TYPE_REACTIONS) { incrementHintUse(); + if (!NekoConfig.disableVibration.Bool()) performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); ImageViewEmoji imageViewEmoji = (ImageViewEmoji) view; if (!imageViewEmoji.isDefaultReaction && !UserConfig.getInstance(currentAccount).isPremium()) { @@ -698,6 +775,7 @@ public boolean onItemClick(View view, int position, float x, float y) { pressedProgress = 0f; cancelPressed = false; if (selectedReactionView.isDefaultReaction) { + setBigReactionAnimatedEmoji(null); TLRPC.TL_availableReaction reaction = 
MediaDataController.getInstance(currentAccount).getReactionsMap().get(selectedReactionView.reaction.emojicon); if (reaction != null) { bigReactionImageReceiver.setImage(ImageLocation.getForDocument(reaction.select_animation), ReactionsUtils.SELECT_ANIMATION_FILTER, null, null, null, 0, "tgs", selectedReactionView.reaction, 0); @@ -706,6 +784,7 @@ public boolean onItemClick(View view, int position, float x, float y) { setBigReactionAnimatedEmoji(new AnimatedEmojiDrawable(AnimatedEmojiDrawable.CACHE_TYPE_ALERT_PREVIEW_LARGE, currentAccount, selectedReactionView.span.documentId)); } emojiGridView.invalidate(); + invalidateParent(); return true; } if (view instanceof ImageViewEmoji && ((ImageViewEmoji) view).span != null && type == TYPE_EMOJI_STATUS) { @@ -746,6 +825,7 @@ public void dismiss() { dialog.show(); try { + if (!NekoConfig.disableVibration.Bool()) view.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); } catch (Exception ignore) {} return true; @@ -781,11 +861,14 @@ public void onAnimationEnd(Animator animation) { if (viewEmoji.isDefaultReaction) { incrementHintUse(); onReactionClick(viewEmoji, viewEmoji.reaction); + } else if (viewEmoji.isStaticIcon && viewEmoji.document != null) { + onStickerClick(viewEmoji, viewEmoji.document); } else { onEmojiClick(viewEmoji, viewEmoji.span); } if (type != TYPE_REACTIONS) { try { + if (!NekoConfig.disableVibration.Bool()) performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); } catch (Exception ignore) {} } @@ -793,6 +876,7 @@ public void onAnimationEnd(Animator animation) { onEmojiClick(view, null); if (type != TYPE_REACTIONS) { try { + if (!NekoConfig.disableVibration.Bool()) performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); } catch (Exception ignore) {} } @@ -801,6 +885,7 @@ public void onAnimationEnd(Animator animation) { expand(position, button); if (type != TYPE_REACTIONS) { try { + if (!NekoConfig.disableVibration.Bool()) performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); } catch (Exception ignore) {} } @@ -812,9 +897,9 @@ public void onAnimationEnd(Animator animation) { emojiSearchGridView.setOnItemClickListener(onItemClick); searchBox = new SearchBox(context); - searchBox.setTranslationY(-AndroidUtilities.dp(4 + 52)); + searchBox.setTranslationY(-AndroidUtilities.dp( 52)); searchBox.setVisibility(View.INVISIBLE); - gridViewContainer.addView(searchBox, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 52, Gravity.TOP, 0, 0, 0, 0)); + gridViewContainer.addView(searchBox, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 52, Gravity.TOP, 0, -4, 0, 0)); topGradientView = new View(context) { @Override @@ -850,6 +935,10 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { updateRows(true, false); } + private void onStickerClick(ImageViewEmoji viewEmoji, TLRPC.Document document) { + onEmojiSelected(viewEmoji, null, document, null); + } + protected void onSettings() { } @@ -872,6 +961,7 @@ private void setBigReactionAnimatedEmoji(AnimatedEmojiDrawable animatedEmojiDraw } this.bigReactionAnimatedEmoji = animatedEmojiDrawable; if (bigReactionAnimatedEmoji != null) { + bigReactionAnimatedEmoji.setColorFilter(premiumStarColorFilter); bigReactionAnimatedEmoji.addView(this); } } @@ -933,12 +1023,12 @@ private void setDim(float dim, boolean animated) { private void updateTabsPosition(int 
position) { if (position != RecyclerView.NO_POSITION) { - final int recentmaxlen = layoutManager.getSpanCount() * RECENT_MAX_LINES; + final int recentmaxlen = SPAN_COUNT_FOR_EMOJI * RECENT_MAX_LINES; int recentSize = recent.size() > recentmaxlen && !recentExpanded ? recentmaxlen : recent.size() + (includeEmpty ? 1 : 0); if (position <= recentSize || position <= recentReactions.size()) { emojiTabs.select(0); // recent } else { - final int maxlen = layoutManager.getSpanCount() * EXPAND_MAX_LINES; + final int maxlen = SPAN_COUNT_FOR_EMOJI * EXPAND_MAX_LINES; for (int i = 0; i < positionToSection.size(); ++i) { int startPosition = positionToSection.keyAt(i); int index = i - (defaultStatuses.isEmpty() ? 0 : 1); @@ -963,20 +1053,20 @@ private void updateSearchBox() { if (searched) { searchBox.clearAnimation(); searchBox.setVisibility(View.VISIBLE); - searchBox.animate().translationY(-AndroidUtilities.dp(4)).start(); + searchBox.animate().translationY(0).start(); } else { if (emojiGridView.getChildCount() > 0) { View first = emojiGridView.getChildAt(0); if (emojiGridView.getChildAdapterPosition(first) == searchRow && "searchbox".equals(first.getTag())) { searchBox.setVisibility(View.VISIBLE); - searchBox.setTranslationY(first.getY() - AndroidUtilities.dp(4)); + searchBox.setTranslationY(first.getY()); } else { // searchBox.setVisibility(View.INVISIBLE); - searchBox.setTranslationY(-AndroidUtilities.dp(4 + 52)); + searchBox.setTranslationY(-AndroidUtilities.dp(52)); } } else { // searchBox.setVisibility(View.INVISIBLE); - searchBox.setTranslationY(-AndroidUtilities.dp(4 + 52)); + searchBox.setTranslationY(-AndroidUtilities.dp(52)); } } } @@ -1134,7 +1224,7 @@ private void checkScroll() { private void scrollToPosition(int p, int offset) { View view = layoutManager.findViewByPosition(p); int firstPosition = layoutManager.findFirstVisibleItemPosition(); - if ((view == null && Math.abs(p - firstPosition) > layoutManager.getSpanCount() * 9f) || !SharedConfig.animationsEnabled()) { + if ((view == null && Math.abs(p - firstPosition) > SPAN_COUNT_FOR_EMOJI * 9f) || !SharedConfig.animationsEnabled()) { scrollHelper.setScrollDirection(layoutManager.findFirstVisibleItemPosition() < p ? 
RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN : RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP); scrollHelper.scrollToPosition(p, offset, false, true); } else { @@ -1158,12 +1248,18 @@ protected void onStart() { public boolean searching = false; public boolean searched = false; + public boolean searchedLiftUp = false; private String lastQuery; private ArrayList searchResult; + private ArrayList stickersSearchResult; private ValueAnimator gridSwitchAnimator; - private boolean gridSearch = false; public void switchGrids(boolean search) { + switchGrids(search, true); + } + + private boolean gridSearch = false; + public void switchGrids(boolean search, boolean liftUp) { if (gridSearch == search) { return; } @@ -1184,7 +1280,9 @@ public void switchGrids(boolean search) { t = 1f - t; } emojiGridView.setAlpha(1f - t); + emojiGridView.setTranslationY(AndroidUtilities.dp(8) * t); emojiSearchGridView.setAlpha(t); + emojiSearchGridView.setTranslationY(AndroidUtilities.dp(8) * (1f - t)); emojiSearchEmptyView.setAlpha(emojiSearchGridView.getAlpha() * t); }); gridSwitchAnimator.addListener(new AnimatorListenerAdapter() { @@ -1199,22 +1297,28 @@ public void onAnimationEnd(Animator animation) { } } }); - gridSwitchAnimator.setDuration(280); + gridSwitchAnimator.setDuration(320); gridSwitchAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); gridSwitchAnimator.start(); ((View) emojiGridView.getParent()).animate() - .translationY(gridSearch ? -AndroidUtilities.dp(36) : 0) + .translationY(gridSearch && liftUp ? -AndroidUtilities.dp(36) : 0) + .setUpdateListener(anm -> invalidateParent()) .setInterpolator(CubicBezierInterpolator.DEFAULT) .setDuration(160) .start(); + if (gridSearch && liftUp) { + emojiSearchGridView.setPadding(dp(5), dp(52 + 2), dp(5), dp(2)); + } else { + emojiSearchGridView.setPadding(dp(5), dp(52 + 2), dp(5), dp(36 + 2)); + } checkScroll(); } private ArrayList emptyViewEmojis = new ArrayList(4); { - emptyViewEmojis.add("\uD83D\uDE16"); - emptyViewEmojis.add("\uD83D\uDE2B"); - emptyViewEmojis.add("\uD83E\uDEE0"); - emptyViewEmojis.add("\uD83D\uDE28"); + emptyViewEmojis.add("😖"); + emptyViewEmojis.add("😫"); + emptyViewEmojis.add("🫠"); + emptyViewEmojis.add("😨"); emptyViewEmojis.add("❓"); }; public void updateSearchEmptyViewImage() { @@ -1226,6 +1330,7 @@ public void updateSearchEmptyViewImage() { ArrayList featuredSets = MediaDataController.getInstance(currentAccount).getFeaturedEmojiSets(); List shuffledFeaturedSets = new ArrayList<>(featuredSets); Collections.shuffle(shuffledFeaturedSets); + int skip = (int) Math.round(Math.random() * 10); for (int i = 0; i < shuffledFeaturedSets.size(); ++i) { if (shuffledFeaturedSets.get(i) instanceof TLRPC.TL_stickerSetFullCovered && ((TLRPC.TL_stickerSetFullCovered) shuffledFeaturedSets.get(i)).documents != null) { List documents = new ArrayList<>(((TLRPC.TL_stickerSetFullCovered) shuffledFeaturedSets.get(i)).documents); @@ -1234,15 +1339,16 @@ public void updateSearchEmptyViewImage() { TLRPC.Document document = documents.get(j); if (document != null && emptyViewEmojis.contains(MessageObject.findAnimatedEmojiEmoticon(document, null))) { emoji = document; - break; + if (skip-- <= 0) + break; } } } - if (emoji != null) { + if (emoji != null && skip <= 0) { break; } } - if (emoji == null) { + if (emoji == null || skip > 0) { ArrayList sets = MediaDataController.getInstance(currentAccount).getStickerSets(MediaDataController.TYPE_EMOJIPACKS); List shuffledSets = new ArrayList<>(sets); Collections.shuffle(shuffledSets); @@ -1254,11 
+1360,12 @@ public void updateSearchEmptyViewImage() { TLRPC.Document document = documents.get(j); if (document != null && emptyViewEmojis.contains(MessageObject.findAnimatedEmojiEmoticon(document, null))) { emoji = document; - break; + if (skip-- <= 0) + break; } } } - if (emoji != null) { + if (emoji != null && skip <= 0) { break; } } @@ -1328,7 +1435,12 @@ public void onAnimationEnd(Animator animation) { private static String[] lastSearchKeyboardLanguage; private Runnable clearSearchRunnable; private Runnable searchRunnable; + public void search(String query) { + search(query, true, true); + } + + public void search(String query, boolean liftUp, boolean delay) { if (clearSearchRunnable != null) { AndroidUtilities.cancelRunOnUIThread(clearSearchRunnable); clearSearchRunnable = null; @@ -1337,12 +1449,13 @@ public void search(String query) { AndroidUtilities.cancelRunOnUIThread(searchRunnable); searchRunnable = null; } - if (query == null) { + if (TextUtils.isEmpty(query)) { searching = false; searched = false; - switchGrids(false); - if (searchBox != null && searchBox.clearDrawable != null) { - searchBox.clearDrawable.stopAnimation(); + switchGrids(false, liftUp); + if (searchBox != null) { + searchBox.showProgress(false); + searchBox.toggleClear(false); } searchAdapter.updateRows(true); lastQuery = null; @@ -1350,8 +1463,9 @@ public void search(String query) { boolean firstSearch = !searching; searching = true; searched = false; - if (searchBox != null && searchBox.clearDrawable != null) { - searchBox.clearDrawable.startAnimation(); + searchedLiftUp = liftUp; + if (searchBox != null) { + searchBox.showProgress(true); } if (firstSearch) { if (searchResult != null) { @@ -1373,45 +1487,146 @@ public void search(String query) { } lastSearchKeyboardLanguage = newLanguage; AndroidUtilities.runOnUIThread(searchRunnable = () -> { - MediaDataController.getInstance(currentAccount).getAnimatedEmojiByKeywords(query, _documentIds -> { - final ArrayList documentIds = _documentIds == null ? 
new ArrayList<>() : _documentIds; - final HashMap availableReactions = MediaDataController.getInstance(currentAccount).getReactionsMap(); - if (Emoji.fullyConsistsOfEmojis(query)) { - ArrayList stickerSets = MediaDataController.getInstance(currentAccount).getStickerSets(MediaDataController.TYPE_EMOJIPACKS); - String emoticon; - - for (int i = 0; i < stickerSets.size(); ++i) { - if (stickerSets.get(i).documents != null) { - ArrayList documents = stickerSets.get(i).documents; - if (documents != null) { - for (int j = 0; j < documents.size(); ++j) { - emoticon = MessageObject.findAnimatedEmojiEmoticon(documents.get(j), null); - long id = documents.get(j).id; - if (emoticon != null && !documentIds.contains(id) && query.contains(emoticon.toLowerCase())) { - documentIds.add(id); + final LinkedHashSet documentIds = new LinkedHashSet<>(); + final HashMap availableReactions = MediaDataController.getInstance(currentAccount).getReactionsMap(); + final ArrayList reactions = new ArrayList<>(); + final boolean queryFullyConsistsOfEmojis = Emoji.fullyConsistsOfEmojis(query); + final ArrayList> emojiArrays = new ArrayList<>(); + final HashMap, String> emojiStickers = new HashMap<>(); + Utilities.doCallbacks( + next -> { + if (queryFullyConsistsOfEmojis) { + StickerCategoriesListView.search.fetch(UserConfig.selectedAccount, query, list -> { + if (list != null) { + documentIds.addAll(list.document_id); + } + next.run(); + }); + } else { + next.run(); + } + }, + next -> { + MediaDataController.getInstance(currentAccount).getAnimatedEmojiByKeywords(query, _documentIds -> { + if (_documentIds != null) { + documentIds.addAll(_documentIds); + } + next.run(); + }); + }, + next -> { + if (queryFullyConsistsOfEmojis) { + ArrayList stickerSets = MediaDataController.getInstance(currentAccount).getStickerSets(MediaDataController.TYPE_EMOJIPACKS); + String emoticon; + + for (int i = 0; i < stickerSets.size(); ++i) { + if (stickerSets.get(i).documents != null) { + ArrayList documents = stickerSets.get(i).documents; + if (documents != null) { + for (int j = 0; j < documents.size(); ++j) { + emoticon = MessageObject.findAnimatedEmojiEmoticon(documents.get(j), null); + long id = documents.get(j).id; + if (emoticon != null && !documentIds.contains(id) && query.contains(emoticon.toLowerCase())) { + documentIds.add(id); + } + } } } } - } - } - ArrayList featuredStickerSets = MediaDataController.getInstance(currentAccount).getFeaturedEmojiSets(); - for (int i = 0; i < featuredStickerSets.size(); ++i) { - if (featuredStickerSets.get(i) instanceof TLRPC.TL_stickerSetFullCovered && - ((TLRPC.TL_stickerSetFullCovered) featuredStickerSets.get(i)).keywords != null) { - ArrayList documents = ((TLRPC.TL_stickerSetFullCovered) featuredStickerSets.get(i)).documents; - if (documents != null) { - for (int j = 0; j < documents.size(); ++j) { - emoticon = MessageObject.findAnimatedEmojiEmoticon(documents.get(j), null); - long id = documents.get(j).id; - if (emoticon != null && !documentIds.contains(id) && query.contains(emoticon)) { - documentIds.add(id); + ArrayList featuredStickerSets = MediaDataController.getInstance(currentAccount).getFeaturedEmojiSets(); + for (int i = 0; i < featuredStickerSets.size(); ++i) { + if (featuredStickerSets.get(i) instanceof TLRPC.TL_stickerSetFullCovered && + ((TLRPC.TL_stickerSetFullCovered) featuredStickerSets.get(i)).keywords != null) { + ArrayList documents = ((TLRPC.TL_stickerSetFullCovered) featuredStickerSets.get(i)).documents; + if (documents != null) { + for (int j = 0; j < documents.size(); ++j) 
{ + emoticon = MessageObject.findAnimatedEmojiEmoticon(documents.get(j), null); + long id = documents.get(j).id; + if (emoticon != null && !documentIds.contains(id) && query.contains(emoticon)) { + documentIds.add(id); + } + } } } } + + next.run(); + } else { + MediaDataController.getInstance(currentAccount).getEmojiSuggestions( + lastSearchKeyboardLanguage, query, false, + (result, alias) -> { + try { + for (int i = 0; i < result.size(); ++i) { + if (result.get(i).emoji.startsWith("animated_")) { + documentIds.add(Long.parseLong(result.get(i).emoji.substring(9))); + } else { + if (type == TYPE_REACTIONS || type == TYPE_SET_DEFAULT_REACTION) { + TLRPC.TL_availableReaction reaction = availableReactions.get(result.get(i).emoji); + if (reaction != null) { + reactions.add(ReactionsLayoutInBubble.VisibleReaction.fromEmojicon(reaction)); + } + } + } + } + } catch (Exception ignore) { + } + next.run(); + }, + null, true, type == TYPE_TOPIC_ICON, false, 30 + ); } - } - AndroidUtilities.runOnUIThread(() -> { + }, + next -> { + if (type != TYPE_AVATAR_CONSTRUCTOR) { + next.run(); + return; + } + final ArrayList emojiStickersArray = new ArrayList<>(0); + final LongSparseArray emojiStickersMap = new LongSparseArray<>(0); + HashMap> allStickers = MediaDataController.getInstance(currentAccount).getAllStickers(); + if (query.length() <= 14) { + CharSequence emoji = query; + int length = emoji.length(); + for (int a = 0; a < length; a++) { + if (a < length - 1 && (emoji.charAt(a) == 0xD83C && emoji.charAt(a + 1) >= 0xDFFB && emoji.charAt(a + 1) <= 0xDFFF || emoji.charAt(a) == 0x200D && (emoji.charAt(a + 1) == 0x2640 || emoji.charAt(a + 1) == 0x2642))) { + emoji = TextUtils.concat(emoji.subSequence(0, a), emoji.subSequence(a + 2, emoji.length())); + length -= 2; + a--; + } else if (emoji.charAt(a) == 0xfe0f) { + emoji = TextUtils.concat(emoji.subSequence(0, a), emoji.subSequence(a + 1, emoji.length())); + length--; + a--; + } + } + ArrayList newStickers = allStickers != null ? allStickers.get(emoji.toString()) : null; + if (newStickers != null && !newStickers.isEmpty()) { + emojiStickersArray.addAll(newStickers); + for (int a = 0, size = newStickers.size(); a < size; a++) { + TLRPC.Document document = newStickers.get(a); + emojiStickersMap.put(document.id, document); + } + emojiArrays.add(emojiStickersArray); + } + } + if (allStickers != null && !allStickers.isEmpty() && query.length() > 1) { + MediaDataController.getInstance(currentAccount).getEmojiSuggestions(lastSearchKeyboardLanguage, query, false, (param, alias) -> { + boolean added = false; + for (int a = 0, size = param.size(); a < size; a++) { + String emoji = param.get(a).emoji; + ArrayList newStickers = allStickers != null ? 
allStickers.get(emoji) : null; + if (newStickers != null && !newStickers.isEmpty()) { + if (!emojiStickers.containsKey(newStickers)) { + emojiStickers.put(newStickers, emoji); + emojiArrays.add(newStickers); + } + } + } + next.run(); + }, false); + } + }, + next -> AndroidUtilities.runOnUIThread(() -> { if (clearSearchRunnable != null) { AndroidUtilities.cancelRunOnUIThread(clearSearchRunnable); clearSearchRunnable = null; @@ -1421,101 +1636,61 @@ public void search(String query) { return; } searched = true; - switchGrids(true); - if (searchBox != null && searchBox.clearDrawable != null) { - searchBox.clearDrawable.stopAnimation(); + switchGrids(true, liftUp); + if (searchBox != null) { + searchBox.showProgress(false); } if (searchResult == null) { searchResult = new ArrayList<>(); } else { searchResult.clear(); } + if (stickersSearchResult == null) { + stickersSearchResult = new ArrayList<>(); + } else { + stickersSearchResult.clear(); + } emojiSearchGridView.scrollToPosition(0); - searched = true; if (type == TYPE_REACTIONS || type == TYPE_SET_DEFAULT_REACTION) { - TLRPC.TL_availableReaction reaction = availableReactions.get(query); - if (reaction != null) { - searchResult.add(ReactionsLayoutInBubble.VisibleReaction.fromEmojicon(reaction)); + if (!reactions.isEmpty()) { + searchResult.addAll(reactions); + } else { + TLRPC.TL_availableReaction reaction = availableReactions.get(query); + if (reaction != null) { + searchResult.add(ReactionsLayoutInBubble.VisibleReaction.fromEmojicon(reaction)); + } } } - for (int i = 0; i < documentIds.size(); ++i) { - searchResult.add(ReactionsLayoutInBubble.VisibleReaction.fromCustomEmoji(documentIds.get(i))); + for (long documentId : documentIds) { + searchResult.add(ReactionsLayoutInBubble.VisibleReaction.fromCustomEmoji(documentId)); + } + for (ArrayList array : emojiArrays) { + stickersSearchResult.addAll(array); } searchAdapter.updateRows(!firstSearch); - }); - } else { - MediaDataController.getInstance(currentAccount).getEmojiSuggestions( - lastSearchKeyboardLanguage, - query, - false, - (result, alias) -> { - if (clearSearchRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(clearSearchRunnable); - clearSearchRunnable = null; - } - if (query != lastQuery) { - return; - } - searched = true; - switchGrids(true); - if (searchBox != null && searchBox.clearDrawable != null) { - searchBox.clearDrawable.stopAnimation(); - } - if (searchResult == null) { - searchResult = new ArrayList<>(); - } else { - searchResult.clear(); - } - for (int i = 0; i < result.size(); ++i) { - try { - if (result.get(i).emoji.startsWith("animated_")) { - documentIds.add(Long.parseLong(result.get(i).emoji.substring(9))); - } else { - if (type == TYPE_REACTIONS || type == TYPE_SET_DEFAULT_REACTION) { - TLRPC.TL_availableReaction reaction = availableReactions.get(result.get(i).emoji); - if (reaction != null) { - searchResult.add(ReactionsLayoutInBubble.VisibleReaction.fromEmojicon(reaction)); - } - } - } - } catch (Exception ignore) {} - } - emojiSearchGridView.scrollToPosition(0); - searched = true; - for (int i = 0; i < documentIds.size(); ++i) { - searchResult.add(ReactionsLayoutInBubble.VisibleReaction.fromCustomEmoji(documentIds.get(i))); - } - searchAdapter.updateRows(!firstSearch); - }, - null, - true, - type == TYPE_TOPIC_ICON, - 30 - ); - } - }); - }, 425); - } - updateSearchBox(); - - if (searchBox != null && searchBox.clear != null) { - boolean showed = searchBox.clear.getAlpha() != 0; - if (searching != showed) { - searchBox.clear.animate() - .alpha(searching ? 
1.0f : 0.0f) - .setDuration(150) - .scaleX(searching ? 1.0f : 0.1f) - .scaleY(searching ? 1.0f : 0.1f) - .start(); + }) + ); + }, delay ? 425 : 0); + if (searchBox != null) { + searchBox.showProgress(true); + searchBox.toggleClear(liftUp); } } + updateSearchBox(); } private class SearchAdapter extends RecyclerListView.SelectionAdapter { - public int VIEW_TYPE_SEARCH = 7; - public int VIEW_TYPE_EMOJI = 3; - public int VIEW_TYPE_REACTION = 4; + public final static int VIEW_TYPE_SEARCH = 7; + public final static int VIEW_TYPE_EMOJI = 3; + public final static int VIEW_TYPE_REACTION = 4; + public final static int VIEW_TYPE_STICKER = 5; + public final static int VIEW_TYPE_HEADER = 6; + + int stickersStartRow; + int emojiStartRow; + int emojiHeaderRow = -1; + int stickersHeaderRow = -1; @Override public boolean isEnabled(RecyclerView.ViewHolder holder) { @@ -1526,7 +1701,9 @@ public boolean isEnabled(RecyclerView.ViewHolder holder) { @Override public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { View view; - if (viewType == VIEW_TYPE_SEARCH) { + if (viewType == VIEW_TYPE_HEADER) { + view = new HeaderView(getContext()); + } else if (viewType == VIEW_TYPE_SEARCH) { view = new View(getContext()) { @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { @@ -1537,7 +1714,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { } else { view = new ImageViewEmoji(getContext()); } - if (showAnimator != null && showAnimator.isRunning()) { + if (enterAnimationInProgress()) { view.setScaleX(0); view.setScaleY(0); } @@ -1546,16 +1723,44 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { @Override public int getItemViewType(int position) { - if (searchResult == null || position < 0 || position >= searchResult.size() || searchResult.get(position).emojicon == null) { + if (position == emojiHeaderRow || position == stickersHeaderRow) { + return VIEW_TYPE_HEADER; + } + if (position > stickersStartRow && position - stickersStartRow - 1 < stickersSearchResult.size()) { + return VIEW_TYPE_STICKER; + } + if (searchResult == null) { return VIEW_TYPE_EMOJI; - } else { - return VIEW_TYPE_REACTION; } + if (position > emojiStartRow && position - emojiStartRow - 1 < searchResult.size()) { + if (searchResult.get(position - emojiStartRow - 1 ).documentId != 0) { + return VIEW_TYPE_EMOJI; + } + } + return VIEW_TYPE_REACTION; } @Override public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int position) { - if (holder.getItemViewType() == VIEW_TYPE_REACTION) { + if (holder.getItemViewType() == VIEW_TYPE_HEADER) { + HeaderView header = (HeaderView) holder.itemView; + if (position == emojiHeaderRow) { + header.setText(LocaleController.getString("Emoji", R.string.Emoji), false); + } else { + header.setText(LocaleController.getString("AccDescrStickers", R.string.AccDescrStickers), false); + } + header.closeIcon.setVisibility(View.GONE); + } else if (holder.getItemViewType() == VIEW_TYPE_STICKER) { + int p = position - stickersStartRow - 1; + TLRPC.Document document = stickersSearchResult.get(p); + ImageViewEmoji imageView = (ImageViewEmoji) holder.itemView; + imageView.createImageReceiver(emojiSearchGridView); + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(document, Theme.key_windowBackgroundWhiteGrayIcon, 0.2f); + imageView.imageReceiver.setImage(ImageLocation.getForDocument(document), "100_100_firstframe", null, null, svgThumb, 0, "tgs", document, 0); + imageView.isStaticIcon = true; + 
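// The search adapter above keeps optional header rows plus two sections (custom emoji and
// stickers) in one flat list and answers getItemViewType() from remembered start offsets.
// Below is a hedged, simplified sketch of that bookkeeping; the real adapter's position
// arithmetic differs slightly and all names here are illustrative.
final class SectionLayout {
    static final int TYPE_HEADER = 6;
    static final int TYPE_EMOJI = 3;
    static final int TYPE_STICKER = 5;

    int emojiHeaderRow = -1, stickersHeaderRow = -1;
    int emojiStartRow, stickersStartRow;
    int emojiCount, stickerCount, totalCount;

    void update(int emojis, int stickers, boolean withHeaders) {
        int count = 0;
        emojiHeaderRow = stickersHeaderRow = -1;
        if (withHeaders && emojis > 0) emojiHeaderRow = count++;
        emojiStartRow = count;              // emoji items fill [emojiStartRow, emojiStartRow + emojiCount)
        count += (emojiCount = emojis);
        if (withHeaders && stickers > 0) stickersHeaderRow = count++;
        stickersStartRow = count;           // sticker items follow the same pattern
        count += (stickerCount = stickers);
        totalCount = count;
    }

    int viewType(int position) {
        if (position == emojiHeaderRow || position == stickersHeaderRow) return TYPE_HEADER;
        if (position >= stickersStartRow && position < stickersStartRow + stickerCount) return TYPE_STICKER;
        return TYPE_EMOJI;
    }
}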
imageView.document = document; + imageView.span = null; + } else if (holder.getItemViewType() == VIEW_TYPE_REACTION) { ImageViewEmoji imageView = (ImageViewEmoji) holder.itemView; imageView.position = position; if (searchResult == null || position < 0 || position >= searchResult.size()) { @@ -1578,10 +1783,10 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi TLRPC.TL_availableReaction reaction = MediaDataController.getInstance(currentAccount).getReactionsMap().get(currentReaction.emojicon); if (reaction != null) { SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(reaction.activate_animation, Theme.key_windowBackgroundWhiteGrayIcon, 0.2f); - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_REACTIONS)) { imageView.imageReceiver.setImage(ImageLocation.getForDocument(reaction.select_animation), "60_60_firstframe", null, null, svgThumb, 0, "tgs", currentReaction, 0); } else { - imageView.imageReceiver.setImage(ImageLocation.getForDocument(reaction.select_animation), ReactionsUtils.SELECT_ANIMATION_FILTER, null, null, svgThumb, 0, "tgs", currentReaction, 0); + imageView.imageReceiver.setImage(ImageLocation.getForDocument(reaction.select_animation), ReactionsUtils.SELECT_ANIMATION_FILTER, ImageLocation.getForDocument(reaction.select_animation), "30_30_firstframe", null, null, svgThumb, 0, "tgs", currentReaction, 0); } MediaDataController.getInstance(currentAccount).preloadImage(imageView.preloadEffectImageReceiver, ImageLocation.getForDocument(reaction.around_animation), ReactionsEffectOverlay.getFilterForAroundAnimation()); } else { @@ -1605,16 +1810,12 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi // if (drawable == null) { drawable = AnimatedEmojiDrawable.make(currentAccount, getCacheType(), imageView.span.getDocumentId()); - drawable.addView(emojiSearchGridView); emojiSearchGridView.animatedEmojiDrawables.put(imageView.span.getDocumentId(), drawable); } imageView.setDrawable(drawable); - if (!UserConfig.getInstance(currentAccount).isPremium()) { - if (imageView.premiumLockIconView == null) { - imageView.premiumLockIconView = new PremiumLockIconView(getContext(), PremiumLockIconView.TYPE_STICKERS_PREMIUM_LOCKED); - imageView.addView(imageView.premiumLockIconView, LayoutHelper.createFrame(12, 12, Gravity.RIGHT | Gravity.BOTTOM)); - } + if (!UserConfig.getInstance(currentAccount).isPremium() && type != TYPE_AVATAR_CONSTRUCTOR && type != TYPE_TOPIC_ICON) { + imageView.createPremiumLockView(); imageView.premiumLockIconView.setVisibility(View.VISIBLE); } } @@ -1634,7 +1835,6 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi AnimatedEmojiDrawable drawable = emojiSearchGridView.animatedEmojiDrawables.get(imageView.span.getDocumentId()); if (drawable == null) { drawable = AnimatedEmojiDrawable.make(currentAccount, getCacheType(), imageView.span.getDocumentId()); - drawable.addView(emojiSearchGridView); emojiSearchGridView.animatedEmojiDrawables.put(imageView.span.getDocumentId(), drawable); } imageView.setDrawable(drawable); @@ -1652,7 +1852,7 @@ public int getItemCount() { } public void updateRows(boolean diff) { - if (!isAttached) { + if (!isAttached || type == TYPE_AVATAR_CONSTRUCTOR) { diff = false; } ArrayList prevRowHashCodes = new ArrayList<>(rowHashCodes); @@ -1660,16 +1860,32 @@ public void updateRows(boolean diff) { count = 0; rowHashCodes.clear(); -// count++; -// rowHashCodes.add(132); - if (searchResult != null) { + if (type 
== TYPE_AVATAR_CONSTRUCTOR && !searchResult.isEmpty()) { + emojiHeaderRow = count++; + rowHashCodes.add(1); + } + + emojiStartRow = count; for (int i = 0; i < searchResult.size(); ++i) { count++; rowHashCodes.add(Objects.hash(-4342, searchResult.get(i))); } } + if (stickersSearchResult != null) { + if (type == TYPE_AVATAR_CONSTRUCTOR && !stickersSearchResult.isEmpty()) { + stickersHeaderRow = count++; + rowHashCodes.add(2); + } + + stickersStartRow = count; + for (int i = 0; i < stickersSearchResult.size(); ++i) { + count++; + rowHashCodes.add(Objects.hash(-7453, stickersSearchResult.get(i))); + } + } + if (diff) { DiffUtil.calculateDiff(new DiffUtil.Callback() { @Override @@ -1702,15 +1918,16 @@ public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) { private class Adapter extends RecyclerListView.SelectionAdapter { - public int VIEW_TYPE_HEADER = 0; - public int VIEW_TYPE_REACTION = 1; - public int VIEW_TYPE_IMAGE = 2; - public int VIEW_TYPE_EMOJI = 3; - public int VIEW_TYPE_EXPAND = 4; - public int VIEW_TYPE_BUTTON = 5; - public int VIEW_TYPE_HINT = 6; - public int VIEW_TYPE_SEARCH = 7; - public int VIEW_TYPE_TOPIC_ICON = 8; + public static final int VIEW_TYPE_HEADER = 0; + public static final int VIEW_TYPE_REACTION = 1; + public static final int VIEW_TYPE_IMAGE = 2; + public static final int VIEW_TYPE_EMOJI = 3; + public static final int VIEW_TYPE_EXPAND = 4; + public static final int VIEW_TYPE_BUTTON = 5; + public static final int VIEW_TYPE_HINT = 6; + public static final int VIEW_TYPE_SEARCH = 7; + public static final int VIEW_TYPE_TOPIC_ICON = 8; + public static final int VIEW_TYPE_STICKER = 9; @Override public boolean isEnabled(RecyclerView.ViewHolder holder) { @@ -1769,7 +1986,7 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { } else { view = new ImageViewEmoji(getContext()); } - if (showAnimator != null && showAnimator.isRunning()) { + if (enterAnimationInProgress()) { view.setScaleX(0); view.setScaleY(0); } @@ -1793,7 +2010,11 @@ public int getItemViewType(int position) { } if (position == defaultTopicIconRow) { return VIEW_TYPE_TOPIC_ICON; } else { - return VIEW_TYPE_EMOJI; + if (showStickers) { + return VIEW_TYPE_EMOJI; + } else { + return VIEW_TYPE_EMOJI; + } } } @@ -1824,10 +2045,14 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi } if (position == recentReactionsSectionRow) { header.setText(LocaleController.getString("RecentlyUsed", R.string.RecentlyUsed), false); - header.closeIcon.setVisibility(View.VISIBLE); - header.closeIcon.setOnClickListener((view) -> { - clearRecent(); - }); + if (type == TYPE_AVATAR_CONSTRUCTOR) { + header.closeIcon.setVisibility(View.GONE); + } else { + header.closeIcon.setVisibility(View.VISIBLE); + header.closeIcon.setOnClickListener((view) -> { + clearRecent(); + }); + } return; } header.closeIcon.setVisibility(View.GONE); @@ -1839,7 +2064,7 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi int index = positionToSection.get(position); if (index >= 0) { EmojiView.EmojiPack pack = packs.get(index); - header.setText(pack.set.title, !pack.free && !UserConfig.getInstance(currentAccount).isPremium()); + header.setText(pack.set.title, !pack.free && !UserConfig.getInstance(currentAccount).isPremium() && type != TYPE_AVATAR_CONSTRUCTOR); } else { header.setText(null, false); } @@ -1855,11 +2080,7 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi currentReaction = topReactions.get(index); } - if 
(imageView.imageReceiver == null) { - imageView.imageReceiver = new ImageReceiver(); - imageView.imageReceiver.setLayerNum(7); - imageView.imageReceiver.onAttachedToWindow(); - } + imageView.createImageReceiver(emojiGridView); imageView.reaction = currentReaction; imageView.setViewSelected(selectedReactions.contains(currentReaction), false); imageView.notDraw = false; @@ -1868,10 +2089,10 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi TLRPC.TL_availableReaction reaction = MediaDataController.getInstance(currentAccount).getReactionsMap().get(currentReaction.emojicon); if (reaction != null) { SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(reaction.activate_animation, Theme.key_windowBackgroundWhiteGrayIcon, 0.2f); - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_REACTIONS)) { imageView.imageReceiver.setImage(ImageLocation.getForDocument(reaction.select_animation), "60_60_firstframe", null, null, svgThumb, 0, "tgs", currentReaction, 0); } else { - imageView.imageReceiver.setImage(ImageLocation.getForDocument(reaction.select_animation), ReactionsUtils.SELECT_ANIMATION_FILTER, null, null, svgThumb, 0, "tgs", currentReaction, 0); + imageView.imageReceiver.setImage(ImageLocation.getForDocument(reaction.select_animation), ReactionsUtils.SELECT_ANIMATION_FILTER, ImageLocation.getForDocument(reaction.select_animation), "30_30_firstframe", null, null, svgThumb, 0, "tgs", currentReaction, 0); } MediaDataController.getInstance(currentAccount).preloadImage(imageView.preloadEffectImageReceiver, ImageLocation.getForDocument(reaction.around_animation), ReactionsEffectOverlay.getFilterForAroundAnimation()); } else { @@ -1899,11 +2120,8 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi } imageView.setDrawable(drawable); - if (!UserConfig.getInstance(currentAccount).isPremium()) { - if (imageView.premiumLockIconView == null) { - imageView.premiumLockIconView = new PremiumLockIconView(getContext(), PremiumLockIconView.TYPE_STICKERS_PREMIUM_LOCKED); - imageView.addView(imageView.premiumLockIconView, LayoutHelper.createFrame(12, 12, Gravity.RIGHT | Gravity.BOTTOM)); - } + if (!UserConfig.getInstance(currentAccount).isPremium() && type != TYPE_AVATAR_CONSTRUCTOR && type != TYPE_TOPIC_ICON) { + imageView.createPremiumLockView(); imageView.premiumLockIconView.setVisibility(View.VISIBLE); } } @@ -1913,13 +2131,13 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi EmojiView.EmojiPack pack = i >= 0 && i < packs.size() ? packs.get(i) : null; if (i == -1) { recentExpandButton = button; - final int maxlen = layoutManager.getSpanCount() * RECENT_MAX_LINES; + final int maxlen = SPAN_COUNT_FOR_EMOJI * RECENT_MAX_LINES; button.textView.setText("+" + (recent.size() - maxlen + (includeEmpty ? 
1 : 0) + 1)); } else if (pack != null) { if (recentExpandButton == button) { recentExpandButton = null; } - final int maxlen = layoutManager.getSpanCount() * EXPAND_MAX_LINES; + final int maxlen = SPAN_COUNT_FOR_EMOJI * EXPAND_MAX_LINES; button.textView.setText("+" + (pack.documents.size() - maxlen + 1)); } else { if (recentExpandButton == button) { @@ -1962,36 +2180,74 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi } } else if (viewType == VIEW_TYPE_SEARCH) { + } else if (viewType == VIEW_TYPE_STICKER) { + final int maxlen = SPAN_COUNT_FOR_EMOJI * EXPAND_MAX_LINES; + for (int i = 0; i < positionToSection.size(); ++i) { + int startPosition = positionToSection.keyAt(i); + int index = i - (defaultStatuses.isEmpty() ? 0 : 1); + EmojiView.EmojiPack pack = index >= 0 ? packs.get(index) : null; + if (pack == null) { + continue; + } + int count = pack.expanded ? pack.documents.size() : Math.min(pack.documents.size(), maxlen); + if (position > startPosition && position <= startPosition + 1 + count) { + TLRPC.Document document = pack.documents.get(position - startPosition - 1); + if (document != null) { +// imageView.span = new AnimatedEmojiSpan(document, null); +// imageView.document = document; + } + } + } } else { ImageViewEmoji imageView = (ImageViewEmoji) holder.itemView; imageView.empty = false; imageView.position = position; imageView.setPadding(AndroidUtilities.dp(1), AndroidUtilities.dp(1), AndroidUtilities.dp(1), AndroidUtilities.dp(1)); - final int recentmaxlen = layoutManager.getSpanCount() * RECENT_MAX_LINES; - final int maxlen = layoutManager.getSpanCount() * EXPAND_MAX_LINES; + final int recentmaxlen = SPAN_COUNT_FOR_EMOJI * RECENT_MAX_LINES; + final int maxlen = SPAN_COUNT_FOR_EMOJI * EXPAND_MAX_LINES; int recentSize; - if (type == TYPE_TOPIC_ICON) { + if (type == TYPE_AVATAR_CONSTRUCTOR && showStickers) { + recentSize = recentStickers.size(); + } else if (type == TYPE_AVATAR_CONSTRUCTOR || type == TYPE_TOPIC_ICON) { recentSize = recent.size(); } else { recentSize = recent.size() > recentmaxlen && !recentExpanded ? recentmaxlen : recent.size() + (includeEmpty ? 1 : 0); } boolean selected = false; imageView.setDrawable(null); - if (includeEmpty && position == (searchRow != -1 ? 1 : 0) + (includeHint ? 1 : 0)) { + if (includeEmpty && position == (searchRow != -1 ? 1 : 0) + (longtapHintRow != -1 ? 1 : 0)) { selected = selectedDocumentIds.contains(null); imageView.empty = true; imageView.setPadding(AndroidUtilities.dp(5), AndroidUtilities.dp(5), AndroidUtilities.dp(5), AndroidUtilities.dp(5)); imageView.span = null; imageView.document = null; - } else if (position - (searchRow != -1 ? 1 : 0) - (includeHint ? 1 : 0) < recentSize) { - imageView.span = recent.get(position - (searchRow != -1 ? 1 : 0) - (includeHint ? 1 : 0) - (includeEmpty ? 1 : 0)); - imageView.document = imageView.span == null ? null : imageView.span.document; - selected = imageView.span != null && selectedDocumentIds.contains(imageView.span.getDocumentId()); - } else if (!defaultStatuses.isEmpty() && position - (searchRow != -1 ? 1 : 0) - (includeHint ? 1 : 0) - recentSize - 1 >= 0 && position - (searchRow != -1 ? 1 : 0) - (includeHint ? 1 : 0) - recentSize - 1 < defaultStatuses.size()) { - int index = position - (searchRow != -1 ? 1 : 0) - (includeHint ? 1 : 0) - recentSize - 1; + imageView.isStaticIcon = false; + if (imageView.imageReceiver != null) { + imageView.imageReceiver.clearImage(); + } + } else if (position - (searchRow != -1 ? 1 : 0) - (longtapHintRow != -1 ? 
1 : 0) < recentSize) { + int resentPosition = position - (searchRow != -1 ? 1 : 0) - (longtapHintRow != -1 ? 1 : 0) - (includeEmpty ? 1 : 0); + if (type == TYPE_AVATAR_CONSTRUCTOR && showStickers) { + TLRPC.Document document = recentStickers.get(resentPosition); + imageView.setSticker(document, emojiGridView); + } else { + imageView.span = recent.get(resentPosition); + imageView.document = imageView.span == null ? null : imageView.span.document; + selected = imageView.span != null && selectedDocumentIds.contains(imageView.span.getDocumentId()); + imageView.isStaticIcon = false; + if (imageView.imageReceiver != null) { + imageView.imageReceiver.clearImage(); + } + } + } else if (!defaultStatuses.isEmpty() && position - (searchRow != -1 ? 1 : 0) - (longtapHintRow != -1 ? 1 : 0) - recentSize - 1 >= 0 && position - (searchRow != -1 ? 1 : 0) - (longtapHintRow != -1 ? 1 : 0) - recentSize - 1 < defaultStatuses.size()) { + int index = position - (searchRow != -1 ? 1 : 0) - (longtapHintRow != -1 ? 1 : 0) - recentSize - 1; imageView.span = defaultStatuses.get(index); imageView.document = imageView.span == null ? null : imageView.span.document; selected = imageView.span != null && selectedDocumentIds.contains(imageView.span.getDocumentId()); + imageView.isStaticIcon = false; + if (imageView.imageReceiver != null) { + imageView.imageReceiver.clearImage(); + } } else { for (int i = 0; i < positionToSection.size(); ++i) { int startPosition = positionToSection.keyAt(i); @@ -2004,7 +2260,15 @@ public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int positi if (position > startPosition && position <= startPosition + 1 + count) { TLRPC.Document document = pack.documents.get(position - startPosition - 1); if (document != null) { - imageView.span = new AnimatedEmojiSpan(document, null); + if (showStickers) { + imageView.setSticker(document, emojiSearchGridView); + } else { + imageView.isStaticIcon = false; + if (imageView.imageReceiver != null) { + imageView.imageReceiver.clearImage(); + } + imageView.span = new AnimatedEmojiSpan(document, null); + } imageView.document = document; } } @@ -2032,6 +2296,10 @@ public int getItemCount() { } } + private boolean enterAnimationInProgress() { + return enterAnimationInProgress || (showAnimator != null && showAnimator.isRunning()); + } + private void clearRecent() { if (type == TYPE_REACTIONS && onRecentClearedListener != null) { onRecentClearedListener.onRecentCleared(); @@ -2273,7 +2541,7 @@ public long animateExpandCrossfadeDuration() { return Math.max(300, Math.min(45, count) * 25L); } - public class ImageViewEmoji extends FrameLayout { + public class ImageViewEmoji extends View { public boolean empty = false; public boolean notDraw = false; public int position; @@ -2297,17 +2565,19 @@ public class ImageViewEmoji extends FrameLayout { public int skewIndex; public boolean isStaticIcon; private float selectedProgress; + final AnimatedEmojiSpan.InvalidateHolder invalidateHolder = new AnimatedEmojiSpan.InvalidateHolder() { @Override public void invalidate() { - if (emojiGridView != null) { - emojiGridView.invalidate(); + if (getParent() != null) { + ((View)getParent()).invalidate(); } } }; public ImageViewEmoji(Context context) { super(context); + preloadEffectImageReceiver.ignoreNotifications = true; } @Override @@ -2398,6 +2668,9 @@ public void drawSelected(Canvas canvas, View view) { @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); + if (attached) { + return; + } attached = true; if (drawable instanceof 
AnimatedEmojiDrawable) { ((AnimatedEmojiDrawable) drawable).addView(invalidateHolder); @@ -2412,6 +2685,9 @@ protected void onAttachedToWindow() { @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); + if (!attached) { + return; + } attached = false; if (this.drawable instanceof AnimatedEmojiDrawable) { ((AnimatedEmojiDrawable) this.drawable).removeView(invalidateHolder); @@ -2424,7 +2700,7 @@ protected void onDetachedFromWindow() { public void setDrawable(Drawable drawable) { if (this.drawable != drawable) { - if (this.drawable != null && this.drawable instanceof AnimatedEmojiDrawable) { + if (attached && this.drawable != null && this.drawable instanceof AnimatedEmojiDrawable) { ((AnimatedEmojiDrawable) this.drawable).removeView(invalidateHolder); } this.drawable = drawable; @@ -2434,11 +2710,47 @@ public void setDrawable(Drawable drawable) { } } + + public void setSticker(TLRPC.Document document, View parent) { + this.document = document; + createImageReceiver(parent); + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(document, Theme.key_windowBackgroundWhiteGrayIcon, 0.2f); + imageReceiver.setImage(ImageLocation.getForDocument(document), "100_100_firstframe", null, null, svgThumb, 0, "tgs", document, 0); + isStaticIcon = true; + span = null; + } + + public void createImageReceiver(View parent) { + if (imageReceiver == null) { + imageReceiver = new ImageReceiver(parent); + imageReceiver.setLayerNum(7); + if (attached) { + imageReceiver.onAttachedToWindow(); + } + imageReceiver.setAspectFit(true); + } + } + + @Override + public void invalidate() { + if (getParent() != null) { + ((View) getParent()).invalidate(); + } + } + + public void createPremiumLockView() { + if (premiumLockIconView == null) { + premiumLockIconView = new PremiumLockIconView(getContext(), PremiumLockIconView.TYPE_STICKERS_PREMIUM_LOCKED); + int measureSpec = MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(12),MeasureSpec.EXACTLY); + premiumLockIconView.measure(measureSpec, measureSpec); + premiumLockIconView.layout(0, 0, premiumLockIconView.getMeasuredWidth(), premiumLockIconView.getMeasuredHeight()); + } + } } public void onEmojiClick(View view, AnimatedEmojiSpan span) { incrementHintUse(); - if (span == null) { + if (span == null || type == TYPE_EMOJI_STATUS && selectedDocumentIds.contains(span.documentId)) { onEmojiSelected(view, null, null, null); } else { TLRPC.TL_emojiStatus status = new TLRPC.TL_emojiStatus(); @@ -2492,6 +2804,9 @@ public void preload(int type, int account) { MediaDataController.getInstance(account).fetchEmojiStatuses(0, true); } else if (type == TYPE_TOPIC_ICON) { MediaDataController.getInstance(account).checkDefaultTopicIcons(); + } else if (type == TYPE_AVATAR_CONSTRUCTOR) { + MediaDataController.getInstance(currentAccount).loadRecents(MediaDataController.TYPE_IMAGE, false, true, false); + MediaDataController.getInstance(account).checkStickers(MediaDataController.TYPE_IMAGE); } MediaDataController.getInstance(account).getStickerSet(new TLRPC.TL_inputStickerSetEmojiDefaultStatuses(), false); } @@ -2505,25 +2820,31 @@ public static void preload(int account) { MediaDataController.getInstance(account).checkReactions(); MediaDataController.getInstance(account).getStickerSet(new TLRPC.TL_inputStickerSetEmojiDefaultStatuses(), false); MediaDataController.getInstance(account).checkDefaultTopicIcons(); + StickerCategoriesListView.preload(account, StickerCategoriesListView.CategoriesType.STATUS); } private boolean defaultSetLoading = false; - private void 
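// ImageViewEmoji above becomes a plain View with an `attached` flag guarding
// onAttachedToWindow()/onDetachedFromWindow(), and it creates its ImageReceiver and
// premium lock only when a bind actually needs them. A minimal sketch of that pattern,
// with a placeholder object standing in for the heavy per-cell resource; all names
// here are illustrative.
import android.content.Context;
import android.view.View;

class LazyCellView extends View {
    private boolean attached;
    private Object heavyResource; // placeholder for an image receiver / drawable listener

    LazyCellView(Context context) {
        super(context);
    }

    void ensureResource() {
        if (heavyResource == null) {
            heavyResource = new Object(); // created on first bind, not in the constructor
        }
    }

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        if (attached) {
            return; // guard: never register listeners twice
        }
        attached = true;
        // register listeners / forward onAttachedToWindow() to the resource here
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        if (!attached) {
            return; // guard: never unregister what was never registered
        }
        attached = false;
        // unregister listeners / forward onDetachedFromWindow() here
    }
}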
updateRows(boolean updateEmojipacks, boolean diff) { + + private void updateRows(boolean updateEmojipacks, boolean animated) { + updateRows(updateEmojipacks, animated, true); + } + + private void updateRows(boolean updateEmojipacks, boolean animated, boolean diff) { if (!animationsEnabled) { - diff = false; + animated = false; } - MediaDataController mediaDataController = MediaDataController.getInstance(UserConfig.selectedAccount); + MediaDataController mediaDataController = MediaDataController.getInstance(currentAccount); if (mediaDataController == null) { return; } if (updateEmojipacks || frozenEmojiPacks == null) { - frozenEmojiPacks = new ArrayList<>(mediaDataController.getStickerSets(MediaDataController.TYPE_EMOJIPACKS)); + frozenEmojiPacks = new ArrayList<>(mediaDataController.getStickerSets(showStickers ? MediaDataController.TYPE_IMAGE : MediaDataController.TYPE_EMOJIPACKS)); } ArrayList installedEmojipacks = frozenEmojiPacks; ArrayList featuredEmojiPacks = new ArrayList<>(mediaDataController.getFeaturedEmojiSets()); - ArrayList prevRowHashCodes = new ArrayList<>(rowHashCodes); + ArrayList prevRowHashCodes = new ArrayList<>(rowHashCodes); totalCount = 0; recentReactionsSectionRow = -1; recentReactionsStartRow = -1; @@ -2542,19 +2863,40 @@ private void updateRows(boolean updateEmojipacks, boolean diff) { positionToExpand.clear(); rowHashCodes.clear(); positionToButton.clear(); + stickerSets.clear(); + recentStickers.clear(); - if (!installedEmojipacks.isEmpty()) { + if (!installedEmojipacks.isEmpty() || type == TYPE_AVATAR_CONSTRUCTOR) { searchRow = totalCount++; - rowHashCodes.add(9); + rowHashCodes.add(9L); } else { searchRow = -1; } - if (type == TYPE_TOPIC_ICON) { + if (type == TYPE_AVATAR_CONSTRUCTOR) { + if (showStickers) { + recentStickers.addAll(MediaDataController.getInstance(currentAccount).getRecentStickersNoCopy(MediaDataController.TYPE_IMAGE)); + for (int i = 0; i < recentStickers.size(); ++i) { + rowHashCodes.add(62425L + 13L * recentStickers.get(i).id); + totalCount++; + } + } else { + TLRPC.TL_emojiList emojiList = forUser ? MediaDataController.getInstance(currentAccount). 
profileAvatarConstructorDefault : MediaDataController.getInstance(currentAccount).groupAvatarConstructorDefault;
+                if (emojiList != null && emojiList.document_id != null && !emojiList.document_id.isEmpty()) {
+                    for (int i = 0; i < emojiList.document_id.size(); ++i) {
+                        recent.add(new AnimatedEmojiSpan(emojiList.document_id.get(i), null));
+                    }
+                    for (int i = 0; i < recent.size(); ++i) {
+                        rowHashCodes.add(43223L + 13L * recent.get(i).getDocumentId());
+                        totalCount++;
+                    }
+                }
+            }
+        } else if (type == TYPE_TOPIC_ICON) {
             topicEmojiHeaderRow = totalCount++;
-            rowHashCodes.add(12);
+            rowHashCodes.add(12L);
             defaultTopicIconRow = totalCount++;
-            rowHashCodes.add(7);
+            rowHashCodes.add(7L);
             String packName = UserConfig.getInstance(currentAccount).defaultTopicIcons;
             TLRPC.TL_messages_stickerSet defaultSet = null;
@@ -2570,7 +2912,7 @@ private void updateRows(boolean updateEmojipacks, boolean diff) {
         } else {
             if (includeEmpty) {
                 totalCount++;
-                rowHashCodes.add(2);
+                rowHashCodes.add(2L);
             }
             if (defaultSet.documents != null && !defaultSet.documents.isEmpty()) {
                 for (int i = 0; i < defaultSet.documents.size(); ++i) {
4235 : -3142) + 13L * recentReactions.get(i).hash); } totalCount += recentReactions.size(); recentReactionsEndRow = totalCount; @@ -2640,12 +2981,12 @@ private void updateRows(boolean updateEmojipacks, boolean diff) { } else { if (includeEmpty) { totalCount++; - rowHashCodes.add(2); + rowHashCodes.add(2L); } ArrayList defaultEmojiStatuses = MediaDataController.getInstance(currentAccount).getDefaultEmojiStatuses(); - final int maxrecentlen = layoutManager.getSpanCount() * (RECENT_MAX_LINES + 8); + final int maxrecentlen = SPAN_COUNT_FOR_EMOJI * (RECENT_MAX_LINES + 8); if (defaultSet.documents != null && !defaultSet.documents.isEmpty()) { - for (int i = 0; i < Math.min(layoutManager.getSpanCount() - 1, defaultSet.documents.size()); ++i) { + for (int i = 0; i < Math.min(SPAN_COUNT_FOR_EMOJI - 1, defaultSet.documents.size()); ++i) { recent.add(new AnimatedEmojiSpan(defaultSet.documents.get(i), null)); if (recent.size() + (includeEmpty ? 1 : 0) >= maxrecentlen) { break; @@ -2654,12 +2995,8 @@ private void updateRows(boolean updateEmojipacks, boolean diff) { } if (recentEmojiStatuses != null && !recentEmojiStatuses.isEmpty()) { for (TLRPC.EmojiStatus emojiStatus : recentEmojiStatuses) { - long did; - if (emojiStatus instanceof TLRPC.TL_emojiStatus) { - did = ((TLRPC.TL_emojiStatus) emojiStatus).document_id; - } else if (emojiStatus instanceof TLRPC.TL_emojiStatusUntil && ((TLRPC.TL_emojiStatusUntil) emojiStatus).until > (int) (System.currentTimeMillis() / 1000)) { - did = ((TLRPC.TL_emojiStatusUntil) emojiStatus).document_id; - } else { + Long did = UserObject.getEmojiStatusDocumentId(emojiStatus); + if (did == null) { continue; } boolean foundDuplicate = false; @@ -2679,12 +3016,8 @@ private void updateRows(boolean updateEmojipacks, boolean diff) { } if (defaultEmojiStatuses != null && !defaultEmojiStatuses.isEmpty()) { for (TLRPC.EmojiStatus emojiStatus : defaultEmojiStatuses) { - long did; - if (emojiStatus instanceof TLRPC.TL_emojiStatus) { - did = ((TLRPC.TL_emojiStatus) emojiStatus).document_id; - } else if (emojiStatus instanceof TLRPC.TL_emojiStatusUntil && ((TLRPC.TL_emojiStatusUntil) emojiStatus).until > (int) (System.currentTimeMillis() / 1000)) { - did = ((TLRPC.TL_emojiStatusUntil) emojiStatus).document_id; - } else { + Long did = UserObject.getEmojiStatusDocumentId(emojiStatus); + if (did == null) { continue; } boolean foundDuplicate = false; @@ -2703,14 +3036,14 @@ private void updateRows(boolean updateEmojipacks, boolean diff) { } } - final int maxlen = layoutManager.getSpanCount() * RECENT_MAX_LINES; + final int maxlen = SPAN_COUNT_FOR_EMOJI * RECENT_MAX_LINES; int len = maxlen - (includeEmpty ? 1 : 0); if (recent.size() > len && !recentExpanded) { for (int i = 0; i < len - 1; ++i) { - rowHashCodes.add(Objects.hash(43223, recent.get(i).getDocumentId())); + rowHashCodes.add(43223 + 13L * recent.get(i).getDocumentId()); totalCount++; } - rowHashCodes.add(Objects.hash(-5531, -1, (recent.size() - maxlen + (includeEmpty ? 1 : 0) + 1))); + rowHashCodes.add(-5531 + 13L * (recent.size() - maxlen + (includeEmpty ? 1 : 0) + 1)); if (recentExpandButton != null) { recentExpandButton.textView.setText("+" + (recent.size() - maxlen + (includeEmpty ? 
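// Throughout updateRows() the row identities change from Objects.hash(...) ints to salted
// 64-bit values such as 43223L + 13L * documentId. Because 13 is odd, id -> 13 * id is a
// bijection on 64-bit longs, so distinct ids never collide within a salt the way a 32-bit
// Objects.hash(...) can, and no boxing is involved. A small comparison sketch; the salt
// and the sample id are just examples.
import java.util.Objects;

public final class RowHash {

    static int intIdentity(long documentId) {
        return Objects.hash(43223, documentId);   // 32-bit result, boxes its arguments
    }

    static long longIdentity(long documentId) {
        return 43223L + 13L * documentId;         // 64-bit, distinct for distinct ids
    }

    public static void main(String[] args) {
        long id = 5368741906L;
        System.out.println(intIdentity(id) + " vs " + longIdentity(id));
    }
}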
1 : 0) + 1)); } @@ -2718,7 +3051,7 @@ private void updateRows(boolean updateEmojipacks, boolean diff) { totalCount++; } else { for (int i = 0; i < recent.size(); ++i) { - rowHashCodes.add(Objects.hash(43223, recent.get(i).getDocumentId())); + rowHashCodes.add(43223 + 13L * recent.get(i).getDocumentId()); totalCount++; } } @@ -2727,11 +3060,11 @@ private void updateRows(boolean updateEmojipacks, boolean diff) { if (installedEmojipacks != null) { for (int i = 0, j = 0; i < installedEmojipacks.size(); ++i) { TLRPC.TL_messages_stickerSet set = installedEmojipacks.get(i); - if (set != null && set.set != null && set.set.emojis && !installedEmojiSets.contains(set.set.id)) { + if (set != null && set.set != null && (set.set.emojis || showStickers) && !installedEmojiSets.contains(set.set.id)) { positionToSection.put(totalCount, packs.size()); sectionToPosition.put(packs.size(), totalCount); totalCount++; - rowHashCodes.add(Objects.hash(9211, set.set.id)); + rowHashCodes.add(9211 + 13L * set.set.id); EmojiView.EmojiPack pack = new EmojiView.EmojiPack(); pack.installed = true; @@ -2744,14 +3077,14 @@ private void updateRows(boolean updateEmojipacks, boolean diff) { packs.add(pack); totalCount += pack.documents.size(); for (int k = 0; k < pack.documents.size(); ++k) { - rowHashCodes.add(Objects.hash(3212, pack.documents.get(k).id)); + rowHashCodes.add(3212 + 13L * pack.documents.get(k).id); } j++; } } } - if (featuredEmojiPacks != null) { - final int maxlen = layoutManager.getSpanCount() * EXPAND_MAX_LINES; + if (featuredEmojiPacks != null && !showStickers) { + final int maxlen = SPAN_COUNT_FOR_EMOJI * EXPAND_MAX_LINES; for (int i = 0; i < featuredEmojiPacks.size(); ++i) { TLRPC.StickerSetCovered set1 = featuredEmojiPacks.get(i); TLRPC.StickerSet set = set1.set; @@ -2783,7 +3116,7 @@ private void updateRows(boolean updateEmojipacks, boolean diff) { positionToSection.put(totalCount, packs.size()); sectionToPosition.put(packs.size(), totalCount); totalCount++; - rowHashCodes.add(Objects.hash(9211, set.id)); + rowHashCodes.add(9211 + 13L * set.id); EmojiView.EmojiPack pack = new EmojiView.EmojiPack(); pack.installed = installedEmojiSets.contains(set.id); @@ -2797,21 +3130,21 @@ private void updateRows(boolean updateEmojipacks, boolean diff) { if (pack.documents.size() > maxlen && !pack.expanded) { totalCount += maxlen; for (int k = 0; k < maxlen - 1; ++k) { - rowHashCodes.add(Objects.hash(3212, pack.documents.get(k).id)); + rowHashCodes.add(3212 + 13L * pack.documents.get(k).id); } - rowHashCodes.add(Objects.hash(-5531, set.id, (pack.documents.size() - maxlen + 1))); + rowHashCodes.add(-5531 + 13L * set.id + 169L * (pack.documents.size() - maxlen + 1)); positionToExpand.put(totalCount - 1, packs.size()); } else { totalCount += pack.documents.size(); for (int k = 0; k < pack.documents.size(); ++k) { - rowHashCodes.add(Objects.hash(3212, pack.documents.get(k).id)); + rowHashCodes.add(3212 + 13L * pack.documents.get(k).id); } } - if (!pack.installed) { + if (!pack.installed && type != TYPE_AVATAR_CONSTRUCTOR) { positionToButton.put(totalCount, packs.size()); totalCount++; - rowHashCodes.add(Objects.hash(3321, set.id)); + rowHashCodes.add(3321 + 13L * set.id); } packs.add(pack); @@ -2819,39 +3152,41 @@ private void updateRows(boolean updateEmojipacks, boolean diff) { } } - post(() -> { - emojiTabs.updateEmojiPacks(packs); - }); + emojiTabs.updateEmojiPacks(packs); - if (diff) { + if (animated) { emojiGridView.setItemAnimator(emojiItemAnimator); } else { emojiGridView.setItemAnimator(null); } - 
DiffUtil.calculateDiff(new DiffUtil.Callback() { - @Override - public int getOldListSize() { - return prevRowHashCodes.size(); - } + if (diff) { + DiffUtil.calculateDiff(new DiffUtil.Callback() { + @Override + public int getOldListSize() { + return prevRowHashCodes.size(); + } - @Override - public int getNewListSize() { - return rowHashCodes.size(); - } + @Override + public int getNewListSize() { + return rowHashCodes.size(); + } - @Override - public boolean areItemsTheSame(int oldItemPosition, int newItemPosition) { - return prevRowHashCodes.get(oldItemPosition).equals(rowHashCodes.get(newItemPosition)); - } + @Override + public boolean areItemsTheSame(int oldItemPosition, int newItemPosition) { + return prevRowHashCodes.get(oldItemPosition).equals(rowHashCodes.get(newItemPosition)); + } - @Override - public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) { - return true; - } - }, false).dispatchUpdatesTo(adapter); + @Override + public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) { + return true; + } + }, false).dispatchUpdatesTo(adapter); + } else { + adapter.notifyDataSetChanged(); + } if (!emojiGridView.scrolledByUserOnce) { - emojiGridView.scrollToPosition(1); + emojiGridView.scrollToPosition(0); } } @@ -2863,7 +3198,7 @@ public void expand(int position, View expandButton) { int fromCount, start, toCount; animateExpandFromButtonTranslate = 0; if (index >= 0 && index < packs.size()) { - maxlen = layoutManager.getSpanCount() * EXPAND_MAX_LINES; + maxlen = SPAN_COUNT_FOR_EMOJI * EXPAND_MAX_LINES; EmojiView.EmojiPack pack = packs.get(index); if (pack.expanded) { return; @@ -2880,12 +3215,12 @@ public void expand(int position, View expandButton) { pack.expanded = true; toCount = pack.documents.size(); } else if (index == -1) { - maxlen = layoutManager.getSpanCount() * RECENT_MAX_LINES; + maxlen = SPAN_COUNT_FOR_EMOJI * RECENT_MAX_LINES; if (recentExpanded) { return; } last = false; - start = (searchRow != -1 ? 1 : 0) + (includeHint ? 1 : 0) + (includeEmpty ? 1 : 0); + start = (searchRow != -1 ? 1 : 0) + (longtapHintRow != -1 ? 1 : 0) + (includeEmpty ? 1 : 0); fromCount = recentExpanded ? recent.size() : Math.min(maxlen - (includeEmpty ? 1 : 0) - 2, recent.size()); toCount = recent.size(); recentExpanded = true; @@ -2924,7 +3259,7 @@ public void expand(int position, View expandButton) { @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - if (drawBackground && type != TYPE_TOPIC_ICON) { + if (drawBackground && type != TYPE_TOPIC_ICON && type != TYPE_AVATAR_CONSTRUCTOR) { super.onMeasure( MeasureSpec.makeMeasureSpec((int) Math.min(AndroidUtilities.dp(340 - 16), AndroidUtilities.displaySize.x * .95f), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec((int) Math.min(AndroidUtilities.dp(410 - 16 - 64), AndroidUtilities.displaySize.y * .75f), MeasureSpec.AT_MOST) @@ -2935,10 +3270,15 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { } private int getCacheType() { + if (type == TYPE_TOPIC_ICON || type == TYPE_AVATAR_CONSTRUCTOR) { + return AnimatedEmojiDrawable.CACHE_TYPE_ALERT_PREVIEW_STATIC; + } return type == TYPE_EMOJI_STATUS || type == TYPE_SET_DEFAULT_REACTION ? 
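// The adapter diffing above boils down to: keep a flat list of stable per-row hashes,
// rebuild it on every update, and let DiffUtil translate the old-vs-new hash lists into
// minimal notify calls. A hedged, generic sketch of that wiring (RowDiff and its
// parameters are illustrative); areContentsTheSame can return true because the hash
// already encodes everything that would change a row.
import java.util.List;

import androidx.recyclerview.widget.DiffUtil;
import androidx.recyclerview.widget.RecyclerView;

final class RowDiff {
    static void dispatch(List<Long> oldRows, List<Long> newRows, RecyclerView.Adapter<?> adapter) {
        DiffUtil.calculateDiff(new DiffUtil.Callback() {
            @Override public int getOldListSize() { return oldRows.size(); }
            @Override public int getNewListSize() { return newRows.size(); }
            @Override public boolean areItemsTheSame(int oldItemPosition, int newItemPosition) {
                return oldRows.get(oldItemPosition).equals(newRows.get(newItemPosition));
            }
            @Override public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) {
                return true;
            }
        }, false).dispatchUpdatesTo(adapter);
    }
}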
AnimatedEmojiDrawable.CACHE_TYPE_KEYBOARD : AnimatedEmojiDrawable.CACHE_TYPE_ALERT_PREVIEW; } public class EmojiListView extends RecyclerListView { + + public EmojiListView(Context context) { super(context); @@ -2951,26 +3291,12 @@ public EmojiListView(Context context) { SparseArray> viewsGroupedByLines = new SparseArray<>(); ArrayList> unusedArrays = new ArrayList<>(); ArrayList unusedLineDrawables = new ArrayList<>(); - ArrayList lineDrawables = new ArrayList<>(); - ArrayList lineDrawablesTmp = new ArrayList<>(); + ArrayList lineDrawables = new ArrayList<>(); + ArrayList lineDrawablesTmp = new ArrayList<>(); + private boolean invalidated; private LongSparseArray animatedEmojiDrawables = new LongSparseArray<>(); - private AnimatedEmojiSpan[] getAnimatedEmojiSpans() { - AnimatedEmojiSpan[] spans = new AnimatedEmojiSpan[getChildCount()]; - for (int i = 0; i < getChildCount(); ++i) { - View child = getChildAt(i); - if (child instanceof ImageViewEmoji) { - spans[i] = ((ImageViewEmoji) child).span; - } - } - return spans; - } - - public void updateEmojiDrawables() { - animatedEmojiDrawables = AnimatedEmojiSpan.update(getCacheType(), this, getAnimatedEmojiSpans(), animatedEmojiDrawables); - } - @Override public boolean drawChild(Canvas canvas, View child, long drawingTime) { // if (child instanceof ImageViewEmoji) { @@ -2989,30 +3315,22 @@ protected boolean canHighlightChildAt(View child, float x, float y) { return super.canHighlightChildAt(child, x, y); } + private int lastChildCount = -1; + @Override - protected void onLayout(boolean changed, int l, int t, int r, int b) { - super.onLayout(changed, l, t, r, b); - if (showAnimator == null || !showAnimator.isRunning()) { - updateEmojiDrawables(); - lastChildCount = getChildCount(); - } + public void setAlpha(float alpha) { + super.setAlpha(alpha); + invalidate(); } - private int lastChildCount = -1; - @Override public void dispatchDraw(Canvas canvas) { if (getVisibility() != View.VISIBLE) { return; } - + invalidated = false; int restoreTo = canvas.getSaveCount(); - if (lastChildCount != getChildCount() && showAnimator != null && !showAnimator.isRunning()) { - updateEmojiDrawables(); - lastChildCount = getChildCount(); - } - if (!selectorRect.isEmpty()) { selectorDrawable.setBounds(selectorRect); canvas.save(); @@ -3107,6 +3425,7 @@ public void dispatchDraw(Canvas canvas) { drawable = unusedLineDrawables.remove(unusedLineDrawables.size() - 1); } else { drawable = new DrawingInBackgroundLine(); + drawable.setLayerNum(7); } drawable.position = position; drawable.onAttachToWindow(); @@ -3119,7 +3438,7 @@ public void dispatchDraw(Canvas canvas) { int w = getMeasuredWidth() - firstView.getLeft() * 2; int h = firstView.getMeasuredHeight(); if (w > 0 && h > 0) { - drawable.draw(canvas, time, w, h, 1f); + drawable.draw(canvas, time, w, h, getAlpha()); } canvas.restore(); } @@ -3139,11 +3458,11 @@ public void dispatchDraw(Canvas canvas) { View child = getChildAt(i); if (child instanceof ImageViewEmoji) { ImageViewEmoji imageViewEmoji = (ImageViewEmoji) child; - if (imageViewEmoji.premiumLockIconView != null) { + if (imageViewEmoji.premiumLockIconView != null && imageViewEmoji.premiumLockIconView.getVisibility() == View.VISIBLE) { canvas.save(); canvas.translate( - (int) (imageViewEmoji.getX() + imageViewEmoji.premiumLockIconView.getX()), - (int) (imageViewEmoji.getY() + imageViewEmoji.premiumLockIconView.getY()) + (int) (imageViewEmoji.getX() + imageViewEmoji.getMeasuredWidth() - imageViewEmoji.premiumLockIconView.getMeasuredWidth()), + (int) 
(imageViewEmoji.getY() + imageViewEmoji.getMeasuredHeight() - imageViewEmoji.premiumLockIconView.getMeasuredHeight()) ); imageViewEmoji.premiumLockIconView.draw(canvas); canvas.restore(); @@ -3182,7 +3501,7 @@ public void draw(Canvas canvas, long time, int w, int h, float alpha) { skewAlpha = .25f + .75f * skewAlpha; } } - boolean drawInUi = skewAlpha < 1 || isAnimating() || imageViewEmojis.size() <= 4 || SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW || showAnimator != null && showAnimator.isRunning() || SharedConfig.getLiteMode().enabled(); + boolean drawInUi = skewAlpha < 1 || isAnimating() || imageViewEmojis.size() <= 4 || !LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_REACTIONS) || enterAnimationInProgress() || type == TYPE_AVATAR_CONSTRUCTOR; if (!drawInUi) { boolean animatedExpandIn = animateExpandStartTime > 0 && (SystemClock.elapsedRealtime() - animateExpandStartTime) < animateExpandDuration(); for (int i = 0; i < imageViewEmojis.size(); i++) { @@ -3193,7 +3512,6 @@ public void draw(Canvas canvas, long time, int w, int h, float alpha) { } } } -// canvas.drawRect(0,0,w,h,Theme.DEBUG_RED); if (drawInUi) { prepareDraw(System.currentTimeMillis()); drawInUiThread(canvas, alpha); @@ -3203,7 +3521,7 @@ public void draw(Canvas canvas, long time, int w, int h, float alpha) { } } - float[] verts = new float[16]; +// float[] verts = new float[16]; @Override public void drawBitmap(Canvas canvas, Bitmap bitmap, Paint paint) { @@ -3275,7 +3593,7 @@ public void prepareDraw(long time) { imageView.drawableBounds = new Rect(); } imageView.drawableBounds.set(AndroidUtilities.rectTmp2); - imageView.drawable = drawable; + imageView.setDrawable(drawable); drawInBackgroundViews.add(imageView); } else { float scale = 1, alpha = 1; @@ -3296,7 +3614,7 @@ public void prepareDraw(long time) { alpha *= alphaT; } } else { - alpha = imageView.getAlpha(); + alpha *= imageView.getAlpha(); } if (!imageView.isDefaultReaction && !imageView.isStaticIcon) { @@ -3339,7 +3657,7 @@ public void prepareDraw(long time) { int w = imageView.getWidth() - imageView.getPaddingLeft() - imageView.getPaddingRight(); int h = imageView.getHeight() - imageView.getPaddingTop() - imageView.getPaddingBottom(); AndroidUtilities.rectTmp2.set(imageView.getPaddingLeft(), imageView.getPaddingTop(), imageView.getWidth() - imageView.getPaddingRight(), imageView.getHeight() - imageView.getPaddingBottom()); - if (imageView.selected && type != TYPE_TOPIC_ICON) { + if (imageView.selected && type != TYPE_TOPIC_ICON && type != TYPE_AVATAR_CONSTRUCTOR) { AndroidUtilities.rectTmp2.set( (int) Math.round(AndroidUtilities.rectTmp2.centerX() - AndroidUtilities.rectTmp2.width() / 2f * 0.86f), (int) Math.round(AndroidUtilities.rectTmp2.centerY() - AndroidUtilities.rectTmp2.height() / 2f * 0.86f), @@ -3389,7 +3707,7 @@ protected void drawInUiThread(Canvas canvas, float alpha) { float scale = imageView.getScaleX(); if (imageView.pressedProgress != 0 || imageView.selected) { - scale *= 0.8f + 0.2f * (1f - ((imageView.selected && type != TYPE_TOPIC_ICON) ? 0.7f : imageView.pressedProgress)); + scale *= 0.8f + 0.2f * (1f - ((imageView.selected && type != TYPE_TOPIC_ICON && type != TYPE_AVATAR_CONSTRUCTOR) ? 
0.7f : imageView.pressedProgress)); } boolean animatedExpandIn = animateExpandStartTime > 0 && (SystemClock.elapsedRealtime() - animateExpandStartTime) < animateExpandDuration(); boolean animatedExpandInLocal = animatedExpandIn && animateExpandFromPosition >= 0 && animateExpandToPosition >= 0 && animateExpandStartTime > 0; @@ -3406,7 +3724,7 @@ protected void drawInUiThread(Canvas canvas, float alpha) { alpha = alphaT; } } else { - alpha = imageView.getAlpha(); + alpha *= imageView.getAlpha(); } AndroidUtilities.rectTmp2.set((int) imageView.getX() + imageView.getPaddingLeft(), imageView.getPaddingTop(), (int) imageView.getX() + imageView.getWidth() - imageView.getPaddingRight(), imageView.getHeight() - imageView.getPaddingBottom()); @@ -3432,7 +3750,7 @@ protected void drawInUiThread(Canvas canvas, float alpha) { } else if (imageView.imageReceiver != null) { imageView.imageReceiver.setImageCoords(AndroidUtilities.rectTmp2); } - if (imageView.drawable instanceof AnimatedEmojiDrawable) { + if (premiumStarColorFilter != null && imageView.drawable instanceof AnimatedEmojiDrawable) { imageView.drawable.setColorFilter(premiumStarColorFilter); } imageView.skewAlpha = skewAlpha; @@ -3469,12 +3787,12 @@ private void drawImage(Canvas canvas, Drawable drawable, ImageViewEmoji imageVie drawable.setAlpha((int) (255 * alpha)); drawable.draw(canvas); drawable.setColorFilter(premiumStarColorFilter); - if (imageView.premiumLockIconView != null) { - - } } else if ((imageView.isDefaultReaction || imageView.isStaticIcon) && imageView.imageReceiver != null) { + canvas.save(); + canvas.clipRect(imageView.imageReceiver.getImageX(), imageView.imageReceiver.getImageY(), imageView.imageReceiver.getImageX2(), imageView.imageReceiver.getImageY2()); imageView.imageReceiver.setAlpha(alpha); imageView.imageReceiver.draw(canvas); + canvas.restore(); } } @@ -3505,8 +3823,26 @@ protected void onDetachedFromWindow() { if (this == emojiGridView) { bigReactionImageReceiver.onDetachedFromWindow(); } + release(unusedLineDrawables); + release(lineDrawables); + release(lineDrawablesTmp); } + private void release(ArrayList lineDrawables) { + for (int i = 0; i < lineDrawables.size(); i++) { + lineDrawables.get(i).onDetachFromWindow(); + } + lineDrawables.clear(); + } + + @Override + public void invalidate() { + if (invalidated) { + return; + } + invalidated = true; + super.invalidate(); + } } @Override @@ -3517,6 +3853,10 @@ protected void onAttachedToWindow() { NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.stickersDidLoad); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.recentEmojiStatusesUpdate); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.groupStickersDidLoad); + + if (scrimDrawable != null) { + scrimDrawable.setSecondParent(this); + } } @Override @@ -3529,23 +3869,31 @@ protected void onDetachedFromWindow() { NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.recentEmojiStatusesUpdate); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.groupStickersDidLoad); - if (scrimDrawable instanceof AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable) { - ((AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable) scrimDrawable).removeParentView(this); + if (scrimDrawable != null) { + scrimDrawable.setSecondParent(null); } } - private Runnable updateRowsDelayed = () -> updateRows(true, true); + private final Runnable updateRowsDelayed = () -> 
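// EmojiListView above coalesces invalidation: invalidate() becomes a no-op while a redraw
// is already pending, and the pending flag is cleared at the start of dispatchDraw(). A
// self-contained sketch of the same idea; class and field names are illustrative.
import android.content.Context;
import android.graphics.Canvas;
import android.view.View;

class CoalescingView extends View {
    private boolean invalidatePending;

    CoalescingView(Context context) {
        super(context);
    }

    @Override
    public void invalidate() {
        if (invalidatePending) {
            return; // a frame is already scheduled; drop the duplicate request
        }
        invalidatePending = true;
        super.invalidate();
    }

    @Override
    protected void dispatchDraw(Canvas canvas) {
        invalidatePending = false; // from here on, new invalidations schedule the next frame
        super.dispatchDraw(canvas);
    }
}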
NotificationCenter.getInstance(currentAccount).doOnIdle(() -> updateRows(true, true)); @Override public void didReceivedNotification(int id, int account, Object... args) { if (id == NotificationCenter.stickersDidLoad) { - if (((int) args[0]) == MediaDataController.TYPE_EMOJIPACKS) { + if (((int) args[0]) == MediaDataController.TYPE_EMOJIPACKS || (((int) args[0]) == MediaDataController.TYPE_IMAGE && showStickers)) { updateRows(true, true); } } else if (id == NotificationCenter.featuredEmojiDidLoad) { - updateRows(false, true); + NotificationCenter.getGlobalInstance().doOnIdle(() -> { + AndroidUtilities.runOnUIThread(() -> { + updateRows(false, true); + }, 120); + }); } else if (id == NotificationCenter.recentEmojiStatusesUpdate) { - updateRows(false, true); + NotificationCenter.getGlobalInstance().doOnIdle(() -> { + AndroidUtilities.runOnUIThread(() -> { + updateRows(false, true); + }, 120); + }); } else if (id == NotificationCenter.groupStickersDidLoad) { AndroidUtilities.cancelRunOnUIThread(updateRowsDelayed); AndroidUtilities.runOnUIThread(updateRowsDelayed, 100); @@ -3557,6 +3905,7 @@ public void didReceivedNotification(int id, int account, Object... args) { final long showDuration = (long) (800 * durationScale); private ValueAnimator showAnimator; private ValueAnimator hideAnimator; + private int animationIndex = -1; public void onShow(Runnable dismiss) { if (listStateId != null) { @@ -3584,7 +3933,7 @@ public void onShow(Runnable dismiss) { hideAnimator.cancel(); hideAnimator = null; } - boolean animated = type != TYPE_TOPIC_ICON; + boolean animated = type != TYPE_TOPIC_ICON && type != TYPE_AVATAR_CONSTRUCTOR; if (animated) { showAnimator = ValueAnimator.ofFloat(0, 1); @@ -3595,6 +3944,9 @@ public void onShow(Runnable dismiss) { showAnimator.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.startAllHeavyOperations, 512); + NotificationCenter.getGlobalInstance().onAnimationFinish(animationIndex); + AndroidUtilities.runOnUIThread(NotificationCenter.getGlobalInstance()::runDelayedNotifications); checkScroll(); updateShow(1); for (int i = 0; i < emojiGridView.getChildCount(); ++i) { @@ -3608,12 +3960,12 @@ public void onAnimationEnd(Animator animation) { child.setScaleY(1); } emojiTabs.contentView.invalidate(); - - emojiGridView.updateEmojiDrawables(); } }); updateShow(0); showAnimator.setDuration(showDuration); + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.stopAllHeavyOperations, 512); + animationIndex = NotificationCenter.getGlobalInstance().setAnimationInProgress(animationIndex, null); showAnimator.start(); } else { checkScroll(); @@ -3621,15 +3973,22 @@ public void onAnimationEnd(Animator animation) { } } + private static boolean firstOpen = true; + private class SearchBox extends FrameLayout { private FrameLayout box; private ImageView search; private ImageView clear; - private CloseProgressDrawable2 clearDrawable; + private FrameLayout inputBox; + private View inputBoxGradient; + private SearchStateDrawable searchStateDrawable; private EditTextCaption input; + private StickerCategoriesListView categoriesListView; + public SearchBox(Context context) { super(context); + setClickable(true); setBackgroundColor(Theme.getColor(Theme.key_actionBarDefaultSubmenuBackground, resourcesProvider)); box = new FrameLayout(context); @@ -3643,16 +4002,34 @@ public void getOutline(View view, Outline outline) { } }); } - addView(box, 
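// onShow() above brackets the enter animation with "stop heavy operations" / "start heavy
// operations" notifications so expensive work does not compete with the animation. A hedged
// sketch of that bracket, with the scheduler abstracted behind a plain interface since
// NotificationCenter is internal to the app; all names here are illustrative.
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ValueAnimator;

final class AnimationBracket {
    interface HeavyWork {
        void pause();
        void resume();
    }

    static ValueAnimator create(HeavyWork work, Runnable onFrame) {
        ValueAnimator animator = ValueAnimator.ofFloat(0f, 1f);
        animator.addUpdateListener(a -> onFrame.run());
        animator.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationStart(Animator animation) {
                work.pause();  // e.g. stop heavy decoding before the first frame
            }

            @Override
            public void onAnimationEnd(Animator animation) {
                work.resume(); // resume heavy operations once the UI has settled
            }
        });
        return animator;
    }
}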
LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.TOP | Gravity.FILL_HORIZONTAL, 8, 4 + 8, 8, 8)); + addView(box, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.TOP | Gravity.FILL_HORIZONTAL, 8, 8 + 4, 8, 8)); search = new ImageView(context); search.setScaleType(ImageView.ScaleType.CENTER); - search.setImageResource(R.drawable.smiles_inputsearch); - search.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_emojiSearchIcon, resourcesProvider), PorterDuff.Mode.MULTIPLY)); + searchStateDrawable = new SearchStateDrawable(); + searchStateDrawable.setIconState(SearchStateDrawable.State.STATE_SEARCH, false); + searchStateDrawable.setColor(Theme.getColor(Theme.key_chat_emojiSearchIcon, resourcesProvider)); + search.setImageDrawable(searchStateDrawable); + search.setOnClickListener(e -> { + if (searchStateDrawable.getIconState() == SearchStateDrawable.State.STATE_BACK) { + input.setText(""); + search(null, true, false); + if (categoriesListView != null) { + categoriesListView.selectCategory(null); + categoriesListView.updateCategoriesShown(true, true); + categoriesListView.scrollToStart(); + } + input.clearAnimation(); + input.animate().translationX(0).setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT).start(); + showInputBoxGradient(false); + } + }); box.addView(search, LayoutHelper.createFrame(36, 36, Gravity.LEFT | Gravity.TOP)); - input = new EditTextCaption(context, resourcesProvider) { + inputBox = new FrameLayout(context); + box.addView(inputBox, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.FILL, 36, 0, 0, 0)); + input = new EditTextCaption(context, resourcesProvider) { @Override protected void onFocusChanged(boolean focused, int direction, Rect previouslyFocusedRect) { if (focused) { @@ -3667,30 +4044,28 @@ protected void onFocusChanged(boolean focused, int direction, Rect previouslyFoc }; input.addTextChangedListener(new TextWatcher() { @Override - public void beforeTextChanged(CharSequence s, int start, int count, int after) { - - } - + public void beforeTextChanged(CharSequence s, int start, int count, int after) {} @Override - public void onTextChanged(CharSequence s, int start, int before, int count) { - - } - + public void onTextChanged(CharSequence s, int start, int before, int count) {} @Override public void afterTextChanged(Editable s) { - search(input.getText() == null || AndroidUtilities.trim(input.getText(), null).length() == 0 ? null : input.getText().toString()); + final String query = input.getText() == null || AndroidUtilities.trim(input.getText(), null).length() == 0 ? 
null : input.getText().toString(); + search(query); + if (categoriesListView != null) { + categoriesListView.selectCategory(null); + categoriesListView.updateCategoriesShown(TextUtils.isEmpty(query), true); + } + if (input != null) { + input.clearAnimation(); + input.animate().translationX(0).setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT).start(); + } + showInputBoxGradient(false); } }); input.setBackground(null); input.setPadding(0, 0, AndroidUtilities.dp(4), 0); input.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); - if (type == TYPE_EMOJI_STATUS) { - input.setHint(LocaleController.getString(R.string.SearchEmojiHint)); - } else if (type == TYPE_REACTIONS || type == TYPE_SET_DEFAULT_REACTION) { - input.setHint(LocaleController.getString(R.string.SearchReactionsHint)); - } else { - input.setHint(LocaleController.getString(R.string.SearchIconsHint)); - } + input.setHint(LocaleController.getString("Search", R.string.Search)); input.setHintTextColor(Theme.getColor(Theme.key_chat_emojiSearchIcon, resourcesProvider)); input.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText, resourcesProvider)); @@ -3702,36 +4077,160 @@ public void afterTextChanged(Editable s) { input.setMaxLines(1); input.setSingleLine(true); input.setLines(1); - box.addView(input, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.FILL, 36, -1, 32, 0)); + input.setTranslationY(AndroidUtilities.dp(-1)); + inputBox.addView(input, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.FILL, 0, 0, 32, 0)); + + inputBoxGradient = new View(context); + Drawable gradientDrawable = context.getResources().getDrawable(R.drawable.gradient_right).mutate(); + gradientDrawable.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_emojiPanelBackground, resourcesProvider), PorterDuff.Mode.MULTIPLY)); + inputBoxGradient.setBackground(gradientDrawable); + inputBoxGradient.setAlpha(0f); + inputBox.addView(inputBoxGradient, LayoutHelper.createFrame(18, LayoutHelper.MATCH_PARENT, Gravity.LEFT)); + + setOnClickListener(e -> { + onInputFocus(); + input.requestFocus(); + scrollToPosition(0, 0); + }); clear = new ImageView(context); clear.setScaleType(ImageView.ScaleType.CENTER); - clear.setImageDrawable(clearDrawable = new CloseProgressDrawable2(1.25f) { + clear.setImageDrawable(new CloseProgressDrawable2(1.25f) { + { setSide(AndroidUtilities.dp(7)); } @Override protected int getCurrentColor() { return Theme.getColor(Theme.key_chat_emojiSearchIcon, resourcesProvider); } }); - clearDrawable.setSide(AndroidUtilities.dp(7)); - clear.setScaleX(0.1f); - clear.setScaleY(0.1f); - clear.setAlpha(0.0f); - box.addView(clear, LayoutHelper.createFrame(36, 36, Gravity.RIGHT | Gravity.TOP)); - clear.setOnClickListener(v -> { + clear.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_listSelector, resourcesProvider), Theme.RIPPLE_MASK_CIRCLE_20DP, AndroidUtilities.dp(15))); + clear.setAlpha(0f); + clear.setOnClickListener(e -> { input.setText(""); - search(null); + search(null, true, false); + if (categoriesListView != null) { + categoriesListView.selectCategory(null); + categoriesListView.updateCategoriesShown(true, true); + } + input.clearAnimation(); + input.animate().translationX(0).setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT).start(); + showInputBoxGradient(false); }); + box.addView(clear, LayoutHelper.createFrame(36, 36, Gravity.RIGHT | Gravity.TOP)); - setOnClickListener(e -> { - onInputFocus(); - input.requestFocus(); - 
scrollToPosition(0, 0); + if (firstOpen) { + AndroidUtilities.runOnUIThread(this::createCategoriesListView, 450); + firstOpen = false; + } else { + createCategoriesListView(); + } + } + + private void createCategoriesListView() { + if (categoriesListView != null || getContext() == null) { + return; + } + if (type != TYPE_REACTIONS && type != TYPE_SET_DEFAULT_REACTION && type != TYPE_EMOJI_STATUS && type != TYPE_AVATAR_CONSTRUCTOR) { + return; + } + + int categoriesType; + switch (type) { + case TYPE_EMOJI_STATUS: + categoriesType = StickerCategoriesListView.CategoriesType.STATUS; + break; + case TYPE_AVATAR_CONSTRUCTOR: + categoriesType = StickerCategoriesListView.CategoriesType.PROFILE_PHOTOS; + break; + case TYPE_REACTIONS: + default: + categoriesType = StickerCategoriesListView.CategoriesType.DEFAULT; + break; + } + categoriesListView = new StickerCategoriesListView(getContext(), categoriesType, resourcesProvider) { + @Override + public void selectCategory(int categoryIndex) { + super.selectCategory(categoryIndex); + updateButton(); + } + + @Override + protected boolean isTabIconsAnimationEnabled(boolean loaded) { + return LiteMode.isEnabled(LiteMode.FLAG_ANIMATED_EMOJI_KEYBOARD) && (!loaded || type == TYPE_AVATAR_CONSTRUCTOR); + } + }; + categoriesListView.setShownButtonsAtStart(type == TYPE_AVATAR_CONSTRUCTOR ? 6.5f : 4.5f); + categoriesListView.setDontOccupyWidth((int) (input.getPaint().measureText(input.getHint() + ""))); + categoriesListView.setBackgroundColor(Theme.getColor(Theme.key_chat_emojiPanelBackground, resourcesProvider)); + categoriesListView.setOnScrollIntoOccupiedWidth(scrolled -> { + input.setTranslationX(-Math.max(0, scrolled)); + showInputBoxGradient(scrolled > 0); + updateButton(); + }); + categoriesListView.setOnCategoryClick(category -> { + if (categoriesListView.getSelectedCategory() == category) { + search(null, false, false); + categoriesListView.selectCategory(null); + } else { + search(category.emojis, false, false); + categoriesListView.selectCategory(category); + } }); + box.addView(categoriesListView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.FILL, 36, 0, 0, 0)); + } + + private Runnable delayedToggle; + private void toggleClear(boolean enabled) { + if (enabled) { + if (delayedToggle == null) { + AndroidUtilities.runOnUIThread(delayedToggle = () -> { + AndroidUtilities.updateViewShow(clear, true); + }, 340); + } + } else { + if (delayedToggle != null) { + AndroidUtilities.cancelRunOnUIThread(delayedToggle); + delayedToggle = null; + } + AndroidUtilities.updateViewShow(clear, false); + } + } + + private boolean inputBoxShown = false; + private void showInputBoxGradient(boolean show) { + if (show == inputBoxShown || inputBoxGradient == null) { + return; + } + inputBoxShown = show; + inputBoxGradient.clearAnimation(); + inputBoxGradient.animate().alpha(show ? 
1 : 0).setDuration(120).setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT).start(); + } + + public boolean isInProgress() { + return searchStateDrawable.getIconState() == SearchStateDrawable.State.STATE_PROGRESS; + } + + public void showProgress(boolean progress) { + if (progress) { + searchStateDrawable.setIconState(SearchStateDrawable.State.STATE_PROGRESS); + } else { + updateButton(true); + } + } + + private void updateButton() { + updateButton(false); + } + + private void updateButton(boolean force) { + if (!isInProgress() || input.length() == 0 && (categoriesListView == null || categoriesListView.getSelectedCategory() == null) || force) { + boolean backButton = input.length() > 0 || categoriesListView != null && categoriesListView.isCategoriesShown() && (categoriesListView.isScrolledIntoOccupiedWidth() || categoriesListView.getSelectedCategory() != null); + searchStateDrawable.setIconState(backButton ? SearchStateDrawable.State.STATE_BACK : SearchStateDrawable.State.STATE_SEARCH); + } } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(4 + 8+36+8), MeasureSpec.EXACTLY)); + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(8+36+8), MeasureSpec.EXACTLY)); } } @@ -3909,8 +4408,8 @@ public void setScrimDrawable(AnimatedEmojiDrawable.SwapAnimatedEmojiDrawable scr this.scrimColor = scrimDrawable == null ? 0 : scrimDrawable.getColor(); this.scrimDrawable = scrimDrawable; this.scrimDrawableParent = drawableParent; - if (scrimDrawable != null) { - scrimDrawable.addParentView(this); + if (isAttached && scrimDrawable != null) { + scrimDrawable.setSecondParent(this); } invalidate(); } @@ -4056,7 +4555,7 @@ protected void dispatchDraw(Canvas canvas) { if (skew < 1) { canvas.translate(AndroidUtilities.rectTmp2.left, AndroidUtilities.rectTmp2.top); canvas.scale(1f, skew, 0, 0); - canvas.skew((1f - 2f * imageViewEmoji.skewIndex / layoutManager.getSpanCount()) * (1f - skew), 0); + canvas.skew((1f - 2f * imageViewEmoji.skewIndex / SPAN_COUNT_FOR_EMOJI) * (1f - skew), 0); canvas.translate(-AndroidUtilities.rectTmp2.left, -AndroidUtilities.rectTmp2.top); } canvas.clipRect(0, 0, getWidth(), clipBottom + showT * AndroidUtilities.dp(45)); @@ -4068,9 +4567,9 @@ protected void dispatchDraw(Canvas canvas) { AndroidUtilities.rectTmp2.offset(AndroidUtilities.dp(8 * skew), 0); } else if (imageViewEmoji.skewIndex == 1) { AndroidUtilities.rectTmp2.offset(AndroidUtilities.dp(4 * skew), 0); - } else if (imageViewEmoji.skewIndex == layoutManager.getSpanCount() - 2) { + } else if (imageViewEmoji.skewIndex == SPAN_COUNT_FOR_EMOJI - 2) { AndroidUtilities.rectTmp2.offset(-AndroidUtilities.dp(-4 * skew), 0); - } else if (imageViewEmoji.skewIndex == layoutManager.getSpanCount() - 1) { + } else if (imageViewEmoji.skewIndex == SPAN_COUNT_FOR_EMOJI - 1) { AndroidUtilities.rectTmp2.offset(AndroidUtilities.dp(-8 * skew), 0); } canvas.saveLayerAlpha(AndroidUtilities.rectTmp2.left, AndroidUtilities.rectTmp2.top, AndroidUtilities.rectTmp2.right, AndroidUtilities.rectTmp2.bottom, (int) (255 * (1f - showT)), Canvas.ALL_SAVE_FLAG); @@ -4295,10 +4794,15 @@ private void done(Integer date) { } animateShow(false, () -> { onEnd(date); - super.dismiss(); + try { + super.dismiss(); + } catch (Exception ignore) { + + } }, () -> { if (date != null) { try { + if (!NekoConfig.disableVibration.Bool()) performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, 
HapticFeedbackConstants.FLAG_IGNORE_VIEW_SETTING); } catch (Exception ignore) {} onEndPartly(date); @@ -4530,7 +5034,11 @@ public void setPressed(boolean pressed) { return; } - void setAnimationsEnabled(boolean aniationsEnabled) { + public void setAnimationsEnabled(boolean aniationsEnabled) { this.animationsEnabled = aniationsEnabled; } + + public void setEnterAnimationInProgress(boolean enterAnimationInProgress) { + this.enterAnimationInProgress = enterAnimationInProgress; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/SessionBottomSheet.java b/TMessagesProj/src/main/java/org/telegram/ui/SessionBottomSheet.java index d813cc9321..30b61c338c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/SessionBottomSheet.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/SessionBottomSheet.java @@ -262,7 +262,7 @@ public void onClick(View view) { fragment.showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } }); @@ -401,13 +401,13 @@ public ItemView(Context context, boolean needSwitch) { valueText.setTextSize(TypedValue.COMPLEX_UNIT_SP, 16); valueText.setGravity(Gravity.LEFT); valueText.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); - linearLayout.addView(valueText, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 0, 0, needSwitch ? 46 : 0, 0)); + linearLayout.addView(valueText, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 0, 0, needSwitch ? 64 : 0, 0)); descriptionText = new TextView(context); descriptionText.setTextSize(TypedValue.COMPLEX_UNIT_SP, 13); descriptionText.setGravity(Gravity.LEFT); descriptionText.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); - linearLayout.addView(descriptionText, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 0, 4, needSwitch ? 46 : 0, 0)); + linearLayout.addView(descriptionText, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 0, 4, needSwitch ? 
64 : 0, 0)); setPadding(0, AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4)); if (needSwitch) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/SessionsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/SessionsActivity.java index 2e21818a67..6ce09eddf5 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/SessionsActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/SessionsActivity.java @@ -193,7 +193,15 @@ public void onItemClick(int id) { emptyView.showProgress(); frameLayout.addView(emptyView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER)); - listView = new RecyclerListView(context); + listView = new RecyclerListView(context) { + @Override + public Integer getSelectorColor(int position) { + if (position == terminateAllSessionsRow) { + return Theme.multAlpha(getThemedColor(Theme.key_windowBackgroundWhiteRedText2), .1f); + } + return getThemedColor(Theme.key_listSelector); + } + }; listView.setLayoutManager(new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false) { @Override public boolean supportsPredictiveItemAnimations() { @@ -333,7 +341,7 @@ public boolean supportsPredictiveItemAnimations() { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } else if (position >= otherSessionsStartRow && position < otherSessionsEndRow || position >= passwordSessionsStartRow && position < passwordSessionsEndRow || position == currentSessionRow) { if (getParentActivity() == null) { @@ -453,7 +461,7 @@ public boolean supportsPredictiveItemAnimations() { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } }); @@ -856,7 +864,7 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } else { privacyCell.setText(LocaleController.getString("TerminateWebSessionInfo", R.string.TerminateWebSessionInfo)); } - privacyCell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); + privacyCell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); } else if (position == passwordSessionsDetailRow) { privacyCell.setText(LocaleController.getString("LoginAttemptsInfo", R.string.LoginAttemptsInfo)); if (otherSessionsTerminateDetail == -1) { @@ -865,7 +873,7 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { privacyCell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); } } else if (position == qrCodeDividerRow || position == ttlDivideRow || position == noOtherSessionsRow) { - privacyCell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); + privacyCell.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); privacyCell.setText(""); privacyCell.setFixedSize(12); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/StickersActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/StickersActivity.java index 
d55cbe6687..3ce00d6633 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/StickersActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/StickersActivity.java @@ -94,6 +94,7 @@ import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.EmojiPacksAlert; +import org.telegram.ui.Components.EmojiView; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.NumberTextView; import org.telegram.ui.Components.Premium.PremiumFeatureBottomSheet; @@ -156,6 +157,8 @@ public class StickersActivity extends BaseFragment implements NotificationCenter private boolean needReorder; private int currentType; + private int dynamicPackOrder; + private int dynamicPackOrderInfo; private int suggestRow; private int suggestAnimatedEmojiRow; private int suggestAnimatedEmojiInfoRow; @@ -164,6 +167,7 @@ public class StickersActivity extends BaseFragment implements NotificationCenter private int largeEmojiRow; private int reactionsDoubleTapRow; private int stickersBotInfo; + private int featuredRow; private int masksRow; private int emojiPacksRow; private int masksInfoRow; @@ -175,6 +179,7 @@ public class StickersActivity extends BaseFragment implements NotificationCenter private int featuredStickersEndRow; private int featuredStickersShowMoreRow; private int featuredStickersShadowRow; + private int stickersSettingsRow; private int stickersHeaderRow; private int stickersStartRow; @@ -430,10 +435,24 @@ public void startDocumentSelectActivity() { FrameLayout frameLayout = (FrameLayout) fragmentView; frameLayout.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundGray)); - listView = new RecyclerListView(context); + listView = new RecyclerListView(context) { + @Override + protected void dispatchDraw(Canvas canvas) { + if (actionBar.isActionModeShowed()) { + drawSectionBackground(canvas, stickersHeaderRow, stickersEndRow, getThemedColor(Theme.key_windowBackgroundWhite)); + } + super.dispatchDraw(canvas); + } + }; listView.setFocusable(true); listView.setTag(7); - DefaultItemAnimator itemAnimator = new DefaultItemAnimator(); + DefaultItemAnimator itemAnimator = new DefaultItemAnimator() { + @Override + protected void onMoveAnimationUpdate(RecyclerView.ViewHolder holder) { + super.onMoveAnimationUpdate(holder); + listView.invalidate(); + } + }; itemAnimator.setMoveDuration(350); itemAnimator.setMoveInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); listView.setItemAnimator(itemAnimator); @@ -470,7 +489,7 @@ protected void calculateExtraLayoutSpace(@NonNull RecyclerView.State state, @Non } else { showDialog(new StickersAlert(getParentActivity(), StickersActivity.this, inputStickerSetID, null, null)); } - } else if (position == featuredStickersShowMoreRow) { + } else if (position == featuredStickersShowMoreRow || position == featuredRow) { if (currentType == MediaDataController.TYPE_EMOJIPACKS) { ArrayList inputStickerSets = new ArrayList<>(); List featuredStickerSets = getFeaturedSets(); @@ -572,6 +591,9 @@ public void onStickerSetRemove(TLRPC.StickerSetCovered stickerSet) { ((TextCheckCell) view).setChecked(SharedConfig.suggestAnimatedEmoji); } else if (position == reactionsDoubleTapRow) { presentFragment(new ReactionsDoubleTapManageActivity()); + } else if (position == dynamicPackOrder) { + SharedConfig.toggleUpdateStickersOrderOnSend(); + ((TextCheckCell) view).setChecked(SharedConfig.updateStickersOrderOnSend); } }); listView.setOnItemLongClickListener((view, position) -> { @@ -765,6 
+787,20 @@ private void sendReorder() { } ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> activeReorderingRequests--)); NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.stickersDidLoad, currentType, true); + + if (SharedConfig.updateStickersOrderOnSend && dynamicPackOrder != -1) { + SharedConfig.toggleUpdateStickersOrderOnSend(); + BulletinFactory.of(StickersActivity.this).createSimpleBulletin(R.raw.filter_reorder, LocaleController.getString("DynamicPackOrderOff", R.string.DynamicPackOrderOff), LocaleController.getString("DynamicPackOrderOffInfo", R.string.DynamicPackOrderOffInfo)).show(); + + for (int i = 0; i < listView.getChildCount(); ++i) { + View child = listView.getChildAt(i); + int position = listView.getChildAdapterPosition(child); + if (position == dynamicPackOrder && child instanceof TextCheckCell) { + ((TextCheckCell) child).setChecked(SharedConfig.updateStickersOrderOnSend); + break; + } + } + } } private void updateRows(boolean updateEmojipacks) { @@ -877,19 +913,10 @@ public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) { rowCount = 0; - if (currentType == MediaDataController.TYPE_IMAGE) { - suggestRow = rowCount++; - largeEmojiRow = rowCount++; - loopRow = rowCount++; - loopInfoRow = rowCount++; - } else { - suggestRow = -1; - largeEmojiRow = -1; - loopRow = -1; - loopInfoRow = -1; - } + loopRow = -1; + loopInfoRow = -1; - if (currentType == MediaDataController.TYPE_EMOJIPACKS && hasUsefulPacks) { + if (currentType == MediaDataController.TYPE_EMOJIPACKS) { suggestAnimatedEmojiRow = rowCount++; suggestAnimatedEmojiInfoRow = rowCount++; } else { @@ -898,31 +925,41 @@ public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) { } if (currentType == MediaDataController.TYPE_IMAGE) { - masksRow = rowCount++; + featuredRow = rowCount++; + masksRow = -1; + if (mediaDataController.getArchivedStickersCount(currentType) != 0) { + boolean inserted = archivedRow == -1; + archivedRow = rowCount++; + if (listAdapter != null && inserted) { + listAdapter.notifyItemRangeInserted(archivedRow, 1); + } + } + archivedInfoRow = -1; emojiPacksRow = rowCount++; } else { + featuredRow = -1; masksRow = -1; emojiPacksRow = -1; - } - if (mediaDataController.getArchivedStickersCount(currentType) != 0 && currentType != MediaDataController.TYPE_EMOJIPACKS) { - boolean inserted = archivedRow == -1; + if (mediaDataController.getArchivedStickersCount(currentType) != 0 && currentType != MediaDataController.TYPE_EMOJIPACKS) { + boolean inserted = archivedRow == -1; - archivedRow = rowCount++; - archivedInfoRow = currentType == MediaDataController.TYPE_MASK ? rowCount++ : -1; + archivedRow = rowCount++; + archivedInfoRow = currentType == MediaDataController.TYPE_MASK ? rowCount++ : -1; - if (listAdapter != null && inserted) { - listAdapter.notifyItemRangeInserted(archivedRow, archivedInfoRow != -1 ? 2 : 1); - } - } else { - int oldArchivedRow = archivedRow; - int oldArchivedInfoRow = archivedInfoRow; + if (listAdapter != null && inserted) { + listAdapter.notifyItemRangeInserted(archivedRow, archivedInfoRow != -1 ? 2 : 1); + } + } else { + int oldArchivedRow = archivedRow; + int oldArchivedInfoRow = archivedInfoRow; - archivedRow = -1; - archivedInfoRow = -1; + archivedRow = -1; + archivedInfoRow = -1; - if (listAdapter != null && oldArchivedRow != -1) { - listAdapter.notifyItemRangeRemoved(oldArchivedRow, oldArchivedInfoRow != -1 ? 
2 : 1); + if (listAdapter != null && oldArchivedRow != -1) { + listAdapter.notifyItemRangeRemoved(oldArchivedRow, oldArchivedInfoRow != -1 ? 2 : 1); + } } } @@ -942,16 +979,30 @@ public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) { featuredStickersEndRow = -1; featuredStickersShowMoreRow = -1; featuredStickersShadowRow = -1; - if (!featuredStickersList.isEmpty() && (currentType == MediaDataController.TYPE_IMAGE)) { - featuredStickersHeaderRow = rowCount++; - featuredStickersStartRow = rowCount; - rowCount += featuredStickersList.size(); - featuredStickersEndRow = rowCount; +// if (!featuredStickersList.isEmpty() && (currentType == MediaDataController.TYPE_IMAGE)) { +// featuredStickersHeaderRow = rowCount++; +// featuredStickersStartRow = rowCount; +// rowCount += featuredStickersList.size(); +// featuredStickersEndRow = rowCount; +// +// if (truncatedFeaturedStickers) { +// featuredStickersShowMoreRow = rowCount++; +// } +// featuredStickersShadowRow = rowCount++; +// } - if (truncatedFeaturedStickers) { - featuredStickersShowMoreRow = rowCount++; - } - featuredStickersShadowRow = rowCount++; + if (currentType == MediaDataController.TYPE_IMAGE) { + stickersSettingsRow = rowCount++; + suggestRow = rowCount++; + largeEmojiRow = rowCount++; + dynamicPackOrder = rowCount++; + dynamicPackOrderInfo = rowCount++; + } else { + stickersSettingsRow = -1; + suggestRow = -1; + largeEmojiRow = -1; + dynamicPackOrder = -1; + dynamicPackOrderInfo = -1; } int stickerSetsCount = newList.size(); @@ -1266,7 +1317,7 @@ public boolean didCopy() { if (which == MENU_DELETE) { TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } break; @@ -1355,6 +1406,8 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { headerCell.setText(LocaleController.getString(currentType == MediaDataController.TYPE_EMOJIPACKS ? R.string.FeaturedEmojiPacks : R.string.FeaturedStickers)); } else if (position == stickersHeaderRow) { headerCell.setText(LocaleController.getString(currentType == MediaDataController.TYPE_EMOJIPACKS ? R.string.ChooseStickerMyEmojiPacks : R.string.ChooseStickerMyStickerSets)); + } else if (position == stickersSettingsRow) { + headerCell.setText(LocaleController.getString("StickersSettings", R.string.StickersSettings)); } break; case TYPE_FEATURED_STICKER_SET: { @@ -1420,6 +1473,7 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { break; case TYPE_INFO: TextInfoPrivacyCell infoPrivacyCell = (TextInfoPrivacyCell) holder.itemView; + infoPrivacyCell.setFixedSize(0); if (position == stickersBotInfo) { infoPrivacyCell.setText(addStickersBotSpan( currentType == MediaDataController.TYPE_EMOJIPACKS ? 
@@ -1433,11 +1487,15 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { infoPrivacyCell.setText(LocaleController.getString("ArchivedMasksInfo", R.string.ArchivedMasksInfo)); } } else if (position == loopInfoRow) { - infoPrivacyCell.setText(LocaleController.getString("LoopAnimatedStickersInfo", R.string.LoopAnimatedStickersInfo)); +// infoPrivacyCell.setText(LocaleController.getString("LoopAnimatedStickersInfo", R.string.LoopAnimatedStickersInfo)); + infoPrivacyCell.setText(null); + infoPrivacyCell.setFixedSize(12); } else if (position == suggestAnimatedEmojiInfoRow) { infoPrivacyCell.setText(LocaleController.getString("SuggestAnimatedEmojiInfo", R.string.SuggestAnimatedEmojiInfo)); } else if (position == masksInfoRow) { infoPrivacyCell.setText(LocaleController.getString("MasksInfo", R.string.MasksInfo)); + } else if (position == dynamicPackOrderInfo) { + infoPrivacyCell.setText(LocaleController.getString("DynamicPackOrderInfo")); } break; case TYPE_TEXT_AND_VALUE: { @@ -1445,18 +1503,19 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { if (position == featuredStickersShowMoreRow) { settingsCell.setColors(Theme.key_windowBackgroundWhiteBlueText4, Theme.key_windowBackgroundWhiteBlueText4); if (currentType == MediaDataController.TYPE_EMOJIPACKS) { - settingsCell.setTextAndIcon(LocaleController.getString(R.string.ShowMoreEmojiPacks), R.drawable.msg_trending, false); + settingsCell.setTextAndIcon(LocaleController.getString(R.string.ShowMoreEmojiPacks), R.drawable.msg2_trending, false); } else { - settingsCell.setTextAndIcon(LocaleController.getString(R.string.ShowMoreStickers), R.drawable.msg_trending, false); + settingsCell.setTextAndIcon(LocaleController.getString(R.string.ShowMoreStickers), R.drawable.msg2_trending, false); } } else { settingsCell.imageView.setTranslationX(0); + settingsCell.textView.setTranslationX(0); settingsCell.setColors(Theme.key_windowBackgroundWhiteGrayIcon, Theme.key_windowBackgroundWhiteBlackText); if (position == archivedRow) { int count = MediaDataController.getInstance(currentAccount).getArchivedStickersCount(currentType); String value = count > 0 ? Integer.toString(count) : ""; if (currentType == MediaDataController.TYPE_IMAGE) { - settingsCell.setTextAndValueAndIcon(LocaleController.getString(R.string.ArchivedStickers), value, R.drawable.msg_archived_stickers, true); + settingsCell.setTextAndValueAndIcon(LocaleController.getString(R.string.ArchivedStickers), value, R.drawable.msg2_archived_stickers, true); } else if (currentType == MediaDataController.TYPE_EMOJIPACKS) { settingsCell.setTextAndValue(LocaleController.getString("ArchivedEmojiPacks", R.string.ArchivedEmojiPacks), value, false, true); } else { @@ -1467,12 +1526,15 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { MediaDataController mediaDataController = MediaDataController.getInstance(currentAccount); int count = MessagesController.getInstance(currentAccount).filterPremiumStickers(mediaDataController.getStickerSets(type)).size() + mediaDataController.getArchivedStickersCount(type); settingsCell.setTextAndValueAndIcon(LocaleController.getString("Masks", R.string.Masks), count > 0 ? Integer.toString(count) : "", R.drawable.msg_mask, true); + } else if (position == featuredRow) { + List sets = getFeaturedSets(); + settingsCell.setTextAndValueAndIcon(LocaleController.getString("FeaturedStickers", R.string.FeaturedStickers), sets != null ? 
"" + sets.size() : "", R.drawable.msg2_trending, true); } else if (position == emojiPacksRow) { int type = MediaDataController.TYPE_EMOJIPACKS; MediaDataController mediaDataController = MediaDataController.getInstance(currentAccount); int count = mediaDataController.getStickerSets(type).size(); settingsCell.imageView.setTranslationX(-AndroidUtilities.dp(2)); - settingsCell.setTextAndValueAndIcon(LocaleController.getString("Emoji", R.string.Emoji), count > 0 ? Integer.toString(count) : "", R.drawable.input_smile, true); + settingsCell.setTextAndValueAndIcon(LocaleController.getString("Emoji", R.string.Emoji), count > 0 ? Integer.toString(count) : "", R.drawable.msg2_smile_status, true); } else if (position == suggestRow) { String value; switch (SharedConfig.suggestStickers) { @@ -1487,6 +1549,9 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { value = LocaleController.getString("SuggestStickersNone", R.string.SuggestStickersNone); break; } + if (!LocaleController.isRTL) { + settingsCell.textView.setTranslationX(AndroidUtilities.dp(-2)); + } settingsCell.setTextAndValue(LocaleController.getString("SuggestStickers", R.string.SuggestStickers), value, updateSuggestStickers, true); updateSuggestStickers = false; } @@ -1501,17 +1566,19 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { case TYPE_SWITCH: TextCheckCell cell = (TextCheckCell) holder.itemView; if (position == loopRow) { - cell.setTextAndCheck(LocaleController.getString("LoopAnimatedStickers", R.string.LoopAnimatedStickers), SharedConfig.loopStickers, true); + cell.setTextAndCheck(LocaleController.getString("LoopAnimatedStickers", R.string.LoopAnimatedStickers), SharedConfig.loopStickers(), true); } else if (position == largeEmojiRow) { cell.setTextAndCheck(LocaleController.getString("LargeEmoji", R.string.LargeEmoji), SharedConfig.allowBigEmoji, true); } else if (position == suggestAnimatedEmojiRow) { cell.setTextAndCheck(LocaleController.getString("SuggestAnimatedEmoji", R.string.SuggestAnimatedEmoji), SharedConfig.suggestAnimatedEmoji, false); + } else if (position == dynamicPackOrder) { + cell.setTextAndCheck(LocaleController.getString("DynamicPackOrder"), SharedConfig.updateStickersOrderOnSend, false); } break; case TYPE_DOUBLE_TAP_REACTIONS: { TextSettingsCell settingsCell = (TextSettingsCell) holder.itemView; settingsCell.setText(LocaleController.getString("DoubleTapSetting", R.string.DoubleTapSetting), false); - settingsCell.setIcon(R.drawable.msg_reactions2); + settingsCell.setIcon(R.drawable.msg2_reactions2); String reaction = MediaDataController.getInstance(currentAccount).getDoubleTapReaction(); if (reaction != null) { if (reaction.startsWith("animated_")) { @@ -1525,7 +1592,7 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { TLRPC.TL_availableReaction availableReaction = MediaDataController.getInstance(currentAccount).getReactionsMap().get(reaction); if (availableReaction != null) { SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(availableReaction.static_icon.thumbs, Theme.key_windowBackgroundGray, 1.0f); - settingsCell.getValueBackupImageView().getImageReceiver().setImage(ImageLocation.getForDocument(availableReaction.center_icon), "100_100_lastframe", svgThumb, "webp", availableReaction, 1); + settingsCell.getValueBackupImageView().getImageReceiver().setImage(ImageLocation.getForDocument(availableReaction.center_icon), "100_100_lastreactframe", svgThumb, "webp", availableReaction, 1); } } } @@ -1695,7 +1762,7 @@ 
protected void onRemoveButtonClick() { showDialog(dialog); if (options[options.length - 1] == MENU_DELETE) { - dialog.setItemColor(items.length - 1, Theme.getColor(Theme.key_dialogTextRed2), Theme.getColor(Theme.key_dialogRedIcon)); + dialog.setItemColor(items.length - 1, Theme.getColor(Theme.key_dialogTextRed), Theme.getColor(Theme.key_dialogRedIcon)); } }); break; @@ -1734,17 +1801,17 @@ public int getItemViewType(int i) { return TYPE_FEATURED_STICKER_SET; } else if (i >= stickersStartRow && i < stickersEndRow) { return TYPE_STICKER_SET; - } else if (i == stickersBotInfo || i == archivedInfoRow || i == loopInfoRow || i == suggestAnimatedEmojiInfoRow || i == masksInfoRow) { + } else if (i == stickersBotInfo || i == archivedInfoRow || i == loopInfoRow || i == suggestAnimatedEmojiInfoRow || i == masksInfoRow || i == dynamicPackOrderInfo) { return TYPE_INFO; - } else if (i == archivedRow || i == masksRow || i == emojiPacksRow || i == suggestRow || i == featuredStickersShowMoreRow) { + } else if (i == archivedRow || i == masksRow || i == featuredRow || i == emojiPacksRow || i == suggestRow || i == featuredStickersShowMoreRow) { return TYPE_TEXT_AND_VALUE; } else if (i == stickersShadowRow || i == featuredStickersShadowRow) { return TYPE_SHADOW; - } else if (i == loopRow || i == largeEmojiRow || i == suggestAnimatedEmojiRow) { + } else if (i == loopRow || i == largeEmojiRow || i == suggestAnimatedEmojiRow || i == dynamicPackOrder) { return TYPE_SWITCH; } else if (i == reactionsDoubleTapRow) { return TYPE_DOUBLE_TAP_REACTIONS; - } else if (i == featuredStickersHeaderRow || i == stickersHeaderRow) { + } else if (i == featuredStickersHeaderRow || i == stickersHeaderRow || i == stickersSettingsRow) { return TYPE_HEADER; } return TYPE_STICKER_SET; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/SuggestClearDatabaseBottomSheet.java b/TMessagesProj/src/main/java/org/telegram/ui/SuggestClearDatabaseBottomSheet.java index f39e642a8e..c024b6e00e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/SuggestClearDatabaseBottomSheet.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/SuggestClearDatabaseBottomSheet.java @@ -92,7 +92,7 @@ private SuggestClearDatabaseBottomSheet(BaseFragment fragment) { fragment.showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } }); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/TextMessageEnterTransition.java b/TMessagesProj/src/main/java/org/telegram/ui/TextMessageEnterTransition.java index 881c90abe8..08c7aed5e6 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/TextMessageEnterTransition.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/TextMessageEnterTransition.java @@ -22,6 +22,7 @@ import android.text.StaticLayout; import android.text.TextPaint; import android.text.TextUtils; +import android.util.Log; import android.view.View; import android.view.animation.LinearInterpolator; @@ -129,7 +130,8 @@ public TextMessageEnterTransition(ChatMessageCell messageView, ChatActivity chat return; } - fromRadius = chatActivityEnterView.getRecordCicle().drawingCircleRadius; + ChatActivityEnterView.RecordCircle recordCircle = chatActivityEnterView.getRecordCircle(); + fromRadius = recordCircle == null ? 
0 : recordCircle.drawingCircleRadius; bitmapPaint.setFilterBitmap(true); currentMessageObject = messageView.getMessageObject(); @@ -139,7 +141,7 @@ public TextMessageEnterTransition(ChatMessageCell messageView, ChatActivity chat messageView.setEnterTransitionInProgress(true); - CharSequence editText = chatActivityEnterView.getEditField().getLayout().getText(); + CharSequence editText = chatActivityEnterView.getEditText(); CharSequence text = messageView.getMessageObject().messageText; crossfade = false; @@ -148,23 +150,24 @@ public TextMessageEnterTransition(ChatMessageCell messageView, ChatActivity chat TextPaint textPaint = Theme.chat_msgTextPaint; int emojiSize = AndroidUtilities.dp(20); if (messageView.getMessageObject().getEmojiOnlyCount() != 0) { - switch (messageView.getMessageObject().getEmojiOnlyCount()) { + boolean large = messageView.getMessageObject().emojiOnlyCount == messageView.getMessageObject().animatedEmojiCount; + switch (Math.max(messageView.getMessageObject().emojiOnlyCount, messageView.getMessageObject().animatedEmojiCount)) { case 0: case 1: case 2: - textPaint = Theme.chat_msgTextPaintEmoji[0]; + textPaint = large ? Theme.chat_msgTextPaintEmoji[0] : Theme.chat_msgTextPaintEmoji[2]; break; case 3: - textPaint = Theme.chat_msgTextPaintEmoji[1]; + textPaint = large ? Theme.chat_msgTextPaintEmoji[1] : Theme.chat_msgTextPaintEmoji[3]; break; case 4: - textPaint = Theme.chat_msgTextPaintEmoji[2]; + textPaint = large ? Theme.chat_msgTextPaintEmoji[2] : Theme.chat_msgTextPaintEmoji[4]; break; case 5: - textPaint = Theme.chat_msgTextPaintEmoji[3]; + textPaint = large ? Theme.chat_msgTextPaintEmoji[3] : Theme.chat_msgTextPaintEmoji[5]; break; case 6: - textPaint = Theme.chat_msgTextPaintEmoji[4]; + textPaint = large ? Theme.chat_msgTextPaintEmoji[4] : Theme.chat_msgTextPaintEmoji[5]; break; case 7: case 8: @@ -181,12 +184,7 @@ public TextMessageEnterTransition(ChatMessageCell messageView, ChatActivity chat if (text instanceof Spannable) { Spannable spannable = (Spannable) text; Object[] objects = spannable.getSpans(0, text.length(), Object.class); - for (int i = 0; i < objects.length; i++) { - if (!(objects[i] instanceof Emoji.EmojiSpan)) { - containsSpans = true; - break; - } - } + containsSpans = objects != null && objects.length > 0; } if (editText.length() != text.length() || containsSpans) { crossfade = true; @@ -427,7 +425,7 @@ public void onDraw(Canvas canvas) { if (messageView.animatedEmojiStack != null) { messageView.animatedEmojiStack.clearPositions(); } - messageView.drawMessageText(bitmapCanvas, messageView.getMessageObject().textLayoutBlocks, true, 1f, true); + messageView.drawMessageText(bitmapCanvas, messageView.getMessageObject().textLayoutBlocks, messageView.getMessageObject().textXOffset, true, 1f, true); messageView.drawAnimatedEmojis(bitmapCanvas, 1f); } float listViewBottom = listView.getY() - container.getY() + listView.getMeasuredHeight(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ThemeActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ThemeActivity.java index d1b891e6d9..a31163b6f3 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ThemeActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ThemeActivity.java @@ -52,10 +52,10 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; -import org.telegram.messenger.BuildVars; import org.telegram.messenger.FileLoader; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; +import 
org.telegram.messenger.MediaDataController; import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; @@ -87,6 +87,7 @@ import org.telegram.ui.Cells.ThemeTypeCell; import org.telegram.ui.Cells.ThemesHorizontalListCell; import org.telegram.ui.Components.AlertsCreator; +import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.RLottieDrawable; import org.telegram.ui.Components.RecyclerListView; @@ -140,6 +141,7 @@ public class ThemeActivity extends BaseFragment implements NotificationCenter.No private int customTabsRow; private int directShareRow; private int raiseToSpeakRow; + private int raiseToListenRow; private int sendByEnterRow; private int saveToGalleryOption1Row; private int saveToGalleryOption2Row; @@ -182,6 +184,8 @@ public class ThemeActivity extends BaseFragment implements NotificationCenter.No private int themeAccentListRow; private int themeInfoRow; private int chatBlurRow; + private int pauseOnRecordRow; + private int pauseOnMediaRow; private int swipeGestureHeaderRow; private int swipeGestureRow; @@ -191,6 +195,11 @@ public class ThemeActivity extends BaseFragment implements NotificationCenter.No private int themePreviewRow; private int editThemeRow; private int createNewThemeRow; + private int lastShadowRow; + private int stickersRow; + private int stickersInfoRow; + private int liteModeRow; + private int liteModeInfoRow; private int appIconHeaderRow; private int appIconSelectorRow; @@ -216,8 +225,6 @@ public class ThemeActivity extends BaseFragment implements NotificationCenter.No private final static int day_night_switch = 5; private RLottieDrawable sunDrawable; - private int lightModeRow; - private int lightModeTopInfoRow; private class GpsLocationListener implements LocationListener { @@ -484,6 +491,7 @@ private void updateRows(boolean notify) { int prevThemeAccentListRow = themeAccentListRow; int prevEditThemeRow = editThemeRow; + int prevRaiseToSpeakRow = raiseToSpeakRow; rowCount = 0; contactsReimportRow = -1; @@ -519,9 +527,13 @@ private void updateRows(boolean notify) { chatListRow = -1; chatListInfoRow = -1; chatBlurRow = -1; + pauseOnRecordRow = -1; + pauseOnMediaRow = -1; + stickersRow = -1; + stickersInfoRow = -1; + liteModeRow = -1; + liteModeInfoRow = -1; - lightModeRow = -1; - lightModeTopInfoRow = -1; textSizeRow = -1; backgroundRow = -1; settingsRow = -1; @@ -529,6 +541,7 @@ private void updateRows(boolean notify) { directShareRow = -1; enableAnimationsRow = -1; raiseToSpeakRow = -1; + raiseToListenRow = -1; sendByEnterRow = -1; saveToGalleryOption1Row = -1; saveToGalleryOption2Row = -1; @@ -549,6 +562,7 @@ private void updateRows(boolean notify) { appIconHeaderRow = -1; appIconSelectorRow = -1; appIconShadowRow = -1; + lastShadowRow = -1; defaultThemes.clear(); darkThemes.clear(); @@ -590,7 +604,7 @@ private void updateRows(boolean notify) { editThemeRow = rowCount++; } createNewThemeRow = rowCount++; - swipeGestureInfoRow = rowCount++; + lastShadowRow = rowCount++; } else if (currentType == THEME_TYPE_BASIC) { textSizeHeaderRow = rowCount++; textSizeRow = rowCount++; @@ -611,32 +625,38 @@ private void updateRows(boolean notify) { chatListRow = rowCount++; chatListInfoRow = rowCount++; + appIconHeaderRow = rowCount++; + appIconSelectorRow = rowCount++; + appIconShadowRow = rowCount++; + swipeGestureHeaderRow = rowCount++; swipeGestureRow = rowCount++; swipeGestureInfoRow = rowCount++; - 
appIconHeaderRow = rowCount++; - appIconSelectorRow = rowCount++; - appIconShadowRow = rowCount++; + liteModeRow = rowCount++; + liteModeInfoRow = rowCount++; + + stickersRow = rowCount++; + stickersInfoRow = rowCount++; settingsRow = rowCount++; nightThemeRow = rowCount++; customTabsRow = rowCount++; directShareRow = rowCount++; - enableAnimationsRow = rowCount++; - raiseToSpeakRow = rowCount++; - bluetoothScoRow = rowCount++; - sendByEnterRow = rowCount++; - if (SharedConfig.canBlurChat()) { - chatBlurRow = rowCount++; +// enableAnimationsRow = rowCount++; + raiseToListenRow = rowCount++; + if (SharedConfig.raiseToListen) { + raiseToSpeakRow = rowCount++; } + sendByEnterRow = rowCount++; + pauseOnMediaRow = rowCount++; + pauseOnRecordRow = rowCount++; + bluetoothScoRow = rowCount++; +// if (SharedConfig.canBlurChat()) { +// chatBlurRow = rowCount++; +// } distanceRow = rowCount++; settings2Row = rowCount++; - - if (true || SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW || BuildVars.DEBUG_VERSION) { - lightModeRow = rowCount++; - lightModeTopInfoRow = rowCount++; - } } else { nightDisabledRow = rowCount++; nightScheduledRow = rowCount++; @@ -700,6 +720,12 @@ private void updateRows(boolean notify) { } else if (prevEditThemeRow != -1 && editThemeRow == -1) { listAdapter.notifyItemRemoved(prevEditThemeRow); } + + if (prevRaiseToSpeakRow == -1 && raiseToSpeakRow != -1) { + listAdapter.notifyItemInserted(raiseToSpeakRow); + } else if (prevRaiseToSpeakRow != -1 && raiseToSpeakRow == -1) { + listAdapter.notifyItemRemoved(prevRaiseToSpeakRow); + } } } else { int start = nightTypeInfoRow + 1; @@ -913,7 +939,7 @@ public void onItemClick(int id) { if (setFontSize(AndroidUtilities.isTablet() ? 18 : 16)) { changed = true; } - if (setBubbleRadius(10, true)) { + if (setBubbleRadius(17, true)) { changed = true; } if (changed) { @@ -952,7 +978,7 @@ public void onItemClick(int id) { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } else if (id == day_night_switch) { SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("themeconfig", Activity.MODE_PRIVATE); @@ -987,6 +1013,8 @@ public void onItemClick(int id) { NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.needSetDayNightTheme, themeInfo, false, pos, -1, toDark, menuItem.getIconView()); updateRows(true); //AndroidUtilities.updateVisibleRows(listView); + + Theme.turnOffAutoNight(ThemeActivity.this); } } }); @@ -1026,10 +1054,34 @@ public void onItemClick(int id) { ((TextCheckCell) view).setChecked(!send); } } else if (position == raiseToSpeakRow) { - SharedConfig.toogleRaiseToSpeak(); + SharedConfig.toggleRaiseToSpeak(); if (view instanceof TextCheckCell) { ((TextCheckCell) view).setChecked(SharedConfig.raiseToSpeak); } + } else if (position == raiseToListenRow) { + SharedConfig.toggleRaiseToListen(); + if (view instanceof TextCheckCell) { + ((TextCheckCell) view).setChecked(SharedConfig.raiseToListen); + } + if (!SharedConfig.raiseToListen && raiseToSpeakRow != -1) { + for (int i = 0; i < listView.getChildCount(); ++i) { + View child = listView.getChildAt(i); + if (child instanceof TextCheckCell && listView.getChildAdapterPosition(child) == raiseToSpeakRow) { + ((TextCheckCell) child).setChecked(false); + } + } + } + updateRows(false); + } else 
if (position == pauseOnRecordRow) { + SharedConfig.togglePauseMusicOnRecord(); + if (view instanceof TextCheckCell) { + ((TextCheckCell) view).setChecked(SharedConfig.pauseMusicOnRecord); + } + } else if (position == pauseOnMediaRow) { + SharedConfig.togglePauseMusicOnMedia(); + if (view instanceof TextCheckCell) { + ((TextCheckCell) view).setChecked(SharedConfig.pauseMusicOnMedia); + } } else if (position == distanceRow) { if (getParentActivity() == null) { return; @@ -1165,12 +1217,6 @@ public void onItemClick(int id) { ((TextCheckCell) view).setChecked(SharedConfig.chatBlurEnabled()); } } - } else if (position == lightModeRow) { - SharedConfig.getLiteMode().toggleMode(); - if (view instanceof TextCheckCell) { - ((TextCheckCell) view).setChecked(SharedConfig.getLiteMode().enabled()); - } -// } else if (position == nightThemeRow) { if (LocaleController.isRTL && x <= AndroidUtilities.dp(76) || !LocaleController.isRTL && x >= view.getMeasuredWidth() - AndroidUtilities.dp(76)) { NotificationsCheckCell checkCell = (NotificationsCheckCell) view; @@ -1271,8 +1317,20 @@ public void onItemClick(int id) { createNewTheme(); } else if (position == editThemeRow) { editTheme(); + } else if (position == stickersRow) { + presentFragment(new StickersActivity(MediaDataController.TYPE_IMAGE, null)); + } else if (position == liteModeRow) { + presentFragment(new LiteModeSettingsActivity()); } }); + if (currentType == THEME_TYPE_BASIC) { + DefaultItemAnimator itemAnimator = new DefaultItemAnimator(); + itemAnimator.setDurations(350); + itemAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + itemAnimator.setDelayAnimations(false); + itemAnimator.setSupportsChangeAnimations(false); + listView.setItemAnimator(itemAnimator); + } return fragmentView; } @@ -1326,7 +1384,7 @@ private void updateMenuItem() { } int fontSize = AndroidUtilities.isTablet() ? 
18 : 16; Theme.ThemeInfo currentTheme = Theme.getCurrentTheme(); - if (SharedConfig.fontSize != fontSize || SharedConfig.bubbleRadius != 10 || !currentTheme.firstAccentIsDefault || currentTheme.currentAccentId != Theme.DEFALT_THEME_ACCENT_ID || accent != null && accent.overrideWallpaper != null && !Theme.DEFAULT_BACKGROUND_SLUG.equals(accent.overrideWallpaper.slug)) { + if (SharedConfig.fontSize != fontSize || SharedConfig.bubbleRadius != 17 || !currentTheme.firstAccentIsDefault || currentTheme.currentAccentId != Theme.DEFALT_THEME_ACCENT_ID || accent != null && accent.overrideWallpaper != null && !Theme.DEFAULT_BACKGROUND_SLUG.equals(accent.overrideWallpaper.slug)) { menuItem.showSubItem(reset_settings); } else { menuItem.hideSubItem(reset_settings); @@ -1862,14 +1920,14 @@ private void showOptionsForTheme(Theme.ThemeInfo themeInfo) { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } }); AlertDialog alertDialog = builder.create(); showDialog(alertDialog); if (hasDelete) { - alertDialog.setItemColor(alertDialog.getItemsCount() - 1, Theme.getColor(Theme.key_dialogTextRed2), Theme.getColor(Theme.key_dialogRedIcon)); + alertDialog.setItemColor(alertDialog.getItemsCount() - 1, Theme.getColor(Theme.key_dialogTextRed), Theme.getColor(Theme.key_dialogRedIcon)); } } @@ -1939,17 +1997,12 @@ protected void didSelectChatType(boolean threeLines) { break; case TYPE_THEME_LIST: first = true; - themesHorizontalListCell = new ThemesHorizontalListCell(mContext, currentType, defaultThemes, darkThemes) { + themesHorizontalListCell = new ThemesHorizontalListCell(mContext, ThemeActivity.this, currentType, defaultThemes, darkThemes) { @Override protected void showOptionsForTheme(Theme.ThemeInfo themeInfo) { listAdapter.showOptionsForTheme(themeInfo); } - @Override - protected void presentFragment(BaseFragment fragment) { - ThemeActivity.this.presentFragment(fragment); - } - @Override protected void updateRows() { ThemeActivity.this.updateRows(false); @@ -1995,6 +2048,7 @@ public boolean onInterceptTouchEvent(MotionEvent e) { if (currentTheme.currentAccentId != accent.id) { NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.needSetDayNightTheme, currentTheme, currentType == THEME_TYPE_NIGHT, null, accent.id); EmojiThemes.saveCustomTheme(currentTheme, accent.id); + Theme.turnOffAutoNight(ThemeActivity.this); } else { presentFragment(new ThemePreviewActivity(currentTheme, false, ThemePreviewActivity.SCREEN_TYPE_ACCENT_COLOR, accent.id >= 100, currentType == THEME_TYPE_NIGHT)); } @@ -2070,13 +2124,13 @@ public boolean onInterceptTouchEvent(MotionEvent e) { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } }); AlertDialog alertDialog = builder.create(); showDialog(alertDialog); - alertDialog.setItemColor(alertDialog.getItemsCount() - 1, Theme.getColor(Theme.key_dialogTextRed2), Theme.getColor(Theme.key_dialogRedIcon)); + alertDialog.setItemColor(alertDialog.getItemsCount() - 1, Theme.getColor(Theme.key_dialogTextRed), Theme.getColor(Theme.key_dialogRedIcon)); return true; } return false; @@ -2175,20 +2229,24 @@ public void onBindViewHolder(RecyclerView.ViewHolder 
holder, int position) { } case TYPE_TEXT_INFO_PRIVACY: { TextInfoPrivacyCell cell = (TextInfoPrivacyCell) holder.itemView; - if (position == lightModeTopInfoRow) { - cell.setText(LocaleController.formatString("LightModeInfoRow", R.string.LightModeInfoRow)); - } else if (position == automaticBrightnessInfoRow) { + if (position == automaticBrightnessInfoRow) { cell.setText(LocaleController.formatString("AutoNightBrightnessInfo", R.string.AutoNightBrightnessInfo, (int) (100 * Theme.autoNightBrighnessThreshold))); } else if (position == scheduleLocationInfoRow) { cell.setText(getLocationSunString()); + } else if (position == swipeGestureInfoRow) { + cell.setText(LocaleController.getString("ChatListSwipeGestureInfo", R.string.ChatListSwipeGestureInfo)); + } else if (position == stickersInfoRow) { + cell.setText(LocaleController.getString("StickersNameInfo", R.string.StickersNameInfo)); + } else if (position == liteModeInfoRow) { + cell.setText(LocaleController.getString("LiteModeInfo", R.string.LiteModeInfo)); } break; } case TYPE_SHADOW: { - if (position == nightTypeInfoRow && themeInfoRow == -1 || position == themeInfoRow && nightTypeInfoRow != -1 || position == saveToGallerySectionRow) { - holder.itemView.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); + if (position == nightTypeInfoRow && themeInfoRow == -1 || position == lastShadowRow || position == themeInfoRow && nightTypeInfoRow != -1 || position == saveToGallerySectionRow || position == settings2Row) { + holder.itemView.setBackground(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); } else { - holder.itemView.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); + holder.itemView.setBackground(Theme.getThemedDrawable(mContext, R.drawable.greydivider, Theme.key_windowBackgroundGrayShadow)); } break; } @@ -2253,14 +2311,18 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { textCheckCell.setTextAndCheck(LocaleController.getString("SendByEnter", R.string.SendByEnter), preferences.getBoolean("send_by_enter", false), true); } else if (position == raiseToSpeakRow) { textCheckCell.setTextAndCheck(LocaleController.getString("RaiseToSpeak", R.string.RaiseToSpeak), SharedConfig.raiseToSpeak, true); + } else if (position == raiseToListenRow) { + textCheckCell.setTextAndCheck(LocaleController.getString("RaiseToListen", R.string.RaiseToListen), SharedConfig.raiseToListen, true); + } else if (position == pauseOnRecordRow) { + textCheckCell.setTextAndCheck(LocaleController.getString(R.string.PauseMusicOnRecord), SharedConfig.pauseMusicOnRecord, true); + } else if (position == pauseOnMediaRow) { + textCheckCell.setTextAndCheck(LocaleController.getString(R.string.PauseMusicOnMedia), SharedConfig.pauseMusicOnMedia, true); } else if (position == customTabsRow) { textCheckCell.setTextAndValueAndCheck(LocaleController.getString("ChromeCustomTabs", R.string.ChromeCustomTabs), LocaleController.getString("ChromeCustomTabsInfo", R.string.ChromeCustomTabsInfo), SharedConfig.customTabs, false, true); } else if (position == directShareRow) { textCheckCell.setTextAndValueAndCheck(LocaleController.getString("DirectShare", R.string.DirectShare), LocaleController.getString("DirectShareInfo", R.string.DirectShareInfo), SharedConfig.directShare, false, true); } else if (position == chatBlurRow) { 
textCheckCell.setTextAndCheck(LocaleController.getString("BlurInChat", R.string.BlurInChat), SharedConfig.chatBlurEnabled(), true); - } else if (position == lightModeRow) { - textCheckCell.setTextAndCheck(LocaleController.getString("LightMode", R.string.LightMode), SharedConfig.getLiteMode().enabled(), true); } break; } @@ -2306,13 +2368,21 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } case TYPE_TEXT_PREFERENCE: { TextCell cell = (TextCell) holder.itemView; - cell.setColors(Theme.key_windowBackgroundWhiteBlueText4, Theme.key_windowBackgroundWhiteBlueText4); if (position == backgroundRow) { + cell.setColors(Theme.key_windowBackgroundWhiteBlueText4, Theme.key_windowBackgroundWhiteBlueText4); cell.setTextAndIcon(LocaleController.getString("ChangeChatBackground", R.string.ChangeChatBackground), R.drawable.msg_background, false); } else if (position == editThemeRow) { + cell.setColors(Theme.key_windowBackgroundWhiteBlueText4, Theme.key_windowBackgroundWhiteBlueText4); cell.setTextAndIcon(LocaleController.getString("EditCurrentTheme", R.string.EditCurrentTheme), R.drawable.msg_theme, true); } else if (position == createNewThemeRow) { + cell.setColors(Theme.key_windowBackgroundWhiteBlueText4, Theme.key_windowBackgroundWhiteBlueText4); cell.setTextAndIcon(LocaleController.getString("CreateNewTheme", R.string.CreateNewTheme), R.drawable.msg_colors, false); + } else if (position == stickersRow) { + cell.setColors(Theme.key_windowBackgroundWhiteGrayIcon, Theme.key_windowBackgroundWhiteBlackText); + cell.setTextAndIcon(LocaleController.getString("StickersName", R.string.StickersName), R.drawable.msg2_sticker, false); + } else if (position == liteModeRow) { + cell.setColors(Theme.key_windowBackgroundWhiteGrayIcon, Theme.key_windowBackgroundWhiteBlackText); + cell.setTextAndIcon(LocaleController.getString("LiteMode", R.string.LiteMode), R.drawable.msg2_animations, false); } break; } @@ -2352,11 +2422,11 @@ public int getItemViewType(int position) { position == contactsReimportRow || position == contactsSortRow || position == bluetoothScoRow) { return TYPE_TEXT_SETTING; - } else if (position == automaticBrightnessInfoRow || position == scheduleLocationInfoRow || position == lightModeTopInfoRow) { + } else if (position == automaticBrightnessInfoRow || position == scheduleLocationInfoRow || position == swipeGestureInfoRow || position == stickersInfoRow || position == liteModeInfoRow) { return TYPE_TEXT_INFO_PRIVACY; } else if (position == themeInfoRow || position == nightTypeInfoRow || position == scheduleFromToInfoRow || position == settings2Row || position == newThemeInfoRow || position == chatListInfoRow || position == bubbleRadiusInfoRow || - position == swipeGestureInfoRow || position == saveToGallerySectionRow || position == appIconShadowRow) { + position == saveToGallerySectionRow || position == appIconShadowRow || position == lastShadowRow) { return TYPE_SHADOW; } else if (position == nightDisabledRow || position == nightScheduledRow || position == nightAutomaticRow || position == nightSystemDefaultRow) { return TYPE_THEME_TYPE; @@ -2368,8 +2438,8 @@ public int getItemViewType(int position) { } else if (position == automaticBrightnessRow) { return TYPE_BRIGHTNESS; } else if (position == scheduleLocationRow || position == enableAnimationsRow || position == sendByEnterRow || - position == raiseToSpeakRow || position == customTabsRow || - position == directShareRow || position == chatBlurRow || position == lightModeRow) { + position == raiseToSpeakRow || position == 
raiseToListenRow || position == pauseOnRecordRow || position == customTabsRow || + position == directShareRow || position == chatBlurRow || position == pauseOnMediaRow) { return TYPE_TEXT_CHECK; } else if (position == textSizeRow) { return TYPE_TEXT_SIZE; @@ -2383,7 +2453,7 @@ public int getItemViewType(int position) { return TYPE_THEME_ACCENT_LIST; } else if (position == bubbleRadiusRow) { return TYPE_BUBBLE_RADIUS; - } else if (position == backgroundRow || position == editThemeRow || position == createNewThemeRow) { + } else if (position == backgroundRow || position == editThemeRow || position == createNewThemeRow || position == stickersRow || position == liteModeRow) { return TYPE_TEXT_PREFERENCE; } else if (position == swipeGestureRow) { return TYPE_SWIPE_GESTURE; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ThemePreviewActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ThemePreviewActivity.java index a16586fb6d..0ac23c87e9 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ThemePreviewActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ThemePreviewActivity.java @@ -1844,7 +1844,7 @@ public void deleteTheme() { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } @@ -1875,11 +1875,13 @@ public boolean hasChanges() { colorPicker.setMinBrightness(0.05f); colorPicker.setMaxBrightness(0.8f); } - int colorsCount = accent.accentColor2 != 0 ? 2 : 1; - colorPicker.setType(1, hasChanges(1), 2, colorsCount, false, 0, false); - colorPicker.setColor(accent.accentColor, 0); - if (accent.accentColor2 != 0) { - colorPicker.setColor(accent.accentColor2, 1); + if (accent != null) { + int colorsCount = accent.accentColor2 != 0 ? 
2 : 1; + colorPicker.setType(1, hasChanges(1), 2, colorsCount, false, 0, false); + colorPicker.setColor(accent.accentColor, 0); + if (accent.accentColor2 != 0) { + colorPicker.setColor(accent.accentColor2, 1); + } } } else { patternLayout[a].addView(colorPicker, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER_HORIZONTAL, 0, 0, 0, 48)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ThemeSetUrlActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ThemeSetUrlActivity.java index 14ac71c666..aae01f4e1b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ThemeSetUrlActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ThemeSetUrlActivity.java @@ -373,7 +373,7 @@ public void afterTextChanged(Editable editable) { } themes.add(themeInfo); } - ThemesHorizontalListCell cell = new ThemesHorizontalListCell(context, ThemeActivity.THEME_TYPE_OTHER, themes, new ArrayList<>()) { + ThemesHorizontalListCell cell = new ThemesHorizontalListCell(context, this, ThemeActivity.THEME_TYPE_OTHER, themes, new ArrayList<>()) { @Override protected void updateRows() { builder.getDismissRunnable().run(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/TopicsFragment.java b/TMessagesProj/src/main/java/org/telegram/ui/TopicsFragment.java index 4edde87cd1..b807708622 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/TopicsFragment.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/TopicsFragment.java @@ -39,6 +39,7 @@ import android.view.animation.AccelerateDecelerateInterpolator; import android.widget.EditText; import android.widget.FrameLayout; +import android.widget.ImageView; import android.widget.TextView; import androidx.annotation.NonNull; @@ -131,6 +132,8 @@ import java.util.HashSet; import java.util.Iterator; +import tw.nekomimi.nekogram.NekoConfig; + public class TopicsFragment extends BaseFragment implements NotificationCenter.NotificationCenterDelegate, ChatActivityInterface, RightSlidingDialogContainer.BaseFragmentWithFullscreen { private final static int BOTTOM_BUTTON_TYPE_JOIN = 0; @@ -266,6 +269,7 @@ public class TopicsFragment extends BaseFragment implements NotificationCenter.N private FrameLayout topView; private RLottieImageView floatingButton; private boolean canShowProgress; + private ImageView closeReportSpam; @Override public View getFullscreenView() { @@ -282,6 +286,9 @@ public TopicsFragment(Bundle bundle) { } public static void prepareToSwitchAnimation(ChatActivity chatActivity) { + if (chatActivity.getParentLayout() == null) { + return; + } boolean needCreateTopicsFragment = false; if (chatActivity.getParentLayout().getFragmentStack().size() <= 1) { needCreateTopicsFragment = true; @@ -680,10 +687,10 @@ public void onSearchFilterCleared(FiltersView.MediaFilterData filterData) { editText.setCursorColor(Theme.getColor(Theme.key_chat_messagePanelCursor)); } other = menu.addItem(0, R.drawable.ic_ab_other, themeDelegate); - other.addSubItem(toggle_id, R.drawable.msg_discussion, LocaleController.getString("TopicViewAsMessages", R.string.TopicViewAsMessages)); - addMemberSubMenu = other.addSubItem(add_member_id, R.drawable.msg_addcontact, LocaleController.getString("AddMember", R.string.AddMember)); - createTopicSubmenu = other.addSubItem(create_topic_id, R.drawable.msg_topic_create, LocaleController.getString("CreateTopic", R.string.CreateTopic)); - deleteChatSubmenu = other.addSubItem(delete_chat_id, R.drawable.msg_leave, LocaleController.getString("LeaveMegaMenu", R.string.LeaveMegaMenu), themeDelegate); 
+ other.addSubItem(toggle_id, R.drawable.baseline_forum_24, LocaleController.getString("TopicViewAsMessages", R.string.TopicViewAsMessages)); + addMemberSubMenu = other.addSubItem(add_member_id, R.drawable.baseline_person_add_24, LocaleController.getString("AddMember", R.string.AddMember)); + createTopicSubmenu = other.addSubItem(create_topic_id, R.drawable.baseline_group_add_24, LocaleController.getString("CreateTopic", R.string.CreateTopic)); + deleteChatSubmenu = other.addSubItem(delete_chat_id, R.drawable.baseline_exit_to_app_24, LocaleController.getString("LeaveMegaMenu", R.string.LeaveMegaMenu), themeDelegate); avatarContainer = new ChatAvatarContainer(context, this, false); avatarContainer.getAvatarImageView().setRoundRadius(AndroidUtilities.dp(16)); @@ -825,9 +832,8 @@ public void onScrolled(@NonNull RecyclerView recyclerView, int dx, int dy) { onTopicSelectedListener.onTopicSelected(topic); } if (dialogsActivity != null) { - dialogsActivity.didSelectResult(-chatId, topic.id, true, false); + dialogsActivity.didSelectResult(-chatId, topic.id, true, false, this); } - removeFragmentOnTransitionEnd = true; return; } if (selectedTopics.size() > 0) { @@ -862,6 +868,7 @@ public void onScrolled(@NonNull RecyclerView recyclerView, int dx, int dy) { } } toggleSelection(view); + if (!NekoConfig.disableVibration.Bool()) view.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); return true; }); @@ -875,6 +882,7 @@ public void onScrolled(@NonNull RecyclerView recyclerView, int dx, int dy) { recyclerListView.setLayoutManager(layoutManager = new LinearLayoutManager(context) { private boolean fixOffset; + @Override public void scrollToPositionWithOffset(int position, int offset) { if (fixOffset) { @@ -989,6 +997,7 @@ public int scrollVerticallyBy(int dy, RecyclerView.Recycler recycler, RecyclerVi if (canShowHiddenArchive != canShowInternal) { canShowHiddenArchive = canShowInternal; if (pullViewState == ARCHIVE_ITEM_STATE_HIDDEN) { + if (!NekoConfig.disableVibration.Bool()) recyclerListView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); if (pullForegroundDrawable != null) { pullForegroundDrawable.colorize(canShowInternal); @@ -1190,6 +1199,21 @@ public void onClick(View v) { bottomOverlayProgress.setVisibility(View.INVISIBLE); bottomOverlayContainer.addView(bottomOverlayProgress, LayoutHelper.createFrame(30, 30, Gravity.CENTER)); + closeReportSpam = new ImageView(context); + closeReportSpam.setImageResource(R.drawable.miniplayer_close); + closeReportSpam.setContentDescription(LocaleController.getString("Close", R.string.Close)); + if (Build.VERSION.SDK_INT >= 21) { + closeReportSpam.setBackground(Theme.AdaptiveRipple.circle(getThemedColor(Theme.key_chat_topPanelClose))); + } + closeReportSpam.setColorFilter(new PorterDuffColorFilter(getThemedColor(Theme.key_chat_topPanelClose), PorterDuff.Mode.MULTIPLY)); + closeReportSpam.setScaleType(ImageView.ScaleType.CENTER); + bottomOverlayContainer.addView(closeReportSpam, LayoutHelper.createFrame(36, 36, Gravity.RIGHT | Gravity.TOP, 0, 6, 2, 0)); + closeReportSpam.setOnClickListener(v -> { + getMessagesController().hidePeerSettingsBar(-chatId, null, getCurrentChat()); + updateChatInfo(); + }); + closeReportSpam.setVisibility(View.GONE); + updateChatInfo(); fullscreenView = new FrameLayout(context) { @@ -1280,7 +1304,7 @@ public void setAlpha(float alpha) { for (BaseFragment fragment : getParentLayout().getFragmentStack()) { if (fragment instanceof DialogsActivity && ((DialogsActivity) 
fragment).isMainDialogList()) { MessagesStorage.TopicKey topicKey = ((DialogsActivity) fragment).getOpenedDialogId(); - if (topicKey.dialogId == -chatId ) { + if (topicKey.dialogId == -chatId) { selectedTopicForTablet = topicKey.topicId; break; } @@ -1304,7 +1328,7 @@ private void updateTopicsEmptyViewText() { spannableStringBuilder.setSpan(coloredImageSpan, 0, 1, 0); if (ChatObject.canUserDoAdminAction(getCurrentChat(), ChatObject.ACTION_MANAGE_TOPICS)) { topicsEmptyView.subtitle.setText( - AndroidUtilities.replaceCharSequence("%s", AndroidUtilities.replaceTags(LocaleController.getString("NoTopicsDescription", R.string.NoTopicsDescription)), spannableStringBuilder) + AndroidUtilities.replaceCharSequence("%s", AndroidUtilities.replaceTags(LocaleController.getString("NoTopicsDescription", R.string.NoTopicsDescription)), spannableStringBuilder) ); } else { String general = LocaleController.getString("General", R.string.General); @@ -1313,7 +1337,7 @@ private void updateTopicsEmptyViewText() { general = topic.title; } topicsEmptyView.subtitle.setText( - AndroidUtilities.replaceTags(LocaleController.formatString("NoTopicsDescriptionUser", R.string.NoTopicsDescriptionUser, general)) + AndroidUtilities.replaceTags(LocaleController.formatString("NoTopicsDescriptionUser", R.string.NoTopicsDescriptionUser, general)) ); } } @@ -1359,6 +1383,7 @@ public void switchToChat(boolean removeFragment) { private AvatarDrawable parentAvatarDrawable; private BackupImageView parentAvatarImageView; + private void openParentSearch() { if (parentDialogsActivity != null && parentDialogsActivity.getSearchItem() != null) { if (parentAvatarImageView == null) { @@ -1371,11 +1396,16 @@ private void openParentSearch() { parentDialogsActivity.getSearchItem().setSearchPaddingStart(52); parentDialogsActivity.getActionBar().setSearchAvatarImageView(parentAvatarImageView); parentDialogsActivity.getActionBar().onSearchFieldVisibilityChanged( - parentDialogsActivity.getSearchItem().toggleSearch(true) + parentDialogsActivity.getSearchItem().toggleSearch(true) ); } } + @Override + public boolean allowFinishFragmentInsteadOfRemoveFromStack() { + return false; + } + private void updateTopView() { float translation = 0; if (fragmentContextView != null) { @@ -1392,6 +1422,7 @@ private void updateTopView() { } float transitionPadding; + public void setTransitionPadding(int transitionPadding) { this.transitionPadding = transitionPadding; updateFloatingButtonOffset(); @@ -1634,6 +1665,7 @@ public boolean onTouchEvent(MotionEvent e) { } if (!canShowHiddenArchive) { canShowHiddenArchive = true; + if (!NekoConfig.disableVibration.Bool()) performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); if (pullForegroundDrawable != null) { pullForegroundDrawable.colorize(true); @@ -1734,11 +1766,12 @@ public void onClick(DialogInterface dialog, int which) { alertDialog.show(); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } private boolean showChatPreview(DialogCell cell) { + if (!NekoConfig.disableVibration.Bool()) cell.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); final ActionBarPopupWindow.ActionBarPopupWindowLayout[] previewMenu = new ActionBarPopupWindow.ActionBarPopupWindowLayout[1]; int flags = ActionBarPopupWindow.ActionBarPopupWindowLayout.FLAG_USE_SWIPEBACK; @@ -1864,7 +1897,7 @@ 
public void toggleMute() { if (ChatObject.canDeleteTopic(currentAccount, getCurrentChat(), topic)) { ActionBarMenuSubItem deleteItem = new ActionBarMenuSubItem(getParentActivity(), false, true); - deleteItem.setTextAndIcon(LocaleController.getPluralString("DeleteTopics", 1), R.drawable.msg_delete); + deleteItem.setTextAndIcon(LocaleController.getPluralString("DeleteTopics", 1), R.drawable.baseline_delete_24); deleteItem.setIconColor(getThemedColor(Theme.key_dialogRedIcon)); deleteItem.setTextColor(getThemedColor(Theme.key_dialogTextRed)); deleteItem.setMinimumWidth(160); @@ -2042,6 +2075,9 @@ private void clearSelectedTopics() { private void toggleSelection(View view) { if (view instanceof TopicDialogCell) { TopicDialogCell cell = (TopicDialogCell) view; + if (cell.forumTopic == null) { + return; + } int id = cell.forumTopic.id; if (!selectedTopics.remove(id)) { selectedTopics.add(id); @@ -2316,7 +2352,9 @@ private void updateChatInfo(boolean forceAnimate) { avatarContainer.setChatAvatar(chatLocal); + long dialog_id = -chatId; SharedPreferences preferences = MessagesController.getNotificationsSettings(currentAccount); + boolean show = preferences.getInt("dialog_bar_vis3" + dialog_id, 0) == 2; boolean showReport = preferences.getBoolean("dialog_bar_report" + (-chatId), false); boolean showBlock = preferences.getBoolean("dialog_bar_block" + (-chatId), false); @@ -2373,7 +2411,7 @@ private void updateChatInfo(boolean forceAnimate) { AndroidUtilities.updateViewVisibilityAnimated(bottomOverlayProgress, showProgress, 0.5f, animated); AndroidUtilities.updateViewVisibilityAnimated(bottomOverlayChatText, !showProgress, 0.5f, animated); setButtonType(BOTTOM_BUTTON_TYPE_JOIN); - } else if (showBlock || showReport) { + } else if (show && (showBlock || showReport)) { bottomOverlayChatText.setText(LocaleController.getString("ReportSpamAndLeave", R.string.ReportSpamAndLeave)); bottomOverlayChatText.setClickable(true); bottomOverlayChatText.setEnabled(true); @@ -2418,6 +2456,8 @@ private void setButtonType(int bottomButtonType) { if (this.bottomButtonType != bottomButtonType) { this.bottomButtonType = bottomButtonType; bottomOverlayChatText.setTextColorKey(bottomButtonType == BOTTOM_BUTTON_TYPE_JOIN ? Theme.key_chat_fieldOverlayText : Theme.key_chat_reportSpam); + closeReportSpam.setVisibility(bottomButtonType == BOTTOM_BUTTON_TYPE_REPORT ? View.VISIBLE : View.GONE); + updateChatInfo(); } } @@ -2608,8 +2648,9 @@ public void didReceivedNotification(int id, int account, Object... args) { } else if (id == NotificationCenter.chatSwithcedToForum) { } else if (id == NotificationCenter.closeChats) { - removeSelfFromStack(); - } if (id == NotificationCenter.openedChatChanged) { + removeSelfFromStack(true); + } + if (id == NotificationCenter.openedChatChanged) { if (getParentActivity() == null || !(inPreviewMode && AndroidUtilities.isTablet())) { return; } @@ -2689,6 +2730,7 @@ public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int } else if (viewType == VIEW_TYPE_EMPTY) { return new RecyclerListView.Holder(emptyView = new View(getContext()) { HashMap precalcEllipsized = new HashMap<>(); + @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { int width = MeasureSpec.getSize(widthMeasureSpec); @@ -2703,8 +2745,8 @@ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { if (oneline == null) { int nameLeft = AndroidUtilities.dp(!LocaleController.isRTL ? (isInPreviewMode() ? 11 : 50) + 4 : 18); int nameWidth = !LocaleController.isRTL ? 
- width - nameLeft - AndroidUtilities.dp(14 + 8) : - width - nameLeft - AndroidUtilities.dp((isInPreviewMode() ? 11 : 50) + 5 + 8); + width - nameLeft - AndroidUtilities.dp(14 + 8) : + width - nameLeft - AndroidUtilities.dp((isInPreviewMode() ? 11 : 50) + 5 + 8); nameWidth -= (int) Math.ceil(Theme.dialogs_timePaint.measureText("00:00")); oneline = Theme.dialogs_namePaint[0].measureText(title) <= nameWidth; precalcEllipsized.put(title, oneline); @@ -2933,7 +2975,7 @@ public void setTopicIcon(TLRPC.TL_forumTopic topic) { } else if (topic != null && topic.icon_emoji_id != 0) { setForumIcon(null); if (animatedEmojiDrawable == null || animatedEmojiDrawable.getDocumentId() != topic.icon_emoji_id) { - setAnimatedEmojiDrawable(new AnimatedEmojiDrawable(AnimatedEmojiDrawable.CACHE_TYPE_FORUM_TOPIC, currentAccount, topic.icon_emoji_id)); + setAnimatedEmojiDrawable(new AnimatedEmojiDrawable(openedForForward ? AnimatedEmojiDrawable.CACHE_TYPE_ALERT_PREVIEW_STATIC : AnimatedEmojiDrawable.CACHE_TYPE_FORUM_TOPIC, currentAccount, topic.icon_emoji_id)); } } else { setAnimatedEmojiDrawable(null); @@ -2947,6 +2989,7 @@ public void setTopicIcon(TLRPC.TL_forumTopic topic) { private Boolean hidden; private float hiddenT; private ValueAnimator hiddenAnimator; + private void updateHidden(boolean hidden, boolean animated) { if (this.hidden == null) { animated = false; @@ -2971,15 +3014,16 @@ private void updateHidden(boolean hidden, boolean animated) { setHiddenT(); } } + private void setHiddenT() { if (forumIcon instanceof ForumUtilities.GeneralTopicDrawable) { ((ForumUtilities.GeneralTopicDrawable) forumIcon).setColor( - ColorUtils.blendARGB(getThemedColor(Theme.key_chats_archivePullDownBackground), getThemedColor(Theme.key_avatar_background2Saved), hiddenT) + ColorUtils.blendARGB(getThemedColor(Theme.key_chats_archivePullDownBackground), getThemedColor(Theme.key_avatar_background2Saved), hiddenT) ); } if (topicIconInName != null && topicIconInName[0] instanceof ForumUtilities.GeneralTopicDrawable) { ((ForumUtilities.GeneralTopicDrawable) topicIconInName[0]).setColor( - ColorUtils.blendARGB(getThemedColor(Theme.key_chats_archivePullDownBackground), getThemedColor(Theme.key_avatar_background2Saved), hiddenT) + ColorUtils.blendARGB(getThemedColor(Theme.key_chats_archivePullDownBackground), getThemedColor(Theme.key_avatar_background2Saved), hiddenT) ); } invalidate(); @@ -3256,7 +3300,7 @@ public void onScrolled(@NonNull RecyclerView recyclerView, int dx, int dy) { return downloadsContainer; } else { FilteredSearchView filteredSearchView = new FilteredSearchView(TopicsFragment.this); - filteredSearchView.setChatPreviewDelegate(chatPreviewDelegate); + filteredSearchView.setChatPreviewDelegate(chatPreviewDelegate); filteredSearchView.setUiCallback(MessagesSearchContainer.this); filteredSearchView.recyclerListView.addOnScrollListener(new RecyclerView.OnScrollListener() { @Override @@ -3636,7 +3680,7 @@ public int getBottomOffset(int tag) { } }); if (inPreviewMode && !getMessagesController().isForum(-chatId)) { - finishFragment(); + finishFragment(); } } @@ -3772,7 +3816,7 @@ public void drawOverlay(Canvas canvas, View parent) { } private void prepareBlurBitmap() { - if (blurredView == null || parentLayout == null || SharedConfig.useLNavigation) { + if (blurredView == null || parentLayout == null) { return; } int w = (int) (fragmentView.getMeasuredWidth() / 6.0f); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/TopicsNotifySettingsFragments.java 
b/TMessagesProj/src/main/java/org/telegram/ui/TopicsNotifySettingsFragments.java index 3c91a8cf5e..98040c224f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/TopicsNotifySettingsFragments.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/TopicsNotifySettingsFragments.java @@ -142,7 +142,7 @@ public void didRemoveException(long dialog_id) { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/TwoStepVerificationActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/TwoStepVerificationActivity.java index c366283c19..709c9487bc 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/TwoStepVerificationActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/TwoStepVerificationActivity.java @@ -417,7 +417,7 @@ public void getOutline(View view, Outline outline) { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } }); @@ -629,7 +629,7 @@ protected void onReset() { showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } else { cancelPasswordReset(); @@ -1290,7 +1290,7 @@ private void showSetForcePasswordAlert() { builder.setNegativeButton(LocaleController.getString("ForceSetPasswordCancel", R.string.ForceSetPasswordCancel), (a1, a2) -> finishFragment()); AlertDialog alertDialog = builder.show(); - ((TextView)alertDialog.getButton(DialogInterface.BUTTON_NEGATIVE)).setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + ((TextView)alertDialog.getButton(DialogInterface.BUTTON_NEGATIVE)).setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } public void setBlockingAlert(int otherwiseRelogin) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/TwoStepVerificationSetupActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/TwoStepVerificationSetupActivity.java index 7843ade6c2..f517fbcf09 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/TwoStepVerificationSetupActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/TwoStepVerificationSetupActivity.java @@ -238,7 +238,7 @@ public void onFragmentDestroy() { } if (animationDrawables != null) { for (int a = 0; a < animationDrawables.length; a++) { - animationDrawables[a].recycle(); + animationDrawables[a].recycle(false); } animationDrawables = null; } @@ -285,7 +285,7 @@ public void onItemClick(int id) { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } } @@ -398,7 +398,7 @@ public void getOutline(View view, Outline outline) { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } else if (currentType == 
TYPE_ENTER_HINT) { onHintDone(); @@ -1448,11 +1448,10 @@ private void processNext() { } if (!editTextFirstRow.getText().toString().equals(firstPassword) && currentType == TYPE_CREATE_PASSWORD_STEP_2) { AndroidUtilities.shakeViewSpring(outlineTextFirstRow, 5); - if (!NekoConfig.disableVibration.Bool()) { - try { - outlineTextFirstRow.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } catch (Exception ignored) {} - } + try { + if (!NekoConfig.disableVibration.Bool()) + outlineTextFirstRow.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } catch (Exception ignored) {} try { Toast.makeText(getParentActivity(), LocaleController.getString("PasswordDoNotMatch", R.string.PasswordDoNotMatch), Toast.LENGTH_SHORT).show(); } catch (Exception e) { @@ -2157,14 +2156,14 @@ public boolean onBackPressed() { } @Override - public void finishFragment(boolean animated) { + public boolean finishFragment(boolean animated) { for (BaseFragment fragment : getParentLayout().getFragmentStack()) { if (fragment != this && fragment instanceof TwoStepVerificationSetupActivity) { ((TwoStepVerificationSetupActivity) fragment).floatingAutoAnimator.ignoreNextLayout(); } } - super.finishFragment(animated); + return super.finishFragment(animated); } private void showSetForcePasswordAlert() { @@ -2175,7 +2174,7 @@ private void showSetForcePasswordAlert() { builder.setNegativeButton(LocaleController.getString("ForceSetPasswordCancel", R.string.ForceSetPasswordCancel), (a1, a2) -> finishFragment()); AlertDialog alertDialog = builder.show(); - ((TextView)alertDialog.getButton(DialogInterface.BUTTON_NEGATIVE)).setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + ((TextView)alertDialog.getButton(DialogInterface.BUTTON_NEGATIVE)).setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } public void setBlockingAlert(int otherwiseRelogin) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/UsersSelectActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/UsersSelectActivity.java index 00a60a19dd..bfe2233073 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/UsersSelectActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/UsersSelectActivity.java @@ -285,8 +285,8 @@ public void addSpan(final GroupCreateSpan span, boolean animated) { } selectedContacts.put(uid, span); - editText.setHintVisible(false); - if (currentAnimation != null) { + editText.setHintVisible(false, TextUtils.isEmpty(editText.getText())); + if (currentAnimation != null && currentAnimation.isRunning()) { currentAnimation.setupEndValues(); currentAnimation.cancel(); } @@ -337,7 +337,7 @@ public void onAnimationEnd(Animator animator) { animationStarted = false; editText.setAllowDrawCursor(true); if (allSpans.isEmpty()) { - editText.setHintVisible(true); + editText.setHintVisible(true, true); } } }); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/VoiceMessageEnterTransition.java b/TMessagesProj/src/main/java/org/telegram/ui/VoiceMessageEnterTransition.java index 55329f38a3..b4d7208971 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/VoiceMessageEnterTransition.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/VoiceMessageEnterTransition.java @@ -10,10 +10,7 @@ import android.graphics.PorterDuff; import android.graphics.PorterDuffXfermode; import android.graphics.Shader; -import android.transition.Transition; -import android.view.View; import android.view.animation.LinearInterpolator; 
-import android.widget.FrameLayout; import androidx.core.graphics.ColorUtils; @@ -47,13 +44,15 @@ public VoiceMessageEnterTransition(ChatMessageCell messageView, ChatActivityEnte this.messageView = messageView; this.container = container; this.listView = listView; - fromRadius = chatActivityEnterView.getRecordCicle().drawingCircleRadius; messageView.setEnterTransitionInProgress(true); - recordCircle = chatActivityEnterView.getRecordCicle(); - recordCircle.voiceEnterTransitionInProgress = true; - recordCircle.skipDraw = true; + recordCircle = chatActivityEnterView.getRecordCircle(); + if (recordCircle != null) { + fromRadius = recordCircle.drawingCircleRadius; + recordCircle.voiceEnterTransitionInProgress = true; + recordCircle.skipDraw = true; + } gradientMatrix = new Matrix(); gradientPaint = new Paint(Paint.ANTI_ALIAS_FLAG); @@ -79,7 +78,9 @@ public VoiceMessageEnterTransition(ChatMessageCell messageView, ChatActivityEnte public void onAnimationEnd(Animator animation) { messageView.setEnterTransitionInProgress(false); container.removeTransition(VoiceMessageEnterTransition.this); - recordCircle.skipDraw = false; + if (recordCircle != null) { + recordCircle.skipDraw = false; + } } }); @@ -101,8 +102,8 @@ public void onDraw(Canvas canvas) { float moveProgress = progress; float hideWavesProgress = progress > step1Time ? 1f : progress / step1Time; - float fromCx = recordCircle.drawingCx + recordCircle.getX() - container.getX(); - float fromCy = recordCircle.drawingCy + recordCircle.getY() - container.getY(); + float fromCx = recordCircle == null ? 0 : recordCircle.drawingCx + recordCircle.getX() - container.getX(); + float fromCy = recordCircle == null ? 0 : recordCircle.drawingCy + recordCircle.getY() - container.getY(); float toCy; float toCx; @@ -139,7 +140,9 @@ public void onDraw(Canvas canvas) { circlePaint.setColor(ColorUtils.blendARGB(getThemedColor(Theme.key_chat_messagePanelVoiceBackground), getThemedColor(messageView.getRadialProgress().getCircleColorKey()), progress)); - recordCircle.drawWaves(canvas, cx, cy, 1f - hideWavesProgress); + if (recordCircle != null) { + recordCircle.drawWaves(canvas, cx, cy, 1f - hideWavesProgress); + } canvas.drawCircle(cx, cy, radius, circlePaint); @@ -165,7 +168,9 @@ public void onDraw(Canvas canvas) { //restore clipRect // canvas.restore(); - recordCircle.drawIcon(canvas, (int) fromCx, (int) fromCy, 1f - moveProgress); + if (recordCircle != null) { + recordCircle.drawIcon(canvas, (int) fromCx, (int) fromCy, 1f - moveProgress); + } } private int getThemedColor(String key) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/WallpapersListActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/WallpapersListActivity.java index 25038e871b..0317bb1b41 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/WallpapersListActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/WallpapersListActivity.java @@ -559,14 +559,14 @@ public void onItemClick(int id) { showDialog(alertDialog); TextView button = (TextView) alertDialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } else if (id == forward) { Bundle args = new Bundle(); args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 3); + args.putInt("dialogsType", DialogsActivity.DIALOGS_TYPE_FORWARD); DialogsActivity fragment = new DialogsActivity(args); - fragment.setDelegate((fragment1, dids, message, param) -> { + 
fragment.setDelegate((fragment1, dids, message, param, topicsFragment) -> { StringBuilder fmessage = new StringBuilder(); for (int b = 0; b < selectedWallPapers.size(); b++) { Object object = selectedWallPapers.valueAt(b); @@ -614,7 +614,7 @@ public void onItemClick(int id) { args1.putLong("chat_id", -did); } if (!MessagesController.getInstance(currentAccount).checkCanOpenChat(args1, fragment1)) { - return; + return true; } } NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.closeChats); @@ -623,6 +623,7 @@ public void onItemClick(int id) { presentFragment(chatActivity, true); SendMessagesHelper.getInstance(currentAccount).sendMessage(fmessage.toString(), did, null, null, null, true, null, null, null, true, 0, null, false); } + return true; }); presentFragment(fragment); } @@ -764,7 +765,7 @@ public boolean supportsPredictiveItemAnimations() { showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } }); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/WebviewActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/WebviewActivity.java index df4d3265a1..1938448195 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/WebviewActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/WebviewActivity.java @@ -190,7 +190,7 @@ public void onItemClick(int id) { progressView.setScaleY(0.1f); progressView.setVisibility(View.INVISIBLE); } else if (type == TYPE_STAT) { - actionBar.setBackgroundColor(Theme.getColor(Theme.key_player_actionBar)); + actionBar.setBackgroundColor(Theme.getColor(Theme.key_dialogBackground)); actionBar.setItemsColor(Theme.getColor(Theme.key_player_actionBarItems), false); actionBar.setItemsBackgroundColor(Theme.getColor(Theme.key_player_actionBarSelector), false); actionBar.setTitleColor(Theme.getColor(Theme.key_player_actionBarTitle)); @@ -423,7 +423,7 @@ public ArrayList getThemeDescriptions() { } else { themeDescriptions.add(new ThemeDescription(fragmentView, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_windowBackgroundWhite)); - themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_player_actionBar)); + themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_dialogBackground)); themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_ITEMSCOLOR, null, null, null, null, Theme.key_player_actionBarItems)); themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_TITLECOLOR, null, null, null, null, Theme.key_player_actionBarTitle)); themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_SUBTITLECOLOR, null, null, null, null, Theme.key_player_actionBarTitle)); diff --git a/TMessagesProj/src/main/java/org/webrtc/DefaultVideoEncoderFactory.java b/TMessagesProj/src/main/java/org/webrtc/DefaultVideoEncoderFactory.java index 76896b6b2d..87dccd7f82 100644 --- a/TMessagesProj/src/main/java/org/webrtc/DefaultVideoEncoderFactory.java +++ b/TMessagesProj/src/main/java/org/webrtc/DefaultVideoEncoderFactory.java @@ -11,6 +11,9 @@ package org.webrtc; import androidx.annotation.Nullable; + +import com.google.android.exoplayer2.util.Log; + import java.util.Arrays; import java.util.LinkedHashSet; diff --git 
a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java index 8e9bac2ecc..b7f9bb04ed 100644 --- a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java +++ b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java @@ -11,18 +11,21 @@ package org.webrtc; import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX; +import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX_C2; +import static org.webrtc.MediaCodecUtils.HISI_PREFIX; import static org.webrtc.MediaCodecUtils.INTEL_PREFIX; import static org.webrtc.MediaCodecUtils.QCOM_PREFIX; -import static org.webrtc.MediaCodecUtils.HISI_PREFIX; +import static org.webrtc.MediaCodecUtils.SOFTWARE_IMPLEMENTATION_PREFIXES; import android.media.MediaCodecInfo; -import android.media.MediaCodecList; import android.os.Build; +import androidx.annotation.Nullable; + +import org.telegram.messenger.FileLog; import org.telegram.messenger.voip.Instance; import org.telegram.messenger.voip.VoIPService; -import androidx.annotation.Nullable; import java.util.ArrayList; import java.util.List; @@ -160,20 +163,38 @@ public VideoCodecInfo[] getSupportedCodecs() { private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) { ArrayList infos = MediaCodecUtils.getSortedCodecsList(); int count = infos.size(); + MediaCodecInfo info2 = null; for (int i = 0; i < count; ++i) { MediaCodecInfo info = infos.get(i); if (info == null || !info.isEncoder()) { continue; } - if (isSupportedCodec(info, type)) { + if (isSupportedCodec(info, type, true)) { return info; } + if (info2 == null && isSupportedCodec(info, type, false)) { + info2 = info; + } } - return null; // No support for this type. + if (info2 == null) { + StringBuilder stringBuilder = new StringBuilder(); + for (int i = 0; i < count; ++i) { + MediaCodecInfo info = infos.get(i); + if (info == null || !info.isEncoder()) { + continue; + } + if (MediaCodecUtils.codecSupportsType(info, type)) { + stringBuilder.append(info.getName()).append(", "); + } + } + + FileLog.e("can't create video encoder " + type.mimeType() + ", supported codecs" + stringBuilder); + } + return info2; } // Returns true if the given MediaCodecInfo indicates a supported encoder for the given type. - private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) { + private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type, boolean ensureHardwareSupportedInSdk) { if (!MediaCodecUtils.codecSupportsType(info, type)) { return false; } @@ -183,12 +204,12 @@ private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) { == null) { return false; } - return isHardwareSupportedInCurrentSdk(info, type) && isMediaCodecAllowed(info); + return isHardwareSupportedInCurrentSdk(info, type, ensureHardwareSupportedInSdk) && isMediaCodecAllowed(info); } // Returns true if the given MediaCodecInfo indicates a hardware module that is supported on the // current SDK. 
-  private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecMimeType type) {
+  private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecMimeType type, boolean ensureHardwareSupported) {
    if (VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().groupCall != null) {
      return false;
    }
@@ -198,9 +219,9 @@ private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecM
    }
    switch (type) {
      case VP8:
-        return isHardwareSupportedInCurrentSdkVp8(info);
+        return isHardwareSupportedInCurrentSdkVp8(info, ensureHardwareSupported);
      case VP9:
-        return isHardwareSupportedInCurrentSdkVp9(info);
+        return isHardwareSupportedInCurrentSdkVp9(info, ensureHardwareSupported);
      case H264:
        return isHardwareSupportedInCurrentSdkH264(info);
      case H265:
@@ -209,30 +230,51 @@ private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecM
    return false;
  }

-  private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) {
+  private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info, boolean ensureHardwareSupproted) {
    if (!Instance.getGlobalServerConfig().enable_vp8_encoder) {
      return false;
    }
    String name = info.getName();
    // QCOM Vp8 encoder is supported in KITKAT or later.
-    return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
+    if ((name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
        // Hisi VP8 encoder seems to be supported. Needs more testing.
        || (name.startsWith(HISI_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
        // Exynos VP8 encoder is supported in M or later.
        || (name.startsWith(EXYNOS_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
        // Intel Vp8 encoder is supported in LOLLIPOP or later, with the intel encoder enabled.
        || (name.startsWith(INTEL_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
-            && enableIntelVp8Encoder);
+            && enableIntelVp8Encoder)
+        || ((name.startsWith(EXYNOS_PREFIX_C2) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M))) {
+      return true;
+    }
+    if (!ensureHardwareSupproted) {
+      for (int i = 0; i < SOFTWARE_IMPLEMENTATION_PREFIXES.length; i++) {
+        if (name.startsWith(SOFTWARE_IMPLEMENTATION_PREFIXES[i])) {
+          return true;
+        }
+      }
+    }
+    return false;
  }

-  private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info) {
+  private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info, boolean ensureHardwareSupproted) {
    if (!Instance.getGlobalServerConfig().enable_vp9_encoder) {
      return false;
    }
    String name = info.getName();
-    return (name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX) || name.startsWith(HISI_PREFIX))
-        // Both QCOM and Exynos VP9 encoders are supported in N or later.
-        && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
+    if ((name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX) || name.startsWith(HISI_PREFIX))
+        // Both QCOM and Exynos VP9 encoders are supported in N or later.
+ && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { + return true; + } + if (!ensureHardwareSupproted) { + for (int i = 0; i < SOFTWARE_IMPLEMENTATION_PREFIXES.length; i++) { + if (name.startsWith(SOFTWARE_IMPLEMENTATION_PREFIXES[i])) { + return true; + } + } + } + return false; } private boolean isHardwareSupportedInCurrentSdkH264(MediaCodecInfo info) { diff --git a/TMessagesProj/src/main/java/org/webrtc/MediaCodecUtils.java b/TMessagesProj/src/main/java/org/webrtc/MediaCodecUtils.java index 40dca27a5d..f59a21d92f 100644 --- a/TMessagesProj/src/main/java/org/webrtc/MediaCodecUtils.java +++ b/TMessagesProj/src/main/java/org/webrtc/MediaCodecUtils.java @@ -16,14 +16,12 @@ import android.media.MediaCodecList; import android.os.Build; -import org.telegram.messenger.FileLog; -import org.telegram.messenger.voip.VoIPService; - import androidx.annotation.Nullable; +import org.telegram.messenger.FileLog; + import java.util.ArrayList; import java.util.Collections; -import java.util.Comparator; import java.util.HashMap; import java.util.Map; @@ -35,6 +33,7 @@ class MediaCodecUtils { // Prefixes for supported hardware encoder/decoder component names. static final String EXYNOS_PREFIX = "OMX.Exynos."; + static final String EXYNOS_PREFIX_C2 = "c2.exynos."; static final String INTEL_PREFIX = "OMX.Intel."; static final String NVIDIA_PREFIX = "OMX.Nvidia."; static final String QCOM_PREFIX = "OMX.qcom."; diff --git a/TMessagesProj/src/main/java/org/webrtc/TextureViewRenderer.java b/TMessagesProj/src/main/java/org/webrtc/TextureViewRenderer.java index a84ff524cd..981d60b51e 100644 --- a/TMessagesProj/src/main/java/org/webrtc/TextureViewRenderer.java +++ b/TMessagesProj/src/main/java/org/webrtc/TextureViewRenderer.java @@ -12,6 +12,7 @@ import androidx.annotation.Nullable; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LiteMode; import org.telegram.messenger.SharedConfig; import org.telegram.messenger.voip.VoIPService; @@ -55,7 +56,7 @@ public class TextureViewRenderer extends TextureView Runnable updateScreenRunnable; public void setBackgroundRenderer(@Nullable TextureView backgroundRenderer) { - if (SharedConfig.getLiteMode().enabled()) { + if (!LiteMode.isEnabled(LiteMode.FLAG_CALLS_ANIMATIONS)) { return; } this.backgroundRenderer = backgroundRenderer; diff --git a/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioTrack.java b/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioTrack.java index dfe8dc849b..a24756880e 100644 --- a/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioTrack.java +++ b/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioTrack.java @@ -18,14 +18,16 @@ import android.media.AudioTrack; import android.os.Build; import android.os.Process; + import androidx.annotation.Nullable; -import java.lang.Thread; -import java.nio.ByteBuffer; +import org.telegram.messenger.FileLog; import org.webrtc.ContextUtils; import org.webrtc.Logging; import org.webrtc.ThreadUtils; +import java.nio.ByteBuffer; + public class WebRtcAudioTrack { private static final boolean DEBUG = false; @@ -333,21 +335,30 @@ private boolean startPlayout() { } private boolean stopPlayout() { - threadChecker.checkIsOnValidThread(); - Logging.d(TAG, "stopPlayout"); - assertTrue(audioThread != null); - logUnderrunCount(); - audioThread.stopThread(); - - Logging.d(TAG, "Stopping the AudioTrackThread..."); - audioThread.interrupt(); - if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) { - Logging.e(TAG, "Join of 
AudioTrackThread timed out."); - WebRtcAudioUtils.logAudioState(TAG); + try { + threadChecker.checkIsOnValidThread(); + Logging.d(TAG, "stopPlayout"); + assertTrue(audioThread != null); + logUnderrunCount(); + audioThread.stopThread(); + + Logging.d(TAG, "Stopping the AudioTrackThread..."); + audioThread.interrupt(); + if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) { + Logging.e(TAG, "Join of AudioTrackThread timed out."); + WebRtcAudioUtils.logAudioState(TAG); + } + Logging.d(TAG, "AudioTrackThread has now been stopped."); + } catch (Throwable e) { + FileLog.e(e); + } finally { + audioThread = null; + } + try { + releaseAudioResources(); + } catch (Throwable e) { + FileLog.e(e); } - Logging.d(TAG, "AudioTrackThread has now been stopped."); - audioThread = null; - releaseAudioResources(); return true; } @@ -507,7 +518,11 @@ public static boolean isSpeakerMuted() { private void releaseAudioResources() { Logging.d(TAG, "releaseAudioResources"); if (audioTrack != null) { - audioTrack.release(); + try { + audioTrack.release(); + } catch (Throwable e) { + FileLog.e(e); + } audioTrack = null; } } diff --git a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/config/cell/ConfigCellCustom.java b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/config/cell/ConfigCellCustom.java index 643c5366d2..2fab9fbef9 100644 --- a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/config/cell/ConfigCellCustom.java +++ b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/config/cell/ConfigCellCustom.java @@ -31,6 +31,10 @@ public boolean isEnabled() { return enabled; } + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } + public void onBindViewHolder(RecyclerView.ViewHolder holder) { // Not Used } diff --git a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/config/cell/ConfigCellTextCheck.java b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/config/cell/ConfigCellTextCheck.java index 9b52fd0df1..961fdbbdaf 100644 --- a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/config/cell/ConfigCellTextCheck.java +++ b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/config/cell/ConfigCellTextCheck.java @@ -13,7 +13,7 @@ public class ConfigCellTextCheck extends AbstractConfigCell { private final String title; private final String subtitle; private boolean enabled = true; - public TextCheckCell cell; //TODO getCell() in NekomuraTGCell + public TextCheckCell cell; public ConfigCellTextCheck(ConfigItem bind) { this.bindConfig = bind; diff --git a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/settings/NekoAccountSettingsActivity.java b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/settings/NekoAccountSettingsActivity.java index 374965bcc6..7829106101 100644 --- a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/settings/NekoAccountSettingsActivity.java +++ b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/settings/NekoAccountSettingsActivity.java @@ -182,7 +182,7 @@ public void onItemClick(int id) { showDialog(dialog12); TextView button = (TextView) dialog12.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } }); builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); @@ -191,7 +191,7 @@ public void onItemClick(int id) { showDialog(dialog); TextView button = (TextView) dialog.getButton(DialogInterface.BUTTON_POSITIVE); if (button != null) { - 
button.setTextColor(Theme.getColor(Theme.key_dialogTextRed2)); + button.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); } } }); diff --git a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/settings/NekoExperimentalSettingsActivity.java b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/settings/NekoExperimentalSettingsActivity.java index 63319a518e..e3d8abf8d9 100644 --- a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/settings/NekoExperimentalSettingsActivity.java +++ b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/settings/NekoExperimentalSettingsActivity.java @@ -81,7 +81,7 @@ public class NekoExperimentalSettingsActivity extends BaseNekoXSettingsActivity null, null, (input) -> input.isEmpty() ? (String) NaConfig.INSTANCE.getCustomChannelLabel().defaultValue : input)); - private final AbstractConfigCell smoothKeyboardRow = cellGroup.appendCell(new ConfigCellTextCheck(NekoConfig.smoothKeyboard)); +// private final AbstractConfigCell smoothKeyboardRow = cellGroup.appendCell(new ConfigCellTextCheck(NekoConfig.smoothKeyboard)); private final AbstractConfigCell enhancedFileLoaderRow = cellGroup.appendCell(new ConfigCellTextCheck(NekoConfig.enhancedFileLoader)); private final AbstractConfigCell mediaPreviewRow = cellGroup.appendCell(new ConfigCellTextCheck(NekoConfig.mediaPreview)); private final AbstractConfigCell proxyAutoSwitchRow = cellGroup.appendCell(new ConfigCellTextCheck(NekoConfig.proxyAutoSwitch)); @@ -235,16 +235,7 @@ public void onItemClick(int id) { // Cells: Set OnSettingChanged Callbacks cellGroup.callBackSettingsChanged = (key, newValue) -> { - if (key.equals(NekoConfig.smoothKeyboard.getKey())) { - SharedConfig.setSmoothKeyboard((boolean) newValue); - if (SharedConfig.smoothKeyboard && getParentActivity() != null) { - getParentActivity().getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_PAN); - } - if (SharedConfig.smoothKeyboard) { - tooltip.setInfoText(AndroidUtilities.replaceTags(LocaleController.formatString("BetaWarning", R.string.BetaWarning))); - tooltip.showWithAction(0, UndoView.ACTION_CACHE_WAS_CLEARED, null, null); - } - } else if (key.equals(NekoConfig.mediaPreview.getKey())) { + if (key.equals(NekoConfig.mediaPreview.getKey())) { if ((boolean) newValue) { tooltip.setInfoText(AndroidUtilities.replaceTags(LocaleController.formatString("BetaWarning", R.string.BetaWarning))); tooltip.showWithAction(0, UndoView.ACTION_CACHE_WAS_CLEARED, null, null); @@ -477,9 +468,9 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { } else { // Default binds a.onBindViewHolder(holder); - if (position == cellGroup.rows.indexOf(smoothKeyboardRow) && AndroidUtilities.isTablet()) { - holder.itemView.setVisibility(View.GONE); - } +// if (position == cellGroup.rows.indexOf(smoothKeyboardRow) && AndroidUtilities.isTablet()) { +// holder.itemView.setVisibility(View.GONE); +// } } } } diff --git a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/settings/NekoGeneralSettingsActivity.java b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/settings/NekoGeneralSettingsActivity.java index b704c84d37..304de6b873 100644 --- a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/settings/NekoGeneralSettingsActivity.java +++ b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/settings/NekoGeneralSettingsActivity.java @@ -350,7 +350,7 @@ public void onItemClick(int id) { LocaleController.getString("ProviderMicrosoftTranslator", R.string.ProviderMicrosoftTranslator), LocaleController.getString("ProviderMicrosoftTranslator", 
R.string.ProviderYouDao), LocaleController.getString("ProviderMicrosoftTranslator", R.string.ProviderDeepLTranslate), - LocaleController.getString("ProviderTelegram", R.string.ProviderTelegram) + LocaleController.getString("ProviderTelegramAPI", R.string.ProviderTelegramAPI) }, (i, __) -> { boolean needReset = NekoConfig.translationProvider.Int() - 1 != i && (NekoConfig.translationProvider.Int() == 1 || i == 0); NekoConfig.translationProvider.setConfigInt(i + 1); @@ -461,6 +461,17 @@ public void onItemClick(int id) { boolean enabled = (Boolean) newValue; ((ConfigCellTextCheck) mapDriftingFixForGoogleMapsRow).setEnabled(!enabled); listAdapter.notifyItemChanged(cellGroup.rows.indexOf(mapDriftingFixForGoogleMapsRow)); + } else if (key.equals(NekoConfig.useTelegramTranslateInChat.getKey())) { + var cell = (TextSettingsCell) (listView.findViewHolderForAdapterPosition(cellGroup.rows.indexOf(translationProviderRow)).itemView); + if (NekoConfig.useTelegramTranslateInChat.Bool()) { + NekoConfig.translationProvider.setConfigInt(Translator.providerTelegram); + ((ConfigCellCustom) translationProviderRow).setEnabled(false); + cell.setEnabled(false); + } else { + ((ConfigCellCustom) translationProviderRow).setEnabled(true); + cell.setEnabled(true); + } + listAdapter.notifyItemChanged(cellGroup.rows.indexOf(translationProviderRow)); } }; @@ -735,34 +746,35 @@ public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { if (position == cellGroup.rows.indexOf(translationProviderRow)) { String value; switch (NekoConfig.translationProvider.Int()) { - case 1: + case Translator.providerGoogle: value = LocaleController.getString("ProviderGoogleTranslate", R.string.ProviderGoogleTranslate); break; - case 2: + case Translator.providerGoogleCN: value = LocaleController.getString("ProviderGoogleTranslateCN", R.string.ProviderGoogleTranslateCN); break; - case 3: + case Translator.providerYandex: value = LocaleController.getString("ProviderYandexTranslate", R.string.ProviderYandexTranslate); break; - case 4: + case Translator.providerLingo: value = LocaleController.getString("ProviderLingocloud", R.string.ProviderLingocloud); break; - case 5: + case Translator.providerMicrosoft: value = LocaleController.getString("ProviderMicrosoftTranslator", R.string.ProviderMicrosoftTranslator); break; - case 6: + case Translator.providerYouDao: value = LocaleController.getString("ProviderYouDao", R.string.ProviderYouDao); break; - case 7: + case Translator.providerDeepL: value = LocaleController.getString("ProviderDeepLTranslate", R.string.ProviderDeepLTranslate); break; - case 8: - value = LocaleController.getString("ProviderTelegram", R.string.ProviderTelegram); + case Translator.providerTelegram: + value = LocaleController.getString("ProviderTelegramAPI", R.string.ProviderTelegramAPI); break; default: value = "Unknown"; } textCell.setTextAndValue(LocaleController.getString("TranslationProvider", R.string.TranslationProvider), value, true); + if (NekoConfig.useTelegramTranslateInChat.Bool()) textCell.setEnabled(false); } else if (position == cellGroup.rows.indexOf(pgpAppRow)) { textCell.setTextAndValue(LocaleController.getString("OpenPGPApp", R.string.OpenPGPApp), NekoXConfig.getOpenPGPAppName(), true); } else if (position == cellGroup.rows.indexOf(translateToLangRow)) { @@ -835,6 +847,9 @@ private void setCanNotChange() { ((ConfigCellTextCheck) mapDriftingFixForGoogleMapsRow).setEnabled(false); } + if (NekoConfig.useTelegramTranslateInChat.Bool()) + ((ConfigCellCustom) translationProviderRow).setEnabled(false); 
+
        boolean enabled;

        enabled = NekoConfig.largeAvatarInDrawer.Int() > 0;
diff --git a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/transtale/Translator.kt b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/transtale/Translator.kt
index 54c2a65bc0..ab1c2908d9 100644
--- a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/transtale/Translator.kt
+++ b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/transtale/Translator.kt
@@ -108,7 +108,7 @@ interface Translator {
                    language = if (provider == providerMicrosoft) "zh-HanT" else "zh-TW"
                }
            }
-            providerTelegram -> language = TelegramTranslator.convertLanguageCode(language, country)
+            providerTelegram -> language = TelegramAPITranslator.convertLanguageCode(language, country)

        val translator = when (provider) {
@@ -118,7 +118,7 @@
            providerMicrosoft -> MicrosoftTranslator
            providerYouDao -> YouDaoTranslator
            providerDeepL -> DeepLTranslator
-            providerTelegram -> TelegramTranslator
+            providerTelegram -> TelegramAPITranslator
            else -> throw IllegalArgumentException()
        }
diff --git a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/transtale/source/TelegramAPITranslator.kt b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/transtale/source/TelegramAPITranslator.kt
new file mode 100644
index 0000000000..de04a3cade
--- /dev/null
+++ b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/transtale/source/TelegramAPITranslator.kt
@@ -0,0 +1,86 @@
+package tw.nekomimi.nekogram.transtale.source
+
+import android.text.TextUtils
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.InternalCoroutinesApi
+import kotlinx.coroutines.suspendCancellableCoroutine
+import kotlinx.coroutines.withContext
+import org.telegram.messenger.FileLog
+import org.telegram.messenger.SharedConfig
+import org.telegram.messenger.UserConfig
+import org.telegram.tgnet.ConnectionsManager
+import org.telegram.tgnet.TLObject
+import org.telegram.tgnet.TLRPC
+import org.telegram.tgnet.TLRPC.TL_error
+import org.telegram.tgnet.TLRPC.TL_messages_translateResult
+import org.telegram.tgnet.TLRPC.TL_messages_translateText
+import tw.nekomimi.nekogram.transtale.Translator
+import java.util.*
+import kotlin.coroutines.resume
+import kotlin.coroutines.resumeWithException
+import kotlin.coroutines.suspendCoroutine
+
+object TelegramAPITranslator : Translator {
+
+//    val targetLanguages = listOf("DE", "EN", "ES", "FR", "IT", "JA", "NL", "PL", "PT", "RU", "ZH")
+
+    @OptIn(InternalCoroutinesApi::class)
+    override suspend fun doTranslate(from: String, to: String, query: String): String {
+
+        return suspendCoroutine {
+            val req = TL_messages_translateText()
+            req.peer = null
+            req.flags = req.flags or 2
+            req.text.add(TLRPC.TL_textWithEntities().apply {
+                text = query
+            })
+            req.to_lang = to
+
+            try {
+                ConnectionsManager.getInstance(UserConfig.selectedAccount).sendRequest(req) { res: TLObject?, err: TL_error? ->
+                    if (res is TL_messages_translateResult && res.result.isNotEmpty()) {
+                        it.resume(res.result[0].text)
+                    } else {
+                        FileLog.e(err?.text)
+                        it.resumeWithException(RuntimeException("Failed to translate by Telegram API"))
+                    }
+                }
+            } catch (e: Exception) {
+                FileLog.e(e)
+                it.resumeWithException(e)
+            }
+        }
+    }
+
+    @JvmStatic
+    fun convertLanguageCode(language: String, country: String): String {
+        val languageLowerCase = language.lowercase(Locale.getDefault())
+        val code: String = if (!TextUtils.isEmpty(country)) {
+            val countryUpperCase = country.uppercase(Locale.getDefault())
+            if (targetLanguages.contains("$languageLowerCase-$countryUpperCase")) {
+                "$languageLowerCase-$countryUpperCase"
+            } else if (languageLowerCase == "zh") {
+                when (countryUpperCase) {
+                    "DG" -> "zh-CN"
+                    "zh-TW" -> "zh-TW"
+                    else -> languageLowerCase
+                }
+            } else {
+                languageLowerCase
+            }
+        } else {
+            languageLowerCase
+        }
+        return code
+    }
+
+    private val targetLanguages = listOf(
+        "sq", "ar", "am", "az", "ga", "et", "eu", "be", "bg", "is", "pl", "bs", "fa",
+        "af", "da", "de", "ru", "fr", "tl", "fi", "fy", "km", "ka", "gu", "kk", "ht",
+        "ko", "ha", "nl", "ky", "gl", "ca", "cs", "kn", "co", "hr", "ku", "la", "lv",
+        "lo", "lt", "lb", "ro", "mg", "mt", "mr", "ml", "ms", "mk", "mi", "mn", "bn",
+        "my", "hmn", "xh", "zu", "ne", "no", "pa", "pt", "ps", "ny", "ja", "sv", "sm",
+        "sr", "st", "si", "eo", "sk", "sl", "sw", "gd", "ceb", "so", "tg", "te", "ta",
+        "th", "tr", "cy", "ur", "uk", "uz", "es", "iw", "el", "haw", "sd", "hu", "sn",
+        "hy", "ig", "it", "yi", "hi", "su", "id", "jw", "en", "yo", "vi", "zh-TW", "zh-CN", "zh")
+}
diff --git a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/transtale/source/TelegramTranslator.kt b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/transtale/source/TelegramTranslator.kt
deleted file mode 100644
index bd2917c23b..0000000000
--- a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/transtale/source/TelegramTranslator.kt
+++ /dev/null
@@ -1,90 +0,0 @@
-package tw.nekomimi.nekogram.transtale.source
-
-import android.text.TextUtils
-import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.withContext
-import org.json.JSONObject
-import org.telegram.messenger.LocaleController
-import org.telegram.messenger.R
-import org.telegram.messenger.UserConfig
-import org.telegram.tgnet.ConnectionsManager
-import org.telegram.tgnet.TLObject
-import org.telegram.tgnet.TLRPC.*
-import tw.nekomimi.nekogram.NekoConfig
-import tw.nekomimi.nekogram.transtale.TransUtils
-import tw.nekomimi.nekogram.transtale.Translator
-import tw.nekomimi.nekogram.transtale.applyProxy
-import tw.nekomimi.nekogram.utils.applyIf
-import java.util.*
-import java.util.concurrent.CountDownLatch
-import java.util.concurrent.atomic.AtomicReference
-
-
-object TelegramTranslator : Translator {
-
-    @JvmStatic
-    fun convertLanguageCode(language: String, country: String): String {
-        val languageLowerCase = language.lowercase(Locale.getDefault())
-        val code: String = if (!TextUtils.isEmpty(country)) {
-            val countryUpperCase = country.uppercase(Locale.getDefault())
-            if (targetLanguages.contains("$languageLowerCase-$countryUpperCase")) {
-                "$languageLowerCase-$countryUpperCase"
-            } else if (languageLowerCase == "zh") {
-                when (countryUpperCase) {
-                    "DG" -> "zh-CN"
-                    "zh-TW" -> "zh-TW"
-                    else -> languageLowerCase
-                }
-            } else {
-                languageLowerCase
-            }
-        } else {
-            languageLowerCase
-        }
-        return code
-    }
-
-    override suspend fun doTranslate(from: String, to: String, query: String): String {
-        val result: AtomicReference =
AtomicReference() - val latch = CountDownLatch(1) - - val req = TL_messages_translateText() - req.flags = req.flags or 2 - req.to_lang = to - req.text = query - ConnectionsManager.getInstance(UserConfig.selectedAccount).sendRequest( - req - ) { res: TLObject?, error: TL_error? -> - if (error == null) { - if (res is TL_messages_translateResultText) { - result.set(res.text) - } else { - result.set(UnsupportedOperationException(LocaleController.getString("TranslateApiUnsupported", R.string.TranslateApiUnsupported))) - } - } else { - result.set(UnsupportedOperationException(error.text)) - } - latch.countDown() - } - - withContext(Dispatchers.IO) { - latch.await() - } - val s: Any = result.get() - return if (s is String) { - s.toString() - } else { - error(s.toString()) - } - } - - private val targetLanguages = listOf( - "sq", "ar", "am", "az", "ga", "et", "eu", "be", "bg", "is", "pl", "bs", "fa", - "af", "da", "de", "ru", "fr", "tl", "fi", "fy", "km", "ka", "gu", "kk", "ht", - "ko", "ha", "nl", "ky", "gl", "ca", "cs", "kn", "co", "hr", "ku", "la", "lv", - "lo", "lt", "lb", "ro", "mg", "mt", "mr", "ml", "ms", "mk", "mi", "mn", "bn", - "my", "hmn", "xh", "zu", "ne", "no", "pa", "pt", "ps", "ny", "ja", "sv", "sm", - "sr", "st", "si", "eo", "sk", "sl", "sw", "gd", "ceb", "so", "tg", "te", "ta", - "th", "tr", "cy", "ur", "uk", "uz", "es", "iw", "el", "haw", "sd", "hu", "sn", - "hy", "ig", "it", "yi", "hi", "su", "id", "jw", "en", "yo", "vi", "zh-TW", "zh-CN", "zh") -} diff --git a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/ui/BottomBuilder.kt b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/ui/BottomBuilder.kt index fa3898e266..275e52d362 100644 --- a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/ui/BottomBuilder.kt +++ b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/ui/BottomBuilder.kt @@ -255,6 +255,10 @@ class BottomBuilder(val ctx: Context, val needFocus: Boolean = true, val bgColor } } + fun setTitleMultipleLines(multilines: Boolean) { + builder.setTitleMultipleLines(multilines) + } + fun create() = builder.create() fun show() = builder.show() fun dismiss() { diff --git a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/utils/AlertUtil.kt b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/utils/AlertUtil.kt index 525da9f84f..2b9524bc40 100644 --- a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/utils/AlertUtil.kt +++ b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/utils/AlertUtil.kt @@ -144,7 +144,7 @@ object AlertUtil { @JvmStatic fun showProgress(ctx: Context, text: String = LocaleController.getString("Loading", R.string.Loading)): AlertDialog { - return AlertDialog.Builder(ctx, 1).apply { + return AlertDialog.Builder(ctx, AlertDialog.ALERT_TYPE_MESSAGE).apply { setMessage(text) @@ -247,7 +247,7 @@ object AlertUtil { LocaleController.getString("ProviderMicrosoftTranslator", R.string.ProviderMicrosoftTranslator), LocaleController.getString("ProviderYouDao", R.string.ProviderYouDao), LocaleController.getString("ProviderDeepLTranslate", R.string.ProviderDeepLTranslate), - LocaleController.getString("ProviderTelegram", R.string.ProviderTelegram) + LocaleController.getString("ProviderTelegramAPI", R.string.ProviderTelegramAPI) ).toList() ) diff --git a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/utils/PrivacyUtil.kt b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/utils/PrivacyUtil.kt index 01c58a9b31..acf6c0b638 100644 --- a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/utils/PrivacyUtil.kt +++ b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/utils/PrivacyUtil.kt 
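Side note on the translator change above: the new Telegram-API implementation drops the old CountDownLatch/AtomicReference bridge of the deleted TelegramTranslator and instead resumes a coroutine continuation (it.resume / it.resumeWithException) straight from the request callback. The snippet below is only an illustrative, self-contained Kotlin sketch of that callback-to-coroutine pattern; RequestSender and translateOnce are hypothetical names for this sketch, not classes from this diff.

    import kotlin.coroutines.resume
    import kotlin.coroutines.resumeWithException
    import kotlin.coroutines.suspendCoroutine

    // Hypothetical stand-in for a callback-based request API in the style of
    // ConnectionsManager.sendRequest(req) { res, err -> ... }.
    fun interface RequestSender {
        fun send(text: String, callback: (result: String?, error: Throwable?) -> Unit)
    }

    // Suspend wrapper following the same shape as the translator above:
    // resume on success, resumeWithException on error or an unexpected response.
    suspend fun translateOnce(sender: RequestSender, text: String): String =
        suspendCoroutine { cont ->
            try {
                sender.send(text) { result, error ->
                    if (error == null && !result.isNullOrEmpty()) {
                        cont.resume(result)
                    } else {
                        cont.resumeWithException(
                            error ?: RuntimeException("Failed to translate by Telegram API")
                        )
                    }
                }
            } catch (e: Exception) {
                cont.resumeWithException(e)
            }
        }

Under convertLanguageCode as added above, a locale pair such as ("zh", "CN") resolves to "zh-CN" because that tag appears in targetLanguages, while an unlisted pair such as ("en", "US") falls back to the bare language code "en".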
diff --git a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/utils/PrivacyUtil.kt b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/utils/PrivacyUtil.kt
index 01c58a9b31..acf6c0b638 100644
--- a/TMessagesProj/src/main/java/tw/nekomimi/nekogram/utils/PrivacyUtil.kt
+++ b/TMessagesProj/src/main/java/tw/nekomimi/nekogram/utils/PrivacyUtil.kt
@@ -223,7 +223,7 @@ object PrivacyUtil {
 
         runCatching {
 
-            (builder.show().getButton(DialogInterface.BUTTON_NEUTRAL) as TextView?)?.setTextColor(Theme.getColor(Theme.key_dialogTextRed2))
+            (builder.show().getButton(DialogInterface.BUTTON_NEUTRAL) as TextView?)?.setTextColor(Theme.getColor(Theme.key_dialogTextRed))
 
         }
 
@@ -261,7 +261,7 @@ object PrivacyUtil {
 
         runCatching {
 
-            (builder.show().getButton(DialogInterface.BUTTON_NEUTRAL) as TextView?)?.setTextColor(Theme.getColor(Theme.key_dialogTextRed2))
+            (builder.show().getButton(DialogInterface.BUTTON_NEUTRAL) as TextView?)?.setTextColor(Theme.getColor(Theme.key_dialogTextRed))
 
         }
 
diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_animations.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_animations.png
new file mode 100644
index 0000000000..5afe044392
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_animations.png differ
diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_archived_stickers.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_archived_stickers.png
new file mode 100644
index 0000000000..77c2c9679e
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_archived_stickers.png differ
diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_ask_question.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_ask_question.png
new file mode 100644
index 0000000000..e8d5da1b69
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_ask_question.png differ
diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_autodelete.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_autodelete.png
new file mode 100644
index 0000000000..d2d25eed7a
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_autodelete.png differ
diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_battery.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_battery.png
new file mode 100644
index 0000000000..c7c8ce57bb
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_battery.png differ
diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_block2.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_block2.png
new file mode 100644
index 0000000000..f334bccf8a
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_block2.png differ
diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_call_earpiece.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_call_earpiece.png
new file mode 100644
index 0000000000..35039d66ab
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_call_earpiece.png differ
diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_data.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_data.png
new file mode 100644
index 0000000000..26006f4f40
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_data.png differ
diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_devices.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_devices.png
new file mode 100644
index 0000000000..1456a3a701
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_devices.png differ
diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_discussion.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_discussion.png
new file mode 100644
index 0000000000..704c7bfe6c
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_discussion.png differ
diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_email.png
b/TMessagesProj/src/main/res/drawable-hdpi/msg2_email.png new file mode 100644 index 0000000000..1f893a7440 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_email.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_folder.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_folder.png new file mode 100644 index 0000000000..207b9819ae Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_folder.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_gif.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_gif.png new file mode 100644 index 0000000000..0ea5659d8e Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_gif.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_help.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_help.png new file mode 100644 index 0000000000..ab942869b5 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_help.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_language.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_language.png new file mode 100644 index 0000000000..081966ff8b Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_language.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_notifications.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_notifications.png new file mode 100644 index 0000000000..e32e2f83f5 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_notifications.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_permissions.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_permissions.png new file mode 100644 index 0000000000..99e7ed63b8 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_permissions.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_policy.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_policy.png new file mode 100644 index 0000000000..83d0aa8d27 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_policy.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_reactions2.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_reactions2.png new file mode 100644 index 0000000000..a20b9fa96a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_reactions2.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_secret.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_secret.png new file mode 100644 index 0000000000..dd741c7705 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_secret.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_smile_status.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_smile_status.png new file mode 100644 index 0000000000..4111702813 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_smile_status.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_sticker.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_sticker.png new file mode 100644 index 0000000000..6b17ff8076 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_sticker.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg2_trending.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_trending.png new file mode 100644 index 0000000000..211f10f673 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_trending.png differ diff --git 
a/TMessagesProj/src/main/res/drawable-hdpi/msg2_videocall.png b/TMessagesProj/src/main/res/drawable-hdpi/msg2_videocall.png new file mode 100644 index 0000000000..e7b11c32eb Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg2_videocall.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_addaccount_all.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_addaccount_all.png deleted file mode 100644 index 141d5b074c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_addaccount_all.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_addaccount_base.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_addaccount_base.png deleted file mode 100644 index acde0918da..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_addaccount_base.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_addaccount_text.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_addaccount_text.png deleted file mode 100644 index c068bc520d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_addaccount_text.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_ask_question.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_ask_question.png deleted file mode 100644 index b3480e3794..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_ask_question.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_autodelete_1h.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_autodelete_1h.png deleted file mode 100644 index 1ed05a6da9..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_autodelete_1h.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_bell_mute.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_bell_mute.png deleted file mode 100644 index 78c122d64e..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_bell_mute.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_bell_unmute.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_bell_unmute.png deleted file mode 100644 index 4142a69619..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_bell_unmute.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_bluetooth.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_bluetooth.png deleted file mode 100644 index ae03d7121f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_bluetooth.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_calls_pin.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_calls_pin.png deleted file mode 100644 index d00d0687a2..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_calls_pin.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_calls_unpin.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_calls_unpin.png deleted file mode 100644 index 18e4e2c356..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_calls_unpin.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_channel_14.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_channel_14.png deleted file mode 100644 index f2abdde5b9..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_channel_14.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_channel_create.png 
b/TMessagesProj/src/main/res/drawable-hdpi/msg_channel_create.png new file mode 100644 index 0000000000..c7ecdb1815 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_channel_create.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_channel_hw.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_channel_hw.png deleted file mode 100644 index 9904740b6c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_channel_hw.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_channel_ny.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_channel_ny.png deleted file mode 100644 index 0cdc7b7686..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_channel_ny.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_check.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_check.png deleted file mode 100644 index 64b204e262..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_check.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_check_thick.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_check_thick.png deleted file mode 100644 index 5698712e86..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_check_thick.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_clock.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_clock.png deleted file mode 100755 index cea2ae43aa..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_clock.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_data.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_data.png deleted file mode 100644 index b6339736ae..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_data.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_data_usage.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_data_usage.png deleted file mode 100644 index ba15168048..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_data_usage.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_delete_old.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_delete_old.png deleted file mode 100644 index 70dea031b0..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_delete_old.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_devices.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_devices.png deleted file mode 100644 index f814a29fdd..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_devices.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_download_settings.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_download_settings.png new file mode 100644 index 0000000000..da98eae6c8 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_download_settings.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_email.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_email.png deleted file mode 100644 index de348ac70a..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_email.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_activities.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_activities.png index 6df9449732..d9f07f2792 100644 Binary files 
a/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_activities.png and b/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_activities.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_food.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_food.png index bb46604e50..0911c7093f 100644 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_food.png and b/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_food.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_person.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_person.png new file mode 100644 index 0000000000..bba030e052 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_person.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_premium.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_premium.png deleted file mode 100644 index 4e8f6e6aeb..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_premium.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_stickers.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_stickers.png new file mode 100644 index 0000000000..b0eec70492 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_emoji_stickers.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_autodelete.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_autodelete.png new file mode 100644 index 0000000000..d130d6f4c1 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_autodelete.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_blocked.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_blocked.png new file mode 100644 index 0000000000..fa005bc31c Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_blocked.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_calls.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_calls.png new file mode 100644 index 0000000000..43f3b8a210 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_calls.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_files.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_files.png new file mode 100644 index 0000000000..d05ffda5f9 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_files.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_messages.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_messages.png new file mode 100644 index 0000000000..858706671e Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_messages.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_music.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_music.png new file mode 100644 index 0000000000..3ab0452378 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_music.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_photos.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_photos.png new file mode 100644 index 0000000000..1d308d57d6 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_photos.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_received.png 
b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_received.png new file mode 100644 index 0000000000..68caa3dd4c Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_received.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_sent.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_sent.png new file mode 100644 index 0000000000..fbddd3a92a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_sent.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_videos.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_videos.png new file mode 100644 index 0000000000..5443cb1e26 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_videos.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_voice.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_voice.png new file mode 100644 index 0000000000..8ac581698e Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_data_voice.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_datausage.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_datausage.png new file mode 100644 index 0000000000..d083c94fb7 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_datausage.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_devices.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_devices.png new file mode 100644 index 0000000000..553b5fe505 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_devices.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_email.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_email.png new file mode 100644 index 0000000000..9364dc54e1 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_email.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_fragment.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_fragment.png deleted file mode 100644 index a5660c22a6..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_fragment.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_passcode_off.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_passcode_off.png new file mode 100644 index 0000000000..564806bb1a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_passcode_off.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_passcode_on.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_passcode_on.png new file mode 100644 index 0000000000..3b31d978f4 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_passcode_on.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_permissions.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_permissions.png new file mode 100644 index 0000000000..85b97dde89 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_permissions.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_plus.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_plus.png new file mode 100644 index 0000000000..a7e13b41d0 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_plus.png differ diff --git 
a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_sdcard.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_sdcard.png new file mode 100644 index 0000000000..1181f57ef9 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_sdcard.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_storageusage.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_storageusage.png new file mode 100644 index 0000000000..2a3959fda0 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_filled_storageusage.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_folder.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_folder.png deleted file mode 100644 index f653701f22..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_folder.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_folder_reorder.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_folder_reorder.png deleted file mode 100644 index e21abdf1b1..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_folder_reorder.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_forward_check.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_forward_check.png deleted file mode 100644 index c33fff46db..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_forward_check.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_help_14.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_help_14.png deleted file mode 100644 index 796c512d04..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_help_14.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_input_gift.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_input_gift.png new file mode 100644 index 0000000000..06f273bf40 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_input_gift.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_invite_14.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_invite_14.png deleted file mode 100644 index bd89979292..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_invite_14.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_limit_2x.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_limit_2x.png deleted file mode 100644 index 59adc56db7..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_limit_2x.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_list2.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_list2.png deleted file mode 100644 index 41243afa51..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_list2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_members_list.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_members_list.png deleted file mode 100644 index a455e59e2c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_members_list.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_mini_checks.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_mini_checks.png new file mode 100644 index 0000000000..6ee146a765 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_mini_checks.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_mini_download.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_mini_download.png 
new file mode 100644 index 0000000000..80d3f89afa Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_mini_download.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_mini_lock3.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_mini_lock3.png new file mode 100644 index 0000000000..e1fa596d0a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_mini_lock3.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_mini_upload.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_mini_upload.png new file mode 100644 index 0000000000..3984f65d55 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_mini_upload.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_more_0_2.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_more_0_2.png deleted file mode 100644 index 513c2f9dee..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_more_0_2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_more_0_5.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_more_0_5.png deleted file mode 100644 index 01279e4c00..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_more_0_5.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_more_1_5.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_more_1_5.png deleted file mode 100644 index 80baf40b31..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_more_1_5.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_more_2.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_more_2.png deleted file mode 100644 index d8070e94f6..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_more_2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_nearby_14.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_nearby_14.png deleted file mode 100644 index 1628277101..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_nearby_14.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_nearby_hw.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_nearby_hw.png deleted file mode 100644 index 757a1f4ba7..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_nearby_hw.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_newfilter.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_newfilter.png deleted file mode 100644 index f7b92fc47c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_newfilter.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_nightmode_system.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_nightmode_system.png deleted file mode 100644 index f2381caf3f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_nightmode_system.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_photo.9.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_photo.9.png deleted file mode 100755 index 49349da0c0..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_photo.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_photo_blur_linear.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_photo_blur_linear.png deleted file mode 100644 index 5111f2e679..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_photo_blur_linear.png and /dev/null differ 
diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_photo_blur_off.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_photo_blur_off.png deleted file mode 100644 index 6ff96b870f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_photo_blur_off.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_photo_blur_radial.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_photo_blur_radial.png deleted file mode 100644 index df29f8eba5..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_photo_blur_radial.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_photo_shadow.9.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_photo_shadow.9.png deleted file mode 100755 index 0041934029..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_photo_shadow.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_premium_lock.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_premium_lock.png deleted file mode 100644 index 261ef87a09..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_premium_lock.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_premium_translate.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_premium_translate.png new file mode 100644 index 0000000000..17c47dd47e Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_premium_translate.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_schedule.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_schedule.png deleted file mode 100644 index d4da876f7b..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_schedule.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_smile_status.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_smile_status.png new file mode 100644 index 0000000000..3aa209db4a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_smile_status.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_0_5.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_0_5.png deleted file mode 100644 index e895cea0e7..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_0_5.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_1.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_1.png deleted file mode 100644 index aeb18a5193..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_1.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_1_5.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_1_5.png deleted file mode 100644 index 2913777436..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_1_5.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_2.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_2.png deleted file mode 100644 index 40209faa62..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_fast.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_fast.png new file mode 100644 index 0000000000..2715b3b072 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_fast.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_medium.png 
b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_medium.png new file mode 100644 index 0000000000..e109cbd894 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_medium.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_normal.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_normal.png new file mode 100644 index 0000000000..dbec3a5b3c Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_normal.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_slow.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_slow.png new file mode 100644 index 0000000000..71240d78ce Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_slow.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_superfast.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_superfast.png new file mode 100644 index 0000000000..6daf154335 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_superfast.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_veryfast.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_veryfast.png new file mode 100644 index 0000000000..2125d9a334 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_speed_veryfast.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_status_edit.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_status_edit.png deleted file mode 100644 index d972c30aef..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_status_edit.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_status_set.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_status_set.png deleted file mode 100644 index a18fe2553d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_status_set.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_storage_path.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_storage_path.png deleted file mode 100644 index 8f15307823..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_storage_path.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_storage_usage.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_storage_usage.png deleted file mode 100644 index ffa53d9af8..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_storage_usage.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_timer.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_timer.png deleted file mode 100644 index 8913c6595d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_timer.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_timer_1h.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_timer_1h.png deleted file mode 100644 index c9089dc0c7..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_timer_1h.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_timeredit.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_timeredit.png deleted file mode 100644 index d159b9ac3f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_timeredit.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/permissions_contacts.png b/TMessagesProj/src/main/res/drawable-hdpi/permissions_contacts.png deleted file mode 100755 index 
fb05e36785..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/permissions_contacts.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/photo_w.png b/TMessagesProj/src/main/res/drawable-hdpi/photo_w.png deleted file mode 100755 index 20b4f54233..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/photo_w.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/photocheck.png b/TMessagesProj/src/main/res/drawable-hdpi/photocheck.png deleted file mode 100755 index 91d5cc24aa..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/photocheck.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/phototime.9.png b/TMessagesProj/src/main/res/drawable-hdpi/phototime.9.png deleted file mode 100644 index 7cb710823a..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/phototime.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/play_big.png b/TMessagesProj/src/main/res/drawable-hdpi/play_big.png deleted file mode 100755 index bfd5ff765e..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/play_big.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/play_roundvideo.png b/TMessagesProj/src/main/res/drawable-hdpi/play_roundvideo.png deleted file mode 100644 index 9089aeaf4e..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/play_roundvideo.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/player_new_next.png b/TMessagesProj/src/main/res/drawable-hdpi/player_new_next.png deleted file mode 100644 index 60f7d57eb6..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/player_new_next.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/player_new_pause.png b/TMessagesProj/src/main/res/drawable-hdpi/player_new_pause.png deleted file mode 100644 index 74e3f9a72a..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/player_new_pause.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/player_new_play.png b/TMessagesProj/src/main/res/drawable-hdpi/player_new_play.png deleted file mode 100644 index 76ae59c06c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/player_new_play.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/player_new_previous.png b/TMessagesProj/src/main/res/drawable-hdpi/player_new_previous.png deleted file mode 100644 index d6ec6cf6ab..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/player_new_previous.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/plus.png b/TMessagesProj/src/main/res/drawable-hdpi/plus.png deleted file mode 100755 index b2be938beb..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/plus.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/popup_fixed.9.png b/TMessagesProj/src/main/res/drawable-hdpi/popup_fixed.9.png deleted file mode 100755 index 80449d60e3..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/popup_fixed.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/popup_fixed_top.9.png b/TMessagesProj/src/main/res/drawable-hdpi/popup_fixed_top.9.png deleted file mode 100755 index 0c95c17440..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/popup_fixed_top.9.png and /dev/null differ diff --git 
a/TMessagesProj/src/main/res/drawable-hdpi/regbtn2.9.png b/TMessagesProj/src/main/res/drawable-hdpi/regbtn2.9.png deleted file mode 100644 index ba78524359..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/regbtn2.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/regbtn2_pressed.9.png b/TMessagesProj/src/main/res/drawable-hdpi/regbtn2_pressed.9.png deleted file mode 100644 index 775048da1f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/regbtn2_pressed.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/replyvideo.png b/TMessagesProj/src/main/res/drawable-hdpi/replyvideo.png deleted file mode 100644 index b4176fbd35..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/replyvideo.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/s_pause.png b/TMessagesProj/src/main/res/drawable-hdpi/s_pause.png deleted file mode 100644 index b823dbc971..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/s_pause.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/s_play.png b/TMessagesProj/src/main/res/drawable-hdpi/s_play.png deleted file mode 100644 index 5d379dadb1..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/s_play.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/scroll_tip.png b/TMessagesProj/src/main/res/drawable-hdpi/scroll_tip.png deleted file mode 100755 index 5f75ceed35..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/scroll_tip.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/search_arrow2.png b/TMessagesProj/src/main/res/drawable-hdpi/search_arrow2.png deleted file mode 100644 index c599743ffd..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/search_arrow2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/settings_noresults.png b/TMessagesProj/src/main/res/drawable-hdpi/settings_noresults.png deleted file mode 100644 index 7055fed6a7..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/settings_noresults.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/sheet_shadow.9.png b/TMessagesProj/src/main/res/drawable-hdpi/sheet_shadow.9.png deleted file mode 100644 index 2c5c844950..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/sheet_shadow.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/slidearrow.png b/TMessagesProj/src/main/res/drawable-hdpi/slidearrow.png deleted file mode 100755 index 1085485eea..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/slidearrow.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/stickers_trending1.png b/TMessagesProj/src/main/res/drawable-hdpi/stickers_trending1.png deleted file mode 100644 index 13bb207ba5..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/stickers_trending1.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/stickers_trending2.png b/TMessagesProj/src/main/res/drawable-hdpi/stickers_trending2.png deleted file mode 100644 index 6ecd2972e5..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/stickers_trending2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/stickers_trending3.png b/TMessagesProj/src/main/res/drawable-hdpi/stickers_trending3.png deleted file mode 100644 index fc578523df..0000000000 
Binary files a/TMessagesProj/src/main/res/drawable-hdpi/stickers_trending3.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/switch_to_on1.9.png b/TMessagesProj/src/main/res/drawable-hdpi/switch_to_on1.9.png deleted file mode 100644 index 6dcfd7331c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/switch_to_on1.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/switch_to_on2.9.png b/TMessagesProj/src/main/res/drawable-hdpi/switch_to_on2.9.png deleted file mode 100644 index 6dcfd7331c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/switch_to_on2.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/switch_track.9.png b/TMessagesProj/src/main/res/drawable-hdpi/switch_track.9.png deleted file mode 100644 index 977f3f25a2..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/switch_track.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/temp_starlarge.png b/TMessagesProj/src/main/res/drawable-hdpi/temp_starlarge.png deleted file mode 100644 index e3a523f57f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/temp_starlarge.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/video_pause1.png b/TMessagesProj/src/main/res/drawable-hdpi/video_pause1.png deleted file mode 100644 index 15dda6cf7d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/video_pause1.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/video_play1.png b/TMessagesProj/src/main/res/drawable-hdpi/video_play1.png deleted file mode 100644 index b33b9f4d4c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/video_play1.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/voice_mini_1_0.png b/TMessagesProj/src/main/res/drawable-hdpi/voice_mini_1_0.png new file mode 100644 index 0000000000..e36e571664 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/voice_mini_1_0.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/voice_mini_1_2.png b/TMessagesProj/src/main/res/drawable-hdpi/voice_mini_1_2.png new file mode 100644 index 0000000000..13d1b65a16 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/voice_mini_1_2.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/voice_mini_1_7.png b/TMessagesProj/src/main/res/drawable-hdpi/voice_mini_1_7.png new file mode 100644 index 0000000000..c72bf33485 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/voice_mini_1_7.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/voice_volume_muted.png b/TMessagesProj/src/main/res/drawable-hdpi/voice_volume_muted.png deleted file mode 100644 index 28833f79ec..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/voice_volume_muted.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/voice_volume_speaker.png b/TMessagesProj/src/main/res/drawable-hdpi/voice_volume_speaker.png deleted file mode 100644 index 908294290f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/voice_volume_speaker.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/wall_selection.9.png b/TMessagesProj/src/main/res/drawable-hdpi/wall_selection.9.png deleted file mode 100644 index e6fb5125ba..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/wall_selection.9.png and /dev/null differ diff --git 
a/TMessagesProj/src/main/res/drawable-hdpi/widget_check.png b/TMessagesProj/src/main/res/drawable-hdpi/widget_check.png deleted file mode 100644 index 3c1ed349b1..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/widget_check.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/widget_clock.png b/TMessagesProj/src/main/res/drawable-hdpi/widget_clock.png deleted file mode 100644 index b30664fb2b..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/widget_clock.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/widget_halfcheck.png b/TMessagesProj/src/main/res/drawable-hdpi/widget_halfcheck.png deleted file mode 100644 index d3a2666b7c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/widget_halfcheck.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/widget_muted.png b/TMessagesProj/src/main/res/drawable-hdpi/widget_muted.png deleted file mode 100644 index 27e0996896..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/widget_muted.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/widget_online.png b/TMessagesProj/src/main/res/drawable-hdpi/widget_online.png deleted file mode 100644 index cde098fc44..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/widget_online.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/widget_pin.png b/TMessagesProj/src/main/res/drawable-hdpi/widget_pin.png deleted file mode 100644 index 8d248b6f1f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/widget_pin.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/widget_verified.png b/TMessagesProj/src/main/res/drawable-hdpi/widget_verified.png deleted file mode 100644 index c50e2dd525..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/widget_verified.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/widgets_dark_badgebg.9.png b/TMessagesProj/src/main/res/drawable-hdpi/widgets_dark_badgebg.9.png deleted file mode 100644 index 09d1dd106a..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/widgets_dark_badgebg.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/widgets_dark_plus.png b/TMessagesProj/src/main/res/drawable-hdpi/widgets_dark_plus.png deleted file mode 100644 index 9ace8d361f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/widgets_dark_plus.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/widgets_light_plus.png b/TMessagesProj/src/main/res/drawable-hdpi/widgets_light_plus.png deleted file mode 100644 index c5608819b8..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/widgets_light_plus.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_animations.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_animations.png new file mode 100644 index 0000000000..c9ae6e7814 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_animations.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_archived_stickers.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_archived_stickers.png new file mode 100644 index 0000000000..256269b602 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_archived_stickers.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_ask_question.png 
b/TMessagesProj/src/main/res/drawable-mdpi/msg2_ask_question.png new file mode 100644 index 0000000000..9aae4dbcc4 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_ask_question.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_autodelete.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_autodelete.png new file mode 100644 index 0000000000..702b02d54b Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_autodelete.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_battery.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_battery.png new file mode 100644 index 0000000000..1154dfa62b Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_battery.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_block2.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_block2.png new file mode 100644 index 0000000000..16eeb64f5a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_block2.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_call_earpiece.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_call_earpiece.png new file mode 100644 index 0000000000..b295f1bab2 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_call_earpiece.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_data.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_data.png new file mode 100644 index 0000000000..923e527029 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_data.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_devices.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_devices.png new file mode 100644 index 0000000000..ad06587352 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_devices.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_discussion.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_discussion.png new file mode 100644 index 0000000000..deb54d6983 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_discussion.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_email.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_email.png new file mode 100644 index 0000000000..5bcf413fc6 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_email.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_folder.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_folder.png new file mode 100644 index 0000000000..458459abfe Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_folder.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_gif.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_gif.png new file mode 100644 index 0000000000..ed7bed0e1f Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_gif.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_help.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_help.png new file mode 100644 index 0000000000..48e16ccb72 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_help.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_language.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_language.png new file mode 100644 index 0000000000..29ceec54a6 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_language.png differ diff --git 
a/TMessagesProj/src/main/res/drawable-mdpi/msg2_notifications.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_notifications.png new file mode 100644 index 0000000000..01484cca06 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_notifications.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_permissions.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_permissions.png new file mode 100644 index 0000000000..43ec6cde81 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_permissions.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_policy.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_policy.png new file mode 100644 index 0000000000..0859c7dcf5 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_policy.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_reactions2.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_reactions2.png new file mode 100644 index 0000000000..0ba486f2e1 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_reactions2.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_secret.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_secret.png new file mode 100644 index 0000000000..207f3f861d Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_secret.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_smile_status.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_smile_status.png new file mode 100644 index 0000000000..0b300dc820 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_smile_status.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_sticker.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_sticker.png new file mode 100644 index 0000000000..6b335d0ee5 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_sticker.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_trending.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_trending.png new file mode 100644 index 0000000000..095a39da70 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_trending.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg2_videocall.png b/TMessagesProj/src/main/res/drawable-mdpi/msg2_videocall.png new file mode 100644 index 0000000000..9f9af2dbb1 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg2_videocall.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_addaccount_all.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_addaccount_all.png deleted file mode 100644 index ba80e881b9..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_addaccount_all.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_addaccount_base.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_addaccount_base.png deleted file mode 100644 index 38d45c15e8..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_addaccount_base.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_addaccount_text.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_addaccount_text.png deleted file mode 100644 index e7b668b988..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_addaccount_text.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_ask_question.png 
b/TMessagesProj/src/main/res/drawable-mdpi/msg_ask_question.png deleted file mode 100644 index 21cf79fcec..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_ask_question.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_autodelete_1h.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_autodelete_1h.png deleted file mode 100644 index 48e44a2f8c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_autodelete_1h.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_bell_mute.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_bell_mute.png deleted file mode 100644 index 8effcfe002..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_bell_mute.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_bell_unmute.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_bell_unmute.png deleted file mode 100644 index c2870326e3..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_bell_unmute.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_bluetooth.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_bluetooth.png deleted file mode 100644 index 95da6af0d0..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_bluetooth.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_calls_pin.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_calls_pin.png deleted file mode 100644 index 7367353dc3..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_calls_pin.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_calls_unpin.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_calls_unpin.png deleted file mode 100644 index 1a018544e6..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_calls_unpin.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_channel_14.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_channel_14.png deleted file mode 100644 index 5e878867a8..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_channel_14.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_channel_create.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_channel_create.png new file mode 100644 index 0000000000..9b5a168160 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_channel_create.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_channel_hw.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_channel_hw.png deleted file mode 100644 index 6dc7652742..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_channel_hw.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_channel_ny.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_channel_ny.png deleted file mode 100644 index 4de64cd783..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_channel_ny.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_check.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_check.png deleted file mode 100644 index 3fd562cf79..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_check.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_check_thick.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_check_thick.png deleted file mode 100644 index 053ccebd5c..0000000000 Binary files 
a/TMessagesProj/src/main/res/drawable-mdpi/msg_check_thick.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_clock.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_clock.png deleted file mode 100755 index 1248954b37..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_clock.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_data.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_data.png deleted file mode 100644 index 0fd6f38e29..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_data.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_data_usage.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_data_usage.png deleted file mode 100644 index be327d4b03..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_data_usage.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_delete_old.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_delete_old.png deleted file mode 100644 index 6c6eba3800..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_delete_old.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_devices.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_devices.png deleted file mode 100644 index f06476cd9b..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_devices.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_download_settings.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_download_settings.png new file mode 100644 index 0000000000..2f3c231aee Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_download_settings.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_email.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_email.png deleted file mode 100644 index 376ea16dfa..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_email.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_activities.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_activities.png index 481d4e08df..6a118c73e7 100644 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_activities.png and b/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_activities.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_food.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_food.png index 4c63c287f1..2924d3d8a9 100644 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_food.png and b/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_food.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_person.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_person.png new file mode 100644 index 0000000000..372718ee13 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_person.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_premium.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_premium.png deleted file mode 100644 index 13d72f50c6..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_premium.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_stickers.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_stickers.png new file mode 100644 index 0000000000..daa3f215e5 Binary files /dev/null and 
b/TMessagesProj/src/main/res/drawable-mdpi/msg_emoji_stickers.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_autodelete.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_autodelete.png new file mode 100644 index 0000000000..cd741c8e16 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_autodelete.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_blocked.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_blocked.png new file mode 100644 index 0000000000..c26c84d67c Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_blocked.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_calls.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_calls.png new file mode 100644 index 0000000000..ebd73c3bb2 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_calls.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_files.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_files.png new file mode 100644 index 0000000000..88e155d3dc Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_files.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_messages.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_messages.png new file mode 100644 index 0000000000..51ed58fc9e Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_messages.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_music.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_music.png new file mode 100644 index 0000000000..cf0f6d5ee4 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_music.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_photos.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_photos.png new file mode 100644 index 0000000000..03a9d7bebd Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_photos.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_received.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_received.png new file mode 100644 index 0000000000..348a364606 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_received.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_sent.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_sent.png new file mode 100644 index 0000000000..f03c8cd983 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_sent.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_videos.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_videos.png new file mode 100644 index 0000000000..e68b58bf20 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_videos.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_voice.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_voice.png new file mode 100644 index 0000000000..eec3fbf10a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_data_voice.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_datausage.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_datausage.png 
new file mode 100644 index 0000000000..8f40261639 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_datausage.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_devices.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_devices.png new file mode 100644 index 0000000000..d712d51a29 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_devices.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_email.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_email.png new file mode 100644 index 0000000000..45c57009ad Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_email.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_fragment.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_fragment.png deleted file mode 100644 index 73b8662e32..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_fragment.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_passcode_off.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_passcode_off.png new file mode 100644 index 0000000000..cc151b3899 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_passcode_off.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_passcode_on.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_passcode_on.png new file mode 100644 index 0000000000..70beef3711 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_passcode_on.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_permissions.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_permissions.png new file mode 100644 index 0000000000..dd4d9e62a6 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_permissions.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_plus.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_plus.png new file mode 100644 index 0000000000..960dd89153 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_plus.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_sdcard.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_sdcard.png new file mode 100644 index 0000000000..80b7d06c49 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_sdcard.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_storageusage.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_storageusage.png new file mode 100644 index 0000000000..c4a0bf9ba4 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_filled_storageusage.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_folder.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_folder.png deleted file mode 100644 index f05209ea8a..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_folder.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_folder_reorder.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_folder_reorder.png deleted file mode 100644 index d8c7f26e11..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_folder_reorder.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_forward_check.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_forward_check.png deleted 
file mode 100644 index 3157161cfe..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_forward_check.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_help_14.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_help_14.png deleted file mode 100644 index 284623309c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_help_14.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_input_gift.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_input_gift.png new file mode 100644 index 0000000000..9ef3bb75cd Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_input_gift.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_invite_14.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_invite_14.png deleted file mode 100644 index 6c5b79112c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_invite_14.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_limit_2x.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_limit_2x.png deleted file mode 100644 index c07216aea8..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_limit_2x.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_list2.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_list2.png deleted file mode 100644 index d56b6725d2..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_list2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_members_list.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_members_list.png deleted file mode 100644 index 43a66a97ed..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_members_list.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_mini_checks.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_mini_checks.png new file mode 100644 index 0000000000..e9aa7fea47 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_mini_checks.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_mini_download.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_mini_download.png new file mode 100644 index 0000000000..fd404abd4a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_mini_download.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_mini_lock3.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_mini_lock3.png new file mode 100644 index 0000000000..4d562a90aa Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_mini_lock3.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_mini_upload.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_mini_upload.png new file mode 100644 index 0000000000..2f82a04d2c Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_mini_upload.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_more_0_2.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_more_0_2.png deleted file mode 100644 index e773d938cb..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_more_0_2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_more_0_5.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_more_0_5.png deleted file mode 100644 index b610df16c7..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_more_0_5.png and /dev/null differ diff --git 
a/TMessagesProj/src/main/res/drawable-mdpi/msg_more_1_5.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_more_1_5.png deleted file mode 100644 index 56f4c5ede7..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_more_1_5.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_more_2.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_more_2.png deleted file mode 100644 index 2fa8593f04..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_more_2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_nearby_14.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_nearby_14.png deleted file mode 100644 index 33e863516e..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_nearby_14.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_nearby_hw.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_nearby_hw.png deleted file mode 100644 index 1fcf1cd77d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_nearby_hw.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_newfilter.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_newfilter.png deleted file mode 100644 index 5f39a1c2e8..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_newfilter.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_nightmode_system.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_nightmode_system.png deleted file mode 100644 index 0a390d5fea..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_nightmode_system.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_photo.9.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_photo.9.png deleted file mode 100755 index cdcc5a472c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_photo.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_photo_blur_linear.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_photo_blur_linear.png deleted file mode 100644 index 40f8492669..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_photo_blur_linear.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_photo_blur_off.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_photo_blur_off.png deleted file mode 100644 index f6d5cd5f0b..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_photo_blur_off.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_photo_blur_radial.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_photo_blur_radial.png deleted file mode 100644 index 0a49050c60..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_photo_blur_radial.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_photo_shadow.9.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_photo_shadow.9.png deleted file mode 100755 index 48643c6432..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_photo_shadow.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_premium_lock.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_premium_lock.png deleted file mode 100644 index 698baa0a97..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_premium_lock.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_premium_translate.png 
b/TMessagesProj/src/main/res/drawable-mdpi/msg_premium_translate.png new file mode 100644 index 0000000000..32a3942ef8 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_premium_translate.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_schedule.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_schedule.png deleted file mode 100644 index eb3518506c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_schedule.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_smile_status.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_smile_status.png new file mode 100644 index 0000000000..0761be6979 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_smile_status.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_0_5.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_0_5.png deleted file mode 100644 index a7c0b958df..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_0_5.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_1.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_1.png deleted file mode 100644 index 5ff155283f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_1.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_1_5.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_1_5.png deleted file mode 100644 index 98ced63e0b..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_1_5.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_2.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_2.png deleted file mode 100644 index 9c0021e778..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_fast.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_fast.png new file mode 100644 index 0000000000..ca87f0faac Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_fast.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_medium.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_medium.png new file mode 100644 index 0000000000..186c06b298 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_medium.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_normal.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_normal.png new file mode 100644 index 0000000000..1752fd063a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_normal.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_slow.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_slow.png new file mode 100644 index 0000000000..817ac76fa9 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_slow.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_superfast.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_superfast.png new file mode 100644 index 0000000000..55e11af18b Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_superfast.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_veryfast.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_veryfast.png new file mode 100644 index 0000000000..2210e458aa Binary files /dev/null 
and b/TMessagesProj/src/main/res/drawable-mdpi/msg_speed_veryfast.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_status_edit.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_status_edit.png deleted file mode 100644 index c6c1616ae8..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_status_edit.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_status_set.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_status_set.png deleted file mode 100644 index e32a6c2462..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_status_set.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_storage_path.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_storage_path.png deleted file mode 100644 index 54583bf477..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_storage_path.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_storage_usage.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_storage_usage.png deleted file mode 100644 index 8845bcd8fb..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_storage_usage.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_timer.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_timer.png deleted file mode 100644 index 1d0716682c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_timer.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_timer_1h.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_timer_1h.png deleted file mode 100644 index 1740efa5bd..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_timer_1h.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_timeredit.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_timeredit.png deleted file mode 100644 index b5b79dccad..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_timeredit.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/permissions_contacts.png b/TMessagesProj/src/main/res/drawable-mdpi/permissions_contacts.png deleted file mode 100755 index 6b935d05f1..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/permissions_contacts.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/photo_w.png b/TMessagesProj/src/main/res/drawable-mdpi/photo_w.png deleted file mode 100755 index d569e0d2d4..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/photo_w.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/photocheck.png b/TMessagesProj/src/main/res/drawable-mdpi/photocheck.png deleted file mode 100755 index b8253563e2..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/photocheck.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/phototime.9.png b/TMessagesProj/src/main/res/drawable-mdpi/phototime.9.png deleted file mode 100644 index 0fc8eb83cc..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/phototime.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/play_big.png b/TMessagesProj/src/main/res/drawable-mdpi/play_big.png deleted file mode 100755 index f6b0163011..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/play_big.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/play_roundvideo.png 
b/TMessagesProj/src/main/res/drawable-mdpi/play_roundvideo.png deleted file mode 100644 index 3e7c45ed95..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/play_roundvideo.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/player_new_next.png b/TMessagesProj/src/main/res/drawable-mdpi/player_new_next.png deleted file mode 100644 index 13fa92c3f9..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/player_new_next.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/player_new_pause.png b/TMessagesProj/src/main/res/drawable-mdpi/player_new_pause.png deleted file mode 100644 index 00321760ea..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/player_new_pause.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/player_new_play.png b/TMessagesProj/src/main/res/drawable-mdpi/player_new_play.png deleted file mode 100644 index 9f6992b879..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/player_new_play.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/player_new_previous.png b/TMessagesProj/src/main/res/drawable-mdpi/player_new_previous.png deleted file mode 100644 index 47245319e5..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/player_new_previous.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/plus.png b/TMessagesProj/src/main/res/drawable-mdpi/plus.png deleted file mode 100755 index f468320260..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/plus.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/popup_fixed.9.png b/TMessagesProj/src/main/res/drawable-mdpi/popup_fixed.9.png deleted file mode 100755 index 8220925b78..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/popup_fixed.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/popup_fixed_top.9.png b/TMessagesProj/src/main/res/drawable-mdpi/popup_fixed_top.9.png deleted file mode 100755 index 29cda0bd5d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/popup_fixed_top.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/regbtn2.9.png b/TMessagesProj/src/main/res/drawable-mdpi/regbtn2.9.png deleted file mode 100644 index 0b4a6a51d1..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/regbtn2.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/regbtn2_pressed.9.png b/TMessagesProj/src/main/res/drawable-mdpi/regbtn2_pressed.9.png deleted file mode 100644 index 708ff24933..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/regbtn2_pressed.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/replyvideo.png b/TMessagesProj/src/main/res/drawable-mdpi/replyvideo.png deleted file mode 100644 index fc24315228..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/replyvideo.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/s_pause.png b/TMessagesProj/src/main/res/drawable-mdpi/s_pause.png deleted file mode 100644 index f09f4ce571..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/s_pause.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/s_play.png b/TMessagesProj/src/main/res/drawable-mdpi/s_play.png deleted file mode 100644 index d750daffcf..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/s_play.png 
and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/scroll_tip.png b/TMessagesProj/src/main/res/drawable-mdpi/scroll_tip.png deleted file mode 100755 index 466eef136f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/scroll_tip.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/search_arrow2.png b/TMessagesProj/src/main/res/drawable-mdpi/search_arrow2.png deleted file mode 100644 index 9d8eaf470a..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/search_arrow2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/settings_noresults.png b/TMessagesProj/src/main/res/drawable-mdpi/settings_noresults.png deleted file mode 100644 index 89fb458900..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/settings_noresults.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/sheet_shadow.9.png b/TMessagesProj/src/main/res/drawable-mdpi/sheet_shadow.9.png deleted file mode 100644 index e9dc2a77f7..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/sheet_shadow.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/slidearrow.png b/TMessagesProj/src/main/res/drawable-mdpi/slidearrow.png deleted file mode 100755 index 56f626fa53..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/slidearrow.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/stickers_trending1.png b/TMessagesProj/src/main/res/drawable-mdpi/stickers_trending1.png deleted file mode 100644 index d937a6760d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/stickers_trending1.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/stickers_trending2.png b/TMessagesProj/src/main/res/drawable-mdpi/stickers_trending2.png deleted file mode 100644 index 70d5b04114..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/stickers_trending2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/stickers_trending3.png b/TMessagesProj/src/main/res/drawable-mdpi/stickers_trending3.png deleted file mode 100644 index e8be888bc5..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/stickers_trending3.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/switch_to_on1.9.png b/TMessagesProj/src/main/res/drawable-mdpi/switch_to_on1.9.png deleted file mode 100644 index 6cff6912f5..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/switch_to_on1.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/switch_to_on2.9.png b/TMessagesProj/src/main/res/drawable-mdpi/switch_to_on2.9.png deleted file mode 100644 index 6cff6912f5..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/switch_to_on2.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/switch_track.9.png b/TMessagesProj/src/main/res/drawable-mdpi/switch_track.9.png deleted file mode 100644 index 4d0cf5caee..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/switch_track.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/temp_starlarge.png b/TMessagesProj/src/main/res/drawable-mdpi/temp_starlarge.png deleted file mode 100644 index 0e7a69f58a..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/temp_starlarge.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/video_pause1.png 
b/TMessagesProj/src/main/res/drawable-mdpi/video_pause1.png deleted file mode 100644 index 8a9f0f51ff..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/video_pause1.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/video_play1.png b/TMessagesProj/src/main/res/drawable-mdpi/video_play1.png deleted file mode 100644 index 1011ca852c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/video_play1.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/voice_mini_1_0.png b/TMessagesProj/src/main/res/drawable-mdpi/voice_mini_1_0.png new file mode 100644 index 0000000000..5b64fff93a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/voice_mini_1_0.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/voice_mini_1_2.png b/TMessagesProj/src/main/res/drawable-mdpi/voice_mini_1_2.png new file mode 100644 index 0000000000..a17bcb7473 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/voice_mini_1_2.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/voice_mini_1_7.png b/TMessagesProj/src/main/res/drawable-mdpi/voice_mini_1_7.png new file mode 100644 index 0000000000..f5374070c1 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/voice_mini_1_7.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/voice_volume_muted.png b/TMessagesProj/src/main/res/drawable-mdpi/voice_volume_muted.png deleted file mode 100644 index 1023abf8ea..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/voice_volume_muted.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/voice_volume_speaker.png b/TMessagesProj/src/main/res/drawable-mdpi/voice_volume_speaker.png deleted file mode 100644 index e9f92af199..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/voice_volume_speaker.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/wall_selection.9.png b/TMessagesProj/src/main/res/drawable-mdpi/wall_selection.9.png deleted file mode 100644 index f296d0e9fb..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/wall_selection.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/widget_check.png b/TMessagesProj/src/main/res/drawable-mdpi/widget_check.png deleted file mode 100644 index 5dfcc142be..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/widget_check.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/widget_clock.png b/TMessagesProj/src/main/res/drawable-mdpi/widget_clock.png deleted file mode 100644 index bf373fef5d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/widget_clock.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/widget_halfcheck.png b/TMessagesProj/src/main/res/drawable-mdpi/widget_halfcheck.png deleted file mode 100644 index 8a00e0d152..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/widget_halfcheck.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/widget_muted.png b/TMessagesProj/src/main/res/drawable-mdpi/widget_muted.png deleted file mode 100644 index ccc32c1987..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/widget_muted.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/widget_online.png b/TMessagesProj/src/main/res/drawable-mdpi/widget_online.png deleted file mode 100644 index 6c02baa919..0000000000 Binary files 
a/TMessagesProj/src/main/res/drawable-mdpi/widget_online.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/widget_pin.png b/TMessagesProj/src/main/res/drawable-mdpi/widget_pin.png deleted file mode 100644 index 5bb76cf373..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/widget_pin.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/widget_verified.png b/TMessagesProj/src/main/res/drawable-mdpi/widget_verified.png deleted file mode 100644 index f7a036dc2f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/widget_verified.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/widgets_dark_badgebg.9.png b/TMessagesProj/src/main/res/drawable-mdpi/widgets_dark_badgebg.9.png deleted file mode 100644 index 363123ff9f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/widgets_dark_badgebg.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/widgets_dark_plus.png b/TMessagesProj/src/main/res/drawable-mdpi/widgets_dark_plus.png deleted file mode 100644 index 5dd337cb0c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/widgets_dark_plus.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/widgets_light_plus.png b/TMessagesProj/src/main/res/drawable-mdpi/widgets_light_plus.png deleted file mode 100644 index 3ae8a85689..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/widgets_light_plus.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-hdpi/widget_check.png b/TMessagesProj/src/main/res/drawable-night-hdpi/widget_check.png deleted file mode 100644 index b69327592e..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-hdpi/widget_check.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-hdpi/widget_clock.png b/TMessagesProj/src/main/res/drawable-night-hdpi/widget_clock.png deleted file mode 100644 index 544e6bb33f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-hdpi/widget_clock.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-hdpi/widget_halfcheck.png b/TMessagesProj/src/main/res/drawable-night-hdpi/widget_halfcheck.png deleted file mode 100644 index 311e405d68..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-hdpi/widget_halfcheck.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-hdpi/widget_muted.png b/TMessagesProj/src/main/res/drawable-night-hdpi/widget_muted.png deleted file mode 100644 index ecf6d32b7c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-hdpi/widget_muted.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-hdpi/widget_online.png b/TMessagesProj/src/main/res/drawable-night-hdpi/widget_online.png deleted file mode 100644 index 2dc6af43cf..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-hdpi/widget_online.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-hdpi/widget_pin.png b/TMessagesProj/src/main/res/drawable-night-hdpi/widget_pin.png deleted file mode 100644 index d45b49f490..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-hdpi/widget_pin.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-hdpi/widget_verified.png b/TMessagesProj/src/main/res/drawable-night-hdpi/widget_verified.png deleted file mode 100644 index 6d56e37acb..0000000000 Binary files 
a/TMessagesProj/src/main/res/drawable-night-hdpi/widget_verified.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-mdpi/widget_check.png b/TMessagesProj/src/main/res/drawable-night-mdpi/widget_check.png deleted file mode 100644 index d60d849d17..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-mdpi/widget_check.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-mdpi/widget_clock.png b/TMessagesProj/src/main/res/drawable-night-mdpi/widget_clock.png deleted file mode 100644 index 4a0d1f3e07..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-mdpi/widget_clock.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-mdpi/widget_halfcheck.png b/TMessagesProj/src/main/res/drawable-night-mdpi/widget_halfcheck.png deleted file mode 100644 index ff86161b0f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-mdpi/widget_halfcheck.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-mdpi/widget_muted.png b/TMessagesProj/src/main/res/drawable-night-mdpi/widget_muted.png deleted file mode 100644 index c8e3cb6abd..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-mdpi/widget_muted.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-mdpi/widget_online.png b/TMessagesProj/src/main/res/drawable-night-mdpi/widget_online.png deleted file mode 100644 index cf29da17d0..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-mdpi/widget_online.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-mdpi/widget_pin.png b/TMessagesProj/src/main/res/drawable-night-mdpi/widget_pin.png deleted file mode 100644 index 8de91645ce..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-mdpi/widget_pin.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-mdpi/widget_verified.png b/TMessagesProj/src/main/res/drawable-night-mdpi/widget_verified.png deleted file mode 100644 index 4b238274d3..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-mdpi/widget_verified.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_check.png b/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_check.png deleted file mode 100644 index 5d7e71268c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_check.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_clock.png b/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_clock.png deleted file mode 100644 index 48370ad734..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_clock.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_halfcheck.png b/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_halfcheck.png deleted file mode 100644 index 769e383b95..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_halfcheck.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_muted.png b/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_muted.png deleted file mode 100644 index c2a970eba2..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_muted.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_online.png b/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_online.png deleted 
file mode 100644 index c0659a70b2..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_online.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_pin.png b/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_pin.png deleted file mode 100644 index 26428a0448..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_pin.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_verified.png b/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_verified.png deleted file mode 100644 index 46433d996f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-xhdpi/widget_verified.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_check.png b/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_check.png deleted file mode 100644 index ac5fa2a35e..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_check.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_clock.png b/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_clock.png deleted file mode 100644 index 3591a535ae..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_clock.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_halfcheck.png b/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_halfcheck.png deleted file mode 100644 index 9142a25fea..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_halfcheck.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_muted.png b/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_muted.png deleted file mode 100644 index 4d75e01024..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_muted.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_online.png b/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_online.png deleted file mode 100644 index ae13a511bf..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_online.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_pin.png b/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_pin.png deleted file mode 100644 index f005bfbfef..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_pin.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_verified.png b/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_verified.png deleted file mode 100644 index 397fa1f060..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-night-xxhdpi/widget_verified.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_animations.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_animations.png new file mode 100644 index 0000000000..701ae27c3d Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_animations.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_archived_stickers.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_archived_stickers.png new file mode 100644 index 0000000000..1539ec746a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_archived_stickers.png differ diff --git 
a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_ask_question.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_ask_question.png new file mode 100644 index 0000000000..1039eb522e Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_ask_question.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_autodelete.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_autodelete.png new file mode 100644 index 0000000000..4d734e91be Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_autodelete.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_battery.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_battery.png new file mode 100644 index 0000000000..b437bf6350 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_battery.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_block2.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_block2.png new file mode 100644 index 0000000000..1d1aceec36 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_block2.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_call_earpiece.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_call_earpiece.png new file mode 100644 index 0000000000..fba2c11c8c Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_call_earpiece.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_data.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_data.png new file mode 100644 index 0000000000..174dc17246 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_data.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_devices.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_devices.png new file mode 100644 index 0000000000..ae4978bb08 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_devices.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_discussion.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_discussion.png new file mode 100644 index 0000000000..0554f68ac6 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_discussion.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_email.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_email.png new file mode 100644 index 0000000000..5afce21d89 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_email.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_folder.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_folder.png new file mode 100644 index 0000000000..d72caea0a6 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_folder.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_gif.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_gif.png new file mode 100644 index 0000000000..b8521c198d Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_gif.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_help.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_help.png new file mode 100644 index 0000000000..5b237db11b Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_help.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_language.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_language.png new file mode 100644 index 0000000000..6aeb23e3ff Binary files /dev/null and 
b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_language.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_notifications.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_notifications.png new file mode 100644 index 0000000000..83d35bc62f Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_notifications.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_permissions.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_permissions.png new file mode 100644 index 0000000000..2eec3be697 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_permissions.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_policy.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_policy.png new file mode 100644 index 0000000000..ff4486d67e Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_policy.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_reactions2.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_reactions2.png new file mode 100644 index 0000000000..bc596e4044 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_reactions2.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_secret.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_secret.png new file mode 100644 index 0000000000..ba31a862d5 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_secret.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_smile_status.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_smile_status.png new file mode 100644 index 0000000000..30ba292182 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_smile_status.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_sticker.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_sticker.png new file mode 100644 index 0000000000..c62dbc2ba7 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_sticker.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_trending.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_trending.png new file mode 100644 index 0000000000..d9ac089997 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_trending.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg2_videocall.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_videocall.png new file mode 100644 index 0000000000..355af10bf7 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg2_videocall.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_addaccount_all.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_addaccount_all.png deleted file mode 100644 index 6b174aec67..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_addaccount_all.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_addaccount_base.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_addaccount_base.png deleted file mode 100644 index 79dd789d8c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_addaccount_base.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_addaccount_text.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_addaccount_text.png deleted file mode 100644 index 0c70608ca7..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_addaccount_text.png and /dev/null differ diff --git 
a/TMessagesProj/src/main/res/drawable-xhdpi/msg_ask_question.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_ask_question.png deleted file mode 100644 index 484a89cc33..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_ask_question.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_autodelete_1h.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_autodelete_1h.png deleted file mode 100644 index f4ba3e5838..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_autodelete_1h.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_bell_mute.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_bell_mute.png deleted file mode 100644 index 55a0d13d57..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_bell_mute.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_bell_unmute.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_bell_unmute.png deleted file mode 100644 index d9f117711e..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_bell_unmute.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_bluetooth.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_bluetooth.png deleted file mode 100644 index 44d0ac9edc..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_bluetooth.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_calls_pin.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_calls_pin.png deleted file mode 100644 index 39fd378f1f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_calls_pin.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_calls_unpin.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_calls_unpin.png deleted file mode 100644 index c2c7e37899..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_calls_unpin.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_channel_14.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_channel_14.png deleted file mode 100644 index 88c4db4c18..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_channel_14.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_channel_create.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_channel_create.png new file mode 100644 index 0000000000..53b0a25c3c Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_channel_create.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_channel_hw.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_channel_hw.png deleted file mode 100644 index d7d982f9fa..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_channel_hw.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_channel_ny.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_channel_ny.png deleted file mode 100644 index b61fa6d959..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_channel_ny.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_check.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_check.png deleted file mode 100644 index 1df03d2ed8..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_check.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_check_thick.png 
b/TMessagesProj/src/main/res/drawable-xhdpi/msg_check_thick.png deleted file mode 100644 index 7bfd6bcdd5..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_check_thick.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_clock.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_clock.png deleted file mode 100755 index 4168e3b71a..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_clock.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_data.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_data.png deleted file mode 100644 index 89dc6d345a..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_data.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_data_usage.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_data_usage.png deleted file mode 100644 index f7ce1b4626..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_data_usage.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_delete_old.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_delete_old.png deleted file mode 100644 index fe61565659..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_delete_old.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_devices.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_devices.png deleted file mode 100644 index 1dc97c95c9..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_devices.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_download_settings.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_download_settings.png new file mode 100644 index 0000000000..c85c04fc42 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_download_settings.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_email.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_email.png deleted file mode 100644 index f02980ea9e..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_email.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_activities.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_activities.png index 898b395d1a..af874d5507 100644 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_activities.png and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_activities.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_food.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_food.png index a95a9e3b76..10fedebf85 100644 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_food.png and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_food.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_person.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_person.png new file mode 100644 index 0000000000..9ed0f63af2 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_person.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_premium.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_premium.png deleted file mode 100644 index daba62401a..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_premium.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_stickers.png 
b/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_stickers.png new file mode 100644 index 0000000000..050830b912 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_emoji_stickers.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_autodelete.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_autodelete.png new file mode 100644 index 0000000000..a0875f05d2 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_autodelete.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_blocked.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_blocked.png new file mode 100644 index 0000000000..7cb4ed96f7 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_blocked.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_calls.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_calls.png new file mode 100644 index 0000000000..207eedcbe0 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_calls.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_files.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_files.png new file mode 100644 index 0000000000..42f1c12e89 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_files.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_messages.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_messages.png new file mode 100644 index 0000000000..b8877f0fb8 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_messages.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_music.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_music.png new file mode 100644 index 0000000000..83f79b5a37 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_music.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_photos.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_photos.png new file mode 100644 index 0000000000..2a5e999965 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_photos.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_received.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_received.png new file mode 100644 index 0000000000..31f4cb8fed Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_received.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_sent.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_sent.png new file mode 100644 index 0000000000..7ed2ebfffb Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_sent.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_videos.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_videos.png new file mode 100644 index 0000000000..94c7081855 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_videos.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_voice.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_voice.png new file mode 100644 index 0000000000..f29be26a9b Binary files /dev/null and 
b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_data_voice.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_datausage.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_datausage.png new file mode 100644 index 0000000000..e6dc691bd0 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_datausage.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_devices.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_devices.png new file mode 100644 index 0000000000..2aa5e71972 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_devices.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_email.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_email.png new file mode 100644 index 0000000000..fe88c7cf83 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_email.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_fragment.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_fragment.png deleted file mode 100644 index 4a6ad49bcf..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_fragment.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_passcode_off.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_passcode_off.png new file mode 100644 index 0000000000..cbc4928c23 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_passcode_off.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_passcode_on.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_passcode_on.png new file mode 100644 index 0000000000..6bd2f3ef14 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_passcode_on.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_permissions.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_permissions.png new file mode 100644 index 0000000000..6f47026b1b Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_permissions.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_plus.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_plus.png new file mode 100644 index 0000000000..9f98d9a812 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_plus.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_sdcard.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_sdcard.png new file mode 100644 index 0000000000..babdec449a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_sdcard.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_storageusage.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_storageusage.png new file mode 100644 index 0000000000..17fe0ab309 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_filled_storageusage.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_folder.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_folder.png deleted file mode 100644 index 7dd2d616c5..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_folder.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_folder_reorder.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_folder_reorder.png deleted file mode 100644 index 
b99fb40951..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_folder_reorder.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_forward_check.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_forward_check.png deleted file mode 100644 index 41df12a06d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_forward_check.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_help_14.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_help_14.png deleted file mode 100644 index aaf6d2244b..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_help_14.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_input_gift.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_input_gift.png new file mode 100644 index 0000000000..ac0c98070d Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_input_gift.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_invite_14.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_invite_14.png deleted file mode 100644 index e25e36a037..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_invite_14.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_limit_2x.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_limit_2x.png deleted file mode 100644 index 76f08db699..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_limit_2x.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_list2.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_list2.png deleted file mode 100644 index 137f18efed..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_list2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_members_list.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_members_list.png deleted file mode 100644 index f110834109..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_members_list.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_mini_checks.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_mini_checks.png new file mode 100644 index 0000000000..3e76fe161e Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_mini_checks.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_mini_download.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_mini_download.png new file mode 100644 index 0000000000..6e2ced23a9 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_mini_download.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_mini_lock3.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_mini_lock3.png new file mode 100644 index 0000000000..5841c6cdce Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_mini_lock3.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_mini_upload.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_mini_upload.png new file mode 100644 index 0000000000..04c63878fe Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_mini_upload.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_more_0_2.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_more_0_2.png deleted file mode 100644 index 6174614b8c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_more_0_2.png and /dev/null differ 
diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_more_0_5.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_more_0_5.png deleted file mode 100644 index 0b7a539541..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_more_0_5.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_more_1_5.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_more_1_5.png deleted file mode 100644 index d928fc62f7..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_more_1_5.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_more_2.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_more_2.png deleted file mode 100644 index 573d6a7e08..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_more_2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_nearby_14.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_nearby_14.png deleted file mode 100644 index dae8b5d372..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_nearby_14.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_nearby_hw.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_nearby_hw.png deleted file mode 100644 index 1063ba1f82..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_nearby_hw.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_newfilter.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_newfilter.png deleted file mode 100644 index 1046ae5138..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_newfilter.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_nightmode_system.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_nightmode_system.png deleted file mode 100644 index caab94110e..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_nightmode_system.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_photo.9.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_photo.9.png deleted file mode 100755 index d0bc898d21..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_photo.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_photo_blur_linear.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_photo_blur_linear.png deleted file mode 100644 index 02ea70cd64..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_photo_blur_linear.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_photo_blur_off.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_photo_blur_off.png deleted file mode 100644 index 8901a0eb4d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_photo_blur_off.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_photo_blur_radial.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_photo_blur_radial.png deleted file mode 100644 index f0917f0cdd..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_photo_blur_radial.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_photo_shadow.9.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_photo_shadow.9.png deleted file mode 100755 index 70b49261ed..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_photo_shadow.9.png and /dev/null differ diff --git 
a/TMessagesProj/src/main/res/drawable-xhdpi/msg_premium_lock.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_premium_lock.png deleted file mode 100644 index f166680792..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_premium_lock.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_premium_translate.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_premium_translate.png new file mode 100644 index 0000000000..cf3aa07034 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_premium_translate.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_schedule.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_schedule.png deleted file mode 100644 index 6109f1b6ac..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_schedule.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_smile_status.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_smile_status.png new file mode 100644 index 0000000000..92e38f8655 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_smile_status.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_0_5.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_0_5.png deleted file mode 100644 index abc88b9efb..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_0_5.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_1.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_1.png deleted file mode 100644 index e5a1dfa0e5..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_1.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_1_5.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_1_5.png deleted file mode 100644 index e10a716c27..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_1_5.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_2.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_2.png deleted file mode 100644 index 0586243d51..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_fast.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_fast.png new file mode 100644 index 0000000000..2f7a482f24 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_fast.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_medium.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_medium.png new file mode 100644 index 0000000000..6e2b024d76 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_medium.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_normal.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_normal.png new file mode 100644 index 0000000000..44ccdf09be Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_normal.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_slow.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_slow.png new file mode 100644 index 0000000000..3214c4f6ca Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_slow.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_superfast.png 
b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_superfast.png new file mode 100644 index 0000000000..02e143a084 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_superfast.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_veryfast.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_veryfast.png new file mode 100644 index 0000000000..dc1109718a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_speed_veryfast.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_status_edit.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_status_edit.png deleted file mode 100644 index 522795edac..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_status_edit.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_status_set.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_status_set.png deleted file mode 100644 index ce9a0a93b3..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_status_set.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_storage_path.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_storage_path.png deleted file mode 100644 index 53e1c9be2d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_storage_path.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_storage_usage.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_storage_usage.png deleted file mode 100644 index 0ab43ef4d0..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_storage_usage.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_timer.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_timer.png deleted file mode 100644 index 456e7f6441..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_timer.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_timer_1h.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_timer_1h.png deleted file mode 100644 index 1df4aab4b8..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_timer_1h.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_timeredit.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_timeredit.png deleted file mode 100644 index 331236a99f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_timeredit.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/permissions_contacts.png b/TMessagesProj/src/main/res/drawable-xhdpi/permissions_contacts.png deleted file mode 100755 index 1422a9ce93..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/permissions_contacts.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/photo_w.png b/TMessagesProj/src/main/res/drawable-xhdpi/photo_w.png deleted file mode 100755 index 32d6044641..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/photo_w.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/photocheck.png b/TMessagesProj/src/main/res/drawable-xhdpi/photocheck.png deleted file mode 100755 index d905545297..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/photocheck.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/phototime.9.png b/TMessagesProj/src/main/res/drawable-xhdpi/phototime.9.png deleted file mode 100644 index 
8b50616c32..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/phototime.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/play_big.png b/TMessagesProj/src/main/res/drawable-xhdpi/play_big.png deleted file mode 100755 index 5b41813634..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/play_big.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/play_roundvideo.png b/TMessagesProj/src/main/res/drawable-xhdpi/play_roundvideo.png deleted file mode 100644 index 758c21aedc..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/play_roundvideo.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/player_new_next.png b/TMessagesProj/src/main/res/drawable-xhdpi/player_new_next.png deleted file mode 100644 index deb097fee4..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/player_new_next.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/player_new_pause.png b/TMessagesProj/src/main/res/drawable-xhdpi/player_new_pause.png deleted file mode 100644 index cb2a690c1d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/player_new_pause.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/player_new_play.png b/TMessagesProj/src/main/res/drawable-xhdpi/player_new_play.png deleted file mode 100644 index 46e62c08e6..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/player_new_play.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/player_new_previous.png b/TMessagesProj/src/main/res/drawable-xhdpi/player_new_previous.png deleted file mode 100644 index 675d466f04..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/player_new_previous.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/plus.png b/TMessagesProj/src/main/res/drawable-xhdpi/plus.png deleted file mode 100755 index dff43dd689..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/plus.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/popup_fixed.9.png b/TMessagesProj/src/main/res/drawable-xhdpi/popup_fixed.9.png deleted file mode 100755 index 9b851d817d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/popup_fixed.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/popup_fixed_top.9.png b/TMessagesProj/src/main/res/drawable-xhdpi/popup_fixed_top.9.png deleted file mode 100755 index f7ce561aca..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/popup_fixed_top.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/regbtn2.9.png b/TMessagesProj/src/main/res/drawable-xhdpi/regbtn2.9.png deleted file mode 100644 index 8aea6835fc..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/regbtn2.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/regbtn2_pressed.9.png b/TMessagesProj/src/main/res/drawable-xhdpi/regbtn2_pressed.9.png deleted file mode 100644 index 9751557f93..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/regbtn2_pressed.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/replyvideo.png b/TMessagesProj/src/main/res/drawable-xhdpi/replyvideo.png deleted file mode 100644 index 6f040ec24e..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/replyvideo.png and /dev/null differ diff --git 
a/TMessagesProj/src/main/res/drawable-xhdpi/s_pause.png b/TMessagesProj/src/main/res/drawable-xhdpi/s_pause.png deleted file mode 100644 index dc8b583a66..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/s_pause.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/s_play.png b/TMessagesProj/src/main/res/drawable-xhdpi/s_play.png deleted file mode 100644 index 6d106c9ebe..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/s_play.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/scroll_tip.png b/TMessagesProj/src/main/res/drawable-xhdpi/scroll_tip.png deleted file mode 100755 index 03c7a1196d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/scroll_tip.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/search_arrow2.png b/TMessagesProj/src/main/res/drawable-xhdpi/search_arrow2.png deleted file mode 100644 index f4a2af3e8c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/search_arrow2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/settings_noresults.png b/TMessagesProj/src/main/res/drawable-xhdpi/settings_noresults.png deleted file mode 100644 index d4d174b0ed..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/settings_noresults.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/sheet_shadow.9.png b/TMessagesProj/src/main/res/drawable-xhdpi/sheet_shadow.9.png deleted file mode 100644 index 88effafa67..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/sheet_shadow.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/slidearrow.png b/TMessagesProj/src/main/res/drawable-xhdpi/slidearrow.png deleted file mode 100755 index 0ac9da10ac..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/slidearrow.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/stickers_trending1.png b/TMessagesProj/src/main/res/drawable-xhdpi/stickers_trending1.png deleted file mode 100644 index a794b224e7..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/stickers_trending1.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/stickers_trending2.png b/TMessagesProj/src/main/res/drawable-xhdpi/stickers_trending2.png deleted file mode 100644 index 5f3440d061..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/stickers_trending2.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/stickers_trending3.png b/TMessagesProj/src/main/res/drawable-xhdpi/stickers_trending3.png deleted file mode 100644 index 5e8d131240..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/stickers_trending3.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/switch_to_on1.9.png b/TMessagesProj/src/main/res/drawable-xhdpi/switch_to_on1.9.png deleted file mode 100644 index f55292d7b7..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/switch_to_on1.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/switch_to_on2.9.png b/TMessagesProj/src/main/res/drawable-xhdpi/switch_to_on2.9.png deleted file mode 100644 index f55292d7b7..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/switch_to_on2.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/switch_track.9.png 
b/TMessagesProj/src/main/res/drawable-xhdpi/switch_track.9.png deleted file mode 100644 index 2431e83db9..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/switch_track.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/temp_starlarge.png b/TMessagesProj/src/main/res/drawable-xhdpi/temp_starlarge.png deleted file mode 100644 index 32824733a7..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/temp_starlarge.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/video_pause1.png b/TMessagesProj/src/main/res/drawable-xhdpi/video_pause1.png deleted file mode 100644 index b061ccbaf6..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/video_pause1.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/video_play1.png b/TMessagesProj/src/main/res/drawable-xhdpi/video_play1.png deleted file mode 100644 index 02836b77fd..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/video_play1.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/video_png.png b/TMessagesProj/src/main/res/drawable-xhdpi/video_png.png deleted file mode 100644 index 0aaff8b511..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/video_png.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/voice_mini_1_0.png b/TMessagesProj/src/main/res/drawable-xhdpi/voice_mini_1_0.png new file mode 100644 index 0000000000..f95c1d04a8 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/voice_mini_1_0.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/voice_mini_1_2.png b/TMessagesProj/src/main/res/drawable-xhdpi/voice_mini_1_2.png new file mode 100644 index 0000000000..21eae81bad Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/voice_mini_1_2.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/voice_mini_1_7.png b/TMessagesProj/src/main/res/drawable-xhdpi/voice_mini_1_7.png new file mode 100644 index 0000000000..055843fcf0 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/voice_mini_1_7.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/voice_volume_muted.png b/TMessagesProj/src/main/res/drawable-xhdpi/voice_volume_muted.png deleted file mode 100644 index 9ddb4322a5..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/voice_volume_muted.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/voice_volume_speaker.png b/TMessagesProj/src/main/res/drawable-xhdpi/voice_volume_speaker.png deleted file mode 100644 index 7ec7f5e652..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/voice_volume_speaker.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/wall_selection.9.png b/TMessagesProj/src/main/res/drawable-xhdpi/wall_selection.9.png deleted file mode 100644 index e83fd2dea8..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/wall_selection.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/widget_check.png b/TMessagesProj/src/main/res/drawable-xhdpi/widget_check.png deleted file mode 100644 index 3e5ce06e4d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/widget_check.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/widget_clock.png b/TMessagesProj/src/main/res/drawable-xhdpi/widget_clock.png deleted file mode 100644 index 
22dc1bb40a..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/widget_clock.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/widget_halfcheck.png b/TMessagesProj/src/main/res/drawable-xhdpi/widget_halfcheck.png deleted file mode 100644 index cf4b251cad..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/widget_halfcheck.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/widget_muted.png b/TMessagesProj/src/main/res/drawable-xhdpi/widget_muted.png deleted file mode 100644 index 2dd215018c..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/widget_muted.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/widget_online.png b/TMessagesProj/src/main/res/drawable-xhdpi/widget_online.png deleted file mode 100644 index e38d04d1b0..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/widget_online.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/widget_pin.png b/TMessagesProj/src/main/res/drawable-xhdpi/widget_pin.png deleted file mode 100644 index 82a1cefb4b..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/widget_pin.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/widget_verified.png b/TMessagesProj/src/main/res/drawable-xhdpi/widget_verified.png deleted file mode 100644 index 501811a280..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/widget_verified.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/widgets_dark_badgebg.9.png b/TMessagesProj/src/main/res/drawable-xhdpi/widgets_dark_badgebg.9.png deleted file mode 100644 index 254c709880..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/widgets_dark_badgebg.9.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/widgets_dark_plus.png b/TMessagesProj/src/main/res/drawable-xhdpi/widgets_dark_plus.png deleted file mode 100644 index 765d743e0b..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/widgets_dark_plus.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/widgets_light_plus.png b/TMessagesProj/src/main/res/drawable-xhdpi/widgets_light_plus.png deleted file mode 100644 index 6c968bb713..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/widgets_light_plus.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_animations.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_animations.png new file mode 100644 index 0000000000..91438d9182 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_animations.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_archived_stickers.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_archived_stickers.png new file mode 100644 index 0000000000..b360384102 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_archived_stickers.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_ask_question.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_ask_question.png new file mode 100644 index 0000000000..fa9475148c Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_ask_question.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_autodelete.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_autodelete.png new file mode 100644 index 0000000000..66f0d00722 Binary 
files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_autodelete.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_battery.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_battery.png new file mode 100644 index 0000000000..13b7989529 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_battery.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_block2.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_block2.png new file mode 100644 index 0000000000..6362a28a80 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_block2.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_call_earpiece.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_call_earpiece.png new file mode 100644 index 0000000000..320684b071 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_call_earpiece.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_data.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_data.png new file mode 100644 index 0000000000..6c87f74d96 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_data.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_devices.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_devices.png new file mode 100644 index 0000000000..a572428894 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_devices.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_discussion.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_discussion.png new file mode 100644 index 0000000000..780456eb84 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_discussion.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_email.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_email.png new file mode 100644 index 0000000000..a26b88ba50 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_email.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_folder.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_folder.png new file mode 100644 index 0000000000..137c3d3271 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_folder.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_gif.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_gif.png new file mode 100644 index 0000000000..7c63cfba32 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_gif.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_help.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_help.png new file mode 100644 index 0000000000..9ab9bf092f Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_help.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_language.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_language.png new file mode 100644 index 0000000000..d1dbf87af7 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_language.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_notifications.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_notifications.png new file mode 100644 index 0000000000..e7f19a94cd Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_notifications.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_permissions.png 
b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_permissions.png new file mode 100644 index 0000000000..2d4afa634a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_permissions.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_policy.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_policy.png new file mode 100644 index 0000000000..0fe88ace2b Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_policy.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_reactions2.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_reactions2.png new file mode 100644 index 0000000000..3f78de6284 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_reactions2.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_secret.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_secret.png new file mode 100644 index 0000000000..e84e6f3b6d Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_secret.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_smile_status.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_smile_status.png new file mode 100644 index 0000000000..524e513f9c Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_smile_status.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_sticker.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_sticker.png new file mode 100644 index 0000000000..170c2dafb0 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_sticker.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_trending.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_trending.png new file mode 100644 index 0000000000..b3fc0da97d Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_trending.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_videocall.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_videocall.png new file mode 100644 index 0000000000..11643a7caf Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg2_videocall.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_addaccount_all.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_addaccount_all.png deleted file mode 100644 index d646fb9c4b..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_addaccount_all.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_addaccount_base.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_addaccount_base.png deleted file mode 100644 index 2783368622..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_addaccount_base.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_addaccount_text.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_addaccount_text.png deleted file mode 100644 index d0580ed863..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_addaccount_text.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_ask_question.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_ask_question.png deleted file mode 100644 index 1338187d3f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_ask_question.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_autodelete_1h.png 
b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_autodelete_1h.png deleted file mode 100644 index 7b8facd343..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_autodelete_1h.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_bell_mute.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_bell_mute.png deleted file mode 100644 index 6afd639704..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_bell_mute.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_bell_unmute.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_bell_unmute.png deleted file mode 100644 index 122abae39a..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_bell_unmute.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_bluetooth.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_bluetooth.png deleted file mode 100644 index 70f2b163b1..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_bluetooth.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_calls_pin.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_calls_pin.png deleted file mode 100644 index 0d8309ae6d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_calls_pin.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_calls_unpin.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_calls_unpin.png deleted file mode 100644 index dfe91e2032..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_calls_unpin.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_channel_14.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_channel_14.png deleted file mode 100644 index f0c17ef95f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_channel_14.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_channel_create.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_channel_create.png new file mode 100644 index 0000000000..fa7ce4bd0e Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_channel_create.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_channel_hw.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_channel_hw.png deleted file mode 100644 index 264d717548..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_channel_hw.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_channel_ny.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_channel_ny.png deleted file mode 100644 index 970aab5b88..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_channel_ny.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_check.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_check.png deleted file mode 100644 index 5a266d8bc3..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_check.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_check_thick.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_check_thick.png deleted file mode 100644 index 1ba5ab03f4..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_check_thick.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_clock.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_clock.png deleted 
file mode 100755 index 5d11071da7..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_clock.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_data.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_data.png deleted file mode 100644 index dbbd4bba9b..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_data.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_data_usage.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_data_usage.png deleted file mode 100644 index a8fa50bcd3..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_data_usage.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_delete_old.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_delete_old.png deleted file mode 100644 index f61d350a5d..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_delete_old.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_devices.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_devices.png deleted file mode 100644 index d1167ad21f..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_devices.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_download_settings.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_download_settings.png new file mode 100644 index 0000000000..ef6e38bc2f Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_download_settings.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_email.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_email.png deleted file mode 100644 index 7d456218fb..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_email.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_activities.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_activities.png index e8ee8cc00d..01e2fc92f6 100644 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_activities.png and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_activities.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_food.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_food.png index ef4160eb10..87e80205c9 100644 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_food.png and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_food.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_person.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_person.png new file mode 100644 index 0000000000..1bbba0f146 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_person.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_premium.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_premium.png deleted file mode 100644 index 209f297767..0000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_premium.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_stickers.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_stickers.png new file mode 100644 index 0000000000..3448ec98f9 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_emoji_stickers.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_autodelete.png 
b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_autodelete.png
new file mode 100644
index 0000000000..9cf64dfd84
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_autodelete.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_blocked.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_blocked.png
new file mode 100644
index 0000000000..6fb2d134ed
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_blocked.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_calls.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_calls.png
new file mode 100644
index 0000000000..b40ae41fc5
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_calls.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_files.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_files.png
new file mode 100644
index 0000000000..080957bf7b
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_files.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_messages.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_messages.png
new file mode 100644
index 0000000000..4d93c6d59e
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_messages.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_music.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_music.png
new file mode 100644
index 0000000000..3fc0deb165
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_music.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_photos.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_photos.png
new file mode 100644
index 0000000000..3f4d9047ab
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_photos.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_received.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_received.png
new file mode 100644
index 0000000000..4e63b498fd
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_received.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_sent.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_sent.png
new file mode 100644
index 0000000000..82c4241872
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_sent.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_videos.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_videos.png
new file mode 100644
index 0000000000..267fcd8c7e
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_videos.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_voice.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_voice.png
new file mode 100644
index 0000000000..eddf2ee2d6
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_data_voice.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_datausage.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_datausage.png
new file mode 100644
index 0000000000..535b001b11
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_datausage.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_devices.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_devices.png
new file mode 100644
index 0000000000..3e75039f5b
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_devices.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_email.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_email.png
new file mode 100644
index 0000000000..75a5a60242
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_email.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_fragment.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_fragment.png
deleted file mode 100644
index 40d1756309..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_fragment.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_passcode_off.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_passcode_off.png
new file mode 100644
index 0000000000..a1eaa2dd54
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_passcode_off.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_passcode_on.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_passcode_on.png
new file mode 100644
index 0000000000..a1fab57e59
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_passcode_on.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_permissions.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_permissions.png
new file mode 100644
index 0000000000..51324af25d
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_permissions.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_plus.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_plus.png
new file mode 100644
index 0000000000..4421cc02f4
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_plus.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_sdcard.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_sdcard.png
new file mode 100644
index 0000000000..58f8ad0d7c
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_sdcard.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_storageusage.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_storageusage.png
new file mode 100644
index 0000000000..02e13ac482
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_filled_storageusage.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_folder.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_folder.png
deleted file mode 100644
index 0683d22ada..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_folder.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_folder_reorder.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_folder_reorder.png
deleted file mode 100644
index 45ccc96764..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_folder_reorder.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_forward_check.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_forward_check.png
deleted file mode 100644
index 4767a768d0..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_forward_check.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_help_14.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_help_14.png
deleted file mode 100644
index 95bbeb3087..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_help_14.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_input_gift.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_input_gift.png
new file mode 100644
index 0000000000..9ec88fb4f0
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_input_gift.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_invite_14.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_invite_14.png
deleted file mode 100644
index f1298c3603..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_invite_14.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_limit_2x.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_limit_2x.png
deleted file mode 100644
index 57c701959a..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_limit_2x.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_list2.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_list2.png
deleted file mode 100644
index 80c9108b79..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_list2.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_members_list.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_members_list.png
deleted file mode 100644
index 4fb5b0e280..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_members_list.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_mini_checks.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_mini_checks.png
new file mode 100644
index 0000000000..74120d5750
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_mini_checks.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_mini_download.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_mini_download.png
new file mode 100644
index 0000000000..2c059a820f
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_mini_download.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_mini_lock3.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_mini_lock3.png
new file mode 100644
index 0000000000..324436938a
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_mini_lock3.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_mini_upload.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_mini_upload.png
new file mode 100644
index 0000000000..34cad727e3
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_mini_upload.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_more_0_2.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_more_0_2.png
deleted file mode 100644
index fe355939d2..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_more_0_2.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_more_0_5.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_more_0_5.png
deleted file mode 100644
index 32e0e805df..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_more_0_5.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_more_1_5.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_more_1_5.png
deleted file mode 100644
index 6c3a2711f6..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_more_1_5.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_more_2.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_more_2.png
deleted file mode 100644
index 8ac35e9a2b..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_more_2.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_nearby_14.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_nearby_14.png
deleted file mode 100644
index 5d5c6b1036..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_nearby_14.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_nearby_hw.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_nearby_hw.png
deleted file mode 100644
index 9b74cf00ec..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_nearby_hw.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_newfilter.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_newfilter.png
deleted file mode 100644
index 2dd26fb6d1..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_newfilter.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_nightmode_system.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_nightmode_system.png
deleted file mode 100644
index 7e7bd45711..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_nightmode_system.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photo.9.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photo.9.png
deleted file mode 100755
index 5485eb0616..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photo.9.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photo_blur_linear.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photo_blur_linear.png
deleted file mode 100644
index 9cf18a85ba..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photo_blur_linear.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photo_blur_off.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photo_blur_off.png
deleted file mode 100644
index 079f4d5549..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photo_blur_off.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photo_blur_radial.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photo_blur_radial.png
deleted file mode 100644
index 9840a28102..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photo_blur_radial.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photo_shadow.9.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photo_shadow.9.png
deleted file mode 100755
index 1d68db4d82..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_photo_shadow.9.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_premium_lock.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_premium_lock.png
deleted file mode 100644
index e2f1170712..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_premium_lock.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_premium_translate.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_premium_translate.png
new file mode 100644
index 0000000000..6618705e20
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_premium_translate.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_schedule.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_schedule.png
deleted file mode 100644
index b6f32af1db..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_schedule.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_smile_status.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_smile_status.png
new file mode 100644
index 0000000000..132a443bb7
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_smile_status.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_0_5.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_0_5.png
deleted file mode 100644
index 7e53ce2209..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_0_5.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_1.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_1.png
deleted file mode 100644
index dd54678bdc..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_1.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_1_5.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_1_5.png
deleted file mode 100644
index e0e69fa2b7..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_1_5.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_2.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_2.png
deleted file mode 100644
index e9a59f8276..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_2.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_fast.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_fast.png
new file mode 100644
index 0000000000..dfecf295ba
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_fast.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_medium.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_medium.png
new file mode 100644
index 0000000000..08881888e4
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_medium.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_normal.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_normal.png
new file mode 100644
index 0000000000..6f513e3c9a
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_normal.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_slow.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_slow.png
new file mode 100644
index 0000000000..5d13be8bfa
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_slow.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_superfast.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_superfast.png
new file mode 100644
index 0000000000..c528be9472
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_superfast.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_veryfast.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_veryfast.png
new file mode 100644
index 0000000000..6c840cf2d0
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_speed_veryfast.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_status_edit.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_status_edit.png
deleted file mode 100644
index 95fb5fc288..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_status_edit.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_status_set.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_status_set.png
deleted file mode 100644
index fbd282152d..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_status_set.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_storage_path.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_storage_path.png
deleted file mode 100644
index 1b7d20a927..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_storage_path.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_storage_usage.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_storage_usage.png
deleted file mode 100644
index 8fac94c062..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_storage_usage.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_timer.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_timer.png
deleted file mode 100644
index 14b0682aad..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_timer.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_timer_1h.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_timer_1h.png
deleted file mode 100644
index 8325c20bed..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_timer_1h.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_timeredit.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_timeredit.png
deleted file mode 100644
index 61e3b84b57..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_timeredit.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/permissions_contacts.png b/TMessagesProj/src/main/res/drawable-xxhdpi/permissions_contacts.png
deleted file mode 100755
index 8202daa098..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/permissions_contacts.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/photo_w.png b/TMessagesProj/src/main/res/drawable-xxhdpi/photo_w.png
deleted file mode 100755
index 6369846075..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/photo_w.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/photocheck.png b/TMessagesProj/src/main/res/drawable-xxhdpi/photocheck.png
deleted file mode 100755
index 9aba64fe05..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/photocheck.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/phototime.9.png b/TMessagesProj/src/main/res/drawable-xxhdpi/phototime.9.png
deleted file mode 100644
index e493f511e8..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/phototime.9.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/play_big.png b/TMessagesProj/src/main/res/drawable-xxhdpi/play_big.png
deleted file mode 100755
index e4d1f1c88a..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/play_big.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/play_roundvideo.png b/TMessagesProj/src/main/res/drawable-xxhdpi/play_roundvideo.png
deleted file mode 100644
index 5cdd11005d..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/play_roundvideo.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/player_new_next.png b/TMessagesProj/src/main/res/drawable-xxhdpi/player_new_next.png
deleted file mode 100644
index 92fe0b3738..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/player_new_next.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/player_new_pause.png b/TMessagesProj/src/main/res/drawable-xxhdpi/player_new_pause.png
deleted file mode 100644
index 32d9a3a94e..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/player_new_pause.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/player_new_play.png b/TMessagesProj/src/main/res/drawable-xxhdpi/player_new_play.png
deleted file mode 100644
index 89ad196321..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/player_new_play.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/player_new_previous.png b/TMessagesProj/src/main/res/drawable-xxhdpi/player_new_previous.png
deleted file mode 100644
index 3a68aad164..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/player_new_previous.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/plus.png b/TMessagesProj/src/main/res/drawable-xxhdpi/plus.png
deleted file mode 100755
index c033b7a603..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/plus.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/popup_fixed.9.png b/TMessagesProj/src/main/res/drawable-xxhdpi/popup_fixed.9.png
deleted file mode 100755
index 64a91e6500..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/popup_fixed.9.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/popup_fixed_top.9.png b/TMessagesProj/src/main/res/drawable-xxhdpi/popup_fixed_top.9.png
deleted file mode 100755
index eb71ef90d6..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/popup_fixed_top.9.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/regbtn2.9.png b/TMessagesProj/src/main/res/drawable-xxhdpi/regbtn2.9.png
deleted file mode 100644
index b8c21d82a1..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/regbtn2.9.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/regbtn2_pressed.9.png b/TMessagesProj/src/main/res/drawable-xxhdpi/regbtn2_pressed.9.png
deleted file mode 100644
index 4ee56e4615..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/regbtn2_pressed.9.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/replyvideo.png b/TMessagesProj/src/main/res/drawable-xxhdpi/replyvideo.png
deleted file mode 100644
index a1374752ad..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/replyvideo.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/s_pause.png b/TMessagesProj/src/main/res/drawable-xxhdpi/s_pause.png
deleted file mode 100644
index 5ff186078f..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/s_pause.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/s_play.png b/TMessagesProj/src/main/res/drawable-xxhdpi/s_play.png
deleted file mode 100644
index 422195a98a..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/s_play.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/scroll_tip.png b/TMessagesProj/src/main/res/drawable-xxhdpi/scroll_tip.png
deleted file mode 100755
index 354efbee9a..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/scroll_tip.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/search_arrow2.png b/TMessagesProj/src/main/res/drawable-xxhdpi/search_arrow2.png
deleted file mode 100644
index 7a946c6221..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/search_arrow2.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/settings_noresults.png b/TMessagesProj/src/main/res/drawable-xxhdpi/settings_noresults.png
deleted file mode 100644
index d7a43651a2..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/settings_noresults.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/sheet_shadow.9.png b/TMessagesProj/src/main/res/drawable-xxhdpi/sheet_shadow.9.png
deleted file mode 100644
index ef680ecb60..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/sheet_shadow.9.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/slidearrow.png b/TMessagesProj/src/main/res/drawable-xxhdpi/slidearrow.png
deleted file mode 100755
index 51969e5508..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/slidearrow.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/stickers_trending1.png b/TMessagesProj/src/main/res/drawable-xxhdpi/stickers_trending1.png
deleted file mode 100644
index edf62fb045..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/stickers_trending1.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/stickers_trending2.png b/TMessagesProj/src/main/res/drawable-xxhdpi/stickers_trending2.png
deleted file mode 100644
index 0805e04e63..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/stickers_trending2.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/stickers_trending3.png b/TMessagesProj/src/main/res/drawable-xxhdpi/stickers_trending3.png
deleted file mode 100644
index f67f64dab4..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/stickers_trending3.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/switch_to_on1.9.png b/TMessagesProj/src/main/res/drawable-xxhdpi/switch_to_on1.9.png
deleted file mode 100644
index da2e30b181..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/switch_to_on1.9.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/switch_to_on2.9.png b/TMessagesProj/src/main/res/drawable-xxhdpi/switch_to_on2.9.png
deleted file mode 100644
index da2e30b181..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/switch_to_on2.9.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/switch_track.9.png b/TMessagesProj/src/main/res/drawable-xxhdpi/switch_track.9.png
deleted file mode 100644
index e76f3c8220..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/switch_track.9.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/temp_starlarge.png b/TMessagesProj/src/main/res/drawable-xxhdpi/temp_starlarge.png
deleted file mode 100644
index 428b529a73..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/temp_starlarge.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/video_pause1.png b/TMessagesProj/src/main/res/drawable-xxhdpi/video_pause1.png
deleted file mode 100644
index 36dd02edf5..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/video_pause1.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/video_play1.png b/TMessagesProj/src/main/res/drawable-xxhdpi/video_play1.png
deleted file mode 100644
index 4e226fe7f8..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/video_play1.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/voice_mini_1_0.png b/TMessagesProj/src/main/res/drawable-xxhdpi/voice_mini_1_0.png
new file mode 100644
index 0000000000..bae7846a92
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/voice_mini_1_0.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/voice_mini_1_2.png b/TMessagesProj/src/main/res/drawable-xxhdpi/voice_mini_1_2.png
new file mode 100644
index 0000000000..390f93ebcf
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/voice_mini_1_2.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/voice_mini_1_7.png b/TMessagesProj/src/main/res/drawable-xxhdpi/voice_mini_1_7.png
new file mode 100644
index 0000000000..7782ff9a3b
Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/voice_mini_1_7.png differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/voice_volume_muted.png b/TMessagesProj/src/main/res/drawable-xxhdpi/voice_volume_muted.png
deleted file mode 100644
index 7bb1da0e72..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/voice_volume_muted.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/voice_volume_speaker.png b/TMessagesProj/src/main/res/drawable-xxhdpi/voice_volume_speaker.png
deleted file mode 100644
index 10dc7297ff..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/voice_volume_speaker.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/wall_selection.9.png b/TMessagesProj/src/main/res/drawable-xxhdpi/wall_selection.9.png
deleted file mode 100644
index 475ee47fce..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/wall_selection.9.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/widget_check.png b/TMessagesProj/src/main/res/drawable-xxhdpi/widget_check.png
deleted file mode 100644
index e778c885d5..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/widget_check.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/widget_clock.png b/TMessagesProj/src/main/res/drawable-xxhdpi/widget_clock.png
deleted file mode 100644
index 1e1fe90398..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/widget_clock.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/widget_halfcheck.png b/TMessagesProj/src/main/res/drawable-xxhdpi/widget_halfcheck.png
deleted file mode 100644
index 886c984114..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/widget_halfcheck.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/widget_muted.png b/TMessagesProj/src/main/res/drawable-xxhdpi/widget_muted.png
deleted file mode 100644
index 3e3c394b86..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/widget_muted.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/widget_online.png b/TMessagesProj/src/main/res/drawable-xxhdpi/widget_online.png
deleted file mode 100644
index b2d8a7a61f..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/widget_online.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/widget_pin.png b/TMessagesProj/src/main/res/drawable-xxhdpi/widget_pin.png
deleted file mode 100644
index 7aeacae528..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/widget_pin.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/widget_verified.png b/TMessagesProj/src/main/res/drawable-xxhdpi/widget_verified.png
deleted file mode 100644
index 46a18fad7a..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/widget_verified.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/widgets_dark_badgebg.9.png b/TMessagesProj/src/main/res/drawable-xxhdpi/widgets_dark_badgebg.9.png
deleted file mode 100644
index 7f38c9e386..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/widgets_dark_badgebg.9.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/widgets_dark_plus.png b/TMessagesProj/src/main/res/drawable-xxhdpi/widgets_dark_plus.png
deleted file mode 100644
index e62330a344..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/widgets_dark_plus.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/widgets_light_plus.png b/TMessagesProj/src/main/res/drawable-xxhdpi/widgets_light_plus.png
deleted file mode 100644
index dea680d028..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/widgets_light_plus.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable-xxxhdpi/sheet_shadow.9.png b/TMessagesProj/src/main/res/drawable-xxxhdpi/sheet_shadow.9.png
deleted file mode 100644
index 66300db846..0000000000
Binary files a/TMessagesProj/src/main/res/drawable-xxxhdpi/sheet_shadow.9.png and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable/catstile.jpg b/TMessagesProj/src/main/res/drawable/catstile.jpg
deleted file mode 100644
index c42d45d3e9..0000000000
Binary files a/TMessagesProj/src/main/res/drawable/catstile.jpg and /dev/null differ
diff --git a/TMessagesProj/src/main/res/drawable/switch_thumb.xml b/TMessagesProj/src/main/res/drawable/switch_thumb.xml
deleted file mode 100644
index 3d6207ceb8..0000000000
--- a/TMessagesProj/src/main/res/drawable/switch_thumb.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
\ No newline at end of file
diff --git a/TMessagesProj/src/main/res/raw/auto_night_off.json b/TMessagesProj/src/main/res/raw/auto_night_off.json
new file mode 100644
index 0000000000..a729365e20
--- /dev/null
+++ b/TMessagesProj/src/main/res/raw/auto_night_off.json
@@ -0,0 +1 @@
+{"v":"5.10.1","fr":60,"ip":0,"op":180,"w":512,"h":512,"nm":"Comp 
1","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Exclude","sr":1,"ks":{"o":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":10,"s":[0]},{"i":{"x":[0.2],"y":[1]},"o":{"x":[0.2],"y":[0]},"t":41,"s":[33]},{"t":100,"s":[0]}],"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[256,256,0],"ix":2,"l":2},"a":{"a":0,"k":[0,0,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,-141.385],[141.385,0],[0,141.385],[-141.385,0]],"o":[[0,141.385],[-141.385,0],[0,-141.385],[141.385,0]],"v":[[256,0],[0,256],[-256,0],[0,-256]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":0,"k":{"i":[[0,-88.366],[88.366,0],[0,88.366],[-88.366,0]],"o":[[0,88.366],[-88.366,0],[0,-88.366],[88.366,0]],"v":[[160,0],[0,160],[-160,0],[0,-160]],"c":true},"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"gf","o":{"a":0,"k":100,"ix":10},"r":1,"bm":0,"g":{"p":3,"k":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":10,"s":[0,1,1,1,0.5,1,1,1,1,1,1,1,0,0,0,0,0,0,0.31,0,0.62,0,0.623,0.5,0.625,1,0.633,0.5,0.64,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":41,"s":[0,1,1,1,0.5,1,1,1,1,1,1,1,0,0,0,0,0,0,0.31,0,0.62,0,0.623,0.5,0.625,1,0.812,0.5,1,0]},{"t":100,"s":[0,1,1,1,0.5,1,1,1,1,1,1,1,0,0,0,0,0,0,0.31,0,0.62,0,0.623,0.5,0.625,1,0.633,0.5,0.64,0]}],"ix":9}},"s":{"a":0,"k":[0,0],"ix":5},"e":{"a":0,"k":[256,0],"ix":6},"t":2,"h":{"a":0,"k":0,"ix":7},"a":{"a":0,"k":0,"ix":8},"nm":"Gradient Fill 1","mn":"ADBE Vector Graphic - G-Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Exclude","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":180,"st":0,"ct":1,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Ellipse 16","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[256,256,0],"ix":2,"l":2},"a":{"a":0,"k":[0,0,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"hasMask":true,"masksProperties":[{"inv":false,"mode":"i","pt":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":40,"s":[{"i":[[-44.944,0],[-28.764,-28.764],[0,-44.944],[28.764,-28.764],[44.944,0],[28.764,28.764],[0,44.944],[-28.764,28.764]],"o":[[44.944,0],[28.764,28.764],[0,44.944],[-28.764,28.764],[-44.944,0],[-28.764,-28.764],[0,-44.944],[28.764,-28.764]],"v":[[1.078,-161.25],[114.786,-114.958],[161.078,-1.25],[114.786,112.458],[1.078,158.75],[-112.63,112.458],[-158.922,-1.25],[-112.63,-114.958]],"c":true}]},{"t":41,"s":[{"i":[[-15.747,6.133],[3.233,-8.208],[67.677,0],[0,88.366],[-59.371,23.385],[3.202,-8.22],[0,-17.917],[-77.32,0]],"o":[[8.22,-3.202],[-23.385,59.371],[-88.366,0],[0,-67.677],[8.208,-3.233],[-6.133,15.747],[0,77.32],[17.917,0]],"v":[[134.131,46.796],[148.262,58.906],[-0.663,160.296],[-160.663,0.296],[-59.273,-148.629],[-47.164,-134.499],[-56.663,-83.704],[83.337,56.296]],"c":true}]}],"ix":1},"o":{"a":0,"k":100,"ix":3},"x":{"a":0,"k":0,"ix":4},"nm":"Mask 
1"}],"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":10,"s":[{"i":[[-78.652,0],[0,-78.652],[78.652,0],[0,78.652]],"o":[[78.652,0],[0,78.652],[-78.652,0],[0,-78.652]],"v":[[-216,76],[-76,216],[-216,356],[-356,216]],"c":true}]},{"i":{"x":0.2,"y":1},"o":{"x":0.167,"y":0.167},"t":40,"s":[{"i":[[-90.965,0],[0,-90.965],[90.965,0],[0,90.965]],"o":[[90.965,0],[0,90.965],[-90.965,0],[0,-90.965]],"v":[[-1.083,-160.834],[160.834,1.083],[-1.083,163],[-163,1.083]],"c":true}]},{"t":100,"s":[{"i":[[-78.652,0],[0,-78.652],[78.652,0],[0,78.652]],"o":[[78.652,0],[0,78.652],[-78.652,0],[0,-78.652]],"v":[[84,-224],[224,-84],[84,56],[-56,-84]],"c":true}]}],"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":0,"k":{"i":[[-89.888,0],[0,-89.888],[89.888,0],[0,89.888]],"o":[[89.888,0],[0,89.888],[-89.888,0],[0,-89.888]],"v":[[0,-160],[160,0],[0,160],[-160,0]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":2,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Ellipse 16","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":180,"st":0,"ct":1,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/msg_translate.json b/TMessagesProj/src/main/res/raw/msg_translate.json new file mode 100644 index 0000000000..f6be1fc847 --- /dev/null +++ b/TMessagesProj/src/main/res/raw/msg_translate.json @@ -0,0 +1 @@ +{"v":"5.10.1","fr":60,"ip":0,"op":180,"w":512,"h":512,"nm":"Comp 1","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"ee 
Outlines","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.833],"y":[0.693]},"o":{"x":[0.167],"y":[0.167]},"t":0,"s":[-45]},{"i":{"x":[0.833],"y":[0.86]},"o":{"x":[0.167],"y":[0.114]},"t":1,"s":[-43.325]},{"i":{"x":[0.833],"y":[0.721]},"o":{"x":[0.167],"y":[0.206]},"t":2,"s":[-38.834]},{"i":{"x":[0.833],"y":[0.824]},"o":{"x":[0.167],"y":[0.119]},"t":3,"s":[-35.792]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.159]},"t":4,"s":[-28.638]},{"i":{"x":[0.833],"y":[0.877]},"o":{"x":[0.167],"y":[0.166]},"t":5,"s":[-20.719]},{"i":{"x":[0.833],"y":[0.764]},"o":{"x":[0.167],"y":[0.257]},"t":6,"s":[-12.743]},{"i":{"x":[0.833],"y":[0.88]},"o":{"x":[0.167],"y":[0.129]},"t":7,"s":[-8.922]},{"i":{"x":[0.833],"y":[0.776]},"o":{"x":[0.167],"y":[0.272]},"t":8,"s":[-1.912]},{"i":{"x":[0.833],"y":[0.856]},"o":{"x":[0.167],"y":[0.133]},"t":9,"s":[1.185]},{"i":{"x":[0.833],"y":[0.887]},"o":{"x":[0.167],"y":[0.197]},"t":10,"s":[6.404]},{"i":{"x":[0.833],"y":[0.813]},"o":{"x":[0.167],"y":[0.316]},"t":11,"s":[10.216]},{"i":{"x":[0.833],"y":[0.895]},"o":{"x":[0.167],"y":[0.15]},"t":12,"s":[11.584]},{"i":{"x":[0.833],"y":[0.955]},"o":{"x":[0.167],"y":[0.409]},"t":13,"s":[13.283]},{"i":{"x":[0.833],"y":[0.461]},"o":{"x":[0.167],"y":[-0.1]},"t":14,"s":[13.717]},{"i":{"x":[0.833],"y":[0.779]},"o":{"x":[0.167],"y":[0.099]},"t":15,"s":[13.52]},{"i":{"x":[0.833],"y":[0.866]},"o":{"x":[0.167],"y":[0.134]},"t":16,"s":[12.442]},{"i":{"x":[0.833],"y":[0.734]},"o":{"x":[0.167],"y":[0.222]},"t":17,"s":[10.663]},{"i":{"x":[0.833],"y":[0.83]},"o":{"x":[0.167],"y":[0.121]},"t":18,"s":[9.591]},{"i":{"x":[0.833],"y":[0.876]},"o":{"x":[0.167],"y":[0.163]},"t":19,"s":[7.236]},{"i":{"x":[0.833],"y":[0.761]},"o":{"x":[0.167],"y":[0.254]},"t":20,"s":[4.787]},{"i":{"x":[0.833],"y":[0.845]},"o":{"x":[0.167],"y":[0.128]},"t":21,"s":[3.589]},{"i":{"x":[0.833],"y":[0.881]},"o":{"x":[0.167],"y":[0.18]},"t":22,"s":[1.348]},{"i":{"x":[0.833],"y":[0.782]},"o":{"x":[0.167],"y":[0.279]},"t":23,"s":[-0.584]},{"i":{"x":[0.833],"y":[0.861]},"o":{"x":[0.167],"y":[0.135]},"t":24,"s":[-1.406]},{"i":{"x":[0.833],"y":[0.89]},"o":{"x":[0.167],"y":[0.208]},"t":25,"s":[-2.732]},{"i":{"x":[0.833],"y":[0.837]},"o":{"x":[0.167],"y":[0.344]},"t":26,"s":[-3.621]},{"i":{"x":[0.833],"y":[0.941]},"o":{"x":[0.167],"y":[0.171]},"t":27,"s":[-3.905]},{"i":{"x":[0.833],"y":[0.759]},"o":{"x":[0.167],"y":[-0.201]},"t":28,"s":[-4.176]},{"i":{"x":[0.833],"y":[0.654]},"o":{"x":[0.167],"y":[0.127]},"t":29,"s":[-4.097]},{"i":{"x":[0.833],"y":[0.804]},"o":{"x":[0.167],"y":[0.11]},"t":30,"s":[-3.947]},{"i":{"x":[0.833],"y":[0.87]},"o":{"x":[0.167],"y":[0.145]},"t":31,"s":[-3.475]},{"i":{"x":[0.833],"y":[0.743]},"o":{"x":[0.167],"y":[0.232]},"t":32,"s":[-2.839]},{"i":{"x":[0.833],"y":[0.834]},"o":{"x":[0.167],"y":[0.123]},"t":33,"s":[-2.483]},{"i":{"x":[0.833],"y":[0.877]},"o":{"x":[0.167],"y":[0.168]},"t":34,"s":[-1.739]},{"i":{"x":[0.833],"y":[0.766]},"o":{"x":[0.167],"y":[0.26]},"t":35,"s":[-1.005]},{"i":{"x":[0.833],"y":[0.848]},"o":{"x":[0.167],"y":[0.129]},"t":36,"s":[-0.658]},{"i":{"x":[0.833],"y":[0.883]},"o":{"x":[0.167],"y":[0.185]},"t":37,"s":[-0.029]},{"i":{"x":[0.833],"y":[0.79]},"o":{"x":[0.167],"y":[0.288]},"t":38,"s":[0.487]},{"i":{"x":[0.833],"y":[0.867]},"o":{"x":[0.167],"y":[0.138]},"t":39,"s":[0.698]},{"i":{"x":[0.833],"y":[0.895]},"o":{"x":[0.167],"y":[0.225]},"t":40,"s":[1.019]},{"i":{"x":[0.833],"y":[0.893]},"o":{"x":[0.167],"y":[0.41]},"t":41,"s":[1.208]},{"i":{"x":[0.833],"y":[1.414]},"o":{"x":[0.167],"y":[0.382]},"t":42,"s":[1
.256]},{"i":{"x":[0.833],"y":[0.846]},"o":{"x":[0.167],"y":[0.069]},"t":43,"s":[1.27]},{"i":{"x":[0.833],"y":[0.7]},"o":{"x":[0.167],"y":[0.181]},"t":44,"s":[1.189]},{"i":{"x":[0.833],"y":[0.817]},"o":{"x":[0.167],"y":[0.115]},"t":45,"s":[1.121]},{"i":{"x":[0.833],"y":[0.872]},"o":{"x":[0.167],"y":[0.153]},"t":46,"s":[0.943]},{"i":{"x":[0.833],"y":[0.75]},"o":{"x":[0.167],"y":[0.241]},"t":47,"s":[0.729]},{"i":{"x":[0.833],"y":[0.838]},"o":{"x":[0.167],"y":[0.125]},"t":48,"s":[0.616]},{"i":{"x":[0.833],"y":[0.879]},"o":{"x":[0.167],"y":[0.172]},"t":49,"s":[0.389]},{"i":{"x":[0.833],"y":[0.771]},"o":{"x":[0.167],"y":[0.266]},"t":50,"s":[0.175]},{"i":{"x":[0.833],"y":[0.852]},"o":{"x":[0.167],"y":[0.131]},"t":51,"s":[0.077]},{"i":{"x":[0.833],"y":[0.884]},"o":{"x":[0.167],"y":[0.19]},"t":52,"s":[-0.094]},{"i":{"x":[0.833],"y":[0.799]},"o":{"x":[0.167],"y":[0.299]},"t":53,"s":[-0.227]},{"i":{"x":[0.833],"y":[0.878]},"o":{"x":[0.167],"y":[0.142]},"t":54,"s":[-0.279]},{"i":{"x":[0.833],"y":[0.907]},"o":{"x":[0.167],"y":[0.261]},"t":55,"s":[-0.351]},{"i":{"x":[0.833],"y":[1.224]},"o":{"x":[0.167],"y":[0.8]},"t":56,"s":[-0.386]},{"i":{"x":[0.833],"y":[0.694]},"o":{"x":[0.167],"y":[0.061]},"t":57,"s":[-0.389]},{"i":{"x":[0.833],"y":[0.86]},"o":{"x":[0.167],"y":[0.115]},"t":58,"s":[-0.375]},{"i":{"x":[0.833],"y":[0.721]},"o":{"x":[0.167],"y":[0.206]},"t":59,"s":[-0.336]},{"i":{"x":[0.833],"y":[0.824]},"o":{"x":[0.167],"y":[0.119]},"t":60,"s":[-0.31]},{"i":{"x":[0.833],"y":[0.874]},"o":{"x":[0.167],"y":[0.159]},"t":61,"s":[-0.248]},{"i":{"x":[0.833],"y":[0.756]},"o":{"x":[0.167],"y":[0.248]},"t":62,"s":[-0.179]},{"i":{"x":[0.833],"y":[0.842]},"o":{"x":[0.167],"y":[0.126]},"t":63,"s":[-0.144]},{"i":{"x":[0.833],"y":[0.88]},"o":{"x":[0.167],"y":[0.176]},"t":64,"s":[-0.077]},{"i":{"x":[0.833],"y":[0.776]},"o":{"x":[0.167],"y":[0.272]},"t":65,"s":[-0.016]},{"i":{"x":[0.833],"y":[0.856]},"o":{"x":[0.167],"y":[0.133]},"t":66,"s":[0.01]},{"i":{"x":[0.833],"y":[0.887]},"o":{"x":[0.167],"y":[0.197]},"t":67,"s":[0.056]},{"i":{"x":[0.833],"y":[0.813]},"o":{"x":[0.167],"y":[0.316]},"t":68,"s":[0.089]},{"i":{"x":[0.833],"y":[0.896]},"o":{"x":[0.167],"y":[0.15]},"t":69,"s":[0.1]},{"i":{"x":[0.833],"y":[0.955]},"o":{"x":[0.167],"y":[0.412]},"t":70,"s":[0.115]},{"i":{"x":[0.833],"y":[0.464]},"o":{"x":[0.167],"y":[-0.097]},"t":71,"s":[0.119]},{"i":{"x":[0.833],"y":[0.779]},"o":{"x":[0.167],"y":[0.099]},"t":72,"s":[0.117]},{"i":{"x":[0.833],"y":[0.866]},"o":{"x":[0.167],"y":[0.134]},"t":73,"s":[0.108]},{"i":{"x":[0.833],"y":[0.734]},"o":{"x":[0.167],"y":[0.222]},"t":74,"s":[0.092]},{"i":{"x":[0.833],"y":[0.83]},"o":{"x":[0.167],"y":[0.121]},"t":75,"s":[0.083]},{"i":{"x":[0.833],"y":[0.876]},"o":{"x":[0.167],"y":[0.163]},"t":76,"s":[0.063]},{"i":{"x":[0.833],"y":[0.761]},"o":{"x":[0.167],"y":[0.254]},"t":77,"s":[0.041]},{"i":{"x":[0.833],"y":[0.845]},"o":{"x":[0.167],"y":[0.128]},"t":78,"s":[0.031]},{"i":{"x":[0.833],"y":[0.881]},"o":{"x":[0.167],"y":[0.18]},"t":79,"s":[0.012]},{"i":{"x":[0.833],"y":[0.782]},"o":{"x":[0.167],"y":[0.279]},"t":80,"s":[-0.005]},{"i":{"x":[0.833],"y":[0.861]},"o":{"x":[0.167],"y":[0.135]},"t":81,"s":[-0.012]},{"i":{"x":[0.833],"y":[0.89]},"o":{"x":[0.167],"y":[0.208]},"t":82,"s":[-0.024]},{"i":{"x":[0.833],"y":[0.837]},"o":{"x":[0.167],"y":[0.344]},"t":83,"s":[-0.031]},{"i":{"x":[0.833],"y":[0.942]},"o":{"x":[0.167],"y":[0.171]},"t":84,"s":[-0.034]},{"i":{"x":[0.833],"y":[0.761]},"o":{"x":[0.167],"y":[-0.196]},"t":85,"s":[-0.036]},{"i":{"x":[0.833],"y":[0.654]},"o":{"x":[0.167],"y":[0.128]}
,"t":86,"s":[-0.035]},{"i":{"x":[0.833],"y":[0.804]},"o":{"x":[0.167],"y":[0.11]},"t":87,"s":[-0.034]},{"i":{"x":[0.833],"y":[0.87]},"o":{"x":[0.167],"y":[0.145]},"t":88,"s":[-0.03]},{"i":{"x":[0.833],"y":[0.743]},"o":{"x":[0.167],"y":[0.232]},"t":89,"s":[-0.025]},{"i":{"x":[0.833],"y":[0.834]},"o":{"x":[0.167],"y":[0.123]},"t":90,"s":[-0.021]},{"i":{"x":[0.833],"y":[0.877]},"o":{"x":[0.167],"y":[0.168]},"t":91,"s":[-0.015]},{"i":{"x":[0.833],"y":[0.766]},"o":{"x":[0.167],"y":[0.26]},"t":92,"s":[-0.009]},{"i":{"x":[0.833],"y":[0.848]},"o":{"x":[0.167],"y":[0.129]},"t":93,"s":[-0.006]},{"i":{"x":[0.833],"y":[0.883]},"o":{"x":[0.167],"y":[0.185]},"t":94,"s":[0]},{"i":{"x":[0.833],"y":[0.79]},"o":{"x":[0.167],"y":[0.288]},"t":95,"s":[0.004]},{"i":{"x":[0.833],"y":[0.868]},"o":{"x":[0.167],"y":[0.138]},"t":96,"s":[0.006]},{"i":{"x":[0.833],"y":[0.895]},"o":{"x":[0.167],"y":[0.225]},"t":97,"s":[0.009]},{"i":{"x":[0.833],"y":[0.894]},"o":{"x":[0.167],"y":[0.411]},"t":98,"s":[0.01]},{"i":{"x":[0.833],"y":[1.43]},"o":{"x":[0.167],"y":[0.389]},"t":99,"s":[0.011]},{"i":{"x":[0.833],"y":[0.846]},"o":{"x":[0.167],"y":[0.07]},"t":100,"s":[0.011]},{"i":{"x":[0.833],"y":[0.7]},"o":{"x":[0.167],"y":[0.182]},"t":101,"s":[0.01]},{"i":{"x":[0.833],"y":[0.817]},"o":{"x":[0.167],"y":[0.115]},"t":102,"s":[0.01]},{"i":{"x":[0.833],"y":[0.873]},"o":{"x":[0.167],"y":[0.153]},"t":103,"s":[0.008]},{"i":{"x":[0.833],"y":[0.75]},"o":{"x":[0.167],"y":[0.241]},"t":104,"s":[0.006]},{"i":{"x":[0.833],"y":[0.838]},"o":{"x":[0.167],"y":[0.125]},"t":105,"s":[0.005]},{"i":{"x":[0.833],"y":[0.879]},"o":{"x":[0.167],"y":[0.172]},"t":106,"s":[0.003]},{"i":{"x":[0.833],"y":[0.771]},"o":{"x":[0.167],"y":[0.266]},"t":107,"s":[0.002]},{"i":{"x":[0.833],"y":[0.852]},"o":{"x":[0.167],"y":[0.131]},"t":108,"s":[0.001]},{"i":{"x":[0.833],"y":[0.884]},"o":{"x":[0.167],"y":[0.19]},"t":109,"s":[-0.001]},{"i":{"x":[0.833],"y":[0.799]},"o":{"x":[0.167],"y":[0.299]},"t":110,"s":[-0.002]},{"i":{"x":[0.833],"y":[0.878]},"o":{"x":[0.167],"y":[0.142]},"t":111,"s":[-0.002]},{"i":{"x":[0.833],"y":[0.907]},"o":{"x":[0.167],"y":[0.261]},"t":112,"s":[-0.003]},{"i":{"x":[0.833],"y":[1.23]},"o":{"x":[0.167],"y":[0.808]},"t":113,"s":[-0.003]},{"i":{"x":[0.833],"y":[0.695]},"o":{"x":[0.167],"y":[0.061]},"t":114,"s":[-0.003]},{"i":{"x":[0.833],"y":[0.86]},"o":{"x":[0.167],"y":[0.115]},"t":115,"s":[-0.003]},{"i":{"x":[0.833],"y":[0.721]},"o":{"x":[0.167],"y":[0.207]},"t":116,"s":[-0.003]},{"i":{"x":[0.833],"y":[0.824]},"o":{"x":[0.167],"y":[0.119]},"t":117,"s":[-0.003]},{"i":{"x":[0.833],"y":[0.874]},"o":{"x":[0.167],"y":[0.159]},"t":118,"s":[-0.002]},{"i":{"x":[0.833],"y":[0.756]},"o":{"x":[0.167],"y":[0.248]},"t":119,"s":[-0.002]},{"i":{"x":[0.833],"y":[0.842]},"o":{"x":[0.167],"y":[0.126]},"t":120,"s":[-0.001]},{"i":{"x":[0.833],"y":[0.88]},"o":{"x":[0.167],"y":[0.176]},"t":121,"s":[-0.001]},{"i":{"x":[0.833],"y":[0.776]},"o":{"x":[0.167],"y":[0.272]},"t":122,"s":[0]},{"i":{"x":[0.833],"y":[0.856]},"o":{"x":[0.167],"y":[0.133]},"t":123,"s":[0]},{"i":{"x":[0.833],"y":[0.865]},"o":{"x":[0.167],"y":[0.198]},"t":124,"s":[0]},{"i":{"x":[0.833],"y":[0.893]},"o":{"x":[0.167],"y":[0.217]},"t":125,"s":[0.001]},{"i":{"x":[0.833],"y":[0.864]},"o":{"x":[0.167],"y":[0.376]},"t":126,"s":[0.001]},{"i":{"x":[0.833],"y":[1.042]},"o":{"x":[0.167],"y":[0.216]},"t":127,"s":[0.001]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.028]},"t":128,"s":[0.001]},{"i":{"x":[0.833],"y":[0.686]},"o":{"x":[0.167],"y":[0.166]},"t":129,"s":[0.001]},{"i":{"x":[0.833],"y":[0.813]},"o
":{"x":[0.167],"y":[0.113]},"t":130,"s":[0.001]},{"i":{"x":[0.833],"y":[0.872]},"o":{"x":[0.167],"y":[0.15]},"t":131,"s":[0.001]},{"i":{"x":[0.833],"y":[0.747]},"o":{"x":[0.167],"y":[0.238]},"t":132,"s":[0.001]},{"i":{"x":[0.833],"y":[0.837]},"o":{"x":[0.167],"y":[0.124]},"t":133,"s":[0.001]},{"i":{"x":[0.833],"y":[0.878]},"o":{"x":[0.167],"y":[0.17]},"t":134,"s":[0]},{"i":{"x":[0.833],"y":[0.769]},"o":{"x":[0.167],"y":[0.263]},"t":135,"s":[0]},{"i":{"x":[0.833],"y":[0.85]},"o":{"x":[0.167],"y":[0.13]},"t":136,"s":[0]},{"i":{"x":[0.833],"y":[0.884]},"o":{"x":[0.167],"y":[0.188]},"t":137,"s":[0]},{"i":{"x":[0.833],"y":[0.795]},"o":{"x":[0.167],"y":[0.294]},"t":138,"s":[0]},{"i":{"x":[0.833],"y":[0.873]},"o":{"x":[0.167],"y":[0.14]},"t":139,"s":[0]},{"i":{"x":[0.833],"y":[0.901]},"o":{"x":[0.167],"y":[0.242]},"t":140,"s":[0]},{"i":{"x":[0.833],"y":[0.995]},"o":{"x":[0.167],"y":[0.531]},"t":141,"s":[0]},{"i":{"x":[0.833],"y":[0.547]},"o":{"x":[0.167],"y":[-0.005]},"t":142,"s":[0]},{"i":{"x":[0.833],"y":[0.856]},"o":{"x":[0.167],"y":[0.102]},"t":143,"s":[0]},{"i":{"x":[0.833],"y":[0.714]},"o":{"x":[0.167],"y":[0.198]},"t":144,"s":[0]},{"i":{"x":[0.833],"y":[0.822]},"o":{"x":[0.167],"y":[0.118]},"t":145,"s":[0]},{"i":{"x":[0.833],"y":[0.874]},"o":{"x":[0.167],"y":[0.156]},"t":146,"s":[0]},{"i":{"x":[0.833],"y":[0.753]},"o":{"x":[0.167],"y":[0.245]},"t":147,"s":[0]},{"i":{"x":[0.833],"y":[0.84]},"o":{"x":[0.167],"y":[0.126]},"t":148,"s":[0]},{"i":{"x":[0.833],"y":[0.879]},"o":{"x":[0.167],"y":[0.174]},"t":149,"s":[0]},{"i":{"x":[0.833],"y":[0.774]},"o":{"x":[0.167],"y":[0.269]},"t":150,"s":[0]},{"i":{"x":[0.833],"y":[0.854]},"o":{"x":[0.167],"y":[0.132]},"t":151,"s":[0]},{"i":{"x":[0.833],"y":[0.886]},"o":{"x":[0.167],"y":[0.194]},"t":152,"s":[0]},{"i":{"x":[0.833],"y":[0.807]},"o":{"x":[0.167],"y":[0.308]},"t":153,"s":[0]},{"i":{"x":[0.833],"y":[0.887]},"o":{"x":[0.167],"y":[0.147]},"t":154,"s":[0]},{"i":{"x":[0.833],"y":[0.925]},"o":{"x":[0.167],"y":[0.317]},"t":155,"s":[0]},{"i":{"x":[0.833],"y":[-0.093]},"o":{"x":[0.167],"y":[-0.755]},"t":156,"s":[0]},{"i":{"x":[0.833],"y":[0.76]},"o":{"x":[0.167],"y":[0.09]},"t":157,"s":[0]},{"i":{"x":[0.833],"y":[0.865]},"o":{"x":[0.167],"y":[0.128]},"t":158,"s":[0]},{"i":{"x":[0.833],"y":[0.729]},"o":{"x":[0.167],"y":[0.216]},"t":159,"s":[0]},{"i":{"x":[0.833],"y":[0.828]},"o":{"x":[0.167],"y":[0.12]},"t":160,"s":[0]},{"i":{"x":[0.833],"y":[0.875]},"o":{"x":[0.167],"y":[0.162]},"t":161,"s":[0]},{"i":{"x":[0.833],"y":[0.759]},"o":{"x":[0.167],"y":[0.251]},"t":162,"s":[0]},{"i":{"x":[0.833],"y":[0.844]},"o":{"x":[0.167],"y":[0.127]},"t":163,"s":[0]},{"i":{"x":[0.833],"y":[0.881]},"o":{"x":[0.167],"y":[0.178]},"t":164,"s":[0]},{"i":{"x":[0.833],"y":[0.78]},"o":{"x":[0.167],"y":[0.276]},"t":165,"s":[0]},{"i":{"x":[0.833],"y":[0.859]},"o":{"x":[0.167],"y":[0.134]},"t":166,"s":[0]},{"i":{"x":[0.833],"y":[0.889]},"o":{"x":[0.167],"y":[0.203]},"t":167,"s":[0]},{"i":{"x":[0.833],"y":[0.826]},"o":{"x":[0.167],"y":[0.331]},"t":168,"s":[0]},{"i":{"x":[0.833],"y":[0.917]},"o":{"x":[0.167],"y":[0.16]},"t":169,"s":[0]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":170,"s":[0]},{"i":{"x":[0.833],"y":[0.617]},"o":{"x":[0.167],"y":[0.084]},"t":171,"s":[0]},{"i":{"x":[0.833],"y":[0.797]},"o":{"x":[0.167],"y":[0.107]},"t":172,"s":[0]},{"i":{"x":[0.833],"y":[0.869]},"o":{"x":[0.167],"y":[0.141]},"t":173,"s":[0]},{"i":{"x":[0.833],"y":[0.739]},"o":{"x":[0.167],"y":[0.229]},"t":174,"s":[0]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.123]},"t":175,"s":[0]},
{"i":{"x":[0.833],"y":[0.877]},"o":{"x":[0.167],"y":[0.166]},"t":176,"s":[0]},{"i":{"x":[0.833],"y":[0.764]},"o":{"x":[0.167],"y":[0.257]},"t":177,"s":[0]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.129]},"t":178,"s":[0]},{"t":179,"s":[0]}],"ix":10},"p":{"a":0,"k":[256,256,0],"ix":2,"l":2},"a":{"a":0,"k":[45,45,0],"ix":1,"l":2},"s":{"a":1,"k":[{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":0,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":1,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":2,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":3,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":4,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":5,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":6,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":7,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.759,0.759,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":8,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.306,0.306,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.083,0.083,0]},"t":9,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.85,0.85,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.095,0.095,0]},"t":10,"s":[502.898,502.898,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.705,0.705,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.188,0.188,0]},"t":11,"s":[524.133,524.133,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.818,0.818,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.116,0.116,0]},"t":12,"s":[541.109,541.109,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.878,0.878,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.154,0.154,0]},"t":13,"s":[584.273,584.273,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.805,0.805,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.261,0.261,0]},"t":14,"s":[635.15,635.15,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.884,0.884,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.145,0.145,0]},"t":15,"s":[658.975,658.975,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.919,0.919,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.298,0.298,0]},"t":16,"s":[690.914,690.914,100]},{"i":{"x":[0.833,0.833,0.833],"y":[-2.003,-2.003,1]},"o":{"x":[0.167,0.167,0.167],"y":[-3.01,-3.01,0]},"t":17,"s":[703.312,703.312,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.75,0.75,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.086,0.086,0]},"t":18,"s":[702.978,702.978,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.864,0.864,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.125,0.125,0]},"t":19,"s":[691.278,691.278,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.727,0.727,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.214,0.214,0]},"t":20,"s":[667.922,667.922,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.827,0.827,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.12,0.12,0]},"t":21,"s":[653.061,653.061,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.875,0.875,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.161,0.161,0]},"t":22,"s":[619.309,619.309,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.758,0.758,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.25,0.25,0]},"t":23,"s":[583.081,583.081,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.843,0.843,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.127,0.127,0]},"t":24,"s":[565.017,565.017,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.88,0.88,1]},"o":{"x
":[0.167,0.167,0.167],"y":[0.178,0.178,0]},"t":25,"s":[530.628,530.628,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.779,0.779,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.275,0.275,0]},"t":26,"s":[500.267,500.267,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.858,0.858,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.134,0.134,0]},"t":27,"s":[487.078,487.078,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.888,0.888,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.202,0.202,0]},"t":28,"s":[465.281,465.281,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.823,0.823,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.327,0.327,0]},"t":29,"s":[449.937,449.937,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.91,0.91,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.157,0.157,0]},"t":30,"s":[444.682,444.682,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1.205,1.205,1]},"o":{"x":[0.167,0.167,0.167],"y":[1.193,1.193,0]},"t":31,"s":[438.75,438.75,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.596,0.596,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.059,0.059,0]},"t":32,"s":[438.304,438.304,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.794,0.794,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.105,0.105,0]},"t":33,"s":[439.846,439.846,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.868,0.868,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.14,0.14,0]},"t":34,"s":[445.774,445.774,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.738,0.738,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.227,0.227,0]},"t":35,"s":[454.534,454.534,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.832,0.832,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.122,0.122,0]},"t":36,"s":[459.615,459.615,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.877,0.877,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.166,0.166,0]},"t":37,"s":[470.5,470.5,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.763,0.763,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.257,0.257,0]},"t":38,"s":[481.537,481.537,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.846,0.846,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.129,0.129,0]},"t":39,"s":[486.847,486.847,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.882,0.882,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.182,0.182,0]},"t":40,"s":[496.631,496.631,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.786,0.786,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.283,0.283,0]},"t":41,"s":[504.883,504.883,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.864,0.864,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.136,0.136,0]},"t":42,"s":[508.329,508.329,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.892,0.892,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.214,0.214,0]},"t":43,"s":[513.751,513.751,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.857,0.857,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.367,0.367,0]},"t":44,"s":[517.201,517.201,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1.004,1.004,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.199,0.199,0]},"t":45,"s":[518.214,518.214,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.825,0.825,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.004,0.004,0]},"t":46,"s":[518.946,518.946,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.681,0.681,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.159,0.159,0]},"t":47,"s":[518.18,518.18,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.811,0.811,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.113,0.113,0]},"t":48,"s":[517.336,517.336,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.871,0.871,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.149,0.149,0]},"t":49,"s":[514.946,514.946,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.746,0.746,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.236,0.236,0]},"t":50,"s":[511.912,511.912,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.836,0.836,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.124,0.124,0]},"t":51,"s":[51
0.26,510.26,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.878,0.878,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.17,0.17,0]},"t":52,"s":[506.879,506.879,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.768,0.768,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.262,0.262,0]},"t":53,"s":[503.615,503.615,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.85,0.85,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.13,0.13,0]},"t":54,"s":[502.096,502.096,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.883,0.883,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.187,0.187,0]},"t":55,"s":[499.39,499.39,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.794,0.794,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.293,0.293,0]},"t":56,"s":[497.218,497.218,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.872,0.872,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.14,0.14,0]},"t":57,"s":[496.354,496.354,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.9,0.9,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.237,0.237,0]},"t":58,"s":[495.078,495.078,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.96,0.96,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.489,0.489,0]},"t":59,"s":[494.388,494.388,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.386,0.386,1]},"o":{"x":[0.167,0.167,0.167],"y":[-0.077,-0.077,0]},"t":60,"s":[494.246,494.246,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.854,0.854,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.096,0.096,0]},"t":61,"s":[494.32,494.32,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.711,0.711,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.195,0.195,0]},"t":62,"s":[494.79,494.79,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.821,0.821,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.117,0.117,0]},"t":63,"s":[495.14,495.14,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.873,0.873,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.156,0.156,0]},"t":64,"s":[496.004,496.004,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.752,0.752,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.244,0.244,0]},"t":65,"s":[497,497,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.84,0.84,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.126,0.126,0]},"t":66,"s":[497.516,497.516,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.879,0.879,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.174,0.174,0]},"t":67,"s":[498.534,498.534,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.773,0.773,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.269,0.269,0]},"t":68,"s":[499.472,499.472,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.854,0.854,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.132,0.132,0]},"t":69,"s":[499.895,499.895,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.885,0.885,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.193,0.193,0]},"t":70,"s":[500.621,500.621,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.805,0.805,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.306,0.306,0]},"t":71,"s":[501.171,501.171,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.884,0.884,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.146,0.146,0]},"t":72,"s":[501.377,501.377,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.919,0.919,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.299,0.299,0]},"t":73,"s":[501.653,501.653,100]},{"i":{"x":[0.833,0.833,0.833],"y":[-1.781,-1.781,1]},"o":{"x":[0.167,0.167,0.167],"y":[-2.749,-2.749,0]},"t":74,"s":[501.76,501.76,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.751,0.751,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.086,0.086,0]},"t":75,"s":[501.757,501.757,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.864,0.864,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.125,0.125,0]},"t":76,"s":[501.655,501.655,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.727,0.727,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.214,0.214,0]},"t":77,"s":[501.452,501.452,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.827,0.827,1]},"o":
{"x":[0.167,0.167,0.167],"y":[0.12,0.12,0]},"t":78,"s":[501.324,501.324,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.875,0.875,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.161,0.161,0]},"t":79,"s":[501.031,501.031,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.758,0.758,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.25,0.25,0]},"t":80,"s":[500.718,500.718,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.843,0.843,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.127,0.127,0]},"t":81,"s":[500.562,500.562,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.88,0.88,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.178,0.178,0]},"t":82,"s":[500.264,500.264,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.779,0.779,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.275,0.275,0]},"t":83,"s":[500.001,500.001,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.858,0.858,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.134,0.134,0]},"t":84,"s":[499.887,499.887,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.888,0.888,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.202,0.202,0]},"t":85,"s":[499.699,499.699,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.823,0.823,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.327,0.327,0]},"t":86,"s":[499.566,499.566,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.911,0.911,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.157,0.157,0]},"t":87,"s":[499.521,499.521,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1.219,1.219,1]},"o":{"x":[0.167,0.167,0.167],"y":[1.236,1.236,0]},"t":88,"s":[499.47,499.47,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.597,0.597,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.06,0.06,0]},"t":89,"s":[499.466,499.466,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.794,0.794,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.105,0.105,0]},"t":90,"s":[499.48,499.48,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.868,0.868,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.14,0.14,0]},"t":91,"s":[499.531,499.531,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.738,0.738,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.227,0.227,0]},"t":92,"s":[499.607,499.607,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.832,0.832,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.122,0.122,0]},"t":93,"s":[499.651,499.651,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.877,0.877,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.166,0.166,0]},"t":94,"s":[499.745,499.745,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.763,0.763,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.257,0.257,0]},"t":95,"s":[499.841,499.841,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.846,0.846,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.129,0.129,0]},"t":96,"s":[499.886,499.886,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.882,0.882,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.182,0.182,0]},"t":97,"s":[499.971,499.971,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.786,0.786,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.283,0.283,0]},"t":98,"s":[500.042,500.042,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.864,0.864,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.136,0.136,0]},"t":99,"s":[500.072,500.072,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.892,0.892,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.214,0.214,0]},"t":100,"s":[500.119,500.119,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.857,0.857,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.367,0.367,0]},"t":101,"s":[500.149,500.149,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1.005,1.005,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.199,0.199,0]},"t":102,"s":[500.158,500.158,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.825,0.825,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.005,0.005,0]},"t":103,"s":[500.164,500.164,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.681,0.681,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.159,0.159,0]},"t":104,"
s":[500.157,500.157,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.811,0.811,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.113,0.113,0]},"t":105,"s":[500.15,500.15,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.871,0.871,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.149,0.149,0]},"t":106,"s":[500.129,500.129,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.746,0.746,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.236,0.236,0]},"t":107,"s":[500.103,500.103,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.836,0.836,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.124,0.124,0]},"t":108,"s":[500.089,500.089,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.878,0.878,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.17,0.17,0]},"t":109,"s":[500.059,500.059,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.768,0.768,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.262,0.262,0]},"t":110,"s":[500.031,500.031,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.85,0.85,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.13,0.13,0]},"t":111,"s":[500.018,500.018,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.883,0.883,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.187,0.187,0]},"t":112,"s":[499.995,499.995,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.794,0.794,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.293,0.293,0]},"t":113,"s":[499.976,499.976,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.872,0.872,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.14,0.14,0]},"t":114,"s":[499.968,499.968,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.9,0.9,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.238,0.238,0]},"t":115,"s":[499.957,499.957,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.961,0.961,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.49,0.49,0]},"t":116,"s":[499.951,499.951,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.396,0.396,1]},"o":{"x":[0.167,0.167,0.167],"y":[-0.072,-0.072,0]},"t":117,"s":[499.95,499.95,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.855,0.855,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.097,0.097,0]},"t":118,"s":[499.951,499.951,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.711,0.711,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.195,0.195,0]},"t":119,"s":[499.955,499.955,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.821,0.821,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.117,0.117,0]},"t":120,"s":[499.958,499.958,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.873,0.873,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.156,0.156,0]},"t":121,"s":[499.965,499.965,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.753,0.753,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.244,0.244,0]},"t":122,"s":[499.974,499.974,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.84,0.84,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.126,0.126,0]},"t":123,"s":[499.979,499.979,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.845,0.845,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.174,0.174,0]},"t":124,"s":[499.987,499.987,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.881,0.881,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.18,0.18,0]},"t":125,"s":[499.995,499.995,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.783,0.783,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.28,0.28,0]},"t":126,"s":[500.002,500.002,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.861,0.861,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.135,0.135,0]},"t":127,"s":[500.005,500.005,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.89,0.89,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.209,0.209,0]},"t":128,"s":[500.01,500.01,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.84,0.84,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.347,0.347,0]},"t":129,"s":[500.013,500.013,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.948,0.948,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.174,0.174,0]},"t":130,"s":[500.014,500.014,100]},{"i":{"x":[0.833,0.83
3,0.833],"y":[0.779,0.779,1]},"o":{"x":[0.167,0.167,0.167],"y":[-0.139,-0.139,0]},"t":131,"s":[500.015,500.015,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.659,0.659,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.134,0.134,0]},"t":132,"s":[500.015,500.015,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.805,0.805,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.11,0.11,0]},"t":133,"s":[500.014,500.014,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.87,0.87,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.146,0.146,0]},"t":134,"s":[500.013,500.013,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.743,0.743,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.233,0.233,0]},"t":135,"s":[500.01,500.01,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.835,0.835,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.123,0.123,0]},"t":136,"s":[500.009,500.009,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.877,0.877,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.168,0.168,0]},"t":137,"s":[500.006,500.006,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.766,0.766,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.26,0.26,0]},"t":138,"s":[500.004,500.004,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.848,0.848,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.129,0.129,0]},"t":139,"s":[500.002,500.002,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.883,0.883,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.185,0.185,0]},"t":140,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.79,0.79,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.289,0.289,0]},"t":141,"s":[499.998,499.998,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.868,0.868,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.138,0.138,0]},"t":142,"s":[499.997,499.997,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.896,0.896,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.226,0.226,0]},"t":143,"s":[499.996,499.996,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.901,0.901,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.42,0.42,0]},"t":144,"s":[499.996,499.996,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1.753,1.753,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.533,0.533,0]},"t":145,"s":[499.995,499.995,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.848,0.848,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.075,0.075,0]},"t":146,"s":[499.995,499.995,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.702,0.702,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.184,0.184,0]},"t":147,"s":[499.996,499.996,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.817,0.817,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.116,0.116,0]},"t":148,"s":[499.996,499.996,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.873,0.873,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.153,0.153,0]},"t":149,"s":[499.997,499.997,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.75,0.75,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.241,0.241,0]},"t":150,"s":[499.997,499.997,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.838,0.838,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.125,0.125,0]},"t":151,"s":[499.998,499.998,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.879,0.879,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.172,0.172,0]},"t":152,"s":[499.999,499.999,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.771,0.771,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.266,0.266,0]},"t":153,"s":[499.999,499.999,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.852,0.852,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.131,0.131,0]},"t":154,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.885,0.885,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.191,0.191,0]},"t":155,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.8,0.8,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.3,0.3,0]},"t":156,"s":[500.001,500.001,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.879,0.879,1]},"o":{"x":[0.167,0.167,0.167],"y":
[0.143,0.143,0]},"t":157,"s":[500.001,500.001,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.908,0.908,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.265,0.265,0]},"t":158,"s":[500.001,500.001,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1.335,1.335,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.932,0.932,0]},"t":159,"s":[500.001,500.001,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.708,0.708,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.067,0.067,0]},"t":160,"s":[500.001,500.001,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.861,0.861,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.117,0.117,0]},"t":161,"s":[500.001,500.001,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.722,0.722,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.208,0.208,0]},"t":162,"s":[500.001,500.001,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.825,0.825,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.119,0.119,0]},"t":163,"s":[500.001,500.001,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.874,0.874,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.159,0.159,0]},"t":164,"s":[500.001,500.001,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.756,0.756,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.248,0.248,0]},"t":165,"s":[500.001,500.001,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.842,0.842,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.127,0.127,0]},"t":166,"s":[500.001,500.001,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.88,0.88,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.176,0.176,0]},"t":167,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.777,0.777,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.273,0.273,0]},"t":168,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.856,0.856,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.133,0.133,0]},"t":169,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.887,0.887,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.198,0.198,0]},"t":170,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.815,0.815,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.317,0.317,0]},"t":171,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.898,0.898,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.151,0.151,0]},"t":172,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.967,0.967,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.447,0.447,0]},"t":173,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.499,0.499,1]},"o":{"x":[0.167,0.167,0.167],"y":[-0.056,-0.056,0]},"t":174,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.782,0.782,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.1,0.1,0]},"t":175,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.867,0.867,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.135,0.135,0]},"t":176,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.734,0.734,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.223,0.223,0]},"t":177,"s":[500,500,100]},{"i":{"x":[0.833,0.833,0.833],"y":[0.833,0.833,1]},"o":{"x":[0.167,0.167,0.167],"y":[0.121,0.121,0]},"t":178,"s":[500,500,100]},{"t":179,"s":[500,500,100]}],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0]],"v":[[-5.75,14.5],[-0.25,0.5],[5.75,-14.5]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.2],"y":[1]},"o":{"x":[0.167],"y":[0.167]},"t":12,"s":[100]},{"t":23,"s":[1]}],"ix":1},"e":{"a":0,"k":100,"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":5,"ix":5},"lc":2,"lj":1,"ml":10,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - 
Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[55.75,55.5],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0]],"v":[[5.75,14.5],[-0.75,-1.5],[-5.75,-14.5]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":5,"ix":5},"lc":2,"lj":1,"ml":10,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.2],"y":[1]},"o":{"x":[0.167],"y":[0.167]},"t":12,"s":[100]},{"t":23,"s":[1]}],"ix":1},"e":{"a":0,"k":100,"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":3,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"tr","p":{"a":0,"k":[67.75,55.5],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":3,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[4.45,5.399],[0,0]],"o":[[0,0],[-4.45,-5.399],[0,0]],"v":[[10.085,11.858],[-1.557,1.119],[-10.085,-11.858]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":5,"ix":5},"lc":2,"lj":1,"ml":10,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.2],"y":[1]},"o":{"x":[0.167],"y":[0.167]},"t":9,"s":[50]},{"t":20,"s":[0]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.2],"y":[1]},"o":{"x":[0.167],"y":[0.167]},"t":9,"s":[50]},{"t":20,"s":[100]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":3,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"tr","p":{"a":0,"k":[36.008,43.125],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 3","np":3,"cix":2,"bm":0,"ix":3,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[-5.939,7.862],[0,0]],"o":[[0,0],[5.939,-7.862],[0,0]],"v":[[-12.519,16.573],[2.996,1.077],[12.519,-16.573]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":5,"ix":5},"lc":2,"lj":1,"ml":10,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.2],"y":[1]},"o":{"x":[0.167],"y":[0.167]},"t":6,"s":[100]},{"t":17,"s":[1]}],"ix":1},"e":{"a":0,"k":100,"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":3,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"tr","p":{"a":0,"k":[35.358,41.319],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 4","np":3,"cix":2,"bm":0,"ix":4,"mn":"ADBE Vector 
Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[36.1,17.439],[36.1,22.202]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":5,"ix":5},"lc":2,"lj":1,"ml":10,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.2],"y":[1]},"o":{"x":[0.167],"y":[0.167]},"t":3,"s":[0]},{"t":14,"s":[100]}],"ix":1},"e":{"a":0,"k":0,"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":3,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 5","np":3,"cix":2,"bm":0,"ix":5,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[16.285,23.302],[55.919,23.302]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":5,"ix":5},"lc":2,"lj":1,"ml":10,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.2],"y":[1]},"o":{"x":[0.167],"y":[0.167]},"t":0,"s":[0]},{"t":11,"s":[100]}],"ix":1},"e":{"a":0,"k":0,"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":3,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 6","np":3,"cix":2,"bm":0,"ix":6,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[54.051,60.342],[69.3,60.342]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":5,"ix":5},"lc":2,"lj":1,"ml":10,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.2],"y":[1]},"o":{"x":[0.167],"y":[0.167]},"t":15,"s":[100]},{"t":26,"s":[1]}],"ix":1},"e":{"a":0,"k":100,"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":3,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 7","np":3,"cix":2,"bm":0,"ix":7,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":180,"st":0,"ct":1,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/speed_15to2.json b/TMessagesProj/src/main/res/raw/speed_15to2.json new file mode 100644 index 0000000000..2913491a5f --- /dev/null +++ b/TMessagesProj/src/main/res/raw/speed_15to2.json @@ -0,0 +1 @@ +{"v":"5.10.1","fr":60,"ip":0,"op":75,"w":512,"h":512,"nm":"15to2x","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Shape Layer 
2","parent":4,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":-0.5,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":3,"s":[8.467,-240.582,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":6,"s":[8.432,-244.582,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":12,"s":[8.31,-258.582,0],"to":[0,0,0],"ti":[1.639,-3.181,0]},{"i":{"x":0.517,"y":0.697},"o":{"x":0.167,"y":0.167},"t":17,"s":[8.414,-246.582,0],"to":[-0.973,1.888,0],"ti":[2.714,-4.779,0]},{"i":{"x":0.57,"y":1},"o":{"x":0.243,"y":1},"t":33,"s":[0.536,-229.946,0],"to":[-1.858,3.272,0],"ti":[-1.088,-0.465,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":55,"s":[-1.419,-227.496,0],"to":[2.677,1.143,0],"ti":[-4.316,2.038,0]},{"t":68,"s":[24.475,-239.722,0]}],"ix":2,"l":2},"a":{"a":0,"k":[0,0,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-134,120],[115,119]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":29,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0,0,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":75,"st":0,"ct":1,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Shape Layer 1","parent":4,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":-0.5,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":18,"s":[-0.438,-0.496,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":25,"s":[14.562,-0.496,0],"to":[0,0,0],"ti":[0,0,0]},{"t":35,"s":[-0.438,-0.496,0]}],"ix":2,"l":2},"a":{"a":0,"k":[0,0,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-134,120],[115,119]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":29,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0,0,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":75,"st":0,"ct":1,"bm":0},{"ddd":0,"ind":3,"ty":3,"nm":"Null 
1","sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":55,"s":[0]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":65,"s":[3]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":70,"s":[-1]},{"t":74,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[199,388,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":7,"s":[199,280,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":13,"s":[199,306,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":18,"s":[199,314,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":25,"s":[199,299,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":32,"s":[199,314,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":39,"s":[199,299,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":46,"s":[199,314,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":53,"s":[199,299,0],"to":[0,0,0],"ti":[0,0,0]},{"t":62,"s":[199,306,0]}],"ix":2,"l":2},"a":{"a":0,"k":[50,50,0],"ix":1,"l":2},"s":{"a":1,"k":[{"i":{"x":[0.1,0.1,0.1],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":0,"s":[0,0,100]},{"t":9,"s":[100,100,100]}],"ix":6,"l":2}},"ao":0,"ip":0,"op":180,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Body 3","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":2,"s":[10]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":13,"s":[-14]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":20,"s":[-10]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":27,"s":[-13]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":34,"s":[-10]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":41,"s":[-12]},{"i":{"x":[0.506],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":48,"s":[-9]},{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":55,"s":[-13]},{"t":65,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.2,"y":0},"t":2,"s":[-82.367,118.339,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0,"y":1},"o":{"x":0.2,"y":0},"t":4,"s":[-95.349,134.878,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.953,"y":0},"t":41,"s":[62.651,134.878,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":0.7},"o":{"x":0.01,"y":0.01},"t":62,"s":[-41.349,134.878,0],"to":[0,0,0],"ti":[0,0,0]},{"t":66,"s":[-41.349,134.878,0]}],"ix":2,"l":2},"a":{"a":0,"k":[-148.349,134.878,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":5,"s":[{"i":[[24.106,0],[7.475,-14.494],[0.623,-2.03],[0,-2.422],[-8.522,0],[-3.588,6.369],[-12.289,0],[0,-12.648],[9.439,-10.305],[4.674,-4.475],[0,0],[0,-6.01],[-10.136,0],[0,0],[0,7.535],[8.342,0],[0,0],[0,0],[-5.427,7.72],[0,9.974],[2.678,5.514]],"o":[[-25.426,0],[-1.017,1.971],[-0.718,2.243],[0,7.983],[6.907,0],[5.472,-12.289],[13.635,0],[0,8.309],[-3.096,3.379],[0,0],[-6.459,5.92],[0,8.073],[0,0],[8.253,0],[0,-7.714],[0,0],[0,0],[11.648,-10.98],[6.233,-8.866],[0.046,-6.749],[-7.717,-15.886]],"v":[[-69.915,-119.144],[-81.071,-127.952],[-83.534,-121.938],[-84.7,-115.031],[-106.53,-119.012],[-94.632,-119.117],[-82.608,-121.454],[-69.076,-118.238],[-84.492,-117.888],[-98.676,-122.821],[-114.673,-124.035],[-130.911,-123.81],[-115.841,-110.175],[-31.253,-110.175],[-17.977,-122.913],[-32.684,-127.764],[-86
.867,-124.864],[-57.984,-119.64],[-50.091,-118.062],[-38.039,-120.391],[-32.204,-120.47]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0},"t":12,"s":[{"i":[[24.106,0],[7.475,-14.494],[0.623,-2.03],[0,-2.422],[-8.522,0],[-3.588,6.369],[-12.289,0],[0,-12.648],[9.439,-10.305],[4.674,-4.475],[0,0],[0,-6.01],[-10.136,0],[0,0],[0,7.535],[8.342,0],[0,0],[0,0],[-5.427,7.72],[0,9.974],[2.678,5.514]],"o":[[-25.426,0],[-1.017,1.971],[-0.718,2.243],[0,7.983],[6.907,0],[5.472,-12.289],[13.635,0],[0,8.309],[-3.096,3.379],[0,0],[-6.459,5.92],[0,8.073],[0,0],[8.253,0],[0,-7.714],[0,0],[0,0],[11.648,-10.98],[6.233,-8.866],[0.046,-6.749],[-7.717,-15.886]],"v":[[-70.67,-139.13],[-81.825,-147.938],[-84.288,-141.924],[-85.454,-135.017],[-107.284,-138.997],[-95.386,-139.103],[-83.362,-141.44],[-69.831,-138.224],[-85.247,-137.874],[-99.43,-142.806],[-115.428,-144.021],[-131.666,-143.795],[-116.596,-130.16],[-32.008,-130.16],[-18.732,-142.898],[-33.438,-147.749],[-87.622,-144.85],[-58.738,-139.626],[-50.846,-138.048],[-38.794,-140.377],[-32.958,-140.456]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":18,"s":[{"i":[[24.106,0],[7.475,-14.494],[0.623,-2.03],[0,-2.422],[-8.522,0],[-3.588,6.369],[-12.289,0],[0,-12.648],[9.439,-10.305],[4.674,-4.475],[0,0],[0,-6.01],[-10.136,0],[0,0],[0,7.535],[8.342,0],[0,0],[0,0],[-5.427,7.72],[0,9.974],[2.678,5.514]],"o":[[-25.426,0],[-1.017,1.971],[-0.718,2.243],[0,7.983],[6.907,0],[5.472,-12.289],[13.635,0],[0,8.309],[-3.096,3.379],[0,0],[-6.459,5.92],[0,8.073],[0,0],[8.253,0],[0,-7.714],[0,0],[0,0],[11.648,-10.98],[6.233,-8.866],[0.046,-6.749],[-7.717,-15.886]],"v":[[-80.188,-129.175],[-111.827,-130.849],[-126.024,-132.68],[-127.19,-125.773],[-113.556,-112.766],[-99.383,-122.095],[-75.732,-126.734],[-54.056,-125.302],[-72.744,-127.674],[-82.073,-127.787],[-121.792,-112.18],[-130.583,-96.213],[-115.513,-82.578],[-30.925,-82.578],[-17.649,-95.316],[-30.925,-108.054],[-88.513,-108.054],[-41.605,-119.958],[-29.358,-132.237],[-27.752,-118.689],[-33.395,-128.821]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":25,"s":[{"i":[[24.106,0],[7.475,-14.494],[0.623,-2.03],[0,-2.422],[-8.522,0],[-3.588,6.369],[-12.289,0],[0,-12.648],[9.439,-10.305],[4.674,-4.475],[0,0],[0,-6.01],[-10.136,0],[0,0],[0,7.535],[8.342,0],[0,0],[0,0],[-5.427,7.72],[0,9.974],[2.678,5.514]],"o":[[-25.426,0],[-1.017,1.971],[-0.718,2.243],[0,7.983],[6.907,0],[5.472,-12.289],[13.635,0],[0,8.309],[-3.096,3.379],[0,0],[-6.459,5.92],[0,8.073],[0,0],[8.253,0],[0,-7.714],[0,0],[0,0],[11.648,-10.98],[6.233,-8.866],[0.046,-6.749],[-7.717,-15.886]],"v":[[-76.632,-120.018],[-117.879,-122.928],[-121.985,-125.465],[-128.336,-127.886],[-114.702,-114.879],[-100.529,-124.208],[-76.625,-120.084],[-50.834,-120.979],[-63.02,-96.143],[-74.605,-84.47],[-120.173,-40.965],[-128.964,-24.998],[-113.894,-11.363],[-29.306,-11.363],[-16.03,-24.101],[-29.306,-36.839],[-86.894,-36.839],[-53.883,-69.221],[-28.618,-96.317],[-19.797,-123.132],[-28.663,-119.635]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":30,"s":[{"i":[[24.106,0],[7.475,-14.494],[0.623,-2.03],[0,-2.422],[-8.522,0],[-3.588,6.369],[-12.289,0],[0,-12.648],[9.439,-10.305],[4.674,-4.475],[0,0],[0,-6.01],[-10.136,0],[0,0],[0,7.535],[8.342,0],[0,0],[0,0],[-5.427,7.72],[0,9.974],[2.678,5.514]],"o":[[-25.426,0],[-1.017,1.971],[-0.718,2.243],[0,7.983],[6.907,0],[5.472,-12.289],[13.635,0],[0,8.309],[-3.096,3.379],[0,0],[-6.459,5.92],[0,8.073],[0,0],[8.253,0],[0,-7.714],[0,0],[0,0],[11.648,-10.98],[6.233,-8.866],[0.0
46,-6.749],[-7.717,-15.886]],"v":[[-73.199,-118.253],[-123.829,-102.213],[-126.292,-96.199],[-127.458,-89.292],[-113.824,-76.285],[-99.651,-85.614],[-73.368,-104.003],[-49.956,-82.385],[-62.142,-57.549],[-73.727,-45.876],[-119.295,-2.371],[-128.086,13.596],[-113.016,27.231],[-28.428,27.231],[-15.152,14.493],[-28.428,1.755],[-86.016,1.755],[-53.006,-30.627],[-27.741,-57.723],[-18.919,-84.538],[-22.964,-103.019]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.167,"y":0.167},"t":35,"s":[{"i":[[24.106,0],[7.475,-14.494],[0.623,-2.03],[0,-2.422],[-8.522,0],[-3.588,6.369],[-12.289,0],[0,-12.648],[9.439,-10.305],[4.674,-4.475],[0,0],[0,-6.01],[-10.136,0],[0,0],[0,7.535],[8.342,0],[0,0],[0,0],[-5.427,7.72],[0,9.974],[2.678,5.514]],"o":[[-25.426,0],[-1.017,1.971],[-0.718,2.243],[0,7.983],[6.907,0],[5.472,-12.289],[13.635,0],[0,8.309],[-3.096,3.379],[0,0],[-6.459,5.92],[0,8.073],[0,0],[8.253,0],[0,-7.714],[0,0],[0,0],[11.648,-10.98],[6.233,-8.866],[0.046,-6.749],[-7.717,-15.886]],"v":[[-72.34,-99.528],[-123.16,-72.801],[-125.623,-66.787],[-126.789,-59.88],[-113.155,-46.873],[-98.982,-56.202],[-72.699,-74.591],[-49.287,-52.973],[-61.473,-28.137],[-73.058,-16.464],[-118.626,27.041],[-127.417,43.008],[-112.347,56.643],[-27.759,56.643],[-14.483,43.905],[-27.759,31.167],[-85.347,31.167],[-52.337,-1.215],[-27.072,-28.311],[-18.25,-55.126],[-22.295,-73.607]],"c":true}]},{"t":45,"s":[{"i":[[24.106,0],[7.475,-14.494],[0.623,-2.03],[0,-2.422],[-8.522,0],[-3.588,6.369],[-12.289,0],[0,-12.648],[9.439,-10.305],[4.674,-4.475],[0,0],[0,-6.01],[-10.136,0],[0,0],[0,7.535],[8.342,0],[0,0],[0,0],[-5.427,7.72],[0,9.974],[2.678,5.514]],"o":[[-25.426,0],[-1.017,1.971],[-0.718,2.243],[0,7.983],[6.907,0],[5.472,-12.289],[13.635,0],[0,8.309],[-3.096,3.379],[0,0],[-6.459,5.92],[0,8.073],[0,0],[8.253,0],[0,-7.714],[0,0],[0,0],[11.648,-10.98],[6.233,-8.866],[0.046,-6.749],[-7.717,-15.886]],"v":[[-71.806,-76.067],[-122.626,-49.34],[-125.089,-43.326],[-126.255,-36.419],[-112.621,-23.412],[-98.448,-32.741],[-72.165,-51.13],[-48.753,-29.512],[-60.939,-4.676],[-72.524,6.997],[-118.092,50.502],[-126.883,66.469],[-111.813,80.104],[-27.225,80.104],[-13.949,67.366],[-27.225,54.628],[-84.813,54.628],[-51.803,22.246],[-26.538,-4.85],[-17.716,-31.665],[-21.761,-50.146]],"c":true}]}],"ix":2},"nm":"Path 7","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":5,"s":[{"i":[[8.342,0],[3.678,-5.472],[0,0],[0,0],[0,0],[6.1,0],[0,-8.701],[-3.498,-4.665],[0,0],[0,0],[0,0],[0,-3.947],[-8.522,0],[-4.037,5.831],[0,0],[0,0],[0,0],[-5.831,0],[0,8.432],[2.96,3.857],[0,0],[0,0],[0,0],[0,4.216]],"o":[[-6.1,0],[0,0],[0,0],[0,0],[-4.126,-6.279],[-9.329,0],[0,4.126],[0,0],[0,0],[0,0],[-3.05,4.216],[0,7.714],[6.01,0],[0,0],[0,0],[0,0],[4.126,5.651],[8.88,0],[0,-4.216],[0,0],[0,0],[0,0],[3.05,-3.947],[0.09,-7.714]],"v":[[83.558,-136.441],[70.552,-128.637],[77.57,-127.936],[60.749,-129.826],[47.695,-126.366],[33.253,-135.246],[17.017,-120.176],[21.681,-107.887],[21.953,-126.42],[21.953,-125.613],[6.545,-137.37],[2.24,-125.798],[17.399,-111.894],[30.855,-119.699],[81.063,-113.781],[81.87,-113.781],[99.297,-119.788],[113.201,-111.805],[128.54,-126.247],[124.324,-137.908],[102.412,-136.386],[102.412,-137.104],[94.861,-111.235],[99.077,-122.717]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0},"t":12,"s":[{"i":[[8.342,0],[3.678,-5.472],[0,0],[0,0],[0,0],[6.1,0],[0,-8.701],[-3.498,-4.665],[0,0],[0,0],[0,0],[0,-3.947],[-8.522,0],[-4.037,5.831],[0,0],[0,0],[0,0],[-5.831,0],[0,8.432],[2.96,3.857],[0,0],[0,0],[0,0],[0,4.216]],"o":[[-6.1,0],[0,0],[0,0],[0,0],[-4.126,-6.279],[-9.329,0],[0,4.126],[0,0],[0,0],[0,0],[-3.05,4.216],[0,7.714],[6.01,0],[0,0],[0,0],[0,0],[4.126,5.651],[8.88,0],[0,-4.216],[0,0],[0,0],[0,0],[3.05,-3.947],[0.09,-7.714]],"v":[[82.949,-152.554],[69.943,-144.75],[76.961,-144.05],[60.141,-145.94],[47.087,-142.48],[32.645,-151.36],[16.409,-136.29],[21.073,-124.001],[21.344,-142.534],[21.344,-141.727],[5.936,-153.483],[1.631,-141.911],[16.79,-128.007],[30.246,-135.812],[80.455,-129.895],[81.262,-129.895],[98.688,-135.901],[112.592,-127.918],[127.931,-142.36],[123.715,-154.021],[101.804,-152.5],[101.804,-153.218],[94.252,-127.348],[98.468,-138.83]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":18,"s":[{"i":[[8.342,0],[3.678,-5.472],[0,0],[0,0],[0,0],[6.1,0],[0,-8.701],[-3.498,-4.665],[0,0],[0,0],[0,0],[0,-3.947],[-8.522,0],[-4.037,5.831],[0,0],[0,0],[0,0],[-5.831,0],[0,8.432],[2.96,3.857],[0,0],[0,0],[0,0],[0,4.216]],"o":[[-6.1,0],[0,0],[0,0],[0,0],[-4.126,-6.279],[-9.329,0],[0,4.126],[0,0],[0,0],[0,0],[-3.05,4.216],[0,7.714],[6.01,0],[0,0],[0,0],[0,0],[4.126,5.651],[8.88,0],[0,-4.216],[0,0],[0,0],[0,0],[3.05,-3.947],[0.09,-7.714]],"v":[[96.092,-135.159],[84.308,-133.739],[79.324,-131.319],[52.856,-127.576],[24.005,-132.329],[8.436,-135.316],[-6.673,-126.139],[-2.009,-113.85],[30.621,-137.226],[30.621,-136.419],[7.019,-105.901],[2.714,-94.329],[17.873,-80.425],[31.329,-88.23],[64.877,-135.861],[65.684,-135.861],[99.771,-88.319],[113.675,-80.336],[129.014,-94.778],[124.798,-106.439],[102.579,-137.518],[102.579,-138.236],[108.617,-116.337],[112.833,-127.819]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":25,"s":[{"i":[[8.342,0],[3.678,-5.472],[0,0],[0,0],[0,0],[6.1,0],[0,-8.701],[-3.498,-4.665],[0,0],[0,0],[0,0],[0,-3.947],[-8.522,0],[-4.037,5.831],[0,0],[0,0],[0,0],[-5.831,0],[0,8.432],[2.96,3.857],[0,0],[0,0],[0,0],[0,4.216]],"o":[[-6.1,0],[0,0],[0,0],[0,0],[-4.126,-6.279],[-9.329,0],[0,4.126],[0,0],[0,0],[0,0],[-3.05,4.216],[0,7.714],[6.01,0],[0,0],[0,0],[0,0],[4.126,5.651],[8.88,0],[0,-4.216],[0,0],[0,0],[0,0],[3.05,-3.947],[0.09,-7.714]],"v":[[93.249,-119.629],[81.457,-124.356],[69.456,-109.138],[68.649,-109.138],[58.938,-124.703],[44.354,-125.936],[28.26,-118.513],[32.924,-106.225],[46.762
,-88.686],[46.762,-87.879],[8.638,-34.686],[4.333,-23.114],[19.492,-9.21],[32.949,-17.015],[66.497,-64.646],[67.304,-64.646],[101.39,-17.104],[115.294,-9.121],[130.633,-23.563],[126.417,-35.224],[87.845,-87.251],[87.845,-87.969],[105.766,-106.954],[109.982,-118.436]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":30,"s":[{"i":[[8.342,0],[3.678,-5.472],[0,0],[0,0],[0,0],[6.1,0],[0,-8.701],[-3.498,-4.665],[0,0],[0,0],[0,0],[0,-3.947],[-8.522,0],[-4.037,5.831],[0,0],[0,0],[0,0],[-5.831,0],[0,8.432],[2.96,3.857],[0,0],[0,0],[0,0],[0,4.216]],"o":[[-6.1,0],[0,0],[0,0],[0,0],[-4.126,-6.279],[-9.329,0],[0,4.126],[0,0],[0,0],[0,0],[-3.05,4.216],[0,7.714],[6.01,0],[0,0],[0,0],[0,0],[4.126,5.651],[8.88,0],[0,-4.216],[0,0],[0,0],[0,0],[3.05,-3.947],[0.09,-7.714]],"v":[[117.158,-127.684],[104.152,-119.88],[70.334,-70.544],[69.527,-70.544],[36.786,-118.804],[22.344,-127.684],[6.108,-112.614],[10.772,-100.325],[47.64,-50.092],[47.64,-49.285],[9.516,3.908],[5.211,15.48],[20.37,29.384],[33.826,21.579],[67.374,-26.052],[68.181,-26.052],[102.268,21.49],[116.172,29.473],[131.511,15.031],[127.295,3.37],[88.723,-48.657],[88.723,-49.375],[128.461,-102.478],[132.677,-113.96]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.167,"y":0.167},"t":35,"s":[{"i":[[8.342,0],[3.678,-5.472],[0,0],[0,0],[0,0],[6.1,0],[0,-8.701],[-3.498,-4.665],[0,0],[0,0],[0,0],[0,-3.947],[-8.522,0],[-4.037,5.831],[0,0],[0,0],[0,0],[-5.831,0],[0,8.432],[2.96,3.857],[0,0],[0,0],[0,0],[0,4.216]],"o":[[-6.1,0],[0,0],[0,0],[0,0],[-4.126,-6.279],[-9.329,0],[0,4.126],[0,0],[0,0],[0,0],[-3.05,4.216],[0,7.714],[6.01,0],[0,0],[0,0],[0,0],[4.126,5.651],[8.88,0],[0,-4.216],[0,0],[0,0],[0,0],[3.05,-3.947],[0.09,-7.714]],"v":[[117.827,-98.272],[104.821,-90.468],[71.003,-41.132],[70.196,-41.132],[37.455,-89.392],[23.013,-98.272],[6.777,-83.202],[11.441,-70.913],[48.309,-20.68],[48.309,-19.873],[10.185,33.32],[5.88,44.892],[21.039,58.796],[34.495,50.991],[68.043,3.36],[68.85,3.36],[102.937,50.902],[116.841,58.885],[132.18,44.443],[127.964,32.782],[89.392,-19.245],[89.392,-19.963],[129.13,-73.066],[133.346,-84.548]],"c":true}]},{"t":45,"s":[{"i":[[8.342,0],[3.678,-5.472],[0,0],[0,0],[0,0],[6.1,0],[0,-8.701],[-3.498,-4.665],[0,0],[0,0],[0,0],[0,-3.947],[-8.522,0],[-4.037,5.831],[0,0],[0,0],[0,0],[-5.831,0],[0,8.432],[2.96,3.857],[0,0],[0,0],[0,0],[0,4.216]],"o":[[-6.1,0],[0,0],[0,0],[0,0],[-4.126,-6.279],[-9.329,0],[0,4.126],[0,0],[0,0],[0,0],[-3.05,4.216],[0,7.714],[6.01,0],[0,0],[0,0],[0,0],[4.126,5.651],[8.88,0],[0,-4.216],[0,0],[0,0],[0,0],[3.05,-3.947],[0.09,-7.714]],"v":[[118.361,-74.811],[105.355,-67.007],[71.537,-17.671],[70.73,-17.671],[37.989,-65.931],[23.547,-74.811],[7.311,-59.741],[11.975,-47.452],[48.843,2.781],[48.843,3.588],[10.719,56.781],[6.414,68.353],[21.573,82.257],[35.029,74.452],[68.577,26.821],[69.384,26.821],[103.471,74.363],[117.375,82.346],[132.714,67.904],[128.498,56.243],[89.926,4.216],[89.926,3.498],[129.664,-49.605],[133.88,-61.087]],"c":true}]}],"ix":2},"nm":"Path 8","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":2,"ty":"sh","ix":3,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0},"t":12,"s":[{"i":[[-24.6,0],[0,33.086],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.635,0],[-6.338,5.908],[-9.238,0],[0,-16.328],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-1.611,-3.438]],"o":[[35.342,0],[0,-29.541],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.197,0],[5.479,-5.264],[16.865,0],[0,16.758],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[7.734,16.436]],"v":[[83.5,84.938],[143.441,29.508],[92.738,-20.873],[57.933,-4.652],[57.289,-4.652],[60.941,-47.729],[122.172,-47.729],[135.492,-60.619],[122.172,-73.51],[54.174,-73.51],[33.764,-55.141],[29.252,1.686],[29.144,2.975],[44.398,19.088],[61.156,11.568],[83.607,2.33],[112.504,30.26],[83.5,59.156],[54.818,41.217],[40.639,33.16],[27.103,46.373],[29.682,57.008]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":18,"s":[{"i":[[-24.6,0],[0,33.086],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.635,0],[-6.338,5.908],[-9.238,0],[0,-16.328],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-1.611,-3.438]],"o":[[35.342,0],[0,-29.541],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.197,0],[5.479,-5.264],[16.865,0],[0,16.758],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[7.734,16.436]],"v":[[82.14,130.651],[142.081,75.221],[91.378,24.841],[56.574,41.061],[55.929,41.061],[59.581,-2.015],[120.812,-2.015],[134.132,-14.906],[120.812,-27.796],[52.814,-27.796],[32.404,-9.427],[27.892,47.399],[27.784,48.688],[43.038,64.801],[59.796,57.282],[82.247,48.044],[111.144,75.973],[82.14,104.87],[53.458,86.93],[39.279,78.874],[25.743,92.087],[28.322,102.721]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":25,"s":[{"i":[[-24.6,0],[0.15,1.539],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.635,0],[-6.338,5.908],[-9.238,0],[0,-16.328],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-1.611,-3.438]],"o":[[35.341,0],[-1.75,-17.914],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.197,0],[5.479,-5.264],[16.865,0],[0,16.758],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[7.734,16.436]],"v":[[82.061,123.979],[136.72,120.628],[89.532,86.887],[54.728,103.107],[54.083,103.107],[57.736,60.031],[118.966,60.031],[132.286,47.141],[118.966,34.25],[50.968,34.25],[30.558,52.619],[26.046,109.445],[25.939,110.734],[41.193,126.848],[57.95,119.328],[80.402,110.09],[102.518,117.592],[74.249,117.472],[53.11,121.151],[31.57,118.646],[21.368,121.329],[38.668,120.86]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":30,"s":[{"i":[[-24.599,0],[-2.056,16.076],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.635,0],[-6.338,5.908],[-9.238,0],[0,-16.328],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-1.611,-3.438]],"o":[[35.342,0],[2.065,-16.143],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.197,0],[5.479,-5.264],[16.866,0],[0,16.758],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[7.734,16.436]],"v":[[75.932,124.526],[131.518,119.266],[88.636,117.003],[56.239,115.593],[55.025,113.431],[56.84,90.147],[118.07,90.147],[131.39,77.257],[118.07,6
4.366],[50.072,64.366],[29.662,82.735],[27.606,110.703],[27.498,111.992],[29.874,126.18],[62.64,121.496],[80.364,120.209],[100.58,120.018],[77.691,122.098],[58.973,120.033],[39.797,111.443],[34.466,117.176],[35.459,118.285]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":35,"s":[{"i":[[-24.6,0],[0.993,16.176],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.635,0],[-6.338,5.908],[-9.238,0],[0,-16.328],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-3.795,-0.095]],"o":[[35.342,0],[-1.148,-18.696],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.197,0],[5.479,-5.264],[16.865,0],[0,16.758],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[3.767,0.094]],"v":[[76.405,120.895],[121.932,117.745],[84.694,111.417],[54.232,114.7],[53.587,114.7],[56.202,111.563],[117.433,111.563],[130.753,98.673],[117.433,85.782],[49.435,85.782],[29.025,104.151],[28.661,114.99],[28.554,116.279],[41.312,120.773],[60.865,120.355],[82.806,119.661],[90.994,118.497],[75.632,119.355],[49.903,116.164],[35.723,108.108],[22.188,121.32],[24.766,131.955]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":40,"s":[{"i":[[-24.6,0],[-0.267,33.085],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.635,0],[-6.338,5.908],[-9.238,0],[0,-16.328],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-3.678,-0.942]],"o":[[35.342,0],[0.179,-22.18],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.197,0],[5.479,-5.264],[16.865,0],[0,16.758],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[9.722,2.49]],"v":[[66.499,109.49],[119.348,116.947],[81.325,113.173],[54.063,120.25],[53.419,120.25],[55.82,124.428],[117.05,124.428],[130.371,111.538],[117.05,98.647],[49.052,98.647],[28.642,117.016],[28.084,121.025],[27.977,122.315],[37.46,126.996],[47.264,118.525],[69.716,109.286],[88.41,117.699],[78.507,118.934],[51.698,124.892],[41.328,108.439],[27.793,121.652],[30.371,132.287]],"c":true}]},{"t":45,"s":[{"i":[[-24.6,0],[-5.387,13.697],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.635,0],[-6.338,5.908],[-9.238,0],[0,-16.328],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-2.531,-2.83]],"o":[[35.342,0],[10.812,-27.491],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.197,0],[5.479,-5.264],[16.865,0],[0,16.758],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[1.877,2.098]],"v":[[81.651,122.242],[117.32,119.853],[83.158,113.089],[57.455,118.246],[56.811,118.246],[55.648,130.187],[116.879,130.187],[125.761,117.26],[116.879,104.405],[48.881,104.405],[28.471,122.775],[34.806,122.113],[34.699,123.402],[39.813,122.451],[56.571,114.932],[77.882,117.681],[86.383,120.605],[73.781,120.446],[54.697,116.934],[40.517,108.877],[26.982,122.09],[29.56,132.725]],"c":true}]}],"ix":2},"nm":"Path 6","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":3,"ty":"sh","ix":4,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.6,"y":0},"t":12,"s":[{"i":[[-9.668,0],[0,9.561],[9.561,0],[0,-9.668]],"o":[[9.561,0],[0,-9.668],[-9.668,0],[0,9.561]],"v":[[-16.435,83.489],[0.86,66.194],[-16.435,48.899],[-33.729,66.194]],"c":true}]},{"t":25,"s":[{"i":[[0.018,0],[0,-0.016],[-0.017,0],[0,0.016]],"o":[[-0.017,0],[0,0.016],[0.018,0],[0,-0.016]],"v":[[-16.435,66.164],[-16.466,66.194],[-16.435,66.223],[-16.403,66.194]],"c":true}]}],"ix":2},"nm":"Path 5","mn":"ADBE Vector Shape - Group","hd":false},{"ind":4,"ty":"sh","ix":5,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":12,"s":[{"i":[[7.804,-3.019],[7.02,-4.077],[0,0],[1.208,-3.503],[-9.593,-0.283],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[-4.187,-3.475],[-11.563,0.619],[-1.033,7.594],[0,0],[0,0],[0,0],[0,0],[1.063,8.076]],"o":[[-2.212,0.856],[-13.279,7.713],[0,0],[-1.893,5.488],[6.551,0.193],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[7.137,-0.382],[0.488,-3.586],[0,0],[0,0],[0,0],[0,0],[-1.458,-11.082]],"v":[[-95.929,-73.606],[-110.471,-65.588],[-137.587,-47.507],[-142.583,-40.122],[-133.551,-24.693],[-119.394,-30.735],[-100.468,-42.972],[-100.458,-41.161],[-99.877,14.958],[-99.968,41.567],[-100.351,62.312],[-97.151,78.102],[-82.78,84.138],[-68.717,72.031],[-68.465,63.97],[-68.489,47.31],[-68.486,32.288],[-68.188,-28.299],[-69.042,-64.293]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":18,"s":[{"i":[[7.804,-3.019],[7.02,-4.077],[0,0],[1.208,-3.503],[-9.593,-0.283],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[-4.187,-3.475],[-11.563,0.619],[-1.033,7.594],[0,0],[0,0],[0,0],[0,0],[1.063,8.076]],"o":[[-2.212,0.856],[-13.279,7.713],[0,0],[-1.893,5.488],[6.551,0.193],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[7.137,-0.382],[0.488,-3.586],[0,0],[0,0],[0,0],[0,0],[-1.458,-11.082]],"v":[[-90.723,-17.795],[-105.265,-9.776],[-132.381,8.304],[-137.378,15.689],[-128.346,31.118],[-114.189,25.076],[-95.262,12.839],[-95.252,14.65],[-94.671,70.769],[-94.762,97.378],[-95.145,118.123],[-91.945,133.913],[-76.008,127.777],[-63.511,127.842],[-63.259,119.781],[-63.283,103.121],[-63.28,88.1],[-62.982,27.512],[-63.836,-8.482]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":25,"s":[{"i":[[7.804,-3.019],[7.02,-4.077],[0,0],[1.208,-3.503],[-9.593,-0.283],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[-4.187,-3.475],[-11.563,0.619],[-1.033,7.594],[0,0],[0,0],[0,0],[0,0],[1.063,8.076]],"o":[[-2.212,0.856],[-13.279,7.713],[0,0],[-1.893,5.488],[6.551,0.193],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[7.137,-0.382],[0.488,-3.586],[0,0],[0,0],[0,0],[0,0],[-1.458,-11.082]],"v":[[-85.408,39.199],[-99.949,47.218],[-127.065,65.298],[-132.062,72.683],[-123.03,88.113],[-108.873,82.07],[-89.946,69.834],[-89.936,71.645],[-89.356,127.764],[-81.559,123.086],[-75.102,116.154],[-73.523,124.166],[-82.291,127.173],[-68.227,115.066],[-72.077,124.534],[-58.192,127.305],[-58.189,112.284],[-57.666,84.507],[-58.52,48.513]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":30,"s":[{"i":[[7.804,-3.019],[7.02,-4.077],[0,0],[1.208,-3.503],[-9.593,-0.283],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[-4.187,-3.475],[-11.563,0.619],[-1.033,7.594],[0,0],[0,0],[0,0],[0,0],[1.063,8.076]],"o":[[-2.212,0.856],[-13.279,7.713],[0,0],[-1.893,5.488],[6.551,0.193],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[7.137,-0.382],[0.488,-3.586],[0,0],[0,0],[0,0],[0,0],[-1.458,-11.082]],"v":[[-82.527,70.086],[-97.068,78.105],[-124.184,96.185],[-129.181,103.57],[-120.149,119],[-105.992,112.957],[-87.06
6,100.721],[-87.055,102.532],[-86.645,121.582],[-81.976,119.304],[-82.584,109.353],[-79.384,125.143],[-65.013,131.179],[-50.95,119.072],[-50.698,111.011],[-50.497,125.046],[-50.494,110.025],[-54.785,115.394],[-55.639,79.4]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":35,"s":[{"i":[[7.804,-3.019],[7.02,-4.077],[0,0],[1.208,-3.503],[-9.593,-0.283],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[-4.187,-3.475],[-11.563,0.619],[-1.033,7.594],[0,0],[0,0],[0,0],[0,0],[1.063,8.076]],"o":[[-2.212,0.856],[-13.279,7.713],[0,0],[-1.893,5.488],[6.551,0.193],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[7.137,-0.382],[0.488,-3.586],[0,0],[0,0],[0,0],[0,0],[-1.458,-11.082]],"v":[[-80.331,93.626],[-94.873,101.644],[-121.989,119.725],[-126.985,127.11],[-113.577,126.56],[-94.269,122.961],[-84.87,124.26],[-84.86,126.071],[-85.018,132.283],[-76.337,118.302],[-75.472,109.303],[-72.272,125.093],[-57.901,131.129],[-43.838,119.022],[-43.586,110.961],[-44.858,124.044],[-44.855,109.023],[-52.695,119.614],[-53.444,102.939]],"c":true}]},{"t":40,"s":[{"i":[[7.804,-3.019],[7.02,-4.077],[0,0],[1.208,-3.503],[-9.593,-0.283],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[-4.187,-3.475],[-11.563,0.619],[-1.033,7.594],[0,0],[0,0],[0,0],[0,0],[1.063,8.076]],"o":[[-2.212,0.856],[-13.279,7.713],[0,0],[-1.893,5.488],[6.551,0.193],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[7.137,-0.382],[0.488,-3.586],[0,0],[0,0],[0,0],[0,0],[-1.458,-11.082]],"v":[[-78.989,108.018],[-93.53,116.036],[-120.646,134.117],[-125.16,112.48],[-116.129,127.909],[-101.971,121.867],[-86.441,125.777],[-79.12,112.264],[-71.474,98.338],[-71.565,124.947],[-72.368,108.816],[-69.168,124.606],[-54.797,130.642],[-40.734,118.535],[-40.482,110.474],[-40.086,130.69],[-40.083,115.669],[-43.453,118.688],[-52.101,117.331]],"c":true}]}],"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":5,"ty":"sh","ix":6,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.2,"y":0},"t":4,"s":[{"i":[[0,0],[0,-28.435],[0,0],[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0]],"o":[[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0],[0,0],[0,-28.435],[0,0]],"v":[[137.378,-137.333],[188.867,-85.845],[188.867,85.845],[137.378,137.333],[-137.378,137.333],[-188.867,85.845],[-188.867,-85.845],[-137.378,-137.333]],"c":true}]},{"i":{"x":0,"y":1},"o":{"x":0.167,"y":0},"t":12,"s":[{"i":[[0,0],[0,-30.199],[0,0],[27.483,0],[0,0],[0,30.199],[0,0],[-27.483,0]],"o":[[27.483,0],[0,0],[0,30.199],[0,0],[-27.483,0],[0,0],[0,-30.199],[0,0]],"v":[[126.631,-155.306],[176.396,-100.624],[176.396,81.711],[126.631,136.393],[-138.928,136.393],[-188.693,81.711],[-188.693,-100.624],[-138.928,-155.306]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.953,"y":0},"t":51,"s":[{"i":[[0,0],[0,-26.886],[0,0],[26.444,0],[0,0],[0,26.886],[0,0],[-26.444,0]],"o":[[26.444,0],[0,0],[0,26.886],[0,0],[-26.444,0],[0,0],[0,-26.886],[0,0]],"v":[[114.899,-124.335],[162.782,-75.651],[162.782,86.683],[114.899,135.367],[-140.619,135.367],[-188.502,86.683],[-188.502,-75.651],[-140.619,-124.335]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":63,"s":[{"i":[[0,0],[0,-28.435],[0,0],[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0]],"o":[[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0],[0,0],[0,-28.435],[0,0]],"v":[[137.378,-137.333],[188.867,-85.845],[188.867,85.845],[137.378,137.333],[-137.378,137.333],[-188.867,85.845],[-188.867,-85.845],[-137.378,-137.333]],"c":true}]},{"t":68,"s":[{"i":[[0,0],[0,-28.435],[0,0],[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0]],"o":[[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0],[0,0],[0,-28.435],[0,0]],"v":[[137.378,-137.333],[188.867,-85.845],[188.867,85.845],[137.378,137.333],[-137.378,137.333],[-188.867,85.845],[-188.867,-85.845],[-137.378,-137.333]],"c":true}]}],"ix":2},"nm":"Path 4","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Combined-Shape","np":7,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":180,"st":0,"ct":1,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Lines","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0,"y":1},"o":{"x":0.2,"y":0},"t":4,"s":[70,277.55,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.953,"y":0},"t":41,"s":[130,267.55,0],"to":[0,0,0],"ti":[0,0,0]},{"t":62,"s":[178,259.55,0]}],"ix":2,"l":2},"a":{"a":0,"k":[-186,-0.45,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":10,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-270.667,102.8],[-208,100.1]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":20,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-270.667,102.8],[-174,99.6]],"c":false}]},{"t":34,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-269.667,101.3],[-155,98.1]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":6,"s":[100]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":20,"s":[0]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":28,"s":[38]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":35,"s":[67]},{"t":42,"s":[0]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.73],"y":[1]},"o":{"x":[0.27],"y":[0]},"t":33,"s":[100]},{"t":41,"s":[0]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":30,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,-204],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 3","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":30,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-270.667,102.8],[-174,99.6]],"c":false}]},{"t":40,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-299.667,101.8],[-174,99.6]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":8,"s":[100]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":18,"s":[0]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":26,"s":[72]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":33,"s":[15]},{"t":40,"s":[0]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.73],"y":[1]},"o":{"x":[0.27],"y":[0]},"t":31,"s":[100]},{"t":39,"s":[0]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":30,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,-100],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":3,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":5,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-270.238,87.8],[-134.571,87.6]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":7,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-270.965,68.8],[-135.299,68.6]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":10,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-271.153,77.8],[-135.486,77.6]],"c":false}]},{"t":15,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-269.76,95.8],[-134.094,95.6]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":5,"s":[100]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":15,"s":[19]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":23,"s":[67]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":30,"s":[0]},{"t":37,"s":[0]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.73],"y":[1]},"o":{"x":[0.27],"y":[0]},"t":28,"s":[100]},{"t":36,"s":[0]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":30,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":3,"cix":2,"bm":0,"ix":3,"mn":"ADBE Vector Group","hd":false}],"ip":10,"op":180,"st":0,"ct":1,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/speed_1to15.json b/TMessagesProj/src/main/res/raw/speed_1to15.json new file mode 100644 index 0000000000..5ebc2935b4 --- /dev/null +++ b/TMessagesProj/src/main/res/raw/speed_1to15.json @@ -0,0 +1 @@ +{"v":"5.10.1","fr":60,"ip":0,"op":75,"w":512,"h":512,"nm":"1xto15","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Shape Layer 2","parent":4,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":3,"s":[11,-240,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":6,"s":[11,-244,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":12,"s":[11,-258,0],"to":[0,0,0],"ti":[1.667,-3.167,0]},{"i":{"x":0.517,"y":0.697},"o":{"x":0.167,"y":0.167},"t":17,"s":[11,-246,0],"to":[-0.989,1.88,0],"ti":[2.756,-4.755,0]},{"i":{"x":0.57,"y":1},"o":{"x":0.243,"y":1},"t":33,"s":[2.977,-229.433,0],"to":[-1.887,3.255,0],"ti":[-1.084,-0.474,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":55,"s":[1,-227,0],"to":[2.667,1.167,0],"ti":[-4.333,2,0]},{"t":68,"s":[27,-239,0]}],"ix":2,"l":2},"a":{"a":0,"k":[0,0,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-134,120],[115,119]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":29,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0,0,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":75,"st":0,"ct":1,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Shape Layer 
1","parent":4,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[0,0,0],"ix":2,"l":2},"a":{"a":0,"k":[0,0,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-134,120],[115,119]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":29,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0,0,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":75,"st":0,"ct":1,"bm":0},{"ddd":0,"ind":3,"ty":3,"nm":"Null 1","sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":55,"s":[0]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":65,"s":[3]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":70,"s":[-1]},{"t":74,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[199,388,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":7,"s":[199,280,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":13,"s":[199,306,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":18,"s":[199,314,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":25,"s":[199,299,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":32,"s":[199,314,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":39,"s":[199,299,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":46,"s":[199,314,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":53,"s":[199,299,0],"to":[0,0,0],"ti":[0,0,0]},{"t":62,"s":[199,306,0]}],"ix":2,"l":2},"a":{"a":0,"k":[50,50,0],"ix":1,"l":2},"s":{"a":1,"k":[{"i":{"x":[0.1,0.1,0.1],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":0,"s":[0,0,100]},{"t":9,"s":[100,100,100]}],"ix":6,"l":2}},"ao":0,"ip":0,"op":180,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Body 
2","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":2,"s":[10]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":13,"s":[-14]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":20,"s":[-10]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":27,"s":[-13]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":34,"s":[-10]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":41,"s":[-12]},{"i":{"x":[0.506],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":48,"s":[-9]},{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":55,"s":[-13]},{"t":65,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.2,"y":0},"t":2,"s":[-82.367,118.339,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0,"y":1},"o":{"x":0.2,"y":0},"t":4,"s":[-95.349,134.878,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.953,"y":0},"t":41,"s":[62.651,134.878,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":0.7},"o":{"x":0.01,"y":0.01},"t":62,"s":[-41.349,134.878,0],"to":[0,0,0],"ti":[0,0,0]},{"t":66,"s":[-41.349,134.878,0]}],"ix":2,"l":2},"a":{"a":0,"k":[-148.349,134.878,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":0,"s":[{"i":[[-24.6,0],[27.466,18.448],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.144,3.193],[-10.795,1.146],[-9.238,0],[-3.164,-4.027],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-1.611,-3.438]],"o":[[35.342,0],[-6.091,-4.091],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.552,-2.377],[8.238,-0.854],[16.865,0],[10.352,13.178],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[7.734,16.436]],"v":[[51.053,-111.701],[101.415,-128.294],[59.252,-123.525],[25.739,-120.89],[32.729,-122.612],[30.112,-115.221],[91.342,-115.221],[91.329,-126.251],[75.015,-127.005],[7.017,-127.005],[-10.399,-120.772],[-9.62,-116.714],[-9.727,-115.425],[9.119,-113.875],[30.246,-120.317],[56.361,-121.442],[74.008,-123.067],[47.46,-122.918],[18.843,-120.242],[4.664,-128.299],[-7.075,-122.368],[-2.701,-119.015]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":12,"s":[{"i":[[-24.6,0],[27.466,18.448],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.144,3.193],[-10.795,1.146],[-9.238,0],[-3.164,-4.027],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-1.611,-3.438]],"o":[[35.342,0],[-6.091,-4.091],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.552,-2.377],[8.238,-0.854],[16.865,0],[10.352,13.178],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[7.734,16.436]],"v":[[47.059,-134.351],[97.421,-150.945],[55.258,-146.175],[21.745,-143.541],[28.735,-145.263],[26.118,-137.871],[87.348,-137.871],[87.335,-148.901],[71.021,-149.656],[3.023,-149.656],[-14.393,-143.423],[-13.614,-139.365],[-13.721,-138.076],[5.125,-136.526],[26.252,-142.968],[52.367,-144.093],[70.014,-145.717],[43.466,-145.569],[14.85,-142.893],[0.67,-150.95],[-11.069,-145.018],[-6.695,-141.665]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":16,"s":[{"i":[[-24.6,0],[0.199,19.559],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.635,0],[-6.338,5.908],[-9.238,0],[0,-16.328],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-1.611,-3.
437]],"o":[[35.342,0],[-0.141,-13.794],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.197,0],[5.479,-5.264],[16.865,0],[0,16.758],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[7.734,16.436]],"v":[[51.558,-89.845],[107.994,-136.319],[48.678,-131.256],[37.42,-135.965],[14.381,-134.236],[16.022,-128.607],[79.861,-126.497],[83.483,-134.866],[78.435,-134.162],[33.31,-140.393],[1.59,-135.775],[-6.293,-133.445],[-6.4,-132.156],[9.645,-126.62],[34.88,-126.131],[58.484,-128.973],[78.951,-137.198],[51.558,-115.627],[22.877,-133.566],[8.697,-141.623],[-4.838,-128.41],[-2.26,-117.775]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":20,"s":[{"i":[[-24.6,0],[0,33.086],[29.649,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.635,0],[-6.338,5.908],[-9.238,0],[0,-16.328],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-1.611,-3.438]],"o":[[35.342,0],[0,-29.541],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.197,0],[5.479,-5.264],[16.865,0],[0,16.758],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[7.734,16.436]],"v":[[48.437,-65.19],[108.378,-120.62],[55.77,-125.642],[34.084,-123.355],[21.129,-125.526],[21.891,-123.418],[83.122,-123.418],[95.083,-122.84],[95.202,-131.33],[27.204,-131.33],[-5.257,-125.24],[-7.024,-124.286],[-7.132,-122.997],[13.247,-118.673],[31.251,-118.864],[53.443,-117.994],[77.441,-119.868],[48.437,-90.971],[19.755,-108.911],[5.575,-116.967],[-7.96,-103.755],[-5.382,-93.12]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":25,"s":[{"i":[[-24.6,0],[0,33.086],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.635,0],[-6.338,5.908],[-9.238,0],[0,-16.328],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-1.611,-3.438]],"o":[[35.342,0],[0,-29.541],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.197,0],[5.479,-5.264],[16.865,0],[0,16.758],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[7.734,16.436]],"v":[[48.159,-31.34],[108.1,-86.77],[58.304,-122.619],[22.592,-120.93],[21.948,-120.93],[15.974,-120.048],[77.204,-120.048],[97.931,-115.445],[84.611,-128.336],[16.613,-128.336],[-3.797,-109.967],[-6.089,-114.592],[-6.197,-113.303],[9.057,-97.19],[25.815,-104.709],[48.266,-113.948],[77.163,-86.018],[48.159,-57.121],[19.477,-75.061],[5.298,-83.118],[-8.238,-69.905],[-5.659,-59.27]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":30,"s":[{"i":[[-24.6,0],[0,33.086],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.635,0],[-6.338,5.908],[-9.238,0],[0,-16.328],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-1.611,-3.438]],"o":[[35.342,0],[0,-29.541],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.197,0],[5.479,-5.264],[16.865,0],[0,16.758],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[7.734,16.436]],"v":[[51.505,4.78],[111.447,-50.65],[60.744,-101.03],[25.939,-84.81],[25.294,-84.81],[29.998,-113.364],[88.723,-118.485],[100.173,-107.758],[86.852,-120.649],[19.869,-113.286],[-0.956,-114.925],[-2.743,-78.472],[-2.85,-77.183],[12.404,-61.069],[29.162,-68.589],[51.613,-77.827],[80.509,-49.898],[51.505,-21.001],[22.824,-38.941],[8.644,-46.997],[-4.891,-33.784],[-2.313,-23.15]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t"
:34,"s":[{"i":[[-24.6,0],[0,33.086],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.635,0],[-6.338,5.908],[-9.238,0],[0,-16.328],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-1.611,-3.438]],"o":[[35.342,0],[0,-29.541],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.197,0],[5.479,-5.264],[16.865,0],[0,16.758],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[7.734,16.436]],"v":[[50.596,33.287],[110.538,-22.143],[59.835,-72.524],[25.03,-56.303],[24.385,-56.303],[28.038,-99.379],[89.268,-99.379],[102.589,-112.27],[89.268,-125.16],[21.27,-125.16],[0.86,-106.791],[-3.652,-49.965],[-3.759,-48.676],[11.495,-32.563],[28.253,-40.082],[50.704,-49.321],[79.6,-21.391],[50.596,7.506],[21.915,-10.434],[7.735,-18.491],[-5.8,-5.278],[-3.222,5.357]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.167,"y":0.167},"t":40,"s":[{"i":[[-24.6,0],[0,33.086],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.635,0],[-6.338,5.908],[-9.238,0],[0,-16.328],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-1.611,-3.438]],"o":[[35.342,0],[0,-29.541],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.197,0],[5.479,-5.264],[16.865,0],[0,16.758],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[7.734,16.436]],"v":[[50.483,74.843],[110.425,19.413],[59.722,-30.968],[24.917,-14.747],[24.272,-14.747],[27.925,-57.823],[89.155,-57.823],[102.476,-70.714],[89.155,-83.604],[21.157,-83.604],[0.747,-65.235],[-3.765,-8.409],[-3.872,-7.12],[11.382,8.993],[28.14,1.474],[50.591,-7.765],[79.487,20.165],[50.483,49.062],[21.802,31.122],[7.622,23.065],[-5.913,36.278],[-3.335,46.913]],"c":true}]},{"i":{"x":0.19,"y":1},"o":{"x":0.81,"y":0},"t":45,"s":[{"i":[[-24.6,0],[0,33.086],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.635,0],[-6.338,5.908],[-9.238,0],[0,-16.328],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-1.611,-3.438]],"o":[[35.342,0],[0,-29.541],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.197,0],[5.479,-5.264],[16.865,0],[0,16.758],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[7.734,16.436]],"v":[[57.91,80.335],[117.852,24.906],[67.149,-25.475],[32.344,-9.255],[31.699,-9.255],[35.352,-52.331],[96.582,-52.331],[109.903,-65.221],[96.582,-78.112],[28.584,-78.112],[8.174,-59.743],[3.662,-2.917],[3.555,-1.628],[18.809,14.486],[35.567,6.966],[58.018,-2.272],[86.914,25.658],[57.91,54.554],[29.229,36.615],[15.049,28.558],[1.514,41.771],[4.092,52.406]],"c":true}]},{"t":68,"s":[{"i":[[-24.6,0],[0,33.086],[29.648,0],[6.338,-9.99],[0,0],[0,0],[0,0],[0,7.842],[8.057,0],[0,0],[1.074,-13.105],[0,0],[0,-0.43],[-10.635,0],[-6.338,5.908],[-9.238,0],[0,-16.328],[16.865,0],[5.586,11.172],[6.445,0],[0,-8.271],[-1.611,-3.438]],"o":[[35.342,0],[0,-29.541],[-15.791,0],[0,0],[0,0],[0,0],[8.164,0],[0,-7.842],[0,0],[-12.568,0],[0,0],[-0.107,0.537],[-0.43,8.701],[7.197,0],[5.479,-5.264],[16.865,0],[0,16.758],[-13.965,0],[-3.652,-5.801],[-8.379,0],[0,3.867],[7.734,16.436]],"v":[[83.5,84.938],[143.441,29.508],[92.738,-20.873],[57.933,-4.652],[57.289,-4.652],[60.941,-47.729],[122.172,-47.729],[135.492,-60.619],[122.172,-73.51],[54.174,-73.51],[33.764,-55.141],[29.252,1.686],[29.144,2.975],[44.398,19.088],[61.156,11.568],[83.607,2.33],[112.504,30.26],[83.5,59.156],[54.818,41.217],[40.639,33.16],[27.103,46.373],[29.682,
57.008]],"c":true}]}],"ix":2},"nm":"Path 6","mn":"ADBE Vector Shape - Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.167,"y":0.167},"t":15,"s":[{"i":[[-0.007,0],[0,-0.049],[0.007,0],[0,0.05]],"o":[[0.007,0],[0,0.05],[-0.007,0],[0,-0.049]],"v":[[-19.862,63.58],[-19.851,63.668],[-19.862,63.757],[-19.874,63.668]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":30,"s":[{"i":[[-9.668,0],[0,9.561],[9.561,0],[0,-9.668]],"o":[[9.561,0],[0,-9.668],[-9.668,0],[0,9.561]],"v":[[-31.482,77.279],[-14.187,59.984],[-31.482,42.689],[-48.777,59.984]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":45,"s":[{"i":[[-9.668,0],[0,9.561],[9.561,0],[0,-9.668]],"o":[[9.561,0],[0,-9.668],[-9.668,0],[0,9.561]],"v":[[-31.482,77.279],[-14.187,59.984],[-31.482,42.689],[-48.777,59.984]],"c":true}]},{"t":63,"s":[{"i":[[-9.668,0],[0,9.561],[9.561,0],[0,-9.668]],"o":[[9.561,0],[0,-9.668],[-9.668,0],[0,9.561]],"v":[[-16.435,83.489],[0.86,66.194],[-16.435,48.899],[-33.729,66.194]],"c":true}]}],"ix":2},"nm":"Path 5","mn":"ADBE Vector Shape - Group","hd":false},{"ind":3,"ty":"sh","ix":4,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.4,"y":0},"t":2,"s":[{"i":[[2.918,-3.404],[5.234,-5.12],[0,0],[1.578,-3.059],[-8.515,-0.332],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[-3.717,-3.187],[-10.267,0.568],[-0.621,7.097],[0,0],[0,0],[0,0],[0,0],[0.924,6.601]],"o":[[-1.396,1.629],[-10.963,10.723],[0,0],[-2.424,4.699],[9.44,0.368],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[6.337,-0.351],[0.289,-3.306],[0,0],[0,0],[0,0],[0,0],[-2.072,-14.802]],"v":[[-32.087,-67.908],[-43.056,-56.827],[-68.908,-31.93],[-74.329,-23.551],[-64.105,-6.331],[-49.974,-10.777],[-38.372,-23.327],[-37.981,-21.666],[-38.367,14.331],[-38.67,52.382],[-37.9,65.219],[-34.837,81.422],[-22.077,85.239],[-9.867,74.134],[-9.922,66.74],[-9.943,51.172],[-9.94,28.221],[-10.147,-28.672],[-10.934,-60.083]],"c":true}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":12,"s":[{"i":[[3.286,-3.711],[5.895,-5.582],[0,0],[1.777,-3.335],[-9.59,-0.362],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[-4.187,-3.475],[-11.563,0.619],[-0.699,7.737],[0,0],[0,0],[0,0],[0,0],[1.041,7.197]],"o":[[-1.572,1.776],[-12.347,11.691],[0,0],[-2.73,5.123],[10.632,0.402],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[7.137,-0.382],[0.326,-3.604],[0,0],[0,0],[0,0],[0,0],[-2.334,-16.137]],"v":[[-87.804,-94.075],[-100.158,-81.994],[-129.274,-54.851],[-135.38,-45.716],[-123.864,-26.943],[-107.949,-31.79],[-94.882,-45.472],[-94.442,-43.661],[-94.877,-4.417],[-95.218,37.067],[-94.351,51.062],[-90.901,68.727],[-76.53,72.888],[-62.779,60.781],[-62.84,52.72],[-62.864,35.747],[-62.861,10.726],[-63.094,-51.299],[-63.979,-85.543]],"c":true}]},{"t":68,"s":[{"i":[[7.804,-3.019],[7.02,-4.077],[0,0],[1.208,-3.503],[-9.593,-0.283],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[-4.187,-3.475],[-11.563,0.619],[-1.033,7.594],[0,0],[0,0],[0,0],[0,0],[1.063,8.076]],"o":[[-2.212,0.856],[-13.279,7.713],[0,0],[-1.893,5.488],[6.551,0.193],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[0,0],[7.137,-0.382],[0.488,-3.586],[0,0],[0,0],[0,0],[0,0],[-1.458,-11.082]],"v":[[-95.929,-73.606],[-110.471,-65.588],[-137.587,-47.507],[-142.583,-40.122],[-133.551,-24.693],[-119.394,-30.735],[-100.468,-42.972],[-100.458,-41.161],[-99.877,14.958],[-99.968,41.567],[-100.351,62.312],[-97.151,78.102],[-82.78,84.138],[-68.717,72.031],[-68.465,63.97],[-68.489,47.31],[-68.486,32.288],[-68.188,-28.299],[-69.042,-64.293]],"c":true}]}],"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":4,"ty":"sh","ix":5,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.4,"y":0},"t":2,"s":[{"i":[[8.342,0],[3.678,-5.472],[0,0],[0,0],[0,0],[6.1,0],[0,-8.701],[-3.498,-4.665],[0,0],[0,0],[0,0],[0,-3.947],[-8.522,0],[-4.037,5.831],[0,0],[0,0],[0,0],[-5.831,0],[0,8.432],[2.96,3.857],[0,0],[0,0],[0,0],[0,4.216]],"o":[[-6.1,0],[0,0],[0,0],[0,0],[-4.126,-6.279],[-9.329,0],[0,4.126],[0,0],[0,0],[0,0],[-3.05,4.216],[0,7.714],[6.01,0],[0,0],[0,0],[0,0],[4.126,5.651],[8.88,0],[0,-4.216],[0,0],[0,0],[0,0],[3.05,-3.947],[0.09,-7.714]],"v":[[137.788,-73.187],[124.781,-65.383],[90.963,-16.047],[90.156,-16.047],[57.415,-64.306],[42.973,-73.187],[26.737,-58.117],[31.401,-45.828],[68.269,4.405],[68.269,5.213],[30.146,58.406],[25.84,69.977],[41,83.881],[54.455,76.077],[88.003,28.445],[88.811,28.445],[122.897,75.987],[136.801,83.971],[152.14,69.529],[147.924,57.868],[109.352,5.841],[109.352,5.123],[149.09,-47.98],[153.306,-59.462]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0},"t":12,"s":[{"i":[[7.908,-0.143],[3.384,-6.028],[0,0],[0,0],[0,0],[5.782,-0.104],[-0.162,-9.485],[-3.404,-5.025],[0,0],[0,0],[0,0],[-0.074,-4.303],[-8.078,0.146],[-3.718,6.425],[0,0],[0,0],[0,0],[-5.527,0.1],[0.157,9.192],[2.878,4.154],[0,0],[0,0],[0,0],[0.079,4.596]],"o":[[-5.782,0.104],[0,0],[0,0],[0,0],[-4.029,-6.775],[-8.844,0.16],[0.077,4.498],[0,0],[0,0],[0,0],[-2.812,4.648],[0.144,8.41],[5.697,-0.103],[0,0],[0,0],[0,0],[4.017,6.09],[8.419,-0.152],[-0.079,-4.596],[0,0],[0,0],[0,0],[2.818,-4.355],[-0.059,-8.411]],"v":[[89.471,-101.108],[77.287,-92.378],[46.149,-38.016],[45.384,-38.003],[13.445,-90.052],[-0.412,-99.486],[-15.522,-82.781],[-10.871,-69.463],[25.017,-15.333],[25.032,-14.453],[-10.115,44.187],[-13.981,56.875],[0.65,71.773],[13.26,63.036],[44.174,10.537],[44.939,10.523],[78.14,61.767],[91.47,70.233],[105.742,54.227],[101.527,41.586],[63.99,-14.471],[63.977,-15.253],[100.656,-73.823],[104.439,-86.412]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":20,"s":[{"i":[[7.908,-0.143],[3.384,-6.028],[0,0],[0,0],[0,0],[5.782,-0.104],[-0.162,-9.485],[-3.404,-5.025],[0,0],[0,0],[0,0],[-0.074,-4.303],[-8.078,0.146],[-3.718,6.425],[0,0],[0,0],[0,0],[-5.527,0.1],[0.157,9.192],[2.878,4.154],[0,0],[0,0],[0,0],[0.079,4.596]],"o":[[-5.782,0.104],[0,0],[0,0],[0,0],[-4.029,-6.775],[-8.844,0.16],[0.077,4.498],[0,0],[0,0],[0,0],[-2.812,4.648],[0.144,8.41],[5.697,-0.103],[0,0],[0,0],[0,0],[4.017,6.09],[8.419,-0.152],[-0.079,-4.596],[0,0],[0,0],[0,0],[2.818,-4.355],[-0.059,-8.411]],"v":[[91.511,-56.339],[79.327,-47.609],[48.189,6.753],[47.424,6.766],[15.485,-45.283],[1.628,-54.717],[-13.482,-38.012],[-8.831,-24.694],[27.057,29.436],[27.072,30.316],[-8.075,88.956],[-11.941,101.644],[2.69,116.542],[15.3,107.805],[46.214,55.306],[46.979,55.292],[80.18,106.536],[93.51,115.002],[107.782,98.996],[103.567,86.355],[66.03,30.298],[66.017,29.516],[102.696,-29.054],[106.479,-41.643]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":27,"s":[{"i":[[7.908,-0.143],[3.384,-6.028],[0,0],[0,0],[0,0],[5.782,-0.104],[-0.162,-9.485],[-3.404,-5.025],[0,0],[0,0],[0,0],[-0.074,-4.303],[-8.078,0.146],[-3.718,6.425],[0,0],[0,0],[0,0],[-5.527,0.1],[0.157,9.192],[2.878,4.154],[0,0],[0,0],[0,0],[0.079,4.596]],"o":[[-5.782,0.104],[0,0],[0,0],[0,0],[-4.029,-6.775],[-8.844,0.16],[0.077,4.498],[0,0],[0,0],[0,0],[-2.812,4.648],[0.144,8.41],[5.697,-0.103],[0,0],[0,0],[0,0],[4.017,6.09],[8.419,-0.152],[-0.079,-4.596],[0,0],[0,0],[0,0],[2.818,-4.355],[-0.059,-8.411]],"v":[[94.101,0.512],[81.917,9.242
],[50.779,63.604],[50.014,63.617],[18.075,11.568],[4.218,2.134],[-10.892,18.839],[-6.241,32.157],[29.647,86.287],[29.662,87.167],[12.694,111.517],[10.516,116.898],[25.147,131.796],[37.757,123.059],[48.804,112.157],[49.569,112.143],[56.54,121.411],[69.87,129.877],[84.142,113.871],[83.756,107.566],[68.62,87.149],[68.607,86.367],[105.286,27.797],[109.069,15.208]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":34,"s":[{"i":[[7.908,-0.143],[3.384,-6.028],[0,0],[0,0],[0,0],[5.782,-0.104],[-0.162,-9.485],[-3.404,-5.025],[0,0],[0,0],[0,0],[-0.074,-4.303],[-8.078,0.146],[-3.718,6.425],[0,0],[0,0],[0,0],[-5.527,0.1],[0.157,9.192],[2.878,4.154],[0,0],[0,0],[0,0],[0.079,4.596]],"o":[[-5.782,0.104],[0,0],[0,0],[0,0],[-4.029,-6.775],[-8.844,0.16],[0.077,4.498],[0,0],[0,0],[0,0],[-2.812,4.648],[0.144,8.41],[5.697,-0.103],[0,0],[0,0],[0,0],[4.017,6.09],[8.419,-0.152],[-0.079,-4.596],[0,0],[0,0],[0,0],[2.818,-4.355],[-0.059,-8.411]],"v":[[96.575,54.801],[84.391,63.531],[53.253,117.893],[52.488,117.906],[20.549,65.857],[6.692,56.423],[-8.418,73.128],[-3.767,86.446],[16.693,109.297],[16.708,110.177],[10.374,120.587],[8.57,121.581],[19.76,130.795],[35.811,127.742],[50.222,125.643],[50.987,125.629],[80.126,127.924],[94.469,127.049],[107.728,120.384],[101.451,119.437],[85.21,115.368],[85.197,114.586],[107.76,82.086],[111.543,69.497]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":38,"s":[{"i":[[9.39,2.067],[5.643,-4.721],[0,0],[0,0],[0,0],[5.782,-0.104],[-0.162,-9.485],[-3.404,-5.025],[0,0],[0,0],[0,0],[-0.074,-4.303],[-8.078,0.146],[-3.718,6.425],[0,0],[0,0],[0,0],[-5.527,0.1],[0.157,9.192],[2.878,4.154],[0,0],[0,0],[0,0],[-2.75,5.262]],"o":[[-5.648,-1.243],[0,0],[0,0],[0,0],[-4.029,-6.775],[-8.844,0.16],[0.077,4.498],[0,0],[0,0],[0,0],[-2.812,4.648],[0.144,8.41],[5.697,-0.103],[0,0],[0,0],[0,0],[4.017,6.09],[8.419,-0.152],[-0.079,-4.596],[0,0],[0,0],[0,0],[2.818,-4.355],[3.109,-7.595]],"v":[[101.179,83.803],[82.716,90.779],[50.874,114.312],[44.703,115.749],[21.817,93.688],[7.96,84.254],[-7.15,100.959],[1.108,112.189],[24.076,111.568],[24.091,112.448],[8.724,121.279],[7.965,118.098],[21.359,127.941],[34.41,128.163],[51.698,126.558],[52.463,126.544],[90.031,122.725],[103.867,126.521],[110.937,124.001],[110.084,119.403],[77.821,115.034],[96.15,116.866],[107.348,105.735],[112.811,97.328]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":40,"s":[{"i":[[8.649,0.962],[4.513,-5.374],[0,0],[0,0],[0,0],[5.782,-0.104],[-0.162,-9.485],[-3.404,-5.025],[0,0],[0,0],[0,0],[-0.074,-4.303],[-8.078,0.146],[-3.718,6.425],[0,0],[0,0],[0,0],[-5.527,0.1],[0.157,9.192],[2.878,4.154],[0,0],[0,0],[0,0],[-1.336,4.929]],"o":[[-5.715,-0.57],[0,0],[0,0],[0,0],[-4.029,-6.775],[-8.844,0.16],[0.077,4.498],[0,0],[0,0],[0,0],[-2.812,4.648],[0.144,8.41],[5.697,-0.103],[0,0],[0,0],[0,0],[4.017,6.09],[8.419,-0.152],[-0.079,-4.596],[0,0],[0,0],[0,0],[2.818,-4.355],[1.525,-8.003]],"v":[[100.145,97.133],[84.821,104.987],[49.685,112.522],[40.811,114.67],[22.451,107.604],[8.594,98.17],[-6.516,114.875],[2.498,114.914],[27.767,112.703],[27.782,113.583],[7.899,121.625],[7.663,116.357],[22.159,126.513],[33.709,128.374],[52.437,127.016],[53.202,127.002],[94.984,120.126],[108.567,126.257],[112.541,125.81],[114.4,119.387],[74.126,114.868],[83.284,115.392],[107.143,117.559],[113.445,111.244]],"c":true}]},{"t":42,"s":[{"i":[[7.908,-0.143],[3.384,-6.028],[0,0],[0,0],[0,0],[5.782,-0.104],[-0.162,-9.485],[-3.404,-5.025],[0,0],[0,0],[0,0],[-0.074,-4.303],[-8.078,0.146],[-3.718,6.425],[0,0],[0,0],[0,0],
[-5.527,0.1],[0.157,9.192],[2.878,4.154],[0,0],[0,0],[0,0],[0.079,4.596]],"o":[[-5.782,0.104],[0,0],[0,0],[0,0],[-4.029,-6.775],[-8.844,0.16],[0.077,4.498],[0,0],[0,0],[0,0],[-2.812,4.648],[0.144,8.41],[5.697,-0.103],[0,0],[0,0],[0,0],[4.017,6.09],[8.419,-0.152],[-0.079,-4.596],[0,0],[0,0],[0,0],[2.818,-4.355],[-0.059,-8.411]],"v":[[99.111,110.464],[86.927,119.194],[48.496,110.732],[36.919,113.592],[23.085,121.52],[9.228,112.086],[-5.882,128.791],[3.887,117.638],[31.459,113.839],[31.474,114.719],[7.075,121.971],[7.361,114.615],[22.958,125.086],[33.009,128.584],[53.175,127.473],[53.94,127.46],[99.936,117.527],[113.266,125.993],[114.145,127.619],[118.717,119.37],[70.432,114.701],[70.419,113.919],[106.937,129.384],[114.079,125.16]],"c":true}]}],"ix":2},"nm":"Path 3","mn":"ADBE Vector Shape - Group","hd":false},{"ind":5,"ty":"sh","ix":6,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.2,"y":0},"t":4,"s":[{"i":[[0,0],[0,-28.435],[0,0],[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0]],"o":[[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0],[0,0],[0,-28.435],[0,0]],"v":[[137.378,-137.333],[188.867,-85.845],[188.867,85.845],[137.378,137.333],[-137.378,137.333],[-188.867,85.845],[-188.867,-85.845],[-137.378,-137.333]],"c":true}]},{"i":{"x":0,"y":1},"o":{"x":0.167,"y":0},"t":12,"s":[{"i":[[0,0],[0,-30.199],[0,0],[27.483,0],[0,0],[0,30.199],[0,0],[-27.483,0]],"o":[[27.483,0],[0,0],[0,30.199],[0,0],[-27.483,0],[0,0],[0,-30.199],[0,0]],"v":[[126.631,-155.306],[176.396,-100.624],[176.396,81.711],[126.631,136.393],[-138.928,136.393],[-188.693,81.711],[-188.693,-100.624],[-138.928,-155.306]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.953,"y":0},"t":51,"s":[{"i":[[0,0],[0,-26.886],[0,0],[26.444,0],[0,0],[0,26.886],[0,0],[-26.444,0]],"o":[[26.444,0],[0,0],[0,26.886],[0,0],[-26.444,0],[0,0],[0,-26.886],[0,0]],"v":[[114.899,-124.335],[162.782,-75.651],[162.782,86.683],[114.899,135.367],[-140.619,135.367],[-188.502,86.683],[-188.502,-75.651],[-140.619,-124.335]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":63,"s":[{"i":[[0,0],[0,-28.435],[0,0],[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0]],"o":[[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0],[0,0],[0,-28.435],[0,0]],"v":[[137.378,-137.333],[188.867,-85.845],[188.867,85.845],[137.378,137.333],[-137.378,137.333],[-188.867,85.845],[-188.867,-85.845],[-137.378,-137.333]],"c":true}]},{"t":68,"s":[{"i":[[0,0],[0,-28.435],[0,0],[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0]],"o":[[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0],[0,0],[0,-28.435],[0,0]],"v":[[137.378,-137.333],[188.867,-85.845],[188.867,85.845],[137.378,137.333],[-137.378,137.333],[-188.867,85.845],[-188.867,-85.845],[-137.378,-137.333]],"c":true}]}],"ix":2},"nm":"Path 4","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Combined-Shape","np":7,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":0,"op":180,"st":0,"ct":1,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Lines","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0,"y":1},"o":{"x":0.2,"y":0},"t":4,"s":[70,277.55,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.953,"y":0},"t":41,"s":[130,267.55,0],"to":[0,0,0],"ti":[0,0,0]},{"t":62,"s":[178,259.55,0]}],"ix":2,"l":2},"a":{"a":0,"k":[-186,-0.45,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":10,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-270.667,102.8],[-208,100.1]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":20,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-270.667,102.8],[-174,99.6]],"c":false}]},{"t":34,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-269.667,101.3],[-155,98.1]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":6,"s":[100]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":20,"s":[0]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":28,"s":[38]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":35,"s":[67]},{"t":42,"s":[0]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.73],"y":[1]},"o":{"x":[0.27],"y":[0]},"t":33,"s":[100]},{"t":41,"s":[0]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":30,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,-204],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 3","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":30,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-270.667,102.8],[-174,99.6]],"c":false}]},{"t":40,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-299.667,101.8],[-174,99.6]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":8,"s":[100]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":18,"s":[0]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":26,"s":[72]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":33,"s":[15]},{"t":40,"s":[0]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.73],"y":[1]},"o":{"x":[0.27],"y":[0]},"t":31,"s":[100]},{"t":39,"s":[0]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":30,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,-100],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":3,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector 
Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":5,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-270.238,87.8],[-134.571,87.6]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":7,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-270.965,68.8],[-135.299,68.6]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":10,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-271.153,77.8],[-135.486,77.6]],"c":false}]},{"t":15,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-269.76,95.8],[-134.094,95.6]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":5,"s":[100]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":15,"s":[19]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":23,"s":[67]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":30,"s":[0]},{"t":37,"s":[0]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.73],"y":[1]},"o":{"x":[0.27],"y":[0]},"t":28,"s":[100]},{"t":36,"s":[0]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":30,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":3,"cix":2,"bm":0,"ix":3,"mn":"ADBE Vector Group","hd":false}],"ip":10,"op":180,"st":0,"ct":1,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/audio_speed.json b/TMessagesProj/src/main/res/raw/speed_1to2.json similarity index 100% rename from TMessagesProj/src/main/res/raw/audio_speed.json rename to TMessagesProj/src/main/res/raw/speed_1to2.json diff --git a/TMessagesProj/src/main/res/raw/audio_stop_speed.json b/TMessagesProj/src/main/res/raw/speed_2to1.json similarity index 100% rename from TMessagesProj/src/main/res/raw/audio_stop_speed.json rename to TMessagesProj/src/main/res/raw/speed_2to1.json diff --git a/TMessagesProj/src/main/res/raw/speed_fast.json b/TMessagesProj/src/main/res/raw/speed_fast.json new file mode 100644 index 0000000000..e9c9af2cde --- /dev/null +++ b/TMessagesProj/src/main/res/raw/speed_fast.json @@ -0,0 +1 @@ +{"v":"5.10.1","fr":60,"ip":0,"op":75,"w":512,"h":512,"nm":"fast 2","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":4,"ty":4,"nm":"Shape Layer 
2","parent":7,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":3,"s":[11,-240,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":6,"s":[11,-244,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":12,"s":[11,-258,0],"to":[0,0,0],"ti":[1.667,-3.167,0]},{"i":{"x":0.517,"y":1},"o":{"x":0.167,"y":0},"t":17,"s":[11,-246,0],"to":[-0.989,1.88,0],"ti":[2.756,-4.755,0]},{"i":{"x":0.57,"y":1},"o":{"x":0.243,"y":0},"t":33,"s":[2.977,-229.433,0],"to":[-1.887,3.255,0],"ti":[-1.084,-0.474,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":55,"s":[1,-227,0],"to":[2.667,1.167,0],"ti":[-4.333,2,0]},{"t":68,"s":[27,-239,0]}],"ix":2,"l":2},"a":{"a":0,"k":[0,0,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-134,120],[115,119]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":29,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0,0,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":75,"st":0,"ct":1,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Shape Layer 1","parent":7,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[0,0,0],"ix":2,"l":2},"a":{"a":0,"k":[0,0,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-134,120],[115,119]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":29,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0,0,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":75,"st":0,"ct":1,"bm":0},{"ddd":0,"ind":6,"ty":3,"nm":"Null 
1","sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":55,"s":[0]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":65,"s":[3]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":70,"s":[-1]},{"t":74,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[199,388,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":7,"s":[199,280,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":13,"s":[199,306,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":18,"s":[199,314,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":25,"s":[199,299,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":32,"s":[199,314,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":39,"s":[199,299,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":46,"s":[199,314,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":53,"s":[199,299,0],"to":[0,0,0],"ti":[0,0,0]},{"t":62,"s":[199,306,0]}],"ix":2,"l":2},"a":{"a":0,"k":[50,50,0],"ix":1,"l":2},"s":{"a":1,"k":[{"i":{"x":[0.1,0.1,0.1],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":0,"s":[0,0,100]},{"t":9,"s":[100,100,100]}],"ix":6,"l":2}},"ao":0,"ip":0,"op":180,"st":0,"bm":0},{"ddd":0,"ind":7,"ty":4,"nm":"Body 2","parent":6,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":2,"s":[10]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":13,"s":[-14]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":20,"s":[-10]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":27,"s":[-13]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":34,"s":[-10]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":41,"s":[-12]},{"i":{"x":[0.506],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":48,"s":[-9]},{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.31],"y":[0]},"t":55,"s":[-13]},{"t":65,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.2,"y":0},"t":2,"s":[-82.367,118.339,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0,"y":1},"o":{"x":0.2,"y":0},"t":4,"s":[-95.349,134.878,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.953,"y":0},"t":41,"s":[62.651,134.878,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":0.7},"o":{"x":0.01,"y":0.01},"t":62,"s":[-41.349,134.878,0],"to":[0,0,0],"ti":[0,0,0]},{"t":66,"s":[-41.349,134.878,0]}],"ix":2,"l":2},"a":{"a":0,"k":[-148.349,134.878,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[10.547,0],[5.469,0],[7.891,6.953],[0,0],[1.328,0],[11.953,19.453],[0,8.828],[-8.594,-0.234],[-25.703,0],[-25.156,-15.625],[-6.719,0],[-3.438,3.047],[0,0],[0,5.938],[-11.641,0],[-10.938,-16.094],[0,0],[0,-25],[18.438,0],[9.141,5.938],[5,-2.734],[4.609,0],[5.234,-1.016],[0,0],[24.375,0],[0,-5.938],[-3.984,0],[0,0],[-9.062,-19.453],[0,0],[-12.5,0],[0,-13.516],[11.484,0]],"o":[[-10.312,0],[-6.484,0],[0,0],[-1.484,0.078],[-24.297,0],[-10,0.156],[0,-7.969],[10.703,-31.484],[27.031,0],[8.594,4.688],[4.844,0],[0,0],[-9.922,-6.484],[0,-9.141],[14.375,0],[0,0],[27.5,-1.094],[0,21.016],[-15.156,0],[-6.016,2.969],[-4.609,-1.094],[-5.859,0],[0,0],[-11.797,-26.328],[-7.969,0],[0,3.672],[0,0],[18.672,0],[0,0],[7.266,-1.406],[24.062,0],[0,8.75],[-7.891,0]],"v":[[27.305,74.527],[5.898,77.261],[-15.117,68.824],[-47.539,41.558],[-51.758,41.636],[-105.039,15.308],[-1
22.227,0.933],[-107.227,-12.192],[-49.258,-57.817],[19.727,-29.536],[42.461,-20.082],[53.477,-25.082],[9.727,-53.52],[-13.242,-74.848],[18.32,-90.395],[58.711,-67.192],[77.539,-39.77],[122.227,5.621],[93.633,32.964],[59.57,21.402],[42.93,29.996],[29.18,28.668],[12.383,30.152],[7.148,18.355],[-48.867,-20.473],[-64.102,-13.051],[-57.695,-7.426],[-48.164,-7.426],[-4.102,22.183],[5.039,42.261],[29.961,38.746],[67.148,63.589],[48.945,77.261]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":0,"k":{"i":[[0,-4.297],[-4.297,0],[0,4.453],[4.297,0]],"o":[[0,4.219],[4.297,0],[0,-4.141],[-4.297,0]],"v":[[79.727,-4.848],[87.461,2.964],[95.273,-5.239],[87.539,-12.895]],"c":true},"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - Group","hd":false},{"ind":2,"ty":"sh","ix":3,"ks":{"a":0,"k":{"i":[[0,0],[14.297,0],[0,10.312],[-8.594,0],[-3.672,0],[-1.094,0.078]],"o":[[-7.656,5.078],[-16.719,0],[0,-6.875],[6.562,0],[1.484,0],[0,0]],"v":[[-24.258,76.089],[-55.508,83.746],[-83.086,66.48],[-69.102,54.996],[-51.836,56.246],[-48.008,56.089]],"c":true},"ix":2},"nm":"Path 3","mn":"ADBE Vector Shape - Group","hd":false},{"ind":3,"ty":"sh","ix":4,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.2,"y":0},"t":4,"s":[{"i":[[0,0],[0,-28.435],[0,0],[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0]],"o":[[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0],[0,0],[0,-28.435],[0,0]],"v":[[137.378,-137.333],[188.867,-85.845],[188.867,85.845],[137.378,137.333],[-137.378,137.333],[-188.867,85.845],[-188.867,-85.845],[-137.378,-137.333]],"c":true}]},{"i":{"x":0,"y":1},"o":{"x":0.167,"y":0},"t":12,"s":[{"i":[[0,0],[0,-30.199],[0,0],[27.483,0],[0,0],[0,30.199],[0,0],[-27.483,0]],"o":[[27.483,0],[0,0],[0,30.199],[0,0],[-27.483,0],[0,0],[0,-30.199],[0,0]],"v":[[126.631,-155.306],[176.396,-100.624],[176.396,81.711],[126.631,136.393],[-138.928,136.393],[-188.693,81.711],[-188.693,-100.624],[-138.928,-155.306]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.953,"y":0},"t":51,"s":[{"i":[[0,0],[0,-26.886],[0,0],[26.444,0],[0,0],[0,26.886],[0,0],[-26.444,0]],"o":[[26.444,0],[0,0],[0,26.886],[0,0],[-26.444,0],[0,0],[0,-26.886],[0,0]],"v":[[114.899,-124.335],[162.782,-75.651],[162.782,86.683],[114.899,135.367],[-140.619,135.367],[-188.502,86.683],[-188.502,-75.651],[-140.619,-124.335]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":63,"s":[{"i":[[0,0],[0,-28.435],[0,0],[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0]],"o":[[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0],[0,0],[0,-28.435],[0,0]],"v":[[137.378,-137.333],[188.867,-85.845],[188.867,85.845],[137.378,137.333],[-137.378,137.333],[-188.867,85.845],[-188.867,-85.845],[-137.378,-137.333]],"c":true}]},{"t":68,"s":[{"i":[[0,0],[0,-28.435],[0,0],[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0]],"o":[[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0],[0,0],[0,-28.435],[0,0]],"v":[[137.378,-137.333],[188.867,-85.845],[188.867,85.845],[137.378,137.333],[-137.378,137.333],[-188.867,85.845],[-188.867,-85.845],[-137.378,-137.333]],"c":true}]}],"ix":2},"nm":"Path 5","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":2,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Combined-Shape","np":5,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":0,"op":180,"st":0,"ct":1,"bm":0},{"ddd":0,"ind":8,"ty":4,"nm":"Lines","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0,"y":1},"o":{"x":0.2,"y":0},"t":4,"s":[70,277.55,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.953,"y":0},"t":41,"s":[130,267.55,0],"to":[0,0,0],"ti":[0,0,0]},{"t":62,"s":[178,259.55,0]}],"ix":2,"l":2},"a":{"a":0,"k":[-186,-0.45,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":10,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-270.667,102.8],[-208,100.1]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":20,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-270.667,102.8],[-174,99.6]],"c":false}]},{"t":34,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-269.667,101.3],[-155,98.1]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":6,"s":[100]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":20,"s":[0]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":28,"s":[38]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":35,"s":[67]},{"t":42,"s":[0]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.73],"y":[1]},"o":{"x":[0.27],"y":[0]},"t":33,"s":[100]},{"t":41,"s":[0]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":30,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,-204],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 3","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":30,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-270.667,102.8],[-174,99.6]],"c":false}]},{"t":40,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-299.667,101.8],[-174,99.6]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":8,"s":[100]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":18,"s":[0]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":26,"s":[72]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":33,"s":[15]},{"t":40,"s":[0]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.73],"y":[1]},"o":{"x":[0.27],"y":[0]},"t":31,"s":[100]},{"t":39,"s":[0]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":30,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,-100],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":3,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector 
Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":5,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-270.238,87.8],[-134.571,87.6]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":7,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-270.965,68.8],[-135.299,68.6]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":10,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-271.153,77.8],[-135.486,77.6]],"c":false}]},{"t":15,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-269.76,95.8],[-134.094,95.6]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":5,"s":[100]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":15,"s":[19]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":23,"s":[67]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":30,"s":[0]},{"t":37,"s":[0]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.73],"y":[1]},"o":{"x":[0.27],"y":[0]},"t":28,"s":[100]},{"t":36,"s":[0]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":30,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":3,"cix":2,"bm":0,"ix":3,"mn":"ADBE Vector Group","hd":false}],"ip":10,"op":180,"st":0,"ct":1,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/speed_slow.json b/TMessagesProj/src/main/res/raw/speed_slow.json new file mode 100644 index 0000000000..fbde01c614 --- /dev/null +++ b/TMessagesProj/src/main/res/raw/speed_slow.json @@ -0,0 +1 @@ +{"v":"5.10.1","fr":60,"ip":0,"op":72,"w":512,"h":512,"nm":"slow 2","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":3,"ty":3,"nm":"Null 1","sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[219,262,0],"ix":2,"l":2},"a":{"a":0,"k":[50,50,0],"ix":1,"l":2},"s":{"a":1,"k":[{"i":{"x":[0.2,0.2,0.2],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":0,"s":[0,0,100]},{"t":7,"s":[100,100,100]}],"ix":6,"l":2}},"ao":0,"ip":0,"op":180,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Body 
2","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.204],"y":[0.644]},"o":{"x":[0.26],"y":[0]},"t":4,"s":[0]},{"i":{"x":[0.629],"y":[1]},"o":{"x":[0.212],"y":[0.307]},"t":22,"s":[14]},{"i":{"x":[0.7],"y":[1]},"o":{"x":[0.9],"y":[0]},"t":47,"s":[20]},{"t":59,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0,"y":1},"o":{"x":0.2,"y":0},"t":4,"s":[170.651,178.878,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.7,"y":1},"o":{"x":0.9,"y":0},"t":47,"s":[226.651,178.878,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.597,"y":0.597},"o":{"x":0,"y":0},"t":59,"s":[231.651,178.878,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.717,"y":0.717},"o":{"x":0.375,"y":0.375},"t":69,"s":[231.651,178.878,0],"to":[0,0,0],"ti":[0,0,0]},{"t":71,"s":[231.651,178.878,0]}],"ix":2,"l":2},"a":{"a":0,"k":[143.651,134.878,0],"ix":1,"l":2},"s":{"a":1,"k":[{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":47,"s":[100,100,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.9,0.9,0.9],"y":[0,0,0]},"t":53,"s":[100,102,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":60,"s":[100,90,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":66,"s":[100,104,100]},{"t":71,"s":[100,100,100]}],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[-14.531,-17.734],[21.094,-0.078],[13.984,15.625],[-22.656,0]],"o":[[-14.062,15.859],[-20.234,0],[14.141,-17.656],[22.5,0.078]],"v":[[13.608,-24.464],[-39.674,-0.48],[-92.33,-23.918],[-39.049,-50.011]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":0,"k":{"i":[[22.031,0],[11.875,-5.078],[3.906,8.281],[-2.891,10],[-19.062,0],[0,-27.969]],"o":[[-14.766,0],[-4.453,-10.469],[5.078,-3.203],[4.922,-17.734],[22.344,0],[0,12.344]],"v":[[94.311,29.598],[57.748,41.082],[44.389,10.848],[56.967,-7.277],[92.904,-37.355],[130.17,9.364]],"c":true},"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - Group","hd":false},{"ind":2,"ty":"sh","ix":3,"ks":{"a":0,"k":{"i":[[5.312,-0.312],[6.406,-2.109],[1.562,16.641],[-9.062,9.531],[-6.328,-12.734]],"o":[[-8.047,0.781],[-13.438,-11.406],[10.547,-3.984],[8.047,12.812],[-2.891,1.562]],"v":[[34.858,47.176],[12.826,51.707],[-10.846,6.161],[20.483,-15.089],[48.451,44.364]],"c":true},"ix":2},"nm":"Path 3","mn":"ADBE Vector Shape - Group","hd":false},{"ind":3,"ty":"sh","ix":4,"ks":{"a":0,"k":{"i":[[-11.328,-4.062],[12.891,-10.703],[7.188,0],[0,0],[0,5.938],[-3.75,3.672],[-6.562,12.812],[-3.516,5.391]],"o":[[-1.797,16.25],[-6.094,-2.344],[0,0],[-9.688,0],[0,-4.219],[5.078,-5],[3.438,-6.641],[9.062,9.453]],"v":[[-67.408,6.473],[-91.471,52.098],[-110.767,47.957],[-119.596,47.957],[-134.752,38.661],[-129.127,28.348],[-110.142,3.973],[-99.517,-14.074]],"c":true},"ix":2},"nm":"Path 5","mn":"ADBE Vector Shape - Group","hd":false},{"ind":4,"ty":"sh","ix":5,"ks":{"a":0,"k":{"i":[[0,-4.375],[-4.453,0],[0,4.375],[4.297,0]],"o":[[0,4.375],[4.297,0],[0,-4.375],[-4.453,0]],"v":[[86.889,-5.793],[94.936,2.254],[102.904,-5.793],[94.936,-13.761]],"c":true},"ix":2},"nm":"Path 6","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":5,"ty":"sh","ix":6,"ks":{"a":0,"k":{"i":[[-14.297,-11.797],[18.047,0],[10.625,4.531],[-1.328,17.5],[-5.859,0.078],[-5.938,1.406]],"o":[[-10.547,5.156],[-17.344,0],[13.672,-12.266],[5.547,1.25],[6.016,0],[1.094,18.047]],"v":[[2.358,56.707],[-39.439,69.286],[-80.142,56.942],[-56.705,9.598],[-39.517,11.473],[-21.627,9.442]],"c":true},"ix":2},"nm":"Path 7","mn":"ADBE Vector Shape - Group","hd":false},{"ind":6,"ty":"sh","ix":7,"ks":{"a":0,"k":{"i":[[0,-7.031],[11.641,0],[7.5,8.125],[-10.234,1.641],[-3.906,-0.469]],"o":[[0,8.125],[-13.906,0],[8.359,-5.078],[4.141,0.469],[6.328,1.25]],"v":[[55.873,68.661],[36.108,81.629],[-0.455,68.27],[32.045,55.692],[43.373,57.02]],"c":true},"ix":2},"nm":"Path 8","mn":"ADBE Vector Shape - Group","hd":false},{"ind":7,"ty":"sh","ix":8,"ks":{"a":0,"k":{"i":[[-9.766,-5.312],[13.828,0],[0,8.125],[-5.234,1.25],[-4.141,1.25]],"o":[[-7.891,7.656],[-11.484,0],[0,-5.781],[4.688,-1.094],[10.469,2.266]],"v":[[-76.861,68.661],[-112.408,81.551],[-131.783,68.582],[-121.471,58.661],[-108.736,55.77]],"c":true},"ix":2},"nm":"Path 9","mn":"ADBE Vector Shape - Group","hd":false},{"ind":8,"ty":"sh","ix":9,"ks":{"a":1,"k":[{"i":{"x":0,"y":1},"o":{"x":0.2,"y":0},"t":4,"s":[{"i":[[0,0],[0,-28.435],[0,0],[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0]],"o":[[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0],[0,0],[0,-28.435],[0,0]],"v":[[137.378,-137.333],[188.867,-85.845],[188.867,85.845],[137.378,137.333],[-137.378,137.333],[-188.867,85.845],[-188.867,-85.845],[-137.378,-137.333]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.9,"y":0},"t":47,"s":[{"i":[[0,0],[0,-28.435],[0,0],[26.692,0],[0,0],[0,28.435],[0,0],[-26.692,0]],"o":[[26.692,0],[0,0],[0,28.435],[0,0],[-26.692,0],[0,0],[0,-28.435],[0,0]],"v":[[133.326,-137.333],[181.657,-85.845],[181.657,85.845],[133.326,137.333],[-124.581,137.333],[-172.913,85.845],[-172.913,-85.845],[-124.581,-137.333]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":59,"s":[{"i":[[0,0],[0,-28.435],[0,0],[30.112,0],[0,0],[0,28.435],[0,0],[-30.112,0]],"o":[[30.112,0],[0,0],[0,28.435],[0,0],[-30.112,0],[0,0],[0,-28.435],[0,0]],"v":[[145.476,-137.333],[200,-85.845],[200,85.845],[145.476,137.333],[-145.476,137.333],[-200,85.845],[-200,-85.845],[-145.476,-137.333]],"c":true}]},{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":66,"s":[{"i":[[0,0],[0,-28.435],[0,0],[27.138,0],[0,0],[0,28.435],[0,0],[-27.138,0]],"o":[[27.138,0],[0,0],[0,28.435],[0,0],[-27.138,0],[0,0],[0,-28.435],[0,0]],"v":[[133.36,-137.333],[182.5,-85.845],[182.5,85.845],[133.36,137.333],[-128.86,137.333],[-178,85.845],[-178,-85.845],[-128.86,-137.333]],"c":true}]},{"t":71,"s":[{"i":[[0,0],[0,-28.435],[0,0],[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0]],"o":[[28.435,0],[0,0],[0,28.435],[0,0],[-28.435,0],[0,0],[0,-28.435],[0,0]],"v":[[137.378,-137.333],[188.867,-85.845],[188.867,85.845],[137.378,137.333],[-137.378,137.333],[-188.867,85.845],[-188.867,-85.845],[-137.378,-137.333]],"c":true}]}],"ix":2},"nm":"Path 4","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":2,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Combined-Shape","np":10,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":0,"op":180,"st":0,"ct":1,"bm":0},{"ddd":0,"ind":6,"ty":4,"nm":"Lines","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.461,"y":1},"o":{"x":0.387,"y":0},"t":6,"s":[414.762,400.8,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.455,"y":1},"o":{"x":0.171,"y":0},"t":21,"s":[350.327,402.8,0],"to":[0,0,0],"ti":[0,0,0]},{"t":54,"s":[306.762,400.8,0]}],"ix":2,"l":2},"a":{"a":0,"k":[-270.238,87.8,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0,"y":1},"o":{"x":0.9,"y":0},"t":54,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-483.238,88.6],[-134.571,88.6]],"c":false}]},{"t":58,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-484.238,84.6],[-135.571,84.6]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.252],"y":[0.429]},"o":{"x":[0.386],"y":[0]},"t":5,"s":[68]},{"i":{"x":[0.499],"y":[1]},"o":{"x":[0.194],"y":[0.298]},"t":20,"s":[82]},{"i":{"x":[0],"y":[1]},"o":{"x":[0.9],"y":[0]},"t":48,"s":[95]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.9],"y":[0]},"t":54,"s":[95]},{"t":58,"s":[45]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0],"y":[1]},"o":{"x":[0.2],"y":[0]},"t":5,"s":[68]},{"i":{"x":[0],"y":[1]},"o":{"x":[0.9],"y":[0]},"t":48,"s":[15]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.9],"y":[0]},"t":54,"s":[15]},{"t":58,"s":[45]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":26,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":180,"st":0,"ct":1,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/utyan_cache.json b/TMessagesProj/src/main/res/raw/utyan_cache.json new file mode 100644 index 0000000000..d05613efc4 --- /dev/null +++ b/TMessagesProj/src/main/res/raw/utyan_cache.json @@ -0,0 +1 @@ +{"tgs":1,"v":"5.5.2.2","fr":60,"ip":0,"op":180,"w":512,"h":512,"nm":"_043_CLEAN_OUT","ddd":0,"assets":[{"id":"comp_0","layers":[{"ddd":0,"ind":1,"ty":4,"nm":"hand_ 
bl","parent":2,"sr":1,"ks":{"p":{"a":0,"k":[2.693,-16.281,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":53,"s":[{"i":[[0,0],[-8.532,2.534]],"o":[[14.618,0.702],[0,0]],"v":[[-8.164,6.103],[30.62,-1.089]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":63,"s":[{"i":[[0,0],[-10.305,1.798]],"o":[[5.674,-9.803],[0,0]],"v":[[9.681,6.88],[38.076,-13.532]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":83,"s":[{"i":[[0,0],[-8.532,2.534]],"o":[[14.618,0.702],[0,0]],"v":[[-8.164,6.103],[30.62,-1.089]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":93,"s":[{"i":[[0,0],[-10.305,1.798]],"o":[[5.674,-9.803],[0,0]],"v":[[15.894,3.086],[44.181,-16.778]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":113,"s":[{"i":[[0,0],[-8.532,2.534]],"o":[[14.618,0.702],[0,0]],"v":[[-8.164,6.103],[30.62,-1.089]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":123,"s":[{"i":[[0,0],[-10.305,1.798]],"o":[[5.674,-9.803],[0,0]],"v":[[15.894,3.086],[44.181,-16.778]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":143,"s":[{"i":[[0,0],[-8.532,2.534]],"o":[[14.618,0.702],[0,0]],"v":[[-8.164,6.103],[30.62,-1.089]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":153,"s":[{"i":[[0,0],[-9.549,-4.272]],"o":[[10.185,-4.955],[0,0]],"v":[[-6.783,5.615],[27.776,4.963]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":173,"s":[{"i":[[0,0],[-9.049,6.31]],"o":[[15.247,-0.769],[0,0]],"v":[[-16.678,10.014],[26.813,-5.487]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":183,"s":[{"i":[[0,0],[-9.549,-4.272]],"o":[[10.185,-4.955],[0,0]],"v":[[-6.783,5.615],[27.776,4.963]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":203,"s":[{"i":[[0,0],[-9.049,6.31]],"o":[[15.247,-0.769],[0,0]],"v":[[-16.678,10.014],[26.813,-5.487]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":213,"s":[{"i":[[0,0],[-9.549,-4.272]],"o":[[10.185,-4.955],[0,0]],"v":[[-6.783,5.615],[27.776,4.963]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":233,"s":[{"i":[[0,0],[-8.532,2.534]],"o":[[14.618,0.702],[0,0]],"v":[[-8.164,6.103],[30.62,-1.089]],"c":false}]},{"t":243,"s":[{"i":[[0,0],[-10.305,1.798]],"o":[[5.674,-9.803],[0,0]],"v":[[9.681,6.88],[38.076,-13.532]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":84,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":192,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":204,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[5]},{"t":240,"s":[5]}]},"e":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":84,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":192,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":204,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[100]},{"t":240,"s":[100]}]},"o":{"a":0,"k":0},"m":1,"nm":"Trim Paths 
1","hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"hand2","parent":13,"sr":1,"ks":{"r":{"a":0,"k":15.439},"p":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":50,"s":[51.786,139.826,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":60,"s":[-13.098,139.826,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":80,"s":[51.786,139.826,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":90,"s":[-13.098,139.826,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":110,"s":[51.786,139.826,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":120,"s":[-13.098,139.826,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[51.786,139.826,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":150,"s":[-13.098,139.826,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":170,"s":[51.786,139.826,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":180,"s":[-13.098,139.826,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":200,"s":[51.786,139.826,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":210,"s":[-13.098,139.826,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":230,"s":[51.786,139.826,0],"to":[0,0,0],"ti":[0,0,0]},{"t":240,"s":[-13.098,139.826,0]}]},"a":{"a":0,"k":[-15,32,0]},"s":{"a":0,"k":[100.032,99.928,100]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":33,"s":[{"i":[[0,0],[-16.252,-16.375],[12.151,-19.676]],"o":[[2.584,-20.092],[11.13,11.213],[0,0]],"v":[[5.16,-4.308],[57.077,-18.917],[16.039,28.035]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":53,"s":[{"i":[[0,0],[-11.573,-8.137],[49.081,-21.581]],"o":[[38.059,2.803],[12.924,9.087],[0,0]],"v":[[-21.197,-20.9],[49.896,-29.908],[-8.23,47.203]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":63,"s":[{"i":[[0,0],[-16.252,-16.375],[12.151,-19.676]],"o":[[2.584,-20.092],[11.13,11.213],[0,0]],"v":[[3.478,-16.042],[71.095,-35.769],[16.039,28.035]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":83,"s":[{"i":[[0,0],[-11.573,-8.137],[49.081,-21.581]],"o":[[38.059,2.803],[12.924,9.087],[0,0]],"v":[[-21.197,-20.9],[49.896,-29.908],[-8.23,47.203]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":93,"s":[{"i":[[0,0],[-16.252,-16.375],[12.151,-19.676]],"o":[[2.584,-20.092],[11.13,11.213],[0,0]],"v":[[12.034,-20.743],[71.095,-35.769],[16.039,28.035]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":113,"s":[{"i":[[0,0],[-11.573,-8.137],[49.081,-21.581]],"o":[[38.059,2.803],[12.924,9.087],[0,0]],"v":[[-21.197,-20.9],[49.896,-29.908],[-8.23,47.203]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":123,"s":[{"i":[[0,0],[-16.252,-16.375],[12.151,-19.676]],"o":[[2.584,-20.092],[11.13,11.213],[0,0]],"v":[[12.034,-20.743],[71.095,-35.769],[16.039,28.035]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":143,"s":[{"i":[[0,0],[-11.573,-8.137],[49.081,-21.581]],"o":[[38.059,2.803],[12.924,9.087],[0,0]],"v":[[-21.197,-20.9],[49.896,-29.908],[-8.23,47.203]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":153,"s":[{"i":[[0,0],[-16.252,-16.375],[40.871,-45.265]],"o":[[51.447,-25.296],[11.13,11.213],[0,0]],"v":[[-32.209,-10.068],[55
.053,-7.456],[-26.805,51.301]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":173,"s":[{"i":[[0,0],[-16.252,-16.375],[49.081,-21.581]],"o":[[53.513,9.95],[11.13,11.213],[0,0]],"v":[[-44.989,-16.917],[48.432,-35.214],[-8.23,47.203]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":183,"s":[{"i":[[0,0],[-16.252,-16.375],[40.871,-45.265]],"o":[[51.447,-25.296],[11.13,11.213],[0,0]],"v":[[-32.209,-10.068],[55.053,-7.456],[-26.805,51.301]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":203,"s":[{"i":[[0,0],[-16.252,-16.375],[49.081,-21.581]],"o":[[53.513,9.95],[11.13,11.213],[0,0]],"v":[[-44.989,-16.917],[48.432,-35.214],[-8.23,47.203]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":213,"s":[{"i":[[0,0],[-16.252,-16.375],[40.871,-45.265]],"o":[[51.447,-25.296],[11.13,11.213],[0,0]],"v":[[-32.209,-10.068],[55.053,-7.456],[-26.805,51.301]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":233,"s":[{"i":[[0,0],[-11.573,-8.137],[49.081,-21.581]],"o":[[38.059,2.803],[12.924,9.087],[0,0]],"v":[[-21.197,-20.9],[49.896,-29.908],[-8.23,47.203]],"c":false}]},{"t":243,"s":[{"i":[[0,0],[-16.252,-16.375],[12.151,-19.676]],"o":[[2.584,-20.092],[11.13,11.213],[0,0]],"v":[[3.478,-16.042],[71.095,-35.769],[16.039,28.035]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.980392158031,0.564705908298,0.086274512112,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":10},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0.835294127464,0.152941182256,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"hand_bl","parent":12,"sr":1,"ks":{"p":{"a":0,"k":[0.298,-6.182,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":53,"s":[{"i":[[0,0],[9.84,-10.211]],"o":[[-6.264,-0.098],[0,0]],"v":[[19.186,5.441],[-14.711,19.203]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":63,"s":[{"i":[[0,0],[12.011,2.23]],"o":[[-14.143,6.919],[0,0]],"v":[[26.662,4.109],[-20.865,9.586]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":83,"s":[{"i":[[0,0],[10.227,-11.995]],"o":[[-9.926,1.433],[0,0]],"v":[[23.521,-0.81],[-10.955,23.711]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":93,"s":[{"i":[[0,0],[12.011,2.23]],"o":[[-14.143,6.919],[0,0]],"v":[[26.662,4.109],[-20.865,9.586]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":113,"s":[{"i":[[0,0],[10.227,-11.995]],"o":[[-9.926,1.433],[0,0]],"v":[[23.521,-0.81],[-10.955,23.711]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":123,"s":[{"i":[[0,0],[12.011,2.23]],"o":[[-14.143,6.919],[0,0]],"v":[[26.662,4.109],[-20.865,9.586]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":143,"s":[{"i":[[0,0],[10.227,-11.995]],"o":[[-9.926,1.433],[0,0]],"v":[[23.521,-0.81],[-10.955,23.711]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":153,"s":[{"i":[[0,0],[4.58,3.732]],"o":[[-13.967,3.905],[0,0]],"v":[[27.247,23.905],[-7.055,16.788]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":173,"s":[{"i":[[0,0],[9.84,-10.211]],"o":[[-6.264,-0.098],[0,0]],"v":[[19.186,5.441],[-14.711,19.203]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":183,"s":[{"i":[[0,0],[4.58,3.732]],"o":[[-13.967,3.905],[0,0]],"v":[[27.247,23.905],[-7.055,16.788]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":203,"s":[{"i":[[0,0],[9.84,-10.211]],"o":[[-6.264,-0.098],[0,0]],"v":[[19.186,5.441],[-14.711,19.203]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":213,"s":[{"i":[[0,0],[4.58,3.732]],"o":[[-13.967,3.905],[0,0]],"v":[[27.247,23.905],[-7.055,16.788]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":233,"s":[{"i":[[0,0],[9.84,-10.211]],"o":[[-6.264,-0.098],[0,0]],"v":[[19.186,5.441],[-14.711,19.203]],"c":false}]},{"t":243,"s":[{"i":[[0,0],[12.011,2.23]],"o":[[-14.143,6.919],[0,0]],"v":[[26.662,4.109],[-20.865,9.586]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8.056},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":84,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":192,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":204,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[5]},{"t":240,"s":[5]}]},"e":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":84,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":192,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":204,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[100]},{"t":240,"s":[100]}]},"o":{"a":0,"k":0},"m":1,"nm":"Trim Paths 1","hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Shape Layer 
8","sr":1,"ks":{"o":{"a":1,"k":[{"t":3,"s":[0],"h":1},{"t":53,"s":[100],"h":1},{"t":80,"s":[0],"h":1},{"t":116,"s":[100],"h":1},{"t":143,"s":[0],"h":1},{"t":176,"s":[100],"h":1},{"t":203,"s":[0],"h":1},{"t":233,"s":[100],"h":1},{"t":260,"s":[0],"h":1}]},"p":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":49,"s":[263.711,465.15,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":61,"s":[65.461,465.15,0],"to":[0,0,0],"ti":[0,0,0]},{"t":80,"s":[65.461,394.65,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":112,"s":[263.711,465.15,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":124,"s":[65.461,465.15,0],"to":[0,0,0],"ti":[0,0,0]},{"t":148,"s":[65.461,394.65,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":172,"s":[263.711,465.15,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":184,"s":[65.461,465.15,0],"to":[0,0,0],"ti":[0,0,0]},{"t":210,"s":[65.461,394.65,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":229,"s":[263.711,465.15,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":241,"s":[65.461,465.15,0],"to":[0,0,0],"ti":[0,0,0]},{"t":260,"s":[65.461,394.65,0],"h":1}]},"a":{"a":0,"k":[-190.539,209.15,0]},"s":{"a":1,"k":[{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":51,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":62,"s":[100,100,100]},{"t":84,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":114,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":125,"s":[100,100,100]},{"t":147,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":174,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":185,"s":[100,100,100]},{"t":207,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":231,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":242,"s":[100,100,100]},{"t":264,"s":[10,10,100],"h":1}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":49,"s":[{"i":[[3.598,-1.799],[1.88,-6.181],[-6.75,-1.303],[-1.282,6.499]],"o":[[-1.799,-8.996],[-5.995,-1.631],[-2.025,4.647],[11.143,-0.151]],"v":[[-182.639,206.721],[-193.455,206.045],[-193.75,212.367],[-183.643,213.665]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":59,"s":[{"i":[[10.281,-5.14],[5.371,-17.659],[-19.285,-3.722],[-3.662,18.569]],"o":[[-5.14,-25.702],[-17.129,-4.659],[-5.785,13.278],[31.838,-0.431]],"v":[[-175.468,202.591],[-206.371,200.659],[-207.215,218.722],[-178.338,222.431]],"c":true}]},{"t":84,"s":[{"i":[[3.598,-1.799],[1.88,-6.181],[-6.75,-1.303],[-1.282,6.499]],"o":[[-1.799,-8.996],[-5.995,-1.631],[-2.025,4.647],[11.143,-0.151]],"v":[[-182.639,206.721],[-193.455,206.045],[-193.75,212.367],[-183.643,213.665]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":112,"s":[{"i":[[3.598,-1.799],[1.88,-6.181],[-6.75,-1.303],[-1.282,6.499]],"o":[[-1.799,-8.996],[-5.995,-1.631],[-2.025,4.647],[11.143,-0.151]],"v":[[-182.639,206.721],[-193.455,206.045],[-193.75,212.367],[-183.643,213.665]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y"
:0},"t":122,"s":[{"i":[[10.281,-5.14],[5.371,-17.659],[-19.285,-3.722],[-3.662,18.569]],"o":[[-5.14,-25.702],[-17.129,-4.659],[-5.785,13.278],[31.838,-0.431]],"v":[[-175.468,202.591],[-206.371,200.659],[-207.215,218.722],[-178.338,222.431]],"c":true}]},{"t":147,"s":[{"i":[[3.598,-1.799],[1.88,-6.181],[-6.75,-1.303],[-1.282,6.499]],"o":[[-1.799,-8.996],[-5.995,-1.631],[-2.025,4.647],[11.143,-0.151]],"v":[[-182.639,206.721],[-193.455,206.045],[-193.75,212.367],[-183.643,213.665]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":172,"s":[{"i":[[3.598,-1.799],[1.88,-6.181],[-6.75,-1.303],[-1.282,6.499]],"o":[[-1.799,-8.996],[-5.995,-1.631],[-2.025,4.647],[11.143,-0.151]],"v":[[-182.639,206.721],[-193.455,206.045],[-193.75,212.367],[-183.643,213.665]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":182,"s":[{"i":[[10.281,-5.14],[5.371,-17.659],[-19.285,-3.722],[-3.662,18.569]],"o":[[-5.14,-25.702],[-17.129,-4.659],[-5.785,13.278],[31.838,-0.431]],"v":[[-175.468,202.591],[-206.371,200.659],[-207.215,218.722],[-178.338,222.431]],"c":true}]},{"t":207,"s":[{"i":[[3.598,-1.799],[1.88,-6.181],[-6.75,-1.303],[-1.282,6.499]],"o":[[-1.799,-8.996],[-5.995,-1.631],[-2.025,4.647],[11.143,-0.151]],"v":[[-182.639,206.721],[-193.455,206.045],[-193.75,212.367],[-183.643,213.665]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":229,"s":[{"i":[[3.598,-1.799],[1.88,-6.181],[-6.75,-1.303],[-1.282,6.499]],"o":[[-1.799,-8.996],[-5.995,-1.631],[-2.025,4.647],[11.143,-0.151]],"v":[[-182.639,206.721],[-193.455,206.045],[-193.75,212.367],[-183.643,213.665]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":239,"s":[{"i":[[10.281,-5.14],[5.371,-17.659],[-19.285,-3.722],[-3.662,18.569]],"o":[[-5.14,-25.702],[-17.129,-4.659],[-5.785,13.278],[31.838,-0.431]],"v":[[-175.468,202.591],[-206.371,200.659],[-207.215,218.722],[-178.338,222.431]],"c":true}]},{"t":264,"s":[{"i":[[3.598,-1.799],[1.88,-6.181],[-6.75,-1.303],[-1.282,6.499]],"o":[[-1.799,-8.996],[-5.995,-1.631],[-2.025,4.647],[11.143,-0.151]],"v":[[-182.639,206.721],[-193.455,206.045],[-193.75,212.367],[-183.643,213.665]],"c":true}],"h":1}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.780392216701,0.800000059838,0.756862804936,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.781857898189,0.800107230392,0.758165366977,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Shape 1","bm":0,"hd":false}],"ip":-30,"op":270,"st":-30,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Shape Layer 
7","sr":1,"ks":{"o":{"a":1,"k":[{"t":3,"s":[0],"h":1},{"t":53,"s":[100],"h":1},{"t":80,"s":[0],"h":1},{"t":116,"s":[100],"h":1},{"t":143,"s":[0],"h":1},{"t":176,"s":[100],"h":1},{"t":203,"s":[0],"h":1},{"t":233,"s":[100],"h":1},{"t":260,"s":[0],"h":1}]},"p":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":51,"s":[263.574,475.397,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":59,"s":[71.324,475.397,0],"to":[0,0,0],"ti":[0,0,0]},{"t":88,"s":[72.074,394.897,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":114,"s":[263.574,475.397,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":122,"s":[71.324,475.397,0],"to":[0,0,0],"ti":[0,0,0]},{"t":142,"s":[72.074,394.897,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":174,"s":[263.574,475.397,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":182,"s":[71.324,475.397,0],"to":[0,0,0],"ti":[0,0,0]},{"t":205,"s":[72.074,394.897,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":231,"s":[263.574,475.397,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":239,"s":[71.324,475.397,0],"to":[0,0,0],"ti":[0,0,0]},{"t":268,"s":[72.074,394.897,0],"h":1}]},"a":{"a":0,"k":[-119.176,209.397,0]},"s":{"a":1,"k":[{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":51,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":62,"s":[100,100,100]},{"t":80,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":114,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":125,"s":[100,100,100]},{"t":143,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":174,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":185,"s":[100,100,100]},{"t":203,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":231,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":242,"s":[100,100,100]},{"t":260,"s":[10,10,100],"h":1}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":49,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-114.104,206.691],[-122.216,206.015],[-122.438,212.337],[-114.857,213.635]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":59,"s":[{"i":[[15.972,-3.187],[3.626,-11.92],[-23.428,-0.718],[-2.859,5.961]],"o":[[-3.47,-17.349],[-20.485,-1.607],[1.483,10.8],[33.803,0.094]],"v":[[-111.553,203.061],[-134.163,200.008],[-132.233,220.7],[-105.99,223.204]],"c":true}]},{"t":80,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-114.104,206.691],[-122.216,206.015],[-122.438,212.337],[-114.857,213.635]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":112,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-114.104,206.691],[-122.216,206.015],[-122.438,212.337],[-114.857,213.635]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0
.333,"y":0},"t":122,"s":[{"i":[[15.972,-3.187],[3.626,-11.92],[-23.428,-0.718],[-2.859,5.961]],"o":[[-3.47,-17.349],[-20.485,-1.607],[1.483,10.8],[33.803,0.094]],"v":[[-111.553,203.061],[-134.163,200.008],[-132.233,220.7],[-105.99,223.204]],"c":true}]},{"t":143,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-114.104,206.691],[-122.216,206.015],[-122.438,212.337],[-114.857,213.635]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":172,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-114.104,206.691],[-122.216,206.015],[-122.438,212.337],[-114.857,213.635]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":182,"s":[{"i":[[15.972,-3.187],[3.626,-11.92],[-23.428,-0.718],[-2.859,5.961]],"o":[[-3.47,-17.349],[-20.485,-1.607],[1.483,10.8],[33.803,0.094]],"v":[[-111.553,203.061],[-134.163,200.008],[-132.233,220.7],[-105.99,223.204]],"c":true}]},{"t":203,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-114.104,206.691],[-122.216,206.015],[-122.438,212.337],[-114.857,213.635]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":229,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-114.104,206.691],[-122.216,206.015],[-122.438,212.337],[-114.857,213.635]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":239,"s":[{"i":[[15.972,-3.187],[3.626,-11.92],[-23.428,-0.718],[-2.859,5.961]],"o":[[-3.47,-17.349],[-20.485,-1.607],[1.483,10.8],[33.803,0.094]],"v":[[-111.553,203.061],[-134.163,200.008],[-132.233,220.7],[-105.99,223.204]],"c":true}]},{"t":260,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-114.104,206.691],[-122.216,206.015],[-122.438,212.337],[-114.857,213.635]],"c":true}],"h":1}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.780392216701,0.800000059838,0.756862804936,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.781857898189,0.800107230392,0.758165366977,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Shape 1","bm":0,"hd":false}],"ip":-30,"op":270,"st":-30,"bm":0},{"ddd":0,"ind":6,"ty":4,"nm":"Shape Layer 
6","sr":1,"ks":{"o":{"a":1,"k":[{"t":3,"s":[0],"h":1},{"t":53,"s":[100],"h":1},{"t":80,"s":[0],"h":1},{"t":116,"s":[100],"h":1},{"t":143,"s":[0],"h":1},{"t":176,"s":[100],"h":1},{"t":203,"s":[0],"h":1},{"t":233,"s":[100],"h":1},{"t":260,"s":[0],"h":1}]},"p":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":51,"s":[319.211,475.15,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":61,"s":[39.961,475.15,0],"to":[0,0,0],"ti":[0,0,0]},{"t":82,"s":[40.711,394.65,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":114,"s":[319.211,475.15,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":124,"s":[39.961,475.15,0],"to":[0,0,0],"ti":[0,0,0]},{"t":146,"s":[40.711,394.65,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":174,"s":[319.211,475.15,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":184,"s":[39.961,475.15,0],"to":[0,0,0],"ti":[0,0,0]},{"t":200,"s":[40.711,394.65,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":231,"s":[319.211,475.15,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":241,"s":[39.961,475.15,0],"to":[0,0,0],"ti":[0,0,0]},{"t":262,"s":[40.711,394.65,0],"h":1}]},"a":{"a":0,"k":[-190.539,209.15,0]},"s":{"a":1,"k":[{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":51,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":62,"s":[100,100,100]},{"t":80,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":114,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":125,"s":[100,100,100]},{"t":143,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":174,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":185,"s":[100,100,100]},{"t":203,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":231,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":242,"s":[100,100,100]},{"t":260,"s":[10,10,100],"h":1}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":49,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-183.604,206.721],[-191.716,206.045],[-191.938,212.367],[-184.357,213.665]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":59,"s":[{"i":[[6.939,-3.47],[3.626,-11.92],[-13.017,-2.513],[-2.859,5.961]],"o":[[-3.47,-17.349],[-11.562,-3.145],[-4.185,4.194],[21.49,-0.291]],"v":[[-179.053,204.656],[-204.163,203.602],[-205.233,220.794],[-178.99,223.298]],"c":true}]},{"t":84,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-183.604,206.721],[-191.716,206.045],[-191.938,212.367],[-184.357,213.665]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":112,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-183.604,206.721],[-191.716,206.045],[-191.938,212.367],[-184.357,213.665]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0}
,"t":122,"s":[{"i":[[6.939,-3.47],[3.626,-11.92],[-13.017,-2.513],[-2.859,5.961]],"o":[[-3.47,-17.349],[-11.562,-3.145],[-4.185,4.194],[21.49,-0.291]],"v":[[-179.053,204.656],[-204.163,203.602],[-205.233,220.794],[-178.99,223.298]],"c":true}]},{"t":147,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-183.604,206.721],[-191.716,206.045],[-191.938,212.367],[-184.357,213.665]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":172,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-183.604,206.721],[-191.716,206.045],[-191.938,212.367],[-184.357,213.665]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":182,"s":[{"i":[[6.939,-3.47],[3.626,-11.92],[-13.017,-2.513],[-2.859,5.961]],"o":[[-3.47,-17.349],[-11.562,-3.145],[-4.185,4.194],[21.49,-0.291]],"v":[[-179.053,204.656],[-204.163,203.602],[-205.233,220.794],[-178.99,223.298]],"c":true}]},{"t":207,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-183.604,206.721],[-191.716,206.045],[-191.938,212.367],[-184.357,213.665]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":229,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-183.604,206.721],[-191.716,206.045],[-191.938,212.367],[-184.357,213.665]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":239,"s":[{"i":[[6.939,-3.47],[3.626,-11.92],[-13.017,-2.513],[-2.859,5.961]],"o":[[-3.47,-17.349],[-11.562,-3.145],[-4.185,4.194],[21.49,-0.291]],"v":[[-179.053,204.656],[-204.163,203.602],[-205.233,220.794],[-178.99,223.298]],"c":true}]},{"t":264,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-183.604,206.721],[-191.716,206.045],[-191.938,212.367],[-184.357,213.665]],"c":true}],"h":1}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.780392216701,0.800000059838,0.756862804936,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.781857898189,0.800107230392,0.758165366977,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Shape 1","bm":0,"hd":false}],"ip":-30,"op":270,"st":-30,"bm":0},{"ddd":0,"ind":7,"ty":4,"nm":"Shape Layer 
5","sr":1,"ks":{"o":{"a":1,"k":[{"t":3,"s":[0],"h":1},{"t":53,"s":[100],"h":1},{"t":84,"s":[0],"h":1},{"t":116,"s":[100],"h":1},{"t":147,"s":[0],"h":1},{"t":176,"s":[100],"h":1},{"t":207,"s":[0],"h":1},{"t":233,"s":[100],"h":1},{"t":264,"s":[0],"h":1}]},"p":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":49,"s":[277.003,470.829,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":61,"s":[77.253,470.829,0],"to":[0,0,0],"ti":[0,0,0]},{"t":78,"s":[77.253,400.329,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":112,"s":[277.003,470.829,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":124,"s":[77.253,470.829,0],"to":[0,0,0],"ti":[0,0,0]},{"t":151,"s":[77.253,400.329,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":172,"s":[277.003,470.829,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":184,"s":[77.253,470.829,0],"to":[0,0,0],"ti":[0,0,0]},{"t":211,"s":[77.253,400.329,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":229,"s":[277.003,470.829,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":241,"s":[77.253,470.829,0],"to":[0,0,0],"ti":[0,0,0]},{"t":258,"s":[77.253,400.329,0],"h":1}]},"a":{"a":0,"k":[-178.747,214.829,0]},"s":{"a":1,"k":[{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":51,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":62,"s":[100,100,100]},{"t":86,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":114,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":125,"s":[100,100,100]},{"t":149,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":174,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":185,"s":[100,100,100]},{"t":209,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":231,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":242,"s":[100,100,100]},{"t":266,"s":[10,10,100],"h":1}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":49,"s":[{"i":[[1.439,-0.719],[1.379,-3.417],[-8.43,-1.972],[0.075,12.529],[5.174,-1.554]],"o":[[-0.719,-3.596],[-0.599,-1.319],[-3.93,7.278],[5.575,0.42],[-0.18,-1.678]],"v":[[-176.731,208.985],[-183.924,209.765],[-184.57,214.472],[-173.575,215.971],[-172.924,209.554]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":59,"s":[{"i":[[10.281,-5.14],[9.852,-24.417],[-19.035,-1.472],[-10.962,-0.56],[9.121,2.485]],"o":[[-5.14,-25.702],[-4.284,-9.424],[16.28,1.259],[35.588,1.819],[-1.285,-11.994]],"v":[[-177.218,214.591],[-212.871,216.159],[-221.215,235.472],[-180.338,236.681],[-161.871,223.515]],"c":true}]},{"t":84,"s":[{"i":[[1.439,-0.719],[1.379,-3.417],[-8.43,-1.972],[0.075,12.529],[5.174,-1.554]],"o":[[-0.719,-3.596],[-0.599,-1.319],[-3.93,7.278],[5.575,0.42],[-0.18,-1.678]],"v":[[-176.731,208.985],[-183.924,209.765],[-184.57,214.472],[-173.575,215.971],[-172.924,209.554]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":112,"s":[{"i":[[1.439,-0.719],[1.379,-3.417],[-8.43,-1.972],[0.075,12.529],[5.174,-1.554]],"o":[[-0.719,-3.596],[-0.599,-1.319],[-3
.93,7.278],[5.575,0.42],[-0.18,-1.678]],"v":[[-176.731,208.985],[-183.924,209.765],[-184.57,214.472],[-173.575,215.971],[-172.924,209.554]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":122,"s":[{"i":[[10.281,-5.14],[9.852,-24.417],[-19.035,-1.472],[-10.962,-0.56],[9.121,2.485]],"o":[[-5.14,-25.702],[-4.284,-9.424],[16.28,1.259],[35.588,1.819],[-1.285,-11.994]],"v":[[-177.218,214.591],[-212.871,216.159],[-221.215,235.472],[-180.338,236.681],[-161.871,223.515]],"c":true}]},{"t":147,"s":[{"i":[[1.439,-0.719],[1.379,-3.417],[-8.43,-1.972],[0.075,12.529],[5.174,-1.554]],"o":[[-0.719,-3.596],[-0.599,-1.319],[-3.93,7.278],[5.575,0.42],[-0.18,-1.678]],"v":[[-176.731,208.985],[-183.924,209.765],[-184.57,214.472],[-173.575,215.971],[-172.924,209.554]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":172,"s":[{"i":[[1.439,-0.719],[1.379,-3.417],[-8.43,-1.972],[0.075,12.529],[5.174,-1.554]],"o":[[-0.719,-3.596],[-0.599,-1.319],[-3.93,7.278],[5.575,0.42],[-0.18,-1.678]],"v":[[-176.731,208.985],[-183.924,209.765],[-184.57,214.472],[-173.575,215.971],[-172.924,209.554]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":182,"s":[{"i":[[10.281,-5.14],[9.852,-24.417],[-19.035,-1.472],[-10.962,-0.56],[9.121,2.485]],"o":[[-5.14,-25.702],[-4.284,-9.424],[16.28,1.259],[35.588,1.819],[-1.285,-11.994]],"v":[[-177.218,214.591],[-212.871,216.159],[-221.215,235.472],[-180.338,236.681],[-161.871,223.515]],"c":true}]},{"t":207,"s":[{"i":[[1.439,-0.719],[1.379,-3.417],[-8.43,-1.972],[0.075,12.529],[5.174,-1.554]],"o":[[-0.719,-3.596],[-0.599,-1.319],[-3.93,7.278],[5.575,0.42],[-0.18,-1.678]],"v":[[-176.731,208.985],[-183.924,209.765],[-184.57,214.472],[-173.575,215.971],[-172.924,209.554]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":229,"s":[{"i":[[1.439,-0.719],[1.379,-3.417],[-8.43,-1.972],[0.075,12.529],[5.174,-1.554]],"o":[[-0.719,-3.596],[-0.599,-1.319],[-3.93,7.278],[5.575,0.42],[-0.18,-1.678]],"v":[[-176.731,208.985],[-183.924,209.765],[-184.57,214.472],[-173.575,215.971],[-172.924,209.554]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":239,"s":[{"i":[[10.281,-5.14],[9.852,-24.417],[-19.035,-1.472],[-10.962,-0.56],[9.121,2.485]],"o":[[-5.14,-25.702],[-4.284,-9.424],[16.28,1.259],[35.588,1.819],[-1.285,-11.994]],"v":[[-177.218,214.591],[-212.871,216.159],[-221.215,235.472],[-180.338,236.681],[-161.871,223.515]],"c":true}]},{"t":264,"s":[{"i":[[1.439,-0.719],[1.379,-3.417],[-8.43,-1.972],[0.075,12.529],[5.174,-1.554]],"o":[[-0.719,-3.596],[-0.599,-1.319],[-3.93,7.278],[5.575,0.42],[-0.18,-1.678]],"v":[[-176.731,208.985],[-183.924,209.765],[-184.57,214.472],[-173.575,215.971],[-172.924,209.554]],"c":true}],"h":1}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.780392216701,0.800000059838,0.756862804936,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.781857898189,0.800107230392,0.758165366977,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Shape 1","bm":0,"hd":false}],"ip":-30,"op":270,"st":-30,"bm":0},{"ddd":0,"ind":8,"ty":4,"nm":"Shape Layer 
3","sr":1,"ks":{"o":{"a":1,"k":[{"t":33,"s":[0],"h":1},{"t":86,"s":[100],"h":1},{"t":113,"s":[0],"h":1},{"t":146,"s":[100],"h":1},{"t":173,"s":[0],"h":1},{"t":206,"s":[100],"h":1},{"t":233,"s":[0],"h":1}]},"p":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":82,"s":[263.711,465.15,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":94,"s":[65.461,465.15,0],"to":[0,0,0],"ti":[0,0,0]},{"t":114,"s":[65.461,394.65,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":142,"s":[263.711,465.15,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":154,"s":[65.461,465.15,0],"to":[0,0,0],"ti":[0,0,0]},{"t":176,"s":[65.461,394.65,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":202,"s":[263.711,465.15,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":214,"s":[65.461,465.15,0],"to":[0,0,0],"ti":[0,0,0]},{"t":233,"s":[65.461,394.65,0],"h":1}]},"a":{"a":0,"k":[-190.539,209.15,0]},"s":{"a":1,"k":[{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":84,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":95,"s":[100,100,100]},{"t":117,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":144,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":155,"s":[100,100,100]},{"t":177,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":204,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":215,"s":[100,100,100]},{"t":237,"s":[10,10,100],"h":1}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":82,"s":[{"i":[[3.598,-1.799],[1.88,-6.181],[-6.75,-1.303],[-1.282,6.499]],"o":[[-1.799,-8.996],[-5.995,-1.631],[-2.025,4.647],[11.143,-0.151]],"v":[[-182.639,206.721],[-193.455,206.045],[-193.75,212.367],[-183.643,213.665]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":92,"s":[{"i":[[10.281,-5.14],[5.371,-17.659],[-19.285,-3.722],[-3.662,18.569]],"o":[[-5.14,-25.702],[-17.129,-4.659],[-5.785,13.278],[31.838,-0.431]],"v":[[-175.468,202.591],[-206.371,200.659],[-207.215,218.722],[-178.338,222.431]],"c":true}]},{"t":117,"s":[{"i":[[3.598,-1.799],[1.88,-6.181],[-6.75,-1.303],[-1.282,6.499]],"o":[[-1.799,-8.996],[-5.995,-1.631],[-2.025,4.647],[11.143,-0.151]],"v":[[-182.639,206.721],[-193.455,206.045],[-193.75,212.367],[-183.643,213.665]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":142,"s":[{"i":[[3.598,-1.799],[1.88,-6.181],[-6.75,-1.303],[-1.282,6.499]],"o":[[-1.799,-8.996],[-5.995,-1.631],[-2.025,4.647],[11.143,-0.151]],"v":[[-182.639,206.721],[-193.455,206.045],[-193.75,212.367],[-183.643,213.665]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":152,"s":[{"i":[[10.281,-5.14],[5.371,-17.659],[-19.285,-3.722],[-3.662,18.569]],"o":[[-5.14,-25.702],[-17.129,-4.659],[-5.785,13.278],[31.838,-0.431]],"v":[[-175.468,202.591],[-206.371,200.659],[-207.215,218.722],[-178.338,222.431]],"c":true}]},{"t":177,"s":[{"i":[[3.598,-1.799],[1.88,-6.181],[-6.75,-1.303],[-1.282,6.499]],"o":[[-1.799,-8.996],[-5.995,-1.631],[-2.025,4.647],[11.143,-0.151]],"v":[[-182.639,206.721],[-193.455,206.045],[-193.75,212.367],[-183.643,213.665]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},
"t":202,"s":[{"i":[[3.598,-1.799],[1.88,-6.181],[-6.75,-1.303],[-1.282,6.499]],"o":[[-1.799,-8.996],[-5.995,-1.631],[-2.025,4.647],[11.143,-0.151]],"v":[[-182.639,206.721],[-193.455,206.045],[-193.75,212.367],[-183.643,213.665]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":212,"s":[{"i":[[10.281,-5.14],[5.371,-17.659],[-19.285,-3.722],[-3.662,18.569]],"o":[[-5.14,-25.702],[-17.129,-4.659],[-5.785,13.278],[31.838,-0.431]],"v":[[-175.468,202.591],[-206.371,200.659],[-207.215,218.722],[-178.338,222.431]],"c":true}]},{"t":237,"s":[{"i":[[3.598,-1.799],[1.88,-6.181],[-6.75,-1.303],[-1.282,6.499]],"o":[[-1.799,-8.996],[-5.995,-1.631],[-2.025,4.647],[11.143,-0.151]],"v":[[-182.639,206.721],[-193.455,206.045],[-193.75,212.367],[-183.643,213.665]],"c":true}],"h":1}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.780392216701,0.800000059838,0.756862804936,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.781857898189,0.800107230392,0.758165366977,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Shape 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":9,"ty":4,"nm":"Shape Layer 4","sr":1,"ks":{"o":{"a":1,"k":[{"t":33,"s":[0],"h":1},{"t":86,"s":[100],"h":1},{"t":113,"s":[0],"h":1},{"t":146,"s":[100],"h":1},{"t":173,"s":[0],"h":1},{"t":206,"s":[100],"h":1},{"t":233,"s":[0],"h":1}]},"p":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":84,"s":[293.574,475.397,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":92,"s":[101.324,475.397,0],"to":[0,0,0],"ti":[0,0,0]},{"t":113,"s":[102.074,394.897,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":144,"s":[293.574,475.397,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":152,"s":[101.324,475.397,0],"to":[0,0,0],"ti":[0,0,0]},{"t":180,"s":[102.074,394.897,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":204,"s":[293.574,475.397,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":212,"s":[101.324,475.397,0],"to":[0,0,0],"ti":[0,0,0]},{"t":231,"s":[102.074,394.897,0],"h":1}]},"a":{"a":0,"k":[-119.176,209.397,0]},"s":{"a":1,"k":[{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":84,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":95,"s":[100,100,100]},{"t":113,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":144,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":155,"s":[100,100,100]},{"t":173,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":204,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":215,"s":[100,100,100]},{"t":233,"s":[10,10,100],"h":1}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":82,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-114.104,206.691],[-122.216,206.015],[-122.438,212.337],[-114.857,213.635]],"c":true}]},{"i":{"x
":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":92,"s":[{"i":[[15.972,-3.187],[3.626,-11.92],[-23.428,-0.718],[-2.859,5.961]],"o":[[-3.47,-17.349],[-20.485,-1.607],[1.483,10.8],[33.803,0.094]],"v":[[-111.553,203.061],[-134.163,200.008],[-132.233,220.7],[-105.99,223.204]],"c":true}]},{"t":113,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-114.104,206.691],[-122.216,206.015],[-122.438,212.337],[-114.857,213.635]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":142,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-114.104,206.691],[-122.216,206.015],[-122.438,212.337],[-114.857,213.635]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":152,"s":[{"i":[[15.972,-3.187],[3.626,-11.92],[-23.428,-0.718],[-2.859,5.961]],"o":[[-3.47,-17.349],[-20.485,-1.607],[1.483,10.8],[33.803,0.094]],"v":[[-111.553,203.061],[-134.163,200.008],[-132.233,220.7],[-105.99,223.204]],"c":true}]},{"t":173,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-114.104,206.691],[-122.216,206.015],[-122.438,212.337],[-114.857,213.635]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":202,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-114.104,206.691],[-122.216,206.015],[-122.438,212.337],[-114.857,213.635]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":212,"s":[{"i":[[15.972,-3.187],[3.626,-11.92],[-23.428,-0.718],[-2.859,5.961]],"o":[[-3.47,-17.349],[-20.485,-1.607],[1.483,10.8],[33.803,0.094]],"v":[[-111.553,203.061],[-134.163,200.008],[-132.233,220.7],[-105.99,223.204]],"c":true}]},{"t":233,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-114.104,206.691],[-122.216,206.015],[-122.438,212.337],[-114.857,213.635]],"c":true}],"h":1}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.780392216701,0.800000059838,0.756862804936,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.781857898189,0.800107230392,0.758165366977,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Shape 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":10,"ty":4,"nm":"Shape Layer 
2","sr":1,"ks":{"o":{"a":1,"k":[{"t":33,"s":[0],"h":1},{"t":86,"s":[100],"h":1},{"t":113,"s":[0],"h":1},{"t":146,"s":[100],"h":1},{"t":173,"s":[0],"h":1},{"t":206,"s":[100],"h":1},{"t":233,"s":[0],"h":1}]},"p":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":84,"s":[309.211,475.15,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":94,"s":[29.961,475.15,0],"to":[0,0,0],"ti":[0,0,0]},{"t":121,"s":[30.711,394.65,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":144,"s":[309.211,475.15,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":154,"s":[29.961,475.15,0],"to":[0,0,0],"ti":[0,0,0]},{"t":173,"s":[30.711,394.65,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":204,"s":[309.211,475.15,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":214,"s":[29.961,475.15,0],"to":[0,0,0],"ti":[0,0,0]},{"t":238,"s":[30.711,394.65,0],"h":1}]},"a":{"a":0,"k":[-190.539,209.15,0]},"s":{"a":1,"k":[{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":84,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":95,"s":[100,100,100]},{"t":113,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":144,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":155,"s":[100,100,100]},{"t":173,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":204,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":215,"s":[100,100,100]},{"t":233,"s":[10,10,100],"h":1}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":82,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-183.604,206.721],[-191.716,206.045],[-191.938,212.367],[-184.357,213.665]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":92,"s":[{"i":[[6.939,-3.47],[3.626,-11.92],[-13.017,-2.513],[-2.859,5.961]],"o":[[-3.47,-17.349],[-11.562,-3.145],[-4.185,4.194],[21.49,-0.291]],"v":[[-179.053,204.656],[-204.163,203.602],[-205.233,220.794],[-178.99,223.298]],"c":true}]},{"t":117,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-183.604,206.721],[-191.716,206.045],[-191.938,212.367],[-184.357,213.665]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":142,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-183.604,206.721],[-191.716,206.045],[-191.938,212.367],[-184.357,213.665]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":152,"s":[{"i":[[6.939,-3.47],[3.626,-11.92],[-13.017,-2.513],[-2.859,5.961]],"o":[[-3.47,-17.349],[-11.562,-3.145],[-4.185,4.194],[21.49,-0.291]],"v":[[-179.053,204.656],[-204.163,203.602],[-205.233,220.794],[-178.99,223.298]],"c":true}]},{"t":177,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-183.604,206.721],[-191.716,206.045],[-191.938,212.367],[-184.357,213.665]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":202,
"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-183.604,206.721],[-191.716,206.045],[-191.938,212.367],[-184.357,213.665]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":212,"s":[{"i":[[6.939,-3.47],[3.626,-11.92],[-13.017,-2.513],[-2.859,5.961]],"o":[[-3.47,-17.349],[-11.562,-3.145],[-4.185,4.194],[21.49,-0.291]],"v":[[-179.053,204.656],[-204.163,203.602],[-205.233,220.794],[-178.99,223.298]],"c":true}]},{"t":237,"s":[{"i":[[2.699,-1.799],[1.41,-6.181],[-5.062,-1.303],[-0.961,6.499]],"o":[[-1.349,-8.996],[-4.496,-1.631],[-1.519,4.647],[8.357,-0.151]],"v":[[-183.604,206.721],[-191.716,206.045],[-191.938,212.367],[-184.357,213.665]],"c":true}],"h":1}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.780392216701,0.800000059838,0.756862804936,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.781857898189,0.800107230392,0.758165366977,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Shape 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":11,"ty":4,"nm":"Shape Layer 1","sr":1,"ks":{"o":{"a":1,"k":[{"t":33,"s":[0],"h":1},{"t":86,"s":[100],"h":1},{"t":117,"s":[0],"h":1},{"t":146,"s":[100],"h":1},{"t":177,"s":[0],"h":1},{"t":206,"s":[100],"h":1},{"t":237,"s":[0],"h":1}]},"p":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":82,"s":[277.003,470.829,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":94,"s":[77.253,470.829,0],"to":[0,0,0],"ti":[0,0,0]},{"t":116,"s":[77.253,400.329,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":142,"s":[277.003,470.829,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":154,"s":[77.253,470.829,0],"to":[0,0,0],"ti":[0,0,0]},{"t":176,"s":[77.253,400.329,0],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":202,"s":[277.003,470.829,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":214,"s":[77.253,470.829,0],"to":[0,0,0],"ti":[0,0,0]},{"t":236,"s":[77.253,400.329,0],"h":1}]},"a":{"a":0,"k":[-178.747,214.829,0]},"s":{"a":1,"k":[{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":84,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":95,"s":[100,100,100]},{"t":119,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":144,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":155,"s":[100,100,100]},{"t":179,"s":[10,10,100],"h":1},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":204,"s":[10,10,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":215,"s":[100,100,100]},{"t":239,"s":[10,10,100],"h":1}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":82,"s":[{"i":[[1.439,-0.719],[1.379,-3.417],[-8.43,-1.972],[0.075,12.529],[5.174,-1.554]],"o":[[-0.719,-3.596],[-0.599,-1.319],[-3.93,7.278],[5.575,0.42],[-0.18,-1.678]],"v":[[-176.731,208.985],[-183.924,209.765],[-184.57,214.472],[-173.575,215.971],[-172.924,20
9.554]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":92,"s":[{"i":[[10.281,-5.14],[9.852,-24.417],[-19.035,-1.472],[-10.962,-0.56],[9.121,2.485]],"o":[[-5.14,-25.702],[-4.284,-9.424],[16.28,1.259],[35.588,1.819],[-1.285,-11.994]],"v":[[-177.218,214.591],[-212.871,216.159],[-221.215,235.472],[-180.338,236.681],[-161.871,223.515]],"c":true}]},{"t":117,"s":[{"i":[[1.439,-0.719],[1.379,-3.417],[-8.43,-1.972],[0.075,12.529],[5.174,-1.554]],"o":[[-0.719,-3.596],[-0.599,-1.319],[-3.93,7.278],[5.575,0.42],[-0.18,-1.678]],"v":[[-176.731,208.985],[-183.924,209.765],[-184.57,214.472],[-173.575,215.971],[-172.924,209.554]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":142,"s":[{"i":[[1.439,-0.719],[1.379,-3.417],[-8.43,-1.972],[0.075,12.529],[5.174,-1.554]],"o":[[-0.719,-3.596],[-0.599,-1.319],[-3.93,7.278],[5.575,0.42],[-0.18,-1.678]],"v":[[-176.731,208.985],[-183.924,209.765],[-184.57,214.472],[-173.575,215.971],[-172.924,209.554]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":152,"s":[{"i":[[10.281,-5.14],[9.852,-24.417],[-19.035,-1.472],[-10.962,-0.56],[9.121,2.485]],"o":[[-5.14,-25.702],[-4.284,-9.424],[16.28,1.259],[35.588,1.819],[-1.285,-11.994]],"v":[[-177.218,214.591],[-212.871,216.159],[-221.215,235.472],[-180.338,236.681],[-161.871,223.515]],"c":true}]},{"t":177,"s":[{"i":[[1.439,-0.719],[1.379,-3.417],[-8.43,-1.972],[0.075,12.529],[5.174,-1.554]],"o":[[-0.719,-3.596],[-0.599,-1.319],[-3.93,7.278],[5.575,0.42],[-0.18,-1.678]],"v":[[-176.731,208.985],[-183.924,209.765],[-184.57,214.472],[-173.575,215.971],[-172.924,209.554]],"c":true}],"h":1},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":202,"s":[{"i":[[1.439,-0.719],[1.379,-3.417],[-8.43,-1.972],[0.075,12.529],[5.174,-1.554]],"o":[[-0.719,-3.596],[-0.599,-1.319],[-3.93,7.278],[5.575,0.42],[-0.18,-1.678]],"v":[[-176.731,208.985],[-183.924,209.765],[-184.57,214.472],[-173.575,215.971],[-172.924,209.554]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":212,"s":[{"i":[[10.281,-5.14],[9.852,-24.417],[-19.035,-1.472],[-10.962,-0.56],[9.121,2.485]],"o":[[-5.14,-25.702],[-4.284,-9.424],[16.28,1.259],[35.588,1.819],[-1.285,-11.994]],"v":[[-177.218,214.591],[-212.871,216.159],[-221.215,235.472],[-180.338,236.681],[-161.871,223.515]],"c":true}]},{"t":237,"s":[{"i":[[1.439,-0.719],[1.379,-3.417],[-8.43,-1.972],[0.075,12.529],[5.174,-1.554]],"o":[[-0.719,-3.596],[-0.599,-1.319],[-3.93,7.278],[5.575,0.42],[-0.18,-1.678]],"v":[[-176.731,208.985],[-183.924,209.765],[-184.57,214.472],[-173.575,215.971],[-172.924,209.554]],"c":true}],"h":1}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.780392216701,0.800000059838,0.756862804936,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.781857898189,0.800107230392,0.758165366977,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Shape 
1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":12,"ty":4,"nm":"hand1","parent":13,"sr":1,"ks":{"r":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[26.995]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":80,"s":[33.272]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":90,"s":[26.995]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":110,"s":[33.272]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[26.995]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":140,"s":[33.272]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":150,"s":[26.995]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":170,"s":[33.272]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[26.995]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":200,"s":[33.272]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":210,"s":[26.995]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":230,"s":[33.272]},{"t":240,"s":[26.995]}]},"p":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":60,"s":[157.289,123.765,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":80,"s":[192.821,123.765,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":90,"s":[157.289,123.765,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":110,"s":[192.821,123.765,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":120,"s":[157.289,123.765,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[192.821,123.765,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":150,"s":[157.289,123.765,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":170,"s":[192.821,123.765,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":180,"s":[157.289,123.765,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":200,"s":[192.821,123.765,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":210,"s":[157.289,123.765,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":230,"s":[192.821,123.765,0],"to":[0,0,0],"ti":[0,0,0]},{"t":240,"s":[157.289,123.765,0]}]},"a":{"a":0,"k":[83.716,11.206,0]},"s":{"a":0,"k":[103.539,96.543,100]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":33,"s":[{"i":[[0,0],[4.511,-10.304],[-56.802,46.311]],"o":[[-25.231,29.469],[-8.593,19.63],[0,0]],"v":[[45.245,-27.116],[-42.946,-6.444],[83.716,33.465]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":53,"s":[{"i":[[0,0],[-3.777,-10.596],[-26.809,-14.182]],"o":[[-42.953,2.447],[3.525,9.89],[0,0]],"v":[[25.587,-12.043],[-30.293,23.676],[61.687,42.773]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":63,"s":[{"i":[[0,0],[4.511,-10.304],[-56.802,46.311]],"o":[[-25.231,29.469],[-8.593,19.63],[0,0]],"v":[[45.245,-27.116],[-42.946,-6.444],[83.716,33.465]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":83,"s":[{"i":[[0,0],[4.511,-10.304],[-54.175,-7.932]],"o":[[-42.953,2.447],[-8.593,19.63],[0,0]],"v":[[45.245,-27.116],[-30.293,23.676],[83.716,33.465]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":93,"s":[{"i":[[0,0],[4.511,-10.304],[-56.802,46.311]],"o":[[-25.231,29.469],[-8.593,19.63],[0,0]],"v":[[45.245,-27.116],[-42.946,-6.444],[83.716,33.46
5]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":113,"s":[{"i":[[0,0],[4.511,-10.304],[-54.175,-7.932]],"o":[[-42.953,2.447],[-8.593,19.63],[0,0]],"v":[[45.245,-27.116],[-30.293,23.676],[83.716,33.465]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":123,"s":[{"i":[[0,0],[4.511,-10.304],[-56.802,46.311]],"o":[[-25.231,29.469],[-8.593,19.63],[0,0]],"v":[[45.245,-27.116],[-42.946,-6.444],[83.716,33.465]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":143,"s":[{"i":[[0,0],[4.511,-10.304],[-54.175,-7.932]],"o":[[-42.953,2.447],[-8.593,19.63],[0,0]],"v":[[45.245,-27.116],[-30.293,23.676],[83.716,33.465]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":153,"s":[{"i":[[0,0],[4.511,-10.304],[-58.848,31.735]],"o":[[-46.858,11.462],[-8.593,19.63],[0,0]],"v":[[44.578,1.147],[-19.175,-4.319],[70.644,48.162]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":173,"s":[{"i":[[0,0],[-3.777,-10.596],[-26.809,-14.182]],"o":[[-42.953,2.447],[3.525,9.89],[0,0]],"v":[[25.587,-12.043],[-30.293,23.676],[61.687,42.773]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":183,"s":[{"i":[[0,0],[4.511,-10.304],[-58.848,31.735]],"o":[[-46.858,11.462],[-8.593,19.63],[0,0]],"v":[[44.578,1.147],[-19.175,-4.319],[70.644,48.162]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":203,"s":[{"i":[[0,0],[-3.777,-10.596],[-26.809,-14.182]],"o":[[-42.953,2.447],[3.525,9.89],[0,0]],"v":[[25.587,-12.043],[-30.293,23.676],[61.687,42.773]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":213,"s":[{"i":[[0,0],[4.511,-10.304],[-58.848,31.735]],"o":[[-46.858,11.462],[-8.593,19.63],[0,0]],"v":[[44.578,1.147],[-19.175,-4.319],[70.644,48.162]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":233,"s":[{"i":[[0,0],[-3.777,-10.596],[-26.809,-14.182]],"o":[[-42.953,2.447],[3.525,9.89],[0,0]],"v":[[25.587,-12.043],[-30.293,23.676],[61.687,42.773]],"c":false}]},{"t":243,"s":[{"i":[[0,0],[4.511,-10.304],[-56.802,46.311]],"o":[[-25.231,29.469],[-8.593,19.63],[0,0]],"v":[[45.245,-27.116],[-42.946,-6.444],[83.716,33.465]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.980392158031,0.564705908298,0.086274512112,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":10},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0.835294127464,0.152941182256,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":13,"ty":3,"nm":"NULL 
CONTROL","sr":1,"ks":{"o":{"a":0,"k":0},"p":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":130,"s":[203.665,234.443,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":0.4},"o":{"x":0.6,"y":0.6},"t":150,"s":[213.665,234.443,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":220,"s":[213.665,234.443,0],"to":[0,0,0],"ti":[0,0,0]},{"t":240,"s":[203.665,234.443,0]}]},"a":{"a":0,"k":[60,60,0]}},"ao":0,"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":14,"ty":4,"nm":"broom","parent":13,"sr":1,"ks":{"r":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.59],"y":[0]},"t":60,"s":[60]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.59],"y":[0]},"t":80,"s":[20]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.59],"y":[0]},"t":90,"s":[60]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.59],"y":[0]},"t":110,"s":[20]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.59],"y":[0]},"t":120,"s":[60]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.59],"y":[0]},"t":140,"s":[20]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.59],"y":[0]},"t":150,"s":[60]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.59],"y":[0]},"t":170,"s":[20]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.59],"y":[0]},"t":180,"s":[60]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.59],"y":[0]},"t":200,"s":[20]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.59],"y":[0]},"t":210,"s":[60]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.59],"y":[0]},"t":230,"s":[20]},{"t":240,"s":[60]}]},"p":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.59,"y":0},"t":60,"s":[60,60,0],"to":[3.335,-14.443,0],"ti":[-2.396,-19.345,0]},{"i":{"x":0.833,"y":1},"o":{"x":0.59,"y":0},"t":80,"s":[80,60,0],"to":[-5.665,0.182,0],"ti":[5.165,0.193,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.59,"y":0},"t":90,"s":[60,60,0],"to":[3.335,-14.443,0],"ti":[-2.396,-19.345,0]},{"i":{"x":0.833,"y":1},"o":{"x":0.59,"y":0},"t":110,"s":[80,60,0],"to":[-5.665,0.182,0],"ti":[5.04,-0.057,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.59,"y":0},"t":120,"s":[60,60,0],"to":[3.335,-14.443,0],"ti":[-2.396,-19.345,0]},{"i":{"x":0.833,"y":1},"o":{"x":0.59,"y":0},"t":140,"s":[80,60,0],"to":[-5.665,0.182,0],"ti":[4.54,0.318,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.59,"y":0},"t":150,"s":[60,60,0],"to":[3.335,-14.443,0],"ti":[-2.396,-19.345,0]},{"i":{"x":0.833,"y":1},"o":{"x":0.59,"y":0},"t":170,"s":[80,60,0],"to":[-5.665,0.182,0],"ti":[5.29,0.068,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.59,"y":0},"t":180,"s":[60,60,0],"to":[3.335,-14.443,0],"ti":[-2.396,-19.345,0]},{"i":{"x":0.833,"y":1},"o":{"x":0.59,"y":0},"t":200,"s":[80,60,0],"to":[-5.665,0.182,0],"ti":[6.665,-0.307,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.59,"y":0},"t":210,"s":[60,60,0],"to":[3.335,-14.443,0],"ti":[-2.396,-19.345,0]},{"i":{"x":0.833,"y":1},"o":{"x":0.59,"y":0},"t":230,"s":[80,60,0],"to":[-5.665,0.182,0],"ti":[6.165,0.068,0]},{"t":240,"s":[60,60,0]}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[6.287,-0.72],[0,0],[-7.998,-7.94],[0,0]],"o":[[0,0],[-7.941,-7.884],[0,0],[-1.998,7.85]],"v":[[68.069,80.248],[-149.688,-134.151],[-136.315,-147.621],[81.245,66.707]],"c":true}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.427450984716,0.211764708161,0.019607843831,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.666666686535,0.352941185236,0.121568627656,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":15,"ty":4,"nm":"Layer 28","parent":16,"sr":1,"ks":{"p":{"a":0,"k":[307.396,311.699,0]},"a":{"a":0,"k":[307.396,311.699,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":22,"s":[{"i":[[0,0],[-3.267,-22.952]],"o":[[0,0],[3.75,26.346]],"v":[[0.099,-22.668],[-1.775,46.75]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":27,"s":[{"i":[[0,0],[-10.759,-15.839]],"o":[[0,0],[14.953,22.014]],"v":[[8.096,-22.475],[20.08,32.271]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":32,"s":[{"i":[[0,0],[24.873,-24.912]],"o":[[0,0],[-18.803,18.832]],"v":[[0.099,-22.668],[-32.41,43.182]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":52,"s":[{"i":[[0,0],[-3.267,-22.952]],"o":[[0,0],[3.75,26.346]],"v":[[0.099,-22.668],[-1.775,46.75]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":57,"s":[{"i":[[0,0],[-10.759,-15.839]],"o":[[0,0],[14.953,22.014]],"v":[[8.096,-22.475],[20.08,32.271]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":62,"s":[{"i":[[0,0],[24.873,-24.912]],"o":[[0,0],[-18.803,18.832]],"v":[[0.099,-22.668],[-32.41,43.182]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":82,"s":[{"i":[[0,0],[-3.267,-22.952]],"o":[[0,0],[3.75,26.346]],"v":[[0.099,-22.668],[-1.775,46.75]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":87,"s":[{"i":[[0,0],[-10.759,-15.839]],"o":[[0,0],[14.953,22.014]],"v":[[8.096,-22.475],[20.08,32.271]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":92,"s":[{"i":[[0,0],[24.873,-24.912]],"o":[[0,0],[-18.803,18.832]],"v":[[0.099,-22.668],[-32.41,43.182]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":112,"s":[{"i":[[0,0],[-3.267,-22.952]],"o":[[0,0],[3.75,26.346]],"v":[[0.099,-22.668],[-1.775,46.75]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":117,"s":[{"i":[[0,0],[-10.759,-15.839]],"o":[[0,0],[14.953,22.014]],"v":[[8.096,-22.475],[20.08,32.271]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":122,"s":[{"i":[[0,0],[24.873,-24.912]],"o":[[0,0],[-18.803,18.832]],"v":[[0.099,-22.668],[-32.41,43.182]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":142,"s":[{"i":[[0,0],[-3.267,-22.952]],"o":[[0,0],[3.75,26.346]],"v":[[0.099,-22.668],[-1.775,46.75]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":147,"s":[{"i":[[0,0],[-10.759,-15.839]],"o":[[0,0],[14.953,22.014]],"v":[[8.096,-22.475],[20.08,32.271]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":152,"s":[{"i":[[0,0],[24.873,-24.912]],"o":[[0,0],[-18.803,18.832]],"v":[[0.099,-22.668],[-32.41,43.182]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":172,"s":[{"i":[[0,0],[-3.267,-22.952]],"o":[[0,0],[3.75,26.346]],"v":[[0.099,-22.668],[-1.775,46.75]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":177,"s":[{"i":[[0,0],[-10.759,-15.839]],"o":[[0,0],[14.953,22.014]],"v":[[8.096,-22.475],[20.08,32.271]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":182,"s":[{"i":[[0,0],[24.873,-24.912]],"o":[[0,0],[-18.803,18.832]],"v":[[0.099,-22.668],[-32.41,43.182]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":202,"s":[{"i":[[0,0],[-3.267,-22.952]],"o":[[0,0],[3.75,26.346]],"v":[[0.099,-22.668],[-1.775,46.75]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":207,"s":[{"i":[[0,0],[-10.759,-15.839]
],"o":[[0,0],[14.953,22.014]],"v":[[8.096,-22.475],[20.08,32.271]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":212,"s":[{"i":[[0,0],[24.873,-24.912]],"o":[[0,0],[-18.803,18.832]],"v":[[0.099,-22.668],[-32.41,43.182]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":232,"s":[{"i":[[0,0],[-3.267,-22.952]],"o":[[0,0],[3.75,26.346]],"v":[[0.099,-22.668],[-1.775,46.75]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":237,"s":[{"i":[[0,0],[-10.759,-15.839]],"o":[[0,0],[14.953,22.014]],"v":[[8.096,-22.475],[20.08,32.271]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":242,"s":[{"i":[[0,0],[24.873,-24.912]],"o":[[0,0],[-18.803,18.832]],"v":[[0.099,-22.668],[-32.41,43.182]],"c":false}]},{"t":262,"s":[{"i":[[0,0],[-3.267,-22.952]],"o":[[0,0],[3.75,26.346]],"v":[[0.099,-22.668],[-1.775,46.75]],"c":false}]}]},"nm":"Path 1","hd":false},{"ind":1,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":22,"s":[{"i":[[1.157,30.465],[-2.646,35.502]],"o":[[-0.657,-17.292],[0,0]],"v":[[18.134,58.987],[18.919,-15.819]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":27,"s":[{"i":[[14.878,26.611],[1.507,28.016]],"o":[[-4.156,-7.433],[0,0]],"v":[[39.217,34.987],[26.917,-15.627]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":32,"s":[{"i":[[-24.478,18.174],[-9.283,33.503]],"o":[[15.705,-11.661],[0,0]],"v":[[-22.566,47.596],[13.148,-22.392]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":52,"s":[{"i":[[1.157,30.465],[-2.646,35.502]],"o":[[-0.657,-17.292],[0,0]],"v":[[18.134,58.987],[18.919,-15.819]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":57,"s":[{"i":[[14.878,26.611],[1.507,28.016]],"o":[[-4.156,-7.433],[0,0]],"v":[[39.217,34.987],[26.917,-15.627]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":62,"s":[{"i":[[-24.478,18.174],[-9.283,33.503]],"o":[[15.705,-11.661],[0,0]],"v":[[-22.566,47.596],[13.148,-22.392]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":82,"s":[{"i":[[1.157,30.465],[-2.646,35.502]],"o":[[-0.657,-17.292],[0,0]],"v":[[18.134,58.987],[18.919,-15.819]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":87,"s":[{"i":[[14.878,26.611],[1.507,28.016]],"o":[[-4.156,-7.433],[0,0]],"v":[[39.217,34.987],[26.917,-15.627]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":92,"s":[{"i":[[-24.478,18.174],[-9.283,33.503]],"o":[[15.705,-11.661],[0,0]],"v":[[-22.566,47.596],[13.148,-22.392]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":112,"s":[{"i":[[1.157,30.465],[-2.646,35.502]],"o":[[-0.657,-17.292],[0,0]],"v":[[18.134,58.987],[18.919,-15.819]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":117,"s":[{"i":[[14.878,26.611],[1.507,28.016]],"o":[[-4.156,-7.433],[0,0]],"v":[[39.217,34.987],[26.917,-15.627]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":122,"s":[{"i":[[-24.478,18.174],[-9.283,33.503]],"o":[[15.705,-11.661],[0,0]],"v":[[-22.566,47.596],[13.148,-22.392]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":142,"s":[{"i":[[1.157,30.465],[-2.646,35.502]],"o":[[-0.657,-17.292],[0,0]],"v":[[18.134,58.987],[18.919,-15.819]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":147,"s":[{"i":[[14.878,26.611],[1.507,28.016]],"o":[[-4.156,-7.433],[0,0]],"v":[[39.217,34.987],[26.917,-15.627]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":152,"s":[{"i":[[-24.478,18.174],[-9.283,33.503]],"o
":[[15.705,-11.661],[0,0]],"v":[[-22.566,47.596],[13.148,-22.392]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":172,"s":[{"i":[[1.157,30.465],[-2.646,35.502]],"o":[[-0.657,-17.292],[0,0]],"v":[[18.134,58.987],[18.919,-15.819]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":177,"s":[{"i":[[14.878,26.611],[1.507,28.016]],"o":[[-4.156,-7.433],[0,0]],"v":[[39.217,34.987],[26.917,-15.627]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":182,"s":[{"i":[[-24.478,18.174],[-9.283,33.503]],"o":[[15.705,-11.661],[0,0]],"v":[[-22.566,47.596],[13.148,-22.392]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":202,"s":[{"i":[[1.157,30.465],[-2.646,35.502]],"o":[[-0.657,-17.292],[0,0]],"v":[[18.134,58.987],[18.919,-15.819]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":207,"s":[{"i":[[14.878,26.611],[1.507,28.016]],"o":[[-4.156,-7.433],[0,0]],"v":[[39.217,34.987],[26.917,-15.627]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":212,"s":[{"i":[[-24.478,18.174],[-9.283,33.503]],"o":[[15.705,-11.661],[0,0]],"v":[[-22.566,47.596],[13.148,-22.392]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":232,"s":[{"i":[[1.157,30.465],[-2.646,35.502]],"o":[[-0.657,-17.292],[0,0]],"v":[[18.134,58.987],[18.919,-15.819]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":237,"s":[{"i":[[14.878,26.611],[1.507,28.016]],"o":[[-4.156,-7.433],[0,0]],"v":[[39.217,34.987],[26.917,-15.627]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":242,"s":[{"i":[[-24.478,18.174],[-9.283,33.503]],"o":[[15.705,-11.661],[0,0]],"v":[[-22.566,47.596],[13.148,-22.392]],"c":false}]},{"t":262,"s":[{"i":[[1.157,30.465],[-2.646,35.502]],"o":[[-0.657,-17.292],[0,0]],"v":[[18.134,58.987],[18.919,-15.819]],"c":false}]}]},"nm":"Path 
2","hd":false},{"ind":2,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":22,"s":[{"i":[[0,0],[0.331,-10.909]],"o":[[-1.93,16.242],[-0.771,25.452]],"v":[[-17.974,-10.441],[-20.636,35.488]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":27,"s":[{"i":[[0,0],[-7.272,-9.949]],"o":[[-1.93,16.242],[15.027,20.557]],"v":[[-7.273,-18.686],[1.05,28.007]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":32,"s":[{"i":[[0,0],[8.853,-9.838]],"o":[[-5.755,17.938],[-17.033,18.928]],"v":[[-17.974,-10.441],[-42.416,35.898]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":52,"s":[{"i":[[0,0],[0.331,-10.909]],"o":[[-1.93,16.242],[-0.771,25.452]],"v":[[-17.974,-10.441],[-20.636,35.488]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":57,"s":[{"i":[[0,0],[-7.272,-9.949]],"o":[[-1.93,16.242],[15.027,20.557]],"v":[[-7.273,-18.686],[1.05,28.007]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":62,"s":[{"i":[[0,0],[8.853,-9.838]],"o":[[-5.755,17.938],[-17.033,18.928]],"v":[[-17.974,-10.441],[-42.416,35.898]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":82,"s":[{"i":[[0,0],[0.331,-10.909]],"o":[[-1.93,16.242],[-0.771,25.452]],"v":[[-17.974,-10.441],[-20.636,35.488]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":87,"s":[{"i":[[0,0],[-7.272,-9.949]],"o":[[-1.93,16.242],[15.027,20.557]],"v":[[-7.273,-18.686],[1.05,28.007]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":92,"s":[{"i":[[0,0],[8.853,-9.838]],"o":[[-5.755,17.938],[-17.033,18.928]],"v":[[-17.974,-10.441],[-42.416,35.898]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":112,"s":[{"i":[[0,0],[0.331,-10.909]],"o":[[-1.93,16.242],[-0.771,25.452]],"v":[[-17.974,-10.441],[-20.636,35.488]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":117,"s":[{"i":[[0,0],[-7.272,-9.949]],"o":[[-1.93,16.242],[15.027,20.557]],"v":[[-7.273,-18.686],[1.05,28.007]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":122,"s":[{"i":[[0,0],[8.853,-9.838]],"o":[[-5.755,17.938],[-17.033,18.928]],"v":[[-17.974,-10.441],[-42.416,35.898]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":142,"s":[{"i":[[0,0],[0.331,-10.909]],"o":[[-1.93,16.242],[-0.771,25.452]],"v":[[-17.974,-10.441],[-20.636,35.488]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":147,"s":[{"i":[[0,0],[-7.272,-9.949]],"o":[[-1.93,16.242],[15.027,20.557]],"v":[[-7.273,-18.686],[1.05,28.007]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":152,"s":[{"i":[[0,0],[8.853,-9.838]],"o":[[-5.755,17.938],[-17.033,18.928]],"v":[[-17.974,-10.441],[-42.416,35.898]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":172,"s":[{"i":[[0,0],[0.331,-10.909]],"o":[[-1.93,16.242],[-0.771,25.452]],"v":[[-17.974,-10.441],[-20.636,35.488]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":177,"s":[{"i":[[0,0],[-7.272,-9.949]],"o":[[-1.93,16.242],[15.027,20.557]],"v":[[-7.273,-18.686],[1.05,28.007]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":182,"s":[{"i":[[0,0],[8.853,-9.838]],"o":[[-5.755,17.938],[-17.033,18.928]],"v":[[-17.974,-10.441],[-42.416,35.898]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":202,"s":[{"i":[[0,0],[0.331,-10.909]],"o":[[-1.93,16.242],[-0.771,25.452]],"v":[[-17.974,-10.441],[-20.636,35.488]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":207,"s":[{"i":[[0,0],[-7.272,-9.949]],"o":[[
-1.93,16.242],[15.027,20.557]],"v":[[-7.273,-18.686],[1.05,28.007]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":212,"s":[{"i":[[0,0],[8.853,-9.838]],"o":[[-5.755,17.938],[-17.033,18.928]],"v":[[-17.974,-10.441],[-42.416,35.898]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":232,"s":[{"i":[[0,0],[0.331,-10.909]],"o":[[-1.93,16.242],[-0.771,25.452]],"v":[[-17.974,-10.441],[-20.636,35.488]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":237,"s":[{"i":[[0,0],[-7.272,-9.949]],"o":[[-1.93,16.242],[15.027,20.557]],"v":[[-7.273,-18.686],[1.05,28.007]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":242,"s":[{"i":[[0,0],[8.853,-9.838]],"o":[[-5.755,17.938],[-17.033,18.928]],"v":[[-17.974,-10.441],[-42.416,35.898]],"c":false}]},{"t":262,"s":[{"i":[[0,0],[0.331,-10.909]],"o":[[-1.93,16.242],[-0.771,25.452]],"v":[[-17.974,-10.441],[-20.636,35.488]],"c":false}]}]},"nm":"Path 3","hd":false},{"ind":3,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":22,"s":[{"i":[[8.69,35.642],[0.804,34.037]],"o":[[-4.002,-16.414],[0,0]],"v":[[44.593,66.249],[37.964,-8.833]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":27,"s":[{"i":[[23.878,27.851],[5.347,26.082]],"o":[[-8.717,-10.168],[0,0]],"v":[[71.258,38.882],[46.202,-18.638]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":32,"s":[{"i":[[-25.274,26.591],[-5.43,29.461]],"o":[[13.487,-14.19],[0,0]],"v":[[-7.915,51.417],[23.695,-17.415]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":52,"s":[{"i":[[8.69,35.642],[0.804,34.037]],"o":[[-4.002,-16.414],[0,0]],"v":[[44.593,66.249],[37.964,-8.833]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":57,"s":[{"i":[[23.878,27.851],[5.347,26.082]],"o":[[-8.717,-10.168],[0,0]],"v":[[71.258,38.882],[46.202,-18.638]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":62,"s":[{"i":[[-25.274,26.591],[-5.43,29.461]],"o":[[13.487,-14.19],[0,0]],"v":[[-7.915,51.417],[23.695,-17.415]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":82,"s":[{"i":[[8.69,35.642],[0.804,34.037]],"o":[[-4.002,-16.414],[0,0]],"v":[[44.593,66.249],[37.964,-8.833]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":87,"s":[{"i":[[23.878,27.851],[5.347,26.082]],"o":[[-8.717,-10.168],[0,0]],"v":[[71.258,38.882],[46.202,-18.638]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":92,"s":[{"i":[[-25.274,26.591],[-5.43,29.461]],"o":[[13.487,-14.19],[0,0]],"v":[[-7.915,51.417],[23.695,-17.415]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":112,"s":[{"i":[[8.69,35.642],[0.804,34.037]],"o":[[-4.002,-16.414],[0,0]],"v":[[44.593,66.249],[37.964,-8.833]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":117,"s":[{"i":[[23.878,27.851],[5.347,26.082]],"o":[[-8.717,-10.168],[0,0]],"v":[[71.258,38.882],[46.202,-18.638]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":122,"s":[{"i":[[-25.274,26.591],[-5.43,29.461]],"o":[[13.487,-14.19],[0,0]],"v":[[-7.915,51.417],[23.695,-17.415]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":142,"s":[{"i":[[8.69,35.642],[0.804,34.037]],"o":[[-4.002,-16.414],[0,0]],"v":[[44.593,66.249],[37.964,-8.833]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":147,"s":[{"i":[[23.878,27.851],[5.347,26.082]],"o":[[-8.717,-10.168],[0,0]],"v":[[71.258,38.882],[46.202,-18.638]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":152,"s":[{"i":[
[-25.274,26.591],[-5.43,29.461]],"o":[[13.487,-14.19],[0,0]],"v":[[-7.915,51.417],[23.695,-17.415]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":172,"s":[{"i":[[8.69,35.642],[0.804,34.037]],"o":[[-4.002,-16.414],[0,0]],"v":[[44.593,66.249],[37.964,-8.833]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":177,"s":[{"i":[[23.878,27.851],[5.347,26.082]],"o":[[-8.717,-10.168],[0,0]],"v":[[71.258,38.882],[46.202,-18.638]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":182,"s":[{"i":[[-25.274,26.591],[-5.43,29.461]],"o":[[13.487,-14.19],[0,0]],"v":[[-7.915,51.417],[23.695,-17.415]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":202,"s":[{"i":[[8.69,35.642],[0.804,34.037]],"o":[[-4.002,-16.414],[0,0]],"v":[[44.593,66.249],[37.964,-8.833]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":207,"s":[{"i":[[23.878,27.851],[5.347,26.082]],"o":[[-8.717,-10.168],[0,0]],"v":[[71.258,38.882],[46.202,-18.638]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":212,"s":[{"i":[[-25.274,26.591],[-5.43,29.461]],"o":[[13.487,-14.19],[0,0]],"v":[[-7.915,51.417],[23.695,-17.415]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":232,"s":[{"i":[[8.69,35.642],[0.804,34.037]],"o":[[-4.002,-16.414],[0,0]],"v":[[44.593,66.249],[37.964,-8.833]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":237,"s":[{"i":[[23.878,27.851],[5.347,26.082]],"o":[[-8.717,-10.168],[0,0]],"v":[[71.258,38.882],[46.202,-18.638]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":242,"s":[{"i":[[-25.274,26.591],[-5.43,29.461]],"o":[[13.487,-14.19],[0,0]],"v":[[-7.915,51.417],[23.695,-17.415]],"c":false}]},{"t":262,"s":[{"i":[[8.69,35.642],[0.804,34.037]],"o":[[-4.002,-16.414],[0,0]],"v":[[44.593,66.249],[37.964,-8.833]],"c":false}]}]},"nm":"Path 
4","hd":false},{"ind":4,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":22,"s":[{"i":[[-1.928,33.55],[-0.958,23.715]],"o":[[1.061,-18.465],[0,0]],"v":[[-34.158,23.496],[-31.317,-42.866]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":27,"s":[{"i":[[13.057,30.965],[-0.958,23.715]],"o":[[-7.29,-17.288],[0,0]],"v":[[-16.067,19.93],[-23.319,-42.674]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":32,"s":[{"i":[[-19.739,27.198],[-0.958,23.715]],"o":[[13.916,-19.176],[0,0]],"v":[[-53.655,28.064],[-23.132,-39.783]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":52,"s":[{"i":[[-1.928,33.55],[-0.958,23.715]],"o":[[1.061,-18.465],[0,0]],"v":[[-34.158,23.496],[-31.317,-42.866]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":57,"s":[{"i":[[13.057,30.965],[-0.958,23.715]],"o":[[-7.29,-17.288],[0,0]],"v":[[-16.067,19.93],[-23.319,-42.674]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":62,"s":[{"i":[[-19.739,27.198],[-0.958,23.715]],"o":[[13.916,-19.176],[0,0]],"v":[[-53.655,28.064],[-23.132,-39.783]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":82,"s":[{"i":[[-1.928,33.55],[-0.958,23.715]],"o":[[1.061,-18.465],[0,0]],"v":[[-34.158,23.496],[-31.317,-42.866]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":87,"s":[{"i":[[13.057,30.965],[-0.958,23.715]],"o":[[-7.29,-17.288],[0,0]],"v":[[-16.067,19.93],[-23.319,-42.674]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":92,"s":[{"i":[[-19.739,27.198],[-0.958,23.715]],"o":[[13.916,-19.176],[0,0]],"v":[[-53.655,28.064],[-23.132,-39.783]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":112,"s":[{"i":[[-1.928,33.55],[-0.958,23.715]],"o":[[1.061,-18.465],[0,0]],"v":[[-34.158,23.496],[-31.317,-42.866]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":117,"s":[{"i":[[13.057,30.965],[-0.958,23.715]],"o":[[-7.29,-17.288],[0,0]],"v":[[-16.067,19.93],[-23.319,-42.674]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":122,"s":[{"i":[[-19.739,27.198],[-0.958,23.715]],"o":[[13.916,-19.176],[0,0]],"v":[[-53.655,28.064],[-23.132,-39.783]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":142,"s":[{"i":[[-1.928,33.55],[-0.958,23.715]],"o":[[1.061,-18.465],[0,0]],"v":[[-34.158,23.496],[-31.317,-42.866]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":147,"s":[{"i":[[13.057,30.965],[-0.958,23.715]],"o":[[-7.29,-17.288],[0,0]],"v":[[-16.067,19.93],[-23.319,-42.674]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":152,"s":[{"i":[[-19.739,27.198],[-0.958,23.715]],"o":[[13.916,-19.176],[0,0]],"v":[[-53.655,28.064],[-23.132,-39.783]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":172,"s":[{"i":[[-1.928,33.55],[-0.958,23.715]],"o":[[1.061,-18.465],[0,0]],"v":[[-34.158,23.496],[-31.317,-42.866]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":177,"s":[{"i":[[13.057,30.965],[-0.958,23.715]],"o":[[-7.29,-17.288],[0,0]],"v":[[-16.067,19.93],[-23.319,-42.674]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":182,"s":[{"i":[[-19.739,27.198],[-0.958,23.715]],"o":[[13.916,-19.176],[0,0]],"v":[[-53.655,28.064],[-23.132,-39.783]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":202,"s":[{"i":[[-1.928,33.55],[-0.958,23.715]],"o":[[1.061,-18.465],[0,0]],"v":[[-34.158,23.496],[-31.317,-42.866]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":207,"s":
[{"i":[[13.057,30.965],[-0.958,23.715]],"o":[[-7.29,-17.288],[0,0]],"v":[[-16.067,19.93],[-23.319,-42.674]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":212,"s":[{"i":[[-19.739,27.198],[-0.958,23.715]],"o":[[13.916,-19.176],[0,0]],"v":[[-53.655,28.064],[-23.132,-39.783]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":232,"s":[{"i":[[-1.928,33.55],[-0.958,23.715]],"o":[[1.061,-18.465],[0,0]],"v":[[-34.158,23.496],[-31.317,-42.866]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":237,"s":[{"i":[[13.057,30.965],[-0.958,23.715]],"o":[[-7.29,-17.288],[0,0]],"v":[[-16.067,19.93],[-23.319,-42.674]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":242,"s":[{"i":[[-19.739,27.198],[-0.958,23.715]],"o":[[13.916,-19.176],[0,0]],"v":[[-53.655,28.064],[-23.132,-39.783]],"c":false}]},{"t":262,"s":[{"i":[[-1.928,33.55],[-0.958,23.715]],"o":[[1.061,-18.465],[0,0]],"v":[[-34.158,23.496],[-31.317,-42.866]],"c":false}]}]},"nm":"Path 5","hd":false},{"ty":"st","c":{"a":0,"k":[0.749019622803,0.721568644047,0.482352942228,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[306.046,432.789]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":16,"ty":4,"nm":"Layer 27","parent":14,"sr":1,"ks":{"r":{"a":0,"k":-49.27},"p":{"a":0,"k":[67.398,72.639,0]},"a":{"a":0,"k":[307.396,311.699,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":32,"s":[{"i":[[15.679,7.33],[2.724,-8.945],[29.234,-24.482],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-9.712,31.891],[13.112,15.425],[36.667,-31.413],[-6.041,-43.501]],"v":[[3.085,-76.044],[-22.269,-51.34],[-65.202,64.285],[2.214,91.838],[36.392,-20.368]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":52,"s":[{"i":[[15.679,7.33],[9.521,-25.774],[-2.436,-38.309],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-11.552,31.272],[13.112,15.425],[-4.094,-41.591],[-6.041,-43.501]],"v":[[3.085,-76.044],[-35.172,-61.266],[-45.699,61.189],[62.514,112.114],[47.079,-20.207]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":57,"s":[{"i":[[15.679,7.33],[9.521,-25.774],[-15.864,-21.822],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-11.552,31.272],[13.112,15.425],[-29.069,-28.363],[-6.041,-43.501]],"v":[[3.085,-76.044],[-35.172,-61.266],[-27.984,60.794],[93.152,80.082],[47.079,-20.207]],"c":true}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":62,"s":[{"i":[[15.679,7.33],[2.724,-8.945],[29.234,-24.482],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-9.712,31.891],[13.112,15.425],[36.667,-31.413],[-6.041,-43.501]],"v":[[3.085,-76.044],[-22.269,-51.34],[-65.202,64.285],[2.214,91.838],[36.392,-20.368]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":82,"s":[{"i":[[15.679,7.33],[9.521,-25.774],[-2.436,-38.309],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-11.552,31.272],[13.112,15.425],[-4.094,-41.591],[-6.041,-43.501]],"v":[[3.085,-76.044],[-35.172,-61.266],[-45.699,61.189],[62.514,112.114],[47.079,-20.207]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":87,"s":[{"i":[[15.679,7.33],[9.521,-25.774],[-15.864,-21.822],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-11.552,31.272],[13.112,15.425],[-29.069,-28.363],[-6.041
,-43.501]],"v":[[3.085,-76.044],[-35.172,-61.266],[-27.984,60.794],[93.152,80.082],[47.079,-20.207]],"c":true}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":92,"s":[{"i":[[15.679,7.33],[2.724,-8.945],[29.234,-24.482],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-9.712,31.891],[13.112,15.425],[36.667,-31.413],[-6.041,-43.501]],"v":[[3.085,-76.044],[-22.269,-51.34],[-65.202,64.285],[2.214,91.838],[36.392,-20.368]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":112,"s":[{"i":[[15.679,7.33],[9.521,-25.774],[-2.436,-38.309],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-11.552,31.272],[13.112,15.425],[-4.094,-41.591],[-6.041,-43.501]],"v":[[3.085,-76.044],[-35.172,-61.266],[-45.699,61.189],[62.514,112.114],[47.079,-20.207]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":117,"s":[{"i":[[15.679,7.33],[9.521,-25.774],[-15.864,-21.822],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-11.552,31.272],[13.112,15.425],[-29.069,-28.363],[-6.041,-43.501]],"v":[[3.085,-76.044],[-35.172,-61.266],[-27.984,60.794],[93.152,80.082],[47.079,-20.207]],"c":true}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":122,"s":[{"i":[[15.679,7.33],[2.724,-8.945],[29.234,-24.482],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-9.712,31.891],[13.112,15.425],[36.667,-31.413],[-6.041,-43.501]],"v":[[3.085,-76.044],[-22.269,-51.34],[-65.202,64.285],[2.214,91.838],[36.392,-20.368]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":142,"s":[{"i":[[15.679,7.33],[9.521,-25.774],[-2.436,-38.309],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-11.552,31.272],[13.112,15.425],[-4.094,-41.591],[-6.041,-43.501]],"v":[[3.085,-76.044],[-35.172,-61.266],[-45.699,61.189],[62.514,112.114],[47.079,-20.207]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":147,"s":[{"i":[[15.679,7.33],[9.521,-25.774],[-15.864,-21.822],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-11.552,31.272],[13.112,15.425],[-29.069,-28.363],[-6.041,-43.501]],"v":[[3.085,-76.044],[-35.172,-61.266],[-27.984,60.794],[93.152,80.082],[47.079,-20.207]],"c":true}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":152,"s":[{"i":[[15.679,7.33],[2.724,-8.945],[29.234,-24.482],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-9.712,31.891],[13.112,15.425],[36.667,-31.413],[-6.041,-43.501]],"v":[[3.085,-76.044],[-22.269,-51.34],[-65.202,64.285],[2.214,91.838],[36.392,-20.368]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":172,"s":[{"i":[[15.679,7.33],[9.521,-25.774],[-2.436,-38.309],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-11.552,31.272],[13.112,15.425],[-4.094,-41.591],[-6.041,-43.501]],"v":[[3.085,-76.044],[-35.172,-61.266],[-45.699,61.189],[62.514,112.114],[47.079,-20.207]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":177,"s":[{"i":[[15.679,7.33],[9.521,-25.774],[-15.864,-21.822],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-11.552,31.272],[13.112,15.425],[-29.069,-28.363],[-6.041,-43.501]],"v":[[3.085,-76.044],[-35.172,-61.266],[-27.984,60.794],[93.152,80.082],[47.079,-20.207]],"c":true}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":182,"s":[{"i":[[15.679,7.33],[2.724,-8.945],[29.234,-24.482],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-9.712,31.891],[13.112,15.425],[36.667,-31.413],[-6.041,-43.501]],"v":[[3.085,-76.044],[-22.269,-51.34],[-65.202,64.285],[2.214,91.838],[36.392,-20.368]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":202,"s":[{"i":[[15.679,7.33],[9.5
21,-25.774],[-2.436,-38.309],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-11.552,31.272],[13.112,15.425],[-4.094,-41.591],[-6.041,-43.501]],"v":[[3.085,-76.044],[-35.172,-61.266],[-45.699,61.189],[62.514,112.114],[47.079,-20.207]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":207,"s":[{"i":[[15.679,7.33],[9.521,-25.774],[-15.864,-21.822],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-11.552,31.272],[13.112,15.425],[-29.069,-28.363],[-6.041,-43.501]],"v":[[3.085,-76.044],[-35.172,-61.266],[-27.984,60.794],[93.152,80.082],[47.079,-20.207]],"c":true}]},{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":212,"s":[{"i":[[15.679,7.33],[2.724,-8.945],[29.234,-24.482],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-9.712,31.891],[13.112,15.425],[36.667,-31.413],[-6.041,-43.501]],"v":[[3.085,-76.044],[-22.269,-51.34],[-65.202,64.285],[2.214,91.838],[36.392,-20.368]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":232,"s":[{"i":[[15.679,7.33],[9.521,-25.774],[-2.436,-38.309],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-11.552,31.272],[13.112,15.425],[-4.094,-41.591],[-6.041,-43.501]],"v":[[3.085,-76.044],[-35.172,-61.266],[-45.699,61.189],[62.514,112.114],[47.079,-20.207]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":237,"s":[{"i":[[15.679,7.33],[9.521,-25.774],[-15.864,-21.822],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-11.552,31.272],[13.112,15.425],[-29.069,-28.363],[-6.041,-43.501]],"v":[[3.085,-76.044],[-35.172,-61.266],[-27.984,60.794],[93.152,80.082],[47.079,-20.207]],"c":true}]},{"t":242,"s":[{"i":[[15.679,7.33],[2.724,-8.945],[29.234,-24.482],[-24.681,11.569],[3.3,23.762]],"o":[[-14.861,-6.948],[-9.712,31.891],[13.112,15.425],[36.667,-31.413],[-6.041,-43.501]],"v":[[3.085,-76.044],[-22.269,-51.34],[-65.202,64.285],[2.214,91.838],[36.392,-20.368]],"c":true}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.749019622803,0.721568644047,0.482352942228,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0.972549021244,0.631372570992,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[307.396,387.743]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 2","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":17,"ty":4,"nm":"eye 
2","parent":21,"sr":1,"ks":{"r":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":130,"s":[-14.275]},{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":150,"s":[-18.825]},{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":220,"s":[-18.825]},{"t":240,"s":[-14.275]}]},"p":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":130,"s":[73.221,-37.839,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":150,"s":[42.544,-36.499,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":220,"s":[42.544,-36.499,0],"to":[0,0,0],"ti":[0,0,0]},{"t":240,"s":[73.221,-37.839,0]}]},"s":{"a":1,"k":[{"i":{"x":[0.4,0.4,0.4],"y":[1,1,1]},"o":{"x":[0.6,0.6,0.6],"y":[0,0,0]},"t":130,"s":[100,100,100]},{"i":{"x":[0.4,0.4,0.4],"y":[1,1,1]},"o":{"x":[0.6,0.6,0.6],"y":[0,0,0]},"t":150,"s":[70,100,100]},{"i":{"x":[0.4,0.4,0.4],"y":[1,1,1]},"o":{"x":[0.6,0.6,0.6],"y":[0,0,0]},"t":220,"s":[70,100,100]},{"t":240,"s":[100,100,100]}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[10.105,-12.856],[-1.443,24.985]],"o":[[-14.223,18.095],[1.403,-24.284]],"v":[[22.43,16.471],[-25.239,0.304]],"c":false}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0,0,0,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":12},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":18,"ty":4,"nm":"eye","parent":21,"sr":1,"ks":{"r":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":130,"s":[0]},{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":150,"s":[0]},{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":220,"s":[0]},{"t":240,"s":[0]}]},"p":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":130,"s":[-74.182,-41.257,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":0.4},"o":{"x":0.6,"y":0.6},"t":150,"s":[-80.128,-37.35,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":220,"s":[-80.128,-37.35,0],"to":[0,0,0],"ti":[0,0,0]},{"t":240,"s":[-74.182,-41.257,0]}]},"s":{"a":1,"k":[{"i":{"x":[0.4,0.4,0.4],"y":[1,1,1]},"o":{"x":[0.6,0.6,0.6],"y":[0,0,0]},"t":130,"s":[100,100,100]},{"i":{"x":[0.4,0.4,0.4],"y":[1,1,1]},"o":{"x":[0.6,0.6,0.6],"y":[0,0,0]},"t":150,"s":[100,100,100]},{"i":{"x":[0.4,0.4,0.4],"y":[1,1,1]},"o":{"x":[0.6,0.6,0.6],"y":[0,0,0]},"t":220,"s":[100,100,100]},{"t":240,"s":[100,100,100]}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[8.193,-14.152],[3.791,17.21]],"o":[[-14.566,25.159],[-5.232,-23.756]],"v":[[26.002,7.53],[-25.189,4.249]],"c":false}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0,0,0,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":12},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false},{"ty":"gr","it":[{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
2","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":19,"ty":4,"nm":"mouth","parent":21,"sr":1,"ks":{"p":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":60,"s":[-5.3,14.686,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":72,"s":[7.991,13.664,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":90,"s":[7.991,13.664,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":102,"s":[-5.3,14.686,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":132,"s":[-5.3,14.686,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":144,"s":[7.991,13.664,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":180,"s":[7.991,13.664,0],"to":[0,0,0],"ti":[0,0,0]},{"t":192,"s":[-5.3,14.686,0]}]},"s":{"a":1,"k":[{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":60,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":68.182,"s":[140,140,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":76.363,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":84.545,"s":[140,140,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":92.727,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":100.908,"s":[140,140,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":109.092,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":117.273,"s":[140,140,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":125.455,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":133.637,"s":[140,140,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":141.818,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":150,"s":[140,140,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":158.182,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":166.363,"s":[140,140,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":174.545,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":182.727,"s":[140,140,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":190.908,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":199.092,"s":[140,140,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":207.273,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":215.455,"s":[140,140,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":223.637,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":231.818,"s":[140,140,100]},{"t":240,"s":[100,100,100]}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[-0.592,-2.673],[3.638,-0.80
5],[0.592,2.673],[-3.638,0.805]],"o":[[0.592,2.673],[-3.638,0.805],[-0.592,-2.673],[3.638,-0.805]],"v":[[6.587,-1.458],[1.072,4.841],[-6.587,1.458],[-1.072,-4.841]],"c":true}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.490196079016,0.035294119269,0.035294119269,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.490196079016,0.035294119269,0.035294119269,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":20,"ty":4,"nm":"beak_bl","parent":21,"sr":1,"ks":{"p":{"a":0,"k":[-8.202,-11.28,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":60,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-7.047,12.289],[7.043,-12.289]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":68.201,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-7.047,12.289],[7.043,-12.289]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":76.4,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-7.047,12.289],[7.043,-12.289]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":84.6,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-7.047,12.289],[7.043,-12.289]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":92.799,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-7.047,12.289],[7.043,-12.289]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":101,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-7.047,12.289],[7.043,-12.289]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":109.201,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-7.047,12.289],[7.043,-12.289]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":117.4,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-7.047,12.289],[7.043,-12.289]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":125.6,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-7.047,12.289],[7.043,-12.289]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":133.799,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-7.047,12.289],[7.043,-12.289]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":142,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-10.849,13.904],[1.757,-10.439]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0},"t":150,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-8.519,15.824],[4.086,-8.518]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":158,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-10.849,13.904],[1.757,-10.439]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0},"t":166.223,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-8.519,15.824],[4.086,-8.518]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":174.445,"s":[{"i":[[-9.
069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-10.849,13.904],[1.757,-10.439]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0},"t":182.666,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-8.519,15.824],[4.086,-8.518]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":190.889,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-10.849,13.904],[1.757,-10.439]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0},"t":199.111,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-8.519,15.824],[4.086,-8.518]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":207.334,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-10.849,13.904],[1.757,-10.439]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0},"t":215.555,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-8.519,15.824],[4.086,-8.518]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":223.777,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-10.849,13.904],[1.757,-10.439]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0},"t":232,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-8.519,15.824],[4.086,-8.518]],"c":false}]},{"t":240,"s":[{"i":[[-9.069,5.866],[0.172,8.401]],"o":[[9.069,-5.866],[-0.172,-8.401]],"v":[[-7.047,12.289],[7.043,-12.289]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[1,0.686274528503,0.482352942228,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":106,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":118,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":130,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":142,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":154,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":166,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":178,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":190,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":202,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":214,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":226,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":238,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":250,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":262,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":274,"s":[5]},{"t":286,"s":[5]}]},"e":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":106,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":118,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":130,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":142,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":154,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":166,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":178,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":190,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":202,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":214,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":226,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":238,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":250,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":262,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":274,"s":[100]},{"t":286,"s":[100]}]},"o":{"a":0,"k":0},"m":1,"nm":"Trim Paths 
1","hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":21,"ty":4,"nm":"beak","parent":24,"sr":1,"ks":{"r":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":54,"s":[-10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":69,"s":[-10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":84,"s":[10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":99,"s":[10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":114,"s":[-10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":129,"s":[-10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":159,"s":[10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":174,"s":[-10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":189,"s":[-10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":204,"s":[10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":219,"s":[10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":234,"s":[-10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":249,"s":[-10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":264,"s":[10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":279,"s":[10]},{"t":294,"s":[-10]}]},"p":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":130,"s":[25.534,63.017,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":0.4},"o":{"x":0.6,"y":0.6},"t":150,"s":[94.543,53.944,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":220,"s":[94.543,53.944,0],"to":[0,0,0],"ti":[0,0,0]},{"t":240,"s":[25.534,63.017,0]}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":60,"s":[{"i":[[8.26,10.634],[13.536,-1.651],[15.354,-13.84],[-25.948,1.81]],"o":[[-12.227,-15.741],[-12.819,1.564],[-11.83,10.664],[20.482,-1.429]],"v":[[16.289,-3.677],[-1.387,-37.531],[-25.717,-3],[-2.689,34.365]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":68.182,"s":[{"i":[[27.697,16.739],[13.536,-1.651],[15.526,-13.647],[-35.005,-0.37]],"o":[[-17.059,-10.31],[-12.819,1.564],[-21.253,18.68],[25.828,0.273]],"v":[[18.826,-6.032],[-1.387,-37.531],[-22.316,-3.677],[-0.244,40.458]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":76.363,"s":[{"i":[[8.26,10.634],[13.536,-1.651],[15.354,-13.84],[-25.948,1.81]],"o":[[-12.227,-15.741],[-12.819,1.564],[-11.83,10.664],[20.482,-1.429]],"v":[[16.289,-3.677],[-1.387,-37.531],[-25.717,-3],[-2.689,34.365]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":84.545,"s":[{"i":[[27.697,16.739],[13.536,-1.651],[15.526,-13.647],[-35.005,-0.37]],"o":[[-17.059,-10.31],[-12.819,1.564],[-21.253,18.68],[25.828,0.273]],"v":[[18.826,-6.032],[-1.387,-37.531],[-22.316,-3.677],[-0.244,40.458]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":92.727,"s":[{"i":[[8.26,10.634],[13.536,-1.651],[15.354,-13.84],[-25.948,1.81]],"o":[[-12.227,-15.741],[-12.819,1.564],[-11.83,10.664],[20.482,-1.429]],"v":[[16.289,-3.677],[-1.387,-37.531],[-25.717,-3],[-2.689,34.365]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":100.908,"s":[{"i":[[27.697,16.739],[13.536,-1.651],[15.526,-13.647],[-35.005,-0.37]],"o":[[-17.059,-10.31],[-12.819,1.564],[-21.253,18.68],[25.828,0.273]],"v":[[18.826,-6.032],[-1.387,-37.531],[-22.316,-3.677],[-0.244,40.458]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":109.092,"s":[{"i":[[8.26,10.634],[13.536,-1.651],[15.354,-13.84],[-25.948,1.81]],"o":[[-12.227,-15.
741],[-12.819,1.564],[-11.83,10.664],[20.482,-1.429]],"v":[[16.289,-3.677],[-1.387,-37.531],[-25.717,-3],[-2.689,34.365]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":117.273,"s":[{"i":[[27.697,16.739],[13.536,-1.651],[15.526,-13.647],[-35.005,-0.37]],"o":[[-17.059,-10.31],[-12.819,1.564],[-21.253,18.68],[25.828,0.273]],"v":[[18.826,-6.032],[-1.387,-37.531],[-22.316,-3.677],[-0.244,40.458]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":125.455,"s":[{"i":[[8.26,10.634],[13.536,-1.651],[15.354,-13.84],[-25.948,1.81]],"o":[[-12.227,-15.741],[-12.819,1.564],[-11.83,10.664],[20.482,-1.429]],"v":[[16.289,-3.677],[-1.387,-37.531],[-25.717,-3],[-2.689,34.365]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":133.637,"s":[{"i":[[27.697,16.739],[13.536,-1.651],[15.526,-13.647],[-35.005,-0.37]],"o":[[-17.059,-10.31],[-12.819,1.564],[-21.253,18.68],[25.828,0.273]],"v":[[18.826,-6.032],[-1.387,-37.531],[-22.316,-3.677],[-0.244,40.458]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":142,"s":[{"i":[[8.26,10.634],[13.536,-1.651],[15.354,-13.84],[-25.948,1.81]],"o":[[-12.227,-15.741],[-12.819,1.564],[-11.83,10.664],[20.482,-1.429]],"v":[[16.289,-3.677],[-8.809,-36.352],[-25.717,-3],[-2.689,34.365]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":150,"s":[{"i":[[27.697,16.739],[13.536,-1.651],[15.526,-13.647],[-35.005,-0.37]],"o":[[-17.059,-10.31],[-12.819,1.564],[-21.253,18.68],[25.828,0.273]],"v":[[18.826,-6.032],[-8.628,-36.255],[-22.316,-3.677],[-0.244,40.458]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":158.182,"s":[{"i":[[8.26,10.634],[13.536,-1.651],[15.354,-13.84],[-25.948,1.81]],"o":[[-12.227,-15.741],[-12.819,1.564],[-11.83,10.664],[20.482,-1.429]],"v":[[16.289,-3.677],[-8.809,-36.352],[-25.717,-3],[-2.689,34.365]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":166,"s":[{"i":[[27.697,16.739],[13.536,-1.651],[15.526,-13.647],[-35.005,-0.37]],"o":[[-17.059,-10.31],[-12.819,1.564],[-21.253,18.68],[25.828,0.273]],"v":[[18.826,-6.032],[-8.628,-36.255],[-22.316,-3.677],[-0.244,40.458]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":174.545,"s":[{"i":[[8.26,10.634],[13.536,-1.651],[15.354,-13.84],[-25.948,1.81]],"o":[[-12.227,-15.741],[-12.819,1.564],[-11.83,10.664],[20.482,-1.429]],"v":[[16.289,-3.677],[-8.809,-36.352],[-25.717,-3],[-2.689,34.365]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":182.727,"s":[{"i":[[27.697,16.739],[13.536,-1.651],[15.526,-13.647],[-35.005,-0.37]],"o":[[-17.059,-10.31],[-12.819,1.564],[-21.253,18.68],[25.828,0.273]],"v":[[18.826,-6.032],[-8.628,-36.255],[-22.316,-3.677],[-0.244,40.458]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":190.908,"s":[{"i":[[8.26,10.634],[13.536,-1.651],[15.354,-13.84],[-25.948,1.81]],"o":[[-12.227,-15.741],[-12.819,1.564],[-11.83,10.664],[20.482,-1.429]],"v":[[16.289,-3.677],[-8.809,-36.352],[-25.717,-3],[-2.689,34.365]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":199.092,"s":[{"i":[[27.697,16.739],[13.536,-1.651],[15.526,-13.647],[-35.005,-0.37]],"o":[[-17.059,-10.31],[-12.819,1.564],[-21.253,18.68],[25.828,0.273]],"v":[[18.826,-6.032],[-8.628,-36.255],[-22.316,-3.677],[-0.244,40.458]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":207.273,"s":[{"i":[[8.26,10.634],[13.536,-1.651],[15.354,-13.84],[-25.948,1.81]],"o":[[-12.227,-15.741],[-12.819,1.564],[-11.83,10.664],[20.482,-1.429]],"v":[[16.289,-3.677],[-8.809,-36.352],[-25.717,-3],[-2.689,34.365]],"c":true}]},{"i":{"x":0.6
67,"y":1},"o":{"x":0.333,"y":0},"t":215.455,"s":[{"i":[[27.697,16.739],[13.536,-1.651],[15.526,-13.647],[-35.005,-0.37]],"o":[[-17.059,-10.31],[-12.819,1.564],[-21.253,18.68],[25.828,0.273]],"v":[[18.826,-6.032],[-8.628,-36.255],[-22.316,-3.677],[-0.244,40.458]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":223.637,"s":[{"i":[[8.26,10.634],[13.536,-1.651],[15.354,-13.84],[-25.948,1.81]],"o":[[-12.227,-15.741],[-12.819,1.564],[-11.83,10.664],[20.482,-1.429]],"v":[[16.289,-3.677],[-8.809,-36.352],[-25.717,-3],[-2.689,34.365]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":231.818,"s":[{"i":[[27.697,16.739],[13.536,-1.651],[15.526,-13.647],[-35.005,-0.37]],"o":[[-17.059,-10.31],[-12.819,1.564],[-21.253,18.68],[25.828,0.273]],"v":[[18.826,-6.032],[-8.628,-36.255],[-22.316,-3.677],[-0.244,40.458]],"c":true}]},{"t":240,"s":[{"i":[[8.26,10.634],[13.536,-1.651],[15.354,-13.84],[-25.948,1.81]],"o":[[-12.227,-15.741],[-12.819,1.564],[-11.83,10.664],[20.482,-1.429]],"v":[[16.289,-3.677],[-1.387,-37.531],[-25.717,-3],[-2.689,34.365]],"c":true}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.811764717102,0.207843139768,0.007843137719,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":1,"lj":1,"ml":10,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0.364705890417,0.121568627656,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":22,"ty":4,"nm":"head_bl2","parent":24,"sr":1,"ks":{"p":{"a":0,"k":[110.767,-12.191,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[0,0],[0,-32.545]],"o":[[25.697,23.283],[0,26.097]],"v":[[-20.895,-44.053],[20.895,44.053]],"c":false}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.988235294819,0.933333337307,0.129411771894,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":84,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":192,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":204,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[5]},{"t":240,"s":[5]}]},"e":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":84,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":192,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":204,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[100]},{"t":240,"s":[100]}]},"o":{"a":0,"k":0},"m":1,"nm":"Trim Paths 1","hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":23,"ty":4,"nm":"head_bl1","parent":24,"sr":1,"ks":{"p":{"a":0,"k":[-71.747,-52.879,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[0,0],[-66.529,16.966]],"o":[[0.491,-1.153],[0,0]],"v":[[-44.782,38.184],[44.782,-38.184]],"c":false}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":84,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[0]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":192,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":204,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[0]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[5]},{"t":240,"s":[5]}]},"e":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":84,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[95]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":192,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":204,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[95]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[100]},{"t":240,"s":[100]}]},"o":{"a":0,"k":0},"m":1,"nm":"Trim Paths 1","hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":24,"ty":4,"nm":"head","parent":27,"sr":1,"ks":{"p":{"a":0,"k":[14.771,-30.59,0]},"a":{"a":0,"k":[0,119.642,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[0,0],[0,35.832],[85.15,0],[0,-76.12],[-26.176,-21.683]],"o":[[26.507,-21.855],[0,-76.12],[-85.149,0],[0,35.929],[0,0]],"v":[[112.275,119.642],[154.176,31.41],[0,-119.642],[-154.176,31.41],[-112.529,119.431]],"c":false}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.980392158031,0.564705908298,0.086274512112,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":10},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0.835294127464,0.152941182256,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":25,"ty":3,"nm":"NULL 
CONTROL","sr":1,"ks":{"o":{"a":0,"k":0},"p":{"a":0,"k":[287.497,390.228,0]},"a":{"a":0,"k":[60,60,0]},"s":{"a":1,"k":[{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":60,"s":[102,98,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":75,"s":[98,102,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":90,"s":[102,98,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":105,"s":[98,102,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":120,"s":[102,98,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":135,"s":[98,102,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":150,"s":[102,98,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":165,"s":[98,102,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":180,"s":[102,98,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":195,"s":[98,102,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":210,"s":[102,98,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":225,"s":[98,102,100]},{"t":240,"s":[102,98,100]}]}},"ao":0,"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":26,"ty":4,"nm":"head_bl3","parent":27,"sr":1,"ks":{"p":{"a":0,"k":[-86.442,3.614,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.14,"y":1},"o":{"x":0.86,"y":0},"t":130,"s":[{"i":[[18.994,8.348],[0.775,3.247]],"o":[[-15.978,-7.022],[-1.857,-7.779]],"v":[[-15.224,69.036],[-38.349,46.522]],"c":false}]},{"i":{"x":0.14,"y":1},"o":{"x":0.86,"y":0},"t":150,"s":[{"i":[[18.994,8.348],[0.775,3.247]],"o":[[-15.978,-7.022],[-1.857,-7.779]],"v":[[-15.224,69.036],[-38.349,46.522]],"c":false}]},{"i":{"x":0.14,"y":1},"o":{"x":0.86,"y":0},"t":220,"s":[{"i":[[18.994,8.348],[0.775,3.247]],"o":[[-15.978,-7.022],[-1.857,-7.779]],"v":[[-15.224,69.036],[-38.349,46.522]],"c":false}]},{"t":240,"s":[{"i":[[18.994,8.348],[0.775,3.247]],"o":[[-15.978,-7.022],[-1.857,-7.779]],"v":[[-15.224,69.036],[-38.349,46.522]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.988235353956,0.933333393172,0.129411764706,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":106,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":118,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":130,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":142,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":154,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":166,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":178,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":190,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":202,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":214,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":226,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":238,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":250,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":262,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":274,"s":[5]},{"t":286,"s":[5]}]},"e":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":106,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":118,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":130,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":142,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":154,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":166,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":178,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":190,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":202,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":214,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":226,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":238,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":250,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":262,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":274,"s":[100]},{"t":286,"s":[100]}]},"o":{"a":0,"k":0},"m":1,"nm":"Trim Paths 
1","hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":27,"ty":4,"nm":"body","parent":25,"sr":1,"ks":{"p":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":130,"s":[60,60,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":0.4},"o":{"x":0.6,"y":0.6},"t":150,"s":[-40,60,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":220,"s":[-40,60,0],"to":[0,0,0],"ti":[0,0,0]},{"t":240,"s":[60,60,0]}]},"a":{"a":0,"k":[0,102.595,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":130,"s":[{"i":[[62.122,0],[-6.87,62.153],[-14.913,32.929],[-70.318,0],[-16.073,-54.605],[1.199,-16.579]],"o":[[-53.421,0],[1.145,-10.363],[28.624,-63.204],[61.046,0],[4.134,14.046],[-4.256,58.848]],"v":[[-6.239,104.556],[-142.51,26.493],[-127.984,-26.672],[10.638,-102.595],[140.488,-14.198],[161.969,32.829]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":150,"s":[{"i":[[62.122,0],[-10.354,65.601],[-14.913,32.929],[-70.318,0],[-16.889,-53.203],[1.267,-17.522]],"o":[[-53.421,0],[1.626,-10.299],[28.624,-63.204],[60.152,0],[4.659,14.678],[-4.256,58.848]],"v":[[-14.912,102.595],[-144.658,21.101],[-117.887,-26.672],[10.638,-102.595],[139.759,-16.581],[145.132,31.849]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":220,"s":[{"i":[[62.122,0],[-10.354,65.601],[-14.913,32.929],[-70.318,0],[-16.889,-53.203],[1.267,-17.522]],"o":[[-53.421,0],[1.626,-10.299],[28.624,-63.204],[60.152,0],[4.659,14.678],[-4.256,58.848]],"v":[[-14.912,102.595],[-144.658,21.101],[-117.887,-26.672],[10.638,-102.595],[139.759,-16.581],[145.132,31.849]],"c":true}]},{"t":240,"s":[{"i":[[62.122,0],[-6.87,62.153],[-14.913,32.929],[-70.318,0],[-16.073,-54.605],[1.199,-16.579]],"o":[[-53.421,0],[1.145,-10.363],[28.624,-63.204],[61.046,0],[4.134,14.046],[-4.256,58.848]],"v":[[-6.239,104.556],[-142.51,26.493],[-127.984,-26.672],[10.638,-102.595],[140.488,-14.198],[161.969,32.829]],"c":true}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.980392158031,0.564705908298,0.086274512112,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":10},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0.835294127464,0.152941182256,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0}]}],"layers":[{"ddd":0,"ind":1,"ty":0,"nm":"_043_CLEAN","refId":"comp_0","sr":1,"ks":{"p":{"a":0,"k":[256,256,0]},"a":{"a":0,"k":[256,256,0]}},"ao":0,"w":512,"h":512,"ip":133,"op":313,"st":73,"bm":0},{"ddd":0,"ind":2,"ty":0,"nm":"_043_CLEAN","refId":"comp_0","sr":1,"ks":{"p":{"a":0,"k":[256,256,0]},"a":{"a":0,"k":[256,256,0]}},"ao":0,"w":512,"h":512,"ip":-47,"op":133,"st":-107,"bm":0}]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/values-zh-rCN/strings_neko.xml b/TMessagesProj/src/main/res/values-zh-rCN/strings_neko.xml index 1f4cda8903..9e219369d5 100644 --- a/TMessagesProj/src/main/res/values-zh-rCN/strings_neko.xml +++ b/TMessagesProj/src/main/res/values-zh-rCN/strings_neko.xml @@ -192,5 +192,5 @@ Emoji 包已经被应用。 使用 OSMDroid 地图 修复 Google 地图在中国的漂移问题 - 在聊天中使用 Telegram 官方翻译 + 在聊天中使用 Telegram 翻译界面 diff --git a/TMessagesProj/src/main/res/values/strings.xml b/TMessagesProj/src/main/res/values/strings.xml index 0629cc23b8..e7b9c06868 100644 --- a/TMessagesProj/src/main/res/values/strings.xml +++ 
b/TMessagesProj/src/main/res/values/strings.xml @@ -148,6 +148,7 @@ No app found to handle this deep link. New conversation + System Default theme is disabled Auto-Night Mode is off Set Emoji Status Change Status @@ -221,7 +222,7 @@ Chat archived. Chats archived. Hide the archive by swiping left on it. - UNDO + Undo Delete from cache Delete and exit Hide @@ -235,6 +236,10 @@ Delete chat Deleted Account Select Chat + Choose Bot + Choose User + Choose Group + Choose Channel Select Chats Forward to... My Groups @@ -266,6 +271,12 @@ Wrong layout? Send Sticker Send GIF + Send Emoji + Copy Emoji + Set as Status + Emoji status set. + Remove Status + Emoji status removed. View Pack Pin to top Sorry, you can only pin %1$s to the top in the main list. More chats can be pinned in Chat Folders. @@ -325,6 +336,14 @@ Message unpinned Tap on the pencil to start a new chat Create a New Group to Import + Create a New Channel For This + No such channels + You don\'t have channels that meet the requirements for this bot + Create a New Group For This + No such groups + You don\'t have groups that meet the requirements for this bot + No such users + No users found that meet the requirements for this bot Import Messages Import Error Invalid file format. @@ -523,6 +542,7 @@ Do you want to join the channel \'%1$s\'? Sorry, this channel is private, so you can’t view its content. Sorry, this channel is private. + Can\'t access the chat Sorry, you can\'t access this chat because you were banned by an admin. Unfortunately, you were banned from participating in public groups. Sorry, this chat is no longer accessible. @@ -606,8 +626,16 @@ What can this user do? Read Messages Send Messages + Send Polls + Send Text Messages Send Media Send Polls + Send Photos + Send Videos + Send Music + Send Files + Send Voice Messages + Send Video Messages Send Stickers & GIFs Embed Links Change Chat Info @@ -615,8 +643,15 @@ Add Users can\'t read can\'t send messages - no media + can\'t send text messages no polls + no media + no photos + no videos + no music + no files + no voice + no round no stickers & GIFs no embed links can\'t change Info @@ -667,6 +702,7 @@ Statistics Add Bot Add bot as admin + Add bot as admin? Add as admin Add bot as Member @@ -989,6 +1025,7 @@ OPEN BOT sponsored recommended + Advertiser Info What are sponsored\nmessages? Unlike other apps, Telegram never uses your private data to target ads. You are seeing this message only because someone chose this public one-to-many channel as a space to promote their messages. This means that no user data is mined or analyzed to display ads, and every user viewing a channel on Telegram sees the same sponsored message. Unlike other apps, Telegram doesn\'t track whether you tapped on a sponsored message and doesn\'t profile you based on your activity. We also prevent external links in sponsored messages to ensure that third parties can’t spy on our users. We believe that everyone has the right to privacy, and technological platforms should respect that. @@ -1199,21 +1236,43 @@ Are you sure you want to stop recording and discard your video message? 
Discard The admins of this group have restricted you from sending media here until %1$s + The admins of this group have restricted you from sending audio here until %1$s + The admins of this group have restricted you from sending photos here until %1$s + The admins of this group have restricted you from sending video messages here until %1$s + The admins of this group have restricted you from sending voice messages here until %1$s + The admins of this group have restricted you from sending video here until %1$s + The admins of this group have restricted you from sending documents here until %1$s The admins of this group have restricted you from sending inline content here until %1$s The admins of this group have restricted you from sending stickers here until %1$s The admins of this group have restricted you from sending GIFs here until %1$s + The admins of this group have restricted you from sending text messages here until %1$s The admins of this group have restricted you from writing here until %1$s The admins of this group have restricted you from sending media here. + The admins of this group have restricted you from sending audio here. + The admins of this group have restricted you from sending documents here. + The admins of this group have restricted you from sending photos here. + The admins of this group have restricted you from sending video messages here. + The admins of this group have restricted you from sending voice messages here. + The admins of this group have restricted you from sending video here. The admins of this group have restricted you from sending inline content here The admins of this group have restricted you from sending stickers. The admins of this group have restricted you from sending GIFs. + The admins of this group have restricted you from sending text messages. The admins of this group have restricted you from writing here. **%1$s** doesn\'t accept voice messages. **%1$s** doesn\'t accept video messages. + Sending plain text isn\'t allowed in this group. + Sending documents isn\'t allowed in this group. Sending media isn\'t allowed in this group. + Sending music isn\'t allowed in this group. + Sending photos isn\'t allowed in this group. + Sending video isn\'t allowed in this group. + Sending voice isn\'t allowed in this group. + Sending round video isn\'t allowed in this group. Inline bots aren\'t allowed in this group. Stickers aren\'t allowed in this group. GIFs aren\'t allowed in this group. + Text messages aren\'t allowed in this group. Writing messages isn\'t allowed in this group. Message preview Preview message @@ -1318,6 +1377,8 @@ Importing file is too large. Failed to upload file. Audio will play at normal speed. + Audio will play at 1.5x speed. + Audio will play at %1$sx speed. Audio will play two times faster. Message copied to clipboard. Delivered @@ -1703,6 +1764,8 @@ An error occurred. Stickers + Settings + Manage your sticker packs, emoji and reactions. Loop Animated Stickers Animated stickers will play in chat continuously. 
Animated Stickers @@ -1724,6 +1787,11 @@ Add %1$d emoji Add %1$d emoji Add %1$d emoji + Add emoji pack + Add %1$d emoji packs + Add %1$d emoji packs + Add %1$d emoji packs + Add %1$d emoji packs Remove emoji Remove %1$d emoji Remove %1$d emoji @@ -1756,6 +1824,7 @@ More Stickers Delete from Favorites Delete from Recent + Remove from Recent Clear recent stickers Add to Masks Stickers not found @@ -2103,6 +2172,8 @@ Pinned Messages Translate Messages Show Translate Button + Translate Entire Chat + Subscribe to **Telegram Premium** to translate all chat messages at once. Do Not Translate %1$d Languages %1$d Language @@ -2200,6 +2271,10 @@ Voice messages are tiny, so they\'re always downloaded automatically. No media No GIFs + Auto-Download Settings + You can change your auto-download settings for media to reduce data usage when connected via mobile network. + You can change your auto-download settings for media to reduce data usage when connected to Wi-Fi. + You can change your auto-download settings for media to reduce data usage when roaming. Reset Auto-Download Settings Reset settings Are you sure you want to reset auto-download settings? @@ -2212,6 +2287,7 @@ The account was hidden by the user Auto-play media Raise to Speak + Raise to Listen Save to Gallery Sound muted Edit name @@ -2272,6 +2348,7 @@ Use system default emoji Telegram for Android %1$s Debug Menu + Long press again for debug menu Send Logs Send Last Logs Clear Logs @@ -2291,8 +2368,6 @@ Reset Imported Contacts Clear sent media cache Call settings - Enable forced RTMP Stream Flag - Disable forced RTMP Stream Flag Clear WebView cache Enable WebView debug Disable WebView debug @@ -2370,6 +2445,8 @@ Clear Cache Clear Selected Free up disk space on your device; your media will stay in the cloud. + Clearing cache… + Please keep this window open while Telegram is clearing your cache. Change Phone Number Move your account, chats and media to a new number. Your phone number has been successfully changed to %1$s. @@ -2419,6 +2496,7 @@ Show sender\’s name Hide sender\’s name Chat list swipe gesture + Set up which action you want to perform each time you swipe to left in the chat list. Pin Read Archive @@ -2456,7 +2534,7 @@ Profile Photos Miscellaneous Photos - Voice/Video messages + Voice messages Videos Music GIFs @@ -2945,6 +3023,15 @@ Fast Very fast Normal + Medium + Super Fast + Very fast + Fast + Custom %s + Normal + Slow + Very slow + Long tap for more speed values. Show as list Show as grid Show all media @@ -3233,6 +3320,7 @@ All media will stay in the Telegram cloud and can be re-downloaded if you need it again. Data Usage Storage Path + All Mobile Wi-Fi Roaming @@ -3241,14 +3329,44 @@ Received Bytes sent Bytes received + Files sent + Files received + **%s** files sent + **%s** file sent + **%s** files sent + **%s** files sent + **%s** files sent + **%s** files sent + **%s** files received + **%s** file received + **%s** files received + **%s** files received + **%s** files received + **%s** files received Files Calls Outgoing calls Incoming calls + %1$d outgoing calls + %1$d outgoing call + %1$d outgoing calls + %1$d outgoing calls + %1$d outgoing calls + %1$d outgoing calls + %1$d incoming calls + %1$d incoming call + %1$d incoming calls + %1$d incoming calls + %1$d incoming calls + %1$d incoming calls Total time Total + Total network usage + Tap on each section for detailed view. 
Reset Statistics Network usage since %1$s + Your network usage since %1$s + No network usage since %1$s Reset statistics Do you want to reset your usage statistics? @@ -3374,6 +3492,7 @@ has no access to messages What can this bot do? START + Start bot RESTART Stop bot Restart bot @@ -3674,10 +3793,22 @@ Sorry, you can\'t ban this user because they are an admin in this group and you are not allowed to demote them. Sorry, the admins of this group have restricted you from sending stickers. Sorry, the admins of this group have restricted you from sending media. + Sorry, the admins of this group have restricted you from sending photos. + Sorry, the admins of this group have restricted you from sending video. Sorry, the admins of this group have restricted you from sending polls. + Sorry, the admins of this group have restricted you from sending voice messages. + Sorry, the admins of this group have restricted you from sending video messages. + Sorry, the admins of this group have restricted you from sending documents. + Sorry, the admins of this group have restricted you from sending music. Sorry, sending stickers is not allowed in this group. + Sorry, sending photos is not allowed in this group. + Sorry, sending video is not allowed in this group. Sorry, sending media is not allowed in this group. Sorry, sending polls is not allowed in this group. + Sorry, sending voice messages is not allowed in this group. + Sorry, sending video messages is not allowed in this group. + Sorry, sending documents is not allowed in this group. + Sorry, sending music is not allowed in this group. Sorry, you can\'t send voice messages to this user because of their privacy settings. Sorry, you can\'t send video messages to this user because of their privacy settings. Unable to forward @@ -4983,6 +5114,8 @@ More options Switch to night theme Switch to day theme + Close Trending stickers + Close Trending emoji Play Pause Download @@ -5144,6 +5277,7 @@ %1$d unread chats %1$d unread chats With new reaction + Choose speed MMM yyyy MMM dd yyyy, h:mm a @@ -5530,6 +5664,7 @@ This action is disabled for all members. Please change it in Group Permissions. This action is enabled for all members. Please change it in Group Permissions. **%1$s** requests to be added as an option to your attachments menu so you can access it from any chat. + Do you want to start app **%1$s** from bot **%2$s**?\n\nIt will be able to access your **IP address** and basic device info. This bot can\'t be added to the attachment menu. This bot is already in your attachment menu. Remove **%1$s** from the attachment menu? @@ -5599,6 +5734,9 @@ Animated Profile Pictures Video avatars animated in chat lists and chats to allow for additional self-expression. Subscribe for %s per month + Subscribe for %s per year + Upgrade for %s per year + Upgrade for %s per year Subscribe Subscription not available **Official app needed** @@ -5751,6 +5889,8 @@ Infinite Reactions Emoji Status Add any of thousands emojis next to your name to display current activity. + Real-Time Translation + Real-time translation of channels and chats into other languages. Microphone for voice messages Built-In Headset @@ -5996,6 +6136,10 @@ Set as My Photo Invite To Telegram You allowed this bot to message you when you added it to your attachment menu. 
+ You shared un1 with un2 + You shared a user with un2 + You shared a chat with un2 + You shared a channel with un2 Hide with spoiler Remove spoiler Update Public Photo @@ -6031,7 +6175,7 @@ View Photo View Video Chats - Media + Media Files Music Voice @@ -6045,9 +6189,197 @@ Open File Play File Lite Mode (Beta) + Power Usage Enable lite mode This mode turns off all visual effects in the app. It is recommended only for those devices that are low on CPU and memory. Delete All Proxies Are you sure you want to delete all proxies? Send as %1$s + Review the list of devices where you are logged into your Telegram account. + Photos + Videos + + Music + Files + Voice Messages + Video Messages + Embed Links + Polls + Use an emoji + Auto-switch proxies + You can configure the timeout for connecting to the nearest active proxy if the current one stops working. + %1$d sec. + Requirements + The user should have a Premium subscription. + The user should not have a Premium subscription. + The channel should be public. + The channel should be private. + The group should be public. + The group should be private. + Bot should be in the channel. + Bot should be in the group. + You should be the owner of the channel. + You should not be the owner of the channel. + You should be the owner of the group. + You should not be the owner of the group. + The group should have topics turned on. + The group should have topics turned off. + You should have these admin rights: + You should have admin rights to + Set Channel Photo + Set Group Photo + Photo Editor + Choose background + Send %1$s to %2$s? + Are you sure you want to send **%1$s** to **%2$s**? + This will also add **%1$s** to **%2$s**. + This will also add **%1$s** to **%2$s** with the following rights: %3$s. + Maximum video size + Automatically save all new media from private chats to your phone\'s gallery. + Automatically save all new media from groups to your phone\'s gallery. + Automatically save all new media from channels to your phone\'s gallery. + Automatically save all new media from this chat to your phone\'s gallery. + Photos + Videos + All videos in private chats less than the selected size will be saved to your gallery. + All videos in groups less than the selected size will be saved to your gallery. + All videos in channels less than the selected size will be saved to your gallery. + All videos from this chat less than the selected size will be saved to your gallery. + Add Exception + Exception + Off + Videos up to %s + The admins of this group only allow to send %s. + Text not allowed + Choose emoji or sticker + Up to %s + Save exception + Edit Exception + Translate To + Detected Language + Don\'t translate %s + Don\'t translate %s + Translate to %s + Translate to %s + Show Original + **%s** is added to the Do Not Translate list. + **%s** is added to the Do Not Translate list. + Translation bar is now hidden for this channel. + Translation bar is now hidden for this group. + Translation bar is now hidden for this chat. + No emoji or stickers found + An internal error occurred. Please try again later. + Upgrade to Premium with a discount of **%1$d%%** + Upgrade to the annual payment plan of Telegram Premium now to enjoy the discount. + Save on your subscription up to **%1$d%%** + Sign up for the annual payment plan for Telegram Premium now to get the discount. 
+ Pause music while recording + Set Profile Photo + Status + Free up to **%s** + Clear storage space on your phone + your current plan + %s file, + Archived chats shown + Archived chats hidden + Muted + Online + Animations + Reduce motion effects to save power. + Low power mode + Please turn off Low Power Mode to change these settings. + Low Power Mode enabled + %s battery remaining + When below %s + Off + On + Always Disabled + Always Enabled + Never reduce power usage and keep all animations even when your battery is low. + Automatically reduce power usage and animations when your battery is below %s. + Always reduce power usage regardless of your battery level. + Power saving options + Autoplay in keyboard + Autoplay in chat + Autoplay in reactions menu + Animated Stickers + Animated Emoji + Animations in Chats + Background rotation + Side menu in topics + Animated spoiler effect + Night theme blur + Zoom animations + Animations in Calls + Autoplay Videos + Autoplay GIFs + Enable Smooth Transitions + You can disable animated transitions between different sections of the apps. + Save Power on Low Battery + Automatically switch to low power usage when your devices is in Battery Saving Mode. + Dynamic Pack Order + Automatically place recently used sticker packs at the front of the panel. + Dynamic Order Off + Sticker packs will no longer be placed first every time you use a sticker. + read %s + **%s** restrict adding them to groups. You can send them an invite link as message instead. + **%d user** restrict adding them to groups. You can send them an invite link as message instead. + **%d users** restrict adding them to groups. You can send them an invite link as message instead. + **%d users** restrict adding them to groups. You can send them an invite link as message instead. + **%d users** restrict adding them to groups. You can send them an invite link as message instead. + **%d users** restrict adding them to groups. You can send them an invite link as message instead. + **%s** restrict adding them to channels. You can send them an invite link as message instead. + **%d user** restrict adding them to channels. You can send them an invite link as message instead. + **%d users** restrict adding them to channels. You can send them an invite link as message instead. + **%d users** restrict adding them to channels. You can send them an invite link as message instead. + **%d users** restrict adding them to channels. You can send them an invite link as message instead. + **%d users** restrict adding them to channels. You can send them an invite link as message instead. + **%s** can only be invited via link. However admin of this group restricts you from sharing invite links. + **%d user** can only be invited via link. However admin of this group restricts you from sharing invite links. + **%d users** can only be invited via link. However admin of this group restricts you from sharing invite links. + **%d users** can only be invited via link. However admin of this group restricts you from sharing invite links. + **%d users** can only be invited via link. However admin of this group restricts you from sharing invite links. + **%d users** can only be invited via link. However admin of this group restricts you from sharing invite links. + **%s** can only be invited via link. However admin of this channel restricts you from sharing invite links. + **%d user** can only be invited via link. However admin of this channel restricts you from sharing invite links. 
+ **%d users** can only be invited via link. However admin of this channel restricts you from sharing invite links. + **%d users** can only be invited via link. However admin of this channel restricts you from sharing invite links. + **%d users** can only be invited via link. However admin of this channel restricts you from sharing invite links. + **%d users** can only be invited via link. However admin of this channel restricts you from sharing invite links. + Invitation link sent to **%s** + Invitation link sent to %d user + Invitation link sent to %d users + Invitation link sent to %d users + Invitation link sent to %d users + Invitation link sent to %d users + Send Invite Link + Skip + You can\'t create a link + This user can\'t be invited + This users can\'t be invited + %1$s free, high speed + %1$s free, slow speed\ + %1$s free, slow speed %2$s + enabled when below %s%% + %s%% + Rate up + Rate down + Pause music while playing media + Checking + Can\'t access this email? + Email will be reset in %1$s.\n*Reset now via SMS* + %1$s + %1$s and %2$s + Reset Email + You can change your login email if you are logged into Telegram from another device. Otherwise, if you don\'t have access to email %1$s, you can reset this email with an **SMS** code in **%2$s**. + Reset Login Email + Telegram Premium Required + Due to the high cost of SMS in your country, you need a **Telegram Premium** account to immediately reset this email via an SMS code.\n\nYou can ask a friend to gift a Premium subscription for your account **%1$s** + Resetting email, please wait... + Decrease speed + Storing on SD card will make Telegram slow. Proceed anyway? + Proceed + SD Card Error + Telegram is unable to save data on your SD card. + Do not use SD card diff --git a/TMessagesProj/src/main/res/values/strings_neko.xml b/TMessagesProj/src/main/res/values/strings_neko.xml index 8a49586620..f5bf2445b3 100644 --- a/TMessagesProj/src/main/res/values/strings_neko.xml +++ b/TMessagesProj/src/main/res/values/strings_neko.xml @@ -108,7 +108,6 @@ DeepL Translate Microsoft Translator YouDao Translate - Telegram Press title to open \"All Chats\" tab Don\'t send greeting sticker Take GIF as video @@ -194,5 +193,6 @@ Emoji set applied. %1$s is now your emoji set. Emoji set already applied. 
- Use Telegram Translate in Chat + Use Telegram Translate UI in Chat + Telegram API diff --git a/TMessagesProj/src/main/res/xml/provider_paths.xml b/TMessagesProj/src/main/res/xml/provider_paths.xml index 37580c428c..adaa1298ec 100644 --- a/TMessagesProj/src/main/res/xml/provider_paths.xml +++ b/TMessagesProj/src/main/res/xml/provider_paths.xml @@ -1,4 +1,8 @@ - + + + + + \ No newline at end of file diff --git a/TMessagesProj_AppHockeyApp/build.gradle b/TMessagesProj_AppHockeyApp/build.gradle index d055126a9e..ef33612c73 100644 --- a/TMessagesProj_AppHockeyApp/build.gradle +++ b/TMessagesProj_AppHockeyApp/build.gradle @@ -67,8 +67,8 @@ android { buildTypes { HA_private { - debuggable false - jniDebuggable false + debuggable true + jniDebuggable true signingConfig signingConfigs.debug applicationIdSuffix ".beta" minifyEnabled true diff --git a/TMessagesProj_AppHockeyApp/src/main/java/org/telegram/messenger/ApplicationLoaderImpl.java b/TMessagesProj_AppHockeyApp/src/main/java/org/telegram/messenger/ApplicationLoaderImpl.java index 48ff7dc821..1a7503e189 100644 --- a/TMessagesProj_AppHockeyApp/src/main/java/org/telegram/messenger/ApplicationLoaderImpl.java +++ b/TMessagesProj_AppHockeyApp/src/main/java/org/telegram/messenger/ApplicationLoaderImpl.java @@ -9,6 +9,7 @@ import com.microsoft.appcenter.distribute.Distribute; import org.telegram.messenger.regular.BuildConfig; +import org.telegram.tgnet.TLRPC; public class ApplicationLoaderImpl extends ApplicationLoader { @Override @@ -30,6 +31,11 @@ protected void startAppCenterInternal(Activity context) { throw new RuntimeException("App Center hash is empty. add to local.properties field APP_CENTER_HASH_PRIVATE and APP_CENTER_HASH_PUBLIC"); } AppCenter.start(context.getApplication(), appHash, Distribute.class, Crashes.class); + Crashes.getMinidumpDirectory().thenAccept(path -> { + if (path != null) { + Utilities.setupNativeCrashesListener(path); + } + }); AppCenter.setUserId("uid=" + UserConfig.getInstance(UserConfig.selectedAccount).clientUserId); } } catch (Throwable e) { diff --git a/apkdiff.py b/apkdiff.py index ca12e169d5..6da753ddcc 100644 --- a/apkdiff.py +++ b/apkdiff.py @@ -3,12 +3,12 @@ def compareFiles(first, second): while True: - firstBytes = first.read(4096); - secondBytes = second.read(4096); + firstBytes = first.read(4096) + secondBytes = second.read(4096) if firstBytes != secondBytes: return False - if firstBytes == b"": + if firstBytes == b"" and secondBytes == b"": break return True diff --git a/apkfrombundle.py b/apkfrombundle.py new file mode 100644 index 0000000000..a0f92e8ca3 --- /dev/null +++ b/apkfrombundle.py @@ -0,0 +1,71 @@ +import sys +from zipfile import ZipFile + +def compareFiles(first, second): + while True: + firstBytes = first.read(4096) + secondBytes = second.read(4096) + if firstBytes != secondBytes: + return False + + if firstBytes == b"" and secondBytes == b"": + break + + return True + +def remove_prefix(text, prefix): + if text.startswith(prefix): + return text[len(prefix):] + return text + +def compareApkFromBundle(bundle, apk): + FILES_TO_IGNORE = ["resources.arsc", "stamp-cert-sha256"] + + apkZip = ZipFile(apk, 'r') + bundleZip = ZipFile(bundle, 'r') + + firstList = list(filter(lambda info: info.filename not in FILES_TO_IGNORE, apkZip.infolist())) + secondList = list(filter(lambda secondInfo: secondInfo.filename not in FILES_TO_IGNORE, bundleZip.infolist())) + + for apkInfo in firstList: + if (apkInfo.filename.startswith("META-INF/")): + continue + if (apkInfo.filename.startswith("res/")): + continue + 
if (apkInfo.filename.startswith("AndroidManifest.xml")): + continue + + found = False + for bundleInfo in secondList: + fileName = bundleInfo.filename + fileName = remove_prefix(fileName, "base/root/") + fileName = remove_prefix(fileName, "base/dex/") + fileName = remove_prefix(fileName, "base/manifest/") + fileName = remove_prefix(fileName, "base/") + if (fileName.startswith("BUNDLE-METADATA")): + fileName = "META-INF" + remove_prefix(fileName, "BUNDLE-METADATA/") + if fileName == apkInfo.filename: + found = True + firstFile = apkZip.open(apkInfo, 'r') + secondFile = bundleZip.open(bundleInfo, 'r') + if compareFiles(firstFile, secondFile) != True: + print("APK file %s does not match" % apkInfo.filename) + return False + break + + if found == False: + print("file %s not found in APK" % apkInfo.filename) + return False + + return True + +if __name__ == '__main__': + if len(sys.argv) != 3: + print("Usage: apkfrombundle ") + sys.exit(1) + + + if sys.argv[1] == sys.argv[2] or compareApkFromBundle(sys.argv[1], sys.argv[2]) == True: + print("APK from bundle!") + else: + print("APK has difference!")

Tracks with this flag generally provide translation for elements that don't match the + * declared language of the selected audio track (e.g. speech in an alien language). See Netflix's summary + * for more info. */ - public static final int SELECTION_FLAG_DEFAULT = 1; - /** Indicates that the track must be displayed. Only applies to text tracks. */ public static final int SELECTION_FLAG_FORCED = 1 << 1; // 2 /** * Indicates that the player may choose to play the track in absence of an explicit user @@ -591,52 +737,96 @@ private C() {} public static final String LANGUAGE_UNDETERMINED = "und"; /** - * Represents a streaming or other media type. One of {@link #TYPE_DASH}, {@link #TYPE_SS}, {@link - * #TYPE_HLS} or {@link #TYPE_OTHER}. - */ + * Represents a streaming or other media type. One of: + * + *